author     Chunseok Lee <chunseok.lee@samsung.com>  2020-03-05 15:10:09 +0900
committer  Chunseok Lee <chunseok.lee@samsung.com>  2020-03-05 15:22:53 +0900
commit     d91a039e0eda6fd70dcd22672b8ce1817c1ca50e (patch)
tree       62668ec548cf31fadbbf4e99522999ad13434a25
parent     bd11b24234d7d43dfe05a81c520aa01ffad06e42 (diff)
download   nnfw-d91a039e0eda6fd70dcd22672b8ce1817c1ca50e.tar.gz
           nnfw-d91a039e0eda6fd70dcd22672b8ce1817c1ca50e.tar.bz2
           nnfw-d91a039e0eda6fd70dcd22672b8ce1817c1ca50e.zip
catch up to tizen_5.5 and remove unnecessary dirs
- update to tizen_5.5
- remove dirs
-rw-r--r--  .ctags  1
-rw-r--r--  .gitattributes  1
-rw-r--r--  .gitignore  78
-rw-r--r--  .mailmap  57
-rw-r--r--  CMakeLists.txt  91
-rw-r--r--  CONTRIBTUTORS (renamed from contrib/tflite_classify/.FORMATCHECKED)  0
-rw-r--r--  COPYRIGHT  1
-rw-r--r--  LICENSE  47
-rw-r--r--  Makefile  265
-rw-r--r--  Makefile.template  183
-rw-r--r--  README.md  74
-rw-r--r--  cmake/ApplyCompileFlags.cmake  15
-rw-r--r--  cmake/CfgOptionFlags.cmake  26
-rw-r--r--  cmake/config/config_aarch64-linux.cmake  32
-rw-r--r--  cmake/config/config_aarch64-tizen.cmake  55
-rw-r--r--  cmake/config/config_arm64-android.cmake  44
-rw-r--r--  cmake/config/config_armv7l-linux.cmake  32
-rw-r--r--  cmake/config/config_armv7l-tizen.cmake  60
-rw-r--r--  cmake/config/config_x86_64-linux.cmake  7
-rw-r--r--  cmake/modules/ExternalSourceTools.cmake  58
-rw-r--r--  cmake/option/identify_platform.cmake  48
-rw-r--r--  cmake/option/option_aarch64-linux.cmake  16
-rw-r--r--  cmake/option/option_aarch64-tizen.cmake  15
-rw-r--r--  cmake/option/option_arm64-android.cmake  9
-rw-r--r--  cmake/option/option_armv7l-linux.cmake  32
-rw-r--r--  cmake/option/option_armv7l-tizen.cmake  20
-rw-r--r--  cmake/option/option_linux.cmake  33
-rw-r--r--  cmake/option/option_x86_64-linux.cmake  12
-rw-r--r--  cmake/packages/ARMComputeConfig.cmake  81
-rw-r--r--  cmake/packages/ARMComputeSourceConfig.cmake  18
-rw-r--r--  cmake/packages/AbslSourceConfig.cmake  19
-rw-r--r--  cmake/packages/EigenSourceConfig.cmake  19
-rw-r--r--  cmake/packages/FarmhashSourceConfig.cmake  19
-rw-r--r--  cmake/packages/FlatBuffersSourceConfig.cmake  19
-rw-r--r--  cmake/packages/GEMMLowpSourceConfig.cmake  19
-rw-r--r--  cmake/packages/GTestConfig.cmake  49
-rw-r--r--  cmake/packages/NEON2SSESourceConfig.cmake  19
-rw-r--r--  cmake/packages/NoniusConfig.cmake  18
-rw-r--r--  cmake/packages/NoniusSourceConfig.cmake  18
-rw-r--r--  cmake/packages/TensorFlowSourceConfig.cmake  18
-rw-r--r--  cmake/packages/TensorflowConfig.cmake  44
-rw-r--r--  contrib/CMakeLists.txt  6
-rw-r--r--  contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h  65
-rw-r--r--  contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h  56
-rw-r--r--  contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp  142
-rw-r--r--  contrib/benchmark_acl/src/Benchmark.cpp  74
-rw-r--r--  contrib/detection/detection.cpp  73
-rw-r--r--  contrib/labs/jniacl/src/io_accessor.cc  100
-rw-r--r--  contrib/labs/jniacl/src/io_accessor.h  93
-rw-r--r--  contrib/labs/jniacl/src/jniacl_main.cc  39
-rw-r--r--  contrib/labs/kerneltesting/CMakeLists.txt  19
-rw-r--r--  contrib/labs/kerneltesting/conv2d/CMakeLists.txt  15
-rw-r--r--  contrib/labs/kerneltesting/conv2d/OperationUtils.h  90
-rw-r--r--  contrib/labs/kerneltesting/conv2d/common.h  89
-rw-r--r--  contrib/labs/kerneltesting/conv2d/compatibility.h  78
-rw-r--r--  contrib/labs/kerneltesting/conv2d/io_accessor.cpp  124
-rw-r--r--  contrib/labs/kerneltesting/conv2d/io_accessor.h  104
-rw-r--r--  contrib/labs/kerneltesting/conv2d/nnfw_conv2d_test.cpp  607
-rw-r--r--  contrib/labs/kerneltesting/conv2d/optimized_ops.h  339
-rw-r--r--  contrib/labs/kerneltesting/conv2d/types.h  146
-rw-r--r--  contrib/labs/opencl_test/CMakeLists.txt  12
-rw-r--r--  contrib/labs/opencl_test/src/opencl_test.cc  397
-rw-r--r--  contrib/labs/tflite_examples/src/conv.cpp  330
-rw-r--r--  contrib/tflite_classify/CMakeLists.txt  22
-rw-r--r--  contrib/tflite_classify/src/InferenceInterface.h  93
-rw-r--r--  contrib/tflite_test/tflite_test.cpp  236
-rw-r--r--  docs/HowToContribute.md  24
-rw-r--r--  docs/doxygen/Doxyfile  2500
-rw-r--r--  docs/fig/compiler_flow.png (renamed from include/.FORMATCHECKED)  0
-rw-r--r--  docs/fig/nnfw_compiler_structure.png (renamed from libs/.FORMATCHECKED)  0
-rw-r--r--  docs/fig/nnfw_compiler_structure.pptx (renamed from runtimes/neurun/.FORMATCHECKED)  0
-rw-r--r--  docs/fig/nnfw_components.png (renamed from runtimes/pure_arm_compute/.FORMATCHECKED)  0
-rw-r--r--  docs/fig/nnfw_components.pptx (renamed from tests/.FORMATCHECKED)  0
-rw-r--r--  docs/fig/nnfw_nativeapi_flow.png (renamed from tools/.FORMATCHECKED)  0
-rw-r--r--  docs/fig/nnfw_nativeapi_flow.pptx  0
-rw-r--r--  docs/fig/nnfw_nnapi_flow.png  0
-rw-r--r--  docs/fig/nnfw_nnapi_flow.pptx  0
-rw-r--r--  docs/fig/nnfw_runtime_behavior.png  0
-rw-r--r--  docs/fig/nnfw_runtime_behavior.pptx  0
-rw-r--r--  docs/fig/nnfw_runtime_structure.png  0
-rw-r--r--  docs/fig/nnfw_runtime_structure.pptx  0
-rw-r--r--  docs/fig/runtime_nativeapi_flow.png  0
-rw-r--r--  docs/howto.md  36
-rw-r--r--  docs/howto/CrossBuildForArm.md  107
-rw-r--r--  docs/howto/HowToUseDockerImage.md  168
-rw-r--r--  docs/nncc/design.md  10
-rw-r--r--  docs/nncc/getting_started.md  73
-rw-r--r--  docs/nncc/images/nncc_components.png  0
-rw-r--r--  docs/nncc/images/nncc_idef0_a0.png  0
-rw-r--r--  docs/nncc/images/nncc_idef0_a1.png  0
-rw-r--r--  docs/nncc/images/nncc_idef0_a12.png  0
-rw-r--r--  docs/nncc/project/detailed_level_design.md  329
-rw-r--r--  docs/nncc/project/development_document.md  257
-rw-r--r--  docs/nncc/project/high_level_design.md  457
-rw-r--r--  docs/nncc/project/requirements_specification.md  272
-rw-r--r--  docs/nncc/project/test_plan.md  442
-rw-r--r--  docs/nncc/project_guide.md  27
-rw-r--r--  docs/nncc/roadmap.md  6
-rw-r--r--  docs/nnfw/2018/fig/nnfw_architecture.png  0
-rw-r--r--  docs/nnfw/2018/fig/nnfw_architecture.pptx  0
-rw-r--r--  docs/nnfw/2018/project/2018_requirement_specification.md (renamed from docs/project/2018_requirement_specification.md)  0
-rw-r--r--  docs/nnfw/2018/roadmap.md (renamed from docs/roadmap.md)  0
-rw-r--r--  docs/nnfw/2018/workgroups.md (renamed from docs/workgroups.md)  0
-rw-r--r--  docs/nnfw/HowToImplementOperatorKernel.md (renamed from docs/HowToImplementOperatorKernel.md)  0
-rw-r--r--  docs/nnfw/fig/nnfw_architecture.png  0
-rw-r--r--  docs/nnfw/fig/nnfw_architecture.pptx  0
-rw-r--r--  docs/nnfw/fig/nnfw_behavior.png  0
-rw-r--r--  docs/nnfw/fig/nnfw_behavior.pptx  0
-rw-r--r--  docs/nnfw/howto.md  37
-rw-r--r--  docs/nnfw/howto/BuildTFfromSource.md (renamed from docs/howto/BuildTFfromSource.md)  0
-rw-r--r--  docs/nnfw/howto/CrossBuildForAarch64.md (renamed from docs/howto/CrossBuildForAarch64.md)  0
-rw-r--r--  docs/nnfw/howto/CrossBuildForArm.md  129
-rw-r--r--  docs/nnfw/howto/HowToAddUnittest.md (renamed from docs/howto/HowToAddUnittest.md)  0
-rw-r--r--  docs/nnfw/howto/HowToTestManualy.md  64
-rw-r--r--  docs/nnfw/howto/HowToUseDockerImage.md  158
-rw-r--r--  docs/nnfw/howto/HowtoMakeSampleAppOnNnfw.md  132
-rw-r--r--  docs/nnfw/howto/device/xu3-dip.png  0
-rw-r--r--  docs/nnfw/howto/device/xu3_tizen.md  140
-rw-r--r--  docs/nnfw/howto/device/xu3_ubuntu.md (renamed from docs/howto/device/xu3_ubuntu.md)  0
-rw-r--r--  docs/nnfw/howto/device/xu4_tizen.md (renamed from docs/howto/device/xu4_tizen.md)  0
-rw-r--r--  docs/nnfw/howto/device/xu4_ubuntu.md (renamed from docs/howto/device/xu4_ubuntu.md)  0
-rw-r--r--  docs/nnfw/project/2018_high_level_design.md (renamed from docs/project/2018_high_level_design.md)  0
-rw-r--r--  docs/nnfw/project/2019_requirement_specification.md  131
-rw-r--r--  docs/nnfw/roadmap.md  76
-rw-r--r--  docs/nnfw/tests/Convolution_manual_3x3.xlsx  0
-rw-r--r--  docs/nnfw/tests/Softmax_manual.xlsx  0
-rw-r--r--  docs/nnfw/workgroups.md  65
-rw-r--r--  externals/CMakeLists.txt  122
-rw-r--r--  externals/nnapi_test_generator/README.md  11
-rw-r--r--  include/NeuralNetworks.h  2578
-rw-r--r--  include/NeuralNetworksEx.h  670
-rw-r--r--  include/NeuralNetworksLoadHelpers.h  132
-rw-r--r--  infra/command/build-docker-image  46
-rw-r--r--  infra/command/format  198
-rw-r--r--  infra/command/gen-coverage-report  78
-rw-r--r--  infra/command/install-githooks  15
-rw-r--r--  infra/command/pylint  32
-rw-r--r--  infra/config/build.configuration  1
-rw-r--r--  infra/docker/Dockerfile  65
-rw-r--r--  infra/git-hooks/pre-push.sh  32
-rw-r--r--  infra/nncc/3rdparty/.gitignore  1
-rw-r--r--  infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default  1
-rw-r--r--  infra/nncc/3rdparty/Eigen/fd6845384b86/URL.info  3
-rw-r--r--  infra/nncc/CMakeLists.txt  144
-rw-r--r--  infra/nncc/cmake/modules/Asserts.cmake  8
-rw-r--r--  infra/nncc/cmake/modules/ExternalProjectTools.cmake  3
-rw-r--r--  infra/nncc/cmake/modules/ExternalSourceTools.cmake  97
-rw-r--r--  infra/nncc/cmake/modules/ListFile.cmake  12
-rw-r--r--  infra/nncc/cmake/modules/OptionTools.cmake  9
-rw-r--r--  infra/nncc/cmake/modules/OptionalTargetTools.cmake  5
-rw-r--r--  infra/nncc/cmake/modules/StampTools.cmake  18
-rw-r--r--  infra/nncc/cmake/modules/TargetRequire.cmake  45
-rw-r--r--  infra/nncc/cmake/modules/ThirdPartyTools.cmake  42
-rw-r--r--  infra/nncc/cmake/modules/add_subdirectories.cmake (renamed from cmake/modules/ExtendCMakeFunction.cmake)  0
-rw-r--r--  infra/nncc/cmake/packages/AbseilConfig.cmake  37
-rw-r--r--  infra/nncc/cmake/packages/AbseilSourceConfig.cmake  24
-rw-r--r--  infra/nncc/cmake/packages/Caffe/CMakeLists.txt  15
-rw-r--r--  infra/nncc/cmake/packages/CaffeConfig.cmake  62
-rw-r--r--  infra/nncc/cmake/packages/CaffeProto/CMakeLists.txt  6
-rw-r--r--  infra/nncc/cmake/packages/CaffeProtoConfig.cmake  24
-rw-r--r--  infra/nncc/cmake/packages/CaffeSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/EigenConfig.cmake  17
-rw-r--r--  infra/nncc/cmake/packages/EigenSource-fd6845384b86Config.cmake  26
-rw-r--r--  infra/nncc/cmake/packages/EigenSourceConfig.cmake  19
-rw-r--r--  infra/nncc/cmake/packages/Farmhash/CMakeLists.txt  3
-rw-r--r--  infra/nncc/cmake/packages/FarmhashConfig.cmake  17
-rw-r--r--  infra/nncc/cmake/packages/FarmhashSourceConfig.cmake  19
-rw-r--r--  infra/nncc/cmake/packages/FlatBuffersConfig.cmake  135
-rw-r--r--  infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/FlatBuffersSourceConfig.cmake  25
-rw-r--r--  infra/nncc/cmake/packages/FlatBuffersSourceConfigVersion.cmake  9
-rw-r--r--  infra/nncc/cmake/packages/GEMMLowpConfig.cmake  20
-rw-r--r--  infra/nncc/cmake/packages/GEMMLowpSourceConfig.cmake  19
-rw-r--r--  infra/nncc/cmake/packages/GFlagsConfig.cmake  35
-rw-r--r--  infra/nncc/cmake/packages/GFlagsSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/GLogConfig.cmake  24
-rw-r--r--  infra/nncc/cmake/packages/GTestConfig.cmake  86
-rw-r--r--  infra/nncc/cmake/packages/GTestSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake  52
-rw-r--r--  infra/nncc/cmake/packages/GoogleNSyncConfig.cmake  62
-rw-r--r--  infra/nncc/cmake/packages/LLVMConfig.cmake  45
-rw-r--r--  infra/nncc/cmake/packages/NEON2SSEConfig.cmake  17
-rw-r--r--  infra/nncc/cmake/packages/NEON2SSESourceConfig.cmake  19
-rw-r--r--  infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt  6
-rw-r--r--  infra/nncc/cmake/packages/ONNXProtoConfig.cmake  25
-rw-r--r--  infra/nncc/cmake/packages/ONNXRuntimeConfig.cmake  29
-rw-r--r--  infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake  20
-rw-r--r--  infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake  20
-rw-r--r--  infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake  20
-rw-r--r--  infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/ProtobufConfig.cmake  139
-rw-r--r--  infra/nncc/cmake/packages/ProtobufSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/PytorchSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowConfig.cmake  53
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowLite-1.12/Lite/CMakeLists.txt  41
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfig.cmake  62
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfigVersion.cmake  9
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt  38
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake  55
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfigVersion.cmake  9
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake  104
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfigVersion.cmake  9
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt  78
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh  6
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfig.cmake  18
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfigVersion.cmake  10
-rw-r--r--  infra/nncc/cmake/packages/TensorFlowVersionChecker.c  9
-rw-r--r--  infra/nncc/command/build  11
-rw-r--r--  infra/nncc/command/check-copyright  62
-rw-r--r--  infra/nncc/command/configure  10
-rw-r--r--  infra/nncc/command/docker-nncc  10
-rw-r--r--  infra/nncc/command/docker-run  10
-rw-r--r--  infra/nncc/command/docker-shell  11
-rw-r--r--  infra/nncc/command/test  13
-rw-r--r--  infra/nncc/config/build.configuration  1
-rw-r--r--  infra/nncc/config/docker.configuration  46
-rw-r--r--  infra/nnfw/CMakeLists.txt  107
-rw-r--r--  infra/nnfw/cmake/ApplyCompileFlags.cmake  33
-rw-r--r--  infra/nnfw/cmake/CfgOptionFlags.cmake  65
-rw-r--r--  infra/nnfw/cmake/buildtool/config/config_aarch64-linux.cmake  12
-rw-r--r--  infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake  13
-rw-r--r--  infra/nnfw/cmake/buildtool/config/config_arm64-android.cmake  9
-rw-r--r--  infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake  18
-rw-r--r--  infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake  18
-rw-r--r--  infra/nnfw/cmake/buildtool/config/config_linux.cmake  30
-rw-r--r--  infra/nnfw/cmake/buildtool/config/config_x86_64-linux.cmake  12
-rw-r--r--  infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake  39
-rw-r--r--  infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake  62
-rw-r--r--  infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake  44
-rw-r--r--  infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake  38
-rw-r--r--  infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake  66
-rw-r--r--  infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake  27
-rw-r--r--  infra/nnfw/cmake/modules/ExternalProjectTools.cmake (renamed from cmake/modules/ExternalProjectTools.cmake)  0
-rw-r--r--  infra/nnfw/cmake/modules/ExternalSourceTools.cmake  102
-rw-r--r--  infra/nnfw/cmake/modules/IdentifyPlatform.cmake  45
-rw-r--r--  infra/nnfw/cmake/modules/OptionTools.cmake (renamed from cmake/modules/OptionTools.cmake)  0
-rw-r--r--  infra/nnfw/cmake/options/options_aarch64-linux.cmake  8
-rw-r--r--  infra/nnfw/cmake/options/options_aarch64-tizen.cmake  10
-rw-r--r--  infra/nnfw/cmake/options/options_arm64-android.cmake  7
-rw-r--r--  infra/nnfw/cmake/options/options_armv7l-linux.cmake  8
-rw-r--r--  infra/nnfw/cmake/options/options_armv7l-tizen.cmake  12
-rw-r--r--  infra/nnfw/cmake/options/options_x86_64-linux.cmake  7
-rw-r--r--  infra/nnfw/cmake/packages/ARMCompute/SConstruct  309
-rw-r--r--  infra/nnfw/cmake/packages/ARMComputeConfig.cmake  205
-rw-r--r--  infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake  13
-rw-r--r--  infra/nnfw/cmake/packages/AbslSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/BoostConfig.cmake  58
-rw-r--r--  infra/nnfw/cmake/packages/BoostSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/EigenConfig.cmake (renamed from cmake/packages/EigenConfig.cmake)  0
-rw-r--r--  infra/nnfw/cmake/packages/EigenSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/Enum34SourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/FP16SourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/FXdivSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/FlatBuffersConfig.cmake (renamed from cmake/packages/FlatBuffersConfig.cmake)  0
-rw-r--r--  infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/GTestConfig.cmake  43
-rw-r--r--  infra/nnfw/cmake/packages/HDF5Config.cmake  31
-rw-r--r--  infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake  20
-rw-r--r--  infra/nnfw/cmake/packages/NNPACKConfig.cmake  51
-rw-r--r--  infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake  20
-rw-r--r--  infra/nnfw/cmake/packages/Nonius/html_report_template.g.h++  433
-rw-r--r--  infra/nnfw/cmake/packages/NoniusConfig.cmake  26
-rw-r--r--  infra/nnfw/cmake/packages/NoniusSourceConfig.cmake  13
-rw-r--r--  infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake  13
-rw-r--r--  infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/PeachpySourceConfig.cmake  31
-rw-r--r--  infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/SixSourceConfig.cmake  14
-rw-r--r--  infra/nnfw/cmake/packages/TensorFlowLite/CMakeLists.txt  64
-rw-r--r--  infra/nnfw/cmake/packages/TensorFlowLiteConfig.cmake  69
-rw-r--r--  infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake  18
-rw-r--r--  infra/nnfw/cmake/packages/TensorflowConfig.cmake  44
-rw-r--r--  infra/nnfw/command/build  24
-rw-r--r--  infra/nnfw/command/configure  12
-rw-r--r--  infra/nnfw/command/copyright-check  55
-rw-r--r--  infra/nnfw/command/docker-run  12
-rw-r--r--  infra/nnfw/command/docker-run-user  12
-rw-r--r--  infra/nnfw/command/docker-shell  13
-rw-r--r--  infra/nnfw/command/doxygen  7
-rw-r--r--  infra/nnfw/command/gen-coverage-report  59
-rw-r--r--  infra/nnfw/command/install  16
-rw-r--r--  infra/nnfw/config/build.configuration (renamed from scripts/config/build.configuration)  0
-rw-r--r--  infra/nnfw/config/docker.configuration  44
-rw-r--r--  infra/nnfw/config/gbs.conf  20
-rw-r--r--  infra/nnfw/doxygen/Doxyfile  2502
-rw-r--r--  infra/scripts/build_nnpkg.sh  19
-rw-r--r--  infra/scripts/docker_build_cross_arm_benchmark_model.sh  49
-rw-r--r--  infra/scripts/docker_build_cross_arm_neurun.sh  46
-rw-r--r--  infra/scripts/docker_build_cross_arm_neurun_release.sh  47
-rw-r--r--  infra/scripts/docker_build_cross_arm_pacl.sh  47
-rw-r--r--  infra/scripts/docker_build_cross_arm_pacl_release.sh  48
-rw-r--r--  infra/scripts/docker_build_cross_coverage.sh  47
-rw-r--r--  infra/scripts/docker_build_test_x64.sh  53
-rw-r--r--  infra/scripts/docker_build_tizen_cross.sh  49
-rw-r--r--  infra/scripts/docker_build_tizen_gbs.sh  29
-rw-r--r--  infra/scripts/docker_coverage_report.sh  23
-rw-r--r--  infra/scripts/test_arm_neurun_acl_cl.sh  36
-rw-r--r--  infra/scripts/test_arm_neurun_acl_neon.sh  40
-rw-r--r--  infra/scripts/test_arm_neurun_cpu.sh  47
-rw-r--r--  infra/scripts/test_arm_neurun_mixed.sh  44
-rw-r--r--  infra/scripts/test_arm_nnpkg.sh  24
-rw-r--r--  infra/scripts/test_arm_pacl.sh  24
-rw-r--r--  infra/scripts/test_coverage.sh  46
-rw-r--r--  infra/scripts/test_neurun_interp.sh  18
-rw-r--r--  infra/scripts/test_tizen_neurun_acl_cl.sh  30
-rw-r--r--  infra/scripts/test_tizen_neurun_mixed.sh  38
-rw-r--r--  infra/scripts/test_x64_neurun_cpu.sh  20
-rw-r--r--  infra/scripts/tizen_xu4_test.sh  161
-rw-r--r--  libs/ARMComputeEx/CMakeLists.txt  21
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/OpenCLEx.h  79
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLActivationLayerExKernel.h  78
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgMinMaxKernel.h  106
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArithmeticSubtractionExKernel.h  81
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBatchToSpaceNDKernel.h  58
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLComparisonOpKernel.h  61
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLExpKernel.h  57
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherKernel.h  104
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h  129
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNormalizationLayerExKernel.h  81
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPadLayerKernel.h  60
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPermuteExKernel.h  73
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPixelWiseDivisionKernel.h  125
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h  69
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSquaredDifferenceKernel.h  59
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/CL/kernels/CLStridedSliceExKernel.h  142
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/NEON/kernels/NENormalizationLayerExKernel.h  113
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/TypesEx.h  100
-rw-r--r--  libs/ARMComputeEx/arm_compute/core/UtilsEx.h  37
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLActivationLayerEx.h  63
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgMinMax.h  114
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArithmeticSubtractionEx.h  62
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLComparisonOp.h  42
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLExp.h  38
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGather.h  60
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNormalizationLayerEx.h  77
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPadLayerEx.h  47
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPermuteEx.h  51
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSquaredDifference.h  40
-rw-r--r--  libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NENormalizationLayerEx.h  83
-rw-r--r--  libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp  409
-rw-r--r--  libs/ARMComputeEx/src/core/CL/OpenCLEx.cpp  123
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/activation_layer_ex.cl  89
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl  94
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_ex.cl  74
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl  126
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/batch_to_space_nd.cl  70
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl  84
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl  146
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op.cl  86
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op_quantized.cl  93
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl  69
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl  84
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/exp.cl  57
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/gather.cl  98
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl  88
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl  48
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/pad.cl  86
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/permute_ex.cl  72
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_float.cl  88
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_int.cl  80
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl  111
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl  74
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl  88
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl  152
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl  163
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl  69
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/squared_difference.cl  75
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/strided_slice_ex.cl  63
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl  103
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl  130
-rw-r--r--  libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl  271
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLActivationLayerExKernel.cpp  211
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLArgMinMaxKernel.cpp  159
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLArithmeticSubtractionExKernel.cpp  216
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLBatchToSpaceNDKernel.cpp  117
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp  173
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp  102
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLComparisonOpKernel.cpp  212
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp  109
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLExpKernel.cpp  77
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLGatherKernel.cpp  129
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp  177
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp  89
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLNormalizationLayerExKernel.cpp  166
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp  185
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLPadLayerKernel.cpp  149
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLPermuteExKernel.cpp  126
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLPixelWiseDivisionKernel.cpp  280
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp  181
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp  238
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp  113
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLSquaredDifferenceKernel.cpp  170
-rw-r--r--  libs/ARMComputeEx/src/core/CL/kernels/CLStridedSliceExKernel.cpp  253
-rw-r--r--  libs/ARMComputeEx/src/core/NEON/kernels/NENormalizationLayerExKernel.cpp  294
-rw-r--r--  libs/ARMComputeEx/src/core/UtilsEx.cpp  34
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLActivationLayerEx.cpp  35
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLArgMinMax.cpp  120
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLArithmeticSubtractionEx.cpp  46
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLBatchToSpaceND.cpp  28
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLComparisonOp.cpp  40
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLExp.cpp  28
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLGather.cpp  34
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLNormalizationLayerEx.cpp  50
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLPadLayerEx.cpp  28
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLPermuteEx.cpp  36
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLPixelWiseDivision.cpp  49
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp  123
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLSquaredDifference.cpp  39
-rw-r--r--  libs/ARMComputeEx/src/runtime/CL/functions/CLStridedSliceEx.cpp  30
-rw-r--r--  libs/ARMComputeEx/src/runtime/NEON/functions/NENormalizationLayerEx.cpp  74
-rw-r--r--  libs/cpp14/include/cpp14/memory.h  29
-rw-r--r--  libs/misc/CMakeLists.txt  13
-rw-r--r--  libs/misc/examples/tensor_index_iterator.cpp  74
-rw-r--r--  libs/misc/include/misc/EnvVar.h  107
-rw-r--r--  libs/misc/include/misc/environment.h  130
-rw-r--r--  libs/misc/include/misc/kernel/RandomObject.h  77
-rw-r--r--  libs/misc/include/misc/tensor/Index.h  105
-rw-r--r--  libs/misc/include/misc/tensor/IndexEnumerator.h  131
-rw-r--r--  libs/misc/include/misc/tensor/Shape.h  152
-rw-r--r--  libs/misc/src/environment.cpp  95
-rw-r--r--  libs/misc/src/tensor/Comparator.cpp  40
-rw-r--r--  libs/misc/src/tensor/Shape.cpp  99
-rw-r--r--  libs/profiling/CMakeLists.txt  5
-rw-r--r--  libs/profiling/include/profiling/profile_buffer.h  170
-rw-r--r--  libs/profiling/include/profiling/profiler.h  203
-rw-r--r--  libs/profiling/include/profiling/time.h  35
-rw-r--r--  libs/profiling/src/profiling/time.cpp  55
-rw-r--r--  libs/tflite/CMakeLists.txt  12
-rw-r--r--  libs/tflite/include/tflite/Assert.h  45
-rw-r--r--  libs/tflite/include/tflite/Diff.h  199
-rw-r--r--  libs/tflite/include/tflite/FeatureView.h  106
-rw-r--r--  libs/tflite/include/tflite/Quantization.h  44
-rw-r--r--  libs/tflite/include/tflite/Session.h  69
-rw-r--r--  libs/tflite/include/tflite/TensorLogger.h  168
-rw-r--r--  libs/tflite/include/tflite/TensorUtils.h  54
-rw-r--r--  libs/tflite/include/tflite/TensorView.h  120
-rw-r--r--  libs/tflite/include/tflite/ext/kernels/Abs.h  41
-rw-r--r--  libs/tflite/include/tflite/ext/kernels/CustomOps.h  60
-rw-r--r--  libs/tflite/include/tflite/ext/kernels/SquaredDifference.h  76
-rw-r--r--  libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h  75
-rw-r--r--  libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h  41
-rw-r--r--  libs/tflite/include/tflite/ext/kernels/register.h  42
-rw-r--r--  libs/tflite/include/tflite/ext/nnapi_delegate.h  97
-rw-r--r--  libs/tflite/include/tflite/interp/Builder.h  53
-rw-r--r--  libs/tflite/include/tflite/interp/FlatBufferBuilder.h  64
-rw-r--r--  libs/tflite/include/tflite/interp/FunctionBuilder.h  67
-rw-r--r--  libs/tflite/src/Diff.cpp  598
-rw-r--r--  libs/tflite/src/TensorShapeUtils.cpp  48
-rw-r--r--  libs/tflite/src/ext/kernels/Abs.cpp  103
-rw-r--r--  libs/tflite/src/ext/kernels/SquaredDifference.cpp  112
-rw-r--r--  libs/tflite/src/ext/kernels/TensorFlowMax.cpp  405
-rw-r--r--  libs/tflite/src/ext/kernels/TensorFlowSum.cpp  400
-rw-r--r--  libs/tflite/src/ext/kernels/register.cpp  221
-rw-r--r--  libs/tflite/src/ext/nnapi_delegate.cpp  1209
-rw-r--r--  libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc  106
-rw-r--r--  libs/tflite/src/interp/FlatBufferBuilder.cpp  40
-rw-r--r--  libs/tflite/src/interp/FunctionBuilder.cpp  34
-rwxr-xr-x  nnas  38
-rwxr-xr-x  nncc  43
-rwxr-xr-x  nnfw  41
-rw-r--r--  nnpackage/examples/one_op_in_tflite/add.tflite  0
-rw-r--r--  nnpackage/examples/one_op_in_tflite/metadata/MANIFEST  7
-rw-r--r--  nnpackage/schema/circle_schema.fbs  804
-rw-r--r--  nnpackage/spec/00_requirement.md  28
-rw-r--r--  nnpackage/spec/10_packaging_and_manifest.md  92
-rw-r--r--  nnpackage/spec/20_model_and_operators.md  90
-rw-r--r--  nnpackage/spec/30_custom_op.md  86
-rw-r--r--  packaging/flatbuffers.tar.gz  0
-rw-r--r--  packaging/nnfw.spec  50
-rw-r--r--  res/BVLCCaffeTests/BatchNorm_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/BatchNorm_000/test.prototxt  17
-rw-r--r--  res/BVLCCaffeTests/Concat_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Concat_000/test.prototxt  23
-rw-r--r--  res/BVLCCaffeTests/Convolution_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_000/test.prototxt  23
-rw-r--r--  res/BVLCCaffeTests/Convolution_001/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_001/test.prototxt  24
-rw-r--r--  res/BVLCCaffeTests/Convolution_002/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_002/test.prototxt  24
-rw-r--r--  res/BVLCCaffeTests/Convolution_003/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_003/test.prototxt  25
-rw-r--r--  res/BVLCCaffeTests/Convolution_004/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_004/test.prototxt  24
-rw-r--r--  res/BVLCCaffeTests/Convolution_005/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_005/test.prototxt  21
-rw-r--r--  res/BVLCCaffeTests/Convolution_006/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_006/test.prototxt  24
-rw-r--r--  res/BVLCCaffeTests/Convolution_007/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Convolution_007/test.prototxt  26
-rw-r--r--  res/BVLCCaffeTests/Eltwise_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Eltwise_000/test.prototxt  20
-rw-r--r--  res/BVLCCaffeTests/Eltwise_001/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Eltwise_001/test.prototxt  20
-rw-r--r--  res/BVLCCaffeTests/Input_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Input_000/test.prototxt  8
-rw-r--r--  res/BVLCCaffeTests/Input_001/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Input_001/test.prototxt  12
-rw-r--r--  res/BVLCCaffeTests/Pooling_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Pooling_000/test.prototxt  16
-rw-r--r--  res/BVLCCaffeTests/Pooling_001/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Pooling_001/test.prototxt  17
-rw-r--r--  res/BVLCCaffeTests/Pooling_002/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Pooling_002/test.prototxt  16
-rw-r--r--  res/BVLCCaffeTests/Pooling_003/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Pooling_003/test.prototxt  17
-rw-r--r--  res/BVLCCaffeTests/Pooling_004/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Pooling_004/test.prototxt  17
-rw-r--r--  res/BVLCCaffeTests/Pooling_005/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Pooling_005/test.prototxt  17
-rw-r--r--  res/BVLCCaffeTests/ReLU_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/ReLU_000/test.prototxt  14
-rw-r--r--  res/BVLCCaffeTests/Regression_0000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Regression_0000/test.prototxt  17
-rw-r--r--  res/BVLCCaffeTests/Scale_000/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Scale_000/test.prototxt  16
-rw-r--r--  res/BVLCCaffeTests/Scale_001/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/Scale_001/test.prototxt  17
-rw-r--r--  res/BVLCCaffeTests/inception_c1/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/inception_c1/test.prototxt  252
-rw-r--r--  res/BVLCCaffeTests/residual/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/residual/test.prototxt  72
-rw-r--r--  res/BVLCCaffeTests/residual_bn/INFERENCE  0
-rw-r--r--  res/BVLCCaffeTests/residual_bn/test.prototxt  110
-rw-r--r--  res/TensorFlowTests/NET_0000/test.info  3
-rw-r--r--  res/TensorFlowTests/NET_0000/test.pbtxt  102
-rw-r--r--  res/TensorFlowTests/NET_0001/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0001/test.pbtxt  160
-rw-r--r--  res/TensorFlowTests/NET_0002/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0002/test.pbtxt  135
-rw-r--r--  res/TensorFlowTests/NET_0003/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0003/test.pbtxt  244
-rw-r--r--  res/TensorFlowTests/NET_0003/test.py  15
-rw-r--r--  res/TensorFlowTests/NET_0004/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0004/test.pbtxt  218
-rw-r--r--  res/TensorFlowTests/NET_0004/test.py  16
-rw-r--r--  res/TensorFlowTests/NET_0005/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0005/test.pbtxt  120
-rw-r--r--  res/TensorFlowTests/NET_0006/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0006/test.pbtxt  149
-rw-r--r--  res/TensorFlowTests/NET_0007/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0007/test.pbtxt  151
-rw-r--r--  res/TensorFlowTests/NET_0008/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0008/test.pbtxt  151
-rw-r--r--  res/TensorFlowTests/NET_0009/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0009/test.pbtxt  194
-rw-r--r--  res/TensorFlowTests/NET_0010/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0010/test.pbtxt  109
-rw-r--r--  res/TensorFlowTests/NET_0011/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0011/test.pbtxt  129
-rw-r--r--  res/TensorFlowTests/NET_0012/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0012/test.pbtxt  52
-rw-r--r--  res/TensorFlowTests/NET_0013/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0013/test.pbtxt  72
-rw-r--r--  res/TensorFlowTests/NET_0014/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0014/test.pbtxt  106
-rw-r--r--  res/TensorFlowTests/NET_0015/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0015/test.pbtxt  74
-rw-r--r--  res/TensorFlowTests/NET_0016/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0016/test.pbtxt  271
-rw-r--r--  res/TensorFlowTests/NET_0017/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0017/test.pbtxt  61
-rw-r--r--  res/TensorFlowTests/NET_0018/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0018/test.pbtxt  63
-rw-r--r--  res/TensorFlowTests/NET_0019/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0019/test.pbtxt  89
-rw-r--r--  res/TensorFlowTests/NET_0020/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0020/test.pbtxt  112
-rw-r--r--  res/TensorFlowTests/NET_0021/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0021/test.pbtxt  65
-rw-r--r--  res/TensorFlowTests/NET_0022/test.info  5
-rw-r--r--  res/TensorFlowTests/NET_0022/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/NET_0023/test.info  2
-rw-r--r--  res/TensorFlowTests/NET_0023/test.pbtxt  72
-rw-r--r--  res/TensorFlowTests/NET_0024/test.info  3
-rw-r--r--  res/TensorFlowTests/NET_0024/test.pbtxt  76
-rw-r--r--  res/TensorFlowTests/NET_0025/test.info  3
-rw-r--r--  res/TensorFlowTests/NET_0025/test.pbtxt  167
-rw-r--r--  res/TensorFlowTests/NET_0026/test.info  3
-rw-r--r--  res/TensorFlowTests/NET_0026/test.pbtxt  305
-rw-r--r--  res/TensorFlowTests/NET_0027/test.info  3
-rw-r--r--  res/TensorFlowTests/NET_0027/test.pbtxt  488
-rw-r--r--  res/TensorFlowTests/NET_0028/test.info  3
-rw-r--r--  res/TensorFlowTests/NET_0028/test.pbtxt  373
-rw-r--r--  res/TensorFlowTests/REGRESSION_0000/test.info  2
-rw-r--r--  res/TensorFlowTests/REGRESSION_0000/test.pbtxt  68
-rw-r--r--  res/TensorFlowTests/UNIT_Add_000/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Add_000/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_Add_001/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Add_001/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_Add_002/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Add_002/test.pbtxt  61
-rw-r--r--  res/TensorFlowTests/UNIT_Add_003/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Add_003/test.pbtxt  58
-rw-r--r--  res/TensorFlowTests/UNIT_Add_004/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Add_004/test.pbtxt  62
-rw-r--r--  res/TensorFlowTests/UNIT_Add_005/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Add_005/test.pbtxt  49
-rw-r--r--  res/TensorFlowTests/UNIT_AvgPool_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_AvgPool_000/test.pbtxt  74
-rw-r--r--  res/TensorFlowTests/UNIT_AvgPool_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_AvgPool_001/test.pbtxt  74
-rw-r--r--  res/TensorFlowTests/UNIT_BiasAdd_000/test.info  1
-rw-r--r--  res/TensorFlowTests/UNIT_BiasAdd_000/test.pbtxt  57
-rw-r--r--  res/TensorFlowTests/UNIT_BiasAdd_001/test.info  1
-rw-r--r--  res/TensorFlowTests/UNIT_BiasAdd_001/test.pbtxt  57
-rw-r--r--  res/TensorFlowTests/UNIT_BiasAdd_002/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_BiasAdd_002/test.pbtxt  51
-rw-r--r--  res/TensorFlowTests/UNIT_Concat_000/test.info  1
-rw-r--r--  res/TensorFlowTests/UNIT_Concat_000/test.pbtxt  110
-rw-r--r--  res/TensorFlowTests/UNIT_Concat_001/test.info  1
-rw-r--r--  res/TensorFlowTests/UNIT_Concat_001/test.pbtxt  143
-rw-r--r--  res/TensorFlowTests/UNIT_Concat_002/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Concat_002/test.pbtxt  92
-rw-r--r--  res/TensorFlowTests/UNIT_Const_000/test.info  1
-rw-r--r--  res/TensorFlowTests/UNIT_Const_000/test.pbtxt  32
-rw-r--r--  res/TensorFlowTests/UNIT_Const_001/test.info  1
-rw-r--r--  res/TensorFlowTests/UNIT_Const_001/test.pbtxt  32
-rw-r--r--  res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.pbtxt  136
-rw-r--r--  res/TensorFlowTests/UNIT_Conv2D_000/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Conv2D_000/test.pbtxt  76
-rw-r--r--  res/TensorFlowTests/UNIT_CustomOp_000/customop.conf  22
-rw-r--r--  res/TensorFlowTests/UNIT_CustomOp_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_CustomOp_000/test.pbtxt  53
-rw-r--r--  res/TensorFlowTests/UNIT_CustomOp_001/customop.conf  19
-rw-r--r--  res/TensorFlowTests/UNIT_CustomOp_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_CustomOp_001/test.pbtxt  38
-rw-r--r--  res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.pbtxt  115
-rw-r--r--  res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.pbtxt  115
-rw-r--r--  res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.pbtxt  158
-rw-r--r--  res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.pbtxt  158
-rw-r--r--  res/TensorFlowTests/UNIT_MaxPool_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_MaxPool_000/test.pbtxt  74
-rw-r--r--  res/TensorFlowTests/UNIT_MaxPool_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_MaxPool_001/test.pbtxt  48
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_000/test.pbtxt  68
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_001/test.pbtxt  68
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_002/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_002/test.pbtxt  68
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_003/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Mean_003/test.pbtxt  68
-rw-r--r--  res/TensorFlowTests/UNIT_Mul_000/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Mul_000/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_Mul_001/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Mul_001/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_Mul_002/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Mul_002/test.pbtxt  61
-rw-r--r--  res/TensorFlowTests/UNIT_Placeholder_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Placeholder_000/test.pbtxt  40
-rw-r--r--  res/TensorFlowTests/UNIT_RealDiv_000/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_RealDiv_000/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_RealDiv_001/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_RealDiv_001/test.pbtxt  61
-rw-r--r--  res/TensorFlowTests/UNIT_Relu6_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Relu6_000/test.pbtxt  30
-rw-r--r--  res/TensorFlowTests/UNIT_Relu_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Relu_000/test.pbtxt  40
-rw-r--r--  res/TensorFlowTests/UNIT_Reshape_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Reshape_000/test.pbtxt  63
-rw-r--r--  res/TensorFlowTests/UNIT_Rsqrt_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Rsqrt_000/test.pbtxt  40
-rw-r--r--  res/TensorFlowTests/UNIT_Shape_000/test.info  1
-rw-r--r--  res/TensorFlowTests/UNIT_Shape_000/test.pbtxt  41
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_000/test.pbtxt  32
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_001/test.pbtxt  35
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_002/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_002/test.pbtxt  38
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_003/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Softmax_003/test.pbtxt  41
-rw-r--r--  res/TensorFlowTests/UNIT_Sqrt_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Sqrt_000/test.pbtxt  40
-rw-r--r--  res/TensorFlowTests/UNIT_SquaredDifference_000/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_SquaredDifference_000/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_SquaredDifference_001/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_SquaredDifference_001/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_000/test.pbtxt  28
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_001/test.pbtxt  35
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_002/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_002/test.pbtxt  35
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_003/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Squeeze_003/test.pbtxt  35
-rw-r--r--  res/TensorFlowTests/UNIT_StopGradient_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_StopGradient_000/test.pbtxt  34
-rw-r--r--  res/TensorFlowTests/UNIT_StopGradient_001/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_StopGradient_001/test.pbtxt  40
-rw-r--r--  res/TensorFlowTests/UNIT_Sub_000/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Sub_000/test.pbtxt  70
-rw-r--r--  res/TensorFlowTests/UNIT_Sub_001/test.info  3
-rw-r--r--  res/TensorFlowTests/UNIT_Sub_001/test.pbtxt  61
-rw-r--r--  res/TensorFlowTests/UNIT_Tanh_000/test.info  2
-rw-r--r--  res/TensorFlowTests/UNIT_Tanh_000/test.pbtxt  40
-rwxr-xr-x  run  36
-rw-r--r--  runtimes/CMakeLists.txt  4
-rw-r--r--  runtimes/contrib/CMakeLists.txt  1
-rw-r--r--  runtimes/contrib/README.md (renamed from contrib/README.md)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/README.md (renamed from contrib/TFLiteSharp/README.md)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteNative/CMakeLists.txt (renamed from contrib/TFLiteSharp/TFLiteNative/CMakeLists.txt)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h  69
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h  55
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp  142
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteNative/tflite-native.pc.in (renamed from contrib/TFLiteSharp/TFLiteNative/tflite-native.pc.in)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp.sln (renamed from contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp.sln)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.Libraries.cs (renamed from contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.Libraries.cs)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.TFLite.cs (renamed from contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.TFLite.cs)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/TFLiteSharp.csproj (renamed from contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/TFLiteSharp.csproj)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Datatype.cs (renamed from contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Datatype.cs)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Interpreter.cs (renamed from contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Interpreter.cs)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest.sln (renamed from contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest.sln)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/Program.cs (renamed from contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/Program.cs)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/TFLiteSharpTest.csproj (renamed from contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/TFLiteSharpTest.csproj)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp.csproj (renamed from contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp.csproj)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_App.cs (renamed from contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_App.cs)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_Main.cs (renamed from contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_Main.cs)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mobilenet_v1_1.0_224.tflite  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse1.bmp  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse_224.bmp  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/shared/res/TFLiteTestApp.png  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/TFLiteTestApp/tizen-manifest.xml (renamed from contrib/TFLiteSharp/TFLiteTestApp/tizen-manifest.xml)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/packaging/TFLiteSharp.manifest (renamed from contrib/TFLiteSharp/packaging/TFLiteSharp.manifest)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/packaging/TFLiteSharp.spec (renamed from contrib/TFLiteSharp/packaging/TFLiteSharp.spec)  0
-rw-r--r--  runtimes/contrib/TFLiteSharp/packaging/tflite-native.manifest (renamed from contrib/TFLiteSharp/packaging/tflite-native.manifest)  0
-rw-r--r--  runtimes/contrib/android_tflite/CMakeLists.txt  45
-rw-r--r--  runtimes/contrib/benchmark_acl/.FORMATDENY  0
-rw-r--r--  runtimes/contrib/benchmark_acl/CMakeLists.txt (renamed from contrib/benchmark_acl/CMakeLists.txt)  0
-rw-r--r--  runtimes/contrib/benchmark_acl/src/Benchmark.cpp  74
-rw-r--r--  runtimes/contrib/benchmark_acl/src/Benchmark.h (renamed from contrib/benchmark_acl/src/Benchmark.h)  0
-rw-r--r--  runtimes/contrib/benchmark_acl/src/benchmark_googlenet.cpp (renamed from contrib/benchmark_acl/src/benchmark_googlenet.cpp)  0
-rw-r--r--  runtimes/contrib/benchmark_acl/src/benchmark_inception_v3.cpp (renamed from contrib/benchmark_acl/src/benchmark_inception_v3.cpp)  0
-rw-r--r--  runtimes/contrib/benchmark_acl/src/benchmark_mobilenet.cpp (renamed from contrib/benchmark_acl/src/benchmark_mobilenet.cpp)  0
-rw-r--r--  runtimes/contrib/custom_op/README.md  25
-rw-r--r--  runtimes/contrib/custom_op/customOp-workflow.png  0
-rw-r--r--  runtimes/contrib/detection/CMakeLists.txt (renamed from contrib/detection/CMakeLists.txt)  0
-rw-r--r--  runtimes/contrib/detection/detection.cpp  74
-rw-r--r--  runtimes/contrib/labs/CMakeLists.txt (renamed from contrib/labs/CMakeLists.txt)  0
-rw-r--r--  runtimes/contrib/labs/jniacl/CMakeLists.txt (renamed from contrib/labs/jniacl/CMakeLists.txt)  0
-rw-r--r--  runtimes/contrib/labs/jniacl/src/io_accessor.cc  96
-rw-r--r--  runtimes/contrib/labs/jniacl/src/io_accessor.h  93
-rw-r--r--  runtimes/contrib/labs/jniacl/src/jniacl_main.cc  37
-rw-r--r--  runtimes/contrib/labs/opencl_test/CMakeLists.txt  11
-rw-r--r--  runtimes/contrib/labs/opencl_test/README.md (renamed from contrib/labs/opencl_test/README.md)  0
-rw-r--r--  runtimes/contrib/labs/opencl_test/src/opencl_test.cc  386
-rw-r--r--  runtimes/contrib/labs/tflite_examples/CMakeLists.txt (renamed from contrib/labs/tflite_examples/CMakeLists.txt)  0
-rw-r--r--  runtimes/contrib/labs/tflite_examples/src/conv.cpp  330
-rw-r--r--  runtimes/contrib/mlapse/CMakeLists.txt  8
-rw-r--r--  runtimes/contrib/mlapse/README.md  3
-rw-r--r--  runtimes/contrib/mlapse/tfl/CMakeLists.txt  12
-rw-r--r--  runtimes/contrib/mlapse/tfl/driver.cc  280
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.cc  67
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.h  50
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.cc  24
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.h  77
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.cc  124
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.h  63
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.cc  17
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.h  75
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/tfl/load.cc  55
-rw-r--r--  runtimes/contrib/mlapse/tfl/mlapse/tfl/load.h  40
-rw-r--r--  runtimes/contrib/tflite_classify/CMakeLists.txt  22
-rw-r--r--  runtimes/contrib/tflite_classify/src/ImageClassifier.cc (renamed from contrib/tflite_classify/src/ImageClassifier.cc)  0
-rw-r--r--  runtimes/contrib/tflite_classify/src/ImageClassifier.h (renamed from contrib/tflite_classify/src/ImageClassifier.h)  0
-rw-r--r--  runtimes/contrib/tflite_classify/src/InferenceInterface.cc (renamed from contrib/tflite_classify/src/InferenceInterface.cc)  0
-rw-r--r--  runtimes/contrib/tflite_classify/src/InferenceInterface.h  93
-rw-r--r--  runtimes/contrib/tflite_classify/src/tflite_classify.cc (renamed from contrib/tflite_classify/src/tflite_classify.cc)  0
-rw-r--r--  runtimes/contrib/tflite_test/CMakeLists.txt (renamed from contrib/tflite_test/CMakeLists.txt)  0
-rw-r--r--  runtimes/contrib/tflite_test/tflite_test.cpp  239
-rw-r--r--  runtimes/contrib/uben/CMakeLists.txt  29
-rw-r--r--  runtimes/contrib/uben/Convolution.cpp  429
-rw-r--r--  runtimes/contrib/uben/Softmax.cpp  54
-rw-r--r--  runtimes/contrib/xtrace/CMakeLists.txt  16
-rw-r--r--  runtimes/contrib/xtrace/src/benchmark_event.cc  36
-rw-r--r--  runtimes/contrib/xtrace/src/benchmark_event.h  77
-rw-r--r--  runtimes/contrib/xtrace/src/benchmark_runner.cc  122
-rw-r--r--  runtimes/contrib/xtrace/src/benchmark_runner.h  37
-rw-r--r--  runtimes/contrib/xtrace/src/event_collector.cc  157
-rw-r--r--  runtimes/contrib/xtrace/src/event_collector.h  39
-rw-r--r--  runtimes/contrib/xtrace/src/event_recorder.cc  130
-rw-r--r--  runtimes/contrib/xtrace/src/event_recorder.h  69
-rw-r--r--  runtimes/contrib/xtrace/src/str.h  38
-rw-r--r--  runtimes/contrib/xtrace/src/xtrace.cc  64
-rw-r--r--  runtimes/include/NeuralNetworks.h  6444
-rw-r--r--  runtimes/include/NeuralNetworksEx.h  747
-rw-r--r--  runtimes/include/NeuralNetworksExtensions.h  117
-rw-r--r--  runtimes/include/nnfw.h  200
-rw-r--r--  runtimes/include/nnfw_dev.h  65
-rw-r--r--  runtimes/libs/ARMComputeEx/CMakeLists.txt  32
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/CLKernelLibraryEx.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/CLKernelLibraryEx.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgOperationKernel.h  101
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLCastKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLCastKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLEmbeddingLookupKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLEmbeddingLookupKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherExKernel.h  109
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h  129
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNegKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNegKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPReLUKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPReLUKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLReduceOperationKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLReduceOperationKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h  69
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTopKV2Kernel.h (renamed from libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTopKV2Kernel.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.h  85
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/CPP/kernels/CPPUpsampleKernelEx.h  72
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/NEON/NEElementwiseOperationFuncs.h  69
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEBinaryLogicalOperationKernel.h  70
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEElementwiseUnaryKernelEx.h  102
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEPReLUKernel.h  84
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/TypesEx.h  58
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/UtilsEx.h  47
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/core/utils/misc/ShapeCalculatorEx.h  129
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/CLFunctionsEx.h  41
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgOperation.h  106
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBatchToSpaceND.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBatchToSpaceND.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLCast.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLCast.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLDepthToSpace.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLDepthToSpace.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLEmbeddingLookup.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLEmbeddingLookup.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLFullyConnectedReshapingLayer.h  84
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGatherEx.h  62
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLHashtableLookup.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLHashtableLookup.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLLogicalNot.h  38
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNeg.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNeg.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPReLU.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPReLU.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPixelWiseDivision.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPixelWiseDivision.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLRNNLayerEx.h  104
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLReduceOperation.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLReduceOperation.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToBatchND.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToBatchND.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToDepth.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToDepth.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLStridedSliceEx.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLStridedSliceEx.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTopKV2.h (renamed from libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTopKV2.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayer.h  157
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayerUpsample.h  79
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/CPP/functions/CPPUpsampleEx.h  49
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/NEFunctionsEx.h  29
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEArgMinMax.h  81
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEBinaryLogicalOperation.h  98
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEElementwiseUnaryLayerEx.h  54
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEFullyConnectedReshapingLayer.h  83
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEPReLU.h  47
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NERNNLayerEx.h  114
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceMeanEx.h  83
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceSum.h  82
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NETransposeConvLayer.h  162
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericGather.h  84
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericReshapeLayer.h  87
-rw-r--r--  runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/Utils.h  112
-rw-r--r--  runtimes/libs/ARMComputeEx/resolve_includes.py (renamed from libs/ARMComputeEx/resolve_includes.py)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp  354
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl  113
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl  167
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl  106
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl  198
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl  161
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl  113
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/gather_ex.cl  139
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl  117
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers.h (renamed from libs/ARMComputeEx/src/core/CL/cl_kernels/helpers.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers_asymm.h (renamed from libs/ARMComputeEx/src/core/CL/cl_kernels/helpers_asymm.h)  0
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl  55
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl  135
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl  96
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl  114
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl  188
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl  250
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl  161
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl  98
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl  129
-rw-r--r--  runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl  269
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLArgOperationKernel.cpp157
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp172
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp102
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp116
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLEmbeddingLookupKernel.cpp (renamed from libs/ARMComputeEx/src/core/CL/kernels/CLEmbeddingLookupKernel.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLGatherExKernel.cpp181
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp178
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp88
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp186
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp179
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp241
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp124
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLTopKV2Kernel.cpp (renamed from libs/ARMComputeEx/src/core/CL/kernels/CLTopKV2Kernel.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.cpp164
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/CPP/kernels/CPPUpsampleKernelEx.cpp102
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/NEON/NEElementwiseOperationFuncs.cpp346
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEBinaryLogicalOperationKernel.cpp237
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEElementwiseUnaryKernelEx.cpp205
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEPReLUKernel.cpp274
-rw-r--r--runtimes/libs/ARMComputeEx/src/core/UtilsEx.cpp45
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/CLFunctionsEx.cpp20
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLArgOperation.cpp120
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLBinaryLogicalOp.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLBinaryLogicalOp.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLCast.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLCast.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLDepthToSpace.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLDepthToSpace.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLEmbeddingLookup.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLEmbeddingLookup.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLFullyConnectedReshapingLayer.cpp58
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLGatherEx.cpp36
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLHashtableLookup.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLHashtableLookup.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLNeg.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLNeg.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLPReLU.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLPReLU.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLRNNLayerEx.cpp147
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp125
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToBatchND.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToBatchND.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToDepth.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToDepth.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTopKV2.cpp (renamed from libs/ARMComputeEx/src/runtime/CL/functions/CLTopKV2.cpp)0
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayer.cpp238
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayerUpsample.cpp67
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/CPP/functions/CPPUpsampleEx.cpp37
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/NEFunctionsEx.cpp20
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEArgMinMax.cpp109
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEBinaryLogicalOperation.cpp70
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEElementwiseUnaryLayerEx.cpp44
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEFullyConnectedReshapingLayer.cpp56
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEPReLU.cpp39
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NERNNLayerEx.cpp146
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceMeanEx.cpp164
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceSum.cpp165
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NETransposeConvLayer.cpp307
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericGather.cpp92
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericReshapeLayer.cpp128
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/misc/functions/Utils.cpp38
-rw-r--r--runtimes/libs/ARMComputeEx/src/runtime/topk_v2.h (renamed from libs/ARMComputeEx/src/runtime/topk_v2.h)0
-rw-r--r--runtimes/libs/CMakeLists.txt (renamed from libs/CMakeLists.txt)0
-rw-r--r--runtimes/libs/cker/CMakeLists.txt2
-rw-r--r--runtimes/libs/cker/README.md7
-rw-r--r--runtimes/libs/cker/include/cker/Shape.h286
-rw-r--r--runtimes/libs/cker/include/cker/Types.h51
-rw-r--r--runtimes/libs/cker/include/cker/Utils.h67
-rw-r--r--runtimes/libs/cker/include/cker/gemmlowp/FixedPoint.h289
-rw-r--r--runtimes/libs/cker/include/cker/operation/Add.h95
-rw-r--r--runtimes/libs/cker/include/cker/operation/AveragePool.h160
-rw-r--r--runtimes/libs/cker/include/cker/operation/Concatenation.h93
-rw-r--r--runtimes/libs/cker/include/cker/operation/Conv.h217
-rw-r--r--runtimes/libs/cker/include/cker/operation/DepthwiseConv.h217
-rw-r--r--runtimes/libs/cker/include/cker/operation/FullyConnected.h144
-rw-r--r--runtimes/libs/cker/include/cker/operation/MaxPool.h150
-rw-r--r--runtimes/libs/cker/include/cker/operation/SoftMax.h164
-rw-r--r--runtimes/libs/cpp14/CMakeLists.txt (renamed from libs/cpp14/CMakeLists.txt)0
-rw-r--r--runtimes/libs/cpp14/include/cpp14/memory.h66
-rw-r--r--runtimes/libs/jsoncpp/CMakeLists.txt6
-rw-r--r--runtimes/libs/jsoncpp/README.md11
-rw-r--r--runtimes/libs/jsoncpp/json/json-forwards.h315
-rw-r--r--runtimes/libs/jsoncpp/json/json.h2133
-rw-r--r--runtimes/libs/jsoncpp/jsoncpp.cpp5651
-rw-r--r--runtimes/libs/misc/CMakeLists.txt14
-rw-r--r--runtimes/libs/misc/examples/tensor_index_iterator.cpp74
-rw-r--r--runtimes/libs/misc/include/misc/EnvVar.h120
-rw-r--r--runtimes/libs/misc/include/misc/benchmark.h (renamed from libs/misc/include/misc/benchmark.h)0
-rw-r--r--runtimes/libs/misc/include/misc/feature/Index.h (renamed from libs/misc/include/misc/feature/Index.h)0
-rw-r--r--runtimes/libs/misc/include/misc/feature/IndexIterator.h (renamed from libs/misc/include/misc/feature/IndexIterator.h)0
-rw-r--r--runtimes/libs/misc/include/misc/feature/Object.h (renamed from libs/misc/include/misc/feature/Object.h)0
-rw-r--r--runtimes/libs/misc/include/misc/feature/Reader.h (renamed from libs/misc/include/misc/feature/Reader.h)0
-rw-r--r--runtimes/libs/misc/include/misc/feature/Shape.h (renamed from libs/misc/include/misc/feature/Shape.h)0
-rw-r--r--runtimes/libs/misc/include/misc/feature/TextFormatter.h (renamed from libs/misc/include/misc/feature/TextFormatter.h)0
-rw-r--r--runtimes/libs/misc/include/misc/fp32.h (renamed from libs/misc/include/misc/fp32.h)0
-rw-r--r--runtimes/libs/misc/include/misc/kernel/IndexIterator.h (renamed from libs/misc/include/misc/kernel/IndexIterator.h)0
-rw-r--r--runtimes/libs/misc/include/misc/kernel/Reader.h (renamed from libs/misc/include/misc/kernel/Reader.h)0
-rw-r--r--runtimes/libs/misc/include/misc/kernel/Shape.h (renamed from libs/misc/include/misc/kernel/Shape.h)0
-rw-r--r--runtimes/libs/misc/include/misc/matrix/IndexIterator.h (renamed from libs/misc/include/misc/matrix/IndexIterator.h)0
-rw-r--r--runtimes/libs/misc/include/misc/matrix/Reader.h (renamed from libs/misc/include/misc/matrix/Reader.h)0
-rw-r--r--runtimes/libs/misc/include/misc/matrix/Shape.h (renamed from libs/misc/include/misc/matrix/Shape.h)0
-rw-r--r--runtimes/libs/misc/include/misc/polymorphic_downcast.h43
-rw-r--r--runtimes/libs/misc/include/misc/string_helpers.h45
-rw-r--r--runtimes/libs/misc/include/misc/tensor/Comparator.h (renamed from libs/misc/include/misc/tensor/Comparator.h)0
-rw-r--r--runtimes/libs/misc/include/misc/tensor/Diff.h (renamed from libs/misc/include/misc/tensor/Diff.h)0
-rw-r--r--runtimes/libs/misc/include/misc/tensor/Index.h107
-rw-r--r--runtimes/libs/misc/include/misc/tensor/IndexEnumerator.h131
-rw-r--r--runtimes/libs/misc/include/misc/tensor/IndexFormatter.h (renamed from libs/misc/include/misc/tensor/IndexFormatter.h)0
-rw-r--r--runtimes/libs/misc/include/misc/tensor/IndexIterator.h (renamed from libs/misc/include/misc/tensor/IndexIterator.h)0
-rw-r--r--runtimes/libs/misc/include/misc/tensor/NonIncreasingStride.h (renamed from libs/misc/include/misc/tensor/NonIncreasingStride.h)0
-rw-r--r--runtimes/libs/misc/include/misc/tensor/Object.h (renamed from libs/misc/include/misc/tensor/Object.h)0
-rw-r--r--runtimes/libs/misc/include/misc/tensor/Reader.h (renamed from libs/misc/include/misc/tensor/Reader.h)0
-rw-r--r--runtimes/libs/misc/include/misc/tensor/Shape.h150
-rw-r--r--runtimes/libs/misc/include/misc/tensor/Zipper.h (renamed from libs/misc/include/misc/tensor/Zipper.h)0
-rw-r--r--runtimes/libs/misc/include/misc/vector.h (renamed from libs/misc/include/misc/vector.h)0
-rw-r--r--runtimes/libs/misc/include/misc/vector/Object.h (renamed from libs/misc/include/misc/vector/Object.h)0
-rw-r--r--runtimes/libs/misc/include/misc/vector/Reader.h (renamed from libs/misc/include/misc/vector/Reader.h)0
-rw-r--r--runtimes/libs/misc/src/tensor/Comparator.cpp38
-rw-r--r--runtimes/libs/misc/src/tensor/IndexFormatter.cpp (renamed from libs/misc/src/tensor/IndexFormatter.cpp)0
-rw-r--r--runtimes/libs/misc/src/tensor/NonIncreasingStride.cpp (renamed from libs/misc/src/tensor/NonIncreasingStride.cpp)0
-rw-r--r--runtimes/libs/misc/src/tensor/Shape.cpp107
-rw-r--r--runtimes/libs/nnapi/CMakeLists.txt3
-rw-r--r--runtimes/libs/nnapi/v1.1/CMakeLists.txt4
-rw-r--r--runtimes/libs/nnapi/v1.1/include/NeuralNetworksExShim.h (renamed from include/NeuralNetworksExShim.h)0
-rw-r--r--runtimes/libs/nnapi/v1.1/include/NeuralNetworksLoadHelpers.h141
-rw-r--r--runtimes/libs/nnapi/v1.1/include/NeuralNetworksShim.h (renamed from include/NeuralNetworksShim.h)0
-rw-r--r--runtimes/libs/nnapi/v1.2/CMakeLists.txt4
-rw-r--r--runtimes/libs/nnapi/v1.2/include/NeuralNetworksExShim.h65
-rw-r--r--runtimes/libs/nnapi/v1.2/include/NeuralNetworksLoadHelpers.h138
-rw-r--r--runtimes/libs/nnapi/v1.2/include/NeuralNetworksShim.h1136
-rw-r--r--runtimes/libs/nnapi/v1.2/include/NeuralNetworksTypes.h163
-rw-r--r--runtimes/libs/profiling/CMakeLists.txt7
-rw-r--r--runtimes/libs/profiling/include/profiling/profile_buffer.h170
-rw-r--r--runtimes/libs/profiling/include/profiling/profiler.h203
-rw-r--r--runtimes/libs/profiling/include/profiling/profiling.h (renamed from libs/profiling/include/profiling/profiling.h)0
-rw-r--r--runtimes/libs/profiling/include/profiling/time.h35
-rw-r--r--runtimes/libs/profiling/src/profiling/time.cpp55
-rw-r--r--runtimes/libs/rua/CMakeLists.txt4
-rw-r--r--runtimes/libs/rua/README.md4
-rw-r--r--runtimes/libs/rua/anchor/CMakeLists.txt9
-rw-r--r--runtimes/libs/rua/anchor/include/rua/Anchor.h38
-rw-r--r--runtimes/libs/rua/anchor/src/Anchor.cpp33
-rw-r--r--runtimes/libs/rua/core/CMakeLists.txt3
-rw-r--r--runtimes/libs/rua/core/include/rua/Service.h158
-rw-r--r--runtimes/libs/rua/dyn/CMakeLists.txt8
-rw-r--r--runtimes/libs/rua/dyn/include/rua/DynamicBinder.h35
-rw-r--r--runtimes/libs/rua/dyn/src/DynamicBinder.cpp353
-rw-r--r--runtimes/libs/rua/shim/CMakeLists.txt4
-rw-r--r--runtimes/libs/rua/shim/include/rua/Shim.h192
-rw-r--r--runtimes/libs/srcn/CMakeLists.txt24
-rw-r--r--runtimes/libs/srcn/include/srcn/conv_type.h74
-rw-r--r--runtimes/libs/srcn/include/srcn/srcn_conv.h65
-rw-r--r--runtimes/libs/srcn/src/common.h162
-rw-r--r--runtimes/libs/srcn/src/conv_sgemm_multithreads.cc483
-rw-r--r--runtimes/libs/srcn/src/conv_sgemm_multithreads.h86
-rw-r--r--runtimes/libs/srcn/src/conv_sgemm_singlethread.cc366
-rw-r--r--runtimes/libs/srcn/src/conv_sgemm_singlethread.h73
-rw-r--r--runtimes/libs/srcn/src/conv_sparse.cc271
-rw-r--r--runtimes/libs/srcn/src/conv_sparse.h79
-rw-r--r--runtimes/libs/srcn/src/conv_winograd.cc341
-rw-r--r--runtimes/libs/srcn/src/conv_winograd.h72
-rw-r--r--runtimes/libs/srcn/src/conv_winograd_batch.cc304
-rw-r--r--runtimes/libs/srcn/src/conv_winograd_batch.h67
-rw-r--r--runtimes/libs/srcn/src/deconv_sgemm_multithreads.cc387
-rw-r--r--runtimes/libs/srcn/src/deconv_sgemm_multithreads.h85
-rw-r--r--runtimes/libs/srcn/src/depthwise_conv.cc2639
-rw-r--r--runtimes/libs/srcn/src/direct_conv_colmajor.cc5872
-rw-r--r--runtimes/libs/srcn/src/direct_conv_colmajor.h33
-rw-r--r--runtimes/libs/srcn/src/sgemm_kernel.cc2508
-rw-r--r--runtimes/libs/srcn/src/sgemm_kernel.h52
-rw-r--r--runtimes/libs/srcn/src/sgemm_pack.cc2316
-rw-r--r--runtimes/libs/srcn/src/sgemm_pack.h73
-rw-r--r--runtimes/libs/srcn/src/sgemm_singlethread.cc689
-rw-r--r--runtimes/libs/srcn/src/sgemm_singlethread.h88
-rw-r--r--runtimes/libs/srcn/src/sgemm_test.cc1883
-rw-r--r--runtimes/libs/srcn/src/srcn_conv.cc614
-rw-r--r--runtimes/libs/srcn/src/winograd.h148
-rw-r--r--runtimes/libs/tflite/CMakeLists.txt26
-rw-r--r--runtimes/libs/tflite/include/tflite/Assert.h45
-rw-r--r--runtimes/libs/tflite/include/tflite/Diff.h200
-rw-r--r--runtimes/libs/tflite/include/tflite/FeatureView.h108
-rw-r--r--runtimes/libs/tflite/include/tflite/InputIndex.h (renamed from libs/tflite/include/tflite/InputIndex.h)0
-rw-r--r--runtimes/libs/tflite/include/tflite/InterpreterSession.h (renamed from libs/tflite/include/tflite/InterpreterSession.h)0
-rw-r--r--runtimes/libs/tflite/include/tflite/NNAPISession.h (renamed from libs/tflite/include/tflite/NNAPISession.h)0
-rw-r--r--runtimes/libs/tflite/include/tflite/OutputIndex.h (renamed from libs/tflite/include/tflite/OutputIndex.h)0
-rw-r--r--runtimes/libs/tflite/include/tflite/Quantization.h44
-rw-r--r--runtimes/libs/tflite/include/tflite/Session.h69
-rw-r--r--runtimes/libs/tflite/include/tflite/TensorLogger.h168
-rw-r--r--runtimes/libs/tflite/include/tflite/TensorShapeUtils.h (renamed from libs/tflite/include/tflite/TensorShapeUtils.h)0
-rw-r--r--runtimes/libs/tflite/include/tflite/TensorUtils.h54
-rw-r--r--runtimes/libs/tflite/include/tflite/TensorView.h120
-rw-r--r--runtimes/libs/tflite/include/tflite/ext/kernels/Abs.h41
-rw-r--r--runtimes/libs/tflite/include/tflite/ext/kernels/CustomOps.h62
-rw-r--r--runtimes/libs/tflite/include/tflite/ext/kernels/SquaredDifference.h76
-rw-r--r--runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h75
-rw-r--r--runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h41
-rw-r--r--runtimes/libs/tflite/include/tflite/ext/kernels/register.h42
-rw-r--r--runtimes/libs/tflite/include/tflite/ext/nnapi_delegate.h96
-rw-r--r--runtimes/libs/tflite/include/tflite/interp/Builder.h53
-rw-r--r--runtimes/libs/tflite/include/tflite/interp/FlatBufferBuilder.h64
-rw-r--r--runtimes/libs/tflite/include/tflite/interp/FunctionBuilder.h67
-rw-r--r--runtimes/libs/tflite/src/Diff.cpp596
-rw-r--r--runtimes/libs/tflite/src/FeatureView.cpp (renamed from libs/tflite/src/FeatureView.cpp)0
-rw-r--r--runtimes/libs/tflite/src/Quantization.cpp (renamed from libs/tflite/src/Quantization.cpp)0
-rw-r--r--runtimes/libs/tflite/src/TensorShapeUtils.cpp29
-rw-r--r--runtimes/libs/tflite/src/TensorView.test.cpp (renamed from libs/tflite/src/TensorView.test.cpp)0
-rw-r--r--runtimes/libs/tflite/src/ext/kernels/Abs.cpp103
-rw-r--r--runtimes/libs/tflite/src/ext/kernels/SquaredDifference.cpp109
-rw-r--r--runtimes/libs/tflite/src/ext/kernels/TensorFlowMax.cpp405
-rw-r--r--runtimes/libs/tflite/src/ext/kernels/TensorFlowSum.cpp400
-rw-r--r--runtimes/libs/tflite/src/ext/kernels/register.cpp247
-rw-r--r--runtimes/libs/tflite/src/ext/nnapi_delegate.cpp1238
-rw-r--r--runtimes/libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc123
-rw-r--r--runtimes/libs/tflite/src/interp/FlatBufferBuilder.cpp40
-rw-r--r--runtimes/libs/tflite/src/interp/FunctionBuilder.cpp34
-rw-r--r--runtimes/libs/xdata/CMakeLists.txt7
-rw-r--r--runtimes/libs/xdata/README.md3
-rw-r--r--runtimes/libs/xdata/include/xdata.h22
-rw-r--r--runtimes/libs/xdata/include/xdata/trace.h95
-rw-r--r--runtimes/libs/xdata/src/trace.cpp42
-rw-r--r--runtimes/libs/xprobe/CMakeLists.txt9
-rw-r--r--runtimes/libs/xprobe/include/xprobe.h49
-rw-r--r--runtimes/libs/xprobe/include/xprobe/trace.h44
-rw-r--r--runtimes/libs/xprobe/src/trace.cpp71
-rw-r--r--runtimes/libs/xray/CMakeLists.txt3
-rw-r--r--runtimes/libs/xray/event/CMakeLists.txt2
-rw-r--r--runtimes/libs/xray/event/include/xray/event.h53
-rw-r--r--runtimes/libs/xray/event/include/xray/event_category.h33
-rw-r--r--runtimes/libs/xray/event/include/xray/event_code.h42
-rw-r--r--runtimes/libs/xray/mux/CMakeLists.txt9
-rw-r--r--runtimes/libs/xray/mux/include/xray/mux.h62
-rw-r--r--runtimes/libs/xray/mux/src/mux.cc34
-rw-r--r--runtimes/libs/xray/pipe/CMakeLists.txt3
-rw-r--r--runtimes/libs/xray/pipe/include/xray/pipe.h85
-rw-r--r--runtimes/logging/CMakeLists.txt5
-rw-r--r--runtimes/logging/src/nnapi_logging.cc35
-rw-r--r--runtimes/neurun/CMakeLists.txt65
-rw-r--r--runtimes/neurun/backend/CMakeLists.txt8
-rw-r--r--runtimes/neurun/backend/acl_cl/Backend.h64
-rw-r--r--runtimes/neurun/backend/acl_cl/CLTimer.h108
-rw-r--r--runtimes/neurun/backend/acl_cl/CMakeLists.txt21
-rw-r--r--runtimes/neurun/backend/acl_cl/Config.cc44
-rw-r--r--runtimes/neurun/backend/acl_cl/Config.h44
-rw-r--r--runtimes/neurun/backend/acl_cl/ConstantInitializer.cc214
-rw-r--r--runtimes/neurun/backend/acl_cl/ConstantInitializer.h60
-rw-r--r--runtimes/neurun/backend/acl_cl/KernelGenerator.cc2034
-rw-r--r--runtimes/neurun/backend/acl_cl/KernelGenerator.h105
-rw-r--r--runtimes/neurun/backend/acl_cl/PluginClassesAllocator.cc33
-rw-r--r--runtimes/neurun/backend/acl_cl/ShapeFixer.cc361
-rw-r--r--runtimes/neurun/backend/acl_cl/ShapeFixer.h105
-rw-r--r--runtimes/neurun/backend/acl_cl/TensorBuilder.h42
-rw-r--r--runtimes/neurun/backend/acl_cl/TensorManager.h84
-rw-r--r--runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.cc165
-rw-r--r--runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.h66
-rw-r--r--runtimes/neurun/backend/acl_cl/operand/CLSubTensor.cc44
-rw-r--r--runtimes/neurun/backend/acl_cl/operand/CLSubTensor.h63
-rw-r--r--runtimes/neurun/backend/acl_cl/operand/CLTensor.cc62
-rw-r--r--runtimes/neurun/backend/acl_cl/operand/CLTensor.h73
-rw-r--r--runtimes/neurun/backend/acl_cl/operand/ICLTensor.h49
-rw-r--r--runtimes/neurun/backend/acl_cl/operand/Object.cc46
-rw-r--r--runtimes/neurun/backend/acl_cl/operand/Object.h60
-rw-r--r--runtimes/neurun/backend/acl_common/AclFunction.h60
-rw-r--r--runtimes/neurun/backend/acl_common/AclInternalBufferManager.h97
-rw-r--r--runtimes/neurun/backend/acl_common/AclLinearMemoryManager.h110
-rw-r--r--runtimes/neurun/backend/acl_common/AclMemoryManager.h120
-rw-r--r--runtimes/neurun/backend/acl_common/AclTensorManager.h315
-rw-r--r--runtimes/neurun/backend/acl_common/CMakeLists.txt21
-rw-r--r--runtimes/neurun/backend/acl_common/Convert.cc196
-rw-r--r--runtimes/neurun/backend/acl_common/Convert.h72
-rw-r--r--runtimes/neurun/backend/acl_common/IACLTensor.cc63
-rw-r--r--runtimes/neurun/backend/acl_common/IACLTensor.h62
-rw-r--r--runtimes/neurun/backend/acl_common/Swizzle.h161
-rw-r--r--runtimes/neurun/backend/acl_common/TemplTensorBuilder.h617
-rw-r--r--runtimes/neurun/backend/acl_neon/Backend.h64
-rw-r--r--runtimes/neurun/backend/acl_neon/CMakeLists.txt21
-rw-r--r--runtimes/neurun/backend/acl_neon/Config.cc33
-rw-r--r--runtimes/neurun/backend/acl_neon/Config.h48
-rw-r--r--runtimes/neurun/backend/acl_neon/ConstantInitializer.cc189
-rw-r--r--runtimes/neurun/backend/acl_neon/ConstantInitializer.h57
-rw-r--r--runtimes/neurun/backend/acl_neon/KernelGenerator.cc1726
-rw-r--r--runtimes/neurun/backend/acl_neon/KernelGenerator.h97
-rw-r--r--runtimes/neurun/backend/acl_neon/PluginClassesAllocator.cc33
-rw-r--r--runtimes/neurun/backend/acl_neon/ShapeFixer.cc332
-rw-r--r--runtimes/neurun/backend/acl_neon/ShapeFixer.h96
-rw-r--r--runtimes/neurun/backend/acl_neon/TensorBuilder.h42
-rw-r--r--runtimes/neurun/backend/acl_neon/TensorManager.h83
-rw-r--r--runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.cc151
-rw-r--r--runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.h66
-rw-r--r--runtimes/neurun/backend/acl_neon/operand/INETensor.h45
-rw-r--r--runtimes/neurun/backend/acl_neon/operand/NESubTensor.cc44
-rw-r--r--runtimes/neurun/backend/acl_neon/operand/NESubTensor.h63
-rw-r--r--runtimes/neurun/backend/acl_neon/operand/NETensor.cc45
-rw-r--r--runtimes/neurun/backend/acl_neon/operand/NETensor.h62
-rw-r--r--runtimes/neurun/backend/cpu/Backend.h63
-rw-r--r--runtimes/neurun/backend/cpu/CMakeLists.txt29
-rw-r--r--runtimes/neurun/backend/cpu/Config.cc33
-rw-r--r--runtimes/neurun/backend/cpu/Config.h52
-rw-r--r--runtimes/neurun/backend/cpu/ConstantInitializer.cc83
-rw-r--r--runtimes/neurun/backend/cpu/ConstantInitializer.h54
-rw-r--r--runtimes/neurun/backend/cpu/KernelGenerator.cc455
-rw-r--r--runtimes/neurun/backend/cpu/KernelGenerator.h67
-rw-r--r--runtimes/neurun/backend/cpu/MemoryManager.cc93
-rw-r--r--runtimes/neurun/backend/cpu/MemoryManager.h65
-rw-r--r--runtimes/neurun/backend/cpu/MemoryPlanner.cc123
-rw-r--r--runtimes/neurun/backend/cpu/MemoryPlanner.h168
-rw-r--r--runtimes/neurun/backend/cpu/MemoryPlanner.test.cc127
-rw-r--r--runtimes/neurun/backend/cpu/MemoryPlannerFactory.cc47
-rw-r--r--runtimes/neurun/backend/cpu/MemoryPlannerFactory.h45
-rw-r--r--runtimes/neurun/backend/cpu/PluginClassesAllocator.cc33
-rw-r--r--runtimes/neurun/backend/cpu/ShapeFixer.cc94
-rw-r--r--runtimes/neurun/backend/cpu/ShapeFixer.h63
-rw-r--r--runtimes/neurun/backend/cpu/TensorBuilder.cc141
-rw-r--r--runtimes/neurun/backend/cpu/TensorBuilder.h92
-rw-r--r--runtimes/neurun/backend/cpu/TensorManager.cc100
-rw-r--r--runtimes/neurun/backend/cpu/TensorManager.h66
-rw-r--r--runtimes/neurun/backend/cpu/kernel/AddLayer.cc87
-rw-r--r--runtimes/neurun/backend/cpu/kernel/AddLayer.h76
-rw-r--r--runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.cc115
-rw-r--r--runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.h85
-rw-r--r--runtimes/neurun/backend/cpu/kernel/ConcatLayer.cc136
-rw-r--r--runtimes/neurun/backend/cpu/kernel/ConcatLayer.h73
-rw-r--r--runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.cc139
-rw-r--r--runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.h86
-rw-r--r--runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.cc138
-rw-r--r--runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.h89
-rw-r--r--runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.cc119
-rw-r--r--runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.h76
-rw-r--r--runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.cc115
-rw-r--r--runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.h85
-rw-r--r--runtimes/neurun/backend/cpu/kernel/OperationUtils.cc239
-rw-r--r--runtimes/neurun/backend/cpu/kernel/OperationUtils.h149
-rw-r--r--runtimes/neurun/backend/cpu/kernel/PermuteLayer.cc68
-rw-r--r--runtimes/neurun/backend/cpu/kernel/PermuteLayer.h211
-rw-r--r--runtimes/neurun/backend/cpu/kernel/ReshapeLayer.cc54
-rw-r--r--runtimes/neurun/backend/cpu/kernel/ReshapeLayer.h65
-rw-r--r--runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.cc171
-rw-r--r--runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.h71
-rw-r--r--runtimes/neurun/backend/cpu/operand/Tensor.cc43
-rw-r--r--runtimes/neurun/backend/cpu/operand/Tensor.h76
-rw-r--r--runtimes/neurun/backend/hi_perf_cpu/CMakeLists.txt44
-rw-r--r--runtimes/neurun/backend/hi_perf_cpu/HighPerformanceBackend.test.cc42
-rw-r--r--runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.cc18
-rw-r--r--runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.h48
-rw-r--r--runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.cc18
-rw-r--r--runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.h45
-rw-r--r--runtimes/neurun/backend/srcn/Backend.h63
-rw-r--r--runtimes/neurun/backend/srcn/CMakeLists.txt20
-rw-r--r--runtimes/neurun/backend/srcn/Config.cc33
-rw-r--r--runtimes/neurun/backend/srcn/Config.h45
-rw-r--r--runtimes/neurun/backend/srcn/ConstantInitializer.cc145
-rw-r--r--runtimes/neurun/backend/srcn/ConstantInitializer.h56
-rw-r--r--runtimes/neurun/backend/srcn/KernelGenerator.cc102
-rw-r--r--runtimes/neurun/backend/srcn/KernelGenerator.h56
-rw-r--r--runtimes/neurun/backend/srcn/MemoryManager.cc93
-rw-r--r--runtimes/neurun/backend/srcn/MemoryManager.h65
-rw-r--r--runtimes/neurun/backend/srcn/MemoryPlanner.cc123
-rw-r--r--runtimes/neurun/backend/srcn/MemoryPlanner.h168
-rw-r--r--runtimes/neurun/backend/srcn/MemoryPlannerFactory.cc47
-rw-r--r--runtimes/neurun/backend/srcn/MemoryPlannerFactory.h45
-rw-r--r--runtimes/neurun/backend/srcn/PluginClassesAllocator.cc33
-rw-r--r--runtimes/neurun/backend/srcn/ShapeFixer.cc39
-rw-r--r--runtimes/neurun/backend/srcn/ShapeFixer.h52
-rw-r--r--runtimes/neurun/backend/srcn/TensorBuilder.cc116
-rw-r--r--runtimes/neurun/backend/srcn/TensorBuilder.h92
-rw-r--r--runtimes/neurun/backend/srcn/TensorManager.cc100
-rw-r--r--runtimes/neurun/backend/srcn/TensorManager.h66
-rw-r--r--runtimes/neurun/backend/srcn/kernel/OperationUtils.cc90
-rw-r--r--runtimes/neurun/backend/srcn/kernel/OperationUtils.h75
-rw-r--r--runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.cc122
-rw-r--r--runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.h80
-rw-r--r--runtimes/neurun/backend/srcn/operand/Tensor.cc43
-rw-r--r--runtimes/neurun/backend/srcn/operand/Tensor.h76
-rw-r--r--runtimes/neurun/core/CMakeLists.txt18
-rw-r--r--runtimes/neurun/core/include/backend/Backend.h65
-rw-r--r--runtimes/neurun/core/include/backend/CustomKernel.h82
-rw-r--r--runtimes/neurun/core/include/backend/CustomKernelRegistry.h50
-rw-r--r--runtimes/neurun/core/include/backend/ExecTime.h111
-rw-r--r--runtimes/neurun/core/include/backend/IConfig.h45
-rw-r--r--runtimes/neurun/core/include/backend/IConstantInitializer.h260
-rw-r--r--runtimes/neurun/core/include/backend/IKernelGenerator.h63
-rw-r--r--runtimes/neurun/core/include/backend/IMemoryManager.h49
-rw-r--r--runtimes/neurun/core/include/backend/IShapeFixer.h56
-rw-r--r--runtimes/neurun/core/include/backend/ITensorBuilder.h94
-rw-r--r--runtimes/neurun/core/include/backend/ITensorManager.h56
-rw-r--r--runtimes/neurun/core/include/backend/JSONExecTime.h96
-rw-r--r--runtimes/neurun/core/include/backend/operand/IObject.h42
-rw-r--r--runtimes/neurun/core/include/backend/operand/ITensor.h52
-rw-r--r--runtimes/neurun/core/include/backend/operand/Object.h57
-rw-r--r--runtimes/neurun/core/include/compiler/Compiler.h91
-rw-r--r--runtimes/neurun/core/include/compiler/IExecutionBuilder.h39
-rw-r--r--runtimes/neurun/core/include/compiler/SubTensorInfo.h83
-rw-r--r--runtimes/neurun/core/include/exec/Execution.h118
-rw-r--r--runtimes/neurun/core/include/exec/ExecutionObservers.h65
-rw-r--r--runtimes/neurun/core/include/exec/IExecutor.h72
-rw-r--r--runtimes/neurun/core/include/exec/IFunction.h37
-rw-r--r--runtimes/neurun/core/include/exec/IODescription.h64
-rw-r--r--runtimes/neurun/core/include/exec/NopFunction.h54
-rw-r--r--runtimes/neurun/core/include/graph/BackendSet.h40
-rw-r--r--runtimes/neurun/core/include/graph/Graph.h204
-rw-r--r--runtimes/neurun/core/include/graph/LowerInfoMap.h42
-rw-r--r--runtimes/neurun/core/include/graph/operand/LowerInfo.h93
-rw-r--r--runtimes/neurun/core/include/graph/operand/ParentInfo.h79
-rw-r--r--runtimes/neurun/core/include/graph/operand/PermuteFactor.h133
-rw-r--r--runtimes/neurun/core/include/graph/operation/LowerInfo.h54
-rw-r--r--runtimes/neurun/core/include/model/Data.h75
-rw-r--r--runtimes/neurun/core/include/model/DataType.h57
-rw-r--r--runtimes/neurun/core/include/model/Index.h42
-rw-r--r--runtimes/neurun/core/include/model/InternalType.h68
-rw-r--r--runtimes/neurun/core/include/model/Layout.h67
-rw-r--r--runtimes/neurun/core/include/model/Model.h40
-rw-r--r--runtimes/neurun/core/include/model/Operand.h121
-rw-r--r--runtimes/neurun/core/include/model/OperandConstraint.h61
-rw-r--r--runtimes/neurun/core/include/model/OperandIndexMap.h34
-rw-r--r--runtimes/neurun/core/include/model/OperandIndexSequence.h59
-rw-r--r--runtimes/neurun/core/include/model/OperandInfo.h98
-rw-r--r--runtimes/neurun/core/include/model/Operands.h39
-rw-r--r--runtimes/neurun/core/include/model/Operation.h88
-rw-r--r--runtimes/neurun/core/include/model/OperationIndexList.h52
-rw-r--r--runtimes/neurun/core/include/model/OperationIndexMap.h34
-rw-r--r--runtimes/neurun/core/include/model/OperationVisitor.h51
-rw-r--r--runtimes/neurun/core/include/model/Operations.Include.h75
-rw-r--r--runtimes/neurun/core/include/model/Operations.h36
-rw-r--r--runtimes/neurun/core/include/model/Operations.lst81
-rw-r--r--runtimes/neurun/core/include/model/Shape.h83
-rw-r--r--runtimes/neurun/core/include/model/Subgraph.h86
-rw-r--r--runtimes/neurun/core/include/model/Subgraphs.h81
-rw-r--r--runtimes/neurun/core/include/model/TypeInfo.h59
-rw-r--r--runtimes/neurun/core/include/model/operation/AbsNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/AddNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/ArgMaxNode.h61
-rw-r--r--runtimes/neurun/core/include/model/operation/AvgPool2DNode.h69
-rw-r--r--runtimes/neurun/core/include/model/operation/CastNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/ComparisonNode.h72
-rw-r--r--runtimes/neurun/core/include/model/operation/ConcatNode.h58
-rw-r--r--runtimes/neurun/core/include/model/operation/Conv2DNode.h68
-rw-r--r--runtimes/neurun/core/include/model/operation/CustomNode.h65
-rw-r--r--runtimes/neurun/core/include/model/operation/DepthToSpaceNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/DepthwiseConv2DNode.h69
-rw-r--r--runtimes/neurun/core/include/model/operation/DequantizeNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/DivNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/EmbeddingLookupNode.h50
-rw-r--r--runtimes/neurun/core/include/model/operation/ExpNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/FloorNode.h51
-rw-r--r--runtimes/neurun/core/include/model/operation/FullyConnectedNode.h66
-rw-r--r--runtimes/neurun/core/include/model/operation/GatherNode.h64
-rw-r--r--runtimes/neurun/core/include/model/operation/HashtableLookupNode.h57
-rw-r--r--runtimes/neurun/core/include/model/operation/L2NormalizationNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/L2Pool2DNode.h68
-rw-r--r--runtimes/neurun/core/include/model/operation/LSTMNode.h90
-rw-r--r--runtimes/neurun/core/include/model/operation/LocalResponseNormalizationNode.h66
-rw-r--r--runtimes/neurun/core/include/model/operation/LogicalAndNode.h50
-rw-r--r--runtimes/neurun/core/include/model/operation/LogicalNotNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/LogicalOrNode.h50
-rw-r--r--runtimes/neurun/core/include/model/operation/LogisticNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/MaxPool2DNode.h68
-rw-r--r--runtimes/neurun/core/include/model/operation/MeanNode.h62
-rw-r--r--runtimes/neurun/core/include/model/operation/MulNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/NegNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/PReLUNode.h50
-rw-r--r--runtimes/neurun/core/include/model/operation/PadNode.h51
-rw-r--r--runtimes/neurun/core/include/model/operation/PermuteNode.h78
-rw-r--r--runtimes/neurun/core/include/model/operation/RNNNode.h71
-rw-r--r--runtimes/neurun/core/include/model/operation/RSQRTNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/ReLU1Node.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/ReLU6Node.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/ReLUNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/ReduceMaxNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/ReduceMinNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/ReduceSumNode.h61
-rw-r--r--runtimes/neurun/core/include/model/operation/ReshapeNode.h51
-rw-r--r--runtimes/neurun/core/include/model/operation/ResizeBilinearNode.h64
-rw-r--r--runtimes/neurun/core/include/model/operation/SQRTNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/SoftmaxNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/SpaceToDepthNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/SplitNode.h58
-rw-r--r--runtimes/neurun/core/include/model/operation/SquaredDifferenceNode.h50
-rw-r--r--runtimes/neurun/core/include/model/operation/SqueezeNode.h59
-rw-r--r--runtimes/neurun/core/include/model/operation/StridedSliceNode.h68
-rw-r--r--runtimes/neurun/core/include/model/operation/SubNode.h63
-rw-r--r--runtimes/neurun/core/include/model/operation/TanhNode.h49
-rw-r--r--runtimes/neurun/core/include/model/operation/TopKV2Node.h69
-rw-r--r--runtimes/neurun/core/include/model/operation/TransposeConvNode.h67
-rw-r--r--runtimes/neurun/core/include/model/operation/TransposeNode.h66
-rw-r--r--runtimes/neurun/core/include/model/operation/UnpackNode.h58
-rw-r--r--runtimes/neurun/core/include/util/Config.lst40
-rw-r--r--runtimes/neurun/core/include/util/ConfigSource.h55
-rw-r--r--runtimes/neurun/core/include/util/Coordinates.h103
-rw-r--r--runtimes/neurun/core/include/util/GeneralConfigSource.h44
-rw-r--r--runtimes/neurun/core/include/util/IConfigSource.h46
-rw-r--r--runtimes/neurun/core/include/util/ITimer.h59
-rw-r--r--runtimes/neurun/core/include/util/Index.h158
-rw-r--r--runtimes/neurun/core/include/util/ObjectManager.h144
-rw-r--r--runtimes/neurun/core/include/util/Padding.h42
-rw-r--r--runtimes/neurun/core/include/util/Set.h166
-rw-r--r--runtimes/neurun/core/include/util/ShapeInference.h61
-rw-r--r--runtimes/neurun/core/include/util/Utils.h52
-rw-r--r--runtimes/neurun/core/include/util/feature/Coordinate4D.h111
-rw-r--r--runtimes/neurun/core/include/util/feature/nchw/View.h106
-rw-r--r--runtimes/neurun/core/include/util/feature/nhwc/Reader.h73
-rw-r--r--runtimes/neurun/core/include/util/feature/nhwc/Utils.h (renamed from runtimes/neurun/src/util/feature/nhwc/Utils.h)0
-rw-r--r--runtimes/neurun/core/include/util/feature/nhwc/View.h91
-rw-r--r--runtimes/neurun/core/include/util/logging.h61
-rw-r--r--runtimes/neurun/core/src/backend/Backend.cc30
-rw-r--r--runtimes/neurun/core/src/backend/BackendManager.cc124
-rw-r--r--runtimes/neurun/core/src/backend/BackendManager.h81
-rw-r--r--runtimes/neurun/core/src/backend/CustomKernel.cc97
-rw-r--r--runtimes/neurun/core/src/backend/CustomKernelRegistry.cc46
-rw-r--r--runtimes/neurun/core/src/backend/ExecTime.cc133
-rw-r--r--runtimes/neurun/core/src/backend/JSONExecTime.cc231
-rw-r--r--runtimes/neurun/core/src/compiler/BackendResolver.cc47
-rw-r--r--runtimes/neurun/core/src/compiler/BackendResolver.h102
-rw-r--r--runtimes/neurun/core/src/compiler/Compiler.cc122
-rw-r--r--runtimes/neurun/core/src/compiler/ExecutorFactory.cc351
-rw-r--r--runtimes/neurun/core/src/compiler/ExecutorFactory.h52
-rw-r--r--runtimes/neurun/core/src/compiler/HEScheduler.cc577
-rw-r--r--runtimes/neurun/core/src/compiler/HEScheduler.h164
-rw-r--r--runtimes/neurun/core/src/compiler/IScheduler.h38
-rw-r--r--runtimes/neurun/core/src/compiler/Linear.cc355
-rw-r--r--runtimes/neurun/core/src/compiler/Linear.h108
-rw-r--r--runtimes/neurun/core/src/compiler/ManualScheduler.cc111
-rw-r--r--runtimes/neurun/core/src/compiler/ManualScheduler.h36
-rw-r--r--runtimes/neurun/core/src/compiler/OperandContext.cc45
-rw-r--r--runtimes/neurun/core/src/compiler/OperandContext.h63
-rw-r--r--runtimes/neurun/core/src/compiler/OperationValidator.cc879
-rw-r--r--runtimes/neurun/core/src/compiler/OperationValidator.h77
-rw-r--r--runtimes/neurun/core/src/compiler/ParamChecker.cc33
-rw-r--r--runtimes/neurun/core/src/compiler/ParamChecker.h73
-rw-r--r--runtimes/neurun/core/src/compiler/SubTensorAnalyzer.cc78
-rw-r--r--runtimes/neurun/core/src/compiler/SubTensorAnalyzer.h70
-rw-r--r--runtimes/neurun/core/src/dumper/dot/DotBuilder.cc83
-rw-r--r--runtimes/neurun/core/src/dumper/dot/DotBuilder.h62
-rw-r--r--runtimes/neurun/core/src/dumper/dot/DotDumper.cc198
-rw-r--r--runtimes/neurun/core/src/dumper/dot/DotDumper.h60
-rw-r--r--runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.cc56
-rw-r--r--runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.h59
-rw-r--r--runtimes/neurun/core/src/dumper/dot/Node.cc56
-rw-r--r--runtimes/neurun/core/src/dumper/dot/Node.h127
-rw-r--r--runtimes/neurun/core/src/dumper/dot/OperandNode.cc60
-rw-r--r--runtimes/neurun/core/src/dumper/dot/OperandNode.h79
-rw-r--r--runtimes/neurun/core/src/dumper/dot/OperationNode.cc47
-rw-r--r--runtimes/neurun/core/src/dumper/dot/OperationNode.h62
-rw-r--r--runtimes/neurun/core/src/exec/DataflowExecutor.cc206
-rw-r--r--runtimes/neurun/core/src/exec/DataflowExecutor.h110
-rw-r--r--runtimes/neurun/core/src/exec/Execution.cc108
-rw-r--r--runtimes/neurun/core/src/exec/ExecutionObservers.cc77
-rw-r--r--runtimes/neurun/core/src/exec/ExecutorBase.cc140
-rw-r--r--runtimes/neurun/core/src/exec/ExecutorBase.h124
-rw-r--r--runtimes/neurun/core/src/exec/FunctionSequence.cc62
-rw-r--r--runtimes/neurun/core/src/exec/FunctionSequence.h56
-rw-r--r--runtimes/neurun/core/src/exec/Job.cc36
-rw-r--r--runtimes/neurun/core/src/exec/Job.h77
-rw-r--r--runtimes/neurun/core/src/exec/LinearExecutor.cc27
-rw-r--r--runtimes/neurun/core/src/exec/LinearExecutor.h69
-rw-r--r--runtimes/neurun/core/src/exec/ParallelExecutor.cc140
-rw-r--r--runtimes/neurun/core/src/exec/ParallelExecutor.h73
-rw-r--r--runtimes/neurun/core/src/exec/ParallelScheduler.cc170
-rw-r--r--runtimes/neurun/core/src/exec/ParallelScheduler.h158
-rw-r--r--runtimes/neurun/core/src/exec/Sink.h182
-rw-r--r--runtimes/neurun/core/src/exec/Source.h187
-rw-r--r--runtimes/neurun/core/src/exec/interp/Buffer.h94
-rw-r--r--runtimes/neurun/core/src/exec/interp/ExecEnv.h168
-rw-r--r--runtimes/neurun/core/src/exec/interp/ExecManager.cc125
-rw-r--r--runtimes/neurun/core/src/exec/interp/ExecManager.h72
-rw-r--r--runtimes/neurun/core/src/exec/interp/Interpreter.cc202
-rw-r--r--runtimes/neurun/core/src/exec/interp/Interpreter.h67
-rw-r--r--runtimes/neurun/core/src/exec/interp/Registration.h52
-rw-r--r--runtimes/neurun/core/src/exec/interp/Tensor.cc54
-rw-r--r--runtimes/neurun/core/src/exec/interp/Tensor.h179
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/Add.cc146
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/AvgPool2D.cc129
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/Concat.cc152
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/Conv2D.cc154
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/DepthwiseConv.cc159
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/FullyConnected.cc137
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/MaxPool2D.cc128
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/OperationUtil.h110
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/Reshape.cc66
-rw-r--r--runtimes/neurun/core/src/exec/interp/operations/SoftMax.cc164
-rw-r--r--runtimes/neurun/core/src/graph/Graph.cc589
-rw-r--r--runtimes/neurun/core/src/graph/dumper/Dumper.cc583
-rw-r--r--runtimes/neurun/core/src/graph/dumper/Dumper.h96
-rw-r--r--runtimes/neurun/core/src/graph/operand/LowerInfo.cc30
-rw-r--r--runtimes/neurun/core/src/graph/operand/Shape4DConvert.h57
-rw-r--r--runtimes/neurun/core/src/graph/operation/LowerInfo.cc34
-rw-r--r--runtimes/neurun/core/src/graph/pass/OperandPass.cc36
-rw-r--r--runtimes/neurun/core/src/graph/pass/OperandPass.h53
-rw-r--r--runtimes/neurun/core/src/graph/pass/OperationPass.cc36
-rw-r--r--runtimes/neurun/core/src/graph/pass/OperationPass.h71
-rw-r--r--runtimes/neurun/core/src/graph/pass/Pass.cc (renamed from runtimes/neurun/src/graph/pass/Pass.cc)0
-rw-r--r--runtimes/neurun/core/src/graph/pass/Pass.h (renamed from runtimes/neurun/src/graph/pass/Pass.h)0
-rw-r--r--runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.cc199
-rw-r--r--runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.h87
-rw-r--r--runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.cc210
-rw-r--r--runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.h59
-rw-r--r--runtimes/neurun/core/src/graph/verifier/Verifier.cc97
-rw-r--r--runtimes/neurun/core/src/graph/verifier/Verifier.h68
-rw-r--r--runtimes/neurun/core/src/library_info.cc17
-rw-r--r--runtimes/neurun/core/src/model/LayoutSet.cc66
-rw-r--r--runtimes/neurun/core/src/model/LayoutSet.h58
-rw-r--r--runtimes/neurun/core/src/model/Operand.cc80
-rw-r--r--runtimes/neurun/core/src/model/OperandConstraint.cc28
-rw-r--r--runtimes/neurun/core/src/model/OperandIndexSequence.cc58
-rw-r--r--runtimes/neurun/core/src/model/Operation.cc57
-rw-r--r--runtimes/neurun/core/src/model/OperationIndexList.cc37
-rw-r--r--runtimes/neurun/core/src/model/Shape.cc86
-rw-r--r--runtimes/neurun/core/src/model/Subgraph.cc58
-rw-r--r--runtimes/neurun/core/src/model/Subgraphs.cc73
-rw-r--r--runtimes/neurun/core/src/model/TypeInfo.cc47
-rw-r--r--runtimes/neurun/core/src/model/operation/AbsNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/AddNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/ArgMaxNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/AvgPool2DNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/CastNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/ComparisonNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/ConcatNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/Conv2DNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/CustomNode.cc47
-rw-r--r--runtimes/neurun/core/src/model/operation/DepthToSpaceNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/DepthwiseConv2DNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/DequantizeNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/DivNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/EmbeddingLookupNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/ExpNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/FloorNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/FullyConnectedNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/GatherNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/HashtableLookupNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/L2NormalizationNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/L2Pool2DNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/LSTMNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/LocalResponseNormalizationNode.cc41
-rw-r--r--runtimes/neurun/core/src/model/operation/LogicalAndNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/LogicalNotNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/LogicalOrNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/LogisticNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/MaxPool2DNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/MeanNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/MulNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/NegNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/PReLUNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/PadNode.cc37
-rw-r--r--runtimes/neurun/core/src/model/operation/PermuteNode.cc45
-rw-r--r--runtimes/neurun/core/src/model/operation/RNNNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/RSQRTNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/ReLU1Node.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/ReLU6Node.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/ReLUNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/ReduceMaxNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/ReduceMinNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/ReduceSumNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/ReshapeNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/ResizeBilinearNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/SQRTNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/SoftmaxNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/SpaceToDepthNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/SplitNode.cc33
-rw-r--r--runtimes/neurun/core/src/model/operation/SquaredDifferenceNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/SqueezeNode.cc37
-rw-r--r--runtimes/neurun/core/src/model/operation/StridedSliceNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/SubNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/TanhNode.cc39
-rw-r--r--runtimes/neurun/core/src/model/operation/TopKV2Node.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/TransposeConvNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/TransposeNode.cc40
-rw-r--r--runtimes/neurun/core/src/model/operation/UnpackNode.cc33
-rw-r--r--runtimes/neurun/core/src/util/ConfigSource.cc115
-rw-r--r--runtimes/neurun/core/src/util/EnvConfigSource.cc40
-rw-r--r--runtimes/neurun/core/src/util/EnvConfigSource.h41
-rw-r--r--runtimes/neurun/core/src/util/GeneralConfigSource.cc45
-rw-r--r--runtimes/neurun/core/src/util/Padding.cc120
-rw-r--r--runtimes/neurun/core/src/util/ShapeInference.cc202
-rw-r--r--runtimes/neurun/core/src/util/Utils.cc68
-rw-r--r--runtimes/neurun/frontend/CMakeLists.txt1
-rw-r--r--runtimes/neurun/frontend/api/CMakeLists.txt12
-rw-r--r--runtimes/neurun/frontend/api/nnfw_dev.cc228
-rw-r--r--runtimes/neurun/frontend/api/wrapper/nnfw_api.cc366
-rw-r--r--runtimes/neurun/frontend/api/wrapper/nnfw_api.hpp77
-rw-r--r--runtimes/neurun/frontend/nnapi/ANeuralNetworksModel.test.cc25
-rw-r--r--runtimes/neurun/frontend/nnapi/CMakeLists.txt21
-rw-r--r--runtimes/neurun/frontend/nnapi/compilation.cc110
-rw-r--r--runtimes/neurun/frontend/nnapi/event.cc36
-rw-r--r--runtimes/neurun/frontend/nnapi/execution.cc411
-rw-r--r--runtimes/neurun/frontend/nnapi/memory.cc42
-rw-r--r--runtimes/neurun/frontend/nnapi/model.cc396
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.cc42
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.h42
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.cc43
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.h44
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.cc190
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.h56
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.cc46
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.h39
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc257
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.h72
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.cc95
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.h81
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.cc1524
-rw-r--r--runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.h61
-rw-r--r--runtimes/neurun/frontend/tflite/CMakeLists.txt17
-rw-r--r--runtimes/neurun/frontend/tflite/loader.cc700
-rw-r--r--runtimes/neurun/frontend/tflite/loader.h115
-rw-r--r--runtimes/neurun/frontend/tflite/schema.fbs794
-rw-r--r--runtimes/neurun/frontend/tflite/schema_generated.h7272
-rw-r--r--runtimes/neurun/src/backend/BackendManager.cc118
-rw-r--r--runtimes/neurun/src/backend/BackendManager.h94
-rw-r--r--runtimes/neurun/src/backend/CMakeLists.txt2
-rw-r--r--runtimes/neurun/src/backend/acl_cl/CMakeLists.txt15
-rw-r--r--runtimes/neurun/src/backend/acl_cl/Config.cc32
-rw-r--r--runtimes/neurun/src/backend/acl_cl/Config.h47
-rw-r--r--runtimes/neurun/src/backend/acl_cl/Convert.cc87
-rw-r--r--runtimes/neurun/src/backend/acl_cl/Convert.h47
-rw-r--r--runtimes/neurun/src/backend/acl_cl/PluginClassesAllocator.cc43
-rw-r--r--runtimes/neurun/src/backend/acl_cl/StageGenerator.cc593
-rw-r--r--runtimes/neurun/src/backend/acl_cl/StageGenerator.h54
-rw-r--r--runtimes/neurun/src/backend/acl_cl/Swizzle.h95
-rw-r--r--runtimes/neurun/src/backend/acl_cl/TensorBuilder.cc246
-rw-r--r--runtimes/neurun/src/backend/acl_cl/TensorBuilder.h94
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.cc61
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.h63
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/CLTensor.cc81
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/CLTensor.h67
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.cc48
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.h73
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/Object.cc43
-rw-r--r--runtimes/neurun/src/backend/acl_cl/operand/Object.h62
-rw-r--r--runtimes/neurun/src/backend/cpu/CMakeLists.txt18
-rw-r--r--runtimes/neurun/src/backend/cpu/Config.cc33
-rw-r--r--runtimes/neurun/src/backend/cpu/Config.h51
-rw-r--r--runtimes/neurun/src/backend/cpu/MemoryPlanner.cc127
-rw-r--r--runtimes/neurun/src/backend/cpu/MemoryPlanner.h166
-rw-r--r--runtimes/neurun/src/backend/cpu/PluginClassesAllocator.cc43
-rw-r--r--runtimes/neurun/src/backend/cpu/StageGenerator.cc547
-rw-r--r--runtimes/neurun/src/backend/cpu/StageGenerator.h55
-rw-r--r--runtimes/neurun/src/backend/cpu/TensorBuilder.cc124
-rw-r--r--runtimes/neurun/src/backend/cpu/TensorBuilder.h82
-rw-r--r--runtimes/neurun/src/backend/cpu/operand/Object.cc37
-rw-r--r--runtimes/neurun/src/backend/cpu/operand/Object.h61
-rw-r--r--runtimes/neurun/src/backend/cpu/operand/Tensor.cc39
-rw-r--r--runtimes/neurun/src/backend/cpu/operand/Tensor.h74
-rw-r--r--runtimes/neurun/src/backend/interface/IConfig.h44
-rw-r--r--runtimes/neurun/src/backend/interface/IStageGenerator.h72
-rw-r--r--runtimes/neurun/src/backend/interface/ITensorBuilder.h79
-rw-r--r--runtimes/neurun/src/backend/interface/operand/IObject.h43
-rw-r--r--runtimes/neurun/src/backend/interface/operand/ITensor.h49
-rw-r--r--runtimes/neurun/src/compiler/BackendResolver.cc27
-rw-r--r--runtimes/neurun/src/compiler/BackendResolver.h88
-rw-r--r--runtimes/neurun/src/compiler/Compiler.cc124
-rw-r--r--runtimes/neurun/src/compiler/Compiler.h73
-rw-r--r--runtimes/neurun/src/compiler/ConstantInitializer.cc188
-rw-r--r--runtimes/neurun/src/compiler/ConstantInitializer.h43
-rw-r--r--runtimes/neurun/src/compiler/OperationValidator.cc121
-rw-r--r--runtimes/neurun/src/compiler/OperationValidator.h56
-rw-r--r--runtimes/neurun/src/compiler/Plan.cc27
-rw-r--r--runtimes/neurun/src/compiler/Plan.h71
-rw-r--r--runtimes/neurun/src/compiler/PlanBuilder.cc60
-rw-r--r--runtimes/neurun/src/compiler/PlanBuilder.h72
-rw-r--r--runtimes/neurun/src/compiler/SubTensorAnalyzer.cc78
-rw-r--r--runtimes/neurun/src/compiler/SubTensorAnalyzer.h66
-rw-r--r--runtimes/neurun/src/compiler/SubTensorInfo.h84
-rw-r--r--runtimes/neurun/src/compiler/TensorInfo.h60
-rw-r--r--runtimes/neurun/src/compiler/operand/Context.cc47
-rw-r--r--runtimes/neurun/src/compiler/operand/Context.h66
-rw-r--r--runtimes/neurun/src/compiler/operation/Sequence.cc30
-rw-r--r--runtimes/neurun/src/compiler/operation/Sequence.h54
-rw-r--r--runtimes/neurun/src/dumper/dot/DotBuilder.cc85
-rw-r--r--runtimes/neurun/src/dumper/dot/DotBuilder.h79
-rw-r--r--runtimes/neurun/src/dumper/dot/DotDumper.cc115
-rw-r--r--runtimes/neurun/src/dumper/dot/DotDumper.h63
-rw-r--r--runtimes/neurun/src/dumper/dot/DotNodeInfo.cc108
-rw-r--r--runtimes/neurun/src/dumper/dot/DotNodeInfo.h71
-rw-r--r--runtimes/neurun/src/dumper/dot/DotOperandInfo.cc129
-rw-r--r--runtimes/neurun/src/dumper/dot/DotOperandInfo.h77
-rw-r--r--runtimes/neurun/src/dumper/dot/IDotInfo.h67
-rw-r--r--runtimes/neurun/src/exec/Sink.h137
-rw-r--r--runtimes/neurun/src/exec/Source.h139
-rw-r--r--runtimes/neurun/src/exec/interface/IFunction.h36
-rw-r--r--runtimes/neurun/src/frontend/compilation.cc78
-rw-r--r--runtimes/neurun/src/frontend/event.cc31
-rw-r--r--runtimes/neurun/src/frontend/execution.cc328
-rw-r--r--runtimes/neurun/src/frontend/memory.cc45
-rw-r--r--runtimes/neurun/src/frontend/model.cc480
-rw-r--r--runtimes/neurun/src/frontend/wrapper/compilation.cc31
-rw-r--r--runtimes/neurun/src/frontend/wrapper/compilation.h43
-rw-r--r--runtimes/neurun/src/frontend/wrapper/event.h24
-rw-r--r--runtimes/neurun/src/frontend/wrapper/execution.h69
-rw-r--r--runtimes/neurun/src/frontend/wrapper/memory.cc31
-rw-r--r--runtimes/neurun/src/frontend/wrapper/memory.h38
-rw-r--r--runtimes/neurun/src/frontend/wrapper/model.cc58
-rw-r--r--runtimes/neurun/src/frontend/wrapper/model.h47
-rw-r--r--runtimes/neurun/src/graph/Graph.cc334
-rw-r--r--runtimes/neurun/src/graph/Graph.h152
-rw-r--r--runtimes/neurun/src/graph/Index.h80
-rw-r--r--runtimes/neurun/src/graph/Model.h40
-rw-r--r--runtimes/neurun/src/graph/dumper/Dumper.cc110
-rw-r--r--runtimes/neurun/src/graph/dumper/Dumper.h50
-rw-r--r--runtimes/neurun/src/graph/operand/BackendSet.cc77
-rw-r--r--runtimes/neurun/src/graph/operand/BackendSet.h72
-rw-r--r--runtimes/neurun/src/graph/operand/Layout.h54
-rw-r--r--runtimes/neurun/src/graph/operand/LayoutSet.cc69
-rw-r--r--runtimes/neurun/src/graph/operand/LayoutSet.h61
-rw-r--r--runtimes/neurun/src/graph/operand/LowerInfo.cc30
-rw-r--r--runtimes/neurun/src/graph/operand/LowerInfo.h82
-rw-r--r--runtimes/neurun/src/graph/operand/ParentInfo.h79
-rw-r--r--runtimes/neurun/src/graph/operand/Shape4DConvert.h57
-rw-r--r--runtimes/neurun/src/graph/operation/LowerInfo.cc33
-rw-r--r--runtimes/neurun/src/graph/operation/LowerInfo.h45
-rw-r--r--runtimes/neurun/src/graph/pass/OperandPass.cc36
-rw-r--r--runtimes/neurun/src/graph/pass/OperandPass.h56
-rw-r--r--runtimes/neurun/src/graph/pass/OperationPass.cc36
-rw-r--r--runtimes/neurun/src/graph/pass/OperationPass.h71
-rw-r--r--runtimes/neurun/src/graph/pass/PermutationEliminationPass.cc192
-rw-r--r--runtimes/neurun/src/graph/pass/PermutationEliminationPass.h87
-rw-r--r--runtimes/neurun/src/graph/pass/PermutationInsertionPass.cc191
-rw-r--r--runtimes/neurun/src/graph/pass/PermutationInsertionPass.h57
-rw-r--r--runtimes/neurun/src/graph/verifier/Verifier.cc97
-rw-r--r--runtimes/neurun/src/graph/verifier/Verifier.h68
-rw-r--r--runtimes/neurun/src/kernel/CMakeLists.txt2
-rw-r--r--runtimes/neurun/src/kernel/acl_cl/CLFunction.h55
-rw-r--r--runtimes/neurun/src/kernel/acl_cl/CMakeLists.txt13
-rw-r--r--runtimes/neurun/src/kernel/acl_cl/ConcatLayer.cc159
-rw-r--r--runtimes/neurun/src/kernel/acl_cl/ConcatLayer.h67
-rw-r--r--runtimes/neurun/src/kernel/cpu/AvgPoolLayer.cc118
-rw-r--r--runtimes/neurun/src/kernel/cpu/AvgPoolLayer.h78
-rw-r--r--runtimes/neurun/src/kernel/cpu/CMakeLists.txt14
-rw-r--r--runtimes/neurun/src/kernel/cpu/ConcatLayer.cc138
-rw-r--r--runtimes/neurun/src/kernel/cpu/ConcatLayer.h66
-rw-r--r--runtimes/neurun/src/kernel/cpu/ConvolutionLayer.cc228
-rw-r--r--runtimes/neurun/src/kernel/cpu/ConvolutionLayer.h79
-rw-r--r--runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.cc114
-rw-r--r--runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.h69
-rw-r--r--runtimes/neurun/src/kernel/cpu/MaxPoolLayer.cc117
-rw-r--r--runtimes/neurun/src/kernel/cpu/MaxPoolLayer.h78
-rw-r--r--runtimes/neurun/src/kernel/cpu/OperationUtils.cc230
-rw-r--r--runtimes/neurun/src/kernel/cpu/OperationUtils.h150
-rw-r--r--runtimes/neurun/src/kernel/cpu/PermuteLayer.cc201
-rw-r--r--runtimes/neurun/src/kernel/cpu/PermuteLayer.h58
-rw-r--r--runtimes/neurun/src/kernel/cpu/ReshapeLayer.cc57
-rw-r--r--runtimes/neurun/src/kernel/cpu/ReshapeLayer.h58
-rw-r--r--runtimes/neurun/src/kernel/cpu/SoftMaxLayer.cc174
-rw-r--r--runtimes/neurun/src/kernel/cpu/SoftMaxLayer.h64
-rw-r--r--runtimes/neurun/src/library_info.cc17
-rw-r--r--runtimes/neurun/src/linear/Linear.cc199
-rw-r--r--runtimes/neurun/src/linear/Linear.h84
-rw-r--r--runtimes/neurun/src/model/operand/Data.h78
-rw-r--r--runtimes/neurun/src/model/operand/DataType.h43
-rw-r--r--runtimes/neurun/src/model/operand/Index.h51
-rw-r--r--runtimes/neurun/src/model/operand/IndexSet.cc61
-rw-r--r--runtimes/neurun/src/model/operand/IndexSet.h62
-rw-r--r--runtimes/neurun/src/model/operand/Object.cc128
-rw-r--r--runtimes/neurun/src/model/operand/Object.h135
-rw-r--r--runtimes/neurun/src/model/operand/Set.cc84
-rw-r--r--runtimes/neurun/src/model/operand/Set.h61
-rw-r--r--runtimes/neurun/src/model/operand/Shape.cc94
-rw-r--r--runtimes/neurun/src/model/operand/Shape.h63
-rw-r--r--runtimes/neurun/src/model/operand/TypeInfo.cc35
-rw-r--r--runtimes/neurun/src/model/operand/TypeInfo.h64
-rw-r--r--runtimes/neurun/src/model/operation/AddNode.cc49
-rw-r--r--runtimes/neurun/src/model/operation/AddNode.h54
-rw-r--r--runtimes/neurun/src/model/operation/AvgPool2DNode.cc62
-rw-r--r--runtimes/neurun/src/model/operation/AvgPool2DNode.h68
-rw-r--r--runtimes/neurun/src/model/operation/ConcatNode.cc59
-rw-r--r--runtimes/neurun/src/model/operation/ConcatNode.h56
-rw-r--r--runtimes/neurun/src/model/operation/Conv2DNode.cc59
-rw-r--r--runtimes/neurun/src/model/operation/Conv2DNode.h67
-rw-r--r--runtimes/neurun/src/model/operation/FullyConnectedNode.cc52
-rw-r--r--runtimes/neurun/src/model/operation/FullyConnectedNode.h63
-rw-r--r--runtimes/neurun/src/model/operation/Index.h35
-rw-r--r--runtimes/neurun/src/model/operation/IndexList.cc40
-rw-r--r--runtimes/neurun/src/model/operation/IndexList.h55
-rw-r--r--runtimes/neurun/src/model/operation/MaxPool2DNode.cc62
-rw-r--r--runtimes/neurun/src/model/operation/MaxPool2DNode.h68
-rw-r--r--runtimes/neurun/src/model/operation/Node.Include.h27
-rw-r--r--runtimes/neurun/src/model/operation/Node.cc54
-rw-r--r--runtimes/neurun/src/model/operation/Node.h84
-rw-r--r--runtimes/neurun/src/model/operation/NodeVisitor.h43
-rw-r--r--runtimes/neurun/src/model/operation/Op.lst32
-rw-r--r--runtimes/neurun/src/model/operation/OperandConstraint.cc28
-rw-r--r--runtimes/neurun/src/model/operation/OperandConstraint.h61
-rw-r--r--runtimes/neurun/src/model/operation/PermuteNode.cc41
-rw-r--r--runtimes/neurun/src/model/operation/PermuteNode.h62
-rw-r--r--runtimes/neurun/src/model/operation/ReshapeNode.cc50
-rw-r--r--runtimes/neurun/src/model/operation/ReshapeNode.h50
-rw-r--r--runtimes/neurun/src/model/operation/Set.cc67
-rw-r--r--runtimes/neurun/src/model/operation/Set.h63
-rw-r--r--runtimes/neurun/src/model/operation/SoftmaxNode.cc50
-rw-r--r--runtimes/neurun/src/model/operation/SoftmaxNode.h60
-rw-r--r--runtimes/neurun/src/util/Padding.cc75
-rw-r--r--runtimes/neurun/src/util/Padding.h51
-rw-r--r--runtimes/neurun/src/util/Utils.cc42
-rw-r--r--runtimes/neurun/src/util/Utils.h43
-rw-r--r--runtimes/neurun/src/util/config/Config.lst34
-rw-r--r--runtimes/neurun/src/util/config/ConfigManager.cc74
-rw-r--r--runtimes/neurun/src/util/config/ConfigManager.h71
-rw-r--r--runtimes/neurun/src/util/feature/Coordinate4D.h89
-rw-r--r--runtimes/neurun/src/util/feature/nchw/View.h106
-rw-r--r--runtimes/neurun/src/util/feature/nhwc/Reader.h72
-rw-r--r--runtimes/neurun/src/util/feature/nhwc/View.h88
-rw-r--r--runtimes/neurun/src/util/logging.h59
-rw-r--r--runtimes/neurun/test/CMakeLists.txt15
-rw-r--r--runtimes/neurun/test/backend/cpu/MemoryPlanner.cc127
-rw-r--r--runtimes/neurun/test/core/backend/ExecTime.test.cc98
-rw-r--r--runtimes/neurun/test/core/compiler/Scheduler.cc554
-rw-r--r--runtimes/neurun/test/core/exec/ExecInstance.cc312
-rw-r--r--runtimes/neurun/test/core/exec/interp/ExecManager.cc338
-rw-r--r--runtimes/neurun/test/graph/Graph.cc45
-rw-r--r--runtimes/neurun/test/graph/Index.cc4
-rw-r--r--runtimes/neurun/test/graph/MockNode.h16
-rw-r--r--runtimes/neurun/test/graph/operand/IndexSet.cc32
-rw-r--r--runtimes/neurun/test/graph/operand/LayoutSet.cc6
-rw-r--r--runtimes/neurun/test/graph/operand/Set.cc29
-rw-r--r--runtimes/neurun/test/graph/operand/UseDef.cc34
-rw-r--r--runtimes/neurun/test/graph/operation/Set.cc20
-rw-r--r--runtimes/neurun/test/graph/operation/SetIO.cc86
-rw-r--r--runtimes/neurun/test/graph/verifier/Verifier.cc31
-rw-r--r--runtimes/neurun/test/model.cc25
-rw-r--r--runtimes/neurun/test/util/ShapeInference.cc233
-rw-r--r--runtimes/pure_arm_compute/CMakeLists.txt3
-rw-r--r--runtimes/pure_arm_compute/src/compilation.cc936
-rw-r--r--runtimes/pure_arm_compute/src/internal/arm_compute/Cast.cc41
-rw-r--r--runtimes/pure_arm_compute/src/internal/arm_compute/Cast.h12
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/FeatureLoggingLayer.h113
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.cc88
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.h82
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.cc66
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.h77
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.cc181
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.h51
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleArithmeticAddition.h133
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.cc110
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.h51
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.cc79
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.h70
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.cc110
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.h51
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.cc108
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.h61
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.cc140
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.h46
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.cc75
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.h39
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.cc77
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.h51
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.cc172
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.h44
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.cc53
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.h39
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.cc142
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.h50
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.cc110
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.h64
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.cc155
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.h59
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.cc74
-rw-r--r--runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.h51
-rw-r--r--runtimes/pure_arm_compute/src/internal/nnapi/tensor/Reader.h2
-rw-r--r--runtimes/pure_arm_compute/src/internal/nnapi/tensor/View.h2
-rw-r--r--runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.cc8
-rw-r--r--runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.h4
-rw-r--r--runtimes/pure_arm_compute/src/internal/op/Gather.cc8
-rw-r--r--runtimes/pure_arm_compute/src/internal/op/Gather.h6
-rw-r--r--runtimes/pure_arm_compute/src/internal/op/Split.cc13
-rw-r--r--runtimes/pure_arm_compute/src/internal/op/Split.h2
-rw-r--r--runtimes/pure_arm_compute/src/logging.h2
-rw-r--r--runtimes/pure_arm_compute/src/model.cc14
-rw-r--r--scripts/command/build25
-rw-r--r--scripts/command/build-acl77
-rw-r--r--scripts/command/build-docker52
-rw-r--r--scripts/command/configure85
-rw-r--r--scripts/command/docker-run10
-rw-r--r--scripts/command/docker-run-user10
-rw-r--r--scripts/command/docker-shell11
-rwxr-xr-xscripts/command/docker_build_cross_arm_ubuntu.sh30
-rwxr-xr-xscripts/command/docker_build_tizen_cross.sh27
-rwxr-xr-xscripts/command/docker_coverage_report.sh9
-rwxr-xr-xscripts/command/docker_cross_test_coverage_build.sh21
-rw-r--r--scripts/command/docker_env_neurun1
-rw-r--r--scripts/command/docker_env_pureacl_tflite_benchmark_model2
-rwxr-xr-xscripts/command/docker_gbs_build.sh26
-rwxr-xr-xscripts/command/docker_run_test.sh18
-rwxr-xr-xscripts/command/format-checker.sh178
-rwxr-xr-xscripts/command/gen_coverage_report.sh57
-rw-r--r--scripts/command/install16
-rw-r--r--scripts/command/nnfw_docker14
-rw-r--r--scripts/command/nnfw_docker_tizen14
-rwxr-xr-xscripts/command/tizen_xu4_test.sh155
-rw-r--r--scripts/config/docker.configuration44
-rw-r--r--scripts/config/gbs.conf21
-rw-r--r--scripts/config/image_name.configuration3
-rw-r--r--scripts/docker/Dockerfile19
-rw-r--r--scripts/docker/Dockerfile_tizen13
-rwxr-xr-xscripts/git-hooks/install_hooks.sh11
-rwxr-xr-xscripts/git-hooks/pre-push32
-rw-r--r--tests/CMakeLists.txt1
-rw-r--r--tests/custom_op/CMakeLists.txt6
-rw-r--r--tests/custom_op/apps/CMakeLists.txt20
-rw-r--r--tests/custom_op/apps/FillFrom/CMakeLists.txt4
-rw-r--r--tests/custom_op/apps/FillFrom/FillFrom_runner.cc227
-rw-r--r--tests/custom_op/kernels/CMakeLists.txt9
-rw-r--r--tests/custom_op/kernels/FillFrom/CMakeLists.txt1
-rw-r--r--tests/custom_op/kernels/FillFrom/FillFromKernel.cc53
-rw-r--r--tests/custom_op/nnpkgs/FillFrom/FillFrom.json115
-rw-r--r--tests/custom_op/nnpkgs/FillFrom/FillFrom.tflite0
-rw-r--r--tests/custom_op/nnpkgs/FillFrom/metadata/MANIFEST7
-rwxr-xr-xtests/framework/run_test.sh18
-rw-r--r--tests/framework/tests/MODELS/mobilenet/config.sh1
-rw-r--r--tests/framework/tests/concat/2D/config.sh1
-rw-r--r--tests/framework/tests/custom/abs/config.sh1
-rw-r--r--tests/framework/tests/custom/squared_difference/config.sh1
-rw-r--r--tests/framework/tests/custom/tensorflowmax/config.sh1
-rw-r--r--tests/framework/tests/custom/tensorflowsum/config.sh1
-rw-r--r--tests/framework/tests/exp/config.sh1
-rw-r--r--tests/framework/tests/hashtable_lookup/config.sh1
-rw-r--r--tests/framework/tests/logistic/config.sh1
-rw-r--r--tests/framework/tests/neg/config.sh1
-rw-r--r--tests/framework/tests/pad/pad1/config.sh1
-rw-r--r--tests/framework/tests/pad/pad2/config.sh1
-rw-r--r--tests/framework/tests/reduce_max/config.sh1
-rw-r--r--tests/framework/tests/reduce_mean/test1/config.sh2
-rw-r--r--tests/framework/tests/reduce_mean/test2/config.sh2
-rw-r--r--tests/framework/tests/rsqrt/config.sh1
-rw-r--r--tests/framework/tests/sub/broadcast/config.sh1
-rw-r--r--tests/framework/tests/transpose/config.sh (renamed from tests/framework/tests/tranpose/config.sh)0
-rw-r--r--tests/framework/tests/transpose_conv/same/config.sh1
-rw-r--r--tests/framework/tests/transpose_conv/valid/config.sh1
-rw-r--r--tests/nnapi/CMakeLists.txt51
-rw-r--r--tests/nnapi/nnapi_gtest.skip.armv7l-linux57
-rw-r--r--tests/nnapi/nnapi_gtest.skip.armv7l-linux.acl_neon30
-rw-r--r--tests/nnapi/nnapi_gtest.skip.armv7l-linux.cpu77
-rw-r--r--tests/nnapi/nnapi_gtest.skip.armv7l-linux.neurun117
-rw-r--r--tests/nnapi/nnapi_gtest.skip.armv7l-linux.pacl38
-rw-r--r--tests/nnapi/nnapi_gtest.skip.armv7l-tizen60
-rw-r--r--tests/nnapi/nnapi_gtest.skip.noarch.interp72
-rw-r--r--tests/nnapi/nnapi_gtest.skip.x86_64-linux130
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/README.md11
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/include/TestHarness.h (renamed from externals/nnapi_test_generator/include/TestHarness.h)0
-rw-r--r--[-rwxr-xr-x]tests/nnapi/nnapi_test_generator/android-p/slicing.py (renamed from externals/nnapi_test_generator/slicing.py)0
-rw-r--r--[-rwxr-xr-x]tests/nnapi/nnapi_test_generator/android-p/test_generator.py (renamed from externals/nnapi_test_generator/test_generator.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/conv_1_h3_w2_SAME.mod.py (renamed from externals/nnapi_test_generator/tests/P_conv/conv_1_h3_w2_SAME.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_conv/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_conv/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/depthwise_conv.bin.mod.py (renamed from externals/nnapi_test_generator/tests/P_depthwise_conv/depthwise_conv.bin.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_depthwise_conv/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_depthwise_conv/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/explicit_add.mod.py (renamed from externals/nnapi_test_generator/tests/P_explicit/explicit_add.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_explicit/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_explicit/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_float/addfloat.mod.py (renamed from externals/nnapi_test_generator/tests/P_float/addfloat.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_float/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_float/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_float/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_float/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_full/addfloat.mod.py (renamed from externals/nnapi_test_generator/tests/P_full/addfloat.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_full/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_full/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_full/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_full/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/lstm.mod.py (renamed from externals/nnapi_test_generator/tests/P_lstm/lstm.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_lstm/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_lstm/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/averpoolfloat.mod.py (renamed from externals/nnapi_test_generator/tests/P_quantized_avgpool/averpoolfloat.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_quantized_avgpool/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_quantized_avgpool/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/quantized.mod.py (renamed from externals/nnapi_test_generator/tests/P_quantized_conv/quantized.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_quantized_conv/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_quantized_conv/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_vts_full/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_vts_full/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/vts_full.mod.py (renamed from externals/nnapi_test_generator/tests/P_vts_full/vts_full.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/addfloat.mod.py (renamed from externals/nnapi_test_generator/tests/P_vts_operands/addfloat.mod.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_vts_operands/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_vts_operands/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/stderr.txt.expect (renamed from externals/nnapi_test_generator/tests/P_weird/stderr.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/stdout.txt.expect (renamed from externals/nnapi_test_generator/tests/P_weird/stdout.txt.expect)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/weird_add.mod.py (renamed from externals/nnapi_test_generator/tests/P_weird/weird_add.mod.py)0
-rw-r--r--[-rwxr-xr-x]tests/nnapi/nnapi_test_generator/android-p/tests/test.py (renamed from externals/nnapi_test_generator/tests/test.py)0
-rw-r--r--[-rwxr-xr-x]tests/nnapi/nnapi_test_generator/android-p/vts_generator.py (renamed from externals/nnapi_test_generator/vts_generator.py)0
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/README.md408
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/cts_generator.py314
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/include/TestHarness.h426
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/spec_visualizer.py266
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/spec_viz_template.html438
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/test_generator.py1236
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/lstm_float.mod.py145
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stdout.txt.expect107
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py43
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stdout.txt.expect82
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/conv_float.mod.py35
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stdout.txt.expect85
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/mean_implicit.mod.py41
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stdout.txt.expect262
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/conv_float.mod.py52
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stdout.txt.expect1848
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/add_internal.mod.py71
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stdout.txt.expect98
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/conv_float.mod.py61
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stdout.txt.expect1848
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/conv_quant8.mod.py35
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stdout.txt.expect85
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/conv_float.mod.py44
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stderr.txt.expect3
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stdout.txt.expect3688
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/lstm_float.mod.py145
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stdout.txt.expect322
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py43
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stdout.txt.expect150
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/conv_float.mod.py35
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stdout.txt.expect142
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/mean_implicit.mod.py41
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stdout.txt.expect381
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/conv_float.mod.py52
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stdout.txt.expect3548
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/add_internal.mod.py71
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stdout.txt.expect312
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/conv_float.mod.py61
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stdout.txt.expect3548
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/conv_quant8.mod.py35
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stdout.txt.expect142
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/conv_float.mod.py46
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stderr.txt.expect2
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stdout.txt.expect7084
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/tests/test.py328
-rw-r--r--tests/nnapi/nnapi_test_generator/android-q-beta/vts_generator.py361
-rw-r--r--tests/nnapi/specs/Ex/argmax_ex_float_1.mod.py2
-rw-r--r--tests/nnapi/specs/Ex/argmax_ex_float_2.mod.py2
-rw-r--r--tests/nnapi/specs/Ex/argmax_ex_int32.mod.py2
-rw-r--r--tests/nnapi/specs/Ex/argmax_ex_neg_axis_float.mod.py2
-rw-r--r--tests/nnapi/specs/Ex/argmax_ex_neg_axis_int32.mod.py2
-rw-r--r--tests/nnapi/specs/Ex/argmax_ex_quant8.mod.py2
-rw-r--r--tests/nnapi/specs/Ex/argmax_ex_quant8_neg_axis.mod.py2
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_2D_2D_float_1.mod.py22
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_2D_2D_float_2.mod.py23
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_2D_3D_float_1.mod.py22
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_2D_3D_float_2.mod.py23
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_3D_2D_float_1.mod.py29
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_3D_2D_float_2.mod.py29
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_3D_2D_float_3.mod.py26
-rw-r--r--tests/nnapi/specs/Ex/gather_ex_4D_float.mod.py41
-rw-r--r--tests/nnapi/specs/Ex/greater_equal_ex.mod.py35
-rw-r--r--tests/nnapi/specs/Ex/less_ex.mod.py35
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_2D_float_1.mod.py54
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_2D_float_2.mod.py54
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_2D_int_1.mod.py25
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_2D_int_2.mod.py25
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_3D_float_1.mod.py54
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_3D_float_2.mod.py54
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_3D_int_1.mod.py25
-rw-r--r--tests/nnapi/specs/Ex/pack_ex_3D_int_2.mod.py25
-rw-r--r--tests/nnapi/specs/Ex/prelu_ex_broadcast_float_1.mod.py23
-rw-r--r--tests/nnapi/specs/Ex/prelu_ex_broadcast_quant8_1.mod.py24
-rw-r--r--tests/nnapi/specs/Ex/prelu_ex_float_2.mod.py22
-rw-r--r--tests/nnapi/specs/Ex/prelu_ex_quant8_1.mod.py23
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_2D_float.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_2D_int32.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_C.mod.py34
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_HW.mod.py34
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_float.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_float_1.mod.py17
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_float_2.mod.py17
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_quant8_1.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/reduce_max_ex_quant8_2.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/split_ex_1D_float.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_1D_int32.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_float_1.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_float_2.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_float_3.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_int32_1.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_int32_2.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_int32_3.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_int32_4.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_int32_5.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/split_ex_4D_quant8.mod.py3
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_2D_float.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_2D_int32.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_C.mod.py34
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_HW.mod.py34
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_float.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_float_1.mod.py17
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_float_2.mod.py17
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_1.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_2.mod.py18
-rw-r--r--tests/nnapi/specs/Ex/transpose_conv_ex_float_4.mod.py56
-rw-r--r--tests/nnapi/specs/Ex/transpose_conv_ex_int_1.mod.py24
-rw-r--r--tests/nnapi/specs/Ex/transpose_conv_ex_int_2.mod.py25
-rw-r--r--tests/nnapi/specs/Ex/transpose_conv_ex_int_3.mod.py27
-rw-r--r--tests/nnapi/specs/Ex/unpack_ex_3D_float_1.mod.py8
-rw-r--r--tests/nnapi/specs/Ex/unpack_ex_3D_float_2.mod.py12
-rw-r--r--tests/nnapi/specs/Ex/unpack_ex_3D_int_1.mod.py8
-rw-r--r--tests/nnapi/specs/Ex/unpack_ex_3D_int_2.mod.py12
-rw-r--r--tests/nnapi/specs/V1_0/conv_1_h3_w2_SAME.mod.py2
-rw-r--r--tests/nnapi/specs/V1_0/conv_1_h3_w2_VALID.mod.py2
-rw-r--r--tests/nnapi/specs/V1_0/conv_3_h3_w2_SAME.mod.py2
-rw-r--r--tests/nnapi/specs/V1_0/conv_3_h3_w2_VALID.mod.py2
-rw-r--r--tests/nnapi/specs/V1_0/depthwise_conv.mod.py2
-rw-r--r--tests/nnapi/specs/V1_0/lstm3.mod.py1
-rw-r--r--tests/nnapi/specs/V1_0/lstm3_state.mod.py1
-rw-r--r--tests/nnapi/specs/V1_0/lstm3_state2.mod.py1
-rw-r--r--tests/nnapi/specs/V1_0/lstm3_state3.mod.py1
-rw-r--r--tests/nnapi/specs/V1_0/mobilenet_224_gender_basic_fixed.mod.py64
-rw-r--r--tests/nnapi/specs/V1_0/mobilenet_quantized.mod.py65
-rw-r--r--tests/nnapi/specs/V1_0/resize_bilinear.mod.py4
-rw-r--r--tests/nnapi/specs/V1_0/resize_bilinear_2.mod.py4
-rw-r--r--tests/nnapi/specs/V1_0/svdf_bias_present.mod.py138
-rwxr-xr-xtests/nnapi/specs/generate_test.sh2
-rw-r--r--tests/nnapi/src/TestValidation.cpp31
-rwxr-xr-xtests/scripts/common.sh28
-rw-r--r--tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt31
-rw-r--r--tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt11
-rw-r--r--tests/scripts/neurun_frameworktest_list.txt9
-rw-r--r--tests/scripts/neurun_frameworktest_list.x86-64.cpu.txt10
-rw-r--r--tests/scripts/pacl_frameworktest_list.armv7l-linux.txt36
-rwxr-xr-xtests/scripts/py/test_driver.py13
-rwxr-xr-xtests/scripts/run_benchmark.sh28
-rwxr-xr-xtests/scripts/run_benchmark_acl.sh113
-rwxr-xr-xtests/scripts/run_benchmark_op.sh5
-rw-r--r--tests/scripts/run_benchmark_tflite_model.in1
-rwxr-xr-xtests/scripts/run_benchmark_tflite_model.sh2
-rwxr-xr-xtests/scripts/run_frameworktest.sh21
-rwxr-xr-xtests/scripts/run_unittest.sh2
-rwxr-xr-xtests/scripts/test_driver.sh14
-rw-r--r--tests/scripts/test_performance.sh182
-rw-r--r--tests/tools/CMakeLists.txt9
-rw-r--r--tests/tools/nnapi_test/src/nnapi_test.cc2
-rw-r--r--tests/tools/nnpackage_run/CMakeLists.txt32
-rw-r--r--tests/tools/nnpackage_run/README.md22
-rw-r--r--tests/tools/nnpackage_run/src/args.cc108
-rw-r--r--tests/tools/nnpackage_run/src/args.h53
-rw-r--r--tests/tools/nnpackage_run/src/nnpackage_run.cc259
-rw-r--r--tests/tools/nnpackage_run/src/tensor_dumper.cc54
-rw-r--r--tests/tools/nnpackage_run/src/tensor_dumper.h47
-rw-r--r--tests/tools/tflite_benchmark/CMakeLists.txt5
-rw-r--r--tests/tools/tflite_benchmark/src/tflite_benchmark.cc60
-rw-r--r--tests/tools/tflite_benchmark_model/CMakeLists.txt8
-rw-r--r--tests/tools/tflite_benchmark_model/README.md12
-rw-r--r--tests/tools/tflite_benchmark_model/benchmark_tflite_model.cc14
-rw-r--r--tests/tools/tflite_benchmark_model/profile_summarizer.cc4
-rw-r--r--tests/tools/tflite_loader/CMakeLists.txt23
-rw-r--r--tests/tools/tflite_loader/src/args.cc83
-rw-r--r--tests/tools/tflite_loader/src/args.h51
-rw-r--r--tests/tools/tflite_loader/src/tflite_loader.cc289
-rw-r--r--tests/tools/tflite_run/CMakeLists.txt9
-rw-r--r--tests/tools/tflite_run/src/args.cc31
-rw-r--r--tests/tools/tflite_run/src/args.h6
-rw-r--r--tests/tools/tflite_run/src/bin_image.h2
-rw-r--r--tests/tools/tflite_run/src/tensor_dumper.cc18
-rw-r--r--tests/tools/tflite_run/src/tensor_dumper.h16
-rw-r--r--tests/tools/tflite_run/src/tensor_loader.cc37
-rw-r--r--tests/tools/tflite_run/src/tensor_loader.h6
-rw-r--r--tests/tools/tflite_run/src/tflite_run.cc67
-rw-r--r--tools/CMakeLists.txt7
-rwxr-xr-xtools/cross/build_android_rootfs.sh2
-rwxr-xr-xtools/cross/build_rootfs.sh4
-rwxr-xr-xtools/extract_weights_from_tflite/extract.py13
-rwxr-xr-xtools/extract_weights_from_tflite/extract_from_tflite.sh2
-rwxr-xr-xtools/extract_weights_from_tflite/print_op.py10
-rwxr-xr-xtools/image_importer/image_importer.py4
-rw-r--r--tools/kbenchmark/Args.cc106
-rw-r--r--tools/kbenchmark/Args.h55
-rw-r--r--tools/kbenchmark/CMakeLists.txt29
-rw-r--r--tools/kbenchmark/ConfigFile.h115
-rw-r--r--tools/kbenchmark/Driver.cc147
-rw-r--r--tools/kbenchmark/Operation.h37
-rw-r--r--tools/kbenchmark/OperationLoader.h60
-rw-r--r--tools/kbenchmark/Operations.lst23
-rw-r--r--tools/kbenchmark/README.md81
-rw-r--r--tools/kbenchmark/Utils.h70
-rw-r--r--tools/kbenchmark/kernels/CMakeLists.txt1
-rw-r--r--tools/kbenchmark/kernels/acl_cl/CMakeLists.txt20
-rw-r--r--tools/kbenchmark/kernels/acl_cl/Convolution.cpp514
-rw-r--r--tools/kbenchmark/kernels/acl_cl/TransposeConv.cpp393
-rw-r--r--tools/kbenchmark/kernels/acl_common/Utils.h102
-rw-r--r--tools/kbenchmark/kernels/acl_neon/CMakeLists.txt19
-rw-r--r--tools/kbenchmark/kernels/acl_neon/Convolution.cpp489
-rw-r--r--tools/kbenchmark/kernels/acl_neon/TransposeConv.cpp293
-rw-r--r--tools/kbenchmark/operations/Convolution.h73
-rw-r--r--tools/kbenchmark/operations/TransposeConv.h73
-rw-r--r--tools/lcov-to-cobertura-xml/.FORMATDENY0
-rw-r--r--tools/lcov-to-cobertura-xml/README.md (renamed from externals/lcov-to-cobertura-xml/README.md)0
-rw-r--r--[-rwxr-xr-x]tools/lcov-to-cobertura-xml/lcov_cobertura.py (renamed from externals/lcov-to-cobertura-xml/lcov_cobertura.py)0
-rw-r--r--tools/nnapi_quickcheck/lib/env.cpp6
-rw-r--r--tools/nnapi_quickcheck/tests/add_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_2.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_3.cpp26
-rw-r--r--tools/nnapi_quickcheck/tests/add_4.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_5.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_6.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_7.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_8.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_9.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/add_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/avg_pool_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/avg_pool_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/cast_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/cast_2.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/cast_q_to_f_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/concat_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/concat_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/conv_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/conv_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/dconv_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/dconv_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/dequantize_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/div_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/div_2.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/fully_connected_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/fully_connected_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/gather_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/gather_2.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/logistic_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/max_pool_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/max_pool_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/mul_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/mul_2.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/mul_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/relu1_1.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/relu6_1.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/relu6_quan_1.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/relu_1.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/relu_2.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/relu_3.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/relu_quan_1.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/reshape_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/reshape_quan_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/resize_bilinear_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/softmax_1.cpp22
-rw-r--r--tools/nnapi_quickcheck/tests/softmax_2.cpp22
-rw-r--r--tools/nnapi_quickcheck/tests/softmax_quan_1.cpp22
-rw-r--r--tools/nnapi_quickcheck/tests/split_1.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/split_2.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/split_3.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/split_4.cpp20
-rw-r--r--tools/nnapi_quickcheck/tests/sub_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/sub_2.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/sub_3.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/sub_4.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/sub_5.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/sub_6.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/tanh_1.cpp18
-rw-r--r--tools/nnapi_quickcheck/tests/topk_v2_1.cpp18
-rw-r--r--tools/nnpackage_tool/model2nnpkg/README.md21
-rw-r--r--tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh70
-rw-r--r--tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/README.md26
-rw-r--r--tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/nncc-tc-to-nnpkg-tc.sh84
-rw-r--r--tools/nnpackage_tool/nnpackager/nnpackager.py65
-rw-r--r--tools/nnpackage_tool/nnpkg_test/README.md42
-rw-r--r--tools/nnpackage_tool/nnpkg_test/list22
-rw-r--r--tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh122
-rw-r--r--tools/nnpackage_tool/tflite2circle/README.md28
-rw-r--r--tools/nnpackage_tool/tflite2circle/tflite2circle.sh73
-rw-r--r--tools/nnpackage_tool/tflite2circle/tflitejson2circlejson.py28
-rw-r--r--tools/opencl_tool/CMakeLists.txt4
-rw-r--r--tools/pbfile_tool/convert_pb_to_pbtxt.py54
-rwxr-xr-xtools/pbfile_tool/pb_info.py3
-rw-r--r--tools/tensorflow_model_freezer/base_freezer.py43
-rw-r--r--tools/tensorflow_model_freezer/model_freezer_util.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/ARGMAX_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/ARGMIN_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/DIV_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/LOGICAL_AND_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/LOGICAL_NOT_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/LOGICAL_OR_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/MUL_gen.py2
-rw-r--r--tools/tensorflow_model_freezer/sample/Operation_gen.py20
-rwxr-xr-xtools/tensorflow_model_freezer/sample/SQUEEZE_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/STACK_gen.py2
-rwxr-xr-xtools/tensorflow_model_freezer/sample/TOPK_gen.py2
-rw-r--r--tools/tensorflow_model_freezer/sample/UNSTACK_gen.py2
-rw-r--r--tools/tflite_accuracy/CMakeLists.txt13
-rw-r--r--tools/tflite_accuracy/README.md37
-rw-r--r--tools/tflite_accuracy/src/labels.h1023
-rw-r--r--tools/tflite_accuracy/src/tflite_accuracy.cc494
-rw-r--r--tools/tflitefile_tool/config_saver.py131
-rwxr-xr-xtools/tflitefile_tool/model_parser.py35
-rw-r--r--tools/tflitefile_tool/model_printer.py8
-rw-r--r--tools/tflitefile_tool/model_saver.py36
-rwxr-xr-xtools/tflitefile_tool/operation.py32
-rw-r--r--tools/tflitefile_tool/option_printer.py34
-rw-r--r--tools/tflitefile_tool/select_operator.py2
-rw-r--r--tools/tflitefile_tool/tflite/AbsOptions.py28
-rw-r--r--tools/tflitefile_tool/tflite/BidirectionalSequenceLSTMOptions.py74
-rw-r--r--tools/tflitefile_tool/tflite/BidirectionalSequenceRNNOptions.py14
-rw-r--r--tools/tflitefile_tool/tflite/BuiltinOperator.py8
-rw-r--r--tools/tflitefile_tool/tflite/BuiltinOptions.py11
-rw-r--r--tools/tflitefile_tool/tflite/CustomQuantization.py61
-rw-r--r--tools/tflitefile_tool/tflite/FakeQuantOptions.py8
-rw-r--r--tools/tflitefile_tool/tflite/FloorModOptions.py28
-rw-r--r--tools/tflitefile_tool/tflite/LSHProjectionOptions.py4
-rw-r--r--tools/tflitefile_tool/tflite/LeakyReluOptions.py39
-rw-r--r--tools/tflitefile_tool/tflite/MirrorPadMode.py8
-rw-r--r--tools/tflitefile_tool/tflite/MirrorPadOptions.py39
-rw-r--r--tools/tflitefile_tool/tflite/QuantizationDetails.py8
-rw-r--r--tools/tflitefile_tool/tflite/QuantizationParameters.py36
-rw-r--r--tools/tflitefile_tool/tflite/RangeOptions.py28
-rw-r--r--tools/tflitefile_tool/tflite/ResizeNearestNeighborOptions.py40
-rw-r--r--tools/tflitefile_tool/tflite/SplitVOptions.py39
-rw-r--r--tools/tflitefile_tool/tflite/SquaredDifferenceOptions.py28
-rw-r--r--tools/tflitefile_tool/tflite/Tensor.py4
-rw-r--r--tools/tflitefile_tool/tflite/TensorType.py1
-rw-r--r--tools/tflitefile_tool/tflite/UnidirectionalSequenceLSTMOptions.py74
-rwxr-xr-xtools/tflkit/freeze_graph.sh26
-rw-r--r--tools/tflkit/summarize_pb.py2
-rwxr-xr-xtools/tflkit/tflite_convert.sh42
2292 files changed, 178770 insertions, 50386 deletions
diff --git a/.ctags b/.ctags
index e3d621775..6f33a26d3 100644
--- a/.ctags
+++ b/.ctags
@@ -1,5 +1,6 @@
--extra=+f
--exclude=Product
+--exclude=build
--exclude=tags
--exclude=tests/framework/cache
--exclude=tools/cross/rootfs
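The .ctags change above only swaps the old Product working directory for the new build directory in the exclude list. For reference, with Exuberant Ctags (which reads a project-local ./.ctags automatically) the patched file is roughly equivalent to running, from the source root, the sketch below; Universal Ctags resolves project-local options differently, so treat this as illustrative only:

    $ ctags -R --extra=+f --exclude=build --exclude=tags \
        --exclude=tests/framework/cache --exclude=tools/cross/rootfs .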
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..b8eec3df8
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+tests/nnapi/specs/* linguist-detectable=false
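linguist-detectable=false marks the generated NNAPI spec files so that GitHub's Linguist leaves them out of the repository's language statistics. Whether the attribute resolves as intended can be checked with git itself; using one of the spec paths from the diffstat above:

    $ git check-attr linguist-detectable -- tests/nnapi/specs/Ex/argmax_ex_float_1.mod.py
    tests/nnapi/specs/Ex/argmax_ex_float_1.mod.py: linguist-detectable: false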
diff --git a/.gitignore b/.gitignore
index da6e28990..5a5286016 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,59 +1,12 @@
-# Prerequisites
-*.d
-
-# Compiled Object files
-*.slo
-*.lo
-*.o
-*.obj
-
-# Precompiled Headers
-*.gch
-*.pch
-
-# Compiled Dynamic libraries
-*.so
-*.dylib
-*.dll
-
-# Fortran module files
-*.mod
-*.smod
-
-# Compiled Static libraries
-*.lai
-*.la
-*.a
-*.lib
-
-# Executables
-*.exe
-*.out
-*.app
-
# Working Path
/Product
+/build
/tools/cross/rootfs
-# Tools
-/.vscode
-/.svace-dir
-
# Compiled python3 code cache
**/__pycache__
*.pyc
-# vim temp files
-.*.sw*
-
-# ctags files
-tags
-
-# gnu-global files
-GPATH
-GTAGS
-GRTAGS
-
# Test cache for model download
/tests/framework/cache
@@ -70,28 +23,21 @@ GRTAGS
/tools/cross/ndk
# ignore the embeded cl_kernels
-/libs/ARMComputeEx/src/core/CL/cl_kernels/*.clembed
-/libs/ARMComputeEx/src/core/CL/cl_kernels/*.hembed
+/**/*.clembed
+/**/*.hembed
-# External stamp file
-/externals/*.stamp
-
-# External library
-/externals/eigen
-/externals/farmhash
-/externals/flatbuffers
-/externals/gemmlowp
-/externals/gtest
-/externals/neon_2_sse
-/externals/tensorflow
-/externals/acl
-/externals/absl
-/externals/nonius
+# External directory
+/externals
# tflkit info files
/tools/tflkit/*.info
# Generated tests
/tests/nnapi/src/generated
-/tests/runtime/neural_networks_test/generated
-/runtimes/tests/neural_networks_test/generated
+
+# Coverage
+/gcov
+/coverage
+
+# Makefile
+/Makefile
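The rewritten .gitignore drops the per-suffix object patterns in favor of a few directory entries (/build, /externals, the coverage outputs, and the generated top-level /Makefile). Which rule hides a given path can be queried directly; for example (the path is illustrative, and <line> stands for whatever line the /externals entry lands on):

    $ git check-ignore -v externals/tensorflow
    .gitignore:<line>:/externals	externals/tensorflow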
diff --git a/.mailmap b/.mailmap
new file mode 100644
index 000000000..12696d5aa
--- /dev/null
+++ b/.mailmap
@@ -0,0 +1,57 @@
+Aleksei Grebenkin <a.grebenkin@samsung.com>
+Andrew Tischenko <a.tischenko@partner.samsung.com>
+Andrey Shedko <a.shedko@samsung.com>
+Andrey Shedko <a.shedko@samsung.com> Андрей Шедько/AI Tools Lab /SRR/Assistant Engineer/삼성전자 <a.shedko@partner.samsung.com>
+Andrey Shedko <a.shedko@samsung.com> Андрей Шедько/AI Tools Lab /SRR/Assistant Engineer/삼성전자 <a.shedko@samsung.com>
+Cheongyo Bahk <ch.bahk@samsung.com>
+Chunseok Lee <chunseok.lee@samsung.com>
+Denis Maksimenko <d.maksimenko@partner.samsung.com>
+Devansh Bansal <b.devansh@samsung.com>
+Dilshodzhon Poshshoev <d.poshshoev@samsung.com>
+Dmitry Mozolev <d.mozolev@samsung.com>
+Efimov Alexander <a.efimov@samsung.com>
+Hanjoung Lee <hanjoung.lee@samsung.com>
+Hyeongseok Oh <hseok82.oh@samsung.com>
+HyungGyu Choi <hk0110.choi@samsung.com>
+Hyunsik Yoon <hyunsik.yoon@samsung.com>
+Inki Dae <inki.dae@samsung.com>
+Ivan Ivanovich Kulagin <i.kulagin@samsung.com>
+Ivan Vagin <ivan.vagin@samsung.com>
+Jiseob Jang <jiseob.jang@samsung.com>
+Jiyoung Yun <jy910.yun@samsung.com>
+Jonghyun Park <jh1302.park@samsung.com>
+Junghyun Kim <jh0822.kim@samsung.com>
+Kshitiz Bansal <kshitiz.b@samsung.com>
+Myungjae Lee <mjae.lee@samsung.com>
+Pavel Fattakhov <p.fattakhov@partner.samsung.com>
+Pavel Ilyutchenko <p.iliutchenk@samsung.com>
+Pavel Ilyutchenko <p.iliutchenk@samsung.com> Павел Ильютченко/AI Tools Lab /SRR/Assistant Engineer/삼성전자 <p.iliutchenk@partner.samsung.com>
+Pavel Ilyutchenko <p.iliutchenk@samsung.com> Павел Ильютченко/AI Tools Lab /SRR/Assistant Engineer/삼성전자 <p.iliutchenk@samsung.com>
+Prasanna R <prasanna.r@samsung.com>
+Praveen Doreswamy Naidu <praveen.dn@samsung.com>
+Roman Rusyaev <r.rusyaev@samsung.com>
+Saehie Park <saehie.park@samsung.com>
+Sanggyu Lee <sg5.lee@samsung.com>
+Sangjung Woo <sangjung.woo@samsung.com>
+Sangmin Seo <sangmin7.seo@samsung.com>
+Saulo Aldighieri Moraes <s.moraes@samsung.com>
+Saurav Babu <saurav.babu@samsung.com>
+Seok Namkoong <sk.namkoong@samsung.com>
+Seongwoo Chae <sw4670.chae@samsung.com>
+Sergei Barannikov <s.barannikov@samsung.com>
+Sergei Chicherin <s.chicherin@samsung.com>
+Sergey Vostokov <s.vostokov@samsung.com>
+Shubham Gupta <shub98.gupta@samsung.com>
+Siva Sai Vaddipati <siva.sai@samsung.com>
+Sujin Kim <sjsujin.kim@samsung.com>
+Sung-Jae Lee <sj925.lee@samsung.com>
+Sungjin Choi <lotieye.choi@samsung.com>
+Tanuj Tekriwal <tanuj.tekri@samsung.com>
+Timur Otellovich Ablyazimov <t.ablyazimov@samsung.com>
+Vishal Keshav <vishal.k1@samsung.com>
+Vitaliy Cherepanov <v.cherepanov@samsung.com>
+Vladimir Plazun <v.plazun@samsung.com>
+Vladimir Plazun <v.plazun@samsung.com> Vladimir Plazun/AI Tools Lab/Engineer/삼성전자 <v.plazun@partner.samsung.com>
+Vladimir Plazun <v.plazun@samsung.com> Vladimir Plazun/AI Tools Lab /SRR/Engineer/삼성전자 <v.plazun@partner.samsung.com>
+Yongseop Kim <yons.kim@samsung.com>
+Yuri Novikov <y.novikov@partner.samsung.com>
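Each .mailmap line maps the commit author form on the right to the canonical name and address on the left, so that git shortlog and git log --use-mailmap report a single identity per contributor. Taking one of the entries above, the mapping can be verified with:

    $ git check-mailmap "Андрей Шедько/AI Tools Lab /SRR/Assistant Engineer/삼성전자 <a.shedko@partner.samsung.com>"
    Andrey Shedko <a.shedko@samsung.com>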
diff --git a/CMakeLists.txt b/CMakeLists.txt
deleted file mode 100644
index 34824ff71..000000000
--- a/CMakeLists.txt
+++ /dev/null
@@ -1,91 +0,0 @@
-cmake_minimum_required(VERSION 3.1)
-
-project(nnfw)
-
-macro(nnfw_include PREFIX)
- include("${CMAKE_SOURCE_DIR}/cmake/modules/${PREFIX}.cmake")
-endmacro(nnfw_include)
-
-macro(nnfw_find_package PREFIX)
- find_package(${PREFIX} CONFIG NO_DEFAULT_PATH PATHS ${CMAKE_SOURCE_DIR}/cmake/packages ${ARGN})
-endmacro(nnfw_find_package)
-
-set(CMAKE_CXX_STANDARD 11)
-
-# identify platform: HOST_PLATFORM, TARGET_PLATFORM and related
-include("cmake/option/identify_platform.cmake")
-
-# platform specific options
-include("cmake/option/option_${TARGET_PLATFORM}.cmake")
-
-# apply compilation flags
-# note: this should be placed after cmake/option/option_xxx.cmake files
-include("cmake/ApplyCompileFlags.cmake")
-
-# Configuration flags
-include("cmake/CfgOptionFlags.cmake")
-# and besides CfgOptionFlags.cmake that can be given outside
-# OBS_BUILD: build boolean flag that tizen in OBS build
-# COVERAGE_BUILD: build boolean flag that enables converage test
-# ROOTFS_ARM: arm rootfs path for cross building
-# ROOTFS_ARM64: arm 64bit rootfs path for cross building, linux,tizen,android
-# TARGET_ARCH: target architecture string for cross building
-# TARGET_OS: target os string for cross building
-
-# NOTE '${CMAKE_INSTALL_PREFIX}/lib' should be added as a link directory as
-# CI server places pre-built ARM compute libraries on this directory.
-link_directories(${CMAKE_INSTALL_PREFIX}/lib)
-
-# Download configuration
-option(DOWNLOAD_TENSORFLOW "Download Tensorflow source" ON)
-option(DOWNLOAD_ABSL "Download Absl source" ON)
-option(DOWNLOAD_EIGEN "Download Eigen source" ON)
-option(DOWNLOAD_FARMHASH "Download farmhash source" ON)
-option(DOWNLOAD_GEMMLOWP "Download GEMM low precesion library source" ON)
-option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" ON)
-option(DOWNLOAD_FLATBUFFERS "Download FlatBuffers source" ON)
-option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
-option(DOWNLOAD_NONIUS "Download nonius source" ON)
-
-# GTest support
-option(BUILD_GTEST "Download and build Google Test" ON)
-nnfw_find_package(GTest QUIET)
-
-# NOTE Workaround to avoid build fail by tensorflow (or acl) package version mismatch on obs build
-if(OBS_BUILD)
- add_definitions(-DOBS_BUILD)
-endif(OBS_BUILD)
-
-nnfw_include(ExtendCMakeFunction)
-
-# TODO For now Android build is being enabled incrementally so not all subdirectories are added yet.
-# However we are going to have the same subdirectories with other OS eventually.
-if("${TARGET_OS}" STREQUAL "android")
-
- include_directories(externals/tensorflow)
- include_directories(externals/acl)
- include_directories(externals/acl/include)
- include_directories(externals/flatbuffers/include)
- include_directories(include)
-
- add_subdirectory(libs)
- add_subdirectory(externals)
- add_subdirectory(tests/tools/nnapi_test)
- add_subdirectory(tests/tools/tflite_benchmark)
- add_subdirectory(tests/nnapi)
-
- add_subdirectory(runtimes)
-
-else("${TARGET_OS}" STREQUAL "android") # General case (non-android build)
-
- if(NOT OBS_BUILD)
- add_subdirectory(externals)
- endif(NOT OBS_BUILD)
-
- add_subdirectory(contrib)
- add_subdirectory(libs)
- add_subdirectory(runtimes)
- add_subdirectory(tests)
- add_subdirectory(tools)
-
-endif("${TARGET_OS}" STREQUAL "android")
diff --git a/contrib/tflite_classify/.FORMATCHECKED b/CONTRIBTUTORS
index e69de29bb..e69de29bb 100644
--- a/contrib/tflite_classify/.FORMATCHECKED
+++ b/CONTRIBTUTORS
diff --git a/COPYRIGHT b/COPYRIGHT
new file mode 100644
index 000000000..3c788156c
--- /dev/null
+++ b/COPYRIGHT
@@ -0,0 +1 @@
+Copyright (c) <2019> <Samsung Inner Source Community> All Rights Reserved.
diff --git a/LICENSE b/LICENSE
index 4d3b3ab13..2411d90dc 100644
--- a/LICENSE
+++ b/LICENSE
@@ -2,6 +2,7 @@ This file provides full text of licenses used in this project
- Apache Licence 2.0
- MIT
+- BSD-2-Clause
...............................................................................
@@ -231,3 +232,49 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+
+.............................................................................
+COPYRIGHT
+
+All contributions by the University of California:
+Copyright (c) 2014-2017 The Regents of the University of California (Regents)
+All rights reserved.
+
+All other contributions:
+Copyright (c) 2014-2017, the respective contributors
+All rights reserved.
+
+Caffe uses a shared copyright model: each contributor holds copyright over
+their contributions to Caffe. The project versioning records all such
+contribution and copyright details. If a contributor wants to further mark
+their specific copyright on a particular contribution, they should indicate
+their copyright solely in the commit message of the change when it is
+committed.
+
+LICENSE
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+CONTRIBUTION AGREEMENT
+
+By contributing to the BVLC/caffe repository through pull-request, comment,
+or otherwise, the contributor releases their content to the
+license and copyright terms herein.
\ No newline at end of file
diff --git a/Makefile b/Makefile
deleted file mode 100644
index fda5b0c01..000000000
--- a/Makefile
+++ /dev/null
@@ -1,265 +0,0 @@
-TARGET_ARCH?=$(shell uname -p)
-BUILD_TYPE?=Debug
-CROSS_BUILD?=0
-HOST_OS?=linux
-TARGET_OS?=linux
-PARALLEL_BUILD?=1
-## TODO: fix obs build break
-OBS_BUILD?=OFF
-COVERAGE_BUILD?=0
-BENCHMARK_ACL_BUILD?=0
-OPTIONS?=
-GENERATE_FILES?=1
-
-# make TARGET and TYPE to lowercase
-TARGET_ARCH_LC=$(shell echo $(TARGET_ARCH) | tr A-Z a-z)
-BUILD_TYPE_LC=$(shell echo $(BUILD_TYPE) | tr A-Z a-z)
-# we need base name 'arm` for all arm arch
-TARGET_ARCH_BASE=$(TARGET_ARCH_LC)
-ifneq (,$(findstring arm64,$(TARGET_ARCH_BASE)))
- # arm64 as target-arch comes from Android
- TARGET_ARCH_BASE=arm64
- ifdef ROOTFS_DIR
- ROOTFS_ARM64=$(ROOTFS_DIR)
- export ROOTFS_ARM64
- endif
- # For now Android is the only option for arm64
- TARGET_OS:=android
-else ifneq (,$(findstring arm,$(TARGET_ARCH_BASE)))
- TARGET_ARCH_BASE=arm
- ifdef ROOTFS_DIR
- ROOTFS_ARM=$(ROOTFS_DIR)
- export ROOTFS_ARM
- endif
-else ifneq (,$(findstring aarch64,$(TARGET_ARCH_BASE)))
- # aarch64 as target-arch comes from all except for Android
- TARGET_ARCH_BASE=aarch64
- ifdef ROOTFS_DIR
- ROOTFS_ARM64=$(ROOTFS_DIR)
- export ROOTFS_ARM64
- endif
-endif
-# Todo: we may set CROSS_BUILD=1 when ROOTFS_DIR is given
-# the toolchain file, only for cross build
-ifeq ($(CROSS_BUILD),1)
- TOOLCHAIN_FILE=cmake/config/config_$(TARGET_ARCH_LC)-$(TARGET_OS).cmake
- OPTION_TOOLCHAIN=-DCMAKE_TOOLCHAIN_FILE=$(TOOLCHAIN_FILE)
-else
- OPTION_TOOLCHAIN=
-endif
-
-ifeq ($(COVERAGE_BUILD),1)
- OPTIONS+= -DCOVERAGE_BUILD=1
-else
- OPTIONS+= -DCOVERAGE_BUILD=0
-endif
-
-ifeq ($(BENCHMARK_ACL_BUILD),1)
- OPTIONS+= -DBUILD_BENCHMARK_ACL=1
-endif
-
-ifeq ($(PARALLEL_BUILD),1)
- # Get number of processors (linux only for now)
- ifeq ($(HOST_OS),linux)
- NPROCS?=$(shell grep -c ^processor /proc/cpuinfo)
- endif
-endif
-
-NPROCS?=1
-WORKHOME=Product
-WORKFOLDER=$(TARGET_ARCH_LC)-$(TARGET_OS).$(BUILD_TYPE_LC)
-WORKDIR=$(WORKHOME)/$(WORKFOLDER)
-
-BUILD_ROOT=$(WORKDIR)/obj
-INSTALL_ROOT=$(WORKDIR)/out
-
-BUILD_FOLDER=$(WORKFOLDER)/obj
-INSTALL_FOLDER=$(WORKFOLDER)/out
-BUILD_ALIAS=$(WORKHOME)/obj
-INSTALL_ALIAS=$(WORKHOME)/out
-
-INSTALL_PATH?=$(CURDIR)/$(WORKDIR)/out
-
-TIMESTAMP_CONFIGURE=$(WORKDIR)/CONFIGURE
-TIMESTAMP_BUILD=$(WORKDIR)/BUILD
-TIMESTAMP_INSTALL=$(WORKDIR)/INSTALL
-
-#
-# for Build Arm Compute Library with SCONS
-#
-ACL_FOLDER=externals/acl
-ACL_COMMAND=scons -j${NPROCS} neon=1 opencl=1 examples=0 embed_kernels=1 os=$(TARGET_OS)
-ifeq ($(TARGET_ARCH_LC),armv7l)
- ACL_COMMAND+= arch=armv7a
- ACL_BUILD_OUT=armv7a-$(TARGET_OS)
-else ifeq ($(TARGET_ARCH_LC),aarch64)
- ACL_COMMAND+= arch=arm64-v8a
- ACL_BUILD_OUT=arm64-v8a-$(TARGET_OS)
-else ifeq ($(TARGET_ARCH_BASE),arm64)
- ACL_COMMAND+= arch=arm64-v8a
- ACL_BUILD_OUT=arm64-v8a-$(TARGET_OS)
-endif
-ifeq ($(TARGET_OS),android)
- ACL_COMMAND+= Werror=0
- ANDROID_GNUSTL_PATH=$(ROOTFS_ARM64)/bin:$(ROOTFS_ARM64)/aarch64-linux-android/bin:$$PATH
-else
- ACL_COMMAND+= Werror=1
-endif
-ifeq ($(BUILD_TYPE_LC),debug)
- ACL_COMMAND+= debug=1 asserts=1
-endif
-ACL_FOLDER_NAME=$(ACL_BUILD_OUT).$(BUILD_TYPE_LC)
-ACL_COMMAND+= build_dir=$(ACL_FOLDER_NAME)
-ACL_FOLDER_BUILD=$(ACL_FOLDER)/build/$(ACL_FOLDER_NAME)
-#ifneq ($(EXT_ACL_FOLDER),)
-# ACL_FOLDER_BUILD=$(EXT_ACL_FOLDER)
-#endif
-
-all: build
-
-###
-### Command (public)
-###
-configure: configure_internal
-
-build: build_internal
-
-install: $(TIMESTAMP_INSTALL)
-
-clean:
- rm -rf $(WORKDIR)
- rm -rf externals/*.stamp
-
-acl: internal_acl_build internal_acl_install
-
-check: check_internal
-
-runtime: runtime_build_internal
-
-testbuild: test_build_internal
-
-tflite: tflite_build_internal
-
-###
-### Command (internal)
-###
-configure_internal:
-ifeq ($(GENERATE_FILES),1)
- tests/nnapi/specs/generate_test.sh
-endif
-
- mkdir -p $(BUILD_ROOT)
- cmake -B$(CURDIR)/$(BUILD_ROOT) -H$(CURDIR) \
- -DCMAKE_INSTALL_PREFIX=$(INSTALL_PATH) \
- -DCMAKE_BUILD_TYPE=$(BUILD_TYPE_LC) -DTARGET_ARCH=$(TARGET_ARCH_LC) \
- -DHOST_OS=$(HOST_OS) \
- -DTARGET_OS=$(TARGET_OS) \
- -DOBS_BUILD=$(OBS_BUILD) \
- $(OPTION_TOOLCHAIN) \
- $(OPTIONS)
- touch $(TIMESTAMP_CONFIGURE)
-
-build_internal: $(BUILD_ROOT)
- cd $(BUILD_ROOT) && make -j $(NPROCS) all
- rm -rf $(BUILD_ALIAS)
- ln -s $(BUILD_FOLDER) $(BUILD_ALIAS)
- touch $(TIMESTAMP_BUILD)
-
-install_internal:
- cd $(BUILD_ROOT) && make install
- rm -rf $(INSTALL_ALIAS)
- ln -s $(INSTALL_FOLDER) $(INSTALL_ALIAS)
- touch $(TIMESTAMP_INSTALL)
-
-internal_acl_build: configure_internal
-#ifeq ($(EXT_ACL_FOLDER),)
-ifeq ($(TARGET_OS),android)
- cd $(ACL_FOLDER) && CXX=clang++ CC=clang PATH=$(ANDROID_GNUSTL_PATH) $(ACL_COMMAND)
-else
- cd $(ACL_FOLDER) && $(ACL_COMMAND)
-endif
-#endif
-
-internal_acl_install:
- @echo $(ACL_FOLDER_BUILD)
- @mkdir -vp $(INSTALL_PATH)/lib
- @cp -v $(ACL_FOLDER_BUILD)/libarm_compute_core.so $(INSTALL_PATH)/lib/.
- @cp -v $(ACL_FOLDER_BUILD)/libarm_compute_graph.so $(INSTALL_PATH)/lib/.
- @cp -v $(ACL_FOLDER_BUILD)/libarm_compute.so $(INSTALL_PATH)/lib/.
-
-external_acl:
-ifneq ($(EXT_ACL_FOLDER),)
- @mkdir -vp $(INSTALL_PATH)/lib
- @cp -v $(EXT_ACL_FOLDER)/libarm_compute_core.so $(INSTALL_PATH)/lib/.
- @cp -v $(EXT_ACL_FOLDER)/libarm_compute_graph.so $(INSTALL_PATH)/lib/.
- @cp -v $(EXT_ACL_FOLDER)/libarm_compute.so $(INSTALL_PATH)/lib/.
-endif
-
-ifeq ($(CROSS_BUILD),1)
-check_internal: test_internal_cross
-else
-check_internal: test_internal
-endif
-
-# TODO: Enable tests on cross build
-test_internal_cross:
- @echo "Can't do tests on cross build"
-
-test_internal:
- tests/scripts/test_driver.sh .
-
-build_test_suite: install_internal
- @echo "packaging test suite"
- @rm -rf $(INSTALL_ROOT)/test-suite.tar.gz
- @tar -zcf test-suite.tar.gz tests/scripts tests/framework $(INSTALL_ALIAS) --dereference
- @mv test-suite.tar.gz $(INSTALL_ROOT)/.
-
-build_coverage_suite: install_internal
- @echo "packaging test-coverage suite"
- @rm -rf $(INSTALL_ROOT)/coverage-suite.tar.gz
- @find Product -name "*.gcno" > include_lists.txt
- @pwd | grep -o '/' | wc -l > tests/scripts/build_path_depth.txt
- @tar -zcf coverage-suite.tar.gz tests/scripts tests/framework $(INSTALL_ALIAS) --dereference -T include_lists.txt
- @rm -rf include_lists.txt tests/scripts/build_path_depth.txt
- @mv coverage-suite.tar.gz $(INSTALL_ROOT)/.
-
-runtime_build_internal: $(BUILD_ROOT)
- cd $(BUILD_ROOT) && make -j $(NPROCS) nnapi_pure_arm_compute
- rm -rf $(BUILD_ALIAS)
- ln -s $(BUILD_FOLDER) $(BUILD_ALIAS)
- touch $(TIMESTAMP_BUILD)
-
-test_build_internal: $(BUILD_ROOT)
- # Build test
- cd $(BUILD_ROOT) && make -j $(NPROCS) nnapi_test
- cd $(BUILD_ROOT) && make -j $(NPROCS) tflite_benchmark
- # Build unittest
- cd $(BUILD_ROOT) && make -j $(NPROCS) nnapi_gtest
- rm -rf $(BUILD_ALIAS)
- ln -s $(BUILD_FOLDER) $(BUILD_ALIAS)
- touch $(TIMESTAMP_BUILD)
-
-tflite_build_internal: $(BUILD_ROOT)
- # Build test
- cd $(BUILD_ROOT) && make -j $(NPROCS) tensorflow-lite gtest_main
- rm -rf $(BUILD_ALIAS)
- ln -s $(BUILD_FOLDER) $(BUILD_ALIAS)
- touch $(TIMESTAMP_BUILD)
-
-###
-### Timestamps
-###
-$(WORKDIR):
- mkdir -p $@
-
-$(BUILD_ROOT): $(WORKDIR)
- make configure_internal
-
-$(TIMESTAMP_CONFIGURE):
- make configure_internal
-
-$(TIMESTAMP_BUILD): $(TIMESTAMP_CONFIGURE)
- make build_internal
-
-$(TIMESTAMP_INSTALL): $(TIMESTAMP_BUILD)
- make install_internal
diff --git a/Makefile.template b/Makefile.template
new file mode 100644
index 000000000..a0ae361b8
--- /dev/null
+++ b/Makefile.template
@@ -0,0 +1,183 @@
+HOST_ARCH?=$(shell uname -p)
+TARGET_ARCH?=$(shell uname -p)
+BUILD_TYPE?=Debug
+CROSS_BUILD?=0
+HOST_OS?=linux
+TARGET_OS?=linux
+PARALLEL_BUILD?=1
+## TODO: fix OBS build break
+OBS_BUILD?=OFF
+COVERAGE_BUILD?=0
+BENCHMARK_ACL_BUILD?=0
+OPTIONS?=
+GENERATE_NNAPI_TESTS?=1
+TFLITE_MAJOR_VER?=1
+TFLITE_MINOR_VER?=13
+
+# convert HOST_ARCH, TARGET_ARCH, and BUILD_TYPE to lowercase
+HOST_ARCH_LC=$(shell echo $(HOST_ARCH) | tr A-Z a-z)
+TARGET_ARCH_LC=$(shell echo $(TARGET_ARCH) | tr A-Z a-z)
+BUILD_TYPE_LC=$(shell echo $(BUILD_TYPE) | tr A-Z a-z)
+# we need the base name `arm` for all ARM architectures
+TARGET_ARCH_BASE=$(TARGET_ARCH_LC)
+ifneq (,$(findstring arm64,$(TARGET_ARCH_BASE)))
+ # arm64 as target-arch comes from Android
+ TARGET_ARCH_BASE=arm64
+ # For now Android is the only option for arm64
+ TARGET_OS:=android
+else ifneq (,$(findstring arm,$(TARGET_ARCH_BASE)))
+ TARGET_ARCH_BASE=arm
+else ifneq (,$(findstring aarch64,$(TARGET_ARCH_BASE)))
+ # aarch64 as target-arch comes from everything except Android
+ TARGET_ARCH_BASE=aarch64
+endif
+# Set CROSS_BUILD=1 when ROOTFS_DIR is given and TARGET_ARCH differs from HOST_ARCH.
+ifneq ($(ROOTFS_DIR),)
+ifneq ($(TARGET_ARCH_LC),$(HOST_ARCH_LC))
+ CROSS_BUILD=$(if $(wildcard $(ROOTFS_DIR)),1,0)
+endif
+endif
+# the toolchain file, only for cross build
+ifeq ($(CROSS_BUILD),1)
+ TOOLCHAIN_FILE=cmake/buildtool/cross/toolchain_$(TARGET_ARCH_LC)-$(TARGET_OS).cmake
+ OPTIONS+= -DCMAKE_TOOLCHAIN_FILE=$(TOOLCHAIN_FILE)
+endif
+
+ifeq ($(COVERAGE_BUILD),1)
+ OPTIONS+= -DENABLE_COVERAGE=ON
+else
+ OPTIONS+= -DENABLE_COVERAGE=OFF
+endif
+
+ifeq ($(BENCHMARK_ACL_BUILD),1)
+ OPTIONS+= -DBUILD_BENCHMARK_ACL=1
+endif
+
+ifneq ($(EXT_ACL_FOLDER),)
+ OPTIONS+= -DBUILD_ARMCOMPUTE=OFF
+ OPTIONS+= -DARMCompute_EXTDIR=$(EXT_ACL_FOLDER)
+endif
+
+ifneq ($(EXTERNAL_VOLUME),)
+ OPTIONS+= -DNNFW_EXTERNALS_DIR=$(EXTERNAL_VOLUME)
+endif
+
+ifneq ($(OBS_BUILD),OFF)
+ # Use pre-installed Google Test library
+ OPTIONS+= -DBUILD_GTEST=FALSE
+ OPTIONS+= -DBUILD_ARMCOMPUTE=FALSE
+ OPTIONS+= -DDOWNLOAD_ARMCOMPUTE=FALSE
+ OPTIONS+= -DDOWNLOAD_TENSORFLOW=FALSE
+ # Use pre-installed TensorFlow Lite library
+ OPTIONS+= -DBUILD_TENSORFLOW_LITE=FALSE
+ # Disable logging runtime build for Tizen
+ OPTIONS+= -DBUILD_LOGGING=FALSE
+ OPTIONS+= -DBUILD_TOOLS=FALSE
+ # Disable "tflite-run" and "tflite_benchmark_model" for Tizen
+ OPTIONS+= -DBUILD_TFLITE_RUN=FALSE
+ OPTIONS+= -DBUILD_TFLITE_BENCHMARK_MODEL=FALSE
+endif
+
+ifeq ($(GENERATE_NNAPI_TESTS),1)
+ OPTIONS+= -DGENERATE_RUNTIME_NNAPI_TESTS=ON
+endif
+
+ifeq ($(PARALLEL_BUILD),1)
+ # Get number of processors (Linux only for now)
+ ifeq ($(HOST_OS),linux)
+ NPROCS?=$(shell grep -c ^processor /proc/cpuinfo)
+ endif
+endif
+
+NPROCS?=1
+WORKHOME=Product
+WORKFOLDER=$(TARGET_ARCH_LC)-$(TARGET_OS).$(BUILD_TYPE_LC)
+WORKDIR=$(WORKHOME)/$(WORKFOLDER)
+
+BUILD_ROOT=$(WORKDIR)/obj
+INSTALL_ROOT=$(WORKDIR)/out
+
+BUILD_FOLDER=$(WORKFOLDER)/obj
+INSTALL_FOLDER=$(WORKFOLDER)/out
+BUILD_ALIAS=$(WORKHOME)/obj
+INSTALL_ALIAS=$(WORKHOME)/out
+
+INSTALL_PATH?=$(CURDIR)/$(WORKDIR)/out
+
+TIMESTAMP_CONFIGURE=$(WORKDIR)/CONFIGURE
+TIMESTAMP_BUILD=$(WORKDIR)/BUILD
+TIMESTAMP_INSTALL=$(WORKDIR)/INSTALL
+
+all: build
+
+###
+### Command (public)
+###
+configure: configure_internal
+
+build: build_internal
+
+install: $(TIMESTAMP_INSTALL)
+
+clean:
+ rm -rf $(WORKDIR)
+
+distclean:
+ rm -rf $(WORKDIR)
+ rm -rf externals/*.stamp
+ rm -rf tests/nnapi/src/generated/
+
+###
+### Command (internal)
+###
+configure_internal:
+ NNFW_BUILD_DIR="$(CURDIR)/$(BUILD_ROOT)" NNFW_INSTALL_PREFIX=$(INSTALL_PATH) ./nnfw configure \
+ -DCMAKE_BUILD_TYPE=$(BUILD_TYPE_LC) -DTARGET_ARCH=$(TARGET_ARCH_LC) \
+ -DHOST_OS=$(HOST_OS) \
+ -DTARGET_OS=$(TARGET_OS) \
+ -DTFLITE_MAJOR_VER=$(TFLITE_MAJOR_VER) \
+ -DTFLITE_MINOR_VER=$(TFLITE_MINOR_VER) \
+ $(OPTIONS)
+ touch $(TIMESTAMP_CONFIGURE)
+
+build_internal: $(BUILD_ROOT)
+ cd $(BUILD_ROOT) && make -j $(NPROCS) all
+ rm -rf $(BUILD_ALIAS)
+ ln -s $(BUILD_FOLDER) $(BUILD_ALIAS)
+ touch $(TIMESTAMP_BUILD)
+
+install_internal:
+ cd $(BUILD_ROOT) && make install
+ rm -rf $(INSTALL_ALIAS)
+ ln -s $(INSTALL_FOLDER) $(INSTALL_ALIAS)
+ touch $(TIMESTAMP_INSTALL)
+
+build_test_suite: install_internal
+ @echo "packaging test suite"
+ @rm -rf $(INSTALL_ROOT)/test-suite.tar.gz
+ @tar -zcf test-suite.tar.gz tests/scripts tests/framework infra $(INSTALL_ALIAS) --dereference
+ @mv test-suite.tar.gz $(INSTALL_ROOT)/.
+
+build_coverage_suite: install_internal
+ @echo "packaging test-coverage suite"
+ @rm -rf $(INSTALL_ROOT)/coverage-suite.tar.gz
+ @find Product -name "*.gcno" > include_lists.txt
+ @pwd | grep -o '/' | wc -l > tests/scripts/build_path_depth.txt
+ @tar -zcf coverage-suite.tar.gz tests/scripts tests/framework tools/lcov-to-cobertura-xml \
+ nnas nnfw infra runtimes $(INSTALL_ALIAS) --dereference -T include_lists.txt
+ @rm -rf include_lists.txt tests/scripts/build_path_depth.txt
+ @mv coverage-suite.tar.gz $(INSTALL_ROOT)/.
+
+###
+### Timestamps
+###
+$(WORKDIR):
+ mkdir -p $@
+
+$(BUILD_ROOT): $(WORKDIR) configure_internal
+
+$(TIMESTAMP_CONFIGURE): configure_internal
+
+$(TIMESTAMP_BUILD): $(TIMESTAMP_CONFIGURE) build_internal
+
+$(TIMESTAMP_INSTALL): $(TIMESTAMP_BUILD) install_internal
diff --git a/README.md b/README.md
index 952c09883..53a4b2805 100644
--- a/README.md
+++ b/README.md
@@ -3,18 +3,32 @@
A high-performance, on-device neural network inference framework
## Goal
+
This project _nnfw_ aims at providing a high-performance, on-device neural network (NN) inference
framework that performs inference of a given NN model on processors, such as CPU, GPU, or NPU, in
-the target platform, such as Tizen and Smart Machine Platform (SMP).
+the target platform, such as a Linux kernel-based OS, including Tizen.
## Project Documents
-- [Roadmap](docs/roadmap.md)
-- [SW Requirement Specification](docs/project/2018_requirement_specification.md)
-- [SW High Level Design](docs/project/2018_high_level_design.md)
+
+- [Roadmap](docs/nnfw/roadmap.md)
+- [SW Requirement Specification](docs/nnfw/project/2019_requirement_specification.md)
+- [SW High Level Design](docs/nnfw/project/2018_high_level_design.md)
## Getting started
-- For the contribution, please refer to our [contribution guide](docs/HowToContribute.md).
-- You can also find how-to documents [HERE](docs/howto.md).
+
+- To contribute, please refer to our [contribution guide](docs/nnfw/HowToContribute.md).
+- You can also find how-to documents [here](docs/nnfw/howto.md).
+
+## Maintainers
+
+- Sung-Jae Lee <<sj925.lee@samsung.com>>
+- Chunseok Lee <<chunseok.lee@samsung.com>>
+
+## Committers
+
+- Hyeongseok Oh <<hseok82.oh@samsung.com>>
+- Hanjoung Lee <<hanjoung.lee@samsung.com>>
+- Sharan Allur <<sharan.allur@samsung.com>>
## Feature Request (NEW)
@@ -22,16 +36,56 @@ You can suggest development of nnfw's features that are not yet available.
The functions requested so far can be checked in the [popular feature request](https://github.sec.samsung.net/STAR/nnfw/issues?utf8=%E2%9C%93&q=is%3Aopen+is%3Aissue+label%3AFEATURE_REQUEST+sort%3Areactions-%2B1-desc) list.
-- If the feature you want is on the list, :+1: to the body of the issue. The feature with the most
+- If the feature you want is on the list, add a :+1: to the body of the issue. The feature with the most
:+1: is placed at the top of the list. When adding new features, we will prioritize them with this reference.
Of course, it is good to add an additional comment which describes your request in detail.
- For features not listed, [create a new issue](https://github.sec.samsung.net/STAR/nnfw/issues/new).
Sooner or later, the maintainer will tag the `FEATURE_REQUEST` label and appear on the list.
-We expect most current feature requests to be focused on operator kernel implementations.
-It is good to make a request, but it is better if you contribute by yourself. See the following guide,
-[How to Implement Operator Kernel](docs/HowToImplementOperatorKernel.md), for help.
+We expect that one of the most frequent feature requests will be operator kernel implementation.
+It is good to make a request, but it is better if you contribute yourself. See the following guide,
+[How to Implement Operator Kernel](docs/nnfw/HowToImplementOperatorKernel.md), for help.
We are looking forward to your participation.
Thank you in advance!
+
+# nncc
+Re-targetable neural network (NN) model compilation framework
+
+## Goals
+nncc, which stands for neural network compiler collection, aims to provide a general framework for
+compiling a given NN model to an artifact that runs on various target devices such as CPU, GPU, or
+NPU.
+
+## Maintainers
+
+- Sung-Jae Lee <<sj925.lee@samsung.com>>
+- Jonghyun Park <<jh1302.park@samsung.com>>
+
+## Committers
+
+- Saehie Park <<saehie.park@samsung.com>>
+- Hyeongseok Oh <<hseok82.oh@samsung.com>>
+- Efimov Alexander <<a.efimov@samsung.com>>
+
+----
+
+## Notice
+
+### 22/07/2019
+
+Congratulations! On July 22nd, 2019, the _nnfw_ repo and the
+[_nncc_](https://github.sec.samsung.net/STAR/nncc) repo were finally integrated into a single one. Now
+all activities related to the development of _nnas_ (Neural Network Acceleration Solution) will
+proceed in this integrated _nnfw_ repo. The old _nncc_ repo will be maintained only for follow-up on
+remaining issues and for preserving development history. The following notice will remain in place
+until the update of documents in the integrated repo is complete.
+
+### 02/05/2019
+
+~~We are currently working on [_nncc_](https://github.sec.samsung.net/STAR/nncc) as a sibling project.
+In our plan, the two projects will soon be integrated into one, focusing on their roles as the
+front-end (_nncc_) and back-end (_nnfw_), respectively. This will be accompanied by the physical
+combination of the GitHub repos.~~ You can find the latest roadmap of the integrated project
+[here](https://github.sec.samsung.net/orgs/STAR/projects/1).
diff --git a/cmake/ApplyCompileFlags.cmake b/cmake/ApplyCompileFlags.cmake
deleted file mode 100644
index e2124bb1f..000000000
--- a/cmake/ApplyCompileFlags.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-# add common flags
-foreach(FLAG ${FLAGS_COMMON})
- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAG}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}")
-endforeach()
-
-# add c flags
-foreach(FLAG ${FLAGS_CONLY})
- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAG}")
-endforeach()
-
-# add cxx flags
-foreach(FLAG ${FLAGS_CXXONLY})
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}")
-endforeach()
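
A minimal sketch of how this helper was meant to be driven: an option file fills the
FLAGS_* lists, then includes this script to fold them into the global flags. The flag
values below are illustrative only (the option_*.cmake files further down show the real ones):

    # populate the flag lists first (illustrative values)
    set(FLAGS_COMMON ${FLAGS_COMMON} "-ftree-vectorize" "-fPIC")
    set(FLAGS_CXXONLY ${FLAGS_CXXONLY} "-Wno-ignored-attributes")
    # then fold them into CMAKE_C_FLAGS / CMAKE_CXX_FLAGS
    include(cmake/ApplyCompileFlags.cmake)
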
diff --git a/cmake/CfgOptionFlags.cmake b/cmake/CfgOptionFlags.cmake
deleted file mode 100644
index f14033592..000000000
--- a/cmake/CfgOptionFlags.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# Configuration flags
-#
-option(BUILD_PURE_ARM_COMPUTE "Build pure_arm_compute runtime" ON)
-option(BUILD_NEURUN "Build neurun" OFF) #if implementation is done, it would replace nn runtime.
-option(BUILD_RUNTIME_NNAPI_TEST "Build Runtime NN API Generated Test" ON)
-option(BUILD_TFLITE_BENCHMARK_MODEL "Build tflite benchmark model" OFF)
-option(BUILD_NNAPI_QUICKCHECK "Build NN API Quickcheck tools" OFF)
-#
-# Configuration flags for contrib
-#
-option(BUILD_BENCHMARK_ACL "Build ARM Compute Library Benchmarks" OFF)
-option(BUILD_DETECTION_APP "Build detection example app" OFF)
-option(BUILD_LABS "Build lab projects" OFF)
-option(BUILD_TFLITE_TEST "Build tensorflow lite test" OFF)
-option(BUILD_TFLITE_CLASSIFY_APP "Build tflite_classify app" OFF)
-
-if("${TARGET_ARCH}" STREQUAL "armv7l" AND NOT "${TARGET_OS}" STREQUAL "tizen")
- set(BUILD_PURE_ARM_COMPUTE ON)
-endif()
-
-# On x86, disable pureacl/new runtime build, which depends on the ARM Compute Library
-if("${TARGET_ARCH}" STREQUAL "x86_64")
- set(BUILD_PURE_ARM_COMPUTE OFF)
- set(BUILD_NEURUN OFF)
-endif()
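
These options gate optional subprojects at configure time. A sketch of the intended
guard pattern, with a hypothetical consumer (contrib/CMakeLists.txt below simply globs
every contrib subdirectory, so the per-project gating lives in each subproject):

    if(BUILD_DETECTION_APP)
      add_subdirectory(detection)  # hypothetical guarded subproject
    endif()
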
diff --git a/cmake/config/config_aarch64-linux.cmake b/cmake/config/config_aarch64-linux.cmake
deleted file mode 100644
index 4879c5817..000000000
--- a/cmake/config/config_aarch64-linux.cmake
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# config for aarch64-linux
-#
-include(CMakeForceCompiler)
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_PROCESSOR aarch64)
-
-set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)
-set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)
-
-# where is the target environment
-set(ROOTFS_ARM64 $ENV{ROOTFS_ARM64})
-if(NOT EXISTS "${ROOTFS_ARM64}/lib/aarch64-linux-gnu")
- set(ROOTFS_ARM64 "${CMAKE_CURRENT_LIST_DIR}/../../tools/cross/rootfs/arm64")
-endif()
-
-set(CMAKE_SYSROOT ${ROOTFS_ARM64})
-set(CMAKE_FIND_ROOT_PATH ${ROOTFS_ARM64})
-set(CMAKE_SHARED_LINKER_FLAGS
- "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_ARM64}"
- CACHE INTERNAL "" FORCE)
-set(CMAKE_EXE_LINKER_FLAGS
- "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_ARM64}"
- CACHE INTERNAL "" FORCE)
-
-# search for programs in the build host directories
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-
-# for libraries and headers in the target directories
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
diff --git a/cmake/config/config_aarch64-tizen.cmake b/cmake/config/config_aarch64-tizen.cmake
deleted file mode 100644
index 422174712..000000000
--- a/cmake/config/config_aarch64-tizen.cmake
+++ /dev/null
@@ -1,55 +0,0 @@
-#
-# config for aarch64-linux
-#
-include(CMakeForceCompiler)
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_PROCESSOR aarch64)
-
-set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc-5)
-set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++-5)
-
-set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/6.2.1")
-
-# where is the target environment
-set(ROOTFS_ARM64 $ENV{ROOTFS_ARM64})
-if(NOT EXISTS "${ROOTFS_ARM64}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
- set(ROOTFS_ARM64 "${CMAKE_SOURCE_DIR}/tools/cross/rootfs/arm64")
-endif()
-
-set(CMAKE_SYSROOT ${ROOTFS_ARM64})
-set(CMAKE_FIND_ROOT_PATH ${ROOTFS_ARM64})
-set(CMAKE_SHARED_LINKER_FLAGS
- "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_ARM64}"
- CACHE INTERNAL "" FORCE)
-set(CMAKE_EXE_LINKER_FLAGS
- "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_ARM64}"
- CACHE INTERNAL "" FORCE)
-
-# search for programs in the build host directories
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-
-# for libraries and headers in the target directories
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
-
-add_compile_options(--sysroot=${ROOTFS_ARM64})
-
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_ARM64}")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_ARM64}")
-
-include_directories(SYSTEM ${ROOTFS_ARM64}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
-include_directories(SYSTEM ${ROOTFS_ARM64}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
-add_compile_options(-Wno-deprecated-declarations) # compile-time option
-add_compile_options(-D__extern_always_inline=inline) # compile-time option
-
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -B${ROOTFS_ARM64}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_ARM64}/lib64")
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_ARM64}/usr/lib64")
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_ARM64}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -B${ROOTFS_ARM64}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_ARM64}/lib64")
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_ARM64}/usr/lib64")
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_ARM64}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
diff --git a/cmake/config/config_arm64-android.cmake b/cmake/config/config_arm64-android.cmake
deleted file mode 100644
index 1c096b583..000000000
--- a/cmake/config/config_arm64-android.cmake
+++ /dev/null
@@ -1,44 +0,0 @@
-set(ANDROID_STANDALONE $ENV{ROOTFS_ARM64})
-set(CROSS_NDK_TOOLCHAIN ${ANDROID_STANDALONE}/bin)
-set(CROSS_ROOTFS ${ANDROID_STANDALONE}/sysroot)
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_VERSION 1)
-set(CMAKE_SYSTEM_PROCESSOR aarch64)
-
-## Specify the toolchain
-set(TOOLCHAIN "aarch64-linux-android")
-set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
-set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
-
-find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
-find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
-find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
-
-add_compile_options(--sysroot=${CROSS_ROOTFS})
-add_compile_options(-fPIE)
-
-## Needed for Android or bionic specific conditionals
-#add_compile_options(-D__ANDROID__)
-#add_compile_options(-D__BIONIC__)
-
-## NOTE Not sure this is safe. This may cause side effects.
-## Without this, it cannot find `std::stol`, `std::stoi` and so on, with android toolchain
-add_compile_options(-D_GLIBCXX_USE_C99=1)
-
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-
-set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
diff --git a/cmake/config/config_armv7l-linux.cmake b/cmake/config/config_armv7l-linux.cmake
deleted file mode 100644
index e092596a2..000000000
--- a/cmake/config/config_armv7l-linux.cmake
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# config for arm-linux
-#
-include(CMakeForceCompiler)
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_PROCESSOR armv7l)
-
-set(CMAKE_C_COMPILER arm-linux-gnueabihf-gcc)
-set(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)
-
-# where is the target environment
-set(ROOTFS_ARM $ENV{ROOTFS_ARM})
-if(NOT EXISTS "${ROOTFS_ARM}/lib/arm-linux-gnueabihf")
- set(ROOTFS_ARM "${CMAKE_CURRENT_LIST_DIR}/../../tools/cross/rootfs/arm")
-endif()
-
-set(CMAKE_SYSROOT ${ROOTFS_ARM})
-set(CMAKE_FIND_ROOT_PATH ${ROOTFS_ARM})
-set(CMAKE_SHARED_LINKER_FLAGS
- "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_ARM}"
- CACHE INTERNAL "" FORCE)
-set(CMAKE_EXE_LINKER_FLAGS
- "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_ARM}"
- CACHE INTERNAL "" FORCE)
-
-# search for programs in the build host directories
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-
-# for libraries and headers in the target directories
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
diff --git a/cmake/config/config_armv7l-tizen.cmake b/cmake/config/config_armv7l-tizen.cmake
deleted file mode 100644
index 7971d9156..000000000
--- a/cmake/config/config_armv7l-tizen.cmake
+++ /dev/null
@@ -1,60 +0,0 @@
-#
-# config for arm-linux
-#
-include(CMakeForceCompiler)
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_PROCESSOR armv7l)
-
-set(CMAKE_C_COMPILER arm-linux-gnueabi-gcc-5)
-set(CMAKE_CXX_COMPILER arm-linux-gnueabi-g++-5)
-
-set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
-
-# where is the target environment
-set(ROOTFS_ARM $ENV{ROOTFS_ARM})
-if(NOT EXISTS "${ROOTFS_ARM}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
- set(ROOTFS_ARM "${CMAKE_SOURCE_DIR}/tools/cross/rootfs/armel")
-endif()
-
-set(CMAKE_SYSROOT ${ROOTFS_ARM})
-set(CMAKE_FIND_ROOT_PATH ${ROOTFS_ARM})
-set(CMAKE_SHARED_LINKER_FLAGS
- "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_ARM}"
- CACHE INTERNAL "" FORCE)
-set(CMAKE_EXE_LINKER_FLAGS
- "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_ARM}"
- CACHE INTERNAL "" FORCE)
-
-# search for programs in the build host directories
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-
-# for libraries and headers in the target directories
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
-
-
-
-add_compile_options(-mthumb)
-add_compile_options(-mfpu=neon-vfpv4)
-add_compile_options(-mfloat-abi=softfp)
-add_compile_options(--sysroot=${ROOTFS_ARM})
-
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_ARM}")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_ARM}")
-
-include_directories(SYSTEM ${ROOTFS_ARM}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
-include_directories(SYSTEM ${ROOTFS_ARM}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
-add_compile_options(-Wno-deprecated-declarations) # compile-time option
-add_compile_options(-D__extern_always_inline=inline) # compile-time option
-
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -B${ROOTFS_ARM}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_ARM}/lib")
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_ARM}/usr/lib")
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_ARM}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -B${ROOTFS_ARM}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_ARM}/lib")
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_ARM}/usr/lib")
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_ARM}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
diff --git a/cmake/config/config_x86_64-linux.cmake b/cmake/config/config_x86_64-linux.cmake
deleted file mode 100644
index 3dee8766c..000000000
--- a/cmake/config/config_x86_64-linux.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-#
-# config for x86_64-linux
-#
-include(CMakeForceCompiler)
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_PROCESSOR x86_64)
diff --git a/cmake/modules/ExternalSourceTools.cmake b/cmake/modules/ExternalSourceTools.cmake
deleted file mode 100644
index 76ebb5500..000000000
--- a/cmake/modules/ExternalSourceTools.cmake
+++ /dev/null
@@ -1,58 +0,0 @@
-function(ExternalSource_Download PREFIX URL)
- get_filename_component(FILENAME ${URL} NAME)
-
- set(CACHE_DIR "${CMAKE_SOURCE_DIR}/externals")
- set(OUT_DIR "${CACHE_DIR}/${PREFIX}")
- set(TMP_DIR "${CACHE_DIR}/${PREFIX}-tmp")
-
- set(DOWNLOAD_PATH "${CACHE_DIR}/${PREFIX}-${FILENAME}")
- set(STAMP_PATH "${CACHE_DIR}/${PREFIX}.stamp")
-
- if(NOT EXISTS "${CACHE_DIR}")
- file(MAKE_DIRECTORY "${CACHE_DIR}")
- endif(NOT EXISTS "${CACHE_DIR}")
-
- # TODO Check MD5 for correctness
- set(MATCH_URL FALSE)
- if(EXISTS "${STAMP_PATH}")
- file(READ "${STAMP_PATH}" SAVED_URL)
- if("${SAVED_URL}" STREQUAL "${URL}")
- set(MATCH_URL TRUE)
- endif("${SAVED_URL}" STREQUAL "${URL}")
- endif(EXISTS "${STAMP_PATH}")
-
- if(NOT EXISTS "${STAMP_PATH}" OR NOT EXISTS "${OUT_DIR}" OR NOT MATCH_URL)
- file(REMOVE_RECURSE "${OUT_DIR}")
- file(REMOVE_RECURSE "${TMP_DIR}")
-
- file(MAKE_DIRECTORY "${TMP_DIR}")
-
- message("-- Download ${PREFIX} from ${URL}")
- file(DOWNLOAD ${URL} "${DOWNLOAD_PATH}")
- message("-- Download ${PREFIX} from ${URL} - done")
-
- message("-- Extract ${PREFIX}")
- execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz "${DOWNLOAD_PATH}"
- WORKING_DIRECTORY "${TMP_DIR}")
- file(REMOVE "${DOWNLOAD_PATH}")
- message("-- Extract ${PREFIX} - done")
-
- message("-- Cleanup ${PREFIX}")
- file(GLOB contents "${TMP_DIR}/*")
- list(LENGTH contents n)
- if(NOT n EQUAL 1 OR NOT IS_DIRECTORY "${contents}")
- set(contents "${TMP_DIR}")
- endif()
-
- get_filename_component(contents ${contents} ABSOLUTE)
-
- file(RENAME ${contents} "${OUT_DIR}")
- file(REMOVE_RECURSE "${TMP_DIR}")
- file(WRITE "${STAMP_PATH}" "${URL}")
- message("-- Cleanup ${PREFIX} - done")
- endif()
-
- set(${PREFIX}_SOURCE_DIR "${OUT_DIR}" PARENT_SCOPE)
-endfunction(ExternalSource_Download)
-
-set(ExternalSourceTools_FOUND TRUE)
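
ExternalSource_Download caches each archive under externals/<PREFIX> and skips the
download when the stamp file still records the same URL. A minimal sketch of the calling
convention used by the *SourceConfig.cmake files below; the `foo` prefix and URL are
hypothetical placeholders:

    nnfw_include(ExternalSourceTools)
    ExternalSource_Download("foo" "https://example.com/foo/archive/v1.0.tar.gz")
    # the function exports <PREFIX>_SOURCE_DIR to the caller's scope
    message(STATUS "foo sources unpacked at ${foo_SOURCE_DIR}")
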
diff --git a/cmake/option/identify_platform.cmake b/cmake/option/identify_platform.cmake
deleted file mode 100644
index 3f62b524f..000000000
--- a/cmake/option/identify_platform.cmake
+++ /dev/null
@@ -1,48 +0,0 @@
-# set host platform to build
-if(NOT HOST_ARCH OR "${HOST_ARCH}" STREQUAL "")
- set(HOST_ARCH ${CMAKE_HOST_SYSTEM_PROCESSOR})
-endif()
-
-# set target platform to run
-if(NOT TARGET_ARCH OR "${TARGET_ARCH}" STREQUAL "")
- set(TARGET_ARCH "${HOST_ARCH}")
-endif()
-
-if(NOT DEFINED TARGET_OS)
- set(TARGET_OS "${HOST_OS}")
-endif()
-
-if("${HOST_ARCH}" STREQUAL "x86_64")
- set(HOST_ARCH_BASE ${HOST_ARCH})
-elseif("${HOST_ARCH}" STREQUAL "armv7l")
- set(HOST_ARCH_BASE "arm")
-elseif("${HOST_ARCH}" STREQUAL "arm64")
- set(HOST_ARCH_BASE "arm64")
-elseif("${HOST_ARCH}" STREQUAL "aarch64")
- set(HOST_ARCH_BASE "aarch64")
-else()
- message(FATAL_ERROR "'${HOST_ARCH}' architecture is not supported")
-endif()
-
-if("${TARGET_ARCH}" STREQUAL "x86_64")
- set(TARGET_ARCH_BASE ${TARGET_ARCH})
-elseif("${TARGET_ARCH}" STREQUAL "armv7l")
- set(TARGET_ARCH_BASE "arm")
-elseif("${TARGET_ARCH}" STREQUAL "arm64")
- set(TARGET_ARCH_BASE "arm64")
-elseif("${TARGET_ARCH}" STREQUAL "aarch64")
- set(TARGET_ARCH_BASE "aarch64")
-else()
- message(FATAL_ERROR "'${TARGET_ARCH}' architecture is not supported")
-endif()
-
-# Determine native or cross build
-if("${HOST_ARCH}" STREQUAL "${TARGET_ARCH}")
- set(BUILD_IS_NATIVE True)
-else()
- set(BUILD_IS_NATIVE False)
-endif()
-
-# host & target platform name
-set(HOST_PLATFORM "${HOST_ARCH}-${HOST_OS}")
-set(TARGET_PLATFORM "${TARGET_ARCH}-${TARGET_OS}")
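
The script leaves HOST_PLATFORM, TARGET_PLATFORM, and BUILD_IS_NATIVE behind for later
build logic. A sketch of a consumer, assuming it is included from the top-level
CMakeLists.txt of this tree:

    include(cmake/option/identify_platform.cmake)
    message(STATUS "building on ${HOST_PLATFORM} for ${TARGET_PLATFORM}")
    if(NOT BUILD_IS_NATIVE)
      # cross build: a matching cmake/config/config_<arch>-<os>.cmake is expected
      message(STATUS "cross build targeting ${TARGET_ARCH_BASE}")
    endif()
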
diff --git a/cmake/option/option_aarch64-linux.cmake b/cmake/option/option_aarch64-linux.cmake
deleted file mode 100644
index fa5c9205c..000000000
--- a/cmake/option/option_aarch64-linux.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-#
-# aarch64 linux compile options
-#
-
-message(STATUS "Building for AARCH64 Linux")
-
-# include linux common
-include("cmake/option/option_linux.cmake")
-
-if(NOT EXISTS "${ROOTFS_ARM64}/lib/aarch64-linux-gnu")
- message(FATAL_ERROR "Please prepare RootFS for ARM64")
-endif()
-
-# addition for aarch64-linux
-set(FLAGS_COMMON ${FLAGS_COMMON}
- )
diff --git a/cmake/option/option_aarch64-tizen.cmake b/cmake/option/option_aarch64-tizen.cmake
deleted file mode 100644
index 5e37f35a9..000000000
--- a/cmake/option/option_aarch64-tizen.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# aarch64 tizen compile options
-#
-
-message(STATUS "Building for AARCH64 Tizen")
-
-# TODO : add and use option_tizen if something uncommon comes up
-# include linux common
-include("cmake/option/option_linux.cmake")
-
-# TODO : support rootfs setting for tizen cross-build
-
-# addition for aarch64-tizen
-set(FLAGS_COMMON ${FLAGS_COMMON}
- )
diff --git a/cmake/option/option_arm64-android.cmake b/cmake/option/option_arm64-android.cmake
deleted file mode 100644
index 3ce2c815b..000000000
--- a/cmake/option/option_arm64-android.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-include("cmake/option/option_linux.cmake")
-
-# On Android, pthread is contained in bionic(libc)
-set(LIB_PTHREAD "")
-
-# SIMD for arm64
-set(FLAGS_COMMON ${FLAGS_COMMON}
- "-ftree-vectorize"
- )
diff --git a/cmake/option/option_armv7l-linux.cmake b/cmake/option/option_armv7l-linux.cmake
deleted file mode 100644
index b295b4a82..000000000
--- a/cmake/option/option_armv7l-linux.cmake
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# armv7l linux compile options
-#
-
-message(STATUS "Building for ARMv7l Linux")
-
-# include linux common
-include("cmake/option/option_linux.cmake")
-
-if(NOT EXISTS "${ROOTFS_ARM}/lib/arm-linux-gnueabihf")
- message(FATAL_ERROR "Please prepare RootFS for ARM")
-endif()
-
-# addition for arm-linux
-set(FLAGS_COMMON ${FLAGS_COMMON}
- "-mcpu=cortex-a7"
- "-mfloat-abi=hard"
- "-mfpu=neon-vfpv4"
- "-funsafe-math-optimizations"
- "-ftree-vectorize"
- "-fPIC"
- )
-
-# remove warning from arm cl
-# https://github.com/ARM-software/ComputeLibrary/issues/330
-set(GCC_VERSION_DISABLE_WARNING 6.0)
-if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER GCC_VERSION_DISABLE_WARNING)
- message(STATUS "GCC version higher than ${GCC_VERSION_DISABLE_WARNING}")
- set(FLAGS_CXXONLY ${FLAGS_CXXONLY}
- "-Wno-ignored-attributes"
- )
-endif()
diff --git a/cmake/option/option_armv7l-tizen.cmake b/cmake/option/option_armv7l-tizen.cmake
deleted file mode 100644
index 6040eb6b8..000000000
--- a/cmake/option/option_armv7l-tizen.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# armv7l tizen compile options
-#
-
-message(STATUS "Building for ARMv7l(softfp) Tizen")
-
-# TODO : add and use option_tizen if something uncommon comes up
-# include linux common
-include("cmake/option/option_linux.cmake")
-
-# TODO : support rootfs setting for tizen cross-build
-
-# addition for arm-linux
-set(FLAGS_COMMON ${FLAGS_COMMON}
- "-mcpu=cortex-a8"
- "-mfloat-abi=softfp"
- "-mfpu=neon-vfpv4"
- "-funsafe-math-optimizations"
- "-ftree-vectorize"
- )
diff --git a/cmake/option/option_linux.cmake b/cmake/option/option_linux.cmake
deleted file mode 100644
index 8cae4e9a1..000000000
--- a/cmake/option/option_linux.cmake
+++ /dev/null
@@ -1,33 +0,0 @@
-#
-# linux common compile options
-#
-
-# flags for build type: debug, release
-set(CMAKE_C_FLAGS_DEBUG "-O0 -g -DDEBUG")
-set(CMAKE_CXX_FLAGS_DEBUG "-O0 -g -DDEBUG")
-set(CMAKE_C_FLAGS_RELEASE "-O2 -DNDEBUG")
-set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG")
-
-# test-coverage build flag
-if("${COVERAGE_BUILD}" STREQUAL "1")
- set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE ON)
- set(FLAGS_COMMON "${FLAGS_COMMON} -fprofile-arcs -ftest-coverage")
- set(CMAKE_EXE_LINKER_FLAGS
- "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage")
-endif()
-
-#
-# linux common variable and settings
-#
-
-# lib pthread as a variable (pthread must be disabled on android)
-set(LIB_PTHREAD pthread)
-
-# nnfw common path
-set(NNFW_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/include)
-set(NNFW_EXTERNALS_DIR ${CMAKE_SOURCE_DIR}/externals)
-
-# External sources to build tflite
-# If already downloaded files are in tensorflow/tensorflow/contrib/lite/downloads,
-# set TFLITE_DEPEND_DIR to ${NNFW_EXTERNALS_DIR}/tensorflow/tensorflow/contrib/lite/downloads
-set(TFLITE_DEPEND_DIR ${NNFW_EXTERNALS_DIR})
diff --git a/cmake/option/option_x86_64-linux.cmake b/cmake/option/option_x86_64-linux.cmake
deleted file mode 100644
index 6f4d0ebfc..000000000
--- a/cmake/option/option_x86_64-linux.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-#
-# x86_64 linux compile options
-#
-message(STATUS "Building for x86-64 Linux")
-
-# include linux common
-include("cmake/option/option_linux.cmake")
-
-# SIMD for x86
-set(FLAGS_COMMON ${FLAGS_COMMON}
- "-msse4"
- )
diff --git a/cmake/packages/ARMComputeConfig.cmake b/cmake/packages/ARMComputeConfig.cmake
deleted file mode 100644
index 656163a8e..000000000
--- a/cmake/packages/ARMComputeConfig.cmake
+++ /dev/null
@@ -1,81 +0,0 @@
-function(_ARMCompute_Import)
- nnfw_find_package(ARMComputeSource QUIET)
-
- if(NOT ARMComputeSource_FOUND)
- set(ARMCompute_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT ARMComputeSource_FOUND)
-
- include(FindPackageHandleStandardArgs)
-
- list(APPEND ARMCompute_INCLUDE_SEARCH_PATHS /usr/include)
-
- list(APPEND ARMCompute_LIB_SEARCH_PATHS /usr/lib)
-
- find_path(INCLUDE_DIR NAMES arm_compute/core/ITensor.h PATHS ${ARMCompute_INCLUDE_SEARCH_PATHS})
-
- find_library(CORE_LIBRARY NAMES arm_compute_core PATHS ${ARMCompute_LIB_SEARCH_PATHS})
- find_library(RUNTIME_LIBRARY NAMES arm_compute PATHS ${ARMCompute_LIB_SEARCH_PATHS})
- find_library(GRAPH_LIBRARY NAMES arm_compute_graph PATHS ${ARMCompute_LIB_SEARCH_PATHS})
-
- if(NOT INCLUDE_DIR)
- set(INCLUDE_DIR ${CMAKE_SOURCE_DIR}/externals/acl ${CMAKE_SOURCE_DIR}/externals/acl/include)
- endif(NOT INCLUDE_DIR)
-
- # NOTE '${CMAKE_INSTALL_PREFIX}/lib' should be searched as CI server places
- # pre-built ARM compute libraries on this directory
- if(NOT CORE_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_core.so)
- set(CORE_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_core.so)
- endif()
-
- if(NOT CORE_LIBRARY)
- set(ARMCompute_FOUND FALSE PARENT_SCOPE)
- return()
- endif()
-
- if(NOT RUNTIME_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute.so)
- set(RUNTIME_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute.so)
- endif()
-
- if(NOT RUNTIME_LIBRARY)
- set(ARMCompute_FOUND FALSE PARENT_SCOPE)
- return()
- endif()
-
- if(NOT GRAPH_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_graph.so)
- set(GRAPH_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_graph.so)
- endif()
-
- if(NOT GRAPH_LIBRARY)
- set(ARMCompute_FOUND FALSE PARENT_SCOPE)
- return()
- endif()
-
- if(NOT TARGET arm_compute_core)
- add_library(arm_compute_core INTERFACE)
- target_include_directories(arm_compute_core INTERFACE ${INCLUDE_DIR})
- target_link_libraries(arm_compute_core INTERFACE dl ${LIB_PTHREAD})
- target_link_libraries(arm_compute_core INTERFACE ${CORE_LIBRARY})
- if (${TARGET_OS} STREQUAL "tizen")
- target_link_libraries(arm_compute_core INTERFACE OpenCL)
- endif()
- endif(NOT TARGET arm_compute_core)
-
- if(NOT TARGET arm_compute)
- add_library(arm_compute INTERFACE)
- target_include_directories(arm_compute INTERFACE ${INCLUDE_DIR})
- target_link_libraries(arm_compute INTERFACE ${RUNTIME_LIBRARY})
- target_link_libraries(arm_compute INTERFACE arm_compute_core)
- endif(NOT TARGET arm_compute)
-
- if(NOT TARGET arm_compute_graph)
- add_library(arm_compute_graph INTERFACE)
- target_include_directories(arm_compute_graph INTERFACE ${INCLUDE_DIR})
- target_link_libraries(arm_compute_graph INTERFACE ${GRAPH_LIBRARY})
- target_link_libraries(arm_compute_graph INTERFACE arm_compute)
- endif(NOT TARGET arm_compute_graph)
-
- set(ARMCompute_FOUND TRUE PARENT_SCOPE)
-endfunction(_ARMCompute_Import)
-
-_ARMCompute_Import()
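
Because the imported targets chain INTERFACE dependencies (arm_compute_graph ->
arm_compute -> arm_compute_core), a consumer only needs to link the outermost one.
A sketch; the library and source names are hypothetical:

    nnfw_find_package(ARMCompute)
    if(ARMCompute_FOUND)
      add_library(acl_backend SHARED acl_backend.cc)  # hypothetical target and source
      # arm_compute and arm_compute_core are pulled in transitively
      target_link_libraries(acl_backend PRIVATE arm_compute_graph)
    endif()
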
diff --git a/cmake/packages/ARMComputeSourceConfig.cmake b/cmake/packages/ARMComputeSourceConfig.cmake
deleted file mode 100644
index c1a31d047..000000000
--- a/cmake/packages/ARMComputeSourceConfig.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-function(_ARMComputeSource_import)
- if(NOT DOWNLOAD_ARMCOMPUTE)
- set(ARMComputeSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_ARMCOMPUTE)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(ARMCOMPUTE_URL ${EXTERNAL_DOWNLOAD_SERVER}/ARM-software/ComputeLibrary/archive/v18.11.tar.gz)
- ExternalSource_Download("acl" ${ARMCOMPUTE_URL})
-
- set(ARMComputeSource_DIR ${acl_SOURCE_DIR} PARENT_SCOPE)
- set(ARMComputeSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_ARMComputeSource_import)
-
-_ARMComputeSource_import()
diff --git a/cmake/packages/AbslSourceConfig.cmake b/cmake/packages/AbslSourceConfig.cmake
deleted file mode 100644
index 9075b7397..000000000
--- a/cmake/packages/AbslSourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_AbslSource_import)
- if(NOT DOWNLOAD_ABSL)
- set(AbslSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_ABSL)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE The following URL comes from TensorFlow 1.12
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(ABSL_URL ${EXTERNAL_DOWNLOAD_SERVER}/abseil/abseil-cpp/archive/48cd2c3f351ff188bc85684b84a91b6e6d17d896.tar.gz)
- ExternalSource_Download("absl" ${ABSL_URL})
-
- set(AbslSource_DIR ${absl_SOURCE_DIR} PARENT_SCOPE)
- set(AbslSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_AbslSource_import)
-
-_AbslSource_import()
diff --git a/cmake/packages/EigenSourceConfig.cmake b/cmake/packages/EigenSourceConfig.cmake
deleted file mode 100644
index dd94e069e..000000000
--- a/cmake/packages/EigenSourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_EigenSource_import)
- if(NOT DOWNLOAD_EIGEN)
- set(EigenSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_EIGEN)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads Eigen from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://bitbucket.org")
- set(EIGEN_URL ${EXTERNAL_DOWNLOAD_SERVER}/eigen/eigen/get/fd6845384b86.tar.gz)
- ExternalSource_Download("eigen" ${EIGEN_URL})
-
- set(EigenSource_DIR ${eigen_SOURCE_DIR} PARENT_SCOPE)
- set(EigenSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_EigenSource_import)
-
-_EigenSource_import()
diff --git a/cmake/packages/FarmhashSourceConfig.cmake b/cmake/packages/FarmhashSourceConfig.cmake
deleted file mode 100644
index 802367968..000000000
--- a/cmake/packages/FarmhashSourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_FarmhashSource_import)
- if(NOT DOWNLOAD_FARMHASH)
- set(FarmhashSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_FARMHASH)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads farmhash from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(FARMHASH_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz)
- ExternalSource_Download("farmhash" ${FARMHASH_URL})
-
- set(FarmhashSource_DIR ${farmhash_SOURCE_DIR} PARENT_SCOPE)
- set(FarmhashSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_FarmhashSource_import)
-
-_FarmhashSource_import()
diff --git a/cmake/packages/FlatBuffersSourceConfig.cmake b/cmake/packages/FlatBuffersSourceConfig.cmake
deleted file mode 100644
index 5f142bff7..000000000
--- a/cmake/packages/FlatBuffersSourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_FlatBuffersSource_import)
- if(NOT DOWNLOAD_FLATBUFFERS)
- set(FlatBuffersSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_FLATBUFFERS)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads FlatBuffers from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(FLATBUFFERS_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/flatbuffers/archive/1f5eae5d6a135ff6811724f6c57f911d1f46bb15.tar.gz)
- ExternalSource_Download("flatbuffers" ${FLATBUFFERS_URL})
-
- set(FlatBuffersSource_DIR ${flatbuffers_SOURCE_DIR} PARENT_SCOPE)
- set(FlatBuffersSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_FlatBuffersSource_import)
-
-_FlatBuffersSource_import()
diff --git a/cmake/packages/GEMMLowpSourceConfig.cmake b/cmake/packages/GEMMLowpSourceConfig.cmake
deleted file mode 100644
index 613ff29b5..000000000
--- a/cmake/packages/GEMMLowpSourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_GEMMLowpSource_import)
- if(NOT DOWNLOAD_GEMMLOWP)
- set(GEMMLowpSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_GEMMLOWP)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 uses the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(GEMMLOWP_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/gemmlowp/archive/38ebac7b059e84692f53e5938f97a9943c120d98.tar.gz)
- ExternalSource_Download("gemmlowp" ${GEMMLOWP_URL})
-
- set(GEMMLowpSource_DIR ${gemmlowp_SOURCE_DIR} PARENT_SCOPE)
- set(GEMMLowpSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_GEMMLowpSource_import)
-
-_GEMMLowpSource_import()
diff --git a/cmake/packages/GTestConfig.cmake b/cmake/packages/GTestConfig.cmake
deleted file mode 100644
index 62d2e8967..000000000
--- a/cmake/packages/GTestConfig.cmake
+++ /dev/null
@@ -1,49 +0,0 @@
-if(OBS_BUILD)
- enable_testing()
- find_package(GTest REQUIRED)
- include_directories(${GTEST_INCLUDE_DIR})
- set(GTest_FOUND TRUE)
- return()
-endif(OBS_BUILD)
-
-if(${BUILD_GTEST})
- nnfw_include(ExternalSourceTools)
- nnfw_include(ExternalProjectTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(GTEST_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/googletest/archive/release-1.8.0.tar.gz)
- ExternalSource_Download("gtest" ${GTEST_URL})
-
- # gtest_SOURCE_DIR is used in the gtest subdirectory's cmake
- set(sourcedir_gtest ${gtest_SOURCE_DIR})
- unset(gtest_SOURCE_DIR)
-
- if(NOT TARGET gtest_main)
- add_extdirectory(${sourcedir_gtest} gtest EXCLUDE_FROM_ALL)
- endif(NOT TARGET gtest_main)
-
- set(GTest_FOUND TRUE)
- return()
-endif(${BUILD_GTEST})
-
-### Find and use pre-installed Google Test
-find_package(GTest)
-find_package(Threads)
-
-if(${GTEST_FOUND} AND TARGET Threads::Threads)
- if(NOT TARGET gtest)
- add_library(gtest INTERFACE)
- target_include_directories(gtest INTERFACE ${GTEST_INCLUDE_DIRS})
- target_link_libraries(gtest INTERFACE ${GTEST_LIBRARIES} Threads::Threads)
- endif(NOT TARGET gtest)
-
- if(NOT TARGET gtest_main)
- add_library(gtest_main INTERFACE)
- target_include_directories(gtest_main INTERFACE ${GTEST_INCLUDE_DIRS})
- target_link_libraries(gtest_main INTERFACE gtest)
- target_link_libraries(gtest_main INTERFACE ${GTEST_MAIN_LIBRARIES})
- endif(NOT TARGET gtest_main)
-
- set(GTest_FOUND TRUE)
-endif(${GTEST_FOUND} AND TARGET Threads::Threads)
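
On the non-OBS paths this config ends with linkable gtest / gtest_main targets, so test
writers do not need to care which branch won. A sketch of a consumer; the test name is
hypothetical, and LIB_PTHREAD comes from option_linux.cmake:

    nnfw_find_package(GTest)
    add_executable(my_unittest my_unittest.cc)  # hypothetical test source
    target_link_libraries(my_unittest gtest_main ${LIB_PTHREAD})
    add_test(NAME my_unittest COMMAND my_unittest)
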
diff --git a/cmake/packages/NEON2SSESourceConfig.cmake b/cmake/packages/NEON2SSESourceConfig.cmake
deleted file mode 100644
index b656f5700..000000000
--- a/cmake/packages/NEON2SSESourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_NEON2SSESource_import)
- if(NOT DOWNLOAD_NEON2SSE)
- set(NEON2SSESource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_NEON2SSE)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads NEON2SSE from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(NEON2SSE_URL ${EXTERNAL_DOWNLOAD_SERVER}/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz)
- ExternalSource_Download("neon_2_sse" ${NEON2SSE_URL})
-
- set(NEON2SSESource_DIR ${neon_2_sse_SOURCE_DIR} PARENT_SCOPE)
- set(NEON2SSESource_FOUND TRUE PARENT_SCOPE)
-endfunction(_NEON2SSESource_import)
-
-_NEON2SSESource_import()
diff --git a/cmake/packages/NoniusConfig.cmake b/cmake/packages/NoniusConfig.cmake
deleted file mode 100644
index 35f373a8c..000000000
--- a/cmake/packages/NoniusConfig.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-function(_Nonius_import)
- nnfw_find_package(NoniusSource QUIET)
-
- if(NOT NoniusSource_FOUND)
- set(Nonius_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT NoniusSource_FOUND)
-
- if(NOT TARGET nonius)
- message(STATUS "Found nonius: TRUE")
- add_library(nonius INTERFACE)
- target_include_directories(nonius INTERFACE "${NoniusSource_DIR}/include")
- endif(NOT TARGET nonius)
-
- set(Nonius_FOUND TRUE PARENT_SCOPE)
-endfunction(_Nonius_import)
-
-_Nonius_import()
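
nonius is header-only, so the imported target carries only an include path. A sketch of
a benchmark consumer; the names are hypothetical:

    nnfw_find_package(Nonius)
    if(Nonius_FOUND)
      add_executable(conv2d_benchmark conv2d_benchmark.cc)  # hypothetical benchmark
      target_link_libraries(conv2d_benchmark nonius ${LIB_PTHREAD})
    endif()
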
diff --git a/cmake/packages/NoniusSourceConfig.cmake b/cmake/packages/NoniusSourceConfig.cmake
deleted file mode 100644
index 7349946db..000000000
--- a/cmake/packages/NoniusSourceConfig.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-function(_NoniusSource_import)
- if(NOT DOWNLOAD_NONIUS)
- set(NoniusSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_NONIUS)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(NONIUS_URL ${EXTERNAL_DOWNLOAD_SERVER}/libnonius/nonius/archive/v1.2.0-beta.1.tar.gz)
- ExternalSource_Download("nonius" ${NONIUS_URL})
-
- set(NoniusSource_DIR ${nonius_SOURCE_DIR} PARENT_SCOPE)
- set(NoniusSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_NoniusSource_import)
-
-_NoniusSource_import()
diff --git a/cmake/packages/TensorFlowSourceConfig.cmake b/cmake/packages/TensorFlowSourceConfig.cmake
deleted file mode 100644
index 5828334c7..000000000
--- a/cmake/packages/TensorFlowSourceConfig.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-function(_TensorFlowSource_import)
- if(NOT DOWNLOAD_TENSORFLOW)
- set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_TENSORFLOW)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(TENSORFLOW_URL ${EXTERNAL_DOWNLOAD_SERVER}/tensorflow/tensorflow/archive/v1.12.0.tar.gz)
- ExternalSource_Download("tensorflow" ${TENSORFLOW_URL})
-
- set(TensorFlowSource_DIR ${tensorflow_SOURCE_DIR} PARENT_SCOPE)
- set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_TensorFlowSource_import)
-
-_TensorFlowSource_import()
diff --git a/cmake/packages/TensorflowConfig.cmake b/cmake/packages/TensorflowConfig.cmake
deleted file mode 100644
index ab4e2715e..000000000
--- a/cmake/packages/TensorflowConfig.cmake
+++ /dev/null
@@ -1,44 +0,0 @@
-function(_Tensorflow_Import)
- if(NOT DEFINED TENSORFLOW_DIR)
- set(TENSORFLOW_DIR ${CMAKE_SOURCE_DIR}/externals/tensorflow)
- endif(NOT DEFINED TENSORFLOW_DIR)
-
- if(NOT DEFINED NSYNC_ARCH)
- set(NSYNC_ARCH "default")
- endif(NOT DEFINED NSYNC_ARCH)
-
- set(TENSROFLOW_MAKEFILE_DIR "${TENSORFLOW_DIR}/tensorflow/contrib/makefile")
- set(TENSORFLOW_GEN_DIR "${TENSROFLOW_MAKEFILE_DIR}/gen")
- set(TENSORFLOW_DOWNLOADS_DIR "${TENSROFLOW_MAKEFILE_DIR}/downloads")
-
- if(NOT EXISTS "${TENSORFLOW_GEN_DIR}/lib/libtensorflow-core.a")
- set(Tensorflow_FOUND FALSE PARENT_SCOPE)
- return()
- endif()
-
- if(NOT EXISTS "${TENSORFLOW_DOWNLOADS_DIR}/nsync/builds/${NSYNC_ARCH}.linux.c++11/libnsync.a")
- set(Tensorflow_FOUND FALSE PARENT_SCOPE)
- return()
- endif()
-
- if(NOT TARGET tensorflow-core)
- add_library(tensorflow-core INTERFACE)
-
- target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_DIR}")
- target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_GEN_DIR}/proto")
- target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_GEN_DIR}/protobuf/include")
- target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_DOWNLOADS_DIR}/eigen")
- target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_DOWNLOADS_DIR}/nsync/public")
-
- target_link_libraries(tensorflow-core INTERFACE -Wl,--whole-archive "${TENSORFLOW_GEN_DIR}/lib/libtensorflow-core.a" -Wl,--no-whole-archive)
- target_link_libraries(tensorflow-core INTERFACE "${TENSORFLOW_GEN_DIR}/protobuf/lib/libprotobuf.a")
- target_link_libraries(tensorflow-core INTERFACE "${TENSORFLOW_DOWNLOADS_DIR}/nsync/builds/${NSYNC_ARCH}.linux.c++11/libnsync.a")
- target_link_libraries(tensorflow-core INTERFACE ${LIB_PTHREAD} dl)
-
- message(STATUS "Found Tensorflow (lib: ${TENSORFLOW_GEN_DIR}/lib/libtensorflow-core.a)")
- endif()
-
- set(Tensorflow_FOUND TRUE PARENT_SCOPE)
-endfunction(_Tensorflow_Import)
-
-_Tensorflow_Import()
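
tensorflow-core wraps the makefile-built static library as an INTERFACE target,
including the --whole-archive link flags, so a consumer just links the target. A sketch,
assuming the same nnfw_find_package convention as the other package configs; the
executable name is hypothetical:

    nnfw_find_package(Tensorflow)
    if(Tensorflow_FOUND)
      add_executable(tf_smoke main.cc)  # hypothetical smoke-test binary
      target_link_libraries(tf_smoke tensorflow-core)
    endif()
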
diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt
deleted file mode 100644
index 78417eacb..000000000
--- a/contrib/CMakeLists.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-file(GLOB CONTRIB_CMAKE_FILES "*/CMakeLists.txt")
-
-foreach(CONTRIB_CMAKE_FILE ${CONTRIB_CMAKE_FILES})
- get_filename_component(CONTRIB_BASE ${CONTRIB_CMAKE_FILE} DIRECTORY)
- add_subdirectory(${CONTRIB_BASE})
-endforeach(CONTRIB_CMAKE_FILE ${CONTRIB_CMAKE_FILES})
diff --git a/contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h b/contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h
deleted file mode 100644
index cf51219fd..000000000
--- a/contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the License);
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _TFLITE_LOG_H_
-#define _TFLITE_LOG_H_
-
-#ifdef __cplusplus
-extern "C" {
-#endif /*__cplusplus*/
-
-#define ERROR 1
-#define WARNING 2
-#define INFO 3
-#define DEBUG 4
-
-#ifdef __TIZEN__
-#include <dlog/dlog.h>
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif // LOG_TAG
-#define LOG_TAG "TFLITE_NATIVE"
-
-#define TFLITE_NATIVE_LOG(log_level, format, args...) \
- do { \
- switch (log_level) { \
- case ERROR: \
- LOGE(format, ## args); \
- break; \
- case WARNING: \
- LOGW(format, ## args); \
- break; \
- default: \
- LOGI(format, ## args); \
- break; \
- } \
- } while (0)
-#else // __TIZEN__
-#define LEVEL_TO_STR(level) (\
- ((level) == ERROR) ? "ERROR" : \
- ((level) == WARNING) ? "WARNING" : \
- ((level) == INFO) ? "INFO": \
- ((level) == DEBUG) ? "DEBUG" : "DEFAULT")
-#define TFLITE_NATIVE_LOG(log_level, format, args...) \
- do { \
- printf("%s: %s: ", LEVEL_TO_STR(log_level), __FILE__); \
- printf(format, ## args); \
- printf("\n"); \
- }while (0)
-#endif // __TIZEN__
-
-#ifdef __cplusplus
-}
-#endif /*__cplusplus*/
-
-#endif /*_TFLITE_LOG_H*/
diff --git a/contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h b/contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h
deleted file mode 100644
index 7fddb5400..000000000
--- a/contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the License);
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _TFLITE_NATIVEWRAPPER_H_
-#define _TFLITE_NATIVEWRAPPER_H_
-
-#include "tensorflow/contrib/lite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/string_util.h"
-#include "tensorflow/contrib/lite/tools/mutable_op_resolver.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif /*__cplusplus*/
-
-typedef enum
-{
- /** 32-bit signed integer. */
- INT32 = 1,
-
- /** 32-bit single precision floating point. */
- FLOAT32 = 2,
-
- /** 8-bit unsigned integer. */
- UINT8 = 3,
-
- /** 64-bit signed integer. */
- INT64 = 4
-} TFLiteNativeType;
-
-void tflite_interpreter_setNumThreads(long* interpreterHandle, int numThreads);
-
-long long tflite_flatbuffermodel_BuildFromFile(char* modelPath);
-
-long long tflite_builder_interpreterBuilder(long* modelHandle);
-
-void* tflite_interpreter_run(long* interpreterHandle, void* values, int inputLength, int dataType);
-
-#ifdef __cplusplus
-}
-#endif /*__cplusplus*/
-
-#endif /*_TFLITE_NATIVEWRAPPER_H_*/
diff --git a/contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp b/contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp
deleted file mode 100644
index 413304637..000000000
--- a/contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the License);
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <iostream>
-#include "tflite_nativewrapper.h"
-#include "tflite_log.h"
-#include <string.h>
-#include <unistd.h>
-#include <errno.h>
-#include <assert.h>
-
-int getNumBytes(TFLiteNativeType dataType)
-{
- switch (dataType) {
- case INT32:
- return 4;
- case FLOAT32:
- return 4;
- case UINT8:
- return 1;
- case INT64:
- return 8;
- default:
- return 1;
- }
-}
-
-/// <summary>
-/// Set the number of threads available to the interpreter.
-/// </summary>
-/// <param name="interpreterHandle">Handle of the interpreter instance.</param>
-/// <param name="numThreads">Number of threads.</param>
-void tflite_interpreter_setNumThreads(long* interpreterHandle, int numThreads)
-{
- assert(interpreterHandle != nullptr);
- tflite::Interpreter* interpreter = reinterpret_cast<tflite::Interpreter*>(*interpreterHandle);
-
- interpreter->SetNumThreads(numThreads);
-
- TFLITE_NATIVE_LOG(DEBUG, "Number of threads: %d", numThreads);
- return;
-}
-
-/// <summary>
-/// Creates a Flat Buffer Model from the given .tflite model.
-/// </summary>
-/// <param name="modelPath">Path of the model.</param>
-long long
-tflite_flatbuffermodel_BuildFromFile(char* modelPath)
-{
- if (modelPath == nullptr) {
- TFLITE_NATIVE_LOG(ERROR, "Invalid parameter");
- return 0;
- }
- TFLITE_NATIVE_LOG(ERROR, "Model Path: %s", modelPath);
-
- if (access(modelPath, F_OK) == -1) {
- TFLITE_NATIVE_LOG(ERROR, "Failed to access model [%s]",
- strerror(errno));
- return 0;
- }
-
- auto model = tflite::FlatBufferModel::BuildFromFile(modelPath);
-
- TFLITE_NATIVE_LOG(DEBUG, "Successfully loaded model");
- return reinterpret_cast<long long>(model.release());
-}
-
-/// <summary>
-/// Creates an interpreter instance taking the flatbuffer model as input.
-/// </summary>
-/// <param name="modelHandle">Address of the flatbuffer model.</param>
-long long
-tflite_builder_interpreterBuilder(long* modelHandle)
-{
- assert(modelHandle != nullptr);
- tflite::FlatBufferModel* model = reinterpret_cast<tflite::FlatBufferModel*>(*modelHandle);
-
- tflite::ops::builtin::BuiltinOpResolver resolver;
- std::unique_ptr<tflite::Interpreter> interpreter;
-
- TfLiteStatus status = tflite::InterpreterBuilder(*model, resolver)(&interpreter);
-
- if (status != kTfLiteOk) {
- TFLITE_NATIVE_LOG(DEBUG, "Cannot create interpreter");
- return 0;
- }
- TFLITE_NATIVE_LOG(DEBUG, "CheckPoint interpreter");
- return reinterpret_cast<long long>(interpreter.release());
-}
-
-/// <summary>
-/// Runs the inference given the inputs.
-/// </summary>
-/// <param name="interpreterHandle">Address of the interpreter instance.</param>
-/// <param name="values">Input values for the model.</param>
-/// <param name="inpLength">Length of the input.</param>
-/// <param name="dataType">Data type key of the input.</param>
-void* tflite_interpreter_run(long* interpreterHandle, void* values, int inputLength,
- int dataType)
-{
- assert(interpreterHandle != nullptr);
- tflite::Interpreter* interpreter = reinterpret_cast<tflite::Interpreter*>(*interpreterHandle);
-
- int inputTensorIndex = interpreter->inputs()[0];
-
- // TODO: Input tensor size should be passed as a parameter; it is hardcoded for now.
- interpreter->ResizeInputTensor(inputTensorIndex,
- { 1, 224, 224, 3 });
-
- if (interpreter->AllocateTensors() != kTfLiteOk) {
- TFLITE_NATIVE_LOG(ERROR, "Failed to allocate tensors!");
- return nullptr;
- }
-
- float* inputTensorPointer = interpreter->typed_tensor<float>(inputTensorIndex);
-
- int numBytes = getNumBytes((TFLiteNativeType) dataType);
-
- memcpy(inputTensorPointer, values, inputLength * numBytes);
-
- if (interpreter->Invoke() != kTfLiteOk) {
- TFLITE_NATIVE_LOG(ERROR, "Failed to invoke");
- }
-
- float* output = interpreter->typed_output_tensor<float>(0);
- return output;
-}
-
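The TODO in tflite_interpreter_run notes that the input shape is hardcoded to {1, 224, 224, 3}. A hedged sketch of the generalization the TODO describes, with the shape supplied by the caller (hypothetical signature, never part of the original API; requires <vector> in addition to the file's existing includes):

    // Hypothetical variant of tflite_interpreter_run taking the input dims explicitly.
    void* tflite_interpreter_run_with_dims(long* interpreterHandle, void* values,
                                           int inputLength, int dataType,
                                           int* dims, int numDims)
    {
      assert(interpreterHandle != nullptr);
      tflite::Interpreter* interpreter = reinterpret_cast<tflite::Interpreter*>(*interpreterHandle);
      int inputTensorIndex = interpreter->inputs()[0];

      // Same flow as tflite_interpreter_run, but the shape comes from the caller.
      interpreter->ResizeInputTensor(inputTensorIndex, std::vector<int>(dims, dims + numDims));
      if (interpreter->AllocateTensors() != kTfLiteOk)
        return nullptr;

      memcpy(interpreter->typed_tensor<float>(inputTensorIndex), values,
             inputLength * getNumBytes((TFLiteNativeType)dataType));
      if (interpreter->Invoke() != kTfLiteOk)
        return nullptr;
      return interpreter->typed_output_tensor<float>(0);
    }
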
diff --git a/contrib/benchmark_acl/src/Benchmark.cpp b/contrib/benchmark_acl/src/Benchmark.cpp
deleted file mode 100644
index ba6001232..000000000
--- a/contrib/benchmark_acl/src/Benchmark.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Benchmark.h"
-
-#include <cstdlib>
-
-Count::Count() : _value(1)
-{
- auto env = std::getenv("COUNT");
-
- if (env)
- {
- _value = std::atoi(env);
- }
-}
-
-uint32_t Count::value(void) const { return _value; }
-
-#include <boost/accumulators/accumulators.hpp>
-#include <boost/accumulators/statistics/stats.hpp>
-#include <boost/accumulators/statistics/mean.hpp>
-
-#include <iostream>
-#include <chrono>
-
-using namespace boost::accumulators;
-
-void run_benchmark(arm_compute::graph::frontend::Stream &graph)
-{
- // NOTE Here the number of warming-up iterations is hardcoded
- // TODO Decide the number of warming-up iterations appropriately
- for (uint32_t n = 0; n < 3; ++n)
- {
- auto beg = std::chrono::steady_clock::now();
- graph.run();
- auto end = std::chrono::steady_clock::now();
- auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - beg);
-
- std::cout << "Warming-up " << n << ": " << elapsed.count() << "ms" << std::endl;
- }
-
- accumulator_set<double, stats<tag::mean>> acc;
-
- const Count count;
-
- for (uint32_t n = 0; n < count.value(); ++n)
- {
- auto beg = std::chrono::steady_clock::now();
- graph.run();
- auto end = std::chrono::steady_clock::now();
- auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - beg);
-
- std::cout << "Iteration " << n << ": " << elapsed.count() << "ms" << std::endl;
-
- acc(elapsed.count());
- }
-
- std::cout << "--------" << std::endl;
- std::cout << "Mean: " << mean(acc) << "ms" << std::endl;
-}
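The measurement pattern above — a few untimed warm-up runs, then a mean over COUNT timed iterations — is reusable beyond ACL. A minimal self-contained sketch of the same scheme without the Boost accumulator dependency:

    #include <chrono>

    // Returns the mean wall-clock runtime of fn() in milliseconds.
    template <typename F>
    double mean_runtime_ms(F&& fn, unsigned warmups, unsigned iterations)
    {
      using clock = std::chrono::steady_clock;

      for (unsigned n = 0; n < warmups; ++n)
        fn(); // untimed: warms caches and spins up the backend

      double total = 0.0;
      for (unsigned n = 0; n < iterations; ++n)
      {
        auto beg = clock::now();
        fn();
        auto end = clock::now();
        total += std::chrono::duration_cast<std::chrono::milliseconds>(end - beg).count();
      }
      return total / iterations;
    }

    // Usage: mean_runtime_ms([&] { graph.run(); }, 3, count.value());
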
diff --git a/contrib/detection/detection.cpp b/contrib/detection/detection.cpp
deleted file mode 100644
index 8a988ccf5..000000000
--- a/contrib/detection/detection.cpp
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <tensorflow/core/public/session.h>
-
-#include <iostream>
-#include <stdexcept>
-
-#include <cassert>
-#include <cstring>
-
-#include "misc/benchmark.h"
-
-#define CHECK_TF(e) { \
- if(!(e).ok()) \
- { \
- throw std::runtime_error{"'" #e "' FAILED"}; \
- } \
-}
-
-int main(int argc, char **argv)
-{
- if (argc < 2)
- {
- std::cerr << "USAGE: " << argv[0] << " [T/F model path] [output 0] [output 1] ..." << std::endl;
- return 255;
- }
-
- std::vector<std::string> output_nodes;
-
- for (int argn = 2; argn < argc; ++argn)
- {
- output_nodes.emplace_back(argv[argn]);
- }
-
- tensorflow::Session* sess;
-
- CHECK_TF(tensorflow::NewSession(tensorflow::SessionOptions(), &sess));
-
- tensorflow::GraphDef graph_def;
-
- CHECK_TF(ReadBinaryProto(tensorflow::Env::Default(), argv[1], &graph_def));
- CHECK_TF(sess->Create(graph_def));
-
- tensorflow::Tensor input(tensorflow::DT_FLOAT, tensorflow::TensorShape({1, 320, 320, 3}));
- std::vector<tensorflow::Tensor> outputs;
-
- for (uint32_t n = 0; n < 5; ++n)
- {
- std::chrono::milliseconds elapsed(0);
-
- nnfw::misc::benchmark::measure(elapsed) << [&] (void) {
- CHECK_TF(sess->Run({{"input_node", input}}, output_nodes, {}, &outputs));
- };
-
- std::cout << "Takes " << elapsed.count() << "ms" << std::endl;
- }
-
- return 0;
-}
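One caveat in the loop above: the input tensor is allocated but never filled, so the session runs on uninitialized data — harmless for timing, but worth fixing for accuracy checks. A hedged sketch of populating it, assuming the standard tensorflow::Tensor API (add <algorithm> for std::fill_n):

    // flat<float>() exposes the tensor's underlying buffer as a 1-D view.
    float* data = input.flat<float>().data();
    std::fill_n(data, input.NumElements(), 0.5f); // constant test pattern
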
diff --git a/contrib/labs/jniacl/src/io_accessor.cc b/contrib/labs/jniacl/src/io_accessor.cc
deleted file mode 100644
index 103660716..000000000
--- a/contrib/labs/jniacl/src/io_accessor.cc
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (c) 2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#include "io_accessor.h"
-#include <ostream>
-#include <android/log.h>
-
-bool InputAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- // Fill the tensor with the test input value (optionally incrementing per element)
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- *reinterpret_cast<float *>(tensor.ptr_to_element(id)) = _test_input;
- _test_input += _inc ? 1.0 : 0.0;
-
- __android_log_print(ANDROID_LOG_DEBUG, "LOG_TAG", "Input %d, %d = %lf\r\n",
- id.y(), id.x(), *reinterpret_cast<float *>(tensor.ptr_to_element(id)));
- });
- return true;
-}
-
-bool OutputAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- // Log each output element as it streams out of the graph
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- __android_log_print(ANDROID_LOG_DEBUG, "Output", "Input %d, %d = %lf\r\n",
- id.y(), id.x(), *reinterpret_cast<float *>(tensor.ptr_to_element(id)));
- });
- return false; // end the network
-}
-
-bool WeightAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- // Fill the tensor with the test weight value (optionally incrementing per element)
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- *reinterpret_cast<float *>(tensor.ptr_to_element(id)) = _test_weight;
- _test_weight += _inc ? 1.0 : 0.0;
- });
- return true;
-}
-
-bool BiasAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- // Zero-fill the bias tensor
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- *reinterpret_cast<float *>(tensor.ptr_to_element(id)) = 0.0;
- });
- return true;
-}
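The boolean returned by access_tensor() is the streaming protocol here: true keeps the graph running, false (as in OutputAccessor) ends the network. A minimal sketch of a custom accessor following the same contract, assuming the same ACL headers as above:

    class ConstantAccessor : public arm_compute::graph::ITensorAccessor
    {
    public:
      explicit ConstantAccessor(float value) : _value(value) {}

      bool access_tensor(arm_compute::ITensor& tensor) override
      {
        arm_compute::Window window;
        window.use_tensor_dimensions(tensor.info()->tensor_shape());
        execute_window_loop(window, [&](const arm_compute::Coordinates& id) {
          *reinterpret_cast<float*>(tensor.ptr_to_element(id)) = _value;
        });
        return true; // keep the network running
      }

    private:
      float _value;
    };
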
diff --git a/contrib/labs/jniacl/src/io_accessor.h b/contrib/labs/jniacl/src/io_accessor.h
deleted file mode 100644
index 4033020e0..000000000
--- a/contrib/labs/jniacl/src/io_accessor.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (c) 2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#ifndef __IO_ACCESSOR_H__
-#define __IO_ACCESSOR_H__
-
-#include <arm_compute/graph/ITensorAccessor.h>
-
-class InputAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- InputAccessor(bool inc) : _inc(inc) { _test_input = 1.0; }
- InputAccessor(InputAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-
-private:
- bool _inc;
- float _test_input;
-};
-
-class OutputAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- OutputAccessor() = default;
- OutputAccessor(OutputAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-};
-
-class WeightAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- WeightAccessor(bool inc) : _inc(inc) { _test_weight = 1.0; }
- WeightAccessor(WeightAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-
-private:
- bool _inc;
- float _test_weight;
-};
-
-class BiasAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- BiasAccessor() = default;
- BiasAccessor(BiasAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-};
-
-#endif // __IO_ACCESSOR_H__
diff --git a/contrib/labs/jniacl/src/jniacl_main.cc b/contrib/labs/jniacl/src/jniacl_main.cc
deleted file mode 100644
index 515f28732..000000000
--- a/contrib/labs/jniacl/src/jniacl_main.cc
+++ /dev/null
@@ -1,39 +0,0 @@
-#include <jni.h>
-#include <string>
-
-#include <arm_compute/graph/Graph.h>
-#include <arm_compute/graph/Nodes.h>
-
-#include "io_accessor.h"
-
-extern "C" JNIEXPORT jstring JNICALL
-Java_com_samsung_testaclexec_ActivityMain_RunACLJNI(JNIEnv *env, jobject)
-{
- using arm_compute::DataType;
- using arm_compute::graph::Tensor;
- using arm_compute::graph::TargetHint;
- using arm_compute::graph::Graph;
- using arm_compute::TensorInfo;
- using arm_compute::TensorShape;
-
- arm_compute::graph::Graph graph;
- TargetHint target_hint = TargetHint::OPENCL;
- bool autoinc = true;
-
- graph << target_hint
- << Tensor(TensorInfo(TensorShape(3U, 3U, 1U, 1U), 1, DataType::F32),
- std::unique_ptr<InputAccessor>(new InputAccessor(autoinc)))
- << arm_compute::graph::ConvolutionLayer(
- 3U, 3U, 1U,
- std::unique_ptr<WeightAccessor>(new WeightAccessor(autoinc)),
- std::unique_ptr<BiasAccessor>(new BiasAccessor()),
- arm_compute::PadStrideInfo(1, 1, 0, 0))
- << Tensor(std::unique_ptr<OutputAccessor>(new OutputAccessor()));
- ;
-
- graph.run();
-
- std::string hello = "Conv2D Run OK";
-
- return env->NewStringUTF(hello.c_str());
-}
diff --git a/contrib/labs/kerneltesting/CMakeLists.txt b/contrib/labs/kerneltesting/CMakeLists.txt
deleted file mode 100644
index 5792d0fe8..000000000
--- a/contrib/labs/kerneltesting/CMakeLists.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-if(NOT ${TARGET_ARCH_BASE} STREQUAL "arm")
- return()
-endif(NOT ${TARGET_ARCH_BASE} STREQUAL "arm")
-
-nnfw_find_package(ARMCompute REQUIRED)
-
-function(add_kerneltesting TESTNAME SRC_FILES)
- link_directories(${CMAKE_INSTALL_PREFIX}/lib)
- add_executable(${TESTNAME} ${SRC_FILES})
- target_include_directories(${TESTNAME} PUBLIC
- ${NNFW_INCLUDE_DIR})
- target_link_libraries(${TESTNAME} nnfw_lib_misc arm_compute_graph)
- install(TARGETS ${TESTNAME} DESTINATION bin)
-endfunction()
-
-# TODO: Enable conv2d on Tizen
-if (NOT ${TARGET_OS} STREQUAL "tizen")
- add_subdirectory(conv2d)
-endif()
diff --git a/contrib/labs/kerneltesting/conv2d/CMakeLists.txt b/contrib/labs/kerneltesting/conv2d/CMakeLists.txt
deleted file mode 100644
index 25e01f584..000000000
--- a/contrib/labs/kerneltesting/conv2d/CMakeLists.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-set(KERNELTESTING_CONV2D kerneltesting_conv2d)
-
-set(KERNELTESTING_CONV2D_SRCS "nnfw_conv2d_test.cpp"
- "io_accessor.cpp")
-
-set(GEMMLOWP_INCLUDE ${TFLITE_DEPEND_DIR}/gemmlowp/public)
-set(EIGEN_INCLUDE ${TFLITE_DEPEND_DIR}/eigen
- ${TFLITE_DEPEND_DIR}/eigen/Eigen)
-
-add_kerneltesting(${KERNELTESTING_CONV2D} "${KERNELTESTING_CONV2D_SRCS}")
-
-target_include_directories(${KERNELTESTING_CONV2D} PUBLIC
- ${GEMMLOWP_INCLUDE}
- ${EIGEN_INCLUDE}
- )
diff --git a/contrib/labs/kerneltesting/conv2d/OperationUtils.h b/contrib/labs/kerneltesting/conv2d/OperationUtils.h
deleted file mode 100644
index 0beac80a4..000000000
--- a/contrib/labs/kerneltesting/conv2d/OperationUtils.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_ML_NN_COMMON_OPERATIONS_UTILS_H
-#define ANDROID_ML_NN_COMMON_OPERATIONS_UTILS_H
-
-#include <cstdint>
-#include <vector>
-#include <ostream>
-
-#define LOG(ERROR) std::cerr
-
-// Macro to check if the input parameters for operation are valid or not.
-#define NN_CHECK(v) \
- do { \
- if (!(v)) { \
- LOG(ERROR) << "NN_CHECK failed: " << #v << "'\n"; \
- return false; \
- } \
- } while (0)
-
-#define NN_CHECK_EQ(actual, expected) \
- NN_CHECK((actual) == (expected))
-
-#define NN_OPS_CHECK NN_CHECK
-
-enum PaddingScheme {
- kPaddingUnknown = 0,
- kPaddingSame = 1,
- kPaddingValid = 2,
-};
-
-enum class FusedActivationFunc : int32_t {
- NONE = 0,
- RELU = 1,
- RELU1 = 2,
- RELU6 = 3,
-};
-
-
-#define ANDROID_NN_MACRO_DISPATCH(macro) \
- switch (activation) { \
- case (int32_t) FusedActivationFunc::NONE: \
- macro(kNone); \
- break; \
- case (int32_t) FusedActivationFunc::RELU: \
- macro(kRelu); \
- break; \
- case (int32_t) FusedActivationFunc::RELU1: \
- macro(kRelu1); \
- break; \
- case (int32_t) FusedActivationFunc::RELU6: \
- macro(kRelu6); \
- break; \
- default: \
- LOG(ERROR) << "Unsupported fused activation function type"; \
- return false; \
- }
-
-
-#endif // ANDROID_ML_NN_COMMON_OPERATIONS_UTILS_H
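
For context, a short sketch of how these macros are typically used in validation code (hypothetical helper in the style of the Android NN sources these macros come from; Shape and getSizeOfDimension are assumed from types.h):

    static bool validateConv2dShapes(const Shape& input, const Shape& filter, const Shape& bias)
    {
      // Each NN_CHECK logs and returns false from the enclosing function on failure.
      NN_CHECK_EQ(input.dimensions.size(), 4);
      NN_CHECK_EQ(filter.dimensions.size(), 4);
      NN_CHECK(bias.dimensions.size() >= 1);
      // Input depth must match the filter's innermost dimension.
      NN_CHECK_EQ(getSizeOfDimension(input, 3), getSizeOfDimension(filter, 3));
      return true;
    }
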
diff --git a/contrib/labs/kerneltesting/conv2d/common.h b/contrib/labs/kerneltesting/conv2d/common.h
deleted file mode 100644
index 8e675e664..000000000
--- a/contrib/labs/kerneltesting/conv2d/common.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_COMMON_H_
-#define ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_COMMON_H_
-
-#ifndef USE_NEON
-#if defined(__ARM_NEON__) || defined(__ARM_NEON)
-#define USE_NEON
-#include <arm_neon.h>
-#endif
-#endif
-
-#include <gemmlowp.h>
-#include "types.h"
-
-template <FusedActivationFunctionType Ac>
-struct ActivationFunctionImpl {};
-
-template <>
-struct ActivationFunctionImpl<FusedActivationFunctionType::kNone> {
- static float Eval(float x) { return x; }
-};
-
-template <>
-struct ActivationFunctionImpl<FusedActivationFunctionType::kRelu> {
- static float Eval(float x) { return x < 0.f ? 0.f : x; }
-};
-
-template <>
-struct ActivationFunctionImpl<FusedActivationFunctionType::kRelu1> {
- static float Eval(float x) { return x > 1.f ? 1.f : x < -1.f ? -1.f : x; }
-};
-
-template <>
-struct ActivationFunctionImpl<FusedActivationFunctionType::kRelu6> {
- static float Eval(float x) { return x > 6.f ? 6.f : x < 0.f ? 0.f : x; }
-};
-
-template <FusedActivationFunctionType Ac>
-float ActivationFunction(float x) {
- return ActivationFunctionImpl<Ac>::Eval(x);
-}
-
-inline int32 MultiplyByQuantizedMultiplierSmallerThanOne(
- int32 x, int32 quantized_multiplier, int right_shift) {
- using gemmlowp::RoundingDivideByPOT;
- using gemmlowp::SaturatingRoundingDoublingHighMul;
- return RoundingDivideByPOT(
- SaturatingRoundingDoublingHighMul(x, quantized_multiplier), right_shift);
-}
-
-inline int32 MultiplyByQuantizedMultiplierGreaterThanOne(
- int32 x, int32 quantized_multiplier, int left_shift) {
- using gemmlowp::SaturatingRoundingDoublingHighMul;
- return SaturatingRoundingDoublingHighMul(x * (1 << left_shift),
- quantized_multiplier);
-}
-
-#endif // ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_COMMON_H_
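
The two quantized-multiply helpers implement Q31 fixed-point scaling: a real multiplier m in (0, 1) is stored as quantized_multiplier = round(m0 * 2^31) together with a right shift, where m = m0 * 2^-shift and m0 lies in [0.5, 1). A hedged worked example:

    // Suppose the real scale is m = 0.375 = 0.75 * 2^-1:
    //   quantized_multiplier = round(0.75 * 2^31) = 1610612736, right_shift = 1.
    // For x = 1000:
    //   SaturatingRoundingDoublingHighMul(1000, 1610612736) ~= 1000 * 0.75 = 750
    //   RoundingDivideByPOT(750, 1) = 375 ~= 1000 * 0.375
    int32 y = MultiplyByQuantizedMultiplierSmallerThanOne(1000, 1610612736, 1);
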
diff --git a/contrib/labs/kerneltesting/conv2d/compatibility.h b/contrib/labs/kerneltesting/conv2d/compatibility.h
deleted file mode 100644
index db8ba04bc..000000000
--- a/contrib/labs/kerneltesting/conv2d/compatibility.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_COMPATIBILITY_H_
-#define ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_COMPATIBILITY_H_
-
-#ifndef ANDROID_ML_NN_COMPATIBILITY
-#define ANDROID_ML_NN_COMPATIBILITY
-
-#include <cassert>
-#include <cstdint>
-
-#ifndef DCHECK
-#define DCHECK(condition) (condition) ? (void)0 : assert(false)
-#endif
-
-#ifndef DCHECK_EQ
-#define DCHECK_EQ(x, y) ((x) == (y)) ? (void)0 : assert(false)
-#endif
-
-#ifndef DCHECK_GE
-#define DCHECK_GE(x, y) ((x) >= (y)) ? (void)0 : assert(false)
-#endif
-
-#ifndef DCHECK_GT
-#define DCHECK_GT(x, y) ((x) > (y)) ? (void)0 : assert(false)
-#endif
-
-#ifndef DCHECK_LE
-#define DCHECK_LE(x, y) ((x) <= (y)) ? (void)0 : assert(false)
-#endif
-
-#ifndef DCHECK_LT
-#define DCHECK_LT(x, y) ((x) < (y)) ? (void)0 : assert(false)
-#endif
-
-#ifndef CHECK_EQ
-#define CHECK_EQ(x, y) ((x) == (y)) ? (void)0 : assert(false)
-#endif
-
-using uint8 = std::uint8_t;
-using int16 = std::int16_t;
-using uint16 = std::uint16_t;
-using int32 = std::int32_t;
-using uint32 = std::uint32_t;
-
-#endif
-
-#endif // ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_COMPATIBILITY_H_
diff --git a/contrib/labs/kerneltesting/conv2d/io_accessor.cpp b/contrib/labs/kerneltesting/conv2d/io_accessor.cpp
deleted file mode 100644
index 6d3cd9d04..000000000
--- a/contrib/labs/kerneltesting/conv2d/io_accessor.cpp
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (c) 2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#include "io_accessor.h"
-
-InputAccessor::InputAccessor(const float* inputData, const Shape& inputShape)
- : _inputData(inputData)
- , _inputShape(inputShape)
-{
-}
-
-WeightAccessor::WeightAccessor(const float* filterData, const Shape& filterShape)
- : _filterData(filterData)
- , _filterShape(filterShape)
-{
-}
-
-BiasAccessor::BiasAccessor(const float* biasData, const Shape& biasShape)
- : _biasData(biasData)
- , _biasShape(biasShape)
-{
-}
-
-OutputAccessor::OutputAccessor(float* outputData, const Shape& outputShape)
- : _outputData(outputData)
- , _outputShape(outputShape)
-{
-}
-
-bool InputAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- uint32_t width = getSizeOfDimension(_inputShape, 2);
- uint32_t offset = id.y() * width + id.x();
- *reinterpret_cast<float *>(tensor.ptr_to_element(id)) =
- *(_inputData + offset);
- });
- return true;
-}
-
-bool WeightAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- uint32_t width = getSizeOfDimension(_filterShape, 2);
- uint32_t offset = id.y() * width + id.x();
- *reinterpret_cast<float *>(tensor.ptr_to_element(id)) =
- *(_filterData + offset);
- });
- return true;
-}
-
-bool BiasAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- uint32_t width = getSizeOfDimension(_biasShape, 2);
- uint32_t offset = id.y() * width + id.x();
- *reinterpret_cast<float *>(tensor.ptr_to_element(id)) =
- *(_biasData + offset);
- });
- return true;
-}
-
-bool OutputAccessor::access_tensor(arm_compute::ITensor &tensor)
-{
- arm_compute::Window window;
- window.use_tensor_dimensions(tensor.info()->tensor_shape());
-
- execute_window_loop(window, [&](const arm_compute::Coordinates& id)
- {
- uint32_t width = getSizeOfDimension(_outputShape, 2);
- uint32_t offset = id.y() * width + id.x();
- *(_outputData + offset) =
- *reinterpret_cast<float *>(tensor.ptr_to_element(id));
- });
- return false; // end the network
-}
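Note that every accessor above computes its offset as y * width + x, which assumes a single batch and a single channel. That holds for the one-channel test shapes in this directory, but general NHWC tensors need the full stride arithmetic — a hedged sketch (hypothetical helper, assuming dimensions ordered N, H, W, C):

    // Flat NHWC offset of element (n, h, w, c).
    inline uint32_t nhwcOffset(const Shape& s, uint32_t n, uint32_t h, uint32_t w, uint32_t c)
    {
      uint32_t H = getSizeOfDimension(s, 1);
      uint32_t W = getSizeOfDimension(s, 2);
      uint32_t C = getSizeOfDimension(s, 3);
      return ((n * H + h) * W + w) * C + c;
    }
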
diff --git a/contrib/labs/kerneltesting/conv2d/io_accessor.h b/contrib/labs/kerneltesting/conv2d/io_accessor.h
deleted file mode 100644
index 0201f7242..000000000
--- a/contrib/labs/kerneltesting/conv2d/io_accessor.h
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (c) 2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#ifndef __CONV2D_IO_ACCESSOR_H__
-#define __CONV2D_IO_ACCESSOR_H__
-
-#include <arm_compute/graph/ITensorAccessor.h>
-#include <arm_compute/runtime/CL/CLFunctions.h>
-
-#include "types.h"
-
-class InputAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- InputAccessor(const float* inputData, const Shape& inputShape);
- InputAccessor(InputAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-
-private:
- const float* _inputData;
- const Shape& _inputShape;
-};
-
-class WeightAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- WeightAccessor(const float* filterData, const Shape& filterShape);
- WeightAccessor(WeightAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-
-private:
- const float* _filterData;
- const Shape& _filterShape;
-};
-
-class BiasAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- BiasAccessor(const float* biasData, const Shape& biasShape);
- BiasAccessor(BiasAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-
-private:
- const float* _biasData;
- const Shape& _biasShape;
-};
-
-class OutputAccessor : public arm_compute::graph::ITensorAccessor
-{
-public:
- OutputAccessor(float* outputData, const Shape& outputShape);
- OutputAccessor(OutputAccessor&&) = default;
-
- // Inherited methods overriden:
- bool access_tensor(arm_compute::ITensor& tensor) override;
-
-private:
- float* _outputData;
- const Shape& _outputShape;
-};
-
-#endif // __CONV2D_IO_ACCESSOR_H__
diff --git a/contrib/labs/kerneltesting/conv2d/nnfw_conv2d_test.cpp b/contrib/labs/kerneltesting/conv2d/nnfw_conv2d_test.cpp
deleted file mode 100644
index 190be016e..000000000
--- a/contrib/labs/kerneltesting/conv2d/nnfw_conv2d_test.cpp
+++ /dev/null
@@ -1,607 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (c) 2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include <iostream>
-#include <vector>
-#include <cassert>
-
-#include <Eigen/Core>
-#include <gemmlowp.h>
-
-#include "types.h"
-#include "common.h"
-#include "optimized_ops.h"
-#include "OperationUtils.h"
-
-#include <arm_compute/graph.h>
-
-#include <arm_compute/runtime/CL/CLFunctions.h>
-#include <arm_compute/runtime/CL/functions/CLConvolution.h>
-
-#include "io_accessor.h"
-#include "misc/environment.h"
-
-static constexpr int kStaticBufferSize = 1605632;
-static char static_scratch_buffer[kStaticBufferSize];
-
-#define ANDROID_NN_CONV_PARAMETERS(Type) \
- uint32_t height = getSizeOfDimension(inputShape, 1); \
- uint32_t width = getSizeOfDimension(inputShape, 2); \
- uint32_t filterHeight = getSizeOfDimension(filterShape, 1); \
- uint32_t filterWidth = getSizeOfDimension(filterShape, 2); \
- uint32_t outHeight = getSizeOfDimension(outputShape, 1); \
- uint32_t outWidth = getSizeOfDimension(outputShape, 2); \
- uint32_t inDepth = getSizeOfDimension(inputShape, 3); \
- \
- uint32_t paddingHeight = (uint32_t)padding_top; \
- uint32_t paddingWidth = (uint32_t)padding_left; \
- \
- Dims<4> im2colDim; \
- im2colDim.sizes[3] = (int)getSizeOfDimension(outputShape, 0); \
- im2colDim.sizes[2] = (int)getSizeOfDimension(outputShape, 1); \
- im2colDim.sizes[1] = (int)getSizeOfDimension(outputShape, 2); \
- im2colDim.sizes[0] = (int)inDepth * filterHeight * filterWidth; \
- \
- im2colDim.strides[0] = 1; \
- for (int i=1; i<4; i++) { \
- im2colDim.strides[i] = im2colDim.strides[i-1] * im2colDim.sizes[i-1]; \
- } \
- \
- Type* im2colData = nullptr; \
- int im2colByteSize = sizeof(Type); \
- for (int i=0; i<4; i++) { \
- im2colByteSize *= im2colDim.sizes[i]; \
- } \
- if (im2colByteSize <= kStaticBufferSize) { \
- im2colData = reinterpret_cast<Type *>(static_scratch_buffer); \
- } else { \
- im2colData = new (std::nothrow) Type[im2colByteSize / sizeof(Type)]; \
- }
-
-
-bool convFloat32(const float* inputData, const Shape& inputShape,
- const float* filterData, const Shape& filterShape,
- const float* biasData, const Shape& biasShape,
- int32_t padding_left, int32_t padding_right,
- int32_t padding_top, int32_t padding_bottom,
- int32_t stride_width, int32_t stride_height,
- int32_t activation,
- float* outputData, const Shape& outputShape) {
-
- ANDROID_NN_CONV_PARAMETERS(float)
-
- #define ANDROID_NN_CONV(activation) \
- Conv<FusedActivationFunctionType::activation>( \
- inputData, convertShapeToDims(inputShape), \
- filterData, convertShapeToDims(filterShape), \
- biasData, convertShapeToDims(biasShape), \
- stride_width, stride_height, paddingWidth, paddingHeight, \
- outputData, convertShapeToDims(outputShape), \
- im2colData, im2colDim)
-
- ANDROID_NN_MACRO_DISPATCH(ANDROID_NN_CONV)
-
- #undef ANDROID_NN_CONV
-
- if (im2colByteSize > kStaticBufferSize) {
- delete[] im2colData;
- }
- return true;
-}
-
-//-----------------------------------------------------------------------------
-
-using arm_compute::DataType;
-using arm_compute::graph::Target;
-using arm_compute::graph::TensorDescriptor;
-using arm_compute::TensorShape;
-using arm_compute::graph::frontend::InputLayer;
-using arm_compute::graph::frontend::OutputLayer;
-
-namespace acl_graph {
-
-bool convFloat32(const float* inputData, const Shape& inputShape,
- const float* filterData, const Shape& filterShape,
- const float* biasData, const Shape& biasShape,
- int32_t padding_left, int32_t padding_right,
- int32_t padding_top, int32_t padding_bottom,
- int32_t stride_width, int32_t stride_height,
- int32_t activation,
- float* outputData, const Shape& outputShape)
-{
- // Simple build-and-run test using the ACL graph frontend
- arm_compute::graph::frontend::Stream graph{0, "ACL_CONV2D_TEST"};
-
- Target target_hint = nnfw::misc::get_env_int("NNFW_ACL_USENEON")
- ? Target::NEON : Target::CL;
-
- // NN operand shapes are NHWC: index 0 = batch, 1 = height, 2 = width, 3 = channels;
- // ACL's TensorShape below is constructed as (W, H, C, N).
- uint32_t tsi_n = getSizeOfDimension(inputShape, 0);
- uint32_t tsi_h = getSizeOfDimension(inputShape, 1);
- uint32_t tsi_w = getSizeOfDimension(inputShape, 2);
- uint32_t tsi_c = getSizeOfDimension(inputShape, 3);
-
- // NN filter layout is [out_channels, H, W, in_channels]; dimension 0 is the filter count.
- uint32_t tsk_h = getSizeOfDimension(filterShape, 1);
- uint32_t tsk_w = getSizeOfDimension(filterShape, 2);
- uint32_t tsk_n = getSizeOfDimension(filterShape, 0);
-
- graph << target_hint
- << InputLayer(TensorDescriptor(TensorShape(tsi_w, tsi_h, tsi_c, tsi_n), DataType::F32),
- std::unique_ptr<InputAccessor>(new InputAccessor(inputData, inputShape)))
- << arm_compute::graph::frontend::ConvolutionLayer(
- tsk_w, tsk_h, tsk_n,
- std::unique_ptr<WeightAccessor>(new WeightAccessor(filterData, filterShape)),
- std::unique_ptr<BiasAccessor>(new BiasAccessor(biasData, biasShape)),
- arm_compute::PadStrideInfo(stride_width, stride_height, padding_top, padding_bottom))
- ;
- if (activation != static_cast<int32_t>(FusedActivationFunc::NONE)) {
- arm_compute::ActivationLayerInfo::ActivationFunction actFunc =
- arm_compute::ActivationLayerInfo::ActivationFunction::RELU;
-
- graph << arm_compute::graph::frontend::ActivationLayer(arm_compute::ActivationLayerInfo(actFunc));
- // Activation does not provide an output Tensor, so the next layer fails to attach
- // when Activation is the last (output) layer. To work around this, add a dummy
- // Reshape layer.
- uint32_t tso_n = getSizeOfDimension(outputShape, 0);
- uint32_t tso_h = getSizeOfDimension(outputShape, 1);
- uint32_t tso_w = getSizeOfDimension(outputShape, 2);
- uint32_t tso_c = getSizeOfDimension(outputShape, 3);
- graph << arm_compute::graph::frontend::ReshapeLayer(TensorShape(tso_w, tso_h, tso_c, tso_n));
- }
- graph << OutputLayer(std::unique_ptr<OutputAccessor>(new OutputAccessor(outputData, outputShape)))
- ;
-
- graph.run();
-
- return true;
-}
-
-} // namespace acl_graph
-
-//-----------------------------------------------------------------------------
-
-using arm_compute::TensorInfo;
-
-namespace acl_runtime {
-
-TensorShape calculate_convolution_layer_output_shape(
- const arm_compute::TensorShape &input_shape,
- const arm_compute::TensorShape &weights_shape,
- const arm_compute::PadStrideInfo &conv_info)
-{
- unsigned int output_width = 0;
- unsigned int output_height = 0;
-
- // Get output width and height
- std::tie(output_width, output_height) =
- arm_compute::scaled_dimensions(
- input_shape.x(), input_shape.y(),
- weights_shape.x(), weights_shape.y(),
- conv_info);
-
- // Create output shape
- TensorShape output_shape = input_shape;
- output_shape.set(0, output_width);
- output_shape.set(1, output_height);
- output_shape.set(2, weights_shape[3]);
-
- return output_shape;
-}
-
-bool convFloat32(const float* inputData, const Shape& inputShape,
- const float* filterData, const Shape& filterShape,
- const float* biasData, const Shape& biasShape,
- int32_t padding_left, int32_t padding_right,
- int32_t padding_top, int32_t padding_bottom,
- int32_t stride_width, int32_t stride_height,
- int32_t activation,
- float* outputData, const Shape& outputShape)
-{
- arm_compute::CLScheduler::get().default_init();
-
- // As above: NN shapes are NHWC, ACL TensorShape is (W, H, C, N).
- uint32_t tsi_n = getSizeOfDimension(inputShape, 0);
- uint32_t tsi_h = getSizeOfDimension(inputShape, 1);
- uint32_t tsi_w = getSizeOfDimension(inputShape, 2);
- uint32_t tsi_c = getSizeOfDimension(inputShape, 3);
-
- uint32_t tsk_h = getSizeOfDimension(filterShape, 1);
- uint32_t tsk_w = getSizeOfDimension(filterShape, 2);
- uint32_t tsk_n = getSizeOfDimension(filterShape, 0);
-
- TensorShape input_shape = TensorShape(tsi_w, tsi_h, tsi_c, tsi_n);
- TensorShape filter_shape = TensorShape(tsk_w, tsk_h, tsi_c, tsk_n);
- arm_compute::PadStrideInfo conv_info =
- arm_compute::PadStrideInfo(stride_width, stride_height, padding_top, padding_bottom);
-
- TensorShape output_shape = calculate_convolution_layer_output_shape(
- input_shape, filter_shape, conv_info);
-
- // calculate_convolution_layer_output_shape() above returns (W, H, C, N).
- uint32_t tso_w = output_shape[0];
- uint32_t tso_h = output_shape[1];
- uint32_t tso_c = output_shape[2];
- uint32_t tso_n = output_shape[3];
-
- arm_compute::CLTensor input, output, bias, filter;
-
- input.allocator()->init(TensorInfo(tsi_w, tsi_h, arm_compute::Format::F32));
- output.allocator()->init(TensorInfo(tso_w, tso_h, arm_compute::Format::F32));
- bias.allocator()->init(TensorInfo(tso_w, tso_h, arm_compute::Format::F32));
- filter.allocator()->init(TensorInfo(tsk_w, tsk_h, arm_compute::Format::F32));
-
- input.allocator()->allocate();
- output.allocator()->allocate();
- bias.allocator()->allocate();
- filter.allocator()->allocate();
-
- input.map();
- InputAccessor ia(inputData, inputShape);
- ia.access_tensor(input);
- input.unmap();
-
- bias.map();
- BiasAccessor ba(biasData, biasShape);
- ba.access_tensor(bias);
- bias.unmap();
-
- filter.map();
- WeightAccessor fa(filterData, filterShape);
- fa.access_tensor(filter);
- filter.unmap();
-
- arm_compute::CLConvolutionLayer conv_f;
- conv_f.configure(&input, &filter, &bias, &output, conv_info);
-
- conv_f.run();
-
- // Wait for the enqueued CL kernels to finish before reading the output back.
- arm_compute::CLScheduler::get().sync();
-
- output.map();
- OutputAccessor oa(outputData, outputShape);
- oa.access_tensor(output);
- output.unmap();
-
- return true;
-}
-
-} // namespace acl_runtime
-
-//-----------------------------------------------------------------------------
-
-enum COMPUTE_TYPE {
- COMPUTE_DEFAULT = 0,
- COMPUTE_ACLGRAPH,
- COMPUTE_ACLRT
-};
-
-bool convFloat32(const float* inputData, const Shape& inputShape,
- const float* filterData, const Shape& filterShape,
- const float* biasData, const Shape& biasShape,
- int32_t padding_left, int32_t padding_right,
- int32_t padding_top, int32_t padding_bottom,
- int32_t stride_width, int32_t stride_height,
- int32_t activation,
- float* outputData, const Shape& outputShape,
- COMPUTE_TYPE compType) {
-
- switch (compType)
- {
- case COMPUTE_DEFAULT :
- return convFloat32(inputData, inputShape, filterData, filterShape,
- biasData, biasShape, padding_left, padding_right,
- padding_top, padding_bottom, stride_width, stride_height,
- activation, outputData, outputShape);
-
- case COMPUTE_ACLGRAPH :
- return acl_graph::convFloat32(inputData, inputShape, filterData, filterShape,
- biasData, biasShape, padding_left, padding_right,
- padding_top, padding_bottom, stride_width, stride_height,
- activation, outputData, outputShape);
-
- case COMPUTE_ACLRT :
- return acl_runtime::convFloat32(inputData, inputShape, filterData, filterShape,
- biasData, biasShape, padding_left, padding_right,
- padding_top, padding_bottom, stride_width, stride_height,
- activation, outputData, outputShape);
- }
- return false;
-}
-
-//-----------------------------------------------------------------------------
-
-void dumpData(const char* name, const float* data, const Shape& shape)
-{
- uint32_t height = getSizeOfDimension(shape, 1);
- uint32_t width = getSizeOfDimension(shape, 2);
-
- std::cout << "---" << name << "---" << std::endl;
- for (int h = 0; h < height; h++) {
- std::cout << "H=" << h << " | ";
- for (int w = 0; w < width; w++) {
- std::cout << data[h * width + w] << ",";
- }
- std::cout << std::endl;
- }
-}
-
-void initData(float* outputData, int num, float value)
-{
- for (int i = 0; i < num; i++) {
- *(outputData + i) = value;
- }
-}
-
-void initDataSeq(float* outputData, int num, float value)
-{
- for (int i = 0; i < num; i++) {
- *(outputData + i) = value;
- value += 1.0;
- }
-}
-
-// compareData
-// return true if result == expected with the shape info,
-// otherwise false
-bool compareData(const float* result, const float* expected, const Shape& shape)
-{
- NN_CHECK_EQ(shape.dimensions.size(), 4);
-
- uint32_t height = getSizeOfDimension(shape, 1);
- uint32_t width = getSizeOfDimension(shape, 2);
- uint32_t numitems = height * width;
- for (int item = 0; item < numitems; item++) {
- if (*(result + item) != *(expected + item)) {
- LOG(ERROR) << "compareData failed: result " << *(result + item)
- << ", expected " << *(expected + item) << std::endl;
- return false;
- }
- }
- return true;
-}
-
-int test_3x3_1x1_one(COMPUTE_TYPE comptype)
-{
- float inputData[9];
- const Shape inputShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float filterData[9];
- const Shape filterShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float biasData[1] = { 1.0 };
- const Shape biasShape = { OperandType::FLOAT32, {1,1,1,1}, 1.0, 0 };
- int32_t padding_left = 0;
- int32_t padding_right = 0;
- int32_t padding_top = 0;
- int32_t padding_bottom = 0;
- int32_t stride_width = 1;
- int32_t stride_height = 1;
- int32_t activation = static_cast<int32_t>(FusedActivationFunc::RELU);
- float* outputData = new float[9];
- const Shape outputShape = { OperandType::FLOAT32, {1,1,1,1}, 1.0, 0 };
- float* expectData = new float[9];
- bool bret;
-
- initData(inputData, sizeof(inputData) / sizeof(inputData[0]), 1.0);
- initData(filterData, sizeof(filterData) / sizeof(filterData[0]), 1.0);
- // outputData/expectData are heap arrays of 9 floats; sizeof() on the
- // pointers would not yield the element count.
- initData(outputData, 9, 0.0);
- initData(expectData, 9, 0.0);
-
- bret = convFloat32(inputData, inputShape,
- filterData, filterShape,
- biasData, biasShape,
- padding_left, padding_right,
- padding_top, padding_bottom,
- stride_width, stride_height,
- activation,
- expectData, outputShape,
- COMPUTE_DEFAULT);
-
- bret = convFloat32(inputData, inputShape,
- filterData, filterShape,
- biasData, biasShape,
- padding_left, padding_right,
- padding_top, padding_bottom,
- stride_width, stride_height,
- activation,
- outputData, outputShape,
- comptype);
-
- dumpData("Input ", inputData, inputShape);
- dumpData("Filter ", filterData, filterShape);
- dumpData("Bias ", biasData, biasShape);
- dumpData("Output ", outputData, outputShape);
- std::cout << std::endl;
-
- bret = compareData(outputData, expectData, outputShape);
-
- delete[] outputData;
- delete[] expectData;
-
- if (!bret)
- {
- LOG(ERROR) << "TEST FAILED " << __FUNCTION__ << std::endl;
- return -1;
- }
- return 0;
-}
-
-int test_3x3_3x3_one(void)
-{
- float inputData[9];
- const Shape inputShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float filterData[9];
- const Shape filterShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float biasData[1] = { 1.0 };
- const Shape biasShape = { OperandType::FLOAT32, {1,1,1,1}, 1.0, 0 };
- int32_t padding_left = 1;
- int32_t padding_right = 1;
- int32_t padding_top = 1;
- int32_t padding_bottom = 1;
- int32_t stride_width = 1;
- int32_t stride_height = 1;
- int32_t activation = static_cast<int32_t>(FusedActivationFunc::RELU);
- float* outputData = new float[9];
- const Shape outputShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float* expectData = new float[9];
- bool bret;
-
- initData(inputData, sizeof(inputData) / sizeof(inputData[0]), 1.0);
- initData(filterData, sizeof(filterData) / sizeof(filterData[0]), 1.0);
- initData(outputData, 9, 0.0); // heap arrays of 9 floats; sizeof(pointer) would be wrong
- initData(expectData, 9, 0.0);
-
- bret = convFloat32(inputData, inputShape,
- filterData, filterShape,
- biasData, biasShape,
- padding_left, padding_right,
- padding_top, padding_bottom,
- stride_width, stride_height,
- activation,
- expectData, outputShape,
- COMPUTE_DEFAULT);
-
- bret = convFloat32(inputData, inputShape,
- filterData, filterShape,
- biasData, biasShape,
- padding_left, padding_right,
- padding_top, padding_bottom,
- stride_width, stride_height,
- activation,
- outputData, outputShape,
- COMPUTE_ACLGRAPH);
-
- dumpData("Input ", inputData, inputShape);
- dumpData("Filter ", filterData, filterShape);
- dumpData("Bias ", biasData, biasShape);
- dumpData("Output ", outputData, outputShape);
- std::cout << std::endl;
-
- bret = compareData(outputData, expectData, outputShape);
-
- delete[] outputData;
- delete[] expectData;
-
- if (!bret)
- {
- LOG(ERROR) << "TEST FAILED " << __FUNCTION__ << std::endl;
- return -1;
- }
- return 0;
-}
-
-int test_3x3_3x3_seq(void)
-{
- float inputData[9];
- const Shape inputShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float filterData[9];
- const Shape filterShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float biasData[1] = { 1.0 };
- const Shape biasShape = { OperandType::FLOAT32, {1,1,1,1}, 1.0, 0 };
- int32_t padding_left = 1;
- int32_t padding_right = 1;
- int32_t padding_top = 1;
- int32_t padding_bottom = 1;
- int32_t stride_width = 1;
- int32_t stride_height = 1;
- int32_t activation = static_cast<int32_t>(FusedActivationFunc::RELU);
- float* outputData = new float[9];
- const Shape outputShape = { OperandType::FLOAT32, {1,3,3,1}, 1.0, 0 };
- float* expectData = new float[9];
- bool bret;
-
- initDataSeq(inputData, sizeof(inputData) / sizeof(inputData[0]), 1.0);
- initDataSeq(filterData, sizeof(filterData) / sizeof(filterData[0]), 1.0);
- initDataSeq(outputData, 9, 0.0); // heap arrays of 9 floats; sizeof(pointer) would be wrong
- initData(expectData, 9, 0.0);
-
- bret = convFloat32(inputData, inputShape,
- filterData, filterShape,
- biasData, biasShape,
- padding_left, padding_right,
- padding_top, padding_bottom,
- stride_width, stride_height,
- activation,
- expectData, outputShape,
- COMPUTE_DEFAULT);
-
- bret = convFloat32(inputData, inputShape,
- filterData, filterShape,
- biasData, biasShape,
- padding_left, padding_right,
- padding_top, padding_bottom,
- stride_width, stride_height,
- activation,
- outputData, outputShape,
- COMPUTE_ACLGRAPH);
-
- dumpData("Input ", inputData, inputShape);
- dumpData("Filter ", filterData, filterShape);
- dumpData("Bias ", biasData, biasShape);
- dumpData("Output ", outputData, outputShape);
- std::cout << std::endl;
-
- bret = compareData(outputData, expectData, outputShape);
-
- delete[] outputData;
- delete[] expectData;
-
- if (!bret)
- {
- LOG(ERROR) << "TEST FAILED " << __FUNCTION__ << std::endl;
- return -1;
- }
- return 0;
-}
-
-int main(int argc, char* argv[])
-{
- int result;
-
- // input 3x3, output 1x1, all data 1.0
- result = test_3x3_1x1_one(COMPUTE_ACLGRAPH);
- if (result) return result;
- result = test_3x3_1x1_one(COMPUTE_ACLRT);
- if (result) return result;
-
- // input 3x3, output 3x3, all data 1.0
- result = test_3x3_3x3_one();
- if (result) return result;
-
- result = test_3x3_3x3_seq();
- if (result) return result;
-
- return result;
-}
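As a sanity check, the expected outputs of these tests are easy to derive by hand. With a 3x3 all-ones input, a 3x3 all-ones filter, bias 1.0 and RELU: the unpadded case (test_3x3_1x1_one) produces a single value 9 + 1 = 10, and the padded case (test_3x3_3x3_one) gives overlap-count + bias at each position:

    // Expected 3x3 output of test_3x3_3x3_one
    // (filter overlaps 4 cells at corners, 6 at edges, 9 at the centre, plus bias 1):
    //   5  7  5
    //   7 10  7
    //   5  7  5
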
diff --git a/contrib/labs/kerneltesting/conv2d/optimized_ops.h b/contrib/labs/kerneltesting/conv2d/optimized_ops.h
deleted file mode 100644
index 1d8c4ff28..000000000
--- a/contrib/labs/kerneltesting/conv2d/optimized_ops.h
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_OPTIMIZED_OPS_H_
-#define ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_OPTIMIZED_OPS_H_
-
-// Make a local MatrixMap typedef allowing to map a float array
-// as an Eigen matrix expression, const or non-const depending on
-// the constness of the Scalar type.
-template <typename Scalar>
-using MatrixMap = typename std::conditional<
- std::is_const<Scalar>::value,
- Eigen::Map<const Eigen::Matrix<typename std::remove_const<Scalar>::type,
- Eigen::Dynamic, Eigen::Dynamic>>,
- Eigen::Map<Eigen::Matrix<Scalar, Eigen::Dynamic, Eigen::Dynamic>>>::type;
-
-template <typename Scalar, int N>
-MatrixMap<Scalar> MapAsMatrixWithFirstDimAsRows(Scalar* data,
- const Dims<N>& dims) {
- const int rows = dims.sizes[0];
- int cols = 1;
- for (int d = 1; d < N; d++) {
- cols *= dims.sizes[d];
- }
- return MatrixMap<Scalar>(data, rows, cols);
-}
-
-template <typename Scalar, int N>
-MatrixMap<Scalar> MapAsMatrixWithLastDimAsCols(Scalar* data,
- const Dims<N>& dims) {
- const int cols = dims.sizes[N - 1];
- int rows = 1;
- for (int d = 0; d < N - 1; d++) {
- rows *= dims.sizes[d];
- }
- return MatrixMap<Scalar>(data, rows, cols);
-}
-
-template <typename T>
-inline void ExtractPatchIntoBufferColumn(
- const Dims<4>& input_dims, int w, int h, int b, int kheight, int kwidth,
- int stride_width, int stride_height, int pad_width, int pad_height,
- int in_width, int in_height, int in_depth, int single_buffer_length,
- int buffer_id, const T* in_data, T* conv_buffer_data, uint8 byte_zero) {
- gemmlowp::ScopedProfilingLabel label("ExtractPatchIntoBufferColumn");
- // This chunk of code reshapes all the inputs corresponding to
- // output (b, h, w) to a column vector in conv_buffer(:, buffer_id).
- const int kwidth_times_indepth = kwidth * in_depth;
- const int inwidth_times_indepth = in_width * in_depth;
- const int ih_ungated_start = h * stride_height - pad_height;
- const int ih_ungated_end = (ih_ungated_start + kheight);
- const int ih_end = std::min(ih_ungated_end, in_height);
- const int iw_ungated_start = w * stride_width - pad_width;
- const int iw_ungated_end = (iw_ungated_start + kwidth);
- const int iw_end = std::min(iw_ungated_end, in_width);
- // If the patch is off the edge of the input image, skip writing those rows
- // and columns from the patch into the output array.
- const int h_offset = std::max(0, -ih_ungated_start);
- const int w_offset = std::max(0, -iw_ungated_start);
- const int ih_start = std::max(0, ih_ungated_start);
- const int iw_start = std::max(0, iw_ungated_start);
- const int single_row_num =
- std::min(kwidth - w_offset, in_width - iw_start) * in_depth;
- const int output_row_offset = (buffer_id * single_buffer_length);
- int out_offset =
- output_row_offset + (h_offset * kwidth + w_offset) * in_depth;
- int in_offset = Offset(input_dims, 0, iw_start, ih_start, b);
-
- // Express all of the calculations as padding around the input patch.
- const int top_padding = h_offset;
- const int bottom_padding = (ih_ungated_end - ih_end);
- const int left_padding = w_offset;
- const int right_padding = (iw_ungated_end - iw_end);
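- // Worked example: with a 3x3 kernel, stride 1, and pad 1, the patch for
- // output (h, w) = (0, 0) starts at ih_ungated_start = -1, giving
- // top_padding = left_padding = 1, so the first patch row and column are
- // zero-filled below.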
- assert(single_row_num ==
- ((kwidth - (left_padding + right_padding)) * in_depth));
-
- // Write out zeroes to the elements representing the top rows of the input
- // patch that are off the edge of the input image.
- if (top_padding > 0) {
- const int top_row_elements = (top_padding * kwidth * in_depth);
- memset(conv_buffer_data + output_row_offset, byte_zero,
- (top_row_elements * sizeof(T)));
- }
-
- // If the patch is on the interior of the input image horizontally, just copy
- // over the rows sequentially, otherwise add zero padding at the start or end.
- if ((left_padding == 0) && (right_padding == 0)) {
- for (int ih = ih_start; ih < ih_end; ++ih) {
- memcpy(conv_buffer_data + out_offset, in_data + in_offset,
- single_row_num * sizeof(T));
- out_offset += kwidth_times_indepth;
- in_offset += inwidth_times_indepth;
- }
- } else {
- for (int ih = ih_start; ih < ih_end; ++ih) {
- if (left_padding > 0) {
- const int left_start = (out_offset - (left_padding * in_depth));
- memset(conv_buffer_data + left_start, byte_zero,
- (left_padding * in_depth * sizeof(T)));
- }
- memcpy(conv_buffer_data + out_offset, in_data + in_offset,
- single_row_num * sizeof(T));
- if (right_padding > 0) {
- const int right_start = (out_offset + single_row_num);
- memset(conv_buffer_data + right_start, byte_zero,
- (right_padding * in_depth * sizeof(T)));
- }
- out_offset += kwidth_times_indepth;
- in_offset += inwidth_times_indepth;
- }
- }
-
- // If the bottom of the patch falls off the input image, pad the values
- // representing those input rows with zeroes.
- if (bottom_padding > 0) {
- const int bottom_row_elements = (bottom_padding * kwidth * in_depth);
- const int bottom_start =
- output_row_offset +
- ((top_padding + (ih_end - ih_start)) * kwidth * in_depth);
- memset(conv_buffer_data + bottom_start, byte_zero,
- (bottom_row_elements * sizeof(T)));
- }
-}
-
-#ifdef USE_NEON
-template <FusedActivationFunctionType Ac>
-void AddBiasAndEvalActivationFunction(const float* bias_data,
- const Dims<4>& bias_dims,
- float* array_data,
- const Dims<4>& array_dims) {
- gemmlowp::ScopedProfilingLabel label("AddBiasAndEvalActivationFunction");
- const int bias_size = bias_dims.sizes[3] * bias_dims.strides[3];
- const int array_size = array_dims.sizes[3] * array_dims.strides[3];
- DCHECK_EQ((array_size % bias_size), 0);
- float* array_ptr = array_data;
- float* array_end_ptr = array_ptr + array_size;
- const auto zero = vdupq_n_f32(0);
- const auto six = vdupq_n_f32(6);
- const auto neg_one = vdupq_n_f32(-1);
- const auto one = vdupq_n_f32(1);
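- // The work is done in three passes: 16 floats at a time using four NEON
- // registers, then 4 at a time, then a scalar tail for the remainder.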
- for (; array_ptr != array_end_ptr; array_ptr += bias_size) {
- int i = 0;
- for (; i <= bias_size - 16; i += 16) {
- auto b0 = vld1q_f32(bias_data + i);
- auto b1 = vld1q_f32(bias_data + i + 4);
- auto b2 = vld1q_f32(bias_data + i + 8);
- auto b3 = vld1q_f32(bias_data + i + 12);
- auto a0 = vld1q_f32(array_ptr + i);
- auto a1 = vld1q_f32(array_ptr + i + 4);
- auto a2 = vld1q_f32(array_ptr + i + 8);
- auto a3 = vld1q_f32(array_ptr + i + 12);
- auto x0 = vaddq_f32(a0, b0);
- auto x1 = vaddq_f32(a1, b1);
- auto x2 = vaddq_f32(a2, b2);
- auto x3 = vaddq_f32(a3, b3);
- if (Ac == FusedActivationFunctionType::kRelu ||
- Ac == FusedActivationFunctionType::kRelu6) {
- x0 = vmaxq_f32(zero, x0);
- x1 = vmaxq_f32(zero, x1);
- x2 = vmaxq_f32(zero, x2);
- x3 = vmaxq_f32(zero, x3);
- if (Ac == FusedActivationFunctionType::kRelu6) {
- x0 = vminq_f32(six, x0);
- x1 = vminq_f32(six, x1);
- x2 = vminq_f32(six, x2);
- x3 = vminq_f32(six, x3);
- }
- } else if (Ac == FusedActivationFunctionType::kRelu1) {
- x0 = vmaxq_f32(neg_one, x0);
- x1 = vmaxq_f32(neg_one, x1);
- x2 = vmaxq_f32(neg_one, x2);
- x3 = vmaxq_f32(neg_one, x3);
- x0 = vminq_f32(one, x0);
- x1 = vminq_f32(one, x1);
- x2 = vminq_f32(one, x2);
- x3 = vminq_f32(one, x3);
- }
- vst1q_f32(array_ptr + i, x0);
- vst1q_f32(array_ptr + i + 4, x1);
- vst1q_f32(array_ptr + i + 8, x2);
- vst1q_f32(array_ptr + i + 12, x3);
- }
- for (; i <= bias_size - 4; i += 4) {
- auto b = vld1q_f32(bias_data + i);
- auto a = vld1q_f32(array_ptr + i);
- auto x = vaddq_f32(a, b);
- if (Ac == FusedActivationFunctionType::kRelu ||
- Ac == FusedActivationFunctionType::kRelu6) {
- x = vmaxq_f32(zero, x);
- if (Ac == FusedActivationFunctionType::kRelu6) {
- x = vminq_f32(six, x);
- }
- } else if (Ac == FusedActivationFunctionType::kRelu1) {
- x = vmaxq_f32(neg_one, x);
- x = vminq_f32(one, x);
- }
- vst1q_f32(array_ptr + i, x);
- }
- for (; i < bias_size; i++) {
- array_ptr[i] = ActivationFunction<Ac>(array_ptr[i] + bias_data[i]);
- }
- }
-}
-#else // not NEON
-template <FusedActivationFunctionType Ac>
-void AddBiasAndEvalActivationFunction(const float* bias_data,
- const Dims<4>& bias_dims,
- float* array_data,
- const Dims<4>& array_dims) {
- gemmlowp::ScopedProfilingLabel label("AddBiasAndEvalActivationFunction");
- const int bias_size = bias_dims.sizes[3] * bias_dims.strides[3];
- const int array_size = array_dims.sizes[3] * array_dims.strides[3];
- DCHECK_EQ((array_size % bias_size), 0);
- for (int array_offset = 0; array_offset < array_size;
- array_offset += bias_size) {
- for (int i = 0; i < bias_size; i++) {
- array_data[array_offset + i] =
- ActivationFunction<Ac>(array_data[array_offset + i] + bias_data[i]);
- }
- }
-}
-#endif
-
-template <typename Lhs, typename Rhs, typename Result>
-void Gemm(const Eigen::MatrixBase<Lhs>& lhs, const Eigen::MatrixBase<Rhs>& rhs,
- Eigen::MatrixBase<Result>* result) {
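- // A single-column rhs makes the product a matrix-vector multiply, so it is
- // dispatched (and profiled) separately as GEMV.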
- if (rhs.cols() == 1) {
- gemmlowp::ScopedProfilingLabel label("GEMV");
- result->col(0).noalias() = lhs * rhs.col(0);
- } else {
- gemmlowp::ScopedProfilingLabel label("GEMM");
- result->noalias() = lhs * rhs;
- }
-}
-
-template <typename T>
-void Im2col(const T* input_data, const Dims<4>& input_dims, int stride_width,
- int stride_height, int pad_width, int pad_height, int kheight,
- int kwidth, uint8 byte_zero, T* output_data,
- const Dims<4>& output_dims) {
- gemmlowp::ScopedProfilingLabel label("Im2col");
- DCHECK(IsPackedWithoutStrides(input_dims));
- DCHECK(IsPackedWithoutStrides(output_dims));
- const int batches = MatchingArraySize(input_dims, 3, output_dims, 3);
- const int input_depth = ArraySize(input_dims, 0);
- const int input_width = ArraySize(input_dims, 1);
- const int input_height = ArraySize(input_dims, 2);
- const int output_depth = ArraySize(output_dims, 0);
- const int output_width = ArraySize(output_dims, 1);
- const int output_height = ArraySize(output_dims, 2);
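- // Note: the caller is expected to size output_depth as
- // kwidth * kheight * input_depth, one im2col column per output position.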
-
- int buffer_id = 0;
- // Loop over the output nodes.
- for (int b = 0; b < batches; ++b) {
- for (int h = 0; h < output_height; ++h) {
- for (int w = 0; w < output_width; ++w) {
- ExtractPatchIntoBufferColumn(
- input_dims, w, h, b, kheight, kwidth, stride_width, stride_height,
- pad_width, pad_height, input_width, input_height, input_depth,
- output_depth, buffer_id, input_data, output_data, byte_zero);
- ++buffer_id;
- }
- }
- }
-}
-
-template <FusedActivationFunctionType Ac>
-void Conv(const float* input_data, const Dims<4>& input_dims,
- const float* filter_data, const Dims<4>& filter_dims,
- const float* bias_data, const Dims<4>& bias_dims, int stride_width,
- int stride_height, int pad_width, int pad_height, float* output_data,
- const Dims<4>& output_dims, float* im2col_data,
- const Dims<4>& im2col_dims) {
- (void)im2col_data;
- (void)im2col_dims;
- gemmlowp::ScopedProfilingLabel label("Conv");
-
- const float* gemm_input_data = nullptr;
- const Dims<4>* gemm_input_dims = nullptr;
- const int filter_width = ArraySize(filter_dims, 1);
- const int filter_height = ArraySize(filter_dims, 2);
- const bool need_im2col = stride_width != 1 || stride_height != 1 ||
- filter_width != 1 || filter_height != 1;
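- // A 1x1 filter with stride 1 reads every input element exactly once, so the
- // input is already laid out as the im2col matrix and can feed the GEMM
- // directly.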
- if (need_im2col) {
- DCHECK(im2col_data);
- Im2col(input_data, input_dims, stride_width, stride_height, pad_width,
- pad_height, filter_height, filter_width, 0, im2col_data,
- im2col_dims);
- gemm_input_data = im2col_data;
- gemm_input_dims = &im2col_dims;
- } else {
- DCHECK(!im2col_data);
- gemm_input_data = input_data;
- gemm_input_dims = &input_dims;
- }
-
- const auto im2col_matrix_map =
- MapAsMatrixWithFirstDimAsRows(gemm_input_data, *gemm_input_dims);
- const auto filter_matrix_map =
- MapAsMatrixWithLastDimAsCols(filter_data, filter_dims);
- auto output_matrix_map =
- MapAsMatrixWithFirstDimAsRows(output_data, output_dims);
-
- Gemm(filter_matrix_map.transpose(), im2col_matrix_map, &output_matrix_map);
-
- AddBiasAndEvalActivationFunction<Ac>(bias_data, bias_dims, output_data,
- output_dims);
-}
-
-#endif // ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_OPTIMIZED_OPS_H_
diff --git a/contrib/labs/kerneltesting/conv2d/types.h b/contrib/labs/kerneltesting/conv2d/types.h
deleted file mode 100644
index 3d09457c7..000000000
--- a/contrib/labs/kerneltesting/conv2d/types.h
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_TYPES_H_
-#define ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_TYPES_H_
-
-enum class OperandType : int32_t {
- FLOAT32 = 0,
- INT32 = 1,
- UINT32 = 2,
- TENSOR_FLOAT32 = 3,
- TENSOR_INT32 = 4,
- TENSOR_QUANT8_ASYMM = 5,
- OEM = 10000,
- TENSOR_OEM_BYTE = 10001,
-};
-
-#include "compatibility.h"
-
-enum class FusedActivationFunctionType { kNone, kRelu6, kRelu1, kRelu };
-
-template <int N>
-struct Dims {
- int sizes[N];
- int strides[N];
-};
-
-// The type and dimensions of an operand.
-struct Shape {
- OperandType type;
- std::vector<uint32_t> dimensions;
- float scale;
- int32_t offset;
-};
-
-inline uint32_t getSizeOfDimension(const Shape& shape, uint32_t dimensionIdx) {
- if (dimensionIdx >= shape.dimensions.size()) {
- // TODO: log the error
- return 0;
- }
- return shape.dimensions[dimensionIdx];
-}
-
-inline Dims<4> convertShapeToDims(const Shape& shape) {
- Dims<4> dims;
- for (int i=0; i<4; i++) {
- dims.sizes[i] = 1;
- }
-
- if (shape.dimensions.size() == 1) {
- dims.sizes[0] = (int)getSizeOfDimension(shape, 0);
- } else {
- for (int i=0; i<4; i++) {
- int src = (int)shape.dimensions.size()-i-1;
- if (src >= 0) {
- dims.sizes[i] = (int)getSizeOfDimension(shape, src);
- }
- }
- }
-
- dims.strides[0] = 1;
- for (int i = 1; i<4; i++) {
- dims.strides[i] = dims.strides[i-1] * dims.sizes[i-1];
- }
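- // e.g. an NHWC shape {1, 3, 4, 2} is stored reversed as sizes {2, 4, 3, 1}
- // (C, W, H, N order) with packed strides {1, 2, 8, 24}.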
- return dims;
-}
-
-inline int Offset(const Dims<4>& dims, int i0, int i1, int i2, int i3) {
- DCHECK(i0 >= 0 && i0 < dims.sizes[0]);
- DCHECK(i1 >= 0 && i1 < dims.sizes[1]);
- DCHECK(i2 >= 0 && i2 < dims.sizes[2]);
- DCHECK(i3 >= 0 && i3 < dims.sizes[3]);
- return i0 * dims.strides[0] + i1 * dims.strides[1] + i2 * dims.strides[2] +
- i3 * dims.strides[3];
-}
-
-// Get array size, DCHECKing that the dim index is in range.
-template <int N>
-int ArraySize(const Dims<N>& array, int index) {
- DCHECK(index >= 0 && index < N);
- return array.sizes[index];
-}
-
-// Get common array size, DCHECKing that they all agree.
-template <typename ArrayType1, typename ArrayType2>
-int MatchingArraySize(const ArrayType1& array1, int index1,
- const ArrayType2& array2, int index2) {
- DCHECK_EQ(ArraySize(array1, index1), ArraySize(array2, index2));
- return ArraySize(array1, index1);
-}
-
-template <typename ArrayType1, typename ArrayType2, typename... Args>
-int MatchingArraySize(const ArrayType1& array1, int index1,
- const ArrayType2& array2, int index2, Args... args) {
- DCHECK_EQ(ArraySize(array1, index1), ArraySize(array2, index2));
- return MatchingArraySize(array1, index1, args...);
-}
-
-inline int RequiredBufferSizeForDims(const Dims<4>& dims) {
- int max_offset = 0;
- for (int i = 0; i < 4; i++) {
- max_offset += (dims.sizes[i] - 1) * dims.strides[i];
- }
- return max_offset + 1;
-}
-
-template <int N>
-bool IsPackedWithoutStrides(const Dims<N>& dims) {
- int expected_stride = 1;
- for (int d = 0; d < N; d++) {
- if (dims.strides[d] != expected_stride) return false;
- expected_stride *= dims.sizes[d];
- }
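- // Every stride matched the running product of the sizes, i.e. the dense
- // layout convertShapeToDims produces.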
- return true;
-}
-
-#endif // ANDROID_ML_NN_COMMON_OPERATIONS_INTERNAL_TYPES_H_
diff --git a/contrib/labs/opencl_test/CMakeLists.txt b/contrib/labs/opencl_test/CMakeLists.txt
deleted file mode 100644
index 5e99fa051..000000000
--- a/contrib/labs/opencl_test/CMakeLists.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-if(NOT ${TARGET_ARCH_BASE} STREQUAL "arm")
- return()
-endif(NOT ${TARGET_ARCH_BASE} STREQUAL "arm")
-
-list(APPEND OPENCL_INFO_SOURCE "src/opencl_test.cc")
-
-add_executable(opencl_test ${OPENCL_INFO_SOURCE})
-target_include_directories(opencl_test PUBLIC ${CMAKE_SOURCE_DIR}/externals/acl)
-target_include_directories(opencl_test PUBLIC ${CMAKE_SOURCE_DIR}/externals/acl/include)
-target_include_directories(opencl_test PUBLIC ${CMAKE_SOURCE_DIR}/libs/ARMComputeEx)
-target_link_libraries(opencl_test arm_compute)
-target_link_libraries(opencl_test arm_compute_ex)
diff --git a/contrib/labs/opencl_test/src/opencl_test.cc b/contrib/labs/opencl_test/src/opencl_test.cc
deleted file mode 100644
index 93994ae43..000000000
--- a/contrib/labs/opencl_test/src/opencl_test.cc
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*******************************************************************************
- * Copyright (c) 2008-2015 The Khronos Group Inc.
- *
- * Permission is hereby granted, free of charge, to any person obtaining a
- * copy of this software and/or associated documentation files (the
- * "Materials"), to deal in the Materials without restriction, including
- * without limitation the rights to use, copy, modify, merge, publish,
- * distribute, sublicense, and/or sell copies of the Materials, and to
- * permit persons to whom the Materials are furnished to do so, subject to
- * the following conditions:
- *
- * The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Materials.
- *
- * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
- * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
- * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
- * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
- ******************************************************************************/
-
-#include "arm_compute/core/CL/OpenCL.h"
-#include "arm_compute/core/CL/OpenCLEx.h"
-
-#include <iostream>
-#include <vector>
-
-void printDeviceInfo(int n, cl::Device &device, cl::Device &default_device)
-{
- bool is_default = (device() == default_device());
- std::cout << "\t\t\t#" << n << " Device: (id: " << device() << ") "
- << (is_default ? " -> default" : "") << "\n";
-
- const auto name = device.getInfo<CL_DEVICE_NAME>();
- std::cout << "\t\t\t\tName: " << name << "\n";
-
- const auto compute_unit = device.getInfo<CL_DEVICE_MAX_COMPUTE_UNITS>();
- std::cout << "\t\t\t\tMax Compute Unit: " << compute_unit << "\n";
-
- const auto max_work_item_size = device.getInfo<CL_DEVICE_MAX_WORK_ITEM_SIZES>();
- std::cout << "\t\t\t\tMax Work Item Size: [";
- for (auto size : max_work_item_size)
- std::cout << size << ",";
- std::cout << "]\n";
-
- const auto max_work_group_size = device.getInfo<CL_DEVICE_MAX_WORK_GROUP_SIZE>();
- std::cout << "\t\t\t\tMax Work Grpup Size: " << max_work_group_size << "\n";
-
- const auto max_clock_frequency = device.getInfo<CL_DEVICE_MAX_CLOCK_FREQUENCY>();
- std::cout << "\t\t\t\tMax Clock Frequency: " << max_clock_frequency << "\n";
-
- std::cout << "\n";
-}
-
-
-class OpenCLGpu
-{
- public:
- cl::Platform platform_;
- cl::Context context_;
- cl::vector<cl::Device> devices_;
- std::vector<cl::CommandQueue*> q_;
- cl::Program program_;
-
- OpenCLGpu()
- {
- cl_int cl_error;
-
- platform_ = cl::Platform::getDefault();
-
- try
- {
- cl_context_properties properties[3] = {
- CL_CONTEXT_PLATFORM, (cl_context_properties)platform_(), 0
- };
-
- context_ = cl::Context(CL_DEVICE_TYPE_GPU, properties, NULL, NULL, &cl_error);
- }
- catch (cl::Error &err) // thrown when there is no Context for this platform
- {
- std::cout << "\t\t No Context Found\n";
- return;
- }
-
- devices_ = context_.getInfo<CL_CONTEXT_DEVICES>();
-
- for (int dev_id = 0; dev_id < devices_.size(); dev_id++)
- {
- cl::CommandQueue* que = new cl::CommandQueue(context_, devices_[dev_id]);
- q_.emplace_back(que);
- }
- }
-
- ~OpenCLGpu()
- {
- for (auto each_q : q_)
- delete each_q;
- }
-
- void buildProgram(std::string& kernel_source_code)
- {
- std::vector<std::string> programStrings {kernel_source_code};
-
- program_ = cl::Program(context_, programStrings);
-
- try
- {
- program_.build("-cl-std=CL1.2");
- }
- catch (cl::Error &err)
- {
- cl_int buildErr = CL_SUCCESS;
- auto buildInfo = program_.getBuildInfo<CL_PROGRAM_BUILD_LOG>(&buildErr);
- for (auto &pair : buildInfo) {
- std::cerr << pair.second << std::endl << std::endl;
- }
- }
- }
-};
-
-
-void checkContextMem()
-{
- cl_int cl_error;
-
- // get context, devices
- //
- std::cout << "\nChecking if devices in GPU shares the same memory address:\n\n";
-
- OpenCLGpu gpu;
-
- std::cout << "\nDevices in GPU:\n\n";
-
- auto &devices = gpu.devices_;
- auto default_device = cl::Device::getDefault();
-
- int d = 0;
- for (auto device : devices)
- printDeviceInfo(++d, device, default_device);
-
- if (d < 2)
- {
- std::cout << "\t\t This options works when there are n (>= 2) devices.\n";
- return;
- }
-
- // allocate and map memory
-
- typedef cl_int T;
- const int items_per_device = 128;
- const int length = items_per_device * devices.size();
-
- std::vector<T> input(length);
- std::vector<T> output(length, 0);
-
- for (int i = 0; i < length; i++)
- input[i] = i;
-
- cl::Buffer input_buf(gpu.context_, (cl_mem_flags)CL_MEM_USE_HOST_PTR, length*sizeof(T), input.data(), &cl_error);
- cl::Buffer output_buf(gpu.context_, (cl_mem_flags)CL_MEM_USE_HOST_PTR, length*sizeof(T), output.data(), &cl_error);
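-
- // CL_MEM_USE_HOST_PTR asks the runtime to use the host vectors above as the
- // backing store, which is what lets this test observe whether the devices
- // write through one shared mapping.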
-
- // compile test cl code
-
- std::string kernel_source {
- "typedef int T; \n" \
- "kernel void memory_test( \n" \
- " const int dev_id, \n" \
- " global T* input, \n" \
- " global T* output, \n" \
- " const int start_idx, \n" \
- " const int count) \n" \
- "{ \n" \
- " int input_idx = get_global_id(0); \n" \
- " if(input_idx < count) \n" \
- " { \n" \
- " int output_idx = start_idx + input_idx; \n" \
- " output[output_idx] = input[input_idx] + dev_id; \n" \
- " } \n" \
- "} \n"
- };
-
- gpu.buildProgram(kernel_source);
-
- try
- {
- auto kernel_functor = cl::KernelFunctor<cl_int, cl::Buffer, cl::Buffer, cl_int, cl_int>
- (gpu.program_, "memory_test"); // name should be same as cl function name
-
- // create a queue per device and queue a kernel job
-
- for (int dev_id = 0; dev_id < devices.size(); dev_id++)
- {
- kernel_functor(
- cl::EnqueueArgs(
- *(gpu.q_[dev_id]),
- cl::NDRange(items_per_device)),
- (cl_int)dev_id, // dev id
- input_buf,
- output_buf,
- (cl_int)(items_per_device * dev_id), // start index
- (cl_int)(items_per_device), // count
- cl_error
- );
- }
-
- // sync
-
- for (d = 0; d < devices.size(); d++)
- (gpu.q_[d])->finish();
-
- // check if memory state changed by all devices
-
- cl::copy(*(gpu.q_[0]), output_buf, begin(output), end(output));
-
- bool use_same_memory = true;
-
- for (int dev_id = 0; dev_id < devices.size(); dev_id++)
- {
- for (int i = 0; i < items_per_device; ++i)
- {
- int output_idx = items_per_device * dev_id + i;
- if (output[output_idx] != input[i] + dev_id)
- {
- std::cout << "Output[" << output_idx << "] : "
- << "expected = " << input[i] + dev_id
- << "; actual = " << output[output_idx] << "\n";
- use_same_memory = false;
- break;
- }
- }
- }
-
- if (use_same_memory)
- std::cout << "\n=> Mapped memory addresses used by devices in GPU are same.\n\n";
- else
- std::cout << "\n=> Mapped memory addresses used by devices in GPU are different.\n\n";
- }
- catch (cl::Error &err)
- {
- std::cerr << "error: code: " << err.err() << ", what: " << err.what() << std::endl;
- }
-}
-
-void printHelp()
-{
- std::cout << "opencl information: \n\n";
- std::cout << "\t -h : help\n";
- std::cout << "\t -g : print if memory map is shared among devices in GPU (in default platform)\n\n";
- std::cout << "\t -s : test for synchronized work by two devices in a GPU\n\n";
-}
-
-#include <mutex>
-#include <chrono>
-#include <thread>
-#include <condition_variable>
-
-#define MAX_DEVICE_NUM 8 // just for testing
-
-int kernel_idx[MAX_DEVICE_NUM];
-unsigned char kernel_completed = 0x00; // bit i = 1 means the kernel on device[i] has completed.
-unsigned char kernel_completed_flag; // when kernel_completed equals this value, all kernels have completed
-int device_num;
-std::mutex kernel_complete_handler_mutex;
-
-std::condition_variable wakeup_main;
-std::mutex wakeup_main_mutex;
-
-void notifyKernelFinished(cl_event ev, cl_int ev_info, void * device_idx)
-{
- std::cout << "callback from device[" << *((int*)device_idx) << "] : ==> completed.\n";
-
- std::unique_lock<std::mutex> lock(kernel_complete_handler_mutex);
-
- kernel_completed |= 0x01 << *((int*)device_idx);
- if (kernel_completed == kernel_completed_flag)
- wakeup_main.notify_one();
-}
-
-void testSync()
-{
- OpenCLGpu gpu;
-
- cl_int cl_error;
- typedef cl_int T;
- const int items_per_device = 1024*768;
- const int length = items_per_device * gpu.devices_.size();
-
- std::vector<T> output(length, 0);
-
- cl::Buffer output_buf(gpu.context_, (cl_mem_flags)CL_MEM_USE_HOST_PTR, length*sizeof(T), output.data(), &cl_error);
-
- std::string kernel_source {
- "kernel void test(global float* output, const int count) \n" \
- "{ \n" \
- " int idx = get_global_id(0); \n" \
- " if(idx < count) \n" \
- " { \n" \
- " float x = hypot(idx/1.111, idx*1.111); \n" \
- " for (int y = 0; y < 200; y++) \n" \
- " x = rootn(log(pown(rootn(log(pown(x, 20)), 5), 20)), 5); \n" \
- " output[idx] = x; \n" \
- " } \n" \
- "} \n"
- };
-
- gpu.buildProgram(kernel_source);
-
- try
- {
- auto kernel_functor = cl::KernelFunctor<cl::Buffer, cl_int>
- (gpu.program_, "test"); // name should be same as cl function name
-
- // variable init
- cl::Event ev[MAX_DEVICE_NUM];
-
- device_num = gpu.devices_.size();
-
- kernel_completed = 0;
- kernel_completed_flag = 0;
- for (int i = 0; i < device_num; i++)
- {
- kernel_idx[i] = i;
- kernel_completed_flag |= 0x01 << i;
- }
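-
- // e.g. with two devices kernel_completed_flag becomes 0b11: one bit per
- // device, each set by that device's completion callback.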
-
- // create a queue per device and queue a kernel job
- // queueing with callback function
- for (int dev_id = 0; dev_id < gpu.devices_.size(); dev_id++)
- {
- ev[dev_id] = kernel_functor(
- cl::EnqueueArgs(
- *(gpu.q_[dev_id]),
- cl::NDRange(items_per_device)),
- output_buf,
- (cl_int)(items_per_device), // count
- cl_error
- );
- ev[dev_id].setCallback(CL_COMPLETE, notifyKernelFinished, (void*)(kernel_idx+dev_id));
-
- // how to check kernel execution status
- //
- // auto status = ev[dev_id].getInfo<CL_EVENT_COMMAND_EXECUTION_STATUS>();
- // std::cout << "Event status = " << (status == CL_QUEUED ? "CL_QUEUED" : status == CL_SUBMITTED ? "CL_SUBMITTED" : status == CL_COMPLETE ? "CL_COMPLETE" : "unknown")
- // << std::endl;
- // std::cout << "Event status code = " << status << std::endl;
- }
-
- // wait, potentially for a long time, until the kernels are finished
- {
- std::unique_lock<std::mutex> lk(wakeup_main_mutex);
- wakeup_main.wait(lk, []{ return (kernel_completed == kernel_completed_flag); });
-
- std::cout << "all devices were completed.\n";
- }
- }
- catch (cl::Error &err)
- {
- std::cerr << "error: code: " << err.err() << ", what: " << err.what() << std::endl;
- }
-
-}
-
-int main(const int argc, char **argv)
-{
- if (argc < 2)
- printHelp();
- else
- {
- std::string option = argv[1];
-
- if (option == "-h") // help
- printHelp();
- else if (option == "-g") // check if devices in GPU uses same memory address
- checkContextMem();
- else if (option == "-s") // check synchronization between devices in GPU
- testSync();
- }
- return 0;
-}
diff --git a/contrib/labs/tflite_examples/src/conv.cpp b/contrib/labs/tflite_examples/src/conv.cpp
deleted file mode 100644
index e517da9f3..000000000
--- a/contrib/labs/tflite_examples/src/conv.cpp
+++ /dev/null
@@ -1,330 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/ext/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
-
-#include <iostream>
-
-using namespace tflite;
-using namespace nnfw::tflite;
-
-namespace vector
-{
-
-template <typename T> struct View
-{
- virtual ~View() = default;
-
- virtual int32_t size(void) const = 0;
- virtual T at(uint32_t off) const = 0;
-};
-}
-
-namespace feature
-{
-
-struct Shape
-{
- int32_t C;
- int32_t H;
- int32_t W;
-};
-
-template <typename T> struct View
-{
- virtual ~View() = default;
-
- virtual const Shape &shape(void) const = 0;
- virtual T at(uint32_t ch, uint32_t row, uint32_t col) const = 0;
-};
-}
-
-namespace kernel
-{
-
-struct Shape
-{
- int32_t N;
- int32_t C;
- int32_t H;
- int32_t W;
-};
-
-template <typename T> struct View
-{
- virtual ~View() = default;
-
- virtual const Shape &shape(void) const = 0;
- virtual T at(uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) const = 0;
-};
-}
-
-const int32_t N = 1;
-const int32_t C = 2;
-
-class SampleBiasObject final : public vector::View<float>
-{
-public:
- SampleBiasObject() : _size(N)
- {
- // DO NOTHING
- }
-
-public:
- int32_t size(void) const override { return _size; }
-
- float at(uint32_t off) const override { return 0.0f; }
-
-private:
- int32_t _size;
-};
-
-class SampleFeatureObject final : public feature::View<float>
-{
-public:
- SampleFeatureObject()
- {
- _shape.C = C;
- _shape.H = 3;
- _shape.W = 4;
-
- const uint32_t size = _shape.C * _shape.H * _shape.W;
-
- for (uint32_t off = 0; off < size; ++off)
- {
- _value.emplace_back(off);
- }
-
- assert(_value.size() == size);
- }
-
-public:
- const feature::Shape &shape(void) const override { return _shape; };
-
- float at(uint32_t ch, uint32_t row, uint32_t col) const override
- {
- return _value.at(ch * _shape.H * _shape.W + row * _shape.W + col);
- }
-
-public:
- float &at(uint32_t ch, uint32_t row, uint32_t col)
- {
- return _value.at(ch * _shape.H * _shape.W + row * _shape.W + col);
- }
-
-private:
- feature::Shape _shape;
- std::vector<float> _value;
-};
-
-class SampleKernelObject final : public kernel::View<float>
-{
-public:
- SampleKernelObject()
- {
- _shape.N = N;
- _shape.C = C;
- _shape.H = 3;
- _shape.W = 4;
-
- const uint32_t size = _shape.N * _shape.C * _shape.H * _shape.W;
-
- for (uint32_t off = 0; off < size; ++off)
- {
- _value.emplace_back(off);
- }
-
- assert(_value.size() == size);
- }
-
-public:
- const kernel::Shape &shape(void) const override { return _shape; };
-
- float at(uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) const override
- {
- return _value.at(nth * _shape.C * _shape.H * _shape.W + ch * _shape.H * _shape.W +
- row * _shape.W + col);
- }
-
-private:
- kernel::Shape _shape;
- std::vector<float> _value;
-};
-
-int main(int argc, char **argv)
-{
- const SampleFeatureObject ifm;
- const SampleKernelObject kernel;
- const SampleBiasObject bias;
-
- const int32_t IFM_C = ifm.shape().C;
- const int32_t IFM_H = ifm.shape().H;
- const int32_t IFM_W = ifm.shape().W;
-
- const int32_t KER_N = kernel.shape().N;
- const int32_t KER_C = kernel.shape().C;
- const int32_t KER_H = kernel.shape().H;
- const int32_t KER_W = kernel.shape().W;
-
- const int32_t OFM_C = kernel.shape().N;
- const int32_t OFM_H = (IFM_H - KER_H) + 1;
- const int32_t OFM_W = (IFM_W - KER_W) + 1;
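- // VALID (no-pad) convolution with stride 1: output spatial size is
- // input size - kernel size + 1.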
-
- // Assumption on this example
- assert(IFM_C == KER_C);
- assert(KER_N == bias.size());
-
- // Comment from 'context.h'
- //
- // Parameters for asymmetric quantization. Quantized values can be converted
- // back to float using:
- // real_value = scale * (quantized_value - zero_point);
- //
- // Q: Is this necessary?
- TfLiteQuantizationParams quantization;
-
- quantization.scale = 1;
- quantization.zero_point = 0;
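- // With scale = 1 and zero_point = 0 the mapping above is the identity, so
- // float tensor values pass through unchanged.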
-
- Interpreter interp;
-
- // On AddTensors(N) call, T/F Lite interpreter creates N tensors whose indices are [0, N)
- interp.AddTensors(5);
-
- // Configure OFM
- interp.SetTensorParametersReadWrite(0, kTfLiteFloat32 /* type */, "output" /* name */,
- {1 /*N*/, OFM_H, OFM_W, OFM_C} /* dims */, quantization);
-
- // Configure IFM
- interp.SetTensorParametersReadWrite(1, kTfLiteFloat32 /* type */, "input" /* name */,
- {1 /*N*/, IFM_H, IFM_W, IFM_C} /* dims */, quantization);
-
- // Configure Filter
- const uint32_t kernel_size = KER_N * KER_C * KER_H * KER_W;
- float kernel_data[kernel_size] = {
- 0.0f,
- };
-
- // Fill kernel data in NHWC order
- {
- uint32_t off = 0;
-
- for (uint32_t nth = 0; nth < KER_N; ++nth)
- {
- for (uint32_t row = 0; row < KER_H; ++row)
- {
- for (uint32_t col = 0; col < KER_W; ++col)
- {
- for (uint32_t ch = 0; ch < KER_C; ++ch)
- {
- const auto value = kernel.at(nth, ch, row, col);
- kernel_data[off++] = value;
- }
- }
- }
- }
-
- assert(kernel_size == off);
- }
-
- interp.SetTensorParametersReadOnly(
- 2, kTfLiteFloat32 /* type */, "filter" /* name */, {KER_N, KER_H, KER_W, KER_C} /* dims */,
- quantization, reinterpret_cast<const char *>(kernel_data), sizeof(kernel_data));
-
- // Configure Bias
- const uint32_t bias_size = bias.size();
- float bias_data[bias_size] = {
- 0.0f,
- };
-
- // Fill bias data
- for (uint32_t off = 0; off < bias.size(); ++off)
- {
- bias_data[off] = bias.at(off);
- }
-
- interp.SetTensorParametersReadOnly(3, kTfLiteFloat32 /* type */, "bias" /* name */,
- {bias.size()} /* dims */, quantization,
- reinterpret_cast<const char *>(bias_data), sizeof(bias_data));
-
- // Add Convolution Node
- //
- // NOTE AddNodeWithParameters take the ownership of param, and deallocate it with free
- // So, param should be allocated with malloc
- TfLiteConvParams *param = reinterpret_cast<TfLiteConvParams *>(malloc(sizeof(TfLiteConvParams)));
-
- param->padding = kTfLitePaddingValid;
- param->stride_width = 1;
- param->stride_height = 1;
- param->activation = kTfLiteActRelu;
-
- // Run Convolution and store its result into Tensor #0
- // - Read IFM from Tensor #1
- // - Read Filter from Tensor #2,
- // - Read Bias from Tensor #3
- interp.AddNodeWithParameters({1, 2, 3}, {0}, nullptr, 0, reinterpret_cast<void *>(param),
- BuiltinOpResolver().FindOp(BuiltinOperator_CONV_2D, 1));
-
- // Set Tensor #1 as Input #0, and Tensor #0 as Output #0
- interp.SetInputs({1});
- interp.SetOutputs({0});
-
- // Let's use NNAPI (if possible)
- interp.UseNNAPI(true);
-
- // Allocate Tensor
- interp.AllocateTensors();
-
- // Fill IFM data in HWC order
- {
- uint32_t off = 0;
-
- for (uint32_t row = 0; row < ifm.shape().H; ++row)
- {
- for (uint32_t col = 0; col < ifm.shape().W; ++col)
- {
- for (uint32_t ch = 0; ch < ifm.shape().C; ++ch)
- {
- const auto value = ifm.at(ch, row, col);
- interp.typed_input_tensor<float>(0)[off++] = value;
- }
- }
- }
- }
-
- // Let's Rock-n-Roll!
- interp.Invoke();
-
- // Print OFM
- {
- uint32_t off = 0;
-
- for (uint32_t row = 0; row < OFM_H; ++row)
- {
- for (uint32_t col = 0; col < OFM_W; ++col)
- {
- for (uint32_t ch = 0; ch < kernel.shape().N; ++ch)
- {
- std::cout << interp.typed_output_tensor<float>(0)[off++] << std::endl;
- }
- }
- }
- }
-
- return 0;
-}
diff --git a/contrib/tflite_classify/CMakeLists.txt b/contrib/tflite_classify/CMakeLists.txt
deleted file mode 100644
index 3ae4adcc7..000000000
--- a/contrib/tflite_classify/CMakeLists.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-if(NOT BUILD_TFLITE_CLASSIFY_APP)
- return()
-endif(NOT BUILD_TFLITE_CLASSIFY_APP)
-
-list(APPEND SOURCES "src/tflite_classify.cc")
-list(APPEND SOURCES "src/ImageClassifier.cc")
-list(APPEND SOURCES "src/InferenceInterface.cc")
-
-## Required package
-find_package(OpenCV REQUIRED)
-find_package(Boost REQUIRED COMPONENTS system filesystem)
-
-# Without this line, this application cannot find the OpenCV libraries already installed in the ${ROOTFS_ARM}/usr/lib/arm-linux-gnueabihf directory
-set(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${ROOTFS_ARM}/usr/lib/arm-linux-gnueabihf -Wl,--rpath=${ROOTFS_ARM}/lib/arm-linux-gnueabihf")
-
-add_executable(tflite_classify ${SOURCES})
-target_include_directories(tflite_classify PRIVATE src)
-target_link_libraries(tflite_classify tensorflow-lite ${LIB_PTHREAD} dl nnfw_lib_tflite)
-target_link_libraries(tflite_classify ${Boost_LIBRARIES})
-target_link_libraries(tflite_classify ${OpenCV_LIBRARIES})
-
-install(TARGETS tflite_classify DESTINATION bin)
diff --git a/contrib/tflite_classify/src/InferenceInterface.h b/contrib/tflite_classify/src/InferenceInterface.h
deleted file mode 100644
index 06ebb3dad..000000000
--- a/contrib/tflite_classify/src/InferenceInterface.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file InferenceInterface.h
- * @brief This file contains the class for running the actual inference model
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __TFLITE_CLASSIFY_INFERENCE_INTERFACE_H__
-#define __TFLITE_CLASSIFY_INFERENCE_INTERFACE_H__
-
-#include "tflite/ext/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-
-#include "tflite/InterpreterSession.h"
-#include "tflite/NNAPISession.h"
-
-#include <iostream>
-#include <string>
-
-/**
- * @brief Class to define an inference interface for recognizing data
- */
-class InferenceInterface
-{
-public:
- /**
- * @brief Construct a new InferenceInterface object with parameters
- * @param[in] model_file The filepath of the model FlatBuffer protocol buffer
- * @param[in] use_nnapi The flag to distinguish between TfLite interpreter and NNFW runtime
- */
- InferenceInterface(const std::string &model_file, const bool use_nnapi);
-
- /**
- * @brief Destroy the InferenceInterface object
- */
- ~InferenceInterface();
-
- /**
- * @brief Copy the input data into model
- * @param[in] input_name The label of the image input node
- * @param[in] data The actual data to be copied into input tensor
- * @param[in] batch The batch size
- * @param[in] height The input height
- * @param[in] width The input width
- * @param[in] channel The number of channels
- * @return N/A
- */
- void feed(const std::string &input_name, const std::vector<float> &data, const int batch,
- const int height, const int width, const int channel);
- /**
- * @brief Run the inference call
- * @param[in] output_name The label of the output node
- * @return N/A
- */
- void run(const std::string &output_name);
-
- /**
- * @brief Copy the output tensor back into the output array
- * @param[in] output_name The label of the output node
- * @param[out] outputs The output data array
- * @return N/A
- */
- void fetch(const std::string &output_name, std::vector<float> &outputs);
-
- /**
- * @brief Get tensor size
- * @param[in] name The label of the node
- * @return The size of the tensor
- */
- int getTensorSize(const std::string &name);
-
-private:
- std::unique_ptr<tflite::Interpreter> _interpreter;
- std::unique_ptr<tflite::FlatBufferModel> _model;
- std::shared_ptr<nnfw::tflite::Session> _sess;
-};
-
-#endif // __TFLITE_CLASSIFY_INFERENCE_INTERFACE_H__
diff --git a/contrib/tflite_test/tflite_test.cpp b/contrib/tflite_test/tflite_test.cpp
deleted file mode 100644
index 4ee9e5a8d..000000000
--- a/contrib/tflite_test/tflite_test.cpp
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <tensorflow/contrib/lite/model.h>
-#include <tensorflow/core/public/session.h>
-
-#include "tflite/Assert.h"
-#include "tflite/Session.h"
-#include "tflite/InterpreterSession.h"
-#include "tflite/NNAPISession.h"
-#include "tflite/ext/kernels/register.h"
-
-#include "misc/fp32.h"
-
-#include <iostream>
-
-#include <string>
-#include <vector>
-
-#define TF_ENSURE(e) { \
- if(!(e).ok()) \
- { \
- throw std::runtime_error{"'" #e "' FAILED"}; \
- } \
-}
-
-using namespace tflite;
-using namespace tflite::ops::builtin;
-
-std::unique_ptr<FlatBufferModel> BuildModelFromFile(const std::string &path)
-{
- static StderrReporter reporter;
- return FlatBufferModel::BuildFromFile(path.c_str(), &reporter);
-}
-
-std::unique_ptr<Interpreter> BuildInterpFromModel(const std::unique_ptr<FlatBufferModel> &model)
-{
- std::unique_ptr<Interpreter> interp;
-
- BuiltinOpResolver resolver;
- InterpreterBuilder builder(*model, resolver);
-
- TFLITE_ENSURE(builder(&interp));
-
- return std::move(interp);
-}
-
-tensorflow::TensorShape asTensorflowShape(const TfLiteTensor *tensor)
-{
- tensorflow::TensorShape shape;
-
- const int rank = tensor->dims->size;
-
- for (int axis = 0; axis < rank; ++axis)
- {
- shape.AddDim(tensor->dims->data[axis]);
- }
-
- return shape;
-}
-
-uint32_t count_elements(const TfLiteTensor *tensor)
-{
- const int rank = tensor->dims->size;
-
- if (rank == 0)
- {
- return 0;
- }
-
- uint32_t res = 1;
-
- for (int axis = 0; axis < rank; ++axis)
- {
- res *= tensor->dims->data[axis];
- }
-
- return res;
-}
-
-int main(int argc, char **argv)
-{
- bool use_nnapi = false;
-
- if (std::getenv("USE_NNAPI") != nullptr)
- {
- use_nnapi = true;
- }
-
- if (argc < 3)
- {
- std::cerr << "USAGE: " << argv[0] << " [T/F lite model] [T/F model]" << std::endl;
- return 255;
- }
-
- //
- // Prepare Tensorflow Lite session
- //
- const std::string lite_model_path{argv[1]};
-
- auto lite_model = BuildModelFromFile(lite_model_path);
- auto lite_interp = BuildInterpFromModel(lite_model);
-
- std::shared_ptr<nnfw::tflite::Session> lite_sess;
-
- if (use_nnapi)
- {
- lite_sess = std::make_shared<nnfw::tflite::NNAPISession>(lite_interp.get());
- }
- else
- {
- lite_sess = std::make_shared<nnfw::tflite::InterpreterSession>(lite_interp.get());
- }
-
- //
- // Prepare Tensorflow session
- //
- const std::string full_model_path{argv[2]};
-
- tensorflow::Session* full_sess;
- tensorflow::GraphDef full_model;
-
- TF_ENSURE(tensorflow::NewSession(tensorflow::SessionOptions(), &full_sess));
- TF_ENSURE(ReadBinaryProto(tensorflow::Env::Default(), full_model_path, &full_model));
- TF_ENSURE(full_sess->Create(full_model));
-
- //
- //
- //
- std::vector<tensorflow::Tensor> input_nodes;
- std::vector<std::string> input_names;
-
- for (uint32_t n = 0; n < lite_interp->inputs().size(); ++n)
- {
- const TfLiteTensor *tensor = lite_interp->tensor(lite_interp->inputs().at(n));
-
- input_nodes.emplace_back(tensorflow::DT_FLOAT, asTensorflowShape(tensor));
- input_names.emplace_back(tensor->name);
- }
-
- assert(input_nodes.size() == input_names.size());
- assert(input_nodes.size() == lite_interp->inputs().size());
-
- std::vector<std::string> output_names;
- std::vector<tensorflow::Tensor> output_nodes;
-
- for (uint32_t n = 0; n < lite_interp->outputs().size(); ++n)
- {
- const TfLiteTensor *tensor = lite_interp->tensor(lite_interp->outputs().at(n));
-
- output_names.emplace_back(tensor->name);
- }
-
- assert(output_names.size() == lite_interp->outputs().size());
- // output_nodes will be initialized after Tensorflow Session run
- assert(output_nodes.size() == 0);
-
- //
- // Prepare inference
- //
- lite_sess->prepare();
-
- // TODO Feed Inputs (for both Tensorflow and Tensorflow Lite)
- std::vector<std::pair<std::string, tensorflow::Tensor>> inputs;
-
- for (uint32_t n = 0; n < input_nodes.size(); ++n)
- {
- inputs.emplace_back(input_names.at(n), input_nodes.at(n));
- }
-
- //
- // Run inference
- //
- TF_ENSURE(full_sess->Run(inputs, output_names, {}, &output_nodes));
-
- lite_sess->run();
-
- //
- // Compare Output
- //
- auto equals = [] (float lhs, float rhs) {
- // TODO Allow users to set tolerance
- if (nnfw::misc::fp32::absolute_epsilon_equal(lhs, rhs))
- {
- return true;
- }
-
- return nnfw::misc::fp32::epsilon_equal(lhs, rhs);
- };
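- // Presumably absolute_epsilon_equal covers values near zero while
- // epsilon_equal applies a relative tolerance; a pair matches if either
- // test passes.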
-
- const uint32_t output_count = output_names.size();
-
- bool matched = true;
-
- for (uint32_t n = 0; n < output_count; ++n)
- {
- const TfLiteTensor *tensor = lite_interp->tensor(lite_interp->outputs().at(n));
-
- // TODO Compare shape
-
- const auto element_count = count_elements(tensor);
-
- std::cout << "Compare output #" << n << "(" << tensor->name << ", " << element_count << " elements)" << std::endl;
- for (uint32_t index = 0; index < element_count; ++index)
- {
- const auto full_value = output_nodes.at(n).flat<float>().data()[index];
- const auto lite_value = lite_sess->interp()->typed_output_tensor<float>(n)[index];
-
- if (!equals(full_value, lite_value))
- {
- std::cerr << full_value << " is expected, but " << lite_value << " is obtained (at " << n << ":" << index << ")" << std::endl;
- matched = false;
- }
- }
- }
-
- //
- // Cleanup
- //
- lite_sess->teardown();
-
- return matched ? 0 : 255;
-}
diff --git a/docs/HowToContribute.md b/docs/HowToContribute.md
index e62666998..c6f89c3cf 100644
--- a/docs/HowToContribute.md
+++ b/docs/HowToContribute.md
@@ -19,8 +19,8 @@ This section explains the steps to create a pull request (PR).
1. Create an issue
- Maintainers will accept your contribution only when it is well aligned with the [roadmap and
- design principles](./roadmap.md) of _nnfw_. So, it is optional, but recommended for contributors
+ Maintainers will accept your contribution only when it is well aligned with the roadmap and
+ design principles of [_nnfw_](./nnfw/roadmap.md) and [_nncc_](./nncc/roadmap.md). So, it is optional, but recommended for contributors
to create an issue and have a discussion with maintainers before writing code.
1. Create a draft PR
@@ -53,10 +53,16 @@ This section explains the steps to create a pull request (PR).
1. Request review
- Please assign reviewers if you need review from them. Maintainers will honor your review request,
- and accept your pull request only when all the reviewer approve your pull request. Note that this
- does **NOT** mean that you should assign reviewers. Maintainers (or reviewers) will review your
- pull request even without explicit review request.
+ It is recommended to assign reviewers yourself. Maintainers will honor your review request,
+ and accept your pull request only when
+
+ - Approved by 1+ reviewers
+ - 0 rejections (Request Changes)
+ - 0 pending review requests
+ - All requested reviewers have approved your pull request
+
+ You can add/remove pending review requests in the middle of the review process. Maintainers
+ (or reviewers) may review your pull request even without an explicit review request.
1. Update per feedback
@@ -64,9 +70,3 @@ This section explains the steps to create a pull request (PR).
your pull request upon such feedback. These update commits will be squashed into the first
commit of your pull request later. Please do **NOT** include a sign-off message or write a full
description for update commits.
-
-
-# Note
-
-This document is originated from the [contribution guide in
-nncc](https://github.sec.samsung.net/STAR/nncc/blob/master/doc/contribution_guide.md).
diff --git a/docs/doxygen/Doxyfile b/docs/doxygen/Doxyfile
deleted file mode 100644
index 632282770..000000000
--- a/docs/doxygen/Doxyfile
+++ /dev/null
@@ -1,2500 +0,0 @@
-# Doxyfile 1.8.13
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project.
-#
-# All text after a double hash (##) is considered a comment and is placed in
-# front of the TAG it is preceding.
-#
-# All text after a single hash (#) is considered a comment and will be ignored.
-# The format is:
-# TAG = value [value, ...]
-# For lists, items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (\" \").
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file
-# that follow. The default is UTF-8 which is also the encoding used for all text
-# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
-# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
-# for the list of possible encodings.
-# The default value is: UTF-8.
-
-DOXYFILE_ENCODING = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
-# double-quotes, unless you are using Doxywizard) that should identify the
-# project for which the documentation is generated. This name is used in the
-# title of most generated pages and in a few other places.
-# The default value is: My Project.
-
-PROJECT_NAME = nnfw
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
-# could be handy for archiving the generated documentation or if some version
-# control system is used.
-
-PROJECT_NUMBER =
-
-# Using the PROJECT_BRIEF tag one can provide an optional one line description
-# for a project that appears at the top of each page and should give the viewer a
-# quick idea about the purpose of the project. Keep the description short.
-
-PROJECT_BRIEF =
-
-# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
-# in the documentation. The maximum height of the logo should not exceed 55
-# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
-# the logo to the output directory.
-
-PROJECT_LOGO =
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
-# into which the generated documentation will be written. If a relative path is
-# entered, it will be relative to the location where doxygen was started. If
-# left blank the current directory will be used.
-
-OUTPUT_DIRECTORY =
-
-# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
-# directories (in 2 levels) under the output directory of each output format and
-# will distribute the generated files over these directories. Enabling this
-# option can be useful when feeding doxygen a huge amount of source files, where
-# putting all generated files in the same directory would otherwise cause
-# performance problems for the file system.
-# The default value is: NO.
-
-CREATE_SUBDIRS = NO
-
-# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
-# characters to appear in the names of generated files. If set to NO, non-ASCII
-# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
-# U+3044.
-# The default value is: NO.
-
-ALLOW_UNICODE_NAMES = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
-# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
-# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
-# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
-# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
-# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
-# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
-# Ukrainian and Vietnamese.
-# The default value is: English.
-
-OUTPUT_LANGUAGE = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
-# descriptions after the members that are listed in the file and class
-# documentation (similar to Javadoc). Set to NO to disable this.
-# The default value is: YES.
-
-BRIEF_MEMBER_DESC = YES
-
-# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
-# description of a member or function before the detailed description
-#
-# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-# The default value is: YES.
-
-REPEAT_BRIEF = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator that is
-# used to form the text in various listings. Each string in this list, if found
-# as the leading text of the brief description, will be stripped from the text
-# and the result, after processing the whole list, is used as the annotated
-# text. Otherwise, the brief description is used as-is. If left blank, the
-# following values are used ($name is automatically replaced with the name of
-# the entity):The $name class, The $name widget, The $name file, is, provides,
-# specifies, contains, represents, a, an and the.
-
-ABBREVIATE_BRIEF = "The $name class" \
- "The $name widget" \
- "The $name file" \
- is \
- provides \
- specifies \
- contains \
- represents \
- a \
- an \
- the
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# doxygen will generate a detailed section even if there is only a brief
-# description.
-# The default value is: NO.
-
-ALWAYS_DETAILED_SEC = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-# The default value is: NO.
-
-INLINE_INHERITED_MEMB = NO
-
-# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
-# before files name in the file list and in the header files. If set to NO the
-# shortest path that makes the file name unique will be used
-# The default value is: YES.
-
-FULL_PATH_NAMES = YES
-
-# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
-# Stripping is only done if one of the specified strings matches the left-hand
-# part of the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the path to
-# strip.
-#
-# Note that you can specify absolute paths here, but also relative paths, which
-# will be relative from the directory where doxygen is started.
-# This tag requires that the tag FULL_PATH_NAMES is set to YES.
-
-STRIP_FROM_PATH = ../../../nnfw
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
-# path mentioned in the documentation of a class, which tells the reader which
-# header file to include in order to use a class. If left blank only the name of
-# the header file containing the class definition is used. Otherwise one should
-# specify the list of include paths that are normally passed to the compiler
-# using the -I flag.
-
-STRIP_FROM_INC_PATH =
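-
-# A minimal sketch (the include directory below is hypothetical for this
-# project): stripping a common prefix so that only the project-relative part
-# of each include path is shown in the documentation:
-#
-# STRIP_FROM_INC_PATH = ../../../nnfw/include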
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
-# less readable) file names. This can be useful if your file system doesn't
-# support long names, as on DOS, Mac, or CD-ROM.
-# The default value is: NO.
-
-SHORT_NAMES = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
-# first line (until the first dot) of a Javadoc-style comment as the brief
-# description. If set to NO, the Javadoc-style will behave just like regular Qt-
-# style comments (thus requiring an explicit @brief command for a brief
-# description.)
-# The default value is: NO.
-
-JAVADOC_AUTOBRIEF = NO
-
-# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
-# line (until the first dot) of a Qt-style comment as the brief description. If
-# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
-# requiring an explicit \brief command for a brief description.)
-# The default value is: NO.
-
-QT_AUTOBRIEF = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
-# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
-# a brief description. This used to be the default behavior. The new default is
-# to treat a multi-line C++ comment block as a detailed description. Set this
-# tag to YES if you prefer the old behavior instead.
-#
-# Note that setting this tag to YES also means that Rational Rose comments are
-# not recognized any more.
-# The default value is: NO.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
-# documentation from any documented member that it re-implements.
-# The default value is: YES.
-
-INHERIT_DOCS = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
-# page for each member. If set to NO, the documentation of a member will be part
-# of the file/class/namespace that contains it.
-# The default value is: NO.
-
-SEPARATE_MEMBER_PAGES = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
-# uses this value to replace tabs by spaces in code fragments.
-# Minimum value: 1, maximum value: 16, default value: 4.
-
-TAB_SIZE = 4
-
-# This tag can be used to specify a number of aliases that act as commands in
-# the documentation. An alias has the form:
-# name=value
-# For example adding
-# "sideeffect=@par Side Effects:\n"
-# will allow you to put the command \sideeffect (or @sideeffect) in the
-# documentation, which will result in a user-defined paragraph with heading
-# "Side Effects:". You can put \n's in the value part of an alias to insert
-# newlines.
-
-ALIASES =
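-
-# For example, defining the \sideeffect alias described in the comment block
-# above would look like this:
-#
-# ALIASES = "sideeffect=@par Side Effects:\n"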
-
-# This tag can be used to specify a number of word-keyword mappings (TCL only).
-# A mapping has the form "name=value". For example adding "class=itcl::class"
-# will allow you to use the command class in the itcl::class meaning.
-
-TCL_SUBST =
-
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
-# only. Doxygen will then generate output that is more tailored for C. For
-# instance, some of the names that are used will be different. The list of all
-# members will be omitted, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_FOR_C = NO
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
-# Python sources only. Doxygen will then generate output that is more tailored
-# for that language. For instance, namespaces will be presented as packages,
-# qualified scopes will look different, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_JAVA = NO
-
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
-# sources. Doxygen will then generate output that is tailored for Fortran.
-# The default value is: NO.
-
-OPTIMIZE_FOR_FORTRAN = NO
-
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
-# sources. Doxygen will then generate output that is tailored for VHDL.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_VHDL = NO
-
-# Doxygen selects the parser to use depending on the extension of the files it
-# parses. With this tag you can assign which parser to use for a given
-# extension. Doxygen has a built-in mapping, but you can override or extend it
-# using this tag. The format is ext=language, where ext is a file extension, and
-# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
-# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
-# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
-# Fortran. In the latter case the parser tries to guess whether the code is fixed
-# or free formatted code; this is the default for Fortran type files), VHDL. For
-# instance to make doxygen treat .inc files as Fortran files (default is PHP),
-# and .f files as C (default is Fortran), use: inc=Fortran f=C.
-#
-# Note: For files without extension you can use no_extension as a placeholder.
-#
-# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
-# the files are not read by doxygen.
-
-EXTENSION_MAPPING =
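-
-# For example, the mapping mentioned above (treat .inc files as Fortran and .f
-# files as C) would be written as:
-#
-# EXTENSION_MAPPING = inc=Fortran f=C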
-
-# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
-# according to the Markdown format, which allows for more readable
-# documentation. See http://daringfireball.net/projects/markdown/ for details.
-# The output of markdown processing is further processed by doxygen, so you can
-# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
-# case of backward compatibility issues.
-# The default value is: YES.
-
-MARKDOWN_SUPPORT = YES
-
-# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
-# to that level are automatically included in the table of contents, even if
-# they do not have an id attribute.
-# Note: This feature currently applies only to Markdown headings.
-# Minimum value: 0, maximum value: 99, default value: 0.
-# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
-
-TOC_INCLUDE_HEADINGS = 0
-
-# When enabled doxygen tries to link words that correspond to documented
-# classes, or namespaces to their corresponding documentation. Such a link can
-# be prevented in individual cases by putting a % sign in front of the word or
-# globally by setting AUTOLINK_SUPPORT to NO.
-# The default value is: YES.
-
-AUTOLINK_SUPPORT = YES
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
-# to include (a tag file for) the STL sources as input, then you should set this
-# tag to YES in order to let doxygen match function declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string);
-# versus func(std::string) {}). This also makes the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-# The default value is: NO.
-
-BUILTIN_STL_SUPPORT = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-# The default value is: NO.
-
-CPP_CLI_SUPPORT = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
-# will parse them like normal C++ but will assume all classes use public instead
-# of private inheritance when no explicit protection keyword is present.
-# The default value is: NO.
-
-SIP_SUPPORT = NO
-
-# For Microsoft's IDL there are propget and propput attributes to indicate
-# getter and setter methods for a property. Setting this option to YES will make
-# doxygen replace the get and set methods by a property in the documentation.
-# This will only work if the methods are indeed getting or setting a simple
-# type. If this is not the case, or you want to show the methods anyway, you
-# should set this option to NO.
-# The default value is: YES.
-
-IDL_PROPERTY_SUPPORT = YES
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-# The default value is: NO.
-
-DISTRIBUTE_GROUP_DOC = NO
-
-# If one adds a struct or class to a group and this option is enabled, then also
-# any nested class or struct is added to the same group. By default this option
-# is disabled and one has to add nested compounds explicitly via \ingroup.
-# The default value is: NO.
-
-GROUP_NESTED_COMPOUNDS = NO
-
-# Set the SUBGROUPING tag to YES to allow class member groups of the same type
-# (for instance a group of public functions) to be put as a subgroup of that
-# type (e.g. under the Public Functions section). Set it to NO to prevent
-# subgrouping. Alternatively, this can be done per class using the
-# \nosubgrouping command.
-# The default value is: YES.
-
-SUBGROUPING = YES
-
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
-# are shown inside the group in which they are included (e.g. using \ingroup)
-# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
-# and RTF).
-#
-# Note that this feature does not work in combination with
-# SEPARATE_MEMBER_PAGES.
-# The default value is: NO.
-
-INLINE_GROUPED_CLASSES = NO
-
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
-# with only public data fields or simple typedef fields will be shown inline in
-# the documentation of the scope in which they are defined (i.e. file,
-# namespace, or group documentation), provided this scope is documented. If set
-# to NO, structs, classes, and unions are shown on a separate page (for HTML and
-# Man pages) or section (for LaTeX and RTF).
-# The default value is: NO.
-
-INLINE_SIMPLE_STRUCTS = NO
-
-# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
-# enum is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically be
-# useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-# The default value is: NO.
-
-TYPEDEF_HIDES_STRUCT = NO
-
-# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
-# cache is used to resolve symbols given their name and scope. Since this can be
-# an expensive process and often the same symbol appears multiple times in the
-# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
-# doxygen will become slower. If the cache is too large, memory is wasted. The
-# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
-# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
-# symbols. At the end of a run doxygen will report the cache usage and suggest
-# the optimal cache size from a speed point of view.
-# Minimum value: 0, maximum value: 9, default value: 0.
-
-LOOKUP_CACHE_SIZE = 2
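-
-# With the value 2 used here, the cache holds 2^(16+2) = 2^18 = 262144
-# symbols, four times the default size of 2^16 = 65536.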
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
-# documentation are documented, even if no documentation was available. Private
-# class members and static file members will be hidden unless the
-# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
-# Note: This will also disable the warnings about undocumented members that are
-# normally produced when WARNINGS is set to YES.
-# The default value is: NO.
-
-EXTRACT_ALL = YES
-
-# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
-# be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIVATE = NO
-
-# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
-# scope will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PACKAGE = NO
-
-# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
-# included in the documentation.
-# The default value is: NO.
-
-EXTRACT_STATIC = NO
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
-# locally in source files will be included in the documentation. If set to NO,
-# only classes defined in header files are included. Does not have any effect
-# for Java sources.
-# The default value is: YES.
-
-EXTRACT_LOCAL_CLASSES = YES
-
-# This flag is only useful for Objective-C code. If set to YES, local methods,
-# which are defined in the implementation section but not in the interface, are
-# included in the documentation. If set to NO, only methods in the interface are
-# included.
-# The default value is: NO.
-
-EXTRACT_LOCAL_METHODS = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base name of
-# the file that contains the anonymous namespace. By default, anonymous
-# namespaces are hidden.
-# The default value is: NO.
-
-EXTRACT_ANON_NSPACES = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
-# undocumented members inside documented classes or files. If set to NO these
-# members will be included in the various overviews, but no documentation
-# section is generated. This option has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_MEMBERS = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy. If set
-# to NO, these classes will be included in the various overviews. This option
-# has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_CLASSES = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# (class|struct|union) declarations. If set to NO, these declarations will be
-# included in the documentation.
-# The default value is: NO.
-
-HIDE_FRIEND_COMPOUNDS = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
-# documentation blocks found inside the body of a function. If set to NO, these
-# blocks will be appended to the function's detailed documentation block.
-# The default value is: NO.
-
-HIDE_IN_BODY_DOCS = NO
-
-# The INTERNAL_DOCS tag determines if documentation that is typed after a
-# \internal command is included. If the tag is set to NO then the documentation
-# will be excluded. Set it to YES to include the internal documentation.
-# The default value is: NO.
-
-INTERNAL_DOCS = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES, upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-# The default value is: system dependent.
-
-CASE_SENSE_NAMES = NO
-
-# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
-# their full class and namespace scopes in the documentation. If set to YES, the
-# scope will be hidden.
-# The default value is: NO.
-
-HIDE_SCOPE_NAMES = NO
-
-# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
-# append additional text to a page's title, such as Class Reference. If set to
-# YES the compound reference will be hidden.
-# The default value is: NO.
-
-HIDE_COMPOUND_REFERENCE= NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
-# the files that are included by a file in the documentation of that file.
-# The default value is: YES.
-
-SHOW_INCLUDE_FILES = YES
-
-# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
-# grouped member an include statement to the documentation, telling the reader
-# which file to include in order to use the member.
-# The default value is: NO.
-
-SHOW_GROUPED_MEMB_INC = NO
-
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
-# files with double quotes in the documentation rather than with angle brackets.
-# The default value is: NO.
-
-FORCE_LOCAL_INCLUDES = NO
-
-# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
-# documentation for inline members.
-# The default value is: YES.
-
-INLINE_INFO = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
-# (detailed) documentation of file and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order.
-# The default value is: YES.
-
-SORT_MEMBER_DOCS = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
-# descriptions of file, namespace and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order. Note that
-# this will also influence the order of the classes in the class list.
-# The default value is: NO.
-
-SORT_BRIEF_DOCS = NO
-
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
-# (brief and detailed) documentation of class members so that constructors and
-# destructors are listed first. If set to NO the constructors will appear in the
-# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
-# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
-# member documentation.
-# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
-# detailed member documentation.
-# The default value is: NO.
-
-SORT_MEMBERS_CTORS_1ST = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
-# of group names into alphabetical order. If set to NO the group names will
-# appear in their defined order.
-# The default value is: NO.
-
-SORT_GROUP_NAMES = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
-# fully-qualified names, including namespaces. If set to NO, the class list will
-# be sorted only by class name, not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the alphabetical
-# list.
-# The default value is: NO.
-
-SORT_BY_SCOPE_NAME = NO
-
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
-# type resolution of all parameters of a function, it will reject a match between
-# the prototype and the implementation of a member function even if there is
-# only one candidate or it is obvious which candidate to choose by doing a
-# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
-# accept a match between prototype and implementation in such cases.
-# The default value is: NO.
-
-STRICT_PROTO_MATCHING = NO
-
-# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
-# list. This list is created by putting \todo commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TODOLIST = YES
-
-# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
-# list. This list is created by putting \test commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TESTLIST = YES
-
-# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
-# list. This list is created by putting \bug commands in the documentation.
-# The default value is: YES.
-
-GENERATE_BUGLIST = YES
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
-# the deprecated list. This list is created by putting \deprecated commands in
-# the documentation.
-# The default value is: YES.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional documentation
-# sections, marked by \if <section_label> ... \endif and \cond <section_label>
-# ... \endcond blocks.
-
-ENABLED_SECTIONS =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
-# initial value of a variable or macro / define can have for it to appear in the
-# documentation. If the initializer consists of more lines than specified here
-# it will be hidden. Use a value of 0 to hide initializers completely. The
-# appearance of the value of individual variables and macros / defines can be
-# controlled using \showinitializer or \hideinitializer command in the
-# documentation regardless of this setting.
-# Minimum value: 0, maximum value: 10000, default value: 30.
-
-MAX_INITIALIZER_LINES = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
-# the bottom of the documentation of classes and structs. If set to YES, the
-# list will mention the files that were used to generate the documentation.
-# The default value is: YES.
-
-SHOW_USED_FILES = YES
-
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
-# will remove the Files entry from the Quick Index and from the Folder Tree View
-# (if specified).
-# The default value is: YES.
-
-SHOW_FILES = YES
-
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
-# page. This will remove the Namespaces entry from the Quick Index and from the
-# Folder Tree View (if specified).
-# The default value is: YES.
-
-SHOW_NAMESPACES = YES
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command: <command> <input-file>, where <command> is the value of
-# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
-# provided by doxygen. Whatever the program writes to standard output is used as
-# the file version. For an example see the documentation.
-
-FILE_VERSION_FILTER =
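-
-# A minimal sketch, assuming the sources live in a git checkout (doxygen
-# appends the input file name to the command, so each file would be versioned
-# by the short hash of the last commit that touched it):
-#
-# FILE_VERSION_FILTER = "git log -n 1 --pretty=format:%h --"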
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
-# by doxygen. The layout file controls the global structure of the generated
-# output files in an output format independent way. To create the layout file
-# that represents doxygen's defaults, run doxygen with the -l option. You can
-# optionally specify a file name after the option, if omitted DoxygenLayout.xml
-# will be used as the name of the layout file.
-#
-# Note that if you run doxygen from a directory containing a file called
-# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
-# tag is left empty.
-
-LAYOUT_FILE =
-
-# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
-# the reference definitions. This must be a list of .bib files. The .bib
-# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
-# For LaTeX the style of the bibliography can be controlled using
-# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
-# search path. See also \cite for info how to create references.
-
-CITE_BIB_FILES =
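-
-# For example (refs.bib is a hypothetical file name): entries from the bib
-# file could then be referenced in comments with \cite <label>:
-#
-# CITE_BIB_FILES = refs.bib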
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated to
-# standard output by doxygen. If QUIET is set to YES this implies that the
-# messages are off.
-# The default value is: NO.
-
-QUIET = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
-# this implies that the warnings are on.
-#
-# Tip: Turn warnings on while writing the documentation.
-# The default value is: YES.
-
-WARNINGS = YES
-
-# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
-# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
-# will automatically be disabled.
-# The default value is: YES.
-
-WARN_IF_UNDOCUMENTED = YES
-
-# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some parameters
-# in a documented function, or documenting parameters that don't exist or using
-# markup commands wrongly.
-# The default value is: YES.
-
-WARN_IF_DOC_ERROR = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
-# are documented, but have no documentation for their parameters or return
-# value. If set to NO, doxygen will only warn about wrong or incomplete
-# parameter documentation, but not about the absence of documentation.
-# The default value is: NO.
-
-WARN_NO_PARAMDOC = NO
-
-# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
-# a warning is encountered.
-# The default value is: NO.
-
-WARN_AS_ERROR = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that doxygen
-# can produce. The string should contain the $file, $line, and $text tags, which
-# will be replaced by the file and line number from which the warning originated
-# and the warning text. Optionally the format may contain $version, which will
-# be replaced by the version of the file (if it could be obtained via
-# FILE_VERSION_FILTER)
-# The default value is: $file:$line: $text.
-
-WARN_FORMAT = "$file:$line: $text"
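-
-# As an alternative sketch, a Visual-Studio-style format that many IDEs can
-# parse for click-through navigation to the offending line:
-#
-# WARN_FORMAT = "$file($line): $text"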
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning and error
-# messages should be written. If left blank the output is written to standard
-# error (stderr).
-
-WARN_LOGFILE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag is used to specify the files and/or directories that contain
-# documented source files. You may enter file names like myfile.cpp or
-# directories like /usr/src/myproject. Separate the files or directories with
-# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
-# Note: If this tag is empty the current directory is searched.
-
-INPUT = ../../../nnfw
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
-# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: http://www.gnu.org/software/libiconv) for the list of
-# possible encodings.
-# The default value is: UTF-8.
-
-INPUT_ENCODING = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# read by doxygen.
-#
-# If left blank the following patterns are tested: *.c, *.cc, *.cxx, *.cpp,
-# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
-# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
-# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
-# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf.
-
-FILE_PATTERNS = *.c \
- *.cc \
- *.cxx \
- *.cpp \
- *.c++ \
- *.java \
- *.ii \
- *.ixx \
- *.ipp \
- *.i++ \
- *.inl \
- *.idl \
- *.ddl \
- *.odl \
- *.h \
- *.hh \
- *.hxx \
- *.hpp \
- *.h++ \
- *.cs \
- *.d \
- *.php \
- *.php4 \
- *.php5 \
- *.phtml \
- *.inc \
- *.m \
- *.markdown \
- *.md \
- *.mm \
- *.dox \
- *.py \
- *.pyw \
- *.f90 \
- *.f95 \
- *.f03 \
- *.f08 \
- *.f \
- *.for \
- *.tcl \
- *.vhd \
- *.vhdl \
- *.ucf \
- *.qsf
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories should
-# be searched for input files as well.
-# The default value is: NO.
-
-RECURSIVE = YES
-
-# The EXCLUDE tag can be used to specify files and/or directories that should be
-# excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-#
-# Note that relative paths are relative to the directory from which doxygen is
-# run.
-
-EXCLUDE = ../../../nnfw/Product \
- ../../../nnfw/tools/cross/rootfs \
- ../../../nnfw/externals \
- ../../../nnfw/externals/acl \
- ../../../nnfw/externals/tensorflow \
- ../../../nnfw/tests/framework/cache \
- ../../../nnfw/runtimes/tests/neural_networks_test/generated/models \
- .caffemodel \
- .bin
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix file system feature) are excluded
-# from the input.
-# The default value is: NO.
-
-EXCLUDE_SYMLINKS = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories.
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories, for example, use the pattern */test/*
-
-EXCLUDE_PATTERNS =
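-
-# For example, combining the */test/* pattern from the note above with a
-# hypothetical pattern for generated sources:
-#
-# EXCLUDE_PATTERNS = */test/* */generated/*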
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories use the pattern */test/*
-
-EXCLUDE_SYMBOLS =
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or directories
-# that contain example code fragments that are included (see the \include
-# command).
-
-EXAMPLE_PATH =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank all
-# files are included.
-
-EXAMPLE_PATTERNS = *
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude commands
-# irrespective of the value of the RECURSIVE tag.
-# The default value is: NO.
-
-EXAMPLE_RECURSIVE = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or directories
-# that contain images that are to be included in the documentation (see the
-# \image command).
-
-IMAGE_PATH =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command:
-#
-# <filter> <input-file>
-#
-# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
-# name of an input file. Doxygen will then use the output that the filter
-# program writes to standard output. If FILTER_PATTERNS is specified, this tag
-# will be ignored.
-#
-# Note that the filter must not add or remove lines; it is applied before the
-# code is scanned, but not when the output code is generated. If lines are added
-# or removed, the anchors will not be placed correctly.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# properly processed by doxygen.
-
-INPUT_FILTER =
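-
-# A minimal sketch (the sed expression is illustrative only): a filter that
-# rewrites a marker in place without adding or removing lines, as the note
-# above requires:
-#
-# INPUT_FILTER = "sed -e 's/DEPRECATED/@deprecated/'"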
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form: pattern=filter
-# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
-# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
-# patterns match the file name, INPUT_FILTER is applied.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# properly processed by doxygen.
-
-FILTER_PATTERNS =
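-
-# A sketch of per-extension filtering (py_filter is a hypothetical script on
-# the search path): Python files would be piped through it, and all other
-# files through INPUT_FILTER:
-#
-# FILTER_PATTERNS = *.py=py_filter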
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER) will also be used to filter the input files that are used for
-# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
-# The default value is: NO.
-
-FILTER_SOURCE_FILES = NO
-
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
-# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
-# it is also possible to disable source filtering for a specific pattern using
-# *.ext= (so without naming a filter).
-# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
-
-FILTER_SOURCE_PATTERNS =
-
-# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
-# is part of the input, its contents will be placed on the main page
-# (index.html). This can be useful if you have a project on GitHub, for
-# instance, and want to reuse the introduction page also for the doxygen output.
-
-USE_MDFILE_AS_MAINPAGE = roadmap.md
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
-# generated. Documented entities will be cross-referenced with these sources.
-#
-# Note: To get rid of all source code in the generated output, make sure that
-# also VERBATIM_HEADERS is set to NO.
-# The default value is: NO.
-
-SOURCE_BROWSER = YES
-
-# Setting the INLINE_SOURCES tag to YES will include the body of functions,
-# classes and enums directly into the documentation.
-# The default value is: NO.
-
-INLINE_SOURCES = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
-# special comment blocks from generated source code fragments. Normal C, C++ and
-# Fortran comments will always remain visible.
-# The default value is: YES.
-
-STRIP_CODE_COMMENTS = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
-# function all documented functions referencing it will be listed.
-# The default value is: NO.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES then for each documented function
-# all documented entities called/used by that function will be listed.
-# The default value is: NO.
-
-REFERENCES_RELATION = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
-# to YES then the hyperlinks from functions in REFERENCES_RELATION and
-# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
-# link to the documentation.
-# The default value is: YES.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
-# source code will show a tooltip with additional information such as prototype,
-# brief description and links to the definition and documentation. Since this
-# will make the HTML file larger and loading of large files a bit slower, you
-# can opt to disable this feature.
-# The default value is: YES.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-SOURCE_TOOLTIPS = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code will
-# point to the HTML generated by the htags(1) tool instead of doxygen built-in
-# source browser. The htags tool is part of GNU's global source tagging system
-# (see http://www.gnu.org/software/global/global.html). You will need version
-# 4.8.6 or higher.
-#
-# To use it do the following:
-# - Install the latest version of global
-# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
-# - Make sure the INPUT points to the root of the source tree
-# - Run doxygen as normal
-#
-# Doxygen will invoke htags (and that will in turn invoke gtags), so these
-# tools must be available from the command line (i.e. in the search path).
-#
-# The result: instead of the source browser generated by doxygen, the links to
-# source code will now point to the output of htags.
-# The default value is: NO.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-USE_HTAGS = NO
-
-# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
-# verbatim copy of the header file for each class for which an include is
-# specified. Set to NO to disable this.
-# See also: Section \class.
-# The default value is: YES.
-
-VERBATIM_HEADERS = YES
-
-# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the
-# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the
-# cost of reduced performance. This can be particularly helpful with template
-# rich C++ code for which doxygen's built-in parser lacks the necessary type
-# information.
-# Note: The availability of this option depends on whether or not doxygen was
-# generated with the -Duse-libclang=ON option for CMake.
-# The default value is: NO.
-
-CLANG_ASSISTED_PARSING = NO
-
-# If clang assisted parsing is enabled you can provide the compiler with command
-# line options that you would normally use when invoking the compiler. Note that
-# the include paths will already be set by doxygen for the files and directories
-# specified with INPUT and INCLUDE_PATH.
-# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
-
-CLANG_OPTIONS =
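-
-# A minimal sketch, assuming C++11 sources (the flag below is hypothetical for
-# this project):
-#
-# CLANG_OPTIONS = -std=c++11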
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
-# compounds will be generated. Enable this if the project contains a lot of
-# classes, structs, unions or interfaces.
-# The default value is: YES.
-
-ALPHABETICAL_INDEX = YES
-
-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX = 5
-
-# In case all classes in a project start with a common prefix, all classes will
-# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
-# can be used to specify a prefix (or a list of prefixes) that should be ignored
-# while generating the index headers.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-IGNORE_PREFIX =
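-
-# For instance, if most classes carried a hypothetical "Nnfw" prefix, the
-# following would index NnfwTensor under T rather than N:
-#
-# IGNORE_PREFIX = Nnfw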
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
-# The default value is: YES.
-
-GENERATE_HTML = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_OUTPUT = html
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
-# generated HTML page (for example: .htm, .php, .asp).
-# The default value is: .html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FILE_EXTENSION = .html
-
-# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
-# each generated HTML page. If the tag is left blank doxygen will generate a
-# standard header.
-#
-# To get valid HTML, the header file must include any scripts and style sheets
-# that doxygen needs, which depend on the configuration options used (e.g.
-# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
-# default header using
-# doxygen -w html new_header.html new_footer.html new_stylesheet.css
-# YourConfigFile
-# and then modify the file new_header.html. See also section "Doxygen usage"
-# for information on how to generate the default header that doxygen normally
-# uses.
-# Note: The header is subject to change so you typically have to regenerate the
-# default header when upgrading to a newer version of doxygen. For a description
-# of the possible markers and block names see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_HEADER =
-
-# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
-# generated HTML page. If the tag is left blank doxygen will generate a standard
-# footer. See HTML_HEADER for more information on how to generate a default
-# footer and what special commands can be used inside the footer. See also
-# section "Doxygen usage" for information on how to generate the default footer
-# that doxygen normally uses.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FOOTER =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
-# sheet that is used by each HTML page. It can be used to fine-tune the look of
-# the HTML output. If left blank doxygen will generate a default style sheet.
-# See also section "Doxygen usage" for information on how to generate the style
-# sheet that doxygen normally uses.
-# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
-# it is more robust and this tag (HTML_STYLESHEET) will in the future become
-# obsolete.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_STYLESHEET =
-
-# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
-# cascading style sheets that are included after the standard style sheets
-# created by doxygen. Using this option one can overrule certain style aspects.
-# This is preferred over using HTML_STYLESHEET since it does not replace the
-# standard style sheet and is therefore more robust against future updates.
-# Doxygen will copy the style sheet files to the output directory.
-# Note: The order of the extra style sheet files is of importance (e.g. the last
-# style sheet in the list overrules the setting of the previous ones in the
-# list). For an example see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_STYLESHEET =
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the HTML output directory. Note
-# that these files will be copied to the base HTML output directory. Use the
-# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
-# files will be copied as-is; there are no commands or markers available.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_FILES =
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
-# will adjust the colors in the style sheet and background images according to
-# this color. Hue is specified as an angle on a colorwheel, see
-# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
-# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
-# is purple, and 360 is red again.
-# Minimum value: 0, maximum value: 359, default value: 220.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_HUE = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
-# in the HTML output. For a value of 0 the output will use grayscales only. A
-# value of 255 will produce the most vivid colors.
-# Minimum value: 0, maximum value: 255, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_SAT = 100
-
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
-# luminance component of the colors in the HTML output. Values below 100
-# gradually make the output lighter, whereas values above 100 make the output
-# darker. The value divided by 100 is the actual gamma applied, so 80 represents
-# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
-# change the gamma.
-# Minimum value: 40, maximum value: 240, default value: 80.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_GAMMA = 80
-
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to YES can help to show when doxygen was last run and thus if the
-# documentation is up to date.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP = NO
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_SECTIONS = NO
-
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
-# shown in the various tree structured indices initially; the user can expand
-# and collapse entries dynamically later on. Doxygen will expand the tree to
-# such a level that at most the specified number of entries are visible (unless
-# a fully collapsed tree already exceeds this amount). So setting the number of
-# entries to 1 will produce a fully collapsed tree by default. 0 is a special
-# value representing an infinite number of entries and will result in a fully
-# expanded tree by default.
-# Minimum value: 0, maximum value: 9999, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_INDEX_NUM_ENTRIES = 100
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files will be
-# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: http://developer.apple.com/tools/xcode/), introduced with
-# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
-# for more information.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_DOCSET = NO
-
-# This tag determines the name of the docset feed. A documentation feed provides
-# an umbrella under which multiple documentation sets from a single provider
-# (such as a company or product suite) can be grouped.
-# The default value is: Doxygen generated docs.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_FEEDNAME = "Doxygen generated docs"
-
-# This tag specifies a string that should uniquely identify the documentation
-# set bundle. This should be a reverse domain-name style string, e.g.
-# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_BUNDLE_ID = org.doxygen.Project
-
-# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
-# the documentation publisher. This should be a reverse domain-name style
-# string, e.g. com.mycompany.MyDocSet.documentation.
-# The default value is: org.doxygen.Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_ID = org.doxygen.Publisher
-
-# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
-# The default value is: Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_NAME = Publisher
-
-# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
-# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
-# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
-#
-# The HTML Help Workshop contains a compiler that can convert all HTML output
-# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
-# files are now used as the Windows 98 help format, and will replace the old
-# Windows help format (.hlp) on all Windows platforms in the future. Compressed
-# HTML files also contain an index, a table of contents, and you can search for
-# words in the documentation. The HTML workshop also contains a viewer for
-# compressed HTML files.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_HTMLHELP = NO
-
-# The CHM_FILE tag can be used to specify the file name of the resulting .chm
-# file. You can add a path in front of the file if the result should not be
-# written to the html output directory.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_FILE =
-
-# The HHC_LOCATION tag can be used to specify the location (absolute path
-# including file name) of the HTML help compiler (hhc.exe). If non-empty,
-# doxygen will try to run the HTML help compiler on the generated index.hhp.
-# The file has to be specified with full path.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-HHC_LOCATION =
-
-# The GENERATE_CHI flag controls whether a separate .chi index file is
-# generated (YES) or included in the master .chm file (NO).
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-GENERATE_CHI = NO
-
-# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
-# and project file content.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_INDEX_ENCODING =
-
-# The BINARY_TOC flag controls whether a binary table of contents is generated
-# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
-# enables the Previous and Next buttons.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-BINARY_TOC = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members to
-# the table of contents of the HTML help documentation and to the tree view.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-TOC_EXPAND = NO
-
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
-# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
-# (.qch) of the generated HTML documentation.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_QHP = NO
-
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
-# the file name of the resulting .qch file. The path specified is relative to
-# the HTML output folder.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QCH_FILE =
-
-# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
-# Project output. For more information please see Qt Help Project / Namespace
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_NAMESPACE = org.doxygen.Project
-
-# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
-# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
-# folders).
-# The default value is: doc.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_VIRTUAL_FOLDER = doc
-
-# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
-# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_NAME =
-
-# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
-# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_ATTRS =
-
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
-# project's filter section matches. Qt Help Project / Filter Attributes (see:
-# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_SECT_FILTER_ATTRS =
-
-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHG_LOCATION =
-
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
-# generated that, together with the HTML files, form an Eclipse help plugin. To
-# install this plugin and make it available under the help contents menu in
-# Eclipse, the contents of the directory containing the HTML and XML files need
-# to be copied into the plugins directory of Eclipse. The name of the directory
-# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
-# After copying, Eclipse needs to be restarted before the help appears.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_ECLIPSEHELP = NO
-
-# A unique identifier for the Eclipse help plugin. When installing the plugin
-# the directory name containing the HTML and XML files should also have this
-# name. Each documentation set should have its own identifier.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
-
-ECLIPSE_DOC_ID = org.doxygen.Project
-
-# If you want full control over the layout of the generated HTML pages it might
-# be necessary to disable the index and replace it with your own. The
-# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
-# of each HTML page. A value of NO enables the index and the value YES disables
-# it. Since the tabs in the index contain the same information as the navigation
-# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-DISABLE_INDEX = NO
-
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
-# structure should be generated to display hierarchical information. If the tag
-# value is set to YES, a side panel will be generated containing a tree-like
-# index structure (just like the one that is generated for HTML Help). For this
-# to work a browser that supports JavaScript, DHTML, CSS and frames is required
-# (i.e. any modern browser). Windows users are probably better off using the
-# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
-# further fine-tune the look of the index. As an example, the default style
-# sheet generated by doxygen has an example that shows how to put an image at
-# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
-# the same information as the tab index, you could consider setting
-# DISABLE_INDEX to YES when enabling this option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_TREEVIEW = NO
-
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
-# doxygen will group on one line in the generated HTML documentation.
-#
-# Note that a value of 0 will completely suppress the enum values from appearing
-# in the overview section.
-# Minimum value: 0, maximum value: 20, default value: 4.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-ENUM_VALUES_PER_LINE = 4
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
-# to set the initial width (in pixels) of the frame in which the tree is shown.
-# Minimum value: 0, maximum value: 1500, default value: 250.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-TREEVIEW_WIDTH = 250
-
-# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
-# external symbols imported via tag files in a separate window.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-EXT_LINKS_IN_WINDOW = NO
-
-# Use this tag to change the font size of LaTeX formulas included as images in
-# the HTML documentation. When you change the font size after a successful
-# doxygen run you need to manually remove any form_*.png images from the HTML
-# output directory to force them to be regenerated.
-# Minimum value: 8, maximum value: 50, default value: 10.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_FONTSIZE = 10
-
-# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory before the changes have effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT = YES
-
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# http://www.mathjax.org) which uses client side Javascript for the rendering
-# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
-# installed or if you want the formulas to look prettier in the HTML output. When
-# enabled you may also need to install MathJax separately and configure the path
-# to it using the MATHJAX_RELPATH option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-USE_MATHJAX = NO
-
-# When MathJax is enabled you can set the default output format to be used for
-# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
-# Possible values are: HTML-CSS (which is slower, but has the best
-# compatibility), NativeMML (i.e. MathML) and SVG.
-# The default value is: HTML-CSS.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_FORMAT = HTML-CSS
-
-# When MathJax is enabled you need to specify the location relative to the HTML
-# output directory using the MATHJAX_RELPATH option. The destination directory
-# should contain the MathJax.js script. For instance, if the mathjax directory
-# is located at the same level as the HTML output directory, then
-# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
-# Content Delivery Network so you can quickly see the result without installing
-# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from http://www.mathjax.org before deployment.
-# The default value is: http://cdn.mathjax.org/mathjax/latest.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
-
-# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
-# extension names that should be enabled during MathJax rendering. For example
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_EXTENSIONS =
-
-# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
-# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
-# example see the documentation.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_CODEFILE =
-
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
-# the HTML output. The underlying search engine uses javascript and DHTML and
-# should work on any modern browser. Note that when using HTML help
-# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
-# there is already a search function so this one should typically be disabled.
-# For large projects the javascript based search engine can be slow; then
-# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
-# search using the keyboard; to jump to the search box use <access key> + S
-# (what the <access key> is depends on the OS and browser, but it is typically
-# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
-# key> to jump into the search results window, the results can be navigated
-# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
-# the search. The filter options can be selected when the cursor is inside the
-# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
-# to select a filter and <Enter> or <escape> to activate or cancel the filter
-# option.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-SEARCHENGINE = YES
-
-# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
-# implemented using a web server instead of a web client using Javascript. There
-# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
-# setting. When disabled, doxygen will generate a PHP script for searching and
-# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
-# and searching needs to be provided by external tools. See the section
-# "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SERVER_BASED_SEARCH = NO
-
-# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
-# script for searching. Instead the search results are written to an XML file
-# which needs to be processed by an external indexer. Doxygen will invoke an
-# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
-# search results.
-#
-# Doxygen ships with an example indexer (doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/).
-#
-# See the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH = NO
-
-# The SEARCHENGINE_URL should point to a search engine hosted by a web server
-# which will return the search results when EXTERNAL_SEARCH is enabled.
-#
-# Doxygen ships with an example indexer (doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/). See the section "External Indexing and
-# Searching" for details.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHENGINE_URL =
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
-# search data is written to a file for indexing by an external tool. With the
-# SEARCHDATA_FILE tag the name of this file can be specified.
-# The default file is: searchdata.xml.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHDATA_FILE = searchdata.xml
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
-# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
-# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
-# projects and redirect the results back to the right project.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH_ID =
-
-# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
-# projects other than the one defined by this configuration file, but that are
-# all added to the same external search index. Each project needs to have a
-# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
-# to a relative location where the documentation can be found. The format is:
-# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTRA_SEARCH_MAPPINGS =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
-# The default value is: YES.
-
-GENERATE_LATEX = NO
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_OUTPUT = latex
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked.
-#
-# Note that when enabling USE_PDFLATEX this option is only used for generating
-# bitmaps for formulas in the HTML output, but not in the Makefile that is
-# written to the output directory.
-# The default file is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_CMD_NAME = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
-# index for LaTeX.
-# The default file is: makeindex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-MAKEINDEX_CMD_NAME = makeindex
-
-# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-COMPACT_LATEX = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used by the
-# printer.
-# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
-# 14 inches) and executive (7.25 x 10.5 inches).
-# The default value is: a4.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PAPER_TYPE = a4
-
-# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
-# that should be included in the LaTeX output. The package can be specified just
-# by its name or with the correct syntax as to be used with the LaTeX
-# \usepackage command. To get the times font, for instance, you can specify:
-# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
-# To use the option intlimits with the amsmath package you can specify:
-# EXTRA_PACKAGES=[intlimits]{amsmath}
-# If left blank no extra packages will be included.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-EXTRA_PACKAGES =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
-# generated LaTeX document. The header should contain everything until the first
-# chapter. If it is left blank doxygen will generate a standard header. See
-# section "Doxygen usage" for information on how to let doxygen write the
-# default header to a separate file.
-#
-# Note: Only use a user-defined header if you know what you are doing! The
-# following commands have a special meaning inside the header: $title,
-# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
-# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
-# string; for the replacement values of the other commands the user is referred
-# to HTML_HEADER.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HEADER =
-
-# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
-# generated LaTeX document. The footer should contain everything after the last
-# chapter. If it is left blank doxygen will generate a standard footer. See
-# LATEX_HEADER for more information on how to generate a default footer and what
-# special commands can be used inside the footer.
-#
-# Note: Only use a user-defined footer if you know what you are doing!
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_FOOTER =
-
-# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
-# LaTeX style sheets that are included after the standard style sheets created
-# by doxygen. Using this option one can overrule certain style aspects. Doxygen
-# will copy the style sheet files to the output directory.
-# Note: The order of the extra style sheet files is of importance (e.g. the last
-# style sheet in the list overrules the setting of the previous ones in the
-# list).
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_STYLESHEET =
-
-# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the LATEX_OUTPUT output
-# directory. Note that the files will be copied as-is; there are no commands or
-# markers available.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_FILES =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
-# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
-# contain links (just like the HTML output) instead of page references. This
-# makes the output suitable for online browsing using a PDF viewer.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PDF_HYPERLINKS = YES
-
-# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
-# the PDF file directly from the LaTeX files. Set this option to YES to get
-# higher quality PDF documentation.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-USE_PDFLATEX = YES
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
-# command to the generated LaTeX files. This will instruct LaTeX to keep running
-# if errors occur, instead of asking the user for help. This option is also used
-# when generating formulas in HTML.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BATCHMODE = NO
-
-# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
-# index chapters (such as File Index, Compound Index, etc.) in the output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HIDE_INDICES = NO
-
-# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
-# code with syntax highlighting in the LaTeX output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_SOURCE_CODE = NO
-
-# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
-# bibliography, e.g. plainnat, or ieeetr. See
-# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
-# The default value is: plain.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BIB_STYLE = plain
-
-# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
-# page will contain the date and time when the page was generated. Setting this
-# to NO can help when comparing the output of multiple runs.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_TIMESTAMP = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
-# RTF output is optimized for Word 97 and may not look too pretty with other RTF
-# readers/editors.
-# The default value is: NO.
-
-GENERATE_RTF = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: rtf.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_OUTPUT = rtf
-
-# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-COMPACT_RTF = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
-# contain hyperlink fields. The RTF file will contain links (just like the HTML
-# output) instead of page references. This makes the output suitable for online
-# browsing using Word or some other Word compatible readers that support those
-# fields.
-#
-# Note: WordPad (write) and others do not support links.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_HYPERLINKS = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's config
-# file, i.e. a series of assignments. You only have to provide replacements;
-# missing definitions are set to their default value.
-#
-# See also section "Doxygen usage" for information on how to generate the
-# default style sheet that doxygen normally uses.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_STYLESHEET_FILE =
-
-# Set optional variables used in the generation of an RTF document. Syntax is
-# similar to doxygen's config file. A template extensions file can be generated
-# using doxygen -e rtf extensionFile.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_EXTENSIONS_FILE =
-
-# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
-# with syntax highlighting in the RTF output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_SOURCE_CODE = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
-# classes and files.
-# The default value is: NO.
-
-GENERATE_MAN = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it. A directory man3 will be created inside the directory specified by
-# MAN_OUTPUT.
-# The default directory is: man.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_OUTPUT = man
-
-# The MAN_EXTENSION tag determines the extension that is added to the generated
-# man pages. In case the manual section does not start with a number, the number
-# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
-# optional.
-# The default value is: .3.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_EXTENSION = .3
-
-# The MAN_SUBDIR tag determines the name of the directory created within
-# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
-# MAN_EXTENSION with the initial . removed.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_SUBDIR =
-
-# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
-# will generate one additional man file for each entity documented in the real
-# man page(s). These additional files only source the real man page, but without
-# them the man command would be unable to find the correct page.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_LINKS = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
-# captures the structure of the code including all documentation.
-# The default value is: NO.
-
-GENERATE_XML = NO
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: xml.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_OUTPUT = xml
-
-# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
-# listings (including syntax highlighting and cross-referencing information) to
-# the XML output. Note that enabling this will significantly increase the size
-# of the XML output.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_PROGRAMLISTING = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the DOCBOOK output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
-# that can be used to generate PDF.
-# The default value is: NO.
-
-GENERATE_DOCBOOK = NO
-
-# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
-# front of it.
-# The default directory is: docbook.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_OUTPUT = docbook
-
-# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
-# program listings (including syntax highlighting and cross-referencing
-# information) to the DOCBOOK output. Note that enabling this will significantly
-# increase the size of the DOCBOOK output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_PROGRAMLISTING = NO
-
-#---------------------------------------------------------------------------
-# Configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
-# AutoGen Definitions (see http://autogen.sf.net) file that captures the
-# structure of the code including all documentation. Note that this feature is
-# still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_AUTOGEN_DEF = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
-# file that captures the structure of the code including all documentation.
-#
-# Note that this feature is still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_PERLMOD = NO
-
-# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
-# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
-# output from the Perl module output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_LATEX = NO
-
-# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
-# formatted so it can be parsed by a human reader. This is useful if you want to
-# understand what is going on. On the other hand, if this tag is set to NO, the
-# size of the Perl module output will be much smaller and Perl will parse it
-# just the same.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_PRETTY = YES
-
-# The names of the make variables in the generated doxyrules.make file are
-# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
-# so different doxyrules.make files included by the same Makefile don't
-# overwrite each other's variables.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
-# C-preprocessor directives found in the sources and include files.
-# The default value is: YES.
-
-ENABLE_PREPROCESSING = YES
-
-# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
-# in the source code. If set to NO, only conditional compilation will be
-# performed. Macro expansion can be done in a controlled way by setting
-# EXPAND_ONLY_PREDEF to YES.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-MACRO_EXPANSION = NO
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
-# the macro expansion is limited to the macros specified with the PREDEFINED and
-# EXPAND_AS_DEFINED tags.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_ONLY_PREDEF = NO
-
-# If the SEARCH_INCLUDES tag is set to YES, the include files in the
-# INCLUDE_PATH will be searched if a #include is found.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SEARCH_INCLUDES = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by the
-# preprocessor.
-# This tag requires that the tag SEARCH_INCLUDES is set to YES.
-
-INCLUDE_PATH =
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will be
-# used.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-INCLUDE_FILE_PATTERNS =
-
-# The PREDEFINED tag can be used to specify one or more macro names that are
-# defined before the preprocessor is started (similar to the -D option of e.g.
-# gcc). The argument of the tag is a list of macros of the form: name or
-# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
-# is assumed. To prevent a macro definition from being undefined via #undef or
-# recursively expanded use the := operator instead of the = operator.
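-# For example, a hypothetical setting (illustrative only, not part of this
-# project's configuration) would be: PREDEFINED = DEBUG=1 EXPORT_API:=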
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-PREDEFINED =
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
-# tag can be used to specify a list of macro names that should be expanded. The
-# macro definition that is found in the sources will be used. Use the PREDEFINED
-# tag if you want to use a different macro definition that overrules the
-# definition found in the source code.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_AS_DEFINED =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
-# remove all references to function-like macros that are alone on a line, have
-# an all uppercase name, and do not end with a semicolon. Such function macros
-# are typically used for boiler-plate code, and will confuse the parser if not
-# removed.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SKIP_FUNCTION_MACROS = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES tag can be used to specify one or more tag files. For each tag
-# file the location of the external documentation should be added. The format of
-# a tag file without this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where loc1 and loc2 can be relative or absolute paths or URLs. See the
-# section "Linking to external documentation" for more information about the use
-# of tag files.
-# Note: Each tag file must have a unique name (where the name does NOT include
-# the path). If a tag file is not located in the directory in which doxygen is
-# run, you must also specify the path to the tagfile here.
-
-TAGFILES =
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
-# tag file that is based on the input files it reads. See section "Linking to
-# external documentation" for more information about the usage of tag files.
-
-GENERATE_TAGFILE =
-
-# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
-# the class index. If set to NO, only the inherited external classes will be
-# listed.
-# The default value is: NO.
-
-ALLEXTERNALS = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
-# in the modules index. If set to NO, only the current project's groups will be
-# listed.
-# The default value is: YES.
-
-EXTERNAL_GROUPS = YES
-
-# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
-# the related pages index. If set to NO, only the current project's pages will
-# be listed.
-# The default value is: YES.
-
-EXTERNAL_PAGES = YES
-
-# The PERL_PATH should be the absolute path and name of the perl script
-# interpreter (i.e. the result of 'which perl').
-# The default file (with absolute path) is: /usr/bin/perl.
-
-PERL_PATH = /usr/bin/perl
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
-# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
-# NO turns the diagrams off. Note that this option also works with HAVE_DOT
-# disabled, but it is recommended to install and use dot, since it yields more
-# powerful graphs.
-# The default value is: YES.
-
-CLASS_DIAGRAMS = YES
-
-# You can define message sequence charts within doxygen comments using the \msc
-# command. Doxygen will then run the mscgen tool (see:
-# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
-# documentation. The MSCGEN_PATH tag allows you to specify the directory where
-# the mscgen tool resides. If left empty the tool is assumed to be found in the
-# default search path.
-
-MSCGEN_PATH =
-
-# You can include diagrams made with dia in doxygen documentation. Doxygen will
-# then run dia to produce the diagram and insert it in the documentation. The
-# DIA_PATH tag allows you to specify the directory where the dia binary resides.
-# If left empty dia is assumed to be found in the default search path.
-
-DIA_PATH =
-
-# If set to YES the inheritance and collaboration graphs will hide inheritance
-# and usage relations if the target is undocumented or is not a class.
-# The default value is: YES.
-
-HIDE_UNDOC_RELATIONS = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz (see:
-# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
-# Bell Labs. The other options in this section have no effect if this option is
-# set to NO
-# The default value is: NO.
-
-HAVE_DOT = YES
-
-# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
-# to run in parallel. When set to 0 doxygen will base this on the number of
-# processors available in the system. You can set it explicitly to a value
-# larger than 0 to get control over the balance between CPU load and processing
-# speed.
-# Minimum value: 0, maximum value: 32, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_NUM_THREADS = 0
-
-# When you want a different font in the dot files that doxygen generates,
-# you can specify the font name using DOT_FONTNAME. You need to make
-# sure dot is able to find the font, which can be done by putting it in a
-# standard location or by setting the DOTFONTPATH environment variable or by
-# setting DOT_FONTPATH to the directory containing the font.
-# The default value is: Helvetica.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTNAME = Calibri
-
-# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
-# dot graphs.
-# Minimum value: 4, maximum value: 24, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTSIZE = 10
-
-# By default doxygen will tell dot to use the default font as specified with
-# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
-# the path where dot can find it using this tag.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTPATH =
-
-# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
-# each documented class showing the direct and indirect inheritance relations.
-# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CLASS_GRAPH = YES
-
-# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
-# graph for each documented class showing the direct and indirect implementation
-# dependencies (inheritance, containment, and class references variables) of the
-# class with other documented classes.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-COLLABORATION_GRAPH = YES
-
-# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
-# groups, showing the direct groups dependencies.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GROUP_GRAPHS = YES
-
-# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LOOK = NO
-
-# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
-# class node. If there are many fields or methods and many nodes the graph may
-# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
-# number of items for each type to make the size more manageable. Set this to 0
-# for no limit. Note that the threshold may be exceeded by 50% before the limit
-# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
-# but if the number exceeds 15, the total amount of fields shown is limited to
-# 10.
-# Minimum value: 0, maximum value: 100, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LIMIT_NUM_FIELDS = 10
-
-# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
-# collaboration graphs will show the relations between templates and their
-# instances.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-TEMPLATE_RELATIONS = NO
-
-# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
-# YES then doxygen will generate a graph for each documented file showing the
-# direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDE_GRAPH = YES
-
-# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
-# set to YES then doxygen will generate a graph for each documented file showing
-# the direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDED_BY_GRAPH = YES
-
-# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command. Disabling a call graph can be
-# accomplished by means of the command \hidecallgraph.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALL_GRAPH = YES
-
-# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command. Disabling a caller graph can be
-# accomplished by means of the command \hidecallergraph.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALLER_GRAPH = YES
-
-# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a graphical
-# hierarchy of all classes instead of a textual one.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GRAPHICAL_HIERARCHY = YES
-
-# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
-# dependencies a directory has on other directories in a graphical way. The
-# dependency relations are determined by the #include relations between the
-# files in the directories.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DIRECTORY_GRAPH = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot. For an explanation of the image formats see the section
-# output formats in the documentation of the dot tool (Graphviz (see:
-# http://www.graphviz.org/)).
-# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
-# to make the SVG files visible in IE 9+ (other browsers do not have this
-# requirement).
-# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
-# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
-# png:gdiplus:gdiplus.
-# The default value is: png.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_IMAGE_FORMAT = png
-
-# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
-# enable generation of interactive SVG images that allow zooming and panning.
-#
-# Note that this requires a modern browser other than Internet Explorer. Tested
-# and working are Firefox, Chrome, Safari, and Opera.
-# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
-# the SVG files visible. Older versions of IE do not have SVG support.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INTERACTIVE_SVG = NO
-
-# The DOT_PATH tag can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_PATH = /usr/local/bin/dot
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the \dotfile
-# command).
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOTFILE_DIRS =
-
-# The MSCFILE_DIRS tag can be used to specify one or more directories that
-# contain msc files that are included in the documentation (see the \mscfile
-# command).
-
-MSCFILE_DIRS =
-
-# The DIAFILE_DIRS tag can be used to specify one or more directories that
-# contain dia files that are included in the documentation (see the \diafile
-# command).
-
-DIAFILE_DIRS =
-
-# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
-# path where java can find the plantuml.jar file. If left blank, it is assumed
-# PlantUML is not used or called during a preprocessing step. Doxygen will
-# generate a warning when it encounters a \startuml command in this case and
-# will not generate output for the diagram.
-
-PLANTUML_JAR_PATH =
-
-# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
-# configuration file for plantuml.
-
-PLANTUML_CFG_FILE =
-
-# When using plantuml, the specified paths are searched for files specified by
-# the !include statement in a plantuml block.
-
-PLANTUML_INCLUDE_PATH =
-
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
-# that will be shown in the graph. If the number of nodes in a graph becomes
-# larger than this value, doxygen will truncate the graph, which is visualized
-# by representing a node as a red box. Note that if the number of direct
-# children of the root node in a graph is already larger than
-# DOT_GRAPH_MAX_NODES, the graph will not be shown at all. Also note that
-# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-# Minimum value: 0, maximum value: 10000, default value: 50.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_GRAPH_MAX_NODES = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
-# generated by dot. A depth value of 3 means that only nodes reachable from the
-# root by following a path via at most 3 edges will be shown. Nodes that lie
-# further from the root node will be omitted. Note that setting this option to 1
-# or 2 may greatly reduce the computation time needed for large code bases. Also
-# note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-# Minimum value: 0, maximum value: 1000, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-MAX_DOT_GRAPH_DEPTH = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, because dot on Windows does not seem
-# to support this out of the box.
-#
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_TRANSPARENT = NO
-
-# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10) support
-# this, this feature is disabled by default.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_MULTI_TARGETS = NO
-
-# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
-# explaining the meaning of the various boxes and arrows in the dot generated
-# graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GENERATE_LEGEND = YES
-
-# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
-# files that are used to generate the various graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_CLEANUP = YES
diff --git a/include/.FORMATCHECKED b/docs/fig/compiler_flow.png
index e69de29bb..e69de29bb 100644
--- a/include/.FORMATCHECKED
+++ b/docs/fig/compiler_flow.png
diff --git a/libs/.FORMATCHECKED b/docs/fig/nnfw_compiler_structure.png
index e69de29bb..e69de29bb 100644
--- a/libs/.FORMATCHECKED
+++ b/docs/fig/nnfw_compiler_structure.png
diff --git a/runtimes/neurun/.FORMATCHECKED b/docs/fig/nnfw_compiler_structure.pptx
index e69de29bb..e69de29bb 100644
--- a/runtimes/neurun/.FORMATCHECKED
+++ b/docs/fig/nnfw_compiler_structure.pptx
diff --git a/runtimes/pure_arm_compute/.FORMATCHECKED b/docs/fig/nnfw_components.png
index e69de29bb..e69de29bb 100644
--- a/runtimes/pure_arm_compute/.FORMATCHECKED
+++ b/docs/fig/nnfw_components.png
diff --git a/tests/.FORMATCHECKED b/docs/fig/nnfw_components.pptx
index e69de29bb..e69de29bb 100644
--- a/tests/.FORMATCHECKED
+++ b/docs/fig/nnfw_components.pptx
diff --git a/tools/.FORMATCHECKED b/docs/fig/nnfw_nativeapi_flow.png
index e69de29bb..e69de29bb 100644
--- a/tools/.FORMATCHECKED
+++ b/docs/fig/nnfw_nativeapi_flow.png
diff --git a/docs/fig/nnfw_nativeapi_flow.pptx b/docs/fig/nnfw_nativeapi_flow.pptx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/nnfw_nativeapi_flow.pptx
diff --git a/docs/fig/nnfw_nnapi_flow.png b/docs/fig/nnfw_nnapi_flow.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/nnfw_nnapi_flow.png
diff --git a/docs/fig/nnfw_nnapi_flow.pptx b/docs/fig/nnfw_nnapi_flow.pptx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/nnfw_nnapi_flow.pptx
diff --git a/docs/fig/nnfw_runtime_behavior.png b/docs/fig/nnfw_runtime_behavior.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/nnfw_runtime_behavior.png
diff --git a/docs/fig/nnfw_runtime_behavior.pptx b/docs/fig/nnfw_runtime_behavior.pptx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/nnfw_runtime_behavior.pptx
diff --git a/docs/fig/nnfw_runtime_structure.png b/docs/fig/nnfw_runtime_structure.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/nnfw_runtime_structure.png
diff --git a/docs/fig/nnfw_runtime_structure.pptx b/docs/fig/nnfw_runtime_structure.pptx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/nnfw_runtime_structure.pptx
diff --git a/docs/fig/runtime_nativeapi_flow.png b/docs/fig/runtime_nativeapi_flow.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/fig/runtime_nativeapi_flow.png
diff --git a/docs/howto.md b/docs/howto.md
deleted file mode 100644
index 866f56115..000000000
--- a/docs/howto.md
+++ /dev/null
@@ -1,36 +0,0 @@
-## Build Requires
-
-If you are building this project, then the following modules must be installed on your system:
-
-- CMake
-- Boost C++ libraries
-
-```
-$ sudo apt-get install cmake libboost-all-dev
-```
-
-## How to use (simple) NNAPI Binding
-
-This repo provides a T/F Lite model loader (named ``tflite_run``) and a simple NNAPI binding.
-
-Let's type the following commands, and see what happens!
-```
-$ make install
-$ USE_NNAPI=1 LD_LIBRARY_PATH="$(pwd)/Product/obj/runtimes/logging:$(pwd)/Product/out/lib" Product/out/bin/tflite_run [T/F Lite Flatbuffer Model Path]
-```
-
-## How to get pre-built T/F Lite Flatbuffer models?
-Google provides several pre-built T/F Lite models. Please check [this article](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/lite/g3doc/models.md).
-
-
-## Build How-to
-- [Cross building for ARM](howto/CrossBuildForArm.md)
-- [Cross building for AARCH64](howto/CrossBuildForAarch64.md)
-- [Build using prebuilt docker image](howto/HowToUseDockerImage.md)
-
-
-## Other how-to documents
-- [Building TensorFlow and TOCO from source](howto/BuildTFfromSource.md)
-- [How to setup XU3 with Ubuntu 16.04](howto/device/xu3_ubuntu.md)
-- [How to setup XU4 with Ubuntu 16.04](howto/device/xu4_ubuntu.md)
-- [How to add unittest using gtest](howto/HowToAddUnittest.md)
diff --git a/docs/howto/CrossBuildForArm.md b/docs/howto/CrossBuildForArm.md
deleted file mode 100644
index e307596d0..000000000
--- a/docs/howto/CrossBuildForArm.md
+++ /dev/null
@@ -1,107 +0,0 @@
-# Cross building for ARM
-
-## Prepare Ubuntu RootFS
-
-Install required packages
-
-```
-sudo apt-get install qemu qemu-user-static binfmt-support debootstrap
-```
-
-Use the `build_rootfs.sh` script to prepare the root file system. You need `sudo` permission.
-
-```
-sudo ./tools/cross/build_rootfs.sh arm
-```
-- supports `arm` (default) and `arm64` architectures for now
-- supports `xenial` (default) and `trusty` releases
-
-To see the options,
-```
-./tools/cross/build_rootfs.sh -h
-```
-
-The RootFS will be prepared in the `tools/cross/rootfs/arm` folder.
-
-## Prepare RootFS in an alternative folder
-
-Set `ROOTFS_DIR` to a full path to prepare the RootFS at an alternative location.
-
-```
-ROOTFS_DIR=/home/user/rootfs/arm-xenial sudo ./tools/cross/build_rootfs.sh arm
-```
-
-## Using proxy
-
-If you need to use a proxy server while building the rootfs, use the `--setproxy` option.
-
-```
-# for example,
-sudo ./tools/cross/build_rootfs.sh arm --setproxy="1.2.3.4:8080"
-# or
-sudo ./tools/cross/build_rootfs.sh arm --setproxy="proxy.server.com:8888"
-```
-
-This will put `apt` proxy settings in the `rootfs/etc/apt/apt.conf.d/90proxy` file
-for the `http`, `https`, and `ftp` protocols.
-
-## Install ARM Cross Toolchain
-
-We recommend having g++ >= 6 installed on your system because the generated NN tests require it.
-
-On Ubuntu 16.04 or older, follow the next steps:
-
-```
-cd ~/your/path
-wget https://releases.linaro.org/components/toolchain/binaries/7.2-2017.11/arm-linux-gnueabihf/gcc-linaro-7.2.1-2017.11-x86_64_arm-linux-gnueabihf.tar.xz
-tar xvf gcc-linaro-7.2.1-2017.11-x86_64_arm-linux-gnueabihf.tar.xz
-echo 'PATH=~/your/path/gcc-linaro-7.2.1-2017.11-x86_64_arm-linux-gnueabihf/bin:$PATH' >> ~/.bashrc
-```
-
-On Ubuntu 18.04 LTS, you can install it using `apt-get`.
-Choose whichever g++ version you prefer: 6, 7, or 8.
-
-```
-sudo apt-get install g++-{6,7,8}-arm-linux-gnueabihf
-```
-
-Make sure `libstdc++.so` on your target is updated to the one corresponding to your new toolchain.
-
-For example, if you installed gcc-linaro-7.2.1-2017.11 above, do
-
-```
-wget https://releases.linaro.org/components/toolchain/binaries/7.2-2017.11/arm-linux-gnueabihf/runtime-gcc-linaro-7.2.1-2017.11-arm-linux-gnueabihf.tar.xz
-tar xvf runtime-gcc-linaro-7.2.1-2017.11-arm-linux-gnueabihf.tar.xz
-```
-
-Then, copy `libstdc++.so.6.0.24` into `/usr/lib/arm-linux-gnueabihf`, and update symbolic links on your device.
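-
-A minimal sketch of that copy and symlink update, assuming the Linaro 7.2.1 runtime extracted above (the exact soname link may differ on your device):
-
-```
-$ sudo cp libstdc++.so.6.0.24 /usr/lib/arm-linux-gnueabihf/
-$ sudo ln -sf libstdc++.so.6.0.24 /usr/lib/arm-linux-gnueabihf/libstdc++.so.6
-```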
-
-## Build and install ARM Compute Library
-
-```
-CROSS_BUILD=1 TARGET_ARCH=armv7l make acl
-```
-Usually you only need to build ACL once. This will build and install it to the `Product/(target_arch-os)/out/bin` folder.
-- this is required for ARM on Ubuntu
-
-## Build nnfw
-
-Set the `TARGET_ARCH` variable to choose the target architecture:
-
-```
-CROSS_BUILD=1 TARGET_ARCH=armv7l make all install
-```
-- supports `armv7l` and `aarch64` for now
-
-If you used `ROOTFS_DIR` to prepare the RootFS in an alternative folder, you should also pass it to the makefile.
-
-```
-ROOTFS_DIR=/path/to/your/rootfs/arm \
-CROSS_BUILD=1 TARGET_ARCH=armv7l make all install
-```
-
-## Run test
-
-```
-./tests/scripts/test_driver.sh --artifactpath=.
-```
diff --git a/docs/howto/HowToUseDockerImage.md b/docs/howto/HowToUseDockerImage.md
deleted file mode 100644
index a28502cf0..000000000
--- a/docs/howto/HowToUseDockerImage.md
+++ /dev/null
@@ -1,168 +0,0 @@
-# How to use docker image of nnfw
-
-We have a docker image for building the `nnfw` repo.
-
-This docker image is built from https://github.sec.samsung.net/STAR/nnfw/blob/master/docker/Dockerfile and is based on Ubuntu 16.04.
-A prebuilt docker image is available from the Samsung private docker registry.
-
-This document describes how to use the prebuilt docker image when developing `nnfw`.
-
-## How to install docker
-
-Follow [Installing Docker](https://docs.docker.com/)
-
-- For Ubuntu, follow [Installing Docker on Ubuntu](https://docs.docker.com/install/linux/docker-ce/ubuntu/)
-
-These are the actual steps to install using apt package manager:
-```
-$ sudo apt-get install \
- apt-transport-https \
- ca-certificates \
- curl \
- software-properties-common
-$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
-$ sudo apt-key fingerprint 0EBFCD88
-```
-```
-$ sudo add-apt-repository \
- "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
- $(lsb_release -cs) \
- stable"
-$ sudo apt-get update
-```
-```
-$ sudo apt-get install docker-ce
-```
-
-## Configure docker daemon
-
-1. Set HTTP/HTTPS proxy
-
- * For Ubuntu, follow [Setting HTTP/HTTPS proxy environment variables](https://docs.docker.com/v17.09/engine/admin/systemd/#httphttps-proxy)
-
-If you are behind an HTTP or HTTPS proxy server, you will need to add this configuration in the Docker systemd service file.
-These are the actual steps to set an HTTP/HTTPS proxy environment variable:
-```
-$ sudo mkdir -p /etc/systemd/system/docker.service.d
-$ sudo vi /etc/systemd/system/docker.service.d/http-proxy.conf
-```
-```
-[Service]
-Environment="HTTP_PROXY=http://10.112.1.184:8080/" "HTTPS_PROXY=https://10.112.1.184:8080/" "NO_PROXY=localhost,127.0.0.1"
-```
-```
-$ sudo systemctl daemon-reload
-$ sudo systemctl restart docker
-$ systemctl show --property=Environment docker
-```
-
-2. Edit the configuration file of the docker daemon
-
-First you have to add the Samsung private docker registry to your docker daemon.
-Depending on how your docker daemon is installed, there are two ways to configure it.
-
-
-If there is a `/etc/default/docker`, please edit the file as below.
-```
-$ sudo vi /etc/default/docker
-
-DOCKER_OPTS="--insecure-registry docker.sec.samsung.net:5000"
-```
-
-If there is a `/etc/docker/daemon.json`, please edit the file as below.
-```
-{
- ...,
- "insecure-registries": [..., "docker.sec.samsung.net:5000"]
-}
-```
-
-3. Then restart the docker daemon as below.
-
-```
-$ sudo service docker restart    # Ubuntu 14.04
-
-or
-
-$ sudo systemctl restart docker  # Ubuntu 16.04
-```
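-
-To verify that the registry setting was picked up, one can inspect the daemon info (illustrative; the output format varies across docker versions):
-```
-$ docker info | grep -A1 "Insecure Registries"
-```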
-
-## Install docker image of `nnfw`
-
-Let's pull the docker image for the `nnfw` repo and tag it `nnfw_docker:latest`:
-
-```
-$ docker pull docker.sec.samsung.net:5000/star/nnfw/nnfw_docker:1.5
-$ docker tag docker.sec.samsung.net:5000/star/nnfw/nnfw_docker:1.5 nnfw_docker:latest
-```
-
-If you would like to build the `nnfw` Tizen package using gbs, pull `nnfw_docker_tizen`:
-```
-$ docker pull docker.sec.samsung.net:5000/star/nnfw/nnfw_docker_tizen:1.2
-$ docker tag docker.sec.samsung.net:5000/star/nnfw/nnfw_docker_tizen:1.2 nnfw_docker_tizen:latest
-```
-
-## Build docker image instead of pull
-
-You can build the docker image in your environment instead of pulling it from the server.
-
-```
-$ cd nnfw
-$ ./run build-docker
-```
-
-The default docker image name is `nnfw_docker`. If you want to change the image name and/or tag, use the `-t` or `--tag` option:
-
-```
-$ cd nnfw
-$ ./run build-docker -t nnfw_docker_test
-```
-
-You can use options supported by the `docker build` command (e.g., the `--network` option):
-
-```
-$ cd nnfw
-$ ./run build-docker --network=host --no-cache
-```
-
-If you want to build the docker image for Tizen builds, use the `--tizen` option:
-
-```
-$ cd nnfw
-$ ./run build-docker --tizen
-```
-
-
-## Use docker image to build `nnfw`
-Three different targets for `nnfw` can be built using the docker image.
-
-1. Build `nnfw` for `x86_64` target
-```
-$ cd nnfw
-$ docker run --rm -v $(pwd):/opt/nnfw -w /opt/nnfw nnfw_docker make install
-```
-or use `docker_run_test.sh` for convenience as below.
-```
-$ cd nnfw
-$ ./run docker_run_test.sh
-```
-You can find built artifacts at `nnfw/Product/x86_64-linux.debug`.
-
-2. Cross build `nnfw` for ARM on x86_64 host
-
-You should prepare the RootFS following [Cross Building for ARM](https://github.sec.samsung.net/STAR/nnfw/blob/master/docs/howto/CrossBuildForArm.md), except for the ACL build and cross build steps. Then execute the commands below. If your RootFS directory differs from the one below, change it to the correct path and ensure the path is absolute.
-```
-$ cd nnfw
-$ ROOTFS_DIR=$(pwd)/tools/cross/rootfs/arm \
-./run docker_build_cross_arm_ubuntu.sh
-```
-You can find built artifacts at `nnfw/Product/armv7l-linux.debug/`.
-
-3. Build `nnfw` for Tizen ARM package on x86_64 host
-```
-$ cd nnfw
-$ ./run docker_gbs_build.sh
-```
-You can find built artifacts at `Product/out/rpm`.
diff --git a/docs/nncc/design.md b/docs/nncc/design.md
new file mode 100644
index 000000000..a01d6fec4
--- /dev/null
+++ b/docs/nncc/design.md
@@ -0,0 +1,10 @@
+This document describes the basic principles behind the _nncc_ design.
+
+## Goals and non-goals
+
+As mentioned in README.md, _nncc_ aims to provide a general framework for compiling a given NN model
+to an artifact that runs on a target device (such as CPU, GPU, or NPU).
+
+More specifically, _nncc_ aims to create an efficient artifact (in terms of throughput or memory)
+for a specific target by focusing on a restricted set of NN operations. It is not the goal of _nncc_
+to support all known NN operations, although _nncc_ will keep trying to broaden its coverage.
diff --git a/docs/nncc/getting_started.md b/docs/nncc/getting_started.md
new file mode 100644
index 000000000..8f01bd2a4
--- /dev/null
+++ b/docs/nncc/getting_started.md
@@ -0,0 +1,73 @@
+#### Prerequisites
+
+The following toolchains are needed to build _nncc_ project:
+ - CMake (>= 3.1)
+ - g++ (>= 4.8)
+
+#### How to build _nncc_ with docker
+
+_nncc_ provides a ``Dockerfile`` in order to make it easy to set up a development environment.
+
+One may build ``nncc`` docker image with the following command:
+```
+nncc$ cat infra/docker/Dockerfile | docker build -t nncc -
+...
+```
+
+By default, this ``Dockerfile`` uses "archive.ubuntu.com", which may be quite slow. One may use a mirror site via the ``UBUNTU_MIRROR`` variable.
+For example, one may enable the use of ``kr.archive.ubuntu.com`` via the following command:
+```
+nncc$ cat infra/docker/Dockerfile | docker build --build-arg UBUNTU_MIRROR="kr.archive.ubuntu.com" -t nncc -
+...
+```
+
+One who works behind a proxy should provide the proxy configuration via the following command:
+```
+nncc$ cat infra/docker/Dockerfile | docker build --build-arg HTTP_PROXY=<HTTP proxy address> --build-arg HTTPS_PROXY=<HTTPS proxy address> -t nncc -
+...
+```
+One may use a simplified command if the ``HTTP_PROXY`` and ``HTTPS_PROXY`` environment variables are already set:
+```
+nncc$ export
+...
+declare -x HTTP_PROXY=...
+declare -x HTTPS_PROXY=...
+...
+nncc$ cat infra/docker/Dockerfile | docker build --build-arg HTTP_PROXY --build-arg HTTPS_PROXY -t nncc -
+...
+```
+
+Note that these configurations are orthogonal to each other. One may freely combine these options as follows:
+```
+nncc$ cat infra/docker/Dockerfile | docker build --build-arg HTTP_PROXY --build-arg HTTPS_PROXY --build-arg UBUNTU_MIRROR="kr.archive.ubuntu.com" -t nncc -
+```
+
+One may easily build _nncc_ with the following commands once the ``nncc`` docker image is built.
+```
+nncc$ ./nncc docker-nncc configure
+...
+nncc$ ./nncc docker-nncc build
+...
+```
+
+#### How to build _nncc_ with ninja
+
+You may build _nncc_ with ninja (instead of make) if ninja is available. Please try the following commands:
+```
+nncc$ rm -rf build
+nncc$ ./nncc configure -G Ninja
+nncc$ ./nncc build
+```
+
+#### How to build and run _nncc_ unittests
+
+_nncc_ includes various unittests to check its correctness. One may build and run these unittests via the following commands:
+```
+nncc$ rm -rf build
+nncc$ ./nncc configure -DENABLE_TEST=1
+nncc$ ./nncc build
+nncc$ ./nncc test
+```
+
+**NOTE** As _nncc_ unittests are implemented on top of the google test framework (_gtest_), the _nncc_ build script will automatically download _gtest_ 1.8 from public GitHub.
+If you are not able to access public GitHub from your machine, please override the download URL via the ``GTEST_URL`` environment variable.
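+
+For example, assuming the archive is mirrored on an internal server (the URL below is hypothetical), one may override the download location as follows:
+```
+nncc$ export GTEST_URL=http://mirror.example.com/googletest/release-1.8.0.tar.gz
+nncc$ ./nncc configure -DENABLE_TEST=1
+```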
diff --git a/docs/nncc/images/nncc_components.png b/docs/nncc/images/nncc_components.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nncc/images/nncc_components.png
diff --git a/docs/nncc/images/nncc_idef0_a0.png b/docs/nncc/images/nncc_idef0_a0.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nncc/images/nncc_idef0_a0.png
diff --git a/docs/nncc/images/nncc_idef0_a1.png b/docs/nncc/images/nncc_idef0_a1.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nncc/images/nncc_idef0_a1.png
diff --git a/docs/nncc/images/nncc_idef0_a12.png b/docs/nncc/images/nncc_idef0_a12.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nncc/images/nncc_idef0_a12.png
diff --git a/docs/nncc/project/detailed_level_design.md b/docs/nncc/project/detailed_level_design.md
new file mode 100644
index 000000000..50fb8fa13
--- /dev/null
+++ b/docs/nncc/project/detailed_level_design.md
@@ -0,0 +1,329 @@
+# SW Detailed Level Design
+
+**Revision history**
+
+| Ver. | Date | Contents | Author | Approver |
+| ---- | ---------- | ----------------- | ----------------- | ------------ |
+| 0.1 | 2018.06.20 | Initial version | Vostokov Sergey | Sung-Jae Lee |
+| 0.2 | 2018.06.21 | SE member review | Alexey Kondrashov | |
+| 1.0 | 2018.06.22 | Final DR1 version | Vostokov Sergey | Sung-Jae Lee |
+
+**Terminology and Abbreviation**
+
+| | |
+| ------------ | ------------------------------------------------------------- |
+| OS | Operating System |
+| OS API | Application interface of OS |
+| HW | Hardware |
+| SW | Software |
+| NN | Neural Network |
+| NN model | Neural network model (Instance of NN built with ML framework) |
+| NN compiler | The compiler for neural network |
+| ML framework | The machine learning framework |
+| TF/TF Lite | Tensorflow/Tensorflow Lite ML framework |
+| IR | Intermediate representation |
+| CI/CI system | Continuous integration system |
+| UI | The user interface |
+| GUI | The graphical user interface |
+| CLI | The command-line interface |
+
+**References**
+
+\[1\] Vostokov Sergey, [SW Requirements Specification](requirements_specification.md)
+
+\[2\] Vostokov Sergey, [SW High-Level Design](high_level_design.md)
+
+## Overview
+
+### Scope
+
+The main goal of the project is to develop a compiler for neural
+networks to produce an executable artefact for a specified SW and HW
+platform.
+
+The development scope includes the following components:
+
+ - Develop importer module to parse, verify and represent NN model for
+ further optimization and compilation
+ - Develop code emitters to produce executable binary for CPU and GPU
+
+
+**2018 year goals:**
+
+ - Support TensorFlow Lite NN model format
+ - Support Caffe NN model format
+ - Support Caffe2 NN model format (Optional)
+ - Support compilation of MobileNet NN
+ - Support compilation of Inception v3 NN
+ - Support ARM CPU
+ - Support ARM GPU (Mali)
+ - Support Tizen OS
+ - Support SmartMachine OS (Optional)
+
+| Product | Target Model Name | Comment |
+| ------------------- | ------------------------------ | ---------------- |
+| Tizen phone | Tizen TM2 | Reference device |
+| Tizen device | Odroid XU4 | Reference board |
+| SmartMachine target | Microvision mv8890, exynos8890 | Reference device |
+
+Table 1-1. Target Model
+
+### Design Consideration
+
+Deep learning software demands reliability and performance. The common,
+historical approach is to develop a SW framework (machine learning
+framework) which computes each step of the neural network inference
+process using supported hardware. This approach is used in many popular
+solutions like Google Tensorflow/Tensorflow Lite, Caffe/2, etc.
+Traditionally, neural network developers build a computation graph and
+then an appropriate machine learning framework interprets it. The
+latest discoveries in the AI field show that the node-visitor method of
+execution is inefficient. As a result, the industry has worked out a
+second approach: a neural network compiler that executes code more
+efficiently.
+
+This document presents the design of the *nncc*, a neural network
+compiler collection. The design should provide the easiest way to extend
+the functionality of the *nncc* by adding new modules with the following
+features:
+
+ - Support neural networks produced by various machine learning
+ frameworks;
+  - Produce an artefact taking advantage of various hardware
+ including specialized processors like NPU;
+ - Apply new domain specific optimization techniques over given NN.
+
+Non-functional requirements for the developed software are
+well-described in the SW Requirements Specification; such requirements
+are not shown here to avoid duplication.
+
+### Constraints
+
+See constraints in SW Requirements Specification.
+
+
+<table>
+<colgroup>
+<col style="width: 24%" />
+<col style="width: 64%" />
+<col style="width: 10%" />
+</colgroup>
+<thead>
+<tr class="header">
+<th>Item</th>
+<th>Assumptions, Dependencies and the Constraints</th>
+<th>Reference</th>
+</tr>
+</thead>
+<tbody>
+<tr class="odd">
+<td>Tizen SW Platform</td>
+<td><dl>
+<dt>The following items should be provided:</dt>
+<dd><ul>
+<li>Tizen API</li>
+<li>Tizen kernel</li>
+<li>Tizen FW</li>
+<li>Tizen SDK</li>
+<li>Tizen naming convention</li>
+</ul>
+</dd>
+</dl></td>
+<td>- <a href="www.tizen.org" class="uri">www.tizen.org</a> <br>- <a href="wiki.tizen.org" class="uri">wiki.tizen.org</a> <br>- <a href="developer.tizen.org" class="uri">developer.tizen.org</a></td>
+</tr>
+<tr class="even">
+<td>SmartMachine OS Platform</td>
+<td><dl>
+<dt>The following items should be provided:</dt>
+<dd><ul>
+<li>SmartMachine API</li>
+<li>SmartMachine kernel</li>
+<li>SmartMachine FW</li>
+<li>SmartMachine SDK</li>
+<li>SmartMachine naming convention</li>
+</ul>
+</dd>
+</dl></td>
+<td>- <a href="http://suprem.sec.samsung.net/confluence/pages/viewpage.action?pageId=81833987">Platform confluence</a> <br>- <a href="https://github.sec.samsung.net/RS7-SmartMachine">Github</a> <br>- <a href="http://suprem.sec.samsung.net/confluence/display/ASEC/Adaptive+AUTOSAR">Functional Safety confluence</a></td>
+</tr>
+<tr class="odd">
+<td>Host OS</td>
+<td>Linux-based OS (Ubuntu, Archlinux, etc)</td>
+<td>- <a href="https://www.ubuntu.com/">Ubuntu site</a> <br>- <a href="https://www.archlinux.org/">Archlinux site</a></td>
+</tr>
+<tr class="even">
+<td>Tizen target HW</td>
+<td>The reference device should be provided: Tizen TM2</td>
+<td></td>
+</tr>
+<tr class="odd">
+<td>SmartMachine target HW</td>
+<td>The reference device should be provided</td>
+<td></td>
+</tr>
+</tbody>
+</table>
+Table 1-2. Assumptions, Dependencies and the Constraints
+
+## SW Detailed Structure Design
+
+### SW Block Structure
+
+Top-Level Components of the nncc are described in the HLD. A more
+detailed structure and class diagram will be available after development
+completion.
+
+### SW Block Feature
+
+1. Initialization: configure all internal modules (see
+ [{Initialization} Detailed Design](#initialization-detailed-design))
+2. Frontend: Import NN model (see [{Import NN model} Detailed
+ Design](#import-nn-model-detailed-design))
+    - *Caffe frontend*: includes the parser of the Caffe NN model format,
+      a verifier to ensure that parsed data is valid and consistent,
+      and a Caffe-specific IR converter
+    - *Caffe2 frontend*: includes the parser of the Caffe2 NN model
+      format, a verifier to ensure that parsed data is valid and
+      consistent, and a Caffe2-specific IR converter to Model IR
+    - *Tensorflow Lite frontend*: includes the parser of the Tensorflow NN
+      model format with an automatic version recognition feature, a
+      verifier to ensure that parsed data is valid and consistent,
+      and a Tensorflow Lite-specific IR converter to Model IR
+3. Backend: Generate the code (see [{Generate the code} Detailed
+ Design](#generate-the-code-detailed-design))
+    - *Interpreter*: As described in the SW High-Level Design document,
+      an imported NN model may proceed through three steps of
+      intermediate representation: Model IR, Coarse-Grained IR, and
+      Fine-Grained IR. The Interpreter backend uses each of these IRs
+      to run inference on a given NN model. As the output, the user
+      gets the resulting calculation of all NN ops included in the
+      original computation graph.
+    - *Binary*: This type refers to generating binary code that can be
+      executed on the target device. The NN compiler can generate code
+      that is either executed solely on CPU or takes advantage of the
+      GPU when possible, if the corresponding target was specified. The
+      user may want to incorporate 3rd party libraries included in the
+      target firmware or delivered with the application package. In
+      this case, the compiler prepares the data following the EABI
+      convention and embeds invocations of the high-level functions by
+      the appropriate symbols.
+    - *Soft*: The resulting program is generated source code in a
+      high-level programming language (C or C++). There are two options
+      here: the first is to generate source code that does not depend
+      on libraries outside of itself, with the exception of system
+      libraries; the second is to include code that invokes high-level
+      functions from 3rd party libraries, for example an invocation of
+      matrix multiplication from a GEMM library.
+
+## SW Detailed Operation Design
+
+### {Initialization} Detailed Design
+
+#### Major Function
+
+To provide a valid configuration session for all modules of *nncc* using
+user input from the command line/config file/environment variables.
+
+#### Operation Sequence
+
+Initialization of the *nncc* includes command line option processing,
+configuration of its subsystems as well as any error checking possible
+at this stage. It consists of the following steps:
+
+1. Collect all command line options and verify their format for
+ validity (no syntax errors etc.)
+
+2. Check for validity and then process general options
+
+3. Load subsystem modules
+
+4. For each one of them:
+
+ - Configure
+ - Pass command line options
+ - Check command line options for validity (for example, check
+ that every required option is present)
+
+At the end of this process each subsystem is configured and has access
+to all data needed for its operation.
+
+### {Import NN model} Detailed Design
+
+#### Major Function
+
+To convert given NN model from framework-specific IR to Model IR for
+further processing.
+
+#### Operation Sequence
+
+As you may see in the diagram below, neural network import is the main
+function of the compiler frontend. The result of this operation is a
+computation graph which is presented as Model IR.
+
+![image](../images/nncc_idef0_a12.png)
+
+The import process consists of three parts:
+
+1. NN model parsing
+2. Verification of the result from the previous step
+3. Converting the model to the Model IR
+
+During the first step, the file or files containing the model are read
+and represented in a format specific to each NN framework.
+
+Verification step is included to ensure that:
+
+ - None of the files constituting the model are damaged
+ - Model format corresponds to the specified one
+ - Version of the model format corresponds to the specified one
+
+The most important step is accurately converting the model from the
+framework-specific representation to the Model IR. This conversion
+includes:
+
+ - *Translation of the NN model computation graph to the Model IR
+ computation graph.* During the translation new nodes may be
+ introduced - for example, a high-level NN operation may be split
+ into a few smaller ones.
+  - *NN model parameter layout conversion.* The way parameters (also
+    known as weights) of a model are laid out in each specific NN
+    framework may differ (for example, NCHW vs. NHWC ordering of
+    convolution weights), and it is necessary to convert such layouts
+    into a unified format.
+ - *NN operation parameter conversion.* Each NN operation has a set
+ of its own parameters describing the way this operation should be
+ performed, and these parameters also differ between frameworks.
+
+The resulting Model IR is equivalent to the initial NN model in terms of
+how the NN model inputs would be transformed into its outputs if all the
+operations in the Model IR were executed.
+
+### {Generate the code} Detailed Design
+
+Development in progress. Will be described on Completion DR.
+
+## Interface Design
+
+Development in progress. Will be described on DR2.
+
+## SW Code Structure
+
+| Directory | Description |
+| ------------------------ | -------------------------------------------------------------------- |
+| / | source codes of the build system, main README file |
+| /contrib | Incubating projects |
+| /doc | Contains the documentation of the project |
+| /doc/project | Contains project management documents (SRS, SDD, STD, HLD, DLD, etc) |
+| /libs | Contains the source of the libraries which are used by the nncc |
+| /libs/core | Contains the source code of the core library of nncc |
+| /libs/frontend           | Contains the source code of supported frontend plugins               |
+| /libs/frontend/caffe | The source code for the Caffe frontend |
+| /libs/frontend/caffe2 | The source code for the Caffe2 frontend |
+| /libs/frontend/tflite | The source code for the Tensorflow Lite frontend |
+| /libs/backend            | Contains the source code of supported backend plugins                 |
+| /libs/backend/cpu | Contains the source code of CPU backend |
+| /libs/backend/gpu | Contains the source code of GPU backend |
+| /libs/backend/3rd\_party | Contains the source code of backend to utilize 3rd party libraries |
+| /scripts | Various scripts for building and testing the nncc |
+| /tools | The source code of the executables |
diff --git a/docs/nncc/project/development_document.md b/docs/nncc/project/development_document.md
new file mode 100644
index 000000000..8315dd3b6
--- /dev/null
+++ b/docs/nncc/project/development_document.md
@@ -0,0 +1,257 @@
+# SW Development Document
+
+**Revision history**
+
+| Ver. | Date | Contents | Author | Approver |
+| ---- | ---------- | --------------------------- | --------------- | ------------ |
+| 0.1 | 2018.04.12 | Initial version | Vostokov Sergey | Sung-Jae Lee |
+| 0.2 | 2018.04.16 | SE member in-charge review | Ilya Lopatin | |
+| 1.0 | 2018.04.17 | Final Execution DR version | Vostokov Sergey | Sung-Jae Lee |
+| 1.1 | 2018.04.17 | Add SW Quality Verification | Vostokov Sergey | Sung-Jae Lee |
+
+**Terminology and Abbreviation**
+
+| | |
+| ------------ | ------------------------------------------------------------- |
+| OS | Operating System |
+| OS API | Application interface of OS |
+| HW | Hardware |
+| SW | Software |
+| NN | Neural Network |
+| NN model | Neural network model (Instance of NN built with ML framework) |
+| NN compiler | The compiler for neural network |
+| ML framework | The machine learning framework |
+| TF/TF Lite | Tensorflow/Tensorflow Lite ML framework |
+| IR | Intermediate representation |
+| CI/CI system | Continuous integration system |
+| UI | The user interface |
+| GUI | The graphical user interface |
+| CLI | The command-line interface |
+
+## Project Overview
+
+### Purpose and Scope
+
+The main goal of the project is to develop a compiler for neural networks to produce an executable artefact for a specified SW and HW platform.
+
+The development scope includes the following components:
+
+ - Develop importer module to parse, verify and represent NN model for further optimization and compilation
+ - Develop code emitters to produce executable binary for CPU and GPU
+
+
+**2018 year goals:**
+
+ - Support TensorFlow Lite NN model format
+ - Support Caffe NN model format
+ - Support Caffe2 NN model format (Optional)
+ - Support compilation of MobileNet NN
+ - Support compilation of Inception v3 NN
+ - Support ARM CPU
+ - Support ARM GPU (Mali)
+ - Support Tizen OS
+ - Support SmartMachine OS (Optional)
+
+| Product | Target Model Name | Comment |
+| ------------------- | ------------------------------ | ---------------- |
+| Tizen phone | Tizen TM2 | Reference device |
+| Tizen device | Odroid XU4 | Reference board |
+| SmartMachine target | Microvision mv8890, exynos8890 | Reference device |
+
+### Assumptions, Dependencies and Constraints
+
+<table>
+<colgroup>
+<col style="width: 26%" />
+<col style="width: 46%" />
+<col style="width: 26%" />
+</colgroup>
+<thead>
+<tr class="header">
+<th>Item</th>
+<th>Assumptions, Dependencies and the Constraints</th>
+<th>Reference</th>
+</tr>
+</thead>
+<tbody>
+<tr class="odd">
+<td>Tizen SW Platform</td>
+<td><dl>
+<dt>The following items should be provided:</dt>
+<dd><ul>
+<li>Tizen API</li>
+<li>Tizen kernel</li>
+<li>Tizen FW</li>
+<li>Tizen SDK</li>
+<li>Tizen naming convention</li>
+</ul>
+</dd>
+</dl></td>
+<td><ul>
+<li><a href="www.tizen.org" class="uri">www.tizen.org</a></li>
+<li><a href="wiki.tizen.org" class="uri">wiki.tizen.org</a></li>
+<li><a href="developer.tizen.org" class="uri">developer.tizen.org</a></li>
+</ul></td>
+</tr>
+<tr class="even">
+<td>SmartMachine OS Platform</td>
+<td><dl>
+<dt>The following items should be provided:</dt>
+<dd><ul>
+<li>SmartMachine API</li>
+<li>SmartMachine kernel</li>
+<li>SmartMachine FW</li>
+<li>SmartMachine SDK</li>
+<li>SmartMachine naming convention</li>
+</ul>
+</dd>
+</dl></td>
+<td>- <a href="http://suprem.sec.samsung.net/confluence/pages/viewpage.action?pageId=81833987">Platform confluence</a> <br>- <a href="https://github.sec.samsung.net/RS7-SmartMachine">Github</a> <br>- <a href="http://suprem.sec.samsung.net/confluence/display/ASEC/Adaptive+AUTOSAR">Functional Safety confluence</a></td>
+</tr>
+<tr class="odd">
+<td>Host OS</td>
+<td>Linux-based OS (Ubuntu, Archlinux, etc)</td>
+<td>- <a href="https://www.ubuntu.com/">Ubuntu site</a> <br>- <a href="https://www.archlinux.org/">Archlinux site</a></td>
+</tr>
+<tr class="even">
+<td>Tizen target HW</td>
+<td>The reference device should be provided: Tizen TM2</td>
+<td></td>
+</tr>
+<tr class="odd">
+<td>SmartMachine target HW</td>
+<td>The reference device should be provided</td>
+<td></td>
+</tr>
+</tbody>
+</table>
+
+## Development Plan And Result
+
+### Development Schedule
+
+| Task | Deliverable | Plan start | Plan end | Result start | Result end | Responsibility |
+| ------------------------------------ | --------------------------------- | ---------- | -------- | ------------ | ---------- | -------------- |
+| Prepare SW requirements | SRS | 04.2018 | 04.2018 | | | S. Vostokov |
+| Prepare initial SW Test Document | STD | 04.2018 | 04.2018 | | | S. Vostokov |
+| Prepare Initial Project Plan | SDD | 04.2018 | 04.2018 | | | S. Vostokov |
+| Prepare SW Test Document | STD | 04.2018 | 06.2018 | | | S. Vostokov |
+| Prepare design document | HLD, DLD | 05.2018 | 08.2018 | | | S. Vostokov |
+| Prepare test result | STD, UTR | 04.2018 | 10.2018 | | | S. Vostokov |
+| Prepare project completion documents | SDD, Project completion report | 05.2018 | 12.2018 | | | S. Vostokov |
+| Implement Caffe Importer | Caffe NN model Importer | 05.2018 | 09.2018 | | | S. Vostokov |
+| Implement code emitter for CPU | Code emitter | 05.2018 | 09.2018 | | | S. Vostokov |
+| Implement TF Lite Importer | TensorFlow Lite NN model Importer | 05.2018 | 11.2018 | | | S. Vostokov |
+| Implement code emitter for GPU | Code emitter | 02.2018 | 11.2018 | | | S. Vostokov |
+
+### SW Metrics
+
+| Category | Metric | Collection Method | Collection Period | Planned | Actual | Responsibility |
+| -------- | ---------------------------------------------------------------------- | ------------------------ | ----------------------- | ----------------- | ------ | -------------- |
+| Quality | Test pass rate | GTest | 22.02.2018 - 31.12.2018 | 100% | | S. Vostokov |
+| Quality | Defects density | Defect management system | 22.02.2018 - 31.12.2018 | \<= 1 defect/KLOC | | S. Vostokov |
+| Quality | Defects removal rate | Defect management system | 22.02.2018 - 31.12.2018 | 100% | | S. Vostokov |
+| Quality | Critical defects | Static analysis | 22.02.2018 - 31.12.2018 | 0 | | S. Vostokov |
+| Quality | Major defects | Static analysis | 22.02.2018 - 31.12.2018 | 0 | | S. Vostokov |
+| Quality | Code review issue removal | Samsung Research github | 22.02.2018 - 31.12.2018 | 100% | | S. Vostokov |
+| Quality | Comments Rate | `cloc` tool | 22.02.2018 - 31.12.2018 | Exceed 20% | | S. Vostokov |
+| Quality | Cyclomatic Complexity | SVACE | 22.02.2018 - 31.12.2018 | \< 50 | | S. Vostokov |
+| Quality | Unused Items (Unused Files, Unused Functions, Unused Global Variables) | gcc/g++ | 22.02.2018 - 31.12.2018 | 0 | | S. Vostokov |
+| Process | Project On-time Completion Rate | PLM | 22.02.2018 - 31.12.2018 | 100% | | S. Vostokov |
+| Process | Milestone On-time Completion Rate | PLM | 22.02.2018 - 31.12.2018 | 100% | | S. Vostokov |
+| Process | Process compliance | Audit | 22.02.2018 - 31.12.2018 | 100% | | S. Vostokov |
+
+### SW Configurations Management
+
+#### Document
+
+| No | Configuration Item | Location | Submitter |
+| -- | ---------------------------- | -------- | ----------- |
+| 1 | SW Requirement Specification | PLM | S. Vostokov |
+| 2 | SW Development Document | PLM | S. Vostokov |
+| 3 | SW High Level Document | PLM | S. Vostokov |
+| 4 | SW Detailed Level Document | PLM | S. Vostokov |
+| 5 | SW System Test Document | PLM | S. Vostokov |
+| 6 | SW Unit Test Report | PLM | S. Vostokov |
+
+#### SW Source Code
+
+SW Repository:
+<https://github.sec.samsung.net/STAR/nncc>
+
+ git clone https://github.sec.samsung.net/STAR/nncc.git
+
+#### Baseline
+
+| Phase | Baseline Name | SW Configuration Item |
+| ------------------ | ------------------ | ------------------------------------------------------------------------------------------- |
+| 04.2018 Plan | Execution DR | SW Requirement Specification, SW Development Document, System Test Document initial version |
+| 06.2018 Execution | DR1 | System Test Document |
+| 08.2018 Execution | Design document | SW High Level Document, SW Detailed Design Document |
+| 09.2018 Execution | DR2 | |
+| 10.2018 Execution | Test report | SW System Test Document (result), SW Unit Test Report |
+| 12.2018 Completion | Project Completion | Project Completion Report |
+
+## SW Quality Verification
+
+### SW Verification
+
+| No | Verification Item | Quality Goal | Tool | Phase | Development Team Member in Charge | Result | Note |
+| -- | -------------------------------- | ------------------------------------------ | -------- | --------- | --------------------------------- | ------ | ---- |
+| 1 | Open source License Verification | Clear violations of open source obligation | ProtexIP | Execution | Vostokov Sergey | | |
+| 2 | Potential Defect | Fix all defects | Svace | Test | Vostokov Sergey | | |
+| 3 | System Defect | Fix Critical/ Major defects | Github | Test | Vostokov Sergey | | |
+
+### Static Analysis
+
+| No | Activity | Schedule | Result | Comment |
+| -- | --------------------------- | ---------- | ------ | ------- |
+| 1 | SA Verification I (SVACE) | 28.09.2018 | | |
+| 2 | SA Verification II (SVACE) | 30.11.2018 | | |
+| 3  | SA Verification III (SVACE) | 31.12.2018 |        |         |
+
+### Coding Standard
+
+| No | Activity | Schedule | Result | Comment |
+| -- | ----------------------------------------------------- | -------- | ------ | ------- |
+| 1 | Coding standard enforcement with `clang-format` tool. | Regular | | |
+
+
+### Convergence (integration testing)
+
+Out of scope since the integration with other SW is not required by SW
+Requirement Specification.
+
+### Dynamic Analysis
+
+| No | Activity | Schedule | Result | Comment |
+| -- | ------------------- | ---------- | ------ | ------- |
+| 1 | DA Verification I | 28.09.2018 | | |
+| 2 | DA Verification II | 30.11.2018 | | |
+| 3  | DA Verification III | 31.12.2018 |        |         |
+
+
+### Architecture Analysis
+
+SW architecture verification is managed by HQ.
+
+### SW Security
+
+Out of the project scope since the project is not related to SW security.
+
+### Code Review
+
+| No | Activity | Schedule | Result | Comment |
+| -- | ----------- | -------- | ------ | ------------------------------------------------------------------- |
+| 1 | Code review | Regular | | All code is reviewed manually using `github` tool before committing |
+
+## Risk Management
+
+| Priority | Risk Description | Risk Reduction Solution | Schedule | Result | Responsibility |
+| -------- | ------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | ----------------- | ------ | -------------- |
+| 1        | Project scope is changed due to an extra HQ request                                                       | Discuss the new requirements via email and messenger, update SRS                                            | 02.2018 - 12.2018 |        | S. Vostokov    |
+| 2 | Unavoidable technical difficulties during requirements implementation | Submit requirements changes and get confirmation from HQ | 02.2018 - 12.2018 | | S. Vostokov |
+| 3 | Not enough HR | Hire team members as soon as possible, request assistance from other teams | 02.2018 - 12.2018 | | S. Vostokov |
+| 4 | Use of GPL code | Minimize usage of GPL code, wrap GPL modules with well-defined interfaces so they can be easily replaced. | 02.2018 - 12.2018 | | S. Vostokov |
+| 5        | Requirements change due to external or internal circumstances, e.g. a new technology or product launch   | Discuss project changes and make corrections                                                                | 02.2018 - 12.2018 |        | S. Vostokov    |
+
diff --git a/docs/nncc/project/high_level_design.md b/docs/nncc/project/high_level_design.md
new file mode 100644
index 000000000..a15aaca4a
--- /dev/null
+++ b/docs/nncc/project/high_level_design.md
@@ -0,0 +1,457 @@
+# SW High Level Design
+
+**Revision history**
+
+| Ver. | Date | Contents | Author | Approver |
+| ---- | ---------- | ----------------- | ----------------- | ------------ |
+| 0.1 | 2018.05.25 | Initial version | Vostokov Sergey | Sung-Jae Lee |
+| 0.2 | 2018.06.21 | SE member review | Alexey Kondrashov | |
+| 1.0 | 2018.06.22 | Final DR1 version | Vostokov Sergey | Sung-Jae Lee |
+
+**Terminology and Abbreviation**
+
+| Terminology | Description |
+| ------------ | ------------------------------------------------------------- |
+| OS | Operating System |
+| OS API | Application interface of OS |
+| HW | Hardware |
+| SW | Software |
+| NN | Neural Network |
+| NN model | Neural network model (Instance of NN built with ML framework) |
+| NN compiler | The compiler for neural network |
+| ML framework | The machine learning framework |
+| TF/TF Lite | Tensorflow/Tensorflow Lite ML framework |
+| IR | Intermediate representation |
+| CI/CI system | Continuous integration system |
+| UI | The user interface |
+| GUI | The graphical user interface |
+| CLI | The command-line interface |
+
+**References**
+
+\[1\] Vostokov Sergey, [SW Requirements Specification](requirements_specification.md)
+
+## Overview
+
+### Scope
+
+The main goal of the project is to develop a compiler for neural
+networks to produce an executable artefact for a specified SW and HW
+platform.
+
+The development scope includes the following components:
+
+ - Develop importer module to parse, verify and represent NN model for
+ further optimization and compilation
+ - Develop code emitters to produce executable binary for CPU and GPU
+
+
+**2018 year goals:**
+
+ - Support TensorFlow Lite NN model format
+ - Support Caffe NN model format
+ - Support Caffe2 NN model format (Optional)
+ - Support compilation of MobileNet NN
+ - Support compilation of Inception v3 NN
+ - Support ARM CPU
+ - Support ARM GPU (Mali)
+ - Support Tizen OS
+ - Support SmartMachine OS (Optional)
+
+| Product | Target Model Name | Comment |
+| ------------------- | ------------------------------ | ---------------- |
+| Tizen phone | Tizen TM2 | Reference device |
+| Tizen device | Odroid XU4 | Reference board |
+| SmartMachine target | Microvision mv8890, exynos8890 | Reference device |
+
+Table 1-1. Target Model
+
+### Design Consideration
+
+Deep learning software demands reliability and performance. The common,
+historical approach is to develop a SW framework (machine learning
+framework) which computes each step of the neural network inference
+process using supported hardware. This approach is used in many popular
+solutions like Google Tensorflow/Tensorflow Lite, Caffe/2, etc.
+Traditionally, neural network developers build a computation graph and
+then an appropriate machine learning framework interprets it. The
+latest discoveries in the AI field show that the node-visitor method of
+execution is inefficient. As a result, the industry has worked out a
+second approach: a neural network compiler that executes code more
+efficiently.
+
+This document presents the design of the *nncc*, a neural network
+compiler collection. The design should provide the easiest way to extend
+the functionality of the *nncc* by adding new modules with the following
+features:
+
+ - Support neural networks produced by various machine learning
+ frameworks;
+  - Produce an artefact taking advantage of various hardware
+ including specialized processors like NPU;
+ - Apply new domain specific optimization techniques over given NN.
+
+### Constraints
+
+See constraints in SW Requirements Specification.
+
+<table>
+<colgroup>
+<col style="width: 24%" />
+<col style="width: 64%" />
+<col style="width: 10%" />
+</colgroup>
+<thead>
+<tr class="header">
+<th>Item</th>
+<th>Assumptions, Dependencies and the Constraints</th>
+<th>Reference</th>
+</tr>
+</thead>
+<tbody>
+<tr class="odd">
+<td>Tizen SW Platform</td>
+<td><dl>
+<dt>The following items should be provided:</dt>
+<dd><ul>
+<li>Tizen API</li>
+<li>Tizen kernel</li>
+<li>Tizen FW</li>
+<li>Tizen SDK</li>
+<li>Tizen naming convention</li>
+</ul>
+</dd>
+</dl></td>
+<td>- <a href="www.tizen.org" class="uri">www.tizen.org</a> <br>- <a href="wiki.tizen.org" class="uri">wiki.tizen.org</a> <br>- <a href="developer.tizen.org" class="uri">developer.tizen.org</a></td>
+</tr>
+<tr class="even">
+<td>SmartMachine OS Platform</td>
+<td><dl>
+<dt>The following items should be provided:</dt>
+<dd><ul>
+<li>SmartMachine API</li>
+<li>SmartMachine kernel</li>
+<li>SmartMachine FW</li>
+<li>SmartMachine SDK</li>
+<li>SmartMachine naming convention</li>
+</ul>
+</dd>
+</dl></td>
+<td>- <a href="http://suprem.sec.samsung.net/confluence/pages/viewpage.action?pageId=81833987">Platform confluence</a> <br>- <a href="https://github.sec.samsung.net/RS7-SmartMachine">Github</a> <br>- <a href="http://suprem.sec.samsung.net/confluence/display/ASEC/Adaptive+AUTOSAR">Functional Safety confluence</a></td>
+</tr>
+<tr class="odd">
+<td>Host OS</td>
+<td>Linux-based OS (Ubuntu, Archlinux, etc)</td>
+<td>- <a href="https://www.ubuntu.com/">Ubuntu site</a> <br>- <a href="https://www.archlinux.org/">Archlinux site</a></td>
+</tr>
+<tr class="even">
+<td>Tizen target HW</td>
+<td>The reference device should be provided: Tizen TM2</td>
+<td></td>
+</tr>
+<tr class="odd">
+<td>SmartMachine target HW</td>
+<td>The reference device should be provided</td>
+<td></td>
+</tr>
+</tbody>
+</table>
+Table 1-2. Assumptions, Dependencies and the Constraints
+
+## SW System Architecture Design
+
+### Overall Architecture
+
+The picture below presents the result of a high-level analysis of the
+requirements which **nncc** should satisfy. It describes the main
+function **Compilation** of the compiler collection using the IDEF0
+(functional modeling) notation. Full information on the IDEF family of
+modeling languages is available on [Wikipedia:
+IDEF](https://en.wikipedia.org/wiki/IDEF).
+
+![image](../images/nncc_idef0_a0.png)
+
+Figure 1. Top-Level Context Diagram of compilation function.
+
+
+A short explanation of **Figure 1**:
+
+**1. Input entities:**
+
+  - *NN Model instance:* It is the main input of *nncc*. The compiler
+    takes from the user information describing the neural network to be
+    compiled. In most cases, this NN is produced by a machine learning
+    framework and stored in one or many files. The contents of these
+    files constitute the essence of the neural network. Here it is
+    denoted as an instance of NN model.
+  - *Command line options:* In order to provide the most convenient
+    way to use the compiler, it should be configurable. The current
+    design presents a tool which has a Command Line Interface (CLI).
+    Command line options are a symbolic representation of directions
+    instructing the compiler how to set up a working session to get
+    the desired result.
+
+**2. Output:**
+
+  - *Target binaries:* Everything that is produced by the compilation
+    operation. In the general case the result may consist of one or
+    more files. Each of them may be one of the following: an
+    executable, a source code file, or a log/verification/error report.
+    For example, when we require the compiler to compile a neural
+    network for execution on GPU, the output artefact may be
+    OpenCL/C/C++ source code, or a binary containing invocations of the
+    procedures delegating the calculations to the GPU.
+
+**3. Rules and notations:**
+
+  - *NN Model specification:* Each machine learning framework has its
+    own architecture design and uses its own format to
+    serialize/deserialize computation graphs which represent neural
+    networks. On a storage device, it may be saved as a file or many
+    files using a unique markup of binary data. To enable *nncc* to
+    read and process such data, it should recognize the format of the
+    container. The importer/parser subsystem of *nncc* stores the full
+    knowledge of the NN specifications and is responsible for reading
+    and parsing NN models (see [Import NN model](#import-nn-model)).
+ - *High-Level and Low-Level Optimization techniques:* Before
+ deployment, a neural network developer might want to verify their
+ product and optimize it by size and performance. There are many
+    techniques for reducing the overall size of neural network weights
+ and improving performance of the inference. NN optimization
+ activity can be automated by implementing each technique in the
+ middleend according to its specifications (see [Apply
+ Optimizations](#apply-optimizations)).
+ - *Target Runtime Environment (TRE):* In the case when the compiler
+ produces the binary for execution on a specific SW platform, it
+ should take into account the common API of this SW Platform. It
+ includes the full public API of a chosen OS available to the 3rd
+ party developers.
+  - *Target Instruction Set Architecture (Target ISA):* The resulting
+    artefact is always executed on a SW Platform using some specified
+    API. The user may want to generate an artefact that would use
+    OpenBlas or Arm Compute Library or something else (if supported by
+    the compiler) to perform calculations. To provide such a
+    possibility, *nncc* should be aware of the API of the specified
+    3rd party libraries.
+ - *Device specifications:* Some of the optimization techniques may
+ take into account the technological features of the computing
+ device, like the time to perform some specific calculations. Such
+ information is very helpful during optimization of the final code
+ of the compiled artefact because it may be used to select an
+ optimal sequence of command invocations in order to achieve the
+ best performance.
+
+**4. Mechanism:**
+
+ - *Optimizing NN Compiler:* The implemented compiler itself. Since
+ *nncc* is dedicated to producing the code for the most efficient
+ execution, we may regard the tool as optimizing.
+ - *Host OS:* Since the compiler is a tool that works in some SW
+ Environment, the main Top-Level SW system is an Operating System.
+ In the SW Requirements specification it may be defined as a
+ Linux-like OS, for example Ubuntu, Archlinux, etc.
+
+### Composition of Architecture
+
+The compiler consists of three main parts: frontend, middleend, backend.
+Together they form a Neural Network instance processing pipeline.
+Moreover, there is one additional part that is in charge of the compiler
+configuration.
+
+![image](../images/nncc_components.png)
+
+Figure 2. Top-Level Components of the
+*nncc*.
+
+| Layer or Subsystem Name | Description |
+| ----------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- |
+| Frontend | Imports a specified Neural Network, presents it as a computation graph |
+| Middleend | Provides various optimizations over the computation graph; at the end transforms it to internal IR |
+| Backend | Produces the specified artefact as a result of compilation procedure using specified parameters describing the target OS, target HW, etc |
+| Configuration system | Accepts command line options and configures *nncc* according to their contents |
+
+
+The detailed decomposition of the main function **Compilation** is
+presented in diagram A1 below.
+
+### Interface
+
+Similar to any console application, the *nncc* CLI accepts two types of
+options:
+
+ - Options that have values, for example, a name of the output executable
+ - Options that don't have values (switches) that turn various features on and off
+
+Additionally, options can be general and subsystem-specific.
+
+General options direct the process of the neural network compilation as
+a whole, and also control the utility functions like the verbosity of
+the messages that *nncc* outputs during the compilation process.
+
+Subsystem-specific options control each respective subsystem:
+
+ - Frontend subsystem takes options that point to the NN model to
+ compile, which format it has, which version of the format and so
+ on.
+ - Middleend subsystem takes options that either turn on specific
+ optimizations for the NN model, or just point at the more desired
+ outcome, for example "target performance efficiency" or "target
+ memory efficiency".
+ - Backend subsystem takes options that describe the desired target
+ device or architecture and so on.
+
+For better usability, high-level options are also supported. A single
+high-level option is mapped to a group of lower-level options, similar
+to how it is done in conventional compiler drivers like gcc. This way,
+by choosing a single middleend option such as "target performance",
+nncc will automatically choose a number of performance optimizations by
+itself.
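+
+For illustration only, such a mapping might look like the following console session; the option names below are purely hypothetical and do not represent the actual *nncc* CLI:
+```
+$ nncc --target-performance model.tflite
+# might expand to a hand-picked group of lower-level options, e.g.:
+$ nncc --fuse-operations --optimize-memory-layout model.tflite
+```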
+
+## SW System Operation Design
+
+Figure 3 presents a more detailed composition of the main function
+**Compilation**. As shown in the previous section, [Composition of
+Architecture](#composition-of-architecture), it is composed of 5
+subfunctions:
+
+ - Setup and configure each module - *Block 1* (See
+ [Initialization](#initialization) section)
+ - Import the specified neural network - *Block 2* (See [Import NN
+ model](#import-nn-model) section)
+ - Apply High-Level optimizations - *Block 3* (See [Apply
+ Optimizations](#apply-optimizations) section)
+ - Apply Low-Level optimizations - *Block 4* (See [Apply
+ Optimizations](#apply-optimizations) section)
+ - Generate the output code for specified target - *Block 5* (See
+ [Generate the code](#generate-the-code) section)
+
+![image](../images/nncc_idef0_a1.png)
+
+Figure 3. Decomposition of top-Level function **Compilation**.
+
+### Initialization
+
+At this stage the initialization of all submodules of the *nncc*
+happens. This procedure spans from command line option processing to the
+selection of all required and correctly configured modules. At the
+parsing stage the configuration system checks its own consistency. If
+the command line option set is not enough to establish a valid
+configuration, environment variables will be used. Also, almost all
+configuration options can be read from a config file if it is specified
+on the command line.
+
+### Import NN model
+
+The major function of the *nncc* frontend is to import a specified NN
+model. This means that the frontend should recognize the format of the
+given NN model, parse all internal structures (load the computation
+graph using framework-specific IR: NN topology, NN ops, weights), verify
+their correctness, and convert them to Model IR.
+
+### Apply Optimizations
+
+There are two levels of neural network optimizations in *nncc*.
+
+The first one is High-Level Optimizations; they are applied to the Model
+IR, which is output by the NN Import subsystem.
+
+#### High-Level Optimizations
+
+High-Level optimizations can be divided into two groups:
+
+ - optimizations aimed at reducing the size of the resulting model -
+ *size optimizations*
+ - optimizations aimed at reducing the inference time of the model -
+ *performance optimizations*
+
+These two groups are not mutually exclusive. Some optimization
+techniques positively affect both size and performance, while some of
+them might reduce the size of the model at some performance cost.
+
+High-Level Optimizations in this sense are purely
+neural-network-specific, as they attempt to improve the model by
+manipulating the computation graph and the weights. For example, some
+techniques search for unused parts of the computation graph and remove
+them, or they search for the parts of the graph that can be merged
+together and thus gain some performance. Other techniques manipulate the
+neural network weights - either reduce their amount or modify their
+values in a way that allows for the reduced storage consumption.
+
+Currently, High-Level Optimizations are out of scope of the project.
+
+#### Low-Level Optimization
+
+The Low-Level Optimizations are applied by the compiler closer to the
+end of the whole compilation process, before the executable generation.
+The input for this stage of *nncc* is the Coarse-Grained IR, which is
+output by the High-Level Optimization subsystem.
+
+### Generate the code
+
+The present architecture allows for several backend solutions, depending
+on the specified target. Those solutions can be divided into 3 types:
+
+ - *Interpretation.* At every step inference can be carried out by
+ interpreting IR produced after that step.
+ - *Soft backend.* Resulting program can be generated as source code
+ in high-level programming language (e.g., C/C++) that does not
+ depend on libraries outside of itself, with the exception of
+ system libraries.
+ - *Hardware (Binary) backend.* This type refers to generating binary
+ code that can be executed on target device. NN compiler can
+ generate code that is either executed solely on CPU, or takes
+ advantage of the GPU when possible if corresponding target was
+ specified.
+
+Third-party library incorporation can be done either in the form of
+source code or by compiling a binary artefact.
+
+## Appendix 1. Traceability Matrix
+
+The following table shows the mapping between the SW Requirements
+Specification and the SW High-Level Design Document.
+
+| Requirement | Description | Section |
+| ----------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------- |
+| RF-1 (Frontend: Tensorflow Lite) | The compiler should support import of NN model in Tensorflow Lite format (parsing & verification of data scheme v0-v3, 50 NN ops) | [Import NN model](#import-nn-model) |
+| RF-2 (Frontend: Caffe) | The compiler should support import of NN model in Caffe format (parsing & verification) | [Import NN model](#import-nn-model) |
+| RF-3 (Frontend: Caffe2 (Optional)) | The compiler should support import of NN model in Caffe2 format (parsing & verification) | [Import NN model](#import-nn-model) |
+| RF-4 (Frontend: lossless import) | The frontend should use the lossless approach while it is converting any NN model to IR | [Import NN model](#import-nn-model) |
+| RF-5 (Frontend: Inception\_v3)                  | The frontend should successfully import the Inception V3 NN model                                                                  | [Import NN model](#import-nn-model)     |
+| RF-6 (Frontend: MobileNet)                      | The frontend should successfully import the MobileNet NN model                                                                     | [Import NN model](#import-nn-model)     |
+| RF-7 (Backend: ARM CPU) | The compiler should produce executable for ARM CPU | [Generate the code](#generate-the-code) |
+| RF-8 (Backend: ARM GPU) | The compiler should produce the binary that takes advantages of GPU when it was specified before compilation | [Generate the code](#generate-the-code) |
+| RF-9 (Backend: Artefact type) | The compiler should produce executable as a shared library or as a static library | [Generate the code](#generate-the-code) |
+| RF-10 (Backend: Inception\_v3) | The compiler should produce the valid compiled artefact for Inception v3 NN model | [Generate the code](#generate-the-code) |
+| RF-11 (Backend: MobileNet) | The compiler should produce the valid compiled artefact for MobileNet NN model | [Generate the code](#generate-the-code) |
+| RF-12 (Config: command line) | The compiler should get configuration parameters from command line | [Initialization](#initialization) |
+| RF-13 (Config: config file (Optional)) | The compiler should get configuration parameters from config file | [Initialization](#initialization) |
+| RF-14 (Config: environment variable (Optional)) | The compiler should get configuration parameters from environment variables | [Initialization](#initialization) |
+| RF-15 (Artefact: result) | The artefact should provide comparable result to the original NN model for the same input data | [Generate the code](#generate-the-code) |
+| RF-16 (Artefact: input verifications) | The artefact should verify any input data and check consistency | [Generate the code](#generate-the-code) |
+| RF-17 (Artefact: GPU) | The artefact should take advantage of the GPU for GPU-enabled operations | [Generate the code](#generate-the-code) |
+| RF-18 (Artefact: CPU) | The artefact should take advantage of CPU if it was specified | [Generate the code](#generate-the-code) |
+
+**Design Module of S/W Architecture**
+
+| Requirement | Import NN model | Generate the code | Initialization |
+| ----------------------------------------------- | --------------- | ----------------- | -------------- |
+| RF-1 (Frontend: Tensorflow Lite) | O | | |
+| RF-2 (Frontend: Caffe) | O | | |
+| RF-3 (Frontend: Caffe2 (Optional)) | O | | |
+| RF-4 (Frontend: lossless import) | O | | |
+| RF-5 (Frontend: Inception\_v3) | O | | |
+| RF-6 (Frontend: MobileNet) | O | | |
+| RF-7 (Backend: ARM CPU) | | O | |
+| RF-8 (Backend: ARM GPU) | | O | |
+| RF-9 (Backend: Artefact type) | | O | |
+| RF-10 (Backend: Inception\_v3) | | O | |
+| RF-11 (Backend: MobileNet) | | O | |
+| RF-12 (Config: command line) | | | O |
+| RF-13 (Config: config file (Optional)) | | | O |
+| RF-14 (Config: environment variable (Optional)) | | | O |
+| RF-15 (Artefact: result) | | O | |
+| RF-16 (Artefact: input verifications) | | O | |
+| RF-17 (Artefact: GPU) | | O | |
+| RF-18 (Artefact: CPU) | | O | |
diff --git a/docs/nncc/project/requirements_specification.md b/docs/nncc/project/requirements_specification.md
new file mode 100644
index 000000000..7a6fce762
--- /dev/null
+++ b/docs/nncc/project/requirements_specification.md
@@ -0,0 +1,272 @@
+# SW Requirements Specification
+
+
+**Revision history**
+
+| Ver. | Date | Contents | Author | Approver |
+| ---- | ---------- | ------------------------------------------ | ------------------ | ------------ |
+| 0.1 | 2018.04.11 | Initial version | Vostokov Sergey | Sung-Jae Lee |
+| 0.2 | 2018.04.11 | SE member in-charge review | Aleksei Kondrashov | |
+| 1.0 | 2018.04.13 | Final Execution DR version | Vostokov Sergey | Sung-Jae Lee |
+| 1.1 | 2018.05.24 | Add new requirement in Source code section | Vostokov Sergey | Sung-Jae Lee |
+
+## Introduction
+
+### Purpose and scope
+
+The main goal of the project is to develop a compiler for neural
+networks to produce an executable artefact for a specified SW and HW
+platform.
+
+The development scope includes the following components:
+
+ - Develop importer module to parse, verify and represent NN model for
+ further optimization and compilation
+ - Develop code emitters to produce executable binary for CPU and GPU
+
+2018 year goals:
+
+ - Support TensorFlow Lite NN model format
+ - Support Caffe NN model format
+ - Support Caffe2 NN model format (Optional)
+ - Support compilation of MobileNet NN
+ - Support compilation of Inception v3 NN
+ - Support ARM CPU
+ - Support ARM GPU (Mali)
+ - Support Tizen OS
+ - Support SmartMachine OS (Optional)
+
+### Terminology and Abbreviation
+
+| | |
+| ------------ | ------------------------------------------------------------- |
+| OS | Operating System |
+| OS API | Application interface of OS |
+| HW | Hardware |
+| SW | Software |
+| NN | Neural Network |
+| NN model | Neural network model (Instance of NN built with ML framework) |
+| NN compiler | The compiler for neural network |
+| ML framework | The machine learning framework |
+| TF/TF Lite | Tensorflow/Tensorflow Lite ML framework |
+| IR | Intermediate representation |
+| CI/CI system | Continuous integration system |
+| UI | The user interface |
+| GUI | The graphical user interface |
+| CLI | The command-line interface |
+
+### SW System Architecture
+
+The main components of the compiler are the following:
+
+ - Configuration system
+ - Importer (convert supported NN model to Model IR before
+ optimization)
+ - High-Level optimization (Applies HW independent optimizations)
+ - Low-Level optimization (Applies optimizations appropriate to the
+ specified target HW)
+ - Code emitter (Produces the binary to take advantages of CPU and/or
+ GPU)
+
+![image](../images/nncc_idef0_a1.png)
+
+### Relevant Industry Standards
+
+The architecture design is described using IDEF notation. Since nncc is a part of the open source STAR Platform project,
+no other industry standards are required or applicable.
+
+## SW Functional Requirements
+
+### Frontend
+
+| ID | Requirement Name | Description |
+| ---- | --------------------------- | --------------------------------------------------------------------------------------------------------------------------------- |
+| RF-1 | Frontend: Tensorflow Lite | The compiler should support import of NN model in Tensorflow Lite format (parsing & verification of data scheme v0-v3, 50 NN ops) |
+| RF-2 | Frontend: Caffe | The compiler should support import of NN model in Caffe format (parsing & verification) |
+| RF-3 | Frontend: Caffe2 (Optional) | The compiler should support import of NN model in Caffe2 format (parsing & verification) |
+| RF-4 | Frontend: lossless import | The front-end should use the lossless approach while it is converting any NN model to IR |
+| RF-5 | Frontend: Inception\_v3     | The front-end should successfully import the Inception V3 NN model                                                                 |
+| RF-6 | Frontend: MobileNet         | The front-end should successfully import the MobileNet NN model                                                                    |
+
+### High-Level optimization
+
+No special requirements
+
+### Low-Level optimization
+
+No special requirements
+
+### Backend
+
+| ID | Requirement Name | Description |
+| ----- | ---------------------- | ------------------------------------------------------------------------------------------------------------ |
+| RF-7 | Backend: ARM CPU | The compiler should produce executable for ARM CPU |
+| RF-8 | Backend: ARM GPU | The compiler should produce the binary that takes advantages of GPU when it was specified before compilation |
+| RF-9 | Backend: Artefact type | The compiler should produce executable as a shared library or as a static library |
+| RF-10 | Backend: Inception\_v3 | The compiler should produce the valid compiled artefact for Inception v3 NN model |
+| RF-11 | Backend: MobileNet | The compiler should produce the valid compiled artefact for MobileNet NN model |
+
+### Configuration
+
+| ID | Requirement Name | Description |
+| ----- | --------------------------------------- | --------------------------------------------------------------------------- |
+| RF-12 | Config: command line | The compiler should get configuration parameters from command line |
+| RF-13 | Config: config file (Optional) | The compiler should get configuration parameters from config file |
+| RF-14 | Config: environment variable (Optional) | The compiler should get configuration parameters from environment variables |
+
+### Compiled Artefact
+
+| ID | Requirement Name | Description |
+| ----- | ----------------------------- | ---------------------------------------------------------------------------------------------- |
+| RF-15 | Artefact: result | The artefact should provide comparable result to the original NN model for the same input data |
+| RF-16 | Artefact: input verifications | The artefact should verify any input data and check consistency |
+| RF-17 | Artefact: GPU | The artefact should take advantage of the GPU for GPU-enabled operations |
+| RF-18 | Artefact: CPU | The artefact should take advantage of CPU if it was specified |
+
+## SW Non-Functional Requirements
+
+### The compiler
+
+#### Performance
+
+No special requirements
+
+#### SW capacity
+
+No special requirements
+
+#### Reliability
+
+| ID | Requirement Name | Description |
+| ----- | ------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| RNF-1 | Reliability: input | The compiler should produce a correct executable in order to utilize CPU and GPU when correct input data is provided. If incorrect input data is provided, the compiler should not produce a compiled artefact, but inform the user about all errors which were encountered |
+
+#### Security
+
+No special requirements
+
+#### Usability
+
+No special requirements
+
+#### Availability
+
+No special requirements
+
+#### Maintainability
+
+No special requirements
+
+#### Extendibility
+
+| ID | Requirement Name | Description |
+| ----- | ----------------------- | ------------------------------------------------------------------------------------------------------------------------- |
+| RNF-2 | Extendibility: frontend | The compiler design and implementation should make it possible to add new features to the front-end: new NN model formats |
+| RNF-3 | Extendibility: backend  | The compiler design and implementation should make it possible to add new features to the backend (new targets)           |
+
+#### Testability
+
+| ID | Requirement Name | Description |
+| ----- | ------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| RNF-4 | Testability: environment | The test environment should be built in order to verify compiler functionality, product build status, artefact build/execution status, artefact calculation result and calculation memory footprint and performance |
+
+#### Portability
+
+| ID | Requirement Name | Description |
+| ----- | ------------------ | --------------------------------------------------- |
+| RNF-5 | Portability: Linux | The compiler should be portable across Linux-based OSes |
+
+#### Scalability
+
+No special requirements
+
+#### Expandability
+
+No special requirements
+
+#### Configurability
+
+| ID | Requirement Name | Description |
+| ----- | --------------------------------------- | --------------------------------------------------------------------------------- |
+| RNF-6 | Configurability: command line | The compiler should support applying configuration through command line options. |
+| RNF-7 | Configurability: file (Optional) | The compiler should support applying configuration through configuration file. |
+| RNF-8 | Configurability: environment (Optional) | The compiler should support applying configuration through environment variables. |
+
+### The compiled artefact
+
+No special requirements
+
+### The source code
+
+| ID | Requirement Name | Description |
+| ------ | ---------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| RNF-9  | Legislation      | All source code files should follow their original licenses and the general project license without any conflicts                                                                                                                                                   |
+| RNF-10 | Legitimacy       | The project should have its own general license                                                                                                                                                                                                                      |
+| RNF-11 | Coding style     | Each source code file should follow the coding style defined for the project                                                                                                                                                                                         |
+| RNF-12 | Contrib          | RNF-9, RNF-10, and RNF-11 are applicable only to the final release version of the source code. These requirements are not applicable to source code placed in the development branch or in any folder used as temporary storage for source code under development.   |
+
+## SW Interface Requirements
+
+### The compiler interface
+
+#### User Interface
+
+| ID | Requirement Name | Description |
+| ----- | ---------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
+| RIF-1 | Compiler UI: no interaction  | The compiler should not require any user interaction during compilation (completed compilations, fatal exit)                                  |
+| RIF-2 | Compiler UI: CLI             | The compiler is considered a command line tool which processes parameters from the command line and/or a config file and environment variables |
+| RIF-3 | Compiler UI: input           | The compiler should provide the facility to specify the NN model to be compiled                                                               |
+| RIF-4 | Compiler UI: target device   | The compiler should provide the facility to specify the result target device (CPU or GPU)                                                     |
+| RIF-5 | Compiler UI: target platform | The compiler should provide the facility to specify the result target SW platform                                                            |
+| RIF-6 | Compiler UI: output          | The compiler should provide the facility to specify the result target name                                                                    |
+| RIF-7 | Compiler UI: target type     | The compiler should provide the facility to specify the result target type: shared or static library                                          |
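+
+To illustrate RIF-2 through RIF-7, a compiler invocation might look like the following sketch; all flag names here are hypothetical and only show the kind of parameters the CLI should accept:
+
+```
+# hypothetical flag names -- the actual CLI is defined by the implementation
+$ nncc --model inception_v3.tflite \
+       --target-device gpu \
+       --target-platform tizen \
+       --artefact-type shared \
+       --output libinception_v3.so
+```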
+
+#### Hardware Interface
+
+| ID | Requirement Name | Description |
+| ----- | -------------------------------- | --------------------------------------------------------------------------- |
+| RIF-8 | Compiler HWI: x86\_64 executable | The solution should provide executables to run on x86\_64-compatible system |
+
+#### Software Interface
+
+| ID | Requirement Name | Description |
+| ------ | ------------------------------------------ | ------------------------------------------------------------------------------------------------ |
+| RIF-9 | Compiler SWI: frontend plugin | The compiler should provide the SW interface in order to add support of the new NN model formats |
+| RIF-10 | Compiler SWI: backend plugin (HW) | The compiler should provide the SW interface in order to add support of the new HW |
+| RIF-11 | Compiler SWI: backend plugin (SW Platform) | The compiler should provide the SW interface in order to add support of the new SW Platform |
+
+#### Communication Interface
+
+No requirements for communication interface.
+
+### The compiled artefact interface
+
+#### User Interface
+
+| ID | Requirement Name | Description |
+| ------ | ------------------- | ----------------------------------- |
+| RIF-12 | Artefact UI: no GUI | A text-based command line UI is suitable |
+
+#### Hardware Interface
+
+| ID | Requirement Name | Description |
+| ------ | ----------------- | ----------------------------------------------------------------------------- |
+| RIF-13 | Artefact HWI: CPU | The artefact should use the ARM CPU instruction set when it was built for the ARM CPU |
+| RIF-14 | Artefact HWI: GPU | The artefact should use the ARM GPU instruction set when it was built for the ARM GPU |
+
+#### Software Interface
+
+| ID | Requirement Name | Description |
+| ------ | -------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
+| RIF-15 | Artefact SWI: GPU driver | The artefact should use ARM GPU driver to invoke calculations when it was built for ARM GPU |
+| RIF-16 | Artefact SWI: C/C++ header | The artefact should provide C/C++ interface in order to use it in other applications |
+| RIF-17 | Artefact SWI: shared type | The compiled artefact should be a shared library in order to share it between several executables when it was specified before compilation |
+| RIF-18 | Artefact SWI: static type | The compiled artefact should be a static library in order to be built-in to an executable when it was specified before compilation |
+| RIF-19 | Artefact SWI: Info | The artefact should provide SW interface in order to get the actual status of calculation process (progress, errors, final result) |
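+
+As a purely illustrative sketch of RIF-16 through RIF-18, an application could be built against a compiled shared artefact and its generated C/C++ header roughly as follows; all file and library names are hypothetical:
+
+```
+# hypothetical artefact layout -- header in include/, shared library in lib/
+$ g++ app.cpp -Iartefact/include -Lartefact/lib -lnnmodel -o app
+$ LD_LIBRARY_PATH=artefact/lib ./app input.dat
+```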
+
+#### Communication Interface
+
+No requirements for communication interface.
diff --git a/docs/nncc/project/test_plan.md b/docs/nncc/project/test_plan.md
new file mode 100644
index 000000000..a1f0f0a97
--- /dev/null
+++ b/docs/nncc/project/test_plan.md
@@ -0,0 +1,442 @@
+# SW System Test Document
+
+**Revision history**
+
+| Ver. | Date | Contents | Author | Approver |
+| ---- | ---------- | -------------------------- | ------------------ | ------------ |
+| 0.1 | 2018.04.12 | Initial version | Vostokov Sergey | Sung-Jae Lee |
+| 0.2 | 2018.04.13 | SE member in-charge review | Aleksei Kondrashov | |
+| 1.0 | 2018.04.17 | Final Execution DR version | Vostokov Sergey | Sung-Jae Lee |
+| 1.1 | 2018.06.20 | DR1 version | Vostokov Sergey | Sung-Jae Lee |
+
+**Terminology and Abbreviation**
+
+| | |
+| ------------ | ------------------------------------------------------------- |
+| OS | Operating System |
+| OS API | Application interface of OS |
+| HW | Hardware |
+| SW | Software |
+| NN | Neural Network |
+| NN model | Neural network model (Instance of NN built with ML framework) |
+| NN compiler | The compiler for neural network |
+| ML framework | The machine learning framework |
+| TF/TF Lite | Tensorflow/Tensorflow Lite ML framework |
+| IR | Intermediate representation |
+| CI/CI system | Continuous integration system |
+| UI | The user interface |
+| GUI | The graphical user interface |
+| CLI | The command-line interface |
+
+**References**
+
+\[1\] Vostokov Sergey, [SW Requirements Specification](requirements_specification.md)
+
+## SW System Test Overview
+
+### Purpose
+
+Software testing is an investigation conducted to assess the quality of
+the product under test and to reduce the risk of its failure for users or
+customers. The purpose of testing is to detect software failures so that
+defects may be discovered and corrected.
+
+Software system test procedure is a collection of processes and methods
+used to ensure quality. An additional goal is to make sure that the
+product follows regulations and meets the quality standards expected by
+the customer.
+
+### Scope
+
+As the number of possible tests for any software is practically
+infinite, we use a strategy to select tests that are feasible within the
+available time and resources.
+
+Software system tests attempt to cover requirements listed in the [SW
+Requirement
+Specification](https://github.sec.samsung.net/STAR/nncc/doc/project/requirements_specification.md).
+
+Since the project outcome is a compiler, its testing lies in a
+different domain than many other kinds of application or system testing.
+The tests are dedicated to finding all possible issues that cause the
+following bugs:
+
+ - Compiler crashes (also known as an ICE or Internal Compiler Error)
+
+ - Compiler hangs (kind of infinite loop in the compiler)
+
+ - Bad code generation (a result of incorrect compiler output):
+
+ - Bad code generation that leads to a crash in the application
+ - “Silent” bad code generation
+
+  - Compiler throughput issues (Issues that affect the amount of time
+    the compiler takes to compile code)
+
+ - Code quality issues (Issues that affect the performance of the
+ compiled application)
+
+  - Compiler feature correctness issues (This class of bugs involves the
+    compiler generating correct code, but not doing what a particular
+    feature specifies should be done)
+
+## SW System Test Items
+
+### Functions to be tested
+
+| Feature | Test Item ID | Test Item description |
+| ---------------------------------------- | ------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| RF-1, RIF-3 - RIF-7 | TST-1 | Test suite checks NN ops import from Tensorflow Lite format by loading NN model that consists of a single NN op. One test for each NN op. |
+| RF-2, RIF-3 - RIF-7 | TST-2 | Test suite checks NN ops import from Caffe format by loading NN model that consists of a single NN op. One test for each NN op. |
+| RF-3, RIF-3 - RIF-7 | TST-3 | Test suite checks NN ops import from Caffe2 format by loading NN model that consists of a single NN op. One test for each NN op. |
+| RF-5, RIF-3 - RIF-7 | TST-4 | The test should verify successful loading the Inception V3 NN model |
+| RF-6, RIF-3 - RIF-7 | TST-5 | The test should verify successful loading the MobileNet NN model |
+| RF-4 | TST-6 | The test suite should automatically verify the completeness of information that was read from the raw data by comparing it with serialized raw data from Model IR |
+| RF-7, RF-18, RIF-13 | TST-7 | The unit test should automatically verify successful execution of binary on target ARM CPU |
+| RF-8, RF-17, RIF-14, RIF-15 | TST-8 | The unit test should automatically verify successful execution of calculation on GPU |
+| RF-9, RNF-1, RIF-17, RIF-18 | TST-9 | Unit test should verify the existence and format of binary (shared or static) in accordance to specified options |
+| RF-10 | TST-10 | Unit test should verify that compiler produces a compiled artefact for the Inception V3 NN model (Validity of compiled artefact is checked by other tests) |
+| RF-11 | TST-11 | Unit test should verify that compiler produces a compiled artefact for the MobileNet NN model (Validity of compiled artefact is checked by other tests) |
+| RF-12, RF-13, RF-14, RNF-6, RNF-7, RNF-8 | TST-12 | The test suite should verify correctness of configuration object by unit testing |
+| RF-15, RNF-1 | TST-13 | The test suite is to verify the correctness of calculations by comparing the result of original NN model and the result of compiled artefact on the same input data |
+| RF-16 | TST-14 | Unit test should verify that the incorrect input data is processed with an error message without unexpected termination of the application |
+| RNF-4, RNF-5, RIF-8                      | TST-15       | A Linux-based OS should be used while the test environment is built.                                                                                                  |
+| RIF-16 | TST-16 | The unit test should verify the existence and validity of generated C/C++ header for compiled artefact |
+
+Table 2-1. Test Item
+
+**The following requirements can be tested only manually:**
+
+ - Non-functional requirements: RNF-2, RNF-3 (They would be tested
+ during development)
+ - Interface requirements: RIF-1, RIF-2, RIF-9 - RIF-12, RIF-19
+
+### Functions not to be tested
+
+The following requirements cannot be tested:
+
+  - The source code requirements (RNF-9, RNF-10, RNF-11)
+
+## SW System Test Procedure
+
+### Test approaches
+
+During implementation of the project deliverables, several kinds of
+testing are used. All of them are performed automatically by the
+continuous integration (CI) system. The CI system subscribes to source
+code modifications in the version control system. The configuration does
+not allow any changes to be merged into the main line if these changes
+do not pass the mandatory merge tests.
+
+ - **Code style check** (Merge mandatory test): to verify consistency
+ of coding style
+ - **Build test** (Merge mandatory test): to verify the current build
+  - **Unit tests**: to verify SW system consistency. All newly implemented
+    features, code refactoring, and optimizations must not cause unit test
+    failures. Each unit test reflects the exact logic of the tested
+    component; thus, it should be adapted whenever the program logic
+    changes.
+ - **System tests**: to verify the feature quality as well as
+ compliance with its specified requirements.
+ - **Manual-based UI testing approach**: for interface requirements,
+ which cannot be automated
+
+### Test Pass/Fail Criteria
+
+All tests (unit/system) must be executed without any issues at any time
+for newly implemented, refactored, or changed code.
+
+### Test Start/Suspension/Resumption criteria
+
+Two mandatory tests (code style check and build test) are performed for
+every pull request (PR) before it is merged. The configuration of the
+continuous integration (CI) system does not allow changes to be merged
+into the devel branch if they do not pass the tests.
+
+Unit and feature testing are performed for the devel branch
+automatically. The merge to the master branch (release) is possible only
+when all these tests pass.
+
+### Regression Test strategy
+
+If a new issue is detected and it is not covered by an existing test,
+then a new test will be developed. Otherwise, the issue should be
+resolved.
+
+### Test tools
+
+| | |
+| ------------------------------- | ------------------------------------------------------------------------------------ |
+| Source code static verification | AEGIS (CODE pre-commit test suite: static/structure/open source violation analyzers) |
+| Test execution | CMake |
+| Defect management | Samsung Research GitHub |
+| Continuous Integration system | HQ CI (CODE) |
+
+Table 3-1. Test Tools
+
+## SW System Test Schedule Plan
+
+### Test task & schedule
+
+| Task           | Schedule                | Responsibility | Detailed Task                          |
+| -------------- | ----------------------- | -------------- | -------------------------------------- |
+| Unit testing   | 01.04.2018 - 31.12.2018 | All            | All unit tests should be carried out   |
+| System testing | 01.04.2018 - 31.12.2018 | All            | All system tests should be carried out |
+
+Table 4-1. Test Tasks and Schedule
+
+### Test Resource organization plan
+
+#### Test environment
+
+| Type/Model | Operating System | Usage |
+| ---------- | --------------------------------- | ------------------------------------------------------------------------ |
+| PC/x86     | Ubuntu GNU/Linux version \>=14.04 | Build system with unit tests; system tests are performed here too.        |
+| Tizen TM2 | Tizen | Unit and system testing |
+| Odroid XU4 | Tizen | Unit and system testing |
+
+Table 4-2. Hardware / Operating System
+
+| Type | Spec | Usage |
+| ------------------- | ----------------------------------------------------- | ------------------------------------------------------------------------------- |
+| Library | Google test | Organize test code and provide utility methods |
+| VCS                 | Samsung GitHub                                         | The source code version control system                                           |
+| CI | CODE | The HQ CI system |
+| Build system | CMake | Run test and check status |
+| Device connectivity | sdb                                                    | Send tools to the device and provide a shell to run them                         |
+| Management tool | The CODE (Collaborative Open Development Environment) | Source code version control, code review, issue tracker, Continuous Integration |
+
+Table 4-3. Software
+
+### Risk management plan
+
+| Risk | Description | Probability | Countermeasures |
+| ------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------- | ----------- | --------------------------------------------------------------------------------------- |
+| SmartMachine OS SDK toolchain is not available     | In order to support compilation for SmartMachine OS, the SDK is required. The compiler would have a dependency on the SmartMachine OS SDK toolchain. | High        | Suspend support of SmartMachine OS, and make plans when the SmartMachine OS SDK is released |
+| SmartMachine OS targets are not available          | To perform testing of executables for SmartMachine OS, the specified targets are required.                                                           | High        | Request targets or a SW emulator when SmartMachine OS is released                           |
+| HQ CI does not support target testing              | Some tests require target devices to run on. The provided CI system may not support this type of testing.                                            | High        | Set up a CI environment on site                                                             |
+| Targets for testing/development are not available  | Fully automatic testing may take a long time. It also requires target devices to execute the binaries.                                              | Medium      | Request/buy a sufficient number of devices                                                  |
+
+Table 4-5. Risk Management
+
+### SW configuration management plan
+
+#### SW Configuration items identification
+
+| No | Document number | SW configuration Item | File name |
+| -- | ------------------------- | ------------------------------ | ------------------------------------------- |
+| 1 | SRR-RAJ0118ZZ-BWRF-STD001 | System Test Document | 18 NN compiler and Optimizer (STD) v1.0.pdf |
+| 2 | SRR-RAJ0118ZZ-BWRF-STS001 | System Test Case Specification | 18 NN compiler and Optimizer (STS) v1.0.pdf |
+| 3 | SRR-RAJ0118ZZ-BWRF-UTR001 | Unit Test Report | 18 NN compiler and Optimizer (UTR) v1.0.pdf |
+
+Table 4-6. SW Configuration Items List
+
+#### Directory Structure
+
+| Directory | Description |
+| ------------------------ | -------------------------------------------------------------------- |
+| / | source codes of the build system, main README file |
+| /contrib | Incubating projects |
+| /doc | Contains the documentation of the project |
+| /doc/project | Contains project management documents (SRS, SDD, STD, HLD, DLD, etc) |
+| /libs | Contains the source of the libraries which are used by the nncc |
+| /libs/core | Contains the source code of the core library of nncc |
+| /libs/frontend           | Contains the source code of supported frontend plugins                |
+| /libs/frontend/caffe | The source code for the Caffe frontend |
+| /libs/frontend/caffe2 | The source code for the Caffe2 frontend |
+| /libs/frontend/tflite | The source code for the Tensorflow Lite frontend |
+| /libs/backend | Contains the source code of supported backend plugins |
+| /libs/backend/cpu | Contains the source code of CPU backend |
+| /libs/backend/gpu | Contains the source code of GPU backend |
+| /libs/backend/3rd\_party | Contains the source code of backend to utilize 3rd party libraries |
+| /scripts | Various scripts for building and testing the nncc |
+| /tools | The source code of the executables |
+
+Table 4-7. Directory Structure
+
+#### Baseline
+
+| Test Round | Baseline Name | Configuration Item | Schedule |
+| ---------- | ------------- | ---------------------------------------------------- | ---------- |
+| Round 1 | The nncc v0.5 | SRR-RAJ0118ZZ-BWRF-STD001, SRR-RAJ0118ZZ-BWRF-UTR001 | 01.09.2018 |
+| Round 2 | The nncc v1.0 | SRR-RAJ0118ZZ-BWRF-STD002, SRR-RAJ0118ZZ-BWRF-UTR002 | 01.12.2018 |
+
+Table 4-8. Baselines
+
+## SW System Test Case
+
+| TestItem ID | Testcase ID | Test Procedures | Expected Results |
+| ----------- | ----------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| TST-1 | TST-1-1 | Import a NN consisting of a single Tensorflow Lite ADD operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-2 | Import a NN consisting of a single Tensorflow Lite AVERAGE\_POOL\_2D operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-3 | Import a NN consisting of a single Tensorflow Lite CONCATENATION operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-4 | Import a NN consisting of a single Tensorflow Lite CONV\_2D operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-5 | Import a NN consisting of a single Tensorflow Lite DEPTHWISE\_CONV\_2D operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-6 | Import a NN consisting of a single Tensorflow Lite DEQUANTIZE operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-7 | Import a NN consisting of a single Tensorflow Lite EMBEDDING\_LOOKUP operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-8 | Import a NN consisting of a single Tensorflow Lite FULLY\_CONNECTED operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-9 | Import a NN consisting of a single Tensorflow Lite HASHTABLE\_LOOKUP operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-10 | Import a NN consisting of a single Tensorflow Lite L2\_NORMALIZATION operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-11 | Import a NN consisting of a single Tensorflow Lite L2\_POOL\_2D operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-12 | Import a NN consisting of a single Tensorflow Lite LOCAL\_RESPONSE\_NORMALIZATION operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-13 | Import a NN consisting of a single Tensorflow Lite LOGISTIC operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-14 | Import a NN consisting of a single Tensorflow Lite LSH\_PROJECTION operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-15 | Import a NN consisting of a single Tensorflow Lite LSTM operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-16 | Import a NN consisting of a single Tensorflow Lite MAX\_POOL\_2D operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-17 | Import a NN consisting of a single Tensorflow Lite MUL operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-18 | Import a NN consisting of a single Tensorflow Lite RELU operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-19 | Import a NN consisting of a single Tensorflow Lite RELU\_N1\_TO\_1 operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-20 | Import a NN consisting of a single Tensorflow Lite RELU6 operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-21 | Import a NN consisting of a single Tensorflow Lite RESHAPE operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-22 | Import a NN consisting of a single Tensorflow Lite RESIZE\_BILINEAR operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-23 | Import a NN consisting of a single Tensorflow Lite RNN operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-24 | Import a NN consisting of a single Tensorflow Lite SOFTMAX operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-25 | Import a NN consisting of a single Tensorflow Lite SPACE\_TO\_DEPTH operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-26 | Import a NN consisting of a single Tensorflow Lite SVDF operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-27 | Import a NN consisting of a single Tensorflow Lite TANH operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-28 | Import a NN consisting of a single Tensorflow Lite CONCAT\_EMBEDDINGS operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-29 | Import a NN consisting of a single Tensorflow Lite SKIP\_GRAM operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-30 | Import a NN consisting of a single Tensorflow Lite CALL operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-31 | Import a NN consisting of a single Tensorflow Lite CUSTOM operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-32 | Import a NN consisting of a single Tensorflow Lite EMBEDDING\_LOOKUP\_SPARSE operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-33 | Import a NN consisting of a single Tensorflow Lite PAD operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-34 | Import a NN consisting of a single Tensorflow Lite UNIDIRECTIONAL\_SEQUENCE\_RNN operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-35 | Import a NN consisting of a single Tensorflow Lite GATHER operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-36 | Import a NN consisting of a single Tensorflow Lite BATCH\_TO\_SPACE\_ND operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-37 | Import a NN consisting of a single Tensorflow Lite SPACE\_TO\_BATCH\_ND operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-38 | Import a NN consisting of a single Tensorflow Lite TRANSPOSE operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-39 | Import a NN consisting of a single Tensorflow Lite MEAN operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-40 | Import a NN consisting of a single Tensorflow Lite SUB operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-41 | Import a NN consisting of a single Tensorflow Lite DIV operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-42 | Import a NN consisting of a single Tensorflow Lite SQUEEZE operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-43 | Import a NN consisting of a single Tensorflow Lite UNIDIRECTIONAL\_SEQUENCE\_LSTM operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-44 | Import a NN consisting of a single Tensorflow Lite STRIDED\_SLICE operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-45 | Import a NN consisting of a single Tensorflow Lite BIDIRECTIONAL\_SEQUENCE\_RNN operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-46 | Import a NN consisting of a single Tensorflow Lite EXP operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-47 | Import a NN consisting of a single Tensorflow Lite TOPK\_V2 operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-48 | Import a NN consisting of a single Tensorflow Lite SPLIT operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-49 | Import a NN consisting of a single Tensorflow Lite LOG\_SOFTMAX operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-50 | Import a NN consisting of a single Tensorflow Lite DELEGATE operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-51 | Import a NN consisting of a single Tensorflow Lite BIDIRECTIONAL\_SEQUENCE\_LSTM operation | During import no crashes or error messages occurred |
+| TST-1 | TST-1-52 | Import a NN consisting of a single Tensorflow Lite CAST operation | During import no crashes or error messages occurred |
+| TST-2 | TST-2-1 | Import a NN consisting of Caffe ImageData layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-2 | Import a NN consisting of Caffe Data layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-3 | Import a NN consisting of Caffe HDF5Input layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-4 | Import a NN consisting of two Caffe layers - Input layer and HDF5Output layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-5 | Import a NN consisting of Caffe Input layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-6 | Import a NN consisting of Caffe WindowData layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-7 | Import a NN consisting of Caffe MemoryData layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-8 | Import a NN consisting of Caffe DummyData layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-9 | Import a NN consisting of two Caffe layers - Input layer and Convolution layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-10 | Import a NN consisting of two Caffe layers - Input layer and Pooling layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-11 | Import a NN consisting of two Caffe layers - Input layer and SPP layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-12 | Import a NN consisting of two Caffe layers - Input layer and Crop layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-13 | Import a NN consisting of two Caffe layers - Input layer and Deconvolution layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-14 | Import a NN consisting of two Caffe layers - Input layer and Im2Col layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-15 | Import a NN consisting of two Caffe layers - Input layer and Recurrent layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-16 | Import a NN consisting of two Caffe layers - Input layer and RNN layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-17 | Import a NN consisting of two Caffe layers - Input layer and LSTM layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-18 | Import a NN consisting of two Caffe layers - Input layer and InnerProduct layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-19 | Import a NN consisting of two Caffe layers - Input layer and Dropout layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-20 | Import a NN consisting of two Caffe layers - Input layer and Embed layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-21 | Import a NN consisting of two Caffe layers - Input layer and LRN layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-22 | Import a NN consisting of two Caffe layers - Input layer and MVN layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-23 | Import a NN consisting of two Caffe layers - Input layer and BatchNorm layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-24 | Import a NN consisting of two Caffe layers - Input layer and ReLU layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-25 | Import a NN consisting of two Caffe layers - Input layer and PReLU layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-26 | Import a NN consisting of two Caffe layers - Input layer and ELU layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-27 | Import a NN consisting of two Caffe layers - Input layer and Sigmoid layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-28 | Import a NN consisting of two Caffe layers - Input layer and TanH layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-29 | Import a NN consisting of two Caffe layers - Input layer and AbsVal layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-30 | Import a NN consisting of two Caffe layers - Input layer and Power layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-31 | Import a NN consisting of two Caffe layers - Input layer and Exp layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-32 | Import a NN consisting of two Caffe layers - Input layer and Log layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-33 | Import a NN consisting of two Caffe layers - Input layer and BNLL layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-34 | Import a NN consisting of two Caffe layers - Input layer and Threshold layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-35 | Import a NN consisting of two Caffe layers - Input layer and Bias layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-36 | Import a NN consisting of two Caffe layers - Input layer and Scale layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-37 | Import a NN consisting of two Caffe layers - Input layer and Flatten layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-38 | Import a NN consisting of two Caffe layers - Input layer and Reshape layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-39 | Import a NN consisting of two Caffe layers - Input layer and BatchReindex layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-40 | Import a NN consisting of two Caffe layers - Input layer and Split layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-41 | Import a NN consisting of two Caffe layers - Input layer and Concat layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-42 | Import a NN consisting of two Caffe layers - Input layer and Slice layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-43 | Import a NN consisting of two Caffe layers - Input layer and Eltwise layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-44 | Import a NN consisting of two Caffe layers - Input layer and Filter layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-45 | Import a NN consisting of two Caffe layers - Input layer and Parameter layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-46 | Import a NN consisting of two Caffe layers - Input layer and Reduction layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-47 | Import a NN consisting of two Caffe layers - Input layer and Silence layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-48 | Import a NN consisting of two Caffe layers - Input layer and ArgMax layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-49 | Import a NN consisting of two Caffe layers - Input layer and Softmax layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-50 | Import a NN consisting of two Caffe layers - Input layer and Python layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-51 | Import a NN consisting of two Caffe layers - Input layer and MultinomialLogisticLoss layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-52 | Import a NN consisting of two Caffe layers - Input layer and Infogain layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-53 | Import a NN consisting of two Caffe layers - Input layer and SoftmaxWithLoss layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-54 | Import a NN consisting of two Caffe layers - Input layer and EuclideanLoss layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-55 | Import a NN consisting of two Caffe layers - Input layer and HingeLoss layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-56 | Import a NN consisting of two Caffe layers - Input layer and SigmoidCrossEntropyLoss layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-57 | Import a NN consisting of two Caffe layers - Input layer and Accuracy layer | During import no crashes or error messages occurred |
+| TST-2 | TST-2-58 | Import a NN consisting of two Caffe layers - Input layer and ContrastiveLoss layer | During import no crashes or error messages occurred |
+| TST-3 | TST-3-1 | Import a NN consisting of a single Caffe2 Add operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-2 | Import a NN consisting of a single Caffe2 AveragePool2D operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-3 | Import a NN consisting of a single Caffe2 Concat operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-4 | Import a NN consisting of a single Caffe2 Conv2D operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-5 | Import a NN consisting of a single Caffe2 FC operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-6 | Import a NN consisting of a single Caffe2 LRN operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-7 | Import a NN consisting of a single Caffe2 Sigmoid operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-8 | Import a NN consisting of a single Caffe2 MaxPool2D operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-9 | Import a NN consisting of a single Caffe2 Mul operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-10 | Import a NN consisting of a single Caffe2 Relu operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-11 | Import a NN consisting of a single Caffe2 Reshape operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-12 | Import a NN consisting of a single Caffe2 Softmax operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-13 | Import a NN consisting of a single Caffe2 Tanh operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-14 | Import a NN consisting of a single Caffe2 PadImage operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-15 | Import a NN consisting of a single Caffe2 BatchToSpace operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-16 | Import a NN consisting of a single Caffe2 SpaceToBatch operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-17 | Import a NN consisting of a single Caffe2 Transpose operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-18 | Import a NN consisting of a single Caffe2 Mean operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-19 | Import a NN consisting of a single Caffe2 Sub operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-20 | Import a NN consisting of a single Caffe2 Div operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-21 | Import a NN consisting of a single Caffe2 Squeeze operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-22 | Import a NN consisting of a single Caffe2 Exp operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-23 | Import a NN consisting of a single Caffe2 TopK operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-24 | Import a NN consisting of a single Caffe2 Split operation | During import no crashes or error messages occurred |
+| TST-3 | TST-3-25 | Import a NN consisting of a single Caffe2 Cast operation | During import no crashes or error messages occurred |
+| TST-4 | TST-4-1 | Import Inception V3 NN model | During import no crashes or error messages occurred |
+| TST-5 | TST-5-1 | Import MobileNet NN model | During import no crashes or error messages occurred |
+| TST-6 | TST-6-1 | Import Inception V3 NN model, serialize all model weights, compare serialized data with the initial NN model | Test executed successfully, serialized weights are equal to initial model weights |
+| TST-6       | TST-6-2     | Import MobileNet NN model, serialize all model weights, compare serialized data with the initial NN model                                                                                            | Test executed successfully, serialized weights are equal to initial model weights                                                                            |
+| TST-7       | TST-7-1     | Generate binary for the Inception V3 NN model and run its inference on a device with ARM CPU                                                                                                         | Test executed successfully, no crashes occurred, inference result was output, amount and format of the outputs correspond to the expected NN model outputs   |
+| TST-7       | TST-7-2     | Generate binary for the MobileNet NN model and run its inference on a device with ARM CPU                                                                                                            | Test executed successfully, no crashes occurred, inference result was output, amount and format of the outputs correspond to the expected NN model outputs   |
+| TST-8       | TST-8-1     | Generate binary for the Inception V3 NN model and run its inference on a GPU-enabled device                                                                                                          | Test executed successfully, no crashes occurred, inference result was output, amount and format of the outputs correspond to the expected NN model outputs   |
+| TST-8       | TST-8-2     | Generate binary for the MobileNet NN model and run its inference on a GPU-enabled device                                                                                                             | Test executed successfully, no crashes occurred, inference result was output, amount and format of the outputs correspond to the expected NN model outputs   |
+| TST-9 | TST-9-1 | Provide correct NN model, compile it as a static library, then check that corresponding binary exists and it is a static library | Test executed successfully |
+| TST-9 | TST-9-2 | Provide correct NN model, compile it as a shared library, then check that corresponding binary exists and it is a shared library | Test executed successfully |
+| TST-9 | TST-9-3 | Provide incorrect model, compile it as a static library, then check that no compiled artifact is produced | Test executed successfully |
+| TST-9 | TST-9-4 | Provide incorrect model, compile it as a shared library, then check that no compiled artifact is produced | Test executed successfully |
+| TST-10 | TST-10-1 | Check that a static library is provided after compiling Inception V3 as a static library | Test executed successfully |
+| TST-10 | TST-10-2 | Check that a shared library is provided after compiling Inception V3 as a shared library | Test executed successfully |
+| TST-11 | TST-11-1 | Check that a static library is provided after compiling MobileNet as a static library | Test executed successfully |
+| TST-11 | TST-11-2 | Check that a shared library is provided after compiling MobileNet as a shared library | Test executed successfully |
+| TST-12 | TST-12-1 | Check that configuration object is constructed correctly when getting configuration parameters from command line | Test executed successfully |
+| TST-12 | TST-12-2 | Check that configuration object is constructed correctly when getting configuration parameters from config file | Test executed successfully |
+| TST-12 | TST-12-3 | Check that configuration object is constructed correctly when getting configuration parameters from environment variables | Test executed successfully |
+| TST-13 | TST-13-1 | Compile Inception V3 as static library for CPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-13 | TST-13-2 | Compile Inception V3 as shared library for CPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-13 | TST-13-3 | Compile Inception V3 as static library for GPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-13 | TST-13-4 | Compile Inception V3 as shared library for GPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-13 | TST-13-5 | Compile MobileNet as static library for CPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-13 | TST-13-6 | Compile MobileNet as shared library for CPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-13 | TST-13-7 | Compile MobileNet as static library for GPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-13 | TST-13-8 | Compile MobileNet as shared library for GPU, provide it and the original model with same correct input data, then compare the result from original model with the result from compiled artifact | Test executed successfully, results are comparable |
+| TST-14 | TST-14-1 | Provide compiled Inception V3 artifact with invalid input, check that no unexpected termination occurs | Test executed successfully |
+| TST-14 | TST-14-2 | Provide compiled Inception V3 artifact with invalid input, check that an error message is provided | Test executed successfully |
+| TST-14 | TST-14-3 | Provide compiled MobileNet artifact with invalid input, check that no unexpected termination occurs | Test executed successfully |
+| TST-14 | TST-14-4 | Provide compiled MobileNet artifact with invalid input, check that an error message is provided | Test executed successfully |
+| TST-15 | TST-15-1 | Check that the OS used during test environment build is Linux-based | Test executed successfully |
+| TST-16 | TST-16-1 | Compile a valid NN model, then check that C/C++ header corresponding to compiled artifact exists | Test executed successfully |
+| TST-16 | TST-16-2 | Compile a valid NN model, then if C/C++ header corresponding to compiled artifact exists, verify its validity | Test executed successfully |
+
+Table 5-1. System Test case
diff --git a/docs/nncc/project_guide.md b/docs/nncc/project_guide.md
new file mode 100644
index 000000000..af6a5acfd
--- /dev/null
+++ b/docs/nncc/project_guide.md
@@ -0,0 +1,27 @@
+### How to create your own project
+_nncc_ aims to make it easy to develop optimized, retargetable NN compilers. Anyone or any team interested in _nncc_ can create a new incubating project.
+
+#### Subject
+Subjects are related to the NN (Neural Network) compiler. Some examples are below, but they are not limited to:
+- NN IR (Intermediate Representation)
+- Extended frontend and backend
+- High-performance (model optimization, memory optimization, scheduling, etc.)
+- Tools (verification, benchmark, visualization, etc.)
+- Tutorial, testbed
+
+#### How to propose
+There is no formal proposal process. Anyone can submit an issue or a PR as a starting point of a proposal. It would be helpful if the submissions had documents or descriptions containing the following, to share your idea and concept and attract new contributors to your project (not mandatory):
+- Overview, goal or architecture description to explain your project
+- How-to guide including building and running your programs
+
+#### Directory to use
+- A directory under `compiler/`, which starts with your project name.
+
+#### Requirement
+- A project should follow the formal review process that _nncc_ is currently using ([How to create a Pull Request (in the contribution guide)](contribution_guide.md#how-to-create-a-pull-request)).
+
+#### How to enable format checker
+- Create a `.FORMATCHECKED` file in your project directory for format checker to check the source code of the directory and its subdirectories.
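+
+For example, assuming your project lives under `compiler/` as described above (the project name below is just a placeholder):
+
+```
+$ touch compiler/my_project/.FORMATCHECKED
+```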
+
+#### How to contribute
+Anyone who wants to contribute can create and submit PRs and issues following [nncc contribution_guide](contribution_guide.md). _nncc_ always welcomes your contribution.
diff --git a/docs/nncc/roadmap.md b/docs/nncc/roadmap.md
new file mode 100644
index 000000000..d2227e8be
--- /dev/null
+++ b/docs/nncc/roadmap.md
@@ -0,0 +1,6 @@
+## 2018
+
+In 2018, _nncc_ will provide Caffe/TensorFlow Lite frontends and ARM CPU/GPU backends built on top of
+well-specified common (re-targetable) intermediate representation (IR) which is expressive enough to
+encode Inception(v3) and MobileNet, and is flexible enough to support next-gen H/W architectures, such
+as DSP or NPU.
diff --git a/docs/nnfw/2018/fig/nnfw_architecture.png b/docs/nnfw/2018/fig/nnfw_architecture.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/2018/fig/nnfw_architecture.png
diff --git a/docs/nnfw/2018/fig/nnfw_architecture.pptx b/docs/nnfw/2018/fig/nnfw_architecture.pptx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/2018/fig/nnfw_architecture.pptx
diff --git a/docs/project/2018_requirement_specification.md b/docs/nnfw/2018/project/2018_requirement_specification.md
index 90e3937ef..90e3937ef 100644
--- a/docs/project/2018_requirement_specification.md
+++ b/docs/nnfw/2018/project/2018_requirement_specification.md
diff --git a/docs/roadmap.md b/docs/nnfw/2018/roadmap.md
index aca206889..aca206889 100644
--- a/docs/roadmap.md
+++ b/docs/nnfw/2018/roadmap.md
diff --git a/docs/workgroups.md b/docs/nnfw/2018/workgroups.md
index b258c3971..b258c3971 100644
--- a/docs/workgroups.md
+++ b/docs/nnfw/2018/workgroups.md
diff --git a/docs/HowToImplementOperatorKernel.md b/docs/nnfw/HowToImplementOperatorKernel.md
index 715575a5f..715575a5f 100644
--- a/docs/HowToImplementOperatorKernel.md
+++ b/docs/nnfw/HowToImplementOperatorKernel.md
diff --git a/docs/nnfw/fig/nnfw_architecture.png b/docs/nnfw/fig/nnfw_architecture.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/fig/nnfw_architecture.png
diff --git a/docs/nnfw/fig/nnfw_architecture.pptx b/docs/nnfw/fig/nnfw_architecture.pptx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/fig/nnfw_architecture.pptx
diff --git a/docs/nnfw/fig/nnfw_behavior.png b/docs/nnfw/fig/nnfw_behavior.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/fig/nnfw_behavior.png
diff --git a/docs/nnfw/fig/nnfw_behavior.pptx b/docs/nnfw/fig/nnfw_behavior.pptx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/fig/nnfw_behavior.pptx
diff --git a/docs/nnfw/howto.md b/docs/nnfw/howto.md
new file mode 100644
index 000000000..48e26a28a
--- /dev/null
+++ b/docs/nnfw/howto.md
@@ -0,0 +1,37 @@
+## Build Requires
+
+If you are building this project, then the following modules must be installed on your system:
+
+- CMake
+- Boost C++ libraries
+
+```
+$ sudo apt-get install cmake libboost-all-dev
+```
+
+## How to use (simple) NNAPI Binding
+
+This repo provides a T/F Lite model loader (named ``tflite_run``) and a simple NNAPI binding.
+
+Let's type the following commands, and see what happens!
+```
+$ make install
+$ USE_NNAPI=1 LD_LIBRARY_PATH="$(pwd)/Product/obj/runtimes/logging:$(pwd)/Product/out/lib" Product/out/bin/tflite_run [T/F Lite Flatbuffer Model Path]
+```
+
+## How to get pre-built T/F Lite Flatbuffer models?
+Google provides several pre-built T/F Lite models. Please check [this page](https://www.tensorflow.org/lite/models)
+
+
+## Build How-to
+- [Cross building for ARM](howto/CrossBuildForArm.md)
+- [Cross building for AARCH64](howto/CrossBuildForAarch64.md)
+- [Build using prebuilt docker image](howto/HowToUseDockerImage.md)
+
+
+## Other how-to documents
+- [Building TensorFlow and TOCO from source](howto/BuildTFfromSource.md)
+- [How to setup XU3 with Ubuntu 16.04](howto/device/xu3_ubuntu.md)
+- [How to setup XU4 with Ubuntu 16.04](howto/device/xu4_ubuntu.md)
+- [How to add unittest using gtest](howto/HowToAddUnittest.md)
+- [How to manually test NNFW on single model/input pair](howto/HowToTestManualy.md)
diff --git a/docs/howto/BuildTFfromSource.md b/docs/nnfw/howto/BuildTFfromSource.md
index 3880d5ab9..3880d5ab9 100644
--- a/docs/howto/BuildTFfromSource.md
+++ b/docs/nnfw/howto/BuildTFfromSource.md
diff --git a/docs/howto/CrossBuildForAarch64.md b/docs/nnfw/howto/CrossBuildForAarch64.md
index f3dc55236..f3dc55236 100644
--- a/docs/howto/CrossBuildForAarch64.md
+++ b/docs/nnfw/howto/CrossBuildForAarch64.md
diff --git a/docs/nnfw/howto/CrossBuildForArm.md b/docs/nnfw/howto/CrossBuildForArm.md
new file mode 100644
index 000000000..110dde861
--- /dev/null
+++ b/docs/nnfw/howto/CrossBuildForArm.md
@@ -0,0 +1,129 @@
+# Cross building for ARM
+
+## Prepare Ubuntu RootFS
+
+Install required packages
+
+```
+sudo apt-get install qemu qemu-user-static binfmt-support debootstrap
+```
+
+Use the `build_rootfs.sh` script to prepare the Root File System. You need `sudo` privileges.
+
+```
+sudo ./tools/cross/build_rootfs.sh arm
+```
+- supports `arm` (default) and `arm64` architectures for now
+- supports `xenial` (default) and `trusty` releases
+
+To see the options,
+```
+./tools/cross/build_rootfs.sh -h
+```
+
+RootFS will be prepared at `tools/cross/rootfs/arm` folder.
+
+### Prepare RootFS at alternative folder
+
+Set `ROOTFS_DIR` to a full path to prepare the RootFS at an alternative location.
+
+```
+ROOTFS_DIR=/home/user/rootfs/arm-xenial sudo ./tools/cross/build_rootfs.sh arm
+```
+
+### Using proxy
+
+If you need to use a proxy server while building the rootfs, use the `--setproxy` option.
+
+```
+# for example,
+sudo ./tools/cross/build_rootfs.sh arm --setproxy="1.2.3.4:8080"
+# or
+sudo ./tools/cross/build_rootfs.sh arm --setproxy="proxy.server.com:8888"
+```
+
+This will put `apt` proxy settings in `rootfs/etc/apt/apt.conf.d/90proxy` file
+for `http`, `https` and `ftp` protocol.
+
+## Install ARM Cross Toolchain
+
+We recommend you have g++ >= 6 installed on your system because NN generated tests require it.
+
+- On Ubuntu 16.04 or older, follow the next steps:
+
+```
+cd ~/your/path
+wget https://releases.linaro.org/components/toolchain/binaries/7.2-2017.11/arm-linux-gnueabihf/gcc-linaro-7.2.1-2017.11-x86_64_arm-linux-gnueabihf.tar.xz
+tar xvf gcc-linaro-7.2.1-2017.11-x86_64_arm-linux-gnueabihf.tar.xz
+echo 'PATH=~/your/path/gcc-linaro-7.2.1-2017.11-x86_64_arm-linux-gnueabihf/bin:$PATH' >> ~/.bashrc
+```
+
+- On Ubuntu 18.04 LTS, you can install it using `apt-get`.
+Choose whichever g++ version you prefer: 6, 7 or 8.
+
+```
+sudo apt-get install g++-{6,7,8}-arm-linux-gnueabihf
+```
+
+Make sure `libstdc++.so` on your target is updated to the one corresponding to your new toolchain.
+
+For example, if you installed gcc-linaro-7.2.1-2017.11 above, do
+
+```
+wget https://releases.linaro.org/components/toolchain/binaries/7.2-2017.11/arm-linux-gnueabihf/runtime-gcc-linaro-7.2.1-2017.11-arm-linux-gnueabihf.tar.xz
+tar xvf runtime-gcc-linaro-7.2.1-2017.11-arm-linux-gnueabihf.tar.xz
+```
+
+Then, copy `libstdc++.so.6.0.24` into `/usr/lib/arm-linux-gnueabihf`, and update symbolic links on your device.
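+
+A minimal sketch of those two steps, run on the target device; the path inside the runtime tarball is an assumption, so check your extracted tree:
+
+```
+# copy the new runtime library and point the libstdc++.so.6 symlink at it
+$ sudo cp runtime-gcc-linaro-7.2.1-2017.11-arm-linux-gnueabihf/lib/libstdc++.so.6.0.24 /usr/lib/arm-linux-gnueabihf/
+$ sudo ln -sf /usr/lib/arm-linux-gnueabihf/libstdc++.so.6.0.24 /usr/lib/arm-linux-gnueabihf/libstdc++.so.6
+```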
+
+## Build and install ARM Compute Library
+
+In most cases, you only need to build ACL once.
+
+ACL is automatically built and installed in `externals/acl` when you build nnfw, with no changes required.
+
+You can check the ACL source information in `cmake/packages/ARMComputeSourceConfig.cmake`.
+
+## Build nnfw
+
+Set the `TARGET_ARCH` variable to select the target architecture.
+
+If you used `ROOTFS_DIR` to prepare the RootFS in an alternative folder, you should also pass it to the makefile.
+
+```
+CROSS_BUILD=1 TARGET_ARCH=armv7l make all install
+
+# If ROOTFS_DIR is in alternative folder
+ROOTFS_DIR=/path/to/your/rootfs/arm \
+CROSS_BUILD=1 TARGET_ARCH=armv7l make all install
+```
+
+You can also omit the `CROSS_BUILD=1` option if you explicitly pass `ROOTFS_DIR`. In that case, if
+`TARGET_ARCH` differs from the host architecture, the make script automatically applies
+`CROSS_BUILD=1`. So, if you set `ROOTFS_DIR` as an environment variable, you can perform a normal
+build and a cross build as follows.
+
+```
+export ROOTFS_DIR=xxx
+...
+make all install # do normal build
+TARGET_ARCH=armv7l make all install # do cross build
+```
+
+If you want to build neurun, turn on the `BUILD_NEURUN` option in `cmake/CfgOptionFlags.cmake`:
+```
+option(BUILD_NEURUN "Build neurun" ON)
+```
+
+## Run test
+- PureACL
+```
+ ./tests/scripts/test_driver.sh --artifactpath=.
+```
+
+- neurun
+```
+ ./tests/scripts/test_driver.sh --artifactpath=. \
+ --ldlibrarypath=Product/out/lib/neurun:Product/out/lib \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.acl_cl.txt
+```
diff --git a/docs/howto/HowToAddUnittest.md b/docs/nnfw/howto/HowToAddUnittest.md
index 5bb75b258..5bb75b258 100644
--- a/docs/howto/HowToAddUnittest.md
+++ b/docs/nnfw/howto/HowToAddUnittest.md
diff --git a/docs/nnfw/howto/HowToTestManualy.md b/docs/nnfw/howto/HowToTestManualy.md
new file mode 100644
index 000000000..545f97a1f
--- /dev/null
+++ b/docs/nnfw/howto/HowToTestManualy.md
@@ -0,0 +1,64 @@
+# How to test NNFW on a single model/input pair
+
+1. Select a backend through environment variables:
+ * acl_cl: `export OP_BACKEND_ALLOPS=acl_cl`
+ * acl_neon: `export OP_BACKEND_ALLOPS=acl_neon`
+ * cpu: `export OP_BACKEND_ALLOPS=cpu`
+ * different backends for different operations:
+ ```
+ unset OP_BACKEND_ALLOPS
+ export OP_BACKEND_Conv2DNode=cpu
+ export OP_BACKEND_MaxPool2DNode=acl_cl
+ export OP_BACKEND_AvgPool2DNode=acl_neon
+ ```
+
+2. Select an executor through an environment variable:
+ * linear: `export EXECUTOR=Linear`
+ * dataflow: `export EXECUTOR=Dataflow`
+ * parallel: `export EXECUTOR=Parallel`
+
+3. Set library path: `export LD_LIBRARY_PATH=/path/to/nnfw/Product/armv7l-linux.debug/out/lib`
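+
+Putting the three steps together, one possible setup for a single manual run looks like this (a sketch; the backend, executor, and build path are just examples):
+```
+export OP_BACKEND_ALLOPS=acl_cl
+export EXECUTOR=Linear
+export LD_LIBRARY_PATH=/path/to/nnfw/Product/armv7l-linux.debug/out/lib
+```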
+
+## Test NNFW through NNAPI
+
+### Testing on random input
+1. Generate a random input, get the reference result using the tflite interpreter, and dump the input and result into a file:
+ ```
+ /path/to/tflite_run --tflite /path/to/model.tflite --dump /path/to/out.dat
+ ```
+2. Run inference with NNFW through NNAPI and compare the result with the reference one:
+   ```
+   USE_NNAPI=1 /path/to/tflite_run --tflite /path/to/model.tflite --compare /path/to/out.dat
+   ```
+
+### Testing on particular input
+1. Prepare input:
+
+   `tflite_run` consumes input as a sequence of floats.
+
+   For example, you could convert a `.jpg` image into a file in this format with the following python3 script:
+ ```
+   from PIL import Image
+   import numpy as np
+
+   img = Image.open("./image.jpg")
+   # img.size is (width, height) while getdata() returns pixels row by row,
+   # so reshape to (height, width, 3); scale pixel values to [0, 1]
+   np_img = np.array(img.getdata()).reshape(img.size[1], img.size[0], 3).astype(np.float32) / 255.
+
+   # dump the tensor as a raw sequence of float32 values in row-major (C) order
+   with open('./converted_image.dat', 'wb') as f:
+       f.write(np_img.tobytes())
+ ```
+
+2. Get the reference result using the tflite interpreter, and dump the input and result into a file:
+
+ ```
+ /path/to/tflite_run --tflite /path/to/model.tflite --input /path/to/input.dat --dump /path/to/out.dat
+ ```
+3. Run inference with NNFW through NNAPI and compare the result with the reference one:
+   ```
+   USE_NNAPI=1 /path/to/tflite_run --tflite /path/to/model.tflite --compare /path/to/out.dat
+   ```
+
+## Test NNFW through NNPackage
+
+TODO: fill in this section when NNPackage is implemented
diff --git a/docs/nnfw/howto/HowToUseDockerImage.md b/docs/nnfw/howto/HowToUseDockerImage.md
new file mode 100644
index 000000000..0b2a859b3
--- /dev/null
+++ b/docs/nnfw/howto/HowToUseDockerImage.md
@@ -0,0 +1,158 @@
+# How to use docker image of nnfw
+
+We have a docker image for building the `nnfw` repo.
+
+This docker image is built from https://github.sec.samsung.net/STAR/nnfw/blob/master/infra/docker/Dockerfile and is based on Ubuntu 16.04.
+A prebuilt docker image is available from the Samsung private docker registry.
+
+This document describes how to use the prebuilt docker image when developing `nnfw`.
+
+## How to install docker
+
+Follow [Installing Docker](https://docs.docker.com/)
+
+- For Ubuntu, follow [Installing Docker on Ubuntu](https://docs.docker.com/install/linux/docker-ce/ubuntu/)
+
+These are the actual steps to install it using the apt package manager:
+```
+$ sudo apt-get install \
+ apt-transport-https \
+ ca-certificates \
+ curl \
+ software-properties-common
+$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
+$ sudo apt-key fingerprint 0EBFCD88
+```
+```
+$ sudo add-apt-repository \
+ "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) \
+ stable"
+$ sudo apt-get update
+```
+```
+$ sudo apt-get install docker-ce
+```
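+
+You can optionally verify the installation by running the standard hello-world image:
+```
+$ sudo docker run --rm hello-world
+```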
+
+## Configure docker daemon
+
+1. Set HTTP/HTTPS proxy
+
+ * For Ubuntu, follow [Setting HTTP/HTTPS proxy environment variables](https://docs.docker.com/v17.09/engine/admin/systemd/#httphttps-proxy)
+
+If you are behind an HTTP or HTTPS proxy server, you will need to add this configuration in the Docker systemd service file.
+These are the actual steps to set an HTTP/HTTPS proxy environment variable:
+```
+$ sudo mkdir -p /etc/systemd/system/docker.service.d
+$ sudo vi /etc/systemd/system/docker.service.d/http-proxy.conf
+```
+```
+[Service]
+Environment="HTTP_PROXY=http://10.112.1.184:8080/" "HTTPS_PROXY=https://10.112.1.184:8080/" "NO_PROXY=localhost,127.0.0.1"
+```
+```
+$ sudo systemctl daemon-reload
+$ sudo systemctl restart docker
+$ systemctl show --property=Environment docker
+```
+
+2. Edit configuration file of docker daemon
+
+First, you have to add the Samsung private docker registry to your docker daemon.
+Depending on how your docker daemon is installed, there are two ways to configure it.
+
+
+If there is a `/etc/default/docker`, please edit the file as below.
+```
+$ sudo vi /etc/default/docker
+
+DOCKER_OPTS="--insecure-registry docker.sec.samsung.net:5000"
+```
+
+If there is a `/etc/docker/daemon.json`, please edit the file as below.
+```
+{
+ ...,
+ "insecure-registries": [..., "docker.sec.samsung.net:5000"]
+}
+```
+
+3. Then restart the docker daemon as below.
+
+```
+$ sudo service docker restart // Ubuntu 14.04
+
+or
+
+$ sudo systemctl restart docker // Ubuntu 16.04
+```
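+
+After the restart, you can check that the registry setting was picked up; `docker info` lists the configured insecure registries:
+```
+$ docker info | grep -A 1 "Insecure Registries"
+```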
+
+## Install docker image of `nnfw`
+
+Let's pull the docker image for the `nnfw` repo and tag it as `nnfw_docker:latest`:
+
+```
+$ docker pull docker.sec.samsung.net:5000/star/nnfw/nnfw_docker:1.5
+$ docker tag docker.sec.samsung.net:5000/star/nnfw/nnfw_docker:1.5 nnfw_docker:latest
+```
+
+If you would like to build the `nnfw` tizen package using gbs, pull `nnfw_docker_tizen`:
+```
+$ docker pull docker.sec.samsung.net:5000/star/nnfw/nnfw_docker_tizen:1.2
+$ docker tag docker.sec.samsung.net:5000/star/nnfw/nnfw_docker_tizen:1.2 nnfw_docker_tizen:latest
+```
+
+## Build docker image instead of pull
+
+You can build the docker image in your own environment instead of pulling it from the server.
+
+```
+$ cd nnfw
+$ ./nnfw build-docker
+```
+
+The default docker image name is `nnfw_docker`. If you want to change the image name and/or tag, use the `-t` or `--tag` option.
+
+```
+$ cd nnfw
+$ ./nnfw build-docker -t nnfw_docker_test
+```
+
+You can use options supported by the `docker build` command (e.g., the `--network` option).
+
+```
+$ cd nnfw
+$ ./nnfw build-docker --network=host --no-cache
+```
+
+## Use docker image to build `nnfw`
+Three different targets for `nnfw` can be built using the docker image.
+
+1. Build `nnfw` for `x86_64` target
+```
+$ cd nnfw
+$ docker run --rm -v $(pwd):/opt/nnfw -w /opt/nnfw nnfw_docker make install
+```
+or use `docker_build_test_x64.sh` for convenience as below.
+```
+$ cd nnfw
+$ ./infra/scripts/docker_build_test_x64.sh
+```
+You can find built artifacts at `nnfw/Product/x86_64-linux.debug`.
+
+2. Cross build `nnfw` for ARM on x86_64 host
+
+You should prepare a RootFS by following [Cross Building for ARM](https://github.sec.samsung.net/STAR/nnfw/blob/master/docs/howto/CrossBuildForArm.md), skipping the ACL build and cross build steps. Then execute the commands below. If your RootFS directory differs from the one below, change it to the correct path and make sure the path is absolute.
+```
+$ cd nnfw
+$ ROOTFS_DIR=$(pwd)/tools/cross/rootfs/arm \
+./infra/scripts/docker_build_cross_arm_neurun.sh
+```
+You can find built artifacts at `nnfw/Product/armv7l-linux.debug/`.
+
+3. Build `nnfw` for Tizen ARM package on x86_64 host
+```
+$ cd nnfw
+$ ./infra/scripts/docker_build_tizen_gbs.sh
+```
+You can find built artifacts at `Product/out/rpm`.
diff --git a/docs/nnfw/howto/HowtoMakeSampleAppOnNnfw.md b/docs/nnfw/howto/HowtoMakeSampleAppOnNnfw.md
new file mode 100644
index 000000000..d272a8390
--- /dev/null
+++ b/docs/nnfw/howto/HowtoMakeSampleAppOnNnfw.md
@@ -0,0 +1,132 @@
+# How to make a sample app on nnfw
+
+Our runtime `neurun` currently supports `NNAPI` as its interface. One way to use `NNAPI` efficiently is through tensorflow lite. We provide an additional library in `/libs/tflite` to help with using tensorflow lite. (This library is not officially supported.)
+
+To use tensorflow lite, you need to prepare a tensorflow lite model file, and you should know the input/output tensor names. Then you can write a sample app.
+
+## Prepare loaded tensorflow lite model object
+
+You can select one of two kernel registers: the official tensorflow lite kernel register or the extended register (for pre-implemented custom ops):
+```
+#include "tensorflow/lite/kernels/register.h"
+#include "tflite/ext/kernels/register.h"
+```
+
+To use the tensorflow lite interpreter, you need the tensorflow lite interpreter session header:
+```
+#include "tflite/InterpreterSession.h"
+```
+
+For NNAPI usage, you need the NNAPI session header:
+```
+#include "tflite/NNAPISession.h"
+```
+
+Load the model into a `FlatBufferModel` object, create a tensorflow lite operator resolver `BuiltinOpResolver`, and construct a tensorflow lite interpreter builder using them:
+```
+tflite::StderrReporter error_reporter;
+auto model = tflite::FlatBufferModel::BuildFromFile(model_file.c_str(), &error_reporter);
+
+// TODO: determine which BuiltinOpResolver and prepend namespace
+BuiltinOpResolver resolver;
+
+tflite::InterpreterBuilder builder(*model, resolver);
+```
+
+Create a tensorflow lite interpreter and initialize it using the builder:
+```
+std::unique_ptr<tflite::Interpreter> interpreter;
+builder(&interpreter);
+```
+
+Create a tensorflow lite session to use NNAPI:
+```
+std::shared_ptr<nnfw::tflite::Session> sess = std::make_shared<nnfw::tflite::NNAPISession>(interpreter.get());
+```
+
+If you want to use tensorflow lite interpreter instead of NNAPI, then:
+```
+std::shared_ptr<nnfw::tflite::Session> sess = std::make_shared<nnfw::tflite::InterpreterSession>(interpreter.get());
+```
+
+`NNAPISession` constructs a computational graph from the interpreter and builds the model.
+
+## Prepare tensors memory allocation and model input for inference
+
+Allocate the memory for tensors of `tflite::Interpreter`:
+```
+sess->prepare();
+```
+
+Prepare the inputs. How to prepare them is out of scope here and task specific.<br/>
+Copy the input data into the model, i.e. into `interpreter->inputs()`. This is tensorflow lite specific, not nnfw specific, so one can use any method applicable to tensorflow lite, e.g.:
+```
+for (const auto &id : interpreter->inputs())
+{
+ if (interpreter->tensor(id)->name == input_name)
+ {
+ float *p = interpreter->tensor(id)->data.f;
+
+ for (int y = 0; y < height; ++y)
+ {
+ for (int x = 0; x < width; ++x)
+ {
+ for (int c = 0; c < channel; ++c)
+ {
+ *p++ = data[y * width * channel + x * channel + c];
+ }
+ }
+ }
+ }
+}
+```
+where:<br/>
+`input_name` - the name of the model's input tensor;<br/>
+`data` - a source vector of size `height * width * channel`.
+
+## Run the inference and get outputs
+
+Run the inference
+```
+sess->run();
+```
+
+Get the result from `interpreter->outputs()`. This is tensorflow lite specific, not nnfw specific, so one can use any method applicable to tensorflow lite, e.g.:
+```
+for (const auto &id : interpreter->outputs())
+{
+ if (interpreter->tensor(id)->name == output_name)
+ {
+ float *p = interpreter->tensor(id)->data.f;
+
+ for (int i = 0; i < result.capacity(); ++i)
+ {
+ result.push_back(p[i]);
+ }
+ }
+}
+```
+where:<br/>
+`output_name` - the name of the model's output tensor;<br/>
+`result` - a float vector to hold the output. Its size can be calculated, e.g., with a small helper (the function name is illustrative):
+```
+int output_size(tflite::Interpreter *interpreter, const std::string &output_name)
+{
+  for (const auto &id : interpreter->outputs())
+  {
+    if (interpreter->tensor(id)->name == output_name)
+    {
+      TfLiteTensor *t = interpreter->tensor(id);
+      // multiply all dimensions to get the number of elements
+      int v = 1;
+      for (int i = 0; i < t->dims->size; ++i)
+      {
+        v *= t->dims->data[i];
+      }
+      return v;
+    }
+  }
+  return -1; // no output tensor with the given name
+}
+```
+
+Release the session
+```
+sess->teardown();
+```
diff --git a/docs/nnfw/howto/device/xu3-dip.png b/docs/nnfw/howto/device/xu3-dip.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/howto/device/xu3-dip.png
diff --git a/docs/nnfw/howto/device/xu3_tizen.md b/docs/nnfw/howto/device/xu3_tizen.md
new file mode 100644
index 000000000..6473ab9a8
--- /dev/null
+++ b/docs/nnfw/howto/device/xu3_tizen.md
@@ -0,0 +1,140 @@
+# About
+
+This document describes how to flash Tizen 5.5 for the ODroid XU3.
+
+The host environment is Ubuntu 18.04.
+
+This document covers only the eMMC + XU3 case.
+
+# Download files
+
+## Images
+
+Boot
+- https://download.tizen.org/snapshots/tizen/unified/latest/images/standard/tv-boot-armv7l-odroidxu3/
+- download the biggest file
+
+Root FS
+- https://download.tizen.org/snapshots/tizen/unified/latest/images/standard/tv-wayland-armv7l-odroidu3/
+- download the biggest file
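+
+Snapshot file names are date-stamped and change over time, so the exact name below is only an illustration; this guide uses the 20190905.1 snapshot, which at the time of writing could be fetched like:
+```
+wget https://download.tizen.org/snapshots/tizen/unified/latest/images/standard/tv-boot-armv7l-odroidxu3/tizen-unified_20190905.1_tv-boot-armv7l-odroidxu3.tar.gz
+```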
+
+U-Boot images
+```
+wget https://github.com/hardkernel/u-boot/raw/odroidxu3-v2012.07/sd_fuse/hardkernel_1mb_uboot/bl1.bin.hardkernel
+wget https://github.com/hardkernel/u-boot/raw/odroidxu3-v2012.07/sd_fuse/hardkernel_1mb_uboot/bl2.bin.hardkernel.1mb_uboot
+wget https://github.com/hardkernel/u-boot/raw/odroidxu3-v2012.07/sd_fuse/hardkernel_1mb_uboot/tzsw.bin.hardkernel
+```
+
+You also need `u-boot-mmc.bin`, which is inside the `tizen-unified_20180425.2_tv-boot-armv7l-odroidxu3.tar.gz` file.
+```
+tar xvf tizen-unified_20180425.2_tv-boot-armv7l-odroidxu3.tar.gz u-boot-mmc.bin
+```
+
+
+## Flashing script
+
+Download [sd_fusing_xu4.sh](https://git.tizen.org/cgit/platform/kernel/u-boot/plain/scripts/tizen/sd_fusing_xu4.sh?h=tizen)
+
+The file name contains `xu4`, but it also works on the xu3.
+
+
+## Files
+
+```
+dragon@loki:~/Works/tizen/odroid-xu3/flashing$ ls -l
+total 1316
+-rw-rw-r-- 1 dragon dragon   15616 Sep  5 14:41 bl1.bin.hardkernel
+-rw-rw-r-- 1 dragon dragon   14592 Sep  5 14:41 bl2.bin.hardkernel.1mb_uboot
+-rw-rw-r-- 1 dragon dragon  262144 Sep  5 14:41 tzsw.bin.hardkernel
+-rwxr-xr-x 1 dragon dragon 1048576 Sep  4 15:17 u-boot-mmc.bin
+```
+
+# Flash
+
+Host environment
+- Ubuntu 18.04
+- eMMC connected through microUSB from xu3 to host
+
+## Flash boot files
+
+On the target:
+```
+...
+
+CPU: Exynos5422 @ 800 MHz
+
+Model: Odroid XU3 based on EXYNOS5422
+Board: Odroid XU3 based on EXYNOS5422
+Type: xu3
+DRAM: 2 GiB
+MMC: EXYNOS DWMMC: 0, EXYNOS DWMMC: 1
+In: serial
+Out: serial
+Err: serial
+Net: No ethernet found.
+Hit any key to stop autoboot: 0
+ODROID-XU3 #
+
+ODROID-XU3 # mmc list
+EXYNOS DWMMC: 0 (eMMC)
+EXYNOS DWMMC: 1
+
+ODROID-XU3 # ums 0 mmc 0
+
+UMS: LUN 0, dev 0, hwpart 0, sector 0x0, count 0x1d5a000
+
+/
+```
+
+Then on the host:
+```
+$ sudo fdisk -l
+..........
+Partition table entries are not in disk order
+
+Disk /dev/sdh: 32.0 GB, 32010928128 bytes
+64 heads, 32 sectors/track, 30528 cylinders, total 62521344 sectors
+Units = sectors of 1 * 512 = 512 bytes
+Sector size (logical/physical): 512 bytes / 512 bytes
+I/O size (minimum/optimal): 512 bytes / 512 bytes
+Disk identifier: 0x00000000
+
+Device     Boot  Start     End  Blocks  Id  System
+/dev/sdh1  *      8192  139263   65536   e  W95 FAT16 (LBA)
+..........
+```
+
+```
+$ sudo ../sd_fusing_xu4.sh -d /dev/sdh --format \
+ -b bl1.bin.hardkernel bl2.bin.hardkernel.1mb_uboot tzsw.bin.hardkernel u-boot-mmc.bin
+...
+```
+
+The `--format` option will 1) delete the current partitions, 2) create a new partition table, and 3) format each partition.
+
+- If you see a `./sd_fusing_xu4-u1604.sh: line 147: pv: command not found` message and want to get rid of it, install the pv package with `sudo apt-get install pv`
+
+## Flash image files
+```
+$ sudo ../sd_fusing_xu4.sh -d /dev/sdh \
+ -b tizen-unified_20190905.1_tv-boot-armv7l-odroidxu3.tar.gz \
+ tizen-unified_20190905.1_tv-wayland-armv7l-odroidxu3.tar.gz
+```
+
+# After boot
+
+Follow [xu4_tizen](xu4_tizen.md)
+
+# References
+
+- http://suprem.sec.samsung.net/confluence/display/KS/Odroid+XU3
+- http://suprem.sec.samsung.net/confluence/pages/viewpage.action?pageId=104635990
+- http://suprem.sec.samsung.net/confluence/pages/viewpage.action?spaceKey=TPLAB&title=XU3+Image+Flashing
+- http://download.tizen.org/snapshots/tizen/unified/latest/images/standard/
diff --git a/docs/howto/device/xu3_ubuntu.md b/docs/nnfw/howto/device/xu3_ubuntu.md
index 38dbc69b0..38dbc69b0 100644
--- a/docs/howto/device/xu3_ubuntu.md
+++ b/docs/nnfw/howto/device/xu3_ubuntu.md
diff --git a/docs/howto/device/xu4_tizen.md b/docs/nnfw/howto/device/xu4_tizen.md
index 3481be206..3481be206 100644
--- a/docs/howto/device/xu4_tizen.md
+++ b/docs/nnfw/howto/device/xu4_tizen.md
diff --git a/docs/howto/device/xu4_ubuntu.md b/docs/nnfw/howto/device/xu4_ubuntu.md
index 7b8a3aa2b..7b8a3aa2b 100644
--- a/docs/howto/device/xu4_ubuntu.md
+++ b/docs/nnfw/howto/device/xu4_ubuntu.md
diff --git a/docs/project/2018_high_level_design.md b/docs/nnfw/project/2018_high_level_design.md
index 7be495b34..7be495b34 100644
--- a/docs/project/2018_high_level_design.md
+++ b/docs/nnfw/project/2018_high_level_design.md
diff --git a/docs/nnfw/project/2019_requirement_specification.md b/docs/nnfw/project/2019_requirement_specification.md
new file mode 100644
index 000000000..937530766
--- /dev/null
+++ b/docs/nnfw/project/2019_requirement_specification.md
@@ -0,0 +1,131 @@
+# Software Requirement Specification
+
+## Background
+Artificial intelligence (AI) techniques are getting popular and utilized in various products and
+services. While the cloud-based AI techniques have been used to perform compute/memory intensive
+inferences because of the powerful servers on cloud, on-device AI technologies are recently drawing
+attention from the mobile industry for response time reduction, privacy protection, and
+connection-less AI service. Big mobile players, such as Google, Apple, and Huawei, are investing
+their research effort on the on-device AI technologies and already announced hardware and software
+on-device AI solutions. Samsung is not currently leading this trend, but since the on-device AI
+area has just started and is still in its initial state, there are still opportunities to reduce
+the gap between the pioneering companies and Samsung. We believe on-device AI will become a key
+differentiator for mobile phones, TVs, and other home appliances, and thus developing an on-device AI
+software stack is of paramount importance in order to take leadership in the on-device AI
+technology.
+
+Although the vision of on-device AI is promising, enabling on-device AI involves unique technical
+challenges compared to traditional cloud-based approach. This is because on-device AI tries to
+conduct inference tasks solely on device without connecting to cloud resources. Specifically,
+hardware resources on device, such as processor performance, memory capacity, and power budget, are
+very scarce and limit the compute capability, which is typically required to execute complicated
+neural network (NN) models. For example, in one product requirement, a mobile device should consume
+less than 1.2W and may use at most 2W for only 10 minutes due to thermal issues. Next, the on-device
+AI software stack needs to support diverse device environments, since embedded platforms may consist
+of heterogeneous compute devices, such as CPU, GPU, DSP, or neural processing unit (NPU), and use
+different OS platforms, such as Tizen, Android, or various embedded Linux.
+
+To tackle the challenges above and to have the leadership on on-device AI technology, this project
+aims at developing a neural network inference framework specialized and optimized for on-device AI.
+
+
+## Product Context
+
+This project _nnfw_ aims at providing a high-performance, on-device neural network (NN) inference
+framework that performs inference of a given NN model on processors, such as CPU, GPU, or NPU, in
+the target platform, such as Tizen and Smart Machine Platform (SMP).
+
+### Expected Value
+
+We expect the following would be possible with _nnfw_:
+
+- To improve user experience by reducing the service response time.
+- To provide AI services without network connection while achieving similar performance.
+- To protect personal information and company confidential by limiting data transfer to the network.
+
+
+### Success Criteria
+
+The goals of this project are:
+
+- Support mixed acceleration using CPU and GPU
+  + for operator coverage flexibility.
+  + for flexible utilization of computing resources on device.
+- Define a Common IR and Runtime API and perform successful inference using them
+  + for _nncc_ integration as a frontend, letting _nnfw_ itself concentrate on the backend.
+- Support user-implemented kernel extensions for custom operators.
+- Construct the SW infrastructure to support the SR NPU.
+
+
+### Target
+
+_nnfw_ targets following platforms and target devices:
+
+- Odroid-XU4 running Tizen 5.5 (Primary)
+- A variety of Android based mobile phones (Secondary)
+
+
+### Product Roadmap
+
+- March: Set up milestones, tasks, workgroups, initial code structure, and build/test
+ infrastructure.
+- May: Tizen M1 release / Execute InceptionV3 with static scheduling by mixing CPU and GPU on
+ Odroid-XU4.
+- August: Perform inference using NN Package and Runtime API.
+- October: Tizen M2 release / Completed neural network acceleration SW stack integrated with NN
+ Compiler.
+- December: Release NPU SDK v1.0 pre-alpha.
+
+
+## Requirements
+
+### Functionality Requirements
+
+_nnfw_ has the following functionality requirements:
+
+1. CPU/GPU mixed acceleration
+ - Description
+ + Run the model using a mixture of CPU and GPU
+ - Validation
+ + Run the InceptionV3 model by selecting CPU or GPU for individual operators.
+      + Confirm execution results against ground truth (for example, the result of using the CPU
+        or GPU alone).
+1. Support its own input format
+ - Description
+ + Define and support its own input format to ensure file format independence
+      + Define and implement the Common IR
+ - Validation
+ + Read and execute the input model described by Common IR.
+      + Confirm execution results against ground truth (for example, the result of using NNAPI).
+1. Support its own API
+ - Description
+ + Define and implement its own API to replace current NNAPI.
+ - Validation
+ + Perform unit tests and integration tests for individual APIs.
+1. Custom operator support
+ - Description
+      + Define a specification that describes a custom operation, and provide a mechanism for
+        writing and installing the kernel implementation needed to run it at runtime.
+ - Validation
+ + Load and execute the input format that contains the model using the custom op and the kernel
+ implementation of the custom op.
+ + Confirm execution results against ground truth.
+1. Prepare SW infrastructure for NPU support
+ - Description
+ + The runtime must be able to read and process the model information developed for the purpose
+ of execution in NPU.
+ - Validation
+ + Read the model developed for the NPU and run it on the NPU.
+ + Confirm execution results against ground truth.
+
+
+### Non-Functional Requirements
+
+1. Optimizing mixed acceleration performance
+ - Description
+ + Ensure performance above the individual acceleration averages for mixed acceleration using
+ CPU and GPU.
+ - Validation
+ + Measure the inference time for mixed accelerations for the target model.
+ + Compare the result to the average value of the CPU or GPU alone acceleration time for the
+ same model.
diff --git a/docs/nnfw/roadmap.md b/docs/nnfw/roadmap.md
new file mode 100644
index 000000000..c04bab66b
--- /dev/null
+++ b/docs/nnfw/roadmap.md
@@ -0,0 +1,76 @@
+This document describes roadmap of 2019 NN Runtime (or _nnfw_) project.
+
+# Goal
+
+This project _nnfw_ aims at providing a high-performance, on-device neural network (NN) inference
+framework that performs inference of a given NN model on processors, such as CPU, GPU, or NPU, in
+the target platform, such as Tizen and Android.
+
+Last year in 2018, we already saw significant gains in accelerating with a single CPU or GPU
+back-end. Now we want to gain more benefits by using a mixture of CPU and GPU according to each
+operation characteristic. It could give us an opportunity to have a high degree of freedom in terms
+of operator coverage, and possibly provide better performance compared to single back-end
+acceleration.
+
+On the other hand, we are going to introduce a new compiler to the front-end. This will support a
+variety of deep learning frameworks in relatively spacious host PC environments, while the runtime
+running on the target device is intended to take a smaller burden. In this process, the compiler and
+the runtime will effectively share information among themselves by the Common IR, which is referred
+to as the NN Package.
+
+# Architecture
+
+![nnfw_architecture](./fig/nnfw_architecture.png)
+
+The figure above illustrates the overall architecture and scope of _nnfw_, along with _nncc_, a
+sibling project, to help understand. In this document, we will deal specifically with _nnfw_.
+
+_nnfw_ can be divided into three parts: NN API and NN Runtime, as well as NN Compute, which is
+provided by the platform.
+
+1. NN API
+ - Provide a common interface to application.
+ - Last year, Android NN API was selected for seamless integration with TF Lite. As long as our
+ NN runtime provides Android NN API as an interface, TF Lite can link to our NN runtime without
+ any modification.
+  - In choosing Android NN API, we expected standardization and rapid adoption, but the results
+    fell far short of that. We could not control its specification, and its growth rate was too
+    slow to accommodate our needs. So this year we are trying to define our own, the NN Runtime
+    API. (Once the new API is stable, we will provide a way to replace the Android NN API, and it
+    will naturally be deprecated.)
+1. NN Runtime
+ - It already provides significant performance improvements using CPU or GPU acceleration. Now we
+ want to add the flexibility to this by providing various functions suitable to specific device
+ configuration.
+ - Mixed back-end acceleration enables various usage scenarios according to device-specific CPU
+ or GPU configurations and usage conditions.
+  - By introducing an interpreter, it will respond to dynamic conditions that the compiler cannot
+    handle, and it will utilize memory effectively through the memory manager.
+1. NN Compute
+ - Provide computation acceleration library, such as ACL, or device driver for NPU.
+  - This layer will be provided by the OS platform, and we will use the library or device driver
+    as it is. We may request a specific version from the Platform team, but we don't expect to be
+    modifying the library.
+  - This year, we will also introduce an extension mechanism to support custom operations in this
+    layer.
+
+# Deliverables
+
+- On-Device AI SW stack for Tizen
+  + Advanced runtime support with interpreter, memory manager, and execution planner.
+  + Back-end flexibility, such as CPU/GPU mixed acceleration.
+  + Well-designed custom op support.
+  + Basic infrastructure for NPU support.
+- Specification and implementation of Common IR and Runtime API
+
+# Milestones
+
+- [Project Milestones](https://github.sec.samsung.net/orgs/STAR/projects/1)
+- [Monthly Milestones](https://github.sec.samsung.net/STAR/nnfw/projects/25)
+
+# Workgroups (WGs)
+
+- We organize WGs for major topics, and each WG will be working on its own major topic by breaking
+ it into small tasks/issues, performing them inside WG, and collaborating between WGs.
+- The WG information can be found [here](workgroups.md).
+
diff --git a/docs/nnfw/tests/Convolution_manual_3x3.xlsx b/docs/nnfw/tests/Convolution_manual_3x3.xlsx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/tests/Convolution_manual_3x3.xlsx
diff --git a/docs/nnfw/tests/Softmax_manual.xlsx b/docs/nnfw/tests/Softmax_manual.xlsx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/nnfw/tests/Softmax_manual.xlsx
diff --git a/docs/nnfw/workgroups.md b/docs/nnfw/workgroups.md
new file mode 100644
index 000000000..f6f4f9587
--- /dev/null
+++ b/docs/nnfw/workgroups.md
@@ -0,0 +1,65 @@
+# 2019 Workgroups
+
+For faster communication and development, we organize workgroups (WGs) based on major topics
+described in [WBS of 2019 NN Acceleration Solution v2.0 Development
+(#4919)](https://github.sec.samsung.net/STAR/nnfw/issues/4919). All WGs will work together to
+achieve the goal of _nnfw_ project, but each WG will define its own tasks and milestones, set its
+own sprints, and conduct its tasks. All WGs will sync up through github (note that github is our
+primary communication channel, and thus using github for communication is highly recommended) and
+through on/offline meetings.
+
+Current WGs based on the major topics in
+[#4919](https://github.sec.samsung.net/STAR/nnfw/issues/4919) and their root issue links are as
+follows:
+
+1. Runtime WG
+ - Tasks
+ + Runtime improvement
+ + Heterogeneous computing support
+ + Runtime API
+ - Members
+ + Hanjoung Lee(@wateret), Hyeongseok Oh(@hseok82-oh), Chunseok Lee(@chunseok-lee),
+ Yongseop Kim(@YongseopKim), Jiseob Jang(@jiseob-jang), Sanggyu Lee(@sg5-lee),
+ Seongwoo Chae(@sw4670-chae), Sung-Jae Lee(@sj925-lee)
+ + Dilshodzhon Poshshoev(@d-poshshoev), Efimov Alexander(@a-efimov),
+ Pavel Ilyutchenko(@p-iliutchenk)
+2. NN Package WG
+ - Tasks
+ + Common IR design : Hyeongseok Oh(@hseok82-oh), Cheongyo Bahk(@ch-bahk),
+ Seok NamKoong(@sk-namkoong)
+ + Importer/Exporter
+ + Custom Operator Support
+ - Members
+ + Sanggyu Lee(@sg5-lee), Hyeongseok Oh(@hseok82-oh), Cheongyo Bahk(@ch-bahk),
+ Hyun Sik Yoon(@hyunsik-yoon), Seok NamKoong(@sk-namkoong)
+ + Vladimir Plazun(@v-plazun), Sergei Barannikov(@s-barannikov)
+3. Kernel Optimization WG
+ - Tasks
+ + NN model analysis
+ + NN operator analysis
+ + Kernel implementation optimization
+ - Members
+ + Chunseok Lee(@chunseok-lee), Jiyoung Yun(@jy910-yun), Jiseob Jang(@jiseob-jang),
+ SaeHie Park(@seanshpark)
+ + Andrey Shedko(@a-shedko), Sergei Barannikov(@s-barannikov)
+
+There is one more workgroup, which contains most _nncc_ tasks. It works closely with the others as
+a front-end to the runtime. Since _nncc_ and _nnfw_ will be merged soon, it is natural to think of
+them as one team. In fact, most of the members are already involved in both projects.
+
+4. Compiler Frontend
+ - Tasks
+    + NN Compiler (in-memory) IR design & implementation : Jonghyun Park(@jh1302.park)
+ + TensorFlow model support
+ + ONNX model support
+ + Target independent graph optimization
+ - Members
+ + Seok NamKoong(@sk-namkoong), Cheongyo Bahk(@ch-bahk), Hyun Sik Yoon(@hyunsik-yoon),
+ Jiyoung Yun(@jy910-yun), Seongwoo Chae(@sw4670-chae), Jiseob Jang(@jiseob-jang),
+ SaeHie Park(@seanshpark)
+ + Pavel Ilyutchenko(@p-iliutchenk), Sergei Barannikov(@s-barannikov) , Ivan Vagin(@ivan-vagin)
+
+# How to join
+
+If you would like to participate in any WGs above or create a new WG, please create an issue or
+leave a comment at [#4919](https://github.sec.samsung.net/STAR/nnfw/issues/4919).
diff --git a/externals/CMakeLists.txt b/externals/CMakeLists.txt
deleted file mode 100644
index 1c8a14a7d..000000000
--- a/externals/CMakeLists.txt
+++ /dev/null
@@ -1,122 +0,0 @@
-set(TENSORFLOW_BASE ${CMAKE_CURRENT_SOURCE_DIR}/tensorflow)
-set(TENSORFLOW_LITE_BASE ${TENSORFLOW_BASE}/tensorflow/contrib/lite)
-
-# Required source & package
-nnfw_find_package(AbslSource REQUIRED)
-nnfw_find_package(Eigen REQUIRED)
-nnfw_find_package(FarmhashSource REQUIRED)
-nnfw_find_package(FlatBuffersSource REQUIRED)
-nnfw_find_package(GEMMLowpSource REQUIRED)
-nnfw_find_package(TensorFlowSource REQUIRED)
-
-#
-# Tensorflow Lite library
-#
-file(GLOB TFLITE_CORE_SRCS "${TENSORFLOW_LITE_BASE}/*.c" "${TENSORFLOW_LITE_BASE}/*.cc")
-file(GLOB TFLITE_CORE_TESTS "${TENSORFLOW_LITE_BASE}/*test*.cc")
-list(REMOVE_ITEM TFLITE_CORE_SRCS ${TFLITE_CORE_TESTS})
-
-file(GLOB_RECURSE TFLITE_KERNEL_SRCS "${TENSORFLOW_LITE_BASE}/kernels/*.cc")
-file(GLOB_RECURSE TFLITE_KERNEL_TESTS "${TENSORFLOW_LITE_BASE}/kernels/*test*.cc")
-list(REMOVE_ITEM TFLITE_KERNEL_SRCS ${TFLITE_KERNEL_TESTS})
-
-file(GLOB TFLITE_LIB_SRCS "${TENSORFLOW_LITE_BASE}/c/*.c" "${TENSORFLOW_LITE_BASE}/c/*.cc")
-file(GLOB TFLITE_LIB_TESTS "${TENSORFLOW_LITE_BASE}/c/*test*.cc")
-list(REMOVE_ITEM TFLITE_LIB_SRCS ${TFLITE_LIB_TESTS})
-
-file(GLOB TFLITE_API_SRCS "${TENSORFLOW_LITE_BASE}/core/api/*.c" "${TENSORFLOW_LITE_BASE}/core/api/*.cc")
-file(GLOB TFLITE_API_TESTS "${TENSORFLOW_LITE_BASE}/core/api/*test*.cc")
-list(REMOVE_ITEM TFLITE_API_SRCS ${TFLITE_API_TESTS})
-
-file(GLOB TFLITE_PROFILING_SRCS "${TENSORFLOW_LITE_BASE}/profiling/*.cc")
-file(GLOB TFLITE_PROFILING_TESTS "${TENSORFLOW_LITE_BASE}/profiling/*test*.cc")
-list(REMOVE_ITEM TFLITE_PROFILING_SRCS ${TFLITE_PROFILING_TESTS})
-
-# We will use our own BuiltinOpResolver
-list(REMOVE_ITEM TFLITE_KERNEL_SRCS "${TENSORFLOW_LITE_BASE}/kernels/register.cc")
-# We will use our own summarizer
-list(REMOVE_ITEM TFLITE_PROFILING_SRCS "${TENSORFLOW_LITE_BASE}/profiling/profile_summarizer.cc")
-list(APPEND TFLITE_SRCS ${TFLITE_CORE_SRCS})
-list(APPEND TFLITE_SRCS ${TFLITE_KERNEL_SRCS})
-list(APPEND TFLITE_SRCS ${TFLITE_LIB_SRCS})
-list(APPEND TFLITE_SRCS ${TFLITE_API_SRCS})
-list(APPEND TFLITE_SRCS ${TFLITE_PROFILING_SRCS})
-
-list(APPEND TFLITE_SRCS "${TFLITE_DEPEND_DIR}/farmhash/src/farmhash.cc")
-
-list(APPEND TFLITE_INCLUDES "${CMAKE_CURRENT_SOURCE_DIR}/tensorflow")
-list(APPEND TFLITE_INCLUDES "${TFLITE_DEPEND_DIR}/absl")
-list(APPEND TFLITE_INCLUDES "${TFLITE_DEPEND_DIR}/gemmlowp")
-list(APPEND TFLITE_INCLUDES "${TFLITE_DEPEND_DIR}/farmhash/src")
-list(APPEND TFLITE_INCLUDES "${TFLITE_DEPEND_DIR}/flatbuffers/include")
-
-if(BUILD_IS_NATIVE AND NOT HOST_ARCH_BASE STREQUAL "arm")
- # Flatbuffer build
- nnfw_find_package(FlatBuffers)
- # Required external sourcefor x86-64 build
- nnfw_find_package(NEON2SSESource REQUIRED)
- list(APPEND TFLITE_INCLUDES "${TFLITE_DEPEND_DIR}/neon_2_sse")
-endif()
-
-# This kernels are not used on nnfw
-## spectrogram
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/audio_spectrogram.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/audio_spectrogram_test.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/spectrogram.cc")
-## mfcc
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc_dct.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc_mel_filterbank.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/mfcc.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/mfcc_test.cc")
-
-if("${TARGET_OS}" STREQUAL "android")
- if(NOT DEFINED NDK_DIR)
- file(GLOB NDK_DIRS "${CMAKE_SOURCE_DIR}/tools/cross/ndk/*")
- list(LENGTH NDK_DIRS NDK_DIRS_COUNT)
- if (NDK_DIRS_COUNT EQUAL 1)
- set(NDK_DIR "${NDK_DIRS}")
- endif(NDK_DIRS_COUNT EQUAL 1)
- endif(NOT DEFINED NDK_DIR)
-
- if(NOT DEFINED NDK_DIR)
- message(FATAL_ERROR "NDK_DIR should be specified via environment variable")
- endif()
- message(STATUS "Found NDK: ${NDK_DIR}")
- list(APPEND TFLITE_INCLUDES "${NDK_DIR}")
-endif()
-
-add_library(tensorflow-lite ${TFLITE_SRCS})
-target_include_directories(tensorflow-lite PUBLIC ${TFLITE_INCLUDES})
-target_compile_definitions(tensorflow-lite PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
-if(BUILD_TFLITE_BENCHMARK_MODEL)
- target_compile_definitions(tensorflow-lite PUBLIC "TFLITE_PROFILING_ENABLED")
-endif()
-target_link_libraries(tensorflow-lite eigen ${LIB_PTHREAD} dl)
-
-if("${TARGET_OS}" STREQUAL "android")
- target_link_libraries(tensorflow-lite log)
- #
- # Tensorflow Lite JNI library
- #
- set(TFLITE_JNI_BASE ${TENSORFLOW_LITE_BASE}/java/src/main/native)
- set(TFLITE_JNI_SRCS ${TFLITE_JNI_BASE}/duration_utils_jni.cc
- ${TFLITE_JNI_BASE}/exception_jni.cc
- ${TFLITE_JNI_BASE}/nativeinterpreterwrapper_jni.cc
- ${TFLITE_JNI_BASE}/tensor_jni.cc
- ${TFLITE_JNI_BASE}/tensorflow_lite_jni.cc
- ${TFLITE_JNI_BASE}/builtin_ops_jni.cc
- )
- set(TFLITE_JNI_INCLUDES ${TENSORFLOW_LITE_BASE}/java/src/native)
-
- # We need this for running vanilla tflite
- # TODO remove this when nnfw is used
- set(TFLITE_SRCS_V ${TENSORFLOW_LITE_BASE}/kernels/register.cc)
-
- # TODO use tensorflow-lite static library instead of compiling all the sources again
- add_library(tensorflowlite_jni SHARED ${TFLITE_JNI_SRCS} ${TFLITE_SRCS} ${TFLITE_SRCS_V})
- target_include_directories(tensorflowlite_jni PUBLIC ${TFLITE_JNI_INCLUDES} ${TFLITE_INCLUDES})
- target_link_libraries(tensorflowlite_jni eigen ${LIB_PTHREAD} dl)
- target_link_libraries(tensorflowlite_jni log)
- install(TARGETS tensorflowlite_jni DESTINATION lib)
-endif()
diff --git a/externals/nnapi_test_generator/README.md b/externals/nnapi_test_generator/README.md
deleted file mode 100644
index 37d3af0c7..000000000
--- a/externals/nnapi_test_generator/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# nnapi test generator
-
-_nnapi test generator_ aims at generating NN public C API tests.
-
-To generate tests, run the following command.
-
-```
-$ runtimes/tests/neural_networks_test/specs/generate_test.sh
-```
-
-Original code is at https://android.googlesource.com/platform/frameworks/ml/+/efd22b6.
diff --git a/include/NeuralNetworks.h b/include/NeuralNetworks.h
deleted file mode 100644
index 6414af688..000000000
--- a/include/NeuralNetworks.h
+++ /dev/null
@@ -1,2578 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @addtogroup NeuralNetworks
- * @{
- */
-
-/**
- * @file NeuralNetworks.h
- */
-
-#ifndef ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_H
-#define ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_H
-
-/******************************************************************
- *
- * IMPORTANT NOTICE:
- *
- * This file is part of Android's set of stable system headers
- * exposed by the Android NDK (Native Development Kit).
- *
- * Third-party source AND binary code relies on the definitions
- * here to be FROZEN ON ALL UPCOMING PLATFORM RELEASES.
- *
- * - DO NOT MODIFY ENUMS (EXCEPT IF YOU ADD NEW 32-BIT VALUES)
- * - DO NOT MODIFY CONSTANTS OR FUNCTIONAL MACROS
- * - DO NOT CHANGE THE SIGNATURE OF FUNCTIONS IN ANY WAY
- * - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES
- */
-
-#include <stddef.h>
-#include <stdint.h>
-#include <sys/cdefs.h>
-
-__BEGIN_DECLS
-
-/**
- * Operand types.
- *
- * The type of operands that can be added to a model.
- *
- * Although we define many types, most operators accept just a few
- * types. Most used are {@link ANEURALNETWORKS_TENSOR_FLOAT32},
- * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
- * and {@link ANEURALNETWORKS_INT32}.
- */
-typedef enum {
- /** A 32 bit floating point scalar value. */
- ANEURALNETWORKS_FLOAT32 = 0,
- /** A signed 32 bit integer scalar value. */
- ANEURALNETWORKS_INT32 = 1,
- /** An unsigned 32 bit integer scalar value. */
- ANEURALNETWORKS_UINT32 = 2,
-
- /** A tensor of 32 bit floating point values. */
- ANEURALNETWORKS_TENSOR_FLOAT32 = 3,
- /** A tensor of 32 bit integer values. */
- ANEURALNETWORKS_TENSOR_INT32 = 4,
- /**
- * A tensor of 8 bit integers that represent real numbers.
- *
- * Attached to this tensor are two numbers that can be used to convert the
- * 8 bit integer to the real value and vice versa. These two numbers are:
- * - scale: a 32 bit floating point value greater than zero.
- * - zeroPoint: a 32 bit integer, in range [0, 255].
- *
- * The formula is:
- * real_value = (integer_value - zeroPoint) * scale.
- */
- ANEURALNETWORKS_TENSOR_QUANT8_ASYMM = 5,
-} OperandCode;
-
-/**
- * Operation types.
- *
- * The type of operations that can be added to a model.
- */
-typedef enum {
- /**
- * Adds two tensors, element-wise.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible
- * dimensions. The output is the sum of both input tensors, optionally
- * modified by an activation function.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its
- * way forward.
- *
- * Example:
- *
- * input1.dimension = {4, 1, 2}
- * input2.dimension = {5, 4, 3, 1}
- * output.dimension = {5, 4, 3, 2}
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The sum, a tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_ADD = 0,
-
- /**
- * Performs a 2-D average pooling operation.
- *
- * The output dimensions are functions of the filter dimensions, stride, and
- * padding.
- *
- * The values in the output tensor are computed as:
- *
- * output[batch, row, col, channel] =
- * sum_{i, j}(input[batch, row + i, col + j, channel]) / sum(1)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4, with "NHWC" (i.e., Num_samples, Height, Width,
- * and Channels) data layout.
- *
- * Both explicit padding and implicit padding are supported.
- *
- * Inputs (explicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the left, in the ‘width’ dimension.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the right, in the ‘width’ dimension.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the top, in the ‘height’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the bottom, in the ‘height’ dimension.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * width.
- * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * height.
- * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Inputs (implicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
- * padding scheme, has to be one of the
- * {@link PaddingCode} values.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * width.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * height.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape
- [batches, out_height, out_width, depth].
- */
- ANEURALNETWORKS_AVERAGE_POOL_2D = 1,
-
- /**
- * Concatenates the input tensors along the given dimension.
- *
- * The input tensors must have identical {@link OperandCode} and the same
- * dimensions except the dimension along the concatenation axis.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0 ~ n-1: The list of n input tensors, of shape
- * [D0, D1, ..., Daxis(i), ..., Dm]. For inputs of
- * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, all input tensors
- * must have the same scale and zeroPoint.
- * * n: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
- * concatenation axis.
- *
- * Outputs:
- * * 0: The output, a tensor of the same {@link OperandCode} as the input
- * tensors. The output shape is [D0, D1, ..., sum(Daxis(i)), ..., Dm].
- */
- ANEURALNETWORKS_CONCATENATION = 2,
-
- /**
- * Performs an 2-D convolution operation.
- *
- * The CONV_2D op sweeps a 2-D filter that can mix channels together over a
- * batch of images, applying the filter to each window of each image of the
- * appropriate size.
- *
- * The output dimensions are functions of the filter dimensions, stride, and
- * padding.
- *
- * The values in the output tensor are computed as:
- *
- * output[batch, row, col, channel] =
- * sum_{i, j} (
- * input[batch, row + i, col + j, k] *
- * filter[channel, row + i, col + j, k] +
- * bias[channel]
- * )
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Both explicit padding and implicit padding are supported.
- *
- * Inputs (explicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
- * specifying the input.
- * * 1: A 4-D tensor, of shape
- * [depth_out, filter_height, filter_width, depth_in], specifying the
- * filter.
- * * 2: A 1-D tensor, of shape [depth_out], specifying the bias.
- * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the bias
- * should also be of {@link ANEURALNETWORKS_TENSOR_FLOAT32}. For input
- * tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias
- * should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
- * 0 and bias_scale == input_scale * filter_scale.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the left, in the ‘width’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the right, in the ‘width’ dimension.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the top, in the ‘height’ dimension.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the bottom, in the ‘height’ dimension.
- * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Inputs (implicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
- * specifying the input.
- * * 1: A 4-D tensor, of shape
- * [depth_out, filter_height, filter_width, depth_in], specifying the
- * filter.
- * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
- * tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the bias should
- * also be of {@link ANEURALNETWORKS_TENSOR_FLOAT32}. For input tensor
- * of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias should be
- * of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of 0 and
- * bias_scale == input_scale * filter_scale.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
- * padding scheme, has to be one of the
- * {@link PaddingCode} values.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape
- * [batches, out_height, out_width, depth_out]. For output tensor of
- * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the following condition
- * must be satisfied: output_scale > input_scale * filter_scale.
- */
- ANEURALNETWORKS_CONV_2D = 3,
-
- /**
- * Performs a depthwise 2-D convolution operation.
- *
- * Given an input tensor of shape [batches, height, width, depth_in] and a
- * filter tensor of shape [1, filter_height, filter_width, depth_out]
- * containing depth_out convolutional filters of depth 1, DEPTHWISE_CONV
- * applies a different filter to each input channel (expanding from 1
- * channel to channel_multiplier channels for each), then concatenates the
- * results together.
- *
- * The output has depth_out = depth_in * depth_multiplier channels.
- * The output dimensions are functions of the filter dimensions, stride, and
- * padding.
- *
- * The values in the output tensor are computed as:
- *
- * output[b, i, j, k * channel_multiplier + q] =
- * sum_{di, dj} (
- * input[b, strides[1] * i + di, strides[2] * j + dj, k] *
- * filter[1, di, dj, k * channel_multiplier + q]
- * )
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Both explicit padding and implicit padding are supported.
- *
- * Inputs (explicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
- * specifying the input.
- * * 1: A 4-D tensor, of shape [1, filter_height, filter_width, depth_out],
- * specifying the filter.
- * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
- * tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the bias should
- * also be of {@link ANEURALNETWORKS_TENSOR_FLOAT32}. For input tensor
- * of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias should be
- * of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of 0 and
- * bias_scale == input_scale * filter_scale.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the left, in the ‘width’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the right, in the ‘width’ dimension.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the top, in the ‘height’ dimension.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the bottom, in the ‘height’ dimension.
- * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 9: An {@link ANEURALNETWORKS_INT32} scalar, specifying the depthwise
- * multiplier.
- * * 10: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Inputs (implicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
- * specifying the input.
- * * 1: A 4-D tensor, of shape [1, filter_height, filter_width, depth_out],
- * specifying the filter.
- * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
- * tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the bias should
- * also be of {@link ANEURALNETWORKS_TENSOR_FLOAT32}. For input tensor
- * of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias should be
- * of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of 0 and
- * bias_scale == input_scale * filter_scale.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
- * padding scheme, has to be one of the
- * {@link PaddingCode} values.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the depthwise
- * multiplier.
- * * 7: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape
- * [batches, out_height, out_width, depth_out]. For output tensor of
- * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the following condition
- * must be satisfied: output_scale > input_scale * filter_scale.
- */
- ANEURALNETWORKS_DEPTHWISE_CONV_2D = 4,
-
- /**
- * Rearranges data from depth into blocks of spatial data.
- *
- * More specifically, this op outputs a copy of the input tensor where
- * values from the depth dimension are moved in spatial blocks to the height
- * and width dimensions. The value block_size indicates the input block size
- * and how the data is moved.
- *
- * Chunks of data of size block_size * block_size from depth are rearranged
- * into non-overlapping blocks of size block_size x block_size.
- *
- * The width of the output tensor is input_depth * block_size, whereas the
- * height is input_height * block_size. The depth of the input tensor must
- * be divisible by block_size * block_size
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Inputs:
- * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
- * specifying the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the block_size.
- * block_size must be >=1 and block_size * block_size must be a divisor
- * of the input depth.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape [batch, height*block_size,
- * width*block_size, depth/(block_size*block_size)].
- */
- ANEURALNETWORKS_DEPTH_TO_SPACE = 5,
-
- /**
- * Dequantizes the input tensor.
- *
- * The formula is:
- *
- * output = (input - zeroPoint) * scale.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0, but with
- * {@link ANEURALNETWORKS_TENSOR_FLOAT32}.
- */
- ANEURALNETWORKS_DEQUANTIZE = 6,
-
- /**
- * Looks up sub-tensors in the input tensor.
- *
- * This operator takes for input a tensor of values (Values) and
- * a one-dimensional tensor of selection indices (Lookups).
- * The output tensor is the concatenation of sub-tensors of Values as
- * selected by Lookups.
- *
- * Think of Values as being sliced along its first dimension:
- * The entries in Lookups select which slices are concatenated together
- * to create the output tensor.
- *
- * For example, if Values has shape of [40, 200, 300] and
- * Lookups has shape of [3], all three values found in Lookups are
- * expected to be between 0 and 39. The resulting tensor must
- * have shape of [3, 200, 300].
- *
- * If a value in Lookups is out of bounds, the operation must fail
- * and an error must be reported.
- *
- * Inputs:
- * * 0: Lookups. A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}.
- * The values are indices into the first dimension of Values.
- * * 1: Values. An n-D tensor, where n >= 2, from which sub-tensors are
- * extracted.
- *
- * Output:
- * * 0: An n-D tensor with the same rank and shape as the Values
- * tensor, except for the first dimension which has the same size
- * as Lookups' only dimension.
- */
- ANEURALNETWORKS_EMBEDDING_LOOKUP = 7,
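
As a sketch of the slicing described above, assume a float Values tensor whose trailing dimensions have been flattened into row_size elements per slice. The helper is hypothetical; a production implementation must also report an error for out-of-bounds indices, as required above.

static void embedding_lookup_ref(const int32_t* lookups, size_t num_lookups,
                                 const float* values, size_t row_size,
                                 float* out)
{
  for (size_t i = 0; i < num_lookups; ++i)
    for (size_t j = 0; j < row_size; ++j)  /* out[i, :] = values[lookups[i], :] */
      out[i * row_size + j] = values[(size_t)lookups[i] * row_size + j];
}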
-
- /**
- * Computes element-wise floor() on the input tensor.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} and dimensions as
- * the input tensor.
- */
- ANEURALNETWORKS_FLOOR = 8,
-
- /**
- * Denotes a fully (densely) connected layer, which connects all elements
- * in the input tensor with each element in the output tensor.
- *
- * This layer implements the operation:
- *
- * outputs = activation(inputs * weights’ + bias)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4.
- *
- * Inputs:
- * * 0: A tensor of at least rank 2, specifying the input. If rank is
- * greater than 2, then it gets flattened to a 2-D Tensor. The
- * (flattened) 2-D Tensor is reshaped (if necessary) to
- * [batch_size, input_size], where "input_size" corresponds to the
- * number of inputs to the layer, matching the second dimension of
- * weights, and "batch_size" is calculated by dividing the number of
- * elements by "input_size".
- * * 1: A 2-D tensor, specifying the weights, of shape
- * [num_units, input_size], where "num_units" corresponds to the number
- * of output nodes.
- * * 2: A 1-D tensor, of shape [num_units], specifying the bias. For input
- * tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the bias should
- * also be of {@link ANEURALNETWORKS_TENSOR_FLOAT32}. For input tensor
- * of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias should be
- * of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of 0 and
- * bias_scale == input_scale * filter_scale.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The output tensor, of shape [batch_size, num_units]. For output
- * tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the following
- * condition must be satisfied:
- * output_scale > input_scale * filter_scale.
- */
- ANEURALNETWORKS_FULLY_CONNECTED = 9,
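
A minimal float sketch of outputs = activation(inputs * weights' + bias), with weights stored [num_units, input_size] as documented above (hypothetical helper; the FuseCode activation is left as a final element-wise step):

static void fully_connected_ref(const float* input, const float* weights,
                                const float* bias, float* output,
                                int batch_size, int input_size, int num_units)
{
  for (int b = 0; b < batch_size; ++b)
    for (int u = 0; u < num_units; ++u)
    {
      float acc = bias[u];  /* row u of weights dotted with row b of input */
      for (int i = 0; i < input_size; ++i)
        acc += input[b * input_size + i] * weights[u * input_size + i];
      output[b * num_units + u] = acc;  /* then apply the fused activation */
    }
}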
-
- /**
- * Looks up sub-tensors in the input tensor using a key-value map.
- *
- * This operator takes as input a tensor of values (Values),
- * a one-dimensional tensor of selection values (Lookups) and
- * a one-dimensional tensor that maps these values to Values
- * indexes. The output tensor is the concatenation of sub-tensors of
- * Values as selected by Lookups via Keys.
- *
- * Think of Values as being sliced along its outer-most dimension.
- * The output is a concatenation of selected slices, with one slice
- * for each entry of Lookups. The slice selected is the one at the
- * same index as the Maps entry that matches the value in Lookups.
- *
- * For a hit, the corresponding sub-tensor of Values is included
- * in the Output tensor. For a miss, the corresponding sub-tensor in
- * Output must have zero values.
- *
- * For example, if Values has shape of [40, 200, 300],
- * Keys should have a shape of [40]. If the Lookups tensor has a shape
- * of [3], three slices are concatenated, so the resulting tensor
- * must have the shape of [3, 200, 300]. If the first entry in Lookups
- * has the value 123456, that value must be located in Keys tensor.
- * If the sixth entry of Keys contains 123456, the sixth slice of Values
- * must be selected. If no entry in Keys has 123456, a slice of zeroes
- * must be concatenated.
- *
- * Inputs:
- * * 0: Lookups. A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor with
- * shape [ k ].
- * * 1: Keys. A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor with shape
- * [ n ]; Keys and Values pair represent a map, i.e., the ith element
- * in Keys (Keys[i]) is the key to select the ith sub-tensor in Values
- * (Values[i]), where 0 <= i <= n-1. Keys tensor *MUST* be sorted in
- * ascending order.
- * * 2: Values. A tensor with shape of [ n, … ]; i.e., the first dimension
- * must be n.
- *
- * Outputs:
- * * 0: Output. A tensor with shape [ k, … ].
- * * 1: Hits. A boolean tensor with shape [ k ] indicating whether the
- * lookup hits (True) or not (False).
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True, a hit. A zero indicates otherwise.
- */
- ANEURALNETWORKS_HASHTABLE_LOOKUP = 10,
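
A sketch of the hit/miss semantics above for 2-D float Values (hypothetical helper; since Keys is sorted ascending, a real implementation would use binary search rather than this linear scan):

static void hashtable_lookup_ref(const int32_t* lookups, int k,
                                 const int32_t* keys, int n,
                                 const float* values, size_t row_size,
                                 float* out, uint8_t* hits)
{
  for (int i = 0; i < k; ++i)
  {
    int found = -1;
    for (int j = 0; j < n; ++j)
      if (keys[j] == lookups[i]) { found = j; break; }
    hits[i] = (uint8_t)(found >= 0);  /* 1 on hit, 0 on miss */
    for (size_t c = 0; c < row_size; ++c)
      out[(size_t)i * row_size + c] =
          (found >= 0) ? values[(size_t)found * row_size + c] : 0.0f;
  }
}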
-
- /**
- * Applies L2 normalization along the depth dimension.
- *
- * The values in the output tensor are computed as:
- *
- * output[batch, row, col, channel] =
- * input[batch, row, col, channel] /
- * sqrt(sum_{c} pow(input[batch, row, col, c], 2))
- *
- * For input tensor with more dimensions, independently normalizes each 1-D
- * slice along dimension dim.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: 4, with "NHWC" data layout (i.e., Num_samples,
- * Height, Width, and Channels).
- *
- * Inputs:
- * * 0: A 4-D tensor, of shape [batches, height, width, depth].
- *
- * Outputs:
- * * 0: The output 4-D tensor, of the same shape as input
- * [batches, height, width, depth].
- */
- ANEURALNETWORKS_L2_NORMALIZATION = 11,
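
The per-position formula above, sketched for a buffer flattened to positions spatial locations of depth channels each (hypothetical helper; requires <math.h>, and like the reference formula it has no guard against an all-zero vector):

static void l2_normalize_ref(const float* in, float* out,
                             size_t positions, size_t depth)
{
  for (size_t p = 0; p < positions; ++p)
  {
    float sum = 0.0f;
    for (size_t c = 0; c < depth; ++c)
      sum += in[p * depth + c] * in[p * depth + c];
    const float inv = 1.0f / sqrtf(sum);  /* divide by the L2 norm */
    for (size_t c = 0; c < depth; ++c)
      out[p * depth + c] = in[p * depth + c] * inv;
  }
}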
-
- /**
- * Performs a 2-D L2 pooling operation.
- *
- * The output dimensions are functions of the filter dimensions, stride, and
- * padding.
- *
- * The values in the output tensor are computed as:
- *
- * output[batch, row, col, channel] =
- * sqrt(sum_{i, j} pow(input[batch, row + i, col + j, channel], 2) /
- * sum(1))
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Both explicit padding and implicit padding are supported.
- *
- * Inputs (explicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the left, in the ‘width’ dimension.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the right, in the ‘width’ dimension.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the top, in the ‘height’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the bottom, in the ‘height’ dimension.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * width.
- * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * height.
- * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Inputs (implicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
- * padding scheme, has to be one of the
- * {@link PaddingCode} values.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * width.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * height.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape
- * [batches, out_height, out_width, depth].
- */
- ANEURALNETWORKS_L2_POOL_2D = 12,
-
- /**
- * Applies Local Response Normalization along the depth dimension.
- *
- * The 4-D input tensor is treated as a 3-D array of 1-D vectors (along the
- * last dimension), and each vector is normalized independently. Within a
- * given vector, each component is divided by the weighted, squared sum of
- * inputs within depth_radius.
- *
- * The output is calculated using this formula:
- *
- * sqr_sum[a, b, c, d] = sum(
- * pow(input[a, b, c, d - depth_radius : d + depth_radius + 1], 2))
- * output = input / pow((bias + alpha * sqr_sum), beta)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Inputs:
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the radius of
- * the normalization window.
- * * 2: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the bias, must
- * not be zero.
- * * 3: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the scale
- * factor, alpha.
- * * 4: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the exponent,
- * beta.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0.
- */
- ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION = 13,
-
- /**
- * Computes sigmoid activation on the input tensor element-wise.
- *
- * The output is calculated using this formula:
- *
- * output = 1 / (1 + exp(-input))
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4.
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0.
- * For {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
- * the scale must be 1.f / 256 and the zeroPoint must be 0.
- */
- ANEURALNETWORKS_LOGISTIC = 14,
-
- /**
- * Projects an input to a bit vector via locality-sensitive hashing.
- *
- * Inputs:
- * * 0: Hash functions. Dim.size == 2, DataType: Float.
- * Tensor[0].Dim[0]: Number of hash functions.
- * Tensor[0].Dim[1]: Number of seeds per hash function.
- * Tensor[0].Dim[1] <= 32 in the sparse case.
- *
- * * 1: Input. Dim.size >= 1, no restriction on DataType.
- * * 2: Weight. Optional. Dim.size == 1, DataType: Float.
- * If not set, each input element is considered to have the same weight
- * of 1.0.
- * Tensor[1].Dim[0] == Tensor[2].Dim[0]
- * * 3: Type:
- * Sparse: Value LSHProjectionType_SPARSE(=1).
- * Computed bit vector is considered to be sparse.
- * Each output element is an int32 made up of multiple bits
- * computed from hash functions.
- *
- * Dense: Value LSHProjectionType_DENSE(=2).
- * Computed bit vector is considered to be dense. Each output
- * element represents a bit and can take the value of either
- * 0 or 1.
- *
- * Outputs:
- * * 0: If the projection type is sparse:
- * Output.Dim == { Tensor[0].Dim[0] }
- * A tensor of int32 that represents hash signatures.
- * If the projection type is Dense:
- * Output.Dim == { Tensor[0].Dim[0] * Tensor[0].Dim[1] }
- * A flattened tensor that represents projected bit vectors.
- */
- ANEURALNETWORKS_LSH_PROJECTION = 15,
-
- /**
- * Performs a single time step in a Long Short-Term Memory (LSTM) layer
- *
- * The LSTM operation is described by the following equations.
- *
- * \f{eqnarray*}{
- * i_t =& \sigma(W_{xi}x_t+W_{hi}h_{t-1}+W_{ci}C_{t-1}+b_i) & \\
- * f_t =& \sigma(W_{xf}x_t+W_{hf}h_{t-1}+W_{cf}C_{t-1}+b_f) & \\
- * C_t =& clip(f_t \odot C_{t-1} + i_t \odot
- * g(W_{xc}x_t+W_{hc}h_{t-1}+b_c),\ t_{cell}) & \\
- * o_t =& \sigma(W_{xo}x_t+W_{ho}h_{t-1}+W_{co}C_t+b_o) & \\
- * & & \\
- * & clip(W_{proj}(o_t \odot g(C_t))+b_{proj},\ t_{proj})
- * & if\ there\ is\ a\ projection; \\
- * h_t =& & \\
- * & o_t \odot g(C_t) & otherwise. \\
- * \f}
- * Where:
- * * \f$x_t\f$ is the input,
- * * \f$i_t\f$ is the input gate,
- * * \f$f_t\f$ is the forget gate,
- * * \f$C_t\f$ is the cell state,
- * * \f$o_t\f$ is the output,
- * * \f$h_t\f$ is the output state,
- * * \f$\sigma\f$ is the logistic sigmoid function,
- * * \f$g\f$ is the cell input and cell output activation function, usually
- * \f$tanh\f$,
- * * \f$W_{xi}\f$ is the input-to-input weight matrix,
- * * \f$W_{hi}\f$ is the recurrent-to-input weight matrix,
- * * \f$W_{ci}\f$ is the cell-to-input weight matrix,
- * * \f$b_i\f$ is the input gate bias,
- * * \f$W_{xf}\f$ is the input-to-forget weight matrix,
- * * \f$W_{hf}\f$ is the recurrent-to-forget weight matrix,
- * * \f$W_{cf}\f$ is the cell-to-forget weight matrix,
- * * \f$b_f\f$ is the forget gate bias,
- * * \f$W_{xc}\f$ is the input-to-cell weight matrix,
- * * \f$W_{hc}\f$ is the recurrent-to-cell weight matrix,
- * * \f$b_c\f$ is the cell bias,
- * * \f$W_{xo}\f$ is the input-to-output weight matrix,
- * * \f$W_{ho}\f$ is the recurrent-to-output weight matrix,
- * * \f$W_{co}\f$ is the cell-to-output weight matrix,
- * * \f$b_o\f$ is the output gate bias,
- * * \f$W_{proj}\f$ is the projection weight matrix,
- * * \f$b_{proj}\f$ is the projection bias,
- * * \f$t_{cell}\f$ is the threshold for clipping the cell state, and
- * * \f$t_{proj}\f$ is the threshold for clipping the projected output.
- * * \f$\odot\f$ is the
- * <a href="https://en.wikipedia.org/wiki/Hadamard_product_(matrices)">
- * Hadamard product</a> that takes two matrices and produces another
- * matrix, each element of which is the product of the corresponding
- * elements of the input matrices.
- *
- * The operation has the following independently optional inputs:
- * * The input-to-input weights (\f$W_{xi}\f$), recurrent-to-input weights
- * (\f$W_{hi}\f$), cell-to-input (\f$W_{ci}\f$) weights, and input gate
- * bias (\f$b_i\f$) either all have values, or none of them have values
- * (i.e., all set to null). If they have no values, coupling of input and
- * forget gates (CIFG) is used, in which case the input gate (\f$i_t\f$)
- * is calculated using the following equation instead.
- * \f{eqnarray*}{
- * i_t = 1 - f_t
- * \f}
- * * The cell-to-forget weights (\f$W_{cf}\f$) and cell-to-output weights
- * (\f$W_{co}\f$) either both have values or neither of them have values.
- * If they have values, the peephole optimization is used. Additionally,
- * if CIFG is not used, the cell-to-input weights (\f$W_{ci}\f$) are
- * also required to have values for the peephole optimization.
- * * The projection weights (\f$W_{proj}\f$) are required only for the
- * recurrent projection layer, and should otherwise have no value.
- * * The projection bias (\f$b_{proj}\f$) may (but is not required to)
- * have a value if the recurrent projection layer exists, and should
- * otherwise have no value.
- *
- * References:
- *
- * The default non-peephole non-CIFG implementation is based on:
- * http://www.bioinf.jku.at/publications/older/2604.pdf
- * S. Hochreiter and J. Schmidhuber. "Long Short-Term Memory". Neural
- * Computation, 9(8):1735-1780, 1997.
- *
- * The peephole implementation and projection layer is based on:
- * https://research.google.com/pubs/archive/43905.pdf
- * Hasim Sak, Andrew Senior, and Francoise Beaufays. "Long short-term memory
- * recurrent neural network architectures for large scale acoustic
- * modeling." INTERSPEECH, 2014.
- * (However, the concept of peephole optimization was introduced in work
- * prior to this paper.)
- *
- * The coupling of input and forget gate (CIFG) is based on:
- * http://arxiv.org/pdf/1503.04069.pdf
- * Greff et al. "LSTM: A Search Space Odyssey"
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Inputs:
- * * 0: The input (\f$x_t\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, input_size], where “batch_size” corresponds to the
- * batching dimension, and “input_size” is the size of the input.
- * * 1: The input-to-input weights (\f$W_{xi}\f$). Optional.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, input_size], where “num_units” corresponds to the
- * number of cell units.
- * * 2: The input-to-forget weights (\f$W_{xf}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, input_size].
- * * 3: The input-to-cell weights (\f$W_{xc}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, input_size].
- * * 4: The input-to-output weights (\f$W_{xo}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, input_size].
- * * 5: The recurrent-to-input weights (\f$W_{hi}\f$). Optional.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, output_size], where “output_size” corresponds to either
- * the number of cell units (i.e., “num_units”), or the second
- * dimension of the “projection_weights”, if defined.
- * * 6: The recurrent-to-forget weights (\f$W_{hf}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, output_size].
- * * 7: The recurrent-to-cell weights (\f$W_{hc}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, output_size].
- * * 8: The recurrent-to-output weights (\f$W_{ho}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, output_size].
- * * 9: The cell-to-input weights (\f$W_{ci}\f$). Optional.
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 10:The cell-to-forget weights (\f$W_{cf}\f$). Optional.
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 11:The cell-to-output weights (\f$W_{co}\f$). Optional.
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 12:The input gate bias (\f$b_i\f$). Optional.
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 13:The forget gate bias (\f$b_f\f$).
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 14:The cell bias (\f$b_c\f$).
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 15:The output gate bias (\f$b_o\f$).
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 16:The projection weights (\f$W_{proj}\f$). Optional.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [output_size, num_units].
- * * 17:The projection bias (\f$b_{proj}\f$). Optional.
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [output_size].
- * * 18:The output state (in) (\f$h_{t-1}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, output_size].
- * * 19:The cell state (in) (\f$C_{t-1}\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, num_units].
- * * 20:The activation function (\f$g\f$).
- * A value indicating the activation function:
- * <ul>
- * <li>0: None;
- * <li>1: Relu;
- * <li>3: Relu6;
- * <li>4: Tanh;
- * <li>6: Sigmoid.
- * </ul>
- * * 21:The clipping threshold (\f$t_{cell}\f$) for the cell state, such
- * that values are bound within [-cell_clip, cell_clip]. If set to 0.0
- * then clipping is disabled.
- * * 22:The clipping threshold (\f$t_{proj}\f$) for the output from the
- * projection layer, such that values are bound within
- * [-proj_clip, proj_clip]. If set to 0.0 then clipping is disabled.
- *
- * Outputs:
- * * 0: The scratch buffer.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, num_units * 4] with CIFG, or
- * [batch_size, num_units * 3] without CIFG.
- * * 1: The output state (out) (\f$h_t\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, output_size].
- * * 2: The cell state (out) (\f$C_t\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, num_units].
- * * 3: The output (\f$o_t\f$).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, output_size]. This is effectively the same as the
- * current “output state (out)” value.
- */
- ANEURALNETWORKS_LSTM = 16,
-
- /**
- * Performs a 2-D max pooling operation.
- *
- * The output dimensions are functions of the filter dimensions, stride, and
- * padding.
- *
- * The values in the output tensor are computed as:
- *
- * output[batch, row, col, channel] =
- * max_{i, j} (input[batch, row + i, col + j, channel])
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Both explicit padding and implicit padding are supported.
- *
- * Inputs (explicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the left, in the ‘width’ dimension.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the right, in the ‘width’ dimension.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the top, in the ‘height’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
- * the bottom, in the ‘height’ dimension.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * width.
- * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * height.
- * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Inputs (implicit padding):
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
- * padding scheme, has to be one of the
- * {@link PaddingCode} values.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘height’ dimension.
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * width.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
- * height.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape
- * [batches, out_height, out_width, depth].
- */
- ANEURALNETWORKS_MAX_POOL_2D = 17,
-
- /**
- * Multiplies two tensors, element-wise.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible
- * dimensions. The output is the product of both input tensors, optionally
- * modified by an activation function.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the resulting output is the maximum size along each dimension
- * of the input operands. It starts with the trailing dimensions, and works
- * its way forward.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: The product, a tensor of the same {@link OperandCode} as input0.
- * For output tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
- * the following condition must be satisfied:
- * output_scale > input1_scale * input2_scale.
- */
- ANEURALNETWORKS_MUL = 18,
-
- /**
- * Computes rectified linear activation on the input tensor element-wise.
- *
- * The output is calculated using this formula:
- *
- * output = max(0, input)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4.
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0.
- */
- ANEURALNETWORKS_RELU = 19,
-
- /**
- * Computes rectified linear 1 activation on the input tensor element-wise.
- *
- * The output is calculated using this formula:
- *
- * output = min(1.f, max(-1.f, input))
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4.
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0.
- */
- ANEURALNETWORKS_RELU1 = 20,
-
- /**
- * Computes rectified linear 6 activation on the input tensor element-wise.
- *
- * The output is calculated using this formula:
- *
- * output = min(6, max(0, input))
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4.
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0.
- */
- ANEURALNETWORKS_RELU6 = 21,
-
- /**
- * Reshapes a tensor.
- *
- * Given tensor, this operation returns a tensor that has the same values as
- * tensor, but with a newly specified shape.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4.
- *
- * Inputs:
- * * 0: A tensor, specifying the tensor to be reshaped.
- * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, defining the
- * shape of the output tensor. The number of elements implied by shape
- * must be the same as the number of elements in the input tensor.
- *
- * Outputs:
- * * 0: The output tensor, of shape specified by the input shape.
- */
- ANEURALNETWORKS_RESHAPE = 22,
-
- /**
- * Resizes images to a given size using bilinear interpolation.
- *
- * Resized images will be distorted if their output aspect ratio is not the
- * same as the input aspect ratio. The corner pixels of the output may not
- * be the same as the corner pixels of the input.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Inputs:
- * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
- * the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the height of
- * the output tensor.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the width of
- * the output tensor.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape
- * [batches, new_height, new_width, depth].
- */
- ANEURALNETWORKS_RESIZE_BILINEAR = 23,
-
- /**
- * A basic recurrent neural network layer.
- *
- * This layer implements the operation:
- * outputs = state = activation(inputs * input_weights +
- * state * recurrent_weights + bias)
- *
- * Where:
- * * “input_weights” is a weight matrix that multiplies the inputs;
- * * “recurrent_weights” is a weight matrix that multiplies the current
- * “state” which itself is the output from the previous time step
- * computation;
- * * “bias” is a bias vector (added to each output vector in the batch);
- * * “activation” is the function passed as the “fused_activation_function”
- * argument (if not “NONE”).
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Inputs:
- * * 0: input.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32} of shape
- * [batch_size, input_size], where “batch_size” corresponds to the
- * batching dimension, and “input_size” is the size of the input.
- * * 1: weights.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, input_size], where “num_units” corresponds to the
- * number of units.
- * * 2: recurrent_weights.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, num_units], with columns corresponding to the weights
- * from each unit.
- * * 3: bias.
- * A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units].
- * * 4: hidden state (in).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, num_units].
- * * 5: fused_activation_function.
- * An optional {@link FuseCode} value indicating the
- * activation function. If “NONE” is specified then it results in a
- * linear activation.
- *
- * Outputs:
- * * 0: hidden state (out).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, num_units].
- *
- * * 1: output.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, num_units]. This is effectively the same as the
- * current state value.
- */
- ANEURALNETWORKS_RNN = 24,
-
- /**
- * Computes the softmax activation on the input tensor element-wise, per
- * batch, first shifting the input vector so that its maximum coefficient
- * is zero (for numerical stability).
- *
- * The output is calculated using this formula:
- *
- * output[batch, i] =
- * exp((input[batch, i] - max(input[batch, :])) * beta) /
- * sum_{k}{exp((input[batch, k] - max(input[batch, :])) * beta)}
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 2 or 4.
- *
- * Inputs:
- * * 0: A 2-D or 4-D tensor, specifying the input.
- * * 1: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the positive
- * scaling factor for the exponent, beta.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0.
- * For {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
- * the scale must be 1.f / 256 and the zeroPoint must be 0.
- */
- ANEURALNETWORKS_SOFTMAX = 25,
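
A sketch of the formula above for one batch row, showing why the maximum is subtracted first: every exponent is then non-positive, so expf cannot overflow (hypothetical helper; requires <math.h>):

static void softmax_row_ref(const float* in, float* out, size_t n, float beta)
{
  float max_v = in[0];
  for (size_t i = 1; i < n; ++i)
    if (in[i] > max_v) max_v = in[i];
  float sum = 0.0f;
  for (size_t i = 0; i < n; ++i)
  {
    out[i] = expf((in[i] - max_v) * beta);  /* exponent is always <= 0 */
    sum += out[i];
  }
  for (size_t i = 0; i < n; ++i)
    out[i] /= sum;  /* normalize so the row sums to 1 */
}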
-
- /**
- * Rearranges blocks of spatial data into depth.
- *
- * More specifically, this op outputs a copy of the input tensor where
- * values from the height and width dimensions are moved to the depth
- * dimension. The value block_size indicates the input block size and how
- * the data is moved.
- *
- * Non-overlapping blocks of size block_size x block_size from the height
- * and width dimensions are rearranged into chunks of size
- * block_size * block_size along the depth dimension.
- *
- * The depth of the output tensor is input_depth * block_size * block_size.
- * The input tensor's height and width must be divisible by block_size.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4, with "NHWC" data layout.
- *
- * Inputs:
- * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
- * specifying the input.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the block_size.
- * block_size must be >=1 and block_size must be a divisor of both the
- * input height and width.
- *
- * Outputs:
- * * 0: The output 4-D tensor, of shape [batches, height/block_size,
- * width/block_size, depth_in*block_size*block_size].
- */
- ANEURALNETWORKS_SPACE_TO_DEPTH = 26,
-
- /**
- * SVDF op is a kind of stateful layer derived from the notion that a
- * densely connected layer that's processing a sequence of input frames can
- * be approximated by using a singular value decomposition of each of its
- * nodes. The implementation is based on:
- *
- * https://research.google.com/pubs/archive/43813.pdf
- *
- * P. Nakkiran, R. Alvarez, R. Prabhavalkar, C. Parada.
- * “Compressing Deep Neural Networks using a Rank-Constrained Topology”.
- * INTERSPEECH, 2015.
- *
- * It processes the incoming input using a 2-stage filtering mechanism:
- * * stage 1 performs filtering on the "features" dimension, whose outputs
- * get pushed into a memory of fixed-size memory_size.
- * * stage 2 performs filtering on the "time" dimension of the memory_size
- * memoized outputs of stage 1.
- *
- * Specifically, for rank 1, this layer implements the operation:
- *
- * memory = push(conv1d(inputs, weights_feature, feature_dim,
- * "ANEURALNETWORKS_PADDING_VALID"));
- * outputs = activation(memory * weights_time + bias);
- *
- * Where:
- * * “weights_feature” is a weights matrix that processes the inputs (by
- * convolving the input with every “feature filter”), and whose outputs
- * get pushed, stacked in order, into the fixed-size “memory” (the oldest
- * entry gets dropped);
- * * “weights_time” is a weights matrix that processes the “memory” (by a
- * batched matrix multiplication on the num_units);
- * * “bias” is an optional bias vector (added to each output vector in the
- * batch); and
- * * “activation” is the function passed as the “fused_activation_function”
- * argument (if not “NONE”).
- *
- * Each rank adds a dimension to the weights matrices by means of stacking
- * the filters.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Inputs:
- * * 0: input.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, input_size], where “batch_size” corresponds to the
- * batching dimension, and “input_size” is the size of the input.
- * * 1: weights_feature.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, input_size], where “num_units” corresponds to the
- * number of units.
- * * 2: weights_time.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [num_units, memory_size], where “memory_size” corresponds to the
- * fixed-size of the memory.
- * * 3: bias.
- * An optional 1-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32},
- * of shape [num_units].
- * * 4: state (in).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, (memory_size - 1) * num_units * rank].
- * * 5: rank.
- * The rank of the SVD approximation.
- * * 6: fused_activation_function.
- * An optional {@link FuseCode} value indicating the
- * activation function. If “NONE” is specified then it results in a
- * linear activation.
- *
- * Outputs:
- * * 0: state (out).
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, (memory_size - 1) * num_units * rank].
- * * 1: output.
- * A 2-D tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, of shape
- * [batch_size, num_units].
- */
- ANEURALNETWORKS_SVDF = 27,
-
- /**
- * Computes hyperbolic tangent of input tensor element-wise.
- *
- * The output is calculated using this formula:
- *
- * output = tanh(input)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4.
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- *
- * Outputs:
- * * 0: The output tensor of same shape as input0.
- */
- ANEURALNETWORKS_TANH = 28,
-
- // TODO: make the description easier to understand.
- /**
- * BatchToSpace for N-dimensional tensors.
- *
- * This operation reshapes the batch dimension (dimension 0) into M + 1
- * dimensions of shape block_shape + [batch], interleaves these blocks back
- * into the grid defined by the spatial dimensions [1, ..., M], to obtain a
- * result with the same rank as the input.
- *
- * This is the reverse of SpaceToBatch.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the tensor to be reshaped
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the block
- * sizes for each spatial dimension of the input tensor. All values
- * must be >= 1.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_BATCH_TO_SPACE_ND = 29,
-
- /**
- * Element-wise division of two tensors.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible
- * dimensions. The output is the result of dividing the first input tensor
- * by the second, optionally modified by an activation function.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Example:
- * input1.dimension = {4, 1, 2}
- * input2.dimension = {5, 4, 3, 1}
- * output.dimension = {5, 4, 3, 2}
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_DIV = 30,
-
- /**
- * Computes the mean of elements across dimensions of a tensor.
- *
- * Reduces the input tensor along the given dimensions to reduce. Unless
- * keep_dims is true, the rank of the tensor is reduced by 1 for each entry
- * in axis. If keep_dims is true, the reduced dimensions are retained with
- * length 1.
- *
- * If dimensions to reduce have no entries, all dimensions are reduced, and
- * a tensor with a single element is returned.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
- * to reduce. If None (the default), reduces all dimensions. Must be in
- * the range [-rank(input_tensor), rank(input_tensor)).
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, keep_dims. If positive,
- * retains reduced dimensions with length 1.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_MEAN = 31,
-
- /**
- * Pads a tensor.
- *
- * This operation pads a tensor according to the specified paddings.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the tensor to be padded.
- * * 1: A 2-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the paddings
- * for each spatial dimension of the input tensor. The shape of the
- * tensor must be {rank(input0), 2}.
- * padding[i, 0] specifies the number of elements to be padded in the
- * front of dimension i.
- * padding[i, 1] specifies the number of elements to be padded after the
- * end of dimension i.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0. The
- * output tensor has the same rank as input0, and each
- * dimension of the output tensor has the same size as the
- * corresponding dimension of the input tensor plus the size
- * of the padding:
- * output0.dimension[i] =
- * padding[i, 0] + input0.dimension[i] + padding[i, 1]
- */
- ANEURALNETWORKS_PAD = 32,
-
- // TODO: make the description easier to understand.
- /**
- * SpaceToBatch for N-Dimensional tensors.
- *
- * This operation divides "spatial" dimensions [1, ..., M] of the input into
- * a grid of blocks of shape block_shape, and interleaves these blocks with
- * the "batch" dimension (0) such that in the output, the spatial dimensions
- * [1, ..., M] correspond to the position within the grid, and the batch
- * dimension combines both the position within a spatial block and the
- * original batch position. Prior to division into blocks, the spatial
- * dimensions of the input are optionally zero padded according to paddings.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the input.
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the block
- * sizes for each spatial dimension of the input tensor. All values
- * must be >= 1.
- * * 2: A 2-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the paddings
- * for each spatial dimension of the input tensor. All values must be
- * >= 0. The shape of the tensor must be {rank(input0), 2}.
- * padding[i, 0] specifies the number of elements to be padded in the
- * front of dimension i.
- * padding[i, 1] specifies the number of elements to be padded after the
- * end of dimension i.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_SPACE_TO_BATCH_ND = 33,
-
- /**
- * Removes dimensions of size 1 from the shape of a tensor.
- *
- * Given a tensor input, this operation returns a tensor of the same
- * {@link OperandCode} with all dimensions of size 1 removed. If you don't
- * want to remove all size 1 dimensions, you can remove specific size 1
- * dimensions by specifying the axes (input1).
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, the tensor to be squeezed.
- * * 1: An optional 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The
- * dimensions to squeeze. If specified only squeezes the dimensions
- * listed. Otherwise, squeezes all dimensions. The dimension index
- * starts at 0. An error must be reported if squeezing a dimension that
- * is not 1.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0. Contains the
- * same data as input, but has one or more dimensions of size 1
- * removed.
- */
- ANEURALNETWORKS_SQUEEZE = 34,
-
- /**
- * Extracts a strided slice of a tensor.
- *
- * Roughly speaking, this op extracts a slice of size (end - begin) / stride
- * from the given input tensor. Starting at the location specified by begin
- * the slice continues by adding stride to the index until all dimensions
- * are not less than end. Note that a stride can be negative, which causes a
- * reverse slice.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the tensor to be sliced.
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the starts of
- * the dimensions of the input tensor to be sliced. The length must
- * be equal to rank(input0).
- * * 2: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the ends of
- * the dimensions of the input tensor to be sliced. The length must
- * be equal to rank(input0).
- * * 3: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the strides of
- * the dimensions of the input tensor to be sliced. The length must
- * be equal to rank(input0).
- * * 4: An {@link ANEURALNETWORKS_INT32} scalar, begin_mask. If the ith bit
- * of begin_mask is set, begin[i] is ignored and the fullest possible
- * range in that dimension is used instead.
- * * 5: An {@link ANEURALNETWORKS_INT32} scalar, end_mask. If the ith bit of
- * end_mask is set, end[i] is ignored and the fullest possible range in
- * that dimension is used instead.
- * * 6: An {@link ANEURALNETWORKS_INT32} scalar, shrink_axis_mask. An int32
- * mask. If the ith bit of shrink_axis_mask is set, it implies that the
- * ith specification shrinks the dimensionality by 1. A slice of size 1
- * starting from begin[i] in the dimension must be preserved.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_STRIDED_SLICE = 35,
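
A worked example of the mask semantics above, on a 1-D tensor of length 8 (values chosen for illustration only):

/*
 * begin = {1}, end = {7}, strides = {2}       -> elements at indices 1, 3, 5
 * same, with bit 0 of begin_mask set          -> begin[0] is ignored; the
 *                                                slice starts at index 0,
 *                                                giving indices 0, 2, 4, 6
 * begin = {3}, bit 0 of shrink_axis_mask set  -> a slice of size 1 at
 *                                                index 3, with that
 *                                                dimension removed
 */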
-
- /**
- * Element-wise subtraction of two tensors.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible
- * dimensions. The output is the result of subtracting the second input
- * tensor from the first one, optionally modified by an activation function.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Example:
- * input1.dimension = {4, 1, 2}
- * input2.dimension = {5, 4, 3, 1}
- * output.dimension = {5, 4, 3, 2}
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
- * {@link FuseCode} values. Specifies the activation to
- * invoke on the result.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_SUB = 36,
-
- /**
- * Transposes the input tensor, permuting the dimensions according to the
- * perm tensor.
- *
- * The returned tensor's dimension i corresponds to the input dimension
- * perm[i]. If perm is not given, it is set to (n-1...0), where n is the
- * rank of the input tensor. Hence by default, this operation performs a
- * regular matrix transpose on 2-D input Tensors.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the tensor to be transposed.
- * * 1: An optional 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32},
- * the permutation of the dimensions of the input tensor.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_TRANSPOSE = 37,
-} OperationCode;
-
-/**
- * Fused activation function types.
- *
- */
-typedef enum {
- /** NO fused activation function. */
- ANEURALNETWORKS_FUSED_NONE = 0,
- /** Fused ReLU activation function. */
- ANEURALNETWORKS_FUSED_RELU = 1,
- /** Fused ReLU1 activation function. */
- ANEURALNETWORKS_FUSED_RELU1 = 2,
- /** Fused ReLU6 activation function. */
- ANEURALNETWORKS_FUSED_RELU6 = 3,
-} FuseCode;
-
-/**
- * Implicit padding algorithms.
- *
- */
-typedef enum {
- /**
- * SAME padding.
- * Padding on both ends is the "same":
- * padding_to_beginning = total_padding / 2
- * padding_to_end = (total_padding + 1) / 2.
- * i.e., for an even amount of total padding, both ends receive exactly
- * the same padding; for an odd amount, the end receives one more unit
- * of padding than the beginning.
- *
- * total_padding is a function of input size, stride, and filter size.
- * It could be computed as follows:
- * out_size = (input_size + stride - 1) / stride
- * needed_input = (out_size - 1) * stride + filter_size
- * total_padding = max(0, needed_input - input_size)
- * The computation is the same for the horizontal and vertical directions.
- */
- ANEURALNETWORKS_PADDING_SAME = 1,
-
- /**
- * VALID padding.
- * No padding. When the input size is not evenly divisible by
- * the filter size, the input at the end that could not fill
- * the whole filter tile will simply be ignored.
- */
- ANEURALNETWORKS_PADDING_VALID = 2,
-} PaddingCode;
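
A worked instance of the SAME-padding arithmetic above, with input_size = 10, stride = 3, and filter_size = 4 (numbers chosen for illustration only):

/*
 * out_size             = (10 + 3 - 1) / 3 = 4
 * needed_input         = (4 - 1) * 3 + 4  = 13
 * total_padding        = max(0, 13 - 10)  = 3
 * padding_to_beginning = 3 / 2            = 1
 * padding_to_end       = (3 + 1) / 2      = 2
 */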
-
-/**
- * Execution preferences.
- */
-typedef enum {
- /**
- * Prefer executing in a way that minimizes battery drain.
- * This is desirable for compilations that will be executed often.
- */
- ANEURALNETWORKS_PREFER_LOW_POWER = 0,
- /**
- * Prefer returning a single answer as fast as possible, even if this causes
- * more power consumption.
- */
- ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER = 1,
- /**
- * Prefer maximizing the throughput of successive frames, for example when
- * processing successive frames coming from the camera.
- */
- ANEURALNETWORKS_PREFER_SUSTAINED_SPEED = 2,
-} PreferenceCode;
-
-/**
- * Result codes.
- */
-typedef enum {
- ANEURALNETWORKS_NO_ERROR = 0,
- ANEURALNETWORKS_OUT_OF_MEMORY = 1,
- ANEURALNETWORKS_INCOMPLETE = 2,
- ANEURALNETWORKS_UNEXPECTED_NULL = 3,
- ANEURALNETWORKS_BAD_DATA = 4,
- ANEURALNETWORKS_OP_FAILED = 5,
- ANEURALNETWORKS_BAD_STATE = 6,
- ANEURALNETWORKS_UNMAPPABLE = 7,
-} ResultCode;
-
-/**
- * For {@link ANeuralNetworksModel_setOperandValue}, values with a
- * length smaller or equal to this will be immediately copied into
- * the model. The size is in bytes.
- */
-enum {
- ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES = 128
-};
-
-/**
- * ANeuralNetworksMemory is an opaque type that represents memory.
- *
- * This type is used to represent shared memory, memory mapped files,
- * and similar memories.
- *
- * By using shared memory, a program can efficiently communicate to the
- * runtime and drivers the tensors that define a model. See
- * {@link ANeuralNetworksModel_setOperandValueFromMemory}. An application
- * should typically create one shared memory object that contains every tensor
- * needed to define a model. {@link ANeuralNetworksMemory_createFromFd} can be
- * used to create shared memory from a file handle.
- *
- * Memory objects can also be used to specify the input and output arguments of
- * an execution. See {@link ANeuralNetworksExecution_setInputFromMemory}
- * and {@link ANeuralNetworksExecution_setOutputFromMemory}.
- */
-typedef struct ANeuralNetworksMemory ANeuralNetworksMemory;
-
-/**
- * ANeuralNetworksModel is an opaque type that contains a description of the
- * mathematical operations that constitute the model.
- *
- * <p>Build the model by calling<ul>
- * <li>{@link ANeuralNetworksModel_create}</li>
- * <li>{@link ANeuralNetworksModel_addOperation}</li>
- * <li>{@link ANeuralNetworksModel_addOperand}</li>
- * </ul>
- *
- * This forms a graph in which each operation and operand is a node, a
- * directed edge from an operand to an operation indicates that the
- * operand is an input to the operation, and a directed edge from an
- * operation to an operand indicates that the operand is an output
- * from the operation. This graph must be acyclic.
- *
- * A model is completed by calling {@link ANeuralNetworksModel_finish}.
- * A model is destroyed by calling {@link ANeuralNetworksModel_free}.
- *
- * <p>A model cannot be modified once {@link ANeuralNetworksModel_finish}
- * has been called on it.</p>
- *
- * <p>It is the application's responsibility to make sure that only one thread
- * modifies a model at a given time. It is however safe for more than one
- * thread to use the model once {@link ANeuralNetworksModel_finish} has returned.</p>
- *
- * <p>It is also the application's responsibility to ensure that there are no other
- * uses of the model after calling {@link ANeuralNetworksModel_free}.
- * This includes any compilation or execution object created using the model.</p>
- */
-typedef struct ANeuralNetworksModel ANeuralNetworksModel;
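
A sketch of the build sequence described above, for a model computing one fused addition. Error handling is elided, the shapes are illustrative, and ANEURALNETWORKS_ADD is assumed to be the addition operation defined earlier in this header.

ANeuralNetworksModel* model = NULL;
ANeuralNetworksModel_create(&model);

uint32_t dims[1] = {4};
ANeuralNetworksOperandType tensor = {ANEURALNETWORKS_TENSOR_FLOAT32, 1, dims, 0.0f, 0};
ANeuralNetworksOperandType scalar = {ANEURALNETWORKS_INT32, 0, NULL, 0.0f, 0};

ANeuralNetworksModel_addOperand(model, &tensor);  /* operand 0: input a   */
ANeuralNetworksModel_addOperand(model, &tensor);  /* operand 1: input b   */
ANeuralNetworksModel_addOperand(model, &scalar);  /* operand 2: fuse code */
ANeuralNetworksModel_addOperand(model, &tensor);  /* operand 3: output    */

int32_t fuse = ANEURALNETWORKS_FUSED_NONE;
ANeuralNetworksModel_setOperandValue(model, 2, &fuse, sizeof(fuse));

uint32_t add_in[3] = {0, 1, 2}, add_out[1] = {3};
ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, add_in, 1, add_out);

uint32_t model_in[2] = {0, 1};
ANeuralNetworksModel_identifyInputsAndOutputs(model, 2, model_in, 1, add_out);
ANeuralNetworksModel_finish(model);  /* the graph is now immutable */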
-
-/**
- * ANeuralNetworksCompilation is an opaque type that can be used to compile
- * a machine learning model.
- *
- * <p>To use:<ul>
- * <li>Create a new compilation instance by calling the
- * {@link ANeuralNetworksCompilation_create} function.</li>
- * <li>Set any desired properties on the compilation (for example,
- * {@link ANeuralNetworksCompilation_setPreference}).</li>
- * <li>Complete the compilation with {@link ANeuralNetworksCompilation_finish}.</li>
- * <li>Use the compilation as many times as needed
- * with {@link ANeuralNetworksExecution_create}.</li>
- * <li>Destroy the compilation with {@link ANeuralNetworksCompilation_free}
- * once all executions using the compilation have completed.</li></ul></p>
- *
- * A compilation is completed by calling {@link ANeuralNetworksCompilation_finish}.
- * A compilation is destroyed by calling {@link ANeuralNetworksCompilation_free}.
- *
- * <p>A compilation cannot be modified once {@link ANeuralNetworksCompilation_finish}
- * has been called on it.</p>
- *
- * <p>It is the application's responsibility to make sure that only
- * one thread modifies a compilation at a given time. It is however
- * safe for more than one thread to use the compilation once
- * {@link ANeuralNetworksCompilation_finish} has returned.</p>
- *
- * <p>It is also the application's responsibility to ensure that there are no other
- * uses of the compilation after calling {@link ANeuralNetworksCompilation_free}.
- * This includes any execution object created using the compilation.</p>
- */
-typedef struct ANeuralNetworksCompilation ANeuralNetworksCompilation;
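
The lifecycle listed above, as a sketch (error handling elided; model is assumed to be a finished ANeuralNetworksModel):

ANeuralNetworksCompilation* compilation = NULL;
ANeuralNetworksCompilation_create(model, &compilation);
ANeuralNetworksCompilation_setPreference(compilation,
                                         ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER);
ANeuralNetworksCompilation_finish(compilation);  /* now immutable and reusable */
/* ... create and run executions here ... */
ANeuralNetworksCompilation_free(compilation);    /* after all executions end */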
-
-/**
- * ANeuralNetworksExecution is an opaque type that can be used to apply a machine
- * learning model to a set of inputs.
- *
- * <p>To use:<ul>
- * <li>Create a new execution instance by calling the
- * {@link ANeuralNetworksExecution_create} function.</li>
- * <li>Associate input buffers or memory regions to the model inputs with
- * {@link ANeuralNetworksExecution_setInput} or
- * {@link ANeuralNetworksExecution_setInputFromMemory}.</li>
- * <li>Associate output buffers or memory regions to the model outputs with
- * {@link ANeuralNetworksExecution_setOutput} or
- * {@link ANeuralNetworksExecution_setOutputFromMemory}.</li>
- * <li>Apply the model with {@link ANeuralNetworksExecution_startCompute}.</li>
- * <li>Wait for the execution to complete with {@link
- * ANeuralNetworksEvent_wait}.</li>
- * <li>Destroy the execution with
- * {@link ANeuralNetworksExecution_free}.</li></ul></p>
- *
- * <p>An output buffer or memory region must not overlap with any
- * other output buffer or memory region, with an input buffer or
- * memory region, or with an operand value in a memory object
- * ({@link ANeuralNetworksModel_setOperandValueFromMemory}).</p>
- *
- * <p>An execution cannot be modified once {@link ANeuralNetworksExecution_startCompute}
- * has been called on it.</p>
- *
- * <p>An execution can be applied to a model with
- * {@link ANeuralNetworksExecution_startCompute} only once. Create new executions
- * to do new evaluations of the model.</p>
- *
- * <p>It is the application's responsibility to make sure that only one thread
- * modifies an execution at a given time. It is however safe for more than one
- * thread to use {@link ANeuralNetworksEvent_wait} at the same time.</p>
- *
- * <p>It is also the application's responsibility to ensure that there are no other
- * uses of the execution after calling {@link ANeuralNetworksExecution_free}.</p>
- */
-typedef struct ANeuralNetworksExecution ANeuralNetworksExecution;
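
One evaluation following the sequence above, as a sketch (the buffers and sizes are illustrative; error handling elided):

float a[4] = {1, 2, 3, 4}, b[4] = {5, 6, 7, 8}, result[4];

ANeuralNetworksExecution* execution = NULL;
ANeuralNetworksExecution_create(compilation, &execution);
ANeuralNetworksExecution_setInput(execution, 0, NULL, a, sizeof(a));
ANeuralNetworksExecution_setInput(execution, 1, NULL, b, sizeof(b));
ANeuralNetworksExecution_setOutput(execution, 0, NULL, result, sizeof(result));

ANeuralNetworksEvent* event = NULL;
ANeuralNetworksExecution_startCompute(execution, &event);
ANeuralNetworksEvent_wait(event);          /* block until the run completes */
ANeuralNetworksEvent_free(event);
ANeuralNetworksExecution_free(execution);  /* one startCompute per execution */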
-
-/**
- * ANeuralNetworksOperandType describes the type of an operand.
- * This structure is used to describe both scalars and tensors.
- *
- * A tensor operand type must have a specified rank (number of
- * dimensions) but may have any of its dimensions unspecified.
- *
- * A tensor operand type with all dimensions specified is "fully
- * specified". Whenever possible (i.e., whenever the dimensions are
- * known at model construction time), a tensor operand type should be
- * (but is not required to be) fully specified, in order to enable the
- * best possible performance.
- *
- * If a tensor operand's type is not fully specified, the dimensions
- * of the operand are deduced from the operand types and values of the
- * operation for which that operand is an output.
- *
- * <p>In the following situations, a tensor operand type must be fully
- * specified:<ul>
- * <li>The operand has a constant value, set by
- * {@link ANeuralNetworksModel_setOperandValue} (with a
- * non-nullptr buffer) or
- * {@link ANeuralNetworksModel_setOperandValueFromMemory}.</li>
- * <li>The operand is a model input or model output (see
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs}). A
- * fully specified tensor operand type must either be provided
- * to {@link ANeuralNetworksModel_addOperand}; or it must be
- * provided to the corresponding
- * {@link ANeuralNetworksExecution_setInput},
- * {@link ANeuralNetworksExecution_setInputFromMemory},
- * {@link ANeuralNetworksExecution_setOutput}, or
- *        {@link ANeuralNetworksExecution_setOutputFromMemory}.
- * EXCEPTION: If the input or output is optional and omitted
- * (by passing nullptr for buffer to
- * {@link ANeuralNetworksExecution_setInput} or
- * {@link ANeuralNetworksExecution_setOutput}) then it need
- * not have a fully specified tensor operand type.</li></ul>
- *
- * A tensor operand type with some number of unspecified dimensions is
- * represented by setting each unspecified dimension to 0.
- */
-typedef struct ANeuralNetworksOperandType {
- /** The data type, e.g. ANEURALNETWORKS_TENSOR_FLOAT32. */
- int32_t type;
- /** The number of dimensions (rank). It should be 0 for scalars. */
- uint32_t dimensionCount;
- /** The dimensions of the tensor. It should be nullptr for scalars. */
- const uint32_t* dimensions;
- /** These two fields are only used for quantized tensors.
- * They should be zero for scalars and non-fixed point tensors.
- * The dequantized value of each entry is (value - zeroPoint) * scale.
- */
- float scale;
- int32_t zeroPoint;
-} ANeuralNetworksOperandType;
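-
-/*
- * Illustrative usage (a minimal sketch): a fully specified 2 x 3 float32
- * tensor type and an int32 scalar type.
- *
- *   uint32_t dims[2] = {2, 3};
- *   ANeuralNetworksOperandType tensorType = {
- *       ANEURALNETWORKS_TENSOR_FLOAT32, 2, dims, 0.0f, 0};
- *   // Scalars use rank 0, a NULL dimension array, and zero scale/zeroPoint.
- *   ANeuralNetworksOperandType scalarType = {
- *       ANEURALNETWORKS_INT32, 0, NULL, 0.0f, 0};
- */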
-
-typedef int32_t ANeuralNetworksOperationType;
-
-/**
- * ANeuralNetworksEvent is an opaque type that represents an event
- * that will be signaled once an execution completes.
- */
-typedef struct ANeuralNetworksEvent ANeuralNetworksEvent;
-
-
-/**
- * Creates a shared memory object from a file descriptor.
- *
- * The shared memory is backed by a file descriptor via mmap.
- * See {@link ANeuralNetworksMemory} for a description on how to use
- * this shared memory.
- *
- * @param size The requested size in bytes.
- * Must not be larger than the file size.
- * @param protect The desired memory protection for the mapping.
- * It is either PROT_NONE or the bitwise OR of one or
- * more of the following flags: PROT_READ, PROT_WRITE.
- * @param fd The requested file descriptor.
- * The file descriptor has to be mmap-able. The file
- * descriptor will be duplicated.
- * @param offset The offset to the beginning of the file of the area to map.
- * The offset has to be aligned to a page size.
- * @param memory The memory object to be created.
- * Set to NULL if unsuccessful.
- *
- * @return ANEURALNETWORKS_NO_ERROR if the request completed normally.
- */
-int ANeuralNetworksMemory_createFromFd(size_t size, int protect, int fd, size_t offset,
- ANeuralNetworksMemory** memory);
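-
-/*
- * Illustrative usage (a minimal sketch; "weights.bin" and fileSize are
- * hypothetical): map a read-only file into an ANeuralNetworksMemory.
- *
- *   #include <fcntl.h>
- *   #include <sys/mman.h>
- *   #include <unistd.h>
- *
- *   int fd = open("weights.bin", O_RDONLY);
- *   ANeuralNetworksMemory* memory = NULL;
- *   if (fd >= 0 && ANeuralNetworksMemory_createFromFd(
- *                      fileSize, PROT_READ, fd, 0, &memory) ==
- *                      ANEURALNETWORKS_NO_ERROR) {
- *     close(fd);  // safe: the runtime duplicates the descriptor
- *   }
- */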
-
-/**
- * Delete a memory object.
- *
- * Destroys the object used by the run time to keep track of the memory.
- * This will free the underlying actual memory if no other code has open
- * handles to this memory.
- *
- * @param memory The memory object to be freed.
- */
-void ANeuralNetworksMemory_free(ANeuralNetworksMemory* memory);
-
-/**
- * Create an empty {@link ANeuralNetworksModel}.
- *
- * <p>This only creates the object. Computation is performed once
- * {@link ANeuralNetworksExecution_startCompute} is invoked.
- *
- * The model should be constructed with calls to
- * {@link ANeuralNetworksModel_addOperation} and
- * {@link ANeuralNetworksModel_addOperand}.
- *
- * <p>{@link ANeuralNetworksModel_finish} should be called once the model
- * has been fully constructed.</p>
- *
- * <p>{@link ANeuralNetworksModel_free} should be called once the model
- * is no longer needed.</p>
- *
- * @param model The {@link ANeuralNetworksModel} to be created.
- * Set to NULL if unsuccessful.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_create(ANeuralNetworksModel** model);
-
-/**
- * Destroy a model.
- *
- * The model need not have been finished by a call to
- * {@link ANeuralNetworksModel_finish}.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @param model The model to be destroyed. Passing NULL is acceptable and
- * results in no operation.
- */
-void ANeuralNetworksModel_free(ANeuralNetworksModel* model);
-
-/**
- * Indicate that we have finished modifying a model. Required before
- * calling {@link ANeuralNetworksCompilation_create}.
- *
- * An application is responsible to make sure that no other thread uses
- * the model at the same time.
- *
- * This function must only be called once for a given model.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @param model The model to be finished.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_finish(ANeuralNetworksModel* model);
-
-/**
- * Add an operand to a model.
- *
- * The order in which the operands are added is important. The first one added
- * to a model will have the index value 0, the second 1, etc. These indexes are
- * used as operand identifiers in
- * {@link ANeuralNetworksModel_addOperation},
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs},
- * {@link ANeuralNetworksModel_setOperandValue},
- * {@link ANeuralNetworksModel_setOperandValueFromMemory},
- * {@link ANeuralNetworksExecution_setInput},
- * {@link ANeuralNetworksExecution_setInputFromMemory},
- * {@link ANeuralNetworksExecution_setOutput} and
- * {@link ANeuralNetworksExecution_setOutputFromMemory}.
- *
- * <p>Every operand must be referenced in exactly one of the following
- * ways:<ul>
- * <li>It is identified as a model input with
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs}.</li>
- * <li>It is identified as a constant with
- * {@link ANeuralNetworksModel_setOperandValue} or
- * {@link ANeuralNetworksModel_setOperandValueFromMemory}.</li>
- * <li>It is identified as an output of exactly one operation with
- * {@link ANeuralNetworksModel_addOperation}.</li></ul></p>
- * <p>An operand that is identified as a model input or as a constant
- * must not also be identified as a model output with
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs}.</p>
- *
- * To build a model that can accommodate inputs of various sizes, as
- * you may want to do for a CNN, leave unspecified the dimensions that
- * will vary at run time. If you do so, fully specify dimensions
- * when calling {@link ANeuralNetworksExecution_setInput} or
- * {@link ANeuralNetworksExecution_setInputFromMemory}.
- *
- * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
- * called will return an error.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @param model The model to be modified.
- * @param type The {@link ANeuralNetworksOperandType} that describes the shape
- * of the operand. Neither the {@link ANeuralNetworksOperandType}
- * nor the dimensions it points to need to outlive the call to
- * {@link ANeuralNetworksModel_addOperand}.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_addOperand(ANeuralNetworksModel* model,
- const ANeuralNetworksOperandType* type);
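-
-/*
- * Illustrative usage (a minimal sketch, reusing the tensorType sketched
- * after {@link ANeuralNetworksOperandType} above): indexes are assigned in
- * call order.
- *
- *   ANeuralNetworksModel_addOperand(model, &tensorType);  // operand index 0
- *   ANeuralNetworksModel_addOperand(model, &tensorType);  // operand index 1
- */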
-
-/**
- * Sets an operand to a constant value.
- *
- * Values of length smaller or equal to
- * {@link ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES}
- * are immediately copied into the model.
- *
- * For values of length greater than {@link ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES},
- * a pointer to the buffer is stored within the model. The application is responsible
- * for not changing the content of this region until all executions using this model
- * have completed. As the data may be copied during processing, modifying the data
- * after this call yields undefined results.
- *
- * For large tensors, using {@link ANeuralNetworksModel_setOperandValueFromMemory}
- * is likely to be more efficient.
- *
- * To indicate that an optional operand should be considered missing,
- * pass nullptr for buffer and 0 for length.
- *
- * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
- * called will return an error.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @param model The model to be modified.
- * @param index The index of the model operand we're setting.
- * @param buffer A pointer to the data to use.
- * @param length The size in bytes of the data value.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel* model, int32_t index,
- const void* buffer, size_t length);
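-
-/*
- * Illustrative usage (a minimal sketch): a 4-byte int32 constant is well
- * below ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES, so it is
- * copied into the model and the local variable need not outlive the call.
- *
- *   int32_t fuseCode = ANEURALNETWORKS_FUSED_NONE;
- *   ANeuralNetworksModel_setOperandValue(model, 2, &fuseCode,
- *                                        sizeof(fuseCode));
- */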
-
-/**
- * Sets an operand to a value stored in a memory object.
- *
- * The content of the memory is not copied. A reference to that memory is stored
- * inside the model. The application is responsible for not changing the content
- * of the memory region until all executions using this model have completed.
- * As the data may be copied during processing, modifying the data after this call
- * yields undefined results.
- *
- * To indicate that an optional operand should be considered missing,
- * use {@link ANeuralNetworksModel_setOperandValue} instead, passing nullptr for buffer.
- *
- * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
- * called will return an error.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @param model The model to be modified.
- * @param index The index of the model operand we're setting.
- * @param buffer A pointer to the data to use.
- * @param memory The memory containing the data.
- * @param offset This specifies the location of the data within the memory.
- * The offset is in bytes from the start of memory.
- * @param length The size in bytes of the data value.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel* model, int32_t index,
- const ANeuralNetworksMemory* memory,
- size_t offset, size_t length);
-
-/**
- * Add an operation to a model.
- *
- * @param model The model to be modified.
- * @param type The {@link ANeuralNetworksOperationType} of the operation.
- * @param inputCount The number of entries in the inputs array.
- * @param inputs An array of indexes identifying each operand.
- * @param outputCount The number of entries in the outputs array.
- * @param outputs An array of indexes identifying each operand.
- *
- * The operands specified by inputs and outputs must have been
- * previously added by calls to {@link ANeuralNetworksModel_addOperand}.
- *
- * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
- * called will return an error.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_addOperation(ANeuralNetworksModel* model,
- ANeuralNetworksOperationType type, uint32_t inputCount,
- const uint32_t* inputs, uint32_t outputCount,
- const uint32_t* outputs);
-
-/**
- * Specifies which operands will be the model's inputs and
- * outputs. Every model must have at least one input and one output.
- *
- * An operand cannot be used for both input and output. Doing so will
- * return an error.
- *
- * @param model The model to be modified.
- * @param inputCount The number of entries in the inputs array.
- * @param inputs An array of indexes identifying the input operands.
- * @param outputCount The number of entries in the outputs array.
- * @param outputs An array of indexes identifying the output operands.
- *
- * The operands specified by inputs and outputs must have been
- * previously added by calls to {@link ANeuralNetworksModel_addOperand}.
- *
- * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
- * called will return an error.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel* model, uint32_t inputCount,
- const uint32_t* inputs, uint32_t outputCount,
- const uint32_t* outputs);
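-
-/*
- * Illustrative usage (a minimal sketch): building a complete one-operation
- * model with the standard ANEURALNETWORKS_ADD operation, which takes two
- * tensors plus an ANEURALNETWORKS_INT32 fused-activation scalar. Error
- * checking is omitted for brevity.
- *
- *   uint32_t dims[1] = {2};
- *   ANeuralNetworksOperandType tensorType = {
- *       ANEURALNETWORKS_TENSOR_FLOAT32, 1, dims, 0.0f, 0};
- *   ANeuralNetworksOperandType scalarType = {
- *       ANEURALNETWORKS_INT32, 0, NULL, 0.0f, 0};
- *
- *   ANeuralNetworksModel* model = NULL;
- *   ANeuralNetworksModel_create(&model);
- *   ANeuralNetworksModel_addOperand(model, &tensorType);  // 0: input a
- *   ANeuralNetworksModel_addOperand(model, &tensorType);  // 1: input b
- *   ANeuralNetworksModel_addOperand(model, &scalarType);  // 2: fuse code
- *   ANeuralNetworksModel_addOperand(model, &tensorType);  // 3: output
- *
- *   int32_t fuse = ANEURALNETWORKS_FUSED_NONE;
- *   ANeuralNetworksModel_setOperandValue(model, 2, &fuse, sizeof(fuse));
- *
- *   uint32_t opInputs[3] = {0, 1, 2};
- *   uint32_t opOutputs[1] = {3};
- *   ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD,
- *                                     3, opInputs, 1, opOutputs);
- *
- *   uint32_t modelInputs[2] = {0, 1};
- *   uint32_t modelOutputs[1] = {3};
- *   ANeuralNetworksModel_identifyInputsAndOutputs(model, 2, modelInputs,
- *                                                 1, modelOutputs);
- *   ANeuralNetworksModel_finish(model);
- */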
-
-/**
- * Specifies whether {@link ANEURALNETWORKS_TENSOR_FLOAT32} is allowed to be
- * calculated with range and/or precision as low as that of the IEEE 754 16-bit
- * floating-point format. By default, {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * must be calculated using at least the range and precision of the IEEE 754
- * 32-bit floating-point format.
- *
- * @param model The model to be modified.
- * @param allow 'true' indicates {@link ANEURALNETWORKS_TENSOR_FLOAT32} may be
- * calculated with range and/or precision as low as that of the
- * IEEE 754 16-bit floating point format. 'false' indicates
- * {@link ANEURALNETWORKS_TENSOR_FLOAT32} must be calculated using
- * at least the range and precision of the IEEE 754 32-bit floating
- * point format.
- *
- * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
- * called will return an error.
- *
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_relaxComputationFloat32toFloat16(ANeuralNetworksModel* model, bool allow);
-
-/**
- * Create a {@link ANeuralNetworksCompilation} to compile the given model.
- *
- * <p>This only creates the object. Compilation is only performed once
- * {@link ANeuralNetworksCompilation_finish} is invoked.</p>
- *
- * <p>{@link ANeuralNetworksCompilation_finish} should be called once
- * all desired properties have been set on the compilation.</p>
- *
- * <p>{@link ANeuralNetworksCompilation_free} should be called once the compilation
- * is no longer needed.</p>
- *
- * <p>The provided model must outlive the compilation.</p>
- *
- * The model must already have been finished by a call to
- * {@link ANeuralNetworksModel_finish}.
- *
- * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
- *
- * @param model The {@link ANeuralNetworksModel} to be compiled.
- * @param compilation The newly created object or NULL if unsuccessful.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
- * if the model is invalid.
- */
-int ANeuralNetworksCompilation_create(ANeuralNetworksModel* model,
- ANeuralNetworksCompilation** compilation);
-
-/**
- * Destroy a compilation.
- *
- * The compilation need not have been finished by a call to
- * {@link ANeuralNetworksCompilation_finish}.
- *
- * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
- *
- * @param compilation The compilation to be destroyed. Passing NULL is acceptable and
- * results in no operation.
- */
-void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation* compilation);
-
-/**
- * Sets the execution preference.
- *
- * <p>Provides guidance to the runtime when trade-offs are possible.</p>
- *
- * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
- *
- * @param compilation The compilation to be modified.
- * @param preference Either {@link ANEURALNETWORKS_PREFER_LOW_POWER},
- *                   {@link ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER}, or
- *                   {@link ANEURALNETWORKS_PREFER_SUSTAINED_SPEED}.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation* compilation,
- int32_t preference);
-
-/**
- * Indicate that we have finished modifying a compilation. Required before
- * calling {@link ANeuralNetworksExecution_create}.
- *
- * An application is responsible to make sure that no other thread uses
- * the compilation at the same time.
- *
- * This function must only be called once for a given compilation.
- *
- * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
- *
- * @param compilation The compilation to be finished.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation* compilation);
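-
-/*
- * Illustrative usage (a minimal sketch, assuming a finished model):
- *
- *   ANeuralNetworksCompilation* compilation = NULL;
- *   ANeuralNetworksCompilation_create(model, &compilation);
- *   ANeuralNetworksCompilation_setPreference(
- *       compilation, ANEURALNETWORKS_PREFER_SUSTAINED_SPEED);
- *   ANeuralNetworksCompilation_finish(compilation);
- *   // The compilation is now ready for ANeuralNetworksExecution_create.
- */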
-
-/**
- * Create a {@link ANeuralNetworksExecution} to apply the given compilation.
- * This only creates the object. Computation is only performed once
- * {@link ANeuralNetworksExecution_startCompute} is invoked.
- *
- * <p>The provided compilation must outlive the execution.</p>
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @param compilation The {@link ANeuralNetworksCompilation} to be evaluated.
- * @param execution The newly created object or NULL if unsuccessful.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
- * if the compilation is invalid.
- */
-int ANeuralNetworksExecution_create(ANeuralNetworksCompilation* compilation,
- ANeuralNetworksExecution** execution);
-
-/**
- * Destroy an execution.
- *
- * <p>If called on an execution for which
- * {@link ANeuralNetworksExecution_startCompute} has been called, the
- * function will return immediately but will mark the execution to be deleted
- * once the computation completes. The related {@link ANeuralNetworksEvent}
- * will be signaled and {@link ANeuralNetworksEvent_wait} will return
- * ANEURALNETWORKS_ERROR_DELETED.</p>
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @param execution The execution to be destroyed. Passing NULL is acceptable and
- * results in no operation.
- */
-void ANeuralNetworksExecution_free(ANeuralNetworksExecution* execution);
-
-/**
- * Associate a user buffer with an input of the model of the
- * {@link ANeuralNetworksExecution}.
- *
- * <p>The provided buffer must outlive the execution.</p>
- *
- * If the input is optional, you can indicate that it is omitted by
- * passing nullptr for buffer and 0 for length.
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @param execution The execution to be modified.
- * @param index The index of the input argument we are setting. It is
- * an index into the lists passed to
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
- * the index associated with
- * {@link ANeuralNetworksModel_addOperand}.
- * @param type The {@link ANeuralNetworksOperandType} of the
- * operand. Unless the input is omitted, this should be
- * used to specify the dimensions that were left
- * unspecified when the operand was added to the
- * model. All other properties of the type must be the
- * same as specified in the model. If the type is the same
- * as specified when the model was built, NULL can be
- * passed. Neither the {@link ANeuralNetworksOperandType}
- * nor the dimensions it points to need to outlive the call
- * to {@link ANeuralNetworksExecution_setInput}.
- * @param buffer The buffer containing the data.
- * @param length The length in bytes of the buffer.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
- * name is not recognized or the buffer is too small for the input.
- */
-int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution* execution, int32_t index,
- const ANeuralNetworksOperandType* type, const void* buffer,
- size_t length);
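-
-/*
- * Illustrative usage (a minimal sketch; the shape, buffer, and bufferLength
- * are hypothetical): supplying at execution time the dimensions that were
- * left unspecified when the input operand was added to the model.
- *
- *   uint32_t runDims[2] = {1, 224};
- *   ANeuralNetworksOperandType runType = {
- *       ANEURALNETWORKS_TENSOR_FLOAT32, 2, runDims, 0.0f, 0};
- *   ANeuralNetworksExecution_setInput(execution, 0, &runType,
- *                                     buffer, bufferLength);
- */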
-
-/**
- * Associate part of a memory object with an input of the model of the
- * {@link ANeuralNetworksExecution}.
- *
- * <p>The provided memory must outlive the execution.</p>
- *
- * If the input is optional, you can indicate that it is omitted by
- * using {@link ANeuralNetworksExecution_setInput} instead, passing nullptr for buffer
- * and 0 for length.
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @param execution The execution to be modified.
- * @param index The index of the input argument we are setting. It is
- * an index into the lists passed to
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
- * the index associated with {@link ANeuralNetworksModel_addOperand}.
- * @param type The {@link ANeuralNetworksOperandType} of the
- * operand. This should be used to specify the dimensions
- * that were left unspecified when the operand was added
- * to the model. All other properties of the type must be
- * the same as specified in the model. If the type is the
- * same as specified when the model was built, NULL can be
- * passed. Neither the {@link ANeuralNetworksOperandType}
- * nor the dimensions it points to need to outlive the call
- * to {@link ANeuralNetworksExecution_setInputFromMemory}.
- * @param memory The memory containing the data.
- * @param offset This specifies the location of the data within the memory.
- * The offset is in bytes from the start of memory.
- * @param length The size in bytes of the data value.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
- * name is not recognized or the buffer is too small for the input.
- */
-int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
- const ANeuralNetworksOperandType* type,
- const ANeuralNetworksMemory* memory, size_t offset,
- size_t length);
-
-/**
- * Associate a user buffer with an output of the model of the
- * {@link ANeuralNetworksExecution}.
- *
- * If the output is optional, you can indicate that it is omitted by
- * passing nullptr for buffer and 0 for length.
- *
- * <p>The provided buffer must outlive the execution.</p>
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @param execution The execution to be modified.
- * @param index The index of the output argument we are setting. It is
- * an index into the lists passed to
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
- * the index associated with {@link ANeuralNetworksModel_addOperand}.
- * @param type The {@link ANeuralNetworksOperandType} of the
- * operand. Unless the output is omitted, this should be
- * used to specify the dimensions that were left
- * unspecified when the operand was added to the
- * model. All other properties of the type must be the
- * same as specified in the model. If the type is the same
- * as specified when the model was built, NULL can be
- * passed. Neither the {@link ANeuralNetworksOperandType}
- * nor the dimensions it points to need to outlive the call
- * to {@link ANeuralNetworksExecution_setOutput}.
- * @param buffer The buffer where the data is to be written.
- * @param length The length in bytes of the buffer.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
- * name is not recognized or the buffer is too small for the output.
- */
-int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution* execution, int32_t index,
- const ANeuralNetworksOperandType* type, void* buffer,
- size_t length);
-
-/**
- * Associate part of a memory object with an output of the model of the
- * {@link ANeuralNetworksExecution}.
- *
- * If the output is optional, you can indicate that it is omitted by
- * using {@link ANeuralNetworksExecution_setOutput} instead, passing nullptr for buffer
- * and 0 for length.
- *
- * <p>The provided memory must outlive the execution.</p>
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @param execution The execution to be modified.
- * @param index The index of the output argument we are setting. It is
- * an index into the lists passed to
- * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
- * the index associated with {@link ANeuralNetworksModel_addOperand}.
- * @param type The {@link ANeuralNetworksOperandType} of the operand. This should be
- * used to specify the dimensions that were left
- * unspecified when the operand was added to the
- * model. All other properties of the type must be the
- * same as specified in the model. If the type is the same
- * as specified when the model was built, NULL can be
- * passed. Neither the {@link ANeuralNetworksOperandType}
- * nor the dimensions it points to need to outlive the call
- * to {@link ANeuralNetworksExecution_setOutputFromMemory}.
- * @param memory The memory where the data is to be stored.
- * @param offset This specifies the location of the data within the memory.
- * The offset is in bytes from the start of memory.
- * @param length The length in bytes of the data value.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
- * name is not recognized or the buffer is too small for the output.
- */
-int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
- const ANeuralNetworksOperandType* type,
- const ANeuralNetworksMemory* memory, size_t offset,
- size_t length);
-
-/**
- * Schedule evaluation of the execution.
- *
- * <p>Schedules evaluation of the execution. Once the model has been
- * applied and the outputs are ready to be consumed, the returned event will be
- * signaled. Use {@link ANeuralNetworksEvent_wait} to wait for that event.
- * </p>
- *
- * Multiple executions can be scheduled and evaluated concurrently. The
- * runtime makes no guarantee on the ordering of completion of
- * executions. If it's important to the application, the application
- * should enforce the ordering by using
- * {@link ANeuralNetworksEvent_wait}.
- *
- * ANeuralNetworksEvent_wait must be called to reclaim the resources used
- * by the execution.
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @param execution The execution to be scheduled and executed.
- * @param event The event that will be signaled on completion. event is set to
- * NULL if there's an error.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution* execution,
- ANeuralNetworksEvent** event);
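-
-/*
- * Illustrative usage (a minimal sketch): two executions scheduled
- * concurrently; completion order is not guaranteed, so each event is
- * waited on explicitly.
- *
- *   ANeuralNetworksEvent* e1 = NULL;
- *   ANeuralNetworksEvent* e2 = NULL;
- *   ANeuralNetworksExecution_startCompute(execution1, &e1);
- *   ANeuralNetworksExecution_startCompute(execution2, &e2);
- *   ANeuralNetworksEvent_wait(e1);
- *   ANeuralNetworksEvent_wait(e2);
- *   ANeuralNetworksEvent_free(e1);
- *   ANeuralNetworksEvent_free(e2);
- */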
-
-/**
- * Waits until the execution completes.
- *
- * More than one thread can wait on an event. When the execution completes,
- * all threads will be released.
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- *
- * @return ANEURALNETWORKS_NO_ERROR if the execution completed normally.
- */
-int ANeuralNetworksEvent_wait(ANeuralNetworksEvent* event);
-
-/**
- * Destroys the event.
- *
- * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
- */
-void ANeuralNetworksEvent_free(ANeuralNetworksEvent* event);
-
-__END_DECLS
-
-#endif // ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_H
-
-/** @} */
diff --git a/include/NeuralNetworksEx.h b/include/NeuralNetworksEx.h
deleted file mode 100644
index 727ca9484..000000000
--- a/include/NeuralNetworksEx.h
+++ /dev/null
@@ -1,670 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file NeuralNetworksEx.h
- * @brief This file contains ANeuralNetworksModel_addOperationEx function definition
- * @ingroup COM_AI_RUNTIME
- */
-#ifndef NN_RUNTIME_NEURAL_NETWORKS_EX_H
-#define NN_RUNTIME_NEURAL_NETWORKS_EX_H
-
-#include <sys/cdefs.h>
-
-__BEGIN_DECLS
-
-/**
- * @brief Extended operation types
- */
-typedef enum {
- /** extends operation. */
- /* The following operations extend the standard OperationCode set. */
- /**
- * Casts a tensor to a new type.
- *
- * The output value is calculated as:
- *
- * output = new_type(input)
- *
- * Ex:
- * X = {1.8,2.2}, dtype of X = float32
- * Y = Cast(X), dtype of Y = int32
- * then Y = {1,2}
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
- */
- ANEURALNETWORKS_CAST_EX = 50000,
-
- /**
- * Gathers values along an axis.
- *
- * Produces an output tensor with shape
- * input0.dimension[:axis] + indices.dimension + input0.dimension[axis + 1:]
- * where:
- * # Vector indices (output is rank(input0)).
- * output[a_0, ..., a_n, i, b_0, ..., b_n] =
- * input0[a_0, ..., a_n, indices[i], b_0, ..., b_n]
- *
- * # Higher rank indices (output is rank(input0) + rank(indices) - 1).
- * output[a_0, ..., a_n, i, ..., j, b_0, ... b_n] =
- * input0[a_0, ..., a_n, indices[i, ..., j], b_0, ..., b_n]
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: from 1
- *
- * Inputs:
- * * 0: An n-D tensor from which to gather values.
- * * 1: A k-D tensor {@link ANEURALNETWORKS_TENSOR_INT32} of indices.
- * The values must be in the bounds of the corresponding dimensions
- * of input0.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis.
- * Negative index is used to specify axis from the end
- * (e.g. -1 for the last axis). Must be in the range [-n, n).
- *
- * Outputs:
- * * 0: An (n + k - 1)-D tensor with the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_GATHER_EX = 50001, /**< Gather slices according to indexes and axis */
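-
- /*
-  * Worked shape example for GATHER_EX (illustrative): with input0 of shape
-  * [3, 4], indices = {2, 0} of shape [2], and axis = 0, the output has
-  * shape [2, 4] and output[i, :] = input0[indices[i], :].
-  */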
-
- /**
- * Finds values and indices of the k largest entries for the last dimension.
- *
- * Resulting values in each dimension are sorted in descending order. If
- * two values are equal, the one with larger index appears first.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: from 1
- *
- * Inputs:
- * * 0: input, an n-D tensor specifying the input.
- * * 1: k, an {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
- * top elements to look for along the last dimension.
- *
- * Outputs:
- * * 0: An n-D tensor of the same type as the input, containing the k
- * largest elements along each last dimensional slice.
- * * 1: An n-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32}
- * containing the indices of values within the last dimension of input.
- */
- ANEURALNETWORKS_TOPK_V2_EX = 50002,
-
- /**
- * Computes the maximum of elements across dimensions of a tensor.
- *
- * Reduces the input tensor along the given dimensions to reduce.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
- * to reduce.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_TENSORFLOW_MAX_EX = 50003,
-
- /**
- * Splits a tensor along a given axis into num_splits subtensors.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: from 1
- *
- * Inputs:
- * * 0: An n-D tensor to split.
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis along
- * which to split.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar indicating the number of
- * splits along given axis. Must evenly divide axis size.
- *
- * Outputs:
- * * 0 ~ (num_splits - 1): Resulting subtensors.
- */
- ANEURALNETWORKS_SPLIT_EX = 50004, /**< Splits a tensor into sub tensors */
-
- /**
- * Computes element-wise reciprocal of square root of the input tensor.
- *
- * The output is calculated using this formula:
- *
- * output = 1/sqrt(input)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
- */
- ANEURALNETWORKS_RSQRT_EX = 50005,
-
- /**
- * Computes element-wise squared difference on the input tensors.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
- * The output is the square of the difference obtained by subtracting the
- * second input tensor from the first one.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_SQUARED_DIFFERENCE_EX = 50006,
-
- /**
- * Computes numerical negative value element-wise on the input tensor.
- *
- * Given an input tensor of {@link OperandCode},
- * the output is the element-wise numerical negative of the input tensor.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the input.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
- */
- ANEURALNETWORKS_NEG_EX = 50007,
-
- /**
- * Computes exponential value element-wise on the input tensor.
- *
- * Given an input tensor of {@link OperandCode},
- * the output is the element-wise exponential of the input tensor.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the input.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
- */
- ANEURALNETWORKS_EXP_EX = 50008,
-
- /**
- * Computes the sum of elements across dimensions of a tensor.
- *
- * Reduces the input tensor along the given dimensions to reduce.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
- * to reduce.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_REDUCE_SUM_EX = 50009,
-
- /**
- * A transposed convolutional layer carries out a regular convolution
- * but reverts its spatial transformation.
- * Transpose convolution performs convolution with transposed weights.
- *
- * Supported tensor {@link OperandCode}:
- * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * {@link ANEURALNETWORKS_TENSOR_INT32}
- *
- * Supported tensor rank: only 4
- *
- * Inputs:
- * 0: An {@link ANEURALNETWORKS_INT32} 1-D four element tensor, specifying the output shape.
- * 1: A 4-D tensor, of shape [depth_out, filter_height, filter_width, depth_in],
- * specifying the filter.
- * 2: A 4-D tensor, of shape [batches, height, width, depth_in], specifying the input.
- * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding type.
- * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the ‘width’ dimension.
- * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
- * walking through input in the height dimension.
- *
- * Outputs:
- * 0: The output 4-D tensor, of shape [batches, out_height, out_width, depth_out].
- */
- ANEURALNETWORKS_TRANSPOSE_CONV_EX = 50010,
-
- /**
- * Computes element-wise truth value by comparing the two input tensors for equality.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
- * The output is the result of comparison of two input tensors.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- *
- * Outputs:
- * * 0: A boolean tensor indicating the truth value of (x == y)
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True, a hit. A zero indicates otherwise.
- */
- ANEURALNETWORKS_EQUAL_EX = 50011,
-
- /**
- * Computes element-wise absolute value of the input tensor.
- *
- * The output is calculated using this formula:
- *
- * output = fabs(input)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
- */
- ANEURALNETWORKS_ABS_EX = 50012,
- /**
- * Packs a list of rank-R tensors into one rank-(R+1) tensor along the axis dimension.
- *
- * The input tensors must have identical {@link OperandCode} and the same
- * dimensions.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 3
- *
- * Inputs:
- * * 0 ~ n-1: The list of n input tensors, of shape
- * [D0, D1, ..., Daxis(i), ..., Dm]. For inputs of
- * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, all input tensors
- * must have the same scale and zeroPoint.
- * * n: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
- * number of input tensors.
- * * n+1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
- * pack axis.
- *
- * Outputs:
- * * 0: The output, a tensor of the same {@link OperandCode} as the input
- * tensors. The output shape is [D0, D1, ..., N at Daxis(i), ..., Dm+1]
- * where N is the number of tensors to be packed.
- */
- ANEURALNETWORKS_PACK_EX = 50013,
- /**
- * Unpacks a given rank-R tensor into num_splits rank-(R-1) tensors along the
- * axis dimension. num_splits must evenly divide the size of the input along
- * the unpack axis.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: The input shape is [D0, D1, ..., N at Daxis(i), ..., Dm+1].
- * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
- * number of splits along unpack axis.
- * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
- * unpack axis.
- *
- * Outputs:
- * * 0 ~ n-1: The list of n output tensors, of shape
- * [D0, D1, ..., Daxis(i), ..., Dm]. The output tensors are of the same
- * {@link OperandCode} as the input tensor 0.
- */
- ANEURALNETWORKS_UNPACK_EX = 50014,
-
- /**
- * Finds the index with the largest value across axes of an input tensor.
- *
- * Reduces the input tensor along the given dimensions to reduce. The reduced
- * dimensions are retained with length 1.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the input.
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
- * to reduce. Must be in the range [-rank(input_tensor), rank(input_tensor)).
- *
- * Outputs:
- * * 0: An output tensor of {@link ANEURALNETWORKS_TENSOR_INT32}.
- *      The rank of the output tensor is the same as the rank of input0.
- */
- ANEURALNETWORKS_ARGMAX_EX = 50015,
-
- /**
- * Element-wise square root computation of the input tensor.
- *
- * The output is calculated using this formula:
- *
- * output = sqrt(input)
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- *
- * Outputs:
- * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
- */
- ANEURALNETWORKS_SQRT_EX = 50016,
-
- /**
- * Computes element-wise truth value by comparing the input tensors for non-equality.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
- * The output is the result of comparison of two input tensors.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D tensor, specifying the first input.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- *
- * Outputs:
- * * 0: A boolean tensor indicating the truth value of non-equality of input tensors
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True, a hit. A zero indicates otherwise.
- */
- ANEURALNETWORKS_NOT_EQUAL_EX = 50017,
-
- /**
- * Computes element-wise truth value of the input tensor negation.
- *
- * Takes one input tensor.
- * The output is the negation, i.e. the logical complement, of the input tensor.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D boolean tensor, specifying the input.
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True. A zero indicates otherwise.
- *
- * Outputs:
- * * 0: A boolean tensor of the same size as input indicating the truth value of (NOT x)
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True. A zero indicates otherwise.
- */
- ANEURALNETWORKS_LOGICAL_NOT_EX = 50018,
-
- /**
- * Computes element-wise truth value of two input tensors for LOGICAL AND.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
- * The output is the result of comparison of two input tensors.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D boolean tensor, specifying the first input.
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True, a hit. A zero indicates otherwise.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- *
- * Outputs:
- * * 0: A boolean tensor indicating the truth value of two input tensors for LOGICAL AND.
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True, a hit. A zero indicates otherwise.
- */
- ANEURALNETWORKS_LOGICAL_AND_EX = 50019,
-
- /**
- * Computes element-wise truth value of two input tensors for LOGICAL OR.
- *
- * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
- * The output is the result of comparison of two input tensors.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: An n-D boolean tensor, specifying the first input.
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True, a hit. A zero indicates otherwise.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0.
- *
- * Outputs:
- * * 0: A boolean tensor indicating the truth value of two input tensors for LOGICAL OR.
- * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
- * and scale 1.0f.
- * A non-zero byte represents True, a hit. A zero indicates otherwise.
- */
- ANEURALNETWORKS_LOGICAL_OR_EX = 50020,
-
- /**
- * Computes the minimum of elements across dimensions of a tensor.
- *
- * Reduces the input tensor along the given dimensions to reduce.
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_INT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
- * to reduce.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_REDUCE_MIN_EX = 50021,
-
- /**
- * Parametric Rectified Linear Unit.
- *
- * It follows: f(x) = alpha * x for x < 0, f(x) = x for x >= 0, where alpha
- * is a learned array with the same {@link OperandCode} and compatible
- * dimensions as input x.
- *
- * Two dimensions are compatible when:
- * 1. they are equal, or
- * 2. one of them is 1
- *
- * The size of the output is the maximum size along each dimension of the
- * input operands. It starts with the trailing dimensions, and works its way
- * forward.
- *
- * Example:
- * input.dimension = {4, 1, 2}
- * alpha.dimension = {5, 4, 3, 1}
- * output.dimension = {5, 4, 3, 2}
- *
- * Supported tensor {@link OperandCode}:
- * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
- * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
- *
- * Supported tensor rank: up to 4
- *
- * Inputs:
- * * 0: A tensor, specifying the input.
- * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
- * as input0, specifying the alpha.
- *
- * Outputs:
- * * 0: A tensor of the same {@link OperandCode} as input0.
- */
- ANEURALNETWORKS_PRELU_EX = 50022,
-} OperationCodeEx; // extends OperationCode
-
-typedef OperationCodeEx ANeuralNetworksOperationTypeEx;
-
-/**
- * @brief Add an extended operation to a model.
- *
- * @param[in] model The model to be modified.
- * @param[in] type The type of extended operation.
- * @param[in] inputCount The number of entries in the inputs array.
- * @param[in] inputs An array of indexes identifying each operand.
- * @param[in] outputCount The number of entries in the outputs array.
- * @param[in] outputs An array of indexes identifying each operand.
- *
- * @note The operands specified by inputs and outputs must have been
- * previously added by calls to {@link ANeuralNetworksModel_addOperand}.\n
- * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
- * called will return an error.\n
- * See {@link ANeuralNetworksModel} for information on multithreaded usage.
- *
- * @return ANEURALNETWORKS_NO_ERROR if successful.
- */
-int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
- ANeuralNetworksOperationTypeEx type, uint32_t inputCount,
- const uint32_t *inputs, uint32_t outputCount,
- const uint32_t *outputs);
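-
-/*
- * Illustrative usage (a minimal sketch, assuming operand index 0 is a
- * float32 tensor and operand index 1 is an int32 tensor of the same shape,
- * both added earlier with ANeuralNetworksModel_addOperand):
- *
- *   uint32_t castInputs[1] = {0};
- *   uint32_t castOutputs[1] = {1};
- *   ANeuralNetworksModel_addOperationEx(model, ANEURALNETWORKS_CAST_EX,
- *                                       1, castInputs, 1, castOutputs);
- */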
-
-__END_DECLS
-
-#endif // NN_RUNTIME_NEURAL_NETWORKS_EX_H
diff --git a/include/NeuralNetworksLoadHelpers.h b/include/NeuralNetworksLoadHelpers.h
deleted file mode 100644
index eeef2b5ec..000000000
--- a/include/NeuralNetworksLoadHelpers.h
+++ /dev/null
@@ -1,132 +0,0 @@
-/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This header is derived from part of the following file (in TensorFlow v1.12)
-// 'externals/tensorflow/tensorflow/contrib/lite/nnapi/NeuralNetworksShim.h'
-
-/**
- * @file NeuralNetworksLoadHelpers.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains functions to load NN API runtime library
- */
-
-#ifndef __NEURAL_NETWORKS_LOAD_HELPER_H__
-#define __NEURAL_NETWORKS_LOAD_HELPER_H__
-
-#include <dlfcn.h>
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-/**
- * @brief Print log data
- * @param[in] format Format string of @c printf
- * @param[in] args Argument after format string. (Same with @c printf)
- */
-#define NNAPI_LOG(format, ...) printf(format "\n", __VA_ARGS__);
-
-/**
- * @brief Create a function pointer named @c fn after loading NN API library
- * @param[in] name Name of a function
- */
-#define LOAD_FUNCTION(name) \
- static name##_fn fn = reinterpret_cast<name##_fn>(nnfw::loadFunction(#name));
-
-/**
- * @brief Run @c fn function. @c fn is created by @ref LOAD_FUNCTION
- * @param[in] args List of arguments for the function @c fn
- */
-#define EXECUTE_FUNCTION(...) \
- if (fn != nullptr) { \
- fn(__VA_ARGS__); \
- }
-
-/**
- * @brief Run @c fn function. @c fn is created by @ref LOAD_FUNCTION
- * @param[in] args List of arguments for the function @c fn
- * @return the return value of @c fn
- */
-#define EXECUTE_FUNCTION_RETURN(...) return fn != nullptr ? fn(__VA_ARGS__) : 0;
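-
-/*
- * Illustrative usage (a minimal sketch): the macros above expect a
- * function-pointer typedef named <name>_fn to be in scope. A hypothetical
- * wrapper for ANeuralNetworksModel_create would look like:
- *
- *   typedef int (*ANeuralNetworksModel_create_fn)(ANeuralNetworksModel**);
- *
- *   inline int MyModel_create(ANeuralNetworksModel** model) {
- *     LOAD_FUNCTION(ANeuralNetworksModel_create);
- *     EXECUTE_FUNCTION_RETURN(model);
- *   }
- */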
-
-namespace nnfw
-{
-
-/**
- * @brief Load NN API library
- * @param[in] name path of NN API library
- * @return a symbol table handle of NN API library
- */
-inline void* loadLibrary(const char* name) {
- // TODO: change RTLD_LOCAL? Assumes there can be multiple instances of nn
- // api RT
- void* handle = nullptr;
-#if 1 //#ifdef __ANDROID__
- handle = dlopen(name, RTLD_LAZY | RTLD_LOCAL);
- if (handle == nullptr) {
- NNAPI_LOG("nnapi error: unable to open library %s", name);
- }
-#endif
- return handle;
-}
-
-/**
- * @brief Load libneuralnetworks.so and return handle of library
- * @return a symbol table handle of NN API library
- */
-inline void* getLibraryHandle() {
- static void* handle = loadLibrary("libneuralnetworks.so");
- return handle;
-}
-
-/**
- * @brief Return function ptr in libneuralnetworks.so
- * @param[in] name Name of function
- * @return function pointer
- */
-inline void* loadFunction(const char* name) {
- void* fn = nullptr;
- if (getLibraryHandle() != nullptr) {
- fn = dlsym(getLibraryHandle(), name);
- }
- if (fn == nullptr) {
- NNAPI_LOG("nnapi error: unable to open function %s", name);
- abort();
- }
- else {
-#ifdef _GNU_SOURCE
- Dl_info info;
- dladdr(fn, &info);
- NNAPI_LOG("nnapi function '%s' is loaded from '%s' ", name, info.dli_fname);
-#endif // _GNU_SOURCE
- }
- return fn;
-}
-
-/**
- * @brief Check if libneuralnetworks.so can be loaded
- * @return @c true if loading is successful, otherwise @c false.
- */
-inline bool NNAPIExists() {
- static bool nnapi_is_available = getLibraryHandle() != nullptr;
- return nnapi_is_available;
-}
-
-} // namespace nnfw
-
-#endif // __NEURAL_NETWORKS_LOAD_HELPER_H__
diff --git a/infra/command/build-docker-image b/infra/command/build-docker-image
new file mode 100644
index 000000000..5b1dbb5e1
--- /dev/null
+++ b/infra/command/build-docker-image
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+function Usage()
+{
+ echo "Usage: $0 $(basename ${BASH_SOURCE[0]}) [OPTIONS]"
+ echo ""
+ echo "Options:"
+ echo " --tizen Build docker image for tizen build"
+ echo "Other options are passed through to docker build:"
+ docker build --help
+}
+
+DOCKER_FILE_RPATH="infra/docker/Dockerfile"
+DOCKER_BUILD_ARGS=()
+
+# Handle argument for this script
+# Set default docker image name, tag
+for i in "$@"
+do
+ case ${i} in
+ -h|--help|help)
+ Usage
+ exit 1
+ ;;
+ esac
+done
+
+DOCKER_BUILD_ARGS+=("-t" "${DOCKER_IMAGE_NAME:-nnas}")
+
+# Argument for docker build commands
+for i in "$@"
+do
+ case ${i} in
+ -h|--help|help)
+ # Already handled argument
+ ;;
+ *)
+ DOCKER_BUILD_ARGS+=(${i})
+ ;;
+ esac
+done
+
+docker build --build-arg http_proxy="${http_proxy}" \
+ --build-arg https_proxy="${https_proxy}" \
+ "${DOCKER_BUILD_ARGS[@]}" \
+ - < ${NNAS_PROJECT_PATH}/${DOCKER_FILE_RPATH}
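+
+# Illustrative invocation (a sketch; assumes this command is run through the
+# nnas driver, which sets NNAS_PROJECT_PATH):
+#   DOCKER_IMAGE_NAME=nnas:local nnas build-docker-image --no-cache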
diff --git a/infra/command/format b/infra/command/format
new file mode 100644
index 000000000..1015c4473
--- /dev/null
+++ b/infra/command/format
@@ -0,0 +1,198 @@
+#!/bin/bash
+
+INVALID_EXIT=0
+FILES_TO_CHECK=()
+DIRECTORIES_NOT_TO_BE_TESTED=()
+
+function pushd () {
+ command pushd "$@" > /dev/null
+}
+
+function popd () {
+ command popd "$@" > /dev/null
+}
+
+function command_exists() {
+ command -v $1 > /dev/null 2>&1
+}
+
+function check_newline() {
+ # Check all files (CMakeLists.txt, *.cl, ... not only for C++, Python)
+ if [[ ${#FILES_TO_CHECK} -ne 0 ]]; then
+ CRCHECK=$(file ${FILES_TO_CHECK} | grep 'with CR')
+ fi
+ FILES_TO_FIX=($(echo "$CRCHECK" | grep "with CRLF line" | cut -d':' -f1))
+ for f in ${FILES_TO_FIX[@]}; do
+ tr -d '\r' < $f > $f.fixed && cat $f.fixed > $f && rm $f.fixed
+ done
+ FILES_TO_FIX=($(echo "${CRCHECK}" | grep "with CR line" | cut -d':' -f1))
+ for f in ${FILES_TO_FIX[@]}; do
+ tr '\r' '\n' < $f > $f.fixed && cat $f.fixed > $f && rm $f.fixed
+ done
+}
+
+function check_permission() {
+ # Check all files except script
+ FILES_TO_CHECK_PERMISSION=()
+ for f in ${FILES_TO_CHECK[@]}; do
+ # Manually ignore permission checking
+ if [[ ${f} == !(nnas|nnfw|nncc|*.sh|*.py) ]]; then
+ FILES_TO_CHECK_PERMISSION+=("${f}")
+ fi
+ done
+
+ if [[ ${#FILES_TO_CHECK_PERMISSION} -eq 0 ]]; then
+ return
+ fi
+ for FILE_TO_CHECK in ${FILES_TO_CHECK_PERMISSION[@]}; do
+ RESULT=$(stat -c '%A' ${FILE_TO_CHECK} | grep 'x')
+ if [ "${RESULT}" != "" ]; then
+ chmod a-x ${FILE_TO_CHECK}
+ fi
+ done
+}
+
+function check_cpp_files() {
+ if [[ ${__Check_CPP} -eq 0 ]]; then
+ echo "[SKIPPED] C/C++ check is skipped"
+ return
+ fi
+
+ CLANG_FORMAT_CANDIDATES=()
+ CLANG_FORMAT_CANDIDATES+=("clang-format")
+ CLANG_FORMAT_CANDIDATES+=("clang-format-3.9")
+
+ for CLANG_FORMAT_CANDIDATE in ${CLANG_FORMAT_CANDIDATES[@]}; do
+ if command_exists ${CLANG_FORMAT_CANDIDATE} ; then
+ CLANG_FORMAT="${CLANG_FORMAT_CANDIDATE}"
+ fi
+ done
+
+ if [[ -z ${CLANG_FORMAT} ]]; then
+ echo "[ERROR] clang-format is unavailable"
+ echo
+ echo "Please install clang-format before running format check"
+ exit 1
+ fi
+
+ # Check c++ files
+ FILES_TO_CHECK_CPP=()
+ for f in ${FILES_TO_CHECK[@]}; do
+ # Manually ignore style checking
+ if [[ ${f} == +(*/NeuralNetworks.h|*/NeuralNetworksExtensions.h) ]]; then
+ continue
+ fi
+
+ # File extension to check
+ if [[ ${f} == +(*.h|*.hpp|*.cpp|*.cc|*.c|*.cl) ]]; then
+ FILES_TO_CHECK_CPP+=("${f}")
+ fi
+ done
+
+ # Skip by '.FORMATDENY' file
+ for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
+ FILES_TO_CHECK_CPP=(${FILES_TO_CHECK_CPP[*]/$s*/})
+ done
+
+ if [[ ${#FILES_TO_CHECK_CPP[@]} -ne 0 ]]; then
+ ${CLANG_FORMAT} -i ${FILES_TO_CHECK_CPP[@]}
+ EXIT_CODE=$?
+ if [[ ${EXIT_CODE} -ne 0 ]]; then
+ INVALID_EXIT=${EXIT_CODE}
+ fi
+ fi
+}
+
+function check_python_files() {
+ if [[ ${__Check_PYTHON} -eq 0 ]]; then
+ echo "[SKIPPED] Python check is skipped"
+ return
+ fi
+
+ if ! command_exists yapf; then
+ echo "[ERROR] yapf is unavailable"
+ echo " Please install yapf."
+ exit 1
+ fi
+
+ # Check python files
+ FILES_TO_CHECK_PYTHON=()
+ for f in ${FILES_TO_CHECK[@]}; do
+ # File extension to check
+ if [[ ${f} == *.py ]]; then
+ FILES_TO_CHECK_PYTHON+=("${f}")
+ fi
+ done
+ for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
+ skip=${s#'.'/}/
+ FILES_TO_CHECK_PYTHON=(${FILES_TO_CHECK_PYTHON[*]/$skip*/})
+ done
+
+ if [[ ${#FILES_TO_CHECK_PYTHON[@]} -ne 0 ]]; then
+ yapf -i --style='{based_on_style: pep8, column_limit: 90}' ${FILES_TO_CHECK_PYTHON[@]}
+ EXIT_CODE=$?
+ if [[ ${EXIT_CODE} -ne 0 ]]; then
+ INVALID_EXIT=${EXIT_CODE}
+ fi
+ fi
+}
+
+pushd ${NNAS_PROJECT_PATH}
+
+if [ -n "$(git diff)" ]; then
+ echo "[WARNING] Commit all the changes before running format check"
+ echo " format.patch file will contain unstaged files"
+fi
+
+__Check_CPP=${CHECK_CPP:-"1"}
+__Check_PYTHON=${CHECK_PYTHON:-"1"}
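+
+# For example (hypothetical invocation), skip the C/C++ pass and check only
+# the commits on top of local master:
+#
+#   CHECK_CPP=0 CHECK_DIFF_ONLY=1 ./nnas format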
+
+FILES_TO_CHECK=$(git ls-files -co --exclude-standard)
+if [[ "${CHECK_DIFF_ONLY}" = "1" ]]; then
+ MASTER_EXIST=$(git rev-parse --verify master)
+ CURRENT_BRANCH=$(git branch | grep \* | cut -d ' ' -f2-)
+ DIFF_COMMITS=$(git log --graph --oneline master..HEAD | wc -l)
+ if [[ -z "${MASTER_EXIST}" ]]; then
+ echo "Cannot found local master branch"
+ elif [[ "${CURRENT_BRANCH}" = "master" ]]; then
+ echo "Current branch is master"
+ else
+ FILES_TO_CHECK=$(git diff --name-only --diff-filter=d HEAD~${DIFF_COMMITS}..HEAD)
+ fi
+fi
+
+for DIR_NOT_TO_BE_TESTED in $(git ls-files -co --exclude-standard '*/.FORMATDENY'); do
+ DIRECTORIES_NOT_TO_BE_TESTED+=($(dirname "${DIR_NOT_TO_BE_TESTED}"))
+done
+
+check_newline
+check_permission
+check_cpp_files
+check_python_files
+
+DIFF=$(git diff | tee format.patch)
+
+popd
+
+if [[ -z "${CRCHECK}" ]] && [[ ! -n "${DIFF}" ]] && [[ ${INVALID_EXIT} -eq 0 ]]; then
+ echo "[PASSED] Format checker succeed."
+ return
+fi
+
+# Something went wrong
+
+if [[ ! -z "${CRCHECK}" ]]; then
+ echo "[FAILED] Please use LF for newline for following files."
+ echo "${CRCHECK}"
+fi
+
+if [[ -n "${DIFF}" ]]; then
+ echo "[FAILED] Format checker failed. Please update the code to follow the convention."
+ echo " You can find the changes in format.patch"
+fi
+
+if [[ ${INVALID_EXIT} -ne 0 ]]; then
+ echo "[[FAILED] Invalid format checker exit."
+fi
+
+exit 1
diff --git a/infra/command/gen-coverage-report b/infra/command/gen-coverage-report
new file mode 100644
index 000000000..5f928eca5
--- /dev/null
+++ b/infra/command/gen-coverage-report
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+import "build.configuration"
+
+BUILD_WORKSPACE_PATH="${NNAS_PROJECT_PATH}/${BUILD_WORKSPACE_RPATH}"
+
+LCOV_PATH=$(command -v lcov)
+GENHTML_PATH=$(command -v genhtml)
+
+if [[ -z "${LCOV_PATH}" ]]; then
+ echo "ERROR: 'lcov' is not found"
+ exit 255
+fi
+
+if [[ -z "${GENHTML_PATH}" ]]; then
+ echo "ERROR: 'genhtml' is not found"
+ exit 255
+fi
+
+if [[ -z "${GCOV_PATH}" ]]; then
+ GCOV_PATH=$(command -v gcov)
+ if [[ -z "${GCOV_PATH}" ]]; then
+ echo "ERROR: 'gcov' is not found"
+ exit 255
+ fi
+fi
+
+OUTPUT_TAG="${NNAS_COVERAGE:-coverage}"
+OUTPUT_PATH="${NNAS_COVERAGE_PATH:-${NNAS_PROJECT_PATH}/${OUTPUT_TAG}}"
+
+if [[ -e "${OUTPUT_PATH}" ]]; then
+ echo "ERROR: '${OUTPUT_PATH}' already exists"
+ exit 255
+fi
+
+mkdir -p "${OUTPUT_PATH}"
+
+RAW_BASE_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.base.raw.info"
+RAW_TEST_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.test.raw.info"
+RAW_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.raw.info"
+EXTRACTED_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.extracted.info"
+EXCLUDED_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.excluded.info"
+COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.info"
+HTML_PATH="${OUTPUT_PATH}/html"
+
+CANDIDATES=()
+
+for CANDIDATE in "$@";
+do
+ CANDIDATES+=("${NNAS_PROJECT_PATH}/${CANDIDATE}/*")
+done
+
+# Capture initial zero coverage data
+"${LCOV_PATH}" -c -i -d "${BUILD_WORKSPACE_PATH}" --gcov-tool ${GCOV_PATH} -o "${RAW_BASE_COVERAGE_INFO_PATH}"
+
+# Capture tests coverage data
+"${LCOV_PATH}" -c -d "${BUILD_WORKSPACE_PATH}" --gcov-tool ${GCOV_PATH} -o "${RAW_TEST_COVERAGE_INFO_PATH}"
+
+# Append zero coverage data and tests coverage data
+"${LCOV_PATH}" -o "${RAW_COVERAGE_INFO_PATH}" \
+ -a "${RAW_BASE_COVERAGE_INFO_PATH}" \
+ -a "${RAW_TEST_COVERAGE_INFO_PATH}"
+
+# Extract data for particular paths
+"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${EXTRACTED_COVERAGE_INFO_PATH}" \
+ "${CANDIDATES[@]}"
+
+# Exclude *.test.cpp files from coverage report
+"${LCOV_PATH}" -r "${EXTRACTED_COVERAGE_INFO_PATH}" -o "${EXCLUDED_COVERAGE_INFO_PATH}" \
+ '*.test.cpp'
+
+# Final coverage data
+cp -v ${EXCLUDED_COVERAGE_INFO_PATH} ${COVERAGE_INFO_PATH}
+
+# Generate HTML report
+"${GENHTML_PATH}" "${EXCLUDED_COVERAGE_INFO_PATH}" \
+ --prefix "${NNAS_PROJECT_PATH}" \
+ --output-directory "${HTML_PATH}"
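+
+# Usage sketch (hypothetical output path): each positional argument is a
+# directory, relative to NNAS_PROJECT_PATH, whose coverage data is extracted.
+#
+#   NNAS_COVERAGE_PATH=/tmp/coverage ./nnas gen-coverage-report compiler
+#
+# The HTML report then lands under /tmp/coverage/html.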
diff --git a/infra/command/install-githooks b/infra/command/install-githooks
new file mode 100644
index 000000000..e624aa6d7
--- /dev/null
+++ b/infra/command/install-githooks
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+REPO_HOOKS_PATH=$NNAS_PROJECT_PATH/infra/git-hooks
+GIT_HOOKS_PATH=$NNAS_PROJECT_PATH/.git/hooks
+
+# Create symbolic links to hooks dir
+if [ -e $GIT_HOOKS_PATH/pre-push ]; then
+ echo "Backup old $GIT_HOOKS_PATH/pre-push to $GIT_HOOKS_PATH/pre-push~"
+ mv -v $GIT_HOOKS_PATH/pre-push $GIT_HOOKS_PATH/pre-push~
+elif [ -h $GIT_HOOKS_PATH/pre-push ]; then
+ ls -l $GIT_HOOKS_PATH/pre-push
+ echo "Remove broken symlink $GIT_HOOKS_PATH/pre-push"
+ rm -v $GIT_HOOKS_PATH/pre-push
+fi
+ln -sv $REPO_HOOKS_PATH/pre-push.sh $GIT_HOOKS_PATH/pre-push
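+
+# Example (assumes NNAS_PROJECT_PATH is exported by the 'nnas' driver):
+#
+#   ./nnas install-githooks
+#
+# Afterwards every 'git push' runs 'CHECK_DIFF_ONLY=1 ./nnas format' via the
+# pre-push hook.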
diff --git a/infra/command/pylint b/infra/command/pylint
new file mode 100644
index 000000000..d20f89169
--- /dev/null
+++ b/infra/command/pylint
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+INVALID_EXIT=0
+
+__Check_PYLINT=${CHECK_PYLINT:-"1"}
+
+if [[ ${__Check_PYLINT} -eq 0 ]]; then
+ echo "[SKIPPED] Pylint check is skipped"
+ return
+fi
+
+DIRECTORIES_NOT_TO_BE_TESTED=()
+
+for DIR_NOT_TO_BE_TESTED in $(find -name '.FORMATDENY' -exec dirname {} \;); do
+ DIRECTORIES_NOT_TO_BE_TESTED+=("$DIR_NOT_TO_BE_TESTED")
+done
+
+PYTHON_FILES_TO_CHECK=$(git ls-files '*.py')
+ARR=($PYTHON_FILES_TO_CHECK)
+for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
+ skip=${s#'.'/}/
+ ARR=(${ARR[*]//$skip*/})
+done
+PYTHON_FILES_TO_CHECK=${ARR[*]}
+if [[ ${#PYTHON_FILES_TO_CHECK} -ne 0 ]]; then
+ pylint --disable=all --enable="E0110, E0203, W1401, W1402, W0221, W0199, W0111, E0237, E1111, E1128, E0701, E0703, E1300, W1302, W1300, W1501, E0012, E0111, W0211, E1310, E1003, W0702, W0711, W1502, E0712, W0640, W0124, E0116, W0102, W1505, W0402, E0108, E0241, W0705, W0109, W0123, W0122, W0106, W1305, E1303, E0102, W0604, W0603, W0602, W0601, W0406, E0240, E0239, E0100, E0604, W1307, E0303, E1139, E1126, E1127, E0238, E0236, E0113, E1130, W1202, E1201, W1201, E1206, E1205, E1200, W0150, E0202, E0704, W0410, W1303, W1306, E1304, E1125, E1302, W0223, E0213, W0233, E0107, E0115, E0117, E1134, E1133, E0103, E0711, W0104, W0105, E0702, E0710, W0623, W0622, E1124, W0404, E0402, E1132, E0101, E0104, W0222, E0114, W0231, E0001, E1306, E1305, E1121, E0112, E1301, E0632, E0603, E0602, W0108, W0107, W0101, E0011, E1137, E1131, E1138, E1135, W1304, W1301, E0601, W0120, E1700" $PYTHON_FILES_TO_CHECK
+ EXIT_CODE=$?
+ if [[ $EXIT_CODE -ne 0 ]]; then
+ INVALID_EXIT=$EXIT_CODE
+ fi
+fi
+
+if [[ $INVALID_EXIT -eq 0 ]]; then
+ echo "[PASSED] Format checker succeed."
+ return
+fi
+exit 1
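+
+# Example (hypothetical local run): disable this check entirely with
+#
+#   CHECK_PYLINT=0 ./nnas pylint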
diff --git a/infra/config/build.configuration b/infra/config/build.configuration
new file mode 100644
index 000000000..10e41879b
--- /dev/null
+++ b/infra/config/build.configuration
@@ -0,0 +1 @@
+BUILD_WORKSPACE_RPATH=${NNAS_WORKSPACE:-build}
diff --git a/infra/docker/Dockerfile b/infra/docker/Dockerfile
new file mode 100644
index 000000000..2ca3b44c2
--- /dev/null
+++ b/infra/docker/Dockerfile
@@ -0,0 +1,65 @@
+FROM ubuntu:16.04
+
+ARG UBUNTU_MIRROR
+ENV http_proxy $http_proxy
+ENV https_proxy $https_proxy
+
+RUN if [ -n "$http_proxy" ] ; then echo "Acquire::http::proxy \"${http_proxy}\";" >> /etc/apt/apt.conf ; fi
+RUN if [ -n "$https_proxy" ] ; then echo "Acquire::https::proxy \"${https_proxy}\";" >> /etc/apt/apt.conf ; fi
+RUN if [ -n "$UBUNTU_MIRROR" ] ; then sed "s/archive.ubuntu.com/${UBUNTU_MIRROR}/g" -i /etc/apt/sources.list ; fi
+
+# Install 'add-apt-repository'
+RUN apt-get update && apt-get -qqy install software-properties-common
+
+# Build tool
+RUN apt-get update && apt-get -qqy install build-essential cmake scons git lcov
+
+# Install extra dependencies (Caffe, nnkit)
+RUN apt-get update && apt-get -qqy install libboost-all-dev libgflags-dev libgoogle-glog-dev libatlas-base-dev libhdf5-dev
+
+# Install protocol buffer
+RUN apt-get update && apt-get -qqy install libprotobuf-dev protobuf-compiler
+
+# Additional tools
+RUN apt-get update && apt-get -qqy install doxygen graphviz wget unzip clang-format-3.9 python3 python3-pip hdf5-tools
+RUN pip3 install yapf==0.22.0 numpy
+
+# Install google test (source)
+RUN apt-get update && apt-get -qqy install libgtest-dev
+
+###
+### NOTE: Don't add new package installs using apt-get or pip below this line
+###
+
+# Install native build tool gcc version 6.x
+RUN add-apt-repository ppa:ubuntu-toolchain-r/test && apt-get update && apt-get -qqy install gcc-6 g++-6
+RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-6 60 --slave /usr/bin/g++ g++ /usr/bin/g++-6 && update-alternatives --config gcc
+
+# Install cross build tool gcc version 6.x
+RUN wget https://releases.linaro.org/components/toolchain/binaries/6.3-2017.02/arm-linux-gnueabihf/gcc-linaro-6.3.1-2017.02-x86_64_arm-linux-gnueabihf.tar.xz -O gcc-hardfp.tar.xz -nv
+RUN wget https://releases.linaro.org/components/toolchain/binaries/6.2-2016.11/arm-linux-gnueabi/gcc-linaro-6.2.1-2016.11-x86_64_arm-linux-gnueabi.tar.xz -O gcc-softfp.tar.xz -nv
+RUN tar -xf gcc-hardfp.tar.xz -C /opt/ && rm -rf gcc-hardfp.tar.xz
+RUN tar -xf gcc-softfp.tar.xz -C /opt/ && rm -rf gcc-softfp.tar.xz
+ENV PATH "/opt/gcc-linaro-6.2.1-2016.11-x86_64_arm-linux-gnueabi/bin:/opt/gcc-linaro-6.3.1-2017.02-x86_64_arm-linux-gnueabihf/bin:$PATH"
+
+###
+### NOTE: Don't add build & install steps that use the installed build tools above this line
+###
+
+# Build and install google test static libraries
+WORKDIR /root/gtest
+RUN cmake /usr/src/gtest
+RUN make
+RUN mv *.a /usr/lib
+WORKDIR /root
+RUN rm -rf gtest
+
+# Install gbs & sdb
+RUN echo 'deb [trusted=yes] http://download.tizen.org/tools/latest-release/Ubuntu_16.04/ /' >> /etc/apt/sources.list
+RUN apt-get update && apt-get -qqy install gbs
+RUN wget http://download.tizen.org/sdk/tizenstudio/official/binary/sdb_3.1.4_ubuntu-64.zip -O sdb.zip
+RUN unzip -d tmp sdb.zip && rm sdb.zip
+RUN cp tmp/data/tools/sdb /usr/bin/. && rm -rf tmp
+
+# Clean archives (to reduce image size)
+RUN apt-get clean -y
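+
+# Example build (hypothetical tag; normally driven by infra/command/build-docker-image,
+# which pipes this file to 'docker build' on stdin):
+#
+#   docker build -t nnas - < infra/docker/Dockerfile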
diff --git a/infra/git-hooks/pre-push.sh b/infra/git-hooks/pre-push.sh
new file mode 100644
index 000000000..ce751333a
--- /dev/null
+++ b/infra/git-hooks/pre-push.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+# An example hook script to verify what is about to be pushed. Called by "git
+# push" after it has checked the remote status, but before anything has been
+# pushed. If this script exits with a non-zero status nothing will be pushed.
+#
+# This hook is called with the following parameters:
+#
+# $1 -- Name of the remote to which the push is being done
+# $2 -- URL to which the push is being done
+#
+# If pushing without using a named remote those arguments will be equal.
+#
+# Information about the commits which are being pushed is supplied as lines to
+# the standard input in the form:
+#
+# <local ref> <local sha1> <remote ref> <remote sha1>
+#
+# This sample shows how to prevent push of commits where the log message starts
+# with "WIP" (work in progress).
+
+remote="$1"
+url="$2"
+
+# RUN FORMAT CHECKER
+
+REPO_PATH=$(git rev-parse --show-toplevel)
+cd $REPO_PATH
+
+CHECK_DIFF_ONLY=1 ./nnas format
+
+exit $?
diff --git a/infra/nncc/3rdparty/.gitignore b/infra/nncc/3rdparty/.gitignore
new file mode 100644
index 000000000..c3d773e35
--- /dev/null
+++ b/infra/nncc/3rdparty/.gitignore
@@ -0,0 +1 @@
+URL.local
diff --git a/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default b/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default
new file mode 100644
index 000000000..1fff1b4f3
--- /dev/null
+++ b/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default
@@ -0,0 +1 @@
+https://bitbucket.org/eigen/eigen/get/fd6845384b86.tar.gz
diff --git a/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.info b/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.info
new file mode 100644
index 000000000..8e7a3c2f0
--- /dev/null
+++ b/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.info
@@ -0,0 +1,3 @@
+This URL originates from TensorFlow 1.12
+
+Please check https://github.com/tensorflow/tensorflow/blob/v1.12.0/tensorflow/workspace.bzl#L121
diff --git a/infra/nncc/CMakeLists.txt b/infra/nncc/CMakeLists.txt
new file mode 100644
index 000000000..12b840636
--- /dev/null
+++ b/infra/nncc/CMakeLists.txt
@@ -0,0 +1,144 @@
+cmake_minimum_required(VERSION 3.1)
+
+project(nncc)
+
+enable_testing()
+
+set(CMAKE_CXX_STANDARD 11)
+
+# This feature works with CMake 3.5.2 or later. However, using earlier versions does not
+# produce an error. We still officially target CMake 3.1.0, but keep this setting for the
+# sake of semantic support in various development tools.
+# TODO Someday, update CMake to 3.7.2 or later to take advantage of improvements
+# such as `cmake-server`.
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
+set(NNCC_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../.." CACHE
+ INTERNAL "Where to find nncc top-level source directory"
+)
+
+set(NNCC_EXTERNALS_DIR
+ "${NNCC_PROJECT_SOURCE_DIR}/externals" CACHE
+ INTERNAL "Where to download external dependencies"
+)
+set(NNCC_OVERLAY_DIR "${CMAKE_BINARY_DIR}/overlay" CACHE
+ INTERNAL "Where locally built external dependencies are installed")
+
+# This allows find_package to access configurations installed inside overlay
+list(APPEND CMAKE_PREFIX_PATH "${NNCC_OVERLAY_DIR}")
+
+macro(nncc_include PREFIX)
+ include("${NNCC_PROJECT_SOURCE_DIR}/infra/nncc/cmake/modules/${PREFIX}.cmake")
+endmacro(nncc_include)
+
+macro(nncc_find_package PREFIX)
+ find_package(${PREFIX} CONFIG NO_DEFAULT_PATH
+ PATHS ${NNCC_PROJECT_SOURCE_DIR}/infra/nncc/cmake/packages
+ ${ARGN}
+ )
+endmacro(nncc_find_package)
+
+# nncc_find_resource(NAME) will update the following variables
+#
+# NAME_FOUND
+# NAME_DIR
+#
+# TODO Explain how to add a resource in README.md
+function(nncc_find_resource NAME)
+ set(RESOURCE_DIR "${NNCC_PROJECT_SOURCE_DIR}/res/${NAME}")
+
+ if(NOT IS_DIRECTORY ${RESOURCE_DIR})
+ set(${NAME}_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT IS_DIRECTORY ${RESOURCE_DIR})
+
+ set(${NAME}_DIR ${RESOURCE_DIR} PARENT_SCOPE)
+ set(${NAME}_FOUND TRUE PARENT_SCOPE)
+endfunction(nncc_find_resource)
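+
+# Usage sketch (hypothetical resource name):
+#
+#   nncc_find_resource(BVLCCaffe)
+#   if(BVLCCaffe_FOUND)
+#     message(STATUS "Resource found at ${BVLCCaffe_DIR}")
+#   endif(BVLCCaffe_FOUND)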
+
+###
+### CMake configuration
+###
+if(NOT CMAKE_BUILD_TYPE)
+ set(CMAKE_BUILD_TYPE "Debug" CACHE STRING "Type of build" FORCE)
+endif(NOT CMAKE_BUILD_TYPE)
+message(STATUS "Use '${CMAKE_BUILD_TYPE}' configuration")
+
+# Prefer -pthread to -lpthread for find_package(Threads ...)
+#
+# std::thread code compiled only with -lpthread emits the following runtime error (on GCC 4.8.4)
+#
+# terminate called after throwing an instance of 'std::system_error'
+# what(): Enable multithreading to use std::thread: Operation not permitted
+#
+set(THREADS_PREFER_PTHREAD_FLAG TRUE)
+
+###
+### Configuration
+###
+option(DOWNLOAD_PROTOBUF "Download Protocol Buffer source" ON)
+option(BUILD_PROTOBUF "Locally build Protocol Buffer from the downloaded source" ON)
+option(DOWNLOAD_EIGEN "Download Eigen source" ON)
+option(DOWNLOAD_FARMHASH "Download farmhash source" ON)
+option(DOWNLOAD_GEMMLOWP "Download GEMM low precesion library source" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" ON)
+option(DOWNLOAD_GFLAGS "Download GFlags source" OFF)
+option(DOWNLOAD_FLATBUFFERS "Download FlatBuffers source" ON)
+option(BUILD_FLATBUFFERS "Locally build Flatbuffers from the downloaded source" ON)
+option(DOWNLOAD_TENSORFLOW "Download TensorFlow source" ON)
+option(DOWNLOAD_CAFFE "Download Caffe source" ON)
+option(DOWNLOAD_PYTORCH "Download Pytorch source" ON)
+option(DOWNLOAD_ONNX "Download ONNX source" ON)
+option(DOWNLOAD_ABSEIL "Download Abseil-cpp source" ON)
+
+option(DOWNLOAD_GTEST "Download Google Test source" ON)
+option(BUILD_GTEST "Build Google Test from the downloaded source" ON)
+
+nncc_find_package(GTest QUIET)
+
+option(ENABLE_TEST "Build Tests using Google Test" ${GTest_FOUND})
+
+if(${ENABLE_TEST} AND NOT ${GTest_FOUND})
+ message(FATAL_ERROR "Google Test is required to enable test")
+endif(${ENABLE_TEST} AND NOT ${GTest_FOUND})
+
+option(ENABLE_COVERAGE "Build for coverage test" OFF)
+if(${ENABLE_COVERAGE} AND NOT ${ENABLE_TEST})
+ message(FATAL_ERROR "Test should be enabled to measure test coverage")
+endif(${ENABLE_COVERAGE} AND NOT ${ENABLE_TEST})
+
+if(${ENABLE_TEST})
+ include(CTest)
+endif(${ENABLE_TEST})
+
+option(ENABLE_CONTRIB_BUILD "Build incubating projects under contrib/" ON)
+option(ENABLE_STRICT_BUILD "Treat warning as error" OFF)
+
+###
+### Target
+###
+add_library(nncc_common INTERFACE)
+if(ENABLE_STRICT_BUILD)
+ # TODO Remove -Wno-reorder
+ target_compile_options(nncc_common INTERFACE -Werror -Wall -Wextra -Wno-reorder)
+endif(ENABLE_STRICT_BUILD)
+
+add_library(nncc_coverage INTERFACE)
+if(ENABLE_COVERAGE)
+ target_compile_options(nncc_coverage INTERFACE -g -O0 -fprofile-arcs -ftest-coverage)
+ target_link_libraries(nncc_coverage INTERFACE gcov)
+endif(ENABLE_COVERAGE)
+
+###
+### Function
+###
+# TODO Remove this nncc_include
+nncc_include(OptionalTargetTools)
+nncc_include(add_subdirectories)
+
+###
+### Components
+###
+if(ENABLE_CONTRIB_BUILD)
+ add_subdirectory("${NNCC_PROJECT_SOURCE_DIR}/compiler" "${CMAKE_BINARY_DIR}/compiler")
+endif(ENABLE_CONTRIB_BUILD)
diff --git a/infra/nncc/cmake/modules/Asserts.cmake b/infra/nncc/cmake/modules/Asserts.cmake
new file mode 100644
index 000000000..e40097e31
--- /dev/null
+++ b/infra/nncc/cmake/modules/Asserts.cmake
@@ -0,0 +1,8 @@
+# AssertTarget(t) stops the build if target "t" does not exist.
+function(AssertTarget TGT)
+ if(TARGET ${TGT})
+ return()
+ endif(TARGET ${TGT})
+
+ message(FATAL_ERROR "${TGT} target does not exist")
+endfunction(AssertTarget TGT)
diff --git a/infra/nncc/cmake/modules/ExternalProjectTools.cmake b/infra/nncc/cmake/modules/ExternalProjectTools.cmake
new file mode 100644
index 000000000..11446c051
--- /dev/null
+++ b/infra/nncc/cmake/modules/ExternalProjectTools.cmake
@@ -0,0 +1,3 @@
+macro(add_extdirectory DIR TAG)
+ add_subdirectory(${DIR} "${CMAKE_BINARY_DIR}/externals/${TAG}")
+endmacro(add_extdirectory)
diff --git a/infra/nncc/cmake/modules/ExternalSourceTools.cmake b/infra/nncc/cmake/modules/ExternalSourceTools.cmake
new file mode 100644
index 000000000..3baaeba8e
--- /dev/null
+++ b/infra/nncc/cmake/modules/ExternalSourceTools.cmake
@@ -0,0 +1,97 @@
+#
+# ExternalSource_Download(VAR ...)
+#
+function(ExternalSource_Download PREFIX)
+ include(CMakeParseArguments)
+ nncc_include(StampTools)
+
+ cmake_parse_arguments(ARG "" "DIRNAME;URL;CHECKSUM" "" ${ARGN})
+
+ # Configure URL
+ if(ARG_URL)
+ set(URL ${ARG_URL})
+ else()
+ # Use the first unparsed argument as URL (for backward compatibility)
+ list(GET ARG_UNPARSED_ARGUMENTS 0 URL)
+ endif(ARG_URL)
+
+ # Configure DIRNAME
+ if(NOT ARG_DIRNAME)
+ # Use PREFIX as DIRNAME (for backward compatibility)
+ set(DIRNAME ${PREFIX})
+ else()
+ set(DIRNAME ${ARG_DIRNAME})
+ endif(NOT ARG_DIRNAME)
+
+ get_filename_component(FILENAME ${URL} NAME)
+
+ set(CACHE_DIR "${NNCC_EXTERNALS_DIR}")
+ set(OUT_DIR "${CACHE_DIR}/${DIRNAME}")
+ set(TMP_DIR "${CACHE_DIR}/${DIRNAME}-tmp")
+
+ set(DOWNLOAD_PATH "${CACHE_DIR}/${DIRNAME}-${FILENAME}")
+ set(STAMP_PATH "${CACHE_DIR}/${DIRNAME}.stamp")
+
+ if(NOT EXISTS "${CACHE_DIR}")
+ file(MAKE_DIRECTORY "${CACHE_DIR}")
+ endif(NOT EXISTS "${CACHE_DIR}")
+
+ # Compare URL in STAMP file and the given URL
+ Stamp_Check(URL_CHECK "${STAMP_PATH}" "${URL}")
+
+ if(NOT URL_CHECK)
+ file(REMOVE "${STAMP_PATH}")
+ file(REMOVE_RECURSE "${OUT_DIR}")
+ file(REMOVE_RECURSE "${TMP_DIR}")
+
+ file(MAKE_DIRECTORY "${TMP_DIR}")
+
+ message("-- Download ${PREFIX} from ${URL}")
+ file(DOWNLOAD ${URL} "${DOWNLOAD_PATH}" SHOW_PROGRESS)
+ message("-- Download ${PREFIX} from ${URL} - done")
+
+ # Verify checksum
+ if(ARG_CHECKSUM)
+ message(STATUS "Verify ${PREFIX} archive")
+ string(REPLACE "=" ";" CHECKSUM_SPEC "${ARG_CHECKSUM}")
+
+ list(GET CHECKSUM_SPEC 0 CHECKSUM_ALG)
+ list(GET CHECKSUM_SPEC 1 CHECKSUM_VAL)
+ string(STRIP "${CHECKSUM_VAL}" CHECKSUM_VAL)
+
+ set(EXPECTED_CHECKSUM ${CHECKSUM_VAL})
+ file(${CHECKSUM_ALG} "${DOWNLOAD_PATH}" OBTAINED_CHECKSUM)
+
+ if(NOT (EXPECTED_CHECKSUM STREQUAL OBTAINED_CHECKSUM))
+ message(STATUS "CHECKSUM MISMATCH")
+ message(STATUS " expected: ${EXPECTED_CHECKSUM}")
+ message(STATUS " obtained: ${OBTAINED_CHECKSUM}")
+ message(FATAL_ERROR "Verify ${PREFIX} archive - fail")
+ endif(NOT (EXPECTED_CHECKSUM STREQUAL OBTAINED_CHECKSUM))
+
+ message(STATUS "Verify ${PREFIX} archive - done")
+ endif(ARG_CHECKSUM)
+
+ message("-- Extract ${PREFIX}")
+ execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz "${DOWNLOAD_PATH}"
+ WORKING_DIRECTORY "${TMP_DIR}")
+ file(REMOVE "${DOWNLOAD_PATH}")
+ message("-- Extract ${PREFIX} - done")
+
+ message("-- Cleanup ${PREFIX}")
+ file(GLOB contents "${TMP_DIR}/*")
+ list(LENGTH contents n)
+ if(NOT n EQUAL 1 OR NOT IS_DIRECTORY "${contents}")
+ set(contents "${TMP_DIR}")
+ endif()
+
+ get_filename_component(contents ${contents} ABSOLUTE)
+
+ file(RENAME ${contents} "${OUT_DIR}")
+ file(REMOVE_RECURSE "${TMP_DIR}")
+ file(WRITE "${STAMP_PATH}" "${URL}")
+ message("-- Cleanup ${PREFIX} - done")
+ endif()
+
+ set(${PREFIX}_SOURCE_DIR "${OUT_DIR}" PARENT_SCOPE)
+endfunction(ExternalSource_Download)
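+
+# Usage sketch (URL and checksum below are illustrative; see the configs under
+# infra/nncc/cmake/packages for real call sites):
+#
+#   ExternalSource_Download(FOO
+#     DIRNAME FOO-1.0
+#     CHECKSUM MD5=0123456789abcdef0123456789abcdef
+#     https://example.com/foo-1.0.tar.gz
+#   )
+#   # On success, FOO_SOURCE_DIR points at the extracted source tree.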
diff --git a/infra/nncc/cmake/modules/ListFile.cmake b/infra/nncc/cmake/modules/ListFile.cmake
new file mode 100644
index 000000000..aee0d162a
--- /dev/null
+++ b/infra/nncc/cmake/modules/ListFile.cmake
@@ -0,0 +1,12 @@
+# Read a file and create a list variable
+#
+# HOW TO USE
+#
+# ListFile_Read("A.txt" A_LIST)
+#
+function(ListFile_Read FILENAME VARNAME)
+ file(READ ${FILENAME} content)
+ # Reference: http://public.kitware.com/pipermail/cmake/2007-May/014236.html
+ STRING(REGEX REPLACE "\n" ";" content "${content}")
+ set(${VARNAME} ${content} PARENT_SCOPE)
+endfunction(ListFile_Read)
diff --git a/infra/nncc/cmake/modules/OptionTools.cmake b/infra/nncc/cmake/modules/OptionTools.cmake
new file mode 100644
index 000000000..0ca50f7c3
--- /dev/null
+++ b/infra/nncc/cmake/modules/OptionTools.cmake
@@ -0,0 +1,9 @@
+function(envoption PREFIX DEFAULT_VALUE)
+ set(VALUE ${DEFAULT_VALUE})
+
+ if(DEFINED ENV{${PREFIX}})
+ set(VALUE $ENV{${PREFIX}})
+ endif()
+
+ set(${PREFIX} ${VALUE} PARENT_SCOPE)
+endfunction(envoption)
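+
+# Usage sketch: the first argument names both the environment variable to read
+# and the CMake variable to set (URL below is illustrative):
+#
+#   envoption(FOO_URL https://example.com/foo.tar.gz)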
diff --git a/infra/nncc/cmake/modules/OptionalTargetTools.cmake b/infra/nncc/cmake/modules/OptionalTargetTools.cmake
new file mode 100644
index 000000000..8bf2c37ef
--- /dev/null
+++ b/infra/nncc/cmake/modules/OptionalTargetTools.cmake
@@ -0,0 +1,5 @@
+macro(optional_target_link_libraries NAME)
+ if(TARGET ${NAME})
+ target_link_libraries(${NAME} ${ARGN})
+ endif(TARGET ${NAME})
+endmacro(optional_target_link_libraries)
diff --git a/infra/nncc/cmake/modules/StampTools.cmake b/infra/nncc/cmake/modules/StampTools.cmake
new file mode 100644
index 000000000..d38e033ff
--- /dev/null
+++ b/infra/nncc/cmake/modules/StampTools.cmake
@@ -0,0 +1,18 @@
+# Stamp_Check(VARNAME PATH CONTENT)
+# Stamp_Check sets VARNAME as TRUE if a file exists at "PATH" and its content is the same as
+# "CONTENT"; it sets VARNAME as FALSE otherwise.
+function(Stamp_Check VARNAME PATH EXPECTED_CONTENT)
+ if(NOT EXISTS "${PATH}")
+ set(${VARNAME} FALSE PARENT_SCOPE)
+ return()
+ endif(NOT EXISTS "${PATH}")
+
+ file(READ ${PATH} OBTAINED_CONTENT)
+
+ if(NOT EXPECTED_CONTENT STREQUAL OBTAINED_CONTENT)
+ set(${VARNAME} FALSE PARENT_SCOPE)
+ return()
+ endif(NOT EXPECTED_CONTENT STREQUAL OBTAINED_CONTENT)
+
+ set(${VARNAME} TRUE PARENT_SCOPE)
+endfunction(Stamp_Check)
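+
+# Usage sketch (mirrors ExternalSourceTools.cmake): skip a re-download when the
+# stamp file still records the same URL.
+#
+#   Stamp_Check(URL_CHECK "${STAMP_PATH}" "${URL}")
+#   if(NOT URL_CHECK)
+#     # ... re-download and rewrite the stamp ...
+#   endif()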
diff --git a/infra/nncc/cmake/modules/TargetRequire.cmake b/infra/nncc/cmake/modules/TargetRequire.cmake
new file mode 100644
index 000000000..801600dd9
--- /dev/null
+++ b/infra/nncc/cmake/modules/TargetRequire.cmake
@@ -0,0 +1,45 @@
+# TargetRequire_Check(NAME t1 t2 t3 ...)
+#
+# TargetRequire_Check(NAME ...) sets "NAME" as TRUE if all the required targets are
+# available, and FALSE otherwise.
+function(TargetRequire_Check VARNAME)
+ set(${VARNAME} TRUE PARENT_SCOPE)
+ foreach(REQUIRED_TARGET IN ITEMS ${ARGN})
+ if(NOT TARGET ${REQUIRED_TARGET})
+ set(${VARNAME} FALSE PARENT_SCOPE)
+ return()
+ endif(NOT TARGET ${REQUIRED_TARGET})
+ endforeach(REQUIRED_TARGET)
+endfunction(TargetRequire_Check)
+
+# TargetRequire_Assert(t1 t2 t3 ...)
+#
+# TargetRequire_Assert(...) stops CMake immediately if there is a target required but unavailable.
+function(TargetRequire_Assert)
+ unset(MISSING_TARGETS)
+
+ foreach(REQUIRED_TARGET IN ITEMS ${ARGN})
+ if(NOT TARGET ${REQUIRED_TARGET})
+ list(APPEND MISSING_TARGETS ${REQUIRED_TARGET})
+ endif(NOT TARGET ${REQUIRED_TARGET})
+ endforeach(REQUIRED_TARGET)
+
+ list(LENGTH MISSING_TARGETS MISSING_COUNT)
+
+ if(NOT MISSING_COUNT EQUAL 0)
+ message(FATAL_ERROR "${MISSING_TARGETS} are required, but unavailable")
+ endif(NOT MISSING_COUNT EQUAL 0)
+endfunction(TargetRequire_Assert)
+
+# TargetRequire_Return(t1 t2 t3 ...)
+#
+# TargetRequire_Return(...) returns immediately if there is a target required but unavailable.
+#
+# NOTE "macro" is inevitable to make "return" inside affect the caller.
+macro(TargetRequire_Return)
+ foreach(REQUIRED_TARGET IN ITEMS ${ARGN})
+ if(NOT TARGET ${REQUIRED_TARGET})
+ return()
+ endif(NOT TARGET ${REQUIRED_TARGET})
+ endforeach(REQUIRED_TARGET)
+endmacro(TargetRequire_Return)
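+
+# Usage sketch (hypothetical target names):
+#
+#   TargetRequire_Check(DEPS_OK caffe caffeproto)  # sets DEPS_OK to TRUE/FALSE
+#   TargetRequire_Assert(flatbuffers)              # hard error if missing
+#   TargetRequire_Return(eigen)                    # silently skip the caller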
diff --git a/infra/nncc/cmake/modules/ThirdPartyTools.cmake b/infra/nncc/cmake/modules/ThirdPartyTools.cmake
new file mode 100644
index 000000000..8fbeacf6e
--- /dev/null
+++ b/infra/nncc/cmake/modules/ThirdPartyTools.cmake
@@ -0,0 +1,42 @@
+function(ThirdParty_URL VARNAME)
+ # PACKAGE (mandatory)
+ # VERSION (mandatory)
+ # ENV ... (optional, for backward compatibility)
+
+ include(CMakeParseArguments)
+
+ cmake_parse_arguments(ARG "" "PACKAGE;VERSION;ENV" "" ${ARGN})
+
+ if(NOT ARG_PACKAGE)
+ message(FATAL_ERROR "PACKAGE is missing")
+ endif(NOT ARG_PACKAGE)
+
+ if(NOT ARG_VERSION)
+ message(FATAL_ERROR "VERSION is missing")
+ endif(NOT ARG_VERSION)
+
+ set(PACKAGE_INFO_DIR "${NNCC_PROJECT_SOURCE_DIR}/infra/nncc/3rdparty/${ARG_PACKAGE}/${ARG_VERSION}")
+ set(PACKAGE_URL_FILE "${PACKAGE_INFO_DIR}/URL.default")
+ set(PACKAGE_URL_LOCAL_FILE "${PACKAGE_INFO_DIR}/URL.local")
+
+ if(NOT EXISTS "${PACKAGE_URL_FILE}")
+ message(FATAL_ERROR "URL file does not exist")
+ endif()
+
+ # Read URL from "[PACKAGE NAME]/[PACKAGE VERSION]/URL.default"
+ file(STRINGS "${PACKAGE_URL_FILE}" VALUE)
+
+ # Read URL from "[PACKAGE NAME]/[PACAKGE VERSION]/URL.local" (if it exists)
+ if(EXISTS "${PACKAGE_URL_LOCAL_FILE}")
+ file(STRINGS "${PACKAGE_URL_LOCAL_FILE}" VALUE)
+ endif()
+
+ # Read URL from process environment (if ENV option is specified)
+ if(ARG_ENV)
+ if(DEFINED ENV{${ARG_ENV}})
+ set(VALUE $ENV{${ARG_ENV}})
+ endif()
+ endif(ARG_ENV)
+
+ set("${VARNAME}" "${VALUE}" PARENT_SCOPE)
+endfunction(ThirdParty_URL)
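+
+# Usage sketch (as in EigenSource-fd6845384b86Config.cmake): reads the URL from
+# infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default, or URL.local if present.
+#
+#   ThirdParty_URL(EIGEN_URL PACKAGE Eigen VERSION fd6845384b86)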
diff --git a/cmake/modules/ExtendCMakeFunction.cmake b/infra/nncc/cmake/modules/add_subdirectories.cmake
index 06b7c768d..06b7c768d 100644
--- a/cmake/modules/ExtendCMakeFunction.cmake
+++ b/infra/nncc/cmake/modules/add_subdirectories.cmake
diff --git a/infra/nncc/cmake/packages/AbseilConfig.cmake b/infra/nncc/cmake/packages/AbseilConfig.cmake
new file mode 100644
index 000000000..4c731008a
--- /dev/null
+++ b/infra/nncc/cmake/packages/AbseilConfig.cmake
@@ -0,0 +1,37 @@
+function(_Abseil_import)
+ nncc_find_package(AbseilSource QUIET)
+
+ if(NOT AbseilSource_FOUND)
+ message("Abseil: NOT FOUND (Cannot access source)")
+ set(Abseil_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT AbseilSource_FOUND)
+
+ if(NOT TARGET abseil)
+ nncc_include(ExternalProjectTools)
+
+ # NOTE Turn off abseil testing
+ set(BUILD_TESTING OFF)
+ add_extdirectory("${AbseilSource_DIR}" ABSEIL)
+
+ add_library(abseil INTERFACE)
+ target_link_libraries(abseil INTERFACE
+ # From "Available Abseil CMake Public Targets" in CMake/README.md
+ absl::base
+ absl::algorithm
+ absl::container
+ absl::debugging
+ absl::memory
+ absl::meta
+ absl::numeric
+ absl::strings
+ absl::synchronization
+ absl::time
+ absl::utility
+ )
+ endif(NOT TARGET abseil)
+
+ set(Abseil_FOUND TRUE PARENT_SCOPE)
+endfunction(_Abseil_import)
+
+_Abseil_import()
diff --git a/infra/nncc/cmake/packages/AbseilSourceConfig.cmake b/infra/nncc/cmake/packages/AbseilSourceConfig.cmake
new file mode 100644
index 000000000..d980ac653
--- /dev/null
+++ b/infra/nncc/cmake/packages/AbseilSourceConfig.cmake
@@ -0,0 +1,24 @@
+function(_AbseilSource_import)
+ if(NOT DOWNLOAD_ABSEIL)
+ set(AbseilSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_ABSEIL)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ # NOTE TensorFlow 1.12 downloads abseil from the following URL
+ # - https://github.com/abseil/abseil-cpp/archive/48cd2c3f351ff188bc85684b84a91b6e6d17d896.tar.gz
+ #
+ # The last change of "48cd2c3f351" was commited on 2018.09.27
+ #
+ # Let's use the latest released version (2018-12 release)
+ envoption(ABSEIL_URL https://github.com/abseil/abseil-cpp/archive/20181200.tar.gz)
+
+ ExternalSource_Download(ABSEIL ${ABSEIL_URL})
+
+ set(AbseilSource_DIR ${ABSEIL_SOURCE_DIR} PARENT_SCOPE)
+ set(AbseilSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_AbseilSource_import)
+
+_AbseilSource_import()
diff --git a/infra/nncc/cmake/packages/Caffe/CMakeLists.txt b/infra/nncc/cmake/packages/Caffe/CMakeLists.txt
new file mode 100644
index 000000000..51b723edd
--- /dev/null
+++ b/infra/nncc/cmake/packages/Caffe/CMakeLists.txt
@@ -0,0 +1,15 @@
+file(GLOB CORE_SOURCES "${CaffeSource_DIR}/src/caffe/*.cpp" "${CaffeSource_DIR}/src/caffe/util/*.cpp")
+file(GLOB LAYER_SOURCES "${CaffeSource_DIR}/src/caffe/layers/*.cpp")
+
+add_library(caffe SHARED ${CORE_SOURCES} ${LAYER_SOURCES})
+target_compile_definitions(caffe PUBLIC CPU_ONLY)
+target_include_directories(caffe PUBLIC ${CaffeSource_DIR}/include)
+target_include_directories(caffe PRIVATE ${Boost_INCLUDE_DIRS})
+target_include_directories(caffe PRIVATE ${HDF5_INCLUDE_DIRS})
+target_include_directories(caffe PRIVATE ${Atlas_INCLUDE_DIRS})
+target_link_libraries(caffe caffeproto)
+target_link_libraries(caffe glog)
+target_link_libraries(caffe gflags)
+target_link_libraries(caffe ${Boost_LIBRARIES})
+target_link_libraries(caffe ${HDF5_LIBRARIES} ${HDF5_HL_LIBRARIES})
+target_link_libraries(caffe ${Atlas_LIBRARIES})
diff --git a/infra/nncc/cmake/packages/CaffeConfig.cmake b/infra/nncc/cmake/packages/CaffeConfig.cmake
new file mode 100644
index 000000000..7b5eb2f2e
--- /dev/null
+++ b/infra/nncc/cmake/packages/CaffeConfig.cmake
@@ -0,0 +1,62 @@
+function(_Caffe_import)
+ nncc_find_package(CaffeSource QUIET)
+
+ if(NOT CaffeSource_FOUND)
+ set(Caffe_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT CaffeSource_FOUND)
+
+ nncc_find_package(CaffeProto QUIET)
+
+ if(NOT CaffeProto_FOUND)
+ set(Caffe_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ find_package(Boost 1.54 COMPONENTS system thread filesystem QUIET)
+
+ if(NOT Boost_FOUND)
+ set(Caffe_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ find_package(HDF5 COMPONENTS HL QUIET)
+
+ if(NOT HDF5_FOUND)
+ set(Caffe_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ list(APPEND CMAKE_MODULE_PATH ${CaffeSource_DIR}/cmake/Modules)
+
+ find_package(Atlas QUIET)
+
+ if(NOT ATLAS_FOUND)
+ set(Caffe_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ nncc_find_package(GLog QUIET)
+
+ if(NOT GLog_FOUND)
+ set(Caffe_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ nncc_find_package(GFlags QUIET)
+
+ if(NOT GFlags_FOUND)
+ set(Caffe_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ if(NOT TARGET caffe)
+ nncc_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Caffe" caffe)
+ message(STATUS "Found Caffe: TRUE")
+ endif(NOT TARGET caffe)
+
+ set(Caffe_FOUND TRUE PARENT_SCOPE)
+endfunction(_Caffe_import)
+
+_Caffe_import()
diff --git a/infra/nncc/cmake/packages/CaffeProto/CMakeLists.txt b/infra/nncc/cmake/packages/CaffeProto/CMakeLists.txt
new file mode 100644
index 000000000..f9f8724a0
--- /dev/null
+++ b/infra/nncc/cmake/packages/CaffeProto/CMakeLists.txt
@@ -0,0 +1,6 @@
+Protobuf_Generate(CAFFE_PROTO "${CMAKE_CURRENT_BINARY_DIR}/generated/caffe" "${CaffeSource_DIR}/src" "caffe/proto/caffe.proto")
+
+add_library(caffeproto STATIC ${CAFFE_PROTO_SOURCES})
+set_target_properties(caffeproto PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(caffeproto PUBLIC ${CAFFE_PROTO_INCLUDE_DIRS})
+target_link_libraries(caffeproto libprotobuf)
diff --git a/infra/nncc/cmake/packages/CaffeProtoConfig.cmake b/infra/nncc/cmake/packages/CaffeProtoConfig.cmake
new file mode 100644
index 000000000..33c239509
--- /dev/null
+++ b/infra/nncc/cmake/packages/CaffeProtoConfig.cmake
@@ -0,0 +1,24 @@
+function(_CaffeProto_import)
+ nncc_find_package(CaffeSource QUIET)
+
+ if(NOT CaffeSource_FOUND)
+ set(CaffeProto_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT CaffeSource_FOUND)
+
+ nncc_find_package(Protobuf QUIET)
+
+ if(NOT Protobuf_FOUND)
+ set(CaffeProto_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Protobuf_FOUND)
+
+ if(NOT TARGET caffeproto)
+ nncc_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/CaffeProto" caffeproto)
+ endif(NOT TARGET caffeproto)
+
+ set(CaffeProto_FOUND TRUE PARENT_SCOPE)
+endfunction(_CaffeProto_import)
+
+_CaffeProto_import()
diff --git a/infra/nncc/cmake/packages/CaffeSourceConfig.cmake b/infra/nncc/cmake/packages/CaffeSourceConfig.cmake
new file mode 100644
index 000000000..91d334235
--- /dev/null
+++ b/infra/nncc/cmake/packages/CaffeSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_CaffeSource_import)
+ if(NOT DOWNLOAD_CAFFE)
+ set(CaffeSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_CAFFE)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(CAFFE_URL https://github.com/BVLC/caffe/archive/1.0.tar.gz)
+
+ ExternalSource_Download(CAFFE ${CAFFE_URL})
+
+ set(CaffeSource_DIR ${CAFFE_SOURCE_DIR} PARENT_SCOPE)
+ set(CaffeSource_FOUND ${DOWNLOAD_CAFFE} PARENT_SCOPE)
+endfunction(_CaffeSource_import)
+
+_CaffeSource_import()
diff --git a/infra/nncc/cmake/packages/EigenConfig.cmake b/infra/nncc/cmake/packages/EigenConfig.cmake
new file mode 100644
index 000000000..ac5164f68
--- /dev/null
+++ b/infra/nncc/cmake/packages/EigenConfig.cmake
@@ -0,0 +1,17 @@
+function(_Eigen_import)
+ nncc_find_package(EigenSource QUIET)
+
+ if(NOT EigenSource_FOUND)
+ set(Eigen_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT EigenSource_FOUND)
+
+ if(NOT TARGET eigen)
+ add_library(eigen INTERFACE)
+ target_include_directories(eigen INTERFACE "${EigenSource_DIR}")
+ endif(NOT TARGET eigen)
+
+ set(EigenSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_Eigen_import)
+
+_Eigen_import()
diff --git a/infra/nncc/cmake/packages/EigenSource-fd6845384b86Config.cmake b/infra/nncc/cmake/packages/EigenSource-fd6845384b86Config.cmake
new file mode 100644
index 000000000..bf0f94d29
--- /dev/null
+++ b/infra/nncc/cmake/packages/EigenSource-fd6845384b86Config.cmake
@@ -0,0 +1,26 @@
+# find_package rejects versions that contain a commit hash, so the commit ID is appended
+# to the package name as a workaround.
+#
+# TODO Find a better way
+function(_import)
+ if(NOT DOWNLOAD_EIGEN)
+ set(EigenSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_EIGEN)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(ThirdPartyTools)
+
+ ThirdParty_URL(EIGEN_URL PACKAGE Eigen VERSION fd6845384b86)
+
+ ExternalSource_Download(EIGEN
+ DIRNAME EIGEN-fd6845384b86
+ CHECKSUM MD5=4c884968ede816a84c70e2cd2c81de8d
+ ${EIGEN_URL}
+ )
+
+ set(EigenSource_DIR ${EIGEN_SOURCE_DIR} PARENT_SCOPE)
+ set(EigenSource-fd6845384b86_FOUND TRUE PARENT_SCOPE)
+endfunction(_import)
+
+_import()
diff --git a/infra/nncc/cmake/packages/EigenSourceConfig.cmake b/infra/nncc/cmake/packages/EigenSourceConfig.cmake
new file mode 100644
index 000000000..f87f53304
--- /dev/null
+++ b/infra/nncc/cmake/packages/EigenSourceConfig.cmake
@@ -0,0 +1,19 @@
+function(_EigenSource_import)
+ if(NOT DOWNLOAD_EIGEN)
+ set(EigenSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_EIGEN)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ # NOTE The following URL comes from TensorFlow 1.7
+ envoption(EIGEN_URL https://bitbucket.org/eigen/eigen/get/2355b229ea4c.tar.gz)
+
+ ExternalSource_Download(EIGEN ${EIGEN_URL})
+
+ set(EigenSource_DIR ${EIGEN_SOURCE_DIR} PARENT_SCOPE)
+ set(EigenSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_EigenSource_import)
+
+_EigenSource_import()
diff --git a/infra/nncc/cmake/packages/Farmhash/CMakeLists.txt b/infra/nncc/cmake/packages/Farmhash/CMakeLists.txt
new file mode 100644
index 000000000..3da57a498
--- /dev/null
+++ b/infra/nncc/cmake/packages/Farmhash/CMakeLists.txt
@@ -0,0 +1,3 @@
+add_library(farmhash "${FarmhashSource_DIR}/src/farmhash.cc")
+set_target_properties(farmhash PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(farmhash PUBLIC "${FarmhashSource_DIR}/src")
diff --git a/infra/nncc/cmake/packages/FarmhashConfig.cmake b/infra/nncc/cmake/packages/FarmhashConfig.cmake
new file mode 100644
index 000000000..68f3d7c49
--- /dev/null
+++ b/infra/nncc/cmake/packages/FarmhashConfig.cmake
@@ -0,0 +1,17 @@
+function(_Farmhash_import)
+ nncc_find_package(FarmhashSource QUIET)
+
+ if(NOT FarmhashSource_FOUND)
+ set(Farmhash_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT FarmhashSource_FOUND)
+
+ if(NOT TARGET farmhash)
+ nncc_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Farmhash" farmhash)
+ endif(NOT TARGET farmhash)
+
+ set(Farmhash_FOUND TRUE PARENT_SCOPE)
+endfunction(_Farmhash_import)
+
+_Farmhash_import()
diff --git a/infra/nncc/cmake/packages/FarmhashSourceConfig.cmake b/infra/nncc/cmake/packages/FarmhashSourceConfig.cmake
new file mode 100644
index 000000000..207909fab
--- /dev/null
+++ b/infra/nncc/cmake/packages/FarmhashSourceConfig.cmake
@@ -0,0 +1,19 @@
+function(_FarmhashSource_import)
+ if(NOT DOWNLOAD_FARMHASH)
+ set(FarmhashSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_FARMHASH)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ # NOTE TensorFlow 1.7 downloads farmhash from the following URL
+ envoption(FARMHASH_URL https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz)
+
+ ExternalSource_Download(FARMHASH ${FARMHASH_URL})
+
+ set(FarmhashSource_DIR ${FARMHASH_SOURCE_DIR} PARENT_SCOPE)
+ set(FarmhashSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_FarmhashSource_import)
+
+_FarmhashSource_import()
diff --git a/infra/nncc/cmake/packages/FlatBuffersConfig.cmake b/infra/nncc/cmake/packages/FlatBuffersConfig.cmake
new file mode 100644
index 000000000..45511ca5e
--- /dev/null
+++ b/infra/nncc/cmake/packages/FlatBuffersConfig.cmake
@@ -0,0 +1,135 @@
+function(_FlatBuffers_import)
+ find_package(Flatbuffers QUIET)
+ set(FlatBuffers_FOUND ${Flatbuffers_FOUND} PARENT_SCOPE)
+endfunction(_FlatBuffers_import)
+
+function(_FlatBuffers_build)
+ if(NOT BUILD_FLATBUFFERS)
+ return()
+ endif(NOT BUILD_FLATBUFFERS)
+
+ nncc_find_package(FlatBuffersSource EXACT 1.10 QUIET)
+
+ if(NOT FlatBuffersSource_FOUND)
+ # Source is not available
+ return()
+ endif(NOT FlatBuffersSource_FOUND)
+
+ # TODO Introduce helper functions
+ set(FLATBUFFERS_BUILD "${CMAKE_BINARY_DIR}/externals/FLATBUFFERS/build")
+ set(FLATBUFFERS_INSTALL "${NNCC_OVERLAY_DIR}")
+
+ set(STAMP_PATH "${FLATBUFFERS_INSTALL}/FLATBUFFERS.stamp")
+ set(LOG_PATH "${FLATBUFFERS_INSTALL}/FLATBUFFERS.log")
+
+ if(EXISTS ${STAMP_PATH})
+ return()
+ endif(EXISTS ${STAMP_PATH})
+
+ message(STATUS "Build Flatbuffers from ${FlatBuffersSource_DIR}")
+
+ file(MAKE_DIRECTORY ${FLATBUFFERS_BUILD})
+ file(MAKE_DIRECTORY ${FLATBUFFERS_INSTALL})
+
+ # NOTE Do NOT retry Flatbuffers build once it fails
+ file(WRITE "${STAMP_PATH}")
+
+ execute_process(COMMAND ${CMAKE_COMMAND}
+ -DCMAKE_INSTALL_PREFIX=${FLATBUFFERS_INSTALL}
+ -DCMAKE_BUILD_TYPE=Release
+ ${FlatBuffersSource_DIR}
+ OUTPUT_FILE ${LOG_PATH}
+ ERROR_FILE ${LOG_PATH}
+ WORKING_DIRECTORY ${FLATBUFFERS_BUILD}
+ RESULT_VARIABLE BUILD_EXITCODE)
+
+ execute_process(COMMAND ${CMAKE_COMMAND} --build . -- install
+ OUTPUT_FILE ${LOG_PATH}
+ ERROR_FILE ${LOG_PATH}
+ WORKING_DIRECTORY ${FLATBUFFERS_BUILD}
+ RESULT_VARIABLE INSTALL_EXITCODE)
+
+ if(BUILD_EXITCODE EQUAL 0 AND INSTALL_EXITCODE EQUAL 0)
+ message(STATUS "Succeeded in building Flatbuffers")
+ else()
+ message(FATAL_ERROR "Fail to build Flatbuffers (check '${LOG_PATH}' for details)")
+ endif(BUILD_EXITCODE EQUAL 0 AND INSTALL_EXITCODE EQUAL 0)
+endfunction(_FlatBuffers_build)
+
+_FlatBuffers_build()
+_FlatBuffers_import()
+
+if(FlatBuffers_FOUND)
+ if(NOT TARGET flatbuffers)
+ add_library(flatbuffers INTERFACE)
+ target_link_libraries(flatbuffers INTERFACE flatbuffers::flatbuffers)
+ message(STATUS "Found FlatBuffers: TRUE")
+ endif(NOT TARGET flatbuffers)
+
+ function(FlatBuffers_Generate PREFIX OUTPUT_DIR SCHEMA_DIR)
+ get_filename_component(abs_output_dir ${OUTPUT_DIR} ABSOLUTE)
+ get_filename_component(abs_schema_dir ${SCHEMA_DIR} ABSOLUTE)
+
+ foreach(schema ${ARGN})
+ get_filename_component(schema_fn "${schema}" NAME)
+ get_filename_component(dir "${schema}" DIRECTORY)
+
+ get_filename_component(schema_fn_we "${schema_fn}" NAME_WE)
+
+ list(APPEND SCHEMA_FILES "${abs_schema_dir}/${schema}")
+ list(APPEND OUTPUT_FILES "${abs_output_dir}/${schema_fn_we}_generated.h")
+ endforeach()
+
+ add_custom_command(OUTPUT ${OUTPUT_FILES}
+ COMMAND ${CMAKE_COMMAND} -E make_directory "${abs_output_dir}"
+ COMMAND "$<TARGET_FILE:flatbuffers::flatc>" -c --no-includes
+ --no-union-value-namespacing
+ --gen-object-api -o "${abs_output_dir}"
+ ${SCHEMA_FILES}
+ DEPENDS flatbuffers::flatc)
+
+ set(${PREFIX}_SOURCES ${OUTPUT_FILES} PARENT_SCOPE)
+ set(${PREFIX}_INCLUDE_DIRS ${abs_output_dir} PARENT_SCOPE)
+ endfunction(FlatBuffers_Generate)
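+
+ # Usage sketch (hypothetical schema file): generates foo_generated.h under the
+ # given output directory and reports the results via FOO_SOURCES and
+ # FOO_INCLUDE_DIRS.
+ #
+ # FlatBuffers_Generate(FOO "${CMAKE_CURRENT_BINARY_DIR}/gen" "${CMAKE_CURRENT_SOURCE_DIR}" foo.fbs)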
+
+ function(FlatBuffers_Target TGT)
+ set(oneValueArgs OUTPUT_DIR SCHEMA_DIR)
+ set(multiValueArgs SCHEMA_FILES)
+ cmake_parse_arguments(ARG "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
+
+ get_filename_component(abs_output_dir ${ARG_OUTPUT_DIR} ABSOLUTE)
+ get_filename_component(abs_schema_dir ${ARG_SCHEMA_DIR} ABSOLUTE)
+
+ # Let's reset list variables before using them
+ # NOTE THIS DOES NOT AFFECT parent scope
+ unset(SCHEMA_FILES)
+ unset(OUTPUT_FILES)
+
+ foreach(schema ${ARG_SCHEMA_FILES})
+ get_filename_component(schema_fn "${schema}" NAME)
+ get_filename_component(dir "${schema}" DIRECTORY)
+
+ get_filename_component(schema_fn_we "${schema_fn}" NAME_WE)
+
+ list(APPEND SCHEMA_FILES "${abs_schema_dir}/${schema}")
+ list(APPEND OUTPUT_FILES "${abs_output_dir}/${schema_fn_we}_generated.h")
+ endforeach()
+
+ # Generate headers
+ add_custom_command(OUTPUT ${OUTPUT_FILES}
+ COMMAND ${CMAKE_COMMAND} -E make_directory "${abs_output_dir}"
+ COMMAND "$<TARGET_FILE:flatbuffers::flatc>" -c --no-includes
+ --no-union-value-namespacing
+ --gen-object-api -o "${abs_output_dir}"
+ ${SCHEMA_FILES}
+ DEPENDS ${SCHEMA_FILES}
+ COMMENT "Generate '${TGT}' headers")
+
+ # NOTE This header-only library is deliberately declared as STATIC library
+ # to avoid possible scope issues related to generated files
+ add_library(${TGT} STATIC ${OUTPUT_FILES})
+ set_target_properties(${TGT} PROPERTIES LINKER_LANGUAGE CXX)
+ target_include_directories(${TGT} PUBLIC "${ARG_OUTPUT_DIR}")
+ target_link_libraries(${TGT} PUBLIC flatbuffers)
+ endfunction(FlatBuffers_Target)
+endif(FlatBuffers_FOUND)
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake b/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake
new file mode 100644
index 000000000..c5f4dc9b7
--- /dev/null
+++ b/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_FlatBuffersSource_import)
+ if(NOT DOWNLOAD_FLATBUFFERS)
+ set(FlatBuffersSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_FLATBUFFERS)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(FLATBUFFERS_1_10_URL https://github.com/google/flatbuffers/archive/v1.10.0.tar.gz)
+
+ ExternalSource_Download(FLATBUFFERS DIRNAME FLATBUFFERS-1.10 ${FLATBUFFERS_1_10_URL})
+
+ set(FlatBuffersSource_DIR ${FLATBUFFERS_SOURCE_DIR} PARENT_SCOPE)
+ set(FlatBuffersSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_FlatBuffersSource_import)
+
+_FlatBuffersSource_import()
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake b/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake
new file mode 100644
index 000000000..6585f21d5
--- /dev/null
+++ b/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.10")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfig.cmake b/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfig.cmake
new file mode 100644
index 000000000..46935b9f7
--- /dev/null
+++ b/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_FlatBuffersSource_import)
+ if(NOT DOWNLOAD_FLATBUFFERS)
+ set(FlatBuffersSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_FLATBUFFERS)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(FLATBUFFERS_1_8_URL https://github.com/google/flatbuffers/archive/v1.8.0.tar.gz)
+
+ ExternalSource_Download(FLATBUFFERS DIRNAME FLATBUFFERS-1.8 ${FLATBUFFERS_1_8_URL})
+
+ set(FlatBuffersSource_DIR ${FLATBUFFERS_SOURCE_DIR} PARENT_SCOPE)
+ set(FlatBuffersSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_FlatBuffersSource_import)
+
+_FlatBuffersSource_import()
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfigVersion.cmake b/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfigVersion.cmake
new file mode 100644
index 000000000..ac5e9b2b9
--- /dev/null
+++ b/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.8")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/FlatBuffersSourceConfig.cmake b/infra/nncc/cmake/packages/FlatBuffersSourceConfig.cmake
new file mode 100644
index 000000000..63a9ccdd0
--- /dev/null
+++ b/infra/nncc/cmake/packages/FlatBuffersSourceConfig.cmake
@@ -0,0 +1,25 @@
+function(_FlatBuffersSource_import)
+ if(NOT DOWNLOAD_FLATBUFFERS)
+ set(FlatBuffersSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_FLATBUFFERS)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ # Each TensorFlow needs a specific version of Flatbuffers
+ # - TensorFlow 1.7 downloads it from https://github.com/google/flatbuffers/archive/971a68110e4.tar.gz
+ # - TensorFlow 1.12 downloads it from https://github.com/google/flatbuffers/archive/1f5eae5d6a1.tar.gz
+ #
+ # Let's use 1.10 released in 2018.10 (compatible with 1f5eae5d6a1).
+ #
+ # TODO Manage multiple versions
+ envoption(FLATBUFFERS_URL https://github.com/google/flatbuffers/archive/v1.10.0.tar.gz)
+
+ ExternalSource_Download(FLATBUFFERS ${FLATBUFFERS_URL})
+
+ set(FlatBuffersSource_DIR ${FLATBUFFERS_SOURCE_DIR} PARENT_SCOPE)
+ set(FlatBuffersSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_FlatBuffersSource_import)
+
+_FlatBuffersSource_import()
diff --git a/infra/nncc/cmake/packages/FlatBuffersSourceConfigVersion.cmake b/infra/nncc/cmake/packages/FlatBuffersSourceConfigVersion.cmake
new file mode 100644
index 000000000..ac9e22e51
--- /dev/null
+++ b/infra/nncc/cmake/packages/FlatBuffersSourceConfigVersion.cmake
@@ -0,0 +1,9 @@
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(NOT PACKAGE_FIND_VERSION)
+ # This package works only when find_package(...) call has no EXACT option
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(NOT PACKAGE_FIND_VERSION)
diff --git a/infra/nncc/cmake/packages/GEMMLowpConfig.cmake b/infra/nncc/cmake/packages/GEMMLowpConfig.cmake
new file mode 100644
index 000000000..f469a0a08
--- /dev/null
+++ b/infra/nncc/cmake/packages/GEMMLowpConfig.cmake
@@ -0,0 +1,20 @@
+function(_GEMMLowp_import)
+ nncc_find_package(GEMMLowpSource QUIET)
+
+ if(NOT GEMMLowpSource_FOUND)
+ set(GEMMLowp_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT GEMMLowpSource_FOUND)
+
+ if(NOT TARGET gemmlowp)
+ find_package(Threads REQUIRED)
+
+ add_library(gemmlowp INTERFACE)
+ target_include_directories(gemmlowp INTERFACE ${GEMMLowpSource_DIR})
+ target_link_libraries(gemmlowp INTERFACE Threads::Threads)
+ endif(NOT TARGET gemmlowp)
+
+ set(GEMMLowp_FOUND TRUE PARENT_SCOPE)
+endfunction(_GEMMLowp_import)
+
+_GEMMLowp_import()
diff --git a/infra/nncc/cmake/packages/GEMMLowpSourceConfig.cmake b/infra/nncc/cmake/packages/GEMMLowpSourceConfig.cmake
new file mode 100644
index 000000000..a18a4cdc0
--- /dev/null
+++ b/infra/nncc/cmake/packages/GEMMLowpSourceConfig.cmake
@@ -0,0 +1,19 @@
+function(_GEMMLowpSource_import)
+ if(NOT DOWNLOAD_GEMMLOWP)
+ set(GEMMLowpSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_GEMMLOWP)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ # NOTE TensorFlow 1.7 uses the following URL
+ envoption(GEMMLOWP_URL https://github.com/google/gemmlowp/archive/7c7c744640ddc3d0af18fb245b4d23228813a71b.zip)
+
+ ExternalSource_Download(GEMMLOWP ${GEMMLOWP_URL})
+
+ set(GEMMLowpSource_DIR ${GEMMLOWP_SOURCE_DIR} PARENT_SCOPE)
+ set(GEMMLowpSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_GEMMLowpSource_import)
+
+_GEMMLowpSource_import()
diff --git a/infra/nncc/cmake/packages/GFlagsConfig.cmake b/infra/nncc/cmake/packages/GFlagsConfig.cmake
new file mode 100644
index 000000000..891320a95
--- /dev/null
+++ b/infra/nncc/cmake/packages/GFlagsConfig.cmake
@@ -0,0 +1,35 @@
+function(_GFlags_import)
+ if(TARGET gflags)
+ set(GFlags_FOUND True PARENT_SCOPE)
+ return()
+ endif()
+
+ nncc_find_package(GFlagsSource QUIET)
+
+ if(GFlagsSource_FOUND)
+ nncc_include(ExternalProjectTools)
+ # Build the shared, multi-threaded gflags library
+ set(BUILD_SHARED_LIBS On)
+ set(BUILD_STATIC_LIBS Off)
+ set(BUILD_gflags_LIB On)
+ set(BUILD_gflags_nothreads_LIB Off)
+ add_extdirectory(${GFLAGS_SOURCE_DIR} gflags)
+ else(GFlagsSource_FOUND)
+ set(GFLAGS_ROOT_DIR "" CACHE PATH "Folder contains GFlags")
+ find_path(GFLAGS_INCLUDE_DIR gflags/gflags.h PATHS ${GFLAGS_ROOT_DIR})
+ find_library(GFLAGS_LIBRARY gflags)
+
+ if(NOT GFLAGS_INCLUDE_DIR)
+ set(GFlags_FOUND False PARENT_SCOPE)
+ return()
+ endif(NOT GFLAGS_INCLUDE_DIR)
+
+ add_library(gflags INTERFACE)
+ target_include_directories(gflags INTERFACE ${GFLAGS_INCLUDE_DIR})
+ target_link_libraries(gflags INTERFACE ${GFLAGS_LIBRARY})
+ endif(GFlagsSource_FOUND)
+
+ set(GFlags_FOUND True PARENT_SCOPE)
+endfunction(_GFlags_import)
+
+_GFlags_import()
diff --git a/infra/nncc/cmake/packages/GFlagsSourceConfig.cmake b/infra/nncc/cmake/packages/GFlagsSourceConfig.cmake
new file mode 100644
index 000000000..17970b0ec
--- /dev/null
+++ b/infra/nncc/cmake/packages/GFlagsSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_GFlagsSource_import)
+ if(NOT DOWNLOAD_GFLAGS)
+ set(GFlagsSource_FOUND False PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_GFLAGS)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(GFLAGS_URL https://github.com/gflags/gflags/archive/v2.2.1.tar.gz)
+
+ ExternalSource_Download(GFLAGS ${GFLAGS_URL})
+
+ set(GFLAGS_SOURCE_DIR ${GFLAGS_SOURCE_DIR} PARENT_SCOPE)
+ set(GFlagsSource_FOUND True PARENT_SCOPE)
+endfunction(_GFlagsSource_import)
+
+_GFlagsSource_import()
diff --git a/infra/nncc/cmake/packages/GLogConfig.cmake b/infra/nncc/cmake/packages/GLogConfig.cmake
new file mode 100644
index 000000000..e5ed02cd3
--- /dev/null
+++ b/infra/nncc/cmake/packages/GLogConfig.cmake
@@ -0,0 +1,24 @@
+function(_GLog_import)
+ if(TARGET glog)
+ set(GLog_FOUND True PARENT_SCOPE)
+ return()
+ endif()
+
+ set(GLOG_ROOT_DIR "" CACHE PATH "Folder contains Google Log")
+ find_path(GLOG_INCLUDE_DIR glog/logging.h PATHS ${GLOG_ROOT_DIR})
+ find_library(GLOG_LIBRARY glog)
+
+ if(NOT GLOG_INCLUDE_DIR)
+ set(GLog_FOUND False PARENT_SCOPE)
+ return()
+ endif(NOT GLOG_INCLUDE_DIR)
+
+ add_library(glog INTERFACE)
+ target_include_directories(glog INTERFACE ${GLOG_INCLUDE_DIR} ${GFLAGS_INCLUDE_DIR})
+ target_link_libraries(glog INTERFACE ${GLOG_LIBRARY} gflags)
+
+ message(STATUS "Found GLog: TRUE")
+ set(GLog_FOUND True PARENT_SCOPE)
+endfunction(_GLog_import)
+
+_GLog_import()
diff --git a/infra/nncc/cmake/packages/GTestConfig.cmake b/infra/nncc/cmake/packages/GTestConfig.cmake
new file mode 100644
index 000000000..27f96b27d
--- /dev/null
+++ b/infra/nncc/cmake/packages/GTestConfig.cmake
@@ -0,0 +1,86 @@
+function(_GTest_build)
+ if(NOT BUILD_GTEST)
+ return()
+ endif(NOT BUILD_GTEST)
+
+ nncc_find_package(GTestSource QUIET)
+
+ if(NOT GTestSource_FOUND)
+ return()
+ endif(NOT GTestSource_FOUND)
+
+ # TODO Introduce helper functions
+ set(GTEST_SOURCE_DIR "${GTestSource_DIR}")
+ set(GTEST_BUILD_DIR "${CMAKE_BINARY_DIR}/externals/GTEST/build")
+ set(GTEST_INSTALL_DIR "${NNCC_OVERLAY_DIR}")
+
+ set(STAMP_PATH "${GTEST_INSTALL_DIR}/GTEST.stamp")
+ set(LOG_PATH "${GTEST_INSTALL_DIR}/GTEST.log")
+
+ if(EXISTS ${STAMP_PATH})
+ return()
+ endif(EXISTS ${STAMP_PATH})
+
+ message(STATUS "Google Test Package: Source found (path: ${GTEST_SOURCE_DIR})")
+
+ file(MAKE_DIRECTORY ${GTEST_BUILD_DIR})
+ file(MAKE_DIRECTORY ${GTEST_INSTALL_DIR})
+
+ # NOTE Do NOT retry build once it failed
+ file(WRITE "${STAMP_PATH}")
+
+ execute_process(COMMAND ${CMAKE_COMMAND}
+ -DCMAKE_INSTALL_PREFIX=${GTEST_INSTALL_DIR}
+ -DCMAKE_BUILD_TYPE=Release
+ ${GTestSource_DIR}
+ OUTPUT_FILE ${LOG_PATH}
+ ERROR_FILE ${LOG_PATH}
+ WORKING_DIRECTORY ${GTEST_BUILD_DIR}
+ RESULT_VARIABLE BUILD_EXITCODE)
+
+ if(NOT BUILD_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "Google Test Package: Build failed (check '${LOG_PATH}' for details)")
+ endif(NOT BUILD_EXITCODE EQUAL 0)
+
+ execute_process(COMMAND ${CMAKE_COMMAND} --build . -- install
+ OUTPUT_FILE ${LOG_PATH}
+ ERROR_FILE ${LOG_PATH}
+ WORKING_DIRECTORY ${GTEST_BUILD_DIR}
+ RESULT_VARIABLE INSTALL_EXITCODE)
+
+ if(NOT INSTALL_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "Google Test Package: Installation failed (check '${LOG_PATH}' for details)")
+ endif(NOT INSTALL_EXITCODE EQUAL 0)
+
+ message(STATUS "Google Test Package: Done")
+endfunction(_GTest_build)
+
+_GTest_build()
+
+### Find and use pre-installed Google Test
+find_package(GTest)
+find_package(Threads)
+
+if(${GTEST_FOUND} AND TARGET Threads::Threads)
+ if(NOT TARGET gtest)
+ add_library(gtest INTERFACE)
+ target_include_directories(gtest INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest INTERFACE ${GTEST_LIBRARIES} Threads::Threads)
+ endif(NOT TARGET gtest)
+
+ if(NOT TARGET gtest_main)
+ add_library(gtest_main INTERFACE)
+ target_include_directories(gtest_main INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest_main INTERFACE gtest)
+ target_link_libraries(gtest_main INTERFACE ${GTEST_MAIN_LIBRARIES})
+
+ # GTest_AddTest(TGT ...) creates an executable target and registers that executable as a CMake test
+ function(GTest_AddTest TGT)
+ add_executable(${TGT} ${ARGN})
+ target_link_libraries(${TGT} gtest_main)
+ add_test(${TGT} ${TGT})
+ endfunction(GTest_AddTest)
+ endif(NOT TARGET gtest_main)
+
+ set(GTest_FOUND TRUE)
+endif(${GTEST_FOUND} AND TARGET Threads::Threads)
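
With this config loaded, GTest_AddTest makes test registration a one-liner; a
minimal sketch (target and source names are illustrative):

    nncc_find_package(GTest REQUIRED)

    # Builds the executable, links gtest_main (and, transitively, gtest and
    # Threads::Threads), and registers the binary with CTest.
    GTest_AddTest(my_module_test MyModule.test.cpp)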
diff --git a/infra/nncc/cmake/packages/GTestSourceConfig.cmake b/infra/nncc/cmake/packages/GTestSourceConfig.cmake
new file mode 100644
index 000000000..d7c9d53c6
--- /dev/null
+++ b/infra/nncc/cmake/packages/GTestSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_GTestSource_import)
+ if(NOT DOWNLOAD_GTEST)
+ set(GTestSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_GTEST)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(GTEST_URL https://github.com/google/googletest/archive/release-1.8.0.tar.gz)
+
+ ExternalSource_Download(GTEST ${GTEST_URL})
+
+ set(GTestSource_DIR ${GTEST_SOURCE_DIR} PARENT_SCOPE)
+ set(GTestSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_GTestSource_import)
+
+_GTestSource_import()
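
The envoption helper from OptionTools is what makes the download URL overridable
from the environment. A hypothetical sketch of the presumed behavior, for
illustration only (OptionTools itself is not shown here):

    # Presumed semantics of envoption(NAME DEFAULT): use the environment
    # variable when set, otherwise fall back to the given default.
    macro(envoption NAME)
      set(${NAME} ${ARGN})
      if(DEFINED ENV{${NAME}})
        set(${NAME} $ENV{${NAME}})
      endif()
    endmacro(envoption)

    # Shell usage (illustrative):
    #   GTEST_URL=https://mirror.example.com/googletest-1.8.0.tar.gz ./nncc configure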
diff --git a/infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake b/infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake
new file mode 100644
index 000000000..3fdc86102
--- /dev/null
+++ b/infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake
@@ -0,0 +1,52 @@
+# https://github.com/google/double-conversion
+set(GOOGLE_DOUBLE_CONVERSION_PREFIX "/usr" CACHE PATH "Google DoubleConversion install prefix")
+
+function(_GoogleDoubleConversion_import)
+ # Find the header & lib
+ find_library(GoogleDoubleConversion_LIB
+ NAMES double-conversion
+ PATHS "${GOOGLE_DOUBLE_CONVERSION_PREFIX}/lib"
+ )
+
+ find_path(GoogleDoubleConversion_INCLUDE_DIR
+ NAMES double-conversion/double-conversion.h
+ PATHS "${GOOGLE_DOUBLE_CONVERSION_PREFIX}/include"
+ )
+
+ # TODO Version check
+ set(GoogleDoubleConversion_FOUND TRUE)
+
+ if(NOT GoogleDoubleConversion_LIB)
+ set(GoogleDoubleConversion_FOUND FALSE)
+ endif(NOT GoogleDoubleConversion_LIB)
+
+ if(NOT GoogleDoubleConversion_INCLUDE_DIR)
+ set(GoogleDoubleConversion_FOUND FALSE)
+ endif(NOT GoogleDoubleConversion_INCLUDE_DIR)
+
+ set(GoogleDoubleConversion_FOUND ${GoogleDoubleConversion_FOUND} PARENT_SCOPE)
+
+ unset(MESSAGE)
+ list(APPEND MESSAGE "Found Google Double Conversion")
+
+ if(NOT GoogleDoubleConversion_FOUND)
+ list(APPEND MESSAGE ": FALSE")
+ else(NOT GoogleDoubleConversion_FOUND)
+ list(APPEND MESSAGE " (include: ${GoogleDoubleConversion_INCLUDE_DIR} library: ${GoogleDoubleConversion_LIB})")
+
+ # Add target
+ if(NOT TARGET google_double_conversion)
+ # NOTE IMPORTED target may be more appropriate for this case
+ add_library(google_double_conversion INTERFACE)
+ target_link_libraries(google_double_conversion INTERFACE ${GoogleDoubleConversion_LIB})
+ target_include_directories(google_double_conversion INTERFACE ${GoogleDoubleConversion_INCLUDE_DIR})
+
+ add_library(Google::DoubleConversion ALIAS google_double_conversion)
+ endif(NOT TARGET google_double_conversion)
+ endif(NOT GoogleDoubleConversion_FOUND)
+
+ message(STATUS ${MESSAGE})
+ set(GoogleDoubleConversion_FOUND ${GoogleDoubleConversion_FOUND} PARENT_SCOPE)
+endfunction(_GoogleDoubleConversion_import)
+
+_GoogleDoubleConversion_import()
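
Besides the plain `google_double_conversion` target, the config exposes a
namespaced ALIAS, so dependents can use the conventional `Google::` spelling
(consumer target name illustrative):

    nncc_find_package(GoogleDoubleConversion QUIET)

    if(GoogleDoubleConversion_FOUND)
      target_link_libraries(my_tool PRIVATE Google::DoubleConversion)
    endif(GoogleDoubleConversion_FOUND)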
diff --git a/infra/nncc/cmake/packages/GoogleNSyncConfig.cmake b/infra/nncc/cmake/packages/GoogleNSyncConfig.cmake
new file mode 100644
index 000000000..1fdf8cc20
--- /dev/null
+++ b/infra/nncc/cmake/packages/GoogleNSyncConfig.cmake
@@ -0,0 +1,62 @@
+# https://github.com/google/nsync
+set(GOOGLE_NSYNC_PREFIX "/usr" CACHE PATH "Where to find Google NSync library")
+
+function(_GoogleNSync_import)
+ # Find the header & lib
+ find_library(GoogleNSync_C_LIB
+ NAMES nsync
+ PATHS "${GOOGLE_NSYNC_PREFIX}/lib"
+ )
+
+ find_library(GoogleNSync_CPP_LIB
+ NAMES nsync_cpp
+ PATHS "${GOOGLE_NSYNC_PREFIX}/lib"
+ )
+
+ find_path(GoogleNSync_INCLUDE_DIR
+ NAMES nsync.h
+ PATHS "${GOOGLE_NSYNC_PREFIX}/include"
+ )
+
+ message(STATUS "GoogleNSync_C_LIB: ${GoogleNSync_C_LIB}")
+ message(STATUS "GoogleNSync_CPP_LIB: ${GoogleNSync_CPP_LIB}")
+ message(STATUS "GoogleNSync_INCLUDE_DIR: ${GoogleNSync_INCLUDE_DIR}")
+
+ set(GoogleNSync_FOUND TRUE)
+
+ if(NOT GoogleNSync_C_LIB)
+ set(GoogleNSync_FOUND FALSE)
+ endif(NOT GoogleNSync_C_LIB)
+
+ if(NOT GoogleNSync_CPP_LIB)
+ set(GoogleNSync_FOUND FALSE)
+ endif(NOT GoogleNSync_CPP_LIB)
+
+ if(NOT GoogleNSync_INCLUDE_DIR)
+ set(GoogleNSync_FOUND FALSE)
+ endif(NOT GoogleNSync_INCLUDE_DIR)
+
+ unset(MESSAGE)
+ list(APPEND MESSAGE "Found Google NSync")
+
+ if(NOT GoogleNSync_FOUND)
+ list(APPEND MESSAGE ": FALSE")
+ else(NOT GoogleNSync_FOUND)
+ list(APPEND MESSAGE " (include: ${GoogleNSync_INCLUDE_DIR} library: ${GoogleNSync_C_LIB} ${GoogleNSync_CPP_LIB})")
+
+ # Add target
+ if(NOT TARGET google_nsync)
+ # NOTE IMPORTED target may be more appropriate for this case
+ add_library(google_nsync INTERFACE)
+ target_link_libraries(google_nsync INTERFACE ${GoogleNSync_C_LIB} ${GoogleNSync_CPP_LIB})
+ target_include_directories(google_nsync INTERFACE ${GoogleNSync_INCLUDE_DIR})
+
+ add_library(Google::NSync ALIAS google_nsync)
+ endif(NOT TARGET google_nsync)
+ endif(NOT GoogleNSync_FOUND)
+
+ message(STATUS ${MESSAGE})
+ set(GoogleNSync_FOUND ${GoogleNSync_FOUND} PARENT_SCOPE)
+endfunction(_GoogleNSync_import)
+
+_GoogleNSync_import()
diff --git a/infra/nncc/cmake/packages/LLVMConfig.cmake b/infra/nncc/cmake/packages/LLVMConfig.cmake
new file mode 100644
index 000000000..0f8faec7f
--- /dev/null
+++ b/infra/nncc/cmake/packages/LLVMConfig.cmake
@@ -0,0 +1,45 @@
+function(_LLVM_import)
+
+ if(NOT TARGET llvm)
+ find_program(llvm_config "llvm-config")
+ if (NOT llvm_config)
+ return()
+ endif(NOT llvm_config)
+ message(STATUS "Found llvm-config: ${llvm_config}")
+
+ # get llvm compile options
+ execute_process(COMMAND ${llvm_config} --cppflags OUTPUT_VARIABLE
+ LLVM_CPPFLAGS_STR OUTPUT_STRIP_TRAILING_WHITESPACE)
+ # split one string to list of option items
+ string(REPLACE " " ";" LLVM_CPPFLAGS ${LLVM_CPPFLAGS_STR})
+ execute_process(COMMAND ${llvm_config} --has-rtti OUTPUT_VARIABLE
+ LLVM_HAS_RTTI OUTPUT_STRIP_TRAILING_WHITESPACE)
+ if("${LLVM_HAS_RTTI}" STREQUAL "NO")
+ list(APPEND LLVM_CPPFLAGS "-fno-rtti")
+ endif()
+ # note: "llvm-config --cxxflags" returns whole string but also includes
+ # unwanted "-O3 -DNDEBUG" and several "-Wno-" options so this is not used
+
+ # get llvm link options
+ execute_process(COMMAND ${llvm_config} --ldflags OUTPUT_VARIABLE
+ LLVM_LINKFLAGS OUTPUT_STRIP_TRAILING_WHITESPACE)
+ execute_process(COMMAND ${llvm_config} --system-libs
+ OUTPUT_VARIABLE LLVM_LINKSYSLIBS OUTPUT_STRIP_TRAILING_WHITESPACE)
+ execute_process(COMMAND ${llvm_config} --libs core mcjit native
+ OUTPUT_VARIABLE LLVM_LINKLIBS OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+ add_library(llvm INTERFACE)
+
+ foreach(ONE_CPPFLAG ${LLVM_CPPFLAGS})
+ target_compile_options(llvm INTERFACE ${ONE_CPPFLAG})
+ endforeach()
+ target_link_libraries(llvm INTERFACE ${LLVM_LINKFLAGS})
+ target_link_libraries(llvm INTERFACE ${LLVM_LINKLIBS})
+ target_link_libraries(llvm INTERFACE ${LLVM_LINKSYSLIBS})
+
+ endif(NOT TARGET llvm)
+
+ set(LLVM_FOUND TRUE PARENT_SCOPE)
+endfunction(_LLVM_import)
+
+_LLVM_import()
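
All of the harvested compile and link flags travel on the `llvm` INTERFACE
target, so a JIT-enabled consumer (target name illustrative) needs nothing
LLVM-specific beyond:

    nncc_find_package(LLVM QUIET)

    if(LLVM_FOUND)
      # Compile options (e.g. -fno-rtti) and the core/mcjit/native libraries
      # are inherited from the INTERFACE target.
      target_link_libraries(my_jit_tool PRIVATE llvm)
    endif(LLVM_FOUND)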
diff --git a/infra/nncc/cmake/packages/NEON2SSEConfig.cmake b/infra/nncc/cmake/packages/NEON2SSEConfig.cmake
new file mode 100644
index 000000000..c7f0c294e
--- /dev/null
+++ b/infra/nncc/cmake/packages/NEON2SSEConfig.cmake
@@ -0,0 +1,17 @@
+function(_NEON2SSE_import)
+ nncc_find_package(NEON2SSESource QUIET)
+
+ if(NOT NEON2SSESource_FOUND)
+ set(NEON2SSE_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT NEON2SSESource_FOUND)
+
+ if(NOT TARGET neon2sse)
+ add_library(neon2sse INTERFACE)
+ target_include_directories(neon2sse INTERFACE "${NEON2SSESource_DIR}")
+ endif(NOT TARGET neon2sse)
+
+ set(NEON2SSE_FOUND TRUE PARENT_SCOPE)
+endfunction(_NEON2SSE_import)
+
+_NEON2SSE_import()
diff --git a/infra/nncc/cmake/packages/NEON2SSESourceConfig.cmake b/infra/nncc/cmake/packages/NEON2SSESourceConfig.cmake
new file mode 100644
index 000000000..f66c5cf41
--- /dev/null
+++ b/infra/nncc/cmake/packages/NEON2SSESourceConfig.cmake
@@ -0,0 +1,19 @@
+function(_NEON2SSESource_import)
+ if(NOT DOWNLOAD_NEON2SSE)
+ set(NEON2SSESource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_NEON2SSE)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ # NOTE TensorFlow 1.7 downloads NEON2SSE from the following URL
+ envoption(NEON2SSE_URL https://github.com/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz)
+
+ ExternalSource_Download(NEON2SSE ${NEON2SSE_URL})
+
+ set(NEON2SSESource_DIR ${NEON2SSE_SOURCE_DIR} PARENT_SCOPE)
+ set(NEON2SSESource_FOUND TRUE PARENT_SCOPE)
+endfunction(_NEON2SSESource_import)
+
+_NEON2SSESource_import()
diff --git a/infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt b/infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt
new file mode 100644
index 000000000..8291958a4
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt
@@ -0,0 +1,6 @@
+Protobuf_Generate(ONNX_PROTO "${CMAKE_CURRENT_BINARY_DIR}/generated" "${ONNXSource_DIR}" "onnx/onnx.proto")
+
+add_library(onnxproto STATIC ${ONNX_PROTO_SOURCES})
+set_target_properties(onnxproto PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(onnxproto PUBLIC ${ONNX_PROTO_INCLUDE_DIRS})
+target_link_libraries(onnxproto libprotobuf)
diff --git a/infra/nncc/cmake/packages/ONNXProtoConfig.cmake b/infra/nncc/cmake/packages/ONNXProtoConfig.cmake
new file mode 100644
index 000000000..a6dc7a01d
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXProtoConfig.cmake
@@ -0,0 +1,25 @@
+function(_ONNXProto_import)
+ nncc_find_package(ONNXSource EXACT 1.3.0 QUIET)
+
+ if(NOT ONNXSource_FOUND)
+ set(ONNXProto_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ONNXSource_FOUND)
+
+ nncc_find_package(Protobuf QUIET)
+
+ if(NOT Protobuf_FOUND)
+ set(ONNXProto_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Protobuf_FOUND)
+
+ if(NOT TARGET onnxproto)
+ nncc_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/ONNXProto" onnxproto)
+ endif(NOT TARGET onnxproto)
+
+ message(STATUS "Found ONNX: TRUE")
+ set(ONNXProto_FOUND TRUE PARENT_SCOPE)
+endfunction(_ONNXProto_import)
+
+_ONNXProto_import()
diff --git a/infra/nncc/cmake/packages/ONNXRuntimeConfig.cmake b/infra/nncc/cmake/packages/ONNXRuntimeConfig.cmake
new file mode 100644
index 000000000..cfccfff88
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXRuntimeConfig.cmake
@@ -0,0 +1,29 @@
+set(ONNXRUNTIME_PREFIX "/usr" CACHE PATH "The location of pre-installed ONNX Runtime library")
+
+# TODO Download ONNXRuntime binaries
+
+function(_ONNXRuntime_import)
+ # Find the header & lib
+ find_library(ONNXRuntime_LIB NAMES onnxruntime PATHS "${ONNXRUNTIME_PREFIX}/lib")
+ find_path(ONNXRuntime_INCLUDE_DIR NAMES onnxruntime_c_api.h PATHS "${ONNXRUNTIME_PREFIX}/include")
+
+ if(NOT ONNXRuntime_LIB OR NOT ONNXRuntime_INCLUDE_DIR)
+ message(STATUS "Found ONNXRuntime: FALSE")
+
+ set(ONNXRuntime_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ONNXRuntime_LIB OR NOT ONNXRuntime_INCLUDE_DIR)
+
+ # Add onnxruntime target
+ if(NOT TARGET onnxruntime)
+ message(STATUS "Found ONNXRuntime (include: ${ONNXRuntime_INCLUDE_DIR}, library: ${ONNXRuntime_LIB})")
+
+ add_library(onnxruntime INTERFACE)
+ target_link_libraries(onnxruntime INTERFACE ${ONNXRuntime_LIB})
+ target_include_directories(onnxruntime INTERFACE ${ONNXRuntime_INCLUDE_DIR})
+ endif(NOT TARGET onnxruntime)
+
+ set(ONNXRuntime_FOUND TRUE PARENT_SCOPE)
+endfunction(_ONNXRuntime_import)
+
+_ONNXRuntime_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake b/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake
new file mode 100644
index 000000000..0ff33232d
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_ONNXSource_import)
+ if(NOT DOWNLOAD_ONNX)
+ set(ONNXSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_ONNX)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(ONNX_1_3_0_URL https://github.com/onnx/onnx/archive/v1.3.0.zip)
+
+ ExternalSource_Download(ONNX DIRNAME ONNX-1.3.0
+ CHECKSUM MD5=5d588ffcf43bb18f99a67c015c97f92e
+ URL ${ONNX_1_3_0_URL})
+
+ set(ONNXSource_DIR ${ONNX_SOURCE_DIR} PARENT_SCOPE)
+ set(ONNXSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_ONNXSource_import)
+
+_ONNXSource_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfigVersion.cmake b/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfigVersion.cmake
new file mode 100644
index 000000000..0ecf9d222
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.3.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
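
This version file marks the package compatible only when the requested version
equals 1.3.0, so callers must ask for that exact release; this is precisely the
request ONNXProtoConfig.cmake above makes:

    # Succeeds only for 1.3.0; any other requested version is rejected by
    # ONNXSourceConfigVersion.cmake.
    nncc_find_package(ONNXSource EXACT 1.3.0 QUIET)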
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake b/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake
new file mode 100644
index 000000000..b60e9446e
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_ONNXSource_import)
+ if(NOT DOWNLOAD_ONNX)
+ set(ONNXSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_ONNX)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(ONNX_1_4_1_URL https://github.com/onnx/onnx/archive/v1.4.1.zip)
+
+ ExternalSource_Download(ONNX DIRNAME ONNX-1.4.1
+ CHECKSUM MD5=604b43a22fbc758f32ae9f3a4fb9d397
+ URL ${ONNX_1_4_1_URL})
+
+ set(ONNXSource_DIR ${ONNX_SOURCE_DIR} PARENT_SCOPE)
+ set(ONNXSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_ONNXSource_import)
+
+_ONNXSource_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake b/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake
new file mode 100644
index 000000000..802b464da
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.4.1")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake b/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake
new file mode 100644
index 000000000..7f890e911
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_ONNXSource_import)
+ if(NOT DOWNLOAD_ONNX)
+ set(ONNXSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_ONNX)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(ONNX_1_5_0_URL https://github.com/onnx/onnx/archive/v1.5.0.zip)
+
+ ExternalSource_Download(ONNX DIRNAME ONNX-1.5.0
+ CHECKSUM MD5=1a5fe554569a3819705b26de33d8fe02
+ URL ${ONNX_1_5_0_URL})
+
+ set(ONNXSource_DIR ${ONNX_SOURCE_DIR} PARENT_SCOPE)
+ set(ONNXSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_ONNXSource_import)
+
+_ONNXSource_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake b/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake
new file mode 100644
index 000000000..70b2804b0
--- /dev/null
+++ b/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.5.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/ProtobufConfig.cmake b/infra/nncc/cmake/packages/ProtobufConfig.cmake
new file mode 100644
index 000000000..9064d1140
--- /dev/null
+++ b/infra/nncc/cmake/packages/ProtobufConfig.cmake
@@ -0,0 +1,139 @@
+# NOTE This function is unused, but remains for future reference
+function(_Protobuf_module_import)
+ # Use find_package here so as not to export unnecessary definitions
+ find_package(Protobuf MODULE QUIET)
+
+ if(NOT PROTOBUF_FOUND)
+ set(Protobuf_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT PROTOBUF_FOUND)
+
+ if(NOT TARGET protoc)
+ add_executable(protoc IMPORTED)
+ set_target_properties(protoc PROPERTIES IMPORTED_LOCATION ${PROTOBUF_PROTOC_EXECUTABLE})
+ endif(NOT TARGET protoc)
+
+ if(NOT TARGET libprotobuf)
+ add_library(libprotobuf INTERFACE)
+ target_include_directories(libprotobuf INTERFACE ${PROTOBUF_INCLUDE_DIRS})
+ target_link_libraries(libprotobuf INTERFACE ${PROTOBUF_LIBRARIES})
+ endif(NOT TARGET libprotobuf)
+
+ set(Protobuf_FOUND TRUE PARENT_SCOPE)
+endfunction(_Protobuf_module_import)
+
+function(_Protobuf_import)
+ # Use find_package here so as not to export unnecessary definitions
+ # NOTE Use an "exact" match here to avoid a possible infinite loop
+ find_package(protobuf EXACT 3.5.2 QUIET)
+
+ if(NOT protobuf_FOUND)
+ set(Protobuf_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT protobuf_FOUND)
+
+ if(NOT TARGET libprotobuf)
+ add_library(libprotobuf INTERFACE)
+ target_link_libraries(libprotobuf INTERFACE protobuf::libprotobuf)
+ endif(NOT TARGET libprotobuf)
+
+ set(Protobuf_FOUND TRUE PARENT_SCOPE)
+endfunction(_Protobuf_import)
+
+function(_Protobuf_build)
+ if(NOT BUILD_PROTOBUF)
+ return()
+ endif(NOT BUILD_PROTOBUF)
+
+ nncc_find_package(ProtobufSource QUIET)
+
+ if(NOT ProtobufSource_FOUND)
+ # Source is not available
+ return()
+ endif(NOT ProtobufSource_FOUND)
+
+ # TODO Introduce helper functions
+ set(PROTOBUF_BUILD_DIR "${CMAKE_BINARY_DIR}/externals/PROTOBUF/build")
+ set(PROTOBUF_INSTALL_DIR "${NNCC_OVERLAY_DIR}")
+
+ set(STAMP_PATH "${PROTOBUF_INSTALL_DIR}/PROTOBUF.stamp")
+ set(LOG_PATH "${PROTOBUF_INSTALL_DIR}/PROTOBUF.log")
+
+ if(EXISTS ${STAMP_PATH})
+ return()
+ endif(EXISTS ${STAMP_PATH})
+
+ message(STATUS "Build Protocol Buffer from ${ProtobufSource_DIR}")
+
+ file(MAKE_DIRECTORY ${PROTOBUF_BUILD_DIR})
+ file(MAKE_DIRECTORY ${PROTOBUF_INSTALL_DIR})
+
+ # NOTE Do NOT retry Protocol Buffer build
+ file(WRITE "${STAMP_PATH}")
+
+ execute_process(COMMAND ${CMAKE_COMMAND}
+ -DCMAKE_INSTALL_PREFIX=${PROTOBUF_INSTALL_DIR}
+ -DCMAKE_BUILD_TYPE=Release
+ -DCMAKE_CXX_FLAGS="-fPIC"
+ -Dprotobuf_BUILD_TESTS=OFF
+ -Dprotobuf_WITH_ZLIB=OFF
+ "${ProtobufSource_DIR}/cmake"
+ OUTPUT_FILE ${LOG_PATH}
+ ERROR_FILE ${LOG_PATH}
+ WORKING_DIRECTORY ${PROTOBUF_BUILD_DIR}
+ RESULT_VARIABLE CONFIGURE_EXITCODE)
+
+ if(NOT CONFIGURE_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "Fail to configure Protocol Buffer (check '${LOG_PATH}' for details)")
+ endif(NOT CONFIGURE_EXITCODE EQUAL 0)
+
+ execute_process(COMMAND ${CMAKE_COMMAND} --build . -- install
+ OUTPUT_FILE ${LOG_PATH}
+ ERROR_FILE ${LOG_PATH}
+ WORKING_DIRECTORY ${PROTOBUF_BUILD_DIR}
+ RESULT_VARIABLE BUILD_AND_INSTALL_EXITCODE)
+
+ if(NOT BUILD_AND_INSTALL_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "Fail to build/install Protocol Buffer (check '${LOG_PATH}' for details)")
+ endif(NOT BUILD_AND_INSTALL_EXITCODE EQUAL 0)
+
+ message(STATUS "Succeeded in building Protocol Buffer")
+endfunction(_Protobuf_build)
+
+_Protobuf_build()
+_Protobuf_import()
+
+if(Protobuf_FOUND)
+ function(Protobuf_Generate PREFIX OUTPUT_DIR PROTO_DIR)
+ get_filename_component(abs_output_dir ${OUTPUT_DIR} ABSOLUTE)
+ get_filename_component(abs_proto_dir ${PROTO_DIR} ABSOLUTE)
+
+ # Let's reset variables before using them
+ # NOTE This DOES NOT AFFECT variables in the parent scope
+ unset(PROTO_FILES)
+ unset(OUTPUT_FILES)
+
+ foreach(proto ${ARGN})
+ get_filename_component(fil "${proto}" NAME)
+ get_filename_component(dir "${proto}" DIRECTORY)
+
+ get_filename_component(fil_we "${fil}" NAME_WE)
+
+ get_filename_component(abs_fil "${abs_proto_base}/${proto}" ABSOLUTE)
+ get_filename_component(abs_dir "${abs_fil}" DIRECTORY)
+
+ list(APPEND PROTO_FILES "${abs_proto_dir}/${proto}")
+ list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb.h")
+ list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb.cc")
+ endforeach()
+
+ add_custom_command(OUTPUT ${OUTPUT_FILES}
+ COMMAND ${CMAKE_COMMAND} -E make_directory "${abs_output_dir}"
+ COMMAND "$<TARGET_FILE:protobuf::protoc>" --cpp_out "${abs_output_dir}" -I "${abs_proto_dir}" ${PROTO_FILES}
+ DEPENDS ${PROTO_FILES})
+
+ set(${PREFIX}_SOURCES ${OUTPUT_FILES} PARENT_SCOPE)
+ set(${PREFIX}_INCLUDE_DIRS ${abs_output_dir} PARENT_SCOPE)
+ set(${PREFIX}_LIBRARIES protobuf::libprotobuf PARENT_SCOPE)
+ endfunction(Protobuf_Generate)
+endif(Protobuf_FOUND)
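
Protobuf_Generate exports three <PREFIX>_* variables into the caller's scope;
ONNXProto/CMakeLists.txt above is the in-tree caller, and the general shape is
(prefix, paths, and proto names illustrative):

    Protobuf_Generate(FOO_PROTO "${CMAKE_CURRENT_BINARY_DIR}/generated"
                      "${CMAKE_CURRENT_SOURCE_DIR}" "foo.proto")

    add_library(fooproto STATIC ${FOO_PROTO_SOURCES})
    target_include_directories(fooproto PUBLIC ${FOO_PROTO_INCLUDE_DIRS})
    target_link_libraries(fooproto ${FOO_PROTO_LIBRARIES})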
diff --git a/infra/nncc/cmake/packages/ProtobufSourceConfig.cmake b/infra/nncc/cmake/packages/ProtobufSourceConfig.cmake
new file mode 100644
index 000000000..89176eb61
--- /dev/null
+++ b/infra/nncc/cmake/packages/ProtobufSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_ProtobufSource_import)
+ if(NOT DOWNLOAD_PROTOBUF)
+ set(ProtobufSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_PROTOBUF)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(PROTOBUF_URL https://github.com/protocolbuffers/protobuf/archive/v3.5.2.tar.gz)
+
+ ExternalSource_Download(PROTOBUF ${PROTOBUF_URL})
+
+ set(ProtobufSource_DIR ${PROTOBUF_SOURCE_DIR} PARENT_SCOPE)
+ set(ProtobufSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_ProtobufSource_import)
+
+_ProtobufSource_import()
diff --git a/infra/nncc/cmake/packages/PytorchSourceConfig.cmake b/infra/nncc/cmake/packages/PytorchSourceConfig.cmake
new file mode 100644
index 000000000..c28bc7c00
--- /dev/null
+++ b/infra/nncc/cmake/packages/PytorchSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_PytorchSource_import)
+ if(NOT DOWNLOAD_PYTORCH)
+ set(PytorchSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_PYTORCH)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(PYTORCH_URL https://github.com/pytorch/pytorch/archive/v0.4.1.tar.gz)
+
+ ExternalSource_Download(PYTORCH ${PYTORCH_URL})
+
+ set(PytorchSource_DIR ${PYTORCH_SOURCE_DIR} PARENT_SCOPE)
+ set(PytorchSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_PytorchSource_import)
+
+_PytorchSource_import()
diff --git a/infra/nncc/cmake/packages/TensorFlowConfig.cmake b/infra/nncc/cmake/packages/TensorFlowConfig.cmake
new file mode 100644
index 000000000..14d2fdf26
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowConfig.cmake
@@ -0,0 +1,53 @@
+set(TENSORFLOW_PREFIX "/usr" CACHE PATH "The location of pre-installed TensorFlow library")
+set(TENSORFLOW_VERSION_REQUIRED "1.12.0")
+
+# TODO Build TensorFlow from the (downloaded) source
+
+function(_TensorFlow_import)
+ # Find the header & lib
+ find_library(TensorFlow_LIB NAMES tensorflow PATHS "${TENSORFLOW_PREFIX}/lib")
+ find_path(TensorFlow_INCLUDE_DIR NAMES tensorflow/c/c_api.h PATHS "${TENSORFLOW_PREFIX}/include")
+
+ if(NOT TensorFlow_LIB OR NOT TensorFlow_INCLUDE_DIR)
+ message(STATUS "Found TensorFlow: FALSE")
+
+ set(TensorFlow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT TensorFlow_LIB OR NOT TensorFlow_INCLUDE_DIR)
+
+ # Check TensorFlow version
+ try_run(RUN_RESULT_VAR COMPILE_RESULT_VAR
+ ${CMAKE_BINARY_DIR}
+ ${CMAKE_CURRENT_LIST_DIR}/TensorFlowVersionChecker.c
+ COMPILE_DEFINITIONS -I${TensorFlow_INCLUDE_DIR}
+ LINK_LIBRARIES ${TensorFlow_LIB}
+ ARGS ${TENSORFLOW_VERSION_REQUIRED})
+
+ if(NOT COMPILE_RESULT_VAR)
+ message(STATUS "Failed to build TensorFlowVersionChecker. Your libtensorflow may be built on different version of Ubuntu.")
+ message(STATUS "Found TensorFlow: FALSE")
+ set(TensorFlow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT COMPILE_RESULT_VAR)
+
+ if(NOT RUN_RESULT_VAR EQUAL 0)
+ message(STATUS "you need tensorflow version ${TENSORFLOW_VERSION_REQUIRED}")
+ message(STATUS "Found TensorFlow: FALSE")
+ set(TensorFlow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT RUN_RESULT_VAR EQUAL 0)
+
+ # Add tensorflow target (if necessary)
+ if(NOT TARGET tensorflow)
+ message(STATUS "Found TensorFlow (include: ${TensorFlow_INCLUDE_DIR}, library: ${TensorFlow_LIB})")
+
+ # NOTE IMPORTED target may be more appropriate for this case
+ add_library(tensorflow INTERFACE)
+ target_link_libraries(tensorflow INTERFACE ${TensorFlow_LIB})
+ target_include_directories(tensorflow INTERFACE ${TensorFlow_INCLUDE_DIR})
+ endif(NOT TARGET tensorflow)
+
+ set(TensorFlow_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlow_import)
+
+_TensorFlow_import()
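
The try_run gate above compiles TensorFlowVersionChecker.c (added later in this
diff) against the discovered library and passes the required version string as a
program argument; the checker exits 0 only when TF_Version() matches. Pointing
the config at a non-default install is then a matter of overriding the cache
variable (path illustrative):

    # Use a locally installed libtensorflow instead of /usr.
    set(TENSORFLOW_PREFIX "/opt/tensorflow-1.12" CACHE PATH
        "The location of pre-installed TensorFlow library" FORCE)
    nncc_find_package(TensorFlow QUIET)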
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.12/Lite/CMakeLists.txt b/infra/nncc/cmake/packages/TensorFlowLite-1.12/Lite/CMakeLists.txt
new file mode 100644
index 000000000..068022fcf
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowLite-1.12/Lite/CMakeLists.txt
@@ -0,0 +1,41 @@
+# NOTE The following SHOULD be defined before using this CMakeLists.txt
+#
+# 'TensorFlowSource_DIR' variable
+# 'FlatBuffersSource_DIR' variable
+# 'eigen' target
+# 'gemmlowp' target
+# 'neon2sse' target
+# 'farmhash' target
+# 'abseil' target
+#
+message(STATUS "Build TensorFlow Lite from ${TensorFlowSource_DIR}")
+
+set(TensorFlowLiteSource_DIR ${TensorFlowSource_DIR}/tensorflow/contrib/lite)
+
+file(GLOB CORE_SRCS "${TensorFlowLiteSource_DIR}/*.c" "${TensorFlowLiteSource_DIR}/*.cc" "${TensorFlowLiteSource_DIR}/c/*.c" "${TensorFlowLiteSource_DIR}/core/api/*.cc")
+file(GLOB_RECURSE CORE_TESTS "${TensorFlowLiteSource_DIR}/*test*.cc")
+list(REMOVE_ITEM CORE_SRCS ${CORE_TESTS})
+
+file(GLOB_RECURSE KERNEL_SRCS "${TensorFlowLiteSource_DIR}/kernels/*.cc")
+file(GLOB_RECURSE KERNEL_TESTS "${TensorFlowLiteSource_DIR}/kernels/*test*.cc")
+list(REMOVE_ITEM KERNEL_SRCS ${KERNEL_TESTS})
+# Exclude buggy kernel(s) from the build
+#list(REMOVE_ITEM KERNEL_SRCS "${TensorFlowLiteSource_DIR}/kernels/internal/spectrogram.cc")
+
+list(APPEND SRCS ${CORE_SRCS})
+list(APPEND SRCS ${KERNEL_SRCS})
+
+include(CheckCXXCompilerFlag)
+
+CHECK_CXX_COMPILER_FLAG(-Wno-extern-c-compat COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
+
+add_library(tensorflowlite-1.12 ${SRCS})
+set_target_properties(tensorflowlite-1.12 PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(tensorflowlite-1.12 PUBLIC ${TensorFlowSource_DIR})
+target_include_directories(tensorflowlite-1.12 PUBLIC ${FlatBuffersSource_DIR}/include)
+target_compile_options(tensorflowlite-1.12 PUBLIC -Wno-ignored-attributes)
+if(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
+ target_compile_options(tensorflowlite-1.12 PUBLIC -Wno-extern-c-compat)
+endif(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
+target_compile_definitions(tensorflowlite-1.12 PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
+target_link_libraries(tensorflowlite-1.12 eigen gemmlowp neon2sse farmhash abseil dl)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfig.cmake b/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfig.cmake
new file mode 100644
index 000000000..2f4ff0a46
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfig.cmake
@@ -0,0 +1,62 @@
+function(_TensorFlowLite_import)
+ nncc_find_package(TensorFlowSource EXACT 1.12 QUIET)
+
+ if(NOT TensorFlowSource_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT TensorFlowSource_FOUND)
+
+ # TensorFlow 1.12 downloads FlatBuffers from https://github.com/google/flatbuffers/archive/1f5eae5d6a1.tar.gz
+ #
+ # Let's use 1.10, released in 2018.10, which is compatible with 1f5eae5d6a1.
+ nncc_find_package(FlatBuffersSource EXACT 1.10 QUIET)
+
+ if(NOT FlatBuffersSource_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT FlatBuffersSource_FOUND)
+
+ nncc_find_package(Farmhash QUIET)
+
+ if(NOT Farmhash_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Farmhash_FOUND)
+
+ nncc_find_package(Eigen QUIET)
+
+ if(NOT Eigen_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Eigen_FOUND)
+
+ nncc_find_package(GEMMLowp QUIET)
+
+ if(NOT GEMMLowp_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT GEMMLowp_FOUND)
+
+ nncc_find_package(NEON2SSE QUIET)
+
+ if(NOT NEON2SSE_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT NEON2SSE_FOUND)
+
+ nncc_find_package(Abseil QUIET)
+
+ if(NOT Abseil_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Abseil_FOUND)
+
+ if(NOT TARGET tensorflowlite-1.12)
+ nncc_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Lite" tflite-1.12)
+ endif(NOT TARGET tensorflowlite-1.12)
+
+ set(TensorFlowLite_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowLite_import)
+
+_TensorFlowLite_import()
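
Once every prerequisite package resolves, consumers request the versioned
package and link the resulting target; a minimal sketch (consumer target name
illustrative):

    nncc_find_package(TensorFlowLite EXACT 1.12 QUIET)

    if(TensorFlowLite_FOUND)
      # eigen, gemmlowp, neon2sse, farmhash, and abseil come in transitively.
      target_link_libraries(my_runner PRIVATE tensorflowlite-1.12)
    endif(TensorFlowLite_FOUND)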
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfigVersion.cmake b/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfigVersion.cmake
new file mode 100644
index 000000000..4a57b655b
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfigVersion.cmake
@@ -0,0 +1,9 @@
+set(PACKAGE_VERSION "1.12")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt b/infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt
new file mode 100644
index 000000000..c5e89eebe
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt
@@ -0,0 +1,38 @@
+# NOTE The following SHOULD be defined before using this CMakeLists.txt
+#
+# 'TensorFlowSource_DIR' variable
+# 'FlatBuffersSource_DIR' variable
+# 'eigen' target
+# 'gemmlowp' target
+# 'neon2sse' target
+# 'farmhash' target
+#
+set(TensorFlowLiteSource_DIR ${TensorFlowSource_DIR}/tensorflow/contrib/lite)
+
+file(GLOB CORE_SRCS "${TensorFlowLiteSource_DIR}/*.c" "${TensorFlowLiteSource_DIR}/*.cc")
+file(GLOB CORE_TESTS "${TensorFlowLiteSource_DIR}/*test*.cc")
+list(REMOVE_ITEM CORE_SRCS ${CORE_TESTS})
+
+file(GLOB_RECURSE KERNEL_SRCS "${TensorFlowLiteSource_DIR}/kernels/*.cc")
+file(GLOB_RECURSE KERNEL_TESTS "${TensorFlowLiteSource_DIR}/kernels/*test*.cc")
+list(REMOVE_ITEM KERNEL_SRCS ${KERNEL_TESTS})
+# Exclude buggy kernel(s) from the build
+list(REMOVE_ITEM KERNEL_SRCS "${TensorFlowLiteSource_DIR}/kernels/internal/spectrogram.cc")
+
+list(APPEND SRCS ${CORE_SRCS})
+list(APPEND SRCS ${KERNEL_SRCS})
+
+include(CheckCXXCompilerFlag)
+
+CHECK_CXX_COMPILER_FLAG(-Wno-extern-c-compat COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
+
+add_library(tensorflowlite-1.7 ${SRCS})
+set_target_properties(tensorflowlite-1.7 PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(tensorflowlite-1.7 PUBLIC ${TensorFlowSource_DIR})
+target_include_directories(tensorflowlite-1.7 PUBLIC ${FlatBuffersSource_DIR}/include)
+target_compile_options(tensorflowlite-1.7 PUBLIC -Wno-ignored-attributes)
+if(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
+ target_compile_options(tensorflowlite-1.7 PUBLIC -Wno-extern-c-compat)
+endif(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
+target_compile_definitions(tensorflowlite-1.7 PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
+target_link_libraries(tensorflowlite-1.7 eigen gemmlowp neon2sse farmhash dl)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake b/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake
new file mode 100644
index 000000000..44bc817e1
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake
@@ -0,0 +1,55 @@
+function(_TensorFlowLite_import)
+ nncc_find_package(TensorFlowSource EXACT 1.7 QUIET)
+
+ if(NOT TensorFlowSource_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT TensorFlowSource_FOUND)
+
+ # TensorFlow 1.7 downloads FlatBuffers from https://github.com/google/flatbuffers/archive/971a68110e4.tar.gz
+ #
+ # FlatBuffers 1.8 is compatible with 971a68110e4.
+ nncc_find_package(FlatBuffersSource EXACT 1.8 QUIET)
+
+ if(NOT FlatBuffersSource_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT FlatBuffersSource_FOUND)
+
+ nncc_find_package(Farmhash QUIET)
+
+ if(NOT Farmhash_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Farmhash_FOUND)
+
+ nncc_find_package(Eigen QUIET)
+
+ if(NOT Eigen_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Eigen_FOUND)
+
+ nncc_find_package(GEMMLowp QUIET)
+
+ if(NOT GEMMLowp_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT GEMMLowp_FOUND)
+
+ nncc_find_package(NEON2SSE QUIET)
+
+ if(NOT NEON2SSE_FOUND)
+ set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT NEON2SSE_FOUND)
+
+ if(NOT TARGET tensorflowlite-1.7)
+ nncc_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Lite" tflite-1.7)
+ endif(NOT TARGET tensorflowlite-1.7)
+
+ set(TensorFlowLite_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowLite_import)
+
+_TensorFlowLite_import()
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfigVersion.cmake b/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfigVersion.cmake
new file mode 100644
index 000000000..46609dc10
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfigVersion.cmake
@@ -0,0 +1,9 @@
+set(PACKAGE_VERSION "1.7")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake
new file mode 100644
index 000000000..5963ce418
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake
@@ -0,0 +1,104 @@
+function(_TensorFlowProtoText_import)
+ macro(require_package PKGNAME)
+ nncc_find_package(${PKGNAME} ${ARGN} QUIET)
+ if(NOT ${PKGNAME}_FOUND)
+ message(STATUS "Found TensorFlowProtoText: FALSE (${PKGNAME} is missing)")
+ set(TensorFlowProtoText_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ${PKGNAME}_FOUND)
+ endmacro(require_package)
+
+ require_package(TensorFlowSource EXACT 1.12)
+ require_package(Abseil)
+ require_package(Eigen)
+ require_package(Protobuf)
+ require_package(GoogleDoubleConversion)
+ require_package(GoogleNSync)
+
+ if(NOT TARGET tensorflow-prototext-1.12)
+ nncc_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/build" TensorFlowProtoText-1.12)
+ endif(NOT TARGET tensorflow-prototext-1.12)
+
+ set(TensorFlowProtoText_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowProtoText_import)
+
+_TensorFlowProtoText_import()
+
+if(TensorFlowProtoText_FOUND)
+ # CMAKE_CURRENT_LIST_DIR
+ #
+ # ... The value has dynamic scope. ... Therefore the value of the variable inside a macro
+ # or function is the directory of the file invoking the bottom-most entry on the call stack,
+ # not the directory of the file containing the macro or function definition.
+ #
+ # Reference: https://cmake.org/cmake/help/v3.1/variable/CMAKE_CURRENT_LIST_DIR.html
+ set(TENSORFLOW_PROTO_TEXT_1_12_CMAKE_DIR
+ "${CMAKE_CURRENT_LIST_DIR}" CACHE INTERNAL
+ "Where to find make_directories.sh"
+ )
+
+ # Comments from "gen_proto_text_functions.cc"
+ # >
+ # > Main program to take input protos and write output pb_text source files that
+ # > contain generated proto text input and output functions.
+ # >
+ # > Main expects:
+ # > - First argument is output path
+ # > - Second argument is the relative path of the protos to the root. E.g.,
+ # > for protos built by a rule in tensorflow/core, this will be
+ # > tensorflow/core.
+ # > - Then any number of source proto file names, plus one source name must be
+ # > placeholder.txt from this gen tool's package. placeholder.txt is
+ # > ignored for proto resolution, but is used to determine the root at which
+ # > the build tool has placed the source proto files.
+ # >
+ function(ProtoText_Generate PREFIX OUTPUT_DIR)
+ # THIS SHOULD SUCCEED!
+ nncc_find_package(TensorFlowSource EXACT 1.12 REQUIRED)
+
+ set(OUTPUT_REL "tensorflow")
+ set(PROTO_DIR "${TensorFlowSource_DIR}")
+
+ set(PROTO_INPUTS ${ARGN})
+ list(APPEND PROTO_INPUTS "tensorflow/tools/proto_text/placeholder.txt")
+
+ get_filename_component(abs_output_dir ${OUTPUT_DIR} ABSOLUTE)
+ get_filename_component(abs_proto_dir ${TensorFlowSource_DIR} ABSOLUTE)
+
+ # Let's reset variables before using them
+ # NOTE This DOES NOT AFFECT variables in the parent scope
+ unset(PROTO_FILES)
+ unset(OUTPUT_DIRS)
+ unset(OUTPUT_FILES)
+
+ foreach(proto ${PROTO_INPUTS})
+ get_filename_component(fil "${proto}" NAME)
+ get_filename_component(dir "${proto}" DIRECTORY)
+
+ get_filename_component(fil_we "${fil}" NAME_WE)
+
+ get_filename_component(abs_fil "${abs_proto_base}/${proto}" ABSOLUTE)
+ get_filename_component(abs_dir "${abs_fil}" DIRECTORY)
+
+ list(APPEND PROTO_FILES "${abs_proto_dir}/${proto}")
+
+ if(NOT ${fil} STREQUAL "placeholder.txt")
+ list(APPEND OUTPUT_DIRS "${abs_output_dir}/${dir}")
+ list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb_text.h")
+ list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb_text-impl.h")
+ list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb_text.cc")
+ endif(NOT ${fil} STREQUAL "placeholder.txt")
+ endforeach()
+
+ add_custom_command(OUTPUT ${OUTPUT_FILES}
+ # "make_directory" in CMake 3.1 cannot create multiple directories at once.
+ # COMMAND ${CMAKE_COMMAND} -E make_directory ${OUTPUT_DIRS}
+ COMMAND "${TENSORLFLOW_PROTO_TEXT_1_12_CMAKE_DIR}/make_directories.sh" ${OUTPUT_DIRS}
+ COMMAND "$<TARGET_FILE:tensorflow-prototext-1.12>" "${abs_output_dir}/${OUTPUT_REL}" "${OUTPUT_REL}" ${PROTO_FILES}
+ DEPENDS ${PROTO_FILES})
+
+ set(${PREFIX}_SOURCES ${OUTPUT_FILES} PARENT_SCOPE)
+ set(${PREFIX}_INCLUDE_DIRS ${abs_output_dir} PARENT_SCOPE)
+ endfunction(ProtoText_Generate)
+endif(TensorFlowProtoText_FOUND)
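
Given the argument contract quoted above, a caller passes an output prefix, an
output directory, and proto paths relative to the TensorFlow source root;
placeholder.txt is appended internally. A sketch (prefix and proto list
illustrative):

    ProtoText_Generate(MY_PBTXT "${CMAKE_CURRENT_BINARY_DIR}/generated"
                       tensorflow/core/framework/types.proto)

    add_library(my_pbtxt STATIC ${MY_PBTXT_SOURCES})
    target_include_directories(my_pbtxt PRIVATE ${MY_PBTXT_INCLUDE_DIRS})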
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfigVersion.cmake b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfigVersion.cmake
new file mode 100644
index 000000000..4a57b655b
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfigVersion.cmake
@@ -0,0 +1,9 @@
+set(PACKAGE_VERSION "1.12")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt
new file mode 100644
index 000000000..86d6e6fe5
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt
@@ -0,0 +1,78 @@
+message(STATUS "Build TensorFlowProtoText from '${TensorFlowSource_DIR}'")
+
+#
+# Build "proto_text" tool
+#
+unset(SOURCE_FILES)
+
+macro(Source_Add RPATH)
+ list(APPEND SOURCE_FILES "${TensorFlowSource_DIR}/${RPATH}")
+endmacro(Source_Add)
+
+# This list comes from "tensorflow/contrib/makefile/proto_text_cc_files.txt"
+Source_Add(tensorflow/core/lib/core/status.cc)
+Source_Add(tensorflow/core/lib/core/threadpool.cc)
+Source_Add(tensorflow/core/lib/hash/hash.cc)
+Source_Add(tensorflow/core/lib/io/inputstream_interface.cc)
+Source_Add(tensorflow/core/lib/io/random_inputstream.cc)
+Source_Add(tensorflow/core/lib/io/buffered_inputstream.cc)
+Source_Add(tensorflow/core/lib/io/inputbuffer.cc)
+Source_Add(tensorflow/core/lib/io/iterator.cc)
+Source_Add(tensorflow/core/lib/io/path.cc)
+Source_Add(tensorflow/core/lib/strings/numbers.cc)
+Source_Add(tensorflow/core/lib/strings/scanner.cc)
+Source_Add(tensorflow/core/lib/strings/str_util.cc)
+Source_Add(tensorflow/core/lib/strings/strcat.cc)
+Source_Add(tensorflow/core/lib/strings/stringprintf.cc)
+Source_Add(tensorflow/core/lib/strings/proto_text_util.cc)
+Source_Add(tensorflow/core/platform/cpu_info.cc)
+Source_Add(tensorflow/core/platform/denormal.cc)
+Source_Add(tensorflow/core/platform/env.cc)
+Source_Add(tensorflow/core/platform/env_time.cc)
+Source_Add(tensorflow/core/platform/file_system.cc)
+Source_Add(tensorflow/core/platform/file_system_helper.cc)
+Source_Add(tensorflow/core/platform/protobuf_util.cc)
+Source_Add(tensorflow/core/platform/setround.cc)
+Source_Add(tensorflow/core/platform/tracing.cc)
+Source_Add(tensorflow/core/platform/posix/env.cc)
+Source_Add(tensorflow/core/platform/posix/env_time.cc)
+Source_Add(tensorflow/core/platform/posix/error.cc)
+Source_Add(tensorflow/core/platform/posix/load_library.cc)
+Source_Add(tensorflow/core/platform/posix/port.cc)
+Source_Add(tensorflow/core/platform/posix/posix_file_system.cc)
+Source_Add(tensorflow/core/platform/default/logging.cc)
+Source_Add(tensorflow/core/platform/default/mutex.cc)
+Source_Add(tensorflow/core/platform/default/protobuf.cc)
+
+Source_Add(tensorflow/tools/proto_text/gen_proto_text_functions_lib.cc)
+Source_Add(tensorflow/tools/proto_text/gen_proto_text_functions.cc)
+
+unset(PROTO_FILES)
+
+macro(Proto_Add RPATH)
+ list(APPEND PROTO_FILES "${RPATH}")
+endmacro(Proto_Add)
+
+Proto_Add(tensorflow/core/lib/core/error_codes.proto)
+Proto_Add(tensorflow/core/framework/types.proto)
+Proto_Add(tensorflow/core/framework/tensor.proto)
+Proto_Add(tensorflow/core/framework/tensor_shape.proto)
+Proto_Add(tensorflow/core/framework/summary.proto)
+Proto_Add(tensorflow/core/framework/resource_handle.proto)
+
+Protobuf_Generate(PROTO_TEXT_PROTO
+ "${CMAKE_CURRENT_BINARY_DIR}/generated/proto_text"
+ "${TensorFlowSource_DIR}"
+ ${PROTO_FILES}
+)
+
+add_executable(tensorflow-prototext-1.12 ${SOURCE_FILES} ${PROTO_TEXT_PROTO_SOURCES})
+target_include_directories(tensorflow-prototext-1.12 PRIVATE ${TensorFlowSource_DIR})
+target_include_directories(tensorflow-prototext-1.12 PRIVATE ${PROTO_TEXT_PROTO_INCLUDE_DIRS})
+
+target_link_libraries(tensorflow-prototext-1.12 PRIVATE abseil)
+target_link_libraries(tensorflow-prototext-1.12 PRIVATE eigen)
+target_link_libraries(tensorflow-prototext-1.12 PRIVATE ${PROTO_TEXT_PROTO_LIBRARIES})
+target_link_libraries(tensorflow-prototext-1.12 PRIVATE Google::DoubleConversion)
+target_link_libraries(tensorflow-prototext-1.12 PRIVATE Google::NSync)
+target_link_libraries(tensorflow-prototext-1.12 PRIVATE dl)
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh
new file mode 100644
index 000000000..1fb2ab683
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+while [[ $# -ne 0 ]]; do
+ DIR=$1; shift
+ mkdir -p "${DIR}"
+done
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfig.cmake b/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfig.cmake
new file mode 100644
index 000000000..b3adef052
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_TensorFlowSource_import)
+ if(NOT DOWNLOAD_TENSORFLOW)
+ set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_TENSORFLOW)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(TENSORFLOW_1_12_URL https://github.com/tensorflow/tensorflow/archive/v1.12.0.tar.gz)
+
+ ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-1.12 ${TENSORFLOW_1_12_URL})
+
+ set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowSource_import)
+
+_TensorFlowSource_import()
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfigVersion.cmake b/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfigVersion.cmake
new file mode 100644
index 000000000..8cfdbf8e5
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.12")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake b/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake
new file mode 100644
index 000000000..f9df7dc18
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_TensorFlowSource_import)
+ if(NOT DOWNLOAD_TENSORFLOW)
+ set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_TENSORFLOW)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(TENSORFLOW_1_14_URL https://github.com/tensorflow/tensorflow/archive/v1.14.0.tar.gz)
+
+ ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-1.14 ${TENSORFLOW_1_14_URL})
+
+ set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowSource_import)
+
+_TensorFlowSource_import()
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake b/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake
new file mode 100644
index 000000000..e9d7db2be
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.14")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfig.cmake b/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfig.cmake
new file mode 100644
index 000000000..11dbf01c5
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_TensorFlowSource_import)
+ if(NOT DOWNLOAD_TENSORFLOW)
+ set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_TENSORFLOW)
+
+ nncc_include(ExternalSourceTools)
+ nncc_include(OptionTools)
+
+ envoption(TENSORFLOW_URL https://github.com/tensorflow/tensorflow/archive/v1.7.0.tar.gz)
+
+ ExternalSource_Download(TENSORFLOW ${TENSORFLOW_URL})
+
+ set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowSource_import)
+
+_TensorFlowSource_import()
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfigVersion.cmake b/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfigVersion.cmake
new file mode 100644
index 000000000..b1d5282b2
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.7")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowVersionChecker.c b/infra/nncc/cmake/packages/TensorFlowVersionChecker.c
new file mode 100644
index 000000000..6161ef74a
--- /dev/null
+++ b/infra/nncc/cmake/packages/TensorFlowVersionChecker.c
@@ -0,0 +1,9 @@
+#include <string.h>
+#include <tensorflow/c/c_api.h>
+
+int main(int argc, char **argv)
+{
+ if (argc >= 2 && !strcmp(argv[1], TF_Version()))
+ return 0;
+ return 255;
+}
diff --git a/infra/nncc/command/build b/infra/nncc/command/build
new file mode 100644
index 000000000..86082c188
--- /dev/null
+++ b/infra/nncc/command/build
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+import "build.configuration"
+
+BUILD_WORKSPACE_PATH="${NNCC_PROJECT_PATH}/${BUILD_WORKSPACE_RPATH}"
+
+if [[ ! -d "${BUILD_WORKSPACE_PATH}" ]]; then
+ echo "'${BUILD_WORKSPACE_RPATH}' does not exist. Please run 'configure' first"
+ exit 255
+fi
+cd "${BUILD_WORKSPACE_PATH}" && cmake --build . -- "$@"
diff --git a/infra/nncc/command/check-copyright b/infra/nncc/command/check-copyright
new file mode 100644
index 000000000..b70785971
--- /dev/null
+++ b/infra/nncc/command/check-copyright
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# HOW TO USE
+#
+# Create .COPYRIGHT file at the root of your project (compiler/[PROJECT]/.COPYRIGHT)
+# with the copyright pattern required for your project.
+#
+# echo "Copyright (c) [0-9]\+ Samsung Electronics Co., Ltd. All Rights Reserved" > compiler/[PROJECT]/.COPYRIGHT
+#
+# DISCLAIMER
+#
+# This check works only when your copyright notice is of the following form:
+#
+# /**
+# * [Copyright notice]
+# ...
+# */
+#
+# NOTE
+#
+# The current implementation does not validate YEAR in the copyright notice.
+#
+# TODO Validate YEAR without FALSE POSITIVES
+#
+# It turns out that checking the initial commit year introduces
+# FALSE POSITIVES when files have been relocated.
+INVALID_FILES=()
+
+for COPYRIGHT_PATH in $(ls ${NNCC_PROJECT_PATH}/compiler/*/.COPYRIGHT); do
+ PROJECT_PATH="$(dirname ${COPYRIGHT_PATH})"
+ PROJECT_NAME="$(basename ${PROJECT_PATH})"
+
+ CANDIDATE_FILES=$(find "${PROJECT_PATH}" -iname '*.h' -o -iname '*.hpp' -o -iname '*.cpp' -o -iname '*.c')
+
+ # Skip the copyright check if there are no candidate files
+ #
+ # NOTE "git ls-files" with no argument will enumerate all the files in the repo
+ if [[ -z ${CANDIDATE_FILES} ]]; then
+ continue
+ fi
+
+ for TRACKED_FILE in $(git ls-files $CANDIDATE_FILES); do
+ # Take line 2 of the file (the "* [Copyright notice]" line inside the
+ # comment block), strip the leading " * ", and count matches against the
+ # project's copyright pattern.
+ MATCHED=$(cat "${NNCC_PROJECT_PATH}/${TRACKED_FILE}" | head -n2 | tail -n1 | sed 's/^ \* //g' | grep -f "${COPYRIGHT_PATH}" | wc -l)
+
+ if [[ ${MATCHED} -ne 1 ]]; then
+ INVALID_FILES+=(${TRACKED_FILE})
+ fi
+ done
+done
+
+if [[ ${#INVALID_FILES[@]} -ne 0 ]]; then
+ echo ">> FAILED <<"
+ echo
+ echo "PLEASE CHECK THE FOLLOWING FILES"
+ for INVALID_FILE in "${INVALID_FILES[@]}"; do
+ echo "- ${INVALID_FILE}"
+ done
+ exit 255
+fi
+
+echo ">> PASSED <<"
+exit 0
diff --git a/infra/nncc/command/configure b/infra/nncc/command/configure
new file mode 100644
index 000000000..2648cb893
--- /dev/null
+++ b/infra/nncc/command/configure
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+import "build.configuration"
+
+BUILD_WORKSPACE_PATH="${NNCC_PROJECT_PATH}/${BUILD_WORKSPACE_RPATH}"
+
+mkdir -p "${BUILD_WORKSPACE_PATH}"
+
+cd "${BUILD_WORKSPACE_PATH}"
+cmake "${NNCC_PROJECT_PATH}/infra/nncc" "$@"
diff --git a/infra/nncc/command/docker-nncc b/infra/nncc/command/docker-nncc
new file mode 100644
index 000000000..0eea016c6
--- /dev/null
+++ b/infra/nncc/command/docker-nncc
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+import "docker.configuration"
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME ./nncc "$@"
+EXITCODE=$?
+
+docker_cleanup
+
+exit $EXITCODE
diff --git a/infra/nncc/command/docker-run b/infra/nncc/command/docker-run
new file mode 100644
index 000000000..863b2b8f1
--- /dev/null
+++ b/infra/nncc/command/docker-run
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+import "docker.configuration"
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME "$@"
+EXITCODE=$?
+
+docker_cleanup
+
+exit $EXITCODE
diff --git a/infra/nncc/command/docker-shell b/infra/nncc/command/docker-shell
new file mode 100644
index 000000000..7f8449855
--- /dev/null
+++ b/infra/nncc/command/docker-shell
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+import "docker.configuration"
+
+DOCKER_RUN_OPTS+=" -it"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME /bin/bash
+EXITCODE=$?
+
+docker_cleanup
+
+exit $EXITCODE
diff --git a/infra/nncc/command/test b/infra/nncc/command/test
new file mode 100644
index 000000000..96ddd7a17
--- /dev/null
+++ b/infra/nncc/command/test
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+import "build.configuration"
+
+BUILD_WORKSPACE_PATH="${NNCC_PROJECT_PATH}/${BUILD_WORKSPACE_RPATH}"
+
+if [[ ! -d "${BUILD_WORKSPACE_PATH}" ]]; then
+ echo "'${BUILD_WORKSPACE_RPATH}' does not exist. Please run 'configure' first"
+ exit 255
+fi
+
+export CTEST_OUTPUT_ON_FAILURE=1
+cd "${BUILD_WORKSPACE_PATH}" && ctest "$@"
diff --git a/infra/nncc/config/build.configuration b/infra/nncc/config/build.configuration
new file mode 100644
index 000000000..25ffb6ee0
--- /dev/null
+++ b/infra/nncc/config/build.configuration
@@ -0,0 +1 @@
+BUILD_WORKSPACE_RPATH=${NNCC_WORKSPACE:-build}
diff --git a/infra/nncc/config/docker.configuration b/infra/nncc/config/docker.configuration
new file mode 100644
index 000000000..7078585a2
--- /dev/null
+++ b/infra/nncc/config/docker.configuration
@@ -0,0 +1,46 @@
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnas}
+echo "Using docker image ${DOCKER_IMAGE_NAME}"
+
+if [ -z "`docker images ${DOCKER_IMAGE_NAME}`" ]; then
+ echo "Need docker image!"
+ exit 1
+fi
+
+HOST_PATH="$NNCC_PROJECT_PATH"
+DOCKER_PATH="$NNCC_PROJECT_PATH"
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES=" -v $HOST_PATH:$DOCKER_PATH"
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+DOCKER_ENV_VARS+=" -e CAFFE_URL"
+DOCKER_ENV_VARS+=" -e GTEST_URL"
+DOCKER_ENV_VARS+=" -e EIGEN_URL"
+DOCKER_ENV_VARS+=" -e GEMMLOWP_URL"
+DOCKER_ENV_VARS+=" -e FLATBUFFERS_URL"
+DOCKER_ENV_VARS+=" -e FARMHASH_URL"
+DOCKER_ENV_VARS+=" -e NEON2SSE_URL"
+DOCKER_ENV_VARS+=" -e TENSORFLOW_URL"
+
+DOCKER_ENV_VARS+=" -e NNCC_WORKSPACE"
+
+DOCKER_RUN_OPTS="${DOCKER_OPTS}"
+DOCKER_RUN_OPTS+=" --rm"
+DOCKER_RUN_OPTS+=" -w $DOCKER_PATH"
+
+function docker_cleanup()
+{
+ # Files newly created during 'docker run' can have different ownership,
+ # which may cause problems; for example, some Jenkins slaves or developers
+ # cannot remove built files due to lack of permission.
+ # To address this issue, change the owner of all files under nncc
+ # to the owner of the nncc directory itself.
+ NNCC_OWNER_UID=$(stat -c "%u" $HOST_PATH)
+ NNCC_OWNER_GID=$(stat -c "%g" $HOST_PATH)
+
+ CMD="chown -R $NNCC_OWNER_UID:$NNCC_OWNER_GID $DOCKER_PATH"
+ docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
+}
diff --git a/infra/nnfw/CMakeLists.txt b/infra/nnfw/CMakeLists.txt
new file mode 100644
index 000000000..f2eb2f3ba
--- /dev/null
+++ b/infra/nnfw/CMakeLists.txt
@@ -0,0 +1,107 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+project(nnfw)
+
+enable_testing()
+
+set(NNFW_EXTERNALS_DIR
+ "${CMAKE_CURRENT_LIST_DIR}/../../externals" CACHE
+ INTERNAL "Where to download external dependencies"
+)
+set(NNFW_OVERLAY_DIR "${CMAKE_BINARY_DIR}/overlay" CACHE
+ INTERNAL "Where locally built external dependencies are installed")
+
+# This allows find_package to access configurations installed inside overlay
+list(APPEND CMAKE_PREFIX_PATH "${NNFW_OVERLAY_DIR}")
+
+macro(nnfw_include PREFIX)
+ include("${CMAKE_SOURCE_DIR}/cmake/modules/${PREFIX}.cmake")
+endmacro(nnfw_include)
+
+# 'find_package()' wrapper to find in cmake/packages folder
+#
+# Example:
+# nnfw_find_package(Boost): Load settings from 'BoostConfig.cmake' file
+# - this may emit warnings like "-- Could NOT find Boost (missing: Boost_DIR)"
+# nnfw_find_package(Boost QUIET): Load settings silently, without warnings
+# nnfw_find_package(Boost REQUIRED): Load settings but stop with error when failed
+macro(nnfw_find_package PREFIX)
+ find_package(${PREFIX} CONFIG NO_DEFAULT_PATH PATHS ${CMAKE_SOURCE_DIR}/cmake/packages ${ARGN})
+endmacro(nnfw_find_package)
+
+set(CMAKE_CXX_STANDARD 11)
+set(CMAKE_CXX_EXTENSIONS OFF)
+
+# This feature works with CMake 3.5.2 or later; earlier versions simply ignore it
+# without producing an error. We still officially use CMake 3.5.1, but keep this
+# setting for the sake of semantic support in various development tools.
+# TODO Someday, update CMake to 3.7.2 or later to take advantage of improvements
+# such as `cmake-server`.
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
+# identify platform: HOST_PLATFORM, TARGET_PLATFORM and related
+# note: this should be placed before flags and options setting
+nnfw_include(IdentifyPlatform)
+
+# apply compilation flags
+# note: this should be placed after cmake/option/option_xxx.cmake files
+include("cmake/ApplyCompileFlags.cmake")
+
+# Configuration flags
+include("cmake/CfgOptionFlags.cmake")
+# Besides CfgOptionFlags.cmake, the following can be given from outside:
+# COVERAGE_BUILD: boolean flag that enables a coverage-test build
+# ROOTFS_DIR: rootfs path for cross building
+# TARGET_ARCH: target architecture string for cross building
+# TARGET_OS: target OS string for cross building
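+#
+# A hypothetical invocation passing these from outside could look like:
+#   cmake -DTARGET_ARCH=armv7l -DTARGET_OS=linux -DCOVERAGE_BUILD=1 <path-to-infra/nnfw>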
+
+nnfw_find_package(GTest QUIET)
+
+option(ENABLE_TEST "Build Tests using Google Test" ${GTest_FOUND})
+
+if(${ENABLE_TEST} AND NOT ${GTest_FOUND})
+ message(FATAL_ERROR "Google Test is required to enable test")
+endif(${ENABLE_TEST} AND NOT ${GTest_FOUND})
+
+if(${ENABLE_COVERAGE} AND NOT ${ENABLE_TEST})
+ message(FATAL_ERROR "Test should be enabled to measure test coverage")
+endif(${ENABLE_COVERAGE} AND NOT ${ENABLE_TEST})
+
+add_library(nnfw_common INTERFACE)
+if(ENABLE_STRICT_BUILD)
+ target_compile_options(nnfw_common INTERFACE -Werror -Wall -Wextra)
+endif(ENABLE_STRICT_BUILD)
+
+# TODO Replace the default build option setting in cmake/buildtool/config/config_linux.cmake
+#      with linking nnfw_coverage on each module that wants coverage checking
+add_library(nnfw_coverage INTERFACE)
+if(ENABLE_COVERAGE)
+ target_compile_options(nnfw_coverage INTERFACE -g -O -DDEBUG -fprofile-arcs -ftest-coverage)
+ target_link_libraries(nnfw_coverage INTERFACE gcov)
+endif(ENABLE_COVERAGE)
+
+nnfw_include(ExtendCMakeFunction)
+
+set(NNFW_SOURCE_ROOT "${CMAKE_SOURCE_DIR}/../..")
+
+add_library(nnfw-header INTERFACE)
+target_include_directories(nnfw-header INTERFACE ${NNFW_SOURCE_ROOT}/runtimes/include)
+
+# TODO Support android build via fine-control for each component
+# - Introduce BUILD_CONTRIB option
+# - Set "BUILD_TFLITE_BENCHMARK_MODEL" as OFF for android build
+#
+# The original android build script (for future reference)
+#
+# add_subdirectory(libs)
+# add_subdirectory(tests/tools/nnapi_test)
+# add_subdirectory(tests/tools/tflite_benchmark)
+# add_subdirectory(tests/nnapi)
+#
+# add_subdirectory(runtimes)
+
+add_subdirectory(${NNFW_SOURCE_ROOT}/runtimes/contrib contrib)
+add_subdirectory(${NNFW_SOURCE_ROOT}/runtimes/libs libs)
+add_subdirectory(${NNFW_SOURCE_ROOT}/runtimes runtimes)
+add_subdirectory(${NNFW_SOURCE_ROOT}/tests tests)
+add_subdirectory(${NNFW_SOURCE_ROOT}/tools tools)
diff --git a/infra/nnfw/cmake/ApplyCompileFlags.cmake b/infra/nnfw/cmake/ApplyCompileFlags.cmake
new file mode 100644
index 000000000..5098fd0f3
--- /dev/null
+++ b/infra/nnfw/cmake/ApplyCompileFlags.cmake
@@ -0,0 +1,33 @@
+#
+# Platform independent compile flag setting
+#
+# flags for build type: debug, release
+set(CMAKE_C_FLAGS_DEBUG "-O0 -g -DDEBUG")
+set(CMAKE_CXX_FLAGS_DEBUG "-O0 -g -DDEBUG")
+set(CMAKE_C_FLAGS_RELEASE "-O2 -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG")
+
+#
+# Platform specific compile flag setting
+#
+include("cmake/buildtool/config/config_${TARGET_PLATFORM}.cmake")
+
+#
+# Apply compile flags
+# note: this should be placed after cmake/buildtool/config/config_xxx.cmake files
+#
+# add common flags
+foreach(FLAG ${FLAGS_COMMON})
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAG}")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}")
+endforeach()
+
+# add c flags
+foreach(FLAG ${FLAGS_CONLY})
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAG}")
+endforeach()
+
+# add cxx flags
+foreach(FLAG ${FLAGS_CXXONLY})
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}")
+endforeach()
diff --git a/infra/nnfw/cmake/CfgOptionFlags.cmake b/infra/nnfw/cmake/CfgOptionFlags.cmake
new file mode 100644
index 000000000..d431f30a7
--- /dev/null
+++ b/infra/nnfw/cmake/CfgOptionFlags.cmake
@@ -0,0 +1,65 @@
+include(CMakeDependentOption)
+
+#
+# Platform specific configuration
+# note: this should be placed before the platform common settings for option priority
+#       (platform specific settings have higher priority)
+#
+include("cmake/options/options_${TARGET_PLATFORM}.cmake")
+
+#
+# Build configuration for project
+#
+option(BUILD_EXT_MULTITHREAD "Build external projects using multiple threads" ON)
+option(BUILD_NEURUN "Build neurun" ON)
+option(BUILD_LOGGING "Build logging runtime" ON)
+option(BUILD_PURE_ARM_COMPUTE "Build pure_arm_compute runtime" OFF)
+CMAKE_DEPENDENT_OPTION(BUILD_RUNTIME_NNAPI_TEST "Build Runtime NN API Generated Test"
+ # Set BUILD_RUNTIME_NNAPI_TEST as ON
+ # if CMAKE_COMPILER_IS_GNUCC AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 6.2
+ ON "CMAKE_COMPILER_IS_GNUCC;NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 6.2"
+ # Otherwise set BUILD_RUNTIME_NNAPI_TEST as OFF
+ OFF)
+option(BUILD_TFLITE_RUN "Build tflite-run" ON)
+option(BUILD_TFLITE_BENCHMARK_MODEL "Build tflite benchmark model" OFF)
+option(BUILD_NNAPI_QUICKCHECK "Build NN API Quickcheck tools" OFF)
+option(BUILD_TOOLS "Build nnfw projects under tools/" ON)
+option(BUILD_TFLITE_ACCURACY "Build tflite accuracy tool" OFF)
+option(BUILD_NNPACKAGE_RUN "Build nnpackage_run" ON)
+option(BUILD_SRCN_KERNEL "Build srcn kernel" OFF)
+option(ENVVAR_NEURUN_CONFIG "Use environment variable for neurun configuration" ON)
+#
+# Build configuration for contrib
+#
+option(BUILD_BENCHMARK_ACL "Build ARM Compute Library Benchmarks" OFF)
+option(BUILD_DETECTION_APP "Build detection example app" OFF)
+option(BUILD_LABS "Build lab projects" OFF)
+option(BUILD_TFLITE_TEST "Build tensorflow lite test" OFF)
+option(BUILD_TFLITE_CLASSIFY_APP "Build tflite_classify app" OFF)
+option(BUILD_UBEN "Build micro-benchmark (uben) suite" OFF)
+option(BUILD_MLAPSE "Build mlapse benchmark toolkit" OFF)
+option(BUILD_TFLITE_LOADER "Build tensorflow lite file loader" ON)
+option(BUILD_TFLITE_LOADER_TEST_TOOL "Build tflite loader testing tool" ON)
+option(BUILD_KBENCHMARK "Build kernel benchmark tool" OFF)
+#
+# Download configuration
+#
+option(DOWNLOAD_TENSORFLOW "Download Tensorflow source" ON)
+option(DOWNLOAD_ABSL "Download Absl source" ON)
+option(DOWNLOAD_EIGEN "Download Eigen source" ON)
+option(DOWNLOAD_FARMHASH "Download farmhash source" ON)
+option(DOWNLOAD_GEMMLOWP "Download GEMM low precision library source" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" ON)
+option(DOWNLOAD_NNPACK "Download NNPACK source" ON)
+option(DOWNLOAD_FLATBUFFERS "Download FlatBuffers source" ON)
+option(BUILD_TENSORFLOW_LITE "Build TensorFlow Lite from the downloaded source" ON)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(DOWNLOAD_NONIUS "Download nonius source" ON)
+option(DOWNLOAD_BOOST "Download boost source" OFF)
+option(BUILD_BOOST "Build boost source" OFF)
+#
+# GTest support
+#
+option(BUILD_GTEST "Download and build Google Test" ON)
+option(ENABLE_STRICT_BUILD "Treat warning as error" ON)
+option(ENABLE_COVERAGE "Build for coverage test" OFF)
diff --git a/infra/nnfw/cmake/buildtool/config/config_aarch64-linux.cmake b/infra/nnfw/cmake/buildtool/config/config_aarch64-linux.cmake
new file mode 100644
index 000000000..f859bbfe5
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_aarch64-linux.cmake
@@ -0,0 +1,12 @@
+#
+# aarch64 linux compile options
+#
+
+message(STATUS "Building for AARCH64 Linux")
+
+# include linux common
+include("cmake/buildtool/config/config_linux.cmake")
+
+# addition for aarch64-linux
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ )
diff --git a/infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake b/infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake
new file mode 100644
index 000000000..fa1e8b078
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake
@@ -0,0 +1,13 @@
+#
+# aarch64 tizen compile options
+#
+
+message(STATUS "Building for AARCH64 Tizen")
+
+# TODO : add and use option_tizen if something uncommon comes up
+# include linux common
+include("cmake/buildtool/config/config_linux.cmake")
+
+# addition for aarch64-tizen
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ )
diff --git a/infra/nnfw/cmake/buildtool/config/config_arm64-android.cmake b/infra/nnfw/cmake/buildtool/config/config_arm64-android.cmake
new file mode 100644
index 000000000..037541c58
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_arm64-android.cmake
@@ -0,0 +1,9 @@
+include("cmake/buildtool/config/config_linux.cmake")
+
+# On Android, pthread is contained in bionic(libc)
+set(LIB_PTHREAD "")
+
+# SIMD for arm64
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ "-ftree-vectorize"
+ )
diff --git a/infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake b/infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake
new file mode 100644
index 000000000..6d6459f0f
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake
@@ -0,0 +1,18 @@
+#
+# armv7l linux compile options
+#
+
+message(STATUS "Building for ARMv7l Linux")
+
+# include linux common
+include("cmake/buildtool/config/config_linux.cmake")
+
+# addition for arm-linux
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ "-mcpu=cortex-a7"
+ "-mfloat-abi=hard"
+ "-mfpu=neon-vfpv4"
+ "-funsafe-math-optimizations"
+ "-ftree-vectorize"
+ "-mfp16-format=ieee"
+ )
diff --git a/infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake b/infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake
new file mode 100644
index 000000000..0bbe0ddca
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake
@@ -0,0 +1,18 @@
+#
+# armv7l tizen compile options
+#
+
+message(STATUS "Building for ARMv7l(softfp) Tizen")
+
+# TODO : add and use option_tizen if something uncommon comes up
+# include linux common
+include("cmake/buildtool/config/config_linux.cmake")
+
+# addition for arm-linux
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ "-mcpu=cortex-a8"
+ "-mfloat-abi=softfp"
+ "-mfpu=neon-vfpv4"
+ "-funsafe-math-optimizations"
+ "-ftree-vectorize"
+ )
diff --git a/infra/nnfw/cmake/buildtool/config/config_linux.cmake b/infra/nnfw/cmake/buildtool/config/config_linux.cmake
new file mode 100644
index 000000000..a17bbb2bd
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_linux.cmake
@@ -0,0 +1,30 @@
+#
+# linux common compile options
+#
+
+# test-coverage build flag
+if("${COVERAGE_BUILD}" STREQUAL "1")
+ set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE ON)
+ set(FLAGS_COMMON "${FLAGS_COMMON} -fprofile-arcs -ftest-coverage")
+ set(CMAKE_EXE_LINKER_FLAGS
+ "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage")
+ set(CMAKE_C_FLAGS_DEBUG "-O -g -DDEBUG")
+ set(CMAKE_CXX_FLAGS_DEBUG "-O -g -DDEBUG")
+endif()
+
+#
+# linux common variable and settings
+#
+
+# suppress a warning from ARM Compute Library
+# https://github.com/ARM-software/ComputeLibrary/issues/330
+set(GCC_VERSION_DISABLE_WARNING 6.0)
+if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER GCC_VERSION_DISABLE_WARNING)
+ message(STATUS "GCC version higher than ${GCC_VERSION_DISABLE_WARNING}")
+ set(FLAGS_CXXONLY ${FLAGS_CXXONLY}
+ "-Wno-ignored-attributes"
+ )
+endif()
+
+# lib pthread as a variable (pthread must be disabled on android)
+set(LIB_PTHREAD pthread)
diff --git a/infra/nnfw/cmake/buildtool/config/config_x86_64-linux.cmake b/infra/nnfw/cmake/buildtool/config/config_x86_64-linux.cmake
new file mode 100644
index 000000000..528e48396
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_x86_64-linux.cmake
@@ -0,0 +1,12 @@
+#
+# x86_64 linux compile options
+#
+message(STATUS "Building for x86-64 Linux")
+
+# include linux common
+include("cmake/buildtool/config/config_linux.cmake")
+
+# SIMD for x86
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ "-msse4"
+ )
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake
new file mode 100644
index 000000000..2d1a08d2b
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake
@@ -0,0 +1,39 @@
+#
+# config for aarch64-linux
+#
+include(CMakeForceCompiler)
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_PROCESSOR aarch64)
+
+set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)
+set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)
+
+# where is the target environment
+set(NNFW_SOURCE_ROOT "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
+set(ROOTFS_AARCH64 "${NNFW_SOURCE_ROOT}/tools/cross/rootfs/arm64")
+include("${NNFW_SOURCE_ROOT}/infra/nnfw/cmake/modules/OptionTools.cmake")
+
+envoption(ROOTFS_DIR ${ROOTFS_AARCH64})
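+# (envoption comes from OptionTools.cmake; presumably it takes the value from an
+# identically named environment variable when set, falling back to the default above)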
+if(NOT EXISTS "${ROOTFS_DIR}/lib/aarch64-linux-gnu")
+ message(FATAL_ERROR "Please prepare RootFS for AARCH64")
+endif()
+
+set(CMAKE_SYSROOT ${ROOTFS_DIR})
+set(CMAKE_FIND_ROOT_PATH ${ROOTFS_DIR})
+set(CMAKE_SHARED_LINKER_FLAGS
+ "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+set(CMAKE_EXE_LINKER_FLAGS
+ "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+
+# search for programs in the build host directories
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+
+# for libraries and headers in the target directories
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+
+# Set cache variable to ignore the try-run error raised by find_package(Threads REQUIRED) on cross builds
+set(THREADS_PTHREAD_ARG "2" CACHE STRING "Result from TRY_RUN" FORCE)
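+
+# Typical use of this toolchain file (hypothetical source-tree path):
+#   cmake -DCMAKE_TOOLCHAIN_FILE=infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake ..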
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake
new file mode 100644
index 000000000..9e1cf3b11
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake
@@ -0,0 +1,62 @@
+#
+# config for aarch64-tizen
+#
+include(CMakeForceCompiler)
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_PROCESSOR aarch64)
+
+set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc-5)
+set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++-5)
+
+set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/6.2.1")
+
+# where is the target environment
+set(NNFW_SOURCE_ROOT "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
+set(ROOTFS_AARCH64 "${NNFW_SOURCE_ROOT}/tools/cross/rootfs/arm64")
+include("${NNFW_SOURCE_ROOT}/infra/nnfw/cmake/modules/OptionTools.cmake")
+
+envoption(ROOTFS_DIR ${ROOTFS_AARCH64})
+if(NOT EXISTS "${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+ message(FATAL_ERROR "Please prepare RootFS for tizen aarch64")
+endif()
+
+set(CMAKE_SYSROOT ${ROOTFS_DIR})
+set(CMAKE_FIND_ROOT_PATH ${ROOTFS_DIR})
+set(CMAKE_SHARED_LINKER_FLAGS
+ "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+set(CMAKE_EXE_LINKER_FLAGS
+ "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+
+# search for programs in the build host directories
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+
+# for libraries and headers in the target directories
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+
+# Set cache variable to ignore the try-run error raised by find_package(Threads REQUIRED) on cross builds
+set(THREADS_PTHREAD_ARG "2" CACHE STRING "Result from TRY_RUN" FORCE)
+
+add_compile_options(--sysroot=${ROOTFS_DIR})
+
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}")
+
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}")
+
+include_directories(SYSTEM ${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+include_directories(SYSTEM ${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
+add_compile_options(-Wno-deprecated-declarations) # compile-time option
+add_compile_options(-D__extern_always_inline=inline) # compile-time option
+
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -B${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_DIR}/lib64")
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib64")
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -B${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_DIR}/lib64")
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib64")
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake
new file mode 100644
index 000000000..c0c707dd3
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake
@@ -0,0 +1,44 @@
+set(ANDROID_STANDALONE $ENV{ROOTFS_DIR})
+set(CROSS_NDK_TOOLCHAIN ${ANDROID_STANDALONE}/bin)
+set(CROSS_ROOTFS ${ANDROID_STANDALONE}/sysroot)
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_VERSION 1)
+set(CMAKE_SYSTEM_PROCESSOR aarch64)
+
+## Specify the toolchain
+set(TOOLCHAIN "aarch64-linux-android")
+set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
+set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
+
+find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
+find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
+find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
+find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
+find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ld)
+find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
+find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
+
+add_compile_options(--sysroot=${CROSS_ROOTFS})
+add_compile_options(-fPIE)
+
+## Needed for Android or bionic specific conditionals
+#add_compile_options(-D__ANDROID__)
+#add_compile_options(-D__BIONIC__)
+
+## NOTE Not sure this is safe; it may cause side effects.
+## Without this, the Android toolchain cannot find `std::stol`, `std::stoi`, and so on
+add_compile_options(-D_GLIBCXX_USE_C99=1)
+
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
+
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+
+set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake
new file mode 100644
index 000000000..8e3619879
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake
@@ -0,0 +1,38 @@
+#
+# config for arm-linux
+#
+include(CMakeForceCompiler)
+include("${CMAKE_CURRENT_LIST_DIR}/../../modules/OptionTools.cmake")
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_PROCESSOR armv7l)
+
+set(CMAKE_C_COMPILER arm-linux-gnueabihf-gcc)
+set(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)
+
+# where is the target environment
+set(ROOTFS_ARM "${CMAKE_CURRENT_LIST_DIR}/../../../../../tools/cross/rootfs/arm")
+envoption(ROOTFS_DIR ${ROOTFS_ARM})
+
+if(NOT EXISTS "${ROOTFS_DIR}/lib/arm-linux-gnueabihf")
+ message(FATAL_ERROR "Please prepare RootFS for ARM")
+endif()
+
+set(CMAKE_SYSROOT ${ROOTFS_DIR})
+set(CMAKE_FIND_ROOT_PATH ${ROOTFS_DIR})
+set(CMAKE_SHARED_LINKER_FLAGS
+ "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+set(CMAKE_EXE_LINKER_FLAGS
+ "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+
+# search for programs in the build host directories
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+
+# for libraries and headers in the target directories
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+
+# Set cache variable to ignore the try-run error raised by find_package(Threads REQUIRED) on cross builds
+set(THREADS_PTHREAD_ARG "2" CACHE STRING "Result from TRY_RUN" FORCE)
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake
new file mode 100644
index 000000000..eb8e63832
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake
@@ -0,0 +1,66 @@
+#
+# config for armv7l-tizen
+#
+include(CMakeForceCompiler)
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_PROCESSOR armv7l)
+
+set(CMAKE_C_COMPILER arm-linux-gnueabi-gcc)
+set(CMAKE_CXX_COMPILER arm-linux-gnueabi-g++)
+
+set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
+
+# where is the target environment
+set(NNFW_SOURCE_ROOT "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
+set(ROOTFS_ARM "${NNFW_SOURCE_ROOT}/tools/cross/rootfs/armel")
+include("${NNFW_SOURCE_ROOT}/infra/nnfw/cmake/modules/OptionTools.cmake")
+
+envoption(ROOTFS_DIR ${ROOTFS_ARM})
+if(NOT EXISTS "${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ message(FATAL_ERROR "Please prepare RootFS for tizen ARM softfp")
+endif()
+
+set(CMAKE_SYSROOT ${ROOTFS_DIR})
+set(CMAKE_FIND_ROOT_PATH ${ROOTFS_DIR})
+set(CMAKE_SHARED_LINKER_FLAGS
+ "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+set(CMAKE_EXE_LINKER_FLAGS
+ "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}"
+ CACHE INTERNAL "" FORCE)
+
+# search for programs in the build host directories
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+
+# for libraries and headers in the target directories
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+
+# Set cache variable to ignore the try-run error raised by find_package(Threads REQUIRED) on cross builds
+set(THREADS_PTHREAD_ARG "2" CACHE STRING "Result from TRY_RUN" FORCE)
+
+
+add_compile_options(-mthumb)
+add_compile_options(-mfpu=neon-vfpv4)
+add_compile_options(-mfloat-abi=softfp)
+add_compile_options(--sysroot=${ROOTFS_DIR})
+
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}")
+
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --sysroot=${ROOTFS_DIR}")
+
+include_directories(SYSTEM ${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+include_directories(SYSTEM ${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
+add_compile_options(-Wno-deprecated-declarations) # compile-time option
+add_compile_options(-D__extern_always_inline=inline) # compile-time option
+
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -B${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_DIR}/lib")
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib")
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -B${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_DIR}/lib")
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib")
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
diff --git a/infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake b/infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake
new file mode 100644
index 000000000..06b7c768d
--- /dev/null
+++ b/infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake
@@ -0,0 +1,27 @@
+function(list_subdirectories OUTPUT_VARIABLE)
+ cmake_parse_arguments(ARG "" "" "EXCLUDES" ${ARGN})
+
+ file(GLOB PROJECT_FILES
+ RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
+ "*/CMakeLists.txt")
+
+ foreach(PROJECT_FILE IN ITEMS ${PROJECT_FILES})
+ get_filename_component(PROJECT_DIR ${PROJECT_FILE} DIRECTORY)
+ list(FIND ARG_EXCLUDES ${PROJECT_DIR} PROJECT_INDEX)
+ if(${PROJECT_INDEX} EQUAL -1)
+ list(APPEND PROJECT_LIST ${PROJECT_DIR})
+ endif(${PROJECT_INDEX} EQUAL -1)
+ endforeach(PROJECT_FILE)
+
+ set(${OUTPUT_VARIABLE} ${PROJECT_LIST} PARENT_SCOPE)
+endfunction(list_subdirectories)
+
+function(add_subdirectories)
+ cmake_parse_arguments(ARG "" "" "EXCLUDES" ${ARGN})
+
+ list_subdirectories(PROJECT_DIRS EXCLUDES ${ARG_EXCLUDES})
+
+ foreach(PROJECT_DIR IN ITEMS ${PROJECT_DIRS})
+ add_subdirectory(${PROJECT_DIR})
+ endforeach(PROJECT_DIR)
+endfunction(add_subdirectories)
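+
+# Usage sketch (hypothetical directory name):
+#   add_subdirectories(EXCLUDES experimental)
+# adds every immediate subdirectory containing a CMakeLists.txt, except those excluded.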
diff --git a/cmake/modules/ExternalProjectTools.cmake b/infra/nnfw/cmake/modules/ExternalProjectTools.cmake
index 71d74e5a6..71d74e5a6 100644
--- a/cmake/modules/ExternalProjectTools.cmake
+++ b/infra/nnfw/cmake/modules/ExternalProjectTools.cmake
diff --git a/infra/nnfw/cmake/modules/ExternalSourceTools.cmake b/infra/nnfw/cmake/modules/ExternalSourceTools.cmake
new file mode 100644
index 000000000..01a0c4d00
--- /dev/null
+++ b/infra/nnfw/cmake/modules/ExternalSourceTools.cmake
@@ -0,0 +1,102 @@
+function(ExternalSource_Download PREFIX URL)
+ get_filename_component(FILENAME ${URL} NAME)
+
+ set(CACHE_DIR "${NNFW_EXTERNALS_DIR}")
+ set(OUT_DIR "${CACHE_DIR}/${PREFIX}")
+ set(TMP_DIR "${CACHE_DIR}/${PREFIX}-tmp")
+
+ set(DOWNLOAD_PATH "${CACHE_DIR}/${PREFIX}-${FILENAME}")
+ set(STAMP_PATH "${CACHE_DIR}/${PREFIX}.stamp")
+
+ if(NOT EXISTS "${CACHE_DIR}")
+ file(MAKE_DIRECTORY "${CACHE_DIR}")
+ endif(NOT EXISTS "${CACHE_DIR}")
+
+ # TODO Check MD5 for correctness
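+  # The stamp file records the URL of the last successful download; a missing
+  # stamp, a missing output directory, or a changed URL triggers a fresh download.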
+ set(MATCH_URL FALSE)
+ if(EXISTS "${STAMP_PATH}")
+ file(READ "${STAMP_PATH}" SAVED_URL)
+ if("${SAVED_URL}" STREQUAL "${URL}")
+ set(MATCH_URL TRUE)
+ endif("${SAVED_URL}" STREQUAL "${URL}")
+ endif(EXISTS "${STAMP_PATH}")
+
+ if(NOT EXISTS "${STAMP_PATH}" OR NOT EXISTS "${OUT_DIR}" OR NOT MATCH_URL)
+ file(REMOVE_RECURSE "${OUT_DIR}")
+ file(REMOVE_RECURSE "${TMP_DIR}")
+
+ file(MAKE_DIRECTORY "${TMP_DIR}")
+
+ message(STATUS "Download ${PREFIX} from ${URL}")
+ file(DOWNLOAD ${URL} "${DOWNLOAD_PATH}"
+ STATUS status
+ LOG log)
+
+ list(GET status 0 status_code)
+ list(GET status 1 status_string)
+
+ if(NOT status_code EQUAL 0)
+ message(FATAL_ERROR "error: downloading '${URL}' failed
+ status_code: ${status_code}
+ status_string: ${status_string}
+ log: ${log}")
+ endif()
+ message(STATUS "Download ${PREFIX} from ${URL} - done")
+
+ message(STATUS "Extract ${PREFIX}")
+ execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz "${DOWNLOAD_PATH}"
+ WORKING_DIRECTORY "${TMP_DIR}")
+ file(REMOVE "${DOWNLOAD_PATH}")
+ message(STATUS "Extract ${PREFIX} - done")
+
+ message(STATUS "Cleanup ${PREFIX}")
+ file(GLOB contents "${TMP_DIR}/*")
+ list(LENGTH contents n)
+ if(NOT n EQUAL 1 OR NOT IS_DIRECTORY "${contents}")
+ set(contents "${TMP_DIR}")
+ endif()
+
+ get_filename_component(contents ${contents} ABSOLUTE)
+
+ file(RENAME ${contents} "${OUT_DIR}")
+ file(REMOVE_RECURSE "${TMP_DIR}")
+ file(WRITE "${STAMP_PATH}" "${URL}")
+ message(STATUS "Cleanup ${PREFIX} - done")
+ endif()
+
+ set(${PREFIX}_SOURCE_DIR "${OUT_DIR}" PARENT_SCOPE)
+endfunction(ExternalSource_Download)
+
+function(ExternalSource_Get PREFIX DOWNLOAD_FLAG URL)
+ set(CACHE_DIR "${NNFW_EXTERNALS_DIR}")
+ set(OUT_DIR "${CACHE_DIR}/${PREFIX}")
+ set(STAMP_PATH "${CACHE_DIR}/${PREFIX}.stamp")
+
+ if(NOT EXISTS "${CACHE_DIR}")
+ file(MAKE_DIRECTORY "${CACHE_DIR}")
+ endif(NOT EXISTS "${CACHE_DIR}")
+
+ # TODO Check MD5 for correctness
+ set(MATCH_URL FALSE)
+ if(EXISTS "${STAMP_PATH}")
+ file(READ "${STAMP_PATH}" SAVED_URL)
+ if("${SAVED_URL}" STREQUAL "${URL}")
+ set(MATCH_URL TRUE)
+ endif("${SAVED_URL}" STREQUAL "${URL}")
+ endif(EXISTS "${STAMP_PATH}")
+
+ set(SOURCE_GET TRUE)
+
+ if(NOT EXISTS "${STAMP_PATH}" OR NOT EXISTS "${OUT_DIR}" OR NOT MATCH_URL)
+ if(NOT DOWNLOAD_FLAG)
+ set(SOURCE_GET FALSE)
+ else(NOT DOWNLOAD_FLAG)
+ ExternalSource_Download(${PREFIX} ${URL})
+ endif(NOT DOWNLOAD_FLAG)
+ endif()
+
+ set(${PREFIX}_SOURCE_GET ${SOURCE_GET} PARENT_SCOPE)
+ set(${PREFIX}_SOURCE_DIR "${OUT_DIR}" PARENT_SCOPE)
+endfunction(ExternalSource_Get)
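+
+# Usage (as in the package configs under cmake/packages), e.g.:
+#   ExternalSource_Get("acl" ${DOWNLOAD_ARMCOMPUTE} ${ARMCOMPUTE_URL})
+# afterwards acl_SOURCE_GET tells whether the source is available and
+# acl_SOURCE_DIR where it was placed.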
+
+set(ExternalSourceTools_FOUND TRUE)
diff --git a/infra/nnfw/cmake/modules/IdentifyPlatform.cmake b/infra/nnfw/cmake/modules/IdentifyPlatform.cmake
new file mode 100644
index 000000000..9313eefcf
--- /dev/null
+++ b/infra/nnfw/cmake/modules/IdentifyPlatform.cmake
@@ -0,0 +1,45 @@
+# set host platform to build
+if(NOT HOST_ARCH OR "${HOST_ARCH}" STREQUAL "")
+ set(HOST_ARCH ${CMAKE_HOST_SYSTEM_PROCESSOR})
+endif()
+
+# set target platform to run
+if(NOT TARGET_ARCH OR "${TARGET_ARCH}" STREQUAL "")
+ set(TARGET_ARCH "${HOST_ARCH}")
+endif()
+
+if(NOT DEFINED HOST_OS)
+ string(TOLOWER ${CMAKE_HOST_SYSTEM_NAME} HOST_OS)
+endif()
+
+if(NOT DEFINED TARGET_OS)
+ set(TARGET_OS "${HOST_OS}")
+endif()
+
+if("${HOST_ARCH}" STREQUAL "x86_64")
+ set(HOST_ARCH_BASE ${HOST_ARCH})
+elseif("${HOST_ARCH}" STREQUAL "armv7l")
+ set(HOST_ARCH_BASE "arm")
+elseif("${HOST_ARCH}" STREQUAL "arm64")
+ set(HOST_ARCH_BASE "arm64")
+elseif("${HOST_ARCH}" STREQUAL "aarch64")
+ set(HOST_ARCH_BASE "aarch64")
+else()
+ message(FATAL_ERROR "'${HOST_ARCH}' architecture is not supported")
+endif()
+
+if("${TARGET_ARCH}" STREQUAL "x86_64")
+ set(TARGET_ARCH_BASE ${TARGET_ARCH})
+elseif("${TARGET_ARCH}" STREQUAL "armv7l")
+ set(TARGET_ARCH_BASE "arm")
+elseif("${TARGET_ARCH}" STREQUAL "arm64")
+ set(TARGET_ARCH_BASE "arm64")
+elseif("${TARGET_ARCH}" STREQUAL "aarch64")
+ set(TARGET_ARCH_BASE "aarch64")
+else()
+ message(FATAL_ERROR "'${TARGET_ARCH}' architecture is not supported")
+endif()
+
+# host & target platform name
+set(HOST_PLATFORM "${HOST_ARCH}-${HOST_OS}")
+set(TARGET_PLATFORM "${TARGET_ARCH}-${TARGET_OS}")
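+
+# Example: building on an x86_64 Linux host for armv7l Linux yields
+#   HOST_PLATFORM=x86_64-linux, TARGET_PLATFORM=armv7l-linux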
diff --git a/cmake/modules/OptionTools.cmake b/infra/nnfw/cmake/modules/OptionTools.cmake
index 066d53078..066d53078 100644
--- a/cmake/modules/OptionTools.cmake
+++ b/infra/nnfw/cmake/modules/OptionTools.cmake
diff --git a/infra/nnfw/cmake/options/options_aarch64-linux.cmake b/infra/nnfw/cmake/options/options_aarch64-linux.cmake
new file mode 100644
index 000000000..fbe49d421
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_aarch64-linux.cmake
@@ -0,0 +1,8 @@
+#
+# aarch64 linux cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
+option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
diff --git a/infra/nnfw/cmake/options/options_aarch64-tizen.cmake b/infra/nnfw/cmake/options/options_aarch64-tizen.cmake
new file mode 100644
index 000000000..66ee34284
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_aarch64-tizen.cmake
@@ -0,0 +1,10 @@
+#
+# aarch64 tizen cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
+option(DOWNLOAD_NNPACK "Download NNPACK library source" OFF)
+option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
+option(ENVVAR_NEURUN_CONFIG "Use environment variable for neurun configuration" OFF)
diff --git a/infra/nnfw/cmake/options/options_arm64-android.cmake b/infra/nnfw/cmake/options/options_arm64-android.cmake
new file mode 100644
index 000000000..486b3bb96
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_arm64-android.cmake
@@ -0,0 +1,7 @@
+#
+# arm64 android cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" ON)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
+
diff --git a/infra/nnfw/cmake/options/options_armv7l-linux.cmake b/infra/nnfw/cmake/options/options_armv7l-linux.cmake
new file mode 100644
index 000000000..f06f998d9
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_armv7l-linux.cmake
@@ -0,0 +1,8 @@
+#
+# armv7l linux cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
+option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
diff --git a/infra/nnfw/cmake/options/options_armv7l-tizen.cmake b/infra/nnfw/cmake/options/options_armv7l-tizen.cmake
new file mode 100644
index 000000000..3ba48f332
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_armv7l-tizen.cmake
@@ -0,0 +1,12 @@
+#
+# armv7l tizen cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
+option(DOWNLOAD_NNPACK "Download NNPACK library source" OFF)
+option(BUILD_TFLITE_LOADER "Build tensorflow lite file loader" ON)
+option(BUILD_TFLITE_LOADER_TEST_TOOL "Build tflite loader testing tool" OFF)
+option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
+option(ENVVAR_NEURUN_CONFIG "Use environment variable for neurun configuration" OFF)
diff --git a/infra/nnfw/cmake/options/options_x86_64-linux.cmake b/infra/nnfw/cmake/options/options_x86_64-linux.cmake
new file mode 100644
index 000000000..179e5bdb2
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_x86_64-linux.cmake
@@ -0,0 +1,7 @@
+#
+# x86_64 linux cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" OFF)
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" OFF)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" ON)
diff --git a/infra/nnfw/cmake/packages/ARMCompute/SConstruct b/infra/nnfw/cmake/packages/ARMCompute/SConstruct
new file mode 100644
index 000000000..9c0e4a84e
--- /dev/null
+++ b/infra/nnfw/cmake/packages/ARMCompute/SConstruct
@@ -0,0 +1,309 @@
+# Copyright (c) 2016, 2017 ARM Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import SCons
+import os
+import subprocess
+
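+# Compares dotted version strings positionally; the step of 2 skips the dot
+# characters, so only the digit positions are compared. Note this assumes
+# single-digit components, which holds for the versions checked in this script.
+# For instance, version_at_least("4.9", "4.8") returns True.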
+def version_at_least(version, required):
+ end = min(len(version), len(required))
+
+ for i in range(0, end, 2):
+ if int(version[i]) < int(required[i]):
+ return False
+ elif int(version[i]) > int(required[i]):
+ return True
+
+ return True
+
+vars = Variables("scons")
+vars.AddVariables(
+ BoolVariable("debug", "Debug", False),
+ BoolVariable("asserts", "Enable asserts (this flag is forced to 1 for debug=1)", False),
+ BoolVariable("logging", "Logging (this flag is forced to 1 for debug=1)", False),
+ EnumVariable("arch", "Target Architecture", "armv7a", allowed_values=("armv7a", "arm64-v8a", "arm64-v8.2-a", "arm64-v8.2-a-sve", "x86_32", "x86_64")),
+ EnumVariable("os", "Target OS", "linux", allowed_values=("linux", "android", "tizen", "bare_metal")),
+ EnumVariable("build", "Build type", "cross_compile", allowed_values=("native", "cross_compile", "embed_only")),
+ BoolVariable("examples", "Build example programs", True),
+ BoolVariable("Werror", "Enable/disable the -Werror compilation flag", True),
+ BoolVariable("standalone", "Builds the tests as standalone executables, links statically with libgcc, libstdc++ and libarm_compute", False),
+ BoolVariable("opencl", "Enable OpenCL support", True),
+ BoolVariable("neon", "Enable Neon support", False),
+ BoolVariable("gles_compute", "Enable OpenGL ES Compute Shader support", False),
+ BoolVariable("embed_kernels", "Embed OpenCL kernels and OpenGL ES compute shaders in library binary", True),
+ BoolVariable("set_soname", "Set the library's soname and shlibversion (requires SCons 2.4 or above)", False),
+ BoolVariable("openmp", "Enable OpenMP backend", False),
+ BoolVariable("cppthreads", "Enable C++11 threads backend", True),
+ PathVariable("build_dir", "Specify sub-folder for the build", ".", PathVariable.PathAccept),
+ PathVariable("install_dir", "Specify sub-folder for the install", "", PathVariable.PathAccept),
+ ("extra_cxx_flags", "Extra CXX flags to be appended to the build command", ""),
+ ("extra_link_flags", "Extra LD flags to be appended to the build command", ""),
+ ("compiler_cache", "Command to prefix to the C and C++ compiler (e.g ccache)", "")
+)
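+
+# Illustrative cross-compile invocation (mirroring the options that
+# ARMComputeConfig.cmake passes to this script):
+#   scons os=linux arch=armv7a neon=1 opencl=1 examples=0 Werror=0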
+
+env = Environment(platform="posix", variables=vars, ENV = os.environ)
+build_path = env['build_dir']
+# If build_dir is a relative path then add a #build/ prefix:
+if not env['build_dir'].startswith('/'):
+ SConsignFile('build/%s/.scons' % build_path)
+ build_path = "#build/%s" % build_path
+else:
+ SConsignFile('%s/.scons' % build_path)
+
+install_path = env['install_dir']
+# If the install_dir is a relative path then assume it's inside build_dir
+if not env['install_dir'].startswith('/') and install_path != "":
+ install_path = "%s/%s" % (build_path, install_path)
+
+env.Append(LIBPATH = [build_path])
+Export('env')
+Export('vars')
+
+def install_lib( lib ):
+ # If there is no install folder, then there is nothing to do:
+ if install_path == "":
+ return lib
+ return env.Install( "%s/lib/" % install_path, lib)
+def install_bin( bin ):
+ # If there is no install folder, then there is nothing to do:
+ if install_path == "":
+ return bin
+ return env.Install( "%s/bin/" % install_path, bin)
+def install_include( inc ):
+ if install_path == "":
+ return inc
+ return env.Install( "%s/include/" % install_path, inc)
+
+Export('install_lib')
+Export('install_bin')
+
+Help(vars.GenerateHelpText(env))
+
+if env['build'] == "embed_only":
+ SConscript('./SConscript', variant_dir=build_path, duplicate=0)
+ Return()
+
+if env['neon'] and 'x86' in env['arch']:
+ print("Cannot compile NEON for x86")
+ Exit(1)
+
+if env['set_soname'] and not version_at_least(SCons.__version__, "2.4"):
+ print("Setting the library's SONAME / SHLIBVERSION requires SCons 2.4 or above")
+ print("Update your version of SCons or use set_soname=0")
+ Exit(1)
+
+if env['os'] == 'bare_metal':
+ if env['cppthreads'] or env['openmp']:
+ print("ERROR: OpenMP and C++11 threads not supported in bare_metal. Use cppthreads=0 openmp=0")
+ Exit(1)
+
+env.Append(CXXFLAGS = ['-Wno-deprecated-declarations','-Wall','-DARCH_ARM',
+ '-Wextra','-Wno-unused-parameter','-pedantic','-Wdisabled-optimization','-Wformat=2',
+ '-Winit-self','-Wstrict-overflow=2','-Wswitch-default',
+ '-fpermissive','-std=gnu++11','-Wno-vla','-Woverloaded-virtual',
+ '-Wctor-dtor-privacy','-Wsign-promo','-Weffc++','-Wno-format-nonliteral','-Wno-overlength-strings','-Wno-strict-overflow'])
+
+env.Append(CPPDEFINES = ['_GLIBCXX_USE_NANOSLEEP'])
+
+default_cpp_compiler = 'g++' if env['os'] != 'android' else 'clang++'
+default_c_compiler = 'gcc' if env['os'] != 'android' else 'clang'
+cpp_compiler = os.environ.get('CXX', default_cpp_compiler)
+c_compiler = os.environ.get('CC', default_c_compiler)
+
+if env['os'] == 'android' and ( 'clang++' not in cpp_compiler or 'clang' not in c_compiler ):
+ print( "WARNING: Only clang is officially supported to build the Compute Library for Android")
+
+if 'clang++' in cpp_compiler:
+ env.Append(CXXFLAGS = ['-Wno-format-nonliteral','-Wno-deprecated-increment-bool','-Wno-vla-extension','-Wno-mismatched-tags'])
+else:
+ env.Append(CXXFLAGS = ['-Wlogical-op','-Wnoexcept','-Wstrict-null-sentinel','-Wno-implicit-fallthrough'])
+
+if env['cppthreads']:
+ env.Append(CPPDEFINES = [('ARM_COMPUTE_CPP_SCHEDULER', 1)])
+
+if env['openmp']:
+ if 'clang++' in cpp_compiler:
+ print( "Clang does not support OpenMP. Use scheduler=cpp.")
+ Exit(1)
+
+ env.Append(CPPDEFINES = [('ARM_COMPUTE_OPENMP_SCHEDULER', 1)])
+ env.Append(CXXFLAGS = ['-fopenmp'])
+ env.Append(LINKFLAGS = ['-fopenmp'])
+
+prefix = ""
+if env['arch'] == 'armv7a':
+ env.Append(CXXFLAGS = ['-march=armv7-a', '-mthumb', '-mfpu=neon'])
+
+ if env['os'] == 'linux':
+ prefix = "arm-linux-gnueabihf-"
+ env.Append(CXXFLAGS = ['-mfloat-abi=hard'])
+ elif env['os'] == 'bare_metal':
+ prefix = "arm-eabi-"
+ env.Append(CXXFLAGS = ['-mfloat-abi=hard'])
+ elif env['os'] == 'android':
+ prefix = "arm-linux-androideabi-"
+ env.Append(CXXFLAGS = ['-mfloat-abi=softfp'])
+ elif env['os'] == 'tizen':
+ prefix = "arm-linux-gnueabi-"
+ env.Append(CXXFLAGS = ['-mfloat-abi=softfp'])
+elif env['arch'] == 'arm64-v8a':
+ env.Append(CXXFLAGS = ['-march=armv8-a'])
+ env.Append(CPPDEFINES = ['ARM_COMPUTE_AARCH64_V8A','NO_DOT_IN_TOOLCHAIN'])
+ if env['os'] == 'linux':
+ prefix = "aarch64-linux-gnu-"
+ elif env['os'] == 'bare_metal':
+ prefix = "aarch64-elf-"
+ elif env['os'] == 'android':
+ prefix = "aarch64-linux-android-"
+ elif env['os'] == 'tizen':
+ prefix = "aarch64-linux-gnu-"
+ if 'clang++' in cpp_compiler:
+ env.Append(CXXFLAGS = ['-no-integrated-as'])
+elif 'arm64-v8.2-a' in env['arch']:
+ if env['arch'] == 'arm64-v8.2-a-sve':
+ if env['os'] != 'bare_metal':
+ print("Only bare metal SVE is supported at the moment")
+ Exit(1)
+ env.Append(CXXFLAGS = ['-march=armv8.2-a+sve+fp16+dotprod'])
+ else:
+ env.Append(CXXFLAGS = ['-march=armv8.2-a+fp16']) # explicitly enable fp16 extension otherwise __ARM_FEATURE_FP16_VECTOR_ARITHMETIC is undefined
+ if env['os'] == 'linux':
+ prefix = "aarch64-linux-gnu-"
+ elif env['os'] == 'bare_metal':
+ prefix = "aarch64-elf-"
+ elif env['os'] == 'android':
+ prefix = "aarch64-linux-android-"
+ elif env['os'] == 'tizen':
+ prefix = "aarch64-linux-gnu-"
+ env.Append(CPPDEFINES = ['ARM_COMPUTE_AARCH64_V8_2','NO_DOT_IN_TOOLCHAIN'])
+ if 'clang++' in cpp_compiler:
+ env.Append(CXXFLAGS = ['-no-integrated-as'])
+elif env['arch'] == 'x86_32':
+ env.Append(CCFLAGS = ['-m32'])
+ env.Append(LINKFLAGS = ['-m32'])
+elif env['arch'] == 'x86_64':
+ env.Append(CCFLAGS = ['-m64'])
+ env.Append(LINKFLAGS = ['-m64'])
+
+if env['build'] == 'native':
+ prefix = ""
+
+env['CC'] = env['compiler_cache']+" "+prefix + c_compiler
+env['CXX'] = env['compiler_cache']+" "+prefix + cpp_compiler
+env['LD'] = prefix + "ld"
+env['AS'] = prefix + "as"
+env['AR'] = prefix + "ar"
+env['RANLIB'] = prefix + "ranlib"
+
+if not GetOption("help"):
+ try:
+ compiler_ver = subprocess.check_output(env['CXX'].split() + ["-dumpversion"]).strip()
+ except OSError:
+ print("ERROR: Compiler '%s' not found" % env['CXX'])
+ Exit(1)
+
+ if 'clang++' not in cpp_compiler:
+ if env['arch'] == 'arm64-v8.2-a' and not version_at_least(compiler_ver, '6.2.1'):
+ print("GCC 6.2.1 or newer is required to compile armv8.2-a code")
+ Exit(1)
+ elif env['arch'] == 'arm64-v8a' and not version_at_least(compiler_ver, '4.9'):
+ print("GCC 4.9 or newer is required to compile NEON code for AArch64")
+ Exit(1)
+
+ if version_at_least(compiler_ver, '6.1'):
+ env.Append(CXXFLAGS = ['-Wno-ignored-attributes'])
+
+ if compiler_ver == '4.8.3':
+ env.Append(CXXFLAGS = ['-Wno-array-bounds'])
+
+if env['standalone']:
+ env.Append(CXXFLAGS = ['-fPIC'])
+ env.Append(LINKFLAGS = ['-static-libgcc','-static-libstdc++'])
+
+if env['Werror']:
+ env.Append(CXXFLAGS = ['-Werror'])
+
+if env['os'] == 'android':
+ env.Append(CPPDEFINES = ['ANDROID'])
+ env.Append(LINKFLAGS = ['-pie', '-static-libstdc++'])
+elif env['os'] == 'bare_metal':
+ env.Append(LINKFLAGS = ['-static'])
+ env.Append(LINKFLAGS = ['-specs=rdimon.specs'])
+ env.Append(CXXFLAGS = ['-fPIC'])
+ env.Append(CPPDEFINES = ['NO_MULTI_THREADING'])
+ env.Append(CPPDEFINES = ['BARE_METAL'])
+
+if env['opencl']:
+ if env['os'] in ['bare_metal'] or env['standalone']:
+ print("Cannot link OpenCL statically, which is required for bare metal / standalone builds")
+ Exit(1)
+
+if env['gles_compute']:
+ if env['os'] in ['bare_metal'] or env['standalone']:
+ print("Cannot link OpenGLES statically, which is required for bare metal / standalone builds")
+ Exit(1)
+
+if env["os"] not in ["android", "bare_metal"] and (env['opencl'] or env['cppthreads']):
+ env.Append(LIBS = ['pthread'])
+
+if env['opencl'] or env['gles_compute']:
+ if env['embed_kernels']:
+ env.Append(CPPDEFINES = ['EMBEDDED_KERNELS'])
+
+if env['debug']:
+ env['asserts'] = True
+ env['logging'] = True
+ env.Append(CXXFLAGS = ['-O0','-g','-gdwarf-2'])
+ env.Append(CPPDEFINES = ['ARM_COMPUTE_DEBUG_ENABLED'])
+else:
+ env.Append(CXXFLAGS = ['-O3','-ftree-vectorize'])
+
+if env['asserts']:
+ env.Append(CPPDEFINES = ['ARM_COMPUTE_ASSERTS_ENABLED'])
+ env.Append(CXXFLAGS = ['-fstack-protector-strong'])
+
+if env['logging']:
+ env.Append(CPPDEFINES = ['ARM_COMPUTE_LOGGING_ENABLED'])
+
+env.Append(CPPPATH = ['#/include', "#"])
+env.Append(CXXFLAGS = env['extra_cxx_flags'])
+env.Append(LINKFLAGS = env['extra_link_flags'])
+
+Default( install_include("arm_compute"))
+Default( install_include("support"))
+
+Export('version_at_least')
+
+if env['opencl']:
+ SConscript("./opencl-1.2-stubs/SConscript", variant_dir="%s/opencl-1.2-stubs" % build_path, duplicate=0)
+
+if env['gles_compute'] and env['os'] != 'android':
+ env.Append(CPPPATH = ['#/include/linux'])
+ SConscript("./opengles-3.1-stubs/SConscript", variant_dir="%s/opengles-3.1-stubs" % build_path, duplicate=0)
+
+SConscript('./SConscript', variant_dir=build_path, duplicate=0)
+
+if env['examples'] and env['os'] != 'bare_metal':
+ SConscript('./examples/SConscript', variant_dir='%s/examples' % build_path, duplicate=0)
+
+if env['os'] != 'bare_metal':
+ SConscript('./tests/SConscript', variant_dir='%s/tests' % build_path, duplicate=0)
diff --git a/infra/nnfw/cmake/packages/ARMComputeConfig.cmake b/infra/nnfw/cmake/packages/ARMComputeConfig.cmake
new file mode 100644
index 000000000..ccf96692e
--- /dev/null
+++ b/infra/nnfw/cmake/packages/ARMComputeConfig.cmake
@@ -0,0 +1,205 @@
+function(_ARMCompute_Import)
+ include(FindPackageHandleStandardArgs)
+
+ list(APPEND ARMCompute_LIB_SEARCH_PATHS ${ARMCompute_PREFIX})
+
+ find_path(INCLUDE_DIR NAMES arm_compute/core/ITensor.h PATHS ${ARMCompute_INCLUDE_SEARCH_PATHS})
+
+ find_library(CORE_LIBRARY NAMES arm_compute_core PATHS ${ARMCompute_LIB_SEARCH_PATHS})
+ find_library(RUNTIME_LIBRARY NAMES arm_compute PATHS ${ARMCompute_LIB_SEARCH_PATHS})
+ find_library(GRAPH_LIBRARY NAMES arm_compute_graph PATHS ${ARMCompute_LIB_SEARCH_PATHS})
+
+ if(NOT INCLUDE_DIR)
+ nnfw_find_package(ARMComputeSource QUIET)
+ if (NOT ARMComputeSource_FOUND)
+ set(ARMCompute_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+ set(INCLUDE_DIR ${NNFW_EXTERNALS_DIR}/acl ${NNFW_EXTERNALS_DIR}/acl/include)
+ endif(NOT INCLUDE_DIR)
+
+  # NOTE '${CMAKE_INSTALL_PREFIX}/lib' should be searched as the CI server places
+  # pre-built ARM Compute libraries in this directory
+ if(NOT CORE_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_core.so)
+ set(CORE_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_core.so)
+ endif()
+
+ if(NOT CORE_LIBRARY)
+ set(ARMCompute_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ if(NOT RUNTIME_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute.so)
+ set(RUNTIME_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute.so)
+ endif()
+
+ if(NOT RUNTIME_LIBRARY)
+ set(ARMCompute_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ if(NOT GRAPH_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_graph.so)
+ set(GRAPH_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_graph.so)
+ endif()
+
+ if(NOT GRAPH_LIBRARY)
+ set(ARMCompute_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ if(NOT TARGET arm_compute_core)
+ add_library(arm_compute_core INTERFACE)
+ target_include_directories(arm_compute_core SYSTEM INTERFACE ${INCLUDE_DIR})
+ target_link_libraries(arm_compute_core INTERFACE dl ${LIB_PTHREAD})
+ target_link_libraries(arm_compute_core INTERFACE ${CORE_LIBRARY})
+ endif(NOT TARGET arm_compute_core)
+
+ if(NOT TARGET arm_compute)
+ add_library(arm_compute INTERFACE)
+ target_include_directories(arm_compute SYSTEM INTERFACE ${INCLUDE_DIR})
+ target_link_libraries(arm_compute INTERFACE ${RUNTIME_LIBRARY})
+ target_link_libraries(arm_compute INTERFACE arm_compute_core)
+ endif(NOT TARGET arm_compute)
+
+ if(NOT TARGET arm_compute_graph)
+ add_library(arm_compute_graph INTERFACE)
+ target_include_directories(arm_compute_graph SYSTEM INTERFACE ${INCLUDE_DIR})
+ target_link_libraries(arm_compute_graph INTERFACE ${GRAPH_LIBRARY})
+ target_link_libraries(arm_compute_graph INTERFACE arm_compute)
+ endif(NOT TARGET arm_compute_graph)
+
+ set(ARMCompute_FOUND TRUE PARENT_SCOPE)
+endfunction(_ARMCompute_Import)
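+
+# NOTE The imported INTERFACE targets chain their dependencies
+# (arm_compute_graph -> arm_compute -> arm_compute_core), so a consumer only
+# needs to link against the outermost target it uses.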
+
+### Check whether library exists
+function(_ARMCompute_Check VAR LIBDIR)
+ set(FOUND TRUE)
+
+ if(NOT EXISTS "${LIBDIR}/libarm_compute_core.so")
+ set(FOUND FALSE)
+ endif()
+
+ if(NOT EXISTS "${LIBDIR}/libarm_compute.so")
+ set(FOUND FALSE)
+ endif()
+
+ if(NOT EXISTS "${LIBDIR}/libarm_compute_graph.so")
+ set(FOUND FALSE)
+ endif()
+
+ set(${VAR} ${FOUND} PARENT_SCOPE)
+endfunction(_ARMCompute_Check)
+
+# Let's build and install ARMCompute libraries
+# NOTE This function silently returns on error
+function(_ARMCompute_Build ARMCompute_PREFIX)
+ ### Check whether library exists
+ _ARMCompute_Check(ARMCompute_FOUND ${ARMCompute_PREFIX})
+
+ if(ARMCompute_FOUND)
+ return()
+ endif(ARMCompute_FOUND)
+
+ ### Let's build with SCONS
+ nnfw_find_package(ARMComputeSource QUIET)
+
+ if(NOT ARMComputeSource_FOUND)
+ return()
+ endif(NOT ARMComputeSource_FOUND)
+
+ find_program(SCONS_PATH scons)
+
+ if(NOT SCONS_PATH)
+ return()
+ endif(NOT SCONS_PATH)
+
+ if(CMAKE_BUILD_TYPE)
+ string(TOLOWER "${CMAKE_BUILD_TYPE}" SCON_BUILD_TYPE)
+ else(CMAKE_BUILD_TYPE)
+ set(SCON_BUILD_TYPE "release")
+ endif(CMAKE_BUILD_TYPE)
+
+ #### Architecture-specific configurations
+ if(TARGET_ARCH STREQUAL "armv7l")
+ set(BUILD_ARCH "armv7a")
+ set(BUILD_DIR "${BUILD_ARCH}-${TARGET_OS}.${SCON_BUILD_TYPE}")
+ endif()
+
+ if(TARGET_ARCH STREQUAL "aarch64")
+ set(BUILD_ARCH "arm64-v8a")
+ set(BUILD_DIR "${BUILD_ARCH}-${TARGET_OS}.${SCON_BUILD_TYPE}")
+ endif()
+
+ if(TARGET_ARCH STREQUAL "arm64")
+ set(BUILD_ARCH "arm64-v8a")
+ set(BUILD_DIR "${BUILD_ARCH}-${TARGET_OS}.${SCON_BUILD_TYPE}")
+ endif()
+
+ #### Platform-specific configurations
+ #### TODO Support android
+
+ #### Mode-specific configurations
+ if(SCON_BUILD_TYPE STREQUAL "debug")
+ list(APPEND SCONS_OPTIONS "debug=1")
+ endif()
+
+ #### Generic configurations
+ list(APPEND SCONS_OPTIONS "neon=1")
+ list(APPEND SCONS_OPTIONS "opencl=1")
+ list(APPEND SCONS_OPTIONS "examples=0")
+ list(APPEND SCONS_OPTIONS "Werror=0")
+ list(APPEND SCONS_OPTIONS "os=${TARGET_OS}")
+ include(ProcessorCount)
+ ProcessorCount(N)
+ if((NOT N EQUAL 0) AND BUILD_EXT_MULTITHREAD)
+ list(APPEND SCONS_OPTIONS -j${N})
+ endif()
+ if(DEFINED BUILD_ARCH)
+ list(APPEND SCONS_OPTIONS "arch=${BUILD_ARCH}")
+ endif(DEFINED BUILD_ARCH)
+
+ if(DEFINED BUILD_DIR)
+ list(APPEND SCONS_OPTIONS "build_dir=${BUILD_DIR}")
+ endif(DEFINED BUILD_DIR)
+
+ message(STATUS "Build ARMCompute with ${SCONS_PATH} ('${SCONS_OPTIONS}'")
+
+  # Copy our modified SConstruct (cmake/packages/ARMCompute/SConstruct) into the
+  # ACL source tree for Tizen build support.
+  # TODO This SConstruct change should be upstreamed to the ARM Compute Library community.
+ execute_process(COMMAND ${CMAKE_COMMAND} -E copy "${CMAKE_CURRENT_LIST_DIR}/ARMCompute/SConstruct" "${ARMComputeSource_DIR}")
+
+ # Build ARMCompute libraries with SCONS
+  # NOTE ARMCompute SConstruct unconditionally appends "arm-linux-gnueabihf-" prefix for linux
+ execute_process(COMMAND /usr/bin/env CC=gcc CXX=g++ "${SCONS_PATH}" ${SCONS_OPTIONS}
+ WORKING_DIRECTORY ${ARMComputeSource_DIR}
+ RESULT_VARIABLE ARMCompute_BUILD)
+
+  # Install ARMCompute libraries
+  # NOTE The CI server copies the libraries installed below to the target device for testing.
+ execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${ARMCompute_PREFIX}"
+ WORKING_DIRECTORY ${ARMComputeSource_DIR}
+ RESULT_VARIABLE ARMCompute_BUILD)
+ execute_process(COMMAND ${CMAKE_COMMAND} -E copy "build/${BUILD_DIR}/libarm_compute_core.so" "${ARMCompute_PREFIX}"
+ COMMAND ${CMAKE_COMMAND} -E copy "build/${BUILD_DIR}/libarm_compute.so" "${ARMCompute_PREFIX}"
+ COMMAND ${CMAKE_COMMAND} -E copy "build/${BUILD_DIR}/libarm_compute_graph.so" "${ARMCompute_PREFIX}"
+ WORKING_DIRECTORY ${ARMComputeSource_DIR}
+ RESULT_VARIABLE ARMCompute_BUILD)
+endfunction(_ARMCompute_Build)
+
+
+set(ARMCompute_PREFIX ${CMAKE_INSTALL_PREFIX}/lib)
+
+# This is a workaround for CI issues
+# NOTE The CI server copies the libraries installed below to the target device for testing.
+# TODO Remove this workaround
+if(DEFINED ARMCompute_EXTDIR)
+ execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${ARMCompute_PREFIX}")
+ execute_process(COMMAND ${CMAKE_COMMAND} -E copy "${ARMCompute_EXTDIR}/libarm_compute_core.so" "${ARMCompute_PREFIX}"
+ COMMAND ${CMAKE_COMMAND} -E copy "${ARMCompute_EXTDIR}/libarm_compute.so" "${ARMCompute_PREFIX}"
+ COMMAND ${CMAKE_COMMAND} -E copy "${ARMCompute_EXTDIR}/libarm_compute_graph.so" "${ARMCompute_PREFIX}")
+endif(DEFINED ARMCompute_EXTDIR)
+
+if(BUILD_ARMCOMPUTE)
+ _ARMCompute_Build("${ARMCompute_PREFIX}")
+endif(BUILD_ARMCOMPUTE)
+_ARMCompute_Import()
diff --git a/infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake b/infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake
new file mode 100644
index 000000000..ef7384d7c
--- /dev/null
+++ b/infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake
@@ -0,0 +1,13 @@
+function(_ARMComputeSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(ARMCOMPUTE_URL ${EXTERNAL_DOWNLOAD_SERVER}/ARM-software/ComputeLibrary/archive/v19.05.tar.gz)
+ ExternalSource_Get("acl" ${DOWNLOAD_ARMCOMPUTE} ${ARMCOMPUTE_URL})
+
+ set(ARMComputeSource_DIR ${acl_SOURCE_DIR} PARENT_SCOPE)
+ set(ARMComputeSource_FOUND ${acl_SOURCE_GET} PARENT_SCOPE)
+endfunction(_ARMComputeSource_import)
+
+_ARMComputeSource_import()
diff --git a/infra/nnfw/cmake/packages/AbslSourceConfig.cmake b/infra/nnfw/cmake/packages/AbslSourceConfig.cmake
new file mode 100644
index 000000000..685550d52
--- /dev/null
+++ b/infra/nnfw/cmake/packages/AbslSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_AbslSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ # NOTE The following URL comes from TensorFlow 1.12
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(ABSL_URL ${EXTERNAL_DOWNLOAD_SERVER}/abseil/abseil-cpp/archive/389ec3f906f018661a5308458d623d01f96d7b23.tar.gz)
+ ExternalSource_Get("absl" ${DOWNLOAD_ABSL} ${ABSL_URL})
+
+ set(AbslSource_DIR ${absl_SOURCE_DIR} PARENT_SCOPE)
+ set(AbslSource_FOUND ${absl_SOURCE_GET} PARENT_SCOPE)
+endfunction(_AbslSource_import)
+
+_AbslSource_import()
diff --git a/infra/nnfw/cmake/packages/BoostConfig.cmake b/infra/nnfw/cmake/packages/BoostConfig.cmake
new file mode 100644
index 000000000..26ad78922
--- /dev/null
+++ b/infra/nnfw/cmake/packages/BoostConfig.cmake
@@ -0,0 +1,58 @@
+# Let's build and install Boost libraries
+function(_Boost_Build Boost_PREFIX)
+ nnfw_find_package(BoostSource QUIET)
+
+ if(NOT BoostSource_FOUND)
+ return()
+ endif(NOT BoostSource_FOUND)
+
+ #### Generic configurations
+ if(NOT EXISTS ${BoostSource_DIR}/b2)
+ execute_process(COMMAND "${BoostSource_DIR}/bootstrap.sh"
+ WORKING_DIRECTORY ${BoostSource_DIR}
+ RESULT_VARIABLE Boost_BUILD)
+ endif()
+
+ set(BoostBuild_DIR ${BoostSource_DIR})
+ set(BoostInstall_DIR ${Boost_PREFIX})
+
+ unset(Boost_Options)
+
+ list(APPEND Boost_Options --build-dir=${BoostBuild_DIR})
+ list(APPEND Boost_Options --prefix=${BoostInstall_DIR})
+ list(APPEND Boost_Options --with-log)
+ list(APPEND Boost_Options --with-program_options)
+ list(APPEND Boost_Options --with-system)
+ list(APPEND Boost_Options --with-filesystem)
+
+ set(JAM_FILENAME ${BoostBuild_DIR}/user-config.jam)
+
+ file(WRITE ${JAM_FILENAME} "using gcc : local : ${CMAKE_CXX_COMPILER} ;\n")
+ list(APPEND Boost_Options toolset=gcc-local)
+
+ # Install Boost libraries
+ execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${BoostInstall_DIR}")
+ execute_process(COMMAND /usr/bin/env BOOST_BUILD_PATH="${BoostBuild_DIR}" ${BoostSource_DIR}/b2 install ${Boost_Options}
+ WORKING_DIRECTORY ${BoostSource_DIR})
+
+endfunction(_Boost_Build)
+
+# Find a pre-installed Boost library and update the Boost variables.
+find_package(Boost 1.58.0 QUIET COMPONENTS log program_options filesystem system)
+if(Boost_FOUND)
+ return()
+endif()
+
+set(Boost_PREFIX ${CMAKE_INSTALL_PREFIX})
+
+if(BUILD_BOOST)
+ _Boost_Build("${Boost_PREFIX}")
+
+ # Prefer the locally built Boost over the system-wide one so submodules
+ # that need Boost libraries and header files can search for them
+ # in the ${Boost_PREFIX} directory
+ list(APPEND CMAKE_PREFIX_PATH "${Boost_PREFIX}")
+
+ # We built the Boost libraries ourselves, so update the Boost variables.
+ find_package(Boost 1.58.0 QUIET COMPONENTS log program_options filesystem system)
+endif(BUILD_BOOST)
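After this config runs, downstream CMake sees the standard FindBoost result variables regardless of which branch produced Boost. A minimal consumer sketch (the target and source names are hypothetical):

  # Hypothetical consumer relying on the FindBoost variables populated above.
  add_executable(nnfw_cli main.cc)
  target_include_directories(nnfw_cli PRIVATE ${Boost_INCLUDE_DIRS})
  target_link_libraries(nnfw_cli PRIVATE ${Boost_LIBRARIES})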
diff --git a/infra/nnfw/cmake/packages/BoostSourceConfig.cmake b/infra/nnfw/cmake/packages/BoostSourceConfig.cmake
new file mode 100644
index 000000000..1b81316fd
--- /dev/null
+++ b/infra/nnfw/cmake/packages/BoostSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_BoostSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ # EXTERNAL_DOWNLOAD_SERVER will be overridden by the CI server to point at a mirror.
+ envoption(EXTERNAL_DOWNLOAD_SERVER "http://sourceforge.net")
+ set(BOOST_URL ${EXTERNAL_DOWNLOAD_SERVER}/projects/boost/files/boost/1.58.0/boost_1_58_0.tar.gz)
+ ExternalSource_Get("boost" ${DOWNLOAD_BOOST} ${BOOST_URL})
+
+ set(BoostSource_DIR ${boost_SOURCE_DIR} PARENT_SCOPE)
+ set(BoostSource_FOUND ${boost_SOURCE_GET} PARENT_SCOPE)
+endfunction(_BoostSource_import)
+
+_BoostSource_import()
diff --git a/infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake b/infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake
new file mode 100644
index 000000000..0939ba3b3
--- /dev/null
+++ b/infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_cpuinfoSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(CPUINFO_URL ${EXTERNAL_DOWNLOAD_SERVER}/pytorch/cpuinfo/archive/d5e37adf1406cf899d7d9ec1d317c47506ccb970.tar.gz)
+
+ ExternalSource_Get("cpuinfo" ${DOWNLOAD_NNPACK} ${CPUINFO_URL})
+
+ set(CPUINFO_SOURCE_DIR ${cpuinfo_SOURCE_DIR} PARENT_SCOPE)
+ set(CPUINFO_SOURCE_FOUND ${cpuinfo_SOURCE_GET} PARENT_SCOPE)
+endfunction(_cpuinfoSource_import)
+
+_cpuinfoSource_import()
diff --git a/cmake/packages/EigenConfig.cmake b/infra/nnfw/cmake/packages/EigenConfig.cmake
index 0feb0890a..0feb0890a 100644
--- a/cmake/packages/EigenConfig.cmake
+++ b/infra/nnfw/cmake/packages/EigenConfig.cmake
diff --git a/infra/nnfw/cmake/packages/EigenSourceConfig.cmake b/infra/nnfw/cmake/packages/EigenSourceConfig.cmake
new file mode 100644
index 000000000..e9b0ba8f1
--- /dev/null
+++ b/infra/nnfw/cmake/packages/EigenSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_EigenSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ # NOTE TensorFlow 1.12 downloads Eigen from the following URL
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://bitbucket.org")
+ set(EIGEN_URL ${EXTERNAL_DOWNLOAD_SERVER}/eigen/eigen/get/88fc23324517.tar.gz)
+ ExternalSource_Get("eigen" ${DOWNLOAD_EIGEN} ${EIGEN_URL})
+
+ set(EigenSource_DIR ${eigen_SOURCE_DIR} PARENT_SCOPE)
+ set(EigenSource_FOUND ${eigen_SOURCE_GET} PARENT_SCOPE)
+endfunction(_EigenSource_import)
+
+_EigenSource_import()
diff --git a/infra/nnfw/cmake/packages/Enum34SourceConfig.cmake b/infra/nnfw/cmake/packages/Enum34SourceConfig.cmake
new file mode 100644
index 000000000..96b6ca85a
--- /dev/null
+++ b/infra/nnfw/cmake/packages/Enum34SourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_enum34Source_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://bitbucket.org")
+ set(ENUM34_URL ${EXTERNAL_DOWNLOAD_SERVER}/stoneleaf/enum34/get/1.1.6.tar.gz)
+
+ ExternalSource_Get("python_enum" ${DOWNLOAD_NNPACK} ${ENUM34_URL})
+
+ set(PYTHON_ENUM_SOURCE_DIR ${python_enum_SOURCE_DIR} PARENT_SCOPE)
+ set(PYTHON_ENUM_SOURCE_FOUND ${python_enum_SOURCE_GET} PARENT_SCOPE)
+endfunction(_enum34Source_import)
+
+_enum34Source_import()
diff --git a/infra/nnfw/cmake/packages/FP16SourceConfig.cmake b/infra/nnfw/cmake/packages/FP16SourceConfig.cmake
new file mode 100644
index 000000000..7df52948e
--- /dev/null
+++ b/infra/nnfw/cmake/packages/FP16SourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_FP16Source_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(FP16_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/FP16/archive/febbb1c163726b5db24bed55cc9dc42529068997.tar.gz)
+
+ ExternalSource_Get("FP16" ${DOWNLOAD_NNPACK} ${FP16_URL})
+
+ set(FP16_SOURCE_DIR ${FP16_SOURCE_DIR} PARENT_SCOPE)
+ set(FP16_SOURCE_FOUND ${FP16_SOURCE_GET} PARENT_SCOPE)
+endfunction(_FP16Source_import)
+
+_FP16Source_import()
diff --git a/infra/nnfw/cmake/packages/FXdivSourceConfig.cmake b/infra/nnfw/cmake/packages/FXdivSourceConfig.cmake
new file mode 100644
index 000000000..2ea574ab8
--- /dev/null
+++ b/infra/nnfw/cmake/packages/FXdivSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_FXdivSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(FXDIV_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/FXdiv/archive/f8c5354679ec2597792bc70a9e06eff50c508b9a.tar.gz)
+
+ ExternalSource_Get("FXdiv" ${DOWNLOAD_NNPACK} ${FXDIV_URL})
+
+ set(FXDIV_SOURCE_DIR ${FXdiv_SOURCE_DIR} PARENT_SCOPE)
+ set(FXDIV_SOURCE_FOUND ${FXdiv_SOURCE_GET} PARENT_SCOPE)
+endfunction(_FXdivSource_import)
+
+_FXdivSource_import()
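Note that the cpuinfo, enum34, FP16, and FXdiv source configs above all reuse ${DOWNLOAD_NNPACK} as their download gate, so fetching NNPACK's full dependency set is a single switch; a sketch, assuming the flag is seeded through the cache:

  # Sketch: one cache flag gates the whole NNPACK dependency set
  # (cpuinfo, enum34, FP16, FXdiv, and NNPACK itself further below).
  set(DOWNLOAD_NNPACK ON CACHE BOOL "Fetch NNPACK and its dependency sources")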
diff --git a/infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake b/infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake
new file mode 100644
index 000000000..b2cb9886d
--- /dev/null
+++ b/infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_FarmhashSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ # NOTE TensorFlow 1.12 downloads farmhash from the following URL
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(FARMHASH_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz)
+ ExternalSource_Get("farmhash" ${DOWNLOAD_FARMHASH} ${FARMHASH_URL})
+
+ set(FarmhashSource_DIR ${farmhash_SOURCE_DIR} PARENT_SCOPE)
+ set(FarmhashSource_FOUND ${farmhash_SOURCE_GET} PARENT_SCOPE)
+endfunction(_FarmhashSource_import)
+
+_FarmhashSource_import()
diff --git a/cmake/packages/FlatBuffersConfig.cmake b/infra/nnfw/cmake/packages/FlatBuffersConfig.cmake
index 064673158..064673158 100644
--- a/cmake/packages/FlatBuffersConfig.cmake
+++ b/infra/nnfw/cmake/packages/FlatBuffersConfig.cmake
diff --git a/infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake b/infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake
new file mode 100644
index 000000000..f8a85effc
--- /dev/null
+++ b/infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_FlatBuffersSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ # NOTE TensorFlow 1.12 downloads FlatBuffers from the following URL
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(FLATBUFFERS_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/flatbuffers/archive/1f5eae5d6a135ff6811724f6c57f911d1f46bb15.tar.gz)
+ ExternalSource_Get("flatbuffers" ${DOWNLOAD_FLATBUFFERS} ${FLATBUFFERS_URL})
+
+ set(FlatBuffersSource_DIR ${flatbuffers_SOURCE_DIR} PARENT_SCOPE)
+ set(FlatBuffersSource_FOUND ${flatbuffers_SOURCE_GET} PARENT_SCOPE)
+endfunction(_FlatBuffersSource_import)
+
+_FlatBuffersSource_import()
diff --git a/infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake b/infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake
new file mode 100644
index 000000000..51b8ff993
--- /dev/null
+++ b/infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_GEMMLowpSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ # NOTE TensorFlow 1.12 uses the following URL
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(GEMMLOWP_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/gemmlowp/archive/38ebac7b059e84692f53e5938f97a9943c120d98.tar.gz)
+ ExternalSource_Get("gemmlowp" ${DOWNLOAD_GEMMLOWP} ${GEMMLOWP_URL})
+
+ set(GEMMLowpSource_DIR ${gemmlowp_SOURCE_DIR} PARENT_SCOPE)
+ set(GEMMLowpSource_FOUND ${gemmlowp_SOURCE_GET} PARENT_SCOPE)
+endfunction(_GEMMLowpSource_import)
+
+_GEMMLowpSource_import()
diff --git a/infra/nnfw/cmake/packages/GTestConfig.cmake b/infra/nnfw/cmake/packages/GTestConfig.cmake
new file mode 100644
index 000000000..990a3d52e
--- /dev/null
+++ b/infra/nnfw/cmake/packages/GTestConfig.cmake
@@ -0,0 +1,43 @@
+if(${BUILD_GTEST})
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(ExternalProjectTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(GTEST_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/googletest/archive/release-1.8.0.tar.gz)
+ ExternalSource_Get("gtest" TRUE ${GTEST_URL})
+
+ # gtest_SOURCE_DIR is used in the gtest subdirectory's CMake scripts
+ set(sourcedir_gtest ${gtest_SOURCE_DIR})
+ unset(gtest_SOURCE_DIR)
+
+ if(NOT TARGET gtest_main)
+ add_extdirectory(${sourcedir_gtest} gtest EXCLUDE_FROM_ALL)
+ endif(NOT TARGET gtest_main)
+
+ set(GTest_FOUND TRUE)
+ return()
+endif(${BUILD_GTEST})
+
+### Find and use pre-installed Google Test
+find_package(GTest)
+find_package(Threads)
+
+if(${GTEST_FOUND} AND TARGET Threads::Threads)
+ if(NOT TARGET gtest)
+ add_library(gtest INTERFACE)
+ target_include_directories(gtest INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest INTERFACE ${GTEST_LIBRARIES} Threads::Threads)
+ endif(NOT TARGET gtest)
+
+ if(NOT TARGET gtest_main)
+ add_library(gtest_main INTERFACE)
+ target_include_directories(gtest_main INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest_main INTERFACE gtest)
+ target_link_libraries(gtest_main INTERFACE ${GTEST_MAIN_LIBRARIES})
+ endif(NOT TARGET gtest_main)
+
+ # TODO Check whether this command is necessary or not
+ include_directories(${GTEST_INCLUDE_DIR})
+ set(GTest_FOUND TRUE)
+endif(${GTEST_FOUND} AND TARGET Threads::Threads)
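Both branches above end with a usable gtest_main target, so test executables can be declared the same way whether Google Test was downloaded or pre-installed; a minimal sketch (hypothetical test target):

  # Hypothetical unit test linking the gtest_main target guaranteed by
  # either branch of GTestConfig.cmake above.
  add_executable(kernel_test kernel_test.cc)
  target_link_libraries(kernel_test PRIVATE gtest_main)
  add_test(NAME kernel_test COMMAND kernel_test)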
diff --git a/infra/nnfw/cmake/packages/HDF5Config.cmake b/infra/nnfw/cmake/packages/HDF5Config.cmake
new file mode 100644
index 000000000..1f90deaf9
--- /dev/null
+++ b/infra/nnfw/cmake/packages/HDF5Config.cmake
@@ -0,0 +1,31 @@
+unset(HDF5_DIR CACHE)
+find_package(HDF5 QUIET)
+
+if (NOT HDF5_FOUND)
+ # Give a second chance for systems where the find_package config mode fails
+ unset(HDF5_FOUND)
+
+ find_path(HDF5_INCLUDE_DIRS NAMES hdf5.h PATH_SUFFIXES include/hdf5/serial)
+
+ if (NOT HDF5_INCLUDE_DIRS)
+ set(HDF5_FOUND FALSE)
+ return()
+ endif()
+
+ if (HDF5_USE_STATIC_LIBRARIES)
+ find_library(HDF5_LIBRARIES libhdf5.a)
+ else (HDF5_USE_STATIC_LIBRARIES)
+ find_library(HDF5_LIBRARIES libhdf5.so)
+ endif(HDF5_USE_STATIC_LIBRARIES)
+
+ if (NOT HDF5_LIBRARIES)
+ set(HDF5_FOUND FALSE)
+ return()
+ endif()
+ list(APPEND HDF5_LIBRARIES "sz" "z" "dl" "m")
+
+ set(HDF5_FOUND TRUE)
+endif()
+
+# Append the missing libaec, which is required by libsz, which in turn is required by libhdf5
+list(APPEND HDF5_LIBRARIES "aec")
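Because the fallback branch only populates variables (no imported targets), consumers should stick to HDF5_INCLUDE_DIRS and HDF5_LIBRARIES; a sketch with a hypothetical target:

  # Hypothetical consumer; this config guarantees only the HDF5_* variables,
  # not imported HDF5 targets.
  add_executable(dump_model dump_model.cc)
  target_include_directories(dump_model PRIVATE ${HDF5_INCLUDE_DIRS})
  target_link_libraries(dump_model PRIVATE ${HDF5_LIBRARIES})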
diff --git a/infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake b/infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake
new file mode 100644
index 000000000..114a51245
--- /dev/null
+++ b/infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_NEON2SSESource_import)
+ # TODO Remove this workaround once target preset is ready
+ if(NOT (TARGET_ARCH_BASE STREQUAL "x86_64"))
+ set(NEON2SSESource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT (TARGET_ARCH_BASE STREQUAL "x86_64"))
+
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ # NOTE TensorFlow 1.12 downloads NEON2SSE from the following URL
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(NEON2SSE_URL ${EXTERNAL_DOWNLOAD_SERVER}/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz)
+ ExternalSource_Get("neon_2_sse" ${DOWNLOAD_NEON2SSE} ${NEON2SSE_URL})
+
+ set(NEON2SSESource_DIR ${neon_2_sse_SOURCE_DIR} PARENT_SCOPE)
+ set(NEON2SSESource_FOUND ${neon_2_sse_SOURCE_GET} PARENT_SCOPE)
+endfunction(_NEON2SSESource_import)
+
+_NEON2SSESource_import()
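Since this import deliberately reports NEON2SSESource_FOUND as FALSE off x86_64, callers are expected to guard on the FOUND flag; a minimal sketch of that consumption pattern:

  # Sketch: consume NEON2SSE only when the import above reported success.
  nnfw_find_package(NEON2SSESource QUIET)
  if(NEON2SSESource_FOUND)
    include_directories(${NEON2SSESource_DIR})
  endif()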
diff --git a/infra/nnfw/cmake/packages/NNPACKConfig.cmake b/infra/nnfw/cmake/packages/NNPACKConfig.cmake
new file mode 100644
index 000000000..97382b71e
--- /dev/null
+++ b/infra/nnfw/cmake/packages/NNPACKConfig.cmake
@@ -0,0 +1,51 @@
+function(_NNPACK_Import)
+ nnfw_find_package(NNPACKSource QUIET)
+
+ if(NOT NNPACK_SOURCE_FOUND)
+ set(NNPACK_FOUND FALSE PARENT_SCOPE)
+ message(STATUS "NNPACK not found")
+ return()
+ endif(NOT NNPACK_SOURCE_FOUND)
+
+ nnfw_find_package(CpuinfoSource REQUIRED)
+ nnfw_find_package(FP16Source REQUIRED)
+ nnfw_find_package(FXdivSource REQUIRED)
+ nnfw_find_package(PSIMDSource REQUIRED)
+ nnfw_find_package(PthreadpoolSource REQUIRED)
+ nnfw_find_package(SixSource REQUIRED)
+ nnfw_find_package(Enum34Source REQUIRED)
+ nnfw_find_package(OpcodesSource REQUIRED)
+ nnfw_find_package(PeachpySource QUIET)
+
+ if(NOT PYTHON_PEACHPY_SOURCE_FOUND)
+ set(NNPACK_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT PYTHON_PEACHPY_SOURCE_FOUND)
+
+ # workaround for CI
+ set(THREADS_PTHREAD_ARG "2" CACHE STRING "Forcibly set by CMakeLists.txt." FORCE)
+ if(NOT TARGET nnpack)
+ # Allows us to build NNPACK as part of this build
+ set(NNPACK_BUILD_TESTS OFF CACHE BOOL "")
+ set(NNPACK_BUILD_BENCHMARKS OFF CACHE BOOL "")
+ set(NNPACK_LIBRARY_TYPE "static" CACHE STRING "")
+ set(PTHREADPOOL_LIBRARY_TYPE "static" CACHE STRING "")
+ set(CPUINFO_LIBRARY_TYPE "static" CACHE STRING "")
+ nnfw_include(ExternalProjectTools)
+ add_extdirectory("${NNPACK_SOURCE_DIR}" nnpack EXCLUDE_FROM_ALL)
+ # We build static versions of nnpack and pthreadpool but link
+ # them into a shared library (high-perf-backend), so they need PIC.
+ set_property(TARGET nnpack PROPERTY POSITION_INDEPENDENT_CODE ON)
+ set_property(TARGET pthreadpool PROPERTY POSITION_INDEPENDENT_CODE ON)
+ set_property(TARGET cpuinfo PROPERTY POSITION_INDEPENDENT_CODE ON)
+ endif()
+
+ set(NNPACK_FOUND TRUE PARENT_SCOPE)
+ set(NNPACK_INCLUDE_DIRS
+ $<TARGET_PROPERTY:nnpack,INCLUDE_DIRECTORIES>
+ $<TARGET_PROPERTY:pthreadpool,INCLUDE_DIRECTORIES> PARENT_SCOPE)
+ set(NNPACK_LIBRARIES $<TARGET_FILE:nnpack> $<TARGET_FILE:cpuinfo> PARENT_SCOPE)
+
+endfunction(_NNPACK_Import)
+
+_NNPACK_Import()
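The PIC properties above are what make the NNPACK_LIBRARIES list (plain static archives) linkable into a shared object; a sketch of the kind of consumer this enables, with hypothetical target and source names:

  # Hypothetical shared backend; without POSITION_INDEPENDENT_CODE on
  # nnpack/pthreadpool/cpuinfo this link would fail on most ELF platforms.
  add_library(high_perf_backend SHARED backend.cc)
  target_include_directories(high_perf_backend PRIVATE ${NNPACK_INCLUDE_DIRS})
  target_link_libraries(high_perf_backend PRIVATE ${NNPACK_LIBRARIES})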
diff --git a/infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake b/infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake
new file mode 100644
index 000000000..b6b5b01bd
--- /dev/null
+++ b/infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_NNPACKSource_import)
+ if(NOT DOWNLOAD_NNPACK)
+ set(NNPACKSource_FOUND FALSE PARENT_SCOPE)
+ message(WARNING "NNPACK not downloaded")
+ return()
+ endif(NOT DOWNLOAD_NNPACK)
+
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(NNPACK_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/NNPACK/archive/c039579abe21f5756e0f0e45e8e767adccc11852.tar.gz)
+ ExternalSource_Get("NNPACK" ${DOWNLOAD_NNPACK} ${NNPACK_URL})
+
+ set(NNPACK_SOURCE_DIR ${NNPACK_SOURCE_DIR} PARENT_SCOPE)
+ set(NNPACK_INCLUDE_DIR ${NNPACK_SOURCE_DIR}/include PARENT_SCOPE)
+ set(NNPACK_SOURCE_FOUND ${NNPACK_SOURCE_GET} PARENT_SCOPE)
+endfunction(_NNPACKSource_import)
+
+_NNPACKSource_import()
diff --git a/infra/nnfw/cmake/packages/Nonius/html_report_template.g.h++ b/infra/nnfw/cmake/packages/Nonius/html_report_template.g.h++
new file mode 100644
index 000000000..fa159c6a4
--- /dev/null
+++ b/infra/nnfw/cmake/packages/Nonius/html_report_template.g.h++
@@ -0,0 +1,433 @@
+"<!DOCTYPE html>\n"
+"<html>\n"
+" <head>\n"
+" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n"
+" <title>{$title} - nonius report</title>\n"
+" <style type=\"text/css\"> body {\n"
+" left: 0;\n"
+" right: 0;\n"
+" top: 0;\n"
+" bottom: 0;\n"
+" margin: 0;\n"
+"}\n"
+"\n"
+".plotly .modebar {\n"
+" margin-top: 2em !important;\n"
+"}\n"
+"\n"
+"#header {\n"
+" background-color: black;\n"
+" z-index: 2;\n"
+" position: absolute;\n"
+" left: 0;\n"
+" right: 0;\n"
+" color: white;\n"
+" font-family: monospace;\n"
+" padding-left: 0;\n"
+" text-align: center;\n"
+" font-size: 1.2em;\n"
+" font-weight: bold;\n"
+" line-height: 2em;\n"
+"}\n"
+"\n"
+".select {\n"
+" position: relative;\n"
+" display: inline-block;\n"
+" font-size: 1em;\n"
+" font-weight: bold;\n"
+" font-size: 1em;\n"
+"}\n"
+"\n"
+".select .arr {\n"
+" background: #000;\n"
+" bottom: 5px;\n"
+" position: absolute;\n"
+" right: 5px;\n"
+" top: 5px;\n"
+" width: 0px;\n"
+" pointer-events: none;\n"
+"}\n"
+"\n"
+".select .arr:before {\n"
+" content: '';\n"
+" position: absolute;\n"
+" top: 50%;\n"
+" right: 14px;\n"
+" margin-top: -5px;\n"
+" pointer-events: none;\n"
+" border-top: 10px solid white;\n"
+" border-left: 10px solid transparent;\n"
+" border-right: 10px solid transparent;\n"
+"}\n"
+"\n"
+".select .arr:after {\n"
+" content: '';\n"
+" position: absolute;\n"
+" top: 50%;\n"
+" right: 18px;\n"
+" margin-top: -5px;\n"
+" pointer-events: none;\n"
+" border-top: 6px solid black;\n"
+" border-left: 6px solid transparent;\n"
+" border-right: 6px solid transparent;\n"
+"}\n"
+"\n"
+".select select {\n"
+" outline: none;\n"
+" -webkit-appearance: none;\n"
+" display: block;\n"
+" padding: 0 3em 0 1.5em;\n"
+" margin: 0.3em;\n"
+"\n"
+" transition: border-color 0.2s;\n"
+" border: 2px solid #aaa;\n"
+" border-radius: 0px;\n"
+"\n"
+" background: black;\n"
+" color: white;\n"
+" font-family: inherit;\n"
+" font-size: inherit;\n"
+" line-height: inherit;\n"
+" font-weight: inherit;\n"
+"}\n"
+"\n"
+".select select:focus {\n"
+" border: 2px solid white;\n"
+" color: white;\n"
+"}\n"
+"\n"
+"#plot {\n"
+" position: absolute;\n"
+" min-width: 300px;\n"
+" min-height: 200px;\n"
+" left: 0;\n"
+" right: 0;\n"
+" top: 2em;\n"
+" bottom: 1em;\n"
+"}\n"
+"\n"
+"#footer {\n"
+" position: absolute;\n"
+" bottom: 0;\n"
+" left: 0;\n"
+" right: 0;\n"
+" font-family: monospace;\n"
+" font-size: 0.9em;\n"
+" text-align: center;\n"
+" text-transform: lowercase;\n"
+" background-color: #bbb;\n"
+" line-height: 2em;\n"
+"}\n"
+" </style>\n"
+" <script type=\"text/javascript\"> /**\n"
+"* plotly.js (basic - minified) v1.15.0\n"
+"* Copyright 2012-2016, Plotly, Inc.\n"
+"* All rights reserved.\n"
+"* Licensed under the MIT license\n"
+"*/\n"
+,
+"!function(t){if(\"object\"==typeof exports&&\"undefined\"!=typeof module)module.exports=t();else if(\"function\"==typeof define&&define.amd)define([],t);else{var e;e=\"undefined\"!=typeof window?window:\"undefined\"!=typeof global?global:\"undefined\"!=typeof self?self:this,e.Plotly=t()}}(function(){var t;return function e(t,n,r){function a(i,l){if(!n[i]){if(!t[i]){var s=\"function\"==typeof require&&require;if(!l&&s)return s(i,!0);if(o)return o(i,!0);var c=new Error(\"Cannot find module '\"+i+\"'\");throw c.code=\"MODULE_NOT_FOUND\",c}var u=n[i]={exports:{}};t[i][0].call(u.exports,function(e){var n=t[i][1][e];return a(n?n:e)},u,u.exports,e,t,n,r)}return n[i].exports}for(var o=\"function\"==typeof require&&require,i=0;i<r.length;i++)a(r[i]);return a}({1:[function(t,e,n){\"use strict\";var r=t(\"../src/plotly\"),a={\"X,X div\":\"font-family:'Open Sans', verdana, arial, sans-serif;margin:0;padding:0;\",\"X input,X button\":\"font-family:'Open Sans', verdana, arial, sans-serif;\",\"X input:focus,X button:focus\":\"outline:none;\",\"X a\":\"text-decoration:none;\",\"X a:hover\":\"text-decoration:none;\",\"X .crisp\":\"shape-rendering:crispEdges;\",\"X .user-select-none\":\"-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none;\",\"X svg\":\"overflow:hidden;\",\"X svg a\":\"fill:#447adb;\",\"X svg a:hover\":\"fill:#3c6dc5;\",\"X .main-svg\":\"position:absolute;top:0;left:0;pointer-events:none;\",\"X .main-svg .draglayer\":\"pointer-events:all;\",\"X .cursor-pointer\":\"cursor:pointer;\",\"X .cursor-crosshair\":\"cursor:crosshair;\",\"X .cursor-move\":\"cursor:move;\",\"X .cursor-col-resize\":\"cursor:col-resize;\",\"X .cursor-row-resize\":\"cursor:row-resize;\",\"X .cursor-ns-resize\":\"cursor:ns-resize;\",\"X .cursor-ew-resize\":\"cursor:ew-resize;\",\"X .cursor-sw-resize\":\"cursor:sw-resize;\",\"X .cursor-s-resize\":\"cursor:s-resize;\",\"X .cursor-se-resize\":\"cursor:se-resize;\",\"X .cursor-w-resize\":\"cursor:w-resize;\",\"X .cursor-e-resize\":\"cursor:e-resize;\",\"X .cursor-nw-resize\":\"cursor:nw-resize;\",\"X .cursor-n-resize\":\"cursor:n-resize;\",\"X .cursor-ne-resize\":\"cursor:ne-resize;\",\"X .modebar\":\"position:absolute;top:2px;right:2px;z-index:1001;background:rgba(255,255,255,0.7);\",\"X .modebar--hover\":\"opacity:0;-webkit-transition:opacity 0.3s ease 0s;-moz-transition:opacity 0.3s ease 0s;-ms-transition:opacity 0.3s ease 0s;-o-transition:opacity 0.3s ease 0s;transition:opacity 0.3s ease 0s;\",\"X:hover .modebar--hover\":\"opacity:1;\",\"X .modebar-group\":\"float:left;display:inline-block;box-sizing:border-box;margin-left:8px;position:relative;vertical-align:middle;white-space:nowrap;\",\"X .modebar-group:first-child\":\"margin-left:0px;\",\"X .modebar-btn\":\"position:relative;font-size:16px;padding:3px 4px;cursor:pointer;line-height:normal;box-sizing:border-box;\",\"X .modebar-btn svg\":\"position:relative;top:2px;\",\"X .modebar-btn path\":\"fill:rgba(0,31,95,0.3);\",\"X .modebar-btn.active path,X .modebar-btn:hover path\":\"fill:rgba(0,22,72,0.5);\",\"X .modebar-btn.modebar-btn--logo\":\"padding:3px 1px;\",\"X .modebar-btn.modebar-btn--logo path\":\"fill:#447adb !important;\",\"X [data-title]:before,X [data-title]:after\":\"position:absolute;-webkit-transform:translate3d(0, 0, 0);-moz-transform:translate3d(0, 0, 0);-ms-transform:translate3d(0, 0, 0);-o-transform:translate3d(0, 0, 0);transform:translate3d(0, 0, 0);display:none;opacity:0;z-index:1001;pointer-events:none;top:110%;right:50%;\",\"X 
[data-title]:hover:before,X [data-title]:hover:after\":\"display:block;opacity:1;\",\"X [data-title]:before\":\"content:'';position:absolute;background:transparent;border:6px solid transparent;z-index:1002;margin-top:-12px;border-bottom-color:#69738a;margin-right:-6px;\",\"X [data-title]:after\":\"content:attr(data-title);background:#69738a;color:white;padding:8px 10px;font-size:12px;line-height:12px;white-space:nowrap;margin-right:-18px;border-radius:2px;\",\"X .select-outline\":\"fill:none;stroke-width:1;shape-rendering:crispEdges;\",\"X .select-outline-1\":\"stroke:white;\",\"X .select-outline-2\":\"stroke:black;stroke-dasharray:2px 2px;\",Y:\"font-family:'Open Sans';position:fixed;top:50px;right:20px;z-index:10000;font-size:10pt;max-width:180px;\",\"Y p\":\"margin:0;\",\"Y .notifier-note\":\"min-width:180px;max-width:250px;border:1px solid #fff;z-index:3000;margin:0;background-color:#8c97af;background-color:rgba(140,151,175,0.9);color:#fff;padding:10px;\",\"Y .notifier-close\":\"color:#fff;opacity:0.8;float:right;padding:0 5px;background:none;border:none;font-size:20px;font-weight:bold;line-height:20px;\",\"Y .notifier-close:hover\":\"color:#444;text-decoration:none;cursor:pointer;\"};for(var o in a){var i=o.replace(/^,/,\" ,\").replace(/X/g,\".js-plotly-plot .plotly\").replace(/Y/g,\".plotly-notifier\");r.Lib.addStyleRule(i,a[o])}},{\"../src/plotly\":107}],2:[function(t,e,n){\"use strict\";e.exports={undo:{width:857.1,path:\"m857 350q0-87-34-166t-91-137-137-92-166-34q-96 0-183 41t-147 114q-4 6-4 13t5 11l76 77q6 5 14 5 9-1 13-7 41-53 100-82t126-29q58 0 110 23t92 61 61 91 22 111-22 111-61 91-92 61-110 23q-55 0-105-20t-90-57l77-77q17-16 8-38-10-23-33-23h-250q-15 0-25 11t-11 25v250q0 24 22 33 22 10 39-8l72-72q60 57 137 88t159 31q87 0 166-34t137-92 91-137 34-166z\",ascent:850,descent:-150},home:{width:928.6,path:\"m786 296v-267q0-15-11-26t-25-10h-214v214h-143v-214h-214q-15 0-25 10t-11 26v267q0 1 0 2t0 2l321 264 321-264q1-1 1-4z m124 39l-34-41q-5-5-12-6h-2q-7 0-12 3l-386 322-386-322q-7-4-13-4-7 2-12 7l-35 41q-4 5-3 13t6 12l401 334q18 15 42 15t43-15l136-114v109q0 8 5 13t13 5h107q8 0 13-5t5-13v-227l122-102q5-5 6-12t-4-13z\",ascent:850,descent:-150},\"camera-retro\":{width:1e3,path:\"m518 386q0 8-5 13t-13 5q-37 0-63-27t-26-63q0-8 5-13t13-5 12 5 5 13q0 23 16 38t38 16q8 0 13 5t5 13z m125-73q0-59-42-101t-101-42-101 42-42 101 42 101 101 42 101-42 42-101z m-572-320h858v71h-858v-71z m643 320q0 89-62 152t-152 62-151-62-63-152 63-151 151-63 152 63 62 151z m-571 358h214v72h-214v-72z m-72-107h858v143h-462l-36-71h-360v-72z m929 143v-714q0-30-21-51t-50-21h-858q-29 0-50 21t-21 51v714q0 30 21 51t50 21h858q29 0 50-21t21-51z\",ascent:850,descent:-150},zoombox:{width:1e3,path:\"m1000-25l-250 251c40 63 63 138 63 218 0 224-182 406-407 406-224 0-406-182-406-406s183-406 407-406c80 0 155 22 218 62l250-250 125 125z m-812 250l0 438 437 0 0-438-437 0z m62 375l313 0 0-312-313 0 0 312z\",ascent:850,descent:-150},pan:{width:1e3,path:\"m1000 350l-187 188 0-125-250 0 0 250 125 0-188 187-187-187 125 0 0-250-250 0 0 125-188-188 186-187 0 125 252 0 0-250-125 0 187-188 188 188-125 0 0 250 250 0 0-126 187 188z\",ascent:850,descent:-150},zoom_plus:{width:1e3,path:\"m1 787l0-875 875 0 0 875-875 0z m687-500l-187 0 0-187-125 0 0 187-188 0 0 125 188 0 0 187 125 0 0-187 187 0 0-125z\",ascent:850,descent:-150},zoom_minus:{width:1e3,path:\"m0 788l0-876 875 0 0 876-875 0z m688-500l-500 0 0 125 500 0 0-125z\",ascent:850,descent:-150},autoscale:{width:1e3,path:\"m250 850l-187 0-63 0 0-62 0-188 63 0 0 188 187 0 0 62z m688 0l-188 0 0-62 188 
0 0-188 62 0 0 188 0 62-62 0z m-875-938l0 188-63 0 0-188 0-62 63 0 187 0 0 62-187 0z m875 188l0-188-188 0 0-62 188 0 62 0 0 62 0 188-62 0z m-125 188l-1 0-93-94-156 156 156 156 92-93 2 0 0 250-250 0 0-2 93-92-156-156-156 156 94 92 0 2-250 0 0-250 0 0 93 93 157-156-157-156-93 94 0 0 0-250 250 0 0 0-94 93 156 157 156-157-93-93 0 0 250 0 0 250z\",ascent:850,descent:-150},tooltip_basic:{width:1500,path:\"m375 725l0 0-375-375 375-374 0-1 1125 0 0 750-1125 0z\",ascent:850,descent:-150},tooltip_compare:{width:1125,path:\"m187 786l0 2-187-188 188-187 0 0 937 0 0 373-938 0z m0-499l0 1-187-188 188-188 0 0 937 0 0 376-938-1z\",ascent:850,descent:-150},plotlylogo:{width:1542,path:\"m0-10h182v-140h-182v140z m228 146h183v-286h-183v286z m225 714h182v-1000h-182v1000z m225-285h182v-715h-182v715z m225 142h183v-857h-183v857z m231-428h182v-429h-182v429z m225-291h183v-138h-183v138z\",ascent:850,descent:-150},\"z-axis\":{width:1e3,path:\"m833 5l-17 108v41l-130-65 130-66c0 0 0 38 0 39 0-1 36-14 39-25 4-15-6-22-16-30-15-12-39-16-56-20-90-22-187-23-279-23-261 0-341 34-353 59 3 60 228 110 228 110-140-8-351-35-351-116 0-120 293-142 474-142 155 0 477 22 477 142 0 50-74 79-163 96z m-374 94c-58-5-99-21-99-40 0-24 65-43 144-43 79 0 143 19 143 43 0 19-42 34-98 40v216h87l-132 135-133-135h88v-216z m167 515h-136v1c16 16 31 34 46 52l84 109v54h-230v-71h124v-1c-16-17-28-32-44-51l-89-114v-51h245v72z\",ascent:850,descent:-150},\"3d_rotate\":{width:1e3,path:\"m922 660c-5 4-9 7-14 11-359 263-580-31-580-31l-102 28 58-400c0 1 1 1 2 2 118 108 351 249 351 249s-62 27-100 42c88 83 222 183 347 122 16-8 30-17 44-27-2 1-4 2-6 4z m36-329c0 0 64 229-88 296-62 27-124 14-175-11 157-78 225-208 249-266 8-19 11-31 11-31 2 5 6 15 11 32-5-13-8-20-8-20z m-775-239c70-31 117-50 198-32-121 80-199 346-199 346l-96-15-58-12c0 0 55-226 155-287z m603 133l-317-139c0 0 4-4 19-14 7-5 24-15 24-15s-177-147-389 4c235-287 536-112 536-112l31-22 100 299-4-1z m-298-153c6-4 14-9 24-15 0 0-17 10-24 15z\",ascent:850,descent:-150},camera:{width:1e3,path:\"m500 450c-83 0-150-67-150-150 0-83 67-150 150-150 83 0 150 67 150 150 0 83-67 150-150 150z m400 150h-120c-16 0-34 13-39 29l-31 93c-6 15-23 28-40 28h-340c-16 0-34-13-39-28l-31-94c-6-15-23-28-40-28h-120c-55 0-100-45-100-100v-450c0-55 45-100 100-100h800c55 0 100 45 100 100v450c0 55-45 100-100 100z m-400-550c-138 0-250 112-250 250 0 138 112 250 250 250 138 0 250-112 250-250 0-138-112-250-250-250z m365 380c-19 0-35 16-35 35 0 19 16 35 35 35 19 0 35-16 35-35 0-19-16-35-35-35z\",ascent:850,descent:-150},movie:{width:1e3,path:\"m938 413l-188-125c0 37-17 71-44 94 64 38 107 107 107 187 0 121-98 219-219 219-121 0-219-98-219-219 0-61 25-117 66-156h-115c30 33 49 76 49 125 0 103-84 187-187 187s-188-84-188-187c0-57 26-107 65-141-38-22-65-62-65-109v-250c0-70 56-126 125-126h500c69 0 125 56 125 126l188-126c34 0 62 28 62 63v375c0 35-28 63-62 63z m-750 0c-69 0-125 56-125 125s56 125 125 125 125-56 125-125-56-125-125-125z m406-1c-87 0-157 70-157 157 0 86 70 156 157 156s156-70 156-156-70-157-156-157z\",ascent:850,descent:-150},question:{width:857.1,path:\"m500 82v107q0 8-5 13t-13 5h-107q-8 0-13-5t-5-13v-107q0-8 5-13t13-5h107q8 0 13 5t5 13z m143 375q0 49-31 91t-77 65-95 23q-136 0-2"
+,
+"07-119-9-14 4-24l74-55q4-4 10-4 9 0 14 7 30 38 48 51 19 14 48 14 27 0 48-15t21-33q0-21-11-34t-38-25q-35-16-65-48t-29-70v-20q0-8 5-13t13-5h107q8 0 13 5t5 13q0 10 12 27t30 28q18 10 28 16t25 19 25 27 16 34 7 45z m214-107q0-117-57-215t-156-156-215-58-216 58-155 156-58 215 58 215 155 156 216 58 215-58 156-156 57-215z\",ascent:850,descent:-150},disk:{width:857.1,path:\"m214-7h429v214h-429v-214z m500 0h72v500q0 8-6 21t-11 20l-157 156q-5 6-19 12t-22 5v-232q0-22-15-38t-38-16h-322q-22 0-37 16t-16 38v232h-72v-714h72v232q0 22 16 38t37 16h465q22 0 38-16t15-38v-232z m-214 518v178q0 8-5 13t-13 5h-107q-7 0-13-5t-5-13v-178q0-8 5-13t13-5h107q7 0 13 5t5 13z m357-18v-518q0-22-15-38t-38-16h-750q-23 0-38 16t-16 38v750q0 22 16 38t38 16h517q23 0 50-12t42-26l156-157q16-15 27-42t11-49z\",ascent:850,descent:-150},lasso:{width:1031,path:\"m1018 538c-36 207-290 336-568 286-277-48-473-256-436-463 10-57 36-108 76-151-13-66 11-137 68-183 34-28 75-41 114-42l-55-70 0 0c-2-1-3-2-4-3-10-14-8-34 5-45 14-11 34-8 45 4 1 1 2 3 2 5l0 0 113 140c16 11 31 24 45 40 4 3 6 7 8 11 48-3 100 0 151 9 278 48 473 255 436 462z m-624-379c-80 14-149 48-197 96 42 42 109 47 156 9 33-26 47-66 41-105z m-187-74c-19 16-33 37-39 60 50-32 109-55 174-68-42-25-95-24-135 8z m360 75c-34-7-69-9-102-8 8 62-16 128-68 170-73 59-175 54-244-5-9 20-16 40-20 61-28 159 121 317 333 354s407-60 434-217c28-159-121-318-333-355z\",ascent:850,descent:-150},selectbox:{width:1e3,path:\"m0 850l0-143 143 0 0 143-143 0z m286 0l0-143 143 0 0 143-143 0z m285 0l0-143 143 0 0 143-143 0z m286 0l0-143 143 0 0 143-143 0z m-857-286l0-143 143 0 0 143-143 0z m857 0l0-143 143 0 0 143-143 0z m-857-285l0-143 143 0 0 143-143 0z m857 0l0-143 143 0 0 143-143 0z m-857-286l0-143 143 0 0 143-143 0z m286 0l0-143 143 0 0 143-143 0z m285 0l0-143 143 0 0 143-143 0z m286 0l0-143 143 0 0 143-143 0z\",ascent:850,descent:-150}}},{}],3:[function(t,e,n){e.exports=t(\"../src/traces/bar\")},{\"../src/traces/bar\":148}],4:[function(t,e,n){e.exports=t(\"../src/core\")},{\"../src/core\":83}],5:[function(t,e,n){\"use strict\";var r=t(\"./core\");r.register([t(\"./bar\"),t(\"./pie\")]),e.exports=r},{\"./bar\":3,\"./core\":4,\"./pie\":6}],6:[function(t,e,n){e.exports=t(\"../src/traces/pie\")},{\"../src/traces/pie\":160}],7:[function(t,e,n){function r(){this._events=this._events||{},this._maxListeners=this._maxListeners||void 0}function a(t){return\"function\"==typeof t}function o(t){return\"number\"==typeof t}function i(t){return\"object\"==typeof t&&null!==t}function l(t){return void 0===t}e.exports=r,r.EventEmitter=r,r.prototype._events=void 0,r.prototype._maxListeners=void 0,r.defaultMaxListeners=10,r.prototype.setMaxListeners=function(t){if(!o(t)||0>t||isNaN(t))throw TypeError(\"n must be a positive number\");return this._maxListeners=t,this},r.prototype.emit=function(t){var e,n,r,o,s,c;if(this._events||(this._events={}),\"error\"===t&&(!this._events.error||i(this._events.error)&&!this._events.error.length)){if(e=arguments[1],e instanceof Error)throw e;throw TypeError('Uncaught, unspecified \"error\" event.')}if(n=this._events[t],l(n))return!1;if(a(n))switch(arguments.length){case 1:n.call(this);break;case 2:n.call(this,arguments[1]);break;case 3:n.call(this,arguments[1],arguments[2]);break;default:o=Array.prototype.slice.call(arguments,1),n.apply(this,o)}else if(i(n))for(o=Array.prototype.slice.call(arguments,1),c=n.slice(),r=c.length,s=0;r>s;s++)c[s].apply(this,o);return!0},r.prototype.addListener=function(t,e){var n;if(!a(e))throw TypeError(\"listener must be a function\");return 
this._events||(this._events={}),this._events.newListener&&this.emit(\"newListener\",t,a(e.listener)?e.listener:e),this._events[t]?i(this._events[t])?this._events[t].push(e):this._events[t]=[this._events[t],e]:this._events[t]=e,i(this._events[t])&&!this._events[t].warned&&(n=l(this._maxListeners)?r.defaultMaxListeners:this._maxListeners,n&&n>0&&this._events[t].length>n&&(this._events[t].warned=!0,console.error(\"(node) warning: possible EventEmitter memory leak detected. %d listeners added. Use emitter.setMaxListeners() to increase limit.\",this._events[t].length),\"function\"==typeof console.trace&&console.trace())),this},r.prototype.on=r.prototype.addListener,r.prototype.once=function(t,e){function n(){this.removeListener(t,n),r||(r=!0,e.apply(this,arguments))}if(!a(e))throw TypeError(\"listener must be a function\");var r=!1;return n.listener=e,this.on(t,n),this},r.prototype.removeListener=function(t,e){var n,r,o,l;if(!a(e))throw TypeError(\"listener must be a function\");if(!this._events||!this._events[t])return this;if(n=this._events[t],o=n.length,r=-1,n===e||a(n.listener)&&n.listener===e)delete this._events[t],this._events.removeListener&&this.emit(\"removeListener\",t,e);else if(i(n)){for(l=o;l-- >0;)if(n[l]===e||n[l].listener&&n[l].listener===e){r=l;break}if(0>r)return this;1===n.length?(n.length=0,delete this._events[t]):n.splice(r,1),this._events.removeListener&&this.emit(\"removeListener\",t,e)}return this},r.prototype.removeAllListeners=function(t){var e,n;if(!this._events)return this;if(!this._events.removeListener)return 0===arguments.length?this._events={}:this._events[t]&&delete this._events[t],this;if(0===arguments.length){for(e in this._events)\"removeListener\"!==e&&this.removeAllListeners(e);return this.removeAllListeners(\"removeListener\"),this._events={},this}if(n=this._events[t],a(n))this.removeListener(t,n);else if(n)for(;n.length;)this.removeListener(t,n[n.length-1]);return delete this._events[t],this},r.prototype.listeners=function(t){var e;return e=this._events&&this._events[t]?a(this._events[t])?[this._events[t]]:this._events[t].slice():[]},r.prototype.listenerCount=function(t){if(this._events){var e=this._events[t];if(a(e))return 1;if(e)return e.length}return 0},r.listenerCount=function(t,e){return t.listenerCount(e)}},{}],8:[function(t,e,n){function r(){u=!1,l.length?c=l.concat(c):f=-1,c.length&&a()}function a(){if(!u){var t=setTimeout(r);u=!0;for(var e=c.length;e;){for(l=c,c=[];++f<e;)l&&l[f].run();f=-1,e=c.length}l=null,u=!1,clearTimeout(t)}}function o(t,e){this.fun=t,this.array=e}function i(){}var l,s=e.exports={},c=[],u=!1,f=-1;s.nextTick=function(t){var e=new Array(arguments.length-1);if(arguments.length>1)for(var n=1;n<arguments.length;n++)e[n-1]=arguments[n];c.push(new o(t,e)),1!==c.length||u||setTimeout(a,0)},o.prototype.run=function(){this.fun.apply(null,this.array)},s.title=\"browser\",s.browser=!0,s.env={},s.argv=[],s.version=\"\",s.versions={},s.on=i,s.addListener=i,s.once=i,s.off=i,s.removeListener=i,s.removeAllListeners=i,s.emit=i,s.binding=function(t){throw new Error(\"process.binding is not supported\")},s.cwd=function(){return\"/\"},s.chdir=function(t){throw new Error(\"process.chdir is not supported\")},s.umask=function(){return 0}},{}],9:[function(e,n,r){!function(){function e(t){return t&&(t.ownerDocument||t.document||t).documentElement}function r(t){return t&&(t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView)}function a(t,e){return e>t?-1:t>e?1:t>=e?0:NaN}function o(t){return null===t?NaN:+t}function 
i(t){return!isNaN(t)}function l(t){return{left:function(e,n,r,a){for(arguments.length<3&&(r=0),arguments.length<4&&(a=e.length);a>r;){var o=r+a>>>1;t(e[o],n)<0?r=o+1:a=o}return r},right:function(e,n,r,a){for(arguments.length<3&&(r=0),arguments.length<4&&(a=e.length);a>r;){var o=r+a>>>1;t(e[o],n)>0?a=o:r=o+1}return r}}}function s(t){return t.length}function c(t){for(var e=1;t*e%1;)e*=10;return e}function u(t,e){for(var n in e)Object.defineProperty(t.prototype,n,{value:e[n],enumerable:!1})}function f(){this._=Object.create(null)}function d(t){return(t+=\"\")===ki||t[0]===Mi?Mi+t:t}function h(t){return(t+=\"\")[0]===Mi?t.slice(1):t}function p(t){return d(t)in this._}function g(t){return(t=d(t))in this._&&delete this._[t]}function v(){var t=[];for(var e in this._)t.push(h(e));return t}function m(){var t=0;for(var e in this._)++t;return t}function y(){for(var t in this._)return!1;return!0}function x(){this._=Object.create(null)}function b(t){return t}function _(t,e,n){return function(){var r=n.apply(e,arguments);return r===e?t:r}}function w(t,e){if(e in t)return e;e=e.charAt(0).toUpperCase()+e.slice(1);for(var n=0,r=Ai.length;r>n;++n){var a=Ai[n]+e;if(a in t)return a}}function k(){}function M(){}function A(t){function e(){for(var e,r=n,a=-1,o=r.length;++a<o;)(e=r[a].on)&&e.apply(this,arguments);return t}var n=[],r=new f;return e.on=function(e,a){var o,i=r.get(e);return arguments.length<2?i&&i.on:(i&&(i.on=null,n=n.slice(0,o=n.indexOf(i)).concat(n.slice(o+1)),r.remove(e)),a&&n.push(r.set(e,{on:a})),t)},e}function L(){ui.event.preventDefault()}function T(){for(var t,e=ui.event;t=e.sourceEvent;)e=t;return e}function z(t){for(var e=new M,n=0,r=arguments.length;++n<r;)e[arguments[n]]=A(e);return e.of=function(n,r){return function(a){try{var o=a.sourceEvent=ui.event;a.target=t,ui.event=a,e[a.type].apply(n,r)}finally{ui.event=o}}},e}function S(t){return Ti(t,Ci),t}function E(t){return\"function\"==typeof t?t:function(){return zi(t,this)}}function C(t){return\"function\"==typeof t?t:function(){return Si(t,this)}}function O(t,e){function n(){this.removeAttribute(t)}function r(){this.removeAttributeNS(t.space,t.local)}function a(){this.setAttribute(t,e)}function o(){this.setAttributeNS(t.space,t.local,e)}function i(){var n=e.apply(this,arguments);null==n?this.removeAttribute(t):this.setAttribute(t,n)}function l(){var n=e.apply(this,arguments);null==n?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,n)}return t=ui.ns.qualify(t),null==e?t.local?r:n:\"function\"==typeof e?t.local?l:i:t.local?o:a}function P(t){return t.trim().replace(/\\s+/g,\" \")}function N(t){return new RegExp(\"(?:^|\\\\s+)\"+ui.requote(t)+\"(?:\\\\s+|$)\",\"g\")}function D(t){return(t+\"\").trim().split(/^|\\s+/)}function I(t,e){function n(){for(var n=-1;++n<a;)t[n](this,e)}function r(){for(var n=-1,r=e.apply(this,arguments);++n<a;)t[n](this,r)}t=D(t).map(R);var a=t.length;return\"function\"==typeof e?r:n}function R(t){var e=N(t);return function(n,r){if(a=n.classList)return r?a.add(t):a.remove(t);var"
+,
+" a=n.getAttribute(\"class\")||\"\";r?(e.lastIndex=0,e.test(a)||n.setAttribute(\"class\",P(a+\" \"+t))):n.setAttribute(\"class\",P(a.replace(e,\" \")))}}function j(t,e,n){function r(){this.style.removeProperty(t)}function a(){this.style.setProperty(t,e,n)}function o(){var r=e.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,n)}return null==e?r:\"function\"==typeof e?o:a}function q(t,e){function n(){delete this[t]}function r(){this[t]=e}function a(){var n=e.apply(this,arguments);null==n?delete this[t]:this[t]=n}return null==e?n:\"function\"==typeof e?a:r}function F(t){function e(){var e=this.ownerDocument,n=this.namespaceURI;return n===Oi&&e.documentElement.namespaceURI===Oi?e.createElement(t):e.createElementNS(n,t)}function n(){return this.ownerDocument.createElementNS(t.space,t.local)}return\"function\"==typeof t?t:(t=ui.ns.qualify(t)).local?n:e}function B(){var t=this.parentNode;t&&t.removeChild(this)}function H(t){return{__data__:t}}function V(t){return function(){return Ei(this,t)}}function Z(t){return arguments.length||(t=a),function(e,n){return e&&n?t(e.__data__,n.__data__):!e-!n}}function Y(t,e){for(var n=0,r=t.length;r>n;n++)for(var a,o=t[n],i=0,l=o.length;l>i;i++)(a=o[i])&&e(a,i,n);return t}function U(t){return Ti(t,Ni),t}function X(t){var e,n;return function(r,a,o){var i,l=t[o].update,s=l.length;for(o!=n&&(n=o,e=0),a>=e&&(e=a+1);!(i=l[e])&&++e<s;);return i}}function G(t,e,n){function r(){var e=this[i];e&&(this.removeEventListener(t,e,e.$),delete this[i])}function a(){var a=s(e,di(arguments));r.call(this),this.addEventListener(t,this[i]=a,a.$=n),a._=e}function o(){var e,n=new RegExp(\"^__on([^.]+)\"+ui.requote(t)+\"$\");for(var r in this)if(e=r.match(n)){var a=this[r];this.removeEventListener(e[1],a,a.$),delete this[r]}}var i=\"__on\"+t,l=t.indexOf(\".\"),s=$;l>0&&(t=t.slice(0,l));var c=Di.get(t);return c&&(t=c,s=Q),l?e?a:r:e?k:o}function $(t,e){return function(n){var r=ui.event;ui.event=n,e[0]=this.__data__;try{t.apply(this,e)}finally{ui.event=r}}}function Q(t,e){var n=$(t,e);return function(t){var e=this,r=t.relatedTarget;r&&(r===e||8&r.compareDocumentPosition(e))||n.call(e,t)}}function W(t){var n=\".dragsuppress-\"+ ++Ri,a=\"click\"+n,o=ui.select(r(t)).on(\"touchmove\"+n,L).on(\"dragstart\"+n,L).on(\"selectstart\"+n,L);if(null==Ii&&(Ii=\"onselectstart\"in t?!1:w(t.style,\"userSelect\")),Ii){var i=e(t).style,l=i[Ii];i[Ii]=\"none\"}return function(t){if(o.on(n,null),Ii&&(i[Ii]=l),t){var e=function(){o.on(a,null)};o.on(a,function(){L(),e()},!0),setTimeout(e,0)}}}function J(t,e){e.changedTouches&&(e=e.changedTouches[0]);var n=t.ownerSVGElement||t;if(n.createSVGPoint){var a=n.createSVGPoint();if(0>ji){var o=r(t);if(o.scrollX||o.scrollY){n=ui.select(\"body\").append(\"svg\").style({position:\"absolute\",top:0,left:0,margin:0,padding:0,border:\"none\"},\"important\");var i=n[0][0].getScreenCTM();ji=!(i.f||i.e),n.remove()}}return ji?(a.x=e.pageX,a.y=e.pageY):(a.x=e.clientX,a.y=e.clientY),a=a.matrixTransform(t.getScreenCTM().inverse()),[a.x,a.y]}var l=t.getBoundingClientRect();return[e.clientX-l.left-t.clientLeft,e.clientY-l.top-t.clientTop]}function K(){return ui.event.changedTouches[0].identifier}function tt(t){return t>0?1:0>t?-1:0}function et(t,e,n){return(e[0]-t[0])*(n[1]-t[1])-(e[1]-t[1])*(n[0]-t[0])}function nt(t){return t>1?0:-1>t?Bi:Math.acos(t)}function rt(t){return t>1?Zi:-1>t?-Zi:Math.asin(t)}function at(t){return((t=Math.exp(t))-1/t)/2}function ot(t){return((t=Math.exp(t))+1/t)/2}function it(t){return((t=Math.exp(2*t))-1)/(t+1)}function 
lt(t){return(t=Math.sin(t/2))*t}function st(){}function ct(t,e,n){return this instanceof ct?(this.h=+t,this.s=+e,void(this.l=+n)):arguments.length<2?t instanceof ct?new ct(t.h,t.s,t.l):kt(\"\"+t,Mt,ct):new ct(t,e,n)}function ut(t,e,n){function r(t){return t>360?t-=360:0>t&&(t+=360),60>t?o+(i-o)*t/60:180>t?i:240>t?o+(i-o)*(240-t)/60:o}function a(t){return Math.round(255*r(t))}var o,i;return t=isNaN(t)?0:(t%=360)<0?t+360:t,e=isNaN(e)?0:0>e?0:e>1?1:e,n=0>n?0:n>1?1:n,i=.5>=n?n*(1+e):n+e-n*e,o=2*n-i,new xt(a(t+120),a(t),a(t-120))}function ft(t,e,n){return this instanceof ft?(this.h=+t,this.c=+e,void(this.l=+n)):arguments.length<2?t instanceof ft?new ft(t.h,t.c,t.l):t instanceof ht?gt(t.l,t.a,t.b):gt((t=At((t=ui.rgb(t)).r,t.g,t.b)).l,t.a,t.b):new ft(t,e,n)}function dt(t,e,n){return isNaN(t)&&(t=0),isNaN(e)&&(e=0),new ht(n,Math.cos(t*=Yi)*e,Math.sin(t)*e)}function ht(t,e,n){return this instanceof ht?(this.l=+t,this.a=+e,void(this.b=+n)):arguments.length<2?t instanceof ht?new ht(t.l,t.a,t.b):t instanceof ft?dt(t.h,t.c,t.l):At((t=xt(t)).r,t.g,t.b):new ht(t,e,n)}function pt(t,e,n){var r=(t+16)/116,a=r+e/500,o=r-n/200;return a=vt(a)*nl,r=vt(r)*rl,o=vt(o)*al,new xt(yt(3.2404542*a-1.5371385*r-.4985314*o),yt(-.969266*a+1.8760108*r+.041556*o),yt(.0556434*a-.2040259*r+1.0572252*o))}function gt(t,e,n){return t>0?new ft(Math.atan2(n,e)*Ui,Math.sqrt(e*e+n*n),t):new ft(NaN,NaN,t)}function vt(t){return t>.206893034?t*t*t:(t-4/29)/7.787037}function mt(t){return t>.008856?Math.pow(t,1/3):7.787037*t+4/29}function yt(t){return Math.round(255*(.00304>=t?12.92*t:1.055*Math.pow(t,1/2.4)-.055))}function xt(t,e,n){return this instanceof xt?(this.r=~~t,this.g=~~e,void(this.b=~~n)):arguments.length<2?t instanceof xt?new xt(t.r,t.g,t.b):kt(\"\"+t,xt,ut):new xt(t,e,n)}function bt(t){return new xt(t>>16,t>>8&255,255&t)}function _t(t){return bt(t)+\"\"}function wt(t){return 16>t?\"0\"+Math.max(0,t).toString(16):Math.min(255,t).toString(16)}function kt(t,e,n){var r,a,o,i=0,l=0,s=0;if(r=/([a-z]+)\\((.*)\\)/.exec(t=t.toLowerCase()))switch(a=r[2].split(\",\"),r[1]){case\"hsl\":return n(parseFloat(a[0]),parseFloat(a[1])/100,parseFloat(a[2])/100);case\"rgb\":return e(Tt(a[0]),Tt(a[1]),Tt(a[2]))}return(o=ll.get(t))?e(o.r,o.g,o.b):(null==t||\"#\"!==t.charAt(0)||isNaN(o=parseInt(t.slice(1),16))||(4===t.length?(i=(3840&o)>>4,i=i>>4|i,l=240&o,l=l>>4|l,s=15&o,s=s<<4|s):7===t.length&&(i=(16711680&o)>>16,l=(65280&o)>>8,s=255&o)),e(i,l,s))}function Mt(t,e,n){var r,a,o=Math.min(t/=255,e/=255,n/=255),i=Math.max(t,e,n),l=i-o,s=(i+o)/2;return l?(a=.5>s?l/(i+o):l/(2-i-o),r=t==i?(e-n)/l+(n>e?6:0):e==i?(n-t)/l+2:(t-e)/l+4,r*=60):(r=NaN,a=s>0&&1>s?0:r),new ct(r,a,s)}function At(t,e,n){t=Lt(t),e=Lt(e),n=Lt(n);var r=mt((.4124564*t+.3575761*e+.1804375*n)/nl),a=mt((.2126729*t+.7151522*e+.072175*n)/rl),o=mt((.0193339*t+.119192*e+.9503041*n)/al);return ht(116*a-16,500*(r-a),200*(a-o))}function Lt(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Tt(t){var e=parseFloat(t);return\"%\"===t.charAt(t.length-1)?Math.round(2.55*e):e}function zt(t){return\"function\"==typeof t?t:function(){return t}}function St(t){return function(e,n,r){return 2===arguments.length&&\"function\"==typeof n&&(r=n,n=null),Et(e,n,t,r)}}function Et(t,e,n,r){function a(){var t,e=s.status;if(!e&&Ot(s)||e>=200&&300>e||304===e){try{t=n.call(o,s)}catch(r){return void i.error.call(o,r)}i.load.call(o,t)}else i.error.call(o,s)}var o={},i=ui.dispatch(\"beforesend\",\"progress\",\"load\",\"error\"),l={},s=new 
XMLHttpRequest,c=null;return!this.XDomainRequest||\"withCredentials\"in s||!/^(http(s)?:)?\\/\\//.test(t)||(s=new XDomainRequest),\"onload\"in s?s.onload=s.onerror=a:s.onreadystatechange=function(){s.readyState>3&&a()},s.onprogress=function(t){var e=ui.event;ui.event=t;try{i.progress.call(o,s)}finally{ui.event=e}},o.header=function(t,e){return t=(t+\"\").toLowerCase(),arguments.length<2?l[t]:(null==e?delete l[t]:l[t]=e+\"\",o)},o.mimeType=function(t){return arguments.length?(e=null==t?null:t+\"\",o):e},o.responseType=function(t){return arguments.length?(c=t,o):c},o.response=function(t){return n=t,o},[\"get\",\"post\"].forEach(function(t){o[t]=function(){return o.send.apply(o,[t].concat(di(arguments)))}}),o.send=function(n,r,a){if(2===arguments.length&&\"function\"==typeof r&&(a=r,r=null),s.open(n,t,!0),null==e||\"accept\"in l||(l.accept=e+\",*/*\"),s.setRequestHeader)for(var u in l)s.setRequestHeader(u,l[u]);return null!=e&&s.overrideMimeType&&s.overrideMimeType(e),null!=c&&(s.responseType=c),null!=a&&o.on(\"error\",a).on(\"load\",function(t){a(null,t)}),i.beforesend.call(o,s),s.send(null==r?null:r),o},o.abort=function(){return s.abort(),o},ui.rebind(o,i,\"on\"),null==r?o:o.get(Ct(r))}function Ct(t){return 1===t.length?function(e,n){t(null==e?n:null)}:t}function Ot(t){var e=t.responseType;return e&&\"text\"!==e?t.response:t.responseText}function Pt(t,e,n){var r=arguments.length;2>r&&(e=0),3>r&&(n=Date.now());var a=n+e,o={c:t,t:a,n:null};return cl?cl.n=o:sl=o,cl=o,ul||(fl=clearTimeout(fl),ul=1,dl(Nt)),o}function Nt(){var t=Dt(),e=It()-t;e>24?(isFinite(e)&&(clearTimeout(fl),fl=setTimeout(Nt,e)),ul=0):(ul=1,dl(Nt))}function Dt(){for(var t=Date.now(),e=sl;e;)t>=e.t&&e.c(t-e.t)&&(e.c=null),e=e.n;return t}function It(){for(var t,e=sl,n=1/0;e;)e.c?(e.t<n&&(n=e.t),e=(t=e).n):e=t?t.n=e.n:sl=e.n;return cl=t,n}function Rt(t,e){return e-(t?Math.ceil(Math.log(t)/Math.LN10):1)}function jt(t,e){var n=Math.pow(10,3*wi(8-e));return{scale:e>8?function(t){return t/n}:function(t){return t*n},symbol:t}}function qt(t){var e=t.decimal,n=t.thousands,r=t.grouping,a=t.currency,o=r&&n?function(t,e){for(var a=t.length,o=[],i=0,l=r[0],s=0;a>0&&l>0&&(s+l+1>e&&(l=Math.max(1,e-s)),o.push(t.substring(a-=l,a+l)),!((s+=l+1)>e));)l=r[i=(i+1)%r.length];return o.reverse().join(n)}:b;return function(t){var n=pl.exec(t),r=n[1]||\" \",i=n[2]||\">\",l=n[3]||\"-\",s=n[4]||\"\",c=n[5],u=+n[6],f=n[7],d=n[8],h=n[9],p=1,g=\"\",v=\"\",m=!1,y=!0;switch(d&&(d=+d.substring(1)),(c||\"0\"===r&&\"=\"===i)&&(c=r=\"0\",i=\"=\"),h){case\"n\":f=!0,h=\"g\";break;case\"%\":p=100,v=\"%\",h=\"f\";break;case\"p\":p=100,v=\"%\",h=\"r\";break;case\"b\":case\"o\":case\"x\":case\"X\":\"#\"===s&&(g=\"0\"+h.toLowerCase());case\"c\":y=!1;case\"d\":m=!0,d=0;break;case\"s\":p=-1,h=\"r\"}\"$\"===s&&(g=a[0],v=a[1]),\"r\"!=h||d||(h=\"g\"),null!=d&&(\"g\"==h?d=Math.max(1,Math.min(21,d)):\"e\"!=h&&\"f\"!=h||(d=Math.max(0,Math.min(20,d)))),h=gl.get(h)||Ft;var x=c&&f;return function(t){var n=v;if(m&&t%1)return\"\";var a=0>t||0===t&&0>1/t?(t=-t,\"-\"):\"-\"===l?\"\":l;if(0>p){var s=ui.formatPrefix(t,d);t=s.scale(t),n=s.symbol+v}else t*=p;t=h(t,d);var b,_,w=t.lastIndex"
+,
+"Of(\".\");if(0>w){var k=y?t.lastIndexOf(\"e\"):-1;0>k?(b=t,_=\"\"):(b=t.substring(0,k),_=t.substring(k))}else b=t.substring(0,w),_=e+t.substring(w+1);!c&&f&&(b=o(b,1/0));var M=g.length+b.length+_.length+(x?0:a.length),A=u>M?new Array(M=u-M+1).join(r):\"\";return x&&(b=o(A+b,A.length?u-_.length:1/0)),a+=g,t=b+_,(\"<\"===i?a+t+A:\">\"===i?A+a+t:\"^\"===i?A.substring(0,M>>=1)+a+t+A.substring(M):a+(x?t:A+t))+n}}}function Ft(t){return t+\"\"}function Bt(){this._=new Date(arguments.length>1?Date.UTC.apply(this,arguments):arguments[0])}function Ht(t,e,n){function r(e){var n=t(e),r=o(n,1);return r-e>e-n?n:r}function a(n){return e(n=t(new ml(n-1)),1),n}function o(t,n){return e(t=new ml(+t),n),t}function i(t,r,o){var i=a(t),l=[];if(o>1)for(;r>i;)n(i)%o||l.push(new Date(+i)),e(i,1);else for(;r>i;)l.push(new Date(+i)),e(i,1);return l}function l(t,e,n){try{ml=Bt;var r=new Bt;return r._=t,i(r,e,n)}finally{ml=Date}}t.floor=t,t.round=r,t.ceil=a,t.offset=o,t.range=i;var s=t.utc=Vt(t);return s.floor=s,s.round=Vt(r),s.ceil=Vt(a),s.offset=Vt(o),s.range=l,t}function Vt(t){return function(e,n){try{ml=Bt;var r=new Bt;return r._=e,t(r,n)._}finally{ml=Date}}}function Zt(t){function e(t){function e(e){for(var n,a,o,i=[],l=-1,s=0;++l<r;)37===t.charCodeAt(l)&&(i.push(t.slice(s,l)),null!=(a=xl[n=t.charAt(++l)])&&(n=t.charAt(++l)),(o=z[n])&&(n=o(e,null==a?\"e\"===n?\" \":\"0\":a)),i.push(n),s=l+1);return i.push(t.slice(s,l)),i.join(\"\")}var r=t.length;return e.parse=function(e){var r={y:1900,m:0,d:1,H:0,M:0,S:0,L:0,Z:null},a=n(r,t,e,0);if(a!=e.length)return null;\"p\"in r&&(r.H=r.H%12+12*r.p);var o=null!=r.Z&&ml!==Bt,i=new(o?Bt:ml);return\"j\"in r?i.setFullYear(r.y,0,r.j):\"W\"in r||\"U\"in r?(\"w\"in r||(r.w=\"W\"in r?1:0),i.setFullYear(r.y,0,1),i.setFullYear(r.y,0,\"W\"in r?(r.w+6)%7+7*r.W-(i.getDay()+5)%7:r.w+7*r.U-(i.getDay()+6)%7)):i.setFullYear(r.y,r.m,r.d),i.setHours(r.H+(r.Z/100|0),r.M+r.Z%100,r.S,r.L),o?i._:i},e.toString=function(){return t},e}function n(t,e,n,r){for(var a,o,i,l=0,s=e.length,c=n.length;s>l;){if(r>=c)return-1;\n"
+,
+"if(a=e.charCodeAt(l++),37===a){if(i=e.charAt(l++),o=S[i in xl?e.charAt(l++):i],!o||(r=o(t,n,r))<0)return-1}else if(a!=n.charCodeAt(r++))return-1}return r}function r(t,e,n){w.lastIndex=0;var r=w.exec(e.slice(n));return r?(t.w=k.get(r[0].toLowerCase()),n+r[0].length):-1}function a(t,e,n){b.lastIndex=0;var r=b.exec(e.slice(n));return r?(t.w=_.get(r[0].toLowerCase()),n+r[0].length):-1}function o(t,e,n){L.lastIndex=0;var r=L.exec(e.slice(n));return r?(t.m=T.get(r[0].toLowerCase()),n+r[0].length):-1}function i(t,e,n){M.lastIndex=0;var r=M.exec(e.slice(n));return r?(t.m=A.get(r[0].toLowerCase()),n+r[0].length):-1}function l(t,e,r){return n(t,z.c.toString(),e,r)}function s(t,e,r){return n(t,z.x.toString(),e,r)}function c(t,e,r){return n(t,z.X.toString(),e,r)}function u(t,e,n){var r=x.get(e.slice(n,n+=2).toLowerCase());return null==r?-1:(t.p=r,n)}var f=t.dateTime,d=t.date,h=t.time,p=t.periods,g=t.days,v=t.shortDays,m=t.months,y=t.shortMonths;e.utc=function(t){function n(t){try{ml=Bt;var e=new ml;return e._=t,r(e)}finally{ml=Date}}var r=e(t);return n.parse=function(t){try{ml=Bt;var e=r.parse(t);return e&&e._}finally{ml=Date}},n.toString=r.toString,n},e.multi=e.utc.multi=ue;var x=ui.map(),b=Ut(g),_=Xt(g),w=Ut(v),k=Xt(v),M=Ut(m),A=Xt(m),L=Ut(y),T=Xt(y);p.forEach(function(t,e){x.set(t.toLowerCase(),e)});var z={a:function(t){return v[t.getDay()]},A:function(t){return g[t.getDay()]},b:function(t){return y[t.getMonth()]},B:function(t){return m[t.getMonth()]},c:e(f),d:function(t,e){return Yt(t.getDate(),e,2)},e:function(t,e){return Yt(t.getDate(),e,2)},H:function(t,e){return Yt(t.getHours(),e,2)},I:function(t,e){return Yt(t.getHours()%12||12,e,2)},j:function(t,e){return Yt(1+vl.dayOfYear(t),e,3)},L:function(t,e){return Yt(t.getMilliseconds(),e,3)},m:function(t,e){return Yt(t.getMonth()+1,e,2)},M:function(t,e){return Yt(t.getMinutes(),e,2)},p:function(t){return p[+(t.getHours()>=12)]},S:function(t,e){return Yt(t.getSeconds(),e,2)},U:function(t,e){return Yt(vl.sundayOfYear(t),e,2)},w:function(t){return t.getDay()},W:function(t,e){return Yt(vl.mondayOfYear(t),e,2)},x:e(d),X:e(h),y:function(t,e){return Yt(t.getFullYear()%100,e,2)},Y:function(t,e){return Yt(t.getFullYear()%1e4,e,4)},Z:se,\"%\":function(){return\"%\"}},S={a:r,A:a,b:o,B:i,c:l,d:ne,e:ne,H:ae,I:ae,j:re,L:le,m:ee,M:oe,p:u,S:ie,U:$t,w:Gt,W:Qt,x:s,X:c,y:Jt,Y:Wt,Z:Kt,\"%\":ce};return e}function Yt(t,e,n){var r=0>t?\"-\":\"\",a=(r?-t:t)+\"\",o=a.length;return r+(n>o?new Array(n-o+1).join(e)+a:a)}function Ut(t){return new RegExp(\"^(?:\"+t.map(ui.requote).join(\"|\")+\")\",\"i\")}function Xt(t){for(var e=new f,n=-1,r=t.length;++n<r;)e.set(t[n].toLowerCase(),n);return e}function Gt(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+1));return r?(t.w=+r[0],n+r[0].length):-1}function $t(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n));return r?(t.U=+r[0],n+r[0].length):-1}function Qt(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n));return r?(t.W=+r[0],n+r[0].length):-1}function Wt(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+4));return r?(t.y=+r[0],n+r[0].length):-1}function Jt(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+2));return r?(t.y=te(+r[0]),n+r[0].length):-1}function Kt(t,e,n){return/^[+-]\\d{4}$/.test(e=e.slice(n,n+5))?(t.Z=-e,n+5):-1}function te(t){return t+(t>68?1900:2e3)}function ee(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+2));return r?(t.m=r[0]-1,n+r[0].length):-1}function ne(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+2));return r?(t.d=+r[0],n+r[0].length):-1}function re(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+3));return 
r?(t.j=+r[0],n+r[0].length):-1}function ae(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+2));return r?(t.H=+r[0],n+r[0].length):-1}function oe(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+2));return r?(t.M=+r[0],n+r[0].length):-1}function ie(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+2));return r?(t.S=+r[0],n+r[0].length):-1}function le(t,e,n){bl.lastIndex=0;var r=bl.exec(e.slice(n,n+3));return r?(t.L=+r[0],n+r[0].length):-1}function se(t){var e=t.getTimezoneOffset(),n=e>0?\"-\":\"+\",r=wi(e)/60|0,a=wi(e)%60;return n+Yt(r,\"0\",2)+Yt(a,\"0\",2)}function ce(t,e,n){_l.lastIndex=0;var r=_l.exec(e.slice(n,n+1));return r?n+r[0].length:-1}function ue(t){for(var e=t.length,n=-1;++n<e;)t[n][0]=this(t[n][0]);return function(e){for(var n=0,r=t[n];!r[1](e);)r=t[++n];return r[0](e)}}function fe(){}function de(t,e,n){var r=n.s=t+e,a=r-t,o=r-a;n.t=t-o+(e-a)}function he(t,e){t&&Al.hasOwnProperty(t.type)&&Al[t.type](t,e)}function pe(t,e,n){var r,a=-1,o=t.length-n;for(e.lineStart();++a<o;)r=t[a],e.point(r[0],r[1],r[2]);e.lineEnd()}function ge(t,e){var n=-1,r=t.length;for(e.polygonStart();++n<r;)pe(t[n],e,1);e.polygonEnd()}function ve(){function t(t,e){t*=Yi,e=e*Yi/2+Bi/4;var n=t-r,i=n>=0?1:-1,l=i*n,s=Math.cos(e),c=Math.sin(e),u=o*c,f=a*s+u*Math.cos(l),d=u*i*Math.sin(l);Tl.add(Math.atan2(d,f)),r=t,a=s,o=c}var e,n,r,a,o;zl.point=function(i,l){zl.point=t,r=(e=i)*Yi,a=Math.cos(l=(n=l)*Yi/2+Bi/4),o=Math.sin(l)},zl.lineEnd=function(){t(e,n)}}function me(t){var e=t[0],n=t[1],r=Math.cos(n);return[r*Math.cos(e),r*Math.sin(e),Math.sin(n)]}function ye(t,e){return t[0]*e[0]+t[1]*e[1]+t[2]*e[2]}function xe(t,e){return[t[1]*e[2]-t[2]*e[1],t[2]*e[0]-t[0]*e[2],t[0]*e[1]-t[1]*e[0]]}function be(t,e){t[0]+=e[0],t[1]+=e[1],t[2]+=e[2]}function _e(t,e){return[t[0]*e,t[1]*e,t[2]*e]}function we(t){var e=Math.sqrt(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=e,t[1]/=e,t[2]/=e}function ke(t){return[Math.atan2(t[1],t[0]),rt(t[2])]}function Me(t,e){return wi(t[0]-e[0])<qi&&wi(t[1]-e[1])<qi}function Ae(t,e){t*=Yi;var n=Math.cos(e*=Yi);Le(n*Math.cos(t),n*Math.sin(t),Math.sin(e))}function Le(t,e,n){++Sl,Cl+=(t-Cl)/Sl,Ol+=(e-Ol)/Sl,Pl+=(n-Pl)/Sl}function Te(){function t(t,a){t*=Yi;var o=Math.cos(a*=Yi),i=o*Math.cos(t),l=o*Math.sin(t),s=Math.sin(a),c=Math.atan2(Math.sqrt((c=n*s-r*l)*c+(c=r*i-e*s)*c+(c=e*l-n*i)*c),e*i+n*l+r*s);El+=c,Nl+=c*(e+(e=i)),Dl+=c*(n+(n=l)),Il+=c*(r+(r=s)),Le(e,n,r)}var e,n,r;Fl.point=function(a,o){a*=Yi;var i=Math.cos(o*=Yi);e=i*Math.cos(a),n=i*Math.sin(a),r=Math.sin(o),Fl.point=t,Le(e,n,r)}}function ze(){Fl.point=Ae}function Se(){function t(t,e){t*=Yi;var n=Math.cos(e*=Yi),i=n*Math.cos(t),l=n*Math.sin(t),s=Math.sin(e),c=a*s-o*l,u=o*i-r*s,f=r*l-a*i,d=Math.sqrt(c*c+u*u+f*f),h=r*i+a*l+o*s,p=d&&-nt(h)/d,g=Math.atan2(d,h);Rl+=p*c,jl+=p*u,ql+=p*f,El+=g,Nl+=g*(r+(r=i)),Dl+=g*(a+(a=l)),Il+=g*(o+(o=s)),Le(r,a,o)}var e,n,r,a,o;Fl.point=function(i,l){e=i,n=l,Fl.point=t,i*=Yi;var s=Math.cos(l*=Yi);r=s*Math.cos(i),a=s*Math.sin(i),o=Math.sin(l),Le(r,a,o)},Fl.lineEnd=function(){t(e,n),Fl.lineEnd=ze,Fl.point=Ae}}function Ee(t,e){function n(n,r){return n=t(n,r),e(n[0],n[1])}return t.invert&&e.invert&&(n.invert=function(n,r){return n=e.invert(n,r),n&&t.invert(n[0],n[1])}),n}function Ce(){return!0}function Oe(t,e,n,r,a){var o=[],i=[];if(t.forEach(function(t){if(!((e=t.length-1)<=0)){var e,n=t[0],r=t[e];if(Me(n,r)){a.lineStart();for(var l=0;e>l;++l)a.point((n=t[l])[0],n[1]);return void a.lineEnd()}var s=new Ne(n,t,null,!0),c=new Ne(n,null,s,!1);s.o=c,o.push(s),i.push(c),s=new Ne(r,t,null,!1),c=new 
Ne(r,null,s,!0),s.o=c,o.push(s),i.push(c)}}),i.sort(e),Pe(o),Pe(i),o.length){for(var l=0,s=n,c=i.length;c>l;++l)i[l].e=s=!s;for(var u,f,d=o[0];;){for(var h=d,p=!0;h.v;)if((h=h.n)===d)return;u=h.z,a.lineStart();do{if(h.v=h.o.v=!0,h.e){if(p)for(var l=0,c=u.length;c>l;++l)a.point((f=u[l])[0],f[1]);else r(h.x,h.n.x,1,a);h=h.n}else{if(p){u=h.p.z;for(var l=u.length-1;l>=0;--l)a.point((f=u[l])[0],f[1])}else r(h.x,h.p.x,-1,a);h=h.p}h=h.o,u=h.z,p=!p}while(!h.v);a.lineEnd()}}}function Pe(t){if(e=t.length){for(var e,n,r=0,a=t[0];++r<e;)a.n=n=t[r],n.p=a,a=n;a.n=n=t[0],n.p=a}}function Ne(t,e,n,r){this.x=t,this.z=e,this.o=n,this.e=r,this.v=!1,this.n=this.p=null}function De(t,e,n,r){return function(a,o){function i(e,n){var r=a(e,n);t(e=r[0],n=r[1])&&o.point(e,n)}function l(t,e){var n=a(t,e);v.point(n[0],n[1])}function s(){y.point=l,v.lineStart()}function c(){y.point=i,v.lineEnd()}function u(t,e){g.push([t,e]);var n=a(t,e);b.point(n[0],n[1])}function f(){b.lineStart(),g=[]}function d(){u(g[0][0],g[0][1]),b.lineEnd();var t,e=b.clean(),n=x.buffer(),r=n.length;if(g.pop(),p.push(g),g=null,r)if(1&e){t=n[0];var a,r=t.length-1,i=-1;if(r>0){for(_||(o.polygonStart(),_=!0),o.lineStart();++i<r;)o.point((a=t[i])[0],a[1]);o.lineEnd()}}else r>1&&2&e&&n.push(n.pop().concat(n.shift())),h.push(n.filter(Ie))}var h,p,g,v=e(o),m=a.invert(r[0],r[1]),y={point:i,lineStart:s,lineEnd:c,polygonStart:function(){y.point=u,y.lineStart=f,y.lineEnd=d,h=[],p=[]},polygonEnd:function(){y.point=i,y.lineStart=s,y.lineEnd=c,h=ui.merge(h);var t=He(m,p);h.length?(_||(o.polygonStart(),_=!0),Oe(h,je,t,n,o)):t&&(_||(o.polygonStart(),_=!0),o.lineStart(),n(null,null,1,o),o.lineEnd()),_&&(o.polygonEnd(),_=!1),h=p=null},sphere:function(){o.polygonStart(),o.lineStart(),n(null,null,1,o),o.lineEnd(),o.polygonEnd()}},x=Re(),b=e(x),_=!1;return y}}function Ie(t){return t.length>1}function Re(){var t,e=[];return{lineStart:function(){e.push(t=[])},point:function(e,n){t.push([e,n])},lineEnd:k,buffer:function(){var n=e;return e=[],t=null,n},rejoin:function(){e.length>1&&e.push(e.pop().concat(e.shift()))}}}function je(t,e){return((t=t.x)[0]<0?t[1]-Zi-qi:Zi-t[1])-((e=e.x)[0]<0?e[1]-Zi-qi:Zi-e[1])}function qe(t){var e,n=NaN,r=NaN,a=NaN;return{lineStart:function(){t.lineStart(),e=1},point:function(o,i){var l=o>0?Bi:-Bi,s=wi(o-n);wi(s-Bi)<qi?(t.point(n,r=(r+i)/2>0?Zi:-Zi),t.point(a,r),t.lineEnd(),t.lineStart(),t.point(l,r),t.point(o,r),e=0):a!==l&&s>=Bi&&(wi(n-a)<qi&&(n-=a*qi),wi(o-l)<qi&&(o-=l*qi),r=Fe(n,r,o,i),t.point(a,r),t.lineEnd(),t.lineStart(),t.point(l,r),e=0),t.point(n=o,r=i),a=l},lineEnd:function(){t.lineEnd(),n=r=NaN},clean:function(){return 2-e}}}function Fe(t,e,n,r){var a,o,i=Math.sin(t-n);return wi(i)>qi?Math.atan((Math.sin(e)*(o=Math.cos(r))*Math.sin(n)-Math.sin(r)*(a=Math.cos(e))*Math.sin(t))/(a*o*i)):(e+r)/2}function Be(t,e,n,r){var a;if(null==t)a=n*Zi,r.point(-Bi,a),r.point(0,a),r.point(Bi,a),r.point(Bi,0),r.point(Bi,-a),r.point(0,-a),r.point(-Bi,-a),r.point(-B"
+,
+"i,0),r.point(-Bi,a);else if(wi(t[0]-e[0])>qi){var o=t[0]<e[0]?Bi:-Bi;a=n*o/2,r.point(-o,a),r.point(0,a),r.point(o,a)}else r.point(e[0],e[1])}function He(t,e){var n=t[0],r=t[1],a=[Math.sin(n),-Math.cos(n),0],o=0,i=0;Tl.reset();for(var l=0,s=e.length;s>l;++l){var c=e[l],u=c.length;if(u)for(var f=c[0],d=f[0],h=f[1]/2+Bi/4,p=Math.sin(h),g=Math.cos(h),v=1;;){v===u&&(v=0),t=c[v];var m=t[0],y=t[1]/2+Bi/4,x=Math.sin(y),b=Math.cos(y),_=m-d,w=_>=0?1:-1,k=w*_,M=k>Bi,A=p*x;if(Tl.add(Math.atan2(A*w*Math.sin(k),g*b+A*Math.cos(k))),o+=M?_+w*Hi:_,M^d>=n^m>=n){var L=xe(me(f),me(t));we(L);var T=xe(a,L);we(T);var z=(M^_>=0?-1:1)*rt(T[2]);(r>z||r===z&&(L[0]||L[1]))&&(i+=M^_>=0?1:-1)}if(!v++)break;d=m,p=x,g=b,f=t}}return(-qi>o||qi>o&&0>Tl)^1&i}function Ve(t){function e(t,e){return Math.cos(t)*Math.cos(e)>o}function n(t){var n,o,s,c,u;return{lineStart:function(){c=s=!1,u=1},point:function(f,d){var h,p=[f,d],g=e(f,d),v=i?g?0:a(f,d):g?a(f+(0>f?Bi:-Bi),d):0;if(!n&&(c=s=g)&&t.lineStart(),g!==s&&(h=r(n,p),(Me(n,h)||Me(p,h))&&(p[0]+=qi,p[1]+=qi,g=e(p[0],p[1]))),g!==s)u=0,g?(t.lineStart(),h=r(p,n),t.point(h[0],h[1])):(h=r(n,p),t.point(h[0],h[1]),t.lineEnd()),n=h;else if(l&&n&&i^g){var m;v&o||!(m=r(p,n,!0))||(u=0,i?(t.lineStart(),t.point(m[0][0],m[0][1]),t.point(m[1][0],m[1][1]),t.lineEnd()):(t.point(m[1][0],m[1][1]),t.lineEnd(),t.lineStart(),t.point(m[0][0],m[0][1])))}!g||n&&Me(n,p)||t.point(p[0],p[1]),n=p,s=g,o=v},lineEnd:function(){s&&t.lineEnd(),n=null},clean:function(){return u|(c&&s)<<1}}}function r(t,e,n){var r=me(t),a=me(e),i=[1,0,0],l=xe(r,a),s=ye(l,l),c=l[0],u=s-c*c;if(!u)return!n&&t;var f=o*s/u,d=-o*c/u,h=xe(i,l),p=_e(i,f),g=_e(l,d);be(p,g);var v=h,m=ye(p,v),y=ye(v,v),x=m*m-y*(ye(p,p)-1);if(!(0>x)){var b=Math.sqrt(x),_=_e(v,(-m-b)/y);if(be(_,p),_=ke(_),!n)return _;var w,k=t[0],M=e[0],A=t[1],L=e[1];k>M&&(w=k,k=M,M=w);var T=M-k,z=wi(T-Bi)<qi,S=z||qi>T;if(!z&&A>L&&(w=A,A=L,L=w),S?z?A+L>0^_[1]<(wi(_[0]-k)<qi?A:L):A<=_[1]&&_[1]<=L:T>Bi^(k<=_[0]&&_[0]<=M)){var E=_e(v,(-m+b)/y);return be(E,p),[_,ke(E)]}}}function a(e,n){var r=i?t:Bi-t,a=0;return-r>e?a|=1:e>r&&(a|=2),-r>n?a|=4:n>r&&(a|=8),a}var o=Math.cos(t),i=o>0,l=wi(o)>qi,s=mn(t,6*Yi);return De(e,n,s,i?[0,-t]:[-Bi,t-Bi])}function Ze(t,e,n,r){return function(a){var o,i=a.a,l=a.b,s=i.x,c=i.y,u=l.x,f=l.y,d=0,h=1,p=u-s,g=f-c;if(o=t-s,p||!(o>0)){if(o/=p,0>p){if(d>o)return;h>o&&(h=o)}else if(p>0){if(o>h)return;o>d&&(d=o)}if(o=n-s,p||!(0>o)){if(o/=p,0>p){if(o>h)return;o>d&&(d=o)}else if(p>0){if(d>o)return;h>o&&(h=o)}if(o=e-c,g||!(o>0)){if(o/=g,0>g){if(d>o)return;h>o&&(h=o)}else if(g>0){if(o>h)return;o>d&&(d=o)}if(o=r-c,g||!(0>o)){if(o/=g,0>g){if(o>h)return;o>d&&(d=o)}else if(g>0){if(d>o)return;h>o&&(h=o)}return d>0&&(a.a={x:s+d*p,y:c+d*g}),1>h&&(a.b={x:s+h*p,y:c+h*g}),a}}}}}}function Ye(t,e,n,r){function a(r,a){return wi(r[0]-t)<qi?a>0?0:3:wi(r[0]-n)<qi?a>0?2:1:wi(r[1]-e)<qi?a>0?1:0:a>0?3:2}function o(t,e){return i(t.x,e.x)}function i(t,e){var n=a(t,1),r=a(e,1);return n!==r?n-r:0===n?e[1]-t[1]:1===n?t[0]-e[0]:2===n?t[1]-e[1]:e[0]-t[0]}return function(l){function s(t){for(var e=0,n=v.length,r=t[1],a=0;n>a;++a)for(var o,i=1,l=v[a],s=l.length,c=l[0];s>i;++i)o=l[i],c[1]<=r?o[1]>r&&et(c,o,t)>0&&++e:o[1]<=r&&et(c,o,t)<0&&--e,c=o;return 0!==e}function c(o,l,s,c){var u=0,f=0;if(null==o||(u=a(o,s))!==(f=a(l,s))||i(o,l)<0^s>0){do c.point(0===u||3===u?t:n,u>1?r:e);while((u=(u+s+4)%4)!==f)}else c.point(l[0],l[1])}function u(a,o){return a>=t&&n>=a&&o>=e&&r>=o}function f(t,e){u(t,e)&&l.point(t,e)}function d(){S.point=p,v&&v.push(m=[]),M=!0,k=!1,_=w=NaN}function 
h(){g&&(p(y,x),b&&k&&T.rejoin(),g.push(T.buffer())),S.point=f,k&&l.lineEnd()}function p(t,e){t=Math.max(-Hl,Math.min(Hl,t)),e=Math.max(-Hl,Math.min(Hl,e));var n=u(t,e);if(v&&m.push([t,e]),M)y=t,x=e,b=n,M=!1,n&&(l.lineStart(),l.point(t,e));else if(n&&k)l.point(t,e);else{var r={a:{x:_,y:w},b:{x:t,y:e}};z(r)?(k||(l.lineStart(),l.point(r.a.x,r.a.y)),l.point(r.b.x,r.b.y),n||l.lineEnd(),A=!1):n&&(l.lineStart(),l.point(t,e),A=!1)}_=t,w=e,k=n}var g,v,m,y,x,b,_,w,k,M,A,L=l,T=Re(),z=Ze(t,e,n,r),S={point:f,lineStart:d,lineEnd:h,polygonStart:function(){l=T,g=[],v=[],A=!0},polygonEnd:function(){l=L,g=ui.merge(g);var e=s([t,r]),n=A&&e,a=g.length;(n||a)&&(l.polygonStart(),n&&(l.lineStart(),c(null,null,1,l),l.lineEnd()),a&&Oe(g,o,e,c,l),l.polygonEnd()),g=v=m=null}};return S}}function Ue(t){var e=0,n=Bi/3,r=cn(t),a=r(e,n);return a.parallels=function(t){return arguments.length?r(e=t[0]*Bi/180,n=t[1]*Bi/180):[e/Bi*180,n/Bi*180]},a}function Xe(t,e){function n(t,e){var n=Math.sqrt(o-2*a*Math.sin(e))/a;return[n*Math.sin(t*=a),i-n*Math.cos(t)]}var r=Math.sin(t),a=(r+Math.sin(e))/2,o=1+r*(2*a-r),i=Math.sqrt(o)/a;return n.invert=function(t,e){var n=i-e;return[Math.atan2(t,n)/a,rt((o-(t*t+n*n)*a*a)/(2*a))]},n}function Ge(){function t(t,e){Zl+=a*t-r*e,r=t,a=e}var e,n,r,a;$l.point=function(o,i){$l.point=t,e=r=o,n=a=i},$l.lineEnd=function(){t(e,n)}}function $e(t,e){Yl>t&&(Yl=t),t>Xl&&(Xl=t),Ul>e&&(Ul=e),e>Gl&&(Gl=e)}function Qe(){function t(t,e){i.push(\"M\",t,\",\",e,o)}function e(t,e){i.push(\"M\",t,\",\",e),l.point=n}function n(t,e){i.push(\"L\",t,\",\",e)}function r(){l.point=t}function a(){i.push(\"Z\")}var o=We(4.5),i=[],l={point:t,lineStart:function(){l.point=e},lineEnd:r,polygonStart:function(){l.lineEnd=a},polygonEnd:function(){l.lineEnd=r,l.point=t},pointRadius:function(t){return o=We(t),l},result:function(){if(i.length){var t=i.join(\"\");return i=[],t}}};return l}function We(t){return\"m0,\"+t+\"a\"+t+\",\"+t+\" 0 1,1 0,\"+-2*t+\"a\"+t+\",\"+t+\" 0 1,1 0,\"+2*t+\"z\"}function Je(t,e){Cl+=t,Ol+=e,++Pl}function Ke(){function t(t,r){var a=t-e,o=r-n,i=Math.sqrt(a*a+o*o);Nl+=i*(e+t)/2,Dl+=i*(n+r)/2,Il+=i,Je(e=t,n=r)}var e,n;Wl.point=function(r,a){Wl.point=t,Je(e=r,n=a)}}function tn(){Wl.point=Je}function en(){function t(t,e){var n=t-r,o=e-a,i=Math.sqrt(n*n+o*o);Nl+=i*(r+t)/2,Dl+=i*(a+e)/2,Il+=i,i=a*t-r*e,Rl+=i*(r+t),jl+=i*(a+e),ql+=3*i,Je(r=t,a=e)}var e,n,r,a;Wl.point=function(o,i){Wl.point=t,Je(e=r=o,n=a=i)},Wl.lineEnd=function(){t(e,n)}}function nn(t){function e(e,n){t.moveTo(e+i,n),t.arc(e,n,i,0,Hi)}function n(e,n){t.moveTo(e,n),l.point=r}function r(e,n){t.lineTo(e,n)}function a(){l.point=e}function o(){t.closePath()}var i=4.5,l={point:e,lineStart:function(){l.point=n},lineEnd:a,polygonStart:function(){l.lineEnd=o},polygonEnd:function(){l.lineEnd=a,l.point=e},pointRadius:function(t){return i=t,l},result:k};return l}function rn(t){function e(t){return(l?r:n)(t)}function n(e){return ln(e,function(n,r){n=t(n,r),e.point(n[0],n[1])})}function r(e){function n(n,r){n=t(n,r),e.point(n[0],n[1])}function r(){x=NaN,M.point=o,e.lineStart()}function o(n,r){var o=me([n,r]),i=t(n,r);a(x,b,y,_,w,k,x=i[0],b=i[1],y=n,_=o[0],w=o[1],k=o[2],l,e),e.point(x,b)}function i(){M.point=n,e.lineEnd()}function s(){r(),M.point=c,M.lineEnd=u}function c(t,e){o(f=t,d=e),h=x,p=b,g=_,v=w,m=k,M.point=o}function u(){a(x,b,y,_,w,k,h,p,f,g,v,m,l,e),M.lineEnd=i,i()}var f,d,h,p,g,v,m,y,x,b,_,w,k,M={point:n,lineStart:r,lineEnd:i,polygonStart:function(){e.polygonStart(),M.lineStart=s},polygonEnd:function(){e.polygonEnd(),M.lineStart=r}};return 
M}function a(e,n,r,l,s,c,u,f,d,h,p,g,v,m){var y=u-e,x=f-n,b=y*y+x*x;if(b>4*o&&v--){var _=l+h,w=s+p,k=c+g,M=Math.sqrt(_*_+w*w+k*k),A=Math.asin(k/=M),L=wi(wi(k)-1)<qi||wi(r-d)<qi?(r+d)/2:Math.atan2(w,_),T=t(L,A),z=T[0],S=T[1],E=z-e,C=S-n,O=x*E-y*C;(O*O/b>o||wi((y*E+x*C)/b-.5)>.3||i>l*h+s*p+c*g)&&(a(e,n,r,l,s,c,z,S,L,_/=M,w/=M,k,v,m),m.point(z,S),a(z,S,L,_,w,k,u,f,d,h,p,g,v,m))}}var o=.5,i=Math.cos(30*Yi),l=16;return e.precision=function(t){return arguments.length?(l=(o=t*t)>0&&16,e):Math.sqrt(o)},e}function an(t){var e=rn(function(e,n){return t([e*Ui,n*Ui])});return function(t){return un(e(t))}}function on(t){this.stream=t}function ln(t,e){return{point:e,sphere:function(){t.sphere()},lineStart:function(){t.lineStart()},lineEnd:function(){t.lineEnd()},polygonStart:function(){t.polygonStart()},polygonEnd:function(){t.polygonEnd()}}}function sn(t){return cn(function(){return t})()}function cn(t){function e(t){return t=l(t[0]*Yi,t[1]*Yi),[t[0]*d+s,c-t[1]*d]}function n(t){return t=l.invert((t[0]-s)/d,(c-t[1])/d),t&&[t[0]*Ui,t[1]*Ui]}function r(){l=Ee(i=hn(m,y,x),o);var t=o(g,v);return s=h-t[0]*d,c=p+t[1]*d,a()}function a(){return u&&(u.valid=!1,u=null),e}var o,i,l,s,c,u,f=rn(function(t,e){return t=o(t,e),[t[0]*d+s,c-t[1]*d]}),d=150,h=480,p=250,g=0,v=0,m=0,y=0,x=0,_=Bl,w=b,k=null,M=null;return e.stream=function(t){return u&&(u.valid=!1),u=un(_(i,f(w(t)))),u.valid=!0,u},e.clipAngle=function(t){return arguments.length?(_=null==t?(k=t,Bl):Ve((k=+t)*Yi),a()):k},e.clipExtent=function(t){return arguments.length?(M=t,w=t?Ye(t[0][0],t[0][1],t[1][0],t[1][1]):b,a()):M},e.scale=function(t){return arguments.length?(d=+t,r()):d},e.translate=function(t){return arguments.length?(h=+t[0],p=+t[1],r()):[h,p]},e.center=function(t){return arguments.length?(g=t[0]%360*Yi,v=t[1]%360*Yi,r()):[g*Ui,v*Ui]},e.rotate=function(t){return arguments.length?(m=t[0]%360*Yi,y=t[1]%360*Yi,x=t.length>2?t[2]%360*Yi:0,r()):[m*Ui,y*Ui,x*Ui]},ui.rebind(e,f,\"precision\"),function(){return o=t.apply(this,arguments),e.invert=o.invert&&n,r()}}function un(t){return ln(t,function(e,n){t.point(e*Yi,n*Yi)})}function fn(t,e){return[t,e]}function dn(t,e){return[t>Bi?t-Hi:-Bi>t?t+Hi:t,e]}function hn(t,e,n){return t?e||n?Ee(gn(t),vn(e,n)):gn(t):e||n?vn(e,n):dn}function pn(t){return function(e,n){return e+=t,[e>Bi?e-Hi:-Bi>e?e+Hi:e,n]}}function gn(t){var e=pn(t);return e.invert=pn(-t),e}function vn(t,e){function n(t,e){var n=Math.cos(e),l=Math.cos(t)*n,s=Math.sin(t)*n,c=Math.sin(e),u=c*r+l*a;return[Math.atan2(s*o-u*i,l*r-c*a),rt(u*o+s*i)]}var r=Math.cos(t),a=Math.sin(t),o=Math.cos(e),i=Math.sin(e);return n.invert=function(t,e){var n=Math.cos(e),l=Math.cos(t)*n,s=Math.sin(t)*n,c=Math.sin(e),u=c*o-s*i;return[Math.atan2(s*o+c*i,l*r+u*a),rt(u*r-l*a)]},n}function mn(t,e){var n=Math.cos(t),r=Math.sin(t);return function(a,o,i,l){var s=i*e;null!=a?(a=yn(n,a),o=yn(n,o),(i>0?o>a:a>o)&&(a+=i*Hi)):(a=t+i*Hi,o=t-.5*s);for(var c,u=a;i>0?u>o:o>u;u-=s)l.point((c=ke([n,-r*Math."
+,
+"cos(u),-r*Math.sin(u)]))[0],c[1])}}function yn(t,e){var n=me(e);n[0]-=t,we(n);var r=nt(-n[1]);return((-n[2]<0?-r:r)+2*Math.PI-qi)%(2*Math.PI)}function xn(t,e,n){var r=ui.range(t,e-qi,n).concat(e);return function(t){return r.map(function(e){return[t,e]})}}function bn(t,e,n){var r=ui.range(t,e-qi,n).concat(e);return function(t){return r.map(function(e){return[e,t]})}}function _n(t){return t.source}function wn(t){return t.target}function kn(t,e,n,r){var a=Math.cos(e),o=Math.sin(e),i=Math.cos(r),l=Math.sin(r),s=a*Math.cos(t),c=a*Math.sin(t),u=i*Math.cos(n),f=i*Math.sin(n),d=2*Math.asin(Math.sqrt(lt(r-e)+a*i*lt(n-t))),h=1/Math.sin(d),p=d?function(t){var e=Math.sin(t*=d)*h,n=Math.sin(d-t)*h,r=n*s+e*u,a=n*c+e*f,i=n*o+e*l;return[Math.atan2(a,r)*Ui,Math.atan2(i,Math.sqrt(r*r+a*a))*Ui]}:function(){return[t*Ui,e*Ui]};return p.distance=d,p}function Mn(){function t(t,a){var o=Math.sin(a*=Yi),i=Math.cos(a),l=wi((t*=Yi)-e),s=Math.cos(l);Jl+=Math.atan2(Math.sqrt((l=i*Math.sin(l))*l+(l=r*o-n*i*s)*l),n*o+r*i*s),e=t,n=o,r=i}var e,n,r;Kl.point=function(a,o){e=a*Yi,n=Math.sin(o*=Yi),r=Math.cos(o),Kl.point=t},Kl.lineEnd=function(){Kl.point=Kl.lineEnd=k}}function An(t,e){function n(e,n){var r=Math.cos(e),a=Math.cos(n),o=t(r*a);return[o*a*Math.sin(e),o*Math.sin(n)]}return n.invert=function(t,n){var r=Math.sqrt(t*t+n*n),a=e(r),o=Math.sin(a),i=Math.cos(a);return[Math.atan2(t*o,r*i),Math.asin(r&&n*o/r)]},n}function Ln(t,e){function n(t,e){i>0?-Zi+qi>e&&(e=-Zi+qi):e>Zi-qi&&(e=Zi-qi);var n=i/Math.pow(a(e),o);return[n*Math.sin(o*t),i-n*Math.cos(o*t)]}var r=Math.cos(t),a=function(t){return Math.tan(Bi/4+t/2)},o=t===e?Math.sin(t):Math.log(r/Math.cos(e))/Math.log(a(e)/a(t)),i=r*Math.pow(a(t),o)/o;return o?(n.invert=function(t,e){var n=i-e,r=tt(o)*Math.sqrt(t*t+n*n);return[Math.atan2(t,n)/o,2*Math.atan(Math.pow(i/r,1/o))-Zi]},n):zn}function Tn(t,e){function n(t,e){var n=o-e;return[n*Math.sin(a*t),o-n*Math.cos(a*t)]}var r=Math.cos(t),a=t===e?Math.sin(t):(r-Math.cos(e))/(e-t),o=r/a+t;return wi(a)<qi?fn:(n.invert=function(t,e){var n=o-e;return[Math.atan2(t,n)/a,o-tt(a)*Math.sqrt(t*t+n*n)]},n)}function zn(t,e){return[t,Math.log(Math.tan(Bi/4+e/2))]}function Sn(t){var e,n=sn(t),r=n.scale,a=n.translate,o=n.clipExtent;return n.scale=function(){var t=r.apply(n,arguments);return t===n?e?n.clipExtent(null):n:t},n.translate=function(){var t=a.apply(n,arguments);return t===n?e?n.clipExtent(null):n:t},n.clipExtent=function(t){var i=o.apply(n,arguments);if(i===n){if(e=null==t){var l=Bi*r(),s=a();o([[s[0]-l,s[1]-l],[s[0]+l,s[1]+l]])}}else e&&(i=null);return i},n.clipExtent(null)}function En(t,e){return[Math.log(Math.tan(Bi/4+e/2)),-t]}function Cn(t){return t[0]}function On(t){return t[1]}function Pn(t){for(var e=t.length,n=[0,1],r=2,a=2;e>a;a++){for(;r>1&&et(t[n[r-2]],t[n[r-1]],t[a])<=0;)--r;n[r++]=a}return n.slice(0,r)}function Nn(t,e){return t[0]-e[0]||t[1]-e[1]}function Dn(t,e,n){return(n[0]-e[0])*(t[1]-e[1])<(n[1]-e[1])*(t[0]-e[0])}function In(t,e,n,r){var a=t[0],o=n[0],i=e[0]-a,l=r[0]-o,s=t[1],c=n[1],u=e[1]-s,f=r[1]-c,d=(l*(s-c)-f*(a-o))/(f*i-l*u);return[a+d*i,s+d*u]}function Rn(t){var e=t[0],n=t[t.length-1];return!(e[0]-n[0]||e[1]-n[1])}function jn(){or(this),this.edge=this.site=this.circle=null}function qn(t){var e=fs.pop()||new jn;return e.site=t,e}function Fn(t){Qn(t),ss.remove(t),fs.push(t),or(t)}function Bn(t){var e=t.circle,n=e.x,r=e.cy,a={x:n,y:r},o=t.P,i=t.N,l=[t];Fn(t);for(var s=o;s.circle&&wi(n-s.circle.x)<qi&&wi(r-s.circle.cy)<qi;)o=s.P,l.unshift(s),Fn(s),s=o;l.unshift(s),Qn(s);for(var 
c=i;c.circle&&wi(n-c.circle.x)<qi&&wi(r-c.circle.cy)<qi;)i=c.N,l.push(c),Fn(c),c=i;l.push(c),Qn(c);var u,f=l.length;for(u=1;f>u;++u)c=l[u],s=l[u-1],nr(c.edge,s.site,c.site,a);s=l[0],c=l[f-1],c.edge=tr(s.site,c.site,null,a),$n(s),$n(c)}function Hn(t){for(var e,n,r,a,o=t.x,i=t.y,l=ss._;l;)if(r=Vn(l,i)-o,r>qi)l=l.L;else{if(a=o-Zn(l,i),!(a>qi)){r>-qi?(e=l.P,n=l):a>-qi?(e=l,n=l.N):e=n=l;break}if(!l.R){e=l;break}l=l.R}var s=qn(t);if(ss.insert(e,s),e||n){if(e===n)return Qn(e),n=qn(e.site),ss.insert(s,n),s.edge=n.edge=tr(e.site,s.site),$n(e),void $n(n);if(!n)return void(s.edge=tr(e.site,s.site));Qn(e),Qn(n);var c=e.site,u=c.x,f=c.y,d=t.x-u,h=t.y-f,p=n.site,g=p.x-u,v=p.y-f,m=2*(d*v-h*g),y=d*d+h*h,x=g*g+v*v,b={x:(v*y-h*x)/m+u,y:(d*x-g*y)/m+f};nr(n.edge,c,p,b),s.edge=tr(c,t,null,b),n.edge=tr(t,p,null,b),$n(e),$n(n)}}function Vn(t,e){var n=t.site,r=n.x,a=n.y,o=a-e;if(!o)return r;var i=t.P;if(!i)return-(1/0);n=i.site;var l=n.x,s=n.y,c=s-e;if(!c)return l;var u=l-r,f=1/o-1/c,d=u/c;return f?(-d+Math.sqrt(d*d-2*f*(u*u/(-2*c)-s+c/2+a-o/2)))/f+r:(r+l)/2}function Zn(t,e){var n=t.N;if(n)return Vn(n,e);var r=t.site;return r.y===e?r.x:1/0}function Yn(t){this.site=t,this.edges=[]}function Un(t){for(var e,n,r,a,o,i,l,s,c,u,f=t[0][0],d=t[1][0],h=t[0][1],p=t[1][1],g=ls,v=g.length;v--;)if(o=g[v],o&&o.prepare())for(l=o.edges,s=l.length,i=0;s>i;)u=l[i].end(),r=u.x,a=u.y,c=l[++i%s].start(),e=c.x,n=c.y,(wi(r-e)>qi||wi(a-n)>qi)&&(l.splice(i,0,new rr(er(o.site,u,wi(r-f)<qi&&p-a>qi?{x:f,y:wi(e-f)<qi?n:p}:wi(a-p)<qi&&d-r>qi?{x:wi(n-p)<qi?e:d,y:p}:wi(r-d)<qi&&a-h>qi?{x:d,y:wi(e-d)<qi?n:h}:wi(a-h)<qi&&r-f>qi?{x:wi(n-h)<qi?e:f,y:h}:null),o.site,null)),++s)}function Xn(t,e){return e.angle-t.angle}function Gn(){or(this),this.x=this.y=this.arc=this.site=this.cy=null}function $n(t){var e=t.P,n=t.N;if(e&&n){var r=e.site,a=t.site,o=n.site;if(r!==o){var i=a.x,l=a.y,s=r.x-i,c=r.y-l,u=o.x-i,f=o.y-l,d=2*(s*f-c*u);if(!(d>=-Fi)){var h=s*s+c*c,p=u*u+f*f,g=(f*h-c*p)/d,v=(s*p-u*h)/d,f=v+l,m=ds.pop()||new Gn;m.arc=t,m.site=a,m.x=g+i,m.y=f+Math.sqrt(g*g+v*v),m.cy=f,t.circle=m;for(var y=null,x=us._;x;)if(m.y<x.y||m.y===x.y&&m.x<=x.x){if(!x.L){y=x.P;break}x=x.L}else{if(!x.R){y=x;break}x=x.R}us.insert(y,m),y||(cs=m)}}}}function Qn(t){var e=t.circle;e&&(e.P||(cs=e.N),us.remove(e),ds.push(e),or(e),t.circle=null)}function Wn(t){for(var e,n=is,r=Ze(t[0][0],t[0][1],t[1][0],t[1][1]),a=n.length;a--;)e=n[a],(!Jn(e,t)||!r(e)||wi(e.a.x-e.b.x)<qi&&wi(e.a.y-e.b.y)<qi)&&(e.a=e.b=null,n.splice(a,1))}function Jn(t,e){var n=t.b;if(n)return!0;var r,a,o=t.a,i=e[0][0],l=e[1][0],s=e[0][1],c=e[1][1],u=t.l,f=t.r,d=u.x,h=u.y,p=f.x,g=f.y,v=(d+p)/2,m=(h+g)/2;if(g===h){if(i>v||v>=l)return;if(d>p){if(o){if(o.y>=c)return}else o={x:v,y:s};n={x:v,y:c}}else{if(o){if(o.y<s)return}else o={x:v,y:c};n={x:v,y:s}}}else if(r=(d-p)/(g-h),a=m-r*v,-1>r||r>1)if(d>p){if(o){if(o.y>=c)return}else o={x:(s-a)/r,y:s};n={x:(c-a)/r,y:c}}else{if(o){if(o.y<s)return}else o={x:(c-a)/r,y:c};n={x:(s-a)/r,y:s}}else if(g>h){if(o){if(o.x>=l)return}else o={x:i,y:r*i+a};n={x:l,y:r*l+a}}else{if(o){if(o.x<i)return}else o={x:l,y:r*l+a};n={x:i,y:r*i+a}}return t.a=o,t.b=n,!0}function Kn(t,e){this.l=t,this.r=e,this.a=this.b=null}function tr(t,e,n,r){var a=new Kn(t,e);return is.push(a),n&&nr(a,t,e,n),r&&nr(a,e,t,r),ls[t.i].edges.push(new rr(a,t,e)),ls[e.i].edges.push(new rr(a,e,t)),a}function er(t,e,n){var r=new Kn(t,null);return r.a=e,r.b=n,is.push(r),r}function nr(t,e,n,r){t.a||t.b?t.l===n?t.b=r:t.a=r:(t.a=r,t.l=e,t.r=n)}function rr(t,e,n){var 
r=t.a,a=t.b;this.edge=t,this.site=e,this.angle=n?Math.atan2(n.y-e.y,n.x-e.x):t.l===e?Math.atan2(a.x-r.x,r.y-a.y):Math.atan2(r.x-a.x,a.y-r.y)}function ar(){this._=null}function or(t){t.U=t.C=t.L=t.R=t.P=t.N=null}function ir(t,e){var n=e,r=e.R,a=n.U;a?a.L===n?a.L=r:a.R=r:t._=r,r.U=a,n.U=r,n.R=r.L,n.R&&(n.R.U=n),r.L=n}function lr(t,e){var n=e,r=e.L,a=n.U;a?a.L===n?a.L=r:a.R=r:t._=r,r.U=a,n.U=r,n.L=r.R,n.L&&(n.L.U=n),r.R=n}function sr(t){for(;t.L;)t=t.L;return t}function cr(t,e){var n,r,a,o=t.sort(ur).pop();for(is=[],ls=new Array(t.length),ss=new ar,us=new ar;;)if(a=cs,o&&(!a||o.y<a.y||o.y===a.y&&o.x<a.x))o.x===n&&o.y===r||(ls[o.i]=new Yn(o),Hn(o),n=o.x,r=o.y),o=t.pop();else{if(!a)break;Bn(a.arc)}e&&(Wn(e),Un(e));var i={cells:ls,edges:is};return ss=us=is=ls=null,i}function ur(t,e){return e.y-t.y||e.x-t.x}function fr(t,e,n){return(t.x-n.x)*(e.y-t.y)-(t.x-e.x)*(n.y-t.y)}function dr(t){return t.x}function hr(t){return t.y}function pr(){return{leaf:!0,nodes:[],point:null,x:null,y:null}}function gr(t,e,n,r,a,o){if(!t(e,n,r,a,o)){var i=.5*(n+a),l=.5*(r+o),s=e.nodes;s[0]&&gr(t,s[0],n,r,i,l),s[1]&&gr(t,s[1],i,r,a,l),s[2]&&gr(t,s[2],n,l,i,o),s[3]&&gr(t,s[3],i,l,a,o)}}function vr(t,e,n,r,a,o,i){var l,s=1/0;return function c(t,u,f,d,h){if(!(u>o||f>i||r>d||a>h)){if(p=t.point){var p,g=e-t.x,v=n-t.y,m=g*g+v*v;if(s>m){var y=Math.sqrt(s=m);r=e-y,a=n-y,o=e+y,i=n+y,l=p}}for(var x=t.nodes,b=.5*(u+d),_=.5*(f+h),w=e>=b,k=n>=_,M=k<<1|w,A=M+4;A>M;++M)if(t=x[3&M])switch(3&M){case 0:c(t,u,f,b,_);break;case 1:c(t,b,f,d,_);break;case 2:c(t,u,_,b,h);break;case 3:c(t,b,_,d,h)}}}(t,r,a,o,i),l}function mr(t,e){t=ui.rgb(t),e=ui.rgb(e);var n=t.r,r=t.g,a=t.b,o=e.r-n,i=e.g-r,l=e.b-a;return function(t){return\"#\"+wt(Math.round(n+o*t))+wt(Math.round(r+i*t))+wt(Math.round(a+l*t))}}function yr(t,e){var n,r={},a={};for(n in t)n in e?r[n]=_r(t[n],e[n]):a[n]=t[n];for(n in e)n in t||(a[n]=e[n]);return function(t){for(n in r)a[n]=r[n](t);return a}}function xr(t,e){return t=+t,e=+e,function(n){return t*(1-n)+e*n}}function br(t,e){var n,r,a,o=ps.lastIndex=gs.lastIndex=0,i=-1,l=[],s=[];for(t+=\"\",e+=\"\";(n=ps.exec(t))&&(r=gs.exec(e));)(a=r.index)>o&&(a=e.slice(o,a),l[i]?l[i]+=a:l[++i]=a),(n=n[0])===(r=r[0])?l[i]?l[i]+=r:l[++i]=r:(l[++i]=null,s.push({i:i,x:xr(n,r)})),o=gs.lastIndex;return o<e.length&&(a=e.slice(o),l[i]?l[i]+=a:l[++i]=a),l.length<2?s[0]?(e=s[0].x,function(t){return e(t)+\"\"}):function(){return e}:(e=s.length,function(t){for(var n,r=0;e>r;++r)l[(n=s[r]).i]=n.x(t);return l.join(\"\")})}function _r(t,e){for(var n,r=ui.interpolators.length;--r>=0&&!(n=ui.interpolators[r](t,e)););return n}function wr(t,e){var n,r=[],a=[],o=t.length,i=e.length,l=Math.min(t.length,e.length);for(n=0;l>n;++n)r.push(_r(t[n],e[n]));for(;o>n;++n)a[n]=t[n];for(;i>n;++n)a[n]=e[n];return function(t){for(n=0;l>n;++n)a[n]=r[n](t);return a}}function kr(t){return function(e){return 0>=e?0:e>=1?1:t(e)}}function Mr(t){return function(e){return 1-t(1-e)}}function Ar(t){return function(e){return.5*(.5>e?t(2*e):2-t(2-2*e))}"
+,
+"}function Lr(t){return t*t}function Tr(t){return t*t*t}function zr(t){if(0>=t)return 0;if(t>=1)return 1;var e=t*t,n=e*t;return 4*(.5>t?n:3*(t-e)+n-.75)}function Sr(t){return function(e){return Math.pow(e,t)}}function Er(t){return 1-Math.cos(t*Zi)}function Cr(t){return Math.pow(2,10*(t-1))}function Or(t){return 1-Math.sqrt(1-t*t)}function Pr(t,e){var n;return arguments.length<2&&(e=.45),arguments.length?n=e/Hi*Math.asin(1/t):(t=1,n=e/4),function(r){return 1+t*Math.pow(2,-10*r)*Math.sin((r-n)*Hi/e)}}function Nr(t){return t||(t=1.70158),function(e){return e*e*((t+1)*e-t)}}function Dr(t){return 1/2.75>t?7.5625*t*t:2/2.75>t?7.5625*(t-=1.5/2.75)*t+.75:2.5/2.75>t?7.5625*(t-=2.25/2.75)*t+.9375:7.5625*(t-=2.625/2.75)*t+.984375}function Ir(t,e){t=ui.hcl(t),e=ui.hcl(e);var n=t.h,r=t.c,a=t.l,o=e.h-n,i=e.c-r,l=e.l-a;return isNaN(i)&&(i=0,r=isNaN(r)?e.c:r),isNaN(o)?(o=0,n=isNaN(n)?e.h:n):o>180?o-=360:-180>o&&(o+=360),function(t){return dt(n+o*t,r+i*t,a+l*t)+\"\"}}function Rr(t,e){t=ui.hsl(t),e=ui.hsl(e);var n=t.h,r=t.s,a=t.l,o=e.h-n,i=e.s-r,l=e.l-a;return isNaN(i)&&(i=0,r=isNaN(r)?e.s:r),isNaN(o)?(o=0,n=isNaN(n)?e.h:n):o>180?o-=360:-180>o&&(o+=360),function(t){return ut(n+o*t,r+i*t,a+l*t)+\"\"}}function jr(t,e){t=ui.lab(t),e=ui.lab(e);var n=t.l,r=t.a,a=t.b,o=e.l-n,i=e.a-r,l=e.b-a;return function(t){return pt(n+o*t,r+i*t,a+l*t)+\"\"}}function qr(t,e){return e-=t,function(n){return Math.round(t+e*n)}}function Fr(t){var e=[t.a,t.b],n=[t.c,t.d],r=Hr(e),a=Br(e,n),o=Hr(Vr(n,e,-a))||0;e[0]*n[1]<n[0]*e[1]&&(e[0]*=-1,e[1]*=-1,r*=-1,a*=-1),this.rotate=(r?Math.atan2(e[1],e[0]):Math.atan2(-n[0],n[1]))*Ui,this.translate=[t.e,t.f],this.scale=[r,o],this.skew=o?Math.atan2(a,o)*Ui:0}function Br(t,e){return t[0]*e[0]+t[1]*e[1]}function Hr(t){var e=Math.sqrt(Br(t,t));return e&&(t[0]/=e,t[1]/=e),e}function Vr(t,e,n){return t[0]+=n*e[0],t[1]+=n*e[1],t}function Zr(t){return t.length?t.pop()+\",\":\"\"}function Yr(t,e,n,r){if(t[0]!==e[0]||t[1]!==e[1]){var a=n.push(\"translate(\",null,\",\",null,\")\");r.push({i:a-4,x:xr(t[0],e[0])\n"
+,
+"},{i:a-2,x:xr(t[1],e[1])})}else(e[0]||e[1])&&n.push(\"translate(\"+e+\")\")}function Ur(t,e,n,r){t!==e?(t-e>180?e+=360:e-t>180&&(t+=360),r.push({i:n.push(Zr(n)+\"rotate(\",null,\")\")-2,x:xr(t,e)})):e&&n.push(Zr(n)+\"rotate(\"+e+\")\")}function Xr(t,e,n,r){t!==e?r.push({i:n.push(Zr(n)+\"skewX(\",null,\")\")-2,x:xr(t,e)}):e&&n.push(Zr(n)+\"skewX(\"+e+\")\")}function Gr(t,e,n,r){if(t[0]!==e[0]||t[1]!==e[1]){var a=n.push(Zr(n)+\"scale(\",null,\",\",null,\")\");r.push({i:a-4,x:xr(t[0],e[0])},{i:a-2,x:xr(t[1],e[1])})}else 1===e[0]&&1===e[1]||n.push(Zr(n)+\"scale(\"+e+\")\")}function $r(t,e){var n=[],r=[];return t=ui.transform(t),e=ui.transform(e),Yr(t.translate,e.translate,n,r),Ur(t.rotate,e.rotate,n,r),Xr(t.skew,e.skew,n,r),Gr(t.scale,e.scale,n,r),t=e=null,function(t){for(var e,a=-1,o=r.length;++a<o;)n[(e=r[a]).i]=e.x(t);return n.join(\"\")}}function Qr(t,e){return e=(e-=t=+t)||1/e,function(n){return(n-t)/e}}function Wr(t,e){return e=(e-=t=+t)||1/e,function(n){return Math.max(0,Math.min(1,(n-t)/e))}}function Jr(t){for(var e=t.source,n=t.target,r=ta(e,n),a=[e];e!==r;)e=e.parent,a.push(e);for(var o=a.length;n!==r;)a.splice(o,0,n),n=n.parent;return a}function Kr(t){for(var e=[],n=t.parent;null!=n;)e.push(t),t=n,n=n.parent;return e.push(t),e}function ta(t,e){if(t===e)return t;for(var n=Kr(t),r=Kr(e),a=n.pop(),o=r.pop(),i=null;a===o;)i=a,a=n.pop(),o=r.pop();return i}function ea(t){t.fixed|=2}function na(t){t.fixed&=-7}function ra(t){t.fixed|=4,t.px=t.x,t.py=t.y}function aa(t){t.fixed&=-5}function oa(t,e,n){var r=0,a=0;if(t.charge=0,!t.leaf)for(var o,i=t.nodes,l=i.length,s=-1;++s<l;)o=i[s],null!=o&&(oa(o,e,n),t.charge+=o.charge,r+=o.charge*o.cx,a+=o.charge*o.cy);if(t.point){t.leaf||(t.point.x+=Math.random()-.5,t.point.y+=Math.random()-.5);var c=e*n[t.point.index];t.charge+=t.pointCharge=c,r+=c*t.point.x,a+=c*t.point.y}t.cx=r/t.charge,t.cy=a/t.charge}function ia(t,e){return ui.rebind(t,e,\"sort\",\"children\",\"value\"),t.nodes=t,t.links=da,t}function la(t,e){for(var n=[t];null!=(t=n.pop());)if(e(t),(a=t.children)&&(r=a.length))for(var r,a;--r>=0;)n.push(a[r])}function sa(t,e){for(var n=[t],r=[];null!=(t=n.pop());)if(r.push(t),(o=t.children)&&(a=o.length))for(var a,o,i=-1;++i<a;)n.push(o[i]);for(;null!=(t=r.pop());)e(t)}function ca(t){return t.children}function ua(t){return t.value}function fa(t,e){return e.value-t.value}function da(t){return ui.merge(t.map(function(t){return(t.children||[]).map(function(e){return{source:t,target:e}})}))}function ha(t){return t.x}function pa(t){return t.y}function ga(t,e,n){t.y0=e,t.y=n}function va(t){return ui.range(t.length)}function ma(t){for(var e=-1,n=t[0].length,r=[];++e<n;)r[e]=0;return r}function ya(t){for(var e,n=1,r=0,a=t[0][1],o=t.length;o>n;++n)(e=t[n][1])>a&&(r=n,a=e);return r}function xa(t){return t.reduce(ba,0)}function ba(t,e){return t+e[1]}function _a(t,e){return wa(t,Math.ceil(Math.log(e.length)/Math.LN2+1))}function wa(t,e){for(var n=-1,r=+t[0],a=(t[1]-r)/e,o=[];++n<=e;)o[n]=a*n+r;return o}function ka(t){return[ui.min(t),ui.max(t)]}function Ma(t,e){return t.value-e.value}function Aa(t,e){var n=t._pack_next;t._pack_next=e,e._pack_prev=t,e._pack_next=n,n._pack_prev=e}function La(t,e){t._pack_next=e,e._pack_prev=t}function Ta(t,e){var n=e.x-t.x,r=e.y-t.y,a=t.r+e.r;return.999*a*a>n*n+r*r}function za(t){function e(t){u=Math.min(t.x-t.r,u),f=Math.max(t.x+t.r,f),d=Math.min(t.y-t.r,d),h=Math.max(t.y+t.r,h)}if((n=t.children)&&(c=n.length)){var 
n,r,a,o,i,l,s,c,u=1/0,f=-(1/0),d=1/0,h=-(1/0);if(n.forEach(Sa),r=n[0],r.x=-r.r,r.y=0,e(r),c>1&&(a=n[1],a.x=a.r,a.y=0,e(a),c>2))for(o=n[2],Oa(r,a,o),e(o),Aa(r,o),r._pack_prev=o,Aa(o,a),a=r._pack_next,i=3;c>i;i++){Oa(r,a,o=n[i]);var p=0,g=1,v=1;for(l=a._pack_next;l!==a;l=l._pack_next,g++)if(Ta(l,o)){p=1;break}if(1==p)for(s=r._pack_prev;s!==l._pack_prev&&!Ta(s,o);s=s._pack_prev,v++);p?(v>g||g==v&&a.r<r.r?La(r,a=l):La(r=s,a),i--):(Aa(r,o),a=o,e(o))}var m=(u+f)/2,y=(d+h)/2,x=0;for(i=0;c>i;i++)o=n[i],o.x-=m,o.y-=y,x=Math.max(x,o.r+Math.sqrt(o.x*o.x+o.y*o.y));t.r=x,n.forEach(Ea)}}function Sa(t){t._pack_next=t._pack_prev=t}function Ea(t){delete t._pack_next,delete t._pack_prev}function Ca(t,e,n,r){var a=t.children;if(t.x=e+=r*t.x,t.y=n+=r*t.y,t.r*=r,a)for(var o=-1,i=a.length;++o<i;)Ca(a[o],e,n,r)}function Oa(t,e,n){var r=t.r+n.r,a=e.x-t.x,o=e.y-t.y;if(r&&(a||o)){var i=e.r+n.r,l=a*a+o*o;i*=i,r*=r;var s=.5+(r-i)/(2*l),c=Math.sqrt(Math.max(0,2*i*(r+l)-(r-=l)*r-i*i))/(2*l);n.x=t.x+s*a+c*o,n.y=t.y+s*o-c*a}else n.x=t.x+r,n.y=t.y}function Pa(t,e){return t.parent==e.parent?1:2}function Na(t){var e=t.children;return e.length?e[0]:t.t}function Da(t){var e,n=t.children;return(e=n.length)?n[e-1]:t.t}function Ia(t,e,n){var r=n/(e.i-t.i);e.c-=r,e.s+=n,t.c+=r,e.z+=n,e.m+=n}function Ra(t){for(var e,n=0,r=0,a=t.children,o=a.length;--o>=0;)e=a[o],e.z+=n,e.m+=n,n+=e.s+(r+=e.c)}function ja(t,e,n){return t.a.parent===e.parent?t.a:n}function qa(t){return 1+ui.max(t,function(t){return t.y})}function Fa(t){return t.reduce(function(t,e){return t+e.x},0)/t.length}function Ba(t){var e=t.children;return e&&e.length?Ba(e[0]):t}function Ha(t){var e,n=t.children;return n&&(e=n.length)?Ha(n[e-1]):t}function Va(t){return{x:t.x,y:t.y,dx:t.dx,dy:t.dy}}function Za(t,e){var n=t.x+e[3],r=t.y+e[0],a=t.dx-e[1]-e[3],o=t.dy-e[0]-e[2];return 0>a&&(n+=a/2,a=0),0>o&&(r+=o/2,o=0),{x:n,y:r,dx:a,dy:o}}function Ya(t){var e=t[0],n=t[t.length-1];return n>e?[e,n]:[n,e]}function Ua(t){return t.rangeExtent?t.rangeExtent():Ya(t.range())}function Xa(t,e,n,r){var a=n(t[0],t[1]),o=r(e[0],e[1]);return function(t){return o(a(t))}}function Ga(t,e){var n,r=0,a=t.length-1,o=t[r],i=t[a];return o>i&&(n=r,r=a,a=n,n=o,o=i,i=n),t[r]=e.floor(o),t[a]=e.ceil(i),t}function $a(t){return t?{floor:function(e){return Math.floor(e/t)*t},ceil:function(e){return Math.ceil(e/t)*t}}:Ls}function Qa(t,e,n,r){var a=[],o=[],i=0,l=Math.min(t.length,e.length)-1;for(t[l]<t[0]&&(t=t.slice().reverse(),e=e.slice().reverse());++i<=l;)a.push(n(t[i-1],t[i])),o.push(r(e[i-1],e[i]));return function(e){var n=ui.bisect(t,e,1,l)-1;return o[n](a[n](e))}}function Wa(t,e,n,r){function a(){var a=Math.min(t.length,e.length)>2?Qa:Xa,s=r?Wr:Qr;return i=a(t,e,s,n),l=a(e,t,s,_r),o}function o(t){return i(t)}var i,l;return o.invert=function(t){return l(t)},o.domain=function(e){return arguments.length?(t=e.map(Number),a()):t},o.range=function(t){return arguments.length?(e=t,a()):e},o.rangeRound=function(t){return o.range(t).interpolate(qr)},o.clamp=function(t){return arguments.length?(r=t,a()):r},o.interpolate=function(t){return arguments.length?(n=t,a()):n},o.ticks=function(e){return eo(t,e)},o.tickFormat=function(e,n){return no(t,e,n)},o.nice=function(e){return Ka(t,e),a()},o.copy=function(){return Wa(t,e,n,r)},a()}function Ja(t,e){return ui.rebind(t,e,\"range\",\"rangeRound\",\"interpolate\",\"clamp\")}function Ka(t,e){return Ga(t,$a(to(t,e)[2])),Ga(t,$a(to(t,e)[2])),t}function to(t,e){null==e&&(e=10);var 
n=Ya(t),r=n[1]-n[0],a=Math.pow(10,Math.floor(Math.log(r/e)/Math.LN10)),o=e/r*a;return.15>=o?a*=10:.35>=o?a*=5:.75>=o&&(a*=2),n[0]=Math.ceil(n[0]/a)*a,n[1]=Math.floor(n[1]/a)*a+.5*a,n[2]=a,n}function eo(t,e){return ui.range.apply(ui,to(t,e))}function no(t,e,n){var r=to(t,e);if(n){var a=pl.exec(n);if(a.shift(),\"s\"===a[8]){var o=ui.formatPrefix(Math.max(wi(r[0]),wi(r[1])));return a[7]||(a[7]=\".\"+ro(o.scale(r[2]))),a[8]=\"f\",n=ui.format(a.join(\"\")),function(t){return n(o.scale(t))+o.symbol}}a[7]||(a[7]=\".\"+ao(a[8],r)),n=a.join(\"\")}else n=\",.\"+ro(r[2])+\"f\";return ui.format(n)}function ro(t){return-Math.floor(Math.log(t)/Math.LN10+.01)}function ao(t,e){var n=ro(e[2]);return t in Ts?Math.abs(n-ro(Math.max(wi(e[0]),wi(e[1]))))+ +(\"e\"!==t):n-2*(\"%\"===t)}function oo(t,e,n,r){function a(t){return(n?Math.log(0>t?0:t):-Math.log(t>0?0:-t))/Math.log(e)}function o(t){return n?Math.pow(e,t):-Math.pow(e,-t)}function i(e){return t(a(e))}return i.invert=function(e){return o(t.invert(e))},i.domain=function(e){return arguments.length?(n=e[0]>=0,t.domain((r=e.map(Number)).map(a)),i):r},i.base=function(n){return arguments.length?(e=+n,t.domain(r.map(a)),i):e},i.nice=function(){var e=Ga(r.map(a),n?Math:Ss);return t.domain(e),r=e.map(o),i},i.ticks=function(){var t=Ya(r),i=[],l=t[0],s=t[1],c=Math.floor(a(l)),u=Math.ceil(a(s)),f=e%1?2:e;if(isFinite(u-c)){if(n){for(;u>c;c++)for(var d=1;f>d;d++)i.push(o(c)*d);i.push(o(c))}else for(i.push(o(c));c++<u;)for(var d=f-1;d>0;d--)i.push(o(c)*d);for(c=0;i[c]<l;c++);for(u=i.length;i[u-1]>s;u--);i=i.slice(c,u)}return i},i.tickFormat=function(t,n){if(!arguments.length)return zs;arguments.length<2?n=zs:\"function\"!=typeof n&&(n=ui.format(n));var r=Math.max(1,e*t/i.ticks().length);return function(t){var i=t/o(Math.round(a(t)));return e-.5>i*e&&(i*=e),r>=i?n(t):\"\"}},i.copy=function(){return oo(t.copy(),e,n,r)},Ja(i,t)}function io(t,e,n){function r(e){return t(a(e))}var a=lo(e),o=lo(1/e);return r.invert=function(e){return o(t.invert(e))},r.domain=function(e){return arguments.length?(t.domain((n=e.map(Number)).map(a)),r):n},r.ticks=function(t){return eo(n,t)},r.tickFormat=function(t,e){return no(n,t,e)},r.nice=function(t){return r.domain(Ka(n,t))},r.exponent=function(i){return arguments.length?(a=lo(e=i),o=lo(1/e),t.domain(n.map(a)),r):e},r.copy=function(){return io(t.copy(),e,n)},Ja(r,t)}function lo(t){return function(e){return 0>e?-Math.pow(-e,t):Math.pow(e,t)}}function so(t,e){function n(n){return o[((a.get(n)||(\"range\"===e.t?a.set(n,t.push(n)):NaN))-1)%o.length]}function r(e,n){return ui.range(t.length).map(function(t){return e+n*t})}var a,o,i;return n.domain=function(r){if(!arguments.length)return t;t=[],a=new f;for(var o,i=-1,l=r.length;++i<l;)a.has(o=r[i])||a.set(o,t.push(o));return n[e.t].apply(n,e.a)},n.range=function(t){return arguments.length?(o=t,i=0,e={t:\"range\",a:arguments},n):o},n.rangePoints=function(a,l){arguments.length<2&&(l=0);var s=a[0],c=a[1],u=t.length<2?(s=(s+c)/2,0):(c-s)/(t.length-1+l);return o=r(s+u*l/2,u),i=0,e={t:\"rangePoints\",a:arguments},n},n.rangeRoundPoints=function(a,l){arguments.length<2&&(l=0);var s=a[0],c=a[1],u=t.length<2?(s=c=Math.round((s+c)/2),0):(c-s)/"
+,
+"(t.length-1+l)|0;return o=r(s+Math.round(u*l/2+(c-s-(t.length-1+l)*u)/2),u),i=0,e={t:\"rangeRoundPoints\",a:arguments},n},n.rangeBands=function(a,l,s){arguments.length<2&&(l=0),arguments.length<3&&(s=l);var c=a[1]<a[0],u=a[c-0],f=a[1-c],d=(f-u)/(t.length-l+2*s);return o=r(u+d*s,d),c&&o.reverse(),i=d*(1-l),e={t:\"rangeBands\",a:arguments},n},n.rangeRoundBands=function(a,l,s){arguments.length<2&&(l=0),arguments.length<3&&(s=l);var c=a[1]<a[0],u=a[c-0],f=a[1-c],d=Math.floor((f-u)/(t.length-l+2*s));return o=r(u+Math.round((f-u-(t.length-l)*d)/2),d),c&&o.reverse(),i=Math.round(d*(1-l)),e={t:\"rangeRoundBands\",a:arguments},n},n.rangeBand=function(){return i},n.rangeExtent=function(){return Ya(e.a[0])},n.copy=function(){return so(t,e)},n.domain(t)}function co(t,e){function n(){var n=0,a=e.length;for(l=[];++n<a;)l[n-1]=ui.quantile(t,n/a);return r}function r(t){return isNaN(t=+t)?void 0:e[ui.bisect(l,t)]}var l;return r.domain=function(e){return arguments.length?(t=e.map(o).filter(i).sort(a),n()):t},r.range=function(t){return arguments.length?(e=t,n()):e},r.quantiles=function(){return l},r.invertExtent=function(n){return n=e.indexOf(n),0>n?[NaN,NaN]:[n>0?l[n-1]:t[0],n<l.length?l[n]:t[t.length-1]]},r.copy=function(){return co(t,e)},n()}function uo(t,e,n){function r(e){return n[Math.max(0,Math.min(i,Math.floor(o*(e-t))))]}function a(){return o=n.length/(e-t),i=n.length-1,r}var o,i;return r.domain=function(n){return arguments.length?(t=+n[0],e=+n[n.length-1],a()):[t,e]},r.range=function(t){return arguments.length?(n=t,a()):n},r.invertExtent=function(e){return e=n.indexOf(e),e=0>e?NaN:e/o+t,[e,e+1/o]},r.copy=function(){return uo(t,e,n)},a()}function fo(t,e){function n(n){return n>=n?e[ui.bisect(t,n)]:void 0}return n.domain=function(e){return arguments.length?(t=e,n):t},n.range=function(t){return arguments.length?(e=t,n):e},n.invertExtent=function(n){return n=e.indexOf(n),[t[n-1],t[n]]},n.copy=function(){return fo(t,e)},n}function ho(t){function e(t){return+t}return e.invert=e,e.domain=e.range=function(n){return arguments.length?(t=n.map(e),e):t},e.ticks=function(e){return eo(t,e)},e.tickFormat=function(e,n){return no(t,e,n)},e.copy=function(){return ho(t)},e}function po(){return 0}function go(t){return t.innerRadius}function vo(t){return t.outerRadius}function mo(t){return t.startAngle}function yo(t){return t.endAngle}function xo(t){return t&&t.padAngle}function bo(t,e,n,r){return(t-n)*e-(e-r)*t>0?0:1}function _o(t,e,n,r,a){var o=t[0]-e[0],i=t[1]-e[1],l=(a?r:-r)/Math.sqrt(o*o+i*i),s=l*i,c=-l*o,u=t[0]+s,f=t[1]+c,d=e[0]+s,h=e[1]+c,p=(u+d)/2,g=(f+h)/2,v=d-u,m=h-f,y=v*v+m*m,x=n-r,b=u*h-d*f,_=(0>m?-1:1)*Math.sqrt(Math.max(0,x*x*y-b*b)),w=(b*m-v*_)/y,k=(-b*v-m*_)/y,M=(b*m+v*_)/y,A=(-b*v+m*_)/y,L=w-p,T=k-g,z=M-p,S=A-g;return L*L+T*T>z*z+S*S&&(w=M,k=A),[[w-s,k-c],[w*n/x,k*n/x]]}function wo(t){function e(e){function i(){c.push(\"M\",o(t(u),l))}for(var s,c=[],u=[],f=-1,d=e.length,h=zt(n),p=zt(r);++f<d;)a.call(this,s=e[f],f)?u.push([+h.call(this,s,f),+p.call(this,s,f)]):u.length&&(i(),u=[]);return u.length&&i(),c.length?c.join(\"\"):null}var n=Cn,r=On,a=Ce,o=ko,i=o.key,l=.7;return e.x=function(t){return arguments.length?(n=t,e):n},e.y=function(t){return arguments.length?(r=t,e):r},e.defined=function(t){return arguments.length?(a=t,e):a},e.interpolate=function(t){return arguments.length?(i=\"function\"==typeof t?o=t:(o=Ds.get(t)||ko).key,e):i},e.tension=function(t){return arguments.length?(l=t,e):l},e}function ko(t){return t.length>1?t.join(\"L\"):t+\"Z\"}function Mo(t){return t.join(\"L\")+\"Z\"}function 
Ao(t){for(var e=0,n=t.length,r=t[0],a=[r[0],\",\",r[1]];++e<n;)a.push(\"H\",(r[0]+(r=t[e])[0])/2,\"V\",r[1]);return n>1&&a.push(\"H\",r[0]),a.join(\"\")}function Lo(t){for(var e=0,n=t.length,r=t[0],a=[r[0],\",\",r[1]];++e<n;)a.push(\"V\",(r=t[e])[1],\"H\",r[0]);return a.join(\"\")}function To(t){for(var e=0,n=t.length,r=t[0],a=[r[0],\",\",r[1]];++e<n;)a.push(\"H\",(r=t[e])[0],\"V\",r[1]);return a.join(\"\")}function zo(t,e){return t.length<4?ko(t):t[1]+Co(t.slice(1,-1),Oo(t,e))}function So(t,e){return t.length<3?Mo(t):t[0]+Co((t.push(t[0]),t),Oo([t[t.length-2]].concat(t,[t[1]]),e))}function Eo(t,e){return t.length<3?ko(t):t[0]+Co(t,Oo(t,e))}function Co(t,e){if(e.length<1||t.length!=e.length&&t.length!=e.length+2)return ko(t);var n=t.length!=e.length,r=\"\",a=t[0],o=t[1],i=e[0],l=i,s=1;if(n&&(r+=\"Q\"+(o[0]-2*i[0]/3)+\",\"+(o[1]-2*i[1]/3)+\",\"+o[0]+\",\"+o[1],a=t[1],s=2),e.length>1){l=e[1],o=t[s],s++,r+=\"C\"+(a[0]+i[0])+\",\"+(a[1]+i[1])+\",\"+(o[0]-l[0])+\",\"+(o[1]-l[1])+\",\"+o[0]+\",\"+o[1];for(var c=2;c<e.length;c++,s++)o=t[s],l=e[c],r+=\"S\"+(o[0]-l[0])+\",\"+(o[1]-l[1])+\",\"+o[0]+\",\"+o[1]}if(n){var u=t[s];r+=\"Q\"+(o[0]+2*l[0]/3)+\",\"+(o[1]+2*l[1]/3)+\",\"+u[0]+\",\"+u[1]}return r}function Oo(t,e){for(var n,r=[],a=(1-e)/2,o=t[0],i=t[1],l=1,s=t.length;++l<s;)n=o,o=i,i=t[l],r.push([a*(i[0]-n[0]),a*(i[1]-n[1])]);return r}function Po(t){if(t.length<3)return ko(t);var e=1,n=t.length,r=t[0],a=r[0],o=r[1],i=[a,a,a,(r=t[1])[0]],l=[o,o,o,r[1]],s=[a,\",\",o,\"L\",Ro(js,i),\",\",Ro(js,l)];for(t.push(t[n-1]);++e<=n;)r=t[e],i.shift(),i.push(r[0]),l.shift(),l.push(r[1]),jo(s,i,l);return t.pop(),s.push(\"L\",r),s.join(\"\")}function No(t){if(t.length<4)return ko(t);for(var e,n=[],r=-1,a=t.length,o=[0],i=[0];++r<3;)e=t[r],o.push(e[0]),i.push(e[1]);for(n.push(Ro(js,o)+\",\"+Ro(js,i)),--r;++r<a;)e=t[r],o.shift(),o.push(e[0]),i.shift(),i.push(e[1]),jo(n,o,i);return n.join(\"\")}function Do(t){for(var e,n,r=-1,a=t.length,o=a+4,i=[],l=[];++r<4;)n=t[r%a],i.push(n[0]),l.push(n[1]);for(e=[Ro(js,i),\",\",Ro(js,l)],--r;++r<o;)n=t[r%a],i.shift(),i.push(n[0]),l.shift(),l.push(n[1]),jo(e,i,l);return e.join(\"\")}function Io(t,e){var n=t.length-1;if(n)for(var r,a,o=t[0][0],i=t[0][1],l=t[n][0]-o,s=t[n][1]-i,c=-1;++c<=n;)r=t[c],a=c/n,r[0]=e*r[0]+(1-e)*(o+a*l),r[1]=e*r[1]+(1-e)*(i+a*s);return Po(t)}function Ro(t,e){return t[0]*e[0]+t[1]*e[1]+t[2]*e[2]+t[3]*e[3]}function jo(t,e,n){t.push(\"C\",Ro(Is,e),\",\",Ro(Is,n),\",\",Ro(Rs,e),\",\",Ro(Rs,n),\",\",Ro(js,e),\",\",Ro(js,n))}function qo(t,e){return(e[1]-t[1])/(e[0]-t[0])}function Fo(t){for(var e=0,n=t.length-1,r=[],a=t[0],o=t[1],i=r[0]=qo(a,o);++e<n;)r[e]=(i+(i=qo(a=o,o=t[e+1])))/2;return r[e]=i,r}function Bo(t){for(var e,n,r,a,o=[],i=Fo(t),l=-1,s=t.length-1;++l<s;)e=qo(t[l],t[l+1]),wi(e)<qi?i[l]=i[l+1]=0:(n=i[l]/e,r=i[l+1]/e,a=n*n+r*r,a>9&&(a=3*e/Math.sqrt(a),i[l]=a*n,i[l+1]=a*r));for(l=-1;++l<=s;)a=(t[Math.min(s,l+1)][0]-t[Math.max(0,l-1)][0])/(6*(1+i[l]*i[l])),o.push([a||0,i[l]*a||0]);return o}function Ho(t){return t.length<3?ko(t):t[0]+Co(t,Bo(t))}function Vo(t){for(var e,n,r,a=-1,o=t.length;++a<o;)e=t[a],n=e[0],r=e[1]-Zi,e[0]=n*Math.cos(r),e[1]=n*Math.sin(r);return t}function Zo(t){function e(e){function s(){g.push(\"M\",l(t(m),f),u,c(t(v.reverse()),f),\"Z\")}for(var d,h,p,g=[],v=[],m=[],y=-1,x=e.length,b=zt(n),_=zt(a),w=n===r?function(){return h}:zt(r),k=a===o?function(){return p}:zt(o);++y<x;)i.call(this,d=e[y],y)?(v.push([h=+b.call(this,d,y),p=+_.call(this,d,y)]),m.push([+w.call(this,d,y),+k.call(this,d,y)])):v.length&&(s(),v=[],m=[]);return 
v.length&&s(),g.length?g.join(\"\"):null}var n=Cn,r=Cn,a=0,o=On,i=Ce,l=ko,s=l.key,c=l,u=\"L\",f=.7;return e.x=function(t){return arguments.length?(n=r=t,e):r},e.x0=function(t){return arguments.length?(n=t,e):n},e.x1=function(t){return arguments.length?(r=t,e):r},e.y=function(t){return arguments.length?(a=o=t,e):o},e.y0=function(t){return arguments.length?(a=t,e):a},e.y1=function(t){return arguments.length?(o=t,e):o},e.defined=function(t){return arguments.length?(i=t,e):i},e.interpolate=function(t){return arguments.length?(s=\"function\"==typeof t?l=t:(l=Ds.get(t)||ko).key,c=l.reverse||l,u=l.closed?\"M\":\"L\",e):s},e.tension=function(t){return arguments.length?(f=t,e):f},e}function Yo(t){return t.radius}function Uo(t){return[t.x,t.y]}function Xo(t){return function(){var e=t.apply(this,arguments),n=e[0],r=e[1]-Zi;return[n*Math.cos(r),n*Math.sin(r)]}}function Go(){return 64}function $o(){return\"circle\"}function Qo(t){var e=Math.sqrt(t/Bi);return\"M0,\"+e+\"A\"+e+\",\"+e+\" 0 1,1 0,\"+-e+\"A\"+e+\",\"+e+\" 0 1,1 0,\"+e+\"Z\"}function Wo(t){return function(){var e,n,r;(e=this[t])&&(r=e[n=e.active])&&(r.timer.c=null,r.timer.t=NaN,--e.count?delete e[n]:delete this[t],e.active+=.5,r.event&&r.event.interrupt.call(this,this.__data__,r.index))}}function Jo(t,e,n){return Ti(t,Ys),t.namespace=e,t.id=n,t}function Ko(t,e,n,r){var a=t.id,o=t.namespace;return Y(t,\"function\"==typeof n?function(t,i,l){t[o][a].tween.set(e,r(n.call(t,t.__data__,i,l)))}:(n=r(n),function(t){t[o][a].tween.set(e,n)}))}function ti(t){return null==t&&(t=\"\"),function(){this.textContent=t}}function ei(t){return null==t?\"__transition__\":\"__transition_\"+t+\"__\"}function ni(t,e,n,r,a){function o(t){var e=g.delay;return c.t=e+s,t>=e?i(t-e):void(c.c=i)}function i(n){var a=p.active,o=p[a];o&&(o.timer.c=null,o.timer.t=NaN,--p.count,delete p[a],o.event&&o.event.interrupt.call(t,t.__data__,o.index));for(var i in p)if(r>+i){var f=p[i];f.timer.c=null,f.timer.t=NaN,--p.count,delete p[i]}c.c=l,Pt(function(){return c.c&&l(n||1)&&(c.c=null,c.t=NaN),1},0,s),p.active=r,g.event&&g.event.start.call(t,t.__data__,e),h=[],g.tween.forEach(function(n,r){(r=r.call(t,t.__data__,e))&&h.push(r)}),d=g.ease,u=g.duration}function l(a){for(var o=a/u,i=d(o),l=h.length;l>0;)h[--l].call(t,i);return o>=1?(g.event&&g.event.end.call(t,t.__data__,e),--p.count?delete p[r]:delete t[n],1):void 0}var s,c,u,d,h,p=t[n]||(t[n]={active:0,count:0}),g=p[r];g||(s=a.time,c=Pt(o,0,s),g=p[r]={tween:new f,time:s,timer:c,delay:a.delay,duration:a.duration,ease:a.ease,index:e},a=null,++p.count)}function ri(t,e,n){t.attr(\"transform\",function(t){var r=e(t);return\"translate(\"+(isFinite(r)?r:n(t))+\",0)\"})}function ai(t,e,n){t.attr(\"transform\",function(t){var r=e(t);return\"translate(0,\"+(isFinite(r)?r:n(t))+\")\"})}function oi(t){return t.toISOString()}function ii(t,e,n){function r(e){return t(e)}function a(t,n){var r=t[1]-t[0],a=r/n,o=ui.bisect(tc,a);return o==tc.length?[e.year,to(t.map(function(t){return t/31536e6}),n)[2]]:o?e[a/tc[o-1]<tc[o]/a?o-1:o]:[rc,to(t,n)[2]]}return r.invert=function(e){return li"
+,
+"(t.invert(e))},r.domain=function(e){return arguments.length?(t.domain(e),r):t.domain().map(li)},r.nice=function(t,e){function n(n){return!isNaN(n)&&!t.range(n,li(+n+1),e).length}var o=r.domain(),i=Ya(o),l=null==t?a(i,10):\"number\"==typeof t&&a(i,t);return l&&(t=l[0],e=l[1]),r.domain(Ga(o,e>1?{floor:function(e){for(;n(e=t.floor(e));)e=li(e-1);return e},ceil:function(e){for(;n(e=t.ceil(e));)e=li(+e+1);return e}}:t))},r.ticks=function(t,e){var n=Ya(r.domain()),o=null==t?a(n,10):\"number\"==typeof t?a(n,t):!t.range&&[{range:t},e];return o&&(t=o[0],e=o[1]),t.range(n[0],li(+n[1]+1),1>e?1:e)},r.tickFormat=function(){return n},r.copy=function(){return ii(t.copy(),e,n)},Ja(r,t)}function li(t){return new Date(t)}function si(t){return JSON.parse(t.responseText)}function ci(t){var e=hi.createRange();return e.selectNode(hi.body),e.createContextualFragment(t.responseText)}var ui={version:\"3.5.16\"},fi=[].slice,di=function(t){return fi.call(t)},hi=this.document;if(hi)try{di(hi.documentElement.childNodes)[0].nodeType}catch(pi){di=function(t){for(var e=t.length,n=new Array(e);e--;)n[e]=t[e];return n}}if(Date.now||(Date.now=function(){return+new Date}),hi)try{hi.createElement(\"DIV\").style.setProperty(\"opacity\",0,\"\")}catch(gi){var vi=this.Element.prototype,mi=vi.setAttribute,yi=vi.setAttributeNS,xi=this.CSSStyleDeclaration.prototype,bi=xi.setProperty;vi.setAttribute=function(t,e){mi.call(this,t,e+\"\")},vi.setAttributeNS=function(t,e,n){yi.call(this,t,e,n+\"\")},xi.setProperty=function(t,e,n){bi.call(this,t,e+\"\",n)}}ui.ascending=a,ui.descending=function(t,e){return t>e?-1:e>t?1:e>=t?0:NaN},ui.min=function(t,e){var n,r,a=-1,o=t.length;if(1===arguments.length){for(;++a<o;)if(null!=(r=t[a])&&r>=r){n=r;break}for(;++a<o;)null!=(r=t[a])&&n>r&&(n=r)}else{for(;++a<o;)if(null!=(r=e.call(t,t[a],a))&&r>=r){n=r;break}for(;++a<o;)null!=(r=e.call(t,t[a],a))&&n>r&&(n=r)}return n},ui.max=function(t,e){var n,r,a=-1,o=t.length;if(1===arguments.length){for(;++a<o;)if(null!=(r=t[a])&&r>=r){n=r;break}for(;++a<o;)null!=(r=t[a])&&r>n&&(n=r)}else{for(;++a<o;)if(null!=(r=e.call(t,t[a],a))&&r>=r){n=r;break}for(;++a<o;)null!=(r=e.call(t,t[a],a))&&r>n&&(n=r)}return n},ui.extent=function(t,e){var n,r,a,o=-1,i=t.length;if(1===arguments.length){for(;++o<i;)if(null!=(r=t[o])&&r>=r){n=a=r;break}for(;++o<i;)null!=(r=t[o])&&(n>r&&(n=r),r>a&&(a=r))}else{for(;++o<i;)if(null!=(r=e.call(t,t[o],o))&&r>=r){n=a=r;break}for(;++o<i;)null!=(r=e.call(t,t[o],o))&&(n>r&&(n=r),r>a&&(a=r))}return[n,a]},ui.sum=function(t,e){var n,r=0,a=t.length,o=-1;if(1===arguments.length)for(;++o<a;)i(n=+t[o])&&(r+=n);else for(;++o<a;)i(n=+e.call(t,t[o],o))&&(r+=n);return r},ui.mean=function(t,e){var n,r=0,a=t.length,l=-1,s=a;if(1===arguments.length)for(;++l<a;)i(n=o(t[l]))?r+=n:--s;else for(;++l<a;)i(n=o(e.call(t,t[l],l)))?r+=n:--s;return s?r/s:void 0},ui.quantile=function(t,e){var n=(t.length-1)*e+1,r=Math.floor(n),a=+t[r-1],o=n-r;return o?a+o*(t[r]-a):a},ui.median=function(t,e){var n,r=[],l=t.length,s=-1;if(1===arguments.length)for(;++s<l;)i(n=o(t[s]))&&r.push(n);else for(;++s<l;)i(n=o(e.call(t,t[s],s)))&&r.push(n);return r.length?ui.quantile(r.sort(a),.5):void 0},ui.variance=function(t,e){var n,r,a=t.length,l=0,s=0,c=-1,u=0;if(1===arguments.length)for(;++c<a;)i(n=o(t[c]))&&(r=n-l,l+=r/++u,s+=r*(n-l));else for(;++c<a;)i(n=o(e.call(t,t[c],c)))&&(r=n-l,l+=r/++u,s+=r*(n-l));return u>1?s/(u-1):void 0},ui.deviation=function(){var t=ui.variance.apply(this,arguments);return t?Math.sqrt(t):t};var 
_i=l(a);ui.bisectLeft=_i.left,ui.bisect=ui.bisectRight=_i.right,ui.bisector=function(t){return l(1===t.length?function(e,n){return a(t(e),n)}:t)},ui.shuffle=function(t,e,n){(o=arguments.length)<3&&(n=t.length,2>o&&(e=0));for(var r,a,o=n-e;o;)a=Math.random()*o--|0,r=t[o+e],t[o+e]=t[a+e],t[a+e]=r;return t},ui.permute=function(t,e){for(var n=e.length,r=new Array(n);n--;)r[n]=t[e[n]];return r},ui.pairs=function(t){for(var e,n=0,r=t.length-1,a=t[0],o=new Array(0>r?0:r);r>n;)o[n]=[e=a,a=t[++n]];return o},ui.transpose=function(t){if(!(a=t.length))return[];for(var e=-1,n=ui.min(t,s),r=new Array(n);++e<n;)for(var a,o=-1,i=r[e]=new Array(a);++o<a;)i[o]=t[o][e];return r},ui.zip=function(){return ui.transpose(arguments)},ui.keys=function(t){var e=[];for(var n in t)e.push(n);return e},ui.values=function(t){var e=[];for(var n in t)e.push(t[n]);return e},ui.entries=function(t){var e=[];for(var n in t)e.push({key:n,value:t[n]});return e},ui.merge=function(t){for(var e,n,r,a=t.length,o=-1,i=0;++o<a;)i+=t[o].length;for(n=new Array(i);--a>=0;)for(r=t[a],e=r.length;--e>=0;)n[--i]=r[e];return n};var wi=Math.abs;ui.range=function(t,e,n){if(arguments.length<3&&(n=1,arguments.length<2&&(e=t,t=0)),(e-t)/n===1/0)throw new Error(\"infinite range\");var r,a=[],o=c(wi(n)),i=-1;if(t*=o,e*=o,n*=o,0>n)for(;(r=t+n*++i)>e;)a.push(r/o);else for(;(r=t+n*++i)<e;)a.push(r/o);return a},ui.map=function(t,e){var n=new f;if(t instanceof f)t.forEach(function(t,e){n.set(t,e)});else if(Array.isArray(t)){var r,a=-1,o=t.length;if(1===arguments.length)for(;++a<o;)n.set(a,t[a]);else for(;++a<o;)n.set(e.call(t,r=t[a],a),r)}else for(var i in t)n.set(i,t[i]);return n};var ki=\"__proto__\",Mi=\"\\x00\";u(f,{has:p,get:function(t){return this._[d(t)]},set:function(t,e){return this._[d(t)]=e},remove:g,keys:v,values:function(){var t=[];for(var e in this._)t.push(this._[e]);return t},entries:function(){var t=[];for(var e in this._)t.push({key:h(e),value:this._[e]});return t},size:m,empty:y,forEach:function(t){for(var e in this._)t.call(this,h(e),this._[e])}}),ui.nest=function(){function t(e,i,l){if(l>=o.length)return r?r.call(a,i):n?i.sort(n):i;for(var s,c,u,d,h=-1,p=i.length,g=o[l++],v=new f;++h<p;)(d=v.get(s=g(c=i[h])))?d.push(c):v.set(s,[c]);return e?(c=e(),u=function(n,r){c.set(n,t(e,r,l))}):(c={},u=function(n,r){c[n]=t(e,r,l)}),v.forEach(u),c}function e(t,n){if(n>=o.length)return t;var r=[],a=i[n++];return t.forEach(function(t,a){r.push({key:t,values:e(a,n)})}),a?r.sort(function(t,e){return a(t.key,e.key)}):r}var n,r,a={},o=[],i=[];return a.map=function(e,n){return t(n,e,0)},a.entries=function(n){return e(t(ui.map,n,0),0)},a.key=function(t){return o.push(t),a},a.sortKeys=function(t){return i[o.length-1]=t,a},a.sortValues=function(t){return n=t,a},a.rollup=function(t){return r=t,a},a},ui.set=function(t){var e=new x;if(t)for(var n=0,r=t.length;r>n;++n)e.add(t[n]);return e},u(x,{has:p,add:function(t){return this._[d(t+=\"\")]=!0,t},remove:g,values:v,size:m,empty:y,forEach:function(t){for(var e in this._)t.call(this,h(e))}}),ui.behavior={},ui.rebind=function(t,e){for(var n,r=1,a=arguments.length;++r<a;)t[n=arguments[r]]=_(t,e,e[n]);return t};var Ai=[\"webkit\",\"ms\",\"moz\",\"Moz\",\"o\",\"O\"];ui.dispatch=function(){for(var t=new M,e=-1,n=arguments.length;++e<n;)t[arguments[e]]=A(t);return t},M.prototype.on=function(t,e){var n=t.indexOf(\".\"),r=\"\";if(n>=0&&(r=t.slice(n+1),t=t.slice(0,n)),t)return arguments.length<2?this[t].on(r):this[t].on(r,e);if(2===arguments.length){if(null==e)for(t in 
this)this.hasOwnProperty(t)&&this[t].on(r,null);return this}},ui.event=null,ui.requote=function(t){return t.replace(Li,\"\\\\$&\")};var Li=/[\\\\\\^\\$\\*\\+\\?\\|\\[\\]\\(\\)\\.\\{\\}]/g,Ti={}.__proto__?function(t,e){t.__proto__=e}:function(t,e){for(var n in e)t[n]=e[n]},zi=function(t,e){return e.querySelector(t)},Si=function(t,e){return e.querySelectorAll(t)},Ei=function(t,e){var n=t.matches||t[w(t,\"matchesSelector\")];return(Ei=function(t,e){return n.call(t,e)})(t,e)};\"function\"==typeof Sizzle&&(zi=function(t,e){return Sizzle(t,e)[0]||null},Si=Sizzle,Ei=Sizzle.matchesSelector),ui.selection=function(){return ui.select(hi.documentElement)};var Ci=ui.selection.prototype=[];Ci.select=function(t){var e,n,r,a,o=[];t=E(t);for(var i=-1,l=this.length;++i<l;){o.push(e=[]),e.parentNode=(r=this[i]).parentNode;for(var s=-1,c=r.length;++s<c;)(a=r[s])?(e.push(n=t.call(a,a.__data__,s,i)),n&&\"__data__\"in a&&(n.__data__=a.__data__)):e.push(null)}return S(o)},Ci.selectAll=function(t){var e,n,r=[];t=C(t);for(var a=-1,o=this.length;++a<o;)for(var i=this[a],l=-1,s=i.length;++l<s;)(n=i[l])&&(r.push(e=di(t.call(n,n.__data__,l,a))),e.parentNode=n);return S(r)};var Oi=\"http://www.w3.org/1999/xhtml\",Pi={svg:\"http://www.w3.org/2000/svg\",xhtml:Oi,xlink:\"http://www.w3.org/1999/xlink\",xml:\"http://www.w3.org/XML/1998/namespace\",xmlns:\"http://www.w3.org/2000/xmlns/\"};ui.ns={prefix:Pi,qualify:function(t){var e=t.indexOf(\":\"),n=t;return e>=0&&\"xmlns\"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),Pi.hasOwnProperty(n)?{space:Pi[n],local:t}:t}},Ci.attr=function(t,e){if(arguments.length<2){if(\"string\"==typeof t){var n=this.node();return t=ui.ns.qualify(t),t.local?n.getAttributeNS(t.space,t.local):n.getAttribute(t)}for(e in t)this.each(O(e,t[e]));return this}return this.each(O(t,e))},Ci.classed=function(t,e){if(arguments.length<2){if(\"string\"==typeof t){var n=this.node(),r=(t=D(t)).length,a=-1;if(e=n.classList){for(;++a<r;)if(!e.contains(t[a]))return!1}else for(e=n.getAttribute(\"class\");++a<r;)if(!N(t[a]).test(e))return!1;return!0}for(e in t)this.each(I(e,t[e]));return this}return this.each(I(t,e))},Ci.style=function(t,e,n){var a=arguments.length;if(3>a){if(\"string\"!=typeof t){2>a&&(e=\"\");for(n in t)this.each(j(n,t[n],e));return this}if(2>a){var o=this.node();return r(o).getComputedStyle(o,null).getPropertyValue(t)}n=\"\"}return this.each(j(t,e,n))},Ci.property=function(t,e){if(arguments.length<2){if(\"string\"==typeof t)return this.node()[t];for(e in t)this.each(q(e,t[e]));return this}return this.each(q(t,e))},Ci.text=function(t){return arguments.length?this.each(\"function\"==typeof t?function(){var e=t.apply(this,arguments);this.textContent=null==e?\"\":e}:null==t?function(){this.textContent=\"\"}:function(){this.textContent=t}):this.node().textContent},Ci.html=function(t){return arguments.length?this.each(\"function\"==typeof t?function(){var e=t.apply(this,arguments);this.innerHTML=null==e?\"\":e}:null==t?function(){this.innerHTML=\"\"}:function(){this.innerHTML=t}):this.node().innerHTML},Ci.append=function(t){return t=F(t),this.select(function(){return this.appendChild(t.apply(t"
+,
+"his,arguments))})},Ci.insert=function(t,e){return t=F(t),e=E(e),this.select(function(){return this.insertBefore(t.apply(this,arguments),e.apply(this,arguments)||null)})},Ci.remove=function(){return this.each(B)},Ci.data=function(t,e){function n(t,n){var r,a,o,i=t.length,u=n.length,d=Math.min(i,u),h=new Array(u),p=new Array(u),g=new Array(i);if(e){var v,m=new f,y=new Array(i);for(r=-1;++r<i;)(a=t[r])&&(m.has(v=e.call(a,a.__data__,r))?g[r]=a:m.set(v,a),y[r]=v);for(r=-1;++r<u;)(a=m.get(v=e.call(n,o=n[r],r)))?a!==!0&&(h[r]=a,a.__data__=o):p[r]=H(o),m.set(v,!0);for(r=-1;++r<i;)r in y&&m.get(y[r])!==!0&&(g[r]=t[r])}else{for(r=-1;++r<d;)a=t[r],o=n[r],a?(a.__data__=o,h[r]=a):p[r]=H(o);for(;u>r;++r)p[r]=H(n[r]);for(;i>r;++r)g[r]=t[r]}p.update=h,p.parentNode=h.parentNode=g.parentNode=t.parentNode,l.push(p),s.push(h),c.push(g)}var r,a,o=-1,i=this.length;if(!arguments.length){for(t=new Array(i=(r=this[0]).length);++o<i;)(a=r[o])&&(t[o]=a.__data__);return t}var l=U([]),s=S([]),c=S([]);if(\"function\"==typeof t)for(;++o<i;)n(r=this[o],t.call(r,r.parentNode.__data__,o));else for(;++o<i;)n(r=this[o],t);return s.enter=function(){return l},s.exit=function(){return c},s},Ci.datum=function(t){return arguments.length?this.property(\"__data__\",t):this.property(\"__data__\")},Ci.filter=function(t){var e,n,r,a=[];\"function\"!=typeof t&&(t=V(t));for(var o=0,i=this.length;i>o;o++){a.push(e=[]),e.parentNode=(n=this[o]).parentNode;for(var l=0,s=n.length;s>l;l++)(r=n[l])&&t.call(r,r.__data__,l,o)&&e.push(r)}return S(a)},Ci.order=function(){for(var t=-1,e=this.length;++t<e;)for(var n,r=this[t],a=r.length-1,o=r[a];--a>=0;)(n=r[a])&&(o&&o!==n.nextSibling&&o.parentNode.insertBefore(n,o),o=n);return this},Ci.sort=function(t){t=Z.apply(this,arguments);for(var e=-1,n=this.length;++e<n;)this[e].sort(t);return this.order()},Ci.each=function(t){return Y(this,function(e,n,r){t.call(e,e.__data__,n,r)})},Ci.call=function(t){var e=di(arguments);return t.apply(e[0]=this,e),this},Ci.empty=function(){return!this.node();\n"
+,
+"},Ci.node=function(){for(var t=0,e=this.length;e>t;t++)for(var n=this[t],r=0,a=n.length;a>r;r++){var o=n[r];if(o)return o}return null},Ci.size=function(){var t=0;return Y(this,function(){++t}),t};var Ni=[];ui.selection.enter=U,ui.selection.enter.prototype=Ni,Ni.append=Ci.append,Ni.empty=Ci.empty,Ni.node=Ci.node,Ni.call=Ci.call,Ni.size=Ci.size,Ni.select=function(t){for(var e,n,r,a,o,i=[],l=-1,s=this.length;++l<s;){r=(a=this[l]).update,i.push(e=[]),e.parentNode=a.parentNode;for(var c=-1,u=a.length;++c<u;)(o=a[c])?(e.push(r[c]=n=t.call(a.parentNode,o.__data__,c,l)),n.__data__=o.__data__):e.push(null)}return S(i)},Ni.insert=function(t,e){return arguments.length<2&&(e=X(this)),Ci.insert.call(this,t,e)},ui.select=function(t){var n;return\"string\"==typeof t?(n=[zi(t,hi)],n.parentNode=hi.documentElement):(n=[t],n.parentNode=e(t)),S([n])},ui.selectAll=function(t){var e;return\"string\"==typeof t?(e=di(Si(t,hi)),e.parentNode=hi.documentElement):(e=di(t),e.parentNode=null),S([e])},Ci.on=function(t,e,n){var r=arguments.length;if(3>r){if(\"string\"!=typeof t){2>r&&(e=!1);for(n in t)this.each(G(n,t[n],e));return this}if(2>r)return(r=this.node()[\"__on\"+t])&&r._;n=!1}return this.each(G(t,e,n))};var Di=ui.map({mouseenter:\"mouseover\",mouseleave:\"mouseout\"});hi&&Di.forEach(function(t){\"on\"+t in hi&&Di.remove(t)});var Ii,Ri=0;ui.mouse=function(t){return J(t,T())};var ji=this.navigator&&/WebKit/.test(this.navigator.userAgent)?-1:0;ui.touch=function(t,e,n){if(arguments.length<3&&(n=e,e=T().changedTouches),e)for(var r,a=0,o=e.length;o>a;++a)if((r=e[a]).identifier===n)return J(t,r)},ui.behavior.drag=function(){function t(){this.on(\"mousedown.drag\",o).on(\"touchstart.drag\",i)}function e(t,e,r,o,i){return function(){function l(){var t,n,r=e(d,g);r&&(t=r[0]-x[0],n=r[1]-x[1],p|=t|n,x=r,h({type:\"drag\",x:r[0]+c[0],y:r[1]+c[1],dx:t,dy:n}))}function s(){e(d,g)&&(m.on(o+v,null).on(i+v,null),y(p),h({type:\"dragend\"}))}var c,u=this,f=ui.event.target.correspondingElement||ui.event.target,d=u.parentNode,h=n.of(u,arguments),p=0,g=t(),v=\".drag\"+(null==g?\"\":\"-\"+g),m=ui.select(r(f)).on(o+v,l).on(i+v,s),y=W(f),x=e(d,g);a?(c=a.apply(u,arguments),c=[c.x-x[0],c.y-x[1]]):c=[0,0],h({type:\"dragstart\"})}}var n=z(t,\"drag\",\"dragstart\",\"dragend\"),a=null,o=e(k,ui.mouse,r,\"mousemove\",\"mouseup\"),i=e(K,ui.touch,b,\"touchmove\",\"touchend\");return t.origin=function(e){return arguments.length?(a=e,t):a},ui.rebind(t,n,\"on\")},ui.touches=function(t,e){return arguments.length<2&&(e=T().touches),e?di(e).map(function(e){var n=J(t,e);return n.identifier=e.identifier,n}):[]};var qi=1e-6,Fi=qi*qi,Bi=Math.PI,Hi=2*Bi,Vi=Hi-qi,Zi=Bi/2,Yi=Bi/180,Ui=180/Bi,Xi=Math.SQRT2,Gi=2,$i=4;ui.interpolateZoom=function(t,e){var n,r,a=t[0],o=t[1],i=t[2],l=e[0],s=e[1],c=e[2],u=l-a,f=s-o,d=u*u+f*f;if(Fi>d)r=Math.log(c/i)/Xi,n=function(t){return[a+t*u,o+t*f,i*Math.exp(Xi*t*r)]};else{var h=Math.sqrt(d),p=(c*c-i*i+$i*d)/(2*i*Gi*h),g=(c*c-i*i-$i*d)/(2*c*Gi*h),v=Math.log(Math.sqrt(p*p+1)-p),m=Math.log(Math.sqrt(g*g+1)-g);r=(m-v)/Xi,n=function(t){var e=t*r,n=ot(v),l=i/(Gi*h)*(n*it(Xi*e+v)-at(v));return[a+l*u,o+l*f,i*n/ot(Xi*e+v)]}}return n.duration=1e3*r,n},ui.behavior.zoom=function(){function t(t){t.on(C,f).on(Wi+\".zoom\",h).on(\"dblclick.zoom\",p).on(N,d)}function e(t){return[(t[0]-M.x)/M.k,(t[1]-M.y)/M.k]}function n(t){return[t[0]*M.k+M.x,t[1]*M.k+M.y]}function a(t){M.k=Math.max(T[0],Math.min(T[1],t))}function o(t,e){e=n(e),M.x+=t[0]-e[0],M.y+=t[1]-e[1]}function 
i(e,n,r,i){e.__chart__={x:M.x,y:M.y,k:M.k},a(Math.pow(2,i)),o(v=n,r),e=ui.select(e),S>0&&(e=e.transition().duration(S)),e.call(t.event)}function l(){_&&_.domain(b.range().map(function(t){return(t-M.x)/M.k}).map(b.invert)),k&&k.domain(w.range().map(function(t){return(t-M.y)/M.k}).map(w.invert))}function s(t){E++||t({type:\"zoomstart\"})}function c(t){l(),t({type:\"zoom\",scale:M.k,translate:[M.x,M.y]})}function u(t){--E||(t({type:\"zoomend\"}),v=null)}function f(){function t(){l=1,o(ui.mouse(a),d),c(i)}function n(){f.on(O,null).on(P,null),h(l),u(i)}var a=this,i=D.of(a,arguments),l=0,f=ui.select(r(a)).on(O,t).on(P,n),d=e(ui.mouse(a)),h=W(a);Zs.call(a),s(i)}function d(){function t(){var t=ui.touches(p);return h=M.k,t.forEach(function(t){t.identifier in v&&(v[t.identifier]=e(t))}),t}function n(){var e=ui.event.target;ui.select(e).on(b,r).on(_,l),w.push(e);for(var n=ui.event.changedTouches,a=0,o=n.length;o>a;++a)v[n[a].identifier]=null;var s=t(),c=Date.now();if(1===s.length){if(500>c-x){var u=s[0];i(p,u,v[u.identifier],Math.floor(Math.log(M.k)/Math.LN2)+1),L()}x=c}else if(s.length>1){var u=s[0],f=s[1],d=u[0]-f[0],h=u[1]-f[1];m=d*d+h*h}}function r(){var t,e,n,r,i=ui.touches(p);Zs.call(p);for(var l=0,s=i.length;s>l;++l,r=null)if(n=i[l],r=v[n.identifier]){if(e)break;t=n,e=r}if(r){var u=(u=n[0]-t[0])*u+(u=n[1]-t[1])*u,f=m&&Math.sqrt(u/m);t=[(t[0]+n[0])/2,(t[1]+n[1])/2],e=[(e[0]+r[0])/2,(e[1]+r[1])/2],a(f*h)}x=null,o(t,e),c(g)}function l(){if(ui.event.touches.length){for(var e=ui.event.changedTouches,n=0,r=e.length;r>n;++n)delete v[e[n].identifier];for(var a in v)return void t()}ui.selectAll(w).on(y,null),k.on(C,f).on(N,d),A(),u(g)}var h,p=this,g=D.of(p,arguments),v={},m=0,y=\".zoom-\"+ui.event.changedTouches[0].identifier,b=\"touchmove\"+y,_=\"touchend\"+y,w=[],k=ui.select(p),A=W(p);n(),s(g),k.on(C,null).on(N,n)}function h(){var t=D.of(this,arguments);y?clearTimeout(y):(Zs.call(this),g=e(v=m||ui.mouse(this)),s(t)),y=setTimeout(function(){y=null,u(t)},50),L(),a(Math.pow(2,.002*Qi())*M.k),o(v,g),c(t)}function p(){var t=ui.mouse(this),n=Math.log(M.k)/Math.LN2;i(this,t,e(t),ui.event.shiftKey?Math.ceil(n)-1:Math.floor(n)+1)}var g,v,m,y,x,b,_,w,k,M={x:0,y:0,k:1},A=[960,500],T=Ji,S=250,E=0,C=\"mousedown.zoom\",O=\"mousemove.zoom\",P=\"mouseup.zoom\",N=\"touchstart.zoom\",D=z(t,\"zoomstart\",\"zoom\",\"zoomend\");return Wi||(Wi=\"onwheel\"in hi?(Qi=function(){return-ui.event.deltaY*(ui.event.deltaMode?120:1)},\"wheel\"):\"onmousewheel\"in hi?(Qi=function(){return ui.event.wheelDelta},\"mousewheel\"):(Qi=function(){return-ui.event.detail},\"MozMousePixelScroll\")),t.event=function(t){t.each(function(){var t=D.of(this,arguments),e=M;Hs?ui.select(this).transition().each(\"start.zoom\",function(){M=this.__chart__||{x:0,y:0,k:1},s(t)}).tween(\"zoom:zoom\",function(){var n=A[0],r=A[1],a=v?v[0]:n/2,o=v?v[1]:r/2,i=ui.interpolateZoom([(a-M.x)/M.k,(o-M.y)/M.k,n/M.k],[(a-e.x)/e.k,(o-e.y)/e.k,n/e.k]);return function(e){var r=i(e),l=n/r[2];this.__chart__=M={x:a-r[0]*l,y:o-r[1]*l,k:l},c(t)}}).each(\"interrupt.zoom\",function(){u(t)}).each(\"end.zoom\",function(){u(t)}):(this.__chart__=M,s(t),c(t),u(t))})},t.translate=function(e){return arguments.length?(M={x:+e[0],y:+e[1],k:M.k},l(),t):[M.x,M.y]},t.scale=function(e){return arguments.length?(M={x:M.x,y:M.y,k:null},a(+e),l(),t):M.k},t.scaleExtent=function(e){return arguments.length?(T=null==e?Ji:[+e[0],+e[1]],t):T},t.center=function(e){return arguments.length?(m=e&&[+e[0],+e[1]],t):m},t.size=function(e){return 
arguments.length?(A=e&&[+e[0],+e[1]],t):A},t.duration=function(e){return arguments.length?(S=+e,t):S},t.x=function(e){return arguments.length?(_=e,b=e.copy(),M={x:0,y:0,k:1},t):_},t.y=function(e){return arguments.length?(k=e,w=e.copy(),M={x:0,y:0,k:1},t):k},ui.rebind(t,D,\"on\")};var Qi,Wi,Ji=[0,1/0];ui.color=st,st.prototype.toString=function(){return this.rgb()+\"\"},ui.hsl=ct;var Ki=ct.prototype=new st;Ki.brighter=function(t){return t=Math.pow(.7,arguments.length?t:1),new ct(this.h,this.s,this.l/t)},Ki.darker=function(t){return t=Math.pow(.7,arguments.length?t:1),new ct(this.h,this.s,t*this.l)},Ki.rgb=function(){return ut(this.h,this.s,this.l)},ui.hcl=ft;var tl=ft.prototype=new st;tl.brighter=function(t){return new ft(this.h,this.c,Math.min(100,this.l+el*(arguments.length?t:1)))},tl.darker=function(t){return new ft(this.h,this.c,Math.max(0,this.l-el*(arguments.length?t:1)))},tl.rgb=function(){return dt(this.h,this.c,this.l).rgb()},ui.lab=ht;var el=18,nl=.95047,rl=1,al=1.08883,ol=ht.prototype=new st;ol.brighter=function(t){return new ht(Math.min(100,this.l+el*(arguments.length?t:1)),this.a,this.b)},ol.darker=function(t){return new ht(Math.max(0,this.l-el*(arguments.length?t:1)),this.a,this.b)},ol.rgb=function(){return pt(this.l,this.a,this.b)},ui.rgb=xt;var il=xt.prototype=new st;il.brighter=function(t){t=Math.pow(.7,arguments.length?t:1);var e=this.r,n=this.g,r=this.b,a=30;return e||n||r?(e&&a>e&&(e=a),n&&a>n&&(n=a),r&&a>r&&(r=a),new xt(Math.min(255,e/t),Math.min(255,n/t),Math.min(255,r/t))):new xt(a,a,a)},il.darker=function(t){return t=Math.pow(.7,arguments.length?t:1),new xt(t*this.r,t*this.g,t*this.b)},il.hsl=function(){return Mt(this.r,this.g,this.b)},il.toString=function(){return\"#\"+wt(this.r)+wt(this.g)+wt(this.b)};var ll=ui.map({aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsa"
+,
+"lmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074});ll.forEach(function(t,e){ll.set(t,bt(e))}),ui.functor=zt,ui.xhr=St(b),ui.dsv=function(t,e){function n(t,n,o){arguments.length<3&&(o=n,n=null);var i=Et(t,e,null==n?r:a(n),o);return i.row=function(t){return arguments.length?i.response(null==(n=t)?r:a(t)):n},i}function r(t){return n.parse(t.responseText)}function a(t){return function(e){return n.parse(e.responseText,t)}}function o(e){return e.map(i).join(t)}function i(t){return l.test(t)?'\"'+t.replace(/\\\"/g,'\"\"')+'\"':t}var l=new RegExp('[\"'+t+\"\\n]\"),s=t.charCodeAt(0);return n.parse=function(t,e){var r;return n.parseRows(t,function(t,n){if(r)return r(t,n-1);var a=new Function(\"d\",\"return {\"+t.map(function(t,e){return JSON.stringify(t)+\": d[\"+e+\"]\"}).join(\",\")+\"}\");r=e?function(t,n){return e(a(t),n)}:a})},n.parseRows=function(t,e){function n(){if(u>=c)return i;if(a)return a=!1,o;var e=u;if(34===t.charCodeAt(e)){for(var n=e;n++<c;)if(34===t.charCodeAt(n)){if(34!==t.charCodeAt(n+1))break;++n}u=n+2;var r=t.charCodeAt(n+1);return 13===r?(a=!0,10===t.charCodeAt(n+2)&&++u):10===r&&(a=!0),t.slice(e+1,n).replace(/\"\"/g,'\"')}for(;c>u;){var r=t.charCodeAt(u++),l=1;if(10===r)a=!0;else if(13===r)a=!0,10===t.charCodeAt(u)&&(++u,++l);else if(r!==s)continue;return t.slice(e,u-l)}return t.slice(e)}for(var r,a,o={},i={},l=[],c=t.length,u=0,f=0;(r=n())!==i;){for(var d=[];r!==o&&r!==i;)d.push(r),r=n();e&&null==(d=e(d,f++))||l.push(d)}return l},n.format=function(e){if(Array.isArray(e[0]))return n.formatRows(e);var r=new x,a=[];return e.forEach(function(t){for(var e in t)r.has(e)||a.push(r.add(e))}),[a.map(i).join(t)].concat(e.map(function(e){return a.map(function(t){return i(e[t])}).join(t)})).join(\"\\n\")},n.formatRows=function(t){return t.map(o).join(\"\\n\")},n},ui.csv=ui.dsv(\",\",\"text/csv\"),ui.tsv=ui.dsv(\" \",\"text/tab-separated-values\");var sl,cl,ul,fl,dl=this[w(this,\"requestAnimationFrame\")]||function(t){setTimeout(t,17)};ui.timer=function(){Pt.apply(this,arguments)},ui.timer.flush=function(){Dt(),It()},ui.round=function(t,e){return e?Math.round(t*(e=Math.pow(10,e)))/e:Math.round(t)};var 
hl=[\"y\",\"z\",\"a\",\"f\",\"p\",\"n\",\"\\xb5\",\"m\",\"\",\"k\",\"M\",\"G\",\"T\",\"P\",\"E\",\"Z\",\"Y\"].map(jt);ui.formatPrefix=function(t,e){var n=0;return(t=+t)&&(0>t&&(t*=-1),e&&(t=ui.round(t,Rt(t,e))),n=1+Math.floor(1e-12+Math.log(t)/Math.LN10),n=Math.max(-24,Math.min(24,3*Math.floor((n-1)/3)))),hl[8+n/3]};var pl=/(?:([^{])?([<>=^]))?([+\\- ])?([$#])?(0)?(\\d+)?(,)?(\\.-?\\d+)?([a-z%])?/i,gl=ui.map({b:function(t){return t.toString(2)},c:function(t){return String.fromCharCode(t)},o:function(t){return t.toString(8)},x:function(t){return t.toString(16)},X:function(t){return t.toString(16).toUpperCase()},g:function(t,e){return t.toPrecision(e)},e:function(t,e){return t.toExponential(e)},f:function(t,e){return t.toFixed(e)},r:function(t,e){return(t=ui.round(t,Rt(t,e))).toFixed(Math.max(0,Math.min(20,Rt(t*(1+1e-15),e))))}}),vl=ui.time={},ml=Date;Bt.prototype={getDate:function(){return this._.getUTCDate()},getDay:function(){return this._.getUTCDay()},getFullYear:function(){return this._.getUTCFullYear()},getHours:function(){return this._.getUTCHours()},getMilliseconds:function(){return this._.getUTCMilliseconds()},getMinutes:function(){return this._.getUTCMinutes()},getMonth:function(){return this._.getUTCMonth()},getSeconds:function(){return this._.getUTCSeconds()},getTime:function(){return this._.getTime()},getTimezoneOffset:function(){return 0},valueOf:function(){return this._.valueOf()},setDate:function(){yl.setUTCDate.apply(this._,arguments)},setDay:function(){yl.setUTCDay.apply(this._,arguments)},setFullYear:function(){yl.setUTCFullYear.apply(this._,arguments)},setHours:function(){yl.setUTCHours.apply(this._,arguments)},setMilliseconds:function(){yl.setUTCMilliseconds.apply(this._,arguments)},setMinutes:function(){yl.setUTCMinutes.apply(this._,arguments)},setMonth:function(){yl.setUTCMonth.apply(this._,arguments)},setSeconds:function(){yl.setUTCSeconds.apply(this._,arguments)},setTime:function(){yl.setTime.apply(this._,arguments)}};var yl=Date.prototype;vl.year=Ht(function(t){return t=vl.day(t),t.setMonth(0,1),t},function(t,e){t.setFullYear(t.getFullYear()+e)},function(t){return t.getFullYear()}),vl.years=vl.year.range,vl.years.utc=vl.year.utc.range,vl.day=Ht(function(t){var e=new ml(2e3,0);return e.setFullYear(t.getFullYear(),t.getMonth(),t.getDate()),e},function(t,e){t.setDate(t.getDate()+e)},function(t){return t.getDate()-1}),vl.days=vl.day.range,vl.days.utc=vl.day.utc.range,vl.dayOfYear=function(t){var e=vl.year(t);return Math.floor((t-e-6e4*(t.getTimezoneOffset()-e.getTimezoneOffset()))/864e5)},[\"sunday\",\"monday\",\"tuesday\",\"wednesday\",\"thursday\",\"friday\",\"saturday\"].forEach(function(t,e){e=7-e;var n=vl[t]=Ht(function(t){return(t=vl.day(t)).setDate(t.getDate()-(t.getDay()+e)%7),t},function(t,e){t.setDate(t.getDate()+7*Math.floor(e))},function(t){var n=vl.year(t).getDay();return Math.floor((vl.dayOfYear(t)+(n+e)%7)/7)-(n!==e)});vl[t+\"s\"]=n.range,vl[t+\"s\"].utc=n.utc.range,vl[t+\"OfYear\"]=function(t){var n=vl.year(t).getDay();return Math.floor((vl.dayOfYear(t)+(n+e)%7)/7)}}),vl.week=vl.sunday,vl.weeks=vl.sunday.range,vl.weeks.utc=vl.sunday.utc.range,vl.weekOfYear=vl.sundayOfYear;var xl={\"-\":\"\",_:\" \",0:\"0\"},bl=/^\\s*\\d+/,_l=/^%/;ui.locale=function(t){return{numberFormat:qt(t),timeFormat:Zt(t)}};var wl=ui.locale({decimal:\".\",thousands:\",\",grouping:[3],currency:[\"$\",\"\"],dateTime:\"%a %b %e %X 
%Y\",date:\"%m/%d/%Y\",time:\"%H:%M:%S\",periods:[\"AM\",\"PM\"],days:[\"Sunday\",\"Monday\",\"Tuesday\",\"Wednesday\",\"Thursday\",\"Friday\",\"Saturday\"],shortDays:[\"Sun\",\"Mon\",\"Tue\",\"Wed\",\"Thu\",\"Fri\",\"Sat\"],months:[\"January\",\"February\",\"March\",\"April\",\"May\",\"June\",\"July\",\"August\",\"September\",\"October\",\"November\",\"December\"],shortMonths:[\"Jan\",\"Feb\",\"Mar\",\"Apr\",\"May\",\"Jun\",\"Jul\",\"Aug\",\"Sep\",\"Oct\",\"Nov\",\"Dec\"]});ui.format=wl.numberFormat,ui.geo={},fe.prototype={s:0,t:0,add:function(t){de(t,this.t,kl),de(kl.s,this.s,this),this.s?this.t+=kl.t:this.s=kl.t},reset:function(){this.s=this.t=0},valueOf:function(){return this.s}};var kl=new fe;ui.geo.stream=function(t,e){t&&Ml.hasOwnProperty(t.type)?Ml[t.type](t,e):he(t,e)};var Ml={Feature:function(t,e){he(t.geometry,e)},FeatureCollection:function(t,e){for(var n=t.features,r=-1,a=n.length;++r<a;)he(n[r].geometry,e)}},Al={Sphere:function(t,e){e.sphere()},Point:function(t,e){t=t.coordinates,e.point(t[0],t[1],t[2])},MultiPoint:function(t,e){for(var n=t.coordinates,r=-1,a=n.length;++r<a;)t=n[r],e.point(t[0],t[1],t[2])},LineString:function(t,e){pe(t.coordinates,e,0)},MultiLineString:function(t,e){for(var n=t.coordinates,r=-1,a=n.length;++r<a;)pe(n[r],e,0)},Polygon:function(t,e){ge(t.coordinates,e)},MultiPolygon:function(t,e){for(var n=t.coordinates,r=-1,a=n.length;++r<a;)ge(n[r],e)},GeometryCollection:function(t,e){for(var n=t.geometries,r=-1,a=n.length;++r<a;)he(n[r],e)}};ui.geo.area=function(t){return Ll=0,ui.geo.stream(t,zl),Ll};var Ll,Tl=new fe,zl={sphere:function(){Ll+=4*Bi},point:k,lineStart:k,lineEnd:k,polygonStart:function(){Tl.reset(),zl.lineStart=ve},polygonEnd:function(){var t=2*Tl;Ll+=0>t?4*Bi+t:t,zl.lineStart=zl.lineEnd=zl.point=k}};ui.geo.bounds=function(){function t(t,e){x.push(b=[u=t,d=t]),f>e&&(f=e),e>h&&(h=e)}function e(e,n){var r=me([e*Yi,n*Yi]);if(m){var a=xe(m,r),o=[a[1],-a[0],0],i=xe(o,a);we(i),i=ke(i);var s=e-p,c=s>0?1:-1,g=i[0]*Ui*c,v=wi(s)>180;if(v^(g>c*p&&c*e>g)){var y=i[1]*Ui;y>h&&(h=y)}else if(g=(g+360)%360-180,v^(g>c*p&&c*e>g)){var y=-i[1]*Ui;f>y&&(f=y)}else f>n&&(f=n),n>h&&(h=n);v?p>e?l(u,e)>l(u,d)&&(d=e):l(e,d)>l(u,d)&&(u=e):d>=u?(u>e&&(u=e),e>d&&(d=e)):e>p?l(u,e)>l(u,d)&&(d=e):l(e,d)>l(u,d)&&(u=e)}else t(e,n);m=r,p=e}function n(){_.point=e}function r(){b[0]=u,b[1]=d,_.point=t,m=null}function a(t,n){if(m){var r=t-p;y+=wi(r)>180?r+(r>0?360:-360):r}else g=t,v=n;zl.point(t,n),e(t,n)}function o(){zl.lineStart()}function i(){a(g,v),zl.lineEnd(),wi(y)>qi&&(u=-(d=180)),b[0]=u,b[1]=d,m=null}function l(t,e){return(e-=t)<0?e+360:e}function s(t,e){return t[0]-e[0]}function c(t,e){return e[0]<=e[1]?e[0]<=t&&t<=e[1]:t<e[0]||e[1]<t}var u,f,d,h,p,g,v,m,y,x,b,_={point:t,lineStart:n,lineEnd:r,polygonStart:function(){_.point=a,_.lineStart=o,_.lineEnd=i,y=0,zl.polygonStart()},polygonEnd:function(){zl.polygonEnd(),_.point=t,_.lineStart=n,_.lineEnd=r,0>Tl?(u=-(d=180),f=-(h=90)):y>qi?h=90:-qi>y&&(f=-90),b[0]=u,b[1]=d}};return function(t){h=d=-(u=f=1/0),x=[],ui.geo.stream(t,_);var e=x.length;if(e){x.sort(s);for(var n,r=1,a=x[0],o=[a];e>r;++r)n=x[r],c(n[0],a)||c(n[1],a)?(l(a[0],n[1])>l(a[0],a[1])&&(a[1]=n[1]),l(n[0],a[1])>l(a[0],a[1])&&(a[0]=n[0])):o.push(a=n);for(var i,n,p=-(1/0),e=o.length-1,r=0,a=o[e];e>=r;a=n,++r)n=o[r],(i=l(a[1],n[0]))>p&&(p=i,u=n[0],d=a[1])}return x=b=null,u===1/0||f===1/0?[[NaN,NaN],[NaN,NaN]]:"
+,
+"[[u,f],[d,h]]}}(),ui.geo.centroid=function(t){Sl=El=Cl=Ol=Pl=Nl=Dl=Il=Rl=jl=ql=0,ui.geo.stream(t,Fl);var e=Rl,n=jl,r=ql,a=e*e+n*n+r*r;return Fi>a&&(e=Nl,n=Dl,r=Il,qi>El&&(e=Cl,n=Ol,r=Pl),a=e*e+n*n+r*r,Fi>a)?[NaN,NaN]:[Math.atan2(n,e)*Ui,rt(r/Math.sqrt(a))*Ui]};var Sl,El,Cl,Ol,Pl,Nl,Dl,Il,Rl,jl,ql,Fl={sphere:k,point:Ae,lineStart:Te,lineEnd:ze,polygonStart:function(){Fl.lineStart=Se},polygonEnd:function(){Fl.lineStart=Te}},Bl=De(Ce,qe,Be,[-Bi,-Bi/2]),Hl=1e9;ui.geo.clipExtent=function(){var t,e,n,r,a,o,i={stream:function(t){return a&&(a.valid=!1),a=o(t),a.valid=!0,a},extent:function(l){return arguments.length?(o=Ye(t=+l[0][0],e=+l[0][1],n=+l[1][0],r=+l[1][1]),a&&(a.valid=!1,a=null),i):[[t,e],[n,r]]}};return i.extent([[0,0],[960,500]])},(ui.geo.conicEqualArea=function(){return Ue(Xe)}).raw=Xe,ui.geo.albers=function(){return ui.geo.conicEqualArea().rotate([96,0]).center([-.6,38.7]).parallels([29.5,45.5]).scale(1070)},ui.geo.albersUsa=function(){function t(t){var o=t[0],i=t[1];return e=null,n(o,i),e||(r(o,i),e)||a(o,i),e}var e,n,r,a,o=ui.geo.albers(),i=ui.geo.conicEqualArea().rotate([154,0]).center([-2,58.5]).parallels([55,65]),l=ui.geo.conicEqualArea().rotate([157,0]).center([-3,19.9]).parallels([8,18]),s={point:function(t,n){e=[t,n]}};return t.invert=function(t){var e=o.scale(),n=o.translate(),r=(t[0]-n[0])/e,a=(t[1]-n[1])/e;return(a>=.12&&.234>a&&r>=-.425&&-.214>r?i:a>=.166&&.234>a&&r>=-.214&&-.115>r?l:o).invert(t)},t.stream=function(t){var e=o.stream(t),n=i.stream(t),r=l.stream(t);return{point:function(t,a){e.point(t,a),n.point(t,a),r.point(t,a)},sphere:function(){e.sphere(),n.sphere(),r.sphere()},lineStart:function(){e.lineStart(),n.lineStart(),r.lineStart()},lineEnd:function(){e.lineEnd(),n.lineEnd(),r.lineEnd()},polygonStart:function(){e.polygonStart(),n.polygonStart(),r.polygonStart()},polygonEnd:function(){e.polygonEnd(),n.polygonEnd(),r.polygonEnd()}}},t.precision=function(e){return arguments.length?(o.precision(e),i.precision(e),l.precision(e),t):o.precision()},t.scale=function(e){return arguments.length?(o.scale(e),i.scale(.35*e),l.scale(e),t.translate(o.translate())):o.scale()},t.translate=function(e){if(!arguments.length)return o.translate();var c=o.scale(),u=+e[0],f=+e[1];return n=o.translate(e).clipExtent([[u-.455*c,f-.238*c],[u+.455*c,f+.238*c]]).stream(s).point,r=i.translate([u-.307*c,f+.201*c]).clipExtent([[u-.425*c+qi,f+.12*c+qi],[u-.214*c-qi,f+.234*c-qi]]).stream(s).point,a=l.translate([u-.205*c,f+.212*c]).clipExtent([[u-.214*c+qi,f+.166*c+qi],[u-.115*c-qi,f+.234*c-qi]]).stream(s).point,t},t.scale(1070)};var Vl,Zl,Yl,Ul,Xl,Gl,$l={point:k,lineStart:k,lineEnd:k,polygonStart:function(){Zl=0,$l.lineStart=Ge},polygonEnd:function(){$l.lineStart=$l.lineEnd=$l.point=k,Vl+=wi(Zl/2)}},Ql={point:$e,lineStart:k,lineEnd:k,polygonStart:k,polygonEnd:k},Wl={point:Je,lineStart:Ke,lineEnd:tn,polygonStart:function(){Wl.lineStart=en},polygonEnd:function(){Wl.point=Je,Wl.lineStart=Ke,Wl.lineEnd=tn}};ui.geo.path=function(){function t(t){return t&&(\"function\"==typeof l&&o.pointRadius(+l.apply(this,arguments)),i&&i.valid||(i=a(o)),ui.geo.stream(t,i)),o.result()}function e(){return i=null,t}var n,r,a,o,i,l=4.5;return t.area=function(t){return Vl=0,ui.geo.stream(t,a($l)),Vl},t.centroid=function(t){return Cl=Ol=Pl=Nl=Dl=Il=Rl=jl=ql=0,ui.geo.stream(t,a(Wl)),ql?[Rl/ql,jl/ql]:Il?[Nl/Il,Dl/Il]:Pl?[Cl/Pl,Ol/Pl]:[NaN,NaN]},t.bounds=function(t){return Xl=Gl=-(Yl=Ul=1/0),ui.geo.stream(t,a(Ql)),[[Yl,Ul],[Xl,Gl]]},t.projection=function(t){return 
arguments.length?(a=(n=t)?t.stream||an(t):b,e()):n},t.context=function(t){return arguments.length?(o=null==(r=t)?new Qe:new nn(t),\"function\"!=typeof l&&o.pointRadius(l),e()):r},t.pointRadius=function(e){return arguments.length?(l=\"function\"==typeof e?e:(o.pointRadius(+e),+e),t):l},t.projection(ui.geo.albersUsa()).context(null)},ui.geo.transform=function(t){return{stream:function(e){var n=new on(e);for(var r in t)n[r]=t[r];return n}}},on.prototype={point:function(t,e){this.stream.point(t,e)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}},ui.geo.projection=sn,ui.geo.projectionMutator=cn,(ui.geo.equirectangular=function(){return sn(fn)}).raw=fn.invert=fn,ui.geo.rotation=function(t){function e(e){return e=t(e[0]*Yi,e[1]*Yi),e[0]*=Ui,e[1]*=Ui,e}return t=hn(t[0]%360*Yi,t[1]*Yi,t.length>2?t[2]*Yi:0),e.invert=function(e){return e=t.invert(e[0]*Yi,e[1]*Yi),e[0]*=Ui,e[1]*=Ui,e},e},dn.invert=fn,ui.geo.circle=function(){function t(){var t=\"function\"==typeof r?r.apply(this,arguments):r,e=hn(-t[0]*Yi,-t[1]*Yi,0).invert,a=[];return n(null,null,1,{point:function(t,n){a.push(t=e(t,n)),t[0]*=Ui,t[1]*=Ui}}),{type:\"Polygon\",coordinates:[a]}}var e,n,r=[0,0],a=6;return t.origin=function(e){return arguments.length?(r=e,t):r},t.angle=function(r){return arguments.length?(n=mn((e=+r)*Yi,a*Yi),t):e},t.precision=function(r){return arguments.length?(n=mn(e*Yi,(a=+r)*Yi),t):a},t.angle(90)},ui.geo.distance=function(t,e){var n,r=(e[0]-t[0])*Yi,a=t[1]*Yi,o=e[1]*Yi,i=Math.sin(r),l=Math.cos(r),s=Math.sin(a),c=Math.cos(a),u=Math.sin(o),f=Math.cos(o);return Math.atan2(Math.sqrt((n=f*i)*n+(n=c*u-s*f*l)*n),s*u+c*f*l)},ui.geo.graticule=function(){function t(){return{type:\"MultiLineString\",coordinates:e()}}function e(){return ui.range(Math.ceil(o/v)*v,a,v).map(d).concat(ui.range(Math.ceil(c/m)*m,s,m).map(h)).concat(ui.range(Math.ceil(r/p)*p,n,p).filter(function(t){return wi(t%v)>qi}).map(u)).concat(ui.range(Math.ceil(l/g)*g,i,g).filter(function(t){return wi(t%m)>qi}).map(f))}var n,r,a,o,i,l,s,c,u,f,d,h,p=10,g=p,v=90,m=360,y=2.5;return t.lines=function(){return e().map(function(t){return{type:\"LineString\",coordinates:t}})},t.outline=function(){return{type:\"Polygon\",coordinates:[d(o).concat(h(s).slice(1),d(a).reverse().slice(1),h(c).reverse().slice(1))]}},t.extent=function(e){return arguments.length?t.majorExtent(e).minorExtent(e):t.minorExtent()},t.majorExtent=function(e){return arguments.length?(o=+e[0][0],a=+e[1][0],c=+e[0][1],s=+e[1][1],o>a&&(e=o,o=a,a=e),c>s&&(e=c,c=s,s=e),t.precision(y)):[[o,c],[a,s]]},t.minorExtent=function(e){return arguments.length?(r=+e[0][0],n=+e[1][0],l=+e[0][1],i=+e[1][1],r>n&&(e=r,r=n,n=e),l>i&&(e=l,l=i,i=e),t.precision(y)):[[r,l],[n,i]]},t.step=function(e){return arguments.length?t.majorStep(e).minorStep(e):t.minorStep()},t.majorStep=function(e){return arguments.length?(v=+e[0],m=+e[1],t):[v,m]},t.minorStep=function(e){return arguments.length?(p=+e[0],g=+e[1],t):[p,g]},t.precision=function(e){return arguments.length?(y=+e,u=xn(l,i,90),f=bn(r,n,y),d=xn(c,s,90),h=bn(o,a,y),t):y},t.majorExtent([[-180,-90+qi],[180,90-qi]]).minorExtent([[-180,-80-qi],[180,80+qi]])},ui.geo.greatArc=function(){function t(){return{type:\"LineString\",coordinates:[e||r.apply(this,arguments),n||a.apply(this,arguments)]}}var e,n,r=_n,a=wn;return t.distance=function(){return 
ui.geo.distance(e||r.apply(this,arguments),n||a.apply(this,arguments))},t.source=function(n){return arguments.length?(r=n,e=\"function\"==typeof n?null:n,t):r},t.target=function(e){return arguments.length?(a=e,n=\"function\"==typeof e?null:e,t):a},t.precision=function(){return arguments.length?t:0},t},ui.geo.interpolate=function(t,e){return kn(t[0]*Yi,t[1]*Yi,e[0]*Yi,e[1]*Yi)},ui.geo.length=function(t){return Jl=0,ui.geo.stream(t,Kl),Jl};var Jl,Kl={sphere:k,point:k,lineStart:Mn,lineEnd:k,polygonStart:k,polygonEnd:k},ts=An(function(t){return Math.sqrt(2/(1+t))},function(t){return 2*Math.asin(t/2)});(ui.geo.azimuthalEqualArea=function(){return sn(ts)}).raw=ts;var es=An(function(t){var e=Math.acos(t);return e&&e/Math.sin(e)},b);(ui.geo.azimuthalEquidistant=function(){return sn(es)}).raw=es,(ui.geo.conicConformal=function(){return Ue(Ln)}).raw=Ln,(ui.geo.conicEquidistant=function(){return Ue(Tn)}).raw=Tn;var ns=An(function(t){return 1/t},Math.atan);(ui.geo.gnomonic=function(){return sn(ns)}).raw=ns,zn.invert=function(t,e){return[t,2*Math.atan(Math.exp(e))-Zi]},(ui.geo.mercator=function(){return Sn(zn)}).raw=zn;var rs=An(function(){return 1},Math.asin);(ui.geo.orthographic=function(){return sn(rs)}).raw=rs;var as=An(function(t){return 1/(1+t)},function(t){return 2*Math.atan(t)});(ui.geo.stereographic=function(){return sn(as)}).raw=as,En.invert=function(t,e){return[-e,2*Math.atan(Math.exp(t))-Zi]},(ui.geo.transverseMercator=function(){var t=Sn(En),e=t.center,n=t.rotate;return t.center=function(t){return t?e([-t[1],t[0]]):(t=e(),[t[1],-t[0]])},t.rotate=function(t){return t?n([t[0],t[1],t.length>2?t[2]+90:90]):(t=n(),[t[0],t[1],t[2]-90])},n([0,0,90])}).raw=En,ui.geom={},ui.geom.hull=function(t){function e(t){if(t.length<3)return[];var e,a=zt(n),o=zt(r),i=t.length,l=[],s=[];for(e=0;i>e;e++)l.push([+a.call(this,t[e],e),+o.call(this,t[e],e),e]);for(l.sort(Nn),e=0;i>e;e++)s.push([l[e][0],-l[e][1]]);var c=Pn(l),u=Pn(s),f=u[0]===c[0],d=u[u.length-1]===c[c.length-1],h=[];for(e=c.length-1;e>=0;--e)h.push(t[l[c[e]][2]]);for(e=+f;e<u.length-d;++e)h.push(t[l[u[e]][2]]);return h}var n=Cn,r=On;return arguments.length?e(t):(e.x=function(t){return arguments.length?(n=t,e):n},e.y=function(t){return arguments.length?(r=t,e):r},e)},ui.geom.polygon=function(t){return Ti(t,os),t};var os=ui.geom.polygon.prototype=[];os.area=function(){for(var t,e=-1,n=this.length,r=this[n-1],a=0;++e<n;)t=r,r=this[e],a+=t[1]*r[0]-t[0]*r[1];return.5*a},os.centroid=function(t){var e,n,r=-1,a=this.length,o=0,i=0,l=this[a-1];for(arguments.length||(t=-1/(6*this.area()));++r<a;)e=l,l=this[r],n=e[0]*l[1]-l[0]*e[1],o+=(e[0]+l[0])*n,i+=(e[1]+l[1])*n;return[o*t,i*t]},os.clip=function(t){for(var e,n,r,a,o,i,l=Rn(t),s=-1,c=this.length-Rn(this),u=this[c-1];++s<c;){for(e=t.slice(),t.length=0,a=this[s],o=e[(r=e.length-l)-1],n=-1;++n<r;)i=e[n],Dn(i,u,a)?(Dn(o,u,a)||t.push(In(o,i,u,a)),t.push(i)):Dn(o,u,a)&&t.push(In(o,i,u,a)),o=i;l&&t.push(t[0]),u=a}return t};var is,ls,ss,cs,us,fs=[],ds=[];Yn.prototype.prepare=function(){for(var t,e=this.edges,n=e"
+,
+".length;n--;)t=e[n].edge,t.b&&t.a||e.splice(n,1);return e.sort(Xn),e.length},rr.prototype={start:function(){return this.edge.l===this.site?this.edge.a:this.edge.b},end:function(){return this.edge.l===this.site?this.edge.b:this.edge.a}},ar.prototype={insert:function(t,e){var n,r,a;if(t){if(e.P=t,e.N=t.N,t.N&&(t.N.P=e),t.N=e,t.R){for(t=t.R;t.L;)t=t.L;t.L=e}else t.R=e;n=t}else this._?(t=sr(this._),e.P=null,e.N=t,t.P=t.L=e,n=t):(e.P=e.N=null,this._=e,n=null);for(e.L=e.R=null,e.U=n,e.C=!0,t=e;n&&n.C;)r=n.U,n===r.L?(a=r.R,a&&a.C?(n.C=a.C=!1,r.C=!0,t=r):(t===n.R&&(ir(this,n),t=n,n=t.U),n.C=!1,r.C=!0,lr(this,r))):(a=r.L,a&&a.C?(n.C=a.C=!1,r.C=!0,t=r):(t===n.L&&(lr(this,n),t=n,n=t.U),n.C=!1,r.C=!0,ir(this,r))),n=t.U;this._.C=!1},remove:function(t){t.N&&(t.N.P=t.P),t.P&&(t.P.N=t.N),t.N=t.P=null;var e,n,r,a=t.U,o=t.L,i=t.R;if(n=o?i?sr(i):o:i,a?a.L===t?a.L=n:a.R=n:this._=n,o&&i?(r=n.C,n.C=t.C,n.L=o,o.U=n,n!==i?(a=n.U,n.U=t.U,t=n.R,a.L=t,n.R=i,i.U=n):(n.U=a,a=n,t=n.R)):(r=t.C,t=n),t&&(t.U=a),!r){if(t&&t.C)return void(t.C=!1);do{if(t===this._)break;if(t===a.L){if(e=a.R,e.C&&(e.C=!1,a.C=!0,ir(this,a),e=a.R),e.L&&e.L.C||e.R&&e.R.C){e.R&&e.R.C||(e.L.C=!1,e.C=!0,lr(this,e),e=a.R),e.C=a.C,a.C=e.R.C=!1,ir(this,a),t=this._;break}}else if(e=a.L,e.C&&(e.C=!1,a.C=!0,lr(this,a),e=a.L),e.L&&e.L.C||e.R&&e.R.C){e.L&&e.L.C||(e.R.C=!1,e.C=!0,ir(this,e),e=a.L),e.C=a.C,a.C=e.L.C=!1,lr(this,a),t=this._;break}e.C=!0,t=a,a=a.U}while(!t.C);t&&(t.C=!1)}}},ui.geom.voronoi=function(t){function e(t){var e=new Array(t.length),r=l[0][0],a=l[0][1],o=l[1][0],i=l[1][1];return cr(n(t),l).cells.forEach(function(n,l){var s=n.edges,c=n.site,u=e[l]=s.length?s.map(function(t){var e=t.start();return[e.x,e.y]}):c.x>=r&&c.x<=o&&c.y>=a&&c.y<=i?[[r,i],[o,i],[o,a],[r,a]]:[];u.point=t[l]}),e}function n(t){return t.map(function(t,e){return{x:Math.round(o(t,e)/qi)*qi,y:Math.round(i(t,e)/qi)*qi,i:e}})}var r=Cn,a=On,o=r,i=a,l=hs;return t?e(t):(e.links=function(t){return cr(n(t)).edges.filter(function(t){return t.l&&t.r}).map(function(e){\n"
+,
+"return{source:t[e.l.i],target:t[e.r.i]}})},e.triangles=function(t){var e=[];return cr(n(t)).cells.forEach(function(n,r){for(var a,o,i=n.site,l=n.edges.sort(Xn),s=-1,c=l.length,u=l[c-1].edge,f=u.l===i?u.r:u.l;++s<c;)a=u,o=f,u=l[s].edge,f=u.l===i?u.r:u.l,r<o.i&&r<f.i&&fr(i,o,f)<0&&e.push([t[r],t[o.i],t[f.i]])}),e},e.x=function(t){return arguments.length?(o=zt(r=t),e):r},e.y=function(t){return arguments.length?(i=zt(a=t),e):a},e.clipExtent=function(t){return arguments.length?(l=null==t?hs:t,e):l===hs?null:l},e.size=function(t){return arguments.length?e.clipExtent(t&&[[0,0],t]):l===hs?null:l&&l[1]},e)};var hs=[[-1e6,-1e6],[1e6,1e6]];ui.geom.delaunay=function(t){return ui.geom.voronoi().triangles(t)},ui.geom.quadtree=function(t,e,n,r,a){function o(t){function o(t,e,n,r,a,o,i,l){if(!isNaN(n)&&!isNaN(r))if(t.leaf){var s=t.x,u=t.y;if(null!=s)if(wi(s-n)+wi(u-r)<.01)c(t,e,n,r,a,o,i,l);else{var f=t.point;t.x=t.y=t.point=null,c(t,f,s,u,a,o,i,l),c(t,e,n,r,a,o,i,l)}else t.x=n,t.y=r,t.point=e}else c(t,e,n,r,a,o,i,l)}function c(t,e,n,r,a,i,l,s){var c=.5*(a+l),u=.5*(i+s),f=n>=c,d=r>=u,h=d<<1|f;t.leaf=!1,t=t.nodes[h]||(t.nodes[h]=pr()),f?a=c:l=c,d?i=u:s=u,o(t,e,n,r,a,i,l,s)}var u,f,d,h,p,g,v,m,y,x=zt(l),b=zt(s);if(null!=e)g=e,v=n,m=r,y=a;else if(m=y=-(g=v=1/0),f=[],d=[],p=t.length,i)for(h=0;p>h;++h)u=t[h],u.x<g&&(g=u.x),u.y<v&&(v=u.y),u.x>m&&(m=u.x),u.y>y&&(y=u.y),f.push(u.x),d.push(u.y);else for(h=0;p>h;++h){var _=+x(u=t[h],h),w=+b(u,h);g>_&&(g=_),v>w&&(v=w),_>m&&(m=_),w>y&&(y=w),f.push(_),d.push(w)}var k=m-g,M=y-v;k>M?y=v+k:m=g+M;var A=pr();if(A.add=function(t){o(A,t,+x(t,++h),+b(t,h),g,v,m,y)},A.visit=function(t){gr(t,A,g,v,m,y)},A.find=function(t){return vr(A,t[0],t[1],g,v,m,y)},h=-1,null==e){for(;++h<p;)o(A,t[h],f[h],d[h],g,v,m,y);--h}else t.forEach(A.add);return f=d=t=u=null,A}var i,l=Cn,s=On;return(i=arguments.length)?(l=dr,s=hr,3===i&&(a=n,r=e,n=e=0),o(t)):(o.x=function(t){return arguments.length?(l=t,o):l},o.y=function(t){return arguments.length?(s=t,o):s},o.extent=function(t){return arguments.length?(null==t?e=n=r=a=null:(e=+t[0][0],n=+t[0][1],r=+t[1][0],a=+t[1][1]),o):null==e?null:[[e,n],[r,a]]},o.size=function(t){return arguments.length?(null==t?e=n=r=a=null:(e=n=0,r=+t[0],a=+t[1]),o):null==e?null:[r-e,a-n]},o)},ui.interpolateRgb=mr,ui.interpolateObject=yr,ui.interpolateNumber=xr,ui.interpolateString=br;var ps=/[-+]?(?:\\d+\\.?\\d*|\\.?\\d+)(?:[eE][-+]?\\d+)?/g,gs=new RegExp(ps.source,\"g\");ui.interpolate=_r,ui.interpolators=[function(t,e){var n=typeof e;return(\"string\"===n?ll.has(e.toLowerCase())||/^(#|rgb\\(|hsl\\()/i.test(e)?mr:br:e instanceof st?mr:Array.isArray(e)?wr:\"object\"===n&&isNaN(e)?yr:xr)(t,e)}],ui.interpolateArray=wr;var vs=function(){return b},ms=ui.map({linear:vs,poly:Sr,quad:function(){return Lr},cubic:function(){return Tr},sin:function(){return Er},exp:function(){return Cr},circle:function(){return Or},elastic:Pr,back:Nr,bounce:function(){return Dr}}),ys=ui.map({\"in\":b,out:Mr,\"in-out\":Ar,\"out-in\":function(t){return Ar(Mr(t))}});ui.ease=function(t){var e=t.indexOf(\"-\"),n=e>=0?t.slice(0,e):t,r=e>=0?t.slice(e+1):\"in\";return n=ms.get(n)||vs,r=ys.get(r)||b,kr(r(n.apply(null,fi.call(arguments,1))))},ui.interpolateHcl=Ir,ui.interpolateHsl=Rr,ui.interpolateLab=jr,ui.interpolateRound=qr,ui.transform=function(t){var e=hi.createElementNS(ui.ns.prefix.svg,\"g\");return(ui.transform=function(t){if(null!=t){e.setAttribute(\"transform\",t);var n=e.transform.baseVal.consolidate()}return new 
Fr(n?n.matrix:xs)})(t)},Fr.prototype.toString=function(){return\"translate(\"+this.translate+\")rotate(\"+this.rotate+\")skewX(\"+this.skew+\")scale(\"+this.scale+\")\"};var xs={a:1,b:0,c:0,d:1,e:0,f:0};ui.interpolateTransform=$r,ui.layout={},ui.layout.bundle=function(){return function(t){for(var e=[],n=-1,r=t.length;++n<r;)e.push(Jr(t[n]));return e}},ui.layout.chord=function(){function t(){var t,c,f,d,h,p={},g=[],v=ui.range(o),m=[];for(n=[],r=[],t=0,d=-1;++d<o;){for(c=0,h=-1;++h<o;)c+=a[d][h];g.push(c),m.push(ui.range(o)),t+=c}for(i&&v.sort(function(t,e){return i(g[t],g[e])}),l&&m.forEach(function(t,e){t.sort(function(t,n){return l(a[e][t],a[e][n])})}),t=(Hi-u*o)/t,c=0,d=-1;++d<o;){for(f=c,h=-1;++h<o;){var y=v[d],x=m[y][h],b=a[y][x],_=c,w=c+=b*t;p[y+\"-\"+x]={index:y,subindex:x,startAngle:_,endAngle:w,value:b}}r[y]={index:y,startAngle:f,endAngle:c,value:g[y]},c+=u}for(d=-1;++d<o;)for(h=d-1;++h<o;){var k=p[d+\"-\"+h],M=p[h+\"-\"+d];(k.value||M.value)&&n.push(k.value<M.value?{source:M,target:k}:{source:k,target:M})}s&&e()}function e(){n.sort(function(t,e){return s((t.source.value+t.target.value)/2,(e.source.value+e.target.value)/2)})}var n,r,a,o,i,l,s,c={},u=0;return c.matrix=function(t){return arguments.length?(o=(a=t)&&a.length,n=r=null,c):a},c.padding=function(t){return arguments.length?(u=t,n=r=null,c):u},c.sortGroups=function(t){return arguments.length?(i=t,n=r=null,c):i},c.sortSubgroups=function(t){return arguments.length?(l=t,n=null,c):l},c.sortChords=function(t){return arguments.length?(s=t,n&&e(),c):s},c.chords=function(){return n||t(),n},c.groups=function(){return r||t(),r},c},ui.layout.force=function(){function t(t){return function(e,n,r,a){if(e.point!==t){var o=e.cx-t.x,i=e.cy-t.y,l=a-n,s=o*o+i*i;if(s>l*l/m){if(g>s){var c=e.charge/s;t.px-=o*c,t.py-=i*c}return!0}if(e.point&&s&&g>s){var c=e.pointCharge/s;t.px-=o*c,t.py-=i*c}}return!e.charge}}function e(t){t.px=ui.event.x,t.py=ui.event.y,s.resume()}var n,r,a,o,i,l,s={},c=ui.dispatch(\"start\",\"tick\",\"end\"),u=[1,1],f=.9,d=bs,h=_s,p=-30,g=ws,v=.1,m=.64,y=[],x=[];return s.tick=function(){if((a*=.99)<.005)return n=null,c.end({type:\"end\",alpha:a=0}),!0;var e,r,s,d,h,g,m,b,_,w=y.length,k=x.length;for(r=0;k>r;++r)s=x[r],d=s.source,h=s.target,b=h.x-d.x,_=h.y-d.y,(g=b*b+_*_)&&(g=a*i[r]*((g=Math.sqrt(g))-o[r])/g,b*=g,_*=g,h.x-=b*(m=d.weight+h.weight?d.weight/(d.weight+h.weight):.5),h.y-=_*m,d.x+=b*(m=1-m),d.y+=_*m);if((m=a*v)&&(b=u[0]/2,_=u[1]/2,r=-1,m))for(;++r<w;)s=y[r],s.x+=(b-s.x)*m,s.y+=(_-s.y)*m;if(p)for(oa(e=ui.geom.quadtree(y),a,l),r=-1;++r<w;)(s=y[r]).fixed||e.visit(t(s));for(r=-1;++r<w;)s=y[r],s.fixed?(s.x=s.px,s.y=s.py):(s.x-=(s.px-(s.px=s.x))*f,s.y-=(s.py-(s.py=s.y))*f);c.tick({type:\"tick\",alpha:a})},s.nodes=function(t){return arguments.length?(y=t,s):y},s.links=function(t){return arguments.length?(x=t,s):x},s.size=function(t){return arguments.length?(u=t,s):u},s.linkDistance=function(t){return arguments.length?(d=\"function\"==typeof t?t:+t,s):d},s.distance=s.linkDistance,s.linkStrength=function(t){return arguments.length?(h=\"function\"==typeof t?t:+t,s):h},s.friction=function(t){return arguments.length?(f=+t,s):f},s.charge=function(t){return arguments.length?(p=\"function\"==typeof t?t:+t,s):p},s.chargeDistance=function(t){return arguments.length?(g=t*t,s):Math.sqrt(g)},s.gravity=function(t){return arguments.length?(v=+t,s):v},s.theta=function(t){return arguments.length?(m=t*t,s):Math.sqrt(m)},s.alpha=function(t){return 
arguments.length?(t=+t,a?t>0?a=t:(n.c=null,n.t=NaN,n=null,c.end({type:\"end\",alpha:a=0})):t>0&&(c.start({type:\"start\",alpha:a=t}),n=Pt(s.tick)),s):a},s.start=function(){function t(t,r){if(!n){for(n=new Array(a),s=0;a>s;++s)n[s]=[];for(s=0;c>s;++s){var o=x[s];n[o.source.index].push(o.target),n[o.target.index].push(o.source)}}for(var i,l=n[e],s=-1,u=l.length;++s<u;)if(!isNaN(i=l[s][t]))return i;return Math.random()*r}var e,n,r,a=y.length,c=x.length,f=u[0],g=u[1];for(e=0;a>e;++e)(r=y[e]).index=e,r.weight=0;for(e=0;c>e;++e)r=x[e],\"number\"==typeof r.source&&(r.source=y[r.source]),\"number\"==typeof r.target&&(r.target=y[r.target]),++r.source.weight,++r.target.weight;for(e=0;a>e;++e)r=y[e],isNaN(r.x)&&(r.x=t(\"x\",f)),isNaN(r.y)&&(r.y=t(\"y\",g)),isNaN(r.px)&&(r.px=r.x),isNaN(r.py)&&(r.py=r.y);if(o=[],\"function\"==typeof d)for(e=0;c>e;++e)o[e]=+d.call(this,x[e],e);else for(e=0;c>e;++e)o[e]=d;if(i=[],\"function\"==typeof h)for(e=0;c>e;++e)i[e]=+h.call(this,x[e],e);else for(e=0;c>e;++e)i[e]=h;if(l=[],\"function\"==typeof p)for(e=0;a>e;++e)l[e]=+p.call(this,y[e],e);else for(e=0;a>e;++e)l[e]=p;return s.resume()},s.resume=function(){return s.alpha(.1)},s.stop=function(){return s.alpha(0)},s.drag=function(){return r||(r=ui.behavior.drag().origin(b).on(\"dragstart.force\",ea).on(\"drag.force\",e).on(\"dragend.force\",na)),arguments.length?void this.on(\"mouseover.force\",ra).on(\"mouseout.force\",aa).call(r):r},ui.rebind(s,c,\"on\")};var bs=20,_s=1,ws=1/0;ui.layout.hierarchy=function(){function t(a){var o,i=[a],l=[];for(a.depth=0;null!=(o=i.pop());)if(l.push(o),(c=n.call(t,o,o.depth))&&(s=c.length)){for(var s,c,u;--s>=0;)i.push(u=c[s]),u.parent=o,u.depth=o.depth+1;r&&(o.value=0),o.children=c}else r&&(o.value=+r.call(t,o,o.depth)||0),delete o.children;return sa(a,function(t){var n,a;e&&(n=t.children)&&n.sort(e),r&&(a=t.parent)&&(a.value+=t.value)}),l}var e=fa,n=ca,r=ua;return t.sort=function(n){return arguments.length?(e=n,t):e},t.children=function(e){return arguments.length?(n=e,t):n},t.value=function(e){return arguments.length?(r=e,t):r},t.revalue=function(e){return r&&(la(e,function(t){t.children&&(t.value=0)}),sa(e,function(e){var n;e.children||(e.value=+r.call(t,e,e.depth)||0),(n=e.parent)&&(n.value+=e.value)})),e},t},ui.layout.partition=function(){function t(e,n,r,a){var o=e.children;if(e.x=n,e.y=e.depth*a,e.dx=r,e.dy=a,o&&(i=o.length)){var i,l,s,c=-1;for(r=e.value?r/e.value:0;++c<i;)t(l=o[c],n,s=l.value*r,a),n+=s}}function e(t){var n=t.children,r=0;if(n&&(a=n.length))for(var a,o=-1;++o<a;)r=Math.max(r,e(n[o]));return 1+r}function n(n,o){var i=r.call(this,n,o);return t(i[0],0,a[0],a[1]/e(i[0])),i}var r=ui.layout.hierarchy(),a=[1,1];return n.size=function(t){return arguments.length?(a=t,n):a},ia(n,r)},ui.layout.pie=function(){function t(i){var l,s=i.length,c=i.map(function(n,r){return+e.call(t,n,r)}),u=+(\"function\"==typeof r?r.apply(this,arguments):r),f=(\"function\"==typeof a?a.apply(this,arguments):a)-u,d=Math.min(Math.abs(f)/s,+(\"function\"==typeof o?o.apply(this,arguments):o)),h=d*(0>f?-1:1),p=ui.sum(c),g=p?(f-s*h)/p:0,v=ui.range(s),m=[];return null!=n&&v.sort(n===ks?function(t,e){return c[e]-c[t]}:function(t,e){ret"
+,
+"urn n(i[t],i[e])}),v.forEach(function(t){m[t]={data:i[t],value:l=c[t],startAngle:u,endAngle:u+=l*g+h,padAngle:d}}),m}var e=Number,n=ks,r=0,a=Hi,o=0;return t.value=function(n){return arguments.length?(e=n,t):e},t.sort=function(e){return arguments.length?(n=e,t):n},t.startAngle=function(e){return arguments.length?(r=e,t):r},t.endAngle=function(e){return arguments.length?(a=e,t):a},t.padAngle=function(e){return arguments.length?(o=e,t):o},t};var ks={};ui.layout.stack=function(){function t(l,s){if(!(d=l.length))return l;var c=l.map(function(n,r){return e.call(t,n,r)}),u=c.map(function(e){return e.map(function(e,n){return[o.call(t,e,n),i.call(t,e,n)]})}),f=n.call(t,u,s);c=ui.permute(c,f),u=ui.permute(u,f);var d,h,p,g,v=r.call(t,u,s),m=c[0].length;for(p=0;m>p;++p)for(a.call(t,c[0][p],g=v[p],u[0][p][1]),h=1;d>h;++h)a.call(t,c[h][p],g+=u[h-1][p][1],u[h][p][1]);return l}var e=b,n=va,r=ma,a=ga,o=ha,i=pa;return t.values=function(n){return arguments.length?(e=n,t):e},t.order=function(e){return arguments.length?(n=\"function\"==typeof e?e:Ms.get(e)||va,t):n},t.offset=function(e){return arguments.length?(r=\"function\"==typeof e?e:As.get(e)||ma,t):r},t.x=function(e){return arguments.length?(o=e,t):o},t.y=function(e){return arguments.length?(i=e,t):i},t.out=function(e){return arguments.length?(a=e,t):a},t};var Ms=ui.map({\"inside-out\":function(t){var e,n,r=t.length,a=t.map(ya),o=t.map(xa),i=ui.range(r).sort(function(t,e){return a[t]-a[e]}),l=0,s=0,c=[],u=[];for(e=0;r>e;++e)n=i[e],s>l?(l+=o[n],c.push(n)):(s+=o[n],u.push(n));return u.reverse().concat(c)},reverse:function(t){return ui.range(t.length).reverse()},\"default\":va}),As=ui.map({silhouette:function(t){var e,n,r,a=t.length,o=t[0].length,i=[],l=0,s=[];for(n=0;o>n;++n){for(e=0,r=0;a>e;e++)r+=t[e][n][1];r>l&&(l=r),i.push(r)}for(n=0;o>n;++n)s[n]=(l-i[n])/2;return s},wiggle:function(t){var e,n,r,a,o,i,l,s,c,u=t.length,f=t[0],d=f.length,h=[];for(h[0]=s=c=0,n=1;d>n;++n){for(e=0,a=0;u>e;++e)a+=t[e][n][1];for(e=0,o=0,l=f[n][0]-f[n-1][0];u>e;++e){for(r=0,i=(t[e][n][1]-t[e][n-1][1])/(2*l);e>r;++r)i+=(t[r][n][1]-t[r][n-1][1])/l;o+=i*t[e][n][1]}h[n]=s-=a?o/a*l:0,c>s&&(c=s)}for(n=0;d>n;++n)h[n]-=c;return h},expand:function(t){var e,n,r,a=t.length,o=t[0].length,i=1/a,l=[];for(n=0;o>n;++n){for(e=0,r=0;a>e;e++)r+=t[e][n][1];if(r)for(e=0;a>e;e++)t[e][n][1]/=r;else for(e=0;a>e;e++)t[e][n][1]=i}for(n=0;o>n;++n)l[n]=0;return l},zero:ma});ui.layout.histogram=function(){function t(t,o){for(var i,l,s=[],c=t.map(n,this),u=r.call(this,c,o),f=a.call(this,u,c,o),o=-1,d=c.length,h=f.length-1,p=e?1:1/d;++o<h;)i=s[o]=[],i.dx=f[o+1]-(i.x=f[o]),i.y=0;if(h>0)for(o=-1;++o<d;)l=c[o],l>=u[0]&&l<=u[1]&&(i=s[ui.bisect(f,l,1,h)-1],i.y+=p,i.push(t[o]));return s}var e=!0,n=Number,r=ka,a=_a;return t.value=function(e){return arguments.length?(n=e,t):n},t.range=function(e){return arguments.length?(r=zt(e),t):r},t.bins=function(e){return arguments.length?(a=\"number\"==typeof e?function(t){return wa(t,e)}:zt(e),t):a},t.frequency=function(n){return arguments.length?(e=!!n,t):e},t},ui.layout.pack=function(){function t(t,o){var i=n.call(this,t,o),l=i[0],s=a[0],c=a[1],u=null==e?Math.sqrt:\"function\"==typeof e?e:function(){return e};if(l.x=l.y=0,sa(l,function(t){t.r=+u(t.value)}),sa(l,za),r){var f=r*(e?1:Math.max(2*l.r/s,2*l.r/c))/2;sa(l,function(t){t.r+=f}),sa(l,za),sa(l,function(t){t.r-=f})}return Ca(l,s/2,c/2,e?1:1/Math.max(2*l.r/s,2*l.r/c)),i}var e,n=ui.layout.hierarchy().sort(Ma),r=0,a=[1,1];return t.size=function(e){return arguments.length?(a=e,t):a},t.radius=function(n){return 
arguments.length?(e=null==n||\"function\"==typeof n?n:+n,t):e},t.padding=function(e){return arguments.length?(r=+e,t):r},ia(t,n)},ui.layout.tree=function(){function t(t,a){var u=i.call(this,t,a),f=u[0],d=e(f);if(sa(d,n),d.parent.m=-d.z,la(d,r),c)la(f,o);else{var h=f,p=f,g=f;la(f,function(t){t.x<h.x&&(h=t),t.x>p.x&&(p=t),t.depth>g.depth&&(g=t)});var v=l(h,p)/2-h.x,m=s[0]/(p.x+l(p,h)/2+v),y=s[1]/(g.depth||1);la(f,function(t){t.x=(t.x+v)*m,t.y=t.depth*y})}return u}function e(t){for(var e,n={A:null,children:[t]},r=[n];null!=(e=r.pop());)for(var a,o=e.children,i=0,l=o.length;l>i;++i)r.push((o[i]=a={_:o[i],parent:e,children:(a=o[i].children)&&a.slice()||[],A:null,a:null,z:0,m:0,c:0,s:0,t:null,i:i}).a=a);return n.children[0]}function n(t){var e=t.children,n=t.parent.children,r=t.i?n[t.i-1]:null;if(e.length){Ra(t);var o=(e[0].z+e[e.length-1].z)/2;r?(t.z=r.z+l(t._,r._),t.m=t.z-o):t.z=o}else r&&(t.z=r.z+l(t._,r._));t.parent.A=a(t,r,t.parent.A||n[0])}function r(t){t._.x=t.z+t.parent.m,t.m+=t.parent.m}function a(t,e,n){if(e){for(var r,a=t,o=t,i=e,s=a.parent.children[0],c=a.m,u=o.m,f=i.m,d=s.m;i=Da(i),a=Na(a),i&&a;)s=Na(s),o=Da(o),o.a=t,r=i.z+f-a.z-c+l(i._,a._),r>0&&(Ia(ja(i,t,n),t,r),c+=r,u+=r),f+=i.m,c+=a.m,d+=s.m,u+=o.m;i&&!Da(o)&&(o.t=i,o.m+=f-u),a&&!Na(s)&&(s.t=a,s.m+=c-d,n=t)}return n}function o(t){t.x*=s[0],t.y=t.depth*s[1]}var i=ui.layout.hierarchy().sort(null).value(null),l=Pa,s=[1,1],c=null;return t.separation=function(e){return arguments.length?(l=e,t):l},t.size=function(e){return arguments.length?(c=null==(s=e)?o:null,t):c?null:s},t.nodeSize=function(e){return arguments.length?(c=null==(s=e)?null:o,t):c?s:null},ia(t,i)},ui.layout.cluster=function(){function t(t,o){var i,l=e.call(this,t,o),s=l[0],c=0;sa(s,function(t){var e=t.children;e&&e.length?(t.x=Fa(e),t.y=qa(e)):(t.x=i?c+=n(t,i):0,t.y=0,i=t)});var u=Ba(s),f=Ha(s),d=u.x-n(u,f)/2,h=f.x+n(f,u)/2;return sa(s,a?function(t){t.x=(t.x-s.x)*r[0],t.y=(s.y-t.y)*r[1]}:function(t){t.x=(t.x-d)/(h-d)*r[0],t.y=(1-(s.y?t.y/s.y:1))*r[1]}),l}var e=ui.layout.hierarchy().sort(null).value(null),n=Pa,r=[1,1],a=!1;return t.separation=function(e){return arguments.length?(n=e,t):n},t.size=function(e){return arguments.length?(a=null==(r=e),t):a?null:r},t.nodeSize=function(e){return arguments.length?(a=null!=(r=e),t):a?r:null},ia(t,e)},ui.layout.treemap=function(){function t(t,e){for(var n,r,a=-1,o=t.length;++a<o;)r=(n=t[a]).value*(0>e?0:e),n.area=isNaN(r)||0>=r?0:r}function e(n){var o=n.children;if(o&&o.length){var i,l,s,c=f(n),u=[],d=o.slice(),p=1/0,g=\"slice\"===h?c.dx:\"dice\"===h?c.dy:\"slice-dice\"===h?1&n.depth?c.dy:c.dx:Math.min(c.dx,c.dy);for(t(d,c.dx*c.dy/n.value),u.area=0;(s=d.length)>0;)u.push(i=d[s-1]),u.area+=i.area,\"squarify\"!==h||(l=r(u,g))<=p?(d.pop(),p=l):(u.area-=u.pop().area,a(u,g,c,!1),g=Math.min(c.dx,c.dy),u.length=u.area=0,p=1/0);u.length&&(a(u,g,c,!0),u.length=u.area=0),o.forEach(e)}}function n(e){var r=e.children;if(r&&r.length){var o,i=f(e),l=r.slice(),s=[];for(t(l,i.dx*i.dy/e.value),s.area=0;o=l.pop();)s.push(o),s.area+=o.area,null!=o.z&&(a(s,o.z?i.dx:i.dy,i,!l.length),s.length=s.area=0);r.forEach(n)}}function r(t,e){for(var n,r=t.area,a=0,o=1/0,i=-1,l=t.length;++i<l;)(n=t[i].area)&&(o>n&&(o=n),n>a&&(a=n));return r*=r,e*=e,r?Math.max(e*a*p/r,r/(e*o*p)):1/0}function a(t,e,n,r){var 
a,o=-1,i=t.length,l=n.x,c=n.y,u=e?s(t.area/e):0;if(e==n.dx){for((r||u>n.dy)&&(u=n.dy);++o<i;)a=t[o],a.x=l,a.y=c,a.dy=u,l+=a.dx=Math.min(n.x+n.dx-l,u?s(a.area/u):0);a.z=!0,a.dx+=n.x+n.dx-l,n.y+=u,n.dy-=u}else{for((r||u>n.dx)&&(u=n.dx);++o<i;)a=t[o],a.x=l,a.y=c,a.dx=u,c+=a.dy=Math.min(n.y+n.dy-c,u?s(a.area/u):0);a.z=!1,a.dy+=n.y+n.dy-c,n.x+=u,n.dx-=u}}function o(r){var a=i||l(r),o=a[0];return o.x=o.y=0,o.value?(o.dx=c[0],o.dy=c[1]):o.dx=o.dy=0,i&&l.revalue(o),t([o],o.dx*o.dy/o.value),(i?n:e)(o),d&&(i=a),a}var i,l=ui.layout.hierarchy(),s=Math.round,c=[1,1],u=null,f=Va,d=!1,h=\"squarify\",p=.5*(1+Math.sqrt(5));return o.size=function(t){return arguments.length?(c=t,o):c},o.padding=function(t){function e(e){var n=t.call(o,e,e.depth);return null==n?Va(e):Za(e,\"number\"==typeof n?[n,n,n,n]:n)}function n(e){return Za(e,t)}if(!arguments.length)return u;var r;return f=null==(u=t)?Va:\"function\"==(r=typeof t)?e:\"number\"===r?(t=[t,t,t,t],n):n,o},o.round=function(t){return arguments.length?(s=t?Math.round:Number,o):s!=Number},o.sticky=function(t){return arguments.length?(d=t,i=null,o):d},o.ratio=function(t){return arguments.length?(p=t,o):p},o.mode=function(t){return arguments.length?(h=t+\"\",o):h},ia(o,l)},ui.random={normal:function(t,e){var n=arguments.length;return 2>n&&(e=1),1>n&&(t=0),function(){var n,r,a;do n=2*Math.random()-1,r=2*Math.random()-1,a=n*n+r*r;while(!a||a>1);return t+e*n*Math.sqrt(-2*Math.log(a)/a)}},logNormal:function(){var t=ui.random.normal.apply(ui,arguments);return function(){return Math.exp(t())}},bates:function(t){var e=ui.random.irwinHall(t);return function(){return e()/t}},irwinHall:function(t){return function(){for(var e=0,n=0;t>n;n++)e+=Math.random();return e}}},ui.scale={};var Ls={floor:b,ceil:b};ui.scale.linear=function(){return Wa([0,1],[0,1],_r,!1)};var Ts={s:1,g:1,p:1,r:1,e:1};ui.scale.log=function(){return oo(ui.scale.linear().domain([0,1]),10,!0,[1,10])};var zs=ui.format(\".0e\"),Ss={floor:function(t){return-Math.ceil(-t)},ceil:function(t){return-Math.floor(-t)}};ui.scale.pow=function(){return io(ui.scale.linear(),1,[0,1])},ui.scale.sqrt=function(){return ui.scale.pow().exponent(.5)},ui.scale.ordinal=function(){return so([],{t:\"range\",a:[[]]})},ui.scale.category10=function(){return ui.scale.ordinal().range(Es)},ui.scale.category20=function(){return ui.scale.ordinal().range(Cs)},ui.scale.category20b=function(){return ui.scale.ordinal().range(Os)},ui.scale.category20c=function(){return ui.scale.ordinal().range(Ps)};var Es=[2062260,16744206,2924588,14034728,9725885,9197131,14907330,8355711,12369186,1556175].map(_t),Cs=[2062260,11454440,16744206,16759672,2924588,10018698,14034728,16750742,9725885,12955861,9197131,12885140,14907330,16234194,8355711,13092807,12369186,14408589,1556175,10410725].map(_t),Os=[3750777,5395619,7040719,10264286,6519097,9216594,11915115,13556636,9202993,12426809,15186514,15190932,8666169,11356490,14049643,15177372,8077683,10834324,13528509,14589654].map(_t),Ps=[3244733,7057110,10406625,13032431,15095053,16616764,16625259,16634018,3253076,7652470,10607003,13101504,7695281,10394312,12369372,14342891,6513507,9868950,12434877,14277081].map(_t);ui.scale.quantile=function(){return co([],[])},ui.scal"
+,
+"e.quantize=function(){return uo(0,1,[0,1])},ui.scale.threshold=function(){return fo([.5],[0,1])},ui.scale.identity=function(){return ho([0,1])},ui.svg={},ui.svg.arc=function(){function t(){var t=Math.max(0,+n.apply(this,arguments)),c=Math.max(0,+r.apply(this,arguments)),u=i.apply(this,arguments)-Zi,f=l.apply(this,arguments)-Zi,d=Math.abs(f-u),h=u>f?0:1;if(t>c&&(p=c,c=t,t=p),d>=Vi)return e(c,h)+(t?e(t,1-h):\"\")+\"Z\";var p,g,v,m,y,x,b,_,w,k,M,A,L=0,T=0,z=[];if((m=(+s.apply(this,arguments)||0)/2)&&(v=o===Ns?Math.sqrt(t*t+c*c):+o.apply(this,arguments),h||(T*=-1),c&&(T=rt(v/c*Math.sin(m))),t&&(L=rt(v/t*Math.sin(m)))),c){y=c*Math.cos(u+T),x=c*Math.sin(u+T),b=c*Math.cos(f-T),_=c*Math.sin(f-T);var S=Math.abs(f-u-2*T)<=Bi?0:1;if(T&&bo(y,x,b,_)===h^S){var E=(u+f)/2;y=c*Math.cos(E),x=c*Math.sin(E),b=_=null}}else y=x=0;if(t){w=t*Math.cos(f-L),k=t*Math.sin(f-L),M=t*Math.cos(u+L),A=t*Math.sin(u+L);var C=Math.abs(u-f+2*L)<=Bi?0:1;if(L&&bo(w,k,M,A)===1-h^C){var O=(u+f)/2;w=t*Math.cos(O),k=t*Math.sin(O),M=A=null}}else w=k=0;if(d>qi&&(p=Math.min(Math.abs(c-t)/2,+a.apply(this,arguments)))>.001){g=c>t^h?0:1;var P=p,N=p;if(Bi>d){var D=null==M?[w,k]:null==b?[y,x]:In([y,x],[M,A],[b,_],[w,k]),I=y-D[0],R=x-D[1],j=b-D[0],q=_-D[1],F=1/Math.sin(Math.acos((I*j+R*q)/(Math.sqrt(I*I+R*R)*Math.sqrt(j*j+q*q)))/2),B=Math.sqrt(D[0]*D[0]+D[1]*D[1]);N=Math.min(p,(t-B)/(F-1)),P=Math.min(p,(c-B)/(F+1))}if(null!=b){var H=_o(null==M?[w,k]:[M,A],[y,x],c,P,h),V=_o([b,_],[w,k],c,P,h);p===P?z.push(\"M\",H[0],\"A\",P,\",\",P,\" 0 0,\",g,\" \",H[1],\"A\",c,\",\",c,\" 0 \",1-h^bo(H[1][0],H[1][1],V[1][0],V[1][1]),\",\",h,\" \",V[1],\"A\",P,\",\",P,\" 0 0,\",g,\" \",V[0]):z.push(\"M\",H[0],\"A\",P,\",\",P,\" 0 1,\",g,\" \",V[0])}else z.push(\"M\",y,\",\",x);if(null!=M){var Z=_o([y,x],[M,A],t,-N,h),Y=_o([w,k],null==b?[y,x]:[b,_],t,-N,h);p===N?z.push(\"L\",Y[0],\"A\",N,\",\",N,\" 0 0,\",g,\" \",Y[1],\"A\",t,\",\",t,\" 0 \",h^bo(Y[1][0],Y[1][1],Z[1][0],Z[1][1]),\",\",1-h,\" \",Z[1],\"A\",N,\",\",N,\" 0 0,\",g,\" \",Z[0]):z.push(\"L\",Y[0],\"A\",N,\",\",N,\" 0 0,\",g,\" \",Z[0])}else z.push(\"L\",w,\",\",k)}else z.push(\"M\",y,\",\",x),null!=b&&z.push(\"A\",c,\",\",c,\" 0 \",S,\",\",h,\" \",b,\",\",_),z.push(\"L\",w,\",\",k),null!=M&&z.push(\"A\",t,\",\",t,\" 0 \",C,\",\",1-h,\" \",M,\",\",A);return z.push(\"Z\"),z.join(\"\")}function e(t,e){return\"M0,\"+t+\"A\"+t+\",\"+t+\" 0 1,\"+e+\" 0,\"+-t+\"A\"+t+\",\"+t+\" 0 1,\"+e+\" 0,\"+t}var n=go,r=vo,a=po,o=Ns,i=mo,l=yo,s=xo;return t.innerRadius=function(e){return arguments.length?(n=zt(e),t):n},t.outerRadius=function(e){return arguments.length?(r=zt(e),t):r},t.cornerRadius=function(e){return arguments.length?(a=zt(e),t):a},t.padRadius=function(e){return arguments.length?(o=e==Ns?Ns:zt(e),t):o},t.startAngle=function(e){return arguments.length?(i=zt(e),t):i},t.endAngle=function(e){return arguments.length?(l=zt(e),t):l},t.padAngle=function(e){return arguments.length?(s=zt(e),t):s},t.centroid=function(){var t=(+n.apply(this,arguments)+ +r.apply(this,arguments))/2,e=(+i.apply(this,arguments)+ +l.apply(this,arguments))/2-Zi;return[Math.cos(e)*t,Math.sin(e)*t]},t};var Ns=\"auto\";ui.svg.line=function(){return wo(b)};var Ds=ui.map({linear:ko,\"linear-closed\":Mo,step:Ao,\"step-before\":Lo,\"step-after\":To,basis:Po,\"basis-open\":No,\"basis-closed\":Do,bundle:Io,cardinal:Eo,\"cardinal-open\":zo,\"cardinal-closed\":So,monotone:Ho});Ds.forEach(function(t,e){e.key=t,e.closed=/-closed$/.test(t)});var Is=[0,2/3,1/3,0],Rs=[0,1/3,2/3,0],js=[0,1/6,2/3,1/6];ui.svg.line.radial=function(){var t=wo(Vo);return 
t.radius=t.x,delete t.x,t.angle=t.y,delete t.y,t},Lo.reverse=To,To.reverse=Lo,ui.svg.area=function(){return Zo(b)},ui.svg.area.radial=function(){var t=Zo(Vo);return t.radius=t.x,delete t.x,t.innerRadius=t.x0,delete t.x0,t.outerRadius=t.x1,delete t.x1,t.angle=t.y,delete t.y,t.startAngle=t.y0,delete t.y0,t.endAngle=t.y1,delete t.y1,t},ui.svg.chord=function(){function t(t,l){var s=e(this,o,t,l),c=e(this,i,t,l);return\"M\"+s.p0+r(s.r,s.p1,s.a1-s.a0)+(n(s,c)?a(s.r,s.p1,s.r,s.p0):a(s.r,s.p1,c.r,c.p0)+r(c.r,c.p1,c.a1-c.a0)+a(c.r,c.p1,s.r,s.p0))+\"Z\"}function e(t,e,n,r){var a=e.call(t,n,r),o=l.call(t,a,r),i=s.call(t,a,r)-Zi,u=c.call(t,a,r)-Zi;return{r:o,a0:i,a1:u,p0:[o*Math.cos(i),o*Math.sin(i)],p1:[o*Math.cos(u),o*Math.sin(u)]}}function n(t,e){return t.a0==e.a0&&t.a1==e.a1}function r(t,e,n){return\"A\"+t+\",\"+t+\" 0 \"+ +(n>Bi)+\",1 \"+e}function a(t,e,n,r){return\"Q 0,0 \"+r}var o=_n,i=wn,l=Yo,s=mo,c=yo;return t.radius=function(e){return arguments.length?(l=zt(e),t):l},t.source=function(e){return arguments.length?(o=zt(e),t):o},t.target=function(e){return arguments.length?(i=zt(e),t):i},t.startAngle=function(e){return arguments.length?(s=zt(e),t):s},t.endAngle=function(e){return arguments.length?(c=zt(e),t):c},t},ui.svg.diagonal=function(){function t(t,a){var o=e.call(this,t,a),i=n.call(this,t,a),l=(o.y+i.y)/2,s=[o,{x:o.x,y:l},{x:i.x,y:l},i];return s=s.map(r),\"M\"+s[0]+\"C\"+s[1]+\" \"+s[2]+\" \"+s[3]}var e=_n,n=wn,r=Uo;return t.source=function(n){return arguments.length?(e=zt(n),t):e},t.target=function(e){return arguments.length?(n=zt(e),t):n},t.projection=function(e){return arguments.length?(r=e,t):r},t},ui.svg.diagonal.radial=function(){var t=ui.svg.diagonal(),e=Uo,n=t.projection;return t.projection=function(t){return arguments.length?n(Xo(e=t)):e},t},ui.svg.symbol=function(){function t(t,r){return(qs.get(e.call(this,t,r))||Qo)(n.call(this,t,r))}var e=$o,n=Go;return t.type=function(n){return arguments.length?(e=zt(n),t):e},t.size=function(e){return arguments.length?(n=zt(e),t):n},t};var qs=ui.map({circle:Qo,cross:function(t){var e=Math.sqrt(t/5)/2;return\"M\"+-3*e+\",\"+-e+\"H\"+-e+\"V\"+-3*e+\"H\"+e+\"V\"+-e+\"H\"+3*e+\"V\"+e+\"H\"+e+\"V\"+3*e+\"H\"+-e+\"V\"+e+\"H\"+-3*e+\"Z\"},diamond:function(t){var e=Math.sqrt(t/(2*Bs)),n=e*Bs;return\"M0,\"+-e+\"L\"+n+\",0 0,\"+e+\" \"+-n+\",0Z\"},square:function(t){var e=Math.sqrt(t)/2;return\"M\"+-e+\",\"+-e+\"L\"+e+\",\"+-e+\" \"+e+\",\"+e+\" \"+-e+\",\"+e+\"Z\"},\"triangle-down\":function(t){var e=Math.sqrt(t/Fs),n=e*Fs/2;return\"M0,\"+n+\"L\"+e+\",\"+-n+\" \"+-e+\",\"+-n+\"Z\"},\"triangle-up\":function(t){var e=Math.sqrt(t/Fs),n=e*Fs/2;return\"M0,\"+-n+\"L\"+e+\",\"+n+\" \"+-e+\",\"+n+\"Z\"}});ui.svg.symbolTypes=qs.keys();var Fs=Math.sqrt(3),Bs=Math.tan(30*Yi);Ci.transition=function(t){for(var e,n,r=Hs||++Us,a=ei(t),o=[],i=Vs||{time:Date.now(),ease:zr,delay:0,duration:250},l=-1,s=this.length;++l<s;){o.push(e=[]);for(var c=this[l],u=-1,f=c.length;++u<f;)(n=c[u])&&ni(n,u,a,r,i),e.push(n)}return Jo(o,a,r)},Ci.interrupt=function(t){return this.each(null==t?Zs:Wo(ei(t)))};var Hs,Vs,Zs=Wo(ei()),Ys=[],Us=0;Ys.call=Ci.call,Ys.empty=Ci.empty,Ys.node=Ci.node,Ys.size=Ci.size,ui.transition=function(t,e){return t&&t.transition?Hs?t.transition(e):t:ui.selection().transition(t)},ui.transition.prototype=Ys,Ys.select=function(t){var e,n,r,a=this.id,o=this.namespace,i=[];t=E(t);for(var l=-1,s=this.length;++l<s;){i.push(e=[]);for(var c=this[l],u=-1,f=c.length;++u<f;)(r=c[u])&&(n=t.call(r,r.__data__,u,l))?(\"__data__\"in 
r&&(n.__data__=r.__data__),ni(n,u,o,a,r[o][a]),e.push(n)):e.push(null)}return Jo(i,o,a)},Ys.selectAll=function(t){var e,n,r,a,o,i=this.id,l=this.namespace,s=[];t=C(t);for(var c=-1,u=this.length;++c<u;)for(var f=this[c],d=-1,h=f.length;++d<h;)if(r=f[d]){o=r[l][i],n=t.call(r,r.__data__,d,c),s.push(e=[]);for(var p=-1,g=n.length;++p<g;)(a=n[p])&&ni(a,p,l,i,o),e.push(a)}return Jo(s,l,i)},Ys.filter=function(t){var e,n,r,a=[];\"function\"!=typeof t&&(t=V(t));for(var o=0,i=this.length;i>o;o++){a.push(e=[]);for(var n=this[o],l=0,s=n.length;s>l;l++)(r=n[l])&&t.call(r,r.__data__,l,o)&&e.push(r)}return Jo(a,this.namespace,this.id)},Ys.tween=function(t,e){var n=this.id,r=this.namespace;return arguments.length<2?this.node()[r][n].tween.get(t):Y(this,null==e?function(e){e[r][n].tween.remove(t)}:function(a){a[r][n].tween.set(t,e)})},Ys.attr=function(t,e){function n(){this.removeAttribute(l)}function r(){this.removeAttributeNS(l.space,l.local)}function a(t){return null==t?n:(t+=\"\",function(){var e,n=this.getAttribute(l);return n!==t&&(e=i(n,t),function(t){this.setAttribute(l,e(t))})})}function o(t){return null==t?r:(t+=\"\",function(){var e,n=this.getAttributeNS(l.space,l.local);return n!==t&&(e=i(n,t),function(t){this.setAttributeNS(l.space,l.local,e(t))})})}if(arguments.length<2){for(e in t)this.attr(e,t[e]);return this}var i=\"transform\"==t?$r:_r,l=ui.ns.qualify(t);return Ko(this,\"attr.\"+t,e,l.local?o:a)},Ys.attrTween=function(t,e){function n(t,n){var r=e.call(this,t,n,this.getAttribute(a));return r&&function(t){this.setAttribute(a,r(t))}}function r(t,n){var r=e.call(this,t,n,this.getAttributeNS(a.space,a.local));return r&&function(t){this.setAttributeNS(a.space,a.local,r(t))}}var a=ui.ns.qualify(t);return this.tween(\"attr.\"+t,a.local?r:n)},Ys.style=function(t,e,n){function a(){this.style.removeProperty(t)}function o(e){return null==e?a:(e+=\"\",function(){var a,o=r(this).getComputedStyle(this,null).getPropertyValue(t);return o!==e&&(a=_r(o,e),function(e){this.style.setProperty(t,a(e),n)})})}var i=arguments.length;if(3>i){if(\"string\"!=typeof t){2>i&&(e=\"\");for(n in t)this.style(n,t[n],e);return this}n=\"\"}return Ko(this,\"style.\"+t,e,o)},Ys.styleTween=function(t,e,n){function a(a,o){var i=e.call(this,a,o,r(this).getComputedStyle(this,null).getPropertyValue(t));return i&&function(e){this.style.setProperty(t,i(e),n)}}return arguments.length<3&&(n=\"\"),this.tween(\"style.\"+t,a)},Ys.text=function(t){return Ko(this,\"text\",t,ti)},Ys.remove=function(){var t=this.namespace;return this.each(\"end.transition\",function(){var e;this[t].count<2&&(e=this.parentNode)&&e.removeChild(this)})},Ys.ease=function(t){var e=this.id,n=this.namespace;return arguments.length<1?this.node()[n][e].ease:(\"function\"!=typeof t&&(t=ui.ease.apply(ui,arguments)),Y(this,function(r){r[n][e].ease=t}))},Ys.delay=function(t){var e=this.id,n=this.namespace;return arguments.length<1?this.node()[n][e].delay:Y(this,\"function\"==typeof t?function(r,a,o){r[n][e].delay=+t.call(r,r.__data__,a,o)}:(t=+t,function(r){r[n][e].delay=t}))},Ys.duration=function(t){var e=this.id,n=this.namespace;return arguments.length<1?this.node()[n][e].duration:Y(this,\"function\"==typeof t?function(r,a,o){r[n][e].duration=Math.max(1,t.call(r,r.__"
+,
+"data__,a,o))}:(t=Math.max(1,t),function(r){r[n][e].duration=t}))},Ys.each=function(t,e){var n=this.id,r=this.namespace;if(arguments.length<2){var a=Vs,o=Hs;try{Hs=n,Y(this,function(e,a,o){Vs=e[r][n],t.call(e,e.__data__,a,o)})}finally{Vs=a,Hs=o}}else Y(this,function(a){var o=a[r][n];(o.event||(o.event=ui.dispatch(\"start\",\"end\",\"interrupt\"))).on(t,e)});return this},Ys.transition=function(){for(var t,e,n,r,a=this.id,o=++Us,i=this.namespace,l=[],s=0,c=this.length;c>s;s++){l.push(t=[]);for(var e=this[s],u=0,f=e.length;f>u;u++)(n=e[u])&&(r=n[i][a],ni(n,u,i,o,{time:r.time,ease:r.ease,delay:r.delay+r.duration,duration:r.duration})),t.push(n)}return Jo(l,i,o)},ui.svg.axis=function(){function t(t){t.each(function(){var t,c=ui.select(this),u=this.__chart__||n,f=this.__chart__=n.copy(),d=null==s?f.ticks?f.ticks.apply(f,l):f.domain():s,h=null==e?f.tickFormat?f.tickFormat.apply(f,l):b:e,p=c.selectAll(\".tick\").data(d,f),g=p.enter().insert(\"g\",\".domain\").attr(\"class\",\"tick\").style(\"opacity\",qi),v=ui.transition(p.exit()).style(\"opacity\",qi).remove(),m=ui.transition(p.order()).style(\"opacity\",1),y=Math.max(a,0)+i,x=Ua(f),_=c.selectAll(\".domain\").data([0]),w=(_.enter().append(\"path\").attr(\"class\",\"domain\"),ui.transition(_));g.append(\"line\"),g.append(\"text\");var k,M,A,L,T=g.select(\"line\"),z=m.select(\"line\"),S=p.select(\"text\").text(h),E=g.select(\"text\"),C=m.select(\"text\"),O=\"top\"===r||\"left\"===r?-1:1;if(\"bottom\"===r||\"top\"===r?(t=ri,k=\"x\",A=\"y\",M=\"x2\",L=\"y2\",S.attr(\"dy\",0>O?\"0em\":\".71em\").style(\"text-anchor\",\"middle\"),w.attr(\"d\",\"M\"+x[0]+\",\"+O*o+\"V0H\"+x[1]+\"V\"+O*o)):(t=ai,k=\"y\",A=\"x\",M=\"y2\",L=\"x2\",S.attr(\"dy\",\".32em\").style(\"text-anchor\",0>O?\"end\":\"start\"),w.attr(\"d\",\"M\"+O*o+\",\"+x[0]+\"H0V\"+x[1]+\"H\"+O*o)),T.attr(L,O*a),E.attr(A,O*y),z.attr(M,0).attr(L,O*a),C.attr(k,0).attr(A,O*y),f.rangeBand){var P=f,N=P.rangeBand()/2;u=f=function(t){return P(t)+N}}else u.rangeBand?u=f:v.call(t,f,u);g.call(t,u,f),m.call(t,f,f)})}var e,n=ui.scale.linear(),r=Xs,a=6,o=6,i=3,l=[10],s=null;return t.scale=function(e){\n"
+,
+"return arguments.length?(n=e,t):n},t.orient=function(e){return arguments.length?(r=e in Gs?e+\"\":Xs,t):r},t.ticks=function(){return arguments.length?(l=di(arguments),t):l},t.tickValues=function(e){return arguments.length?(s=e,t):s},t.tickFormat=function(n){return arguments.length?(e=n,t):e},t.tickSize=function(e){var n=arguments.length;return n?(a=+e,o=+arguments[n-1],t):a},t.innerTickSize=function(e){return arguments.length?(a=+e,t):a},t.outerTickSize=function(e){return arguments.length?(o=+e,t):o},t.tickPadding=function(e){return arguments.length?(i=+e,t):i},t.tickSubdivide=function(){return arguments.length&&t},t};var Xs=\"bottom\",Gs={top:1,right:1,bottom:1,left:1};ui.svg.brush=function(){function t(r){r.each(function(){var r=ui.select(this).style(\"pointer-events\",\"all\").style(\"-webkit-tap-highlight-color\",\"rgba(0,0,0,0)\").on(\"mousedown.brush\",o).on(\"touchstart.brush\",o),i=r.selectAll(\".background\").data([0]);i.enter().append(\"rect\").attr(\"class\",\"background\").style(\"visibility\",\"hidden\").style(\"cursor\",\"crosshair\"),r.selectAll(\".extent\").data([0]).enter().append(\"rect\").attr(\"class\",\"extent\").style(\"cursor\",\"move\");var l=r.selectAll(\".resize\").data(g,b);l.exit().remove(),l.enter().append(\"g\").attr(\"class\",function(t){return\"resize \"+t}).style(\"cursor\",function(t){return $s[t]}).append(\"rect\").attr(\"x\",function(t){return/[ew]$/.test(t)?-3:null}).attr(\"y\",function(t){return/^[ns]/.test(t)?-3:null}).attr(\"width\",6).attr(\"height\",6).style(\"visibility\",\"hidden\"),l.style(\"display\",t.empty()?\"none\":null);var s,f=ui.transition(r),d=ui.transition(i);c&&(s=Ua(c),d.attr(\"x\",s[0]).attr(\"width\",s[1]-s[0]),n(f)),u&&(s=Ua(u),d.attr(\"y\",s[0]).attr(\"height\",s[1]-s[0]),a(f)),e(f)})}function e(t){t.selectAll(\".resize\").attr(\"transform\",function(t){return\"translate(\"+f[+/e$/.test(t)]+\",\"+d[+/^s/.test(t)]+\")\"})}function n(t){t.select(\".extent\").attr(\"x\",f[0]),t.selectAll(\".extent,.n>rect,.s>rect\").attr(\"width\",f[1]-f[0])}function a(t){t.select(\".extent\").attr(\"y\",d[0]),t.selectAll(\".extent,.e>rect,.w>rect\").attr(\"height\",d[1]-d[0])}function o(){function o(){32==ui.event.keyCode&&(S||(x=null,C[0]-=f[1],C[1]-=d[1],S=2),L())}function g(){32==ui.event.keyCode&&2==S&&(C[0]+=f[1],C[1]+=d[1],S=0,L())}function v(){var t=ui.mouse(_),r=!1;b&&(t[0]+=b[0],t[1]+=b[1]),S||(ui.event.altKey?(x||(x=[(f[0]+f[1])/2,(d[0]+d[1])/2]),C[0]=f[+(t[0]<x[0])],C[1]=d[+(t[1]<x[1])]):x=null),T&&m(t,c,0)&&(n(M),r=!0),z&&m(t,u,1)&&(a(M),r=!0),r&&(e(M),k({type:\"brush\",mode:S?\"move\":\"resize\"}))}function m(t,e,n){var r,a,o=Ua(e),s=o[0],c=o[1],u=C[n],g=n?d:f,v=g[1]-g[0];return S&&(s-=u,c-=v+u),r=(n?p:h)?Math.max(s,Math.min(c,t[n])):t[n],S?a=(r+=u)+v:(x&&(u=Math.max(s,Math.min(c,2*x[n]-r))),r>u?(a=r,r=u):a=u),g[0]!=r||g[1]!=a?(n?l=null:i=null,g[0]=r,g[1]=a,!0):void 0}function y(){v(),M.style(\"pointer-events\",\"all\").selectAll(\".resize\").style(\"display\",t.empty()?\"none\":null),ui.select(\"body\").style(\"cursor\",null),O.on(\"mousemove.brush\",null).on(\"mouseup.brush\",null).on(\"touchmove.brush\",null).on(\"touchend.brush\",null).on(\"keydown.brush\",null).on(\"keyup.brush\",null),E(),k({type:\"brushend\"})}var 
x,b,_=this,w=ui.select(ui.event.target),k=s.of(_,arguments),M=ui.select(_),A=w.datum(),T=!/^(n|s)$/.test(A)&&c,z=!/^(e|w)$/.test(A)&&u,S=w.classed(\"extent\"),E=W(_),C=ui.mouse(_),O=ui.select(r(_)).on(\"keydown.brush\",o).on(\"keyup.brush\",g);if(ui.event.changedTouches?O.on(\"touchmove.brush\",v).on(\"touchend.brush\",y):O.on(\"mousemove.brush\",v).on(\"mouseup.brush\",y),M.interrupt().selectAll(\"*\").interrupt(),S)C[0]=f[0]-C[0],C[1]=d[0]-C[1];else if(A){var P=+/w$/.test(A),N=+/^n/.test(A);b=[f[1-P]-C[0],d[1-N]-C[1]],C[0]=f[P],C[1]=d[N]}else ui.event.altKey&&(x=C.slice());M.style(\"pointer-events\",\"none\").selectAll(\".resize\").style(\"display\",null),ui.select(\"body\").style(\"cursor\",w.style(\"cursor\")),k({type:\"brushstart\"}),v()}var i,l,s=z(t,\"brushstart\",\"brush\",\"brushend\"),c=null,u=null,f=[0,0],d=[0,0],h=!0,p=!0,g=Qs[0];return t.event=function(t){t.each(function(){var t=s.of(this,arguments),e={x:f,y:d,i:i,j:l},n=this.__chart__||e;this.__chart__=e,Hs?ui.select(this).transition().each(\"start.brush\",function(){i=n.i,l=n.j,f=n.x,d=n.y,t({type:\"brushstart\"})}).tween(\"brush:brush\",function(){var n=wr(f,e.x),r=wr(d,e.y);return i=l=null,function(a){f=e.x=n(a),d=e.y=r(a),t({type:\"brush\",mode:\"resize\"})}}).each(\"end.brush\",function(){i=e.i,l=e.j,t({type:\"brush\",mode:\"resize\"}),t({type:\"brushend\"})}):(t({type:\"brushstart\"}),t({type:\"brush\",mode:\"resize\"}),t({type:\"brushend\"}))})},t.x=function(e){return arguments.length?(c=e,g=Qs[!c<<1|!u],t):c},t.y=function(e){return arguments.length?(u=e,g=Qs[!c<<1|!u],t):u},t.clamp=function(e){return arguments.length?(c&&u?(h=!!e[0],p=!!e[1]):c?h=!!e:u&&(p=!!e),t):c&&u?[h,p]:c?h:u?p:null},t.extent=function(e){var n,r,a,o,s;return arguments.length?(c&&(n=e[0],r=e[1],u&&(n=n[0],r=r[0]),i=[n,r],c.invert&&(n=c(n),r=c(r)),n>r&&(s=n,n=r,r=s),n==f[0]&&r==f[1]||(f=[n,r])),u&&(a=e[0],o=e[1],c&&(a=a[1],o=o[1]),l=[a,o],u.invert&&(a=u(a),o=u(o)),a>o&&(s=a,a=o,o=s),a==d[0]&&o==d[1]||(d=[a,o])),t):(c&&(i?(n=i[0],r=i[1]):(n=f[0],r=f[1],c.invert&&(n=c.invert(n),r=c.invert(r)),n>r&&(s=n,n=r,r=s))),u&&(l?(a=l[0],o=l[1]):(a=d[0],o=d[1],u.invert&&(a=u.invert(a),o=u.invert(o)),a>o&&(s=a,a=o,o=s))),c&&u?[[n,a],[r,o]]:c?[n,r]:u&&[a,o])},t.clear=function(){return t.empty()||(f=[0,0],d=[0,0],i=l=null),t},t.empty=function(){return!!c&&f[0]==f[1]||!!u&&d[0]==d[1]},ui.rebind(t,s,\"on\")};var $s={n:\"ns-resize\",e:\"ew-resize\",s:\"ns-resize\",w:\"ew-resize\",nw:\"nwse-resize\",ne:\"nesw-resize\",se:\"nwse-resize\",sw:\"nesw-resize\"},Qs=[[\"n\",\"e\",\"s\",\"w\",\"nw\",\"ne\",\"se\",\"sw\"],[\"e\",\"w\"],[\"n\",\"s\"],[]],Ws=vl.format=wl.timeFormat,Js=Ws.utc,Ks=Js(\"%Y-%m-%dT%H:%M:%S.%LZ\");Ws.iso=Date.prototype.toISOString&&+new Date(\"2000-01-01T00:00:00.000Z\")?oi:Ks,oi.parse=function(t){var e=new Date(t);return isNaN(e)?null:e},oi.toString=Ks.toString,vl.second=Ht(function(t){return new ml(1e3*Math.floor(t/1e3))},function(t,e){t.setTime(t.getTime()+1e3*Math.floor(e))},function(t){return t.getSeconds()}),vl.seconds=vl.second.range,vl.seconds.utc=vl.second.utc.range,vl.minute=Ht(function(t){return new ml(6e4*Math.floor(t/6e4))},function(t,e){t.setTime(t.getTime()+6e4*Math.floor(e))},function(t){return t.getMinutes()}),vl.minutes=vl.minute.range,vl.minutes.utc=vl.minute.utc.range,vl.hour=Ht(function(t){var e=t.getTimezoneOffset()/60;return new ml(36e5*(Math.floor(t/36e5-e)+e))},function(t,e){t.setTime(t.getTime()+36e5*Math.floor(e))},function(t){return 
t.getHours()}),vl.hours=vl.hour.range,vl.hours.utc=vl.hour.utc.range,vl.month=Ht(function(t){return t=vl.day(t),t.setDate(1),t},function(t,e){t.setMonth(t.getMonth()+e)},function(t){return t.getMonth()}),vl.months=vl.month.range,vl.months.utc=vl.month.utc.range;var tc=[1e3,5e3,15e3,3e4,6e4,3e5,9e5,18e5,36e5,108e5,216e5,432e5,864e5,1728e5,6048e5,2592e6,7776e6,31536e6],ec=[[vl.second,1],[vl.second,5],[vl.second,15],[vl.second,30],[vl.minute,1],[vl.minute,5],[vl.minute,15],[vl.minute,30],[vl.hour,1],[vl.hour,3],[vl.hour,6],[vl.hour,12],[vl.day,1],[vl.day,2],[vl.week,1],[vl.month,1],[vl.month,3],[vl.year,1]],nc=Ws.multi([[\".%L\",function(t){return t.getMilliseconds()}],[\":%S\",function(t){return t.getSeconds()}],[\"%I:%M\",function(t){return t.getMinutes()}],[\"%I %p\",function(t){return t.getHours()}],[\"%a %d\",function(t){return t.getDay()&&1!=t.getDate()}],[\"%b %d\",function(t){return 1!=t.getDate()}],[\"%B\",function(t){return t.getMonth()}],[\"%Y\",Ce]]),rc={range:function(t,e,n){return ui.range(Math.ceil(t/n)*n,+e,n).map(li)},floor:b,ceil:b};ec.year=vl.year,vl.scale=function(){return ii(ui.scale.linear(),ec,nc)};var ac=ec.map(function(t){return[t[0].utc,t[1]]}),oc=Js.multi([[\".%L\",function(t){return t.getUTCMilliseconds()}],[\":%S\",function(t){return t.getUTCSeconds()}],[\"%I:%M\",function(t){return t.getUTCMinutes()}],[\"%I %p\",function(t){return t.getUTCHours()}],[\"%a %d\",function(t){return t.getUTCDay()&&1!=t.getUTCDate()}],[\"%b %d\",function(t){return 1!=t.getUTCDate()}],[\"%B\",function(t){return t.getUTCMonth()}],[\"%Y\",Ce]]);ac.year=vl.year.utc,vl.scale.utc=function(){return ii(ui.scale.linear(),ac,oc)},ui.text=St(function(t){return t.responseText}),ui.json=function(t,e){return Et(t,\"application/json\",si,e)},ui.html=function(t,e){return Et(t,\"text/html\",ci,e)},ui.xml=St(function(t){return t.responseXML}),\"function\"==typeof t&&t.amd?(this.d3=ui,t(ui)):\"object\"==typeof n&&n.exports?n.exports=ui:this.d3=ui}()},{}],10:[function(e,n,r){(function(r,a){(function(){\"use strict\";function o(t){return\"function\"==typeof t||\"object\"==typeof t&&null!==t}function i(t){return\"function\"==typeof t}function l(t){U=t}function s(t){Q=t}function c(){return function(){r.nextTick(p)}}function u(){return function(){Y(p)}}function f(){var t=0,e=new K(p),n=document.createTextNode(\"\");return e.observe(n,{characterData:!0}),function(){n.data=t=++t%2}}function d(){var t=new MessageChannel;return t.port1.onmessage=p,function(){t.port2.postMessage(0)}}function h(){return function(){setTimeout(p,1)}}function p(){for(var t=0;$>t;t+=2){var e=nt[t],n=nt[t+1];e(n),nt[t]=void 0,nt[t+1]=void 0}$=0}function g(){try{var t=e,n=t(\"vertx\");return Y=n.runOnLoop||n.runOnContext,u()}catch(r){return h()}}function v(t,e){var n=this,r=n._state;if(r===it&&!t||r===lt&&!e)return this;var a=new this.constructor(y),o=n._result;if(r){var i=arguments[r-1];Q(function(){N(r,a,i,o)})}else E(n,a,t,e);return a}function m(t){var e=this;if(t&&\"object\"==typeof t&&t.constructor===e)return t;var n=new e(y);return L(n,t),n}function y(){}function x(){return new TypeError(\"You cannot resolve a promise with itself\")}function b(){return new TypeError(\"A promises callback cannot return that same promise.\")}function _(t){try{return t.then}catch(e){return st.error=e,st}}function w(t,e,n,r){try{t.call(e,n,r)}catch(a){return a}}function k(t,e,n){Q(function(t){var r=!1,a=w(n,e,function(n){r||(r=!0,e!==n?L(t,n):z(t,n))},function(e){r||(r=!0,S(t,e))},\"Settle: \"+(t._label||\" unknown 
promise\"));!r&&a&&(r=!0,S(t,a))},t)}function M(t,e){e._state===it?z(t,e._result):e._state===lt?S(t,"
+,
+"e._result):E(e,void 0,function(e){L(t,e)},function(e){S(t,e)})}function A(t,e,n){e.constructor===t.constructor&&n===rt&&constructor.resolve===at?M(t,e):n===st?S(t,st.error):void 0===n?z(t,e):i(n)?k(t,e,n):z(t,e)}function L(t,e){t===e?S(t,x()):o(e)?A(t,e,_(e)):z(t,e)}function T(t){t._onerror&&t._onerror(t._result),C(t)}function z(t,e){t._state===ot&&(t._result=e,t._state=it,0!==t._subscribers.length&&Q(C,t))}function S(t,e){t._state===ot&&(t._state=lt,t._result=e,Q(T,t))}function E(t,e,n,r){var a=t._subscribers,o=a.length;t._onerror=null,a[o]=e,a[o+it]=n,a[o+lt]=r,0===o&&t._state&&Q(C,t)}function C(t){var e=t._subscribers,n=t._state;if(0!==e.length){for(var r,a,o=t._result,i=0;i<e.length;i+=3)r=e[i],a=e[i+n],r?N(n,r,a,o):a(o);t._subscribers.length=0}}function O(){this.error=null}function P(t,e){try{return t(e)}catch(n){return ct.error=n,ct}}function N(t,e,n,r){var a,o,l,s,c=i(n);if(c){if(a=P(n,r),a===ct?(s=!0,o=a.error,a=null):l=!0,e===a)return void S(e,b())}else a=r,l=!0;e._state!==ot||(c&&l?L(e,a):s?S(e,o):t===it?z(e,a):t===lt&&S(e,a))}function D(t,e){try{e(function(e){L(t,e)},function(e){S(t,e)})}catch(n){S(t,n)}}function I(t){return new gt(this,t).promise}function R(t){function e(t){L(a,t)}function n(t){S(a,t)}var r=this,a=new r(y);if(!G(t))return S(a,new TypeError(\"You must pass an array to race.\")),a;for(var o=t.length,i=0;a._state===ot&&o>i;i++)E(r.resolve(t[i]),void 0,e,n);return a}function j(t){var e=this,n=new e(y);return S(n,t),n}function q(){throw new TypeError(\"You must pass a resolver function as the first argument to the promise constructor\")}function F(){throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\")}function B(t){this._id=ht++,this._state=void 0,this._result=void 0,this._subscribers=[],y!==t&&(\"function\"!=typeof t&&q(),this instanceof B?D(this,t):F())}function H(t,e){this._instanceConstructor=t,this.promise=new t(y),Array.isArray(e)?(this._input=e,this.length=e.length,this._remaining=e.length,this._result=new Array(this.length),0===this.length?z(this.promise,this._result):(this.length=this.length||0,this._enumerate(),0===this._remaining&&z(this.promise,this._result))):S(this.promise,this._validationError())}function V(){var t;if(\"undefined\"!=typeof a)t=a;else if(\"undefined\"!=typeof self)t=self;else try{t=Function(\"return this\")()}catch(e){throw new Error(\"polyfill failed because global object is unavailable in this environment\")}var n=t.Promise;n&&\"[object Promise]\"===Object.prototype.toString.call(n.resolve())&&!n.cast||(t.Promise=pt)}var Z;Z=Array.isArray?Array.isArray:function(t){return\"[object Array]\"===Object.prototype.toString.call(t)};var Y,U,X,G=Z,$=0,Q=function(t,e){nt[$]=t,nt[$+1]=e,$+=2,2===$&&(U?U(p):X())},W=\"undefined\"!=typeof window?window:void 0,J=W||{},K=J.MutationObserver||J.WebKitMutationObserver,tt=\"undefined\"!=typeof r&&\"[object process]\"==={}.toString.call(r),et=\"undefined\"!=typeof Uint8ClampedArray&&\"undefined\"!=typeof importScripts&&\"undefined\"!=typeof MessageChannel,nt=new Array(1e3);X=tt?c():K?f():et?d():void 0===W&&\"function\"==typeof e?g():h();var rt=v,at=m,ot=void 0,it=1,lt=2,st=new O,ct=new O,ut=I,ft=R,dt=j,ht=0,pt=B;B.all=ut,B.race=ft,B.resolve=at,B.reject=dt,B._setScheduler=l,B._setAsap=s,B._asap=Q,B.prototype={constructor:B,then:rt,\"catch\":function(t){return this.then(null,t)}};var gt=H;H.prototype._validationError=function(){return new Error(\"Array Methods must be provided an 
Array\")},H.prototype._enumerate=function(){for(var t=this.length,e=this._input,n=0;this._state===ot&&t>n;n++)this._eachEntry(e[n],n)},H.prototype._eachEntry=function(t,e){var n=this._instanceConstructor,r=n.resolve;if(r===at){var a=_(t);if(a===rt&&t._state!==ot)this._settledAt(t._state,e,t._result);else if(\"function\"!=typeof a)this._remaining--,this._result[e]=t;else if(n===pt){var o=new n(y);A(o,t,a),this._willSettleAt(o,e)}else this._willSettleAt(new n(function(e){e(t)}),e)}else this._willSettleAt(r(t),e)},H.prototype._settledAt=function(t,e,n){var r=this.promise;r._state===ot&&(this._remaining--,t===lt?S(r,n):this._result[e]=n),0===this._remaining&&z(r,this._result)},H.prototype._willSettleAt=function(t,e){var n=this;E(t,void 0,function(t){n._settledAt(it,e,t)},function(t){n._settledAt(lt,e,t)})};var vt=V,mt={Promise:pt,polyfill:vt};\"function\"==typeof t&&t.amd?t(function(){return mt}):\"undefined\"!=typeof n&&n.exports?n.exports=mt:\"undefined\"!=typeof this&&(this.ES6Promise=mt),vt()}).call(this)}).call(this,e(\"_process\"),\"undefined\"!=typeof global?global:\"undefined\"!=typeof self?self:\"undefined\"!=typeof window?window:{})},{_process:8}],11:[function(t,e,n){\"use strict\";function r(t){for(var e,n=t.length,r=0;n>r;r++)if(e=t.charCodeAt(r),(9>e||e>13)&&32!==e&&133!==e&&160!==e&&5760!==e&&6158!==e&&(8192>e||e>8205)&&8232!==e&&8233!==e&&8239!==e&&8287!==e&&8288!==e&&12288!==e&&65279!==e)return!1;return!0}e.exports=function(t){var e=typeof t;if(\"string\"===e){var n=t;if(t=+t,0===t&&r(n))return!1}else if(\"number\"!==e)return!1;return 1>t-t}},{}],12:[function(t,e,n){function r(t,e){var n=e[0],r=e[1],a=e[2],o=e[3],i=n+n,l=r+r,s=a+a,c=n*i,u=r*i,f=r*l,d=a*i,h=a*l,p=a*s,g=o*i,v=o*l,m=o*s;return t[0]=1-f-p,t[1]=u+m,t[2]=d-v,t[3]=0,t[4]=u-m,t[5]=1-c-p,t[6]=h+g,t[7]=0,t[8]=d+v,t[9]=h-g,t[10]=1-c-f,t[11]=0,t[12]=0,t[13]=0,t[14]=0,t[15]=1,t}e.exports=r},{}],13:[function(e,n,r){!function(){function e(t,n){if(t=t?t:\"\",n=n||{},t instanceof e)return t;if(!(this instanceof e))return new e(t,n);var a=r(t);this._originalInput=t,this._r=a.r,this._g=a.g,this._b=a.b,this._a=a.a,this._roundA=B(100*this._a)/100,this._format=n.format||a.format,this._gradientType=n.gradientType,this._r<1&&(this._r=B(this._r)),this._g<1&&(this._g=B(this._g)),this._b<1&&(this._b=B(this._b)),this._ok=a.ok,this._tc_id=q++}function r(t){var e={r:0,g:0,b:0},n=1,r=!1,o=!1;return\"string\"==typeof t&&(t=D(t)),\"object\"==typeof t&&(t.hasOwnProperty(\"r\")&&t.hasOwnProperty(\"g\")&&t.hasOwnProperty(\"b\")?(e=a(t.r,t.g,t.b),r=!0,o=\"%\"===String(t.r).substr(-1)?\"prgb\":\"rgb\"):t.hasOwnProperty(\"h\")&&t.hasOwnProperty(\"s\")&&t.hasOwnProperty(\"v\")?(t.s=O(t.s),t.v=O(t.v),e=s(t.h,t.s,t.v),r=!0,o=\"hsv\"):t.hasOwnProperty(\"h\")&&t.hasOwnProperty(\"s\")&&t.hasOwnProperty(\"l\")&&(t.s=O(t.s),t.l=O(t.l),e=i(t.h,t.s,t.l),r=!0,o=\"hsl\"),t.hasOwnProperty(\"a\")&&(n=t.a)),n=A(n),{ok:r,format:t.format||o,r:H(255,V(e.r,0)),g:H(255,V(e.g,0)),b:H(255,V(e.b,0)),a:n}}function a(t,e,n){return{r:255*L(t,255),g:255*L(e,255),b:255*L(n,255)}}function o(t,e,n){t=L(t,255),e=L(e,255),n=L(n,255);var r,a,o=V(t,e,n),i=H(t,e,n),l=(o+i)/2;if(o==i)r=a=0;else{var s=o-i;switch(a=l>.5?s/(2-o-i):s/(o+i),o){case t:r=(e-n)/s+(n>e?6:0);break;case e:r=(n-t)/s+2;break;case n:r=(t-e)/s+4}r/=6}return{h:r,s:a,l:l}}function i(t,e,n){function r(t,e,n){return 0>n&&(n+=1),n>1&&(n-=1),1/6>n?t+6*(e-t)*n:.5>n?e:2/3>n?t+(e-t)*(2/3-n)*6:t}var a,o,i;if(t=L(t,360),e=L(e,100),n=L(n,100),0===e)a=o=i=n;else{var 
l=.5>n?n*(1+e):n+e-n*e,s=2*n-l;a=r(s,l,t+1/3),o=r(s,l,t),i=r(s,l,t-1/3)}return{r:255*a,g:255*o,b:255*i}}function l(t,e,n){t=L(t,255),e=L(e,255),n=L(n,255);var r,a,o=V(t,e,n),i=H(t,e,n),l=o,s=o-i;if(a=0===o?0:s/o,o==i)r=0;else{switch(o){case t:r=(e-n)/s+(n>e?6:0);break;case e:r=(n-t)/s+2;break;case n:r=(t-e)/s+4}r/=6}return{h:r,s:a,v:l}}function s(t,e,n){t=6*L(t,360),e=L(e,100),n=L(n,100);var r=F.floor(t),a=t-r,o=n*(1-e),i=n*(1-a*e),l=n*(1-(1-a)*e),s=r%6,c=[n,i,o,o,l,n][s],u=[l,n,n,i,o,o][s],f=[o,o,l,n,n,i][s];return{r:255*c,g:255*u,b:255*f}}function c(t,e,n,r){var a=[C(B(t).toString(16)),C(B(e).toString(16)),C(B(n).toString(16))];return r&&a[0].charAt(0)==a[0].charAt(1)&&a[1].charAt(0)==a[1].charAt(1)&&a[2].charAt(0)==a[2].charAt(1)?a[0].charAt(0)+a[1].charAt(0)+a[2].charAt(0):a.join(\"\")}function u(t,e,n,r){var a=[C(P(r)),C(B(t).toString(16)),C(B(e).toString(16)),C(B(n).toString(16))];return a.join(\"\")}function f(t,n){n=0===n?0:n||10;var r=e(t).toHsl();return r.s-=n/100,r.s=T(r.s),e(r)}function d(t,n){n=0===n?0:n||10;var r=e(t).toHsl();return r.s+=n/100,r.s=T(r.s),e(r)}function h(t){return e(t).desaturate(100)}function p(t,n){n=0===n?0:n||10;var r=e(t).toHsl();return r.l+=n/100,r.l=T(r.l),e(r)}function g(t,n){n=0===n?0:n||10;var r=e(t).toRgb();return r.r=V(0,H(255,r.r-B(255*-(n/100)))),r.g=V(0,H(255,r.g-B(255*-(n/100)))),r.b=V(0,H(255,r.b-B(255*-(n/100)))),e(r)}function v(t,n){n=0===n?0:n||10;var r=e(t).toHsl();return r.l-=n/100,r.l=T(r.l),e(r)}function m(t,n){var r=e(t).toHsl(),a=(B(r.h)+n)%360;return r.h=0>a?360+a:a,e(r)}function y(t){var n=e(t).toHsl();return n.h=(n.h+180)%360,e(n)}function x(t){var n=e(t).toHsl(),r=n.h;return[e(t),e({h:(r+120)%360,s:n.s,l:n.l}),e({h:(r+240)%360,s:n.s,l:n.l})]}function b(t){var n=e(t).toHsl(),r=n.h;return[e(t),e({h:(r+90)%360,s:n.s,l:n.l}),e({h:(r+180)%360,s:n.s,l:n.l}),e({h:(r+270)%360,s:n.s,l:n.l})]}function _(t){var n=e(t).toHsl(),r=n.h;return[e(t),e({h:(r+72)%360,s:n.s,l:n.l}),e({h:(r+216)%360,s:n.s,l:n.l})]}function w(t,n,r){n=n||6,r=r||30;var a=e(t).toHsl(),o=360/r,i=[e(t)];for(a.h=(a.h-(o*n>>1)+720)%360;--n;)a.h=(a.h+o)%360,i.push(e(a));return i}function k(t,n){n=n||6;for(var r=e(t).toHsv(),a=r.h,o=r.s,i=r.v,l=[],s=1/n;n--;)l.push(e({h:a,s:o,v:i})),i=(i+s)%1;return l}function M(t){var e={};for(var n in t)t.hasOwnProperty(n)&&(e[t[n]]=n);return e}function A(t){return t=parseFloat(t),(isNaN(t)||0>t||t>1)&&(t=1),t}function L(t,e){S(t)&&(t=\"100%\");var n=E(t);return t=H(e,V(0,parseFloat(t))),n&&(t=parseInt(t*e,10)/100),F.abs(t-e)<1e-6?1:t%e/parseFloat(e)}function T(t){return H(1,V(0,t))}function z(t){return parseInt(t,16)}function S(t){return\"string\"==typeof t&&-1!=t.indexOf(\".\")&&1===parseFloat(t)}function E(t){return\"string\"==typeof t&&-1!=t.indexOf(\"%\")}function C(t){return 1==t.length?\"0\"+t:\"\"+t}function O(t){return 1>=t&&(t=100*t+\"%\"),t}function P(t){return Math.round(255*parseFloat(t)).toString(16)}function N(t){return z(t)/255}function D(t){t=t.replace(R,\"\").replace(j,\"\").toLowerCase();var e=!1;if(Y[t])t=Y[t],e=!0;else if(\"transparent\"==t)return{r:0,g:0,b:0,a:0,format:\"name\"};var n;return"
+,
+"(n=X.rgb.exec(t))?{r:n[1],g:n[2],b:n[3]}:(n=X.rgba.exec(t))?{r:n[1],g:n[2],b:n[3],a:n[4]}:(n=X.hsl.exec(t))?{h:n[1],s:n[2],l:n[3]}:(n=X.hsla.exec(t))?{h:n[1],s:n[2],l:n[3],a:n[4]}:(n=X.hsv.exec(t))?{h:n[1],s:n[2],v:n[3]}:(n=X.hsva.exec(t))?{h:n[1],s:n[2],v:n[3],a:n[4]}:(n=X.hex8.exec(t))?{a:N(n[1]),r:z(n[2]),g:z(n[3]),b:z(n[4]),format:e?\"name\":\"hex8\"}:(n=X.hex6.exec(t))?{r:z(n[1]),g:z(n[2]),b:z(n[3]),format:e?\"name\":\"hex\"}:(n=X.hex3.exec(t))?{r:z(n[1]+\"\"+n[1]),g:z(n[2]+\"\"+n[2]),b:z(n[3]+\"\"+n[3]),format:e?\"name\":\"hex\"}:!1}function I(t){var e,n;return t=t||{level:\"AA\",size:\"small\"},e=(t.level||\"AA\").toUpperCase(),n=(t.size||\"small\").toLowerCase(),\"AA\"!==e&&\"AAA\"!==e&&(e=\"AA\"),\"small\"!==n&&\"large\"!==n&&(n=\"small\"),{level:e,size:n}}var R=/^\\s+/,j=/\\s+$/,q=0,F=Math,B=F.round,H=F.min,V=F.max,Z=F.random;e.prototype={isDark:function(){return this.getBrightness()<128},isLight:function(){return!this.isDark()},isValid:function(){return this._ok},getOriginalInput:function(){return this._originalInput},getFormat:function(){return this._format},getAlpha:function(){return this._a},getBrightness:function(){var t=this.toRgb();return(299*t.r+587*t.g+114*t.b)/1e3},getLuminance:function(){var t,e,n,r,a,o,i=this.toRgb();return t=i.r/255,e=i.g/255,n=i.b/255,r=.03928>=t?t/12.92:Math.pow((t+.055)/1.055,2.4),a=.03928>=e?e/12.92:Math.pow((e+.055)/1.055,2.4),o=.03928>=n?n/12.92:Math.pow((n+.055)/1.055,2.4),.2126*r+.7152*a+.0722*o},setAlpha:function(t){return this._a=A(t),this._roundA=B(100*this._a)/100,this},toHsv:function(){var t=l(this._r,this._g,this._b);return{h:360*t.h,s:t.s,v:t.v,a:this._a}},toHsvString:function(){var t=l(this._r,this._g,this._b),e=B(360*t.h),n=B(100*t.s),r=B(100*t.v);return 1==this._a?\"hsv(\"+e+\", \"+n+\"%, \"+r+\"%)\":\"hsva(\"+e+\", \"+n+\"%, \"+r+\"%, \"+this._roundA+\")\"},toHsl:function(){var t=o(this._r,this._g,this._b);return{h:360*t.h,s:t.s,l:t.l,a:this._a}},toHslString:function(){var t=o(this._r,this._g,this._b),e=B(360*t.h),n=B(100*t.s),r=B(100*t.l);return 1==this._a?\"hsl(\"+e+\", \"+n+\"%, \"+r+\"%)\":\"hsla(\"+e+\", \"+n+\"%, \"+r+\"%, \"+this._roundA+\")\"},toHex:function(t){return c(this._r,this._g,this._b,t)},toHexString:function(t){return\"#\"+this.toHex(t)},toHex8:function(){return u(this._r,this._g,this._b,this._a)},toHex8String:function(){return\"#\"+this.toHex8()},toRgb:function(){return{r:B(this._r),g:B(this._g),b:B(this._b),a:this._a}},toRgbString:function(){return 1==this._a?\"rgb(\"+B(this._r)+\", \"+B(this._g)+\", \"+B(this._b)+\")\":\"rgba(\"+B(this._r)+\", \"+B(this._g)+\", \"+B(this._b)+\", \"+this._roundA+\")\"},toPercentageRgb:function(){return{r:B(100*L(this._r,255))+\"%\",g:B(100*L(this._g,255))+\"%\",b:B(100*L(this._b,255))+\"%\",a:this._a}},toPercentageRgbString:function(){return 1==this._a?\"rgb(\"+B(100*L(this._r,255))+\"%, \"+B(100*L(this._g,255))+\"%, \"+B(100*L(this._b,255))+\"%)\":\"rgba(\"+B(100*L(this._r,255))+\"%, \"+B(100*L(this._g,255))+\"%, \"+B(100*L(this._b,255))+\"%, \"+this._roundA+\")\"},toName:function(){return 0===this._a?\"transparent\":this._a<1?!1:U[c(this._r,this._g,this._b,!0)]||!1},toFilter:function(t){var n=\"#\"+u(this._r,this._g,this._b,this._a),r=n,a=this._gradientType?\"GradientType = 1, \":\"\";if(t){var o=e(t);r=o.toHex8String()}return\"progid:DXImageTransform.Microsoft.gradient(\"+a+\"startColorstr=\"+n+\",endColorstr=\"+r+\")\"},toString:function(t){var e=!!t;t=t||this._format;var n=!1,r=this._a<1&&this._a>=0,a=!e&&r&&(\"hex\"===t||\"hex6\"===t||\"hex3\"===t||\"name\"===t);return 
a?\"name\"===t&&0===this._a?this.toName():this.toRgbString():(\"rgb\"===t&&(n=this.toRgbString()),\"prgb\"===t&&(n=this.toPercentageRgbString()),\"hex\"!==t&&\"hex6\"!==t||(n=this.toHexString()),\"hex3\"===t&&(n=this.toHexString(!0)),\"hex8\"===t&&(n=this.toHex8String()),\"name\"===t&&(n=this.toName()),\"hsl\"===t&&(n=this.toHslString()),\"hsv\"===t&&(n=this.toHsvString()),n||this.toHexString())},clone:function(){return e(this.toString())},_applyModification:function(t,e){var n=t.apply(null,[this].concat([].slice.call(e)));return this._r=n._r,this._g=n._g,this._b=n._b,this.setAlpha(n._a),this},lighten:function(){return this._applyModification(p,arguments)},brighten:function(){return this._applyModification(g,arguments)},darken:function(){return this._applyModification(v,arguments)},desaturate:function(){return this._applyModification(f,arguments)},saturate:function(){return this._applyModification(d,arguments)},greyscale:function(){return this._applyModification(h,arguments)},spin:function(){return this._applyModification(m,arguments)},_applyCombination:function(t,e){return t.apply(null,[this].concat([].slice.call(e)))},analogous:function(){return this._applyCombination(w,arguments)},complement:function(){return this._applyCombination(y,arguments)},monochromatic:function(){return this._applyCombination(k,arguments)},splitcomplement:function(){return this._applyCombination(_,arguments)},triad:function(){return this._applyCombination(x,arguments)},tetrad:function(){return this._applyCombination(b,arguments)}},e.fromRatio=function(t,n){if(\"object\"==typeof t){var r={};for(var a in t)t.hasOwnProperty(a)&&(\"a\"===a?r[a]=t[a]:r[a]=O(t[a]));t=r}return e(t,n)},e.equals=function(t,n){return t&&n?e(t).toRgbString()==e(n).toRgbString():!1},e.random=function(){return e.fromRatio({r:Z(),g:Z(),b:Z()})},e.mix=function(t,n,r){r=0===r?0:r||50;var a,o=e(t).toRgb(),i=e(n).toRgb(),l=r/100,s=2*l-1,c=i.a-o.a;a=s*c==-1?s:(s+c)/(1+s*c),a=(a+1)/2;var u=1-a,f={r:i.r*a+o.r*u,g:i.g*a+o.g*u,b:i.b*a+o.b*u,a:i.a*l+o.a*(1-l)};return e(f)},e.readability=function(t,n){var r=e(t),a=e(n);return(Math.max(r.getLuminance(),a.getLuminance())+.05)/(Math.min(r.getLuminance(),a.getLuminance())+.05)},e.isReadable=function(t,n,r){var a,o,i=e.readability(t,n);switch(o=!1,a=I(r),a.level+a.size){case\"AAsmall\":case\"AAAlarge\":o=i>=4.5;break;case\"AAlarge\":o=i>=3;break;case\"AAAsmall\":o=i>=7}return o},e.mostReadable=function(t,n,r){var a,o,i,l,s=null,c=0;r=r||{},o=r.includeFallbackColors,i=r.level,l=r.size;for(var u=0;u<n.length;u++)a=e.readability(t,n[u]),a>c&&(c=a,s=e(n[u]));return e.isReadable(t,s,{level:i,size:l})||!o?s:(r.includeFallbackColors=!1,e.mostReadable(t,[\"#fff\",\"#000\"],r))};var 
Y=e.names={aliceblue:\"f0f8ff\",antiquewhite:\"faebd7\",aqua:\"0ff\",aquamarine:\"7fffd4\",azure:\"f0ffff\",beige:\"f5f5dc\",bisque:\"ffe4c4\",black:\"000\",blanchedalmond:\"ffebcd\",blue:\"00f\",blueviolet:\"8a2be2\",brown:\"a52a2a\",burlywood:\"deb887\",burntsienna:\"ea7e5d\",cadetblue:\"5f9ea0\",chartreuse:\"7fff00\",chocolate:\"d2691e\",coral:\"ff7f50\",cornflowerblue:\"6495ed\",cornsilk:\"fff8dc\",crimson:\"dc143c\",cyan:\"0ff\",darkblue:\"00008b\",darkcyan:\"008b8b\",darkgoldenrod:\"b8860b\",darkgray:\"a9a9a9\",darkgreen:\"006400\",darkgrey:\"a9a9a9\",darkkhaki:\"bdb76b\",darkmagenta:\"8b008b\",darkolivegreen:\"556b2f\",darkorange:\"ff8c00\",darkorchid:\"9932cc\",darkred:\"8b0000\",darksalmon:\"e9967a\",darkseagreen:\"8fbc8f\",darkslateblue:\"483d8b\",darkslategray:\"2f4f4f\",darkslategrey:\"2f4f4f\",darkturquoise:\"00ced1\",darkviolet:\"9400d3\",deeppink:\"ff1493\",deepskyblue:\"00bfff\",dimgray:\"696969\",dimgrey:\"696969\",dodgerblue:\"1e90ff\",firebrick:\"b22222\",floralwhite:\"fffaf0\",forestgreen:\"228b22\",fuchsia:\"f0f\",gainsboro:\"dcdcdc\",ghostwhite:\"f8f8ff\",gold:\"ffd700\",goldenrod:\"daa520\",gray:\"808080\",green:\"008000\",greenyellow:\"adff2f\",grey:\"808080\",honeydew:\"f0fff0\",hotpink:\"ff69b4\",indianred:\"cd5c5c\",indigo:\"4b0082\",ivory:\"fffff0\",khaki:\"f0e68c\",lavender:\"e6e6fa\",lavenderblush:\"fff0f5\",lawngreen:\"7cfc00\",lemonchiffon:\"fffacd\",lightblue:\"add8e6\",lightcoral:\"f08080\",lightcyan:\"e0ffff\",lightgoldenrodyellow:\"fafad2\",lightgray:\"d3d3d3\",lightgreen:\"90ee90\",lightgrey:\"d3d3d3\",lightpink:\"ffb6c1\",lightsalmon:\"ffa07a\",lightseagreen:\"20b2aa\",lightskyblue:\"87cefa\",lightslategray:\"789\",lightslategrey:\"789\",lightsteelblue:\"b0c4de\",lightyellow:\"ffffe0\",lime:\"0f0\",limegreen:\"32cd32\",linen:\"faf0e6\",magenta:\"f0f\",maroon:\"800000\",mediumaquamarine:\"66cdaa\",mediumblue:\"0000cd\",mediumorchid:\"ba55d3\",mediumpurple:\"9370db\",mediumseagreen:\"3cb371\",mediumslateblue:\"7b68ee\",mediumspringgreen:\"00fa9a\",mediumturquoise:\"48d1cc\",mediumvioletred:\"c71585\",midnightblue:\"191970\",mintcream:\"f5fffa\",mistyrose:\"ffe4e1\",moccasin:\"ffe4b5\",navajowhite:\"ffdead\",navy:\"000080\",oldlace:\"fdf5e6\",olive:\"808000\",olivedrab:\"6b8e23\",orange:\"ffa500\",orangered:\"ff4500\",orchid:\"da70d6\",palegoldenrod:\"eee8aa\",palegreen:\"98fb98\",paleturquoise:\"afeeee\",palevioletred:\"db7093\",papayawhip:\"ffefd5\",peachpuff:\"ffdab9\",peru:\"cd853f\",pink:\"ffc0cb\",plum:\"dda0dd\",powderblue:\"b0e0e6\",purple:\"800080\",rebeccapurple:\"663399\",red:\"f00\",rosybrown:\"bc8f8f\",royalblue:\"4169e1\",saddlebrown:\"8b4513\",salmon:\"fa8072\",sandybrown:\"f4a460\",seagreen:\"2e8b57\",seashell:\"fff5ee\",sienna:\"a0522d\",silver:\"c0c0c0\",skyblue:\"87ceeb\",slateblue:\"6a5acd\",slategray:\"708090\",slategrey:\"708090\",snow:\"fffafa\",springgreen:\"00ff7f\",steelblue:\"4682b4\",tan:\"d2b48c\",teal:\"008080\",thistle:\"d8bfd8\",tomato:\"ff6347\",turquoise:\"40e0d0\",violet:\"ee82ee\",wheat:\"f5deb3\",white:\"fff\",whitesmoke:\"f5f5f5\",yellow:\"ff0\",yellowgreen:\"9acd32\"},U=e.hexNames=M(Y),X=function(){var t=\"[-\\\\+]?\\\\d+%?\",e=\"[-\\\\+]?\\\\d*\\\\.\\\\d+%?\",n=\"(?:\"+e+\")|(?:\"+t+\")\",r=\"[\\\\s|\\\\(]+(\"+n+\")[,|\\\\s]+(\"+n+\")[,|\\\\s]+(\"+n+\")\\\\s*\\\\)?\",a=\"[\\\\s|\\\\(]+(\"+n+\")[,|\\\\s]+(\"+n+\")[,|\\\\s]+(\"+n+\")[,|\\\\s]+(\"+n+\")\\\\s*\\\\)?\";return{rgb:new RegExp(\"rgb\"+r),rgba:new RegExp(\"rgba\"+a),hsl:new RegExp(\"hsl\"+r),hsla:new RegExp(\"hsla\"+a),hsv:new RegExp(\"hsv\"+r),hsva:new 
RegExp(\"hsva\"+a),hex3:/^#?([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/,hex6:/^#?([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})$/,hex8:/^#?([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})$/}}();\"undefined\"!=typeof n&&n.exports?n.exports=e:\"function\"==typeof t&&t.amd?t(function(){return e}):window.tinycolor=e}()},{}],14:[function(t,e,n){\"use strict\";e.exports=[\"\",{path:\"M-2.4,-3V3L0.6,0Z\",backoff:.6},{path:\"M-3.7,-2.5V2.5L1.3,0Z\",backoff:1.3},{path:\"M-4.45,-3L-1.65,-0.2V0.2L-4.45,3L1.55,0Z\",backoff:1.55},{path:\"M-2.2,-2.2L-0.2,-0.2V0.2L-2.2,2.2L-1.4,3L1.6,0L-1.4,-3Z\",backoff:1.6},{path:\"M-4.4,-2.1L-0.6,-0.2V0.2L-4.4,2.1L-4,3L2,0L-4,-3Z\",backoff:2},{path:\"M2,0A2,2 0 1,1 0,-2A2,2 0 0,1 2,0Z\",backoff:0},{path:\"M2,2V-2H-2V2Z"
+,
+"\",backoff:0}]},{}],15:[function(t,e,n){\"use strict\";var r=t(\"./arrow_paths\"),a=t(\"../../plots/font_attributes\"),o=t(\"../../plots/cartesian/constants\"),i=t(\"../../lib/extend\").extendFlat;e.exports={_isLinkedToArray:!0,text:{valType:\"string\"},textangle:{valType:\"angle\",dflt:0},font:i({},a,{}),opacity:{valType:\"number\",min:0,max:1,dflt:1},align:{valType:\"enumerated\",values:[\"left\",\"center\",\"right\"],dflt:\"center\"},bgcolor:{valType:\"color\",dflt:\"rgba(0,0,0,0)\"},bordercolor:{valType:\"color\",dflt:\"rgba(0,0,0,0)\"},borderpad:{valType:\"number\",min:0,dflt:1},borderwidth:{valType:\"number\",min:0,dflt:1},showarrow:{valType:\"boolean\",dflt:!0},arrowcolor:{valType:\"color\"},arrowhead:{valType:\"integer\",min:0,max:r.length,dflt:1},arrowsize:{valType:\"number\",min:.3,dflt:1},arrowwidth:{valType:\"number\",min:.1},ax:{valType:\"number\",dflt:-10},ay:{valType:\"number\",dflt:-30},axref:{valType:\"enumerated\",dflt:\"pixel\",values:[\"pixel\",o.idRegex.x.toString()]},ayref:{valType:\"enumerated\",dflt:\"pixel\",values:[\"pixel\",o.idRegex.y.toString()]},xref:{valType:\"enumerated\",values:[\"paper\",o.idRegex.x.toString()]},x:{valType:\"number\"},xanchor:{valType:\"enumerated\",values:[\"auto\",\"left\",\"center\",\"right\"],dflt:\"auto\"},yref:{valType:\"enumerated\",values:[\"paper\",o.idRegex.y.toString()]},y:{valType:\"number\"},yanchor:{valType:\"enumerated\",values:[\"auto\",\"top\",\"middle\",\"bottom\"],dflt:\"auto\"},_deprecated:{ref:{valType:\"string\"}}}},{\"../../lib/extend\":88,\"../../plots/cartesian/constants\":115,\"../../plots/font_attributes\":128,\"./arrow_paths\":14}],16:[function(t,e,n){\"use strict\";function r(t,e){function n(e,n){return c.coerce(t,r,v.layoutAttributes,e,n)}var r={};n(\"opacity\"),n(\"align\"),n(\"bgcolor\");var a=n(\"bordercolor\"),o=f.opacity(a);n(\"borderpad\");var i=n(\"borderwidth\"),l=n(\"showarrow\");l&&(n(\"arrowcolor\",o?r.bordercolor:f.defaultLine),n(\"arrowhead\"),n(\"arrowsize\"),n(\"arrowwidth\",2*(o&&i||1)),n(\"ax\"),n(\"ay\"),n(\"axref\"),n(\"ayref\"),c.noneOrAll(t,r,[\"ax\",\"ay\"])),n(\"text\",l?\"&nbsp;\":\"new text\"),n(\"textangle\"),c.coerceFont(n,\"font\",e.font);\n"
+,
+"for(var s=[\"x\",\"y\"],d=0;2>d;d++){var h=s[d],p={_fullLayout:e},g=u.coerceRef(t,r,p,h),m=u.coerceARef(t,r,p,h),y=.5;if(\"paper\"!==g){var x=u.getFromId(p,g);if(y=x.range[0]+y*(x.range[1]-x.range[0]),-1!==[\"date\",\"category\"].indexOf(x.type)&&\"string\"==typeof t[h]){var b;if(\"date\"===x.type){if(b=c.dateTime2ms(t[h]),b!==!1&&(t[h]=b),m===g){var _=c.dateTime2ms(t[\"a\"+h]);_!==!1&&(t[\"a\"+h]=_)}}else(x._categories||[]).length&&(b=x._categories.indexOf(t[h]),-1!==b&&(t[h]=b))}}n(h,y),l||n(h+\"anchor\")}return c.noneOrAll(t,r,[\"x\",\"y\"]),r}function a(t){var e=t._fullLayout;e.annotations.forEach(function(e){var n=u.getFromId(t,e.xref),r=u.getFromId(t,e.yref);if(n||r){var a=(e._xsize||0)/2,o=e._xshift||0,i=(e._ysize||0)/2,l=e._yshift||0,s=a-o,c=a+o,f=i-l,d=i+l;if(e.showarrow){var h=3*e.arrowsize*e.arrowwidth;s=Math.max(s,h),c=Math.max(c,h),f=Math.max(f,h),d=Math.max(d,h)}n&&n.autorange&&u.expand(n,[n.l2c(e.x)],{ppadplus:c,ppadminus:s}),r&&r.autorange&&u.expand(r,[r.l2c(e.y)],{ppadplus:d,ppadminus:f})}})}function o(t,e,n,r,a,o,i,l){var s=n-t,c=a-t,u=i-a,f=r-e,d=o-e,h=l-o,p=s*h-u*f;if(0===p)return null;var g=(c*h-u*d)/p,v=(c*f-s*d)/p;return 0>v||v>1||0>g||g>1?null:{x:t+s*g,y:e+f*g}}var i=t(\"d3\"),l=t(\"fast-isnumeric\"),s=t(\"../../plotly\"),c=t(\"../../lib\"),u=t(\"../../plots/cartesian/axes\"),f=t(\"../color\"),d=t(\"../drawing\"),h=t(\"../../lib/svg_text_utils\"),p=t(\"../../lib/setcursor\"),g=t(\"../dragelement\"),v=e.exports={};v.ARROWPATHS=t(\"./arrow_paths\"),v.layoutAttributes=t(\"./attributes\"),v.supplyLayoutDefaults=function(t,e){for(var n=t.annotations||[],a=e.annotations=[],o=0;o<n.length;o++)a.push(r(n[o]||{},e))},v.drawAll=function(t){var e=t._fullLayout;e._infolayer.selectAll(\".annotation\").remove();for(var n=0;n<e.annotations.length;n++)v.draw(t,n);return s.Plots.previousPromises(t)},v.add=function(t){var e=t._fullLayout.annotations.length;s.relayout(t,\"annotations[\"+e+\"]\",\"add\")},v.draw=function(t,e,n,a){function m(t){return t.call(d.font,K).attr({\"text-anchor\":{left:\"start\",right:\"end\"}[B.align]||\"middle\"}),h.convertToTspans(t,y),t}function y(){function n(t,e){return\"auto\"===e&&(e=1/3>t?\"left\":t>2/3?\"right\":\"center\"),{center:0,middle:0,left:.5,bottom:-.5,right:-.5,top:.5}[e]}tt.selectAll(\"tspan.line\").attr({y:0,x:0});var r=G.select(\".annotation-math-group\"),a=!r.empty(),l=d.bBox((a?r:tt).node()),h=l.width,m=l.height,y=Math.round(h+2*W),x=Math.round(m+2*W);B._w=h,B._h=m;var b=!1;if([\"x\",\"y\"].forEach(function(e){var r,a=B[e+\"ref\"]||e,o=u.getFromId(t,a),i=(Y+(\"x\"===e?0:90))*Math.PI/180,l=y*Math.abs(Math.cos(i))+x*Math.abs(Math.sin(i)),s=B[e+\"anchor\"];if(o){if(!o.autorange&&(B[e]-o.range[0])*(B[e]-o.range[1])>0&&(B[\"a\"+e+\"ref\"]===a?(B[\"a\"+e]-o.range[0])*(B[\"a\"+e]-o.range[1])>0&&(b=!0):b=!0,b))return;Z[e]=o._offset+o.l2p(B[e]),r=.5}else r=B[e],\"y\"===e&&(r=1-r),Z[e]=\"x\"===e?S.l+S.w*r:S.t+S.h*r;var c=0;B[\"a\"+e+\"ref\"]===a?Z[\"aa\"+e]=o._offset+o.l2p(B[\"a\"+e]):(c=B.showarrow?B[\"a\"+e]:l*n(r,s),Z[e]+=c),B[\"_\"+e+\"type\"]=o&&o.type,B[\"_\"+e+\"size\"]=l,B[\"_\"+e+\"shift\"]=c}),b)return void G.remove();var w,k;B.showarrow&&(w=B.axref===B.xref?Z.x:c.constrain(Z.x-B.ax,1,_.width-1),k=B.ayref===B.yref?Z.y:c.constrain(Z.y-B.ay,1,_.height-1)),Z.x=c.constrain(Z.x,1,_.width-1),Z.y=c.constrain(Z.y,1,_.height-1);var M=W-l.top,A=W-l.left;a?r.select(\"svg\").attr({x:W-1,y:W}):(tt.attr({x:A,y:M}),tt.selectAll(\"tspan.line\").attr({y:M,x:A})),J.call(d.setRect,$/2,$/2,y-$,x-$);var 
L=0,T=0;L=B.axref===B.xref?Math.round(Z.aax-y/2):Math.round(Z.x-y/2),T=B.ayref===B.yref?Math.round(Z.aay-x/2):Math.round(Z.y-x/2),G.call(c.setTranslate,L,T);var z=\"annotations[\"+e+\"]\",E=function(n,r){i.select(t).selectAll('.annotation-arrow-g[data-index=\"'+e+'\"]').remove();var a,l;a=B.axref===B.xref?Z.aax+n:Z.x+n,l=B.ayref===B.yref?Z.aay+r:Z.y+r;var u=c.rotationXYMatrix(Y,a,l),d=c.apply2DTransform(u),h=c.apply2DTransform2(u),p=J.attr(\"width\")/2,m=J.attr(\"height\")/2,y=[[a-p,l-m,a-p,l+m],[a-p,l+m,a+p,l+m],[a+p,l+m,a+p,l-m],[a+p,l-m,a-p,l-m]].map(h);if(!y.reduce(function(t,e){return t^!!o(w,k,w+1e6,k+1e6,e[0],e[1],e[2],e[3])},!1)){y.forEach(function(t){var e=o(a,l,w,k,t[0],t[1],t[2],t[3]);e&&(a=e.x,l=e.y)});var x=B.arrowwidth,b=B.arrowcolor,_=U.append(\"g\").style({opacity:f.opacity(b)}).classed(\"annotation-arrow-g\",!0).attr(\"data-index\",String(e)),M=_.append(\"path\").attr(\"d\",\"M\"+a+\",\"+l+\"L\"+w+\",\"+k).style(\"stroke-width\",x+\"px\").call(f.stroke,f.rgb(b));v.arrowhead(M,B.arrowhead,\"end\",B.arrowsize);var A=_.append(\"path\").classed(\"annotation\",!0).classed(\"anndrag\",!0).attr({\"data-index\":String(e),d:\"M3,3H-3V-3H3ZM0,0L\"+(a-w)+\",\"+(l-k),transform:\"translate(\"+w+\",\"+k+\")\"}).style(\"stroke-width\",x+6+\"px\").call(f.stroke,\"rgba(0,0,0,0)\").call(f.fill,\"rgba(0,0,0,0)\");if(t._context.editable){var L,T,E;g.init({element:A.node(),prepFn:function(){var t=c.getTranslate(G);T=t.x,E=t.y,L={},H&&H.autorange&&(L[H._name+\".autorange\"]=!0),V&&V.autorange&&(L[V._name+\".autorange\"]=!0)},moveFn:function(t,e){_.attr(\"transform\",\"translate(\"+t+\",\"+e+\")\");var n=d(T,E),r=n[0]+t,a=n[1]+e;G.call(c.setTranslate,r,a),L[z+\".x\"]=H?B.x+t/H._m:(w+t-S.l)/S.w,L[z+\".y\"]=V?B.y+e/V._m:1-(k+e-S.t)/S.h,B.axref===B.xref&&(L[z+\".ax\"]=H?B.ax+t/H._m:(w+t-S.l)/S.w),B.ayref===B.yref&&(L[z+\".ay\"]=V?B.ay+e/V._m:1-(k+e-S.t)/S.h),X.attr({transform:\"rotate(\"+Y+\",\"+r+\",\"+a+\")\"})},doneFn:function(e){if(e){s.relayout(t,L);var n=document.querySelector(\".js-notes-box-panel\");n&&n.redraw(n.selectedObj)}}})}}};B.showarrow&&E(0,0);var C=c.rotationXYMatrix(Y,Z.x,Z.y),O=c.apply2DTransform(C);if(t._context.editable){var P,N,D;g.init({element:G.node(),prepFn:function(){var t=c.getTranslate(G);P=t.x,N=t.y,D={}},moveFn:function(t,e){G.call(c.setTranslate,P+t,N+e);var n=\"pointer\";if(B.showarrow)B.axref===B.xref?D[z+\".ax\"]=H.p2l(H.l2p(B.ax)+t):D[z+\".ax\"]=B.ax+t,B.ayref===B.yref?D[z+\".ay\"]=V.p2l(V.l2p(B.ay)+e):D[z+\".ay\"]=B.ay+e,E(t,e);else{if(H)D[z+\".x\"]=B.x+t/H._m;else{var r=B._xsize/S.w,a=B.x+B._xshift/S.w-r/2;D[z+\".x\"]=g.align(a+t/S.w,r,0,1,B.xanchor)}if(V)D[z+\".y\"]=B.y+e/V._m;else{var o=B._ysize/S.h,i=B.y-B._yshift/S.h-o/2;D[z+\".y\"]=g.align(i-e/S.h,o,0,1,B.yanchor)}H&&V||(n=g.getCursor(H?.5:D[z+\".x\"],V?.5:D[z+\".y\"],B.xanchor,B.yanchor))}var l=O(P,N),s=l[0]+t,u=l[1]+e;G.call(c.setTranslate,P+t,N+e),X.attr({transform:\"rotate(\"+Y+\",\"+s+\",\"+u+\")\"}),p(G,n)},doneFn:function(e){if(p(G),e){s.relayout(t,D);var n=document.querySelector(\".js-notes-box-panel\");n&&n.redraw(n.selectedObj)}}})}}var x,b=t.layout,_=t._fullLayout;if(!l(e)||-1===e){if(!e&&Array.isArray(a))return b.annotations=a,v.supplyLayoutDefaults(b,_),void v.drawAll(t);if(\"remove\"===a)return delete b.annotations,_.annotations=[],void 
v.drawAll(t);if(n&&\"add\"!==a){for(x=0;x<_.annotations.length;x++)v.draw(t,x,n,a);return}e=_.annotations.length,_.annotations.push({})}if(!n&&a){if(\"remove\"===a){for(_._infolayer.selectAll('.annotation[data-index=\"'+e+'\"]').remove(),_.annotations.splice(e,1),b.annotations.splice(e,1),x=e;x<_.annotations.length;x++)_._infolayer.selectAll('.annotation[data-index=\"'+(x+1)+'\"]').attr(\"data-index\",String(x)),v.draw(t,x);return}if(\"add\"===a||c.isPlainObject(a)){_.annotations.splice(e,0,{});var w=c.isPlainObject(a)?c.extendFlat({},a):{text:\"New text\"};for(b.annotations?b.annotations.splice(e,0,w):b.annotations=[w],x=_.annotations.length-1;x>e;x--)_._infolayer.selectAll('.annotation[data-index=\"'+(x-1)+'\"]').attr(\"data-index\",String(x)),v.draw(t,x)}}_._infolayer.selectAll('.annotation[data-index=\"'+e+'\"]').remove();var k=b.annotations[e],M=_.annotations[e];if(k){var A={xref:k.xref,yref:k.yref},L={};\"string\"==typeof n&&n?L[n]=a:c.isPlainObject(n)&&(L=n);var T=Object.keys(L);for(x=0;x<T.length;x++){var z=T[x];c.nestedProperty(k,z).set(L[z])}var S=_._size,E=[\"x\",\"y\"];for(x=0;2>x;x++){var C=E[x];if(void 0===L[C]&&void 0!==k[C]){var O=u.getFromId(t,u.coerceRef(A,{},t,C)),P=u.getFromId(t,u.coerceRef(k,{},t,C)),N=k[C],D=M[\"_\"+C+\"type\"];if(void 0!==L[C+\"ref\"]){var I=\"auto\"===k[C+\"anchor\"],R=\"x\"===C?S.w:S.h,j=(M[\"_\"+C+\"size\"]||0)/(2*R);if(O&&P)N=(N-O.range[0])/(O.range[1]-O.range[0]),N=P.range[0]+N*(P.range[1]-P.range[0]);else if(O){if(N=(N-O.range[0])/(O.range[1]-O.range[0]),N=O.domain[0]+N*(O.domain[1]-O.domain[0]),I){var q=N+j,F=N-j;2/3>N+F?N=F:N+q>4/3&&(N=q)}}else P&&(I&&(1/3>N?N+=j:N>2/3&&(N-=j)),N=(N-P.domain[0])/(P.domain[1]-P.domain[0]),N=P.range[0]+N*(P.range[1]-P.range[0]))}P&&P===O&&D&&(\"log\"===D&&\"log\"!==P.type?N=Math.pow(10,N):\"log\"!==D&&\"log\"===P.type&&(N=N>0?Math.log(N)/Math.LN10:void 0)),k[C]=N}}var B=r(k,_);_.annotations[e]=B;var H=u.getFromId(t,B.xref),V=u.getFromId(t,B.yref),Z={x:0,y:0},Y=+B.textangle||0,U=_._infolayer.append(\"g\").classed(\"annotation\",!0).attr(\"data-index\",String(e)).style(\"opacity\",B.opacity).on(\"click\",function(){t._dragging=!1,t.emit(\"plotly_clickannotation\",{index:e,annotation:k,fullAnnotation:B})}),X=U.append(\"g\").classed(\"annotation-text-g\",!0).attr(\"data-index\",String(e)),G=X.append(\"g\"),$=B.borderwidth,Q=B.borderpad,W=$+Q,J=G.append(\"rect\").attr(\"class\",\"bg\").style(\"stroke-width\",$+\"px\").call(f.stroke,B.bordercolor).call(f.fill,B.bgcolor),K=B.font,tt=G.append(\"text\").classed(\"annotation\",!0).attr(\"data-unformatted\",B.text).text(B.text);t._context.editable?tt.call(h.makeEditable,G).call(m).on(\"edit\",function(n){B.text=n,this.attr({\"data-unformatted\":B.text}),this.call(m);var r={};r[\"annotations[\"+e+\"].text\"]=B.text,H&&H.autorange&&(r[H._name+\".autorange\"]=!0),V&&V.autorange&&(r[V._name+\".autorange\"]=!0),s.relayout(t,r)}):tt.call(m),X.attr({transform:\"rotate(\"+Y+\",\"+Z.x+\",\"+Z.y+\")\"}).call(d.setPosition,Z.x,Z.y)}},v.arrowhead=function(t,e,n,r){l(r)||(r=1);var a=t.node(),o=v.ARROWPATHS[e||0];if(o){\"string\"==typeof n&&n||(n=\"end\");var s,c,u,h,p=(d.getPx(t,\"stroke-width\")||1)*r,g=t.style(\"stroke\")||f.defaultLine,m=t.style(\"stroke-opacity\")||1,y=n.indexOf(\"start\")>=0,x=n.indexOf(\"end\")>=0,b=o.backoff*p;if(\"line\"===a.nodeName){if(s={x:+t.attr(\"x1\"),y:+t.attr(\"y1\")},c={x:+t.attr(\"x2\"),y:+t.attr(\"y2\")},u=Math.atan2(s.y-c.y,s.x-c.x),h=u+Math.PI,b){var 
_=b*Math.cos(u),w=b*Math.sin(u);y&&(s.x-=_,s.y-=w,t.attr({x1:s.x,y1:s.y})),x&&(c.x+=_,c.y+=w,t.attr({x2:c.x,y2:c.y}))}}else i"
+,
+"f(\"path\"===a.nodeName){var k=a.getTotalLength(),M=\"\";if(y){var A=a.getPointAtLength(0),L=a.getPointAtLength(.1);u=Math.atan2(A.y-L.y,A.x-L.x),s=a.getPointAtLength(Math.min(b,k)),b&&(M=\"0px,\"+b+\"px,\")}if(x){var T=a.getPointAtLength(k),z=a.getPointAtLength(k-.1);if(h=Math.atan2(T.y-z.y,T.x-z.x),c=a.getPointAtLength(Math.max(0,k-b)),b){var S=M?2*b:b;M+=k-S+\"px,\"+k+\"px\"}}else M&&(M+=k+\"px\");M&&t.style(\"stroke-dasharray\",M)}var E=function(n,r){e>5&&(r=0),i.select(a.parentElement).append(\"path\").attr({\"class\":t.attr(\"class\"),d:o.path,transform:\"translate(\"+n.x+\",\"+n.y+\")rotate(\"+180*r/Math.PI+\")scale(\"+p+\")\"}).style({fill:g,opacity:m,\"stroke-width\":0})};y&&E(s,u),x&&E(c,h)}},v.calcAutorange=function(t){var e=t._fullLayout,n=e.annotations;if(n.length&&t._fullData.length){var r={};n.forEach(function(t){r[t.xref]=!0,r[t.yref]=!0});var o=u.list(t).filter(function(t){return t.autorange&&r[t._id]});if(o.length)return c.syncOrAsync([v.drawAll,a],t)}}},{\"../../lib\":89,\"../../lib/setcursor\":98,\"../../lib/svg_text_utils\":100,\"../../plotly\":107,\"../../plots/cartesian/axes\":110,\"../color\":18,\"../dragelement\":39,\"../drawing\":41,\"./arrow_paths\":14,\"./attributes\":15,d3:9,\"fast-isnumeric\":11}],17:[function(t,e,n){\"use strict\";n.defaults=[\"#1f77b4\",\"#ff7f0e\",\"#2ca02c\",\"#d62728\",\"#9467bd\",\"#8c564b\",\"#e377c2\",\"#7f7f7f\",\"#bcbd22\",\"#17becf\"],n.defaultLine=\"#444\",n.lightLine=\"#eee\",n.background=\"#fff\",n.lightFraction=1e3/11},{}],18:[function(t,e,n){\"use strict\";function r(t){if(o(t)||\"string\"!=typeof t)return t;var e=t.trim();if(\"rgb\"!==e.substr(0,3))return t;var n=e.match(/^rgba?\\s*\\(([^()]*)\\)$/);if(!n)return t;var r=n[1].trim().split(/\\s*[\\s,]\\s*/),a=\"a\"===e.charAt(3)&&4===r.length;if(!a&&3!==r.length)return t;for(var i=0;i<r.length;i++){if(!r[i].length)return t;if(r[i]=Number(r[i]),!(r[i]>=0))return t;if(3===i)r[i]>1&&(r[i]=1);else if(r[i]>=1)return t}var l=Math.round(255*r[0])+\", \"+Math.round(255*r[1])+\", \"+Math.round(255*r[2]);return a?\"rgba(\"+l+\", \"+r[3]+\")\":\"rgb(\"+l+\")\"}var a=t(\"tinycolor2\"),o=t(\"fast-isnumeric\"),i=e.exports={},l=t(\"./attributes\");i.defaults=l.defaults,i.defaultLine=l.defaultLine,i.lightLine=l.lightLine,i.background=l.background,i.tinyRGB=function(t){var e=t.toRgb();return\"rgb(\"+Math.round(e.r)+\", \"+Math.round(e.g)+\", \"+Math.round(e.b)+\")\"},i.rgb=function(t){return i.tinyRGB(a(t))},i.opacity=function(t){return t?a(t).getAlpha():0},i.addOpacity=function(t,e){var n=a(t).toRgb();return\"rgba(\"+Math.round(n.r)+\", \"+Math.round(n.g)+\", \"+Math.round(n.b)+\", \"+e+\")\"},i.combine=function(t,e){var n=a(t).toRgb();if(1===n.a)return a(t).toRgbString();var r=a(e||i.background).toRgb(),o=1===r.a?r:{r:255*(1-r.a)+r.r*r.a,g:255*(1-r.a)+r.g*r.a,b:255*(1-r.a)+r.b*r.a},l={r:o.r*(1-n.a)+n.r*n.a,g:o.g*(1-n.a)+n.g*n.a,b:o.b*(1-n.a)+n.b*n.a};return a(l).toRgbString()},i.stroke=function(t,e){var n=a(e);t.style({stroke:i.tinyRGB(n),\"stroke-opacity\":n.getAlpha()})},i.fill=function(t,e){var n=a(e);t.style({fill:i.tinyRGB(n),\"fill-opacity\":n.getAlpha()})},i.clean=function(t){if(t&&\"object\"==typeof t){var e,n,a,o,l=Object.keys(t);for(e=0;e<l.length;e++)if(a=l[e],o=t[a],\"color\"===a.substr(a.length-5))if(Array.isArray(o))for(n=0;n<o.length;n++)o[n]=r(o[n]);else t[a]=r(o);else if(\"colorscale\"===a.substr(a.length-10)&&Array.isArray(o))for(n=0;n<o.length;n++)Array.isArray(o[n])&&(o[n][1]=r(o[n][1]));else if(Array.isArray(o)){var s=o[0];if(!Array.isArray(s)&&s&&\"object\"==typeof 
s)for(n=0;n<o.length;n++)i.clean(o[n])}else o&&\"object\"==typeof o&&i.clean(o)}}},{\"./attributes\":17,\"fast-isnumeric\":11,tinycolor2:13}],19:[function(t,e,n){\"use strict\";var r=t(\"../../plots/cartesian/layout_attributes\"),a=t(\"../../plots/font_attributes\"),o=t(\"../../lib/extend\").extendFlat;e.exports={thicknessmode:{valType:\"enumerated\",values:[\"fraction\",\"pixels\"],dflt:\"pixels\"},thickness:{valType:\"number\",min:0,dflt:30},lenmode:{valType:\"enumerated\",values:[\"fraction\",\"pixels\"],dflt:\"fraction\"},len:{valType:\"number\",min:0,dflt:1},x:{valType:\"number\",dflt:1.02,min:-2,max:3},xanchor:{valType:\"enumerated\",values:[\"left\",\"center\",\"right\"],dflt:\"left\"},xpad:{valType:\"number\",min:0,dflt:10},y:{valType:\"number\",dflt:.5,min:-2,max:3},yanchor:{valType:\"enumerated\",values:[\"top\",\"middle\",\"bottom\"],dflt:\"middle\"},ypad:{valType:\"number\",min:0,dflt:10},outlinecolor:r.linecolor,outlinewidth:r.linewidth,bordercolor:r.linecolor,borderwidth:{valType:\"number\",min:0,dflt:0},bgcolor:{valType:\"color\",dflt:\"rgba(0,0,0,0)\"},tickmode:r.tickmode,nticks:r.nticks,tick0:r.tick0,dtick:r.dtick,tickvals:r.tickvals,ticktext:r.ticktext,ticks:o({},r.ticks,{dflt:\"\"}),ticklen:r.ticklen,tickwidth:r.tickwidth,tickcolor:r.tickcolor,showticklabels:r.showticklabels,tickfont:r.tickfont,tickangle:r.tickangle,tickformat:r.tickformat,tickprefix:r.tickprefix,showtickprefix:r.showtickprefix,ticksuffix:r.ticksuffix,showticksuffix:r.showticksuffix,exponentformat:r.exponentformat,showexponent:r.showexponent,title:{valType:\"string\",dflt:\"Click to enter colorscale title\"},titlefont:o({},a,{}),titleside:{valType:\"enumerated\",values:[\"right\",\"top\",\"bottom\"],dflt:\"top\"}}},{\"../../lib/extend\":88,\"../../plots/cartesian/layout_attributes\":119,\"../../plots/font_attributes\":128}],20:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"../../plots/cartesian/tick_value_defaults\"),o=t(\"../../plots/cartesian/tick_mark_defaults\"),i=t(\"../../plots/cartesian/tick_label_defaults\"),l=t(\"./attributes\");e.exports=function(t,e,n){function s(t,e){return r.coerce(u,c,l,t,e)}var c=e.colorbar={},u=t.colorbar||{},f=s(\"thicknessmode\");s(\"thickness\",\"fraction\"===f?30/(n.width-n.margin.l-n.margin.r):30);var d=s(\"lenmode\");s(\"len\",\"fraction\"===d?1:n.height-n.margin.t-n.margin.b),s(\"x\"),s(\"xanchor\"),s(\"xpad\"),s(\"y\"),s(\"yanchor\"),s(\"ypad\"),r.noneOrAll(u,c,[\"x\",\"y\"]),s(\"outlinecolor\"),s(\"outlinewidth\"),s(\"bordercolor\"),s(\"borderwidth\"),s(\"bgcolor\"),a(u,c,s,\"linear\"),i(u,c,s,\"linear\",{outerTicks:!1,font:n.font,noHover:!0}),o(u,c,s,\"linear\",{outerTicks:!1,font:n.font,noHover:!0}),s(\"title\"),r.coerceFont(s,\"titlefont\",n.font),s(\"titleside\")}},{\"../../lib\":89,\"../../plots/cartesian/tick_label_defaults\":125,\"../../plots/cartesian/tick_mark_defaults\":126,\"../../plots/cartesian/tick_value_defaults\":127,\"./attributes\":19}],21:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"tinycolor2\"),o=t(\"../../plotly\"),i=t(\"../../plots/plots\"),l=t(\"../../plots/cartesian/axes\"),s=t(\"../dragelement\"),c=t(\"../../lib\"),u=t(\"../../lib/extend\").extendFlat,f=t(\"../../lib/setcursor\"),d=t(\"../drawing\"),h=t(\"../color\"),p=t(\"../titles\"),g=t(\"../../plots/cartesian/axis_defaults\"),v=t(\"../../plots/cartesian/position_defaults\"),m=t(\"../../plots/cartesian/layout_attributes\"),y=t(\"./attributes\");e.exports=function(t,e){function n(){function y(t,e){return c.coerce(W,J,m,t,e)}function 
_(){if(-1!==[\"top\",\"bottom\"].indexOf(b.titleside)){var e=at.select(\".cbtitle\"),n=e.select(\"text\"),o=[-b.outlinewidth/2,b.outlinewidth/2],i=e.select(\".h\"+J._id+\"title-math-group\").node(),s=15.6;if(n.node()&&(s=1.3*parseInt(n.style(\"font-size\"),10)),i?(it=d.bBox(i).height,it>s&&(o[1]-=(it-s)/2)):n.node()&&!n.classed(\"js-placeholder\")&&(it=d.bBox(e.node()).height),it){if(it+=5,\"top\"===b.titleside)J.domain[1]-=it/A.h,o[1]*=-1;else{J.domain[0]+=it/A.h;var u=Math.max(1,n.selectAll(\"tspan.line\").size());o[1]+=(1-u)*s}e.attr(\"transform\",\"translate(\"+o+\")\"),J.setScale()}}at.selectAll(\".cbfills,.cblines,.cbaxis\").attr(\"transform\",\"translate(0,\"+Math.round(A.h*(1-J.domain[1]))+\")\");var f=at.select(\".cbfills\").selectAll(\"rect.cbfill\").data(S);f.enter().append(\"rect\").classed(\"cbfill\",!0).style(\"stroke\",\"none\"),f.exit().remove(),f.each(function(t,e){var n=[0===e?T[0]:(S[e]+S[e-1])/2,e===S.length-1?T[1]:(S[e]+S[e+1])/2].map(J.c2p).map(Math.round);e!==S.length-1&&(n[1]+=n[1]>n[0]?1:-1);var o=C(t).replace(\"e-\",\"\"),i=a(o).toHexString();r.select(this).attr({x:U,width:Math.max(q,2),y:r.min(n),height:Math.max(r.max(n)-r.min(n),2),fill:i})});var h=at.select(\".cblines\").selectAll(\"path.cbline\").data(b.line.color&&b.line.width?z:[]);return h.enter().append(\"path\").classed(\"cbline\",!0),h.exit().remove(),h.each(function(t){r.select(this).attr(\"d\",\"M\"+U+\",\"+(Math.round(J.c2p(t))+b.line.width/2%1)+\"h\"+q).call(d.lineGroupStyle,b.line.width,E(t),b.line.dash)}),J._axislayer.selectAll(\"g.\"+J._id+\"tick,path\").remove(),J._pos=U+q+(b.outlinewidth||0)/2-(\"outside\"===b.ticks?1:0),J.side=\"right\",c.syncOrAsync([function(){return l.doTicks(t,J,!0)},function(){if(-1===[\"top\",\"bottom\"].indexOf(b.titleside)){var e=J.titlefont.size,n=J._offset+J._length/2,a=A.l+(J.position||0)*A.w+(\"right\"===J.side?10+e*(J.showticklabels?1:.5):-10-e*(J.showticklabels?.5:0));w(\"h\"+J._id+\"title\",{avoid:{selection:r.select(t).selectAll(\"g.\"+J._id+\"tick\"),side:b.titleside,offsetLeft:A.l,offsetTop:A.t,maxShift:M.width},attributes:{x:a,y:n,\"text-anchor\":\"middle\"},transform:{rotate:\"-90\",offset:0}})}}])}function w(e,n){var r,a=x();r=i.traceIs(a,\"markerColorscale\")?\"marker.colorbar.title\":\"colorbar.title\";var o={propContainer:J,propName:r,traceIndex:a.index,dfltName:\"colorscale\",containerGroup:at.select(\".cbtitle\")},l=\"h\"===e.charAt(0)?e.substr(1):\"h\"+e;at.selectAll(\".\"+l+\",.\"+l+\"-math-group\").remove(),p.draw(t,e,u(o,n||{}))}function k(){var n=q+b.outlinewidth/2+d.bBox(J._axislayer.node()).width;if(I=ot.select(\"text\"),I.node()&&!I.classed(\"js-placeholder\")){var r,a=ot.select(\".h\"+J._id+\"title-math-group\").node();r=a&&-1!==[\"top\",\"bottom\"].indexOf(b.titleside)?d.bBox(a).width:d.bBox(ot.node()).right-U-A.l,n=Math.max(n,r)}var o=2*b.xpad+n+b.borderwidth+b.outlinewidth/2,l=$-Q;at.select(\".cbbg\").attr({x:U-b.xpad-(b.borderwidth+b.outlinewidth)/2,y:Q-Z,width:Math.max(o,2),height:Math.max(l+2*Z,2)}).call(h.fill,b.bgcolor).call(h.stroke,b.bordercolor).style({\"stroke-width\":b.borderwidth}),at.selectAll(\".cboutline\").attr({x:U,y:Q+b.ypad+(\"top\"===b.titleside?it:0),width:Math.max(q,2),height:Math.max(l-2*b.ypad-it,2)}).call(h.stroke,b.outlinecolor).style({fill:\"None\",\"stroke-width\":b.outlinewidth});var s=({center:.5,right:1}[b.xanchor]||0)*o;at.attr(\"transform\",\"translate(\"+(A.l-s)+\",\"+A.t+"
+,
+"\")\"),i.autoMargin(t,e,{x:b.x,y:b.y,l:o*({right:1,center:.5}[b.xanchor]||0),r:o*({left:1,center:.5}[b.xanchor]||0),t:l*({bottom:1,middle:.5}[b.yanchor]||0),b:l*({top:1,middle:.5}[b.yanchor]||0)})}var M=t._fullLayout,A=M._size;if(\"function\"!=typeof b.fillcolor&&\"function\"!=typeof b.line.color)return void M._infolayer.selectAll(\"g.\"+e).remove();var L,T=r.extent((\"function\"==typeof b.fillcolor?b.fillcolor:b.line.color).domain()),z=[],S=[],E=\"function\"==typeof b.line.color?b.line.color:function(){return b.line.color},C=\"function\"==typeof b.fillcolor?b.fillcolor:function(){return b.fillcolor},O=b.levels.end+b.levels.size/100,P=b.levels.size,N=1.001*T[0]-.001*T[1],D=1.001*T[1]-.001*T[0];for(L=b.levels.start;0>(L-O)*P;L+=P)L>N&&D>L&&z.push(L);if(\"function\"==typeof b.fillcolor)if(b.filllevels)for(O=b.filllevels.end+b.filllevels.size/100,P=b.filllevels.size,L=b.filllevels.start;0>(L-O)*P;L+=P)L>T[0]&&L<T[1]&&S.push(L);else S=z.map(function(t){return t-b.levels.size/2}),S.push(S[S.length-1]+b.levels.size);else b.fillcolor&&\"string\"==typeof b.fillcolor&&(S=[0]);b.levels.size<0&&(z.reverse(),S.reverse());var I,R=M.height-M.margin.t-M.margin.b,j=M.width-M.margin.l-M.margin.r,q=Math.round(b.thickness*(\"fraction\"===b.thicknessmode?j:1)),F=q/A.w,B=Math.round(b.len*(\"fraction\"===b.lenmode?R:1)),H=B/A.h,V=b.xpad/A.w,Z=(b.borderwidth+b.outlinewidth)/2,Y=b.ypad/A.h,U=Math.round(b.x*A.w+b.xpad),X=b.x-F*({middle:.5,right:1}[b.xanchor]||0),G=b.y+H*(({top:-.5,bottom:.5}[b.yanchor]||0)-.5),$=Math.round(A.h*(1-G)),Q=$-B,W={type:\"linear\",range:T,tickmode:b.tickmode,nticks:b.nticks,tick0:b.tick0,dtick:b.dtick,tickvals:b.tickvals,ticktext:b.ticktext,ticks:b.ticks,ticklen:b.ticklen,tickwidth:b.tickwidth,tickcolor:b.tickcolor,showticklabels:b.showticklabels,tickfont:b.tickfont,tickangle:b.tickangle,tickformat:b.tickformat,exponentformat:b.exponentformat,showexponent:b.showexponent,showtickprefix:b.showtickprefix,tickprefix:b.tickprefix,showticksuffix:b.showticksuffix,ticksuffix:b.ticksuffix,title:b.title,titlefont:b.titlefont,anchor:\"free\",position:1},J={},K={letter:\"y\",font:M.font,noHover:!0};if(g(W,J,y,K),v(W,J,y,K),J._id=\"y\"+e,J._gd=t,J.position=b.x+V+F,n.axis=J,-1!==[\"top\",\"bottom\"].indexOf(b.titleside)&&(J.titleside=b.titleside,J.titlex=b.x+V,J.titley=G+(\"top\"===b.titleside?H-Y:Y)),b.line.color&&\"auto\"===b.tickmode){J.tickmode=\"linear\",J.tick0=b.levels.start;var tt=b.levels.size,et=c.constrain(($-Q)/50,4,15)+1,nt=(T[1]-T[0])/((b.nticks||et)*tt);if(nt>1){var rt=Math.pow(10,Math.floor(Math.log(nt)/Math.LN10));tt*=rt*c.roundUp(nt/rt,[2,5,10]),(Math.abs(b.levels.start)/b.levels.size+1e-6)%1<2e-6&&(J.tick0=0)}J.dtick=tt}J.domain=[G+Y,G+H-Y],J.setScale();var at=M._infolayer.selectAll(\"g.\"+e).data([0]);at.enter().append(\"g\").classed(e,!0).each(function(){var t=r.select(this);t.append(\"rect\").classed(\"cbbg\",!0),t.append(\"g\").classed(\"cbfills\",!0),t.append(\"g\").classed(\"cblines\",!0),t.append(\"g\").classed(\"cbaxis\",!0).classed(\"crisp\",!0),t.append(\"g\").classed(\"cbtitleunshift\",!0).append(\"g\").classed(\"cbtitle\",!0),t.append(\"rect\").classed(\"cboutline\",!0),t.select(\".cbtitle\").datum(0)}),at.attr(\"transform\",\"translate(\"+Math.round(A.l)+\",\"+Math.round(A.t)+\")\");var ot=at.select(\".cbtitleunshift\").attr(\"transform\",\"translate(-\"+Math.round(A.l)+\",-\"+Math.round(A.t)+\")\");J._axislayer=at.select(\".cbaxis\");var it=0;if(-1!==[\"top\",\"bottom\"].indexOf(b.titleside)){var 
lt,st=A.l+(b.x+V)*A.w,ct=J.titlefont.size;lt=\"top\"===b.titleside?(1-(G+H-Y))*A.h+A.t+3+.75*ct:(1-(G+Y))*A.h+A.t-3-.25*ct,w(J._id+\"title\",{attributes:{x:st,y:lt,\"text-anchor\":\"start\"}})}var ut=c.syncOrAsync([i.previousPromises,_,i.previousPromises,k],t);if(ut&&ut.then&&(t._promises||[]).push(ut),t._context.editable){var ft,dt,ht;s.init({element:at.node(),prepFn:function(){ft=at.attr(\"transform\"),f(at)},moveFn:function(t,e){at.attr(\"transform\",ft+\" translate(\"+t+\",\"+e+\")\"),dt=s.align(X+t/A.w,F,0,1,b.xanchor),ht=s.align(G-e/A.h,H,0,1,b.yanchor);var n=s.getCursor(dt,ht,b.xanchor,b.yanchor);f(at,n)},doneFn:function(e){f(at),e&&void 0!==dt&&void 0!==ht&&o.restyle(t,{\"colorbar.x\":dt,\"colorbar.y\":ht},x().index)}})}return ut}function x(){var n,r,a=e.substr(2);for(n=0;n<t._fullData.length;n++)if(r=t._fullData[n],r.uid===a)return r}var b={};return Object.keys(y).forEach(function(t){b[t]=null}),b.fillcolor=null,b.line={color:null,width:null,dash:null},b.levels={start:null,end:null,size:null},b.filllevels=null,Object.keys(b).forEach(function(t){n[t]=function(e){return arguments.length?(b[t]=c.isPlainObject(b[t])?c.extendFlat(b[t],e):e,n):b[t]}}),n.options=function(t){return Object.keys(t).forEach(function(e){\"function\"==typeof n[e]&&n[e](t[e])}),n},n._opts=b,n}},{\"../../lib\":89,\"../../lib/extend\":88,\"../../lib/setcursor\":98,\"../../plotly\":107,\"../../plots/cartesian/axes\":110,\"../../plots/cartesian/axis_defaults\":111,\"../../plots/cartesian/layout_attributes\":119,\"../../plots/cartesian/position_defaults\":122,\"../../plots/plots\":130,\"../color\":18,\"../dragelement\":39,\"../drawing\":41,\"../titles\":81,\"./attributes\":19,d3:9,tinycolor2:13}],22:[function(t,e,n){\"use strict\";e.exports=function(t){return\"object\"==typeof t.colorbar&&null!==t.colorbar}},{}],23:[function(t,e,n){\"use strict\";n.attributes=t(\"./attributes\"),n.supplyDefaults=t(\"./defaults\"),n.draw=t(\"./draw\"),n.hasColorbar=t(\"./has_colorbar\")},{\"./attributes\":19,\"./defaults\":20,\"./draw\":21,\"./has_colorbar\":22}],24:[function(t,e,n){\"use strict\";e.exports={zauto:{valType:\"boolean\",dflt:!0},zmin:{valType:\"number\",dflt:null},zmax:{valType:\"number\",dflt:null},colorscale:{valType:\"colorscale\"},autocolorscale:{valType:\"boolean\",dflt:!0},reversescale:{valType:\"boolean\",dflt:!1},showscale:{valType:\"boolean\",dflt:!0}}},{}],25:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"./scales\"),o=t(\"./flip_scale\");e.exports=function(t,e,n,i){var l,s;n?(l=r.nestedProperty(t,n).get(),s=r.nestedProperty(t._input,n).get()):(l=t,s=t._input);var c=l[i+\"auto\"],u=l[i+\"min\"],f=l[i+\"max\"],d=l.colorscale;c===!1&&void 0!==u||(u=r.aggNums(Math.min,null,e)),c===!1&&void 0!==f||(f=r.aggNums(Math.max,null,e)),u===f&&(u-=.5,f+=.5),l[i+\"min\"]=u,l[i+\"max\"]=f,s[i+\"min\"]=u,s[i+\"max\"]=f,l.autocolorscale&&(d=0>u*f?a.RdBu:u>=0?a.Reds:a.Blues,s.colorscale=d,l.reversescale&&(d=o(d)),l.colorscale=d)}},{\"../../lib\":89,\"./flip_scale\":29,\"./scales\":36}],26:[function(t,e,n){\"use strict\";var r=t(\"./attributes\"),a=t(\"../../lib/extend\").extendDeep;t(\"./scales.js\");e.exports=function(t){return{color:{valType:\"color\",arrayOk:!0},colorscale:a({},r.colorscale,{}),cauto:a({},r.zauto,{}),cmax:a({},r.zmax,{}),cmin:a({},r.zmin,{}),autocolorscale:a({},r.autocolorscale,{}),reversescale:a({},r.reversescale,{})}}},{\"../../lib/extend\":88,\"./attributes\":24,\"./scales.js\":36}],27:[function(t,e,n){\"use strict\";var 
r=t(\"./scales\");e.exports=r.RdBu},{\"./scales\":36}],28:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"../../lib\"),o=t(\"../colorbar/has_colorbar\"),i=t(\"../colorbar/defaults\"),l=t(\"./is_valid_scale\"),s=t(\"./flip_scale\");e.exports=function(t,e,n,c,u){var f=u.prefix,d=u.cLetter,h=f.slice(0,f.length-1),p=f?a.nestedProperty(t,h).get()||{}:t,g=f?a.nestedProperty(e,h).get()||{}:e,v=p[d+\"min\"],m=p[d+\"max\"],y=p.colorscale,x=r(v)&&r(m)&&m>v;c(f+d+\"auto\",!x),c(f+d+\"min\"),c(f+d+\"max\");var b;void 0!==y&&(b=!l(y)),c(f+\"autocolorscale\",b);var _=c(f+\"colorscale\"),w=c(f+\"reversescale\");if(w&&(g.colorscale=s(_)),\"marker.line.\"!==f){var k;f&&(k=o(p));var M=c(f+\"showscale\",k);M&&i(p,g,n)}}},{\"../../lib\":89,\"../colorbar/defaults\":20,\"../colorbar/has_colorbar\":22,\"./flip_scale\":29,\"./is_valid_scale\":33,\"fast-isnumeric\":11}],29:[function(t,e,n){\"use strict\";e.exports=function(t){for(var e,n=t.length,r=new Array(n),a=n-1,o=0;a>=0;a--,o++)e=t[a],r[o]=[1-e[0],e[1]];return r}},{}],30:[function(t,e,n){\"use strict\";var r=t(\"./scales\"),a=t(\"./default_scale\"),o=t(\"./is_valid_scale_array\");e.exports=function(t,e){function n(){try{t=r[t]||JSON.parse(t)}catch(n){t=e}}return e||(e=a),t?(\"string\"==typeof t&&(n(),\"string\"==typeof t&&n()),o(t)?t:e):e}},{\"./default_scale\":27,\"./is_valid_scale_array\":34,\"./scales\":36}],31:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"../../lib\"),o=t(\"./is_valid_scale\");e.exports=function(t,e){var n=e?a.nestedProperty(t,e).get()||{}:t,i=n.color,l=!1;if(Array.isArray(i))for(var s=0;s<i.length;s++)if(r(i[s])){l=!0;break}return\"object\"==typeof n&&null!==n&&(l||n.showscale===!0||r(n.cmin)&&r(n.cmax)||o(n.colorscale)||\"object\"==typeof n.colorbar&&null!==n.colorbar)}},{\"../../lib\":89,\"./is_valid_scale\":33,\"fast-isnumeric\":11}],32:[function(t,e,n){\"use strict\";n.scales=t(\"./scales\"),n.defaultScale=t(\"./default_scale\"),n.attributes=t(\"./attributes\"),n.handleDefaults=t(\"./defaults\"),n.calc=t(\"./calc\"),n.hasColorscale=t(\"./has_colorscale\"),n.isValidScale=t(\"./is_valid_scale\"),n.getScale=t(\"./get_scale\"),n.flipScale=t(\"./flip_scale\"),n.makeScaleFunction=t(\"./make_scale_function\")},{\"./attributes\":24,\"./calc\":25,\"./default_scale\":27,\"./defaults\":28,\"./flip_scale\":29,\"./get_scale\":30,\"./has_colorscale\":31,\"./is_valid_scale\":33,\"./make_scale_function\":35,\"./scales\":36}],33:[function(t,e,n){\"use strict\";var r=t(\"./scales\"),a=t(\"./is_valid_scale_array\");e.exports=function(t){return void 0!==r[t]?!0:a(t)}},{\"./is_valid_scale_array\":34,\"./scales\":36}],34:[function(t,e,n){\"use strict\";var r=t(\"tinycolor2\");e.exports=function(t){var e=0;if(!Array.isArray(t)||t.length<2)return!1;if(!t[0]||!t[t.length-1])return!1;if(0!==+t[0][0]||1!==+t[t.length-1][0])return!1;for(var n=0;n<t.length;n++){var a=t[n];if(2!==a.length||+a[0]<e||!r(a[1]).isValid())return!1;e=+a[0]}return!0}},{tinycolor2:13}],35:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"tinycolor2\"),o=t(\"fast-isnumeric\"),i=t(\"../../lib\"),l=t(\"../color\");e.exports=function(t,e,n){for(var s,c=t.length,u=new Array(c),f=new Array(c),d=0;c>d;d++)s=t[d],u[d]=e+s[0]*(n-e),f[d]=a(s[1]).toRgb();var h=r.scale.linear().domain(u).interpolate(r.interpolateObject).range(f);return function(t){if(o(t)){var r=i.constrain(t,e,n),s=h(r);return a(s).toRgbString()}return a(t).isVali"
+,
+"d()?t:l.defaultLine}}},{\"../../lib\":89,\"../color\":18,d3:9,\"fast-isnumeric\":11,tinycolor2:13}],36:[function(t,e,n){\"use strict\";e.exports={Greys:[[0,\"rgb(0,0,0)\"],[1,\"rgb(255,255,255)\"]],YlGnBu:[[0,\"rgb(8,29,88)\"],[.125,\"rgb(37,52,148)\"],[.25,\"rgb(34,94,168)\"],[.375,\"rgb(29,145,192)\"],[.5,\"rgb(65,182,196)\"],[.625,\"rgb(127,205,187)\"],[.75,\"rgb(199,233,180)\"],[.875,\"rgb(237,248,217)\"],[1,\"rgb(255,255,217)\"]],Greens:[[0,\"rgb(0,68,27)\"],[.125,\"rgb(0,109,44)\"],[.25,\"rgb(35,139,69)\"],[.375,\"rgb(65,171,93)\"],[.5,\"rgb(116,196,118)\"],[.625,\"rgb(161,217,155)\"],[.75,\"rgb(199,233,192)\"],[.875,\"rgb(229,245,224)\"],[1,\"rgb(247,252,245)\"]],YlOrRd:[[0,\"rgb(128,0,38)\"],[.125,\"rgb(189,0,38)\"],[.25,\"rgb(227,26,28)\"],[.375,\"rgb(252,78,42)\"],[.5,\"rgb(253,141,60)\"],[.625,\"rgb(254,178,76)\"],[.75,\"rgb(254,217,118)\"],[.875,\"rgb(255,237,160)\"],[1,\"rgb(255,255,204)\"]],Bluered:[[0,\"rgb(0,0,255)\"],[1,\"rgb(255,0,0)\"]],RdBu:[[0,\"rgb(5,10,172)\"],[.35,\"rgb(106,137,247)\"],[.5,\"rgb(190,190,190)\"],[.6,\"rgb(220,170,132)\"],[.7,\"rgb(230,145,90)\"],[1,\"rgb(178,10,28)\"]],Reds:[[0,\"rgb(220,220,220)\"],[.2,\"rgb(245,195,157)\"],[.4,\"rgb(245,160,105)\"],[1,\"rgb(178,10,28)\"]],Blues:[[0,\"rgb(5,10,172)\"],[.35,\"rgb(40,60,190)\"],[.5,\"rgb(70,100,245)\"],[.6,\"rgb(90,120,245)\"],[.7,\"rgb(106,137,247)\"],[1,\"rgb(220,220,220)\"]],Picnic:[[0,\"rgb(0,0,255)\"],[.1,\"rgb(51,153,255)\"],[.2,\"rgb(102,204,255)\"],[.3,\"rgb(153,204,255)\"],[.4,\"rgb(204,204,255)\"],[.5,\"rgb(255,255,255)\"],[.6,\"rgb(255,204,255)\"],[.7,\"rgb(255,153,255)\"],[.8,\"rgb(255,102,204)\"],[.9,\"rgb(255,102,102)\"],[1,\"rgb(255,0,0)\"]],Rainbow:[[0,\"rgb(150,0,90)\"],[.125,\"rgb(0,0,200)\"],[.25,\"rgb(0,25,255)\"],[.375,\"rgb(0,152,255)\"],[.5,\"rgb(44,255,150)\"],[.625,\"rgb(151,255,0)\"],[.75,\"rgb(255,234,0)\"],[.875,\"rgb(255,111,0)\"],[1,\"rgb(255,0,0)\"]],Portland:[[0,\"rgb(12,51,131)\"],[.25,\"rgb(10,136,186)\"],[.5,\"rgb(242,211,56)\"],[.75,\"rgb(242,143,56)\"],[1,\"rgb(217,30,30)\"]],Jet:[[0,\"rgb(0,0,131)\"],[.125,\"rgb(0,60,170)\"],[.375,\"rgb(5,255,255)\"],[.625,\"rgb(255,255,0)\"],[.875,\"rgb(250,0,0)\"],[1,\"rgb(128,0,0)\"]],\n"
+,
+"Hot:[[0,\"rgb(0,0,0)\"],[.3,\"rgb(230,0,0)\"],[.6,\"rgb(255,210,0)\"],[1,\"rgb(255,255,255)\"]],Blackbody:[[0,\"rgb(0,0,0)\"],[.2,\"rgb(230,0,0)\"],[.4,\"rgb(230,210,0)\"],[.7,\"rgb(255,255,255)\"],[1,\"rgb(160,200,255)\"]],Earth:[[0,\"rgb(0,0,130)\"],[.1,\"rgb(0,180,180)\"],[.2,\"rgb(40,210,40)\"],[.4,\"rgb(230,230,50)\"],[.6,\"rgb(120,70,20)\"],[1,\"rgb(255,255,255)\"]],Electric:[[0,\"rgb(0,0,0)\"],[.15,\"rgb(30,0,100)\"],[.4,\"rgb(120,0,100)\"],[.6,\"rgb(160,90,0)\"],[.8,\"rgb(230,200,0)\"],[1,\"rgb(255,250,220)\"]],Viridis:[[0,\"#440154\"],[.06274509803921569,\"#48186a\"],[.12549019607843137,\"#472d7b\"],[.18823529411764706,\"#424086\"],[.25098039215686274,\"#3b528b\"],[.3137254901960784,\"#33638d\"],[.3764705882352941,\"#2c728e\"],[.4392156862745098,\"#26828e\"],[.5019607843137255,\"#21918c\"],[.5647058823529412,\"#1fa088\"],[.6274509803921569,\"#28ae80\"],[.6901960784313725,\"#3fbc73\"],[.7529411764705882,\"#5ec962\"],[.8156862745098039,\"#84d44b\"],[.8784313725490196,\"#addc30\"],[.9411764705882353,\"#d8e219\"],[1,\"#fde725\"]]}},{}],37:[function(t,e,n){\"use strict\";e.exports=function(t,e,n,r,a){var o=(t-n)/(r-n),i=o+e/(r-n),l=(o+i)/2;return\"left\"===a||\"bottom\"===a?o:\"center\"===a||\"middle\"===a?l:\"right\"===a||\"top\"===a?i:2/3-l>o?o:i>4/3-l?i:l}},{}],38:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=[[\"sw-resize\",\"s-resize\",\"se-resize\"],[\"w-resize\",\"move\",\"e-resize\"],[\"nw-resize\",\"n-resize\",\"ne-resize\"]];e.exports=function(t,e,n,o){return t=\"left\"===n?0:\"center\"===n?1:\"right\"===n?2:r.constrain(Math.floor(3*t),0,2),e=\"bottom\"===o?0:\"middle\"===o?1:\"top\"===o?2:r.constrain(Math.floor(3*e),0,2),a[e][t]}},{\"../../lib\":89}],39:[function(t,e,n){\"use strict\";function r(){var t=document.createElement(\"div\");t.className=\"dragcover\";var e=t.style;return e.position=\"fixed\",e.left=0,e.right=0,e.top=0,e.bottom=0,e.zIndex=999999999,e.background=\"none\",document.body.appendChild(t),t}function a(t){t._dragging=!1,t._replotPending&&o.plot(t)}var o=t(\"../../plotly\"),i=t(\"../../lib\"),l=t(\"../../plots/cartesian/constants\"),s=e.exports={};s.align=t(\"./align\"),s.getCursor=t(\"./cursor\");var c=t(\"./unhover\");s.unhover=c.wrapped,s.unhoverRaw=c.raw,s.init=function(t){function e(e){return t.element.onmousemove=p,g._dragged=!1,g._dragging=!0,c=e.clientX,u=e.clientY,h=e.target,f=(new Date).getTime(),f-g._mouseDownTime<m?v+=1:(v=1,g._mouseDownTime=f),t.prepFn&&t.prepFn(e,c,u),d=r(),d.onmousemove=n,d.onmouseup=o,d.onmouseout=o,d.style.cursor=window.getComputedStyle(t.element).cursor,i.pauseEvent(e)}function n(e){var n=e.clientX-c,r=e.clientY-u,a=t.minDrag||l.MINDRAG;return Math.abs(n)<a&&(n=0),Math.abs(r)<a&&(r=0),(n||r)&&(g._dragged=!0,s.unhover(g)),t.moveFn&&t.moveFn(n,r,g._dragged),i.pauseEvent(e)}function o(e){if(p=t.element.onmousemove,t.setCursor&&(t.element.onmousemove=t.setCursor),d.onmousemove=null,d.onmouseup=null,d.onmouseout=null,i.removeElement(d),!g._dragging)return void(g._dragged=!1);if(g._dragging=!1,(new Date).getTime()-g._mouseDownTime>m&&(v=Math.max(v-1,1)),t.doneFn&&t.doneFn(g._dragged,v),!g._dragged){var n=document.createEvent(\"MouseEvents\");n.initEvent(\"click\",!0,!0),h.dispatchEvent(n)}return a(g),g._dragged=!1,i.pauseEvent(e)}var 
c,u,f,d,h,p,g=i.getPlotDiv(t.element)||{},v=1,m=l.DBLCLICKDELAY;g._mouseDownTime||(g._mouseDownTime=0),p=t.element.onmousemove,t.setCursor&&(t.element.onmousemove=t.setCursor),t.element.onmousedown=e,t.element.style.pointerEvents=\"all\"}},{\"../../lib\":89,\"../../plotly\":107,\"../../plots/cartesian/constants\":115,\"./align\":37,\"./cursor\":38,\"./unhover\":40}],40:[function(t,e,n){\"use strict\";var r=t(\"../../lib/events\"),a=e.exports={};a.wrapped=function(t,e,n){\"string\"==typeof t&&(t=document.getElementById(t)),t._hoverTimer&&(clearTimeout(t._hoverTimer),t._hoverTimer=void 0),a.raw(t,e,n)},a.raw=function(t,e){var n=t._fullLayout;e||(e={}),e.target&&r.triggerHandler(t,\"plotly_beforehover\",e)===!1||(n._hoverlayer.selectAll(\"g\").remove(),e.target&&t._hoverdata&&t.emit(\"plotly_unhover\",{points:t._hoverdata}),t._hoverdata=void 0)}},{\"../../lib/events\":87}],41:[function(t,e,n){\"use strict\";function r(t,e,n,r){var o=t[0]-e[0],i=t[1]-e[1],l=n[0]-e[0],s=n[1]-e[1],c=Math.pow(o*o+i*i,b/2),u=Math.pow(l*l+s*s,b/2),f=(u*u*o-c*c*l)*r,d=(u*u*i-c*c*s)*r,h=3*u*(c+u),p=3*c*(c+u);return[[a.round(e[0]+(h&&f/h),2),a.round(e[1]+(h&&d/h),2)],[a.round(e[0]-(p&&f/p),2),a.round(e[1]-(p&&d/p),2)]]}var a=t(\"d3\"),o=t(\"fast-isnumeric\"),i=t(\"../../plots/plots\"),l=t(\"../color\"),s=t(\"../colorscale\"),c=t(\"../../lib\"),u=t(\"../../lib/svg_text_utils\"),f=t(\"../../constants/xmlns_namespaces\"),d=t(\"../../traces/scatter/subtypes\"),h=t(\"../../traces/scatter/make_bubble_size_func\"),p=e.exports={};p.font=function(t,e,n,r){e&&e.family&&(r=e.color,n=e.size,e=e.family),e&&t.style(\"font-family\",e),n+1&&t.style(\"font-size\",n+\"px\"),r&&t.call(l.fill,r)},p.setPosition=function(t,e,n){t.attr(\"x\",e).attr(\"y\",n)},p.setSize=function(t,e,n){t.attr(\"width\",e).attr(\"height\",n)},p.setRect=function(t,e,n,r,a){t.call(p.setPosition,e,n).call(p.setSize,r,a)},p.translatePoints=function(t,e,n){t.each(function(t){var r=t.xp||e.c2p(t.x),i=t.yp||n.c2p(t.y),l=a.select(this);o(r)&&o(i)?\"text\"===this.nodeName?l.attr(\"x\",r).attr(\"y\",i):l.attr(\"transform\",\"translate(\"+r+\",\"+i+\")\"):l.remove()})},p.getPx=function(t,e){return Number(t.style(e).replace(/px$/,\"\"))},p.crispRound=function(t,e,n){return e&&o(e)?t._context.staticPlot?e:1>e?1:Math.round(e):n||0},p.lineGroupStyle=function(t,e,n,r){t.style(\"fill\",\"none\").each(function(t){var o=(((t||[])[0]||{}).trace||{}).line||{},i=e||o.width||0,s=r||o.dash||\"\";a.select(this).call(l.stroke,n||o.color).call(p.dashLine,s,i)})},p.dashLine=function(t,e,n){var r=Math.max(n,3);\"solid\"===e?e=\"\":\"dot\"===e?e=r+\"px,\"+r+\"px\":\"dash\"===e?e=3*r+\"px,\"+3*r+\"px\":\"longdash\"===e?e=5*r+\"px,\"+5*r+\"px\":\"dashdot\"===e?e=3*r+\"px,\"+r+\"px,\"+r+\"px,\"+r+\"px\":\"longdashdot\"===e&&(e=5*r+\"px,\"+2*r+\"px,\"+r+\"px,\"+2*r+\"px\"),t.style({\"stroke-dasharray\":e,\"stroke-width\":n+\"px\"})},p.fillGroupStyle=function(t){t.style(\"stroke-width\",0).each(function(e){var n=a.select(this);try{n.call(l.fill,e[0].trace.fillcolor)}catch(r){c.error(r,t),n.remove()}})};var g=t(\"./symbol_defs\");p.symbolNames=[],p.symbolFuncs=[],p.symbolNeedLines={},p.symbolNoDot={},p.symbolList=[],Object.keys(g).forEach(function(t){var e=g[t];p.symbolList=p.symbolList.concat([e.n,t,e.n+100,t+\"-open\"]),p.symbolNames[e.n]=t,p.symbolFuncs[e.n]=e.f,e.needLine&&(p.symbolNeedLines[e.n]=!0),e.noDot?p.symbolNoDot[e.n]=!0:p.symbolList=p.symbolList.concat([e.n+200,t+\"-dot\",e.n+300,t+\"-open-dot\"])});var 
v=p.symbolNames.length,m=\"M0,0.5L0.5,0L0,-0.5L-0.5,0Z\";p.symbolNumber=function(t){if(\"string\"==typeof t){var e=0;t.indexOf(\"-open\")>0&&(e=100,t=t.replace(\"-open\",\"\")),t.indexOf(\"-dot\")>0&&(e+=200,t=t.replace(\"-dot\",\"\")),t=p.symbolNames.indexOf(t),t>=0&&(t+=e)}return t%100>=v||t>=400?0:Math.floor(Math.max(t,0))},p.pointStyle=function(t,e){if(t.size()){var n=e.marker,r=n.line;if(i.traceIs(e,\"symbols\")){var o=h(e);t.attr(\"d\",function(t){var r;r=\"various\"===t.ms||\"various\"===n.size?3:d.isBubble(e)?o(t.ms):(n.size||6)/2,t.mrc=r;var a=p.symbolNumber(t.mx||n.symbol)||0,i=a%100;return t.om=a%200>=100,p.symbolFuncs[i](r)+(a>=200?m:\"\")}).style(\"opacity\",function(t){return(t.mo+1||n.opacity+1)-1})}var s=(e._input||{}).marker||{},c=p.tryColorscale(n,s,\"\"),u=p.tryColorscale(n,s,\"line.\");t.each(function(t){var e,o,i;t.so?(i=r.outlierwidth,o=r.outliercolor,e=n.outliercolor):(i=(t.mlw+1||r.width+1||(t.trace?t.trace.marker.line.width:0)+1)-1,o=\"mlc\"in t?t.mlcc=u(t.mlc):Array.isArray(r.color)?l.defaultLine:r.color,e=\"mc\"in t?t.mcc=c(t.mc):Array.isArray(n.color)?l.defaultLine:n.color||\"rgba(0,0,0,0)\");var s=a.select(this);t.om?s.call(l.stroke,e).style({\"stroke-width\":(i||1)+\"px\",fill:\"none\"}):(s.style(\"stroke-width\",i+\"px\").call(l.fill,e),i&&s.call(l.stroke,o))})}},p.tryColorscale=function(t,e,n){var r=c.nestedProperty(t,n+\"color\").get(),a=c.nestedProperty(t,n+\"colorscale\").get(),i=c.nestedProperty(t,n+\"cauto\").get(),l=c.nestedProperty(t,n+\"cmin\"),u=c.nestedProperty(t,n+\"cmax\"),f=l.get(),d=u.get();return a&&Array.isArray(r)?(!i&&o(f)&&o(d)||(f=1/0,d=-(1/0),r.forEach(function(t){o(t)&&(f>t&&(f=+t),t>d&&(d=+t))}),f>d&&(f=0,d=1),l.set(f),u.set(d),c.nestedProperty(e,n+\"cmin\").set(f),c.nestedProperty(e,n+\"cmax\").set(d)),s.makeScaleFunction(a,f,d)):c.identity};var y={start:1,end:-1,middle:0,bottom:1,top:-1},x=1.3;p.textPointStyle=function(t,e){t.each(function(t){var n=a.select(this),r=t.tx||e.text;if(!r||Array.isArray(r))return void n.remove();var i=t.tp||e.textposition,l=-1!==i.indexOf(\"top\")?\"top\":-1!==i.indexOf(\"bottom\")?\"bottom\":\"middle\",s=-1!==i.indexOf(\"left\")?\"end\":-1!==i.indexOf(\"right\")?\"start\":\"middle\",c=t.ts||e.textfont.size,f=t.mrc?t.mrc/.8+1:0;c=o(c)&&c>0?c:0,n.call(p.font,t.tf||e.textfont.family,c,t.tc||e.textfont.color).attr(\"text-anchor\",s).text(r).call(u.convertToTspans);var d=a.select(this.parentNode),h=n.selectAll(\"tspan.line\"),g=((h[0].length||1)-1)*x+1,v=y[s]*f,m=.75*c+y[l]*f+(y[l]-1)*g*c/2;d.attr(\"transform\",\"translate(\"+v+\",\"+m+\")\"),g>1&&h.attr({x:n.attr(\"x\"),y:n.attr(\"y\")})})};var b=.5;p.smoothopen=function(t,e){if(t.length<3)return\"M\"+t.join(\"L\");var n,a=\"M\"+t[0],o=[];for(n=1;n<t.length-1;n++)o.push(r(t[n-1],t[n],t[n+1],e));for(a+=\"Q\"+o[0][0]+\" \"+t[1],n=2;n<t.length-1;n++)a+=\"C\"+o[n-2][1]+\" \"+o[n-1][0]+\" \"+t[n];return a+=\"Q\"+o[t.length-3][1]+\" \"+t[t.length-1]},p.smoothclosed=function(t,e){if(t.length<3)return\"M\"+t.join(\"L\")+\"Z\";var n,a=\"M\"+t[0],o=t.length-1,i=[r(t[o],t[0],t[1],e)];for(n=1;o>n;n++)i.push(r(t[n-1],t[n],t[n+1],e));for(i.push(r(t[o-1],t[o],t[0],e)),n=1;o>=n;n++)a+=\"C\"+i[n-1][1]+\" \"+i[n][0]+\" \"+t[n];return a+=\"C\"+i[o][1]+\" \"+i[0][0]+\" \"+t[0]+\"Z\"};var 
_={hv:function(t,e){return\"H\"+a.round(e[0],2)+\"V\"+a.round(e[1],2)},vh:function(t,e){return\"V\"+a.round(e[1],2)+\"H\"+a.round(e[0],2)},hvh:function(t,e){return\"H\"+a.round((t[0]+e[0])/2,2)+\"V\"+a.round(e[1],2)+\"H\"+a.round(e[0],2)},vhv:function(t,e){return\"V\"+a.round((t[1]+e[1])/2,2)+\"H\"+a.round(e[0],2)+\"V\"+a.round(e[1],2)}},w=function(t,e){return\"L\"+a.round(e[0],2)+\",\"+a.round(e[1],2)};p.steps=function(t){var e=_[t]||w;return function(t){f"
+,
+"or(var n=\"M\"+a.round(t[0][0],2)+\",\"+a.round(t[0][1],2),r=1;r<t.length;r++)n+=e(t[r-1],t[r]);return n}},p.makeTester=function(t){var e=a.select(\"body\").selectAll(\"#js-plotly-tester\").data([0]);e.enter().append(\"svg\").attr(\"id\",\"js-plotly-tester\").attr(f.svgAttrs).style({position:\"absolute\",left:\"-10000px\",top:\"-10000px\",width:\"9000px\",height:\"9000px\",\"z-index\":\"1\"});var n=e.selectAll(\".js-reference-point\").data([0]);n.enter().append(\"path\").classed(\"js-reference-point\",!0).attr(\"d\",\"M0,0H1V1H0Z\").style({\"stroke-width\":0,fill:\"black\"}),e.node()._cache||(e.node()._cache={}),t._tester=e,t._testref=n};var k=[],M=1e4;p.bBox=function(t){var e=t.attributes[\"data-bb\"];if(e&&e.value)return c.extendFlat({},k[e.value]);var n=a.select(\"#js-plotly-tester\"),r=n.node(),o=t.cloneNode(!0);r.appendChild(o),a.select(o).attr({x:0,y:0,transform:\"\"});var i=o.getBoundingClientRect(),l=n.select(\".js-reference-point\").node().getBoundingClientRect();r.removeChild(o);var s={height:i.height,width:i.width,left:i.left-l.left,top:i.top-l.top,right:i.right-l.left,bottom:i.bottom-l.top};return k.length>=M&&(a.selectAll(\"[data-bb]\").attr(\"data-bb\",null),k=[]),t.setAttribute(\"data-bb\",k.length),k.push(s),c.extendFlat({},s)},p.setClipUrl=function(t,e){if(!e)return void t.attr(\"clip-path\",null);var n=\"#\"+e,r=a.select(\"base\");r.size()&&r.attr(\"href\")&&(n=window.location.href+n),t.attr(\"clip-path\",\"url(\"+n+\")\")}},{\"../../constants/xmlns_namespaces\":82,\"../../lib\":89,\"../../lib/svg_text_utils\":100,\"../../plots/plots\":130,\"../../traces/scatter/make_bubble_size_func\":181,\"../../traces/scatter/subtypes\":186,\"../color\":18,\"../colorscale\":32,\"./symbol_defs\":42,d3:9,\"fast-isnumeric\":11}],42:[function(t,e,n){\"use strict\";var r=t(\"d3\");e.exports={circle:{n:0,f:function(t){var e=r.round(t,2);return\"M\"+e+\",0A\"+e+\",\"+e+\" 0 1,1 0,-\"+e+\"A\"+e+\",\"+e+\" 0 0,1 \"+e+\",0Z\"}},square:{n:1,f:function(t){var e=r.round(t,2);return\"M\"+e+\",\"+e+\"H-\"+e+\"V-\"+e+\"H\"+e+\"Z\"}},diamond:{n:2,f:function(t){var e=r.round(1.3*t,2);return\"M\"+e+\",0L0,\"+e+\"L-\"+e+\",0L0,-\"+e+\"Z\"}},cross:{n:3,f:function(t){var e=r.round(.4*t,2),n=r.round(1.2*t,2);return\"M\"+n+\",\"+e+\"H\"+e+\"V\"+n+\"H-\"+e+\"V\"+e+\"H-\"+n+\"V-\"+e+\"H-\"+e+\"V-\"+n+\"H\"+e+\"V-\"+e+\"H\"+n+\"Z\"}},x:{n:4,f:function(t){var e=r.round(.8*t/Math.sqrt(2),2),n=\"l\"+e+\",\"+e,a=\"l\"+e+\",-\"+e,o=\"l-\"+e+\",-\"+e,i=\"l-\"+e+\",\"+e;return\"M0,\"+e+n+a+o+a+o+i+o+i+n+i+n+\"Z\"}},\"triangle-up\":{n:5,f:function(t){var e=r.round(2*t/Math.sqrt(3),2),n=r.round(t/2,2),a=r.round(t,2);return\"M-\"+e+\",\"+n+\"H\"+e+\"L0,-\"+a+\"Z\"}},\"triangle-down\":{n:6,f:function(t){var e=r.round(2*t/Math.sqrt(3),2),n=r.round(t/2,2),a=r.round(t,2);return\"M-\"+e+\",-\"+n+\"H\"+e+\"L0,\"+a+\"Z\"}},\"triangle-left\":{n:7,f:function(t){var e=r.round(2*t/Math.sqrt(3),2),n=r.round(t/2,2),a=r.round(t,2);return\"M\"+n+\",-\"+e+\"V\"+e+\"L-\"+a+\",0Z\"}},\"triangle-right\":{n:8,f:function(t){var e=r.round(2*t/Math.sqrt(3),2),n=r.round(t/2,2),a=r.round(t,2);return\"M-\"+n+\",-\"+e+\"V\"+e+\"L\"+a+\",0Z\"}},\"triangle-ne\":{n:9,f:function(t){var e=r.round(.6*t,2),n=r.round(1.2*t,2);return\"M-\"+n+\",-\"+e+\"H\"+e+\"V\"+n+\"Z\"}},\"triangle-se\":{n:10,f:function(t){var e=r.round(.6*t,2),n=r.round(1.2*t,2);return\"M\"+e+\",-\"+n+\"V\"+e+\"H-\"+n+\"Z\"}},\"triangle-sw\":{n:11,f:function(t){var 
e=r.round(.6*t,2),n=r.round(1.2*t,2);return\"M\"+n+\",\"+e+\"H-\"+e+\"V-\"+n+\"Z\"}},\"triangle-nw\":{n:12,f:function(t){var e=r.round(.6*t,2),n=r.round(1.2*t,2);return\"M-\"+e+\",\"+n+\"V-\"+e+\"H\"+n+\"Z\"}},pentagon:{n:13,f:function(t){var e=r.round(.951*t,2),n=r.round(.588*t,2),a=r.round(-t,2),o=r.round(t*-.309,2),i=r.round(.809*t,2);return\"M\"+e+\",\"+o+\"L\"+n+\",\"+i+\"H-\"+n+\"L-\"+e+\",\"+o+\"L0,\"+a+\"Z\"}},hexagon:{n:14,f:function(t){var e=r.round(t,2),n=r.round(t/2,2),a=r.round(t*Math.sqrt(3)/2,2);return\"M\"+a+\",-\"+n+\"V\"+n+\"L0,\"+e+\"L-\"+a+\",\"+n+\"V-\"+n+\"L0,-\"+e+\"Z\"}},hexagon2:{n:15,f:function(t){var e=r.round(t,2),n=r.round(t/2,2),a=r.round(t*Math.sqrt(3)/2,2);return\"M-\"+n+\",\"+a+\"H\"+n+\"L\"+e+\",0L\"+n+\",-\"+a+\"H-\"+n+\"L-\"+e+\",0Z\"}},octagon:{n:16,f:function(t){var e=r.round(.924*t,2),n=r.round(.383*t,2);return\"M-\"+n+\",-\"+e+\"H\"+n+\"L\"+e+\",-\"+n+\"V\"+n+\"L\"+n+\",\"+e+\"H-\"+n+\"L-\"+e+\",\"+n+\"V-\"+n+\"Z\"}},star:{n:17,f:function(t){var e=1.4*t,n=r.round(.225*e,2),a=r.round(.951*e,2),o=r.round(.363*e,2),i=r.round(.588*e,2),l=r.round(-e,2),s=r.round(e*-.309,2),c=r.round(.118*e,2),u=r.round(.809*e,2),f=r.round(.382*e,2);return\"M\"+n+\",\"+s+\"H\"+a+\"L\"+o+\",\"+c+\"L\"+i+\",\"+u+\"L0,\"+f+\"L-\"+i+\",\"+u+\"L-\"+o+\",\"+c+\"L-\"+a+\",\"+s+\"H-\"+n+\"L0,\"+l+\"Z\"}},hexagram:{n:18,f:function(t){var e=r.round(.66*t,2),n=r.round(.38*t,2),a=r.round(.76*t,2);return\"M-\"+a+\",0l-\"+n+\",-\"+e+\"h\"+a+\"l\"+n+\",-\"+e+\"l\"+n+\",\"+e+\"h\"+a+\"l-\"+n+\",\"+e+\"l\"+n+\",\"+e+\"h-\"+a+\"l-\"+n+\",\"+e+\"l-\"+n+\",-\"+e+\"h-\"+a+\"Z\"}},\"star-triangle-up\":{n:19,f:function(t){var e=r.round(t*Math.sqrt(3)*.8,2),n=r.round(.8*t,2),a=r.round(1.6*t,2),o=r.round(4*t,2),i=\"A \"+o+\",\"+o+\" 0 0 1 \";return\"M-\"+e+\",\"+n+i+e+\",\"+n+i+\"0,-\"+a+i+\"-\"+e+\",\"+n+\"Z\"}},\"star-triangle-down\":{n:20,f:function(t){var e=r.round(t*Math.sqrt(3)*.8,2),n=r.round(.8*t,2),a=r.round(1.6*t,2),o=r.round(4*t,2),i=\"A \"+o+\",\"+o+\" 0 0 1 \";return\"M\"+e+\",-\"+n+i+\"-\"+e+\",-\"+n+i+\"0,\"+a+i+e+\",-\"+n+\"Z\"}},\"star-square\":{n:21,f:function(t){var e=r.round(1.1*t,2),n=r.round(2*t,2),a=\"A \"+n+\",\"+n+\" 0 0 1 \";return\"M-\"+e+\",-\"+e+a+\"-\"+e+\",\"+e+a+e+\",\"+e+a+e+\",-\"+e+a+\"-\"+e+\",-\"+e+\"Z\"}},\"star-diamond\":{n:22,f:function(t){var e=r.round(1.4*t,2),n=r.round(1.9*t,2),a=\"A \"+n+\",\"+n+\" 0 0 1 \";return\"M-\"+e+\",0\"+a+\"0,\"+e+a+e+\",0\"+a+\"0,-\"+e+a+\"-\"+e+\",0Z\"}},\"diamond-tall\":{n:23,f:function(t){var e=r.round(.7*t,2),n=r.round(1.4*t,2);return\"M0,\"+n+\"L\"+e+\",0L0,-\"+n+\"L-\"+e+\",0Z\"}},\"diamond-wide\":{n:24,f:function(t){var e=r.round(1.4*t,2),n=r.round(.7*t,2);return\"M0,\"+n+\"L\"+e+\",0L0,-\"+n+\"L-\"+e+\",0Z\"}},hourglass:{n:25,f:function(t){var e=r.round(t,2);return\"M\"+e+\",\"+e+\"H-\"+e+\"L\"+e+\",-\"+e+\"H-\"+e+\"Z\"},noDot:!0},bowtie:{n:26,f:function(t){var e=r.round(t,2);return\"M\"+e+\",\"+e+\"V-\"+e+\"L-\"+e+\",\"+e+\"V-\"+e+\"Z\"},noDot:!0},\"circle-cross\":{n:27,f:function(t){var e=r.round(t,2);return\"M0,\"+e+\"V-\"+e+\"M\"+e+\",0H-\"+e+\"M\"+e+\",0A\"+e+\",\"+e+\" 0 1,1 0,-\"+e+\"A\"+e+\",\"+e+\" 0 0,1 \"+e+\",0Z\"},needLine:!0,noDot:!0},\"circle-x\":{n:28,f:function(t){var e=r.round(t,2),n=r.round(t/Math.sqrt(2),2);return\"M\"+n+\",\"+n+\"L-\"+n+\",-\"+n+\"M\"+n+\",-\"+n+\"L-\"+n+\",\"+n+\"M\"+e+\",0A\"+e+\",\"+e+\" 0 1,1 0,-\"+e+\"A\"+e+\",\"+e+\" 0 0,1 \"+e+\",0Z\"},needLine:!0,noDot:!0},\"square-cross\":{n:29,f:function(t){var 
e=r.round(t,2);return\"M0,\"+e+\"V-\"+e+\"M\"+e+\",0H-\"+e+\"M\"+e+\",\"+e+\"H-\"+e+\"V-\"+e+\"H\"+e+\"Z\"},needLine:!0,noDot:!0},\"square-x\":{n:30,f:function(t){var e=r.round(t,2);return\"M\"+e+\",\"+e+\"L-\"+e+\",-\"+e+\"M\"+e+\",-\"+e+\"L-\"+e+\",\"+e+\"M\"+e+\",\"+e+\"H-\"+e+\"V-\"+e+\"H\"+e+\"Z\"},needLine:!0,noDot:!0},\"diamond-cross\":{n:31,f:function(t){var e=r.round(1.3*t,2);return\"M\"+e+\",0L0,\"+e+\"L-\"+e+\",0L0,-\"+e+\"ZM0,-\"+e+\"V\"+e+\"M-\"+e+\",0H\"+e},needLine:!0,noDot:!0},\"diamond-x\":{n:32,f:function(t){var e=r.round(1.3*t,2),n=r.round(.65*t,2);return\"M\"+e+\",0L0,\"+e+\"L-\"+e+\",0L0,-\"+e+\"ZM-\"+n+\",-\"+n+\"L\"+n+\",\"+n+\"M-\"+n+\",\"+n+\"L\"+n+\",-\"+n},needLine:!0,noDot:!0},\"cross-thin\":{n:33,f:function(t){var e=r.round(1.4*t,2);return\"M0,\"+e+\"V-\"+e+\"M\"+e+\",0H-\"+e},needLine:!0,noDot:!0},\"x-thin\":{n:34,f:function(t){var e=r.round(t,2);return\"M\"+e+\",\"+e+\"L-\"+e+\",-\"+e+\"M\"+e+\",-\"+e+\"L-\"+e+\",\"+e},needLine:!0,noDot:!0},asterisk:{n:35,f:function(t){var e=r.round(1.2*t,2),n=r.round(.85*t,2);return\"M0,\"+e+\"V-\"+e+\"M\"+e+\",0H-\"+e+\"M\"+n+\",\"+n+\"L-\"+n+\",-\"+n+\"M\"+n+\",-\"+n+\"L-\"+n+\",\"+n},needLine:!0,noDot:!0},hash:{n:36,f:function(t){var e=r.round(t/2,2),n=r.round(t,2);return\"M\"+e+\",\"+n+\"V-\"+n+\"m-\"+n+\",0V\"+n+\"M\"+n+\",\"+e+\"H-\"+n+\"m0,-\"+n+\"H\"+n},needLine:!0},\"y-up\":{n:37,f:function(t){var e=r.round(1.2*t,2),n=r.round(1.6*t,2),a=r.round(.8*t,2);return\"M-\"+e+\",\"+a+\"L0,0M\"+e+\",\"+a+\"L0,0M0,-\"+n+\"L0,0\"},needLine:!0,noDot:!0},\"y-down\":{n:38,f:function(t){var e=r.round(1.2*t,2),n=r.round(1.6*t,2),a=r.round(.8*t,2);return\"M-\"+e+\",-\"+a+\"L0,0M\"+e+\",-\"+a+\"L0,0M0,\"+n+\"L0,0\"},needLine:!0,noDot:!0},\"y-left\":{n:39,f:function(t){var e=r.round(1.2*t,2),n=r.round(1.6*t,2),a=r.round(.8*t,2);return\"M\"+a+\",\"+e+\"L0,0M\"+a+\",-\"+e+\"L0,0M-\"+n+\",0L0,0\"},needLine:!0,noDot:!0},\"y-right\":{n:40,f:function(t){var e=r.round(1.2*t,2),n=r.round(1.6*t,2),a=r.round(.8*t,2);return\"M-\"+a+\",\"+e+\"L0,0M-\"+a+\",-\"+e+\"L0,0M\"+n+\",0L0,0\"},needLine:!0,noDot:!0},\"line-ew\":{n:41,f:function(t){var e=r.round(1.4*t,2);return\"M\"+e+\",0H-\"+e},needLine:!0,noDot:!0},\"line-ns\":{n:42,f:function(t){var e=r.round(1.4*t,2);return\"M0,\"+e+\"V-\"+e},needLine:!0,noDot:!0},\"line-ne\":{n:43,f:function(t){var e=r.round(t,2);return\"M\"+e+\",-\"+e+\"L-\"+e+\",\"+e},needLine:!0,noDot:!0},\"line-nw\":{n:44,f:function(t){var e=r.round(t,2);return\"M\"+e+\",\"+e+\"L-\"+e+\",-\"+e},needLine:!0,noDot:!0}}},{d3:9}],43:[function(t,e,n){\"use strict\";e.exports={visible:{valType:\"boolean\"},type:{valType:\"enumerated\",values:[\"percent\",\"constant\",\"sqrt\",\"data\"]},symmetric:{valType:\"boolean\"},array:{valType:\"data_array\"},arrayminus:{valType:\"data_array\"},value:{valType:\"number\",min:0,dflt:10},valueminus:{valType:\"number\",min:0,dflt:10},traceref:{valType:\"integer\",min:0,dflt:0},tracerefminus:{valType:\"integer\",min:0,dflt:0},copy_ystyle:{valType:\"boolean\"},copy_zstyle:{valType:\"boolean\"},color:{valType:\"color\"},thickness:{valType:\"number\",min:0,dflt:2},width:{valType:\"number\",min:0},_deprecated:{opacity:{valType:\"number\"}}}},{}],44:[function(t,e,n){\"use strict\";function r(t,e,n,r){var o=e[\"error_\"+r]||{},s=o.visible&&-1!==[\"linear\",\"log\"].indexOf(n.type),c=[];if(s){for(var u=l(o),f=0;f<t.length;f++){var d=t[f],h=d[r];if(a(n.c2l(h))){var p=u(h,f);if(a(p[0])&&a(p[1])){var g=d[r+\"s\"]=h-p[0],v=d[r+\"h\"]=h+p[1];c.push(g,v)}}}i.expand(n,c,{padded:!0})}}var 
a=t(\"fast-isnumeric\"),o=t(\"../../plots/plots\"),i=t(\"../../plots/cartesian/axes\"),l=t(\"./compute_error\");e.exports=function(t){for(var e=t.calcdata,n=0;n<e.length;n++){var a=e[n],l=a[0].trace;if(o.traceIs(l,\"errorBarsOK\")){var s=i.getFromId(t,l.xaxis),c=i.getFromId(t,l.yaxis);r(a,l,s,\"x\"),r(a,l,c,\"y\")}}}},{\"../../plots/cartesian/axes\":110,\"../../plots/plots\":130,\"./compute_error\":45,\"fast-isnumeric\":11}],45:[function(t,e,n){\"use strict\";function r(t,e){return\"percent\"===t?function(t){return Math.abs(t*e/10"
+,
+"0)}:\"constant\"===t?function(){return Math.abs(e)}:\"sqrt\"===t?function(t){return Math.sqrt(Math.abs(t))}:void 0}e.exports=function(t){var e=t.type,n=t.symmetric;if(\"data\"===e){var a=t.array,o=t.arrayminus;return n||void 0===o?function(t,e){var n=+a[e];return[n,n]}:function(t,e){return[+o[e],+a[e]]}}var i=r(e,t.value),l=r(e,t.valueminus);return n||void 0===t.valueminus?function(t){var e=i(t);return[e,e]}:function(t){return[l(t),i(t)]}}},{}],46:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"../../plots/plots\"),o=t(\"../../lib\"),i=t(\"./attributes\");e.exports=function(t,e,n,l){function s(t,e){return o.coerce(f,u,i,t,e)}var c=\"error_\"+l.axis,u=e[c]={},f=t[c]||{},d=void 0!==f.array||void 0!==f.value||\"sqrt\"===f.type,h=s(\"visible\",d);if(h!==!1){var p=s(\"type\",\"array\"in f?\"data\":\"percent\"),g=!0;if(\"sqrt\"!==p&&(g=s(\"symmetric\",!((\"data\"===p?\"arrayminus\":\"valueminus\")in f))),\"data\"===p){var v=s(\"array\");if(v||(u.array=[]),s(\"traceref\"),!g){var m=s(\"arrayminus\");m||(u.arrayminus=[]),s(\"tracerefminus\")}}else\"percent\"!==p&&\"constant\"!==p||(s(\"value\"),g||s(\"valueminus\"));var y=\"copy_\"+l.inherit+\"style\";if(l.inherit){var x=e[\"error_\"+l.inherit];(x||{}).visible&&s(y,!(f.color||r(f.thickness)||r(f.width)))}l.inherit&&u[y]||(s(\"color\",n),s(\"thickness\"),s(\"width\",a.traceIs(e,\"gl3d\")?0:4))}}},{\"../../lib\":89,\"../../plots/plots\":130,\"./attributes\":43,\"fast-isnumeric\":11}],47:[function(t,e,n){\"use strict\";var r=e.exports={};r.attributes=t(\"./attributes\"),r.supplyDefaults=t(\"./defaults\"),r.calc=t(\"./calc\"),r.calcFromTrace=function(t,e){for(var n=t.x||[],a=t.y,o=n.length||a.length,i=new Array(o),l=0;o>l;l++)i[l]={x:n[l],y:a[l]};return i[0].trace=t,r.calc({calcdata:[i],_fullLayout:e}),i},r.plot=t(\"./plot\"),r.style=t(\"./style\"),r.hoverInfo=function(t,e,n){(e.error_y||{}).visible&&(n.yerr=t.yh-t.y,e.error_y.symmetric||(n.yerrneg=t.y-t.ys)),(e.error_x||{}).visible&&(n.xerr=t.xh-t.x,e.error_x.symmetric||(n.xerrneg=t.x-t.xs))}},{\"./attributes\":43,\"./calc\":44,\"./defaults\":46,\"./plot\":48,\"./style\":49}],48:[function(t,e,n){\"use strict\";function r(t,e,n){var r={x:e.c2p(t.x),y:n.c2p(t.y)};return void 0!==t.yh&&(r.yh=n.c2p(t.yh),r.ys=n.c2p(t.ys),o(r.ys)||(r.noYS=!0,r.ys=n.c2p(t.ys,!0))),void 0!==t.xh&&(r.xh=e.c2p(t.xh),r.xs=e.c2p(t.xs),o(r.xs)||(r.noXS=!0,r.xs=e.c2p(t.xs,!0))),r}var a=t(\"d3\"),o=t(\"fast-isnumeric\"),i=t(\"../../lib\"),l=t(\"../../traces/scatter/subtypes\");e.exports=function(t,e){var n=e.x(),s=e.y();t.each(function(t){var e=t[0].trace,c=e.error_x||{},u=e.error_y||{},f=l.hasMarkers(e)&&e.marker.maxdisplayed>0;if(u.visible||c.visible){var d=a.select(this).selectAll(\"g.errorbar\").data(i.identity);d.enter().append(\"g\").classed(\"errorbar\",!0),d.each(function(t){var e=a.select(this),i=r(t,n,s);if(!f||t.vis){var l;if(u.visible&&o(i.x)&&o(i.yh)&&o(i.ys)){var d=u.width;l=\"M\"+(i.x-d)+\",\"+i.yh+\"h\"+2*d+\"m-\"+d+\",0V\"+i.ys,i.noYS||(l+=\"m-\"+d+\",0h\"+2*d),e.append(\"path\").classed(\"yerror\",!0).attr(\"d\",l)}if(c.visible&&o(i.y)&&o(i.xh)&&o(i.xs)){var h=(c.copy_ystyle?u:c).width;l=\"M\"+i.xh+\",\"+(i.y-h)+\"v\"+2*h+\"m0,-\"+h+\"H\"+i.xs,i.noXS||(l+=\"m0,-\"+h+\"v\"+2*h),e.append(\"path\").classed(\"xerror\",!0).attr(\"d\",l)}}})}})}},{\"../../lib\":89,\"../../traces/scatter/subtypes\":186,d3:9,\"fast-isnumeric\":11}],49:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"../color\");e.exports=function(t){t.each(function(t){var 
e=t[0].trace,n=e.error_y||{},o=e.error_x||{},i=r.select(this);i.selectAll(\"path.yerror\").style(\"stroke-width\",n.thickness+\"px\").call(a.stroke,n.color),o.copy_ystyle&&(o=n),i.selectAll(\"path.xerror\").style(\"stroke-width\",o.thickness+\"px\").call(a.stroke,o.color)})}},{\"../color\":18,d3:9}],50:[function(t,e,n){\"use strict\";var r=t(\"../../plots/cartesian/constants\");e.exports={_isLinkedToArray:!0,source:{valType:\"string\"},layer:{valType:\"enumerated\",values:[\"below\",\"above\"],dflt:\"above\"},sizex:{valType:\"number\",dflt:0},sizey:{valType:\"number\",dflt:0},sizing:{valType:\"enumerated\",values:[\"fill\",\"contain\",\"stretch\"],dflt:\"contain\"},opacity:{valType:\"number\",min:0,max:1,dflt:1},x:{valType:\"number\",dflt:0},y:{valType:\"number\",dflt:0},xanchor:{valType:\"enumerated\",values:[\"left\",\"center\",\"right\"],dflt:\"left\"},yanchor:{valType:\"enumerated\",values:[\"top\",\"middle\",\"bottom\"],dflt:\"top\"},xref:{valType:\"enumerated\",values:[\"paper\",r.idRegex.x.toString()],dflt:\"paper\"},yref:{valType:\"enumerated\",values:[\"paper\",r.idRegex.y.toString()],dflt:\"paper\"}}},{\"../../plots/cartesian/constants\":115}],51:[function(t,e,n){\"use strict\";function r(t,e,n){function r(n,r){return o.coerce(t,e,i,n,r)}e=e||{},r(\"source\"),r(\"layer\"),r(\"x\"),r(\"y\"),r(\"xanchor\"),r(\"yanchor\"),r(\"sizex\"),r(\"sizey\"),r(\"sizing\"),r(\"opacity\");for(var l=0;2>l;l++){var s={_fullLayout:n},c=[\"x\",\"y\"][l];a.coerceRef(t,e,s,c,\"paper\")}return e}var a=t(\"../../plots/cartesian/axes\"),o=t(\"../../lib\"),i=t(\"./attributes\");e.exports=function(t,e){if(t.images&&Array.isArray(t.images))for(var n=t.images,a=e.images=[],o=0;o<n.length;o++){var i=n[o];if(i.source){var l=r(n[o]||{},a[o]||{},e);a.push(l)}}}},{\"../../lib\":89,\"../../plots/cartesian/axes\":110,\"./attributes\":50}],52:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"../drawing\"),o=t(\"../../plots/cartesian/axes\");e.exports=function(t){function e(e){var n=r.select(this),a=new Promise(function(t){function r(){n.remove(),t()}var a=new Image;a.setAttribute(\"crossOrigin\",\"anonymous\"),a.onerror=r,a.onload=function(){var t=document.createElement(\"canvas\");t.width=this.width,t.height=this.height;var e=t.getContext(\"2d\");e.drawImage(this,0,0);var r=t.toDataURL(\"image/png\");n.attr(\"xlink:href\",r)},n.on(\"error\",r),n.on(\"load\",t),a.src=e.source});t._promises.push(a)}function n(e){var n=r.select(this),i=o.getFromId(t,e.xref),s=o.getFromId(t,e.yref),c=l._size,u=i?Math.abs(i.l2p(e.sizex)-i.l2p(0)):e.sizex*c.w,f=s?Math.abs(s.l2p(e.sizey)-s.l2p(0)):e.sizey*c.h,d=u*h.x[e.xanchor].offset+c.l,p=f*h.y[e.yanchor].offset+c.t,g=h.x[e.xanchor].sizing+h.y[e.yanchor].sizing,v=(i?i.l2p(e.x):e.x*c.w)+d,m=(s?s.l2p(e.y):c.h-e.y*c.h)+p;switch(e.sizing){case\"fill\":g+=\" slice\";break;case\"stretch\":g=\"none\"}n.attr({x:v,y:m,width:u,height:f,preserveAspectRatio:g,opacity:e.opacity});var y=i?i._id:\"\",x=s?s._id:\"\",b=y+x;n.call(a.setClipUrl,\"clip\"+l._uid+b)}function i(t,e){return t.source+e}var l=t._fullLayout,s=[],c=[],u=[];if(l.images){for(var f=0;f<l.images.length;f++){var d=l.images[f];\"below\"===d.layer&&\"paper\"!==d.xref&&\"paper\"!==d.yref?c.push(d):\"above\"===d.layer?s.push(d):u.push(d)}var 
h={x:{left:{sizing:\"xMin\",offset:0},center:{sizing:\"xMid\",offset:-0.5},right:{sizing:\"xMax\",offset:-1}},y:{top:{sizing:\"YMin\",offset:0},middle:{sizing:\"YMid\",offset:-0.5},bottom:{sizing:\"YMax\",offset:-1}}},p=l._imageLowerLayer.selectAll(\"image\").data(u,i),g=l._imageSubplotLayer.selectAll(\"image\").data(c,i),v=l._imageUpperLayer.selectAll(\"image\").data(s,i);p.enter().append(\"image\").each(e),g.enter().append(\"image\").each(e),v.enter().append(\"image\").each(e),p.exit().remove(),g.exit().remove(),v.exit().remove(),p.each(n),g.each(n),v.each(n)}}},{\"../../plots/cartesian/axes\":110,\"../drawing\":41,d3:9}],53:[function(t,e,n){\"use strict\";var r=t(\"./draw\"),a=t(\"./defaults\"),o=t(\"./attributes\");e.exports={draw:r,layoutAttributes:o,supplyLayoutDefaults:a}},{\"./attributes\":50,\"./defaults\":51,\"./draw\":52}],54:[function(t,e,n){\"use strict\";n.isRightAnchor=function(t){return\"right\"===t.xanchor||\"auto\"===t.xanchor&&t.x>=2/3},n.isCenterAnchor=function(t){return\"center\"===t.xanchor||\"auto\"===t.xanchor&&t.x>1/3&&t.x<2/3},n.isBottomAnchor=function(t){return\"bottom\"===t.yanchor||\"auto\"===t.yanchor&&t.y<=1/3},n.isMiddleAnchor=function(t){return\"middle\"===t.yanchor||\"auto\"===t.yanchor&&t.y>1/3&&t.y<2/3}},{}],55:[function(t,e,n){\"use strict\";var r=t(\"../../plots/font_attributes\"),a=t(\"../color/attributes\"),o=t(\"../../lib/extend\").extendFlat;e.exports={bgcolor:{valType:\"color\"},bordercolor:{valType:\"color\",dflt:a.defaultLine},borderwidth:{valType:\"number\",min:0,dflt:0},font:o({},r,{}),orientation:{valType:\"enumerated\",values:[\"v\",\"h\"],dflt:\"v\"},traceorder:{valType:\"flaglist\",flags:[\"reversed\",\"grouped\"],extras:[\"normal\"]},tracegroupgap:{valType:\"number\",min:0,dflt:10},x:{valType:\"number\",min:-2,max:3,dflt:1.02},xanchor:{valType:\"enumerated\",values:[\"auto\",\"left\",\"center\",\"right\"],dflt:\"left\"},y:{valType:\"number\",min:-2,max:3,dflt:1},yanchor:{valType:\"enumerated\",values:[\"auto\",\"top\",\"middle\",\"bottom\"],dflt:\"auto\"}}},{\"../../lib/extend\":88,\"../../plots/font_attributes\":128,\"../color/attributes\":17}],56:[function(t,e,n){\"use strict\";e.exports={scrollBarWidth:4,scrollBarHeight:20,scrollBarColor:\"#808BA4\",scrollBarMargin:4}},{}],57:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"../../plots/plots\"),o=t(\"./attributes\"),i=t(\"./helpers\");e.exports=function(t,e,n){function l(t,e){return r.coerce(d,h,o,t,e)}for(var s,c,u,f,d=t.legend||{},h=e.legend={},p=0,g=\"normal\",v=0;v<n.length;v++){var m=n[v];i.legendGetsTrace(m)&&(p++,a.traceIs(m,\"pie\")&&p++),(a.traceIs(m,\"bar\")&&\"stack\"===e.barmode||-1!==[\"tonextx\",\"tonexty\"].indexOf(m.fill))&&(g=i.isGrouped({traceorder:g})?\"grouped+reversed\":\"reversed\"),void 0!==m.legendgroup&&\"\"!==m.legendgroup&&(g=i.isReversed({traceorder:g})?\"reversed+grouped\":\"grouped\")}var y=r.coerce(t,e,a.layoutAttributes,\"showlegend\",p>1);if(y!==!1){if(l(\"bgcolor\",e.paper_bgcolor),l(\"bordercolor\"),l(\"borderwidth\"),r.coerceFont(l,\"font\",e.font),l(\"orientation\"),\"h\"===h.orientation){var x=t.xaxis;x&&x.rangeslider&&x.rangeslider.visible?(s=0,u=\"left\",c=1.1,f=\"bottom\"):(s=0,u=\"left\",c=-.1,f=\"top\")}l(\"traceorder\",g),i.isGrouped(e.legend)&&l(\"tracegroupgap\"),l(\"x\",s),l(\"xanchor\",u),l(\"y\",c),l(\"yanchor\",f),r.noneOrAll(d,h,[\"x\",\"y\"])}}},{\"../../lib\":89,\"../../plots/plots\":130,\"./attributes\":55,\"./helpers\":60}],58:[function(t,e,n){\"use strict\";function r(t,e){function 
n(n){u.util.convertToTspans(n,function(){n.selectAll(\"tspan.line\").attr({x:n.attr(\"x\")}),t.call(o,e)})}var r=t.data()[0][0],a=e._fullLayout,i=r.trace,l=d.traceIs(i,\"pie\"),s=i.index,c=l?r.label:i.name,f=t.selectAll(\"text.legendtext\").data([0]);f.enter().append(\"text\").classed(\"legen"
+,
+"dtext\",!0),f.attr({x:40,y:0,\"data-unformatted\":c}).style(\"text-anchor\",\"start\").classed(\"user-select-none\",!0).call(p.font,a.legend.font).text(c),e._context.editable&&!l?f.call(u.util.makeEditable).call(n).on(\"edit\",function(t){this.attr({\"data-unformatted\":t}),this.text(t).call(n),this.text()||(t=\" \"),u.restyle(e,\"name\",t,s)}):f.call(n)}function a(t,e){var n=e._fullLayout.hiddenlabels?e._fullLayout.hiddenlabels.slice():[],r=t.selectAll(\"rect\").data([0]);r.enter().append(\"rect\").classed(\"legendtoggle\",!0).style(\"cursor\",\"pointer\").attr(\"pointer-events\",\"all\").call(g.fill,\"rgba(0,0,0,0)\"),r.on(\"click\",function(){if(!e._dragged){var r,a,o=t.data()[0][0],i=e._fullData,l=o.trace,s=l.legendgroup,c=[];if(d.traceIs(l,\"pie\")){var f=o.label,h=n.indexOf(f);-1===h?n.push(f):n.splice(h,1),u.relayout(e,\"hiddenlabels\",n)}else{if(\"\"===s)c=[l.index];else for(var p=0;p<i.length;p++)r=i[p],r.legendgroup===s&&c.push(r.index);a=l.visible===!0?\"legendonly\":!0,u.restyle(e,\"visible\",a,c)}}})}function o(t,e){var n,r,a=t.data()[0][0],o=t.selectAll(\".legendtoggle\"),i=t.select(\"g[class*=math-group]\"),l=e._fullLayout.legend,s=1.3*l.font.size;if(!a.trace.showlegend)return void t.remove();if(i.node()){var c=p.bBox(i.node());n=c.height,r=c.width,f.setTranslate(i,0,n/4)}else{var u=t.selectAll(\".legendtext\"),d=t.selectAll(\".legendtext>tspan\"),h=d[0].length||1;n=s*h,r=u.node()&&p.bBox(u.node()).width;var g=s*(.3+(1-h)/2);u.attr(\"y\",g),d.attr(\"y\",g)}n=Math.max(n,16)+3,o.attr({x:0,y:-n/2,height:n}),a.height=n,a.width=r}function i(t,e,n){var r=t._fullLayout,a=r.legend,o=a.borderwidth,i=x.isGrouped(a);if(x.isVertical(a))i&&e.each(function(t,e){f.setTranslate(this,0,e*a.tracegroupgap)}),a.width=0,a.height=0,n.each(function(t){var e=t[0],n=e.height,r=e.width;f.setTranslate(this,o,5+o+a.height+n/2),a.height+=n,a.width=Math.max(a.width,r)}),a.width+=45+2*o,a.height+=10+2*o,i&&(a.height+=(a._lgroupsLength-1)*a.tracegroupgap),n.selectAll(\".legendtoggle\").attr(\"width\",(t._context.editable?0:a.width)+40),a.width=Math.ceil(a.width),\n"
+,
+"a.height=Math.ceil(a.height);else if(i){a.width=0,a.height=0;for(var l=[a.width],s=e.data(),u=0,d=s.length;d>u;u++){var h=s[u].map(function(t){return t[0].width}),p=40+Math.max.apply(null,h);a.width+=a.tracegroupgap+p,l.push(a.width)}e.each(function(t,e){f.setTranslate(this,l[e],0)}),e.each(function(){var t=c.select(this),e=t.selectAll(\"g.traces\"),n=0;e.each(function(t){var e=t[0],r=e.height;f.setTranslate(this,0,5+o+n+r/2),n+=r}),a.height=Math.max(a.height,n)}),a.height+=10+2*o,a.width+=2*o,a.width=Math.ceil(a.width),a.height=Math.ceil(a.height),n.selectAll(\".legendtoggle\").attr(\"width\",t._context.editable?0:a.width)}else a.width=0,a.height=0,n.each(function(t){var e=t[0],n=40+e.width,r=a.tracegroupgap||5;f.setTranslate(this,o+a.width,5+o+e.height/2),a.width+=r+n,a.height=Math.max(a.height,e.height)}),a.width+=2*o,a.height+=10+2*o,a.width=Math.ceil(a.width),a.height=Math.ceil(a.height),n.selectAll(\".legendtoggle\").attr(\"width\",t._context.editable?0:a.width)}function l(t){var e=t._fullLayout,n=e.legend,r=\"left\";b.isRightAnchor(n)?r=\"right\":b.isCenterAnchor(n)&&(r=\"center\");var a=\"top\";b.isBottomAnchor(n)?a=\"bottom\":b.isMiddleAnchor(n)&&(a=\"middle\"),d.autoMargin(t,\"legend\",{x:n.x,y:n.y,l:n.width*({right:1,center:.5}[r]||0),r:n.width*({left:1,center:.5}[r]||0),b:n.height*({top:1,middle:.5}[a]||0),t:n.height*({bottom:1,middle:.5}[a]||0)})}function s(t){var e=t._fullLayout,n=e.legend,r=\"left\";b.isRightAnchor(n)?r=\"right\":b.isCenterAnchor(n)&&(r=\"center\"),d.autoMargin(t,\"legend\",{x:n.x,y:.5,l:n.width*({right:1,center:.5}[r]||0),r:n.width*({left:1,center:.5}[r]||0),b:0,t:0})}var c=t(\"d3\"),u=t(\"../../plotly\"),f=t(\"../../lib\"),d=t(\"../../plots/plots\"),h=t(\"../dragelement\"),p=t(\"../drawing\"),g=t(\"../color\"),v=t(\"./constants\"),m=t(\"./get_legend_data\"),y=t(\"./style\"),x=t(\"./helpers\"),b=t(\"./anchor_utils\");e.exports=function(t){function e(t,e){L.attr(\"data-scroll\",e).call(f.setTranslate,0,e),T.call(p.setRect,j,t,v.scrollBarWidth,v.scrollBarHeight),M.select(\"rect\").attr({y:x.borderwidth-e})}var n=t._fullLayout,o=\"legend\"+n._uid;if(n._infolayer&&t.calcdata){var x=n.legend,_=n.showlegend&&m(t.calcdata,x),w=n.hiddenlabels||[];if(!n.showlegend||!_.length)return n._infolayer.selectAll(\".legend\").remove(),n._topdefs.select(\"#\"+o).remove(),void d.autoMargin(t,\"legend\");var k=n._infolayer.selectAll(\"g.legend\").data([0]);k.enter().append(\"g\").attr({\"class\":\"legend\",\"pointer-events\":\"all\"});var M=n._topdefs.selectAll(\"#\"+o).data([0]);M.enter().append(\"clipPath\").attr(\"id\",o).append(\"rect\");var A=k.selectAll(\"rect.bg\").data([0]);A.enter().append(\"rect\").attr({\"class\":\"bg\",\"shape-rendering\":\"crispEdges\"}),A.call(g.stroke,x.bordercolor),A.call(g.fill,x.bgcolor),A.style(\"stroke-width\",x.borderwidth+\"px\");var L=k.selectAll(\"g.scrollbox\").data([0]);L.enter().append(\"g\").attr(\"class\",\"scrollbox\");var T=k.selectAll(\"rect.scrollbar\").data([0]);T.enter().append(\"rect\").attr({\"class\":\"scrollbar\",rx:20,ry:2,width:0,height:0}).call(g.fill,\"#808BA4\");var z=L.selectAll(\"g.groups\").data(_);z.enter().append(\"g\").attr(\"class\",\"groups\"),z.exit().remove();var S=z.selectAll(\"g.traces\").data(f.identity);S.enter().append(\"g\").attr(\"class\",\"traces\"),S.exit().remove(),S.call(y).style(\"opacity\",function(t){var e=t[0].trace;return d.traceIs(e,\"pie\")?-1!==w.indexOf(t[0].label)?.5:1:\"legendonly\"===e.visible?.5:1}).each(function(){c.select(this).call(r,t).call(a,t)});var 
E=0!==k.enter().size();E&&(i(t,z,S),l(t));var C=0,O=n.width,P=0,N=n.height;i(t,z,S),x.height>N?s(t):l(t);var D=n._size,I=D.l+D.w*x.x,R=D.t+D.h*(1-x.y);b.isRightAnchor(x)?I-=x.width:b.isCenterAnchor(x)&&(I-=x.width/2),b.isBottomAnchor(x)?R-=x.height:b.isMiddleAnchor(x)&&(R-=x.height/2);var j=x.width,q=D.w;j>q?(I=D.l,j=q):(I+j>O&&(I=O-j),C>I&&(I=C),j=Math.min(O-I,x.width));var F=x.height,B=D.h;F>B?(R=D.t,F=B):(R+F>N&&(R=N-F),P>R&&(R=P),F=Math.min(N-R,x.height)),f.setTranslate(k,I,R);var H,V,Z=F-v.scrollBarHeight-2*v.scrollBarMargin,Y=x.height-F;if(x.height<=F||t._context.staticPlot)A.attr({width:j-x.borderwidth,height:F-x.borderwidth,x:x.borderwidth/2,y:x.borderwidth/2}),f.setTranslate(L,0,0),M.select(\"rect\").attr({width:j-2*x.borderwidth,height:F-2*x.borderwidth,x:x.borderwidth,y:x.borderwidth}),L.call(p.setClipUrl,o);else{H=v.scrollBarMargin,V=L.attr(\"data-scroll\")||0,A.attr({width:j-2*x.borderwidth+v.scrollBarWidth+v.scrollBarMargin,height:F-x.borderwidth,x:x.borderwidth/2,y:x.borderwidth/2}),M.select(\"rect\").attr({width:j-2*x.borderwidth+v.scrollBarWidth+v.scrollBarMargin,height:F-2*x.borderwidth,x:x.borderwidth,y:x.borderwidth-V}),L.call(p.setClipUrl,o),E&&e(H,V),k.on(\"wheel\",null),k.on(\"wheel\",function(){V=f.constrain(L.attr(\"data-scroll\")-c.event.deltaY/Z*Y,-Y,0),H=v.scrollBarMargin-V/Y*Z,e(H,V),c.event.preventDefault()}),T.on(\".drag\",null),L.on(\".drag\",null);var U=c.behavior.drag().on(\"drag\",function(){H=f.constrain(c.event.y-v.scrollBarHeight/2,v.scrollBarMargin,v.scrollBarMargin+Z),V=-(H-v.scrollBarMargin)/Z*Y,e(H,V)});T.call(U),L.call(U)}if(t._context.editable){var X,G,$,Q;k.classed(\"cursor-move\",!0),h.init({element:k.node(),prepFn:function(){var t=f.getTranslate(k);$=t.x,Q=t.y},moveFn:function(t,e){var n=$+t,r=Q+e;f.setTranslate(k,n,r),X=h.align(n,0,D.l,D.l+D.w,x.xanchor),G=h.align(r,0,D.t+D.h,D.t,x.yanchor)},doneFn:function(e){e&&void 0!==X&&void 0!==G&&u.relayout(t,{\"legend.x\":X,\"legend.y\":G})}})}}}},{\"../../lib\":89,\"../../plotly\":107,\"../../plots/plots\":130,\"../color\":18,\"../dragelement\":39,\"../drawing\":41,\"./anchor_utils\":54,\"./constants\":56,\"./get_legend_data\":59,\"./helpers\":60,\"./style\":62,d3:9}],59:[function(t,e,n){\"use strict\";var r=t(\"../../plots/plots\"),a=t(\"./helpers\");e.exports=function(t,e){function n(t,n){if(\"\"!==t&&a.isGrouped(e))-1===s.indexOf(t)?(s.push(t),c=!0,l[t]=[[n]]):l[t].push([n]);else{var r=\"~~i\"+f;s.push(r),l[r]=[[n]],f++}}var o,i,l={},s=[],c=!1,u={},f=0;for(o=0;o<t.length;o++){var d=t[o],h=d[0],p=h.trace,g=p.legendgroup;if(a.legendGetsTrace(p)&&p.showlegend)if(r.traceIs(p,\"pie\"))for(u[g]||(u[g]={}),i=0;i<d.length;i++){var v=d[i].label;u[g][v]||(n(g,{label:v,color:d[i].color,i:d[i].i,trace:p}),u[g][v]=!0)}else n(g,h)}if(!s.length)return[];var m,y,x=s.length;if(c&&a.isGrouped(e))for(y=new Array(x),o=0;x>o;o++)m=l[s[o]],y[o]=a.isReversed(e)?m.reverse():m;else{for(y=[new Array(x)],o=0;x>o;o++)m=l[s[o]][0],y[0][a.isReversed(e)?x-o-1:o]=m;x=1}return e._lgroupsLength=x,y}},{\"../../plots/plots\":130,\"./helpers\":60}],60:[function(t,e,n){\"use strict\";var r=t(\"../../plots/plots\");n.legendGetsTrace=function(t){return t.visible&&r.traceIs(t,\"showLegend\")},n.isGrouped=function(t){return-1!==(t.traceorder||\"\").indexOf(\"grouped\")},n.isVertical=function(t){return\"h\"!==t.orientation},n.isReversed=function(t){return-1!==(t.traceorder||\"\").indexOf(\"reversed\")}},{\"../../plots/plots\":130}],61:[function(t,e,n){\"use strict\";var 
r=e.exports={};r.layoutAttributes=t(\"./attributes\"),r.supplyLayoutDefaults=t(\"./defaults\"),r.draw=t(\"./draw\"),r.style=t(\"./style\")},{\"./attributes\":55,\"./defaults\":57,\"./draw\":58,\"./style\":62}],62:[function(t,e,n){\"use strict\";function r(t){var e=t[0].trace,n=e.visible&&e.fill&&\"none\"!==e.fill,r=h.hasLines(e),a=s.select(this).select(\".legendfill\").selectAll(\"path\").data(n?[t]:[]);a.enter().append(\"path\").classed(\"js-fill\",!0),a.exit().remove(),a.attr(\"d\",\"M5,0h30v6h-30z\").call(f.fillGroupStyle);var o=s.select(this).select(\".legendlines\").selectAll(\"path\").data(r?[t]:[]);o.enter().append(\"path\").classed(\"js-line\",!0).attr(\"d\",\"M5,0h30\"),o.exit().remove(),o.call(f.lineGroupStyle)}function a(t){function e(t,e,n){var r=c.nestedProperty(i,t).get(),a=Array.isArray(r)&&e?e(r):r;if(n){if(a<n[0])return n[0];if(a>n[1])return n[1]}return a}function n(t){return t[0]}var r,a,o=t[0],i=o.trace,l=h.hasMarkers(i),u=h.hasText(i),d=h.hasLines(i);if(l||u||d){var p={},g={};l&&(p.mc=e(\"marker.color\",n),p.mo=e(\"marker.opacity\",c.mean,[.2,1]),p.ms=e(\"marker.size\",c.mean,[2,16]),p.mlc=e(\"marker.line.color\",n),p.mlw=e(\"marker.line.width\",c.mean,[0,5]),g.marker={sizeref:1,sizemin:1,sizemode:\"diameter\"}),d&&(g.line={width:e(\"line.width\",n,[0,10])}),u&&(p.tx=\"Aa\",p.tp=e(\"textposition\",n),p.ts=10,p.tc=e(\"textfont.color\",n),p.tf=e(\"textfont.family\",n)),r=[c.minExtend(o,p)],a=c.minExtend(i,g)}var v=s.select(this).select(\"g.legendpoints\"),m=v.selectAll(\"path.scatterpts\").data(l?r:[]);m.enter().append(\"path\").classed(\"scatterpts\",!0).attr(\"transform\",\"translate(20,0)\"),m.exit().remove(),m.call(f.pointStyle,a),l&&(r[0].mrc=3);var y=v.selectAll(\"g.pointtext\").data(u?r:[]);y.enter().append(\"g\").classed(\"pointtext\",!0).append(\"text\").attr(\"transform\",\"translate(20,0)\"),y.exit().remove(),y.selectAll(\"text\").call(f.textPointStyle,a)}function o(t){var e=t[0].trace,n=e.marker||{},r=n.line||{},a=s.select(this).select(\"g.legendpoints\").selectAll(\"path.legendbar\").data(u.traceIs(e,\"bar\")?[t]:[]);a.enter().append(\"path\").classed(\"legendbar\",!0).attr(\"d\",\"M6,6H-6V-6H6Z\").attr(\"transform\",\"translate(20,0)\"),a.exit().remove(),a.each(function(t){var e=(t.mlw+1||r.width+1)-1,a=s.select(this);a.style(\"stroke-width\",e+\"px\").call(d.fill,t.mc||n.color),e&&a.call(d.stroke,t.mlc||r.color)})}function i(t){var e=t[0].trace,n=s.select(this).select(\"g.legendpoints\").selectAll(\"path.legendbox\").data(u.traceIs(e,\"box\")&&e.visible?[t]:[]);n.enter().append(\"path\").classed(\"legendbox\",!0).attr(\"d\",\"M6,6H-6V-6H6Z\").attr(\"transform\",\"translate(20,0)\"),n.exit().remove(),n.each(function(t){var n=(t.lw+1||e.line.width+1)-1,r=s.select(this);r.style(\"stroke-width\",n+\"px\").call(d.fill,t.fc||e.fillcolor),n&&r.call(d.stroke,t.lc||e.line.color)})}function l(t){var e=t[0].trace,n=s.select(this).select(\"g.legendpoints\").selectAll(\"path.legendpie\").data(u.traceIs(e,\"pie\")&&e.visible?[t]:[]);n.enter().append(\"path\").classed(\"legendpie\",!0).attr(\"d\",\"M6,6H-6V-6H6Z\").attr(\"transform\",\"translate(20,0)\"),n.exit().remove(),n.size()&&n.call(p,t[0],e)}var s=t(\"d3\"),c=t(\"../../lib\"),u=t(\"../../plots/plots\"),f=t(\"../drawing\"),d=t(\"../color\"),h=t(\"../../traces/scatter/subtypes\"),p=t(\"../../traces/pie/style_one\");e.exports=function(t){t.each(functio"
+,
+"n(t){var e=s.select(this),n=e.selectAll(\"g.legendfill\").data([t]);n.enter().append(\"g\").classed(\"legendfill\",!0);var r=e.selectAll(\"g.legendlines\").data([t]);r.enter().append(\"g\").classed(\"legendlines\",!0);var a=e.selectAll(\"g.legendsymbols\").data([t]);a.enter().append(\"g\").classed(\"legendsymbols\",!0),a.style(\"opacity\",t[0].trace.opacity),a.selectAll(\"g.legendpoints\").data([t]).enter().append(\"g\").classed(\"legendpoints\",!0)}).each(o).each(i).each(l).each(r).each(a)}},{\"../../lib\":89,\"../../plots/plots\":130,\"../../traces/pie/style_one\":165,\"../../traces/scatter/subtypes\":186,\"../color\":18,\"../drawing\":41,d3:9}],63:[function(t,e,n){\"use strict\";function r(t,e){var n=e.currentTarget,r=n.getAttribute(\"data-attr\"),a=n.getAttribute(\"data-val\")||!0,o=t._fullLayout,i={};if(\"zoom\"===r){for(var l,s,u=\"in\"===a?.5:2,f=(1+u)/2,d=(1-u)/2,h=c.Axes.list(t,null,!0),p=0;p<h.length;p++)if(l=h[p],!l.fixedrange)if(s=l._name,\"auto\"===a)i[s+\".autorange\"]=!0;else if(\"reset\"===a)if(void 0===l._rangeInitial)i[s+\".autorange\"]=!0;else{var g=l._rangeInitial.slice();i[s+\".range[0]\"]=g[0],i[s+\".range[1]\"]=g[1]}else{var v=l.range;i[s+\".range[0]\"]=f*v[0]+d*v[1],i[s+\".range[1]\"]=f*v[1]+d*v[0]}}else\"hovermode\"!==r||\"x\"!==a&&\"y\"!==a||(a=o._isHoriz?\"y\":\"x\",n.setAttribute(\"data-val\",a)),i[r]=a;c.relayout(t,i)}function a(t,e){for(var n=e.currentTarget,r=n.getAttribute(\"data-attr\"),a=n.getAttribute(\"data-val\")||!0,o=t._fullLayout,i=c.Plots.getSubplotIds(o,\"gl3d\"),l={},s=r.split(\".\"),u=0;u<i.length;u++)l[i[u]+\".\"+s[1]]=a;c.relayout(t,l)}function o(t,e){for(var n=e.currentTarget,r=n.getAttribute(\"data-attr\"),a=t._fullLayout,o=c.Plots.getSubplotIds(a,\"gl3d\"),i=0;i<o.length;i++){var l=o[i],s=a[l],u=s._scene;\"resetDefault\"===r?u.setCameraToDefault():\"resetLastSave\"===r&&u.setCamera(s.camera)}}function i(t,e){var n=e.currentTarget,r=n._previousVal||!1,a=t.layout,o=t._fullLayout,i=c.Plots.getSubplotIds(o,\"gl3d\"),l=[\"xaxis\",\"yaxis\",\"zaxis\"],s=[\"showspikes\",\"spikesides\",\"spikethickness\",\"spikecolor\"],f={},d={},h={};if(r)h=u.extendDeep(a,r),n._previousVal=null;else{h={\"allaxes.showspikes\":!1};for(var p=0;p<i.length;p++){var g=i[p],v=o[g],m=f[g]={};m.hovermode=v.hovermode,h[g+\".hovermode\"]=!1;for(var y=0;3>y;y++){var x=l[y];d=m[x]={};for(var b=0;b<s.length;b++){var _=s[b];d[_]=v[x][_]}}}n._previousVal=u.extendDeep({},f)}c.relayout(t,h)}function l(t,e){for(var n=e.currentTarget,r=n.getAttribute(\"data-attr\"),a=n.getAttribute(\"data-val\")||!0,o=t._fullLayout,i=c.Plots.getSubplotIds(o,\"geo\"),l=0;l<i.length;l++){var s=o[i[l]]._geo;if(\"zoom\"===r){var u=s.projection.scale(),f=\"in\"===a?2*u:.5*u;s.projection.scale(f),s.zoom.scale(f),s.render()}else\"reset\"===r&&s.zoomReset()}}function s(t){var e,n=t._fullLayout;e=n._has(\"cartesian\")?n._isHoriz?\"y\":\"x\":\"closest\";var r=t._fullLayout.hovermode?!1:e;c.relayout(t,\"hovermode\",r)}var c=t(\"../../plotly\"),u=t(\"../../lib\"),f=t(\"../../snapshot/download\"),d=t(\"../../../build/ploticon\"),h=e.exports={};h.toImage={name:\"toImage\",title:\"Download plot as a png\",icon:d.camera,click:function(t){var e=\"png\";u.notifier(\"Taking snapshot - this may take a few seconds\",\"long\"),u.isIE()&&(u.notifier(\"IE only supports svg. 
Changing format to svg.\",\"long\"),e=\"svg\"),f(t,{format:e}).then(function(t){u.notifier(\"Snapshot succeeded - \"+t,\"long\")}).catch(function(){u.notifier(\"Sorry there was a problem downloading your snapshot!\",\"long\")})}},h.sendDataToCloud={name:\"sendDataToCloud\",title:\"Save and edit plot in cloud\",icon:d.disk,click:function(t){c.Plots.sendDataToCloud(t)}},h.zoom2d={name:\"zoom2d\",title:\"Zoom\",attr:\"dragmode\",val:\"zoom\",icon:d.zoombox,click:r},h.pan2d={name:\"pan2d\",title:\"Pan\",attr:\"dragmode\",val:\"pan\",icon:d.pan,click:r},h.select2d={name:\"select2d\",title:\"Box Select\",attr:\"dragmode\",val:\"select\",icon:d.selectbox,click:r},h.lasso2d={name:\"lasso2d\",title:\"Lasso Select\",attr:\"dragmode\",val:\"lasso\",icon:d.lasso,click:r},h.zoomIn2d={name:\"zoomIn2d\",title:\"Zoom in\",attr:\"zoom\",val:\"in\",icon:d.zoom_plus,click:r},h.zoomOut2d={name:\"zoomOut2d\",title:\"Zoom out\",attr:\"zoom\",val:\"out\",icon:d.zoom_minus,click:r},h.autoScale2d={name:\"autoScale2d\",title:\"Autoscale\",attr:\"zoom\",val:\"auto\",icon:d.autoscale,click:r},h.resetScale2d={name:\"resetScale2d\",title:\"Reset axes\",attr:\"zoom\",val:\"reset\",icon:d.home,click:r},h.hoverClosestCartesian={name:\"hoverClosestCartesian\",title:\"Show closest data on hover\",attr:\"hovermode\",val:\"closest\",icon:d.tooltip_basic,gravity:\"ne\",click:r},h.hoverCompareCartesian={name:\"hoverCompareCartesian\",title:\"Compare data on hover\",attr:\"hovermode\",val:function(t){return t._fullLayout._isHoriz?\"y\":\"x\"},icon:d.tooltip_compare,gravity:\"ne\",click:r},h.zoom3d={name:\"zoom3d\",title:\"Zoom\",attr:\"scene.dragmode\",val:\"zoom\",icon:d.zoombox,click:a},h.pan3d={name:\"pan3d\",title:\"Pan\",attr:\"scene.dragmode\",val:\"pan\",icon:d.pan,click:a},h.orbitRotation={name:\"orbitRotation\",title:\"orbital rotation\",attr:\"scene.dragmode\",val:\"orbit\",icon:d[\"3d_rotate\"],click:a},h.tableRotation={name:\"tableRotation\",title:\"turntable rotation\",attr:\"scene.dragmode\",val:\"turntable\",icon:d[\"z-axis\"],click:a},h.resetCameraDefault3d={name:\"resetCameraDefault3d\",title:\"Reset camera to default\",attr:\"resetDefault\",icon:d.home,click:o},h.resetCameraLastSave3d={name:\"resetCameraLastSave3d\",title:\"Reset camera to last save\",attr:\"resetLastSave\",icon:d.movie,click:o},h.hoverClosest3d={name:\"hoverClosest3d\",title:\"Toggle show closest data on hover\",attr:\"hovermode\",val:null,toggle:!0,icon:d.tooltip_basic,gravity:\"ne\",click:i},h.zoomInGeo={name:\"zoomInGeo\",title:\"Zoom in\",attr:\"zoom\",val:\"in\",icon:d.zoom_plus,click:l},h.zoomOutGeo={name:\"zoomOutGeo\",title:\"Zoom out\",attr:\"zoom\",val:\"out\",icon:d.zoom_minus,click:l},h.resetGeo={name:\"resetGeo\",title:\"Reset\",attr:\"reset\",val:null,icon:d.autoscale,click:l},h.hoverClosestGeo={name:\"hoverClosestGeo\",title:\"Toggle show closest data on hover\",attr:\"hovermode\",val:null,toggle:!0,icon:d.tooltip_basic,gravity:\"ne\",click:s},h.hoverClosestGl2d={name:\"hoverClosestGl2d\",title:\"Toggle show closest data on hover\",attr:\"hovermode\",val:null,toggle:!0,icon:d.tooltip_basic,gravity:\"ne\",click:s},h.hoverClosestPie={name:\"hoverClosestPie\",title:\"Toggle show closest data on hover\",attr:\"hovermode\",val:\"closest\",icon:d.tooltip_basic,gravity:\"ne\",click:s},h.toggleHover={name:\"toggleHover\",title:\"Toggle show closest data on hover\",attr:\"hovermode\",val:null,toggle:!0,icon:d.tooltip_basic,gravity:\"ne\",click:function(t,e){s(t),i(t,e)}},h.resetViews={name:\"resetViews\",title:\"Reset 
views\",icon:d.home,click:function(t,e){var n=e.currentTarget;n.setAttribute(\"data-attr\",\"zoom\"),n.setAttribute(\"data-val\",\"reset\"),r(t,e),n.setAttribute(\"data-attr\",\"resetLastSave\"),o(t,e)}}},{\"../../../build/ploticon\":2,\"../../lib\":89,\"../../plotly\":107,\"../../snapshot/download\":137}],64:[function(t,e,n){\"use strict\";function r(t){this.container=t.container,this.element=document.createElement(\"div\"),this.update(t.graphInfo,t.buttons),this.container.appendChild(this.element)}function a(t,e){var n=t._fullLayout,a=new r({graphInfo:t,container:n._paperdiv.node(),buttons:e});return n._privateplot&&o.select(a.element).append(\"span\").classed(\"badge-private float--left\",!0).text(\"PRIVATE\"),a}var o=t(\"d3\"),i=t(\"../../lib\"),l=t(\"../../../build/ploticon\"),s=r.prototype;s.update=function(t,e){this.graphInfo=t;var n=this.graphInfo._context;\"hover\"===n.displayModeBar?this.element.className=\"modebar modebar--hover\":this.element.className=\"modebar\";var r=!this.hasButtons(e),a=this.hasLogo!==n.displaylogo;(r||a)&&(this.removeAllButtons(),this.updateButtons(e),n.displaylogo&&(this.element.appendChild(this.getLogo()),this.hasLogo=!0)),this.updateActiveButton()},s.updateButtons=function(t){var e=this;this.buttons=t,this.buttonElements=[],this.buttonsNames=[],this.buttons.forEach(function(t){var n=e.createGroup();t.forEach(function(t){var r=t.name;if(!r)throw new Error(\"must provide button 'name' in button config\");if(-1!==e.buttonsNames.indexOf(r))throw new Error(\"button name '\"+r+\"' is taken\");e.buttonsNames.push(r);var a=e.createButton(t);e.buttonElements.push(a),n.appendChild(a)}),e.element.appendChild(n)})},s.createGroup=function(){var t=document.createElement(\"div\");return t.className=\"modebar-group\",t},s.createButton=function(t){var e=this,n=document.createElement(\"a\");n.setAttribute(\"rel\",\"tooltip\"),n.className=\"modebar-btn\";var r=t.title;void 0===r&&(r=t.name),(r||0===r)&&n.setAttribute(\"data-title\",r),void 0!==t.attr&&n.setAttribute(\"data-attr\",t.attr);var a=t.val;void 0!==a&&(\"function\"==typeof a&&(a=a(this.graphInfo)),n.setAttribute(\"data-val\",a));var o=t.click;if(\"function\"!=typeof o)throw new Error(\"must provide button 'click' function in button config\");return n.addEventListener(\"click\",function(n){t.click(e.graphInfo,n),e.updateActiveButton(n.currentTarget)}),n.setAttribute(\"data-toggle\",t.toggle||!1),t.toggle&&n.classList.add(\"active\"),n.appendChild(this.createIcon(t.icon||l.question)),n.setAttribute(\"data-gravity\",t.gravity||\"n\"),n},s.createIcon=function(t){var e=t.ascent-t.descent,n=\"http://www.w3.org/2000/svg\",r=document.createElementNS(n,\"svg\"),a=document.createElementNS(n,\"path\");return r.setAttribute(\"height\",\"1em\"),r.setAttribute(\"width\",t.width/e+\"em\"),r.setAttribute(\"viewBox\",[0,0,t.width,e].join(\" \")),a.setAttribute(\"d\",t.path),a.setAttribute(\"transform\",\"matrix(1 0 0 -1 0 \"+t.ascent+\")\"),r.appendChild(a),r},s.updateActiveButton=function(t){var e=this.graphInfo._fullLayout,n=void 0!==t?t.getAttribute(\"data-attr\"):null;this.buttonElements.forEach(function(t){var r=t.getAttribute(\"data-val\")||!0,a=t.getAttribute(\"data-attr\"),l=\"true\"===t.getAttribute(\"data-toggle\"),s=o.select(t);if(l)a===n&&s.classed(\"active\",!s.classed(\"active\"));else{var c=null===a?a:i.nestedProperty(e,a).get();s.classed(\"active\",c===r)}})},s.hasButtons=function(t){var e=this.buttons;if(!e)return!1;if(t.length!==e.length)return!1;for(var 
n=0;n<t.length;++n){if(t[n].length!==e[n].length)return!1;for(var r=0;r<t[n].length;r++)if(t[n][r].name!==e[n][r].name)return!1}return!0},s.getLogo=function(){var t="
+,
+"this.createGroup(),e=document.createElement(\"a\");return e.href=\"https://plot.ly/\",e.target=\"_blank\",e.setAttribute(\"data-title\",\"Produced with Plotly\"),e.className=\"modebar-btn plotlyjsicon modebar-btn--logo\",e.appendChild(this.createIcon(l.plotlylogo)),t.appendChild(e),t},s.removeAllButtons=function(){for(;this.element.firstChild;)this.element.removeChild(this.element.firstChild);this.hasLogo=!1},s.destroy=function(){i.removeElement(this.container.querySelector(\".modebar\"))},e.exports=a},{\"../../../build/ploticon\":2,\"../../lib\":89,d3:9}],65:[function(t,e,n){\"use strict\";function r(t,e,n){function r(t){for(var n=[],r=0;r<t.length;r++){var a=t[r];-1===e.indexOf(a)&&n.push(f[a])}v.push(n)}var l=t._fullLayout,s=t._fullData,c=l._has(\"cartesian\"),u=l._has(\"gl3d\"),d=l._has(\"geo\"),h=l._has(\"pie\"),p=l._has(\"gl2d\"),g=l._has(\"ternary\"),v=[];if(r([\"toImage\",\"sendDataToCloud\"]),(c||p||h||g)+d+u>1)return r([\"resetViews\",\"toggleHover\"]),i(v,n);u&&(r([\"zoom3d\",\"pan3d\",\"orbitRotation\",\"tableRotation\"]),r([\"resetCameraDefault3d\",\"resetCameraLastSave3d\"]),r([\"hoverClosest3d\"])),d&&(r([\"zoomInGeo\",\"zoomOutGeo\",\"resetGeo\"]),r([\"hoverClosestGeo\"]));var m=a(l),y=[];return((c||p)&&!m||g)&&(y=[\"zoom2d\",\"pan2d\"]),(c||g)&&o(s)&&(y.push(\"select2d\"),y.push(\"lasso2d\")),y.length&&r(y),!c&&!p||m||g||r([\"zoomIn2d\",\"zoomOut2d\",\"autoScale2d\",\"resetScale2d\"]),c&&h?r([\"toggleHover\"]):p?r([\"hoverClosestGl2d\"]):c?r([\"hoverClosestCartesian\",\"hoverCompareCartesian\"]):h&&r([\"hoverClosestPie\"]),i(v,n)}function a(t){for(var e=s.Axes.list({_fullLayout:t},null,!0),n=!0,r=0;r<e.length;r++)if(!e[r].fixedrange){n=!1;break}return n}function o(t){for(var e=!1,n=0;n<t.length&&!e;n++){var r=t[n];r._module&&r._module.selectPoints&&(\"scatter\"===r.type||\"scatterternary\"===r.type?(c.hasMarkers(r)||c.hasText(r))&&(e=!0):e=!0)}return e}function i(t,e){if(e.length)if(Array.isArray(e[0]))for(var n=0;n<e.length;n++)t.push(e[n]);else t.push(e);return t}function l(t){for(var e=0;e<t.length;e++)for(var n=t[e],r=0;r<n.length;r++){var a=n[r];if(\"string\"==typeof a){if(void 0===f[a])throw new Error([\"*modeBarButtons* configuration options\",\"invalid button name\"].join(\" \"));t[e][r]=f[a]}}return t}var s=t(\"../../plotly\"),c=t(\"../../traces/scatter/subtypes\"),u=t(\"./\"),f=t(\"./buttons\");e.exports=function(t){var e=t._fullLayout,n=t._context,a=e._modeBar;if(!n.displayModeBar)return void(a&&(a.destroy(),delete e._modeBar));if(!Array.isArray(n.modeBarButtonsToRemove))throw new Error([\"*modeBarButtonsToRemove* configuration options\",\"must be an array.\"].join(\" \"));if(!Array.isArray(n.modeBarButtonsToAdd))throw new Error([\"*modeBarButtonsToAdd* configuration options\",\"must be an array.\"].join(\" \"));var o,i=n.modeBarButtons;o=Array.isArray(i)&&i.length?l(i):r(t,n.modeBarButtonsToRemove,n.modeBarButtonsToAdd),a?a.update(t,o):e._modeBar=u(t,o)}},{\"../../plotly\":107,\"../../traces/scatter/subtypes\":186,\"./\":64,\"./buttons\":63}],66:[function(t,e,n){\"use strict\";var 
r=t(\"../../plots/font_attributes\"),a=t(\"../color/attributes\"),o=t(\"../../lib/extend\").extendFlat,i=t(\"./button_attributes\");i=o(i,{_isLinkedToArray:!0}),e.exports={visible:{valType:\"boolean\"},buttons:i,x:{valType:\"number\",min:-2,max:3},xanchor:{valType:\"enumerated\",values:[\"auto\",\"left\",\"center\",\"right\"],dflt:\"left\"},y:{valType:\"number\",min:-2,max:3},yanchor:{valType:\"enumerated\",values:[\"auto\",\"top\",\"middle\",\"bottom\"],dflt:\"bottom\"},font:o({},r,{}),bgcolor:{valType:\"color\",dflt:a.lightLine},bordercolor:{valType:\"color\",dflt:a.defaultLine},borderwidth:{valType:\"number\",min:0,dflt:0}}},{\"../../lib/extend\":88,\"../../plots/font_attributes\":128,\"../color/attributes\":17,\"./button_attributes\":67}],67:[function(t,e,n){\"use strict\";e.exports={step:{valType:\"enumerated\",values:[\"month\",\"year\",\"day\",\"hour\",\"minute\",\"second\",\"all\"],dflt:\"month\"},stepmode:{valType:\"enumerated\",values:[\"backward\",\"todate\"],dflt:\"backward\"},count:{valType:\"number\",min:0,dflt:1},label:{valType:\"string\"}}},{}],68:[function(t,e,n){\"use strict\";e.exports={yPad:.02,minButtonWidth:30,rx:3,ry:3,activeColor:\"#d3d3d3\"}},{}],69:[function(t,e,n){\"use strict\";function r(t,e){function n(t,e){return o.coerce(r,a,l,t,e)}for(var r,a,i=t.buttons||[],s=e.buttons=[],c=0;c<i.length;c++){r=i[c],a={};var u=n(\"step\");\"all\"!==u&&(n(\"stepmode\"),n(\"count\")),n(\"label\"),s.push(a)}return s}function a(t,e,n){for(var r=n.filter(function(n){return e[n].anchor===t._id}),a=0,o=0;o<r.length;o++)a=Math.max(e[r[o]].domain[1],a);return[t.domain[0],a+s.yPad]}var o=t(\"../../lib\"),i=t(\"./attributes\"),l=t(\"./button_attributes\"),s=t(\"./constants\");e.exports=function(t,e,n,l){function s(t,e){return o.coerce(c,u,i,t,e)}var c=t.rangeselector||{},u=e.rangeselector={},f=r(c,u),d=s(\"visible\",f.length>0);if(d){var h=a(e,n,l);s(\"x\",h[0]),s(\"y\",h[1]),o.noneOrAll(t,e,[\"x\",\"y\"]),s(\"xanchor\"),s(\"yanchor\"),o.coerceFont(s,\"font\",n.font),s(\"bgcolor\"),s(\"bordercolor\"),s(\"borderwidth\")}}},{\"../../lib\":89,\"./attributes\":66,\"./button_attributes\":67,\"./constants\":68}],70:[function(t,e,n){\"use strict\";function r(t){for(var e=m.list(t,\"x\",!0),n=[],r=0;r<e.length;r++){var a=e[r];a.rangeselector&&a.rangeselector.visible&&n.push(a)}return n}function a(t){return t._id}function o(t,e,n){if(\"all\"===e.step)return t.autorange===!0;var r=Object.keys(n);return t.range[0]===n[r[0]]&&t.range[1]===n[r[1]]}function i(t,e,n){var r=t.selectAll(\"rect\").data([0]);r.enter().append(\"rect\").classed(\"selector-rect\",!0),r.attr(\"shape-rendering\",\"crispEdges\"),r.attr({rx:x.rx,ry:x.ry}),r.call(p.stroke,e.bordercolor).call(p.fill,l(e,n)).style(\"stroke-width\",e.borderwidth+\"px\")}function l(t,e){return e.isActive||e.isHovered?x.activeColor:t.bgcolor}function s(t,e,n){function r(t){v.convertToTspans(t)}var a=t.selectAll(\"text\").data([0]);a.enter().append(\"text\").classed(\"selector-text\",!0).classed(\"user-select-none\",!0),a.attr(\"text-anchor\",\"middle\"),a.call(g.font,e.font).text(c(n)).call(r)}function c(t){return t.label?t.label:\"all\"===t.step?\"all\":t.count+t.step.charAt(0)}function u(t,e,n,r){n.width=0,n.height=0;var a=n.borderwidth;e.each(function(){var t=f.select(this),e=t.select(\".selector-text\"),r=e.selectAll(\"tspan\"),a=1.3*n.font.size,o=r[0].length||1,i=Math.max(a*o,16)+3;n.height=Math.max(n.height,i)}),e.each(function(){var 
t=f.select(this),e=t.select(\".selector-rect\"),r=t.select(\".selector-text\"),o=r.selectAll(\"tspan\"),i=r.node()&&g.bBox(r.node()).width,l=1.3*n.font.size,s=o[0].length||1,c=Math.max(i+10,x.minButtonWidth);t.attr(\"transform\",\"translate(\"+(a+n.width)+\",\"+a+\")\"),e.attr({x:0,y:0,width:c,height:n.height});var u={x:c/2,y:n.height/2-(s-1)*l/2+3};r.attr(u),o.attr(u),n.width+=c+5}),e.selectAll(\"rect\").attr(\"height\",n.height);var o=t._fullLayout._size;n.lx=o.l+o.w*n.x,n.ly=o.t+o.h*(1-n.y);var i=\"left\";y.isRightAnchor(n)&&(n.lx-=n.width,i=\"right\"),y.isCenterAnchor(n)&&(n.lx-=n.width/2,i=\"center\");var l=\"top\";y.isBottomAnchor(n)&&(n.ly-=n.height,l=\"bottom\"),y.isMiddleAnchor(n)&&(n.ly-=n.height/2,l=\"middle\"),n.width=Math.ceil(n.width),n.height=Math.ceil(n.height),n.lx=Math.round(n.lx),n.ly=Math.round(n.ly),h.autoMargin(t,r+\"-range-selector\",{x:n.x,y:n.y,l:n.width*({right:1,center:.5}[i]||0),r:n.width*({left:1,center:.5}[i]||0),b:n.height*({top:1,middle:.5}[l]||0),t:n.height*({bottom:1,middle:.5}[l]||0)})}var f=t(\"d3\"),d=t(\"../../plotly\"),h=t(\"../../plots/plots\"),p=t(\"../color\"),g=t(\"../drawing\"),v=t(\"../../lib/svg_text_utils\"),m=t(\"../../plots/cartesian/axis_ids\"),y=t(\"../legend/anchor_utils\"),x=t(\"./constants\"),b=t(\"./get_update_object\");e.exports=function(t){var e=t._fullLayout,n=e._infolayer.selectAll(\".rangeselector\").data(r(t),a);n.enter().append(\"g\").classed(\"rangeselector\",!0),n.exit().remove(),n.style({cursor:\"pointer\",\"pointer-events\":\"all\"}),n.each(function(e){var n=f.select(this),r=e,a=r.rangeselector,l=n.selectAll(\"g.button\").data(a.buttons);l.enter().append(\"g\").classed(\"button\",!0),l.exit().remove(),l.each(function(e){var n=f.select(this),l=b(r,e);e.isActive=o(r,e,l),n.call(i,a,e),n.call(s,a,e),n.on(\"click\",function(){t._dragged||d.relayout(t,l)}),n.on(\"mouseover\",function(){e.isHovered=!0,n.call(i,a,e)}),n.on(\"mouseout\",function(){e.isHovered=!1,n.call(i,a,e)})}),u(t,l,a,r._name),n.attr(\"transform\",\"translate(\"+a.lx+\",\"+a.ly+\")\")})}},{\"../../lib/svg_text_utils\":100,\"../../plotly\":107,\"../../plots/cartesian/axis_ids\":112,\"../../plots/plots\":130,\"../color\":18,\"../drawing\":41,\"../legend/anchor_utils\":54,\"./constants\":68,\"./get_update_object\":71,d3:9}],71:[function(t,e,n){\"use strict\";function r(t,e){var n,r=t.range,o=new Date(r[1]),i=e.step,l=e.count;switch(e.stepmode){case\"backward\":n=a.time[i].offset(o,-l).getTime();break;case\"todate\":var s=a.time[i].offset(o,-(l-1));n=a.time[i].floor(s).getTime()}var c=r[1];return[n,c]}var a=t(\"d3\");e.exports=function(t,e){var n=t._name,a={};if(\"all\"===e.step)a[n+\".autorange\"]=!0;else{var o=r(t,e);a[n+\".range[0]\"]=o[0],a[n+\".range[1]\"]=o[1]}return a}},{d3:9}],72:[function(t,e,n){\"use strict\";n.attributes=t(\"./attributes\"),n.supplyLayoutDefaults=t(\"./defaults\"),n.draw=t(\"./draw\")},{\"./attributes\":66,\"./defaults\":69,\"./draw\":70}],73:[function(t,e,n){\"use strict\";var r=t(\"../color/attributes\");e.exports={bgcolor:{valType:\"color\",dflt:r.background},bordercolor:{valType:\"color\",dflt:r.defaultLine},borderwidth:{valType:\"integer\",dflt:0,min:0},range:{valType:\"info_array\",items:[{valType:\"number\"},{valType:\"number\"}]},thickness:{valType:\"number\",dflt:.15,min:0,max:1},visible:{valType:\"boolean\",dflt:!0}}},{\"../color/attributes\":17}],74:[function(t,e,n){\"use strict\";var 
r=t(\"../../plotly\"),a=t(\"../../plots/cartesian/axes\"),o=t(\"../../lib\"),i=t(\"../../constants/xmlns_namespaces\").svg,l=t(\"./helpers\"),s=t(\"./range_plot\");e.exports=function(t){function e(t){var e=h.range[0],n=h.range[1],r=n-e,a=t/p*r+e;return a=o.constrain(a,e,n)}function n(t,e){t=t||-(1/0),e=e||1/0;var n=h.range[0],r=h.range[1],a=r-n,o=(t-n)/a*p,i=(e-n)/a*p;u(o,i)}function c(e,n){window.requestAnimationFrame?window.requestAnimationFrame(function(){r.relayout(t,\"xaxis.range\",[e,n])}):setTimeout(function(){r.relayout(t,\"xaxis.range\",[e,n])},16)}"
+,
+"function u(t,e){if(t=o.constrain(t,0,p),e=o.constrain(e,0,p),t>e){var n=e;e=t,t=n}l.setAttributes(w,{\"data-min\":t,\"data-max\":e}),l.setAttributes(P,{x:t,width:e-t}),l.setAttributes(A,{width:t}),l.setAttributes(L,{x:e,width:p-e}),l.setAttributes(T,{transform:\"translate(\"+(t-v-1)+\")\"}),l.setAttributes(E,{transform:\"translate(\"+e+\")\"})}var f=t._fullLayout,d=f._infolayer.selectAll(\"g.range-slider\"),h=f.xaxis.rangeslider,p=f._size.w,g=(f.height-f.margin.b-f.margin.t)*h.thickness,v=2,m=Math.floor(h.borderwidth/2),y=f.margin.l,x=f.height-g-f.margin.b,b=0,_=p,w=document.createElementNS(i,\"g\");l.setAttributes(w,{\"class\":\"range-slider\",\"data-min\":b,\"data-max\":_,\"pointer-events\":\"all\",transform:\"translate(\"+y+\",\"+x+\")\"});var k=document.createElementNS(i,\"rect\"),M=h.borderwidth%2===0?h.borderwidth:h.borderwidth-1;l.setAttributes(k,{fill:h.bgcolor,stroke:h.bordercolor,\"stroke-width\":h.borderwidth,height:g+M,width:p+M,transform:\"translate(-\"+m+\", -\"+m+\")\",\"shape-rendering\":\"crispEdges\"});var A=document.createElementNS(i,\"rect\");l.setAttributes(A,{x:0,width:b,height:g,fill:\"rgba(0,0,0,0.4)\"});var L=document.createElementNS(i,\"rect\");l.setAttributes(L,{x:_,width:p-_,height:g,fill:\"rgba(0,0,0,0.4)\"});var T=document.createElementNS(i,\"g\"),z=document.createElementNS(i,\"rect\"),S=document.createElementNS(i,\"rect\");l.setAttributes(T,{transform:\"translate(\"+(b-v-1)+\")\"}),l.setAttributes(z,{width:10,height:g,x:-6,fill:\"transparent\",cursor:\"col-resize\"}),l.setAttributes(S,{width:v,height:g/2,y:g/4,rx:1,fill:\"white\",stroke:\"#666\",\"shape-rendering\":\"crispEdges\"}),l.appendChildren(T,[S,z]);var E=document.createElementNS(i,\"g\"),C=document.createElementNS(i,\"rect\"),O=document.createElementNS(i,\"rect\");l.setAttributes(E,{transform:\"translate(\"+_+\")\"}),l.setAttributes(C,{width:10,height:g,x:-2,fill:\"transparent\",cursor:\"col-resize\"}),l.setAttributes(O,{width:v,height:g/2,y:g/4,rx:1,fill:\"white\",stroke:\"#666\",\"shape-rendering\":\"crispEdges\"}),l.appendChildren(E,[O,C]);var P=document.createElementNS(i,\"rect\");\n"
+,
+"l.setAttributes(P,{x:b,width:_-b,height:g,cursor:\"ew-resize\",fill:\"transparent\"}),w.addEventListener(\"mousedown\",function(t){function n(t){var n,r,f=+t.clientX-o;switch(a){case P:w.style.cursor=\"ew-resize\",n=+l+f,r=+s+f,u(n,r),c(e(n),e(r));break;case z:w.style.cursor=\"col-resize\",n=+l+f,r=+s,u(n,r),c(e(n),e(r));break;case C:w.style.cursor=\"col-resize\",n=+l,r=+s+f,u(n,r),c(e(n),e(r));break;default:w.style.cursor=\"ew-resize\",n=i,r=i+f,u(n,r),c(e(n),e(r))}}function r(){window.removeEventListener(\"mousemove\",n),window.removeEventListener(\"mouseup\",r),w.style.cursor=\"auto\"}var a=t.target,o=t.clientX,i=o-w.getBoundingClientRect().left,l=w.getAttribute(\"data-min\"),s=w.getAttribute(\"data-max\");window.addEventListener(\"mousemove\",n),window.addEventListener(\"mouseup\",r)}),h.range||(h.range=a.getAutoRange(f.xaxis));var N=s(t,p,g);l.appendChildren(w,[k,N,A,L,P,T,E]),n(f.xaxis.range[0],f.xaxis.range[1]),d.data([0]).enter().append(function(){return h.setRange=n,w})}},{\"../../constants/xmlns_namespaces\":82,\"../../lib\":89,\"../../plotly\":107,\"../../plots/cartesian/axes\":110,\"./helpers\":76,\"./range_plot\":78}],75:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"./attributes\");e.exports=function(t,e,n,o){function i(t,e){return r.coerce(l,s,a,t,e)}if(t[n].rangeslider){var l=\"object\"==typeof t[n].rangeslider?t[n].rangeslider:{},s=e[n].rangeslider={};if(i(\"bgcolor\"),i(\"bordercolor\"),i(\"borderwidth\"),i(\"thickness\"),i(\"visible\"),i(\"range\"),s.range&&!e[n].autorange){var c=s.range,u=e[n].range;c[0]=Math.min(c[0],u[0]),c[1]=Math.max(c[1],u[1])}else e[n]._needsExpand=!0;s.visible&&o.forEach(function(t){var n=e[t]||{};n.fixedrange=!0,e[t]=n})}}},{\"../../lib\":89,\"./attributes\":73}],76:[function(t,e,n){\"use strict\";n.setAttributes=function(t,e){for(var n in e)t.setAttribute(n,e[n])},n.appendChildren=function(t,e){for(var n=0;n<e.length;n++)e[n]&&t.appendChild(e[n])}},{}],77:[function(t,e,n){\"use strict\";function r(t){if(t._fullLayout.xaxis){var e=t._fullLayout,n=e._infolayer.selectAll(\"g.range-slider\"),r=e.xaxis.rangeslider;if(!r||!r.visible)return n.data([]).exit().remove(),void a.autoMargin(t,\"range-slider\");var i=(e.height-e.margin.b-e.margin.t)*r.thickness,l=Math.floor(r.borderwidth/2);0!==n[0].length||e._has(\"gl2d\")||o(t);var s=e.xaxis._boundingBox?e.xaxis._boundingBox.height:0;a.autoMargin(t,\"range-slider\",{x:0,y:0,l:0,r:0,t:0,b:i+e.margin.b+s,pad:15+2*l})}}var a=t(\"../../plots/plots\"),o=t(\"./create_slider\"),i=t(\"./defaults\");e.exports={draw:r,supplyLayoutDefaults:i}},{\"../../plots/plots\":130,\"./create_slider\":74,\"./defaults\":75}],78:[function(t,e,n){\"use strict\";function r(t,e){for(var n=e.makeCalcdata(t||[],e._id[0]),r=0;r<n.length;r++)n[r]=e.c2l(n[r]);return n}function a(t,e,n,r){var a,o,f;if(t.line){a=document.createElementNS(u,\"path\");var d=s.smoothopen(e,t.line.smoothing||0);c.setAttributes(a,{d:d,fill:\"none\",stroke:t.line?t.line.color:\"transparent\",\"stroke-width\":t.line.width/2||1,opacity:1})}if(t.marker){o=document.createElementNS(u,\"g\");var h=e.map(function(e,n){var r,a=document.createElementNS(u,\"g\"),o=document.createElementNS(u,\"path\");return r=Array.isArray(t.marker.size)?\"number\"==typeof 
t.marker.size[n]?Math.max(t.marker.size[n]/(t.marker.sizeref||1)/15,0):0:Math.max(t.marker.size/15,2),c.setAttributes(o,{d:l[t.marker.symbol].f(r),fill:t.marker.color,stroke:t.marker.line.color,\"stroke-width\":t.marker.line.width,opacity:t.marker.opacity}),c.setAttributes(a,{transform:\"translate(\"+e[0]+\",\"+e[1]+\")\"}),a.appendChild(o),a});c.appendChildren(o,h)}if(\"none\"!==t.fill){switch(f=document.createElementNS(u,\"path\"),t.fill){case\"tozeroy\":e.unshift([e[0][0],r]),e.push([e[e.length-1][0],r]);break;case\"tozerox\":e.unshift([0,e[e.length-1][1]]);break;default:i.warn(\"Fill type \"+t.fill+\" not supported for range slider! (yet...)\")}var p=s.smoothopen(e,t.line.smoothing||0);c.setAttributes(f,{d:p,fill:t.fillcolor||\"transparent\"})}return[a,o,f]}var o=t(\"d3\"),i=t(\"../../lib\"),l=t(\"../drawing/symbol_defs\"),s=t(\"../drawing\"),c=t(\"./helpers\"),u=t(\"../../constants/xmlns_namespaces\").svg;e.exports=function f(t,e,n){var l=t._fullLayout,d=t._fullData,h=l.xaxis,p=l.yaxis,g=h.rangeslider.range[0],v=h.rangeslider.range[1],m=p.range[0],y=p.range[1],x=document.createElementNS(u,\"path\");x.setAttribute(\"d\",[\"M0,0\",e+\",0\",e+\",\"+n,\"0,\"+n,\"Z\"].join(\" \"));var b=document.createElementNS(u,\"clipPath\");b.setAttribute(\"id\",\"range-clip-path\"),b.appendChild(x);var _=document.createElementNS(u,\"defs\");_.appendChild(b);var f=document.createElementNS(u,\"g\");o.select(f).call(s.setClipUrl,\"range-clip-path\"),f.appendChild(_);for(var w=[\"scatter\"],k=0;k<d.length;k++){var M=d[k],A=[];if(w.indexOf(M.type)<0)i.warn(\"Trace type \"+M.type+\" not supported for range slider!\");else{for(var L=r(M,h),T=r(M,p),z=0;z<L.length;z++){var S=e*(L[z]-g)/(v-g),E=n*(1-(T[z]-m)/(y-m));isNaN(S)||isNaN(E)||A.push([S,E])}c.appendChildren(f,a(M,A,e,n))}}return f}},{\"../../constants/xmlns_namespaces\":82,\"../../lib\":89,\"../drawing\":41,\"../drawing/symbol_defs\":42,\"./helpers\":76,d3:9}],79:[function(t,e,n){\"use strict\";var r=t(\"../annotations/attributes\"),a=t(\"../../traces/scatter/attributes\"),o=t(\"../../lib/extend\").extendFlat,i=a.line;e.exports={_isLinkedToArray:!0,type:{valType:\"enumerated\",values:[\"circle\",\"rect\",\"path\",\"line\"]},layer:{valType:\"enumerated\",values:[\"below\",\"above\"],dflt:\"above\"},xref:o({},r.xref,{}),x0:{valType:\"any\"},x1:{valType:\"any\"},yref:o({},r.yref,{}),y0:{valType:\"any\"},y1:{valType:\"any\"},path:{valType:\"string\"},opacity:{valType:\"number\",min:0,max:1,dflt:1},line:{color:i.color,width:i.width,dash:i.dash},fillcolor:{valType:\"color\",dflt:\"rgba(0,0,0,0)\"}}},{\"../../lib/extend\":88,\"../../traces/scatter/attributes\":167,\"../annotations/attributes\":15}],80:[function(t,e,n){\"use strict\";function r(t,e){function n(e,n){return M.coerce(t,r,E.layoutAttributes,e,n)}var r={};n(\"layer\"),n(\"opacity\"),n(\"fillcolor\"),n(\"line.color\"),n(\"line.width\"),n(\"line.dash\");for(var a=t.path?\"path\":\"rect\",i=n(\"type\",a),l=[\"x\",\"y\"],s=0;2>s;s++){var c=l[s],u={_fullLayout:e},f=A.coerceRef(t,r,u,c);if(\"path\"!==i){var d=.25,h=.75;if(\"paper\"!==f){var p=A.getFromId(u,f),g=o(p);d=g(p.range[0]+d*(p.range[1]-p.range[0])),h=g(p.range[0]+h*(p.range[1]-p.range[0]))}n(c+\"0\",d),n(c+\"1\",h)}}return\"path\"===i?n(\"path\"):M.noneOrAll(t,r,[\"x0\",\"x1\",\"y0\",\"y1\"]),r}function a(t){return\"category\"===t.type?t.c2l:t.d2l}function o(t){return\"category\"===t.type?t.l2c:t.l2d}function i(t,e){t.layout.shapes=e,E.supplyLayoutDefaults(t.layout,t._fullLayout),E.drawAll(t)}function l(t){delete 
t.layout.shapes,t._fullLayout.shapes=[],E.drawAll(t)}function s(t,e,n){for(var r=0;r<t._fullLayout.shapes.length;r++)E.draw(t,r,e,n)}function c(t,e){h(t,e).selectAll('[data-index=\"'+e+'\"]').remove(),t._fullLayout.shapes.splice(e,1),t.layout.shapes.splice(e,1);for(var n=e;n<t._fullLayout.shapes.length;n++)h(t,n).selectAll('[data-index=\"'+(n+1)+'\"]').attr(\"data-index\",n),E.draw(t,n)}function u(t,e,n){t._fullLayout.shapes.splice(e,0,{});var r=M.isPlainObject(n)?M.extendFlat({},n):{text:\"New text\"};t.layout.shapes?t.layout.shapes.splice(e,0,r):t.layout.shapes=[r];for(var a=t._fullLayout.shapes.length-1;a>e;a--)h(t,a).selectAll('[data-index=\"'+(a-1)+'\"]').attr(\"data-index\",a),E.draw(t,a)}function f(t,e,n,i){function l(n){var r={\"data-index\":e,\"fill-rule\":\"evenodd\",d:x(t,E)},a=E.line.width?E.line.color:\"rgba(0,0,0,0)\",o=n.append(\"path\").attr(r).style(\"opacity\",E.opacity).call(L.stroke,a).call(L.fill,E.fillcolor).call(T.dashLine,E.line.dash,E.line.width);C&&o.call(T.setClipUrl,\"clip\"+t._fullLayout._uid+C),t._context.editable&&d(t,o,E,e)}var s,c;h(t,e).selectAll('[data-index=\"'+e+'\"]').remove();var u=t.layout.shapes[e];if(u){var f={xref:u.xref,yref:u.yref},g={};\"string\"==typeof n&&n?g[n]=i:M.isPlainObject(n)&&(g=n);var v=Object.keys(g);for(s=0;s<v.length;s++){var m=v[s];M.nestedProperty(u,m).set(g[m])}var y=[\"x0\",\"x1\",\"y0\",\"y1\"];for(s=0;4>s;s++){var b=y[s];if(void 0===g[b]&&void 0!==u[b]){var _,w=b.charAt(0),k=A.getFromId(t,A.coerceRef(f,{},t,w)),z=A.getFromId(t,A.coerceRef(u,{},t,w)),S=u[b];void 0!==g[w+\"ref\"]&&(k?(_=a(k)(S),S=(_-k.range[0])/(k.range[1]-k.range[0])):S=(S-z.domain[0])/(z.domain[1]-z.domain[0]),z?(_=z.range[0]+S*(z.range[1]-z.range[0]),S=o(z)(_)):S=k.domain[0]+S*(k.domain[1]-k.domain[0])),u[b]=S}}var E=r(u,t._fullLayout);t._fullLayout.shapes[e]=E;var C;if(\"below\"!==E.layer)C=(E.xref+E.yref).replace(/paper/g,\"\"),l(t._fullLayout._shapeUpperLayer);else if(\"paper\"===E.xref&&\"paper\"===E.yref)C=\"\",l(t._fullLayout._shapeLowerLayer);else{var O,P=t._fullLayout._plots||{},N=Object.keys(P);for(s=0,c=N.length;c>s;s++)O=P[N[s]],C=N[s],p(t,E,O)&&l(O.shapelayer)}}}function d(t,e,n,r){function a(t){var n=W.right-W.left,r=W.bottom-W.top,a=t.clientX-W.left,o=t.clientY-W.top,i=n>G&&r>$&&!t.shiftKey?z.getCursor(a/n,1-o/r):\"move\";S(e,i),X=i.split(\"-\")[0]}function o(e){B=A.getFromId(t,n.xref),H=A.getFromId(t,n.yref),V=m(t,B),Z=m(t,H,!0),Y=y(t,B),U=y(t,H,!0);var o=\"shapes[\"+r+\"]\";\"path\"===n.type?(q=n.path,F=o+\".path\"):(u=V(n.x0),f=Z(n.y0),d=V(n.x1),h=Z(n.y1),p=o+\".x0\",g=o+\".y0\",_=o+\".x1\",w=o+\".y1\"),d>u?(T=u,P=o+\".x0\",R=\"x0\",E=d,N=o+\".x1\",j=\"x1\"):(T=d,P=o+\".x1\",R=\"x1\",E=u,N=o+\".x0\",j=\"x0\"),h>f?(M=f,C=o+\".y0\",D=\"y0\",L=h,O=o+\".y1\",I=\"y1\"):(M=h,C=o+\".y1\",D=\"y1\",L=f,O=o+\".y0\",I=\"y0\"),c={},a(e),Q.moveFn=\"move\"===X?l:s}function i(n){S(e),n&&k.relayout(t,c)}function l(r,a){if(\"path\"===n.type){var o=function(t){return Y(V(t)+r)};B&&\"date\"===B.type&&(o=v(o));var i=function(t){return U(Z(t)+a)};H&&\"date\"===H.type&&(i=v(i)),n.path=b(q,o,i),c[F]=n.path}else c[p]=n.x0=Y(u+r),c[g]=n.y0=U(f+a),c[_]=n.x1=Y(d+r),c[w]=n.y1=U(h+a);e.attr(\"d\",x(t,n))}function s(r,a){if(\"path\"===n.type){var o=function(t){return Y(V(t)+r)};B&&\"date\"===B.type&&(o=v(o));var i=function(t){return U(Z(t)+a)};H&&\"date\"===H.type&&(i=v(i)),n.path=b(q,o,i),c[F]=n.path}else{var 
l=~X.indexOf(\"n\")?M+a:M,s=~X.indexOf(\"s\")?L+a:L,u=~X.indexOf(\"w\")?T+r:T,f=~X.indexOf(\"e\")?E+r:E;s-l>$&&(c[C]=n[D]=U(l),c[O]=n[I]=U(s)),f-u>G&&(c[P]=n[R]=Y(u),c[N]=n[j]=Y(f))}e.attr(\"d\",x(t,n))}var c,u,f,d,h,p,g,_,w,M,L,T,E,C,O,P,N,D,I,R,j,q"
+,
+",F,B,H,V,Z,Y,U,X,G=10,$=10,Q={setCursor:a,element:e.node(),prepFn:o,doneFn:i},W=Q.element.getBoundingClientRect();z.init(Q)}function h(t,e){var n=t._fullLayout.shapes[e],r=t._fullLayout._shapeUpperLayer;return n?\"below\"===n.layer&&(r=\"paper\"===n.xref&&\"paper\"===n.yref?t._fullLayout._shapeLowerLayer:t._fullLayout._shapeSubplotLayer):M.log(\"getShapeLayer: undefined shape: index\",e),r}function p(t,e,n){var r=k.Axes.getFromId(t,n.id,\"x\")._id,a=k.Axes.getFromId(t,n.id,\"y\")._id,o=\"below\"===e.layer,i=r===e.xref||a===e.yref,l=!!n.shapelayer;return o&&i&&l}function g(t){return function(e){return e.replace&&(e=e.replace(\"_\",\" \")),t(e)}}function v(t){return function(e){return t(e).replace(\" \",\"_\")}}function m(t,e,n){var r,o=t._fullLayout._size;if(e){var i=a(e);r=function(t){return e._offset+e.l2p(i(t,!0))},\"date\"===e.type&&(r=g(r))}else r=n?function(t){return o.t+o.h*(1-t)}:function(t){return o.l+o.w*t};return r}function y(t,e,n){var r,a=t._fullLayout._size;if(e){var i=o(e);r=function(t){return i(e.p2l(t-e._offset))}}else r=n?function(t){return 1-(t-a.t)/a.h}:function(t){return(t-a.l)/a.w};return r}function x(t,e){var n,r,o,i,l=e.type,s=A.getFromId(t,e.xref),c=A.getFromId(t,e.yref),u=t._fullLayout._size;if(s?(n=a(s),r=function(t){return s._offset+s.l2p(n(t,!0))}):r=function(t){return u.l+u.w*t},c?(o=a(c),i=function(t){return c._offset+c.l2p(o(t,!0))}):i=function(t){return u.t+u.h*(1-t)},\"path\"===l)return s&&\"date\"===s.type&&(r=g(r)),c&&\"date\"===c.type&&(i=g(i)),E.convertPath(e.path,r,i);var f=r(e.x0),d=r(e.x1),h=i(e.y0),p=i(e.y1);if(\"line\"===l)return\"M\"+f+\",\"+h+\"L\"+d+\",\"+p;if(\"rect\"===l)return\"M\"+f+\",\"+h+\"H\"+d+\"V\"+p+\"H\"+f+\"Z\";var v=(f+d)/2,m=(h+p)/2,y=Math.abs(v-f),x=Math.abs(m-h),b=\"A\"+y+\",\"+x,_=v+y+\",\"+m,w=v+\",\"+(m-x);return\"M\"+_+b+\" 0 1,1 \"+w+b+\" 0 0,1 \"+_+\"Z\"}function b(t,e,n){return t.replace(C,function(t){var r=0,a=t.charAt(0),o=P[a],i=N[a],l=D[a],s=t.substr(1).replace(O,function(t){return r>=l?t:(o[r]?t=e(t):i[r]&&(t=n(t)),r++,t)});return a+s})}function _(t,e,n,r,a){var o=\"category\"===t.type?Number:t.d2c;if(void 0!==e)return[o(e),o(n)];if(r){var i,l,s,c,u,f=1/0,d=-(1/0),h=r.match(C);for(\"date\"===t.type&&(o=g(o)),i=0;i<h.length;i++)l=h[i],s=a[l.charAt(0)].drawn,void 0!==s&&(c=h[i].substr(1).match(O),!c||c.length<s||(u=o(c[s]),f>u&&(f=u),u>d&&(d=u)));return d>=f?[f,d]:void 0}}var w=t(\"fast-isnumeric\"),k=t(\"../../plotly\"),M=t(\"../../lib\"),A=t(\"../../plots/cartesian/axes\"),L=t(\"../color\"),T=t(\"../drawing\"),z=t(\"../dragelement\"),S=t(\"../../lib/setcursor\"),E=e.exports={};E.layoutAttributes=t(\"./attributes\"),E.supplyLayoutDefaults=function(t,e){for(var n=t.shapes||[],a=e.shapes=[],o=0;o<n.length;o++)a.push(r(n[o]||{},e))},E.drawAll=function(t){var e=t._fullLayout;e._shapeUpperLayer.selectAll(\"path\").remove(),e._shapeLowerLayer.selectAll(\"path\").remove(),e._shapeSubplotLayer.selectAll(\"path\").remove();for(var n=0;n<e.shapes.length;n++)E.draw(t,n)},E.add=function(t){var e=t._fullLayout.shapes.length;k.relayout(t,\"shapes[\"+e+\"]\",\"add\")},E.draw=function(t,e,n,r){if(!w(e)||-1===e){if(!e&&Array.isArray(r))return void i(t,r);if(\"remove\"===r)return void l(t);if(n&&\"add\"!==r)return void s(t,n,r);e=t._fullLayout.shapes.length,t._fullLayout.shapes.push({})}if(!n&&r){if(\"remove\"===r)return void c(t,e);(\"add\"===r||M.isPlainObject(r))&&u(t,e,r)}f(t,e,n,r)};var 
C=/[MLHVQCTSZ][^MLHVQCTSZ]*/g,O=/[^\\s,]+/g,P={M:{0:!0,drawn:0},L:{0:!0,drawn:0},H:{0:!0,drawn:0},V:{},Q:{0:!0,2:!0,drawn:2},C:{0:!0,2:!0,4:!0,drawn:4},T:{0:!0,drawn:0},S:{0:!0,2:!0,drawn:2},Z:{}},N={M:{1:!0,drawn:1},L:{1:!0,drawn:1},H:{},V:{0:!0,drawn:0},Q:{1:!0,3:!0,drawn:3},C:{1:!0,3:!0,5:!0,drawn:5},T:{1:!0,drawn:1},S:{1:!0,3:!0,drawn:5},Z:{}},D={M:2,L:2,H:1,V:1,Q:4,C:6,T:2,S:4,Z:0};E.convertPath=function(t,e,n){return t.replace(C,function(t){var r=0,a=t.charAt(0),o=P[a],i=N[a],l=D[a],s=t.substr(1).replace(O,function(t){return o[r]?t=e(t):i[r]&&(t=n(t)),r++,r>l&&(t=\"X\"),t});return r>l&&(s=s.replace(/[\\s,]*X.*/,\"\"),M.log(\"Ignoring extra params in segment \"+t)),a+s})},E.calcAutorange=function(t){var e,n,r,a,o,i=t._fullLayout,l=i.shapes;if(l.length&&t._fullData.length)for(e=0;e<l.length;e++)n=l[e],r=n.line.width/2,\"paper\"!==n.xref&&(a=A.getFromId(t,n.xref),o=_(a,n.x0,n.x1,n.path,P),o&&A.expand(a,o,{ppad:r})),\"paper\"!==n.yref&&(a=A.getFromId(t,n.yref),o=_(a,n.y0,n.y1,n.path,N),o&&A.expand(a,o,{ppad:r}))}},{\"../../lib\":89,\"../../lib/setcursor\":98,\"../../plotly\":107,\"../../plots/cartesian/axes\":110,\"../color\":18,\"../dragelement\":39,\"../drawing\":41,\"./attributes\":79,\"fast-isnumeric\":11}],81:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"fast-isnumeric\"),o=t(\"../../plotly\"),i=t(\"../../plots/plots\"),l=t(\"../../lib\"),s=t(\"../drawing\"),c=t(\"../color\"),u=t(\"../../lib/svg_text_utils\"),f=e.exports={};f.draw=function(t,e,n){function f(t){l.syncOrAsync([d,h],t)}function d(e){return e.attr(\"transform\",_?\"rotate(\"+[_.rotate,b.x,b.y]+\") translate(0, \"+_.offset+\")\":null),e.style({\"font-family\":M,\"font-size\":r.round(A,2)+\"px\",fill:c.rgb(L),opacity:T*c.opacity(L),\"font-weight\":i.fontWeight}).attr(b).call(u.convertToTspans).attr(b),e.selectAll(\"tspan.line\").attr(b),i.previousPromises(t)}function h(t){var e=r.select(t.node().parentNode);if(x&&x.selection&&x.side&&S){e.attr(\"transform\",null);var n=0,o={left:\"right\",right:\"left\",top:\"bottom\",bottom:\"top\"}[x.side],i=-1!==[\"left\",\"top\"].indexOf(x.side)?-1:1,c=a(x.pad)?x.pad:2,u=s.bBox(e.node()),f={left:0,top:0,right:k.width,bottom:k.height},d=x.maxShift||(f[x.side]-u[x.side])*(\"left\"===x.side||\"top\"===x.side?-1:1);if(0>d?n=d:(u.left-=x.offsetLeft,u.right-=x.offsetLeft,u.top-=x.offsetTop,u.bottom-=x.offsetTop,x.selection.each(function(){var t=s.bBox(this);l.bBoxIntersect(u,t,c)&&(n=Math.max(n,i*(t[x.side]-u[o])+c))}),n=Math.min(d,n)),n>0||0>d){var h={left:[-n,0],right:[n,0],top:[0,-n],bottom:[0,n]}[x.side];e.attr(\"transform\",\"translate(\"+h+\")\")}}}function p(){T=0,z=!0,S=C,k._infolayer.select(\".\"+e).attr({\"data-unformatted\":S}).text(S).on(\"mouseover.opacity\",function(){r.select(this).transition().duration(100).style(\"opacity\",1)}).on(\"mouseout.opacity\",function(){r.select(this).transition().duration(1e3).style(\"opacity\",0)})}var g=n.propContainer,v=n.propName,m=n.traceIndex,y=n.dfltName,x=n.avoid||{},b=n.attributes,_=n.transform,w=n.containerGroup,k=t._fullLayout,M=g.titlefont.family,A=g.titlefont.size,L=g.titlefont.color,T=1,z=!1,S=g.title.trim();\"\"===S&&(T=0),S.match(/Click to enter .+ title/)&&(T=.2,z=!0),w||(w=k._infolayer.selectAll(\".g-\"+e).data([0]),w.enter().append(\"g\").classed(\"g-\"+e,!0));var E=w.selectAll(\"text\").data([0]);E.enter().append(\"text\"),E.text(S).attr(\"class\",e),E.attr({\"data-unformatted\":S}).call(f);var C=\"Click to enter \"+y+\" title\";t._context.editable?(S||p(),E.call(u.makeEditable).on(\"edit\",function(e){void 
0!==m?o.restyle(t,v,e,m):o.relayout(t,v,e)}).on(\"cancel\",function(){this.text(this.attr(\"data-unformatted\")).call(f)}).on(\"input\",function(t){this.text(t||\" \").attr(b).selectAll(\"tspan.line\").attr(b)})):S&&!S.match(/Click to enter .+ title/)||E.remove(),E.classed(\"js-placeholder\",z)}},{\"../../lib\":89,\"../../lib/svg_text_utils\":100,\"../../plotly\":107,\"../../plots/plots\":130,\"../color\":18,\"../drawing\":41,d3:9,\"fast-isnumeric\":11}],82:[function(t,e,n){\"use strict\";n.xmlns=\"http://www.w3.org/2000/xmlns/\",n.svg=\"http://www.w3.org/2000/svg\",n.xlink=\"http://www.w3.org/1999/xlink\",n.svgAttrs={xmlns:n.svg,\"xmlns:xlink\":n.xlink}},{}],83:[function(t,e,n){\"use strict\";var r=t(\"./plotly\");n.version=\"1.15.0\",n.plot=r.plot,n.newPlot=r.newPlot,n.restyle=r.restyle,n.relayout=r.relayout,n.redraw=r.redraw,n.extendTraces=r.extendTraces,n.prependTraces=r.prependTraces,n.addTraces=r.addTraces,n.deleteTraces=r.deleteTraces,n.moveTraces=r.moveTraces,n.purge=r.purge,n.setPlotConfig=t(\"./plot_api/set_plot_config\"),n.register=r.register,n.toImage=t(\"./plot_api/to_image\"),n.downloadImage=t(\"./snapshot/download\"),n.validate=t(\"./plot_api/validate\"),n.Icons=t(\"../build/ploticon\"),n.Plots=r.Plots,n.Fx=r.Fx,n.Snapshot=r.Snapshot,n.PlotSchema=r.PlotSchema,n.Queue=r.Queue,n.d3=t(\"d3\")},{\"../build/ploticon\":2,\"./plot_api/set_plot_config\":104,\"./plot_api/to_image\":105,\"./plot_api/validate\":106,\"./plotly\":107,\"./snapshot/download\":137,d3:9}],84:[function(t,e,n){\"use strict\";\"undefined\"!=typeof MathJax?(n.MathJax=!0,MathJax.Hub.Config({messageStyle:\"none\",skipStartupTypeset:!0,displayAlign:\"left\",tex2jax:{inlineMath:[[\"$\",\"$\"],[\"\\\\(\",\"\\\\)\"]]}}),MathJax.Hub.Configured()):n.MathJax=!1},{}],85:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"tinycolor2\"),o=t(\"./nested_property\"),i=t(\"../components/colorscale/get_scale\"),l=(Object.keys(t(\"../components/colorscale/scales\")),/^([2-9]|[1-9][0-9]+)$/);n.valObjects={data_array:{coerceFunction:function(t,e,n){Array.isArray(t)?e.set(t):void 0!==n&&e.set(n)}},enumerated:{coerceFunction:function(t,e,n,r){r.coerceNumber&&(t=+t),-1===r.values.indexOf(t)?e.set(n):e.set(t)}},\"boolean\":{coerceFunction:function(t,e,n){t===!0||t===!1?e.set(t):e.set(n)}},number:{coerceFunction:function(t,e,n,a){!r(t)||void 0!==a.min&&t<a.min||void 0!==a.max&&t>a.max?e.set(n):e.set(+t)}},integer:{coerceFunction:function(t,e,n,a){t%1||!r(t)||void 0!==a.min&&t<a.min||void 0!==a.max&&t>a.max?e.set(n):e.set(+t)}},string:{coerceFunction:function(t,e,n,r){if(\"string\"!=typeof t){var a=\"number\"==typeof t;r.strict!==!0&&a?e.set(String(t)):e.set(n)}else r.noBlank&&!t?e.set(n):e.set(t)}},color:{coerceFunction:function(t,e,n){a(t).isValid()?e.set(t):e.set(n)}},colorscale:{coerceFunction:function(t,e,n){e.set(i(t,n))}},angle:{coerceFunction:function(t,e,n){\"auto\"===t?e.set(\"auto\"):r(t)?(Math.abs(t)>180&&(t-=360*Math.round(t/360)),e.set(+t)):e.set(n)}},subplotid:{coerceFunction:function(t,e,n){var r=n.length;return\"string\"==typeof t&&t.substr(0,r)===n&&l.test(t.substr(r))?void e.set(t):void e.set(n)},validateFunction:function(t,e){var n=e.dflt,r=n.length;return t===n?!0:\"string\"!=typeof t?!1:!(t.substr(0,r)!==n||!l.test(t.substr(r)))}},flaglist:{coerceFunction:function(t,e,n,r){if(\"string\"!=typeof t)return void e.set(n);if(-1!==(r.extras||[]).indexOf(t)"
+,
+")return void e.set(t);for(var a=t.split(\"+\"),o=0;o<a.length;){var i=a[o];-1===r.flags.indexOf(i)||a.indexOf(i)<o?a.splice(o,1):o++}a.length?e.set(a.join(\"+\")):e.set(n)}},any:{coerceFunction:function(t,e,n){void 0===t?e.set(n):e.set(t)}},info_array:{coerceFunction:function(t,e,r,a){if(!Array.isArray(t))return void e.set(r);var o=a.items,i=[];r=Array.isArray(r)?r:[];for(var l=0;l<o.length;l++)n.coerce(t,i,o,\"[\"+l+\"]\",r[l]);e.set(i)},validateFunction:function(t,e){if(!Array.isArray(t))return!1;var r=e.items;if(t.length!==r.length)return!1;for(var a=0;a<r.length;a++){var o=n.validate(t[a],e.items[a]);if(!o)return!1}return!0}}},n.coerce=function(t,e,r,a,i){var l=o(r,a).get(),s=o(t,a),c=o(e,a),u=s.get();return void 0===i&&(i=l.dflt),l.arrayOk&&Array.isArray(u)?(c.set(u),u):(n.valObjects[l.valType].coerceFunction(u,c,i,l),c.get())},n.coerce2=function(t,e,r,a,i){var l=o(t,a),s=n.coerce(t,e,r,a,i);return l.get()?s:!1},n.coerceFont=function(t,e,n){var r={};return n=n||{},r.family=t(e+\".family\",n.family),r.size=t(e+\".size\",n.size),r.color=t(e+\".color\",n.color),r},n.validate=function(t,e){var r=n.valObjects[e.valType];if(e.arrayOk&&Array.isArray(t))return!0;if(r.validateFunction)return r.validateFunction(t,e);var a={},o=a,i={set:function(t){o=t}};return r.coerceFunction(t,i,a,e),o!==a}},{\"../components/colorscale/get_scale\":30,\"../components/colorscale/scales\":36,\"./nested_property\":93,\"fast-isnumeric\":11,tinycolor2:13}],86:[function(t,e,n){\"use strict\";function r(t,e){return String(t+Math.pow(10,e)).substr(1)}function a(t){var e;return e=x.test(t)?\"Y\":\"y\",e+=b.test(t)?\"b\":\"\"}function o(t){var e;return e=w.test(t)?_.test(t)?\"I\":\"H\":\"D\"}var i=t(\"d3\"),l=t(\"fast-isnumeric\"),s=t(\"../lib\");n.dateTime2ms=function(t){try{if(t.getTime)return+t}catch(e){return!1}var n,r,a,o,i=String(t).split(\" \");if(i.length>2)return!1;var s=i[0].split(\"-\");if(s.length>3||3!==s.length&&i[1])return!1;if(4===s[0].length)n=Number(s[0]);else{if(2!==s[0].length)return!1;var c=(new Date).getFullYear();n=((Number(s[0])-c+70)%100+200)%100+c-70}return l(n)?1===s.length?new Date(n,0,1).getTime():(r=Number(s[1])-1,s[1].length>2||!(r>=0&&11>=r)?!1:2===s.length?new Date(n,r,1).getTime():(a=Number(s[2]),s[2].length>2||!(a>=1&&31>=a)?!1:(a=new Date(n,r,a).getTime(),i[1]?(s=i[1].split(\":\"),s.length>3?!1:(o=Number(s[0]),s[0].length>2||!(o>=0&&23>=o)?!1:(a+=36e5*o,1===s.length?a:(r=Number(s[1]),s[1].length>2||!(r>=0&&59>=r)?!1:(a+=6e4*r,2===s.length?a:(t=Number(s[2]),t>=0&&60>t?a+1e3*t:!1)))))):a))):!1},n.isDateTime=function(t){return n.dateTime2ms(t)!==!1},n.ms2DateTime=function(t,e){if(\"undefined\"==typeof i)return void s.error(\"d3 is not defined.\");e||(e=0);var n=new Date(t),a=i.time.format(\"%Y-%m-%d\")(n);return 7776e6>e?(a+=\" \"+r(n.getHours(),2),432e6>e&&(a+=\":\"+r(n.getMinutes(),2),108e5>e&&(a+=\":\"+r(n.getSeconds(),2),3e5>e&&(a+=\".\"+r(n.getMilliseconds(),3)))),a.replace(/([:\\s]00)*\\.?[0]*$/,\"\")):a};var 
c={H:[\"%H:%M:%S~%L\",\"%H:%M:%S\",\"%H:%M\"],I:[\"%I:%M:%S~%L%p\",\"%I:%M:%S%p\",\"%I:%M%p\"],D:[\"%H\",\"%I%p\",\"%Hh\"]},u={Y:[\"%Y~%m~%d\",\"%Y%m%d\",\"%y%m%d\",\"%m~%d~%Y\",\"%d~%m~%Y\"],Yb:[\"%b~%d~%Y\",\"%d~%b~%Y\",\"%Y~%d~%b\",\"%Y~%b~%d\"],y:[\"%m~%d~%y\",\"%d~%m~%y\",\"%y~%m~%d\"],yb:[\"%b~%d~%y\",\"%d~%b~%y\",\"%y~%d~%b\",\"%y~%b~%d\"]},f=i.time.format.utc,d={Y:{H:[\"%Y~%m~%dT%H:%M:%S\",\"%Y~%m~%dT%H:%M:%S~%L\"].map(f),I:[],D:[\"%Y%m%d%H%M%S\",\"%Y~%m\",\"%m~%Y\"].map(f)},Yb:{H:[],I:[],D:[\"%Y~%b\",\"%b~%Y\"].map(f)},y:{H:[],I:[],D:[]},yb:{H:[],I:[],D:[]}};[\"Y\",\"Yb\",\"y\",\"yb\"].forEach(function(t){u[t].forEach(function(e){d[t].D.push(f(e)),[\"H\",\"I\",\"D\"].forEach(function(n){c[n].forEach(function(r){var a=d[t][n];a.push(f(e+\"~\"+r)),a.push(f(r+\"~\"+e))})})})});var h=/[a-z]*/g,p=function(t){return t.substr(0,3)},g=/(mon|tue|wed|thu|fri|sat|sun|the|of|st|nd|rd|th)/g,v=/[\\s,\\/\\-\\.\\(\\)]+/g,m=/~?([ap])~?m(~|$)/,y=function(t,e){return e+\"m \"},x=/\\d\\d\\d\\d/,b=/(^|~)[a-z]{3}/,_=/[ap]m/,w=/:/,k=/q([1-4])/,M=[\"31~mar\",\"30~jun\",\"30~sep\",\"31~dec\"],A=function(t,e){return M[e-1]},L=/ ?([+\\-]\\d\\d:?\\d\\d|Z)$/;n.parseDate=function(t){if(t.getTime)return t;if(\"string\"!=typeof t)return!1;t=t.toLowerCase().replace(h,p).replace(g,\"\").replace(v,\"~\").replace(m,y).replace(k,A).trim().replace(L,\"\");var e,n,r=null,i=a(t),l=o(t);e=d[i][l],n=e.length;for(var s=0;n>s&&!(r=e[s].parse(t));s++);if(!(r instanceof Date))return!1;var c=r.getTimezoneOffset();return r.setTime(r.getTime()+60*c*1e3),r}},{\"../lib\":89,d3:9,\"fast-isnumeric\":11}],87:[function(t,e,n){\"use strict\";var r=t(\"events\").EventEmitter,a={init:function(t){if(t._ev instanceof r)return t;var e=new r;return t._ev=e,t.on=e.on.bind(e),t.once=e.once.bind(e),t.removeListener=e.removeListener.bind(e),t.removeAllListeners=e.removeAllListeners.bind(e),t.emit=function(n,r){\"undefined\"!=typeof jQuery&&jQuery(t).trigger(n,r),e.emit(n,r)},t},triggerHandler:function(t,e,n){var r,a;\"undefined\"!=typeof jQuery&&(r=jQuery(t).triggerHandler(e,n));var o=t._ev;if(!o)return r;var i=o._events[e];if(!i)return r;\"function\"==typeof i&&(i=[i]);for(var l=i.pop(),s=0;s<i.length;s++)i[s](n);return a=l(n),void 0!==r?r:a},purge:function(t){return delete t._ev,delete t.on,delete t.once,delete t.removeListener,delete t.removeAllListeners,delete t.emit,t}};e.exports=a},{events:7}],88:[function(t,e,n){\"use strict\";function r(t,e){var n,r;for(n=0;n<t.length;n++){if(r=t[n],null!==r&&\"object\"==typeof r)return!1;void 0!==r&&(e[n]=r)}return!0}function a(t,e,n){var l,s,c,u,f,d,h,p=t[0],g=t.length;if(2===g&&i(p)&&i(t[1])&&0===p.length){if(h=r(t[1],p))return p;p.splice(0,p.length)}for(var v=1;g>v;v++){l=t[v];for(s in l)c=p[s],u=l[s],e&&u&&(o(u)||(f=i(u)))?(f?(f=!1,d=c&&i(c)?c:[]):d=c&&o(c)?c:{},p[s]=a([d,u],e,n)):(\"undefined\"!=typeof u||n)&&(p[s]=u)}return p}var o=t(\"./is_plain_object.js\"),i=Array.isArray;n.extendFlat=function(){return a(arguments,!1,!1)},n.extendDeep=function(){return a(arguments,!0,!1)},n.extendDeepAll=function(){return a(arguments,!0,!0)}},{\"./is_plain_object.js\":90}],89:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=e.exports={};a.nestedProperty=t(\"./nested_property\"),a.isPlainObject=t(\"./is_plain_object\");var o=t(\"./coerce\");a.valObjects=o.valObjects,a.coerce=o.coerce,a.coerce2=o.coerce2,a.coerceFont=o.coerceFont,a.validate=o.validate;var i=t(\"./dates\");a.dateTime2ms=i.dateTime2ms,a.isDateTime=i.isDateTime,a.ms2DateTime=i.ms2DateTime,a.parseDate=i.parseDate;var 
l=t(\"./search\");a.findBin=l.findBin,a.sorterAsc=l.sorterAsc,a.sorterDes=l.sorterDes,a.distinctVals=l.distinctVals,a.roundUp=l.roundUp;var s=t(\"./stats\");a.aggNums=s.aggNums,a.len=s.len,a.mean=s.mean,a.variance=s.variance,a.stdev=s.stdev,a.interp=s.interp;var c=t(\"./matrix\");a.init2dArray=c.init2dArray,a.transposeRagged=c.transposeRagged,a.dot=c.dot,a.translationMatrix=c.translationMatrix,a.rotationMatrix=c.rotationMatrix,a.rotationXYMatrix=c.rotationXYMatrix,a.apply2DTransform=c.apply2DTransform,a.apply2DTransform2=c.apply2DTransform2;var u=t(\"./extend\");a.extendFlat=u.extendFlat,a.extendDeep=u.extendDeep,a.extendDeepAll=u.extendDeepAll;var f=t(\"./loggers\");a.log=f.log,a.warn=f.warn,a.error=f.error,a.notifier=t(\"./notifier\"),a.swapAttrs=function(t,e,n,r){n||(n=\"x\"),r||(r=\"y\");for(var o=0;o<e.length;o++){var i=e[o],l=a.nestedProperty(t,i.replace(\"?\",n)),s=a.nestedProperty(t,i.replace(\"?\",r)),c=l.get();l.set(s.get()),s.set(c)}},a.pauseEvent=function(t){return t.stopPropagation&&t.stopPropagation(),t.preventDefault&&t.preventDefault(),t.cancelBubble=!0,!1},a.constrain=function(t,e,n){return e>n?Math.max(n,Math.min(e,t)):Math.max(e,Math.min(n,t))},a.bBoxIntersect=function(t,e,n){return n=n||0,t.left<=e.right+n&&e.left<=t.right+n&&t.top<=e.bottom+n&&e.top<=t.bottom+n},a.identity=function(t){return t},a.randstr=function d(t,e,n){if(n||(n=16),void 0===e&&(e=24),0>=e)return\"0\";var r,a,o,i=Math.log(Math.pow(2,e))/Math.log(n),l=\"\";for(r=2;i===1/0;r*=2)i=Math.log(Math.pow(2,e/r))/Math.log(n)*r;var s=i-Math.floor(i);for(r=0;r<Math.floor(i);r++)o=Math.floor(Math.random()*n).toString(n),l=o+l;s&&(a=Math.pow(n,s),o=Math.floor(Math.random()*a).toString(n),l=o+l);var c=parseInt(l,n);return t&&t.indexOf(l)>-1||c!==1/0&&c>=Math.pow(2,e)?d(t,e,n):l},a.OptionControl=function(t,e){t||(t={}),e||(e=\"opt\");var n={};return n.optionList=[],n._newoption=function(r){r[e]=t,n[r.name]=r,n.optionList.push(r)},n[\"_\"+e]=t,n},a.smooth=function(t,e){if(e=Math.round(e)||0,2>e)return t;var n,r,a,o,i=t.length,l=2*i,s=2*e-1,c=new Array(s),u=new Array(i);for(n=0;s>n;n++)c[n]=(1-Math.cos(Math.PI*(n+1)/e))/(2*e);for(n=0;i>n;n++){for(o=0,r=0;s>r;r++)a=n+r+1-e,-i>a?a-=l*Math.round(a/l):a>=l&&(a-=l*Math.floor(a/l)),0>a?a=-1-a:a>=i&&(a=l-1-a),o+=t[a]*c[r];u[n]=o}return u},a.syncOrAsync=function(t,e,n){function r(){return a.syncOrAsync(t,e,n)}for(var o,i;t.length;)if(i=t.splice(0,1)[0],o=i(e),o&&o.then)return o.then(r).then(void 0,a.promiseError);return n&&n(e)},a.stripTrailingSlash=function(t){return\"/\"===t.substr(-1)?t.substr(0,t.length-1):t},a.noneOrAll=function(t,e,n){if(t){var r,a,o=!1,i=!0;for(r=0;r<n.length;r++)a=t[n[r]],void 0!==a&&null!==a?o=!0:i=!1;if(o&&!i)for(r=0;r<n.length;r++)t[n[r]]=e[n[r]]}},a.pushUnique=function(t,e){return e&&-1===t.indexOf(e)&&t.push(e),t},a.mergeArray=function(t,e,n){if(Array.isArray(t))for(var r=Math.min(t.length,e.length),a=0;r>a;a++)e[a][n]=t[a]},a.minExtend=function(t,e){var n={};\"object\"!=typeof e&&(e={});var r,o,i,l=3,s=Object.keys(t);for(r=0;r<s.length;r++)o=s[r],i=t[o],\"_\"!==o.charAt(0)&&\"function\"!=typeof i&&(\"module\"===o?n[o]=i:Array.isArray(i)?n[o]=i.slice(0,l):i&&\"object\"==typeof i?n[o]=a.minExtend(t[o],e[o]):n[o]=i);for(s=Object.keys(e),r=0;r<s.length;r++)o=s[r],i=e[o],\"object\"==typeof i&&o in n&&\"object\"==typeof n[o]||(n[o]=i);return n},a.titleCase=function(t){return t.charAt(0).toUpperCase()+t.substr(1)},a.containsAny=function(t,e){for(var 
n=0;n<e.length;n++)if(-1!==t.indexOf(e[n]))return!0;return!1},a.getPlotDiv=function(t){for(;t&&t.removeAttribute;t=t.parentNode)if(a.isPlotDiv(t))return t},a.isPlotDiv=function(t){var e=r.select(t);return e.size()&&e.classed(\"js-plotly-plot\")},a.removeElement=function(t){var e=t&&t.parentNode;e&&e.removeChild(t)},a.add"
+,
+"StyleRule=function(t,e){if(!a.styleSheet){var n=document.createElement(\"style\");n.appendChild(document.createTextNode(\"\")),document.head.appendChild(n),a.styleSheet=n.sheet}var r=a.styleSheet;r.insertRule?r.insertRule(t+\"{\"+e+\"}\",0):r.addRule?r.addRule(t,e,0):a.warn(\"addStyleRule failed\")},a.getTranslate=function(t){var e=/.*\\btranslate\\((\\d*\\.?\\d*)[^\\d]*(\\d*\\.?\\d*)[^\\d].*/,n=t.attr?\"attr\":\"getAttribute\",r=t[n](\"transform\")||\"\",a=r.replace(e,function(t,e,n){return[e,n].join(\" \")}).split(\" \");return{x:+a[0]||0,y:+a[1]||0}},a.setTranslate=function(t,e,n){var r=/(\\btranslate\\(.*?\\);?)/,a=t.attr?\"attr\":\"getAttribute\",o=t.attr?\"attr\":\"setAttribute\",i=t[a](\"transform\")||\"\";return e=e||0,n=n||0,i=i.replace(r,\"\").trim(),i+=\" translate(\"+e+\", \"+n+\")\",i=i.trim(),t[o](\"transform\",i),i},a.getScale=function(t){var e=/.*\\bscale\\((\\d*\\.?\\d*)[^\\d]*(\\d*\\.?\\d*)[^\\d].*/,n=t.attr?\"attr\":\"getAttribute\",r=t[n](\"transform\")||\"\",a=r.replace(e,function(t,e,n){return[e,n].join(\" \")}).split(\" \");return{x:+a[0]||1,y:+a[1]||1}},a.setScale=function(t,e,n){var r=/(\\bscale\\(.*?\\);?)/,a=t.attr?\"attr\":\"getAttribute\",o=t.attr?\"attr\":\"setAttribute\",i=t[a](\"transform\")||\"\";return e=e||1,n=n||1,i=i.replace(r,\"\").trim(),i+=\" scale(\"+e+\", \"+n+\")\",i=i.trim(),t[o](\"transform\",i),i},a.setPointGroupScale=function(t,e,n){var r,a,o;return e=e||1,n=n||1,a=1===e&&1===n?\"\":\" scale(\"+e+\",\"+n+\")\",o=/\\s*sc.*/,t.each(function(){r=(this.getAttribute(\"transform\")||\"\").replace(o,\"\"),r+=a,r=r.trim(),this.setAttribute(\"transform\",r)}),a},a.isIE=function(){return\"undefined\"!=typeof window.navigator.msSaveBlob},a.objectFromPath=function(t,e){for(var n,r=t.split(\".\"),a=n={},o=0;o<r.length;o++){var i=r[o],l=null,s=r[o].match(/(.*)\\[([0-9]+)\\]/);s?(i=s[1],l=s[2],n=n[i]=[],o===r.length-1?n[l]=e:n[l]={},n=n[l]):(o===r.length-1?n[i]=e:n[i]={},n=n[i])}return a},a.numSeparate=function(t,e){if(\"string\"!=typeof e||0===e.length)throw new Error(\"Separator string required for formatting!\");\"number\"==typeof t&&(t=String(t));var n=/(\\d+)(\\d{3})/,r=e.charAt(0),a=e.charAt(1),o=t.split(\".\"),i=o[0],l=o.length>1?r+o[1]:\"\";\n"
+,
+"if(a&&(o.length>1||i.length>4))for(;n.test(i);)i=i.replace(n,\"$1\"+a+\"$2\");return i+l}},{\"./coerce\":85,\"./dates\":86,\"./extend\":88,\"./is_plain_object\":90,\"./loggers\":91,\"./matrix\":92,\"./nested_property\":93,\"./notifier\":94,\"./search\":97,\"./stats\":99,d3:9}],90:[function(t,e,n){\"use strict\";e.exports=function(t){return\"[object Object]\"===Object.prototype.toString.call(t)&&Object.getPrototypeOf(t)===Object.prototype}},{}],91:[function(t,e,n){\"use strict\";var r=t(\"../plot_api/plot_config\"),a=e.exports={};a.log=function(){if(r.logging>1){for(var t=[\"LOG:\"],e=0;e<arguments.length;e++)t.push(arguments[e]);console.trace?console.trace.apply(console,t):console.log.apply(console,t)}},a.warn=function(){if(r.logging>0){for(var t=[\"WARN:\"],e=0;e<arguments.length;e++)t.push(arguments[e]);console.trace?console.trace.apply(console,t):console.log.apply(console,t)}},a.error=function(){if(r.logging>0){for(var t=[\"ERROR:\"],e=0;e<arguments.length;e++)t.push(arguments[e]);console.error.apply(console,arguments)}}},{\"../plot_api/plot_config\":102}],92:[function(t,e,n){\"use strict\";n.init2dArray=function(t,e){for(var n=new Array(t),r=0;t>r;r++)n[r]=new Array(e);return n},n.transposeRagged=function(t){var e,n,r=0,a=t.length;for(e=0;a>e;e++)r=Math.max(r,t[e].length);var o=new Array(r);for(e=0;r>e;e++)for(o[e]=new Array(a),n=0;a>n;n++)o[e][n]=t[n][e];return o},n.dot=function(t,e){if(!t.length||!e.length||t.length!==e.length)return null;var r,a,o=t.length;if(t[0].length)for(r=new Array(o),a=0;o>a;a++)r[a]=n.dot(t[a],e);else if(e[0].length){var i=n.transposeRagged(e);for(r=new Array(i.length),a=0;a<i.length;a++)r[a]=n.dot(t,i[a])}else for(r=0,a=0;o>a;a++)r+=t[a]*e[a];return r},n.translationMatrix=function(t,e){return[[1,0,t],[0,1,e],[0,0,1]]},n.rotationMatrix=function(t){var e=t*Math.PI/180;return[[Math.cos(e),-Math.sin(e),0],[Math.sin(e),Math.cos(e),0],[0,0,1]]},n.rotationXYMatrix=function(t,e,r){return n.dot(n.dot(n.translationMatrix(e,r),n.rotationMatrix(t)),n.translationMatrix(-e,-r))},n.apply2DTransform=function(t){return function(){var e=arguments;3===e.length&&(e=e[0]);var r=1===arguments.length?e[0]:[e[0],e[1]];return n.dot(t,[r[0],r[1],1]).slice(0,2)}},n.apply2DTransform2=function(t){var e=n.apply2DTransform(t);return function(t){return e(t.slice(0,2)).concat(e(t.slice(2,4)))}}},{}],93:[function(t,e,n){\"use strict\";function r(t,e){return function(){var n,a,o,i,l,s=t;for(i=0;i<e.length-1;i++){if(n=e[i],-1===n){for(a=!0,o=[],l=0;l<s.length;l++)o[l]=r(s[l],e.slice(i+1))(),o[l]!==o[0]&&(a=!1);return a?o[0]:o}if(\"number\"==typeof n&&!Array.isArray(s))return;if(s=s[n],\"object\"!=typeof s||null===s)return}if(\"object\"==typeof s&&null!==s&&(o=s[e[i]],null!==o))return o}}function a(t,e){var n=[\"annotations\",\"shapes\",\"range\",\"domain\",\"buttons\"],r=-1===n.indexOf(e);return Array.isArray(t)&&r}function o(t,e){return function(n){var r,o,u=t,f=[t],d=c(n)&&!a(n,e[e.length-1]);for(o=0;o<e.length-1;o++){if(r=e[o],\"number\"==typeof r&&!Array.isArray(u))throw\"array index but container is not an array\";if(-1===r){if(d=!i(u,e.slice(o+1),n))break;return}if(!l(u,r,e[o+1],d))break;if(u=u[r],\"object\"!=typeof u||null===u)throw\"container is not an object\";f.push(u)}d?(o===e.length-1&&delete u[e[o]],s(f)):u[e[o]]=n}}function i(t,e,n){var r,a=Array.isArray(n),i=!0,s=n,u=a?!1:c(n),f=e[0];for(r=0;r<t.length;r++)a&&(s=n[r%n.length],u=c(s)),u&&(i=!1),l(t,r,f,u)&&o(t[r],e)(s);return i}function l(t,e,n,r){if(void 0===t[e]){if(r)return!1;\"number\"==typeof 
n?t[e]=[]:t[e]={}}return!0}function s(t){var e,n,r,o,i;for(e=t.length-1;e>=0;e--){if(r=t[e],i=!1,Array.isArray(r))for(n=r.length-1;n>=0;n--)c(r[n])?i?r[n]=void 0:r.pop():i=!0;else if(\"object\"==typeof r&&null!==r)for(o=Object.keys(r),i=!1,n=o.length-1;n>=0;n--)c(r[o[n]])&&!a(r[o[n]],o[n])?delete r[o[n]]:i=!0;if(i)return}}function c(t){return void 0===t||null===t?!0:\"object\"!=typeof t?!1:Array.isArray(t)?!t.length:!Object.keys(t).length}function u(t,e,n){return{set:function(){throw\"bad container\"},get:function(){},astr:e,parts:n,obj:t}}var f=t(\"fast-isnumeric\");e.exports=function(t,e){if(f(e))e=String(e);else if(\"string\"!=typeof e||\"[-1]\"===e.substr(e.length-4))throw\"bad property string\";for(var n,a,i,l=0,s=e.split(\".\");l<s.length;){if(n=String(s[l]).match(/^([^\\[\\]]*)((\\[\\-?[0-9]*\\])+)$/)){if(n[1])s[l]=n[1];else{if(0!==l)throw\"bad property string\";s.splice(0,1)}for(a=n[2].substr(1,n[2].length-2).split(\"][\"),i=0;i<a.length;i++)l++,s.splice(l,0,Number(a[i]))}l++}return\"object\"!=typeof t?u(t,e,s):{set:o(t,s),get:r(t,s),astr:e,parts:s,obj:t}}},{\"fast-isnumeric\":11}],94:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"fast-isnumeric\"),o=[];e.exports=function(t,e){function n(t){t.duration(700).style(\"opacity\",0).each(\"end\",function(t){var e=o.indexOf(t);-1!==e&&o.splice(e,1),r.select(this).remove()})}if(-1===o.indexOf(t)){o.push(t);var i=1e3;a(e)?i=e:\"long\"===e&&(i=3e3);var l=r.select(\"body\").selectAll(\".plotly-notifier\").data([0]);l.enter().append(\"div\").classed(\"plotly-notifier\",!0);var s=l.selectAll(\".notifier-note\").data(o);s.enter().append(\"div\").classed(\"notifier-note\",!0).style(\"opacity\",0).each(function(t){var e=r.select(this);e.append(\"button\").classed(\"notifier-close\",!0).html(\"&times;\").on(\"click\",function(){e.transition().call(n)}),e.append(\"p\").html(t),e.transition().duration(700).style(\"opacity\",1).transition().delay(i).call(n)})}}},{d3:9,\"fast-isnumeric\":11}],95:[function(t,e,n){\"use strict\";var r=t(\"./matrix\").dot,a=e.exports={};a.tester=function(t){function e(t,e){var n=t[0],r=t[1];return a>n||n>o||i>r||r>l?!1:!e||!c(t)}function n(t,e){var n=t[0],s=t[1];if(a>n||n>o||i>s||s>l)return!1;var c,u,f,d,h,p=r.length,g=r[0][0],v=r[0][1],m=0;for(c=1;p>c;c++)if(u=g,f=v,g=r[c][0],v=r[c][1],d=Math.min(u,g),!(d>n||n>Math.max(u,g)||s>Math.max(f,v)))if(s<Math.min(f,v))n!==d&&m++;else{if(h=g===u?s:f+(n-u)*(v-f)/(g-u),s===h)return 1!==c||!e;h>=s&&n!==d&&m++}return m%2===1}var r=t.slice(),a=r[0][0],o=a,i=r[0][1],l=i;r.push(r[0]);for(var s=1;s<r.length;s++)a=Math.min(a,r[s][0]),o=Math.max(o,r[s][0]),i=Math.min(i,r[s][1]),l=Math.max(l,r[s][1]);var c,u=!1;return 5===r.length&&(r[0][0]===r[1][0]?r[2][0]===r[3][0]&&r[0][1]===r[3][1]&&r[1][1]===r[2][1]&&(u=!0,c=function(t){return t[0]===r[0][0]}):r[0][1]===r[1][1]&&r[2][1]===r[3][1]&&r[0][0]===r[3][0]&&r[1][0]===r[2][0]&&(u=!0,c=function(t){return t[1]===r[0][1]})),{xmin:a,xmax:o,ymin:i,ymax:l,pts:r,contains:u?e:n,isRect:u}};var o=a.isSegmentBent=function(t,e,n,a){var o,i,l,s=t[e],c=[t[n][0]-s[0],t[n][1]-s[1]],u=r(c,c),f=Math.sqrt(u),d=[-c[1]/f,c[0]/f];for(o=e+1;n>o;o++)if(i=[t[o][0]-s[0],t[o][1]-s[1]],l=r(i,c),0>l||l>u||Math.abs(r(i,d))>a)return!0;return!1};a.filter=function(t,e){function n(n){t.push(n);var l=r.length,s=a;r.splice(i+1);for(var c=s+1;c<t.length;c++)(c===t.length-1||o(t,s,c+1,e))&&(r.push(t[c]),r.length<l-2&&(a=c,i=r.length-1),s=c)}var r=[t[0]],a=0,i=0;if(t.length>1){var l=t.pop();n(l)}return{addPt:n,raw:t,filtered:r}}},{\"./matrix\":92}],96:[function(t,e,n){\"use 
strict\";function r(t,e){for(var n,r=[],o=0;o<e.length;o++)n=e[o],n===t?r[o]=n:\"object\"==typeof n?r[o]=Array.isArray(n)?a.extendDeep([],n):a.extendDeepAll({},n):r[o]=n;return r}var a=t(\"../lib\"),o=t(\"../plot_api/plot_config\"),i={};i.add=function(t,e,n,r,a){var i,l;return t.undoQueue=t.undoQueue||{index:0,queue:[],sequence:!1},l=t.undoQueue.index,t.autoplay?void(t.undoQueue.inSequence||(t.autoplay=!1)):(!t.undoQueue.sequence||t.undoQueue.beginSequence?(i={undo:{calls:[],args:[]},redo:{calls:[],args:[]}},t.undoQueue.queue.splice(l,t.undoQueue.queue.length-l,i),t.undoQueue.index+=1):i=t.undoQueue.queue[l-1],t.undoQueue.beginSequence=!1,i&&(i.undo.calls.unshift(e),i.undo.args.unshift(n),i.redo.calls.push(r),i.redo.args.push(a)),void(t.undoQueue.queue.length>o.queueLength&&(t.undoQueue.queue.shift(),t.undoQueue.index--)))},i.startSequence=function(t){t.undoQueue=t.undoQueue||{index:0,queue:[],sequence:!1},t.undoQueue.sequence=!0,t.undoQueue.beginSequence=!0},i.stopSequence=function(t){t.undoQueue=t.undoQueue||{index:0,queue:[],sequence:!1},t.undoQueue.sequence=!1,t.undoQueue.beginSequence=!1},i.undo=function(t){var e,n;if(t.framework&&t.framework.isPolar)return void t.framework.undo();if(!(void 0===t.undoQueue||isNaN(t.undoQueue.index)||t.undoQueue.index<=0)){for(t.undoQueue.index--,e=t.undoQueue.queue[t.undoQueue.index],t.undoQueue.inSequence=!0,n=0;n<e.undo.calls.length;n++)i.plotDo(t,e.undo.calls[n],e.undo.args[n]);t.undoQueue.inSequence=!1,t.autoplay=!1}},i.redo=function(t){var e,n;if(t.framework&&t.framework.isPolar)return void t.framework.redo();if(!(void 0===t.undoQueue||isNaN(t.undoQueue.index)||t.undoQueue.index>=t.undoQueue.queue.length)){for(e=t.undoQueue.queue[t.undoQueue.index],t.undoQueue.inSequence=!0,n=0;n<e.redo.calls.length;n++)i.plotDo(t,e.redo.calls[n],e.redo.args[n]);t.undoQueue.inSequence=!1,t.autoplay=!1,t.undoQueue.index++}},i.plotDo=function(t,e,n){t.autoplay=!0,n=r(t,n),e.apply(null,n)},e.exports=i},{\"../lib\":89,\"../plot_api/plot_config\":102}],97:[function(t,e,n){\"use strict\";function r(t,e){return e>t}function a(t,e){return e>=t}function o(t,e){return t>e}function i(t,e){return t>=e}var l=t(\"fast-isnumeric\"),s=t(\"../lib\");n.findBin=function(t,e,n){if(l(e.start))return n?Math.ceil((t-e.start)/e.size)-1:Math.floor((t-e.start)/e.size);var c,u,f=0,d=e.length,h=0;for(u=e[e.length-1]>=e[0]?n?r:a:n?i:o;d>f&&h++<100;)c=Math.floor((f+d)/2),u(e[c],t)?f=c+1:d=c;return h>90&&s.log(\"Long binary search...\"),f-1},n.sorterAsc=function(t,e){return t-e},n.sorterDes=function(t,e){return e-t},n.distinctVals=function(t){var e=t.slice();e.sort(n.sorterAsc);for(var r=e.length-1,a=e[r]-e[0]||1,o=a/(r||1)/1e4,i=[e[0]],l=0;r>l;l++)e[l+1]>e[l]+o&&(a=Math.min(a,e[l+1]-e[l]),i.push(e[l+1]));return{vals:i,minDiff:a}},n.roundUp=function(t,e,n){for(var r,a=0,o=e.length-1,i=0,l=n?0:1,s=n?1:0,c=n?Math.ceil:Math.floor;o>a&&i++<100;)r=c((a+o)/2),e[r]<=t?a=r+l:o=r-s;return e[a]}},{\"../lib\":89,\"fast-isnumeric\":11}],98:[function(t,e,n){\"use strict\";e.exports=function(t,e){(t.attr(\"class\")||\"\").split(\" \").forEach(function(e){0===e.inde"
+,
+"xOf(\"cursor-\")&&t.classed(e,!1)}),e&&t.classed(\"cursor-\"+e,!0)}},{}],99:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\");n.aggNums=function(t,e,a,o){var i,l;if(o||(o=a.length),r(e)||(e=!1),Array.isArray(a[0])){for(l=new Array(o),i=0;o>i;i++)l[i]=n.aggNums(t,e,a[i]);a=l}for(i=0;o>i;i++)r(e)?r(a[i])&&(e=t(+e,+a[i])):e=a[i];return e},n.len=function(t){return n.aggNums(function(t){return t+1},0,t)},n.mean=function(t,e){return e||(e=n.len(t)),n.aggNums(function(t,e){return t+e},0,t)/e},n.variance=function(t,e,a){return e||(e=n.len(t)),r(a)||(a=n.mean(t,e)),n.aggNums(function(t,e){return t+Math.pow(e-a,2)},0,t)/e},n.stdev=function(t,e,r){return Math.sqrt(n.variance(t,e,r))},n.interp=function(t,e){if(!r(e))throw\"n should be a finite number\";if(e=e*t.length-.5,0>e)return t[0];if(e>t.length-1)return t[t.length-1];var n=e%1;return n*t[Math.ceil(e)]+(1-n)*t[Math.floor(e)]}},{\"fast-isnumeric\":11}],100:[function(t,e,n){\"use strict\";function r(t,e){return t.node().getBoundingClientRect()[e]}function a(t){return t.replace(/(<|&lt;|&#60;)/g,\"\\\\lt \").replace(/(>|&gt;|&#62;)/g,\"\\\\gt \")}function o(t,e,n){var r=\"math-output-\"+s.Lib.randstr([],64),o=c.select(\"body\").append(\"div\").attr({id:r}).style({visibility:\"hidden\",position:\"absolute\"}).style({\"font-size\":e.fontSize+\"px\"}).text(a(t));MathJax.Hub.Queue([\"Typeset\",MathJax.Hub,o.node()],function(){var e=c.select(\"body\").select(\"#MathJax_SVG_glyphs\");if(o.select(\".MathJax_SVG\").empty()||!o.select(\"svg\").node())u.log(\"There was an error in the tex syntax.\",t),n();else{var r=o.select(\"svg\").node().getBoundingClientRect();n(o.select(\".MathJax_SVG\"),e,r)}o.remove()})}function i(t){for(var e=s.util.html_entity_decode(t),n=e.split(/(<[^<>]*>)/).map(function(t){var e=t.match(/<(\\/?)([^ >]*)\\s*(.*)>/i),n=e&&e[2].toLowerCase(),r=h[n];if(void 0!==r){var a=e[1],o=e[3],i=o.match(/^style\\s*=\\s*\"([^\"]+)\"\\s*/i);if(\"a\"===n){if(a)return\"</a>\";if(\"href\"!==o.substr(0,4).toLowerCase())return\"<a>\";var l=o.substr(4).replace(/[\"']/g,\"\").replace(/=/,\"\").replace(/&/g,\"&amp;\"),c=document.createElement(\"a\");return c.href=l,-1===p.indexOf(c.protocol)?\"<a>\":'<a xlink:show=\"new\" xlink:href=\"'+l+'\">'}if(\"br\"===n)return\"<br>\";if(a)return\"sup\"===n?'</tspan><tspan dy=\"0.42em\">&#x200b;</tspan>':\"sub\"===n?'</tspan><tspan dy=\"-0.21em\">&#x200b;</tspan>':\"</tspan>\";var u=\"<tspan\";return\"sup\"!==n&&\"sub\"!==n||(u=\"&#x200b;\"+u),i&&(i=i[1].replace(/(^|;)\\s*color:/,\"$1 fill:\"),r=(r?r+\";\":\"\")+i),u+(r?' 
style=\"'+r+'\"':\"\")+\">\"}return s.util.xml_entity_encode(t).replace(/</g,\"&lt;\")}),r=[],a=n.indexOf(\"<br>\");a>0;a=n.indexOf(\"<br>\",a+1))r.push(a);var o=0;r.forEach(function(t){for(var e=t+o,r=n.slice(0,e),a=\"\",i=r.length-1;i>=0;i--){var l=r[i].match(/<(\\/?).*>/i);if(l&&\"<br>\"!==r[i]){l[1]||(a=r[i]);break}}a&&(n.splice(e+1,0,a),n.splice(e,0,\"</tspan>\"),o+=2)});var i=n.join(\"\"),l=i.split(/<br>/gi);return l.length>1&&(n=l.map(function(t,e){return'<tspan class=\"line\" dy=\"'+1.3*e+'em\">'+t+\"</tspan>\"})),n.join(\"\")}function l(t,e,n){var r,a,o,i=n.horizontalAlign,l=n.verticalAlign||\"top\",s=t.node().getBoundingClientRect(),c=e.node().getBoundingClientRect();return a=\"bottom\"===l?function(){return s.bottom-r.height}:\"middle\"===l?function(){return s.top+(s.height-r.height)/2}:function(){return s.top},o=\"right\"===i?function(){return s.right-r.width}:\"center\"===i?function(){return s.left+(s.width-r.width)/2}:function(){return s.left},function(){return r=this.node().getBoundingClientRect(),this.style({top:a()-c.top+\"px\",left:o()-c.left+\"px\",\"z-index\":1e3}),this}}var s=t(\"../plotly\"),c=t(\"d3\"),u=t(\"../lib\"),f=t(\"../constants/xmlns_namespaces\"),d=e.exports={};c.selection.prototype.appendSVG=function(t){for(var e=['<svg xmlns=\"',f.svg,'\" ','xmlns:xlink=\"',f.xlink,'\">',t,\"</svg>\"].join(\"\"),n=(new DOMParser).parseFromString(e,\"application/xml\"),r=n.documentElement.firstChild;r;)this.node().appendChild(this.node().ownerDocument.importNode(r,!0)),r=r.nextSibling;return n.querySelector(\"parsererror\")?(u.log(n.querySelector(\"parsererror div\").textContent),null):c.select(this.node().lastChild)},d.html_entity_decode=function(t){var e=c.select(\"body\").append(\"div\").style({display:\"none\"}).html(\"\"),n=t.replace(/(&[^;]*;)/gi,function(t){return\"&lt;\"===t?\"&#60;\":\"&rt;\"===t?\"&#62;\":e.html(t).text()});return e.remove(),n},d.xml_entity_encode=function(t){return t.replace(/&(?!\\w+;|\\#[0-9]+;| \\#x[0-9A-F]+;)/g,\"&amp;\")},d.convertToTspans=function(t,e){function n(){h.empty()||(p=u.attr(\"class\")+\"-math\",h.select(\"svg.\"+p).remove()),t.text(\"\").style({visibility:\"visible\",\"white-space\":\"pre\"}),d=t.appendSVG(l),d||t.text(a),t.select(\"a\").size()&&t.style(\"pointer-events\",\"all\"),e&&e.call(u)}var a=t.text(),l=i(a),u=t,f=!u.attr(\"data-notex\")&&l.match(/([^$]*)([$]+[^$]*[$]+)([^$]*)/),d=a,h=c.select(u.node().parentNode);if(!h.empty()){var p=u.attr(\"class\")?u.attr(\"class\").split(\" \")[0]:\"text\";p+=\"-math\",h.selectAll(\"svg.\"+p).remove(),h.selectAll(\"g.\"+p+\"-group\").remove(),t.style({visibility:null});for(var g=t.node();g&&g.removeAttribute;g=g.parentNode)g.removeAttribute(\"data-bb\");if(f){var v=s.Lib.getPlotDiv(u.node());(v&&v._promises||[]).push(new Promise(function(t){u.style({visibility:\"hidden\"});var a={fontSize:parseInt(u.style(\"font-size\"),10)};o(f[2],a,function(a,o,i){h.selectAll(\"svg.\"+p).remove(),h.selectAll(\"g.\"+p+\"-group\").remove();var l=a&&a.select(\"svg\");if(!l||!l.node())return n(),void t();var s=h.append(\"g\").classed(p+\"-group\",!0).attr({\"pointer-events\":\"none\"});s.node().appendChild(l.node()),o&&o.node()&&l.node().insertBefore(o.node().cloneNode(!0),l.node().firstChild),l.attr({\"class\":p,height:i.height,preserveAspectRatio:\"xMinYMin meet\"}).style({overflow:\"visible\",\"pointer-events\":\"none\"});var c=u.style(\"fill\")||\"black\";l.select(\"g\").attr({fill:c,stroke:c});var 
f=r(l,\"width\"),d=r(l,\"height\"),g=+u.attr(\"x\")-f*{start:0,middle:.5,end:1}[u.attr(\"text-anchor\")||\"start\"],v=parseInt(u.style(\"font-size\"),10)||r(u,\"height\"),m=-v/4;\"y\"===p[0]?(s.attr({transform:\"rotate(\"+[-90,+u.attr(\"x\"),+u.attr(\"y\")]+\") translate(\"+[-f/2,m-d/2]+\")\"}),l.attr({x:+u.attr(\"x\"),y:+u.attr(\"y\")})):\"l\"===p[0]?l.attr({x:u.attr(\"x\"),y:m-d/2}):\"a\"===p[0]?l.attr({x:0,y:m}):l.attr({x:g,y:+u.attr(\"y\")+m-d/2}),e&&e.call(u,s),t(s)})}))}else n();return t}};var h={sup:'font-size:70%\" dy=\"-0.6em',sub:'font-size:70%\" dy=\"0.3em',b:\"font-weight:bold\",i:\"font-style:italic\",a:\"\",span:\"\",br:\"\",em:\"font-style:italic;font-weight:bold\"},p=[\"http:\",\"https:\",\"mailto:\"],g=new RegExp(\"</?(\"+Object.keys(h).join(\"|\")+\")( [^>]*)?/?>\",\"g\");d.plainText=function(t){return(t||\"\").replace(g,\" \")},d.makeEditable=function(t,e,n){function r(){o(),i.style({opacity:0});var t,e=d.attr(\"class\");t=e?\".\"+e.split(\" \")[0]+\"-math-group\":\"[class*=-math-group]\",t&&c.select(i.node().parentNode).select(t).style({opacity:0})}function a(t){var e=t.node(),n=document.createRange();n.selectNodeContents(e);var r=window.getSelection();r.removeAllRanges(),r.addRange(n),e.focus()}function o(){var t=c.select(s.Lib.getPlotDiv(i.node())),e=t.select(\".svg-container\"),r=e.append(\"div\");r.classed(\"plugin-editable editable\",!0).style({position:\"absolute\",\"font-family\":i.style(\"font-family\")||\"Arial\",\"font-size\":i.style(\"font-size\")||12,color:n.fill||i.style(\"fill\")||\"black\",opacity:1,\"background-color\":n.background||\"transparent\",outline:\"#ffffff33 1px solid\",margin:[-parseFloat(i.style(\"font-size\"))/8+1,0,0,-1].join(\"px \")+\"px\",padding:\"0\",\"box-sizing\":\"border-box\"}).attr({contenteditable:!0}).text(n.text||i.attr(\"data-unformatted\")).call(l(i,e,n)).on(\"blur\",function(){i.text(this.textContent).style({opacity:1});var t,e=c.select(this).attr(\"class\");t=e?\".\"+e.split(\" \")[0]+\"-math-group\":\"[class*=-math-group]\",t&&c.select(i.node().parentNode).select(t).style({opacity:0});var n=this.textContent;c.select(this).transition().duration(0).remove(),c.select(document).on(\"mouseup\",null),u.edit.call(i,n)}).on(\"focus\",function(){var t=this;c.select(document).on(\"mouseup\",function(){return c.event.target===t?!1:void(document.activeElement===r.node()&&r.node().blur())})}).on(\"keyup\",function(){27===c.event.which?(i.style({opacity:1}),c.select(this).style({opacity:0}).on(\"blur\",function(){return!1}).transition().remove(),u.cancel.call(i,this.textContent)):(u.input.call(i,this.textContent),c.select(this).call(l(i,e,n)))}).on(\"keydown\",function(){13===c.event.which&&this.blur()}).call(a)}n||(n={});var i=this,u=c.dispatch(\"edit\",\"input\",\"cancel\"),f=c.select(this.node()).style({\"pointer-events\":\"all\"}),d=e||f;return e&&f.style({\"pointer-events\":\"none\"}),n.immediate?r():d.on(\"click\",r),c.rebind(this,u,\"on\")}},{\"../constants/xmlns_namespaces\":82,\"../lib\":89,\"../plotly\":107,d3:9}],101:[function(t,e,n){\"use strict\";function r(t){var e;if(\"string\"==typeof t){if(e=document.getElementById(t),null===e)throw new Error(\"No DOM element with id '\"+t+\"' exists on the page.\");return e}if(null===t||void 0===t)throw new Error(\"DOM element provided is null or undefined\");return t}function a(t,e){t._fullLayout._paperdiv.style(\"background\",\"white\"),O.defaultConfig.setBackground(t,e)}function o(t,e){t._context||(t._context=P.extendFlat({},O.defaultConfig));var 
n=t._context;e&&(Object.keys(e).forEach(function(t){t in n&&(\"setBackground\"===t&&\"opaque\"===e[t]?n[t]=a:n[t]=e[t])}),e.plot3dPixelRatio&&!n.plotGlPixelRatio&&(n.plotGlPixelRatio=n.plot3dPixelRatio)),n.staticPlot&&(n.editable=!1,n.autosizable=!1,n.scrollZoom=!1,n.doubleClick=!1,n.showTips=!1,n.showLink=!1,n.displayModeBar=!1)}function i(t,e,n){var r=S.select(t).selectAll(\".plot-container\").data([0]);r.enter().insert(\"div\",\":first-child\").classed(\"plot-container plotly\",!0);var a=r.selectAll(\".svg-container\").data([0]);a.enter().append(\"div\").classed(\"svg-container\",!0).style(\"position\",\"relative\"),a.html(\"\"),e&&(t.data=e),n&&(t.layout=n),O.micropolar.manager.fillLayout(t),\"initial\"===t._fullLayout.autosize&&t._context.autosizable&&(w(t,{}),t._fullLayout.autosize=n.autosize=!0),a.style({width:t._fullLayout.width+\"px\",height:t._fullLayout.height+\"px\"}),t.framework=O.micropolar.manager.framework(t),t.framework({data:t.data,layout:"
+,
+"t.layout},a.node()),t.framework.setUndoPoint();var o=t.framework.svg(),i=1,l=t._fullLayout.title;\"\"!==l&&l||(i=0);var s=\"Click to enter title\",c=function(){this.call(O.util.convertToTspans)},u=o.select(\".title-group text\").call(c);if(t._context.editable){u.attr({\"data-unformatted\":l}),l&&l!==s||(i=.2,u.attr({\"data-unformatted\":s}).text(s).style({opacity:i}).on(\"mouseover.opacity\",function(){S.select(this).transition().duration(100).style(\"opacity\",1)}).on(\"mouseout.opacity\",function(){S.select(this).transition().duration(1e3).style(\"opacity\",0)}));var f=function(){this.call(O.util.makeEditable).on(\"edit\",function(e){t.framework({layout:{title:e}}),this.attr({\"data-unformatted\":e}).text(e).call(c),this.call(f)}).on(\"cancel\",function(){var t=this.attr(\"data-unformatted\");this.text(t).call(c)})};u.call(f)}return t._context.setBackground(t,t._fullLayout.paper_bgcolor),I.addLinks(t),Promise.resolve()}function l(t){var e,n;t||(t={}),t.xaxis1&&(t.xaxis||(t.xaxis=t.xaxis1),delete t.xaxis1),t.yaxis1&&(t.yaxis||(t.yaxis=t.yaxis1),delete t.yaxis1);var r=O.Axes.list({_fullLayout:t});for(e=0;e<r.length;e++){var a=r[e];a.anchor&&\"free\"!==a.anchor&&(a.anchor=O.Axes.cleanId(a.anchor)),a.overlaying&&(a.overlaying=O.Axes.cleanId(a.overlaying)),a.type||(a.isdate?a.type=\"date\":a.islog?a.type=\"log\":a.isdate===!1&&a.islog===!1&&(a.type=\"linear\")),\"withzero\"!==a.autorange&&\"tozero\"!==a.autorange||(a.autorange=!0,a.rangemode=\"tozero\"),delete a.islog,delete a.isdate,delete a.categories,f(a,\"domain\")&&delete a.domain,void 0!==a.autotick&&(void 0===a.tickmode&&(a.tickmode=a.autotick?\"auto\":\"linear\"),delete a.autotick)}void 0===t.annotations||Array.isArray(t.annotations)||(P.warn(\"Annotations must be an array.\"),delete t.annotations);var o=(t.annotations||[]).length;for(e=0;o>e;e++){var i=t.annotations[e];i.ref&&(\"paper\"===i.ref?(i.xref=\"paper\",i.yref=\"paper\"):\"data\"===i.ref&&(i.xref=\"x\",i.yref=\"y\"),delete i.ref),s(i,\"xref\"),s(i,\"yref\")}void 0===t.shapes||Array.isArray(t.shapes)||(P.warn(\"Shapes must be an array.\"),delete t.shapes);var l=(t.shapes||[]).length;for(e=0;l>e;e++){var c=t.shapes[e];s(c,\"xref\"),s(c,\"yref\")}var u=t.legend;u&&(u.x>3?(u.x=1.02,u.xanchor=\"left\"):u.x<-2&&(u.x=-.02,u.xanchor=\"right\"),u.y>3?(u.y=1.02,u.yanchor=\"bottom\"):u.y<-2&&(u.y=-.02,u.yanchor=\"top\")),\"rotate\"===t.dragmode&&(t.dragmode=\"orbit\"),t.scene1&&(t.scene||(t.scene=t.scene1),delete t.scene1);var d=I.getSubplotIds(t,\"gl3d\");for(e=0;e<d.length;e++){var h=t[d[e]],p=h.cameraposition;if(Array.isArray(p)&&4===p[0].length){var g=p[0],v=p[1],m=p[2],y=E([],g),x=[];for(n=0;3>n;++n)x[n]=v[e]+m*y[2+4*n];h.camera={eye:{x:x[0],y:x[1],z:x[2]},center:{x:v[0],y:v[1],z:v[2]},up:{x:y[1],y:y[5],z:y[9]}},delete h.cameraposition}}return j.clean(t),t}function s(t,e){var n=t[e],r=e.charAt(0);n&&\"paper\"!==n&&(t[e]=O.Axes.cleanId(n,r))}function c(t,e){for(var n=[],r=(t.concat(Array.isArray(e)?e:[]).filter(function(t){return\"uid\"in t}).map(function(t){return t.uid})),a=0;a<t.length;a++){var o,i=t[a];if(!(\"uid\"in i)||-1!==n.indexOf(i.uid)){var l;for(o=0;100>o&&(l=P.randstr(r),-1!==n.indexOf(l));o++);i.uid=P.randstr(r),r.push(i.uid)}if(n.push(i.uid),\"histogramy\"===i.type&&\"xbins\"in i&&!(\"ybins\"in i)&&(i.ybins=i.xbins,delete i.xbins),i.error_y&&\"opacity\"in i.error_y){var s=j.defaults,c=i.error_y.color||(I.traceIs(i,\"bar\")?j.defaultLine:s[a%s.length]);i.error_y.color=j.addOpacity(j.rgb(c),j.opacity(c)*i.error_y.opacity),delete i.error_y.opacity}if(\"bardir\"in 
i&&(\"h\"!==i.bardir||!I.traceIs(i,\"bar\")&&\"histogram\"!==i.type.substr(0,9)||(i.orientation=\"h\",b(i)),delete i.bardir),\"histogramy\"===i.type&&b(i),\"histogramx\"!==i.type&&\"histogramy\"!==i.type||(i.type=\"histogram\"),\"scl\"in i&&(i.colorscale=i.scl,delete i.scl),\"reversescl\"in i&&(i.reversescale=i.reversescl,delete i.reversescl),i.xaxis&&(i.xaxis=O.Axes.cleanId(i.xaxis,\"x\")),i.yaxis&&(i.yaxis=O.Axes.cleanId(i.yaxis,\"y\")),I.traceIs(i,\"gl3d\")&&i.scene&&(i.scene=I.subplotsRegistry.gl3d.cleanId(i.scene)),I.traceIs(i,\"pie\")||(Array.isArray(i.textposition)?i.textposition=i.textposition.map(u):i.textposition&&(i.textposition=u(i.textposition))),I.traceIs(i,\"2dMap\")&&(\"YIGnBu\"===i.colorscale&&(i.colorscale=\"YlGnBu\"),\"YIOrRd\"===i.colorscale&&(i.colorscale=\"YlOrRd\")),I.traceIs(i,\"markerColorscale\")&&i.marker){var d=i.marker;\"YIGnBu\"===d.colorscale&&(d.colorscale=\"YlGnBu\"),\"YIOrRd\"===d.colorscale&&(d.colorscale=\"YlOrRd\")}if(\"surface\"===i.type&&P.isPlainObject(i.contours)){var h=[\"x\",\"y\",\"z\"];for(o=0;o<h.length;o++){var p=i.contours[h[o]];P.isPlainObject(p)&&(p.highlightColor&&(p.highlightcolor=p.highlightColor,delete p.highlightColor),p.highlightWidth&&(p.highlightwidth=p.highlightWidth,delete p.highlightWidth))}}f(i,\"line\")&&delete i.line,\"marker\"in i&&(f(i.marker,\"line\")&&delete i.marker.line,f(i,\"marker\")&&delete i.marker),j.clean(i)}}function u(t){var e=\"middle\",n=\"center\";return-1!==t.indexOf(\"top\")?e=\"top\":-1!==t.indexOf(\"bottom\")&&(e=\"bottom\"),-1!==t.indexOf(\"left\")?n=\"left\":-1!==t.indexOf(\"right\")&&(n=\"right\"),e+\" \"+n}function f(t,e){return e in t&&\"object\"==typeof t[e]&&0===Object.keys(t[e]).length}function d(t){var e,n=O.Axes.list(t),r=t._fullData,a=t._fullLayout,o=t.calcdata=new Array(r.length);for(t.firstscatter=!0,t.numboxes=0,t._hmpixcount=0,t._hmlumcount=0,a._piecolormap={},a._piedefaultcolorcount=0,e=0;e<n.length;e++)n[e]._categories=n[e]._initialCategories.slice();for(e=0;e<r.length;e++){var i=r[e],l=i._module,s=[];l&&i.visible===!0&&l.calc&&(s=l.calc(t,i)),Array.isArray(s)&&s[0]||(s=[{x:!1,y:!1}]),s[0].t||(s[0].t={}),s[0].trace=i,o[e]=s}}function h(t,e){var n,r,a=e+1,o=[];for(n=0;n<t.length;n++)r=t[n],0>r?o.push(a+r):o.push(r);return o}function p(t,e,n){var r,a;for(r=0;r<e.length;r++){if(a=e[r],a!==parseInt(a,10))throw new Error(\"all values in \"+n+\" must be integers\");if(a>=t.data.length||a<-t.data.length)throw new Error(n+\" must be valid indices for gd.data.\");if(e.indexOf(a,r+1)>-1||a>=0&&e.indexOf(-t.data.length+a)>-1||0>a&&e.indexOf(t.data.length+a)>-1)throw new Error(\"each index in \"+n+\" must be unique.\")}}function g(t,e,n){if(!Array.isArray(t.data))throw new Error(\"gd.data must be an array.\");if(\"undefined\"==typeof e)throw new Error(\"currentIndices is a required argument.\");if(Array.isArray(e)||(e=[e]),p(t,e,\"currentIndices\"),\"undefined\"==typeof n||Array.isArray(n)||(n=[n]),\"undefined\"!=typeof n&&p(t,n,\"newIndices\"),\"undefined\"!=typeof n&&e.length!==n.length)throw new Error(\"current and new indices must be of equal length.\")}function v(t,e,n){var r,a;if(!Array.isArray(t.data))throw new Error(\"gd.data must be an array.\");if(\"undefined\"==typeof e)throw new Error(\"traces must be defined.\");for(Array.isArray(e)||(e=[e]),r=0;r<e.length;r++)if(a=e[r],\"object\"!=typeof a||Array.isArray(a)||null===a)throw new Error(\"all values in traces array must be non-array objects\");if(\"undefined\"==typeof n||Array.isArray(n)||(n=[n]),\"undefined\"!=typeof 
n&&n.length!==e.length)throw new Error(\"if indices is specified, traces.length must equal indices.length\")}function m(t,e,n,r){var a=P.isPlainObject(r);if(!Array.isArray(t.data))throw new Error(\"gd.data must be an array\");if(!P.isPlainObject(e))throw new Error(\"update must be a key:value object\");if(\"undefined\"==typeof n)throw new Error(\"indices must be an integer or array of integers\");p(t,n,\"indices\");for(var o in e){if(!Array.isArray(e[o])||e[o].length!==n.length)throw new Error(\"attribute \"+o+\" must be an array of length equal to indices array length\");if(a&&(!(o in r)||!Array.isArray(r[o])||r[o].length!==e[o].length))throw new Error(\"when maxPoints is set as a key:value object it must contain a 1:1 corrispondence with the keys and number of traces in the update object\")}}function y(t,e,n,r){var a,o,i,l,s,c=P.isPlainObject(r),u=[];Array.isArray(n)||(n=[n]),n=h(n,t.data.length-1);for(var f in e)for(var d=0;d<n.length;d++){if(a=t.data[n[d]],i=P.nestedProperty(a,f),o=i.get(),l=e[f][d],!Array.isArray(l))throw new Error(\"attribute: \"+f+\" index: \"+d+\" must be an array\");if(!Array.isArray(o))throw new Error(\"cannot extend missing or non-array attribute: \"+f);s=c?r[f][d]:r,C(s)||(s=-1),u.push({prop:i,target:o,insert:l,maxp:Math.floor(s)})}return u}function x(t,e,n,r,a,o){m(t,e,n,r);for(var i,l,s,c=y(t,e,n,r),u=[],f={},d={},h=0;h<c.length;h++)l=c[h].prop,s=c[h].maxp,i=a(c[h].target,c[h].insert),s>=0&&s<i.length&&(u=o(i,s)),s=c[h].target.length,l.set(i),Array.isArray(f[l.astr])||(f[l.astr]=[]),Array.isArray(d[l.astr])||(d[l.astr]=[]),f[l.astr].push(u),d[l.astr].push(s);return{update:f,maxPoints:d}}function b(t){var e;if(P.swapAttrs(t,[\"?\",\"?0\",\"d?\",\"?bins\",\"nbins?\",\"autobin?\",\"?src\",\"error_?\"]),Array.isArray(t.z)&&Array.isArray(t.z[0])&&(t.transpose?delete t.transpose:t.transpose=!0),t.error_x&&t.error_y){var n=t.error_y,r=\"copy_ystyle\"in n?n.copy_ystyle:!(n.color||n.thickness||n.width);P.swapAttrs(t,[\"error_?.copy_ystyle\"]),r&&P.swapAttrs(t,[\"error_?.color\",\"error_?.thickness\",\"error_?.width\"])}if(t.hoverinfo){var a=t.hoverinfo.split(\"+\");for(e=0;e<a.length;e++)\"x\"===a[e]?a[e]=\"y\":\"y\"===a[e]&&(a[e]=\"x\");t.hoverinfo=a.join(\"+\")}}function _(t){var e,n={left:0,right:0,bottom:0,top:0};if(t)for(e in t)t.hasOwnProperty(e)&&(n.left+=t[e].left||0,n.right+=t[e].right||0,n.bottom+=t[e].bottom||0,n.top+=t[e].top||0);return n}function w(t,e){var n,r,a,o=t._fullLayout,i=t._context;if(t.emit(\"plotly_autosize\"),t._context.fillFrame)a=window.innerWidth,r=window.innerHeight,document.body.style.overflow=\"hidden\";else if(C(i.frameMargins)&&i.frameMargins>0){var l=_(t._boundingBoxMargins),s=l.left+l.right,c=l.bottom+l.top,u=o._container.node().getBoundingClientRect(),f=1-2*i.frameMargins;a=Math.round(f*(u.width-s)),r=Math.round(f*(u.height-c))}else n=window.getComputedStyle(t),r=parseFloat(n.height)||o.height,a=parseFloat(n.width)||o.width;return Math.abs(o.width-a)>1||Math.abs(o.height-r)>1?(o.height=t.layout.height=r,o.width=t.layout.width=a):\"initial\"!==o.autosize&&(delete e.autosize,o.autosize=t.layout.autosize=!0),I.sanitizeMargins(o),e}function k(t){var e=S.select(t),n=t._fullLayout;if(n._container=e.selectAll(\".pl"
+,
+"ot-container\").data([0]),n._container.enter().insert(\"div\",\":first-child\").classed(\"plot-container\",!0).classed(\"plotly\",!0),n._paperdiv=n._container.selectAll(\".svg-container\").data([0]),n._paperdiv.enter().append(\"div\").classed(\"svg-container\",!0).style(\"position\",\"relative\"),\"initial\"===n.autosize&&(w(t,{}),n.autosize=!0,t.layout.autosize=!0),n._glcontainer=n._paperdiv.selectAll(\".gl-container\").data([0]),n._glcontainer.enter().append(\"div\").classed(\"gl-container\",!0),n._geocontainer=n._paperdiv.selectAll(\".geo-container\").data([0]),n._geocontainer.enter().append(\"div\").classed(\"geo-container\",!0),n._paperdiv.selectAll(\".main-svg\").remove(),n._paper=n._paperdiv.insert(\"svg\",\":first-child\").classed(\"main-svg\",!0),n._toppaper=n._paperdiv.append(\"svg\").classed(\"main-svg\",!0),!n._uid){var r=[];S.selectAll(\"defs\").each(function(){this.id&&r.push(this.id.split(\"-\")[1])}),n._uid=P.randstr(r)}n._paperdiv.selectAll(\".main-svg\").attr(G.svgAttrs),n._defs=n._paper.append(\"defs\").attr(\"id\",\"defs-\"+n._uid),n._topdefs=n._toppaper.append(\"defs\").attr(\"id\",\"topdefs-\"+n._uid),n._draggers=n._paper.append(\"g\").classed(\"draglayer\",!0);var a=n._paper.append(\"g\").classed(\"layer-below\",!0);n._imageLowerLayer=a.append(\"g\").classed(\"imagelayer\",!0),n._shapeLowerLayer=a.append(\"g\").classed(\"shapelayer\",!0);var o=O.Axes.getSubplots(t);o.join(\"\")!==Object.keys(t._fullLayout._plots||{}).join(\"\")&&M(t,o),n._has(\"cartesian\")&&A(t,o),n._ternarylayer=n._paper.append(\"g\").classed(\"ternarylayer\",!0);var i=n._paper.selectAll(\".layer-subplot\");n._imageSubplotLayer=i.selectAll(\".imagelayer\"),n._shapeSubplotLayer=i.selectAll(\".shapelayer\");var l=n._paper.append(\"g\").classed(\"layer-above\",!0);n._imageUpperLayer=l.append(\"g\").classed(\"imagelayer\",!0),n._shapeUpperLayer=l.append(\"g\").classed(\"shapelayer\",!0),n._pielayer=n._paper.append(\"g\").classed(\"pielayer\",!0),n._glimages=n._paper.append(\"g\").classed(\"glimages\",!0),n._geoimages=n._paper.append(\"g\").classed(\"geoimages\",!0),n._infolayer=n._toppaper.append(\"g\").classed(\"infolayer\",!0),\n"
+,
+"n._zoomlayer=n._toppaper.append(\"g\").classed(\"zoomlayer\",!0),n._hoverlayer=n._toppaper.append(\"g\").classed(\"hoverlayer\",!0),t.emit(\"plotly_framework\");var s=P.syncOrAsync([L,function(){return O.Axes.doTicks(t,\"redraw\")},R.init],t);return s&&s.then&&t._promises.push(s),s}function M(t,e){function n(e,n){return function(){return O.Axes.getFromId(t,e,n)}}for(var r,a,o=t._fullLayout._plots={},i=0;i<e.length;i++)r=e[i],a=o[r]={},a.id=r,a.x=n(r,\"x\"),a.y=n(r,\"y\"),a.xaxis=a.x(),a.yaxis=a.y()}function A(t,e){function n(t){t.append(\"g\").classed(\"imagelayer\",!0),t.append(\"g\").classed(\"maplayer\",!0),t.append(\"g\").classed(\"barlayer\",!0),t.append(\"g\").classed(\"boxlayer\",!0),t.append(\"g\").classed(\"scatterlayer\",!0)}var r=t._fullLayout,a=[];r._paper.selectAll(\"g.subplot\").data(e).enter().append(\"g\").classed(\"subplot\",!0).each(function(o){var i=r._plots[o],l=i.plotgroup=S.select(this).classed(o,!0),s=i.xaxis,c=i.yaxis;i.overlays=[];var u=O.Axes.getFromId(t,s.overlaying)||s;u!==s&&u.overlaying&&(u=s,s.overlaying=!1);var f=O.Axes.getFromId(t,c.overlaying)||c;f!==c&&f.overlaying&&(f=c,c.overlaying=!1);var d=u._id+f._id;if(d!==o&&-1!==e.indexOf(d))i.mainplot=d,a.push(i),s.domain=u.domain.slice(),c.domain=f.domain.slice();else{i.bg=l.append(\"rect\").style(\"stroke-width\",0);var h=l.append(\"g\").classed(\"layer-subplot\",!0);i.shapelayer=h.append(\"g\").classed(\"shapelayer\",!0),i.imagelayer=h.append(\"g\").classed(\"imagelayer\",!0),i.gridlayer=l.append(\"g\"),i.overgrid=l.append(\"g\"),i.zerolinelayer=l.append(\"g\"),i.overzero=l.append(\"g\"),i.plot=l.append(\"g\").call(n),i.overplot=l.append(\"g\"),i.xlines=l.append(\"path\"),i.ylines=l.append(\"path\"),i.overlines=l.append(\"g\"),i.xaxislayer=l.append(\"g\"),i.yaxislayer=l.append(\"g\"),i.overaxes=l.append(\"g\")}i.draglayer=r._draggers.append(\"g\")}),a.forEach(function(t){var e=r._plots[t.mainplot];e.overlays.push(t),t.gridlayer=e.overgrid.append(\"g\"),t.zerolinelayer=e.overzero.append(\"g\"),t.plot=e.overplot.append(\"g\").call(n),t.xlines=e.overlines.append(\"path\"),t.ylines=e.overlines.append(\"path\"),t.xaxislayer=e.overaxes.append(\"g\"),t.yaxislayer=e.overaxes.append(\"g\")}),e.forEach(function(t){var e=r._plots[t];e.xlines.style(\"fill\",\"none\").classed(\"crisp\",!0),e.ylines.style(\"fill\",\"none\").classed(\"crisp\",!0)})}function L(t){return P.syncOrAsync([I.doAutoMargin,T],t)}function T(t){var e,n=t._fullLayout,r=n._size,a=O.Axes.list(t);for(e=0;e<a.length;e++)a[e]._linepositions={};n._paperdiv.style({width:n.width+\"px\",height:n.height+\"px\"}).selectAll(\".main-svg\").call(q.setSize,n.width,n.height),t._context.setBackground(t,n.paper_bgcolor);var o=[];return n._paper.selectAll(\"g.subplot\").each(function(e){var a=n._plots[e],i=O.Axes.getFromId(t,e,\"x\"),l=O.Axes.getFromId(t,e,\"y\");i.setScale(),l.setScale(),a.bg&&a.bg.call(q.setRect,i._offset-r.p,l._offset-r.p,i._length+2*r.p,l._length+2*r.p).call(j.fill,n.plot_bgcolor),a.clipId=\"clip\"+n._uid+e+\"plot\";var s=n._defs.selectAll(\"g.clips\").selectAll(\"#\"+a.clipId).data([0]);s.enter().append(\"clipPath\").attr({\"class\":\"plotclip\",id:a.clipId}).append(\"rect\"),s.selectAll(\"rect\").attr({width:i._length,height:l._length}),a.plot.call(P.setTranslate,i._offset,l._offset),a.plot.call(q.setClipUrl,a.clipId);var 
c=q.crispRound(t,i.linewidth,1),u=q.crispRound(t,l.linewidth,1),f=r.p+u,d=\"M\"+-f+\",\",h=\"h\"+(i._length+2*f),p=\"free\"===i.anchor&&-1===o.indexOf(i._id),g=r.h*(1-(i.position||0))+c/2%1,v=i.anchor===l._id&&(i.mirror||\"top\"!==i.side)||\"all\"===i.mirror||\"allticks\"===i.mirror||i.mirrors&&i.mirrors[l._id+\"bottom\"],m=l._length+r.p+c/2,y=i.anchor===l._id&&(i.mirror||\"top\"===i.side)||\"all\"===i.mirror||\"allticks\"===i.mirror||i.mirrors&&i.mirrors[l._id+\"top\"],x=-r.p-c/2,b=r.p,_=v?0:c,w=y?0:c,k=\",\"+(-b-w)+\"v\"+(l._length+2*b+w+_),M=\"free\"===l.anchor&&-1===o.indexOf(l._id),A=r.w*(l.position||0)+u/2%1,L=l.anchor===i._id&&(l.mirror||\"right\"!==l.side)||\"all\"===l.mirror||\"allticks\"===l.mirror||l.mirrors&&l.mirrors[i._id+\"left\"],T=-r.p-u/2,z=l.anchor===i._id&&(l.mirror||\"right\"===l.side)||\"all\"===l.mirror||\"allticks\"===l.mirror||l.mirrors&&l.mirrors[i._id+\"right\"],S=i._length+r.p+u/2;i._linepositions[e]=[v?m:void 0,y?x:void 0,p?g:void 0],i.anchor===l._id?i._linepositions[e][3]=\"top\"===i.side?x:m:p&&(i._linepositions[e][3]=g),l._linepositions[e]=[L?T:void 0,z?S:void 0,M?A:void 0],l.anchor===i._id?l._linepositions[e][3]=\"right\"===l.side?S:T:M&&(l._linepositions[e][3]=A);var E=\"translate(\"+i._offset+\",\"+l._offset+\")\",C=E,N=E;p&&(C=\"translate(\"+i._offset+\",\"+r.t+\")\",x+=l._offset-r.t,m+=l._offset-r.t),M&&(N=\"translate(\"+r.l+\",\"+l._offset+\")\",T+=i._offset-r.l,S+=i._offset-r.l),a.xlines.attr(\"transform\",C).attr(\"d\",(v?d+m+h:\"\")+(y?d+x+h:\"\")+(p?d+g+h:\"\")||\"M0,0\").style(\"stroke-width\",c+\"px\").call(j.stroke,i.showline?i.linecolor:\"rgba(0,0,0,0)\"),a.ylines.attr(\"transform\",N).attr(\"d\",(L?\"M\"+T+k:\"\")+(z?\"M\"+S+k:\"\")+(M?\"M\"+A+k:\"\")||\"M0,0\").attr(\"stroke-width\",u+\"px\").call(j.stroke,l.showline?l.linecolor:\"rgba(0,0,0,0)\"),a.xaxislayer.attr(\"transform\",C),a.yaxislayer.attr(\"transform\",N),a.gridlayer.attr(\"transform\",E),a.zerolinelayer.attr(\"transform\",E),a.draglayer.attr(\"transform\",E),p&&o.push(i._id),M&&o.push(l._id)}),O.Axes.makeClipPaths(t),z(t),X(t),t._promises.length&&Promise.all(t._promises)}function z(t){var e=t._fullLayout;U.draw(t,\"gtitle\",{propContainer:e,propName:\"title\",dfltName:\"Plot\",attributes:{x:e.width/2,y:e._size.t/2,\"text-anchor\":\"middle\"}})}var S=t(\"d3\"),E=t(\"gl-mat4/fromQuat\"),C=t(\"fast-isnumeric\"),O=t(\"../plotly\"),P=t(\"../lib\"),N=t(\"../lib/events\"),D=t(\"../lib/queue\"),I=t(\"../plots/plots\"),R=t(\"../plots/cartesian/graph_interact\"),j=t(\"../components/color\"),q=t(\"../components/drawing\"),F=t(\"../components/errorbars\"),B=t(\"../components/images\"),H=t(\"../components/legend\"),V=t(\"../components/rangeslider\"),Z=t(\"../components/rangeselector\"),Y=t(\"../components/shapes\"),U=t(\"../components/titles\"),X=t(\"../components/modebar/manage\"),G=t(\"../constants/xmlns_namespaces\");O.plot=function(t,e,n,a){function s(){var e,n,r,a=t.calcdata;for(H.draw(t),Z.draw(t),e=0;e<a.length;e++)n=a[e],r=n[0].trace,r.visible===!0&&r._module.colorbar?r._module.colorbar(t,n):I.autoMargin(t,\"cb\"+r.uid);return I.doAutoMargin(t),I.previousPromises(t)}function u(){var e=JSON.stringify(A._size)===E?[]:[s,L];return P.syncOrAsync(e.concat(R.init),t)}function f(){if(T){for(var e,n,r=I.getSubplotIds(A,\"cartesian\"),a=A._modules,o=0;o<r.length;o++){e=A._plots[r[o]];for(var i=0;i<a.length;i++)n=a[i],n.setPositions&&n.setPositions(t,e)}return F.calc(t),P.syncOrAsync([Y.calcAutorange,O.Annotations.calcAutorange,h],t)}}function h(){for(var 
e=O.Axes.list(t,\"\",!0),n=0;n<e.length;n++)O.Axes.doAutoRange(e[n])}function p(){return O.Axes.doTicks(t,\"redraw\")}function g(){var e,n=t.calcdata;for(e=0;e<n.length;e++){var r=n[e][0].trace,a=r.visible===!0,o=r.uid;a&&I.traceIs(r,\"2dMap\")||A._paper.selectAll(\".hm\"+o+\",.contour\"+o+\",#clip\"+o).remove(),a&&r._module.colorbar||A._infolayer.selectAll(\".cb\"+o).remove()}var i=A._basePlotModules;for(e=0;e<i.length;e++)i[e].plot(t);return I.style(t),Y.drawAll(t),O.Annotations.drawAll(t),I.addLinks(t),t._replotting=!1,I.previousPromises(t)}function v(){Y.drawAll(t),B.draw(t),O.Annotations.drawAll(t),H.draw(t),V.draw(t),Z.draw(t)}function m(){t.emit(\"plotly_afterplot\")}t=r(t),N.init(t);var y=N.triggerHandler(t,\"plotly_beforeplot\",[e,n,a]);if(y===!1)return Promise.reject();e||n||P.isPlotDiv(t)||P.warn(\"Calling Plotly.plot as if redrawing but this container doesn't yet have a plot.\",t),o(t,a),n||(n={}),S.select(t).classed(\"js-plotly-plot\",!0),q.makeTester(t),t._promises=[];var x=0===(t.data||[]).length&&Array.isArray(e);if(Array.isArray(e)&&(c(e,t.data),x?t.data=e:t.data.push.apply(t.data,e),t.empty=!1),t.layout&&!x||(t.layout=l(n)),t._dragging)return t._replotPending=!0,Promise.reject();if(t._replotPending=!1,I.supplyDefaults(t),e&&e[0]&&e[0].r)return i(t,e,n);t._replotting=!0;var b=t._fullData.length>0,_=O.Axes.getSubplots(t).join(\"\"),w=Object.keys(t._fullLayout._plots||{}).join(\"\"),M=w===_;b?t.framework===k&&!x&&M||(t.framework=k,k(t)):M?x&&k(t):(t.framework=k,k(t)),x&&O.Axes.saveRangeInitial(t);var A=t._fullLayout,T=!t.calcdata||t.calcdata.length!==(t.data||[]).length;T&&d(t);for(var z=0;z<t.calcdata.length;z++)t.calcdata[z][0].trace=t._fullData[z];var E=JSON.stringify(A._size);return P.syncOrAsync([I.previousPromises,s,u,f,L,p,g,v],t,m),Promise.all(t._promises).then(function(){return t},function(){P.log(\"Clearing previous rejected promises from queue.\"),t._promises=[]})},O.redraw=function(t){return t=r(t),P.isPlotDiv(t)?(t.calcdata=void 0,O.plot(t).then(function(){return t.emit(\"plotly_redraw\"),t})):void P.warn(\"This element is not a Plotly plot.\",t)},O.newPlot=function(t,e,n,a){return t=r(t),I.cleanPlot([],{},t._fullData||{},t._fullLayout||{}),I.purge(t),O.plot(t,e,n,a)},O.extendTraces=function $(t,e,n,a){t=r(t);var o=x(t,e,n,a,function(t,e){return t.concat(e)},function(t,e){return t.splice(0,t.length-e)}),i=O.redraw(t),l=[t,o.update,n,o.maxPoints];return D.add(t,O.prependTraces,l,$,arguments),i},O.prependTraces=function Q(t,e,n,a){t=r(t);var o=x(t,e,n,a,function(t,e){return e.concat(t)},function(t,e){return t.splice(e,t.length)}),i=O.redraw(t),l=[t,o.update,n,o.maxPoints];return D.add(t,O.extendTraces,l,Q,arguments),i},O.addTraces=function W(t,e,n){t=r(t);var a,o,i=[],l=O.deleteTraces,s=W,u=[t,i],f=[t,e];for(v(t,e,n),Array.isArray(e)||(e=[e]),c(e,t.data),a=0;a<e.length;a+=1)t.data.push(e[a]);for(a=0;a<e.length;a++)i.push(-e.length+a);if(\"undefined\"==typeof n)return o=O.redraw(t),D.add(t,l,u,s,f),o;Array.isArray(n)||(n=[n]);try{g(t,i,n)}catch(d){throw t.data.splice(t.data.length-e.length,e.length),d}return D.startSequence(t),D.add(t,l,u,s,f),o=O.moveTraces(t,i,n),D.stopSequence(t),o},O.deleteTraces=function J(t,e){t=r(t);var n,a,o=[],i=O.addTraces,l=J,s=[t,o,e],c=[t,e];if(\"undefined\"==typeof e)throw new Error(\"indices must be an integer or array of integers.\");for(Array.isArray(e)||(e=[e]),p(t,e,\"indices\"),e=h(e,t.data.length-1),e.sort(P.sorterDes),n=0;n<e.length;n+=1)a=t.data.splice(e[n],1)[0],o.push(a);var u=O.redraw("
+,
+"t);return D.add(t,i,s,l,c),u},O.moveTraces=function K(t,e,n){t=r(t);var a,o=[],i=[],l=K,s=K,c=[t,n,e],u=[t,e,n];if(g(t,e,n),e=Array.isArray(e)?e:[e],\"undefined\"==typeof n)for(n=[],a=0;a<e.length;a++)n.push(-e.length+a);for(n=Array.isArray(n)?n:[n],e=h(e,t.data.length-1),n=h(n,t.data.length-1),a=0;a<t.data.length;a++)-1===e.indexOf(a)&&o.push(t.data[a]);for(a=0;a<e.length;a++)i.push({newIndex:n[a],trace:t.data[e[a]]});for(i.sort(function(t,e){return t.newIndex-e.newIndex}),a=0;a<i.length;a+=1)o.splice(i[a].newIndex,0,i[a].trace);t.data=o;var f=O.redraw(t);return D.add(t,l,c,s,u),f},O.restyle=function tt(t,e,n,a){function o(){return a.map(function(){})}function i(t){var e=O.Axes.id2name(t);-1===p.indexOf(e)&&p.push(e)}function l(t){return\"LAYOUT\"+t+\".autorange\"}function s(t){return\"LAYOUT\"+t+\".range\"}function c(e,n,r){if(Array.isArray(e))return void e.forEach(function(t){c(t,n,r)});if(!(e in d)){var i;i=\"LAYOUT\"===e.substr(0,6)?P.nestedProperty(t.layout,e.replace(\"LAYOUT\",\"\")):P.nestedProperty(t.data[a[r]],e),e in L||(L[e]=o()),void 0===L[e][r]&&(L[e][r]=i.get()),void 0!==n&&i.set(n)}}t=r(t);var u,f=t._fullLayout,d={};if(\"string\"==typeof e)d[e]=n;else{if(!P.isPlainObject(e))return P.warn(\"Restyle fail.\",e,n,a),Promise.reject();d=e,void 0===a&&(a=n)}Object.keys(d).length&&(t.changed=!0),C(a)?a=[a]:Array.isArray(a)&&a.length||(a=t.data.map(function(t,e){return e}));var h=[\"mode\",\"visible\",\"type\",\"orientation\",\"fill\",\"histfunc\",\"histnorm\",\"text\",\"x\",\"y\",\"z\",\"a\",\"b\",\"c\",\"xtype\",\"x0\",\"dx\",\"ytype\",\"y0\",\"dy\",\"xaxis\",\"yaxis\",\"line.width\",\"connectgaps\",\"transpose\",\"zsmooth\",\"showscale\",\"marker.showscale\",\"zauto\",\"marker.cauto\",\"autocolorscale\",\"marker.autocolorscale\",\"colorscale\",\"marker.colorscale\",\"reversescale\",\"marker.reversescale\",\"autobinx\",\"nbinsx\",\"xbins\",\"xbins.start\",\"xbins.end\",\"xbins.size\",\"autobiny\",\"nbinsy\",\"ybins\",\"ybins.start\",\"ybins.end\",\"ybins.size\",\"autocontour\",\"ncontours\",\"contours\",\"contours.coloring\",\"error_y\",\"error_y.visible\",\"error_y.value\",\"error_y.type\",\"error_y.traceref\",\"error_y.array\",\"error_y.symmetric\",\"error_y.arrayminus\",\"error_y.valueminus\",\"error_y.tracerefminus\",\"error_x\",\"error_x.visible\",\"error_x.value\",\"error_x.type\",\"error_x.traceref\",\"error_x.array\",\"error_x.symmetric\",\"error_x.arrayminus\",\"error_x.valueminus\",\"error_x.tracerefminus\",\"swapxy\",\"swapxyaxes\",\"orientationaxes\",\"marker.colors\",\"values\",\"labels\",\"label0\",\"dlabel\",\"sort\",\"textinfo\",\"textposition\",\"textfont.size\",\"textfont.family\",\"textfont.color\",\"insidetextfont.size\",\"insidetextfont.family\",\"insidetextfont.color\",\"outsidetextfont.size\",\"outsidetextfont.family\",\"outsidetextfont.color\",\"hole\",\"scalegroup\",\"domain\",\"domain.x\",\"domain.y\",\"domain.x[0]\",\"domain.x[1]\",\"domain.y[0]\",\"domain.y[1]\",\"tilt\",\"tiltaxis\",\"depth\",\"direction\",\"rotation\",\"pull\",\"line.showscale\",\"line.cauto\",\"line.autocolorscale\",\"line.reversescale\",\"marker.line.showscale\",\"marker.line.cauto\",\"marker.line.autocolorscale\",\"marker.line.reversescale\"];for(u=0;u<a.length;u++)if(I.traceIs(t._fullData[a[u]],\"box\")){h.push(\"name\");break}var 
p,g=[\"marker\",\"marker.size\",\"textfont\",\"boxpoints\",\"jitter\",\"pointpos\",\"whiskerwidth\",\"boxmean\"],v=[\"zmin\",\"zmax\",\"zauto\",\"marker.cmin\",\"marker.cmax\",\"marker.cauto\",\"line.cmin\",\"line.cmax\",\"marker.line.cmin\",\"marker.line.cmax\",\"contours.start\",\"contours.end\",\"contours.size\",\"contours.showlines\",\"line\",\"line.smoothing\",\"line.shape\",\"error_y.width\",\"error_x.width\",\"error_x.copy_ystyle\",\"marker.maxdisplayed\"],m=[\"type\",\"x\",\"y\",\"x0\",\"y0\",\"orientation\",\"xaxis\",\"yaxis\"],y=!1,x=!1,_=!1,w=!1,k=!1,M=!1,A={},L={},T={},z=[\"cartesian\",\"pie\",\"ternary\"];f._basePlotModules.forEach(function(t){-1===z.indexOf(t.name)&&(y=!0)});var S=[\"zmin\",\"zmax\"],E=[\"xbins.start\",\"xbins.end\",\"xbins.size\"],N=[\"ybins.start\",\"ybins.end\",\"ybins.size\"],R=[\"contours.start\",\"contours.end\",\"contours.size\"];for(var j in d){var q,F,B,V,Z,Y=d[j];if(A[j]=Y,\"LAYOUT\"!==j.substr(0,6)){for(\"transforms\"===j.substr(0,10)&&(y=!0),L[j]=o(),u=0;u<a.length;u++){if(q=t.data[a[u]],F=t._fullData[a[u]],B=P.nestedProperty(q,j),V=B.get(),Z=Array.isArray(Y)?Y[u%Y.length]:Y,-1!==S.indexOf(j))c(\"zauto\",!1,u);else if(\"colorscale\"===j)c(\"autocolorscale\",!1,u);else if(\"autocolorscale\"===j)c(\"colorscale\",void 0,u);else if(\"marker.colorscale\"===j)c(\"marker.autocolorscale\",!1,u);else if(\"marker.autocolorscale\"===j)c(\"marker.colorscale\",void 0,u);else if(\"zauto\"===j)c(S,void 0,u);else if(-1!==E.indexOf(j))c(\"autobinx\",!1,u);else if(\"autobinx\"===j)c(E,void 0,u);else if(-1!==N.indexOf(j))c(\"autobiny\",!1,u);else if(\"autobiny\"===j)c(N,void 0,u);else if(-1!==R.indexOf(j))c(\"autocontour\",!1,u);else if(\"autocontour\"===j)c(R,void 0,u);else if(-1!==[\"x0\",\"dx\"].indexOf(j)&&F.x&&\"scaled\"!==F.xtype)c(\"xtype\",\"scaled\",u);else if(-1!==[\"y0\",\"dy\"].indexOf(j)&&F.y&&\"scaled\"!==F.ytype)c(\"ytype\",\"scaled\",u);else if(\"colorbar.thicknessmode\"===j&&B.get()!==Z&&-1!==[\"fraction\",\"pixels\"].indexOf(Z)&&F.colorbar){var U=-1!==[\"top\",\"bottom\"].indexOf(F.colorbar.orient)?f.height-f.margin.t-f.margin.b:f.width-f.margin.l-f.margin.r;c(\"colorbar.thickness\",F.colorbar.thickness*(\"fraction\"===Z?1/U:U),u)}else if(\"colorbar.lenmode\"===j&&B.get()!==Z&&-1!==[\"fraction\",\"pixels\"].indexOf(Z)&&F.colorbar){var X=-1!==[\"top\",\"bottom\"].indexOf(F.colorbar.orient)?f.width-f.margin.l-f.margin.r:f.height-f.margin.t-f.margin.b;c(\"colorbar.len\",F.colorbar.len*(\"fraction\"===Z?1/X:X),u)}else\"colorbar.tick0\"===j||\"colorbar.dtick\"===j?c(\"colorbar.tickmode\",\"linear\",u):\"colorbar.tickmode\"===j&&c([\"colorbar.tick0\",\"colorbar.dtick\"],void 0,u);if(\"type\"===j&&\"pie\"===Z!=(\"pie\"===V)){var G=\"x\",$=\"y\";\"bar\"!==Z&&\"bar\"!==V||\"h\"!==q.orientation||(G=\"y\",$=\"x\"),P.swapAttrs(q,[\"?\",\"?src\"],\"labels\",G),P.swapAttrs(q,[\"d?\",\"?0\"],\"label\",G),P.swapAttrs(q,[\"?\",\"?src\"],\"values\",$),\"pie\"===V?(P.nestedProperty(q,\"marker.color\").set(P.nestedProperty(q,\"marker.colors\").get()),f._pielayer.selectAll(\"g.trace\").remove()):I.traceIs(q,\"cartesian\")&&(P.nestedProperty(q,\"marker.colors\").set(P.nestedProperty(q,\"marker.color\").get()),T[q.xaxis||\"x\"]=!0,T[q.yaxis||\"y\"]=!0)}L[j][u]=V;var Q=[\"swapxy\",\"swapxyaxes\",\"orientation\",\"orientationaxes\"];if(-1!==Q.indexOf(j)){if(\"orientation\"===j){if(B.set(Z),B.get()===L[j][u])continue}else\"orientationaxes\"===j&&(q.orientation={v:\"h\",h:\"v\"}[F.orientation]);b(q)}else 
B.set(Z)}if(-1!==[\"swapxyaxes\",\"orientationaxes\"].indexOf(j)&&O.Axes.swap(t,a),\"orientationaxes\"===j){var W=P.nestedProperty(t.layout,\"hovermode\");\"x\"===W.get()?W.set(\"y\"):\"y\"===W.get()&&W.set(\"x\")}if(-1!==a.indexOf(0)&&-1!==m.indexOf(j)&&(O.Axes.clearTypes(t,a),y=!0),-1!==[\"autobinx\",\"autobiny\",\"zauto\"].indexOf(j)&&Z===!1||(k=!0),(-1!==[\"colorbar\",\"line\"].indexOf(B.parts[0])||\"marker\"===B.parts[0]&&\"colorbar\"===B.parts[1])&&(M=!0),-1!==h.indexOf(j)){if(-1!==[\"orientation\",\"type\"].indexOf(j)){for(p=[],u=0;u<a.length;u++){var J=t.data[a[u]];I.traceIs(J,\"cartesian\")&&(i(J.xaxis||\"x\"),i(J.yaxis||\"y\"),\"type\"===e&&c([\"autobinx\",\"autobiny\"],!0,u))}c(p.map(l),!0,0),c(p.map(s),[0,1],0)}y=!0}else-1!==v.indexOf(j)?_=!0:-1!==g.indexOf(j)&&(x=!0)}else B=P.nestedProperty(t.layout,j.replace(\"LAYOUT\",\"\")),L[j]=[B.get()],B.set(Array.isArray(Y)?Y[0]:Y),y=!0}var K=Object.keys(T);t:for(u=0;u<K.length;u++){for(var et=K[u],nt=et.charAt(0),rt=nt+\"axis\",at=0;at<t.data.length;at++)if(I.traceIs(t.data[at],\"cartesian\")&&(t.data[at][rt]||nt)===et)continue t;c(\"LAYOUT\"+O.Axes.id2name(et),null,0)}D.add(t,tt,[t,L,a],tt,[t,A,a]);var ot=!1;O.Axes.list(t).forEach(function(t){t.autorange&&(ot=!0)}),(y||w||x&&ot)&&(t.calcdata=void 0);var it;w?it=[function(){var e=t.layout;return t.layout=void 0,O.plot(t,\"\",e)}]:y||_||x?it=[O.plot]:(I.supplyDefaults(t),it=[I.previousPromises],k&&it.push(function(){var e,n,r;for(e=0;e<t.calcdata.length;e++)n=t.calcdata[e],r=(((n[0]||{}).trace||{})._module||{}).arraysToCalcdata,r&&r(n);return I.style(t),H.draw(t),I.previousPromises(t)}),M&&it.push(function(){return t.calcdata.forEach(function(t){if((t[0].t||{}).cb){var e=t[0].trace,n=t[0].t.cb;I.traceIs(e,\"contour\")&&n.line({width:e.contours.showlines!==!1?e.line.width:0,dash:e.line.dash,color:\"line\"===e.contours.coloring?n._opts.line.color:e.line.color}),I.traceIs(e,\"markerColorscale\")?n.options(e.marker.colorbar)():n.options(e.colorbar)()}}),I.previousPromises(t)}));var lt=P.syncOrAsync(it,t);return lt&&lt.then||(lt=Promise.resolve()),lt.then(function(){return t.emit(\"plotly_restyle\",P.extendDeep([],[A,a])),t})},O.relayout=function et(t,e,n){function a(t,e){if(Array.isArray(t))return void t.forEach(function(t){a(t,e)});if(!(t in v)){var n=P.nestedProperty(p,t);t in T||(T[t]=n.get()),void 0!==e&&n.set(e)}}function o(t,e){var n=O.Axes.id2name(t[e+\"ref\"]||e);return(g[n]||{}).autorange}function i(t){var e=t[\"xaxis.range\"]?t[\"xaxis.range\"][0]:t[\"xaxis.range[0]\"],n=t[\"xaxis.range\"]?t[\"xaxis.range\"][1]:t[\"xaxis.range[1]\"],r=g.xaxis&&g.xaxis.rangeslider?g.xaxis.rangeslider:{};r.visible&&(e||n?g.xaxis.rangeslider.setRange(e,n):t[\"xaxis.autorange\"]&&g.xaxis.rangeslider.setRange())}if(t=r(t),t.framework&&t.framework.isPolar)return Promise.resolve(t);var l,s,c,u,f,d,h,p=t.layout,g=t._fullLayout,v={},m=!1,y=!1,x=!1,b=!1,_=!1,k=!1;if(\"string\"==typeof e)v[e]=n;else{if(!P.isPlainObject(e))return P.warn(\"Relayout fail.\",e,n),Promise.reject();v=e}for(Object.keys(v).length&&(t.changed=!0),c=Object.keys(v),s=O.Axes.list(t),h=0;h<c.length;h++){if(0===c[h].indexOf(\"allaxes\")){for(var M=0;M<s.length;M++)f=s[M]._id.substr(1),d=-1!==f.indexOf(\"scene\")?f+\".\":\"\",l=c[h].replace(\"allaxes\",d+s[M]._name),v[l]||(v[l]=v[c[h]]);delete v[c[h]]}c[h].match(/^annotations\\[[0-9-]+\\].ref$/)&&(u=v[c[h]].split(\"y\"),v[c[h].replace(\"ref\",\"xref\")]=u[0],v[c[h].replace(\"ref\",\"yref\")]=2===u.length?\"y\"+u[1]:\"paper\",delete v[c[h]])}var A={},T={},S=[\"height\",\"width\"];for(var E 
in v){var C=P.nestedProperty(p,E),N=v[E],R=C.parts.length,j=\"string\"==typeof C.parts[R-1]?R-1:R-2,q=C.parts[j],F=C.parts[j-1]+\".\"+q,V=C.parts.slice(0,j).join(\".\"),Z=P.nestedProperty(t.layout,V).get(),Y=P.nestedProperty(g,V).get();if(A[E]=N,T[E]=\"reverse\"===q?N:C.get(),-1!==S.indexOf(E)?a(\"autosize\",!1):\""
+,
+"autosize\"===E?a(S,void 0):F.match(/^[xyz]axis[0-9]*\\.range(\\[[0|1]\\])?$/)?a(V+\".autorange\",!1):F.match(/^[xyz]axis[0-9]*\\.autorange$/)?a([V+\".range[0]\",V+\".range[1]\"],void 0):F.match(/^aspectratio\\.[xyz]$/)?a(C.parts[0]+\".aspectmode\",\"manual\"):F.match(/^aspectmode$/)?a([V+\".x\",V+\".y\",V+\".z\"],void 0):\"tick0\"===q||\"dtick\"===q?a(V+\".tickmode\",\"linear\"):\"tickmode\"===q?a([V+\".tick0\",V+\".dtick\"],void 0):/[xy]axis[0-9]*?$/.test(q)&&!Object.keys(N||{}).length?_=!0:/[xy]axis[0-9]*\\.categoryorder$/.test(F)?_=!0:/[xy]axis[0-9]*\\.categoryarray/.test(F)&&(_=!0),-1!==F.indexOf(\"rangeslider\")&&(_=!0),\"type\"===q&&\"log\"===Y.type!=(\"log\"===N)){var U=Z;if(U&&U.range)if(Y.autorange)\"log\"===N&&(U.range=U.range[1]>U.range[0]?[1,2]:[2,1]);else{var G=U.range[0],$=U.range[1];\"log\"===N?(0>=G&&0>=$&&a(V+\".autorange\",!0),0>=G?G=$/1e6:0>=$&&($=G/1e6),a(V+\".range[0]\",Math.log(G)/Math.LN10),a(V+\".range[1]\",Math.log($)/Math.LN10)):(a(V+\".range[0]\",Math.pow(10,G)),a(V+\".range[1]\",Math.pow(10,$)))}else a(V+\".autorange\",!0)}if(\"reverse\"===q)Z.range?Z.range.reverse():(a(V+\".autorange\",!0),Z.range=[1,0]),Y.autorange?_=!0:b=!0;else if(\"annotations\"===C.parts[0]||\"shapes\"===C.parts[0]){var Q=C.parts[1],W=C.parts[0],J=p[W]||[],K=O[P.titleCase(W)],tt=J[Q]||{};2===C.parts.length&&(\"add\"===v[E]||P.isPlainObject(v[E])?T[E]=\"remove\":\"remove\"===v[E]?-1===Q?(T[W]=J,delete T[E]):T[E]=tt:P.log(\"???\",v)),!o(tt,\"x\")&&!o(tt,\"y\")||P.containsAny(E,[\"color\",\"opacity\",\"align\",\"dash\"])||(_=!0),K.draw(t,Q,C.parts.slice(2).join(\".\"),v[E]),delete v[E]}else if(\"images\"===C.parts[0]){var nt=P.objectFromPath(E,N);P.extendDeepAll(t.layout,nt),B.supplyLayoutDefaults(t.layout,t._fullLayout),B.draw(t)}else if(\"mapbox\"===C.parts[0]&&\"layers\"===C.parts[1]){P.extendDeepAll(t.layout,P.objectFromPath(E,N));var rt=(t._fullLayout.mapbox||{}).layers||[],at=C.parts[2]+1-rt.length;for(h=0;at>h;h++)rt.push({});b=!0}else 0===C.parts[0].indexOf(\"scene\")?b=!0:0===C.parts[0].indexOf(\"geo\")?b=!0:0===C.parts[0].indexOf(\"ternary\")?b=!0:!g._has(\"gl2d\")||-1===E.indexOf(\"axis\")&&\"plot_bgcolor\"!==C.parts[0]?\"hiddenlabels\"===E?_=!0:-1!==C.parts[0].indexOf(\"legend\")?m=!0:-1!==E.indexOf(\"title\")?y=!0:-1!==C.parts[0].indexOf(\"bgcolor\")?x=!0:C.parts.length>1&&P.containsAny(C.parts[1],[\"tick\",\"exponent\",\"grid\",\"zeroline\"])?y=!0:-1!==E.indexOf(\".linewidth\")&&-1!==E.indexOf(\"axis\")?y=x=!0:C.parts.length>1&&-1!==C.parts[1].indexOf(\"line\")?x=!0:C.parts.length>1&&\"mirror\"===C.parts[1]?y=x=!0:\"margin.pad\"===E?y=x=!0:\"margin\"===C.parts[0]||\"autorange\"===C.parts[1]||\"rangemode\"===C.parts[1]||\"type\"===C.parts[1]||\"domain\"===C.parts[1]||E.match(/^(bar|box|font)/)?_=!0:-1!==[\"hovermode\",\"dragmode\"].indexOf(E)?k=!0:-1===[\"hovermode\",\"dragmode\",\"height\",\"width\",\"autosize\"].indexOf(E)&&(b=!0):b=!0,C.set(N)}D.add(t,et,[t,T],et,[t,A]),v.autosize&&(v=w(t,v)),(v.height||v.width||v.autosize)&&(_=!0);var ot=Object.keys(v),it=[I.previousPromises];if(b||_)it.push(function(){return t.layout=void 0,_&&(t.calcdata=void 0),O.plot(t,\"\",p)});else if(ot.length&&(I.supplyDefaults(t),g=t._fullLayout,m&&it.push(function(){return H.draw(t),I.previousPromises(t)}),x&&it.push(L),y&&it.push(function(){return O.Axes.doTicks(t,\"redraw\"),z(t),I.previousPromises(t)}),k)){var 
lt;for(X(t),O.Fx.supplyLayoutDefaults(t.layout,g,t._fullData),O.Fx.init(t),lt=I.getSubplotIds(g,\"gl3d\"),h=0;h<lt.length;h++)f=g[lt[h]]._scene,f.updateFx(g.dragmode,g.hovermode);for(lt=I.getSubplotIds(g,\"gl2d\"),h=0;h<lt.length;h++)f=g._plots[lt[h]]._scene2d,f.updateFx(g);for(lt=I.getSubplotIds(g,\"geo\"),h=0;h<lt.length;h++){var st=g[lt[h]]._geo;st.updateFx(g.hovermode)}}var ct=P.syncOrAsync(it,t);return ct&&ct.then||(ct=Promise.resolve(t)),ct.then(function(){var e=P.extendDeep({},A);return i(e),t.emit(\"plotly_relayout\",e),t})},O.purge=function(t){t=r(t);var e=t._fullLayout||{},n=t._fullData||[];return I.cleanPlot([],{},n,e),I.purge(t),N.purge(t),e._container&&e._container.remove(),delete t._context,delete t._replotPending,delete t._mouseDownTime,delete t._hmpixcount,delete t._hmlumcount,t}},{\"../components/color\":18,\"../components/drawing\":41,\"../components/errorbars\":47,\"../components/images\":53,\"../components/legend\":61,\"../components/modebar/manage\":65,\"../components/rangeselector\":72,\"../components/rangeslider\":77,\"../components/shapes\":80,\"../components/titles\":81,\"../constants/xmlns_namespaces\":82,\"../lib\":89,\"../lib/events\":87,\"../lib/queue\":96,\"../plotly\":107,\"../plots/cartesian/graph_interact\":117,\"../plots/plots\":130,d3:9,\"fast-isnumeric\":11,\"gl-mat4/fromQuat\":12}],102:[function(t,e,n){\"use strict\";function r(t,e){try{t._fullLayout._paper.style(\"background\",e)}catch(n){a.error(n)}}var a=t(\"../lib\");e.exports={staticPlot:!1,editable:!1,queueLength:0,autosizable:!1,fillFrame:!1,frameMargins:0,scrollZoom:!1,doubleClick:\"reset+autosize\",showTips:!0,showLink:!1,sendData:!0,linkText:\"Edit chart\",showSources:!1,displayModeBar:\"hover\",modeBarButtonsToRemove:[],modeBarButtonsToAdd:[],modeBarButtons:!1,displaylogo:!0,plotGlPixelRatio:2,setBackground:r,topojsonURL:\"https://cdn.plot.ly/\",mapboxAccessToken:null,logging:!1}},{\"../lib\":89}],103:[function(t,e,n){\"use strict\";function r(t){var e=m.attributes,n=c({type:t}),r=f(t),a=h(t),o={},i={};o.type=null,b(o,e),o=l(n.attributes,o,\"attributes\",t),void 0!==a.attributes&&b(o,a.attributes),o.type=t,o=u(o),s(o),z.traces[t]=x({},r,{attributes:o}),void 0!==n.layoutAttributes&&(i=l(n.layoutAttributes,i,\"layoutAttributes\",t),s(i),z.traces[t].layoutAttributes=i)}function a(){var t=m.layoutAttributes,e={};e=l(t,e,\"layoutAttributes\",\"*\"),e=d(e),e=p(e),e=u(e),s(e),g(e),z.layout={layoutAttributes:e}}function o(t){var e=m.transformsRegistry[t],n={};n=l(n,e.attributes||{},\"attributes\",\"*\"),n=u(n),s(n),g(n),z.transforms[t]={attributes:n}}function i(){z.defs={valObjects:y.valObjects,metaKeys:T.concat([\"description\",\"role\"])}}function l(t,e,n,r){var a,o,i,s,u;return Object.keys(t).forEach(function(f){return f===w?void Object.keys(t[f]).forEach(function(s){a=c({module:t[f][s]}),void 0!==a&&(o=a[n],i=l(o,{},n,r),y.nestedProperty(e,s).set(b({},i)))}):f===k?void Object.keys(t[f]).forEach(function(a){a===r&&(s=c({module:t[f][a]}),void 0!==s&&(u=s[n],u=l(u,{},n,r),_(e,u)))}):void(e[f]=y.isPlainObject(t[f])?_({},t[f]):t[f])}),e}function s(t){function e(t){return{valType:\"string\"}}function n(t,n,r){C.isValObject(t)?\"data_array\"===t.valType?(t.role=\"data\",r[n+\"src\"]=e(n)):t.arrayOk===!0&&(r[n+\"src\"]=e(n)):y.isPlainObject(t)&&(t.role=\"object\")}C.crawl(t,n)}function c(t){if(\"type\"in t)return\"area\"===t.type?{attributes:S}:m.getModule({type:t.type});var e=m.subplotsRegistry,n=t.module;return e[n]?e[n]:\"module\"in t?v[n]:void 0}function u(t){return 
Object.keys(t).forEach(function(e){\"_\"===e.charAt(0)&&-1===T.indexOf(e)&&delete t[e]}),t}function f(t){return\"area\"===t?{}:m.modules[t].meta||{}}function d(t){return x(t,{radialaxis:E.radialaxis,angularaxis:E.angularaxis}),x(t,E.layout),t}function h(t){if(\"area\"===t)return{};var e=m.subplotsRegistry,n=Object.keys(e).filter(function(e){return m.traceIs({type:t},e)})[0];return void 0===n?{}:e[n]}function p(t){var e=m.subplotsRegistry;return Object.keys(t).forEach(function(n){Object.keys(e).forEach(function(r){var a,o=e[r];o.attrRegex&&(a=\"cartesian\"===r||\"gl2d\"===r?o.attrRegex.x.test(n)||o.attrRegex.y.test(n):o.attrRegex.test(n),a&&(t[n][M]=!0))})}),t}function g(t){function e(t,e,n){if(t[A]===!0){var r=e.substr(0,e.length-1);delete t[A],n[e]={items:{}},n[e].items[r]=t,n[e].role=\"object\"}}C.crawl(t,e)}var v=t(\"../plotly\"),m=t(\"../plots/plots\"),y=t(\"../lib\"),x=y.extendFlat,b=y.extendDeep,_=y.extendDeepAll,w=\"_nestedModules\",k=\"_composedModules\",M=\"_isSubplotObj\",A=\"_isLinkedToArray\",L=\"_deprecated\",T=[M,A,L],z={traces:{},layout:{},transforms:{},defs:{}},S=t(\"../plots/polar/area_attributes\"),E=t(\"../plots/polar/axis_attributes\"),C=e.exports={};C.get=function(){return m.allTypes.concat(\"area\").forEach(r),a(),Object.keys(m.transformsRegistry).forEach(o),i(),z},C.crawl=function(t,e){Object.keys(t).forEach(function(n){var r=t[n];-1===T.indexOf(n)&&(e(r,n,t),C.isValObject(r)||y.isPlainObject(r)&&C.crawl(r,e))})},C.isValObject=function(t){return t&&void 0!==t.valType}},{\"../lib\":89,\"../plotly\":107,\"../plots/plots\":130,\"../plots/polar/area_attributes\":131,\"../plots/polar/axis_attributes\":132}],104:[function(t,e,n){\"use strict\";var r=t(\"../plotly\"),a=t(\"../lib\");e.exports=function(t){return a.extendFlat(r.defaultConfig,t)}},{\"../lib\":89,\"../plotly\":107}],105:[function(t,e,n){\"use strict\";function r(e,n){var r=t(\"../snapshot\"),l=new Promise(function(t,l){function s(){var t=r.getDelay(f._fullLayout);return new Promise(function(e,a){setTimeout(function(){var t=r.toSVG(f),o=document.createElement(\"canvas\");o.id=i.randstr(),r.svgToImg({format:n.format,width:f._fullLayout.width,height:f._fullLayout.height,canvas:o,svg:t,promise:!0}).then(function(t){f&&document.body.removeChild(f),e(t)}).catch(function(t){a(t)})},t)})}n=n||{},n.format=n.format||\"png\";var c=function(t){return void 0===t||null===t?!0:!!(a(t)&&t>1)};c(n.width)&&c(n.height)||l(new Error(\"Height and width should be pixel values.\"));var u=r.clone(e,{format:\"png\",height:n.height,width:n.width}),f=u.td;f.style.position=\"absolute\",f.style.left=\"-5000px\",document.body.appendChild(f);var d=r.getRedrawFunc(f);o.plot(f,u.data,u.layout,u.config).then(d).then(s).then(function(e){t(e)}).catch(function(t){l(t)})});return l}var a=t(\"fast-isnumeric\"),o=t(\"../plotly\"),i=t(\"../lib\");e.exports=r},{\"../lib\":89,\"../plotly\":107,\"../snapshot\":139,\"fast-isnumeric\":11}],106:[function(t,e,n){\"use strict\";function r(t,e,n,a,o,c){c=c||[];for(var u=Object.keys(t),d=0;d<u.length;d++){var h=u[d];if(\"transforms\"!==h){var v=c.slice();v.push(h);var m=t[h],y=e[h],x=s(n,h);if(l(n,h))if(p(m)&&p(y))r(m,y,x,a,o,v);else if(x.items&&g(m))for(var b=h.substr(0,h.length-1),_=0;_<m.length;_++){var w=x.items[b],k=v.slice();k.push(_),r(m[_],y[_],w,a,o,k)}else!p(m)&&p(y)?a.push(i(\"object\",o,v,m)):!g(m)&&g(y)&&\"info_array\"!==x.valType?a.push(i(\"array\",o,v,m)):h in e?f.validate(m,x)||a.push(i(\"value\",o,v,m)):a.push(i(\"u"
+,
+"nused\",o,v,m));else a.push(i(\"schema\",o,v))}}return a}function a(t,e){for(var n=0;n<e.length;n++){var r=e[n].type,a=t.traces[r].layoutAttributes;a&&f.extendFlat(t.layout.layoutAttributes,a)}return t.layout.layoutAttributes}function o(t){return g(t)?\"In data trace \"+t[1]+\", \":\"In \"+t+\", \"}function i(t,e,n,r){n=n||\"\";var a,o;g(e)?(a=e[0],o=e[1]):(a=e,o=null);var i=u(n),l=v[t](e,i,r);return f.log(l),{code:t,container:a,trace:o,path:n,astr:i,msg:l}}function l(t,e){var n=c(e),r=n.keyMinusId,a=n.id;return r in t&&t[r]._isSubplotObj&&a?!0:e in t}function s(t,e){var n=c(e);return t[n.keyMinusId]}function c(t){var e=/([2-9]|[1-9][0-9]+)$/,n=t.split(e)[0],r=t.substr(n.length,t.length);return{keyMinusId:n,id:r}}function u(t){if(!g(t))return String(t);for(var e=\"\",n=0;n<t.length;n++){var r=t[n];\"number\"==typeof r?e=e.substr(0,e.length-1)+\"[\"+r+\"]\":e+=r,n<t.length-1&&(e+=\".\")}return e}var f=t(\"../lib\"),d=t(\"../plots/plots\"),h=t(\"./plot_schema\"),p=f.isPlainObject,g=Array.isArray;e.exports=function(t,e){var n,o,l=h.get(),s=[],c={};g(t)?(c.data=f.extendDeep([],t),n=t):(c.data=[],n=[],s.push(i(\"array\",\"data\"))),p(e)?(c.layout=f.extendDeep({},e),o=e):(c.layout={},o={},arguments.length>1&&s.push(i(\"object\",\"layout\"))),d.supplyDefaults(c);for(var u=c._fullData,v=n.length,m=0;v>m;m++){var y=n[m],x=[\"data\",m];if(p(y)){var b=u[m],_=b.type,w=l.traces[_].attributes;w.type={valType:\"enumerated\",values:[_]},b.visible===!1&&y.visible!==!1&&s.push(i(\"invisible\",x)),r(y,b,w,s,x);var k=y.transforms,M=b.transforms;if(k){g(k)||s.push(i(\"array\",x,[\"transforms\"])),x.push(\"transforms\");for(var A=0;A<k.length;A++){var L=[\"transforms\",A],T=k[A].type;if(p(k[A])){var z=l.transforms[T]?l.transforms[T].attributes:{};z.type={valType:\"enumerated\",values:Object.keys(l.transforms)},r(k[A],M[A],z,s,x,L)}else s.push(i(\"object\",x,L))}}}else s.push(i(\"object\",x))}var S=c._fullLayout,E=a(l,u);return r(o,S,E,s,\"layout\"),0===s.length?void 0:s};var v={object:function(t,e){var n;return n=\"layout\"===t&&\"\"===e?\"The layout argument\":\"data\"===t[0]&&\"\"===e?\"Trace \"+t[1]+\" in the data argument\":o(t)+\"key \"+e,\n"
+,
+"n+\" must be linked to an object container\"},array:function(t,e){var n;return n=\"data\"===t?\"The data argument\":o(t)+\"key \"+e,n+\" must be linked to an array container\"},schema:function(t,e){return o(t)+\"key \"+e+\" is not part of the schema\"},unused:function(t,e,n){var r=p(n)?\"container\":\"key\";return o(t)+r+\" \"+e+\" did not get coerced\"},invisible:function(t){return\"Trace \"+t[1]+\" got defaulted to be not visible\"},value:function(t,e,n){return[o(t)+\"key \"+e,\"is set to an invalid value (\"+n+\")\"].join(\" \")}}},{\"../lib\":89,\"../plots/plots\":130,\"./plot_schema\":103}],107:[function(t,e,n){\"use strict\";t(\"es6-promise\").polyfill();var r=n.Lib=t(\"./lib\");n.util=t(\"./lib/svg_text_utils\"),n.Queue=t(\"./lib/queue\"),t(\"../build/plotcss\"),n.MathJaxConfig=t(\"./fonts/mathjax_config\"),n.defaultConfig=t(\"./plot_api/plot_config\");var a=n.Plots=t(\"./plots/plots\");n.Axes=t(\"./plots/cartesian/axes\"),n.Fx=t(\"./plots/cartesian/graph_interact\"),n.micropolar=t(\"./plots/polar/micropolar\"),n.Color=t(\"./components/color\"),n.Drawing=t(\"./components/drawing\"),n.Colorscale=t(\"./components/colorscale\"),n.Colorbar=t(\"./components/colorbar\"),n.ErrorBars=t(\"./components/errorbars\"),n.Annotations=t(\"./components/annotations\"),n.Shapes=t(\"./components/shapes\"),n.Legend=t(\"./components/legend\"),n.Images=t(\"./components/images\"),n.ModeBar=t(\"./components/modebar\"),n.register=function(t){if(!t)throw new Error(\"No argument passed to Plotly.register.\");t&&!Array.isArray(t)&&(t=[t]);for(var e=0;e<t.length;e++){var n=t[e];if(!n)throw new Error(\"Invalid module was attempted to be registered!\");switch(n.moduleType){case\"trace\":a.register(n,n.name,n.categories,n.meta),a.subplotsRegistry[n.basePlotModule.name]||a.registerSubplot(n.basePlotModule);break;case\"transform\":if(\"string\"!=typeof n.name)throw new Error(\"Transform module *name* must be a string.\");var o=\"Transform module \"+n.name;if(\"function\"!=typeof n.transform)throw new Error(o+\" is missing a *transform* function.\");r.isPlainObject(n.attributes)||r.log(o+\" registered without an *attributes* object.\"),\"function\"!=typeof n.supplyDefaults&&r.log(o+\" registered without a *supplyDefaults* function.\"),a.transformsRegistry[n.name]=n;break;default:throw new Error(\"Invalid module was attempted to be registered!\")}}},n.register(t(\"./traces/scatter\")),t(\"./plot_api/plot_api\"),n.PlotSchema=t(\"./plot_api/plot_schema\"),n.Snapshot=t(\"./snapshot\")},{\"../build/plotcss\":1,\"./components/annotations\":16,\"./components/color\":18,\"./components/colorbar\":23,\"./components/colorscale\":32,\"./components/drawing\":41,\"./components/errorbars\":47,\"./components/images\":53,\"./components/legend\":61,\"./components/modebar\":64,\"./components/shapes\":80,\"./fonts/mathjax_config\":84,\"./lib\":89,\"./lib/queue\":96,\"./lib/svg_text_utils\":100,\"./plot_api/plot_api\":101,\"./plot_api/plot_config\":102,\"./plot_api/plot_schema\":103,\"./plots/cartesian/axes\":110,\"./plots/cartesian/graph_interact\":117,\"./plots/plots\":130,\"./plots/polar/micropolar\":133,\"./snapshot\":139,\"./traces/scatter\":177,\"es6-promise\":10}],108:[function(t,e,n){\"use 
strict\";e.exports={type:{valType:\"enumerated\",values:[],dflt:\"scatter\"},visible:{valType:\"enumerated\",values:[!0,!1,\"legendonly\"],dflt:!0},showlegend:{valType:\"boolean\",dflt:!0},legendgroup:{valType:\"string\",dflt:\"\"},opacity:{valType:\"number\",min:0,max:1,dflt:1},name:{valType:\"string\"},uid:{valType:\"string\",dflt:\"\"},hoverinfo:{valType:\"flaglist\",flags:[\"x\",\"y\",\"z\",\"text\",\"name\"],extras:[\"all\",\"none\"],dflt:\"all\"},stream:{token:{valType:\"string\",noBlank:!0,strict:!0},maxpoints:{valType:\"number\",min:0}}}},{}],109:[function(t,e,n){\"use strict\";e.exports={xaxis:{valType:\"subplotid\",dflt:\"x\"},yaxis:{valType:\"subplotid\",dflt:\"y\"}}},{}],110:[function(t,e,n){\"use strict\";function r(t){var e,n,r=t.tickvals,a=t.ticktext,o=new Array(r.length),i=1.0001*t.range[0]-1e-4*t.range[1],s=1.0001*t.range[1]-1e-4*t.range[0],c=Math.min(i,s),u=Math.max(i,s),f=0;for(Array.isArray(a)||(a=[]),n=0;n<r.length;n++)e=t.d2l(r[n]),e>c&&u>e&&(void 0===a[n]?o[f]=L.tickText(t,e):o[f]=l(t,e,String(a[n])),f++);return f<r.length&&o.splice(f,r.length-f),o}function a(t,e,n){return e*_.roundUp(t/e,n)}function o(t){var e,n=t.dtick;if(t._tickexponent=0,x(n)||\"string\"==typeof n||(n=1),\"category\"===t.type)t._tickround=null;else if(x(n)||\"L\"===n.charAt(0))if(\"date\"===t.type)n>=864e5?t._tickround=\"d\":n>=36e5?t._tickround=\"H\":n>=6e4?t._tickround=\"M\":n>=1e3?t._tickround=\"S\":t._tickround=3-Math.round(Math.log(n/2)/Math.LN10);else{x(n)||(n=Number(n.substr(1))),t._tickround=2-Math.floor(Math.log(n)/Math.LN10+.01),e=\"log\"===t.type?Math.pow(10,Math.max(t.range[0],t.range[1])):Math.max(Math.abs(t.range[0]),Math.abs(t.range[1]));var r=Math.floor(Math.log(e)/Math.LN10+.01);Math.abs(r)>3&&(\"SI\"===t.exponentformat||\"B\"===t.exponentformat?t._tickexponent=3*Math.round((r-1)/3):t._tickexponent=r)}else\"M\"===n.charAt(0)?t._tickround=2===n.length?\"m\":\"y\":t._tickround=null}function i(t,e){var n=t.match(B),r=new Date(e);if(n){var a=Math.min(+n[1]||6,6),o=String(e/1e3%1+2.0000005).substr(2,a).replace(/0+$/,\"\")||\"0\";return y.time.format(t.replace(B,o))(r)}return y.time.format(t)(r)}function l(t,e,n){var r=t.tickfont||t._gd._fullLayout.font;return{x:e,dx:0,dy:0,text:n||\"\",fontSize:r.size,font:r.family,fontColor:r.color}}function s(t,e,n,r){var a,o=e.x,l=t._tickround,s=new Date(o),c=\"\";n&&t.hoverformat?a=i(t.hoverformat,o):t.tickformat?a=i(t.tickformat,o):(r&&(x(l)?l+=2:l={y:\"m\",m:\"d\",d:\"H\",H:\"M\",M:\"S\",S:2}[l]),\"y\"===l?a=D(s):\"m\"===l?a=I(s):(o!==t._tmin||n||(c=\"<br>\"+D(s)),\"d\"===l?a=R(s):\"H\"===l?a=j(s):(o!==t._tmin||n||(c=\"<br>\"+R(s)+\", \"+D(s)),a=q(s),\"M\"!==l&&(a+=F(s),\"S\"!==l&&(a+=d(m(o/1e3,1),t,\"none\",n).substr(1)))))),e.text=a+c}function c(t,e,n,r,a){var o=t.dtick,i=e.x;if(!r||\"string\"==typeof o&&\"L\"===o.charAt(0)||(o=\"L3\"),t.tickformat||\"string\"==typeof o&&\"L\"===o.charAt(0))e.text=d(Math.pow(10,i),t,a,r);else if(x(o)||\"D\"===o.charAt(0)&&m(i+.01,1)<.1)if(-1!==[\"e\",\"E\",\"power\"].indexOf(t.exponentformat)){var l=Math.round(i);0===l?e.text=1:1===l?e.text=\"10\":l>1?e.text=\"10<sup>\"+l+\"</sup>\":e.text=\"10<sup>\\u2212\"+-l+\"</sup>\",e.fontSize*=1.25}else e.text=d(Math.pow(10,i),t,\"\",\"fakehover\"),\"D1\"===o&&\"y\"===t._id.charAt(0)&&(e.dy-=e.fontSize/6);else{if(\"D\"!==o.charAt(0))throw\"unrecognized dtick \"+String(o);e.text=String(Math.round(Math.pow(10,m(i,1)))),e.fontSize*=.75}if(\"D1\"===t.dtick){var 
s=String(e.text).charAt(0);\"0\"!==s&&\"1\"!==s||(\"y\"===t._id.charAt(0)?e.dx-=e.fontSize/4:(e.dy+=e.fontSize/2,e.dx+=(t.range[1]>t.range[0]?1:-1)*e.fontSize*(0>i?.5:.25)))}}function u(t,e){var n=t._categories[Math.round(e.x)];void 0===n&&(n=\"\"),e.text=String(n)}function f(t,e,n,r,a){\"all\"===t.showexponent&&Math.abs(e.x/t.dtick)<1e-6&&(a=\"hide\"),e.text=d(e.x,t,a,r)}function d(t,e,n,r){var a=0>t,i=e._tickround,l=n||e.exponentformat||\"B\",s=e._tickexponent,c=e.tickformat;if(r){var u={exponentformat:e.exponentformat,dtick:\"none\"===e.showexponent?e.dtick:x(t)?Math.abs(t)||1:1,range:\"none\"===e.showexponent?e.range:[0,t||1]};o(u),i=(Number(u._tickround)||0)+4,s=u._tickexponent,e.hoverformat&&(c=e.hoverformat)}if(c)return y.format(c)(t).replace(/-/g,\"\\u2212\");var f=Math.pow(10,-i)/2;if(\"none\"===l&&(s=0),t=Math.abs(t),f>t)t=\"0\",a=!1;else{if(t+=f,s&&(t*=Math.pow(10,-s),i+=s),0===i)t=String(Math.floor(t));else if(0>i){t=String(Math.round(t)),t=t.substr(0,t.length+i);for(var d=i;0>d;d++)t+=\"0\"}else{t=String(t);var h=t.indexOf(\".\")+1;h&&(t=t.substr(0,h+i).replace(/\\.?0+$/,\"\"))}t=_.numSeparate(t,e._gd._fullLayout.separators)}if(s&&\"hide\"!==l){var p;p=0>s?\"\\u2212\"+-s:\"power\"!==l?\"+\"+s:String(s),\"e\"===l||(\"SI\"===l||\"B\"===l)&&(s>12||-15>s)?t+=\"e\"+p:\"E\"===l?t+=\"E\"+p:\"power\"===l?t+=\"&times;10<sup>\"+p+\"</sup>\":\"B\"===l&&9===s?t+=\"B\":\"SI\"!==l&&\"B\"!==l||(t+=H[s/3+5])}return a?\"\\u2212\"+t:t}function h(t,e){var n,r,a=[];for(n=0;n<e.length;n++){var o=[],i=t._fullData[e[n]].xaxis,l=t._fullData[e[n]].yaxis;if(i&&l){for(r=0;r<a.length;r++)-1===a[r].x.indexOf(i)&&-1===a[r].y.indexOf(l)||o.push(r);if(o.length){var s,c=a[o[0]];if(o.length>1)for(r=1;r<o.length;r++)s=a[o[r]],p(c.x,s.x),p(c.y,s.y);p(c.x,[i]),p(c.y,[l])}else a.push({x:[i],y:[l]})}}return a}function p(t,e){for(var n=0;n<e.length;n++)-1===t.indexOf(e[n])&&t.push(e[n])}function g(t,e,n){var r,a,o=[],i=[],l=t.layout;for(r=0;r<e.length;r++)o.push(L.getFromId(t,e[r]));for(r=0;r<n.length;r++)i.push(L.getFromId(t,n[r]));var s=Object.keys(o[0]),c=[\"anchor\",\"domain\",\"overlaying\",\"position\",\"side\",\"tickangle\"],u=[\"linear\",\"log\"];for(r=0;r<s.length;r++){var f=s[r],d=o[0][f],h=i[0][f],p=!0,g=!1,m=!1;if(\"_\"!==f.charAt(0)&&\"function\"!=typeof d&&-1===c.indexOf(f)){for(a=1;a<o.length&&p;a++){var y=o[a][f];\"type\"===f&&-1!==u.indexOf(d)&&-1!==u.indexOf(y)&&d!==y?g=!0:y!==d&&(p=!1)}for(a=1;a<i.length&&p;a++){var x=i[a][f];\"type\"===f&&-1!==u.indexOf(h)&&-1!==u.indexOf(x)&&h!==x?m=!0:i[a][f]!==h&&(p=!1)}p&&(g&&(l[o[0]._name].type=\"linear\"),m&&(l[i[0]._name].type=\"linear\"),v(l,f,o,i))}}for(r=0;r<t._fullLayout.annotations.length;r++){var b=t._fullLayout.annotations[r];-1!==e.indexOf(b.xref)&&-1!==n.indexOf(b.yref)&&_.swapAttrs(l.annotations[r],[\"?\"])}}function v(t,e,n,r){var a,o=_.nestedProperty,i=o(t[n[0]._name],e).get(),l=o(t[r[0]._name],e).get();for(\"title\"===e&&(\"Click to enter X axis title\"===i&&(i=\"Click to enter Y axis title\"),\"Click to enter Y axis title\"===l&&(l=\"Click to enter X axis title\")),a=0;a<n.length;a++)o(t,n[a]._name+\".\"+e).set(l);for(a=0;a<r.length;a++)o(t,r[a]._name+\".\"+e).set(i)}function m(t,e){return(t%e+e)%e}var 
y=t(\"d3\"),x=t(\"fast-isnumeric\"),b=t(\"../../plotly\"),_=t(\"../../lib\"),w=t(\"../../lib/svg_text_utils\"),k=t(\"../../components/titles\"),M=t(\"../../components/color\"),A=t(\"../../components/drawing\"),L=e.exports={};L.layoutAttributes=t(\"./layout_attributes\"),L.supplyLayoutDefaults=t(\"./layout_defaults\"),L.setConvert=t(\"./set_convert\");var T=t(\"./axis_ids\");L.id2name=T.id2name,L.cleanId=T.cleanId,L.list=T.list,L.listIds=T.listIds,L.getFromId=T.getFromId,L.getFromTrace=T.getFromTrace,L.coerceRef=function(t,e,n,r,a){var o=n._fullLayout._has(\"gl2d\")?[]:L.listIds(n,r),i=r+\"ref\",l={};return l[i]={va"
+,
+"lType:\"enumerated\",values:o.concat([\"paper\"]),dflt:a||o[0]||\"paper\"},_.coerce(t,e,l,i)},L.coerceARef=function(t,e,n,r,a){var o=n._fullLayout._has(\"gl2d\")?[]:L.listIds(n,r),i=\"a\"+r+\"ref\",l={};return l[i]={valType:\"enumerated\",values:o.concat([\"pixel\"]),dflt:a||\"pixel\"||o[0]},_.coerce(t,e,l,i)},L.clearTypes=function(t,e){Array.isArray(e)&&e.length||(e=t._fullData.map(function(t,e){return e})),e.forEach(function(e){var n=t.data[e];delete(L.getFromId(t,n.xaxis)||{}).type,delete(L.getFromId(t,n.yaxis)||{}).type})},L.counterLetter=function(t){var e=t.charAt(0);return\"x\"===e?\"y\":\"y\"===e?\"x\":void 0},L.minDtick=function(t,e,n,r){-1===[\"log\",\"category\"].indexOf(t.type)&&r?null===t._minDtick?(t._minDtick=e,t._forceTick0=n):t._minDtick&&((t._minDtick/e+1e-6)%1<2e-6&&((n-t._forceTick0)/e%1+1.000001)%1<2e-6?(t._minDtick=e,t._forceTick0=n):((e/t._minDtick+1e-6)%1>2e-6||((n-t._forceTick0)/t._minDtick%1+1.000001)%1>2e-6)&&(t._minDtick=0)):t._minDtick=0},L.getAutoRange=function(t){var e,n=[],r=t._min[0].val,a=t._max[0].val;for(e=1;e<t._min.length&&r===a;e++)r=Math.min(r,t._min[e].val);for(e=1;e<t._max.length&&r===a;e++)a=Math.max(a,t._max[e].val);var o,i,l,s,c,u,f,d=0,h=t.range&&t.range[1]<t.range[0];for(\"reversed\"===t.autorange&&(h=!0,t.autorange=!0),e=0;e<t._min.length;e++)for(i=t._min[e],o=0;o<t._max.length;o++)l=t._max[o],f=l.val-i.val,u=t._length-i.pad-l.pad,f>0&&u>0&&f/u>d&&(s=i,c=l,d=f/u);return r===a?n=h?[r+1,\"normal\"!==t.rangemode?0:r-1]:[\"normal\"!==t.rangemode?0:r-1,r+1]:d&&(\"linear\"!==t.type&&\"-\"!==t.type||(\"tozero\"===t.rangemode&&s.val>=0?s={val:0,pad:0}:\"nonnegative\"===t.rangemode&&(s.val-d*s.pad<0&&(s={val:0,pad:0}),c.val<0&&(c={val:1,pad:0})),d=(c.val-s.val)/(t._length-s.pad-c.pad)),n=[s.val-d*s.pad,c.val+d*c.pad],n[0]===n[1]&&(n=[n[0]-1,n[0]+1]),h&&n.reverse()),n},L.doAutoRange=function(t){t._length||t.setScale();var e=t._min&&t._max&&t._min.length&&t._max.length;if(t.autorange&&e){t.range=L.getAutoRange(t);var n=t._gd.layout[t._name];n||(t._gd.layout[t._name]=n={}),n!==t&&(n.range=t.range.slice(),n.autorange=t.autorange)}},L.saveRangeInitial=function(t,e){for(var n=L.list(t,\"\",!0),r=!1,a=0;a<n.length;a++){var o=n[a],i=void 0===o._rangeInitial,l=i||!(o.range[0]===o._rangeInitial[0]&&o.range[1]===o._rangeInitial[1]);(i&&o.autorange===!1||e&&l)&&(o._rangeInitial=o.range.slice(),r=!0)}return r};var z=Number.MAX_VALUE/2;L.expand=function(t,e,n){function r(t){if(Array.isArray(t))return function(e){return Math.max(Number(t[e]||0),0)};var e=Math.max(Number(t||0),0);return function(){return e}}function a(n){function r(t){return x(t)&&Math.abs(t)<z}if(s=e[n],x(s)){if(f=b(n)+m,d=_(n)+m,p=s-k(n),g=s+w(n),\"log\"===t.type&&g/10>p&&(p=g/10),c=t.c2l(p),u=t.c2l(g),y&&(c=Math.min(0,c),u=Math.max(0,u)),r(c)){for(h=!0,i=0;i<t._min.length&&h;i++)l=t._min[i],l.val<=c&&l.pad>=d?h=!1:l.val>=c&&l.pad<=d&&(t._min.splice(i,1),i--);h&&t._min.push({val:c,pad:y&&0===c?0:d})}if(r(u)){for(h=!0,i=0;i<t._max.length&&h;i++)l=t._max[i],l.val>=u&&l.pad>=f?h=!1:l.val<=u&&l.pad<=f&&(t._max.splice(i,1),i--);h&&t._max.push({val:u,pad:y&&0===u?0:f})}}}if((t.autorange||t._needsExpand)&&e){t._min||(t._min=[]),t._max||(t._max=[]),n||(n={}),t._m||t.setScale();var 
o,i,l,s,c,u,f,d,h,p,g,v=e.length,m=n.padded?.05*t._length:0,y=n.tozero&&(\"linear\"===t.type||\"-\"===t.type),b=r((t._m>0?n.ppadplus:n.ppadminus)||n.ppad||0),_=r((t._m>0?n.ppadminus:n.ppadplus)||n.ppad||0),w=r(n.vpadplus||n.vpad),k=r(n.vpadminus||n.vpad);for(o=0;6>o;o++)a(o);for(o=v-1;o>5;o--)a(o)}},L.autoBin=function(t,e,n,r){function a(t){return(1+100*(t-h)/f.dtick)%100<2}var o=_.aggNums(Math.min,null,t),i=_.aggNums(Math.max,null,t);if(\"category\"===e.type)return{start:o-.5,end:i+.5,size:1};var l;if(n)l=(i-o)/n;else{var s=_.distinctVals(t),c=Math.pow(10,Math.floor(Math.log(s.minDiff)/Math.LN10)),u=c*_.roundUp(s.minDiff/c,[.9,1.9,4.9,9.9],!0);l=Math.max(u,2*_.stdev(t)/Math.pow(t.length,r?.25:.4))}var f={type:\"log\"===e.type?\"linear\":e.type,range:[o,i]};L.autoTicks(f,l);var d,h=L.tickIncrement(L.tickFirst(f),f.dtick,\"reverse\");if(\"number\"==typeof f.dtick){for(var p=0,g=0,v=0,m=0,y=0;y<t.length;y++)t[y]%1===0?v++:x(t[y])||m++,a(t[y])&&p++,a(t[y]+f.dtick/2)&&g++;var b=t.length-m;if(v===b&&\"date\"!==e.type)f.dtick<1?h=o-.5*f.dtick:h-=.5;else if(.1*b>g&&(p>.3*b||a(o)||a(i))){var w=f.dtick/2;h+=o>h+w?w:-w}var k=1+Math.floor((i-h)/f.dtick);d=h+k*f.dtick}else for(d=h;i>=d;)d=L.tickIncrement(d,f.dtick);return{start:h,end:d,size:f.dtick}},L.calcTicks=function(t){if(\"array\"===t.tickmode)return r(t);if(\"auto\"===t.tickmode||!t.dtick){var e,n=t.nticks;n||(\"category\"===t.type?(e=t.tickfont?1.2*(t.tickfont.size||12):15,n=t._length/e):(e=\"y\"===t._id.charAt(0)?40:80,n=_.constrain(t._length/e,4,9)+1)),L.autoTicks(t,Math.abs(t.range[1]-t.range[0])/n),t._minDtick>0&&t.dtick<2*t._minDtick&&(t.dtick=t._minDtick,t.tick0=t._forceTick0)}t.tick0||(t.tick0=\"date\"===t.type?new Date(2e3,0,1).getTime():0),o(t),t._tmin=L.tickFirst(t);var a=t.range[1]<t.range[0],i=[],l=1.0001*t.range[1]-1e-4*t.range[0];\"category\"===t.type&&(l=a?Math.max(-.5,l):Math.min(t._categories.length-.5,l));for(var s=t._tmin;(a?s>=l:l>=s)&&(i.push(s),!(i.length>1e3));s=L.tickIncrement(s,t.dtick,a));t._tmax=i[i.length-1];for(var c=new Array(i.length),u=0;u<i.length;u++)c[u]=L.tickText(t,i[u]);return c};var S=[2,5,10],E=[1,2,3,6,12],C=[1,2,5,10,15,30],O=[1,2,3,7,14],P=[-.046,0,.301,.477,.602,.699,.778,.845,.903,.954,1],N=[-.301,0,.301,.699,1];L.autoTicks=function(t,e){var n;if(\"date\"===t.type)t.tick0=new Date(2e3,0,1).getTime(),e>157788e5?(e/=315576e5,n=Math.pow(10,Math.floor(Math.log(e)/Math.LN10)),t.dtick=\"M\"+12*a(e,n,S)):e>12096e5?(e/=26298e5,t.dtick=\"M\"+a(e,1,E)):e>432e5?(t.dtick=a(e,864e5,O),t.tick0=new Date(2e3,0,2).getTime()):e>18e5?t.dtick=a(e,36e5,E):e>3e4?t.dtick=a(e,6e4,C):e>500?t.dtick=a(e,1e3,C):(n=Math.pow(10,Math.floor(Math.log(e)/Math.LN10)),t.dtick=a(e,n,S));else if(\"log\"===t.type)if(t.tick0=0,e>.7)t.dtick=Math.ceil(e);else if(Math.abs(t.range[1]-t.range[0])<1){var r=1.5*Math.abs((t.range[1]-t.range[0])/e);e=Math.abs(Math.pow(10,t.range[1])-Math.pow(10,t.range[0]))/r,n=Math.pow(10,Math.floor(Math.log(e)/Math.LN10)),t.dtick=\"L\"+a(e,n,S)}else t.dtick=e>.3?\"D2\":\"D1\";else\"category\"===t.type?(t.tick0=0,t.dtick=Math.ceil(Math.max(e,1))):(t.tick0=0,n=Math.pow(10,Math.floor(Math.log(e)/Math.LN10)),t.dtick=a(e,n,S));if(0===t.dtick&&(t.dtick=1),!x(t.dtick)&&\"string\"!=typeof t.dtick){var o=t.dtick;throw t.dtick=1,\"ax.dtick error: \"+String(o)}},L.tickIncrement=function(t,e,n){var r=n?-1:1;if(x(e))return t+r*e;var a=e.charAt(0),o=r*Number(e.substr(1));if(\"M\"===a){var i=new Date(t);return i.setMonth(i.getMonth()+o)}if(\"L\"===a)return Math.log(Math.pow(10,t)+o)/Math.LN10;if(\"D\"===a){var 
l=\"D2\"===e?N:P,s=t+.01*r,c=_.roundUp(m(s,1),l,n);return Math.floor(s)+Math.log(y.round(Math.pow(10,c),1))/Math.LN10}throw\"unrecognized dtick \"+String(e)},L.tickFirst=function(t){var e=t.range[1]<t.range[0],n=e?Math.floor:Math.ceil,r=1.0001*t.range[0]-1e-4*t.range[1],a=t.dtick,o=t.tick0;if(x(a)){var i=n((r-o)/a)*a+o;return\"category\"===t.type&&(i=_.constrain(i,0,t._categories.length-1)),i}var l,s,c,u=a.charAt(0),f=Number(a.substr(1));if(\"M\"===u){for(l=new Date(o),r=new Date(r),s=12*(r.getFullYear()-l.getFullYear())+r.getMonth()-l.getMonth(),c=l.setMonth(l.getMonth()+(Math.round(s/f)+(e?1:-1))*f);e?c>r:r>c;)c=L.tickIncrement(c,a,e);return c}if(\"L\"===u)return Math.log(n((Math.pow(10,r)-o)/f)*f+o)/Math.LN10;if(\"D\"===u){var d=\"D2\"===a?N:P,h=_.roundUp(m(r,1),d,e);return Math.floor(r)+Math.log(y.round(Math.pow(10,h),1))/Math.LN10}throw\"unrecognized dtick \"+String(a)};var D=y.time.format(\"%Y\"),I=y.time.format(\"%b %Y\"),R=y.time.format(\"%b %-d\"),j=y.time.format(\"%b %-d %Hh\"),q=y.time.format(\"%H:%M\"),F=y.time.format(\":%S\"),B=/%(\\d?)f/g;L.tickText=function(t,e,n){function r(r){var a;return void 0===r?!0:n?\"none\"===r:(a={first:t._tmin,last:t._tmax}[r],\"all\"!==r&&e!==a)}var a,o,i=l(t,e),d=\"array\"===t.tickmode,h=n||d;if(d&&Array.isArray(t.ticktext)){var p=Math.abs(t.range[1]-t.range[0])/1e4;for(o=0;o<t.ticktext.length&&!(Math.abs(e-t.d2l(t.tickvals[o]))<p);o++);if(o<t.ticktext.length)return i.text=String(t.ticktext[o]),i}return a=\"none\"!==t.exponentformat&&r(t.showexponent)?\"hide\":\"\",\"date\"===t.type?s(t,i,n,h):\"log\"===t.type?c(t,i,n,h,a):\"category\"===t.type?u(t,i):f(t,i,n,h,a),t.tickprefix&&!r(t.showtickprefix)&&(i.text=t.tickprefix+i.text),t.ticksuffix&&!r(t.showticksuffix)&&(i.text+=t.ticksuffix),i};var H=[\"f\",\"p\",\"n\",\"&mu;\",\"m\",\"\",\"k\",\"M\",\"G\",\"T\"];L.subplotMatch=/^x([0-9]*)y([0-9]*)$/,L.getSubplots=function(t,e){function n(t,e){return-1!==t.indexOf(e._id)}var r,a,o,i=[],l=t.data||[];for(r=0;r<l.length;r++){var s=l[r];if(s.visible!==!1&&\"legendonly\"!==s.visible&&(b.Plots.traceIs(s,\"cartesian\")||b.Plots.traceIs(s,\"gl2d\"))){var c=s.xaxis||\"x\",u=s.yaxis||\"y\";o=c+u,-1===i.indexOf(o)&&i.push(o)}}var f=L.list(t,\"\",!0);for(r=0;r<f.length;r++){var d=f[r],h=d._id.charAt(0),p=\"free\"===d.anchor?\"x\"===h?\"y\":\"x\":d.anchor,g=L.getFromId(t,p),v=!1;for(a=0;a<i.length;a++)if(n(i[a],d)){v=!0;break}\"free\"===d.anchor&&v||g&&(o=\"x\"===h?d._id+g._id:g._id+d._id,-1===i.indexOf(o)&&i.push(o))}var m=L.subplotMatch,y=[];for(r=0;r<i.length;r++)o=i[r],m.test(o)&&y.push(o);return y.sort(function(t,e){var n=t.match(m),r=e.match(m);return n[1]===r[1]?+(n[2]||1)-(r[2]||1):+(n[1]||0)-(r[1]||0)}),e?L.findSubplotsWithAxis(y,e):y},L.findSubplotsWithAxis=function(t,e){for(var n=new RegExp(\"x\"===e._id.charAt(0)?\"^\"+e._id+\"y\":e._id+\"$\"),r=[],a=0;a<t.length;a++){var o=t[a];n.test(o)&&r.push(o)}return r},L.makeClipPaths=function(t){var e,n,r=t._fullLayout,a=r._defs,o={_offset:0,_length:r.width,_id:\"\"},i={_offset:0,_length:r.height,_id:\"\"},l=L.list(t,\"x\",!0),s=L.list(t,\"y\",!0),c=[];for(e=0;e<l.length;e++)for(c.push({x:l[e],y:i}),n=0;n<s.length;n++)0===e&&c.push({x:o,y:s[n]}),c.push({x:l[e],y:s[n]});var u=a.selectAll(\"g.clips\").data([0]);u.enter().append(\"g\").classed(\"clips\",!0);var f=u.selectAll(\".axesclip\").data(c,function(t){return t.x._id+t.y._id});f.enter().append(\"clipPath\").classed(\"axesclip\",!0).attr(\"id\",function(t){return\"clip\"+r"
+,
+"._uid+t.x._id+t.y._id}).append(\"rect\"),f.exit().remove(),f.each(function(t){y.select(this).select(\"rect\").attr({x:t.x._offset||0,y:t.y._offset||0,width:t.x._length||1,height:t.y._length||1})})},L.doTicks=function(t,e,n){function r(t){var e=c.l2p(t.x);return e>1&&e<c._length-1}function a(t,e){var n=t.selectAll(\"path.\"+E).data(\"inside\"===c.ticks?H:z,S);e&&c.ticks?(n.enter().append(\"path\").classed(E,1).classed(\"ticks\",1).classed(\"crisp\",1).call(M.stroke,c.tickcolor).style(\"stroke-width\",j+\"px\").attr(\"d\",e),n.attr(\"transform\",h),n.exit().remove()):n.remove()}function o(n,r){function a(t,e){t.each(function(t){var n=p(e),r=y.select(this),a=r.select(\".text-math-group\"),o=h(t)+(x(e)&&0!==+e?\" rotate(\"+e+\",\"+f(t)+\",\"+(d(t)-t.fontSize/2)+\")\":\"\");if(a.empty()){var i=r.select(\"text\").attr({transform:o,\"text-anchor\":n});i.empty()||i.selectAll(\"tspan.line\").attr({x:i.attr(\"x\"),y:i.attr(\"y\")})}else{var l=A.bBox(a.node()).width*{end:-.5,start:.5}[n];a.attr(\"transform\",o+(l?\"translate(\"+l+\",0)\":\"\"))}})}function o(){return L.length&&Promise.all(L)}function l(){if(a(u,c.tickangle),\"x\"===v&&!x(c.tickangle)&&(\"log\"!==c.type||\"D\"!==String(c.dtick).charAt(0))){var t=[];for(u.each(function(e){var n=y.select(this),r=n.select(\".text-math-group\"),a=c.l2p(e.x);r.empty()&&(r=n.select(\"text\"));var o=A.bBox(r.node());t.push({top:0,bottom:10,height:10,left:a-o.width/2,right:a+o.width/2+2,width:o.width+2})}),g=0;g<t.length-1;g++)if(_.bBoxIntersect(t[g],t[g+1])){M=30;break}if(M){var n=Math.abs((z[z.length-1].x-z[0].x)*c._m)/(z.length-1);2.5*k>n&&(M=90),a(u,M)}c._lastangle=M}return i(e),e+\" done\"}function s(){c._boundingBox=n.node().getBoundingClientRect()}var u=n.selectAll(\"g.\"+E).data(z,S);if(!c.showticklabels||!x(r))return u.remove(),void i(e);var f,d,p,m,b;\"x\"===v?(b=\"bottom\"===F?1:-1,f=function(t){return t.dx+D*b},m=r+(N+P)*b,d=function(t){return t.dy+m+t.fontSize*(\"bottom\"===F?1:-.5)},p=function(t){return x(t)&&0!==t&&180!==t?0>t*b?\"end\":\"start\":\"middle\"}):(b=\"right\"===F?1:-1,d=function(t){return t.dy+t.fontSize/2-D*b},f=function(t){return t.dx+r+(N+P+(90===Math.abs(c.tickangle)?t.fontSize/2:0))*b},p=function(t){return x(t)&&90===Math.abs(t)?\"middle\":\"right\"===F?\"start\":\"end\"});var k=0,M=0,L=[];u.enter().append(\"g\").classed(E,1).append(\"text\").attr(\"text-anchor\",\"middle\").each(function(e){var n=y.select(this),r=t._promises.length;n.call(A.setPosition,f(e),d(e)).call(A.font,e.font,e.fontSize,e.fontColor).text(e.text).call(w.convertToTspans),r=t._promises[r],r?L.push(t._promises.pop().then(function(){a(n,c.tickangle)})):a(n,c.tickangle)}),u.exit().remove(),u.each(function(t){k=Math.max(k,t.fontSize)}),a(u,c._lastangle||c.tickangle);var T=_.syncOrAsync([o,l,s]);return T&&T.then&&t._promises.push(T),T}function i(e){if(!n){var r,a,o,i,l=T.getFromId(t,e),s=y.select(t).selectAll(\"g.\"+e+\"tick\"),c={selection:s,side:l.side},f=e.charAt(0),d=t._fullLayout._size,h=1.5,p=l.titlefont.size;if(s.size()){var 
g=y.select(s.node().parentNode).attr(\"transform\").match(/translate\\(([-\\.\\d]+),([-\\.\\d]+)\\)/);g&&(c.offsetLeft=+g[1],c.offsetTop=+g[2])}\"x\"===f?(a=\"free\"===l.anchor?{_offset:d.t+(1-(l.position||0))*d.h,_length:0}:T.getFromId(t,l.anchor),o=l._offset+l._length/2,i=a._offset+(\"top\"===l.side?-10-p*(h+(l.showticklabels?1:0)):a._length+10+p*(h+(l.showticklabels?1.5:.5))),l.rangeslider&&l.rangeslider.visible&&l._boundingBox&&(i+=(u.height-u.margin.b-u.margin.t)*l.rangeslider.thickness+l._boundingBox.height),c.side||(c.side=\"bottom\")):(a=\"free\"===l.anchor?{_offset:d.l+(l.position||0)*d.w,_length:0}:T.getFromId(t,l.anchor),i=l._offset+l._length/2,o=a._offset+(\"right\"===l.side?a._length+10+p*(h+(l.showticklabels?1:.5)):-10-p*(h+(l.showticklabels?.5:0))),r={rotate:\"-90\",offset:0},c.side||(c.side=\"left\")),k.draw(t,e+\"title\",{propContainer:l,propName:l._name+\".title\",dfltName:f.toUpperCase()+\" axis\",avoid:c,transform:r,attributes:{x:o,y:i,\"text-anchor\":\"middle\"}})}}function l(t,e){return t.visible!==!0||t.xaxis+t.yaxis!==e?!1:b.Plots.traceIs(t,\"bar\")&&t.orientation==={x:\"h\",y:\"v\"}[v]?!0:t.fill&&t.fill.charAt(t.fill.length-1)===v}function s(e,n,a){var o=e.gridlayer,i=e.zerolinelayer,s=e[\"hidegrid\"+v]?[]:H,u=c._gridpath||\"M0,0\"+(\"x\"===v?\"v\":\"h\")+n._length,f=o.selectAll(\"path.\"+C).data(c.showgrid===!1?[]:s,S);if(f.enter().append(\"path\").classed(C,1).classed(\"crisp\",1).attr(\"d\",u).each(function(t){c.zeroline&&(\"linear\"===c.type||\"-\"===c.type)&&Math.abs(t.x)<c.dtick/100&&y.select(this).remove()}),f.attr(\"transform\",h).call(M.stroke,c.gridcolor||\"#ddd\").style(\"stroke-width\",I+\"px\"),f.exit().remove(),i){for(var d=!1,p=0;p<t._fullData.length;p++)if(l(t._fullData[p],a)){d=!0;break}var g=c.range[0]*c.range[1]<=0&&c.zeroline&&(\"linear\"===c.type||\"-\"===c.type)&&s.length&&(d||r({x:0})||!c.showline),m=i.selectAll(\"path.\"+O).data(g?[{x:0}]:[]);m.enter().append(\"path\").classed(O,1).classed(\"zl\",1).classed(\"crisp\",1).attr(\"d\",u),m.attr(\"transform\",h).call(M.stroke,c.zerolinecolor||M.defaultLine).style(\"stroke-width\",R+\"px\"),m.exit().remove()}}var c,u=t._fullLayout,f=!1;if(\"object\"==typeof e)c=e,e=c._id,f=!0;else if(c=L.getFromId(t,e),\"redraw\"===e&&u._paper.selectAll(\"g.subplot\").each(function(t){var e=u._plots[t],n=e.x(),r=e.y();e.xaxislayer.selectAll(\".\"+n._id+\"tick\").remove(),e.yaxislayer.selectAll(\".\"+r._id+\"tick\").remove(),e.gridlayer.selectAll(\"path\").remove(),e.zerolinelayer.selectAll(\"path\").remove()}),!e||\"redraw\"===e)return _.syncOrAsync(L.list(t,\"\",!0).map(function(n){return function(){if(n._id){var r=L.doTicks(t,n._id);return\"redraw\"===e&&(n._r=n.range.slice()),r}}}));c.tickformat||(-1===[\"none\",\"e\",\"E\",\"power\",\"SI\",\"B\"].indexOf(c.exponentformat)&&(c.exponentformat=\"e\"),-1===[\"all\",\"first\",\"last\",\"none\"].indexOf(c.showexponent)&&(c.showexponent=\"all\")),c.range=[+c.range[0],+c.range[1]],c.setScale();var d,h,p,g,v=e.charAt(0),m=L.counterLetter(e),z=L.calcTicks(c),S=function(t){return t.text+t.x+c.mirror},E=e+\"tick\",C=e+\"grid\",O=e+\"zl\",P=(c.linewidth||1)/2,N=(\"outside\"===c.ticks?c.ticklen:1)+(c.linewidth||0),D=0,I=A.crispRound(t,c.gridwidth,1),R=A.crispRound(t,c.zerolinewidth,I),j=A.crispRound(t,c.tickwidth,1);if(c._counterangle&&\"outside\"===c.ticks){var 
q=c._counterangle*Math.PI/180;N=c.ticklen*Math.cos(q)+(c.linewidth||0),D=c.ticklen*Math.sin(q)}if(\"x\"===v)d=[\"bottom\",\"top\"],h=function(t){return\"translate(\"+c.l2p(t.x)+\",0)\"},p=function(t,e){if(c._counterangle){var n=c._counterangle*Math.PI/180;return\"M0,\"+t+\"l\"+Math.sin(n)*e+\",\"+Math.cos(n)*e}return\"M0,\"+t+\"v\"+e};else{if(\"y\"!==v)return void _.warn(\"Unrecognized doTicks axis:\",e);d=[\"left\",\"right\"],h=function(t){return\"translate(0,\"+c.l2p(t.x)+\")\"},p=function(t,e){if(c._counterangle){var n=c._counterangle*Math.PI/180;return\"M\"+t+\",0l\"+Math.cos(n)*e+\",\"+-Math.sin(n)*e}return\"M\"+t+\",0h\"+e}}var F=c.side||d[0],B=[-1,1,F===d[1]?1:-1];\"inside\"!==c.ticks==(\"x\"===v)&&(B=B.map(function(t){return-t}));var H=z.filter(r);if(f){if(a(c._axislayer,p(c._pos+P*B[2],B[2]*c.ticklen)),c._counteraxis){var V={gridlayer:c._gridlayer,zerolinelayer:c._zerolinelayer};s(V,c._counteraxis)}return o(c._axislayer,c._pos)}var Z=L.getSubplots(t,c).map(function(t){var e=u._plots[t];if(u._has(\"cartesian\")){var n=e[v+\"axislayer\"],r=c._linepositions[t]||[],i=e[m](),l=i._id===c.anchor,f=[!1,!1,!1],h=\"\";if(\"allticks\"===c.mirror?f=[!0,!0,!1]:l&&(\"ticks\"===c.mirror?f=[!0,!0,!1]:f[d.indexOf(F)]=!0),c.mirrors)for(g=0;2>g;g++){var y=c.mirrors[i._id+d[g]];\"ticks\"!==y&&\"labels\"!==y||(f[g]=!0)}return void 0!==r[2]&&(f[2]=!0),f.forEach(function(t,e){var n=r[e],a=B[e];t&&x(n)&&(h+=p(n+P*a,a*c.ticklen))}),a(n,h),s(e,i,t),o(n,r[3])}}).filter(function(t){return t&&t.then});return Z.length?Promise.all(Z):0},L.swap=function(t,e){for(var n=h(t,e),r=0;r<n.length;r++)g(t,n[r].x,n[r].y)}},{\"../../components/color\":18,\"../../components/drawing\":41,\"../../components/titles\":81,\"../../lib\":89,\"../../lib/svg_text_utils\":100,\"../../plotly\":107,\"./axis_ids\":112,\"./layout_attributes\":119,\"./layout_defaults\":120,\"./set_convert\":124,d3:9,\"fast-isnumeric\":11}],111:[function(t,e,n){\"use strict\";function r(t,e){if(\"-\"===t.type){var n=t._id,r=n.charAt(0);-1!==n.indexOf(\"scene\")&&(n=r);var s=l(e,n,r);if(s){if(\"histogram\"===s.type&&r==={v:\"y\",h:\"x\"}[s.orientation||\"v\"])return void(t.type=\"linear\");if(o(s,r)){for(var c,u=a(s),f=[],d=0;d<e.length;d++)c=e[d],p.traceIs(c,\"box\")&&(c[r+\"axis\"]||r)===n&&(void 0!==c[u]?f.push(c[u][0]):void 0!==c.name?f.push(c.name):f.push(\"text\"));t.type=i(f)}else t.type=i(s[r]||[s[r+\"0\"]])}}}function a(t){return{v:\"x\",h:\"y\"}[t.orientation||\"v\"]}function o(t,e){var n=a(t);return p.traceIs(t,\"box\")&&e===n&&void 0===t[n]&&void 0===t[n+\"0\"]}function i(t){return c(t)?\"date\":u(t)?\"category\":s(t)?\"linear\":\"-\"}function l(t,e,n){for(var r=0;r<t.length;r++){var a=t[r];if((a[n+\"axis\"]||n)===e){if(o(a,n))return a;if((a[n]||[]).length||a[n+\"0\"])return a}}}function s(t){if(!t)return!1;for(var e=0;e<t.length;e++)if(f(t[e]))return!0;return!1}function c(t){for(var e,n=0,r=0,a=Math.max(1,(t.length-1)/1e3),o=0;o<t.length;o+=a)e=t[Math.round(o)],h.isDateTime(e)&&(n+=1),f(e)&&(r+=1);return n>2*r}function u(t){for(var e,n=Math.max(1,(t.length-1)/1e3),r=0,a=0,o=0;o<t.length;o+=n)e=k(t[Math.round(o)]),f(e)?r++:\"string\"==typeof e&&\"\"!==e&&\"None\"!==e&&a++;return a>2*r}var 
f=t(\"fast-isnumeric\"),d=t(\"tinycolor2\").mix,h=t(\"../../lib\"),p=t(\"../plots\"),g=t(\"../../components/color/attributes\").lightFraction,v=t(\"./layout_attributes\"),m=t(\"./tick_value_defaults\"),y=t(\"./tick_mark_defaults\"),x=t(\"./tick_label_defaults\"),b=t(\"./category_order_defaults\"),_=t(\"./set_convert\"),w=t(\"./ordered_categories\"),k=t(\"./clean_datum\"),M=t(\"./axis_ids\");e.exports=function(t,e,n,a){function o(n,r){return h.coerce2(t,e,v,n,r)}var i=a.letter,l=a.font||{},s=\"Click to enter \"+(a.title||i.toUpperCase()+\" axis\")+\" title\";a.name&&(e._name=a.name,e._id=M.name2id(a.name));var c=n(\"type\");\"-\"===c&&(r(e,a.data),\"-\"===e.type?e.type=\"linear\":c=t.type=e.type),_(e);var u=n(\"color\"),p=u===t.color?u:l.color;n(\"title\",s),h.coerceFont(n,\"titlefont\",{family:l.family,size:Math.round(1.2*l.size),color:p});var k=2===(t.range||[]).length&&"
+,
+"f(t.range[0])&&f(t.range[1]),A=n(\"autorange\",!k);A&&n(\"rangemode\");var L=n(\"range\",[-1,\"x\"===i?6:4]);L[0]===L[1]&&(e.range=[L[0]-1,L[0]+1]),h.noneOrAll(t.range,e.range,[0,1]),n(\"fixedrange\"),m(t,e,n,c),x(t,e,n,c,a),y(t,e,n,a),b(t,e,n);var T=o(\"linecolor\",u),z=o(\"linewidth\"),S=n(\"showline\",!!T||!!z);S||(delete e.linecolor,delete e.linewidth),(S||e.ticks)&&n(\"mirror\");var E=o(\"gridcolor\",d(u,a.bgColor,g).toRgbString()),C=o(\"gridwidth\"),O=n(\"showgrid\",a.showGrid||!!E||!!C);O||(delete e.gridcolor,delete e.gridwidth);var P=o(\"zerolinecolor\",u),N=o(\"zerolinewidth\"),D=n(\"zeroline\",a.showGrid||!!P||!!N);return D||(delete e.zerolinecolor,delete e.zerolinewidth),e._initialCategories=\"category\"===c?w(i,e.categoryorder,e.categoryarray,a.data):[],e}},{\"../../components/color/attributes\":17,\"../../lib\":89,\"../plots\":130,\"./axis_ids\":112,\"./category_order_defaults\":113,\"./clean_datum\":114,\"./layout_attributes\":119,\"./ordered_categories\":121,\"./set_convert\":124,\"./tick_label_defaults\":125,\"./tick_mark_defaults\":126,\"./tick_value_defaults\":127,\"fast-isnumeric\":11,tinycolor2:13}],112:[function(t,e,n){\"use strict\";function r(t,e,n){function r(t,n){for(var r=Object.keys(t),a=/^[xyz]axis[0-9]*/,o=[],i=0;i<r.length;i++){var l=r[i];e&&l.charAt(0)!==e||a.test(l)&&o.push(n+l)}return o.sort()}var o=t._fullLayout;if(!o)return[];var i=r(o,\"\");if(n)return i;for(var l=a.getSubplotIds(o,\"gl3d\")||[],s=0;s<l.length;s++){var c=l[s];i=i.concat(r(o[c],c+\".\"))}return i}var a=t(\"../plots\"),o=t(\"../../lib\"),i=t(\"./constants\");n.id2name=function(t){if(\"string\"==typeof t&&t.match(i.AX_ID_PATTERN)){var e=t.substr(1);return\"1\"===e&&(e=\"\"),t.charAt(0)+\"axis\"+e}},n.name2id=function(t){if(t.match(i.AX_NAME_PATTERN)){var e=t.substr(5);return\"1\"===e&&(e=\"\"),t.charAt(0)+e}},n.cleanId=function(t,e){if(t.match(i.AX_ID_PATTERN)&&(!e||t.charAt(0)===e)){var n=t.substr(1).replace(/^0+/,\"\");return\"1\"===n&&(n=\"\"),t.charAt(0)+n}},n.list=function(t,e,n){return r(t,e,n).map(function(e){return o.nestedProperty(t._fullLayout,e).get();\n"
+,
+"})},n.listIds=function(t,e){return r(t,e,!0).map(n.name2id)},n.getFromId=function(t,e,r){var a=t._fullLayout;return\"x\"===r?e=e.replace(/y[0-9]*/,\"\"):\"y\"===r&&(e=e.replace(/x[0-9]*/,\"\")),a[n.id2name(e)]},n.getFromTrace=function(t,e,r){var o=t._fullLayout,i=null;if(a.traceIs(e,\"gl3d\")){var l=e.scene;\"scene\"===l.substr(0,5)&&(i=o[l][r+\"axis\"])}else i=n.getFromId(t,e[r+\"axis\"]||r);return i}},{\"../../lib\":89,\"../plots\":130,\"./constants\":115}],113:[function(t,e,n){\"use strict\";e.exports=function(t,e,n){if(\"category\"===e.type){var r,a=t.categoryarray,o=Array.isArray(a)&&a.length>0;o&&(r=\"array\");var i=n(\"categoryorder\",r);\"array\"===i&&n(\"categoryarray\"),o||\"array\"!==i||(e.categoryorder=\"trace\")}}},{}],114:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"../../lib\");e.exports=function(t){try{if(\"object\"==typeof t&&null!==t&&t.getTime)return a.ms2DateTime(t);if(\"string\"!=typeof t&&!r(t))return\"\";t=t.toString().replace(/['\"%,$# ]/g,\"\")}catch(e){a.error(e,t)}return t}},{\"../../lib\":89,\"fast-isnumeric\":11}],115:[function(t,e,n){\"use strict\";e.exports={idRegex:{x:/^x([2-9]|[1-9][0-9]+)?$/,y:/^y([2-9]|[1-9][0-9]+)?$/},attrRegex:{x:/^xaxis([2-9]|[1-9][0-9]+)?$/,y:/^yaxis([2-9]|[1-9][0-9]+)?$/},BADNUM:void 0,xAxisMatch:/^xaxis[0-9]*$/,yAxisMatch:/^yaxis[0-9]*$/,AX_ID_PATTERN:/^[xyz][0-9]*$/,AX_NAME_PATTERN:/^[xyz]axis[0-9]*$/,DBLCLICKDELAY:300,MINDRAG:8,MINSELECT:12,MINZOOM:20,DRAGGERSIZE:20,MAXDIST:20,YANGLE:60,HOVERARROWSIZE:6,HOVERTEXTPAD:3,HOVERFONTSIZE:13,HOVERFONT:\"Arial, sans-serif\",HOVERMINTIME:50,BENDPX:1.5,REDRAWDELAY:50}},{}],116:[function(t,e,n){\"use strict\";function r(t,e){var n,r=t.range[e],a=Math.abs(r-t.range[1-e]);return\"date\"===t.type?u.ms2DateTime(r,a):\"log\"===t.type?(n=Math.ceil(Math.max(0,-Math.log(a)/Math.LN10))+3,l.format(\".\"+n+\"g\")(Math.pow(10,r))):(n=Math.floor(Math.log(Math.abs(r))/Math.LN10)-Math.floor(Math.log(a)/Math.LN10)+4,l.format(\".\"+String(n)+\"g\")(r))}function a(t,e){return t?\"nsew\"===t?\"pan\"===e?\"move\":\"crosshair\":t.toLowerCase()+\"-resize\":\"pointer\"}function o(t){l.select(t).selectAll(\".zoombox,.js-zoombox-backdrop,.js-zoombox-menu,.zoombox-corners\").remove()}function i(t){var e=[\"lasso\",\"select\"];return-1!==e.indexOf(t)}var l=t(\"d3\"),s=t(\"tinycolor2\"),c=t(\"../../plotly\"),u=t(\"../../lib\"),f=t(\"../../lib/svg_text_utils\"),d=t(\"../../components/color\"),h=t(\"../../components/drawing\"),p=t(\"../../lib/setcursor\"),g=t(\"../../components/dragelement\"),v=t(\"./axes\"),m=t(\"./select\"),y=t(\"./constants\"),x=!0;e.exports=function(t,e,n,l,b,_,w,k){function M(t,e){for(var n=0;n<t.length;n++)if(!t[n].fixedrange)return e;return\"\"}function A(t){t[0]=Number(t[0]),t[1]=Number(t[1])}function L(e,n,r){var a=nt.getBoundingClientRect();at=n-a.left,ot=r-a.top,it={l:at,r:at,w:0,t:ot,b:ot,h:0},lt=t._hmpixcount?t._hmlumcount/t._hmpixcount:s(t._fullLayout.plot_bgcolor).getLuminance(),st=\"M0,0H\"+H+\"V\"+V+\"H0V0\",ct=!1,ut=\"xy\",ft=ht.append(\"path\").attr(\"class\",\"zoombox\").style({fill:lt>.2?\"rgba(0,0,0,0)\":\"rgba(255,255,255,0)\",\"stroke-width\":0}).attr(\"transform\",\"translate(\"+pt+\", \"+gt+\")\").attr(\"d\",st+\"Z\"),dt=ht.append(\"path\").attr(\"class\",\"zoombox-corners\").style({fill:d.background,stroke:d.defaultLine,\"stroke-width\":1,opacity:0}).attr(\"transform\",\"translate(\"+pt+\", \"+gt+\")\").attr(\"d\",\"M0,0Z\"),T();for(var o=0;o<Q.length;o++)A(Q[o].range)}function T(){ht.selectAll(\".select-outline\").remove()}function z(t,e){var 
n=Math.max(0,Math.min(H,t+at)),r=Math.max(0,Math.min(V,e+ot)),a=Math.abs(n-at),o=Math.abs(r-ot),i=Math.floor(Math.min(o,a,Y)/2);it.l=Math.min(at,n),it.r=Math.max(at,n),it.t=Math.min(ot,r),it.b=Math.max(ot,r),!J||o<Math.min(Math.max(.6*a,Z),Y)?Z>a?(ut=\"\",it.r=it.l,it.t=it.b,dt.attr(\"d\",\"M0,0Z\")):(it.t=0,it.b=V,ut=\"x\",dt.attr(\"d\",\"M\"+(it.l-.5)+\",\"+(ot-Y-.5)+\"h-3v\"+(2*Y+1)+\"h3ZM\"+(it.r+.5)+\",\"+(ot-Y-.5)+\"h3v\"+(2*Y+1)+\"h-3Z\")):!W||a<Math.min(.6*o,Y)?(it.l=0,it.r=H,ut=\"y\",dt.attr(\"d\",\"M\"+(at-Y-.5)+\",\"+(it.t-.5)+\"v-3h\"+(2*Y+1)+\"v3ZM\"+(at-Y-.5)+\",\"+(it.b+.5)+\"v3h\"+(2*Y+1)+\"v-3Z\")):(ut=\"xy\",dt.attr(\"d\",\"M\"+(it.l-3.5)+\",\"+(it.t-.5+i)+\"h3v\"+-i+\"h\"+i+\"v-3h-\"+(i+3)+\"ZM\"+(it.r+3.5)+\",\"+(it.t-.5+i)+\"h-3v\"+-i+\"h\"+-i+\"v-3h\"+(i+3)+\"ZM\"+(it.r+3.5)+\",\"+(it.b+.5-i)+\"h-3v\"+i+\"h\"+-i+\"v3h\"+(i+3)+\"ZM\"+(it.l-3.5)+\",\"+(it.b+.5-i)+\"h3v\"+i+\"h\"+i+\"v3h-\"+(i+3)+\"Z\")),it.w=it.r-it.l,it.h=it.b-it.t,ft.attr(\"d\",st+\"M\"+it.l+\",\"+it.t+\"v\"+it.h+\"h\"+it.w+\"v-\"+it.h+\"h-\"+it.w+\"Z\"),ct||(ft.transition().style(\"fill\",lt>.2?\"rgba(0,0,0,0.4)\":\"rgba(255,255,255,0.3)\").duration(200),dt.transition().style(\"opacity\",1).duration(200),ct=!0)}function S(t,e,n){var r,a,o;for(r=0;r<t.length;r++)a=t[r],a.fixedrange||(o=a.range,a.range=[o[0]+(o[1]-o[0])*e,o[0]+(o[1]-o[0])*n])}function E(e,n){return Math.min(it.h,it.w)<2*Z?(2===n&&D(),o(t)):(\"xy\"!==ut&&\"x\"!==ut||S(F,it.l/H,it.r/H),\"xy\"!==ut&&\"y\"!==ut||S(B,(V-it.b)/V,(V-it.t)/V),o(t),I(ut),void(x&&t.data&&t._context.showTips&&(u.notifier(\"Double-click to<br>zoom back out\",\"long\"),x=!1)))}function C(e,n){var a=1===(w+k).length;if(e)I();else if(2!==n||a){if(1===n&&a){var o=w?B[0]:F[0],i=\"s\"===w||\"w\"===k?0:1,l=o._name+\".range[\"+i+\"]\",s=r(o,i),u=\"left\",d=\"middle\";if(o.fixedrange)return;w?(d=\"n\"===w?\"top\":\"bottom\",\"right\"===o.side&&(u=\"right\")):\"e\"===k&&(u=\"right\"),et.call(f.makeEditable,null,{immediate:!0,background:j.paper_bgcolor,text:String(s),fill:o.tickfont?o.tickfont.color:\"#444\",horizontalAlign:u,verticalAlign:d}).on(\"edit\",function(e){var n=\"category\"===o.type?o.c2l(e):o.d2l(e);void 0!==n&&c.relayout(t,l,n)})}}else D()}function O(e){function n(t,e,n){if(!t.fixedrange){A(t.range);var r=t.range,a=r[0]+(r[1]-r[0])*e;t.range=[a+(r[0]-a)*n,a+(r[1]-a)*n]}}if(t._context.scrollZoom||j._enablescrollzoom){var r=t.querySelector(\".plotly\");if(!(r.scrollHeight-r.clientHeight>10||r.scrollWidth-r.clientWidth>10)){clearTimeout(mt);var a=-e.deltaY;if(isFinite(a)||(a=e.wheelDelta/10),!isFinite(a))return void u.log(\"Did not find wheel motion attributes: \",e);var o,i=Math.exp(-Math.min(Math.max(a,-20),20)/100),l=xt.draglayer.select(\".nsewdrag\").node().getBoundingClientRect(),s=(e.clientX-l.left)/l.width,c=vt[0]+vt[2]*s,f=(l.bottom-e.clientY)/l.height,d=vt[1]+vt[3]*(1-f);if(k){for(o=0;o<F.length;o++)n(F[o],s,i);vt[2]*=i,vt[0]=c-vt[2]*s}if(w){for(o=0;o<B.length;o++)n(B[o],f,i);vt[3]*=i,vt[1]=d-vt[3]*(1-f)}return R(vt),N(w,k),mt=setTimeout(function(){vt=[0,0,H,V],I()},yt),u.pauseEvent(e)}}}function P(t,e){function n(t,e){for(var n=0;n<t.length;n++){var r=t[n];r.fixedrange||(r.range=[r._r[0]-e/r._m,r._r[1]-e/r._m])}}function r(t){return 1-(t>=0?Math.min(t,.9):1/(1/Math.max(t,-.3)+3.222))}function a(t,e,n){for(var a=1-e,o=0,i=0;i<t.length;i++){var l=t[i];l.fixedrange||(o=i,l.range[e]=l._r[a]+(l._r[e]-l._r[a])/r(n/l._length))}return 
t[o]._length*(t[o]._r[e]-t[o].range[e])/(t[o]._r[e]-t[o]._r[a])}return\"ew\"===W||\"ns\"===J?(W&&n(F,t),J&&n(B,e),R([W?-t:0,J?-e:0,H,V]),void N(J,W)):(\"w\"===W?t=a(F,0,t):\"e\"===W?t=a(F,1,-t):W||(t=0),\"n\"===J?e=a(B,1,e):\"s\"===J?e=a(B,0,-e):J||(e=0),R([\"w\"===W?t:0,\"n\"===J?e:0,H-t,V-e]),void N(J,W))}function N(e,n){function r(t){for(o=0;o<t.length;o++)t[o].fixedrange||i.push(t[o]._id)}function a(r,a){var l;for(o=0;o<r.length;o++)l=r[o],(n&&-1!==i.indexOf(l.xref)||e&&-1!==i.indexOf(l.yref))&&a.draw(t,o)}var o,i=[];for(n&&r(F),e&&r(B),o=0;o<i.length;o++)v.doTicks(t,i[o],!0);a(j.annotations||[],c.Annotations),a(j.shapes||[],c.Shapes),a(j.images||[],c.Images)}function D(){var e,n,r,a=t._context.doubleClick,o=(W?F:[]).concat(J?B:[]),i={};if(\"autosize\"===a)for(n=0;n<o.length;n++)e=o[n],e.fixedrange||(i[e._name+\".autorange\"]=!0);else if(\"reset\"===a)for(n=0;n<o.length;n++)e=o[n],e._rangeInitial?(r=e._rangeInitial.slice(),i[e._name+\".range[0]\"]=r[0],i[e._name+\".range[1]\"]=r[1]):i[e._name+\".autorange\"]=!0;else if(\"reset+autosize\"===a)for(n=0;n<o.length;n++)e=o[n],e.fixedrange||(void 0===e._rangeInitial||e.range[0]===e._rangeInitial[0]&&e.range[1]===e._rangeInitial[1]?i[e._name+\".autorange\"]=!0:(r=e._rangeInitial.slice(),i[e._name+\".range[0]\"]=r[0],i[e._name+\".range[1]\"]=r[1]));t.emit(\"plotly_doubleclick\",null),c.relayout(t,i)}function I(e){for(var n={},r=0;r<Q.length;r++){var a=Q[r];e&&-1===e.indexOf(a._id.charAt(0))||(a._r[0]!==a.range[0]&&(n[a._name+\".range[0]\"]=a.range[0]),a._r[1]!==a.range[1]&&(n[a._name+\".range[1]\"]=a.range[1]),a.range=a._r.slice())}R([0,0,H,V]),c.relayout(t,n)}function R(t){for(var e=j._plots,n=Object.keys(e),r=0;r<n.length;r++){var a=e[n[r]],o=a.x(),i=a.y(),l=k&&-1!==F.indexOf(o)&&!o.fixedrange,s=w&&-1!==B.indexOf(i)&&!i.fixedrange,c=l?o._length/t[2]:1,f=s?i._length/t[3]:1,d=l?t[0]:0,h=s?t[1]:0,p=l?t[0]/t[2]*o._length:0,g=s?t[1]/t[3]*i._length:0,v=o._offset-p,m=i._offset-g;j._defs.selectAll(\"#\"+a.clipId).call(u.setTranslate,d,h).call(u.setScale,1/c,1/f),a.plot.call(u.setTranslate,v,m).call(u.setScale,c,f).selectAll(\".points\").selectAll(\".point\").call(u.setPointGroupScale,1/c,1/f)}}for(var j=t._fullLayout,q=[e].concat(w&&k?e.overlays:[]),F=[e.x()],B=[e.y()],H=F[0]._length,V=B[0]._length,Z=y.MINDRAG,Y=y.MINZOOM,U=w+k===\"nsew\",X=1;X<q.length;X++){var G=q[X].x(),$=q[X].y();-1===F.indexOf(G)&&F.push(G),-1===B.indexOf($)&&B.push($)}var Q=F.concat(B),W=M(F,k),J=M(B,w),K=a(J+W,j.dragmode),tt=w+k+\"drag\",et=e.draglayer.selectAll(\".\"+tt).data([0]);et.enter().append(\"rect\").classed(\"drag\",!0).classed(tt,!0).style({fill:\"transparent\",\"stroke-width\":0}).attr(\"data-subplot\",e.id),et.call(h.setRect,n,l,b,_).call(p,K);var nt=et.node();if(!J&&!W&&!i(j.dragmode))return nt.onmousedown=null,nt.style.pointerEvents=U?\"all\":\"none\",nt;var rt={element:nt,gd:t,plotinfo:e,xaxes:F,yaxes:B,doubleclick:D,prepFn:function(e,n,r){var a=t._fullLayout.dragmode;U?e.shiftKey&&(a=\"pan\"===a?\"zoom\":\"pan\"):a=\"pan\",\"lasso\"===a?rt.minDrag=1:rt.minDrag=void 0,\"zoom\"===a?(rt.moveFn=z,rt.doneFn=E,L(e,n,r)):\"pan\"===a?(rt.moveFn=P,rt.doneFn=C,T()):i(a)&&m(e,n,r,rt,a)}};g.init(rt);var at,ot,it,lt,st,ct,ut,ft,dt,ht=t._fullLayout._zoomlayer,pt=e.x()._offset,gt=e.y()._offset,vt=[0,0,H,V],mt=null,yt=y.REDRAWDELAY,xt=e.mainplot?j._plots[e.mainplot]:e;return w.length*k.length!==1&&(void 0!==nt.onwheel?nt.onwheel=O:void 0!==nt.onmousewheel&&(nt.onmousewheel="
+,
+"O)),nt}},{\"../../components/color\":18,\"../../components/dragelement\":39,\"../../components/drawing\":41,\"../../lib\":89,\"../../lib/setcursor\":98,\"../../lib/svg_text_utils\":100,\"../../plotly\":107,\"./axes\":110,\"./constants\":115,\"./select\":123,d3:9,tinycolor2:13}],117:[function(t,e,n){\"use strict\";function r(t,e){for(var n=[],r=t.length;r>0;r--)n.push(e);return n}function a(t,e){for(var n=[],r=0;r<t.length;r++)n.push(t[r].p2c(e));return n}function o(t,e){return function(n){var r=t(n),a=e(n);return Math.sqrt(r*r+a*a)}}function i(t,e,n){if(\"pie\"===n)return void t.emit(\"plotly_hover\",{points:[e]});n||(n=\"xy\");var o=Array.isArray(n)?n:[n],i=t._fullLayout,h=i._plots||[],p=h[n];if(p){var y=p.overlays.map(function(t){return t.id});o=o.concat(y)}for(var b=o.length,M=new Array(b),A=new Array(b),L=0;b>L;L++){var T=o[L],z=h[T];if(z)M[L]=w.getFromId(t,z.xaxis._id),A[L]=w.getFromId(t,z.yaxis._id);else{var S=i[T]._subplot;M[L]=S.xaxis,A[L]=S.yaxis}}var E=e.hovermode||i.hovermode;if(-1===[\"x\",\"y\",\"closest\"].indexOf(E)||!t.calcdata||t.querySelector(\".zoombox\")||t._dragging)return _.unhoverRaw(t,e);var C,O,P,N,D,I,R,j,q,F,B,H,V=[],Z=[];if(Array.isArray(e))for(E=\"array\",P=0;P<e.length;P++)D=t.calcdata[e[P].curveNumber||0],Z.push(D);else{for(N=0;N<t.calcdata.length;N++)D=t.calcdata[N],I=D[0].trace,-1!==o.indexOf(l(I))&&Z.push(D);var Y,U;if(e.target&&\"clientX\"in e&&\"clientY\"in e){if(m.triggerHandler(t,\"plotly_beforehover\",e)===!1)return;var X=e.target.getBoundingClientRect();if(Y=e.clientX-X.left,U=e.clientY-X.top,0>Y||Y>X.width||0>U||U>X.height)return _.unhoverRaw(t,e)}else Y=\"xpx\"in e?e.xpx:M[0]._length/2,U=\"ypx\"in e?e.ypx:A[0]._length/2;if(C=\"xval\"in e?r(o,e.xval):a(M,Y),O=\"yval\"in e?r(o,e.yval):a(A,U),!g(C[0])||!g(O[0]))return v.warn(\"Plotly.Fx.hover failed\",e,t),_.unhoverRaw(t,e)}var G=1/0;for(N=0;N<Z.length;N++)if(D=Z[N],D&&D[0]&&D[0].trace&&D[0].trace.visible===!0){if(I=D[0].trace,R=o.indexOf(l(I)),j=E,B={cd:D,trace:I,xa:M[R],ya:A[R],name:t.data.length>1||-1!==I.hoverinfo.indexOf(\"name\")?I.name:void 0,index:!1,distance:Math.min(G,k.MAXDIST),color:x.defaultLine,x0:void 0,x1:void 0,y0:void 0,y1:void 0,xLabelVal:void 0,yLabelVal:void 0,zLabelVal:void 0,text:void 0},H=V.length,\"array\"===j){var $=e[N];\"pointNumber\"in $?(B.index=$.pointNumber,j=\"closest\"):(j=\"\",\"xval\"in $&&(q=$.xval,j=\"x\"),\"yval\"in $&&(F=$.yval,j=j?\"closest\":\"y\"))}else q=C[R],F=O[R];if(I._module&&I._module.hoverPoints){var Q=I._module.hoverPoints(B,q,F,j);if(Q)for(var W,J=0;J<Q.length;J++)W=Q[J],g(W.x0)&&g(W.y0)&&V.push(s(W,E))}else v.log(\"Unrecognized trace type in hover:\",I);\"closest\"===E&&V.length>H&&(V.splice(0,H),G=V[0].distance)}if(0===V.length)return _.unhoverRaw(t,e);var K=\"y\"===E&&Z.length>1;V.sort(function(t,e){return t.distance-e.distance});var tt=x.combine(i.plot_bgcolor||x.background,i.paper_bgcolor),et={hovermode:E,rotateLabels:K,bgColor:tt,container:i._hoverlayer,outerContainer:i._paperdiv},nt=c(V,et);u(V,K?\"xa\":\"ya\"),f(nt,K);var rt=t._hoverdata,at=[];for(P=0;P<V.length;P++){var ot=V[P],it={data:ot.trace._input,fullData:ot.trace,curveNumber:ot.trace.index,pointNumber:ot.index,x:ot.xVal,y:ot.yVal,xaxis:ot.xa,yaxis:ot.ya};void 0!==ot.zLabelVal&&(it.z=ot.zLabelVal),at.push(it)}t._hoverdata=at,d(t,e,rt)&&(rt&&t.emit(\"plotly_unhover\",{points:rt}),t.emit(\"plotly_hover\",{points:t._hoverdata,xaxes:M,yaxes:A,xvals:C,yvals:O}))}function l(t){return t.subplot||t.xaxis+t.yaxis}function 
s(t,e){t.posref=\"y\"===e?(t.x0+t.x1)/2:(t.y0+t.y1)/2,t.x0=v.constrain(t.x0,0,t.xa._length),t.x1=v.constrain(t.x1,0,t.xa._length),t.y0=v.constrain(t.y0,0,t.ya._length),t.y1=v.constrain(t.y1,0,t.ya._length);var n;if(void 0!==t.xLabelVal){n=\"log\"===t.xa.type&&t.xLabelVal<=0;var r=w.tickText(t.xa,t.xa.c2l(n?-t.xLabelVal:t.xLabelVal),\"hover\");n?0===t.xLabelVal?t.xLabel=\"0\":t.xLabel=\"-\"+r.text:t.xLabel=r.text,t.xVal=t.xa.c2d(t.xLabelVal)}if(void 0!==t.yLabelVal){n=\"log\"===t.ya.type&&t.yLabelVal<=0;var a=w.tickText(t.ya,t.ya.c2l(n?-t.yLabelVal:t.yLabelVal),\"hover\");n?0===t.yLabelVal?t.yLabel=\"0\":t.yLabel=\"-\"+a.text:t.yLabel=a.text,t.yVal=t.ya.c2d(t.yLabelVal)}if(void 0!==t.zLabelVal&&(t.zLabel=String(t.zLabelVal)),!(isNaN(t.xerr)||\"log\"===t.xa.type&&t.xerr<=0)){var o=w.tickText(t.xa,t.xa.c2l(t.xerr),\"hover\").text;void 0!==t.xerrneg?t.xLabel+=\" +\"+o+\" / -\"+w.tickText(t.xa,t.xa.c2l(t.xerrneg),\"hover\").text:t.xLabel+=\" &plusmn; \"+o,\"x\"===e&&(t.distance+=1)}if(!(isNaN(t.yerr)||\"log\"===t.ya.type&&t.yerr<=0)){var i=w.tickText(t.ya,t.ya.c2l(t.yerr),\"hover\").text;void 0!==t.yerrneg?t.yLabel+=\" +\"+i+\" / -\"+w.tickText(t.ya,t.ya.c2l(t.yerrneg),\"hover\").text:t.yLabel+=\" &plusmn; \"+i,\"y\"===e&&(t.distance+=1)}var l=t.trace.hoverinfo;return\"all\"!==l&&(l=l.split(\"+\"),-1===l.indexOf(\"x\")&&(t.xLabel=void 0),-1===l.indexOf(\"y\")&&(t.yLabel=void 0),-1===l.indexOf(\"z\")&&(t.zLabel=void 0),-1===l.indexOf(\"text\")&&(t.text=void 0),-1===l.indexOf(\"name\")&&(t.name=void 0)),t}function c(t,e){var n,r,a=e.hovermode,o=e.rotateLabels,i=e.bgColor,l=e.container,s=e.outerContainer,c=t[0],u=c.xa,f=c.ya,d=\"y\"===a?\"yLabel\":\"xLabel\",g=c[d],v=(String(g)||\"\").split(\" \")[0],m=s.node().getBoundingClientRect(),_=m.top,w=m.width,M=m.height,A=c.distance<=k.MAXDIST&&(\"x\"===a||\"y\"===a);for(n=0;n<t.length;n++){r=t[n].trace.hoverinfo;var T=r.split(\"+\");if(-1===T.indexOf(\"all\")&&-1===T.indexOf(a)){A=!1;break}}var z=l.selectAll(\"g.axistext\").data(A?[0]:[]);z.enter().append(\"g\").classed(\"axistext\",!0),z.exit().remove(),z.each(function(){var e=h.select(this),n=e.selectAll(\"path\").data([0]),r=e.selectAll(\"text\").data([0]);n.enter().append(\"path\").style({fill:x.defaultLine,\"stroke-width\":\"1px\",stroke:x.background}),r.enter().append(\"text\").call(b.font,N,P,x.background).attr(\"data-notex\",1),r.text(g).call(y.convertToTspans).call(b.setPosition,0,0).selectAll(\"tspan.line\").call(b.setPosition,0,0),e.attr(\"transform\",\"\");var o=r.node().getBoundingClientRect();if(\"x\"===a){r.attr(\"text-anchor\",\"middle\").call(b.setPosition,0,\"top\"===u.side?_-o.bottom-C-O:_-o.top+C+O).selectAll(\"tspan.line\").attr({x:r.attr(\"x\"),y:r.attr(\"y\")});var i=\"top\"===u.side?\"-\":\"\";n.attr(\"d\",\"M0,0L\"+C+\",\"+i+C+\"H\"+(O+o.width/2)+\"v\"+i+(2*O+o.height)+\"H-\"+(O+o.width/2)+\"V\"+i+C+\"H-\"+C+\"Z\"),e.attr(\"transform\",\"translate(\"+(u._offset+(c.x0+c.x1)/2)+\",\"+(f._offset+(\"top\"===u.side?0:f._length))+\")\")}else{r.attr(\"text-anchor\",\"right\"===f.side?\"start\":\"end\").call(b.setPosition,(\"right\"===f.side?1:-1)*(O+C),_-o.top-o.height/2).selectAll(\"tspan.line\").attr({x:r.attr(\"x\"),y:r.attr(\"y\")});var l=\"right\"===f.side?\"\":\"-\";n.attr(\"d\",\"M0,0L\"+l+C+\",\"+C+\"V\"+(O+o.height/2)+\"h\"+l+(2*O+o.width)+\"V-\"+(O+o.height/2)+\"H\"+l+C+\"V-\"+C+\"Z\"),e.attr(\"transform\",\"translate(\"+(u._offset+(\"right\"===f.side?u._length:0))+\",\"+(f._offset+(c.y0+c.y1)/2)+\")\")}t=t.filter(function(t){return void 0!==t.zLabelVal||(t[d]||\"\").split(\" 
\")[0]===v})});var S=l.selectAll(\"g.hovertext\").data(t,function(t){return[t.trace.index,t.index,t.x0,t.y0,t.name,t.attr,t.xa,t.ya||\"\"].join(\",\")});return S.enter().append(\"g\").classed(\"hovertext\",!0).each(function(){var t=h.select(this);t.append(\"rect\").call(x.fill,x.addOpacity(i,.8)),t.append(\"text\").classed(\"name\",!0).call(b.font,N,P),t.append(\"path\").style(\"stroke-width\",\"1px\"),t.append(\"text\").classed(\"nums\",!0).call(b.font,N,P)}),S.exit().remove(),S.each(function(t){var e=h.select(this).attr(\"transform\",\"\"),n=\"\",r=\"\",l=x.opacity(t.color)?t.color:x.defaultLine,s=x.combine(l,i),c=p(s).getBrightness()>128?\"#000\":x.background;if(t.name&&void 0===t.zLabelVal){var u=document.createElement(\"p\");u.innerHTML=t.name,n=u.textContent||\"\",n.length>15&&(n=n.substr(0,12)+\"...\")}void 0!==t.extraText&&(r+=t.extraText),void 0!==t.zLabel?(void 0!==t.xLabel&&(r+=\"x: \"+t.xLabel+\"<br>\"),void 0!==t.yLabel&&(r+=\"y: \"+t.yLabel+\"<br>\"),r+=(r?\"z: \":\"\")+t.zLabel):A&&t[a+\"Label\"]===g?r=t[(\"x\"===a?\"y\":\"x\")+\"Label\"]||\"\":void 0===t.xLabel?void 0!==t.yLabel&&(r=t.yLabel):r=void 0===t.yLabel?t.xLabel:\"(\"+t.xLabel+\", \"+t.yLabel+\")\",t.text&&!Array.isArray(t.text)&&(r+=(r?\"<br>\":\"\")+t.text),\"\"===r&&(\"\"===n&&e.remove(),r=n);var f=e.select(\"text.nums\").style(\"fill\",c).call(b.setPosition,0,0).text(r).attr(\"data-notex\",1).call(y.convertToTspans);f.selectAll(\"tspan.line\").call(b.setPosition,0,0);var d=e.select(\"text.name\"),v=0;n&&n!==r?(d.style(\"fill\",s).text(n).call(b.setPosition,0,0).attr(\"data-notex\",1).call(y.convertToTspans),d.selectAll(\"tspan.line\").call(b.setPosition,0,0),v=d.node().getBoundingClientRect().width+2*O):(d.remove(),e.select(\"rect\").remove()),e.select(\"path\").style({fill:s,stroke:c});var m,k,T=f.node().getBoundingClientRect(),z=t.xa._offset+(t.x0+t.x1)/2,S=t.ya._offset+(t.y0+t.y1)/2,E=Math.abs(t.x1-t.x0),P=Math.abs(t.y1-t.y0),N=T.width+C+O+v;t.ty0=_-T.top,t.bx=T.width+2*O,t.by=T.height+2*O,t.anchor=\"start\",t.txwidth=T.width,t.tx2width=v,t.offset=0,o?(t.pos=z,m=M>=S+P/2+N,k=S-P/2-N>=0,\"top\"!==t.idealAlign&&m||!k?m?(S+=P/2,t.anchor=\"start\"):t.anchor=\"middle\":(S-=P/2,t.anchor=\"end\")):(t.pos=S,m=w>=z+E/2+N,k=z-E/2-N>=0,\"left\"!==t.idealAlign&&m||!k?m?(z+=E/2,t.anchor=\"start\"):t.anchor=\"middle\":(z-=E/2,t.anchor=\"end\")),f.attr(\"text-anchor\",t.anchor),v&&d.attr(\"text-anchor\",t.anchor),e.attr(\"transform\",\"translate(\"+z+\",\"+S+\")\"+(o?\"rotate(\"+L+\")\":\"\"))}),S}function u(t,e){function n(t){var e=t[0],n=t[t.length-1];if(a=e.pmin-e.pos-e.dp+e.size,o=n.pos+n.dp+n.size-e.pmax,a>.01){for(l=t.length-1;l>=0;l--)t[l].dp+=a;r=!1}if(!(.01>o)){if(-.01>a){for(l=t.length-1;l>=0;l--)t[l].dp-=o;r=!1}if(r){var c=0;for(i=0;i<t.length;i++)s=t[i],s.pos+s.dp+s.size>e.pmax&&c++;for(i=t.length-1;i>=0&&!(0>=c);i--)s=t[i],s.pos>e.pmax-1&&(s.del=!0,c--);for(i=0;i<t.length&&!(0>=c);i++)if(s=t[i],s.pos<e.pmin+1)for(s.del=!0,c--,o=2*s.size,l=t.length-1;l>=0;l--)t[l].dp-=o;for(i=t.length-1;i>=0&&!(0>=c);i--)s=t[i],s.pos+s.dp+s.size>e.pmax&&(s.del=!0,c--)}}}for(var r,a,o,i,l,s,c,u=0,f=t.map(function(t,n){var r=t[e];return[{i:n,dp:0,pos:t.pos,posref:t.posref,size:t.by*(\"x\"===r._id.charAt(0)?z:1)/2,pmin:r._offset,pmax:r._offset+r._length}]}).sort(function(t,e){return t[0].posref-e[0].posref});!r&&u<=t.length;){for(u++,r=!0,i=0;i<f.length-1;){var d=f[i],h=f[i+1"
+,
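// Illustrative sketch, not part of the minified bundle: the idea behind the
// hover-label de-overlap routine in the surrounding code. Labels on one axis
// are sorted by position; any pair closer than their combined half-sizes is
// pushed apart, and passes repeat until stable. The real routine also groups
// labels that collide and deletes ones pushed off the axis.
function pushApart(labels) {          // labels: [{pos, size}], sorted by pos
  for (var pass = 0, moved = true; moved && pass < 100; pass++) {
    moved = false;
    for (var i = 0; i < labels.length - 1; i++) {
      var a = labels[i], b = labels[i + 1];
      var overlap = (a.pos + a.size / 2) - (b.pos - b.size / 2);
      if (overlap > 0.01) {           // split the correction between the pair
        a.pos -= overlap / 2;
        b.pos += overlap / 2;
        moved = true;
      }
    }
  }
  return labels;
}
// pushApart([{pos: 10, size: 8}, {pos: 12, size: 8}]) -> positions 7 and 15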
+"],p=d[d.length-1],g=h[0];if(a=p.pos+p.dp+p.size-g.pos-g.dp+g.size,a>.01&&p.pmin===g.pmin&&p.pmax===g.pmax){for(l=h.length-1;l>=0;l--)h[l].dp+=a;for(d.push.apply(d,h),f.splice(i+1,1),c=0,l=d.length-1;l>=0;l--)c+=d[l].dp;for(o=c/d.length,l=d.length-1;l>=0;l--)d[l].dp-=o;r=!1}else i++}f.forEach(n)}for(i=f.length-1;i>=0;i--){var v=f[i];for(l=v.length-1;l>=0;l--){var m=v[l],y=t[m.i];y.offset=m.dp,y.del=m.del}}}function f(t,e){t.each(function(t){var n=h.select(this);if(t.del)return void n.remove();var r=\"end\"===t.anchor?-1:1,a=n.select(\"text.nums\"),o={start:1,end:-1,middle:0}[t.anchor],i=o*(C+O),l=i+o*(t.txwidth+O),s=0,c=t.offset;\"middle\"===t.anchor&&(i-=t.tx2width/2,l-=t.tx2width/2),e&&(c*=-E,s=t.offset*S),n.select(\"path\").attr(\"d\",\"middle\"===t.anchor?\"M-\"+t.bx/2+\",-\"+t.by/2+\"h\"+t.bx+\"v\"+t.by+\"h-\"+t.bx+\"Z\":\"M0,0L\"+(r*C+s)+\",\"+(C+c)+\"v\"+(t.by/2-C)+\"h\"+r*t.bx+\"v-\"+t.by+\"H\"+(r*C+s)+\"V\"+(c-C)+\"Z\"),a.call(b.setPosition,i+s,c+t.ty0-t.by/2+O).selectAll(\"tspan.line\").attr({x:a.attr(\"x\"),y:a.attr(\"y\")}),t.tx2width&&(n.select(\"text.name, text.name tspan.line\").call(b.setPosition,l+o*O+s,c+t.ty0-t.by/2+O),n.select(\"rect\").call(b.setRect,l+(o-1)*t.tx2width/2+s,c-t.by/2-1,t.tx2width,t.by+2))})}function d(t,e,n){if(!e.target)return!1;if(!n||n.length!==t._hoverdata.length)return!0;for(var r=n.length-1;r>=0;r--){var a=n[r],o=t._hoverdata[r];if(a.curveNumber!==o.curveNumber||String(a.pointNumber)!==String(o.pointNumber))return!0}return!1}var h=t(\"d3\"),p=t(\"tinycolor2\"),g=t(\"fast-isnumeric\"),v=t(\"../../lib\"),m=t(\"../../lib/events\"),y=t(\"../../lib/svg_text_utils\"),x=t(\"../../components/color\"),b=t(\"../../components/drawing\"),_=t(\"../../components/dragelement\"),w=t(\"./axes\"),k=t(\"./constants\"),M=t(\"./dragbox\"),A=e.exports={};A.unhover=_.unhover,A.layoutAttributes={dragmode:{valType:\"enumerated\",values:[\"zoom\",\"pan\",\"select\",\"lasso\",\"orbit\",\"turntable\"],dflt:\"zoom\"},hovermode:{valType:\"enumerated\",values:[\"x\",\"y\",\"closest\",!1]}},A.supplyLayoutDefaults=function(t,e,n){function r(n,r){return v.coerce(t,e,A.layoutAttributes,n,r)}r(\"dragmode\");var a;if(e._has(\"cartesian\")){var o=e._isHoriz=A.isHoriz(n);a=o?\"y\":\"x\"}else a=\"closest\";r(\"hovermode\",a)},A.isHoriz=function(t){for(var e=!0,n=0;n<t.length;n++){var r=t[n];if(\"h\"!==r.orientation){e=!1;break}}return e},A.init=function(t){var e=t._fullLayout;if(e._has(\"cartesian\")&&!t._context.staticPlot){var n=Object.keys(e._plots||{}).sort(function(t,n){if((e._plots[t].mainplot&&!0)===(e._plots[n].mainplot&&!0)){var r=t.split(\"y\"),a=n.split(\"y\");return r[0]===a[0]?Number(r[1]||1)-Number(a[1]||1):Number(r[0]||1)-Number(a[0]||1)}return e._plots[t].mainplot?1:-1});n.forEach(function(n){var r=e._plots[n];if(e._has(\"cartesian\")){var a=r.x(),o=r.y(),i=(a._linepositions[n]||[])[3],l=(o._linepositions[n]||[])[3],s=k.DRAGGERSIZE;if(g(i)&&\"top\"===a.side&&(i-=s),g(l)&&\"right\"!==o.side&&(l-=s),!r.mainplot){var 
c=M(t,r,0,0,a._length,o._length,\"ns\",\"ew\");c.onmousemove=function(r){A.hover(t,r,n),e._lasthover=c,e._hoversubplot=n},c.onmouseout=function(e){t._dragging||_.unhover(t,e)},c.onclick=function(e){A.click(t,e)},M(t,r,-s,-s,s,s,\"n\",\"w\"),M(t,r,a._length,-s,s,s,\"n\",\"e\"),M(t,r,-s,o._length,s,s,\"s\",\"w\"),M(t,r,a._length,o._length,s,s,\"s\",\"e\")}g(i)&&(\"free\"===a.anchor&&(i-=e._size.h*(1-o.domain[1])),M(t,r,.1*a._length,i,.8*a._length,s,\"\",\"ew\"),M(t,r,0,i,.1*a._length,s,\"\",\"w\"),M(t,r,.9*a._length,i,.1*a._length,s,\"\",\"e\")),g(l)&&(\"free\"===o.anchor&&(l-=e._size.w*a.domain[0]),M(t,r,l,.1*o._length,s,.8*o._length,\"ns\",\"\"),M(t,r,l,.9*o._length,s,.1*o._length,\"s\",\"\"),M(t,r,l,0,s,.1*o._length,\"n\",\"\"))}});var r=e._hoverlayer.node();r.onmousemove=function(n){n.target=e._lasthover,A.hover(t,n,e._hoversubplot)},r.onclick=function(n){n.target=e._lasthover,A.click(t,n)},r.onmousedown=function(t){e._lasthover.onmousedown(t)}}};var L=k.YANGLE,T=Math.PI*L/180,z=1/Math.sin(T),S=Math.cos(T),E=Math.sin(T),C=k.HOVERARROWSIZE,O=k.HOVERTEXTPAD,P=k.HOVERFONTSIZE,N=k.HOVERFONT;A.hover=function(t,e,n){return\"string\"==typeof t&&(t=document.getElementById(t)),void 0===t._lastHoverTime&&(t._lastHoverTime=0),void 0!==t._hoverTimer&&(clearTimeout(t._hoverTimer),t._hoverTimer=void 0),Date.now()>t._lastHoverTime+k.HOVERMINTIME?(i(t,e,n),void(t._lastHoverTime=Date.now())):void(t._hoverTimer=setTimeout(function(){i(t,e,n),t._lastHoverTime=Date.now(),t._hoverTimer=void 0},k.HOVERMINTIME))},A.getDistanceFunction=function(t,e,n,r){return\"closest\"===t?r||o(e,n):\"x\"===t?e:n},A.getClosest=function(t,e,n){if(n.index!==!1)n.index>=0&&n.index<t.length?n.distance=0:n.index=!1;else for(var r=0;r<t.length;r++){var a=e(t[r]);a<=n.distance&&(n.index=r,n.distance=a)}return n},A.loneHover=function(t,e){var n={color:t.color||x.defaultLine,x0:t.x0||t.x||0,x1:t.x1||t.x||0,y0:t.y0||t.y||0,y1:t.y1||t.y||0,xLabel:t.xLabel,yLabel:t.yLabel,zLabel:t.zLabel,text:t.text,name:t.name,idealAlign:t.idealAlign,trace:{index:0,hoverinfo:\"\"},xa:{_offset:0},ya:{_offset:0},index:0},r=h.select(e.container),a=e.outerContainer?h.select(e.outerContainer):r,o={hovermode:\"closest\",rotateLabels:!1,bgColor:e.bgColor||x.background,container:r,outerContainer:a},i=c([n],o);return f(i,o.rotateLabels),i.node()},A.loneUnhover=function(t){var e=t instanceof h.selection?t:h.select(t);e.selectAll(\"g.hovertext\").remove()},A.click=function(t,e){t._hoverdata&&e&&e.target&&(t.emit(\"plotly_click\",{points:t._hoverdata}),e.stopImmediatePropagation&&e.stopImmediatePropagation())},A.inbox=function(t,e){return 0>t*e||0===t?k.MAXDIST*(.6-.3/Math.max(3,Math.abs(t-e))):1/0}},{\"../../components/color\":18,\"../../components/dragelement\":39,\"../../components/drawing\":41,\"../../lib\":89,\"../../lib/events\":87,\"../../lib/svg_text_utils\":100,\"./axes\":110,\"./constants\":115,\"./dragbox\":116,d3:9,\"fast-isnumeric\":11,tinycolor2:13}],118:[function(t,e,n){\"use strict\";var r=t(\"../plots\"),a=t(\"./constants\");n.name=\"cartesian\",n.attr=[\"xaxis\",\"yaxis\"],n.idRoot=[\"x\",\"y\"],n.idRegex=a.idRegex,n.attrRegex=a.attrRegex,n.attributes=t(\"./attributes\"),n.plot=function(t){function e(t,e){for(var n=[],r=0;r<t.length;r++){var a=t[r],o=a[0].trace;o.xaxis+o.yaxis===e&&n.push(a)}return n}function n(t,e){for(var n=[],r=0;r<t.length;r++){var a=t[r],o=a[0].trace;o._module===e&&o.visible===!0&&n.push(a)}return n}for(var a=t._fullLayout,o=r.getSubplotIds(a,\"cartesian\"),i=t.calcdata,l=a._modules,s=0;s<o.length;s++){var 
c=o[s],u=a._plots[c],f=e(i,c);u.plot&&u.plot.selectAll(\"g.trace\").remove();for(var d=0;d<l.length;d++){var h=l[d];if(\"cartesian\"===h.basePlotModule.name){var p=n(f,h);h.plot(t,u,p)}}}}},{\"../plots\":130,\"./attributes\":109,\"./constants\":115}],119:[function(t,e,n){\"use strict\";var r=t(\"../font_attributes\"),a=t(\"../../components/color/attributes\"),o=t(\"../../lib/extend\").extendFlat,i=t(\"../../components/rangeslider/attributes\"),l=t(\"../../components/rangeselector/attributes\"),s=t(\"./constants\");e.exports={color:{valType:\"color\",dflt:a.defaultLine},title:{valType:\"string\"},titlefont:o({},r,{}),type:{valType:\"enumerated\",values:[\"-\",\"linear\",\"log\",\"date\",\"category\"],dflt:\"-\"},autorange:{valType:\"enumerated\",values:[!0,!1,\"reversed\"],dflt:!0},rangemode:{valType:\"enumerated\",values:[\"normal\",\"tozero\",\"nonnegative\"],dflt:\"normal\"},range:{valType:\"info_array\",items:[{valType:\"number\"},{valType:\"number\"}]},rangeslider:i,rangeselector:l,fixedrange:{valType:\"boolean\",dflt:!1},tickmode:{valType:\"enumerated\",values:[\"auto\",\"linear\",\"array\"]},nticks:{valType:\"integer\",min:0,dflt:0},tick0:{valType:\"number\",dflt:0},dtick:{valType:\"any\",dflt:1},tickvals:{valType:\"data_array\"},ticktext:{valType:\"data_array\"},ticks:{valType:\"enumerated\",values:[\"outside\",\"inside\",\"\"]},mirror:{valType:\"enumerated\",values:[!0,\"ticks\",!1,\"all\",\"allticks\"],dflt:!1},ticklen:{valType:\"number\",min:0,dflt:5},tickwidth:{valType:\"number\",min:0,dflt:1},tickcolor:{valType:\"color\",dflt:a.defaultLine},showticklabels:{valType:\"boolean\",dflt:!0},tickfont:o({},r,{}),tickangle:{valType:\"angle\",dflt:\"auto\"},tickprefix:{valType:\"string\",dflt:\"\"},showtickprefix:{valType:\"enumerated\",values:[\"all\",\"first\",\"last\",\"none\"],dflt:\"all\"},ticksuffix:{valType:\"string\",dflt:\"\"},showticksuffix:{valType:\"enumerated\",values:[\"all\",\"first\",\"last\",\"none\"],dflt:\"all\"},showexponent:{valType:\"enumerated\",values:[\"all\",\"first\",\"last\",\"none\"],dflt:\"all\"},exponentformat:{valType:\"enumerated\",values:[\"none\",\"e\",\"E\",\"power\",\"SI\",\"B\"],dflt:\"B\"},tickformat:{valType:\"string\",dflt:\"\"},hoverformat:{valType:\"string\",dflt:\"\"},showline:{valType:\"boolean\",dflt:!1},linecolor:{valType:\"color\",dflt:a.defaultLine},linewidth:{valType:\"number\",min:0,dflt:1},showgrid:{valType:\"boolean\"},gridcolor:{valType:\"color\",dflt:a.lightLine},gridwidth:{valType:\"number\",min:0,dflt:1},zeroline:{valType:\"boolean\"},zerolinecolor:{valType:\"color\",dflt:a.defaultLine},zerolinewidth:{valType:\"number\",dflt:1},anchor:{valType:\"enumerated\",values:[\"free\",s.idRegex.x.toString(),s.idRegex.y.toString()]},side:{valType:\"enumerated\",values:[\"top\",\"bottom\",\"left\",\"right\"]},overlaying:{valType:\"enumerated\",values:[\"free\",s.idRegex.x.toString(),s.idRegex.y.toString()]},domain:{valType:\"info_array\",items:[{valType:\"number\",min:0,max:1},{valType:\"number\",min:0,max:1}],dflt:[0,1]},position:{valType:\"number\",min:0,max:1,dflt:0},categoryorder:{valType:\"enumerated\",values:[\"trace\",\"category ascending\",\"category descending\",\"array\"],dflt:\"trace\"},categoryarray:{valType:\"data_array\"},_deprecated:{autotick:{valType:\"boolean\"}}}},{\"../../components/color/attributes\":17,\"../../components/rangeselector/attributes\":66,\"../../components/rangeslider/attributes\":73,\"../../lib/extend\":88,\"../font_attributes\":128,\"./constants\":115}],120:[function(t,e,n){\"use strict\";var 
r=t(\"../../lib\"),a=t(\"../plots\"),o=t(\"../../components/color\"),i=t(\"../../components/rangeslider\"),l=t(\"../../components/rangeselector\"),s=t(\"./constants\"),c=t(\"./layout_attributes\"),u=t(\"./axis_defaults\"),f=t(\"./position_defaults\"),d=t(\"./axis_ids\");e.exports=function(t,e,n){function h(t,e){var n=Number(t.substr(5)||1),r=Number(e.substr(5)||1);return n-r}var p,g=Object.keys(t),v=[],m=[],y=[],x=[],b={},_={};for(p=0;"
+,
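// Illustrative sketch, not part of the minified bundle: the throttling
// pattern used by A.hover in the surrounding code (compare _lastHoverTime,
// _hoverTimer and HOVERMINTIME above). The wrapped function runs at most once
// per minInterval ms, and a trailing call is queued with setTimeout so the
// last event is never dropped. Names here are illustrative.
function throttle(fn, minInterval) {
  var last = 0, timer;
  return function () {
    var args = arguments, now = Date.now();
    if (timer) { clearTimeout(timer); timer = undefined; }
    if (now > last + minInterval) {
      fn.apply(null, args);
      last = now;
    } else {
      timer = setTimeout(function () {
        fn.apply(null, args);
        last = Date.now();
        timer = undefined;
      }, minInterval);
    }
  };
}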
+"p<n.length;p++){var w,k,M=n[p];if(a.traceIs(M,\"cartesian\"))w=v,k=m;else{if(!a.traceIs(M,\"gl2d\"))continue;w=y,k=x}var A=d.id2name(M.xaxis),L=d.id2name(M.yaxis);if(A&&-1===w.indexOf(A)&&w.push(A),L&&-1===k.indexOf(L)&&k.push(L),a.traceIs(M,\"2dMap\")&&(b[A]=!0,b[L]=!0),a.traceIs(M,\"oriented\")){var T=\"h\"===M.orientation?L:A;_[T]=!0}}var z=e._has(\"gl3d\")||e._has(\"geo\");if(!z)for(p=0;p<g.length;p++){var S=g[p];-1===y.indexOf(S)&&-1===v.indexOf(S)&&s.xAxisMatch.test(S)?v.push(S):-1===x.indexOf(S)&&-1===m.indexOf(S)&&s.yAxisMatch.test(S)&&m.push(S)}v.length&&m.length&&r.pushUnique(e._basePlotModules,a.subplotsRegistry.cartesian);var E=v.concat(y).sort(h),C=m.concat(x).sort(h),O=E.concat(C),P=o.background;E.length&&C.length&&(P=r.coerce(t,e,a.layoutAttributes,\"plot_bgcolor\"));var N=o.combine(P,e.paper_bgcolor);O.forEach(function(a){function o(t,e){return r.coerce(l,s,c,t,e)}var i=a.charAt(0),l=t[a]||{},s={},h={letter:i,font:e.font,outerTicks:b[a],showGrid:!_[a],name:a,data:n,bgColor:N},p={letter:i,counterAxes:{x:C,y:E}[i].map(d.name2id),overlayableAxes:{x:E,y:C}[i].filter(function(e){return e!==a&&!(t[e]||{}).overlaying}).map(d.name2id)};u(l,s,o,h),f(l,s,o,p),e[a]=s,t[a]||\"-\"===l.type||(t[a]={type:l.type})}),O.forEach(function(n){var r=n.charAt(0),a=t[n],o=e[n],s={x:C,y:E}[r];i.supplyLayoutDefaults(t,e,n,s),\"x\"===r&&\"date\"===o.type&&l.supplyLayoutDefaults(a,o,e,s)})}},{\"../../components/color\":18,\"../../components/rangeselector\":72,\"../../components/rangeslider\":77,\"../../lib\":89,\"../plots\":130,\"./axis_defaults\":111,\"./axis_ids\":112,\"./constants\":115,\"./layout_attributes\":119,\"./position_defaults\":122}],121:[function(t,e,n){\"use strict\";function r(t,e,n){var r,o,i,l,s,c=[],u=n.map(function(e){return e[t]}),f=a.bisector(e).left;for(r=0;r<u.length;r++)for(i=u[r],o=0;o<i.length;o++)l=i[o],null!==l&&void 0!==l&&(s=f(c,l),s<c.length-1&&c[s]===l||c.splice(s,0,l));return c}var a=t(\"d3\");e.exports=function(t,e,n,o){switch(e){case\"array\":return Array.isArray(n)?n.slice():[];case\"category ascending\":\n"
+,
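// Illustrative sketch, not part of the minified bundle: module 121 nearby
// builds the "category ascending" order by binary-inserting each value into a
// sorted, de-duplicated array (the bundle uses d3.bisector; a plain
// lower-bound binary search is shown here instead).
function sortedUnique(values, cmp) {
  var out = [];
  values.forEach(function (v) {
    if (v === null || v === undefined) return;
    var lo = 0, hi = out.length;
    while (lo < hi) {                           // lower-bound binary search
      var mid = (lo + hi) >> 1;
      if (cmp(out[mid], v) < 0) lo = mid + 1; else hi = mid;
    }
    if (out[lo] !== v) out.splice(lo, 0, v);    // skip duplicates
  });
  return out;
}
// sortedUnique(['b','a','b','c'], function (x, y) {
//   return x < y ? -1 : x > y ? 1 : 0;
// }) -> ['a','b','c']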
+"return r(t,a.ascending,o);case\"category descending\":return r(t,a.descending,o);case\"trace\":return[];default:return[]}}},{d3:9}],122:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"../../lib\");e.exports=function(t,e,n,o){var i=o.counterAxes||[],l=o.overlayableAxes||[],s=o.letter,c=a.coerce(t,e,{anchor:{valType:\"enumerated\",values:[\"free\"].concat(i),dflt:r(t.position)?\"free\":i[0]||\"free\"}},\"anchor\");\"free\"===c&&n(\"position\"),a.coerce(t,e,{side:{valType:\"enumerated\",values:\"x\"===s?[\"bottom\",\"top\"]:[\"left\",\"right\"],dflt:\"x\"===s?\"bottom\":\"left\"}},\"side\");var u=!1;if(l.length&&(u=a.coerce(t,e,{overlaying:{valType:\"enumerated\",values:[!1].concat(l),dflt:!1}},\"overlaying\")),!u){var f=n(\"domain\");f[0]>f[1]-.01&&(e.domain=[0,1]),a.noneOrAll(t.domain,e.domain,[0,1])}return e}},{\"../../lib\":89,\"fast-isnumeric\":11}],123:[function(t,e,n){\"use strict\";function r(t){return t._id}var a=t(\"../../lib/polygon\"),o=t(\"../../components/color\"),i=t(\"./axes\"),l=t(\"./constants\"),s=a.filter,c=a.tester,u=l.MINSELECT;e.exports=function(t,e,n,a,f){function d(t){var e=\"y\"===t._id.charAt(0)?1:0;return function(n){return t.p2d(n[e])}}function h(t,e){return t-e}var p,g=a.gd._fullLayout._zoomlayer,v=a.element.getBoundingClientRect(),m=a.plotinfo.x()._offset,y=a.plotinfo.y()._offset,x=e-v.left,b=n-v.top,_=x,w=b,k=\"M\"+x+\",\"+b,M=a.xaxes[0]._length,A=a.yaxes[0]._length,L=a.xaxes.map(r),T=a.yaxes.map(r),z=a.xaxes.concat(a.yaxes);\"lasso\"===f&&(p=s([[x,b]],l.BENDPX));var S=g.selectAll(\"path.select-outline\").data([1,2]);S.enter().append(\"path\").attr(\"class\",function(t){return\"select-outline select-outline-\"+t}).attr(\"transform\",\"translate(\"+m+\", \"+y+\")\").attr(\"d\",k+\"Z\");var E,C,O,P,N,D=g.append(\"path\").attr(\"class\",\"zoombox-corners\").style({fill:o.background,stroke:o.defaultLine,\"stroke-width\":1}).attr(\"transform\",\"translate(\"+m+\", \"+y+\")\").attr(\"d\",\"M0,0Z\"),I=[],R=a.gd,j=[];for(E=0;E<R.calcdata.length;E++)if(C=R.calcdata[E],O=C[0].trace,O._module&&O._module.selectPoints)if(a.subplot){if(O.subplot!==a.subplot)continue;I.push({selectPoints:O._module.selectPoints,cd:C,xaxis:a.xaxes[0],yaxis:a.yaxes[0]})}else{if(-1===L.indexOf(O.xaxis))continue;if(-1===T.indexOf(O.yaxis))continue;I.push({selectPoints:O._module.selectPoints,cd:C,xaxis:i.getFromId(R,O.xaxis),yaxis:i.getFromId(R,O.yaxis)})}a.moveFn=function(t,e){var n,r;_=Math.max(0,Math.min(M,t+x)),w=Math.max(0,Math.min(A,e+b));var o=Math.abs(_-x),i=Math.abs(w-b);for(\"select\"===f?(i<Math.min(.6*o,u)?(n=c([[x,0],[x,A],[_,A],[_,0]]),D.attr(\"d\",\"M\"+n.xmin+\",\"+(b-u)+\"h-4v\"+2*u+\"h4ZM\"+(n.xmax-1)+\",\"+(b-u)+\"h4v\"+2*u+\"h-4Z\")):o<Math.min(.6*i,u)?(n=c([[0,b],[0,w],[M,w],[M,b]]),D.attr(\"d\",\"M\"+(x-u)+\",\"+n.ymin+\"v-4h\"+2*u+\"v4ZM\"+(x-u)+\",\"+(n.ymax-1)+\"v4h\"+2*u+\"v-4Z\")):(n=c([[x,b],[x,w],[_,w],[_,b]]),D.attr(\"d\",\"M0,0Z\")),S.attr(\"d\",\"M\"+n.xmin+\",\"+n.ymin+\"H\"+(n.xmax-1)+\"V\"+(n.ymax-1)+\"H\"+n.xmin+\"Z\")):\"lasso\"===f&&(p.addPt([_,w]),n=c(p.filtered),S.attr(\"d\",\"M\"+p.filtered.join(\"L\")+\"Z\")),j=[],E=0;E<I.length;E++)P=I[E],[].push.apply(j,P.selectPoints(P,n));if(N={points:j},\"select\"===f){var l,s=N.range={};for(E=0;E<z.length;E++)r=z[E],l=r._id.charAt(0),s[r._id]=[r.p2d(n[l+\"min\"]),r.p2d(n[l+\"max\"])].sort(h)}else{var 
g=N.lassoPoints={};for(E=0;E<z.length;E++)r=z[E],g[r._id]=p.filtered.map(d(r))}a.gd.emit(\"plotly_selecting\",N)},a.doneFn=function(t,e){if(D.remove(),t||2!==e)a.gd.emit(\"plotly_selected\",N);else{for(S.remove(),E=0;E<I.length;E++)P=I[E],P.selectPoints(P,!1);R.emit(\"plotly_deselect\",null)}}}},{\"../../components/color\":18,\"../../lib/polygon\":95,\"./axes\":110,\"./constants\":115}],124:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"fast-isnumeric\"),o=t(\"../../lib\"),i=t(\"./constants\"),l=t(\"./clean_datum\"),s=t(\"./axis_ids\");e.exports=function(t){function e(e,n){if(e>0)return Math.log(e)/Math.LN10;if(0>=e&&n&&t.range&&2===t.range.length){var r=t.range[0],a=t.range[1];return.5*(r+a-3*u*Math.abs(r-a))}return i.BADNUM}function n(t){return Math.pow(10,t)}function c(t){return a(t)?Number(t):i.BADNUM}var u=10;if(t.c2l=\"log\"===t.type?e:c,t.l2c=\"log\"===t.type?n:c,t.l2d=function(e){return t.c2d(t.l2c(e))},t.p2d=function(e){return t.l2d(t.p2l(e))},t.setScale=function(){var e,n=t._gd._fullLayout._size;if(t._categories||(t._categories=[]),t.overlaying){var r=s.getFromId(t._gd,t.overlaying);t.domain=r.domain}for(t.range&&2===t.range.length&&t.range[0]!==t.range[1]||(t.range=[-1,1]),e=0;2>e;e++)a(t.range[e])||(t.range[e]=a(t.range[1-e])?t.range[1-e]*(e?10:.1):e?1:-1),t.range[e]<-(Number.MAX_VALUE/2)?t.range[e]=-(Number.MAX_VALUE/2):t.range[e]>Number.MAX_VALUE/2&&(t.range[e]=Number.MAX_VALUE/2);if(\"y\"===t._id.charAt(0)?(t._offset=n.t+(1-t.domain[1])*n.h,t._length=n.h*(t.domain[1]-t.domain[0]),t._m=t._length/(t.range[0]-t.range[1]),t._b=-t._m*t.range[1]):(t._offset=n.l+t.domain[0]*n.w,t._length=n.w*(t.domain[1]-t.domain[0]),t._m=t._length/(t.range[1]-t.range[0]),t._b=-t._m*t.range[0]),!isFinite(t._m)||!isFinite(t._b))throw o.notifier(\"Something went wrong with axis scaling\",\"long\"),t._gd._replotting=!1,new Error(\"axis scaling\")},t.l2p=function(e){return a(e)?r.round(t._b+t._m*e,2):i.BADNUM},t.p2l=function(e){return(e-t._b)/t._m},t.c2p=function(e,n){return t.l2p(t.c2l(e,n))},t.p2c=function(e){return t.l2c(t.p2l(e))},-1!==[\"linear\",\"log\",\"-\"].indexOf(t.type))t.c2d=c,t.d2c=function(t){return t=l(t),a(t)?Number(t):i.BADNUM},t.d2l=function(e,n){return\"log\"===t.type?t.c2l(t.d2c(e),n):t.d2c(e)};else if(\"date\"===t.type){if(t.c2d=function(t){return a(t)?o.ms2DateTime(t):i.BADNUM},t.d2c=function(t){return a(t)?Number(t):o.dateTime2ms(t)},t.d2l=t.d2c,t.range&&t.range.length>1)try{var f=t.range.map(o.dateTime2ms);!a(t.range[0])&&a(f[0])&&(t.range[0]=f[0]),!a(t.range[1])&&a(f[1])&&(t.range[1]=f[1])}catch(d){o.error(d,t.range)}}else\"category\"===t.type&&(t.c2d=function(e){return t._categories[Math.round(e)]},t.d2c=function(e){null!==e&&void 0!==e&&-1===t._categories.indexOf(e)&&t._categories.push(e);var n=t._categories.indexOf(e);return-1===n?i.BADNUM:n},t.d2l=t.d2c);t.makeCalcdata=function(e,n){var r,a,o;if(n in e)for(r=e[n],a=new Array(r.length),o=0;o<r.length;o++)a[o]=t.d2c(r[o]);else{var i=n+\"0\"in e?t.d2c(e[n+\"0\"]):0,l=e[\"d\"+n]?Number(e[\"d\"+n]):1;for(r=e[{x:\"y\",y:\"x\"}[n]],a=new Array(r.length),o=0;o<r.length;o++)a[o]=i+o*l}return a},t._min=[],t._max=[],t._minDtick=null,t._forceTick0=null}},{\"../../lib\":89,\"./axis_ids\":112,\"./clean_datum\":114,\"./constants\":115,d3:9,\"fast-isnumeric\":11}],125:[function(t,e,n){\"use strict\";function r(t){var e=[\"showexponent\",\"showtickprefix\",\"showticksuffix\"],n=e.filter(function(e){return void 0!==t[e]}),r=function(e){return t[e]===t[n[0]]};return n.every(r)||1===n.length?t[n[0]]:void 0}var 
a=t(\"../../lib\");e.exports=function(t,e,n,o,i){var l=r(t),s=n(\"tickprefix\");s&&n(\"showtickprefix\",l);var c=n(\"ticksuffix\");c&&n(\"showticksuffix\",l);var u=n(\"showticklabels\");if(u){var f=i.font||{},d=e.color===t.color?e.color:f.color;if(a.coerceFont(n,\"tickfont\",{family:f.family,size:f.size,color:d}),n(\"tickangle\"),\"category\"!==o){var h=n(\"tickformat\");h||\"date\"===o||(n(\"showexponent\",l),n(\"exponentformat\"))}}\"category\"===o||i.noHover||n(\"hoverformat\")}},{\"../../lib\":89}],126:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"./layout_attributes\");e.exports=function(t,e,n,o){var i=r.coerce2(t,e,a,\"ticklen\"),l=r.coerce2(t,e,a,\"tickwidth\"),s=r.coerce2(t,e,a,\"tickcolor\",e.color),c=n(\"ticks\",o.outerTicks||i||l||s?\"outside\":\"\");c||(delete e.ticklen,delete e.tickwidth,delete e.tickcolor)}},{\"../../lib\":89,\"./layout_attributes\":119}],127:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\");e.exports=function(t,e,n,a){var o=\"auto\";\"array\"!==t.tickmode||\"log\"!==a&&\"date\"!==a||(t.tickmode=\"auto\"),Array.isArray(t.tickvals)?o=\"array\":t.dtick&&r(t.dtick)&&(o=\"linear\");var i=n(\"tickmode\",o);if(\"auto\"===i)n(\"nticks\");else if(\"linear\"===i)n(\"tick0\"),n(\"dtick\");else{var l=n(\"tickvals\");void 0===l?e.tickmode=\"auto\":n(\"ticktext\")}}},{\"fast-isnumeric\":11}],128:[function(t,e,n){\"use strict\";e.exports={family:{valType:\"string\",noBlank:!0,strict:!0},size:{valType:\"number\",min:1},color:{valType:\"color\"}}},{}],129:[function(t,e,n){\"use strict\";var r=t(\"../plotly\"),a=t(\"./font_attributes\"),o=t(\"../components/color/attributes\"),i=r.Lib.extendFlat;e.exports={font:{family:i({},a.family,{dflt:'\"Open Sans\", verdana, arial, sans-serif'}),size:i({},a.size,{dflt:12}),color:i({},a.color,{dflt:o.defaultLine})},title:{valType:\"string\",dflt:\"Click to enter Plot title\"},titlefont:i({},a,{}),autosize:{valType:\"enumerated\",values:[!0,!1,\"initial\"]},width:{valType:\"number\",min:10,dflt:700},height:{valType:\"number\",min:10,dflt:450},margin:{l:{valType:\"number\",min:0,dflt:80},r:{valType:\"number\",min:0,dflt:80},t:{valType:\"number\",min:0,dflt:100},b:{valType:\"number\",min:0,dflt:80},pad:{valType:\"number\",min:0,dflt:0},autoexpand:{valType:\"boolean\",dflt:!0}},paper_bgcolor:{valType:\"color\",dflt:o.background},plot_bgcolor:{valType:\"color\",dflt:o.background},separators:{valType:\"string\",dflt:\".,\"},hidesources:{valType:\"boolean\",dflt:!1},smith:{valType:\"enumerated\",values:[!1],dflt:!1},showlegend:{valType:\"boolean\"},_composedModules:{\"*\":\"Fx\"},_nestedModules:{xaxis:\"Axes\",yaxis:\"Axes\",scene:\"gl3d\",geo:\"geo\",legend:\"Legend\",annotations:\"Annotations\",shapes:\"Shapes\",images:\"Images\",ternary:\"ternary\",mapbox:\"mapbox\"}}},{\"../components/color/attributes\":17,\"../plotly\":107,\"./font_attributes\":128}],130:[function(t,e,n){\"use strict\";function r(t){return\"object\"==typeof t&&(t=t.type),t}function a(t,e){e.text(\"\");var n=e.append(\"a\").attr({\"xlink:xlink:href\":\"#\",\"class\":\"link--impt link--embedview\",\"font-weight\":\"bold\"}).text(t._context.linkText+\" \"+String.fromCharCode(187));if(t._context.sendData)n.on(\"click\",function(){h.sendDataToCloud(t)});else{var r=window.location.pathname.split(\"/\"),a=window.location.search;n.attr({\"xlink:xlink:show\":\"new\",\"xlink:xlink:href\":\"/\"+r[2].split(\".\")[0]+\"/\"+r[1]+a})}}function o(t,e){for(var n=f.isPlainObject,r=Array.isArray,a=Object.keys(e),i=0;i<a.length;i++){var 
l=a[i],s=e[l],c=t[l];if(\"_\"===l.charAt(0)||\"function\"==typeof s){if(l in t)co"
+,
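// Illustrative sketch, not part of the minified bundle: the linear map that
// setScale computes in module 124 nearby. An axis converts data -> linearized
// -> pixel as p = _b + _m * l, where _m and _b come from the axis range and
// pixel length; log axes linearize with log10 first (Math.log(v)/Math.LN10
// above). Values here are illustrative.
function makeScale(range, lengthPx, isLog) {
  var c2l = isLog ? function (v) { return Math.log(v) / Math.LN10; }
                  : function (v) { return v; };
  var r0 = c2l(range[0]), r1 = c2l(range[1]);
  var m = lengthPx / (r1 - r0);   // pixels per linearized unit (_m)
  var b = -m * r0;                // pixel offset of linearized zero (_b)
  return {
    l2p: function (l) { return b + m * l; },
    p2l: function (p) { return (p - b) / m; },
    c2p: function (v) { return b + m * c2l(v); }
  };
}
// makeScale([1, 1000], 300, true).c2p(10) === 100   // one decade = 100px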
+"ntinue;t[l]=s}else if(r(s)&&r(c))for(var u=0;u<s.length;u++)n(s[u])&&n(c[u])&&o(c[u],s[u]);else n(s)&&n(c)&&(o(c,s),Object.keys(c).length||delete t[l])}}function i(t,e,n){if(Array.isArray(t.transforms))for(var r=t.transforms,a=e.transforms=[],o=0;o<r.length;o++){var i,l=r[o],s=l.type,c=y[s];c||f.warn(\"Unrecognized transform type \"+s+\".\"),c&&c.supplyDefaults?(i=c.supplyDefaults(l,e,n),i.type=s):i=f.extendFlat({},l),a.push(i)}}function l(t,e,n){for(var r=t.transforms,a=[t],o=0;o<r.length;o++){var i=r[o],l=i.type,s=y[l];s&&(a=s.transform(a,{transform:i,fullTrace:t,fullData:e,layout:n}))}return a}var s=t(\"d3\"),c=t(\"fast-isnumeric\"),u=t(\"../plotly\"),f=t(\"../lib\"),d=t(\"../components/color\"),h=e.exports={},p=h.modules={},g=h.allTypes=[],v=h.allCategories={},m=h.subplotsRegistry={},y=h.transformsRegistry={};h.attributes=t(\"./attributes\"),h.attributes.type.values=g,h.fontAttrs=t(\"./font_attributes\"),h.layoutAttributes=t(\"./layout_attributes\"),h.fontWeight=\"normal\",h.register=function(t,e,n,r){if(p[e])return void f.log(\"Type \"+e+\" already registered\");for(var a={},o=0;o<n.length;o++)a[n[o]]=!0,v[n[o]]=!0;p[e]={_module:t,categories:a},r&&Object.keys(r).length&&(p[e].meta=r),g.push(e)},h.getModule=function(t){if(void 0!==t.r)return f.warn(\"Tried to put a polar trace on an incompatible graph of cartesian data. Ignoring this dataset.\",t),!1;var e=p[r(t)];return e?e._module:!1},h.traceIs=function(t,e){if(t=r(t),\"various\"===t)return!1;var n=p[t];return n||(void 0!==t&&f.log(\"Unrecognized trace type \"+t+\".\"),n=p[h.attributes.type.dflt]),!!n.categories[e]},h.registerSubplot=function(t){var e=t.name;return m[e]?void f.log(\"Plot type \"+e+\" already registered.\"):void(m[e]=t)},h.findSubplotIds=function(t,e){var n=[];if(void 0===h.subplotsRegistry[e])return n;for(var r=h.subplotsRegistry[e].attr,a=0;a<t.length;a++){var o=t[a];h.traceIs(o,e)&&-1===n.indexOf(o[r])&&n.push(o[r])}return n},h.getSubplotIds=function(t,e){var n=h.subplotsRegistry[e];if(void 0===n)return[];if(!(\"cartesian\"!==e||t._has&&t._has(\"cartesian\")))return[];if(!(\"gl2d\"!==e||t._has&&t._has(\"gl2d\")))return[];if(\"cartesian\"===e||\"gl2d\"===e)return Object.keys(t._plots||{});for(var r=n.idRegex,a=Object.keys(t),o=[],i=0;i<a.length;i++){var l=a[i];r.test(l)&&o.push(l)}var s=n.idRoot.length;return o.sort(function(t,e){var n=+(t.substr(s)||1),r=+(e.substr(s)||1);return n-r}),o},h.getSubplotData=function(t,e,n){if(void 0===h.subplotsRegistry[e])return[];for(var r,a=h.subplotsRegistry[e].attr,o=[],i=0;i<t.length;i++)if(r=t[i],\"gl2d\"===e&&h.traceIs(r,\"gl2d\")){var l=u.Axes.subplotMatch,s=\"x\"+n.match(l)[1],c=\"y\"+n.match(l)[2];r[a[0]]===s&&r[a[1]]===c&&o.push(r)}else r[a]===n&&o.push(r);return o},h.redrawText=function(t){return t.data&&t.data[0]&&t.data[0].r?void 0:new Promise(function(e){setTimeout(function(){u.Annotations.drawAll(t),u.Legend.draw(t),(t.calcdata||[]).forEach(function(t){t[0]&&t[0].t&&t[0].t.cb&&t[0].t.cb()}),e(h.previousPromises(t))},300)})},h.resize=function(t){return new Promise(function(e,n){t&&\"none\"!==s.select(t).style(\"display\")||n(new Error(\"Resize must be passed a plot div element.\")),t._redrawTimer&&clearTimeout(t._redrawTimer),t._redrawTimer=setTimeout(function(){if((t._fullLayout||{}).autosize){var n=t.changed;t.autoplay=!0,u.relayout(t,{autosize:!0}),t.changed=n,e(t)}},100)})},h.previousPromises=function(t){return(t._promises||[]).length?Promise.all(t._promises).then(function(){t._promises=[]}):void 0},h.addLinks=function(t){var 
e=t._fullLayout,n=e._paper.selectAll(\"text.js-plot-link-container\").data([0]);n.enter().append(\"text\").classed(\"js-plot-link-container\",!0).style({\"font-family\":'\"Open Sans\", Arial, sans-serif',\"font-size\":\"12px\",fill:d.defaultLine,\"pointer-events\":\"all\"}).each(function(){var t=s.select(this);t.append(\"tspan\").classed(\"js-link-to-tool\",!0),t.append(\"tspan\").classed(\"js-link-spacer\",!0),t.append(\"tspan\").classed(\"js-sourcelinks\",!0)});var r=n.node(),o={y:e._paper.attr(\"height\")-9};document.body.contains(r)&&r.getComputedTextLength()>=e.width-20?(o[\"text-anchor\"]=\"start\",o.x=5):(o[\"text-anchor\"]=\"end\",o.x=e._paper.attr(\"width\")-7),n.attr(o);var i=n.select(\".js-link-to-tool\"),l=n.select(\".js-link-spacer\"),c=n.select(\".js-sourcelinks\");t._context.showSources&&t._context.showSources(t),t._context.showLink&&a(t,i),l.text(i.text()&&c.text()?\" - \":\"\")},h.sendDataToCloud=function(t){t.emit(\"plotly_beforeexport\");var e=window.PLOTLYENV&&window.PLOTLYENV.BASE_URL||\"https://plot.ly\",n=s.select(t).append(\"div\").attr(\"id\",\"hiddenform\").style(\"display\",\"none\"),r=n.append(\"form\").attr({action:e+\"/external\",method:\"post\",target:\"_blank\"}),a=r.append(\"input\").attr({type:\"text\",name:\"data\"});return a.node().value=h.graphJson(t,!1,\"keepdata\"),r.node().submit(),n.remove(),t.emit(\"plotly_afterexport\"),!1},h.supplyDefaults=function(t){var e,n=t._fullLayout||{},r=t._fullLayout={},a=t.layout||{},i=t._fullData||[],l=t._fullData=[],s=t.data||[];h.supplyLayoutGlobalDefaults(a,r),r._dataLength=s.length,h.supplyDataDefaults(s,l,r),r._has=h._hasPlotType.bind(r);var c=r._modules;for(e=0;e<c.length;e++){var f=c[e];f.cleanData&&f.cleanData(l)}if(i.length===s.length)for(e=0;e<l.length;e++)o(l[e],i[e]);h.supplyLayoutModuleDefaults(a,r,l),r._hasCartesian=r._has(\"cartesian\"),r._hasGeo=r._has(\"geo\"),r._hasGL3D=r._has(\"gl3d\"),r._hasGL2D=r._has(\"gl2d\"),r._hasTernary=r._has(\"ternary\"),r._hasPie=r._has(\"pie\"),h.cleanPlot(l,r,i,n),o(r,n),h.doAutoMargin(t);var d=u.Axes.list(t);for(e=0;e<d.length;e++){var p=d[e];p._gd=t,p.setScale()}if((t.calcdata||[]).length===l.length)for(e=0;e<l.length;e++){var g=l[e];(t.calcdata[e][0]||{}).trace=g}},h._hasPlotType=function(t){for(var e=this._basePlotModules||[],n=0;n<e.length;n++){var r=e[n];if(r.name===t)return!0}return!1},h.cleanPlot=function(t,e,n,r){var a,o,i=r._basePlotModules||[];for(a=0;a<i.length;a++){var l=i[a];l.clean&&l.clean(t,e,n,r)}var s=!!r._paper,c=!!r._infolayer;t:for(a=0;a<n.length;a++){var u=n[a],f=u.uid;for(o=0;o<t.length;o++){var d=t[o];if(f===d.uid)continue t}s&&r._paper.selectAll(\".hm\"+f+\",.contour\"+f+\",#clip\"+f).remove(),c&&r._infolayer.selectAll(\".cb\"+f).remove()}},h.supplyDataDefaults=function(t,e,n){function r(t){e.push(t);var n=t._module;n&&(f.pushUnique(a,n),f.pushUnique(o,t._module.basePlotModule),i++)}for(var a=n._modules=[],o=n._basePlotModules=[],i=0,s=0;s<t.length;s++){var c=t[s],u=h.supplyTraceDefaults(c,i,n);if(u.transforms&&u.transforms.length)for(var d=l(u,e,n),p=0;p<d.length;p++){var g=d[p],v=h.supplyTraceDefaults(g,i,n);g.uid=v.uid=u.uid+p,v.index=s,v._input=c,v._fullInput=u,v._expandedIndex=i,v._expandedInput=g,r(v)}else u.index=s,u._input=c,u._expandedIndex=i,r(u)}},h.supplyTraceDefaults=function(t,e,n){function r(e,n){return f.coerce(t,o,h.attributes,e,n)}function a(e,n){return h.traceIs(o,e)?f.coerce(t,o,h.subplotsRegistry[e].attributes,n):void 0}var o={},l=d.defaults[e%d.defaults.length],s=r(\"visible\");r(\"type\"),r(\"uid\");for(var 
c=Object.keys(m),u=0;u<c.length;u++){var p=c[u];if(-1===[\"cartesian\",\"gl2d\"].indexOf(p)){var g=m[p].attr;g&&a(p,g)}}if(s){var v=h.getModule(o);o._module=v,r(\"hoverinfo\",1===n._dataLength?\"x+y+z+text\":void 0),v&&v.supplyDefaults(t,o,l,n),r(\"name\",\"trace \"+e),h.traceIs(o,\"noOpacity\")||r(\"opacity\"),a(\"cartesian\",\"xaxis\"),a(\"cartesian\",\"yaxis\"),a(\"gl2d\",\"xaxis\"),a(\"gl2d\",\"yaxis\"),h.traceIs(o,\"showLegend\")&&(r(\"showlegend\"),r(\"legendgroup\")),i(t,o,n)}return o},h.supplyLayoutGlobalDefaults=function(t,e){function n(n,r){return f.coerce(t,e,h.layoutAttributes,n,r)}var r=f.coerceFont(n,\"font\");n(\"title\"),f.coerceFont(n,\"titlefont\",{family:r.family,size:Math.round(1.4*r.size),color:r.color});var a=n(\"autosize\",t.width&&t.height?!1:\"initial\");n(\"width\"),n(\"height\"),n(\"margin.l\"),n(\"margin.r\"),n(\"margin.t\"),n(\"margin.b\"),n(\"margin.pad\"),n(\"margin.autoexpand\"),\"initial\"!==a&&h.sanitizeMargins(e),n(\"paper_bgcolor\"),n(\"separators\"),n(\"hidesources\"),n(\"smith\")},h.supplyLayoutModuleDefaults=function(t,e,n){var r,a;u.Axes.supplyLayoutDefaults(t,e,n);var o=e._basePlotModules;for(r=0;r<o.length;r++)a=o[r],\"cartesian\"!==a.name&&a.supplyLayoutDefaults&&a.supplyLayoutDefaults(t,e,n);var i=e._modules;for(r=0;r<i.length;r++)a=i[r],a.supplyLayoutDefaults&&a.supplyLayoutDefaults(t,e,n);var l=[\"Fx\",\"Annotations\",\"Shapes\",\"Legend\",\"Images\"];for(r=0;r<l.length;r++)a=l[r],u[a]&&u[a].supplyLayoutDefaults(t,e,n)},h.purge=function(t){var e=t._fullLayout||{};void 0!==e._glcontainer&&e._glcontainer.remove(),void 0!==e._geocontainer&&e._geocontainer.remove(),e._modeBar&&e._modeBar.destroy(),delete t.data,delete t.layout,delete t._fullData,delete t._fullLayout,delete t.calcdata,delete t.framework,delete t.empty,delete t.fid,delete t.undoqueue,delete t.undonum,delete t.autoplay,delete t.changed,delete t._tester,delete t._testref,delete t._promises,delete t._redrawTimer,delete t._replotting,delete t.firstscatter,delete t.hmlumcount,delete t.hmpixcount,delete t.numboxes,delete t._hoverTimer,delete t._lastHoverTime,t.removeAllListeners&&t.removeAllListeners()},h.style=function(t){for(var e=t._fullLayout._modules,n=0;n<e.length;n++){var r=e[n];r.style&&r.style(t)}},h.sanitizeMargins=function(t){if(t&&t.margin){var e,n=t.width,r=t.height,a=t.margin,o=n-(a.l+a.r),i=r-(a.t+a.b);0>o&&(e=(n-1)/(a.l+a.r),a.l=Math.floor(e*a.l),a.r=Math.floor(e*a.r)),0>i&&(e=(r-1)/(a.t+a.b),a.t=Math.floor(e*a.t),a.b=Math.floor(e*a.b))}},h.autoMargin=function(t,e,n){var r=t._fullLayout;if(r._pushmargin||(r._pushmargin={}),r.margin.autoexpand!==!1){if(n){var a=void 0===n.pad?12:n.pad;n.l+n.r>.5*r.width&&(n.l=n.r=0),n.b+n.t>.5*r.height&&(n.b=n.t=0),r._pushmargin[e]={l:{val:n.x,size:n.l+a},r:{val:n.x,size:n.r+a},b:{val:n.y,size:n.b+a},t:{val:n.y,size:n.t+a}}}else delete r._pushmargin[e];t._replotting||h.doAutoMargin(t)}},h.doAutoMargin=function(t){var e=t._fullLayout;e._size||(e._size={}),e._pushmargin||(e._pushmargin={});var n=e._size,r=JSON.stringify(n),a=Math.max(e.margin.l||0,0),o=Math.max(e.margin.r||0,0),i=Math.max(e.margin.t||0,0),l=Math.max(e.margin.b||0,0),s=e._pushmargin;return e.margin.autoexpand!==!1&&(s.base={l:{val:0,size"
+,
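// Illustrative sketch, not part of the minified bundle: the registry pattern
// behind Plots.register / getModule / traceIs in module 130 nearby. Each
// trace type maps to its module plus a category lookup table (simplified:
// no meta info or subplot registry).
var modules = {};
function register(module, name, categories) {
  if (modules[name]) return;                 // first registration wins
  var cats = {};
  categories.forEach(function (c) { cats[c] = true; });
  modules[name] = { _module: module, categories: cats };
}
function traceIs(type, category) {
  var entry = modules[type];
  return !!(entry && entry.categories[category]);
}
// register({plot: function () {}}, 'scatter', ['cartesian', 'showLegend']);
// traceIs('scatter', 'cartesian') -> true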
+":a},r:{val:1,size:o},t:{val:1,size:i},b:{val:0,size:l}},Object.keys(s).forEach(function(t){var n=s[t].l||{},r=s[t].b||{},u=n.val,f=n.size,d=r.val,h=r.size;Object.keys(s).forEach(function(t){if(c(f)&&s[t].r){var n=s[t].r.val,r=s[t].r.size;if(n>u){var p=(f*n+(r-e.width)*u)/(n-u),g=(r*(1-u)+(f-e.width)*(1-n))/(n-u);p>=0&&g>=0&&p+g>a+o&&(a=p,o=g)}}if(c(h)&&s[t].t){var v=s[t].t.val,m=s[t].t.size;if(v>d){var y=(h*v+(m-e.height)*d)/(v-d),x=(m*(1-d)+(h-e.height)*(1-v))/(v-d);y>=0&&x>=0&&y+x>l+i&&(l=y,i=x)}}})})),n.l=Math.round(a),n.r=Math.round(o),n.t=Math.round(i),n.b=Math.round(l),n.p=Math.round(e.margin.pad),n.w=Math.round(e.width)-n.l-n.r,n.h=Math.round(e.height)-n.t-n.b,t._replotting||\"{}\"===r||r===JSON.stringify(e._size)?void 0:u.plot(t)},h.graphJson=function(t,e,n,r,a){function o(t){if(\"function\"==typeof t)return null;if(f.isPlainObject(t)){var e,r,a={};for(e in t)if(\"function\"!=typeof t[e]&&-1===[\"_\",\"[\"].indexOf(e.charAt(0))){if(\"keepdata\"===n){if(\"src\"===e.substr(e.length-3))continue}else if(\"keepstream\"===n){if(r=t[e+\"src\"],\"string\"==typeof r&&r.indexOf(\":\")>0&&!f.isPlainObject(t.stream))continue}else if(\"keepall\"!==n&&(r=t[e+\"src\"],\"string\"==typeof r&&r.indexOf(\":\")>0))continue;a[e]=o(t[e])}return a}return Array.isArray(t)?t.map(o):t&&t.getTime?f.ms2DateTime(t):t}(a&&e&&!t._fullData||a&&!e&&!t._fullLayout)&&h.supplyDefaults(t);var i=a?t._fullData:t.data,l=a?t._fullLayout:t.layout,s={data:(i||[]).map(function(t){var n=o(t);return e&&delete n.fit,n})};return e||(s.layout=o(l)),t.framework&&t.framework.isPolar&&(s=t.framework.getConfig()),\"object\"===r?s:JSON.stringify(s)}},{\"../components/color\":18,\"../lib\":89,\"../plotly\":107,\"./attributes\":108,\"./font_attributes\":128,\"./layout_attributes\":129,d3:9,\"fast-isnumeric\":11}],131:[function(t,e,n){\"use strict\";var r=t(\"../../traces/scatter/attributes\"),a=r.marker;e.exports={r:r.r,t:r.t,marker:{color:a.color,size:a.size,symbol:a.symbol,opacity:a.opacity}}},{\"../../traces/scatter/attributes\":167}],132:[function(t,e,n){\"use strict\";function r(t,e){var n={showline:{valType:\"boolean\"},showticklabels:{valType:\"boolean\"},tickorientation:{valType:\"enumerated\",values:[\"horizontal\",\"vertical\"]},ticklen:{valType:\"number\",min:0},tickcolor:{valType:\"color\"},ticksuffix:{valType:\"string\"},endpadding:{valType:\"number\"},visible:{valType:\"boolean\"}};return o({},e,n)}var a=t(\"../cartesian/layout_attributes\"),o=t(\"../../lib/extend\").extendFlat,i=o({},a.domain,{});e.exports={radialaxis:r(\"radial\",{range:{valType:\"info_array\",items:[{valType:\"number\"},{valType:\"number\"}]},domain:i,orientation:{valType:\"number\"}}),angularaxis:r(\"angular\",{range:{valType:\"info_array\",items:[{valType:\"number\",dflt:0},{valType:\"number\",dflt:360}]},domain:i}),layout:{direction:{valType:\"enumerated\",values:[\"clockwise\",\"counterclockwise\"]},orientation:{valType:\"angle\"}}}},{\"../../lib/extend\":88,\"../cartesian/layout_attributes\":119}],133:[function(t,e,n){var r=t(\"../../plotly\"),a=t(\"d3\"),o=e.exports={version:\"0.2.2\",manager:t(\"./micropolar_manager\")},i=r.Lib.extendDeepAll;o.Axis=function(){function t(t){n=t||n;var c=s.data,f=s.layout;return(\"string\"==typeof n||n.nodeName)&&(n=a.select(n)),n.datum(c).each(function(t,n){function s(t,e){return l(t)%360+f.orientation}var c=t.slice();u={data:o.util.cloneJson(c),layout:o.util.cloneJson(f)};var 
d=0;c.forEach(function(t,e){t.color||(t.color=f.defaultColorRange[d],d=(d+1)%f.defaultColorRange.length),t.strokeColor||(t.strokeColor=\"LinePlot\"===t.geometry?t.color:a.rgb(t.color).darker().toString()),u.data[e].color=t.color,u.data[e].strokeColor=t.strokeColor,u.data[e].strokeDash=t.strokeDash,u.data[e].strokeSize=t.strokeSize});var h=c.filter(function(t,e){var n=t.visible;return\"undefined\"==typeof n||n===!0}),p=!1,g=h.map(function(t,e){return p=p||\"undefined\"!=typeof t.groupId,t});if(p){var v=a.nest().key(function(t,e){return\"undefined\"!=typeof t.groupId?t.groupId:\"unstacked\"}).entries(g),m=[],y=v.map(function(t,e){if(\"unstacked\"===t.key)return t.values;var n=t.values[0].r.map(function(t,e){return 0});return t.values.forEach(function(t,e,r){t.yStack=[n],m.push(n),n=o.util.sumArrays(t.r,n)}),t.values});h=a.merge(y)}h.forEach(function(t,e){t.t=Array.isArray(t.t[0])?t.t:[t.t],t.r=Array.isArray(t.r[0])?t.r:[t.r]});var x=Math.min(f.width-f.margin.left-f.margin.right,f.height-f.margin.top-f.margin.bottom)/2;x=Math.max(10,x);var b,_=[f.margin.left+x,f.margin.top+x];if(p){var w=a.max(o.util.sumArrays(o.util.arrayLast(h).r[0],o.util.arrayLast(m)));b=[0,w]}else b=a.extent(o.util.flattenArray(h.map(function(t,e){return t.r})));f.radialAxis.domain!=o.DATAEXTENT&&(b[0]=0),r=a.scale.linear().domain(f.radialAxis.domain!=o.DATAEXTENT&&f.radialAxis.domain?f.radialAxis.domain:b).range([0,x]),u.layout.radialAxis.domain=r.domain();var k,M=o.util.flattenArray(h.map(function(t,e){return t.t})),A=\"string\"==typeof M[0];A&&(M=o.util.deduplicate(M),k=M.slice(),M=a.range(M.length),h=h.map(function(t,e){var n=t;return t.t=[M],p&&(n.yStack=t.yStack),n}));var L=h.filter(function(t,e){return\"LinePlot\"===t.geometry||\"DotPlot\"===t.geometry}).length===h.length,T=null===f.needsEndSpacing?A||!L:f.needsEndSpacing,z=f.angularAxis.domain&&f.angularAxis.domain!=o.DATAEXTENT&&!A&&f.angularAxis.domain[0]>=0,S=z?f.angularAxis.domain:a.extent(M),E=Math.abs(M[1]-M[0]);L&&!A&&(E=0);var C=S.slice();T&&A&&(C[1]+=E);var O=f.angularAxis.ticksCount||4;O>8&&(O=O/(O/8)+O%8),f.angularAxis.ticksStep&&(O=(C[1]-C[0])/O);var P=f.angularAxis.ticksStep||(C[1]-C[0])/(O*(f.minorTicks+1));k&&(P=Math.max(Math.round(P),1)),C[2]||(C[2]=P);var N=a.range.apply(this,C);if(N=N.map(function(t,e){return parseFloat(t.toPrecision(12))}),l=a.scale.linear().domain(C.slice(0,2)).range(\"clockwise\"===f.direction?[0,360]:[360,0]),u.layout.angularAxis.domain=l.domain(),u.layout.angularAxis.endPadding=T?E:0,e=a.select(this).select(\"svg.chart-root\"),\"undefined\"==typeof e||e.empty()){var D=\"<svg xmlns='http://www.w3.org/2000/svg' class='chart-root'>' + '<g class='outer-group'>' + '<g class='chart-group'>' + '<circle class='background-circle'></circle>' + '<g class='geometry-group'></g>' + '<g class='radial axis-group'>' + '<circle class='outside-circle'></circle>' + '</g>' + '<g class='angular axis-group'></g>' + '<g class='guides-group'><line></line><circle r='0'></circle></g>' + '</g>' + '<g class='legend-group'></g>' + '<g class='tooltips-group'></g>' + '<g class='title-group'><text></text></g>' + '</g>' + '</svg>\",I=(new DOMParser).parseFromString(D,\"application/xml\"),R=this.appendChild(this.ownerDocument.importNode(I.documentElement,!0));e=a.select(R)}e.select(\".guides-group\").style({\"pointer-events\":\"none\"}),e.select(\".angular.axis-group\").style({\"pointer-events\":\"none\"}),e.select(\".radial.axis-group\").style({\"pointer-events\":\"none\"});var 
j,q=e.select(\".chart-group\"),F={fill:\"none\",stroke:f.tickColor},B={\"font-size\":f.font.size,\"font-family\":f.font.family,fill:f.font.color,\"text-shadow\":[\"-1px 0px\",\"1px -1px\",\"-1px 1px\",\"1px 1px\"].map(function(t,e){return\" \"+t+\" 0 \"+f.font.outlineColor}).join(\",\")};if(f.showLegend){j=e.select(\".legend-group\").attr({transform:\"translate(\"+[x,f.margin.top]+\")\"}).style({display:\"block\"});var H=h.map(function(t,e){var n=o.util.cloneJson(t);return n.symbol=\"DotPlot\"===t.geometry?t.dotType||\"circle\":\"LinePlot\"!=t.geometry?\"square\":\"line\",n.visibleInLegend=\"undefined\"==typeof t.visibleInLegend||t.visibleInLegend,n.color=\"LinePlot\"===t.geometry?t.strokeColor:t.color,n});o.Legend().config({data:h.map(function(t,e){return t.name||\"Element\"+e}),legendConfig:i({},o.Legend.defaultConfig().legendConfig,{container:j,elements:H,reverseOrder:f.legend.reverseOrder})})();var V=j.node().getBBox();x=Math.min(f.width-V.width-f.margin.left-f.margin.right,f.height-f.margin.top-f.margin.bottom)/2,x=Math.max(10,x),_=[f.margin.left+x,f.margin.top+x],r.range([0,x]),u.layout.radialAxis.domain=r.domain(),j.attr(\"transform\",\"translate(\"+[_[0]+x,_[1]-x]+\")\")}else j=e.select(\".legend-group\").style({display:\"none\"});e.attr({width:f.width,height:f.height}).style({opacity:f.opacity}),q.attr(\"transform\",\"translate(\"+_+\")\").style({cursor:\"crosshair\"});var Z=[(f.width-(f.margin.left+f.margin.right+2*x+(V?V.width:0)))/2,(f.height-(f.margin.top+f.margin.bottom+2*x))/2];if(Z[0]=Math.max(0,Z[0]),Z[1]=Math.max(0,Z[1]),e.select(\".outer-group\").attr(\"transform\",\"translate(\"+Z+\")\"),f.title){var Y=e.select(\"g.title-group text\").style(B).text(f.title),U=Y.node().getBBox();Y.attr({x:_[0]-U.width/2,y:_[1]-x-20})}var X=e.select(\".radial.axis-group\");if(f.radialAxis.gridLinesVisible){var G=X.selectAll(\"circle.grid-circle\").data(r.ticks(5));G.enter().append(\"circle\").attr({\"class\":\"grid-circle\"}).style(F),G.attr(\"r\",r),G.exit().remove()}X.select(\"circle.outside-circle\").attr({r:x}).style(F);var $=e.select(\"circle.background-circle\").attr({r:x}).style({fill:f.backgroundColor,stroke:f.stroke});if(f.radialAxis.visible){var Q=a.svg.axis().scale(r).ticks(5).tickSize(5);X.call(Q).attr({transform:\"rotate(\"+f.radialAxis.orientation+\")\"}),X.selectAll(\".domain\").style(F),X.selectAll(\"g>text\").text(function(t,e){return this.textContent+f.radialAxis.ticksSuffix}).style(B).style({\"text-anchor\":\"start\"}).attr({x:0,y:0,dx:0,dy:0,transform:function(t,e){return\"horizontal\"===f.radialAxis.tickOrientation?\"rotate(\"+-f.radialAxis.orientation+\") translate(\"+[0,B[\"font-size\"]]+\")\":\"translate(\"+[0,B[\"font-size\"]]+\")\"}}),X.selectAll(\"g>line\").style({stroke:\"black\"})}var W=e.select(\".angular.axis-group\").selectAll(\"g.angular-tick\").data(N),J=W.enter().append(\"g\").classed(\"angular-tick\",!0);W.attr({transform:function(t,e){return\"rotate(\"+s(t,e)+\")\"}}).style({display:f.angularAxis.visible?\"block\":\"none\"}),W.exit().remove(),J.append(\"line\").classed(\"grid-line\",!0).classed(\"major\",function(t,e){return e%(f.minorTicks+1)==0}).classed(\"minor\",function(t,e){return!(e%(f.minorTicks+1)==0)}).style(F),J.selectAll(\".minor\").style({stroke:f.minorTickColor}),W.select(\"line.grid-line\").attr({x1:f.tickLength?x-f.tickLength:0,x2:x}).style({display:f.angularAxis.gridLinesVisible?\"block\":\"none\"}),J.append(\"text\""
+,
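// Illustrative sketch, not part of the minified bundle: the recursive
// sanitiser that graphJson in module 130 nearby applies before serialising a
// figure -- functions, underscore-prefixed internals, and (in keepdata mode)
// *src keys are stripped. Simplified: the real version also handles Date
// objects and the keepstream/keepall modes.
function stripInternal(obj) {
  if (Array.isArray(obj)) return obj.map(stripInternal);
  if (obj === null || typeof obj !== 'object') return obj;
  var out = {};
  Object.keys(obj).forEach(function (k) {
    var v = obj[k];
    if (k.charAt(0) === '_' || typeof v === 'function') return; // internals
    if (k.slice(-3) === 'src') return;   // keepdata-style: drop data sources
    out[k] = stripInternal(v);
  });
  return out;
}
// JSON.stringify(stripInternal({x: [1], _fullData: {}, xsrc: 'id:1'}))
// -> '{"x":[1]}'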
+").classed(\"axis-text\",!0).style(B);var K=W.select(\"text.axis-text\").attr({x:x+f.labelOffset,dy:\".35em\",transform:function(t,e){var n=s(t,e),r=x+f.labelOffset,a=f.angularAxis.tickOrientation;return\"horizontal\"==a?\"rotate(\"+-n+\" \"+r+\" 0)\":\"radial\"==a?270>n&&n>90?\"rotate(180 \"+r+\" 0)\":null:\"rotate(\"+(180>=n&&n>0?-90:90)+\" \"+r+\" 0)\"}}).style({\"text-anchor\":\"middle\",display:f.angularAxis.labelsVisible?\"block\":\"none\"}).text(function(t,e){return e%(f.minorTicks+1)!=0?\"\":k?k[t]+f.angularAxis.ticksSuffix:t+f.angularAxis.ticksSuffix}).style(B);f.angularAxis.rewriteTicks&&K.text(function(t,e){return e%(f.minorTicks+1)!=0?\"\":f.angularAxis.rewriteTicks(this.textContent,e)});var tt=a.max(q.selectAll(\".angular-tick text\")[0].map(function(t,e){return t.getCTM().e+t.getBBox().width}));j.attr({transform:\"translate(\"+[x+tt,f.margin.top]+\")\"});var et=e.select(\"g.geometry-group\").selectAll(\"g\").size()>0,nt=e.select(\"g.geometry-group\").selectAll(\"g.geometry\").data(h);if(nt.enter().append(\"g\").attr({\"class\":function(t,e){return\"geometry geometry\"+e}}),nt.exit().remove(),h[0]||et){var rt=[];h.forEach(function(t,e){var n={};n.radialScale=r,n.angularScale=l,n.container=nt.filter(function(t,n){return n==e}),n.geometry=t.geometry,n.orientation=f.orientation,n.direction=f.direction,n.index=e,rt.push({data:t,geometryConfig:n})});var at=a.nest().key(function(t,e){return\"undefined\"!=typeof t.data.groupId||\"unstacked\"}).entries(rt),ot=[];at.forEach(function(t,e){\"unstacked\"===t.key?ot=ot.concat(t.values.map(function(t,e){return[t]})):ot.push(t.values)}),ot.forEach(function(t,e){var n;n=Array.isArray(t)?t[0].geometryConfig.geometry:t.geometryConfig.geometry;var r=t.map(function(t,e){return i(o[n].defaultConfig(),t)});o[n]().config(r)()})}var it,lt,st=e.select(\".guides-group\"),ct=e.select(\".tooltips-group\"),ut=o.tooltipPanel().config({container:ct,fontSize:8})(),ft=o.tooltipPanel().config({container:ct,fontSize:8})(),dt=o.tooltipPanel().config({container:ct,hasTick:!0})();if(!A){var ht=st.select(\"line\").attr({\n"
+,
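// Illustrative sketch, not part of the minified bundle: the polar <->
// cartesian math that the guide handlers in the surrounding micropolar code
// rely on (util.convertToCartesian and the angle/radius fields of
// util.getMousePos).
function polarToCartesian(r, degrees) {
  var a = degrees * Math.PI / 180;
  return [r * Math.cos(a), r * Math.sin(a)];
}
function cartesianToPolar(x, y) {
  return {
    radius: Math.sqrt(x * x + y * y),
    angle: 180 * (Math.atan2(y, x) + Math.PI) / Math.PI   // 0..360, as above
  };
}
// polarToCartesian(1, 90) -> [~0, 1]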
+"x1:0,y1:0,y2:0}).style({stroke:\"grey\",\"pointer-events\":\"none\"});q.on(\"mousemove.angular-guide\",function(t,e){var n=o.util.getMousePos($).angle;ht.attr({x2:-x,transform:\"rotate(\"+n+\")\"}).style({opacity:.5});var r=(n+180+360-f.orientation)%360;it=l.invert(r);var a=o.util.convertToCartesian(x+12,n+180);ut.text(o.util.round(it)).move([a[0]+_[0],a[1]+_[1]])}).on(\"mouseout.angular-guide\",function(t,e){st.select(\"line\").style({opacity:0})})}var pt=st.select(\"circle\").style({stroke:\"grey\",fill:\"none\"});q.on(\"mousemove.radial-guide\",function(t,e){var n=o.util.getMousePos($).radius;pt.attr({r:n}).style({opacity:.5}),lt=r.invert(o.util.getMousePos($).radius);var a=o.util.convertToCartesian(n,f.radialAxis.orientation);ft.text(o.util.round(lt)).move([a[0]+_[0],a[1]+_[1]])}).on(\"mouseout.radial-guide\",function(t,e){pt.style({opacity:0}),dt.hide(),ut.hide(),ft.hide()}),e.selectAll(\".geometry-group .mark\").on(\"mouseover.tooltip\",function(t,n){var r=a.select(this),i=r.style(\"fill\"),l=\"black\",s=r.style(\"opacity\")||1;if(r.attr({\"data-opacity\":s}),\"none\"!=i){r.attr({\"data-fill\":i}),l=a.hsl(i).darker().toString(),r.style({fill:l,opacity:1});var c={t:o.util.round(t[0]),r:o.util.round(t[1])};A&&(c.t=k[t[0]]);var u=\"t: \"+c.t+\", r: \"+c.r,f=this.getBoundingClientRect(),d=e.node().getBoundingClientRect(),h=[f.left+f.width/2-Z[0]-d.left,f.top+f.height/2-Z[1]-d.top];dt.config({color:l}).text(u),dt.move(h)}else i=r.style(\"stroke\"),r.attr({\"data-stroke\":i}),l=a.hsl(i).darker().toString(),r.style({stroke:l,opacity:1})}).on(\"mousemove.tooltip\",function(t,e){return 0!=a.event.which?!1:void(a.select(this).attr(\"data-fill\")&&dt.show())}).on(\"mouseout.tooltip\",function(t,e){dt.hide();var n=a.select(this),r=n.attr(\"data-fill\");r?n.style({fill:r,opacity:n.attr(\"data-opacity\")}):n.style({stroke:n.attr(\"data-stroke\"),opacity:n.attr(\"data-opacity\")})})}),d}var e,n,r,l,s={data:[],layout:{}},c={},u={},f=a.dispatch(\"hover\"),d={};return d.render=function(e){return t(e),this},d.config=function(t){if(!arguments.length)return s;var e=o.util.cloneJson(t);return e.data.forEach(function(t,e){s.data[e]||(s.data[e]={}),i(s.data[e],o.Axis.defaultConfig().data[0]),i(s.data[e],t)}),i(s.layout,o.Axis.defaultConfig().layout),i(s.layout,e.layout),this},d.getLiveConfig=function(){return u},d.getinputConfig=function(){return c},d.radialScale=function(t){return r},d.angularScale=function(t){return l},d.svg=function(){return e},a.rebind(d,f,\"on\"),d},o.Axis.defaultConfig=function(t,e){var n={data:[{t:[1,2,3,4],r:[10,11,12,13],name:\"Line1\",geometry:\"LinePlot\",color:null,strokeDash:\"solid\",strokeColor:null,strokeSize:\"1\",visibleInLegend:!0,opacity:1}],layout:{defaultColorRange:a.scale.category10().range(),title:null,height:450,width:500,margin:{top:40,right:40,bottom:40,left:40},font:{size:12,color:\"gray\",outlineColor:\"white\",family:\"Tahoma, sans-serif\"},direction:\"clockwise\",orientation:0,labelOffset:10,radialAxis:{domain:null,orientation:-45,ticksSuffix:\"\",visible:!0,gridLinesVisible:!0,tickOrientation:\"horizontal\",rewriteTicks:null},angularAxis:{domain:[0,360],ticksSuffix:\"\",visible:!0,gridLinesVisible:!0,labelsVisible:!0,tickOrientation:\"horizontal\",rewriteTicks:null,ticksCount:null,ticksStep:null},minorTicks:0,tickLength:null,tickColor:\"silver\",minorTickColor:\"#eee\",backgroundColor:\"none\",needsEndSpacing:null,showLegend:!0,legend:{reverseOrder:!1},opacity:1}};return 
n},o.util={},o.DATAEXTENT=\"dataExtent\",o.AREA=\"AreaChart\",o.LINE=\"LinePlot\",o.DOT=\"DotPlot\",o.BAR=\"BarChart\",o.util._override=function(t,e){for(var n in t)n in e&&(e[n]=t[n])},o.util._extend=function(t,e){for(var n in t)e[n]=t[n]},o.util._rndSnd=function(){return 2*Math.random()-1+(2*Math.random()-1)+(2*Math.random()-1)},o.util.dataFromEquation2=function(t,e){var n=e||6,r=a.range(0,360+n,n).map(function(e,n){var r=e*Math.PI/180,a=t(r);return[e,a]});return r},o.util.dataFromEquation=function(t,e,n){var r=e||6,o=[],i=[];a.range(0,360+r,r).forEach(function(e,n){var r=e*Math.PI/180,a=t(r);o.push(e),i.push(a)});var l={t:o,r:i};return n&&(l.name=n),l},o.util.ensureArray=function(t,e){if(\"undefined\"==typeof t)return null;var n=[].concat(t);return a.range(e).map(function(t,e){return n[e]||n[0]})},o.util.fillArrays=function(t,e,n){return e.forEach(function(e,r){t[e]=o.util.ensureArray(t[e],n)}),t},o.util.cloneJson=function(t){return JSON.parse(JSON.stringify(t))},o.util.validateKeys=function(t,e){\"string\"==typeof e&&(e=e.split(\".\"));var n=e.shift();return t[n]&&(!e.length||objHasKeys(t[n],e))},o.util.sumArrays=function(t,e){return a.zip(t,e).map(function(t,e){return a.sum(t)})},o.util.arrayLast=function(t){return t[t.length-1]},o.util.arrayEqual=function(t,e){for(var n=Math.max(t.length,e.length,1);n-- >=0&&t[n]===e[n];);return-2===n},o.util.flattenArray=function(t){for(var e=[];!o.util.arrayEqual(e,t);)e=t,t=[].concat.apply([],t);return t},o.util.deduplicate=function(t){return t.filter(function(t,e,n){return n.indexOf(t)==e})},o.util.convertToCartesian=function(t,e){var n=e*Math.PI/180,r=t*Math.cos(n),a=t*Math.sin(n);return[r,a]},o.util.round=function(t,e){var n=e||2,r=Math.pow(10,n);return Math.round(t*r)/r},o.util.getMousePos=function(t){var e=a.mouse(t.node()),n=e[0],r=e[1],o={};return o.x=n,o.y=r,o.pos=e,o.angle=180*(Math.atan2(r,n)+Math.PI)/Math.PI,o.radius=Math.sqrt(n*n+r*r),o},o.util.duplicatesCount=function(t){for(var e,n={},r={},a=0,o=t.length;o>a;a++)e=t[a],e in n?(n[e]++,r[e]=n[e]):n[e]=1;return r},o.util.duplicates=function(t){return Object.keys(o.util.duplicatesCount(t))},o.util.translator=function(t,e,n,r){if(r){var a=n.slice();n=e,e=a}var o=e.reduce(function(t,e){return\"undefined\"!=typeof t?t[e]:void 0},t);\"undefined\"!=typeof o&&(e.reduce(function(t,n,r){return\"undefined\"!=typeof t?(r===e.length-1&&delete t[n],t[n]):void 0},t),n.reduce(function(t,e,r){return\"undefined\"==typeof t[e]&&(t[e]={}),r===n.length-1&&(t[e]=o),t[e]},t))},o.PolyChart=function(){function t(){var t=n[0].geometryConfig,e=t.container;\"string\"==typeof e&&(e=a.select(e)),e.datum(n).each(function(e,n){function r(e,n){var r=t.radialScale(e[1]),a=(t.angularScale(e[0])+t.orientation)*Math.PI/180;return{r:r,t:a}}function o(t){var e=t.r*Math.cos(t.t),n=t.r*Math.sin(t.t);return{x:e,y:n}}var i=!!e[0].data.yStack,s=e.map(function(t,e){return i?a.zip(t.data.t[0],t.data.r[0],t.data.yStack[0]):a.zip(t.data.t[0],t.data.r[0])}),c=t.angularScale,u=t.radialScale.domain()[0],f={};f.bar=function(n,r,o){var i=e[o].data,l=t.radialScale(n[1])-t.radialScale(0),s=t.radialScale(n[2]||0),u=i.barWidth;a.select(this).attr({\"class\":\"mark bar\",d:\"M\"+[[l+s,-u/2],[l+s,u/2],[s,u/2],[s,-u/2]].join(\"L\")+\"Z\",transform:function(e,n){return\"rotate(\"+(t.orientation+c(e[0]))+\")\"}})},f.dot=function(t,n,i){var l=t[2]?[t[0],t[1]+t[2]]:t,s=a.svg.symbol().size(e[i].data.dotSize).type(e[i].data.dotType)(t,n);a.select(this).attr({\"class\":\"mark dot\",d:s,transform:function(t,e){var 
n=o(r(l));return\"translate(\"+[n.x,n.y]+\")\"}})};var d=a.svg.line.radial().interpolate(e[0].data.lineInterpolation).radius(function(e){return t.radialScale(e[1])}).angle(function(e){return t.angularScale(e[0])*Math.PI/180});f.line=function(n,r,o){var i=n[2]?s[o].map(function(t,e){return[t[0],t[1]+t[2]]}):s[o];if(a.select(this).each(f.dot).style({opacity:function(t,n){return+e[o].data.dotVisible},fill:v.stroke(n,r,o)}).attr({\"class\":\"mark dot\"}),!(r>0)){var l=a.select(this.parentNode).selectAll(\"path.line\").data([0]);l.enter().insert(\"path\"),l.attr({\"class\":\"line\",d:d(i),transform:function(e,n){return\"rotate(\"+(t.orientation+90)+\")\"},\"pointer-events\":\"none\"}).style({fill:function(t,e){return v.fill(n,r,o)},\"fill-opacity\":0,stroke:function(t,e){return v.stroke(n,r,o)},\"stroke-width\":function(t,e){return v[\"stroke-width\"](n,r,o)},\"stroke-dasharray\":function(t,e){return v[\"stroke-dasharray\"](n,r,o)},opacity:function(t,e){return v.opacity(n,r,o)},display:function(t,e){return v.display(n,r,o)}})}};var h=t.angularScale.range(),p=Math.abs(h[1]-h[0])/s[0].length*Math.PI/180,g=a.svg.arc().startAngle(function(t){return-p/2}).endAngle(function(t){return p/2}).innerRadius(function(e){return t.radialScale(u+(e[2]||0))}).outerRadius(function(e){return t.radialScale(u+(e[2]||0))+t.radialScale(e[1])});f.arc=function(e,n,r){a.select(this).attr({\"class\":\"mark arc\",d:g,transform:function(e,n){return\"rotate(\"+(t.orientation+c(e[0])+90)+\")\"}})};var v={fill:function(t,n,r){return e[r].data.color},stroke:function(t,n,r){return e[r].data.strokeColor},\"stroke-width\":function(t,n,r){return e[r].data.strokeSize+\"px\"},\"stroke-dasharray\":function(t,n,r){return l[e[r].data.strokeDash]},opacity:function(t,n,r){return e[r].data.opacity},display:function(t,n,r){return\"undefined\"==typeof e[r].data.visible||e[r].data.visible?\"block\":\"none\"}},m=a.select(this).selectAll(\"g.layer\").data(s);m.enter().append(\"g\").attr({\"class\":\"layer\"});var y=m.selectAll(\"path.mark\").data(function(t,e){return t});y.enter().append(\"path\").attr({\"class\":\"mark\"}),y.style(v).each(f[t.geometryType]),y.exit().remove(),m.exit().remove()})}var e,n=[o.PolyChart.defaultConfig()],r=a.dispatch(\"hover\"),l={solid:\"none\",dash:[5,2],dot:[2,5]};return t.config=function(t){return arguments.length?(t.forEach(function(t,e){n[e]||(n[e]={}),i(n[e],o.PolyChart.defaultConfig()),i(n[e],t)}),this):n},t.getColorScale=function(){return e},a.rebind(t,r,\"on\"),t},o.PolyChart.defaultConfig=function(){var t={data:{name:\"geom1\",t:[[1,2,3,4]],r:[[1,2,3,4]],dotType:\"circle\",dotSize:64,dotVisible:!1,barWidth:20,color:\"#ffa500\",strokeSize:1,strokeColor:\"silver\",strokeDash:\"solid\",opacity:1,index:0,visible:!0,visibleInLegend:!0},geometryConfig:{geometry:\"LinePlot\",geometryType:\"arc\",direction:\"clockwise\",orientation:0,container:\"body\",radialScale:null,angularScale:null,colorScale:a.scale.category20()}};return t},o.BarChart=function(){return o.PolyChart()},o.BarChart.defaultConfig=function(){var t={geometryConfig:{geometryType:\"bar\"}};return t},o.AreaChart=function(){return o.PolyChart()},o.AreaChart.defaultConfig=function(){var t={geometryConfig:{geometryType:\"arc\"}};return t},o.DotPlot=function(){ret"
+,
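// Illustrative sketch, not part of the minified bundle: a re-implementation
// of micropolar's util.translator, which the plotly adapter below uses to
// move values between attribute paths (e.g. ['marker','color'] -> ['color']);
// reverse === true swaps the direction.
function translate(obj, fromPath, toPath, reverse) {
  if (reverse) { var tmp = fromPath; fromPath = toPath; toPath = tmp; }
  var src = obj;
  for (var i = 0; i < fromPath.length && src !== undefined; i++) {
    var v = src[fromPath[i]];
    if (i === fromPath.length - 1) delete src[fromPath[i]];  // unlink leaf
    src = v;
  }
  if (src === undefined) return;           // nothing to move
  var dst = obj;
  for (var j = 0; j < toPath.length - 1; j++) {
    if (dst[toPath[j]] === undefined) dst[toPath[j]] = {};
    dst = dst[toPath[j]];
  }
  dst[toPath[toPath.length - 1]] = src;
}
// var t = {marker: {color: 'red'}};
// translate(t, ['marker', 'color'], ['color']);
// t -> {marker: {}, color: 'red'}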
+"urn o.PolyChart()},o.DotPlot.defaultConfig=function(){var t={geometryConfig:{geometryType:\"dot\",dotType:\"circle\"}};return t},o.LinePlot=function(){return o.PolyChart()},o.LinePlot.defaultConfig=function(){var t={geometryConfig:{geometryType:\"line\"}};return t},o.Legend=function(){function t(){var n=e.legendConfig,r=e.data.map(function(t,e){return[].concat(t).map(function(t,r){var a=i({},n.elements[e]);return a.name=t,a.color=[].concat(n.elements[e].color)[r],a})}),o=a.merge(r);o=o.filter(function(t,e){return n.elements[e]&&(n.elements[e].visibleInLegend||\"undefined\"==typeof n.elements[e].visibleInLegend)}),n.reverseOrder&&(o=o.reverse());var l=n.container;(\"string\"==typeof l||l.nodeName)&&(l=a.select(l));var s=o.map(function(t,e){return t.color}),c=n.fontSize,u=null==n.isContinuous?\"number\"==typeof o[0]:n.isContinuous,f=u?n.height:c*o.length,d=l.classed(\"legend-group\",!0),h=d.selectAll(\"svg\").data([0]),p=h.enter().append(\"svg\").attr({width:300,height:f+c,xmlns:\"http://www.w3.org/2000/svg\",\"xmlns:xlink\":\"http://www.w3.org/1999/xlink\",version:\"1.1\"});p.append(\"g\").classed(\"legend-axis\",!0),p.append(\"g\").classed(\"legend-marks\",!0);var g=a.range(o.length),v=a.scale[u?\"linear\":\"ordinal\"]().domain(g).range(s),m=a.scale[u?\"linear\":\"ordinal\"]().domain(g)[u?\"range\":\"rangePoints\"]([0,f]),y=function(t,e){var n=3*e;return\"line\"===t?\"M\"+[[-e/2,-e/12],[e/2,-e/12],[e/2,e/12],[-e/2,e/12]]+\"Z\":-1!=a.svg.symbolTypes.indexOf(t)?a.svg.symbol().type(t).size(n)():a.svg.symbol().type(\"square\").size(n)()};if(u){var x=h.select(\".legend-marks\").append(\"defs\").append(\"linearGradient\").attr({id:\"grad1\",x1:\"0%\",y1:\"0%\",x2:\"0%\",y2:\"100%\"}).selectAll(\"stop\").data(s);x.enter().append(\"stop\"),x.attr({offset:function(t,e){return e/(s.length-1)*100+\"%\"}}).style({\"stop-color\":function(t,e){return t}}),h.append(\"rect\").classed(\"legend-mark\",!0).attr({height:n.height,width:n.colorBandWidth,fill:\"url(#grad1)\"})}else{var b=h.select(\".legend-marks\").selectAll(\"path.legend-mark\").data(o);b.enter().append(\"path\").classed(\"legend-mark\",!0),b.attr({transform:function(t,e){return\"translate(\"+[c/2,m(e)+c/2]+\")\"},d:function(t,e){var n=t.symbol;return y(n,c)},fill:function(t,e){return v(e)}}),b.exit().remove()}var _=a.svg.axis().scale(m).orient(\"right\"),w=h.select(\"g.legend-axis\").attr({transform:\"translate(\"+[u?n.colorBandWidth:c,c/2]+\")\"}).call(_);return w.selectAll(\".domain\").style({fill:\"none\",stroke:\"none\"}),w.selectAll(\"line\").style({fill:\"none\",stroke:u?n.textColor:\"none\"}),w.selectAll(\"text\").style({fill:n.textColor,\"font-size\":n.fontSize}).text(function(t,e){return o[e].name}),t}var e=o.Legend.defaultConfig(),n=a.dispatch(\"hover\");return t.config=function(t){return arguments.length?(i(e,t),this):e},a.rebind(t,n,\"on\"),t},o.Legend.defaultConfig=function(t,e){var n={data:[\"a\",\"b\",\"c\"],legendConfig:{elements:[{symbol:\"line\",color:\"red\"},{symbol:\"square\",color:\"yellow\"},{symbol:\"diamond\",color:\"limegreen\"}],height:150,colorBandWidth:30,fontSize:12,container:\"body\",isContinuous:null,textColor:\"grey\",reverseOrder:!1}};return n},o.tooltipPanel=function(){var t,e,n,r={container:null,hasTick:!1,fontSize:12,color:\"white\",padding:5},l=\"tooltip-\"+o.tooltipPanel.uid++,s=10,c=function(){t=r.container.selectAll(\"g.\"+l).data([0]);var a=t.enter().append(\"g\").classed(l,!0).style({\"pointer-events\":\"none\",display:\"none\"});return 
n=a.append(\"path\").style({fill:\"white\",\"fill-opacity\":.9}).attr({d:\"M0 0\"}),e=a.append(\"text\").attr({dx:r.padding+s,dy:.3*+r.fontSize}),c};return c.text=function(o){var i=a.hsl(r.color).l,l=i>=.5?\"#aaa\":\"white\",u=i>=.5?\"black\":\"white\",f=o||\"\";e.style({fill:u,\"font-size\":r.fontSize+\"px\"}).text(f);var d=r.padding,h=e.node().getBBox(),p={fill:r.color,stroke:l,\"stroke-width\":\"2px\"},g=h.width+2*d+s,v=h.height+2*d;return n.attr({d:\"M\"+[[s,-v/2],[s,-v/4],[r.hasTick?0:s,0],[s,v/4],[s,v/2],[g,v/2],[g,-v/2]].join(\"L\")+\"Z\"}).style(p),t.attr({transform:\"translate(\"+[s,-v/2+2*d]+\")\"}),t.style({display:\"block\"}),c},c.move=function(e){return t?(t.attr({transform:\"translate(\"+[e[0],e[1]]+\")\"}).style({display:\"block\"}),c):void 0},c.hide=function(){return t?(t.style({display:\"none\"}),c):void 0},c.show=function(){return t?(t.style({display:\"block\"}),c):void 0},c.config=function(t){return i(r,t),c},c},o.tooltipPanel.uid=1,o.adapter={},o.adapter.plotly=function(){var t={};return t.convert=function(t,e){var n={};if(t.data&&(n.data=t.data.map(function(t,n){var r=i({},t),a=[[r,[\"marker\",\"color\"],[\"color\"]],[r,[\"marker\",\"opacity\"],[\"opacity\"]],[r,[\"marker\",\"line\",\"color\"],[\"strokeColor\"]],[r,[\"marker\",\"line\",\"dash\"],[\"strokeDash\"]],[r,[\"marker\",\"line\",\"width\"],[\"strokeSize\"]],[r,[\"marker\",\"symbol\"],[\"dotType\"]],[r,[\"marker\",\"size\"],[\"dotSize\"]],[r,[\"marker\",\"barWidth\"],[\"barWidth\"]],[r,[\"line\",\"interpolation\"],[\"lineInterpolation\"]],[r,[\"showlegend\"],[\"visibleInLegend\"]]];return a.forEach(function(t,n){o.util.translator.apply(null,t.concat(e))}),e||delete r.marker,e&&delete r.groupId,e?(\"LinePlot\"===r.geometry?(r.type=\"scatter\",r.dotVisible===!0?(delete r.dotVisible,r.mode=\"lines+markers\"):r.mode=\"lines\"):\"DotPlot\"===r.geometry?(r.type=\"scatter\",r.mode=\"markers\"):\"AreaChart\"===r.geometry?r.type=\"area\":\"BarChart\"===r.geometry&&(r.type=\"bar\"),delete r.geometry):(\"scatter\"===r.type?\"lines\"===r.mode?r.geometry=\"LinePlot\":\"markers\"===r.mode?r.geometry=\"DotPlot\":\"lines+markers\"===r.mode&&(r.geometry=\"LinePlot\",r.dotVisible=!0):\"area\"===r.type?r.geometry=\"AreaChart\":\"bar\"===r.type&&(r.geometry=\"BarChart\"),delete r.mode,delete r.type),r}),!e&&t.layout&&\"stack\"===t.layout.barmode)){var r=o.util.duplicates(n.data.map(function(t,e){return t.geometry}));n.data.forEach(function(t,e){var a=r.indexOf(t.geometry);-1!=a&&(n.data[e].groupId=a)})}if(t.layout){var 
l=i({},t.layout),s=[[l,[\"plot_bgcolor\"],[\"backgroundColor\"]],[l,[\"showlegend\"],[\"showLegend\"]],[l,[\"radialaxis\"],[\"radialAxis\"]],[l,[\"angularaxis\"],[\"angularAxis\"]],[l.angularaxis,[\"showline\"],[\"gridLinesVisible\"]],[l.angularaxis,[\"showticklabels\"],[\"labelsVisible\"]],[l.angularaxis,[\"nticks\"],[\"ticksCount\"]],[l.angularaxis,[\"tickorientation\"],[\"tickOrientation\"]],[l.angularaxis,[\"ticksuffix\"],[\"ticksSuffix\"]],[l.angularaxis,[\"range\"],[\"domain\"]],[l.angularaxis,[\"endpadding\"],[\"endPadding\"]],[l.radialaxis,[\"showline\"],[\"gridLinesVisible\"]],[l.radialaxis,[\"tickorientation\"],[\"tickOrientation\"]],[l.radialaxis,[\"ticksuffix\"],[\"ticksSuffix\"]],[l.radialaxis,[\"range\"],[\"domain\"]],[l.angularAxis,[\"showline\"],[\"gridLinesVisible\"]],[l.angularAxis,[\"showticklabels\"],[\"labelsVisible\"]],[l.angularAxis,[\"nticks\"],[\"ticksCount\"]],[l.angularAxis,[\"tickorientation\"],[\"tickOrientation\"]],[l.angularAxis,[\"ticksuffix\"],[\"ticksSuffix\"]],[l.angularAxis,[\"range\"],[\"domain\"]],[l.angularAxis,[\"endpadding\"],[\"endPadding\"]],[l.radialAxis,[\"showline\"],[\"gridLinesVisible\"]],[l.radialAxis,[\"tickorientation\"],[\"tickOrientation\"]],[l.radialAxis,[\"ticksuffix\"],[\"ticksSuffix\"]],[l.radialAxis,[\"range\"],[\"domain\"]],[l.font,[\"outlinecolor\"],[\"outlineColor\"]],[l.legend,[\"traceorder\"],[\"reverseOrder\"]],[l,[\"labeloffset\"],[\"labelOffset\"]],[l,[\"defaultcolorrange\"],[\"defaultColorRange\"]]];if(s.forEach(function(t,n){o.util.translator.apply(null,t.concat(e))}),e?(\"undefined\"!=typeof l.tickLength&&(l.angularaxis.ticklen=l.tickLength,delete l.tickLength),l.tickColor&&(l.angularaxis.tickcolor=l.tickColor,delete l.tickColor)):(l.angularAxis&&\"undefined\"!=typeof l.angularAxis.ticklen&&(l.tickLength=l.angularAxis.ticklen),l.angularAxis&&\"undefined\"!=typeof l.angularAxis.tickcolor&&(l.tickColor=l.angularAxis.tickcolor)),l.legend&&\"boolean\"!=typeof l.legend.reverseOrder&&(l.legend.reverseOrder=\"normal\"!=l.legend.reverseOrder),l.legend&&\"boolean\"==typeof l.legend.traceorder&&(l.legend.traceorder=l.legend.traceorder?\"reversed\":\"normal\",delete l.legend.reverseOrder),l.margin&&\"undefined\"!=typeof l.margin.t){var c=[\"t\",\"r\",\"b\",\"l\",\"pad\"],u=[\"top\",\"right\",\"bottom\",\"left\",\"pad\"],f={};a.entries(l.margin).forEach(function(t,e){f[u[c.indexOf(t.key)]]=t.value}),l.margin=f}e&&(delete l.needsEndSpacing,delete l.minorTickColor,delete l.minorTicks,delete l.angularaxis.ticksCount,delete l.angularaxis.ticksCount,delete l.angularaxis.ticksStep,delete l.angularaxis.rewriteTicks,delete l.angularaxis.nticks,delete l.radialaxis.ticksCount,delete l.radialaxis.ticksCount,delete l.radialaxis.ticksStep,delete l.radialaxis.rewriteTicks,delete l.radialaxis.nticks),n.layout=l}return n},t}},{\"../../plotly\":107,\"./micropolar_manager\":134,d3:9}],134:[function(t,e,n){\"use strict\";var r=t(\"../../plotly\"),a=t(\"d3\"),o=t(\"./undo_manager\"),i=e.exports={},l=r.Lib.extendDeepAll;i.framework=function(t){function e(e,o){return o&&(f=o),a.select(a.select(f).node().parentNode).selectAll(\".svg-container>*:not(.chart-root)\").remove(),n=n?l(n,e):e,c||(c=r.micropolar.Axis()),u=r.micropolar.adapter.plotly().convert(n),c.config(u).render(f),t.data=n.data,t.layout=n.layout,i.fillLayout(t),n}var n,s,c,u,f,d=new o;return e.isPolar=!0,e.svg=function(){return c.svg()},e.getConfig=function(){return n},e.getLiveConfig=function(){return 
r.micropolar.adapter.plotly().convert(c.getLiveConfig(),!0)},e.getLiveScales=function(){return{t:c.angularScale(),r:c.radialScale()}},e.setUndoPoint=function(){var t=this,e=r.micropolar.util.cloneJson(n);!function(e,n){d.add({undo:function(){n&&t(n)},redo:function(){t(e)}})}(e,s),s=r.micropolar.util.cloneJson(e)},e.undo=function(){d.undo()},e.redo=function(){d.redo()},e},i.fillLayout=function(t){var e=a.select(t).selectAll(\".plot-container\"),n=e.selectAll(\".svg-container\"),o=t.framework&&t.framework.svg&&t.framework.svg(),i={width:800,height:600,paper_bgcolor:r.Color.background,_container:e,_paperdiv:n,_paper:o};t._fullLayout=l(i,t.layout)}},{\"../../plotly\":107,\"./undo_manager\":135,d3:9}],135:[function(t,e,n){\"use strict\";e.exports=function(){function t(t,e){return t?(a=!0,t[e](),a=!1,this):this}var e,n=[],r=-1,a=!1;return{add:function(t){return a?this:(n.splice(r+1,n.length-r),n.push(t),r=n.length-1,this)},setCallback:function(t){e=t},undo:function(){var a=n[r];return a?(t(a,\"undo\"),r-=1,e&&e(a.undo),thi"
+,
+"s):this},redo:function(){var a=n[r+1];return a?(t(a,\"redo\"),r+=1,e&&e(a.redo),this):this},clear:function(){n=[],r=-1},hasUndo:function(){return-1!==r},hasRedo:function(){return r<n.length-1},getCommands:function(){return n},getPreviousCommand:function(){return n[r-1]},getIndex:function(){return r}}}},{}],136:[function(t,e,n){\"use strict\";function r(t){var e;switch(t){case\"themes__thumb\":e={autosize:!0,width:150,height:150,title:\"\",showlegend:!1,margin:{l:5,r:5,t:5,b:5,pad:0},annotations:[]};break;case\"thumbnail\":e={title:\"\",hidesources:!0,showlegend:!1,borderwidth:0,bordercolor:\"\",margin:{l:1,r:1,t:1,b:1,pad:0},annotations:[]};break;default:e={}}return e}function a(t){var e=[\"xaxis\",\"yaxis\",\"zaxis\"];return e.indexOf(t.slice(0,5))>-1}var o=t(\"../plotly\"),i=o.Lib.extendFlat,l=o.Lib.extendDeep;e.exports=function(t,e){t.framework&&t.framework.isPolar&&(t=t.framework.getConfig());var n,s=t.data,c=t.layout,u=l([],s),f=l({},c,r(e.tileClass));if(e.width&&(f.width=e.width),e.height&&(f.height=e.height),\"thumbnail\"===e.tileClass||\"themes__thumb\"===e.tileClass){f.annotations=[];var d=Object.keys(f);for(n=0;n<d.length;n++)a(d[n])&&(f[d[n]].title=\"\");for(n=0;n<u.length;n++){var h=u[n];h.showscale=!1,h.marker&&(h.marker.showscale=!1),\"pie\"===h.type&&(h.textposition=\"none\")}}if(Array.isArray(e.annotations))for(n=0;n<e.annotations.length;n++)f.annotations.push(e.annotations[n]);var p=o.Plots.getSubplotIds(f,\"gl3d\");if(p.length){var g={};for(\"thumbnail\"===e.tileClass&&(g={title:\"\",showaxeslabels:!1,showticklabels:!1,linetickenable:!1}),n=0;n<p.length;n++){var v=p[n];i(f[v].xaxis,g),i(f[v].yaxis,g),i(f[v].zaxis,g),f[v]._scene=null}}var m=document.createElement(\"div\");e.tileClass&&(m.className=e.tileClass);var y={td:m,layout:f,data:u,config:{staticPlot:void 0===e.staticPlot?!0:e.staticPlot,plotGlPixelRatio:void 0===e.plotGlPixelRatio?2:e.plotGlPixelRatio,displaylogo:e.displaylogo||!1,showLink:e.showLink||!1,showTips:e.showTips||!1}};return\"transparent\"!==e.setBackground&&(y.config.setBackground=e.setBackground||\"opaque\"),y.td.defaultLayout=r(e.tileClass),y}},{\"../plotly\":107}],137:[function(t,e,n){\"use strict\";function r(t,e){return e=e||{},e.format=e.format||\"png\",new Promise(function(n,r){t._snapshotInProgress&&r(new Error(\"Snapshotting already in progress.\")),o.isIE()&&\"svg\"!==e.format&&r(new Error(\"Sorry IE does not support downloading from canvas. 
Try {format:'svg'} instead.\")),t._snapshotInProgress=!0;var l=a(t,e),s=e.filename||t.fn||\"newplot\";s+=\".\"+e.format,l.then(function(e){return t._snapshotInProgress=!1,i(e,s)}).then(function(t){n(t)}).catch(function(e){t._snapshotInProgress=!1,r(e)})})}var a=t(\"../plot_api/to_image\"),o=t(\"../lib\"),i=t(\"./filesaver\");e.exports=r},{\"../lib\":89,\"../plot_api/to_image\":105,\"./filesaver\":138}],138:[function(t,e,n){\"use strict\";var r=function(t,e){var n=document.createElement(\"a\"),r=\"download\"in n,a=/Version\\/[\\d\\.]+.*Safari/.test(navigator.userAgent),o=new Promise(function(o,i){\"undefined\"!=typeof navigator&&/MSIE [1-9]\\./.test(navigator.userAgent)&&i(new Error(\"IE < 10 unsupported\")),a&&(document.location.href=\"data:application/octet-stream\"+t.slice(t.search(/[,;]/)),o(e)),e||(e=\"download\"),r&&(n.href=t,n.download=e,document.body.appendChild(n),n.click(),document.body.removeChild(n),o(e)),\"undefined\"!=typeof navigator&&navigator.msSaveBlob&&(navigator.msSaveBlob(new Blob([t]),e),o(e)),i(new Error(\"download error\"))});return o};e.exports=r},{}],139:[function(t,e,n){\"use strict\";function r(t){return t._has&&(t._has(\"gl3d\")||t._has(\"gl2d\"))?500:0}function a(t){return t.data&&t.data[0]&&t.data[0].r?void 0:function(){(t.calcdata||[]).forEach(function(t){t[0]&&t[0].t&&t[0].t.cb&&t[0].t.cb()})}}var o={getDelay:r,getRedrawFunc:a,clone:t(\"./cloneplot\"),toSVG:t(\"./tosvg\"),svgToImg:t(\"./svgtoimg\"),toImage:t(\"./toimage\"),downloadImage:t(\"./download\")};e.exports=o},{\"./cloneplot\":136,\"./download\":137,\"./svgtoimg\":140,\"./toimage\":141,\"./tosvg\":142}],140:[function(t,e,n){\"use strict\";function r(t){var e=t.emitter||new o,n=new Promise(function(r,o){var i=window.Image,l=t.svg,s=t.format||\"png\";if(a.isIE()&&(l=l.replace(/\"/gi,\"'\"),l=l.replace(/(\\('#)(.*)('\\))/gi,'(\"$2\")'),l=l.replace(/(\\\\')/gi,'\"'),\"svg\"!==s)){var c=new Error(\"Sorry IE does not support downloading from canvas. 
Try {format:'svg'} instead.\");return o(c),t.promise?n:e.emit(\"error\",c)}var u=t.canvas,f=u.getContext(\"2d\"),d=new i,h=\"data:image/svg+xml,\"+encodeURIComponent(l);u.height=t.height||150,u.width=t.width||300,d.onload=function(){var n;switch(\"svg\"!==s&&f.drawImage(d,0,0),s){case\"jpeg\":n=u.toDataURL(\"image/jpeg\");break;case\"png\":n=u.toDataURL(\"image/png\");break;case\"webp\":n=u.toDataURL(\"image/webp\");break;case\"svg\":n=h;break;default:if(o(new Error(\"Image format is not jpeg, png or svg\")),!t.promise)return e.emit(\"error\",\"Image format is not jpeg, png or svg\")}r(n),t.promise||e.emit(\"success\",n)},d.onerror=function(n){return o(n),t.promise?void 0:e.emit(\"error\",n)},d.src=h});return t.promise?n:e}var a=t(\"../lib\"),o=t(\"events\").EventEmitter;e.exports=r},{\"../lib\":89,events:7}],141:[function(t,e,n){\"use strict\";function r(t,e){function n(){var t=r.getDelay(c._fullLayout);setTimeout(function(){var t=o.Snapshot.toSVG(c),n=document.createElement(\"canvas\");n.id=i.randstr(),l=o.Snapshot.svgToImg({format:e.format,width:c._fullLayout.width,height:c._fullLayout.height,canvas:n,emitter:l,svg:t}),l.clean=function(){c&&document.body.removeChild(c)}},t)}var r=o.Snapshot,l=new a,s=r.clone(t,{format:\"png\"}),c=s.td;c.style.position=\"absolute\",c.style.left=\"-5000px\",document.body.appendChild(c);var u=r.getRedrawFunc(c);return o.plot(c,s.data,s.layout,s.config).then(u).then(n).catch(function(t){l.emit(\"error\",t)}),l}var a=t(\"events\").EventEmitter,o=t(\"../plotly\"),i=t(\"../lib\");e.exports=r},{\"../lib\":89,\"../plotly\":107,events:7}],142:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"../lib/svg_text_utils\"),o=t(\"../components/drawing\"),i=t(\"../components/color\"),l=t(\"../constants/xmlns_namespaces\");e.exports=function(t,e){var n,s=t._fullLayout,c=s._paper,u=s._toppaper;c.insert(\"rect\",\":first-child\").call(o.setRect,0,0,s.width,s.height).call(i.fill,s.paper_bgcolor);var f=s._basePlotModules||[];for(n=0;n<f.length;n++){var d=f[n];d.toSVG&&d.toSVG(t)}if(u){var h=u.node().childNodes,p=Array.prototype.slice.call(h);for(n=0;n<p.length;n++){var g=p[n];g.childNodes.length&&c.node().appendChild(g)}}s._draggers&&s._draggers.remove(),c.node().style.background=\"\",c.selectAll(\"text\").attr(\"data-unformatted\",null).each(function(){var t=r.select(this);if(\"hidden\"===t.style(\"visibility\"))return void t.remove();var e=t.style(\"font-family\");e&&-1!==e.indexOf('\"')&&t.style(\"font-family\",e.replace(/\"/g,\"TOBESTRIPPED\"))}),\"pdf\"!==e&&\"eps\"!==e||c.selectAll(\"#MathJax_SVG_glyphs path\").attr(\"stroke-width\",0),c.node().setAttributeNS(l.xmlns,\"xmlns\",l.svg),c.node().setAttributeNS(l.xmlns,\"xmlns:xlink\",l.xlink);var v=(new window.XMLSerializer).serializeToString(c.node());return v=a.html_entity_decode(v),v=a.xml_entity_encode(v),v=v.replace(/(\"TOBESTRIPPED)|(TOBESTRIPPED\")/g,\"'\")}},{\"../components/color\":18,\"../components/drawing\":41,\"../constants/xmlns_namespaces\":82,\"../lib/svg_text_utils\":100,d3:9}],143:[function(t,e,n){\"use strict\";var r=t(\"../../lib\").mergeArray;e.exports=function(t){var e=t[0].trace,n=e.marker,a=n.line;r(e.text,t,\"tx\"),r(n.opacity,t,\"mo\"),r(n.color,t,\"mc\"),r(a.color,t,\"mlc\"),r(a.width,t,\"mlw\")}},{\"../../lib\":89}],144:[function(t,e,n){\"use strict\";var 
r=t(\"../scatter/attributes\"),a=t(\"../../components/colorscale/color_attributes\"),o=t(\"../../lib/extend\").extendFlat,i=r.marker,l=i.line,s=o({},l.width,{dflt:0}),c=o({},{width:s},a(\"marker.line\")),u=o({},{showscale:i.showscale,line:c},a(\"marker\"));e.exports={x:r.x,x0:r.x0,dx:r.dx,y:r.y,y0:r.y0,dy:r.dy,text:r.text,orientation:{valType:\"enumerated\",values:[\"v\",\"h\"]},marker:u,r:r.r,t:r.t,_nestedModules:{error_y:\"ErrorBars\",error_x:\"ErrorBars\",\"marker.colorbar\":\"Colorbar\"},_deprecated:{bardir:{valType:\"enumerated\",values:[\"v\",\"h\"]}}}},{\"../../components/colorscale/color_attributes\":26,\"../../lib/extend\":88,\"../scatter/attributes\":167}],145:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"../../plots/cartesian/axes\"),o=t(\"../../components/colorscale/has_colorscale\"),i=t(\"../../components/colorscale/calc\");e.exports=function(t,e){var n,l,s,c=a.getFromId(t,e.xaxis||\"x\"),u=a.getFromId(t,e.yaxis||\"y\"),f=e.orientation||(e.x&&!e.y?\"h\":\"v\");\"h\"===f?(l=c.makeCalcdata(e,\"x\"),n=u.makeCalcdata(e,\"y\")):(l=u.makeCalcdata(e,\"y\"),n=c.makeCalcdata(e,\"x\"));var d=Math.min(n.length,l.length),h=[];for(s=0;d>s;s++)r(n[s])&&h.push({p:n[s],s:l[s],b:0});return o(e,\"marker\")&&i(e,e.marker.color,\"marker\",\"c\"),o(e,\"marker.line\")&&i(e,e.marker.line.color,\"marker.line\",\"c\"),h}},{\"../../components/colorscale/calc\":25,\"../../components/colorscale/has_colorscale\":31,\"../../plots/cartesian/axes\":110,\"fast-isnumeric\":11}],146:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"../../components/color\"),o=t(\"../scatter/xy_defaults\"),i=t(\"../bar/style_defaults\"),l=t(\"../../components/errorbars/defaults\"),s=t(\"./attributes\");e.exports=function(t,e,n,c){function u(n,a){return r.coerce(t,e,s,n,a)}var f=o(t,e,u);return f?(u(\"orientation\",e.x&&!e.y?\"h\":\"v\"),u(\"text\"),i(t,e,u,n,c),l(t,e,a.defaultLine,{axis:\"y\"}),void l(t,e,a.defaultLine,{axis:\"x\",inherit:\"y\"})):void(e.visible=!1)}},{\"../../components/color\":18,\"../../components/errorbars/defaults\":46,\"../../lib\":89,\"../bar/style_defaults\":154,\"../scatter/xy_defaults\":188,\"./attributes\":144}],147:[function(t,e,n){\"use strict\";var r=t(\"../../plots/cartesian/graph_interact\"),a=t(\"../../components/errorbars\"),o=t(\"../../components/color\");e.exports=function(t,e,n,i){var l,s=t.cd,c=s[0].trace,u=s[0].t,f=t.xa,d=t.ya,h=\"closest\"===i?u.barwidth/2:u.dbar*(1-f._gd._fullLayout.bargap)/2;l=\"closest\"!==i?function(t){return t.p}:\"h\"===c.orientation?function(t){return t.y}:function(t){return t.x};var p,g;\"h\"===c.orientation?(p=function(t){return r.inbox(t.b-e,t.x-e)+(t.x-e)/(t.x-t.b)},g=function(t){var e=l(t)-n;ret"
+,
+"urn r.inbox(e-h,e+h)}):(g=function(t){return r.inbox(t.b-n,t.y-n)+(t.y-n)/(t.y-t.b)},p=function(t){var n=l(t)-e;return r.inbox(n-h,n+h)});var v=r.getDistanceFunction(i,p,g);if(r.getClosest(s,v,t),t.index!==!1){var m=s[t.index],y=m.mcc||c.marker.color,x=m.mlcc||c.marker.line.color,b=m.mlw||c.marker.line.width;return o.opacity(y)?t.color=y:o.opacity(x)&&b&&(t.color=x),\"h\"===c.orientation?(t.x0=t.x1=f.c2p(m.x,!0),t.xLabelVal=m.s,t.y0=d.c2p(l(m)-h,!0),t.y1=d.c2p(l(m)+h,!0),t.yLabelVal=m.p):(t.y0=t.y1=d.c2p(m.y,!0),t.yLabelVal=m.s,t.x0=f.c2p(l(m)-h,!0),t.x1=f.c2p(l(m)+h,!0),t.xLabelVal=m.p),m.tx&&(t.text=m.tx),a.hoverInfo(m,c,t),[t]}}},{\"../../components/color\":18,\"../../components/errorbars\":47,\"../../plots/cartesian/graph_interact\":117}],148:[function(t,e,n){\"use strict\";var r={};r.attributes=t(\"./attributes\"),r.layoutAttributes=t(\"./layout_attributes\"),r.supplyDefaults=t(\"./defaults\"),r.supplyLayoutDefaults=t(\"./layout_defaults\"),r.calc=t(\"./calc\"),r.setPositions=t(\"./set_positions\"),r.colorbar=t(\"../scatter/colorbar\"),r.arraysToCalcdata=t(\"./arrays_to_calcdata\"),r.plot=t(\"./plot\"),r.style=t(\"./style\"),r.hoverPoints=t(\"./hover\"),r.moduleType=\"trace\",r.name=\"bar\",r.basePlotModule=t(\"../../plots/cartesian\"),r.categories=[\"cartesian\",\"bar\",\"oriented\",\"markerColorscale\",\"errorBarsOK\",\"showLegend\"],r.meta={},e.exports=r},{\"../../plots/cartesian\":118,\"../scatter/colorbar\":170,\"./arrays_to_calcdata\":143,\"./attributes\":144,\"./calc\":145,\"./defaults\":146,\"./hover\":147,\"./layout_attributes\":149,\"./layout_defaults\":150,\"./plot\":151,\"./set_positions\":152,\"./style\":153}],149:[function(t,e,n){\"use strict\";e.exports={barmode:{valType:\"enumerated\",values:[\"stack\",\"group\",\"overlay\",\"relative\"],dflt:\"group\"},barnorm:{valType:\"enumerated\",values:[\"\",\"fraction\",\"percent\"],dflt:\"\"},bargap:{valType:\"number\",min:0,max:1},bargroupgap:{valType:\"number\",min:0,max:1,dflt:0}}},{}],150:[function(t,e,n){\"use strict\";var r=t(\"../../plots/plots\"),a=t(\"../../plots/cartesian/axes\"),o=t(\"../../lib\"),i=t(\"./layout_attributes\");\n"
+,
+"e.exports=function(t,e,n){function l(n,r){return o.coerce(t,e,i,n,r)}for(var s=!1,c=!1,u=!1,f={},d=0;d<n.length;d++){var h=n[d];if(r.traceIs(h,\"bar\")){if(s=!0,\"overlay\"!==t.barmode&&\"stack\"!==t.barmode){var p=h.xaxis+h.yaxis;f[p]&&(u=!0),f[p]=!0}if(h.visible&&\"histogram\"===h.type){var g=a.getFromId({_fullLayout:e},h[\"v\"===h.orientation?\"xaxis\":\"yaxis\"]);\"category\"!==g.type&&(c=!0)}}}if(s){var v=l(\"barmode\");\"overlay\"!==v&&l(\"barnorm\"),l(\"bargap\",c&&!u?0:.2),l(\"bargroupgap\")}}},{\"../../lib\":89,\"../../plots/cartesian/axes\":110,\"../../plots/plots\":130,\"./layout_attributes\":149}],151:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"fast-isnumeric\"),o=t(\"../../lib\"),i=t(\"../../components/color\"),l=t(\"../../components/errorbars\"),s=t(\"./arrays_to_calcdata\");e.exports=function(t,e,n){var c=e.x(),u=e.y(),f=t._fullLayout,d=e.plot.select(\".barlayer\").selectAll(\"g.trace.bars\").data(n).enter().append(\"g\").attr(\"class\",\"trace bars\");d.append(\"g\").attr(\"class\",\"points\").each(function(e){var n=e[0].t,l=e[0].trace;s(e),r.select(this).selectAll(\"path\").data(o.identity).enter().append(\"path\").each(function(e){function o(t){return 0===f.bargap&&0===f.bargroupgap?r.round(Math.round(t)-m,2):t}function s(t,e){return Math.abs(t-e)>=2?o(t):t>e?Math.ceil(t):Math.floor(t)}var d,h,p,g;if(\"h\"===l.orientation?(p=u.c2p(n.poffset+e.p,!0),g=u.c2p(n.poffset+e.p+n.barwidth,!0),d=c.c2p(e.b,!0),h=c.c2p(e.s+e.b,!0)):(d=c.c2p(n.poffset+e.p,!0),h=c.c2p(n.poffset+e.p+n.barwidth,!0),g=u.c2p(e.s+e.b,!0),p=u.c2p(e.b,!0)),!(a(d)&&a(h)&&a(p)&&a(g)&&d!==h&&p!==g))return void r.select(this).remove();var v=(e.mlw+1||l.marker.line.width+1||(e.trace?e.trace.marker.line.width:0)+1)-1,m=r.round(v/2%1,2);if(!t._context.staticPlot){var y=i.opacity(e.mc||l.marker.color),x=1>y||v>.01?o:s;d=x(d,h),h=x(h,d),p=x(p,g),g=x(g,p)}r.select(this).attr(\"d\",\"M\"+d+\",\"+p+\"V\"+g+\"H\"+h+\"V\"+p+\"Z\")})}),d.call(l.plot,e)}},{\"../../components/color\":18,\"../../components/errorbars\":47,\"../../lib\":89,\"./arrays_to_calcdata\":143,d3:9,\"fast-isnumeric\":11}],152:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\"),a=t(\"../../plots/plots\"),o=t(\"../../plots/cartesian/axes\"),i=t(\"../../lib\");e.exports=function(t,e){var n,l,s=t._fullLayout,c=e.x(),u=e.y();[\"v\",\"h\"].forEach(function(f){function d(e){function n(t){t[p]=t.p+d}var r=[];e.forEach(function(e){t.calcdata[e].forEach(function(t){r.push(t.p)})});var a=i.distinctVals(r),l=a.vals,c=a.minDiff,u=!1,f=[];\"group\"===s.barmode&&e.forEach(function(e){u||(t.calcdata[e].forEach(function(t){u||f.forEach(function(e){Math.abs(t.p-e)<c&&(u=!0)})}),u||t.calcdata[e].forEach(function(t){f.push(t.p)}))}),o.minDtick(v,c,l[0],u),o.expand(v,l,{vpad:c/2}),c*=1-s.bargap,u&&(c/=h.length);for(var d,g=0;g<e.length;g++){var m=t.calcdata[e[g]][0].t;m.barwidth=c*(1-s.bargroupgap),m.poffset=((u?(2*g+1-e.length)*c:0)-m.barwidth)/2,m.dbar=a.minDiff,d=m.poffset+m.barwidth/2,t.calcdata[e[g]].forEach(n)}}var h=[],p={v:\"x\",h:\"y\"}[f],g={v:\"y\",h:\"x\"}[f],v=e[p](),m=e[g]();if(t._fullData.forEach(function(t,e){t.visible===!0&&a.traceIs(t,\"bar\")&&t.orientation===f&&t.xaxis===c._id&&t.yaxis===u._id&&h.push(e)}),h.length){\"overlay\"===s.barmode?h.forEach(function(t){d([t])}):d(h);var y=\"stack\"===s.barmode,x=\"relative\"===s.barmode,b=s.barnorm;if(y||x||b){var 
_,w,k,M=m.l2c(m.c2l(0)),A=M,L={},T=t.calcdata[h[0]][0].t.barwidth/100,z=0,S=!0;for(n=0;n<h.length;n++)for(w=t.calcdata[h[n]],l=0;l<w.length;l++)if(r(w[l].s)){z=Math.round(w[l].p/T),x&&w[l].s<0&&(z=\"-\"+z);var E=L[z]||0;(y||x)&&(w[l].b=E),_=w[l].b+w[l].s,L[z]=E+w[l].s,(y||x)&&(w[l][g]=_,!b&&r(m.c2l(_))&&(M=Math.max(M,_),A=Math.min(A,_)))}if(b){var C=\"fraction\"===b?1:100,O=!1,P=C/1e9;for(S=!1,A=0,M=y?C:0,n=0;n<h.length;n++)for(w=t.calcdata[h[n]],l=0;l<w.length;l++)O=x&&w[l].s<0,z=Math.round(w[l].p/T),O&&(z=\"-\"+z),k=C/L[z],O&&(k*=-1),w[l].b*=k,w[l].s*=k,_=w[l].b+w[l].s,w[l][g]=_,r(m.c2l(_))&&(A-P>_&&(S=!0,A=_),_>M+P&&(S=!0,M=_))}o.expand(m,[A,M],{tozero:!0,padded:S})}else{var N=function(t){return t[g]=t.s,t.s};for(n=0;n<h.length;n++)o.expand(m,t.calcdata[h[n]].map(N),{tozero:!0,padded:!0})}}})}},{\"../../lib\":89,\"../../plots/cartesian/axes\":110,\"../../plots/plots\":130,\"fast-isnumeric\":11}],153:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"../../components/color\"),o=t(\"../../components/drawing\"),i=t(\"../../components/errorbars\");e.exports=function(t){var e=r.select(t).selectAll(\"g.trace.bars\"),n=e.size(),l=t._fullLayout;e.style(\"opacity\",function(t){return t[0].trace.opacity}).each(function(t){(\"stack\"===l.barmode&&n>1||0===l.bargap&&0===l.bargroupgap&&!t[0].trace.marker.line.width)&&r.select(this).attr(\"shape-rendering\",\"crispEdges\")}),e.selectAll(\"g.points\").each(function(t){var e=t[0].trace,n=e.marker,i=n.line,l=(e._input||{}).marker||{},s=o.tryColorscale(n,l,\"\"),c=o.tryColorscale(n,l,\"line.\");r.select(this).selectAll(\"path\").each(function(t){var e,o,l=(t.mlw+1||i.width+1)-1,u=r.select(this);e=\"mc\"in t?t.mcc=s(t.mc):Array.isArray(n.color)?a.defaultLine:n.color,u.style(\"stroke-width\",l+\"px\").call(a.fill,e),l&&(o=\"mlc\"in t?t.mlcc=c(t.mlc):Array.isArray(i.color)?a.defaultLine:i.color,u.call(a.stroke,o))})}),e.call(i.style)}},{\"../../components/color\":18,\"../../components/drawing\":41,\"../../components/errorbars\":47,d3:9}],154:[function(t,e,n){\"use strict\";var r=t(\"../../components/color\"),a=t(\"../../components/colorscale/has_colorscale\"),o=t(\"../../components/colorscale/defaults\");e.exports=function(t,e,n,i,l){n(\"marker.color\",i),a(t,\"marker\")&&o(t,e,l,n,{prefix:\"marker.\",cLetter:\"c\"}),n(\"marker.line.color\",r.defaultLine),a(t,\"marker.line\")&&o(t,e,l,n,{prefix:\"marker.line.\",cLetter:\"c\"}),n(\"marker.line.width\")}},{\"../../components/color\":18,\"../../components/colorscale/defaults\":28,\"../../components/colorscale/has_colorscale\":31}],155:[function(t,e,n){\"use strict\";var 
r=t(\"../../components/color/attributes\"),a=t(\"../../plots/font_attributes\"),o=t(\"../../plots/attributes\"),i=t(\"../../lib/extend\").extendFlat;e.exports={labels:{valType:\"data_array\"},label0:{valType:\"number\",dflt:0},dlabel:{valType:\"number\",dflt:1},values:{valType:\"data_array\"},marker:{colors:{valType:\"data_array\"},line:{color:{valType:\"color\",dflt:r.defaultLine,arrayOk:!0},width:{valType:\"number\",min:0,dflt:0,arrayOk:!0}}},text:{valType:\"data_array\"},scalegroup:{valType:\"string\",dflt:\"\"},textinfo:{valType:\"flaglist\",flags:[\"label\",\"text\",\"value\",\"percent\"],extras:[\"none\"]},hoverinfo:i({},o.hoverinfo,{flags:[\"label\",\"text\",\"value\",\"percent\",\"name\"]}),textposition:{valType:\"enumerated\",values:[\"inside\",\"outside\",\"auto\",\"none\"],dflt:\"auto\",arrayOk:!0},textfont:i({},a,{}),insidetextfont:i({},a,{}),outsidetextfont:i({},a,{}),domain:{x:{valType:\"info_array\",items:[{valType:\"number\",min:0,max:1},{valType:\"number\",min:0,max:1}],dflt:[0,1]},y:{valType:\"info_array\",items:[{valType:\"number\",min:0,max:1},{valType:\"number\",min:0,max:1}],dflt:[0,1]}},hole:{valType:\"number\",min:0,max:1,dflt:0},sort:{valType:\"boolean\",dflt:!0},direction:{valType:\"enumerated\",values:[\"clockwise\",\"counterclockwise\"],dflt:\"counterclockwise\"},rotation:{valType:\"number\",min:-360,max:360,dflt:0},pull:{valType:\"number\",min:0,max:1,dflt:0,arrayOk:!0}}},{\"../../components/color/attributes\":17,\"../../lib/extend\":88,\"../../plots/attributes\":108,\"../../plots/font_attributes\":128}],156:[function(t,e,n){\"use strict\";function r(t,e){for(var n=[],r=0;r<t.length;r++){var a=t[r],o=a[0].trace;o._module===e&&o.visible===!0&&n.push(a)}return n}var a=t(\"../../plots/plots\");n.name=\"pie\",n.plot=function(t){var e=a.getModule(\"pie\"),n=r(t.calcdata,e);n.length&&e.plot(t,n)},n.clean=function(t,e,n,r){var a=r._has&&r._has(\"pie\"),o=e._has&&e._has(\"pie\");a&&!o&&r._pielayer.selectAll(\"g.trace\").remove()}},{\"../../plots/plots\":130}],157:[function(t,e,n){\"use strict\";function r(t){if(!s){var e=i.defaults;s=e.slice();var n;for(n=0;n<e.length;n++)s.push(o(e[n]).lighten(20).toHexString());for(n=0;n<i.defaults.length;n++)s.push(o(e[n]).darken(20).toHexString())}return s[t%s.length]}var a=t(\"fast-isnumeric\"),o=t(\"tinycolor2\"),i=t(\"../../components/color\"),l=t(\"./helpers\");e.exports=function(t,e){var n,s,c,u,f,d,h=e.values,p=e.labels,g=[],v=t._fullLayout,m=v._piecolormap,y={},x=!1,b=0,_=v.hiddenlabels||[];if(e.dlabel)for(p=new Array(h.length),n=0;n<h.length;n++)p[n]=String(e.label0+n*e.dlabel);for(n=0;n<h.length;n++)s=h[n],a(s)&&(s=+s,0>s||(c=p[n],void 0!==c&&\"\"!==c||(c=n),c=String(c),void 0===y[c]&&(y[c]=!0,u=o(e.marker.colors[n]),u.isValid()?(u=i.addOpacity(u,u.getAlpha()),m[c]||(m[c]=u)):m[c]?u=m[c]:(u=!1,x=!0),f=-1!==_.indexOf(c),f||(b+=s),g.push({v:s,label:c,color:u,i:n,hidden:f}))));if(e.sort&&g.sort(function(t,e){return e.v-t.v}),x)for(n=0;n<g.length;n++)d=g[n],d.color===!1&&(m[d.label]=d.color=r(v._piedefaultcolorcount),v._piedefaultcolorcount++);if(g[0]&&(g[0].vTotal=b),e.textinfo&&\"none\"!==e.textinfo){var w,k=-1!==e.textinfo.indexOf(\"label\"),M=-1!==e.textinfo.indexOf(\"text\"),A=-1!==e.textinfo.indexOf(\"value\"),L=-1!==e.textinfo.indexOf(\"percent\"),T=v.separators;for(n=0;n<g.length;n++)d=g[n],w=k?[d.label]:[],M&&e.text[d.i]&&w.push(e.text[d.i]),A&&w.push(l.formatPieValue(d.v,T)),L&&w.push(l.formatPiePercent(d.v/b,T)),d.text=w.join(\"<br>\")}return g};var 
s},{\"../../components/color\":18,\"./helpers\":159,\"fast-isnumeric\":11,tinycolor2:13}],158:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"./attributes\");e.exports=function(t,e,n,o){function i(n,o){return r.coerce(t,e,a,n,o)}var l=r.coerceFont,s=i(\"values\");if(!Array.isArray(s)||!s.length)return void(e.visible=!1);var c=i(\"labels\");Array.isArray(c)||(i(\"label0\"),i(\"dlabel\"));var u=i(\"marker.line.width\");u&&i(\"marker.line.color\");var f=i(\"marker.colors\");Array.isArray(f)||(e.marker.colors=[]),i(\"scalegroup\");var d=i(\"text\"),h=i(\"textinfo\",Array.isArray(d)?\"text+percent\":\"percent\");if(i(\"hoverinfo\",1===o._dataLength?\"label+text+value+percent\":void 0),h&&\"none\"!==h){var p=i(\"textposition\"),g=Array.isArray(p)||\"auto\"===p,v=g||\"inside\"===p,m=g||\"outside\"===p;if(v||m){var y=l(i,\"textfont\",o.font);v&&l(i,\"insidetextfont\",y),m&&l(i,\"outsidetextfont\",y)}}i(\"domain.x\"),i(\"domain.y\"),i(\"hol"
+,
+"e\"),i(\"sort\"),i(\"direction\"),i(\"rotation\"),i(\"pull\")}},{\"../../lib\":89,\"./attributes\":155}],159:[function(t,e,n){\"use strict\";var r=t(\"../../lib\");n.formatPiePercent=function(t,e){var n=(100*t).toPrecision(3);return-1!==n.lastIndexOf(\".\")&&(n=n.replace(/[.]?0+$/,\"\")),r.numSeparate(n,e)+\"%\"},n.formatPieValue=function(t,e){var n=t.toPrecision(10);return-1!==n.lastIndexOf(\".\")&&(n=n.replace(/[.]?0+$/,\"\")),r.numSeparate(n,e)}},{\"../../lib\":89}],160:[function(t,e,n){\"use strict\";var r={};r.attributes=t(\"./attributes\"),r.supplyDefaults=t(\"./defaults\"),r.supplyLayoutDefaults=t(\"./layout_defaults\"),r.layoutAttributes=t(\"./layout_attributes\"),r.calc=t(\"./calc\"),r.plot=t(\"./plot\"),r.style=t(\"./style\"),r.styleOne=t(\"./style_one\"),r.moduleType=\"trace\",r.name=\"pie\",r.basePlotModule=t(\"./base_plot\"),r.categories=[\"pie\",\"showLegend\"],r.meta={},e.exports=r},{\"./attributes\":155,\"./base_plot\":156,\"./calc\":157,\"./defaults\":158,\"./layout_attributes\":161,\"./layout_defaults\":162,\"./plot\":163,\"./style\":164,\"./style_one\":165}],161:[function(t,e,n){\"use strict\";e.exports={hiddenlabels:{valType:\"data_array\"}}},{}],162:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"./layout_attributes\");e.exports=function(t,e){function n(n,o){return r.coerce(t,e,a,n,o)}n(\"hiddenlabels\")}},{\"../../lib\":89,\"./layout_attributes\":161}],163:[function(t,e,n){\"use strict\";function r(t,e,n){var r=Math.sqrt(t.width*t.width+t.height*t.height),o=t.width/t.height,i=Math.PI*Math.min(e.v/n.vTotal,.5),l=1-n.trace.hole,s=a(e,n),c={scale:s*n.r*2/r,rCenter:1-s,rotate:0};if(c.scale>=1)return c;var u=o+1/(2*Math.tan(i)),f=n.r*Math.min(1/(Math.sqrt(u*u+.5)+u),l/(Math.sqrt(o*o+l/2)+o)),d={scale:2*f/t.height,rCenter:Math.cos(f/n.r)-f*o/n.r,rotate:(180/Math.PI*e.midangle+720)%180-90},h=1/o,p=h+1/(2*Math.tan(i)),g=n.r*Math.min(1/(Math.sqrt(p*p+.5)+p),l/(Math.sqrt(h*h+l/2)+h)),v={scale:2*g/t.width,rCenter:Math.cos(g/n.r)-g/o/n.r,rotate:(180/Math.PI*e.midangle+810)%180-90},m=v.scale>d.scale?v:d;return c.scale<1&&m.scale>c.scale?m:c}function a(t,e){if(t.v===e.vTotal&&!e.trace.hole)return 1;var n=Math.PI*Math.min(t.v/e.vTotal,.5);return Math.min(1/(1+1/Math.sin(n)),(1-e.trace.hole)/2)}function o(t,e){var n=e.pxmid[0],r=e.pxmid[1],a=t.width/2,o=t.height/2;return 0>n&&(a*=-1),0>r&&(o*=-1),{scale:1,rCenter:1,rotate:0,x:a+Math.abs(o)*(a>0?1:-1)/2,y:o/(1+n*n/(r*r)),outside:!0}}function i(t,e){function n(t,e){return t.pxmid[1]-e.pxmid[1]}function r(t,e){return e.pxmid[1]-t.pxmid[1]}function a(t,n){n||(n={});var r,a,o,l,d,h,g=n.labelExtraY+(i?n.yLabelMax:n.yLabelMin),v=i?t.yLabelMin:t.yLabelMax,m=i?t.yLabelMax:t.yLabelMin,y=t.cyFinal+c(t.px0[1],t.px1[1]),x=g-v;if(x*f>0&&(t.labelExtraY=x),Array.isArray(e.pull))for(a=0;a<p.length;a++)o=p[a],o===t||(e.pull[t.i]||0)>=e.pull[o.i]||((t.pxmid[1]-o.pxmid[1])*f>0?(l=o.cyFinal+c(o.px0[1],o.px1[1]),x=l-v-t.labelExtraY,x*f>0&&(t.labelExtraY+=x)):(m+t.labelExtraY-y)*f>0&&(r=3*u*Math.abs(a-p.indexOf(t)),d=o.cxFinal+s(o.px0[0],o.px1[0]),h=d+r-(t.cxFinal+t.pxmid[0])-t.labelExtraX,h*u>0&&(t.labelExtraX+=h)))}var o,i,l,s,c,u,f,d,h,p,g,v,m;for(i=0;2>i;i++)for(l=i?n:r,c=i?Math.max:Math.min,f=i?1:-1,o=0;2>o;o++){for(s=o?Math.max:Math.min,u=o?1:-1,d=t[i][o],d.sort(l),h=t[1-i][o],p=h.concat(d),v=[],g=0;g<d.length;g++)void 0!==d[g].yLabelMid&&v.push(d[g]);for(m=!1,g=0;i&&g<h.length;g++)if(void 0!==h[g].yLabelMid){m=h[g];break}for(g=0;g<v.length;g++){var y=g&&v[g-1];m&&!g&&(y=m),a(v[g],y)}}}function l(t,e){var 
n,r,a,o,i,l,s,u,f,d,h=[];for(a=0;a<t.length;a++){if(i=t[a][0],l=i.trace,n=e.w*(l.domain.x[1]-l.domain.x[0]),r=e.h*(l.domain.y[1]-l.domain.y[0]),s=l.tiltaxis*Math.PI/180,u=l.pull,Array.isArray(u))for(u=0,o=0;o<l.pull.length;o++)l.pull[o]>u&&(u=l.pull[o]);i.r=Math.min(n/c(l.tilt,Math.sin(s),l.depth),r/c(l.tilt,Math.cos(s),l.depth))/(2+2*u),i.cx=e.l+e.w*(l.domain.x[1]+l.domain.x[0])/2,i.cy=e.t+e.h*(2-l.domain.y[1]-l.domain.y[0])/2,l.scalegroup&&-1===h.indexOf(l.scalegroup)&&h.push(l.scalegroup)}for(o=0;o<h.length;o++){for(d=1/0,f=h[o],a=0;a<t.length;a++)i=t[a][0],i.trace.scalegroup===f&&(d=Math.min(d,i.r*i.r/i.vTotal));for(a=0;a<t.length;a++)i=t[a][0],i.trace.scalegroup===f&&(i.r=Math.sqrt(d*i.vTotal))}}function s(t){function e(t){var e=f.r*Math.sin(t),n=-f.r*Math.cos(t);return h?[e*(1-l*r*r)+n*i*l,e*i*l+n*(1-l*a*a),Math.sin(o)*(n*a-e*r)]:[e,n]}var n,r,a,o,i,l,s,c,u,f=t[0],d=f.trace,h=d.tilt,p=d.rotation*Math.PI/180,g=2*Math.PI/f.vTotal,v=\"px0\",m=\"px1\";if(\"counterclockwise\"===d.direction){for(s=0;s<t.length&&t[s].hidden;s++);if(s===t.length)return;p+=g*t[s].v,g*=-1,v=\"px1\",m=\"px0\"}for(h&&(o=h*Math.PI/180,n=d.tiltaxis*Math.PI/180,i=Math.sin(n)*Math.cos(n),l=1-Math.cos(o),r=Math.sin(n),a=Math.cos(n)),u=e(p),s=0;s<t.length;s++)c=t[s],c.hidden||(c[v]=u,p+=g*c.v/2,c.pxmid=e(p),c.midangle=p,p+=g*c.v/2,u=e(p),c[m]=u,c.largeArc=c.v>f.vTotal/2?1:0)}function c(t,e,n){if(!t)return 1;var r=Math.sin(t*Math.PI/180);return Math.max(.01,n*r*Math.abs(e)+2*Math.sqrt(1-r*r*e*e))}var u=t(\"d3\"),f=t(\"../../plots/cartesian/graph_interact\"),d=t(\"../../components/color\"),h=t(\"../../components/drawing\"),p=t(\"../../lib/svg_text_utils\"),g=t(\"./helpers\");e.exports=function(t,e){var n=t._fullLayout;l(e,n._size);var c=n._pielayer.selectAll(\"g.trace\").data(e);c.enter().append(\"g\").attr({\"stroke-linejoin\":\"round\",\"class\":\"trace\"}),c.exit().remove(),c.order(),c.each(function(e){var l=u.select(this),c=e[0],v=c.trace,m=0,y=(v.depth||0)*c.r*Math.sin(m)/2,x=v.tiltaxis||0,b=x*Math.PI/180,_=[y*Math.sin(b),y*Math.cos(b)],w=c.r*Math.cos(m),k=l.selectAll(\"g.part\").data(v.tilt?[\"top\",\"sides\"]:[\"top\"]);k.enter().append(\"g\").attr(\"class\",function(t){return t+\" part\"}),k.exit().remove(),k.order(),s(e),l.selectAll(\".top\").each(function(){var l=u.select(this).selectAll(\"g.slice\").data(e);l.enter().append(\"g\").classed(\"slice\",!0),l.exit().remove();var s=[[[],[]],[[],[]]],m=!1;l.each(function(i){function l(e){var r=t._fullLayout,o=t._fullData[v.index],l=o.hoverinfo;if(\"all\"===l&&(l=\"label+text+value+percent+name\"),!t._dragging&&r.hovermode!==!1&&\"none\"!==l&&l){var s=a(i,c),u=k+i.pxmid[0]*(1-s),d=M+i.pxmid[1]*(1-s),h=n.separators,p=[];-1!==l.indexOf(\"label\")&&p.push(i.label),o.text&&o.text[i.i]&&-1!==l.indexOf(\"text\")&&p.push(o.text[i.i]),-1!==l.indexOf(\"value\")&&p.push(g.formatPieValue(i.v,h)),-1!==l.indexOf(\"percent\")&&p.push(g.formatPiePercent(i.v/c.vTotal,h)),f.loneHover({x0:u-s*c.r,x1:u+s*c.r,y:d,text:p.join(\"<br>\"),name:-1!==l.indexOf(\"name\")?o.name:void 0,color:i.color,idealAlign:i.pxmid[0]<0?\"left\":\"right\"},{container:r._hoverlayer.node(),outerContainer:r._paper.node()}),f.hover(t,e,\"pie\"),T=!0}}function d(e){t.emit(\"plotly_unhover\",{points:[e]}),T&&(f.loneUnhover(n._hoverlayer.node()),T=!1)}function y(){t._hoverdata=[i],t._hoverdata.trace=e.trace,f.click(t,{target:!0})}function b(t,e,n,r){return\"a\"+r*c.r+\",\"+r*w+\" \"+x+\" \"+i.largeArc+(n?\" 1 \":\" 0 \")+r*(e[0]-t[0])+\",\"+r*(e[1]-t[1])}if(i.hidden)return void 
u.select(this).selectAll(\"path,g\").remove();s[i.pxmid[1]<0?0:1][i.pxmid[0]<0?0:1].push(i);var k=c.cx+_[0],M=c.cy+_[1],A=u.select(this),L=A.selectAll(\"path.surface\").data([i]),T=!1;if(L.enter().append(\"path\").classed(\"surface\",!0).style({\"pointer-events\":\"all\"}),A.select(\"path.textline\").remove(),A.on(\"mouseover\",l).on(\"mouseout\",d).on(\"click\",y),v.pull){var z=+(Array.isArray(v.pull)?v.pull[i.i]:v.pull)||0;z>0&&(k+=z*i.pxmid[0],M+=z*i.pxmid[1])}i.cxFinal=k,i.cyFinal=M;var S=v.hole;if(i.v===c.vTotal){var E=\"M\"+(k+i.px0[0])+\",\"+(M+i.px0[1])+b(i.px0,i.pxmid,!0,1)+b(i.pxmid,i.px0,!0,1)+\"Z\";S?L.attr(\"d\",\"M\"+(k+S*i.px0[0])+\",\"+(M+S*i.px0[1])+b(i.px0,i.pxmid,!1,S)+b(i.pxmid,i.px0,!1,S)+\"Z\"+E):L.attr(\"d\",E)}else{var C=b(i.px0,i.px1,!0,1);if(S){var O=1-S;L.attr(\"d\",\"M\"+(k+S*i.px1[0])+\",\"+(M+S*i.px1[1])+b(i.px1,i.px0,!1,S)+\"l\"+O*i.px0[0]+\",\"+O*i.px0[1]+C+\"Z\")}else L.attr(\"d\",\"M\"+k+\",\"+M+\"l\"+i.px0[0]+\",\"+i.px0[1]+C+\"Z\")}var P=Array.isArray(v.textposition)?v.textposition[i.i]:v.textposition,N=A.selectAll(\"g.slicetext\").data(i.text&&\"none\"!==P?[0]:[]);N.enter().append(\"g\").classed(\"slicetext\",!0),N.exit().remove(),N.each(function(){var t=u.select(this).selectAll(\"text\").data([0]);t.enter().append(\"text\").attr(\"data-notex\",1),t.exit().remove(),t.text(i.text).attr({\"class\":\"slicetext\",transform:\"\",\"data-bb\":\"\",\"text-anchor\":\"middle\",x:0,y:0}).call(h.font,\"outside\"===P?v.outsidetextfont:v.insidetextfont).call(p.convertToTspans),t.selectAll(\"tspan.line\").attr({x:0,y:0});var e,n=h.bBox(t.node());\"outside\"===P?e=o(n,i):(e=r(n,i,c),\"auto\"===P&&e.scale<1&&(t.call(h.font,v.outsidetextfont),v.outsidetextfont.family===v.insidetextfont.family&&v.outsidetextfont.size===v.insidetextfont.size||(t.attr({\"data-bb\":\"\"}),n=h.bBox(t.node())),e=o(n,i)));var a=k+i.pxmid[0]*e.rCenter+(e.x||0),l=M+i.pxmid[1]*e.rCenter+(e.y||0);e.outside&&(i.yLabelMin=l-n.height/2,i.yLabelMid=l,i.yLabelMax=l+n.height/2,i.labelExtraX=0,i.labelExtraY=0,m=!0),t.attr(\"transform\",\"translate(\"+a+\",\"+l+\")\"+(e.scale<1?\"scale(\"+e.scale+\")\":\"\")+(e.rotate?\"rotate(\"+e.rotate+\")\":\"\")+\"translate(\"+-(n.left+n.right)/2+\",\"+-(n.top+n.bottom)/2+\")\")})}),m&&i(s,v),l.each(function(t){if(t.labelExtraX||t.labelExtraY){var e=u.select(this),n=e.select(\"g.slicetext text\");n.attr(\"transform\",\"translate(\"+t.labelExtraX+\",\"+t.labelExtraY+\")\"+n.attr(\"transform\"));var r=t.cxFinal+t.pxmid[0],a=t.cyFinal+t.pxmid[1],o=\"M\"+r+\",\"+a,i=(t.yLabelMax-t.yLabelMin)*(t.pxmid[0]<0?-1:1)/4;if(t.labelExtraX){var l=t.labelExtraX*t.pxmid[1]/t.pxmid[0],s=t.yLabelMid+t.labelExtraY-(t.cyFinal+t.pxmid[1]);o+=Math.abs(l)>Math.abs(s)?\"l\"+s*t.pxmid[0]/t.pxmid[1]+\",\"+s+\"H\"+(r+t.labelExtraX+i):\"l\"+t.labelExtraX+\",\"+l+\"v\"+(s-l)+\"h\"+i}else o+=\"V\"+(t.yLabelMid+t.labelExtraY)+\"h\"+i;e.append(\"path\").classed(\"textline\",!0).call(d.stroke,v.outsidetextfont.color).attr({\"stroke-width\":Math.min(2,v.outsidetextfont.size/8),d:o,fill:\"none\"})}})})}),setTimeout(function(){c.selectAll(\"tspan\").each(function(){var t=u.select(this);t.attr(\"dy\")&&t.attr(\"dy\",t.attr(\"dy\"))})},0)}},{\"../../components/color\":18,\"../../components/drawing\":41,\"../../lib/svg_text_utils\":100,\"../../plots/cartesian/graph_interact\":117,\"./helpers"
+,
+"\":159,d3:9}],164:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"./style_one\");e.exports=function(t){t._fullLayout._pielayer.selectAll(\".trace\").each(function(t){var e=t[0],n=e.trace,o=r.select(this);o.style({opacity:n.opacity}),o.selectAll(\".top path.surface\").each(function(t){r.select(this).call(a,t,n)})})}},{\"./style_one\":165,d3:9}],165:[function(t,e,n){\"use strict\";var r=t(\"../../components/color\");e.exports=function(t,e,n){var a=n.marker.line.color;Array.isArray(a)&&(a=a[e.i]||r.defaultLine);var o=n.marker.line.width||0;Array.isArray(o)&&(o=o[e.i]||0),t.style({\"stroke-width\":o,fill:e.color}).call(r.stroke,a)}},{\"../../components/color\":18}],166:[function(t,e,n){\"use strict\";var r=t(\"../../lib\");e.exports=function(t){var e=t[0].trace,n=e.marker;if(r.mergeArray(e.text,t,\"tx\"),r.mergeArray(e.textposition,t,\"tp\"),e.textfont&&(r.mergeArray(e.textfont.size,t,\"ts\"),r.mergeArray(e.textfont.color,t,\"tc\"),r.mergeArray(e.textfont.family,t,\"tf\")),n&&n.line){var a=n.line;r.mergeArray(n.opacity,t,\"mo\"),r.mergeArray(n.symbol,t,\"mx\"),r.mergeArray(n.color,t,\"mc\"),r.mergeArray(a.color,t,\"mlc\"),r.mergeArray(a.width,t,\"mlw\")}}},{\"../../lib\":89}],167:[function(t,e,n){\"use strict\";var r=t(\"../../components/colorscale/color_attributes\"),a=t(\"../../components/drawing\"),o=(t(\"./constants\"),t(\"../../lib/extend\").extendFlat);e.exports={x:{valType:\"data_array\"},x0:{valType:\"any\",dflt:0},dx:{valType:\"number\",dflt:1},y:{valType:\"data_array\"},y0:{valType:\"any\",dflt:0},dy:{valType:\"number\",dflt:1},text:{valType:\"string\",dflt:\"\",arrayOk:!0},mode:{valType:\"flaglist\",flags:[\"lines\",\"markers\",\"text\"],extras:[\"none\"]},hoveron:{valType:\"flaglist\",flags:[\"points\",\"fills\"]},line:{color:{valType:\"color\"},width:{valType:\"number\",min:0,dflt:2},shape:{valType:\"enumerated\",values:[\"linear\",\"spline\",\"hv\",\"vh\",\"hvh\",\"vhv\"],dflt:\"linear\"},smoothing:{valType:\"number\",min:0,max:1.3,dflt:1},dash:{valType:\"string\",values:[\"solid\",\"dot\",\"dash\",\"longdash\",\"dashdot\",\"longdashdot\"],dflt:\"solid\"}},connectgaps:{valType:\"boolean\",dflt:!1},fill:{valType:\"enumerated\",values:[\"none\",\"tozeroy\",\"tozerox\",\"tonexty\",\"tonextx\",\"toself\",\"tonext\"],dflt:\"none\"},fillcolor:{valType:\"color\"},marker:o({},{symbol:{valType:\"enumerated\",values:a.symbolList,dflt:\"circle\",arrayOk:!0},opacity:{valType:\"number\",min:0,max:1,arrayOk:!0},size:{valType:\"number\",min:0,dflt:6,arrayOk:!0},maxdisplayed:{valType:\"number\",min:0,dflt:0},sizeref:{valType:\"number\",dflt:1},sizemin:{valType:\"number\",min:0,dflt:0},sizemode:{valType:\"enumerated\",values:[\"diameter\",\"area\"],dflt:\"diameter\"},showscale:{valType:\"boolean\",dflt:!1},line:o({},{width:{valType:\"number\",min:0,arrayOk:!0}},r(\"marker.line\"))},r(\"marker\")),textposition:{valType:\"enumerated\",values:[\"top left\",\"top center\",\"top right\",\"middle left\",\"middle center\",\"middle right\",\"bottom left\",\"bottom center\",\"bottom right\"],dflt:\"middle center\",arrayOk:!0},textfont:{family:{valType:\"string\",noBlank:!0,strict:!0,arrayOk:!0},size:{valType:\"number\",min:1,arrayOk:!0},color:{valType:\"color\",arrayOk:!0}},r:{valType:\"data_array\"},t:{valType:\"data_array\"},_nestedModules:{error_y:\"ErrorBars\",error_x:\"ErrorBars\",\"marker.colorbar\":\"Colorbar\"}}},{\"../../components/colorscale/color_attributes\":26,\"../../components/drawing\":41,\"../../lib/extend\":88,\"./constants\":172}],168:[function(t,e,n){\"use strict\";var 
r=t(\"fast-isnumeric\"),a=t(\"../../plots/cartesian/axes\"),o=t(\"../../lib\"),i=t(\"./subtypes\"),l=t(\"./colorscale_calc\");e.exports=function(t,e){var n,s,c,u=a.getFromId(t,e.xaxis||\"x\"),f=a.getFromId(t,e.yaxis||\"y\"),d=u.makeCalcdata(e,\"x\"),h=f.makeCalcdata(e,\"y\"),p=Math.min(d.length,h.length);u._minDtick=0,f._minDtick=0,d.length>p&&d.splice(p,d.length-p),h.length>p&&h.splice(p,h.length-p);var g={padded:!0},v={padded:!0};if(i.hasMarkers(e)){if(n=e.marker,s=n.size,Array.isArray(s)){var m={type:\"linear\"};a.setConvert(m),s=m.makeCalcdata(e.marker,\"size\"),s.length>p&&s.splice(p,s.length-p)}var y,x=1.6*(e.marker.sizeref||1);y=\"area\"===e.marker.sizemode?function(t){return Math.max(Math.sqrt((t||0)/x),3)}:function(t){return Math.max((t||0)/x,3)},g.ppad=v.ppad=Array.isArray(s)?s.map(y):y(s)}l(e),!(\"tozerox\"===e.fill||\"tonextx\"===e.fill&&t.firstscatter)||d[0]===d[p-1]&&h[0]===h[p-1]?e.error_y.visible||-1===[\"tonexty\",\"tozeroy\"].indexOf(e.fill)&&(i.hasMarkers(e)||i.hasText(e))||(g.padded=!1,g.ppad=0):g.tozero=!0,!(\"tozeroy\"===e.fill||\"tonexty\"===e.fill&&t.firstscatter)||d[0]===d[p-1]&&h[0]===h[p-1]?-1!==[\"tonextx\",\"tozerox\"].indexOf(e.fill)&&(v.padded=!1):v.tozero=!0,a.expand(u,d,g),a.expand(f,h,v);var b=new Array(p);for(c=0;p>c;c++)b[c]=r(d[c])&&r(h[c])?{x:d[c],y:h[c]}:{x:!1,y:!1};return void 0!==typeof s&&o.mergeArray(s,b,\"ms\"),t.firstscatter=!1,b}},{\"../../lib\":89,\"../../plots/cartesian/axes\":110,\"./colorscale_calc\":171,\"./subtypes\":186,\"fast-isnumeric\":11}],169:[function(t,e,n){\"use strict\";e.exports=function(t){var e,n,r,a,o;for(e=0;e<t.length;e++)if(n=t[e],r=n.fill,\"none\"!==r&&\"scatter\"===n.type&&(n.opacity=void 0,\"tonexty\"===r||\"tonextx\"===r))for(a=e-1;a>=0;a--)if(o=t[a],\"scatter\"===o.type&&o.xaxis===n.xaxis&&o.yaxis===n.yaxis){o.opacity=void 0;break}}},{}],170:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"fast-isnumeric\"),o=t(\"../../lib\"),i=t(\"../../plots/plots\"),l=t(\"../../components/colorscale/get_scale\"),s=t(\"../../components/colorbar/draw\");e.exports=function(t,e){var n=e[0].trace,c=n.marker,u=\"cb\"+n.uid;if(t._fullLayout._infolayer.selectAll(\".\"+u).remove(),void 0===c||!c.showscale)return void i.autoMargin(t,u);var f=l(c.colorscale),d=c.color,h=c.cmin,p=c.cmax;a(h)||(h=o.aggNums(Math.min,null,d)),a(p)||(p=o.aggNums(Math.max,null,d));var g=e[0].t.cb=s(t,u);g.fillcolor(r.scale.linear().domain(f.map(function(t){return h+t[0]*(p-h)})).range(f.map(function(t){return t[1]}))).filllevels({start:h,end:p,size:(p-h)/254}).options(c.colorbar)()}},{\"../../components/colorbar/draw\":21,\"../../components/colorscale/get_scale\":30,\"../../lib\":89,\"../../plots/plots\":130,d3:9,\"fast-isnumeric\":11}],171:[function(t,e,n){\"use strict\";var r=t(\"../../components/colorscale/has_colorscale\"),a=t(\"../../components/colorscale/calc\"),o=t(\"./subtypes\");e.exports=function(t){o.hasLines(t)&&r(t,\"line\")&&a(t,t.line.color,\"line\",\"c\"),o.hasMarkers(t)&&(r(t,\"marker\")&&a(t,t.marker.color,\"marker\",\"c\"),r(t,\"marker.line\")&&a(t,t.marker.line.color,\"marker.line\",\"c\"))}},{\"../../components/colorscale/calc\":25,\"../../components/colorscale/has_colorscale\":31,\"./subtypes\":186}],172:[function(t,e,n){\"use strict\";e.exports={PTS_LINESONLY:20}},{}],173:[function(t,e,n){\"use strict\";var 
r=t(\"../../lib\"),a=t(\"./attributes\"),o=t(\"./constants\"),i=t(\"./subtypes\"),l=t(\"./xy_defaults\"),s=t(\"./marker_defaults\"),c=t(\"./line_defaults\"),u=t(\"./line_shape_defaults\"),f=t(\"./text_defaults\"),d=t(\"./fillcolor_defaults\"),h=t(\"../../components/errorbars/defaults\");e.exports=function(t,e,n,p){function g(n,o){return r.coerce(t,e,a,n,o)}var v=l(t,e,g),m=v<o.PTS_LINESONLY?\"lines+markers\":\"lines\";if(!v)return void(e.visible=!1);g(\"text\"),g(\"mode\",m),i.hasLines(e)&&(c(t,e,n,p,g),u(t,e,g),g(\"connectgaps\")),i.hasMarkers(e)&&s(t,e,n,p,g),i.hasText(e)&&f(t,e,p,g);var y=[];(i.hasMarkers(e)||i.hasText(e))&&(g(\"marker.maxdisplayed\"),y.push(\"points\")),g(\"fill\"),\"none\"!==e.fill&&(d(t,e,n,g),i.hasLines(e)||u(t,e,g)),\"tonext\"!==e.fill&&\"toself\"!==e.fill||y.push(\"fills\"),g(\"hoveron\",y.join(\"+\")||\"points\"),h(t,e,n,{axis:\"y\"}),h(t,e,n,{axis:\"x\",inherit:\"y\"})}},{\"../../components/errorbars/defaults\":46,\"../../lib\":89,\"./attributes\":167,\"./constants\":172,\"./fillcolor_defaults\":174,\"./line_defaults\":178,\"./line_shape_defaults\":180,\"./marker_defaults\":182,\"./subtypes\":186,\"./text_defaults\":187,\"./xy_defaults\":188}],174:[function(t,e,n){\"use strict\";var r=t(\"../../components/color\");e.exports=function(t,e,n,a){var o=!1;if(e.marker){var i=e.marker.color,l=(e.marker.line||{}).color;i&&!Array.isArray(i)?o=i:l&&!Array.isArray(l)&&(o=l)}a(\"fillcolor\",r.addOpacity((e.line||{}).color||o||n,.5))}},{\"../../components/color\":18}],175:[function(t,e,n){\"use strict\";var r=t(\"../../components/color\"),a=t(\"./subtypes\");e.exports=function(t,e){var n,o;if(\"lines\"===t.mode)return n=t.line.color,n&&r.opacity(n)?n:t.fillcolor;if(\"none\"===t.mode)return t.fill?t.fillcolor:\"\";var i=e.mcc||(t.marker||{}).color,l=e.mlcc||((t.marker||{}).line||{}).color;return o=i&&r.opacity(i)?i:l&&r.opacity(l)&&(e.mlw||((t.marker||{}).line||{}).width)?l:\"\",o?r.opacity(o)<.3?r.addOpacity(o,.3):o:(n=(t.line||{}).color,n&&r.opacity(n)&&a.hasLines(t)&&t.line.width?n:t.fillcolor)}},{\"../../components/color\":18,\"./subtypes\":186}],176:[function(t,e,n){\"use strict\";var r=t(\"../../lib\"),a=t(\"../../plots/cartesian/graph_interact\"),o=t(\"../../plots/cartesian/constants\"),i=t(\"../../components/errorbars\"),l=t(\"./get_trace_color\"),s=t(\"../../components/color\");e.exports=function(t,e,n,c){var u=t.cd,f=u[0].trace,d=t.xa,h=t.ya,p=d.c2p(e),g=h.c2p(n),v=[p,g];if(-1!==f.hoveron.indexOf(\"points\")){var m=function(t){var e=Math.max(3,t.mrc||0);return Math.max(Math.abs(d.c2p(t.x)-p)-e,1-3/e)},y=function(t){var e=Math.max(3,t.mrc||0);return Math.max(Math.abs(h.c2p(t.y)-g)-e,1-3/e)},x=function(t){var e=Math.max(3,t.mrc||0),n=d.c2p(t.x)-p,r=h.c2p(t.y)-g;return Math.max(Math.sqrt(n*n+r*r)-e,1-3/e)},b=a.getDistanceFunction(c,m,y,x);if(a.getClosest(u,b,t),t.index!==!1){var _=u[t.index],w=d.c2p(_.x,!0),k=h.c2p(_.y,!0),M=_.mrc||1;return r.extendFlat(t,{color:l(f,_),x0:w-M,x1:w+M,xLabelVal:_.x,y0:k-M,y1:k+M,yLabelVal:_.y}),_.tx?t.text=_.tx:f.text&&(t.text=f.text),i.hoverInfo(_,f,t),[t]}}if(-1!==f.hoveron.indexOf(\"fills\")&&f._polygons){var A,L,T,z,S,E,C,O,P,N=f._polygons,D=[],I=!1,R=1/0,j=-(1/0),q=1/0,F=-(1/0);for(A=0;A<N.length;A++)T=N[A],T.contains(v)&&(I=!I,D.push(T),q=Math.min(q,T.ymin),F=Math.max(F,T.ymax));if(I){q=Math.max(q,0),F=Math.min(F,h._length);var 
B=(q+F)/2;for(A=0;A<D.length;A++)for(z=D[A].pts,L=1;L<z.length;L++)O=z[L-1][1],P=z[L][1],O>B!=P>=B&&(E=z[L-1][0],C=z[L][0],S=E+(C-E)*(B-O)/(P-O),R=Math.min(R,S),j=Math.max(j,S));R=Math.max(R,0),j=Math.min(j,d._length);var H=s.defaultLine;return s.opacity(f.fillcolor)?H"
+,
+"=f.fillcolor:s.opacity((f.line||{}).color)&&(H=f.line.color),r.extendFlat(t,{distance:o.MAXDIST+10,x0:R,x1:j,y0:B,y1:B,color:H}),delete t.index,f.text&&!Array.isArray(f.text)?t.text=String(f.text):t.text=f.name,[t]}}}},{\"../../components/color\":18,\"../../components/errorbars\":47,\"../../lib\":89,\"../../plots/cartesian/constants\":115,\"../../plots/cartesian/graph_interact\":117,\"./get_trace_color\":175}],177:[function(t,e,n){\"use strict\";var r={},a=t(\"./subtypes\");r.hasLines=a.hasLines,r.hasMarkers=a.hasMarkers,r.hasText=a.hasText,r.isBubble=a.isBubble,r.attributes=t(\"./attributes\"),r.supplyDefaults=t(\"./defaults\"),r.cleanData=t(\"./clean_data\"),r.calc=t(\"./calc\"),r.arraysToCalcdata=t(\"./arrays_to_calcdata\"),r.plot=t(\"./plot\"),r.colorbar=t(\"./colorbar\"),r.style=t(\"./style\"),r.hoverPoints=t(\"./hover\"),r.selectPoints=t(\"./select\"),r.moduleType=\"trace\",r.name=\"scatter\",r.basePlotModule=t(\"../../plots/cartesian\"),r.categories=[\"cartesian\",\"symbols\",\"markerColorscale\",\"errorBarsOK\",\"showLegend\"],r.meta={},e.exports=r},{\"../../plots/cartesian\":118,\"./arrays_to_calcdata\":166,\"./attributes\":167,\"./calc\":168,\"./clean_data\":169,\"./colorbar\":170,\"./defaults\":173,\"./hover\":176,\"./plot\":183,\"./select\":184,\"./style\":185,\"./subtypes\":186}],178:[function(t,e,n){\"use strict\";var r=t(\"../../components/colorscale/has_colorscale\"),a=t(\"../../components/colorscale/defaults\");e.exports=function(t,e,n,o,i){var l=(t.marker||{}).color;if(i(\"line.color\",n),r(t,\"line\"))a(t,e,o,i,{prefix:\"line.\",cLetter:\"c\"});else{var s=(Array.isArray(l)?!1:l)||n;i(\"line.color\",s)}i(\"line.width\"),i(\"line.dash\")}},{\"../../components/colorscale/defaults\":28,\"../../components/colorscale/has_colorscale\":31}],179:[function(t,e,n){\"use strict\";var r=t(\"../../plots/cartesian/axes\");e.exports=function(t,e){function n(e){var n=w.c2p(t[e].x),r=k.c2p(t[e].y);return n===z||r===z?!1:[n,r]}function a(t){var e=t[0]/w._length,n=t[1]/k._length;return(1+10*Math.max(0,-e,e-1,-n,n-1))*A}function o(t,e){var n=t[0]-e[0],r=t[1]-e[1];return Math.sqrt(n*n+r*r);\n"
+"}var i,l,s,c,u,f,d,h,p,g,v,m,y,x,b,_,w=e.xaxis,k=e.yaxis,M=e.connectGaps,A=e.baseTolerance,L=e.linear,T=[],z=r.BADNUM,S=.2,E=new Array(t.length),C=0;for(i=0;i<t.length;i++)if(l=n(i)){for(C=0,E[C++]=l,i++;i<t.length;i++){if(c=n(i),!c){if(M)continue;break}if(L){if(d=o(c,l),!(d<a(c)*S)){for(p=[(c[0]-l[0])/d,(c[1]-l[1])/d],u=l,v=d,m=x=b=0,h=!1,s=c,i++;i<t.length;i++){if(f=n(i),!f){if(M)continue;break}if(g=[f[0]-l[0],f[1]-l[1]],_=g[0]*p[1]-g[1]*p[0],x=Math.min(x,_),b=Math.max(b,_),b-x>a(f))break;s=f,y=g[0]*p[0]+g[1]*p[1],y>v?(v=y,c=f,h=!1):m>y&&(m=y,u=f,h=!0)}if(h?(E[C++]=c,s!==u&&(E[C++]=u)):(u!==l&&(E[C++]=u),s!==c&&(E[C++]=c)),E[C++]=s,i>=t.length||!f)break;E[C++]=f,l=f}}else E[C++]=c}T.push(E.slice(0,C))}return T}},{\"../../plots/cartesian/axes\":110}],180:[function(t,e,n){\"use strict\";e.exports=function(t,e,n){var r=n(\"line.shape\");\"spline\"===r&&n(\"line.smoothing\")}},{}],181:[function(t,e,n){\"use strict\";var r=t(\"fast-isnumeric\");e.exports=function(t){var e=t.marker,n=e.sizeref||1,a=e.sizemin||0,o=\"area\"===e.sizemode?function(t){return Math.sqrt(t/n)}:function(t){return t/n};return function(t){var e=o(t/2);return r(e)&&e>0?Math.max(e,a):0}}},{\"fast-isnumeric\":11}],182:[function(t,e,n){\"use strict\";var r=t(\"../../components/color\"),a=t(\"../../components/colorscale/has_colorscale\"),o=t(\"../../components/colorscale/defaults\"),i=t(\"./subtypes\");e.exports=function(t,e,n,l,s){var c,u=i.isBubble(t),f=(t.line||{}).color;f&&(n=f),s(\"marker.symbol\"),s(\"marker.opacity\",u?.7:1),s(\"marker.size\"),s(\"marker.color\",n),a(t,\"marker\")&&o(t,e,l,s,{prefix:\"marker.\",cLetter:\"c\"}),c=f&&!Array.isArray(f)&&e.marker.color!==f?f:u?r.background:r.defaultLine,s(\"marker.line.color\",c),a(t,\"marker.line\")&&o(t,e,l,s,{prefix:\"marker.line.\",cLetter:\"c\"}),s(\"marker.line.width\",u?1:0),u&&(s(\"marker.sizeref\"),s(\"marker.sizemin\"),s(\"marker.sizemode\"))}},{\"../../components/color\":18,\"../../components/colorscale/defaults\":28,\"../../components/colorscale/has_colorscale\":31,\"./subtypes\":186}],183:[function(t,e,n){\"use strict\";function r(t,e,n){var r=e.x(),o=e.y(),i=a.extent(r.range.map(r.l2c)),l=a.extent(o.range.map(o.l2c));n.forEach(function(t,e){var r=t[0].trace;if(c.hasMarkers(r)){var a=r.marker.maxdisplayed;if(0!==a){var o=t.filter(function(t){return t.x>=i[0]&&t.x<=i[1]&&t.y>=l[0]&&t.y<=l[1]}),s=Math.ceil(o.length/a),u=0;n.forEach(function(t,n){var r=t[0].trace;c.hasMarkers(r)&&r.marker.maxdisplayed>0&&e>n&&u++});var f=Math.round(u*s/3+Math.floor(u/3)*s/7.1);t.forEach(function(t){delete t.vis}),o.forEach(function(t,e){0===Math.round((e+f)%s)&&(t.vis=!0)})}}})}var a=t(\"d3\"),o=t(\"../../lib\"),i=t(\"../../components/drawing\"),l=t(\"../../components/errorbars\"),s=t(\"../../lib/polygon\").tester,c=t(\"./subtypes\"),u=t(\"./arrays_to_calcdata\"),f=t(\"./line_points\");e.exports=function(t,e,n){function d(t){return t.filter(function(t){return t.vis})}r(t,e,n);var h=e.x(),p=e.y(),g=e.plot.select(\".scatterlayer\").selectAll(\"g.trace.scatter\").data(n);g.enter().append(\"g\").attr(\"class\",\"trace scatter\").style(\"stroke-miterlimit\",2),g.call(l.plot,e);var v,m,y,x,b=\"\",_=[];g.each(function(t){var e=t[0].trace,n=e.line,r=a.select(this);if(e.visible===!0&&(m=e.fill.charAt(e.fill.length-1),\"x\"!==m&&\"y\"!==m&&(m=\"\"),t[0].node3=r,u(t),c.hasLines(e)||\"none\"!==e.fill)){var 
o,l,d,g,w,k=\"\",M=\"\";v=\"tozero\"===e.fill.substr(0,6)||\"toself\"===e.fill||\"to\"===e.fill.substr(0,2)&&!b?r.append(\"path\").classed(\"js-fill\",!0):null,x&&(y=x.datum(t)),x=r.append(\"path\").classed(\"js-fill\",!0),-1!==[\"hv\",\"vh\",\"hvh\",\"vhv\"].indexOf(n.shape)?(d=i.steps(n.shape),g=i.steps(n.shape.split(\"\").reverse().join(\"\"))):d=g=\"spline\"===n.shape?function(t){var e=t[t.length-1];return t[0][0]===e[0]&&t[0][1]===e[1]?i.smoothclosed(t.slice(1),n.smoothing):i.smoothopen(t,n.smoothing)}:function(t){return\"M\"+t.join(\"L\")},w=function(t){return g(t.reverse())};var A,L=f(t,{xaxis:h,yaxis:p,connectGaps:e.connectgaps,baseTolerance:Math.max(n.width||1,3)/4,linear:\"linear\"===n.shape}),T=e._polygons=new Array(L.length);for(A=0;A<L.length;A++)e._polygons[A]=s(L[A]);if(L.length){var z=L[0][0],S=L[L.length-1],E=S[S.length-1];for(A=0;A<L.length;A++){var C=L[A];o=d(C),l=w(C),k?m?(k+=\"L\"+o.substr(1),M=l+(\"L\"+M.substr(1))):(k+=\"Z\"+o,M=l+\"Z\"+M):(k=o,M=l),c.hasLines(e)&&C.length>1&&r.append(\"path\").classed(\"js-line\",!0).style(\"vector-effect\",\"non-scaling-stroke\").attr(\"d\",o)}v?z&&E&&(m?(\"y\"===m?z[1]=E[1]=p.c2p(0,!0):\"x\"===m&&(z[0]=E[0]=h.c2p(0,!0)),v.attr(\"d\",k+\"L\"+E+\"L\"+z+\"Z\")):v.attr(\"d\",k+\"Z\")):\"tonext\"===e.fill.substr(0,6)&&k&&b&&(\"tonext\"===e.fill?y.attr(\"d\",k+\"Z\"+b+\"Z\"):y.attr(\"d\",k+\"L\"+b.substr(1)+\"Z\"),e._polygons=e._polygons.concat(_)),b=M,_=T}}}),g.selectAll(\"path:not([d])\").remove(),g.append(\"g\").attr(\"class\",\"points\").each(function(t){var e=t[0].trace,n=a.select(this),r=c.hasMarkers(e),l=c.hasText(e);!r&&!l||e.visible!==!0?n.remove():(r&&n.selectAll(\"path.point\").data(e.marker.maxdisplayed?d:o.identity).enter().append(\"path\").classed(\"point\",!0).call(i.translatePoints,h,p),l&&n.selectAll(\"g\").data(e.marker.maxdisplayed?d:o.identity).enter().append(\"g\").append(\"text\").call(i.translatePoints,h,p))})}},{\"../../components/drawing\":41,\"../../components/errorbars\":47,\"../../lib\":89,\"../../lib/polygon\":95,\"./arrays_to_calcdata\":166,\"./line_points\":179,\"./subtypes\":186,d3:9}],184:[function(t,e,n){\"use strict\";var r=t(\"./subtypes\"),a=.2;e.exports=function(t,e){var n,o,i,l,s=t.cd,c=t.xaxis,u=t.yaxis,f=[],d=s[0].trace,h=d.index,p=d.marker,g=!r.hasMarkers(d)&&!r.hasText(d);if(d.visible===!0&&!g){var v=Array.isArray(p.opacity)?1:p.opacity;if(e===!1)for(n=0;n<s.length;n++)s[n].dim=0;else for(n=0;n<s.length;n++)o=s[n],i=c.c2p(o.x),l=u.c2p(o.y),e.contains([i,l])?(f.push({curveNumber:h,pointNumber:n,x:o.x,y:o.y}),o.dim=0):o.dim=1;return s[0].node3.selectAll(\"path.point\").style(\"opacity\",function(t){return((t.mo+1||v+1)-1)*(t.dim?a:1)}),s[0].node3.selectAll(\"text\").style(\"opacity\",function(t){return t.dim?a:1}),f}}},{\"./subtypes\":186}],185:[function(t,e,n){\"use strict\";var r=t(\"d3\"),a=t(\"../../components/drawing\"),o=t(\"../../components/errorbars\");e.exports=function(t){var e=r.select(t).selectAll(\"g.trace.scatter\");e.style(\"opacity\",function(t){return t[0].trace.opacity}),e.selectAll(\"g.points\").each(function(t){r.select(this).selectAll(\"path.point\").call(a.pointStyle,t.trace||t[0].trace),r.select(this).selectAll(\"text\").call(a.textPointStyle,t.trace||t[0].trace)}),e.selectAll(\"g.trace path.js-line\").call(a.lineGroupStyle),e.selectAll(\"g.trace path.js-fill\").call(a.fillGroupStyle),e.call(o.style)}},{\"../../components/drawing\":41,\"../../components/errorbars\":47,d3:9}],186:[function(t,e,n){\"use strict\";e.exports={hasLines:function(t){return 
t.visible&&t.mode&&-1!==t.mode.indexOf(\"lines\")},hasMarkers:function(t){return t.visible&&t.mode&&-1!==t.mode.indexOf(\"markers\")},hasText:function(t){return t.visible&&t.mode&&-1!==t.mode.indexOf(\"text\")},isBubble:function(t){return\"object\"==typeof t.marker&&Array.isArray(t.marker.size)}}},{}],187:[function(t,e,n){\"use strict\";var r=t(\"../../lib\");e.exports=function(t,e,n,a){a(\"textposition\"),r.coerceFont(a,\"textfont\",n.font)}},{\"../../lib\":89}],188:[function(t,e,n){\"use strict\";e.exports=function(t,e,n){var r,a=n(\"x\"),o=n(\"y\");if(a)o?(r=Math.min(a.length,o.length),r<a.length&&(e.x=a.slice(0,r)),r<o.length&&(e.y=o.slice(0,r))):(r=a.length,n(\"y0\"),n(\"dy\"));else{if(!o)return 0;r=e.y.length,n(\"x0\"),n(\"dx\")}return r}},{}]},{},[5])(5)}); </script>\n"
+" </head>\n"
+" <body>\n"
+" <div id=\"header\">\n"
+" <label>plot:</label>\n"
+" <div class=\"select\">\n"
+" <span class=\"arr\"></span>\n"
+,
+" <select id=\"chooser\">\n"
+" <option value=\"summary\">summary</option>\n"
+" {% for run in runs %}\n"
+" <option value=\"{$loop.index0}\">samples\n"
+" {% for param in run.params %}\n"
+" | {$param.name}={$param.value}\n"
+" {% endfor %}\n"
+" </option>\n"
+" {% endfor %}\n"
+" </select>\n"
+" </div>\n"
+" </div>\n"
+" <div id=\"plot\"></div>\n"
+" <div id=\"footer\">Generated with <a href=\"http://flamingdangerzone.com/nonius\">nonius</a></div>\n"
+" <script type=\"text/javascript\"> !function () {\n"
+" var data = {\n"
+" title: '{$title}',\n"
+" units: '{$units}',\n"
+" logarithmic: {$logarithmic},\n"
+" param: '{$runparam}',\n"
+" runs: [\n"
+" {% for run in runs %}{\n"
+" params: {\n"
+" {% for param in run.params %}'{$param.name}': '{$param.value}',\n"
+" {% endfor %}\n"
+" },\n"
+" benchmarks: [\n"
+" {% for benchmark in run.benchmarks %}{\n"
+" name: '{$benchmark.name}',\n"
+" mean: {$benchmark.mean},\n"
+" stddev: {$benchmark.stddev},\n"
+" samples: [\n"
+" {% for sample in benchmark.samples %}{$sample}, {% endfor %}\n"
+" ],\n"
+" },{% endfor %}\n"
+" ]\n"
+" },{% endfor %}\n"
+" ]\n"
+" };\n"
+"\n"
+" var plotdiv = document.getElementById(\"plot\");\n"
+" window.addEventListener(\"resize\", function() {\n"
+" Plotly.Plots.resize(plotdiv);\n"
+" });\n"
+"\n"
+" var chooser = document.getElementById(\"chooser\");\n"
+" chooser.addEventListener(\"change\", choosePlot);\n"
+" chooser.addEventListener(\"blur\", chooser.focus.bind(chooser));\n"
+" chooser.focus();\n"
+"\n"
+" var legendStyle = {\n"
+" font: { family: 'monospace' },\n"
+" borderwidth: 2,\n"
+" bordercolor: 'black'\n"
+" }\n"
+"\n"
+" function choosePlot() {\n"
+" var plot = chooser.options[chooser.selectedIndex].value;\n"
+" if (plot == 'summary') {\n"
+" if (data.runs.length > 1) {\n"
+" plotSummary();\n"
+" } else {\n"
+" plotSingleSummary();\n"
+" }\n"
+" } else {\n"
+" plotSamples(plot);\n"
+" }\n"
+" }\n"
+"\n"
+" function plotSamples(plot) {\n"
+" var run = data.runs[plot];\n"
+" var traces = run.benchmarks.map(function (b, i) {\n"
+" return {\n"
+" name: b.name,\n"
+" type: 'scatter',\n"
+" mode: 'markers',\n"
+" marker: { symbol: i },\n"
+" y: b.samples,\n"
+" x: b.samples.map(function (_, i) { return i; })\n"
+" }\n"
+" });\n"
+" var layout = {\n"
+" title: data.title,\n"
+" showLegend: true,\n"
+" xaxis: { title: 'Measurement' },\n"
+" yaxis: {\n"
+" title: 'Time (' + data.units + ')',\n"
+" rangemode: 'tozero',\n"
+" zeroline: true\n"
+" },\n"
+" legend: legendStyle\n"
+" };\n"
+" Plotly.newPlot(plotdiv, traces, layout);\n"
+" }\n"
+"\n"
+" function plotSummary() {\n"
+" var traces = data.runs[0].benchmarks.map(function (b, i) {\n"
+" return {\n"
+" name: b.name,\n"
+" type: 'scatter',\n"
+" mode: 'markers',\n"
+" marker: { symbol: i },\n"
+" x: data.runs.map(function (r) { return r.params['LAYER']; }),\n"
+" y: data.runs.map(function (r) { return r.benchmarks[i].mean; }),\n"
+" }\n"
+" });\n"
+" var layout = {\n"
+" title: data.title,\n"
+" showLegend: true,\n"
+" xaxis: {\n"
+" title: 'Layer',\n"
+" tickmode: 'array',\n"
+" tickvals: data.runs.map(function (r) { return r.params['LAYER']; }),\n"
+" type: data.logarithmic ? 'log' : '',\n"
+" },\n"
+" yaxis: {\n"
+" title: 'Time (' + data.units + ')',\n"
+" rangemode: 'tozero',\n"
+" zeroline: true,\n"
+" type: data.logarithmic ? 'log' : '',\n"
+" },\n"
+" legend: legendStyle\n"
+" };\n"
+" Plotly.newPlot(plotdiv, traces, layout);\n"
+" }\n"
+"\n"
+" function plotSingleSummary() {\n"
+" var traces = data.runs[0].benchmarks.map(function (b, i) {\n"
+" return {\n"
+" type: 'bar',\n"
+" name: b.name,\n"
+" x: [ 0 ],\n"
+" y: [ b.mean ],\n"
+" error_y: {\n"
+" type: 'data',\n"
+" array: [ b.stddev ],\n"
+" visible: true\n"
+" }\n"
+" }\n"
+" });\n"
+" var layout = {\n"
+" title: data.title,\n"
+" showLegend: true,\n"
+" xaxis: {\n"
+" title: '',\n"
+" showticklabels: false,\n"
+" },\n"
+" yaxis: {\n"
+" title: 'Time (' + data.units + ')',\n"
+" rangemode: 'tozero',\n"
+" zeroline: true\n"
+" },\n"
+" legend: legendStyle\n"
+" };\n"
+" Plotly.newPlot(plotdiv, traces, layout);\n"
+" }\n"
+"\n"
+" choosePlot();\n"
+"}();\n"
+" </script>\n"
+" </body>\n"
+"</html>\n"
diff --git a/infra/nnfw/cmake/packages/NoniusConfig.cmake b/infra/nnfw/cmake/packages/NoniusConfig.cmake
new file mode 100644
index 000000000..2b5778ebb
--- /dev/null
+++ b/infra/nnfw/cmake/packages/NoniusConfig.cmake
@@ -0,0 +1,26 @@
+function(_Nonius_import)
+ nnfw_find_package(NoniusSource QUIET)
+
+ if(NOT NoniusSource_FOUND)
+ set(Nonius_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT NoniusSource_FOUND)
+
+ if(NOT TARGET nonius)
+ message(STATUS "Found nonius: TRUE")
+ add_library(nonius INTERFACE)
+ target_include_directories(nonius INTERFACE "${NoniusSource_DIR}/include")
+ endif(NOT TARGET nonius)
+
+ if(BUILD_KBENCHMARK)
+    # Copy the html_report_template.g.h++ file into the nonius sources under externals.
+    # The copied header is modified so that the HTML report can show a per-layer summary view in kbenchmark.
+ execute_process(COMMAND ${CMAKE_COMMAND} -E copy
+ "${CMAKE_CURRENT_LIST_DIR}/Nonius/html_report_template.g.h++"
+ "${NoniusSource_DIR}/include/nonius/detail")
+ endif(BUILD_KBENCHMARK)
+
+ set(Nonius_FOUND TRUE PARENT_SCOPE)
+endfunction(_Nonius_import)
+
+_Nonius_import()
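The import above registers nonius as a header-only INTERFACE target and, only when BUILD_KBENCHMARK is set, overlays the modified report template into the downloaded sources. A minimal usage sketch, assuming the repository's top-level `nnfw` driver (a hypothetical entry point here) dispatches the `configure` command added later in this change:

    # Hedged sketch: enable the kbenchmark template copy at configure time.
    # `./nnfw` is an assumed driver name; adjust to your actual entry point.
    ./nnfw configure -DBUILD_KBENCHMARK=ON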
diff --git a/infra/nnfw/cmake/packages/NoniusSourceConfig.cmake b/infra/nnfw/cmake/packages/NoniusSourceConfig.cmake
new file mode 100644
index 000000000..5dde6b476
--- /dev/null
+++ b/infra/nnfw/cmake/packages/NoniusSourceConfig.cmake
@@ -0,0 +1,13 @@
+function(_NoniusSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(NONIUS_URL ${EXTERNAL_DOWNLOAD_SERVER}/libnonius/nonius/archive/v1.2.0-beta.1.tar.gz)
+ ExternalSource_Get("nonius" ${DOWNLOAD_NONIUS} ${NONIUS_URL})
+
+ set(NoniusSource_DIR ${nonius_SOURCE_DIR} PARENT_SCOPE)
+ set(NoniusSource_FOUND ${nonius_SOURCE_GET} PARENT_SCOPE)
+endfunction(_NoniusSource_import)
+
+_NoniusSource_import()
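Here `envoption` falls back to the given default when EXTERNAL_DOWNLOAD_SERVER is unset, so every *SourceConfig.cmake in this change can be redirected to an internal mirror with a single environment variable. A hedged sketch (mirror URL and driver name are placeholders):

    # Fetch all external sources from a mirror instead of github.com.
    export EXTERNAL_DOWNLOAD_SERVER="https://git-mirror.example.com"
    ./nnfw configure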
diff --git a/infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake b/infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake
new file mode 100644
index 000000000..635249f64
--- /dev/null
+++ b/infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake
@@ -0,0 +1,13 @@
+function(_OpcodesSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+  set(OPCODES_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/Opcodes/archive/6e2b0cd9f1403ecaf164dea7019dd54db5aea252.tar.gz)
+  ExternalSource_Get("python_opcodes" ${DOWNLOAD_NNPACK} ${OPCODES_URL})
+
+ set(PYTHON_OPCODES_SOURCE_DIR ${python_opcodes_SOURCE_DIR} PARENT_SCOPE)
+ set(PYTHON_OPCODES_SOURCE_FOUND ${python_opcodes_SOURCE_GET} PARENT_SCOPE)
+endfunction(_OpcodesSource_import)
+
+_OpcodesSource_import()
diff --git a/infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake b/infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake
new file mode 100644
index 000000000..0f208cd55
--- /dev/null
+++ b/infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_PSIMDSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(PSIMD_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/psimd/archive/90a938f30ba414ada2f4b00674ee9631d7d85e19.tar.gz)
+
+ ExternalSource_Get("PSIMD" ${DOWNLOAD_NNPACK} ${PSIMD_URL})
+
+ set(PSIMD_SOURCE_DIR ${PSIMD_SOURCE_DIR} PARENT_SCOPE)
+ set(PSIMD_SOURCE_FOUND ${PSIMD_SOURCE_GET} PARENT_SCOPE)
+endfunction(_PSIMDSource_import)
+
+_PSIMDSource_import()
diff --git a/infra/nnfw/cmake/packages/PeachpySourceConfig.cmake b/infra/nnfw/cmake/packages/PeachpySourceConfig.cmake
new file mode 100644
index 000000000..4cfd682c7
--- /dev/null
+++ b/infra/nnfw/cmake/packages/PeachpySourceConfig.cmake
@@ -0,0 +1,31 @@
+function(_PeachpySource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(PEACHPY_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/PeachPy/archive/01d15157a973a4ae16b8046313ddab371ea582db.tar.gz)
+
+ ExternalSource_Get("python_peachpy" ${DOWNLOAD_NNPACK} ${PEACHPY_URL})
+  find_package(PythonInterp)
+
+ nnfw_find_package(SixSource REQUIRED)
+ nnfw_find_package(Enum34Source REQUIRED)
+ nnfw_find_package(OpcodesSource REQUIRED)
+
+ # Generate opcodes:
+  set(ENV{PYTHONPATH} ${python_peachpy_SOURCE_DIR}:${PYTHON_SIX_SOURCE_DIR}:${PYTHON_ENUM_SOURCE_DIR}:${PYTHON_OPCODES_SOURCE_DIR})
+  execute_process(COMMAND ${PYTHON_EXECUTABLE} ./codegen/x86_64.py
+                  WORKING_DIRECTORY ${python_peachpy_SOURCE_DIR}
+                  RESULT_VARIABLE BUILT_PP)
+
+ if(NOT BUILT_PP EQUAL 0)
+ # Mark PYTHON_PEACHPY_SOURCE_FOUND as FALSE if source generation fails
+ set(PYTHON_PEACHPY_SOURCE_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT BUILT_PP EQUAL 0)
+
+ set(PYTHON_PEACHPY_SOURCE_DIR ${python_peachpy_SOURCE_DIR} PARENT_SCOPE)
+ set(PYTHON_PEACHPY_SOURCE_FOUND ${python_peachpy_SOURCE_GET} PARENT_SCOPE)
+endfunction(_PeachpySource_import)
+
+_PeachpySource_import()
diff --git a/infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake b/infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake
new file mode 100644
index 000000000..0c3b61ac4
--- /dev/null
+++ b/infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_pthreadpoolSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(PTHREADPOOL_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/pthreadpool/archive/6673a4c71fe35e077c6843a74017d9c25610c537.tar.gz)
+
+ ExternalSource_Get("pthreadpool" ${DOWNLOAD_NNPACK} ${PTHREADPOOL_URL})
+
+ set(PTHREADPOOL_SOURCE_DIR ${pthreadpool_SOURCE_DIR} PARENT_SCOPE)
+ set(PTHREADPOOL_SOURCE_FOUND ${pthreadpool_SOURCE_GET} PARENT_SCOPE)
+endfunction(_pthreadpoolSource_import)
+
+_pthreadpoolSource_import()
diff --git a/infra/nnfw/cmake/packages/SixSourceConfig.cmake b/infra/nnfw/cmake/packages/SixSourceConfig.cmake
new file mode 100644
index 000000000..309ead302
--- /dev/null
+++ b/infra/nnfw/cmake/packages/SixSourceConfig.cmake
@@ -0,0 +1,14 @@
+function(_SIXSource_import)
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+  set(SIX_URL ${EXTERNAL_DOWNLOAD_SERVER}/benjaminp/six/archive/1.11.0.tar.gz)
+
+  ExternalSource_Get("python_six" ${DOWNLOAD_NNPACK} ${SIX_URL})
+
+ set(PYTHON_SIX_SOURCE_DIR ${python_six_SOURCE_DIR} PARENT_SCOPE)
+ set(PYTHON_SIX_SOURCE_FOUND ${python_six_SOURCE_GET} PARENT_SCOPE)
+endfunction(_SIXSource_import)
+
+_SIXSource_import()
diff --git a/infra/nnfw/cmake/packages/TensorFlowLite/CMakeLists.txt b/infra/nnfw/cmake/packages/TensorFlowLite/CMakeLists.txt
new file mode 100644
index 000000000..93676525a
--- /dev/null
+++ b/infra/nnfw/cmake/packages/TensorFlowLite/CMakeLists.txt
@@ -0,0 +1,64 @@
+set(TENSORFLOW_LITE_BASE ${TensorFlowSource_DIR}/tensorflow/lite)
+
+#
+# Tensorflow Lite library
+#
+file(GLOB TFLITE_CORE_SRCS "${TENSORFLOW_LITE_BASE}/*.c" "${TENSORFLOW_LITE_BASE}/*.cc" "${TENSORFLOW_LITE_BASE}/core/*.cc")
+file(GLOB TFLITE_CORE_TESTS "${TENSORFLOW_LITE_BASE}/*test*.cc")
+list(REMOVE_ITEM TFLITE_CORE_SRCS ${TFLITE_CORE_TESTS})
+
+file(GLOB_RECURSE TFLITE_KERNEL_SRCS "${TENSORFLOW_LITE_BASE}/kernels/*.cc")
+file(GLOB_RECURSE TFLITE_KERNEL_TESTS "${TENSORFLOW_LITE_BASE}/kernels/*test*.cc")
+list(REMOVE_ITEM TFLITE_KERNEL_SRCS ${TFLITE_KERNEL_TESTS})
+
+file(GLOB TFLITE_LIB_SRCS "${TENSORFLOW_LITE_BASE}/c/*.c" "${TENSORFLOW_LITE_BASE}/c/*.cc")
+file(GLOB TFLITE_LIB_TESTS "${TENSORFLOW_LITE_BASE}/c/*test*.cc")
+list(REMOVE_ITEM TFLITE_LIB_SRCS ${TFLITE_LIB_TESTS})
+
+file(GLOB TFLITE_API_SRCS "${TENSORFLOW_LITE_BASE}/core/api/*.c" "${TENSORFLOW_LITE_BASE}/core/api/*.cc")
+file(GLOB TFLITE_API_TESTS "${TENSORFLOW_LITE_BASE}/core/api/*test*.cc")
+list(REMOVE_ITEM TFLITE_API_SRCS ${TFLITE_API_TESTS})
+
+file(GLOB TFLITE_PROFILING_SRCS "${TENSORFLOW_LITE_BASE}/profiling/*.cc")
+file(GLOB TFLITE_PROFILING_TESTS "${TENSORFLOW_LITE_BASE}/profiling/*test*.cc")
+list(REMOVE_ITEM TFLITE_PROFILING_SRCS ${TFLITE_PROFILING_TESTS})
+
+# We will use our own BuiltinOpResolver
+list(REMOVE_ITEM TFLITE_KERNEL_SRCS "${TENSORFLOW_LITE_BASE}/kernels/register.cc")
+# We will use our own summarizer
+list(REMOVE_ITEM TFLITE_PROFILING_SRCS "${TENSORFLOW_LITE_BASE}/profiling/profile_summarizer.cc")
+list(APPEND TFLITE_SRCS ${TFLITE_CORE_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_KERNEL_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_LIB_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_API_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_PROFILING_SRCS})
+
+list(APPEND TFLITE_SRCS "${FarmhashSource_DIR}/src/farmhash.cc")
+
+list(APPEND TFLITE_INCLUDES "${TensorFlowSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${AbslSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${GEMMLowpSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${FarmhashSource_DIR}/src")
+list(APPEND TFLITE_INCLUDES "${FlatBuffersSource_DIR}/include")
+
+if(NEON2SSESource_FOUND)
+ list(APPEND TFLITE_INCLUDES "${NEON2SSESource_DIR}")
+endif(NEON2SSESource_FOUND)
+
+# These kernels are not used in nnfw
+## spectrogram
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/audio_spectrogram.cc")
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/audio_spectrogram_test.cc")
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/spectrogram.cc")
+## mfcc
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc.cc")
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc_dct.cc")
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc_mel_filterbank.cc")
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/mfcc.cc")
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/mfcc_test.cc")
+
+add_library(tensorflow-lite STATIC ${TFLITE_SRCS})
+target_include_directories(tensorflow-lite SYSTEM PUBLIC ${TFLITE_INCLUDES})
+target_compile_definitions(tensorflow-lite PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
+set_property(TARGET tensorflow-lite PROPERTY POSITION_INDEPENDENT_CODE ON)
+target_link_libraries(tensorflow-lite eigen ${LIB_PTHREAD} dl)
diff --git a/infra/nnfw/cmake/packages/TensorFlowLiteConfig.cmake b/infra/nnfw/cmake/packages/TensorFlowLiteConfig.cmake
new file mode 100644
index 000000000..4dbc1d14c
--- /dev/null
+++ b/infra/nnfw/cmake/packages/TensorFlowLiteConfig.cmake
@@ -0,0 +1,69 @@
+# NOTE This line prevents multiple definitions of tensorflow-lite target
+if(TARGET tensorflow-lite)
+ set(TensorFlowLite_FOUND TRUE)
+ return()
+endif(TARGET tensorflow-lite)
+
+if(BUILD_TENSORFLOW_LITE)
+ macro(return_unless VAR)
+ if(NOT ${VAR})
+      set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ${VAR})
+ endmacro(return_unless)
+
+ # Required packages
+ nnfw_find_package(AbslSource QUIET)
+ return_unless(AbslSource_FOUND)
+ nnfw_find_package(Eigen QUIET)
+ return_unless(Eigen_FOUND)
+ nnfw_find_package(FarmhashSource QUIET)
+ return_unless(FarmhashSource_FOUND)
+ nnfw_find_package(FlatBuffersSource QUIET)
+ return_unless(FlatBuffersSource_FOUND)
+ nnfw_find_package(GEMMLowpSource QUIET)
+ return_unless(GEMMLowpSource_FOUND)
+ nnfw_find_package(TensorFlowSource QUIET)
+ return_unless(TensorFlowSource_FOUND)
+
+ # Optional packages
+ nnfw_find_package(NEON2SSESource QUIET)
+
+ nnfw_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/TensorFlowLite" tflite)
+
+ set(TensorFlowLite_FOUND TRUE)
+ return()
+endif(BUILD_TENSORFLOW_LITE)
+
+# Use pre-built TensorFlow Lite
+find_path(TFLITE_INCLUDE_DIR NAMES tensorflow/lite/interpreter.h)
+find_library(TFLITE_LIB NAMES tensorflow-lite)
+
+if(NOT TFLITE_INCLUDE_DIR)
+ set(TensorFlowLite_FOUND FALSE)
+ return()
+endif(NOT TFLITE_INCLUDE_DIR)
+
+if(NOT TFLITE_LIB)
+ set(TensorFlowLite_FOUND FALSE)
+ return()
+endif(NOT TFLITE_LIB)
+
+message(STATUS "Found TensorFlow Lite: TRUE (include: ${TFLITE_INCLUDE_DIR}, lib: ${TFLITE_LIB}")
+
+# TODO Use IMPORTED target
+add_library(tensorflow-lite INTERFACE)
+target_include_directories(tensorflow-lite SYSTEM INTERFACE ${TFLITE_INCLUDE_DIR})
+target_link_libraries(tensorflow-lite INTERFACE ${TFLITE_LIB})
+
+# Prefer -pthread to -lpthread
+set(THREADS_PREFER_PTHREAD_FLAG TRUE)
+set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
+find_package(Threads QUIET)
+
+if(Threads_FOUND)
+ target_link_libraries(tensorflow-lite INTERFACE ${CMAKE_THREAD_LIBS_INIT})
+endif(Threads_FOUND)
+
+set(TensorFlowLite_FOUND TRUE)
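This config supports two paths: with BUILD_TENSORFLOW_LITE it builds the library from the downloaded TensorFlow sources, otherwise it falls back to a pre-built libtensorflow-lite found on the system. A hedged sketch of both invocations (the `./nnfw` driver name is an assumption):

    # Build TensorFlow Lite from source, downloading the v1.13.1 tarball ...
    ./nnfw configure -DBUILD_TENSORFLOW_LITE=ON -DDOWNLOAD_TENSORFLOW=ON
    # ... or use a pre-built library already installed on the system.
    ./nnfw configure -DBUILD_TENSORFLOW_LITE=OFF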
diff --git a/infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake b/infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake
new file mode 100644
index 000000000..f9fd3af13
--- /dev/null
+++ b/infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_TensorFlowSource_import)
+ if(NOT DOWNLOAD_TENSORFLOW)
+ set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_TENSORFLOW)
+
+ nnfw_include(ExternalSourceTools)
+ nnfw_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(TENSORFLOW_URL ${EXTERNAL_DOWNLOAD_SERVER}/tensorflow/tensorflow/archive/v1.13.1.tar.gz)
+ ExternalSource_Get("tensorflow" ${DOWNLOAD_TENSORFLOW} ${TENSORFLOW_URL})
+
+ set(TensorFlowSource_DIR ${tensorflow_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowSource_FOUND ${tensorflow_SOURCE_GET} PARENT_SCOPE)
+endfunction(_TensorFlowSource_import)
+
+_TensorFlowSource_import()
diff --git a/infra/nnfw/cmake/packages/TensorflowConfig.cmake b/infra/nnfw/cmake/packages/TensorflowConfig.cmake
new file mode 100644
index 000000000..5df000819
--- /dev/null
+++ b/infra/nnfw/cmake/packages/TensorflowConfig.cmake
@@ -0,0 +1,44 @@
+function(_Tensorflow_Import)
+ if(NOT DEFINED TENSORFLOW_DIR)
+ set(TENSORFLOW_DIR ${NNFW_EXTERNALS_DIR}/tensorflow)
+ endif(NOT DEFINED TENSORFLOW_DIR)
+
+ if(NOT DEFINED NSYNC_ARCH)
+ set(NSYNC_ARCH "default")
+ endif(NOT DEFINED NSYNC_ARCH)
+
+  set(TENSORFLOW_MAKEFILE_DIR "${TENSORFLOW_DIR}/tensorflow/makefile")
+  set(TENSORFLOW_GEN_DIR "${TENSORFLOW_MAKEFILE_DIR}/gen")
+  set(TENSORFLOW_DOWNLOADS_DIR "${TENSORFLOW_MAKEFILE_DIR}/downloads")
+
+ if(NOT EXISTS "${TENSORFLOW_GEN_DIR}/lib/libtensorflow-core.a")
+ set(Tensorflow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ if(NOT EXISTS "${TENSORFLOW_DOWNLOADS_DIR}/nsync/builds/${NSYNC_ARCH}.linux.c++11/libnsync.a")
+ set(Tensorflow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif()
+
+ if(NOT TARGET tensorflow-core)
+ add_library(tensorflow-core INTERFACE)
+
+ target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_DIR}")
+ target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_GEN_DIR}/proto")
+ target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_GEN_DIR}/protobuf/include")
+ target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_DOWNLOADS_DIR}/eigen")
+ target_include_directories(tensorflow-core INTERFACE "${TENSORFLOW_DOWNLOADS_DIR}/nsync/public")
+
+ target_link_libraries(tensorflow-core INTERFACE -Wl,--whole-archive "${TENSORFLOW_GEN_DIR}/lib/libtensorflow-core.a" -Wl,--no-whole-archive)
+ target_link_libraries(tensorflow-core INTERFACE "${TENSORFLOW_GEN_DIR}/protobuf/lib/libprotobuf.a")
+ target_link_libraries(tensorflow-core INTERFACE "${TENSORFLOW_DOWNLOADS_DIR}/nsync/builds/${NSYNC_ARCH}.linux.c++11/libnsync.a")
+ target_link_libraries(tensorflow-core INTERFACE ${LIB_PTHREAD} dl)
+
+ message(STATUS "Found Tensorflow (lib: ${TENSORFLOW_GEN_DIR}/lib/libtensorflow-core.a")
+ endif()
+
+ set(Tensorflow_FOUND TRUE PARENT_SCOPE)
+endfunction(_Tensorflow_Import)
+
+_Tensorflow_Import()
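The --whole-archive wrapping above is what keeps TensorFlow's kernels and ops alive: they register themselves through static initializers that an ordinary static link would drop as unreferenced. The import only succeeds when libtensorflow-core.a and libnsync.a already exist from a makefile build; a hedged sketch pointing it at an out-of-tree checkout (paths and driver name are placeholders):

    # Reuse an existing makefile build of TensorFlow for the tensorflow-core target.
    ./nnfw configure \
        -DTENSORFLOW_DIR=/opt/src/tensorflow \
        -DNSYNC_ARCH=default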
diff --git a/infra/nnfw/command/build b/infra/nnfw/command/build
new file mode 100644
index 000000000..c9fe814c8
--- /dev/null
+++ b/infra/nnfw/command/build
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+import "build.configuration"
+
+if [[ ! -d "${BUILD_ALIAS}" ]]; then
+ echo "'${BUILD_ALIAS}' does not exist. Please run 'configure' first"
+ exit 255
+fi
+
+# Set parallel build
+# TODO Use argument instead of environment variable
+HOST_OS=${HOST_OS:-linux}
+NPROCS=${NPROCS:-1}
+PARALLEL_BUILD=${PARALLEL_BUILD:-1}
+
+if [ "${PARALLEL_BUILD}" == "1" ]; then
+ # Get number of processors (linux only for now)
+ if [ "${HOST_OS}" == "linux" ]; then
+ NPROCS="$(grep -c ^processor /proc/cpuinfo)"
+ fi
+fi
+
+cd ${BUILD_ALIAS}
+make -j ${NPROCS} "$@"
diff --git a/infra/nnfw/command/configure b/infra/nnfw/command/configure
new file mode 100644
index 000000000..2f47dfedc
--- /dev/null
+++ b/infra/nnfw/command/configure
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+import "build.configuration"
+
+BUILD_PATH="${NNFW_BUILD_DIR:-${BUILD_ALIAS}}"
+INSTALL_PATH="${NNFW_INSTALL_PREFIX:-${INSTALL_ALIAS}}"
+
+# Create "BUILD_PATH"
+mkdir -p "${BUILD_PATH}"
+
+cd "${BUILD_PATH}"
+cmake "${NNFW_PROJECT_PATH}"/infra/nnfw -DCMAKE_INSTALL_PREFIX="${INSTALL_PATH}" "$@"
diff --git a/infra/nnfw/command/copyright-check b/infra/nnfw/command/copyright-check
new file mode 100644
index 000000000..9401e69d3
--- /dev/null
+++ b/infra/nnfw/command/copyright-check
@@ -0,0 +1,55 @@
+#!/bin/bash
+
+INVALID_EXIT=0
+
+check_copyright_year() {
+  DIRECTORIES_NOT_TO_BE_TESTED=("$@")
+ YEAR=`date +"%Y"`
+ CORRECT_COPYRIGHT="Copyright (c) $YEAR Samsung Electronics Co"
+ FILE_EXT_TO_SEARCH="\.h$\|\.hpp$\|\.cc$\|\.cpp$\|\.cl$"
+
+ # Check newly added files
+  # This also includes files that were moved here from another directory
+ NEW_FILES_OF_SUBDIR_TO_CHECK=$(git whatchanged --diff-filter=A --since "01/01/2019"\
+ --oneline --name-only --pretty=format: . | sort | uniq\
+ | grep $FILE_EXT_TO_SEARCH)
+ ARR=($NEW_FILES_OF_SUBDIR_TO_CHECK)
+ for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
+ if [[ $s = $TEST_DIR* ]]; then
+ skip=${s#$TEST_DIR/}/
+ ARR=(${ARR[*]//$skip*})
+ fi
+ done
+ NEW_FILES_OF_SUBDIR_TO_CHECK=${ARR[*]}
+ if [[ ${#NEW_FILES_OF_SUBDIR_TO_CHECK} -ne 0 ]]; then
+ for f in $NEW_FILES_OF_SUBDIR_TO_CHECK; do
+ [[ -f "$f" ]] || continue
+
+ CREATED_YEAR=$(git log --follow --format=%aD $f | tail -1)
+ [[ $CREATED_YEAR != *"$YEAR"* ]] && continue
+
+ COPYRIGHT_YEAR=$(sed -rn '0,/.*Copyright \(c\) ([^ ]+).*/ s//\1/p' $f)
+ if [[ $COPYRIGHT_YEAR != $YEAR ]]; then
+ [[ -z "$COPYRIGHT_YEAR" ]] && COPYRIGHT_YEAR="None"
+ echo "Copyright year of $f is incorrect: expected $YEAR, found $COPYRIGHT_YEAR"
+ INVALID_EXIT=1
+ elif ! grep -q "$CORRECT_COPYRIGHT" $f; then
+ echo "Copyright format of $f is incorrect: expected $CORRECT_COPYRIGHT"
+ INVALID_EXIT=1
+ fi
+ done
+ fi
+}
+
+DIRECTORIES_NOT_TO_BE_TESTED=()
+
+for DIR_NOT_TO_BE_TESTED in $(find -name '.FORMATDENY' -exec dirname {} \;); do
+ DIRECTORIES_NOT_TO_BE_TESTED+=("$DIR_NOT_TO_BE_TESTED")
+done
+
+check_copyright_year "${DIRECTORIES_NOT_TO_BE_TESTED[@]}"
+
+if [[ $INVALID_EXIT -ne 0 ]]; then
+ echo "[FAILED] Invalid copyright check exit."
+ exit 1
+fi
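Directories carrying a .FORMATDENY marker file are collected up front and skipped by the year check, which is useful for vendored or generated sources. A hedged sketch (the directory and driver name are placeholders):

    # Exempt a vendored directory from the copyright check, then re-run it.
    touch externals/acl/.FORMATDENY
    ./nnfw copyright-check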
diff --git a/infra/nnfw/command/docker-run b/infra/nnfw/command/docker-run
new file mode 100644
index 000000000..b523d61a3
--- /dev/null
+++ b/infra/nnfw/command/docker-run
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+import "docker.configuration"
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME "$@"
+EXITCODE=$?
+
+docker_cleanup
+
+if [ $EXITCODE -ne 0 ]; then
+ exit $EXITCODE
+fi
diff --git a/infra/nnfw/command/docker-run-user b/infra/nnfw/command/docker-run-user
new file mode 100644
index 000000000..3b5a96142
--- /dev/null
+++ b/infra/nnfw/command/docker-run-user
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+import "docker.configuration"
+
+DOCKER_RUN_OPTS+=" -u $(id -u):$(id -g)"
+
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME "$@"
+EXITCODE=$?
+
+if [ $EXITCODE -ne 0 ]; then
+ exit $EXITCODE
+fi
diff --git a/infra/nnfw/command/docker-shell b/infra/nnfw/command/docker-shell
new file mode 100644
index 000000000..1413a7ade
--- /dev/null
+++ b/infra/nnfw/command/docker-shell
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+import "docker.configuration"
+
+DOCKER_RUN_OPTS+=" -it"
+docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME "/bin/bash"
+EXITCODE=$?
+
+docker_cleanup
+
+if [ $EXITCODE -ne 0 ]; then
+ exit $EXITCODE
+fi
diff --git a/infra/nnfw/command/doxygen b/infra/nnfw/command/doxygen
new file mode 100644
index 000000000..f455934e4
--- /dev/null
+++ b/infra/nnfw/command/doxygen
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+pushd ${NNFW_PROJECT_PATH} > /dev/null
+
+doxygen infra/nnfw/doxygen/Doxyfile
+
+popd > /dev/null
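Per the OUTPUT_DIRECTORY setting in the Doxyfile added below, the generated documentation lands under docs/doxygen relative to the project root. A hedged usage sketch (driver name assumed):

    # Build the API docs and open the HTML entry page.
    ./nnfw doxygen
    xdg-open docs/doxygen/html/index.html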
diff --git a/infra/nnfw/command/gen-coverage-report b/infra/nnfw/command/gen-coverage-report
new file mode 100644
index 000000000..8fd398db3
--- /dev/null
+++ b/infra/nnfw/command/gen-coverage-report
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# This file is based on https://github.sec.samsung.net/STAR/nncc/pull/80
+
+LCOV_PATH=$(command -v lcov)
+GENHTML_PATH=$(command -v genhtml)
+
+SRC_PREFIX=${SRC_PREFIX:-${NNFW_PROJECT_PATH}}
+
+if [[ -z "${LCOV_PATH}" ]]; then
+ echo "ERROR: 'lcov' is not found"
+ exit 255
+fi
+
+if [[ -z "${GENHTML_PATH}" ]]; then
+ echo "ERROR: 'genhtml' is not found"
+ exit 255
+fi
+
+if [[ -z "${GCOV_PATH}" ]]; then
+ GCOV_PATH=$(command -v gcov)
+ if [[ -z "${GCOV_PATH}" ]]; then
+ echo "ERROR: 'gcov' is not found"
+ exit 255
+ fi
+fi
+
+OUTPUT_PATH="$1"
+
+if [[ -z "${OUTPUT_PATH}" ]]; then
+ OUTPUT_PATH="$NNFW_PROJECT_PATH/coverage"
+fi
+
+if [[ -e "${OUTPUT_PATH}" ]]; then
+ echo "ERROR: '${OUTPUT_PATH}' already exists"
+ exit 255
+fi
+
+mkdir -p "${OUTPUT_PATH}"
+
+RAW_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.raw.info"
+LIBS_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.libs.info"
+INCLUDE_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.include.info"
+RUNTIMES_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.runtimes.info"
+TOOLS_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.tools.info"
+FINAL_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.info"
+HTML_PATH="${OUTPUT_PATH}/html"
+COBERTURA_PATH="${OUTPUT_PATH}/nnfw_coverage.xml"
+
+"${LCOV_PATH}" -c -d "${NNFW_PROJECT_PATH}" --gcov-tool ${GCOV_PATH} -o "${RAW_COVERAGE_INFO_PATH}"
+#"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${LIBS_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/libs/*"
+#"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${INCLUDE_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/include/*"
+"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${RUNTIMES_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/runtimes/*"
+"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${TOOLS_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/tests/tools/*"
+#"${LCOV_PATH}" -a "${LIBS_COVERAGE_INFO_PATH}" -a "${INCLUDE_COVERAGE_INFO_PATH}" \
+# -a "${RUNTIMES_COVERAGE_INFO_PATH}" -a "${TOOLS_COVERAGE_INFO_PATH}" \
+# -o "${FINAL_COVERAGE_INFO_PATH}"
+"${LCOV_PATH}" -a "${RUNTIMES_COVERAGE_INFO_PATH}" -a "${TOOLS_COVERAGE_INFO_PATH}" -o "${FINAL_COVERAGE_INFO_PATH}"
+"${GENHTML_PATH}" "${FINAL_COVERAGE_INFO_PATH}" --output-directory "${HTML_PATH}" ${GENHTML_FLAG:-}
diff --git a/infra/nnfw/command/install b/infra/nnfw/command/install
new file mode 100644
index 000000000..2bacb876b
--- /dev/null
+++ b/infra/nnfw/command/install
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+import "build.configuration"
+
+if [[ ! -d "${BUILD_ALIAS}" ]]; then
+ echo "'${BUILD_ALIAS}' does not exist. Please run 'configure' first"
+ exit 255
+fi
+
+if [[ ! -d "${INSTALL_ALIAS}" ]]; then
+ echo "'${INSTALL_ALIAS}' does not exist. Please run 'configure' first"
+ exit 255
+fi
+
+cd ${BUILD_ALIAS}
+make install
diff --git a/scripts/config/build.configuration b/infra/nnfw/config/build.configuration
index cdbf3bf4b..cdbf3bf4b 100644
--- a/scripts/config/build.configuration
+++ b/infra/nnfw/config/build.configuration
diff --git a/infra/nnfw/config/docker.configuration b/infra/nnfw/config/docker.configuration
new file mode 100644
index 000000000..b7e9ad30f
--- /dev/null
+++ b/infra/nnfw/config/docker.configuration
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnas}
+echo "Using docker image ${DOCKER_IMAGE_NAME}"
+
+if [ -z "`docker images ${DOCKER_IMAGE_NAME}`" ]; then
+ echo "Need docker image!"
+ exit 1
+fi
+
+HOST_PATH="$NNFW_PROJECT_PATH"
+DOCKER_PATH="$NNFW_PROJECT_PATH"
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES+=" -v $HOST_PATH:$DOCKER_PATH"
+
+if [[ ! -z $ENV_FILE ]]; then
+ DOCKER_ENV_VARS+=" --env-file ${ENV_FILE} "
+fi
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+DOCKER_ENV_VARS+=" -e EXTERNAL_DOWNLOAD_SERVER"
+DOCKER_ENV_VARS+=" -e GENERATE_NNAPI_TESTS"
+
+DOCKER_RUN_OPTS="${DOCKER_OPTS}"
+DOCKER_RUN_OPTS+=" --rm"
+DOCKER_RUN_OPTS+=" -w $DOCKER_PATH"
+
+function docker_cleanup()
+{
+  # Files created during a docker run can end up with a different owner.
+  # This may cause problems; for example, some Jenkins slaves or developers
+  # cannot remove built files due to lack of permission.
+  # To address this issue, change the owner of all files in the mounted
+  # nnfw tree back to the owner of the host directory.
+ NNFW_OWNER_UID=$(stat -c "%u" $HOST_PATH)
+ NNFW_OWNER_GID=$(stat -c "%g" $HOST_PATH)
+
+ CMD="chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID $DOCKER_PATH"
+ docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
+}
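This configuration is sourced by the docker-run, docker-run-user, and docker-shell commands above; the image, extra environment, and mounts all come from the caller's environment. A hedged sketch (image tag, env file, and driver name are placeholders):

    # Run a containerized build with a specific image and an env file.
    DOCKER_IMAGE_NAME=nnas:latest \
    ENV_FILE=./ci.env \
    ./nnfw docker-run ./nnfw build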
diff --git a/infra/nnfw/config/gbs.conf b/infra/nnfw/config/gbs.conf
new file mode 100644
index 000000000..515cadaba
--- /dev/null
+++ b/infra/nnfw/config/gbs.conf
@@ -0,0 +1,20 @@
+[general]
+# Current profile name, which should match a profile section name
+profile = profile.tizen
+
+[profile.tizen]
+user=obs_viewer
+obs = obs.tizen
+repos = repo.tizen_base,repo.tizen_mobile
+buildroot = /home/GBS-ROOT/
+
+[obs.tizen]
+url = http://api.tizen.org
+
+[repo.tizen_mobile]
+url = http://download.tizen.org/snapshots/tizen/unified/latest/repos/standard/packages/
+
+[repo.tizen_base]
+url = http://download.tizen.org/snapshots/tizen/base/latest/repos/standard/packages/
+
+
diff --git a/infra/nnfw/doxygen/Doxyfile b/infra/nnfw/doxygen/Doxyfile
new file mode 100644
index 000000000..c04a7bb0f
--- /dev/null
+++ b/infra/nnfw/doxygen/Doxyfile
@@ -0,0 +1,2502 @@
+# Doxyfile 1.8.13
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all text
+# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
+# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
+# for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME = nnfw
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER =
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF =
+
+# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
+# in the documentation. The maximum height of the logo should not exceed 55
+# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
+# the logo to the output directory.
+
+PROJECT_LOGO =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY = docs/doxygen
+
+# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS = NO
+
+# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
+# characters to appear in the names of generated files. If set to NO, non-ASCII
+# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
+# U+3044.
+# The default value is: NO.
+
+ALLOW_UNICODE_NAMES = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC = YES
+
+# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity): The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF = "The $name class" \
+ "The $name widget" \
+ "The $name file" \
+ is \
+ provides \
+ specifies \
+ contains \
+ represents \
+ a \
+ an \
+ the
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB = NO
+
+# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
+# before files name in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used
+# The default value is: YES.
+
+FULL_PATH_NAMES = YES
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH = ./
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names, as on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that Rational Rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
+# page for each member. If set to NO, the documentation of a member will be part
+# of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE = 4
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines.
+
+ALIASES =
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
+# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
+# Fortran. In the latter case the parser tries to guess whether the code is fixed
+# or free formatted code, this is the default for Fortran type files), VHDL. For
+# instance to make doxygen treat .inc files as Fortran files (default is PHP),
+# and .f files as C (default is Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING =
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT = YES
+
+# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
+# to that level are automatically included in the table of contents, even if
+# they do not have an id attribute.
+# Note: This feature currently applies only to Markdown headings.
+# Minimum value: 0, maximum value: 99, default value: 0.
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
+
+TOC_INCLUDE_HEADINGS = 0
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word or
+# globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen to replace the get and set methods by a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC = NO
+
+# If one adds a struct or class to a group and this option is enabled, then also
+# any nested class or struct is added to the same group. By default this option
+# is disabled and one has to add nested compounds explicitly via \ingroup.
+# The default value is: NO.
+
+GROUP_NESTED_COMPOUNDS = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE = 2
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL = YES
+
+# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE = NO
+
+# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO,
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. If set to YES, local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous namespace
+# are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO, these classes will be included in the various overviews. This option
+# has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO, these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO, these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES, upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES = NO
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+
+HIDE_COMPOUND_REFERENCE= NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
+# list. This list is created by putting \todo commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
+# list. This list is created by putting \test commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS =
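+
+# As a hypothetical illustration (not enabled in this configuration),
+# documentation wrapped in \if internal ... \endif blocks would only be
+# generated if this tag were set to:
+#   ENABLED_SECTIONS = internal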
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES, the
+# list will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing
+# (via popen()) the command:
+#
+# <command> <input-file>
+#
+# where <command> is the value of the FILE_VERSION_FILTER tag, and <input-file>
+# is the name of an input file provided by doxygen. Whatever the program writes
+# to standard output is used as the file version. For an example see the
+# documentation.
+
+FILE_VERSION_FILTER =
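+
+# A hypothetical example (not used by this project), assuming git is available
+# on the search path; doxygen appends the input file name to the command:
+#   FILE_VERSION_FILTER = "git log -n 1 --pretty=format:%h --"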
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE =
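+
+# A hypothetical example (not used here): generate the default layout with
+#   doxygen -l DoxygenLayout.xml
+# edit it as needed, and then point this tag at the result:
+#   LAYOUT_FILE = DoxygenLayout.xml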
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info how to create references.
+
+CITE_BIB_FILES =
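+
+# A hypothetical example (not used here), assuming a references.bib file in
+# the project root:
+#   CITE_BIB_FILES = references.bib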
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR = YES
+
+# The WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO, doxygen will only warn about wrong or incomplete
+# parameter documentation, but not about the absence of documentation.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC = NO
+
+# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
+# a warning is encountered.
+# The default value is: NO.
+
+WARN_AS_ERROR = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT = "$file:$line: $text"
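+
+# A hypothetical alternative (not used here) that some editors and IDEs parse
+# more readily when jumping to warnings:
+#   WARN_FORMAT = "$file($line): $text"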
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
+
+INPUT =
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# read by doxygen.
+#
+# If left blank the following patterns are tested: *.c, *.cc, *.cxx, *.cpp,
+# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
+# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
+# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
+# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf.
+
+FILE_PATTERNS = *.c \
+ *.cc \
+ *.cxx \
+ *.cpp \
+ *.c++ \
+ *.java \
+ *.ii \
+ *.ixx \
+ *.ipp \
+ *.i++ \
+ *.inl \
+ *.idl \
+ *.ddl \
+ *.odl \
+ *.h \
+ *.hh \
+ *.hxx \
+ *.hpp \
+ *.h++ \
+ *.cs \
+ *.d \
+ *.php \
+ *.php4 \
+ *.php5 \
+ *.phtml \
+ *.inc \
+ *.m \
+ *.markdown \
+ *.md \
+ *.mm \
+ *.dox \
+ *.py \
+ *.pyw \
+ *.f90 \
+ *.f95 \
+ *.f03 \
+ *.f08 \
+ *.f \
+ *.for \
+ *.tcl \
+ *.vhd \
+ *.vhdl \
+ *.ucf \
+ *.qsf
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE = Product/ \
+ report/ \
+ runtimes/contrib/ \
+ docs/doxygen/html/ \
+ externals/ \
+ packaging/ \
+ runtimes/pure_arm_compute/ \
+ tests/framework/ \
+ tests/nnapi/src/generated/ \
+ tests/nnapi/specs/ \
+ tools/
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH =
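+
+# A hypothetical example (not used here), assuming code snippets kept under a
+# docs/examples directory:
+#   EXAMPLE_PATH = docs/examples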
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
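+
+# A hypothetical example (not used here), assuming a project-provided script
+# that rewrites custom markers before doxygen parses each file:
+#   INPUT_FILTER = "python scripts/doxy_filter.py"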
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on, for instance,
+# GitHub and want to reuse the introduction page for the doxygen output as
+# well.
+
+USE_MDFILE_AS_MAINPAGE = docs/nnfw/roadmap.md
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER = YES
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see http://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS = YES
+
+# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the
+# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the
+# cost of reduced performance. This can be particularly helpful with
+# template-rich C++ code for which doxygen's built-in parser lacks the
+# necessary type
+# information.
+# Note: The availability of this option depends on whether or not doxygen was
+# generated with the -Duse-libclang=ON option for CMake.
+# The default value is: NO.
+
+CLANG_ASSISTED_PARSING = NO
+
+# If clang assisted parsing is enabled you can provide the compiler with command
+# line options that you would normally use when invoking the compiler. Note that
+# the include paths will already be set by doxygen for the files and directories
+# specified with INPUT and INCLUDE_PATH.
+# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
+
+CLANG_OPTIONS =
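+
+# A hypothetical example (not used here), assuming the sources require C++14:
+#   CLANG_OPTIONS = -std=c++14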
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX = YES
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX =
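+
+# A hypothetical example (not used here): with a common "nnfw" prefix, classes
+# would be indexed under the remainder of their names:
+#   IGNORE_PREFIX = nnfw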
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# For the HTML output to be valid, the header file must include any scripts and
+# style sheets that doxygen needs, which depend on the configuration options
+# used (e.g. the setting GENERATE_TREEVIEW). It is highly recommended to start
+# with a
+# default header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER =
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# cascading style sheets that are included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list). For an example see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the style sheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# is purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to YES can help to show when doxygen was last run and thus if the
+# documentation is up to date.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP = NO
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries to 1 will produce a fully collapsed tree by default. 0 is a special
+# value representing an infinite number of entries and will result in a fully
+# expanded tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION =
+
+# The GENERATE_CHI flag controls if a separate .chi index file is generated
+# (YES) or included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated; together with the HTML files, they form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files need
+# to be copied into the plugins directory of Eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying, Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH = 250
+
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly by IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files
+# in the HTML output directory before the changes take effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output.
+# When enabled you may also need to install MathJax separately and configure
+# the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow; in that
+# case enabling SERVER_BASED_SEARCH may provide a better solution. It is
+# possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
+# setting. When disabled, doxygen will generate a PHP script for searching and
+# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
+# and searching needs to be provided by external tools. See the section
+# "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL =
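+
+# A hypothetical example (not used here), pointing at a locally hosted copy of
+# the doxysearch.cgi tool shipped with doxygen:
+#   SEARCHENGINE_URL = http://localhost/cgi-bin/doxysearch.cgi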
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id to
+# a relative location where the documentation can be found. The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when enabling USE_PDFLATEX this option is only used for generating
+# bitmaps for formulas in the HTML output, but not in the Makefile that is
+# written to the output directory.
+# The default file is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME = makeindex
+
+# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE = a4
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. The package can be specified just
+# by its name or with the correct syntax as to be used with the LaTeX
+# \usepackage command. To get the times font for instance you can specify:
+# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
+# To use the option intlimits with the amsmath package you can specify:
+# EXTRA_PACKAGES=[intlimits]{amsmath}
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
+# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
+# string, for the replacement values of the other commands the user is referred
+# to HTML_HEADER.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer. See
+# LATEX_HEADER for more information on how to generate a default footer and what
+# special commands can be used inside the footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER =
+
+# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# LaTeX style sheets that are included after the standard style sheets created
+# by doxygen. Using this option one can overrule certain style aspects. Doxygen
+# will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_STYLESHEET =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS = YES
+
+# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES to get
+# higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES = NO
+
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_SOURCE_CODE = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE = plain
+
+# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_TIMESTAMP = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT = rtf
+
+# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's config
+# file, i.e. a series of assignments. You only have to provide replacements;
+# missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE =
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's config file. A template extensions file can be generated
+# using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE =
+
+# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
+# with syntax highlighting in the RTF output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_SOURCE_CODE = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION = .3
+
+# The MAN_SUBDIR tag determines the name of the directory created within
+# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
+# MAN_EXTENSION with the initial . removed.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_SUBDIR =
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT = xml
+
+# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT = docbook
+
+# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
+# program listings (including syntax highlighting and cross-referencing
+# information) to the DOCBOOK output. Note that enabling this will significantly
+# increase the size of the DOCBOOK output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_PROGRAMLISTING = NO
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
+# AutoGen Definitions (see http://autogen.sf.net) file that captures the
+# structure of the code including all documentation. Note that this feature is
+# still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD = NO
+
+# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX = NO
+
+# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO, the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING = YES
+
+# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
+# in the source code. If set to NO, only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF = NO
+
+# If the SEARCH_INCLUDES tag is set to YES, the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS =
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+PREDEFINED =
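+
+# A hypothetical example (not part of this configuration): an empty definition
+# hides an export attribute from the parser, while a name=value pair pins a
+# feature flag for the preprocessor, e.g.
+# PREDEFINED = NNFW_API_EXPORT= \
+#              USE_NEON=1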
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED =
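+
+# Hypothetical usage: with MACRO_EXPANSION and EXPAND_ONLY_PREDEF both set to
+# YES, naming a macro here expands only that macro, e.g.
+# EXPAND_AS_DEFINED = DECLARE_CL_KERNEL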
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have
+# an all uppercase name, and do not end with a semicolon. Such function macros
+# are typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES =
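+
+# For illustration only (this configuration sets no tag file): linking against
+# an externally generated tag file could look like
+# TAGFILES = ../libfoo/libfoo.tag=https://example.com/libfoo/docs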
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE =
+
+# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
+# the class index. If set to NO, only the inherited external classes will be
+# listed.
+# The default value is: NO.
+
+ALLEXTERNALS = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS = YES
+
+# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of 'which perl').
+# The default file (with absolute path) is: /usr/bin/perl.
+
+PERL_PATH = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
+
+CLASS_DIAGRAMS = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see:
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH =
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH =
+
+# If set to YES the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO.
+# The default value is: NO.
+
+HAVE_DOT = YES
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS = 0
+
+# When you want a differently looking font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTNAME = Calibri
+
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTSIZE = 10
+
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH =
+
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LIMIT_NUM_FIELDS = 10
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command. Disabling a call graph can be
+# accomplished by means of the command \hidecallgraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH = YES
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command. Disabling a caller graph can be
+# accomplished by means of the command \hidecallergraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH = YES
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
+# graphical hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. For an explanation of the image formats see the section
+# output formats in the documentation of the dot tool (Graphviz (see:
+# http://www.graphviz.org/)).
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
+# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
+# png:gdiplus:gdiplus.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INTERACTIVE_SVG = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH = /usr/local/bin/dot
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS =
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS =
+
+# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
+# path where java can find the plantuml.jar file. If left blank, it is assumed
+# PlantUML is not used or called during a preprocessing step. Doxygen will
+# generate a warning when it encounters a \startuml command in this case and
+# will not generate output for the diagram.
+
+PLANTUML_JAR_PATH =
+
+# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
+# configuration file for plantuml.
+
+PLANTUML_CFG_FILE =
+
+# When using plantuml, the specified paths are searched for files specified by
+# the !include statement in a plantuml block.
+
+PLANTUML_INCLUDE_PATH =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES, doxygen will not show the graph at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lay
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_TRANSPARENT = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS = NO
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND = YES
+
+# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP = YES
diff --git a/infra/scripts/build_nnpkg.sh b/infra/scripts/build_nnpkg.sh
new file mode 100644
index 000000000..221c7210f
--- /dev/null
+++ b/infra/scripts/build_nnpkg.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Test suite: nnpkg-test-suite.tar.gz
+
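+# Refuse to run when sourced: BASH_SOURCE[0] differs from $0 only in that
+# case, and the pushd/popd and exit handling below assume a dedicated shell.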
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="${CURRENT_PATH}/../../"
+
+pushd ${ROOT_PATH} > /dev/null
+
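+# Collect the tf2tflite test artifacts (UNIT*), strip everything after the
+# first dot to get one entry per test case, and convert each nncc test case
+# into an nnpackage test case under nnpkg-tcs/.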
+for f in `find build/compiler/tf2tflite -name "UNIT*" | cut -d'.' -f1 | sort | uniq`;
+do
+ tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/nncc-tc-to-nnpkg-tc.sh -o nnpkg-tcs -i ${f%/*} $(basename $f);
+done
+
+tar -zcf nnpkg-test-suite.tar.gz tools/nnpackage_tool/nnpkg_test nnpkg-tcs
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_benchmark_model.sh b/infra/scripts/docker_build_cross_arm_benchmark_model.sh
new file mode 100644
index 000000000..0bc20b142
--- /dev/null
+++ b/infra/scripts/docker_build_cross_arm_benchmark_model.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare rootfs
+if [ ! -d "$ROOTFS_DIR" ]; then
+ echo "It will use default rootfs path"
+else
+ DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
+ DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+fi
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e BENCHMARK_ACL_BUILD=1"
+DOCKER_ENV_VARS+=" -e BUILD_TYPE=Release"
+
+pushd $ROOT_PATH > /dev/null
+
+# TODO use command instead of makefile
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
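+# OPTIONS carries extra CMake defines picked up by the Makefile-driven build:
+# this configuration builds the pure ARM Compute path and the TFLite
+# benchmark_model tool, with neurun and the tflite loader disabled.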
+CMD="export OPTIONS='-DBUILD_PURE_ARM_COMPUTE=ON -DBUILD_NEURUN=OFF -DBUILD_TFLITE_BENCHMARK_MODEL=ON -DBUILD_TFLITE_LOADER=OFF' && \
+ cp -nv Makefile.template Makefile && \
+ make all install build_test_suite"
+./nnfw docker-run bash -c "$CMD"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_neurun.sh b/infra/scripts/docker_build_cross_arm_neurun.sh
new file mode 100644
index 000000000..c1014c57c
--- /dev/null
+++ b/infra/scripts/docker_build_cross_arm_neurun.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare rootfs
+if [ ! -d "$ROOTFS_DIR" ]; then
+ echo "It will use default rootfs path"
+else
+ DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
+ DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+fi
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+
+pushd $ROOT_PATH > /dev/null
+
+# TODO use command instead of makefile
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+CMD="cp -nv Makefile.template Makefile && \
+ make all install build_test_suite"
+./nnfw docker-run bash -c "$CMD"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_neurun_release.sh b/infra/scripts/docker_build_cross_arm_neurun_release.sh
new file mode 100644
index 000000000..c8c3e997e
--- /dev/null
+++ b/infra/scripts/docker_build_cross_arm_neurun_release.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare rootfs
+if [ ! -d "$ROOTFS_DIR" ]; then
+ echo "It will use default rootfs path"
+else
+ DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
+ DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+fi
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e BUILD_TYPE=release"
+
+pushd $ROOT_PATH > /dev/null
+
+# TODO use command instead of makefile
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+CMD="cp -nv Makefile.template Makefile && \
+ make all install build_test_suite"
+./nnfw docker-run bash -c "$CMD"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_pacl.sh b/infra/scripts/docker_build_cross_arm_pacl.sh
new file mode 100644
index 000000000..2091287c0
--- /dev/null
+++ b/infra/scripts/docker_build_cross_arm_pacl.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare rootfs
+if [ ! -d "$ROOTFS_DIR" ]; then
+ echo "It will use default rootfs path"
+else
+ DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
+ DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+fi
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+
+pushd $ROOT_PATH > /dev/null
+
+# TODO use command instead of makefile
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+CMD="export OPTIONS='-DBUILD_NEURUN=OFF -DBUILD_PURE_ARM_COMPUTE=ON -DBUILD_TFLITE_LOADER=OFF' && \
+ cp -nv Makefile.template Makefile && \
+ make all install build_test_suite"
+./nnfw docker-run bash -c "${CMD}"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_pacl_release.sh b/infra/scripts/docker_build_cross_arm_pacl_release.sh
new file mode 100644
index 000000000..fdac80071
--- /dev/null
+++ b/infra/scripts/docker_build_cross_arm_pacl_release.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare rootfs
+if [ ! -d "$ROOTFS_DIR" ]; then
+ echo "It will use default rootfs path"
+else
+ DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
+ DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+fi
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e BUILD_TYPE=release"
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+pushd $ROOT_PATH > /dev/null
+
+# TODO use command instead of makefile
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+CMD="export OPTIONS='-DBUILD_NEURUN=OFF -DBUILD_PURE_ARM_COMPUTE=ON -DBUILD_TFLITE_LOADER=OFF' && \
+ cp -nv Makefile.template Makefile && \
+ make all install build_test_suite"
+./nnfw docker-run bash -c "${CMD}"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_coverage.sh b/infra/scripts/docker_build_cross_coverage.sh
new file mode 100644
index 000000000..661e85b00
--- /dev/null
+++ b/infra/scripts/docker_build_cross_coverage.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare rootfs
+if [ ! -d "$ROOTFS_DIR" ]; then
+ echo "It will use default rootfs path"
+else
+ DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
+ DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+fi
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e COVERAGE_BUILD=1"
+
+pushd $ROOT_PATH > /dev/null
+
+# TODO use command instead of makefile
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+CMD="cp -nv Makefile.template Makefile && \
+ make all install build_coverage_suite"
+./nnfw docker-run bash -c "$CMD"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_test_x64.sh b/infra/scripts/docker_build_test_x64.sh
new file mode 100644
index 000000000..a6078b755
--- /dev/null
+++ b/infra/scripts/docker_build_test_x64.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+pushd $ROOT_PATH > /dev/null
+
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+# Disable nnpackage_run build: the build tool used on CI mismatches the installed hdf5
+CMD="export OPTIONS='-DBUILD_NNPACKAGE_RUN=OFF' && \
+ cp -nv Makefile.template Makefile && \
+ make all install build_test_suite"
+./nnfw docker-run bash -c "$CMD"
+EXIT_CODE=$?
+
+if [ ${EXIT_CODE} -ne 0 ]; then
+ exit ${EXIT_CODE}
+fi
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+set -e
+
+export DOCKER_ENV_VARS=" -e MODELFILE_SERVER=$MODELFILE_SERVER"
+./nnfw docker-run-user bash -c "./infra/scripts/test_x64_neurun_cpu.sh"
+./nnfw docker-run-user bash -c "./infra/scripts/test_neurun_interp.sh"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_tizen_cross.sh b/infra/scripts/docker_build_tizen_cross.sh
new file mode 100644
index 000000000..bcd0378ac
--- /dev/null
+++ b/infra/scripts/docker_build_tizen_cross.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare rootfs
+if [ ! -d "$ROOTFS_DIR" ]; then
+ echo "It will use default rootfs path"
+else
+ DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
+ DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
+fi
+
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
+ DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+DOCKER_ENV_VARS+=" -e TARGET_OS=tizen"
+DOCKER_ENV_VARS+=" -e BUILD_TYPE=release"
+# Disable arm compute build (use rootfs)
+DOCKER_ENV_VARS+=" -e OPTIONS=-DBUILD_ARMCOMPUTE=OFF"
+
+# Mirror server setting
+if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
+ echo "It will not use mirror server"
+fi
+
+pushd $ROOT_PATH > /dev/null
+
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+CMD="cp -nv Makefile.template Makefile && \
+ make all install build_test_suite"
+./nnfw docker-run bash -c "$CMD"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_tizen_gbs.sh b/infra/scripts/docker_build_tizen_gbs.sh
new file mode 100644
index 000000000..501cd3fdd
--- /dev/null
+++ b/infra/scripts/docker_build_tizen_gbs.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+GBS_RPM_DIR=$ROOT_PATH/Product/out/rpm
+mkdir -p $GBS_RPM_DIR
+DOCKER_VOLUMES=" -v $GBS_RPM_DIR:/opt/rpm"
+
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name for tizen gbs build"
+ DOCKER_IMAGE_NAME="nnfw_docker_tizen"
+fi
+
+DOCKER_ENV_VARS=" --privileged"
+
+pushd $ROOT_PATH > /dev/null
+
+CMD="gbs -c $ROOT_PATH/infra/nnfw/config/gbs.conf build \
+ -A armv7l --profile=profile.tizen --clean --include-all --define '$GBS_DEFINE' && \
+ cp -rf /home/GBS-ROOT/local/repos/tizen/armv7l/RPMS/*.rpm /opt/rpm/"
+
+export DOCKER_ENV_VARS
+export DOCKER_VOLUMES
+./nnfw docker-run bash -c "$CMD"
+
+popd > /dev/null
diff --git a/infra/scripts/docker_coverage_report.sh b/infra/scripts/docker_coverage_report.sh
new file mode 100644
index 000000000..c9bd9f1f5
--- /dev/null
+++ b/infra/scripts/docker_coverage_report.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+CMD="GCOV_PATH=arm-linux-gnueabihf-gcov NNAS_WORKSPACE=Product ./nnas gen-coverage-report runtimes &&
+ tar -zcf coverage/coverage_report.tar.gz coverage/html &&
+ python tools/lcov-to-cobertura-xml/lcov_cobertura.py coverage/coverage.info -o coverage/nnfw_coverage.xml"
+
+./nnfw docker-run-user bash -c "$CMD"
+
+popd > /dev/null
diff --git a/infra/scripts/test_arm_neurun_acl_cl.sh b/infra/scripts/test_arm_neurun_acl_cl.sh
new file mode 100644
index 000000000..c41862514
--- /dev/null
+++ b/infra/scripts/test_arm_neurun_acl_cl.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+export OP_BACKEND_ALLOPS=acl_cl
+
+cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux ./Product/out/unittest/nnapi_gtest.skip
+export EXECUTOR=Linear
+source ./tests/scripts/test_driver.sh \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_cl" .
+
+export EXECUTOR=Dataflow
+source ./tests/scripts/test_driver.sh \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_cl" .
+
+export EXECUTOR=Parallel
+source ./tests/scripts/test_driver.sh \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_cl" .
+
+popd > /dev/null
diff --git a/infra/scripts/test_arm_neurun_acl_neon.sh b/infra/scripts/test_arm_neurun_acl_neon.sh
new file mode 100644
index 000000000..3c6e6ce02
--- /dev/null
+++ b/infra/scripts/test_arm_neurun_acl_neon.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+export OP_BACKEND_ALLOPS=acl_neon
+
+cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux.acl_neon ./Product/out/unittest/nnapi_gtest.skip
+export EXECUTOR=Linear
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_neon" .
+
+export EXECUTOR=Dataflow
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_neon" .
+
+export EXECUTOR=Parallel
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_neon" .
+
+popd > /dev/null
diff --git a/infra/scripts/test_arm_neurun_cpu.sh b/infra/scripts/test_arm_neurun_cpu.sh
new file mode 100644
index 000000000..6bf48598c
--- /dev/null
+++ b/infra/scripts/test_arm_neurun_cpu.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+export OP_BACKEND_ALLOPS=cpu
+
+cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux.cpu ./Product/out/unittest/nnapi_gtest.skip
+export EXECUTOR=Linear
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/cpu" .
+
+export EXECUTOR=Dataflow
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/cpu" .
+
+export EXECUTOR=Parallel
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/cpu" .
+
+# Test tflite_loader
+source ./tests/scripts/test_driver.sh \
+ --frameworktest \
+ --framework_driverbin="$ROOT_PATH/Product/out/bin/tflite_loader_test_tool" \
+ --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/cpu" .
+
+popd > /dev/null
diff --git a/infra/scripts/test_arm_neurun_mixed.sh b/infra/scripts/test_arm_neurun_mixed.sh
new file mode 100644
index 000000000..7ba9d2098
--- /dev/null
+++ b/infra/scripts/test_arm_neurun_mixed.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+export EXECUTOR=Linear
+
+# NOTE Fixed backend assignment by type of operation
+# TODO Enhance this with randomized test
+BACKENDS=(cpu acl_cl acl_neon)
+unset OP_BACKEND_ALLOPS
+export OP_BACKEND_Conv2DNode=cpu
+export OP_BACKEND_MaxPool2DNode=acl_cl
+export OP_BACKEND_AvgPool2DNode=acl_neon
+export ACL_LAYOUT=NCHW
+
+# Get the intersection of the framework test list files (each backend has a list)
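+# comm -12 prints only the lines common to its two sorted inputs, so folding
+# it over every backend's list leaves the tests that all backends support.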
+TESTLIST_PREFIX="tests/scripts/neurun_frameworktest_list.armv7l"
+cat $TESTLIST_PREFIX.${BACKENDS[0]}.txt | sort > $TESTLIST_PREFIX.intersect.txt
+for BACKEND in "${BACKENDS[@]}"; do
+ comm -12 <(sort $TESTLIST_PREFIX.intersect.txt) <(sort $TESTLIST_PREFIX.$BACKEND.txt) > $TESTLIST_PREFIX.intersect.next.txt
+ mv $TESTLIST_PREFIX.intersect.next.txt $TESTLIST_PREFIX.intersect.txt
+done
+
+# Run the test
+cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux ./Product/out/unittest/nnapi_gtest.skip
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=$TESTLIST_PREFIX.intersect.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib/neurun:$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/mixed" .
+
+popd > /dev/null
diff --git a/infra/scripts/test_arm_nnpkg.sh b/infra/scripts/test_arm_nnpkg.sh
new file mode 100644
index 000000000..23759a319
--- /dev/null
+++ b/infra/scripts/test_arm_nnpkg.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="${CURRENT_PATH}/../../"
+
+pushd ${ROOT_PATH} > /dev/null
+
+EXITCODE=0
+PKG_LIST=$(cat tools/nnpackage_tool/nnpkg_test/list)
+for f in ${PKG_LIST}
+do
+ tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh -d -i nnpkg-tcs $f
+ EXITCODE_F=$?
+
+ if [ ${EXITCODE_F} -ne 0 ]; then
+ EXITCODE=${EXITCODE_F}
+ fi
+done
+
+popd > /dev/null
+
+exit ${EXITCODE}
diff --git a/infra/scripts/test_arm_pacl.sh b/infra/scripts/test_arm_pacl.sh
new file mode 100644
index 000000000..eb50e2610
--- /dev/null
+++ b/infra/scripts/test_arm_pacl.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux.pacl ./Product/out/unittest/nnapi_gtest.skip
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=tests/scripts/pacl_frameworktest_list.armv7l-linux.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib/pureacl:$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report" .
+
+popd > /dev/null
diff --git a/infra/scripts/test_coverage.sh b/infra/scripts/test_coverage.sh
new file mode 100644
index 000000000..7dd5ece28
--- /dev/null
+++ b/infra/scripts/test_coverage.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+export MODELFILE_SERVER=$MODELFILE_SERVER
+
+if [[ ! -e $ROOT_PATH/tests/scripts/build_path_depth.txt ]]; then
+ echo "Cannot find prefix strip file"
+ exit 1
+fi
+
+set -e
+
+export GCOV_PREFIX_STRIP=`cat $ROOT_PATH/tests/scripts/build_path_depth.txt`
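+# GCOV_PREFIX_STRIP removes this many leading path components from the
+# compile-time object paths when .gcda files are written, so coverage data
+# lands relative to the test location instead of the original build path.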
+
+pushd $ROOT_PATH > /dev/null
+
+./infra/scripts/test_arm_neurun_acl_cl.sh
+./infra/scripts/test_arm_neurun_acl_neon.sh
+./infra/scripts/test_arm_neurun_cpu.sh
+./infra/scripts/test_arm_neurun_mixed.sh
+
+# Enable all logs (acl_cl kernel)
+NEURUN_LOG_ENABLE=1 GRAPH_DOT_DUMP=1 ./infra/scripts/test_arm_neurun_acl_cl.sh
+
+# Interpreter
+./infra/scripts/test_neurun_interp.sh
+
+if [[ -e ${ROOT_PATH}/tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh ]]; then
+ ./infra/scripts/test_arm_nnpkg.sh
+fi
+
+# Pack coverage test data: coverage-data.tar.gz
+find Product -type f \( -iname "*.gcda" -or -iname "*.gcno" \) > include_lists.txt
+tar -zcf coverage-data.tar.gz nnas nnfw infra runtimes tools -T include_lists.txt
+rm -rf include_lists.txt
+
+popd > /dev/null
diff --git a/infra/scripts/test_neurun_interp.sh b/infra/scripts/test_neurun_interp.sh
new file mode 100644
index 000000000..6687e8089
--- /dev/null
+++ b/infra/scripts/test_neurun_interp.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+export DISABLE_COMPILE=1
+
+cp -v ./Product/out/unittest/nnapi_gtest.skip.noarch.interp ./Product/out/unittest/nnapi_gtest.skip
+./tests/scripts/test_driver.sh \
+ --ldlibrarypath=$ROOT_PATH/Product/out/lib --unittest .
diff --git a/infra/scripts/test_tizen_neurun_acl_cl.sh b/infra/scripts/test_tizen_neurun_acl_cl.sh
new file mode 100644
index 000000000..d09895463
--- /dev/null
+++ b/infra/scripts/test_tizen_neurun_acl_cl.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+export OP_BACKEND_ALLOPS=acl_cl
+
+cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-tizen ./Product/out/unittest/nnapi_gtest.skip
+export EXECUTOR=Linear
+source ./tests/scripts/test_driver.sh \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_cl" .
+
+export EXECUTOR=Dataflow
+source ./tests/scripts/test_driver.sh \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_cl" .
+
+export EXECUTOR=Parallel
+source ./tests/scripts/test_driver.sh \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/acl_cl" .
+
+popd > /dev/null
diff --git a/infra/scripts/test_tizen_neurun_mixed.sh b/infra/scripts/test_tizen_neurun_mixed.sh
new file mode 100644
index 000000000..ef1781486
--- /dev/null
+++ b/infra/scripts/test_tizen_neurun_mixed.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+export EXECUTOR=Linear
+
+# NOTE Fixed backend assignment by type of operation
+# TODO Enhance this with randomized test
+BACKENDS=(cpu acl_cl acl_neon)
+unset OP_BACKEND_ALLOPS
+export OP_BACKEND_Conv2DNode=cpu
+export OP_BACKEND_MaxPool2DNode=acl_cl
+export OP_BACKEND_AvgPool2DNode=acl_neon
+export ACL_LAYOUT=NCHW
+
+# Get the intersection of the framework test list files (each backend has a list)
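+# comm -12 prints only the lines common to its two sorted inputs, so folding
+# it over every backend's list leaves the tests that all backends support.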
+TESTLIST_PREFIX="tests/scripts/neurun_frameworktest_list.armv7l"
+cat $TESTLIST_PREFIX.${BACKENDS[0]}.txt | sort > $TESTLIST_PREFIX.intersect.txt
+for BACKEND in "${BACKENDS[@]}"; do
+ comm -12 <(sort $TESTLIST_PREFIX.intersect.txt) <(sort $TESTLIST_PREFIX.$BACKEND.txt) > $TESTLIST_PREFIX.intersect.next.txt
+ mv $TESTLIST_PREFIX.intersect.next.txt $TESTLIST_PREFIX.intersect.txt
+done
+
+# Run the test
+cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-tizen ./Product/out/unittest/nnapi_gtest.skip
+source ./tests/scripts/test_driver.sh \
+ --frameworktest_list_file=$TESTLIST_PREFIX.intersect.txt \
+ --ldlibrarypath="$ROOT_PATH/Product/out/lib/neurun:$ROOT_PATH/Product/out/lib" \
+ --reportdir="$ROOT_PATH/report/mixed" .
+
+popd > /dev/null
diff --git a/infra/scripts/test_x64_neurun_cpu.sh b/infra/scripts/test_x64_neurun_cpu.sh
new file mode 100644
index 000000000..c522ea1fb
--- /dev/null
+++ b/infra/scripts/test_x64_neurun_cpu.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# Model download server setting
+if [[ -z $MODELFILE_SERVER ]]; then
+ echo "Need model file server setting"
+ exit 1
+fi
+
+export BACKENDS=cpu
+export OP_BACKEND_ALLOPS=cpu
+
+./tests/scripts/test_driver.sh \
+ --ldlibrarypath=$ROOT_PATH/Product/out/lib \
+ --frameworktest_list_file=./tests/scripts/neurun_frameworktest_list.x86-64.cpu.txt \
+ --reportdir=$ROOT_PATH/report/ .
diff --git a/infra/scripts/tizen_xu4_test.sh b/infra/scripts/tizen_xu4_test.sh
new file mode 100644
index 000000000..19aa0297e
--- /dev/null
+++ b/infra/scripts/tizen_xu4_test.sh
@@ -0,0 +1,161 @@
+#!/bin/bash
+
+SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+HOST_HOME=$SCRIPT_ROOT/../..
+if [ -z "$TEST_ROOT" ]; then
+ TEST_ROOT=/opt/usr/nnfw-test
+fi
+
+function Usage()
+{
+ echo "Usage: ./tizen_xu4_test.sh --rpm-dir=path/to/rpm-dir --unittest --verification"
+ echo "Usage: ./tizen_xu4_test.sh --test-suite-path=path/to/test-suite.tar.gz --unittest --verification"
+ echo "--rpm-dir : directory containing nnfw.rpm and nnfw-test.rpm"
+ echo "--test-suite-path : filepath to test-suite.tar.gz"
+ echo "--unittest : run unittest"
+ echo "--verification : run verification"
+ echo "--framework : run framework"
+ echo "--gcov-dir : directory to save gcov files"
+}
+
+
+function prepare_rpm_test()
+{
+ echo "======= Test with rpm packages(gbs build) ======="
+ # clean up
+ $SDB_CMD shell rm -rf $TEST_ROOT
+ $SDB_CMD shell mkdir -p $TEST_ROOT
+ # install nnfw nnfw-test rpms
+ for file in $RPM_DIR/*
+ do
+ $SDB_CMD push $file $TEST_ROOT
+ $SDB_CMD shell rpm -Uvh $TEST_ROOT/$(basename $file) --force --nodeps
+ done
+
+ # download tflite model files
+ pushd $HOST_HOME
+ tests/framework/run_test.sh --download=on
+ find tests -name "*.zip" -exec rm {} \;
+ tar -zcf cache.tar.gz tests/framework/cache
+ $SDB_CMD push cache.tar.gz $TEST_ROOT/.
+ rm -rf cache.tar.gz
+ $SDB_CMD shell tar -zxf $TEST_ROOT/cache.tar.gz -C $TEST_ROOT
+}
+
+function prepare_suite_test()
+{
+ echo "======= Test with test-suite(cross build) ======="
+ # clean up
+ $SDB_CMD shell rm -rf $TEST_ROOT
+ $SDB_CMD shell mkdir -p $TEST_ROOT
+
+ # install test-suite
+ $SDB_CMD push $TEST_SUITE_PATH $TEST_ROOT/$(basename $TEST_SUITE_PATH)
+ $SDB_CMD shell tar -zxf $TEST_ROOT/$(basename $TEST_SUITE_PATH) -C $TEST_ROOT
+
+ # download tflite model files
+ pushd $HOST_HOME
+ tests/framework/run_test.sh --download=on
+ find tests -name "*.zip" -exec rm {} \;
+ tar -zcf cache.tar.gz tests/framework/cache
+ $SDB_CMD push cache.tar.gz $TEST_ROOT/.
+ rm -rf cache.tar.gz
+ $SDB_CMD shell tar -zxf $TEST_ROOT/cache.tar.gz -C $TEST_ROOT
+}
+
+
+# Parse command argv
+for i in "$@"
+do
+ case $i in
+ -h|--help|help)
+ Usage
+ exit 1
+ ;;
+ --rpm-dir=*)
+ RPM_DIR=${i#*=}
+ ;;
+ --test-suite-path=*)
+ TEST_SUITE_PATH=${i#*=}
+ ;;
+ --unittest)
+ UNITTEST=on
+ ;;
+ --verification)
+ VERIFICATION=on
+ ;;
+ --framework)
+ FRAMEWORK=on
+ ;;
+ --gcov-dir=*)
+ GCOV_DIR=${i#*=}
+ ;;
+ esac
+done
+
+
+N=$(sdb devices 2>/dev/null | wc -l)
+
+# exit if no device found
+if [[ $N -le 1 ]]; then
+ echo "No device found."
+ exit 1
+fi
+
+NUM_DEV=$(($N-1))
+echo "Device list:"
+DEVICE_LIST=$(sdb devices 2>/dev/null)
+echo "$DEVICE_LIST" | tail -n"$NUM_DEV"
+
+if [ -z "$SERIAL" ]; then
+ SERIAL=$(echo "$DEVICE_LIST" | tail -n1 | awk '{print $1}')
+fi
+SDB_CMD="sdb -s $SERIAL "
+
+# root on, remount as rw
+$SDB_CMD root on
+$SDB_CMD shell mount -o rw,remount /
+
+SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$SCRIPT_ROOT/../
+
+if [ -z "$RPM_DIR" ] && [ -z "$TEST_SUITE_PATH" ]; then
+ echo "Please provide --rpm-dir or --test-suite-path"
+ exit 255
+fi
+
+if [ ! -z "$RPM_DIR" ]; then
+ prepare_rpm_test
+else
+ prepare_suite_test
+fi
+
+# On Tizen, run the acl_cl and mixed-backend tests
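+# IGNORE_MD5=1 presumably skips md5 re-checks on the model cache, which was
+# pushed from the host above rather than downloaded on the target.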
+$SDB_CMD shell /bin/bash -c "IGNORE_MD5=1 $TEST_ROOT/infra/scripts/test_tizen_neurun_acl_cl.sh"
+$SDB_CMD shell /bin/bash -c "IGNORE_MD5=1 $TEST_ROOT/infra/scripts/test_tizen_neurun_mixed.sh"
+
+# run unittest
+if [ "$UNITTEST" == "on" ]; then
+ $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --unittest --artifactpath=$TEST_ROOT
+fi
+
+# run framework test
+if [ "$FRAMEWORK" == "on" ]; then
+ $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --frameworktest --artifactpath=$TEST_ROOT
+fi
+
+# run verification
+if [ "$VERIFICATION" == "on" ]; then
+ $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --verification --artifactpath=$TEST_ROOT
+fi
+
+# pull gcov files
+if [ -n "$GCOV_DIR" ]; then
+ $SDB_CMD shell 'rm -rf /home/gcov && mkdir -p /home/gcov'
+ $SDB_CMD shell 'find / -type f \( -iname "*.gcda" -or -iname "*.gcno" \) -exec cp {} /home/gcov/. \;'
+ $SDB_CMD shell 'cd /home/ && tar -zcvf gcov.tar.gz ./gcov '
+ cd "$GCOV_DIR"
+ $SDB_CMD pull /home/gcov.tar.gz
+ tar -zxvf gcov.tar.gz
+fi
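+
+# The pulled archive can be fed to host-side coverage tooling afterwards, e.g.
+# with lcov (assuming the matching build tree is available on the host):
+#   lcov --capture --directory ./gcov --output-file coverage.info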
diff --git a/libs/ARMComputeEx/CMakeLists.txt b/libs/ARMComputeEx/CMakeLists.txt
deleted file mode 100644
index 2483fb55d..000000000
--- a/libs/ARMComputeEx/CMakeLists.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-if("${TARGET_ARCH}" STREQUAL "x86_64")
- return()
-endif()
-
-nnfw_find_package(ARMCompute REQUIRED)
-
-set(ACL_EX_BASE ${CMAKE_SOURCE_DIR}/libs/ARMComputeEx)
-
-file(GLOB_RECURSE ACL_EX_SRCS "${ACL_EX_BASE}/*.cpp")
-
-# generate embeded cl_kernel
-execute_process (
- WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}/libs/ARMComputeEx"
- COMMAND bash -c "python resolve_includes.py"
-)
-
-add_library(arm_compute_ex SHARED ${ACL_EX_SRCS})
-set_target_properties(arm_compute_ex PROPERTIES COMPILE_FLAGS "-DEMBEDDED_KERNELS=1")
-target_include_directories(arm_compute_ex PUBLIC ${CMAKE_SOURCE_DIR}/libs/ARMComputeEx)
-target_link_libraries(arm_compute_ex arm_compute_core)
-install(TARGETS arm_compute_ex DESTINATION lib)
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/OpenCLEx.h b/libs/ARMComputeEx/arm_compute/core/CL/OpenCLEx.h
deleted file mode 100644
index dbda354d6..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/OpenCLEx.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#ifndef __ARM_COMPUTE_OPENCLEX_H__
-#define __ARM_COMPUTE_OPENCLEX_H__
-
-#include <string>
-#include <utility>
-
-/* Configure the Khronos C++ wrapper to target OpenCL 1.2: */
-#ifndef ARM_COMPUTE_NO_EXCEPTIONS
-#define CL_HPP_ENABLE_EXCEPTIONS
-#endif // ARM_COMPUTE_NO_EXCEPTIONS
-#define CL_HPP_CL_1_2_DEFAULT_BUILD
-#define CL_HPP_TARGET_OPENCL_VERSION 110
-#define CL_HPP_MINIMUM_OPENCL_VERSION 110
-#include <CL/cl2.hpp>
-
-namespace arm_compute
-{
-/** Class for loading OpenCL symbols. */
-class CLSymbolsEx final
-{
-private:
- CLSymbolsEx() = default;
- void load_symbols(void *handle);
-
-public:
- /** Get the static instance of CLSymbols.
- *
- * @return The static instance of CLSymbols.
- */
- static CLSymbolsEx &get();
- /** Load symbols from the given OpenCL library path.
- *
- * @param[in] library Path to the OpenCL library.
- *
- * @return True if loading the library is successful.
- */
- bool load(const std::string &library);
- /** Load symbols from any of the default OpenCL library names.
- *
- * @return True if loading any library is successful.
- */
- bool load_default();
-
-#define DECLARE_FUNCTION_PTR(func_name) std::function<decltype(func_name)> func_name##_ptr = nullptr
-
- DECLARE_FUNCTION_PTR(clGetEventInfo);
- DECLARE_FUNCTION_PTR(clSetEventCallback);
-
-#undef DECLARE_FUNCTION_PTR
-
-private:
- std::pair<bool, bool> _loaded{false, false};
-};
-} // namespace arm_compute
-#endif /* __ARM_COMPUTE_OPENCLEX_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLActivationLayerExKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLActivationLayerExKernel.h
deleted file mode 100644
index 080cc47ef..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLActivationLayerExKernel.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLACTIVATIONLAYEREXKERNEL_H__
-#define __ARM_COMPUTE_CLACTIVATIONLAYEREXKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-#include "arm_compute/core/TypesEx.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Interface for the activation layer kernel. */
-class CLActivationLayerExKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLActivationLayerExKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLActivationLayerExKernel(const CLActivationLayerExKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLActivationLayerExKernel &operator=(const CLActivationLayerExKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLActivationLayerExKernel(CLActivationLayerExKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLActivationLayerExKernel &operator=(CLActivationLayerExKernel &&) = default;
- /** Default destructor */
- ~CLActivationLayerExKernel() = default;
- /** Set the input and output tensor.
- *
- * @note If the output tensor is a nullptr, the activation function will be performed in-place
- *
- * @param[in, out] input Source tensor. In case of @p output tensor = nullptr, this tensor will
- * store the result
- * of the activation function. Data types supported:
- * QASYMM8/F16/F32.
- * @param[out] output Destination tensor. Data type supported: same as @p input
- * @param[in] act_info Activation layer information.
- */
- void configure(ICLTensor *input, ICLTensor *output, ActivationLayerInfoEx act_info);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * CLActivationLayerKernel
- *
- * @param[in] input Source tensor info. In case of @p output tensor info = nullptr, this tensor
- * will store the result
- * of the activation function. Data types supported: QASYMM8/F16/F32.
- * @param[in] output Destination tensor info. Data type supported: same as @p input
- * @param[in] act_info Activation layer information.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const ActivationLayerInfoEx &act_info);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- ICLTensor *_input;
- ICLTensor *_output;
- bool _run_in_place;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLACTIVATIONLAYEREXKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgMinMaxKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgMinMaxKernel.h
deleted file mode 100644
index b91a26159..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgMinMaxKernel.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CLArgMinMaxKernel.h
- * @brief This file defines CLArgMinMaxKernel
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __ARM_COMPUTE_CLARG_MIN_MAX_KERNEL_H__
-#define __ARM_COMPUTE_CLARG_MIN_MAX_KERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-#include "arm_compute/core/TypesEx.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/**
- * @brief Class to define interface for the argminmax max kernel.
- */
-class CLArgMinMaxKernel : public ICLKernel
-{
-public:
- /**
- * @brief Default constructor.
- */
- CLArgMinMaxKernel();
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers).
- * @param [in] copiedInstance Const reference of CLArgMinMaxKernel to be copied
- */
- CLArgMinMaxKernel(const CLArgMinMaxKernel &) = delete;
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers).
- * @param [in] copiedInstance Const reference of CLArgMinMaxKernel to be copied
- * @return Reference of this instance
- */
- CLArgMinMaxKernel &operator=(const CLArgMinMaxKernel &) = delete;
- /**
- * @brief Allow instances of this class to be moved
- * @param [in] movedInstance Rvalue reference of CLArgMinMaxKernel to be moved
- */
- CLArgMinMaxKernel(CLArgMinMaxKernel &&) = default;
- /**
- * @brief Allow instances of this class to be moved
- * @param [in] movedInstance Rvalue reference of CLArgMinMaxKernel to be moved
- * @return Reference of this instance
- */
- CLArgMinMaxKernel &operator=(CLArgMinMaxKernel &&) = default;
- /**
- * @brief Initialise the kernel's input, output and border mode.
- * @param[in] input An input tensor. Data types supported: U8/QASYMM8/S32/F32.
- * @param[out] output The output tensor, Data types supported: same as @p input.
- * @param[in] argminmax_axis Axis to argminmax
- * return N/A
- */
- void configure(const ICLTensor *input, ICLTensor *output, const uint32_t argminmax_axis,
- ArgOperation op);
- /**
- * @brief Static function to check if given info will lead to a valid configuration of @ref
- * CLArgMinMaxKernel
- * @param[in] input An input tensor info. Data types supported: U8/QASYMM8/S32/F32.
- * @param[in] output The output tensor info, Data types supported: same as @p input1.
- * @param[in] argminmax_axis Axis to argminmax
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const uint32_t argminmax_axis, ArgOperation op);
-
- /*
- * @brief Run CLArgMinMaxKernel op
- * @param[in] window Window to be used for in_slice
- * @param[in] queue cl::CommandQueue
- * @return N/A
- */
- void run(const Window &window, cl::CommandQueue &queue) override;
- /*
- * @brief Run CLArgMinMaxKernel op on CPU
- * @param[in] queue cl::CommandQueue
- * @return N/A
- */
- void run_on_cpu(cl::CommandQueue &queue);
-
-private:
- const ICLTensor *_input;
- ICLTensor *_output;
- uint32_t _argminmax_axis;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLargminmaxMAXKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArithmeticSubtractionExKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArithmeticSubtractionExKernel.h
deleted file mode 100644
index 9a765f310..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArithmeticSubtractionExKernel.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLARITHMETICSUBTRACTIONEXKERNEL_H__
-#define __ARM_COMPUTE_CLARITHMETICSUBTRACTIONEXKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Interface for the arithmetic subtraction kernel (support broadcasting)
- *
- * Arithmetic subtraction is computed by:
- * @f[ output(x,y) = input1(x,y) - input2(x,y) @f]
- */
-class CLArithmeticSubtractionExKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLArithmeticSubtractionExKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLArithmeticSubtractionExKernel(const CLArithmeticSubtractionExKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLArithmeticSubtractionExKernel &operator=(const CLArithmeticSubtractionExKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLArithmeticSubtractionExKernel(CLArithmeticSubtractionExKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLArithmeticSubtractionExKernel &operator=(CLArithmeticSubtractionExKernel &&) = default;
- /** Default destructor */
- ~CLArithmeticSubtractionExKernel() = default;
-
- /** Initialise the kernel's inputs, output and convertion policy.
- *
- * @param[in] input1 First tensor input. Data types supported: U8/S16/F16/F32.
- * @param[in] input2 Second tensor input. Data types supported: U8/S16/F16/F32.
- * @param[out] output Output tensor. Data types supported: U8 (Only if both inputs are U8),
- * S16/F16/F32.
- * @param[in] policy Policy to use to handle overflow.
- */
- void configure(const ICLTensor *input1, const ICLTensor *input2, ICLTensor *output,
- ConvertPolicy policy);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * CLArithmeticSubtractionExKernel
- *
- * @param[in] input1 First tensor input info. Data types supported: U8/S16/F16/F32.
- * @param[in] input2 Second tensor input info. Data types supported: U8/S16/F16/F32.
- * @param[in] output Output tensor info. Data types supported: U8 (Only if both inputs are U8),
- * S16/F16/F32.
- * @param[in] policy Policy to use to handle overflow.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, ConvertPolicy policy);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
- BorderSize border_size() const override;
-
-private:
- const ICLTensor *_input1; /**< Source tensor 1 */
- const ICLTensor *_input2; /**< Source tensor 2 */
- ICLTensor *_output; /**< Destination tensor */
-};
-} // namespace arm_compute
-#endif /* __ARM_COMPUTE_CLARITHMETICSUBTRACTIONEXKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBatchToSpaceNDKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBatchToSpaceNDKernel.h
deleted file mode 100644
index 1387897c9..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBatchToSpaceNDKernel.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLBATCH_TO_SPACE_ND_KERNEL_H__
-#define __ARM_COMPUTE_CLBATCH_TO_SPACE_ND_KERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** OpenCL kernel to perform BATCH_TO_SPACE_ND operation */
-class CLBatchToSpaceNDKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLBatchToSpaceNDKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLBatchToSpaceNDKernel(const CLBatchToSpaceNDKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLBatchToSpaceNDKernel &operator=(const CLBatchToSpaceNDKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLBatchToSpaceNDKernel(CLBatchToSpaceNDKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLBatchToSpaceNDKernel &operator=(CLBatchToSpaceNDKernel &&) = default;
- /** Default destructor */
- ~CLBatchToSpaceNDKernel() = default;
- /** Initialise the kernel's input and output.
- *
- * @param[in] input Input tensor. Data types supported: U8/QASYMM8/S16/S32/F16/F32.
- * @param[in] output Output tensor. Data types supported: U8/QASYMM8/S16/S32/F16/F32.
- */
- void configure(const ICLTensor *input, ICLTensor *output, const int32_t *block_size);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_input; /**< Source tensor */
- ICLTensor *_output; /**< Destination tensor */
-};
-
-} // namespace arm_compute
-#endif /* __ARM_COMPUTE_CLSPACE_TO_BATCH_ND_KERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLComparisonOpKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLComparisonOpKernel.h
deleted file mode 100644
index f5f455993..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLComparisonOpKernel.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLCOMPARISON_OP_KERNEL_H__
-#define __ARM_COMPUTE_CLCOMPARISON_OP_KERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-#include "arm_compute/core/TypesEx.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** OpenCL kernel to check if values in both tensors are equal*/
-class CLComparisonOpKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLComparisonOpKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers). */
- CLComparisonOpKernel(const CLComparisonOpKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers). */
- CLComparisonOpKernel &operator=(const CLComparisonOpKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLComparisonOpKernel(CLComparisonOpKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLComparisonOpKernel &operator=(CLComparisonOpKernel &&) = default;
- /** Initialize the kernel's input, output.
- *
- * @param[in] input1 Source tensor1.
- * @param[in] input2 Source tensor2.
- * @param[out] output Output tensor.
- */
- void configure(const ICLTensor *input1, const ICLTensor *input2, ICLTensor *output,
- const ComparisonOperation &op);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
- BorderSize border_size() const override;
-
-private:
- const ICLTensor *_input1;
- const ICLTensor *_input2;
- ICLTensor *_output;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLCOMPARISON_OP_KERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLExpKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLExpKernel.h
deleted file mode 100644
index a6ea539f8..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLExpKernel.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLEXPKERNEL_H__
-#define __ARM_COMPUTE_CLEXPKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** OpenCL kernel to perform an exponential operation */
-class CLExpKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLExpKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLExpKernel(const CLExpKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLExpKernel &operator=(const CLExpKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLExpKernel(CLExpKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLExpKernel &operator=(CLExpKernel &&) = default;
- /** Default destructor */
- ~CLExpKernel() = default;
- /** Set the source, destination of the kernel
- *
- * @param[in] input Source tensor. Data type supported: F32.
- * @param[out] output Destination tensor. Data type supported: F32.
- */
- void configure(const ICLTensor *input, ICLTensor *output);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_input;
- ICLTensor *_output;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLEXPKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherKernel.h
deleted file mode 100644
index 7e35a80b0..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherKernel.h
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CLGatherKernel.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file defines CLGatherKernel class
- */
-
-#ifndef __ARM_COMPUTE_CLGATHERKERNEL_H__
-#define __ARM_COMPUTE_CLGATHERKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/**
- * @brief Class to define an interface for the gather kernel.
- */
-class CLGatherKernel : public ICLKernel
-{
-public:
- /**
- * @brief Construct CLGatherKernel object
- * */
- CLGatherKernel();
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers).
- */
- CLGatherKernel(const CLGatherKernel &) = delete;
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers).
- */
- CLGatherKernel &operator=(const CLGatherKernel &) = delete;
-
- /**
- * @brief Construct CLGatherKernel object by using default move constructor
- * @param[in] CLGatherKernel object to move
- */
- CLGatherKernel(CLGatherKernel &&) = default;
-
- /**
- * @brief Move assignment operator
- * @param[in] CLGatherKernel object to move
- */
- CLGatherKernel &operator=(CLGatherKernel &&) = default;
-
- /**
- * @brief Initialise the kernel's input, output and border mode.
- * @param[in] input1 An input tensor. Data types supported: U8/S32/F32.
- * @param[in] input2 An input tensor. Data types supported: S32.
- * @param[out] output The output tensor, Data types supported: same as @p input1.
- * @return N/A
- */
- void configure(const ICLTensor *input1, const ICLTensor *input2, ICLTensor *output);
-
- /**
- * @brief Static function to check if given info will lead to a valid configuration of @ref
- * CLGatherKernel
- * @param[in] input1 An input tensor. Data types supported: U8/S32/F32.
- * @param[in] input2 An input tensor. Data types supported: S32.
- * @param[out] output The output tensor, Data types supported: same as @p input1.
- * @return a status
- */
- static Status validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output);
-
- /**
- * @brief Enqueue the OpenCL kernel to process the given window on the passed OpenCL command
- * queue.
- * @note The queue is *not* flushed by this method, and therefore the kernel will not have
- * been executed by the time this method returns.
- * @param[in] window Region on which to execute the kernel. (Must be a valid region of
- * the window returned by window()).
- * @param[in,out] queue Command queue on which to enqueue the kernel.@return N/A
- * @return N/A
- */
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_input1;
- const ICLTensor *_input2;
- ICLTensor *_output;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLGATHERKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h
deleted file mode 100644
index c3fc15637..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CLHashtableLookupKernel.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file defines CLHashtableLookupKernel class
- */
-
-#ifndef __ARM_COMPUTE_CLHASHTABLELOOKUPKERNEL_H__
-#define __ARM_COMPUTE_CLHASHTABLELOOKUPKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-#include "arm_compute/runtime/CL/CLTensor.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/**
-* @brief Class to perform HashtableLookup operation with opencl kernel
-*/
-class CLHashtableLookupKernel : public ICLKernel
-{
-public:
- /**
- * @brief Construct a CLHashtableLookupKernel object
- * */
- CLHashtableLookupKernel();
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers)
- * */
- CLHashtableLookupKernel(const CLHashtableLookupKernel &) = delete;
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers)
- * */
- CLHashtableLookupKernel &operator=(const CLHashtableLookupKernel &) = delete;
-
- /**
- * @brief Construct a CLHashtableLookupKernel object by using default move constructor
- * @param[in] CLHashtableLookupKernel object to move
- * */
- CLHashtableLookupKernel(CLHashtableLookupKernel &&) = default;
-
- /**
- * @brief Move assignment operator
- * @param[in] CLHashtableLookupKernel object to move
- * */
- CLHashtableLookupKernel &operator=(CLHashtableLookupKernel &&) = default;
-
- /**
- * @brief Destruct this object
- * */
- ~CLHashtableLookupKernel() = default;
-
- /**
- * @brief Set the input and output of the kernel
- * @param[in] lookups Lookups 1D tensor that values are indices into the first dimension of
- * input.
- * @param[in] keys Keys 1D tensor. keys and input pair represent a map.
- * Data types supported: S32
- * @param[in] input Source tensor.
- * Data types supported: U8/S8/QASYMM8/U16/S16/U32/S32/F16/F32
- * @param[out] output Destination tensor. Data types and data layouts supported: Same as @p
- * input.
- * @param[out] hits Hits 1D tensor. A boolean tensor that indicates whether the lookup hits
- * (True) or not (False). Data types supported: U8/QASYMM8
- * @return N/A
- */
- void configure(const ICLTensor *lookups, const ICLTensor *keys, const ICLTensor *input,
- ICLTensor *output, ICLTensor *hits);
-
- /**
- * @brief Static function to check if given info will lead to a valid configuration of @ref
- * CLHashtableLookupKernel
- * @param[in] lookups The lookups tensor info. Data types supported: S32.
- * @param[in] keys The keys tensor info. keys and input pair represent a map.
- * Data types supported: S32
- * @param[in] input The input tensor info.
- * Data types supported: U8/S8/QASYMM8/U16/S16/U32/S32/F16/F32
- * @param[out] output The output tensor. Data types and data layouts supported: Same as @p
- * input.
- * @param[out] hits The hits tensor info. A boolean tensor that indicates whether the lookup
- * hits
- * (True) or not (False). Data types supported: U8/QASYMM8
- * @return a status
- */
- static Status validate(const ITensorInfo *lookups, const ITensorInfo *keys,
- const ITensorInfo *input, const ITensorInfo *output,
- const ITensorInfo *hits);
-
- /**
- * @brief Enqueue the OpenCL kernel to process the given window on the passed OpenCL command
- * queue.
- * @note The queue is *not* flushed by this method, and therefore the kernel will not have
- * been executed by the time this method returns.
- * @param[in] window Region on which to execute the kernel. (Must be a valid region of
- * the window returned by window()).
- * @param[in,out] queue Command queue on which to enqueue the kernel.@return N/A
- * @return N/A
- */
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_lookups; /** Lookups tensor */
- const ICLTensor *_keys; /** Keys tensor */
- const ICLTensor *_input; /** Source tensor */
- ICLTensor *_output; /** Destination tensor */
- ICLTensor *_hits; /** Hits tensor */
- std::unique_ptr<CLTensor> _lookup_indices{nullptr}; /** Lookup indices tensor */
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLHASHTABLELOOKUPKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNormalizationLayerExKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNormalizationLayerExKernel.h
deleted file mode 100644
index 181a6226a..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNormalizationLayerExKernel.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLNORMALIZATIONLAYEREXKERNEL_H__
-#define __ARM_COMPUTE_CLNORMALIZATIONLAYEREXKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Interface for the normalization layer kernel.
- */
-class CLNormalizationLayerExKernel : public ICLKernel
-{
-public:
- /** Constructor */
- CLNormalizationLayerExKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLNormalizationLayerExKernel(const CLNormalizationLayerExKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLNormalizationLayerExKernel &operator=(const CLNormalizationLayerExKernel &) = delete;
- /** Default Move Constructor. */
- CLNormalizationLayerExKernel(CLNormalizationLayerExKernel &&) = default;
- /** Default move assignment operator */
- CLNormalizationLayerExKernel &operator=(CLNormalizationLayerExKernel &&) = default;
- /** Set the input and output tensors.
- *
- * @param[in] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data types supported:
- * F16/F32.
- * @param[out] output Destination tensor. Output will have the same number of dimensions as
- * input. Data types supported: same as @p input.
- * @param[in] norm_info Normalization layer information like the normalization type,
- * normalization size and other parameters.
- */
- void configure(const ICLTensor *input, ICLTensor *output, NormalizationLayerInfo norm_info);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * CLNormalizationLayerKernel
- *
- * @param[in] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data types supported:
- * F16/F32.
- * @param[in] output Destination tensor. Output will have the same number of dimensions as
- * input. Data types supported: same as @p input.
- * @param[in] norm_info Normalization layer information like the normalization type, normalization
- * size and other parameters.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- NormalizationLayerInfo norm_info);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
- BorderSize border_size() const override;
-
-private:
- const ICLTensor *_input;
- ICLTensor *_output;
- BorderSize _border_size;
- bool _is_in_map;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLNORMALIZATIONLAYEREXKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPadLayerKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPadLayerKernel.h
deleted file mode 100644
index cbaa2adee..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPadLayerKernel.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
-* Copyright (c) 2016-2018 ARM Limited.
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-#ifndef __ARM_COMPUTE_CLPADLAYERKERNEL_H__
-#define __ARM_COMPUTE_CLPADLAYERKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** OpenCL kernel to perform PAD operation */
-class CLPadLayerKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLPadLayerKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLPadLayerKernel(const CLPadLayerKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLPadLayerKernel &operator=(const CLPadLayerKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLPadLayerKernel(CLPadLayerKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLPadLayerKernel &operator=(CLPadLayerKernel &&) = default;
- /** Default destructor */
- ~CLPadLayerKernel() = default;
- /** Initialise the kernel's input and output.
- *
- * @param[in] input Input tensor. Data types supported: U8/QASYMM8/S16/S32/F16/F32.
- * @param[in] output Output tensor. Data types supported: U8/QASYMM8/S16/S32/F16/F32.
- * @param[in] pad_size Padding Size tensor. Data types supported : S32
- */
- void configure(const ICLTensor *input, ICLTensor *output, ICLTensor *pad_size);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_input; /**< Source tensor */
- ICLTensor *_output; /**< Destination tensor */
- ICLTensor *_pad_size; /**< Padding Size tensor */
-};
-
-} // namespace arm_compute
-#endif /* __ARM_COMPUTE_CLPADLAYERKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPermuteExKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPermuteExKernel.h
deleted file mode 100644
index 3434deee8..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPermuteExKernel.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLPERMUTEEXKERNEL_H__
-#define __ARM_COMPUTE_CLPERMUTEEXKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** OpenCL kernel to perform tensor permutation.
- *
- * Permutes given a permutation vector
- */
-class CLPermuteExKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLPermuteExKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLPermuteExKernel(const CLPermuteExKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLPermuteExKernel &operator=(const CLPermuteExKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLPermuteExKernel(CLPermuteExKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLPermuteExKernel &operator=(CLPermuteExKernel &&) = default;
- /** Set the input and output of the kernel.
- *
- * @param[in] input The input tensor to permute. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] output The output tensor. Data types supported: Same as @p input
- * @param[in] perm Permutation vector
- */
- void configure(const ICLTensor *input, ICLTensor *output, const PermutationVector &perm);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * CLPermuteKernel
- *
- * @param[in] input First tensor input info. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32.
- * @param[in] output Output tensor info. Data types supported: same as @p input.
- * @param[in] perm Permutation vector
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const PermutationVector &perm);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_input;
- ICLTensor *_output;
- PermutationVector _perm;
-};
-} // arm_compute
-#endif /*__ARM_COMPUTE_CLPERMUTEEXKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPixelWiseDivisionKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPixelWiseDivisionKernel.h
deleted file mode 100644
index d579f5d8f..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPixelWiseDivisionKernel.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CLPixelWiseDivisionKernel.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file defines CLPixelWiseDivisionKernel class
- */
-
-#ifndef __ARM_COMPUTE_CLPIXELWISEDIVISIONKERNEL_H__
-#define __ARM_COMPUTE_CLPIXELWISEDIVISIONKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/**
- * @brief Interface for the pixelwise division kernel.
- */
-class CLPixelWiseDivisionKernel : public ICLKernel
-{
-public:
- /**
- * @brief Construct a CLPixelWiseDivisionKernel object
- */
- CLPixelWiseDivisionKernel();
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers).
- */
- CLPixelWiseDivisionKernel(const CLPixelWiseDivisionKernel &) = delete;
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers).
- */
- CLPixelWiseDivisionKernel &operator=(const CLPixelWiseDivisionKernel &) = delete;
-
- /**
- * @brief Construct a CLPixelWiseDivisionKernel object by using move constructor
- * @param[in] CLPixelWiseDivisionKernel object to move
- */
- CLPixelWiseDivisionKernel(CLPixelWiseDivisionKernel &&) = default;
-
- /**
- * @brief Allow instances of this class to be moved
- * @param[in] CLPixelWiseDivisionKernel object to move
- */
- CLPixelWiseDivisionKernel &operator=(CLPixelWiseDivisionKernel &&) = default;
-
- /**
- * @brief Initialise the kernel's input, output and border mode.
- * @param[in] input1 An input tensor. Data types supported: U8/S16/F16/F32.
- * @param[in] input2 An input tensor. Data types supported: same as @p input1.
- * @param[out] output The output tensor, Data types supported: same as @p input1. Note:
- * U8 requires both inputs to be U8.
- * @param[in] scale Scale to apply after division.
- * Scale must be positive and its value must be either 1/255 or 1/2^n
- * where n is between 0 and 15.
- * @param[in] overflow_policy Overflow policy. Supported overflow policies: Wrap, Saturate
- * @param[in] rounding_policy Rounding policy. Supported rounding modes: to zero, to nearest
- * even.
- * @return N/A
- */
- void configure(const ICLTensor *input1, const ICLTensor *input2, ICLTensor *output, float scale,
- ConvertPolicy overflow_policy, RoundingPolicy rounding_policy);
-
- /**
- * @brief Static function to check if given info will lead to a valid configuration of @ref
- * CLPixelWiseDivisionKernel
- * @param[in] input1 An input tensor info. Data types supported: U8/S16/F16/F32.
- * @param[in] input2 An input tensor info. Data types supported: same as @p input1.
- * @param[in] output The output tensor info, Data types supported: same as @p input1.
- * Note: U8 requires both inputs to be U8.
- * @param[in] scale Scale to apply after division.
- * Scale must be positive and its value must be either 1/255 or 1/2^n
- * where n is between 0 and 15.
- * @param[in] overflow_policy Overflow policy. Supported overflow policies: Wrap, Saturate
- * @param[in] rounding_policy Rounding policy. Supported rounding modes: to zero, to nearest even.
- * @return a status
- */
- static Status validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, float scale, ConvertPolicy overflow_policy,
- RoundingPolicy rounding_policy);
-
- /**
- * @brief Enqueue the OpenCL kernel to process the given window on the passed OpenCL command
- * queue.
- * @note The queue is *not* flushed by this method, and therefore the kernel will not have
- * been executed by the time this method returns.
- * @param[in] window Region on which to execute the kernel. (Must be a valid region of
- * the window returned by window()).
- * @param[in,out] queue Command queue on which to enqueue the kernel.@return N/A
- * @return N/A
- */
- void run(const Window &window, cl::CommandQueue &queue) override;
-
- /**
- * @brief The size of the border for that kernel
- * @return The width in number of elements of the border.
- */
- BorderSize border_size() const override;
-
-private:
- const ICLTensor *_input1;
- const ICLTensor *_input2;
- ICLTensor *_output;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLPIXELWISEDIVISIONKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h
deleted file mode 100644
index 68534f1ab..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLSPACE_TO_BATCH_ND_KERNEL_H__
-#define __ARM_COMPUTE_CLSPACE_TO_BATCH_ND_KERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** OpenCL kernel to perform SPACE_TO_BATCH_ND operation */
-class CLSpaceToBatchNDKernel final : public ICLKernel
-{
-public:
- /** Default constructor */
- CLSpaceToBatchNDKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLSpaceToBatchNDKernel(const CLSpaceToBatchNDKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- CLSpaceToBatchNDKernel &operator=(const CLSpaceToBatchNDKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLSpaceToBatchNDKernel(CLSpaceToBatchNDKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLSpaceToBatchNDKernel &operator=(CLSpaceToBatchNDKernel &&) = default;
- /** Default destructor */
- ~CLSpaceToBatchNDKernel() = default;
- /** Initialise the kernel's input and output.
- *
- * @note The data layout of input and output must be the same.
- * @note The number of dimensions of input and output must be 4, and `spatial` dimensions
- * are height and width.
- * @param[in] input Input tensor. Data types supported: U8/QASYMM8/S16/F16/S32/F32.
- * Data layout supported: NCHW/NHWC
- * @param[in] block_size Block size tensor. Data types supported: S32.
- * @param[in] padding_size Padding size tensor. Data types supported: S32.
- * @param[out] output Output tensor. Data types supported: U8/QASYMM8/S16/F16/S32/F32.
- * Data layout supported: NCHW/NHWC
- */
- void configure(const ICLTensor *input, const ICLTensor *block_size, const ICLTensor *padding_size,
- ICLTensor *output);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_input; /**< Source tensor */
- const ICLTensor *_block_size; /**< Block size tensor */
- const ICLTensor *_padding_size; /**< Padding size tensor */
- ICLTensor *_output; /**< Destination tensor */
-};
-
-} // namespace arm_compute
-
-#endif /* __ARM_COMPUTE_CLSPACE_TO_BATCH_ND_KERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSquaredDifferenceKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSquaredDifferenceKernel.h
deleted file mode 100644
index a4c44e35d..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSquaredDifferenceKernel.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLSQUARED_DIFFERENCE_KERNEL_H__
-#define __ARM_COMPUTE_CLSQUARED_DIFFERENCE_KERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** OpenCL kernel to return squared difference value of two tensors (x-y)^2*/
-class CLSquaredDifferenceKernel : public ICLKernel
-{
-public:
- /** Default constructor */
- CLSquaredDifferenceKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers). */
- CLSquaredDifferenceKernel(const CLSquaredDifferenceKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers). */
- CLSquaredDifferenceKernel &operator=(const CLSquaredDifferenceKernel &) = delete;
- /** Allow instances of this class to be moved */
- CLSquaredDifferenceKernel(CLSquaredDifferenceKernel &&) = default;
- /** Allow instances of this class to be moved */
- CLSquaredDifferenceKernel &operator=(CLSquaredDifferenceKernel &&) = default;
- /** Initialize the kernel's input, output.
- *
- * @param[in] input1 Source tensor1.
- * @param[in] input2 Source tensor2.
- * @param[out] output Output tensor.
- */
- void configure(const ICLTensor *input1, const ICLTensor *input2, ICLTensor *output);
-
- // Inherited methods overridden:
- void run(const Window &window, cl::CommandQueue &queue) override;
-
- BorderSize border_size() const override;
-
-private:
- const ICLTensor *_input1;
- const ICLTensor *_input2;
- ICLTensor *_output;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLSQUARED_DIFFERENCE_KERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLStridedSliceExKernel.h b/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLStridedSliceExKernel.h
deleted file mode 100644
index 6368c380e..000000000
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLStridedSliceExKernel.h
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CLStridedSliceExKernel.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file defines CLStridedSliceExKernel class
- */
-
-#ifndef __ARM_COMPUTE_CLSTRIDEDSLICEEXKERNEL_H__
-#define __ARM_COMPUTE_CLSTRIDEDSLICEEXKERNEL_H__
-
-#include "arm_compute/core/CL/ICLKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/**
-* @brief Class to define an interface for the kernel to extract a strided slice of a tensor
-*/
-class CLStridedSliceExKernel : public ICLKernel
-{
-public:
- /**
- * @brief Construct a CLStridedSliceExKernel object
- * */
- CLStridedSliceExKernel();
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers)
- * */
- CLStridedSliceExKernel(const CLStridedSliceExKernel &) = delete;
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers)
- * */
- CLStridedSliceExKernel &operator=(const CLStridedSliceExKernel &) = delete;
-
- /**
- * @brief Construct a CLStridedSliceExKernel object by using default move constructor
- * @param[in] CLStridedSliceExKernel object to move
- * */
- CLStridedSliceExKernel(CLStridedSliceExKernel &&) = default;
-
- /**
- * @brief Move assignment operator
- * @param[in] CLStridedSliceExKernel object to move
- * */
- CLStridedSliceExKernel &operator=(CLStridedSliceExKernel &&) = default;
-
- /**
- * @brief Destruct this object
- * */
- ~CLStridedSliceExKernel() = default;
-
- /**
- * @brief Set the input and output of the kernel
- * @param[in] input Source tensor. Data type supported:
- * U8/S8/QASYMM8/U16/S16/U32/S32/F16/F32
- * @param[out] output Destination tensor. Data type supported: Same as @p input
- * @param[in] beginData The begin tensor. Data types supported: S32.
- * The number of dimensions must be 1.
- * The length must be the same as the number of dimensions of input.
- * @param[in] endData The end tensor. Data types supported: S32.
- * The number of dimensions must be 1.
- * The length must be the same as the number of dimensions of input.
- * @param[in] strideData The stride tensor. Data types supported: S32.
- * The number of dimensions must be 1.
- * The length must be the same as the number of dimensions of input.
- * @param[in] beginMask Mask for begin
- * @param[in] endMask Mask for end
- * @param[in] shrinkAxisMask Mask for shrink axis.
- * @return N/A
- */
- void configure(const ICLTensor *input, ICLTensor *output, ICLTensor *beginData,
- ICLTensor *endData, ICLTensor *stridesData, int32_t beginMask, int32_t endMask,
- int32_t shrinkAxisMask);
-
- /**
- * @brief Static function to check if given info will lead to a valid configuration of @ref
- * CLStridedSliceExKernel
- * @param[in] input The input tensor info. Data types supported:
- * U8/S8/QASYMM8/U16/S16/U32/S32/F16/F32
- * @param[in] output The output tensor info, Data types supported: same as @p input1.
- * @param[in] begin The begin tensor info. Data types supported: S32.
- * The number of dimensions must be 1.
- * The length must be the same as the number of dimensions of input.
- * @param[in] end The end tensor info. Data types supported: S32.
- * The number of dimensions must be 1.
- * The length must be the same as the number of dimensions of input.
- * @param[in] stride The stride tensor info. Data types supported: S32.
- * The number of dimensions must be 1.
- * The length must be the same as the number of dimensions of input.
- * @param[in] beginMask Mask for begin
- * @param[in] endMask Mask for end
- * @param[in] shrinkAxisMask Mask for shrink axis.
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const ITensorInfo *begin, const ITensorInfo *end,
- const ITensorInfo *stride, int32_t beginMask, int32_t endMask,
- int32_t shrinkAxisMask);
-
- /**
- * @brief Enqueue the OpenCL kernel to process the given window on the passed OpenCL command
- * queue.
- * @note The queue is *not* flushed by this method, and therefore the kernel will not have
- * been executed by the time this method returns.
- * @param[in] window Region on which to execute the kernel. (Must be a valid region of
- * the window returned by window()).
- * @param[in,out] queue Command queue on which to enqueue the kernel.@return N/A
- * @return N/A
- */
- void run(const Window &window, cl::CommandQueue &queue) override;
-
-private:
- const ICLTensor *_input;  /**< Source tensor */
- ICLTensor *_output;       /**< Destination tensor */
- ICLTensor *_beginData;    /**< Start indices of input tensor */
- ICLTensor *_endData;      /**< Stop indices of input tensor */
- ICLTensor *_stridesData;  /**< Strides tensor */
- int32_t _beginMask;       /**< Begin mask */
- int32_t _endMask;         /**< End mask */
- int32_t _shrinkAxisMask;  /**< Shrink axis mask */
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLSTRIDEDSLICEEXKERNEL_H__ */
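For context, a minimal usage sketch of the kernel interface removed above; it assumes a CL context initialised elsewhere (e.g. CLScheduler::get().default_init()), rank-1 S32 begin/end/strides tensors already allocated and filled, and illustrative tensor and function names:

    #include "arm_compute/core/CL/kernels/CLStridedSliceExKernel.h"
    #include "arm_compute/runtime/CL/CLScheduler.h"
    #include "arm_compute/runtime/CL/CLTensor.h"

    using namespace arm_compute;

    void run_strided_slice_ex(CLTensor &input, CLTensor &output, CLTensor &begin,
                              CLTensor &end, CLTensor &strides)
    {
      CLStridedSliceExKernel slice_kernel;
      // All-zero masks: take the slice exactly as described by begin/end/strides.
      slice_kernel.configure(&input, &output, &begin, &end, &strides,
                             /*beginMask*/ 0, /*endMask*/ 0, /*shrinkAxisMask*/ 0);
      CLScheduler::get().enqueue(slice_kernel); // enqueues over the kernel's window
    }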
diff --git a/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NENormalizationLayerExKernel.h b/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NENormalizationLayerExKernel.h
deleted file mode 100644
index f7bf72985..000000000
--- a/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NENormalizationLayerExKernel.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_NENORMALIZATIONLAYEREXKERNEL_H__
-#define __ARM_COMPUTE_NENORMALIZATIONLAYEREXKERNEL_H__
-
-#include "arm_compute/core/NEON/INEKernel.h"
-
-namespace arm_compute
-{
-class ITensor;
-
-/** Interface for the normalization layer kernel.
- */
-class NENormalizationLayerExKernel : public INEKernel
-{
-public:
- const char *name() const override { return "NENormalizationLayerExKernel"; }
- /** Default constructor */
- NENormalizationLayerExKernel();
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- NENormalizationLayerExKernel(const NENormalizationLayerExKernel &) = delete;
- /** Prevent instances of this class from being copied (As this class contains pointers) */
- NENormalizationLayerExKernel &operator=(const NENormalizationLayerExKernel &) = delete;
- /** Default Move Constructor. */
- NENormalizationLayerExKernel(NENormalizationLayerExKernel &&) = default;
- /** Default move assignment operator */
- NENormalizationLayerExKernel &operator=(NENormalizationLayerExKernel &&) = default;
- /** Default destructor */
- ~NENormalizationLayerExKernel() = default;
- /** Set the input and output tensors.
- *
- * @param[in] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data types
- * supported: FP16/F32.
- * @param[in] input_squared Source tensor with each element squared. 3 lower dims represent a
- * single input with dimensions [width, height, IFM].
- * Data type supported: same as @p input
- * @param[out] output Destination tensor. Output will have the same number of dimensions as
- * input. Data type supported: same as @p input
- * @param[in] norm_info Normalization layer information like the normalization type,
- * normalization size and other parameters.
- */
- void configure(const ITensor *input, const ITensor *input_squared, ITensor *output,
- NormalizationLayerInfo norm_info);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * NENormalizationLayerExKernel
- *
- * @param[in] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data types
- * supported: FP16/F32.
- * @param[in] input_squared Source tensor with each element squared. 3 lower dims represent a
- * single input with dimensions [width, height, IFM].
- * Data type supported: same as @p input
- * @param[in] output Destination tensor. Output will have the same number of dimensions as
- * input. Data type supported: same as @p input
- * @param[in] norm_info Normalization layer information like the normalization type,
- * normalization size and other parameters.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *input_squared,
- const ITensorInfo *output, NormalizationLayerInfo norm_info);
-
- // Inherited methods overridden:
- void run(const Window &window, const ThreadInfo &info) override;
- BorderSize border_size() const override;
-
-private:
- /** Function to perform normalization depending on the given template
- * dimension. The second template parameter specifies whether the
- * normalization has to be 1D or 2D.
- *
- * @note Only supported normalizations are:
- * - 1D over X or Z
- * - 2D over X and Y
- *
- * @param[in] window Region on which to execute the kernel.
- */
- template <DataType dt, unsigned int dim, bool do_2D_norm>
- void normalize_float(const Window &window);
-
- /** Common signature for all the specialised normalization functions
- *
- * @param[in] window Region on which to execute the kernel.
- */
- using NormalizationFunctionEx = void (NENormalizationLayerExKernel::*)(const Window &window);
-
-private:
- NormalizationFunctionEx _func;
- const ITensor *_input;
- const ITensor *_input_squared;
- ITensor *_output;
- NormalizationLayerInfo _norm_info;
- BorderSize _border_size;
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_NENORMALIZATIONLAYEREXKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/TypesEx.h b/libs/ARMComputeEx/arm_compute/core/TypesEx.h
deleted file mode 100644
index 8381f1cc6..000000000
--- a/libs/ARMComputeEx/arm_compute/core/TypesEx.h
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_TYPESEX_H__
-#define __ARM_COMPUTE_TYPESEX_H__
-
-#include <cmath>
-#include <cstddef>
-#include <cstdint>
-#include <string>
-#include <utility>
-
-namespace arm_compute
-{
-
-/** Available arg index operations */
-enum class ArgOperation
-{
- MAX,
- MIN,
-};
-
-/** Available reduce operations */
-enum class ReduceOperation
-{
- MAX, /**< Max */
- MEAN, /**< Mean */
- SUM, /**< Sum */
- MIN, /**< Min */
-};
-
-/** Available binary logical operations */
-enum class BinaryLogicalOperation
-{
- AND, /**< AND */
- OR, /**< OR */
-};
-
-enum class ComparisonOperation
-{
- EQUAL, /**< EQUAL */
- NOT_EQUAL, /**< NOT_EQUAL */
-};
-
-/** Activation Layer Information class */
-class ActivationLayerInfoEx
-{
-public:
- /** Available activation functions */
- enum class ActivationFunction
- {
- RSQRT /**< Inverse square root ( \f$ f(x) = 1 / \sqrt{x} \f$ ) */
- };
-
- ActivationLayerInfoEx() = default;
- /** Constructor
- *
- * @param[in] f The activation function to use.
- * @param[in] a (Optional) Alpha parameter; unused by the only supported function (RSQRT).
- * @param[in] b (Optional) Beta parameter; unused by the only supported function (RSQRT).
- */
- ActivationLayerInfoEx(ActivationFunction f, float a = 0.0f, float b = 0.0f)
- : _act(f), _a(a), _b(b), _enabled(true)
- {
- }
- /** Get the type of activation function */
- ActivationFunction activation() const { return _act; }
- /** Get the alpha value */
- float a() const { return _a; }
- /** Get the beta value */
- float b() const { return _b; }
- /** Check if initialised */
- bool enabled() const { return _enabled; }
-
-private:
- ActivationFunction _act = {ActivationLayerInfoEx::ActivationFunction::RSQRT};
- float _a = {};
- float _b = {};
- bool _enabled = {false};
-};
-
-} // namespace arm_compute
-#endif /* __ARM_COMPUTE_TYPESEX_H__ */
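A short, illustrative sketch of how the extension types above are consumed; RSQRT takes no parameters, so the optional alpha/beta defaults suffice:

    #include "arm_compute/core/TypesEx.h"

    using namespace arm_compute;

    void types_ex_example()
    {
      ActivationLayerInfoEx act{ActivationLayerInfoEx::ActivationFunction::RSQRT};
      // Constructed with a function, so act.enabled() is true; a() and b() stay 0.0f.
      ReduceOperation reduce = ReduceOperation::MEAN; // mean reduction
      ArgOperation arg       = ArgOperation::MAX;     // arg-max rather than arg-min
      (void)act; (void)reduce; (void)arg;
    }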
diff --git a/libs/ARMComputeEx/arm_compute/core/UtilsEx.h b/libs/ARMComputeEx/arm_compute/core/UtilsEx.h
deleted file mode 100644
index 8dd68a0c3..000000000
--- a/libs/ARMComputeEx/arm_compute/core/UtilsEx.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_UTILSEX_H__
-#define __ARM_COMPUTE_UTILSEX_H__
-
-#include "arm_compute/core/TypesEx.h"
-
-#include <cstdint>
-#include <cstdlib>
-#include <sstream>
-#include <string>
-
-namespace arm_compute
-{
-/** Translates a given activation function to a string.
- *
- * @param[in] act @ref ActivationLayerInfoEx::ActivationFunction to be translated to string.
- *
- * @return The string describing the activation function.
- */
-const std::string &string_from_activation_func_ex(ActivationLayerInfoEx::ActivationFunction act);
-}
-#endif /*__ARM_COMPUTE_UTILSEX_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLActivationLayerEx.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLActivationLayerEx.h
deleted file mode 100644
index 7e578550f..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLActivationLayerEx.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLACTIVATIONLAYEREX_H__
-#define __ARM_COMPUTE_CLACTIVATIONLAYEREX_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-
-#include "arm_compute/core/TypesEx.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Basic function to run @ref CLActivationLayerExKernel
- *
- * @note The function simulates an activation layer with the specified activation function.
- */
-class CLActivationLayerEx : public ICLSimpleFunction
-{
-public:
- /** Set the input and output tensor.
- *
- * @note If the output tensor is a nullptr or is equal to the input, the activation function will
- * be performed in-place
- *
- * @param[in, out] input Source tensor. In case of @p output tensor = nullptr, this tensor will
- * store the result
- * of the activation function. Data types supported:
- * QASYMM8/F16/F32.
- * @param[out] output Destination tensor. Data type supported: same as @p input
- * @param[in] act_info Activation layer parameters.
- */
- void configure(ICLTensor *input, ICLTensor *output, ActivationLayerInfoEx act_info);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * CLActivationLayer
- *
- * @param[in] input Source tensor info. In case of @p output tensor info = nullptr, this tensor
- * will store the result
- * of the activation function. Data types supported: QASYMM8/F16/F32.
- * @param[in] output Destination tensor info. Data type supported: same as @p input
- * @param[in] act_info Activation layer information.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const ActivationLayerInfoEx &act_info);
-};
-}
-#endif /* __ARM_COMPUTE_CLACTIVATIONLAYEREX_H__ */
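A minimal sketch of the function above, assuming an allocated F32 CLTensor and an initialised CL scheduler; passing nullptr as the output requests in-place execution, per the note in the header (the function name is illustrative):

    #include "arm_compute/runtime/CL/CLTensor.h"
    #include "arm_compute/runtime/CL/functions/CLActivationLayerEx.h"

    using namespace arm_compute;

    void rsqrt_in_place(CLTensor &t)
    {
      CLActivationLayerEx rsqrt;
      rsqrt.configure(&t, nullptr,
                      ActivationLayerInfoEx(ActivationLayerInfoEx::ActivationFunction::RSQRT));
      rsqrt.run(); // ICLSimpleFunction::run() enqueues the underlying kernel
    }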
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgMinMax.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgMinMax.h
deleted file mode 100644
index 8044c58af..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgMinMax.h
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CLArgMinMax.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains arm_compute::CLArgMinMax class
- */
-
-#ifndef __ARM_COMPUTE_CLARG_MIN_MAX_H__
-#define __ARM_COMPUTE_CLARG_MIN_MAX_H__
-
-#include "arm_compute/core/CL/kernels/CLArgMinMaxKernel.h"
-#include "arm_compute/runtime/CL/CLTensor.h"
-#include "arm_compute/runtime/IFunction.h"
-#include "arm_compute/core/TypesEx.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/**
- * @brief Class to execute CLArgMinMax operation
- */
-class CLArgMinMax : public IFunction
-{
-public:
- /**
- * @brief Construct a new CLArgMinMax object
- */
- CLArgMinMax();
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers)
- */
- CLArgMinMax(const CLArgMinMax &) = delete;
-
- /**
- * @brief Prevent instances of this class from being copied (As this class contains pointers)
- */
- CLArgMinMax &operator=(const CLArgMinMax &) = delete;
-
- /**
- * @brief Construct a new CLArgMinMax object by using the move constructor
- * @param[in] CLArgMinMax object to move
- */
- CLArgMinMax(CLArgMinMax &&) = default;
-
- /**
- * @brief Assign a CLArgMinMax object.
- * @param[in] CLArgMinMax object to assign. This object will be moved.
- */
- CLArgMinMax &operator=(CLArgMinMax &&) = default;
-
- /**
- * @brief Initialise the kernel's inputs and outputs.
- * @param[in] input Input tensor. Data types supported: U8/QASYMM8/S32/F32.
- * @param[out] output The result of the arg min/max operation. Data types supported: same as @p
- * input.
- * @param[in] argminmax_axis Axes along which to compute the arg min/max. Must be sorted with
- * no duplicates.
- * @param[in] op Operation to perform: ArgOperation::MIN or ArgOperation::MAX.
- * @return N/A
- */
- void configure(ICLTensor *input, ICLTensor *output, std::vector<uint32_t> argminmax_axis,
- ArgOperation op);
-
- /**
- * @brief Static function to check if given info will lead to a valid configuration
- * @param[in] input Input tensor info. Data types supported: U8/QASYMM8/S32/F32.
- * @param[in] argminmax_axis Axes along which to compute the arg min/max.
- * @param[in] output The result of the arg min/max operation. Data types supported: same as @p
- * input.
- * @param[in] op Operation to perform: ArgOperation::MIN or ArgOperation::MAX.
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const std::vector<uint32_t> &argminmax_axis,
- const ITensorInfo *output, ArgOperation op);
-
- /**
- * @brief Run the kernels contained in the function
- * This operation runs on CPU or GPU depending on the value of the argminmax_MAX_RUN_ON_CPU macro
- * in CLArgMinMax.cpp.
- * If argminmax_MAX_RUN_ON_CPU == 1, CPU runs this operation.
- * Otherwise GPU runs this operation.
- * @return N/A
- */
- void run() override;
-
-private:
- ICLTensor *_input;
- ICLTensor *_output;
- std::vector<uint32_t> _argminmax_axis;
- ArgOperation _arg_op;
-
- std::unique_ptr<CLTensor[]> _interm_tensors{nullptr};
- std::unique_ptr<CLArgMinMaxKernel[]> _argminmax_kernels{nullptr};
- size_t _num_of_kernels;
-};
-}
-#endif /*__ARM_COMPUTE_CLARG_MIN_MAX_H__ */
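A usage sketch under the same assumptions (allocated tensors, initialised scheduler); the axis value is illustrative:

    #include "arm_compute/runtime/CL/functions/CLArgMinMax.h"

    #include <vector>

    using namespace arm_compute;

    void argmax_example(CLTensor &input, CLTensor &output)
    {
      CLArgMinMax argminmax;
      std::vector<uint32_t> axis{3}; // reduce over dimension 3; sorted, no duplicates
      argminmax.configure(&input, &output, axis, ArgOperation::MAX);
      argminmax.run(); // dispatches the chain of CLArgMinMaxKernel instances
    }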
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArithmeticSubtractionEx.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArithmeticSubtractionEx.h
deleted file mode 100644
index 34e6c6334..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArithmeticSubtractionEx.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLARITHMETICSUBTRACTIONEX_H__
-#define __ARM_COMPUTE_CLARITHMETICSUBTRACTIONEX_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Basic function to run @ref CLArithmeticSubtractionExKernel
- *
- * @note The tensor data type for the inputs must be U8/S16/F16/F32.
- * @note The function performs an arithmetic subtraction between two tensors.
- */
-class CLArithmeticSubtractionEx : public ICLSimpleFunction
-{
-public:
- /** Initialise the kernel's inputs, output and conversion policy.
- *
- * @param[in, out] input1 An input tensor. Data types supported: U8/S16/F16/F32.
- * The input tensor is [in, out] because its TensorInfo might be modified
- * inside the kernel in case of broadcasting of dimension 0.
- * @param[in, out] input2 An input tensor. Data types supported: same as @p input1.
- * The input tensor is [in, out] because its TensorInfo might be modified
- * inside the kernel in case of broadcasting of dimension 0.
- * @param[out] output Output tensor. Data types supported: U8 (Only if both inputs are U8),
- * S16/F16/F32.
- * @param[in] policy Policy to use to handle overflow.
- */
- void configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output, ConvertPolicy policy);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * CLArithmeticSubtractionEx
- *
- * @param[in] input1 First tensor input info. Data types supported: U8/S16/F16/F32.
- * @param[in] input2 Second tensor input info. Data types supported: U8/S16/F16/F32.
- * @param[in] output Output tensor info. Data types supported: U8 (Only if both inputs are U8),
- * S16/F16/F32.
- * @param[in] policy Policy to use to handle overflow.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, ConvertPolicy policy);
-};
-}
-#endif /* __ARM_COMPUTE_CLARITHMETICSUBTRACTIONEX_H__ */
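A sketch of the validate-then-configure pattern with the function above, assuming allocated tensors and Status's explicit bool conversion as in mainline ACL:

    #include "arm_compute/runtime/CL/functions/CLArithmeticSubtractionEx.h"

    using namespace arm_compute;

    void subtract_saturating(CLTensor &a, CLTensor &b, CLTensor &out)
    {
      Status st = CLArithmeticSubtractionEx::validate(a.info(), b.info(), out.info(),
                                                      ConvertPolicy::SATURATE);
      if (st) // true when the configuration is valid
      {
        CLArithmeticSubtractionEx sub;
        sub.configure(&a, &b, &out, ConvertPolicy::SATURATE);
        sub.run();
      }
    }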
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLComparisonOp.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLComparisonOp.h
deleted file mode 100644
index 1b0d70e7f..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLComparisonOp.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLCOMPARISON_OP_H__
-#define __ARM_COMPUTE_CLCOMPARISON_OP_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-#include "arm_compute/core/TypesEx.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-class CLComparisonOp : public ICLSimpleFunction
-{
-public:
- /** Initialise the function's source and destination.
- *
- * @param[in] input1 Source tensor1. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32.
- * @param[in] input2 Source tensor2. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32.
- * @param[out] output Output tensor. Data types supported: Same as @p input1.
- * @param[in] op Comparison operation to perform (EQUAL or NOT_EQUAL).
- */
- void configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output,
- const ComparisonOperation &op);
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLCOMPARISON_OP_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLExp.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLExp.h
deleted file mode 100644
index 2d0fc23a4..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLExp.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLEXP_H__
-#define __ARM_COMPUTE_CLEXP_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Basic function to run @ref CLExpKernel */
-class CLExp : public ICLSimpleFunction
-{
-public:
- /** Set the source, destination of the kernel
- *
- * @param[in] input Source tensor. Data type supported: F32.
- * @param[out] output Destination tensor. Data type supported: F32.
- */
- void configure(const ICLTensor *input, ICLTensor *output);
-};
-}
-#endif /* __ARM_COMPUTE_CLEXP_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGather.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGather.h
deleted file mode 100644
index f7fd3cda1..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGather.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CLGather.h
- * @brief This file contains CLGather class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __ARM_COMPUTE_CLGATHER_H__
-#define __ARM_COMPUTE_CLGATHER_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/**
- * @brief Class to run @ref CLGatherKernel.
- */
-class CLGather : public ICLSimpleFunction
-{
-public:
- /**
- * @brief Initialise the kernel's inputs and output.
- * @param[in] input1 An input tensor. Data types supported: U8/S32/F32.
- * @param[in] input2 An indices tensor. Data types supported: S32.
- * @param[out] output The output tensor. Data types supported: same as @p input1.
- * @return N/A
- */
- void configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output);
-
- /**
- * @brief Static function to check if given info will lead to a valid configuration
- * of @ref CLGather
- * @param[in] input1 An input tensor info. Data types supported: U8/S32/F32.
- * @param[in] input2 An indices tensor info. Data types supported: S32.
- * @param[in] output The output tensor info. Data types supported: same as @p input1.
- * @return a status
- */
- static Status validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output);
-};
-}
-#endif /*__ARM_COMPUTE_CLGATHER_H__ */
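A minimal sketch of the gather function above; indices must be an S32 tensor and all tensors are assumed allocated:

    #include "arm_compute/runtime/CL/functions/CLGather.h"

    using namespace arm_compute;

    void gather_example(CLTensor &params, CLTensor &indices, CLTensor &out)
    {
      CLGather gather;
      gather.configure(&params, &indices, &out); // out inherits params' data type
      gather.run();
    }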
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNormalizationLayerEx.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNormalizationLayerEx.h
deleted file mode 100644
index 4077245d5..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNormalizationLayerEx.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLNORMALIZATIONLAYEREX_H__
-#define __ARM_COMPUTE_CLNORMALIZATIONLAYEREX_H__
-
-#include "arm_compute/runtime/IFunction.h"
-
-#include "arm_compute/core/CL/kernels/CLFillBorderKernel.h"
-#include "arm_compute/core/CL/kernels/CLNormalizationLayerExKernel.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Basic function to compute a normalization layer. This function calls the following CL kernels:
- *
- * -# @ref CLFillBorderKernel
- * -# @ref CLNormalizationLayerExKernel
- *
- */
-class CLNormalizationLayerEx : public IFunction
-{
-public:
- /** Default constructor */
- CLNormalizationLayerEx();
- /** Set the input and output tensors.
- *
- * @param[in, out] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data types
- * supported: F16/F32 (Written to by the border handler)
- * @param[out] output Destination tensor. Dimensions, data type and number of channels must
- * match the input ones.
- * @param[in] norm_info Normalization layer information like the normalization type,
- * normalization size and other parameters.
- */
- void configure(ICLTensor *input, ICLTensor *output, const NormalizationLayerInfo &norm_info);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * CLNormalizationLayerEx
- *
- * @param[in] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data types supported:
- * F16/F32
- * @param[in] output Destination tensor. Dimensions, data type and number of channels must
- * match the input ones.
- * @param[in] norm_info Normalization layer information like the normalization type, normalization
- * size and other parameters.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const NormalizationLayerInfo &norm_info);
-
- // Inherited methods overridden:
- void run() override;
-
-private:
- CLNormalizationLayerExKernel _norm_kernel; /**< Normalization layer kernel to run */
- CLFillBorderKernel _border_handler; /**< Kernel to handle borders */
-};
-}
-#endif /* __ARM_COMPUTE_CLNORMALIZATIONLAYEREX_H__ */
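A usage sketch; the NormalizationLayerInfo parameters are illustrative (cross-map normalization over 5 neighbours):

    #include "arm_compute/runtime/CL/functions/CLNormalizationLayerEx.h"

    using namespace arm_compute;

    void normalize_cl(CLTensor &input, CLTensor &output)
    {
      CLNormalizationLayerEx norm;
      norm.configure(&input, &output, NormalizationLayerInfo(NormType::CROSS_MAP, 5));
      norm.run(); // fills borders, then runs the normalization kernel
    }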
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPadLayerEx.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPadLayerEx.h
deleted file mode 100644
index d6ea486d1..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPadLayerEx.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
-* Copyright (c) 2016-2018 ARM Limited.
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-#ifndef __ARM_COMPUTE_CLPADLAYEREX_H__
-#define __ARM_COMPUTE_CLPADLAYEREX_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Basic function to run @ref CLPadLayerKernel
- *
- * @note The tensor data type for the inputs must be U8/QASYMM8/S16/S32/F16/F32.
- * @note The function converts the input tensor to the output tensor's data type.
- */
-class CLPadLayerEx : public ICLSimpleFunction
-{
-public:
- /** Initialise the kernel's input and output.
- *
- * @param[in] input Input tensor. Data types supported:
- * U8/QASYMM8/S16/S32/F16/F32.
- * @param[out] output Output tensor. Data types supported:
- * U8/QASYMM8/S16/S32/F16/F32.
- * @param[in] pad_size Tensor of padding values in NHWC format, shape [n, 2],
- * where n is the rank of the input tensor. Data types supported: S32
- */
- void configure(ICLTensor *input, ICLTensor *output, ICLTensor *pad_size);
-};
-
-} // namespace arm_compute
-#endif /* __ARM_COMPUTE_CLPADLAYEREX_H__ */
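A sketch of the pad function; pad_size is assumed to be an S32 tensor of shape [rank, 2] holding per-dimension [before, after] pairs, filled elsewhere:

    #include "arm_compute/runtime/CL/functions/CLPadLayerEx.h"

    using namespace arm_compute;

    void pad_example(CLTensor &input, CLTensor &output, CLTensor &pad_size)
    {
      CLPadLayerEx pad_fn;
      pad_fn.configure(&input, &output, &pad_size);
      pad_fn.run();
    }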
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPermuteEx.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPermuteEx.h
deleted file mode 100644
index 9a0cc213c..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPermuteEx.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLPERMUTEEX_H__
-#define __ARM_COMPUTE_CLPERMUTEEX_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-/** Basic function to execute a @ref CLPermuteKernel. */
-class CLPermuteEx : public ICLSimpleFunction
-{
-public:
- /** Set the input and output tensors.
- *
- * @param[in] input The input tensor to permute. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] output The output tensor. Data types supported: Same as @p input
- * @param[in] perm Permutation vector
- */
- void configure(const ICLTensor *input, ICLTensor *output, const PermutationVector &perm);
- /** Static function to check if given info will lead to a valid configuration of @ref CLPermuteEx.
- *
- * @param[in] input First tensor input info. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32.
- * @param[in] output Output tensor info. Data types supported: same as @p input.
- * @param[in] perm Permutation vector
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const PermutationVector &perm);
-};
-}
-#endif /*__ARM_COMPUTE_CLPERMUTEEX_H__ */
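A sketch of the permute function; the permutation vector is illustrative (rotating the first three dimensions):

    #include "arm_compute/runtime/CL/functions/CLPermuteEx.h"

    using namespace arm_compute;

    void permute_example(CLTensor &input, CLTensor &output)
    {
      CLPermuteEx permute;
      permute.configure(&input, &output, PermutationVector(1U, 2U, 0U));
      permute.run();
    }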
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSquaredDifference.h b/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSquaredDifference.h
deleted file mode 100644
index 3610ba71c..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSquaredDifference.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_CLSQUARED_DIFFERENCE_H__
-#define __ARM_COMPUTE_CLSQUARED_DIFFERENCE_H__
-
-#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
-
-namespace arm_compute
-{
-class ICLTensor;
-
-class CLSquaredDifference : public ICLSimpleFunction
-{
-public:
- /** Initialise the function's source and destination.
- *
- * @param[in] input1 Source tensor1. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32.
- * @param[in] input2 Source tensor2. Data types supported:
- * U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32.
- * @param[out] output Output tensor. Data types supported: Same as @p input1.
- */
- void configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output);
-};
-} // namespace arm_compute
-#endif /*__ARM_COMPUTE_CLSQUARED_DIFFERENCE_H__*/
diff --git a/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NENormalizationLayerEx.h b/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NENormalizationLayerEx.h
deleted file mode 100644
index fa7408ecd..000000000
--- a/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NENormalizationLayerEx.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __ARM_COMPUTE_NENORMALIZATIONLAYEREX_H__
-#define __ARM_COMPUTE_NENORMALIZATIONLAYEREX_H__
-
-#include "arm_compute/runtime/IFunction.h"
-
-#include "arm_compute/core/NEON/kernels/NEFillBorderKernel.h"
-#include "arm_compute/core/NEON/kernels/NENormalizationLayerExKernel.h"
-#include "arm_compute/core/NEON/kernels/NEPixelWiseMultiplicationKernel.h"
-#include "arm_compute/runtime/MemoryGroup.h"
-
-namespace arm_compute
-{
-class ITensor;
-
-/** Basic function to compute a normalization layer. This function calls the following NEON kernels:
- *
- * -# @ref NEPixelWiseMultiplicationKernel
- * -# @ref NEFillBorderKernel
- * -# @ref NENormalizationLayerExKernel
- *
- */
-class NENormalizationLayerEx : public IFunction
-{
-public:
- /** Default constructor */
- NENormalizationLayerEx(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
- /** Set the input and output tensors.
- *
- * @param[in] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data type supported:
- * F16/F32
- * @param[out] output Destination with the same dimensions, data type and number of channels as
- * @p input
- * @param[in] norm_info Normalization layer information like the normalization type,
- * normalization size and other parameters.
- */
- void configure(const ITensor *input, ITensor *output, const NormalizationLayerInfo &norm_info);
- /** Static function to check if given info will lead to a valid configuration of @ref
- * NENormalizationLayerEx
- *
- * @param[in] input Source tensor. 3 lower dims represent a single input with dimensions
- * [width, height, IFM],
- * and an optional 4th dimension for batch of inputs. Data type supported:
- * F16/F32
- * @param[in] output Destination with the same dimensions, data type and number of channels as
- * @p input
- * @param[in] norm_info Normalization layer information like the normalization type, normalization
- * size and other parameters.
- *
- * @return a status
- */
- static Status validate(const ITensorInfo *input, const ITensorInfo *output,
- const NormalizationLayerInfo &norm_info);
-
- // Inherited methods overridden:
- void run() override;
-
-private:
- MemoryGroup _memory_group; /**< Function memory group */
- NENormalizationLayerExKernel _norm_kernel; /**< Normalization layer kernel */
- NEPixelWiseMultiplicationKernel _multiply_kernel; /**< Pixel multiplication kernel */
- NEFillBorderKernel _border_handler; /**< Kernel to handle borders */
- Tensor _input_squared; /**< The intermediate buffer which stores results of squaring input */
-};
-}
-#endif /* __ARM_COMPUTE_NENORMALIZATIONLAYEREX_H__ */
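A NEON-side sketch; with no memory manager passed (the default), the function manages its squared-input intermediate itself. Tensor allocation is assumed done elsewhere and the normalization parameters are illustrative:

    #include "arm_compute/runtime/NEON/functions/NENormalizationLayerEx.h"
    #include "arm_compute/runtime/Tensor.h"

    using namespace arm_compute;

    void normalize_neon(Tensor &input, Tensor &output)
    {
      NENormalizationLayerEx norm; // default: no shared IMemoryManager
      norm.configure(&input, &output, NormalizationLayerInfo(NormType::CROSS_MAP, 5));
      norm.run(); // squares the input, fills borders, then normalizes
    }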
diff --git a/libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp b/libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp
deleted file mode 100644
index 05ecdeb22..000000000
--- a/libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp
+++ /dev/null
@@ -1,409 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#include "arm_compute/core/CL/CLKernelLibrary.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/Error.h"
-#include "arm_compute/core/Utils.h"
-
-#include <algorithm>
-#include <fstream>
-#include <iostream>
-#include <utility>
-#include <vector>
-
-using namespace arm_compute;
-
-const std::map<std::string, std::string> CLKernelLibraryEx::_kernel_program_map = {
- // ARMComputeEx kernels
- {"activation_layer_ex", "activation_layer_ex.cl"},
- {"arg_op", "arg_operation.cl"},
- {"arithmetic_sub_ex", "arithmetic_op_ex.cl"},
- {"arithmetic_add_qasymm8", "arithmetic_op_quantized.cl"},
- {"batch_to_space_nd", "batch_to_space_nd.cl"},
- {"binary_logical_op", "binary_logical_op.cl"},
- {"cast", "cast.cl"},
- {"cast_qasymm_in", "cast.cl"},
- {"cast_qasymm_out", "cast.cl"},
- {"comparison_op", "comparison_op.cl"},
- {"comparison_op_qasymm8", "comparison_op_quantized.cl"},
- {"depth_to_space", "depth_to_space.cl"},
- {"embedding_lookup", "embedding_lookup.cl"},
- {"exp_layer", "exp.cl"},
- {"gather", "gather.cl"},
- {"gather_1d", "gather.cl"},
- {"gather_1d_out", "gather.cl"},
- {"hashtable_lookup", "hashtable_lookup.cl"},
- {"neg_tensor", "neg_tensor.cl"},
- {"pad", "pad.cl"},
- {"permute_generic", "permute_ex.cl"},
- {"pixelwise_mul_qasymm8", "pixelwise_mul_quantized.cl"},
- {"pixelwise_div_float", "pixelwise_div_float.cl"},
- {"pixelwise_div_int", "pixelwise_div_int.cl"},
- {"prelu", "prelu.cl"},
- {"prelu_qasymm8", "prelu_quantized.cl"},
- {"reduce_min_max", "reduce_operation.cl"},
- {"reduce_sum_mean", "reduce_operation.cl"},
- {"squared_difference", "squared_difference.cl"},
- {"strided_slice_ex", "strided_slice_ex.cl"},
- {"topkv2_init", "topkv2.cl"},
- {"topkv2_find_first_negative", "topkv2.cl"},
- {"topkv2_reorder_negatives", "topkv2.cl"},
- {"topkv2_store", "topkv2.cl"},
- {"radixsort_histogram", "topkv2_radixsort.cl"},
- {"radixsort_scanhistograms", "topkv2_radixsort.cl"},
- {"radixsort_pastehistograms", "topkv2_radixsort.cl"},
- {"radixsort_reorder", "topkv2_radixsort.cl"},
- {"topkv2_quicksort", "topkv2_quicksort.cl"},
- {"space_to_batch_4d_nchw", "space_to_batch.cl"},
- {"space_to_batch_4d_nhwc", "space_to_batch.cl"},
- {"space_to_depth", "space_to_depth.cl"},
-};
-
-const std::map<std::string, std::string> CLKernelLibraryEx::_program_source_map = {
-#ifdef EMBEDDED_KERNELS
- {
- "activation_layer_ex.cl",
-#include "./cl_kernels/activation_layer_ex.clembed"
- },
- {
- "arg_operation.cl",
-#include "./cl_kernels/arg_operation.clembed"
- },
- {
- "arithmetic_op_ex.cl",
-#include "./cl_kernels/arithmetic_op_ex.clembed"
- },
- {
- "batch_to_space_nd.cl",
-#include "./cl_kernels/batch_to_space_nd.clembed"
- },
- {
- "cast.cl",
-#include "./cl_kernels/cast.clembed"
- },
- {
- "comparison_op.cl",
-#include "./cl_kernels/comparison_op.clembed"
- },
- {
- "comparison_op_quantized.cl",
-#include "./cl_kernels/comparison_op_quantized.clembed"
- },
- {
- "embedding_lookup.cl",
-#include "./cl_kernels/embedding_lookup.clembed"
- },
- {
- "depth_to_space.cl",
-#include "./cl_kernels/depth_to_space.clembed"
- },
- {
- "exp.cl",
-#include "./cl_kernels/exp.clembed"
- },
- {
- "gather.cl",
-#include "./cl_kernels/gather.clembed"
- },
- {
- "hashtable_lookup.cl",
-#include "./cl_kernels/hashtable_lookup.clembed"
- },
- {
- "helpers.h",
-#include "./cl_kernels/helpers.hembed"
- },
- {
- "helpers_asymm.h",
-#include "./cl_kernels/helpers_asymm.hembed"
- },
- {
- "binary_logical_op.cl",
-#include "./cl_kernels/binary_logical_op.clembed"
- },
- {
- "neg_tensor.cl",
-#include "./cl_kernels/neg_tensor.clembed"
- },
- {
- "pad.cl",
-#include "./cl_kernels/pad.clembed"
- },
- {
- "pixelwise_div_float.cl",
-#include "./cl_kernels/pixelwise_div_float.clembed"
- },
- {
- "pixelwise_div_int.cl",
-#include "./cl_kernels/pixelwise_div_int.clembed"
- },
- {
- "prelu.cl",
-#include "./cl_kernels/prelu.clembed"
- },
- {
- "prelu_quantized.cl",
-#include "./cl_kernels/prelu_quantized.clembed"
- },
- {
- "reduce_operation.cl",
-#include "./cl_kernels/reduce_operation.clembed"
- },
- {
- "space_to_batch.cl",
-#include "./cl_kernels/space_to_batch.clembed"
- },
- {
- "space_to_depth.cl",
-#include "./cl_kernels/space_to_depth.clembed"
- },
- {
- "squared_difference.cl",
-#include "./cl_kernels/squared_difference.clembed"
- },
- {
- "strided_slice_ex.cl",
-#include "./cl_kernels/strided_slice_ex.clembed"
- },
- {
- "topkv2.cl",
-#include "./cl_kernels/topkv2.clembed"
- },
- {
- "topkv2_radixsort.cl",
-#include "./cl_kernels/topkv2_radixsort.clembed"
- },
- {
- "topkv2_quicksort.cl",
-#include "./cl_kernels/topkv2_quicksort.clembed"
- },
- {
- "permute_ex.cl",
-#include "./cl_kernels/permute_ex.clembed"
- },
-
-#endif /* EMBEDDED_KERNELS */
-};
-
-CLKernelLibraryEx::CLKernelLibraryEx()
- : _context(), _device(), _kernel_path("."), _programs_map(), _built_programs_map()
-{
- opencl_is_available(); // Make sure the OpenCL symbols are initialised *before* the
- // CLKernelLibraryEx is built
-}
-
-CLKernelLibraryEx &CLKernelLibraryEx::get()
-{
- static CLKernelLibraryEx _kernel_library;
- return _kernel_library;
-}
-
-Kernel CLKernelLibraryEx::create_kernel(const std::string &kernel_name,
- const StringSet &build_options_set) const
-{
- // Find which program contains the kernel
- auto kernel_program_it = _kernel_program_map.find(kernel_name);
-
- if (_kernel_program_map.end() == kernel_program_it)
- {
- ARM_COMPUTE_ERROR("Kernel %s not found in the CLKernelLibrary", kernel_name.c_str());
- }
- std::string concat_str;
-
- if (fp16_supported())
- {
- concat_str += " -DARM_COMPUTE_OPENCL_FP16_ENABLED=1 ";
- }
-
- if (get_cl_version(_device) == CLVersion::CL20)
- {
- concat_str += " -cl-std=CL2.0 ";
- }
- else if (arm_non_uniform_workgroup_supported(_device))
- {
- concat_str += " -cl-arm-non-uniform-work-group-size ";
- }
- else
- {
- ARM_COMPUTE_ERROR("Non uniform workgroup size is not supported!!");
- }
-
- // Check if the program has been built before with same build options.
- const std::string program_name = kernel_program_it->second;
- const std::string build_options = stringify_set(build_options_set) + concat_str;
-
- const std::string built_program_name = program_name + "_" + build_options;
- auto built_program_it = _built_programs_map.find(built_program_name);
-
- cl::Program cl_program;
-
- if (_built_programs_map.end() != built_program_it)
- {
- // If program has been built, retrieve to create kernel from it
- cl_program = built_program_it->second;
- }
- else
- {
- // Get program
- Program program = load_program(program_name);
-
- // Build program
- cl_program = program.build(build_options);
-
- // Add built program to internal map
- _built_programs_map.emplace(built_program_name, cl_program);
- }
-
- // Create and return kernel
- return Kernel(kernel_name, cl_program);
-}
-
-void CLKernelLibraryEx::add_built_program(const std::string &built_program_name,
- cl::Program program)
-{
- _built_programs_map.emplace(built_program_name, program);
-}
-
-bool CLKernelLibraryEx::fp16_supported() const { return ::fp16_supported(_device); }
-
-bool CLKernelLibraryEx::int64_base_atomics_supported() const
-{
- return device_supports_extension(_device, "cl_khr_int64_base_atomics");
-}
-
-const Program &CLKernelLibraryEx::load_program(const std::string &program_name) const
-{
- const auto program_it = _programs_map.find(program_name);
-
- if (program_it != _programs_map.end())
- {
- return program_it->second;
- }
-
- Program program;
-
-#ifdef EMBEDDED_KERNELS
- const auto program_source_it = _program_source_map.find(program_name);
-
- if (_program_source_map.end() == program_source_it)
- {
- ARM_COMPUTE_ERROR("Embedded program for %s does not exist.", program_name.c_str());
- }
-
- program = Program(_context, program_name, program_source_it->second);
-#else /* EMBEDDED_KERNELS */
- // Check for binary
- std::string source_name = _kernel_path + program_name;
- std::string binary_name = source_name + "bin";
-
- if (std::ifstream(binary_name).is_open())
- {
- const std::string program_binary = read_file(binary_name, true);
- program = Program(_context, _device, program_name,
- std::vector<unsigned char>(program_binary.begin(), program_binary.end()));
- }
- else if (std::ifstream(source_name).is_open())
- {
- program = Program(_context, program_name, read_file(source_name, false));
- }
- else
- {
- ARM_COMPUTE_ERROR("Kernel file %s does not exist.", source_name.c_str());
- }
-#endif /* EMBEDDED_KERNELS */
-
- // Insert program to program map
- const auto new_program = _programs_map.emplace(program_name, std::move(program));
-
- return new_program.first->second;
-}
-
-std::string CLKernelLibraryEx::stringify_set(const StringSet &s) const
-{
- std::string concat_set;
-
-#ifndef EMBEDDED_KERNELS
- concat_set += "-I" + _kernel_path + " ";
-#endif /* EMBEDDED_KERNELS */
-
- // Concatenate set
- for (const auto &el : s)
- {
- concat_set += " " + el;
- }
-
- return concat_set;
-}
-
-std::string CLKernelLibraryEx::get_program_source(const std::string &program_name)
-{
- const auto program_source_it = _program_source_map.find(program_name);
-
- if (program_source_it == _program_source_map.end())
- {
- ARM_COMPUTE_ERROR("Embedded program for %s does not exist.", program_name.c_str());
- }
-
- return program_source_it->second;
-}
-
-size_t CLKernelLibraryEx::max_local_workgroup_size(const cl::Kernel &kernel) const
-{
- size_t result;
-
- size_t err = kernel.getWorkGroupInfo(_device, CL_KERNEL_WORK_GROUP_SIZE, &result);
- ARM_COMPUTE_ERROR_ON_MSG(
- err != 0,
- "clGetKernelWorkGroupInfo failed to return the maximum workgroup size for the kernel");
- ARM_COMPUTE_UNUSED(err);
-
- return result;
-}
-
-cl::NDRange CLKernelLibraryEx::default_ndrange() const
-{
- // GPUTarget _target = get_target_from_device(_device);
- cl::Device device = cl::Device::getDefault();
- GPUTarget _target = get_target_from_device(device);
- cl::NDRange default_range;
-
- switch (_target)
- {
- case GPUTarget::MIDGARD:
- case GPUTarget::T600:
- case GPUTarget::T700:
- case GPUTarget::T800:
- default_range = cl::NDRange(128u, 1);
- break;
- default:
- default_range = cl::NullRange;
- }
-
- return default_range;
-}
-
-std::string CLKernelLibraryEx::get_device_version() { return _device.getInfo<CL_DEVICE_VERSION>(); }
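A sketch of building one of the kernels registered above; it assumes the Ex library's context and device were initialised analogously to CLKernelLibrary's, that StringSet is the usual std::set<std::string> alias, and that the -D options match what the kernel source expects:

    #include "arm_compute/core/CL/CLKernelLibraryEx.h"

    #include <set>
    #include <string>

    using namespace arm_compute;

    Kernel make_exp_kernel()
    {
      std::set<std::string> build_opts{"-DDATA_TYPE=float", "-DVEC_SIZE=16"};
      // Resolves "exp_layer" to "exp.cl" via _kernel_program_map, builds the
      // program (or reuses a cached build), and returns the wrapped kernel.
      return CLKernelLibraryEx::get().create_kernel("exp_layer", build_opts);
    }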
diff --git a/libs/ARMComputeEx/src/core/CL/OpenCLEx.cpp b/libs/ARMComputeEx/src/core/CL/OpenCLEx.cpp
deleted file mode 100644
index cbda169fb..000000000
--- a/libs/ARMComputeEx/src/core/CL/OpenCLEx.cpp
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * SPDX-License-Identifier: MIT
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to
- * deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include "arm_compute/core/CL/OpenCLEx.h"
-
-#include <dlfcn.h>
-#include <iostream>
-
-namespace arm_compute
-{
-CLSymbolsEx &CLSymbolsEx::get()
-{
- static CLSymbolsEx symbols;
- return symbols;
-}
-
-bool CLSymbolsEx::load_default()
-{
- static const std::vector<std::string> libraries{"libOpenCL.so", "libGLES_mali.so", "libmali.so"};
-
- if (_loaded.first)
- {
- return _loaded.second;
- }
-
- // Indicate that default loading has been tried
- _loaded.first = true;
-
- for (const auto &lib : libraries)
- {
- if (load(lib))
- {
- return true;
- }
- }
-
- std::cerr << "Couldn't find any OpenCL library.\n";
- return false;
-}
-
-bool CLSymbolsEx::load(const std::string &library)
-{
- void *handle = dlopen(library.c_str(), RTLD_LAZY | RTLD_LOCAL);
-
- if (handle == nullptr)
- {
- std::cerr << "Can't load " << library << ": " << dlerror() << "\n";
- // Set status of loading to failed
- _loaded.second = false;
- return false;
- }
-
-#define LOAD_FUNCTION_PTR(func_name, handle) \
- func_name##_ptr = reinterpret_cast<decltype(func_name) *>(dlsym(handle, #func_name));
-
- LOAD_FUNCTION_PTR(clGetEventInfo, handle);
- LOAD_FUNCTION_PTR(clSetEventCallback, handle);
-
-#undef LOAD_FUNCTION_PTR
-
- // Don't call dlclose(handle) or all the symbols will be unloaded !
-
- // Disable default loading and set status to successful
- _loaded = std::make_pair(true, true);
-
- return true;
-}
-
-} // namespace arm_compute
-
-cl_int clGetEventInfo(cl_event event, cl_event_info param_name, size_t param_value_size,
- void *param_value, size_t *param_value_size_ret)
-{
- arm_compute::CLSymbolsEx::get().load_default();
- auto func = arm_compute::CLSymbolsEx::get().clGetEventInfo_ptr;
- if (func != nullptr)
- {
- return func(event, param_name, param_value_size, param_value, param_value_size_ret);
- }
- else
- {
- return CL_OUT_OF_RESOURCES;
- }
-}
-
-cl_int clSetEventCallback(cl_event event, cl_int command_exec_callback_type,
- void(CL_CALLBACK *pfn_ev_notify)(cl_event ev, cl_int ev_cmd_exec_status,
- void *user_data),
- void *user_data)
-{
- arm_compute::CLSymbolsEx::get().load_default();
- auto func = arm_compute::CLSymbolsEx::get().clSetEventCallback_ptr;
- if (func != nullptr)
- {
- return func(event, command_exec_callback_type, pfn_ev_notify, user_data);
- }
- else
- {
- return CL_OUT_OF_RESOURCES;
- }
-}
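A sketch exercising the lazily-loaded entry points defined above; the event is assumed to come from an earlier enqueue, and the callback is hypothetical:

    #include <CL/cl.h>

    void CL_CALLBACK on_done(cl_event, cl_int /*exec_status*/, void * /*user_data*/)
    {
      // e.g. signal a condition variable
    }

    void watch_event(cl_event ev)
    {
      // Falls back to CL_OUT_OF_RESOURCES if no OpenCL library could be dlopen()ed.
      cl_int err = clSetEventCallback(ev, CL_COMPLETE, &on_done, nullptr);
      (void)err;
    }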
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/activation_layer_ex.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/activation_layer_ex.cl
deleted file mode 100644
index f54c7bde3..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/activation_layer_ex.cl
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#define TYPE VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)
-
-#define CONST_ONE 1.f
-#define DIV_OP(a, b) ((a) / (b))
-#define RSQRT_OP(a) DIV_OP(CONST_ONE, sqrt((a)))
-
-// Inverse Square-root Activation
-inline TYPE rsqrt_op(TYPE x)
-{
- return RSQRT_OP(x);
-}
-
-#define ACTIVATION_OP2(op, x) op##_op(x)
-#define ACTIVATION_OP(op, x) ACTIVATION_OP2(op, x)
-
-#if defined(ACT)
-
-/** This performs an activation function on floating point inputs.
- *
- * @note In order to perform the activation function "in-place", the preprocessor argument -DIN_PLACE must be passed at compile time
- *
- * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g. -DDATA_TYPE=short
- * @note Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @note Activation function should be given as a preprocessor argument using -DACT=name. e.g. -DACT=TANH
- * @note A, B variables required by some activation functions are set using -DA_VAL= and -DB_VAL= respectively.
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: F16/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void activation_layer_ex(
- TENSOR3D_DECLARATION(input)
-#ifndef IN_PLACE
- ,
- TENSOR3D_DECLARATION(output)
-#endif /* not IN_PLACE */
-)
-{
- // Get pixels pointer
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
-#ifdef IN_PLACE
- Tensor3D output = input;
-#else /* IN_PLACE */
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-#endif /* IN_PLACE */
-
- // Load data
- TYPE data = VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr);
-
- // Perform activation
- data = ACTIVATION_OP(ACT, data);
-
- // Store result
- VSTORE(VEC_SIZE)
- (data, 0, (__global DATA_TYPE *)output.ptr);
-}
-
-#endif /* defined(ACT) */
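
As a plain-C++ reference for what each work-item computes when the kernel is built with -DACT=RSQRT (a sketch of the arithmetic only, not of the tensor plumbing):

    #include <cmath>
    #include <cstdio>
    #include <vector>

    int main()
    {
      // RSQRT_OP(x) = CONST_ONE / sqrt(x), applied element-wise.
      const std::vector<float> in{1.f, 4.f, 16.f};
      for (float x : in)
        std::printf("rsqrt(%g) = %g\n", x, 1.f / std::sqrt(x)); // 1, 0.5, 0.25
      return 0;
    }
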
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl
deleted file mode 100644
index 9a6921d7c..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
-/** Perform arg_max/arg_min
- *
- * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g. -DDATA_TYPE=short
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size. e.g. -DDEPTH_OUT=16
- * @attention Operation type(code) specifying which operation to perform should be passed as preprocessor argument using
- *            -DOP_CODE=number, e.g. -DOP_CODE=1
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: U8/S8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[in] input_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] input_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[out] output_ptr Pointer to the destination image. Supported data types: U32
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- * @param[in]  axis                                  Axis along which the reduction is performed
- * @param[in]  dim                                   Number of elements along the axis to be reduced
- */
-
-__kernel void arg_op(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- const int axis,
- const int dim)
-{
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
- int indices[4] =
- {
- get_global_id(0),
- get_global_id(1),
- get_global_id(2) % DEPTH_OUT,
- get_global_id(2) / DEPTH_OUT,
- };
-
- DATA_TYPE value = *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3]));
- DATA_TYPE tval = value;
- int idx = 0;
- for(int i = 1; i < dim; ++i)
- {
- indices[axis] = i;
-
- #if OP_CODE == 1 // ArgMax
- value = max(value, *((__global DATA_TYPE *)
- tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3])));
- #elif OP_CODE == 2 //ArgMin
- value = min(value, *((__global DATA_TYPE *)
- tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3])));
- #else
- return;
-
- #endif
-
- if(tval!=value)
- {
- idx = indices[axis];
- tval = value;
- }
- }
-
- *((__global uint *)out.ptr) = idx;
-}
-#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
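
The reduction loop above updates the stored index only when the running extremum changes, so ties keep the first occurrence. A scalar C++ equivalent of the ArgMax case (OP_CODE == 1), with made-up data:

    #include <cstdio>

    int main()
    {
      const float data[] = {3.f, 9.f, 9.f, 1.f};
      const int dim = 4;

      float best = data[0];
      int idx = 0;
      for (int i = 1; i < dim; ++i)
      {
        const float v = best > data[i] ? best : data[i]; // max(value, next)
        if (v != best) // running max changed -> remember this index
        {
          idx = i;
          best = v;
        }
      }
      std::printf("argmax = %d\n", idx); // 1 (the tie at index 2 is ignored)
      return 0;
    }
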
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_ex.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_ex.cl
deleted file mode 100644
index 2ed698951..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_ex.cl
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifdef SATURATE
-#define SUB(x, y) sub_sat((x), (y))
-#else /* SATURATE */
-#define SUB(x, y) (x) - (y)
-#endif /* SATURATE */
-
-/** This function subtracts one tensor from another.
- *
- * @attention The input and output data_types need to be passed at compile time using -DDATA_TYPE_IN1, -DDATA_TYPE_IN2 and -DDATA_TYPE_OUT:
- * e.g. -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=uchar -DDATA_TYPE_OUT=short
- * @attention To perform saturating operation -DSATURATE has to be passed to the compiler otherwise wrapping policy will be used.
- *
- * @param[in] in1_ptr Pointer to the source tensor. Supported data types: U8, S16
- * @param[in] in1_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] in1_step_x in1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in1_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] in1_step_y in1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] in1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] in1_step_z in1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in1_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[in] in2_ptr Pointer to the source tensor. Supported data types: U8, S16
- * @param[in] in2_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] in2_step_x in2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in2_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] in2_step_y in2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] in2_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] in2_step_z in2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in2_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[out] out_ptr Pointer to the destination tensor. Supported data types: U8, S16
- * @param[in] out_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] out_step_x out_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] out_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] out_step_y out_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] out_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] out_step_z out_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] out_offset_first_element_in_bytes The offset of the first element in the destination tensor
- */
-__kernel void arithmetic_sub_ex(
- TENSOR3D_DECLARATION(in1),
- TENSOR3D_DECLARATION(in2),
- TENSOR3D_DECLARATION(out))
-{
- // Get pixels pointer
- Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
- Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
- Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
-
- // Load values
- VEC_DATA_TYPE(DATA_TYPE_OUT, 16)
- in_a = CONVERT(vload16(0, (__global DATA_TYPE_IN1 *)in1.ptr), VEC_DATA_TYPE(DATA_TYPE_OUT, 16));
- VEC_DATA_TYPE(DATA_TYPE_OUT, 16)
- in_b = CONVERT(vload16(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(DATA_TYPE_OUT, 16));
-
- // Calculate and store result
- vstore16(SUB(in_a, in_b), 0, (__global DATA_TYPE_OUT *)out.ptr);
-}
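
The SATURATE switch selects between wrapping and clamping semantics for the subtraction. A C++ sketch of the difference for U8 data (sub_sat_u8 is a hypothetical helper mirroring OpenCL's sub_sat):

    #include <cstdint>
    #include <cstdio>

    static uint8_t sub_sat_u8(uint8_t a, uint8_t b)
    {
      const int r = int(a) - int(b);
      return r < 0 ? 0 : uint8_t(r); // clamp to the U8 range instead of wrapping
    }

    int main()
    {
      std::printf("saturating: %d\n", int(sub_sat_u8(10, 30)));                 // 0
      std::printf("wrapping:   %d\n", int(uint8_t(uint8_t(10) - uint8_t(30)))); // 236
      return 0;
    }
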
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl
deleted file mode 100644
index 5cd0a4309..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers_asymm.h"
-
-#ifdef SATURATE
-#define ADD(x, y) add_sat((x), (y))
-#define SUB(x, y) sub_sat((x), (y))
-#else /* SATURATE */
-#define ADD(x, y) (x) + (y)
-#define SUB(x, y) (x) - (y)
-#endif /* SATURATE */
-
-/** Performs a pixelwise addition used to quantize down the int32 accumulator values of GEMMLowp to QASYMM8
- *
- * The following computations will be performed:
- *
- * -# Add offset terms to inputs
- * -# Get scaled value of two inputs
- * -# Add inputs
- * -# Add offset terms to final result
- * -# Multiply each entry of result by result_mult_int
- * -# Shift the int32 accumulator by result_shift
- * -# Clamp the resulting int32 values to the [0..255] range and cast to QASYMM8.
- *
- * @attention The inputs and output data types need to be passed at compile time using -DDATA_TYPE_IN1, -DDATA_TYPE_IN2 and -DDATA_TYPE_OUT:
- * e.g. -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=uchar -DDATA_TYPE_OUT=uchar
- * @attention The number of bits to shift left of input tensors must be passed at compile time using -DLEFT_SHIFT
- * @attention The offset, scalar scale factor and number of bits to shift right of input tensors must be passed at compile time using -DIN1_OFFSET, -DIN1_MULT_INT, -DIN1_SHIFT, -DIN2_OFFSET, -DIN2_MULT_INT and -DIN2_SHIFT
- * @attention The offset, scalar scale factor and number of bits to shift right of output tensor must be passed at compile time using -DRESULT_OFFSET, -DRESULT_MULT_INT and -DRESULT_SHIFT
- *
- * @attention The inputs and output scale information of qasymm8 need to be passed at compile time using -DSCALE_IN1, -DSCALE_IN2 and -DSCALE_OUT:
- * e.g. -DSCALE_IN1=1.f -DSCALE_IN2=1.f -DSCALE_OUT=2.f
- * @attention The inputs and output scale offset need to be passed at compile time using -DOFFSET_IN1, -DOFFSET_IN2 and -DOFFSET_OUT:
- * e.g. -DOFFSET_IN1=0 -DOFFSET_IN2=0 -DOFFSET_OUT=0
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @attention To perform saturating operation -DSATURATE has to be passed to the compiler otherwise wrapping policy will be used.
- *
- * @param[in] in1_ptr Pointer to the source tensor. Supported data types: QASYMM8
- * @param[in] in1_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] in1_step_x in1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in1_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] in1_step_y in1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] in1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] in1_step_z in1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in1_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[in] in2_ptr Pointer to the source tensor. Supported data types: QASYMM8
- * @param[in] in2_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] in2_step_x in2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in2_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] in2_step_y in2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] in2_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] in2_step_z in2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in2_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[out] out_ptr Pointer to the destination tensor. Supported data types: QASYMM8
- * @param[in] out_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] out_step_x out_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] out_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] out_step_y out_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] out_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] out_step_z out_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] out_offset_first_element_in_bytes The offset of the first element in the destination tensor
- */
-__kernel void arithmetic_add_qasymm8(
- TENSOR3D_DECLARATION(in1),
- TENSOR3D_DECLARATION(in2),
- TENSOR3D_DECLARATION(out))
-{
- // Get pixels pointer
- Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
- Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
- Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
-
- // Load data
- VEC_DATA_TYPE(int, 16)
- in1_data = CONVERT(vload16(0, (__global DATA_TYPE_IN1 *)in1.ptr), VEC_DATA_TYPE(int, 16));
- VEC_DATA_TYPE(int, 16)
- in2_data = CONVERT(vload16(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(int, 16));
-
- // Get scaled value of two inputs
- VEC_DATA_TYPE(int, 16) in1_val = in1_data + (VEC_DATA_TYPE(int, 16))(IN1_OFFSET);
- VEC_DATA_TYPE(int, 16) in2_val = in2_data + (VEC_DATA_TYPE(int, 16))(IN2_OFFSET);
-
- VEC_DATA_TYPE(int, 16) left_shift = (VEC_DATA_TYPE(int, 16))1 << (VEC_DATA_TYPE(int, 16))(LEFT_SHIFT);
- VEC_DATA_TYPE(int, 16) shifted_in1_val = in1_val * left_shift;
- VEC_DATA_TYPE(int, 16) shifted_in2_val = in2_val * left_shift;
-
- VEC_DATA_TYPE(int, 16) scaled_in1_val = ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(shifted_in1_val, IN1_MULT_INT, IN1_SHIFT, 16);
- VEC_DATA_TYPE(int, 16) scaled_in2_val = ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(shifted_in2_val, IN2_MULT_INT, IN2_SHIFT, 16);
-
- // Add inputs and multiply with a multiplier smaller than 1
- VEC_DATA_TYPE(int, 16) sum_val = scaled_in1_val + scaled_in2_val;
- VEC_DATA_TYPE(int, 16) out_val = ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(sum_val, RESULT_MULT_INT, RESULT_SHIFT, 16);
- out_val += (VEC_DATA_TYPE(int, 16))(RESULT_OFFSET);
-
- VEC_DATA_TYPE(uchar, 16) res = CONVERT(out_val, VEC_DATA_TYPE(uchar, 16));
-
-// TODO: Apply min-max BOUND to support fusing with ReLU.
-/*
-#if defined(MIN_BOUND)
- res = max(res, (uchar16)MIN_BOUND);
-#endif // defined(MIN_BOUND)
-#if defined(MAX_BOUND)
- res = min(res, (uchar16)MAX_BOUND);
-#endif // defined(MAX_BOUND)
-*/
-
- // Store result
- VSTORE(16)(CONVERT(res, VEC_DATA_TYPE(DATA_TYPE_OUT, 16)),
- 0, (__global DATA_TYPE_OUT *)out.ptr);
-}
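
The fixed-point pipeline above (add offsets, shift left, multiply by a quantized multiplier, shift right, re-offset) is an integer-only approximation of "dequantize, add, requantize". A float-domain C++ sketch of the computation it approximates, with hypothetical quantization parameters:

    #include <algorithm>
    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    static uint8_t quantized_add(uint8_t a, uint8_t b,
                                 float scale1, int offset1,
                                 float scale2, int offset2,
                                 float scale_out, int offset_out)
    {
      // Dequantize both operands, add in the real domain, requantize.
      const float real = (a - offset1) * scale1 + (b - offset2) * scale2;
      const int q = int(std::lround(real / scale_out)) + offset_out;
      return uint8_t(std::clamp(q, 0, 255)); // the [0..255] clamp before the cast
    }

    int main()
    {
      // Illustrative parameters only; prints 125 for these values.
      std::printf("%d\n", int(quantized_add(120, 130, 0.5f, 128, 0.5f, 128, 1.0f, 128)));
      return 0;
    }
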
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/batch_to_space_nd.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/batch_to_space_nd.cl
deleted file mode 100644
index ad6a48a02..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/batch_to_space_nd.cl
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE0) && defined(BLOCK_SIZE1) && defined(BATCH_OUT)
-/** Perform batch to space rearrangement of tensor
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Output tensor batch should be given as a preprocessor argument using -DBATCH_OUT=size. e.g. -DBATCH_OUT=16
- * @attention Block sizes should be given as preprocessor arguments using -DBLOCK_SIZE0=size and -DBLOCK_SIZE1=size. e.g. -DBLOCK_SIZE0=1 -DBLOCK_SIZE1=1
- *
- * @param[in] input_ptr Pointer to the source tensor. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[out] output_ptr                            Pointer to the destination tensor. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in]  output_step_y                         output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- */
-__kernel void batch_to_space_nd(
- TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output))
- {
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
- int out_index[4]={0};
- int in_index[4]={0};
-
- out_index[0] = get_global_id(0);//W
- out_index[1] = get_global_id(1);//H
- out_index[2] = get_global_id(2) % DEPTH_OUT;//C
- out_index[3] = get_global_id(2) / DEPTH_OUT;//N
-
- in_index[0] = out_index[0]/BLOCK_SIZE1;
- in_index[1] = out_index[1]/BLOCK_SIZE0;
- in_index[2] = out_index[2];
- in_index[3] = out_index[3] + ((out_index[1] % BLOCK_SIZE0) * BLOCK_SIZE0 + out_index[0] % BLOCK_SIZE1) * BATCH_OUT;
-
- *((__global DATA_TYPE *)out.ptr) = *((__global DATA_TYPE *)tensor4D_offset(&in, in_index[0], in_index[1], in_index[2], in_index[3]));
- }
-#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE0) && defined(BLOCK_SIZE1) && defined(BATCH_OUT)
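
Each output coordinate maps back to exactly one input element: the spatial position is divided by the block sizes, and the intra-block offset selects the input batch. A C++ trace of the kernel's index arithmetic for a single output element (parameter values chosen for illustration):

    #include <cstdio>

    int main()
    {
      const int BLOCK_SIZE0 = 2, BLOCK_SIZE1 = 2, BATCH_OUT = 1;
      const int out_w = 3, out_h = 5, out_c = 0, out_n = 0;

      // Same arithmetic as the kernel body above.
      const int in_w = out_w / BLOCK_SIZE1;
      const int in_h = out_h / BLOCK_SIZE0;
      const int in_c = out_c;
      const int in_n = out_n + ((out_h % BLOCK_SIZE0) * BLOCK_SIZE0 + out_w % BLOCK_SIZE1) * BATCH_OUT;

      std::printf("out(%d,%d,%d,%d) <- in(%d,%d,%d,%d)\n",
                  out_w, out_h, out_c, out_n, in_w, in_h, in_c, in_n);
      return 0;
    }
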
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl
deleted file mode 100644
index bea61f53e..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(OP_CODE) && defined(DATA_TYPE)
-/** Returns the truth value of a binary logical operation applied to the two input tensors,
- * where the operation can be AND or OR.
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=uchar
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @attention Operation type(code) specifying which operation to perform should be passed as preprocessor argument using
- *            -DOP_CODE=number, e.g. -DOP_CODE=1
- *
- * @param[in] input1_ptr Pointer to the source tensor. Supported data types: QASYMM8
- * @param[in] input1_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input1_step_x input1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input1_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input1_step_y input1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input1_step_z input1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[in]  input2_ptr                            Pointer to the source tensor. Supported data types: QASYMM8
- * @param[in] input2_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input2_step_x input2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input2_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input2_step_y input2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input2_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input2_step_z input2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input2_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: QASYMM8
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- */
-__kernel void binary_logical_op(
- TENSOR3D_DECLARATION(input1),
- TENSOR3D_DECLARATION(input2),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input1 = CONVERT_TO_TENSOR3D_STRUCT(input1);
- Tensor3D input2 = CONVERT_TO_TENSOR3D_STRUCT(input2);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- #if OP_CODE == 1 // LOGICAL AND
- VSTORE(VEC_SIZE)
- (CONVERT(VLOAD(VEC_SIZE)
- (0, (__global DATA_TYPE *)input1.ptr) && VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input2.ptr),
- VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)), 0, (__global DATA_TYPE *)output.ptr);
-
- #elif OP_CODE == 2 // LOGICAL OR
- VSTORE(VEC_SIZE)
- (CONVERT(VLOAD(VEC_SIZE)
- (0, (__global DATA_TYPE *)input1.ptr) || VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input2.ptr),
- VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)), 0, (__global DATA_TYPE *)output.ptr);
-
- #else // OP NOT SUPPORTED
-  return;
-
- #endif
-}
-#endif // defined(OP_CODE) && defined(DATA_TYPE)
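
One detail worth noting: OpenCL's vector && and || evaluate to -1 for true, so after the CONVERT the stored "true" is 255 for uchar data. A scalar C++ reference that uses plain 0/1 instead:

    #include <cstdint>
    #include <cstdio>

    int main()
    {
      const uint8_t a[] = {0, 1, 1, 0};
      const uint8_t b[] = {0, 1, 0, 1};
      for (int i = 0; i < 4; ++i)
      {
        const uint8_t and_v = (a[i] && b[i]) ? 1 : 0; // OP_CODE == 1
        const uint8_t or_v = (a[i] || b[i]) ? 1 : 0;  // OP_CODE == 2
        std::printf("%d: and=%d or=%d\n", i, int(and_v), int(or_v));
      }
      return 0;
    }
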
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl
deleted file mode 100644
index 3d4675e5d..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef SCALE
-#define SCALE 1.0f
-#endif
-#ifndef OFFSET
-#define OFFSET 0
-#endif
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE_IN) && defined(DATA_TYPE_OUT)
-/** Perform a cast operation on an input tensor.
- *
- * @attention Data types of both input and output can be passed using the -DDATA_TYPE_IN and -DDATA_TYPE_OUT compile flag, e.g. -DDATA_TYPE_IN=float, -DDATA_TYPE_OUT=int
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: F16/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void cast(
- TENSOR3D_DECLARATION(input),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VSTORE(VEC_SIZE)(CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input.ptr),
- VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)),
- 0, (__global DATA_TYPE_OUT *)output.ptr);
-}
-
-/** Perform a cast operation on a QASYMM8 input tensor.
- * @attention Data types of both input and output can be passed using the -DDATA_TYPE_IN and -DDATA_TYPE_OUT compile flag, e.g. -DDATA_TYPE_IN=float, -DDATA_TYPE_OUT=int
- * @attention Offset and Scale of input should be given as a preprocessor argument using -DOFFSET=int, -DSCALE=float. e.g. -DOFFSET=1, -DSCALE=0.5
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- *
- * @param[in]  input_ptr                             Pointer to the source image. Supported data types: QASYMM8
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void cast_qasymm_in(
- TENSOR3D_DECLARATION(input),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VEC_DATA_TYPE(DATA_TYPE_IN, VEC_SIZE) in_data =
- VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input.ptr);
- VEC_DATA_TYPE(int, VEC_SIZE) offset = (VEC_DATA_TYPE(int, VEC_SIZE))(OFFSET);
- VEC_DATA_TYPE(float, VEC_SIZE) scale = (VEC_DATA_TYPE(float, VEC_SIZE))(SCALE);
-
- VEC_DATA_TYPE(int, VEC_SIZE) tmp = CONVERT(in_data, VEC_DATA_TYPE(int, VEC_SIZE)) - offset;
- VEC_DATA_TYPE(float, VEC_SIZE) out_data = CONVERT(tmp, VEC_DATA_TYPE(float, VEC_SIZE)) * scale;
-
- VSTORE(VEC_SIZE)(CONVERT(out_data, VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)),
- 0, (__global DATA_TYPE_OUT *)output.ptr);
-}
-
-
-/** Perform a cast operation on a QASYMM8 output tensor.
- * @attention Data types of both input and output can be passed using the -DDATA_TYPE_IN and -DDATA_TYPE_OUT compile flag, e.g. -DDATA_TYPE_IN=float, -DDATA_TYPE_OUT=int
- * @attention Offset and Scale of output should be given as a preprocessor argument using -DOFFSET=int, -DSCALE=float. e.g. -DOFFSET=1, -DSCALE=0.5
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: F16/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr Pointer to the destination image. Supported data types: U8
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void cast_qasymm_out(
- TENSOR3D_DECLARATION(input),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VEC_DATA_TYPE(DATA_TYPE_IN, VEC_SIZE) in_data =
- VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input.ptr);
- VEC_DATA_TYPE(int, VEC_SIZE) offset = (VEC_DATA_TYPE(int, VEC_SIZE))(OFFSET);
- VEC_DATA_TYPE(float, VEC_SIZE) scale = (VEC_DATA_TYPE(float, VEC_SIZE))(SCALE);
-
- VEC_DATA_TYPE(float, VEC_SIZE) tmp = CONVERT(in_data, VEC_DATA_TYPE(float, VEC_SIZE)) / scale;
- VEC_DATA_TYPE(float, VEC_SIZE) out_data = tmp + CONVERT(offset, VEC_DATA_TYPE(float, VEC_SIZE));
-
- VSTORE(VEC_SIZE)(CONVERT(out_data, VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)),
- 0, (__global DATA_TYPE_OUT *)output.ptr);
-}
-#endif // defined(DATA_TYPE_IN) && defined(DATA_TYPE_OUT)
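
In short, cast_qasymm_in dequantizes with (q - OFFSET) * SCALE and cast_qasymm_out requantizes with x / SCALE + OFFSET. A C++ round trip with hypothetical OFFSET/SCALE values:

    #include <cstdint>
    #include <cstdio>

    int main()
    {
      const float SCALE = 0.5f;
      const int OFFSET = 128;

      const uint8_t q = 130;
      const float real = (q - OFFSET) * SCALE;             // cast_qasymm_in: 1.0
      const uint8_t back = uint8_t(real / SCALE + OFFSET); // cast_qasymm_out: 130

      std::printf("dequant(%d) = %g, requant = %d\n", int(q), real, int(back));
      return 0;
    }
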
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op.cl
deleted file mode 100644
index 765072556..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op.cl
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE_IN) && defined(DATA_TYPE_OUT) && defined(OP_CODE)
-/** Returns truth value of comparison operators.
- * Comparison operators may be equal, not_equal, etc.
- *
- * @attention The input and output data types need to be passed at compile time using -DDATA_TYPE_IN, -DDATA_TYPE_OUT,
- *            e.g. -DDATA_TYPE_IN=float, -DDATA_TYPE_OUT=uchar
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @attention Operation type(code) specifying which operation to perform should be passed as preprocessor argument using
- *            -DOP_CODE=number, e.g. -DOP_CODE=1
- *
- * @param[in] input1_ptr Pointer to the source tensor. Supported data types: U8/S8/U16/S16/F16/U32/S32/F32
- * @param[in] input1_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input1_step_x input1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input1_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input1_step_y input1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input1_step_z input1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[in] input2_ptr Pointer to the source tensor. Supported data types: U8/S8/U16/S16/F16/U32/S32/F32
- * @param[in] input2_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input2_step_x input2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input2_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input2_step_y input2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input2_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input2_step_z input2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input2_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: QASYMM8
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void comparison_op(
- TENSOR3D_DECLARATION(input1),
- TENSOR3D_DECLARATION(input2),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input1 = CONVERT_TO_TENSOR3D_STRUCT(input1);
- Tensor3D input2 = CONVERT_TO_TENSOR3D_STRUCT(input2);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- #if OP_CODE == 1 //EQUAL
- VSTORE(VEC_SIZE)
- (CONVERT(VLOAD(VEC_SIZE)
- (0, (__global DATA_TYPE_IN *)input1.ptr) == VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input2.ptr),
- VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)),0, (__global DATA_TYPE_OUT *)output.ptr);
-
- #elif OP_CODE == 2 //NOT_EQUAL
- VSTORE(VEC_SIZE)
- (CONVERT(VLOAD(VEC_SIZE)
- (0, (__global DATA_TYPE_IN *)input1.ptr) != VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input2.ptr),
- VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)), 0, (__global DATA_TYPE_OUT *)output.ptr);
-
- #else // OP NOT SUPPORTED
- return;
-
- #endif
-}
-#endif // defined(DATA_TYPE_IN) && defined(DATA_TYPE_OUT) && defined(OP_CODE)
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op_quantized.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op_quantized.cl
deleted file mode 100644
index 1eb305f7b..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/comparison_op_quantized.cl
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-#define SUB(x, y) (x) - (y)
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(OFFSET_IN1) && defined(OFFSET_IN2) && defined(SCALE_IN1) && defined(SCALE_IN2) && defined(DATA_TYPE_OUT)
-
-#define VEC_FLOAT VEC_DATA_TYPE(float, VEC_SIZE)
-#define VEC_INT VEC_DATA_TYPE(int, VEC_SIZE)
-#define VEC_OUT VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)
-
-/** Returns the truth value of a comparison.
- * @attention Offset and Scale of both input should be given as a preprocessor argument using -DOFFSET_IN1=int, -DOFFSET_IN2=int, -DSCALE_IN1=float and -DSCALE_IN2=float. e.g. -DOFFSET_IN1=1, -DOFFSET_IN2=0, -DSCALE_IN1=0.5, -DSCALE_IN2=0.5
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @attention Operation type(code) specifying which operation to perform should be passed as preprocessor argument using
- *            -DOP_CODE=number, e.g. -DOP_CODE=1
- *
- * @param[in] input1_ptr Pointer to the source tensor. Supported data types: QASYMM8
- * @param[in] input1_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input1_step_x input1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input1_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input1_step_y input1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input1_step_z input1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[in] input2_ptr Pointer to the source tensor. Supported data types: QASYMM8
- * @param[in] input2_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input2_step_x input2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input2_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input2_step_y input2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input2_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input2_step_z input2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input2_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: QASYMM8
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- */
-__kernel void comparison_op_qasymm8(
- TENSOR3D_DECLARATION(in1),
- TENSOR3D_DECLARATION(in2),
- TENSOR3D_DECLARATION(out))
-{
- // Get pixels pointer
- Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
- Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
- Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
-
- VEC_INT in_a = CONVERT(VLOAD(VEC_SIZE)(0, (__global uchar *)in1.ptr), VEC_INT);
- VEC_INT in_b = CONVERT(VLOAD(VEC_SIZE)(0, (__global uchar *)in2.ptr), VEC_INT);
-
- in_a = SUB(in_a, (VEC_INT)((int)OFFSET_IN1));
- in_b = SUB(in_b, (VEC_INT)((int)OFFSET_IN2));
-
- const VEC_FLOAT in1f32 = CONVERT(in_a, VEC_FLOAT) * (VEC_FLOAT)((float)SCALE_IN1);
- const VEC_FLOAT in2f32 = CONVERT(in_b, VEC_FLOAT) * (VEC_FLOAT)((float)SCALE_IN2);
-
-  #if OP_CODE == 1 // EQUAL QUANTIZED
- VSTORE(VEC_SIZE)(CONVERT(in1f32 == in2f32, VEC_OUT), 0, (__global DATA_TYPE_OUT *)out.ptr);
-
-  #elif OP_CODE == 2 // NOT EQUAL QUANTIZED
- VSTORE(VEC_SIZE)(CONVERT(in1f32 != in2f32, VEC_OUT), 0, (__global DATA_TYPE_OUT *)out.ptr);
-
- #else // OP NOT SUPPORTED
- return;
-
- #endif
-}
-#endif // defined(OFFSET_IN1) && defined(OFFSET_IN2) && defined(SCALE_IN1) && defined(SCALE_IN2) && defined(DATA_TYPE_OUT)
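
Because each operand is dequantized with its own (offset, scale) pair before the compare, two tensors can be equal in the real domain even when their raw uint8 codes differ. A C++ sketch with hypothetical parameters:

    #include <cstdint>
    #include <cstdio>

    int main()
    {
      const int OFFSET_IN1 = 128, OFFSET_IN2 = 0;
      const float SCALE_IN1 = 0.5f, SCALE_IN2 = 1.0f;

      const uint8_t a = 130, b = 1; // different raw codes...
      const float a_f = (a - OFFSET_IN1) * SCALE_IN1; // 1.0
      const float b_f = (b - OFFSET_IN2) * SCALE_IN2; // 1.0

      std::printf("equal: %d\n", a_f == b_f ? 1 : 0); // ...same real value
      return 0;
    }
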
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl
deleted file mode 100644
index fef2243e7..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE)
-/** Perform depth to space rearrangement of tensor
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size. e.g. -DDEPTH_OUT=16
- * @attention block size should be given as a preprocessor argument using -DBLOCK_SIZE=size. e.g. -DBLOCK_SIZE=1
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr                            Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in]  output_step_y                         output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void depth_to_space(
- TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output))
- {
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
- int out_index[4]={0};
- int in_index[4]={0};
-
- out_index[0] = get_global_id(0);//W
- out_index[1] = get_global_id(1);//H
- out_index[2] = get_global_id(2) % DEPTH_OUT;//C
- out_index[3] = get_global_id(2) / DEPTH_OUT;//B
-
- in_index[0] = out_index[0]/BLOCK_SIZE;
- in_index[1] = out_index[1]/BLOCK_SIZE;
- in_index[2] = out_index[2] + ((out_index[1] % BLOCK_SIZE) * BLOCK_SIZE + out_index[0] % BLOCK_SIZE) * DEPTH_OUT;
- in_index[3] = out_index[3];
-
- *((__global DATA_TYPE *)out.ptr) = *((__global DATA_TYPE *)tensor4D_offset(&in, in_index[0], in_index[1], in_index[2],in_index[3]));
- }
-#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE)
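
Here each output pixel (w, h, c) reads input position (w / BLOCK_SIZE, h / BLOCK_SIZE), with the position inside the BLOCK_SIZE x BLOCK_SIZE tile folded into the input channel. A C++ trace of the same mapping (parameter values chosen for illustration):

    #include <cstdio>

    int main()
    {
      const int BLOCK_SIZE = 2, DEPTH_OUT = 4;
      const int out_w = 3, out_h = 5, out_c = 1, out_n = 0;

      // Same arithmetic as the kernel body above.
      const int in_w = out_w / BLOCK_SIZE;
      const int in_h = out_h / BLOCK_SIZE;
      const int in_c = out_c + ((out_h % BLOCK_SIZE) * BLOCK_SIZE + out_w % BLOCK_SIZE) * DEPTH_OUT;
      const int in_n = out_n;

      std::printf("out(%d,%d,%d,%d) <- in(%d,%d,%d,%d)\n",
                  out_w, out_h, out_c, out_n, in_w, in_h, in_c, in_n);
      return 0;
    }
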
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl
deleted file mode 100644
index 348458fe9..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
-/** Perform embedding_lookup of input tensor
- *
- * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g. -DDATA_TYPE=short
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=depth. e.g. -DDEPTH_OUT=16
- * @attention The number of input dimensions is passed as a preprocessor argument using -DNUM_DIMS=size, e.g. -DNUM_DIMS=4
- *
- * @param[in] input_ptr Pointer to the source tensor. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[in] input_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] input_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- * @param[in] lookups_ptr Pointer to the lookups vector. Supported data types: S32
- * @param[in] lookups_stride_x Stride of the lookups vector in X dimension (in bytes)
- * @param[in] lookups_step_x lookups_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] lookups_offset_first_element_in_bytes The offset of the first element in the lookups vector
- */
-
-__kernel void embedding_lookup(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- VECTOR_DECLARATION(lookups))
-{
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, DEPTH_OUT);
-
- Vector lups = CONVERT_TO_VECTOR_STRUCT_NO_STEP(lookups);
-
-    // lookup ids based on the tensor dimensions
- int lup_id[4] = {0};
-
- lup_id[0] = (NUM_DIMS == 1)?*((__global int *)vector_offset(&lups,get_global_id(0)))
- :get_global_id(0);
- lup_id[1] = (NUM_DIMS == 2)?*((__global int *)vector_offset(&lups,get_global_id(1)))
- :get_global_id(1);
- lup_id[2] = (NUM_DIMS == 3)?*((__global int *)vector_offset(&lups,get_global_id(2)))
- :get_global_id(2)%DEPTH_OUT;
- lup_id[3] = (NUM_DIMS == 4)?*((__global int *)vector_offset(&lups, get_global_id(2) / DEPTH_OUT))
- :get_global_id(2) / DEPTH_OUT;
-
- in.ptr += input_offset_first_element_in_bytes + lup_id[0] * input_step_x + lup_id[1] * input_step_y
- + lup_id[2] * input_step_z + lup_id[3] * input_step_w;
-
- VSTORE(VEC_SIZE)(CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)in.ptr), VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)),
- 0, (__global DATA_TYPE *)out.ptr);
-}
-#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
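
What the deleted kernel computes is a strided row gather: for NUM_DIMS == n, the work-item coordinate along the n-th dimension is replaced by an index read from the lookups vector, and a VEC_SIZE-wide chunk is copied from the selected input slice. A minimal scalar C sketch of the same semantics on a flattened 2-D view (illustrative names, not library API):

    #include <stddef.h>

    /* Scalar reference for embedding_lookup: out[i][:] = in[lookups[i]][:].
     * The CL kernel generalizes this to strided 4-D tensors and copies
     * VEC_SIZE elements per work item. */
    static void embedding_lookup_ref(const float *in, float *out,
                                     const int *lookups, size_t n_lookups,
                                     size_t row_len)
    {
        for (size_t i = 0; i < n_lookups; ++i)
            for (size_t c = 0; c < row_len; ++c)
                out[i * row_len + c] = in[(size_t)lookups[i] * row_len + c];
    }
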
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/exp.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/exp.cl
deleted file mode 100644
index 69d94f30a..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/exp.cl
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE)
-/** Perform an exponential operation on an input tensor.
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @note Can only take floating point data types.
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: F16/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  output_stride_z                      Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void exp_layer(
- TENSOR3D_DECLARATION(input),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VSTORE(VEC_SIZE)
- (exp(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr)), 0, (__global DATA_TYPE *)output.ptr);
-}
-#endif // defined(DATA_TYPE)
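
Since DATA_TYPE and VEC_SIZE are compile-time flags, every specialization of this kernel is a separate program build. A hedged host-side sketch using the plain OpenCL C API (the `program` and `device` handles are assumed to be created elsewhere; the library itself routes this through its own kernel-library machinery):

    #include <CL/cl.h>

    /* Build exp_layer for float data, 16 elements per work item. */
    cl_int build_exp_layer(cl_program program, cl_device_id device)
    {
        const char *options = "-DDATA_TYPE=float -DVEC_SIZE=16";
        return clBuildProgram(program, 1, &device, options, NULL, NULL);
    }
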
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/gather.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/gather.cl
deleted file mode 100644
index 6b767d6c9..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/gather.cl
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-/** Perform gather
- *
- * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g. -DDATA_TYPE=short
- *
- * @param[in] input1_ptr Pointer to the first source tensor. Supported data types: U8/S32/F32
- * @param[in] input1_stride_x Stride of the first source tensor in X dimension (in bytes)
- * @param[in] input1_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input1_stride_y Stride of the first source tensor in Y dimension (in bytes)
- * @param[in] input1_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input1_stride_z Stride of the first source tensor in Z dimension (in bytes)
- * @param[in] input1_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the first source tensor
- * @param[in]  input2_ptr                            Pointer to the second source tensor. Supported data types: U32
- * @param[in]  input2_stride_x                       Stride of the second source tensor in X dimension (in bytes)
- * @param[in]  input2_step_x                         input2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in]  input2_offset_first_element_in_bytes  The offset of the first element in the second source tensor
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- */
-__kernel void gather(IMAGE_DECLARATION(input1),
- VECTOR_DECLARATION(input2),
- IMAGE_DECLARATION(output))
-{
- Image in1 = CONVERT_TO_IMAGE_STRUCT_NO_STEP(input1);
- Vector in2 = CONVERT_TO_VECTOR_STRUCT(input2);
- Image out = CONVERT_TO_IMAGE_STRUCT_NO_STEP(output);
-
- VEC_DATA_TYPE(DATA_TYPE_IN2, 2)
- in2_data = CONVERT(vload2(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(DATA_TYPE_IN2, 2));
-
-    // TODO: performance tuning for memcopy
-    int index  = in2_data.s0;
-    int stride = input1_stride_y / input1_stride_x;
-
-    for(int i = 0; i < stride; i++)
-    {
-        *((__global DATA_TYPE_OUT *)offset(&out, i, get_global_id(0))) = *((__global DATA_TYPE_IN1 *)offset(&in1, i, index));
-    }
-}
-
-__kernel void gather_1d_out(IMAGE_DECLARATION(input1),
- VECTOR_DECLARATION(input2),
- VECTOR_DECLARATION(output))
-{
- Image in1 = CONVERT_TO_IMAGE_STRUCT_NO_STEP(input1);
- Vector in2 = CONVERT_TO_VECTOR_STRUCT(input2);
- Vector out = CONVERT_TO_VECTOR_STRUCT_NO_STEP(output);
-
- VEC_DATA_TYPE(DATA_TYPE_IN2, 2)
- in2_data = CONVERT(vload2(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(DATA_TYPE_IN2, 2));
-
-    // TODO: performance tuning for memcopy
-    int index  = in2_data.s0;
-    int stride = input1_stride_y / input1_stride_x;
-
-    for(int i = 0; i < stride; i++)
-    {
-        *((__global DATA_TYPE_OUT *)vector_offset(&out, i + get_global_id(0))) = *((__global DATA_TYPE_IN1 *)offset(&in1, i, index));
-    }
-}
-
-__kernel void gather_1d(VECTOR_DECLARATION(input1),
- VECTOR_DECLARATION(input2),
- VECTOR_DECLARATION(output))
-{
- Vector in1 = CONVERT_TO_VECTOR_STRUCT_NO_STEP(input1);
- Vector in2 = CONVERT_TO_VECTOR_STRUCT(input2);
- Vector out = CONVERT_TO_VECTOR_STRUCT_NO_STEP(output);
-
- VEC_DATA_TYPE(DATA_TYPE_IN2, 2)
- in2_data = CONVERT(vload2(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(DATA_TYPE_IN2, 2));
-
-    // TODO: performance tuning for memcopy
-    int index = in2_data.s0;
-
-    *((__global DATA_TYPE_OUT *)vector_offset(&out, get_global_id(0))) = *((__global DATA_TYPE_IN1 *)vector_offset(&in1, index));
-}
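
All three deleted variants copy whole rows: input1_stride_y / input1_stride_x recovers the number of elements per row, and each work item copies the row selected by its index. A scalar C sketch of the shared semantics on a flattened layout (illustrative names):

    #include <stddef.h>

    /* Scalar reference for gather: out[g][:] = in[indices[g]][:]. */
    static void gather_ref(const float *in, size_t row_len,
                           const int *indices, size_t n_indices, float *out)
    {
        for (size_t g = 0; g < n_indices; ++g)
            for (size_t i = 0; i < row_len; ++i)
                out[g * row_len + i] = in[(size_t)indices[g] * row_len + i];
    }
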
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl
deleted file mode 100644
index ed7409852..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
-/** Perform hashtable_lookup of input tensor
- *
- * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g. -DDATA_TYPE=short
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=depth. e.g. -DDEPTH_OUT=16
- * @attention Number of input dimensions should be given as a preprocessor argument using -DNUM_DIMS=size, e.g. -DNUM_DIMS=4
- *
- * @param[in] input_ptr Pointer to the source tensor. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[in] input_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in]  input_step_w                          input_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  output_stride_z                       Stride of the destination tensor in Z dimension (in bytes)
- * @param[in]  output_step_z                         output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in]  output_stride_w                       Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- * @param[in] lookups_ptr Pointer to the lookups vector. Supported data types: S32
- * @param[in] lookups_stride_x Stride of the lookups vector in X dimension (in bytes)
- * @param[in] lookups_step_x lookups_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] lookups_offset_first_element_in_bytes The offset of the first element in the lookups vector
- */
-__kernel void hashtable_lookup(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- VECTOR_DECLARATION(lookups))
-{
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, DEPTH_OUT);
-
- Vector lups = CONVERT_TO_VECTOR_STRUCT_NO_STEP(lookups);
-
- int lup_id[4] = {0};
-
- lup_id[0] = (NUM_DIMS == 1)?*((__global int *)vector_offset(&lups,get_global_id(0)))
- :get_global_id(0);
- lup_id[1] = (NUM_DIMS == 2)?*((__global int *)vector_offset(&lups,get_global_id(1)))
- :get_global_id(1);
- lup_id[2] = (NUM_DIMS == 3)?*((__global int *)vector_offset(&lups,get_global_id(2)))
- :get_global_id(2)%DEPTH_OUT;
- lup_id[3] = (NUM_DIMS == 4)?*((__global int *)vector_offset(&lups, get_global_id(2) / DEPTH_OUT))
- :get_global_id(2) / DEPTH_OUT;
-
- if (lup_id[NUM_DIMS-1] < 0)
- {
- VSTORE(VEC_SIZE)((VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE))0, 0, (__global DATA_TYPE *)out.ptr);
- return;
- }
-
- in.ptr += input_offset_first_element_in_bytes + lup_id[0] * input_step_x + lup_id[1] * input_step_y
- + lup_id[2] * input_step_z + lup_id[3] * input_step_w;
-
- VSTORE(VEC_SIZE)(CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)in.ptr), VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)),
- 0, (__global DATA_TYPE *)out.ptr);
-}
-#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
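
The only behavioral difference from embedding_lookup is the miss case: a negative lookup id zero-fills the output slice instead of reading the input. A scalar C sketch of that contract (illustrative names):

    #include <string.h>

    /* Scalar reference for hashtable_lookup: a negative id means
     * "key not found" and produces a zeroed output row. */
    static void hashtable_lookup_ref(const float *in, float *out,
                                     const int *lookups, size_t n,
                                     size_t row_len)
    {
        for (size_t i = 0; i < n; ++i) {
            if (lookups[i] < 0)
                memset(&out[i * row_len], 0, row_len * sizeof(float));
            else
                memcpy(&out[i * row_len], &in[(size_t)lookups[i] * row_len],
                       row_len * sizeof(float));
        }
    }
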
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl
deleted file mode 100644
index e3aa463db..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE)
-/** Performs a negation of input tensor.
- *
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- *
- * @param[in]  input_ptr                             Pointer to the source image. Supported data types: S16/S32/F16/F32.
- * @param[in]  input_stride_x                        Stride of the source image in X dimension (in bytes)
- * @param[in]  input_step_x                          input_stride_x * number of elements along X processed per work item (in bytes)
- * @param[in]  input_offset_first_element_in_bytes   Offset of the first element in the source image
- * @param[out] output_ptr                            Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in]  output_stride_x                       Stride of the destination image in X dimension (in bytes)
- * @param[in]  output_step_x                         output_stride_x * number of elements along X processed per work item (in bytes)
- * @param[in]  output_offset_first_element_in_bytes  Offset of the first element in the destination image
- */
-__kernel void neg_tensor(
- TENSOR3D_DECLARATION(input),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VSTORE(VEC_SIZE)
- (-VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr), 0, (__global DATA_TYPE *)output.ptr);
-}
-#endif // defined(DATA_TYPE)
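
The kernel is a plain elementwise negation; VEC_SIZE only controls how many contiguous elements each work item handles. Scalar equivalent, for reference:

    #include <stddef.h>

    /* Scalar reference for neg_tensor: out[i] = -in[i]. */
    static void neg_ref(const float *in, float *out, size_t n)
    {
        for (size_t i = 0; i < n; ++i)
            out[i] = -in[i];
    }
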
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/pad.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/pad.cl
deleted file mode 100644
index ecf4696e9..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/pad.cl
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(IW) && defined(IH) && defined(ID) && defined(IB) && defined(DEPTH_OUT) && defined(ZERO_VALUE)
-/** Perform padding of the input tensor with a constant value
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size. e.g. -DDEPTH_OUT=16
- * @attention Input dimensions should be passed as preprocessor arguments using -DIW (width), -DIH (height), -DID (depth) and -DIB (batch), e.g. -DIW=4
- * @attention The pad value should be given as a preprocessor argument using -DZERO_VALUE=value, e.g. -DZERO_VALUE=0
- *
- * @param[in] input_ptr Pointer to the source tensor. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source tensor
- *
- * @param[out] output_ptr                            Pointer to the destination tensor. Supported data types: same as @p input_ptr
- * @param[in]  output_stride_x                       Stride of the destination tensor in X dimension (in bytes)
- * @param[in]  output_step_x                         output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in]  output_stride_y                       Stride of the destination tensor in Y dimension (in bytes)
- * @param[in]  output_step_y                         output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- *
- * @param[in]  pad_values                            Padding values for each of the dimensions. Only the pad values for Up
- *                                                   (batch), Top (height), Left (width) and Front (depth) are required.
- *                                                   Supported data type: S32
- */
-
-__kernel void pad(
- TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- const int4 pad_values)
- {
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
-    int index[4] = {0};
-
-    index[0] = get_global_id(0);             // W
-    index[1] = get_global_id(1);             // H
-    index[2] = get_global_id(2) % DEPTH_OUT; // C
-    index[3] = get_global_id(2) / DEPTH_OUT; // N
-
- if (index[0] < pad_values.x || index[0] >= (IW + pad_values.x) ||
- index[1] < pad_values.y || index[1] >= (IH + pad_values.y) ||
- index[2] < pad_values.z || index[2] >= (ID + pad_values.z) ||
- index[3] < pad_values.w || index[3] >= (IB + pad_values.w))
- {
- *((__global DATA_TYPE *)out.ptr) = (DATA_TYPE)ZERO_VALUE;
- }
- else
- {
- *((__global DATA_TYPE *)out.ptr) = *((__global DATA_TYPE *)
- tensor4D_offset(&in, index[0] - pad_values.x,
- index[1] - pad_values.y,
- index[2] - pad_values.z,
- index[3] - pad_values.w));
- }
- }
-
-#endif // defined(IW) && defined(IH) && defined(ID) && defined(IB) && defined(DEPTH_OUT) && defined(ZERO_VALUE)
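
Each output coordinate is tested against the padded region independently in all four dimensions; only the leading pad amounts are needed because the input extents (IW/IH/ID/IB) bound the copy region. A 1-D scalar C sketch of the same test (illustrative names):

    #include <stddef.h>

    /* Scalar 1-D reference for pad: positions inside
     * [pad_before, pad_before + in_len) copy the input, everything else
     * is filled with the pad value. The CL kernel applies this test to
     * W, H, C and N at once. */
    static void pad1d_ref(const float *in, size_t in_len,
                          float *out, size_t out_len,
                          size_t pad_before, float zero_value)
    {
        for (size_t o = 0; o < out_len; ++o)
            out[o] = (o >= pad_before && o < pad_before + in_len)
                         ? in[o - pad_before]
                         : zero_value;
    }
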
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/permute_ex.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/permute_ex.cl
deleted file mode 100644
index 7cc8b0354..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/permute_ex.cl
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "helpers.h"
-
-#if defined(DATA_TYPE) && defined(DEPTH_IN) && defined(P1) && defined(P2) && defined(P3) && defined(P4)
-/** Perform a generic permute operation on an input tensor of shape DCHW.
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Input tensor depth should be given as a preprocessor argument using -DDEPTH_IN=size. e.g. -DDEPTH_IN=16
- * @attention The permutation vector should be passed as preprocessor arguments using -DP1, -DP2, -DP3 and -DP4, e.g. -DP1=2
- *
- * @param[in]  input_ptr                            Pointer to the source image. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in]  input_stride_x                       Stride of the source image in X dimension (in bytes)
- * @param[in]  input_step_x                         input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in]  input_stride_y                       Stride of the source image in Y dimension (in bytes)
- * @param[in]  input_step_y                         input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  input_stride_z                       Stride of the source tensor in Z dimension (in bytes)
- * @param[in]  input_step_z                         input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in]  input_offset_first_element_in_bytes  The offset of the first element in the source image
- * @param[out] output_ptr                           Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in]  output_stride_x                      Stride of the destination image in X dimension (in bytes)
- * @param[in]  output_step_x                        output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in]  output_stride_y                      Stride of the destination image in Y dimension (in bytes)
- * @param[in]  output_step_y                        output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  output_stride_z                      Stride of the destination tensor in Z dimension (in bytes)
- * @param[in]  output_step_z                        output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in]  output_stride_w                      Stride of the destination tensor in W dimension (in bytes)
- * @param[in]  output_step_w                        output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in]  output_offset_first_element_in_bytes The offset of the first element in the destination image
-__kernel void permute_generic(
- TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output))
-{
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, DEPTH_IN);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(output, 0);
-
-    int out_index[4];
-    int in_index[4];
-
-    in_index[0] = get_global_id(0);            // W
-    in_index[1] = get_global_id(1);            // H
-    in_index[2] = get_global_id(2) % DEPTH_IN; // C
-    in_index[3] = get_global_id(2) / DEPTH_IN; // B
- out_index[0] = in_index[P1];
- out_index[1] = in_index[P2];
- out_index[2] = in_index[P3];
- out_index[3] = in_index[P4];
-
- *((__global DATA_TYPE *)tensor4D_offset(&out, out_index[0],out_index[1],out_index[2],out_index[3])) = *((__global DATA_TYPE *)in.ptr);
-}
-#endif // defined(DATA_TYPE) && defined(DEPTH_IN) && defined(P1) && defined(P2) && defined(P3) && defined(P4)
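
The index mapping is the whole algorithm: out_index[i] = in_index[P_i]. A scalar C reference over a dense 4-D layout (illustrative; the kernel works on strided tensors rather than linearized offsets):

    #include <stddef.h>

    /* Scalar reference for permute_generic: rearrange each input
     * coordinate through the permutation vector p, i.e.
     * out_idx[i] = in_idx[p[i]], mirroring the P1..P4 build flags. */
    static void permute4d_ref(const float *in, float *out,
                              const int dims[4], /* W, H, C, N of input */
                              const int p[4])
    {
        int out_dims[4], in_idx[4], out_idx[4];
        for (int i = 0; i < 4; ++i)
            out_dims[i] = dims[p[i]];
        for (in_idx[3] = 0; in_idx[3] < dims[3]; ++in_idx[3])
        for (in_idx[2] = 0; in_idx[2] < dims[2]; ++in_idx[2])
        for (in_idx[1] = 0; in_idx[1] < dims[1]; ++in_idx[1])
        for (in_idx[0] = 0; in_idx[0] < dims[0]; ++in_idx[0]) {
            for (int i = 0; i < 4; ++i)
                out_idx[i] = in_idx[p[i]];
            /* linearize both coordinates, dimension 0 fastest */
            size_t src = ((in_idx[3] * (size_t)dims[2] + in_idx[2]) * dims[1]
                          + in_idx[1]) * dims[0] + in_idx[0];
            size_t dst = ((out_idx[3] * (size_t)out_dims[2] + out_idx[2])
                          * out_dims[1] + out_idx[1]) * out_dims[0] + out_idx[0];
            out[dst] = in[src];
        }
    }
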
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_float.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_float.cl
deleted file mode 100644
index aa05121b1..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_float.cl
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifdef SATURATE
-#define CONVERT_OP_FLOAT_STR(x, type, round) (convert_##type##_sat##round(x))
-#else /* SATURATE */
-#define CONVERT_OP_FLOAT_STR(x, type, round) (convert_##type##round(x))
-#endif /* SATURATE */
-#define CONVERT_OP_FLOAT(x, type, round) CONVERT_OP_FLOAT_STR(x, type, round)
-
-/** Performs a pixelwise division with float scale of either integer or float inputs.
- *
- * @attention The inputs and output data types need to be passed at compile time using -DDATA_TYPE_IN1, -DDATA_TYPE_IN2 and -DDATA_TYPE_OUT:
- * e.g. -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=ushort -DDATA_TYPE_OUT=short
- * @attention The data type of the intermediate result of the division should be passed as well using -DDATA_TYPE_RES,
- *            e.g. if one of the inputs is S16, -DDATA_TYPE_RES=int should be passed; otherwise -DDATA_TYPE_RES=short.
- * @attention -DDATA_TYPE_FLOAT must be passed if floating point inputs are provided.
- *
- * @param[in] in1_ptr Pointer to the source image. Supported data types: U8, S16, F16, F32
- * @param[in] in1_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] in1_step_x in1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in1_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] in1_step_y in1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  in1_stride_z                      Stride of the source image in Z dimension (in bytes)
- * @param[in]  in1_step_z                        in1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in1_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[in] in2_ptr Pointer to the source image. Supported data types: U8, S16, F16, F32
- * @param[in] in2_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] in2_step_x in2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in2_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] in2_step_y in2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  in2_stride_z                      Stride of the source image in Z dimension (in bytes)
- * @param[in]  in2_step_z                        in2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in2_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] out_ptr Pointer to the destination image. Supported data types: U8, S16, F16, F32
- * @param[in] out_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] out_step_x out_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] out_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] out_step_y out_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  out_stride_z                      Stride of the destination image in Z dimension (in bytes)
- * @param[in]  out_step_z                        out_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] out_offset_first_element_in_bytes The offset of the first element in the destination image
- * @param[in] scale Float scaling factor. Supported data types: F32
- */
-__kernel void pixelwise_div_float(
- TENSOR3D_DECLARATION(in1),
- TENSOR3D_DECLARATION(in2),
- TENSOR3D_DECLARATION(out),
- const float scale)
-{
- // Get pixels pointer
- Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
- Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
- Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
-
- // Load data
- VEC_DATA_TYPE(DATA_TYPE_RES, 16)
- in1_data = CONVERT(vload16(0, (__global DATA_TYPE_IN1 *)in1.ptr), VEC_DATA_TYPE(DATA_TYPE_RES, 16));
- VEC_DATA_TYPE(DATA_TYPE_RES, 16)
- in2_data = CONVERT(vload16(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(DATA_TYPE_RES, 16));
-
- // Perform division
-#ifdef DATA_TYPE_FLOAT
- VEC_DATA_TYPE(DATA_TYPE_OUT, 16)
- res = CONVERT(in1_data / in2_data * (DATA_TYPE_RES)scale, VEC_DATA_TYPE(DATA_TYPE_OUT, 16));
-#else /* DATA_TYPE_FLOAT */
- VEC_DATA_TYPE(DATA_TYPE_OUT, 16)
- res = CONVERT_OP_FLOAT(CONVERT_OP_FLOAT((convert_float16(in1_data / in2_data) * scale), VEC_DATA_TYPE(DATA_TYPE_RES, 16), ROUND), VEC_DATA_TYPE(DATA_TYPE_OUT, 16), ROUND);
-#endif /* DATA_TYPE_FLOAT */
-
- // Store result
- vstore16(res, 0, (__global DATA_TYPE_OUT *)out.ptr);
-}
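
For integer inputs the kernel widens to DATA_TYPE_RES, divides, scales in float, then rounds and converts back. A scalar C sketch for S16 inputs (rounding simplified to roundf; the kernel's exact behavior depends on its ROUND and SATURATE build flags):

    #include <math.h>
    #include <stdint.h>

    /* Scalar reference for pixelwise_div_float on S16 data. */
    static int16_t div_float_ref(int16_t a, int16_t b, float scale)
    {
        float r = roundf(((float)a / (float)b) * scale);
        if (r > 32767.f)  r = 32767.f;   /* saturate, as with SATURATE */
        if (r < -32768.f) r = -32768.f;
        return (int16_t)r;
    }
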
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_int.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_int.cl
deleted file mode 100644
index fdfb78003..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_div_int.cl
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(SATURATE)
-#define CONVERT_OP_INT_STR(x, type, size) (convert_##type##size##_sat(x))
-#else // SATURATE
-#define CONVERT_OP_INT_STR(x, type, size) (convert_##type##size(x))
-#endif // SATURATE
-#define CONVERT_OP_INT(x, type, size) CONVERT_OP_INT_STR(x, type, size)
-
-#define DIV_OP(x, y, scale, type, size) CONVERT_OP_INT((x) / (y) >> scale, type, size)
-
-/** Performs a pixelwise division with integer scale of integer inputs.
- *
- * @attention The inputs and output data types need to be passed at compile time using -DDATA_TYPE_IN1, -DDATA_TYPE_IN2 and -DDATA_TYPE_OUT:
- * e.g. -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=ushort -DDATA_TYPE_OUT=short
- * @attention The data type of the intermediate result of the division should be passed as well using -DDATA_TYPE_RES,
- *            e.g. if one of the inputs is S16, -DDATA_TYPE_RES=int should be passed; otherwise -DDATA_TYPE_RES=short.
- *
- * @param[in] in1_ptr Pointer to the source image. Supported data types: U8/S16
- * @param[in] in1_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] in1_step_x in1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in1_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] in1_step_y in1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  in1_stride_z                      Stride of the source image in Z dimension (in bytes)
- * @param[in]  in1_step_z                        in1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in1_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[in] in2_ptr Pointer to the source image. Supported data types: same as @p in1_ptr
- * @param[in] in2_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] in2_step_x in2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in2_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] in2_step_y in2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  in2_stride_z                      Stride of the source image in Z dimension (in bytes)
- * @param[in]  in2_step_z                        in2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in2_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] out_ptr Pointer to the destination image. Supported data types: same as @p in1_ptr
- * @param[in] out_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] out_step_x out_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] out_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] out_step_y out_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  out_stride_z                      Stride of the destination image in Z dimension (in bytes)
- * @param[in]  out_step_z                        out_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] out_offset_first_element_in_bytes The offset of the first element in the destination image
- * @param[in] scale Integer scaling factor. Supported data types: S32
- */
-__kernel void pixelwise_div_int(
- TENSOR3D_DECLARATION(in1),
- TENSOR3D_DECLARATION(in2),
- TENSOR3D_DECLARATION(out),
- const uint scale)
-{
- // Get pixels pointer
- Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
- Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
- Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
-
- // Load data
- VEC_DATA_TYPE(DATA_TYPE_RES, 16)
- in1_data = CONVERT(vload16(0, (__global DATA_TYPE_IN1 *)in1.ptr), VEC_DATA_TYPE(DATA_TYPE_RES, 16));
- VEC_DATA_TYPE(DATA_TYPE_RES, 16)
- in2_data = CONVERT(vload16(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(DATA_TYPE_RES, 16));
-
- // Perform division and store result
- vstore16(DIV_OP(in1_data, in2_data, scale, DATA_TYPE_OUT, 16), 0, (__global DATA_TYPE_OUT *)out.ptr);
-}
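
Here the scale is a power-of-two divisor: DIV_OP shifts the widened quotient right by `scale` bits before converting back to the output type. A scalar C sketch for S16 data with the saturation written out explicitly:

    #include <stdint.h>

    /* Scalar reference for pixelwise_div_int: ((a / b) >> scale), saturated. */
    static int16_t div_int_ref(int16_t a, int16_t b, unsigned scale)
    {
        int r = (a / b) >> scale;
        if (r > 32767)  r = 32767;
        if (r < -32768) r = -32768;
        return (int16_t)r;
    }
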
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl
deleted file mode 100644
index ab1307e64..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers_asymm.h"
-
-#ifdef SATURATE
-#define CONVERT_OP_FLOAT_STR(x, type, round) (convert_##type##_sat##round(x))
-#else /* SATURATE */
-#define CONVERT_OP_FLOAT_STR(x, type, round) (convert_##type##round(x))
-#endif /* SATURATE */
-#define CONVERT_OP_FLOAT(x, type, round) CONVERT_OP_FLOAT_STR(x, type, round)
-
-#if defined(RESULT_OFFSET) && defined(RESULT_MULT_INT) && defined(RESULT_SHIFT)
-/** Performs a pixelwise multiplication used to quantize down the int32 accumulator values of GEMMLowp to QASYMM8
- *
- * The following computations will be performed by the kernel:
- *
- * -# Add offset terms to inputs
- * -# Multiply inputs
- * -# Add offset terms to final result
- * -# Multiply each entry of result by result_mult_int
- * -# Shift the int32 accumulator by result_shift
- * -# Clamp the resulting int32 values to the [0..255] range and cast to QASYMM8.
- *
- * @attention The inputs and output data types need to be passed at compile time using -DDATA_TYPE_IN1, -DDATA_TYPE_IN2 and -DDATA_TYPE_OUT:
- * e.g. -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=uchar -DDATA_TYPE_OUT=uchar
- * @attention The offset factor of inputs must be passed at compile time using -DIN1_OFFSET and -DIN2_OFFSET
- * @attention The offset, scalar scale factor and number of bits to shift right of output tensor must be passed at compile time using -DRESULT_OFFSET, -DRESULT_MULT_INT and -DRESULT_SHIFT
- *
- * @param[in] in1_ptr Pointer to the source image. Supported data types: U8
- * @param[in] in1_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] in1_step_x in1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in1_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] in1_step_y in1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  in1_stride_z                      Stride of the source image in Z dimension (in bytes)
- * @param[in]  in1_step_z                        in1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in1_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[in] in2_ptr Pointer to the source image. Supported data types: U8
- * @param[in] in2_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] in2_step_x in2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] in2_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] in2_step_y in2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  in2_stride_z                      Stride of the source image in Z dimension (in bytes)
- * @param[in]  in2_step_z                        in2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] in2_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] out_ptr Pointer to the destination image. Supported data types: U8
- * @param[in] out_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] out_step_x out_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] out_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] out_step_y out_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  out_stride_z                      Stride of the destination image in Z dimension (in bytes)
- * @param[in]  out_step_z                        out_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] out_offset_first_element_in_bytes The offset of the first element in the destination image
- * @param[in] scale Float scaling factor. Supported data types: F32
- */
-__kernel void pixelwise_mul_qasymm8(
- TENSOR3D_DECLARATION(in1),
- TENSOR3D_DECLARATION(in2),
- TENSOR3D_DECLARATION(out),
- const float scale)
-{
- // Get pixels pointer
- Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
- Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
- Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
-
- // Load data
- VEC_DATA_TYPE(int, 16)
- in1_data = CONVERT(vload16(0, (__global DATA_TYPE_IN1 *)in1.ptr), VEC_DATA_TYPE(int, 16));
- VEC_DATA_TYPE(int, 16)
- in2_data = CONVERT(vload16(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(int, 16));
-
- // Perform multiplication of two inputs
- VEC_DATA_TYPE(int, 16) in1_val = in1_data + (VEC_DATA_TYPE(int, 16))(IN1_OFFSET);
- VEC_DATA_TYPE(int, 16) in2_val = in2_data + (VEC_DATA_TYPE(int, 16))(IN2_OFFSET);
- VEC_DATA_TYPE(int, 16) out_val = in1_val * in2_val;
-
- // Multiply with a multiplier smaller than 1
- out_val = ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(out_val, RESULT_MULT_INT, RESULT_SHIFT, 16);
- out_val += (VEC_DATA_TYPE(int, 16))(RESULT_OFFSET);
-
- VEC_DATA_TYPE(uchar, 16) res = CONVERT(out_val, VEC_DATA_TYPE(uchar, 16));
-
-// TODO: Apply min-max bounds to support fusing with ReLU.
-/*
-#if defined(MIN_BOUND)
- res = max(res, (uchar16)MIN_BOUND);
-#endif // defined(MIN_BOUND)
-#if defined(MAX_BOUND)
- res = min(res, (uchar16)MAX_BOUND);
-#endif // defined(MAX_BOUND)
-*/
-
- // Store result
- VSTORE(16)(CONVERT(res, VEC_DATA_TYPE(DATA_TYPE_OUT, 16)),
- 0, (__global DATA_TYPE_OUT *)out.ptr);
-}
-#endif // defined(RESULT_OFFSET) && defined(RESULT_MULT_INT) && defined(RESULT_SHIFT)
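
The six steps enumerated in the kernel comment map directly onto a scalar requantization routine. A C sketch in which the gemmlowp-style fixed-point multiply (ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE) is approximated with plain 64-bit arithmetic and without its rounding term:

    #include <stdint.h>

    /* Scalar reference for pixelwise_mul_qasymm8; mult/shift stand in for
     * RESULT_MULT_INT and RESULT_SHIFT. */
    static uint8_t mul_qasymm8_ref(uint8_t in1, uint8_t in2,
                                   int in1_offset, int in2_offset,
                                   int32_t mult, int shift, int result_offset)
    {
        int32_t a = (int32_t)in1 + in1_offset;   /* add input offsets     */
        int32_t b = (int32_t)in2 + in2_offset;
        int64_t acc = (int64_t)a * b;            /* multiply              */
        acc = (acc * mult) >> (31 + shift);      /* scale by multiplier   */
        acc += result_offset;                    /* add output offset     */
        if (acc < 0)   acc = 0;                  /* clamp to [0, 255] and */
        if (acc > 255) acc = 255;                /* cast to QASYMM8       */
        return (uint8_t)acc;
    }
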
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl
deleted file mode 100644
index 68da2ba32..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE)
-/** Returns result of prelu function implemented as below:
- * f(input) = alpha * input for input < 0, f(input) = input for input >= 0.
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @note Can only take floating point data types.
- *
- * @param[in]  input1_ptr                            Pointer to the source image. Supported data types: F16/F32
- * @param[in] input1_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input1_step_x input1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input1_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input1_step_y input1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input1_step_z input1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source image
- *
- * @param[in]  alpha_ptr                             Pointer to the source image. Supported data types: F16/F32
- * @param[in]  alpha_stride_x                        Stride of the source image in X dimension (in bytes)
- * @param[in]  alpha_step_x                          alpha_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in]  alpha_stride_y                        Stride of the source image in Y dimension (in bytes)
- * @param[in]  alpha_step_y                          alpha_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  alpha_stride_z                        Stride of the source tensor in Z dimension (in bytes)
- * @param[in]  alpha_step_z                          alpha_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] alpha_offset_first_element_in_bytes The offset of the first element in the source image
- *
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void prelu(
- TENSOR3D_DECLARATION(input),
- TENSOR3D_DECLARATION(alpha),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
- Tensor3D alpha = CONVERT_TO_TENSOR3D_STRUCT(alpha);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VSTORE(VEC_SIZE)
- (VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr) < 0 ?
- VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr) * VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)alpha.ptr) :
- VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr),
- 0, (__global DATA_TYPE *)output.ptr);
-
-}
-#endif // defined(DATA_TYPE)
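
The whole kernel reduces to one select per element, with an alpha value read at the same coordinate as the input. Scalar equivalent, for reference:

    /* Scalar reference for prelu: f(x) = x for x >= 0, alpha * x otherwise. */
    static float prelu_ref(float x, float alpha)
    {
        return x < 0.f ? alpha * x : x;
    }
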
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl
deleted file mode 100644
index 7e97b7ed6..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-#define SUB(x, y) ((x) - (y))
-
-#if defined(OFF_IN1) && defined(OFF_IN2) && defined(OFF_OUT) && defined(SCALE_IN1) && defined(SCALE_IN2) && defined(SCALE_OUT) && defined(VEC_SIZE)
-
-#define VEC_FLOAT VEC_DATA_TYPE(float, VEC_SIZE)
-#define VEC_INT VEC_DATA_TYPE(int, VEC_SIZE)
-#define VEC_UCHAR VEC_DATA_TYPE(uchar, VEC_SIZE)
-#define CONVERT_RTE(x, type) (convert_##type##_rte((x)))
-#define CONVERT_DOWN(x, type) CONVERT_RTE(x, type)
-
-/** Returns result of prelu function implemented as below:
- * f(input) = alpha * input for input < 0, f(input) = input for input >= 0.
- *
- * @attention Data type can be passed using the -DDATA_TYPE_IN compile flag, e.g. -DDATA_TYPE_IN=uchar
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @note Can only take uchar data types.
- *
- * @param[in]  input1_ptr                            Pointer to the source image. Supported data types: QASYMM8
- * @param[in] input1_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input1_step_x input1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input1_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input1_step_y input1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input1_step_z input1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source image
- *
- * @param[in]  alpha_ptr                             Pointer to the source image. Supported data types: QASYMM8
- * @param[in]  alpha_stride_x                        Stride of the source image in X dimension (in bytes)
- * @param[in]  alpha_step_x                          alpha_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in]  alpha_stride_y                        Stride of the source image in Y dimension (in bytes)
- * @param[in]  alpha_step_y                          alpha_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  alpha_stride_z                        Stride of the source tensor in Z dimension (in bytes)
- * @param[in]  alpha_step_z                          alpha_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] alpha_offset_first_element_in_bytes The offset of the first element in the source image
- *
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void prelu_qasymm8(
- TENSOR3D_DECLARATION(input),
- TENSOR3D_DECLARATION(alpha),
- TENSOR3D_DECLARATION(output))
-{
- // Get pixels pointer
- Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
- Tensor3D alpha = CONVERT_TO_TENSOR3D_STRUCT(alpha);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VEC_INT in_a = CONVERT(VLOAD(VEC_SIZE)(0, (__global uchar *)input.ptr), VEC_INT);
- VEC_INT in_b = CONVERT(VLOAD(VEC_SIZE)(0, (__global uchar *)alpha.ptr), VEC_INT);
-
- in_a = SUB(in_a, (VEC_INT)((int)OFF_IN1));
- in_b = SUB(in_b, (VEC_INT)((int)OFF_IN2));
-
- const VEC_FLOAT in1f32 = CONVERT(in_a, VEC_FLOAT) * (VEC_FLOAT)((float)SCALE_IN1);
- const VEC_FLOAT in2f32 = CONVERT(in_b, VEC_FLOAT) * (VEC_FLOAT)((float)SCALE_IN2);
- const VEC_FLOAT outf32 = in1f32 < 0 ? in1f32 * in2f32 : in1f32;
- const VEC_FLOAT qresf32 = outf32 / ((VEC_FLOAT)(float)SCALE_OUT) + ((VEC_FLOAT)((float)OFF_OUT));
- const VEC_UCHAR res = CONVERT_SAT(CONVERT_DOWN(qresf32, VEC_INT), VEC_UCHAR);
-
- VSTORE(VEC_SIZE)
- (res, 0, (__global uchar *)output.ptr);
-}
-
-#endif // defined(OFF_IN1) && defined(OFF_IN2) && defined(OFF_OUT) && defined(SCALE_IN1) && defined(SCALE_IN2) && defined(SCALE_OUT) && defined(VEC_SIZE)
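
The quantized variant dequantizes both operands with their (offset, scale) pairs, applies prelu in float, and requantizes with the output pair. A scalar C sketch (rounding simplified to rintf, which matches the kernel's convert_..._rte under the default floating-point environment):

    #include <math.h>
    #include <stdint.h>

    /* Scalar reference for prelu_qasymm8: dequantize, prelu, requantize. */
    static uint8_t prelu_qasymm8_ref(uint8_t in, uint8_t alpha,
                                     int off_in1, float scale_in1,
                                     int off_in2, float scale_in2,
                                     int off_out, float scale_out)
    {
        float x = ((int)in - off_in1) * scale_in1;
        float a = ((int)alpha - off_in2) * scale_in2;
        float y = x < 0.f ? x * a : x;
        float q = rintf(y / scale_out + (float)off_out);
        if (q < 0.f)   q = 0.f;      /* CONVERT_SAT to uchar */
        if (q > 255.f) q = 255.f;
        return (uint8_t)q;
    }
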
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl
deleted file mode 100644
index 8bef49363..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
-/** Perform reduce max/min
- *
- * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g. -DDATA_TYPE=short
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size. e.g. -DDEPTH_OUT=16
- * @attention The operation type (code) specifying which operation to perform should be passed as a preprocessor
- *            argument using -DOP_CODE=number, e.g. -DOP_CODE=1
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[in] input_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in]  input_step_w                          input_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in]  output_stride_z                       Stride of the destination tensor in Z dimension (in bytes)
- * @param[in]  output_step_z                         output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in]  output_stride_w                       Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- * @param[in]  axis                                  Axis along which the reduction is performed
- * @param[in]  dim                                   Extent of the input tensor along the axis being reduced
- */
-__kernel void reduce_min_max(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- const int axis,
- const int dim)
-{
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
- int indices[4] =
- {
- get_global_id(0),
- get_global_id(1),
- get_global_id(2) % DEPTH_OUT,
- get_global_id(2) / DEPTH_OUT,
- };
-
- DATA_TYPE value = *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3]));
- for(int i = 1; i < dim; ++i)
- {
- indices[axis] = i;
-
- #if OP_CODE == 1 // REDUCE_MAX
- value = max(value, *((__global DATA_TYPE *)
- tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3])));
-
- #elif OP_CODE == 2 // REDUCE_MIN
- value = min(value, *((__global DATA_TYPE *)
- tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3])));
-
- #else // OP NOT SUPPORTED
- return;
-
- #endif
- }
-
- *((__global DATA_TYPE *)out.ptr) = value;
-}
-
-/** Perform reduce sum/mean
- *
- * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g. -DDATA_TYPE=short
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size. e.g. -DDEPTH_OUT=16
- * @attention Operation type (code) specifying which operation to perform should be passed as a preprocessor argument using
- *            -DOP_CODE=number. e.g. -DOP_CODE=3
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: U8/S8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[in] input_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] input_step_w input_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- * @param[in] axis Axis along which the reduction is performed
- * @param[in] dim Extent of the input tensor along @p axis (the number of elements reduced)
- */
-__kernel void reduce_sum_mean(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- const int axis,
- const int dim)
-{
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
- int indices[4] =
- {
- get_global_id(0),
- get_global_id(1),
- get_global_id(2) % DEPTH_OUT,
- get_global_id(2) / DEPTH_OUT,
- };
-
- DATA_TYPE sum_value = (DATA_TYPE)0;
- for(int i = 0; i < dim; ++i)
- {
- indices[axis] = i;
- sum_value += *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3]));
- }
-
- #if OP_CODE == 3 // REDUCE_SUM
- *((__global DATA_TYPE *)out.ptr) = sum_value;
-
- #elif OP_CODE == 4 // REDUCE_MEAN
- *((__global DATA_TYPE *)out.ptr) = sum_value / CONVERT(dim, DATA_TYPE);
-
- #else // OP NOT SUPPORTED
- return;
-
- #endif
-}
-#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
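
For reference, the reduction the two kernels above implement can be written directly on the host. The following is a minimal CPU sketch in C++, not library code: it reduces one axis of a dense 4D tensor stored with X as the fastest-moving dimension, mirroring the kernels' X/Y/Z/W ordering; the names reduce_axis and OpCode are illustrative.

#include <algorithm>
#include <cstddef>
#include <vector>

enum class OpCode { Max = 1, Min = 2, Sum = 3, Mean = 4 };

// Flatten a 4D index (x fastest, w slowest), matching the kernel's X/Y/Z/W order.
static std::size_t flat(const int i[4], const int d[4])
{
  return ((static_cast<std::size_t>(i[3]) * d[2] + i[2]) * d[1] + i[1]) * d[0] + i[0];
}

// Reduce one axis of a dense 4D tensor; the output keeps extent 1 on that axis.
std::vector<float> reduce_axis(const std::vector<float> &in, const int d[4], int axis, OpCode op)
{
  int od[4] = {d[0], d[1], d[2], d[3]};
  od[axis] = 1;
  std::vector<float> out(static_cast<std::size_t>(od[0]) * od[1] * od[2] * od[3]);

  int o[4];
  for (o[3] = 0; o[3] < od[3]; ++o[3])
    for (o[2] = 0; o[2] < od[2]; ++o[2])
      for (o[1] = 0; o[1] < od[1]; ++o[1])
        for (o[0] = 0; o[0] < od[0]; ++o[0])
        {
          int i[4] = {o[0], o[1], o[2], o[3]}; // i[axis] == 0 here
          float acc = in[flat(i, d)];
          for (int k = 1; k < d[axis]; ++k)
          {
            i[axis] = k;
            const float v = in[flat(i, d)];
            if (op == OpCode::Max)      acc = std::max(acc, v);
            else if (op == OpCode::Min) acc = std::min(acc, v);
            else                        acc += v; // Sum and Mean accumulate identically
          }
          if (op == OpCode::Mean) acc /= d[axis];
          out[flat(o, od)] = acc;
        }
  return out;
}

As in the kernels, REDUCE_SUM and REDUCE_MEAN share the accumulation loop and differ only in the final division by the reduced extent.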
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl
deleted file mode 100644
index a0fc2d5a9..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) && defined(WIDTH_IN) && defined(ZERO_VALUE)
-/** Perform space-to-batch on a 4D input in NCHW format
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size. e.g. -DDEPTH_OUT=16
- * @attention Input tensor batch should be given as a preprocessor argument using -DBATCH_IN=size. e.g. -DBATCH_IN=16
- * @attention Input tensor height should be given as a preprocessor argument using -DHEIGHT_IN=size. e.g. -DHEIGHT_IN=16
- * @attention Input tensor width should be given as a preprocessor argument using -DWIDTH_IN=size. e.g. -DWIDTH_IN=16
- * @attention The padding value should be given as a preprocessor argument using -DZERO_VALUE=value. e.g. -DZERO_VALUE=0
- *
- * @param[in] input_ptr Pointer to the source tensor. Supported data types: U8/S8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] input_step_w input_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- * @param[in] block_size_ptr Pointer to the source tensor. Supported data types: S32
- * @param[in] block_size_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] block_size_step_x block_size_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] block_size_offset_first_element_in_bytes The offset of the first element in the block size vector
- * @param[in] padding_size_ptr Pointer to the source tensor. Supported data types: S32
- * @param[in] padding_size_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] padding_size_step_x padding_size_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] padding_size_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] padding_size_step_y padding_size_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] padding_size_offset_first_element_in_bytes The offset of the first element in the padding size tensor
- */
-__kernel void space_to_batch_4d_nchw(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- VECTOR_DECLARATION(block_size),
- IMAGE_DECLARATION(padding_size))
-{
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
- int block_size_x = *((__global int *)(block_size_ptr));
- int block_size_y = *((__global int *)(block_size_ptr + block_size_stride_x));
- int shift_x = (get_global_id(2) / DEPTH_OUT / BATCH_IN) % block_size_x;
- int shift_y = (get_global_id(2) / DEPTH_OUT / BATCH_IN) / block_size_x;
-
- int in_index[4] = {0, };
- in_index[0] = get_global_id(0) * block_size_x + shift_x - *((__global int *)(padding_size_ptr));
- in_index[1] = get_global_id(1) * block_size_y + shift_y - *((__global int *)(padding_size_ptr + padding_size_stride_y));
- in_index[2] = get_global_id(2) % DEPTH_OUT;
- in_index[3] = (get_global_id(2) / DEPTH_OUT) % BATCH_IN;
-
- if (in_index[0] < 0 || in_index[0] >= WIDTH_IN || in_index[1] < 0 || in_index[1] >= HEIGHT_IN)
- {
- *((__global DATA_TYPE *)out.ptr) = (DATA_TYPE)ZERO_VALUE;
- }
- else
- {
- *((__global DATA_TYPE *)out.ptr) = *((__global DATA_TYPE *)tensor4D_offset(&in, in_index[0], in_index[1], in_index[2], in_index[3]));
- }
-}
-#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) && defined(WIDTH_IN) && defined(ZERO_VALUE)
-
-#if defined(DATA_TYPE) && defined(HEIGHT_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) && defined(WIDTH_IN) && defined(ZERO_VALUE) && defined(VEC_SIZE)
-/** Perform space-to-batch on a 4D input in NHWC format
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Output tensor height should be given as a preprocessor argument using -DHEIGHT_OUT=size. e.g. -DHEIGHT_OUT=16
- * @attention Input tensor batch should be given as a preprocessor argument using -DBATCH_IN=size. e.g. -DBATCH_IN=16
- * @attention Input tensor height should be given as a preprocessor argument using -DHEIGHT_IN=size. e.g. -DHEIGHT_IN=16
- * @attention Input tensor width should be given as a preprocessor argument using -DWIDTH_IN=size. e.g. -DWIDTH_IN=16
- * @attention The padding value should be given as a preprocessor argument using -DZERO_VALUE=value. e.g. -DZERO_VALUE=0
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- *
- * @param[in] input_ptr Pointer to the source tensor. Supported data types: U8/S8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_stride_w Stride of the source tensor in W dimension (in bytes)
- * @param[in] input_step_w input_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source tensor
- * @param[out] output_ptr Pointer to the destination tensor. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination tensor in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination tensor in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination tensor
- * @param[in] block_size_ptr Pointer to the source tensor. Supported data types: S32
- * @param[in] block_size_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] block_size_step_x block_size_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] block_size_offset_first_element_in_bytes The offset of the first element in the block size vector
- * @param[in] padding_size_ptr Pointer to the source tensor. Supported data types: S32
- * @param[in] padding_size_stride_x Stride of the source tensor in X dimension (in bytes)
- * @param[in] padding_size_step_x padding_size_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] padding_size_stride_y Stride of the source tensor in Y dimension (in bytes)
- * @param[in] padding_size_step_y padding_size_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] padding_size_offset_first_element_in_bytes The offset of the first element in the padding size tensor
- */
-__kernel void space_to_batch_4d_nhwc(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- VECTOR_DECLARATION(block_size),
- IMAGE_DECLARATION(padding_size))
-{
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, HEIGHT_OUT);
-
- int block_size_x = *((__global int *)(block_size_ptr));
- int block_size_y = *((__global int *)(block_size_ptr + block_size_stride_x));
- int shift_x = (get_global_id(2) / HEIGHT_OUT / BATCH_IN) % block_size_x;
- int shift_y = (get_global_id(2) / HEIGHT_OUT / BATCH_IN) / block_size_x;
-
- int in_index[4] = {0, };
- in_index[0] = get_global_id(0) * VEC_SIZE;
- in_index[1] = get_global_id(1) * block_size_x + shift_x - *((__global int *)(padding_size_ptr));
- in_index[2] = get_global_id(2) % HEIGHT_OUT * block_size_y + shift_y - *((__global int *)(padding_size_ptr + padding_size_stride_y));
- in_index[3] = (get_global_id(2) / HEIGHT_OUT) % BATCH_IN;
-
- if (in_index[1] < 0 || in_index[1] >= WIDTH_IN || in_index[2] < 0 || in_index[2] >= HEIGHT_IN)
- {
- VSTORE(VEC_SIZE)((VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE))ZERO_VALUE, 0, (__global DATA_TYPE *)out.ptr);
- }
- else
- {
- VSTORE(VEC_SIZE)(CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)tensor4D_offset(&in, in_index[0], in_index[1], in_index[2], in_index[3])),
- VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)),
- 0, (__global DATA_TYPE *)out.ptr);
- }
-}
-
-#endif // defined(DATA_TYPE) && defined(HEIGHT_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) && defined(WIDTH_IN) && defined(ZERO_VALUE) && defined(VEC_SIZE)
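
The index arithmetic of the NCHW variant is easier to follow in scalar form. Below is a minimal CPU sketch (illustrative names, not library code), assuming the padded extents divide evenly by the block sizes, as the kernels require; out-of-range reads produce the pad value, matching the kernel's WIDTH_IN/HEIGHT_IN bounds check.

#include <cstddef>
#include <vector>

std::vector<float> space_to_batch_nchw(const std::vector<float> &in, int batch_in, int depth,
                                       int h_in, int w_in, int block_x, int block_y,
                                       int pad_l, int pad_r, int pad_t, int pad_b,
                                       float pad_value)
{
  const int w_out = (w_in + pad_l + pad_r) / block_x;
  const int h_out = (h_in + pad_t + pad_b) / block_y;
  const int batch_out = batch_in * block_x * block_y;
  std::vector<float> out(static_cast<std::size_t>(batch_out) * depth * h_out * w_out);

  auto in_at = [&](int b, int c, int y, int x) {
    return in[((static_cast<std::size_t>(b) * depth + c) * h_in + y) * w_in + x];
  };

  std::size_t o = 0;
  for (int b = 0; b < batch_out; ++b)
  {
    // The output batch encodes (shift_y, shift_x, original batch), as in the kernel.
    const int b_in = b % batch_in;
    const int shift_x = (b / batch_in) % block_x;
    const int shift_y = (b / batch_in) / block_x;
    for (int c = 0; c < depth; ++c)
      for (int y = 0; y < h_out; ++y)
        for (int x = 0; x < w_out; ++x, ++o)
        {
          const int ix = x * block_x + shift_x - pad_l;
          const int iy = y * block_y + shift_y - pad_t;
          out[o] = (ix < 0 || ix >= w_in || iy < 0 || iy >= h_in) ? pad_value
                                                                  : in_at(b_in, c, iy, ix);
        }
  }
  return out;
}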
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl
deleted file mode 100644
index f6977045a..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016, 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(DATA_TYPE) && defined(DEPTH_IN) && defined(BLOCK_SIZE)
-/** Perform space-to-depth rearrangement of a tensor
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Input tensor depth should be given as a preprocessor argument using -DDEPTH_IN=size. e.g. -DDEPTH_IN=16
- * @attention block size should be given as a preprocessor argument using -DBLOCK_SIZE=size. e.g. -DBLOCK_SIZE=1
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void space_to_depth(
- TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output))
- {
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, DEPTH_IN);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(output, 0);
-
- int out_index[4]={0};
- int in_index[4]={0};
-
- in_index[0] = get_global_id(0);//W
- in_index[1] = get_global_id(1);//H
- in_index[2] = get_global_id(2) % DEPTH_IN;//C
- in_index[3] = get_global_id(2) / DEPTH_IN;//B
-
- out_index[0] = in_index[0]/BLOCK_SIZE;
- out_index[1] = in_index[1]/BLOCK_SIZE;
- out_index[2] = in_index[2] + ((in_index[1] % BLOCK_SIZE) * BLOCK_SIZE + in_index[0] % BLOCK_SIZE) * DEPTH_IN;
- out_index[3] = in_index[3];
-
- *((__global DATA_TYPE *)tensor4D_offset(&out, out_index[0],out_index[1],out_index[2],out_index[3])) = *((__global DATA_TYPE *)in.ptr);
- }
-#endif // defined(DATA_TYPE) && defined(DEPTH_IN) && defined(BLOCK_SIZE)
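
The mapping this kernel computes can be stated compactly on the host: each block_size x block_size spatial tile of the input becomes a group of channels of a single output pixel. A minimal CPU sketch under NCHW-style dense indexing (illustrative, not library code):

#include <cstddef>
#include <vector>

std::vector<float> space_to_depth(const std::vector<float> &in, int batch, int depth_in,
                                  int h_in, int w_in, int block)
{
  const int w_out = w_in / block, h_out = h_in / block;
  const int depth_out = depth_in * block * block;
  std::vector<float> out(static_cast<std::size_t>(batch) * depth_out * h_out * w_out);

  for (int b = 0; b < batch; ++b)
    for (int c = 0; c < depth_in; ++c)
      for (int y = 0; y < h_in; ++y)
        for (int x = 0; x < w_in; ++x)
        {
          // Output channel = c + ((y % block) * block + x % block) * depth_in, as in the kernel.
          const int oc = c + ((y % block) * block + x % block) * depth_in;
          const std::size_t src =
              ((static_cast<std::size_t>(b) * depth_in + c) * h_in + y) * w_in + x;
          const std::size_t dst =
              ((static_cast<std::size_t>(b) * depth_out + oc) * h_out + y / block) * w_out +
              x / block;
          out[dst] = in[src];
        }
  return out;
}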
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/squared_difference.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/squared_difference.cl
deleted file mode 100644
index 3e1a5c97f..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/squared_difference.cl
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#ifndef VEC_SIZE
-#define VEC_SIZE 1
-#endif
-
-#if defined(DATA_TYPE)
-/** Computes the element-wise squared difference of two tensors.
- *
- * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
- * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g. -DVEC_SIZE=16
- * @note Can only take floating point data types.
- *
- * @param[in] input1_ptr Pointer to the source image. Supported data types: F16/F32
- * @param[in] input1_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input1_step_x input1_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input1_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input1_step_y input1_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input1_step_z input1_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source image
- *
- * @param[in] input2_ptr Pointer to the source image. Supported data types: F16/F32
- * @param[in] input2_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input2_step_x input2_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input2_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input2_step_y input2_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input2_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input2_step_z input2_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input2_offset_first_element_in_bytes The offset of the first element in the source image
- *
- * @param[out] output_ptr Pointer to the destination image. Supported data types: F16/F32
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- */
-__kernel void squared_difference(
- TENSOR3D_DECLARATION(input1),
- TENSOR3D_DECLARATION(input2),
- TENSOR3D_DECLARATION(output))
-{
- Tensor3D input1 = CONVERT_TO_TENSOR3D_STRUCT(input1);
- Tensor3D input2 = CONVERT_TO_TENSOR3D_STRUCT(input2);
- Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
-
- VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)
- diff = VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input1.ptr) - VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input2.ptr);
-
- VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)
- sq_diff = diff * diff;
-
- VSTORE(VEC_SIZE)
- (sq_diff, 0, (__global DATA_TYPE *)output.ptr);
-}
-#endif // defined(DATA_TYPE)
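
A CPU reference for this kernel is one line per element; the sketch below (illustrative, not library code) assumes both inputs have identical shapes, i.e. no broadcasting.

#include <cstddef>
#include <vector>

std::vector<float> squared_difference(const std::vector<float> &a, const std::vector<float> &b)
{
  std::vector<float> out(a.size());
  for (std::size_t i = 0; i < a.size(); ++i)
  {
    const float d = a[i] - b[i]; // the kernel computes diff, then diff * diff, vectorized
    out[i] = d * d;
  }
  return out;
}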
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/strided_slice_ex.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/strided_slice_ex.cl
deleted file mode 100644
index b39c55b96..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/strided_slice_ex.cl
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "helpers.h"
-
-#if defined(ELEMENT_DATA_TYPE) && defined(DEPTH_OUT)
-/** Extracts a strided slice of up to 4 dimensions
- *
- * @note Datatype should be given as a preprocessor argument using -DELEMENT_DATA_TYPE=type. e.g. -DELEMENT_DATA_TYPE=short
- * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size. e.g. -DDEPTH_OUT=16
- *
- * @param[in] input_ptr Pointer to the source image. Supported data types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
- * @param[in] input_stride_x Stride of the source image in X dimension (in bytes)
- * @param[in] input_step_x input_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] input_stride_y Stride of the source image in Y dimension (in bytes)
- * @param[in] input_step_y input_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] input_stride_z Stride of the source tensor in Z dimension (in bytes)
- * @param[in] input_step_z input_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source image
- * @param[out] output_ptr Pointer to the destination image. Supported data types: same as @p input_ptr
- * @param[in] output_stride_x Stride of the destination image in X dimension (in bytes)
- * @param[in] output_step_x output_stride_x * number of elements along X processed per workitem(in bytes)
- * @param[in] output_stride_y Stride of the destination image in Y dimension (in bytes)
- * @param[in] output_step_y output_stride_y * number of elements along Y processed per workitem(in bytes)
- * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in bytes)
- * @param[in] output_step_z output_stride_z * number of elements along Z processed per workitem(in bytes)
- * @param[in] output_stride_w Stride of the destination tensor in W dimension (in bytes)
- * @param[in] output_step_w output_stride_w * number of elements along W processed per workitem(in bytes)
- * @param[in] output_offset_first_element_in_bytes The offset of the first element in the destination image
- * @param[in] starts The start index of each dimension of the input tensor to be sliced. Supported data types: S32
- * @param[in] strides The stride of each dimension of the input tensor to be sliced. Supported data types: S32
- */
-__kernel void strided_slice_ex(TENSOR4D_DECLARATION(input),
- TENSOR4D_DECLARATION(output),
- const int4 starts,
- const int4 strides)
-{
- Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
- Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
-
- int4 indices_in =
- {
- starts.x + (strides.x * get_global_id(0)),
- starts.y + (strides.y * get_global_id(1)),
- starts.z + (strides.z * (get_global_id(2) % DEPTH_OUT)),
- starts.w + (strides.w * (get_global_id(2) / DEPTH_OUT)),
- };
- *((__global ELEMENT_DATA_TYPE *)out.ptr) = *((__global ELEMENT_DATA_TYPE *)tensor4D_offset(&in, indices_in.x, indices_in.y, indices_in.z, indices_in.w));
-}
-#endif // defined(ELEMENT_DATA_TYPE) && defined(DEPTH_OUT)
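
The kernel's addressing reduces to starts[d] + strides[d] * out_coord[d] per dimension. A minimal CPU sketch (illustrative names, not library code; it assumes the slice parameters keep every source coordinate in bounds):

#include <cstddef>
#include <vector>

std::vector<float> strided_slice_4d(const std::vector<float> &in, const int in_dims[4],
                                    const int out_dims[4], const int starts[4],
                                    const int strides[4])
{
  std::vector<float> out(static_cast<std::size_t>(out_dims[0]) * out_dims[1] * out_dims[2] *
                         out_dims[3]);
  std::size_t o = 0;
  int c[4];
  for (c[3] = 0; c[3] < out_dims[3]; ++c[3])
    for (c[2] = 0; c[2] < out_dims[2]; ++c[2])
      for (c[1] = 0; c[1] < out_dims[1]; ++c[1])
        for (c[0] = 0; c[0] < out_dims[0]; ++c[0], ++o)
        {
          // Flatten with x as the fastest-moving dimension, matching the kernel.
          std::size_t src = 0;
          for (int d = 3; d >= 0; --d)
            src = src * in_dims[d] + (starts[d] + strides[d] * c[d]);
          out[o] = in[src];
        }
  return out;
}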
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl
deleted file mode 100644
index d97f23a47..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "helpers.h"
-
-__kernel void topkv2_init(VECTOR_DECLARATION(input),
- __global float* in_key_buf,
- __global int* in_ind_buf,
- const int n)
-{
- int gid = get_global_id(0);
- int lws = get_local_size(0);
- int groups = get_num_groups(0);
- int gws = lws * groups;
- int iter = n / gws;
-
- Vector input = CONVERT_TO_VECTOR_STRUCT_NO_STEP(input);
-
- for(int i = 0; i < iter; ++i)
- {
- int idx = i * gws + gid;
- in_key_buf[idx] = *(__global float*)(input.ptr + idx * input.stride_x);
- in_ind_buf[idx] = idx;
- }
-}
-
-__kernel void topkv2_find_first_negative(
- __global float *out_key_buf,
- __global int *first_negative_idx,
- int n)
-{
- int gid = get_global_id(0);
-
- if( gid == n - 1 )
- {
- // if the last item is positive, the first negative index is n.
- if( out_key_buf[gid] > 0.f )
- *first_negative_idx = n;
- } else if ( gid == 0 ) {
- // if the first item is negative, the first negative index is 0.
- if( out_key_buf[gid] < 0.f )
- *first_negative_idx = 0;
- } else {
- // if the item to its left is positive and this one is negative, this is the first negative item.
- if( out_key_buf[gid-1] > 0.f && out_key_buf[gid] < 0.f )
- *first_negative_idx = gid;
- }
-}
-
-__kernel void topkv2_reorder_negatives(
- __global float* in_key_buf,
- __global float* out_key_buf,
- __global int* in_ind_buf,
- __global int* out_ind_buf,
- __global int* first_negative_idx,
- int n)
-{
- int gid = get_global_id(0);
-
- int num_negs = n - *first_negative_idx;
- int in_idx;
-
- if( gid < num_negs ) {
- in_idx = n - 1 - gid;
- } else {
- in_idx = gid - num_negs;
- }
-
- out_key_buf[gid] = in_key_buf[in_idx];
- out_ind_buf[gid] = in_ind_buf[in_idx];
-}
-
-__kernel void topkv2_store(
- VECTOR_DECLARATION(values),
- VECTOR_DECLARATION(indices),
- __global float *out_key_buf,
- __global int *out_ind_buf,
- int n)
-{
- int gid = get_global_id(0);
-
- Vector values = CONVERT_TO_VECTOR_STRUCT_NO_STEP(values);
- Vector indices = CONVERT_TO_VECTOR_STRUCT_NO_STEP(indices);
-
- int idx = n - 1 - gid;
-
- *(__global float*)(values.ptr + gid * values.stride_x) = out_key_buf[idx];
- *(__global int*)(indices.ptr + gid * indices.stride_x) = out_ind_buf[idx];
-}
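
These kernels exploit the fact that non-negative IEEE 754 floats keep their ordering when their bit patterns are compared as unsigned integers, while negative floats land after the positives, in reverse order; topkv2_find_first_negative locates the boundary and topkv2_reorder_negatives rotates the negatives, reversed, to the front. A minimal single-threaded C++ sketch of the same fix-up (illustrative, not library code):

#include <algorithm>
#include <cstdint>
#include <cstring>
#include <vector>

// Sort floats by their uint32 bit patterns, then repair the negatives, mirroring
// topkv2_find_first_negative / topkv2_reorder_negatives.
std::vector<float> sort_via_bits(const std::vector<float> &keys)
{
  const int n = static_cast<int>(keys.size());
  std::vector<uint32_t> bits(n);
  std::memcpy(bits.data(), keys.data(), n * sizeof(float));
  std::sort(bits.begin(), bits.end()); // positives ascending, then negatives reversed

  std::vector<float> sorted(n);
  std::memcpy(sorted.data(), bits.data(), n * sizeof(float));

  // Find the first negative key; everything from here to the end is in reverse order.
  int first_neg = n;
  for (int i = 0; i < n; ++i)
    if (sorted[i] < 0.f) { first_neg = i; break; }

  // Move the negatives, reversed, in front of the positives -> fully ascending order.
  std::vector<float> fixed(n);
  const int num_negs = n - first_neg;
  for (int gid = 0; gid < n; ++gid)
  {
    const int in_idx = (gid < num_negs) ? n - 1 - gid : gid - num_negs;
    fixed[gid] = sorted[in_idx];
  }
  return fixed;
}

topkv2_store then reads the k largest values from the tail of the fixed buffer, which is why it indexes with n - 1 - gid.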
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl
deleted file mode 100644
index 0292fab04..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "helpers.h"
-
-inline __global float* get_vec_elem(Vector* vec, int idx)
-{
- return (__global float*)(vec->ptr + idx * vec->stride_x);
-}
-
-inline __global int* get_vec_elem_int(Vector* vec, int idx)
-{
- return (__global int*)(vec->ptr + idx * vec->stride_x);
-}
-
-// A utility function to swap two elements
-void swap(__global float *a, __global float *b)
-{
- float t = *a;
- *a = *b;
- *b = t;
-}
-
-void swap_idx(__global int *a, __global int *b)
-{
- int t = *a;
- *a = *b;
- *b = t;
-}
-
-/* This function is the same in both the iterative and recursive versions */
-int partition (Vector* arr, __global int* indices, int l, int h)
-{
- float x = *get_vec_elem(arr, h);
- int i = (l - 1);
-
- for (int j = l; j <= h- 1; j++)
- {
- if (*get_vec_elem(arr, j) >= x)
- {
- i++;
- swap (get_vec_elem(arr,i), get_vec_elem(arr,j));
- swap_idx(&indices[i], &indices[j]);
- }
- }
- swap (get_vec_elem(arr, i + 1), get_vec_elem(arr, h));
- swap_idx(&indices[i + 1], &indices[h]);
- return (i + 1);
-}
-
-/* A[] --> Array to be sorted,
- l --> Starting index,
- h --> Ending index */
-void quickSortIterative (Vector* arr, __global int* indices,
- __global int *stack, int l, int h)
-{
- // Create an auxiliary stack
-
- // initialize top of stack
- int top = -1;
-
- // push initial values of l and h to stack
- stack[ ++top ] = l;
- stack[ ++top ] = h;
-
- // Keep popping from the stack while it is not empty
- while ( top >= 0 )
- {
- // Pop h and l
- h = stack[ top-- ];
- l = stack[ top-- ];
-
- // Set pivot element at its correct position
- // in sorted array
- int p = partition( arr, indices, l, h );
-
- // If there are elements on left side of pivot,
- // then push left side to stack
- if ( p-1 > l )
- {
- stack[ ++top ] = l;
- stack[ ++top ] = p - 1;
- }
-
- // If there are elements on right side of pivot,
- // then push right side to stack
- if ( p+1 < h )
- {
- stack[ ++top ] = p + 1;
- stack[ ++top ] = h;
- }
- }
-}
-
-__kernel void topkv2_quicksort(VECTOR_DECLARATION(input),
- VECTOR_DECLARATION(topk_values), VECTOR_DECLARATION(topk_indices),
- __global int* indices, __global int* temp_stack, int k, int n)
-{
- Vector input = CONVERT_TO_VECTOR_STRUCT_NO_STEP(input);
- Vector topk_values = CONVERT_TO_VECTOR_STRUCT_NO_STEP(topk_values);
- Vector topk_indices = CONVERT_TO_VECTOR_STRUCT_NO_STEP(topk_indices);
-
- for( int i = 0; i < n; ++i )
- {
- indices[i] = i;
- }
-
- quickSortIterative(&input, indices, temp_stack, 0, n-1);
-
- // extract k items.
- for(int i = 0; i < k; ++i)
- {
- *get_vec_elem(&topk_values, i) = *get_vec_elem(&input, i);
- *get_vec_elem_int(&topk_indices, i) = indices[i];
- }
-}
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl b/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl
deleted file mode 100644
index c2c2d89a4..000000000
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// reference:
-// https://code.google.com/archive/p/ocl-radix-sort/source/default/source
-// OpenCL kernel sources for the CLRadixSort class
-// the #include does not exist in OpenCL
-// Copyright Philippe Helluy, Université de Strasbourg, France, 2011, helluy@math.unistra.fr
-// licensed under the GNU Lesser General Public License see http://www.gnu.org/copyleft/lesser.html
-// if you find this software useful you can cite the following work in your reports or articles:
-// Philippe HELLUY, A portable implementation of the radix sort algorithm in OpenCL, 2011.
-// http://hal.archives-ouvertes.fr/hal-00596730
-
-// Reference for floating point radix sort:
-// http://www.codercorner.com/RadixSortRevisited.htm
-
-// compute the histogram for each radix and each virtual processor for the pass
-__kernel void radixsort_histogram(__global float* in_key_buf,
- __global int* d_Histograms,
- const int pass,
- __local int* loc_histo,
- const int n)
-{
- int it = get_local_id(0); // i local number of the processor
- int ig = get_global_id(0); // global number = i + g I
-
- int gr = get_group_id(0); // g group number
-
- int groups = get_num_groups(0);
- int items = get_local_size(0);
-
- // set the local histograms to zero
- for(int ir=0;ir<_RADIX;ir++){
- loc_histo[ir * items + it] = 0;
- }
-
- barrier(CLK_LOCAL_MEM_FENCE);
-
- // range of keys that are analyzed by the work item
- int size= n/groups/items; // size of the sub-list
- int start= ig * size; // beginning of the sub-list
-
- unsigned int key;
- int shortkey,k;
-
- // compute the index
- // the computation depends on the transposition
- for(int j = 0; j < size ; j++) {
-#ifdef TRANSPOSE
- k= groups * items * j + ig;
-#else
- k=j+start;
-#endif
-
- key = *((__global unsigned int*)(in_key_buf + k));
-
- // extract the group of _BITS bits of the pass
- // the result is in the range 0.._RADIX-1
- shortkey=(( key >> (pass * _BITS)) & (_RADIX-1));
-
- // increment the local histogram
- loc_histo[shortkey * items + it ]++;
- }
-
- barrier(CLK_LOCAL_MEM_FENCE);
-
- // copy the local histogram to the global one
- for(int ir=0;ir<_RADIX;ir++) {
- d_Histograms[items * (ir * groups + gr) + it] = loc_histo[ir * items + it];
- }
-
- barrier(CLK_GLOBAL_MEM_FENCE);
-}
-
-// initial transpose of the list for improving
-// coalescent memory access
-__kernel void transpose(const __global int* invect,
- __global int* outvect,
- const int nbcol,
- const int nbrow,
- const __global int* inperm,
- __global int* outperm,
- __local int* blockmat,
- __local int* blockperm,
- const int tilesize){
-
- int i0 = get_global_id(0)*tilesize; // first row index
- int j = get_global_id(1); // column index
-
- int jloc = get_local_id(1); // local column index
-
- // fill the cache
- for(int iloc=0;iloc<tilesize;iloc++){
- int k=(i0+iloc)*nbcol+j; // position in the matrix
- blockmat[iloc*tilesize+jloc]=invect[k];
-#ifdef PERMUT
- blockperm[iloc*tilesize+jloc]=inperm[k];
-#endif
- }
-
- barrier(CLK_LOCAL_MEM_FENCE);
-
- // first row index in the transpose
- int j0=get_group_id(1)*tilesize;
-
- // put the cache at the good place
- for(int iloc=0;iloc<tilesize;iloc++){
- int kt=(j0+iloc)*nbrow+i0+jloc; // position in the transpose
- outvect[kt]=blockmat[jloc*tilesize+iloc];
-#ifdef PERMUT
- outperm[kt]=blockperm[jloc*tilesize+iloc];
-#endif
- }
-
-}
-
-// each virtual processor reorders its data using the scanned histogram
-__kernel void radixsort_reorder(__global float* in_key,
- __global float* out_key,
- __global int* d_Histograms,
- const int pass,
- __global int* indices_in,
- __global int* indices_out,
- __local int* loc_histo,
- const int n){
-
- int it = get_local_id(0);
- int ig = get_global_id(0);
-
- int gr = get_group_id(0);
- int groups=get_num_groups(0);
- int items=get_local_size(0);
-
- int start= ig *(n/groups/items);
- int size= n/groups/items;
-
- // take the histogram in the cache
- for(int ir=0;ir<_RADIX;ir++){
- loc_histo[ir * items + it]=
- d_Histograms[items * (ir * groups + gr) + it];
- }
- barrier(CLK_LOCAL_MEM_FENCE);
-
- int newpos,shortkey,k,newpost;
- unsigned int key;
-
- for(int j= 0; j< size;j++){
-#ifdef TRANSPOSE
- k= groups * items * j + ig;
-#else
- k=j+start;
-#endif
- float org_value = in_key[k];
- key = *(__global unsigned int*)(in_key + k);
- shortkey=((key >> (pass * _BITS)) & (_RADIX-1));
-
- newpos=loc_histo[shortkey * items + it];
-
-#ifdef TRANSPOSE
- int ignew,jnew;
- ignew= newpos/(n/groups/items);
- jnew = newpos%(n/groups/items);
- newpost = jnew * (groups*items) + ignew;
-#else
- newpost=newpos;
-#endif
-
- //d_outKeys[newpost]= key; // killing line !!!
- out_key[newpost] = org_value;
-
-#ifdef PERMUT
- indices_out[newpost] = indices_in[k];
-#endif
-
- newpos++;
- loc_histo[shortkey * items + it]=newpos;
- }
-}
-
-// perform a parallel prefix sum (a scan) on the local histograms
-// (see Blelloch 1990) each workitem worries about two memories
-// see also http://http.developer.nvidia.com/GPUGems3/gpugems3_ch39.html
-__kernel void radixsort_scanhistograms(__global int* histo, __local int* temp, __global int* globsum)
-{
- int it = get_local_id(0);
- int ig = get_global_id(0);
- int decale = 1;
- int n=get_local_size(0) * 2 ;
- int gr=get_group_id(0);
-
- // load input into local memory
- // up sweep phase
- temp[2*it] = histo[2*ig];
- temp[2*it+1] = histo[2*ig+1];
-
- // parallel prefix sum (algorithm of Blelloch 1990)
- for (int d = n>>1; d > 0; d >>= 1){
- barrier(CLK_LOCAL_MEM_FENCE);
- if (it < d){
- int ai = decale*(2*it+1)-1;
- int bi = decale*(2*it+2)-1;
- temp[bi] += temp[ai];
- }
- decale *= 2;
- }
-
- // store the last element in the global sum vector
- // (maybe used in the next step for constructing the global scan)
- // clear the last element
- if (it == 0) {
- globsum[gr]=temp[n-1];
- temp[n - 1] = 0;
- }
-
- // down sweep phase
- for (int d = 1; d < n; d *= 2){
- decale >>= 1;
- barrier(CLK_LOCAL_MEM_FENCE);
-
- if (it < d){
- int ai = decale*(2*it+1)-1;
- int bi = decale*(2*it+2)-1;
-
- int t = temp[ai];
- temp[ai] = temp[bi];
- temp[bi] += t;
- }
-
- }
- barrier(CLK_LOCAL_MEM_FENCE);
-
- // write results to device memory
-
- histo[2*ig] = temp[2*it];
- histo[2*ig+1] = temp[2*it+1];
-
- barrier(CLK_GLOBAL_MEM_FENCE);
-
-}
-
-// use the global sum for updating the local histograms
-// each work item updates two values
-__kernel void radixsort_pastehistograms( __global int* histo,__global int* globsum)
-{
- int ig = get_global_id(0);
- int gr=get_group_id(0);
-
- int s;
-
- s=globsum[gr];
-
- // write results to device memory
- histo[2*ig] += s;
- histo[2*ig+1] += s;
-
- barrier(CLK_GLOBAL_MEM_FENCE);
-}
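
radixsort_scanhistograms is Blelloch's work-efficient exclusive scan: an up-sweep builds a reduction tree in place, the last element (the total) is saved to globsum and cleared, and a down-sweep converts the tree into exclusive prefixes. A minimal CPU sketch of the same algorithm (illustrative, not library code):

#include <vector>

// Exclusive prefix sum over a power-of-two-sized array, structured like the kernel's
// up-sweep / down-sweep phases (Blelloch 1990). Returns the total, which the kernel
// writes to globsum before clearing the last slot.
int blelloch_scan(std::vector<int> &a)
{
  const int n = static_cast<int>(a.size()); // assumed to be a power of two

  // Up-sweep: build a reduction tree in place.
  for (int d = 1; d < n; d *= 2)
    for (int i = 0; i + 2 * d <= n; i += 2 * d)
      a[i + 2 * d - 1] += a[i + d - 1];

  const int total = a[n - 1];
  a[n - 1] = 0;

  // Down-sweep: turn the tree into exclusive prefixes.
  for (int d = n / 2; d >= 1; d /= 2)
    for (int i = 0; i + 2 * d <= n; i += 2 * d)
    {
      const int t = a[i + d - 1];
      a[i + d - 1] = a[i + 2 * d - 1];
      a[i + 2 * d - 1] += t;
    }
  return total;
}

The kernel additionally splits the scan across work-groups, which is why radixsort_pastehistograms later adds each group's global sum back into its local results.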
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLActivationLayerExKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLActivationLayerExKernel.cpp
deleted file mode 100644
index 1fdd2f98f..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLActivationLayerExKernel.cpp
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLActivationLayerExKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-#include "arm_compute/core/UtilsEx.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
- const ActivationLayerInfoEx &act_info)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
- DataType::F16, DataType::F32);
-
- // Checks performed when output is configured
- if ((output != nullptr) && (output->total_size() != 0))
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(input, output);
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
- }
-
- return Status{};
-}
-
-std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input, ITensorInfo *output)
-{
- if (output != nullptr)
- {
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- // Output auto-initialization if not yet initialized
- auto_init_if_empty(*output, *input);
- }
-
- const unsigned int num_elems_processed_per_iteration = 16 / input->element_size();
-
- Window win = calculate_max_window(*input, Steps(num_elems_processed_per_iteration));
- bool window_changed = false;
-
- if (output != nullptr)
- {
- AccessWindowHorizontal input_access(input, 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output, 0, num_elems_processed_per_iteration);
- window_changed = update_window_and_padding(win, input_access, output_access);
- output_access.set_valid_region(win, input->valid_region());
- }
- else
- {
- window_changed = update_window_and_padding(
- win, AccessWindowHorizontal(input, 0, num_elems_processed_per_iteration));
- }
-
- Status err = (window_changed)
- ? ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!")
- : Status{};
- return std::make_pair(err, win);
-}
-} // namespace
-
-CLActivationLayerExKernel::CLActivationLayerExKernel()
- : _input(nullptr), _output(nullptr), _run_in_place(false)
-{
-}
-
-void CLActivationLayerExKernel::configure(ICLTensor *input, ICLTensor *output,
- ActivationLayerInfoEx act_info)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input);
-
- _run_in_place = (output == nullptr) || (output == input);
-
- if (output != nullptr)
- {
- // Output auto-initialization if not yet initialized
- auto_init_if_empty(*output->info(), *input->info()->clone());
- }
-
- ARM_COMPUTE_ERROR_THROW_ON(
- validate_arguments(input->info(), (output != nullptr) ? output->info() : nullptr, act_info));
-
- const unsigned int num_elems_processed_per_iteration = 16 / input->info()->element_size();
- const DataType dt = input->info()->data_type();
- float a_const = act_info.a();
- float b_const = act_info.b();
- int a_const_int = 0;
- int b_const_int = 0;
-
- // Create quantized version of constants a, b if needed
- if (is_data_type_quantized(dt))
- {
- a_const_int =
- input->info()->quantization_info().quantize(a_const, RoundingPolicy::TO_NEAREST_UP);
- b_const_int =
- input->info()->quantization_info().quantize(b_const, RoundingPolicy::TO_NEAREST_UP);
- }
-
- // Set build options
- std::set<std::string> build_opts;
- build_opts.emplace(
- ("-DACT=" + lower_string(string_from_activation_func_ex(act_info.activation()))));
- build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(dt)));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
-
- if (is_data_type_quantized(dt))
- {
- build_opts.emplace(("-DA_VAL=" + support::cpp11::to_string(a_const_int)));
- build_opts.emplace(("-DB_VAL=" + support::cpp11::to_string(b_const_int)));
-
- const int o1 = input->info()->quantization_info().offset;
- // Quantized value of 0 corresponds to the offset o1
- build_opts.emplace(("-DCONST_0=" + support::cpp11::to_string(o1)));
-
- // Set scale and offset of the input and output if they have different quantization info
- if (is_data_type_quantized_asymmetric(dt) && output != nullptr)
- {
- const float s1 = input->info()->quantization_info().scale;
- const float s2 = output->info()->quantization_info().scale;
- const int o2 = output->info()->quantization_info().offset;
-
- if (o1 != o2 || s1 != s2)
- {
- build_opts.emplace(("-DS1_VAL=" + float_to_string_with_full_precision(s1)));
- build_opts.emplace(("-DS2_VAL=" + float_to_string_with_full_precision(s2)));
- build_opts.emplace(("-DO1_VAL=" + support::cpp11::to_string(o1)));
- build_opts.emplace(("-DO2_VAL=" + support::cpp11::to_string(o2)));
- }
- }
- }
- else
- {
- build_opts.emplace(("-DA_VAL=" + float_to_string_with_full_precision(a_const)));
- build_opts.emplace(("-DB_VAL=" + float_to_string_with_full_precision(b_const)));
- }
-
- build_opts.emplace((_run_in_place) ? "-DIN_PLACE" : "");
-
- // Create kernel
- std::string kernel_name = std::string("activation_layer_ex");
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- // Make sure _kernel is initialized before calling the parent's configure
- _input = input;
- _output = output;
-
- // Configure kernel window
- auto win_config =
- validate_and_configure_window(input->info(), (_run_in_place) ? nullptr : output->info());
- ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
- ICLKernel::configure_internal(win_config.second);
-
- // Set config_id for enabling LWS tuning
- _config_id = "activation_layer_ex_";
- _config_id += lower_string(string_from_data_type(dt));
- _config_id += "_";
- _config_id += support::cpp11::to_string(input->info()->dimension(0));
- _config_id += "_";
- _config_id += support::cpp11::to_string(input->info()->dimension(1));
-}
-
-Status CLActivationLayerExKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
- const ActivationLayerInfoEx &act_info)
-{
- const bool run_in_place = (output == nullptr) || (output == input);
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, output, act_info));
- ARM_COMPUTE_RETURN_ON_ERROR(
- validate_and_configure_window(input->clone().get(),
- (run_in_place) ? nullptr : output->clone().get())
- .first);
-
- return Status{};
-}
-
-void CLActivationLayerExKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- Window collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ);
- Window slice = collapsed.first_slice_window_3D();
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input, slice);
- if (!_run_in_place)
- {
- add_3D_tensor_argument(idx, _output, slice);
- }
- enqueue(queue, *this, slice, lws_hint());
- } while (collapsed.slide_window_slice_3D(slice));
-}
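
The quantized path in configure() maps the float constants a and b into the input's quantized domain before baking them into build options. The arithmetic is ordinary asymmetric affine quantization; the sketch below is illustrative, assuming QASYMM8's [0, 255] range and round-half-up behaviour as an approximation of RoundingPolicy::TO_NEAREST_UP.

#include <algorithm>
#include <cmath>
#include <cstdint>

// Asymmetric affine quantization: q = clamp(round(v / scale) + offset, 0, 255).
uint8_t quantize_qasymm8(float v, float scale, int offset)
{
  const int q = static_cast<int>(std::floor(v / scale + 0.5f)) + offset;
  return static_cast<uint8_t>(std::min(255, std::max(0, q)));
}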
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLArgMinMaxKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLArgMinMaxKernel.cpp
deleted file mode 100644
index c1a2ad0be..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLArgMinMaxKernel.cpp
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLArgMinMaxKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-const TensorShape inferOutputShape(const TensorShape &input_shape, const uint32_t argminmax_axis)
-{
- TensorShape out_shape{input_shape};
-
- out_shape.set(argminmax_axis, 1);
-
- return out_shape;
-}
-} // namespace
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
- const uint32_t argminmax_axis, ArgOperation op)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::S32, DataType::F32,
- DataType::U8);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
-
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DIMENSIONS(input, output);
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->tensor_shape().total_size() == 0,
- "Inputs are not broadcast compatible");
-
- const TensorShape output_shape = inferOutputShape(input->tensor_shape(), argminmax_axis);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(output_shape.total_size() != output->tensor_shape().total_size(),
- "output shape's size does not match argminmax_axis");
-
- const auto num_dimensions = input->tensor_shape().num_dimensions();
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- argminmax_axis >= num_dimensions,
- "argminmax_axis must be greater than or equal to 0 and less than the input's rank.");
- return Status{};
-}
-
-} // namespace
-
-CLArgMinMaxKernel::CLArgMinMaxKernel() : _input(nullptr), _output(nullptr), _argminmax_axis() {}
-
-void CLArgMinMaxKernel::configure(const ICLTensor *input, ICLTensor *output,
- const uint32_t argminmax_axis, ArgOperation op)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), argminmax_axis, op));
-
- _input = input;
- _output = output;
- _argminmax_axis = argminmax_axis;
-
- std::unique_ptr<ITensorInfo> output_info = output->info()->clone();
- output_info->set_tensor_shape(inferOutputShape(input->info()->tensor_shape(), argminmax_axis));
-
- // Construct kernel name for argmax and argmin based on axis
- std::string kernel_name = "arg_op";
- int op_code = 0;
- if (op == ArgOperation::MAX)
- {
- op_code = 1;
- }
- else if (op == ArgOperation::MIN)
- {
- op_code = 2;
- }
- else
- throw std::runtime_error("Operation not supported, yet");
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(output_info->data_type()));
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output_info->dimension(2)));
- build_opts.emplace("-DOP_CODE=" + support::cpp11::to_string(op_code));
-
- // Create kernel
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*output_info, Steps());
-
- Coordinates coord;
- coord.set_num_dimensions(output_info->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output_info->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-Status CLArgMinMaxKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
- const uint32_t argminmax_axis, ArgOperation op)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, output, argminmax_axis, op));
-
- return Status{};
-}
-
-void CLArgMinMaxKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &shape_in = _input->info()->tensor_shape();
-
- unsigned int idx = 2 * num_arguments_per_4D_tensor(); // Skip the input and output parameters
-
- _kernel.setArg<cl_int>(idx++, _argminmax_axis);
- _kernel.setArg<cl_int>(idx++, shape_in[_argminmax_axis]);
-
- Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
- // Setup input slice
- Window slice_in(slice_out);
- slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_in.set(3, Window::Dimension(0, 0, 0));
-
- // Copy output's shape in order to use for recovering at end of this method
- const TensorShape shape_out = _output->info()->tensor_shape();
- _output->info()->set_tensor_shape(inferOutputShape(shape_in, _argminmax_axis));
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- enqueue(queue, *this, slice_out);
- } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
-
- // Recover output's shape of output tensor
- _output->info()->set_tensor_shape(shape_out);
-}
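
For reference, the reduction this kernel dispatches can be written directly. A minimal CPU sketch of arg-min/arg-max along one axis of a dense 4D tensor with x fastest-moving (illustrative names, not library code; the library kernel instead collapses the window and passes the axis and its extent as kernel arguments, as run() shows above):

#include <cstddef>
#include <vector>

std::vector<int> arg_min_max_4d(const std::vector<float> &in, const int d[4], int axis,
                                bool take_max)
{
  int od[4] = {d[0], d[1], d[2], d[3]};
  od[axis] = 1; // the output keeps extent 1 on the reduced axis
  std::vector<int> out(static_cast<std::size_t>(od[0]) * od[1] * od[2] * od[3]);

  auto flat = [](const int i[4], const int e[4]) {
    return ((static_cast<std::size_t>(i[3]) * e[2] + i[2]) * e[1] + i[1]) * e[0] + i[0];
  };

  int o[4];
  for (o[3] = 0; o[3] < od[3]; ++o[3])
    for (o[2] = 0; o[2] < od[2]; ++o[2])
      for (o[1] = 0; o[1] < od[1]; ++o[1])
        for (o[0] = 0; o[0] < od[0]; ++o[0])
        {
          int i[4] = {o[0], o[1], o[2], o[3]};
          int best = 0;
          float best_v = in[flat(i, d)];
          for (int k = 1; k < d[axis]; ++k)
          {
            i[axis] = k;
            const float v = in[flat(i, d)];
            if (take_max ? (v > best_v) : (v < best_v)) { best_v = v; best = k; }
          }
          out[flat(o, od)] = best; // store the winning index, not the value
        }
  return out;
}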
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLArithmeticSubtractionExKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLArithmeticSubtractionExKernel.cpp
deleted file mode 100644
index 1c505b4d5..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLArithmeticSubtractionExKernel.cpp
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLArithmeticSubtractionExKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate_arguments(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, ConvertPolicy policy)
-{
- ARM_COMPUTE_UNUSED(policy);
-
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input1, 1, DataType::U8, DataType::S16,
- DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input2, 1, DataType::U8, DataType::S16,
- DataType::F16, DataType::F32);
-
- const TensorShape &out_shape =
- TensorShape::broadcast_shape(input1->tensor_shape(), input2->tensor_shape());
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
- "Inputs are not broadcast compatible");
-
- // Validate in case of configured output
- if (output->total_size() > 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::S16,
- DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- output->data_type() == DataType::U8 &&
- (input1->data_type() != DataType::U8 || input2->data_type() != DataType::U8),
- "Output can only be U8 if both inputs are U8");
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
- "Wrong shape for output");
- }
-
- return Status{};
-}
-
-std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input1, ITensorInfo *input2,
- ITensorInfo *output)
-{
- const std::pair<TensorShape, ValidRegion> broadcast_pair =
- ITensorInfo::broadcast_shape_and_valid_region(*input1, *input2);
- const TensorShape &out_shape = broadcast_pair.first;
- const ValidRegion &valid_region = broadcast_pair.second;
-
- // Auto initialize output if not initialized
- {
- set_shape_if_empty(*output, out_shape);
-
- if (input1->data_type() == DataType::S16 || input2->data_type() == DataType::S16)
- {
- set_format_if_unknown(*output, Format::S16);
- }
- else if (input1->data_type() == DataType::F16 && input2->data_type() == DataType::F16)
- {
- set_format_if_unknown(*output, Format::F16);
- }
- else if (input1->data_type() == DataType::F32 || input2->data_type() == DataType::F32)
- {
- set_format_if_unknown(*output, Format::F32);
- }
- }
-
- Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
- Window win_input1 = win.broadcast_if_dimension_le_one(*input1);
- Window win_input2 = win.broadcast_if_dimension_le_one(*input2);
-
- AccessWindowHorizontal input1_access(input1, 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal input2_access(input2, 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output, 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win_input1, input1_access) ||
- update_window_and_padding(win_input2, input2_access) ||
- update_window_and_padding(win, output_access);
-
- output_access.set_valid_region(win, valid_region);
-
- Status err = (window_changed)
- ? ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!")
- : Status{};
- return std::make_pair(err, win);
-}
-} // namespace
-
-CLArithmeticSubtractionExKernel::CLArithmeticSubtractionExKernel()
- : _input1(nullptr), _input2(nullptr), _output(nullptr)
-{
-}
-
-void CLArithmeticSubtractionExKernel::configure(const ICLTensor *input1, const ICLTensor *input2,
- ICLTensor *output, ConvertPolicy policy)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input1, input2, output);
- ARM_COMPUTE_ERROR_THROW_ON(
- validate_arguments(input1->info(), input2->info(), output->info(), policy));
-
- // Configure kernel window
- auto win_config = validate_and_configure_window(input1->info(), input2->info(), output->info());
- ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
-
- _input1 = input1;
- _input2 = input2;
- _output = output;
-
- const bool has_float_out = is_data_type_float(output->info()->data_type());
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace((policy == ConvertPolicy::WRAP || has_float_out) ? "-DWRAP" : "-DSATURATE");
- build_opts.emplace("-DDATA_TYPE_IN1=" + get_cl_type_from_data_type(input1->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_IN2=" + get_cl_type_from_data_type(input2->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_OUT=" + get_cl_type_from_data_type(output->info()->data_type()));
-
- // Create kernel
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel("arithmetic_sub_ex", build_opts));
-
- ICLKernel::configure_internal(win_config.second);
-}
-
-Status CLArithmeticSubtractionExKernel::validate(const ITensorInfo *input1,
- const ITensorInfo *input2,
- const ITensorInfo *output, ConvertPolicy policy)
-{
-  ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input1, input2, output);
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input1, input2, output, policy));
- ARM_COMPUTE_RETURN_ON_ERROR(validate_and_configure_window(input1->clone().get(),
- input2->clone().get(),
- output->clone().get())
- .first);
-
- return Status{};
-}
-
-void CLArithmeticSubtractionExKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &in_shape1 = _input1->info()->tensor_shape();
- const TensorShape &in_shape2 = _input2->info()->tensor_shape();
- const TensorShape &out_shape = _output->info()->tensor_shape();
-
- bool can_collapse = true;
- if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
- {
- can_collapse =
- (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
- for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); d++)
- {
- can_collapse = (in_shape1[d] == in_shape2[d]);
- }
- }
-
- bool has_collapsed = false;
- Window collapsed =
- can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
- : window;
-
- const TensorShape &in_shape1_collapsed =
- has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
- const TensorShape &in_shape2_collapsed =
- has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
-
- Window slice = collapsed.first_slice_window_3D();
- Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
- Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
-
- do
- {
- unsigned int idx = 0;
-
- add_3D_tensor_argument(idx, _input1, slice_input1);
- add_3D_tensor_argument(idx, _input2, slice_input2);
- add_3D_tensor_argument(idx, _output, slice);
-
- enqueue(queue, *this, slice);
-
- collapsed.slide_window_slice_3D(slice_input1);
- collapsed.slide_window_slice_3D(slice_input2);
- } while (collapsed.slide_window_slice_3D(slice));
-}
-
-BorderSize CLArithmeticSubtractionExKernel::border_size() const
-{
- const unsigned int replicateSize =
- _output->info()->dimension(0) -
- std::min(_input1->info()->dimension(0), _input2->info()->dimension(0));
- const unsigned int border =
- std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
- return BorderSize(0, border, 0, 0);
-}
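
The border_size() above pads only on the right: when one input is broadcast along dimension 0, up to num_elems_processed_per_iteration - 1 elements of the last vector must be replicated. A standalone sketch of that computation (the function name and values are illustrative):

    #include <algorithm>
    #include <cstdio>

    // Mirrors the border computation: replicate up to (vector width - 1)
    // elements when one input's dim0 is smaller than the output's dim0.
    unsigned int right_border(unsigned int out_dim0, unsigned int in1_dim0,
                              unsigned int in2_dim0, unsigned int vec_width)
    {
      const unsigned int replicate = out_dim0 - std::min(in1_dim0, in2_dim0);
      return std::min(vec_width - 1U, replicate);
    }

    int main()
    {
      // e.g. a [1] tensor subtracted from a [37] tensor with 16-wide vectors
      std::printf("border = %u\n", right_border(37, 37, 1, 16)); // prints 15
    }
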
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLBatchToSpaceNDKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLBatchToSpaceNDKernel.cpp
deleted file mode 100644
index b0016d23c..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLBatchToSpaceNDKernel.cpp
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLBatchToSpaceNDKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
- const int32_t *block_size)
-{
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
-                                                       DataType::S16, DataType::S32, DataType::F16,
-                                                       DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
-                                                       DataType::S16, DataType::S32, DataType::F16,
-                                                       DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size[0] < 1 || block_size[1] < 1,
-                                  "Block size should be greater than or equal to 1.");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(2) != output->dimension(2),
-                                  "Input Depth should be equal to Output Depth");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(
-      output->dimension(3) * block_size[0] * block_size[1] != input->dimension(3),
-      "Input batch should be equal to (output batch * block size[0] * block size[1])");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG((output->dimension(1) % block_size[0] != 0) ||
-                                      (output->dimension(0) % block_size[1] != 0),
-                                  "Output height and width should be divisible by block size[0] "
-                                  "and block size[1] respectively");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG((output->dimension(1) != input->dimension(1) * block_size[0]) ||
-                                      (output->dimension(0) != input->dimension(0) * block_size[1]),
-                                  "Output height and width should be equal to "
-                                  "input_height*blocksize[0] and input_width*blocksize[1] "
-                                  "respectively");
-
- return Status{};
-}
-
-} // namespace
-
-CLBatchToSpaceNDKernel::CLBatchToSpaceNDKernel() : _input(nullptr), _output(nullptr) {}
-
-void CLBatchToSpaceNDKernel::configure(const ICLTensor *input, ICLTensor *output,
- const int32_t *block_size)
-{
-
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), block_size));
-
- _input = input;
- _output = output;
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DBLOCK_SIZE0=" + support::cpp11::to_string(block_size[0]));
- build_opts.emplace("-DBLOCK_SIZE1=" + support::cpp11::to_string(block_size[1]));
- build_opts.emplace("-DBATCH_OUT=" + support::cpp11::to_string(output->info()->dimension(3)));
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
-
- // Create kernel
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel("batch_to_space_nd", build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*output->info(), Steps());
-
- Coordinates coord;
- coord.set_num_dimensions(output->info()->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-void CLBatchToSpaceNDKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
-
-  Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
-  // Setup input slice; the kernel derives the input coordinates itself
-  Window slice_in(slice_out);
-  slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
-  slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
-  slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
-  slice_in.set(3, Window::Dimension(0, 0, 0));
-
-  do
-  {
-    unsigned int idx = 0;
-    add_4D_tensor_argument(idx, _input, slice_in);
-    add_4D_tensor_argument(idx, _output, slice_out);
-    enqueue(queue, *this, slice_out);
-  } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
-}
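
The kernel iterates over the output and computes the source coordinates itself, which is why the input slice is zeroed above. A host-side sketch of the index mapping for a tensor laid out as idx = w + W*(h + H*(c + C*n)) (dims 0..3 = W, H, C, N, matching the build options); the exact packing order of the batch offset is an assumption:

    #include <cstddef>
    #include <vector>

    // Reference batch_to_space: output batch n plus a spatial phase selects
    // the input batch the element was packed into.
    std::vector<float> batch_to_space_ref(const std::vector<float> &in,
                                          std::size_t W, std::size_t H,
                                          std::size_t C, std::size_t N,
                                          std::size_t b0, std::size_t b1)
    {
      const std::size_t Wo = W * b1, Ho = H * b0, No = N / (b0 * b1);
      std::vector<float> out(Wo * Ho * C * No);
      for (std::size_t n = 0; n < No; ++n)
        for (std::size_t c = 0; c < C; ++c)
          for (std::size_t h = 0; h < Ho; ++h)
            for (std::size_t w = 0; w < Wo; ++w)
            {
              // Which input batch this spatial offset was packed into
              const std::size_t n_in = n + No * ((h % b0) * b1 + (w % b1));
              const std::size_t i_in =
                  (w / b1) + W * ((h / b0) + H * (c + C * n_in));
              out[w + Wo * (h + Ho * (c + C * n))] = in[i_in];
            }
      return out;
    }
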
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp
deleted file mode 100644
index 3d2f2c702..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate_parameters(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output)
-{
- const TensorShape &out_shape =
- TensorShape::broadcast_shape(input1->tensor_shape(), input2->tensor_shape());
-
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input1, 1, DataType::U8, DataType::QASYMM8);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input2, 1, DataType::U8, DataType::QASYMM8);
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
- "Inputs are not broadcast compatible");
- // Validate in case of configured output
- if (output->total_size() > 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8,
- DataType::QASYMM8);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
- "Wrong shape for output");
- }
- return Status{};
-}
-} // namespace
-
-CLBinaryLogicalOpKernel::CLBinaryLogicalOpKernel()
- : _input1(nullptr), _input2(nullptr), _output(nullptr)
-{
-}
-
-void CLBinaryLogicalOpKernel::configure(const ICLTensor *input1, const ICLTensor *input2,
- ICLTensor *output, BinaryLogicalOperation op)
-{
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input1, input2);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input1, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_parameters(input1->info(), input2->info(), output->info()));
-
- _input1 = input1;
- _input2 = input2;
- _output = output;
-
- // Create kernel
- std::string kernel_name = "binary_logical_op";
- std::set<std::string> build_opts;
- build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input1->info()->data_type())));
-
- int op_code = 0;
- switch (op)
- {
- case BinaryLogicalOperation::AND:
- op_code = 1;
- break;
- case BinaryLogicalOperation::OR:
- op_code = 2;
- break;
- default:
- throw std::runtime_error("Operation not supported, yet");
- }
-
- build_opts.emplace(("-DOP_CODE=" + support::cpp11::to_string(op_code)));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
-
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- const std::pair<TensorShape, ValidRegion> broadcast_pair =
- ITensorInfo::broadcast_shape_and_valid_region(*input1->info(), *input2->info());
-
- const TensorShape &out_shape = broadcast_pair.first;
- const ValidRegion &valid_region = broadcast_pair.second;
-
- Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
- Window win_input1 = win.broadcast_if_dimension_le_one(*input1->info());
- Window win_input2 = win.broadcast_if_dimension_le_one(*input2->info());
-
- AccessWindowHorizontal input1_access(input1->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal input2_access(input2->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win_input1, input1_access) ||
- update_window_and_padding(win_input2, input2_access) ||
- update_window_and_padding(win, output_access);
-
- output_access.set_valid_region(win, valid_region);
-
- ICLKernel::configure_internal(win);
-}
-
-void CLBinaryLogicalOpKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &in_shape1 = _input1->info()->tensor_shape();
- const TensorShape &in_shape2 = _input2->info()->tensor_shape();
- const TensorShape &out_shape = _output->info()->tensor_shape();
-
- bool can_collapse = true;
- if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
- {
- can_collapse =
- (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
- for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); d++)
- {
- can_collapse = (in_shape1[d] == in_shape2[d]);
- }
- }
-
- bool has_collapsed = false;
- Window collapsed =
- can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
- : window;
-
- const TensorShape &in_shape1_collapsed =
- has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
- const TensorShape &in_shape2_collapsed =
- has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
-
- Window slice = collapsed.first_slice_window_3D();
- Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
- Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input1, slice_input1);
- add_3D_tensor_argument(idx, _input2, slice_input2);
- add_3D_tensor_argument(idx, _output, slice);
-
- enqueue(queue, *this, slice);
-
- collapsed.slide_window_slice_3D(slice_input1);
- collapsed.slide_window_slice_3D(slice_input2);
- } while (collapsed.slide_window_slice_3D(slice));
-}
-
-BorderSize CLBinaryLogicalOpKernel::border_size() const
-{
- const unsigned int replicateSize =
- _output->info()->dimension(0) -
- std::min(_input1->info()->dimension(0), _input2->info()->dimension(0));
- const unsigned int border =
- std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
- return BorderSize(0, border, 0, 0);
-}
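
The OP_CODE build option selects the element-wise operation (1 = AND, 2 = OR). A plain reference for U8 data, treating non-zero bytes as true; the helper name is illustrative:

    #include <cstddef>
    #include <cstdint>
    #include <stdexcept>
    #include <vector>

    // Element-wise reference for binary_logical_op on U8 buffers,
    // using the kernel's op codes (1 = AND, 2 = OR).
    std::vector<uint8_t> binary_logical_ref(const std::vector<uint8_t> &a,
                                            const std::vector<uint8_t> &b,
                                            int op_code)
    {
      std::vector<uint8_t> out(a.size());
      for (std::size_t i = 0; i < a.size(); ++i)
      {
        const bool x = a[i] != 0;
        const bool y = b[i] != 0;
        bool r = false;
        if (op_code == 1)
          r = x && y;
        else if (op_code == 2)
          r = x || y;
        else
          throw std::runtime_error("Operation not supported yet");
        out[i] = r ? 1 : 0;
      }
      return out;
    }
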
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp
deleted file mode 100644
index bf7ebae3f..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLCastKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-CLCastKernel::CLCastKernel() : _input(nullptr), _output(nullptr) {}
-
-void CLCastKernel::configure(const ICLTensor *input, ICLTensor *output)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
-
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::S32, DataType::F16,
- DataType::F32);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::S32, DataType::F16,
- DataType::F32);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_SHAPES(input, output);
-
- _input = input;
- _output = output;
-
- constexpr unsigned int num_elems_processed_per_iteration = 16;
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE_IN=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_OUT=" + get_cl_type_from_data_type(output->info()->data_type()));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
-
- // Create kernel
- if (is_data_type_quantized_asymmetric(input->info()->data_type()))
- {
- const float scale_in = input->info()->quantization_info().scale;
- const int offset_in = input->info()->quantization_info().offset;
- build_opts.emplace("-DSCALE=" + float_to_string_with_full_precision(scale_in));
- build_opts.emplace("-DOFFSET=" + support::cpp11::to_string(offset_in));
-
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel("cast_qasymm_in", build_opts));
- }
- else if (is_data_type_quantized_asymmetric(output->info()->data_type()))
- {
-    const float scale_out = output->info()->quantization_info().scale;
-    const int offset_out = output->info()->quantization_info().offset;
-    build_opts.emplace("-DSCALE=" + float_to_string_with_full_precision(scale_out));
-    build_opts.emplace("-DOFFSET=" + support::cpp11::to_string(offset_out));
-
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel("cast_qasymm_out", build_opts));
- }
- else
- {
- _kernel = static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("cast", build_opts));
- }
-
- // Configure kernel window
- Window win = calculate_max_window(*input->info(), Steps(num_elems_processed_per_iteration));
- AccessWindowHorizontal input_access(input->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
- update_window_and_padding(win, input_access, output_access);
- output_access.set_valid_region(win, input->info()->valid_region());
-
- ICLKernel::configure_internal(win);
-}
-
-void CLCastKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- Window collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ);
- Window slice = collapsed.first_slice_window_3D();
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input, slice);
- add_3D_tensor_argument(idx, _output, slice);
- enqueue(queue, *this, slice);
- } while (collapsed.slide_window_slice_3D(slice));
-}
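
The cast_qasymm_in and cast_qasymm_out paths pass the tensor's quantization scale and offset as SCALE and OFFSET, i.e. real = scale * (q - offset) on the way in and the inverse on the way out. A host-side sketch; the round-to-nearest below is an assumption about the kernel's rounding mode:

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // QASYMM8 <-> float conversions behind cast_qasymm_in / cast_qasymm_out;
    // SCALE and OFFSET come from the tensor's quantization info.
    float dequantize_qasymm8(uint8_t q, float scale, int offset)
    {
      return scale * (static_cast<int>(q) - offset);
    }

    uint8_t quantize_qasymm8(float x, float scale, int offset)
    {
      const int q = static_cast<int>(std::lround(x / scale)) + offset;
      return static_cast<uint8_t>(std::min(255, std::max(0, q)));
    }
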
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLComparisonOpKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLComparisonOpKernel.cpp
deleted file mode 100644
index 5af5b16ea..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLComparisonOpKernel.cpp
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLComparisonOpKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate_arguments(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output)
-{
- const TensorShape &out_shape =
- TensorShape::broadcast_shape(input1->tensor_shape(), input2->tensor_shape());
-
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input1, 1, DataType::U8, DataType::U16,
- DataType::S16, DataType::F16, DataType::S32,
- DataType::F32, DataType::QASYMM8);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input2, 1, DataType::U8, DataType::U16,
- DataType::S16, DataType::F16, DataType::S32,
- DataType::F32, DataType::QASYMM8);
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
- "Inputs are not broadcast compatible");
- // Validate in case of configured output
- if (output->total_size() > 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::QASYMM8);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
- "Wrong shape for output");
- }
- return Status{};
-}
-} // namespace
-
-CLComparisonOpKernel::CLComparisonOpKernel() : _input1(nullptr), _input2(nullptr), _output(nullptr)
-{
-}
-
-void CLComparisonOpKernel::configure(const ICLTensor *input1, const ICLTensor *input2,
- ICLTensor *output, const ComparisonOperation &op)
-{
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input1, input2);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input1->info(), input2->info(), output->info()));
-
- _input1 = input1;
- _input2 = input2;
- _output = output;
-
- // Create kernel
- std::string kernel_name = "comparison_op";
- int op_code = 0;
-
- switch (op)
- {
- case ComparisonOperation::EQUAL:
- op_code = 1;
- break;
- case ComparisonOperation::NOT_EQUAL:
- op_code = 2;
- break;
- default:
- throw std::runtime_error(" Operation not supported, yet");
- }
-
- std::set<std::string> build_opts;
- build_opts.emplace(("-DOP_CODE=" + support::cpp11::to_string(op_code)));
- build_opts.emplace(("-DDATA_TYPE_IN=" + get_cl_type_from_data_type(input1->info()->data_type())));
- build_opts.emplace(
- ("-DDATA_TYPE_OUT=" + get_cl_type_from_data_type(output->info()->data_type())));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
-
- if (is_data_type_quantized_asymmetric(input1->info()->data_type()) &&
- ((input1->info()->quantization_info().offset != input2->info()->quantization_info().offset) ||
- (input1->info()->quantization_info().scale != input2->info()->quantization_info().scale)))
- {
- build_opts.emplace("-DOFFSET_IN1=" +
- support::cpp11::to_string(input1->info()->quantization_info().offset));
- build_opts.emplace("-DOFFSET_IN2=" +
- support::cpp11::to_string(input2->info()->quantization_info().offset));
- build_opts.emplace("-DSCALE_IN1=" +
- support::cpp11::to_string(input1->info()->quantization_info().scale));
- build_opts.emplace("-DSCALE_IN2=" +
- support::cpp11::to_string(input2->info()->quantization_info().scale));
- kernel_name += "_qasymm8";
- }
-
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- const std::pair<TensorShape, ValidRegion> broadcast_pair =
- ITensorInfo::broadcast_shape_and_valid_region(*input1->info(), *input2->info());
-
- const TensorShape &out_shape = broadcast_pair.first;
- const ValidRegion &valid_region = broadcast_pair.second;
-
- // Auto initialize output if not initialized
- {
- set_shape_if_empty(*output->info(), out_shape);
-
- if (input1->info()->data_type() == DataType::S16 ||
- input2->info()->data_type() == DataType::S16)
- {
- set_format_if_unknown(*output->info(), Format::S16);
- }
- else if (input1->info()->data_type() == DataType::F16 &&
- input2->info()->data_type() == DataType::F16)
- {
- set_format_if_unknown(*output->info(), Format::F16);
- }
- else if (input1->info()->data_type() == DataType::F32 ||
- input2->info()->data_type() == DataType::F32)
- {
- set_format_if_unknown(*output->info(), Format::F32);
- }
- }
-
- Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
- Window win_input1 = win.broadcast_if_dimension_le_one(*input1->info());
- Window win_input2 = win.broadcast_if_dimension_le_one(*input2->info());
-
- AccessWindowHorizontal input1_access(input1->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal input2_access(input2->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win_input1, input1_access) ||
- update_window_and_padding(win_input2, input2_access) ||
- update_window_and_padding(win, output_access);
-
- output_access.set_valid_region(win, valid_region);
-
- ICLKernel::configure_internal(win);
-}
-
-void CLComparisonOpKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &in_shape1 = _input1->info()->tensor_shape();
- const TensorShape &in_shape2 = _input2->info()->tensor_shape();
- const TensorShape &out_shape = _output->info()->tensor_shape();
-
- bool can_collapse = true;
- if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
- {
- can_collapse =
- (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
- for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); d++)
- {
- can_collapse = (in_shape1[d] == in_shape2[d]);
- }
- }
-
- bool has_collapsed = false;
- Window collapsed =
- can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
- : window;
-
- const TensorShape &in_shape1_collapsed =
- has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
- const TensorShape &in_shape2_collapsed =
- has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
-
- Window slice = collapsed.first_slice_window_3D();
- Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
- Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input1, slice_input1);
- add_3D_tensor_argument(idx, _input2, slice_input2);
- add_3D_tensor_argument(idx, _output, slice);
-
- enqueue(queue, *this, slice);
-
- collapsed.slide_window_slice_3D(slice_input1);
- collapsed.slide_window_slice_3D(slice_input2);
- } while (collapsed.slide_window_slice_3D(slice));
-}
-
-BorderSize CLComparisonOpKernel::border_size() const
-{
- const unsigned int replicateSize =
- _output->info()->dimension(0) -
- std::min(_input1->info()->dimension(0), _input2->info()->dimension(0));
- const unsigned int border =
- std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
- return BorderSize(0, border, 0, 0);
-}
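
When the two QASYMM8 inputs disagree on scale or offset, the kernel switches to the _qasymm8 variant and compares in the real domain rather than on the raw codes. A host-side sketch of that comparison; the tolerance parameter is illustrative, not something the kernel exposes:

    #include <cmath>
    #include <cstdint>

    // Dequantize both operands with their own (scale, offset), then compare.
    bool equal_qasymm8(uint8_t a, float scale1, int off1,
                       uint8_t b, float scale2, int off2, float tol = 0.0f)
    {
      const float ra = scale1 * (static_cast<int>(a) - off1);
      const float rb = scale2 * (static_cast<int>(b) - off2);
      return std::fabs(ra - rb) <= tol;
    }
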
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp
deleted file mode 100644
index c386e3312..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
- const int32_t block_size)
-{
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
-                                                       DataType::S16, DataType::S32, DataType::F16,
-                                                       DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
-                                                       DataType::S16, DataType::S32, DataType::F16,
-                                                       DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size < 1,
-                                  "Block size should be greater than or equal to 1.");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(0) != input->dimension(0) * block_size,
-                                  "Output width should be equal to (Input width * block size)");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(1) != input->dimension(1) * block_size,
-                                  "Output height should be equal to (Input height * block size)");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(2) % (block_size * block_size) != 0,
-                                  "Input depth should be divisible by (block size * block size)");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(
-      output->dimension(2) != input->dimension(2) / (block_size * block_size),
-      "Output depth should be equal to (Input depth / (block size * block size))");
-
- return Status{};
-}
-} // namespace
-
-CLDepthToSpaceKernel::CLDepthToSpaceKernel() : _input(nullptr), _output(nullptr)
-{
- // DO NOTHING
-}
-
-void CLDepthToSpaceKernel::configure(const ICLTensor *input, ICLTensor *output,
- const int32_t block_size)
-{
-  ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
-  ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), block_size));
-
-  _input = input;
- _output = output;
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DBLOCK_SIZE=" + support::cpp11::to_string(block_size));
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
-
- // Create kernel
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("depth_to_space", build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*output->info(), Steps());
-
- Coordinates coord;
- coord.set_num_dimensions(output->info()->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-void CLDepthToSpaceKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
-
- Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
- // Setup input slice
- Window slice_in(slice_out);
- slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_in.set(3, Window::Dimension(0, 0, 0));
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- enqueue(queue, *this, slice_out);
- } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
-}
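
As with batch_to_space_nd, the kernel walks the output and derives the input coordinates from BLOCK_SIZE. A host-side sketch of the mapping for a tensor laid out as idx = w + W*(h + H*(c + C*n)); the exact channel-offset ordering is an assumption:

    #include <cstddef>
    #include <vector>

    // Reference depth_to_space: output is (W*bs) x (H*bs) x (C/bs^2) x N.
    std::vector<float> depth_to_space_ref(const std::vector<float> &in,
                                          std::size_t W, std::size_t H,
                                          std::size_t C, std::size_t N,
                                          std::size_t bs)
    {
      const std::size_t Wo = W * bs, Ho = H * bs, Co = C / (bs * bs);
      std::vector<float> out(Wo * Ho * Co * N);
      for (std::size_t n = 0; n < N; ++n)
        for (std::size_t c = 0; c < Co; ++c)
          for (std::size_t h = 0; h < Ho; ++h)
            for (std::size_t w = 0; w < Wo; ++w)
            {
              // Which input channel this spatial offset was packed into
              const std::size_t c_in = c + Co * ((h % bs) * bs + (w % bs));
              const std::size_t i_in =
                  (w / bs) + W * ((h / bs) + H * (c_in + C * n));
              out[w + Wo * (h + Ho * (c + Co * n))] = in[i_in];
            }
      return out;
    }
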
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLExpKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLExpKernel.cpp
deleted file mode 100644
index b1ee21bdc..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLExpKernel.cpp
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLExpKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-CLExpKernel::CLExpKernel() : _input(nullptr), _output(nullptr) {}
-
-void CLExpKernel::configure(const ICLTensor *input, ICLTensor *output)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
-
- // Auto initialize output
- auto_init_if_empty(*output->info(), input->info()->tensor_shape(), 1, input->info()->data_type(),
- input->info()->quantization_info());
-
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F32);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_SHAPES(input, output);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
-
- _input = input;
- _output = output;
-
- constexpr unsigned int num_elems_processed_per_iteration = 4;
-
- // Create kernel
- std::set<std::string> build_opts;
- build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type())));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("exp_layer", build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*input->info(), Steps(num_elems_processed_per_iteration));
- AccessWindowHorizontal input_access(input->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
- update_window_and_padding(win, input_access, output_access);
- output_access.set_valid_region(win, input->info()->valid_region());
-
- ICLKernel::configure_internal(win);
-}
-
-void CLExpKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- Window collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ);
- Window slice = collapsed.first_slice_window_3D();
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input, slice);
- add_3D_tensor_argument(idx, _output, slice);
- enqueue(queue, *this, slice);
- } while (collapsed.slide_window_slice_3D(slice));
-}
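
The exp_layer kernel is a straightforward element-wise map, vectorized four floats at a time. Its scalar reference is simply:

    #include <cmath>
    #include <cstddef>
    #include <vector>

    // Element-wise reference for exp_layer (F32 only, per the validation above).
    std::vector<float> exp_ref(const std::vector<float> &in)
    {
      std::vector<float> out(in.size());
      for (std::size_t i = 0; i < in.size(); ++i)
        out[i] = std::exp(in[i]);
      return out;
    }
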
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLGatherKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLGatherKernel.cpp
deleted file mode 100644
index ae2801e2b..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLGatherKernel.cpp
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLGatherKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 1;
-
-Status validate_arguments(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input1, 1, DataType::U8, DataType::S32,
- DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input2, 1, DataType::S32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::S32,
- DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input1, output);
-
- return Status{};
-}
-
-} // namespace
-
-CLGatherKernel::CLGatherKernel() : _input1(nullptr), _input2(nullptr), _output(nullptr) {}
-
-void CLGatherKernel::configure(const ICLTensor *input1, const ICLTensor *input2, ICLTensor *output)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input1, input2, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input1->info(), input2->info(), output->info()));
-
- _input1 = input1;
- _input2 = input2;
- _output = output;
-
- // Construct kernel name
- std::string kernel_name = "gather";
- if (input1->info()->num_dimensions() == 1)
- {
- kernel_name = "gather_1d";
- }
- else if (input1->info()->num_dimensions() == 2)
- {
- if (_output->info()->num_dimensions() == 1)
- {
- kernel_name = "gather_1d_out";
- }
- }
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE_IN1=" + get_cl_type_from_data_type(input1->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_IN2=" + get_cl_type_from_data_type(input2->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_OUT=" + get_cl_type_from_data_type(output->info()->data_type()));
-
- // Create kernel
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*input2->info(), Steps(num_elems_processed_per_iteration));
- output->info()->set_valid_region(ValidRegion(Coordinates(), output->info()->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-Status CLGatherKernel::validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output)
-{
-  ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input1, input2, output);
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input1, input2, output));
-
- return Status{};
-}
-
-void CLGatherKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(IKernel::window(), window);
-
- if (_input1->info()->num_dimensions() == 1)
- {
- Window slice = window.first_slice_window_1D();
-
- unsigned int idx = 0;
- add_1D_tensor_argument(idx, _input1, slice);
- add_1D_tensor_argument(idx, _input2, slice);
- add_1D_tensor_argument(idx, _output, slice);
- enqueue(queue, *this, slice);
- }
- else if (_input1->info()->num_dimensions() == 2)
- {
- Window window_collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimY);
- Window slice = window.collapse_if_possible(ICLKernel::window(), Window::DimX);
-
- // Set inputs
- unsigned int idx = 0;
- add_2D_tensor_argument(idx, _input1, window_collapsed);
- add_1D_tensor_argument(idx, _input2, slice);
- if (_output->info()->num_dimensions() == 1)
- {
- add_1D_tensor_argument(idx, _output, slice);
- }
- else
- {
- add_2D_tensor_argument(idx, _output, window_collapsed);
- }
- enqueue(queue, *this, slice);
- }
-}
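
The three kernel variants (gather, gather_1d, gather_1d_out) all gather along the outer axis: each output row is the input row named by the index tensor. A host-side reference, with cols = 1 covering the 1-D case; the helper name is illustrative:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Reference gather over a rows x cols row-major buffer.
    std::vector<float> gather_ref(const std::vector<float> &in, std::size_t cols,
                                  const std::vector<int32_t> &indices)
    {
      std::vector<float> out(indices.size() * cols);
      for (std::size_t i = 0; i < indices.size(); ++i)
        for (std::size_t c = 0; c < cols; ++c)
          out[i * cols + c] = in[static_cast<std::size_t>(indices[i]) * cols + c];
      return out;
    }
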
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp
deleted file mode 100644
index cd7b21c6d..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLHashtableLookupKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input, ITensorInfo *output)
-{
- Window win = calculate_max_window(*output, Steps(num_elems_processed_per_iteration));
- AccessWindowHorizontal input_access(input, 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output, 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win, input_access, output_access);
- input_access.set_valid_region(win, output->valid_region());
-
- Status err = (window_changed)
- ? ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!")
- : Status{};
- return std::make_pair(err, win);
-}
-} // namespace
-
-CLHashtableLookupKernel::CLHashtableLookupKernel()
-    : _input(nullptr), _output(nullptr), _lookups(nullptr), _keys(nullptr), _hits(nullptr)
-{
-}
-
-Status CLHashtableLookupKernel::validate(const ITensorInfo *lookups, const ITensorInfo *keys,
- const ITensorInfo *input, const ITensorInfo *output,
- const ITensorInfo *hits)
-{
-  ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(lookups, keys, input, output, hits);
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(
-      input, 1, DataType::U8, DataType::S8, DataType::QASYMM8, DataType::U16, DataType::S16,
-      DataType::U32, DataType::S32, DataType::F16, DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(lookups, 1, DataType::S32);
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(keys, 1, DataType::S32);
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(hits, 1, DataType::U8, DataType::QASYMM8);
-  ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->tensor_shape().total_size() == 0,
-                                  "Output's shape was not set");
-
-  ARM_COMPUTE_RETURN_ERROR_ON(lookups->dimension(0) != hits->dimension(0) ||
-                              output->dimension(output->num_dimensions() - 1) !=
-                                  lookups->dimension(0));
-  ARM_COMPUTE_RETURN_ERROR_ON(input->num_dimensions() < 2 || input->num_dimensions() > 4);
-  ARM_COMPUTE_RETURN_ERROR_ON(lookups->num_dimensions() > 1);
-  ARM_COMPUTE_RETURN_ERROR_ON(keys->num_dimensions() > 1);
-  ARM_COMPUTE_RETURN_ERROR_ON(hits->num_dimensions() > 1);
-
- return Status{};
-}
-
-void CLHashtableLookupKernel::configure(const ICLTensor *lookups, const ICLTensor *keys,
- const ICLTensor *input, ICLTensor *output, ICLTensor *hits)
-{
-  ARM_COMPUTE_ERROR_THROW_ON(
-      validate(lookups->info(), keys->info(), input->info(), output->info(), hits->info()));
-
- _lookups = lookups;
- _keys = keys;
- _input = input;
- _output = output;
- _hits = hits;
-
- // Make _lookup_indices tensor
- _lookup_indices = arm_compute::support::cpp14::make_unique<CLTensor>();
- _lookup_indices->allocator()->init(
- TensorInfo(lookups->info()->tensor_shape(), lookups->info()->num_channels(), DataType::S32));
- _lookup_indices->allocator()->allocate();
-
- // Set kernel build options
- std::stringstream kernel_name;
- std::set<std::string> build_opts;
- kernel_name << "hashtable_lookup";
-
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration));
- build_opts.emplace("-DNUM_DIMS=" + support::cpp11::to_string(_input->info()->num_dimensions()));
-
- // Create kernel
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel(kernel_name.str(), build_opts));
-
- // Configure kernel window
- auto win_config = validate_and_configure_window(input->info(), output->info());
- ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
- ICLKernel::configure_internal(win_config.second);
-}
-
-void CLHashtableLookupKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(IKernel::window(), window);
-
- const_cast<ICLTensor *>(_lookups)->map(queue);
- const_cast<ICLTensor *>(_keys)->map(queue);
- _hits->map(queue);
- _lookup_indices->map(queue);
-
- // Set values of hits
- const int32_t *lookups_buf =
- reinterpret_cast<int32_t *>(const_cast<ICLTensor *>(_lookups)->buffer());
- const int32_t *keys_buf = reinterpret_cast<int32_t *>(const_cast<ICLTensor *>(_keys)->buffer());
- uint8_t *hits_buf = reinterpret_cast<uint8_t *>(_hits->buffer());
- int32_t *lookup_indices_buf = reinterpret_cast<int32_t *>(_lookup_indices->buffer());
-
- std::map<int32_t, size_t> key_map;
- const size_t keys_num = _keys->info()->dimension(0);
- for (size_t key_index = 0; key_index < keys_num; key_index++)
- {
- key_map[keys_buf[key_index]] = key_index;
- }
-
- const size_t lookups_num = _lookups->info()->dimension(0);
- for (size_t i = 0; i < lookups_num; ++i)
- {
- const auto lookup_value = lookups_buf[i];
- const auto it = key_map.find(lookup_value);
- if (it != key_map.end())
- {
-#if defined(DEBUG)
-      if (it->second >= keys_num)
- ARM_COMPUTE_ERROR("HashTable Lookup: index out of bounds.");
-#endif // defined(DEBUG)
- lookup_indices_buf[i] = static_cast<int32_t>(it->second);
- hits_buf[i] = static_cast<uint8_t>(1);
- }
- else
- {
- lookup_indices_buf[i] = -1;
- hits_buf[i] = static_cast<uint8_t>(0);
- }
- }
-
- const_cast<ICLTensor *>(_lookups)->unmap(queue);
- const_cast<ICLTensor *>(_keys)->unmap(queue);
- _hits->unmap(queue);
- _lookup_indices->unmap(queue);
-
- Window win = window.collapse(ICLKernel::window(), 2, 4);
-
- Window win_lookup;
- win_lookup.set(Window::DimX, Window::Dimension(0, 0, 0));
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, win);
- add_4D_tensor_argument(idx, _output, win);
- add_1D_tensor_argument(idx, _lookup_indices.get(), win_lookup);
-
- enqueue(queue, *this, win);
- } while (window.slide_window_slice_4D(win) && window.slide_window_slice_1D(win_lookup));
-}
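
Note that half of this kernel's work happens on the host: run() maps the tensors, builds a key-to-row map, and fills the hit flags and an internal S32 index tensor, so the OpenCL kernel only performs the final gather. That host-side half, extracted as a standalone sketch:

    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <vector>

    // Map each lookup key to its row index in `keys`, recording hits;
    // -1 marks a miss (mirrors the loop in run() above).
    void resolve_lookups(const std::vector<int32_t> &keys,
                         const std::vector<int32_t> &lookups,
                         std::vector<int32_t> &indices, std::vector<uint8_t> &hits)
    {
      std::map<int32_t, std::size_t> key_map;
      for (std::size_t k = 0; k < keys.size(); ++k)
        key_map[keys[k]] = k;

      indices.assign(lookups.size(), -1);
      hits.assign(lookups.size(), 0);
      for (std::size_t i = 0; i < lookups.size(); ++i)
      {
        const auto it = key_map.find(lookups[i]);
        if (it != key_map.end())
        {
          indices[i] = static_cast<int32_t>(it->second);
          hits[i] = 1;
        }
      }
    }
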
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp
deleted file mode 100644
index 80d99dd3b..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLNegKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output)
-{
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::S16, DataType::S32,
-                                                       DataType::F16, DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::S16, DataType::S32,
-                                                       DataType::F16, DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DIMENSIONS(input->tensor_shape(),
-                                                     output->tensor_shape());
-  ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
-
- return Status{};
-}
-
-} // namespace
-
-CLNegKernel::CLNegKernel() : _input(nullptr), _output(nullptr) {}
-
-void CLNegKernel::configure(const ICLTensor *input, ICLTensor *output)
-{
-
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info()));
-
- _input = input;
- _output = output;
-
- constexpr unsigned int num_elems_processed_per_iteration = 16;
-
- // Create kernel
- std::set<std::string> build_opts;
- build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type())));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("neg_tensor", build_opts));
-
- // Configure window
- Window win = calculate_max_window(*input->info(), Steps(num_elems_processed_per_iteration));
-
- AccessWindowHorizontal input_access(input->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
- update_window_and_padding(win, input_access, output_access);
- output_access.set_valid_region(win, input->info()->valid_region());
-
- ICLKernel::configure_internal(win);
-}
-
-void CLNegKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- Window collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ);
- Window slice = collapsed.first_slice_window_3D();
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input, slice);
- add_3D_tensor_argument(idx, _output, slice);
- enqueue(queue, *this, slice, lws_hint());
- } while (collapsed.slide_window_slice_3D(slice));
-}
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLNormalizationLayerExKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLNormalizationLayerExKernel.cpp
deleted file mode 100644
index 12bbe910f..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLNormalizationLayerExKernel.cpp
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLNormalizationLayerExKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibrary.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
- NormalizationLayerInfo norm_info)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(output);
-
- // Checks performed when output is configured
- if (output->total_size() != 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(input, output);
- }
-
- return Status{};
-}
-
-std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input, ITensorInfo *output,
- NormalizationLayerInfo norm_info)
-{
- // Output tensor auto initialization if not yet initialized
- auto_init_if_empty(*output, *input->clone());
-
- const unsigned int norm_size = norm_info.norm_size();
- bool is_in_map = norm_info.is_in_map();
-
- const unsigned int border_width = is_in_map ? std::min(norm_size / 2, 3U) : 0;
- const BorderSize border_size = BorderSize(0, border_width);
-
- const unsigned int num_elems_processed_per_iteration = 4;
- const unsigned int num_elems_read_per_iteration =
- is_in_map ? (num_elems_processed_per_iteration + 2 * (norm_size / 2))
- : num_elems_processed_per_iteration;
-
- Window win = calculate_max_window(*input, Steps(num_elems_processed_per_iteration));
-
- // We do not use a Rectangle window for IN_MAP_2D as we clamp the top and bottom accesses inside
- // the kernel, avoiding padding
- AccessWindowHorizontal input_access(input, -border_size.left, num_elems_read_per_iteration);
- AccessWindowHorizontal output_access(output, 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win, input_access, output_access);
-
- output_access.set_valid_region(win, input->valid_region());
-
- Status err = (window_changed)
- ? ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!")
- : Status{};
- return std::make_pair(err, win);
-}
-} // namespace
-
-CLNormalizationLayerExKernel::CLNormalizationLayerExKernel()
- : _input(nullptr), _output(nullptr), _border_size(0), _is_in_map(false)
-{
-}
-
-BorderSize CLNormalizationLayerExKernel::border_size() const { return _border_size; }
-
-void CLNormalizationLayerExKernel::configure(const ICLTensor *input, ICLTensor *output,
- NormalizationLayerInfo norm_info)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
-
- // Output tensor auto initialization if not yet initialized
- auto_init_if_empty(*output->info(), *input->info()->clone());
-
- // Perform validation step
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), norm_info));
-
-  _input = input;
-  _output = output;
-  _is_in_map = norm_info.is_in_map(); // used to pick the kernel name here and the collapse dimension in run()
-
- const unsigned int num_elems_processed_per_iteration = 4;
- const bool is_in_map_2D = (norm_info.type() == NormType::IN_MAP_2D);
-
- // Set build options
- CLBuildOptions build_opts;
- build_opts.add_option(("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type())));
- build_opts.add_option(
- ("-DCOEFF=" + float_to_string_with_full_precision(norm_info.scale_coeff())));
- build_opts.add_option(("-DBETA=" + float_to_string_with_full_precision(norm_info.beta())));
- build_opts.add_option(("-DKAPPA=" + float_to_string_with_full_precision(norm_info.kappa())));
- build_opts.add_option(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
- build_opts.add_option(("-DRADIUS=" + support::cpp11::to_string(norm_info.norm_size())));
- build_opts.add_option(("-DNUM_SLICES=" + support::cpp11::to_string(input->info()->dimension(2))));
- build_opts.add_option_if(is_in_map_2D, "-DIN_MAP_2D");
-
- // Create kernel
- std::string kernel_name =
- _is_in_map ? "normalization_layer_in_map" : "normalization_layer_cross_map";
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibrary::get().create_kernel(kernel_name, build_opts.options()));
-
- // Configure kernel window
- auto win_config = validate_and_configure_window(input->info(), output->info(), norm_info);
- ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
- ICLKernel::configure_internal(win_config.second);
-
- // Set config_id for enabling LWS tuning
- _config_id = "normalization_layer_";
- _config_id += lower_string(string_from_data_type(input->info()->data_type()));
- _config_id += "_";
- _config_id += support::cpp11::to_string(
- static_cast<std::underlying_type<NormType>::type>(norm_info.type()));
- _config_id += "_";
- _config_id += support::cpp11::to_string(norm_info.norm_size());
- _config_id += "_";
- _config_id += support::cpp11::to_string(input->info()->dimension(0));
- _config_id += "_";
- _config_id += support::cpp11::to_string(input->info()->dimension(1));
-}
-
-Status CLNormalizationLayerExKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
- NormalizationLayerInfo norm_info)
-{
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, output, norm_info));
- ARM_COMPUTE_RETURN_ON_ERROR(
- validate_and_configure_window(input->clone().get(), output->clone().get(), norm_info).first);
-
- return Status{};
-}
-
-void CLNormalizationLayerExKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(IKernel::window(), window);
-
- const int collapsed_dimension = _is_in_map ? Window::DimZ : 4;
- Window window_collapsed = window.collapse_if_possible(ICLKernel::window(), collapsed_dimension);
- Window slice = window_collapsed.first_slice_window_3D();
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input, slice);
- add_3D_tensor_argument(idx, _output, slice);
- enqueue(queue, *this, slice);
- } while (window_collapsed.slide_window_slice_3D(slice));
-}
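
The static validate() above deliberately mirrors configure() on bare ITensorInfo objects, so callers can pre-flight a configuration without creating any CL resources. A hedged sketch of that contract, with shape and norm parameters chosen arbitrarily:

    #include "arm_compute/core/CL/kernels/CLNormalizationLayerExKernel.h"

    using namespace arm_compute;

    // Dry-run validation on TensorInfo objects only; no CL context is needed.
    bool can_run_normalization(const TensorShape &shape)
    {
      TensorInfo in(shape, 1, DataType::F32);
      TensorInfo out(shape, 1, DataType::F32);
      NormalizationLayerInfo info(NormType::CROSS_MAP, 5 /* norm_size */);
      return CLNormalizationLayerExKernel::validate(&in, &out, info).error_code() ==
             ErrorCode::OK;
    }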
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp
deleted file mode 100644
index 241f8ae4d..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLPReLUKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate_info(const ITensorInfo *input, const ITensorInfo *alpha, const ITensorInfo *output)
-{
- const TensorShape &out_shape =
- TensorShape::broadcast_shape(input->tensor_shape(), alpha->tensor_shape());
-
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F16, DataType::F32,
- DataType::QASYMM8);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(alpha, 1, DataType::F16, DataType::F32,
- DataType::QASYMM8);
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
- "Inputs are not broadcast compatible");
- // Validate in case of configured output
- if (output->total_size() > 0)
- {
-    ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::F16, DataType::F32,
-                                                         DataType::QASYMM8);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
- "Wrong shape for output");
- }
- return Status{};
-}
-} // namespace
-
-CLPReLUKernel::CLPReLUKernel() : _input(nullptr), _alpha(nullptr), _output(nullptr) {}
-
-void CLPReLUKernel::configure(const ICLTensor *input, const ICLTensor *alpha, ICLTensor *output)
-{
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, alpha);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate(input->info(), alpha->info(), output->info()));
-
- _input = input;
- _alpha = alpha;
- _output = output;
-
- // Create kernel
- std::string kernel_name = "prelu";
- std::set<std::string> build_opts;
- build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type())));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
-
- if (is_data_type_quantized_asymmetric(input->info()->data_type()))
- {
- build_opts.emplace("-DOFF_IN1=" +
- support::cpp11::to_string(input->info()->quantization_info().offset));
- build_opts.emplace("-DOFF_IN2=" +
- support::cpp11::to_string(alpha->info()->quantization_info().offset));
- build_opts.emplace("-DOFF_OUT=" +
- support::cpp11::to_string(output->info()->quantization_info().offset));
- build_opts.emplace("-DSCALE_IN1=" +
- support::cpp11::to_string(input->info()->quantization_info().scale));
- build_opts.emplace("-DSCALE_IN2=" +
- support::cpp11::to_string(alpha->info()->quantization_info().scale));
- build_opts.emplace("-DSCALE_OUT=" +
- support::cpp11::to_string(output->info()->quantization_info().scale));
- kernel_name += "_qasymm8";
- }
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- const std::pair<TensorShape, ValidRegion> broadcast_pair =
- ITensorInfo::broadcast_shape_and_valid_region(*input->info(), *alpha->info());
-
- const TensorShape &out_shape = broadcast_pair.first;
- const ValidRegion &valid_region = broadcast_pair.second;
-
- // Auto initialize output if not initialized
- {
- set_shape_if_empty(*output->info(), out_shape);
-
- if (input->info()->data_type() == DataType::F16 && alpha->info()->data_type() == DataType::F16)
- {
- set_format_if_unknown(*output->info(), Format::F16);
- }
- else if (input->info()->data_type() == DataType::F32 ||
- alpha->info()->data_type() == DataType::F32)
- {
- set_format_if_unknown(*output->info(), Format::F32);
- }
- }
-
- Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
- Window win_input1 = win.broadcast_if_dimension_le_one(*input->info());
- Window win_input2 = win.broadcast_if_dimension_le_one(*alpha->info());
-
- AccessWindowHorizontal input1_access(input->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal input2_access(alpha->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win_input1, input1_access) ||
- update_window_and_padding(win_input2, input2_access) ||
- update_window_and_padding(win, output_access);
-
- output_access.set_valid_region(win, valid_region);
-
- ICLKernel::configure_internal(win);
-}
-
-void CLPReLUKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &in_shape1 = _input->info()->tensor_shape();
- const TensorShape &in_shape2 = _alpha->info()->tensor_shape();
- const TensorShape &out_shape = _output->info()->tensor_shape();
-
- bool can_collapse = true;
- if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
- {
- can_collapse =
- (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
- for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); d++)
- {
- can_collapse = (in_shape1[d] == in_shape2[d]);
- }
- }
-
- bool has_collapsed = false;
- Window collapsed =
- can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
- : window;
-
- const TensorShape &in_shape1_collapsed =
- has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
- const TensorShape &in_shape2_collapsed =
- has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
-
- Window slice = collapsed.first_slice_window_3D();
- Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
- Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input, slice_input1);
- add_3D_tensor_argument(idx, _alpha, slice_input2);
- add_3D_tensor_argument(idx, _output, slice);
-
- enqueue(queue, *this, slice);
-
- collapsed.slide_window_slice_3D(slice_input1);
- collapsed.slide_window_slice_3D(slice_input2);
- } while (collapsed.slide_window_slice_3D(slice));
-}
-
-BorderSize CLPReLUKernel::border_size() const
-{
- const unsigned int replicateSize =
- _output->info()->dimension(0) -
- std::min(_input->info()->dimension(0), _alpha->info()->dimension(0));
- const unsigned int border =
- std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
- return BorderSize(0, border, 0, 0);
-}
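
Behind the windowing machinery above, the computation itself is just out = x >= 0 ? x : alpha * x, with alpha broadcast along the dimensions where its extent is 1. A minimal scalar reference, assuming the common per-channel-alpha case:

    #include <cstddef>
    #include <vector>

    // Scalar sketch of the kernel's semantics: one alpha per channel, broadcast
    // over each channel's plane (the CL kernel supports general broadcasting).
    std::vector<float> prelu_ref(const std::vector<float> &x, const std::vector<float> &alpha,
                                 std::size_t channels, std::size_t plane)
    {
      std::vector<float> y(x.size());
      for (std::size_t c = 0; c < channels; ++c)
        for (std::size_t i = 0; i < plane; ++i)
        {
          const std::size_t k = c * plane + i;
          y[k] = (x[k] >= 0.f) ? x[k] : alpha[c] * x[k];
        }
      return y;
    }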
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLPadLayerKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLPadLayerKernel.cpp
deleted file mode 100644
index 99b54c822..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLPadLayerKernel.cpp
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLPadLayerKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input_info, const ITensorInfo *output_info,
- const ITensorInfo *pad_size_info)
-{
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input_info, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::S32, DataType::F16,
- DataType::F32);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output_info, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::S32, DataType::F16,
- DataType::F32);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(pad_size_info, 1, DataType::S32);
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(input_info->num_dimensions() == 0 ||
-                                      input_info->num_dimensions() > 4,
-                                  "Pad kernel supports up to 4-D input tensor");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(
-      input_info->num_dimensions() != output_info->num_dimensions(),
-      "output tensor should have same number of dimensions as input tensor");
-
- if (input_info->data_type() == DataType::QASYMM8)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(input_info->quantization_info() !=
- output_info->quantization_info(),
- "The input and output quantization info are different!");
- }
-
- return Status{};
-}
-
-} // namespace
-
-CLPadLayerKernel::CLPadLayerKernel() : _input(nullptr), _output(nullptr), _pad_size(nullptr) {}
-
-void CLPadLayerKernel::configure(const ICLTensor *input, ICLTensor *output, ICLTensor *pad_size)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output, pad_size);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), pad_size->info()));
-
- _input = input;
- _output = output;
- _pad_size = pad_size;
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
- build_opts.emplace("-DIB=" + support::cpp11::to_string(input->info()->dimension(3)));
- build_opts.emplace("-DIW=" + support::cpp11::to_string(input->info()->dimension(0)));
- build_opts.emplace("-DIH=" + support::cpp11::to_string(input->info()->dimension(1)));
- build_opts.emplace("-DID=" + support::cpp11::to_string(input->info()->dimension(2)));
- if (input->info()->data_type() == DataType::QASYMM8)
- {
- build_opts.emplace("-DZERO_VALUE=" +
- support::cpp11::to_string(input->info()->quantization_info().offset));
- }
- else
- {
- build_opts.emplace("-DZERO_VALUE=" + support::cpp11::to_string(0));
- }
-
- // Create kernel
- _kernel = static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("pad", build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*output->info(), Steps());
-
- Coordinates coord;
- coord.set_num_dimensions(output->info()->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-void CLPadLayerKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
-
- _pad_size->map(queue);
-
-  // Only the leading (pre) padding values are consumed, selected by tensor rank:
-  // batch (up), height (top), width (left) and depth (front)
- int rank = _pad_size->info()->dimension(1);
-
- auto pad_batch_up =
- (rank == 4) ? *reinterpret_cast<const int32_t *>(_pad_size->ptr_to_element({0, 0})) : 0;
- auto pad_height_top =
- (rank >= 2)
- ? *reinterpret_cast<const int32_t *>(_pad_size->ptr_to_element({0, (rank == 2) ? 0 : 1}))
- : 0;
- auto pad_width_left = (rank >= 1)
- ? *reinterpret_cast<const int32_t *>(
- _pad_size->ptr_to_element({0, (rank == 4) ? 2 : rank - 1}))
- : 0;
- auto pad_depth_front =
- (rank >= 3)
- ? *reinterpret_cast<const int32_t *>(_pad_size->ptr_to_element({0, (rank == 3) ? 0 : 3}))
- : 0;
-
- _pad_size->unmap(queue);
-
-  // Padding values packed into a cl_int4 and passed to the kernel
- const cl_int4 paddingValues = {
- {static_cast<cl_int>(pad_width_left), static_cast<cl_int>(pad_height_top),
- static_cast<cl_int>(pad_depth_front), static_cast<cl_int>(pad_batch_up)}};
-
- Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
-  // Setup input slice
- Window slice_in(slice_out);
- slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_in.set(3, Window::Dimension(0, 0, 0));
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- _kernel.setArg<cl_int4>(idx++, paddingValues);
- enqueue(queue, *this, slice_out);
- } while (window.slide_window_slice_4D(slice_out) && window.slide_window_slice_4D(slice_in));
-}
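
The rank-dependent reads in run() above amount to a small lookup: pad_size is a (2 x rank) table of (pre, post) pairs, and only the pre entries feed the kernel. Reconstructed from those reads, a sketch of the row selection (pre[i] stands for the element at coordinates {0, i}):

    #include <array>
    #include <cstdint>

    // Pick the leading (pre) pads in {width_left, height_top, depth_front, batch_up}
    // order, i.e. the order they are packed into the cl_int4 above.
    std::array<int32_t, 4> leading_pads(const int32_t *pre, int rank)
    {
      std::array<int32_t, 4> p{{0, 0, 0, 0}};
      if (rank >= 1) p[0] = pre[rank == 4 ? 2 : rank - 1]; // width (left)
      if (rank >= 2) p[1] = pre[rank == 2 ? 0 : 1];        // height (top)
      if (rank >= 3) p[2] = pre[rank == 3 ? 0 : 3];        // depth (front)
      if (rank == 4) p[3] = pre[0];                        // batch (up)
      return p;
    }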
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLPermuteExKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLPermuteExKernel.cpp
deleted file mode 100644
index aa094761c..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLPermuteExKernel.cpp
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLPermuteExKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-#include "arm_compute/core/utils/misc/ShapeCalculator.h"
-
-using namespace arm_compute;
-
-namespace
-{
-TensorShape get_output_shape(const ITensorInfo *input, const PermutationVector &perm)
-{
- TensorShape output_shape = input->tensor_shape();
- permute(output_shape, perm);
- return output_shape;
-}
-
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
- const PermutationVector &perm)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(
- input, 1, DataType::U8, DataType::S8, DataType::QASYMM8, DataType::U16, DataType::S16,
- DataType::U32, DataType::S32, DataType::F16, DataType::F32);
-
- const TensorShape output_shape =
- misc::shape_calculator::compute_permutation_output_shape(*input, perm);
-
- // Validate configured output
- if (output->total_size() != 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DIMENSIONS(output->tensor_shape(), output_shape);
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
- }
- return Status{};
-}
-} // namespace
-
-CLPermuteExKernel::CLPermuteExKernel() : _input(nullptr), _output(nullptr), _perm() {}
-
-void CLPermuteExKernel::configure(const ICLTensor *input, ICLTensor *output,
- const PermutationVector &perm)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), perm));
-
- _input = input;
- _output = output;
- _perm = perm;
-
- const TensorShape output_shape = get_output_shape(input->info(), perm);
-  // Output auto initialization if not yet initialized
- auto_init_if_empty(*output->info(), input->info()->clone()->set_tensor_shape(output_shape));
-
- // Create kernel
- std::set<std::string> build_opts;
-
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DDEPTH_IN=" + support::cpp11::to_string(input->info()->dimension(2)));
-
-  // New positions of batch(N), height(H), width(W) and channel(C) based on permutation vector
- build_opts.emplace("-DP1=" + support::cpp11::to_string(perm[0]));
- build_opts.emplace("-DP2=" + support::cpp11::to_string(perm[1]));
- build_opts.emplace("-DP3=" + support::cpp11::to_string(perm[2]));
- build_opts.emplace("-DP4=" + support::cpp11::to_string(perm[3]));
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel("permute_generic", build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*input->info(), Steps());
-
- // The CLPermute doesn't need padding so update_window_and_padding() can be skipped
- Coordinates coord;
- coord.set_num_dimensions(output->info()->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-Status CLPermuteExKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
- const PermutationVector &perm)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, output, perm));
-
- return Status{};
-}
-
-void CLPermuteExKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
-
- Window slice_in = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
- // Setup output slice
- Window slice_out(slice_in);
- slice_out.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_out.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_out.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_out.set(3, Window::Dimension(0, 0, 0));
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- enqueue(queue, *this, slice_in);
- } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
-}
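
get_output_shape() above defers to ACL's permute() helper. Assuming ACL's convention that output dimension i takes input dimension perm[i], the same computation on plain arrays looks like:

    #include <array>
    #include <cstddef>

    // Plain-array sketch of the shape permutation used above.
    template <std::size_t N>
    std::array<std::size_t, N> permute_shape(const std::array<std::size_t, N> &in,
                                             const std::array<std::size_t, N> &perm)
    {
      std::array<std::size_t, N> out{};
      for (std::size_t i = 0; i < N; ++i)
        out[i] = in[perm[i]];
      return out;
    }
    // e.g. permute_shape<4>({W, H, C, N}, {2, 0, 1, 3}) yields {C, W, H, N}.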
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLPixelWiseDivisionKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLPixelWiseDivisionKernel.cpp
deleted file mode 100644
index b985aa737..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLPixelWiseDivisionKernel.cpp
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLPixelWiseDivisionKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate_arguments(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, float scale, ConvertPolicy overflow_policy,
- RoundingPolicy rounding_policy)
-{
- ARM_COMPUTE_UNUSED(overflow_policy);
- ARM_COMPUTE_UNUSED(rounding_policy);
-
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input1, 1, DataType::U8, DataType::S16,
- DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input2, 1, DataType::U8, DataType::S16,
- DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(scale < 0, "Scale cannot be negative.");
-
- const TensorShape &out_shape =
- TensorShape::broadcast_shape(input1->tensor_shape(), input2->tensor_shape());
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
- "Inputs are not broadcast compatible");
-
- // Validate in case of configured output
- if (output->total_size() > 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::S16,
- DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- output->data_type() == DataType::U8 &&
- (input1->data_type() != DataType::U8 || input2->data_type() != DataType::U8),
- "Output can only be U8 if both inputs are U8");
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
- "Wrong shape for output");
- }
-
- return Status{};
-}
-
-std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input1, ITensorInfo *input2,
- ITensorInfo *output)
-{
- const std::pair<TensorShape, ValidRegion> broadcast_pair =
- ITensorInfo::broadcast_shape_and_valid_region(*input1, *input2);
- const TensorShape &out_shape = broadcast_pair.first;
- const ValidRegion &valid_region = broadcast_pair.second;
-
- // Auto initialize output if not initialized
- {
- set_shape_if_empty(*output, out_shape);
-
- if (input1->data_type() == DataType::S16 || input2->data_type() == DataType::S16)
- {
- set_format_if_unknown(*output, Format::S16);
- }
- else if (input1->data_type() == DataType::F32 || input2->data_type() == DataType::F32)
- {
- set_format_if_unknown(*output, Format::F32);
- }
- }
-
- Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
- Window win_input1 = win.broadcast_if_dimension_le_one(*input1);
- Window win_input2 = win.broadcast_if_dimension_le_one(*input2);
-
- AccessWindowHorizontal input1_access(input1, 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal input2_access(input2, 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output, 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win_input1, input1_access) ||
- update_window_and_padding(win_input2, input2_access) ||
- update_window_and_padding(win, output_access);
-
- output_access.set_valid_region(win, valid_region);
-
- Status err = (window_changed)
- ? ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!")
- : Status{};
- return std::make_pair(err, win);
-}
-} // namespace
-
-CLPixelWiseDivisionKernel::CLPixelWiseDivisionKernel()
- : _input1(nullptr), _input2(nullptr), _output(nullptr)
-{
-}
-
-void CLPixelWiseDivisionKernel::configure(const ICLTensor *input1, const ICLTensor *input2,
- ICLTensor *output, float scale,
- ConvertPolicy overflow_policy,
- RoundingPolicy rounding_policy)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input1, input2, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input1->info(), input2->info(), output->info(),
- scale, overflow_policy, rounding_policy));
-
- // Configure kernel window
- auto win_config = validate_and_configure_window(input1->info(), input2->info(), output->info());
- ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
-
- _input1 = input1;
- _input2 = input2;
- _output = output;
-
- int scale_int = -1;
- // Extract sign, exponent and mantissa
- int exponent = 0;
- float normalized_mantissa = std::frexp(scale, &exponent);
-  // Use int scaling if the factor is equal to 1/2^n for 0 <= n <= 15:
-  // frexp returns a mantissa of 0.5, so the exponent lies in the range -14 <= e <= 1
-  // (scale = 0.5 * 2^e = 2^(e - 1) = 2^(-n), hence e = 1 - n)
- if ((normalized_mantissa == 0.5f) && (-14 <= exponent) && (exponent <= 1))
- {
-    // Store n itself: subtract 1 from the exponent (compensating for frexp's
-    // mantissa of 0.5) and take the absolute value, since scale = 1/2^n
- scale_int = std::abs(exponent - 1);
- }
-
- std::string data_type;
- std::string compute_type;
- // Check if it has float inputs and output
- if (is_data_type_float(input1->info()->data_type()) ||
- is_data_type_float(input2->info()->data_type()))
- {
- scale_int = -1;
- compute_type = (input1->info()->data_type() == DataType::F32 ||
- input2->info()->data_type() == DataType::F32)
- ? "float"
- : "half";
- data_type = "DATA_TYPE_FLOAT";
- }
- else
- {
- if (input1->info()->data_type() == DataType::S16 ||
- input2->info()->data_type() == DataType::S16)
- {
- compute_type = "int";
- }
- else
- {
- compute_type = "ushort";
- }
- data_type = "DATA_TYPE_INT";
- }
-
- // Construct kernel name
- std::string kernel_name = "pixelwise_div";
- kernel_name += (scale_int >= 0) ? "_int" : "_float";
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace(
- (overflow_policy == ConvertPolicy::WRAP || is_data_type_float(output->info()->data_type()))
- ? "-DWRAP"
- : "-DSATURATE");
- build_opts.emplace((rounding_policy == RoundingPolicy::TO_ZERO) ? "-DROUND=_rtz"
- : "-DROUND=_rte");
- build_opts.emplace("-DDATA_TYPE_IN1=" + get_cl_type_from_data_type(input1->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_IN2=" + get_cl_type_from_data_type(input2->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_OUT=" + get_cl_type_from_data_type(output->info()->data_type()));
- build_opts.emplace("-DDATA_TYPE_RES=" + compute_type);
- build_opts.emplace("-D" + data_type);
-
- // Create kernel
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- // Set scale argument
- unsigned int idx = 3 * num_arguments_per_3D_tensor(); // Skip the inputs and output parameters
-
- if (scale_int >= 0)
- {
- _kernel.setArg(idx++, scale_int);
- }
- else
- {
- _kernel.setArg(idx++, scale);
- }
-
- ICLKernel::configure_internal(win_config.second);
-}
-
-Status CLPixelWiseDivisionKernel::validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, float scale,
- ConvertPolicy overflow_policy,
- RoundingPolicy rounding_policy)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input1, input2, output);
- ARM_COMPUTE_RETURN_ON_ERROR(
- validate_arguments(input1, input2, output, scale, overflow_policy, rounding_policy));
- ARM_COMPUTE_RETURN_ON_ERROR(validate_and_configure_window(input1->clone().get(),
- input2->clone().get(),
- output->clone().get())
- .first);
-
- return Status{};
-}
-
-void CLPixelWiseDivisionKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &in_shape1 = _input1->info()->tensor_shape();
- const TensorShape &in_shape2 = _input2->info()->tensor_shape();
- const TensorShape &out_shape = _output->info()->tensor_shape();
-
- bool can_collapse = true;
- if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
- {
- can_collapse =
- (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
- for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); ++d)
- {
- can_collapse = (in_shape1[d] == in_shape2[d]);
- }
- }
-
- bool has_collapsed = false;
- Window collapsed =
- can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
- : window;
-
- const TensorShape &in_shape1_collapsed =
- has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
- const TensorShape &in_shape2_collapsed =
- has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
-
- Window slice = collapsed.first_slice_window_3D();
- Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
- Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input1, slice_input1);
- add_3D_tensor_argument(idx, _input2, slice_input2);
- add_3D_tensor_argument(idx, _output, slice);
- enqueue(queue, *this, slice);
-
- collapsed.slide_window_slice_3D(slice_input1);
- collapsed.slide_window_slice_3D(slice_input2);
- } while (collapsed.slide_window_slice_3D(slice));
-}
-
-BorderSize CLPixelWiseDivisionKernel::border_size() const
-{
- const unsigned int replicateSize =
- _output->info()->dimension(0) -
- std::min(_input1->info()->dimension(0), _input2->info()->dimension(0));
- const unsigned int border =
- std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
- return BorderSize(0, border, 0, 0);
-}
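
The frexp() dance in configure() above detects when the scale is an exact power-of-two reciprocal, so the kernel can shift instead of multiply. A standalone sketch of that detection (hypothetical helper name):

    #include <cmath>
    #include <cstdlib>

    // Returns n if scale == 1/2^n for 0 <= n <= 15, otherwise -1.
    int scale_as_shift(float scale)
    {
      int exponent = 0;
      const float mantissa = std::frexp(scale, &exponent);
      if (mantissa == 0.5f && exponent >= -14 && exponent <= 1)
        return std::abs(exponent - 1);
      return -1;
    }
    // scale_as_shift(0.125f) == 3, since 0.125 = 0.5 * 2^-2 and n = 1 - (-2) = 3.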
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp
deleted file mode 100644
index f581780e1..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLReduceOperationKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-namespace
-{
-// NOTE This is necessary because it is not guaranteed that the axis positions of input and output
-// are the same.
-const TensorShape inferOutputShape(const TensorShape &input_shape, const uint32_t axis)
-{
- TensorShape out_shape{input_shape};
-
- out_shape.set(axis, 1);
-
- return out_shape;
-}
-} // namespace
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, const uint32_t axis,
- ReduceOperation op)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, output);
-
- if (output->total_size() != 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
- }
-
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::QASYMM8, DataType::F16,
- DataType::F32, DataType::S32);
- if (op == ReduceOperation::MEAN || op == ReduceOperation::SUM)
- {
-    ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->data_type() == DataType::QASYMM8,
-                                    "QASYMM8 is not supported yet");
- }
-  ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->tensor_shape().total_size() == 0,
-                                  "Output's shape should not be empty");
-
- const auto num_dimensions = input->tensor_shape().num_dimensions();
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(
-      axis >= num_dimensions,
-      "axis must be greater than or equal to 0 and less than the input's rank");
-
- const TensorShape output_shape = inferOutputShape(input->tensor_shape(), axis);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(output_shape.total_size() != output->tensor_shape().total_size(),
- "output shape's size does not match axis");
-
- return Status{};
-}
-} // namespace
-
-CLReduceOperationKernel::CLReduceOperationKernel() : _input(nullptr), _output(nullptr), _axis() {}
-
-void CLReduceOperationKernel::configure(const ICLTensor *input, ICLTensor *output,
- const uint32_t axis, ReduceOperation op)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
-
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), axis, op));
-
- _input = input;
- _output = output;
- _axis = axis;
-
- std::unique_ptr<ITensorInfo> output_info = output->info()->clone();
- output_info->set_tensor_shape(inferOutputShape(input->info()->tensor_shape(), axis));
-
- // Construct kernel name
- std::string kernel_name;
- int op_code = 0;
- if (op == ReduceOperation::MAX)
- {
- kernel_name = "reduce_min_max";
- op_code = 1;
- }
- else if (op == ReduceOperation::MIN)
- {
- kernel_name = "reduce_min_max";
- op_code = 2;
- }
- else if (op == ReduceOperation::SUM)
- {
- kernel_name = "reduce_sum_mean";
- op_code = 3;
- }
- else if (op == ReduceOperation::MEAN)
- {
- kernel_name = "reduce_sum_mean";
- op_code = 4;
- }
- else
-    throw std::runtime_error("Operation not supported yet");
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(output_info->data_type()));
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output_info->dimension(2)));
- build_opts.emplace("-DOP_CODE=" + support::cpp11::to_string(op_code));
-
- // Create kernel
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*output_info, Steps());
-
- Coordinates coord;
- coord.set_num_dimensions(output_info->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output_info->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-Status CLReduceOperationKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
- const uint32_t axis, ReduceOperation op)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, output, axis, op));
-
- return Status{};
-}
-
-void CLReduceOperationKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &shape_in = _input->info()->tensor_shape();
-
- unsigned int idx = 2 * num_arguments_per_4D_tensor(); // Skip the input and output parameters
-
- _kernel.setArg<cl_int>(idx++, _axis);
- _kernel.setArg<cl_int>(idx++, shape_in[_axis]);
-
- // Support dimensions up to 4
- Window slice_out = window.collapse(ICLKernel::window(), 2, 4);
-
- // Setup input slice
- Window slice_in(slice_out);
- slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_in.set(3, Window::Dimension(0, 0, 0));
-
-  // Save the output's shape so that it can be restored at the end of this method
- // TODO Remove changing and recovering output's shape if it is guaranteed that the axis positions
- // of input and output are the same
- const TensorShape shape_out = _output->info()->tensor_shape();
- _output->info()->set_tensor_shape(inferOutputShape(shape_in, _axis));
-
- idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- enqueue(queue, *this, slice_out);
-
-  // Restore the output tensor's original shape
- _output->info()->set_tensor_shape(shape_out);
-}
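
inferOutputShape() above keeps the rank and collapses the reduced axis to one element, which is also what the axis and shape checks in validate() enforce. The same contract on a plain vector:

    #include <cstdint>
    #include <vector>

    // Reducing over `axis` keeps the rank and collapses that dimension to 1.
    std::vector<uint32_t> infer_reduced_shape(std::vector<uint32_t> shape, uint32_t axis)
    {
      shape.at(axis) = 1;
      return shape;
    }
    // e.g. infer_reduced_shape({4, 3, 2}, 1) yields {4, 1, 2}.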
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp
deleted file mode 100644
index 6b0697e89..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp
+++ /dev/null
@@ -1,238 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *block_size,
- const ITensorInfo *padding_size, const ITensorInfo *output)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::F16, DataType::S32,
- DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(block_size, 1, DataType::S32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(padding_size, 1, DataType::S32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::F16, DataType::S32,
- DataType::F32);
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->num_dimensions() != output->num_dimensions(),
- "The number of dimensions of input should be equal to output");
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->data_layout() != output->data_layout(),
- "The input and output layouts are different!");
-
- // TODO Support other cases
- if (input->num_dimensions() == 4 && input->data_layout() == DataLayout::NCHW)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(2) != output->dimension(2),
- "Input Depth should be equal to Output Depth");
-    ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size->dimension(0) != 2 ||
-                                        padding_size->dimension(1) != 2,
-                                    "Only 2-dimensional spatial blocks are supported");
- }
- else if (input->num_dimensions() == 4 && input->data_layout() == DataLayout::NHWC)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(0) != output->dimension(0),
- "Input Depth should be equal to Output Depth");
-    ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size->dimension(0) != 2 ||
-                                        padding_size->dimension(1) != 2,
-                                    "Only 2-dimensional spatial blocks are supported");
- }
- else
- {
- ARM_COMPUTE_RETURN_ERROR_MSG("CLSpaceToBatchNDKernel supports only 4-dimensional input");
- }
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->num_dimensions() < 2 || input->num_dimensions() > 4,
-                                  "CLSpaceToBatchNDKernel supports dimensions up to 4");
-
- if (input->data_type() == DataType::QASYMM8)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->quantization_info() != output->quantization_info(),
- "The input and output quantization info are different!");
- }
-
- return Status{};
-}
-
-} // namespace
-
-CLSpaceToBatchNDKernel::CLSpaceToBatchNDKernel() : _input(nullptr), _output(nullptr) {}
-
-void CLSpaceToBatchNDKernel::configure(const ICLTensor *input, const ICLTensor *block_size,
- const ICLTensor *padding_size, ICLTensor *output)
-{
-
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_ERROR_THROW_ON(
- validate_arguments(input->info(), block_size->info(), padding_size->info(), output->info()));
-
- _input = input;
- _block_size = block_size;
- _padding_size = padding_size;
- _output = output;
-
- // Set kernel build options
- // TODO Support other cases
- std::string kernel_name = "space_to_batch_4d";
- std::set<std::string> build_opts;
- Window win;
-
- if (input->info()->data_layout() == DataLayout::NCHW)
- {
- kernel_name += "_nchw";
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
- build_opts.emplace("-DHEIGHT_IN=" + support::cpp11::to_string(input->info()->dimension(1)));
- build_opts.emplace("-DWIDTH_IN=" + support::cpp11::to_string(input->info()->dimension(0)));
-
- win = calculate_max_window(*output->info(), Steps());
-
- Coordinates coord;
- coord.set_num_dimensions(output->info()->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
- }
- else if (input->info()->data_layout() == DataLayout::NHWC)
- {
- kernel_name += "_nhwc";
- build_opts.emplace("-DHEIGHT_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
- build_opts.emplace("-DHEIGHT_IN=" + support::cpp11::to_string(input->info()->dimension(2)));
- build_opts.emplace("-DWIDTH_IN=" + support::cpp11::to_string(input->info()->dimension(1)));
- build_opts.emplace("-DVEC_SIZE=" +
- support::cpp11::to_string(num_elems_processed_per_iteration));
-
- win = calculate_max_window(*output->info(), Steps(num_elems_processed_per_iteration));
- AccessWindowHorizontal input_access(input->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win, input_access, output_access);
- input_access.set_valid_region(win, output->info()->valid_region());
-
- if (window_changed)
- {
- ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!");
- }
- }
- else
- {
- ARM_COMPUTE_ERROR("Unsupported layout");
- }
-
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DBATCH_IN=" + support::cpp11::to_string(input->info()->dimension(3)));
- if (input->info()->data_type() == DataType::QASYMM8)
- {
- build_opts.emplace("-DZERO_VALUE=" +
- support::cpp11::to_string(input->info()->quantization_info().offset));
- }
- else
- {
- build_opts.emplace("-DZERO_VALUE=" + support::cpp11::to_string(0));
- }
-
- // Create kernel
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
-
- // Configure kernel window
- ICLKernel::configure_internal(win);
-}
-
-void CLSpaceToBatchNDKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
-
-#if defined(DEBUG)
- const_cast<ICLTensor *>(_block_size)->map(queue);
- const_cast<ICLTensor *>(_padding_size)->map(queue);
-
- const size_t num_dimensions = _input->info()->num_dimensions();
-  const size_t num_spatial_dimensions = _block_size->info()->dimension(0);
-  int32_t batch_size = _input->info()->dimension(num_dimensions - 1);
-  for (size_t i = 0; i < num_spatial_dimensions; ++i)
- {
- const int32_t block_size = *reinterpret_cast<int32_t *>(_block_size->ptr_to_element({i}));
- const int32_t padding_size_pre =
- *reinterpret_cast<int32_t *>(_padding_size->ptr_to_element({0, i}));
- const int32_t padding_size_post =
- *reinterpret_cast<int32_t *>(_padding_size->ptr_to_element({1, i}));
-
- ARM_COMPUTE_ERROR_ON_MSG(block_size < 1, "Block size should be greater than or equal to 1");
-    ARM_COMPUTE_ERROR_ON_MSG(padding_size_pre < 0 || padding_size_post < 0,
-                             "Padding size should be greater than or equal to 0");
-
- if (num_dimensions == 4 && _input->info()->data_layout() == DataLayout::NCHW)
- {
- ARM_COMPUTE_ERROR_ON_MSG(
- _output->info()->dimension(i) !=
- (_input->info()->dimension(i) + padding_size_pre + padding_size_post) / block_size,
- "Dimension value of spatial block does not match output's dimension value");
- }
- else
- {
- ARM_COMPUTE_ERROR_ON_MSG(
-          _output->info()->dimension(num_dimensions - num_spatial_dimensions - 1 + i) !=
-              (_input->info()->dimension(num_dimensions - num_spatial_dimensions - 1 + i) +
- padding_size_pre + padding_size_post) /
- block_size,
- "Dimension value of spatial block does not match output's dimension value");
- }
-
- batch_size *= block_size;
- }
-  ARM_COMPUTE_ERROR_ON_MSG(
-      _output->info()->dimension(num_dimensions - 1) != batch_size,
-      "Output batch size should be equal to input batch size * (product of all block sizes)");
-
- const_cast<ICLTensor *>(_block_size)->unmap(queue);
- const_cast<ICLTensor *>(_padding_size)->unmap(queue);
-#endif // defined(DEBUG)
-
- Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
-  // Setup input slice
- Window slice_in(slice_out);
- slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_in.set(3, Window::Dimension(0, 0, 0));
-
- // Set block size window
- Window win_block = calculate_max_window(*_block_size->info(), Steps());
-
- // Set padding size window
- Window win_padding = calculate_max_window(*_padding_size->info(), Steps());
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- add_1D_tensor_argument(idx, _block_size, win_block);
- add_2D_tensor_argument(idx, _padding_size, win_padding);
- enqueue(queue, *this, slice_out);
- } while (window.slide_window_slice_4D(slice_out) && window.slide_window_slice_4D(slice_in));
-}
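
The DEBUG-only assertions in run() above pin down the SpaceToBatchND shape relation: each spatial extent is padded then divided by its block size, and the batch grows by the product of all block sizes. As a standalone sketch (hypothetical names):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct S2BShape
    {
      std::vector<int32_t> spatial;
      int32_t batch;
    };

    S2BShape space_to_batch_shape(const std::vector<int32_t> &in_spatial, int32_t in_batch,
                                  const std::vector<int32_t> &block,
                                  const std::vector<int32_t> &pad_pre,
                                  const std::vector<int32_t> &pad_post)
    {
      S2BShape out{{}, in_batch};
      for (std::size_t i = 0; i < in_spatial.size(); ++i)
      {
        out.spatial.push_back((in_spatial[i] + pad_pre[i] + pad_post[i]) / block[i]);
        out.batch *= block[i]; // batch grows by the product of all block sizes
      }
      return out;
    }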
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp
deleted file mode 100644
index 5d6329edc..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
- const int32_t block_size)
-{
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::S32, DataType::F16,
- DataType::F32);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
- DataType::S16, DataType::S32, DataType::F16,
- DataType::F32);
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size < 1,
-                                  "Block size should be greater than or equal to 1.");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(3) != output->dimension(3),
-                                  "Input batch should be equal to Output batch");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG(
-      input->dimension(2) * block_size * block_size != output->dimension(2),
-      "Output depth should be equal to (input depth * block size * block size)");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG((input->dimension(0) % block_size) ||
-                                      (input->dimension(1) % block_size),
-                                  "Input height and width should be divisible by block size");
-
-  ARM_COMPUTE_RETURN_ERROR_ON_MSG((output->dimension(0) != (input->dimension(0) / block_size)) ||
-                                      (output->dimension(1) != (input->dimension(1) / block_size)),
-                                  "Output height and width should be equal to "
-                                  "input_height/blocksize and input_width/blocksize respectively");
-
- return Status{};
-}
-
-} // namespace
-
-CLSpaceToDepthKernel::CLSpaceToDepthKernel() : _input(nullptr), _output(nullptr) {}
-
-void CLSpaceToDepthKernel::configure(const ICLTensor *input, ICLTensor *output,
- const int32_t block_size)
-{
-
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), block_size));
-
- _input = input;
- _output = output;
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DBLOCK_SIZE=" + support::cpp11::to_string(block_size));
- build_opts.emplace("-DDEPTH_IN=" + support::cpp11::to_string(input->info()->dimension(2)));
-
- // Create kernel
- _kernel =
- static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("space_to_depth", build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*input->info(), Steps());
-
- Coordinates coord;
- coord.set_num_dimensions(output->info()->num_dimensions());
- output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
-
- ICLKernel::configure_internal(win);
-}
-
-void CLSpaceToDepthKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
-
- Window slice_in = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
- // Setup output slice
- Window slice_out(slice_in);
- slice_out.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_out.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_out.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_out.set(3, Window::Dimension(0, 0, 0));
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- enqueue(queue, *this, slice_in);
- } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
-}
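
The checks in validate() above encode the SpaceToDepth shape relation: spatial extents shrink by the block size, depth grows by block^2 and batch is unchanged. As a standalone sketch (hypothetical names):

    #include <cstdint>

    struct Dims4
    {
      int32_t w, h, c, n;
    };

    // Output shape of SpaceToDepth for an input of shape {w, h, c, n}.
    Dims4 space_to_depth_shape(const Dims4 &in, int32_t block)
    {
      return {in.w / block, in.h / block, in.c * block * block, in.n};
    }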
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLSquaredDifferenceKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLSquaredDifferenceKernel.cpp
deleted file mode 100644
index 260bc39f1..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLSquaredDifferenceKernel.cpp
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLSquaredDifferenceKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-namespace
-{
-constexpr unsigned int num_elems_processed_per_iteration = 16;
-
-Status validate(const ITensorInfo *input1, const ITensorInfo *input2, const ITensorInfo *output)
-{
- const TensorShape &out_shape =
- TensorShape::broadcast_shape(input1->tensor_shape(), input2->tensor_shape());
-
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input1, 1, DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input2, 1, DataType::F16, DataType::F32);
-
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
- "Inputs are not broadcast compatible");
- // Validate in case of configured output
- if (output->total_size() > 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::F16, DataType::F32);
- ARM_COMPUTE_RETURN_ERROR_ON_MSG(
- detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
- "Wrong shape for output");
- }
- return Status{};
-}
-} // namespace
-
-CLSquaredDifferenceKernel::CLSquaredDifferenceKernel()
- : _input1(nullptr), _input2(nullptr), _output(nullptr)
-{
-}
-
-void CLSquaredDifferenceKernel::configure(const ICLTensor *input1, const ICLTensor *input2,
- ICLTensor *output)
-{
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input1, input2);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input1, output);
- ARM_COMPUTE_ERROR_THROW_ON(validate(input1->info(), input2->info(), output->info()));
-
- _input1 = input1;
- _input2 = input2;
- _output = output;
-
- // Create kernel
- std::set<std::string> build_opts;
- build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input1->info()->data_type())));
- build_opts.emplace(
- ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel("squared_difference", build_opts));
-
- const std::pair<TensorShape, ValidRegion> broadcast_pair =
- ITensorInfo::broadcast_shape_and_valid_region(*input1->info(), *input2->info());
-
- const TensorShape &out_shape = broadcast_pair.first;
- const ValidRegion &valid_region = broadcast_pair.second;
-
- // Auto initialize output if not initialized
- {
- set_shape_if_empty(*output->info(), out_shape);
-
- if (input1->info()->data_type() == DataType::F16 &&
- input2->info()->data_type() == DataType::F16)
- {
- set_format_if_unknown(*output->info(), Format::F16);
- }
- else if (input1->info()->data_type() == DataType::F32 ||
- input2->info()->data_type() == DataType::F32)
- {
- set_format_if_unknown(*output->info(), Format::F32);
- }
- }
-
- Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
- Window win_input1 = win.broadcast_if_dimension_le_one(*input1->info());
- Window win_input2 = win.broadcast_if_dimension_le_one(*input2->info());
-
- AccessWindowHorizontal input1_access(input1->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal input2_access(input2->info(), 0, num_elems_processed_per_iteration);
- AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
-
- bool window_changed = update_window_and_padding(win_input1, input1_access) ||
- update_window_and_padding(win_input2, input2_access) ||
- update_window_and_padding(win, output_access);
-
- output_access.set_valid_region(win, valid_region);
-
- ICLKernel::configure_internal(win);
-}
-
-void CLSquaredDifferenceKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
-
- const TensorShape &in_shape1 = _input1->info()->tensor_shape();
- const TensorShape &in_shape2 = _input2->info()->tensor_shape();
- const TensorShape &out_shape = _output->info()->tensor_shape();
-
- bool can_collapse = true;
- if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
- {
- can_collapse =
- (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
- for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); d++)
- {
- can_collapse = (in_shape1[d] == in_shape2[d]);
- }
- }
-
- bool has_collapsed = false;
- Window collapsed =
- can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
- : window;
-
- const TensorShape &in_shape1_collapsed =
- has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
- const TensorShape &in_shape2_collapsed =
- has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
-
- Window slice = collapsed.first_slice_window_3D();
- Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
- Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
-
- do
- {
- unsigned int idx = 0;
- add_3D_tensor_argument(idx, _input1, slice_input1);
- add_3D_tensor_argument(idx, _input2, slice_input2);
- add_3D_tensor_argument(idx, _output, slice);
-
- enqueue(queue, *this, slice);
-
- collapsed.slide_window_slice_3D(slice_input1);
- collapsed.slide_window_slice_3D(slice_input2);
- } while (collapsed.slide_window_slice_3D(slice));
-}
-
-BorderSize CLSquaredDifferenceKernel::border_size() const
-{
- const unsigned int replicateSize =
- _output->info()->dimension(0) -
- std::min(_input1->info()->dimension(0), _input2->info()->dimension(0));
- const unsigned int border =
- std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
- return BorderSize(0, border, 0, 0);
-}
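
The arithmetic behind the deleted kernel is simply out = (input1 - input2)^2 with broadcasting; a minimal scalar sketch, covering only the single-element broadcast case that the border handling above supports (names are illustrative):

#include <cstddef>
#include <vector>

// Scalar squared-difference reference. When b holds a single value it is
// broadcast across a, mirroring the dimension-0 broadcast handled above.
std::vector<float> squared_difference_ref(const std::vector<float> &a,
                                          const std::vector<float> &b)
{
  std::vector<float> out(a.size());
  for (std::size_t i = 0; i < a.size(); ++i)
  {
    const float diff = a[i] - (b.size() == 1 ? b[0] : b[i]);
    out[i] = diff * diff;
  }
  return out;
}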
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLStridedSliceExKernel.cpp b/libs/ARMComputeEx/src/core/CL/kernels/CLStridedSliceExKernel.cpp
deleted file mode 100644
index 48146a43a..000000000
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLStridedSliceExKernel.cpp
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/CL/kernels/CLStridedSliceExKernel.h"
-
-#include "arm_compute/core/CL/CLHelpers.h"
-#include "arm_compute/core/CL/CLKernelLibraryEx.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-#include "arm_compute/core/TensorInfo.h"
-
-using namespace arm_compute;
-
-CLStridedSliceExKernel::CLStridedSliceExKernel()
- : _input(nullptr), _output(nullptr), _beginData(nullptr), _endData(nullptr),
- _stridesData(nullptr), _beginMask(0), _endMask(0), _shrinkAxisMask(0)
-{
-}
-
-Status CLStridedSliceExKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
- const ITensorInfo *begin, const ITensorInfo *end,
- const ITensorInfo *strides, int32_t beginMask,
- int32_t endMask, int32_t shrinkAxisMask)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output, begin, end, strides);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(
- input, 1, DataType::U8, DataType::S8, DataType::QASYMM8, DataType::U16, DataType::S16,
- DataType::U32, DataType::S32, DataType::F16, DataType::F32);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(begin, 1, DataType::S32);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(end, 1, DataType::S32);
- ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(strides, 1, DataType::S32);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
-
- ARM_COMPUTE_ERROR_ON(begin->num_dimensions() != 1 || begin->dimension(0) > 4);
- ARM_COMPUTE_ERROR_ON_MISMATCHING_DIMENSIONS(begin->tensor_shape(), end->tensor_shape(),
- strides->tensor_shape());
-
- return Status{};
-}
-
-// Return the index of the first element along that axis. This index is a
-// non-negative integer in the range [0, axisSize - 1] that can be used to
-// index directly into the data.
-inline int32_t StartForAxis(int32_t beginMask, int32_t begin, int32_t stride,
- const TensorShape &inputShape, int32_t axis)
-{
- // Begin with the specified index
- int32_t start = begin;
-
- // beginMask override
- if (beginMask & 1 << axis)
- {
- if (stride > 0)
- {
- // Forward iteration - use the first element. These values will get
- // clamped below (Note: We could have set them to 0 and axisSize-1, but
- // use lowest() and max() to maintain symmetry with StopForAxis())
- start = std::numeric_limits<int32_t>::lowest();
- }
- else
- {
- // Backward iteration - use the last element.
- start = std::numeric_limits<int32_t>::max();
- }
- }
-
- // Handle negative indices
- int32_t axisSize = inputShape[axis];
- if (start < 0)
- {
- start += axisSize;
- }
-
- // Clamping
- start = arm_compute::utility::clamp(start, 0, axisSize - 1);
-
- return start;
-}
-
-// Return the "real" index for the end of iteration along that axis. This is an
-// "end" in the traditional C sense, in that it points to one past the last
-// element. ie. So if you were iterating through all elements of a 1D array of
-// size 4, this function would return 4 as the stop, because it is one past the
-// "real" indices of 0, 1, 2 & 3.
-inline int32_t StopForAxis(int32_t endMask, int32_t end, int32_t stride,
- const TensorShape &inputShape, int32_t axis)
-{
- // Begin with the specified index
- int32_t stop = end;
-
- // endMask override
- if (endMask & (1 << axis))
- {
- if (stride > 0)
- {
- // Forward iteration - use the last element. These values will get
- // clamped below
- stop = std::numeric_limits<int32_t>::max();
- }
- else
- {
- // Backward iteration - use the first element.
- stop = std::numeric_limits<int32_t>::lowest();
- }
- }
-
- // Handle negative indices
- int32_t axisSize = inputShape[axis];
- if (stop < 0)
- {
- stop += axisSize;
- }
-
- // Clamping
- // Because the end index points one past the last element, we need slightly
- // different clamping ranges depending on the direction.
- if (stride > 0)
- {
- // Forward iteration
- stop = arm_compute::utility::clamp(stop, 0, axisSize);
- }
- else
- {
- // Backward iteration
- stop = arm_compute::utility::clamp(stop, -1, axisSize - 1);
- }
-
- return stop;
-}
-
-inline int32_t getOutDim(int32_t start, int32_t stop, int32_t stride)
-{
- int32_t ret = 0;
- if (stride > 0)
- {
- ret = ((stop - start - 1) / stride) + 1;
- }
- else
- {
- ret = ((stop - start + 1) / stride) + 1;
- }
- ARM_COMPUTE_ERROR_ON_MSG(ret < 0, "The computed output dimension must be non-negative");
- return ret;
-}
-
-void CLStridedSliceExKernel::configure(const ICLTensor *input, ICLTensor *output,
- ICLTensor *beginData, ICLTensor *endData,
- ICLTensor *stridesData, int32_t beginMask, int32_t endMask,
- int32_t shrinkAxisMask)
-{
- ARM_COMPUTE_ERROR_THROW_ON(validate(input->info(), output->info(), beginData->info(),
- endData->info(), stridesData->info(), beginMask, endMask,
- shrinkAxisMask));
-
- _input = input;
- _output = output;
- _beginData = beginData;
- _endData = endData;
- _stridesData = stridesData;
- _beginMask = beginMask;
- _endMask = endMask;
- _shrinkAxisMask = shrinkAxisMask;
-
- // Set kernel build options
- std::set<std::string> build_opts;
- build_opts.emplace("-DELEMENT_DATA_TYPE=" +
- get_cl_type_from_data_type(input->info()->data_type()));
- build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
-
- // Create kernel
- _kernel = static_cast<cl::Kernel>(
- CLKernelLibraryEx::get().create_kernel("strided_slice_ex", build_opts));
-
- // Configure kernel window
- Window win = calculate_max_window(*output->info(), Steps());
- ICLKernel::configure_internal(win);
-}
-
-void CLStridedSliceExKernel::run(const Window &window, cl::CommandQueue &queue)
-{
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(IKernel::window(), window);
-
- _beginData->map(queue);
- _endData->map(queue);
- _stridesData->map(queue);
-
- std::vector<int32_t> starts;
- std::vector<int32_t> strides;
-
- for (uint32_t n = 0; n < _beginData->info()->tensor_shape().total_size(); ++n)
- {
- const TensorShape shape = _input->info()->tensor_shape();
- starts.emplace_back(
- StartForAxis(_beginMask, reinterpret_cast<int32_t *>(_beginData->buffer())[n],
- reinterpret_cast<int32_t *>(_stridesData->buffer())[n], shape, n));
-
- strides.emplace_back(reinterpret_cast<int32_t *>(_stridesData->buffer())[n]);
- }
-
- for (uint32_t n = _beginData->info()->tensor_shape().total_size(); n < 4; n++)
- {
- starts.emplace_back(0);
- strides.emplace_back(1);
- }
- // TODO: Apply shrinkAxisMask
-
- _beginData->unmap(queue);
- _stridesData->unmap(queue);
- _endData->unmap(queue);
-
- unsigned int idx = 2 * num_arguments_per_4D_tensor(); // Skip the input and output parameters
- const cl_int4 startsArg = {{
- static_cast<cl_int>(starts[0]), static_cast<cl_int>(starts[1]),
- static_cast<cl_int>(starts[2]), static_cast<cl_int>(starts[3]),
- }};
- _kernel.setArg<cl_int4>(idx++, startsArg);
-
- const cl_int4 stridesArg = {{
- static_cast<cl_int>(strides[0]), static_cast<cl_int>(strides[1]),
- static_cast<cl_int>(strides[2]), static_cast<cl_int>(strides[3]),
- }};
- _kernel.setArg<cl_int4>(idx++, stridesArg);
-
- Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
-
- // Setup input slice
- Window slice_in(slice_out);
- slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
- slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
- slice_in.set(3, Window::Dimension(0, 0, 0));
-
- do
- {
- unsigned int idx = 0;
- add_4D_tensor_argument(idx, _input, slice_in);
- add_4D_tensor_argument(idx, _output, slice_out);
- enqueue(queue, *this, slice_out);
- } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
-}
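
The StartForAxis/StopForAxis/getOutDim arithmetic above is self-contained; the following standalone sketch reproduces it for one forward slice, using std::min/std::max in place of arm_compute::utility::clamp (the values are made up for illustration):

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main()
{
  const int32_t axis_size = 7;
  int32_t start = -5, stop = 6;                    // slice [-5:6:2] on a size-7 axis
  const int32_t stride = 2;
  if (start < 0) start += axis_size;               // negative index -> 2
  if (stop < 0) stop += axis_size;
  start = std::max(0, std::min(start, axis_size - 1));
  stop = std::max(0, std::min(stop, axis_size));   // forward clamp: one past the end
  const int32_t out_dim = ((stop - start - 1) / stride) + 1; // picks elements 2 and 4 -> 2
  std::printf("start=%d stop=%d out_dim=%d\n", static_cast<int>(start),
              static_cast<int>(stop), static_cast<int>(out_dim));
  return 0;
}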
diff --git a/libs/ARMComputeEx/src/core/NEON/kernels/NENormalizationLayerExKernel.cpp b/libs/ARMComputeEx/src/core/NEON/kernels/NENormalizationLayerExKernel.cpp
deleted file mode 100644
index 3b5782c25..000000000
--- a/libs/ARMComputeEx/src/core/NEON/kernels/NENormalizationLayerExKernel.cpp
+++ /dev/null
@@ -1,294 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/NEON/kernels/NENormalizationLayerExKernel.h"
-
-#include "arm_compute/core/Helpers.h"
-#include "arm_compute/core/NEON/NEMath.h"
-
-using namespace arm_compute;
-
-namespace
-{
-Status validate_arguments(const ITensorInfo *input, const ITensorInfo *input_squared,
- const ITensorInfo *output, const NormalizationLayerInfo &norm_info)
-{
- ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, input_squared, output);
- ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F16, DataType::F32);
-
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, input_squared);
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(input, input_squared);
-
- // Checks performed when output is configured
- if (output->total_size() != 0)
- {
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
- ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(input, output);
- }
-
- return Status{};
-}
-
-std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input,
- ITensorInfo *input_squared,
- ITensorInfo *output,
- const NormalizationLayerInfo &norm_info)
-{
- unsigned int num_elems_processed_per_iteration = 16 / input->element_size();
- const unsigned int num_elems_read_per_iteration =
- num_elems_processed_per_iteration + 2 * (norm_info.norm_size() / 2);
- const unsigned int num_rows =
- (norm_info.type() == NormType::IN_MAP_2D) ? norm_info.norm_size() : 1;
- const unsigned int border_width =
- (norm_info.is_cross_map()) ? 0 : std::min<unsigned int>(norm_info.norm_size() / 2, 3U);
- BorderSize border_size = BorderSize(0, border_width);
- bool window_changed = false;
-
- // Configure window
- Window win = calculate_max_window(*input, Steps(num_elems_processed_per_iteration));
-
- AccessWindowRectangle input_access(input, -border_size.left, 0, num_elems_read_per_iteration,
- num_rows);
- AccessWindowRectangle input_squared_access(input_squared, -border_size.left, 0,
- num_elems_read_per_iteration, num_rows);
-
- if (output->total_size() != 0)
- {
- AccessWindowHorizontal output_access(output, 0, num_elems_processed_per_iteration);
- window_changed =
- update_window_and_padding(win, input_access, input_squared_access, output_access);
- output_access.set_valid_region(win, input->valid_region());
- }
- else
- {
- window_changed = update_window_and_padding(win, input_access, input_squared_access);
- }
-
- Status err = (window_changed)
- ? ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!")
- : Status{};
- return std::make_pair(err, win);
-}
-} // namespace
-
-NENormalizationLayerExKernel::NENormalizationLayerExKernel()
- : _func(nullptr), _input(nullptr), _input_squared(nullptr), _output(nullptr),
- _norm_info(NormType::IN_MAP_1D), _border_size()
-{
-}
-
-BorderSize NENormalizationLayerExKernel::border_size() const { return _border_size; }
-
-void NENormalizationLayerExKernel::configure(const ITensor *input, const ITensor *input_squared,
- ITensor *output, NormalizationLayerInfo norm_info)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, input_squared, output);
- // Output tensor auto initialization if not yet initialized
- auto_init_if_empty(*output->info(), *input->info());
-
- // Perform validation step
- ARM_COMPUTE_ERROR_THROW_ON(
- validate_arguments(input->info(), input_squared->info(), output->info(), norm_info));
-
- const unsigned int border_width =
- (norm_info.is_cross_map()) ? 0 : std::min<unsigned int>(norm_info.norm_size() / 2, 3U);
-
- _input = input;
- _input_squared = input_squared;
- _output = output;
- _norm_info = norm_info;
- _border_size = BorderSize(0, border_width);
-
- switch (_input->info()->data_type())
- {
- case DataType::F32:
- {
- switch (norm_info.type())
- {
- case NormType::IN_MAP_1D:
- _func = &NENormalizationLayerExKernel::normalize_float<DataType::F32, 0, false>;
- break;
- case NormType::IN_MAP_2D:
- // Normalize over X and Y
- _func = &NENormalizationLayerExKernel::normalize_float<DataType::F32, 0, true>;
- break;
- case NormType::CROSS_MAP:
- _func = &NENormalizationLayerExKernel::normalize_float<DataType::F32, 2, false>;
- break;
- default:
- break;
- }
- break;
- }
- case DataType::F16:
- {
- switch (norm_info.type())
- {
- case NormType::IN_MAP_1D:
- _func = &NENormalizationLayerExKernel::normalize_float<DataType::F16, 0, false>;
- break;
- case NormType::IN_MAP_2D:
- // Normalize over X and Y
- _func = &NENormalizationLayerExKernel::normalize_float<DataType::F16, 0, true>;
- break;
- case NormType::CROSS_MAP:
- _func = &NENormalizationLayerExKernel::normalize_float<DataType::F16, 2, false>;
- break;
- default:
- break;
- }
- break;
- }
- default:
- ARM_COMPUTE_ERROR("NOT SUPPORTED!");
- }
-
- // Configure kernel window
- auto win_config = validate_and_configure_window(input->info(), input_squared->info(),
- output->info(), norm_info);
- ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
- INEKernel::configure(win_config.second);
-}
-
-template <DataType dt, unsigned int dim, bool do_2D_norm>
-void NENormalizationLayerExKernel::normalize_float(const Window &window)
-{
- Iterator input(_input, window);
- Iterator input_squared(_input_squared, window);
- Iterator output(_output, window);
-
- const int dim_y = 1;
- const int radius = _norm_info.norm_size();
- const int total_size = _input->info()->dimension(dim) - 1;
- const int input_squared_stride = _input_squared->info()->strides_in_bytes()[dim];
- // We account for padding across X only and iterate over rows
- const int min_left = (dim == 2) ? 0 : -static_cast<int>(border_size().left);
- const int max_right = (dim == 2) ? total_size : total_size + border_size().left;
- const int min_top = 0;
- const int max_bottom = _input->info()->dimension(dim_y) - 1;
-
- if (dt == DataType::F32)
- {
- const float32x4_t coeff_vec = vdupq_n_f32(_norm_info.scale_coeff());
- const float32x4_t beta_vec = vdupq_n_f32(_norm_info.beta());
- const float32x4_t kappa_vec = vdupq_n_f32(_norm_info.kappa());
-
- execute_window_loop(
- window,
- [&](const Coordinates &id) {
- // Get range to normalize
- const int current_row = do_2D_norm ? id[dim_y] : 0;
- const int current_slice = id[dim];
- const int first_row = do_2D_norm ? std::max(current_row - radius, min_top) : 0;
- const int last_row = do_2D_norm ? std::min(current_row + radius, max_bottom) : 0;
- const int first_slice = std::max(current_slice - radius, min_left);
- const int last_slice = std::min(current_slice + radius, max_right);
-
- // Accumulate 2D In-Map values
- float32x4_t accu = vdupq_n_f32(0.f);
- for (int j = first_row; j <= last_row; j++)
- {
- // Compute row displacement
- const int row = (j - current_row) * _input_squared->info()->strides_in_bytes()[dim_y];
- const uint8_t *const input_squared_ptr =
- input_squared.ptr() + row - (current_slice * input_squared_stride);
- for (int i = first_slice; i <= last_slice; ++i)
- {
- accu = vaddq_f32(accu, vld1q_f32(reinterpret_cast<const float *>(
- input_squared_ptr + i * input_squared_stride)));
- }
- }
-
- // Normalize
- const float32x4_t normalized = vpowq_f32(vmlaq_f32(kappa_vec, coeff_vec, accu), beta_vec);
- const float32x4_t normalized_pixel = vmulq_f32(
- vld1q_f32(reinterpret_cast<const float *>(input.ptr())), vinvq_f32(normalized));
- vst1q_f32(reinterpret_cast<float *>(output.ptr()), normalized_pixel);
- },
- input, input_squared, output);
- }
-#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
- else if (dt == DataType::F16)
- {
- const float16x8_t coeff_vec = vdupq_n_f16(_norm_info.scale_coeff());
- const float16x8_t beta_vec_f16 = vdupq_n_f16(_norm_info.beta());
- const float16x8_t kappa_vec = vdupq_n_f16(_norm_info.kappa());
-
- execute_window_loop(
- window,
- [&](const Coordinates &id) {
- // Get range to normalize
- const int current_row = do_2D_norm ? id[dim_y] : 0;
- const int current_slice = id[dim];
- const int first_row = do_2D_norm ? std::max(current_row - radius, min_top) : 0;
- const int last_row = do_2D_norm ? std::min(current_row + radius, max_bottom) : 0;
- const int first_slice = std::max(current_slice - radius, min_left);
- const int last_slice = std::min(current_slice + radius, max_right);
-
- // Accumulate 2D In-Map values
- float16x8_t accu = vdupq_n_f16(0.f);
- for (int j = first_row; j <= last_row; j++)
- {
- // Compute row displacement
- const int row = (j - current_row) * _input_squared->info()->strides_in_bytes()[dim_y];
- const uint8_t *const input_squared_ptr =
- input_squared.ptr() + row - (current_slice * input_squared_stride);
- for (int i = first_slice; i <= last_slice; ++i)
- {
- accu = vaddq_f16(accu, vld1q_f16(reinterpret_cast<const float16_t *>(
- input_squared_ptr + i * input_squared_stride)));
- }
- }
-
- const float16x8_t norm_f16 =
- vpowq_f16(vaddq_f16(kappa_vec, vmulq_f16(coeff_vec, accu)), beta_vec_f16);
- const float16x8_t normalized_pixel = vmulq_f16(
- vld1q_f16(reinterpret_cast<const float16_t *>(input.ptr())), vinvq_f16(norm_f16));
- vst1q_f16(reinterpret_cast<float16_t *>(output.ptr()), normalized_pixel);
- },
- input, input_squared, output);
- }
-#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */
- else
- {
- ARM_COMPUTE_ERROR("Not supported");
- }
-}
-
-Status NENormalizationLayerExKernel::validate(const ITensorInfo *input,
- const ITensorInfo *input_squared,
- const ITensorInfo *output,
- const NormalizationLayerInfo norm_info)
-{
- ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, input_squared, output, norm_info));
- ARM_COMPUTE_RETURN_ON_ERROR(validate_and_configure_window(input->clone().get(),
- input_squared->clone().get(),
- output->clone().get(), norm_info)
- .first);
-
- return Status{};
-}
-
-void NENormalizationLayerExKernel::run(const Window &window, const ThreadInfo &info)
-{
- ARM_COMPUTE_UNUSED(info);
- ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
- ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(INEKernel::window(), window);
- ARM_COMPUTE_ERROR_ON(_func == nullptr);
-
- // Run function
- (this->*_func)(window);
-}
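
The float path above evaluates out = in * (kappa + coeff * sum_of_squares)^(-beta) over a sliding window; a scalar cross-map sketch, assuming a 1-D channel vector where coeff stands in for norm_info.scale_coeff() (illustrative, not the vectorized NEON path):

#include <algorithm>
#include <cmath>
#include <vector>

std::vector<float> cross_map_norm_ref(const std::vector<float> &in, int radius,
                                      float kappa, float coeff, float beta)
{
  const int n = static_cast<int>(in.size());
  std::vector<float> out(n);
  for (int c = 0; c < n; ++c)
  {
    float accu = 0.f; // windowed sum of squares around channel c
    for (int i = std::max(0, c - radius); i <= std::min(n - 1, c + radius); ++i)
      accu += in[i] * in[i];
    out[c] = in[c] / std::pow(kappa + coeff * accu, beta);
  }
  return out;
}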
diff --git a/libs/ARMComputeEx/src/core/UtilsEx.cpp b/libs/ARMComputeEx/src/core/UtilsEx.cpp
deleted file mode 100644
index b63093bbb..000000000
--- a/libs/ARMComputeEx/src/core/UtilsEx.cpp
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/core/UtilsEx.h"
-
-#include <cstdint>
-#include <fstream>
-#include <map>
-#include <string>
-
-using namespace arm_compute;
-
-const std::string &
-arm_compute::string_from_activation_func_ex(ActivationLayerInfoEx::ActivationFunction act)
-{
- static std::map<ActivationLayerInfoEx::ActivationFunction, const std::string> act_map = {
- {ActivationLayerInfoEx::ActivationFunction::RSQRT, "RSQRT"},
- };
-
- return act_map[act];
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLActivationLayerEx.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLActivationLayerEx.cpp
deleted file mode 100644
index 1e52fc429..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLActivationLayerEx.cpp
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLActivationLayerEx.h"
-
-#include "arm_compute/core/CL/kernels/CLActivationLayerExKernel.h"
-
-using namespace arm_compute;
-
-void CLActivationLayerEx::configure(ICLTensor *input, ICLTensor *output,
- ActivationLayerInfoEx act_info)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLActivationLayerExKernel>();
- k->configure(input, output, act_info);
- _kernel = std::move(k);
-}
-
-Status CLActivationLayerEx::validate(const ITensorInfo *input, const ITensorInfo *output,
- const ActivationLayerInfoEx &act_info)
-{
- return CLActivationLayerExKernel::validate(input, output, act_info);
-}
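
This file and most of the function wrappers deleted below share one idiom: configure() builds a single concrete kernel, configures it, and moves ownership into the simple-function base, whose run() later enqueues the kernel. A minimal sketch of that ownership pattern with stand-in types (these are not the arm_compute classes):

#include <memory>
#include <utility>

struct IKernelStub
{
  virtual ~IKernelStub() = default;
  virtual void run() = 0;
};

struct ConcreteKernel : IKernelStub
{
  void run() override { /* enqueue the OpenCL kernel here */ }
};

struct SimpleFunction
{
  void configure()
  {
    // this repo uses cpp14::make_unique; plain new works the same on C++11
    auto k = std::unique_ptr<ConcreteKernel>(new ConcreteKernel());
    // k->configure(input, output, ...) would run the kernel-side validation
    _kernel = std::move(k); // the function now owns the kernel
  }
  void run() { _kernel->run(); }

  std::unique_ptr<IKernelStub> _kernel;
};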
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLArgMinMax.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLArgMinMax.cpp
deleted file mode 100644
index dff743e89..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLArgMinMax.cpp
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLArgMinMax.h"
-
-#include "arm_compute/core/CL/kernels/CLArgMinMaxKernel.h"
-#include "arm_compute/runtime/CL/CLScheduler.h"
-
-namespace arm_compute
-{
-
-CLArgMinMax::CLArgMinMax()
- : _input(nullptr), _output(nullptr), _argminmax_axis(), _interm_tensors(), _argminmax_kernels(),
- _num_of_kernels()
-{
-}
-
-void CLArgMinMax::configure(ICLTensor *input, ICLTensor *output, std::vector<uint32_t> axis,
- ArgOperation op)
-{
- ARM_COMPUTE_ERROR_THROW_ON(validate(input->info(), axis, output->info(), op));
- _input = input;
- _output = output;
- _argminmax_axis = axis;
- _arg_op = op;
- // NOTE The argminmax_axis must contain no duplicate values.
- _num_of_kernels = axis.size();
- const size_t num_of_interm_tensors = _num_of_kernels - 1;
-
- _interm_tensors = arm_compute::support::cpp14::make_unique<CLTensor[]>(num_of_interm_tensors);
- _argminmax_kernels =
- arm_compute::support::cpp14::make_unique<CLArgMinMaxKernel[]>(_num_of_kernels);
-
- TensorShape shape{input->info()->tensor_shape()};
- for (size_t i = 0; i < num_of_interm_tensors; i++)
- {
- shape.set(_argminmax_axis[i], 1);
- _interm_tensors[i].allocator()->init(
- TensorInfo(shape, input->info()->num_channels(), input->info()->data_type()));
- _interm_tensors[i].allocator()->allocate();
- }
-
- // Build a vector of the ICLTensors in processing order.
- std::vector<ICLTensor *> tensors;
- tensors.emplace_back(input);
- for (size_t i = 0; i < num_of_interm_tensors; i++)
- {
- tensors.emplace_back(_interm_tensors.get() + i);
- }
- tensors.emplace_back(output);
-
- // Apply ArgMinMax on all kernels
- for (size_t i = 0; i < _num_of_kernels; i++)
- {
- _argminmax_kernels[i].configure(tensors[i], tensors[i + 1], _argminmax_axis[i], op);
- }
-}
-
-Status CLArgMinMax::validate(const ITensorInfo *input, const std::vector<uint32_t> &argminmax_axis,
- const ITensorInfo *output, ArgOperation op)
-{
- const size_t num_of_kernels = argminmax_axis.size();
- const size_t num_of_interm_tensors = num_of_kernels - 1;
-
- // Create temporary tensor infos
- auto interm_tensors =
- arm_compute::support::cpp14::make_unique<TensorInfo[]>(num_of_interm_tensors);
-
- // Create intermediate tensor info
- TensorShape shape{input->tensor_shape()};
-
- for (size_t i = 0; i < num_of_interm_tensors; i++)
- {
- shape.set(argminmax_axis[i], 1);
- interm_tensors[i].set_data_type(input->data_type());
- interm_tensors[i].set_tensor_shape(shape);
- interm_tensors[i].set_num_channels(input->num_channels());
- }
-
- // Build a vector of the ITensorInfos in processing order.
- std::vector<const ITensorInfo *> tensors;
- tensors.emplace_back(input);
- for (size_t i = 0; i < num_of_interm_tensors; i++)
- {
- tensors.emplace_back(interm_tensors.get() + i);
- }
- tensors.emplace_back(output);
-
- // Validate argminmax on all kernels
- for (size_t i = 0; i < num_of_kernels; i++)
- {
- ARM_COMPUTE_RETURN_ON_ERROR(
- CLArgMinMaxKernel::validate(tensors[i], tensors[i + 1], argminmax_axis[i], op));
- }
-
- return Status{};
-}
-
-void CLArgMinMax::run()
-{
- for (size_t i = 0; i < _num_of_kernels; ++i)
- {
- CLScheduler::get().enqueue(_argminmax_kernels[i]);
- }
-}
-
-} // namespace arm_compute
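
Because each CLArgMinMaxKernel collapses exactly one axis to 1, reducing over k axes needs k - 1 intermediate tensors whose shapes shrink step by step; a small sketch of that shape chain (illustrative values):

#include <cstddef>
#include <cstdio>
#include <vector>

int main()
{
  std::vector<unsigned> shape = {2, 3, 4, 5};
  const std::vector<unsigned> axes = {1, 3};        // reduce over axes 1 and 3
  for (std::size_t i = 0; i + 1 < axes.size(); ++i) // one intermediate per extra axis
  {
    shape[axes[i]] = 1;
    std::printf("intermediate %zu: {%u, %u, %u, %u}\n", i, shape[0], shape[1],
                shape[2], shape[3]);                // prints {2, 1, 4, 5}
  }
  return 0;
}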
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLArithmeticSubtractionEx.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLArithmeticSubtractionEx.cpp
deleted file mode 100644
index 3f403c80a..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLArithmeticSubtractionEx.cpp
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLArithmeticSubtractionEx.h"
-
-#include "arm_compute/core/CL/ICLTensor.h"
-#include "arm_compute/core/CL/kernels/CLArithmeticSubtractionExKernel.h"
-
-using namespace arm_compute;
-
-void CLArithmeticSubtractionEx::configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output,
- ConvertPolicy policy)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLArithmeticSubtractionExKernel>();
- k->configure(input1, input2, output, policy);
- _kernel = std::move(k);
-
- if (output->info()->dimension(0) > 1)
- {
- ICLTensor *broadcasted_info = (input1->info()->dimension(0) == 1) ? input1 : input2;
-
- if (broadcasted_info->info()->dimension(0) == 1)
- {
- _border_handler.configure(broadcasted_info, _kernel->border_size(), BorderMode::REPLICATE);
- }
- }
-}
-
-Status CLArithmeticSubtractionEx::validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, ConvertPolicy policy)
-{
- return CLArithmeticSubtractionExKernel::validate(input1, input2, output, policy);
-}
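
Several of the binary functions in this directory repeat the same dimension-0 broadcast check before wiring up the border handler; the decision itself reduces to the following (plain integers stand in for tensor metadata):

#include <cstdio>

int main()
{
  const unsigned out_dim0 = 16, in1_dim0 = 1, in2_dim0 = 16;
  const bool in1_is_narrow = (in1_dim0 == 1);
  const unsigned chosen_dim0 = in1_is_narrow ? in1_dim0 : in2_dim0;
  if (out_dim0 > 1 && chosen_dim0 == 1)
  {
    // the narrow input's border is replicated so the kernel can always
    // read a full vector of num_elems_processed_per_iteration elements
    std::printf("replicate border on input%d\n", in1_is_narrow ? 1 : 2);
  }
  return 0;
}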
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLBatchToSpaceND.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLBatchToSpaceND.cpp
deleted file mode 100644
index 26e3798cc..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLBatchToSpaceND.cpp
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLBatchToSpaceND.h"
-
-#include "arm_compute/core/CL/kernels/CLBatchToSpaceNDKernel.h"
-
-using namespace arm_compute;
-
-void CLBatchToSpaceND::configure(ICLTensor *input, ICLTensor *output, const int32_t *block_size)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLBatchToSpaceNDKernel>();
- k->configure(input, output, block_size);
- _kernel = std::move(k);
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLComparisonOp.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLComparisonOp.cpp
deleted file mode 100644
index f6a745a25..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLComparisonOp.cpp
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLComparisonOp.h"
-
-#include "arm_compute/core/CL/kernels/CLComparisonOpKernel.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-void CLComparisonOp::configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output,
- const ComparisonOperation &op)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLComparisonOpKernel>();
- k->configure(input1, input2, output, op);
- _kernel = std::move(k);
-
- if (output->info()->dimension(0) > 1)
- {
- ICLTensor *broadcasted_info = (input1->info()->dimension(0) == 1) ? input1 : input2;
-
- if (broadcasted_info->info()->dimension(0) == 1)
- {
- _border_handler.configure(broadcasted_info, _kernel->border_size(), BorderMode::REPLICATE);
- }
- }
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLExp.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLExp.cpp
deleted file mode 100644
index 411fa8700..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLExp.cpp
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLExp.h"
-
-#include "arm_compute/core/CL/kernels/CLExpKernel.h"
-
-using namespace arm_compute;
-
-void CLExp::configure(const ICLTensor *input, ICLTensor *output)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLExpKernel>();
- k->configure(input, output);
- _kernel = std::move(k);
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLGather.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLGather.cpp
deleted file mode 100644
index fb056fe45..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLGather.cpp
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLGather.h"
-
-#include "arm_compute/core/CL/kernels/CLGatherKernel.h"
-
-using namespace arm_compute;
-
-void CLGather::configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLGatherKernel>();
- k->configure(input1, input2, output);
- _kernel = std::move(k);
-}
-
-Status CLGather::validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output)
-{
- return CLGatherKernel::validate(input1, input2, output);
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLNormalizationLayerEx.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLNormalizationLayerEx.cpp
deleted file mode 100644
index 276c4557a..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLNormalizationLayerEx.cpp
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLNormalizationLayerEx.h"
-
-#include "arm_compute/runtime/CL/CLScheduler.h"
-
-using namespace arm_compute;
-
-CLNormalizationLayerEx::CLNormalizationLayerEx() : _norm_kernel(), _border_handler() {}
-
-void CLNormalizationLayerEx::configure(ICLTensor *input, ICLTensor *output,
- const NormalizationLayerInfo &norm_info)
-{
- ARM_COMPUTE_ERROR_ON(input == nullptr);
-
- // Configure normalization kernel
- _norm_kernel.configure(input, output, norm_info);
-
- // Fill the border with 3 elements since we need vload4 in the IN_MAP normalization kernel
- _border_handler.configure(input, _norm_kernel.border_size(), BorderMode::CONSTANT, PixelValue(0));
-}
-
-Status CLNormalizationLayerEx::validate(const ITensorInfo *input, const ITensorInfo *output,
- const NormalizationLayerInfo &norm_info)
-{
- return CLNormalizationLayerExKernel::validate(input, output, norm_info);
-}
-
-void CLNormalizationLayerEx::run()
-{
- // Run border handler
- CLScheduler::get().enqueue(_border_handler, false);
-
- // Run normalization kernel
- CLScheduler::get().enqueue(_norm_kernel);
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLPadLayerEx.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLPadLayerEx.cpp
deleted file mode 100644
index 5265b6c34..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLPadLayerEx.cpp
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
-* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
-* Copyright (c) 2016-2018 ARM Limited.
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-#include "arm_compute/runtime/CL/functions/CLPadLayerEx.h"
-
-#include "arm_compute/core/CL/kernels/CLPadLayerKernel.h"
-
-using namespace arm_compute;
-
-void CLPadLayerEx::configure(ICLTensor *input, ICLTensor *output, ICLTensor *pad_size)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLPadLayerKernel>();
- k->configure(input, output, pad_size);
- _kernel = std::move(k);
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLPermuteEx.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLPermuteEx.cpp
deleted file mode 100644
index fb363270d..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLPermuteEx.cpp
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLPermuteEx.h"
-
-#include "arm_compute/core/CL/kernels/CLPermuteExKernel.h"
-
-using namespace arm_compute;
-
-void CLPermuteEx::configure(const ICLTensor *input, ICLTensor *output,
- const PermutationVector &perm)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLPermuteExKernel>();
- k->configure(input, output, perm);
- _kernel = std::move(k);
-}
-
-Status CLPermuteEx::validate(const ITensorInfo *input, const ITensorInfo *output,
- const PermutationVector &perm)
-{
- ARM_COMPUTE_RETURN_ON_ERROR(CLPermuteExKernel::validate(input, output, perm));
- return Status{};
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLPixelWiseDivision.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLPixelWiseDivision.cpp
deleted file mode 100644
index dc0baa8dd..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLPixelWiseDivision.cpp
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLPixelWiseDivision.h"
-
-#include "arm_compute/core/CL/ICLTensor.h"
-#include "arm_compute/core/CL/kernels/CLPixelWiseDivisionKernel.h"
-
-using namespace arm_compute;
-
-void CLPixelWiseDivision::configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output,
- float scale, ConvertPolicy overflow_policy,
- RoundingPolicy rounding_policy)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLPixelWiseDivisionKernel>();
- k->configure(input1, input2, output, scale, overflow_policy, rounding_policy);
- _kernel = std::move(k);
-
- if (output->info()->dimension(0) > 1)
- {
- ICLTensor *broadcasted_info = (input1->info()->dimension(0) == 1) ? input1 : input2;
-
- if (broadcasted_info->info()->dimension(0) == 1)
- {
- _border_handler.configure(broadcasted_info, _kernel->border_size(), BorderMode::REPLICATE);
- }
- }
-}
-
-Status CLPixelWiseDivision::validate(const ITensorInfo *input1, const ITensorInfo *input2,
- const ITensorInfo *output, float scale,
- ConvertPolicy overflow_policy, RoundingPolicy rounding_policy)
-{
- return CLPixelWiseDivisionKernel::validate(input1, input2, output, scale, overflow_policy,
- rounding_policy);
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp
deleted file mode 100644
index 2b8d82706..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLReduceOperation.h"
-
-#include "arm_compute/core/CL/kernels/CLReduceOperationKernel.h"
-#include "arm_compute/runtime/CL/CLScheduler.h"
-
-using namespace arm_compute;
-
-CLReduceOperation::CLReduceOperation()
- : _input(nullptr), _output(nullptr), _axis(), _interm_tensors(), _reduce_kernels()
-{
-}
-
-Status CLReduceOperation::validate(const ITensorInfo *input, const ITensorInfo *output,
- const std::set<uint32_t> &axis, const ReduceOperation &op)
-{
- const size_t num_of_kernels = axis.size();
- const size_t num_of_interm_tensors = num_of_kernels - 1;
-
- // Create temporary tensor infos
- auto interm_tensors =
- arm_compute::support::cpp14::make_unique<TensorInfo[]>(num_of_interm_tensors);
-
- // Create intermediate tensor info
- TensorShape shape{input->tensor_shape()};
-
- auto it = axis.begin();
- for (size_t i = 0; i < num_of_interm_tensors; ++i, ++it)
- {
- shape.set(*it, 1);
- interm_tensors[i].set_data_type(input->data_type());
- interm_tensors[i].set_tensor_shape(shape);
- interm_tensors[i].set_num_channels(input->num_channels());
- }
-
- // Build a vector of the ITensorInfos in processing order.
- std::vector<const ITensorInfo *> tensors;
- tensors.emplace_back(input);
- for (size_t i = 0; i < num_of_interm_tensors; ++i)
- {
- tensors.emplace_back(interm_tensors.get() + i);
- }
- tensors.emplace_back(output);
-
- // Validate ReduceOperation on all kernels
- it = axis.begin();
- for (size_t i = 0; i < num_of_kernels; ++i, ++it)
- {
- ARM_COMPUTE_RETURN_ON_ERROR(
- CLReduceOperationKernel::validate(tensors[i], tensors[i + 1], *it, op));
- }
-
- return Status{};
-}
-
-void CLReduceOperation::configure(ICLTensor *input, ICLTensor *output,
- const std::set<uint32_t> &axis, ReduceOperation op)
-{
- ARM_COMPUTE_ERROR_THROW_ON(validate(input->info(), output->info(), axis, op));
-
- _axis = axis;
-
- _input = input;
- _output = output;
-
- // NOTE The axis must contain no duplicate values.
- const size_t num_of_kernels = axis.size();
- const size_t num_of_interm_tensors = num_of_kernels - 1;
-
- _interm_tensors = arm_compute::support::cpp14::make_unique<CLTensor[]>(num_of_interm_tensors);
- _reduce_kernels =
- arm_compute::support::cpp14::make_unique<CLReduceOperationKernel[]>(num_of_kernels);
-
- TensorShape shape{input->info()->tensor_shape()};
- auto it = axis.begin();
- for (size_t i = 0; i < num_of_interm_tensors; ++i, ++it)
- {
- shape.set(*it, 1);
- _interm_tensors[i].allocator()->init(
- TensorInfo(shape, input->info()->num_channels(), input->info()->data_type()));
- _interm_tensors[i].allocator()->allocate();
- }
-
- // Build a vector of the ICLTensors in processing order.
- std::vector<ICLTensor *> tensors;
- tensors.emplace_back(input);
- for (size_t i = 0; i < num_of_interm_tensors; ++i)
- {
- tensors.emplace_back(_interm_tensors.get() + i);
- }
- tensors.emplace_back(output);
-
- // Apply ReduceOperation on all kernels
- it = axis.begin();
- for (size_t i = 0; i < num_of_kernels; ++i, ++it)
- {
- _reduce_kernels[i].configure(tensors[i], tensors[i + 1], *it, op);
- }
-}
-
-void CLReduceOperation::run()
-{
- const size_t num_of_kernels = _axis.size();
- for (size_t i = 0; i < num_of_kernels; ++i)
- {
- CLScheduler::get().enqueue(_reduce_kernels[i]);
- }
-}
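
One detail worth noting in the deleted function: the axes arrive as a std::set<uint32_t>, which always iterates in ascending order, so the reduction kernels are chained from the lowest axis to the highest regardless of how the caller listed them. For example:

#include <cstdio>
#include <set>

int main()
{
  const std::set<unsigned> axis = {3, 1, 2};
  for (unsigned a : axis)
    std::printf("reduce over axis %u\n", a); // prints 1, 2, 3 in that order
  return 0;
}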
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLSquaredDifference.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLSquaredDifference.cpp
deleted file mode 100644
index dc6e4af44..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLSquaredDifference.cpp
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLSquaredDifference.h"
-
-#include "arm_compute/core/CL/kernels/CLSquaredDifferenceKernel.h"
-#include "arm_compute/core/CL/ICLTensor.h"
-
-using namespace arm_compute;
-
-void CLSquaredDifference::configure(ICLTensor *input1, ICLTensor *input2, ICLTensor *output)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLSquaredDifferenceKernel>();
- k->configure(input1, input2, output);
- _kernel = std::move(k);
-
- if (output->info()->dimension(0) > 1)
- {
- ICLTensor *broadcasted_info = (input1->info()->dimension(0) == 1) ? input1 : input2;
-
- if (broadcasted_info->info()->dimension(0) == 1)
- {
- _border_handler.configure(broadcasted_info, _kernel->border_size(), BorderMode::REPLICATE);
- }
- }
-}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLStridedSliceEx.cpp b/libs/ARMComputeEx/src/runtime/CL/functions/CLStridedSliceEx.cpp
deleted file mode 100644
index be7353493..000000000
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLStridedSliceEx.cpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2017 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/CL/functions/CLStridedSliceEx.h"
-
-#include "arm_compute/core/CL/kernels/CLStridedSliceExKernel.h"
-
-using namespace arm_compute;
-
-void CLStridedSliceEx::configure(const ICLTensor *input, ICLTensor *output, ICLTensor *beginData,
- ICLTensor *endData, ICLTensor *stridesData, int32_t beginMask,
- int32_t endMask, int32_t shrinkAxisMask)
-{
- auto k = arm_compute::support::cpp14::make_unique<CLStridedSliceExKernel>();
- k->configure(input, output, beginData, endData, stridesData, beginMask, endMask, shrinkAxisMask);
- _kernel = std::move(k);
-}
diff --git a/libs/ARMComputeEx/src/runtime/NEON/functions/NENormalizationLayerEx.cpp b/libs/ARMComputeEx/src/runtime/NEON/functions/NENormalizationLayerEx.cpp
deleted file mode 100644
index 988e92715..000000000
--- a/libs/ARMComputeEx/src/runtime/NEON/functions/NENormalizationLayerEx.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (c) 2016-2018 ARM Limited.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "arm_compute/runtime/NEON/functions/NENormalizationLayerEx.h"
-#include "arm_compute/runtime/NEON/NEScheduler.h"
-
-using namespace arm_compute;
-
-NENormalizationLayerEx::NENormalizationLayerEx(std::shared_ptr<IMemoryManager> memory_manager)
- : _memory_group(std::move(memory_manager)), _norm_kernel(), _multiply_kernel(),
- _border_handler(), _input_squared()
-{
-}
-
-void NENormalizationLayerEx::configure(const ITensor *input, ITensor *output,
- const NormalizationLayerInfo &norm_info)
-{
- ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
-
- TensorInfo tensor_info(input->info()->tensor_shape(), 1, input->info()->data_type(),
- input->info()->quantization_info());
- _input_squared.allocator()->init(tensor_info);
-
- // Manage intermediate buffers
- _memory_group.manage(&_input_squared);
-
- // Configure kernels
- _norm_kernel.configure(input, &_input_squared, output, norm_info);
- _multiply_kernel.configure(input, input, &_input_squared, 1.0f, ConvertPolicy::SATURATE,
- RoundingPolicy::TO_ZERO);
- _border_handler.configure(&_input_squared, _norm_kernel.border_size(), BorderMode::CONSTANT,
- PixelValue(0.0f));
-
- // Allocate the tensor once the configure methods have been called
- _input_squared.allocator()->allocate();
-}
-
-Status NENormalizationLayerEx::validate(const ITensorInfo *input, const ITensorInfo *output,
- const NormalizationLayerInfo &norm_info)
-{
- // Perform validation step
- ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, output);
-
- ARM_COMPUTE_RETURN_ON_ERROR(
- NENormalizationLayerExKernel::validate(input, input, output, norm_info));
- ARM_COMPUTE_RETURN_ON_ERROR(NEPixelWiseMultiplicationKernel::validate(
- input, input, output, 1.0f, ConvertPolicy::SATURATE, RoundingPolicy::TO_ZERO));
-
- return Status{};
-}
-
-void NENormalizationLayerEx::run()
-{
- _memory_group.acquire();
-
- NEScheduler::get().schedule(&_multiply_kernel, Window::DimY);
- NEScheduler::get().schedule(&_border_handler, Window::DimY);
- NEScheduler::get().schedule(&_norm_kernel, Window::DimY);
-
- _memory_group.release();
-}
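
The reason the function above precomputes input * input into _input_squared before normalizing: every output element needs a windowed sum of squares, so squaring each element once and summing from that buffer avoids re-squaring neighbours shared between windows. A scalar analogue of the squaring pass:

#include <cstddef>
#include <vector>

std::vector<float> squares(const std::vector<float> &in)
{
  std::vector<float> sq(in.size());
  for (std::size_t i = 0; i < in.size(); ++i)
    sq[i] = in[i] * in[i]; // reused by every window covering element i
  return sq;
}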
diff --git a/libs/cpp14/include/cpp14/memory.h b/libs/cpp14/include/cpp14/memory.h
deleted file mode 100644
index b3e678baa..000000000
--- a/libs/cpp14/include/cpp14/memory.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * @file memory.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains @c make_unique which is not supported by C++11
- */
-#ifndef __NNFW_CPP14_MEMORY_H__
-#define __NNFW_CPP14_MEMORY_H__
-
-#include <memory>
-
-namespace nnfw
-{
-namespace cpp14
-{
-/**
- * @brief Provide the @c make_unique function introduced in C++14
- * @param[in] args List of arguments with which an instance of T will be constructed.
- * @return @c std::unique_ptr of an instance of type T
- */
-template <typename T, typename... Args> std::unique_ptr<T> make_unique(Args &&... args)
-{
- // NOTE std::make_unique is missing in C++11 standard
- return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
-}
-
-} // namespace cpp14
-} // namespace nnfw
-
-#endif // __NNFW_CPP14_MEMORY_H__
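For reference, a minimal sketch of how callers use this make_unique backfill; the Widget type below is illustrative, not part of the tree:

#include "cpp14/memory.h"

struct Widget
{
  Widget(int v) : value(v) {}
  int value;
};

int main()
{
  // Constructs Widget(42) and wraps it in a std::unique_ptr<Widget>
  auto w = nnfw::cpp14::make_unique<Widget>(42);
  return (w->value == 42) ? 0 : 1;
}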
diff --git a/libs/misc/CMakeLists.txt b/libs/misc/CMakeLists.txt
deleted file mode 100644
index cd01695fb..000000000
--- a/libs/misc/CMakeLists.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-# Library `nnfw_lib_misc`
-set(NNFW_UTILITY_SRCS src/environment.cpp)
-list(APPEND NNFW_UTILITY_SRCS src/tensor/Shape.cpp)
-list(APPEND NNFW_UTILITY_SRCS src/tensor/NonIncreasingStride.cpp)
-list(APPEND NNFW_UTILITY_SRCS src/tensor/IndexFormatter.cpp)
-list(APPEND NNFW_UTILITY_SRCS src/tensor/Comparator.cpp)
-
-add_library(nnfw_lib_misc STATIC ${NNFW_UTILITY_SRCS})
-target_include_directories(nnfw_lib_misc PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
-set_target_properties(nnfw_lib_misc PROPERTIES POSITION_INDEPENDENT_CODE ON)
-
-add_executable(nnfw_tensor_index_iterator "examples/tensor_index_iterator.cpp")
-target_link_libraries(nnfw_tensor_index_iterator nnfw_lib_misc)
diff --git a/libs/misc/examples/tensor_index_iterator.cpp b/libs/misc/examples/tensor_index_iterator.cpp
deleted file mode 100644
index 8a19dac87..000000000
--- a/libs/misc/examples/tensor_index_iterator.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "misc/tensor/IndexIterator.h"
-
-#include <array>
-
-#include <iostream>
-#include <algorithm>
-
-#include <cassert>
-
-void test_iterate(void)
-{
- const nnfw::misc::tensor::Shape shape{3, 4, 7};
-
- std::array<int, 3 * 4 * 7> array;
-
- array.fill(0);
-
- using nnfw::misc::tensor::iterate;
- using nnfw::misc::tensor::Index;
-
- iterate(shape) << [&](const Index &index) {
- assert(index.rank() == shape.rank());
-
- const size_t rank = index.rank();
-
- uint32_t offset = index.at(0);
-
- for (size_t axis = 1; axis < rank; ++axis)
- {
- offset *= shape.dim(axis);
- offset += index.at(axis);
- }
-
- array[offset] += 1;
- };
-
- assert(std::all_of(array.begin(), array.end(), [](int num) { return num == 1; }));
-}
-
-int main(int argc, char **argv)
-{
- test_iterate();
-
- nnfw::misc::tensor::Shape shape{3, 4, 3, 4};
-
- std::cout << "Iterate over tensor{3, 4, 3, 4}" << std::endl;
-
- nnfw::misc::tensor::iterate(shape) << [](const nnfw::misc::tensor::Index &index) {
- std::cout << "rank: " << index.rank() << std::endl;
-
- for (size_t d = 0; d < index.rank(); ++d)
- {
- std::cout << " offset(" << d << ") = " << index.at(d) << std::endl;
- }
- };
-
- return 0;
-}
diff --git a/libs/misc/include/misc/EnvVar.h b/libs/misc/include/misc/EnvVar.h
deleted file mode 100644
index 47206d4c0..000000000
--- a/libs/misc/include/misc/EnvVar.h
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file EnvVar.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains nnfw::misc::EnvVar class
- */
-
-#ifndef __NNFW_MISC_ENV_VAR__
-#define __NNFW_MISC_ENV_VAR__
-
-#include <algorithm>
-#include <array>
-#include <cstdlib>
-#include <string>
-
-namespace nnfw
-{
-namespace misc
-{
-/**
- * @brief Class to access environment variable
- */
-class EnvVar
-{
-public:
- /**
- * @brief Construct a new EnvVar object
- * @param[in] key environment variable
- */
- EnvVar(const std::string &key)
- {
- const char *value = std::getenv(key.c_str());
- if (value == nullptr)
- {
- // A missing variable is treated the same as an empty value
- _value = "";
- }
- else
- {
- _value = value;
- }
- }
-
- /**
- * @brief Get environment variable of string type
- * @param[in] def Default value of environment variable
- * @return Default value passed as a parameter when there is no such environment variable,
- * otherwise the value of the environment variable passed into the constructor
- */
- std::string asString(const std::string &def) const
- {
- if (_value.empty())
- return def;
- return _value;
- }
-
- /**
- * @brief Get environment variable of boolean type
- * @param[in] def Default value of environment variable
- * @return Default value passed as a parameter when there is no such environment variable,
- * otherwise the value of the environment variable passed into the constructor
- */
- bool asBool(bool def) const
- {
- if (_value.empty())
- return def;
- static const std::array<std::string, 5> false_list{"0", "OFF", "FALSE", "N", "NO"};
- auto false_found = std::find(false_list.begin(), false_list.end(), _value);
- return (false_found == false_list.end());
- }
-
- /**
- * @brief Get environment variable of int type
- * @param[in] def Default value of environment variable
- * @return Default value passed as a parameter when there is no such environment variable,
- * otherwise the value of the environment variable passed into the constructor
- */
- int asInt(int def) const
- {
- if (_value.empty())
- return def;
- return std::stoi(_value);
- }
-
-private:
- std::string _value;
-};
-
-} // namespace misc
-} // namespace nnfw
-
-#endif // __NNFW_MISC_ENV_VAR__
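A short usage sketch of the class above; the variable name NNFW_LOG_LEVEL is illustrative only:

#include "misc/EnvVar.h"

#include <iostream>

int main()
{
  nnfw::misc::EnvVar var{"NNFW_LOG_LEVEL"}; // illustrative name

  // Each accessor returns the supplied default when the variable is unset
  std::cout << var.asString("info") << '\n';
  std::cout << var.asBool(false) << '\n';
  std::cout << var.asInt(0) << '\n';
  return 0;
}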
diff --git a/libs/misc/include/misc/environment.h b/libs/misc/include/misc/environment.h
deleted file mode 100644
index 8e6bd00d5..000000000
--- a/libs/misc/include/misc/environment.h
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file environment.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains utility functions and classes to access environment variables
- */
-
-#ifndef __UTIL_ENVIRONMENT_H__
-#define __UTIL_ENVIRONMENT_H__
-
-namespace nnfw
-{
-namespace misc
-{
-
-/**
- * @brief Get the environment variable of int type
- * @param[in] name Name of the environment variable
- * @param[in] defaultValue Default value when the value of environment variable does not exist
- * @return The int value of the environment variable
- */
-int get_env_int(const char *name, int defaultValue = 0);
-
-/**
- * @brief Get the environment variable of bool type
- * @param[in] name Name of the environment variable
- * @param[in] defaultValue Default value when the value of environment variable does not exist
- * @return @c false if the value of the environment variable is @c "0", @c true for any
- * other number, and @c defaultValue when the variable is unset
- */
-bool get_env_bool(const char *name, bool defaultValue = false);
-} // namespace misc
-} // namespace nnfw
-
-#include <string>
-
-namespace nnfw
-{
-namespace misc
-{
-namespace env
-{
-/**
- * @brief Parent struct of @ref IntAccessor and @ref FloatAccessor
- * @tparam T Type of the value of environment variable
- */
-template <typename T> struct Accessor
-{
- /**
- * @brief Destroy the Accessor object
- */
- virtual ~Accessor() = default;
- /**
- * @brief Read the value of environment variable
- * @param[out] out The value of environment variable
- * @return @c true if accessing the environment variable is successful,
- * @c false if no such environment variable exists
- */
- virtual bool access(T &out) const = 0;
-};
-
-/**
- * @brief Class to read int environment variable
- */
-class IntAccessor : public Accessor<int>
-{
-public:
- /**
- * @brief Construct a new IntAccessor object
- * @param[in] tag Name of environment variable
- */
- IntAccessor(const std::string &tag);
-
-public:
- /**
- * @brief Read the value of environment variable
- * @param[out] out The value of environment variable
- * @return @c true if accessing the environment variable is successful,
- * @c false if no such environment variable exists
- */
- bool access(int &out) const override;
-
-private:
- std::string _tag;
-};
-
-/**
- * @brief Class to read float environment variable
- */
-class FloatAccessor : public Accessor<float>
-{
-public:
- /**
- * @brief Construct a new FloatAccessor object
- * @param[in] tag Name of environment variable
- */
- FloatAccessor(const std::string &tag);
-
-public:
- /**
- * @brief Read the value of environment variable
- * @param[out] out The value of environment variable
- * @return @c true if accessing the environment variable is successful,
- * @c false if no such environment variable exists
- */
- bool access(float &out) const override;
-
-private:
- std::string _tag;
-};
-
-} // namespace env
-} // namespace misc
-} // namespace nnfw
-
-#endif // __UTIL_ENVIRONMENT_H__
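A minimal sketch exercising both APIs declared above (the variable names are illustrative):

#include "misc/environment.h"

int main()
{
  // Free functions: fall back to the default when the variable is unset
  int threads = nnfw::misc::get_env_int("NNFW_NUM_THREADS", 1);
  bool trace = nnfw::misc::get_env_bool("NNFW_TRACE", false);

  // Accessor style: access() reports whether the variable actually existed
  int level = 0;
  nnfw::misc::env::IntAccessor accessor{"NNFW_LOG_LEVEL"};
  if (!accessor.access(level))
  {
    level = 0; // variable not set; keep the default
  }

  (void)threads;
  (void)trace;
  return level;
}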
diff --git a/libs/misc/include/misc/kernel/RandomObject.h b/libs/misc/include/misc/kernel/RandomObject.h
deleted file mode 100644
index 4b58b0c7f..000000000
--- a/libs/misc/include/misc/kernel/RandomObject.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file RandomObject.h
- * @brief This file contains RandomObject class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_MISC_KERNEL_RANDOM_OBJECT_H__
-#define __NNFW_MISC_KERNEL_RANDOM_OBJECT_H__
-
-#include "misc/kernel/Shape.h"
-#include "misc/kernel/Reader.h"
-
-#include <vector>
-
-namespace nnfw
-{
-namespace misc
-{
-namespace kernel
-{
-
-template <typename T> class RandomObject final : public Reader<T>
-{
-public:
- RandomObject(const Shape &shape) : _shape{shape}
- {
- const uint32_t size = _shape.N * _shape.C * _shape.H * _shape.W;
-
- // TODO Use random number
- for (uint32_t off = 0; off < size; ++off)
- {
- _value.emplace_back(static_cast<float>(off));
- }
- }
-
-public:
- const Shape &shape(void) const { return _shape; }
-
-public:
- T at(uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) const override
- {
- uint32_t index = 0;
-
- index += nth * _shape.C * _shape.H * _shape.W;
- index += ch * _shape.H * _shape.W;
- index += row * _shape.W;
- index += col;
-
- return _value.at(index);
- }
-
-private:
- const Shape _shape;
- std::vector<T> _value;
-};
-
-} // namespace kernel
-} // namespace misc
-} // namespace nnfw
-
-#endif // __NNFW_MISC_KERNEL_RANDOM_OBJECT_H__
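A small sketch of reading RandomObject through the Reader interface; it assumes kernel::Shape (not shown in this hunk) carries N/C/H/W and is constructible from four values in that order:

#include "misc/kernel/RandomObject.h"

int main()
{
  // Assumption: kernel::Shape takes {N, C, H, W}
  const nnfw::misc::kernel::Shape shape{1, 2, 3, 4};

  nnfw::misc::kernel::RandomObject<float> obj{shape};

  // Values are currently filled sequentially (see the TODO above), so the
  // flat NCHW offset 0*24 + 1*12 + 2*4 + 3 = 23 reads back 23.0f
  return (obj.at(0, 1, 2, 3) == 23.0f) ? 0 : 1;
}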
diff --git a/libs/misc/include/misc/tensor/Index.h b/libs/misc/include/misc/tensor/Index.h
deleted file mode 100644
index a08d7099e..000000000
--- a/libs/misc/include/misc/tensor/Index.h
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Index.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains nnfw::misc::tensor::Index struct
- */
-#ifndef __NNFW_MISC_TENSOR_INDEX_H__
-#define __NNFW_MISC_TENSOR_INDEX_H__
-
-#include <cstdint>
-#include <cstddef>
-
-#include <vector>
-#include <initializer_list>
-
-namespace nnfw
-{
-namespace misc
-{
-namespace tensor
-{
-
-/**
- * @brief Struct to represent index of each dimension of a tensor
- */
-struct Index
-{
-public:
- /**
- * @brief Construct a new @c Index object
- * @param[in] rank Rank of a tensor
- */
- Index(size_t rank) { _offsets.resize(rank); }
-
-public:
- /**
- * @brief Construct a new @c Index object
- * @param[in] offsets Offset of each dimension, as @c std::initializer_list<int32_t>
- */
- Index(std::initializer_list<int32_t> offsets) : _offsets{offsets}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Get the rank
- * @return Rank that this @c Index object can handle
- */
- size_t rank(void) const { return _offsets.size(); }
-
-public:
- /**
- * @brief Get the index n'th dimension
- * @param[in] n Dimension
- * @return index of n'th dimension
- */
- int32_t at(size_t n) const { return _offsets.at(n); }
-
- /**
- * @brief Get the reference of the index n'th dimension
- * @param[in] n Dimension
- * @return reference of index of n'th dimension
- */
- int32_t &at(size_t n) { return _offsets.at(n); }
-
-private:
- std::vector<int32_t> _offsets;
-};
-
-/**
- * @brief Copy an @c Index with reversed order
- * @param[in] origin @c Index object to copy
- * @return an @c Index object with reversed order
- * @note This is used to convert NNAPI tensor index to ARM tensor index or vice versa
- */
-inline static Index copy_reverse(const Index &origin)
-{
- size_t rank = origin.rank();
- Index target(rank);
- for (int i = 0; i < rank; i++)
- target.at(i) = origin.at(rank - 1 - i);
- return target;
-}
-
-} // namespace tensor
-} // namespace misc
-} // namespace nnfw
-
-#endif // __NNFW_MISC_TENSOR_INDEX_H__
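A minimal sketch of copy_reverse, the NNAPI/ARM index-order conversion mentioned in the note above:

#include "misc/tensor/Index.h"

int main()
{
  using nnfw::misc::tensor::Index;
  using nnfw::misc::tensor::copy_reverse;

  Index nnapi_index{1, 2, 3};
  Index arm_index = copy_reverse(nnapi_index); // now {3, 2, 1}

  return (arm_index.at(0) == 3) ? 0 : 1;
}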
diff --git a/libs/misc/include/misc/tensor/IndexEnumerator.h b/libs/misc/include/misc/tensor/IndexEnumerator.h
deleted file mode 100644
index 4912ea289..000000000
--- a/libs/misc/include/misc/tensor/IndexEnumerator.h
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file IndexEnumerator.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains nnfw::misc::tensor::IndexEnumerator class
- */
-
-#ifndef __NNFW_MISC_TENSOR_INDEX_ENUMERATOR_H__
-#define __NNFW_MISC_TENSOR_INDEX_ENUMERATOR_H__
-
-#include "misc/tensor/Shape.h"
-#include "misc/tensor/Index.h"
-
-namespace nnfw
-{
-namespace misc
-{
-namespace tensor
-{
-/**
- * @brief Class to enumerate index of a tensor
- *
- */
-class IndexEnumerator
-{
-public:
- /**
- * @brief Construct a new @c IndexEnumerator object
- * @param[in] shape Shape of the tensor whose indices will be enumerated
- */
- explicit IndexEnumerator(const Shape &shape) : _shape(shape), _index(shape.rank()), _cursor(0)
- {
- const size_t rank = _shape.rank();
-
- for (size_t axis = 0; axis < rank; ++axis)
- {
- _index.at(axis) = 0;
- }
-
- for (_cursor = 0; _cursor < rank; ++_cursor)
- {
- if (_index.at(_cursor) < _shape.dim(_cursor))
- {
- break;
- }
- }
- }
-
-public:
- /**
- * @brief Prevent move construction of an @c IndexEnumerator object
- */
- IndexEnumerator(IndexEnumerator &&) = delete;
- /**
- * @brief Prevent copy construction
- */
- IndexEnumerator(const IndexEnumerator &) = delete;
-
-public:
- /**
- * @brief Check if more enumeration is available
- * @return @c true if more @c advance() is available, otherwise @c false
- */
- bool valid(void) const { return _cursor < _shape.rank(); }
-
-public:
- /**
- * @brief Get the current index to enumerate
- * @return Current index
- */
- const Index &curr(void) const { return _index; }
-
-public:
- /**
- * @brief Advance index by +1
- */
- void advance(void)
- {
- const size_t rank = _shape.rank();
-
- // Find axis to be updated
- while ((_cursor < rank) && !(_index.at(_cursor) + 1 < _shape.dim(_cursor)))
- {
- ++_cursor;
- }
-
- if (_cursor == rank)
- {
- return;
- }
-
- // Update index
- _index.at(_cursor) += 1;
-
- for (size_t axis = 0; axis < _cursor; ++axis)
- {
- _index.at(axis) = 0;
- }
-
- // Update cursor
- _cursor = 0;
- }
-
-public:
- const Shape _shape; //!< Shape to enumerate
-
-private:
- size_t _cursor;
- Index _index;
-};
-
-} // namespace tensor
-} // namespace misc
-} // namespace nnfw
-
-#endif // __NNFW_MISC_TENSOR_INDEX_ENUMERATOR_H__
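The valid()/advance()/curr() triple above is designed to be driven as a plain loop; a minimal sketch:

#include "misc/tensor/IndexEnumerator.h"

#include <cstddef>

int main()
{
  const nnfw::misc::tensor::Shape shape{2, 3};

  std::size_t count = 0;
  for (nnfw::misc::tensor::IndexEnumerator e{shape}; e.valid(); e.advance())
  {
    // e.curr() visits each of the 2 * 3 = 6 indices exactly once
    ++count;
  }
  return (count == 6) ? 0 : 1;
}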
diff --git a/libs/misc/include/misc/tensor/Shape.h b/libs/misc/include/misc/tensor/Shape.h
deleted file mode 100644
index 6e6c23502..000000000
--- a/libs/misc/include/misc/tensor/Shape.h
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Shape.h
- * @ingroup COM_AI_RUNTIME
- * @brief This file contains nnfw::misc::tensor::Shape class
- */
-
-#ifndef __NNFW_MISC_TENSOR_SHAPE_H__
-#define __NNFW_MISC_TENSOR_SHAPE_H__
-
-#include <cstdint>
-#include <cstddef>
-#include <deque>
-#include <initializer_list>
-#include <ostream>
-#include <string>
-
-namespace nnfw
-{
-namespace misc
-{
-namespace tensor
-{
-
-/**
- * @brief Class to represent shape of a tensor
- */
-class Shape
-{
-public:
- /**
- * @brief Construct a new Shape object
- * @param[in] rank Rank of a tensor
- */
- Shape(size_t rank) { _dimensions.resize(rank); }
-
-public:
- /**
- * @brief Construct a new Shape object
- * @param[in] dimensions @c initializer_list<int32_t> of dimensions of tensor
- */
- Shape(const std::initializer_list<int32_t> &dimensions) : _dimensions{dimensions}
- {
- // DO NOTHING
- }
-
- /**
- * @brief Construct a new Shape object
- * @param[in] origin @c Shape object to copy
- */
- Shape(const Shape &origin) = default;
-
-public:
- /**
- * @brief Add dimension to the beginning
- * @param[in] d dimension to add to the beginning
- * @return N/A
- */
- void prepend(int32_t d) { _dimensions.emplace_front(d); }
-
- /**
- * @brief Add dimension to the back
- * @param[in] d dimension to add to the back
- * @return N/A
- */
- void append(int32_t d) { _dimensions.emplace_back(d); }
-
-public:
- /**
- * @brief Get the rank of this shape
- * @return rank
- */
- size_t rank(void) const { return _dimensions.size(); }
-
-public:
- /**
- * @brief Get specific dimension
- * @param[in] n Index of dimension
- * @return n'th dimension
- */
- int32_t dim(size_t n) const { return _dimensions.at(n); }
-
- /**
- * @brief Get the reference of specific dimension
- * @param[in] n Index of dimension
- * @return Reference of n'th dimension
- */
- int32_t &dim(size_t n) { return _dimensions.at(n); }
-
-public:
- /**
- * @brief Get the number of elements specified by this shape
- * @return The number of elements
- */
- size_t element_nums() const
- {
- size_t nums = 1;
- for (auto d : _dimensions)
- {
- nums *= d;
- }
- return nums;
- }
-
-private:
- std::deque<int32_t> _dimensions;
-
-public:
- /**
- * @brief Get a @c Shape object after parsing string
- * @param[in] s String of dimension list. Accepted format is numbers separated by comma.
- * @return @c Shape object
- */
- static Shape from(const std::string &s);
-};
-
-/**
- * @brief Check equality of two @c Shape
- * @param[in] Shape First shape to compare
- * @param[in] Shape Second shape to compare
- * @return @c true if both shapes are equal, otherwise @c false
- */
-bool operator==(const Shape &, const Shape &);
-
-/**
- * @brief Send @c Shape to @c std::ostream
- * @param[in] os @c std::ostream to process this @c Shape
- * @param[in] shape @c Shape to send to @c ostream
- * @return Reference of @c std::ostream
- */
-std::ostream &operator<<(std::ostream &os, const Shape &shape);
-
-} // namespace tensor
-} // namespace misc
-} // namespace nnfw
-
-#endif // __NNFW_MISC_TENSOR_SHAPE_H__
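A compact sketch of the Shape API above, including the comma-separated parser:

#include "misc/tensor/Shape.h"

int main()
{
  using nnfw::misc::tensor::Shape;

  Shape shape = Shape::from("3,4,7"); // parse "3,4,7" into {3, 4, 7}
  shape.prepend(1);                   // now {1, 3, 4, 7}

  return (shape.rank() == 4 && shape.element_nums() == 84) ? 0 : 1;
}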
diff --git a/libs/misc/src/environment.cpp b/libs/misc/src/environment.cpp
deleted file mode 100644
index e39f18d62..000000000
--- a/libs/misc/src/environment.cpp
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <string.h>
-#include <cstdlib>
-#include <string>
-
-#include "misc/environment.h"
-
-namespace nnfw
-{
-namespace misc
-{
-
-int get_env_int(const char *name, int defaultValue)
-{
- const char *value = std::getenv(name);
- if (value != nullptr)
- return std::stoi(value);
- return defaultValue;
-}
-
-bool get_env_bool(const char *name, bool defaultValue)
-{
- const char *value = std::getenv(name);
- if (value != nullptr)
- {
- return std::stoi(value) != 0;
- }
-
- return defaultValue;
-}
-
-} // namespace misc
-} // namespace nnfw
-
-namespace nnfw
-{
-namespace misc
-{
-namespace env
-{
-
-IntAccessor::IntAccessor(const std::string &tag) : _tag{tag}
-{
- // DO NOTHING
-}
-
-bool IntAccessor::access(int &out) const
-{
- auto value = std::getenv(_tag.c_str());
-
- if (value == nullptr)
- {
- return false;
- }
-
- out = std::stoi(value);
- return true;
-}
-
-FloatAccessor::FloatAccessor(const std::string &tag) : _tag{tag}
-{
- // DO NOTHING
-}
-
-bool FloatAccessor::access(float &out) const
-{
- auto value = std::getenv(_tag.c_str());
-
- if (value == nullptr)
- {
- return false;
- }
-
- out = std::stof(value);
- return true;
-}
-
-} // namespace env
-} // namespace misc
-} // namespace nnfw
diff --git a/libs/misc/src/tensor/Comparator.cpp b/libs/misc/src/tensor/Comparator.cpp
deleted file mode 100644
index 013c9eed2..000000000
--- a/libs/misc/src/tensor/Comparator.cpp
+++ /dev/null
@@ -1,40 +0,0 @@
-#include "misc/tensor/Comparator.h"
-#include "misc/tensor/Zipper.h"
-
-#include "misc/fp32.h"
-
-namespace nnfw
-{
-namespace misc
-{
-namespace tensor
-{
-
-std::vector<Diff<float>> Comparator::compare(const Shape &shape, const Reader<float> &expected,
- const Reader<float> &obtained,
- Observer *observer) const
-{
- std::vector<Diff<float>> res;
-
- zip(shape, expected, obtained) <<
- [&](const Index &index, float expected_value, float obtained_value) {
- const auto relative_diff = nnfw::misc::fp32::relative_diff(expected_value, obtained_value);
-
- if (!_compare_fn(expected_value, obtained_value))
- {
- res.emplace_back(index, expected_value, obtained_value);
- }
-
- // Let the observer (if any) see every element, e.g. to track the maximum diff
- if (observer != nullptr)
- {
- observer->notify(index, expected_value, obtained_value);
- }
- };
-
- return res;
-}
-
-} // namespace tensor
-} // namespace misc
-} // namespace nnfw
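A sketch of driving compare(); the Comparator constructor is not shown in this hunk, so the predicate-taking form below is an assumption based on the _compare_fn member:

#include "misc/tensor/Comparator.h"

#include <vector>

using namespace nnfw::misc::tensor;

// Collects every element where expected and obtained differ (an exact-match
// predicate, for illustration only)
std::vector<Diff<float>> find_mismatches(const Shape &shape, const Reader<float> &expected,
                                         const Reader<float> &obtained)
{
  Comparator comparator{[](float e, float o) { return e == o; }}; // assumed ctor

  // A null observer skips the per-element notify() callback
  return comparator.compare(shape, expected, obtained, nullptr);
}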
diff --git a/libs/misc/src/tensor/Shape.cpp b/libs/misc/src/tensor/Shape.cpp
deleted file mode 100644
index 675695e8e..000000000
--- a/libs/misc/src/tensor/Shape.cpp
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "misc/tensor/Shape.h"
-
-#include <cassert>
-
-namespace nnfw
-{
-namespace misc
-{
-namespace tensor
-{
-
-bool operator==(const Shape &lhs, const Shape &rhs)
-{
- if (lhs.rank() != rhs.rank())
- {
- return false;
- }
-
- for (size_t axis = 0; axis < lhs.rank(); ++axis)
- {
- if (lhs.dim(axis) != rhs.dim(axis))
- {
- return false;
- }
- }
-
- return true;
-}
-
-Shape Shape::from(const std::string &str)
-{
- Shape shape(0);
-
- bool pending = false;
- int value = 0;
-
- for (const char *cur = str.c_str(); true; ++cur)
- {
- if (*cur == ',' || *cur == '\0')
- {
- if (pending)
- {
- shape.append(value);
- }
-
- if (*cur == '\0')
- {
- break;
- }
-
- pending = false;
- value = 0;
- continue;
- }
-
- assert(*cur >= '0' && *cur <= '9');
-
- pending = true;
- value *= 10;
- value += *cur - '0';
- }
-
- return shape;
-}
-
-std::ostream &operator<<(std::ostream &os, const Shape &shape)
-{
- if (shape.rank() > 0)
- {
- os << shape.dim(0);
-
- for (uint32_t axis = 1; axis < shape.rank(); ++axis)
- {
- os << "," << shape.dim(axis);
- }
- }
-
- return os;
-}
-
-} // namespace tensor
-} // namespace misc
-} // namespace nnfw
diff --git a/libs/profiling/CMakeLists.txt b/libs/profiling/CMakeLists.txt
deleted file mode 100644
index 7169508a1..000000000
--- a/libs/profiling/CMakeLists.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-file(GLOB_RECURSE SOURCES "src/*.cpp")
-
-add_library(nnfw_lib_profiling STATIC ${SOURCES})
-set_property(TARGET nnfw_lib_profiling PROPERTY POSITION_INDEPENDENT_CODE ON)
-target_include_directories(nnfw_lib_profiling PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
diff --git a/libs/profiling/include/profiling/profile_buffer.h b/libs/profiling/include/profiling/profile_buffer.h
deleted file mode 100644
index 83cd3eb2b..000000000
--- a/libs/profiling/include/profiling/profile_buffer.h
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This header is derived from the following file (in TensorFlow v1.12)
-// 'externals/tensorflow/tensorflow/contrib/lite/profiling/profile_buffer.h
-#ifndef TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILE_BUFFER_H_
-#define TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILE_BUFFER_H_
-
-#include <cstddef>
-#include <cstdint>
-
-#include "profiling/time.h"
-
-namespace tflite {
-namespace profiling {
-
-// A profiling event.
-struct ProfileEvent {
- // Describes the type of event.
- // The event_metadata field may contain additional data for interpreting
- // the event.
- enum class EventType {
- // Default event type, the metadata field has no special significance.
- DEFAULT = 0,
- // The event is an operator invocation and the event_metadata field is the
- // index of operator node.
- OPERATOR_INVOKE_EVENT = 1
- };
-
- // Label of the event. This usually describes the event.
- const char* tag;
- // Timestamp in microseconds when the event began.
- uint64_t begin_timestamp_us;
- // Timestamp in microseconds when the event ended.
- uint64_t end_timestamp_us;
- // The field containing the type of event. This must be one of the event types
- // in EventType.
- EventType event_type;
- // Extra data describing the details of the event.
- uint32_t event_metadata;
-};
-} // namespace profiling
-} // namespace tflite
-
-#ifdef TFLITE_PROFILING_ENABLED
-
-#include <sys/time.h>
-#include <vector>
-
-namespace tflite {
-namespace profiling {
-constexpr uint32_t kInvalidEventHandle = static_cast<uint32_t>(~0) - 1;
-
-// A ring buffer of profile events.
-// This class is not thread safe.
-class ProfileBuffer {
- public:
- ProfileBuffer(uint32_t max_num_entries, bool enabled)
- : enabled_(enabled), current_index_(0), event_buffer_(max_num_entries) {}
-
- // Adds an event to the buffer with begin timestamp set to the current
- // timestamp. Returns a handle to event that can be used to call EndEvent. If
- // buffer is disabled this has no effect.
- // The tag of the event should remain valid till the buffer is valid.
- uint32_t BeginEvent(const char* tag, ProfileEvent::EventType event_type,
- uint32_t event_metadata) {
- if (!enabled_) {
- return kInvalidEventHandle;
- }
- uint64_t timestamp = time::NowMicros();
- int index = current_index_ % event_buffer_.size();
- event_buffer_[index].tag = tag;
- event_buffer_[index].event_type = event_type;
- event_buffer_[index].event_metadata = event_metadata;
- event_buffer_[index].begin_timestamp_us = timestamp;
- event_buffer_[index].end_timestamp_us = 0;
- current_index_++;
- return index;
- }
-
- // Sets the enabled state of buffer to |enabled|
- void SetEnabled(bool enabled) { enabled_ = enabled; }
-
- // Sets the end timestamp for event for the handle to current time.
- // If the buffer is disabled or the previous event has been overwritten, this
- // operation has no effect.
- void EndEvent(uint32_t event_handle) {
- if (!enabled_ || event_handle == kInvalidEventHandle ||
- event_handle > current_index_) {
- return;
- }
- const uint32_t max_size = event_buffer_.size();
- if (current_index_ > (max_size + event_handle)) {
- // Ignore, buffer has already overflowed.
- return;
- }
-
- int event_index = event_handle % max_size;
- event_buffer_[event_index].end_timestamp_us = time::NowMicros();
- }
-
- // Returns the size of the buffer.
- size_t Size() const {
- return (current_index_ >= event_buffer_.size()) ? event_buffer_.size()
- : current_index_;
- }
-
- // Resets the buffer.
- void Reset() {
- enabled_ = false;
- current_index_ = 0;
- }
-
- // Returns the profile event at the given index. If the index is invalid a
- // nullptr is returned. The returned event may get overwritten if more events
- // are added to buffer.
- const struct ProfileEvent* const At(int index) const {
- size_t size = Size();
- if (index >= size) {
- return nullptr;
- }
- const uint32_t max_size = event_buffer_.size();
- uint32_t start =
- (current_index_ > max_size) ? current_index_ % max_size : max_size;
- index = (index + start) % max_size;
- return &event_buffer_[index];
- }
-
- private:
- bool enabled_;
- uint32_t current_index_;
- std::vector<ProfileEvent> event_buffer_;
-};
-} // namespace profiling
-} // namespace tflite
-#endif // TFLITE_PROFILING_ENABLED
-#endif // TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILE_BUFFER_H_
-
-// clang-format on
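When TFLITE_PROFILING_ENABLED is defined (normally via a build flag rather than the #define shown here), the ring buffer above is used directly like this:

#define TFLITE_PROFILING_ENABLED // normally set by the build system
#include "profiling/profile_buffer.h"

int main()
{
  tflite::profiling::ProfileBuffer buffer(8, true); // 8 entries, enabled

  // The tag string must outlive the buffer
  uint32_t handle = buffer.BeginEvent(
      "MyEvent", tflite::profiling::ProfileEvent::EventType::DEFAULT, 0);
  // ... timed work ...
  buffer.EndEvent(handle);

  const tflite::profiling::ProfileEvent *event = buffer.At(0);
  return (event != nullptr && buffer.Size() == 1) ? 0 : 1;
}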
diff --git a/libs/profiling/include/profiling/profiler.h b/libs/profiling/include/profiling/profiler.h
deleted file mode 100644
index 953042da3..000000000
--- a/libs/profiling/include/profiling/profiler.h
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This header is derived from the following file (in TensorFlow v1.12)
-// 'externals/tensorflow/tensorflow/contrib/lite/profiling/profiler.h
-#ifndef TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILER_H_
-#define TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILER_H_
-
-#include <vector>
-
-#include "profiling/profile_buffer.h"
-
-#ifdef TFLITE_PROFILING_ENABLED
-
-namespace tflite {
-namespace profiling {
-class ScopedProfile;
-class ScopedOperatorProfile;
-
-// Controls whether profiling is enabled or disabled and collects profiles.
-// TFLite is used on platforms that don't have posix threads, so the profiler is
-// kept as simple as possible. It is designed to be used only on a single
-// thread.
-//
-// Profiles are collected using Scoped*Profile objects that begin and end a
-// profile event.
-// An example usage is shown in the example below:
-//
-// Say Worker class has a DoWork method and we are interested in profiling
-// the overall execution time for DoWork and time spent in Task1 and Task2
-// functions.
-//
-// class Worker {
-// public:
-// void DoWork() {
-// ScopedProfile(&controller, "DoWork");
-// Task1();
-// Task2();
-// .....
-// }
-//
-// void Task1() {
-// ScopedProfile(&controller, "Task1");
-// ....
-// }
-//
-// void Task2() {
-// ScopedProfile(&controller, "Task2");
-// }
-//
-// Profiler profiler;
-// }
-//
-// We instrument the functions that need to be profiled.
-//
-// Profiles can be collected by enabling profiling and then getting the
-// profile events.
-//
-// void ProfileWorker() {
-// Worker worker;
-// worker.profiler.EnableProfiling();
-// worker.DoWork();
-// worker.profiler.DisableProfiling();
-// // Profiling is complete, extract profiles.
-// auto profile_events = worker.profiler.GetProfiles();
-// }
-//
-//
-class Profiler {
- public:
- Profiler() : buffer_(1024, false) {}
-
- void StartProfiling() { buffer_.SetEnabled(true); }
- void StopProfiling() { buffer_.SetEnabled(false); }
- void Reset() { buffer_.Reset(); }
- std::vector<const ProfileEvent*> GetProfileEvents() {
- std::vector<const ProfileEvent*> profile_events;
- profile_events.reserve(buffer_.Size());
- for (size_t i = 0; i < buffer_.Size(); i++) {
- profile_events.push_back(buffer_.At(i));
- }
- return profile_events;
- }
-
- private:
- friend class ScopedProfile;
- friend class ScopedOperatorProfile;
- ProfileBuffer* GetProfileBuffer() { return &buffer_; }
- ProfileBuffer buffer_;
-};
-
-class ScopedProfile {
- public:
- // Adds a profile event to profile that begins with the construction
- // of object and ends when the object goes out of scope.
- // The lifetime of tag should be at least the lifetime of profiler.
-
- ScopedProfile(Profiler* profiler, const char* tag)
- : buffer_(nullptr), event_handle_(0) {
- if (profiler) {
- buffer_ = profiler->GetProfileBuffer();
- event_handle_ =
- buffer_->BeginEvent(tag, ProfileEvent::EventType::DEFAULT, 0);
- }
- }
- ~ScopedProfile() {
- if (buffer_) {
- buffer_->EndEvent(event_handle_);
- }
- }
-
- private:
- ProfileBuffer* buffer_;
- int32_t event_handle_;
-};
-
-class ScopedOperatorProfile {
- public:
- // Adds a profile event to profile that begins with the construction
- // of object and ends when the object goes out of scope.
- // The lifetime of tag should be at least the lifetime of profiler.
- ScopedOperatorProfile(Profiler* profiler, const char* tag, int node_index)
- : buffer_(nullptr), event_handle_(0) {
- if (profiler) {
- buffer_ = profiler->GetProfileBuffer();
- event_handle_ = buffer_->BeginEvent(
- tag, ProfileEvent::EventType::OPERATOR_INVOKE_EVENT, node_index);
- }
- }
-
- ~ScopedOperatorProfile() {
- if (buffer_) {
- buffer_->EndEvent(event_handle_);
- }
- }
-
- private:
- ProfileBuffer* buffer_;
- int32_t event_handle_;
-};
-
-} // namespace profiling
-} // namespace tflite
-
-#define VARNAME_UNIQ(name, ctr) name##ctr
-
-#define SCOPED_OPERATOR_PROFILE(profiler, node_index) \
- tflite::profiling::ScopedOperatorProfile VARNAME_UNIQ( \
- _profile_, __COUNTER__)((profiler), "OpInvoke", (node_index))
-#else
-
-namespace tflite {
-namespace profiling {
-// A noop version of profiler when profiling is disabled.
-class Profiler {
- public:
- Profiler() {}
- void StartProfiling() {}
- void StopProfiling() {}
- void Reset() {}
- std::vector<const ProfileEvent*> GetProfileEvents() { return {}; }
-};
-} // namespace profiling
-} // namespace tflite
-
-#define SCOPED_OPERATOR_PROFILE(profiler, node_index)
-
-#endif // TFLITE_PROFILING_ENABLED
-
-#endif // TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILER_H_
-
-// clang-format on
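A sketch mirroring the usage described in the comment block above; the node index passed to the macro is illustrative:

#include "profiling/profiler.h"

void DoWork(tflite::profiling::Profiler *profiler)
{
  // Expands to a ScopedOperatorProfile when TFLITE_PROFILING_ENABLED is
  // defined, and to nothing in the noop build
  SCOPED_OPERATOR_PROFILE(profiler, 0);
  // ... operator work ...
}

int main()
{
  tflite::profiling::Profiler profiler;
  profiler.StartProfiling();
  DoWork(&profiler);
  profiler.StopProfiling();

  // Empty in the noop build; one OPERATOR_INVOKE_EVENT entry otherwise
  auto events = profiler.GetProfileEvents();
  (void)events;
  return 0;
}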
diff --git a/libs/profiling/include/profiling/time.h b/libs/profiling/include/profiling/time.h
deleted file mode 100644
index 4b194944d..000000000
--- a/libs/profiling/include/profiling/time.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This header is derived from the following file (in TensorFlow v1.12)
-// 'externals/tensorflow/tensorflow/contrib/lite/profiling/time.h
-#ifndef TENSORFLOW_CONTRIB_LITE_PROFILING_TIME_H_
-#define TENSORFLOW_CONTRIB_LITE_PROFILING_TIME_H_
-
-#include <cstdint>
-
-namespace tflite {
-namespace profiling {
-namespace time {
-uint64_t NowMicros();
-} // namespace time
-} // namespace profiling
-} // namespace tflite
-#endif // TENSORFLOW_CONTRIB_LITE_PROFILING_TIME_H_
-
-// clang-format on
diff --git a/libs/profiling/src/profiling/time.cpp b/libs/profiling/src/profiling/time.cpp
deleted file mode 100644
index 92d8595f8..000000000
--- a/libs/profiling/src/profiling/time.cpp
+++ /dev/null
@@ -1,55 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This header is derived from the following file (in TensorFlow v1.12)
-// 'externals/tensorflow/tensorflow/contrib/lite/profiling/time.cpp
-#include "profiling/time.h"
-
-#if defined(_MSC_VER)
-#include <chrono> // NOLINT(build/c++11)
-#else
-#include <sys/time.h>
-#endif
-
-namespace tflite {
-namespace profiling {
-namespace time {
-
-#if defined(_MSC_VER)
-
-uint64_t NowMicros() {
- return std::chrono::duration_cast<std::chrono::microseconds>(
- std::chrono::system_clock::now().time_since_epoch())
- .count();
-}
-
-#else
-
-uint64_t NowMicros() {
- struct timeval tv;
- gettimeofday(&tv, nullptr);
- return static_cast<uint64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
-}
-
-#endif // defined(_MSC_VER)
-
-} // namespace time
-} // namespace profiling
-} // namespace tflite
-
-// clang-format on
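A tiny elapsed-time sketch built on NowMicros():

#include "profiling/time.h"

#include <cstdio>

int main()
{
  const uint64_t begin_us = tflite::profiling::time::NowMicros();
  // ... workload to be measured ...
  const uint64_t end_us = tflite::profiling::time::NowMicros();

  std::printf("elapsed: %llu us\n", static_cast<unsigned long long>(end_us - begin_us));
  return 0;
}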
diff --git a/libs/tflite/CMakeLists.txt b/libs/tflite/CMakeLists.txt
deleted file mode 100644
index e844d1c68..000000000
--- a/libs/tflite/CMakeLists.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-file(GLOB_RECURSE SOURCES "src/*.cpp")
-file(GLOB_RECURSE TESTS "src/*.test.cpp")
-list(REMOVE_ITEM SOURCES ${TESTS})
-
-add_library(nnfw_lib_tflite STATIC ${SOURCES})
-set_target_properties(nnfw_lib_tflite PROPERTIES POSITION_INDEPENDENT_CODE ON)
-target_include_directories(nnfw_lib_tflite PUBLIC ${NNFW_INCLUDE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/include)
-target_link_libraries(nnfw_lib_tflite tensorflow-lite ${LIB_PTHREAD} dl)
-target_link_libraries(nnfw_lib_tflite nnfw_lib_misc)
-
-add_executable(nnfw_lib_tflite_test_TensorView src/TensorView.test.cpp)
-target_link_libraries(nnfw_lib_tflite_test_TensorView nnfw_lib_tflite)
diff --git a/libs/tflite/include/tflite/Assert.h b/libs/tflite/include/tflite/Assert.h
deleted file mode 100644
index 6d12d37f6..000000000
--- a/libs/tflite/include/tflite/Assert.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Assert.h
- * @brief This file contains helper function of assertion
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_ASSERT_H__
-#define __NNFW_TFLITE_ASSERT_H__
-
-#include "tensorflow/contrib/lite/context.h"
-
-#include <sstream>
-
-#define STR_DETAIL(value) #value
-#define STR(value) STR_DETAIL(value)
-
-#define TFLITE_ENSURE(exp) \
- { \
- const TfLiteStatus status = (exp); \
- \
- if (status != kTfLiteOk) \
- { \
- std::ostringstream ss; \
- ss << #exp << " failed (" << __FILE__ << ":" << __LINE__ << ")"; \
- throw std::runtime_error{ss.str()}; \
- } \
- }
-
-#endif // __NNFW_TFLITE_ASSERT_H__
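A sketch of a call site for the macro above; AllocateTensors() is a standard TfLiteStatus-returning Interpreter method:

#include "tflite/Assert.h"

#include <tensorflow/contrib/lite/interpreter.h>

void prepare(::tflite::Interpreter &interp)
{
  // Throws std::runtime_error carrying file/line info if allocation fails
  TFLITE_ENSURE(interp.AllocateTensors());
}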
diff --git a/libs/tflite/include/tflite/Diff.h b/libs/tflite/include/tflite/Diff.h
deleted file mode 100644
index 15c672831..000000000
--- a/libs/tflite/include/tflite/Diff.h
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Diff.h
- * @brief This file contains classes for testing correctness of implementation
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_DIFF_H__
-#define __NNFW_TFLITE_DIFF_H__
-
-#include "tensorflow/contrib/lite/interpreter.h"
-
-#include "misc/tensor/Index.h"
-#include "misc/tensor/Diff.h"
-#include "misc/tensor/Shape.h"
-#include "misc/tensor/Comparator.h"
-
-#include "tflite/TensorView.h"
-
-#include <functional>
-#include <vector>
-
-/**
- * @brief Class to define TfLite interpreter match application
- */
-class TfLiteInterpMatchApp
-{
-public:
- /**
- * @brief Construct a new TfLiteInterpMatchApp object with Comparator
- * @param[in] comparator Comparator object for tensor comparison
- */
- TfLiteInterpMatchApp(const nnfw::misc::tensor::Comparator &comparator)
- : _verbose{false}, _comparator(comparator)
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Get a reference to the verbosity level used for debugging output
- * @return Reference to the verbose value
- */
- int &verbose(void) { return _verbose; }
-
-private:
- int _verbose;
-
-public:
- /**
- * @brief Run two interpreters and check whether their outputs match
- * @param[in] pure Interpreter object producing the expected values (pure TfLite)
- * @param[in] nnapi Interpreter object producing the obtained values (through NNAPI)
- * @return @c true if the two Interpreter results are the same, otherwise @c false
- */
- bool run(::tflite::Interpreter &pure, ::tflite::Interpreter &nnapi) const;
- /**
- * @brief Compare two TensorView values and return the match result
- * @param[in] expected TensorView object to read expected values
- * @param[in] obtained TensorView object to read obtained values
- * @param[in] id Tensor ID value used for debug message
- * @return @c true if two TensorView values are same, otherwise @c false
- */
- template <typename T>
- bool compareSingleTensorView(const nnfw::tflite::TensorView<T> &expected,
- const nnfw::tflite::TensorView<T> &obtained, int id) const;
-
-private:
- const nnfw::misc::tensor::Comparator &_comparator;
-};
-
-#include "tflite/interp/Builder.h"
-#include "tflite/Quantization.h"
-
-#include <random>
-
-/**
- * @brief Class to generate random values
- */
-class RandomGenerator
-{
-public:
- /**
- * @brief Construct a new RandomGenerator object
- * @param[in] seed Random seed value
- * @param[in] mean Mean value of normal random number generation
- * @param[in] stddev Standard deviation of random number generation
- * @param[in] quantization TfLiteQuantizationParams type to represent quantization value
- * (not used yet)
- */
- RandomGenerator(int seed, float mean, float stddev,
- const TfLiteQuantizationParams quantization = make_default_quantization())
- : _rand{seed}, _dist{mean, stddev}, _quantization{quantization}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Generate random numbers for type T
- * @param[in] s Shape value
- * @param[in] i Index value
- * @return Random generated value
- * @note This is the same as T generate(void), since the two input parameters are not used
- */
- template <typename T>
- T generate(const ::nnfw::misc::tensor::Shape &, const ::nnfw::misc::tensor::Index &)
- {
- return generate<T>();
- }
-
- /**
- * @brief Generate random numbers for type T
- * @return Random generated value
- */
- template <typename T> T generate(void) { return _dist(_rand); }
-
-private:
- std::minstd_rand _rand;
- std::normal_distribution<float> _dist;
- const TfLiteQuantizationParams _quantization;
-};
-
-template <> uint8_t RandomGenerator::generate<uint8_t>(void);
-
-/**
- * @brief Structure for NNAPI correctness test
- */
-struct RandomTestParam
-{
- int verbose; //!< Verbosity of debug information
- int tolerance; //!< Tolerance of value difference
- int tensor_logging = 0; //!< Save logging to a file if not 0
- std::string log_path = ""; //!< Path of log file, meaningful only when tensor_logging is 1
-};
-
-/**
- * @brief Class to define Random test runner
- */
-class RandomTestRunner
-{
-public:
- /**
- * @brief Construct a new RandomTestRunner object
- * @param[in] seed Random seed value
- * @param[in] param RandomTestParam object for test runner
- * @param[in] quantization TfLiteQuantizationParams type to represent quantization value
- */
- RandomTestRunner(int seed, const RandomTestParam &param,
- const TfLiteQuantizationParams quantization = make_default_quantization())
- : _randgen{seed, 0.0f, 2.0f, quantization}, _param{param}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Run the random test runner
- * @param[in] builder Interpreter Builder used to run
- * @return 0 if test succeeds, otherwise failure
- */
- int run(const nnfw::tflite::Builder &builder);
-
-public:
- /**
- * @brief Get RandomGenerator reference
- * @return RandomGenerator reference
- */
- RandomGenerator &generator() { return _randgen; }
-
-private:
- RandomGenerator _randgen;
- const RandomTestParam _param;
-
-public:
- /**
- * @brief Create a RandomTestRunner object
- * @param[in] seed Random seed value
- * @return RandomTestRunner object
- */
- static RandomTestRunner make(int seed);
-};
-
-#endif // __NNFW_TFLITE_DIFF_H__
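A sketch of the typical driver for the classes above; constructing the Builder depends on tflite/interp/Builder.h, which is not shown in this hunk:

#include "tflite/Diff.h"

int run_correctness_test(const nnfw::tflite::Builder &builder)
{
  // Seed 0; make() supplies the default tolerance and verbosity
  RandomTestRunner runner = RandomTestRunner::make(0);

  // Runs the model through pure TfLite and through NNAPI, then diffs outputs
  return runner.run(builder);
}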
diff --git a/libs/tflite/include/tflite/FeatureView.h b/libs/tflite/include/tflite/FeatureView.h
deleted file mode 100644
index 06cbf4b14..000000000
--- a/libs/tflite/include/tflite/FeatureView.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file FeatureView.h
- * @brief This file contains FeatureView class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_FEATURE_VIEW_H__
-#define __NNFW_TFLITE_FEATURE_VIEW_H__
-
-#include "tensorflow/contrib/lite/interpreter.h"
-
-#include "tflite/InputIndex.h"
-#include "tflite/OutputIndex.h"
-
-#include "misc/feature/Shape.h"
-#include "misc/feature/Reader.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-
-template <typename T> class FeatureView;
-
-/**
- * @brief Class to support reading element of float type feature
- */
-template <> class FeatureView<float> : public nnfw::misc::feature::Reader<float>
-{
-public:
- /**
- * @brief Construct a new FeatureView object
- * @param[in] interp Interpreter to read from
- * @param[in] index InputIndex index of input
- */
- FeatureView(::tflite::Interpreter &interp, const InputIndex &index);
- /**
- * @brief Construct a new FeatureView object
- * @param[in] interp Interpreter to read from
- * @param[in] index OutputIndex index of output
- */
- FeatureView(::tflite::Interpreter &interp, const OutputIndex &index);
-
-public:
- /**
- * @brief Get value of element using channel, row and column index
- * @param[in] ch Channel index
- * @param[in] row Row index
- * @param[in] col Column index
- * @return Value of element
- */
- float at(uint32_t ch, uint32_t row, uint32_t col) const;
- /**
- * @brief Get reference of element using channel, row and column index
- * @param[in] ch Channel index
- * @param[in] row Row index
- * @param[in] col Column index
- * @return Reference of element
- */
- float &at(uint32_t ch, uint32_t row, uint32_t col);
-
-private:
- /**
- * @brief Get offset of element from channel, row and column index
- * @param[in] ch Channel index
- * @param[in] row Row index
- * @param[in] col Column index
- * @return Offset of element
- */
- uint32_t getElementOffset(uint32_t ch, uint32_t row, uint32_t col) const
- {
- uint32_t res = 0;
-
- // TensorFlow Lite assumes NHWC ordering for tensors
- res += row * _shape.W * _shape.C;
- res += col * _shape.C;
- res += ch;
-
- return res;
- }
-
-private:
- nnfw::misc::feature::Shape _shape;
- float *_base;
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_FEATURE_VIEW_H__
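A short sketch of reading a float feature map through the view above; constructing InputIndex from a plain int is an assumption, as its header is not shown here:

#include "tflite/FeatureView.h"

float value_at(::tflite::Interpreter &interp)
{
  nnfw::tflite::FeatureView<float> view{interp, nnfw::tflite::InputIndex{0}}; // assumed ctor

  // at(ch, row, col): NHWC layout, i.e. offset row*W*C + col*C + ch
  return view.at(0, 1, 1);
}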
diff --git a/libs/tflite/include/tflite/Quantization.h b/libs/tflite/include/tflite/Quantization.h
deleted file mode 100644
index 4a8a0f1ac..000000000
--- a/libs/tflite/include/tflite/Quantization.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Quantization.h
- * @brief This file contains BitwiseIntToFloat union and quantization related
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_QUANTIZATION_H__
-#define __NNFW_TFLITE_QUANTIZATION_H__
-
-/**
- * @brief Union to provide bitwise conversion of integer and float
- */
-union BitwiseIntToFloat {
- int i;
- float f;
-};
-
-static const float FLOAT_NEAREST_TO_1 = BitwiseIntToFloat{0x3f7fffff}.f;
-
-#include "tensorflow/contrib/lite/context.h"
-
-/**
- * @brief Get TfLiteQuantizationParams object with default values
- * @return TfLiteQuantizationParams object
- */
-TfLiteQuantizationParams make_default_quantization(void);
-
-#endif // __NNFW_TFLITE_QUANTIZATION_H__
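The union above performs bitwise type punning between int and float; FLOAT_NEAREST_TO_1 (bit pattern 0x3f7fffff) is the largest float strictly below 1.0f. A minimal sketch:

#include "tflite/Quantization.h"

#include <cstdio>

int main()
{
  // 0x3f800000 is the IEEE-754 bit pattern of 1.0f
  float one = BitwiseIntToFloat{0x3f800000}.f;

  std::printf("%f %.9f\n", one, FLOAT_NEAREST_TO_1); // 1.000000 0.999999940
  return 0;
}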
diff --git a/libs/tflite/include/tflite/Session.h b/libs/tflite/include/tflite/Session.h
deleted file mode 100644
index 4f2e5c54d..000000000
--- a/libs/tflite/include/tflite/Session.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Session.h
- * @brief This file contains Session class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_SESSION_H__
-#define __NNFW_TFLITE_SESSION_H__
-
-#include <tensorflow/contrib/lite/interpreter.h>
-
-namespace nnfw
-{
-namespace tflite
-{
-
-/**
- * @brief Structure to provide interface methods of interpreter session
- */
-struct Session
-{
- /**
- * @brief Destruct Session object using default destructor
- */
- virtual ~Session() = default;
-
- /**
- * @brief Get the Interpreter object pointer
- * @return The Interpreter object pointer
- */
- virtual ::tflite::Interpreter *interp(void) = 0;
-
- /**
- * @brief Prepare the session
- * @return @c true if prepare method succeeded, otherwise @c false
- */
- virtual bool prepare(void) = 0;
- /**
- * @brief Run the session
- * @return @c true if run method succeeded, otherwise @c false
- */
- virtual bool run(void) = 0;
- /**
- * @brief Teardown(release) the session
- * @return @c true if teardown method succeeded, otherwise @c false
- */
- virtual bool teardown(void) = 0;
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_SESSION_H__
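[Note] The intended call order for a Session implementation is prepare, then run, then teardown. A hypothetical driver function (the Session interface is from the deleted header above; the driver itself is a sketch):

    // Sketch: drive any concrete nnfw::tflite::Session implementation.
    bool evaluate(nnfw::tflite::Session &session)
    {
      if (!session.prepare())
        return false;

      const bool ok = session.run();

      // Attempt teardown regardless of whether run() succeeded.
      return session.teardown() && ok;
    }
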
diff --git a/libs/tflite/include/tflite/TensorLogger.h b/libs/tflite/include/tflite/TensorLogger.h
deleted file mode 100644
index e56a76b58..000000000
--- a/libs/tflite/include/tflite/TensorLogger.h
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file TensorLogger.h
- * @brief This file contains TensorLogger class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_TENSOR_LOGGER_H__
-#define __NNFW_TFLITE_TENSOR_LOGGER_H__
-
-#include "misc/tensor/IndexIterator.h"
-#include "tflite/TensorView.h"
-
-#include <tensorflow/contrib/lite/interpreter.h>
-#include <tensorflow/contrib/lite/context.h>
-#include <fstream>
-#include <iomanip>
-
-namespace nnfw
-{
-namespace tflite
-{
-
-/**
- * @brief Class to write input and output values / shapes into a file in Python form
- * @note This is a utility to write input and output values / shapes into a file in Python form.\n
- *       Any Python app can load these values by running the Python code below:\n
- *       exec(open(filename).read())\n
- *       The generated Python code looks like the following: \n
- * tensor_shape_gen = []\n
- * tensor_value_gen = []\n\n
- * tensor_shape_gen.append("{2, 1, 2}")\n
- * tensor_value_gen.append([1, 2, 3, 4])\n\n
- * tensor_shape_gen.append("{2}")\n
- * tensor_value_gen.append([1, 2])\n\n
- * tensor_shape_gen.append("{2, 1, 2}")\n
- * tensor_value_gen.append([1, 4, 3, 8])\n
- */
-class TensorLogger
-{
-private:
- std::ofstream _outfile;
-
-public:
- /**
- * @brief Get TensorLogger instance
- * @return The TensorLogger instance
- */
- static TensorLogger &instance()
- {
- static TensorLogger instance;
- return instance;
- }
-
- /**
- * @brief Save the tensor details to file from interpreter
- * @param[in] path The file path to save
- * @param[in] interp The TfLite interpreter
- */
- void save(const std::string &path, ::tflite::Interpreter &interp)
- {
- open(path);
-
- int log_index = 0;
- for (const auto id : interp.inputs())
- {
- _outfile << "# input tensors" << std::endl;
- printTensor(interp, id, log_index++);
- }
- for (const auto id : interp.outputs())
- {
- _outfile << "# output tensors" << std::endl;
- printTensor(interp, id, log_index++);
- }
- close();
- }
-
-private:
- void open(const std::string &path)
- {
- if (!_outfile.is_open())
- _outfile.open(path, std::ios_base::out);
-
- _outfile << "# ------ file: " << path << " ------" << std::endl
- << "tensor_shape_gen = []" << std::endl
- << "tensor_value_gen = []" << std::endl
- << std::endl;
- }
-
- void printTensor(::tflite::Interpreter &interp, const int id, const int log_index)
- {
- const TfLiteTensor *tensor = interp.tensor(id);
-
- _outfile << "# tensor name: " << tensor->name << std::endl;
- _outfile << "# tflite::interpreter.tensor(" << id << ") -> "
- "tensor_value_gen["
- << log_index << "]" << std::endl;
-
- if (tensor->type == kTfLiteInt32)
- {
- printTensorShape(tensor);
- printTensorValue<int32_t>(tensor, tensor->data.i32);
- }
- else if (interp.tensor(id)->type == kTfLiteUInt8)
- {
- printTensorShape(tensor);
- printTensorValue<uint8_t>(tensor, tensor->data.uint8);
- }
- else if (tensor->type == kTfLiteFloat32)
- {
- printTensorShape(tensor);
- printTensorValue<float>(tensor, tensor->data.f);
- }
- }
-
- void printTensorShape(const TfLiteTensor *tensor)
- {
- _outfile << "tensor_shape_gen.append('{";
-
- size_t r = 0;
- for (; r < tensor->dims->size - 1; r++)
- {
- _outfile << tensor->dims->data[r] << ", ";
- }
- _outfile << tensor->dims->data[r];
-
- _outfile << "}')" << std::endl;
- }
-
- template <typename T> void printTensorValue(const TfLiteTensor *tensor, T *tensor_data_ptr)
- {
- _outfile << "tensor_value_gen.append([";
-
- _outfile << std::fixed << std::setprecision(10);
-
- const T *end = reinterpret_cast<const T *>(tensor->data.raw_const + tensor->bytes);
- for (T *ptr = tensor_data_ptr; ptr < end; ptr++)
- _outfile << *ptr << ", ";
-
- _outfile << "])" << std::endl << std::endl;
- }
-
- void close()
- {
- _outfile << "# --------- tensor shape and value defined above ---------" << std::endl;
- _outfile.close();
- }
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_TENSOR_LOGGER_H__
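[Note] Usage of the deleted TensorLogger is a single call on the singleton (the path below is illustrative); the resulting file can then be loaded from Python with exec(open(path).read()):

    // Dump every input/output tensor of 'interp' in Python-loadable form.
    nnfw::tflite::TensorLogger::instance().save("/tmp/tensors.py", interp);
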
diff --git a/libs/tflite/include/tflite/TensorUtils.h b/libs/tflite/include/tflite/TensorUtils.h
deleted file mode 100644
index 6266c5dff..000000000
--- a/libs/tflite/include/tflite/TensorUtils.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file TensorUtils.h
- * @brief This file contains utility functions
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_TENSOR_UTILS_H__
-#define __NNFW_TFLITE_TENSOR_UTILS_H__
-
-#include <tensorflow/contrib/lite/context.h>
-
-namespace nnfw
-{
-namespace tflite
-{
-
-/**
- * @brief Check whether the tensor type is kTfLiteFloat32
- * @param[in] tensor The tensor object to be compared
- * @return @c true if tensor type is kTfLiteFloat32, otherwise @c false
- */
-inline bool isFloatTensor(const TfLiteTensor *tensor) { return tensor->type == kTfLiteFloat32; }
-
-/**
- * @brief Check whether the tensor is a 4-D tensor whose first dimension
- *        length is 1
- * @param[in] tensor The tensor object to be compared
- * @return @c true if tensor is 4-D tensor and the first dimension length is 1, otherwise @c false
- */
-inline bool isFeatureTensor(const TfLiteTensor *tensor)
-{
- return (tensor->dims->size == 4) && (tensor->dims->data[0] == 1);
-}
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_TENSOR_UTILS_H__
diff --git a/libs/tflite/include/tflite/TensorView.h b/libs/tflite/include/tflite/TensorView.h
deleted file mode 100644
index 79c754c78..000000000
--- a/libs/tflite/include/tflite/TensorView.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file TensorView.h
- * @brief This file contains TensorView class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_TENSOR_VIEW_H__
-#define __NNFW_TFLITE_TENSOR_VIEW_H__
-
-#include "tensorflow/contrib/lite/interpreter.h"
-
-#include "misc/tensor/Shape.h"
-#include "misc/tensor/Index.h"
-#include "misc/tensor/Reader.h"
-#include "misc/tensor/NonIncreasingStride.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-
-/**
- * @brief Class to define TensorView, which inherits from the nnfw::misc::tensor::Reader<T> class
- */
-template <typename T> class TensorView final : public nnfw::misc::tensor::Reader<T>
-{
-public:
- /**
- * @brief Construct a TensorView object with base address and shape information
- * @param[in] shape The shape of a tensor
- * @param[in] base The base address of a tensor
- */
- TensorView(const nnfw::misc::tensor::Shape &shape, T *base) : _shape{shape}, _base{base}
- {
- // Set 'stride'
- _stride.init(_shape);
- }
-
-public:
- /**
- * @brief Get shape of tensor
- * @return Reference of shape
- */
- const nnfw::misc::tensor::Shape &shape(void) const { return _shape; }
-
-public:
- /**
- * @brief Get value of tensor index
- * @param[in] index The tensor index
- * @return The value at the index
- */
- T at(const nnfw::misc::tensor::Index &index) const override
- {
- const auto offset = _stride.offset(index);
- return *(_base + offset);
- }
-
-public:
- /**
- * @brief Get reference value of tensor index
- * @param[in] index The tensor index
- * @return The reference value at the index
- */
- T &at(const nnfw::misc::tensor::Index &index)
- {
- const auto offset = _stride.offset(index);
- return *(_base + offset);
- }
-
-private:
- nnfw::misc::tensor::Shape _shape; /**< The tensor shape */
-
-public:
- T *_base; /**< The base address of tensor */
- nnfw::misc::tensor::NonIncreasingStride _stride; /**< The NonIncreasingStride object */
-
-public:
- // TODO Introduce Operand ID class
- /**
- * @brief Create TensorView object using given parameters
- * @param[in] interp The TfLite interpreter
- * @param[in] tensor_index The tensor index
- * @return The new TensorView<T> object
- */
- static TensorView<T> make(::tflite::Interpreter &interp, int tensor_index)
- {
- auto tensor_ptr = interp.tensor(tensor_index);
-
- // Set 'shape'
- nnfw::misc::tensor::Shape shape(tensor_ptr->dims->size);
-
- for (uint32_t axis = 0; axis < shape.rank(); ++axis)
- {
- shape.dim(axis) = tensor_ptr->dims->data[axis];
- }
-
- return TensorView<T>(shape, interp.typed_tensor<T>(tensor_index));
- }
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_TENSOR_VIEW_H__
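[Note] A short usage sketch of the deleted TensorView (the interpreter and tensor index are assumed; nnfw::misc::tensor::Index is taken to be constructible from a brace-enclosed index list, as its uses elsewhere in this library suggest):

    // Wrap tensor #3 of 'interp' as a typed float view and read one element.
    auto view = nnfw::tflite::TensorView<float>::make(interp, 3);
    float v = view.at(nnfw::misc::tensor::Index{0, 1, 2});
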
diff --git a/libs/tflite/include/tflite/ext/kernels/Abs.h b/libs/tflite/include/tflite/ext/kernels/Abs.h
deleted file mode 100644
index 74e4aa658..000000000
--- a/libs/tflite/include/tflite/ext/kernels/Abs.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NNFW_TFLITE_EXT_KERNELS_ABS_H__
-#define __NNFW_TFLITE_EXT_KERNELS_ABS_H__
-
-#include "tensorflow/contrib/lite/context.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace Abs
-{
-
-void *InitAbs(TfLiteContext *context, const char *buffer, size_t length);
-void FreeAbs(TfLiteContext *context, void *buffer);
-TfLiteStatus PrepareAbs(TfLiteContext *context, TfLiteNode *node);
-TfLiteStatus EvalAbs(TfLiteContext *context, TfLiteNode *node);
-
-} // namespace Abs
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_EXT_KERNELS_ABS_H__
diff --git a/libs/tflite/include/tflite/ext/kernels/CustomOps.h b/libs/tflite/include/tflite/ext/kernels/CustomOps.h
deleted file mode 100644
index 3f9459bb2..000000000
--- a/libs/tflite/include/tflite/ext/kernels/CustomOps.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file CustomOps.h
- * @brief This file contains registration of custom operators
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_EXT_KERNELS_CUSTOM_OP_H__
-#define __NNFW_TFLITE_EXT_KERNELS_CUSTOM_OP_H__
-
-#include "tensorflow/contrib/lite/context.h"
-#include "tflite/ext/kernels/TensorFlowMax.h"
-#include "tflite/ext/kernels/SquaredDifference.h"
-#include "tflite/ext/kernels/TensorFlowSum.h"
-#include "tflite/ext/kernels/Abs.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-
-#define REGISTER_FUNCTION(Name) \
- TfLiteRegistration *Register_##Name(void) \
- { \
- static TfLiteRegistration r = { \
- Name::Init##Name, Name::Free##Name, Name::Prepare##Name, Name::Eval##Name, \
- }; \
- r.custom_name = #Name; \
- return &r; \
- }
-
-REGISTER_FUNCTION(TensorFlowMax)
-REGISTER_FUNCTION(SquaredDifference)
-REGISTER_FUNCTION(TensorFlowSum)
-REGISTER_FUNCTION(Abs)
-
-#undef REGISTER_FUNCTION
-
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_EXT_KERNELS_CUSTOM_OP_H__
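[Note] For a single kernel, the REGISTER_FUNCTION macro above expands to roughly the following (the Abs instantiation, written out by hand):

    TfLiteRegistration *Register_Abs(void)
    {
      static TfLiteRegistration r = {
          Abs::InitAbs, Abs::FreeAbs, Abs::PrepareAbs, Abs::EvalAbs,
      };
      r.custom_name = "Abs";
      return &r;
    }
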
diff --git a/libs/tflite/include/tflite/ext/kernels/SquaredDifference.h b/libs/tflite/include/tflite/ext/kernels/SquaredDifference.h
deleted file mode 100644
index 492523c02..000000000
--- a/libs/tflite/include/tflite/ext/kernels/SquaredDifference.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file SquaredDifference.h
- * @brief This file contains SquaredDifference namespace and SquaredDifference function
- * definitions
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_EXT_KERNELS_SQUARED_DIFFERENCE_H__
-#define __NNFW_TFLITE_EXT_KERNELS_SQUARED_DIFFERENCE_H__
-
-#include "tensorflow/contrib/lite/context.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace SquaredDifference
-{
-
-/**
- * @brief Initialize the SquaredDifference operator using the contents of buffer
- * @param[in] context The TfLite context
- * @param[in] buffer The buffer with contents
- * @param[in] length The buffer length
- * @return The void pointer for user data
- */
-void *InitSquaredDifference(TfLiteContext *context, const char *buffer, size_t length);
-
-/**
- * @brief Release any memory allocated via 'InitSquaredDifference'
- * @param[in] context The TfLite context
- * @param[in] buffer The buffer with contents
- * @return N/A
- */
-void FreeSquaredDifference(TfLiteContext *context, void *buffer);
-
-/**
- * @brief Prepare the SquaredDifference operator for execution
- * @param[in] context The TfLite context
- * @param[in] node The operand node
- * @return The TfLite status
- */
-TfLiteStatus PrepareSquaredDifference(TfLiteContext *context, TfLiteNode *node);
-
-/**
- * @brief Evaluate the SquaredDifference operator
- * @param[in] context The TfLite context
- * @param[in] node The operand node
- * @return The TfLite status
- */
-TfLiteStatus EvalSquaredDifference(TfLiteContext *context, TfLiteNode *node);
-
-} // namespace SquaredDifference
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_EXT_KERNELS_SQUARED_DIFFERENCE_H__
diff --git a/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h b/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h
deleted file mode 100644
index d31d76483..000000000
--- a/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file TensorFlowMax.h
- * @brief This file contains TensorFlowMax namespace and TensorFlowMax function definitions
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__
-#define __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__
-
-#include "tensorflow/contrib/lite/context.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace TensorFlowMax
-{
-
-/**
- * @brief Initialize the TensorFlowMax operator using the contents of buffer
- * @param[in] context The TfLite context
- * @param[in] buffer The buffer with contents
- * @param[in] length The buffer length
- * @return The void pointer for user data
- */
-void *InitTensorFlowMax(TfLiteContext *context, const char *buffer, size_t length);
-
-/**
- * @brief Release any memory allocated via 'InitTensorFlowMax'
- * @param[in] context The TfLite context
- * @param[in] buffer The buffer with contents
- * @return N/A
- */
-void FreeTensorFlowMax(TfLiteContext *context, void *buffer);
-
-/**
- * @brief Prepare the TensorFlowMax operator for execution
- * @param[in] context The TfLite context
- * @param[in] node The operand node
- * @return The TfLite status
- */
-TfLiteStatus PrepareTensorFlowMax(TfLiteContext *context, TfLiteNode *node);
-
-/**
- * @brief Evaluate the TensorFlowMax operator
- * @param[in] context The TfLite context
- * @param[in] node The operand node
- * @return The TfLite status
- */
-TfLiteStatus EvalTensorFlowMax(TfLiteContext *context, TfLiteNode *node);
-
-} // namespace TensorFlowMax
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__
diff --git a/libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h b/libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h
deleted file mode 100644
index 66783cf41..000000000
--- a/libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_SUM_H__
-#define __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_SUM_H__
-
-#include "tensorflow/contrib/lite/context.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace TensorFlowSum
-{
-
-void *InitTensorFlowSum(TfLiteContext *context, const char *buffer, size_t length);
-void FreeTensorFlowSum(TfLiteContext *context, void *buffer);
-TfLiteStatus PrepareTensorFlowSum(TfLiteContext *context, TfLiteNode *node);
-TfLiteStatus EvalTensorFlowSum(TfLiteContext *context, TfLiteNode *node);
-
-} // namespace TensorFlowSum
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_SUM_H__
diff --git a/libs/tflite/include/tflite/ext/kernels/register.h b/libs/tflite/include/tflite/ext/kernels/register.h
deleted file mode 100644
index 124af7abc..000000000
--- a/libs/tflite/include/tflite/ext/kernels/register.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This header is derived from the following file (in TensorFlow)
-// 'externals/tensorflow/tensorflow/contrib/lite/kernels/register.h'
-#ifndef __NNFW_TFLITE_EXT_KERNELS_REGISTER_H__
-#define __NNFW_TFLITE_EXT_KERNELS_REGISTER_H__
-
-#include <unordered_map>
-#include "tensorflow/contrib/lite/context.h"
-#include "tensorflow/contrib/lite/model.h"
-
-namespace nnfw {
-namespace tflite {
-
-class BuiltinOpResolver : public ::tflite::MutableOpResolver {
- public:
- BuiltinOpResolver();
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_EXT_KERNELS_REGISTER_H__
-
-// clang-format on
diff --git a/libs/tflite/include/tflite/ext/nnapi_delegate.h b/libs/tflite/include/tflite/ext/nnapi_delegate.h
deleted file mode 100644
index 3aac01af7..000000000
--- a/libs/tflite/include/tflite/ext/nnapi_delegate.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This header is derived from the following file (in TensorFlow v1.12)
-// 'externals/tensorflow/tensorflow/contrib/lite/nnapi_delegate.h'
-#ifndef __NNFW_TFLITE_EXT_NNAPI_DELEGATE_H__
-#define __NNFW_TFLITE_EXT_NNAPI_DELEGATE_H__
-
-#include "tensorflow/contrib/lite/allocation.h"
-#ifdef OBS_BUILD
-#include "tensorflow/contrib/lite/context.h"
-#include "tensorflow/contrib/lite/error_reporter.h"
-#else
-#include "tensorflow/contrib/lite/c/c_api_internal.h"
-#include "tensorflow/contrib/lite/core/api/error_reporter.h"
-#endif
-#include "tensorflow/contrib/lite/interpreter.h"
-#include "NeuralNetworksShim.h"
-
-class ANeuralNetworksModel;
-class ANeuralNetworksMemory;
-class ANeuralNetworksCompilation;
-
-namespace nnfw {
-namespace tflite {
-
-class NNAPIAllocation : public ::tflite::MMAPAllocation {
- public:
- NNAPIAllocation(const char* filename, ::tflite::ErrorReporter* error_reporter);
- ~NNAPIAllocation();
-
- size_t offset(const void* ptr) const {
- auto signed_offset = reinterpret_cast<const uint8_t*>(ptr) -
- reinterpret_cast<const uint8_t*>(mmapped_buffer_);
-
- return static_cast<size_t>(signed_offset);
- }
-
- ANeuralNetworksMemory* memory() const { return handle_; }
- bool valid() const override { return handle_ != nullptr; }
-
- private:
- mutable ANeuralNetworksMemory* handle_ = nullptr;
-};
-
-class NNAPIDelegate {
- public:
- ~NNAPIDelegate();
-
- // Convert a tflite graph to NNAPI
- TfLiteStatus BuildGraph(::tflite::Interpreter* interpreter);
-
- // Run
- TfLiteStatus Invoke(::tflite::Interpreter* interpreter);
-
- // Whether the current platform supports NNAPI delegation.
- static bool IsSupported();
-
- private:
- // The NN API model handle
- ANeuralNetworksModel* nn_model_ = nullptr;
- // The NN API compilation handle
- ANeuralNetworksCompilation* nn_compiled_model_ = nullptr;
- // Model status
- TfLiteStatus model_status_ = kTfLiteOk;
-
- // List of state tensors for LSTM, RNN, SVDF.
- // NN API does not allow ops to maintain states across multiple
- // invocations. We need to manually create state input tensors from
- // corresponding state output tensors of TFLite operations, and map them
- // correctly.
- std::vector<int> model_states_inputs_; // holds NNAPI operand ids
- std::vector<int> model_states_outputs_; // holds TFLite tensor ids
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_EXT_NNAPI_DELEGATE_H__
-
-// clang-format on
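[Note] Typical use of the deleted NNAPIDelegate mirrors the pattern in Diff.cpp further below: build the NNAPI graph from an interpreter, then invoke it. A minimal sketch (the error handling shown is illustrative; both calls return a TfLiteStatus, where kTfLiteOk signals success):

    nnfw::tflite::NNAPIDelegate delegate;
    if (delegate.BuildGraph(&interp) != kTfLiteOk)
      throw std::runtime_error{"BuildGraph failed"};
    if (delegate.Invoke(&interp) != kTfLiteOk)
      throw std::runtime_error{"Invoke failed"};
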
diff --git a/libs/tflite/include/tflite/interp/Builder.h b/libs/tflite/include/tflite/interp/Builder.h
deleted file mode 100644
index b4d082419..000000000
--- a/libs/tflite/include/tflite/interp/Builder.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Builder.h
- * @brief This file contains Builder structure
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_INTERP_BUILDER_H__
-#define __NNFW_TFLITE_INTERP_BUILDER_H__
-
-#include <tensorflow/contrib/lite/interpreter.h>
-
-namespace nnfw
-{
-namespace tflite
-{
-
-/**
- * @brief Structure to provide an interface for building interpreters
- */
-struct Builder
-{
- /**
- * @brief Destroy the Builder object
- */
- virtual ~Builder() = default;
-
- /**
- * @brief Build a TfLite interpreter
- * @return The TfLite interpreter object
- */
- virtual std::unique_ptr<::tflite::Interpreter> build(void) const = 0;
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_INTERP_BUILDER_H__
diff --git a/libs/tflite/include/tflite/interp/FlatBufferBuilder.h b/libs/tflite/include/tflite/interp/FlatBufferBuilder.h
deleted file mode 100644
index 13470b8c5..000000000
--- a/libs/tflite/include/tflite/interp/FlatBufferBuilder.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file FlatBufferBuilder.h
- * @brief This file contains FlatBufferBuilder class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_INTERP_FLAT_BUFFER_BUILDER_H__
-#define __NNFW_TFLITE_INTERP_FLAT_BUFFER_BUILDER_H__
-
-#include <tensorflow/contrib/lite/model.h>
-
-#include "tflite/interp/Builder.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-
-/**
- * @brief Class to define FlatBufferBuilder, which inherits from Builder
- */
-class FlatBufferBuilder final : public Builder
-{
-public:
- /**
- * @brief Construct a FlatBufferBuilder object with FlatBufferModel of TfLite
- * @param[in] model The TfLite Flatbuffer model
- */
- FlatBufferBuilder(const ::tflite::FlatBufferModel &model) : _model{model}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Build a TfLite interpreter from the FlatBuffer model
- * @return A unique pointer to the TfLite interpreter
- */
- std::unique_ptr<::tflite::Interpreter> build(void) const override;
-
-private:
- const ::tflite::FlatBufferModel &_model;
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_INTERP_FLAT_BUFFER_BUILDER_H__
diff --git a/libs/tflite/include/tflite/interp/FunctionBuilder.h b/libs/tflite/include/tflite/interp/FunctionBuilder.h
deleted file mode 100644
index 064375939..000000000
--- a/libs/tflite/include/tflite/interp/FunctionBuilder.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file FunctionBuilder.h
- * @brief This file contains FunctionBuilder class
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NNFW_TFLITE_INTERP_FUNCTION_BUILDER_H__
-#define __NNFW_TFLITE_INTERP_FUNCTION_BUILDER_H__
-
-#include <tensorflow/contrib/lite/model.h>
-
-#include "tflite/interp/Builder.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-
-/**
- * @brief Class to define FunctionBuilder, which inherits from Builder
- */
-class FunctionBuilder final : public Builder
-{
-public:
- using SetupFunc = std::function<void(::tflite::Interpreter &)>;
-
-public:
- /**
- * @brief Construct a FunctionBuilder object with SetupFunction
- * @param[in] fn The SetupFunc object
- */
- FunctionBuilder(const SetupFunc &fn) : _fn{fn}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Build a TfLite interpreter and apply the setup function to it
- * @return A unique pointer to the TfLite interpreter
- */
- std::unique_ptr<::tflite::Interpreter> build(void) const override;
-
-private:
- SetupFunc _fn;
-};
-
-} // namespace tflite
-} // namespace nnfw
-
-#endif // __NNFW_TFLITE_INTERP_FUNCTION_BUILDER_H__
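[Note] A sketch of both deleted builder flavors (the model path and setup body below are illustrative):

    // FlatBufferBuilder: build an interpreter from a .tflite model file.
    auto model = ::tflite::FlatBufferModel::BuildFromFile("model.tflite");
    nnfw::tflite::FlatBufferBuilder fb_builder(*model);
    auto interp_from_model = fb_builder.build();

    // FunctionBuilder: build an interpreter configured by a callback.
    nnfw::tflite::FunctionBuilder fn_builder([](::tflite::Interpreter &interp) {
      // hypothetical setup: register tensors/nodes on 'interp' here
    });
    auto interp_from_fn = fn_builder.build();
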
diff --git a/libs/tflite/src/Diff.cpp b/libs/tflite/src/Diff.cpp
deleted file mode 100644
index 45ef06110..000000000
--- a/libs/tflite/src/Diff.cpp
+++ /dev/null
@@ -1,598 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/Diff.h"
-#include "tflite/ext/nnapi_delegate.h"
-
-#include "misc/fp32.h"
-
-#include "misc/tensor/IndexIterator.h"
-#include "misc/tensor/IndexFormatter.h"
-#include "misc/tensor/Zipper.h"
-#include "misc/tensor/Comparator.h"
-
-#include "misc/environment.h"
-
-#include <iostream>
-#include <cassert>
-
-class DiffSummary : public nnfw::misc::tensor::Comparator::Observer
-{
-public:
- DiffSummary()
- : max_abs_diff_index(0), max_abs_diff_expected{0.0f}, max_abs_diff_obtained{0.0f},
- max_abs_diff_value{0.0f}, max_rel_diff_index(0), max_rel_diff_expected{0.0f},
- max_rel_diff_obtained{0.0f}, max_rel_diff_value{0.0f}
- {
- // DO NOTHING
- }
-
-public:
- void notify(const nnfw::misc::tensor::Index &index, float expected, float obtained) override;
-
-public:
- nnfw::misc::tensor::Index max_abs_diff_index;
- float max_abs_diff_expected;
- float max_abs_diff_obtained;
- float max_abs_diff_value;
-
- nnfw::misc::tensor::Index max_rel_diff_index;
- float max_rel_diff_expected;
- float max_rel_diff_obtained;
- float max_rel_diff_value;
-};
-
-void DiffSummary::notify(const nnfw::misc::tensor::Index &index, float expected, float obtained)
-{
- const auto abs_diff_value = std::fabs(expected - obtained);
-
- if (max_abs_diff_value < abs_diff_value)
- {
- max_abs_diff_index = index;
- max_abs_diff_value = abs_diff_value;
- max_abs_diff_expected = expected;
- max_abs_diff_obtained = obtained;
- }
-
- const auto rel_diff_value = nnfw::misc::fp32::relative_diff(expected, obtained);
-
- if (max_rel_diff_value < rel_diff_value)
- {
- max_rel_diff_index = index;
- max_rel_diff_value = rel_diff_value;
- max_rel_diff_expected = expected;
- max_rel_diff_obtained = obtained;
- }
-}
-
-template <typename T>
-bool TfLiteInterpMatchApp::compareSingleTensorView(const nnfw::tflite::TensorView<T> &expected,
- const nnfw::tflite::TensorView<T> &obtained,
- int id) const
-{
- std::vector<nnfw::misc::tensor::Diff<T>> diffs;
- assert(expected.shape() == obtained.shape());
-
- using nnfw::misc::tensor::zip;
- using nnfw::misc::tensor::Index;
-
- zip(expected.shape(), expected, obtained)
- << [&](const Index &index, T expected_value, T obtained_value) {
- if (expected_value != obtained_value)
- {
- diffs.emplace_back(index, expected_value, obtained_value);
- }
- };
-
- // TODO Unify summary generation code
- if (diffs.size() == 0)
- {
- std::cout << " Tensor #" << id << ": MATCHED" << std::endl;
- }
- else
- {
- std::cout << " Tensor #" << id << ": UNMATCHED" << std::endl;
- std::cout << " " << diffs.size() << " diffs are detected" << std::endl;
- }
-
- if (diffs.size() > 0 && _verbose != 0)
- {
- std::cout << " ---- Details ---" << std::endl;
- for (const auto &diff : diffs)
- {
- std::cout << " Diff at [" << nnfw::misc::tensor::IndexFormatter(diff.index) << "]"
- << std::endl;
- std::cout << " expected: " << diff.expected << std::endl;
- std::cout << " obtained: " << diff.obtained << std::endl;
- }
- }
-
- return diffs.size() == 0;
-}
-
-template <>
-bool TfLiteInterpMatchApp::compareSingleTensorView<float>(
- const nnfw::tflite::TensorView<float> &expected,
- const nnfw::tflite::TensorView<float> &obtained, int id) const
-{
- DiffSummary summary;
-
- assert(expected.shape() == obtained.shape());
- auto diffs = _comparator.compare(expected.shape(), expected, obtained, &summary);
-
- // TODO Unify summary generation code
- if (diffs.size() == 0)
- {
- std::cout << " Tensor #" << id << ": MATCHED" << std::endl;
- }
- else
- {
- std::cout << " Tensor #" << id << ": UNMATCHED" << std::endl;
- std::cout << " " << diffs.size() << " diffs are detected" << std::endl;
- }
-
- // Print out max_diff
- if (summary.max_abs_diff_value > 0)
- {
- std::cout << " Max absolute diff at ["
- << nnfw::misc::tensor::IndexFormatter(summary.max_abs_diff_index) << "]" << std::endl;
- std::cout << " expected: " << summary.max_abs_diff_expected << std::endl;
- std::cout << " obtained: " << summary.max_abs_diff_obtained << std::endl;
- std::cout << " absolute diff: " << summary.max_abs_diff_value << std::endl;
- }
-
- if (summary.max_rel_diff_value > 0)
- {
- const auto tolerance_level = summary.max_rel_diff_value / FLT_EPSILON;
-
- std::cout << " Max relative diff at ["
- << nnfw::misc::tensor::IndexFormatter(summary.max_rel_diff_index) << "]" << std::endl;
- std::cout << " expected: " << summary.max_rel_diff_expected << std::endl;
- std::cout << " obtained: " << summary.max_rel_diff_obtained << std::endl;
- std::cout << " relative diff: " << summary.max_rel_diff_value << std::endl;
- std::cout << " (tolerance level = " << tolerance_level << ")" << std::endl;
- }
-
- if (diffs.size() > 0)
- {
- if (_verbose != 0)
- {
- std::cout << " ---- Details ---" << std::endl;
- for (const auto &diff : diffs)
- {
- const auto absolute_diff = std::fabs(diff.expected - diff.obtained);
- const auto relative_diff = nnfw::misc::fp32::relative_diff(diff.expected, diff.obtained);
- const auto tolerance_level = relative_diff / FLT_EPSILON;
-
- std::cout << " Diff at [" << nnfw::misc::tensor::IndexFormatter(diff.index) << "]"
- << std::endl;
- std::cout << " expected: " << diff.expected << std::endl;
- std::cout << " obtained: " << diff.obtained << std::endl;
- std::cout << " absolute diff: " << absolute_diff << std::endl;
- std::cout << " relative diff: " << relative_diff << std::endl;
- std::cout << " (tolerance level = " << tolerance_level << ")" << std::endl;
- }
- }
-
- return false;
- }
- return true;
-}
-
-#include <map>
-
-bool TfLiteInterpMatchApp::run(::tflite::Interpreter &interp, ::tflite::Interpreter &nnapi) const
-{
- assert(interp.outputs() == nnapi.outputs());
-
- bool all_matched = true;
-
- using Comparator = std::function<bool(int id, ::tflite::Interpreter &, ::tflite::Interpreter &)>;
-
- std::map<TfLiteType, Comparator> comparators;
-
- comparators[kTfLiteUInt8] = [this](int id, ::tflite::Interpreter &interp,
- ::tflite::Interpreter &nnapi) {
- const auto expected = nnfw::tflite::TensorView<uint8_t>::make(interp, id);
- const auto obtained = nnfw::tflite::TensorView<uint8_t>::make(nnapi, id);
-
- return compareSingleTensorView(expected, obtained, id);
- };
-
- comparators[kTfLiteInt32] = [this](int id, ::tflite::Interpreter &interp,
- ::tflite::Interpreter &nnapi) {
- const auto expected = nnfw::tflite::TensorView<int32_t>::make(interp, id);
- const auto obtained = nnfw::tflite::TensorView<int32_t>::make(nnapi, id);
-
- return compareSingleTensorView(expected, obtained, id);
- };
-
- comparators[kTfLiteFloat32] = [this](int id, ::tflite::Interpreter &interp,
- ::tflite::Interpreter &nnapi) {
- const auto expected = nnfw::tflite::TensorView<float>::make(interp, id);
- const auto obtained = nnfw::tflite::TensorView<float>::make(nnapi, id);
-
- return compareSingleTensorView(expected, obtained, id);
- };
-
- comparators[kTfLiteBool] = [this](int id, ::tflite::Interpreter &interp,
- ::tflite::Interpreter &nnapi) {
- const auto expected = nnfw::tflite::TensorView<bool>::make(interp, id);
- const auto obtained = nnfw::tflite::TensorView<bool>::make(nnapi, id);
-
- return compareSingleTensorView(expected, obtained, id);
- };
-
- for (const auto &id : interp.outputs())
- {
- assert(interp.tensor(id)->type == nnapi.tensor(id)->type);
-
- auto it = comparators.find(interp.tensor(id)->type);
-
- if (it == comparators.end())
- {
- throw std::runtime_error{"Not supported output type"};
- }
-
- const auto &comparator = it->second;
-
- if (!comparator(id, interp, nnapi))
- {
- all_matched = false;
- }
- }
-
- return all_matched;
-}
-
-#include "misc/tensor/Object.h"
-
-using namespace std::placeholders;
-
-template <> uint8_t RandomGenerator::generate<uint8_t>(void)
-{
- // The value of type_range is 255.
- float type_range = static_cast<float>(std::numeric_limits<uint8_t>::max()) -
- static_cast<float>(std::numeric_limits<uint8_t>::min());
- // Most _dist values range from -5.0 to 5.0.
- float min_range = -5.0f;
- float max_range = 5.0f;
- return static_cast<uint8_t>((_dist(_rand) - min_range) * type_range / (max_range - min_range));
-}
-
-#include "tflite/TensorLogger.h"
-//
-// Random Test Runner
-//
-int RandomTestRunner::run(const nnfw::tflite::Builder &builder)
-{
- auto tfl_interp = builder.build();
- auto nnapi = builder.build();
-
- tfl_interp->UseNNAPI(false);
-
- // Allocate Tensors
- tfl_interp->AllocateTensors();
- nnapi->AllocateTensors();
-
- assert(tfl_interp->inputs() == nnapi->inputs());
-
- using ::tflite::Interpreter;
- using Initializer = std::function<void(int id, Interpreter *, Interpreter *)>;
-
- std::map<TfLiteType, Initializer> initializers;
- std::map<TfLiteType, Initializer> reseters;
-
-  // Generate signed 32-bit integer (s32) input
- initializers[kTfLiteInt32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteInt32);
- assert(nnapi->tensor(id)->type == kTfLiteInt32);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<int32_t>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<int32_t>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- int32_t value = 0;
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- // TODO Generate random values
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- ++value;
- };
- };
-
-  // Reset signed 32-bit integer (s32) output to zero
- reseters[kTfLiteInt32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteInt32);
- assert(nnapi->tensor(id)->type == kTfLiteInt32);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<int32_t>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<int32_t>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- int32_t value = 0;
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- // TODO Generate random values
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- };
- };
-
- initializers[kTfLiteUInt8] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteUInt8);
- assert(nnapi->tensor(id)->type == kTfLiteUInt8);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<uint8_t>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<uint8_t>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- auto fp = static_cast<uint8_t (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
- const ::nnfw::misc::tensor::Index &)>(
- &RandomGenerator::generate<uint8_t>);
- const nnfw::misc::tensor::Object<uint8_t> data(tfl_interp_view.shape(),
- std::bind(fp, _randgen, _1, _2));
- assert(tfl_interp_view.shape() == data.shape());
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- const auto value = data.at(ind);
-
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- };
- };
-
- reseters[kTfLiteUInt8] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteUInt8);
- assert(nnapi->tensor(id)->type == kTfLiteUInt8);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<uint8_t>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<uint8_t>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- auto fp = static_cast<uint8_t (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
- const ::nnfw::misc::tensor::Index &)>(
- &RandomGenerator::generate<uint8_t>);
- const nnfw::misc::tensor::Object<uint8_t> data(tfl_interp_view.shape(),
- std::bind(fp, _randgen, _1, _2));
- assert(tfl_interp_view.shape() == data.shape());
-
- uint8_t value = 0;
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- };
- };
-
- initializers[kTfLiteFloat32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteFloat32);
- assert(nnapi->tensor(id)->type == kTfLiteFloat32);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<float>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<float>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- auto fp = static_cast<float (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
- const ::nnfw::misc::tensor::Index &)>(
- &RandomGenerator::generate<float>);
- const nnfw::misc::tensor::Object<float> data(tfl_interp_view.shape(),
- std::bind(fp, _randgen, _1, _2));
-
- assert(tfl_interp_view.shape() == data.shape());
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- const auto value = data.at(ind);
-
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- };
- };
-
- reseters[kTfLiteFloat32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteFloat32);
- assert(nnapi->tensor(id)->type == kTfLiteFloat32);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<float>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<float>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- auto fp = static_cast<float (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
- const ::nnfw::misc::tensor::Index &)>(
- &RandomGenerator::generate<float>);
- const nnfw::misc::tensor::Object<float> data(tfl_interp_view.shape(),
- std::bind(fp, _randgen, _1, _2));
-
- assert(tfl_interp_view.shape() == data.shape());
-
- float value = 0;
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- };
- };
-
- initializers[kTfLiteBool] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteBool);
- assert(nnapi->tensor(id)->type == kTfLiteBool);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<bool>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<bool>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- auto fp = static_cast<bool (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
- const ::nnfw::misc::tensor::Index &)>(
- &RandomGenerator::generate<bool>);
- const nnfw::misc::tensor::Object<bool> data(tfl_interp_view.shape(),
- std::bind(fp, _randgen, _1, _2));
-
- assert(tfl_interp_view.shape() == data.shape());
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- const auto value = data.at(ind);
-
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- };
- };
-
- reseters[kTfLiteBool] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
- assert(tfl_interp->tensor(id)->type == kTfLiteBool);
- assert(nnapi->tensor(id)->type == kTfLiteBool);
-
- auto tfl_interp_view = nnfw::tflite::TensorView<bool>::make(*tfl_interp, id);
- auto nnapi_view = nnfw::tflite::TensorView<bool>::make(*nnapi, id);
-
- assert(tfl_interp_view.shape() == nnapi_view.shape());
-
- auto fp = static_cast<bool (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
- const ::nnfw::misc::tensor::Index &)>(
- &RandomGenerator::generate<bool>);
- const nnfw::misc::tensor::Object<bool> data(tfl_interp_view.shape(),
- std::bind(fp, _randgen, _1, _2));
-
- assert(tfl_interp_view.shape() == data.shape());
-
- bool value = false;
-
- nnfw::misc::tensor::iterate(tfl_interp_view.shape())
- << [&](const nnfw::misc::tensor::Index &ind) {
- tfl_interp_view.at(ind) = value;
- nnapi_view.at(ind) = value;
- };
- };
-
- // Fill IFM with random numbers
- for (const auto id : tfl_interp->inputs())
- {
- assert(tfl_interp->tensor(id)->type == nnapi->tensor(id)->type);
-
- auto it = initializers.find(tfl_interp->tensor(id)->type);
-
- if (it == initializers.end())
- {
- throw std::runtime_error{"Not supported input type"};
- }
-
- it->second(id, tfl_interp.get(), nnapi.get());
- }
-
- // Fill OFM with 0
- for (const auto id : tfl_interp->outputs())
- {
- assert(tfl_interp->tensor(id)->type == nnapi->tensor(id)->type);
-
- auto it = reseters.find(tfl_interp->tensor(id)->type);
-
- if (it == reseters.end())
- {
- throw std::runtime_error{"Not supported input type"};
- }
-
- it->second(id, tfl_interp.get(), nnapi.get());
- }
-
- std::cout << "[NNAPI TEST] Run T/F Lite Interpreter without NNAPI" << std::endl;
- tfl_interp->Invoke();
-
- std::cout << "[NNAPI TEST] Run T/F Lite Interpreter with NNAPI" << std::endl;
-
- char *env = getenv("UPSTREAM_DELEGATE");
-
- if (env && !std::string(env).compare("1"))
- {
- nnapi->UseNNAPI(true);
- nnapi->Invoke();
- }
- else
- {
- nnfw::tflite::NNAPIDelegate d;
-
- if (d.BuildGraph(nnapi.get()))
- {
- throw std::runtime_error{"Failed to BuildGraph"};
- }
-
- if (d.Invoke(nnapi.get()))
- {
- throw std::runtime_error{"Failed to BuildGraph"};
- }
- }
-
- // Compare OFM
- std::cout << "[NNAPI TEST] Compare the result" << std::endl;
-
- const auto tolerance = _param.tolerance;
-
- auto equals = [tolerance](float lhs, float rhs) {
- // NOTE Hybrid approach
- // TODO Allow users to set tolerance for absolute_epsilon_equal
- if (nnfw::misc::fp32::absolute_epsilon_equal(lhs, rhs))
- {
- return true;
- }
-
- return nnfw::misc::fp32::epsilon_equal(lhs, rhs, tolerance);
- };
-
- nnfw::misc::tensor::Comparator comparator(equals);
- TfLiteInterpMatchApp app(comparator);
-
- app.verbose() = _param.verbose;
-
- bool res = app.run(*tfl_interp, *nnapi);
-
- if (!res)
- {
- return 255;
- }
-
- std::cout << "[NNAPI TEST] PASSED" << std::endl;
-
- if (_param.tensor_logging)
- nnfw::tflite::TensorLogger::instance().save(_param.log_path, *tfl_interp);
-
- return 0;
-}
-
-RandomTestRunner RandomTestRunner::make(int seed)
-{
- RandomTestParam param;
-
- param.verbose = 0;
- param.tolerance = 1;
-
- nnfw::misc::env::IntAccessor("VERBOSE").access(param.verbose);
- nnfw::misc::env::IntAccessor("TOLERANCE").access(param.tolerance);
-
- return RandomTestRunner{seed, param};
-}
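[Note] The equals lambda in RandomTestRunner::run above implements a hybrid policy: accept a pair of floats when they are within a small absolute epsilon of each other (robust near zero), otherwise accept when the relative error is within the user tolerance, scaled in FLT_EPSILON units. A standalone sketch under assumed epsilon semantics (the real definitions live in misc/fp32.h; the abs_eps value is illustrative):

    #include <cfloat>
    #include <cmath>

    // Sketch of the hybrid float comparison used by RandomTestRunner.
    bool almost_equal(float lhs, float rhs, int tolerance,
                      float abs_eps = 1e-4f /* illustrative */)
    {
      if (std::fabs(lhs - rhs) <= abs_eps)
        return true; // absolute check, robust near zero

      const float rel =
          std::fabs(lhs - rhs) / std::fmax(std::fabs(lhs), std::fabs(rhs));
      return rel <= FLT_EPSILON * static_cast<float>(tolerance);
    }
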
diff --git a/libs/tflite/src/TensorShapeUtils.cpp b/libs/tflite/src/TensorShapeUtils.cpp
deleted file mode 100644
index b5d906719..000000000
--- a/libs/tflite/src/TensorShapeUtils.cpp
+++ /dev/null
@@ -1,48 +0,0 @@
-#include "tflite/TensorShapeUtils.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-
-nnfw::misc::tensor::Shape broadcast(const nnfw::misc::tensor::Shape &lhs_shape,
- const nnfw::misc::tensor::Shape &rhs_shape)
-{
- const uint32_t lhs_rank = lhs_shape.rank();
- const uint32_t rhs_rank = rhs_shape.rank();
- const uint32_t out_rank = std::max(lhs_rank, rhs_rank);
-
- // TODO Simplify implementation
- std::vector<int32_t> lhs_normalized_dims;
- std::vector<int32_t> rhs_normalized_dims;
-
- for (uint32_t n = 0; n < out_rank - lhs_rank; ++n)
- {
- lhs_normalized_dims.emplace_back(1);
- }
- for (uint32_t axis = 0; axis < lhs_rank; ++axis)
- {
- lhs_normalized_dims.emplace_back(lhs_shape.dim(axis));
- }
-
- for (uint32_t n = 0; n < out_rank - rhs_rank; ++n)
- {
- rhs_normalized_dims.emplace_back(1);
- }
- for (uint32_t axis = 0; axis < rhs_rank; ++axis)
- {
- rhs_normalized_dims.emplace_back(rhs_shape.dim(axis));
- }
-
- nnfw::misc::tensor::Shape out_shape(out_rank);
-
- for (uint32_t axis = 0; axis < out_rank; ++axis)
- {
- out_shape.dim(axis) = std::max(lhs_normalized_dims.at(axis), rhs_normalized_dims.at(axis));
- }
-
- return out_shape;
-}
-
-} // namespace tflite
-} // namespace nnfw
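[Note] The broadcast rule above left-pads the lower-rank shape with 1s and takes the per-axis maximum. A worked example of the code's behavior:

    //   lhs = {3, 1, 5}, rhs = {4, 5}
    //   rhs is left-padded to {1, 4, 5}
    //   per-axis max       -> out = {3, 4, 5}
    // Note: the function does not validate compatibility; e.g. {3} vs {4}
    // would silently yield {4}.
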
diff --git a/libs/tflite/src/ext/kernels/Abs.cpp b/libs/tflite/src/ext/kernels/Abs.cpp
deleted file mode 100644
index 7e9c2338d..000000000
--- a/libs/tflite/src/ext/kernels/Abs.cpp
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/ext/kernels/Abs.h"
-#include "tensorflow/contrib/lite/kernels/kernel_util.h"
-
-#include <iostream>
-#include <cmath>
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace Abs
-{
-
-void *InitAbs(TfLiteContext *context, const char *buffer, size_t length) { return nullptr; }
-
-void FreeAbs(TfLiteContext *context, void *buffer) {}
-
-TfLiteStatus PrepareAbs(TfLiteContext *context, TfLiteNode *node)
-{
- TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 1);
- TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
-
- const TfLiteTensor *input = ::tflite::GetInput(context, node, 0);
- TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
-
- TF_LITE_ENSURE_EQ(context, input->type, output->type);
-
- return context->ResizeTensor(context, output, TfLiteIntArrayCopy(input->dims));
-}
-
-TfLiteStatus EvalAbs(TfLiteContext *context, TfLiteNode *node)
-{
- const TfLiteTensor *input = ::tflite::GetInput(context, node, 0);
- TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
- size_t elements = ::tflite::NumElements(input);
- switch (input->type)
- {
- case kTfLiteFloat32:
- {
- auto *in = input->data.f;
- auto *in_end = in + elements;
- auto *out = output->data.f;
- for (; in < in_end; in++, out++)
- *out = std::abs(*in);
- return kTfLiteOk;
- }
- case kTfLiteInt32:
- {
- auto *in = input->data.i32;
- auto *in_end = in + elements;
- auto *out = output->data.i32;
- for (; in < in_end; in++, out++)
- *out = std::abs(*in);
- return kTfLiteOk;
- }
- case kTfLiteInt64:
- {
- auto *in = input->data.i64;
- auto *in_end = in + elements;
- auto *out = output->data.i64;
- for (; in < in_end; in++, out++)
- *out = std::abs(*in);
- return kTfLiteOk;
- }
- case kTfLiteUInt8:
- {
- auto *in = input->data.uint8;
- auto *in_end = in + elements;
- auto *out = output->data.uint8;
- for (; in < in_end; in++, out++)
- *out = std::abs(*in);
- return kTfLiteOk;
- }
- default:
- {
- context->ReportError(context, "Input type %d is not supported", input->type);
- return kTfLiteError;
- }
- }
-}
-
-} // namespace Abs
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
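[Note] One behavioral observation on the Abs kernel above: in the kTfLiteUInt8 branch, std::abs on an (always non-negative) uint8_t value is an identity after integer promotion, so that case reduces to a plain element-wise copy.
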
diff --git a/libs/tflite/src/ext/kernels/SquaredDifference.cpp b/libs/tflite/src/ext/kernels/SquaredDifference.cpp
deleted file mode 100644
index 8ac2b1de0..000000000
--- a/libs/tflite/src/ext/kernels/SquaredDifference.cpp
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/ext/kernels/SquaredDifference.h"
-#include "tensorflow/contrib/lite/kernels/kernel_util.h"
-
-#include <iostream>
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace SquaredDifference
-{
-
-void *InitSquaredDifference(TfLiteContext *context, const char *buffer, size_t length)
-{
- return nullptr;
-}
-
-void FreeSquaredDifference(TfLiteContext *context, void *buffer) {}
-
-TfLiteStatus PrepareSquaredDifference(TfLiteContext *context, TfLiteNode *node)
-{
- TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 2);
- TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
-
- const TfLiteTensor *input1 = ::tflite::GetInput(context, node, 0);
- const TfLiteTensor *input2 = ::tflite::GetInput(context, node, 1);
- TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
-
- TF_LITE_ENSURE_EQ(context, input1->type, input2->type);
- TF_LITE_ENSURE_EQ(context, input1->type, output->type);
-
- return context->ResizeTensor(context, output, TfLiteIntArrayCopy(input1->dims));
-}
-
-TfLiteStatus EvalSquaredDifference(TfLiteContext *context, TfLiteNode *node)
-{
-
- const TfLiteTensor *input1 = ::tflite::GetInput(context, node, 0);
- const TfLiteTensor *input2 = ::tflite::GetInput(context, node, 1);
-
- TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
-
- size_t elements = ::tflite::NumElements(input1);
-
- switch (input1->type)
- {
- case kTfLiteFloat32:
- {
- const float *in1 = input1->data.f;
- const float *in2 = input2->data.f;
- const float *in_end1 = in1 + elements;
- float *out = output->data.f;
-
- for (; in1 < in_end1; in1++, in2++, out++)
- *out = ((*in1 - *in2) * (*in1 - *in2));
-
- return kTfLiteOk;
- }
- case kTfLiteInt32:
- {
- const int *in1 = input1->data.i32;
- const int *in2 = input2->data.i32;
- const int *in_end1 = in1 + elements;
- int *out = output->data.i32;
-
- for (; in1 < in_end1; in1++, in2++, out++)
- *out = ((*in1 - *in2) * (*in1 - *in2));
-
- return kTfLiteOk;
- }
- case kTfLiteInt64:
- {
- const int64_t *in1 = input1->data.i64;
-      const int64_t *in2 = input2->data.i64;
- const int64_t *in_end1 = in1 + elements;
- int64_t *out = output->data.i64;
-
- for (; in1 < in_end1; in1++, in2++, out++)
- *out = ((*in1 - *in2) * (*in1 - *in2));
-
- return kTfLiteOk;
- }
- default:
- {
-      context->ReportError(context, "Input type %d is not supported", input1->type);
- return kTfLiteError;
- }
- }
-}
-
-} // namespace SquaredDifference
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
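
Prepare above pins both inputs to the same type and resizes the output to input1's dims, so the kernel does no broadcasting; Eval then computes (a - b)^2 element by element. Isolated from the TfLiteTensor plumbing, the inner loop reduces to this (a sketch with illustrative names, not part of the deleted file):

    #include <cstddef>

    // Sketch: the per-element computation from EvalSquaredDifference.
    template <typename T>
    void SquaredDiff(const T *a, const T *b, T *out, std::size_t n)
    {
      for (std::size_t i = 0; i < n; ++i)
      {
        const T d = a[i] - b[i];
        out[i] = d * d; // (a - b) squared, as in the kernel's loop
      }
    }
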
diff --git a/libs/tflite/src/ext/kernels/TensorFlowMax.cpp b/libs/tflite/src/ext/kernels/TensorFlowMax.cpp
deleted file mode 100644
index d72ad242c..000000000
--- a/libs/tflite/src/ext/kernels/TensorFlowMax.cpp
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/ext/kernels/TensorFlowMax.h"
-#include "tensorflow/contrib/lite/kernels/kernel_util.h"
-
-#include <iostream>
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace TensorFlowMax
-{
-
-struct TensorFlowMaxOp
-{
- TensorFlowMaxOp(TfLiteContext *context, TfLiteNode *node)
- {
- input = ::tflite::GetInput(context, node, 0);
- axis = ::tflite::GetInput(context, node, 1);
- output = ::tflite::GetOutput(context, node, 0);
- }
- const TfLiteTensor *input;
- const TfLiteTensor *axis;
- TfLiteTensor *output;
-};
-
-void *InitTensorFlowMax(TfLiteContext *context, const char *buffer, size_t length)
-{
- // Creates two temp tensors to store index and axis for internal
- // implementation only.
- auto *scratch_tensor_index = new int;
- context->AddTensors(context, 2, scratch_tensor_index);
- return scratch_tensor_index;
-}
-
-void FreeTensorFlowMax(TfLiteContext *context, void *buffer)
-{
-  delete static_cast<int *>(buffer); // InitTensorFlowMax allocated an int scratch index
-}
-
-// Resizes the temp tensor that stores resolved axis.
-TfLiteStatus ResizeTempAxis(TfLiteContext *context, TensorFlowMaxOp *op_context,
- TfLiteTensor *resolved_axis)
-{
- TfLiteIntArray *axis_size = TfLiteIntArrayCreate(1);
- axis_size->data[0] = static_cast<int>(::tflite::NumElements(op_context->axis));
- return context->ResizeTensor(context, resolved_axis, axis_size);
-}
-
-// Resizes output array based on the input size and resolved axis.
-TfLiteStatus ResizeOutputTensor(TfLiteContext *context, TensorFlowMaxOp *op_context)
-{
- size_t num_axis = ::tflite::NumElements(op_context->axis);
- TfLiteIntArray *input_dims = op_context->input->dims;
- int input_num_dims = ::tflite::NumDimensions(op_context->input);
- const int *axis = op_context->axis->data.i32;
-
- {
- // Calculates size of reducing axis.
- int num_reduce_axis = num_axis;
- for (int i = 0; i < num_axis; ++i)
- {
- int current = axis[i];
- if (current < 0)
- {
- current += input_num_dims;
- }
- TF_LITE_ENSURE(context, current >= 0 && current < input_num_dims);
- for (int j = 0; j < i; ++j)
- {
- int previous = axis[j];
- if (previous < 0)
- {
- previous += input_num_dims;
- }
- if (current == previous)
- {
- --num_reduce_axis;
- break;
- }
- }
- }
- // Determines output dimensions.
- int output_num_dims = ::tflite::NumDimensions(op_context->output);
- TF_LITE_ENSURE(context, (input_num_dims == output_num_dims) ||
- (input_num_dims - num_reduce_axis == output_num_dims));
-
- if (input_num_dims == output_num_dims)
- {
- TfLiteIntArray *output_dims = TfLiteIntArrayCopy(input_dims);
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- int current = axis[axis_idx];
- output_dims->data[current] = 1;
- }
- return context->ResizeTensor(context, op_context->output, output_dims);
- }
- else
- {
- TfLiteIntArray *output_dims = TfLiteIntArrayCreate(output_num_dims);
- int num_skip_axis = 0;
- for (int idx = 0; idx < input_num_dims; ++idx)
- {
- bool is_axis = false;
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- if (axis[axis_idx] == idx || axis[axis_idx] + input_num_dims == idx)
- {
- ++num_skip_axis;
- is_axis = true;
- break;
- }
- }
- if (!is_axis)
- {
- output_dims->data[idx - num_skip_axis] = input_dims->data[idx];
- }
- }
- return context->ResizeTensor(context, op_context->output, output_dims);
- }
- }
-}
-
-// Initializes temp tensors to store index and resolved axis.
-TfLiteStatus InitializeTemporaries(TfLiteContext *context, TfLiteNode *node,
- TensorFlowMaxOp *op_context)
-{
- // Creates a temp index to iterate through input data.
- int *scratch_tensor_index = reinterpret_cast<int *>(node->user_data);
- TfLiteIntArrayFree(node->temporaries);
- node->temporaries = TfLiteIntArrayCreate(2);
- node->temporaries->data[0] = *scratch_tensor_index;
- TfLiteTensor *scratch_tensor = &context->tensors[node->temporaries->data[0]];
- scratch_tensor->type = kTfLiteInt32;
- scratch_tensor->allocation_type = kTfLiteArenaRw;
- TfLiteIntArray *index_size = TfLiteIntArrayCreate(1);
- index_size->data[0] = ::tflite::NumDimensions(op_context->input);
- TF_LITE_ENSURE_OK(context, context->ResizeTensor(context, scratch_tensor, index_size));
-
- // Creates a temp tensor to store resolved axis given input data.
- node->temporaries->data[1] = *scratch_tensor_index + 1;
- TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
- resolved_axis->type = kTfLiteInt32;
- return kTfLiteOk;
-}
-
-TfLiteStatus PrepareTensorFlowMax(TfLiteContext *context, TfLiteNode *node)
-{
- TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 2);
- TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
-
- TensorFlowMaxOp op_context(context, node);
- TF_LITE_ENSURE_OK(context, InitializeTemporaries(context, node, &op_context));
-
- TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
- // Leaves work to Eval if axis is not constant; else resizes output.
- if (!::tflite::IsConstantTensor(op_context.axis))
- {
- ::tflite::SetTensorToDynamic(op_context.output);
- ::tflite::SetTensorToDynamic(resolved_axis);
- return kTfLiteOk;
- }
- resolved_axis->allocation_type = kTfLiteArenaRw;
- TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
- return ResizeOutputTensor(context, &op_context);
-}
-
-// Gets offset of index if expanded on axis. When expanded, the flattened offset
-// will not change if the output index changes on the given axis. For example,
-// if you have a 2D tensor and you are expanding to 3D on axis 0,
-// then index (0, 1, 2) and index (1, 1, 2) will map from the same flattened
-// offset.
-inline size_t ExpandedInputOffset(const int num_dims, const int *dims, const int *index,
- const int num_axis, const int *axis)
-{
- size_t offset = 0;
- int out_idx = 0;
- for (int in_idx = 0; in_idx < num_dims; ++in_idx)
- {
- // if we need to expand this axis
- bool is_axis = false;
- if (axis != nullptr)
- {
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- if (in_idx == axis[axis_idx])
- {
- is_axis = true;
- break;
- }
- }
- }
- if (!is_axis)
- {
- offset = offset * static_cast<size_t>(dims[in_idx]) + static_cast<size_t>(index[out_idx]);
- out_idx++;
- }
- else
- {
- offset = offset * static_cast<size_t>(dims[in_idx]);
- }
- }
- return offset;
-}
-
-// Gets offset of index if reducing on axis. When reducing, the flattened offset
-// will not change if the input index changes on the given axis. For example,
-// if you have a 3D tensor and you are reducing to 2D by eliminating axis 0,
-// then index (0, 1, 2) and index (1, 1, 2) will map to the same flattened
-// offset.
-// TODO(kanlig): use Dims to represent dimensions.
-inline size_t ReducedOutputOffset(const int num_dims, const int *dims, const int *index,
- const int num_axis, const int *axis)
-{
- size_t offset = 0;
- for (int idx = 0; idx < num_dims; ++idx)
- {
- // if we need to skip this axis
- bool is_axis = false;
- if (axis != nullptr)
- {
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- if (idx == axis[axis_idx])
- {
- is_axis = true;
- break;
- }
- }
- }
- if (!is_axis)
- {
- offset = offset * static_cast<size_t>(dims[idx]) + static_cast<size_t>(index[idx]);
- }
- }
- return offset;
-}
-
-// Gets next index to iterate through a multidimensional array.
-inline bool NextIndex(TfLiteContext *context, const int num_dims, const int *dims, int *current)
-{
- int carry = 1;
- for (int idx = num_dims - 1; idx >= 0; --idx)
- {
- int current_val = current[idx] + carry;
- TF_LITE_ENSURE(context, (dims[idx] >= current_val));
- if (dims[idx] == current_val)
- {
- current[idx] = 0;
- }
- else
- {
- current[idx] = current_val;
- carry = 0;
- break;
- }
- }
- return (carry == 0);
-}
-
-template <typename T>
-inline TfLiteStatus
-CustomMax(TfLiteContext *context, T *input_data, const int *input_dims, const int input_num_dims,
- T *output_data, const int *output_dims, const int output_num_dims, const int *axis,
- const int num_axis_dimensions, bool keep_dims, int *temp_index, int *resolved_axis)
-{
- // resolves axis.
- int num_resolved_axis = 0;
- for (int idx = 0; idx < num_axis_dimensions; ++idx)
- {
- int current = axis[idx];
- TF_LITE_ENSURE(context, (current < input_num_dims && current + input_num_dims >= 0));
- if (current < 0)
- {
- current += input_num_dims;
- }
- bool is_dup = false;
- for (int j = 0; j < num_resolved_axis; ++j)
- {
- if (resolved_axis[j] == current)
- {
- is_dup = true;
- break;
- }
- }
- if (!is_dup)
- {
- resolved_axis[num_resolved_axis++] = current;
- }
- }
-
- TF_LITE_ENSURE(context, (input_num_dims > 0));
- TF_LITE_ENSURE(context, (input_dims != nullptr));
- TF_LITE_ENSURE(context, (temp_index != nullptr));
-
- // resets output data.
- for (int idx = 0; idx < output_num_dims; ++idx)
- {
- temp_index[idx] = 0;
- }
- for (bool has_next = true; has_next;
- has_next = NextIndex(context, output_num_dims, output_dims, temp_index))
- {
- size_t output_offset =
- ReducedOutputOffset(output_num_dims, output_dims, temp_index, 0, nullptr);
- size_t input_offset = ExpandedInputOffset(input_num_dims, input_dims, temp_index,
- num_resolved_axis, resolved_axis);
- output_data[output_offset] = input_data[input_offset];
- }
-
- // resets temp index.
- for (int idx = 0; idx < input_num_dims; ++idx)
- {
- temp_index[idx] = 0;
- }
-
- // iterates through input_data.
- for (bool has_next = true; has_next;
- has_next = NextIndex(context, input_num_dims, input_dims, temp_index))
- {
- size_t input_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index, 0, nullptr);
- size_t output_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index,
- num_resolved_axis, resolved_axis);
- if (output_data[output_offset] < input_data[input_offset])
- {
- output_data[output_offset] = input_data[input_offset];
- }
- }
-
- return kTfLiteOk;
-}
-
-TfLiteStatus EvalTensorFlowMax(TfLiteContext *context, TfLiteNode *node)
-{
-
- TensorFlowMaxOp op_context(context, node);
- int num_axis = static_cast<int>(::tflite::NumElements(op_context.axis));
- TfLiteTensor *temp_index = &context->tensors[node->temporaries->data[0]];
- TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
- // Resize the output tensor if the output tensor is dynamic.
- if (::tflite::IsDynamicTensor(op_context.output))
- {
- TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
- TF_LITE_ENSURE_OK(context, ResizeOutputTensor(context, &op_context));
- }
-
- TfLiteStatus returnStatus = kTfLiteOk;
- switch (op_context.input->type)
- {
- case kTfLiteFloat32:
- returnStatus = CustomMax<float>(
- context, op_context.input->data.f, op_context.input->dims->data,
- op_context.input->dims->size, op_context.output->data.f, op_context.output->dims->data,
- op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
- temp_index->data.i32, resolved_axis->data.i32);
- break;
- case kTfLiteInt32:
- returnStatus = CustomMax<int>(context, op_context.input->data.i32,
- op_context.input->dims->data, op_context.input->dims->size,
- op_context.output->data.i32, op_context.output->dims->data,
- op_context.output->dims->size, op_context.axis->data.i32,
- num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
- break;
- case kTfLiteUInt8:
- returnStatus = CustomMax<uint8_t>(
- context, op_context.input->data.uint8, op_context.input->dims->data,
- op_context.input->dims->size, op_context.output->data.uint8,
- op_context.output->dims->data, op_context.output->dims->size, op_context.axis->data.i32,
- num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
- break;
- case kTfLiteInt64:
- returnStatus = CustomMax<int64_t>(
- context, op_context.input->data.i64, op_context.input->dims->data,
- op_context.input->dims->size, op_context.output->data.i64, op_context.output->dims->data,
- op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
- temp_index->data.i32, resolved_axis->data.i32);
- break;
- default:
- returnStatus = kTfLiteError;
- }
-
- return returnStatus;
-}
-
-} // namespace TensorFlowMax
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
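
The core of the deleted TensorFlowMax kernel is an odometer walk: NextIndex increments a multidimensional index with carry, and ReducedOutputOffset flattens an index while skipping the reduced axes, so every input element maps onto its reduction cell, where CustomMax keeps the running maximum. A self-contained sketch of the same walk, stripped of the TfLiteContext plumbing (all names are illustrative; positive dims are assumed):

    #include <cstddef>
    #include <vector>

    // Sketch: reduce-max over arbitrary axes using the odometer/offset
    // technique of NextIndex and ReducedOutputOffset. 'reduced[d]' marks
    // the axes to collapse.
    template <typename T>
    void ReduceMax(const T *in, T *out, const std::vector<int> &dims,
                   const std::vector<bool> &reduced)
    {
      std::size_t out_size = 1;
      for (std::size_t d = 0; d < dims.size(); ++d)
        if (!reduced[d])
          out_size *= dims[d];

      std::vector<bool> seeded(out_size, false); // first visit seeds a cell
      std::vector<int> idx(dims.size(), 0);
      bool has_next = true;
      while (has_next)
      {
        // Flatten the full index (input offset) and the index with reduced
        // axes skipped (output offset), as ReducedOutputOffset does.
        std::size_t in_off = 0, out_off = 0;
        for (std::size_t d = 0; d < dims.size(); ++d)
        {
          in_off = in_off * dims[d] + idx[d];
          if (!reduced[d])
            out_off = out_off * dims[d] + idx[d];
        }
        if (!seeded[out_off] || out[out_off] < in[in_off])
        {
          out[out_off] = in[in_off];
          seeded[out_off] = true;
        }
        // Odometer increment with carry, as in NextIndex.
        has_next = false;
        for (int d = static_cast<int>(dims.size()) - 1; d >= 0; --d)
        {
          if (++idx[d] < dims[d])
          {
            has_next = true;
            break;
          }
          idx[d] = 0;
        }
      }
    }
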
diff --git a/libs/tflite/src/ext/kernels/TensorFlowSum.cpp b/libs/tflite/src/ext/kernels/TensorFlowSum.cpp
deleted file mode 100644
index cbf97970c..000000000
--- a/libs/tflite/src/ext/kernels/TensorFlowSum.cpp
+++ /dev/null
@@ -1,400 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/ext/kernels/TensorFlowSum.h"
-#include "tensorflow/contrib/lite/kernels/kernel_util.h"
-
-#include <iostream>
-
-namespace nnfw
-{
-namespace tflite
-{
-namespace custom
-{
-namespace TensorFlowSum
-{
-
-struct TensorFlowSumOp
-{
- TensorFlowSumOp(TfLiteContext *context, TfLiteNode *node)
- {
- input = ::tflite::GetInput(context, node, 0);
- axis = ::tflite::GetInput(context, node, 1);
- output = ::tflite::GetOutput(context, node, 0);
- }
- const TfLiteTensor *input;
- const TfLiteTensor *axis;
- TfLiteTensor *output;
-};
-
-void *InitTensorFlowSum(TfLiteContext *context, const char *buffer, size_t length)
-{
- // Creates two temp tensors to store index and axis for internal
- // implementation only.
- auto *scratch_tensor_index = new int;
- context->AddTensors(context, 2, scratch_tensor_index);
- return scratch_tensor_index;
-}
-
-void FreeTensorFlowSum(TfLiteContext *context, void *buffer)
-{
-  delete static_cast<int *>(buffer); // InitTensorFlowSum allocated an int scratch index
-}
-
-// Resizes the temp tensor that stores resolved axis.
-TfLiteStatus ResizeTempAxis(TfLiteContext *context, TensorFlowSumOp *op_context,
- TfLiteTensor *resolved_axis)
-{
- TfLiteIntArray *axis_size = TfLiteIntArrayCreate(1);
- axis_size->data[0] = static_cast<int>(::tflite::NumElements(op_context->axis));
- return context->ResizeTensor(context, resolved_axis, axis_size);
-}
-
-// Resizes output array based on the input size and resolved axis.
-TfLiteStatus ResizeOutputTensor(TfLiteContext *context, TensorFlowSumOp *op_context)
-{
- size_t num_axis = ::tflite::NumElements(op_context->axis);
- TfLiteIntArray *input_dims = op_context->input->dims;
- int input_num_dims = ::tflite::NumDimensions(op_context->input);
- const int *axis = op_context->axis->data.i32;
-
- {
- // Calculates size of reducing axis.
- int num_reduce_axis = num_axis;
- for (int i = 0; i < num_axis; ++i)
- {
- int current = axis[i];
- if (current < 0)
- {
- current += input_num_dims;
- }
- TF_LITE_ENSURE(context, current >= 0 && current < input_num_dims);
- for (int j = 0; j < i; ++j)
- {
- int previous = axis[j];
- if (previous < 0)
- {
- previous += input_num_dims;
- }
- if (current == previous)
- {
- --num_reduce_axis;
- break;
- }
- }
- }
- // Determines output dimensions.
- int output_num_dims = ::tflite::NumDimensions(op_context->output);
- TF_LITE_ENSURE(context, (input_num_dims == output_num_dims) ||
- (input_num_dims - num_reduce_axis == output_num_dims));
-
- if (input_num_dims == output_num_dims)
- {
- TfLiteIntArray *output_dims = TfLiteIntArrayCopy(input_dims);
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- int current = axis[axis_idx];
- output_dims->data[current] = 1;
- }
- return context->ResizeTensor(context, op_context->output, output_dims);
- }
- else
- {
- TfLiteIntArray *output_dims = TfLiteIntArrayCreate(output_num_dims);
- int num_skip_axis = 0;
- for (int idx = 0; idx < input_num_dims; ++idx)
- {
- bool is_axis = false;
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- if (axis[axis_idx] == idx || axis[axis_idx] + input_num_dims == idx)
- {
- ++num_skip_axis;
- is_axis = true;
- break;
- }
- }
- if (!is_axis)
- {
- output_dims->data[idx - num_skip_axis] = input_dims->data[idx];
- }
- }
- return context->ResizeTensor(context, op_context->output, output_dims);
- }
- }
-}
-
-// Initializes temp tensors to store index and resolved axis.
-TfLiteStatus InitializeTemporaries(TfLiteContext *context, TfLiteNode *node,
- TensorFlowSumOp *op_context)
-{
- // Creates a temp index to iterate through input data.
- int *scratch_tensor_index = reinterpret_cast<int *>(node->user_data);
- TfLiteIntArrayFree(node->temporaries);
- node->temporaries = TfLiteIntArrayCreate(2);
- node->temporaries->data[0] = *scratch_tensor_index;
- TfLiteTensor *scratch_tensor = &context->tensors[node->temporaries->data[0]];
- scratch_tensor->type = kTfLiteInt32;
- scratch_tensor->allocation_type = kTfLiteArenaRw;
- TfLiteIntArray *index_size = TfLiteIntArrayCreate(1);
- index_size->data[0] = ::tflite::NumDimensions(op_context->input);
- TF_LITE_ENSURE_OK(context, context->ResizeTensor(context, scratch_tensor, index_size));
-
- // Creates a temp tensor to store resolved axis given input data.
- node->temporaries->data[1] = *scratch_tensor_index + 1;
- TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
- resolved_axis->type = kTfLiteInt32;
- return kTfLiteOk;
-}
-
-TfLiteStatus PrepareTensorFlowSum(TfLiteContext *context, TfLiteNode *node)
-{
- TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 2);
- TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
-
- TensorFlowSumOp op_context(context, node);
- TF_LITE_ENSURE_OK(context, InitializeTemporaries(context, node, &op_context));
-
- TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
- // Leaves work to Eval if axis is not constant; else resizes output.
- if (!::tflite::IsConstantTensor(op_context.axis))
- {
- ::tflite::SetTensorToDynamic(op_context.output);
- ::tflite::SetTensorToDynamic(resolved_axis);
- return kTfLiteOk;
- }
- resolved_axis->allocation_type = kTfLiteArenaRw;
- TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
- return ResizeOutputTensor(context, &op_context);
-}
-
-// Gets offset of index if expanded on axis. When expanded, the flattened offset
-// will not change if the output index changes on the given axis. For example,
-// if you have a 2D tensor and you are expanding to 3D on axis 0,
-// then index (0, 1, 2) and index (1, 1, 2) will map from the same flattened
-// offset.
-inline size_t ExpandedInputOffset(const int num_dims, const int *dims, const int *index,
- const int num_axis, const int *axis)
-{
- size_t offset = 0;
- int out_idx = 0;
- for (int in_idx = 0; in_idx < num_dims; ++in_idx)
- {
- // if we need to expand this axis
- bool is_axis = false;
- if (axis != nullptr)
- {
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- if (in_idx == axis[axis_idx])
- {
- is_axis = true;
- break;
- }
- }
- }
- if (!is_axis)
- {
- offset = offset * static_cast<size_t>(dims[in_idx]) + static_cast<size_t>(index[out_idx]);
- out_idx++;
- }
- else
- {
- offset = offset * static_cast<size_t>(dims[in_idx]);
- }
- }
- return offset;
-}
-
-// Gets offset of index if reducing on axis. When reducing, the flattened offset
-// will not change if the input index changes on the given axis. For example,
-// if you have a 3D tensor and you are reducing to 2D by eliminating axis 0,
-// then index (0, 1, 2) and index (1, 1, 2) will map to the same flattened
-// offset.
-// TODO(kanlig): use Dims to represent dimensions.
-inline size_t ReducedOutputOffset(const int num_dims, const int *dims, const int *index,
- const int num_axis, const int *axis)
-{
- size_t offset = 0;
- for (int idx = 0; idx < num_dims; ++idx)
- {
- // if we need to skip this axis
- bool is_axis = false;
- if (axis != nullptr)
- {
- for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
- {
- if (idx == axis[axis_idx])
- {
- is_axis = true;
- break;
- }
- }
- }
- if (!is_axis)
- {
- offset = offset * static_cast<size_t>(dims[idx]) + static_cast<size_t>(index[idx]);
- }
- }
- return offset;
-}
-
-// Gets next index to iterate through a multidimensional array.
-inline bool NextIndex(TfLiteContext *context, const int num_dims, const int *dims, int *current)
-{
- int carry = 1;
- for (int idx = num_dims - 1; idx >= 0; --idx)
- {
- int current_val = current[idx] + carry;
- TF_LITE_ENSURE(context, (dims[idx] >= current_val));
- if (dims[idx] == current_val)
- {
- current[idx] = 0;
- }
- else
- {
- current[idx] = current_val;
- carry = 0;
- break;
- }
- }
- return (carry == 0);
-}
-
-template <typename T>
-inline TfLiteStatus
-CustomSum(TfLiteContext *context, T *input_data, const int *input_dims, const int input_num_dims,
- T *output_data, const int *output_dims, const int output_num_dims, const int *axis,
- const int num_axis_dimensions, bool keep_dims, int *temp_index, int *resolved_axis)
-{
- // resolves axis.
- int num_resolved_axis = 0;
- for (int idx = 0; idx < num_axis_dimensions; ++idx)
- {
- int current = axis[idx];
- TF_LITE_ENSURE(context, (current < input_num_dims && current + input_num_dims >= 0));
- if (current < 0)
- {
- current += input_num_dims;
- }
- bool is_dup = false;
- for (int j = 0; j < num_resolved_axis; ++j)
- {
- if (resolved_axis[j] == current)
- {
- is_dup = true;
- break;
- }
- }
- if (!is_dup)
- {
- resolved_axis[num_resolved_axis++] = current;
- }
- }
-
- TF_LITE_ENSURE(context, (input_num_dims > 0));
- TF_LITE_ENSURE(context, (input_dims != nullptr));
- TF_LITE_ENSURE(context, (temp_index != nullptr));
-
- // resets output data.
- for (int idx = 0; idx < output_num_dims; ++idx)
- {
- temp_index[idx] = 0;
- }
- for (bool has_next = true; has_next;
- has_next = NextIndex(context, output_num_dims, output_dims, temp_index))
- {
- size_t output_offset =
- ReducedOutputOffset(output_num_dims, output_dims, temp_index, 0, nullptr);
- output_data[output_offset] = 0;
- }
-
- // resets temp index.
- for (int idx = 0; idx < input_num_dims; ++idx)
- {
- temp_index[idx] = 0;
- }
-
- // iterates through input_data.
- for (bool has_next = true; has_next;
- has_next = NextIndex(context, input_num_dims, input_dims, temp_index))
- {
- size_t input_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index, 0, nullptr);
- size_t output_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index,
- num_resolved_axis, resolved_axis);
- output_data[output_offset] += input_data[input_offset];
- }
-
- return kTfLiteOk;
-}
-
-TfLiteStatus EvalTensorFlowSum(TfLiteContext *context, TfLiteNode *node)
-{
-
- TensorFlowSumOp op_context(context, node);
- int num_axis = static_cast<int>(::tflite::NumElements(op_context.axis));
- TfLiteTensor *temp_index = &context->tensors[node->temporaries->data[0]];
- TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
- // Resize the output tensor if the output tensor is dynamic.
- if (::tflite::IsDynamicTensor(op_context.output))
- {
- TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
- TF_LITE_ENSURE_OK(context, ResizeOutputTensor(context, &op_context));
- }
-
- TfLiteStatus returnStatus = kTfLiteOk;
- switch (op_context.input->type)
- {
- case kTfLiteFloat32:
- returnStatus = CustomSum<float>(
- context, op_context.input->data.f, op_context.input->dims->data,
- op_context.input->dims->size, op_context.output->data.f, op_context.output->dims->data,
- op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
- temp_index->data.i32, resolved_axis->data.i32);
- break;
- case kTfLiteInt32:
- returnStatus = CustomSum<int>(context, op_context.input->data.i32,
- op_context.input->dims->data, op_context.input->dims->size,
- op_context.output->data.i32, op_context.output->dims->data,
- op_context.output->dims->size, op_context.axis->data.i32,
- num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
- break;
- case kTfLiteUInt8:
- returnStatus = CustomSum<uint8_t>(
- context, op_context.input->data.uint8, op_context.input->dims->data,
- op_context.input->dims->size, op_context.output->data.uint8,
- op_context.output->dims->data, op_context.output->dims->size, op_context.axis->data.i32,
- num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
- break;
- case kTfLiteInt64:
- returnStatus = CustomSum<int64_t>(
- context, op_context.input->data.i64, op_context.input->dims->data,
- op_context.input->dims->size, op_context.output->data.i64, op_context.output->dims->data,
- op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
- temp_index->data.i32, resolved_axis->data.i32);
- break;
- default:
- returnStatus = kTfLiteError;
- }
-
- return returnStatus;
-}
-
-} // namespace TensorFlowSum
-} // namespace custom
-} // namespace tflite
-} // namespace nnfw
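
TensorFlowSum is structurally a copy of TensorFlowMax: the op struct, the temporaries, the offset helpers, and NextIndex are repeated verbatim, and only CustomSum differs, zeroing each output cell first and accumulating with += where CustomMax takes a maximum. One step both kernels share is axis resolution, which normalizes negative axes and drops duplicates; pulled out on its own it looks like this (illustrative signature):

    #include <vector>

    // Sketch: the axis-resolution step at the top of CustomMax/CustomSum.
    std::vector<int> ResolveAxes(const std::vector<int> &axes, int rank)
    {
      std::vector<int> resolved;
      for (int a : axes)
      {
        if (a < 0)
          a += rank; // e.g. axis -1 on a rank-3 tensor resolves to 2
        bool duplicate = false;
        for (int r : resolved)
          if (r == a)
          {
            duplicate = true;
            break;
          }
        if (!duplicate)
          resolved.push_back(a);
      }
      return resolved;
    }
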
diff --git a/libs/tflite/src/ext/kernels/register.cpp b/libs/tflite/src/ext/kernels/register.cpp
deleted file mode 100644
index b822bd616..000000000
--- a/libs/tflite/src/ext/kernels/register.cpp
+++ /dev/null
@@ -1,221 +0,0 @@
-/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This code is derived from the following file (in TensorFlow)
-// 'externals/tensorflow/tensorflow/contrib/lite/kernels/register.cc'
-#include "tflite/ext/kernels/register.h"
-#include "tflite/ext/kernels/CustomOps.h"
-
-namespace tflite {
-namespace ops {
-namespace builtin {
-
-TfLiteRegistration *Register_RELU();
-TfLiteRegistration *Register_RELU_N1_TO_1();
-TfLiteRegistration *Register_RELU6();
-TfLiteRegistration *Register_TANH();
-TfLiteRegistration *Register_LOGISTIC();
-TfLiteRegistration *Register_AVERAGE_POOL_2D();
-TfLiteRegistration *Register_MAX_POOL_2D();
-TfLiteRegistration *Register_L2_POOL_2D();
-TfLiteRegistration *Register_CONV_2D();
-TfLiteRegistration *Register_DEPTHWISE_CONV_2D();
-TfLiteRegistration *Register_SVDF();
-TfLiteRegistration *Register_RNN();
-TfLiteRegistration *Register_BIDIRECTIONAL_SEQUENCE_RNN();
-TfLiteRegistration *Register_UNIDIRECTIONAL_SEQUENCE_RNN();
-TfLiteRegistration *Register_EMBEDDING_LOOKUP();
-TfLiteRegistration *Register_EMBEDDING_LOOKUP_SPARSE();
-TfLiteRegistration *Register_FULLY_CONNECTED();
-TfLiteRegistration *Register_LSH_PROJECTION();
-TfLiteRegistration *Register_HASHTABLE_LOOKUP();
-TfLiteRegistration *Register_SOFTMAX();
-TfLiteRegistration *Register_CONCATENATION();
-TfLiteRegistration *Register_ADD();
-TfLiteRegistration *Register_SPACE_TO_BATCH_ND();
-TfLiteRegistration *Register_DIV();
-TfLiteRegistration *Register_SUB();
-TfLiteRegistration *Register_BATCH_TO_SPACE_ND();
-TfLiteRegistration *Register_MUL();
-TfLiteRegistration *Register_L2_NORMALIZATION();
-TfLiteRegistration *Register_LOCAL_RESPONSE_NORMALIZATION();
-TfLiteRegistration *Register_LSTM();
-TfLiteRegistration *Register_BIDIRECTIONAL_SEQUENCE_LSTM();
-TfLiteRegistration *Register_UNIDIRECTIONAL_SEQUENCE_LSTM();
-TfLiteRegistration *Register_PAD();
-TfLiteRegistration *Register_PADV2();
-TfLiteRegistration *Register_RESHAPE();
-TfLiteRegistration *Register_RESIZE_BILINEAR();
-TfLiteRegistration *Register_SKIP_GRAM();
-TfLiteRegistration *Register_SPACE_TO_DEPTH();
-TfLiteRegistration *Register_GATHER();
-TfLiteRegistration *Register_TRANSPOSE();
-TfLiteRegistration *Register_MEAN();
-TfLiteRegistration *Register_SPLIT();
-TfLiteRegistration *Register_SQUEEZE();
-TfLiteRegistration *Register_STRIDED_SLICE();
-TfLiteRegistration *Register_EXP();
-TfLiteRegistration *Register_TOPK_V2();
-TfLiteRegistration *Register_LOG_SOFTMAX();
-TfLiteRegistration *Register_CAST();
-TfLiteRegistration *Register_DEQUANTIZE();
-TfLiteRegistration *Register_PRELU();
-TfLiteRegistration *Register_MAXIMUM();
-TfLiteRegistration *Register_MINIMUM();
-TfLiteRegistration *Register_ARG_MAX();
-TfLiteRegistration *Register_GREATER();
-TfLiteRegistration *Register_GREATER_EQUAL();
-TfLiteRegistration *Register_LESS();
-TfLiteRegistration *Register_LESS_EQUAL();
-TfLiteRegistration *Register_FLOOR();
-TfLiteRegistration *Register_NEG();
-TfLiteRegistration *Register_SELECT();
-TfLiteRegistration *Register_SLICE();
-TfLiteRegistration *Register_SIN();
-TfLiteRegistration *Register_TRANSPOSE_CONV();
-TfLiteRegistration *Register_SPARSE_TO_DENSE();
-#ifndef OBS_BUILD
-TfLiteRegistration *Register_SUM();
-TfLiteRegistration *Register_REDUCE_MAX();
-TfLiteRegistration *Register_REDUCE_MIN();
-TfLiteRegistration *Register_EQUAL();
-TfLiteRegistration *Register_NOT_EQUAL();
-TfLiteRegistration *Register_SQRT();
-TfLiteRegistration *Register_RSQRT();
-TfLiteRegistration *Register_SHAPE();
-TfLiteRegistration *Register_POW();
-TfLiteRegistration *Register_FAKE_QUANT();
-TfLiteRegistration *Register_PACK();
-TfLiteRegistration *Register_ONE_HOT();
-TfLiteRegistration *Register_LOGICAL_OR();
-TfLiteRegistration *Register_LOGICAL_AND();
-TfLiteRegistration *Register_LOGICAL_NOT();
-TfLiteRegistration *Register_UNPACK();
-TfLiteRegistration *Register_FLOOR_DIV();
-TfLiteRegistration *Register_SQUARE();
-TfLiteRegistration *Register_ZEROS_LIKE();
-#endif // OBS_BUILD
-
-} // namespace builtin
-} // namespace ops
-} // namespace tflite
-
-namespace nnfw {
-namespace tflite {
-
-BuiltinOpResolver::BuiltinOpResolver()
-{
- // Using namespace directive to minimize diff with upstream tensorflow
- using namespace ::tflite::ops::builtin;
- using namespace ::tflite;
-
- AddBuiltin(BuiltinOperator_RELU, Register_RELU());
- AddBuiltin(BuiltinOperator_RELU_N1_TO_1, Register_RELU_N1_TO_1());
- AddBuiltin(BuiltinOperator_RELU6, Register_RELU6());
- AddBuiltin(BuiltinOperator_TANH, Register_TANH());
- AddBuiltin(BuiltinOperator_LOGISTIC, Register_LOGISTIC());
- AddBuiltin(BuiltinOperator_AVERAGE_POOL_2D, Register_AVERAGE_POOL_2D());
- AddBuiltin(BuiltinOperator_MAX_POOL_2D, Register_MAX_POOL_2D());
- AddBuiltin(BuiltinOperator_L2_POOL_2D, Register_L2_POOL_2D());
- AddBuiltin(BuiltinOperator_CONV_2D, Register_CONV_2D());
- AddBuiltin(BuiltinOperator_DEPTHWISE_CONV_2D, Register_DEPTHWISE_CONV_2D());
- AddBuiltin(BuiltinOperator_SVDF, Register_SVDF());
- AddBuiltin(BuiltinOperator_RNN, Register_RNN());
- AddBuiltin(BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN, Register_BIDIRECTIONAL_SEQUENCE_RNN());
- AddBuiltin(BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN, Register_UNIDIRECTIONAL_SEQUENCE_RNN());
- AddBuiltin(BuiltinOperator_EMBEDDING_LOOKUP, Register_EMBEDDING_LOOKUP());
- AddBuiltin(BuiltinOperator_EMBEDDING_LOOKUP_SPARSE, Register_EMBEDDING_LOOKUP_SPARSE());
- AddBuiltin(BuiltinOperator_FULLY_CONNECTED, Register_FULLY_CONNECTED());
- AddBuiltin(BuiltinOperator_LSH_PROJECTION, Register_LSH_PROJECTION());
- AddBuiltin(BuiltinOperator_HASHTABLE_LOOKUP, Register_HASHTABLE_LOOKUP());
- AddBuiltin(BuiltinOperator_SOFTMAX, Register_SOFTMAX());
- AddBuiltin(BuiltinOperator_CONCATENATION, Register_CONCATENATION());
- AddBuiltin(BuiltinOperator_ADD, Register_ADD());
- AddBuiltin(BuiltinOperator_SPACE_TO_BATCH_ND, Register_SPACE_TO_BATCH_ND());
- AddBuiltin(BuiltinOperator_BATCH_TO_SPACE_ND, Register_BATCH_TO_SPACE_ND());
- AddBuiltin(BuiltinOperator_MUL, Register_MUL());
- AddBuiltin(BuiltinOperator_L2_NORMALIZATION, Register_L2_NORMALIZATION());
- AddBuiltin(BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION, Register_LOCAL_RESPONSE_NORMALIZATION());
- AddBuiltin(BuiltinOperator_LSTM, Register_LSTM());
- AddBuiltin(BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM, Register_BIDIRECTIONAL_SEQUENCE_LSTM());
- AddBuiltin(BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM, Register_UNIDIRECTIONAL_SEQUENCE_LSTM());
- AddBuiltin(BuiltinOperator_PAD, Register_PAD());
- AddBuiltin(BuiltinOperator_PADV2, Register_PADV2());
- AddBuiltin(BuiltinOperator_RESHAPE, Register_RESHAPE());
- AddBuiltin(BuiltinOperator_RESIZE_BILINEAR, Register_RESIZE_BILINEAR());
- AddBuiltin(BuiltinOperator_SKIP_GRAM, Register_SKIP_GRAM());
- AddBuiltin(BuiltinOperator_SPACE_TO_DEPTH, Register_SPACE_TO_DEPTH());
- AddBuiltin(BuiltinOperator_GATHER, Register_GATHER());
- AddBuiltin(BuiltinOperator_TRANSPOSE, Register_TRANSPOSE());
- AddBuiltin(BuiltinOperator_MEAN, Register_MEAN());
- AddBuiltin(BuiltinOperator_DIV, Register_DIV());
- AddBuiltin(BuiltinOperator_SUB, Register_SUB());
- AddBuiltin(BuiltinOperator_SPLIT, Register_SPLIT());
- AddBuiltin(BuiltinOperator_SQUEEZE, Register_SQUEEZE());
- AddBuiltin(BuiltinOperator_STRIDED_SLICE, Register_STRIDED_SLICE());
- AddBuiltin(BuiltinOperator_EXP, Register_EXP());
- AddBuiltin(BuiltinOperator_TOPK_V2, Register_TOPK_V2());
- AddBuiltin(BuiltinOperator_LOG_SOFTMAX, Register_LOG_SOFTMAX());
- AddBuiltin(BuiltinOperator_CAST, Register_CAST());
- AddBuiltin(BuiltinOperator_DEQUANTIZE, Register_DEQUANTIZE());
- AddBuiltin(BuiltinOperator_PRELU, Register_PRELU());
- AddBuiltin(BuiltinOperator_MAXIMUM, Register_MAXIMUM());
- AddBuiltin(BuiltinOperator_MINIMUM, Register_MINIMUM());
- AddBuiltin(BuiltinOperator_ARG_MAX, Register_ARG_MAX());
- AddBuiltin(BuiltinOperator_GREATER, Register_GREATER());
- AddBuiltin(BuiltinOperator_GREATER_EQUAL, Register_GREATER_EQUAL());
- AddBuiltin(BuiltinOperator_LESS, Register_LESS());
- AddBuiltin(BuiltinOperator_LESS_EQUAL, Register_LESS_EQUAL());
- AddBuiltin(BuiltinOperator_FLOOR, Register_FLOOR());
- AddBuiltin(BuiltinOperator_NEG, Register_NEG());
- AddBuiltin(BuiltinOperator_SELECT, Register_SELECT());
- AddBuiltin(BuiltinOperator_SLICE, Register_SLICE());
- AddBuiltin(BuiltinOperator_SIN, Register_SIN());
-#ifndef OBS_BUILD
- AddBuiltin(BuiltinOperator_SUM, Register_SUM());
- AddBuiltin(BuiltinOperator_REDUCE_MAX, Register_REDUCE_MAX());
- AddBuiltin(BuiltinOperator_REDUCE_MIN, Register_REDUCE_MIN());
- AddBuiltin(BuiltinOperator_TRANSPOSE_CONV, Register_TRANSPOSE_CONV());
- AddBuiltin(BuiltinOperator_SPARSE_TO_DENSE, Register_SPARSE_TO_DENSE());
- AddBuiltin(BuiltinOperator_EQUAL, Register_EQUAL());
- AddBuiltin(BuiltinOperator_NOT_EQUAL, Register_NOT_EQUAL());
- AddBuiltin(BuiltinOperator_SQRT, Register_SQRT());
- AddBuiltin(BuiltinOperator_RSQRT, Register_RSQRT());
- AddBuiltin(BuiltinOperator_SHAPE, Register_SHAPE());
- AddBuiltin(BuiltinOperator_POW, Register_POW());
- AddBuiltin(BuiltinOperator_FAKE_QUANT, Register_FAKE_QUANT(), 1, 2);
- AddBuiltin(BuiltinOperator_PACK, Register_PACK());
- AddBuiltin(BuiltinOperator_ONE_HOT, Register_ONE_HOT());
- AddBuiltin(BuiltinOperator_LOGICAL_OR, Register_LOGICAL_OR());
- AddBuiltin(BuiltinOperator_LOGICAL_AND, Register_LOGICAL_AND());
- AddBuiltin(BuiltinOperator_LOGICAL_NOT, Register_LOGICAL_NOT());
- AddBuiltin(BuiltinOperator_UNPACK, Register_UNPACK());
- AddBuiltin(BuiltinOperator_FLOOR_DIV, Register_FLOOR_DIV());
- AddBuiltin(BuiltinOperator_SQUARE, Register_SQUARE());
- AddBuiltin(BuiltinOperator_ZEROS_LIKE, Register_ZEROS_LIKE());
-#endif // OBS_BUILD
-
- AddCustom("TensorFlowMax", nnfw::tflite::custom::Register_TensorFlowMax());
- AddCustom("SquaredDifference", nnfw::tflite::custom::Register_SquaredDifference());
- AddCustom("TensorFlowSum", nnfw::tflite::custom::Register_TensorFlowSum());
- AddCustom("Abs", nnfw::tflite::custom::Register_Abs());
-}
-
-} // namespace tflite
-} // namespace nnfw
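
The resolver above mirrors upstream register.cc for the builtins and then registers the four custom kernels by name, so any model whose flatbuffer references "Abs", "SquaredDifference", "TensorFlowMax", or "TensorFlowSum" as a custom op resolves to the implementations deleted earlier in this diff. A typical call site, sketched against the TF Lite v1.x (contrib) API this tree targets; the helper name is illustrative:

    #include <memory>

    #include "tensorflow/contrib/lite/interpreter.h"
    #include "tensorflow/contrib/lite/model.h"
    #include "tflite/ext/kernels/register.h"

    // Sketch: building an interpreter with the extended resolver.
    std::unique_ptr<::tflite::Interpreter>
    BuildInterpreter(const ::tflite::FlatBufferModel &model)
    {
      nnfw::tflite::BuiltinOpResolver resolver; // builtins plus the custom ops
      std::unique_ptr<::tflite::Interpreter> interpreter;
      ::tflite::InterpreterBuilder(model, resolver)(&interpreter);
      return interpreter;
    }
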
diff --git a/libs/tflite/src/ext/nnapi_delegate.cpp b/libs/tflite/src/ext/nnapi_delegate.cpp
deleted file mode 100644
index 25858a7b4..000000000
--- a/libs/tflite/src/ext/nnapi_delegate.cpp
+++ /dev/null
@@ -1,1209 +0,0 @@
-/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// NOTE To minimize diff with upstream tensorflow, disable clang-format
-// clang-format off
-
-// NOTE This code is derived from the following file (in TensorFlow v1.12)
-// 'externals/tensorflow/tensorflow/contrib/lite/nnapi_delegate.cc'
-#include "tflite/ext/nnapi_delegate.h"
-#include <fcntl.h>
-#include <sys/mman.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#ifdef OBS_BUILD
-#include "tensorflow/contrib/lite/builtin_op_data.h"
-#include "tensorflow/contrib/lite/error_reporter.h"
-#else
-#include "tensorflow/contrib/lite/c/builtin_op_data.h"
-#include "tensorflow/contrib/lite/core/api/error_reporter.h"
-#endif
-#include "tensorflow/contrib/lite/model.h"
-#include "NeuralNetworksShim.h"
-#include "NeuralNetworksExShim.h"
-
-#ifdef __ANDROID__
-#include <android/log.h>
-#include <sys/system_properties.h>
-#endif
-
-namespace nnfw {
-namespace tflite {
-
-void logError(const char* format, ...) {
- // stderr is convenient for native tests, but is not captured for apps
- va_list args_for_stderr;
- va_start(args_for_stderr, format);
- vfprintf(stderr, format, args_for_stderr);
- va_end(args_for_stderr);
- fprintf(stderr, "\n");
- fflush(stderr);
-#ifdef __ANDROID__
- // produce logcat output for general consumption
- va_list args_for_log;
- va_start(args_for_log, format);
- __android_log_vprint(ANDROID_LOG_ERROR, "tflite", format, args_for_log);
- va_end(args_for_log);
-#endif
-}
-
-#define FATAL(...)          \
-  do {                      \
-    logError(__VA_ARGS__);  \
-    exit(1);                \
-  } while (0)
-
-// TODO(aselle): Change the error model to use status codes.
-#define CHECK_TFLITE_SUCCESS(x) \
- if (x != kTfLiteOk) { \
- FATAL("Aborting since tflite returned failure nnapi_delegate.cc:%d.", \
- __LINE__); \
- }
-
-#define CHECK_NN(x) \
- if (x != ANEURALNETWORKS_NO_ERROR) { \
- FATAL("Aborting since NNAPI returned failure nnapi_delegate.cc:%d", \
- __LINE__); \
- }
-
-#define RETURN_ERROR_IF_TFLITE_FAILED(x) \
- if (x != kTfLiteOk) { \
- logError( \
- "Returning error since TFLite returned failure nnapi_delegate.cc:%d.", \
- __LINE__); \
- return kTfLiteError; \
- }
-
-#define RETURN_ERROR_IF_NN_FAILED(x) \
- if (x != ANEURALNETWORKS_NO_ERROR) { \
- logError( \
- "Returning error since NNAPI returned failure nnapi_delegate.cc:%d.", \
- __LINE__); \
- return kTfLiteError; \
- }
-
-// Tracking of NNAPI operand ids
-static const int64_t kOperandIdNotSet = -1;
-static const int64_t kOperandNotNeeded = -2;
-
-namespace {
-
-int32_t GetAndroidSdkVersion() {
-#ifdef __ANDROID__
- const char* sdkProp = "ro.build.version.sdk";
- char sdkVersion[PROP_VALUE_MAX];
- int length = __system_property_get(sdkProp, sdkVersion);
- if (length != 0) {
- for (int i = 0; i < length; ++i) {
- int digit = sdkVersion[i] - '0';
- if (digit < 0 || digit > 9) {
-        // Non-numeric SDK version, assume it's higher than expected;
- return 0xFFFF;
- }
- }
- return atoi(sdkVersion);
- }
- FATAL("No %s prop", sdkProp);
-#endif // __ANDROID__
- return 0;
-}
-
-int32_t GetAndroidSdkVersionCached() {
- static int32_t androidSdkVersion = GetAndroidSdkVersion();
- return androidSdkVersion;
-}
-
-static const uint32_t dimension_for_scalar[1] = {1};
-
-} // namespace
-
-NNAPIAllocation::NNAPIAllocation(const char* filename,
- ::tflite::ErrorReporter* error_reporter)
- : MMAPAllocation(filename, error_reporter) {
- if (mmapped_buffer_ != MAP_FAILED)
- CHECK_NN(ANeuralNetworksMemory_createFromFd(buffer_size_bytes_, PROT_READ,
- mmap_fd_, 0, &handle_));
-}
-
-NNAPIAllocation::~NNAPIAllocation() {
- if (handle_) {
- ANeuralNetworksMemory_free(handle_);
- }
-}
-
-NNAPIDelegate::~NNAPIDelegate() {
- if (nn_compiled_model_) {
- ANeuralNetworksCompilation_free(nn_compiled_model_);
- nn_compiled_model_ = nullptr;
- }
- if (nn_model_) {
- ANeuralNetworksModel_free(nn_model_);
- nn_model_ = nullptr;
- // TODO(aselle): Is this thread-safe and callable multiple times?
- }
- // ANeuralNetworksShutdown();
-}
-
-// Adds the tensors of the interpreter to the NN API model.
-TfLiteStatus addTensorOperands(::tflite::Interpreter* interpreter,
- ANeuralNetworksModel* nn_model,
- uint32_t* no_of_operands_added,
- std::vector<int64_t>* nnapi_ids) {
- uint32_t next_id = 0;
- for (size_t i = 0; i < interpreter->tensors_size(); i++) {
- // Skip temporaries and RNN back-edges.
- if ((*nnapi_ids)[i] == kOperandNotNeeded) continue;
-
- (*nnapi_ids)[i] = int64_t(next_id);
-
- int32_t nn_type = 0;
-    // NNAPI requires 32-bit float scale to be zero; tflite doesn't care
- float scale = 0.0f;
- int32_t zeroPoint = 0;
- TfLiteTensor* tensor = interpreter->tensor(i);
- switch (tensor->type) {
- case kTfLiteNoType:
- // Tensors added during initialization of Ops don't have a type yet and
- // should not be registered with the NNAPI.
- continue;
- case kTfLiteFloat32:
- nn_type = ANEURALNETWORKS_TENSOR_FLOAT32;
- break;
- case kTfLiteUInt8:
- nn_type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM;
- scale = tensor->params.scale;
-        // FIXME The next line is a workaround because currently zero scale is
-        // passed down from TF Lite. Note that the latest NeuralNetworks.h (see
-        // https://android.googlesource.com/platform/frameworks/ml/+/master/nn/runtime/include/NeuralNetworks.h)
-        // requires scale to be greater than zero. Remove this workaround when
-        // the scale value is correctly passed.
- scale = (scale == 0.0f) ? 1.0f : scale;
- zeroPoint = tensor->params.zero_point;
- break;
- case kTfLiteInt32:
- nn_type = ANEURALNETWORKS_TENSOR_INT32;
- scale = tensor->params.scale;
- zeroPoint = tensor->params.zero_point;
- break;
- case kTfLiteBool:
-        // Workaround to pass bool under NNAPI:
-        // represent it as ANEURALNETWORKS_TENSOR_QUANT8_ASYMM with scale = 1.0f and zero_point = 0
- nn_type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM;
- scale = 1.0f;
- zeroPoint = 0;
- break;
- default:
- logError("Unsupported tensor type %d", tensor->type);
- return kTfLiteError;
- }
- if (tensor->dims->size == 0) {
-      // WORKAROUND Some models have rank-0 (dimension zero) tensors
- switch (tensor->type) {
- case kTfLiteFloat32:
- nn_type = ANEURALNETWORKS_TENSOR_FLOAT32;
- break;
- case kTfLiteInt32:
- nn_type = ANEURALNETWORKS_TENSOR_INT32;
- break;
- default:
- logError("NNAPI doesn't support tensors with rank 0 (index %d name %s)",
- i, tensor->name);
- return kTfLiteError;
- }
- }
- if (tensor->dims->size > 4) {
- logError("NNAPI doesn't support tensors with rank > 4 (index %d name %s)",
- i, tensor->name);
- return kTfLiteError;
- }
- // TODO(aselle): Note, many of these are intermediate results. Do I need
- // to ever specify these sizes. I am currently below doing setValue
- // on all of them, but I shouldn't in the future.
- // Answer(jeanluc): If all the operators can set the dimension correctly,
- // you won't need to.
- ANeuralNetworksOperandType operand_type{
- nn_type, static_cast<uint32_t>(tensor->dims->size),
- reinterpret_cast<uint32_t*>(tensor->dims->data), scale, zeroPoint};
- if (tensor->dims->size == 0) {
-      // WORKAROUND Some models have rank-0 (dimension zero) tensors
-      // Treat a scalar as a vector of size 1
- operand_type.dimensions = dimension_for_scalar;
- operand_type.dimensionCount = 1;
- }
- RETURN_ERROR_IF_NN_FAILED(
- ANeuralNetworksModel_addOperand(nn_model, &operand_type));
- // TODO(aselle): Based on Michael's suggestion, limiting this to read
- // only memory
- if (tensor->allocation_type == kTfLiteMmapRo) {
- if (const NNAPIAllocation* alloc = dynamic_cast<const NNAPIAllocation*>(
- static_cast<const ::tflite::Allocation*>(tensor->allocation))) {
- RETURN_ERROR_IF_NN_FAILED(
- ANeuralNetworksModel_setOperandValueFromMemory(
- nn_model, next_id, alloc->memory(),
- alloc->offset(tensor->data.raw), tensor->bytes));
- } else {
- RETURN_ERROR_IF_NN_FAILED(ANeuralNetworksModel_setOperandValue(
- nn_model, next_id, tensor->data.raw, tensor->bytes));
- }
- } else if (tensor->bytes == 0) {
-      // These size-0 tensors are reserved optional tensors.
- RETURN_ERROR_IF_NN_FAILED(
- ANeuralNetworksModel_setOperandValue(nn_model, next_id, nullptr, 0));
- }
-
- ++next_id;
- }
- *no_of_operands_added = next_id;
- return kTfLiteOk;
-}
-
-void MapAndAddTensorIds(const int* from_ids_buf, size_t from_ids_count,
- std::vector<uint32_t>* into,
- const std::vector<int64_t>& map) {
- for (size_t i = 0; i < from_ids_count; i++) {
- int from_id = from_ids_buf[i];
- if (from_id == kOptionalTensor) {
- into->push_back(from_id);
- } else {
- into->push_back(map[from_id]);
- }
- }
-}
-
-// Adds the operations and their parameters to the NN API model.
-// 'next-id' is the operand ID of the next operand of the model.
-TfLiteStatus AddOpsAndParams(
- ::tflite::Interpreter* interpreter, ANeuralNetworksModel* nn_model,
- uint32_t next_id, std::vector<int>* model_state_inputs,
- std::vector<int>* model_state_outputs,
- const std::vector<int64_t>& tensor_id_to_nnapi_id) {
- for (size_t i = 0; i < interpreter->nodes_size(); i++) {
- const auto* node_and_registration = interpreter->node_and_registration(i);
- const TfLiteNode& node = node_and_registration->first;
- const TfLiteRegistration& registration = node_and_registration->second;
- ::tflite::BuiltinOperator builtin =
- static_cast<::tflite::BuiltinOperator>(registration.builtin_code);
-
- // Add the parameters.
- std::vector<uint32_t> augmented_inputs, augmented_outputs;
- MapAndAddTensorIds(node.inputs->data, node.inputs->size, &augmented_inputs,
- tensor_id_to_nnapi_id);
- MapAndAddTensorIds(node.outputs->data, node.outputs->size,
- &augmented_outputs, tensor_id_to_nnapi_id);
-
- auto add_scalar_int32 = [&nn_model, &augmented_inputs,
- &next_id](int value) {
- ANeuralNetworksOperandType operand_type{.type = ANEURALNETWORKS_INT32};
- CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
- CHECK_NN(ANeuralNetworksModel_setOperandValue(nn_model, next_id, &value,
- sizeof(int32_t)))
- augmented_inputs.push_back(next_id++);
- };
-
- auto add_scalar_float32 = [&nn_model, &augmented_inputs,
- &next_id](float value) {
- ANeuralNetworksOperandType operand_type{.type = ANEURALNETWORKS_FLOAT32};
- CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
- CHECK_NN(ANeuralNetworksModel_setOperandValue(nn_model, next_id, &value,
- sizeof(float)))
- augmented_inputs.push_back(next_id++);
- };
-
- auto add_vector_int32 = [&](const int* values, uint32_t num_values) {
- ANeuralNetworksOperandType operand_type{
- .type = ANEURALNETWORKS_TENSOR_INT32,
- .dimensionCount = 1,
- .dimensions = &num_values};
- CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
- CHECK_NN(ANeuralNetworksModel_setOperandValue(
- nn_model, next_id, values, sizeof(int32_t) * num_values));
- augmented_inputs.push_back(next_id++);
- };
-
- // Handle state tensors of RNN, LSTM, SVDF.
- // For each state_out tensor, a corresponding state_in operand needs to be
- // created for NNAPI.
- auto duplicate_state_tensor_float32 =
- [interpreter, &nn_model, &next_id, &augmented_inputs,
- &model_state_inputs, &model_state_outputs](int tensor_id) {
- const TfLiteTensor* tensor = interpreter->tensor(tensor_id);
- ANeuralNetworksOperandType operand_type{
- ANEURALNETWORKS_TENSOR_FLOAT32,
- static_cast<uint32_t>(tensor->dims->size),
- reinterpret_cast<uint32_t*>(tensor->dims->data),
- tensor->params.scale, tensor->params.zero_point};
- CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type));
- augmented_inputs.push_back(next_id);
- model_state_inputs->push_back(next_id);
- model_state_outputs->push_back(tensor_id);
- next_id++;
- };
- auto check_and_add_activation = [&add_scalar_int32](int activation) {
- if (activation > kTfLiteActRelu6) {
- logError("NNAPI only supports RELU, RELU1 and RELU6 activations");
- return kTfLiteError;
- }
- add_scalar_int32(activation);
- return kTfLiteOk;
- };
-
- auto add_add_params = [&add_scalar_int32](void* data) {
- auto* builtin = reinterpret_cast<TfLiteAddParams*>(data);
- if (builtin->activation > kTfLiteActRelu6) {
- logError("NNAPI only supports RELU, RELU1 and RELU6 activations");
- return kTfLiteError;
- }
- add_scalar_int32(builtin->activation);
- return kTfLiteOk;
- };
-
- auto add_pooling_params = [&add_scalar_int32,
- &check_and_add_activation](void* data) {
- auto builtin = reinterpret_cast<TfLitePoolParams*>(data);
- add_scalar_int32(builtin->padding);
- add_scalar_int32(builtin->stride_width);
- add_scalar_int32(builtin->stride_height);
- add_scalar_int32(builtin->filter_width);
- add_scalar_int32(builtin->filter_height);
- return check_and_add_activation(builtin->activation);
- };
-
- auto add_convolution_params = [&add_scalar_int32,
- &check_and_add_activation](void* data) {
- auto builtin = reinterpret_cast<TfLiteConvParams*>(data);
- add_scalar_int32(builtin->padding);
- add_scalar_int32(builtin->stride_width);
- add_scalar_int32(builtin->stride_height);
- return check_and_add_activation(builtin->activation);
- };
-
- auto add_depthwise_conv_params = [&add_scalar_int32,
- &check_and_add_activation](void* data) {
- auto builtin = reinterpret_cast<TfLiteDepthwiseConvParams*>(data);
- add_scalar_int32(builtin->padding);
- add_scalar_int32(builtin->stride_width);
- add_scalar_int32(builtin->stride_height);
- add_scalar_int32(builtin->depth_multiplier);
- return check_and_add_activation(builtin->activation);
- };
-
- auto add_fully_connected_params = [&check_and_add_activation](void* data) {
- auto builtin = reinterpret_cast<TfLiteFullyConnectedParams*>(data);
- return check_and_add_activation(builtin->activation);
- };
-
- auto add_concatenation_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteConcatenationParams*>(data);
- add_scalar_int32(builtin->axis);
- if (builtin->activation != kTfLiteActNone) {
- logError("Concatenation does not support fused activation in NNAPI");
- return kTfLiteError;
- }
- return kTfLiteOk;
- };
-
- auto add_softmax_params = [&add_scalar_float32](void* data) {
- auto builtin = reinterpret_cast<TfLiteSoftmaxParams*>(data);
- add_scalar_float32(builtin->beta);
- };
-
- auto add_space_to_depth_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteSpaceToDepthParams*>(data);
- add_scalar_int32(builtin->block_size);
- };
-
- auto add_lstm_params = [&add_scalar_int32,
- &add_scalar_float32](void* data) {
- auto builtin = reinterpret_cast<TfLiteLSTMParams*>(data);
- add_scalar_int32(builtin->activation);
- add_scalar_float32(builtin->cell_clip);
- add_scalar_float32(builtin->proj_clip);
- };
-
- // LSTM in NNAPI requires scratch tensor as an output operand.
- auto add_lstm_scratch_tensor_float32 = [interpreter, &node, &nn_model,
- &next_id, &augmented_outputs]() {
- if (node.temporaries->size == 0) return;
- int scratch_buffer_index = node.temporaries->data[0];
- const TfLiteTensor* tensor = interpreter->tensor(scratch_buffer_index);
- ANeuralNetworksOperandType operand_type{
- ANEURALNETWORKS_TENSOR_FLOAT32,
- static_cast<uint32_t>(tensor->dims->size),
- reinterpret_cast<uint32_t*>(tensor->dims->data), tensor->params.scale,
- tensor->params.zero_point};
- CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type));
- augmented_outputs.insert(augmented_outputs.begin(), next_id++);
- };
-
- auto add_mean_params = [&add_scalar_int32](void* data) {
-#ifdef OBS_BUILD
- auto builtin = reinterpret_cast<TfLiteMeanParams*>(data);
-#else
- auto builtin = reinterpret_cast<TfLiteReducerParams*>(data);
-#endif
- add_scalar_int32(builtin->keep_dims);
- };
-
- auto add_svdf_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteSVDFParams*>(data);
- add_scalar_int32(builtin->rank);
- add_scalar_int32(builtin->activation);
- };
-
- auto add_rnn_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteRNNParams*>(data);
- add_scalar_int32(builtin->activation);
- };
-
- auto add_squeeze_params = [&](void* data) {
- const auto* builtin = reinterpret_cast<TfLiteSqueezeParams*>(data);
- // Note that we add the squeeze dimensions even if the dimensions were
- // unspecified (empty), as NNAPI requires the operand.
- add_vector_int32(builtin->squeeze_dims,
- static_cast<uint32_t>(builtin->num_squeeze_dims));
- };
-
- // Handle optional input tensors.
- auto add_optional_tensors = [&nn_model, &augmented_inputs,
- &next_id](int nn_type) {
- for (size_t idx = 0; idx < augmented_inputs.size(); idx++) {
- if (augmented_inputs[idx] == kOptionalTensor) {
- const std::vector<uint32_t> dim = {0, 0};
- ANeuralNetworksOperandType operand_type{nn_type, 2, dim.data(), 0, 0};
- CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
- CHECK_NN(ANeuralNetworksModel_setOperandValue(nn_model, next_id,
- nullptr, 0))
- augmented_inputs[idx] = next_id++;
- }
- }
- };
-
- int nnapi_version = 10;
-#include "nnapi_delegate_ex_AddOpsAndParams_lambda.inc"
-
- ANeuralNetworksOperationType nn_op_type;
-
-  // Namespace alias to minimize the diff with upstream tensorflow
- namespace tflite = ::tflite;
-
- switch (builtin) {
- case tflite::BuiltinOperator_ADD:
- nn_op_type = ANEURALNETWORKS_ADD;
- RETURN_ERROR_IF_TFLITE_FAILED(add_add_params(node.builtin_data));
- break;
- case tflite::BuiltinOperator_MUL:
- nn_op_type = ANEURALNETWORKS_MUL;
- RETURN_ERROR_IF_TFLITE_FAILED(add_add_params(node.builtin_data));
- break;
- case tflite::BuiltinOperator_AVERAGE_POOL_2D:
- RETURN_ERROR_IF_TFLITE_FAILED(add_pooling_params(node.builtin_data));
- nn_op_type = ANEURALNETWORKS_AVERAGE_POOL_2D;
- break;
- case tflite::BuiltinOperator_MAX_POOL_2D:
- RETURN_ERROR_IF_TFLITE_FAILED(add_pooling_params(node.builtin_data));
- nn_op_type = ANEURALNETWORKS_MAX_POOL_2D;
- break;
- case tflite::BuiltinOperator_L2_POOL_2D:
- RETURN_ERROR_IF_TFLITE_FAILED(add_pooling_params(node.builtin_data));
- nn_op_type = ANEURALNETWORKS_L2_POOL_2D;
- break;
- case tflite::BuiltinOperator_CONV_2D: {
- auto builtin = reinterpret_cast<TfLiteConvParams*>(node.builtin_data);
- if (builtin->dilation_width_factor != 1 ||
- builtin->dilation_height_factor != 1 || node.inputs->size != 3) {
- logError("NNAPI does not support dilated Conv2D.");
- return kTfLiteError;
- }
- }
- RETURN_ERROR_IF_TFLITE_FAILED(
- add_convolution_params(node.builtin_data));
- nn_op_type = ANEURALNETWORKS_CONV_2D;
- break;
- case tflite::BuiltinOperator_RELU:
- nn_op_type = ANEURALNETWORKS_RELU;
- break;
- case tflite::BuiltinOperator_RELU_N1_TO_1:
- nn_op_type = ANEURALNETWORKS_RELU1;
- break;
- case tflite::BuiltinOperator_RELU6:
- nn_op_type = ANEURALNETWORKS_RELU6;
- break;
- case tflite::BuiltinOperator_TANH:
- nn_op_type = ANEURALNETWORKS_TANH;
- break;
- case tflite::BuiltinOperator_FLOOR:
- nn_op_type = ANEURALNETWORKS_FLOOR;
- break;
- case tflite::BuiltinOperator_LOGISTIC:
- nn_op_type = ANEURALNETWORKS_LOGISTIC;
- break;
- case tflite::BuiltinOperator_DEPTHWISE_CONV_2D:
- RETURN_ERROR_IF_TFLITE_FAILED(
- add_depthwise_conv_params(node.builtin_data));
- nn_op_type = ANEURALNETWORKS_DEPTHWISE_CONV_2D;
- break;
- case tflite::BuiltinOperator_CONCATENATION:
- RETURN_ERROR_IF_TFLITE_FAILED(
- add_concatenation_params(node.builtin_data));
- nn_op_type = ANEURALNETWORKS_CONCATENATION;
- break;
- case tflite::BuiltinOperator_SOFTMAX:
- add_softmax_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_SOFTMAX;
- break;
- case tflite::BuiltinOperator_FULLY_CONNECTED:
- RETURN_ERROR_IF_TFLITE_FAILED(
- add_fully_connected_params(node.builtin_data));
- nn_op_type = ANEURALNETWORKS_FULLY_CONNECTED;
- break;
- case tflite::BuiltinOperator_RESHAPE:
- if (node.inputs->size != 2) {
- logError("NNAPI only supports 2-input RESHAPE");
- return kTfLiteError;
- }
- nn_op_type = ANEURALNETWORKS_RESHAPE;
- // add_reshape_params(node.builtin_data);
- break;
- case tflite::BuiltinOperator_RESIZE_BILINEAR:
- add_resize_bilinear_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_RESIZE_BILINEAR;
- break;
- case tflite::BuiltinOperator_SPACE_TO_DEPTH:
- add_space_to_depth_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_SPACE_TO_DEPTH;
- break;
- case tflite::BuiltinOperator_LSTM: {
- if (node.inputs->size + /* no of params */ 3 != 21) {
- logError("NNAPI only supports 21-input LSTMs");
- return kTfLiteError;
- }
- duplicate_state_tensor_float32(
- node.outputs->data[/*kOutputStateTensor*/ 0]);
- duplicate_state_tensor_float32(
- node.outputs->data[/*kCellStateTensor*/ 1]);
- add_lstm_params(node.builtin_data);
- add_lstm_scratch_tensor_float32();
- add_optional_tensors(ANEURALNETWORKS_TENSOR_FLOAT32);
- nn_op_type = ANEURALNETWORKS_LSTM;
- break;
- }
- case tflite::BuiltinOperator_DEQUANTIZE:
- nn_op_type = ANEURALNETWORKS_DEQUANTIZE;
- break;
- case tflite::BuiltinOperator_SVDF: {
- duplicate_state_tensor_float32(node.outputs->data[/*kStateTensor*/ 0]);
- add_svdf_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_SVDF;
- break;
- }
- case tflite::BuiltinOperator_RNN: {
- duplicate_state_tensor_float32(
- node.outputs->data[/*kHiddenStateTensor*/ 0]);
- add_rnn_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_RNN;
- break;
- }
- case tflite::BuiltinOperator_EMBEDDING_LOOKUP:
- nn_op_type = ANEURALNETWORKS_EMBEDDING_LOOKUP;
- break;
- case tflite::BuiltinOperator_PAD:
- nnapi_version = 11; // require NNAPI 1.1
- nn_op_type = ANEURALNETWORKS_PAD;
- break;
- case tflite::BuiltinOperator_MEAN:
- nnapi_version = 11; // require NNAPI 1.1
- add_mean_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_MEAN;
- break;
- case tflite::BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION:
- nn_op_type = ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION;
- add_lrn_params(node.builtin_data);
- break;
- case tflite::BuiltinOperator_DIV:
- nnapi_version = 11; // require NNAPI 1.1
- nn_op_type = ANEURALNETWORKS_DIV;
- RETURN_ERROR_IF_TFLITE_FAILED(check_and_add_activation(
- reinterpret_cast<TfLiteDivParams*>(node.builtin_data)->activation));
- break;
- case tflite::BuiltinOperator_SUB:
- nnapi_version = 11; // require NNAPI 1.1
- nn_op_type = ANEURALNETWORKS_SUB;
- RETURN_ERROR_IF_TFLITE_FAILED(check_and_add_activation(
- reinterpret_cast<TfLiteSubParams*>(node.builtin_data)->activation));
- break;
- case tflite::BuiltinOperator_SQUEEZE:
- nnapi_version = 11; // requires NNAPI 1.1
- add_squeeze_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_SQUEEZE;
- break;
- case tflite::BuiltinOperator_TRANSPOSE:
- // The permutation input tensor value dictates the output dimensions.
- // TODO(b/110888333): Support dynamically-sized tensors in delegates.
- if ((node.inputs->size > 1) &&
- (interpreter->tensor(node.inputs->data[1])->allocation_type !=
- kTfLiteMmapRo)) {
- logError("NNAPI does not yet support dynamic tensors.");
- return kTfLiteError;
- }
- nnapi_version = 11; // require NNAPI 1.1
- nn_op_type = ANEURALNETWORKS_TRANSPOSE;
- break;
- case tflite::BuiltinOperator_L2_NORMALIZATION:
- nn_op_type = ANEURALNETWORKS_L2_NORMALIZATION;
- if (reinterpret_cast<TfLiteL2NormParams*>(node.builtin_data)
- ->activation != kTfLiteActNone) {
- logError(
- "NNAPI does not support L2Normalization with fused activations");
- return kTfLiteError;
- }
- if ((node.inputs->size > 0) &&
- (interpreter->tensor(node.inputs->data[0])->dims->size != 4)) {
- logError("NNAPI only supports input rank 4 for L2Normalization");
- return kTfLiteError;
- }
- break;
- case tflite::BuiltinOperator_HASHTABLE_LOOKUP:
- if (interpreter->tensor(node.outputs->data[0])->type !=
- kTfLiteFloat32) {
- logError("NNAPI only support HASHTABLE_LOOKUP with float32 output",
- builtin);
- return kTfLiteError;
- }
- nn_op_type = ANEURALNETWORKS_HASHTABLE_LOOKUP;
- break;
- case tflite::BuiltinOperator_STRIDED_SLICE:
- add_strided_slice_params(node.builtin_data);
- nn_op_type = ANEURALNETWORKS_STRIDED_SLICE;
- break;
- case tflite::BuiltinOperator_SPACE_TO_BATCH_ND:
- nnapi_version = 11; // require NNAPI 1.1
- nn_op_type = ANEURALNETWORKS_SPACE_TO_BATCH_ND;
- break;
- case tflite::BuiltinOperator_BATCH_TO_SPACE_ND:
- nnapi_version = 11; // require NNAPI 1.1
- nn_op_type = ANEURALNETWORKS_BATCH_TO_SPACE_ND;
- check_batch_to_space_params();
- break;
- case tflite::BuiltinOperator_CAST:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_CAST_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_TOPK_V2:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_TOPK_V2_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_GATHER:
- add_gather_ex_params(node.builtin_data);
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_GATHER_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_SPLIT:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_SPLIT_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_NEG:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_NEG_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_EXP:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_EXP_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_TRANSPOSE_CONV:
- add_transpose_conv_params(node.builtin_data);
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_TRANSPOSE_CONV_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_PRELU:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_PRELU_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_ARG_MAX:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_ARGMAX_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
-#ifndef OBS_BUILD
- case tflite::BuiltinOperator_PACK:
- add_pack_ex_params(node.builtin_data);
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_PACK_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_UNPACK:
- add_unpack_ex_params(node.builtin_data);
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_UNPACK_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_SQRT:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_SQRT_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_RSQRT:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_RSQRT_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_EQUAL:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_EQUAL_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_NOT_EQUAL:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_NOT_EQUAL_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_SUM:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_REDUCE_SUM_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_REDUCE_MAX:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_TENSORFLOW_MAX_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_REDUCE_MIN:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_REDUCE_MIN_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_LOGICAL_AND:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_LOGICAL_AND_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- case tflite::BuiltinOperator_LOGICAL_OR:
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_LOGICAL_OR_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
-#endif
- case tflite::BuiltinOperator_CONCAT_EMBEDDINGS:
- case tflite::BuiltinOperator_LSH_PROJECTION:
- case tflite::BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN:
- case tflite::BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN:
- case tflite::BuiltinOperator_EMBEDDING_LOOKUP_SPARSE:
- case tflite::BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM:
- case tflite::BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM:
- //case tflite::BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION:
- case tflite::BuiltinOperator_PADV2:
- //case tflite::BuiltinOperator_RESIZE_BILINEAR:
- case tflite::BuiltinOperator_CALL:
- case tflite::BuiltinOperator_SKIP_GRAM:
- //case tflite::BuiltinOperator_RELU_N1_TO_1:
- //case tflite::BuiltinOperator_GATHER:
- //case tflite::BuiltinOperator_SPACE_TO_BATCH_ND:
- //case tflite::BuiltinOperator_BATCH_TO_SPACE_ND:
- //case tflite::BuiltinOperator_TOPK_V2:
- //case tflite::BuiltinOperator_SPLIT:
- //case tflite::BuiltinOperator_STRIDED_SLICE:
- //case tflite::BuiltinOperator_EXP:
- case tflite::BuiltinOperator_LOG_SOFTMAX:
- //case tflite::BuiltinOperator_DEQUANTIZE:
- case tflite::BuiltinOperator_DELEGATE:
- //case tflite::BuiltinOperator_CAST:
- //case tflite::BuiltinOperator_PRELU:
- case tflite::BuiltinOperator_MAXIMUM:
- case tflite::BuiltinOperator_MINIMUM:
-#ifndef OBS_BUILD
- case tflite::BuiltinOperator_ARG_MIN:
-#endif
- case tflite::BuiltinOperator_GREATER:
- case tflite::BuiltinOperator_GREATER_EQUAL:
- case tflite::BuiltinOperator_LESS:
- case tflite::BuiltinOperator_LESS_EQUAL:
- //case tflite::BuiltinOperator_NEG:
- case tflite::BuiltinOperator_SELECT:
- case tflite::BuiltinOperator_SLICE:
- case tflite::BuiltinOperator_SIN:
- //case tflite::BuiltinOperator_LOG:
- //case tflite::BuiltinOperator_TRANSPOSE_CONV:
-#ifndef OBS_BUILD
- case tflite::BuiltinOperator_TILE:
- case tflite::BuiltinOperator_EXPAND_DIMS:
- case tflite::BuiltinOperator_SPARSE_TO_DENSE:
- //case tflite::BuiltinOperator_EQUAL:
- //case tflite::BuiltinOperator_NOT_EQUAL:
- //case tflite::BuiltinOperator_SUM:
- //case tflite::BuiltinOperator_REDUCE_MAX:
- //case tflite::BuiltinOperator_REDUCE_MIN:
- case tflite::BuiltinOperator_REDUCE_PROD:
- //case tflite::BuiltinOperator_SQRT:
- //case tflite::BuiltinOperator_RSQRT:
- case tflite::BuiltinOperator_SHAPE:
- case tflite::BuiltinOperator_POW:
- case tflite::BuiltinOperator_FAKE_QUANT:
- //case tflite::BuiltinOperator_PACK:
- //case tflite::BuiltinOperator_LOGICAL_OR:
- case tflite::BuiltinOperator_ONE_HOT:
- //case tflite::BuiltinOperator_LOGICAL_AND:
- case tflite::BuiltinOperator_LOGICAL_NOT:
- //case tflite::BuiltinOperator_UNPACK:
- case tflite::BuiltinOperator_FLOOR_DIV:
- case tflite::BuiltinOperator_REDUCE_ANY:
- case tflite::BuiltinOperator_SQUARE:
- case tflite::BuiltinOperator_ZEROS_LIKE:
- case tflite::BuiltinOperator_FILL:
-#endif
- logError("Op code %d is currently not delegated to NNAPI", builtin);
- return kTfLiteError;
- break;
- case tflite::BuiltinOperator_CUSTOM: {
- std::string custom_name(registration.custom_name);
- if (custom_name.compare("TensorFlowMax") == 0) {
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_TENSORFLOW_MAX_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- }
- else if (custom_name.compare("SquaredDifference") == 0) {
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_SQUARED_DIFFERENCE_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- }
- else if (custom_name.compare("TensorFlowSum") == 0) {
- CHECK_NN(ANeuralNetworksModel_addOperationEx(
- nn_model, ANEURALNETWORKS_REDUCE_SUM_EX,
- static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(node.outputs->size),
- reinterpret_cast<uint32_t*>(node.outputs->data)));
- continue;
- }
- logError("Custom operations are not supported when using NNAPI.");
- return kTfLiteError;
- break;
- }
-#ifdef OBS_BUILD
- default:
- logError("Op code %d is currently not delegated to NNAPI", builtin);
- return kTfLiteError;
- break;
-#endif
- }
-
- //if (nnapi_version == 11 && GetAndroidSdkVersionCached() < 28) {
- // FATAL("Op %d needs NNAPI1.1", builtin);
- //}
-
- // Add the operation.
- RETURN_ERROR_IF_NN_FAILED(ANeuralNetworksModel_addOperation(
- nn_model, nn_op_type, static_cast<uint32_t>(augmented_inputs.size()),
- augmented_inputs.data(),
- static_cast<uint32_t>(augmented_outputs.size()),
- reinterpret_cast<uint32_t*>(augmented_outputs.data())));
- }
- return kTfLiteOk;
-}
-
-TfLiteStatus NNAPIDelegate::BuildGraph(::tflite::Interpreter* interpreter) {
- if (nn_model_ && nn_compiled_model_) return model_status_;
-
-  // TODO(aselle): This is not correct. Need to handle resize invalidation.
- if (!nn_model_) {
- CHECK_NN(ANeuralNetworksModel_create(&nn_model_));
-
- // Find which tensors should be added to NNAPI. TFLite has temporaries
-    // and RNN back-edges which are not valid for NNAPI. We look through all
- // inputs and outputs and mark the mapping in tensor_id_to_nnapi_id with
- // kOperandIdNotSet. addTensorOperands will replace those with the
- // corresponding NNAPI operand ids and skip kOperandNotNeeded entries.
- std::vector<int64_t> tensor_id_to_nnapi_id(interpreter->tensors_size(),
- kOperandNotNeeded);
- auto set_ids_to_not_set = [&tensor_id_to_nnapi_id](const int* buf,
- size_t count) {
- for (int j = 0; j < count; j++) {
- auto tensor_id = buf[j];
- if (tensor_id != kOptionalTensor) {
- tensor_id_to_nnapi_id[tensor_id] = kOperandIdNotSet;
- }
- }
- };
- for (size_t i = 0; i < interpreter->nodes_size(); i++) {
- const auto* node_and_registration = interpreter->node_and_registration(i);
- const TfLiteNode& node = node_and_registration->first;
- set_ids_to_not_set(node.inputs->data, node.inputs->size);
- set_ids_to_not_set(node.outputs->data, node.outputs->size);
- }
- set_ids_to_not_set(interpreter->inputs().data(),
- interpreter->inputs().size());
- set_ids_to_not_set(interpreter->outputs().data(),
- interpreter->outputs().size());
-
- uint32_t next_id = 0;
- RETURN_ERROR_IF_TFLITE_FAILED(addTensorOperands(
- interpreter, nn_model_, &next_id, &tensor_id_to_nnapi_id));
- RETURN_ERROR_IF_TFLITE_FAILED(
- AddOpsAndParams(interpreter, nn_model_, next_id, &model_states_inputs_,
- &model_states_outputs_, tensor_id_to_nnapi_id));
-
- std::vector<uint32_t> augmented_inputs;
- MapAndAddTensorIds(interpreter->inputs().data(),
- interpreter->inputs().size(), &augmented_inputs,
- tensor_id_to_nnapi_id);
- augmented_inputs.insert(augmented_inputs.end(),
- model_states_inputs_.begin(),
- model_states_inputs_.end());
- std::vector<uint32_t> augmented_outputs;
- MapAndAddTensorIds(interpreter->outputs().data(),
- interpreter->outputs().size(), &augmented_outputs,
- tensor_id_to_nnapi_id);
- MapAndAddTensorIds(model_states_outputs_.data(),
- model_states_outputs_.size(), &augmented_outputs,
- tensor_id_to_nnapi_id);
-
- CHECK_NN(ANeuralNetworksModel_identifyInputsAndOutputs(
- nn_model_, static_cast<uint32_t>(augmented_inputs.size()),
- reinterpret_cast<const uint32_t*>(augmented_inputs.data()),
- static_cast<uint32_t>(augmented_outputs.size()),
- reinterpret_cast<const uint32_t*>(augmented_outputs.data())));
-
- // TODO Support ANeuralNetworksModel_relaxComputationFloat32toFloat16
- //if (GetAndroidSdkVersionCached() >= 28) {
- // CHECK_NN(ANeuralNetworksModel_relaxComputationFloat32toFloat16(
- // nn_model_, interpreter->GetAllowFp16PrecisionForFp32()));
- //}
- CHECK_NN(ANeuralNetworksModel_finish(nn_model_));
- }
- if (!nn_compiled_model_) {
- CHECK_NN(ANeuralNetworksCompilation_create(nn_model_, &nn_compiled_model_));
- CHECK_NN(ANeuralNetworksCompilation_finish(nn_compiled_model_));
- }
- return kTfLiteOk;
-}
-
-#include <unordered_map>
-
-TfLiteStatus NNAPIDelegate::Invoke(::tflite::Interpreter* interpreter) {
- if (!nn_model_) {
- model_status_ = BuildGraph(interpreter);
- if (model_status_ != kTfLiteOk) {
- logError("Failed to build graph for NNAPI");
- }
- }
- if (model_status_ != kTfLiteOk) {
- return model_status_;
- }
-
- ANeuralNetworksExecution* execution = nullptr;
- CHECK_NN(ANeuralNetworksExecution_create(nn_compiled_model_, &execution));
-
-  // Allocate temporary buffers to hold boolean tensors cast to byte values.
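-  // (NNAPI is handed plain byte buffers for these, so the loops below convert
-  //  between TfLite bool values and a byte encoding element-wise.)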
- std::unordered_map<size_t, uint8_t*> input_boolean_tensors;
- std::unordered_map<size_t, uint8_t*> output_boolean_tensors;
- for (size_t i = 0; i < interpreter->inputs().size(); i++)
- {
- int input = interpreter->inputs()[i];
- TfLiteTensor* tensor = interpreter->tensor(input);
- if (tensor->type == kTfLiteBool)
- {
- size_t elements = tensor->bytes / sizeof(bool);
- uint8_t* temp_tensor = new uint8_t[tensor->bytes / sizeof(bool)];
- input_boolean_tensors[i] = temp_tensor;
- for (size_t idx = 0; idx < elements; idx++)
- {
- temp_tensor[idx] = (tensor->data.b[idx] ? 0x00 : 0xff);
- }
- }
- }
- for (size_t i = 0; i < interpreter->outputs().size(); i++)
- {
- int output = interpreter->outputs()[i];
- TfLiteTensor* tensor = interpreter->tensor(output);
- if (tensor->type == kTfLiteBool)
- {
- uint8_t* temp_tensor = new uint8_t[tensor->bytes / sizeof(bool)];
- output_boolean_tensors[i] = temp_tensor;
- }
- }
-
-  // Currently we perform a deep copy of the input buffers.
- for (size_t i = 0; i < interpreter->inputs().size(); i++) {
- int input = interpreter->inputs()[i];
- // TODO(aselle): Is this what we want or do we want input instead?
-    // TODO(aselle): This should be called setInputValue maybe to be consistent.
- TfLiteTensor* tensor = interpreter->tensor(input);
- if (tensor->type == kTfLiteBool)
- {
- CHECK_NN(ANeuralNetworksExecution_setInput(
- execution, i, nullptr, input_boolean_tensors[i], tensor->bytes * sizeof(uint8_t) / sizeof(bool)));
- }
- else
- {
- CHECK_NN(ANeuralNetworksExecution_setInput(
- execution, i, nullptr, tensor->data.raw, tensor->bytes));
- }
- }
-
- // Tell nn api where to place final data.
- for (size_t i = 0; i < interpreter->outputs().size(); i++) {
- int output = interpreter->outputs()[i];
- TfLiteTensor* tensor = interpreter->tensor(output);
-
- if (tensor->type == kTfLiteBool)
- {
- CHECK_NN(ANeuralNetworksExecution_setOutput(
- execution, i, nullptr, output_boolean_tensors[i], tensor->bytes * sizeof(uint8_t) / sizeof(bool)));
- }
- else
- {
- CHECK_NN(ANeuralNetworksExecution_setOutput(
- execution, i, nullptr, tensor->data.raw, tensor->bytes));
- }
- }
-
-  // The state_out of the previous invocation needs to be mapped to the
-  // state_in of the current invocation.
- for (size_t i = 0; i < model_states_outputs_.size(); i++) {
- int state_tensor_idx = model_states_outputs_[i];
- TfLiteTensor* tensor = interpreter->tensor(state_tensor_idx);
- // Here we are using a deep copy for state_in tensors so that we are not
-    // reading and writing into the same buffer during an invocation.
- // TODO(miaowang): using double shared buffer to minimize the copies.
- CHECK_NN(ANeuralNetworksExecution_setInput(
- execution, i + interpreter->inputs().size(), nullptr, tensor->data.raw,
- tensor->bytes));
- // Tell NNAPI where to output the state_out.
- CHECK_NN(ANeuralNetworksExecution_setOutput(
- execution, i + interpreter->outputs().size(), nullptr, tensor->data.raw,
- tensor->bytes));
- }
-
- // Currently use blocking compute.
- ANeuralNetworksEvent* event = nullptr;
- CHECK_NN(ANeuralNetworksExecution_startCompute(execution, &event));
- CHECK_NN(ANeuralNetworksEvent_wait(event));
- ANeuralNetworksEvent_free(event);
- ANeuralNetworksExecution_free(execution);
-
-  // Free the temporary buffers allocated for boolean input tensors.
- for (size_t i = 0; i < interpreter->inputs().size(); i++) {
- int input = interpreter->inputs()[i];
- TfLiteTensor* tensor = interpreter->tensor(input);
-
- if (tensor->type == kTfLiteBool)
- {
- uint8_t* temp_tensor = input_boolean_tensors[i];
- input_boolean_tensors[i] = nullptr;
-      delete[] temp_tensor;
- }
- }
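-  // Convert boolean outputs back from the byte encoding and free the buffers.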
- for (size_t i = 0; i < interpreter->outputs().size(); i++) {
- int output = interpreter->outputs()[i];
- TfLiteTensor* tensor = interpreter->tensor(output);
-
- if (tensor->type == kTfLiteBool)
- {
- uint8_t* temp_tensor = output_boolean_tensors[i];
- size_t elements = tensor->bytes / sizeof(bool);
- for (size_t idx = 0; idx < elements; idx++)
- {
- tensor->data.b[idx] = ((temp_tensor[idx] == 0x00) ? false : true);
- }
- output_boolean_tensors[i] = nullptr;
-      delete[] temp_tensor;
- }
- }
-
-#if 0
- printf("From the NN API:\n");
- TfLiteTensor* tensor = interpreter->tensor(interpreter->outputs()[0]);
- if (float* data =
- interpreter->typed_tensor<float>(interpreter->outputs()[0])) {
- size_t num = tensor->bytes / sizeof(float);
- for (float* p = data; p < data + num; p++) {
- printf(" %f", *p);
- }
- printf("\n");
- }
-#endif
-
- return kTfLiteOk;
-}
-
-bool NNAPIDelegate::IsSupported() { return nnfw::NNAPIExists(); }
-
-} // namespace tflite
-} // namespace nnfw
-
-// clang-format on
diff --git a/libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc b/libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc
deleted file mode 100644
index a91e4de60..000000000
--- a/libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc
+++ /dev/null
@@ -1,106 +0,0 @@
-// This file is included from AddOpsAndParams defined in nnapi_delegate.cc
-// and contains lambdas that extend the original TensorFlow Lite implementation.
- auto add_resize_bilinear_params = [&add_scalar_int32, &interpreter, &augmented_inputs](void* data) {
- auto builtin = reinterpret_cast<TfLiteResizeBilinearParams*>(data);
- if (builtin->align_corners) {
- FATAL("Resize bilinear does not support align corners in NNAPI");
- }
-
- TfLiteTensor* tensor = interpreter->tensor(augmented_inputs.back());
- assert(tensor->type == kTfLiteInt32);
- assert(tensor->bytes == sizeof(int)*2);
- augmented_inputs.pop_back();
-
- int height = ((int*)(tensor->data.raw))[1];
- int width = ((int*)(tensor->data.raw))[0];
- add_scalar_int32(height);
- add_scalar_int32(width);
- };
-
- auto check_l2normalization_params = [interpreter, &node](void* data) {
- auto builtin = reinterpret_cast<TfLiteL2NormParams*>(data);
- if (builtin->activation != kTfLiteActNone) {
- FATAL("NNAPI does not support L2Normalization with fused activations");
- }
- if ((node.inputs->size > 0) &&
- (interpreter->tensor(node.inputs->data[0])->dims->size != 4)) {
- FATAL("NNAPI only supports input rank 4 for L2Normalization");
- }
- };
-
- auto add_transpose_conv_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteTransposeConvParams*>(data);
- add_scalar_int32(builtin->padding);
- add_scalar_int32(builtin->stride_width);
- add_scalar_int32(builtin->stride_height);
- };
-
- auto add_lrn_params = [&add_scalar_int32,
- &add_scalar_float32](void* data) {
- auto builtin = reinterpret_cast<TfLiteLocalResponseNormParams*>(data);
- add_scalar_int32(builtin->radius);
- add_scalar_float32(builtin->bias);
- add_scalar_float32(builtin->alpha);
- add_scalar_float32(builtin->beta);
- };
-
- auto add_strided_slice_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteStridedSliceParams*>(data);
- add_scalar_int32(builtin->begin_mask);
- add_scalar_int32(builtin->end_mask);
-    // ellipsis_mask and new_axis_mask are not supported by the NN runtime
-    // (the TFLite interpreter supports both operations)
- if (builtin->ellipsis_mask) {
- FATAL("STRIDE_SLICE does not support ellipsis_mask in NNAPI");
- }
- if (builtin->new_axis_mask) {
- FATAL("STRIDE_SLICE does not support new_axis_mask in NNAPI");
- }
- add_scalar_int32(builtin->shrink_axis_mask);
- };
-
- auto add_gather_ex_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteGatherParams*>(data);
- add_scalar_int32(builtin->axis);
- if (builtin->axis != 0) {
- FATAL("GATHER does not support axis>0 in NNAPI");
- }
- };
-
-#ifndef OBS_BUILD
- auto add_pack_ex_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLitePackParams*>(data);
- add_scalar_int32(builtin->values_count);
- add_scalar_int32(builtin->axis);
- };
-
- auto add_unpack_ex_params = [&add_scalar_int32](void* data) {
- auto builtin = reinterpret_cast<TfLiteUnpackParams*>(data);
- add_scalar_int32(builtin->num);
- add_scalar_int32(builtin->axis);
- };
-#endif
-
- auto check_batch_to_space_params = [interpreter, &node, &augmented_inputs]() {
-
-    // If there are 3 inputs, check whether crops has the default values
-    // {0, 0, 0, 0}; otherwise the operation is unsupported by NNAPI.
-
- if(augmented_inputs.size() == 3)
- {
- const uint32_t crops_buffer_index = node.inputs->data[2];
- const TfLiteTensor* crops = interpreter->tensor(crops_buffer_index);
- const int *crops_value = crops->data.i32;
-
-      // Check whether crops has the default values {0, 0, 0, 0}
- if(crops_value[0] != 0 || crops_value[1] != 0 || crops_value[2] != 0 || crops_value[3] != 0)
- {
- FATAL("BATCH_TO_SPACE_ND does not support Explicit crops in NNAPI");
- }
- else
- {
-        // Drop the crops input and pass only the other two inputs
- augmented_inputs.pop_back();
- }
- }
- };
diff --git a/libs/tflite/src/interp/FlatBufferBuilder.cpp b/libs/tflite/src/interp/FlatBufferBuilder.cpp
deleted file mode 100644
index 4b9cde719..000000000
--- a/libs/tflite/src/interp/FlatBufferBuilder.cpp
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/interp/FlatBufferBuilder.h"
-
-#include "tflite/ext/kernels/register.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-
-std::unique_ptr<::tflite::Interpreter> FlatBufferBuilder::build(void) const
-{
- std::unique_ptr<::tflite::Interpreter> interpreter;
-
- nnfw::tflite::BuiltinOpResolver resolver;
-
- ::tflite::InterpreterBuilder builder(_model, resolver);
-
- builder(&interpreter);
-
- return std::move(interpreter);
-}
-
-} // namespace tflite
-} // namespace nnfw
diff --git a/libs/tflite/src/interp/FunctionBuilder.cpp b/libs/tflite/src/interp/FunctionBuilder.cpp
deleted file mode 100644
index eab940c18..000000000
--- a/libs/tflite/src/interp/FunctionBuilder.cpp
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "tflite/interp/FunctionBuilder.h"
-
-namespace nnfw
-{
-namespace tflite
-{
-
-std::unique_ptr<::tflite::Interpreter> FunctionBuilder::build(void) const
-{
- auto res = std::unique_ptr<::tflite::Interpreter>{new ::tflite::Interpreter};
-
- _fn(*res);
-
- return std::move(res);
-}
-
-} // namespace tflite
-} // namespace nnfw
diff --git a/nnas b/nnas
new file mode 100755
index 000000000..4445198ff
--- /dev/null
+++ b/nnas
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+NNAS_CONFIG_RPATH="infra/config"
+NNAS_COMMAND_RPATH="infra/command"
+NNAS_PROJECT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+function Usage()
+{
+ echo "Usage: $0 [COMMAND] ..."
+ echo "Command:"
+ for file in "$NNAS_COMMAND_RPATH"/*;
+ do
+ echo " $(basename "$file")"
+ done
+}
+
+function import()
+{
+ source "${NNAS_PROJECT_PATH}/${NNAS_CONFIG_RPATH}/$1"
+}
+
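+# The first argument selects a command script under ${NNAS_COMMAND_RPATH};
+# any remaining arguments are forwarded to it.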
+COMMAND=$1; shift
+
+if [[ -z "${COMMAND}" ]]; then
+ Usage
+ exit 255
+fi
+
+COMMAND_FILE="${NNAS_PROJECT_PATH}/${NNAS_COMMAND_RPATH}/${COMMAND}"
+
+if [[ ! -f "${COMMAND_FILE}" ]]; then
+ echo "ERROR: '${COMMAND}' is not supported"
+ exit 255
+fi
+
+export NNAS_PROJECT_PATH
+
+source "${COMMAND_FILE}" "$@"
diff --git a/nncc b/nncc
new file mode 100755
index 000000000..23cfb5bd4
--- /dev/null
+++ b/nncc
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+NNCC_SCRIPT_RPATH="scripts"
+NNCC_COMMAND_RPATH="infra/nncc/command"
+
+NNCC_PROJECT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+NNCC_SCRIPT_PATH="${NNCC_PROJECT_PATH}/${NNCC_SCRIPT_RPATH}"
+
+function Usage()
+{
+ echo "Usage: $0 [COMMAND] ..."
+ echo "Command:"
+ for file in "$NNCC_COMMAND_RPATH"/*;
+ do
+ echo " $(basename "$file")"
+ done
+}
+
+# Get command from command-line
+COMMAND=$1; shift
+
+if [[ -z "${COMMAND}" ]]; then
+ Usage
+ exit 255
+fi
+
+COMMAND_FILE="${NNCC_PROJECT_PATH}/${NNCC_COMMAND_RPATH}/${COMMAND}"
+
+if [[ ! -f "${COMMAND_FILE}" ]]; then
+ echo "ERROR: '${COMMAND}' is not supported"
+ Usage
+ exit 255
+fi
+
+export NNCC_PROJECT_PATH
+export NNCC_SCRIPT_PATH
+
+function import()
+{
+ source "${NNCC_PROJECT_PATH}/infra/nncc/config/$1"
+}
+
+source "${COMMAND_FILE}" "$@"
diff --git a/nnfw b/nnfw
new file mode 100755
index 000000000..12e71e02e
--- /dev/null
+++ b/nnfw
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+NNFW_SCRIPT_RPATH="infra/nnfw"
+NNFW_COMMAND_RPATH="${NNFW_SCRIPT_RPATH}/command"
+
+NNFW_PROJECT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+NNFW_SCRIPT_PATH="${NNFW_PROJECT_PATH}/${NNFW_SCRIPT_RPATH}"
+
+function Usage()
+{
+ echo "Usage: $0 [COMMAND] ..."
+ echo "Command:"
+ for file in "$NNFW_COMMAND_RPATH"/*;
+ do
+ echo " $(basename "$file")"
+ done
+}
+
+COMMAND=$1; shift
+
+if [[ -z "${COMMAND}" ]]; then
+ Usage
+ exit 255
+fi
+
+COMMAND_FILE="${NNFW_PROJECT_PATH}/${NNFW_COMMAND_RPATH}/${COMMAND}"
+
+if [[ ! -f "${COMMAND_FILE}" ]]; then
+ echo "ERROR: '${COMMAND}' is not supported"
+ exit 255
+fi
+
+export NNFW_PROJECT_PATH
+export NNFW_SCRIPT_PATH
+
+function import()
+{
+ source "${NNFW_PROJECT_PATH}/${NNFW_SCRIPT_RPATH}/config/$1"
+}
+
+source "${COMMAND_FILE}" "$@"
diff --git a/nnpackage/examples/one_op_in_tflite/add.tflite b/nnpackage/examples/one_op_in_tflite/add.tflite
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/nnpackage/examples/one_op_in_tflite/add.tflite
diff --git a/nnpackage/examples/one_op_in_tflite/metadata/MANIFEST b/nnpackage/examples/one_op_in_tflite/metadata/MANIFEST
new file mode 100644
index 000000000..1d96cce1b
--- /dev/null
+++ b/nnpackage/examples/one_op_in_tflite/metadata/MANIFEST
@@ -0,0 +1,7 @@
+{
+ "major-version" : "1",
+ "minor-version" : "0",
+ "patch-version" : "0",
+ "models" : [ "add.tflite" ],
+ "model-types" : [ "tflite" ]
+}
diff --git a/nnpackage/schema/circle_schema.fbs b/nnpackage/schema/circle_schema.fbs
new file mode 100644
index 000000000..dbb0d3526
--- /dev/null
+++ b/nnpackage/schema/circle_schema.fbs
@@ -0,0 +1,804 @@
+// Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+// Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Revision History
+// Version 0: Initial version. Based on TensorFlow Lite v1.13.1 schema.
+
+namespace circle;
+
+// This file identifier corresponds to schema version 0.
+file_identifier "CIR0";
+// File extension of any written files.
+file_extension "circle";
+
+// IMPORTANT: All new members of tables, enums and unions must be added at the
+// end to ensure backwards compatibility.
+
+// The type of data stored in a tensor.
+enum TensorType : byte {
+ FLOAT32 = 0,
+ FLOAT16 = 1,
+ INT32 = 2,
+ UINT8 = 3,
+ INT64 = 4,
+ STRING = 5,
+ BOOL = 6,
+ INT16 = 7,
+ COMPLEX64 = 8,
+ INT8 = 9,
+}
+
+// Custom quantization parameters for experimenting with new quantization
+// techniques.
+table CustomQuantization {
+ custom:[ubyte] (force_align: 16);
+}
+
+// Represents a specific quantization technique's parameters.
+union QuantizationDetails {
+ CustomQuantization,
+}
+
+// Parameters for converting a quantized tensor back to float.
+table QuantizationParameters {
+ // These four parameters are the asymmetric linear quantization parameters.
+ // Given a quantized value q, the corresponding float value f should be:
+ // f = scale * (q - zero_point)
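+  //   (worked example: with scale = 0.5 and zero_point = 128, a stored
+  //    value q = 130 dequantizes to f = 0.5 * (130 - 128) = 1.0)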
+ // For other quantization types, the QuantizationDetails below is used.
+ min:[float]; // For importing back into tensorflow.
+ max:[float]; // For importing back into tensorflow.
+ scale:[float]; // For dequantizing the tensor's values.
+ zero_point:[long];
+
+ // If this is not none, the quantization parameters above are ignored and the
+ // value of the QuantizationDetails union below should be used.
+ details:QuantizationDetails;
+}
+
+table Tensor {
+ // The tensor shape. The meaning of each entry is operator-specific but
+ // builtin ops use: [batch size, height, width, number of channels] (That's
+ // Tensorflow's NHWC).
+ shape:[int];
+ type:TensorType;
+ // An index that refers to the buffers table at the root of the model. Or,
+ // if there is no data buffer associated (i.e. intermediate results), then
+ // this is 0 (which refers to an always existent empty buffer).
+ //
+ // The data_buffer itself is an opaque container, with the assumption that the
+ // target device is little-endian. In addition, all builtin operators assume
+ // the memory is ordered such that if `shape` is [4, 3, 2], then index
+ // [i, j, k] maps to data_buffer[i*3*2 + j*2 + k].
+ buffer:uint;
+ name:string; // For debugging and importing back into tensorflow.
+ quantization:QuantizationParameters; // Optional.
+
+ is_variable:bool = false;
+}
+
+// A list of builtin operators. Builtin operators are slightly faster than custom
+// ones, but not by much. Moreover, while custom operators accept an opaque
+// object containing configuration parameters, builtins have a predetermined
+// set of acceptable options.
+enum BuiltinOperator : byte {
+ ADD = 0,
+ AVERAGE_POOL_2D = 1,
+ CONCATENATION = 2,
+ CONV_2D = 3,
+ DEPTHWISE_CONV_2D = 4,
+ // DEPTH_TO_SPACE = 5,
+ DEQUANTIZE = 6,
+ EMBEDDING_LOOKUP = 7,
+ FLOOR = 8,
+ FULLY_CONNECTED = 9,
+ HASHTABLE_LOOKUP = 10,
+ L2_NORMALIZATION = 11,
+ L2_POOL_2D = 12,
+ LOCAL_RESPONSE_NORMALIZATION = 13,
+ LOGISTIC = 14,
+ LSH_PROJECTION = 15,
+ LSTM = 16,
+ MAX_POOL_2D = 17,
+ MUL = 18,
+ RELU = 19,
+ // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
+ // since different model developers use RELU1 in different ways. Never
+ // create another op called RELU1.
+ RELU_N1_TO_1 = 20,
+ RELU6 = 21,
+ RESHAPE = 22,
+ RESIZE_BILINEAR = 23,
+ RNN = 24,
+ SOFTMAX = 25,
+ SPACE_TO_DEPTH = 26,
+ SVDF = 27,
+ TANH = 28,
+ // TODO(aselle): Consider rename to CONCATENATE_EMBEDDINGS
+ CONCAT_EMBEDDINGS = 29,
+ SKIP_GRAM = 30,
+ CALL = 31,
+ CUSTOM = 32,
+ EMBEDDING_LOOKUP_SPARSE = 33,
+ PAD = 34,
+ UNIDIRECTIONAL_SEQUENCE_RNN = 35,
+ GATHER = 36,
+ BATCH_TO_SPACE_ND = 37,
+ SPACE_TO_BATCH_ND = 38,
+ TRANSPOSE = 39,
+ MEAN = 40,
+ SUB = 41,
+ DIV = 42,
+ SQUEEZE = 43,
+ UNIDIRECTIONAL_SEQUENCE_LSTM = 44,
+ STRIDED_SLICE = 45,
+ BIDIRECTIONAL_SEQUENCE_RNN = 46,
+ EXP = 47,
+ TOPK_V2 = 48,
+ SPLIT = 49,
+ LOG_SOFTMAX = 50,
+ // DELEGATE is a special op type for the operations which are delegated to
+ // other backends.
+ // WARNING: Experimental interface, subject to change
+ DELEGATE = 51,
+ BIDIRECTIONAL_SEQUENCE_LSTM = 52,
+ CAST = 53,
+ PRELU = 54,
+ MAXIMUM = 55,
+ ARG_MAX = 56,
+ MINIMUM = 57,
+ LESS = 58,
+ NEG = 59,
+ PADV2 = 60,
+ GREATER = 61,
+ GREATER_EQUAL = 62,
+ LESS_EQUAL = 63,
+ SELECT = 64,
+ SLICE = 65,
+ SIN = 66,
+ TRANSPOSE_CONV = 67,
+ SPARSE_TO_DENSE = 68,
+ TILE = 69,
+ EXPAND_DIMS = 70,
+ EQUAL = 71,
+ NOT_EQUAL = 72,
+ LOG = 73,
+ SUM = 74,
+ SQRT = 75,
+ RSQRT = 76,
+ SHAPE = 77,
+ POW = 78,
+ ARG_MIN = 79,
+ FAKE_QUANT = 80,
+ REDUCE_PROD = 81,
+ REDUCE_MAX = 82,
+ PACK = 83,
+ LOGICAL_OR = 84,
+ ONE_HOT = 85,
+ LOGICAL_AND = 86,
+ LOGICAL_NOT = 87,
+ UNPACK = 88,
+ REDUCE_MIN = 89,
+ FLOOR_DIV = 90,
+ REDUCE_ANY = 91,
+ SQUARE = 92,
+ ZEROS_LIKE = 93,
+ FILL = 94,
+ FLOOR_MOD = 95,
+ RANGE = 96,
+ RESIZE_NEAREST_NEIGHBOR = 97,
+ LEAKY_RELU = 98,
+ SQUARED_DIFFERENCE = 99,
+ MIRROR_PAD = 100,
+ ABS = 101,
+ SPLIT_V = 102,
+}
+
+// Options for the builtin operators.
+union BuiltinOptions {
+ Conv2DOptions,
+ DepthwiseConv2DOptions,
+ ConcatEmbeddingsOptions,
+ LSHProjectionOptions,
+ Pool2DOptions,
+ SVDFOptions,
+ RNNOptions,
+ FullyConnectedOptions,
+ SoftmaxOptions,
+ ConcatenationOptions,
+ AddOptions,
+ L2NormOptions,
+ LocalResponseNormalizationOptions,
+ LSTMOptions,
+ ResizeBilinearOptions,
+ CallOptions,
+ ReshapeOptions,
+ SkipGramOptions,
+ SpaceToDepthOptions,
+ EmbeddingLookupSparseOptions,
+ MulOptions,
+ PadOptions,
+ GatherOptions,
+ BatchToSpaceNDOptions,
+ SpaceToBatchNDOptions,
+ TransposeOptions,
+ ReducerOptions,
+ SubOptions,
+ DivOptions,
+ SqueezeOptions,
+ SequenceRNNOptions,
+ StridedSliceOptions,
+ ExpOptions,
+ TopKV2Options,
+ SplitOptions,
+ LogSoftmaxOptions,
+ CastOptions,
+ DequantizeOptions,
+ MaximumMinimumOptions,
+ ArgMaxOptions,
+ LessOptions,
+ NegOptions,
+ PadV2Options,
+ GreaterOptions,
+ GreaterEqualOptions,
+ LessEqualOptions,
+ SelectOptions,
+ SliceOptions,
+ TransposeConvOptions,
+ SparseToDenseOptions,
+ TileOptions,
+ ExpandDimsOptions,
+ EqualOptions,
+ NotEqualOptions,
+ ShapeOptions,
+ PowOptions,
+ ArgMinOptions,
+ FakeQuantOptions,
+ PackOptions,
+ LogicalOrOptions,
+ OneHotOptions,
+ LogicalAndOptions,
+ LogicalNotOptions,
+ UnpackOptions,
+ FloorDivOptions,
+ SquareOptions,
+ ZerosLikeOptions,
+ FillOptions,
+ BidirectionalSequenceLSTMOptions,
+ BidirectionalSequenceRNNOptions,
+ UnidirectionalSequenceLSTMOptions,
+ FloorModOptions,
+ RangeOptions,
+ ResizeNearestNeighborOptions,
+ LeakyReluOptions,
+ SquaredDifferenceOptions,
+ MirrorPadOptions,
+ AbsOptions,
+ SplitVOptions,
+}
+
+enum Padding : byte { SAME, VALID }
+
+enum ActivationFunctionType : byte {
+ NONE = 0,
+ RELU = 1,
+ RELU_N1_TO_1 = 2,
+ RELU6 = 3,
+ TANH = 4,
+ SIGN_BIT = 5,
+}
+
+table Conv2DOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ fused_activation_function:ActivationFunctionType;
+ dilation_w_factor:int = 1;
+ dilation_h_factor:int = 1;
+}
+
+table Pool2DOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ filter_width:int;
+ filter_height:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+table DepthwiseConv2DOptions {
+ // Parameters for DepthwiseConv version 1 or above.
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ depth_multiplier:int;
+ fused_activation_function:ActivationFunctionType;
+ // Parameters for DepthwiseConv version 2 or above.
+ dilation_w_factor:int = 1;
+ dilation_h_factor:int = 1;
+}
+
+table ConcatEmbeddingsOptions {
+ num_channels:int;
+ num_columns_per_channel:[int];
+ embedding_dim_per_channel:[int]; // This could be inferred from parameters.
+}
+
+enum LSHProjectionType: byte {
+ UNKNOWN = 0,
+ SPARSE = 1,
+ DENSE = 2,
+}
+
+table LSHProjectionOptions {
+ type: LSHProjectionType;
+}
+
+table SVDFOptions {
+ rank:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow RNNCell.
+table RNNOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow dynamic_rnn with RNNCell.
+table SequenceRNNOptions {
+ time_major:bool;
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow bidrectional_dynamic_rnn with RNNCell.
+table BidirectionalSequenceRNNOptions {
+ time_major:bool;
+ fused_activation_function:ActivationFunctionType;
+ merge_outputs: bool;
+}
+
+enum FullyConnectedOptionsWeightsFormat: byte {
+ DEFAULT = 0,
+ SHUFFLED4x16INT8 = 1,
+}
+
+// An implementation of TensorFlow fully_connected (a.k.a Dense) layer.
+table FullyConnectedOptions {
+ // Parameters for FullyConnected version 1 or above.
+ fused_activation_function:ActivationFunctionType;
+
+ // Parameters for FullyConnected version 2 or above.
+ weights_format:FullyConnectedOptionsWeightsFormat = DEFAULT;
+}
+
+table SoftmaxOptions {
+ beta: float;
+}
+
+// An implementation of TensorFlow concat.
+table ConcatenationOptions {
+ axis:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+table AddOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table MulOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table L2NormOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table LocalResponseNormalizationOptions {
+ radius:int;
+ bias:float;
+ alpha:float;
+ beta:float;
+}
+
+enum LSTMKernelType : byte {
+ // Full LSTM kernel which supports peephole and projection.
+ FULL = 0,
+ // Basic LSTM kernels. Equivalent to TensorFlow BasicLSTMCell.
+ BASIC = 1,
+}
+
+// An implementation of TensorFlow LSTMCell and CoupledInputForgetGateLSTMCell
+table LSTMOptions {
+ // Parameters for LSTM version 1 or above.
+ fused_activation_function:ActivationFunctionType;
+ cell_clip: float; // Optional, 0.0 means no clipping
+ proj_clip: float; // Optional, 0.0 means no clipping
+
+ // Parameters for LSTM version 2 or above.
+ // Basic kernel is only supported in version 2 or above.
+ kernel_type: LSTMKernelType = FULL;
+}
+
+// An implementation of TensorFlow dynamic_rnn with LSTMCell.
+table UnidirectionalSequenceLSTMOptions {
+ fused_activation_function:ActivationFunctionType;
+ cell_clip: float; // Optional, 0.0 means no clipping
+ proj_clip: float; // Optional, 0.0 means no clipping
+
+ // If true then first dimension is sequence, otherwise batch.
+ time_major:bool;
+}
+
+table BidirectionalSequenceLSTMOptions {
+ fused_activation_function:ActivationFunctionType;
+ cell_clip: float; // Optional, 0.0 means no clipping
+ proj_clip: float; // Optional, 0.0 means no clipping
+
+ // If true, store the outputs of both directions into the first output.
+ merge_outputs: bool;
+}
+
+table ResizeBilinearOptions {
+ new_height: int (deprecated);
+ new_width: int (deprecated);
+ align_corners: bool;
+}
+
+table ResizeNearestNeighborOptions {
+ align_corners: bool;
+}
+
+// A call operation options
+table CallOptions {
+ // The subgraph index that needs to be called.
+ subgraph:uint;
+}
+
+table PadOptions {
+}
+
+table PadV2Options {
+}
+
+table ReshapeOptions {
+ new_shape:[int];
+}
+
+table SpaceToBatchNDOptions {
+}
+
+table BatchToSpaceNDOptions {
+}
+
+table SkipGramOptions {
+ ngram_size: int;
+ max_skip_size: int;
+ include_all_ngrams: bool;
+}
+
+table SpaceToDepthOptions {
+ block_size: int;
+}
+
+table SubOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table DivOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table TopKV2Options {
+}
+
+enum CombinerType : byte {
+ SUM = 0,
+ MEAN = 1,
+ SQRTN = 2,
+}
+
+table EmbeddingLookupSparseOptions {
+ combiner:CombinerType;
+}
+
+table GatherOptions {
+ axis: int;
+}
+
+table TransposeOptions {
+}
+
+table ExpOptions {
+}
+
+table ReducerOptions {
+ keep_dims: bool;
+}
+
+table SqueezeOptions {
+ squeeze_dims:[int];
+}
+
+table SplitOptions {
+ num_splits: int;
+}
+
+table SplitVOptions {
+ num_splits: int;
+}
+
+table StridedSliceOptions {
+ begin_mask: int;
+ end_mask: int;
+ ellipsis_mask: int;
+ new_axis_mask: int;
+ shrink_axis_mask: int;
+}
+
+table LogSoftmaxOptions {
+}
+
+table CastOptions {
+ in_data_type: TensorType;
+ out_data_type: TensorType;
+}
+
+table DequantizeOptions {
+}
+
+table MaximumMinimumOptions {
+}
+
+table TileOptions {
+}
+
+table ArgMaxOptions {
+ output_type : TensorType;
+}
+
+table ArgMinOptions {
+ output_type : TensorType;
+}
+
+table GreaterOptions {
+}
+
+table GreaterEqualOptions {
+}
+
+table LessOptions {
+}
+
+table LessEqualOptions {
+}
+
+table NegOptions {
+}
+
+table SelectOptions {
+}
+
+table SliceOptions {
+}
+
+table TransposeConvOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+}
+
+table ExpandDimsOptions {
+}
+
+table SparseToDenseOptions {
+ validate_indices:bool;
+}
+
+table EqualOptions {
+}
+
+table NotEqualOptions {
+}
+
+table ShapeOptions {
+ // Optional output type of the operation (int32 or int64). Defaults to int32.
+ out_type : TensorType;
+}
+
+table PowOptions {
+}
+
+table FakeQuantOptions {
+ // Parameters supported by version 1:
+ min:float;
+ max:float;
+ num_bits:int;
+
+ // Parameters supported by version 2:
+ narrow_range:bool;
+}
+
+table PackOptions {
+ values_count:int;
+ axis:int;
+}
+
+table LogicalOrOptions {
+}
+
+table OneHotOptions {
+ axis:int;
+}
+
+table AbsOptions {
+}
+
+
+table LogicalAndOptions {
+}
+
+table LogicalNotOptions {
+}
+
+table UnpackOptions {
+ num:int;
+ axis:int;
+}
+
+table FloorDivOptions {
+}
+
+table SquareOptions {
+}
+
+table ZerosLikeOptions {
+}
+
+table FillOptions {
+}
+
+table FloorModOptions {
+}
+
+table RangeOptions {
+}
+
+table LeakyReluOptions {
+ alpha:float;
+}
+
+table SquaredDifferenceOptions {
+}
+
+enum MirrorPadMode : byte {
+ // Doesn't include borders.
+ REFLECT = 0,
+ // Includes borders.
+ SYMMETRIC = 1,
+}
+
+table MirrorPadOptions {
+ mode:MirrorPadMode;
+}
+
+// An OperatorCode can be an enum value (BuiltinOperator) if the operator is a
+// builtin, or a string if the operator is custom.
+table OperatorCode {
+ builtin_code:BuiltinOperator;
+ custom_code:string;
+
+  // The version of the operator. The version needs to be bumped whenever new
+ // parameters are introduced into an op.
+ version:int = 1;
+}
+
+enum CustomOptionsFormat : byte {
+ FLEXBUFFERS = 0,
+}
+
+enum DataFormat : byte {
+ // For 2D data, NHWC(batch, height, width, channels)
+ // For 3D data, NDHWC(batch, depth, height, width, channels)
+ CHANNELS_LAST = 0,
+ // For 2D data, NCHW(batch, channels, height, width)
+ // For 3D data, NCDHW(batch, channels, depth, height, width)
+ CHANNELS_FIRST = 1,
+}
+
+// An operator takes tensors as inputs and outputs. The type of operation being
+// performed is determined by an index into the list of valid OperatorCodes,
+// while the specifics of each operation are configured using builtin_options
+// or custom_options.
+table Operator {
+ // Index into the operator_codes array. Using an integer here avoids
+  // complicated map lookups.
+ opcode_index:uint;
+
+ // Optional input and output tensors are indicated by -1.
+ inputs:[int];
+ outputs:[int];
+
+ builtin_options:BuiltinOptions;
+ custom_options:[ubyte];
+ custom_options_format:CustomOptionsFormat;
+
+ // A list of booleans indicating the input tensors which are being mutated by
+  // this operator (e.g. used by RNN and LSTM).
+ // For example, if the "inputs" array refers to 5 tensors and the second and
+ // fifth are mutable variables, then this list will contain
+ // [false, true, false, false, true].
+ //
+ // If the list is empty, no variable is mutated in this operator.
+ // The list either has the same length as `inputs`, or is empty.
+ mutating_variable_inputs:[bool];
+}
+
+// The root type, defining a subgraph, which typically represents an entire
+// model.
+table SubGraph {
+ // A list of all tensors used in this subgraph.
+ tensors:[Tensor];
+
+ // Indices of the tensors that are inputs into this subgraph. Note this is
+ // the list of non-static tensors that feed into the subgraph for inference.
+ inputs:[int];
+
+ // Indices of the tensors that are outputs out of this subgraph. Note this is
+ // the list of output tensors that are considered the product of the
+ // subgraph's inference.
+ outputs:[int];
+
+ // All operators, in execution order.
+ operators:[Operator];
+
+ // Name of this subgraph (used for debugging).
+ name:string;
+
+ // Data format for input/output of SubGraph
+ data_format: DataFormat;
+}
+
+// Table of raw data buffers (used for constant tensors). Referenced by tensors
+// by index. The generous alignment accommodates mmap-friendly data structures.
+table Buffer {
+ data:[ubyte] (force_align: 16);
+}
+
+table Model {
+ // Version of the schema.
+ version:uint;
+
+ // A list of all operator codes used in this model. This is
+ // kept in order because operators carry an index into this
+ // vector.
+ operator_codes:[OperatorCode];
+
+ // All the subgraphs of the model. The 0th is assumed to be the main
+ // model.
+ subgraphs:[SubGraph];
+
+ // A description of the model.
+ description:string;
+
+ // Buffers of the model.
+ // Note the 0th entry of this array must be an empty buffer (sentinel).
+ // This is a convention so that tensors without a buffer can provide 0 as
+ // their buffer.
+ buffers:[Buffer];
+
+  // Metadata about the model. Indirects into the existing buffers list.
+ metadata_buffer:[int];
+}
+
+root_type Model;
diff --git a/nnpackage/spec/00_requirement.md b/nnpackage/spec/00_requirement.md
new file mode 100644
index 000000000..035fc9856
--- /dev/null
+++ b/nnpackage/spec/00_requirement.md
@@ -0,0 +1,28 @@
+# Requirements (or Checkpoint)
+
+## Packaging
+
+### Packaging Format
+
+- [ ] PF1. support royalty free compression
+- [ ] PF2. compatible with low end devices
+
+### Manifest
+
+- [ ] MF1. human readable
+- [ ] MF2. easy to parse for several types of configuration variables.
+- [ ] MF3. small binary size for parsing (since the parser will be part of the runtime)
+
+## Model
+
+- [ ] MD1. support multiple tensor layouts (such as NHWC, NCHW, etc.)
+ - define layout for model / submodel / other unit?
+ - use operator (such as loco)
+- [ ] MD2. describe operand?
+ - include in operator vs. independent field for operand
+ - support unspecified dimension value & unspecified rank?
+- [ ] MD3. describe operation type
+ - string vs. enum value?
+- [ ] MD4. support many quantization schemes
+  - how to support them (e.g. a union-type quantization parameter field, or a field that references a quantization parameter table per quantization methodology)
+- [ ] MD5. backward-compatibility and maintainability
diff --git a/nnpackage/spec/10_packaging_and_manifest.md b/nnpackage/spec/10_packaging_and_manifest.md
new file mode 100644
index 000000000..1bc18ff30
--- /dev/null
+++ b/nnpackage/spec/10_packaging_and_manifest.md
@@ -0,0 +1,92 @@
+# Packaging and Manifest
+
+## 1. Overview
+
+`nnpackage` is the input of the nnruntime (`neurun`), and the output of the nncompiler (`nncc`).
+
+`nnpackage` contains all data (such as the model, `MANIFEST`, and custom_op) that is required to run a given model.
+
+This document covers packaging and `MANIFEST` only.
+
+For `model` and `custom_op`, see [20_model_and_operators.md](20_model_and_operators.md) and [30_custom_op.md](30_custom_op.md).
+
+## 2. Packaging Structure
+
+`nnpackage` is a Zip archive with the following structure:
+
+```
+nnpackage
+├── custom_op
+├── metadata
+│   └── MANIFEST
+└── mymodel.model
+```
+
+- `mymodel.model` is a model file that has computation graph and weights.
+- `metadata` is a directory that contains all metadata including `MANIFEST`.
+- `MANIFEST` is a collection of attributes about this package.
+- `custom_op` is a directory that contains implementation objects.
+
+## 3. Packaging Format
+
+`nnpackage` is packaged as a Zip archive, which may be either `compressed` or `stored` (no compression).
+
+## 4. Manifest
+
+`MANIFEST` is a collection of attributes about the `nnpackage`.
+
+### Attributes
+
+#### version
+
+`version` is composed of 3 numbers in the form `MAJOR`.`MINOR`.`PATCH`.
+
+Given a version number `MAJOR`.`MINOR`.`PATCH`, increment the:
+
+- `MAJOR` version when you make incompatible/breaking changes,
+- `MINOR` version when you add functionality in a backwards-compatible manner, and
+- `PATCH` version when you make backwards-compatible bug fixes.
+
+For details, see [semantic versioning 2.0.0](https://semver.org/).
+
+##### major-version
+
+`major-version` is the major version of `nnpackage`.
+
+##### minor-version
+
+`minor-version` is the minor version of `nnpackage`.
+
+##### patch-version
+
+`patch-version` is the patch version of `nnpackage`.
+
+#### models
+
+`models` is an array of paths to model files; each path is relative to the top-level directory of this package.
+The first element of the array will be the default model to be executed.
+
+#### model-types
+
+`model-types` is an array of strings that describes the type of each model in `models`.
+
+It can have the (case-sensitive) values listed in the following table.
+
+| name | description |
+|--------|------------------------|
+| tflite | tensorflow lite schema |
+| circle | nnpackage schema |
+
+### Example
+
+Here is an example of `MANIFEST`.
+
+```
+{
+ "major-version" : "1",
+ "minor-version" : "0",
+ "patch-version" : "0",
+ "models" : [ "mymodel.model", "yourmodel.model" ],
+ "model-types" : [ "tflite", "circle" ]
+}
+```
diff --git a/nnpackage/spec/20_model_and_operators.md b/nnpackage/spec/20_model_and_operators.md
new file mode 100644
index 000000000..fa4131645
--- /dev/null
+++ b/nnpackage/spec/20_model_and_operators.md
@@ -0,0 +1,90 @@
+# Model
+
+## Serialization Format
+
+`nnpackage` uses FlatBuffers to store the model.
+
+Rationale:
+
+1. `flatbuffers` is:
+
+- space-efficient
+- explicit-schema based
+- an open-source library with a royalty-free license
+- header-only (unless we use flatbuffers' reflection)
+- a proven solution (used by TensorFlow Lite)
+
+2. We have also considered other solutions:
+- [`bjson (binary JSON)`](http://bjson.org/)
+- `protocol buffers`
+
+## Baseline Schema
+
+The `nnpackage` schema is based on the TensorFlow Lite schema.
+
+Rationale:
+
+- Fundamentally, `nnpackage` and `TFLite` share the same aim:
+running pre-trained models on a device with relatively low computing power and memory.
+TFLite's solution is acceptable, so we don't need to create the same thing again.
+- We can reuse several infrastructures and tools from TFLite.
+
+## Extensions
+
+The `nnpackage` model has some extensions that differ from, or are missing in, TFLite.
+
+### Multiple Layout
+
+`nnpackage` can support multiple layouts.
+
+1. The layout is represented using the `DataFormat` enumeration.
+
+`DataFormat` must be one of the enumerators defined in `nnpackage_schema.fbs`.
+
+For example, `CHANNELS_FIRST` or `CHANNELS_LAST` can be used.
+
+```
+ // For 2D data, NHWC(batch, height, width, channels)
+ // For 3D data, NDHWC(batch, depth, height, width, channels)
+ CHANNELS_LAST = 0,
+ // For 2D data, NCHW(batch, channels, height, width)
+ // For 3D data, NCDHW(batch, channels, depth, height, width)
+ CHANNELS_FIRST = 1,
+```
+
+2. `DataFormat` must be the same within a submodel.
+
+Rationale:
+
+- frequent switching between different layouts degrades performance
+
+Under this assumption, we expect to:
+
+- simplify the runtime implementation
+- improve performance
+- reduce memory usage
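+
+To make the distinction concrete, here is a minimal sketch (illustration only, not part of the spec) of how the two layouts address the same logical element `(n, c, h, w)` of a 4D tensor:
+
+```
+/* Linear offset of element (n, c, h, w) in a 4D tensor of shape
+ * N x C x H x W, for each layout. */
+#include <stddef.h>
+
+/* CHANNELS_LAST (NHWC): channels vary fastest. */
+static inline size_t offset_nhwc(size_t n, size_t c, size_t h, size_t w,
+                                 size_t C, size_t H, size_t W)
+{
+  return ((n * H + h) * W + w) * C + c;
+}
+
+/* CHANNELS_FIRST (NCHW): width varies fastest. */
+static inline size_t offset_nchw(size_t n, size_t c, size_t h, size_t w,
+                                 size_t C, size_t H, size_t W)
+{
+  return ((n * C + c) * H + h) * W + w;
+}
+```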
+
+### Unspecified Dimension
+
+`nnpackage` represents an unspecified dimension with `-1`.
+
+Rationale:
+
+1. It should be an `int`, since dimensions are of `int` type in the flatbuffer schema; thus `?` cannot be used.
+2. `0` is also a candidate, and is what the Android NN API uses.
+However, we would like to reserve `0` because `0` could be a valid dimension for a certain
+operator (e.g. `tflite.slice`).
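+
+For illustration, a consumer of the schema can treat `-1` as follows (a minimal sketch, not part of the spec):
+
+```
+/* Check whether a shape read from the model is fully specified,
+ * treating -1 as "unspecified" per this spec. */
+#include <stdbool.h>
+
+static bool shape_is_fully_specified(const int *dims, int rank)
+{
+  for (int i = 0; i < rank; ++i)
+  {
+    if (dims[i] == -1) /* -1 marks an unspecified dimension */
+      return false;
+  }
+  return true;
+}
+```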
+
+## Operator Reference
+
+All operators use the same semantics as TensorFlow Lite operators.
+Refer to the TensorFlow Lite source code to understand which inputs, outputs, and attributes
+are required and how they are interpreted.
+
+## Schema Source
+
+nnpackage supports two kinds of models: `tflite` and `circle`.
+
+- For tflite, see `schema.fbs` from the TensorFlow Lite source.
+
+- For circle, see [`../schema/circle_schema.fbs`](../schema/circle_schema.fbs).
diff --git a/nnpackage/spec/30_custom_op.md b/nnpackage/spec/30_custom_op.md
new file mode 100644
index 000000000..58e0acddb
--- /dev/null
+++ b/nnpackage/spec/30_custom_op.md
@@ -0,0 +1,86 @@
+# Custom Operators
+
+This document explains custom operators and how a custom op is represented in nnpackage.
+
+## What is a custom operator?
+
+A custom operator (hereafter custom op) is used to provide a new operator implementation.
+It can be anything that does not exist in the current runtime implementation.
+
+You can use custom ops for several use cases; possible use cases are:
+
+- when an operator in TensorFlow is not supported by the nnfw runtime
+- when an operator is supported but you would like to use your own implementation
+ - for example, for optimization, by grouping several operators into one super operator.
+
+## Custom op in model
+
+nnpackage will support several kinds of models.
+Currently, the only supported type is tflite.
+
+### tflite
+
+If you're using the `tflite` format, it is the same format as TensorFlow Lite.
+
+You can generate a `tflite` model with custom ops using `tflite_convert`.
+Please find the documentation on the official TensorFlow site.
+
+## Custom op kernel implementation
+
+You need to provide the custom op kernel in the following form:
+
+```
+/*
+ * Custom kernel evaluation function
+ *
+ * param[in] params custom operation parameters
+ * param[in] userdata pointer to a user-specified buffer (kernel-instance specific)
+ */
+typedef void (*nnfw_custom_eval)(nnfw_custom_kernel_params *params, char *userdata,
+ size_t userdata_size);
+
+```
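+
+For illustration only, a `FillFrom` kernel might look like the sketch below. The way it reads inputs and outputs (`params->inputs[0].allocation`, etc.) is an assumption made for this sketch; consult `nnfw_dev.h` for the actual layout of `nnfw_custom_kernel_params`.
+
+```
+/* A hypothetical kernel sketch (illustration only). The fields used on
+ * nnfw_custom_kernel_params (inputs, outputs, allocation) are assumptions,
+ * not the verified structure layout; see nnfw_dev.h for the real one. */
+#include <string.h>
+#include "nnfw_dev.h"
+
+static void FillFrom_eval(nnfw_custom_kernel_params *params, char *userdata,
+                          size_t userdata_size)
+{
+  (void)userdata;
+  (void)userdata_size;
+
+  /* Assumed accessors: raw buffers of the first input and first output. */
+  const char *in = params->inputs[0].allocation;
+  char *out = params->outputs[0].allocation;
+
+  /* In practice the byte count would be derived from the operand's tensor
+   * info; a fixed size is used here purely for illustration. */
+  memcpy(out, in, 16);
+}
+```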
+
+The structures and relevant APIs are defined in the nnfw APIs.
+Please see `nnfw_dev.h` for details.
+
+You can find an example in the `nnfw` repository.
+
+The custom op kernel implementation is stored in the nnpackage in the form of a prebuilt library.
+
+Here is an example nnpackage structure for `FillFrom`:
+
+```
+FillFrom
+├── FillFrom.tflite
+├── custom_op
+│ ├── libFillFrom.armv7l-linux.debug.a
+│ └── libFillFrom.armv7l-linux.release.a
+└── metadata
+ └── MANIFEST
+```
+
+All custom operator libraries are placed under `{nnpackage_root}/custom_op/`, named `lib{customop_name}.{arch}-{os}-{buildtype}.a`.
+
+## How to use custom op in app
+
+To use a custom op, the app has to register the operator with `nnfw_register_custom_op_info`.
+
+```
+/*
+ * custom operation registration info
+ */
+typedef struct
+{
+ nnfw_custom_eval eval_function;
+} custom_kernel_registration_info;
+
+NNFW_STATUS nnfw_register_custom_op_info(nnfw_session *session, const char *id,
+ custom_kernel_registration_info *info)
+```
+
+Please find a sample app in the `nnfw` repository.
+
+The `id` should be unique within an app.
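+
+For example, registering the `FillFrom` kernel sketched above could look like this (a usage sketch; `NNFW_STATUS_NO_ERROR` is assumed to be the success status of the nnfw API):
+
+```
+/* A usage sketch (illustration only): register the FillFrom kernel with a
+ * session before preparing/running the model. The id "FillFrom" must match
+ * the custom op name used in the model. */
+#include "nnfw.h"
+#include "nnfw_dev.h"
+
+int register_fillfrom(nnfw_session *session)
+{
+  custom_kernel_registration_info info;
+  info.eval_function = FillFrom_eval; /* the kernel sketched earlier */
+
+  if (nnfw_register_custom_op_info(session, "FillFrom", &info) != NNFW_STATUS_NO_ERROR)
+    return -1;
+
+  return 0;
+}
+```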
+
diff --git a/packaging/flatbuffers.tar.gz b/packaging/flatbuffers.tar.gz
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/packaging/flatbuffers.tar.gz
diff --git a/packaging/nnfw.spec b/packaging/nnfw.spec
index 3649e8e20..475fb2e81 100644
--- a/packaging/nnfw.spec
+++ b/packaging/nnfw.spec
@@ -1,12 +1,13 @@
Name: nnfw
Summary: nnfw
-Version: 0.2
+Version: 0.3
Release: 1
Group: Development
-License: Apache-2.0 and MIT
+License: Apache-2.0 and MIT and BSD-2-Clause
Source0: %{name}-%{version}.tar.gz
Source1: %{name}.manifest
+Source1001: flatbuffers.tar.gz
%ifarch arm armv7l aarch64
BuildRequires: cmake
@@ -27,6 +28,13 @@ nnfw is a high-performance, on-device neural network framework for Tizen
%{!?build_type: %define build_type Release}
+%package devel
+Summary: NNFW Devel Package
+Requires: nnfw
+
+%description devel
+NNFW devel package.
+
%package test
Summary: NNFW Test
Requires: nnfw
@@ -46,26 +54,41 @@ NNFW test rpm. It does not depends on nnfw rpm since it contains nnfw runtime.
%define target_arch aarch64
%endif
+# Execute gbs with --define "test_build 1" in case you need to test with environment variables
+%{!?test_build: %define test_build 0}
%{!?coverage_build: %define coverage_build 0}
%if %{coverage_build} == 1
-%define build_options COVERAGE_BUILD=1 OBS_BUILD=1 BUILD_TYPE=Debug TARGET_ARCH=%{target_arch} TARGET_OS=tizen
+%define build_options COVERAGE_BUILD=1 OBS_BUILD=1 BUILD_TYPE=Debug TARGET_ARCH=%{target_arch} TARGET_OS=tizen TFLITE_MAJOR_VER=1 TFLITE_MINOR_VER=13 OPTIONS=-DENVVAR_NEURUN_CONFIG=ON
%else
-%define build_options OBS_BUILD=1 BUILD_TYPE=%{build_type} INSTALL_PATH=%{buildroot}%{_prefix} TARGET_ARCH=%{target_arch} TARGET_OS=tizen
+%if %{test_build} == 1
+%define build_options OBS_BUILD=1 BUILD_TYPE=%{build_type} INSTALL_PATH=%{buildroot}%{_prefix} TARGET_ARCH=%{target_arch} TARGET_OS=tizen TFLITE_MAJOR_VER=1 TFLITE_MINOR_VER=13 OPTIONS=-DENVVAR_NEURUN_CONFIG=ON
+%else
+%define build_options OBS_BUILD=1 BUILD_TYPE=%{build_type} INSTALL_PATH=%{buildroot}%{_prefix} TARGET_ARCH=%{target_arch} TARGET_OS=tizen TFLITE_MAJOR_VER=1 TFLITE_MINOR_VER=13
+%endif
%endif
%prep
%setup -q
cp %{SOURCE1} .
+mkdir ./externals
+cp %{SOURCE1001} ./externals/.
+cd ./externals
+tar -xf %{SOURCE1001}
%build
%ifarch arm armv7l aarch64
-%{build_options} make %{?jobs:-j%jobs}
+cp -v Makefile.template Makefile
+%{build_options} make %{?jobs:-j%jobs} all
%endif
%install
%ifarch arm armv7l aarch64
%{build_options} make install
+# copy headers for devel
+mkdir -p %{buildroot}%{_includedir}
+cp -rf runtimes/include/* %{buildroot}%{_includedir}
+
%ifarch aarch64
mv %{buildroot}%{_prefix}/lib %{buildroot}%{_libdir}
%endif
@@ -79,12 +102,16 @@ mv %{buildroot}%{_prefix}/bin %{buildroot}%{test_install_prefix}/Product/out
cp -rf %{buildroot}%{_libdir} %{buildroot}%{test_install_prefix}/Product/out
rm -rf %{buildroot}%{_libdir}/pureacl
## install tests
-cp -rf ./tests/framework %{buildroot}%{test_install_prefix}/framework
+mkdir -p %{buildroot}%{test_install_prefix}/tests
+cp -rf ./tests/framework %{buildroot}%{test_install_prefix}/tests
## install tools
mkdir -p %{buildroot}%{test_install_prefix}/tools
-cp -rf ./tests/scripts %{buildroot}%{test_install_prefix}/tools
+cp -rf ./tests/scripts %{buildroot}%{test_install_prefix}/tests
+## install infra
+mkdir -p %{buildroot}%{test_install_prefix}/infra
+cp -rf ./infra/scripts/ %{buildroot}%{test_install_prefix}/infra
%else
-%{build_options} make build_coverage_suite
+%{build_options} make build_coverage_suite
mkdir -p %{buildroot}%{test_install_prefix}
cp -rf Product/out/coverage-suite.tar.gz %{buildroot}%{test_install_prefix}/.
tar -zxf %{buildroot}%{test_install_prefix}/coverage-suite.tar.gz -C %{buildroot}%{test_install_prefix}
@@ -105,6 +132,13 @@ rm -rf %{buildroot}%{test_install_prefix}/coverage-suite.tar.gz
%exclude %{_libdir}/debug
%endif
+%files devel
+%manifest %{name}.manifest
+%defattr(-,root,root,-)
+%ifarch arm armv7l aarch64
+%{_includedir}/*
+%endif
+
%files test
%manifest %{name}.manifest
%defattr(-,root,root,-)
diff --git a/res/BVLCCaffeTests/BatchNorm_000/INFERENCE b/res/BVLCCaffeTests/BatchNorm_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/BatchNorm_000/INFERENCE
diff --git a/res/BVLCCaffeTests/BatchNorm_000/test.prototxt b/res/BVLCCaffeTests/BatchNorm_000/test.prototxt
new file mode 100644
index 000000000..7c0f19ddf
--- /dev/null
+++ b/res/BVLCCaffeTests/BatchNorm_000/test.prototxt
@@ -0,0 +1,17 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 4 dim: 4 }
+ }
+}
+layer {
+ name: "batchnorm"
+ type: "BatchNorm"
+ bottom: "data"
+ top: "batchnorm"
+ batch_norm_param {
+ use_global_stats: true
+ }
+}
diff --git a/res/BVLCCaffeTests/Concat_000/INFERENCE b/res/BVLCCaffeTests/Concat_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Concat_000/INFERENCE
diff --git a/res/BVLCCaffeTests/Concat_000/test.prototxt b/res/BVLCCaffeTests/Concat_000/test.prototxt
new file mode 100644
index 000000000..0c2ee899c
--- /dev/null
+++ b/res/BVLCCaffeTests/Concat_000/test.prototxt
@@ -0,0 +1,23 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data1"
+ top: "data2"
+ top: "data3"
+ top: "data4"
+ input_param {
+ shape: { dim: 1 dim: 1 dim: 4 dim: 4 }
+ shape: { dim: 1 dim: 2 dim: 4 dim: 4 }
+ shape: { dim: 1 dim: 3 dim: 4 dim: 4 }
+ shape: { dim: 1 dim: 4 dim: 4 dim: 4 }
+ }
+}
+layer {
+ name: "concat"
+ type: "Concat"
+ bottom: "data1"
+ bottom: "data2"
+ bottom: "data3"
+ bottom: "data4"
+ top: "concat"
+}
diff --git a/res/BVLCCaffeTests/Convolution_000/INFERENCE b/res/BVLCCaffeTests/Convolution_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_000/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_000/test.prototxt b/res/BVLCCaffeTests/Convolution_000/test.prototxt
new file mode 100644
index 000000000..b56731c06
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_000/test.prototxt
@@ -0,0 +1,23 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 4 dim: 4 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
diff --git a/res/BVLCCaffeTests/Convolution_001/INFERENCE b/res/BVLCCaffeTests/Convolution_001/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_001/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_001/test.prototxt b/res/BVLCCaffeTests/Convolution_001/test.prototxt
new file mode 100644
index 000000000..5b6b5c811
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_001/test.prototxt
@@ -0,0 +1,24 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 15 dim: 15 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ stride: 2
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
diff --git a/res/BVLCCaffeTests/Convolution_002/INFERENCE b/res/BVLCCaffeTests/Convolution_002/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_002/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_002/test.prototxt b/res/BVLCCaffeTests/Convolution_002/test.prototxt
new file mode 100644
index 000000000..7cd59be3e
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_002/test.prototxt
@@ -0,0 +1,24 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
diff --git a/res/BVLCCaffeTests/Convolution_003/INFERENCE b/res/BVLCCaffeTests/Convolution_003/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_003/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_003/test.prototxt b/res/BVLCCaffeTests/Convolution_003/test.prototxt
new file mode 100644
index 000000000..38c4ebdf9
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_003/test.prototxt
@@ -0,0 +1,25 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ pad: 1
+ pad: 2
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
diff --git a/res/BVLCCaffeTests/Convolution_004/INFERENCE b/res/BVLCCaffeTests/Convolution_004/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_004/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_004/test.prototxt b/res/BVLCCaffeTests/Convolution_004/test.prototxt
new file mode 100644
index 000000000..76b1039c5
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_004/test.prototxt
@@ -0,0 +1,24 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ kernel_h: 3
+ kernel_w: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
diff --git a/res/BVLCCaffeTests/Convolution_005/INFERENCE b/res/BVLCCaffeTests/Convolution_005/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_005/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_005/test.prototxt b/res/BVLCCaffeTests/Convolution_005/test.prototxt
new file mode 100644
index 000000000..93104fee8
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_005/test.prototxt
@@ -0,0 +1,21 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 15 dim: 15 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ pad_h: 0
+ pad_w: 3
+ kernel_size: 7
+ }
+}
diff --git a/res/BVLCCaffeTests/Convolution_006/INFERENCE b/res/BVLCCaffeTests/Convolution_006/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_006/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_006/test.prototxt b/res/BVLCCaffeTests/Convolution_006/test.prototxt
new file mode 100644
index 000000000..f50a0070f
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_006/test.prototxt
@@ -0,0 +1,24 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ }
+}
+layer {
+ name: "dconv"
+ type: "Convolution"
+ bottom: "data"
+ top: "dconv"
+ convolution_param {
+ group: 3
+ num_output: 3
+ bias_term: false
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
diff --git a/res/BVLCCaffeTests/Convolution_007/INFERENCE b/res/BVLCCaffeTests/Convolution_007/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_007/INFERENCE
diff --git a/res/BVLCCaffeTests/Convolution_007/test.prototxt b/res/BVLCCaffeTests/Convolution_007/test.prototxt
new file mode 100644
index 000000000..78009fe27
--- /dev/null
+++ b/res/BVLCCaffeTests/Convolution_007/test.prototxt
@@ -0,0 +1,26 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 4 dim: 4 }
+ }
+}
+layer {
+ name: "conv"
+ type: "Convolution"
+ bottom: "data"
+ top: "conv"
+ convolution_param {
+ num_output: 2
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
diff --git a/res/BVLCCaffeTests/Eltwise_000/INFERENCE b/res/BVLCCaffeTests/Eltwise_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Eltwise_000/INFERENCE
diff --git a/res/BVLCCaffeTests/Eltwise_000/test.prototxt b/res/BVLCCaffeTests/Eltwise_000/test.prototxt
new file mode 100644
index 000000000..09b85c390
--- /dev/null
+++ b/res/BVLCCaffeTests/Eltwise_000/test.prototxt
@@ -0,0 +1,20 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "lhs"
+ top: "rhs"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ }
+}
+layer {
+ name: "sum"
+ type: "Eltwise"
+ bottom: "lhs"
+ bottom: "rhs"
+ top: "sum"
+ eltwise_param {
+ operation: SUM
+ }
+}
diff --git a/res/BVLCCaffeTests/Eltwise_001/INFERENCE b/res/BVLCCaffeTests/Eltwise_001/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Eltwise_001/INFERENCE
diff --git a/res/BVLCCaffeTests/Eltwise_001/test.prototxt b/res/BVLCCaffeTests/Eltwise_001/test.prototxt
new file mode 100644
index 000000000..3eef071f9
--- /dev/null
+++ b/res/BVLCCaffeTests/Eltwise_001/test.prototxt
@@ -0,0 +1,20 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "lhs"
+ top: "rhs"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ shape: { dim: 1 dim: 3 dim: 16 dim: 16 }
+ }
+}
+layer {
+ name: "prod"
+ type: "Eltwise"
+ bottom: "lhs"
+ bottom: "rhs"
+ top: "prod"
+ eltwise_param {
+ operation: PROD
+ }
+}
diff --git a/res/BVLCCaffeTests/Input_000/INFERENCE b/res/BVLCCaffeTests/Input_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Input_000/INFERENCE
diff --git a/res/BVLCCaffeTests/Input_000/test.prototxt b/res/BVLCCaffeTests/Input_000/test.prototxt
new file mode 100644
index 000000000..2d0571382
--- /dev/null
+++ b/res/BVLCCaffeTests/Input_000/test.prototxt
@@ -0,0 +1,8 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 3 dim: 244 dim: 244 }
+ }
+}
diff --git a/res/BVLCCaffeTests/Input_001/INFERENCE b/res/BVLCCaffeTests/Input_001/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Input_001/INFERENCE
diff --git a/res/BVLCCaffeTests/Input_001/test.prototxt b/res/BVLCCaffeTests/Input_001/test.prototxt
new file mode 100644
index 000000000..eece8b77c
--- /dev/null
+++ b/res/BVLCCaffeTests/Input_001/test.prototxt
@@ -0,0 +1,12 @@
+layer {
+ name: "data1"
+ type: "Input"
+ top: "data1"
+ input_param { shape: { dim: 1 dim: 3 dim: 15 dim: 15 } }
+}
+layer {
+ name: "data2"
+ type: "Input"
+ top: "data2"
+ input_param { shape: { dim: 1 dim: 3 dim: 15 dim: 15 } }
+}
diff --git a/res/BVLCCaffeTests/Pooling_000/INFERENCE b/res/BVLCCaffeTests/Pooling_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_000/INFERENCE
diff --git a/res/BVLCCaffeTests/Pooling_000/test.prototxt b/res/BVLCCaffeTests/Pooling_000/test.prototxt
new file mode 100644
index 000000000..8ed0c3828
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_000/test.prototxt
@@ -0,0 +1,16 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 16 dim: 16 } }
+}
+layer {
+ name: "pool"
+ type: "Pooling"
+ bottom: "data"
+ top: "pool"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ }
+}
diff --git a/res/BVLCCaffeTests/Pooling_001/INFERENCE b/res/BVLCCaffeTests/Pooling_001/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_001/INFERENCE
diff --git a/res/BVLCCaffeTests/Pooling_001/test.prototxt b/res/BVLCCaffeTests/Pooling_001/test.prototxt
new file mode 100644
index 000000000..1ecbb96bc
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_001/test.prototxt
@@ -0,0 +1,17 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 15 dim: 15 } }
+}
+layer {
+ name: "pool"
+ type: "Pooling"
+ bottom: "data"
+ top: "pool"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ stride: 2
+ }
+}
diff --git a/res/BVLCCaffeTests/Pooling_002/INFERENCE b/res/BVLCCaffeTests/Pooling_002/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_002/INFERENCE
diff --git a/res/BVLCCaffeTests/Pooling_002/test.prototxt b/res/BVLCCaffeTests/Pooling_002/test.prototxt
new file mode 100644
index 000000000..f199bdc60
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_002/test.prototxt
@@ -0,0 +1,16 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 16 dim: 16 } }
+}
+layer {
+ name: "pool"
+ type: "Pooling"
+ bottom: "data"
+ top: "pool"
+ pooling_param {
+ pool: AVE
+ kernel_size: 3
+ }
+}
diff --git a/res/BVLCCaffeTests/Pooling_003/INFERENCE b/res/BVLCCaffeTests/Pooling_003/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_003/INFERENCE
diff --git a/res/BVLCCaffeTests/Pooling_003/test.prototxt b/res/BVLCCaffeTests/Pooling_003/test.prototxt
new file mode 100644
index 000000000..e25855fa8
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_003/test.prototxt
@@ -0,0 +1,17 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 15 dim: 15 } }
+}
+layer {
+ name: "pool"
+ type: "Pooling"
+ bottom: "data"
+ top: "pool"
+ pooling_param {
+ pool: AVE
+ kernel_size: 3
+ stride: 2
+ }
+}
diff --git a/res/BVLCCaffeTests/Pooling_004/INFERENCE b/res/BVLCCaffeTests/Pooling_004/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_004/INFERENCE
diff --git a/res/BVLCCaffeTests/Pooling_004/test.prototxt b/res/BVLCCaffeTests/Pooling_004/test.prototxt
new file mode 100644
index 000000000..533a01c88
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_004/test.prototxt
@@ -0,0 +1,17 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 16 dim: 16 } }
+}
+layer {
+ name: "pool"
+ type: "Pooling"
+ bottom: "data"
+ top: "pool"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ pad: 1
+ }
+}
diff --git a/res/BVLCCaffeTests/Pooling_005/INFERENCE b/res/BVLCCaffeTests/Pooling_005/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_005/INFERENCE
diff --git a/res/BVLCCaffeTests/Pooling_005/test.prototxt b/res/BVLCCaffeTests/Pooling_005/test.prototxt
new file mode 100644
index 000000000..36475c71e
--- /dev/null
+++ b/res/BVLCCaffeTests/Pooling_005/test.prototxt
@@ -0,0 +1,17 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 15 dim: 15 } }
+}
+layer {
+ name: "pool"
+ type: "Pooling"
+ bottom: "data"
+ top: "pool"
+ pooling_param {
+ pool: AVE
+ kernel_size: 3
+ pad: 1
+ }
+}
diff --git a/res/BVLCCaffeTests/ReLU_000/INFERENCE b/res/BVLCCaffeTests/ReLU_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/ReLU_000/INFERENCE
diff --git a/res/BVLCCaffeTests/ReLU_000/test.prototxt b/res/BVLCCaffeTests/ReLU_000/test.prototxt
new file mode 100644
index 000000000..5fe2a938e
--- /dev/null
+++ b/res/BVLCCaffeTests/ReLU_000/test.prototxt
@@ -0,0 +1,14 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape: { dim: 1 dim: 2 dim: 5 dim: 3 }
+ }
+}
+layer {
+ name: "relu"
+ type: "ReLU"
+ bottom: "data"
+ top: "data"
+}
diff --git a/res/BVLCCaffeTests/Regression_0000/INFERENCE b/res/BVLCCaffeTests/Regression_0000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Regression_0000/INFERENCE
diff --git a/res/BVLCCaffeTests/Regression_0000/test.prototxt b/res/BVLCCaffeTests/Regression_0000/test.prototxt
new file mode 100644
index 000000000..5927d692f
--- /dev/null
+++ b/res/BVLCCaffeTests/Regression_0000/test.prototxt
@@ -0,0 +1,17 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 15 dim: 15 } }
+}
+layer {
+ name: "pool"
+ type: "Pooling"
+ bottom: "data"
+ top: "pool"
+ pooling_param {
+ pool: AVE
+ kernel_h: 3
+ kernel_w: 1
+ }
+}
diff --git a/res/BVLCCaffeTests/Scale_000/INFERENCE b/res/BVLCCaffeTests/Scale_000/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Scale_000/INFERENCE
diff --git a/res/BVLCCaffeTests/Scale_000/test.prototxt b/res/BVLCCaffeTests/Scale_000/test.prototxt
new file mode 100644
index 000000000..2f628ec6c
--- /dev/null
+++ b/res/BVLCCaffeTests/Scale_000/test.prototxt
@@ -0,0 +1,16 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 16 dim: 16 } }
+}
+layer {
+ name: "scale"
+ type: "Scale"
+ bottom: "data"
+ top: "scale"
+ scale_param {
+ filler { type: "gaussian" std: 0.01 }
+ bias_term: false
+ }
+}
diff --git a/res/BVLCCaffeTests/Scale_001/INFERENCE b/res/BVLCCaffeTests/Scale_001/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/Scale_001/INFERENCE
diff --git a/res/BVLCCaffeTests/Scale_001/test.prototxt b/res/BVLCCaffeTests/Scale_001/test.prototxt
new file mode 100644
index 000000000..d15be8ff3
--- /dev/null
+++ b/res/BVLCCaffeTests/Scale_001/test.prototxt
@@ -0,0 +1,17 @@
+layer {
+ name: "data"
+ type: "Input"
+ top: "data"
+ input_param { shape: { dim: 1 dim: 3 dim: 16 dim: 16 } }
+}
+layer {
+ name: "scale"
+ type: "Scale"
+ bottom: "data"
+ top: "scale"
+ scale_param {
+ filler { type: "gaussian" std: 0.01 }
+ bias_term: true
+ bias_filler { type: "gaussian" std: 0.01 }
+ }
+}
diff --git a/res/BVLCCaffeTests/inception_c1/INFERENCE b/res/BVLCCaffeTests/inception_c1/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/inception_c1/INFERENCE
diff --git a/res/BVLCCaffeTests/inception_c1/test.prototxt b/res/BVLCCaffeTests/inception_c1/test.prototxt
new file mode 100644
index 000000000..7f2df4571
--- /dev/null
+++ b/res/BVLCCaffeTests/inception_c1/test.prototxt
@@ -0,0 +1,252 @@
+name: "inception_c1"
+layer {
+ name: "input"
+ type: "Input"
+ top: "reduction_b_concat"
+ input_param {
+ shape {
+ dim: 1
+ dim: 3
+ dim: 32
+ dim: 32
+ }
+ }
+}
+layer {
+ name: "inception_c1_1x1_2"
+ type: "Convolution"
+ bottom: "reduction_b_concat"
+ top: "inception_c1_1x1_2"
+ convolution_param {
+ bias_term: false
+ num_output: 2
+ pad: 0
+ kernel_size: 1
+ stride: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_1x1_2_relu"
+ type: "ReLU"
+ bottom: "inception_c1_1x1_2"
+ top: "inception_c1_1x1_2"
+}
+layer {
+ name: "inception_c1_1x3_reduce"
+ type: "Convolution"
+ bottom: "reduction_b_concat"
+ top: "inception_c1_1x3_reduce"
+ convolution_param {
+ bias_term: false
+ num_output: 3
+ pad: 0
+ kernel_size: 1
+ stride: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_1x3_reduce_relu"
+ type: "ReLU"
+ bottom: "inception_c1_1x3_reduce"
+ top: "inception_c1_1x3_reduce"
+}
+layer {
+ name: "inception_c1_1x3"
+ type: "Convolution"
+ bottom: "inception_c1_1x3_reduce"
+ top: "inception_c1_1x3"
+ convolution_param {
+ bias_term: false
+ num_output: 4
+ stride: 1
+ pad_h: 0
+ pad_w: 1
+ kernel_h: 1
+ kernel_w: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_1x3_relu"
+ type: "ReLU"
+ bottom: "inception_c1_1x3"
+ top: "inception_c1_1x3"
+}
+layer {
+ name: "inception_c1_3x1"
+ type: "Convolution"
+ bottom: "inception_c1_1x3_reduce"
+ top: "inception_c1_3x1"
+ convolution_param {
+ bias_term: false
+ num_output: 5
+ stride: 1
+ pad_h: 1
+ pad_w: 0
+ kernel_h: 3
+ kernel_w: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_3x1_relu"
+ type: "ReLU"
+ bottom: "inception_c1_3x1"
+ top: "inception_c1_3x1"
+}
+layer {
+ name: "inception_c1_3x3_reduce"
+ type: "Convolution"
+ bottom: "reduction_b_concat"
+ top: "inception_c1_3x3_reduce"
+ convolution_param {
+ bias_term: false
+ num_output: 5
+ pad: 0
+ kernel_size: 1
+ stride: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_3x3_reduce_relu"
+ type: "ReLU"
+ bottom: "inception_c1_3x3_reduce"
+ top: "inception_c1_3x3_reduce"
+}
+layer {
+ name: "inception_c1_3x3"
+ type: "Convolution"
+ bottom: "inception_c1_3x3_reduce"
+ top: "inception_c1_3x3"
+ convolution_param {
+ bias_term: false
+ num_output: 6
+ pad: 1
+ kernel_size: 3
+ stride: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_3x3_relu"
+ type: "ReLU"
+ bottom: "inception_c1_3x3"
+ top: "inception_c1_3x3"
+}
+layer {
+ name: "inception_c1_1x3_2"
+ type: "Convolution"
+ bottom: "inception_c1_3x3"
+ top: "inception_c1_1x3_2"
+ convolution_param {
+ bias_term: false
+ num_output: 7
+ stride: 1
+ pad_h: 0
+ pad_w: 1
+ kernel_h: 1
+ kernel_w: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_1x3_2_relu"
+ type: "ReLU"
+ bottom: "inception_c1_1x3_2"
+ top: "inception_c1_1x3_2"
+}
+layer {
+ name: "inception_c1_3x1_2"
+ type: "Convolution"
+ bottom: "inception_c1_3x3"
+ top: "inception_c1_3x1_2"
+ convolution_param {
+ bias_term: false
+ num_output: 8
+ stride: 1
+ pad_h: 1
+ pad_w: 0
+ kernel_h: 3
+ kernel_w: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_3x1_2_relu"
+ type: "ReLU"
+ bottom: "inception_c1_3x1_2"
+ top: "inception_c1_3x1_2"
+}
+layer {
+ name: "inception_c1_pool"
+ type: "Pooling"
+ bottom: "reduction_b_concat"
+ top: "inception_c1_pool"
+ pooling_param {
+ pool: AVE
+ kernel_size: 3
+ stride: 1
+ pad: 1
+ }
+}
+layer {
+ name: "inception_c1_1x1"
+ type: "Convolution"
+ bottom: "inception_c1_pool"
+ top: "inception_c1_1x1"
+ convolution_param {
+ bias_term: false
+ num_output: 10
+ pad: 0
+ kernel_size: 1
+ stride: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ }
+}
+layer {
+ name: "inception_c1_1x1_relu"
+ type: "ReLU"
+ bottom: "inception_c1_1x1"
+ top: "inception_c1_1x1"
+}
+layer {
+ name: "inception_c1_concat"
+ type: "Concat"
+ bottom: "inception_c1_1x1_2"
+ bottom: "inception_c1_1x3"
+ bottom: "inception_c1_3x1"
+ bottom: "inception_c1_1x3_2"
+ bottom: "inception_c1_3x1_2"
+ bottom: "inception_c1_1x1"
+ top: "inception_c1_concat"
+}
diff --git a/res/BVLCCaffeTests/residual/INFERENCE b/res/BVLCCaffeTests/residual/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/residual/INFERENCE
diff --git a/res/BVLCCaffeTests/residual/test.prototxt b/res/BVLCCaffeTests/residual/test.prototxt
new file mode 100644
index 000000000..8ced8b6d5
--- /dev/null
+++ b/res/BVLCCaffeTests/residual/test.prototxt
@@ -0,0 +1,72 @@
+name: "residual"
+layer {
+ name: "input"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape {
+ dim: 1
+ dim: 3
+ dim: 31
+ dim: 31
+ }
+ }
+}
+layer {
+ name: "MaxPool2dBackward4"
+ type: "Pooling"
+ bottom: "data"
+ top: "MaxPool2dBackward4"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ stride: 2
+ pad: 1
+ }
+}
+layer {
+ name: "ConvNdBackward5"
+ type: "Convolution"
+ bottom: "MaxPool2dBackward4"
+ top: "ConvNdBackward5"
+ convolution_param {
+ num_output: 5
+ pad_h: 1
+ pad_w: 1
+ kernel_h: 3
+ kernel_w: 3
+ stride: 1
+ bias_term: false
+ }
+}
+layer {
+ name: "ThresholdBackward7"
+ type: "ReLU"
+ bottom: "ConvNdBackward5"
+ top: "ConvNdBackward5"
+}
+layer {
+ name: "ConvNdBackward8"
+ type: "Convolution"
+ bottom: "ConvNdBackward5"
+ top: "ConvNdBackward8"
+ convolution_param {
+ num_output: 3
+ pad_h: 1
+ pad_w: 1
+ kernel_h: 3
+ kernel_w: 3
+ stride: 1
+ bias_term: false
+ }
+}
+layer {
+ name: "AddBackward11"
+ type: "Eltwise"
+ bottom: "ConvNdBackward8"
+ bottom: "MaxPool2dBackward4"
+ top: "AddBackward11"
+ eltwise_param {
+ operation: SUM
+ }
+}
diff --git a/res/BVLCCaffeTests/residual_bn/INFERENCE b/res/BVLCCaffeTests/residual_bn/INFERENCE
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/res/BVLCCaffeTests/residual_bn/INFERENCE
diff --git a/res/BVLCCaffeTests/residual_bn/test.prototxt b/res/BVLCCaffeTests/residual_bn/test.prototxt
new file mode 100644
index 000000000..eaa321e8d
--- /dev/null
+++ b/res/BVLCCaffeTests/residual_bn/test.prototxt
@@ -0,0 +1,110 @@
+name: "residual_bn"
+layer {
+ name: "input"
+ type: "Input"
+ top: "data"
+ input_param {
+ shape {
+ dim: 1
+ dim: 3
+ dim: 31
+ dim: 31
+ }
+ }
+}
+layer {
+ name: "MaxPool2dBackward4"
+ type: "Pooling"
+ bottom: "data"
+ top: "MaxPool2dBackward4"
+ pooling_param {
+ pool: MAX
+ kernel_size: 3
+ stride: 2
+ pad: 1
+ }
+}
+layer {
+ name: "ConvNdBackward5"
+ type: "Convolution"
+ bottom: "MaxPool2dBackward4"
+ top: "ConvNdBackward5"
+ convolution_param {
+ num_output: 5
+ pad_h: 1
+ pad_w: 1
+ kernel_h: 3
+ kernel_w: 3
+ stride: 1
+ bias_term: false
+ }
+}
+layer {
+ name: "BatchNormBackward6_bn"
+ type: "BatchNorm"
+ bottom: "ConvNdBackward5"
+ top: "BatchNormBackward6"
+ batch_norm_param {
+ use_global_stats: true
+ eps: 1e-05
+ }
+}
+layer {
+ name: "BatchNormBackward6_scale"
+ type: "Scale"
+ bottom: "BatchNormBackward6"
+ top: "BatchNormBackward6"
+ scale_param {
+ bias_term: true
+ }
+}
+layer {
+ name: "ThresholdBackward7"
+ type: "ReLU"
+ bottom: "BatchNormBackward6"
+ top: "BatchNormBackward6"
+}
+layer {
+ name: "ConvNdBackward8"
+ type: "Convolution"
+ bottom: "BatchNormBackward6"
+ top: "ConvNdBackward8"
+ convolution_param {
+ num_output: 3
+ pad_h: 1
+ pad_w: 1
+ kernel_h: 3
+ kernel_w: 3
+ stride: 1
+ bias_term: false
+ }
+}
+layer {
+ name: "BatchNormBackward9_bn"
+ type: "BatchNorm"
+ bottom: "ConvNdBackward8"
+ top: "BatchNormBackward9"
+ batch_norm_param {
+ use_global_stats: true
+ eps: 1e-05
+ }
+}
+layer {
+ name: "BatchNormBackward9_scale"
+ type: "Scale"
+ bottom: "BatchNormBackward9"
+ top: "BatchNormBackward9"
+ scale_param {
+ bias_term: true
+ }
+}
+layer {
+ name: "AddBackward11"
+ type: "Eltwise"
+ bottom: "BatchNormBackward9"
+ bottom: "MaxPool2dBackward4"
+ top: "AddBackward11"
+ eltwise_param {
+ operation: SUM
+ }
+}
diff --git a/res/TensorFlowTests/NET_0000/test.info b/res/TensorFlowTests/NET_0000/test.info
new file mode 100644
index 000000000..bdde9126b
--- /dev/null
+++ b/res/TensorFlowTests/NET_0000/test.info
@@ -0,0 +1,3 @@
+# conv2d with VALID, stride = 1, 1
+input, ifm:0, TF_FLOAT, [1, 5, 5, 3]
+output, maxpool:0, TF_FLOAT, [1, 4, 4, 2]
diff --git a/res/TensorFlowTests/NET_0000/test.pbtxt b/res/TensorFlowTests/NET_0000/test.pbtxt
new file mode 100644
index 000000000..5aafa31db
--- /dev/null
+++ b/res/TensorFlowTests/NET_0000/test.pbtxt
@@ -0,0 +1,102 @@
+# Small Conv2D-MaxPool network
+node {
+ name: "ifm"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 5 }
+ dim { size: 5 }
+ dim { size: 3 }
+ }
+ }
+ }
+}
+node {
+ name: "ker"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 2 }
+ dim { size: 2 }
+ dim { size: 3 }
+ dim { size: 2 }
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "ofm"
+ op: "Conv2D"
+ input: "ifm"
+ input: "ker"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
+node {
+ name: "maxpool"
+ op: "MaxPool"
+ input: "ofm"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0001/test.info b/res/TensorFlowTests/NET_0001/test.info
new file mode 100644
index 000000000..0e8caae1c
--- /dev/null
+++ b/res/TensorFlowTests/NET_0001/test.info
@@ -0,0 +1,2 @@
+input, in:0, TF_FLOAT, [1, 5, 5, 3]
+output, out:0, TF_FLOAT, [1, 4, 4, 5]
diff --git a/res/TensorFlowTests/NET_0001/test.pbtxt b/res/TensorFlowTests/NET_0001/test.pbtxt
new file mode 100644
index 000000000..fc84d61a6
--- /dev/null
+++ b/res/TensorFlowTests/NET_0001/test.pbtxt
@@ -0,0 +1,160 @@
+# Small Conv2D-BiasAdd-Conv2D network
+node {
+ name: "in"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 } # N
+ dim { size: 5 } # H
+ dim { size: 5 } # W
+ dim { size: 3 } # C
+ }
+ }
+ }
+}
+node {
+ name: "ker"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 2 } # H
+ dim { size: 2 } # W
+ dim { size: 3 } # I
+ dim { size: 2 } # O
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "conv"
+ op: "Conv2D"
+ input: "in"
+ input: "ker"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
+node {
+ name: "bias"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 2 }
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "badd"
+ op: "BiasAdd"
+ input: "conv"
+ input: "bias"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+}
+node {
+ name: "ker1"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 1 } # H
+ dim { size: 1 } # W
+ dim { size: 2 } # I
+ dim { size: 5 } # O
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "out"
+ op: "Conv2D"
+ input: "badd"
+ input: "ker1"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0002/test.info b/res/TensorFlowTests/NET_0002/test.info
new file mode 100644
index 000000000..0e8caae1c
--- /dev/null
+++ b/res/TensorFlowTests/NET_0002/test.info
@@ -0,0 +1,2 @@
+input, in:0, TF_FLOAT, [1, 5, 5, 3]
+output, out:0, TF_FLOAT, [1, 4, 4, 5]
diff --git a/res/TensorFlowTests/NET_0002/test.pbtxt b/res/TensorFlowTests/NET_0002/test.pbtxt
new file mode 100644
index 000000000..692651aa4
--- /dev/null
+++ b/res/TensorFlowTests/NET_0002/test.pbtxt
@@ -0,0 +1,135 @@
+# Small Conv2D-ReLU-Conv2D network
+node {
+ name: "in"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 } # N
+ dim { size: 5 } # H
+ dim { size: 5 } # W
+ dim { size: 3 } # C
+ }
+ }
+ }
+}
+node {
+ name: "ker"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 2 } # H
+ dim { size: 2 } # W
+ dim { size: 3 } # I
+ dim { size: 2 } # O
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "conv"
+ op: "Conv2D"
+ input: "in"
+ input: "ker"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
+node {
+ name: "relu"
+ op: "Relu"
+ input: "conv"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+}
+node {
+ name: "ker1"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 1 } # H
+ dim { size: 1 } # W
+ dim { size: 2 } # I
+ dim { size: 5 } # O
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "out"
+ op: "Conv2D"
+ input: "relu"
+ input: "ker1"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0003/test.info b/res/TensorFlowTests/NET_0003/test.info
new file mode 100644
index 000000000..ee7d13cd1
--- /dev/null
+++ b/res/TensorFlowTests/NET_0003/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 5]
+output, FusedBatchNorm:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/NET_0003/test.pbtxt b/res/TensorFlowTests/NET_0003/test.pbtxt
new file mode 100644
index 000000000..cf6b1a7db
--- /dev/null
+++ b/res/TensorFlowTests/NET_0003/test.pbtxt
@@ -0,0 +1,244 @@
+# Small Conv2D + FusedBatchNorm network
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1 # N
+ }
+ dim {
+ size: 3 # H
+ }
+ dim {
+ size: 3 # W
+ }
+ dim {
+ size: 5 # C
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2 # H
+ }
+ dim {
+ size: 2 # W
+ }
+ dim {
+ size: 5 # I
+ }
+ dim {
+ size: 1 # O
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "Conv2D"
+ op: "Conv2D"
+ input: "Placeholder"
+ input: "Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "scale"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm/mean"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm/variance"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm"
+ op: "FusedBatchNorm"
+ input: "Conv2D"
+ input: "scale"
+ input: "offset"
+ input: "FusedBatchNorm/mean"
+ input: "FusedBatchNorm/variance"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "epsilon"
+ value {
+ f: 0.0010000000474974513
+ }
+ }
+ attr {
+ key: "is_training"
+ value {
+ b: false
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0003/test.py b/res/TensorFlowTests/NET_0003/test.py
new file mode 100644
index 000000000..b5bad2dae
--- /dev/null
+++ b/res/TensorFlowTests/NET_0003/test.py
@@ -0,0 +1,15 @@
+# TF_SMALL_NET_0003/test.pbtxt is created with the script below
+
+# Version info
+# - Tensorflow : 1.13.1
+# - Python : 3.5.2
+
+import tensorflow as tf
+
+input0 = tf.placeholder(tf.float32, [1, 3, 3, 5])
+filter0 = tf.constant(1.0, shape=[2, 2, 5, 1])
+conv = tf.nn.conv2d(input0, filter=filter0, strides=[1, 1, 1, 1], padding='SAME')
+fbn = tf.nn.fused_batch_norm(
+ conv, scale=[1.0], offset=[0.0], mean=[0.0], variance=[1.0], is_training=False)
+
+print(tf.get_default_graph().as_graph_def())
diff --git a/res/TensorFlowTests/NET_0004/test.info b/res/TensorFlowTests/NET_0004/test.info
new file mode 100644
index 000000000..75a892250
--- /dev/null
+++ b/res/TensorFlowTests/NET_0004/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 5]
+output, FusedBatchNorm:0, TF_FLOAT, [1, 3, 3, 10]
diff --git a/res/TensorFlowTests/NET_0004/test.pbtxt b/res/TensorFlowTests/NET_0004/test.pbtxt
new file mode 100644
index 000000000..18cdd65ff
--- /dev/null
+++ b/res/TensorFlowTests/NET_0004/test.pbtxt
@@ -0,0 +1,218 @@
+# Small DepthwiseConv2D + FusedBatchNorm network
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 5
+ }
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1
+ }
+ }
+ }
+}
+node {
+ name: "depthwise/Shape"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 4
+ }
+ }
+ int_val: 2
+ int_val: 2
+ int_val: 5
+ int_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "depthwise/dilation_rate"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ int_val: 1
+ int_val: 1
+ }
+ }
+ }
+}
+node {
+ name: "depthwise"
+ op: "DepthwiseConv2dNative"
+ input: "Placeholder"
+ input: "Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
+node {
+ name: "Const_1"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 10
+ }
+ }
+ float_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm"
+ op: "FusedBatchNorm"
+ input: "depthwise"
+ input: "Const_1"
+ input: "Const_1"
+ input: "Const_1"
+ input: "Const_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "epsilon"
+ value {
+ f: 0.001
+ }
+ }
+ attr {
+ key: "is_training"
+ value {
+ b: false
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0004/test.py b/res/TensorFlowTests/NET_0004/test.py
new file mode 100644
index 000000000..a0c790d79
--- /dev/null
+++ b/res/TensorFlowTests/NET_0004/test.py
@@ -0,0 +1,16 @@
+# TF_SMALL_NET_0004/test.pbtxt is created with the script below
+
+# Version info
+# - Tensorflow : 1.13.1
+# - Python : 3.5.2
+
+import tensorflow as tf
+
+input0 = tf.placeholder(tf.float32, [1, 3, 3, 5])
+filter0 = tf.constant(1.0, shape=[2, 2, 5, 2])
+dconv = tf.nn.depthwise_conv2d(input0, filter0, [1, 1, 1, 1], 'SAME')
+const = tf.constant(2.0, shape=[10])
+fbn = tf.nn.fused_batch_norm(
+ x=dconv, scale=const, offset=const, mean=const, variance=const, is_training=False)
+
+print(tf.get_default_graph().as_graph_def())
diff --git a/res/TensorFlowTests/NET_0005/test.info b/res/TensorFlowTests/NET_0005/test.info
new file mode 100644
index 000000000..379959c05
--- /dev/null
+++ b/res/TensorFlowTests/NET_0005/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 2, 3, 2]
+output, Reshape2:0, TF_FLOAT, [1, 2, 3, 2]
diff --git a/res/TensorFlowTests/NET_0005/test.pbtxt b/res/TensorFlowTests/NET_0005/test.pbtxt
new file mode 100644
index 000000000..16cc44005
--- /dev/null
+++ b/res/TensorFlowTests/NET_0005/test.pbtxt
@@ -0,0 +1,120 @@
+# Small Shape + Reshape network
+# This test is quite similar to the last part of InceptionV3
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Shape"
+ op: "Shape"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "out_type"
+ value {
+ type: DT_INT32
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ int_val: 3
+ int_val: 4
+ }
+ }
+ }
+}
+node {
+ name: "Reshape1"
+ op: "Reshape"
+ input: "Placeholder"
+ input: "Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+}
+node {
+ name: "Relu"
+ op: "Relu"
+ input: "Reshape1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "Reshape2"
+ op: "Reshape"
+ input: "Relu"
+ input: "Shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0006/test.info b/res/TensorFlowTests/NET_0006/test.info
new file mode 100644
index 000000000..f0b1e20ea
--- /dev/null
+++ b/res/TensorFlowTests/NET_0006/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 7, 7, 1]
+output, maxpool:0, TF_FLOAT, [1, 7, 7, 2]
diff --git a/res/TensorFlowTests/NET_0006/test.pbtxt b/res/TensorFlowTests/NET_0006/test.pbtxt
new file mode 100644
index 000000000..e92b60629
--- /dev/null
+++ b/res/TensorFlowTests/NET_0006/test.pbtxt
@@ -0,0 +1,149 @@
+# A simple network that has "Concat" in between feature operations.
+#
+# Placeholder -+-> MaxPool2D -+-> Concat -> MaxPool2D
+# | |
+# +-> MaxPool2D -+
+#
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 } # N
+ dim { size: 7 } # H
+ dim { size: 7 } # W
+ dim { size: 1 } # C
+ }
+ }
+ }
+}
+node {
+ name: "maxpool_1"
+ op: "MaxPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
+node {
+ name: "maxpool_2"
+ op: "MaxPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
+node {
+ name: "axis"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape { }
+ int_val: 3
+ }
+ }
+ }
+}
+node {
+ name: "concat"
+ op: "ConcatV2"
+ input: "maxpool_1"
+ input: "maxpool_2"
+ input: "axis"
+ attr {
+ key: "N"
+ value { i: 2 }
+ }
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+}
+node {
+ name: "maxpool"
+ op: "MaxPool"
+ input: "concat"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0007/test.info b/res/TensorFlowTests/NET_0007/test.info
new file mode 100644
index 000000000..59c3f6122
--- /dev/null
+++ b/res/TensorFlowTests/NET_0007/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 5]
+output, Mul:0, TF_FLOAT, [1, 3, 3, 2]
diff --git a/res/TensorFlowTests/NET_0007/test.pbtxt b/res/TensorFlowTests/NET_0007/test.pbtxt
new file mode 100644
index 000000000..10f9f35a5
--- /dev/null
+++ b/res/TensorFlowTests/NET_0007/test.pbtxt
@@ -0,0 +1,151 @@
+# A simple network that has "Conv2D" + "Mul"
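+# (the [2]-element "mulparam" constant broadcasts across the [1, 3, 3, 2]
+# Conv2D output)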
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "weights"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 5
+ }
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1.100000023841858
+ }
+ }
+ }
+}
+node {
+ name: "Conv2D"
+ op: "Conv2D"
+ input: "Placeholder"
+ input: "weights"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "mulparam"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ float_val: 2.0
+ }
+ }
+ }
+}
+node {
+ name: "Mul"
+ op: "Mul"
+ input: "Conv2D"
+ input: "mulparam"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0008/test.info b/res/TensorFlowTests/NET_0008/test.info
new file mode 100644
index 000000000..ccdd6fd44
--- /dev/null
+++ b/res/TensorFlowTests/NET_0008/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 5]
+output, Add:0, TF_FLOAT, [1, 3, 3, 2]
diff --git a/res/TensorFlowTests/NET_0008/test.pbtxt b/res/TensorFlowTests/NET_0008/test.pbtxt
new file mode 100644
index 000000000..bbaba7ded
--- /dev/null
+++ b/res/TensorFlowTests/NET_0008/test.pbtxt
@@ -0,0 +1,151 @@
+# A simple network that has "Conv2D" + "Add"
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "weights"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 5
+ }
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1.100000023841858
+ }
+ }
+ }
+}
+node {
+ name: "Conv2D"
+ op: "Conv2D"
+ input: "Placeholder"
+ input: "weights"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "addparam"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ float_val: 2.0
+ }
+ }
+ }
+}
+node {
+ name: "Add"
+ op: "Add"
+ input: "Conv2D"
+ input: "addparam"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0009/test.info b/res/TensorFlowTests/NET_0009/test.info
new file mode 100644
index 000000000..ccdd6fd44
--- /dev/null
+++ b/res/TensorFlowTests/NET_0009/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 5]
+output, Add:0, TF_FLOAT, [1, 3, 3, 2]
diff --git a/res/TensorFlowTests/NET_0009/test.pbtxt b/res/TensorFlowTests/NET_0009/test.pbtxt
new file mode 100644
index 000000000..a4eee69ae
--- /dev/null
+++ b/res/TensorFlowTests/NET_0009/test.pbtxt
@@ -0,0 +1,194 @@
+# A simple network that has "Conv2D" + "BiasAdd" + "Add"
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "weights"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 5
+ }
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1.100000023841858
+ }
+ }
+ }
+}
+node {
+ name: "Conv2D"
+ op: "Conv2D"
+ input: "Placeholder"
+ input: "weights"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "BiasAdd/bias"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1.0
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "BiasAdd"
+ op: "BiasAdd"
+ input: "Conv2D"
+ input: "BiasAdd/bias"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "addparam"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ float_val: 2.0
+ }
+ }
+ }
+}
+node {
+ name: "Add"
+ op: "Add"
+ input: "BiasAdd"
+ input: "addparam"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0010/test.info b/res/TensorFlowTests/NET_0010/test.info
new file mode 100644
index 000000000..bcbc41892
--- /dev/null
+++ b/res/TensorFlowTests/NET_0010/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 2]
+output, Add:0, TF_FLOAT, [1, 3, 3, 2]
diff --git a/res/TensorFlowTests/NET_0010/test.pbtxt b/res/TensorFlowTests/NET_0010/test.pbtxt
new file mode 100644
index 000000000..b9c6c9987
--- /dev/null
+++ b/res/TensorFlowTests/NET_0010/test.pbtxt
@@ -0,0 +1,109 @@
+# A simple network that has "BiasAdd" + "Add"
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "bias_add_01/bias"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1.0
+ float_val: -1.0
+ }
+ }
+ }
+}
+node {
+ name: "bias_add_01"
+ op: "BiasAdd"
+ input: "Placeholder"
+ input: "bias_add_01/bias"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "addparam"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ float_val: 2.0
+ }
+ }
+ }
+}
+node {
+ name: "Add"
+ op: "Add"
+ input: "bias_add_01"
+ input: "addparam"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0011/test.info b/res/TensorFlowTests/NET_0011/test.info
new file mode 100644
index 000000000..7fb5ec2c0
--- /dev/null
+++ b/res/TensorFlowTests/NET_0011/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [2]
+output, concat_out:0, TF_FLOAT, [6]
diff --git a/res/TensorFlowTests/NET_0011/test.pbtxt b/res/TensorFlowTests/NET_0011/test.pbtxt
new file mode 100644
index 000000000..a44aba783
--- /dev/null
+++ b/res/TensorFlowTests/NET_0011/test.pbtxt
@@ -0,0 +1,129 @@
+# A simple network that includes a constant-foldable subgraph
+# (note: [] indicates shape, () indicates real value)
+#
+# input1 (const) -- relu -+ placeholder --+
+# (-1, 1) (0, 1) | shape [2] |
+# | |
+# input2 (const) -+-- concat -------- concat_out
+# (2, 3) | (0, 1, 2, 3) |
+# | |
+# axis (const)-+-------------------+
+#
+# will be the following after constant folding:
+#
+# Pull --+
+# shape [2] |
+# |
+# ConstGen------- TensorConcat -- Push
+# (0, 1, 2, 3) shape [6]
+#
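+# (Relu(-1, 1) = (0, 1), so the first ConcatV2 folds to the constant
+# (0, 1, 2, 3) shown as ConstGen above)
+#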
+node {
+ name: "input1"
+ op: "Const"
+ attr {
+ key: "dtype" value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape { dim { size: 2 } }
+ float_val: -1
+ float_val: 1
+ }
+ }
+ }
+}
+node {
+ name: "relu"
+ op: "Relu"
+ input: "input1"
+ attr { key: "T" value { type: DT_FLOAT } }
+}
+node {
+ name: "input2"
+ op: "Const"
+ attr {
+ key: "dtype" value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape { dim { size: 2 } }
+ float_val: 2
+ float_val: 3
+ }
+ }
+ }
+}
+node {
+ name: "axis"
+ op: "Const"
+ attr { key: "dtype" value { type: DT_INT32 } }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape { }
+ int_val: 0
+ }
+ }
+ }
+}
+node {
+ name: "concat"
+ op: "ConcatV2"
+ input: "relu"
+ input: "input2"
+ input: "axis"
+ attr {
+ key: "N"
+ value { i: 2 }
+ }
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+}
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype" value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 2 }
+ }
+ }
+ }
+}
+node {
+ name: "concat_out"
+ op: "ConcatV2"
+ input: "concat"
+ input: "placeholder"
+ input: "axis"
+ attr {
+ key: "N"
+ value { i: 2 }
+ }
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0012/test.info b/res/TensorFlowTests/NET_0012/test.info
new file mode 100644
index 000000000..b210407fb
--- /dev/null
+++ b/res/TensorFlowTests/NET_0012/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [6]
+output, reshape:0, TF_FLOAT, [3, 2]
diff --git a/res/TensorFlowTests/NET_0012/test.pbtxt b/res/TensorFlowTests/NET_0012/test.pbtxt
new file mode 100644
index 000000000..387dc92f6
--- /dev/null
+++ b/res/TensorFlowTests/NET_0012/test.pbtxt
@@ -0,0 +1,52 @@
+# Network with a dynamic reshape whose wildcard dimension is resolvable
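+# (a 6-element input reshaped with shape (-1, 2): the wildcard resolves to
+# 6 / 2 = 3, giving the [3, 2] output in test.info)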
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 6 }
+ }
+ }
+ }
+}
+node {
+ name: "shape"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim { size: 2 }
+ }
+ int_val: -1
+ int_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "reshape"
+ op: "Reshape"
+ input: "placeholder"
+ input: "shape"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tshape"
+ value { type: DT_INT32 }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0013/test.info b/res/TensorFlowTests/NET_0013/test.info
new file mode 100644
index 000000000..137cdfa8c
--- /dev/null
+++ b/res/TensorFlowTests/NET_0013/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [6]
+output, Reshape:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/NET_0013/test.pbtxt b/res/TensorFlowTests/NET_0013/test.pbtxt
new file mode 100644
index 000000000..a9c6aff98
--- /dev/null
+++ b/res/TensorFlowTests/NET_0013/test.pbtxt
@@ -0,0 +1,72 @@
+# Network containing a Shape node, which becomes Reshape's 'shape' input
+# Note that this is a workaround to test Shape
+#
+# (tensor)
+# Placeholder ---------- Reshape
+# /
+# Const --- Shape ---
+# (shape)
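+#
+# A possible way to generate this graph, following the HOW-TO-GENERATE style
+# of the other tests (a sketch, not an original recipe; names match the
+# pbtxt below):
+#
+# import tensorflow as tf
+# tensor = tf.placeholder(dtype=tf.float32, shape=[6], name='Placeholder')
+# const = tf.constant(1.0, shape=[2, 3], name='Const')
+# reshape = tf.reshape(tensor, tf.shape(const, name='Shape'), name='Reshape')
+# tf.get_default_graph().as_graph_def()
+#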
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 6 }
+ }
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 2 }
+ dim { size: 3 }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "Shape"
+ op: "Shape"
+ input: "Const"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "out_type"
+ value { type: DT_INT32 }
+ }
+}
+node {
+ name: "Reshape"
+ op: "Reshape"
+ input: "Placeholder"
+ input: "Shape"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tshape"
+ value { type: DT_INT32 }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0014/test.info b/res/TensorFlowTests/NET_0014/test.info
new file mode 100644
index 000000000..d3ea85a10
--- /dev/null
+++ b/res/TensorFlowTests/NET_0014/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [2, 1, 1, 3]
+output, reshape_2:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/NET_0014/test.pbtxt b/res/TensorFlowTests/NET_0014/test.pbtxt
new file mode 100644
index 000000000..d4bc8e698
--- /dev/null
+++ b/res/TensorFlowTests/NET_0014/test.pbtxt
@@ -0,0 +1,106 @@
+# Mock of the Epilogue, using ReLU instead of Softmax
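+#
+# Squeeze drops the two size-1 axes ([2, 1, 1, 3] -> [2, 3]), reshape_1 maps
+# that to (-1, 3), and reshape_2 restores the shape of the squeezed tensor
+# via the Shape node, giving the [2, 3] output in test.info.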
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 2 }
+ dim { size: 1 }
+ dim { size: 1 }
+ dim { size: 3 }
+ }
+ }
+ }
+}
+node {
+ name: "squeeze"
+ op: "Squeeze"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "squeeze_dims"
+ value {
+ list { i: 1 i: 2 }
+ }
+ }
+}
+node {
+ name: "Reshape/shape"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim { size: 2 }
+ }
+ int_val: -1
+ int_val: 3
+ }
+ }
+ }
+}
+node {
+ name: "reshape_1"
+ op: "Reshape"
+ input: "squeeze"
+ input: "Reshape/shape"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tshape"
+ value { type: DT_INT32 }
+ }
+}
+node {
+ name: "relu"
+ op: "Relu"
+ input: "reshape_1"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+}
+node {
+ name: "shape"
+ op: "Shape"
+ input: "squeeze"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "out_type"
+ value { type: DT_INT32 }
+ }
+}
+node {
+ name: "reshape_2"
+ op: "Reshape"
+ input: "relu"
+ input: "shape"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tshape"
+ value { type: DT_INT32 }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0015/test.info b/res/TensorFlowTests/NET_0015/test.info
new file mode 100644
index 000000000..dbbef9b3a
--- /dev/null
+++ b/res/TensorFlowTests/NET_0015/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 3]
+output, as_rsqrt:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/NET_0015/test.pbtxt b/res/TensorFlowTests/NET_0015/test.pbtxt
new file mode 100644
index 000000000..3d994e8d5
--- /dev/null
+++ b/res/TensorFlowTests/NET_0015/test.pbtxt
@@ -0,0 +1,74 @@
+# A simple network that simulates Rsqrt: 1.0 / Sqrt(input)
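+#
+# A possible way to generate this graph (a sketch in the style of the other
+# tests' HOW-TO-GENERATE notes; names match the pbtxt below):
+#
+# import tensorflow as tf
+# x = tf.placeholder(dtype=tf.float32, shape=[2, 3], name='Placeholder')
+# one = tf.constant(1.0, shape=[2, 3], name='Const')
+# out = tf.realdiv(one, tf.sqrt(x, name='SQRT_02'), name='as_rsqrt')
+# tf.get_default_graph().as_graph_def()
+#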
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "SQRT_02"
+ op: "Sqrt"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "as_rsqrt"
+ op: "RealDiv"
+ input: "Const"
+ input: "SQRT_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0016/test.info b/res/TensorFlowTests/NET_0016/test.info
new file mode 100644
index 000000000..ee7d13cd1
--- /dev/null
+++ b/res/TensorFlowTests/NET_0016/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 5]
+output, FusedBatchNorm:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/NET_0016/test.pbtxt b/res/TensorFlowTests/NET_0016/test.pbtxt
new file mode 100644
index 000000000..64e5c4194
--- /dev/null
+++ b/res/TensorFlowTests/NET_0016/test.pbtxt
@@ -0,0 +1,271 @@
+# Small Conv2D + FusedBatchNorm network with Identity
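+#
+# With scale = 1, offset = 0, mean = 0, and variance = 1 (the constants
+# below), the FusedBatchNorm is effectively an identity on the Conv2D
+# output, up to the 1e-3 epsilon.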
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1 # N
+ }
+ dim {
+ size: 3 # H
+ }
+ dim {
+ size: 3 # W
+ }
+ dim {
+ size: 5 # C
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2 # H
+ }
+ dim {
+ size: 2 # W
+ }
+ dim {
+ size: 5 # I
+ }
+ dim {
+ size: 1 # O
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "Conv2D"
+ op: "Conv2D"
+ input: "Placeholder"
+ input: "Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
+node {
+ name: "scale"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm/mean"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm/variance"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "offset/id"
+ op: "Identity"
+ input: "offset"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm/mean/id"
+ op: "Identity"
+ input: "FusedBatchNorm/mean"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm/variance/id"
+ op: "Identity"
+ input: "FusedBatchNorm/variance"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "FusedBatchNorm"
+ op: "FusedBatchNorm"
+ input: "Conv2D"
+ input: "scale"
+ input: "offset/id"
+ input: "FusedBatchNorm/mean/id"
+ input: "FusedBatchNorm/variance/id"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "epsilon"
+ value {
+ f: 0.0010000000474974513
+ }
+ }
+ attr {
+ key: "is_training"
+ value {
+ b: false
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0017/test.info b/res/TensorFlowTests/NET_0017/test.info
new file mode 100644
index 000000000..e9413cb98
--- /dev/null
+++ b/res/TensorFlowTests/NET_0017/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 8, 8, 1]
+output, avgpool2d:0, TF_FLOAT, [1, 1, 1, 1]
diff --git a/res/TensorFlowTests/NET_0017/test.pbtxt b/res/TensorFlowTests/NET_0017/test.pbtxt
new file mode 100644
index 000000000..e8cc76b70
--- /dev/null
+++ b/res/TensorFlowTests/NET_0017/test.pbtxt
@@ -0,0 +1,61 @@
+# HOW TO GENERATE:
+#
+# import tensorflow as tf
+# value = tf.placeholder(dtype=tf.float32, shape=[1, 8, 8, 1], name='placeholder')
+# output = tf.nn.avg_pool(value, [1, 8, 8, 1], [1, 1, 1, 1], 'VALID', name='avgpool2d')
+# tf.get_default_graph().as_graph_def()
+#
+# NOTE 1. The output shape is 1x1x1x1
+#
+# >>> tf.graph_util.tensor_shape_from_node_def_name(tf.get_default_graph(), 'avgpool2d')
+# TensorShape([Dimension(1), Dimension(1), Dimension(1), Dimension(1)])
+#
+# NOTE 2. This test corresponds to the last AvgPool node in inception v3 2018.04.27.
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 8 }
+ dim { size: 8 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "avgpool2d"
+ op: "AvgPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 8 i: 8 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0018/test.info b/res/TensorFlowTests/NET_0018/test.info
new file mode 100644
index 000000000..87f6fa795
--- /dev/null
+++ b/res/TensorFlowTests/NET_0018/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 4, 4, 1]
+output, avgpool2d:0, TF_FLOAT, [1, 4, 4, 1]
diff --git a/res/TensorFlowTests/NET_0018/test.pbtxt b/res/TensorFlowTests/NET_0018/test.pbtxt
new file mode 100644
index 000000000..3b8a15ef8
--- /dev/null
+++ b/res/TensorFlowTests/NET_0018/test.pbtxt
@@ -0,0 +1,63 @@
+# HOW TO GENERATE:
+#
+# import tensorflow as tf
+# value = tf.placeholder(dtype=tf.float32, shape=[1, 4, 4, 1], name='placeholder')
+# output = tf.nn.avg_pool(value, [1, 3, 3, 1], [1, 1, 1, 1], 'SAME', name='avgpool2d')
+# tf.get_default_graph().as_graph_def()
+#
+# NOTE 1. The output shape is 1x4x4x1
+#
+# >>> tf.graph_util.tensor_shape_from_node_def_name(tf.get_default_graph(), 'avgpool2d')
+# TensorShape([Dimension(1), Dimension(4), Dimension(4), Dimension(1)])
+#
+# NOTE 2. Almost all the AvgPool nodes in inception v3 2018.04.27 use this configuration.
+#
+# The only exception is "InceptionV3/Logits/AvgPool_1a_8x8/AvgPool" which performs global average pooling.
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 4 }
+ dim { size: 4 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "avgpool2d"
+ op: "AvgPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 3 i: 3 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "SAME" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0019/test.info b/res/TensorFlowTests/NET_0019/test.info
new file mode 100644
index 000000000..908883475
--- /dev/null
+++ b/res/TensorFlowTests/NET_0019/test.info
@@ -0,0 +1,2 @@
+input, ifm:0, TF_FLOAT, [1, 7, 7, 4]
+output, ofm:0, TF_FLOAT, [1, 3, 3, 6]
diff --git a/res/TensorFlowTests/NET_0019/test.pbtxt b/res/TensorFlowTests/NET_0019/test.pbtxt
new file mode 100644
index 000000000..076f4f619
--- /dev/null
+++ b/res/TensorFlowTests/NET_0019/test.pbtxt
@@ -0,0 +1,89 @@
+# HOW TO GENERATE:
+#
+# import tensorflow as tf
+# I = 4
+# O = 6
+# ifm = tf.placeholder(dtype=tf.float32, shape=[1, 7, 7, I], name='ifm')
+# ker = tf.constant(dtype=tf.float32, shape=[3, 3, I, O], name='ker', value=1.1)
+# ofm = tf.nn.conv2d(input=ifm, filter=ker, strides=[1, 2, 2, 1], padding='VALID', name='ofm')
+# tf.get_default_graph().as_graph_def()
+#
+# NOTE 1. The output shape is 1x3x3x6
+#
+# >>> tf.graph_util.tensor_shape_from_node_def_name(tf.get_default_graph(), 'ofm')
+# TensorShape([Dimension(1), Dimension(3), Dimension(3), Dimension(6)])
+#
+# NOTE 2. This test corresponds to the "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D" node
+#
+node {
+ name: "ifm"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 7 }
+ dim { size: 7 }
+ dim { size: 4 }
+ }
+ }
+ }
+}
+node {
+ name: "ker"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 3 }
+ dim { size: 3 }
+ dim { size: 4 }
+ dim { size: 6 }
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "ofm"
+ op: "Conv2D"
+ input: "ifm"
+ input: "ker"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 2 i: 2 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0020/test.info b/res/TensorFlowTests/NET_0020/test.info
new file mode 100644
index 000000000..d3ea85a10
--- /dev/null
+++ b/res/TensorFlowTests/NET_0020/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [2, 1, 1, 3]
+output, reshape_2:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/NET_0020/test.pbtxt b/res/TensorFlowTests/NET_0020/test.pbtxt
new file mode 100644
index 000000000..efd18d132
--- /dev/null
+++ b/res/TensorFlowTests/NET_0020/test.pbtxt
@@ -0,0 +1,112 @@
+# The Epilogue, the endmost part of inception v3, composed of Squeeze,
+# Reshape, Shape and Softmax
+#
+# The only difference from the original is the input shape:
+# - original has unknown batch and 1001 channels [?, 1, 1, 1001]
+# - this test has 2 batches and 3 channels [2, 1, 1, 3]
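+#
+# As in NET_0014, reshape_2 restores the shape of the squeezed tensor via
+# the Shape node, so the softmax result comes back out as [2, 3].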
+
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 2 }
+ dim { size: 1 }
+ dim { size: 1 }
+ dim { size: 3 }
+ }
+ }
+ }
+}
+node {
+ name: "squeeze"
+ op: "Squeeze"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "squeeze_dims"
+ value {
+ list { i: 1 i: 2 }
+ }
+ }
+}
+node {
+ name: "Reshape/shape"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim { size: 2 }
+ }
+ int_val: -1
+ int_val: 3
+ }
+ }
+ }
+}
+node {
+ name: "reshape_1"
+ op: "Reshape"
+ input: "squeeze"
+ input: "Reshape/shape"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tshape"
+ value { type: DT_INT32 }
+ }
+}
+node {
+ name: "softmax"
+ op: "Softmax"
+ input: "reshape_1"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+}
+node {
+ name: "shape"
+ op: "Shape"
+ input: "squeeze"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "out_type"
+ value { type: DT_INT32 }
+ }
+}
+node {
+ name: "reshape_2"
+ op: "Reshape"
+ input: "softmax"
+ input: "shape"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tshape"
+ value { type: DT_INT32 }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0021/test.info b/res/TensorFlowTests/NET_0021/test.info
new file mode 100644
index 000000000..059b21cca
--- /dev/null
+++ b/res/TensorFlowTests/NET_0021/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 7, 7, 1]
+output, maxpool2d:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/NET_0021/test.pbtxt b/res/TensorFlowTests/NET_0021/test.pbtxt
new file mode 100644
index 000000000..b1eadccd9
--- /dev/null
+++ b/res/TensorFlowTests/NET_0021/test.pbtxt
@@ -0,0 +1,65 @@
+# HOW TO GENERATE:
+#
+# import tensorflow as tf
+# value = tf.placeholder(dtype=tf.float32, shape=[1, 7, 7, 1], name='placeholder')
+# output = tf.nn.max_pool(value, [1, 3, 3, 1], [1, 2, 2, 1], 'VALID', name='maxpool2d')
+# tf.get_default_graph().as_graph_def()
+#
+# NOTE 1. The output shape is 1x3x3x1
+#
+# >>> tf.graph_util.tensor_shape_from_node_def_name(tf.get_default_graph(), 'maxpool2d')
+# TensorShape([Dimension(1), Dimension(3), Dimension(3), Dimension(1)])
+#
+# NOTE 2. All the MaxPool nodes in inception v3 2018.04.27 use this configuration.
+# - InceptionV3/InceptionV3/MaxPool_3a_3x3/MaxPool
+# - InceptionV3/InceptionV3/MaxPool_5a_3x3/MaxPool
+# - InceptionV3/InceptionV3/Mixed_6a/Branch_2/MaxPool_1a_3x3/MaxPool
+# - InceptionV3/InceptionV3/Mixed_7a/Branch_2/MaxPool_1a_3x3/MaxPool
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 7 }
+ dim { size: 7 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "maxpool2d"
+ op: "MaxPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 3 i: 3 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 2 i: 2 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0022/test.info b/res/TensorFlowTests/NET_0022/test.info
new file mode 100644
index 000000000..89c33ee9c
--- /dev/null
+++ b/res/TensorFlowTests/NET_0022/test.info
@@ -0,0 +1,5 @@
+# This has two inputs and two outputs. Let's make our code handle multiple inputs and outputs.
+input, input_1:0, TF_FLOAT, [2, 2]
+input, input_2:0, TF_FLOAT, [2, 2]
+output, output_1:0, TF_FLOAT, [2, 2]
+output, output_2:0, TF_FLOAT, [2, 2]
diff --git a/res/TensorFlowTests/NET_0022/test.pbtxt b/res/TensorFlowTests/NET_0022/test.pbtxt
new file mode 100644
index 000000000..c8879e84f
--- /dev/null
+++ b/res/TensorFlowTests/NET_0022/test.pbtxt
@@ -0,0 +1,70 @@
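+# Two Placeholder inputs feed two independent Add nodes, so the graph has
+# two inputs and two outputs (see test.info).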
+node {
+ name: "input_1"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_2"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "output_1"
+ op: "Add"
+ input: "input_1"
+ input: "input_2"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "output_2"
+ op: "Add"
+ input: "input_1"
+ input: "input_2"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0023/test.info b/res/TensorFlowTests/NET_0023/test.info
new file mode 100644
index 000000000..856de94d4
--- /dev/null
+++ b/res/TensorFlowTests/NET_0023/test.info
@@ -0,0 +1,2 @@
+# This has no input
+output, out:0, TF_FLOAT, [2, 2]
diff --git a/res/TensorFlowTests/NET_0023/test.pbtxt b/res/TensorFlowTests/NET_0023/test.pbtxt
new file mode 100644
index 000000000..2ef5a0b99
--- /dev/null
+++ b/res/TensorFlowTests/NET_0023/test.pbtxt
@@ -0,0 +1,72 @@
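+# No Placeholder: both operands are Const, so "out" is fully
+# constant-foldable to [[2, 4], [6, 8]].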
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1
+ float_val: 2
+ float_val: 3
+ float_val: 4
+ }
+ }
+ }
+}
+node {
+ name: "Const_1"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1
+ float_val: 2
+ float_val: 3
+ float_val: 4
+ }
+ }
+ }
+}
+node {
+ name: "out"
+ op: "Add"
+ input: "Const"
+ input: "Const_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0024/test.info b/res/TensorFlowTests/NET_0024/test.info
new file mode 100644
index 000000000..f049f9d43
--- /dev/null
+++ b/res/TensorFlowTests/NET_0024/test.info
@@ -0,0 +1,3 @@
+input, fst:0, TF_FLOAT, [1, 6, 1, 1]
+input, snd:0, TF_FLOAT, [1, 3, 1, 1]
+output, out:0, TF_FLOAT, [1, 9, 1, 1]
diff --git a/res/TensorFlowTests/NET_0024/test.pbtxt b/res/TensorFlowTests/NET_0024/test.pbtxt
new file mode 100644
index 000000000..ee3f99dd9
--- /dev/null
+++ b/res/TensorFlowTests/NET_0024/test.pbtxt
@@ -0,0 +1,76 @@
+# This example intentionally declares the "snd" node before the "fst" node,
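+# presumably so that node inputs must be resolved by name rather than by
+# declaration order. out = ConcatV2(fst [1, 6, 1, 1], snd [1, 3, 1, 1],
+# axis = 1) -> [1, 9, 1, 1].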
+node {
+ name: "snd"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 1 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "fst"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 6 }
+ dim { size: 1 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "axis"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape { }
+ int_val: 1
+ }
+ }
+ }
+}
+node {
+ name: "out"
+ op: "ConcatV2"
+ input: "fst"
+ input: "snd"
+ input: "axis"
+ attr {
+ key: "N"
+ value { i: 2 }
+ }
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0025/test.info b/res/TensorFlowTests/NET_0025/test.info
new file mode 100644
index 000000000..6c5eafde7
--- /dev/null
+++ b/res/TensorFlowTests/NET_0025/test.info
@@ -0,0 +1,3 @@
+# This is a slim version of the first 4 nodes in inception v3 (from Placeholder to the first ReLU)
+input, input:0, TF_FLOAT, [1, 5, 5, 3]
+output, InceptionV3/InceptionV3/Conv2d_1a_3x3/Relu:0, TF_FLOAT, [1, 2, 2, 4]
diff --git a/res/TensorFlowTests/NET_0025/test.pbtxt b/res/TensorFlowTests/NET_0025/test.pbtxt
new file mode 100644
index 000000000..9b3316dc4
--- /dev/null
+++ b/res/TensorFlowTests/NET_0025/test.pbtxt
@@ -0,0 +1,167 @@
+node {
+ name: "InceptionV3/Conv2d_1a_3x3/weights/read/_3__cf__3"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "input"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 5
+ }
+ dim {
+ size: 5
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D"
+ op: "Conv2D"
+ input: "input:0"
+ input: "InceptionV3/Conv2d_1a_3x3/weights/read/_3__cf__3"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D_bn_offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/BatchNorm/FusedBatchNorm"
+ op: "BiasAdd"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D_bn_offset"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Relu"
+ op: "Relu"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/BatchNorm/FusedBatchNorm"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0026/test.info b/res/TensorFlowTests/NET_0026/test.info
new file mode 100644
index 000000000..b96ff35b8
--- /dev/null
+++ b/res/TensorFlowTests/NET_0026/test.info
@@ -0,0 +1,3 @@
+# This is a slim version of the first 7 nodes in inception v3 (from Placeholder to the second ReLU)
+input, input:0, TF_FLOAT, [1, 9, 9, 3]
+output, InceptionV3/InceptionV3/Conv2d_2a_3x3/Relu:0, TF_FLOAT, [1, 2, 2, 4]
diff --git a/res/TensorFlowTests/NET_0026/test.pbtxt b/res/TensorFlowTests/NET_0026/test.pbtxt
new file mode 100644
index 000000000..ea0262784
--- /dev/null
+++ b/res/TensorFlowTests/NET_0026/test.pbtxt
@@ -0,0 +1,305 @@
+node {
+ name: "InceptionV3/Conv2d_1a_3x3/weights/read/_3__cf__3"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "input"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 9
+ }
+ dim {
+ size: 9
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D"
+ op: "Conv2D"
+ input: "input:0"
+ input: "InceptionV3/Conv2d_1a_3x3/weights/read/_3__cf__3"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D_bn_offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/BatchNorm/FusedBatchNorm"
+ op: "BiasAdd"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D_bn_offset"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Relu"
+ op: "Relu"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/BatchNorm/FusedBatchNorm"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InceptionV3/Conv2d_2a_3x3/weights/read/_7__cf__7"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D"
+ op: "Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Relu"
+ input: "InceptionV3/Conv2d_2a_3x3/weights/read/_7__cf__7"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D_bn_offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/BatchNorm/FusedBatchNorm"
+ op: "BiasAdd"
+ input: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D_bn_offset"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Relu"
+ op: "Relu"
+ input: "InceptionV3/InceptionV3/Conv2d_2a_3x3/BatchNorm/FusedBatchNorm"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0027/test.info b/res/TensorFlowTests/NET_0027/test.info
new file mode 100644
index 000000000..db97c4603
--- /dev/null
+++ b/res/TensorFlowTests/NET_0027/test.info
@@ -0,0 +1,3 @@
+# This is a slim version of the first 10 nodes in inception v3 (from Placeholder to MaxPool2D)
+input, input:0, TF_FLOAT, [1, 15, 15, 3]
+output, InceptionV3/InceptionV3/MaxPool_3a_3x3/MaxPool:0, TF_FLOAT, [1, 2, 2, 5]
diff --git a/res/TensorFlowTests/NET_0027/test.pbtxt b/res/TensorFlowTests/NET_0027/test.pbtxt
new file mode 100644
index 000000000..af5fa9372
--- /dev/null
+++ b/res/TensorFlowTests/NET_0027/test.pbtxt
@@ -0,0 +1,488 @@
+node {
+ name: "InceptionV3/Conv2d_1a_3x3/weights/read/_3__cf__3"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "input"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 15
+ }
+ dim {
+ size: 15
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D"
+ op: "Conv2D"
+ input: "input:0"
+ input: "InceptionV3/Conv2d_1a_3x3/weights/read/_3__cf__3"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D_bn_offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/BatchNorm/FusedBatchNorm"
+ op: "BiasAdd"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Conv2D_bn_offset"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Relu"
+ op: "Relu"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/BatchNorm/FusedBatchNorm"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InceptionV3/Conv2d_2a_3x3/weights/read/_7__cf__7"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D"
+ op: "Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_1a_3x3/Relu"
+ input: "InceptionV3/Conv2d_2a_3x3/weights/read/_7__cf__7"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D_bn_offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/BatchNorm/FusedBatchNorm"
+ op: "BiasAdd"
+ input: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Conv2D_bn_offset"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Relu"
+ op: "Relu"
+ input: "InceptionV3/InceptionV3/Conv2d_2a_3x3/BatchNorm/FusedBatchNorm"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2b_3x3/Conv2D"
+ op: "Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_2a_3x3/Relu"
+ input: "InceptionV3/Conv2d_2b_3x3/weights/read/_11__cf__11"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InceptionV3/Conv2d_2b_3x3/weights/read/_11__cf__11"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 5
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2b_3x3/Conv2D_bn_offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 5
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2b_3x3/BatchNorm/FusedBatchNorm"
+ op: "BiasAdd"
+ input: "InceptionV3/InceptionV3/Conv2d_2b_3x3/Conv2D"
+ input: "InceptionV3/InceptionV3/Conv2d_2b_3x3/Conv2D_bn_offset"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/Conv2d_2b_3x3/Relu"
+ op: "Relu"
+ input: "InceptionV3/InceptionV3/Conv2d_2b_3x3/BatchNorm/FusedBatchNorm"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InceptionV3/InceptionV3/MaxPool_3a_3x3/MaxPool"
+ op: "MaxPool"
+ input: "InceptionV3/InceptionV3/Conv2d_2b_3x3/Relu"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list {
+ i: 1
+ i: 3
+ i: 3
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/NET_0028/test.info b/res/TensorFlowTests/NET_0028/test.info
new file mode 100644
index 000000000..d09ce02da
--- /dev/null
+++ b/res/TensorFlowTests/NET_0028/test.info
@@ -0,0 +1,3 @@
+# This is a slim version of Instance Normalization
+input, Input:0, TF_FLOAT, [1, 8, 6, 12]
+output, Relu:0, TF_FLOAT, [1, 8, 6, 12]
diff --git a/res/TensorFlowTests/NET_0028/test.pbtxt b/res/TensorFlowTests/NET_0028/test.pbtxt
new file mode 100644
index 000000000..48a5bccd3
--- /dev/null
+++ b/res/TensorFlowTests/NET_0028/test.pbtxt
@@ -0,0 +1,373 @@
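+# Instance normalization decomposed into primitive ops. With
+# mul = rsqrt(variance + epsilon) * gamma, the graph computes
+#
+#   add_1 = Input * mul + (beta - mean * mul)
+#
+# which equals (Input - mean) / sqrt(variance + epsilon) * gamma + beta,
+# followed by a final Relu.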
+node {
+ name: "Input"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 8
+ }
+ dim {
+ size: 6
+ }
+ dim {
+ size: 12
+ }
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/beta"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 12
+ }
+ }
+ float_val: 1.97139931
+ float_val: 1.45169675
+ float_val: 1.2031461
+ float_val: 0.287978739
+ float_val: 0.161815107
+ float_val: -0.281398058
+ float_val: 2.70276475
+ float_val: -0.166961521
+ float_val: 0.266388983
+ float_val: 0.890943348
+ float_val: -0.279832929
+ float_val: 1.82808423
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/beta/read"
+ op: "Identity"
+ input: "InstanceNorm/beta"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@InstanceNorm/beta"
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/gamma"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 12
+ }
+ }
+ float_val: 0.574708045
+ float_val: 0.387735426
+ float_val: 0.899499536
+ float_val: 0.484296113
+ float_val: 2.35850787
+ float_val: 1.06661248
+ float_val: 0.343602151
+ float_val: 2.27582788
+ float_val: 1.14559281
+ float_val: 0.690169275
+ float_val: 1.20440173
+ float_val: 0.350952208
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/gamma/read"
+ op: "Identity"
+ input: "InstanceNorm/gamma"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@InstanceNorm/gamma"
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/moments/mean/reduction_indices"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ int_val: 1
+ int_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/moments/mean"
+ op: "Mean"
+ input: "Input"
+ input: "InstanceNorm/moments/mean/reduction_indices"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InstanceNorm/moments/StopGradient"
+ op: "StopGradient"
+ input: "InstanceNorm/moments/mean"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/moments/SquaredDifference"
+ op: "SquaredDifference"
+ input: "Input"
+ input: "InstanceNorm/moments/StopGradient"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/moments/variance/reduction_indices"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ int_val: 1
+ int_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/moments/variance"
+ op: "Mean"
+ input: "InstanceNorm/moments/SquaredDifference"
+ input: "InstanceNorm/moments/variance/reduction_indices"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "keep_dims"
+ value {
+ b: true
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/add/y"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 0.001
+ }
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/add"
+ op: "Add"
+ input: "InstanceNorm/moments/variance"
+ input: "InstanceNorm/instancenorm/add/y"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/Rsqrt"
+ op: "Rsqrt"
+ input: "InstanceNorm/instancenorm/add"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/mul"
+ op: "Mul"
+ input: "InstanceNorm/instancenorm/Rsqrt"
+ input: "InstanceNorm/gamma/read"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/mul_1"
+ op: "Mul"
+ input: "Input"
+ input: "InstanceNorm/instancenorm/mul"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/mul_2"
+ op: "Mul"
+ input: "InstanceNorm/moments/mean"
+ input: "InstanceNorm/instancenorm/mul"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/sub"
+ op: "Sub"
+ input: "InstanceNorm/beta/read"
+ input: "InstanceNorm/instancenorm/mul_2"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "InstanceNorm/instancenorm/add_1"
+ op: "Add"
+ input: "InstanceNorm/instancenorm/mul_1"
+ input: "InstanceNorm/instancenorm/sub"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
+node {
+ name: "Relu"
+ op: "Relu"
+ input: "InstanceNorm/instancenorm/add_1"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/REGRESSION_0000/test.info b/res/TensorFlowTests/REGRESSION_0000/test.info
new file mode 100644
index 000000000..1012f87e8
--- /dev/null
+++ b/res/TensorFlowTests/REGRESSION_0000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 3]
+output, Add:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/REGRESSION_0000/test.pbtxt b/res/TensorFlowTests/REGRESSION_0000/test.pbtxt
new file mode 100644
index 000000000..aa552ce65
--- /dev/null
+++ b/res/TensorFlowTests/REGRESSION_0000/test.pbtxt
@@ -0,0 +1,68 @@
+# Add network with Placeholder and Const as arguments
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1.1
+ float_val: 2.2
+ float_val: 3.3
+ float_val: 4.4
+ float_val: 5.5
+ float_val: 6.6
+ }
+ }
+ }
+}
+node {
+ name: "Add"
+ op: "Add"
+ input: "Placeholder"
+ input: "Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Add_000/test.info b/res/TensorFlowTests/UNIT_Add_000/test.info
new file mode 100644
index 000000000..77c2dbe0d
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_000/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 1]
+input, input_02:0, TF_FLOAT, [1, 3, 3, 1]
+output, add:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_Add_000/test.pbtxt b/res/TensorFlowTests/UNIT_Add_000/test.pbtxt
new file mode 100644
index 000000000..b03826840
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_000/test.pbtxt
@@ -0,0 +1,70 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "add"
+ op: "Add"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Add_001/test.info b/res/TensorFlowTests/UNIT_Add_001/test.info
new file mode 100644
index 000000000..f99f0f808
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_001/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, [1, 1, 1, 5]
+output, add:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Add_001/test.pbtxt b/res/TensorFlowTests/UNIT_Add_001/test.pbtxt
new file mode 100644
index 000000000..ff89686b8
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_001/test.pbtxt
@@ -0,0 +1,70 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "add"
+ op: "Add"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Add_002/test.info b/res/TensorFlowTests/UNIT_Add_002/test.info
new file mode 100644
index 000000000..4471cff17
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_002/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, [5]
+output, add:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Add_002/test.pbtxt b/res/TensorFlowTests/UNIT_Add_002/test.pbtxt
new file mode 100644
index 000000000..5e320a413
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_002/test.pbtxt
@@ -0,0 +1,61 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "add"
+ op: "Add"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Add_003/test.info b/res/TensorFlowTests/UNIT_Add_003/test.info
new file mode 100644
index 000000000..8c87db7ea
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_003/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, []
+output, add:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Add_003/test.pbtxt b/res/TensorFlowTests/UNIT_Add_003/test.pbtxt
new file mode 100644
index 000000000..96b9219af
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_003/test.pbtxt
@@ -0,0 +1,58 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ }
+ }
+ }
+}
+node {
+ name: "add"
+ op: "Add"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Add_004/test.info b/res/TensorFlowTests/UNIT_Add_004/test.info
new file mode 100644
index 000000000..201dfd6fc
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_004/test.info
@@ -0,0 +1,2 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+output, add:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Add_004/test.pbtxt b/res/TensorFlowTests/UNIT_Add_004/test.pbtxt
new file mode 100644
index 000000000..ca9295022
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_004/test.pbtxt
@@ -0,0 +1,62 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "scala"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "add"
+ op: "Add"
+ input: "input_01"
+ input: "scala"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Add_005/test.info b/res/TensorFlowTests/UNIT_Add_005/test.info
new file mode 100644
index 000000000..7cd730ae6
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_005/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 1, 1, 5]
+input, input_02:0, TF_FLOAT, [1, 3, 3, 5]
+output, add:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Add_005/test.pbtxt b/res/TensorFlowTests/UNIT_Add_005/test.pbtxt
new file mode 100644
index 000000000..cc28366b7
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Add_005/test.pbtxt
@@ -0,0 +1,55 @@
+# LHS broadcasting: the left-hand operand [1,1,1,5] is broadcast against [1,3,3,5].
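+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# a = tf.placeholder(tf.float32, shape=[1,1,1,5], name="input_01")
+# b = tf.placeholder(tf.float32, shape=[1,3,3,5], name="input_02")
+# add = tf.add(a, b, name="add")
+# print(tf.get_default_graph().as_graph_def())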
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 1 }
+ dim { size: 1 }
+ dim { size: 5 }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 3 }
+ dim { size: 5 }
+ }
+ }
+ }
+}
+node {
+ name: "add"
+ op: "Add"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_AvgPool_000/test.info b/res/TensorFlowTests/UNIT_AvgPool_000/test.info
new file mode 100644
index 000000000..31c94cffa
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_AvgPool_000/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 3, 3, 1]
+output, avgpool2d:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_AvgPool_000/test.pbtxt b/res/TensorFlowTests/UNIT_AvgPool_000/test.pbtxt
new file mode 100644
index 000000000..8c71f0b9e
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_AvgPool_000/test.pbtxt
@@ -0,0 +1,81 @@
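+# 3x3 AvgPool, SAME padding, stride 1 (output shape equals input shape).
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[1,3,3,1], name="placeholder")
+# y = tf.nn.avg_pool(x, ksize=[1,3,3,1], strides=[1,1,1,1],
+#                    padding='SAME', name="avgpool2d")
+# print(tf.get_default_graph().as_graph_def())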
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "avgpool2d"
+ op: "AvgPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list {
+ i: 1
+ i: 3
+ i: 3
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_AvgPool_001/test.info b/res/TensorFlowTests/UNIT_AvgPool_001/test.info
new file mode 100644
index 000000000..2cf5bfe34
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_AvgPool_001/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 4, 4, 1]
+output, avgpool2d:0, TF_FLOAT, [1, 2, 2, 1]
diff --git a/res/TensorFlowTests/UNIT_AvgPool_001/test.pbtxt b/res/TensorFlowTests/UNIT_AvgPool_001/test.pbtxt
new file mode 100644
index 000000000..faf117ca4
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_AvgPool_001/test.pbtxt
@@ -0,0 +1,74 @@
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "avgpool2d"
+ op: "AvgPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_BiasAdd_000/test.info b/res/TensorFlowTests/UNIT_BiasAdd_000/test.info
new file mode 100644
index 000000000..cbb4f38dd
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_BiasAdd_000/test.info
@@ -0,0 +1 @@
+output, out:0, TF_FLOAT, [1, 5, 5, 3]
diff --git a/res/TensorFlowTests/UNIT_BiasAdd_000/test.pbtxt b/res/TensorFlowTests/UNIT_BiasAdd_000/test.pbtxt
new file mode 100644
index 000000000..642eac655
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_BiasAdd_000/test.pbtxt
@@ -0,0 +1,64 @@
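+# BiasAdd of two constants in NHWC layout.
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# val = tf.constant(2.1, shape=[1,5,5,3], dtype=tf.float32, name="val")
+# bias = tf.constant(1.1, shape=[3], dtype=tf.float32, name="bias")
+# out = tf.nn.bias_add(val, bias, data_format='NHWC', name="out")
+# print(tf.get_default_graph().as_graph_def())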
+node {
+ name: "val"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 1 }
+ dim { size: 5 }
+ dim { size: 5 }
+ dim { size: 3 }
+ }
+ float_val: 2.1
+ }
+ }
+ }
+}
+node {
+ name: "bias"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 3 }
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "out"
+ op: "BiasAdd"
+ input: "val"
+ input: "bias"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_BiasAdd_001/test.info b/res/TensorFlowTests/UNIT_BiasAdd_001/test.info
new file mode 100644
index 000000000..04c89d69c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_BiasAdd_001/test.info
@@ -0,0 +1 @@
+output, out:0, TF_FLOAT, [1, 3, 4, 4]
diff --git a/res/TensorFlowTests/UNIT_BiasAdd_001/test.pbtxt b/res/TensorFlowTests/UNIT_BiasAdd_001/test.pbtxt
new file mode 100644
index 000000000..6b543084d
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_BiasAdd_001/test.pbtxt
@@ -0,0 +1,57 @@
+node {
+ name: "val"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 4 }
+ dim { size: 4 }
+ }
+ float_val: 2.1
+ }
+ }
+ }
+}
+node {
+ name: "bias"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 3 }
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "out"
+ op: "BiasAdd"
+ input: "val"
+ input: "bias"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NCHW" }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_BiasAdd_002/test.info b/res/TensorFlowTests/UNIT_BiasAdd_002/test.info
new file mode 100644
index 000000000..2133df438
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_BiasAdd_002/test.info
@@ -0,0 +1,2 @@
+input, input:0, TF_FLOAT, [1, 16, 16, 2]
+output, output:0, TF_FLOAT, [1, 16, 16, 2]
diff --git a/res/TensorFlowTests/UNIT_BiasAdd_002/test.pbtxt b/res/TensorFlowTests/UNIT_BiasAdd_002/test.pbtxt
new file mode 100644
index 000000000..336a10338
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_BiasAdd_002/test.pbtxt
@@ -0,0 +1,51 @@
+node {
+ name: "input"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 16 }
+ dim { size: 16 }
+ dim { size: 2 }
+ }
+ }
+ }
+}
+node {
+ name: "bias"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape { dim { size: 2 } }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "output"
+ op: "BiasAdd"
+ input: "input"
+ input: "bias"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Concat_000/test.info b/res/TensorFlowTests/UNIT_Concat_000/test.info
new file mode 100644
index 000000000..9ea940f2a
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Concat_000/test.info
@@ -0,0 +1 @@
+output, Concat:0, TF_FLOAT, [4, 3]
diff --git a/res/TensorFlowTests/UNIT_Concat_000/test.pbtxt b/res/TensorFlowTests/UNIT_Concat_000/test.pbtxt
new file mode 100644
index 000000000..77dbea792
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Concat_000/test.pbtxt
@@ -0,0 +1,118 @@
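+# ConcatV2 of two 2x3 constants along axis 0 -> [4, 3].
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API;
+# tf.concat emits a ConcatV2 node, and the axis constant may be named differently):
+# import tensorflow as tf
+# a = tf.constant([[1,2,3],[4,5,6]], dtype=tf.float32, name="Input01")
+# b = tf.constant([[7,8,9],[10,11,12]], dtype=tf.float32, name="Input02")
+# out = tf.concat([a, b], axis=0, name="Concat")
+# print(tf.get_default_graph().as_graph_def())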
+node {
+ name: "Input01"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1
+ float_val: 2
+ float_val: 3
+ float_val: 4
+ float_val: 5
+ float_val: 6
+ }
+ }
+ }
+}
+node {
+ name: "Input02"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 7
+ float_val: 8
+ float_val: 9
+ float_val: 10
+ float_val: 11
+ float_val: 12
+ }
+ }
+ }
+}
+node {
+ name: "Axis"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ }
+ int_val: 0
+ }
+ }
+ }
+}
+node {
+ name: "Concat"
+ op: "ConcatV2"
+ input: "Input01"
+ input: "Input02"
+ input: "Axis"
+ attr {
+ key: "N"
+ value {
+ i: 2
+ }
+ }
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Concat_001/test.info b/res/TensorFlowTests/UNIT_Concat_001/test.info
new file mode 100644
index 000000000..e2add430f
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Concat_001/test.info
@@ -0,0 +1 @@
+output, Concat:0, TF_FLOAT, [6, 3]
diff --git a/res/TensorFlowTests/UNIT_Concat_001/test.pbtxt b/res/TensorFlowTests/UNIT_Concat_001/test.pbtxt
new file mode 100644
index 000000000..ac3e85581
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Concat_001/test.pbtxt
@@ -0,0 +1,143 @@
+node {
+ name: "Input01"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1
+ float_val: 2
+ float_val: 3
+ float_val: 4
+ float_val: 5
+ float_val: 6
+ }
+ }
+ }
+}
+node {
+ name: "Input02"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 7
+ float_val: 8
+ float_val: 9
+ float_val: 10
+ float_val: 11
+ float_val: 12
+ }
+ }
+ }
+}
+node {
+ name: "Input03"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 13
+ float_val: 14
+ float_val: 15
+ float_val: 16
+ float_val: 17
+ float_val: 18
+ }
+ }
+ }
+}
+node {
+ name: "Axis"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ }
+ int_val: 0
+ }
+ }
+ }
+}
+node {
+ name: "Concat"
+ op: "ConcatV2"
+ input: "Input01"
+ input: "Input02"
+ input: "Input03"
+ input: "Axis"
+ attr {
+ key: "N"
+ value {
+ i: 3
+ }
+ }
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Concat_002/test.info b/res/TensorFlowTests/UNIT_Concat_002/test.info
new file mode 100644
index 000000000..460093bd5
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Concat_002/test.info
@@ -0,0 +1,3 @@
+input, Input01:0, TF_FLOAT, [2, 3]
+input, Input02:0, TF_FLOAT, [2, 3]
+output, Concat:0, TF_FLOAT, [2, 6]
diff --git a/res/TensorFlowTests/UNIT_Concat_002/test.pbtxt b/res/TensorFlowTests/UNIT_Concat_002/test.pbtxt
new file mode 100644
index 000000000..9a524040a
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Concat_002/test.pbtxt
@@ -0,0 +1,92 @@
+node {
+ name: "Input01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Input02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Axis"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ }
+ int_val: -1
+ }
+ }
+ }
+}
+node {
+ name: "Concat"
+ op: "ConcatV2"
+ input: "Input01"
+ input: "Input02"
+ input: "Axis"
+ attr {
+ key: "N"
+ value {
+ i: 2
+ }
+ }
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tidx"
+ value {
+ type: DT_INT32
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Const_000/test.info b/res/TensorFlowTests/UNIT_Const_000/test.info
new file mode 100644
index 000000000..43b4f06a5
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Const_000/test.info
@@ -0,0 +1 @@
+output, const/float:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/UNIT_Const_000/test.pbtxt b/res/TensorFlowTests/UNIT_Const_000/test.pbtxt
new file mode 100644
index 000000000..b91ade2f3
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Const_000/test.pbtxt
@@ -0,0 +1,38 @@
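+# A single 2x3 float Const node.
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# c = tf.constant([[1.1,2.2,3.3],[4.4,5.5,6.6]], dtype=tf.float32,
+#                 name="const/float")
+# print(tf.get_default_graph().as_graph_def())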
+node {
+ name: "const/float"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1.1
+ float_val: 2.2
+ float_val: 3.3
+ float_val: 4.4
+ float_val: 5.5
+ float_val: 6.6
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Const_001/test.info b/res/TensorFlowTests/UNIT_Const_001/test.info
new file mode 100644
index 000000000..445b34b67
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Const_001/test.info
@@ -0,0 +1 @@
+output, const/int32:0, TF_INT32, [2, 3]
diff --git a/res/TensorFlowTests/UNIT_Const_001/test.pbtxt b/res/TensorFlowTests/UNIT_Const_001/test.pbtxt
new file mode 100644
index 000000000..bec7dfb92
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Const_001/test.pbtxt
@@ -0,0 +1,32 @@
+node {
+ name: "const/int32"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ int_val: 1
+ int_val: 2
+ int_val: 3
+ int_val: 4
+ int_val: 5
+ int_val: 6
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.info b/res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.info
new file mode 100644
index 000000000..2537d0732
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.info
@@ -0,0 +1,2 @@
+input, ifm:0, TF_FLOAT, [1, 8, 6, 3]
+output, ofm:0, TF_FLOAT, [1, 16, 12, 2]
diff --git a/res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.pbtxt b/res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.pbtxt
new file mode 100644
index 000000000..8930c4221
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Conv2DBackpropInput_001/test.pbtxt
@@ -0,0 +1,146 @@
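+# Conv2DBackpropInput used as a transposed convolution:
+# 3x3 kernel, stride 2, SAME padding, fixed output shape [1,16,12,2].
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API;
+# tf.nn.conv2d_transpose emits a Conv2DBackpropInput node):
+# import tensorflow as tf
+# ifm = tf.placeholder(tf.float32, shape=[1,8,6,3], name="ifm")
+# ker = tf.constant(1.0, shape=[3,3,2,3], dtype=tf.float32)
+# ofm = tf.nn.conv2d_transpose(ifm, ker, output_shape=[1,16,12,2],
+#                              strides=[1,2,2,1], padding='SAME', name="ofm")
+# print(tf.get_default_graph().as_graph_def())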
+node {
+ name: "ifm"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 8
+ }
+ dim {
+ size: 6
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "weights"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "outshape"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 4
+ }
+ }
+ int_val: 1
+ int_val: 16
+ int_val: 12
+ int_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "ofm"
+ op: "Conv2DBackpropInput"
+ input: "outshape"
+ input: "weights"
+ input: "ifm"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Conv2D_000/test.info b/res/TensorFlowTests/UNIT_Conv2D_000/test.info
new file mode 100644
index 000000000..22facc3fb
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Conv2D_000/test.info
@@ -0,0 +1,3 @@
+# Conv2D with VALID padding, strides = [1, 1]
+input, ifm:0, TF_FLOAT, [1, 5, 5, 3]
+output, ofm:0, TF_FLOAT, [1, 4, 4, 2]
diff --git a/res/TensorFlowTests/UNIT_Conv2D_000/test.pbtxt b/res/TensorFlowTests/UNIT_Conv2D_000/test.pbtxt
new file mode 100644
index 000000000..3649aa827
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Conv2D_000/test.pbtxt
@@ -0,0 +1,83 @@
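+# 2x2 Conv2D, VALID padding, stride 1: [1,5,5,3] -> [1,4,4,2].
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# ifm = tf.placeholder(tf.float32, shape=[1,5,5,3], name="ifm")
+# ker = tf.constant(1.1, shape=[2,2,3,2], dtype=tf.float32, name="ker")
+# ofm = tf.nn.conv2d(ifm, ker, strides=[1,1,1,1], padding='VALID', name="ofm")
+# print(tf.get_default_graph().as_graph_def())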
+node {
+ name: "ifm"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 5 }
+ dim { size: 5 }
+ dim { size: 3 }
+ }
+ }
+ }
+}
+node {
+ name: "ker"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim { size: 2 }
+ dim { size: 2 }
+ dim { size: 3 }
+ dim { size: 2 }
+ }
+ float_val: 1.1
+ }
+ }
+ }
+}
+node {
+ name: "ofm"
+ op: "Conv2D"
+ input: "ifm"
+ input: "ker"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+ attr {
+ key: "use_cudnn_on_gpu"
+ value { b: false }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_CustomOp_000/customop.conf b/res/TensorFlowTests/UNIT_CustomOp_000/customop.conf
new file mode 100644
index 000000000..08ba0f09a
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_CustomOp_000/customop.conf
@@ -0,0 +1,22 @@
+# Defines the list of custom ops for this compilation.
+custom_op {
+ name: "my/customOp/000"
+ op: "new_custom_op"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "output_shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 2 }
+ dim { size: 1 }
+ dim { size: 2 }
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_CustomOp_000/test.info b/res/TensorFlowTests/UNIT_CustomOp_000/test.info
new file mode 100644
index 000000000..e3e2b176e
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_CustomOp_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 2, 1, 2]
+output, output/relu:0, TF_FLOAT, [1, 2, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_CustomOp_000/test.pbtxt b/res/TensorFlowTests/UNIT_CustomOp_000/test.pbtxt
new file mode 100644
index 000000000..373f5b845
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_CustomOp_000/test.pbtxt
@@ -0,0 +1,55 @@
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 2 }
+ dim { size: 1 }
+ dim { size: 2 }
+ }
+ }
+ }
+}
+node {
+ name: "my/customOp/000"
+ op: "new_custom_op"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "my_string"
+ value { s: "Hello World" }
+ }
+ attr {
+ key: "my_float"
+ value { f: 0.001 }
+ }
+ attr {
+ key: "my_int"
+ value { i: 111 }
+ }
+ attr {
+ key: "my_bool"
+ value { b: false }
+ }
+}
+node {
+ name: "output/relu"
+ op: "Relu"
+ input: "my/customOp/000"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_CustomOp_001/customop.conf b/res/TensorFlowTests/UNIT_CustomOp_001/customop.conf
new file mode 100644
index 000000000..92c0dd2f2
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_CustomOp_001/customop.conf
@@ -0,0 +1,19 @@
+# Defines the list of custom ops for this compilation.
+custom_op {
+ name: "FillFrom"
+ op: "FillFrom"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "output_shape"
+ value {
+ shape {
+ dim { size: 10 }
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_CustomOp_001/test.info b/res/TensorFlowTests/UNIT_CustomOp_001/test.info
new file mode 100644
index 000000000..473374cd1
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_CustomOp_001/test.info
@@ -0,0 +1,2 @@
+input, input_00:0, TF_FLOAT, [10]
+output, FillFrom:0, TF_FLOAT, [10]
diff --git a/res/TensorFlowTests/UNIT_CustomOp_001/test.pbtxt b/res/TensorFlowTests/UNIT_CustomOp_001/test.pbtxt
new file mode 100644
index 000000000..ad673e0a7
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_CustomOp_001/test.pbtxt
@@ -0,0 +1,38 @@
+# test case discussed in https://github.sec.samsung.net/STAR/nnfw/issues/5607
+node {
+ name: "input_00"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 10
+ }
+ }
+ }
+ }
+}
+node {
+ name: "FillFrom"
+ op: "FillFrom"
+ input: "input_00"
+ attr {
+ key: "idx"
+ value {
+ i: 3
+ }
+ }
+ attr {
+ key: "val"
+ value {
+ f: 1.1
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.info b/res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.info
new file mode 100644
index 000000000..96a2b1403
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.info
@@ -0,0 +1,2 @@
+input, input:0, TF_FLOAT, [1, 11, 7, 3]
+output, DepthwiseConv2dNative:0, TF_FLOAT, [1, 6, 4, 12]
diff --git a/res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.pbtxt b/res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.pbtxt
new file mode 100644
index 000000000..1d8fcaa00
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_DepthwiseConv2D_000/test.pbtxt
@@ -0,0 +1,115 @@
+# A simple network with DepthwiseConv2dNative, 'SAME' padding, and 2x2 strides.
+# HOW TO GENERATE:
+# import tensorflow as tf
+# input = tf.placeholder(tf.float32, shape=[1,11,7,3], name="input")
+# filter = tf.constant(1.0, shape=[2,3,3,4], dtype=tf.float32)
+# dwconv = tf.nn.depthwise_conv2d_native(input,filter,[1,2,2,1],'SAME')
+# tf.get_default_graph().as_graph_def()
+node {
+ name: "input"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 11
+ }
+ dim {
+ size: 7
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "DepthwiseConv2dNative"
+ op: "DepthwiseConv2dNative"
+ input: "input"
+ input: "Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "SAME"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.info b/res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.info
new file mode 100644
index 000000000..c17595388
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.info
@@ -0,0 +1,2 @@
+input, input:0, TF_FLOAT, [1, 4, 4, 3]
+output, DepthwiseConv2dNative:0, TF_FLOAT, [1, 3, 3, 6]
diff --git a/res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.pbtxt b/res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.pbtxt
new file mode 100644
index 000000000..9a9717611
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_DepthwiseConv2D_001/test.pbtxt
@@ -0,0 +1,115 @@
+# A simple network with DepthwiseConv2dNative taking an input (Placeholder) and a filter (Const).
+# HOW TO GENERATE:
+# import tensorflow as tf
+# input = tf.placeholder(tf.float32, shape=[1,4,4,3], name="input")
+# filter = tf.constant(1.0, shape=[2,2,3,2], dtype=tf.float32)
+# dwconv = tf.nn.depthwise_conv2d_native(input,filter,[1,1,1,1],'VALID')
+# tf.get_default_graph().as_graph_def()
+node {
+ name: "input"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 2
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "DepthwiseConv2dNative"
+ op: "DepthwiseConv2dNative"
+ input: "input"
+ input: "Const"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "dilations"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.info b/res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.info
new file mode 100644
index 000000000..30840e8e3
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.info
@@ -0,0 +1,3 @@
+# FusedBatchNorm
+input, placeholder:0, TF_FLOAT, [1, 4, 4, 1]
+output, FBN_01:0, TF_FLOAT, [1, 4, 4, 1]
diff --git a/res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.pbtxt b/res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.pbtxt
new file mode 100644
index 000000000..40cec4abc
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_FusedBatchNorm_000/test.pbtxt
@@ -0,0 +1,168 @@
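+# FusedBatchNorm in inference mode (is_training = false).
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API;
+# constant node names may differ):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[1,4,4,1], name="placeholder")
+# c = lambda v: tf.constant(v, shape=[1], dtype=tf.float32)
+# y, _, _ = tf.nn.fused_batch_norm(x, scale=c(1.0), offset=c(0.0),
+#     mean=c(0.0), variance=c(1.0), epsilon=0.001,
+#     is_training=False, name="FBN_01")
+# print(tf.get_default_graph().as_graph_def())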
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "scale"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+}
+node {
+ name: "FBN_01/mean"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 0.0
+ }
+ }
+ }
+}
+node {
+ name: "FBN_01/variance"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 1
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "FBN_01"
+ op: "FusedBatchNorm"
+ input: "placeholder"
+ input: "scale"
+ input: "offset"
+ input: "FBN_01/mean"
+ input: "FBN_01/variance"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "epsilon"
+ value {
+ f: 0.001
+ }
+ }
+ attr {
+ key: "is_training"
+ value {
+ b: false
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.info b/res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.info
new file mode 100644
index 000000000..09141a44e
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.info
@@ -0,0 +1,3 @@
+# FusedBatchNorm
+input, placeholder:0, TF_FLOAT, [1, 4, 4, 3]
+output, FBN_01:0, TF_FLOAT, [1, 4, 4, 3]
diff --git a/res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.pbtxt b/res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.pbtxt
new file mode 100644
index 000000000..9ebeadf70
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_FusedBatchNorm_001/test.pbtxt
@@ -0,0 +1,158 @@
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 4
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "scale"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1.5
+ }
+ }
+ }
+}
+node {
+ name: "offset"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ }
+ float_val: 2.5
+ }
+ }
+ }
+}
+node {
+ name: "FBN_01/mean"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ }
+ float_val: 3.5
+ }
+ }
+ }
+}
+node {
+ name: "FBN_01/variance"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 3
+ }
+ }
+ float_val: 4.5
+ }
+ }
+ }
+}
+node {
+ name: "FBN_01"
+ op: "FusedBatchNorm"
+ input: "placeholder"
+ input: "scale"
+ input: "offset"
+ input: "FBN_01/mean"
+ input: "FBN_01/variance"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "epsilon"
+ value {
+ f: 0.001
+ }
+ }
+ attr {
+ key: "is_training"
+ value {
+ b: false
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_MaxPool_000/test.info b/res/TensorFlowTests/UNIT_MaxPool_000/test.info
new file mode 100644
index 000000000..c94e2e7fb
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_MaxPool_000/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 3, 3, 1]
+output, maxpool2d:0, TF_FLOAT, [1, 2, 2, 1]
diff --git a/res/TensorFlowTests/UNIT_MaxPool_000/test.pbtxt b/res/TensorFlowTests/UNIT_MaxPool_000/test.pbtxt
new file mode 100644
index 000000000..81da5bbd5
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_MaxPool_000/test.pbtxt
@@ -0,0 +1,81 @@
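+# 2x2 MaxPool, VALID padding, stride 1: [1,3,3,1] -> [1,2,2,1].
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[1,3,3,1], name="placeholder")
+# y = tf.nn.max_pool(x, ksize=[1,2,2,1], strides=[1,1,1,1],
+#                    padding='VALID', name="maxpool2d")
+# print(tf.get_default_graph().as_graph_def())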
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "maxpool2d"
+ op: "MaxPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "data_format"
+ value {
+ s: "NHWC"
+ }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list {
+ i: 1
+ i: 2
+ i: 2
+ i: 1
+ }
+ }
+ }
+ attr {
+ key: "padding"
+ value {
+ s: "VALID"
+ }
+ }
+ attr {
+ key: "strides"
+ value {
+ list {
+ i: 1
+ i: 1
+ i: 1
+ i: 1
+ }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_MaxPool_001/test.info b/res/TensorFlowTests/UNIT_MaxPool_001/test.info
new file mode 100644
index 000000000..513a83c84
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_MaxPool_001/test.info
@@ -0,0 +1,2 @@
+input, placeholder:0, TF_FLOAT, [1, 4, 4, 1]
+output, maxpool2d:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_MaxPool_001/test.pbtxt b/res/TensorFlowTests/UNIT_MaxPool_001/test.pbtxt
new file mode 100644
index 000000000..dfa58d440
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_MaxPool_001/test.pbtxt
@@ -0,0 +1,48 @@
+node {
+ name: "placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 4 }
+ dim { size: 4 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "maxpool2d"
+ op: "MaxPool"
+ input: "placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "data_format"
+ value { s: "NHWC" }
+ }
+ attr {
+ key: "ksize"
+ value {
+ list { i: 1 i: 2 i: 2 i: 1 }
+ }
+ }
+ attr {
+ key: "padding"
+ value { s: "VALID" }
+ }
+ attr {
+ key: "strides"
+ value {
+ list { i: 1 i: 1 i: 1 i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Mean_000/test.info b/res/TensorFlowTests/UNIT_Mean_000/test.info
new file mode 100644
index 000000000..fed1cbd60
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 2]
+output, Mean:0, TF_FLOAT, [1, 1, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_Mean_000/test.pbtxt b/res/TensorFlowTests/UNIT_Mean_000/test.pbtxt
new file mode 100644
index 000000000..8a9f7848f
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_000/test.pbtxt
@@ -0,0 +1,68 @@
+# Reference Python code:
+#
+# import tensorflow as tf
+#
+# inp = tf.placeholder(tf.float32, [1,3,3,2])
+# mean = tf.math.reduce_mean(inp, keepdims=True, axis=[1,2])
+#
+# print(tf.get_default_graph().as_graph_def())
+#
+# WARNING! The GraphDef below was modified to make it easier to read.
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 3 }
+ dim { size: 2 }
+ }
+ }
+ }
+}
+node {
+ name: "Mean/reduction_indices"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim { size: 2 }
+ }
+ int_val: 1
+ int_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "Mean"
+ op: "Mean"
+ input: "Placeholder"
+ input: "Mean/reduction_indices"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "keep_dims"
+ value { b: true }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Mean_001/test.info b/res/TensorFlowTests/UNIT_Mean_001/test.info
new file mode 100644
index 000000000..682e7cb20
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_001/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 2]
+output, Mean:0, TF_FLOAT, [1, 2]
diff --git a/res/TensorFlowTests/UNIT_Mean_001/test.pbtxt b/res/TensorFlowTests/UNIT_Mean_001/test.pbtxt
new file mode 100644
index 000000000..a579b2c9b
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_001/test.pbtxt
@@ -0,0 +1,68 @@
+# Reference Python code:
+#
+# import tensorflow as tf
+#
+# inp = tf.placeholder(tf.float32, [1,3,3,2])
+# mean = tf.math.reduce_mean(inp, keepdims=False, axis=[1,2])
+#
+# print(tf.get_default_graph().as_graph_def())
+#
+# WARNING! The GraphDef below was modified to make it easier to read.
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 3 }
+ dim { size: 2 }
+ }
+ }
+ }
+}
+node {
+ name: "Mean/reduction_indices"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim { size: 2 }
+ }
+ int_val: 1
+ int_val: 2
+ }
+ }
+ }
+}
+node {
+ name: "Mean"
+ op: "Mean"
+ input: "Placeholder"
+ input: "Mean/reduction_indices"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "keep_dims"
+ value { b: false }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Mean_002/test.info b/res/TensorFlowTests/UNIT_Mean_002/test.info
new file mode 100644
index 000000000..682e7cb20
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_002/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 2]
+output, Mean:0, TF_FLOAT, [1, 2]
diff --git a/res/TensorFlowTests/UNIT_Mean_002/test.pbtxt b/res/TensorFlowTests/UNIT_Mean_002/test.pbtxt
new file mode 100644
index 000000000..1d129f9a2
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_002/test.pbtxt
@@ -0,0 +1,68 @@
+# Reference Python code:
+#
+# import tensorflow as tf
+#
+# inp = tf.placeholder(tf.float32, [1,3,3,2])
+# mean = tf.math.reduce_mean(inp, keepdims=False, axis=[-2,-3])
+#
+# print(tf.get_default_graph().as_graph_def())
+#
+# WARNING! The GraphDef below was modified to make it easier to read.
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 3 }
+ dim { size: 2 }
+ }
+ }
+ }
+}
+node {
+ name: "Mean/reduction_indices"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim { size: 2 }
+ }
+ int_val: -2
+ int_val: -3
+ }
+ }
+ }
+}
+node {
+ name: "Mean"
+ op: "Mean"
+ input: "Placeholder"
+ input: "Mean/reduction_indices"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "keep_dims"
+ value { b: false }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Mean_003/test.info b/res/TensorFlowTests/UNIT_Mean_003/test.info
new file mode 100644
index 000000000..fed1cbd60
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_003/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 2]
+output, Mean:0, TF_FLOAT, [1, 1, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_Mean_003/test.pbtxt b/res/TensorFlowTests/UNIT_Mean_003/test.pbtxt
new file mode 100644
index 000000000..383a1f2ea
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mean_003/test.pbtxt
@@ -0,0 +1,68 @@
+# Reference Python code:
+#
+# import tensorflow as tf
+#
+# inp = tf.placeholder(tf.float32, [1,3,3,2])
+# mean = tf.math.reduce_mean(inp, keepdims=True, axis=[-2,-3])
+#
+# print(tf.get_default_graph().as_graph_def())
+#
+# WARNING! The GraphDef below was modified to make it easier to read.
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 3 }
+ dim { size: 2 }
+ }
+ }
+ }
+}
+node {
+ name: "Mean/reduction_indices"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim { size: 2 }
+ }
+ int_val: -2
+ int_val: -3
+ }
+ }
+ }
+}
+node {
+ name: "Mean"
+ op: "Mean"
+ input: "Placeholder"
+ input: "Mean/reduction_indices"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "Tidx"
+ value { type: DT_INT32 }
+ }
+ attr {
+ key: "keep_dims"
+ value { b: true }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Mul_000/test.info b/res/TensorFlowTests/UNIT_Mul_000/test.info
new file mode 100644
index 000000000..b3cb3ff70
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mul_000/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 1]
+input, input_02:0, TF_FLOAT, [1, 3, 3, 1]
+output, mul:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_Mul_000/test.pbtxt b/res/TensorFlowTests/UNIT_Mul_000/test.pbtxt
new file mode 100644
index 000000000..fe6dbc330
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mul_000/test.pbtxt
@@ -0,0 +1,77 @@
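+# Element-wise Mul of two placeholders with the same shape.
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# a = tf.placeholder(tf.float32, shape=[1,3,3,1], name="input_01")
+# b = tf.placeholder(tf.float32, shape=[1,3,3,1], name="input_02")
+# mul = tf.multiply(a, b, name="mul")
+# print(tf.get_default_graph().as_graph_def())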
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "mul"
+ op: "Mul"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Mul_001/test.info b/res/TensorFlowTests/UNIT_Mul_001/test.info
new file mode 100644
index 000000000..9de3a648a
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mul_001/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, [1, 1, 1, 5]
+output, mul:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Mul_001/test.pbtxt b/res/TensorFlowTests/UNIT_Mul_001/test.pbtxt
new file mode 100644
index 000000000..a96c9ae5a
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mul_001/test.pbtxt
@@ -0,0 +1,70 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "mul"
+ op: "Mul"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Mul_002/test.info b/res/TensorFlowTests/UNIT_Mul_002/test.info
new file mode 100644
index 000000000..241ccd9df
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mul_002/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, [5]
+output, mul:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Mul_002/test.pbtxt b/res/TensorFlowTests/UNIT_Mul_002/test.pbtxt
new file mode 100644
index 000000000..9ad65c9f5
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Mul_002/test.pbtxt
@@ -0,0 +1,61 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "mul"
+ op: "Mul"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Placeholder_000/test.info b/res/TensorFlowTests/UNIT_Placeholder_000/test.info
new file mode 100644
index 000000000..0d0a8ccc0
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Placeholder_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 2, 1, 2]
+output, output/identity:0, TF_FLOAT, [1, 2, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_Placeholder_000/test.pbtxt b/res/TensorFlowTests/UNIT_Placeholder_000/test.pbtxt
new file mode 100644
index 000000000..593fbb2d4
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Placeholder_000/test.pbtxt
@@ -0,0 +1,46 @@
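+# Identity pass-through of a placeholder.
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[1,2,1,2], name="Placeholder")
+# y = tf.identity(x, name="output/identity")
+# print(tf.get_default_graph().as_graph_def())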
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "output/identity"
+ op: "Identity"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_RealDiv_000/test.info b/res/TensorFlowTests/UNIT_RealDiv_000/test.info
new file mode 100644
index 000000000..b98f2a338
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_RealDiv_000/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 1]
+input, input_02:0, TF_FLOAT, [1, 3, 3, 1]
+output, div:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_RealDiv_000/test.pbtxt b/res/TensorFlowTests/UNIT_RealDiv_000/test.pbtxt
new file mode 100644
index 000000000..e17d48326
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_RealDiv_000/test.pbtxt
@@ -0,0 +1,77 @@
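+# Element-wise RealDiv of two placeholders with the same shape.
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# a = tf.placeholder(tf.float32, shape=[1,3,3,1], name="input_01")
+# b = tf.placeholder(tf.float32, shape=[1,3,3,1], name="input_02")
+# div = tf.realdiv(a, b, name="div")
+# print(tf.get_default_graph().as_graph_def())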
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "div"
+ op: "RealDiv"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_RealDiv_001/test.info b/res/TensorFlowTests/UNIT_RealDiv_001/test.info
new file mode 100644
index 000000000..ecc4cc5a3
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_RealDiv_001/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, [5]
+output, div:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_RealDiv_001/test.pbtxt b/res/TensorFlowTests/UNIT_RealDiv_001/test.pbtxt
new file mode 100644
index 000000000..4ee3bb898
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_RealDiv_001/test.pbtxt
@@ -0,0 +1,61 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "div"
+ op: "RealDiv"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Relu6_000/test.info b/res/TensorFlowTests/UNIT_Relu6_000/test.info
new file mode 100644
index 000000000..329172400
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Relu6_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 4, 4, 3]
+output, Relu6:0, TF_FLOAT, [1, 4, 4, 3]
diff --git a/res/TensorFlowTests/UNIT_Relu6_000/test.pbtxt b/res/TensorFlowTests/UNIT_Relu6_000/test.pbtxt
new file mode 100644
index 000000000..b2989916c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Relu6_000/test.pbtxt
@@ -0,0 +1,35 @@
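+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[1,4,4,3], name="Placeholder")
+# y = tf.nn.relu6(x, name="Relu6")
+# print(tf.get_default_graph().as_graph_def())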
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 4 }
+ dim { size: 4 }
+ dim { size: 3 }
+ }
+ }
+ }
+}
+node {
+ name: "Relu6"
+ op: "Relu6"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Relu_000/test.info b/res/TensorFlowTests/UNIT_Relu_000/test.info
new file mode 100644
index 000000000..e3e2b176e
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Relu_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 2, 1, 2]
+output, output/relu:0, TF_FLOAT, [1, 2, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_Relu_000/test.pbtxt b/res/TensorFlowTests/UNIT_Relu_000/test.pbtxt
new file mode 100644
index 000000000..96f21d5de
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Relu_000/test.pbtxt
@@ -0,0 +1,45 @@
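+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[1,2,1,2], name="Placeholder")
+# y = tf.nn.relu(x, name="output/relu")
+# print(tf.get_default_graph().as_graph_def())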
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "output/relu"
+ op: "Relu"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Reshape_000/test.info b/res/TensorFlowTests/UNIT_Reshape_000/test.info
new file mode 100644
index 000000000..137cdfa8c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Reshape_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [6]
+output, Reshape:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/UNIT_Reshape_000/test.pbtxt b/res/TensorFlowTests/UNIT_Reshape_000/test.pbtxt
new file mode 100644
index 000000000..a648fb43c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Reshape_000/test.pbtxt
@@ -0,0 +1,70 @@
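+# Reshape a rank-1 tensor of 6 elements into [2, 3].
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API;
+# the shape constant may get a different node name):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[6], name="Placeholder")
+# y = tf.reshape(x, [2, 3], name="Reshape")
+# print(tf.get_default_graph().as_graph_def())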
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 6
+ }
+ }
+ }
+ }
+}
+node {
+ name: "New_shape"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_INT32
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_INT32
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ }
+ int_val: 2
+ int_val: 3
+ }
+ }
+ }
+}
+node {
+ name: "Reshape"
+ op: "Reshape"
+ input: "Placeholder"
+ input: "New_shape"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "Tshape"
+ value {
+ type: DT_INT32
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Rsqrt_000/test.info b/res/TensorFlowTests/UNIT_Rsqrt_000/test.info
new file mode 100644
index 000000000..b87da7e44
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Rsqrt_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 2, 1, 2]
+output, output/rsqrt:0, TF_FLOAT, [1, 2, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_Rsqrt_000/test.pbtxt b/res/TensorFlowTests/UNIT_Rsqrt_000/test.pbtxt
new file mode 100644
index 000000000..babfc3702
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Rsqrt_000/test.pbtxt
@@ -0,0 +1,46 @@
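+# Unary Rsqrt (1/sqrt(x)) applied to a placeholder.
+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[1,2,1,2], name="Placeholder")
+# y = tf.rsqrt(x, name="output/rsqrt")
+# print(tf.get_default_graph().as_graph_def())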
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "output/rsqrt"
+ op: "Rsqrt"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Shape_000/test.info b/res/TensorFlowTests/UNIT_Shape_000/test.info
new file mode 100644
index 000000000..330c6c22c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Shape_000/test.info
@@ -0,0 +1 @@
+output, Shape:0, TF_INT32, [2]
diff --git a/res/TensorFlowTests/UNIT_Shape_000/test.pbtxt b/res/TensorFlowTests/UNIT_Shape_000/test.pbtxt
new file mode 100644
index 000000000..7a6cab72b
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Shape_000/test.pbtxt
@@ -0,0 +1,46 @@
+# Simple Shape network
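+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# c = tf.constant(1.0, shape=[2,3], dtype=tf.float32, name="Const")
+# s = tf.shape(c, name="Shape", out_type=tf.int32)
+# print(tf.get_default_graph().as_graph_def())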
+node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_FLOAT
+ tensor_shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ float_val: 1.0
+ }
+ }
+ }
+}
+node {
+ name: "Shape"
+ op: "Shape"
+ input: "Const"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "out_type"
+ value { type: DT_INT32 }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Softmax_000/test.info b/res/TensorFlowTests/UNIT_Softmax_000/test.info
new file mode 100644
index 000000000..66536b561
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2]
+output, Softmax:0, TF_FLOAT, [2]
diff --git a/res/TensorFlowTests/UNIT_Softmax_000/test.pbtxt b/res/TensorFlowTests/UNIT_Softmax_000/test.pbtxt
new file mode 100644
index 000000000..78451d0b3
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_000/test.pbtxt
@@ -0,0 +1,37 @@
+# Softmax with rank-1 tensor
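+# HOW TO GENERATE (one possible recipe, assuming the TF 1.x API):
+# import tensorflow as tf
+# x = tf.placeholder(tf.float32, shape=[2], name="Placeholder")
+# y = tf.nn.softmax(x, name="Softmax")
+# print(tf.get_default_graph().as_graph_def())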
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Softmax"
+ op: "Softmax"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Softmax_001/test.info b/res/TensorFlowTests/UNIT_Softmax_001/test.info
new file mode 100644
index 000000000..fe582132c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_001/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 3]
+output, Softmax:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/UNIT_Softmax_001/test.pbtxt b/res/TensorFlowTests/UNIT_Softmax_001/test.pbtxt
new file mode 100644
index 000000000..ec6e030bc
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_001/test.pbtxt
@@ -0,0 +1,35 @@
+# Softmax with rank-2 tensor
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Softmax"
+ op: "Softmax"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Softmax_002/test.info b/res/TensorFlowTests/UNIT_Softmax_002/test.info
new file mode 100644
index 000000000..ae4dcd6a4
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_002/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 3, 4]
+output, Softmax:0, TF_FLOAT, [2, 3, 4]
diff --git a/res/TensorFlowTests/UNIT_Softmax_002/test.pbtxt b/res/TensorFlowTests/UNIT_Softmax_002/test.pbtxt
new file mode 100644
index 000000000..e84d73c78
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_002/test.pbtxt
@@ -0,0 +1,38 @@
+# Softmax with rank-3 tensor
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Softmax"
+ op: "Softmax"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Softmax_003/test.info b/res/TensorFlowTests/UNIT_Softmax_003/test.info
new file mode 100644
index 000000000..97b3574f5
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_003/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 2, 3, 4]
+output, Softmax:0, TF_FLOAT, [2, 2, 3, 4]
diff --git a/res/TensorFlowTests/UNIT_Softmax_003/test.pbtxt b/res/TensorFlowTests/UNIT_Softmax_003/test.pbtxt
new file mode 100644
index 000000000..c4b5ca27f
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Softmax_003/test.pbtxt
@@ -0,0 +1,41 @@
+# Softmax with rank-4 tensor
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 4
+ }
+ }
+ }
+ }
+}
+node {
+ name: "Softmax"
+ op: "Softmax"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Sqrt_000/test.info b/res/TensorFlowTests/UNIT_Sqrt_000/test.info
new file mode 100644
index 000000000..9abb96a0c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Sqrt_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 2, 1, 2]
+output, output/sqrt:0, TF_FLOAT, [1, 2, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_Sqrt_000/test.pbtxt b/res/TensorFlowTests/UNIT_Sqrt_000/test.pbtxt
new file mode 100644
index 000000000..0324220e9
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Sqrt_000/test.pbtxt
@@ -0,0 +1,40 @@
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "output/sqrt"
+ op: "Sqrt"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_SquaredDifference_000/test.info b/res/TensorFlowTests/UNIT_SquaredDifference_000/test.info
new file mode 100644
index 000000000..cca181d7d
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_SquaredDifference_000/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 1]
+input, input_02:0, TF_FLOAT, [1, 3, 3, 1]
+output, squared_difference:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_SquaredDifference_000/test.pbtxt b/res/TensorFlowTests/UNIT_SquaredDifference_000/test.pbtxt
new file mode 100644
index 000000000..0fca18f0e
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_SquaredDifference_000/test.pbtxt
@@ -0,0 +1,70 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "squared_difference"
+ op: "SquaredDifference"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_SquaredDifference_001/test.info b/res/TensorFlowTests/UNIT_SquaredDifference_001/test.info
new file mode 100644
index 000000000..8dfd36065
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_SquaredDifference_001/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, [1, 1, 1, 5]
+output, squared_difference:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_SquaredDifference_001/test.pbtxt b/res/TensorFlowTests/UNIT_SquaredDifference_001/test.pbtxt
new file mode 100644
index 000000000..5ef69e22f
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_SquaredDifference_001/test.pbtxt
@@ -0,0 +1,70 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "squared_difference"
+ op: "SquaredDifference"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
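UNIT_SquaredDifference_001 adds broadcasting on top of the elementwise case: the [1, 1, 1, 5] second input is repeated across the [1, 3, 3, 5] first input. A hedged sketch of exactly this pattern (a per-channel broadcast over the last axis only; full NumPy-style broadcasting is more general):

    #include <vector>

    // SquaredDifference with the second operand broadcast along every axis
    // except the last, matching the [1,3,3,5] vs [1,1,1,5] fixture above.
    std::vector<float> squared_difference_bcast(const std::vector<float> &x, // N * channels
                                                const std::vector<float> &y, // channels
                                                size_t channels)
    {
      std::vector<float> out(x.size());
      for (size_t i = 0; i < x.size(); ++i)
      {
        float d = x[i] - y[i % channels];
        out[i] = d * d;
      }
      return out;
    }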
diff --git a/res/TensorFlowTests/UNIT_Squeeze_000/test.info b/res/TensorFlowTests/UNIT_Squeeze_000/test.info
new file mode 100644
index 000000000..fd07403f2
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 4, 4, 3]
+output, Squeeze:0, TF_FLOAT, [4, 4, 3]
diff --git a/res/TensorFlowTests/UNIT_Squeeze_000/test.pbtxt b/res/TensorFlowTests/UNIT_Squeeze_000/test.pbtxt
new file mode 100644
index 000000000..5ad75fca6
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_000/test.pbtxt
@@ -0,0 +1,28 @@
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 1 }
+ dim { size: 4 }
+ dim { size: 4 }
+ dim { size: 3 }
+ }
+ }
+ }
+}
+node {
+ name: "Squeeze"
+ op: "Squeeze"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Squeeze_001/test.info b/res/TensorFlowTests/UNIT_Squeeze_001/test.info
new file mode 100644
index 000000000..f71e97f93
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_001/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 1, 3, 1]
+output, Squeeze:0, TF_FLOAT, [2, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_Squeeze_001/test.pbtxt b/res/TensorFlowTests/UNIT_Squeeze_001/test.pbtxt
new file mode 100644
index 000000000..641300c10
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_001/test.pbtxt
@@ -0,0 +1,35 @@
+# Network with Squeeze that has an explicit squeeze dimension
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 2 }
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "Squeeze"
+ op: "Squeeze"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "squeeze_dims"
+ value {
+ list { i: 1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Squeeze_002/test.info b/res/TensorFlowTests/UNIT_Squeeze_002/test.info
new file mode 100644
index 000000000..401e102aa
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_002/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 1, 3, 1]
+output, Squeeze:0, TF_FLOAT, [2, 1, 3]
diff --git a/res/TensorFlowTests/UNIT_Squeeze_002/test.pbtxt b/res/TensorFlowTests/UNIT_Squeeze_002/test.pbtxt
new file mode 100644
index 000000000..06d09014a
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_002/test.pbtxt
@@ -0,0 +1,35 @@
+# Network with Squeeze that has a negative squeeze dimension
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 2 }
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "Squeeze"
+ op: "Squeeze"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "squeeze_dims"
+ value {
+ list { i: -1 }
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Squeeze_003/test.info b/res/TensorFlowTests/UNIT_Squeeze_003/test.info
new file mode 100644
index 000000000..7cc93f354
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_003/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 1, 3, 1]
+output, Squeeze:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/UNIT_Squeeze_003/test.pbtxt b/res/TensorFlowTests/UNIT_Squeeze_003/test.pbtxt
new file mode 100644
index 000000000..1c760c87f
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Squeeze_003/test.pbtxt
@@ -0,0 +1,35 @@
+# Network with Squeeze that has multiple negative squeeze dimensions
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim { size: 2 }
+ dim { size: 1 }
+ dim { size: 3 }
+ dim { size: 1 }
+ }
+ }
+ }
+}
+node {
+ name: "Squeeze"
+ op: "Squeeze"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value { type: DT_FLOAT }
+ }
+ attr {
+ key: "squeeze_dims"
+ value {
+ list { i: -1 i: -3 }
+ }
+ }
+}
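Taken together, the four UNIT_Squeeze fixtures cover the default behavior (drop every size-1 axis), one explicit axis, one negative axis, and multiple negative axes. A sketch of how squeeze_dims resolves to an output shape, with the expected results from the test.info files shown as comments (a real implementation would also reject a listed axis whose size is not 1):

    #include <cstdint>
    #include <set>
    #include <vector>

    // Resolve a Squeeze output shape. Empty `dims` means "drop all size-1 axes";
    // negative entries count from the back, as in the fixtures above.
    std::vector<int64_t> squeeze_shape(const std::vector<int64_t> &in, std::vector<int> dims)
    {
      std::set<int> drop;
      for (int d : dims)
        drop.insert(d < 0 ? d + static_cast<int>(in.size()) : d);
      std::vector<int64_t> out;
      for (size_t axis = 0; axis < in.size(); ++axis)
      {
        bool squeeze = drop.empty() ? (in[axis] == 1) : (drop.count(static_cast<int>(axis)) > 0);
        if (!squeeze)
          out.push_back(in[axis]);
      }
      return out;
    }
    // squeeze_shape({2, 1, 3, 1}, {})       -> {2, 3}
    // squeeze_shape({2, 1, 3, 1}, {1})      -> {2, 3, 1}
    // squeeze_shape({2, 1, 3, 1}, {-1})     -> {2, 1, 3}
    // squeeze_shape({2, 1, 3, 1}, {-1, -3}) -> {2, 3}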
diff --git a/res/TensorFlowTests/UNIT_StopGradient_000/test.info b/res/TensorFlowTests/UNIT_StopGradient_000/test.info
new file mode 100644
index 000000000..2315306d0
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_StopGradient_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [2, 3]
+output, StopGradient:0, TF_FLOAT, [2, 3]
diff --git a/res/TensorFlowTests/UNIT_StopGradient_000/test.pbtxt b/res/TensorFlowTests/UNIT_StopGradient_000/test.pbtxt
new file mode 100644
index 000000000..dae572323
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_StopGradient_000/test.pbtxt
@@ -0,0 +1,34 @@
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 2
+ }
+ dim {
+ size: 3
+ }
+ }
+ }
+ }
+}
+node {
+ name: "StopGradient"
+ op: "StopGradient"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_StopGradient_001/test.info b/res/TensorFlowTests/UNIT_StopGradient_001/test.info
new file mode 100644
index 000000000..8f87a218f
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_StopGradient_001/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 3, 3, 2]
+output, StopGradient:0, TF_FLOAT, [1, 3, 3, 2]
diff --git a/res/TensorFlowTests/UNIT_StopGradient_001/test.pbtxt b/res/TensorFlowTests/UNIT_StopGradient_001/test.pbtxt
new file mode 100644
index 000000000..95e7c358c
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_StopGradient_001/test.pbtxt
@@ -0,0 +1,40 @@
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "StopGradient"
+ op: "StopGradient"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Sub_000/test.info b/res/TensorFlowTests/UNIT_Sub_000/test.info
new file mode 100644
index 000000000..4a6ba8b83
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Sub_000/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 1]
+input, input_02:0, TF_FLOAT, [1, 3, 3, 1]
+output, sub:0, TF_FLOAT, [1, 3, 3, 1]
diff --git a/res/TensorFlowTests/UNIT_Sub_000/test.pbtxt b/res/TensorFlowTests/UNIT_Sub_000/test.pbtxt
new file mode 100644
index 000000000..61ecd0221
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Sub_000/test.pbtxt
@@ -0,0 +1,70 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+}
+node {
+ name: "sub"
+ op: "Sub"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Sub_001/test.info b/res/TensorFlowTests/UNIT_Sub_001/test.info
new file mode 100644
index 000000000..313581754
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Sub_001/test.info
@@ -0,0 +1,3 @@
+input, input_01:0, TF_FLOAT, [1, 3, 3, 5]
+input, input_02:0, TF_FLOAT, [5]
+output, sub:0, TF_FLOAT, [1, 3, 3, 5]
diff --git a/res/TensorFlowTests/UNIT_Sub_001/test.pbtxt b/res/TensorFlowTests/UNIT_Sub_001/test.pbtxt
new file mode 100644
index 000000000..a0ab396f9
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Sub_001/test.pbtxt
@@ -0,0 +1,61 @@
+node {
+ name: "input_01"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 3
+ }
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "input_02"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 5
+ }
+ }
+ }
+ }
+}
+node {
+ name: "sub"
+ op: "Sub"
+ input: "input_01"
+ input: "input_02"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/res/TensorFlowTests/UNIT_Tanh_000/test.info b/res/TensorFlowTests/UNIT_Tanh_000/test.info
new file mode 100644
index 000000000..6655a9ff6
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Tanh_000/test.info
@@ -0,0 +1,2 @@
+input, Placeholder:0, TF_FLOAT, [1, 2, 1, 2]
+output, output/tanh:0, TF_FLOAT, [1, 2, 1, 2]
diff --git a/res/TensorFlowTests/UNIT_Tanh_000/test.pbtxt b/res/TensorFlowTests/UNIT_Tanh_000/test.pbtxt
new file mode 100644
index 000000000..7b3d28527
--- /dev/null
+++ b/res/TensorFlowTests/UNIT_Tanh_000/test.pbtxt
@@ -0,0 +1,40 @@
+node {
+ name: "Placeholder"
+ op: "Placeholder"
+ attr {
+ key: "dtype"
+ value {
+ type: DT_FLOAT
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ dim {
+ size: 1
+ }
+ dim {
+ size: 2
+ }
+ }
+ }
+ }
+}
+node {
+ name: "output/tanh"
+ op: "Tanh"
+ input: "Placeholder"
+ attr {
+ key: "T"
+ value {
+ type: DT_FLOAT
+ }
+ }
+}
diff --git a/run b/run
deleted file mode 100755
index fba47aa98..000000000
--- a/run
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-NNFW_SCRIPT_RPATH="scripts"
-NNFW_COMMAND_RPATH="${NNFW_SCRIPT_RPATH}/command"
-
-NNFW_PROJECT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-NNFW_SCRIPT_PATH="${NNFW_PROJECT_PATH}/${NNFW_SCRIPT_RPATH}"
-
-function Usage()
-{
- echo "Usage: $0 [COMMAND] ..."
- echo "Command:"
- for file in "$NNFW_COMMAND_RPATH"/*;
- do
- echo " $(basename "$file")"
- done
-}
-
-COMMAND=$1; shift
-
-if [[ -z "${COMMAND}" ]]; then
- Usage
- exit 255
-fi
-
-COMMAND_FILE="${NNFW_PROJECT_PATH}/${NNFW_COMMAND_RPATH}/${COMMAND}"
-
-if [[ ! -f "${COMMAND_FILE}" ]]; then
- echo "ERROR: '${COMMAND}' is not supported"
- exit 255
-fi
-
-export NNFW_PROJECT_PATH
-export NNFW_SCRIPT_PATH
-
-source "${COMMAND_FILE}" "$@"
diff --git a/runtimes/CMakeLists.txt b/runtimes/CMakeLists.txt
index e8ee9ffc7..2f1a972f3 100644
--- a/runtimes/CMakeLists.txt
+++ b/runtimes/CMakeLists.txt
@@ -8,6 +8,6 @@ if(BUILD_PURE_ARM_COMPUTE)
add_subdirectory(pure_arm_compute)
endif()
-if(NOT OBS_BUILD)
+if(BUILD_LOGGING)
add_subdirectory(logging)
-endif(NOT OBS_BUILD)
+endif(BUILD_LOGGING)
diff --git a/runtimes/contrib/CMakeLists.txt b/runtimes/contrib/CMakeLists.txt
new file mode 100644
index 000000000..5ea6cdadd
--- /dev/null
+++ b/runtimes/contrib/CMakeLists.txt
@@ -0,0 +1 @@
+add_subdirectories()
diff --git a/contrib/README.md b/runtimes/contrib/README.md
index 2f8b709eb..2f8b709eb 100644
--- a/contrib/README.md
+++ b/runtimes/contrib/README.md
diff --git a/contrib/TFLiteSharp/README.md b/runtimes/contrib/TFLiteSharp/README.md
index 8e43be618..8e43be618 100644
--- a/contrib/TFLiteSharp/README.md
+++ b/runtimes/contrib/TFLiteSharp/README.md
diff --git a/contrib/TFLiteSharp/TFLiteNative/CMakeLists.txt b/runtimes/contrib/TFLiteSharp/TFLiteNative/CMakeLists.txt
index 8b58aac9c..8b58aac9c 100644
--- a/contrib/TFLiteSharp/TFLiteNative/CMakeLists.txt
+++ b/runtimes/contrib/TFLiteSharp/TFLiteNative/CMakeLists.txt
diff --git a/runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h b/runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h
new file mode 100644
index 000000000..405ca9879
--- /dev/null
+++ b/runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_log.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the License);
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _TFLITE_LOG_H_
+#define _TFLITE_LOG_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif /*__cplusplus*/
+
+#define ERROR 1
+#define WARNING 2
+#define INFO 3
+#define DEBUG 4
+
+#ifdef __TIZEN__
+#include <dlog/dlog.h>
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif // LOG_TAG
+#define LOG_TAG "TFLITE_NATIVE"
+
+#define TFLITE_NATIVE_LOG(log_level, format, args...) \
+ do \
+ { \
+ switch (log_level) \
+ { \
+ case ERROR: \
+ LOGE(format, ##args); break; \
+ case WARNING: \
+ LOGW(format, ##args); break; \
+ default: \
+ LOGI(format, ##args); \
+ } \
+ } while (0)
+#else // __TIZEN__
+#define LEVEL_TO_STR(level) \
+ (((level) == ERROR) \
+ ? "ERROR" \
+ : ((level) == WARNING) \
+ ? "WARNING" \
+ : ((level) == INFO) ? "INFO" : ((level) == DEBUG) ? "DEBUG" : "DEFAULT")
+#define TFLITE_NATIVE_LOG(log_level, format, args...) \
+ do \
+ { \
+ printf("%s: %s: ", LEVEL_TO_STR(log_level), __FILE__); \
+ printf(format, ##args); \
+ printf("\n"); \
+ } while (0)
+#endif // __TIZEN__
+
+#ifdef __cplusplus
+}
+#endif /*__cplusplus*/
+
+#endif /*_TFLITE_LOG_H_*/
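A minimal usage sketch for the macro above, assuming the non-Tizen printf path (on Tizen the same call sites route to dlog). Note that the bare ERROR/WARNING/INFO/DEBUG level names can collide with other headers; a TFLITE_-prefixed set would be safer.

    #include "tflite_log.h"

    int main(void)
    {
      // The model path here is purely illustrative.
      TFLITE_NATIVE_LOG(DEBUG, "loading model from %s", "model.tflite");
      TFLITE_NATIVE_LOG(ERROR, "allocation failed with code %d", -1);
      return 0;
    }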
diff --git a/runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h b/runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h
new file mode 100644
index 000000000..af1947ff0
--- /dev/null
+++ b/runtimes/contrib/TFLiteSharp/TFLiteNative/include/tflite_nativewrapper.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the License);
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _TFLITE_NATIVEWRAPPER_H_
+#define _TFLITE_NATIVEWRAPPER_H_
+
+#include "tensorflow/lite/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/string_util.h"
+#include "tensorflow/lite/tools/mutable_op_resolver.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif /*__cplusplus*/
+
+typedef enum {
+ /** 32-bit signed integer. */
+ INT32 = 1,
+
+ /** 32-bit single precision floating point. */
+ FLOAT32 = 2,
+
+ /** 8-bit unsigned integer. */
+ UINT8 = 3,
+
+ /** 64-bit signed integer. */
+ INT64 = 4
+} TFLiteNativeType;
+
+void tflite_interpreter_setNumThreads(long *interpreterHandle, int numThreads);
+
+long long tflite_flatbuffermodel_BuildFromFile(char *modelPath);
+
+long long tflite_builder_interpreterBuilder(long *modelHandle);
+
+void *tflite_interpreter_run(long *interpreterHandle, void *values, int inputLength, int dataType);
+
+#ifdef __cplusplus
+}
+#endif /*__cplusplus*/
+
+#endif /*_TFLITE_NATIVEWRAPPER_H_*/
diff --git a/runtimes/contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp b/runtimes/contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp
new file mode 100644
index 000000000..0304720f7
--- /dev/null
+++ b/runtimes/contrib/TFLiteSharp/TFLiteNative/src/tflite_nativewrapper.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the License);
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include "tflite_nativewrapper.h"
+#include "tflite_log.h"
+#include <string.h>
+#include <unistd.h>
+#include <errno.h>
+#include <assert.h>
+
+int getNumBytes(TFLiteNativeType dataType)
+{
+ switch (dataType)
+ {
+ case INT32:
+ return 4;
+ case FLOAT32:
+ return 4;
+ case UINT8:
+ return 1;
+ case INT64:
+ return 8;
+ default:
+ return 1;
+ }
+}
+
+/// <summary>
+/// Set the number of threads available to the interpreter.
+/// </summary>
+/// <param name="interpreterHandle">Handle of the interpreter instance.</param>
+/// <param name="numThreads">Number of threads.</param>
+void tflite_interpreter_setNumThreads(long *interpreterHandle, int numThreads)
+{
+ assert(interpreterHandle != nullptr);
+ tflite::Interpreter *interpreter = reinterpret_cast<tflite::Interpreter *>(*interpreterHandle);
+
+ interpreter->SetNumThreads(numThreads);
+
+ TFLITE_NATIVE_LOG(DEBUG, "Number of threads: %d", numThreads);
+ return;
+}
+
+/// <summary>
+/// Creates a Flat Buffer Model from the given .tflite model.
+/// </summary>
+/// <param name="modelPath">Path of the model.</param>
+long long tflite_flatbuffermodel_BuildFromFile(char *modelPath)
+{
+ if (modelPath == nullptr)
+ {
+ TFLITE_NATIVE_LOG(ERROR, "Invalid parameter");
+ return 0;
+ }
+ TFLITE_NATIVE_LOG(DEBUG, "Model Path: %s", modelPath);
+
+ if (access(modelPath, F_OK) == -1)
+ {
+ TFLITE_NATIVE_LOG(ERROR, "Failed to access model [%s]", strerror(errno));
+ return 0;
+ }
+
+ auto model = tflite::FlatBufferModel::BuildFromFile(modelPath);
+
+ TFLITE_NATIVE_LOG(DEBUG, "Successfully loaded model");
+ return reinterpret_cast<long>(model.release());
+}
+
+/// <summary>
+/// Creates an interpreter instance taking the flatbuffer model as input.
+/// </summary>
+/// <param name="modelHandle">Address of the flatbuffer model.</param>
+long long tflite_builder_interpreterBuilder(long *modelHandle)
+{
+ assert(modelHandle != nullptr);
+ tflite::FlatBufferModel *model = reinterpret_cast<tflite::FlatBufferModel *>(*modelHandle);
+
+ tflite::ops::builtin::BuiltinOpResolver resolver;
+ std::unique_ptr<tflite::Interpreter> interpreter;
+
+ TfLiteStatus status = tflite::InterpreterBuilder(*model, resolver)(&interpreter);
+
+ if (status != kTfLiteOk)
+ {
+ TFLITE_NATIVE_LOG(ERROR, "Cannot create interpreter");
+ return 0;
+ }
+ TFLITE_NATIVE_LOG(DEBUG, "Interpreter created");
+ return reinterpret_cast<long>(interpreter.release());
+}
+
+/// <summary>
+/// Runs the inference given the inputs.
+/// </summary>
+/// <param name="interpreterHandle">Address of the interpreter instance.</param>
+/// <param name="values">Input values for the model.</param>
+/// <param name="inputLength">Length of the input.</param>
+/// <param name="dataType">Data type key of the input.</param>
+void *tflite_interpreter_run(long *interpreterHandle, void *values, int inputLength, int dataType)
+{
+ assert(interpreterHandle != nullptr);
+ tflite::Interpreter *interpreter = reinterpret_cast<tflite::Interpreter *>(*interpreterHandle);
+
+ int inputTensorIndex = interpreter->inputs()[0];
+
+ // TODO: Pass the input tensor size as a parameter; it is hardcoded for now.
+ interpreter->ResizeInputTensor(inputTensorIndex, {1, 224, 224, 3});
+
+ if (interpreter->AllocateTensors() != kTfLiteOk)
+ {
+ TFLITE_NATIVE_LOG(ERROR, "Failed to allocate tensors!");
+ return nullptr;
+ }
+
+ float *inputTensorPointer = interpreter->typed_tensor<float>(inputTensorIndex);
+
+ int numBytes = getNumBytes((TFLiteNativeType)dataType);
+
+ memcpy(inputTensorPointer, values, inputLength * numBytes);
+
+ if (interpreter->Invoke() != kTfLiteOk)
+ {
+ TFLITE_NATIVE_LOG(ERROR, "Failed to invoke");
+ }
+
+ float *output = interpreter->typed_output_tensor<float>(0);
+ return output;
+}
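The wrapper hands raw pointers back as integer handles and then takes them again by address, so a native-side caller would look roughly like the sketch below (the model path, thread count, and error handling are placeholders; the real consumer is the C# layer):

    #include "tflite_nativewrapper.h"

    int run_once(void *input, int inputLength)
    {
      long model = (long)tflite_flatbuffermodel_BuildFromFile((char *)"model.tflite");
      if (model == 0)
        return -1; // file missing or unreadable

      long interpreter = (long)tflite_builder_interpreterBuilder(&model);
      if (interpreter == 0)
        return -1;

      tflite_interpreter_setNumThreads(&interpreter, 2);

      // FLOAT32 (== 2) per the TFLiteNativeType enum declared in the header.
      void *output = tflite_interpreter_run(&interpreter, input, inputLength, FLOAT32);
      return output != nullptr ? 0 : -1;
    }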
diff --git a/contrib/TFLiteSharp/TFLiteNative/tflite-native.pc.in b/runtimes/contrib/TFLiteSharp/TFLiteNative/tflite-native.pc.in
index eec103acc..eec103acc 100644
--- a/contrib/TFLiteSharp/TFLiteNative/tflite-native.pc.in
+++ b/runtimes/contrib/TFLiteSharp/TFLiteNative/tflite-native.pc.in
diff --git a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp.sln b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp.sln
index 985466cef..985466cef 100644
--- a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp.sln
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp.sln
diff --git a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.Libraries.cs b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.Libraries.cs
index db8d9f612..db8d9f612 100644
--- a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.Libraries.cs
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.Libraries.cs
diff --git a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.TFLite.cs b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.TFLite.cs
index c7c7b24aa..c7c7b24aa 100644
--- a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.TFLite.cs
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/Interop/Interop.TFLite.cs
diff --git a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/TFLiteSharp.csproj b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/TFLiteSharp.csproj
index e0490bfb8..e0490bfb8 100644
--- a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/TFLiteSharp.csproj
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/TFLiteSharp.csproj
diff --git a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Datatype.cs b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Datatype.cs
index 404d1663e..404d1663e 100644
--- a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Datatype.cs
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Datatype.cs
diff --git a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Interpreter.cs b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Interpreter.cs
index f1b4a8e07..f1b4a8e07 100644
--- a/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Interpreter.cs
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharp/TFLiteSharp/src/Interpreter.cs
diff --git a/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest.sln b/runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest.sln
index e260a72c7..e260a72c7 100644
--- a/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest.sln
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest.sln
diff --git a/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/Program.cs b/runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/Program.cs
index e559bec36..e559bec36 100644
--- a/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/Program.cs
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/Program.cs
diff --git a/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/TFLiteSharpTest.csproj b/runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/TFLiteSharpTest.csproj
index b143ee598..b143ee598 100644
--- a/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/TFLiteSharpTest.csproj
+++ b/runtimes/contrib/TFLiteSharp/TFLiteSharpTest/TFLiteSharpTest/TFLiteSharpTest.csproj
diff --git a/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp.csproj b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp.csproj
index 1c9ed6037..1c9ed6037 100644
--- a/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp.csproj
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp.csproj
diff --git a/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_App.cs b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_App.cs
index 49a08604d..49a08604d 100644
--- a/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_App.cs
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_App.cs
diff --git a/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_Main.cs b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_Main.cs
index 2a8f747a4..2a8f747a4 100644
--- a/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_Main.cs
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/TFLiteTestApp_Main.cs
diff --git a/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mobilenet_v1_1.0_224.tflite b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mobilenet_v1_1.0_224.tflite
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mobilenet_v1_1.0_224.tflite
diff --git a/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse1.bmp b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse1.bmp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse1.bmp
diff --git a/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse_224.bmp b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse_224.bmp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/res/mouse_224.bmp
diff --git a/runtimes/contrib/TFLiteSharp/TFLiteTestApp/shared/res/TFLiteTestApp.png b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/shared/res/TFLiteTestApp.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/shared/res/TFLiteTestApp.png
diff --git a/contrib/TFLiteSharp/TFLiteTestApp/tizen-manifest.xml b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/tizen-manifest.xml
index 62a8d4c7c..62a8d4c7c 100644
--- a/contrib/TFLiteSharp/TFLiteTestApp/tizen-manifest.xml
+++ b/runtimes/contrib/TFLiteSharp/TFLiteTestApp/tizen-manifest.xml
diff --git a/contrib/TFLiteSharp/packaging/TFLiteSharp.manifest b/runtimes/contrib/TFLiteSharp/packaging/TFLiteSharp.manifest
index 75b0fa5e3..75b0fa5e3 100644
--- a/contrib/TFLiteSharp/packaging/TFLiteSharp.manifest
+++ b/runtimes/contrib/TFLiteSharp/packaging/TFLiteSharp.manifest
diff --git a/contrib/TFLiteSharp/packaging/TFLiteSharp.spec b/runtimes/contrib/TFLiteSharp/packaging/TFLiteSharp.spec
index dcb65a864..dcb65a864 100644
--- a/contrib/TFLiteSharp/packaging/TFLiteSharp.spec
+++ b/runtimes/contrib/TFLiteSharp/packaging/TFLiteSharp.spec
diff --git a/contrib/TFLiteSharp/packaging/tflite-native.manifest b/runtimes/contrib/TFLiteSharp/packaging/tflite-native.manifest
index 75b0fa5e3..75b0fa5e3 100644
--- a/contrib/TFLiteSharp/packaging/tflite-native.manifest
+++ b/runtimes/contrib/TFLiteSharp/packaging/tflite-native.manifest
diff --git a/runtimes/contrib/android_tflite/CMakeLists.txt b/runtimes/contrib/android_tflite/CMakeLists.txt
new file mode 100644
index 000000000..7e8d211cf
--- /dev/null
+++ b/runtimes/contrib/android_tflite/CMakeLists.txt
@@ -0,0 +1,45 @@
+if(NOT BUILD_ANDROID_TFLITE)
+ return()
+endif(NOT BUILD_ANDROID_TFLITE)
+
+nnfw_find_package(TensorFlowLite REQUIRED)
+
+# TODO Set this as system-level global configuration on android build
+if(NOT DEFINED NDK_DIR)
+ file(GLOB NDK_DIRS "${CMAKE_SOURCE_DIR}/tools/cross/ndk/*")
+ list(LENGTH NDK_DIRS NDK_DIRS_COUNT)
+ if (NDK_DIRS_COUNT EQUAL 1)
+ set(NDK_DIR "${NDK_DIRS}")
+ endif(NDK_DIRS_COUNT EQUAL 1)
+endif(NOT DEFINED NDK_DIR)
+
+if(NOT DEFINED NDK_DIR)
+ message(FATAL_ERROR "NDK_DIR should be specified via environment variable")
+endif()
+message(STATUS "Found NDK: ${NDK_DIR}")
+target_include_directories(tensorflow-lite SYSTEM PUBLIC "${NDK_DIR}")
+target_link_libraries(tensorflow-lite log)
+
+#
+# Tensorflow Lite JNI library
+#
+set(TFLITE_JNI_BASE ${TENSORFLOW_LITE_BASE}/java/src/main/native)
+set(TFLITE_JNI_SRCS ${TFLITE_JNI_BASE}/duration_utils_jni.cc
+ ${TFLITE_JNI_BASE}/exception_jni.cc
+ ${TFLITE_JNI_BASE}/nativeinterpreterwrapper_jni.cc
+ ${TFLITE_JNI_BASE}/tensor_jni.cc
+ ${TFLITE_JNI_BASE}/tensorflow_lite_jni.cc
+ ${TFLITE_JNI_BASE}/builtin_ops_jni.cc
+ )
+set(TFLITE_JNI_INCLUDES ${TENSORFLOW_LITE_BASE}/java/src/native)
+
+# We need this for running vanilla tflite
+# TODO remove this when nnfw is used
+set(TFLITE_SRCS_V ${TENSORFLOW_LITE_BASE}/kernels/register.cc)
+
+# TODO use tensorflow-lite static library instead of compiling all the sources again
+add_library(tensorflowlite_jni SHARED ${TFLITE_JNI_SRCS} ${TFLITE_SRCS} ${TFLITE_SRCS_V})
+target_include_directories(tensorflowlite_jni PUBLIC ${TFLITE_JNI_INCLUDES} ${TFLITE_INCLUDES})
+target_link_libraries(tensorflowlite_jni eigen ${LIB_PTHREAD} dl)
+target_link_libraries(tensorflowlite_jni log)
+install(TARGETS tensorflowlite_jni DESTINATION lib)
diff --git a/runtimes/contrib/benchmark_acl/.FORMATDENY b/runtimes/contrib/benchmark_acl/.FORMATDENY
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/runtimes/contrib/benchmark_acl/.FORMATDENY
diff --git a/contrib/benchmark_acl/CMakeLists.txt b/runtimes/contrib/benchmark_acl/CMakeLists.txt
index b5fa3e529..b5fa3e529 100644
--- a/contrib/benchmark_acl/CMakeLists.txt
+++ b/runtimes/contrib/benchmark_acl/CMakeLists.txt
diff --git a/runtimes/contrib/benchmark_acl/src/Benchmark.cpp b/runtimes/contrib/benchmark_acl/src/Benchmark.cpp
new file mode 100644
index 000000000..4a761ec76
--- /dev/null
+++ b/runtimes/contrib/benchmark_acl/src/Benchmark.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Benchmark.h"
+
+#include <cstdlib>
+
+Count::Count() : _value(1)
+{
+ auto env = std::getenv("COUNT");
+
+ if (env)
+ {
+ _value = std::strtol(env, NULL, 0);
+ }
+}
+
+uint32_t Count::value(void) const { return _value; }
+
+#include <boost/accumulators/accumulators.hpp>
+#include <boost/accumulators/statistics/stats.hpp>
+#include <boost/accumulators/statistics/mean.hpp>
+
+#include <iostream>
+#include <chrono>
+
+using namespace boost::accumulators;
+
+void run_benchmark(arm_compute::graph::frontend::Stream &graph)
+{
+ // NOTE Here the number of warming-up iterations is hardcoded
+ // TODO Decide the number of warming-up iterations appropriately
+ for (uint32_t n = 0; n < 3; ++n)
+ {
+ auto beg = std::chrono::steady_clock::now();
+ graph.run();
+ auto end = std::chrono::steady_clock::now();
+ auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - beg);
+
+ std::cout << "Warming-up " << n << ": " << elapsed.count() << "ms" << std::endl;
+ }
+
+ accumulator_set<double, stats<tag::mean>> acc;
+
+ const Count count;
+
+ for (uint32_t n = 0; n < count.value(); ++n)
+ {
+ auto beg = std::chrono::steady_clock::now();
+ graph.run();
+ auto end = std::chrono::steady_clock::now();
+ auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - beg);
+
+ std::cout << "Iteration " << n << ": " << elapsed.count() << "ms" << std::endl;
+
+ acc(elapsed.count());
+ }
+
+ std::cout << "--------" << std::endl;
+ std::cout << "Mean: " << mean(acc) << "ms" << std::endl;
+}
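run_benchmark above hardcodes three warm-up iterations and reads the measured iteration count from the COUNT environment variable via the Count helper. The Boost accumulator is only used to compute a mean, so the same warm-up-then-average idiom can be written dependency-free; a sketch:

    #include <chrono>
    #include <iostream>

    // Time a callable: a few unrecorded warm-up runs, then report the mean.
    template <typename F> void measure(F &&fn, int warmups = 3, int iterations = 10)
    {
      for (int n = 0; n < warmups; ++n)
        fn();

      double total_ms = 0.0;
      for (int n = 0; n < iterations; ++n)
      {
        auto beg = std::chrono::steady_clock::now();
        fn();
        auto end = std::chrono::steady_clock::now();
        total_ms += std::chrono::duration_cast<std::chrono::milliseconds>(end - beg).count();
      }
      std::cout << "Mean: " << total_ms / iterations << "ms" << std::endl;
    }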
diff --git a/contrib/benchmark_acl/src/Benchmark.h b/runtimes/contrib/benchmark_acl/src/Benchmark.h
index 200f40952..200f40952 100644
--- a/contrib/benchmark_acl/src/Benchmark.h
+++ b/runtimes/contrib/benchmark_acl/src/Benchmark.h
diff --git a/contrib/benchmark_acl/src/benchmark_googlenet.cpp b/runtimes/contrib/benchmark_acl/src/benchmark_googlenet.cpp
index 8b0fbfdac..8b0fbfdac 100644
--- a/contrib/benchmark_acl/src/benchmark_googlenet.cpp
+++ b/runtimes/contrib/benchmark_acl/src/benchmark_googlenet.cpp
diff --git a/contrib/benchmark_acl/src/benchmark_inception_v3.cpp b/runtimes/contrib/benchmark_acl/src/benchmark_inception_v3.cpp
index 382851f50..382851f50 100644
--- a/contrib/benchmark_acl/src/benchmark_inception_v3.cpp
+++ b/runtimes/contrib/benchmark_acl/src/benchmark_inception_v3.cpp
diff --git a/contrib/benchmark_acl/src/benchmark_mobilenet.cpp b/runtimes/contrib/benchmark_acl/src/benchmark_mobilenet.cpp
index 085be184e..085be184e 100644
--- a/contrib/benchmark_acl/src/benchmark_mobilenet.cpp
+++ b/runtimes/contrib/benchmark_acl/src/benchmark_mobilenet.cpp
diff --git a/runtimes/contrib/custom_op/README.md b/runtimes/contrib/custom_op/README.md
new file mode 100644
index 000000000..7815ce9d5
--- /dev/null
+++ b/runtimes/contrib/custom_op/README.md
@@ -0,0 +1,25 @@
+This document is about custom operators.
+
+# Introduction
+
+# Requirements
+
+- [ ] Support Tizen in-house custom op developers
+- [ ] Minimize exposed headers (structures and functions)
+- [ ] Provide acceptable performance
+
+# Design
+
+## Design
+
+### Workflow
+
+![](customOp-workflow.png)
+
+## Candidate Architecture 1
+
+## Candidate Architecture 2
+
+## Discussion
+
+# Conclusion
diff --git a/runtimes/contrib/custom_op/customOp-workflow.png b/runtimes/contrib/custom_op/customOp-workflow.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/runtimes/contrib/custom_op/customOp-workflow.png
diff --git a/contrib/detection/CMakeLists.txt b/runtimes/contrib/detection/CMakeLists.txt
index 37d91b527..37d91b527 100644
--- a/contrib/detection/CMakeLists.txt
+++ b/runtimes/contrib/detection/CMakeLists.txt
diff --git a/runtimes/contrib/detection/detection.cpp b/runtimes/contrib/detection/detection.cpp
new file mode 100644
index 000000000..8fe78ca57
--- /dev/null
+++ b/runtimes/contrib/detection/detection.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <tensorflow/core/public/session.h>
+
+#include <iostream>
+#include <stdexcept>
+
+#include <cassert>
+#include <cstring>
+
+#include "misc/benchmark.h"
+
+#define CHECK_TF(e) \
+ { \
+ if (!(e).ok()) \
+ { \
+ throw std::runtime_error{"'" #e "' FAILED"}; \
+ } \
+ }
+
+int main(int argc, char **argv)
+{
+ if (argc < 2)
+ {
+ std::cerr << "USAGE: " << argv[0] << " [T/F model path] [output 0] [output 1] ..." << std::endl;
+ return 255;
+ }
+
+ std::vector<std::string> output_nodes;
+
+ for (int argn = 2; argn < argc; ++argn)
+ {
+ output_nodes.emplace_back(argv[argn]);
+ }
+
+ tensorflow::Session *sess;
+
+ CHECK_TF(tensorflow::NewSession(tensorflow::SessionOptions(), &sess));
+
+ tensorflow::GraphDef graph_def;
+
+ CHECK_TF(ReadBinaryProto(tensorflow::Env::Default(), argv[1], &graph_def));
+ CHECK_TF(sess->Create(graph_def));
+
+ tensorflow::Tensor input(tensorflow::DT_FLOAT, tensorflow::TensorShape({1, 320, 320, 3}));
+ std::vector<tensorflow::Tensor> outputs;
+
+ for (uint32_t n = 0; n < 5; ++n)
+ {
+ std::chrono::milliseconds elapsed(0);
+
+ nnfw::misc::benchmark::measure(elapsed) << [&](void) {
+ CHECK_TF(sess->Run({{"input_node", input}}, output_nodes, {}, &outputs));
+ };
+
+ std::cout << "Takes " << elapsed.count() << "ms" << std::endl;
+ }
+
+ return 0;
+}
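One caveat about the harness above: the input tensor is constructed but never written before sess->Run, so the model sees indeterminate data, which is acceptable for pure timing but not for checking outputs. If deterministic inputs are wanted, the standard Eigen-backed accessor can zero (or fill) the buffer first, e.g.:

    // Before the measurement loop, inside main():
    input.flat<float>().setZero(); // or fill input.flat<float>().data() with image data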
diff --git a/contrib/labs/CMakeLists.txt b/runtimes/contrib/labs/CMakeLists.txt
index 57e28c11a..57e28c11a 100644
--- a/contrib/labs/CMakeLists.txt
+++ b/runtimes/contrib/labs/CMakeLists.txt
diff --git a/contrib/labs/jniacl/CMakeLists.txt b/runtimes/contrib/labs/jniacl/CMakeLists.txt
index f66127b84..f66127b84 100644
--- a/contrib/labs/jniacl/CMakeLists.txt
+++ b/runtimes/contrib/labs/jniacl/CMakeLists.txt
diff --git a/runtimes/contrib/labs/jniacl/src/io_accessor.cc b/runtimes/contrib/labs/jniacl/src/io_accessor.cc
new file mode 100644
index 000000000..076c93f3d
--- /dev/null
+++ b/runtimes/contrib/labs/jniacl/src/io_accessor.cc
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Copyright (c) 2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "io_accessor.h"
+#include <ostream>
+#include <android/log.h>
+
+bool InputAccessor::access_tensor(arm_compute::ITensor &tensor)
+{
+ // Fill the input tensor with generated test values
+ arm_compute::Window window;
+ window.use_tensor_dimensions(tensor.info()->tensor_shape());
+
+ execute_window_loop(window, [&](const arm_compute::Coordinates &id) {
+ *reinterpret_cast<float *>(tensor.ptr_to_element(id)) = _test_input;
+ _test_input += _inc ? 1.0 : 0.0;
+
+ __android_log_print(ANDROID_LOG_DEBUG, "LOG_TAG", "Input %d, %d = %lf\r\n", id.y(), id.x(),
+ *reinterpret_cast<float *>(tensor.ptr_to_element(id)));
+ });
+ return true;
+}
+
+bool OutputAccessor::access_tensor(arm_compute::ITensor &tensor)
+{
+ // Print each output element for inspection
+ arm_compute::Window window;
+ window.use_tensor_dimensions(tensor.info()->tensor_shape());
+
+ execute_window_loop(window, [&](const arm_compute::Coordinates &id) {
+ __android_log_print(ANDROID_LOG_DEBUG, "Output", "Input %d, %d = %lf\r\n", id.y(), id.x(),
+ *reinterpret_cast<float *>(tensor.ptr_to_element(id)));
+ });
+ return false; // end the network
+}
+
+bool WeightAccessor::access_tensor(arm_compute::ITensor &tensor)
+{
+ // Fill the weight tensor with generated test values
+ arm_compute::Window window;
+ window.use_tensor_dimensions(tensor.info()->tensor_shape());
+
+ execute_window_loop(window, [&](const arm_compute::Coordinates &id) {
+ *reinterpret_cast<float *>(tensor.ptr_to_element(id)) = _test_weight;
+ _test_weight += _inc ? 1.0 : 0.0;
+ });
+ return true;
+}
+
+bool BiasAccessor::access_tensor(arm_compute::ITensor &tensor)
+{
+ // Zero-fill the bias tensor
+ arm_compute::Window window;
+ window.use_tensor_dimensions(tensor.info()->tensor_shape());
+
+ execute_window_loop(window, [&](const arm_compute::Coordinates &id) {
+ *reinterpret_cast<float *>(tensor.ptr_to_element(id)) = 0.0;
+ });
+ return true;
+}
diff --git a/runtimes/contrib/labs/jniacl/src/io_accessor.h b/runtimes/contrib/labs/jniacl/src/io_accessor.h
new file mode 100644
index 000000000..bc4376644
--- /dev/null
+++ b/runtimes/contrib/labs/jniacl/src/io_accessor.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Copyright (c) 2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __IO_ACCESSOR_H__
+#define __IO_ACCESSOR_H__
+
+#include <arm_compute/graph/ITensorAccessor.h>
+
+class InputAccessor : public arm_compute::graph::ITensorAccessor
+{
+public:
+ InputAccessor(bool inc) : _inc(inc) { _test_input = 1.0; }
+ InputAccessor(InputAccessor &&) = default;
+
+ // Inherited methods overriden:
+ bool access_tensor(arm_compute::ITensor &tensor) override;
+
+private:
+ bool _inc;
+ float _test_input;
+};
+
+class OutputAccessor : public arm_compute::graph::ITensorAccessor
+{
+public:
+ OutputAccessor() = default;
+ OutputAccessor(OutputAccessor &&) = default;
+
+ // Inherited methods overriden:
+ bool access_tensor(arm_compute::ITensor &tensor) override;
+};
+
+class WeightAccessor : public arm_compute::graph::ITensorAccessor
+{
+public:
+ WeightAccessor(bool inc) : _inc(inc) { _test_weight = 1.0; }
+ WeightAccessor(WeightAccessor &&) = default;
+
+ // Inherited methods overriden:
+ bool access_tensor(arm_compute::ITensor &tensor) override;
+
+private:
+ bool _inc;
+ float _test_weight;
+};
+
+class BiasAccessor : public arm_compute::graph::ITensorAccessor
+{
+public:
+ BiasAccessor() = default;
+ BiasAccessor(BiasAccessor &&) = default;
+
+ // Inherited methods overriden:
+ bool access_tensor(arm_compute::ITensor &tensor) override;
+};
+
+#endif // __IO_ACCESSOR_H__
diff --git a/runtimes/contrib/labs/jniacl/src/jniacl_main.cc b/runtimes/contrib/labs/jniacl/src/jniacl_main.cc
new file mode 100644
index 000000000..4e5f10d1f
--- /dev/null
+++ b/runtimes/contrib/labs/jniacl/src/jniacl_main.cc
@@ -0,0 +1,37 @@
+#include <jni.h>
+#include <string>
+
+#include <arm_compute/graph/Graph.h>
+#include <arm_compute/graph/Nodes.h>
+
+#include "io_accessor.h"
+
+extern "C" JNIEXPORT jstring JNICALL
+Java_com_samsung_testaclexec_ActivityMain_RunACLJNI(JNIEnv *env, jobject)
+{
+ using arm_compute::DataType;
+ using arm_compute::graph::Tensor;
+ using arm_compute::graph::TargetHint;
+ using arm_compute::graph::Graph;
+ using arm_compute::TensorInfo;
+ using arm_compute::TensorShape;
+
+ arm_compute::graph::Graph graph;
+ TargetHint target_hint = TargetHint::OPENCL;
+ bool autoinc = true;
+
+ graph << target_hint << Tensor(TensorInfo(TensorShape(3U, 3U, 1U, 1U), 1, DataType::F32),
+ std::unique_ptr<InputAccessor>(new InputAccessor(autoinc)))
+ << arm_compute::graph::ConvolutionLayer(
+ 3U, 3U, 1U, std::unique_ptr<WeightAccessor>(new WeightAccessor(autoinc)),
+ std::unique_ptr<BiasAccessor>(new BiasAccessor()),
+ arm_compute::PadStrideInfo(1, 1, 0, 0))
+ << Tensor(std::unique_ptr<OutputAccessor>(new OutputAccessor()));
+ ;
+
+ graph.run();
+
+ std::string hello = "SoftMax Run OK";
+
+ return env->NewStringUTF(hello.c_str());
+}
diff --git a/runtimes/contrib/labs/opencl_test/CMakeLists.txt b/runtimes/contrib/labs/opencl_test/CMakeLists.txt
new file mode 100644
index 000000000..dc8f5f661
--- /dev/null
+++ b/runtimes/contrib/labs/opencl_test/CMakeLists.txt
@@ -0,0 +1,11 @@
+if(NOT ${TARGET_ARCH_BASE} STREQUAL "arm")
+ return()
+endif(NOT ${TARGET_ARCH_BASE} STREQUAL "arm")
+
+list(APPEND OPENCL_INFO_SOURCE "src/opencl_test.cc")
+
+nnfw_find_package(ARMCompute REQUIRED)
+
+add_executable(opencl_test ${OPENCL_INFO_SOURCE})
+target_link_libraries(opencl_test arm_compute)
+target_link_libraries(opencl_test arm_compute_ex)
diff --git a/contrib/labs/opencl_test/README.md b/runtimes/contrib/labs/opencl_test/README.md
index 950528f81..950528f81 100644
--- a/contrib/labs/opencl_test/README.md
+++ b/runtimes/contrib/labs/opencl_test/README.md
diff --git a/runtimes/contrib/labs/opencl_test/src/opencl_test.cc b/runtimes/contrib/labs/opencl_test/src/opencl_test.cc
new file mode 100644
index 000000000..1faa91478
--- /dev/null
+++ b/runtimes/contrib/labs/opencl_test/src/opencl_test.cc
@@ -0,0 +1,386 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*******************************************************************************
+ * Copyright (c) 2008-2015 The Khronos Group Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ ******************************************************************************/
+
+#include "arm_compute/core/CL/OpenCL.h"
+
+#include <iostream>
+#include <vector>
+
+void printDeviceInfo(int n, cl::Device &device, cl::Device &default_device)
+{
+ bool is_default = (device() == default_device());
+ std::cout << "\t\t\t#" << n << " Device: (id: " << device() << ") "
+ << (is_default ? " -> default" : "") << "\n";
+
+ const auto name = device.getInfo<CL_DEVICE_NAME>();
+ std::cout << "\t\t\t\tName: " << name << "\n";
+
+ const auto compute_unit = device.getInfo<CL_DEVICE_MAX_COMPUTE_UNITS>();
+ std::cout << "\t\t\t\tMax Compute Unit: " << compute_unit << "\n";
+
+ const auto max_work_item_size = device.getInfo<CL_DEVICE_MAX_WORK_ITEM_SIZES>();
+ std::cout << "\t\t\t\tMax Work Item Size: [";
+ for (auto size : max_work_item_size)
+ std::cout << size << ",";
+ std::cout << "]\n";
+
+ const auto max_work_group_size = device.getInfo<CL_DEVICE_MAX_WORK_GROUP_SIZE>();
+ std::cout << "\t\t\t\tMax Work Group Size: " << max_work_group_size << "\n";
+
+ const auto max_clock_frequency = device.getInfo<CL_DEVICE_MAX_CLOCK_FREQUENCY>();
+ std::cout << "\t\t\t\tMax Clock Frequency: " << max_clock_frequency << "\n";
+
+ std::cout << "\n";
+}
+
+class OpenCLGpu
+{
+public:
+ cl::Platform platform_;
+ cl::Context context_;
+ cl::vector<cl::Device> devices_;
+ std::vector<cl::CommandQueue *> q_;
+ cl::Program program_;
+
+ OpenCLGpu()
+ {
+ cl_int cl_error;
+
+ platform_ = cl::Platform::getDefault();
+
+ try
+ {
+ cl_context_properties properties[3] = {CL_CONTEXT_PLATFORM,
+ (cl_context_properties)platform_(), 0};
+
+ context_ = cl::Context(CL_DEVICE_TYPE_GPU, properties, NULL, NULL, &cl_error);
+ }
+ catch (cl::Error &err) // thrown when there is no Context for this platform
+ {
+ std::cout << "\t\t No Context Found\n";
+ return;
+ }
+
+ devices_ = context_.getInfo<CL_CONTEXT_DEVICES>();
+
+ for (int dev_id = 0; dev_id < devices_.size(); dev_id++)
+ {
+ cl::CommandQueue *que = new cl::CommandQueue(context_, devices_[dev_id]);
+ q_.emplace_back(que);
+ }
+ }
+
+ ~OpenCLGpu()
+ {
+ for (auto each_q : q_)
+ delete each_q;
+ }
+
+ void buildProgram(std::string &kernel_source_code)
+ {
+ std::vector<std::string> programStrings{kernel_source_code};
+
+ program_ = cl::Program(context_, programStrings);
+
+ try
+ {
+ program_.build("-cl-std=CL1.2");
+ }
+ catch (cl::Error &err)
+ {
+ cl_int buildErr = CL_SUCCESS;
+ auto buildInfo = program_.getBuildInfo<CL_PROGRAM_BUILD_LOG>(&buildErr);
+ for (auto &pair : buildInfo)
+ {
+ std::cerr << pair.second << std::endl << std::endl;
+ }
+ }
+ }
+};
+
+void checkContextMem()
+{
+ cl_int cl_error;
+
+ // get context, devices
+ //
+ std::cout << "\nChecking if devices in GPU share the same memory address:\n\n";
+
+ OpenCLGpu gpu;
+
+ std::cout << "\nDevices in GPU:\n\n";
+
+ auto &devices = gpu.devices_;
+ auto default_device = cl::Device::getDefault();
+
+ int d = 0;
+ for (auto device : devices)
+ printDeviceInfo(++d, device, default_device);
+
+ if (d < 2)
+ {
+ std::cout << "\t\t This option works when there are n (>= 2) devices.\n";
+ return;
+ }
+
+ // allocate and map memory
+
+ typedef cl_int T;
+ const int items_per_device = 128;
+ const int length = items_per_device * devices.size();
+
+ std::vector<T> input(length);
+ std::vector<T> output(length, 0);
+
+ for (int i = 0; i < length; i++)
+ input[i] = i;
+
+ cl::Buffer input_buf(gpu.context_, (cl_mem_flags)CL_MEM_USE_HOST_PTR, length * sizeof(T),
+ input.data(), &cl_error);
+ cl::Buffer output_buf(gpu.context_, (cl_mem_flags)CL_MEM_USE_HOST_PTR, length * sizeof(T),
+ output.data(), &cl_error);
+
+ // compile test cl code
+
+ std::string kernel_source{"typedef int T; \n"
+ "kernel void memory_test( \n"
+ " const int dev_id, \n"
+ " global T* input, \n"
+ " global T* output, \n"
+ " const int start_idx, \n"
+ " const int count) \n"
+ "{ \n"
+ " int input_idx = get_global_id(0); \n"
+ " if(input_idx < count) \n"
+ " { \n"
+ " int output_idx = start_idx + input_idx; \n"
+ " output[output_idx] = input[input_idx] + dev_id; \n"
+ " } \n"
+ "} \n"};
+
+ gpu.buildProgram(kernel_source);
+
+ try
+ {
+ auto kernel_functor = cl::KernelFunctor<cl_int, cl::Buffer, cl::Buffer, cl_int, cl_int>(
+ gpu.program_, "memory_test"); // name must match the CL kernel function name
+
+ // create a queue per device and queue a kernel job
+
+ for (int dev_id = 0; dev_id < devices.size(); dev_id++)
+ {
+ kernel_functor(cl::EnqueueArgs(*(gpu.q_[dev_id]), cl::NDRange(items_per_device)),
+ (cl_int)dev_id, // dev id
+ input_buf, output_buf,
+ (cl_int)(items_per_device * dev_id), // start index
+ (cl_int)(items_per_device), // count
+ cl_error);
+ }
+
+ // sync
+
+ for (d = 0; d < devices.size(); d++)
+ (gpu.q_[d])->finish();
+
+ // check if memory state changed by all devices
+
+ cl::copy(*(gpu.q_[0]), output_buf, begin(output), end(output));
+
+ bool use_same_memory = true;
+
+ for (int dev_id = 0; dev_id < devices.size(); dev_id++)
+ {
+ for (int i = 0; i < items_per_device; ++i)
+ {
+ int output_idx = items_per_device * dev_id + i;
+ if (output[output_idx] != input[i] + dev_id)
+ {
+ std::cout << "Output[" << output_idx << "] : "
+ << "expected = " << input[i] + dev_id << "; actual = " << output[output_idx]
+ << "\n";
+ use_same_memory = false;
+ break;
+ }
+ }
+ }
+
+ if (use_same_memory)
+ std::cout << "\n=> Mapped memory addresses used by devices in GPU are the same.\n\n";
+ else
+ std::cout << "\n=> Mapped memory addresses used by devices in GPU are different.\n\n";
+ }
+ catch (cl::Error &err)
+ {
+ std::cerr << "error: code: " << err.err() << ", what: " << err.what() << std::endl;
+ }
+}
+
+void printHelp()
+{
+ std::cout << "opencl information: \n\n";
+ std::cout << "\t -h : help\n";
+ std::cout
+ << "\t -g : print if memory map is shared among devices in GPU (in default platform)\n\n";
+ std::cout << "\t -s : test for synchronized work by two devices in a GPU\n\n";
+}
+
+#include <mutex>
+#include <chrono>
+#include <thread>
+#include <condition_variable>
+
+#define MAX_DEVICE_NUM 8 // just for testing
+
+int kernel_idx[MAX_DEVICE_NUM];
+unsigned char kernel_completed = 0x00; // bit d is set once device[d]'s kernel has completed
+unsigned char kernel_completed_flag; // when kernel_completed equals this mask, all kernels are done
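+// e.g. with 3 devices, kernel_completed_flag == 0b111; the main thread wakes up
+// once every device's callback has set its bit in kernel_completed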
+int device_num;
+std::mutex kernel_complete_handler_mutex;
+
+std::condition_variable wakeup_main;
+
+void notifyKernelFinished(cl_event ev, cl_int ev_info, void *device_idx)
+{
+ std::cout << "callback from device[" << *((int *)device_idx) << "] : ==> completed.\n";
+
+ std::unique_lock<std::mutex> lock(kernel_complete_handler_mutex);
+
+ kernel_completed |= 0x01 << *((int *)device_idx);
+ if (kernel_completed == kernel_completed_flag)
+ wakeup_main.notify_one();
+}
+
+void testSync()
+{
+ OpenCLGpu gpu;
+
+ cl_int cl_error;
+ typedef cl_float T; // the kernel below writes float values
+ const int items_per_device = 1024 * 768;
+ const int length = items_per_device * gpu.devices_.size();
+
+ std::vector<T> output(length, 0);
+
+ cl::Buffer output_buf(gpu.context_, (cl_mem_flags)CL_MEM_USE_HOST_PTR, length * sizeof(T),
+ output.data(), &cl_error);
+
+ std::string kernel_source{"kernel void test(global float* output, const int count) \n"
+ "{ \n"
+ " int idx = get_global_id(0); \n"
+ " if(idx < count) \n"
+ " { \n"
+ " float x = hypot(idx/1.111, idx*1.111); \n"
+ " for (int y = 0; y < 200; y++) \n"
+ " x = rootn(log(pown(rootn(log(pown(x, 20)), 5), 20)), 5); \n"
+ " output[idx] = x; \n"
+ " } \n"
+ "} \n"};
+
+ gpu.buildProgram(kernel_source);
+
+ try
+ {
+ auto kernel_functor = cl::KernelFunctor<cl::Buffer, cl_int>(
+ gpu.program_, "test"); // name should be same as cl function name
+
+ // variable init
+ cl::Event ev[MAX_DEVICE_NUM];
+
+ device_num = gpu.devices_.size();
+
+ kernel_completed = 0;
+ kernel_completed_flag = 0;
+ for (int i = 0; i < device_num; i++)
+ {
+ kernel_idx[i] = i;
+ kernel_completed_flag |= 0x01 << i;
+ }
+
+ // create a queue per device and queue a kernel job
+ // queueing with callback function
+ for (int dev_id = 0; dev_id < gpu.devices_.size(); dev_id++)
+ {
+ ev[dev_id] = kernel_functor(cl::EnqueueArgs(*(gpu.q_[dev_id]), cl::NDRange(items_per_device)),
+ output_buf,
+ (cl_int)(items_per_device), // count
+ cl_error);
+ ev[dev_id].setCallback(CL_COMPLETE, notifyKernelFinished, (void *)(kernel_idx + dev_id));
+
+ // how to check kernel execution status
+ //
+ // auto status = ev[dev_id].getInfo<CL_EVENT_COMMAND_EXECUTION_STATUS>();
+ // std::cout << "Event status = " << (status == CL_QUEUED ? "CL_QUEUED" : status ==
+ // CL_SUBMITTED ? "CL_SUBMITTED" : status == CL_COMPLETE ? "CL_COMPLETE" : "unknown")
+ // << std::endl;
+ // std::cout << "Event status code = " << status << std::endl;
+ }
+
+ // wait until all kernels are done; guard kernel_completed with the same
+ // mutex the callbacks use so the predicate read is race-free
+ {
+ std::unique_lock<std::mutex> lk(kernel_complete_handler_mutex);
+ wakeup_main.wait(lk, [] { return (kernel_completed == kernel_completed_flag); });
+
+ std::cout << "all devices were completed.\n";
+ }
+ }
+ catch (cl::Error &err)
+ {
+ std::cerr << "error: code: " << err.err() << ", what: " << err.what() << std::endl;
+ }
+}
+
+int main(const int argc, char **argv)
+{
+ if (argc < 2)
+ printHelp();
+ else
+ {
+ std::string option = argv[1];
+
+ if (option == "-h") // help
+ printHelp();
+ else if (option == "-g") // check if devices in GPU uses same memory address
+ checkContextMem();
+ else if (option == "-s") // check synchronization between devices in GPU
+ testSync();
+ }
+ return 0;
+}
diff --git a/contrib/labs/tflite_examples/CMakeLists.txt b/runtimes/contrib/labs/tflite_examples/CMakeLists.txt
index 463bc5531..463bc5531 100644
--- a/contrib/labs/tflite_examples/CMakeLists.txt
+++ b/runtimes/contrib/labs/tflite_examples/CMakeLists.txt
diff --git a/runtimes/contrib/labs/tflite_examples/src/conv.cpp b/runtimes/contrib/labs/tflite_examples/src/conv.cpp
new file mode 100644
index 000000000..3117c316c
--- /dev/null
+++ b/runtimes/contrib/labs/tflite_examples/src/conv.cpp
@@ -0,0 +1,330 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
+
+#include <iostream>
+
+using namespace tflite;
+using namespace nnfw::tflite;
+
+namespace vector
+{
+
+template <typename T> struct View
+{
+ virtual ~View() = default;
+
+ virtual int32_t size(void) const = 0;
+ virtual T at(uint32_t off) const = 0;
+};
+}
+
+namespace feature
+{
+
+struct Shape
+{
+ int32_t C;
+ int32_t H;
+ int32_t W;
+};
+
+template <typename T> struct View
+{
+ virtual ~View() = default;
+
+ virtual const Shape &shape(void) const = 0;
+ virtual T at(uint32_t ch, uint32_t row, uint32_t col) const = 0;
+};
+}
+
+namespace kernel
+{
+
+struct Shape
+{
+ int32_t N;
+ int32_t C;
+ int32_t H;
+ int32_t W;
+};
+
+template <typename T> struct View
+{
+ virtual ~View() = default;
+
+ virtual const Shape &shape(void) const = 0;
+ virtual T at(uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) const = 0;
+};
+}
+
+const int32_t N = 1;
+const int32_t C = 2;
+
+class SampleBiasObject final : public vector::View<float>
+{
+public:
+ SampleBiasObject() : _size(N)
+ {
+ // DO NOTHING
+ }
+
+public:
+ int32_t size(void) const override { return _size; }
+
+ float at(uint32_t off) const override { return 0.0f; }
+
+private:
+ int32_t _size;
+};
+
+class SampleFeatureObject final : public feature::View<float>
+{
+public:
+ SampleFeatureObject()
+ {
+ _shape.C = C;
+ _shape.H = 3;
+ _shape.W = 4;
+
+ const uint32_t size = _shape.C * _shape.H * _shape.W;
+
+ for (uint32_t off = 0; off < size; ++off)
+ {
+ _value.emplace_back(off);
+ }
+
+ assert(_value.size() == size);
+ }
+
+public:
+ const feature::Shape &shape(void) const override { return _shape; };
+
+ float at(uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ return _value.at(ch * _shape.H * _shape.W + row * _shape.W + col);
+ }
+
+public:
+ float &at(uint32_t ch, uint32_t row, uint32_t col)
+ {
+ return _value.at(ch * _shape.H * _shape.W + row * _shape.W + col);
+ }
+
+private:
+ feature::Shape _shape;
+ std::vector<float> _value;
+};
+
+class SampleKernelObject final : public kernel::View<float>
+{
+public:
+ SampleKernelObject()
+ {
+ _shape.N = N;
+ _shape.C = C;
+ _shape.H = 3;
+ _shape.W = 4;
+
+ const uint32_t size = _shape.N * _shape.C * _shape.H * _shape.W;
+
+ for (uint32_t off = 0; off < size; ++off)
+ {
+ _value.emplace_back(off);
+ }
+
+ assert(_value.size() == size);
+ }
+
+public:
+ const kernel::Shape &shape(void) const override { return _shape; };
+
+ float at(uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ return _value.at(nth * _shape.C * _shape.H * _shape.W + ch * _shape.H * _shape.W +
+ row * _shape.W + col);
+ }
+
+private:
+ kernel::Shape _shape;
+ std::vector<float> _value;
+};
+
+int main(int argc, char **argv)
+{
+ const SampleFeatureObject ifm;
+ const SampleKernelObject kernel;
+ const SampleBiasObject bias;
+
+ const int32_t IFM_C = ifm.shape().C;
+ const int32_t IFM_H = ifm.shape().H;
+ const int32_t IFM_W = ifm.shape().W;
+
+ const int32_t KER_N = kernel.shape().N;
+ const int32_t KER_C = kernel.shape().C;
+ const int32_t KER_H = kernel.shape().H;
+ const int32_t KER_W = kernel.shape().W;
+
+ const int32_t OFM_C = kernel.shape().N;
+ const int32_t OFM_H = (IFM_H - KER_H) + 1;
+ const int32_t OFM_W = (IFM_W - KER_W) + 1;
+
+ // Assumptions in this example
+ assert(IFM_C == KER_C);
+ assert(KER_N == bias.size());
+
+ // Comment from 'context.h'
+ //
+ // Parameters for asymmetric quantization. Quantized values can be converted
+ // back to float using:
+ // real_value = scale * (quantized_value - zero_point);
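+ // e.g. with scale = 0.5 and zero_point = 10, a quantized value of 16 maps
+ // back to 0.5 * (16 - 10) = 3.0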
+ //
+ // Q: Is this necessary?
+ TfLiteQuantizationParams quantization;
+
+ quantization.scale = 1;
+ quantization.zero_point = 0;
+
+ Interpreter interp;
+
+ // On AddTensors(N) call, T/F Lite interpreter creates N tensors whose indices are in [0, N)
+ interp.AddTensors(5);
+
+ // Configure OFM
+ interp.SetTensorParametersReadWrite(0, kTfLiteFloat32 /* type */, "output" /* name */,
+ {1 /*N*/, OFM_H, OFM_W, OFM_C} /* dims */, quantization);
+
+ // Configure IFM
+ interp.SetTensorParametersReadWrite(1, kTfLiteFloat32 /* type */, "input" /* name */,
+ {1 /*N*/, IFM_H, IFM_W, IFM_C} /* dims */, quantization);
+
+ // Configure Filter
+ const uint32_t kernel_size = KER_N * KER_C * KER_H * KER_W;
+ float kernel_data[kernel_size] = {
+ 0.0f,
+ };
+
+ // Fill kernel data in NHWC order
+ {
+ uint32_t off = 0;
+
+ for (uint32_t nth = 0; nth < KER_N; ++nth)
+ {
+ for (uint32_t row = 0; row < KER_H; ++row)
+ {
+ for (uint32_t col = 0; col < KER_W; ++col)
+ {
+ for (uint32_t ch = 0; ch < KER_C; ++ch)
+ {
+ const auto value = kernel.at(nth, ch, row, col);
+ kernel_data[off++] = value;
+ }
+ }
+ }
+ }
+
+ assert(kernel_size == off);
+ }
+
+ interp.SetTensorParametersReadOnly(
+ 2, kTfLiteFloat32 /* type */, "filter" /* name */, {KER_N, KER_H, KER_W, KER_C} /* dims */,
+ quantization, reinterpret_cast<const char *>(kernel_data), sizeof(kernel_data));
+
+ // Configure Bias
+ const uint32_t bias_size = bias.size();
+ float bias_data[bias_size] = {
+ 0.0f,
+ };
+
+ // Fill bias data
+ for (uint32_t off = 0; off < bias.size(); ++off)
+ {
+ bias_data[off] = bias.at(off);
+ }
+
+ interp.SetTensorParametersReadOnly(3, kTfLiteFloat32 /* type */, "bias" /* name */,
+ {bias.size()} /* dims */, quantization,
+ reinterpret_cast<const char *>(bias_data), sizeof(bias_data));
+
+ // Add Convolution Node
+ //
+ // NOTE AddNodeWithParameters takes ownership of param and deallocates it with free()
+ // So, param should be allocated with malloc()
+ TfLiteConvParams *param = reinterpret_cast<TfLiteConvParams *>(malloc(sizeof(TfLiteConvParams)));
+
+ param->padding = kTfLitePaddingValid;
+ param->stride_width = 1;
+ param->stride_height = 1;
+ param->activation = kTfLiteActRelu;
+
+ // Run Convolution and store its result into Tensor #0
+ // - Read IFM from Tensor #1
+ // - Read Filter from Tensor #2,
+ // - Read Bias from Tensor #3
+ interp.AddNodeWithParameters({1, 2, 3}, {0}, nullptr, 0, reinterpret_cast<void *>(param),
+ BuiltinOpResolver().FindOp(BuiltinOperator_CONV_2D, 1));
+
+ // Set Tensor #1 as Input #0, and Tensor #0 as Output #0
+ interp.SetInputs({1});
+ interp.SetOutputs({0});
+
+ // Let's use NNAPI (if possible)
+ interp.UseNNAPI(true);
+
+ // Allocate Tensor
+ interp.AllocateTensors();
+
+ // Fill IFM data in HWC order
+ {
+ uint32_t off = 0;
+
+ for (uint32_t row = 0; row < ifm.shape().H; ++row)
+ {
+ for (uint32_t col = 0; col < ifm.shape().W; ++col)
+ {
+ for (uint32_t ch = 0; ch < ifm.shape().C; ++ch)
+ {
+ const auto value = ifm.at(ch, row, col);
+ interp.typed_input_tensor<float>(0)[off++] = value;
+ }
+ }
+ }
+ }
+
+ // Let's Rock-n-Roll!
+ interp.Invoke();
+
+ // Print OFM
+ {
+ uint32_t off = 0;
+
+ for (uint32_t row = 0; row < OFM_H; ++row)
+ {
+ for (uint32_t col = 0; col < OFM_W; ++col)
+ {
+ for (uint32_t ch = 0; ch < kernel.shape().N; ++ch)
+ {
+ std::cout << interp.typed_output_tensor<float>(0)[off++] << std::endl;
+ }
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/runtimes/contrib/mlapse/CMakeLists.txt b/runtimes/contrib/mlapse/CMakeLists.txt
new file mode 100644
index 000000000..bba79971a
--- /dev/null
+++ b/runtimes/contrib/mlapse/CMakeLists.txt
@@ -0,0 +1,8 @@
+if(NOT BUILD_MLAPSE)
+ return()
+endif(NOT BUILD_MLAPSE)
+
+message(STATUS "Build mlapse: TRUE")
+
+# TODO Add "core"
+add_subdirectory(tfl)
diff --git a/runtimes/contrib/mlapse/README.md b/runtimes/contrib/mlapse/README.md
new file mode 100644
index 000000000..36f14ac39
--- /dev/null
+++ b/runtimes/contrib/mlapse/README.md
@@ -0,0 +1,3 @@
+# mlapse
+
+_mlapse_ is a toolkit for benchmarking model inference latency.
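+
+## Usage
+
+A typical invocation of the `mlapse-tfl` driver looks like the following
+(the model and report paths are illustrative):
+
+```
+$ mlapse-tfl --model model.tflite --warmup-count 3 --record-count 10 --csv-report report.csv
+```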
diff --git a/runtimes/contrib/mlapse/tfl/CMakeLists.txt b/runtimes/contrib/mlapse/tfl/CMakeLists.txt
new file mode 100644
index 000000000..36f32d7ef
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/CMakeLists.txt
@@ -0,0 +1,12 @@
+message(STATUS "Build mlapse-tfl: TRUE")
+
+file(GLOB_RECURSE SOURCES "*.cc")
+
+add_executable(mlapse-tfl ${SOURCES})
+target_include_directories(mlapse-tfl PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})
+target_link_libraries(mlapse-tfl nnfw_lib_tflite)
+target_link_libraries(mlapse-tfl nnfw_lib_misc)
+target_link_libraries(mlapse-tfl nnfw_lib_cpp14)
+target_link_libraries(mlapse-tfl tensorflow-lite)
+
+install(TARGETS mlapse-tfl DESTINATION bin)
diff --git a/runtimes/contrib/mlapse/tfl/driver.cc b/runtimes/contrib/mlapse/tfl/driver.cc
new file mode 100644
index 000000000..867a6051a
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/driver.cc
@@ -0,0 +1,280 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "mlapse/benchmark_runner.h"
+#include "mlapse/multicast_observer.h"
+#include "mlapse/CSV_report_generator.h"
+
+#include "mlapse/tfl/load.h"
+
+// From 'nnfw_lib_tflite'
+#include <tflite/InterpreterSession.h>
+#include <tflite/NNAPISession.h>
+
+// From 'nnfw_lib_cpp14'
+#include <cpp14/memory.h>
+
+// From C++ Standard Library
+#include <cassert>
+#include <fstream>
+#include <iostream>
+#include <vector>
+
+namespace
+{
+
+using namespace mlapse;
+
+class ConsoleReporter final : public mlapse::BenchmarkObserver
+{
+public:
+ ConsoleReporter() = default;
+
+public:
+ void notify(const NotificationArg<PhaseBegin> &arg) final
+ {
+ _phase = arg.phase;
+ _count = arg.count;
+
+ std::cout << tag() << " BEGIN" << std::endl;
+ }
+
+ void notify(const NotificationArg<PhaseEnd> &arg) final
+ {
+ std::cout << tag() << " END" << std::endl;
+
+ _phase = mlapse::uninitialized_phase();
+ _count = 0;
+ }
+
+ void notify(const NotificationArg<IterationBegin> &arg) final { _index = arg.index; }
+
+ void notify(const NotificationArg<IterationEnd> &arg) final
+ {
+ std::cout << tag() << " " << progress() << " - " << arg.latency.count() << "ms" << std::endl;
+ }
+
+private:
+ std::string progress(void) const
+ {
+ return "[" + std::to_string(_index + 1) + "/" + std::to_string(_count) + "]";
+ }
+
+ std::string tag(void) const
+ {
+ switch (_phase)
+ {
+ case Phase::Warmup:
+ return "WARMUP";
+ case Phase::Record:
+ return "RECORD";
+ default:
+ break;
+ }
+
+ return "unknown";
+ }
+
+ Phase _phase = mlapse::uninitialized_phase();
+ uint32_t _count = 0;
+ uint32_t _index = 0;
+};
+
+} // namespace
+
+// Q. Is it worth making a library out of these routines?
+namespace
+{
+
+enum class SessionType
+{
+ Interp,
+ NNAPI,
+};
+
+class SessionBuilder
+{
+public:
+ SessionBuilder(const SessionType &type) : _type{type}
+ {
+ // DO NOTHING
+ }
+
+public:
+ std::unique_ptr<nnfw::tflite::Session> with(tflite::Interpreter *interp) const
+ {
+ switch (_type)
+ {
+ case SessionType::Interp:
+ return nnfw::cpp14::make_unique<nnfw::tflite::InterpreterSession>(interp);
+ case SessionType::NNAPI:
+ return nnfw::cpp14::make_unique<nnfw::tflite::NNAPISession>(interp);
+ default:
+ break;
+ }
+
+ return nullptr;
+ }
+
+ std::unique_ptr<nnfw::tflite::Session>
+ with(const std::unique_ptr<tflite::Interpreter> &interp) const
+ {
+ return with(interp.get());
+ }
+
+private:
+ SessionType _type;
+};
+
+SessionBuilder make_session(const SessionType &type) { return SessionBuilder{type}; }
+
+} // namespace
+
+namespace
+{
+
+// mlapse-tfl
+// [REQUIRED] --model <path/to/tflite>
+// [OPTIONAL] --warmup-count N (default = 3)
+// [OPTIONAL] --record-count N (default = 10)
+// [OPTIONAL] --thread N or auto (default = auto)
+// [OPTIONAL] --nnapi (default = off)
+// [OPTIONAL] --pause N (default = 0)
+// [OPTIONAL] --csv-report <path/to/csv>
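+//
+// e.g. mlapse-tfl --model model.tflite --record-count 20 --nnapi --csv-report report.csv
+// (the .tflite and .csv paths above are illustrative)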
+int entry(const int argc, char **argv)
+{
+ // Create an observer
+ mlapse::MulticastObserver observer;
+
+ observer.append(nnfw::cpp14::make_unique<ConsoleReporter>());
+
+ // Set default parameters
+ std::string model_path;
+ bool model_path_initialized = false;
+
+ SessionType session_type = SessionType::Interp;
+ uint32_t warmup_count = 3;
+ uint32_t record_count = 10;
+ int num_thread = -1; // -1 means "auto"
+
+ // Read command-line arguments
+ std::map<std::string, std::function<uint32_t(const char *const *)>> opts;
+
+ opts["--model"] = [&model_path, &model_path_initialized](const char *const *tok) {
+ model_path = std::string{tok[0]};
+ model_path_initialized = true;
+ return 1; // # of arguments
+ };
+
+ opts["--record-count"] = [&record_count](const char *const *tok) {
+ record_count = std::stoi(tok[0]);
+ return 1; // # of arguments
+ };
+
+ opts["--thread"] = [](const char *const *tok) {
+ assert(std::string{tok[0]} == "auto");
+ return 1;
+ };
+
+ opts["--nnapi"] = [&session_type](const char *const *) {
+ session_type = SessionType::NNAPI;
+ return 0;
+ };
+
+ opts["--csv-report"] = [&observer](const char *const *tok) {
+ observer.append(nnfw::cpp14::make_unique<mlapse::CSVReportGenerator>(tok[0]));
+ return 1;
+ };
+
+ {
+ uint32_t offset = 1;
+
+ while (offset < argc)
+ {
+ auto opt = argv[offset];
+
+ auto it = opts.find(opt);
+
+ if (it == opts.end())
+ {
+ std::cout << "INVALID OPTION: " << opt << std::endl;
+ return 255;
+ }
+
+ auto func = it->second;
+
+ auto num_skip = func(argv + offset + 1);
+
+ offset += 1;
+ offset += num_skip;
+ }
+ }
+
+ // Check arguments
+ if (!model_path_initialized)
+ {
+ std::cerr << "ERROR: --model is missing" << std::endl;
+ return 255;
+ }
+
+ // Load T/F Lite model
+ auto model = mlapse::tfl::load_model(model_path);
+
+ if (model == nullptr)
+ {
+ std::cerr << "ERROR: Failed to load '" << model_path << "'" << std::endl;
+ return 255;
+ }
+
+ auto interp = mlapse::tfl::make_interpreter(model.get());
+
+ if (interp == nullptr)
+ {
+ std::cerr << "ERROR: Failed to create a T/F Lite interpreter" << std::endl;
+ return 255;
+ }
+
+ auto sess = make_session(session_type).with(interp);
+
+ if (sess == nullptr)
+ {
+ std::cerr << "ERROR: Failed to create a session" << std::endl;
+ return 255;
+ }
+
+ // Run benchmark
+ mlapse::BenchmarkRunner benchmark_runner{warmup_count, record_count};
+
+ benchmark_runner.attach(&observer);
+ benchmark_runner.run(sess);
+
+ return 0;
+}
+
+} // namespace
+
+int main(int argc, char **argv)
+{
+ try
+ {
+ return entry(argc, argv);
+ }
+ catch (const std::exception &e)
+ {
+ std::cerr << e.what() << std::endl;
+ }
+
+ return 255;
+}
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.cc b/runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.cc
new file mode 100644
index 000000000..c6237a04f
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "mlapse/CSV_report_generator.h"
+
+#include <cassert>
+#include <stdexcept>
+
+namespace
+{
+
+std::string tag(const mlapse::Phase &phase)
+{
+ switch (phase)
+ {
+ case mlapse::Phase::Warmup:
+ return "WARMUP";
+ case mlapse::Phase::Record:
+ return "STEADY";
+ default:
+ break;
+ }
+
+ throw std::invalid_argument{"phase"};
+}
+
+} // namespace
+
+namespace mlapse
+{
+
+void CSVReportGenerator::notify(const NotificationArg<PhaseBegin> &arg)
+{
+ assert(_phase == uninitialized_phase());
+ _phase = arg.phase;
+}
+
+void CSVReportGenerator::notify(const NotificationArg<PhaseEnd> &arg)
+{
+ assert(_phase != uninitialized_phase());
+ _phase = uninitialized_phase();
+}
+
+void CSVReportGenerator::notify(const NotificationArg<IterationBegin> &arg)
+{
+ // DO NOTHING
+}
+
+void CSVReportGenerator::notify(const NotificationArg<IterationEnd> &arg)
+{
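+ // Emits one "PHASE,latency_in_ms" row per iteration, e.g. "STEADY,42".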
+ _ofs << tag(_phase) << "," << arg.latency.count() << std::endl;
+}
+
+} // namespace mlapse
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.h b/runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.h
new file mode 100644
index 000000000..8842baf8e
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/CSV_report_generator.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MLAPSE_CSV_REPORT_GENERATOR_H__
+#define __MLAPSE_CSV_REPORT_GENERATOR_H__
+
+#include "mlapse/benchmark_observer.h"
+
+#include <fstream>
+#include <string>
+
+namespace mlapse
+{
+
+class CSVReportGenerator final : public BenchmarkObserver
+{
+public:
+ CSVReportGenerator(const std::string &path) : _ofs{path, std::ofstream::out}
+ {
+ // DO NOTHING
+ }
+
+public:
+ void notify(const NotificationArg<PhaseBegin> &arg) final;
+ void notify(const NotificationArg<PhaseEnd> &arg) final;
+ void notify(const NotificationArg<IterationBegin> &arg) final;
+ void notify(const NotificationArg<IterationEnd> &arg) final;
+
+private:
+ std::ofstream _ofs;
+
+ Phase _phase = uninitialized_phase();
+};
+
+} // namespace mlapse
+
+#endif // __MLAPSE_CSV_REPORT_GENERATOR_H__
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.cc b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.cc
new file mode 100644
index 000000000..f6d596a7b
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "mlapse/benchmark_observer.h"
+
+namespace mlapse
+{
+
+Phase uninitialized_phase(void) { return static_cast<Phase>(0); }
+
+} // namespace mlapse
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.h b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.h
new file mode 100644
index 000000000..8fc570d24
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_observer.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MLAPSE_BENCHMARK_OBSERVER_H__
+#define __MLAPSE_BENCHMARK_OBSERVER_H__
+
+#include <cstdint>
+#include <chrono>
+
+namespace mlapse
+{
+
+enum Phase : int32_t
+{
+ // 0 denotes "uninitialized value"
+ Warmup = 1,
+ Record = 2,
+};
+
+Phase uninitialized_phase(void);
+
+enum Notification
+{
+ PhaseBegin,
+ PhaseEnd,
+ IterationBegin,
+ IterationEnd,
+};
+
+template <Notification N> struct NotificationArg;
+
+template <> struct NotificationArg<PhaseBegin>
+{
+ Phase phase;
+ uint32_t count;
+};
+
+template <> struct NotificationArg<PhaseEnd>
+{
+};
+
+template <> struct NotificationArg<IterationBegin>
+{
+ uint32_t index;
+};
+
+template <> struct NotificationArg<IterationEnd>
+{
+ std::chrono::milliseconds latency;
+};
+
+struct BenchmarkObserver
+{
+ virtual ~BenchmarkObserver() = default;
+
+ virtual void notify(const NotificationArg<PhaseBegin> &arg) = 0;
+ virtual void notify(const NotificationArg<PhaseEnd> &arg) = 0;
+ virtual void notify(const NotificationArg<IterationBegin> &arg) = 0;
+ virtual void notify(const NotificationArg<IterationEnd> &arg) = 0;
+};
+
+} // namespace mlapse
+
+#endif // __MLAPSE_BENCHMARK_OBSERVER_H__
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.cc b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.cc
new file mode 100644
index 000000000..f5fc7302d
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "mlapse/benchmark_runner.h"
+
+// From 'nnfw_lib_misc'
+#include <misc/benchmark.h>
+
+// From C++ Standard Library
+#include <cassert>
+
+namespace mlapse
+{
+void BenchmarkRunner::attach(BenchmarkObserver *observer)
+{
+ assert(_observer == nullptr);
+ _observer = observer;
+}
+
+void BenchmarkRunner::run(nnfw::tflite::Session *sess) const
+{
+ for (auto phase : {Warmup, Record})
+ {
+ uint32_t const count = _count.at(phase);
+
+ // Notify when each phase begins
+ {
+ NotificationArg<PhaseBegin> arg;
+
+ arg.phase = phase;
+ arg.count = count;
+
+ notify(arg);
+ }
+
+ for (uint32_t n = 0; n < count; ++n)
+ {
+ std::chrono::milliseconds elapsed(0);
+
+ sess->prepare();
+
+ // Notify when each iteration begins
+ {
+ NotificationArg<IterationBegin> arg;
+
+ arg.index = n;
+
+ notify(arg);
+ }
+
+ nnfw::misc::benchmark::measure(elapsed) << [&](void) {
+ if (!sess->run())
+ {
+ throw std::runtime_error{"run failed"};
+ }
+ };
+
+ // Notify when each iteration ends
+ {
+ NotificationArg<IterationEnd> arg;
+
+ arg.latency = elapsed;
+
+ notify(arg);
+ }
+
+ sess->teardown();
+ }
+
+ // Notify when each phase ends
+ {
+ NotificationArg<PhaseEnd> arg;
+
+ notify(arg);
+ }
+ }
+}
+
+void BenchmarkRunner::notify(const NotificationArg<PhaseBegin> &arg) const
+{
+ if (_observer)
+ {
+ _observer->notify(arg);
+ }
+}
+
+void BenchmarkRunner::notify(const NotificationArg<PhaseEnd> &arg) const
+{
+ if (_observer)
+ {
+ _observer->notify(arg);
+ }
+}
+
+void BenchmarkRunner::notify(const NotificationArg<IterationBegin> &arg) const
+{
+ if (_observer)
+ {
+ _observer->notify(arg);
+ }
+}
+
+void BenchmarkRunner::notify(const NotificationArg<IterationEnd> &arg) const
+{
+ if (_observer)
+ {
+ _observer->notify(arg);
+ }
+}
+
+} // namespace mlapse
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.h b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.h
new file mode 100644
index 000000000..fcbb41d1b
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/benchmark_runner.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MLAPSE_BENCHMARK_RUNNER_H__
+#define __MLAPSE_BENCHMARK_RUNNER_H__
+
+#include "mlapse/benchmark_observer.h"
+
+#include <tflite/Session.h>
+
+#include <chrono>
+#include <map>
+
+namespace mlapse
+{
+
+class BenchmarkRunner final
+{
+public:
+ BenchmarkRunner(uint32_t warmup_count, uint32_t record_count)
+ {
+ _count[Warmup] = warmup_count;
+ _count[Record] = record_count;
+ }
+
+public:
+ void attach(BenchmarkObserver *observer);
+
+public:
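+ // Runs warmup_count warm-up iterations followed by record_count measured
+ // iterations, notifying the attached observer at each phase boundary.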
+ void run(nnfw::tflite::Session *sess) const;
+
+public:
+ void run(const std::unique_ptr<nnfw::tflite::Session> &sess) const { run(sess.get()); }
+
+private:
+ void notify(const NotificationArg<PhaseBegin> &arg) const;
+ void notify(const NotificationArg<PhaseEnd> &arg) const;
+ void notify(const NotificationArg<IterationBegin> &arg) const;
+ void notify(const NotificationArg<IterationEnd> &arg) const;
+
+private:
+ std::map<Phase, uint32_t> _count;
+
+private:
+ BenchmarkObserver *_observer = nullptr;
+};
+
+} // namespace mlapse
+
+#endif // __MLAPSE_BENCHMARK_RUNNER_H__
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.cc b/runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.cc
new file mode 100644
index 000000000..639acfe45
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "mlapse/multicast_observer.h"
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.h b/runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.h
new file mode 100644
index 000000000..e4aac50a9
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/multicast_observer.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MLAPSE_MULTICAST_OBSERVER_H__
+#define __MLAPSE_MULTICAST_OBSERVER_H__
+
+#include "mlapse/benchmark_observer.h"
+
+#include <memory>
+#include <vector>
+
+namespace mlapse
+{
+
+class MulticastObserver final : public BenchmarkObserver
+{
+public:
+ MulticastObserver() = default;
+
+public:
+ void append(std::unique_ptr<BenchmarkObserver> &&o) { _observers.emplace_back(std::move(o)); }
+
+public:
+ void notify(const NotificationArg<PhaseBegin> &arg) final
+ {
+ for (const auto &o : _observers)
+ {
+ o->notify(arg);
+ }
+ }
+
+ void notify(const NotificationArg<PhaseEnd> &arg) final
+ {
+ for (const auto &o : _observers)
+ {
+ o->notify(arg);
+ }
+ }
+
+ void notify(const NotificationArg<IterationBegin> &arg) final
+ {
+ for (const auto &o : _observers)
+ {
+ o->notify(arg);
+ }
+ }
+
+ void notify(const NotificationArg<IterationEnd> &arg) final
+ {
+ for (const auto &o : _observers)
+ {
+ o->notify(arg);
+ }
+ }
+
+private:
+ std::vector<std::unique_ptr<BenchmarkObserver>> _observers;
+};
+
+} // namespace mlapse
+
+#endif // __MLAPSE_MULTICAST_OBSERVER_H__
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/tfl/load.cc b/runtimes/contrib/mlapse/tfl/mlapse/tfl/load.cc
new file mode 100644
index 000000000..9e770aecf
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/tfl/load.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "mlapse/tfl/load.h"
+
+#include <tflite/ext/kernels/register.h>
+
+namespace
+{
+
+tflite::StderrReporter error_reporter;
+
+} // namespace
+
+namespace mlapse
+{
+namespace tfl
+{
+
+std::unique_ptr<tflite::FlatBufferModel> load_model(const std::string &path)
+{
+ return tflite::FlatBufferModel::BuildFromFile(path.c_str(), &error_reporter);
+}
+
+std::unique_ptr<tflite::Interpreter> make_interpreter(const tflite::FlatBufferModel *model)
+{
+ // Let's use extended resolver!
+ nnfw::tflite::BuiltinOpResolver resolver;
+ tflite::InterpreterBuilder builder(*model, resolver);
+
+ std::unique_ptr<tflite::Interpreter> interpreter;
+
+ if (builder(&interpreter) != kTfLiteOk)
+ {
+ return nullptr;
+ }
+
+ return std::move(interpreter);
+}
+
+} // namespace tfl
+} // namespace mlapse
diff --git a/runtimes/contrib/mlapse/tfl/mlapse/tfl/load.h b/runtimes/contrib/mlapse/tfl/mlapse/tfl/load.h
new file mode 100644
index 000000000..6f5a8f1ea
--- /dev/null
+++ b/runtimes/contrib/mlapse/tfl/mlapse/tfl/load.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MLAPSE_TFL_LOAD_H__
+#define __MLAPSE_TFL_LOAD_H__
+
+#include <tensorflow/lite/interpreter.h>
+#include <tensorflow/lite/model.h>
+
+#include <memory>
+
+namespace mlapse
+{
+namespace tfl
+{
+
+std::unique_ptr<tflite::FlatBufferModel> load_model(const std::string &path);
+
+// WARNING
+//
+// model SHOULD outlive Interpreter
+std::unique_ptr<tflite::Interpreter> make_interpreter(const tflite::FlatBufferModel *model);
+
+} // namespace tfl
+} // namespace mlapse
+
+#endif // __MLAPSE_TFL_LOAD_H__
diff --git a/runtimes/contrib/tflite_classify/CMakeLists.txt b/runtimes/contrib/tflite_classify/CMakeLists.txt
new file mode 100644
index 000000000..c0bf62738
--- /dev/null
+++ b/runtimes/contrib/tflite_classify/CMakeLists.txt
@@ -0,0 +1,22 @@
+if(NOT BUILD_TFLITE_CLASSIFY_APP)
+ return()
+endif(NOT BUILD_TFLITE_CLASSIFY_APP)
+
+list(APPEND SOURCES "src/tflite_classify.cc")
+list(APPEND SOURCES "src/ImageClassifier.cc")
+list(APPEND SOURCES "src/InferenceInterface.cc")
+
+## Required package
+find_package(OpenCV REQUIRED)
+find_package(Boost REQUIRED COMPONENTS system filesystem)
+
+# Without this line, this application cannot find the OpenCV libraries already installed under ${ROOTFS_DIR}/usr/lib/arm-linux-gnueabihf
+set(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${ROOTFS_DIR}/usr/lib/arm-linux-gnueabihf -Wl,--rpath=${ROOTFS_DIR}/lib/arm-linux-gnueabihf")
+
+add_executable(tflite_classify ${SOURCES})
+target_include_directories(tflite_classify PRIVATE src)
+target_link_libraries(tflite_classify tensorflow-lite ${LIB_PTHREAD} dl nnfw_lib_tflite)
+target_link_libraries(tflite_classify ${Boost_LIBRARIES})
+target_link_libraries(tflite_classify ${OpenCV_LIBRARIES})
+
+install(TARGETS tflite_classify DESTINATION bin)
diff --git a/contrib/tflite_classify/src/ImageClassifier.cc b/runtimes/contrib/tflite_classify/src/ImageClassifier.cc
index fae4f066c..fae4f066c 100644
--- a/contrib/tflite_classify/src/ImageClassifier.cc
+++ b/runtimes/contrib/tflite_classify/src/ImageClassifier.cc
diff --git a/contrib/tflite_classify/src/ImageClassifier.h b/runtimes/contrib/tflite_classify/src/ImageClassifier.h
index 1ba19afb0..1ba19afb0 100644
--- a/contrib/tflite_classify/src/ImageClassifier.h
+++ b/runtimes/contrib/tflite_classify/src/ImageClassifier.h
diff --git a/contrib/tflite_classify/src/InferenceInterface.cc b/runtimes/contrib/tflite_classify/src/InferenceInterface.cc
index 160943477..160943477 100644
--- a/contrib/tflite_classify/src/InferenceInterface.cc
+++ b/runtimes/contrib/tflite_classify/src/InferenceInterface.cc
diff --git a/runtimes/contrib/tflite_classify/src/InferenceInterface.h b/runtimes/contrib/tflite_classify/src/InferenceInterface.h
new file mode 100644
index 000000000..fe2c1aa6c
--- /dev/null
+++ b/runtimes/contrib/tflite_classify/src/InferenceInterface.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file InferenceInterface.h
+ * @brief This file contains class for running the actual inference model
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __TFLITE_CLASSIFY_INFERENCE_INTERFACE_H__
+#define __TFLITE_CLASSIFY_INFERENCE_INTERFACE_H__
+
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+
+#include "tflite/InterpreterSession.h"
+#include "tflite/NNAPISession.h"
+
+#include <iostream>
+#include <string>
+
+/**
+ * @brief Class to define an inference interface for recognizing data
+ */
+class InferenceInterface
+{
+public:
+ /**
+ * @brief Construct a new InferenceInterface object with parameters
+ * @param[in] model_file The filepath of the model FlatBuffer protocol buffer
+ * @param[in] use_nnapi The flag to distinguish between TfLite interpreter and NNFW runtime
+ */
+ InferenceInterface(const std::string &model_file, const bool use_nnapi);
+
+ /**
+ * @brief Destroy the InferenceInterface object
+ */
+ ~InferenceInterface();
+
+ /**
+ * @brief Copy the input data into model
+ * @param[in] input_name The label of the image input node
+ * @param[in] data The actual data to be copied into the input tensor
+ * @param[in] batch The batch size
+ * @param[in] height The height size
+ * @param[in] width The width size
+ * @param[in] channel The channel size
+ * @return N/A
+ */
+ void feed(const std::string &input_name, const std::vector<float> &data, const int batch,
+ const int height, const int width, const int channel);
+ /**
+ * @brief Run the inference call
+ * @param[in] output_name The label of the output node
+ * @return N/A
+ */
+ void run(const std::string &output_name);
+
+ /**
+ * @brief Copy the output tensor back into the output array
+ * @param[in] output_name The label of the output node
+ * @param[out] outputs The output data array
+ * @return N/A
+ */
+ void fetch(const std::string &output_name, std::vector<float> &outputs);
+
+ /**
+ * @brief Get tensor size
+ * @param[in] name The label of the node
+ * @return The size of the tensor
+ */
+ int getTensorSize(const std::string &name);
+
+private:
+ std::unique_ptr<tflite::Interpreter> _interpreter;
+ std::unique_ptr<tflite::FlatBufferModel> _model;
+ std::shared_ptr<nnfw::tflite::Session> _sess;
+};
+
+#endif // __TFLITE_CLASSIFY_INFERENCE_INTERFACE_H__
diff --git a/contrib/tflite_classify/src/tflite_classify.cc b/runtimes/contrib/tflite_classify/src/tflite_classify.cc
index 40c15f331..40c15f331 100644
--- a/contrib/tflite_classify/src/tflite_classify.cc
+++ b/runtimes/contrib/tflite_classify/src/tflite_classify.cc
diff --git a/contrib/tflite_test/CMakeLists.txt b/runtimes/contrib/tflite_test/CMakeLists.txt
index 760952a84..760952a84 100644
--- a/contrib/tflite_test/CMakeLists.txt
+++ b/runtimes/contrib/tflite_test/CMakeLists.txt
diff --git a/runtimes/contrib/tflite_test/tflite_test.cpp b/runtimes/contrib/tflite_test/tflite_test.cpp
new file mode 100644
index 000000000..80ba448c6
--- /dev/null
+++ b/runtimes/contrib/tflite_test/tflite_test.cpp
@@ -0,0 +1,239 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <tensorflow/lite/model.h>
+#include <tensorflow/core/public/session.h>
+
+#include "tflite/Assert.h"
+#include "tflite/Session.h"
+#include "tflite/InterpreterSession.h"
+#include "tflite/NNAPISession.h"
+#include "tflite/ext/kernels/register.h"
+
+#include "misc/fp32.h"
+
+#include <iostream>
+
+#include <string>
+#include <vector>
+
+#define TF_ENSURE(e) \
+ { \
+ if (!(e).ok()) \
+ { \
+ throw std::runtime_error{"'" #e "' FAILED"}; \
+ } \
+ }
+
+using namespace tflite;
+using namespace tflite::ops::builtin;
+
+std::unique_ptr<FlatBufferModel> BuildModelFromFile(const std::string &path)
+{
+ static StderrReporter reporter;
+ return FlatBufferModel::BuildFromFile(path.c_str(), &reporter);
+}
+
+std::unique_ptr<Interpreter> BuildInterpFromModel(const std::unique_ptr<FlatBufferModel> &model)
+{
+ std::unique_ptr<Interpreter> interp;
+
+ BuiltinOpResolver resolver;
+ InterpreterBuilder builder(*model, resolver);
+
+ TFLITE_ENSURE(builder(&interp));
+
+ return std::move(interp);
+}
+
+tensorflow::TensorShape asTensorflowShape(const TfLiteTensor *tensor)
+{
+ tensorflow::TensorShape shape;
+
+ const int rank = tensor->dims->size;
+
+ for (int axis = 0; axis < rank; ++axis)
+ {
+ shape.AddDim(tensor->dims->data[axis]);
+ }
+
+ return shape;
+}
+
+uint32_t count_elements(const TfLiteTensor *tensor)
+{
+ const int rank = tensor->dims->size;
+
+ if (rank == 0)
+ {
+ return 0;
+ }
+
+ uint32_t res = 1;
+
+ for (int axis = 0; axis < rank; ++axis)
+ {
+ res *= tensor->dims->data[axis];
+ }
+
+ return res;
+}
+
+int main(int argc, char **argv)
+{
+ bool use_nnapi = false;
+
+ if (std::getenv("USE_NNAPI") != nullptr)
+ {
+ use_nnapi = true;
+ }
+
+ if (argc < 3)
+ {
+ std::cerr << "USAGE: " << argv[0] << " [T/F lite model] [T/F model]" << std::endl;
+ return 255;
+ }
+
+ //
+ // Prepare Tensorflow Lite session
+ //
+ const std::string lite_model_path{argv[1]};
+
+ auto lite_model = BuildModelFromFile(lite_model_path);
+ auto lite_interp = BuildInterpFromModel(lite_model);
+
+ std::shared_ptr<nnfw::tflite::Session> lite_sess;
+
+ if (use_nnapi)
+ {
+ lite_sess = std::make_shared<nnfw::tflite::NNAPISession>(lite_interp.get());
+ }
+ else
+ {
+ lite_sess = std::make_shared<nnfw::tflite::InterpreterSession>(lite_interp.get());
+ }
+
+ //
+ // Prepare Tensorflow session
+ //
+ const std::string full_model_path{argv[2]};
+
+ tensorflow::Session *full_sess;
+ tensorflow::GraphDef full_model;
+
+ TF_ENSURE(tensorflow::NewSession(tensorflow::SessionOptions(), &full_sess));
+ TF_ENSURE(ReadBinaryProto(tensorflow::Env::Default(), full_model_path, &full_model));
+ TF_ENSURE(full_sess->Create(full_model));
+
+ //
+ //
+ //
+ std::vector<tensorflow::Tensor> input_nodes;
+ std::vector<std::string> input_names;
+
+ for (uint32_t n = 0; n < lite_interp->inputs().size(); ++n)
+ {
+ const TfLiteTensor *tensor = lite_interp->tensor(lite_interp->inputs().at(n));
+
+ input_nodes.emplace_back(tensorflow::DT_FLOAT, asTensorflowShape(tensor));
+ input_names.emplace_back(tensor->name);
+ }
+
+ assert(input_nodes.size() == input_names.size());
+ assert(input_nodes.size() == lite_interp->inputs().size());
+
+ std::vector<std::string> output_names;
+ std::vector<tensorflow::Tensor> output_nodes;
+
+ for (uint32_t n = 0; n < lite_interp->outputs().size(); ++n)
+ {
+ const TfLiteTensor *tensor = lite_interp->tensor(lite_interp->outputs().at(n));
+
+ output_names.emplace_back(tensor->name);
+ }
+
+ assert(output_names.size() == lite_interp->outputs().size());
+ // output_nodes will be initialized after Tensorflow Session run
+ assert(output_nodes.size() == 0);
+
+ //
+ // Prepare inference
+ //
+ lite_sess->prepare();
+
+ // TODO Feed Inputs (for both Tensorflow and Tensorflow Lite)
+ std::vector<std::pair<std::string, tensorflow::Tensor>> inputs;
+
+ for (uint32_t n = 0; n < input_nodes.size(); ++n)
+ {
+ inputs.emplace_back(input_names.at(n), input_nodes.at(n));
+ }
+
+ //
+ // Run inference
+ //
+ TF_ENSURE(full_sess->Run(inputs, output_names, {}, &output_nodes));
+
+ lite_sess->run();
+
+ //
+ // Compare Output
+ //
+ auto equals = [](float lhs, float rhs) {
+ // TODO Allow users to set tolerance
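+ // Values match when they are close in either absolute or relative (epsilon-scaled) terms.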
+ if (nnfw::misc::fp32::absolute_epsilon_equal(lhs, rhs))
+ {
+ return true;
+ }
+
+ return nnfw::misc::fp32::epsilon_equal(lhs, rhs);
+ };
+
+ const uint32_t output_count = output_names.size();
+
+ bool matched = true;
+
+ for (uint32_t n = 0; n < output_count; ++n)
+ {
+ const TfLiteTensor *tensor = lite_interp->tensor(lite_interp->outputs().at(n));
+
+ // TODO Compare shape
+
+ const auto element_count = count_elements(tensor);
+
+ std::cout << "Compare output #" << n << "(" << tensor->name << ", " << element_count
+ << " elements)" << std::endl;
+ for (uint32_t index = 0; index < element_count; ++index)
+ {
+ const auto full_value = output_nodes.at(n).flat<float>().data()[index];
+ const auto lite_value = lite_sess->interp()->typed_output_tensor<float>(n)[index];
+
+ if (!equals(full_value, lite_value))
+ {
+ std::cerr << full_value << " is expected, but " << lite_value << " is obtained (at " << n
+ << ":" << index << ")" << std::endl;
+ matched = false;
+ }
+ }
+ }
+
+ //
+ // Cleanup
+ //
+ lite_sess->teardown();
+
+ return matched ? 0 : 255;
+}
diff --git a/runtimes/contrib/uben/CMakeLists.txt b/runtimes/contrib/uben/CMakeLists.txt
new file mode 100644
index 000000000..747398aaf
--- /dev/null
+++ b/runtimes/contrib/uben/CMakeLists.txt
@@ -0,0 +1,29 @@
+if(NOT BUILD_UBEN)
+ return()
+endif(NOT BUILD_UBEN)
+
+nnfw_find_package(ARMCompute QUIET)
+nnfw_find_package(Nonius QUIET)
+
+if(NOT ARMCompute_FOUND)
+ return()
+endif(NOT ARMCompute_FOUND)
+
+if(NOT Nonius_FOUND)
+ return()
+endif(NOT Nonius_FOUND)
+
+# 3x3 Convolution with unit stride
+add_executable(uben_conv_3x3 Convolution.cpp)
+target_compile_definitions(uben_conv_3x3 PRIVATE KER_H=3 KER_W=3 STRIDE_H=1 STRIDE_W=1)
+target_compile_definitions(uben_conv_3x3 PRIVATE CL_DIRECT_CONVOLUTION=1)
+target_compile_definitions(uben_conv_3x3 PRIVATE CL_GEMM_CONVOLUTION=1)
+target_compile_definitions(uben_conv_3x3 PRIVATE CL_WINOGRAD_CONVOLUTION=1)
+target_link_libraries(uben_conv_3x3 PRIVATE nonius)
+target_link_libraries(uben_conv_3x3 PRIVATE arm_compute)
+target_link_libraries(uben_conv_3x3 PRIVATE pthread)
+
+add_executable(uben_softmax Softmax.cpp)
+target_link_libraries(uben_softmax PRIVATE nonius)
+target_link_libraries(uben_softmax PRIVATE nnfw_lib_cker)
+target_link_libraries(uben_softmax PRIVATE pthread)
diff --git a/runtimes/contrib/uben/Convolution.cpp b/runtimes/contrib/uben/Convolution.cpp
new file mode 100644
index 000000000..ad69f1cec
--- /dev/null
+++ b/runtimes/contrib/uben/Convolution.cpp
@@ -0,0 +1,429 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Conv2D (with SAME padding) benchmark with various algorithms
+ */
+
+#ifndef KER_H
+#error "KER_H is undefined"
+#endif // KER_H
+#ifndef KER_W
+#error "KER_W is undefined"
+#endif // KER_W
+#ifndef STRIDE_H
+#error "STRIDE_H is undefined"
+#endif // STRIDE_H
+#ifndef STRIDE_W
+#error "STRIDE_W is undefined"
+#endif // STRIDE_W
+
+#define NONIUS_RUNNER
+#include <nonius/nonius_single.h++>
+
+#include <arm_compute/core/Types.h>
+#include <arm_compute/runtime/CL/CLScheduler.h>
+#include <arm_compute/runtime/CL/CLFunctions.h>
+
+#include <cstdint>
+#include <cassert>
+#include <stdexcept>
+
+using namespace arm_compute;
+
+//
+// Helpers
+//
+namespace
+{
+
+enum Layout
+{
+ NCHW,
+ NHWC
+};
+
+struct Initializer
+{
+ Initializer() { CLScheduler::get().default_init(); }
+};
+
+Initializer initializer;
+
+TensorInfo make_info(uint32_t N)
+{
+ TensorShape shape{N};
+ return TensorInfo{shape, 1, DataType::F32};
+}
+
+template <enum Layout> TensorInfo make_info(uint32_t N, uint32_t C, uint32_t H, uint32_t W);
+
+template <> TensorInfo make_info<NCHW>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{W, H, C, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NCHW);
+ return info;
+}
+
+template <> TensorInfo make_info<NHWC>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{C, W, H, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NHWC);
+ return info;
+}
+
+inline void check(const Status &status)
+{
+ if (!status)
+ {
+ std::cerr << status.error_description() << std::endl;
+ throw std::runtime_error{"ERROR"};
+ }
+}
+
+inline bool is_odd(uint32_t n) { return n % 2 != 0; }
+
+} // namespace
+
+//
+// Benchmark Parameters
+//
+NONIUS_PARAM(BATCH, 1);
+
+NONIUS_PARAM(IFM_C, 3);
+NONIUS_PARAM(IFM_H, 244);
+NONIUS_PARAM(IFM_W, 244);
+
+NONIUS_PARAM(OFM_C, 3);
+NONIUS_PARAM(OFM_H, 244);
+NONIUS_PARAM(OFM_W, 244);
+
+//
+// Configuration Helpers
+//
+namespace
+{
+
+struct Configuration
+{
+ uint32_t ifm_N;
+ uint32_t ifm_C;
+ uint32_t ifm_H;
+ uint32_t ifm_W;
+
+ uint32_t ofm_N;
+ uint32_t ofm_C;
+ uint32_t ofm_H;
+ uint32_t ofm_W;
+
+ uint32_t ker_N;
+ uint32_t ker_C;
+ uint32_t ker_H;
+ uint32_t ker_W;
+
+ uint32_t vertical_stride;
+ uint32_t horizontal_stride;
+
+ uint32_t top_padding;
+ uint32_t bottom_padding;
+ uint32_t left_padding;
+ uint32_t right_padding;
+
+ Configuration(nonius::chronometer meter)
+ {
+ ifm_N = meter.param<BATCH>();
+ ifm_C = meter.param<IFM_C>();
+ ifm_H = meter.param<IFM_H>();
+ ifm_W = meter.param<IFM_W>();
+
+ ofm_N = meter.param<BATCH>();
+ ofm_C = meter.param<OFM_C>();
+ ofm_H = meter.param<OFM_H>();
+ ofm_W = meter.param<OFM_W>();
+
+ ker_N = meter.param<OFM_C>();
+ ker_C = meter.param<IFM_C>();
+ ker_H = KER_H;
+ ker_W = KER_W;
+
+ vertical_stride = STRIDE_H;
+ horizontal_stride = STRIDE_W;
+
+ assert((ifm_H - ker_H) % vertical_stride == 0);
+ assert((ifm_W - ker_W) % horizontal_stride == 0);
+
+ uint32_t const effective_ofm_H = (ifm_H - ker_H) / vertical_stride + 1;
+ uint32_t const effective_ofm_W = (ifm_W - ker_W) / horizontal_stride + 1;
+
+ assert(ofm_H >= effective_ofm_H);
+ assert(ofm_W >= effective_ofm_W);
+
+ uint32_t const pad_H = ofm_H - effective_ofm_H;
+ uint32_t const pad_W = ofm_W - effective_ofm_W;
+
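+ // e.g. with the default parameters (IFM_H = OFM_H = 244, KER_H = 3, STRIDE_H = 1):
+ // effective_ofm_H = 242, pad_H = 2, so top_padding = bottom_padding = 1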
+ top_padding = pad_H / 2;
+ bottom_padding = pad_H / 2;
+ left_padding = pad_W / 2;
+ right_padding = pad_W / 2;
+
+ if (is_odd(pad_H))
+ top_padding += 1;
+ if (is_odd(pad_W))
+ left_padding += 1;
+ }
+
+ template <Layout L> TensorInfo src_info() const
+ {
+ return make_info<L>(ifm_N, ifm_C, ifm_H, ifm_W);
+ }
+ template <Layout L> TensorInfo dst_info() const
+ {
+ return make_info<L>(ofm_N, ofm_C, ofm_H, ofm_W);
+ }
+ template <Layout L> TensorInfo ker_info() const
+ {
+ return make_info<L>(ker_N, ker_C, ker_H, ker_W);
+ }
+ TensorInfo bias_info(void) const { return make_info(ker_N); }
+
+ PadStrideInfo pad_stride_info(void) const
+ {
+ return PadStrideInfo{horizontal_stride,
+ vertical_stride,
+ left_padding,
+ right_padding,
+ top_padding,
+ bottom_padding,
+ DimensionRoundingType::FLOOR};
+ }
+};
+
+} // namespace
+
+//
+// Benchmark Implementations
+//
+#ifndef CL_DIRECT_CONVOLUTION
+#error "CL_DIRECT_CONVOLUTION is undefined"
+#endif // CL_DIRECT_CONVOLUTION
+
+#if CL_DIRECT_CONVOLUTION
+NONIUS_BENCHMARK("CLDirectConvolutionLayer(NCHW)", [](nonius::chronometer meter) {
+ CLDirectConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_BENCHMARK("CLDirectConvolutionLayer(NHWC)", [](nonius::chronometer meter) {
+ CLDirectConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+#endif // CL_DIRECT_CONVOLUTION
+
+#ifndef CL_GEMM_CONVOLUTION
+#error "CL_GEMM_CONVOLUTION is undefined"
+#endif // CL_GEMM_CONVOLUTION
+
+#if CL_GEMM_CONVOLUTION
+NONIUS_BENCHMARK("CLGEMMConvolutionLayer(NCHW)", [](nonius::chronometer meter) {
+ CLGEMMConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_BENCHMARK("CLGEMMConvolutionLayer(NHWC)", [](nonius::chronometer meter) {
+ CLGEMMConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+#endif // CL_GEMM_CONVOLUTION
+
+#ifndef CL_WINOGRAD_CONVOLUTION
+#error "CL_WINOGRAD_CONVOLUTION is undefined"
+#endif // CL_WINOGRAD_CONVOLUTION
+
+#if CL_WINOGRAD_CONVOLUTION
+NONIUS_BENCHMARK("CLWinogradConvolutionLayer(NCHW)", [](nonius::chronometer meter) {
+ CLWinogradConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_BENCHMARK("CLWinogradConvolutionLayer(NHWC)", [](nonius::chronometer meter) {
+ CLWinogradConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+#endif // CL_WINOGRAD_CONVOLUTION
diff --git a/runtimes/contrib/uben/Softmax.cpp b/runtimes/contrib/uben/Softmax.cpp
new file mode 100644
index 000000000..1c4a6b197
--- /dev/null
+++ b/runtimes/contrib/uben/Softmax.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Softmax benchmark
+ */
+
+#define NONIUS_RUNNER
+#include <nonius/nonius_single.h++>
+
+#include <cker/operation/SoftMax.h>
+
+#include <vector>
+
+//
+// Parameters
+//
+NONIUS_PARAM(LEN, 1000);
+
+//
+// Implementations
+//
+NONIUS_BENCHMARK("cker::Softmax(float)", [](nonius::chronometer meter) {
+ auto len = meter.param<LEN>();
+
+ nnfw::cker::SoftmaxParams params;
+ nnfw::cker::Shape shape{1, len};
+
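+ // beta scales the logits before exponentiation:
+ //   softmax(x)_i = exp(beta * x_i) / sum_j exp(beta * x_j)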
+ params.beta = 1.0;
+
+ std::vector<float> input;
+ std::vector<float> output;
+
+ input.resize(len);
+ output.resize(len);
+
+ meter.measure([&](int) {
+ // Run!
+ nnfw::cker::Softmax(params, shape, input.data(), shape, output.data());
+ });
+})
diff --git a/runtimes/contrib/xtrace/CMakeLists.txt b/runtimes/contrib/xtrace/CMakeLists.txt
new file mode 100644
index 000000000..0c2748353
--- /dev/null
+++ b/runtimes/contrib/xtrace/CMakeLists.txt
@@ -0,0 +1,16 @@
+# Enable xtrace build only when there is an explicit user request
+option(BUILD_CONTRIB_XTRACE "Build xtrace" OFF)
+
+if(NOT BUILD_CONTRIB_XTRACE)
+  return()
+endif(NOT BUILD_CONTRIB_XTRACE)
+
+file(GLOB_RECURSE SOURCES "src/*.cc")
+
+add_executable(xtrace ${SOURCES})
+target_link_libraries(xtrace nnfw_lib_tflite)
+target_link_libraries(xtrace nnfw_lib_misc)
+target_link_libraries(xtrace nnfw_lib_cpp14)
+target_link_libraries(xtrace nnfw_lib_xray_pipe)
+target_link_libraries(xtrace nnfw_lib_xray_mux)
+target_link_libraries(xtrace nnfw_lib_xdata)
+target_link_libraries(xtrace ${LIB_PTHREAD})
+target_link_libraries(xtrace dl)
+
+install(TARGETS xtrace DESTINATION bin)
diff --git a/runtimes/contrib/xtrace/src/benchmark_event.cc b/runtimes/contrib/xtrace/src/benchmark_event.cc
new file mode 100644
index 000000000..54727d630
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/benchmark_event.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "benchmark_event.h"
+
+#include <xray/pipe.h>
+
+xray::event_code BMCategory::set(std::unique_ptr<BMEvent> &&event)
+{
+ _event = std::move(event);
+ return xray::event_code{0};
+}
+
+void BMCategory::reset(void) { _event.reset(); }
+
+void BMCategory::post(std::unique_ptr<BMEvent> &&evt_info)
+{
+ auto evt_cat = this;
+ auto evt_idx = set(std::move(evt_info));
+ const xray::event evt{evt_cat, evt_idx};
+ xray::pipe::post(&evt);
+ reset();
+}
diff --git a/runtimes/contrib/xtrace/src/benchmark_event.h b/runtimes/contrib/xtrace/src/benchmark_event.h
new file mode 100644
index 000000000..d544cb0c7
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/benchmark_event.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __BENCHMARK_EVENT_H__
+#define __BENCHMARK_EVENT_H__
+
+#include <xray/event.h>
+#include <xray/event_code.h>
+#include <xray/event_category.h>
+
+#include <chrono>
+#include <memory>
+
+enum BMPhase
+{
+ Warmup,
+ Stable
+};
+
+struct BMEvent
+{
+ virtual ~BMEvent() = default;
+};
+
+struct BMBegin : public BMEvent
+{
+ BMPhase phase;
+ uint32_t cur_iter;
+};
+
+struct BMEnd : public BMEvent
+{
+ BMPhase phase;
+ uint32_t cur_iter;
+ std::chrono::milliseconds elapsed;
+};
+
+class BMCategory final : public xray::event_category
+{
+private:
+ BMCategory() = default;
+
+public:
+ xray::event_code set(std::unique_ptr<BMEvent> &&event);
+ void reset(void);
+
+public:
+ const BMEvent *event(void) const { return _event.get(); }
+
+private:
+ std::unique_ptr<BMEvent> _event;
+
+public:
+ static BMCategory *get(void)
+ {
+ static BMCategory cat;
+ return &cat;
+ }
+
+public:
+ void post(std::unique_ptr<BMEvent> &&event);
+};
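+
+// Usage note: BMCategory::get()->post(std::move(event)) stores the event,
+// publishes it through xray::pipe, and then clears the stored pointer
+// (see benchmark_event.cc).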
+
+#endif // __BENCHMARK_EVENT_H__
diff --git a/runtimes/contrib/xtrace/src/benchmark_runner.cc b/runtimes/contrib/xtrace/src/benchmark_runner.cc
new file mode 100644
index 000000000..87ef1564f
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/benchmark_runner.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "benchmark_runner.h"
+#include "benchmark_event.h"
+
+#include <tensorflow/lite/model.h>
+
+#include <tflite/ext/kernels/register.h>
+#include <tflite/Assert.h>
+#include <tflite/NNAPISession.h>
+
+#include <misc/benchmark.h>
+
+#include <cpp14/memory.h>
+
+#include <iostream>
+
+using namespace tflite;
+using namespace nnfw::tflite;
+using namespace std::chrono;
+
+namespace
+{
+
+void notify(const BMBegin &event)
+{
+ BMCategory::get()->post(nnfw::cpp14::make_unique<BMBegin>(event));
+}
+
+void notify(const BMEnd &event) { BMCategory::get()->post(nnfw::cpp14::make_unique<BMEnd>(event)); }
+
+} // namespace
+
+void BMRunner<TFL_NNAPI_DELEGATE>::run(const std::string &filename) const
+{
+ BuiltinOpResolver op_resolver;
+ StderrReporter error_reporter;
+
+ auto model = FlatBufferModel::BuildFromFile(filename.c_str(), &error_reporter);
+
+ if (model == nullptr)
+ {
+ throw std::runtime_error{"Cannot create model"};
+ }
+
+ InterpreterBuilder builder(*model, op_resolver);
+
+ std::unique_ptr<Interpreter> interp;
+ TFLITE_ENSURE(builder(&interp));
+
+ auto sess = std::make_shared<nnfw::tflite::NNAPISession>(interp.release());
+
+ auto get_iteration_count = [](const BMPhase &phase) {
+ switch (phase)
+ {
+ case Warmup:
+ return 1; // Allow configuration
+ case Stable:
+ return 3;
+ default:
+ break;
+ }
+
+ throw std::runtime_error{"Error!"};
+ };
+
+ // Iteration!
+ for (auto phase : {Warmup, Stable})
+ {
+ uint32_t iteration_count = get_iteration_count(phase);
+
+ for (uint32_t n = 0; n < iteration_count; ++n)
+ {
+ // Notify event
+ {
+ BMBegin event;
+
+ event.phase = phase;
+ event.cur_iter = n;
+
+ notify(event);
+ }
+
+ sess->prepare();
+
+ std::chrono::milliseconds elapsed(0);
+ nnfw::misc::benchmark::measure(elapsed) << [&](void) {
+ if (!sess->run())
+ {
+ throw std::runtime_error{"run failed"};
+ }
+ };
+
+ sess->teardown();
+
+ // Notify
+ {
+ BMEnd event;
+
+ event.phase = phase;
+ event.cur_iter = n;
+ event.elapsed = elapsed;
+
+ notify(event);
+ }
+ }
+ }
+}
diff --git a/runtimes/contrib/xtrace/src/benchmark_runner.h b/runtimes/contrib/xtrace/src/benchmark_runner.h
new file mode 100644
index 000000000..40c5b510c
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/benchmark_runner.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __BENCHMARK_RUNNER_H__
+#define __BENCHMARK_RUNNER_H__
+
+#include <string>
+
+using TFLModelPath = std::string;
+
+enum BMRunnerType
+{
+ // Use T/F Lite interpreter with Android NN API Delegate
+ TFL_NNAPI_DELEGATE
+};
+
+template <BMRunnerType E> struct BMRunner;
+
+template <> struct BMRunner<TFL_NNAPI_DELEGATE>
+{
+ void run(const TFLModelPath &filename) const;
+};
+
+#endif // __BENCHMARK_RUNNER_H__
diff --git a/runtimes/contrib/xtrace/src/event_collector.cc b/runtimes/contrib/xtrace/src/event_collector.cc
new file mode 100644
index 000000000..2b37bf460
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/event_collector.cc
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "event_collector.h"
+#include "benchmark_event.h"
+
+// xtrace-internal libraries
+#include "str.h"
+
+// NNFW-internal libraries
+#include <xdata.h>
+#include <cpp14/memory.h>
+
+// C++ standard libraries
+#include <chrono>
+#include <iostream>
+
+// POSIX standard libraries
+#include <sys/time.h>
+#include <sys/resource.h>
+
+using nnfw::cpp14::make_unique;
+
+namespace
+{
+
+std::string timestamp(void)
+{
+ auto now = std::chrono::steady_clock::now();
+ return std::to_string(
+ std::chrono::duration_cast<std::chrono::microseconds>(now.time_since_epoch()).count());
+}
+
+class DurationEventBuilder
+{
+public:
+ DurationEventBuilder(const std::string &ts) : _ts{ts} {}
+
+ DurationEvent build(const std::string &name, const std::string &ph) const
+ {
+ DurationEvent evt;
+
+ evt.name = name;
+ evt.ph = ph;
+ evt.ts = _ts;
+
+ return evt;
+ }
+
+private:
+ std::string _ts;
+};
+
+void emit_rusage(EventRecorder *rec, const std::string &ts)
+{
+ struct rusage ru;
+
+ getrusage(RUSAGE_SELF, &ru);
+ {
+ CounterEvent evt;
+
+ evt.name = "maxrss";
+ evt.ph = "C";
+ evt.ts = ts;
+ evt.values["value"] = std::to_string(ru.ru_maxrss);
+
+ rec->emit(evt);
+ }
+
+ {
+ CounterEvent evt;
+
+ evt.name = "minflt";
+ evt.ph = "C";
+ evt.ts = ts;
+ evt.values["value"] = std::to_string(ru.ru_minflt);
+
+ rec->emit(evt);
+ }
+}
+
+std::ostream &operator<<(std::ostream &os, const BMPhase &phase)
+{
+ os << ((phase == Warmup) ? "Warmup" : "Iteration");
+ return os;
+}
+
+std::ostream &operator<<(std::ostream &os, const std::chrono::milliseconds &dur)
+{
+ os << dur.count();
+ return os;
+}
+
+} // namespace
+
+void EventCollector::notify(const xray::event *e)
+{
+ auto ts = timestamp();
+
+ // Record trace events (region enter/leave)
+ if (e->cat() == xdata::trace::category::get())
+ {
+ auto info = xdata::trace::category::get()->info();
+
+ switch (info->action())
+ {
+ case xdata::trace::enter:
+ _rec->emit(DurationEventBuilder(ts).build(info->region()->name(), "B"));
+ break;
+
+ case xdata::trace::leave:
+ _rec->emit(DurationEventBuilder(ts).build(info->region()->name(), "E"));
+ break;
+ }
+ }
+
+ // Record benchmark events
+ if (e->cat() == BMCategory::get())
+ {
+ auto make_head = [](const BMPhase &phase, uint32_t iter) { return str(phase, " ", iter); };
+
+ if (auto info = dynamic_cast<const BMBegin *>(BMCategory::get()->event()))
+ {
+ auto name = str(info->phase, info->cur_iter);
+ _rec->emit(DurationEventBuilder(ts).build(name, "B"));
+
+ auto head = make_head(info->phase, info->cur_iter);
+ std::cout << head << std::endl;
+ }
+
+ if (auto info = dynamic_cast<const BMEnd *>(BMCategory::get()->event()))
+ {
+ auto name = str(info->phase, info->cur_iter);
+ _rec->emit(DurationEventBuilder(ts).build(name, "E"));
+
+ auto head = make_head(info->phase, info->cur_iter);
+ std::cout << head << " - done " << std::endl;
+ std::cout << head << " takes " << info->elapsed << "ms" << std::endl;
+ }
+ }
+
+ // Trace resource usage for each event notification
+ emit_rusage(_rec, ts);
+}
diff --git a/runtimes/contrib/xtrace/src/event_collector.h b/runtimes/contrib/xtrace/src/event_collector.h
new file mode 100644
index 000000000..f088ecd0b
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/event_collector.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __EVENT_COLLECTOR_H__
+#define __EVENT_COLLECTOR_H__
+
+#include "event_recorder.h"
+
+#include <xray/mux.h>
+
+class EventCollector final : public xray::listener
+{
+public:
+ EventCollector(EventRecorder *rec) : _rec{rec}
+ {
+ // DO NOTHING
+ }
+
+public:
+ void notify(const xray::event *e) final;
+
+private:
+ EventRecorder *_rec = nullptr;
+};
+
+#endif // __EVENT_COLLECTOR_H__
diff --git a/runtimes/contrib/xtrace/src/event_recorder.cc b/runtimes/contrib/xtrace/src/event_recorder.cc
new file mode 100644
index 000000000..780eae19b
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/event_recorder.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "event_recorder.h"
+#include "str.h"
+
+#include <vector>
+
+namespace
+{
+
+std::string quote(const std::string &value)
+{
+ std::stringstream ss;
+ ss << '"' << value << '"';
+ return ss.str();
+}
+
+std::string field(const std::string &k, const std::string &v)
+{
+ std::stringstream ss;
+ ss << quote(k) << " : " << quote(v);
+ return ss.str();
+}
+
+struct Content // One Entry in Chrome Event Trace
+{
+ std::vector<std::pair<std::string, std::string>> flds;
+ std::vector<std::pair<std::string, std::string>> args;
+};
+
+std::string object(const Content &content)
+{
+ std::stringstream ss;
+
+ ss << "{ ";
+
+ ss << field(content.flds[0].first, content.flds[0].second);
+
+ for (uint32_t n = 1; n < content.flds.size(); ++n)
+ {
+ ss << ", " << field(content.flds.at(n).first, content.flds.at(n).second);
+ }
+
+ if (content.args.size() > 0)
+ {
+ ss << ", " << quote("args") << " : { ";
+ ss << field(content.args.at(0).first, content.args.at(0).second);
+
+ for (uint32_t n = 1; n < content.args.size(); ++n)
+ {
+ ss << ", " << field(content.args.at(n).first, content.args.at(n).second);
+ }
+
+ ss << "}";
+ }
+
+ ss << " }";
+
+ return ss.str();
+}
+
+void fill(Content &content, const Event &evt)
+{
+ content.flds.emplace_back("name", evt.name);
+ content.flds.emplace_back("pid", "0");
+ content.flds.emplace_back("tid", "0");
+ content.flds.emplace_back("ph", evt.ph);
+ content.flds.emplace_back("ts", evt.ts);
+}
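+
+// e.g. (illustrative values) a filled duration event serializes as:
+//   { "name" : "conv2d", "pid" : "0", "tid" : "0", "ph" : "B", "ts" : "1234" }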
+
+std::string object(const DurationEvent &evt)
+{
+ Content content;
+
+ fill(content, evt);
+
+ return ::object(content);
+}
+
+std::string object(const CounterEvent &evt)
+{
+ Content content;
+
+ fill(content, evt);
+
+ for (auto it = evt.values.begin(); it != evt.values.end(); ++it)
+ {
+ content.args.emplace_back(it->first, it->second);
+ }
+
+ return ::object(content);
+}
+
+} // namespace
+
+void EventRecorder::init()
+{
+ _os << "{" << std::endl;
+ _os << " " << quote("traceEvents") << ": [" << std::endl;
+}
+
+void EventRecorder::emit(const DurationEvent &evt)
+{
+ _os << " " << object(evt) << "," << std::endl;
+}
+void EventRecorder::emit(const CounterEvent &evt)
+{
+ _os << " " << object(evt) << "," << std::endl;
+}
+
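+// Close the JSON document. emit() leaves a trailing comma after the last
+// entry, so fini() appends an empty "{ }" object to keep the array valid.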
+void EventRecorder::fini()
+{
+ _os << " { }" << std::endl;
+ _os << " ]" << std::endl;
+ _os << "}" << std::endl;
+}
diff --git a/runtimes/contrib/xtrace/src/event_recorder.h b/runtimes/contrib/xtrace/src/event_recorder.h
new file mode 100644
index 000000000..9cc992178
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/event_recorder.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __EVENT_RECORDER_H__
+#define __EVENT_RECORDER_H__
+
+#include <map>
+#include <memory>
+
+#include <ostream>
+
+struct Event
+{
+ std::string name;
+ std::string ph; /* REQUIRED */
+ std::string ts; /* REQUIRED */
+};
+
+struct DurationEvent : public Event
+{
+ // TO BE FILLED
+};
+
+struct CounterEvent : public Event
+{
+ std::map<std::string, std::string> values;
+};
+
+//
+// Record Event as Chrome Trace Event File Format
+//
+// Reference: https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/edit
+//
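+// The resulting file has the shape
+//   { "traceEvents": [ { ... }, { ... }, { } ] }
+// where the trailing empty object closes the comma-separated list (see fini()).
+//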
+class EventRecorder
+{
+public:
+ EventRecorder(std::ostream &os) : _os(os)
+ {
+ // DO NOTHING
+ }
+
+public:
+ void init();
+
+public:
+ void emit(const DurationEvent &evt);
+ void emit(const CounterEvent &evt);
+
+public:
+ void fini();
+
+private:
+ std::ostream &_os;
+};
+
+#endif // __EVENT_RECORDER_H__
diff --git a/runtimes/contrib/xtrace/src/str.h b/runtimes/contrib/xtrace/src/str.h
new file mode 100644
index 000000000..a6d53a535
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/str.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __STR_H__
+#define __STR_H__
+
+#include <ostream>
+#include <sstream>
+
+template <typename Arg> void _str(std::ostream &os, Arg &&arg) { os << std::forward<Arg>(arg); }
+
+template <typename Arg, typename... Args> void _str(std::ostream &os, Arg &&arg, Args &&... args)
+{
+ _str(os, std::forward<Arg>(arg));
+ _str(os, std::forward<Args>(args)...);
+}
+
+template <typename... Args> std::string str(Args &&... args)
+{
+ std::stringstream ss;
+ _str(ss, std::forward<Args>(args)...);
+ return ss.str();
+}
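+
+// Example: str("iteration ", 3) returns the std::string "iteration 3".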
+
+#endif // __STR_H__
diff --git a/runtimes/contrib/xtrace/src/xtrace.cc b/runtimes/contrib/xtrace/src/xtrace.cc
new file mode 100644
index 000000000..117a2e663
--- /dev/null
+++ b/runtimes/contrib/xtrace/src/xtrace.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "event_recorder.h"
+#include "event_collector.h"
+#include "benchmark_runner.h"
+
+#include <cassert>
+#include <fstream>
+#include <iostream>
+#include <string>
+
+// xtrace --out <output path> <T/F Lite model path>
+static int entry(int argc, char **argv)
+{
+ assert(argc == 4);
+ assert(std::string(argv[1]) == "--out");
+
+ // Create a file
+ std::ofstream ofs{argv[2], std::ofstream::out};
+
+ // Create an event recorder
+ EventRecorder recorder{ofs};
+
+ recorder.init();
+
+ EventCollector event_collector{&recorder};
+
+ xray::mux::get().attach(&event_collector);
+
+ BMRunner<TFL_NNAPI_DELEGATE>().run(argv[3]);
+
+ xray::mux::get().detach(&event_collector);
+
+ recorder.fini();
+
+ return 0;
+}
+
+int main(int argc, char **argv)
+{
+ try
+ {
+ return entry(argc, argv);
+ }
+ catch (const std::exception &e)
+ {
+ std::cerr << e.what() << std::endl;
+ return 255;
+ }
+}
diff --git a/runtimes/include/NeuralNetworks.h b/runtimes/include/NeuralNetworks.h
new file mode 100644
index 000000000..7400806d8
--- /dev/null
+++ b/runtimes/include/NeuralNetworks.h
@@ -0,0 +1,6444 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @addtogroup NeuralNetworks
+ * @{
+ */
+
+/**
+ * @file NeuralNetworks.h
+ */
+
+#ifndef ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_H
+#define ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_H
+
+/******************************************************************
+ *
+ * IMPORTANT NOTICE:
+ *
+ * This file is part of Android's set of stable system headers
+ * exposed by the Android NDK (Native Development Kit).
+ *
+ * Third-party source AND binary code relies on the definitions
+ * here to be FROZEN ON ALL UPCOMING PLATFORM RELEASES.
+ *
+ * - DO NOT MODIFY ENUMS (EXCEPT IF YOU ADD NEW 32-BIT VALUES)
+ * - DO NOT MODIFY CONSTANTS OR FUNCTIONAL MACROS
+ * - DO NOT CHANGE THE SIGNATURE OF FUNCTIONS IN ANY WAY
+ * - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES
+ */
+
+// For compatibility with Android, check whether __ANDROID_API__ is defined.
+// If __ANDROID_API__ is pre-defined, this header may be used for Android builds.
+#ifndef __ANDROID_API__
+#define __ANDROID_API__ 29
+#define __ANDROID_API_Q__ 29
+#define __INTRODUCED_IN(api_level)
+typedef struct AHardwareBuffer AHardwareBuffer;
+#else
+#include <android/hardware_buffer.h>
+#endif // __ANDROID_API__
+#include <stddef.h>
+#include <stdint.h>
+#include <sys/cdefs.h>
+
+__BEGIN_DECLS
+
+/**
+ * Operand types.
+ *
+ * The type of operands that can be added to a model.
+ *
+ * Although we define many types, most operators accept just a few
+ * types. Most used are {@link ANEURALNETWORKS_TENSOR_FLOAT32},
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * and {@link ANEURALNETWORKS_INT32}.
+ *
+ * Available since API level 27.
+ */
+typedef enum {
+ /** A 32 bit floating point scalar value. */
+ ANEURALNETWORKS_FLOAT32 = 0,
+ /** A signed 32 bit integer scalar value. */
+ ANEURALNETWORKS_INT32 = 1,
+ /** An unsigned 32 bit integer scalar value. */
+ ANEURALNETWORKS_UINT32 = 2,
+ /** A tensor of 32 bit floating point values. */
+ ANEURALNETWORKS_TENSOR_FLOAT32 = 3,
+ /** A tensor of 32 bit integer values. */
+ ANEURALNETWORKS_TENSOR_INT32 = 4,
+ /**
+ * A tensor of 8 bit unsigned integers that represent real numbers.
+ *
+ * Attached to this tensor are two numbers that can be used to convert the
+ * 8 bit integer to the real value and vice versa. These two numbers are:
+ * - scale: a 32 bit floating point value greater than zero.
+ * - zeroPoint: a 32 bit integer, in range [0, 255].
+ *
+ * The formula is:
+ * real_value = (integer_value - zeroPoint) * scale.
+ */
+ ANEURALNETWORKS_TENSOR_QUANT8_ASYMM = 5,
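+ // Illustrative example: with scale = 0.5 and zeroPoint = 128, the stored
+ // 8-bit value 130 represents the real value (130 - 128) * 0.5 = 1.0.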
+#if __ANDROID_API__ >= __ANDROID_API_Q__
+ /**
+ * An 8 bit boolean scalar value.
+ *
+ * Values of this operand type are either true or false. A zero value
+ * represents false; any other value represents true.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_BOOL = 6,
+ /**
+ * A tensor of 16 bit signed integers that represent real numbers.
+ *
+ * Attached to this tensor is a number representing real value scale that is
+ * used to convert the 16 bit number to a real value in the following way:
+ * realValue = integerValue * scale.
+ *
+ * scale is a 32 bit floating point with value greater than zero.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TENSOR_QUANT16_SYMM = 7,
+ /**
+ * A tensor of IEEE 754 16 bit floating point values.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TENSOR_FLOAT16 = 8,
+ /**
+ * A tensor of 8 bit boolean values.
+ *
+ * Values of this operand type are either true or false. A zero value
+ * represents false; any other value represents true.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TENSOR_BOOL8 = 9,
+ /**
+ * An IEEE 754 16 bit floating point scalar value.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_FLOAT16 = 10,
+ /**
+ * A tensor of 8 bit signed integers that represent real numbers.
+ *
+ * This tensor is associated with additional fields that can
+ * be used to convert the 8 bit signed integer to the real value and vice versa.
+ * These fields are:
+ * - channelDim: a 32 bit unsigned integer indicating channel dimension.
+ * - scales: an array of positive 32 bit floating point values.
+ * The size of the scales array must be equal to dimensions[channelDim].
+ *
+ * {@link ANeuralNetworksModel_setOperandSymmPerChannelQuantParams} must be used
+ * to set the parameters for an Operand of this type.
+ *
+ * The channel dimension of this tensor must not be unknown (dimensions[channelDim] != 0).
+ *
+ * The formula is:
+ * realValue[..., C, ...] =
+ * integerValue[..., C, ...] * scales[C]
+ * where C is an index in the Channel dimension.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL = 11,
+
+ /**
+ * A tensor of 16 bit unsigned integers that represent real numbers.
+ *
+ * Attached to this tensor are two numbers that can be used to convert the
+ * 16 bit integer to the real value and vice versa. These two numbers are:
+ * - scale: a 32 bit floating point value greater than zero.
+ * - zeroPoint: a 32 bit integer, in range [0, 65535].
+ *
+ * The formula is:
+ * real_value = (integer_value - zeroPoint) * scale.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TENSOR_QUANT16_ASYMM = 12,
+
+ /**
+ * A tensor of 8 bit signed integers that represent real numbers.
+ *
+ * Attached to this tensor is a number representing real value scale that is
+ * used to convert the 8 bit number to a real value in the following way:
+ * realValue = integerValue * scale.
+ *
+ * scale is a 32 bit floating point with value greater than zero.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TENSOR_QUANT8_SYMM = 13,
+#endif // __ANDROID_API__ >= __ANDROID_API_Q__
+
+} OperandCode;
+
+/**
+ * Operation types.
+ *
+ * The type of operations that can be added to a model.
+ *
+ * Available since API level 27.
+ */
+typedef enum {
+ // Operations below are available since API level 27.
+
+ /**
+ * Adds two tensors, element-wise.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible
+ * dimensions. The output is the sum of both input tensors, optionally
+ * modified by an activation function.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its
+ * way forward.
+ *
+ * Example:
+ *
+ * input1.dimension = {4, 1, 2}
+ * input2.dimension = {5, 4, 3, 1}
+ * output.dimension = {5, 4, 3, 2}
+ *
+ * Since API level 29, generic zero-sized input tensor is supported. Zero
+ * dimension is only compatible with 0 or 1. The size of the output
+ * dimension is zero if either of corresponding input dimension is zero.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scales and zeroPoint can be different from input0 scale and zeroPoint.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ *
+ * Outputs:
+ * * 0: The sum, a tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from inputs' scale and zeroPoint.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_ADD = 0,
+
+ /**
+ * Performs a 2-D average pooling operation.
+ *
+ * The output dimensions are functions of the filter dimensions, stride, and
+ * padding.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[b, i, j, channel] =
+ * sum_{di, dj}(
+ * input[b, strides[1] * i + di, strides[2] * j + dj, channel]
+ * ) / sum(1)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both explicit padding and implicit padding are supported.
+ *
+ * Inputs (explicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Since API level 29, zero batches is supported for this
+ * tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the left, in the ‘width’ dimension.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the right, in the ‘width’ dimension.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the top, in the ‘height’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the bottom, in the ‘height’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * width.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * height.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 10: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Inputs (implicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Since API level 29, zero batches is supported for this
+ * tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
+ * padding scheme, has to be one of the
+ * {@link PaddingCode} values.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * width.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * height.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 7: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, out_height, out_width, depth].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_AVERAGE_POOL_2D = 1,
+
+ /**
+ * Concatenates the input tensors along the given dimension.
+ *
+ * The input tensors must have identical {@link OperandCode} and the same
+ * dimensions except the dimension along the concatenation axis.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} (full support since API
+ * level 29, see the input section)
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0 ~ n-1: The list of n input tensors, of shape
+ * [D0, D1, ..., Daxis(i), ..., Dm].
+ * Before API level 29, all input tensors of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * must have the same scale and zeroPoint as the output tensor.
+ * Since API level 29, zero-sized tensors are supported.
+ * * n: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
+ * concatenation axis.
+ *
+ * Outputs:
+ * * 0: The output, a tensor of the same {@link OperandCode} as the input
+ * tensors. The output shape is [D0, D1, ..., sum(Daxis(i)), ..., Dm].
+ * Since API level 29, for a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint values can be different from
+ * input tensors. Before API level 29 they have to be the same as for the input tensors.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_CONCATENATION = 2,
+
+ /**
+ * Performs a 2-D convolution operation.
+ *
+ * The CONV_2D op sweeps a 2-D filter that can mix channels together over a
+ * batch of images, applying the filter to each window of each image of the
+ * appropriate size.
+ *
+ * The output dimensions are functions of the filter dimensions, stride, and
+ * padding.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[b, i, j, channel] =
+ * sum_{di, dj, k} (
+ * input[b, strides[1] * i + di, strides[2] * j + dj, k] *
+ * filter[channel, di, dj, k]
+ * ) + bias[channel]
+ *
+ * Supported tensor {@link OperandCode} configurations:
+ * * 32 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT32} for input, filter, output, and bias.
+ *
+ * * Quantized:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, filter, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (with scale set to
+ * * * input.scale * filter.scale).
+ *
+ * Available since API level 29:
+ * * 16 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} for input, filter, output, and bias.
+ *
+ * * Quantized with symmetric per channel quantization for the filter:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} for filter.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (scale set to 0.0,
+ * * * each value scaling is separate and equal to input.scale * filter.scales[channel]).
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both explicit padding and implicit padding are supported.
+ *
+ * Inputs (explicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input. Since API level 29, zero batches is supported
+ * for this tensor.
+ * * 1: A 4-D tensor, of shape
+ * [depth_out, filter_height, filter_width, depth_in], specifying the
+ * filter. For tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} the channel
+ * dimension (extraParams.channelQuant.channelDim) must be set to 0.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias must be of the same
+ * type. For filter tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the bias should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint
+ * of 0 and bias_scale == input_scale * filter_scale. For filter tensor
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal to
+ * bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the left, in the ‘width’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the right, in the ‘width’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the top, in the ‘height’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the bottom, in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 10: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ * * 11: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for width. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on width dimension. If this input is set,
+ * input 12 (dilation factor for height) must be specified as well.
+ * Available since API level 29.
+ * * 12: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for height. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on height dimension. If this input is set,
+ * input 11 (dilation factor for width) must be specified as well.
+ * Available since API level 29.
+ *
+ * Inputs (implicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input. Since API level 29, zero batches is supported
+ * for this tensor.
+ * * 1: A 4-D tensor, of shape
+ * [depth_out, filter_height, filter_width, depth_in], specifying the
+ * filter. For tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} the channel
+ * dimension (extraParams.channelQuant.channelDim) must be set to 0.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias must be of the same
+ * type. For filter tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the bias should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint
+ * of 0 and bias_scale == input_scale * filter_scale. For filter tensor
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal to
+ * bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
+ * padding scheme, has to be one of the
+ * {@link PaddingCode} values.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 7: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ * * 8: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for width. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on width dimension. If this input is set,
+ * input 9 (dilation factor for height) must be specified as well.
+ * Available since API level 29.
+ * * 9: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for height. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on height dimension. If this input is set,
+ * input 8 (dilation factor for width) must be specified as well.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, out_height, out_width, depth_out]. Before API level 29,
+ * for output tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the following condition must be satisfied:
+ * output_scale > input_scale * filter_scale
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_CONV_2D = 3,
+
+ /**
+ * Performs a depthwise 2-D convolution operation.
+ *
+ * Given an input tensor of shape [batches, height, width, depth_in] and a
+ * filter tensor of shape [1, filter_height, filter_width, depth_out]
+ * containing depth_out convolutional filters of depth 1, DEPTHWISE_CONV
+ * applies a different filter to each input channel (expanding from 1
+ * channel to channel_multiplier channels for each), then concatenates the
+ * results together.
+ *
+ * The output has depth_out = depth_in * depth_multiplier channels.
+ * The output dimensions are functions of the filter dimensions, stride, and
+ * padding.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[b, i, j, k * channel_multiplier + q] =
+ * sum_{di, dj} (
+ * input[b, strides[1] * i + di, strides[2] * j + dj, k] *
+ * filter[1, di, dj, k * channel_multiplier + q]
+ * ) + bias[k * channel_multiplier + q]
+ *
+ * Supported tensor {@link OperandCode} configurations:
+ * * 32 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT32} for input, filter, output, and bias.
+ *
+ * * Quantized:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, filter, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (with scale set to
+ * * * input.scale * filter.scale).
+ *
+ * Available since API level 29:
+ * * 16 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} for input, filter, output, and bias.
+ *
+ * * Quantized with symmetric per channel quantization for the filter:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} for filter.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (scale set to 0.0,
+ * * * each value scaling is separate and equal to input.scale * filter.scales[channel]).
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both explicit padding and implicit padding are supported.
+ *
+ * Inputs (explicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input.
+ * * 1: A 4-D tensor, of shape [1, filter_height, filter_width, depth_out],
+ * specifying the filter. For tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} the channel
+ * dimension (extraParams.channelQuant.channelDim) must be set to 3.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias must be of the same
+ * type. For filter tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the bias should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint
+ * of 0 and bias_scale == input_scale * filter_scale. For filter tensor
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal to
+ * bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the left, in the ‘width’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the right, in the ‘width’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the top, in the ‘height’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the bottom, in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, specifying the depthwise
+ * multiplier.
+ * * 10: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 11: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ * * 12: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for width. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on width dimension. If this input is set,
+ * input 13 (dilation factor for height) must be specified as well.
+ * Available since API level 29.
+ * * 13: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for height. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on height dimension. If this input is set,
+ * input 12 (dilation factor for width) must be specified as well.
+ * Available since API level 29.
+ *
+ * Inputs (implicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input.
+ * * 1: A 4-D tensor, of shape [1, filter_height, filter_width, depth_out],
+ * specifying the filter.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias must be of the same
+ * type. For filter tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the bias should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint
+ * of 0 and bias_scale == input_scale * filter_scale. For filter tensor
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal to
+ * bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
+ * padding scheme, has to be one of the
+ * {@link PaddingCode} values.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the depthwise
+ * multiplier.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 8: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ * * 9: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for width. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on width dimension. If this input is set,
+ * input 10 (dilation factor for height) must be specified as well.
+ * Available since API level 29.
+ * * 10: An optional {@link ANEURALNETWORKS_INT32} scalar, specifying the dilation
+ * factor for height. Defaults to 1. If set to k > 1, there will be k-1 skipped
+ * cells between each filter element on height dimension. If this input is set,
+ * input 9 (dilation factor for width) must be specified as well.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, out_height, out_width, depth_out]. Before API level 29,
+ * for output tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the following condition must be satisfied:
+ * output_scale > input_scale * filter_scale
+ *
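+ * A worked example of the quantized bias rule above (the scale values are
+ * illustrative, not mandated by the specification):
+ *
+ *     // For an ANEURALNETWORKS_TENSOR_QUANT8_ASYMM input and filter:
+ *     const float input_scale  = 0.5f;
+ *     const float filter_scale = 0.25f;
+ *     // The INT32 bias operand must then use zeroPoint 0 and:
+ *     const float bias_scale = input_scale * filter_scale; // 0.125f
+ *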
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_DEPTHWISE_CONV_2D = 4,
+
+ /**
+ * Rearranges data from depth into blocks of spatial data.
+ *
+ * More specifically, this op outputs a copy of the input tensor where
+ * values from the depth dimension are moved in spatial blocks to the height
+ * and width dimensions. The value block_size indicates the input block size
+ * and how the data is moved.
+ *
+ * Chunks of data of size block_size * block_size from depth are rearranged
+ * into non-overlapping blocks of size block_size x block_size.
+ *
+ * The width of the output tensor is input_width * block_size, whereas the
+ * height is input_height * block_size. The depth of the input tensor must
+ * be divisible by block_size * block_size.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the block_size.
+ * block_size must be >=1 and block_size * block_size must be a divisor
+ * of the input depth.
+ * * 2: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape [batch, height*block_size,
+ * width*block_size, depth/(block_size*block_size)].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
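+ * A shape sketch for block_size = 2 on an NHWC input (the sizes are
+ * illustrative):
+ *
+ *     const uint32_t block = 2;
+ *     const uint32_t in[4] = {1, 2, 2, 4};  // [batch, height, width, depth]
+ *     const uint32_t out[4] = {in[0], in[1] * block, in[2] * block,
+ *                              in[3] / (block * block)};  // {1, 4, 4, 1}
+ *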
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_DEPTH_TO_SPACE = 5,
+
+ /**
+ * Dequantizes the input tensor.
+ *
+ * The formula is:
+ *
+ * output = (input - zeroPoint) * scale.
+ *
+ * Supported input tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} (since API level 29)
+ *
+ * Supported output tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}.
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor. Since API level 29, this tensor may be zero-sized.
+ *
+ * Outputs:
+ * * 0: A tensor with the same shape as input0.
+ *
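+ * A sketch of the formula above for a single quantized value (the scale and
+ * zeroPoint are illustrative):
+ *
+ *     const float scale = 0.5f;
+ *     const int32_t zeroPoint = 128;
+ *     const uint8_t q = 130;                       // quantized input value
+ *     const float out = (q - zeroPoint) * scale;   // 1.0f
+ *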
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_DEQUANTIZE = 6,
+
+ /**
+ * Looks up sub-tensors in the input tensor.
+ *
+ * This operator takes as input a tensor of values (Values) and
+ * a one-dimensional tensor of selection indices (Lookups).
+ * The output tensor is the concatenation of sub-tensors of Values as
+ * selected by Lookups.
+ *
+ * Think of Values as being sliced along its first dimension:
+ * The entries in Lookups select which slices are concatenated together
+ * to create the output tensor.
+ *
+ * For example, if Values has shape of [40, 200, 300] and
+ * Lookups has shape of [3], all three values found in Lookups are
+ * expected to be between 0 and 39. The resulting tensor must
+ * have shape of [3, 200, 300].
+ *
+ * If a value in Lookups is out of bounds, the operation must fail
+ * and an error must be reported.
+ *
+ * Supported value tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported value tensor rank: from 2
+ *
+ * Inputs:
+ * * 0: Lookups. A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}.
+ * The values are indices into the first dimension of Values.
+ * * 1: Values. An n-D tensor, where n >= 2, from which sub-tensors are
+ * extracted.
+ *
+ * Output:
+ * * 0: An n-D tensor with the same rank and shape as the Values
+ * tensor, except for the first dimension which has the same size
+ * as Lookups' only dimension.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input1.
+ *
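+ * A minimal wiring sketch; the operand index variables (lookups, values,
+ * output) are hypothetical and assumed to have been added to the model
+ * already:
+ *
+ *     const uint32_t op_inputs[2]  = {lookups, values};
+ *     const uint32_t op_outputs[1] = {output};
+ *     ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_EMBEDDING_LOOKUP,
+ *                                       2, op_inputs, 1, op_outputs);
+ *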
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_EMBEDDING_LOOKUP = 7,
+
+ /**
+ * Computes element-wise floor() on the input tensor.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} and dimensions as
+ * the input tensor.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_FLOOR = 8,
+
+ /**
+ * Denotes a fully (densely) connected layer, which connects all elements
+ * in the input tensor with each element in the output tensor.
+ *
+ * This layer implements the operation:
+ *
+ * outputs = activation(inputs * weights’ + bias)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4.
+ *
+ * Inputs:
+ * * 0: A tensor of at least rank 2, specifying the input. If rank is
+ * greater than 2, then it gets flattened to a 2-D Tensor. The
+ * (flattened) 2-D Tensor is reshaped (if necessary) to
+ * [batch_size, input_size], where "input_size" corresponds to the
+ * number of inputs to the layer, matching the second dimension of
+ * weights, and "batch_size" is calculated by dividing the number of
+ * elements by "input_size". Since API level 29, zero batch_size is
+ * supported for this tensor.
+ * * 1: A 2-D tensor, specifying the weights, of shape
+ * [num_units, input_size], where "num_units" corresponds to the number
+ * of output nodes.
+ * * 2: A 1-D tensor, of shape [num_units], specifying the bias. For input
+ * tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the bias should
+ * also be of {@link ANEURALNETWORKS_TENSOR_FLOAT32}. For input tensor
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias should be
+ * of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of 0 and
+ * bias_scale == input_scale * filter_scale.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ *
+ * Outputs:
+ * * 0: The output tensor, of shape [batch_size, num_units]. Before API
+ * level 29, for output tensor of {@link
+ * ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the following condition must
+ * be satisfied: output_scale > input_scale * filter_scale.
+ *
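+ * A sketch of the flattening rule above (the shapes are illustrative): a
+ * [2, 3, 4, 5] input with weights of shape [num_units, 20] is treated as a
+ * [batch_size, 20] input, where:
+ *
+ *     const uint32_t elements   = 2 * 3 * 4 * 5;         // 120
+ *     const uint32_t input_size = 20;                    // weights dim 1
+ *     const uint32_t batch_size = elements / input_size; // 6
+ *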
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_FULLY_CONNECTED = 9,
+
+ /**
+ * Looks up sub-tensors in the input tensor using a key-value map.
+ *
+ * This operator takes as input a tensor of values (Values),
+ * a one-dimensional tensor of selection values (Lookups) and
+ * a one-dimensional tensor that maps these values to Values
+ * indexes. The output tensor is the concatenation of sub-tensors of
+ * Values as selected by Lookups via Keys.
+ *
+ * Think of Values as being sliced along its outer-most dimension.
+ * The output is a concatenation of selected slices, with one slice
+ * for each entry of Lookups. The slice selected is the one at the
+ * same index as the Maps entry that matches the value in Lookups.
+ *
+ * For a hit, the corresponding sub-tensor of Values is included
+ * in the Output tensor. For a miss, the corresponding sub-tensor in
+ * Output must have zero values.
+ *
+ * For example, if Values has shape of [40, 200, 300],
+ * Keys should have a shape of [40]. If Lookups tensor has shape
+ * of [3], three slices are being concatenated, so the resulting tensor
+ * must have the shape of [3, 200, 300]. If the first entry in Lookups
+ * has the value 123456, that value must be located in Keys tensor.
+ * If the sixth entry of Keys contains 123456, the sixth slice of Values
+ * must be selected. If no entry in Keys has 123456, a slice of zeroes
+ * must be concatenated.
+ *
+ * Supported value tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported value tensor rank: from 2
+ *
+ * Inputs:
+ * * 0: Lookups. A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor with
+ * shape [ k ].
+ * * 1: Keys. A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor with shape
+ * [ n ]; Keys and Values pair represent a map, i.e., the ith element
+ * in Keys (Keys[i]) is the key to select the ith sub-tensor in Values
+ * (Values[i]), where 0 <= i <= n-1. Keys tensor *MUST* be sorted in
+ * ascending order.
+ * * 2: Values. A tensor with shape of [ n, … ]; i.e., the first dimension
+ * must be n.
+ *
+ * Outputs:
+ * * 0: Output. A tensor with shape [ k, … ].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input2.
+ * * 1: Hits. A boolean tensor with shape [ k ] indicates whether the lookup
+ * hits (True) or not (False).
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+ * A non-zero byte represents True, a hit. A zero indicates otherwise.
+ *
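+ * A reference sketch of the lookup contract for entry k (all names are
+ * illustrative; a real implementation may binary-search, since Keys is
+ * sorted):
+ *
+ *     int hit = 0;
+ *     for (uint32_t i = 0; i < n && !hit; ++i) {
+ *       if (keys[i] == lookups[k]) {
+ *         hit = 1;             // copy the slice Values[i] into Output[k]
+ *       }
+ *     }
+ *     hits[k] = (uint8_t)hit;  // 0 => Output[k] must be all zeros
+ *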
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_HASHTABLE_LOOKUP = 10,
+
+ /**
+ * Applies L2 normalization along the depth dimension.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[batch, row, col, channel] =
+ * input[batch, row, col, channel] /
+ * sqrt(sum_{c} pow(input[batch, row, col, c], 2))
+ *
+ * For input tensor with rank less than 4, independently normalizes each
+ * 1-D slice along dimension dim.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} (since API level 29)
+ *
+ * Supported tensor rank: up to 4
+ * Tensors with rank less than 4 are only supported since API level 29.
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be normalized.
+ * * 1: An optional {@link ANEURALNETWORKS_INT32} scalar, default to -1,
+ * specifying the dimension normalization would be performed on.
+ * Negative index is used to specify axis from the end (e.g. -1 for
+ * the last axis). Must be in the range [-n, n).
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} and same shape as input0.
+ * For {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the scale must be 1.f / 128 and the zeroPoint must be 128.
+ *
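+ * A reference sketch for one 1-D slice along the chosen axis (assuming
+ * <math.h>; the names are illustrative):
+ *
+ *     float sum_sq = 0.f;
+ *     for (int c = 0; c < depth; ++c) sum_sq += in[c] * in[c];
+ *     const float inv_norm = 1.f / sqrtf(sum_sq);
+ *     for (int c = 0; c < depth; ++c) out[c] = in[c] * inv_norm;
+ *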
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_L2_NORMALIZATION = 11,
+
+ /**
+ * Performs a 2-D L2 pooling operation.
+ *
+ * The output dimensions are functions of the filter dimensions, stride, and
+ * padding.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[b, i, j, c] =
+ * sqrt(sum_{di, dj} pow(input[b, strides[1] * i + di, strides[2] * j + dj, c], 2) /
+ * sum(1))
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both explicit padding and implicit padding are supported.
+ *
+ * Inputs (explicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Since API level 29, zero batches is supported for this
+ * tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the left, in the ‘width’ dimension.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the right, in the ‘width’ dimension.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the top, in the ‘height’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the bottom, in the ‘height’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * width.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * height.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 10: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Inputs (implicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Since API level 29, zero batches is supported for this
+ * tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
+ * padding scheme, has to be one of the
+ * {@link PaddingCode} values.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * width.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * height.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 7: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, out_height, out_width, depth].
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_L2_POOL_2D = 12,
+
+ /**
+ * Applies Local Response Normalization along the depth dimension.
+ *
+ * The 4-D input tensor is treated as a 3-D array of 1-D vectors (along the
+ * last dimension), and each vector is normalized independently. Within a
+ * given vector, each component is divided by the weighted, squared sum of
+ * inputs within depth_radius.
+ *
+ * The output is calculated using this formula:
+ *
+ * sqr_sum[a, b, c, d] = sum(
+ * pow(input[a, b, c, d - depth_radius : d + depth_radius + 1], 2))
+ * output = input / pow((bias + alpha * sqr_sum), beta)
+ *
+ * For input tensor with rank less than 4, independently normalizes each
+ * 1-D slice along specified dimension.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ * Tensors with rank less than 4 are only supported since API level 29.
+ *
+ * Inputs:
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the radius of
+ * the normalization window.
+ * * 2: A scalar, specifying the bias, must not be zero.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias
+ * value must be of {@link ANEURALNETWORKS_FLOAT16}.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the bias
+ * value must be of {@link ANEURALNETWORKS_FLOAT32}.
+ * * 3: A scalar, specifying the scale factor, alpha.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the
+ * alpha value must be of {@link ANEURALNETWORKS_FLOAT16}.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the
+ * alpha value must be of {@link ANEURALNETWORKS_FLOAT32}.
+ * * 4: A scalar, specifying the exponent, beta.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the beta
+ * value must be of {@link ANEURALNETWORKS_FLOAT16}.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the beta
+ * value must be of {@link ANEURALNETWORKS_FLOAT32}.
+ * * 5: An optional {@link ANEURALNETWORKS_INT32} scalar, default to -1,
+ * specifying the dimension normalization would be performed on.
+ * Negative index is used to specify axis from the end (e.g. -1 for
+ * the last axis). Must be in the range [-n, n).
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
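+ * A reference sketch for one element at channel d (assuming <math.h>;
+ * clamping of the window to valid channels is elided):
+ *
+ *     float sqr_sum = 0.f;
+ *     for (int c = d - depth_radius; c <= d + depth_radius; ++c)
+ *       sqr_sum += in[c] * in[c];
+ *     out[d] = in[d] / powf(bias + alpha * sqr_sum, beta);
+ *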
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION = 13,
+
+ /**
+ * Computes sigmoid activation on the input tensor element-wise.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = 1 / (1 + exp(-input))
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4.
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input. Since API level 29, this tensor may
+ * be zero-sized.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ * For {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the scale must be 1.f / 256 and the zeroPoint must be 0.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_LOGISTIC = 14,
+
+ /**
+ * Projects an input to a bit vector via locality-sensitive hashing.
+ *
+ * Supported input tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported input tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: Hash functions. Dim.size == 2, DataType: Float.
+ * Tensor[0].Dim[0]: Number of hash functions.
+ * Tensor[0].Dim[1]: Number of projected output bits generated by each
+ * hash function.
+ * If the projection type is Sparse:
+ * Tensor[0].Dim[1] + ceil(log2(Tensor[0].Dim[0])) <= 32
+ *
+ * * 1: Input. Dim.size >= 1, no restriction on DataType.
+ * * 2: Weight. Optional. Dim.size == 1, DataType: Float.
+ * If not set, each input element is considered to have the same weight
+ * of 1.0.
+ * Tensor[1].Dim[0] == Tensor[2].Dim[0]
+ * * 3: Type:
+ * Sparse:
+ * Value LSHProjectionType_SPARSE(=3) (since API level 29).
+ * Computed bit vector is considered to be sparse.
+ * Each output element is an int32 made up of multiple bits
+ * computed from hash functions.
+ *
+ * NOTE: To avoid collisions across hash functions, an offset value
+ * of k * (1 << Tensor[0].Dim[1]) will be added to each signature,
+ * where k is the index of the hash function.
+ *
+ * Value LSHProjectionType_SPARSE_DEPRECATED(=1).
+ * Legacy behavior that does not include the offset value.
+ *
+ * Dense:
+ * Value LSHProjectionType_DENSE(=2).
+ * Computed bit vector is considered to be dense. Each output
+ * element represents a bit and can take the value of either
+ * 0 or 1.
+ *
+ * Outputs:
+ * * 0: If the projection type is Sparse:
+ * Output.Dim == { Tensor[0].Dim[0] }
+ * A tensor of int32 that represents hash signatures.
+ *
+ * If the projection type is Dense:
+ * Output.Dim == { Tensor[0].Dim[0] * Tensor[0].Dim[1] }
+ * A flattened tensor that represents projected bit vectors.
+ *
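+ * A sketch of the sparse signature offset from the NOTE above (the 4
+ * output bits per hash function are an illustrative choice; k and bits
+ * are hypothetical):
+ *
+ *     const int32_t offset = k * (1 << 4);     // k: hash function index
+ *     const int32_t signature = offset + bits; // bits from hash function k
+ *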
+ * Available since API level 27.
+ * The offset value for sparse projections was added in API level 29.
+ */
+ ANEURALNETWORKS_LSH_PROJECTION = 15,
+
+ /**
+ * Performs a single time step in a Long Short-Term Memory (LSTM) layer.
+ *
+ * The LSTM operation is described by the following equations.
+ *
+ * \f{eqnarray*}{
+ * i_t =& \sigma(W_{xi}x_t+W_{hi}h_{t-1}+W_{ci}C_{t-1}+b_i) & \\
+ * f_t =& \sigma(W_{xf}x_t+W_{hf}h_{t-1}+W_{cf}C_{t-1}+b_f) & \\
+ * C_t =& clip(f_t \odot C_{t-1} + i_t \odot
+ * g(W_{xc}x_t+W_{hc}h_{t-1}+b_c),\ t_{cell}) & \\
+ * o_t =& \sigma(W_{xo}x_t+W_{ho}h_{t-1}+W_{co}C_t+b_o) & \\
+ * & & \\
+ * & clip(W_{proj}(o_t \odot g(C_t))+b_{proj},\ t_{proj})
+ * & if\ there\ is\ a\ projection; \\
+ * h_t =& & \\
+ * & o_t \odot g(C_t) & otherwise. \\
+ * \f}
+ * Where:
+ * * \f$x_t\f$ is the input,
+ * * \f$i_t\f$ is the input gate,
+ * * \f$f_t\f$ is the forget gate,
+ * * \f$C_t\f$ is the cell state,
+ * * \f$o_t\f$ is the output,
+ * * \f$h_t\f$ is the output state,
+ * * \f$\sigma\f$ is the logistic sigmoid function,
+ * * \f$g\f$ is the cell input and cell output activation function, usually
+ * \f$tanh\f$,
+ * * \f$W_{xi}\f$ is the input-to-input weight matrix,
+ * * \f$W_{hi}\f$ is the recurrent-to-input weight matrix,
+ * * \f$W_{ci}\f$ is the cell-to-input weight matrix,
+ * * \f$b_i\f$ is the input gate bias,
+ * * \f$W_{xf}\f$ is the input-to-forget weight matrix,
+ * * \f$W_{hf}\f$ is the recurrent-to-forget weight matrix,
+ * * \f$W_{cf}\f$ is the cell-to-forget weight matrix,
+ * * \f$b_f\f$ is the forget gate bias,
+ * * \f$W_{xc}\f$ is the input-to-cell weight matrix,
+ * * \f$W_{hc}\f$ is the recurrent-to-cell weight matrix,
+ * * \f$b_c\f$ is the cell bias,
+ * * \f$W_{xo}\f$ is the input-to-output weight matrix,
+ * * \f$W_{ho}\f$ is the recurrent-to-output weight matrix,
+ * * \f$W_{co}\f$ is the cell-to-output weight matrix,
+ * * \f$b_o\f$ is the output gate bias,
+ * * \f$W_{proj}\f$ is the projection weight matrix,
+ * * \f$b_{proj}\f$ is the projection bias,
+ * * \f$t_{cell}\f$ is the threshold for clipping the cell state, and
+ * * \f$t_{proj}\f$ is the threshold for clipping the projected output.
+ * * \f$\odot\f$ is the
+ * <a href="https://en.wikipedia.org/wiki/Hadamard_product_(matrices)">
+ * Hadamard product</a> that takes two matrices and produces another
+ * matrix, each element of which is the product of the corresponding
+ * elements of the input matrices.
+ *
+ * Since API level 29, LSTM supports layer normalization.
+ * When layer normalization is used, the inputs to the internal activation
+ * functions (sigmoid and \f$g\f$) are normalized, rescaled and recentered
+ * following the approach in Section 3.1 of
+ * https://arxiv.org/pdf/1607.06450.pdf
+ *
+ * The operation has the following independently optional inputs:
+ * * The cell-to-input weights (\f$W_{ci}\f$), cell-to-forget weights
+ * (\f$W_{cf}\f$) and cell-to-output weights (\f$W_{co}\f$) either all
+ * have values or none of them have values (i.e., all set to null). If
+ * they have values, the peephole optimization is used.
+ * * The input-to-input weights (\f$W_{xi}\f$), recurrent-to-input weights
+ * (\f$W_{hi}\f$) and input gate bias (\f$b_i\f$) either all have values,
+ * or none of them have values. If they have no values, coupling of input
+ * and forget gates (CIFG) is used, in which case the input gate
+ * (\f$i_t\f$) is calculated using the following equation instead.
+ * \f{eqnarray*}{
+ * i_t = 1 - f_t
+ * \f}
+ * In case peephole optimization is used and CIFG is not used, the
+ * cell-to-input weights (\f$W_{ci}\f$) must be present. Otherwise, the
+ * cell-to-input weights must have no value.
+ * * The projection weights (\f$W_{proj}\f$) are required only for the
+ * recurrent projection layer, and should otherwise have no value.
+ * * The projection bias (\f$b_{proj}\f$) may (but is not required to) have a
+ * value if the recurrent projection layer exists, and should otherwise
+ * have no value.
+ * * (API level >= 29) The four layer normalization weights either all have
+ * values or none of them have values. Additionally, if CIFG is used,
+ * input layer normalization weights tensor is omitted and the other layer
+ * normalization weights either all have values or none of them have
+ * values. Layer normalization is used when the values of all the layer
+ * normalization weights are present.
+ *
+ * References:
+ *
+ * The default non-peephole non-CIFG implementation is based on:
+ * http://www.bioinf.jku.at/publications/older/2604.pdf
+ * S. Hochreiter and J. Schmidhuber. "Long Short-Term Memory". Neural
+ * Computation, 9(8):1735-1780, 1997.
+ *
+ * The peephole implementation and projection layer is based on:
+ * https://research.google.com/pubs/archive/43905.pdf
+ * Hasim Sak, Andrew Senior, and Francoise Beaufays. "Long short-term memory
+ * recurrent neural network architectures for large scale acoustic
+ * modeling." INTERSPEECH, 2014.
+ * (However, the concept of peephole optimization was introduced in work
+ * prior to this paper.)
+ *
+ * The coupling of input and forget gate (CIFG) is based on:
+ * http://arxiv.org/pdf/1503.04069.pdf
+ * Greff et al. "LSTM: A Search Space Odyssey"
+ *
+ * The layer normalization is based on:
+ * https://arxiv.org/pdf/1607.06450.pdf
+ * Jimmy Ba et al. "Layer Normalization"
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * All input and output tensors must be of the same type.
+ *
+ * Inputs:
+ * * 0: The input (\f$x_t\f$).
+ * A 2-D tensor of shape [batch_size, input_size], where “batch_size”
+ * corresponds to the batching dimension, and “input_size” is the size
+ * of the input.
+ * * 1: The input-to-input weights (\f$W_{xi}\f$). Optional.
+ * A 2-D tensor of shape [num_units, input_size], where “num_units”
+ * corresponds to the number of cell units.
+ * * 2: The input-to-forget weights (\f$W_{xf}\f$).
+ * A 2-D tensor of shape [num_units, input_size].
+ * * 3: The input-to-cell weights (\f$W_{xc}\f$).
+ * A 2-D tensor of shape [num_units, input_size].
+ * * 4: The input-to-output weights (\f$W_{xo}\f$).
+ * A 2-D tensor of shape [num_units, input_size].
+ * * 5: The recurrent-to-input weights (\f$W_{hi}\f$). Optional.
+ * A 2-D tensor of shape [num_units, output_size], where “output_size”
+ * corresponds to either the number of cell units (i.e., “num_units”),
+ * or the second dimension of the “projection_weights”, if defined.
+ * * 6: The recurrent-to-forget weights (\f$W_{hf}\f$).
+ * A 2-D tensor of shape [num_units, output_size].
+ * * 7: The recurrent-to-cell weights (\f$W_{hc}\f$).
+ * A 2-D tensor of shape [num_units, output_size].
+ * * 8: The recurrent-to-output weights (\f$W_{ho}\f$).
+ * A 2-D tensor of shape [num_units, output_size].
+ * * 9: The cell-to-input weights (\f$W_{ci}\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 10: The cell-to-forget weights (\f$W_{cf}\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 11: The cell-to-output weights (\f$W_{co}\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 12: The input gate bias (\f$b_i\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 13: The forget gate bias (\f$b_f\f$).
+ * A 1-D tensor of shape [num_units].
+ * * 14: The cell bias (\f$b_c\f$).
+ * A 1-D tensor of shape [num_units].
+ * * 15: The output gate bias (\f$b_o\f$).
+ * A 1-D tensor of shape [num_units].
+ * * 16: The projection weights (\f$W_{proj}\f$). Optional.
+ * A 2-D tensor of shape [output_size, num_units].
+ * * 17: The projection bias (\f$b_{proj}\f$). Optional.
+ * A 1-D tensor of shape [output_size].
+ * * 18: The output state (in) (\f$h_{t-1}\f$).
+ * A 2-D tensor of shape [batch_size, output_size].
+ * * 19: The cell state (in) (\f$C_{t-1}\f$).
+ * A 2-D tensor of shape [batch_size, num_units].
+ * * 20: The activation function (\f$g\f$).
+ * A value indicating the activation function:
+ * <ul>
+ * <li>0: None;
+ * <li>1: Relu;
+ * <li>3: Relu6;
+ * <li>4: Tanh;
+ * <li>6: Sigmoid.
+ * </ul>
+ * * 21: The clipping threshold (\f$t_{cell}\f$) for the cell state, such
+ * that values are bound within [-cell_clip, cell_clip]. If set to 0.0
+ * then clipping is disabled.
+ * Until API level 29 this scalar must be of type {@link
+ * ANEURALNETWORKS_FLOAT32}. Since API level 29, if all the input
+ * tensors have type {@link ANEURALNETWORKS_TENSOR_FLOAT32}, this
+ * scalar must be of the type {@link ANEURALNETWORKS_FLOAT32},
+ * otherwise if all the input tensors have the type {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT16}, this scalar must be of type {@link
+ * ANEURALNETWORKS_FLOAT16}.
+ * * 22: The clipping threshold (\f$t_{proj}\f$) for the output from the
+ * projection layer, such that values are bound within
+ * [-proj_clip, proj_clip]. If set to 0.0 then clipping is disabled.
+ * Until API level 29 this scalar must be of type {@link
+ * ANEURALNETWORKS_FLOAT32}. Since API level 29, if all the input
+ * tensors have type {@link ANEURALNETWORKS_TENSOR_FLOAT32}, this
+ * scalar must be of the type {@link ANEURALNETWORKS_FLOAT32},
+ * otherwise if all the input tensors have the type {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT16}, this scalar must be of type {@link
+ * ANEURALNETWORKS_FLOAT16}.
+ * Since API level 29, there are additional inputs to this op:
+ * * 23: The input layer normalization weights.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at input gate.
+ * * 24: The forget layer normalization weights.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at forget gate.
+ * * 25: The cell layer normalization weights.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at cell gate.
+ * * 26: The output layer normalization weights.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at output gate.
+ *
+ * Outputs:
+ * * 0: The scratch buffer.
+ * A 2-D tensor of shape [batch_size, num_units * 3] with CIFG, or
+ * [batch_size, num_units * 4] without CIFG.
+ * * 1: The output state (out) (\f$h_t\f$).
+ * A 2-D tensor of shape [batch_size, output_size].
+ * * 2: The cell state (out) (\f$C_t\f$).
+ * A 2-D tensor of shape [batch_size, num_units].
+ * * 3: The output (\f$o_t\f$).
+ * A 2-D tensor of shape [batch_size, output_size]. This is effectively
+ * the same as the current “output state (out)” value.
+ *
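+ * A sketch of the scratch buffer shape (output 0) per the rule above;
+ * use_cifg, num_units and batch_size are hypothetical variables:
+ *
+ *     const uint32_t cols = use_cifg ? num_units * 3 : num_units * 4;
+ *     const uint32_t scratch_dims[2] = {batch_size, cols};
+ *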
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_LSTM = 16,
+
+ /**
+ * Performs a 2-D max pooling operation.
+ *
+ * The output dimensions are functions of the filter dimensions, stride, and
+ * padding.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[b, i, j, channel] =
+ * max_{di, dj} (
+ * input[b, strides[1] * i + di, strides[2] * j + dj, channel]
+ * )
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both explicit padding and implicit padding are supported.
+ *
+ * Inputs (explicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Since API level 29, zero batches is supported for this
+ * tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the left, in the ‘width’ dimension.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the right, in the ‘width’ dimension.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the top, in the ‘height’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the bottom, in the ‘height’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * width.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * height.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 10: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Inputs (implicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Since API level 29, zero batches is supported for this
+ * tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
+ * padding scheme, has to be one of the
+ * {@link PaddingCode} values.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * width.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the filter
+ * height.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 7: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, out_height, out_width, depth].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
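+ * With explicit padding, the output spatial size follows the standard
+ * pooling arithmetic (a sketch, not normative; all names are hypothetical):
+ *
+ *     const uint32_t out_width =
+ *         (width + pad_left + pad_right - filter_width) / stride_width + 1;
+ *     const uint32_t out_height =
+ *         (height + pad_top + pad_bottom - filter_height) / stride_height + 1;
+ *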
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_MAX_POOL_2D = 17,
+
+ /**
+ * Multiplies two tensors, element-wise.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible
+ * dimensions. The output is the product of both input tensors, optionally
+ * modified by an activation function.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the resulting output is the maximum size along each dimension
+ * of the input operands. It starts with the trailing dimensions, and works
+ * its way forward.
+ *
+ * Since API level 29, generic zero-sized input tensors are supported. A
+ * zero dimension is only compatible with 0 or 1. The size of the output
+ * dimension is zero if either of the corresponding input dimensions is zero.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ *
+ * Outputs:
+ * * 0: The product, a tensor of the same {@link OperandCode} as input0.
+ * For output tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the following condition must be satisfied:
+ * output_scale > input1_scale * input2_scale.
+ *
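+ * A sketch of the per-dimension broadcast rule above (dimensions are
+ * aligned from the trailing end; this helper assumes the pair is already
+ * known to be compatible):
+ *
+ *     static uint32_t broadcast_dim(uint32_t a, uint32_t b) {
+ *       return (a == 1) ? b : a;  // valid when a == b, a == 1, or b == 1
+ *     }
+ *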
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_MUL = 18,
+
+ /**
+ * Computes rectified linear activation on the input tensor element-wise.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = max(0, input)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4.
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input. Since API level 29, this tensor may
+ * be zero-sized.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_RELU = 19,
+
+ /**
+ * Computes rectified linear 1 activation on the input tensor element-wise.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = min(1.f, max(-1.f, input))
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4.
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input. Since API level 29, this tensor may
+ * be zero-sized.
+ *
+ * Outputs:
+ * * 0: The output tensor of the same shape as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_RELU1 = 20,
+
+ /**
+ * Computes rectified linear 6 activation on the input tensor element-wise.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = min(6, max(0, input))
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4.
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input. Since API level 29, this tensor may
+ * be zero-sized.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_RELU6 = 21,
+
+ /**
+ * Reshapes a tensor.
+ *
+ * Given a tensor, this operation returns a tensor that has the same values
+ * as the input tensor, but with a newly specified shape.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4.
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the tensor to be reshaped.
+ * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, defining the
+ * shape of the output tensor. The number of elements implied by shape
+ * must be the same as the number of elements in the input tensor.
+ *
+ * If one component of shape is the special value -1, the size of that
+ * dimension is computed so that the total size remains constant. In
+ * particular, a shape of [-1] flattens into 1-D. At most one component
+ * of shape can be -1.
+ *
+ * Outputs:
+ * * 0: The output tensor, of shape specified by the input shape.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
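+ * A sketch of the -1 inference rule above (the shapes are illustrative): a
+ * [2, 3, 4] input reshaped with shape = {4, -1} yields [4, 6]:
+ *
+ *     const uint32_t total = 2 * 3 * 4;     // elements in the input
+ *     const uint32_t inferred = total / 4;  // 6, so the output is [4, 6]
+ *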
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_RESHAPE = 22,
+
+ /**
+ * Resizes images to a given size using bilinear interpolation.
+ *
+ * Resized images will be distorted if their output aspect ratio is not the
+ * same as the input aspect ratio. The corner pixels of the output may not
+ * be the same as the corner pixels of the input.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} (since API level 29)
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both resizing by shape and resizing by scale are supported.
+ *
+ * Inputs (resizing by shape):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Since API level 29, zero batches is supported for this
+ * tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * width of the output tensor.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * height of the output tensor.
+ * * 3: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Inputs (resizing by scale, since API level 29):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Zero batches is supported for this tensor.
+ * * 1: A scalar, specifying width_scale, the scaling factor of the width
+ * dimension from the input tensor to the output tensor. The output
+ * width is calculated as new_width = floor(width * width_scale).
+ * The scalar must be of {@link ANEURALNETWORKS_FLOAT16} if input0 is
+ * of {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of
+ * {@link ANEURALNETWORKS_FLOAT32} otherwise.
+ * * 2: A scalar, specifying height_scale, the scaling factor of the height
+ * dimension from the input tensor to the output tensor. The output
+ * height is calculated as new_height = floor(height * height_scale).
+ * The scalar must be of {@link ANEURALNETWORKS_FLOAT16} if input0 is
+ * of {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of
+ * {@link ANEURALNETWORKS_FLOAT32} otherwise.
+ * * 3: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, new_height, new_width, depth].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
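+ * A sketch of the resize-by-scale arithmetic above (assuming <math.h>;
+ * the sizes and scales are illustrative):
+ *
+ *     const float width_scale = 1.5f, height_scale = 1.5f;
+ *     const uint32_t new_width  = (uint32_t)floorf(7.f * width_scale);   // 10
+ *     const uint32_t new_height = (uint32_t)floorf(10.f * height_scale); // 15
+ *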
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_RESIZE_BILINEAR = 23,
+
+ /**
+ * A basic recurrent neural network layer.
+ *
+ * This layer implements the operation:
+ * outputs = state = activation(inputs * input_weights +
+ * state * recurrent_weights + bias)
+ *
+ * Where:
+ * * “input_weights” is a weight matrix that multiplies the inputs;
+ * * “recurrent_weights” is a weight matrix that multiplies the current
+ * “state” which itself is the output from the previous time step
+ * computation;
+ * * “bias” is a bias vector (added to each output vector in the batch);
+ * * “activation” is the function passed as the “fused_activation_function”
+ * argument (if not “NONE”).
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * The input tensors must all be the same type.
+ *
+ * Inputs:
+ * * 0: input.
+ * A 2-D tensor of shape [batch_size, input_size], where “batch_size”
+ * corresponds to the batching dimension, and “input_size” is the size
+ * of the input.
+ * * 1: weights.
+ * A 2-D tensor of shape [num_units, input_size], where “num_units”
+ * corresponds to the number of units.
+ * * 2: recurrent_weights.
+ * A 2-D tensor of shape [num_units, num_units], with columns
+ * corresponding to the weights from each unit.
+ * * 3: bias.
+ * A 1-D tensor of shape [num_units].
+ * * 4: hidden state (in).
+ * A 2-D tensor of shape [batch_size, num_units].
+ * * 5: fused_activation_function.
+ * An optional {@link FuseCode} value indicating the
+ * activation function. If “NONE” is specified then it results in a
+ * linear activation.
+ *
+ * Outputs:
+ * * 0: hidden state (out).
+ * A 2-D tensor of shape [batch_size, num_units].
+ *
+ * * 1: output.
+ * A 2-D tensor of shape [batch_size, num_units]. This is effectively
+ * the same as the current state value.
+ *
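+ * A minimal wiring sketch; all operand index variables are hypothetical
+ * and assumed to have been added to the model in the order listed above:
+ *
+ *     const uint32_t op_inputs[6]  = {input, weights, recurrent_weights,
+ *                                     bias, hidden_state_in, activation};
+ *     const uint32_t op_outputs[2] = {hidden_state_out, output};
+ *     ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_RNN,
+ *                                       6, op_inputs, 2, op_outputs);
+ *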
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_RNN = 24,
+
+ /**
+ * Computes the softmax activation on the input tensor element-wise, per
+ * batch, by normalizing the input vector so the maximum coefficient is
+ * zero.
+ *
+ * The output is calculated using this formula:
+ *
+ * output[batch, i] =
+ * exp((input[batch, i] - max(input[batch, :])) * beta) /
+ * sum_{k}{exp((input[batch, k] - max(input[batch, :])) * beta)}
+ *
+ * For input tensor with rank other than 2, the activation will be applied
+ * independently on each 1-D slice along specified dimension.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4.
+ * Tensors with rank other than 2 or 4 are only supported since API level 29.
+ *
+ * Inputs:
+ * * 0: A 2-D or 4-D tensor, specifying the input. Since
+ * API level 29, this tensor may be zero-sized.
+ * * 1: A scalar, specifying the positive scaling factor for the exponent,
+ * beta. If input0 is of {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the scalar must be of
+ * {@link ANEURALNETWORKS_FLOAT32}. If input0 is of {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT16}, then the scalar must be of {@link
+ * ANEURALNETWORKS_FLOAT16}.
+ * * 2: An optional {@link ANEURALNETWORKS_INT32} scalar, default to -1,
+ * specifying the dimension the activation would be performed on.
+ * Negative index is used to specify axis from the end (e.g. -1 for
+ * the last axis). Must be in the range [-n, n).
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ * For {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the scale must be 1.f / 256 and the zeroPoint must be 0.
+ *
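+ * A reference sketch for one 1-D slice with beta = 1.0f (assuming
+ * <math.h>; the names are illustrative):
+ *
+ *     float max_v = in[0];
+ *     for (int i = 1; i < n; ++i) if (in[i] > max_v) max_v = in[i];
+ *     float denom = 0.f;
+ *     for (int i = 0; i < n; ++i) denom += expf(in[i] - max_v);
+ *     for (int i = 0; i < n; ++i) out[i] = expf(in[i] - max_v) / denom;
+ *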
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_SOFTMAX = 25,
+
+ /**
+ * Rearranges blocks of spatial data into depth.
+ *
+ * More specifically, this op outputs a copy of the input tensor where
+ * values from the height and width dimensions are moved to the depth
+ * dimension. The value block_size indicates the input block size and how
+ * the data is moved.
+ *
+ * Non-overlapping blocks of spatial data of size block_size x block_size
+ * are rearranged into chunks of depth data of size block_size * block_size.
+ *
+ * The depth of the output tensor is input_depth * block_size * block_size.
+ * The input tensor's height and width must be divisible by block_size.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the block_size.
+ * block_size must be >=1 and block_size must be a divisor of both the
+ * input height and width.
+ * * 2: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape [batches, height/block_size,
+ * width/block_size, depth_in*block_size*block_size].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
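+ * A shape sketch for block_size = 2 on an NHWC input, the inverse of the
+ * DEPTH_TO_SPACE case (the sizes are illustrative):
+ *
+ *     const uint32_t block = 2;
+ *     const uint32_t in[4] = {1, 4, 4, 1};  // [batch, height, width, depth]
+ *     const uint32_t out[4] = {in[0], in[1] / block, in[2] / block,
+ *                              in[3] * block * block};  // {1, 2, 2, 4}
+ *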
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_SPACE_TO_DEPTH = 26,
+
+ /**
+ * The SVDF op is a kind of stateful layer derived from the notion that a
+ * densely connected layer that's processing a sequence of input frames can
+ * be approximated by using a singular value decomposition of each of its
+ * nodes. The implementation is based on:
+ *
+ * https://research.google.com/pubs/archive/43813.pdf
+ *
+ * P. Nakkiran, R. Alvarez, R. Prabhavalkar, C. Parada.
+ * “Compressing Deep Neural Networks using a Rank-Constrained Topology”.
+ * INTERSPEECH, 2015.
+ *
+ * It processes the incoming input using a 2-stage filtering mechanism:
+ * * stage 1 performs filtering on the "features" dimension, whose outputs
+ * get pushed into a memory of fixed-size memory_size.
+ * * stage 2 performs filtering on the "time" dimension of the memory_size
+ * memoized outputs of stage 1.
+ *
+ * Specifically, for rank 1, this layer implements the operation:
+ *
+ * memory = push(conv1d(inputs, weights_feature, feature_dim,
+ * "ANEURALNETWORKS_PADDING_VALID"));
+ * outputs = activation(memory * weights_time + bias);
+ *
+ * Where:
+ * * “weights_feature” is a weights matrix that processes the inputs (by
+ * convolving the input with every “feature filter”), and whose outputs
+ * get pushed, stacked in order, into the fixed-size “memory” (the oldest
+ * entry gets dropped);
+ * * “weights_time” is a weights matrix that processes the “memory” (by a
+ * batched matrix multiplication on the num_units);
+ * * “bias” is an optional bias vector (added to each output vector in the
+ * batch); and
+ * * “activation” is the function passed as the “fused_activation_function”
+ * argument (if not “NONE”).
+ *
+ * Each rank adds a dimension to the weights matrices by means of stacking
+ * the filters.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * All input tensors must be the same type.
+ *
+ * Inputs:
+ * * 0: input.
+ * A 2-D tensor of shape [batch_size, input_size], where “batch_size”
+ * corresponds to the batching dimension, and “input_size” is the size
+ * of the input.
+ * * 1: weights_feature.
+ * A 2-D tensor of shape [num_units, input_size], where “num_units”
+ * corresponds to the number of units.
+ * * 2: weights_time.
+ * A 2-D tensor of shape [num_units, memory_size], where “memory_size”
+ * corresponds to the fixed-size of the memory.
+ * * 3: bias.
+ * An optional 1-D tensor of shape [num_units].
+ * * 4: state (in).
+ * A 2-D tensor of shape [batch_size, (memory_size - 1) * num_units * rank].
+ * * 5: rank.
+ * The rank of the SVD approximation.
+ * * 6: fused_activation_function.
+ * An optional {@link FuseCode} value indicating the
+ * activation function. If “NONE” is specified then it results in a
+ * linear activation.
+ *
+ * Outputs:
+ * * 0: state (out).
+ * A 2-D tensor of the same {@link OperandCode} as the inputs, with shape
+ * [batch_size, (memory_size - 1) * num_units * rank].
+ * * 1: output.
+ * A 2-D tensor of the same {@link OperandCode} as the inputs, with shape
+ * [batch_size, num_units].
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_SVDF = 27,
+
+ /**
+ * Computes hyperbolic tangent of input tensor element-wise.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = tanh(input)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} (since API level 29)
+ *
+ * Supported tensor rank: up to 4.
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input. Since API level 29, this tensor may
+ * be zero-sized.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ * For {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the scale must be 1.f / 128 and the zeroPoint must be 128.
+ *
+ * Available since API level 27.
+ */
+ ANEURALNETWORKS_TANH = 28,
+
+ // Operations below are available since API level 28.
+
+ // TODO: make the description easier to understand.
+ /**
+ * BatchToSpace for N-dimensional tensors.
+ *
+ * This operation reshapes the batch dimension (dimension 0) into M + 1
+ * dimensions of shape block_shape + [batch], and interleaves these blocks
+ * back into the grid defined by the spatial dimensions [1, ..., M] to
+ * obtain a result with the same rank as the input.
+ *
+ * This is the reverse of SpaceToBatch.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be reshaped
+ * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the block
+ * sizes for each spatial dimension of the input tensor. All values
+ * must be >= 1.
+ * * 2: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
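+ * A shape sketch for an NHWC input with block sizes {2, 2} (the sizes are
+ * illustrative):
+ *
+ *     const uint32_t block[2] = {2, 2};
+ *     const uint32_t in[4] = {4, 1, 1, 1};  // [batch, height, width, depth]
+ *     const uint32_t out[4] = {in[0] / (block[0] * block[1]),
+ *                              in[1] * block[0], in[2] * block[1],
+ *                              in[3]};       // {1, 2, 2, 1}
+ *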
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_BATCH_TO_SPACE_ND = 29,
+
+ /**
+ * Element-wise division of two tensors.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible
+ * dimensions. The output is the result of dividing the first input tensor
+ * by the second, optionally modified by an activation function.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Example:
+ * input1.dimension = {4, 1, 2}
+ * input2.dimension = {5, 4, 3, 1}
+ * output.dimension = {5, 4, 3, 2}
+ *
+ * Since API level 29, generic zero-sized input tensors are supported. A
+ * zero dimension is only compatible with 0 or 1. The size of the output
+ * dimension is zero if either of the corresponding input dimensions is zero.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ *
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_DIV = 30,
+
+ /**
+ * Computes the mean of elements across dimensions of a tensor.
+ *
+ * Reduces the input tensor along the given dimensions to reduce. Unless
+ * keep_dims is true, the rank of the tensor is reduced by 1 for each entry
+ * in axis. If keep_dims is true, the reduced dimensions are retained with
+ * length 1.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input.
+ * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce. Must be in the range
+ * [-rank(input_tensor), rank(input_tensor)).
+ *
+ * NOTE: When the operation was introduced, the documentation
+ * incorrectly stated that if dimensions were empty, the operation
+ * would reduce across all dimensions. This behavior was never
+ * implemented.
+ *
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, keep_dims. If positive,
+ * retains reduced dimensions with length 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
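+ * A shape sketch (the values are illustrative): reducing a [2, 3, 4]
+ * input along axis 1:
+ *
+ *     const uint32_t in_dims[3] = {2, 3, 4};
+ *     const int32_t axes[1] = {1};
+ *     // keep_dims == 0  ->  output dims {2, 4}
+ *     // keep_dims  > 0  ->  output dims {2, 1, 4}
+ *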
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_MEAN = 31,
+
+ /**
+ * Pads a tensor with zeros.
+ *
+ * This operation pads a tensor according to the specified paddings.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} (full support since API
+ * level 29, see the output section)
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be padded.
+ * * 1: A 2-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the paddings
+ * for each spatial dimension of the input tensor. The shape of the
+ * tensor must be {rank(input0), 2}.
+ * padding[i, 0] specifies the number of elements to be padded in the
+ * front of dimension i.
+ * padding[i, 1] specifies the number of elements to be padded after the
+ * end of dimension i.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0. The
+ * output tensor has the same rank as input0, and each
+ * dimension of the output tensor has the same size as the
+ * corresponding dimension of the input tensor plus the size
+ * of the padding:
+ * output0.dimension[i] =
+ * padding[i, 0] + input0.dimension[i] + padding[i, 1]
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * NOTE: Before API level 29, the pad value for
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} is undefined.
+ * Since API level 29, the pad value is always the logical zero.
+ *
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_PAD = 32,
+
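+ /*
+ * The PAD output-shape formula above, written out as a loop (sketch
+ * only; 'padding' is the {rank(input0), 2} input flattened row-major,
+ * and the other names are illustrative).
+ *
+ *   for (uint32_t i = 0; i < rank; ++i)
+ *     out_dim[i] = padding[2 * i + 0] + in_dim[i] + padding[2 * i + 1];
+ */
+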
+ // TODO: make the description easier to understand.
+ /**
+ * SpaceToBatch for N-Dimensional tensors.
+ *
+ * This operation divides "spatial" dimensions [1, ..., M] of the input into
+ * a grid of blocks of shape block_shape, and interleaves these blocks with
+ * the "batch" dimension (0) such that in the output, the spatial dimensions
+ * [1, ..., M] correspond to the position within the grid, and the batch
+ * dimension combines both the position within a spatial block and the
+ * original batch position. Prior to division into blocks, the spatial
+ * dimensions of the input are optionally zero padded according to paddings.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} (full support since API
+ * level 29, see the output section)
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the input.
+ * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the block
+ * sizes for each spatial dimension of the input tensor. All values
+ * must be >= 1.
+ * * 2: A 2-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the paddings
+ * for each spatial dimension of the input tensor. All values must be
+ * >= 0. The shape of the tensor must be {M, 2}, where M is the number
+ * of spatial dimensions.
+ * padding[i, 0] specifies the number of elements to be padded in the
+ * front of dimension i.
+ * padding[i, 1] specifies the number of elements to be padded after the
+ * end of dimension i.
+ * * 3: An optional {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ * Available since API level 29.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * NOTE: Before API level 29, the pad value for
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} is undefined.
+ * Since API level 29, the pad value is always the logical zero.
+ *
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_SPACE_TO_BATCH_ND = 33,
+
+ /**
+ * Removes dimensions of size 1 from the shape of a tensor.
+ *
+ * Given a tensor input, this operation returns a tensor of the same
+ * {@link OperandCode} with all dimensions of size 1 removed. If you don't
+ * want to remove all size 1 dimensions, you can remove specific size 1
+ * dimensions by specifying the axes (input1).
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, the tensor to be squeezed.
+ * * 1: An optional 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The
+ * dimensions to squeeze. If specified only squeezes the dimensions
+ * listed. Otherwise, squeezes all dimensions. The dimension index
+ * starts at 0. An error must be reported if squeezing a dimension that
+ * is not 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0. Contains the
+ * same data as input, but has one or more dimensions of size 1
+ * removed.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_SQUEEZE = 34,
+
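+ /*
+ * A sketch of the squeeze rule above (names illustrative). squeeze[d]
+ * marks dimensions requested via input1, or every dimension of size 1
+ * when input1 is omitted.
+ *
+ *   static int squeeze_shape(const uint32_t* in, int rank,
+ *                            const bool* squeeze, uint32_t* out) {
+ *     int out_rank = 0;
+ *     for (int d = 0; d < rank; ++d) {
+ *       if (squeeze[d]) {
+ *         if (in[d] != 1) return -1;  // squeezing a non-1 dim is an error
+ *       } else {
+ *         out[out_rank++] = in[d];
+ *       }
+ *     }
+ *     return out_rank;
+ *   }
+ */
+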
+ /**
+ * Extracts a strided slice of a tensor.
+ *
+ * Roughly speaking, this op extracts a slice of size (end - begin) / stride
+ * from the given input tensor. Starting at the location specified by begin,
+ * the slice continues by adding stride to the index until all dimensions
+ * are not less than end. Note that a stride can be negative, which causes a
+ * reverse slice.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be sliced.
+ * * 1: begin, a 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The
+ * starts of the dimensions of the input tensor to be sliced. The
+ * length must be equal to rank(input0).
+ * * 2: end, a 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The
+ * ends of the dimensions of the input tensor to be sliced. The length
+ * must be equal to rank(input0).
+ * * 3: strides, a 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The
+ * strides of the dimensions of the input tensor to be sliced. The
+ * length must be equal to rank(input0). The entries must be non-zero.
+ * * 4: begin_mask, an {@link ANEURALNETWORKS_INT32} scalar. If the ith bit
+ * of begin_mask is set, begin[i] is ignored and the fullest possible
+ * range in that dimension is used instead.
+ * * 5: end_mask, an {@link ANEURALNETWORKS_INT32} scalar. If the ith bit of
+ * end_mask is set, end[i] is ignored and the fullest possible range in
+ * that dimension is used instead.
+ * * 6: shrink_axis_mask, an {@link ANEURALNETWORKS_INT32} scalar. If the
+ * ith bit of shrink_axis_mask is set, the ith dimension specification
+ * shrinks the dimensionality by 1, taking on the value at index
+ * begin[i]. In this case, the ith specification must define a
+ * slice of size 1, e.g. begin[i] = x, end[i] = x + 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0 and rank (n - k),
+ * where k is the number of bits set in shrink_axis_mask.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_STRIDED_SLICE = 35,
+
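+ /*
+ * A sketch of the per-dimension element count implied by the
+ * description above, assuming begin_mask, end_mask, and
+ * shrink_axis_mask have already been resolved into concrete begin/end
+ * values (helper name is hypothetical). Negative strides, which
+ * reverse the slice, are handled as well.
+ *
+ *   static uint32_t slice_dim_size(int32_t begin, int32_t end, int32_t stride) {
+ *     int32_t range = end - begin;
+ *     // Truncating division makes this ceil(range / stride) for either sign.
+ *     int32_t n = (range + stride + (stride > 0 ? -1 : 1)) / stride;
+ *     return n > 0 ? (uint32_t)n : 0;
+ *   }
+ *
+ * E.g. begin = 0, end = 5, stride = 2 gives 3 elements (indices 0, 2, 4).
+ */
+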
+ /**
+ * Element-wise subtraction of two tensors.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible
+ * dimensions. The output is the result of subtracting the second input
+ * tensor from the first one, optionally modified by an activation function.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Example:
+ * input1.dimension = {4, 1, 2}
+ * input2.dimension = {5, 4, 3, 1}
+ * output.dimension = {5, 4, 3, 2}
+ *
+ * Since API level 29, generic zero-sized input tensors are supported. A
+ * zero dimension is only compatible with 0 or 1. The size of the output
+ * dimension is zero if either of the corresponding input dimensions is zero.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} (since API level 29)
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from inputs' scale and zeroPoint.
+ *
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_SUB = 36,
+
+ /**
+ * Transposes the input tensor, permuting the dimensions according to the
+ * perm tensor.
+ *
+ * The returned tensor's dimension i corresponds to the input dimension
+ * perm[i]. If perm is not given, it is set to (n-1...0), where n is the
+ * rank of the input tensor. Hence by default, this operation performs a
+ * regular matrix transpose on 2-D input Tensors.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be transposed.
+ * Since API level 29, this tensor may be zero-sized.
+ * * 1: An optional 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32},
+ * the permutation of the dimensions of the input tensor.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 28.
+ */
+ ANEURALNETWORKS_TRANSPOSE = 37,
+
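+ /*
+ * The permutation rule above as a one-line sketch (names illustrative):
+ * output dimension i takes input dimension perm[i], with perm
+ * defaulting to (n-1, ..., 0) when omitted.
+ *
+ *   for (int i = 0; i < n; ++i)
+ *     out_dim[i] = in_dim[perm ? perm[i] : (n - 1 - i)];
+ */
+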
+ // Operations below are available since API level 29.
+
+ /**
+ * Computes the absolute value of a tensor, element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_ABS = 38,
+
+ /**
+ * Returns the index of the largest element along an axis.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor specifying the input. Must be non-empty.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis to
+ * reduce across. Negative index is used to specify axis from the
+ * end (e.g. -1 for the last axis). Must be in the range [-n, n).
+ *
+ * Outputs:
+ * * 0: An (n - 1)-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor.
+ *
+ * Available since API level 29.
+ */
+ // There is no underscore in ARG_MAX to avoid name conflict with
+ // the macro defined in libc/kernel/uapi/linux/limits.h.
+ ANEURALNETWORKS_ARGMAX = 39,
+
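+ /*
+ * The core of ARGMAX for one run of values along the reduced axis
+ * (sketch only; a full implementation applies this to every
+ * outer/inner position around the axis).
+ *
+ *   static int32_t argmax_run(const float* v, uint32_t n) {  // n > 0
+ *     uint32_t best = 0;
+ *     for (uint32_t i = 1; i < n; ++i)
+ *       if (v[i] > v[best]) best = i;
+ *     return (int32_t)best;
+ *   }
+ */
+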
+ /**
+ * Returns the index of the smallest element along an axis.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor specifying the input. Must be non-empty.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis to
+ * reduce across. Negative index is used to specify axis from the
+ * end (e.g. -1 for the last axis). Must be in the range [-n, n).
+ *
+ * Outputs:
+ * * 0: An (n - 1)-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_ARGMIN = 40, // See ARGMAX for naming discussion.
+
+ /**
+ * Transforms axis-aligned bounding box proposals using bounding box deltas.
+ *
+ * Given the positions of bounding box proposals and the corresponding
+ * bounding box deltas for each class, return the refined bounding box
+ * regions. The resulting bounding boxes are clipped against the edges of
+ * the image.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}
+ *
+ * Inputs:
+ * * 0: A 2-D Tensor of shape [num_rois, 4], specifying the locations of the
+ * bounding box proposals, each line with format [x1, y1, x2, y2].
+ * For tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM},
+ * the zeroPoint must be 0 and the scale must be 0.125. Zero num_rois
+ * is supported for this tensor.
+ * * 1: A 2-D Tensor of shape [num_rois, num_classes * 4], specifying the
+ * bounding box delta for each region of interest and each class. The
+ * bounding box deltas are organized in the following order
+ * [dx, dy, dw, dh], where dx and dy are the relative correction factors
+ * for the center position of the bounding box with respect to the width
+ * and height, and dw and dh are the log-scale relative correction factors
+ * for the width and height. For input0 of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}, this tensor should be
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}. Zero num_rois is
+ * supported for this tensor.
+ * * 2: A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [num_rois], specifying the batch index of each box. Boxes with
+ * the same batch index are grouped together. Zero num_rois is
+ * supported for this tensor.
+ * * 3: A 2-D Tensor of shape [batches, 2], specifying the information of
+ * each image in the batch, each line with format
+ * [image_height, image_width].
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0, with shape
+ * [num_rois, num_classes * 4], specifying the coordinates of each
+ * output bounding box for each class, with format [x1, y1, x2, y2].
+ * For type of {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}, the
+ * scale must be 0.125 and the zero point must be 0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM = 41,
+
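+ /*
+ * A hedged sketch of one box refinement in the usual Faster R-CNN
+ * style decoding that the description implies; the exact numerics are
+ * an assumption, and the helper name is illustrative only. Boxes use
+ * [x1, y1, x2, y2]; requires <math.h>.
+ *
+ *   static void apply_deltas(const float box[4],
+ *                            const float d[4],  // [dx, dy, dw, dh]
+ *                            float img_w, float img_h, float out[4]) {
+ *     float w = box[2] - box[0], h = box[3] - box[1];
+ *     float cx = box[0] + 0.5f * w + d[0] * w;  // dx relative to width
+ *     float cy = box[1] + 0.5f * h + d[1] * h;  // dy relative to height
+ *     float nw = w * expf(d[2]);                // dw is log-scale
+ *     float nh = h * expf(d[3]);
+ *     out[0] = fmaxf(0.0f, fminf(cx - 0.5f * nw, img_w));  // clip to image
+ *     out[1] = fmaxf(0.0f, fminf(cy - 0.5f * nh, img_h));
+ *     out[2] = fmaxf(0.0f, fminf(cx + 0.5f * nw, img_w));
+ *     out[3] = fmaxf(0.0f, fminf(cy + 0.5f * nh, img_h));
+ *   }
+ */
+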
+ /**
+ * Performs a forward LSTM on the input followed by a backward LSTM.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: 3, either time-major or batch-major.
+ *
+ * All input and output tensors must be of the same type.
+ *
+ * Inputs:
+ * * 0: The input.
+ * A 3-D tensor of shape:
+ * If time-major: [max_time, batch_size, input_size]
+ * If batch-major: [batch_size, max_time, input_size]
+ * where "max_time" is the number of timesteps (sequence length),
+ * "batch_size" corresponds to the batching dimension, and
+ * "input_size" is the size of the input.
+ * * 1: The forward input-to-input weights. Optional.
+ * A 2-D tensor of shape [fw_num_units, input_size], where “fw_num_units”
+ * corresponds to the number of forward cell units.
+ * * 2: The forward input-to-forget weights.
+ * A 2-D tensor of shape [fw_num_units, input_size].
+ * * 3: The forward input-to-cell weights.
+ * A 2-D tensor of shape [fw_num_units, input_size].
+ * * 4: The forward input-to-output weights.
+ * A 2-D tensor of shape [fw_num_units, input_size].
+ * * 5: The forward recurrent-to-input weights. Optional.
+ * A 2-D tensor of shape [fw_num_units, fw_output_size], where “fw_output_size”
+ * corresponds to either the number of cell units (i.e., fw_num_units),
+ * or the second dimension of the “fw_projection_weights”, if defined.
+ * * 6: The forward recurrent-to-forget weights.
+ * A 2-D tensor of shape [fw_num_units, fw_output_size].
+ * * 7: The forward recurrent-to-cell weights.
+ * A 2-D tensor of shape [fw_num_units, fw_output_size].
+ * * 8: The forward recurrent-to-output weights.
+ * A 2-D tensor of shape [fw_num_units, fw_output_size].
+ * * 9: The forward cell-to-input weights. Optional.
+ * A 1-D tensor of shape [fw_num_units].
+ * * 10: The forward cell-to-forget weights. Optional.
+ * A 1-D tensor of shape [fw_num_units].
+ * * 11: The forward cell-to-output weights. Optional.
+ * A 1-D tensor of shape [fw_num_units].
+ * * 12: The forward input gate bias. Optional.
+ * A 1-D tensor of shape [fw_num_units].
+ * * 13: The forward forget gate bias.
+ * A 1-D tensor of shape [fw_num_units].
+ * * 14: The forward cell gate bias.
+ * A 1-D tensor of shape [fw_num_units].
+ * * 15: The forward output gate bias.
+ * A 1-D tensor of shape [fw_num_units].
+ * * 16: The forward projection weights. Optional.
+ * A 2-D tensor of shape [fw_output_size, fw_num_units].
+ * * 17: The forward projection bias. Optional.
+ * A 1-D tensor of shape [fw_output_size].
+ * * 18: The backward input-to-input weights. Optional.
+ * A 2-D tensor of shape [bw_num_units, input_size], where “bw_num_units”
+ * corresponds to the number of backward cell units.
+ * * 19: The backward input-to-forget weights.
+ * A 2-D tensor of shape [bw_num_units, input_size].
+ * * 20: The backward input-to-cell weights.
+ * A 2-D tensor of shape [bw_num_units, input_size].
+ * * 21: The backward input-to-output weights.
+ * A 2-D tensor of shape [bw_num_units, input_size].
+ * * 22: The backward recurrent-to-input weights. Optional.
+ * A 2-D tensor of shape [bw_num_units, bw_output_size], where “bw_output_size”
+ * corresponds to either the number of cell units (i.e., “bw_num_units”),
+ * or the second dimension of the “bw_projection_weights”, if defined.
+ * * 23: The backward recurrent-to-forget weights.
+ * A 2-D tensor of shape [bw_num_units, bw_output_size].
+ * * 24: The backward recurrent-to-cell weights.
+ * A 2-D tensor of shape [bw_num_units, bw_output_size].
+ * * 25: The backward recurrent-to-output weights.
+ * A 2-D tensor of shape [bw_num_units, bw_output_size].
+ * * 26: The backward cell-to-input weights. Optional.
+ * A 1-D tensor of shape [bw_num_units].
+ * * 27: The backward cell-to-forget weights. Optional.
+ * A 1-D tensor of shape [bw_num_units].
+ * * 28: The backward cell-to-output weights. Optional.
+ * A 1-D tensor of shape [bw_num_units].
+ * * 29: The backward input gate bias. Optional.
+ * A 1-D tensor of shape [bw_num_units].
+ * * 30: The backward forget gate bias.
+ * A 1-D tensor of shape [bw_num_units].
+ * * 31: The backward cell gate bias.
+ * A 1-D tensor of shape [bw_num_units].
+ * * 32: The backward output gate bias.
+ * A 1-D tensor of shape [bw_num_units].
+ * * 33: The backward projection weights. Optional.
+ * A 2-D tensor of shape [bw_output_size, bw_num_units].
+ * * 34: The backward projection bias. Optional.
+ * A 1-D tensor of shape [bw_output_size].
+ * * 35: The forward input activation state.
+ * A 2-D tensor of shape [batch_size, bw_output_size].
+ * * 36: The forward input cell state.
+ * A 2-D tensor of shape [batch_size, bw_num_units].
+ * * 37: The backward input activation state.
+ * A 2-D tensor of shape [batch_size, bw_output_size].
+ * * 38: The backward input cell state.
+ * A 2-D tensor of shape [batch_size, bw_num_units].
+ * * 39: The auxiliary input. Optional.
+ * A 3-D tensor of shape [max_time, batch_size, input_size], where “batch_size”
+ * corresponds to the batching dimension, and “input_size” is the size
+ * of the input.
+ * * 40: The forward auxiliary input-to-input weights. Optional.
+ * A 2-D tensor of shape [fw_num_units, input_size].
+ * * 41: The forward auxiliary input-to-forget weights. Optional.
+ * A 2-D tensor of shape [fw_num_units, input_size].
+ * * 42: The forward auxiliary input-to-cell weights. Optional.
+ * A 2-D tensor of shape [fw_num_units, input_size].
+ * * 43: The forward auxiliary input-to-output weights. Optional.
+ * A 2-D tensor of shape [fw_num_units, input_size].
+ * * 44: The backward auxiliary input-to-input weights. Optional.
+ * A 2-D tensor of shape [bw_num_units, input_size].
+ * * 45: The backward auxiliary input-to-forget weights. Optional.
+ * A 2-D tensor of shape [bw_num_units, input_size].
+ * * 46: The backward auxiliary input-to-cell weights. Optional.
+ * A 2-D tensor of shape [bw_num_units, input_size].
+ * * 47: The backward auxiliary input-to-output weights. Optional.
+ * A 2-D tensor of shape [bw_num_units, input_size].
+ * * 48: The activation function.
+ * A value indicating the activation function:
+ * <ul>
+ * <li>0: None;
+ * <li>1: Relu;
+ * <li>3: Relu6;
+ * <li>4: Tanh;
+ * <li>6: Sigmoid.
+ * </ul>
+ * * 49: The clipping threshold for the cell state, such
+ * that values are bound within [-cell_clip, cell_clip]. If set to 0.0
+ * then clipping is disabled.
+ * If all the input tensors have type {@link ANEURALNETWORKS_TENSOR_FLOAT32},
+ * this scalar must be of the type {@link ANEURALNETWORKS_FLOAT32},
+ * otherwise if all the input tensors have the type {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT16}, this scalar must be of type {@link
+ * ANEURALNETWORKS_FLOAT16}.
+ * * 50: The clipping threshold for the output from the
+ * projection layer, such that values are bound within
+ * [-proj_clip, proj_clip]. If set to 0.0 then clipping is disabled.
+ * If all the input tensors have type {@link ANEURALNETWORKS_TENSOR_FLOAT32},
+ * this scalar must be of the type {@link ANEURALNETWORKS_FLOAT32},
+ * otherwise if all the input tensors have the type {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT16}, this scalar must be of type {@link
+ * ANEURALNETWORKS_FLOAT16}.
+ * * 51: merge_outputs
+ * An {@link ANEURALNETWORKS_BOOL} scalar specifying if the outputs
+ * from forward and backward cells should be merged.
+ * * 52: time_major
+ * An {@link ANEURALNETWORKS_BOOL} scalar specifying the shape format
+ * of input and output tensors.
+ * * 53: The forward input layer normalization weights. Optional.
+ * A 1-D tensor of shape [fw_num_units]. Used to rescale normalized inputs
+ * to activation at input gate.
+ * * 54: The forward forget layer normalization weights. Optional.
+ * A 1-D tensor of shape [fw_num_units]. Used to rescale normalized inputs
+ * to activation at forget gate.
+ * * 55: The forward cell layer normalization weights. Optional.
+ * A 1-D tensor of shape [fw_num_units]. Used to rescale normalized inputs
+ * to activation at cell gate.
+ * * 56: The forward output layer normalization weights. Optional.
+ * A 1-D tensor of shape [fw_num_units]. Used to rescale normalized inputs
+ * to activation at output gate.
+ * * 57: The backward input layer normalization weights. Optional.
+ * A 1-D tensor of shape [bw_num_units]. Used to rescale normalized inputs
+ * to activation at input gate.
+ * * 58: The backward forget layer normalization weights. Optional.
+ * A 1-D tensor of shape [bw_num_units]. Used to rescale normalized inputs
+ * to activation at forget gate.
+ * * 59: The backward cell layer normalization weights. Optional.
+ * A 1-D tensor of shape [bw_num_units]. Used to rescale normalized inputs
+ * to activation at cell gate.
+ * * 60: The backward output layer normalization weights. Optional.
+ * A 1-D tensor of shape [bw_num_units]. Used to rescale normalized inputs
+ * to activation at output gate.
+ *
+ * Outputs:
+ * * 0: The forward output.
+ * A 3-D tensor of shape:
+ * If time-major and not merge_outputs:
+ * [max_time, batch_size, fw_output_size]
+ * If time-major and merge_outputs:
+ * [max_time, batch_size, fw_output_size + bw_output_size]
+ * If batch-major and not merge_outputs:
+ * [batch_size, max_time, fw_output_size]
+ * If batch-major and merge_outputs:
+ * [batch_size, max_time, fw_output_size + bw_output_size]
+ * * 1: The backward output. Unused if merge_outputs is true.
+ * A 3-D tensor of shape:
+ * If time-major: [max_time, batch_size, bw_output_size]
+ * If batch-major: [batch_size, max_time, bw_output_size]
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM = 42,
+
+ /**
+ * A recurrent neural network layer that applies a basic RNN cell to a
+ * sequence of inputs in forward and backward directions.
+ *
+ * This Op unrolls the input along the sequence dimension, and implements
+ * the following operation for each element in the sequence s =
+ * 1...sequence_length:
+ * fw_outputs[s] = fw_state = activation(inputs[s] * fw_input_weights’ +
+ * fw_state * fw_recurrent_weights’ + fw_bias)
+ *
+ * And for each element in sequence t = sequence_length : 1
+ * bw_outputs[t] = bw_state = activation(inputs[t] * bw_input_weights’ +
+ * bw_state * bw_recurrent_weights’ + bw_bias)
+ *
+ * Where:
+ * * “{fw,bw}_input_weights” is a weight matrix that multiplies the inputs;
+ * * “{fw,bw}_recurrent_weights” is a weight matrix that multiplies the
+ * current “state” which itself is the output from the previous time step
+ * computation;
+ * * “{fw,bw}_bias” is a bias vector (added to each output vector in the
+ * batch);
+ * * “activation” is the function passed as the “fused_activation_function”
+ * argument (if not “NONE”).
+ *
+ * The op also supports an auxiliary input. Regular cell feeds one input
+ * into the two RNN cells in the following way:
+ *
+ * INPUT (INPUT_REVERSED)
+ * | |
+ * ---------------------
+ * | FW_RNN BW_RNN |
+ * ---------------------
+ * | |
+ * FW_OUT BW_OUT
+ *
+ * An op with an auxiliary input takes two inputs and feeds them into the
+ * RNN cells in the following way:
+ *
+ * AUX_INPUT (AUX_INPUT_REVERSED)
+ * | |
+ * INPUT | (INPUT_R'D.)|
+ * | | | |
+ * -----------------------
+ * | \ / \ / |
+ * | FW_RNN BW_RNN |
+ * -----------------------
+ * | |
+ * FW_OUT BW_OUT
+ *
+ * When stacking this op on top of itself, this allows both the forward
+ * and backward outputs of the previous cell to be connected to the next
+ * cell's inputs.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * The input tensors must all be the same type.
+ *
+ * Inputs:
+ * * 0: input.
+ * A 3-D tensor. The shape is defined by the input 13 (timeMajor). If
+ * it is set to true, then the input has a shape [maxTime, batchSize,
+ * inputSize], otherwise the input has a shape [batchSize, maxTime,
+ * inputSize].
+ * * 1: fwWeights.
+ * A 2-D tensor of shape [fwNumUnits, inputSize].
+ * * 2: fwRecurrentWeights.
+ * A 2-D tensor of shape [fwNumUnits, fwNumUnits].
+ * * 3: fwBias.
+ * A 1-D tensor of shape [fwNumUnits].
+ * * 4: fwHiddenState.
+ * A 2-D tensor of shape [batchSize, fwNumUnits]. Specifies a hidden
+ * state input for the first time step of the computation.
+ * * 5: bwWeights.
+ * A 2-D tensor of shape [bwNumUnits, inputSize].
+ * * 6: bwRecurrentWeights.
+ * A 2-D tensor of shape [bwNumUnits, bwNumUnits].
+ * * 7: bwBias.
+ * A 1-D tensor of shape [bwNumUnits].
+ * * 8: bwHiddenState.
+ * A 2-D tensor of shape [batchSize, bwNumUnits]. Specifies a hidden
+ * state input for the first time step of the computation.
+ * * 9: auxInput.
+ * A 3-D tensor. The shape is the same as that of input 0.
+ * * 10: fwAuxWeights.
+ * A 2-D tensor of shape [fwNumUnits, inputSize].
+ * * 11: bwAuxWeights.
+ * A 2-D tensor of shape [bwNumUnits, inputSize].
+ * * 12: fusedActivationFunction.
+ * A {@link FuseCode} value indicating the activation function. If
+ * “NONE” is specified then it results in a linear activation.
+ * * 13: timeMajor
+ * An {@link ANEURALNETWORKS_BOOL} scalar specifying the shape format
+ * of input and output tensors.
+ * * 14: mergeOutputs
+ * An {@link ANEURALNETWORKS_BOOL} scalar specifying if the outputs
+ * from forward and backward cells are separate (if set to false) or
+ * concatenated (if set to true).
+ *
+ * Outputs:
+ * * 0: fwOutput.
+ * A 3-D tensor. The first two dimensions of the shape are defined by
+ * the input 13 (timeMajor) and the third dimension is defined by the
+ * input 14 (mergeOutputs). If timeMajor is set to true, then the first
+ * two dimensions are [maxTime, batchSize], otherwise they are set to
+ * [batchSize, maxTime]. If mergeOutputs is set to true, then the third
+ * dimension is equal to (fwNumUnits + bwNumUnits), otherwise it is set
+ * to fwNumUnits.
+ * * 1: bwOutput.
+ * A 3-D tensor. If the input 14 (mergeOutputs) is set to true, then
+ * this tensor is not produced. The shape is defined by the input 13
+ * (timeMajor). If it is set to true, then the shape is set to
+ * [maxTime, batchSize, bwNumUnits], otherwise the shape is set to
+ * [batchSize, maxTime, bwNumUnits].
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN = 43,
+
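+ /*
+ * One step of the basic RNN recurrence above,
+ * state = activation(input * W' + state * R' + bias), as a sketch
+ * (names hypothetical). Weights are row-major [num_units, input_size]
+ * and [num_units, num_units], matching the operand shapes, so dotting
+ * each row against the input/state realizes the transpose in the
+ * formula.
+ *
+ *   static void rnn_step(const float* x, int input_size,
+ *                        const float* W, const float* R, const float* b,
+ *                        float* state,    // [num_units], updated in place
+ *                        float* scratch,  // [num_units]
+ *                        int num_units, float (*act)(float)) {
+ *     for (int u = 0; u < num_units; ++u) {
+ *       float acc = b[u];
+ *       for (int i = 0; i < input_size; ++i)
+ *         acc += W[u * input_size + i] * x[i];
+ *       for (int j = 0; j < num_units; ++j)
+ *         acc += R[u * num_units + j] * state[j];
+ *       scratch[u] = act(acc);
+ *     }
+ *     for (int u = 0; u < num_units; ++u) state[u] = scratch[u];
+ *   }
+ *
+ * The forward pass walks s = 1..max_time with the fw* operands; the
+ * backward pass walks the sequence in reverse with the bw* operands.
+ */
+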
+ /**
+ * Greedily selects a subset of bounding boxes in descending order of score.
+ *
+ * This op applies the NMS algorithm to each class. In each loop of
+ * execution, the box with the maximum score gets selected and removed from
+ * the pending set. The scores of the rest of the boxes are lowered according
+ * to the intersection-over-union (IOU) overlap with the previously selected
+ * boxes and a specified NMS kernel method. Any boxes with scores less
+ * than a threshold are removed from the pending set.
+ *
+ * Three NMS kernels are supported:
+ * * Hard: score_new = score_old * (1 if IoU < threshold else 0)
+ * * Linear: score_new = score_old * (1 if IoU < threshold else 1 - IoU)
+ * * Gaussian: score_new = score_old * exp(- IoU^2 / sigma)
+ *
+ * Axis-aligned bounding boxes are represented by their upper-left corner
+ * coordinate (x1,y1) and lower-right corner coordinate (x2,y2). A valid
+ * bounding box should satisfy x1 <= x2 and y1 <= y2.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Inputs:
+ * * 0: A 2-D Tensor of shape [num_rois, num_classes], specifying the score
+ * of each bounding box proposal. The boxes are grouped by batches in the
+ * first dimension. Zero num_rois is supported for this tensor.
+ * * 1: A 2-D Tensor specifying the bounding boxes of shape
+ * [num_rois, num_classes * 4], organized in the order [x1, y1, x2, y2].
+ * The boxes are grouped by batches in the first dimension. The sequential
+ * order of the boxes corresponds with input0. For input0 of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, this tensor should be of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}, with zeroPoint of 0 and
+ * scale of 0.125. Zero num_rois is supported for this tensor.
+ * * 2: A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [num_rois], specifying the batch index of each box. Boxes with
+ * the same batch index are grouped together.
+ * * 3: An {@link ANEURALNETWORKS_FLOAT32} scalar, score_threshold. Boxes
+ * with scores lower than the threshold are filtered before sending
+ * to the NMS algorithm.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the maximum
+ * number of selected bounding boxes for each image. Set to a negative
+ * value for unlimited number of output bounding boxes.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the NMS
+ * kernel method, options are 0:hard, 1:linear, 2:gaussian.
+ * * 6: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the IoU
+ * threshold in hard and linear NMS kernel. This field is ignored if
+ * gaussian kernel is selected.
+ * * 7: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the sigma in
+ * gaussian NMS kernel. This field is ignored if gaussian kernel is
+ * not selected.
+ * * 8: An {@link ANEURALNETWORKS_FLOAT32} scalar, nms_score_threshold.
+ * Boxes with scores lower than the threshold are dropped during the
+ * score updating phase in soft NMS.
+ *
+ * Outputs:
+ * * 0: A 1-D Tensor of the same {@link OperandCode} as input0, with shape
+ * [num_output_rois], specifying the score of each output box. The boxes
+ * are grouped by batches, but the sequential order in each batch is not
+ * guaranteed. For type of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the scale and zero point must be the same as input0.
+ * * 1: A 2-D Tensor of the same {@link OperandCode} as input1, with shape
+ * [num_output_rois, 4], specifying the coordinates of each
+ * output bounding box with the same format as input1. The sequential
+ * order of the boxes corresponds with output0. For type of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}, the scale must be
+ * 0.125 and the zero point must be 0.
+ * * 2: A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [num_output_rois], specifying the class of each output box. The
+ * sequential order of the boxes corresponds with output0.
+ * * 3: A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [num_output_rois], specifying the batch index of each box. Boxes
+ * with the same batch index are grouped together.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_BOX_WITH_NMS_LIMIT = 44,
+
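+ /*
+ * The three score-update kernels above, written out (sketch; requires
+ * <math.h>). 'kernel' corresponds to input5 (0 = hard, 1 = linear,
+ * 2 = gaussian), 'threshold' to input6, and 'sigma' to input7; the
+ * helper name is illustrative.
+ *
+ *   static float nms_rescore(float score, float iou, int kernel,
+ *                            float threshold, float sigma) {
+ *     switch (kernel) {
+ *       case 0: return iou < threshold ? score : 0.0f;                 // hard
+ *       case 1: return iou < threshold ? score : score * (1.0f - iou); // linear
+ *       case 2: return score * expf(-(iou * iou) / sigma);             // gaussian
+ *     }
+ *     return score;
+ *   }
+ */
+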
+ /**
+ * Casts a tensor to a new type.
+ *
+ * This operation ignores the scale and zeroPoint of quantized tensors,
+ * e.g. it treats a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} input
+ * as a tensor of uint8 values.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: A tensor with the same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_CAST = 45,
+
+ /**
+ * Shuffles the channels of the input tensor.
+ *
+ * Given an input tensor and an integer value of num_groups, CHANNEL_SHUFFLE
+ * divides the channel dimension into num_groups groups, and reorganizes the
+ * channels by grouping channels with the same index in each group.
+ *
+ * Along the channel dimension, the output is calculated using this formula:
+ *
+ * output_channel[k * num_groups + g] = input_channel[g * group_size + k]
+ *
+ * where group_size = num_channels / num_groups
+ *
+ * The number of channels must be divisible by num_groups.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be shuffled.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
+ * groups.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the dimension
+ * channel shuffle would be performed on. Negative index is used to
+ * specify axis from the end (e.g. -1 for the last axis). Must be in
+ * the range [-n, n).
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} and same shape as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_CHANNEL_SHUFFLE = 46,
+
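+ /*
+ * A direct reading of the formula above for the channel vector at one
+ * batch/spatial position (sketch only; a full implementation repeats
+ * this for every such position).
+ *
+ *   static void channel_shuffle(const float* in, float* out,
+ *                               int num_channels, int num_groups) {
+ *     int group_size = num_channels / num_groups;  // must divide evenly
+ *     for (int g = 0; g < num_groups; ++g)
+ *       for (int k = 0; k < group_size; ++k)
+ *         out[k * num_groups + g] = in[g * group_size + k];
+ *   }
+ */
+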
+ /**
+ * Applies postprocessing steps to bounding box detections.
+ *
+ * Bounding box detections are generated by applying transformation on a set
+ * of predefined anchors with the bounding box deltas from bounding box
+ * regression. A final step of hard NMS is applied to limit the number of
+ * returned boxes.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Inputs:
+ * * 0: A 3-D Tensor of shape [batches, num_anchors, num_classes], specifying
+ * the score of each anchor with each class. Class 0 for each
+ * [batches, num_anchors, 0] is background and will be ignored.
+ * * 1: A 3-D Tensor of shape [batches, num_anchors, length_box_encoding], with
+ * the first four values in length_box_encoding specifying the bounding
+ * box deltas. The box deltas are encoded in the order of [dy, dx, dh, dw],
+ * where dy and dx are the linear-scale relative correction factors for the
+ * center position of the bounding box with respect to the width and height,
+ * and dh and dw are the log-scale relative correction factors for the width
+ * and height. All the entries in length_box_encoding beyond the first four
+ * values are ignored in this operation.
+ * * 2: A 2-D Tensor of shape [num_anchors, 4], specifying the shape of each
+ * predefined anchor, with format [ctr_y, ctr_x, h, w], where ctr_y and
+ * ctr_x are the center position of the box, and h and w are the height
+ * and the width.
+ * * 3: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the scaling
+ * factor for dy in bounding box deltas.
+ * * 4: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the scaling
+ * factor for dx in bounding box deltas.
+ * * 5: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the scaling
+ * factor for dh in bounding box deltas.
+ * * 6: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the scaling
+ * factor for dw in bounding box deltas.
+ * * 7: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to use the regular
+ * multi-class NMS algorithm that does NMS separately for each class;
+ * set to false for a faster algorithm that only does a single NMS
+ * using the highest class score.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, max_num_detections, specifying
+ * the maximum number of boxes for the output. Boxes with the lowest
+ * scores are discarded to meet the limit.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, only used when input7 is
+ * set to false, specifying the maximum number of classes per detection.
+ * * 10: An {@link ANEURALNETWORKS_INT32} scalar, only used when input7 is
+ * set to true, specifying the maximum number of detections when
+ * applying NMS algorithm for each single class.
+ * * 11: A scalar, score_threshold. Boxes with scores lower than the
+ * threshold are filtered before sending to the NMS algorithm. The
+ * scalar must be of {@link ANEURALNETWORKS_FLOAT16} if input0 is of
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of {@link
+ * ANEURALNETWORKS_FLOAT32} if input0 is of {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT32}.
+ * * 12: A scalar, specifying the IoU threshold for hard NMS. The scalar
+ * must be of {@link ANEURALNETWORKS_FLOAT16} if input0 is of {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT16} and of {@link
+ * ANEURALNETWORKS_FLOAT32} if input0 is of {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT32}.
+ * * 13: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to include
+ * background class in the list of label map for the output, set
+ * to false to not include the background. When the background
+ * class is included, it has label 0 and the output classes start
+ * at 1 in the label map, otherwise, the output classes start at 0.
+ *
+ * Outputs:
+ * * 0: A 2-D tensor of the same {@link OperandCode} as input0, with shape
+ * [batches, max_num_detections], specifying the score of each output
+ * detection.
+ * * 1: A 3-D tensor of shape [batches, max_num_detections, 4], specifying the
+ * coordinates of each output bounding box, with format
+ * [y1, x1, y2, x2].
+ * * 2: A 2-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [batches, max_num_detections], specifying the class label for each
+ * output detection.
+ * * 3: A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape [batches],
+ * specifying the number of valid output detections for each batch.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_DETECTION_POSTPROCESSING = 47,
+
+ /**
+ * For input tensors x and y, computes x == y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_EQUAL = 48,
+
+ /**
+ * Computes exponential of x element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_EXP = 49,
+
+ /**
+ * Inserts a dimension of 1 into a tensor's shape.
+ *
+ * Given a tensor input, this operation inserts a dimension of 1 at the
+ * given dimension index of input's shape. The dimension index starts at
+ * zero; if you specify a negative dimension index, it is counted backward
+ * from the end.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the dimension
+ * index to expand. Must be in the range [-(n + 1), (n + 1)).
+ *
+ * Outputs:
+ * * 0: An (n + 1)-D tensor with the same {@link OperandCode} and data as
+ * input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_EXPAND_DIMS = 50,
+
+ /**
+ * Gathers values along an axis.
+ *
+ * Produces an output tensor with shape
+ * input0.dimension[:axis] + indices.dimension + input0.dimension[axis + 1:]
+ * where:
+ * # Vector indices (output is rank(input0)).
+ * output[a_0, ..., a_n, i, b_0, ..., b_n] =
+ * input0[a_0, ..., a_n, indices[i], b_0, ..., b_n]
+ *
+ * # Higher rank indices (output is rank(input0) + rank(indices) - 1).
+ * output[a_0, ..., a_n, i, ..., j, b_0, ..., b_n] =
+ * input0[a_0, ..., a_n, indices[i, ..., j], b_0, ..., b_n]
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor from which to gather values.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis.
+ * Negative index is used to specify axis from the end
+ * (e.g. -1 for the last axis). Must be in the range [-n, n).
+ * * 2: A k-D tensor {@link ANEURALNETWORKS_TENSOR_INT32} of indices.
+ * The values must be in the bounds of the corresponding dimensions
+ * of input0.
+ *
+ * Outputs:
+ * * 0: An (n + k - 1)-D tensor with the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_GATHER = 51,
+
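+ /*
+ * A sketch of GATHER with 1-D indices, viewing input0 as
+ * [outer, axis_size, inner] after flattening the dimensions before and
+ * after 'axis' (names hypothetical; requires <string.h>).
+ *
+ *   static void gather(const float* in, float* out,
+ *                      int outer, int axis_size, int inner,
+ *                      const int32_t* indices, int num_indices) {
+ *     for (int o = 0; o < outer; ++o)
+ *       for (int i = 0; i < num_indices; ++i)
+ *         memcpy(out + (o * num_indices + i) * inner,
+ *                in + (o * axis_size + indices[i]) * inner,
+ *                inner * sizeof(float));
+ *   }
+ */
+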
+ /**
+ * Generates axis-aligned bounding box proposals.
+ *
+ * Bounding box proposals are generated by applying transformation on a set
+ * of predefined anchors with the bounding box deltas from bounding box
+ * regression. A final step of hard NMS is applied to limit the number of
+ * returned boxes.
+ *
+ * Axis-aligned bounding boxes are represented by their upper-left corner
+ * coordinate (x1,y1) and lower-right corner coordinate (x2,y2). A valid
+ * bounding box should satisfy x1 <= x2 and y1 <= y2.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Inputs:
+ * * 0: A 4-D Tensor specifying the score of each anchor at each
+ * location. With "NHWC" data layout, the tensor shape is
+ * [batches, height, width, num_anchors]. With "NCHW" data layout,
+ * the tensor shape is [batches, num_anchors, height, width].
+ * * 1: A 4-D Tensor specifying the bounding box deltas. With "NHWC" data
+ * layout, the tensor shape is [batches, height, width, num_anchors * 4].
+ * With "NCHW" data layout, the tensor shape is
+ * [batches, num_anchors * 4, height, width]. The box deltas are encoded
+ * in the order of [dx, dy, dw, dh], where dx and dy are the linear-scale
+ * relative correction factors for the center position of the bounding box
+ * with respect to the width and height, and dw and dh are the log-scale
+ * relative correction factors for the width and height. The last
+ * dimension is the channel dimension.
+ * * 2: A 2-D Tensor of shape [num_anchors, 4], specifying the shape of each
+ * predefined anchor, with format [x1, y1, x2, y2]. For input0 of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, this tensor should be of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT16_SYMM}, with scale of 0.125.
+ * * 3: A 2-D Tensor of shape [batches, 2], specifying the size of
+ * each image in the batch, with format [image_height, image_width].
+ * For input0 of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, this
+ * tensor should be of {@link ANEURALNETWORKS_TENSOR_QUANT16_SYMM}, with
+ * scale of 0.125.
+ * * 4: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the ratio
+ * from the height of the original image to the height of the feature map.
+ * * 5: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the ratio
+ * from the width of the original image to the width of the feature map.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the maximum
+ * number of boxes before going into the hard NMS algorithm. Boxes
+ * with the lowest scores are discarded to meet the limit. Set to
+ * a non-positive value for unlimited number.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the maximum
+ * number of boxes returning from the hard NMS algorithm. Boxes
+ * with the lowest scores are discarded to meet the limit. Set to
+ * a non-positive value for unlimited number.
+ * * 8: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the IoU
+ * threshold for hard NMS.
+ * * 9: An {@link ANEURALNETWORKS_FLOAT32} scalar, min_size. Boxes with
+ * height or width lower than the absolute threshold are filtered out.
+ * * 10: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and input1. Set to false for NHWC.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0, of shape
+ * [num_output_rois], specifying the score of each output box.
+ * The boxes are grouped by batches, but the sequential order in
+ * each batch is not guaranteed. For type of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the scale and zero
+ * point must be the same as input0.
+ * * 1: A tensor of the same {@link OperandCode} as input3, of shape
+ * [num_output_rois, 4], specifying the coordinates of each output
+ * bounding box for each class, with format [x1, y1, x2, y2].
+ * The sequential order of the boxes corresponds with output0.
+ * For type of {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}, the
+ * scale must be 0.125 and the zero point must be 0.
+ * * 2: A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [num_output_rois], specifying the batch index of each box. Boxes
+ * with the same batch index are grouped together.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_GENERATE_PROPOSALS = 52,
+
+ /**
+ * For input tensors x and y, computes x > y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_GREATER = 53,
+
+ /**
+ * For input tensors x and y, computes x >= y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_GREATER_EQUAL = 54,
+
+ /**
+ * Performs a grouped 2-D convolution operation.
+ *
+ * Given an input tensor of shape [batches, height, width, depth_in] and a
+ * filter tensor of shape [depth_out, filter_height, filter_width, depth_group]
+ * containing depth_out convolutional filters of depth depth_group, GROUPED_CONV
+ * applies a group of different filters to each input channel group, then
+ * concatenates the results together.
+ *
+ * Specifically, the input channels are divided into num_groups groups, each with
+ * depth depth_group, i.e. depth_in = num_groups * depth_group. The convolutional
+ * filters are also divided into num_groups groups, i.e. depth_out is divisible
+ * by num_groups. GROUPED_CONV applies each group of filters to the corresponding
+ * input channel group, and the results are concatenated together.
+ *
+ * The output dimensions are functions of the filter dimensions, stride, and
+ * padding.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[b, i, j, g * channel_multiplier + q] =
+ * sum_{di, dj, dk} (
+ * input[b, strides[1] * i + di, strides[2] * j + dj,
+ * g * depth_group + dk] *
+ * filter[g * channel_multiplier + q, di, dj, dk]
+ * ) + bias[channel]
+ *
+ * where channel_multiplier = depth_out / num_groups
+ *
+ * Supported tensor {@link OperandCode} configurations:
+ * * 16 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} for input, filter, output, and bias.
+ *
+ * * 32 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT32} for input, filter, output, and bias.
+ *
+ * * Quantized:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, filter, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (with scale set to
+ * * * input.scale * filter.scale).
+ *
+ * * Quantized with symmetric per channel quantization for the filter:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} for filter.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (scale set to 0.0,
+ * * * each value scaling is separate and equal to input.scale * filter.scales[channel]).
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both explicit padding and implicit padding are supported.
+ *
+ * Inputs (explicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input, where depth_in = num_groups * depth_group.
+ * * 1: A 4-D tensor, of shape
+ * [depth_out, filter_height, filter_width, depth_group], specifying
+ * the filter, where depth_out must be divisible by num_groups. For
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}
+ * the channel dimension (channelDim at
+ * {@link ANeuralNetworksSymmPerChannelQuantParams}) must be set to 0.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias must be of the same
+ * type. For filter tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the bias should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint
+ * of 0 and bias_scale == input_scale * filter_scale. For filter tensor
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal to
+ * bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the left, in the ‘width’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the right, in the ‘width’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the top, in the ‘height’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the bottom, in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
+ * groups.
+ * * 10: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 11: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and output0. Set to false for NHWC.
+ *
+ * Inputs (implicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input, where depth_in = num_groups * depth_group.
+ * * 1: A 4-D tensor, of shape
+ * [depth_out, filter_height, filter_width, depth_group], specifying
+ * the filter, where depth_out must be divisible by num_groups. For
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}
+ * the channel dimension (channelDim at
+ * {@link ANeuralNetworksSymmPerChannelQuantParams}) must be set to 0.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias must be of the same
+ * type. For filter tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the bias should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint
+ * of 0 and bias_scale == input_scale * filter_scale. For filter tensor
+ * of {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * should be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal to
+ * bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
+ * padding scheme, has to be one of the
+ * {@link PaddingCode} values.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
+ * groups.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 8: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and output0. Set to false for NHWC.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, out_height, out_width, depth_out].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from inputs' scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_GROUPED_CONV_2D = 55,
+
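+ /*
+ * A single-batch NHWC reference of the sum above (sketch only:
+ * explicit padding and the fused activation are omitted, and all names
+ * are illustrative).
+ *
+ *   static void grouped_conv2d_ref(
+ *       const float* in,   // [H, W, depth_in]
+ *       const float* flt,  // [depth_out, FH, FW, depth_group]
+ *       const float* bias, // [depth_out]
+ *       float* out,        // [OH, OW, depth_out]
+ *       int H, int W, int FH, int FW, int depth_in, int depth_out,
+ *       int num_groups, int stride_h, int stride_w, int OH, int OW) {
+ *     int depth_group = depth_in / num_groups;
+ *     int mult = depth_out / num_groups;  // channel_multiplier
+ *     for (int i = 0; i < OH; ++i)
+ *       for (int j = 0; j < OW; ++j)
+ *         for (int g = 0; g < num_groups; ++g)
+ *           for (int q = 0; q < mult; ++q) {
+ *             int oc = g * mult + q;
+ *             float acc = bias[oc];
+ *             for (int di = 0; di < FH; ++di)
+ *               for (int dj = 0; dj < FW; ++dj)
+ *                 for (int dk = 0; dk < depth_group; ++dk)
+ *                   acc += in[((stride_h * i + di) * W +
+ *                              (stride_w * j + dj)) * depth_in +
+ *                             g * depth_group + dk] *
+ *                          flt[((oc * FH + di) * FW + dj) * depth_group + dk];
+ *             out[(i * OW + j) * depth_out + oc] = acc;
+ *           }
+ *   }
+ */
+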
+ /**
+ * Localizes the maximum keypoints from heatmaps.
+ *
+ * This operation approximates the accurate maximum keypoint scores and
+ * indices after bicubic upscaling by using Taylor expansion up to the
+ * quadratic term.
+ *
+ * The bounding box is represented by its upper-left corner coordinate
+ * (x1,y1) and lower-right corner coordinate (x2,y2) in the original image.
+ * A valid bounding box should satisfy x1 <= x2 and y1 <= y2.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: A 4-D Tensor of shape
+ * [num_boxes, heatmap_size, heatmap_size, num_keypoints],
+ * specifying the heatmaps, the height and width of heatmaps should
+ * be the same, and must be greater than or equal to 2.
+ * * 1: A 2-D Tensor of shape [num_boxes, 4], specifying the bounding boxes,
+ * each with format [x1, y1, x2, y2]. For input0 of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, this tensor should
+ * be of {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}, with zeroPoint
+ * of 0 and scale of 0.125.
+ * * 2: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0. Set to false for NHWC.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0, with shape
+ * [num_boxes, num_keypoints], specifying score of the keypoints.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from input0 scale and zeroPoint.
+ * * 1: A tensor of the same {@link OperandCode} as input1, with shape
+ * [num_boxes, num_keypoints, 2], specifying the location of
+ * the keypoints, the second dimension is organized as
+ * [keypoint_x, keypoint_y].
+ * For type of {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM}, the
+ * scale must be 0.125 and the zero point must be 0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT = 56,
+
+ /**
+ * Applies instance normalization to the input tensor.
+ *
+ * The values in the output tensor are computed as:
+ *
+ * output[b, h, w, c] =
+ * (input[b, h, w, c] - mean[b, c]) * gamma /
+ * sqrt(var[b, c] + epsilon) + beta
+ *
+ * Where the mean and variance are computed across the spatial dimensions:
+ *
+ * mean[b, c] =
+ * sum_{h, w}(input[b, h, w, c]) / sum(1)
+ *
+ * var[b, c] =
+ * sum_{h, w}(pow(input[b, h, w, c] - mean[b, c], 2)) / sum(1)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be normalized.
+ * * 1: A scalar, specifying gamma, the scale applied to the normalized
+ * tensor. The scalar must be of {@link ANEURALNETWORKS_FLOAT16} if
+ * input0 is of {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of {@link
+ * ANEURALNETWORKS_FLOAT32} if input0 is of {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT32}.
+ * * 2: A scalar, specifying beta, the offset applied to the normalized
+ * tensor. The scalar must be of {@link ANEURALNETWORKS_FLOAT16} if
+ * input0 is of {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of {@link
+ * ANEURALNETWORKS_FLOAT32} if input0 is of {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT32}.
+ * * 3: A scalar, specifying epsilon, the small value added to variance to
+ * avoid dividing by zero. The scalar must be of {@link ANEURALNETWORKS_FLOAT16} if
+ * input0 is of {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of {@link
+ * ANEURALNETWORKS_FLOAT32} if input0 is of {@link
+ * ANEURALNETWORKS_TENSOR_FLOAT32}.
+ * * 4: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and output0. Set to false for NHWC.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} and same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_INSTANCE_NORMALIZATION = 57,
+
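+ /*
+ * Informative sketch (editor's addition): a straightforward NHWC float32
+ * reference for the instance-normalization formulas above; mean and
+ * variance are computed per (batch, channel) over the spatial dimensions.
+ * The helper name is illustrative. Assumes #include <math.h>.
+ *
+ *   static void instance_norm_nhwc(const float *in, float *out, int batches,
+ *                                  int height, int width, int channels,
+ *                                  float gamma, float beta, float epsilon) {
+ *     int n = height * width;
+ *     for (int b = 0; b < batches; ++b)
+ *       for (int c = 0; c < channels; ++c) {
+ *         double sum = 0.0, sq = 0.0;
+ *         for (int i = 0; i < n; ++i) {
+ *           float v = in[(b * n + i) * channels + c];
+ *           sum += v; sq += (double)v * v;
+ *         }
+ *         double mean = sum / n;              // mean[b, c]
+ *         double var = sq / n - mean * mean;  // var[b, c]
+ *         for (int i = 0; i < n; ++i) {
+ *           size_t idx = ((size_t)b * n + i) * channels + c;
+ *           out[idx] = (float)((in[idx] - mean) * gamma
+ *                              / sqrt(var + epsilon) + beta);
+ *         }
+ *       }
+ *   }
+ */
+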
+ /**
+ * For input tensors x and y, computes x < y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_LESS = 58,
+
+ /**
+ * For input tensors x and y, computes x <= y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_LESS_EQUAL = 59,
+
+ /**
+ * Computes natural logarithm of x element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_LOG = 60,
+
+ /**
+ * Returns the truth value of x AND y element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ * * 1: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8} and dimensions
+ * compatible with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_LOGICAL_AND = 61,
+
+ /**
+ * Computes the truth value of NOT x element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_LOGICAL_NOT = 62,
+
+ /**
+ * Returns the truth value of x OR y element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ * * 1: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8} and dimensions
+ * compatible with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_LOGICAL_OR = 63,
+
+ /**
+ * Computes the log softmax activations given logits.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = logits * beta - log(reduce_sum(exp(logits * beta), axis))
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor specifying the input logits.
+ * * 1: A scalar, specifying the positive scaling factor for the exponent,
+ * beta.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the beta
+ * value must be of {@link ANEURALNETWORKS_FLOAT16}.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the beta
+ * value must be of {@link ANEURALNETWORKS_FLOAT32}.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis to
+ * reduce across. Negative index is used to specify axis from the
+ * end (e.g. -1 for the last axis). Must be in the range [-n, n).
+ *
+ * Outputs:
+ * * 0: The output tensor of the same {@link OperandCode} and shape as
+ * input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_LOG_SOFTMAX = 64,
+
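+ /*
+ * Informative sketch (editor's addition): the LOG_SOFTMAX formula above
+ * evaluated for one float32 slice along the reduced axis, rewritten in
+ * the usual numerically stable form (subtracting the maximum changes
+ * nothing algebraically). The helper name is illustrative; assumes
+ * #include <math.h>.
+ *
+ *   static void log_softmax_slice(const float *logits, float *out,
+ *                                 size_t n, float beta) {
+ *     float m = logits[0] * beta;
+ *     for (size_t i = 1; i < n; ++i) {
+ *       float v = logits[i] * beta;
+ *       if (v > m) m = v;
+ *     }
+ *     double sum = 0.0;
+ *     for (size_t i = 0; i < n; ++i)
+ *       sum += exp((double)logits[i] * beta - m);
+ *     double lse = m + log(sum);  // log(reduce_sum(exp(logits * beta)))
+ *     for (size_t i = 0; i < n; ++i)
+ *       out[i] = (float)((double)logits[i] * beta - lse);
+ *   }
+ */
+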
+ /**
+ * Returns the element-wise maximum of two tensors.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and compatible dimensions
+ * with input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from input0 scale and zeroPoint.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from inputs' scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_MAXIMUM = 65,
+
+ /**
+ * Returns the element-wise minimum of two tensors.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and compatible dimensions
+ * with input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from input0 scale and zeroPoint.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from inputs' scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_MINIMUM = 66,
+
+ /**
+ * Computes numerical negative value element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_NEG = 67,
+
+ /**
+ * For input tensors x and y, computes x != y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+ * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_BOOL8}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_NOT_EQUAL = 68,
+
+ /**
+ * Pads a tensor with the given constant value according to the specified
+ * paddings.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the tensor to be padded.
+ * * 1: A 2-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the paddings
+ * for each spatial dimension of the input tensor. The shape of the
+ * tensor must be {rank(input0), 2}.
+ * padding[i, 0] specifies the number of elements to be padded in the
+ * front of dimension i.
+ * padding[i, 1] specifies the number of elements to be padded after
+ * the end of dimension i.
+ * * 2: A scalar specifying the value to use for padding input0.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the
+ * pad value must be of {@link ANEURALNETWORKS_FLOAT16}.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}, the
+ * pad value must be of {@link ANEURALNETWORKS_FLOAT32}.
+ * For input tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * the pad value must be of {@link ANEURALNETWORKS_INT32}. The
+ * scale and zeroPoint are assumed to be the same as in input0.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0. The
+ * output tensor has the same rank as input0, and each
+ * dimension of the output tensor has the same size as the
+ * corresponding dimension of the input tensor plus the size
+ * of the padding:
+ * output0.dimension[i] =
+ * padding[i, 0] + input0.dimension[i] + padding[i, 1]
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_PAD_V2 = 69,
+
+ /**
+ * Computes the power of one value to another.
+ *
+ * Given a tensor base and a tensor exponent, this operation computes
+ * base^exponent elementwise.
+ *
+ * This operation supports broadcasting. The size of the output is the
+ * maximum size along each dimension of the input operands. It starts with
+ * the trailing dimensions, and works its way forward. (An informative
+ * sketch of this rule follows this entry.)
+ *
+ * For example:
+ * base.dimension = {4, 1, 2}
+ * exponent.dimension = {5, 4, 3, 1}
+ * output.dimension = {5, 4, 3, 2}
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: A tensor specifying the base.
+ * * 1: A tensor specifying the exponent.
+ *
+ * Outputs:
+ * * 0: An output tensor.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_POW = 70,
+
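+ /*
+ * Informative sketch (editor's addition): the broadcast shape rule used
+ * by POW (and the other broadcasting ops above), aligning dimensions from
+ * the trailing end and treating missing leading dimensions as 1. Returns
+ * -1 if a dimension pair is neither equal nor 1. Names are illustrative.
+ *
+ *   static int broadcast_shape(const uint32_t *a, uint32_t rank_a,
+ *                              const uint32_t *b, uint32_t rank_b,
+ *                              uint32_t *out, uint32_t out_rank) {
+ *     // out_rank must be max(rank_a, rank_b).
+ *     for (uint32_t i = 0; i < out_rank; ++i) {
+ *       uint32_t da = i < rank_a ? a[rank_a - 1 - i] : 1;
+ *       uint32_t db = i < rank_b ? b[rank_b - 1 - i] : 1;
+ *       if (da != db && da != 1 && db != 1) return -1;
+ *       out[out_rank - 1 - i] = da > db ? da : db;
+ *     }
+ *     return 0;  // e.g. {4,1,2} with {5,4,3,1} yields {5,4,3,2}
+ *   }
+ */
+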
+ /**
+ * Parametric Rectified Linear Unit.
+ *
+ * It follows: f(x) = alpha * x for x < 0, f(x) = x for x >= 0, where alpha
+ * is a learned array with the same {@link OperandCode} and compatible
+ * dimensions as input x.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Example:
+ * input.dimension = {4, 1, 2}
+ * alpha.dimension = {5, 4, 3, 1}
+ * output.dimension = {5, 4, 3, 2}
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0, specifying the alpha.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from the input0 scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_PRELU = 71,
+
+ /**
+ * Quantizes the input tensor.
+ *
+ * The formula is:
+ *
+ * output = max(0, min(255, round(input / scale) + zeroPoint))
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: A tensor, may be zero-sized.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0, but with
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_QUANTIZE = 72,
+
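+ /*
+ * Informative sketch (editor's addition): the QUANTIZE formula above
+ * applied on the host to a float buffer. The helper name is illustrative;
+ * assumes #include <math.h> and #include <stdint.h>.
+ *
+ *   static void quantize_to_uint8(const float *in, uint8_t *out, size_t n,
+ *                                 float scale, int32_t zeroPoint) {
+ *     for (size_t i = 0; i < n; ++i) {
+ *       // output = max(0, min(255, round(input / scale) + zeroPoint))
+ *       long v = lroundf(in[i] / scale) + zeroPoint;
+ *       out[i] = (uint8_t)(v < 0 ? 0 : v > 255 ? 255 : v);
+ *     }
+ *   }
+ */
+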
+ /**
+ * A version of quantized LSTM, using 16 bit quantization for internal
+ * state.
+ *
+ * There is no projection layer, so cell state size is equal to the output
+ * size.
+ *
+ * Inputs:
+ * * 0: A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [numBatches, inputSize] specifying the input to the LSTM
+ * cell. Tensor is quantized with a fixed quantization range of
+ * [-1, 127/128] (scale = 1/128, zeroPoint = 128).
+ * * 1: The input-to-input weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, inputSize] specifying input-to-input part of
+ * weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 2: The input-to-forget weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, inputSize] specifying input-to-forget part of
+ * weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 3: The input-to-cell weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, inputSize] specifying input-to-cell part of
+ * weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 4: The input-to-output weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, inputSize] specifying input-to-output part of
+ * weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 5: The recurrent-to-input weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, outputSize] specifying recurrent-to-input part
+ * of weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 6: The recurrent-to-forget weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, outputSize] specifying recurrent-to-forget
+ * part of weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 7: The recurrent-to-cell weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, outputSize] specifying recurrent-to-cell part
+ * of weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 8: The recurrent-to-output weights.
+ * A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [outputSize, outputSize] specifying recurrent-to-output
+ * part of weights for fully-connected layer inside the LSTM cell.
+ * Quantization zero point and scale must be the same across all the
+ * weights.
+ * * 9: The input gate bias.
+ * A 1-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32} and shape
+ * [outputSize] specifying the bias for the fully-connected layer
+ * inside the LSTM cell. Bias is quantized with scale being a product
+ * of input and weights scales and zeroPoint equal to 0.
+ * * 10:The forget gate bias.
+ * A 1-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32} and shape
+ * [outputSize] specifying the bias for the fully-connected layer
+ * inside the LSTM cell. Bias is quantized with scale being a product
+ * of input and weights scales and zeroPoint equal to 0.
+ * * 11:The cell bias.
+ * A 1-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32} and shape
+ * [outputSize] specifying the bias for the fully-connected layer
+ * inside the LSTM cell. Bias is quantized with scale being a product
+ * of input and weights scales and zeroPoint equal to 0.
+ * * 12:The output gate bias.
+ * A 1-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32} and shape
+ * [outputSize] specifying the bias for the fully-connected layer
+ * inside the LSTM cell. Bias is quantized with scale being a product
+ * of input and weights scales and zeroPoint equal to 0.
+ * * 13: A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT16_SYMM}
+ * and shape [numBatches, outputSize] specifying the cell state from the
+ * previous time step of the LSTM cell. It is quantized using a
+ * quantization range of [-2^4, 2^4 * 32767/32768] (scale = 2^4 /
+ * 32768, zeroPoint = 0).
+ * * 14: A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [numBatches, outputSize] specifying the output of the LSTM
+ * cell from previous time-step. Tensor is quantized with a fixed
+ * quantization range of [-1, 127/128] (scale = 1/128, zeroPoint =
+ * 128).
+ *
+ * Outputs:
+ * * 0: A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT16_SYMM}
+ * and shape [numBatches, outputSize] which contains a cell state from
+ * the current time step. Tensor is quantized using a quantization
+ * range of [-2^4, 2^4 * 32767/32768] (scale = 2^4 / 32768, zeroPoint =
+ * 0).
+ * * 1: A 2-D tensor of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * and shape [numBatches, outputSize] which contains the output value.
+ * Tensor is quantized with a fixed quantization range of [-1, 127/128]
+ * (scale = 1/128, zeroPoint = 128).
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_QUANTIZED_16BIT_LSTM = 73,
+
+ /**
+ * Draws samples from a multinomial distribution.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Inputs:
+ * * 0: A 2-D tensor with shape [batches, classes], specifying the
+ * unnormalized log-probabilities for all classes.
+ * * 1: A scalar {@link ANEURALNETWORKS_INT32}, specifying the number of
+ * independent samples to draw for each row slice.
+ * * 2: A 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor with shape [2],
+ * specifying seeds used to initialize the random distribution.
+ * Outputs:
+ * * 0: A 2-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor with shape
+ * [batches, samples], containing the drawn samples.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_RANDOM_MULTINOMIAL = 74,
+
+ /**
+ * Reduces a tensor by computing the "logical and" of elements along given
+ * dimensions.
+ *
+ * If keep_dims is true, the reduced dimensions are
+ * retained with length 1. Otherwise, the rank of the tensor is reduced by
+ * 1 for each entry in dimensions.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor.
+ * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce. Dimension values must be in the range [-n, n).
+ * * 2: An {@link ANEURALNETWORKS_BOOL} scalar, keep_dims. If true,
+ * retains reduced dimensions with length 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_REDUCE_ALL = 75,
+
+ /**
+ * Reduces a tensor by computing the "logical or" of elements along given
+ * dimensions.
+ *
+ * If keep_dims is true, the reduced dimensions are
+ * retained with length 1. Otherwise, the rank of the tensor is reduced by
+ * 1 for each entry in dimensions.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_BOOL8}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor.
+ * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce. Dimension values must be in the range [-n, n).
+ * * 2: An {@link ANEURALNETWORKS_BOOL} scalar, keep_dims. If true,
+ * retains reduced dimensions with length 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_REDUCE_ANY = 76,
+
+ /**
+ * Reduces a tensor by computing the maximum of elements along given
+ * dimensions.
+ *
+ * If keep_dims is true, the reduced dimensions are
+ * retained with length 1. Otherwise, the rank of the tensor is reduced by
+ * 1 for each entry in dimensions.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor.
+ * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce. Dimension values must be in the range [-n, n).
+ * * 2: An {@link ANEURALNETWORKS_BOOL} scalar, keep_dims. If true,
+ * retains reduced dimensions with length 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_REDUCE_MAX = 77,
+
+ /**
+ * Reduces a tensor by computing the minimum of elements along given
+ * dimensions.
+ *
+ * If keep_dims is true, the reduced dimensions are
+ * retained with length 1. Otherwise, the rank of the tensor is reduced by
+ * 1 for each entry in dimensions.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor.
+ * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce. Dimension values must be in the range [-n, n).
+ * * 2: An {@link ANEURALNETWORKS_BOOL} scalar, keep_dims. If true,
+ * retains reduced dimensions with length 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_REDUCE_MIN = 78,
+
+ /**
+ * Reduces a tensor by multiplying elements along given dimensions.
+ *
+ * If keep_dims is true, the reduced dimensions are
+ * retained with length 1. Otherwise, the rank of the tensor is reduced by
+ * 1 for each entry in dimensions.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor.
+ * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce. Dimension values must be in the range [-n, n).
+ * * 2: An {@link ANEURALNETWORKS_BOOL} scalar, keep_dims. If true,
+ * retains reduced dimensions with length 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_REDUCE_PROD = 79,
+
+ /**
+ * Reduces a tensor by summing elements along given dimensions.
+ *
+ * If keep_dims is true, the reduced dimensions are
+ * retained with length 1. Otherwise, the rank of the tensor is reduced by
+ * 1 for each entry in dimensions.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor.
+ * * 1: A 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce. Dimension values must be in the range [-n, n).
+ * * 2: An {@link ANEURALNETWORKS_BOOL} scalar, keep_dims. If true,
+ * retains reduced dimensions with length 1.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_REDUCE_SUM = 80,
+
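+ /*
+ * Informative sketch (editor's addition): the output-shape bookkeeping
+ * shared by the REDUCE_* ops above. With keep_dims true, reduced
+ * dimensions are retained with length 1; otherwise they are dropped.
+ * Names are illustrative; assumes #include <stdbool.h> and <stdint.h>.
+ *
+ *   static uint32_t reduced_shape(const uint32_t *in_dims, uint32_t n,
+ *                                 const bool *reduce, bool keep_dims,
+ *                                 uint32_t *out_dims) {
+ *     uint32_t out_rank = 0;
+ *     for (uint32_t i = 0; i < n; ++i) {
+ *       if (!reduce[i])
+ *         out_dims[out_rank++] = in_dims[i];   // kept as-is
+ *       else if (keep_dims)
+ *         out_dims[out_rank++] = 1;            // retained with length 1
+ *     }
+ *     return out_rank;
+ *   }
+ */
+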
+ /**
+ * Select and scale the feature map of each region of interest to a unified
+ * output size by average pooling sampling points from bilinear interpolation.
+ *
+ * The region of interest is represented by its upper-left corner coordinate
+ * (x1,y1) and lower-right corner coordinate (x2,y2) in the original image.
+ * A spatial scaling factor is applied to map into feature map coordinate.
+ * A valid region of interest should satisfy x1 <= x2 and y1 <= y2.
+ *
+ * No rounding is applied in this operation. The sampling points are
+ * uniformly distributed in the pooling bin and their values are calculated
+ * by bilinear interpolation.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} (since API level 29)
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: A 4-D tensor, specifying the feature map.
+ * * 1: A 2-D Tensor of shape [num_rois, 4], specifying the locations of
+ * the regions of interest, each line with format [x1, y1, x2, y2].
+ * For input0 of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * this tensor should be of {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM},
+ * with zeroPoint of 0 and scale of 0.125. Zero num_rois is
+ * supported for this tensor.
+ * * 2: An 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [num_rois], specifying the batch index of each box. Boxes with
+ * the same batch index are grouped together. Zero num_rois is
+ * supported for this tensor.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * height of the output tensor.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * width of the output tensor.
+ * * 5: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the ratio
+ * from the height of original image to the height of feature map.
+ * * 6: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the ratio
+ * from the width of original image to the width of feature map.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
+ * sampling points in height dimension used to compute the output.
+ * Set to 0 for adaptive value of ceil(roi_height/out_height).
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
+ * sampling points in width dimension used to compute the output.
+ * Set to 0 for adaptive value of ceil(roi_width/out_width).
+ * * 9: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and output0. Set to false for NHWC.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0. The output
+ * shape is [num_rois, out_height, out_width, depth].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from the input0 scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_ROI_ALIGN = 81,
+
+ /**
+ * Select and scale the feature map of each region of interest to a unified
+ * output size by max-pooling.
+ *
+ * The region of interest is represented by its upper-left corner coordinate
+ * (x1,y1) and lower-right corner coordinate (x2,y2) in the original image.
+ * A spatial scaling factor is applied to map into feature map coordinate.
+ * A valid region of interest should satisfy x1 <= x2 and y1 <= y2.
+ *
+ * Rounding is applied in this operation to ensure integer boundary for
+ * regions of interest and pooling bins.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Inputs:
+ * * 0: A 4-D tensor, specifying the feature map.
+ * * 1: A 2-D Tensor of shape [num_rois, 4], specifying the locations of
+ * the regions of interest, each line with format [x1, y1, x2, y2].
+ * For input0 of type {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM},
+ * this tensor should be of {@link ANEURALNETWORKS_TENSOR_QUANT16_ASYMM},
+ * with zeroPoint of 0 and scale of 0.125.
+ * * 2: An 1-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor, of shape
+ * [num_rois], specifying the batch index of each box. Boxes with
+ * the same batch index are grouped together.
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * height of the output tensor.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * width of the output tensor.
+ * * 5: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the ratio
+ * from the height of original image to the height of feature map.
+ * * 6: An {@link ANEURALNETWORKS_FLOAT32} scalar, specifying the ratio
+ * from the width of original image to the width of feature map.
+ * * 7: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and output0. Set to false for NHWC.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0. The output
+ * shape is [num_rois, out_height, out_width, depth].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_ROI_POOLING = 82,
+
+ /**
+ * Computes reciprocal of square root of x element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_RSQRT = 83,
+
+ /**
+ * Using a tensor of booleans c and input tensors x and y, select values
+ * elementwise from both input tensors:
+ *
+ *     output[i] = c[i] ? x[i] : y[i].
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: A tensor of type {@link ANEURALNETWORKS_TENSOR_BOOL8} acting as a
+ * mask that chooses, based on the value at each element, whether the
+ * corresponding element in the output should be taken from input1 (if
+ * true) or input2 (if false).
+ * * 1: An input tensor of the same shape as input0.
+ * * 2: An input tensor of the same shape and type as input1.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from input1 scale and zeroPoint.
+ *
+ * Outputs:
+ * * 0: A tensor of the same type and shape as input1 and input2.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from inputs' scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_SELECT = 84,
+
+ /**
+ * Computes sin of x element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_SIN = 85,
+
+ /**
+ * Extracts a slice of specified size from the input tensor starting at a
+ * specified location.
+ *
+ * The starting location is specified as a 1-D tensor containing offsets
+ * for each dimension. The size is specified as a 1-D tensor containing
+ * either the size of the slice along the corresponding dimension or -1. In
+ * the latter case, all the remaining elements in that dimension are
+ * included in the slice.
+ *
+ * The sum of the begin offset and the size of a slice must not exceed the
+ * size of the corresponding dimension.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor to take slice from, may be zero-sized.
+ * * 1: A 1-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32} specifying
+ * the beginning indices of the slice in each dimension.
+ * * 2: A 1-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32} specifying
+ * the size of the slice in each dimension.
+ *
+ * Outputs:
+ * * 0: An n-D tensor of the same type as the input containing the slice.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * its scale and zeroPoint have to be the same as the input0 scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_SLICE = 86,
+
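+ /*
+ * Informative sketch (editor's addition): resolving the SLICE size operand
+ * described above, where -1 means "all remaining elements in that
+ * dimension", and validating begin + size against the input shape.
+ * Returns -1 on an out-of-range slice. Names are illustrative; assumes
+ * #include <stdint.h>.
+ *
+ *   static int resolve_slice_size(const uint32_t *dims, const int32_t *begin,
+ *                                 const int32_t *size, uint32_t rank,
+ *                                 uint32_t *out_size) {
+ *     for (uint32_t i = 0; i < rank; ++i) {
+ *       int64_t s = (size[i] == -1) ? (int64_t)dims[i] - begin[i] : size[i];
+ *       if (begin[i] < 0 || s < 0 || begin[i] + s > (int64_t)dims[i])
+ *         return -1;
+ *       out_size[i] = (uint32_t)s;
+ *     }
+ *     return 0;
+ *   }
+ */
+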
+ /**
+ * Splits a tensor along a given axis into num_splits subtensors.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor to split.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis along
+ * which to split.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar indicating the number of
+ * splits along given axis. Must evenly divide axis size.
+ *
+ * Outputs:
+ * * 0 ~ (num_splits - 1): Resulting subtensors.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_SPLIT = 87,
+
+ /**
+ * Computes square root of x element-wise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: from 1.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ *
+ * Outputs:
+ * * 0: The output tensor of same shape as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_SQRT = 88,
+
+ /**
+ * Constructs a tensor by tiling a given tensor.
+ *
+ * This operation creates a new tensor by replicating `input` `multiples`
+ * times. The output tensor's i-th dimension has `input.dims(i) * multiples[i]`
+ * elements, and the values of `input` are replicated `multiples[i]` times
+ * along the i-th dimension.
+ * For example, tiling `[a b c d]` by `[2]` produces `[a b c d a b c d]`.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: input, an n-D tensor specifying the input.
+ * * 1: multiples, a 1-D tensor of {@link ANEURALNETWORKS_TENSOR_INT32}.
+ * The length of multiples must be n.
+ *
+ * Outputs:
+ * * 0: A tiled tensor of the same {@link OperandCode} and rank as `input`.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TILE = 89,
+
+ /**
+ * Finds values and indices of the k largest entries for the last dimension.
+ *
+ * Resulting values in each dimension are sorted in descending order. If
+ * two values are equal, the one with larger index appears first.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: input, an n-D tensor specifying the input.
+ * * 1: k, an {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
+ * top elements to look for along the last dimension.
+ *
+ * Outputs:
+ * * 0: An n-D tensor of the same type as the input, containing the k
+ * largest elements along each last dimensional slice.
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ * * 1: An n-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32}
+ * containing the indices of values within the last dimension of input.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TOPK_V2 = 90,
+
+ /**
+ * Performs the transpose of 2-D convolution operation.
+ *
+ * This operation is sometimes called "deconvolution" after Deconvolutional
+ * Networks, but is actually the transpose (gradient) of
+ * {@link ANEURALNETWORKS_CONV_2D} rather than an actual deconvolution.
+ *
+ * The output dimensions are functions of the filter dimensions, stride, and
+ * padding.
+ *
+ * Supported tensor {@link OperandCode} configurations:
+ * * 16 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT16} for input, filter, output, and bias.
+ *
+ * * 32 bit floating point:
+ * * * {@link ANEURALNETWORKS_TENSOR_FLOAT32} for input, filter, output, and bias.
+ *
+ * * Quantized:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, filter, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (with scale set to
+ * * * input.scale * filter.scale).
+ *
+ * * Quantized with symmetric per channel quantization for the filter:
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} for input, and output.
+ * * * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} for filter.
+ * * * {@link ANEURALNETWORKS_TENSOR_INT32} for bias (scale set to 0.0,
+ * * * each value scaling is separate and equal to input.scale * filter.scales[channel]).
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both explicit padding and implicit padding are supported.
+ *
+ * Inputs (explicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input. Since API level 29, zero batches is supported
+ * for this tensor.
+ * * 1: A 4-D tensor, of shape
+ * [depth_out, filter_height, filter_width, depth_in], specifying the
+ * filter. For tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} the channel
+ * dimension (extraParams.channelQuant.channelDim) must be set to 0.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias should be of the
+ * same type. For input tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias should be
+ * of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of 0 and
+ * bias_scale == input_scale * filter_scale. For filter tensor of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * must be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal
+ * to bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the left, in the ‘width’ dimension.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the right, in the ‘width’ dimension.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the top, in the ‘height’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding on
+ * the bottom, in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 8: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 9: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 10: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and output0. Set to false for NHWC.
+ *
+ * Inputs (implicit padding):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth_in],
+ * specifying the input. Since API level 29, zero batches is supported
+ * for this tensor.
+ * * 1: A 4-D tensor, of shape
+ * [depth_out, filter_height, filter_width, depth_in], specifying the
+ * filter. For tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} the channel
+ * dimension (extraParams.channelQuant.channelDim) must be set to 0.
+ * * 2: A 1-D tensor, of shape [depth_out], specifying the bias. For input
+ * tensor of type {@link ANEURALNETWORKS_TENSOR_FLOAT32} or
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT16}, the bias should be of the
+ * same type. For input tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, the bias should be
+ * of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of 0 and
+ * bias_scale == input_scale * filter_scale. For filter tensor of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}, the bias
+ * must be of {@link ANEURALNETWORKS_TENSOR_INT32}, with zeroPoint of
+ * 0 and bias_scale of 0. The actual scale of each value 'i' is equal
+ * to bias_scale[i] = input_scale * filter_scale[i].
+ * * 3: An {@link ANEURALNETWORKS_TENSOR_INT32} tensor, specifying the output
+ * tensor shape.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the implicit
+ * padding scheme, has to be one of the
+ * {@link PaddingCode} values.
+ * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘width’ dimension.
+ * * 6: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+ * walking through input in the ‘height’ dimension.
+ * * 7: An {@link ANEURALNETWORKS_INT32} scalar, and has to be one of the
+ * {@link FuseCode} values. Specifies the activation to
+ * invoke on the result.
+ * * 8: An {@link ANEURALNETWORKS_BOOL} scalar, set to true to specify
+ * NCHW data layout for input0 and output0. Set to false for NHWC.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, out_height, out_width, depth_out].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint can be different from inputs' scale and zeroPoint.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_TRANSPOSE_CONV_2D = 91,
+
+ /**
+ * A recurrent neural network specified by an LSTM cell.
+ *
+ * Performs (fully) dynamic unrolling of input.
+ *
+ * This Op unrolls the input along the time dimension, and implements the
+ * following operation for each element in the sequence
+ * s = 1...sequence_length:
+ * outputs[s] = projection(state = activation(LSTMOp(inputs[s])))
+ *
+ * Where LSTMOp is the LSTM op as in {@link ANEURALNETWORKS_LSTM},
+ * the "projection" is an optional projection layer from state and output
+ * and the “activation” is the function passed as the
+ * “fused_activation_function” argument (if not “NONE”).
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: 3, either time-major or batch-major.
+ *
+ * All input and output tensors must be of the same type.
+ *
+ * Inputs:
+ * * 0: The input (\f$x_t\f$).
+ * A 3-D tensor of shape:
+ * If time-major: [max_time, batch_size, input_size]
+ * If batch-major: [batch_size, max_time, input_size]
+ * where “max_time” is the number of timesteps (sequence length),
+ * “batch_size” corresponds to the batching dimension, and
+ * “input_size” is the size of the input.
+ * * 1: The input-to-input weights (\f$W_{xi}\f$). Optional.
+ * A 2-D tensor of shape [num_units, input_size], where “num_units”
+ * corresponds to the number of cell units.
+ * * 2: The input-to-forget weights (\f$W_{xf}\f$).
+ * A 2-D tensor of shape [num_units, input_size].
+ * * 3: The input-to-cell weights (\f$W_{xc}\f$).
+ * A 2-D tensor of shape [num_units, input_size].
+ * * 4: The input-to-output weights (\f$W_{xo}\f$).
+ * A 2-D tensor of shape [num_units, input_size].
+ * * 5: The recurrent-to-input weights (\f$W_{hi}\f$). Optional.
+ * A 2-D tensor of shape [num_units, output_size], where “output_size”
+ * corresponds to either the number of cell units (i.e., “num_units”),
+ * or the second dimension of the “projection_weights”, if defined.
+ * * 6: The recurrent-to-forget weights (\f$W_{hf}\f$).
+ * A 2-D tensor of shape [num_units, output_size].
+ * * 7: The recurrent-to-cell weights (\f$W_{hc}\f$).
+ * A 2-D tensor of shape [num_units, output_size].
+ * * 8: The recurrent-to-output weights (\f$W_{ho}\f$).
+ * A 2-D tensor of shape [num_units, output_size].
+ * * 9: The cell-to-input weights (\f$W_{ci}\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 10:The cell-to-forget weights (\f$W_{cf}\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 11:The cell-to-output weights (\f$W_{co}\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 12:The input gate bias (\f$b_i\f$). Optional.
+ * A 1-D tensor of shape [num_units].
+ * * 13:The forget gate bias (\f$b_f\f$).
+ * A 1-D tensor of shape [num_units].
+ * * 14:The cell bias (\f$b_c\f$).
+ * A 1-D tensor of shape [num_units].
+ * * 15:The output gate bias (\f$b_o\f$).
+ * A 1-D tensor of shape [num_units].
+ * * 16:The projection weights (\f$W_{proj}\f$). Optional.
+ * A 2-D tensor of shape [output_size, num_units].
+ * * 17:The projection bias (\f$b_{proj}\f$). Optional.
+ * A 1-D tensor of shape [output_size].
+ * * 18:The output state (in) (\f$h_{t-1}\f$).
+ * A 2-D tensor of shape [batch_size, output_size].
+ * * 19:The cell state (in) (\f$C_{t-1}\f$).
+ * A 2-D tensor of shape [batch_size, num_units].
+ * * 20:The activation function (\f$g\f$).
+ * A value indicating the activation function:
+ * <ul>
+ * <li>0: None;
+ * <li>1: Relu;
+ * <li>3: Relu6;
+ * <li>4: Tanh;
+ * <li>6: Sigmoid.
+ * </ul>
+ * * 21:The clipping threshold (\f$t_{cell}\f$) for the cell state, such
+ * that values are bound within [-cell_clip, cell_clip]. If set to 0.0
+ * then clipping is disabled.
+ * * 22:The clipping threshold (\f$t_{proj}\f$) for the output from the
+ * projection layer, such that values are bound within
+ * [-proj_clip, proj_clip]. If set to 0.0 then clipping is disabled.
+ * * 23:Time-major if true, batch-major if false.
+ * * 24:The input layer normalization weights. Optional.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at input gate.
+ * * 25:The forget layer normalization weights. Optional.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at forget gate.
+ * * 26:The cell layer normalization weights. Optional.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at cell gate.
+ * * 27:The output layer normalization weights. Optional.
+ * A 1-D tensor of shape [num_units]. Used to rescale normalized inputs
+ * to activation at output gate.
+ *
+ * Outputs:
+ * * 0: The output (\f$o_t\f$).
+ * A 3-D tensor of shape:
+ * If time-major: [max_time, batch_size, output_size]
+ * If batch-major: [batch_size, max_time, output_size]
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM = 92,
+
+ /**
+ * A recurrent neural network layer that applies a basic RNN cell to a
+ * sequence of inputs.
+ *
+ * This layer unrolls the input along the sequence dimension, and implements
+ * the following operation
+ * for each element in the sequence s = 1...sequence_length:
+ * outputs[s] = state = activation(inputs[s] * input_weights’ + state *
+ * recurrent_weights’ + bias)
+ *
+ * Where:
+ * * “input_weights” is a weight matrix that multiplies the inputs;
+ * * “recurrent_weights” is a weight matrix that multiplies the current
+ * “state” which itself is the output from the previous time step
+ * computation;
+ * * “bias” is a bias vector (added to each output vector in the batch);
+ * * “activation” is the function passed as the “fused_activation_function”
+ * argument (if not “NONE”).
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * The input tensors must all be the same type.
+ *
+ * Inputs:
+ * * 0: input.
+ * A 3-D tensor. The shape is defined by the input 6 (timeMajor). If
+ * it is set to 1, then the input has a shape [maxTime, batchSize,
+ * inputSize], otherwise the input has a shape [batchSize, maxTime,
+ * inputSize].
+ * * 1: weights.
+ * A 2-D tensor of shape [numUnits, inputSize].
+ * * 2: recurrent_weights.
+ * A 2-D tensor of shape [numUnits, numUnits].
+ * * 3: bias.
+ * A 1-D tensor of shape [numUnits].
+ * * 4: hidden state
+ * A 2-D tensor of shape [batchSize, numUnits]. Specifies a hidden
+ * state input for the first time step of the computation.
+ * * 5: fusedActivationFunction.
+ * A {@link FuseCode} value indicating the activation function. If
+ * “NONE” is specified then it results in a linear activation.
+ * * 6: timeMajor
+ * An {@link ANEURALNETWORKS_INT32} scalar specifying the shape format
+ * of input and output tensors. Must be set to either 0 or 1.
+ * Outputs:
+ * * 0: output.
+ * A 3-D tensor. The shape is defined by the input 6 (timeMajor). If
+ * it is set to 1, then the output has a shape [maxTime, batchSize,
+ * numUnits], otherwise the output has a shape [batchSize, maxTime,
+ * numUnits].
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN = 93,
+
+ /**
+ * Resizes images to given size using the nearest neighbor interpolation.
+ *
+ * Resized images will be distorted if their output aspect ratio is not the
+ * same as the input aspect ratio. The corner pixels of the output may not
+ * be the same as the corner pixels of the input.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT16}
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: 4, with "NHWC" or "NCHW" data layout.
+ * With the default data layout NHWC, the data is stored in the order of:
+ * [batch, height, width, channels]. Alternatively, the data layout could
+ * be NCHW, the data storage order of: [batch, channels, height, width].
+ *
+ * Both resizing by shape and resizing by scale are supported.
+ *
+ * Inputs (resizing by shape):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Zero batches is supported for this tensor.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * width of the output tensor.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the output
+ * height of the output tensor.
+ * * 3: An {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ *
+ * Inputs (resizing by scale):
+ * * 0: A 4-D tensor, of shape [batches, height, width, depth], specifying
+ * the input. Zero batches is supported for this tensor.
+ * * 1: A scalar, specifying width_scale, the scaling factor of the width
+ * dimension from the input tensor to the output tensor. The output
+ * width is calculated as new_width = floor(width * width_scale).
+ * The scalar must be of {@link ANEURALNETWORKS_FLOAT16} if input0 is
+ * of {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of
+ * {@link ANEURALNETWORKS_FLOAT32} otherwise.
+ * * 2: A scalar, specifying height_scale, the scaling factor of the height
+ * dimension from the input tensor to the output tensor. The output
+ * height is calculated as new_height = floor(height * height_scale).
+ * The scalar must be of {@link ANEURALNETWORKS_FLOAT16} if input0 is
+ * of {@link ANEURALNETWORKS_TENSOR_FLOAT16} and of
+ * {@link ANEURALNETWORKS_FLOAT32} otherwise.
+ * * 3: An {@link ANEURALNETWORKS_BOOL} scalar, default to false.
+ * Set to true to specify NCHW data layout for input0 and output0.
+ *
+ * Outputs:
+ * * 0: The output 4-D tensor, of shape
+ * [batches, new_height, new_width, depth].
+ * For a {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} tensor,
+ * the scale and zeroPoint must be the same as input0.
+ *
+ * Available since API level 29.
+ */
+ ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR = 94,
+} OperationCode;
+
+/**
+ * Fused activation function types.
+ *
+ * Available since API level 27.
+ */
+typedef enum {
+ /** NO fused activation function. */
+ ANEURALNETWORKS_FUSED_NONE = 0,
+ /** Fused ReLU activation function. */
+ ANEURALNETWORKS_FUSED_RELU = 1,
+ /** Fused ReLU1 activation function. */
+ ANEURALNETWORKS_FUSED_RELU1 = 2,
+ /** Fused ReLU6 activation function. */
+ ANEURALNETWORKS_FUSED_RELU6 = 3,
+} FuseCode;
+
+/**
+ * Implicit padding algorithms.
+ *
+ * Available since API level 27.
+ */
+typedef enum {
+ /**
+ * SAME padding.
+ * Padding on both ends is the "same":
+ * padding_to_beginning = total_padding / 2
+ * padding_to_end = (total_padding + 1) / 2
+ * i.e., for an even amount of total padding, the padding on both ends is
+ * exactly the same; for an odd amount, the padding at the end is larger
+ * than the padding at the beginning by 1.
+ *
+ * total_padding is a function of the input size, stride, and filter size.
+ * It can be computed as follows:
+ * out_size = (input_size + stride - 1) / stride
+ * needed_input = (out_size - 1) * stride + filter_size
+ * total_padding = max(0, needed_input - input_size)
+ * The computation is the same for the horizontal and vertical directions.
+ */
+ ANEURALNETWORKS_PADDING_SAME = 1,
+
+ /**
+ * VALID padding.
+ * No padding. When the input size is not evenly divisible by
+ * the filter size, the input at the end that could not fill
+ * the whole filter tile will simply be ignored.
+ */
+ ANEURALNETWORKS_PADDING_VALID = 2,
+} PaddingCode;
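+
+/*
+ * Example (illustrative sketch, not part of the upstream header): computing
+ * SAME padding for one dimension using the formulas above. C integer division
+ * matches the floor semantics used here.
+ *
+ *   static uint32_t same_total_padding(uint32_t input_size, uint32_t stride,
+ *                                      uint32_t filter_size) {
+ *     uint32_t out_size = (input_size + stride - 1) / stride;
+ *     uint32_t needed_input = (out_size - 1) * stride + filter_size;
+ *     return needed_input > input_size ? needed_input - input_size : 0;
+ *   }
+ *
+ *   // E.g., input_size = 10, stride = 2, filter_size = 3 gives out_size = 5,
+ *   // needed_input = 11, total_padding = 1, so padding_to_beginning = 0 and
+ *   // padding_to_end = 1.
+ */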
+
+/**
+ * Execution preferences.
+ *
+ * Available since API level 27.
+ */
+typedef enum {
+ /**
+ * Prefer executing in a way that minimizes battery drain.
+ * This is desirable for compilations that will be executed often.
+ */
+ ANEURALNETWORKS_PREFER_LOW_POWER = 0,
+ /**
+ * Prefer returning a single answer as fast as possible, even if this causes
+ * more power consumption.
+ */
+ ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER = 1,
+ /**
+ * Prefer maximizing the throughput of successive frames, for example when
+ * processing successive frames coming from the camera.
+ */
+ ANEURALNETWORKS_PREFER_SUSTAINED_SPEED = 2,
+} PreferenceCode;
+
+/**
+ * Device types.
+ *
+ * The type of NNAPI device.
+ */
+typedef enum {
+ /** The device type cannot be provided. */
+ ANEURALNETWORKS_DEVICE_UNKNOWN = 0,
+ /** The device does not fall into any category below. */
+ ANEURALNETWORKS_DEVICE_OTHER = 1,
+ /** The device runs NNAPI models on single or multi-core CPU. */
+ ANEURALNETWORKS_DEVICE_CPU = 2,
+ /** The device can run NNAPI models and also accelerate graphics APIs such
+ * as OpenGL ES and Vulkan. */
+ ANEURALNETWORKS_DEVICE_GPU = 3,
+ /** Dedicated accelerator for Machine Learning workloads. */
+ ANEURALNETWORKS_DEVICE_ACCELERATOR = 4,
+} DeviceTypeCode;
+
+/**
+ * Result codes.
+ *
+ * <p>Any NNAPI function can return any result code, including result codes not
+ * currently documented. Any value other than {@link ANEURALNETWORKS_NO_ERROR}
+ * indicates a failure of some kind.</p>
+ *
+ * <p>Additional information about the nature of a failure can be obtained from
+ * the device log after enabling NNAPI debugging by setting the debug.nn.vlog
+ * property to 1, e.g., by calling "adb shell setprop debug.nn.vlog 1".</p>
+ *
+ * Available since API level 27.
+ */
+typedef enum {
+ /**
+ * Operation was successful.
+ */
+ ANEURALNETWORKS_NO_ERROR = 0,
+
+ /**
+ * Failure caused by not enough available memory.
+ */
+ ANEURALNETWORKS_OUT_OF_MEMORY = 1,
+
+ ANEURALNETWORKS_INCOMPLETE = 2,
+
+ /**
+ * Failure caused by unexpected null argument.
+ */
+ ANEURALNETWORKS_UNEXPECTED_NULL = 3,
+
+ /**
+ * Failure caused by invalid function arguments, invalid model definition,
+ * invalid execution definition or invalid data at execution time.
+ */
+ ANEURALNETWORKS_BAD_DATA = 4,
+
+ /**
+ * Failure caused by failed model execution.
+ */
+ ANEURALNETWORKS_OP_FAILED = 5,
+
+ /**
+ * Failure caused by object being in the wrong state.
+ */
+ ANEURALNETWORKS_BAD_STATE = 6,
+
+ /**
+ * Failure caused by not being able to map a file into memory.
+ * This may be caused by a file descriptor that is not mappable, or by an
+ * AHardwareBuffer that is not supported by the device.
+ * This failure can be mitigated by reading the content into memory instead.
+ */
+ ANEURALNETWORKS_UNMAPPABLE = 7,
+
+ /**
+ * Failure caused by insufficient buffer size provided to a model output.
+ */
+ ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE = 8,
+
+ /**
+ * Failure caused by a device not being available.
+ */
+ ANEURALNETWORKS_UNAVAILABLE_DEVICE = 9,
+} ResultCode;
+
+/**
+ * For {@link ANeuralNetworksModel_setOperandValue}, values with a
+ * length smaller or equal to this will be immediately copied into
+ * the model. The size is in bytes.
+ *
+ * Available since API level 27.
+ */
+enum { ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES = 128 };
+
+/**
+ * For {@link ANeuralNetworksCompilation_setCaching}, specify the size
+ * of the cache token required from the application. The size is in bytes.
+ *
+ * Available since API level 29.
+ */
+enum { ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN = 32 };
+
+/**
+ * ANeuralNetworksMemory is an opaque type that represents memory.
+ *
+ * This type is used to represent shared memory, memory mapped files,
+ * and similar memories.
+ *
+ * By using shared memory, a program can efficiently communicate to the
+ * runtime and drivers the tensors that define a model. See
+ * {@link ANeuralNetworksModel_setOperandValueFromMemory}. An application
+ * should typically create one shared memory object that contains every constant tensor
+ * needed to define a model. {@link ANeuralNetworksMemory_createFromFd} can be used to
+ * create shared memory from a file handle.
+ * {@link ANeuralNetworksMemory_createFromAHardwareBuffer} can be used to
+ * create shared memory from an AHardwareBuffer handle.
+ *
+ * Memory objects can also be used to specify the input and output arguments of
+ * an execution. See {@link ANeuralNetworksExecution_setInputFromMemory}
+ * and {@link ANeuralNetworksExecution_setOutputFromMemory}.
+ *
+ * When calling {@link ANeuralNetworksModel_setOperandValueFromMemory},
+ * {@link ANeuralNetworksExecution_setInputFromMemory} and
+ * {@link ANeuralNetworksExecution_setOutputFromMemory}, each operand in the shared
+ * memory object must be aligned on a boundary that is a multiple of the
+ * element type's byte size, e.g., a tensor with
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT32} type must be aligned on a 4-byte boundary.
+ *
+ * Available since API level 27.
+ */
+typedef struct ANeuralNetworksMemory ANeuralNetworksMemory;
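+
+/*
+ * Example (illustrative sketch; error handling elided): backing all constant
+ * tensors of a model with one shared memory object. "weights.bin", fileSize,
+ * operandIndex, offset, and length are hypothetical placeholders.
+ *
+ *   #include <fcntl.h>
+ *   #include <sys/mman.h>
+ *
+ *   int fd = open("weights.bin", O_RDONLY);
+ *   ANeuralNetworksMemory* mem = NULL;
+ *   ANeuralNetworksMemory_createFromFd(fileSize, PROT_READ, fd, 0, &mem);
+ *   // Each constant operand is then defined as a slice of this memory:
+ *   ANeuralNetworksModel_setOperandValueFromMemory(model, operandIndex, mem,
+ *                                                  offset, length);
+ */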
+
+/**
+ * ANeuralNetworksModel is an opaque type that contains a description of the
+ * mathematical operations that constitute the model.
+ *
+ * <p>Build the model by calling<ul>
+ * <li>{@link ANeuralNetworksModel_create}</li>
+ * <li>{@link ANeuralNetworksModel_addOperation}</li>
+ * <li>{@link ANeuralNetworksModel_addOperand}</li>
+ * </ul>
+ *
+ * This forms a graph in which each operation and operand is a node, a
+ * directed edge from an operand to an operation indicates that the
+ * operand is an input to the operation, and a directed edge from an
+ * operation to an operand indicates that the operand is an output
+ * from the operation. This graph must be acyclic.
+ *
+ * A model is completed by calling {@link ANeuralNetworksModel_finish}.
+ * A model is destroyed by calling {@link ANeuralNetworksModel_free}.
+ *
+ * <p>A model cannot be modified once {@link ANeuralNetworksModel_finish}
+ * has been called on it.</p>
+ *
+ * <p>It is the application's responsibility to make sure that only one thread
+ * modifies a model at a given time. It is however safe for more than one
+ * thread to use the model once {@link ANeuralNetworksModel_finish} has returned.</p>
+ *
+ * <p>It is also the application's responsibility to ensure that there are no other
+ * uses of the model after calling {@link ANeuralNetworksModel_free}.
+ * This includes any compilation or execution object created using the model.</p>
+ *
+ * Available since API level 27.
+ */
+typedef struct ANeuralNetworksModel ANeuralNetworksModel;
+
+/**
+ * ANeuralNetworksCompilation is an opaque type that can be used to compile
+ * a machine learning model.
+ *
+ * <p>To use:<ul>
+ * <li>Create a new compilation instance by calling the
+ * {@link ANeuralNetworksCompilation_create} function or
+ * {@link ANeuralNetworksCompilation_createForDevices}.</li>
+ * <li>Set any desired properties on the compilation (for example,
+ * {@link ANeuralNetworksCompilation_setPreference}).</li>
+ * <li>Optionally, set the caching signature and the cache directory on the
+ * compilation by calling {@link ANeuralNetworksCompilation_setCaching}.</li>
+ * <li>Complete the compilation with {@link ANeuralNetworksCompilation_finish}.</li>
+ * <li>Use the compilation as many times as needed
+ * with {@link ANeuralNetworksExecution_create} and
+ * {@link ANeuralNetworksBurst_create}.</li>
+ * <li>Destroy the compilation with {@link ANeuralNetworksCompilation_free}
+ * once all executions using the compilation have completed.</li></ul></p>
+ *
+ * A compilation is completed by calling {@link ANeuralNetworksCompilation_finish}.
+ * A compilation is destroyed by calling {@link ANeuralNetworksCompilation_free}.
+ *
+ * <p>A compilation cannot be modified once {@link ANeuralNetworksCompilation_finish}
+ * has been called on it.</p>
+ *
+ * <p>It is the application's responsibility to make sure that only
+ * one thread modifies a compilation at a given time. It is however
+ * safe for more than one thread to use the compilation once
+ * {@link ANeuralNetworksCompilation_finish} has returned.</p>
+ *
+ * <p>It is also the application's responsibility to ensure that there are no other
+ * uses of the compilation after calling {@link ANeuralNetworksCompilation_free}.
+ * This includes any execution object created using the compilation.</p>
+ *
+ * Available since API level 27.
+ */
+typedef struct ANeuralNetworksCompilation ANeuralNetworksCompilation;
+
+/**
+ * ANeuralNetworksExecution is an opaque type that can be used to apply a machine
+ * learning model to a set of inputs.
+ *
+ * <p>To use:<ul>
+ * <li>Create a new execution instance by calling the
+ * {@link ANeuralNetworksExecution_create} function.</li>
+ * <li>Associate input buffers or memory regions to the model inputs with
+ * {@link ANeuralNetworksExecution_setInput} or
+ * {@link ANeuralNetworksExecution_setInputFromMemory}.</li>
+ * <li>Associate output buffers or memory regions to the model outputs with
+ * {@link ANeuralNetworksExecution_setOutput} or
+ * {@link ANeuralNetworksExecution_setOutputFromMemory}.</li>
+ * <li>Apply the model with one of the following:</li><ul>
+ * <li>Asynchronously with {@link ANeuralNetworksExecution_startCompute},
+ * waiting for the execution to complete with
+ * {@link ANeuralNetworksEvent_wait}.</li>
+ * <li>Synchronously with {@link ANeuralNetworksExecution_compute}.</li>
+ * <li>Synchronously as part of an execution burst with
+ * {@link ANeuralNetworksExecution_burstCompute}.</li></ul>
+ * <li>Destroy the execution with
+ * {@link ANeuralNetworksExecution_free}.</li></ul></p>
+ *
+ * <p>An output buffer or memory region must not overlap with any
+ * other output buffer or memory region, with an input buffer or
+ * memory region, or with an operand value in a memory object
+ * ({@link ANeuralNetworksModel_setOperandValueFromMemory}).</p>
+ *
+ * <p>An execution cannot be modified once
+ * {@link ANeuralNetworksExecution_compute} or
+ * {@link ANeuralNetworksExecution_startCompute} has been called on it.</p>
+ *
+ * <p>An execution can be applied to a model with
+ * {@link ANeuralNetworksExecution_compute} or
+ * {@link ANeuralNetworksExecution_startCompute} only once. Create new
+ * executions to do new evaluations of the model.</p>
+ *
+ * <p>It is the application's responsibility to make sure that only one thread
+ * modifies an execution at a given time. It is however safe for more than one
+ * thread to use {@link ANeuralNetworksEvent_wait} at the same time.</p>
+ *
+ * <p>It is also the application's responsibility to ensure that there are no other
+ * uses of the execution after calling {@link ANeuralNetworksExecution_free}.</p>
+ *
+ * <p>Multiple executions can be scheduled and evaluated concurrently, either by
+ * means of {@link ANeuralNetworksExecution_compute} (which is synchronous) in
+ * different threads or by means of
+ * {@link ANeuralNetworksExecution_startCompute} (which is asynchronous). The
+ * runtime makes no guarantee on the ordering of completion of executions. If
+ * it's important to the application, the application should enforce the
+ * ordering by ensuring that one execution completes before the next is
+ * scheduled (for example, by scheduling all executions synchronously within a
+ * single thread, or by scheduling all executions asynchronously and using
+ * {@link ANeuralNetworksEvent_wait} between calls to
+ * {@link ANeuralNetworksExecution_startCompute}).</p>
+ *
+ * Available since API level 27.
+ */
+typedef struct ANeuralNetworksExecution ANeuralNetworksExecution;
+
+#if __ANDROID_API__ >= __ANDROID_API_Q__
+/**
+ * Parameters for ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL operand.
+ */
+typedef struct ANeuralNetworksSymmPerChannelQuantParams {
+ /** The index of the channel dimension. */
+ uint32_t channelDim;
+ /** The size of the scale array. Should be equal to dimension[channelDim] of the Operand. */
+ uint32_t scaleCount;
+ /** The array of scaling values for each channel. Each value must be greater than zero. */
+ const float* scales;
+} ANeuralNetworksSymmPerChannelQuantParams;
+
+/**
+ * ANeuralNetworksBurst is an opaque type that can be used to reduce the latency
+ * of a rapid sequence of executions. It will likely cause overhead if only used
+ * for a single execution.
+ *
+ * ANeuralNetworksBurst serves as a context object for any number of inferences
+ * using {@link ANeuralNetworksExecution} objects. An ANeuralNetworksBurst
+ * object and the {@link ANeuralNetworksExecution} objects used with it must all
+ * have been created from the same {@link ANeuralNetworksCompilation} object.
+ *
+ * This object is also used as a hint to drivers, providing insight to the
+ * lifetime of a rapid sequence of executions. For example, a driver may choose
+ * to increase the clock frequency of its accelerator for the lifetime of a
+ * burst object.
+ *
+ * <p>To use:<ul>
+ * <li>Create a new burst object by calling the
+ * {@link ANeuralNetworksBurst_create} function.</li>
+ * <li>For each execution:</li><ul>
+ * <li>Create {@link ANeuralNetworksExecution} and configure its
+ * properties (see {@link ANeuralNetworksExecution} for details).</li>
+ * <li>Apply the model synchronously with
+ * {@link ANeuralNetworksExecution_burstCompute}, reusing the same
+ * {@link ANeuralNetworksBurst} with the new
+ * {@link ANeuralNetworksExecution}.</li>
+ * <li>Use and free the {@link ANeuralNetworksExecution}.</li></ul>
+ * <li>Destroy the burst with
+ * {@link ANeuralNetworksBurst_free}.</li></ul></p>
+ *
+ * Available since API level 29.
+ */
+typedef struct ANeuralNetworksBurst ANeuralNetworksBurst;
+#endif // __ANDROID_API__ >= __ANDROID_API_Q__
+
+/**
+ * ANeuralNetworksOperandType describes the type of an operand.
+ *
+ * This structure is used to describe both scalars and tensors.
+ *
+ * A tensor operand type with all dimensions specified is "fully
+ * specified". Whenever possible (i.e., whenever the dimensions are
+ * known at model construction time), a tensor operand type should be
+ * (but is not required to be) fully specified, in order to enable the
+ * best possible performance.
+ *
+ * If a tensor operand's type is not fully specified, the dimensions
+ * of the operand are deduced from the operand types and values of the
+ * operation for which that operand is an output.
+ *
+ * <p>In the following situations, a tensor operand type must be fully
+ * specified:<ul>
+ * <li>The operand has a constant value, set by
+ * {@link ANeuralNetworksModel_setOperandValue} (with a
+ * non-nullptr buffer) or
+ * {@link ANeuralNetworksModel_setOperandValueFromMemory}.</li>
+ * <li>The operand is a model input (see
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}). A
+ * fully specified tensor operand type must either be provided
+ * to {@link ANeuralNetworksModel_addOperand}; or it must be
+ * provided to the corresponding
+ * {@link ANeuralNetworksExecution_setInput}, or
+ * {@link ANeuralNetworksExecution_setInputFromMemory}.
+ * EXCEPTION: If the input is optional and omitted
+ * (by passing nullptr for buffer to
+ * {@link ANeuralNetworksExecution_setInput}) then it need
+ * not have a fully specified tensor operand type.</li></ul>
+ *
+ * A tensor operand type of specified rank but some number of
+ * unspecified dimensions is represented by setting dimensionCount to
+ * the rank and each unspecified dimension to 0.
+ *
+ * Available since API level 27.
+ *
+ * Starting at API level 29, a tensor operand type of unspecified rank is
+ * represented by setting dimensionCount to 0 and dimensions to NULL (just as if
+ * it were a scalar operand type).
+ */
+typedef struct ANeuralNetworksOperandType {
+ /**
+ * The data type, e.g., ANEURALNETWORKS_FLOAT32.
+ */
+ int32_t type;
+
+ /**
+ * The number of dimensions (rank).
+ *
+ * Must be 0 for scalars.
+ */
+ uint32_t dimensionCount;
+
+ /**
+ * The dimensions of the tensor.
+ *
+ * Must be nullptr for scalars.
+ */
+ const uint32_t* dimensions;
+
+ /**
+ * These two fields are only used for quantized tensors.
+ * They must be zero for all other types.
+ * The dequantized value of each entry is (value - zeroPoint) * scale.
+ */
+ float scale;
+ int32_t zeroPoint;
+} ANeuralNetworksOperandType;
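+
+/*
+ * Example (illustrative sketch): a fully specified 2 x 3 float tensor type and
+ * an INT32 scalar type. The dims array only needs to stay valid for the
+ * duration of the {@link ANeuralNetworksModel_addOperand} call.
+ *
+ *   uint32_t dims[2] = {2, 3};
+ *   ANeuralNetworksOperandType tensorType = {
+ *       .type = ANEURALNETWORKS_TENSOR_FLOAT32,
+ *       .dimensionCount = 2,
+ *       .dimensions = dims,
+ *       .scale = 0.0f,   // must be zero for non-quantized types
+ *       .zeroPoint = 0,  // must be zero for non-quantized types
+ *   };
+ *   ANeuralNetworksOperandType scalarType = {
+ *       .type = ANEURALNETWORKS_INT32, .dimensionCount = 0,
+ *       .dimensions = NULL, .scale = 0.0f, .zeroPoint = 0};
+ */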
+
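+/**
+ * Aliasing to {@link OperationCode}, used in function
+ * {@link ANeuralNetworksModel_addOperation}.
+ */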
+typedef int32_t ANeuralNetworksOperationType;
+
+/**
+ * ANeuralNetworksEvent is an opaque type that represents an event
+ * that will be signaled once an execution completes.
+ *
+ * Available since API level 27.
+ */
+typedef struct ANeuralNetworksEvent ANeuralNetworksEvent;
+
+#if __ANDROID_API__ >= __ANDROID_API_Q__
+
+/**
+ * ANeuralNetworksDevice is an opaque type that represents a device.
+ *
+ * This type is used to query basic properties and supported operations of the corresponding
+ * device, and control which device(s) a model is to be run on.
+ *
+ * Available since API level 29.
+ */
+typedef struct ANeuralNetworksDevice ANeuralNetworksDevice;
+
+/**
+ * Get the number of available devices.
+ *
+ * @param numDevices Used to return the number of devices.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworks_getDeviceCount(uint32_t* numDevices) __INTRODUCED_IN(29);
+
+/**
+ * Get the representation of the specified device.
+ *
+ * @param devIndex The index of the specified device. Must be less than the
+ *                 number of available devices.
+ * @param device The representation of the specified device.
+ * The same representation will always be returned for the specified
+ * device.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworks_getDevice(uint32_t devIndex, ANeuralNetworksDevice** device)
+ __INTRODUCED_IN(29);
+
+/**
+ * Get the name of the specified device.
+ *
+ * @param device The representation of the specified device.
+ * @param name The returned name of the specified device. The name will be in UTF-8
+ * and will be null-terminated. It will be recognizable as a known device name
+ * rather than a cryptic string. For devices with feature level 29 and above, the
+ * format of the name is {VENDOR}-{DEVICE}. For devices with feature level 28
+ * or lower, the format of the name is undefined.
+ * The name will remain valid for the duration of the application.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksDevice_getName(const ANeuralNetworksDevice* device, const char** name)
+ __INTRODUCED_IN(29);
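+
+/*
+ * Example (illustrative sketch; error handling elided): enumerating all
+ * available devices and printing their names.
+ *
+ *   #include <stdio.h>
+ *
+ *   uint32_t count = 0;
+ *   ANeuralNetworks_getDeviceCount(&count);
+ *   for (uint32_t i = 0; i < count; ++i) {
+ *     ANeuralNetworksDevice* device = NULL;
+ *     const char* name = NULL;
+ *     ANeuralNetworks_getDevice(i, &device);
+ *     ANeuralNetworksDevice_getName(device, &name);
+ *     printf("device %u: %s\n", i, name);
+ *   }
+ */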
+
+/**
+ * Get the type of a given device.
+ *
+ * The device type can be used to help application developers distribute Machine
+ * Learning workloads and other workloads such as graphical rendering.
+ * E.g., for an app which renders AR scenes based on real-time object detection
+ * results, the developer could choose an ACCELERATOR type device for ML workloads,
+ * and reserve the GPU for graphical rendering.
+ *
+ * @param device The representation of the specified device.
+ * @param type The returned {@link DeviceTypeCode} of the specified device.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksDevice_getType(const ANeuralNetworksDevice* device, int32_t* type)
+ __INTRODUCED_IN(29);
+
+/**
+ * Get the version of the driver implementation of the specified device.
+ *
+ * It is the responsibility of the driver implementor to ensure that this version
+ * string uniquely distinguishes this implementation from all previous implementations.
+ *
+ * This version string must not be confused with the feature level which is solely defined
+ * by {@link ANeuralNetworksDevice_getFeatureLevel}. There is no implicit ordering of the versions.
+ * For example, it is not possible to filter all drivers older than a certain version.
+ *
+ * Application developers may use this version string to avoid or prefer specific driver
+ * implementations. For example, an application may want to do so because:
+ * - A specific version of the driver does not provide the required performance,
+ * perhaps because of a performance regression.
+ * - A specific version of the driver has a bug or returns results that don't match
+ * the minimum precision requirement for the application.
+ *
+ * @param device The representation of the specified device.
+ * @param version The returned version string of the driver for the specified device. The
+ * string will be in UTF-8 and will be null-terminated. For devices with feature
+ * level 28 or lower, "UNKNOWN" will be returned. The version string will remain
+ * valid for the duration of the application.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksDevice_getVersion(const ANeuralNetworksDevice* device, const char** version)
+ __INTRODUCED_IN(29);
+
+/**
+ * Get the supported NNAPI version of the specified device.
+ *
+ * Each device has a supported feature level, which is the most advanced feature this driver
+ * implements. For example, if the driver implements the features introduced in Android P,
+ * but does not implement the features introduced after Android P, the value would be 28.
+ * Developers could decide whether or not the specified device should be used for a Model that
+ * has certain feature requirements.
+ *
+ * @param device The representation of the specified device.
+ * @param featureLevel The API level of the most advanced feature this driver implements.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksDevice_getFeatureLevel(const ANeuralNetworksDevice* device,
+ int64_t* featureLevel) __INTRODUCED_IN(29);
+
+/**
+ * Get the supported operations for a specified set of devices. If multiple devices
+ * are selected, the supported operation list is a union of supported operations of all
+ * selected devices.
+ *
+ * @param model The model to be queried.
+ * @param devices The set of devices. Must not contain duplicates.
+ * @param numDevices The number of devices in the set.
+ * @param supportedOps The boolean array to be filled. True means supported. The size of the
+ * boolean array must be at least as large as the number of operations
+ * in the model. The order of elements in the supportedOps array matches
+ * the order in which the corresponding operations were added to the model.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksModel_getSupportedOperationsForDevices(
+ const ANeuralNetworksModel* model, const ANeuralNetworksDevice* const* devices,
+ uint32_t numDevices, bool* supportedOps) __INTRODUCED_IN(29);
+
+/**
+ * Create a {@link ANeuralNetworksCompilation} to compile the given model for a specified set
+ * of devices. If more than one device is specified, the compilation will
+ * distribute the workload automatically across the devices. The model must be fully
+ * supported by the specified set of devices. This means that
+ * ANeuralNetworksModel_getSupportedOperationsForDevices() must have returned true for every
+ * operation for that model/devices pair.
+ *
+ * The user must handle all compilation and execution failures from the
+ * specified set of devices. This is in contrast to a use of {@link
+ * ANeuralNetworksCompilation_create}, where the runtime will attempt to recover
+ * from such failures.
+ *
+ * @param model The {@link ANeuralNetworksModel} to be compiled.
+ * @param devices The set of devices. Must not contain duplicates.
+ * @param numDevices The number of devices in the set.
+ * @param compilation The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the model is invalid.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksCompilation_createForDevices(ANeuralNetworksModel* model,
+ const ANeuralNetworksDevice* const* devices,
+ uint32_t numDevices,
+ ANeuralNetworksCompilation** compilation)
+ __INTRODUCED_IN(29);
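+
+/*
+ * Example (illustrative sketch; assumes "model" is a finished model with
+ * opCount operations and "device" was obtained via ANeuralNetworks_getDevice;
+ * error handling elided):
+ *
+ *   const ANeuralNetworksDevice* const devices[1] = {device};
+ *   bool supported[opCount];  // C99 variable-length array
+ *   ANeuralNetworksModel_getSupportedOperationsForDevices(model, devices, 1,
+ *                                                         supported);
+ *   bool allSupported = true;
+ *   for (uint32_t i = 0; i < opCount; ++i)
+ *     allSupported = allSupported && supported[i];
+ *
+ *   ANeuralNetworksCompilation* compilation = NULL;
+ *   if (allSupported)
+ *     ANeuralNetworksCompilation_createForDevices(model, devices, 1,
+ *                                                 &compilation);
+ */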
+
+/**
+ * Sets the compilation caching signature and the cache directory.
+ *
+ * Provides optional caching information to the runtime for faster repeated
+ * compilation.
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
+ *
+ * @param compilation The compilation to be modified.
+ * @param cacheDir The cache directory for the runtime to store and retrieve caching
+ * data. It is recommended to use the code cache directory provided
+ * by the Android runtime. If not using the code cache directory, the
+ * user should choose a directory local to the application, and is
+ *                 responsible for managing the cache entries.
+ * @param token The token provided by the user to identify a model. It must be of length
+ * ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN. The user should ensure that
+ * the token is unique to a model within the application. The NNAPI
+ * runtime cannot detect token collisions; a collision will result in a
+ * failed execution or in a successful execution that produces incorrect
+ * output values.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksCompilation_setCaching(ANeuralNetworksCompilation* compilation,
+ const char* cacheDir, const uint8_t* token)
+ __INTRODUCED_IN(29);
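+
+/*
+ * Example (illustrative sketch; appCodeCacheDir and the token value are
+ * hypothetical, application-chosen placeholders):
+ *
+ *   uint8_t token[ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN] = {0};
+ *   token[0] = 1;  // must uniquely identify this model within the app
+ *   ANeuralNetworksCompilation_setCaching(compilation, appCodeCacheDir, token);
+ */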
+
+/**
+ * Schedule synchronous evaluation of the execution.
+ *
+ * <p>Schedules synchronous evaluation of the execution. Returns once the
+ * execution has completed and the outputs are ready to be consumed.
+ * </p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * See {@link ANeuralNetworksExecution_startCompute} for asynchronous execution.
+ * Synchronous execution incurs lower overhead than asynchronous execution.
+ *
+ * Available since API level 29.
+ *
+ * @param execution The execution to be scheduled and executed.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the execution completed normally.
+ * ANEURALNETWORKS_UNMAPPABLE if the execution input or output memory cannot
+ * be properly mapped.
+ */
+int ANeuralNetworksExecution_compute(ANeuralNetworksExecution* execution) __INTRODUCED_IN(29);
+
+/**
+ * Get the dimensional information of the specified output operand of the model of the
+ * {@link ANeuralNetworksExecution}.
+ *
+ * On asynchronous execution initiated by {@link ANeuralNetworksExecution_startCompute},
+ * {@link ANeuralNetworksEvent_wait} must be called prior to this function to reclaim
+ * the resources used by the execution.
+ *
+ * @param execution The execution to be queried.
+ * @param index The index of the output argument we are querying. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link ANeuralNetworksModel_addOperand}.
+ * @param rank The rank of the output operand.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE
+ * if the target output is provided an insufficient buffer at execution time,
+ * ANEURALNETWORKS_BAD_DATA if the index is invalid.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksExecution_getOutputOperandRank(ANeuralNetworksExecution* execution,
+ int32_t index, uint32_t* rank)
+ __INTRODUCED_IN(29);
+
+/**
+ * Get the dimensional information of the specified output operand of the model of the
+ * {@link ANeuralNetworksExecution}. The target output operand cannot be a scalar.
+ *
+ * On asynchronous execution initiated by {@link ANeuralNetworksExecution_startCompute},
+ * {@link ANeuralNetworksEvent_wait} must be called prior to this function to reclaim
+ * the resources used by the execution.
+ *
+ * @param execution The execution to be queried.
+ * @param index The index of the output argument we are querying. It is an index into the lists
+ * passed to {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link ANeuralNetworksModel_addOperand}.
+ * @param dimensions The dimension array to be filled. The size of the array must be exactly as
+ * large as the rank of the output operand to be queried in the model.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE
+ * if the target output is provided an insufficient buffer at execution time,
+ * ANEURALNETWORKS_BAD_DATA if the index is invalid or if the target is a scalar.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksExecution_getOutputOperandDimensions(ANeuralNetworksExecution* execution,
+ int32_t index, uint32_t* dimensions)
+ __INTRODUCED_IN(29);
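+
+/*
+ * Example (illustrative sketch): querying the actual output shape after a
+ * completed execution, for output 0 whose dimensions were left unspecified.
+ *
+ *   uint32_t rank = 0;
+ *   ANeuralNetworksExecution_getOutputOperandRank(execution, 0, &rank);
+ *   uint32_t dims[rank];  // C99 variable-length array
+ *   ANeuralNetworksExecution_getOutputOperandDimensions(execution, 0, dims);
+ */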
+
+/**
+ * Create a {@link ANeuralNetworksBurst} to apply the given compilation.
+ * This only creates the burst object. Computation is only performed once
+ * {@link ANeuralNetworksExecution_burstCompute} is invoked with a valid
+ * {@link ANeuralNetworksExecution} and {@link ANeuralNetworksBurst}.
+ *
+ * <p>The provided compilation must outlive the burst object.</p>
+ *
+ * Available since API level 29.
+ *
+ * @param compilation The {@link ANeuralNetworksCompilation} to be evaluated.
+ * @param burst The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the compilation is invalid.
+ */
+int ANeuralNetworksBurst_create(ANeuralNetworksCompilation* compilation,
+ ANeuralNetworksBurst** burst) __INTRODUCED_IN(29);
+
+/**
+ * Destroys the burst object.
+ *
+ * Available since API level 29.
+ *
+ * @param burst The burst object to be destroyed. Passing NULL is acceptable and
+ * results in no operation.
+ */
+void ANeuralNetworksBurst_free(ANeuralNetworksBurst* burst) __INTRODUCED_IN(29);
+
+/**
+ * Schedule synchronous evaluation of the execution on a burst object.
+ *
+ * <p>Schedules synchronous evaluation of the execution. Returns once the
+ * execution has completed and the outputs are ready to be consumed.</p>
+ *
+ * <p>There must be at most one {@link ANeuralNetworksExecution} processing at
+ * any given time for any given burst object. Any
+ * {@link ANeuralNetworksExecution} launched before the previous has finished
+ * will result in ANEURALNETWORKS_BAD_STATE.</p>
+ *
+ * Available since API level 29.
+ *
+ * @param burst The burst object to execute on.
+ * @param execution The execution to be scheduled and executed. The execution
+ * must be created from the same {@link
+ * ANeuralNetworksCompilation} as the burst object.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the execution completed normally.
+ */
+int ANeuralNetworksExecution_burstCompute(ANeuralNetworksExecution* execution,
+ ANeuralNetworksBurst* burst) __INTRODUCED_IN(29);
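+
+/*
+ * Example (illustrative sketch; per-frame input/output setup and error
+ * handling elided):
+ *
+ *   ANeuralNetworksBurst* burst = NULL;
+ *   ANeuralNetworksBurst_create(compilation, &burst);
+ *   for (int frame = 0; frame < numFrames; ++frame) {
+ *     ANeuralNetworksExecution* execution = NULL;
+ *     ANeuralNetworksExecution_create(compilation, &execution);
+ *     // ... set the inputs and outputs for this frame ...
+ *     ANeuralNetworksExecution_burstCompute(execution, burst);
+ *     ANeuralNetworksExecution_free(execution);
+ *   }
+ *   ANeuralNetworksBurst_free(burst);
+ */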
+
+/**
+ * Creates a shared memory object from an AHardwareBuffer handle.
+ *
+ * If the shared memory is backed by an AHardwareBuffer of AHARDWAREBUFFER_FORMAT_BLOB
+ * format, it can be used the same way as shared memory created from a file handle. See
+ * {@link ANeuralNetworksMemory} for a description on how to use this shared memory.
+ *
+ * If the shared memory is backed by an AHardwareBuffer of a format other than
+ * AHARDWAREBUFFER_FORMAT_BLOB, it can only be used for Model inputs and outputs.
+ * When calling {@link ANeuralNetworksExecution_setInputFromMemory} or
+ * {@link ANeuralNetworksExecution_setOutputFromMemory} with the shared memory, both
+ * offset and length must be set to zero and the entire memory region will be
+ * associated with the specified input or output operand. There is no guarantee
+ * that an arbitrary AHardwareBuffer_Format and AHardwareBuffer_UsageFlags combination
+ * can be used by arbitrary devices. The execution will fail if the selected set of
+ * cannot consume the buffer.
+ *
+ * Calling {@link ANeuralNetworksModel_setOperandValueFromMemory} with shared memory
+ * backed by an AHardwareBuffer of a format other than AHARDWAREBUFFER_FORMAT_BLOB is
+ * disallowed.
+ *
+ * TODO(miaowang): add documentation about intended usage with introspection API.
+ *
+ * Available since API level 29.
+ *
+ * @param ahwb The AHardwareBuffer handle.
+ * @param memory The memory object to be created.
+ * Set to NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the request completed normally.
+ *
+ * @see AHardwareBuffer
+ */
+int ANeuralNetworksMemory_createFromAHardwareBuffer(const AHardwareBuffer* ahwb,
+ ANeuralNetworksMemory** memory)
+ __INTRODUCED_IN(29);
+
+/**
+ * Specifies whether duration of the {@link ANeuralNetworksExecution} is to be
+ * measured. Evaluation of the execution must not have been scheduled.
+ *
+ * By default, duration is not measured.
+ *
+ * The {@link ANeuralNetworksExecution} must have been created with
+ * {@link ANeuralNetworksCompilation_createForDevices} with numDevices = 1.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 29.
+ *
+ * @param execution The execution to be modified.
+ * @param measure 'true' if duration is to be measured, 'false' if not.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksExecution_setMeasureTiming(ANeuralNetworksExecution* execution, bool measure)
+ __INTRODUCED_IN(29);
+
+/**
+ * Different duration measurements.
+ *
+ * Durations are measured in nanoseconds.
+ *
+ * Available since API level 29.
+ */
+typedef enum {
+ /** Execution time on hardware (not driver, which runs on host processor). */
+ ANEURALNETWORKS_DURATION_ON_HARDWARE = 0,
+ /**
+ * Execution time in driver (including time on hardware). Excludes overhead
+ * such as that of the runtime itself and the IPC needed for the runtime to
+ * communicate with the driver.
+ */
+ ANEURALNETWORKS_DURATION_IN_DRIVER = 1,
+} DurationCode;
+
+/**
+ * Get the time spent in the specified {@link ANeuralNetworksExecution}, in nanoseconds.
+ * The execution must have completed.
+ *
+ * Available since API level 29.
+ *
+ * @param execution The execution to be queried.
+ * @param durationCode The measurement to be queried, specified by {@link DurationCode}.
+ * @param duration The returned duration. If no measurement was requested by
+ * {@link ANeuralNetworksExecution_setMeasureTiming}, or for some other
+ * reason the duration is not available, UINT64_MAX will be returned.
+ * A particular device need not support any given measurement.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksExecution_getDuration(const ANeuralNetworksExecution* execution,
+ int32_t durationCode, uint64_t* duration)
+ __INTRODUCED_IN(29);
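+
+/*
+ * Example (illustrative sketch): measuring hardware and driver time for one
+ * execution. The compilation must have been created with
+ * ANeuralNetworksCompilation_createForDevices and numDevices = 1.
+ *
+ *   ANeuralNetworksExecution_setMeasureTiming(execution, true);
+ *   ANeuralNetworksExecution_compute(execution);
+ *   uint64_t onHardware = 0, inDriver = 0;
+ *   ANeuralNetworksExecution_getDuration(
+ *       execution, ANEURALNETWORKS_DURATION_ON_HARDWARE, &onHardware);
+ *   ANeuralNetworksExecution_getDuration(
+ *       execution, ANEURALNETWORKS_DURATION_IN_DRIVER, &inDriver);
+ *   // UINT64_MAX indicates the measurement is not available.
+ */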
+
+#endif // __ANDROID_API__ >= __ANDROID_API_Q__
+
+#if __ANDROID_API__ >= 27
+
+/**
+ * Creates a shared memory object from a file descriptor.
+ *
+ * The shared memory is backed by a file descriptor via mmap.
+ * See {@link ANeuralNetworksMemory} for a description on how to use
+ * this shared memory.
+ *
+ * Available since API level 27.
+ *
+ * @param size The requested size in bytes.
+ * Must not be larger than the file size.
+ * @param protect The desired memory protection for the mapping.
+ * It is either PROT_NONE or the bitwise OR of one or
+ * more of the following flags: PROT_READ, PROT_WRITE.
+ * @param fd The requested file descriptor.
+ * The file descriptor has to be mmap-able. The file
+ * descriptor will be duplicated.
+ * @param offset The offset to the beginning of the file of the area to map.
+ * The offset has to be aligned to a page size.
+ * @param memory The memory object to be created.
+ * Set to NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the request completed normally.
+ */
+int ANeuralNetworksMemory_createFromFd(size_t size, int protect, int fd, size_t offset,
+ ANeuralNetworksMemory** memory) __INTRODUCED_IN(27);
+
+/**
+ * Delete a memory object.
+ *
+ * Destroys the object used by the runtime to keep track of the memory.
+ * This will free the underlying actual memory if no other code has open
+ * handles to this memory.
+ *
+ * Available since API level 27.
+ *
+ * @param memory The memory object to be freed.
+ */
+void ANeuralNetworksMemory_free(ANeuralNetworksMemory* memory) __INTRODUCED_IN(27);
+
+/**
+ * Create an empty {@link ANeuralNetworksModel}.
+ *
+ * <p>This only creates the object. Computation is performed once
+ * {@link ANeuralNetworksExecution_compute} or
+ * {@link ANeuralNetworksExecution_startCompute} is invoked.
+ *
+ * The model should be constructed with calls to
+ * {@link ANeuralNetworksModel_addOperation} and
+ * {@link ANeuralNetworksModel_addOperand}.
+ *
+ * <p>{@link ANeuralNetworksModel_finish} should be called once the model
+ * has been fully constructed.</p>
+ *
+ * <p>{@link ANeuralNetworksModel_free} should be called once the model
+ * is no longer needed.</p>
+ *
+ * Available since API level 27.
+ *
+ * @param model The {@link ANeuralNetworksModel} to be created.
+ * Set to NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_create(ANeuralNetworksModel** model) __INTRODUCED_IN(27);
+
+/**
+ * Destroy a model.
+ *
+ * The model need not have been finished by a call to
+ * {@link ANeuralNetworksModel_finish}.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param model The model to be destroyed. Passing NULL is acceptable and
+ * results in no operation.
+ */
+void ANeuralNetworksModel_free(ANeuralNetworksModel* model) __INTRODUCED_IN(27);
+
+/**
+ * Indicate that we have finished modifying a model. Required before
+ * calling {@link ANeuralNetworksCompilation_create} and
+ * {@link ANeuralNetworksCompilation_createForDevices}.
+ *
+ * An application must ensure that no other thread uses
+ * the model at the same time.
+ *
+ * This function must only be called once for a given model.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param model The model to be finished.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_finish(ANeuralNetworksModel* model) __INTRODUCED_IN(27);
+
+/**
+ * Add an operand to a model.
+ *
+ * The order in which the operands are added is important. The first one added
+ * to a model will have the index value 0, the second 1, etc. These indexes are
+ * used as operand identifiers in
+ * {@link ANeuralNetworksModel_addOperation},
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs},
+ * {@link ANeuralNetworksModel_setOperandValue},
+ * {@link ANeuralNetworksModel_setOperandValueFromMemory},
+ * {@link ANeuralNetworksExecution_setInput},
+ * {@link ANeuralNetworksExecution_setInputFromMemory},
+ * {@link ANeuralNetworksExecution_setOutput} and
+ * {@link ANeuralNetworksExecution_setOutputFromMemory}.
+ *
+ * <p>Every operand must be referenced in exactly one of the following
+ * ways:<ul>
+ * <li>It is identified as a model input with
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}.</li>
+ * <li>It is identified as a constant with
+ * {@link ANeuralNetworksModel_setOperandValue} or
+ * {@link ANeuralNetworksModel_setOperandValueFromMemory}.</li>
+ * <li>It is identified as an output of exactly one operation with
+ * {@link ANeuralNetworksModel_addOperation}.</li></ul></p>
+ * <p>An operand that is identified as a model input or as a constant
+ * must not also be identified as a model output with
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}.</p>
+ *
+ * To build a model that can accommodate inputs of various sizes, as
+ * you may want to do for a CNN, leave unspecified the dimensions that
+ * will vary at run time. If you do so, fully specify dimensions
+ * when calling {@link ANeuralNetworksExecution_setInput} or
+ * {@link ANeuralNetworksExecution_setInputFromMemory}.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
+ * called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param model The model to be modified.
+ * @param type The {@link ANeuralNetworksOperandType} that describes the shape
+ * of the operand. Neither the {@link ANeuralNetworksOperandType}
+ * nor the dimensions it points to need to outlive the call to
+ * {@link ANeuralNetworksModel_addOperand}.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_addOperand(ANeuralNetworksModel* model,
+ const ANeuralNetworksOperandType* type) __INTRODUCED_IN(27);
+
+/**
+ * Sets an operand to a constant value.
+ *
+ * Values of length smaller or equal to
+ * {@link ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES}
+ * are immediately copied into the model.
+ *
+ * For values of length greater than {@link ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES},
+ * a pointer to the buffer is stored within the model. The application is responsible
+ * for not changing the content of this region until all executions using this model
+ * have completed. As the data may be copied during processing, modifying the data
+ * after this call yields undefined results.
+ *
+ * For large tensors, using {@link ANeuralNetworksModel_setOperandValueFromMemory}
+ * is likely to be more efficient.
+ *
+ * To indicate that an optional operand should be considered missing,
+ * pass nullptr for buffer and 0 for length.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
+ * called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param model The model to be modified.
+ * @param index The index of the model operand we're setting.
+ * @param buffer A pointer to the data to use.
+ * @param length The size in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel* model, int32_t index,
+ const void* buffer, size_t length) __INTRODUCED_IN(27);
+
+#if __ANDROID_API__ >= __ANDROID_API_Q__
+
+/**
+ * Sets an operand's per channel quantization parameters.
+ *
+ * Sets parameters required by a tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}.
+ * This function must be called for every tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} before
+ * calling {@link ANeuralNetworksModel_finish}.
+ *
+ * Available since API level 29.
+ *
+ * @param model The model to be modified.
+ * @param index The index of the model operand we're setting.
+ * @param channelQuant The per channel quantization parameters for the operand.
+ * No memory in this struct needs to outlive the call to
+ * this function.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(
+ ANeuralNetworksModel* model, int32_t index,
+ const ANeuralNetworksSymmPerChannelQuantParams* channelQuant) __INTRODUCED_IN(29);
+
+#endif // __ANDROID_API__ >= __ANDROID_API_Q__
+
+/**
+ * Sets an operand to a value stored in a memory object.
+ *
+ * The content of the memory is not copied. A reference to that memory is stored
+ * inside the model. The application is responsible for not changing the content
+ * of the memory region until all executions using this model have completed.
+ * As the data may be copied during processing, modifying the data after this call
+ * yields undefined results.
+ *
+ * To indicate that an optional operand should be considered missing,
+ * use {@link ANeuralNetworksModel_setOperandValue} instead, passing nullptr for buffer.
+ *
+ * It is disallowed to set an operand value with shared memory backed by an
+ * AHardwareBuffer of a format other than AHARDWAREBUFFER_FORMAT_BLOB.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
+ * called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ * See {@link ANeuralNetworksMemory_createFromAHardwareBuffer} for information on
+ * AHardwareBuffer usage.
+ *
+ * Available since API level 27.
+ *
+ * @param model The model to be modified.
+ * @param index The index of the model operand we're setting.
+ * @param memory The memory containing the data.
+ * @param offset This specifies the location of the data within the memory.
+ * The offset is in bytes from the start of memory.
+ * @param length The size in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel* model, int32_t index,
+ const ANeuralNetworksMemory* memory,
+ size_t offset, size_t length)
+ __INTRODUCED_IN(27);
+
+/**
+ * Add an operation to a model.
+ *
+ * @param model The model to be modified.
+ * @param type The {@link ANeuralNetworksOperationType} of the operation.
+ * @param inputCount The number of entries in the inputs array.
+ * @param inputs An array of indexes identifying each operand.
+ * @param outputCount The number of entries in the outputs array.
+ * @param outputs An array of indexes identifying each operand.
+ *
+ * The operands specified by inputs and outputs must have been
+ * previously added by calls to {@link ANeuralNetworksModel_addOperand}.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
+ * called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_addOperation(ANeuralNetworksModel* model,
+ ANeuralNetworksOperationType type, uint32_t inputCount,
+ const uint32_t* inputs, uint32_t outputCount,
+ const uint32_t* outputs) __INTRODUCED_IN(27);
+
+/**
+ * Specifies which operands will be the model's inputs and
+ * outputs. Every model must have at least one input and one output.
+ *
+ * An operand cannot be used for both input and output. Doing so will
+ * return an error.
+ *
+ * @param model The model to be modified.
+ * @param inputCount The number of entries in the inputs array.
+ * @param inputs An array of indexes identifying the input operands.
+ * @param outputCount The number of entries in the outputs array.
+ * @param outputs An array of indexes identifying the output operands.
+ *
+ * The operands specified by inputs and outputs must have been
+ * previously added by calls to {@link ANeuralNetworksModel_addOperand}.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
+ * called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel* model, uint32_t inputCount,
+ const uint32_t* inputs, uint32_t outputCount,
+ const uint32_t* outputs) __INTRODUCED_IN(27);
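+
+/*
+ * Example (illustrative sketch; error handling elided): a one-operation model
+ * computing out = in0 + in1 with no fused activation.
+ *
+ *   ANeuralNetworksModel* model = NULL;
+ *   ANeuralNetworksModel_create(&model);
+ *   uint32_t dims[1] = {4};
+ *   ANeuralNetworksOperandType tensor = {ANEURALNETWORKS_TENSOR_FLOAT32, 1,
+ *                                        dims, 0.0f, 0};
+ *   ANeuralNetworksOperandType intScalar = {ANEURALNETWORKS_INT32, 0, NULL,
+ *                                           0.0f, 0};
+ *   ANeuralNetworksModel_addOperand(model, &tensor);     // index 0: in0
+ *   ANeuralNetworksModel_addOperand(model, &tensor);     // index 1: in1
+ *   ANeuralNetworksModel_addOperand(model, &intScalar);  // index 2: fuse code
+ *   ANeuralNetworksModel_addOperand(model, &tensor);     // index 3: out
+ *   int32_t fuse = ANEURALNETWORKS_FUSED_NONE;
+ *   ANeuralNetworksModel_setOperandValue(model, 2, &fuse, sizeof(fuse));
+ *   uint32_t opInputs[3] = {0, 1, 2}, opOutputs[1] = {3};
+ *   ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, opInputs,
+ *                                     1, opOutputs);
+ *   uint32_t modelIns[2] = {0, 1}, modelOuts[1] = {3};
+ *   ANeuralNetworksModel_identifyInputsAndOutputs(model, 2, modelIns, 1,
+ *                                                 modelOuts);
+ *   ANeuralNetworksModel_finish(model);
+ */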
+
+#if __ANDROID_API__ >= 28
+
+/**
+ * Specifies whether {@link ANEURALNETWORKS_TENSOR_FLOAT32} is allowed to be
+ * calculated with range and/or precision as low as that of the IEEE 754 16-bit
+ * floating-point format. By default, {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * must be calculated using at least the range and precision of the IEEE 754
+ * 32-bit floating-point format.
+ *
+ * @param model The model to be modified.
+ * @param allow 'true' indicates {@link ANEURALNETWORKS_TENSOR_FLOAT32} may be
+ * calculated with range and/or precision as low as that of the
+ * IEEE 754 16-bit floating point format. 'false' indicates
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT32} must be calculated using
+ * at least the range and precision of the IEEE 754 32-bit floating
+ * point format.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
+ * called will return an error.
+ *
+ * Available since API level 28.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ */
+int ANeuralNetworksModel_relaxComputationFloat32toFloat16(ANeuralNetworksModel* model, bool allow)
+ __INTRODUCED_IN(28);
+
+#endif // __ANDROID_API__ >= 28
+
+/**
+ * Create a {@link ANeuralNetworksCompilation} to compile the given model.
+ *
+ * <p>This only creates the object. Compilation is only performed once
+ * {@link ANeuralNetworksCompilation_finish} is invoked.</p>
+ *
+ * <p>{@link ANeuralNetworksCompilation_finish} should be called once
+ * all desired properties have been set on the compilation.</p>
+ *
+ * <p>{@link ANeuralNetworksCompilation_free} should be called once the compilation
+ * is no longer needed.</p>
+ *
+ * <p>The provided model must outlive the compilation.</p>
+ *
+ * The model must already have been finished by a call to
+ * {@link ANeuralNetworksModel_finish}.
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param model The {@link ANeuralNetworksModel} to be compiled.
+ * @param compilation The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the model is invalid.
+ */
+int ANeuralNetworksCompilation_create(ANeuralNetworksModel* model,
+ ANeuralNetworksCompilation** compilation) __INTRODUCED_IN(27);
+
+/**
+ * Destroy a compilation.
+ *
+ * The compilation need not have been finished by a call to
+ * {@link ANeuralNetworksCompilation_finish}.
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param compilation The compilation to be destroyed. Passing NULL is acceptable and
+ * results in no operation.
+ */
+void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation* compilation) __INTRODUCED_IN(27);
+
+/**
+ * Sets the execution preference.
+ *
+ * <p>Provides guidance to the runtime when trade-offs are possible.</p>
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param compilation The compilation to be modified.
+ * @param preference Either {@link ANEURALNETWORKS_PREFER_LOW_POWER},
+ * {@link ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER}, or
+ * {@link ANEURALNETWORKS_PREFER_SUSTAINED_SPEED}.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation* compilation,
+ int32_t preference) __INTRODUCED_IN(27);
+
+/**
+ * Indicate that we have finished modifying a compilation. Required before
+ * calling {@link ANeuralNetworksExecution_create}.
+ *
+ * An application must ensure that no other thread uses
+ * the compilation at the same time.
+ *
+ * This function must only be called once for a given compilation.
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param compilation The compilation to be finished.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation* compilation) __INTRODUCED_IN(27);
+
+/**
+ * Create a {@link ANeuralNetworksExecution} to apply the given compilation.
+ * This only creates the object. Computation is only performed once
+ * {@link ANeuralNetworksExecution_compute} or
+ * {@link ANeuralNetworksExecution_startCompute} is invoked.
+ *
+ * <p>The provided compilation must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param compilation The {@link ANeuralNetworksCompilation} to be evaluated.
+ * @param execution The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the compilation is invalid.
+ */
+int ANeuralNetworksExecution_create(ANeuralNetworksCompilation* compilation,
+ ANeuralNetworksExecution** execution) __INTRODUCED_IN(27);
+
+/**
+ * Destroy an execution.
+ *
+ * <p>If called on an execution for which
+ * {@link ANeuralNetworksExecution_startCompute} has been called, the
+ * function will return immediately but will mark the execution to be deleted
+ * once the computation completes. The related {@link ANeuralNetworksEvent}
+ * will be signaled and the {@link ANeuralNetworksEvent_wait} will return
+ * ANEURALNETWORKS_ERROR_DELETED.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param execution The execution to be destroyed. Passing NULL is acceptable and
+ * results in no operation.
+ */
+void ANeuralNetworksExecution_free(ANeuralNetworksExecution* execution) __INTRODUCED_IN(27);
+
+/**
+ * Associate a user buffer with an input of the model of the
+ * {@link ANeuralNetworksExecution}. Evaluation of the execution must not have
+ * been scheduled.
+ *
+ * <p>The provided buffer must outlive the execution.</p>
+ *
+ * If the input is optional, you can indicate that it is omitted by
+ * passing nullptr for buffer and 0 for length.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the input argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with
+ * {@link ANeuralNetworksModel_addOperand}.
+ * @param type The {@link ANeuralNetworksOperandType} of the
+ * operand. Unless the input is omitted, this should be
+ * used to specify the dimensions that were left
+ * unspecified when the operand was added to the
+ * model. All other properties of the type must be the
+ * same as specified in the model. If the type is the same
+ * as specified when the model was built, NULL can be
+ * passed. Neither the {@link ANeuralNetworksOperandType}
+ * nor the dimensions it points to need to outlive the call
+ * to {@link ANeuralNetworksExecution_setInput}.
+ * @param buffer The buffer containing the data.
+ * @param length The length in bytes of the buffer.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
+ * index is not recognized or the buffer is too small for the input.
+ */
+int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution* execution, int32_t index,
+ const ANeuralNetworksOperandType* type, const void* buffer,
+ size_t length) __INTRODUCED_IN(27);
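+
+/*
+ * Example (illustrative sketch; error handling elided): compiling and running
+ * the ADD model sketched above, using the synchronous API-29 compute path.
+ *
+ *   ANeuralNetworksCompilation* compilation = NULL;
+ *   ANeuralNetworksCompilation_create(model, &compilation);
+ *   ANeuralNetworksCompilation_setPreference(
+ *       compilation, ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER);
+ *   ANeuralNetworksCompilation_finish(compilation);
+ *
+ *   float in0[4] = {1, 2, 3, 4}, in1[4] = {5, 6, 7, 8}, out[4];
+ *   ANeuralNetworksExecution* execution = NULL;
+ *   ANeuralNetworksExecution_create(compilation, &execution);
+ *   ANeuralNetworksExecution_setInput(execution, 0, NULL, in0, sizeof(in0));
+ *   ANeuralNetworksExecution_setInput(execution, 1, NULL, in1, sizeof(in1));
+ *   ANeuralNetworksExecution_setOutput(execution, 0, NULL, out, sizeof(out));
+ *   ANeuralNetworksExecution_compute(execution);  // out is now {6, 8, 10, 12}
+ *   ANeuralNetworksExecution_free(execution);
+ */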
+
+/**
+ * Associate part of a memory object with an input of the model of the
+ * {@link ANeuralNetworksExecution}. Evaluation of the execution must not have
+ * been scheduled.
+ *
+ * <p>The provided memory must outlive the execution.</p>
+ *
+ * If the input is optional, you can indicate that it is omitted by
+ * using {@link ANeuralNetworksExecution_setInput} instead, passing nullptr for
+ * buffer and 0 for length.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ * See {@link ANeuralNetworksMemory_createFromAHardwareBuffer} for information on
+ * AHardwareBuffer usage.
+ *
+ * Available since API level 27.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the input argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link ANeuralNetworksModel_addOperand}.
+ * @param type The {@link ANeuralNetworksOperandType} of the
+ * operand. This should be used to specify the dimensions
+ * that were left unspecified when the operand was added
+ * to the model. All other properties of the type must be
+ * the same as specified in the model. If the type is the
+ * same as specified when the model was built, NULL can be
+ * passed. Neither the {@link ANeuralNetworksOperandType}
+ * nor the dimensions it points to need to outlive the call
+ * to {@link ANeuralNetworksExecution_setInputFromMemory}.
+ * @param memory The memory containing the data.
+ * @param offset This specifies the location of the data within the memory.
+ * The offset is in bytes from the start of memory.
+ * @param length The size in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
+ * name is not recognized or the buffer is too small for the input.
+ */
+int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
+ const ANeuralNetworksOperandType* type,
+ const ANeuralNetworksMemory* memory, size_t offset,
+ size_t length) __INTRODUCED_IN(27);
+
+/**
+ * Associate a user buffer with an output of the model of the
+ * {@link ANeuralNetworksExecution}. Evaluation of the execution must not have
+ * been scheduled.
+ *
+ * If the output is optional, you can indicate that it is omitted by
+ * passing nullptr for buffer and 0 for length.
+ *
+ * <p>The provided buffer must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the output argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link ANeuralNetworksModel_addOperand}.
+ * @param type The {@link ANeuralNetworksOperandType} of the
+ * operand. Unless the output is omitted, this should be
+ * used to specify the dimensions that were left
+ * unspecified when the operand was added to the
+ * model. All other properties of the type must be the
+ * same as specified in the model. If the type is the same
+ * as specified when the model was built, NULL can be
+ * passed. Neither the {@link ANeuralNetworksOperandType}
+ * nor the dimensions it points to need to outlive the call
+ * to {@link ANeuralNetworksExecution_setOutput}.
+ * Since API level 29, the output operand can have unspecified
+ * dimensions or rank to be deduced dynamically during the execution.
+ * However, the user must provide a large enough buffer. The user
+ * can retrieve the output dimensional information after the execution
+ * by {@link ANeuralNetworksExecution_getOutputOperandRank} and
+ * {@link ANeuralNetworksExecution_getOutputOperandDimensions}.
+ * @param buffer The buffer where the data is to be written.
+ * @param length The length in bytes of the buffer.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
+ * name is not recognized or the buffer is too small for the output.
+ */
+int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution* execution, int32_t index,
+ const ANeuralNetworksOperandType* type, void* buffer,
+ size_t length) __INTRODUCED_IN(27);
+
+/**
+ * Associate part of a memory object with an output of the model of the
+ * {@link ANeuralNetworksExecution}. Evaluation of the execution must not have
+ * been scheduled.
+ *
+ * If the output is optional, you can indicate that it is omitted by
+ * using {@link ANeuralNetworksExecution_setOutput} instead, passing nullptr for
+ * buffer and 0 for length.
+ *
+ * <p>The provided memory must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ * See {@link ANeuralNetworksMemory_createFromAHardwareBuffer} for information on
+ * AHardwareBuffer usage.
+ *
+ * Available since API level 27.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the output argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link ANeuralNetworksModel_addOperand}.
+ * @param type The {@link ANeuralNetworksOperandType} of the operand. This should be
+ * used to specify the dimensions that were left
+ * unspecified when the operand was added to the
+ * model. All other properties of the type must be the
+ * same as specified in the model. If the type is the same
+ * as specified when the model was built, NULL can be
+ * passed. Neither the {@link ANeuralNetworksOperandType}
+ * nor the dimensions it points to need to outlive the call
+ * to {@link ANeuralNetworksExecution_setOutputFromMemory}.
+ * Since API level 29, the output operand can have unspecified
+ * dimensions or rank to be deduced dynamically during the execution.
+ * However, the user must provide a large enough memory. The user
+ * can retrieve the output dimensional information after the execution
+ * by {@link ANeuralNetworksExecution_getOutputOperandRank} and
+ * {@link ANeuralNetworksExecution_getOutputOperandDimensions}.
+ * @param memory The memory where the data is to be stored.
+ * @param offset This specifies the location of the data within the memory.
+ * The offset is in bytes from the start of memory.
+ * @param length The length in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if the
+ * name is not recognized or the buffer is too small for the output.
+ */
+int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
+ const ANeuralNetworksOperandType* type,
+ const ANeuralNetworksMemory* memory, size_t offset,
+ size_t length) __INTRODUCED_IN(27);
+
+/**
+ * Schedule asynchronous evaluation of the execution.
+ *
+ * <p>Schedules asynchronous evaluation of the execution. Once the model has
+ * been applied and the outputs are ready to be consumed, the returned event
+ * will be signaled. Use {@link ANeuralNetworksEvent_wait} to wait for that
+ * event.
+ * </p>
+ *
+ * ANeuralNetworksEvent_wait must be called to reclaim the resources used
+ * by the execution.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * See {@link ANeuralNetworksExecution_compute} for synchronous execution.
+ * Synchronous execution incurs lower overhead than asynchronous execution.
+ *
+ * Available since API level 27.
+ *
+ * @param execution The execution to be scheduled and executed.
+ * @param event The event that will be signaled on completion. event is set to
+ * NULL if there's an error.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution* execution,
+ ANeuralNetworksEvent** event) __INTRODUCED_IN(27);
+
+/**
+ * Waits until the execution completes.
+ *
+ * More than one thread can wait on an event. When the execution completes,
+ * all threads will be released.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the execution completed normally.
+ * ANEURALNETWORKS_UNMAPPABLE if the execution input or output memory cannot
+ * be properly mapped.
+ */
+int ANeuralNetworksEvent_wait(ANeuralNetworksEvent* event) __INTRODUCED_IN(27);
+
+/**
+ * Destroys the event.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 27.
+ */
+void ANeuralNetworksEvent_free(ANeuralNetworksEvent* event) __INTRODUCED_IN(27);
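+
+/*
+ * Illustrative usage sketch (non-normative): a typical asynchronous round
+ * trip through the execution API. Error checking is omitted; "compilation",
+ * "input" and "output" are assumed to be set up elsewhere.
+ *
+ *   ANeuralNetworksExecution* execution = NULL;
+ *   ANeuralNetworksEvent* event = NULL;
+ *   ANeuralNetworksExecution_create(compilation, &execution);
+ *   ANeuralNetworksExecution_setInput(execution, 0, NULL, input, sizeof(input));
+ *   ANeuralNetworksExecution_setOutput(execution, 0, NULL, output, sizeof(output));
+ *   ANeuralNetworksExecution_startCompute(execution, &event);
+ *   ANeuralNetworksEvent_wait(event);
+ *   ANeuralNetworksEvent_free(event);
+ *   ANeuralNetworksExecution_free(execution);
+ */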
+
+#endif // __ANDROID_API__ >= 27
+
+__END_DECLS
+
+#endif // ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_H
+
+/** @} */
diff --git a/runtimes/include/NeuralNetworksEx.h b/runtimes/include/NeuralNetworksEx.h
new file mode 100644
index 000000000..601bb2cf7
--- /dev/null
+++ b/runtimes/include/NeuralNetworksEx.h
@@ -0,0 +1,747 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file NeuralNetworksEx.h
+ * @brief This file contains ANeuralNetworksModel_addOperationEx function definition
+ * @note This header describes experimental feature,
+ * so specification here can be changed or/and removed
+ */
+#ifndef NN_RUNTIME_NEURAL_NETWORKS_EX_H
+#define NN_RUNTIME_NEURAL_NETWORKS_EX_H
+
+#include <sys/cdefs.h>
+
+__BEGIN_DECLS
+
+/**
+ * @brief Extended operation types
+ */
+typedef enum {
+  /** Extended operation codes start at 50000 so that they cannot collide
+   *  with {@link OperationCode} values. */
+
+ /**
+ * Casts a tensor/tensor-values to a new type
+ *
+   * The output value is calculated as:
+ *
+ * output = new_type(input)
+ *
+ * Ex:
+   * X = {1.8, 2.2}, dtype of X = float32
+ * Y = Cast(X), dtype of Y = int32
+ * then Y = {1,2}
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
+ */
+ ANEURALNETWORKS_CAST_EX = 50000,
+
+ /**
+ * Gathers values along an axis.
+ *
+ * Produces an output tensor with shape
+ * input0.dimension[:axis] + indices.dimension + input0.dimension[axis + 1:]
+ * where:
+ * # Vector indices (output is rank(input0)).
+ * output[a_0, ..., a_n, i, b_0, ..., b_n] =
+ * input0[a_0, ..., a_n, indices[i], b_0, ..., b_n]
+ *
+ * # Higher rank indices (output is rank(input0) + rank(indices) - 1).
+ * output[a_0, ..., a_n, i, ..., j, b_0, ... b_n] =
+ * input0[a_0, ..., a_n, indices[i, ..., j], b_0, ..., b_n]
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor from which to gather values.
+ * * 1: A k-D tensor {@link ANEURALNETWORKS_TENSOR_INT32} of indices.
+ * The values must be in the bounds of the corresponding dimensions
+ * of input0.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis.
+ * Negative index is used to specify axis from the end
+ * (e.g. -1 for the last axis). Must be in the range [-n, n).
+ *
+ * Outputs:
+ * * 0: An (n + k - 1)-D tensor with the same {@link OperandCode} as input0.
+ */
+ ANEURALNETWORKS_GATHER_EX = 50001, /**< Gather slices according to indexes and axis */
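+
+  /*
+   * Illustrative example (non-normative): for input0 = [[1, 2], [3, 4]],
+   * indices = [1, 0] and axis = 0, the output is [[3, 4], [1, 2]].
+   */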
+
+ /**
+ * Finds values and indices of the k largest entries for the last dimension.
+ *
+ * Resulting values in each dimensions are sorted in descending order. If
+ * two values are equal, the one with larger index appears first.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: input, an n-D tensor specifying the input.
+ * * 1: k, an {@link ANEURALNETWORKS_INT32} scalar, specifying the number of
+ * top elements to look for along the last dimension.
+ *
+ * Outputs:
+ * * 0: An n-D tensor of the same type as the input, containing the k
+ * largest elements along each last dimensional slice.
+ * * 1: An n-D tensor of type {@link ANEURALNETWORKS_TENSOR_INT32}
+ * containing the indices of values within the last dimension of input.
+ */
+ ANEURALNETWORKS_TOPK_V2_EX = 50002,
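+
+  /*
+   * Illustrative example (non-normative): for input = [2, 7, 1, 9] and
+   * k = 2, output 0 is [9, 7] and output 1 is [3, 1].
+   */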
+
+ /**
+ * Computes the maximum of elements across dimensions of a tensor.
+ *
+ * Reduces the input tensor along the given dimensions to reduce.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input.
+ * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ */
+ ANEURALNETWORKS_REDUCE_MAX_EX = 50003,
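+
+  /*
+   * Illustrative example (non-normative): for input = [[1, 5, 2], [7, 3, 4]]
+   * and dimensions to reduce = [1], the output is [5, 7].
+   */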
+
+ /**
+ * Splits a tensor along a given axis into num_splits subtensors.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor to split.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis along
+ * which to split.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar indicating the number of
+ * splits along given axis. Must evenly divide axis size.
+ *
+ * Outputs:
+ * * 0 ~ (num_splits - 1): Resulting subtensors.
+ */
+ ANEURALNETWORKS_SPLIT_EX = 50004, /**< Splits a tensor into sub tensors */
+
+ /**
+ * Computes element-wise reciprocal of square root of the input tensor.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = 1/sqrt(input)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
+ */
+ ANEURALNETWORKS_RSQRT_EX = 50005,
+
+ /**
+ * Computes element-wise squared difference on the input tensors.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
+ * The output is the result of squaring of difference given by subtracting the second input tensor
+ * from the first one.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} as input0.
+ */
+ ANEURALNETWORKS_SQUARED_DIFFERENCE_EX = 50006,
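+
+  /*
+   * Illustrative example (non-normative): for input0 = [2, 5] and
+   * input1 = [4, 1], the output is (input0 - input1)^2 = [4, 16].
+   */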
+
+ /**
+ * Computes numerical negative value element-wise on the input tensor.
+ *
+   * Given an input tensor of {@link OperandCode},
+   * the output is the element-wise numerical negation of the input tensor.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the input.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
+ */
+ ANEURALNETWORKS_NEG_EX = 50007,
+
+ /**
+ * Computes exponential value element-wise on the input tensor.
+ *
+   * Given an input tensor of {@link OperandCode},
+   * the output is the element-wise exponential of the input tensor.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the input.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
+ */
+ ANEURALNETWORKS_EXP_EX = 50008,
+
+ /**
+ * Computes the sum of elements across dimensions of a tensor.
+ *
+ * Reduces the input tensor along the given dimensions to reduce.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input.
+ * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ */
+ ANEURALNETWORKS_REDUCE_SUM_EX = 50009,
+
+ /**
+   * A transposed convolutional layer carries out a regular convolution
+   * but reverts its spatial transformation; that is, it performs
+   * convolution with transposed weights.
+   *
+   * Supported tensor {@link OperandCode}:
+   * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+   * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+   *
+   * Supported tensor rank: only 4
+   *
+   * Inputs:
+   * * 0: An {@link ANEURALNETWORKS_INT32} 1-D four-element tensor, specifying the output shape.
+   * * 1: A 4-D tensor, of shape [depth_out, filter_height, filter_width, depth_in],
+   *      specifying the filter.
+   * * 2: A 4-D tensor, of shape [batches, height, width, depth_in], specifying the input.
+   * * 3: An {@link ANEURALNETWORKS_INT32} scalar, specifying the padding type.
+   * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+   *      walking through input in the 'width' dimension.
+   * * 5: An {@link ANEURALNETWORKS_INT32} scalar, specifying the stride when
+   *      walking through input in the 'height' dimension.
+   *
+   * Outputs:
+   * * 0: The output 4-D tensor, of shape [batches, out_height, out_width, depth_out].
+ */
+ ANEURALNETWORKS_TRANSPOSE_CONV_EX = 50010,
+
+ /**
+ * Computes element-wise truth value by comparing the two input tensors for equality.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
+ * The output is the result of comparison of two input tensors.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ *
+ * Outputs:
+ * * 0: A boolean tensor indicating the truth value of (x == y)
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ */
+ ANEURALNETWORKS_EQUAL_EX = 50011,
+
+ /**
+ * Computes element-wise absolute value of the input tensor.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = fabs(input)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
+ */
+ ANEURALNETWORKS_ABS_EX = 50012,
+ /**
+   * Packs a list of rank-R tensors into one rank-(R+1) tensor along the axis dimension.
+ *
+ * The input tensors must have identical {@link OperandCode} and the same
+ * dimensions.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 3
+ *
+ * Inputs:
+ * * 0 ~ n-1: The list of n input tensors, of shape
+ * [D0, D1, ..., Daxis(i), ..., Dm]. For inputs of
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}, all input tensors
+ * must have the same scale and zeroPoint.
+ * * n: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
+ * number of input tensors.
+ * * n+1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
+ * pack axis.
+ *
+ * Outputs:
+ * * 0: The output, a tensor of the same {@link OperandCode} as the input
+ * tensors. The output shape is [D0, D1, ..., N at Daxis(i), ..., Dm+1]
+ * where N is the number of tensors to be packed.
+ */
+ ANEURALNETWORKS_PACK_EX = 50013,
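+
+  /*
+   * Illustrative example (non-normative): packing three [2, 4] tensors
+   * along axis 0 yields a [3, 2, 4] tensor; along axis 1, a [2, 3, 4]
+   * tensor.
+   */
+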
+ /**
+   * Unpacks a given rank-R tensor into num_splits rank-(R-1) tensors along the axis dimension.
+   * num_splits must evenly divide the size of the input's axis dimension.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: The input shape is [D0, D1, ..., N at Daxis(i), ..., Dm+1].
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
+ * number of splits along unpack axis.
+ * * 2: An {@link ANEURALNETWORKS_INT32} scalar, specifying the
+ * unpack axis.
+ *
+ * Outputs:
+ * * 0 ~ n-1: The list of n output tensors, of shape
+ * [D0, D1, ..., Daxis(i), ..., Dm]. The output tensors are of the same
+ * {@link OperandCode} as the input tensor 0.
+ */
+ ANEURALNETWORKS_UNPACK_EX = 50014,
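+
+  /*
+   * Illustrative example (non-normative): unpacking a [3, 2, 4] tensor
+   * with num_splits = 3 along axis 0 yields three [2, 4] tensors.
+   */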
+
+ /**
+ * Returns the index of the largest element along an axis.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the input.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar specifying the axis to
+ * reduce across. Negative index is used to specify axis from the
+ * end (e.g. -1 for the last axis). Must be in the range [-n, n).
+ *
+ * Outputs:
+ * * 0: An (n - 1)-D {@link ANEURALNETWORKS_TENSOR_INT32} tensor.
+ */
+ ANEURALNETWORKS_ARGMAX_EX = 50015,
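+
+  /*
+   * Illustrative example (non-normative): for input = [[1, 9, 3], [7, 2, 5]]
+   * and axis = 1, the output is [1, 0].
+   */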
+
+ /**
+ * Element-wise square root computation of the input tensor.
+ *
+ * The output is calculated using this formula:
+ *
+ * output = sqrt(input)
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ *
+ * Outputs:
+ * * 0: The output tensor, of the same {@link OperandCode} and shape as input0.
+ */
+ ANEURALNETWORKS_SQRT_EX = 50016,
+
+ /**
+ * Computes element-wise truth value by comparing the input tensors for non-equality.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
+ * The output is the result of comparison of two input tensors.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D tensor, specifying the first input.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ *
+ * Outputs:
+ * * 0: A boolean tensor indicating the truth value of non-equality of input tensors
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ */
+ ANEURALNETWORKS_NOT_EQUAL_EX = 50017,
+
+ /**
+ * Computes element-wise truth value of the input tensor negation.
+ *
+ * Takes one input tensor.
+   * The output is the logical complement (negation) of the input tensor.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D boolean tensor, specifying the input.
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ *
+ * Outputs:
+ * * 0: A boolean tensor of the same size as input indicating the truth value of (NOT x)
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ */
+ ANEURALNETWORKS_LOGICAL_NOT_EX = 50018,
+
+ /**
+ * Computes element-wise truth value of two input tensors for LOGICAL AND.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
+ * The output is the result of comparison of two input tensors.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D boolean tensor, specifying the first input.
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ *
+ * Outputs:
+ * * 0: A boolean tensor indicating the truth value of two input tensors for LOGICAL AND.
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ */
+ ANEURALNETWORKS_LOGICAL_AND_EX = 50019,
+
+ /**
+ * Computes element-wise truth value of two input tensors for LOGICAL OR.
+ *
+ * Takes two input tensors of identical {@link OperandCode} and compatible dimensions.
+ * The output is the result of comparison of two input tensors.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An n-D boolean tensor, specifying the first input.
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0.
+ *
+ * Outputs:
+ * * 0: A boolean tensor indicating the truth value of two input tensors for LOGICAL OR.
+ * Stored as {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM} with offset 0
+ * and scale 1.0f.
+   *      A non-zero byte represents True; a zero byte represents False.
+ */
+ ANEURALNETWORKS_LOGICAL_OR_EX = 50020,
+
+ /**
+ * Computes the minimum of elements across dimensions of a tensor.
+ *
+ * Reduces the input tensor along the given dimensions to reduce.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input.
+ * * 1: A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}. The dimensions
+ * to reduce.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ */
+ ANEURALNETWORKS_REDUCE_MIN_EX = 50021,
+
+ /**
+ * Parametric Rectified Linear Unit.
+ *
+ * It follows: f(x) = alpha * x for x < 0, f(x) = x for x >= 0, where alpha
+ * is a learned array with the same {@link OperandCode} and compatible
+ * dimensions as input x.
+ *
+ * Two dimensions are compatible when:
+ * 1. they are equal, or
+ * 2. one of them is 1
+ *
+ * The size of the output is the maximum size along each dimension of the
+ * input operands. It starts with the trailing dimensions, and works its way
+ * forward.
+ *
+ * Example:
+ * input.dimension = {4, 1, 2}
+ * alpha.dimension = {5, 4, 3, 1}
+ * output.dimension = {5, 4, 3, 2}
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: A tensor, specifying the input.
+ * * 1: A tensor of the same {@link OperandCode}, and compatible dimensions
+ * as input0, specifying the alpha.
+ *
+ * Outputs:
+ * * 0: A tensor of the same {@link OperandCode} as input0.
+ */
+ ANEURALNETWORKS_PRELU_EX = 50022,
+
+ /**
+ * Returns a one-hot tensor.
+ *
+   * The locations given by indices take the value on_value, while all other
+   * locations take the value off_value.
+   * on_value and off_value must have matching data types, and both must
+   * match the data type of the output.
+   *
+   * If indices has rank N, the output has rank N+1, with the new axis
+   * created at dimension axis.
+   * If indices is a scalar, the output shape is a vector of length depth.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ *
+ * Supported tensor rank: up to 4
+ *
+ * Inputs:
+ * * 0: An {@link ANEURALNETWORKS_INT32} tensor, specifying the indices.
+ * * 1: An {@link ANEURALNETWORKS_INT32} scalar, specifying the depth.
+ * * 2: A scalar, specifying the on_value.
+ * * 3: A scalar, specifying the off_value.
+ * * 4: An {@link ANEURALNETWORKS_INT32} scalar, specifying the axis to fill. Optional.
+ * (default: -1, a new inner-most axis).
+ *
+ * Outputs:
+ * * 0: The one-hot tensor.
+ */
+ ANEURALNETWORKS_ONE_HOT_EX = 50023,
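+
+  /*
+   * Illustrative example (non-normative): for indices = [0, 2], depth = 3,
+   * on_value = 1.0, off_value = 0.0 and axis = -1, the output is
+   * [[1.0, 0.0, 0.0], [0.0, 0.0, 1.0]].
+   */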
+
+ /**
+ * For input tensors x and y, computes x >= y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+ * * 0: A boolean tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * with offset 0 and scale 1.0f.
+ */
+ ANEURALNETWORKS_GREATER_EQUAL_EX = 50024,
+
+ /**
+ * For input tensors x and y, computes x < y elementwise.
+ *
+ * Supported tensor {@link OperandCode}:
+ * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * * {@link ANEURALNETWORKS_TENSOR_INT32}
+ * * {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ *
+ * Supported tensor rank: from 1
+ *
+ * This operation supports broadcasting.
+ *
+ * Inputs:
+ * * 0: A tensor.
+ * * 1: A tensor of the same {@link OperandCode} and dimensions compatible
+ * with input0.
+ *
+ * Outputs:
+   * * 0: A boolean tensor of {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}
+ * with offset 0 and scale 1.0f.
+ */
+ ANEURALNETWORKS_LESS_EX = 50025,
+} OperationCodeEx; // extends OperationCode
+
+typedef OperationCodeEx ANeuralNetworksOperationTypeEx;
+
+/**
+ * @brief Add an extended operation to a model.
+ *
+ * @param[in] model The model to be modified.
+ * @param[in] type The type of extended operation.
+ * @param[in] inputCount The number of entries in the inputs array.
+ * @param[in] inputs An array of indexes identifying the input operands.
+ * @param[in] outputCount The number of entries in the outputs array.
+ * @param[in] outputs An array of indexes identifying the output operands.
+ *
+ * @note The operands specified by inputs and outputs must have been
+ * previously added by calls to {@link ANeuralNetworksModel_addOperand}.\n
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has been
+ * called will return an error.\n
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationTypeEx type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs);
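+
+/*
+ * Illustrative usage sketch (non-normative): adding a CAST_EX operation
+ * that casts operand 0 into operand 1, assuming both operand indexes were
+ * returned by earlier ANeuralNetworksModel_addOperand calls. Error checking
+ * is omitted.
+ *
+ *   uint32_t inputs[] = {0};
+ *   uint32_t outputs[] = {1};
+ *   ANeuralNetworksModel_addOperationEx(model, ANEURALNETWORKS_CAST_EX,
+ *                                       1, inputs, 1, outputs);
+ */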
+
+__END_DECLS
+
+#endif // NN_RUNTIME_NEURAL_NETWORKS_EX_H
diff --git a/runtimes/include/NeuralNetworksExtensions.h b/runtimes/include/NeuralNetworksExtensions.h
new file mode 100644
index 000000000..ca2e04567
--- /dev/null
+++ b/runtimes/include/NeuralNetworksExtensions.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_EXTENSIONS_H
+#define ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_EXTENSIONS_H
+
+#include "NeuralNetworks.h"
+
+/******************************************************************
+ *
+ * IMPORTANT NOTICE:
+ *
+ * This file is not intended for use by general developers -- only
+ * by OEM applications.
+ *
+ * Extensions source AND binary code relies on the definitions
+ * here to be FROZEN ON ALL UPCOMING PLATFORM RELEASES.
+ *
+ * - DO NOT MODIFY ENUMS (EXCEPT IF YOU ADD NEW 32-BIT VALUES)
+ * - DO NOT MODIFY CONSTANTS OR FUNCTIONAL MACROS
+ * - DO NOT CHANGE THE SIGNATURE OF FUNCTIONS IN ANY WAY
+ * - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES
+ */
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ >= __ANDROID_API_Q__
+
+/**
+ * Queries whether an extension is supported by the driver implementation of the specified device.
+ *
+ * @param device The representation of the specified device.
+ * @param extensionName The extension name.
+ * @param isExtensionSupported The boolean value indicating whether the extension is supported.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+int ANeuralNetworksDevice_getExtensionSupport(const ANeuralNetworksDevice* device,
+ const char* extensionName, bool* isExtensionSupported)
+ __INTRODUCED_IN(29);
+
+/**
+ * Creates an operand type from an extension name and an extension operand code.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 29.
+ *
+ * @param model The model to contain the operand.
+ * @param extensionName The extension name.
+ * @param operandCodeWithinExtension The extension operand code.
+ * @param type The operand type.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_getExtensionOperandType(ANeuralNetworksModel* model,
+ const char* extensionName,
+ uint16_t operandCodeWithinExtension, int32_t* type)
+ __INTRODUCED_IN(29);
+
+/**
+ * Creates an operation type from an extension name and an extension operation code.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * Available since API level 29.
+ *
+ * @param model The model to contain the operation.
+ * @param extensionName The extension name.
+ * @param operationCodeWithinExtension The extension operation code.
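+
+/*
+ * Illustrative usage sketch (non-normative): binding an input from shared
+ * memory created with ANeuralNetworksMemory_createFromFd. "fd" and "size"
+ * are placeholders, and error checking is omitted.
+ *
+ *   ANeuralNetworksMemory* memory = NULL;
+ *   ANeuralNetworksMemory_createFromFd(size, PROT_READ, fd, 0, &memory);
+ *   ANeuralNetworksExecution_setInputFromMemory(execution, 0, NULL, memory, 0, size);
+ */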
+ * @param type The operation type.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_getExtensionOperationType(ANeuralNetworksModel* model,
+ const char* extensionName,
+ uint16_t operationCodeWithinExtension,
+ ANeuralNetworksOperationType* type)
+ __INTRODUCED_IN(29);
+
+/**
+ * Sets extension operand parameters.
+ *
+ * Available since API level 29.
+ *
+ * @param model The model to be modified.
+ * @param index The index of the model operand we're setting.
+ * @param data A pointer to the extension operand data.
+ * The data does not have to outlive the call to this function.
+ * @param length The size in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+int ANeuralNetworksModel_setOperandExtensionData(ANeuralNetworksModel* model, int32_t index,
+ const void* data, size_t length)
+ __INTRODUCED_IN(29);
+
+#endif // __ANDROID_API__ >= __ANDROID_API_Q__
+
+__END_DECLS
+
+#endif // ANDROID_ML_NN_RUNTIME_NEURAL_NETWORKS_EXTENSIONS_H
diff --git a/runtimes/include/nnfw.h b/runtimes/include/nnfw.h
new file mode 100644
index 000000000..456781e70
--- /dev/null
+++ b/runtimes/include/nnfw.h
@@ -0,0 +1,200 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_H__
+#define __NNFW_H__
+
+#include <stddef.h>
+#include <stdint.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct nnfw_session nnfw_session;
+typedef struct nnfw_tensorinfo nnfw_tensorinfo;
+
+typedef enum {
+ NNFW_TYPE_TENSOR_FLOAT32 = 0,
+ NNFW_TYPE_TENSOR_INT32 = 1,
+ /**
+ * A tensor of 8 bit integers that represent real numbers.
+ *
+ * real_value = (integer_value - zeroPoint) * scale.
+ */
+ NNFW_TYPE_TENSOR_QUANT8_ASYMM = 2,
+ NNFW_TYPE_TENSOR_BOOL = 3,
+} NNFW_TYPE;
+
+/**
+ * Result Values
+ */
+typedef enum {
+ NNFW_STATUS_NO_ERROR = 0,
+ NNFW_STATUS_ERROR = 1,
+} NNFW_STATUS;
+
+/**
+ * Tensor information: data type, rank, and dimensions of a tensor
+ */
+struct nnfw_tensorinfo
+{
+ NNFW_TYPE dtype;
+ int rank;
+ int dims[6]; // MAX rank is 6
+};
+
+/*
+ * Create a new session instance
+ *
+ * @param[out] session the session to be created
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_create_session(nnfw_session **session);
+
+/*
+ * Close a session instance
+ *
+ * @param[in] session the session to be closed
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_close_session(nnfw_session *session);
+
+/*
+ * Load model from nnpackage file or directory
+ *
+ * @param[in] session nnfw_session loading the given nnpackage file/dir
+ * @param[in] package_file_path path to the nnpackage file or unzipped directory to be loaded
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_load_model_from_file(nnfw_session *session, const char *package_file_path);
+
+/*
+ * Prepare session to be ready for inference
+ * This phase may finalize model compilation, scheduling, and additional settings.
+ *
+ * @param[in] session the session to be prepared
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_prepare(nnfw_session *session);
+
+/*
+ * Run inference
+ *
+ * @param[in] session the session to run inference
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_run(nnfw_session *session);
+
+/*
+ * Set input
+ *
+ * @param[in] session session to which the input is to be set
+ * @param[in] index index of input to be set (0-indexed)
+ * @param[in] type type of the input
+ * @param[in] buffer raw buffer for input
+ * @param[in] length size of the input buffer in bytes
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_set_input(nnfw_session *session, uint32_t index, NNFW_TYPE type,
+ const void *buffer, size_t length);
+
+/*
+ * Set output
+ *
+ * @param[in] session session from which the inference output is to be extracted
+ * @param[in] index index of output to be set (0-indexed)
+ * @param[in] type type of the output
+ * @param[out] buffer raw buffer for output
+ * @param[in] length size of the output buffer in bytes
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_set_output(nnfw_session *session, uint32_t index, NNFW_TYPE type, void *buffer,
+ size_t length);
+
+/*
+ * Get the number of inputs
+ *
+ * @param[in] session session from which input information is retrieved
+ * @param[out] number variable which the number of inputs is put into
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_input_size(nnfw_session *session, uint32_t *number);
+
+/*
+ * Get the number of outputs
+ *
+ * @param[in] session session from which output information is retrieved
+ * @param[out] number variable which the number of outputs is put into
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_output_size(nnfw_session *session, uint32_t *number);
+
+/*
+ * Get i-th input tensor info
+ *
+ * @param[in] session session from which input information is retrieved
+ * @param[in] index index of input
+ * @param[out] tensor_info nnfw_tensorinfo to be filled
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_input_tensorinfo(nnfw_session *session, uint32_t index,
+ nnfw_tensorinfo *tensor_info);
+
+/*
+ * Get i-th output tensor info
+ *
+ * @param[in] session session from which output information is retrieved
+ * @param[in] index index of output
+ * @param[out] tensor_info nnfw_tensorinfo to be filled
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_output_tensorinfo(nnfw_session *session, uint32_t index,
+ nnfw_tensorinfo *tensor_info);
+
+/*
+ * Set default backend
+ *
+ * @param[in] session session to which a default backend is set
+ * @param[in] backend default backend
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_set_default_backend(nnfw_session *session, const char *backend);
+
+/*
+ * Set the operation's backend
+ *
+ * @param[in] session session to be modified
+ * @param[in] op operation to be set
+ * @param[in] backend backend on which the operation runs
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_set_op_backend(nnfw_session *session, const char *op, const char *backend);
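+
+/*
+ * Illustrative usage sketch (non-normative): a minimal inference round
+ * trip. The package path and the input/output buffers are placeholders;
+ * error checking is omitted.
+ *
+ *   nnfw_session *session = NULL;
+ *   nnfw_create_session(&session);
+ *   nnfw_load_model_from_file(session, "path/to/nnpackage");
+ *   nnfw_prepare(session);
+ *   nnfw_set_input(session, 0, NNFW_TYPE_TENSOR_FLOAT32, input, sizeof(input));
+ *   nnfw_set_output(session, 0, NNFW_TYPE_TENSOR_FLOAT32, output, sizeof(output));
+ *   nnfw_run(session);
+ *   nnfw_close_session(session);
+ */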
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/runtimes/include/nnfw_dev.h b/runtimes/include/nnfw_dev.h
new file mode 100644
index 000000000..ecf0597cf
--- /dev/null
+++ b/runtimes/include/nnfw_dev.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_DEV_H__
+#define __NNFW_DEV_H__
+
+#include "nnfw.h"
+
+// Used for custom kernel development
+
+/*
+ * operand type, used only for custom operations
+ */
+typedef struct
+{
+ nnfw_tensorinfo type;
+ void *allocation;
+} nnfw_operand;
+
+/*
+ * Used as input to custom operation eval function
+ */
+typedef struct
+{
+ size_t ninputs;
+ nnfw_operand *inputs;
+
+ size_t noutputs;
+ nnfw_operand *outputs;
+} nnfw_custom_kernel_params;
+
+/*
+ * Custom kernel evaluation function
+ *
+ * @param[in] params custom operation parameters
+ * @param[in] userdata pointer to a user-specified buffer (kernel-instance specific)
+ * @param[in] userdata_size size of the userdata buffer in bytes
+ */
+typedef void (*nnfw_custom_eval)(nnfw_custom_kernel_params *params, char *userdata,
+ size_t userdata_size);
+
+/*
+ * custom operation registration info
+ */
+typedef struct
+{
+ nnfw_custom_eval eval_function;
+} custom_kernel_registration_info;
+
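+/*
+ * Register a custom operation eval function
+ *
+ * @param[in] session session to which the custom operation is registered
+ * @param[in] id id of the custom operation
+ * @param[in] info registration info containing the eval function
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */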
+NNFW_STATUS nnfw_register_custom_op_info(nnfw_session *session, const char *id,
+ custom_kernel_registration_info *info);
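+
+/*
+ * Illustrative usage sketch (non-normative): registering a custom kernel
+ * under the id "MyOp". The eval body is a placeholder; a real kernel reads
+ * params->inputs[i].allocation and writes params->outputs[i].allocation.
+ *
+ *   static void my_op_eval(nnfw_custom_kernel_params *params, char *userdata,
+ *                          size_t userdata_size)
+ *   {
+ *     // compute outputs from inputs here
+ *   }
+ *
+ *   custom_kernel_registration_info info = {my_op_eval};
+ *   nnfw_register_custom_op_info(session, "MyOp", &info);
+ */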
+
+#endif // __NNFW_DEV_H__
diff --git a/runtimes/libs/ARMComputeEx/CMakeLists.txt b/runtimes/libs/ARMComputeEx/CMakeLists.txt
new file mode 100644
index 000000000..ba1536dfe
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/CMakeLists.txt
@@ -0,0 +1,32 @@
+nnfw_find_package(ARMCompute QUIET)
+
+if(NOT ARMCompute_FOUND)
+ message(STATUS "Check ARM Compute library extension build: need ARM Compute library")
+ return()
+else(NOT ARMCompute_FOUND)
+ message(STATUS "Check ARM Compute library extension build: OK")
+endif(NOT ARMCompute_FOUND)
+
+set(ACL_EX_BASE ${CMAKE_CURRENT_SOURCE_DIR})
+
+file(GLOB_RECURSE ACL_EX_SRCS "${ACL_EX_BASE}/*.cpp")
+
+# generate embedded cl_kernel
+execute_process (
+ WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+ COMMAND bash -c "python resolve_includes.py"
+)
+
+add_library(arm_compute_ex SHARED ${ACL_EX_SRCS})
+target_include_directories(arm_compute_ex PUBLIC ${ACL_EX_BASE})
+target_link_libraries(arm_compute_ex PRIVATE arm_compute_core)
+target_link_libraries(arm_compute_ex PRIVATE nnfw_common)
+target_link_libraries(arm_compute_ex PRIVATE nnfw_coverage)
+# Defines to enable validate check in debug build
+target_compile_definitions(arm_compute_ex PRIVATE EMBEDDED_KERNELS
+ $<$<CONFIG:Debug>:ARM_COMPUTE_DEBUG_ENABLED ARM_COMPUTE_ASSERTS_ENABLED
+ ARM_COMPUTE_LOGGING_ENABLED>)
+# Validate-check functions are not used in release builds.
+# Some parameters exist only for validate-check calls and may be unused in release builds.
+target_compile_options(arm_compute_ex PRIVATE $<$<NOT:$<CONFIG:Debug>>:-Wno-unused-parameter -Wno-unused-function>)
+install(TARGETS arm_compute_ex DESTINATION lib)
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/CLKernelLibraryEx.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/CLKernelLibraryEx.h
index e4e752ef9..e4e752ef9 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/CLKernelLibraryEx.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/CLKernelLibraryEx.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgOperationKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgOperationKernel.h
new file mode 100644
index 000000000..b98b174f7
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLArgOperationKernel.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file CLArgOperationKernel.h
+ * @brief This file defines CLArgOperationKernel
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __ARM_COMPUTE_CLARGOPERATIONKERNEL_H__
+#define __ARM_COMPUTE_CLARGOPERATIONKERNEL_H__
+
+#include "arm_compute/core/CL/ICLKernel.h"
+#include "arm_compute/core/TypesEx.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/**
+ * @brief Class to define interface for the argop kernel.
+ */
+class CLArgOperationKernel : public ICLKernel
+{
+public:
+ /**
+ * @brief Default constructor.
+ */
+ CLArgOperationKernel();
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers).
+ * @param [in] copiedInstance Const reference of CLArgOperationKernel to be copied
+ */
+ CLArgOperationKernel(const CLArgOperationKernel &) = delete;
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers).
+ * @param [in] copiedInstance Const reference of CLArgOperationKernel to be copied
+ * @return Reference of this instance
+ */
+ CLArgOperationKernel &operator=(const CLArgOperationKernel &) = delete;
+ /**
+ * @brief Allow instances of this class to be moved
+ * @param [in] movedInstance Rvalue reference of CLArgOperationKernel to be moved
+ */
+ CLArgOperationKernel(CLArgOperationKernel &&) = default;
+ /**
+ * @brief Allow instances of this class to be moved
+ * @param [in] movedInstance Rvalue reference of CLArgOperationKernel to be moved
+ * @return Reference of this instance
+ */
+ CLArgOperationKernel &operator=(CLArgOperationKernel &&) = default;
+ /**
+ * @brief Initialise the kernel's input, output and border mode.
+ * @param[in] input An input tensor. Data types supported: U8/QASYMM8/S32/F32.
+   * @param[out] output The output tensor. Data types supported: S32.
+ * @param[in] axis Axis along which to reduce. It must be sorted and no duplicates.
+ * @param[in] op Arg operation to perform.
+   * @return N/A
+ */
+ void configure(const ICLTensor *input, ICLTensor *output, const uint32_t axis, ArgOperation op);
+ /**
+ * @brief Static function to check if given info will lead to a valid configuration of @ref
+ * CLArgOperationKernel
+ * @param[in] input An input tensor info. Data types supported: U8/QASYMM8/S32/F32.
+   * @param[in] output The output tensor info. Data types supported: S32.
+ * @param[in] axis Axis along which to reduce. It must be sorted and no duplicates.
+ * @param[in] op Arg operation to perform.
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *output, const uint32_t axis,
+ ArgOperation op);
+
+ /*
+ * @brief Run CLArgOperationKernel op
+ * @param[in] window Window to be used for in_slice
+ * @param[in] queue cl::CommandQueue
+ * @return N/A
+ */
+ void run(const Window &window, cl::CommandQueue &queue) override;
+
+private:
+ const ICLTensor *_input;
+ ICLTensor *_output;
+ uint32_t _axis;
+};
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CLARGOPERATIONKERNEL_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h
index ab33d9d3a..ab33d9d3a 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLCastKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLCastKernel.h
index 4c2feb903..4c2feb903 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLCastKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLCastKernel.h
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h
index 60ec7a82a..60ec7a82a 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLEmbeddingLookupKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLEmbeddingLookupKernel.h
index da075db69..da075db69 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLEmbeddingLookupKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLEmbeddingLookupKernel.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherExKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherExKernel.h
new file mode 100644
index 000000000..aa81a1efa
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLGatherExKernel.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file CLGatherExKernel.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file defines CLGatherExKernel class
+ */
+
+#ifndef __ARM_COMPUTE_CLGATHEREXKERNEL_H__
+#define __ARM_COMPUTE_CLGATHEREXKERNEL_H__
+
+#include "arm_compute/core/CL/ICLKernel.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/**
+ * @brief Class to define an interface for the gather kernel.
+ */
+class CLGatherExKernel : public ICLKernel
+{
+public:
+ /**
+ * @brief Construct CLGatherExKernel object
+ * */
+ CLGatherExKernel();
+
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers).
+ */
+ CLGatherExKernel(const CLGatherExKernel &) = delete;
+
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers).
+ */
+ CLGatherExKernel &operator=(const CLGatherExKernel &) = delete;
+
+ /**
+ * @brief Construct CLGatherExKernel object by using default move constructor
+ * @param[in] CLGatherExKernel object to move
+ */
+ CLGatherExKernel(CLGatherExKernel &&) = default;
+
+ /**
+ * @brief Move assignment operator
+ * @param[in] CLGatherExKernel object to move
+ */
+ CLGatherExKernel &operator=(CLGatherExKernel &&) = default;
+
+ /**
+ * @brief Initialise the kernel's input, output and border mode.
+ * @param[in] input An input tensor. Data types supported: U8/QASYMM8/S32/F32.
+ * @param[in] indices Indices tensor. Data types supported: S32.
+   * @param[out] output The output tensor. Data types supported: same as @p input.
+ * @param[in] axis (Optional) The axis in @p input to gather @p indices from. Negative
+ * values wrap around. Defaults to 0
+ * @return N/A
+ */
+ void configure(const ICLTensor *input, const ICLTensor *indices, ICLTensor *output, int axis = 0);
+
+ /**
+ * @brief Static function to check if given info will lead to a valid configuration of @ref
+ * CLGatherExKernel
+ * @param[in] input An input tensor. Data types supported: U8/QASYMM8/S32/F32.
+ * @param[in] indices Indices tensor. Data types supported: S32.
+   * @param[out] output The output tensor info. Data types supported: same as @p input.
+ * @param[in] axis (Optional) The axis in @p input to gather @p indices from. Negative
+ * values wrap around. Defaults to 0
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *indices,
+ const ITensorInfo *output, int axis = 0);
+
+ /**
+ * @brief Enqueue the OpenCL kernel to process the given window on the passed OpenCL command
+ * queue.
+ * @note The queue is *not* flushed by this method, and therefore the kernel will not have
+ * been executed by the time this method returns.
+ * @param[in] window Region on which to execute the kernel. (Must be a valid region of
+ * the window returned by window()).
+   * @param[in,out] queue Command queue on which to enqueue the kernel.
+   * @return N/A
+ */
+ void run(const Window &window, cl::CommandQueue &queue) override;
+
+private:
+ const ICLTensor *_input;
+ const ICLTensor *_indices;
+ ICLTensor *_output;
+ int _axis;
+};
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CLGATHEREXKERNEL_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h
new file mode 100644
index 000000000..8269e5a7a
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLHashtableLookupKernel.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file CLHashtableLookupKernel.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file defines CLHashtableLookupKernel class
+ */
+
+#ifndef __ARM_COMPUTE_CLHASHTABLELOOKUPKERNEL_H__
+#define __ARM_COMPUTE_CLHASHTABLELOOKUPKERNEL_H__
+
+#include "arm_compute/core/CL/ICLKernel.h"
+#include "arm_compute/runtime/CL/CLTensor.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/**
+ * @brief Class to perform the HashtableLookup operation with an OpenCL kernel
+ */
+class CLHashtableLookupKernel : public ICLKernel
+{
+public:
+ /**
+ * @brief Construct a CLHashtableLookupKernel object
+ * */
+ CLHashtableLookupKernel();
+
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers)
+ * */
+ CLHashtableLookupKernel(const CLHashtableLookupKernel &) = delete;
+
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers)
+ * */
+ CLHashtableLookupKernel &operator=(const CLHashtableLookupKernel &) = delete;
+
+ /**
+ * @brief Construct a CLHashtableLookupKernel object by using default move constructor
+ * @param[in] CLHashtableLookupKernel object to move
+ * */
+ CLHashtableLookupKernel(CLHashtableLookupKernel &&) = default;
+
+ /**
+ * @brief Move assignment operator
+ * @param[in] CLHashtableLookupKernel object to move
+ * */
+ CLHashtableLookupKernel &operator=(CLHashtableLookupKernel &&) = default;
+
+ /**
+ * @brief Destruct this object
+ * */
+ ~CLHashtableLookupKernel() = default;
+
+ /**
+ * @brief Set the input and output of the kernel
+ * @param[in] lookups Lookups 1D tensor whose values are indices into the first dimension
+ * of @p input. Data types supported: S32.
+ * @param[in] keys Keys 1D tensor. keys and input pair represent a map.
+ * Data types supported: S32
+ * @param[in] input Source tensor.
+ * Data types supported: U8/S8/QASYMM8/U16/S16/U32/S32/F16/F32
+ * @param[out] output Destination tensor. Data types and data layouts supported: Same as @p
+ * input.
+ * @param[out] hits Hits 1D tensor. A boolean tensor that indicates whether the lookup hits
+ * (True) or not (False). Data types supported: U8/QASYMM8
+ * @return N/A
+ */
+ void configure(const ICLTensor *lookups, const ICLTensor *keys, const ICLTensor *input,
+ ICLTensor *output, ICLTensor *hits);
+
+ /**
+ * @brief Static function to check if given info will lead to a valid configuration of @ref
+ * CLHashtableLookupKernel
+ * @param[in] lookups The lookups tensor info. Data types supported: S32.
+ * @param[in] keys The keys tensor info. keys and input pair represent a map.
+ * Data types supported: S32
+ * @param[in] input The input tensor info.
+ * Data types supported: U8/S8/QASYMM8/U16/S16/U32/S32/F16/F32
+ * @param[in] output The output tensor info. Data types and data layouts supported: Same as @p
+ * input.
+ * @param[in] hits The hits tensor info. A boolean tensor that indicates whether the lookup
+ * hits (True) or not (False). Data types supported: U8/QASYMM8
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *lookups, const ITensorInfo *keys,
+ const ITensorInfo *input, const ITensorInfo *output,
+ const ITensorInfo *hits);
+
+ /**
+ * @brief Enqueue the OpenCL kernel to process the given window on the passed OpenCL command
+ * queue.
+ * @note The queue is *not* flushed by this method, and therefore the kernel will not have
+ * been executed by the time this method returns.
+ * @param[in] window Region on which to execute the kernel. (Must be a valid region of
+ * the window returned by window()).
+ * @param[in,out] queue Command queue on which to enqueue the kernel.
+ * @return N/A
+ */
+ void run(const Window &window, cl::CommandQueue &queue) override;
+
+private:
+ const ICLTensor *_lookups{nullptr};                 /**< Lookups tensor */
+ const ICLTensor *_keys{nullptr};                    /**< Keys tensor */
+ const ICLTensor *_input{nullptr};                   /**< Source tensor */
+ ICLTensor *_output{nullptr};                        /**< Destination tensor */
+ ICLTensor *_hits{nullptr};                          /**< Hits tensor */
+ std::unique_ptr<CLTensor> _lookup_indices{nullptr}; /**< Lookup indices tensor */
+};
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CLHASHTABLELOOKUPKERNEL_H__ */
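The lookup semantics documented above can be summarised with a scalar reference — a sketch only, not the OpenCL implementation; all names are illustrative:

    #include <cstdint>

    // For each lookup value, search the keys; on a hit copy the matching input
    // row, on a miss zero the output row and clear the hit flag.
    void hashtable_lookup_ref(const int32_t *lookups, int n_lookups, const int32_t *keys,
                              int n_keys, const float *input, float *output, uint8_t *hits,
                              int row_size)
    {
      for (int i = 0; i < n_lookups; ++i)
      {
        int row = -1;
        for (int k = 0; k < n_keys; ++k)
          if (keys[k] == lookups[i]) { row = k; break; }
        hits[i] = (row >= 0) ? 1 : 0;
        for (int j = 0; j < row_size; ++j)
          output[i * row_size + j] = (row >= 0) ? input[row * row_size + j] : 0.0f;
      }
    }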
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNegKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNegKernel.h
index ccbea147e..ccbea147e 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNegKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLNegKernel.h
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPReLUKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPReLUKernel.h
index eff1b8bd5..eff1b8bd5 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPReLUKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLPReLUKernel.h
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLReduceOperationKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLReduceOperationKernel.h
index a26a4a7fc..a26a4a7fc 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLReduceOperationKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLReduceOperationKernel.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h
new file mode 100644
index 000000000..577e38cc4
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_CLSPACE_TO_BATCH_ND_KERNEL_H__
+#define __ARM_COMPUTE_CLSPACE_TO_BATCH_ND_KERNEL_H__
+
+#include "arm_compute/core/CL/ICLKernel.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/** OpenCL kernel to perform SPACE_TO_BATCH_ND operation */
+class CLSpaceToBatchNDKernel final : public ICLKernel
+{
+public:
+ /** Default constructor */
+ CLSpaceToBatchNDKernel();
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLSpaceToBatchNDKernel(const CLSpaceToBatchNDKernel &) = delete;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLSpaceToBatchNDKernel &operator=(const CLSpaceToBatchNDKernel &) = delete;
+ /** Allow instances of this class to be moved */
+ CLSpaceToBatchNDKernel(CLSpaceToBatchNDKernel &&) = default;
+ /** Allow instances of this class to be moved */
+ CLSpaceToBatchNDKernel &operator=(CLSpaceToBatchNDKernel &&) = default;
+ /** Default destructor */
+ ~CLSpaceToBatchNDKernel() = default;
+ /** Initialise the kernel's input and output.
+ *
+ * @note The data layout of input and output must be the same.
+ * @note The number of dimensions of input and output must be 4, and `spatial` dimensions
+ * are height and width.
+ * @param[in] input Input tensor. Data types supported: U8/QASYMM8/S16/F16/S32/F32.
+ * Data layout supported: NCHW/NHWC
+ * @param[in] block_size Block size tensor. Data types supported: S32.
+ * @param[in] padding_size Padding size tensor. Data types supported: S32.
+ * @param[out] output Output tensor. Data types supported: U8/QASYMM8/S16/F16/S32/F32.
+ * Data layout supported: NCHW/NHWC
+ */
+ void configure(const ICLTensor *input, const ICLTensor *block_size, const ICLTensor *padding_size,
+ ICLTensor *output);
+
+ // Inherited methods overridden:
+ void run(const Window &window, cl::CommandQueue &queue) override;
+
+private:
+ const ICLTensor *_input{nullptr}; /**< Source tensor */
+ const ICLTensor *_block_size{nullptr}; /**< Block size tensor */
+ const ICLTensor *_padding_size{nullptr}; /**< Padding size tensor */
+ ICLTensor *_output{nullptr}; /**< Destination tensor */
+};
+
+} // namespace arm_compute
+
+#endif /* __ARM_COMPUTE_CLSPACE_TO_BATCH_ND_KERNEL_H__ */
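As a concrete example of the shape contract above: with an NHWC input of shape 1x4x4x1, block_size = [2, 2] and zero padding, SPACE_TO_BATCH_ND produces an output of shape 4x2x2x1 — the batch dimension grows by the product of the block sizes while height and width shrink by the corresponding factors.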
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h
index be845a549..be845a549 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h
diff --git a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTopKV2Kernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTopKV2Kernel.h
index eb2bad254..eb2bad254 100644
--- a/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTopKV2Kernel.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTopKV2Kernel.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.h
new file mode 100644
index 000000000..c5ef730b6
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_CLTRANSPOSECONVLAYERUPSAMPLEKERNEL_H__
+#define __ARM_COMPUTE_CLTRANSPOSECONVLAYERUPSAMPLEKERNEL_H__
+
+#include "arm_compute/core/CL/ICLKernel.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/** Interface for the Upsampling layer kernel for transpose convolution on OpenCL.
+ */
+class CLTransposeConvLayerUpsampleKernel : public ICLKernel
+{
+public:
+ /** Constructor */
+ CLTransposeConvLayerUpsampleKernel();
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLTransposeConvLayerUpsampleKernel(const CLTransposeConvLayerUpsampleKernel &) = delete;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLTransposeConvLayerUpsampleKernel &
+ operator=(const CLTransposeConvLayerUpsampleKernel &) = delete;
+ /** Default Move Constructor. */
+ CLTransposeConvLayerUpsampleKernel(CLTransposeConvLayerUpsampleKernel &&) = default;
+ /** Default move assignment operator */
+ CLTransposeConvLayerUpsampleKernel &operator=(CLTransposeConvLayerUpsampleKernel &&) = default;
+ /** Default destructor */
+ ~CLTransposeConvLayerUpsampleKernel() = default;
+
+ /** Initialise the kernel's input and output.
+ *
+ * @param[in] input Source tensor. Data types supported: QASYMM8/F16/F32.
+ * @param[out] output Destination tensor. Data types supported: same as @p input. All but
+ * the lowest two dimensions must be the same size as in the input tensor, i.e. scaling is only
+ * performed within the XY-plane.
+ * @param[in] inner_border Top and right inner border sizes. These rows and columns will be
+ * filled with zero.
+ * @param[in] info Contains padding and stride information described in @ref
+ * PadStrideInfo.
+ */
+ void configure(const ICLTensor *input, ICLTensor *output, const BorderSize &inner_border,
+ const PadStrideInfo &info);
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * CLTransposeConvLayerUpsample
+ *
+ * @param[in] input Source tensor info. Data types supported: QASYMM8/F16/F32.
+ * @param[in] output Destination tensor info. Data types supported: same as @p input. All
+ * but the lowest two dimensions must be the same size as in the input tensor, i.e. scaling is
+ * only performed within the XY-plane.
+ * @param[in] inner_border Top and right inner border sizes. These rows and columns will be filled
+ * with zero.
+ * @param[in] info Contains padding and stride information described in @ref
+ * PadStrideInfo.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *output,
+ const BorderSize &inner_border, const PadStrideInfo &info);
+
+ // Inherited methods overridden:
+ void run(const Window &window, cl::CommandQueue &queue) override;
+
+private:
+ const ICLTensor *_input;
+ ICLTensor *_output;
+ BorderSize _inner_border;
+ PadStrideInfo _info;
+};
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CLTRANSPOSECONVLAYERUPSAMPLEKERNEL_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/CPP/kernels/CPPUpsampleKernelEx.h b/runtimes/libs/ARMComputeEx/arm_compute/core/CPP/kernels/CPPUpsampleKernelEx.h
new file mode 100644
index 000000000..d093c22cb
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/CPP/kernels/CPPUpsampleKernelEx.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_CPPUPSAMPLEKERNEL_EX_H__
+#define __ARM_COMPUTE_CPPUPSAMPLEKERNEL_EX_H__
+
+#include "arm_compute/core/CPP/ICPPKernel.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** CPP kernel to perform tensor upsample.
+ *
+ */
+class CPPUpsampleKernelEx : public ICPPKernel
+{
+public:
+ const char *name() const override { return "CPPUpsampleKernelEx"; }
+ /** Default constructor */
+ CPPUpsampleKernelEx();
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CPPUpsampleKernelEx(const CPPUpsampleKernelEx &) = delete;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CPPUpsampleKernelEx &operator=(const CPPUpsampleKernelEx &) = delete;
+ /** Allow instances of this class to be moved */
+ CPPUpsampleKernelEx(CPPUpsampleKernelEx &&) = default;
+ /** Allow instances of this class to be moved */
+ CPPUpsampleKernelEx &operator=(CPPUpsampleKernelEx &&) = default;
+ /** Default destructor */
+ ~CPPUpsampleKernelEx() = default;
+
+ /** Set the input and output of the kernel.
+ *
+ * @param[in] input The input tensor to upsample. Data types supported: F32/F16/QASYMM8
+ * @param[out] output The output tensor. Data types supported: Same as @p input
+ * @param[in] info Padding info.
+ */
+ void configure(const ITensor *input, ITensor *output, const PadStrideInfo &info);
+
+ // Inherited methods overridden:
+ void run(const Window &window, const ThreadInfo &info) override;
+ bool is_parallelisable() const override;
+
+private:
+ const ITensor *_input;
+ ITensor *_output;
+ PadStrideInfo _info;
+};
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CPPUPSAMPLEKERNEL_EX_H__ */
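A configure/run sketch for this CPP kernel (illustrative; the exact scheduling call differs between ACL releases, so treat it as an assumption):

    arm_compute::CPPUpsampleKernelEx upsample;
    upsample.configure(&input, &output, arm_compute::PadStrideInfo(2, 2, 0, 0)); // stride 2x2, no pad
    arm_compute::Scheduler::get().schedule(&upsample, arm_compute::Window::DimY);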
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/NEElementwiseOperationFuncs.h b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/NEElementwiseOperationFuncs.h
new file mode 100644
index 000000000..358e0ebc6
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/NEElementwiseOperationFuncs.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __ARM_COMPUTE_NEELEMENTWISEOPERATIONFUNCS_H__
+#define __ARM_COMPUTE_NEELEMENTWISEOPERATIONFUNCS_H__
+
+#include <arm_neon.h>
+
+namespace arm_compute
+{
+class ITensor;
+class Window;
+class QuantizationInfo;
+} // namespace arm_compute
+
+namespace arm_compute
+{
+
+float32x4x4_t load_quantized(const uint8_t *input1_ptr, const int32x4_t &offset,
+ const float32x4_t &scale);
+
+void store_quantized(uint8_t *output_ptr, const float32x4x4_t &rf, const float32x4_t &offset,
+ const float32x4_t &invscale);
+
+float32x4x4_t dup_quantized(uint8_t broadcast_value, int offset, float scale);
+
+void elementwise_op_quantized(
+ const ITensor *in1, const ITensor *in2, ITensor *out, const Window &window,
+ uint8_t (*scalar_func)(const float &, const float &, QuantizationInfo),
+ int (*broadcast_func)(int, int, int, const uint8_t *, float32x4x4_t, uint8_t *, int32x4_t,
+ float32x4_t, float32x4_t, float32x4_t, const bool),
+ int (*neon_func)(int, int, int, const uint8_t *, const uint8_t *, uint8_t *, int32x4_t,
+ int32x4_t, float32x4_t, float32x4_t, float32x4_t, float32x4_t));
+
+void elementwise_op(const ITensor *in1, const ITensor *in2, ITensor *out, const Window &window,
+ float (*scalar_func)(const float &, const float &),
+ int (*broadcast_func)(int, int, int, const float *, const float &, float *,
+ const bool),
+ int (*neon_func)(int, int, int, const float *, const float *, float *));
+
+void elementwise_op(const ITensor *in1, const ITensor *in2, ITensor *out, const Window &window,
+ uint8_t (*scalar_func)(const uint8_t &, const uint8_t &),
+ int (*broadcast_func)(int, int, int, const uint8_t *, const uint8_t &,
+ uint8_t *, const bool),
+ int (*neon_func)(int, int, int, const uint8_t *, const uint8_t *, uint8_t *));
+} // namespace arm_compute
+#endif // __ARM_COMPUTE_NEELEMENTWISEOPERATIONFUNCS_H__
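To make the dispatch concrete, here is a sketch of how a scalar "max" could be wired into the F32 overload of elementwise_op above. The loop bodies are scalar stand-ins for the real NEON/broadcast loops, which live in the corresponding .cpp:

    #include <algorithm>

    namespace
    {
    float scalar_max(const float &a, const float &b) { return std::max(a, b); }

    int broadcast_max(int start_x, int end_x, int step_x, const float *in,
                      const float &broadcast_value, float *out, const bool /*reorder*/)
    {
      int x = start_x;
      for (; x <= end_x - step_x; x += step_x)
        for (int i = 0; i < step_x; ++i) // stand-in for one NEON vector op
          out[x + i] = std::max(in[x + i], broadcast_value);
      return x; // the caller finishes the tail with scalar_max
    }

    int neon_max(int start_x, int end_x, int step_x, const float *in1, const float *in2,
                 float *out)
    {
      int x = start_x;
      for (; x <= end_x - step_x; x += step_x)
        for (int i = 0; i < step_x; ++i) // stand-in for one NEON vector op
          out[x + i] = std::max(in1[x + i], in2[x + i]);
      return x;
    }
    } // namespace

    // elementwise_op(in1, in2, out, window, &scalar_max, &broadcast_max, &neon_max);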
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEBinaryLogicalOperationKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEBinaryLogicalOperationKernel.h
new file mode 100644
index 000000000..61992bd50
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEBinaryLogicalOperationKernel.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEBINARYLOGICALOPERATIONKERNEL_H__
+#define __ARM_COMPUTE_NEBINARYLOGICALOPERATIONKERNEL_H__
+
+#include "arm_compute/core/NEON/kernels/NEElementwiseOperationKernel.h"
+#include "arm_compute/core/TypesEx.h"
+
+namespace arm_compute
+{
+
+class NEBinaryLogicalOperationKernel : public NEElementwiseOperationKernel
+{
+public:
+ /** Default destructor */
+ ~NEBinaryLogicalOperationKernel() = default;
+
+ /** Initialise the kernel's inputs and output
+ *
+ * @param[in] op Binary logical operation to be executed.
+ * @param[in] input1 First tensor input. Data types supported: QASYMM8/U8.
+ * @param[in] input2 Second tensor input. Data types supported: Same as @p input1.
+ * @param[out] output Output tensor. Data types supported: Same as @p input1.
+ */
+ void configure(BinaryLogicalOperation op, const ITensor *input1, const ITensor *input2,
+ ITensor *output);
+
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * NEBinaryLogicalOperationKernel
+ *
+ * @param[in] op Binary logical operation to be executed.
+ * @param[in] input1 First tensor input info. Data types supported: QASYMM8/U8.
+ * @param[in] input2 Second tensor input info. Data types supported: Same as @p input1.
+ * @param[in] output Output tensor info. Data types supported: Same as @p input1.
+ *
+ * @return a Status
+ */
+ static Status validate(BinaryLogicalOperation op, const ITensorInfo *input1,
+ const ITensorInfo *input2, const ITensorInfo *output);
+
+protected:
+ // Inherited methods overridden:
+ static Status validate_arguments(const ITensorInfo &input1, const ITensorInfo &input2,
+ const ITensorInfo &output);
+};
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEBINARYLOGICALOPERATIONKERNEL_H__ */
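Following the usual ACL pattern, validate() can reject an unsupported type combination before any configuration happens — a sketch, assuming a, b and out are NEON Tensor objects allocated elsewhere:

    const arm_compute::Status s = arm_compute::NEBinaryLogicalOperationKernel::validate(
        arm_compute::BinaryLogicalOperation::AND, a.info(), b.info(), out.info());
    if (s.error_code() == arm_compute::ErrorCode::OK)
    {
      arm_compute::NEBinaryLogicalOperationKernel kernel;
      kernel.configure(arm_compute::BinaryLogicalOperation::AND, &a, &b, &out);
    }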
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEElementwiseUnaryKernelEx.h b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEElementwiseUnaryKernelEx.h
new file mode 100644
index 000000000..d6fad1155
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEElementwiseUnaryKernelEx.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEELEMENTWISEUNARYKERNELEX_H__
+#define __ARM_COMPUTE_NEELEMENTWISEUNARYKERNELEX_H__
+
+#include "arm_compute/core/NEON/INEKernel.h"
+#include "arm_compute/core/TypesEx.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Interface for an element-wise unary operation kernel
+ *
+ * Element-wise operation is computed by:
+ * @f[ output(x) = OP(input(x))@f]
+ *
+ */
+class NEElementwiseUnaryKernelEx : public INEKernel
+{
+public:
+ const char *name() const override { return "NEElementwiseUnaryKernelEx"; }
+ /** Default constructor */
+ NEElementwiseUnaryKernelEx();
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NEElementwiseUnaryKernelEx(const NEElementwiseUnaryKernelEx &) = delete;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NEElementwiseUnaryKernelEx &operator=(const NEElementwiseUnaryKernelEx &) = delete;
+ /** Allow instances of this class to be moved */
+ NEElementwiseUnaryKernelEx(NEElementwiseUnaryKernelEx &&) = default;
+ /** Allow instances of this class to be moved */
+ NEElementwiseUnaryKernelEx &operator=(NEElementwiseUnaryKernelEx &&) = default;
+ /** Default destructor */
+ ~NEElementwiseUnaryKernelEx() = default;
+
+ /** Initialise the kernel's input and output
+ *
+ * @param[in] op Unary operation to be executed.
+ * @param[in] input Input tensor. Data types supported: F16/F32/S32.
+ * @param[out] output Output tensor. Data types supported: Same as @p input.
+ */
+ void configure(ElementWiseUnaryEx op, const ITensor *input, ITensor *output);
+
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * NEElementwiseUnaryKernelEx
+ *
+ * @param[in] op Arithmetic operation to be executed.
+ * @param[in] input First tensor input info. Data types supported: F16/F32/S32.
+ * @param[in] output Output tensor info. Data types supported: Same as @p input.
+ *
+ * @return a Status
+ */
+ static Status validate(ElementWiseUnaryEx op, const ITensorInfo *input,
+ const ITensorInfo *output);
+
+ // Inherited methods overridden:
+ void run(const Window &window, const ThreadInfo &info) override;
+
+ /** Common signature for all the specialised arithmetic functions
+ *
+ * @param[in] input An input tensor. Data types supported: F16/F32/S32.
+ * @param[out] output The output tensor. Data types supported: Same as @p input.
+ * @param[in] window Region on which to execute the kernel.
+ */
+ using ElementwiseUnaryFunction = void(const ITensor *input, ITensor *output,
+ const Window &window);
+
+protected:
+ // Inherited methods overridden:
+ static Status validate_arguments(const ITensorInfo &input, const ITensorInfo &output);
+
+ /** Function to use for the particular tensor types passed to configure() */
+ std::function<void(const ITensor *input, ITensor *output, const Window &window)> _function;
+
+ const ITensor *_input;
+ ITensor *_output;
+};
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEELEMENTWISEUNARYKERNELEX_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEPReLUKernel.h b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEPReLUKernel.h
new file mode 100644
index 000000000..79bb78661
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/NEON/kernels/NEPReLUKernel.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEPRELUKERNEL_H__
+#define __ARM_COMPUTE_NEPRELUKERNEL_H__
+
+#include "arm_compute/core/NEON/INEKernel.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Interface for the kernel to perform Parametric Rectified Linear Unit
+ *
+ * Result is computed by:
+ * @f[ output(x) = \begin{cases} \alpha \cdot x & x < 0 \\ x & x \geq 0 \end{cases} @f]
+ */
+class NEPReLUKernel : public INEKernel
+{
+public:
+ const char *name() const override { return "NEPReLUKernel"; }
+ /** Default constructor */
+ NEPReLUKernel();
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NEPReLUKernel(const NEPReLUKernel &) = delete;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NEPReLUKernel &operator=(const NEPReLUKernel &) = delete;
+ /** Allow instances of this class to be moved */
+ NEPReLUKernel(NEPReLUKernel &&) = default;
+ /** Allow instances of this class to be moved */
+ NEPReLUKernel &operator=(NEPReLUKernel &&) = default;
+ /** Initialise the kernel's inputs and output
+ *
+ * @param[in] input Input tensor. Data type supported: QASYMM8/F32
+ * @param[in] alpha Alpha tensor. Data types supported: Same as @p input
+ * @param[out] output Output tensor. Data types supported: Same as @p input
+ */
+ void configure(const ITensor *input, const ITensor *alpha, ITensor *output);
+
+ // Inherited methods overridden:
+ void run(const Window &window, const ThreadInfo &info) override;
+
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * NEPReLUKernel
+ *
+ * @param[in] input Input tensor input info. Data types supported: QASYMM8/F32.
+ * @param[in] alpha Alpha tensor input info. Data types supported: Same as @p input.
+ * @param[in] output Output tensor info. Data types supported: Same as @p input.
+ *
+ * @return a Status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *alpha,
+ const ITensorInfo *output);
+ static Status validate_arguments(const ITensorInfo &input, const ITensorInfo &alpha,
+ const ITensorInfo &output);
+
+private:
+ const ITensor *_input; /**< Source tensor */
+ const ITensor *_alpha; /**< Alpha tensor */
+ ITensor *_output; /**< Destination tensor */
+};
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEPRELUKERNEL_H__ */
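The formula in the class comment reduces to a one-line scalar reference (illustration only):

    // output(x) = alpha * x for x < 0, x otherwise
    inline float prelu_ref(float x, float alpha) { return x < 0.0f ? alpha * x : x; }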
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/TypesEx.h b/runtimes/libs/ARMComputeEx/arm_compute/core/TypesEx.h
new file mode 100644
index 000000000..41754632d
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/TypesEx.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_TYPESEX_H__
+#define __ARM_COMPUTE_TYPESEX_H__
+
+namespace arm_compute
+{
+
+/** Available ArgIndex operations */
+enum class ArgOperation
+{
+ MAX,
+ MIN,
+};
+
+/** Available reduce operations */
+enum class ReduceOperation
+{
+ MAX, /**< Max */
+ MEAN, /**< Mean */
+ SUM, /**< Sum */
+ MIN, /**< Min */
+};
+
+/** Available binary logical operations */
+enum class BinaryLogicalOperation
+{
+ AND, /**< AND */
+ OR, /**< OR */
+};
+
+enum class ComparisonOperationEx
+{
+ EQUAL, /**< EQUAL */
+ NOT_EQUAL, /**< NOT_EQUAL */
+};
+
+enum class ElementWiseUnaryEx
+{
+ NEG, /**< NEG */
+};
+
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_TYPESEX_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/UtilsEx.h b/runtimes/libs/ARMComputeEx/arm_compute/core/UtilsEx.h
new file mode 100644
index 000000000..39026e6bb
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/UtilsEx.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_UTILSEX_H__
+#define __ARM_COMPUTE_UTILSEX_H__
+
+#include <utility>
+
+#include "arm_compute/core/Types.h"
+
+namespace arm_compute
+{
+
+/** Returns expected width and height of the transpose convolution's output tensor.
+ *
+ * @note This function was copied in order to fix a bug that computed wrong output dimensions.
+ *
+ * @param[in] in_width Width of input tensor (Number of columns)
+ * @param[in] in_height Height of input tensor (Number of rows)
+ * @param[in] kernel_width Kernel width.
+ * @param[in] kernel_height Kernel height.
+ * @param[in] info padding and stride info.
+ * @param[in] invalid_right The number of zeros added to right edge of the output.
+ * @param[in] invalid_top The number of zeros added to top edge of the output.
+ *
+ * @return A pair with the new width in the first position and the new height in the second.
+ */
+const std::pair<unsigned int, unsigned int>
+transposeconv_output_dimensions(unsigned int in_width, unsigned int in_height,
+ unsigned int kernel_width, unsigned int kernel_height,
+ const PadStrideInfo &info, unsigned int invalid_right,
+ unsigned int invalid_top);
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_UTILSEX_H__ */
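A hypothetical call with illustrative numbers (assuming this copy follows the standard transpose-conv relation out = (in - 1) * stride + kernel for zero padding):

    #include "arm_compute/core/UtilsEx.h"

    const auto dims = arm_compute::transposeconv_output_dimensions(
        14, 14,                                 // in_width, in_height
        3, 3,                                   // kernel_width, kernel_height
        arm_compute::PadStrideInfo(2, 2, 0, 0), // stride 2x2, no padding
        0, 0);                                  // invalid_right, invalid_top
    // dims.first == dims.second == 29, since (14 - 1) * 2 + 3 = 29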
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/core/utils/misc/ShapeCalculatorEx.h b/runtimes/libs/ARMComputeEx/arm_compute/core/utils/misc/ShapeCalculatorEx.h
new file mode 100644
index 000000000..bacb1140c
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/core/utils/misc/ShapeCalculatorEx.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __ARM_COMPUTE_MISC_SHAPE_CALCULATOR_EX_H__
+#define __ARM_COMPUTE_MISC_SHAPE_CALCULATOR_EX_H__
+
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/ITensorInfo.h"
+#include "arm_compute/core/Utils.h"
+
+#include "arm_compute/core/utils/helpers/tensor_transform.h"
+
+#include <cmath>
+
+namespace arm_compute
+{
+namespace misc
+{
+namespace shape_calculator
+{
+
+/** Calculate the upsampled output shape used for transpose convolution
+ *
+ * @param[in] input Input tensor info
+ * @param[in] weights Weights tensor shape
+ * @param[in] info Padding and stride info
+ * @param[in] out_dims Output shape dimensions
+ * @param[in] invalid_right The number of zeros added to right edge of the output.
+ * @param[in] invalid_bottom The number of zeros added to bottom edge of the output.
+ * @param[out] pad_left Padding on left
+ * @param[out] pad_right Padding on right
+ * @param[out] pad_top Padding on top
+ * @param[out] pad_bottom Padding on bottom
+ *
+ * @return the calculated shape
+ */
+inline TensorShape compute_transposeconv_upsampled_shape(
+ const ITensorInfo &input, const ITensorInfo &weights, const PadStrideInfo &info,
+ std::pair<unsigned int, unsigned int> &out_dims, unsigned int invalid_right,
+ unsigned int invalid_bottom, unsigned int &pad_left, unsigned int &pad_right,
+ unsigned int &pad_top, unsigned int &pad_bottom)
+{
+ unsigned int sx = info.stride().first;
+ unsigned int sy = info.stride().second;
+ const DataLayout data_layout = input.data_layout();
+ const size_t idx_w = get_data_layout_dimension_index(data_layout, DataLayoutDimension::WIDTH);
+ const size_t idx_h = get_data_layout_dimension_index(data_layout, DataLayoutDimension::HEIGHT);
+
+ // Find the upsampled dimensions
+ // transpose conv out:
+ // tconv_out + pad = 1 + (in - 1) * stride + invalid
+ // tconv_out = 1 + (in - 1) * stride + invalid - pad
+ // upsample out:
+ // upsample_out = 1 + (in - 1) * stride
+ unsigned int out_x = (input.dimension(idx_w) - 1) * sx + 1;
+ unsigned int out_y = (input.dimension(idx_h) - 1) * sy + 1;
+
+ // Find the padding needed for the convolution with stride 1 in order to match output shape
+ // upsample+pad out:
+ // upsample_out + pad = tconv_out + kernel - 1
+ // pad = tconv_out + kernel - 1 - upsample_out
+ unsigned int padx = out_dims.first - (out_x - weights.dimension(idx_w) + 1);
+ unsigned int pady = out_dims.second - (out_y - weights.dimension(idx_h) + 1);
+ out_x += padx;
+ out_y += pady;
+
+ // Distribute the remaining padding: the x-axis uses padx, the y-axis uses pady
+ unsigned int padx_all_except_invalid = padx + info.pad_left() + info.pad_right() - invalid_right;
+ unsigned int pady_all_except_invalid =
+ pady + info.pad_top() + info.pad_bottom() - invalid_bottom;
+ pad_left = (padx_all_except_invalid + 1) / 2 - info.pad_left();
+ pad_right = padx_all_except_invalid / 2 - info.pad_right() + invalid_right;
+ pad_top = (pady_all_except_invalid + 1) / 2 - info.pad_top();
+ pad_bottom = pady_all_except_invalid / 2 - info.pad_bottom() + invalid_bottom;
+
+ TensorShape scale_out_shape(input.tensor_shape());
+ scale_out_shape.set(idx_w, out_x);
+ scale_out_shape.set(idx_h, out_y);
+
+ return scale_out_shape;
+}
+
+/** Calculate the output shape of the transpose convolution layer
+ *
+ * @param[in] out_dims Output x and y shape dimensions
+ * @param[in] input Input tensor info
+ * @param[in] weights Weights tensor shape
+ *
+ * @return the calculated shape
+ */
+inline TensorShape
+compute_transposeconv_output_shape(const std::pair<unsigned int, unsigned int> &out_dims,
+ const ITensorInfo &input, const ITensorInfo &weights)
+{
+ const TensorShape input_shape{input.tensor_shape()};
+ const TensorShape weights_shape{weights.tensor_shape()};
+
+ const DataLayout data_layout = input.data_layout();
+ const int width_idx = get_data_layout_dimension_index(data_layout, DataLayoutDimension::WIDTH);
+ const int height_idx = get_data_layout_dimension_index(data_layout, DataLayoutDimension::HEIGHT);
+ const int channel_idx =
+ get_data_layout_dimension_index(data_layout, DataLayoutDimension::CHANNEL);
+ const int batch_idx = get_data_layout_dimension_index(data_layout, DataLayoutDimension::BATCHES);
+
+ TensorShape out_shape{input_shape};
+ out_shape.set(width_idx, out_dims.first);
+ out_shape.set(height_idx, out_dims.second);
+ out_shape.set(channel_idx, weights_shape[batch_idx]);
+ return out_shape;
+}
+
+} // namespace shape_calculator
+} // namespace misc
+} // namespace arm_compute
+
+#endif // __ARM_COMPUTE_MISC_SHAPE_CALCULATOR_EX_H__
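Working the shape comments above through with numbers: for a 7-wide input, stride 2 and a 3x3 kernel targeting a 14-wide transpose-conv output, the upsampled width is 1 + (7 - 1) * 2 = 13; matching the target then needs padx = 14 - (13 - 3 + 1) = 3 extra columns, so the intermediate tensor becomes 16 wide, and the final stride-1 convolution brings it back to 16 - 3 + 1 = 14.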
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/CLFunctionsEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/CLFunctionsEx.h
new file mode 100644
index 000000000..5fbbb2556
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/CLFunctionsEx.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_CLFUNCTIONSEX_H__
+#define __ARM_COMPUTE_CLFUNCTIONSEX_H__
+
+#include <arm_compute/runtime/CL/functions/CLArgOperation.h>
+#include <arm_compute/runtime/CL/functions/CLBatchToSpaceND.h>
+#include <arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h>
+#include <arm_compute/runtime/CL/functions/CLCast.h>
+#include <arm_compute/runtime/CL/functions/CLDepthToSpace.h>
+#include <arm_compute/runtime/CL/functions/CLEmbeddingLookup.h>
+#include <arm_compute/runtime/CL/functions/CLFullyConnectedReshapingLayer.h>
+#include <arm_compute/runtime/CL/functions/CLGatherEx.h>
+#include <arm_compute/runtime/CL/functions/CLHashtableLookup.h>
+#include <arm_compute/runtime/CL/functions/CLLogicalNot.h>
+#include <arm_compute/runtime/CL/functions/CLNeg.h>
+#include <arm_compute/runtime/CL/functions/CLPixelWiseDivision.h>
+#include <arm_compute/runtime/CL/functions/CLPReLU.h>
+#include <arm_compute/runtime/CL/functions/CLReduceOperation.h>
+#include <arm_compute/runtime/CL/functions/CLRNNLayerEx.h>
+#include <arm_compute/runtime/CL/functions/CLSpaceToBatchND.h>
+#include <arm_compute/runtime/CL/functions/CLSpaceToDepth.h>
+#include <arm_compute/runtime/CL/functions/CLSplit.h>
+#include <arm_compute/runtime/CL/functions/CLStridedSliceEx.h>
+#include <arm_compute/runtime/CL/functions/CLTopKV2.h>
+#include <arm_compute/runtime/CL/functions/CLTransposeConvLayer.h>
+
+#endif // __ARM_COMPUTE_CLFUNCTIONSEX_H__
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgOperation.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgOperation.h
new file mode 100644
index 000000000..d9d0d4d35
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLArgOperation.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file CLArgOperation.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains arm_compute::CLArgOperation class
+ */
+
+#ifndef __ARM_COMPUTE_CLARGOPERATION_H__
+#define __ARM_COMPUTE_CLARGOPERATION_H__
+
+#include "arm_compute/core/CL/kernels/CLArgOperationKernel.h"
+#include "arm_compute/runtime/CL/CLTensor.h"
+#include "arm_compute/runtime/IFunction.h"
+#include "arm_compute/core/TypesEx.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/**
+ * @brief Class to execute CLArgOperation operation
+ */
+class CLArgOperation : public IFunction
+{
+public:
+ /**
+ * @brief Construct a new CLArgOperation object
+ */
+ CLArgOperation();
+
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers)
+ */
+ CLArgOperation(const CLArgOperation &) = delete;
+
+ /**
+ * @brief Prevent instances of this class from being copied (As this class contains pointers)
+ */
+ CLArgOperation &operator=(const CLArgOperation &) = delete;
+
+ /**
+ * @brief Construct a new CLArgOperation object by using the move constructor
+ * @param[in] CLArgOperation object to move
+ */
+ CLArgOperation(CLArgOperation &&) = default;
+
+ /**
+ * @brief Assign a CLArgOperation object.
+ * @param[in] CLArgOperation object to assign. This object will be moved.
+ */
+ CLArgOperation &operator=(CLArgOperation &&) = default;
+
+ /**
+ * @brief Initialise the kernel's inputs and outputs.
+ * @param[in] input Input tensor. Data types supported: U8/QASYMM8/S32/F32.
+ * @param[out] output The result of arg operation. Data types supported: S32.
+ * @param[in] axis Axis along which to reduce. It must be sorted and contain no duplicates.
+ * @param[in] op Arg operation to perform.
+ * @return N/A
+ */
+ void configure(ICLTensor *input, ICLTensor *output, std::vector<uint32_t> axis, ArgOperation op);
+
+ /**
+ * @brief Static function to check if given info will lead to a valid configuration
+ * @param[in] input Input tensor. Data types supported: U8/QASYMM8/S32/F32.
+ * @param[in] axis Axis along which to reduce. It must be sorted and contain no duplicates.
+ * @param[out] output The result of arg operation. Data types supported: S32.
+ * @param[in] op Arg operation to perform.
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const std::vector<uint32_t> &axis,
+ const ITensorInfo *output, ArgOperation op);
+ /**
+ * @brief Run the OpenCL kernel for this operation
+ * @return N/A
+ */
+ void run() override;
+
+private:
+ ICLTensor *_input{nullptr};
+ ICLTensor *_output{nullptr};
+ std::vector<uint32_t> _axis{};
+ ArgOperation _arg_op{ArgOperation::MAX};
+
+ std::unique_ptr<CLTensor[]> _interm_tensors{nullptr};
+ std::unique_ptr<CLArgOperationKernel[]> _argop_kernels{nullptr};
+ size_t _num_of_kernels{0};
+};
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CLARGOPERATION_H__ */
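A minimal usage sketch for the function above (illustrative; tensors are assumed to be allocated elsewhere, with output in S32):

    arm_compute::CLArgOperation argmax;
    argmax.configure(&input, &output, {1} /*axis*/, arm_compute::ArgOperation::MAX);
    argmax.run();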
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBatchToSpaceND.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBatchToSpaceND.h
index d16a0762d..d16a0762d 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBatchToSpaceND.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBatchToSpaceND.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h
index 061e34f26..061e34f26 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLCast.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLCast.h
index 56b8408e2..56b8408e2 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLCast.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLCast.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLDepthToSpace.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLDepthToSpace.h
index d78a6ada4..d78a6ada4 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLDepthToSpace.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLDepthToSpace.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLEmbeddingLookup.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLEmbeddingLookup.h
index 257772a89..257772a89 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLEmbeddingLookup.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLEmbeddingLookup.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLFullyConnectedReshapingLayer.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLFullyConnectedReshapingLayer.h
new file mode 100644
index 000000000..0867cf6bb
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLFullyConnectedReshapingLayer.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file        CLFullyConnectedReshapingLayer.h
+ * @brief       This file contains CLFullyConnectedReshapingLayer class
+ * @ingroup     COM_AI_RUNTIME
+ */
+
+#ifndef __ARM_COMPUTE_CL_FULLY_CONNECTED_RESHAPING_LAYER_H__
+#define __ARM_COMPUTE_CL_FULLY_CONNECTED_RESHAPING_LAYER_H__
+
+#include <arm_compute/runtime/CL/functions/CLFullyConnectedLayer.h>
+#include <arm_compute/runtime/misc/functions/GenericReshapeLayer.h>
+#include <arm_compute/runtime/IMemoryManager.h>
+
+namespace arm_compute
+{
+/**
+ * @brief Class to run FullyConnected Layer after reshaping input tensor
+ */
+class CLFullyConnectedReshapingLayer : public arm_compute::IFunction
+{
+public:
+ CLFullyConnectedReshapingLayer(std::shared_ptr<IMemoryManager> memory_manager = nullptr)
+ : _input(nullptr), _weights(nullptr), _biases(nullptr), _output(nullptr), _cl_buffer{},
+ _cl_fc{memory_manager}, _cl_reshape{}, _needs_reshape(false)
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Configure the layer
+ * @param[in] input The source tensor
+ * @param[in] weights The tensor that is filled with weight values
+ * @param[in] biases The tensor that is filled with bias values
+ * @param[in] output The destination tensor
+ * @param[in] needs_reshape Whether it needs to be reshaped or not
+ * @param[in] reshape The tensor shape to be reshaped. Only valid when needs_reshape is true.
+ * @return N/A
+ */
+ void configure(const arm_compute::ICLTensor *input, const arm_compute::ICLTensor *weights,
+ const arm_compute::ICLTensor *biases, arm_compute::ICLTensor *output,
+ bool needs_reshape, const arm_compute::TensorShape &reshape);
+
+public:
+ /**
+ * @brief Run the operation. Must be called after configure().
+ * @return N/A
+ */
+ void run(void) override;
+
+private:
+ const arm_compute::ICLTensor *_input;
+ const arm_compute::ICLTensor *_weights;
+ const arm_compute::ICLTensor *_biases;
+ arm_compute::ICLTensor *_output;
+
+ // buffer for reshaping input tensor
+ arm_compute::CLTensor _cl_buffer;
+
+private:
+ arm_compute::CLFullyConnectedLayer _cl_fc;
+ // TODO Change to CLReshapeLayer
+ arm_compute::misc::GenericReshapeLayer _cl_reshape;
+ bool _needs_reshape;
+};
+} // namespace arm_compute
+
+#endif // __ARM_COMPUTE_CL_FULLY_CONNECTED_RESHAPING_LAYER_H__
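A configure sketch (illustrative numbers): flattening a 2x2x8 feature map into a 32-element vector before the fully-connected layer:

    arm_compute::CLFullyConnectedReshapingLayer fc;
    fc.configure(&input, &weights, &biases, &output,
                 /*needs_reshape=*/true, arm_compute::TensorShape(32U, 1U));
    fc.run(); // per inference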
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGatherEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGatherEx.h
new file mode 100644
index 000000000..04d227aa7
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLGatherEx.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file CLGatherEx.h
+ * @brief This file contains CLGatherEx class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __ARM_COMPUTE_CLGATHEREX_H__
+#define __ARM_COMPUTE_CLGATHEREX_H__
+
+#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/**
+ * @brief Class to run @ref CLGatherExKernel.
+ */
+class CLGatherEx : public ICLSimpleFunction
+{
+public:
+ /**
+ * @brief Initialise the kernel's inputs and output.
+ * @param[in] input An input tensor. Data types supported: U8/QASYMM8/S32/F32.
+ * @param[in] indices An indices tensor. Data types supported: S32.
+ * @param[out] output The output tensor. Data types supported: same as @p input.
+ * @param[in] axis (Optional) The axis in @p input to gather @p indices from. Defaults to 0
+ * @return N/A
+ */
+ void configure(const ICLTensor *input, const ICLTensor *indices, ICLTensor *output, int axis = 0);
+
+ /**
+ * @brief Static function to check if given info will lead to a valid configuration
+ * of @ref CLGatherEx
+ * @param[in] input An input tensor info. Data types supported: U8/QASYMM8/S32/F32.
+ * @param[in] indices An indices tensor info. Data types supported: S32.
+ * @param[in] output The output tensor info. Data types supported: same as @p input.
+ * @param[in] axis (Optional) The axis in @p input to gather @p indices from. Defaults to 0
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *indices,
+ const ITensorInfo *output, int axis = 0);
+};
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CLGATHEREX_H__ */
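An end-to-end sketch for the simple-function wrapper above (illustrative shapes and axis; assumes the default CL scheduler is available):

    #include <arm_compute/runtime/CL/CLScheduler.h>
    #include <arm_compute/runtime/CL/CLTensor.h>
    #include <arm_compute/runtime/CL/functions/CLGatherEx.h>

    int main()
    {
      arm_compute::CLScheduler::get().default_init();

      arm_compute::CLTensor input, indices, output;
      input.allocator()->init(
          arm_compute::TensorInfo(arm_compute::TensorShape(4U, 3U), 1, arm_compute::DataType::F32));
      indices.allocator()->init(
          arm_compute::TensorInfo(arm_compute::TensorShape(2U), 1, arm_compute::DataType::S32));
      output.allocator()->init(
          arm_compute::TensorInfo(arm_compute::TensorShape(4U, 2U), 1, arm_compute::DataType::F32));

      arm_compute::CLGatherEx gather;
      gather.configure(&input, &indices, &output, /*axis=*/1); // pick 2 of the 3 rows

      input.allocator()->allocate();
      indices.allocator()->allocate();
      output.allocator()->allocate();
      // ... map the tensors and fill input/indices here ...

      gather.run();
      arm_compute::CLScheduler::get().sync();
      return 0;
    }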
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLHashtableLookup.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLHashtableLookup.h
index 65aa6cbd5..65aa6cbd5 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLHashtableLookup.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLHashtableLookup.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLLogicalNot.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLLogicalNot.h
new file mode 100644
index 000000000..4bf203c5a
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLLogicalNot.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_CLLOGICALNOT_H__
+#define __ARM_COMPUTE_CLLOGICALNOT_H__
+
+#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+class CLLogicalNot : public ICLSimpleFunction
+{
+public:
+ /** Initialise the function's source and destination.
+ *
+ * @param[in] input Source tensor. Data types supported: QASYMM8.
+ * @param[out] output Output tensor. Data types supported: QASYMM8.
+ */
+ void configure(ICLTensor *input, ICLTensor *output);
+};
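+
+/*
+ * A minimal usage sketch (illustrative only; the QASYMM8 tensors are assumed
+ * to be configured and allocated elsewhere):
+ *
+ *   CLLogicalNot logical_not;
+ *   logical_not.configure(&input, &output);
+ *   logical_not.run(); // run() is inherited from ICLSimpleFunction
+ */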
+
+} // namespace arm_compute
+#endif /*__ARM_COMPUTE_CLLOGICALNOT_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNeg.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNeg.h
index 198a0fd4e..198a0fd4e 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNeg.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLNeg.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPReLU.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPReLU.h
index 622a61b5e..622a61b5e 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPReLU.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPReLU.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPixelWiseDivision.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPixelWiseDivision.h
index b142d3a2e..b142d3a2e 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPixelWiseDivision.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLPixelWiseDivision.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLRNNLayerEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLRNNLayerEx.h
new file mode 100644
index 000000000..7e88cb369
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLRNNLayerEx.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_CLRNN_LAYER_EX_H__
+#define __ARM_COMPUTE_CLRNN_LAYER_EX_H__
+
+#include "arm_compute/core/CL/kernels/CLActivationLayerKernel.h"
+#include "arm_compute/core/CL/kernels/CLCopyKernel.h"
+#include "arm_compute/core/CL/kernels/CLElementwiseOperationKernel.h"
+#include "arm_compute/runtime/CL/ICLSimpleFunction.h"
+#include "arm_compute/runtime/CL/functions/CLFullyConnectedLayer.h"
+#include "arm_compute/runtime/CL/functions/CLGEMM.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/** Basic function to run @ref CLRNNLayerEx */
+class CLRNNLayerEx : public IFunction
+{
+public:
+ /** Default constructor */
+ CLRNNLayerEx(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
+ /** Initialize the function
+ *
+ * @param[in] input Input is a 2-D tensor of shape [input_size, batch_size]. Data
+ * types supported: F16/F32
+ * @param[in] weights Weights tensor of shape [input_size, num_units] that
+ * multiplies the input. Data types supported: Same as @p input
+ * @param[in] recurrent_weights Weights tensor of shape [num_units, num_units] that multiplies
+ * the current 'state'. Data types supported: Same as @p input
+ * @param[in] bias Bias vector of shape [num_units]. Data types supported: Same
+ * as @p input
+ * @param[out] output Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in,out] hidden_state Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in] info Activation layer parameter.
+ */
+ void configure(const ICLTensor *input, const ICLTensor *weights,
+ const ICLTensor *recurrent_weights, const ICLTensor *bias, ICLTensor *hidden_state,
+ ICLTensor *output, ActivationLayerInfo &info);
+ /** Static function to check if given info will lead to a valid configuration of @ref CLRNNLayerEx
+ *
+ * @param[in] input Input is a 2-D tensor of shape [input_size, batch_size]. Data
+ * types supported: F16/F32
+ * @param[in] weights Weights tensor of shape [input_size, num_units] that multiplies
+ * the input. Data types supported: Same as @p input
+ * @param[in] recurrent_weights Weights tensor of shape [num_units, num_units] that multiplies the
+ * current 'state'. Data types supported: Same as @p input
+ * @param[in] bias Bias vector of shape [num_units]. Data types supported: Same as @p
+ * input
+ * @param[in] output Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in] hidden_state Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in] info Activation layer parameter.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *recurrent_weights, const ITensorInfo *bias,
+ const ITensorInfo *hidden_state, const ITensorInfo *output,
+ const ActivationLayerInfo &info);
+
+ // Inherited methods overridden:
+ void run() override;
+ void prepare() override;
+
+private:
+ CLMemoryGroup _memory_group;
+ CLGEMM _gemm_state_f;
+ CLSaturatedArithmeticOperationKernel _add_kernel;
+ CLActivationLayerKernel _activation_kernel;
+ CLFullyConnectedLayer _fully_connected_kernel;
+ CLCopyKernel _copy_kernel;
+ CLTensor _fully_connected_out;
+ CLTensor _gemm_output;
+ CLTensor _add_output;
+ bool _is_prepared;
+};
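+
+/*
+ * A minimal usage sketch (illustrative only; the tensors are assumed to be
+ * shaped as documented above and allocated elsewhere):
+ *
+ *   ActivationLayerInfo act(ActivationLayerInfo::ActivationFunction::TANH);
+ *   CLRNNLayerEx rnn;
+ *   rnn.configure(&input, &weights, &recurrent_weights, &bias, &hidden_state,
+ *                 &output, act);
+ *   rnn.run();
+ */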
+}
+#endif /* __ARM_COMPUTE_CLRNN_LAYER_EX_H__ */
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLReduceOperation.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLReduceOperation.h
index e1a6f6ab4..e1a6f6ab4 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLReduceOperation.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLReduceOperation.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToBatchND.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToBatchND.h
index 7e2df8986..7e2df8986 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToBatchND.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToBatchND.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToDepth.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToDepth.h
index 17f762092..17f762092 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToDepth.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLSpaceToDepth.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLStridedSliceEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLStridedSliceEx.h
index 6b26a85c8..6b26a85c8 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLStridedSliceEx.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLStridedSliceEx.h
diff --git a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTopKV2.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTopKV2.h
index 5327e016f..5327e016f 100644
--- a/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTopKV2.h
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTopKV2.h
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayer.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayer.h
new file mode 100644
index 000000000..340a7bfe9
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayer.h
@@ -0,0 +1,171 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_CLTRANSPOSECONVLAYER_H__
+#define __ARM_COMPUTE_CLTRANSPOSECONVLAYER_H__
+
+#include "arm_compute/runtime/CL/functions/CLConvolutionLayer.h"
+#include "arm_compute/runtime/CL/functions/CLTransposeConvLayerUpsample.h"
+
+#include "arm_compute/core/CPP/kernels/CPPFlipWeightsKernel.h"
+
+#include "arm_compute/runtime/CL/CLMemoryGroup.h"
+#include "arm_compute/runtime/CL/CLTensor.h"
+#include "arm_compute/runtime/IFunction.h"
+#include "arm_compute/runtime/IMemoryManager.h"
+
+#include <memory>
+
+namespace arm_compute
+{
+class ICLTensor;
+/** Function to run the transpose convolution layer.
+ *
+ * @note This layer was copied in order to fix a bug that computed wrong output dimensions.
+ *
+ * TransposeConv Layer is the backward pass of Convolution Layer. First we transform the input
+ * depending on the stride and pad info, and then perform a 1x1
+ * convolution pass. The input stride defines how many zeroes we should put between each element
+ * of the input, pad is the amount of padding, and finally a is a user-specified
+ * value with a < stride - 1 that increases the padding at the top and right of the input
+ * image.
+ *
+ * The relation between input and output is as follows:
+ * \f[
+ * width\_output = (width\_input - 1) \cdot stride\_x - 2 \cdot padding\_x + kernel\_x
+ * \f]
+ * \f[
+ * height\_output = (height\_input - 1) \cdot stride\_y - 2 \cdot padding\_y + kernel\_y
+ * \f]
+ *
+ * where:
+ * width_input is the size of the first input dimension.
+ * height_input is the size of the second input dimension.
+ * width_output is the size of the first output dimension.
+ * height_output is the size of the second output dimension.
+ * kernel_x and kernel_y are the convolution sizes in x and y.
+ * stride_x and stride_y are the input strides of the first and second dimension.
+ *
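+ * As a worked example of the relation above (numbers chosen purely for
+ * illustration): with width_input = 4, stride_x = 2, padding_x = 1 and
+ * kernel_x = 3, width_output = (4 - 1) * 2 - 2 * 1 + 3 = 7.
+ *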
+ * The weights used by Deconvolution are supposed to be the same as the ones used for Convolution.
+ * Therefore, it will be necessary to use the weights in the
+ * reverse order to perform an actual convolution. This is achieved by using the @ref
+ * CPPFlipWeightsKernel.
+ *
+ * This function calls the following OpenCL kernels/functions:
+ *
+ * -# @ref CLTransposeConvLayerUpsample
+ * -# @ref CLConvolutionLayer
+ *
+ */
+class CLTransposeConvLayer : public IFunction
+{
+public:
+ /** Constructor */
+ CLTransposeConvLayer(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLTransposeConvLayer(const CLTransposeConvLayer &) = delete;
+ /** Default move constructor */
+ CLTransposeConvLayer(CLTransposeConvLayer &&) = default;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLTransposeConvLayer &operator=(const CLTransposeConvLayer &) = delete;
+ /** Default move assignment operator */
+ CLTransposeConvLayer &operator=(CLTransposeConvLayer &&) = default;
+ /** Set the input, weights, biases and output tensors.
+ *
+ * @param[in,out] input Input tensor. 3 lower dimensions represent a single input,
+ * and an optional 4th dimension for batch of inputs.
+ * Data types supported: QASYMM8/F16/F32.
+ * @param[in] weights The 4d weights with dimensions [width, height, IFM, OFM].
+ * Data type supported: Same as @p input.
+ * @param[in] bias (Optional) The biases have one dimension. Data type supported:
+ * Same as @p input.
+ * @param[out] output Output tensor. The output has the same number of dimensions
+ * as the @p input.
+ * @param[in] info Contains padding and policies to be used in the
+ * transpose convolution, this is described in @ref PadStrideInfo.
+ * @param[in] invalid_right The number of zeros added to the right edge of the output.
+ * @param[in] invalid_bottom The number of zeros added to the bottom edge of the output.
+ * @param[in] weights_info (Optional) Weights information needed for @ref
+ * CLConvolutionLayer, specifies if the weights tensor has been
+ * reshaped with @ref CLWeightsReshapeKernel.
+ */
+ void configure(ICLTensor *input, ICLTensor *weights, const ICLTensor *bias, ICLTensor *output,
+ const PadStrideInfo &info, unsigned int invalid_right, unsigned int invalid_bottom,
+ const WeightsInfo &weights_info = WeightsInfo());
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * CLTransposeConvLayer
+ *
+ * @param[in] input Input tensor info. 3 lower dimensions represent a single input,
+ * and an optional 4th dimension for batch of inputs.
+ * Data types supported: QASYMM8/F16/F32.
+ * @param[in] weights The 4d weights info with dimensions [width, height, IFM, OFM].
+ * Data type supported: Same as @p input.
+ * @param[in] bias (Optional) The biases have one dimension. Data type supported:
+ * Same as @p input.
+ * @param[in] output Output tensor info. The output has the same number of dimensions
+ * as the @p input.
+ * @param[in] info Contains padding and policies to be used in the
+ * transpose convolution, this is described in @ref PadStrideInfo.
+ * @param[in] invalid_right The number of zeros added to the right edge of the output.
+ * @param[in] invalid_bottom The number of zeros added to the bottom edge of the output.
+ * @param[in] weights_info (Optional) Weights information needed for @ref CLConvolutionLayer,
+ * specifies if the weights tensor has been reshaped with @ref
+ * CLWeightsReshapeKernel.
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *bias, ITensorInfo *output, const PadStrideInfo &info,
+ unsigned int invalid_right, unsigned int invalid_bottom,
+ const WeightsInfo &weights_info = WeightsInfo());
+
+ // Inherited methods overridden:
+ void run() override;
+ void prepare() override;
+
+private:
+ CLMemoryGroup _memory_group;
+ CLTransposeConvLayerUpsample _scale_f;
+ CLConvolutionLayer _conv_f;
+ CPPFlipWeightsKernel _flip_weights;
+ CLTensor _scaled_output;
+ ICLTensor *_original_weights;
+ CLTensor _weights_flipped;
+ bool _is_prepared;
+};
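+
+/*
+ * A minimal usage sketch (illustrative only; tensor shapes follow the
+ * configure() documentation and allocation is assumed to happen elsewhere):
+ *
+ *   CLTransposeConvLayer deconv;
+ *   deconv.configure(&input, &weights, &bias, &output,
+ *                    PadStrideInfo(2, 2, 1, 1), 0, 0); // stride 2, pad 1
+ *   deconv.run();
+ */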
+}
+#endif /* __ARM_COMPUTE_CLTRANSPOSECONVLAYER_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayerUpsample.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayerUpsample.h
new file mode 100644
index 000000000..4ae0e1830
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CL/functions/CLTransposeConvLayerUpsample.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_CLTRANSPOSECONVLAYERUPSAMPLE_H__
+#define __ARM_COMPUTE_CLTRANSPOSECONVLAYERUPSAMPLE_H__
+
+#include "arm_compute/runtime/IFunction.h"
+
+#include "arm_compute/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/runtime/CL/CLMemoryGroup.h"
+#include "arm_compute/runtime/IFunction.h"
+#include "arm_compute/runtime/IMemoryManager.h"
+
+namespace arm_compute
+{
+class ICLTensor;
+
+/** Basic function to run @ref CLTransposeConvLayerUpsampleKernel */
+class CLTransposeConvLayerUpsample : public IFunction
+{
+public:
+ /** Default constructor */
+ CLTransposeConvLayerUpsample();
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLTransposeConvLayerUpsample(const CLTransposeConvLayerUpsample &) = delete;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ CLTransposeConvLayerUpsample &operator=(const CLTransposeConvLayerUpsample &) = delete;
+ /** Allow instances of this class to be moved */
+ CLTransposeConvLayerUpsample(CLTransposeConvLayerUpsample &&) = default;
+ /** Allow instances of this class to be moved */
+ CLTransposeConvLayerUpsample &operator=(CLTransposeConvLayerUpsample &&) = default;
+ /** Default destructor */
+ virtual ~CLTransposeConvLayerUpsample() = default;
+
+ /** Initialize the function's source, destination, inner border and pad/stride info.
+ *
+ * @param[in, out] input Source tensor. Data type supported: QASYMM8/F16/F32.
+ * @param[out] output Destination tensor. Data type supported: same as @p input.
+ * @param[in] inner_border The number of zeros added to right and top edges of the input.
+ * @param[in] info Contains padding and policies to be used in the deconvolution.
+ */
+ void configure(ICLTensor *input, ICLTensor *output, const BorderSize &inner_border,
+ const PadStrideInfo &info);
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * CLTransposeConvLayerUpsample
+ *
+ * @param[in] input Source tensor info. Data type supported: QASYMM8/F16/F32.
+ * @param[in] output Destination tensor info. Data type supported: same as @p input.
+ * @param[in] inner_border The number of zeros added to right and top edges of the input.
+ * @param[in] info Contains padding and policies to be used in the deconvolution.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *output,
+ const BorderSize &inner_border, const PadStrideInfo &info);
+
+ // Inherited methods overridden:
+ void run() override;
+
+private:
+ CLTransposeConvLayerUpsampleKernel _upsample;
+ ICLTensor *_output;
+};
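+
+/*
+ * A minimal usage sketch (illustrative only; the tensors and the border and
+ * stride values are assumptions of this example):
+ *
+ *   CLTransposeConvLayerUpsample upsample;
+ *   upsample.configure(&input, &output, BorderSize(1, 1), PadStrideInfo(2, 2, 1, 1));
+ *   upsample.run();
+ */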
+}
+#endif /* __ARM_COMPUTE_CLTRANSPOSECONVLAYERUPSAMPLE_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/CPP/functions/CPPUpsampleEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CPP/functions/CPPUpsampleEx.h
new file mode 100644
index 000000000..8e7e2f937
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/CPP/functions/CPPUpsampleEx.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_CPPUPSAMPLE_EX_H__
+#define __ARM_COMPUTE_CPPUPSAMPLE_EX_H__
+
+#include "arm_compute/runtime/CPP/ICPPSimpleFunction.h"
+
+#include "arm_compute/core/Types.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Basic function to run @ref CPPUpsample */
+class CPPUpsampleEx : public ICPPSimpleFunction
+{
+public:
+ /** Configure the upsample CPP kernel
+ *
+ * @param[in] input The input tensor to upsample. Data types supported: F32/F16/QASYMM8
+ * @param[out] output The output tensor. Data types supported: Same as @p input
+ * @param[in] info Padding information
+ */
+ void configure(const ITensor *input, ITensor *output, const PadStrideInfo &info);
+};
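+
+/*
+ * A minimal usage sketch (illustrative only; the tensors are assumed to be
+ * allocated elsewhere):
+ *
+ *   CPPUpsampleEx upsample;
+ *   upsample.configure(&input, &output, PadStrideInfo(2, 2, 1, 1));
+ *   upsample.run(); // run() is inherited from ICPPSimpleFunction
+ */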
+}
+#endif /* __ARM_COMPUTE_CPPUPSAMPLE_EX_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/NEFunctionsEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/NEFunctionsEx.h
new file mode 100644
index 000000000..af1adea62
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/NEFunctionsEx.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __ARM_COMPUTE_NEFUNCTIONSEX_H__
+#define __ARM_COMPUTE_NEFUNCTIONSEX_H__
+
+#include <arm_compute/runtime/NEON/functions/NEArgMinMax.h>
+#include <arm_compute/runtime/NEON/functions/NEBinaryLogicalOperation.h>
+#include <arm_compute/runtime/NEON/functions/NEElementwiseUnaryLayerEx.h>
+#include <arm_compute/runtime/NEON/functions/NEFullyConnectedReshapingLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEPReLU.h>
+#include <arm_compute/runtime/NEON/functions/NEReduceMeanEx.h>
+#include <arm_compute/runtime/NEON/functions/NEReduceSum.h>
+#include <arm_compute/runtime/NEON/functions/NERNNLayerEx.h>
+#include <arm_compute/runtime/NEON/functions/NETransposeConvLayer.h>
+
+#endif // __ARM_COMPUTE_NEFUNCTIONSEX_H__
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEArgMinMax.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEArgMinMax.h
new file mode 100644
index 000000000..604cd93c4
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEArgMinMax.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEON_ARG_MIN_MAX_H__
+#define __ARM_COMPUTE_NEON_ARG_MIN_MAX_H__
+
+#include "arm_compute/runtime/IFunction.h"
+
+#include "arm_compute/core/NEON/kernels/NEFillBorderKernel.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/runtime/MemoryGroup.h"
+#include "arm_compute/runtime/NEON/functions/NEArgMinMaxLayer.h"
+#include "arm_compute/runtime/NEON/functions/NEReshapeLayer.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Basic function to perform an arg min/max (index of min/max) reduction operation */
+template <ReductionOperation op> class NEArgMinMaxStatic : public IFunction
+{
+public:
+ /** Constructor */
+ NEArgMinMaxStatic(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
+ /** Configure kernel
+ *
+ * @note Supported tensor rank: up to 4
+ *
+ * @param[in] input Source tensor. Data type supported: QASYMM8/F16/F32
+ * @param[in] axis Reduction axis.
+ * @param[out] output Destination tensor. Data type supported: Same as @p input
+ */
+ void configure(ITensor *input, int axis, ITensor *output);
+
+ /** Static function to check if given info will lead to a valid configuration of @ref NEArgMinMaxStatic
+ *
+ * @param[in] input Source tensor. Data type supported: QASYMM8/F16/F32
+ * @param[in] axis Reduction axis.
+ * @param[in] output Destination tensor. Data type supported: Same as @p input
+ *
+ * @return A status
+ */
+ static Status validate(const ITensorInfo *input, int axis, const ITensorInfo *output);
+
+ // Inherited methods overridden:
+ void run() override;
+
+private:
+ MemoryGroup _memory_group;
+ NEArgMinMaxLayer _reduction_kernel;
+ Tensor _reduced_out;
+ NEReshapeLayer _reshape;
+};
+
+/** Basic function to run arg max. */
+using NEArgMax = NEArgMinMaxStatic<ReductionOperation::ARG_IDX_MAX>;
+/** Basic function to run arg min. */
+using NEArgMin = NEArgMinMaxStatic<ReductionOperation::ARG_IDX_MIN>;
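+
+/*
+ * A minimal usage sketch (illustrative only; tensor setup is assumed):
+ *
+ *   NEArgMax argmax; // alias for NEArgMinMaxStatic<ReductionOperation::ARG_IDX_MAX>
+ *   argmax.configure(&input, 0, &output); // reduce along axis 0
+ *   argmax.run();
+ */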
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEON_ARG_MIN_MAX_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEBinaryLogicalOperation.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEBinaryLogicalOperation.h
new file mode 100644
index 000000000..2a624656d
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEBinaryLogicalOperation.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEBINARYLOGICALOPERATION_H__
+#define __ARM_COMPUTE_NEBINARYLOGICALOPERATION_H__
+
+#include "arm_compute/core/TypesEx.h"
+#include "arm_compute/runtime/NEON/INESimpleFunction.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Basic function to run @ref NEBinaryLogicalOperationKernel.
+ *
+ * @note The tensor data type for the inputs must be QASYMM8/U8.
+ * @note The function performs a binary logical operation between two tensors.
+ */
+class NEBinaryLogicalOperation : public INESimpleFunction
+{
+public:
+ /** Initialise the kernel's inputs, output and conversion policy.
+ *
+ * @param[in, out] input1 First tensor input. Data types supported: QASYMM8/U8.
+ * @param[in, out] input2 Second tensor input. Data types supported: Same as @p input1.
+ * @param[out] output Output tensor. Data types supported: Same as @p input1.
+ * @param[in] op Binary Logical Operation to be performed.
+ */
+ void configure(ITensor *input1, ITensor *input2, ITensor *output, BinaryLogicalOperation op);
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * NEBinaryLogicalOperationKernel
+ *
+ * @param[in] input1 First tensor input info. Data types supported: QASYMM8/U8.
+ * @param[in] input2 Second tensor input info. Data types supported: Same as @p input1.
+ * @param[in] output Output tensor info. Data types supported: Same as @p input1.
+ * @param[in] op Binary Logical Operation to be performed.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input1, const ITensorInfo *input2,
+ const ITensorInfo *output, BinaryLogicalOperation op);
+};
+
+/** Basic function to run @ref NEBinaryLogicalOperationKernel
+ *
+ * @note The tensor data type for the inputs must be QASYMM8/U8.
+ * @note The function performs a binary logical operation between two tensors.
+ */
+template <BinaryLogicalOperation op> class NEBinaryLogicalOperationStatic : public INESimpleFunction
+{
+public:
+ /** Initialise the kernel's inputs, output and conversion policy.
+ *
+ * @param[in, out] input1 First tensor input. Data types supported: QASYMM8/U8
+ * @param[in, out] input2 Second tensor input. Data types supported: Same as @p input1.
+ * @param[out] output Output tensor. Data types supported: Same as @p input1.
+ */
+ void configure(ITensor *input1, ITensor *input2, ITensor *output);
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * NEBinaryLogicalOperationKernel
+ *
+ * @param[in] input1 First tensor input info. Data types supported: QASYMM8/U8
+ * @param[in] input2 Second tensor input info. Data types supported: Same as @p input1.
+ * @param[in] output Output tensor info. Data types supported: Same as @p input1.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input1, const ITensorInfo *input2,
+ const ITensorInfo *output);
+};
+
+/** Basic function to run logical AND. */
+using NELogicalAnd = NEBinaryLogicalOperationStatic<BinaryLogicalOperation::AND>;
+/** Basic function to run logical OR. */
+using NELogicalOr = NEBinaryLogicalOperationStatic<BinaryLogicalOperation::OR>;
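+
+/*
+ * A minimal usage sketch (illustrative only; the QASYMM8/U8 tensors are
+ * assumed to be configured and allocated elsewhere):
+ *
+ *   NELogicalAnd logical_and;
+ *   logical_and.configure(&input1, &input2, &output);
+ *   logical_and.run();
+ */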
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEBINARYLOGICALOPERATION_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEElementwiseUnaryLayerEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEElementwiseUnaryLayerEx.h
new file mode 100644
index 000000000..f0c8ecdb5
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEElementwiseUnaryLayerEx.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEELEMENTWISEUNARYLAYEREX_H__
+#define __ARM_COMPUTE_NEELEMENTWISEUNARYLAYEREX_H__
+
+#include "arm_compute/runtime/NEON/INESimpleFunction.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Basic function to compute the element-wise negative of an input tensor. */
+class NENegLayer : public INESimpleFunction
+{
+public:
+ /** Initialize the function
+ *
+ * @param[in] input Input tensor. Data types supported: F16/F32/S32.
+ * @param[out] output Output tensor. Data types supported: same as @p input.
+ */
+ void configure(const ITensor *input, ITensor *output);
+ /** Static function to check if given info will lead to a valid configuration of @ref NENegLayer
+ *
+ * @param[in] input First tensor input info. Data types supported: F16/F32/S32.
+ * @param[in] output Output tensor info. Data types supported: Same as @p input.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *output);
+};
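+
+/*
+ * A minimal usage sketch (illustrative only; tensor setup is assumed):
+ *
+ *   NENegLayer neg;
+ *   neg.configure(&input, &output); // output = -input, element-wise
+ *   neg.run();
+ */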
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEELEMENTWISEUNARYLAYEREX_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEFullyConnectedReshapingLayer.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEFullyConnectedReshapingLayer.h
new file mode 100644
index 000000000..21352ff8b
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEFullyConnectedReshapingLayer.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file        NEFullyConnectedReshapingLayer.h
+ * @brief       This file contains NEFullyConnectedReshapingLayer class
+ * @ingroup     COM_AI_RUNTIME
+ */
+
+#ifndef __ARM_COMPUTE_NE_FULLY_CONNECTED_RESHAPING_LAYER_H__
+#define __ARM_COMPUTE_NE_FULLY_CONNECTED_RESHAPING_LAYER_H__
+
+#include <arm_compute/runtime/NEON/functions/NEReshapeLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEFullyConnectedLayer.h>
+#include <arm_compute/runtime/IMemoryManager.h>
+
+namespace arm_compute
+{
+/**
+ * @brief Class to run FullyConnected Layer after reshaping input tensor
+ */
+class NEFullyConnectedReshapingLayer : public arm_compute::IFunction
+{
+public:
+ NEFullyConnectedReshapingLayer(std::shared_ptr<IMemoryManager> memory_manager = nullptr)
+ : _input(nullptr), _weights(nullptr), _biases(nullptr), _output(nullptr), _neon_buffer{},
+ _neon_fc{memory_manager}, _neon_reshape{}, _needs_reshape(false)
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Configure the layer
+ * @param[in] input The source tensor
+ * @param[in] weights The tensor that is filled with weight values
+ * @param[in] biases The tensor that is filled with bias values
+ * @param[in] output The destination tensor
+ * @param[in] needs_reshape Whether it needs to be reshaped or not
+ * @param[in] reshape The target shape of the reshape. Only valid when needs_reshape is true.
+ * @return N/A
+ */
+ void configure(const arm_compute::ITensor *input, const arm_compute::ITensor *weights,
+ const arm_compute::ITensor *biases, arm_compute::ITensor *output,
+ bool needs_reshape, const arm_compute::TensorShape &reshape);
+
+public:
+ /**
+ * @brief Run the operation. Must be called after configure().
+ * @return N/A
+ */
+ void run(void) override;
+
+private:
+ const arm_compute::ITensor *_input;
+ const arm_compute::ITensor *_weights;
+ const arm_compute::ITensor *_biases;
+ arm_compute::ITensor *_output;
+
+ // buffer for reshaping input tensor
+ arm_compute::Tensor _neon_buffer;
+
+private:
+ arm_compute::NEFullyConnectedLayer _neon_fc;
+ NEReshapeLayer _neon_reshape;
+ bool _needs_reshape;
+};
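+
+/*
+ * A minimal usage sketch (illustrative only; the reshape target shape and
+ * tensor allocation are assumptions of this example):
+ *
+ *   NEFullyConnectedReshapingLayer fc;
+ *   fc.configure(&input, &weights, &biases, &output,
+ *                true, TensorShape(1, 256)); // needs_reshape = true
+ *   fc.run();
+ */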
+} // namespace arm_compute
+
+#endif // __ARM_COMPUTE_NE_FULLY_CONNECTED_RESHAPING_LAYER_H__
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEPReLU.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEPReLU.h
new file mode 100644
index 000000000..5664c57cb
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEPReLU.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEPRELU_H__
+#define __ARM_COMPUTE_NEPRELU_H__
+
+#include "arm_compute/runtime/NEON/INESimpleFunctionNoBorder.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Basic function to run @ref NEPReLUKernel */
+class NEPReLU : public INESimpleFunctionNoBorder
+{
+public:
+ /** Initialise the kernel's inputs and output
+ *
+ * @param[in] input Source tensor. Data types supported: QASYMM8/F32.
+ * @param[in] alpha Alpha (slope) tensor. Data types supported: Same as @p input.
+ * @param[out] output Output tensor. Data types supported: Same as @p input.
+ */
+ void configure(const ITensor *input, const ITensor *alpha, ITensor *output);
+};
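+
+/*
+ * A minimal usage sketch (illustrative only; tensor setup is assumed):
+ *
+ *   NEPReLU prelu;
+ *   prelu.configure(&input, &alpha, &output);
+ *   prelu.run(); // run() is inherited from INESimpleFunctionNoBorder
+ */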
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEPRELU_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NERNNLayerEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NERNNLayerEx.h
new file mode 100644
index 000000000..17c37d806
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NERNNLayerEx.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NERNNLAYER_EX_H__
+#define __ARM_COMPUTE_NERNNLAYER_EX_H__
+
+#include "arm_compute/core/NEON/kernels/NEActivationLayerKernel.h"
+#include "arm_compute/core/NEON/kernels/NEArithmeticAdditionKernel.h"
+#include "arm_compute/core/NEON/kernels/NECopyKernel.h"
+
+#include "arm_compute/core/Types.h"
+#include "arm_compute/runtime/NEON/functions/NEFullyConnectedLayer.h"
+#include "arm_compute/runtime/NEON/functions/NEGEMM.h"
+
+namespace arm_compute
+{
+// Forward declarations
+class ITensor;
+
+/** Basic function to run @ref NERNNLayerEx */
+class NERNNLayerEx : public IFunction
+{
+public:
+ /** Default constructor */
+ NERNNLayerEx(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NERNNLayerEx(const NERNNLayerEx &) = delete;
+ /** Default move constructor */
+ NERNNLayerEx(NERNNLayerEx &&) = default;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NERNNLayerEx &operator=(const NERNNLayerEx &) = delete;
+ /** Default move assignment operator */
+ NERNNLayerEx &operator=(NERNNLayerEx &&) = default;
+ /** Initialize the function
+ *
+ * @param[in] input Input is a 2-D tensor of shape [input_size, batch_size]. Data
+ * types supported: F16/F32
+ * @param[in] weights Weights tensor of shape [input_size, num_units] that
+ * multiplies the input. Data types supported: Same as @p input
+ * @param[in] recurrent_weights Weights tensor of shape [num_units, num_units] that multiplies
+ * the current 'state'. Data types supported: Same as @p input
+ * @param[in] bias Bias vector of shape [num_units]. Data types supported: Same
+ * as @p input
+ * @param[out] output Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in,out] hidden_state Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in] info Activation layer parameter.
+ */
+ void configure(const ITensor *input, const ITensor *weights, const ITensor *recurrent_weights,
+ const ITensor *bias, ITensor *hidden_state, ITensor *output,
+ ActivationLayerInfo &info);
+ /** Static function to check if given info will lead to a valid configuration of @ref NERNNLayerEx
+ *
+ * @param[in] input Input is a 2-D tensor of shape [input_size, batch_size]. Data
+ * types supported: F16/F32
+ * @param[in] weights Weights tensor of shape [input_size, num_units] that multiplies
+ * the input. Data types supported: Same as @p input
+ * @param[in] recurrent_weights Weights tensor of shape [num_units, num_units] that multiplies the
+ * current 'state'. Data types supported: Same as @p input
+ * @param[in] bias Bias vector of shape [num_units]. Data types supported: Same as @p
+ * input
+ * @param[in] output Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in] hidden_state Output tensor of shape [num_units, batch_size]. Data types
+ * supported: Same as @p input
+ * @param[in] info Activation layer parameter.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *recurrent_weights, const ITensorInfo *bias,
+ const ITensorInfo *hidden_state, const ITensorInfo *output,
+ const ActivationLayerInfo &info);
+
+ // Inherited methods overridden:
+ void run() override;
+ void prepare() override;
+
+private:
+ MemoryGroup _memory_group;
+ NEGEMM _gemm_state_f;
+ NEArithmeticAdditionKernel _add_kernel;
+ NEActivationLayerKernel _activation_kernel;
+ NEFullyConnectedLayer _fully_connected_kernel;
+ NECopyKernel _copy_kernel;
+ Tensor _fully_connected_out;
+ Tensor _gemm_output;
+ Tensor _add_output;
+ bool _is_prepared;
+};
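+
+/*
+ * A minimal usage sketch (illustrative only; tensor shapes follow the
+ * configure() documentation and allocation is assumed to happen elsewhere):
+ *
+ *   ActivationLayerInfo act(ActivationLayerInfo::ActivationFunction::TANH);
+ *   NERNNLayerEx rnn;
+ *   rnn.configure(&input, &weights, &recurrent_weights, &bias, &hidden_state,
+ *                 &output, act);
+ *   rnn.run();
+ */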
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NERNNLAYER_EX_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceMeanEx.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceMeanEx.h
new file mode 100644
index 000000000..7209acf19
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceMeanEx.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEON_REDUCE_MEAN_EX_H__
+#define __ARM_COMPUTE_NEON_REDUCE_MEAN_EX_H__
+
+#include "arm_compute/runtime/IFunction.h"
+
+#include "arm_compute/core/NEON/kernels/NEFillBorderKernel.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/runtime/MemoryGroup.h"
+#include "arm_compute/runtime/NEON/functions/NEReductionOperation.h"
+#include "arm_compute/runtime/NEON/functions/NEReshapeLayer.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Basic function to perform a reduce mean operation */
+class NEReduceMeanEx : public IFunction
+{
+public:
+ /** Constructor */
+ NEReduceMeanEx(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
+ /** Configure kernel
+ *
+ * @note Supported tensor rank: up to 4
+ *
+ * @param[in] input Source tensor. Data type supported: QASYMM8/F16/F32
+ * @param[in] reduction_axis Reduction axis vector.
+ * @param[in] keep_dims If true, retains reduced dimensions with length 1.
+ * @param[out] output Destination tensor. Data type supported: Same as @p input
+ */
+ void configure(ITensor *input, const Coordinates &reduction_axis, bool keep_dims,
+ ITensor *output);
+
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * NEReduceMeanEx
+ *
+ * @param[in] input Source tensor. Data type supported: QASYMM8/F16/F32
+ * @param[in] reduction_axis Reduction axis vector.
+ * @param[in] keep_dims If true, retains reduced dimensions with length 1.
+ * @param[in] output Destination tensor. Data type supported: Same as @p input
+ *
+ * @return A status
+ */
+ static Status validate(const ITensorInfo *input, const Coordinates &reduction_axis,
+ bool keep_dims, const ITensorInfo *output);
+
+ // Inherited methods overridden:
+ void run() override;
+
+private:
+ MemoryGroup _memory_group;
+ std::unique_ptr<NEReductionOperation[]> _reduction_kernels{nullptr};
+ std::unique_ptr<Tensor[]> _reduced_outs{nullptr};
+ NEReshapeLayer _reshape;
+ unsigned int _reduction_ops;
+ bool _keep_dims;
+};
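+
+/*
+ * A minimal usage sketch (illustrative only; tensor setup is assumed):
+ *
+ *   NEReduceMeanEx reduce_mean;
+ *   reduce_mean.configure(&input, Coordinates(0, 1), true, &output); // keep dims
+ *   reduce_mean.run();
+ */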
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEON_REDUCE_MEAN_EX_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceSum.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceSum.h
new file mode 100644
index 000000000..c028ea658
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NEReduceSum.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NEON_REDUCE_SUM_H__
+#define __ARM_COMPUTE_NEON_REDUCE_SUM_H__
+
+#include "arm_compute/runtime/IFunction.h"
+
+#include "arm_compute/core/NEON/kernels/NEFillBorderKernel.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/runtime/MemoryGroup.h"
+#include "arm_compute/runtime/NEON/functions/NEReductionOperation.h"
+#include "arm_compute/runtime/NEON/functions/NEReshapeLayer.h"
+
+namespace arm_compute
+{
+class ITensor;
+
+/** Basic function to perform a reduce sum operation */
+class NEReduceSum : public IFunction
+{
+public:
+ /** Constructor */
+ NEReduceSum(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
+ /** Configure kernel
+ *
+ * @note Supported tensor rank: up to 4
+ *
+ * @param[in] input Source tensor. Data type supported: QASYMM8/F16/F32
+ * @param[in] reduction_axis Reduction axis vector.
+ * @param[in] keep_dims If true, retains reduced dimensions with length 1.
+ * @param[out] output Destination tensor. Data type supported: Same as @p input
+ */
+ void configure(ITensor *input, const Coordinates &reduction_axis, bool keep_dims,
+ ITensor *output);
+
+ /** Static function to check if given info will lead to a valid configuration of @ref NEReduceSum
+ *
+ * @param[in] input Source tensor. Data type supported: QASYMM8/F16/F32
+ * @param[in] reduction_axis Reduction axis vector.
+ * @param[in] keep_dims If true, retains reduced dimensions with length 1.
+ * @param[in] output Destination tensor. Data type supported: Same as @p input
+ *
+ * @return A status
+ */
+ static Status validate(const ITensorInfo *input, const Coordinates &reduction_axis,
+ bool keep_dims, const ITensorInfo *output);
+
+ // Inherited methods overridden:
+ void run() override;
+
+private:
+ MemoryGroup _memory_group;
+ std::vector<NEReductionOperation> _reduction_kernels;
+ std::vector<Tensor> _reduced_outs;
+ NEReshapeLayer _reshape;
+ unsigned int _reduction_ops;
+ bool _keep_dims;
+};
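+
+/*
+ * A minimal usage sketch (illustrative only; tensor setup is assumed):
+ *
+ *   NEReduceSum reduce_sum;
+ *   reduce_sum.configure(&input, Coordinates(0), false, &output); // drop dims
+ *   reduce_sum.run();
+ */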
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NEON_REDUCE_SUM_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NETransposeConvLayer.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NETransposeConvLayer.h
new file mode 100644
index 000000000..a50b9ea60
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/NEON/functions/NETransposeConvLayer.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef __ARM_COMPUTE_NETRANSPOSECONVLAYER_H__
+#define __ARM_COMPUTE_NETRANSPOSECONVLAYER_H__
+
+#include "arm_compute/runtime/CPP/functions/CPPUpsampleEx.h"
+#include "arm_compute/runtime/NEON/functions/NEConvolutionLayer.h"
+#include "arm_compute/runtime/NEON/functions/NEDirectConvolutionLayer.h"
+#include "arm_compute/runtime/NEON/functions/NEPermute.h"
+
+#include "arm_compute/core/CPP/kernels/CPPFlipWeightsKernel.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/runtime/IFunction.h"
+#include "arm_compute/runtime/IMemoryManager.h"
+#include "arm_compute/runtime/MemoryGroup.h"
+#include "arm_compute/runtime/Tensor.h"
+
+#include <memory>
+
+namespace arm_compute
+{
+/** Function to run the deconvolution layer.
+ *
+ * Transpose convolution Layer is the backward pass of Convolution Layer. First we transform the
+ * input depending on the stride and pad info, and then perform a 1x1
+ * convolution pass. The input stride defines how many zeroes we should put between each element
+ * of the input, pad is the amount of padding, and finally a is a user-specified
+ * value with a < stride - 1 that increases the padding at the top and right of the input image.
+ *
+ * The relation between input and output is as follows:
+ * \f[
+ * width\_output = (width\_input - 1) \cdot stride\_x - 2 \cdot padding\_x + kernel\_x
+ * \f]
+ * \f[
+ * height\_output = (height\_input - 1) \cdot stride\_y - 2 \cdot padding\_y + kernel\_y
+ * \f]
+ *
+ * where
+ * width_input is the size of the first input dimension.
+ * height_input is the size of the second input dimension.
+ * width_output is the size of the first output dimension.
+ * height_output is the size of the second output dimension.
+ * kernel_x and kernel_y are the convolution sizes in x and y.
+ * stride_x and stride_y are the input strides of the first and second dimension.
+ *
+ * The weights used by Transpose convolution are supposed to be the same as the ones used for
+ * Convolution. Therefore, it will be necessary to use the weights in the
+ * reverse order to perform an actual convolution. This is achieved by using the @ref
+ * CPPFlipWeightsKernel.
+ *
+ * This function calls the following NEON kernels/functions:
+ *
+ * -# @ref CPPUpsampleEx
+ * -# @ref NEConvolutionLayer
+ *
+ */
+class NETransposeConvLayer : public IFunction
+{
+public:
+ /** Default constructor */
+ NETransposeConvLayer(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
+
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NETransposeConvLayer(const NETransposeConvLayer &) = delete;
+ /** Prevent instances of this class from being copied (As this class contains pointers) */
+ NETransposeConvLayer &operator=(const NETransposeConvLayer &) = delete;
+ /** Allow instances of this class to be moved */
+ NETransposeConvLayer(NETransposeConvLayer &&) = default;
+ /** Allow instances of this class to be moved */
+ NETransposeConvLayer &operator=(NETransposeConvLayer &&) = default;
+ /** Default destructor */
+ virtual ~NETransposeConvLayer() = default;
+
+ /** Set the input, weights, biases and output tensors.
+ *
+ * @param[in,out] input Input tensor. 3 lower dimensions represent a single input, and an
+ * optional 4th dimension for batch of inputs. Data types supported: F32/F16/QASYMM8.
+ * @param[in] weights The 4d weights with dimensions [width, height, IFM, OFM]. Data type
+ * supported: Same as @p input.
+ * @param[in] bias Optional, ignored if NULL. The biases have one dimension. Data types
+ * supported: S32 for QASYMM8 input, F32 for F32 input, F16 for F16 input.
+ * @param[out] output Output tensor. The output has the same number of dimensions as the @p
+ * input.
+ * @param[in] info Contains padding and policies to be used in the deconvolution; this is
+ * described in @ref PadStrideInfo.
+ * @param[in] invalid_right The number of zeros added to the right edge of the output.
+ * @param[in] invalid_bottom The number of zeros added to the bottom edge of the output.
+ *
+ */
+ void configure(ITensor *input, const ITensor *weights, const ITensor *bias, ITensor *output,
+ const PadStrideInfo &info, unsigned int invalid_right,
+ unsigned int invalid_bottom);
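+ /* A minimal usage sketch (illustrative only; tensor creation, allocation and the
+  * stride/padding values here are hypothetical and omitted for brevity):
+  *
+  *   NETransposeConvLayer deconv;
+  *   deconv.configure(&input, &weights, &bias, &output, PadStrideInfo(2, 2, 0, 0), 0, 0);
+  *   deconv.run();
+  */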
+ /** Static function to check if given info will lead to a valid configuration of @ref
+ * NETransposeConvLayer
+ *
+ * @param[in] input Input tensor info. 3 lower dimensions represent a single input, and an
+ * optional 4th dimension for batch of inputs. Data types supported: F32/F16/QASYMM8.
+ * @param[in] weights The 4d weights info with dimensions [width, height, IFM, OFM]. Data type
+ * supported: Same as @p input.
+ * @param[in] bias (Optional) The biases have one dimension. Data types supported:
+ * S32 for QASYMM8 input, F32 for F32 input, F16 for F16 input.
+ * @param[in] output Output tensor info. The output has the same number of dimensions as the @p
+ * input.
+ * @param[in] info Contains padding and policies to be used in the deconvolution; this is
+ * described in @ref PadStrideInfo.
+ * @param[in] invalid_right The number of zeros added to the right edge of the output.
+ * @param[in] invalid_bottom The number of zeros added to the bottom edge of the output.
+ *
+ * @return a status
+ */
+ static Status validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *bias, const ITensorInfo *output,
+ const PadStrideInfo &info, unsigned int invalid_right,
+ unsigned int invalid_bottom);
+
+ // Inherited methods overridden:
+ void run() override;
+ void prepare() override;
+
+private:
+ MemoryGroup _memory_group;
+ NEConvolutionLayer _conv_f;
+ CPPUpsampleEx _upsample_f;
+ CPPFlipWeightsKernel _flip_weights;
+ NEPermute _permute_input;
+ NEPermute _permute_weights;
+ NEPermute _permute_output;
+ Tensor _scaled_output;
+ Tensor _weights_flipped;
+ Tensor _permuted_input;
+ Tensor _permuted_weights;
+ Tensor _permuted_output;
+ bool _is_nchw;
+ const ITensor *_original_weights;
+ ITensor *_input;
+ PadStrideInfo _info;
+ bool _is_prepared;
+};
+} // namespace arm_compute
+#endif /* __ARM_COMPUTE_NETRANSPOSECONVLAYER_H__ */
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericGather.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericGather.h
new file mode 100644
index 000000000..3db0c7e5e
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericGather.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file        GenericGather.h
+ * @brief       This file contains GenericGather class
+ * @ingroup     COM_AI_RUNTIME
+ */
+
+#ifndef __ARM_COMPUTE_MISC_GENERIC_GATHER_H__
+#define __ARM_COMPUTE_MISC_GENERIC_GATHER_H__
+
+#include <arm_compute/runtime/Tensor.h>
+#include <arm_compute/runtime/CL/CLTensor.h>
+
+#include <arm_compute/runtime/CL/functions/CLPermute.h>
+#include <arm_compute/runtime/CL/functions/CLGatherEx.h>
+
+#include "Utils.h"
+
+namespace arm_compute
+{
+namespace misc
+{
+
+/**
+ * @brief Class to run Gather with both CPU and GPU
+ */
+class GenericGather : public arm_compute::IFunction
+{
+public:
+ GenericGather(void)
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Configure the layer
+ * @param[in] input The source tensor
+ * @param[in] indices The indices tensor
+ * @param[in] output The destination tensor
+ * @param[in] axis (Optional) The axis in input to gather indices from
+ * @return N/A
+ */
+ void configure(arm_compute::ITensor *input, arm_compute::ITensor *indices,
+ arm_compute::ITensor *output, int axis = 0);
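+ /* Usage sketch (illustrative; the tensors are assumed to be configured and
+  * allocated by the caller):
+  *
+  *   GenericGather gather;
+  *   gather.configure(&input, &indices, &output, 0);
+  *   gather.run();
+  */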
+
+public:
+ /**
+ * @brief Run the operation. Must be called after configure().
+ * @return N/A
+ */
+ void run(void) override;
+
+private:
+ arm_compute::ITensor *_input{nullptr};
+ arm_compute::ITensor *_indices{nullptr};
+ arm_compute::ITensor *_output{nullptr};
+ int _axis{0};
+ arm_compute::CLTensor _cl_permuted;
+
+private:
+ arm_compute::CLPermute _cl_permute;
+ arm_compute::CLGatherEx _cl_gather;
+};
+
+} // namespace misc
+} // namespace arm_compute
+
+#endif // __ARM_COMPUTE_MISC_GENERIC_GATHER_H__
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericReshapeLayer.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericReshapeLayer.h
new file mode 100644
index 000000000..ab2fdc71d
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/GenericReshapeLayer.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file        GenericReshapeLayer.h
+ * @brief       This file contains GenericReshapeLayer class
+ * @ingroup     COM_AI_RUNTIME
+ */
+
+#ifndef __ARM_COMPUTE_MISC_GENERIC_RESHAPE_LAYER_H__
+#define __ARM_COMPUTE_MISC_GENERIC_RESHAPE_LAYER_H__
+
+#include <arm_compute/runtime/Tensor.h>
+#include <arm_compute/runtime/CL/CLTensor.h>
+
+#include <arm_compute/runtime/CL/functions/CLPermute.h>
+#include <arm_compute/runtime/CL/functions/CLReshapeLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEPermute.h>
+#include <arm_compute/runtime/NEON/functions/NEReshapeLayer.h>
+
+#include "Utils.h"
+
+namespace arm_compute
+{
+namespace misc
+{
+
+/**
+ * @brief Class to run Reshape Layer with both CPU and GPU
+ */
+class GenericReshapeLayer : public arm_compute::IFunction
+{
+public:
+ GenericReshapeLayer(void)
+ : _input(nullptr), _output(nullptr), _cl_permuted{}, _neon_permuted{}, _cl_permute{},
+ _cl_reshape{}, _neon_permute{}, _neon_reshape{}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Configure the layer
+ * @param[in] input The source tensor
+ * @param[in] output The destination tensor
+ * @return N/A
+ */
+ void configure(const arm_compute::ITensor *input, arm_compute::ITensor *output);
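+ /* Usage sketch (illustrative; the tensors are assumed to be configured and
+  * allocated by the caller):
+  *
+  *   GenericReshapeLayer reshape;
+  *   reshape.configure(&input, &output);
+  *   reshape.run();
+  */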
+
+public:
+ /**
+ * @brief Run the operation. Must be called after configure().
+ * @return N/A
+ */
+ void run(void) override;
+
+private:
+ const arm_compute::ITensor *_input;
+ arm_compute::ITensor *_output;
+ arm_compute::CLTensor _cl_permuted;
+ arm_compute::Tensor _neon_permuted;
+
+private:
+ arm_compute::CLPermute _cl_permute;
+ arm_compute::CLReshapeLayer _cl_reshape;
+
+ arm_compute::NEPermute _neon_permute;
+ arm_compute::NEReshapeLayer _neon_reshape;
+};
+
+} // namespace misc
+} // namespace arm_compute
+
+#endif // __ARM_COMPUTE_MISC_GENERIC_RESHAPE_LAYER_H__
diff --git a/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/Utils.h b/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/Utils.h
new file mode 100644
index 000000000..53736f55f
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/arm_compute/runtime/misc/functions/Utils.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Utils.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains utilities for the ARM Compute Library
+ */
+#ifndef __ARM_COMPUTE_MISC_UTILS_H__
+#define __ARM_COMPUTE_MISC_UTILS_H__
+
+#include <string>
+#include <cassert>
+#include <arm_compute/runtime/CL/CLTensor.h>
+
+#include <arm_compute/core/Coordinates.h>
+#include <arm_compute/core/TensorInfo.h>
+#include <arm_compute/core/TensorShape.h>
+#include <arm_compute/core/Types.h>
+
+// TODO: This should be extracted into an independent module.
+
+namespace arm_compute
+{
+namespace misc
+{
+namespace utils
+{
+
+/**
+ * @brief Check if this runtime runs on GPU or NEON
+ * @return @c true if GPU mode, otherwise @c false
+ */
+bool isGpuMode();
+
+#ifndef CAST_CL
+#define CAST_CL(tensor) static_cast<::arm_compute::CLTensor *>(tensor)
+#endif
+
+#ifndef CAST_NE
+#define CAST_NE(tensor) static_cast<::arm_compute::Tensor *>(tensor)
+#endif
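+
+// A minimal dispatch sketch (illustrative), assuming `tensor` is an ITensor * backed by
+// a CLTensor when isGpuMode() returns true and by a Tensor otherwise:
+//
+//   if (isGpuMode())
+//   {
+//     auto *cl_tensor = CAST_CL(tensor); // pass to CL-side functions
+//   }
+//   else
+//   {
+//     auto *ne_tensor = CAST_NE(tensor); // pass to NEON-side functions
+//   }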
+
+/**
+ * @brief Generate an arm compute permutation vector from a runtime permutation vector
+ * @param[in] rank Rank number, supported up to 4
+ * @param[in] runtime_pv Integer array for the runtime permutation vector
+ * @return Permutation vector of arm compute
+ */
+arm_compute::PermutationVector getARMComputePermutationVector(uint32_t rank,
+ const int32_t *runtime_pv);
+
+/**
+ * @brief Set value to arm compute tensor with casting
+ * @param[in] value Value to set
+ * @param[out] to Target tensor of arm compute
+ * @param[in] id Position of element
+ * @return N/A
+ */
+template <typename FromT>
+void copyCast(const FromT value, arm_compute::ITensor *to, const arm_compute::Coordinates &id)
+{
+ switch (to->info()->data_type())
+ {
+ case arm_compute::DataType::F32:
+ {
+ *reinterpret_cast<float *>(to->ptr_to_element(id)) = static_cast<float>(value);
+ break;
+ }
+ case arm_compute::DataType::S32:
+ {
+ *reinterpret_cast<int32_t *>(to->ptr_to_element(id)) = static_cast<int32_t>(value);
+ break;
+ }
+ case arm_compute::DataType::U32:
+ {
+ *reinterpret_cast<uint32_t *>(to->ptr_to_element(id)) = static_cast<uint32_t>(value);
+ break;
+ }
+ case arm_compute::DataType::QASYMM8:
+ {
+ float realValue = static_cast<float>(value);
+ // NOTE The rounding policy for quantization has not been decided yet,
+ // so a temporary value is used here.
+ *(to->ptr_to_element(id)) =
+ to->info()->quantization_info().quantize(realValue, arm_compute::RoundingPolicy::TO_ZERO);
+ break;
+ }
+ default:
+ throw std::runtime_error("Not supported yet");
+ break;
+ }
+}
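+
+/* Usage sketch (illustrative): write 1.5f into element (0, 0) of an allocated
+ * float tensor `t` (a hypothetical arm_compute::Tensor):
+ *
+ *   copyCast(1.5f, &t, arm_compute::Coordinates(0, 0));
+ */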
+
+} // namespace utils
+} // namespace misc
+} // namespace arm_compute
+
+#endif // __ARM_COMPUTE_MISC_UTILS_H__
diff --git a/libs/ARMComputeEx/resolve_includes.py b/runtimes/libs/ARMComputeEx/resolve_includes.py
index b3e252892..b3e252892 100644
--- a/libs/ARMComputeEx/resolve_includes.py
+++ b/runtimes/libs/ARMComputeEx/resolve_includes.py
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp
new file mode 100644
index 000000000..808125645
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/CLKernelLibrary.cpp
@@ -0,0 +1,354 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/core/CL/CLKernelLibrary.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/Error.h"
+#include "arm_compute/core/Utils.h"
+
+#include <algorithm>
+#include <fstream>
+#include <iostream>
+#include <utility>
+#include <vector>
+
+using namespace arm_compute;
+
+const std::map<std::string, std::string> CLKernelLibraryEx::_kernel_program_map = {
+ // ARMComputeEx kernels
+ {"arg_op", "arg_operation.cl"},
+ {"arithmetic_add_qasymm8", "arithmetic_op_quantized.cl"},
+ {"binary_logical_op", "binary_logical_op.cl"},
+ {"cast", "cast.cl"},
+ {"cast_qasymm_in", "cast.cl"},
+ {"cast_qasymm_out", "cast.cl"},
+ {"comparison_op", "comparison_op.cl"},
+ {"comparison_op_qasymm8", "comparison_op_quantized.cl"},
+ {"depth_to_space_nchw", "depth_to_space.cl"},
+ {"depth_to_space_nhwc", "depth_to_space.cl"},
+ {"embedding_lookup", "embedding_lookup.cl"},
+ {"gather_ex", "gather_ex.cl"},
+ {"gather_ex_1d", "gather_ex.cl"},
+ {"gather_ex_1d_out", "gather_ex.cl"},
+ {"hashtable_lookup", "hashtable_lookup.cl"},
+ {"neg_tensor", "neg_tensor.cl"},
+ {"permute_generic", "permute_ex.cl"},
+ {"pixelwise_mul_qasymm8", "pixelwise_mul_quantized.cl"},
+ {"prelu", "prelu.cl"},
+ {"prelu_qasymm8", "prelu_quantized.cl"},
+ {"reduce_min_max", "reduce_operation.cl"},
+ {"reduce_sum_mean", "reduce_operation.cl"},
+ {"topkv2_init", "topkv2.cl"},
+ {"topkv2_find_first_negative", "topkv2.cl"},
+ {"topkv2_reorder_negatives", "topkv2.cl"},
+ {"topkv2_store", "topkv2.cl"},
+ {"radixsort_histogram", "topkv2_radixsort.cl"},
+ {"radixsort_scanhistograms", "topkv2_radixsort.cl"},
+ {"radixsort_pastehistograms", "topkv2_radixsort.cl"},
+ {"radixsort_reorder", "topkv2_radixsort.cl"},
+ {"topkv2_quicksort", "topkv2_quicksort.cl"},
+ {"space_to_batch_4d_nchw", "space_to_batch.cl"},
+ {"space_to_batch_4d_nhwc", "space_to_batch.cl"},
+ {"space_to_depth_nchw", "space_to_depth.cl"},
+ {"space_to_depth_nhwc", "space_to_depth.cl"},
+};
+
+const std::map<std::string, std::string> CLKernelLibraryEx::_program_source_map = {
+#ifdef EMBEDDED_KERNELS
+ {
+ "arg_operation.cl",
+#include "./cl_kernels/arg_operation.clembed"
+ },
+ {
+ "cast.cl",
+#include "./cl_kernels/cast.clembed"
+ },
+ {
+ "embedding_lookup.cl",
+#include "./cl_kernels/embedding_lookup.clembed"
+ },
+ {
+ "depth_to_space.cl",
+#include "./cl_kernels/depth_to_space.clembed"
+ },
+ {
+ "gather_ex.cl",
+#include "./cl_kernels/gather_ex.clembed"
+ },
+ {
+ "hashtable_lookup.cl",
+#include "./cl_kernels/hashtable_lookup.clembed"
+ },
+ {
+ "helpers.h",
+#include "./cl_kernels/helpers.hembed"
+ },
+ {
+ "helpers_asymm.h",
+#include "./cl_kernels/helpers_asymm.hembed"
+ },
+ {
+ "binary_logical_op.cl",
+#include "./cl_kernels/binary_logical_op.clembed"
+ },
+ {
+ "neg_tensor.cl",
+#include "./cl_kernels/neg_tensor.clembed"
+ },
+ {
+ "prelu.cl",
+#include "./cl_kernels/prelu.clembed"
+ },
+ {
+ "prelu_quantized.cl",
+#include "./cl_kernels/prelu_quantized.clembed"
+ },
+ {
+ "reduce_operation.cl",
+#include "./cl_kernels/reduce_operation.clembed"
+ },
+ {
+ "space_to_batch.cl",
+#include "./cl_kernels/space_to_batch.clembed"
+ },
+ {
+ "space_to_depth.cl",
+#include "./cl_kernels/space_to_depth.clembed"
+ },
+ {
+ "topkv2.cl",
+#include "./cl_kernels/topkv2.clembed"
+ },
+ {
+ "topkv2_radixsort.cl",
+#include "./cl_kernels/topkv2_radixsort.clembed"
+ },
+ {
+ "topkv2_quicksort.cl",
+#include "./cl_kernels/topkv2_quicksort.clembed"
+ },
+
+#endif /* EMBEDDED_KERNELS */
+};
+
+CLKernelLibraryEx::CLKernelLibraryEx()
+ : _context(), _device(), _kernel_path("."), _programs_map(), _built_programs_map()
+{
+ opencl_is_available(); // Make sure the OpenCL symbols are initialised *before* the
+ // CLKernelLibraryEx is built
+}
+
+CLKernelLibraryEx &CLKernelLibraryEx::get()
+{
+ static CLKernelLibraryEx _kernel_library;
+ return _kernel_library;
+}
+
+Kernel CLKernelLibraryEx::create_kernel(const std::string &kernel_name,
+ const StringSet &build_options_set) const
+{
+ // Find which program contains the kernel
+ auto kernel_program_it = _kernel_program_map.find(kernel_name);
+
+ if (_kernel_program_map.end() == kernel_program_it)
+ {
+ ARM_COMPUTE_ERROR("Kernel %s not found in the CLKernelLibrary", kernel_name.c_str());
+ }
+ std::string concat_str;
+
+ if (fp16_supported())
+ {
+ concat_str += " -DARM_COMPUTE_OPENCL_FP16_ENABLED=1 ";
+ }
+
+ if (get_cl_version(_device) == CLVersion::CL20)
+ {
+ concat_str += " -cl-std=CL2.0 ";
+ }
+ else if (arm_non_uniform_workgroup_supported(_device))
+ {
+ concat_str += " -cl-arm-non-uniform-work-group-size ";
+ }
+ else
+ {
+ ARM_COMPUTE_ERROR("Non uniform workgroup size is not supported!!");
+ }
+
+ // Check if the program has been built before with same build options.
+ const std::string program_name = kernel_program_it->second;
+ const std::string build_options = stringify_set(build_options_set) + concat_str;
+
+ const std::string built_program_name = program_name + "_" + build_options;
+ auto built_program_it = _built_programs_map.find(built_program_name);
+
+ cl::Program cl_program;
+
+ if (_built_programs_map.end() != built_program_it)
+ {
+ // If the program has already been built, reuse it to create the kernel
+ cl_program = built_program_it->second;
+ }
+ else
+ {
+ // Get program
+ Program program = load_program(program_name);
+
+ // Build program
+ cl_program = program.build(build_options);
+
+ // Add built program to internal map
+ _built_programs_map.emplace(built_program_name, cl_program);
+ }
+
+ // Create and return kernel
+ return Kernel(kernel_name, cl_program);
+}
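+
+// Usage sketch (illustrative; the kernel name must exist in _kernel_program_map and the
+// build options shown here are hypothetical):
+//
+//   Kernel k = CLKernelLibraryEx::get().create_kernel(
+//       "cast", {"-DDATA_TYPE_IN=float", "-DDATA_TYPE_OUT=int", "-DVEC_SIZE=16"});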
+
+void CLKernelLibraryEx::add_built_program(const std::string &built_program_name,
+ cl::Program program)
+{
+ _built_programs_map.emplace(built_program_name, program);
+}
+
+bool CLKernelLibraryEx::fp16_supported() const { return ::fp16_supported(_device); }
+
+bool CLKernelLibraryEx::int64_base_atomics_supported() const
+{
+ return device_supports_extension(_device, "cl_khr_int64_base_atomics");
+}
+
+const Program &CLKernelLibraryEx::load_program(const std::string &program_name) const
+{
+ const auto program_it = _programs_map.find(program_name);
+
+ if (program_it != _programs_map.end())
+ {
+ return program_it->second;
+ }
+
+ Program program;
+
+#ifdef EMBEDDED_KERNELS
+ const auto program_source_it = _program_source_map.find(program_name);
+
+ if (_program_source_map.end() == program_source_it)
+ {
+ ARM_COMPUTE_ERROR("Embedded program for %s does not exist.", program_name.c_str());
+ }
+
+ program = Program(_context, program_name, program_source_it->second);
+#else /* EMBEDDED_KERNELS */
+ // Check for binary
+ std::string source_name = _kernel_path + program_name;
+ std::string binary_name = source_name + "bin";
+
+ if (std::ifstream(binary_name).is_open())
+ {
+ const std::string program_binary = read_file(binary_name, true);
+ program = Program(_context, _device, program_name,
+ std::vector<unsigned char>(program_binary.begin(), program_binary.end()));
+ }
+ else if (std::ifstream(source_name).is_open())
+ {
+ program = Program(_context, program_name, read_file(source_name, false));
+ }
+ else
+ {
+ ARM_COMPUTE_ERROR("Kernel file %s does not exist.", source_name.c_str());
+ }
+#endif /* EMBEDDED_KERNELS */
+
+ // Insert program to program map
+ const auto new_program = _programs_map.emplace(program_name, std::move(program));
+
+ return new_program.first->second;
+}
+
+std::string CLKernelLibraryEx::stringify_set(const StringSet &s) const
+{
+ std::string concat_set;
+
+#ifndef EMBEDDED_KERNELS
+ concat_set += "-I" + _kernel_path + " ";
+#endif /* EMBEDDED_KERNELS */
+
+ // Concatenate set
+ for (const auto &el : s)
+ {
+ concat_set += " " + el;
+ }
+
+ return concat_set;
+}
+
+std::string CLKernelLibraryEx::get_program_source(const std::string &program_name)
+{
+ const auto program_source_it = _program_source_map.find(program_name);
+
+ if (program_source_it == _program_source_map.end())
+ {
+ ARM_COMPUTE_ERROR("Embedded program for %s does not exist.", program_name.c_str());
+ }
+
+ return program_source_it->second;
+}
+
+size_t CLKernelLibraryEx::max_local_workgroup_size(const cl::Kernel &kernel) const
+{
+ size_t result;
+
+ cl_int err = kernel.getWorkGroupInfo(_device, CL_KERNEL_WORK_GROUP_SIZE, &result);
+ ARM_COMPUTE_ERROR_ON_MSG(
+ err != 0,
+ "clGetKernelWorkGroupInfo failed to return the maximum workgroup size for the kernel");
+ ARM_COMPUTE_UNUSED(err);
+
+ return result;
+}
+
+cl::NDRange CLKernelLibraryEx::default_ndrange() const
+{
+ cl::Device device = cl::Device::getDefault();
+ GPUTarget target = get_target_from_device(device);
+ cl::NDRange default_range;
+
+ switch (target)
+ {
+ case GPUTarget::MIDGARD:
+ case GPUTarget::T600:
+ case GPUTarget::T700:
+ case GPUTarget::T800:
+ default_range = cl::NDRange(128u, 1);
+ break;
+ default:
+ default_range = cl::NullRange;
+ }
+
+ return default_range;
+}
+
+std::string CLKernelLibraryEx::get_device_version() { return _device.getInfo<CL_DEVICE_VERSION>(); }
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl
new file mode 100644
index 000000000..2a6dfc91f
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arg_operation.cl
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
+/** Perform arg_max/arg_min
+ *
+ * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type.
+ * e.g. -DDATA_TYPE=short
+ * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size.
+ * e.g. -DDEPTH_OUT=16
+ * @attention Operation type (code) specifying which operation to perform should be passed as a
+ * preprocessor argument using -DOP_CODE=number, e.g. -DOP_CODE=1
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types:
+ * U8/QASYMM8/S8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension
+ * (in bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension
+ * (in bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension
+ * (in bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element
+ * in the source image
+ * @param[in] input_stride_w Stride of the source tensor in W dimension
+ * (in bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[out] output_ptr Pointer to the destination image.
+ * Supported data types: U32
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension
+ * (in bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension
+ * (in bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ * @param[in] axis Axis along which the reduction is performed
+ * @param[in] dim Size of the input along the axis being reduced
+ */
+
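+// Example host-side build options for this kernel (illustrative):
+//   -DDATA_TYPE=float -DDEPTH_OUT=16 -DOP_CODE=1   (arg max over a float input)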
+__kernel void arg_op(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output), const int axis,
+ const int dim)
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
+
+ int indices[4] = {
+ get_global_id(0), get_global_id(1), get_global_id(2) % DEPTH_OUT,
+ get_global_id(2) / DEPTH_OUT,
+ };
+
+ DATA_TYPE value =
+ *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3]));
+ DATA_TYPE tval = value;
+ int idx = 0;
+ for (int i = 1; i < dim; ++i)
+ {
+ indices[axis] = i;
+
+#if OP_CODE == 1 // ArgMax
+ value = max(value, *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1],
+ indices[2], indices[3])));
+#elif OP_CODE == 2 // ArgMin
+ value = min(value, *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1],
+ indices[2], indices[3])));
+#else
+ return;
+
+#endif
+
+ if (tval != value)
+ {
+ idx = indices[axis];
+ tval = value;
+ }
+ }
+
+ *((__global uint *)out.ptr) = idx;
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl
new file mode 100644
index 000000000..77e239f55
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/arithmetic_op_quantized.cl
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016, 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers_asymm.h"
+
+#ifdef SATURATE
+#define ADD(x, y) add_sat((x), (y))
+#define SUB(x, y) sub_sat((x), (y))
+#else /* SATURATE */
+#define ADD(x, y) (x) + (y)
+#define SUB(x, y) (x) - (y)
+#endif /* SATURATE */
+
+/** Performs a pixelwise addition of two QASYMM8 tensors, requantizing the int32
+ * intermediate values back to QASYMM8
+ *
+ * The following computations will be performed:
+ *
+ * -# Add offset terms to inputs
+ * -# Get scaled value of two inputs
+ * -# Add inputs
+ * -# Add offset terms to final result
+ * -# Multiply each entry of result by result_mult_int
+ * -# Shift the int32 accumulator by result_shift
+ * -# Clamp the resulting int32 values to the [0..255] range and cast to QASYMM8.
+ *
+ * @attention The inputs and output data types need to be passed at compile time using
+ * -DDATA_TYPE_IN1, -DDATA_TYPE_IN2 and -DDATA_TYPE_OUT:
+ * e.g. -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=uchar -DDATA_TYPE_OUT=uchar
+ * @attention The number of bits to shift left of input tensors must be passed at compile time using
+ * -DLEFT_SHIFT
+ * @attention The offset, scalar scale factor and number of bits to shift right of input tensors
+ * must be passed at compile time using -DIN1_OFFSET, -DIN1_MULT_INT, -DIN1_SHIFT,
+ * -DIN2_OFFSET, -DIN2_MULT_INT and -DIN2_SHIFT
+ * @attention The offset, scalar scale factor and number of bits to shift right of output tensor
+ * must be passed at compile time using -DRESULT_OFFSET, -DRESULT_MULT_INT and
+ * -DRESULT_SHIFT
+ *
+ * @attention The inputs and output scale information of qasymm8 need to be passed at compile time
+ * using -DSCALE_IN1, -DSCALE_IN2 and -DSCALE_OUT:
+ * e.g. -DSCALE_IN1=1.f -DSCALE_IN2=1.f -DSCALE_OUT=2.f
+ * @attention The inputs and output scale offset need to be passed at compile time using
+ * -DOFFSET_IN1, -DOFFSET_IN2 and -DOFFSET_OUT:
+ * e.g. -DOFFSET_IN1=0 -DOFFSET_IN2=0 -DOFFSET_OUT=0
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ * @attention To perform a saturating operation, -DSATURATE has to be passed to the compiler;
+ * otherwise a wrapping policy will be used.
+ *
+ * @param[in] in1_ptr Pointer to the source tensor.
+ * Supported data types: QASYMM8
+ * @param[in] in1_stride_x Stride of the source tensor in X dimension
+ * (in bytes)
+ * @param[in] in1_step_x in1_stride_x * number of elements along X processed
+ * per workitem(in bytes)
+ * @param[in] in1_stride_y Stride of the source tensor in Y dimension
+ * (in bytes)
+ * @param[in] in1_step_y in1_stride_y * number of elements along Y processed
+ * per workitem(in bytes)
+ * @param[in] in1_stride_z Stride of the source tensor in Z dimension
+ * (in bytes)
+ * @param[in] in1_step_z in1_stride_z * number of elements along Z processed
+ * per workitem(in bytes)
+ * @param[in] in1_offset_first_element_in_bytes The offset of the first element in the source
+ * tensor
+ * @param[in] in2_ptr Pointer to the source tensor. Supported data types:
+ * QASYMM8
+ * @param[in] in2_stride_x Stride of the source tensor in X dimension
+ * (in bytes)
+ * @param[in] in2_step_x in2_stride_x * number of elements along X processed
+ * per workitem(in bytes)
+ * @param[in] in2_stride_y Stride of the source tensor in Y dimension
+ * (in bytes)
+ * @param[in] in2_step_y in2_stride_y * number of elements along Y processed
+ * per workitem(in bytes)
+ * @param[in] in2_stride_z Stride of the source tensor in Z dimension
+ * (in bytes)
+ * @param[in] in2_step_z in2_stride_z * number of elements along Z processed
+ * per workitem(in bytes)
+ * @param[in] in2_offset_first_element_in_bytes The offset of the first element in the source
+ * tensor
+ * @param[out] out_ptr Pointer to the destination tensor.
+ * Supported data types: QASYMM8
+ * @param[in] out_stride_x Stride of the destination tensor in X dimension
+ * (in bytes)
+ * @param[in] out_step_x out_stride_x * number of elements along X processed
+ * per workitem(in bytes)
+ * @param[in] out_stride_y Stride of the destination tensor in Y dimension
+ * (in bytes)
+ * @param[in] out_step_y out_stride_y * number of elements along Y processed
+ * per workitem(in bytes)
+ * @param[in] out_stride_z Stride of the destination tensor in Z dimension
+ * (in bytes)
+ * @param[in] out_step_z out_stride_z * number of elements along Z processed
+ * per workitem(in bytes)
+ * @param[in] out_offset_first_element_in_bytes The offset of the first element in the destination
+ * tensor
+ */
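+// Example host-side build options (illustrative values only; the offsets, multipliers and
+// shifts are derived from the quantization parameters of the tensors involved):
+//   -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=uchar -DDATA_TYPE_OUT=uchar -DVEC_SIZE=16
+//   -DLEFT_SHIFT=20 -DIN1_OFFSET=-128 -DIN1_MULT_INT=1073741824 -DIN1_SHIFT=-3
+//   -DIN2_OFFSET=-128 -DIN2_MULT_INT=1073741824 -DIN2_SHIFT=-3
+//   -DRESULT_OFFSET=0 -DRESULT_MULT_INT=1073741824 -DRESULT_SHIFT=-19 -DSATURATE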
+__kernel void arithmetic_add_qasymm8(TENSOR3D_DECLARATION(in1), TENSOR3D_DECLARATION(in2),
+ TENSOR3D_DECLARATION(out))
+{
+ // Get pixels pointer
+ Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
+ Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
+ Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
+
+ // Load data
+ VEC_DATA_TYPE(int, 16)
+ in1_data = CONVERT(vload16(0, (__global DATA_TYPE_IN1 *)in1.ptr), VEC_DATA_TYPE(int, 16));
+ VEC_DATA_TYPE(int, 16)
+ in2_data = CONVERT(vload16(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(int, 16));
+
+ // Get scaled value of two inputs
+ VEC_DATA_TYPE(int, 16) in1_val = in1_data + (VEC_DATA_TYPE(int, 16))(IN1_OFFSET);
+ VEC_DATA_TYPE(int, 16) in2_val = in2_data + (VEC_DATA_TYPE(int, 16))(IN2_OFFSET);
+
+ VEC_DATA_TYPE(int, 16)
+ left_shift = (VEC_DATA_TYPE(int, 16))1 << (VEC_DATA_TYPE(int, 16))(LEFT_SHIFT);
+ VEC_DATA_TYPE(int, 16) shifted_in1_val = in1_val * left_shift;
+ VEC_DATA_TYPE(int, 16) shifted_in2_val = in2_val * left_shift;
+
+ VEC_DATA_TYPE(int, 16)
+ scaled_in1_val =
+ ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(shifted_in1_val, IN1_MULT_INT, IN1_SHIFT, 16);
+ VEC_DATA_TYPE(int, 16)
+ scaled_in2_val =
+ ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(shifted_in2_val, IN2_MULT_INT, IN2_SHIFT, 16);
+
+ // Add inputs and multiply with a multiplier smaller than 1
+ VEC_DATA_TYPE(int, 16) sum_val = scaled_in1_val + scaled_in2_val;
+ VEC_DATA_TYPE(int, 16)
+ out_val =
+ ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(sum_val, RESULT_MULT_INT, RESULT_SHIFT, 16);
+ out_val += (VEC_DATA_TYPE(int, 16))(RESULT_OFFSET);
+
+ VEC_DATA_TYPE(uchar, 16) res = CONVERT(out_val, VEC_DATA_TYPE(uchar, 16));
+
+ // TODO: Apply min/max bounds to support fusion with ReLU.
+ /*
+ #if defined(MIN_BOUND)
+ res = max(res, (uchar16)MIN_BOUND);
+ #endif // defined(MIN_BOUND)
+ #if defined(MAX_BOUND)
+ res = min(res, (uchar16)MAX_BOUND);
+ #endif // defined(MAX_BOUND)
+ */
+
+ // Store result
+ VSTORE(16)(CONVERT(res, VEC_DATA_TYPE(DATA_TYPE_OUT, 16)), 0, (__global DATA_TYPE_OUT *)out.ptr);
+}
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl
new file mode 100644
index 000000000..8c875516d
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/binary_logical_op.cl
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#ifndef VEC_SIZE
+#define VEC_SIZE 1
+#endif
+
+#if defined(OP_CODE) && defined(DATA_TYPE)
+/** Returns the truth value of the two input tensors for a BINARY LOGICAL OP,
+ * where BINARY LOGICAL OP can be AND or OR.
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=uchar
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size.
+ * e.g. -DVEC_SIZE=16
+ * @attention Operation type (code) specifying which operation to perform should be passed as a
+ * preprocessor argument using -DOP_CODE=number, e.g. -DOP_CODE=1
+ *
+ * @param[in] input1_ptr Pointer to the source tensor.
+ * Supported data types: QASYMM8
+ * @param[in] input1_stride_x Stride of the source tensor in X dimension
+ * (in bytes)
+ * @param[in] input1_step_x input1_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input1_stride_y Stride of the source tensor in Y dimension
+ * (in bytes)
+ * @param[in] input1_step_y input1_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input1_stride_z Stride of the source tensor in Z dimension
+ * (in bytes)
+ * @param[in] input1_step_z input1_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source
+ * tensor
+ * @param[in] input2_ptr Pointer to the source tensor.
+ * Supported data types: QASYMM8
+ * @param[in] input2_stride_x Stride of the source tensor in X dimension
+ * (in bytes)
+ * @param[in] input2_step_x input2_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input2_stride_y Stride of the source tensor in Y dimension
+ * (in bytes)
+ * @param[in] input2_step_y input2_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input2_stride_z Stride of the source tensor in Z dimension
+ * (in bytes)
+ * @param[in] input2_step_z input2_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input2_offset_first_element_in_bytes The offset of the first element in the source
+ * tensor
+ * @param[out] output_ptr Pointer to the destination tensor.
+ * Supported data types: QASYMM8
+ * @param[in] output_stride_x Stride of the destination tensor in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination tensor in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension
+ * (in bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ */
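+// Example host-side build options (illustrative): logical AND on QASYMM8 data, processing
+// 16 elements per work item:
+//   -DDATA_TYPE=uchar -DVEC_SIZE=16 -DOP_CODE=1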
+__kernel void binary_logical_op(TENSOR3D_DECLARATION(input1), TENSOR3D_DECLARATION(input2),
+ TENSOR3D_DECLARATION(output))
+{
+ Tensor3D input1 = CONVERT_TO_TENSOR3D_STRUCT(input1);
+ Tensor3D input2 = CONVERT_TO_TENSOR3D_STRUCT(input2);
+ Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
+
+#if OP_CODE == 1 // LOGICAL AND
+ VSTORE(VEC_SIZE)
+ (CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input1.ptr) &&
+ VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input2.ptr),
+ VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)),
+ 0, (__global DATA_TYPE *)output.ptr);
+
+#elif OP_CODE == 2 // LOGICAL OR
+ VSTORE(VEC_SIZE)
+ (CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input1.ptr) ||
+ VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input2.ptr),
+ VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)),
+ 0, (__global DATA_TYPE *)output.ptr);
+
+#else // OP NOT SUPPORTED
+ return;
+
+#endif
+}
+#endif // if defined(OP_CODE) && defined(DATA_TYPE)
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl
new file mode 100644
index 000000000..d5a07476a
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/cast.cl
@@ -0,0 +1,198 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#ifndef SCALE
+#define SCALE 1.0f
+#endif
+#ifndef OFFSET
+#define OFFSET 0
+#endif
+#ifndef VEC_SIZE
+#define VEC_SIZE 1
+#endif
+
+#if defined(DATA_TYPE_IN) && defined(DATA_TYPE_OUT)
+/** Perform a cast operation on an input tensor.
+ *
+ * @attention Data types of both input and output can be passed using the -DDATA_TYPE_IN and
+ * -DDATA_TYPE_OUT compile flag, e.g. -DDATA_TYPE_IN=float, -DDATA_TYPE_OUT=int
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: F16/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
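+// Example host-side build options (illustrative): cast float to int, 16 elements per
+// work item:
+//   -DDATA_TYPE_IN=float -DDATA_TYPE_OUT=int -DVEC_SIZE=16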
+__kernel void cast(TENSOR3D_DECLARATION(input), TENSOR3D_DECLARATION(output))
+{
+ Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
+ Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
+
+ VSTORE(VEC_SIZE)
+ (CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input.ptr),
+ VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)),
+ 0, (__global DATA_TYPE_OUT *)output.ptr);
+}
+
+/** Perform a cast operation on a QASYMM8 input tensor.
+ * @attention Data types of both input and output can be passed using the -DDATA_TYPE_IN and
+ * -DDATA_TYPE_OUT compile flag, e.g. -DDATA_TYPE_IN=float, -DDATA_TYPE_OUT=int
+ * @attention Offset and Scale of input should be given as a preprocessor argument using
+ * -DOFFSET=int, -DSCALE=float. e.g. -DOFFSET=1, -DSCALE=0.5
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
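+ * @note As a worked example, with -DOFFSET=1 and -DSCALE=0.5 a quantized input value of 5
+ *       dequantizes to (5 - 1) * 0.5 = 2.0, which is the computation performed below.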
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: F16/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
+__kernel void cast_qasymm_in(TENSOR3D_DECLARATION(input), TENSOR3D_DECLARATION(output))
+{
+ Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
+ Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
+
+ VEC_DATA_TYPE(DATA_TYPE_IN, VEC_SIZE)
+ in_data = VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input.ptr);
+ VEC_DATA_TYPE(int, VEC_SIZE) offset = (VEC_DATA_TYPE(int, VEC_SIZE))(OFFSET);
+ VEC_DATA_TYPE(float, VEC_SIZE) scale = (VEC_DATA_TYPE(float, VEC_SIZE))(SCALE);
+
+ VEC_DATA_TYPE(int, VEC_SIZE) tmp = CONVERT(in_data, VEC_DATA_TYPE(int, VEC_SIZE)) - offset;
+ VEC_DATA_TYPE(float, VEC_SIZE) out_data = CONVERT(tmp, VEC_DATA_TYPE(float, VEC_SIZE)) * scale;
+
+ VSTORE(VEC_SIZE)
+ (CONVERT(out_data, VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)), 0,
+ (__global DATA_TYPE_OUT *)output.ptr);
+}
+
+/** Perform a cast operation on a QASYMM8 output tensor.
+ * @attention Data types of both input and output can be passed using the -DDATA_TYPE_IN and
+ * -DDATA_TYPE_OUT compile flag, e.g. -DDATA_TYPE_IN=float, -DDATA_TYPE_OUT=int
+ * @attention Offset and Scale of output should be given as a preprocessor argument using
+ * -DOFFSET=int, -DSCALE=float. e.g. -DOFFSET=1, -DSCALE=0.5
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: F16/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: U8
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
+__kernel void cast_qasymm_out(TENSOR3D_DECLARATION(input), TENSOR3D_DECLARATION(output))
+{
+ Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
+ Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
+
+ VEC_DATA_TYPE(DATA_TYPE_IN, VEC_SIZE)
+ in_data = VLOAD(VEC_SIZE)(0, (__global DATA_TYPE_IN *)input.ptr);
+ VEC_DATA_TYPE(int, VEC_SIZE) offset = (VEC_DATA_TYPE(int, VEC_SIZE))(OFFSET);
+ VEC_DATA_TYPE(float, VEC_SIZE) scale = (VEC_DATA_TYPE(float, VEC_SIZE))(SCALE);
+
+ VEC_DATA_TYPE(float, VEC_SIZE) tmp = CONVERT(in_data, VEC_DATA_TYPE(float, VEC_SIZE)) / scale;
+ VEC_DATA_TYPE(float, VEC_SIZE) out_data = tmp + CONVERT(offset, VEC_DATA_TYPE(float, VEC_SIZE));
+
+ VSTORE(VEC_SIZE)
+ (CONVERT(out_data, VEC_DATA_TYPE(DATA_TYPE_OUT, VEC_SIZE)), 0,
+ (__global DATA_TYPE_OUT *)output.ptr);
+}
+#endif // defined(DATA_TYPE_IN) && defined(DATA_TYPE_OUT)
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl
new file mode 100644
index 000000000..e005322f7
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/depth_to_space.cl
@@ -0,0 +1,161 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016, 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE) && defined(Z_OUT)
+/** Perform depth to space rearrangement of tensor
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size.
+ * e.g. -DDEPTH_OUT=16
+ * @attention The value of the z-axis of output tensor should be given as a preprocessor argument
+ * using -DZ_OUT=size. e.g. -DZ_OUT=16
+ * @attention block size should be given as a preprocessor argument using -DBLOCK_SIZE=size. e.g.
+ * -DBLOCK_SIZE=1
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
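+// A worked example of the index mapping below (illustrative): with -DBLOCK_SIZE=2 and
+// -DDEPTH_OUT=1, output element (w=1, h=0, c=0) reads input channel
+// c_in = 0 + ((0 % 2) * 2 + 1 % 2) * 1 = 1 at spatial position (0, 0).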
+__kernel void depth_to_space_nchw(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output))
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, 0);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, Z_OUT);
+
+ int out_index[4] = {0};
+ int in_index[4] = {0};
+
+ out_index[0] = get_global_id(0); // W
+ out_index[1] = get_global_id(1); // H
+ out_index[2] = get_global_id(2) % Z_OUT; // C
+ out_index[3] = get_global_id(2) / Z_OUT; // B
+
+ in_index[0] = out_index[0] / BLOCK_SIZE;
+ in_index[1] = out_index[1] / BLOCK_SIZE;
+ in_index[2] = out_index[2] +
+ ((out_index[1] % BLOCK_SIZE) * BLOCK_SIZE + out_index[0] % BLOCK_SIZE) * DEPTH_OUT;
+ in_index[3] = out_index[3];
+
+ *((__global DATA_TYPE *)out.ptr) = *((__global DATA_TYPE *)tensor4D_offset(
+ &in, in_index[0], in_index[1], in_index[2], in_index[3]));
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE) && defined(Z_OUT)
+
+#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE) && defined(Z_OUT)
+/** Perform depth to space rearrangement of tensor (NHWC)
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size.
+ * e.g. -DDEPTH_OUT=16
+ * @attention The value of the z-axis of output tensor should be given as a preprocessor argument
+ * using -DZ_OUT=size. e.g. -DZ_OUT=16
+ * @attention block size should be given as a preprocessor argument using -DBLOCK_SIZE=size. e.g.
+ * -DBLOCK_SIZE=1
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
+__kernel void depth_to_space_nhwc(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output))
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, 0);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, Z_OUT);
+
+ int out_index[4] = {0};
+ int in_index[4] = {0};
+
+ out_index[0] = get_global_id(0); // C
+ out_index[1] = get_global_id(1); // W
+ out_index[2] = get_global_id(2) % Z_OUT; // H
+ out_index[3] = get_global_id(2) / Z_OUT; // B
+
+ in_index[0] = out_index[0] +
+ ((out_index[2] % BLOCK_SIZE) * BLOCK_SIZE + out_index[1] % BLOCK_SIZE) * DEPTH_OUT;
+ in_index[1] = out_index[1] / BLOCK_SIZE;
+ in_index[2] = out_index[2] / BLOCK_SIZE;
+ in_index[3] = out_index[3];
+
+ *((__global DATA_TYPE *)out.ptr) = *((__global DATA_TYPE *)tensor4D_offset(
+ &in, in_index[0], in_index[1], in_index[2], in_index[3]));
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BLOCK_SIZE) && defined(Z_OUT)
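These kernels are fully specialized at build time, so the host must pass every define the guard above expects. A hedged sketch of the build step (the program and device handles and the chosen -D values are assumptions, not code from this patch):

/* Sketch: compiling depth_to_space with its compile-time parameters.
 * 'program' and 'device' are assumed to exist; the -D values are examples
 * (input depth must equal DEPTH_OUT * BLOCK_SIZE^2). */
#include <CL/cl.h>

cl_int build_depth_to_space(cl_program program, cl_device_id device)
{
  const char *options = "-DDATA_TYPE=float -DBLOCK_SIZE=2 -DDEPTH_OUT=4 -DZ_OUT=4";
  return clBuildProgram(program, 1, &device, options, NULL, NULL);
}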
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl
new file mode 100644
index 000000000..dd8cb6d93
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/embedding_lookup.cl
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#ifndef VEC_SIZE
+#define VEC_SIZE 1
+#endif
+
+#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
+/** Perform embedding_lookup of input tensor
+ *
+ * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g.
+ * -DDATA_TYPE=short
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ * @attention Output tensor depth should be given as a preprocessor argument using
+ * -DDEPTH_OUT=depth. e.g. -DDEPTH_OUT=16
+ * @attention Number of input dimensions is passed as a preprocessor argument using
+ * -DNUM_DIMS=size, e.g. -DNUM_DIMS=4
+ *
+ * @param[in] input_ptr Pointer to the source tensor. Supported data
+ * types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source tensor in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source tensor in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * tensor
+ * @param[in] input_stride_w Stride of the source tensor in W dimension (in
+ * bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[out] output_ptr Pointer to the destination tensor. Supported
+ * data types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination tensor in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination tensor in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination tensor
+ * @param[in] lookups_ptr Pointer to the lookups vector. Supported data
+ * types: S32
+ * @param[in] lookups_stride_x Stride of the lookups vector in X dimension (in
+ * bytes)
+ * @param[in] lookups_step_x lookups_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] lookups_offset_first_element_in_bytes The offset of the first element in the lookups
+ * vector
+ */
+
+__kernel void embedding_lookup(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output),
+ VECTOR_DECLARATION(lookups))
+{
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, DEPTH_OUT);
+
+ Vector lups = CONVERT_TO_VECTOR_STRUCT_NO_STEP(lookups);
+
+ // lookup ids based on the tensor dimensions
+ int lup_id[4] = {0};
+
+ lup_id[0] = (NUM_DIMS == 1) ? *((__global int *)vector_offset(&lups, get_global_id(0)))
+ : get_global_id(0);
+ lup_id[1] = (NUM_DIMS == 2) ? *((__global int *)vector_offset(&lups, get_global_id(1)))
+ : get_global_id(1);
+ lup_id[2] = (NUM_DIMS == 3) ? *((__global int *)vector_offset(&lups, get_global_id(2)))
+ : get_global_id(2) % DEPTH_OUT;
+ lup_id[3] = (NUM_DIMS == 4)
+ ? *((__global int *)vector_offset(&lups, get_global_id(2) / DEPTH_OUT))
+ : get_global_id(2) / DEPTH_OUT;
+
+ in.ptr += input_offset_first_element_in_bytes + lup_id[0] * input_step_x +
+ lup_id[1] * input_step_y + lup_id[2] * input_step_z + lup_id[3] * input_step_w;
+
+ VSTORE(VEC_SIZE)
+ (CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)in.ptr), VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)), 0,
+ (__global DATA_TYPE *)out.ptr);
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
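Functionally, only the outermost dimension is remapped through the lookups vector; every other coordinate passes through unchanged, so for a 2-D table this reduces to row gathering. A minimal C sketch of that semantics (the row layout and float element type are illustrative assumptions):

/* Row-gather semantics of embedding_lookup for a 2-D table:
 * out[i][:] = table[lookups[i]][:]. */
#include <string.h>

void embedding_lookup_ref(const float *table, int cols, const int *lookups,
                          int n_lookups, float *out)
{
  for (int i = 0; i < n_lookups; ++i)
    memcpy(out + (size_t)i * cols, table + (size_t)lookups[i] * cols,
           sizeof(float) * (size_t)cols);
}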
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/gather_ex.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/gather_ex.cl
new file mode 100644
index 000000000..09f776156
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/gather_ex.cl
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#if defined(DATA_TYPE) && defined(AXIS) && defined(INDICES_DIM)
+
+/** Performs the Gather operation along the chosen axis
+ * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g.
+ * -DDATA_TYPE=short
+ * @note Axis should be given as a preprocessor argument using -DAXIS=axis. e.g. -DAXIS=1
+ * @note Number of dimensions of the indices tensor should be given as a preprocessor argument
+ * using -DINDICES_DIM=size. e.g. -DINDICES_DIM=1
+ * @attention Output tensor depth should be given as a preprocessor argument using
+ * -DOUTPUT_DIM_Z=size. e.g. -DOUTPUT_DIM_Z=16
+ * @attention Input tensor depth should be given as a preprocessor argument using
+ * -DINPUT_DIM_Z=size. e.g. -DINPUT_DIM_Z=16
+ *
+ * @param[in] input_ptr Pointer to the source tensor. Supported data
+ * types: U8/S8/U16/S16/U32/S32/F16/F32
+ * @param[in] input_stride_x Stride of the source tensor in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per work item (in bytes)
+ * @param[in] input_stride_y Stride of the source tensor in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per work item (in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per work item (in bytes)
+ * @param[in] input_stride_w Stride of the source tensor in W dimension (in
+ * bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along W
+ * processed per work item (in bytes)
+ * @param[in] input_offset_first_element_in_bytes Offset of the first element in the source
+ * tensor
+ * @param[in] indices_ptr Pointer to the indices tensor. Supported data
+ * types: S32
+ * @param[in] indices_stride_x Stride of the indices tensor in X dimension (in
+ * bytes)
+ * @param[in] indices_step_x indices_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] indices_stride_y Stride of the indices tensor in Y dimension (in
+ * bytes)
+ * @param[in] indices_step_y indices_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] indices_stride_z Stride of the indices tensor in Z dimension (in
+ * bytes)
+ * @param[in] indices_step_z indices_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] indices_offset_first_element_in_bytes The offset of the first element in the
+ * indices tensor
+ * @param[out] output_ptr Pointer to the destination tensor. Supported
+ * data types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination tensor in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per work item (in bytes)
+ * @param[in] output_stride_y Stride of the destination tensor in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per work item (in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension
+ * (in bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per work item (in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension
+ * (in bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per work item (in bytes)
+ * @param[in] output_offset_first_element_in_bytes Offset of the first element in the destination
+ * tensor
+ */
+__kernel void gather_ex(TENSOR4D_DECLARATION(input), TENSOR3D_DECLARATION(indices),
+ TENSOR4D_DECLARATION(output))
+{
+ const int px = get_global_id(0);
+ const int py = get_global_id(1);
+ const int pz = get_global_id(2) % OUTPUT_DIM_Z;
+ const int pw = get_global_id(2) / OUTPUT_DIM_Z;
+
+ const Tensor4D input = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, INPUT_DIM_Z);
+ const Tensor3D indices = CONVERT_TO_TENSOR3D_STRUCT_NO_STEP(indices);
+ Tensor4D output = CONVERT_TO_TENSOR4D_STRUCT(output, OUTPUT_DIM_Z);
+
+#if AXIS == 0
+#if INDICES_DIM == 1
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, px, 0, 0);
+ __global const uchar *input_addr = tensor4D_offset(&input, index, py, pz, pw);
+#elif INDICES_DIM == 2
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, px, py, 0);
+ __global const uchar *input_addr = tensor4D_offset(&input, index, pz, pw, 0);
+#elif INDICES_DIM == 3
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, px, py, pz);
+ __global const uchar *input_addr = tensor4D_offset(&input, index, pw, 0, 0);
+#endif
+#elif AXIS == 1
+#if INDICES_DIM == 1
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, py, 0, 0);
+ __global const uchar *input_addr = tensor4D_offset(&input, px, index, pz, pw);
+#elif INDICES_DIM == 2
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, py, pz, 0);
+ __global const uchar *input_addr = tensor4D_offset(&input, px, index, pw, 0);
+#elif INDICES_DIM == 3
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, py, pz, pw);
+ __global const uchar *input_addr = tensor4D_offset(&input, px, index, 0, 0);
+#endif
+#elif AXIS == 2
+#if INDICES_DIM == 1
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, pz, 0, 0);
+ __global const uchar *input_addr = tensor4D_offset(&input, px, py, index, pw);
+#elif INDICES_DIM == 2
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, pz, pw, 0);
+ __global const uchar *input_addr = tensor4D_offset(&input, px, py, index, 0);
+#endif
+#elif AXIS == 3
+#if INDICES_DIM == 1
+ const uint index = *(__global const uint *)tensor3D_offset(&indices, pw, 0, 0);
+ __global const uchar *input_addr = tensor4D_offset(&input, px, py, pz, index);
+#endif
+#endif // AXIS
+
+ *(__global DATA_TYPE *)output.ptr = *((__global const DATA_TYPE *)input_addr);
+}
+
+#endif // defined(DATA_TYPE) && defined(AXIS) && defined(INDICES_DIM)
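Each AXIS/INDICES_DIM pair above flattens the indices coordinates into the output shape. The AXIS == 1, INDICES_DIM == 1 case reduces to column gathering on a 2-D view; a C sketch of that case (shapes and element type are assumptions):

/* gather with axis = 1 and 1-D indices: out[i][j] = in[i][indices[j]]. */
void gather_axis1_ref(const float *in, int rows, int cols, const int *indices,
                      int n_indices, float *out)
{
  for (int i = 0; i < rows; ++i)
    for (int j = 0; j < n_indices; ++j)
      out[i * n_indices + j] = in[i * cols + indices[j]];
}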
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl
new file mode 100644
index 000000000..73f29e3e5
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/hashtable_lookup.cl
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#ifndef VEC_SIZE
+#define VEC_SIZE 1
+#endif
+
+#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
+/** Perform hashtable_lookup of input tensor
+ *
+ * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g.
+ * -DDATA_TYPE=short
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ * @attention Output tensor depth should be given as a preprocessor argument using
+ * -DDEPTH_OUT=depth. e.g. -DDEPTH_OUT=16
+ * @attention Number of input dimensions is passed as a preprocessor argument using
+ * -DNUM_DIMS=size, e.g. -DNUM_DIMS=4
+ *
+ * @param[in] input_ptr Pointer to the source tensor. Supported data
+ * types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source tensor in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source tensor in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * tensor
+ * @param[in] input_stride_w Stride of the source tensor in W dimension (in
+ * bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[out] output_ptr Pointer to the destination tensor. Supported
+ * data types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination tensor in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination tensor in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination tensor
+ * @param[in] lookups_ptr Pointer to the lookups vector. Supported data
+ * types: S32
+ * @param[in] lookups_stride_x Stride of the lookups vector in X dimension (in
+ * bytes)
+ * @param[in] lookups_step_x lookups_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] lookups_offset_first_element_in_bytes The offset of the first element in the lookups
+ * vector
+ */
+__kernel void hashtable_lookup(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output),
+ VECTOR_DECLARATION(lookups))
+{
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(input, DEPTH_OUT);
+
+ Vector lups = CONVERT_TO_VECTOR_STRUCT_NO_STEP(lookups);
+
+ int lup_id[4] = {0};
+
+ lup_id[0] = (NUM_DIMS == 1) ? *((__global int *)vector_offset(&lups, get_global_id(0)))
+ : get_global_id(0);
+ lup_id[1] = (NUM_DIMS == 2) ? *((__global int *)vector_offset(&lups, get_global_id(1)))
+ : get_global_id(1);
+ lup_id[2] = (NUM_DIMS == 3) ? *((__global int *)vector_offset(&lups, get_global_id(2)))
+ : get_global_id(2) % DEPTH_OUT;
+ lup_id[3] = (NUM_DIMS == 4)
+ ? *((__global int *)vector_offset(&lups, get_global_id(2) / DEPTH_OUT))
+ : get_global_id(2) / DEPTH_OUT;
+
+ if (lup_id[NUM_DIMS - 1] < 0)
+ {
+ VSTORE(VEC_SIZE)((VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE))0, 0, (__global DATA_TYPE *)out.ptr);
+ return;
+ }
+
+ in.ptr += input_offset_first_element_in_bytes + lup_id[0] * input_step_x +
+ lup_id[1] * input_step_y + lup_id[2] * input_step_z + lup_id[3] * input_step_w;
+
+ VSTORE(VEC_SIZE)
+ (CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)in.ptr), VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)), 0,
+ (__global DATA_TYPE *)out.ptr);
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(NUM_DIMS)
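The only behavioral difference from embedding_lookup above is the miss path: lookups are resolved on the host, a miss is encoded as a negative id, and the kernel zero-fills the corresponding output row. A C sketch of that contract (the row layout and element type are assumptions):

/* hashtable_lookup row semantics: negative ids (host-side misses) produce
 * zeroed rows; non-negative ids copy the addressed row. */
#include <string.h>

void hashtable_lookup_ref(const float *table, int cols, const int *ids,
                          int n_ids, float *out)
{
  for (int i = 0; i < n_ids; ++i)
  {
    if (ids[i] < 0)
      memset(out + (size_t)i * cols, 0, sizeof(float) * (size_t)cols);
    else
      memcpy(out + (size_t)i * cols, table + (size_t)ids[i] * cols,
             sizeof(float) * (size_t)cols);
  }
}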
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers.h b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers.h
index 0e123ae0a..0e123ae0a 100644
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers.h
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers.h
diff --git a/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers_asymm.h b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers_asymm.h
index c39138caa..c39138caa 100644
--- a/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers_asymm.h
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/helpers_asymm.h
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl
new file mode 100644
index 000000000..4aa7883c3
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/neg_tensor.cl
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#ifndef VEC_SIZE
+#define VEC_SIZE 1
+#endif
+
+#if defined(DATA_TYPE)
+/** Performs a negation of input tensor.
+ *
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ *
+ * @param[in] input_ptr Pointer to the source tensor. Supported data types:
+ * S16/S32/F16/F32.
+ * @param[in] input_stride_x Stride of the source tensor in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X processed
+ * per work item (in bytes)
+ * @param[in] input_offset_first_element_in_bytes Offset of the first element in the source tensor
+ * @param[out] output_ptr Pointer to the destination tensor. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination tensor in X dimension (in
+ * bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X processed
+ * per work item (in bytes)
+ * @param[in] output_offset_first_element_in_bytes Offset of the first element in the destination
+ * tensor
+ *
+ */
+__kernel void neg_tensor(TENSOR3D_DECLARATION(input), TENSOR3D_DECLARATION(output))
+{
+ Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
+ Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
+
+ VSTORE(VEC_SIZE)
+ (-VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr), 0, (__global DATA_TYPE *)output.ptr);
+}
+#endif // defined(DATA_TYPE)
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl
new file mode 100644
index 000000000..2074d3ceb
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/pixelwise_mul_quantized.cl
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016, 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers_asymm.h"
+
+#ifdef SATURATE
+#define CONVERT_OP_FLOAT_STR(x, type, round) (convert_##type##_sat##round(x))
+#else /* SATURATE */
+#define CONVERT_OP_FLOAT_STR(x, type, round) (convert_##type##round(x))
+#endif /* SATURATE */
+#define CONVERT_OP_FLOAT(x, type, round) CONVERT_OP_FLOAT_STR(x, type, round)
+
+#if defined(RESULT_OFFSET) && defined(RESULT_MULT_INT) && defined(RESULT_SHIFT)
+/** Performs a pixelwise multiplication used to quantize down the int32 accumulator values of
+ * GEMMLowp to QASYMM8
+ *
+ * The following computations will be performed by the kernel:
+ *
+ * -# Add offset terms to inputs
+ * -# Multiply inputs
+ * -# Add offset terms to final result
+ * -# Multiply each entry of result by result_mult_int
+ * -# Shift the int32 accumulator by result_shift
+ * -# Clamp the resulting int32 values to the [0..255] range and cast to QASYMM8.
+ *
+ * @attention The inputs and output data types need to be passed at compile time using
+ * -DDATA_TYPE_IN1, -DDATA_TYPE_IN2 and -DDATA_TYPE_OUT:
+ * e.g. -DDATA_TYPE_IN1=uchar -DDATA_TYPE_IN2=uchar -DDATA_TYPE_OUT=uchar
+ * @attention The offset factor of inputs must be passed at compile time using -DIN1_OFFSET and
+ * -DIN2_OFFSET
+ * @attention The offset, scalar scale factor and number of bits to shift right of output tensor
+ * must be passed at compile time using -DRESULT_OFFSET, -DRESULT_MULT_INT and
+ * -DRESULT_SHIFT
+ *
+ * @param[in] in1_ptr Pointer to the source image. Supported data types:
+ * U8
+ * @param[in] in1_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] in1_step_x in1_stride_x * number of elements along X processed
+ * per workitem(in bytes)
+ * @param[in] in1_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] in1_step_y in1_stride_y * number of elements along Y processed
+ * per workitem(in bytes)
+ * @param[in] in1_stride_z Stride of the source image in Z dimension (in
+ * bytes)
+ * @param[in] in1_step_z in1_stride_z * number of elements along Z processed
+ * per workitem(in bytes)
+ * @param[in] in1_offset_first_element_in_bytes The offset of the first element in the source image
+ * @param[in] in2_ptr Pointer to the source image. Supported data types:
+ * U8
+ * @param[in] in2_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] in2_step_x in2_stride_x * number of elements along X processed
+ * per workitem(in bytes)
+ * @param[in] in2_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] in2_step_y in2_stride_y * number of elements along Y processed
+ * per workitem(in bytes)
+ * @param[in] in2_stride_z Stride of the source image in Z dimension (in
+ * bytes)
+ * @param[in] in2_step_z in2_stride_z * number of elements along Z processed
+ * per workitem(in bytes)
+ * @param[in] in2_offset_first_element_in_bytes The offset of the first element in the source image
+ * @param[out] out_ptr Pointer to the destination image. Supported data
+ * types: U8
+ * @param[in] out_stride_x Stride of the destination image in X dimension (in
+ * bytes)
+ * @param[in] out_step_x out_stride_x * number of elements along X processed
+ * per workitem(in bytes)
+ * @param[in] out_stride_y Stride of the destination image in Y dimension (in
+ * bytes)
+ * @param[in] out_step_y out_stride_y * number of elements along Y processed
+ * per workitem(in bytes)
+ * @param[in] out_stride_z Stride of the destination image in Z dimension (in
+ * bytes)
+ * @param[in] out_step_z out_stride_z * number of elements along Z processed
+ * per workitem(in bytes)
+ * @param[in] out_offset_first_element_in_bytes The offset of the first element in the destination
+ * image
+ * @param[in] scale Float scaling factor. Supported data types: F32
+ */
+__kernel void pixelwise_mul_qasymm8(TENSOR3D_DECLARATION(in1), TENSOR3D_DECLARATION(in2),
+ TENSOR3D_DECLARATION(out), const float scale)
+{
+ // Get pixels pointer
+ Tensor3D in1 = CONVERT_TO_TENSOR3D_STRUCT(in1);
+ Tensor3D in2 = CONVERT_TO_TENSOR3D_STRUCT(in2);
+ Tensor3D out = CONVERT_TO_TENSOR3D_STRUCT(out);
+
+ // Load data
+ VEC_DATA_TYPE(int, 16)
+ in1_data = CONVERT(vload16(0, (__global DATA_TYPE_IN1 *)in1.ptr), VEC_DATA_TYPE(int, 16));
+ VEC_DATA_TYPE(int, 16)
+ in2_data = CONVERT(vload16(0, (__global DATA_TYPE_IN2 *)in2.ptr), VEC_DATA_TYPE(int, 16));
+
+ // Perform multiplication of two inputs
+ VEC_DATA_TYPE(int, 16) in1_val = in1_data + (VEC_DATA_TYPE(int, 16))(IN1_OFFSET);
+ VEC_DATA_TYPE(int, 16) in2_val = in2_data + (VEC_DATA_TYPE(int, 16))(IN2_OFFSET);
+ VEC_DATA_TYPE(int, 16) out_val = in1_val * in2_val;
+
+ // Multiply with a multiplier smaller than 1
+ out_val =
+ ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE(out_val, RESULT_MULT_INT, RESULT_SHIFT, 16);
+ out_val += (VEC_DATA_TYPE(int, 16))(RESULT_OFFSET);
+
+ VEC_DATA_TYPE(uchar, 16) res = CONVERT(out_val, VEC_DATA_TYPE(uchar, 16));
+
+ // TODO: Apply min-max bound to support fusing with ReLU.
+ /*
+ #if defined(MIN_BOUND)
+ res = max(res, (uchar16)MIN_BOUND);
+ #endif // defined(MIN_BOUND)
+ #if defined(MAX_BOUND)
+ res = min(res, (uchar16)MAX_BOUND);
+ #endif // defined(MAX_BOUND)
+ */
+
+ // Store result
+ VSTORE(16)(CONVERT(res, VEC_DATA_TYPE(DATA_TYPE_OUT, 16)), 0, (__global DATA_TYPE_OUT *)out.ptr);
+}
+#endif // defined(RESULT_OFFSET) && defined(RESULT_MULT_INT) && defined(RESULT_SHIFT)
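The documented steps map onto gemmlowp-style fixed-point arithmetic: ASYMM_MULT_BY_QUANT_MULTIPLIER_LESS_THAN_ONE is a saturating rounding doubling high multiply followed by a rounding right shift. A scalar C sketch of one output element (the helpers mirror the standard gemmlowp definitions; offset, multiplier and shift values would come from the -D defines):

#include <stdint.h>

/* Standard gemmlowp-style fixed-point helpers, scalar form. */
static int32_t sat_rounding_doubling_high_mul(int32_t a, int32_t b)
{
  if (a == INT32_MIN && b == INT32_MIN) /* the only overflowing case */
    return INT32_MAX;
  int64_t ab = (int64_t)a * (int64_t)b;
  int64_t nudge = ab >= 0 ? (1 << 30) : (1 - (1 << 30));
  return (int32_t)((ab + nudge) >> 31);
}

static int32_t rounding_divide_by_pot(int32_t x, int exponent)
{
  const int32_t mask = ((int32_t)1 << exponent) - 1;
  const int32_t remainder = x & mask;
  const int32_t threshold = (mask >> 1) + (x < 0 ? 1 : 0);
  return (x >> exponent) + (remainder > threshold ? 1 : 0);
}

/* One QASYMM8 output element of the pixelwise multiplication. */
uint8_t pixelwise_mul_q8_ref(uint8_t q1, uint8_t q2, int32_t in1_offset,
                             int32_t in2_offset, int32_t result_offset,
                             int32_t result_mult_int, int32_t result_shift)
{
  int32_t acc = ((int32_t)q1 + in1_offset) * ((int32_t)q2 + in2_offset);
  acc = rounding_divide_by_pot(sat_rounding_doubling_high_mul(acc, result_mult_int),
                               result_shift);
  acc += result_offset;
  if (acc < 0) acc = 0;   /* clamp to QASYMM8; the kernel currently uses a */
  if (acc > 255) acc = 255; /* plain CONVERT here, see the TODO above.    */
  return (uint8_t)acc;
}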
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl
new file mode 100644
index 000000000..62a8901f6
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu.cl
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#ifndef VEC_SIZE
+#define VEC_SIZE 1
+#endif
+
+#if defined(DATA_TYPE)
+/** Returns result of prelu function implemented as below:
+ * f(input) = alpha * input for input < 0, f(input) = input for input >= 0.
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ * @note Can only take floating point data types.
+ *
+ * @param[in] input1_ptr Pointer to the source image. Supported Data
+ * types : F16/F32
+ * @param[in] input1_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input1_step_x input1_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input1_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input1_step_y input1_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input1_step_z input1_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[in] alpha_ptr Pointer to the source image. Supported Data
+ * types : F16/F32
+ * @param[in] alpha_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] alpha_step_x alpha_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] alpha_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] alpha_step_y alpha_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] alpha_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] alpha_step_z alpha_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] alpha_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ *
+ * @param[out] output_ptr Pointer to the destination image. Supported
+ * data types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
+__kernel void prelu(TENSOR3D_DECLARATION(input), TENSOR3D_DECLARATION(alpha),
+ TENSOR3D_DECLARATION(output))
+{
+ Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
+ Tensor3D alpha = CONVERT_TO_TENSOR3D_STRUCT(alpha);
+ Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
+
+ VSTORE(VEC_SIZE)
+ (VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr) < 0
+ ? VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr) *
+ VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)alpha.ptr)
+ : VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)input.ptr),
+ 0, (__global DATA_TYPE *)output.ptr);
+}
+#endif // defined(DATA_TYPE)
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl
new file mode 100644
index 000000000..5e0abd585
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/prelu_quantized.cl
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+#define SUB(x, y) ((x) - (y))
+
+#if defined(OFF_IN) && defined(OFF_ALPHA) && defined(OFF_OUT) && defined(SCALE_IN) && \
+ defined(SCALE_ALPHA) && defined(SCALE_OUT) && defined(VEC_SIZE)
+
+#define VEC_FLOAT VEC_DATA_TYPE(float, VEC_SIZE)
+#define VEC_INT VEC_DATA_TYPE(int, VEC_SIZE)
+#define VEC_UCHAR VEC_DATA_TYPE(uchar, VEC_SIZE)
+#define CONVERT_RTE(x, type) (convert_##type##_rte((x)))
+#define CONVERT_DOWN(x, type) CONVERT_RTE(x, type)
+#define SELECT_TYPE VEC_INT
+
+/** Returns result of prelu function implemented as below:
+ * f(input) = alpha * input for input < 0, f(input) = input for input >= 0.
+ *
+ * @attention The quantization offsets and scales of the input, alpha and output tensors must be
+ * passed at compile time using -DOFF_IN, -DOFF_ALPHA, -DOFF_OUT, -DSCALE_IN,
+ * -DSCALE_ALPHA and -DSCALE_OUT, e.g. -DOFF_IN=128 -DSCALE_IN=0.5
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ * @note Can only take uchar data types.
+ *
+ * @param[in] input1_ptr Pointer to the source image. Supported Data
+ * types : QASYMM8
+ * @param[in] input1_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input1_step_x input1_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input1_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input1_step_y input1_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input1_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input1_step_z input1_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input1_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[in] alpha_ptr Pointer to the source image. Supported Data
+ * types : QASYMM8
+ * @param[in] alpha_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] alpha_step_x alpha_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] alpha_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] alpha_step_y alpha_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] alpha_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] alpha_step_z alpha_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] alpha_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported
+ * data types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
+__kernel void prelu_qasymm8(TENSOR3D_DECLARATION(input), TENSOR3D_DECLARATION(alpha),
+ TENSOR3D_DECLARATION(output))
+{
+ // Get pixels pointer
+ Tensor3D input = CONVERT_TO_TENSOR3D_STRUCT(input);
+ Tensor3D alpha = CONVERT_TO_TENSOR3D_STRUCT(alpha);
+ Tensor3D output = CONVERT_TO_TENSOR3D_STRUCT(output);
+
+ VEC_INT in_vec = CONVERT(VLOAD(VEC_SIZE)(0, (__global uchar *)input.ptr), VEC_INT);
+ VEC_INT alpha_vec = CONVERT(VLOAD(VEC_SIZE)(0, (__global uchar *)alpha.ptr), VEC_INT);
+
+ in_vec = SUB(in_vec, (VEC_INT)((int)OFF_IN));
+ alpha_vec = SUB(alpha_vec, (VEC_INT)((int)OFF_ALPHA));
+
+ const VEC_FLOAT inf32 = CONVERT(in_vec, VEC_FLOAT) * (VEC_FLOAT)((float)SCALE_IN);
+ const VEC_FLOAT alphaf32 = CONVERT(alpha_vec, VEC_FLOAT) * (VEC_FLOAT)((float)SCALE_ALPHA);
+ const VEC_FLOAT outf32 =
+ select(inf32, inf32 * alphaf32, CONVERT(inf32 < (VEC_FLOAT)0, SELECT_TYPE));
+ const VEC_FLOAT qresf32 = outf32 / ((VEC_FLOAT)(float)SCALE_OUT) + ((VEC_FLOAT)((float)OFF_OUT));
+ const VEC_UCHAR res = CONVERT_SAT(CONVERT_DOWN(qresf32, VEC_INT), VEC_UCHAR);
+
+ VSTORE(VEC_SIZE)
+ (res, 0, (__global uchar *)output.ptr);
+}
+
+#endif // defined(OFF_IN) && defined(OFF_ALPHA) && defined(OFF_OUT) && defined(SCALE_IN) &&
+ // defined(SCALE_ALPHA) && defined(SCALE_OUT) && defined(VEC_SIZE)
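Per element, the kernel dequantizes both operands, applies PReLU in float, then requantizes with round-to-nearest-even. A scalar C sketch of that pipeline (the parameter values would come from the -DOFF_* and -DSCALE_* defines):

#include <math.h>
#include <stdint.h>

/* One QASYMM8 output element of prelu_qasymm8: dequantize, apply
 * x < 0 ? x * alpha : x, requantize with round-to-nearest-even. */
uint8_t prelu_q8_ref(uint8_t q_in, uint8_t q_alpha, int off_in, float scale_in,
                     int off_alpha, float scale_alpha, int off_out, float scale_out)
{
  const float x = ((int)q_in - off_in) * scale_in;
  const float a = ((int)q_alpha - off_alpha) * scale_alpha;
  const float y = x < 0.f ? x * a : x;
  int q = (int)rintf(y / scale_out) + off_out; /* _rte rounding, as in CONVERT_RTE */
  if (q < 0) q = 0;    /* CONVERT_SAT to uchar */
  if (q > 255) q = 255;
  return (uint8_t)q;
}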
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl
new file mode 100644
index 000000000..d7ea2e2c4
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/reduce_operation.cl
@@ -0,0 +1,188 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016, 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
+/** Perform reduce max/min
+ *
+ * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g.
+ * -DDATA_TYPE=short
+ * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size.
+ * e.g. -DDEPTH_OUT=16
+ * @attention Operation type (code) specifying which operation to perform should be passed as a
+ * preprocessor argument using -DOP_CODE=number. e.g. -DOP_CODE=1
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[in] input_stride_w Stride of the source tensor in W dimension (in
+ * bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ * @param[in] axis Axis along which the reduction is performed
+ * @param[in] dim Size of the dimension along the reduction axis
+ */
+__kernel void reduce_min_max(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output),
+ const int axis, const int dim)
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
+
+ int indices[4] = {
+ get_global_id(0), get_global_id(1), get_global_id(2) % DEPTH_OUT,
+ get_global_id(2) / DEPTH_OUT,
+ };
+
+ DATA_TYPE value =
+ *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3]));
+ for (int i = 1; i < dim; ++i)
+ {
+ indices[axis] = i;
+
+#if OP_CODE == 1 // REDUCE_MAX
+ value = max(value, *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1],
+ indices[2], indices[3])));
+
+#elif OP_CODE == 2 // REDUCE_MIN
+ value = min(value, *((__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1],
+ indices[2], indices[3])));
+
+#else // OP NOT SUPPORTED
+ return;
+
+#endif
+ }
+
+ *((__global DATA_TYPE *)out.ptr) = value;
+}
+
+/** Perform reduce sum/mean
+ *
+ * @note Datatype should be given as a preprocessor argument using -DDATA_TYPE=type. e.g.
+ * -DDATA_TYPE=short
+ * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size.
+ * e.g. -DDEPTH_OUT=16
+ * @attention Operation type (code) specifying which operation to perform should be passed as a
+ * preprocessor argument using -DOP_CODE=number. e.g. -DOP_CODE=1
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: U8/S8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[in] input_stride_w Stride of the source tensor in W dimension (in
+ * bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ * @param[in] axis Axis along which the reduction is performed
+ * @param[in] dim Size of the dimension along the reduction axis
+ */
+__kernel void reduce_sum_mean(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output),
+ const int axis, const int dim)
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
+
+ int indices[4] = {
+ get_global_id(0), get_global_id(1), get_global_id(2) % DEPTH_OUT,
+ get_global_id(2) / DEPTH_OUT,
+ };
+
+ DATA_TYPE sum_value = (DATA_TYPE)0;
+ for (int i = 0; i < dim; ++i)
+ {
+ indices[axis] = i;
+ sum_value += *(
+ (__global DATA_TYPE *)tensor4D_offset(&in, indices[0], indices[1], indices[2], indices[3]));
+ }
+
+#if OP_CODE == 3 // REDUCE_SUM
+ *((__global DATA_TYPE *)out.ptr) = sum_value;
+
+#elif OP_CODE == 4 // REDUCE_MEAN
+ *((__global DATA_TYPE *)out.ptr) = sum_value / CONVERT(dim, DATA_TYPE);
+
+#else // OP NOT SUPPORTED
+ return;
+
+#endif
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(OP_CODE)
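Both kernels walk the reduced axis with a plain loop, one work item per output element. A C sketch of the OP_CODE == 4 (mean) case for a 2-D input reduced along its last axis (the shape and layout are assumptions):

/* reduce mean along the last axis of a [rows][dim] array:
 * out[r] = sum(in[r][0..dim-1]) / dim. */
void reduce_mean_last_axis_ref(const float *in, int rows, int dim, float *out)
{
  for (int r = 0; r < rows; ++r)
  {
    float sum = 0.f;
    for (int i = 0; i < dim; ++i)
      sum += in[r * dim + i];
    out[r] = sum / (float)dim;
  }
}

Note that for an integer DATA_TYPE the kernel's `sum_value / CONVERT(dim, DATA_TYPE)` is integer division, so the mean truncates toward zero.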
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl
new file mode 100644
index 000000000..7367da7fb
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_batch.cl
@@ -0,0 +1,250 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016, 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#if defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) && \
+ defined(WIDTH_IN) && defined(ZERO_VALUE)
+/** Perform space to batch with input of 4D and NCHW format
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ * @attention Output tensor depth should be given as a preprocessor argument using -DDEPTH_OUT=size.
+ * e.g. -DDEPTH_OUT=16
+ * @attention Input tensor batch should be given as a preprocessor argument using -DBATCH_IN=size.
+ * e.g. -DBATCH_IN=16
+ * @attention Input tensor height should be given as a preprocessor argument using -DHEIGHT_IN=size.
+ * e.g. -DHEIGHT_IN=16
+ * @attention Input tensor width should be given as a preprocessor argument using -DWIDTH_IN=size.
+ * e.g. -DWIDTH_IN=16
+ * @attention The value used for padding should be given as a preprocessor argument using
+ * -DZERO_VALUE=value. e.g. -DZERO_VALUE=0
+ *
+ * @param[in] input_ptr Pointer to the source tensor. Supported
+ * data types: U8/S8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source tensor in X
+ * dimension (in bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along
+ * X processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source tensor in Y
+ * dimension (in bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along
+ * Y processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z
+ * dimension (in bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along
+ * Z processed per workitem(in bytes)
+ * @param[in] input_stride_w Stride of the source tensor in W
+ * dimension (in bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along
+ * W processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the
+ * source tensor
+ * @param[out] output_ptr Pointer to the destination tensor.
+ * Supported data types: same as @p
+ * input_ptr
+ * @param[in] output_stride_x Stride of the destination tensor in X
+ * dimension (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements
+ * along X processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination tensor in Y
+ * dimension (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements
+ * along Y processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z
+ * dimension (in bytes)
+ * @param[in] output_step_z output_stride_z * number of elements
+ * along Z processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W
+ * dimension (in bytes)
+ * @param[in] output_step_w output_stride_w * number of elements
+ * along W processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination tensor
+ * @param[in] block_size_ptr Pointer to the block size vector. Supported
+ * data types: S32
+ * @param[in] block_size_stride_x Stride of the block size vector in X
+ * dimension (in bytes)
+ * @param[in] block_size_step_x block_size_stride_x * number of elements
+ * along X processed per workitem(in bytes)
+ * @param[in] block_size_offset_first_element_in_bytes The offset of the first element in the
+ * block size vector
+ * @param[in] padding_size_ptr Pointer to the padding size image. Supported
+ * data types: S32
+ * @param[in] padding_size_stride_x Stride of the padding size image in X
+ * dimension (in bytes)
+ * @param[in] padding_size_step_x padding_size_stride_x * number of
+ * elements along X processed per workitem
+ * (in bytes)
+ * @param[in] padding_size_stride_y Stride of the padding size image in Y
+ * dimension (in bytes)
+ * @param[in] padding_size_step_y padding_size_stride_y * number of
+ * elements along Y processed per workitem
+ * (in bytes)
+ * @param[in] padding_size_offset_first_element_in_bytes The offset of the first element in the
+ * padding size image
+ */
+__kernel void space_to_batch_4d_nchw(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output),
+ VECTOR_DECLARATION(block_size),
+ IMAGE_DECLARATION(padding_size))
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, DEPTH_OUT);
+
+ int block_size_x = *((__global int *)(block_size_ptr));
+ int block_size_y = *((__global int *)(block_size_ptr + block_size_stride_x));
+ int shift_x = (get_global_id(2) / DEPTH_OUT / BATCH_IN) % block_size_x;
+ int shift_y = (get_global_id(2) / DEPTH_OUT / BATCH_IN) / block_size_x;
+
+ int in_index[4] = {
+ 0,
+ };
+ in_index[0] = get_global_id(0) * block_size_x + shift_x - *((__global int *)(padding_size_ptr));
+ in_index[1] = get_global_id(1) * block_size_y + shift_y -
+ *((__global int *)(padding_size_ptr + padding_size_stride_y));
+ in_index[2] = get_global_id(2) % DEPTH_OUT;
+ in_index[3] = (get_global_id(2) / DEPTH_OUT) % BATCH_IN;
+
+ if (in_index[0] < 0 || in_index[0] >= WIDTH_IN || in_index[1] < 0 || in_index[1] >= HEIGHT_IN)
+ {
+ *((__global DATA_TYPE *)out.ptr) = (DATA_TYPE)ZERO_VALUE;
+ }
+ else
+ {
+ *((__global DATA_TYPE *)out.ptr) = *((__global DATA_TYPE *)tensor4D_offset(
+ &in, in_index[0], in_index[1], in_index[2], in_index[3]));
+ }
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) &&
+ // defined(WIDTH_IN) && defined(ZERO_VALUE)
+
+#if defined(DATA_TYPE) && defined(HEIGHT_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) && \
+ defined(WIDTH_IN) && defined(ZERO_VALUE) && defined(VEC_SIZE)
+/** Perform space to batch with input of 4D and NHWC format
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ * @attention Output tensor height should be given as a preprocessor argument using
+ * -DHEIGHT_OUT=size. e.g. -DHEIGHT_OUT=16
+ * @attention Input tensor batch should be given as a preprocessor argument using -DBATCH_IN=size.
+ * e.g. -DBATCH_IN=16
+ * @attention Input tensor height should be given as a preprocessor argument using -DHEIGHT_IN=size.
+ * e.g. -DHEIGHT_IN=16
+ * @attention Input tensor width should be given as a preprocessor argument using -DWIDTH_IN=size.
+ * e.g. -DWIDTH_IN=16
+ * @attention The value used for padding should be given as a preprocessor argument using
+ * -DZERO_VALUE=value. e.g. -DZERO_VALUE=0
+ * @attention Vector size should be given as a preprocessor argument using -DVEC_SIZE=size. e.g.
+ * -DVEC_SIZE=16
+ *
+ * @param[in] input_ptr Pointer to the source tensor. Supported
+ * data types: U8/S8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source tensor in X
+ * dimension (in bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along
+ * X processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source tensor in Y
+ * dimension (in bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along
+ * Y processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z
+ * dimension (in bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along
+ * Z processed per workitem(in bytes)
+ * @param[in] input_stride_w Stride of the source tensor in W
+ * dimension (in bytes)
+ * @param[in] input_step_w input_stride_w * number of elements along
+ * W processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the
+ * source tensor
+ * @param[out] output_ptr Pointer to the destination tensor.
+ * Supported data types: same as @p
+ * input_ptr
+ * @param[in] output_stride_x Stride of the destination tensor in X
+ * dimension (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements
+ * along X processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination tensor in Y
+ * dimension (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements
+ * along Y processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z
+ * dimension (in bytes)
+ * @param[in] output_step_z output_stride_z * number of elements
+ * along Z processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W
+ * dimension (in bytes)
+ * @param[in] output_step_w output_stride_w * number of elements
+ * along W processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination tensor
+ * @param[in] block_size_ptr Pointer to the source tensor. Supported
+ * data types: S32
+ * @param[in] block_size_stride_x Stride of the source tensor in X
+ * dimension (in bytes)
+ * @param[in] block_size_step_x block_size_stride_x * number of elements
+ * along X processed per workitem(in bytes)
+ * @param[in] block_size_offset_first_element_in_bytes The offset of the first element in the
+ * destination tensor
+ * @param[in] padding_size_ptr Pointer to the source tensor. Supported
+ * data types: S32
+ * @param[in] padding_size_stride_x Stride of the source tensor in X
+ * dimension (in bytes)
+ * @param[in] padding_size_step_x padding_size_stride_x * number of
+ * elements along X processed per workitem
+ * (in bytes)
+ * @param[in] padding_size_stride_y Stride of the source tensor in Y
+ * dimension (in bytes)
+ * @param[in] padding_size_step_y padding_size_stride_y * number of
+ * elements along Y processed per workitem
+ * (in bytes)
+ * @param[in] padding_size_offset_first_element_in_bytes The offset of the first element in the
+ * padding size tensor
+ */
+__kernel void space_to_batch_4d_nhwc(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output),
+ VECTOR_DECLARATION(block_size),
+ IMAGE_DECLARATION(padding_size))
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, 0);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT(output, HEIGHT_OUT);
+
+ int block_size_x = *((__global int *)(block_size_ptr));
+ int block_size_y = *((__global int *)(block_size_ptr + block_size_stride_x));
+ int shift_x = (get_global_id(2) / HEIGHT_OUT / BATCH_IN) % block_size_x;
+ int shift_y = (get_global_id(2) / HEIGHT_OUT / BATCH_IN) / block_size_x;
+
+  int in_index[4] = {0};
+ in_index[0] = get_global_id(0) * VEC_SIZE;
+ in_index[1] = get_global_id(1) * block_size_x + shift_x - *((__global int *)(padding_size_ptr));
+ in_index[2] = get_global_id(2) % HEIGHT_OUT * block_size_y + shift_y -
+ *((__global int *)(padding_size_ptr + padding_size_stride_y));
+ in_index[3] = (get_global_id(2) / HEIGHT_OUT) % BATCH_IN;
+
+ if (in_index[1] < 0 || in_index[1] >= WIDTH_IN || in_index[2] < 0 || in_index[2] >= HEIGHT_IN)
+ {
+ VSTORE(VEC_SIZE)
+ ((VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE))ZERO_VALUE, 0, (__global DATA_TYPE *)out.ptr);
+ }
+ else
+ {
+ VSTORE(VEC_SIZE)
+ (CONVERT(VLOAD(VEC_SIZE)(0, (__global DATA_TYPE *)tensor4D_offset(&in, in_index[0], in_index[1],
+ in_index[2], in_index[3])),
+ VEC_DATA_TYPE(DATA_TYPE, VEC_SIZE)),
+ 0, (__global DATA_TYPE *)out.ptr);
+ }
+}
+
+#endif // defined(DATA_TYPE) && defined(HEIGHT_OUT) && defined(BATCH_IN) && defined(HEIGHT_IN) &&
+ // defined(WIDTH_IN) && defined(ZERO_VALUE) && defined(VEC_SIZE)
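+
+#if 0 // Host-side C++ sketch (illustration only, not part of the kernel): the inverse
+      // index mapping computed per output element above, scalar version (VEC_SIZE
+      // handling omitted), with the -D compile-time constants passed as plain ints.
+float space_to_batch_element(const float *in, int c, int w_out, int gid2, int block_x,
+                             int block_y, int pad_left, int pad_top, int width_in,
+                             int height_in, int height_out, int batch_in, int channels)
+{
+  const int shift_x = (gid2 / height_out / batch_in) % block_x;
+  const int shift_y = (gid2 / height_out / batch_in) / block_x;
+  const int w_in = w_out * block_x + shift_x - pad_left;
+  const int h_in = (gid2 % height_out) * block_y + shift_y - pad_top;
+  const int b_in = (gid2 / height_out) % batch_in;
+  if (w_in < 0 || w_in >= width_in || h_in < 0 || h_in >= height_in)
+    return 0.0f; // reads that fall into the padded border yield ZERO_VALUE
+  // NHWC linearization: ((b * H + h) * W + w) * C + c
+  return in[((b_in * height_in + h_in) * width_in + w_in) * channels + c];
+}
+#endif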
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl
new file mode 100644
index 000000000..a26e762e8
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/space_to_depth.cl
@@ -0,0 +1,161 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016, 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "helpers.h"
+
+#if defined(DATA_TYPE) && defined(DEPTH_IN) && defined(BLOCK_SIZE) && defined(Z_IN)
+/** Perform space to depth rearrangement of tensor
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ * @attention Input tensor depth should be given as a preprocessor argument using -DDEPTH_IN=size.
+ * e.g. -DDEPTH_IN=16
+ * @attention The size of the input tensor's z dimension should be given as a preprocessor
+ * argument using -DZ_IN=size. e.g. -DZ_IN=16
+ * @attention Block size should be given as a preprocessor argument using -DBLOCK_SIZE=size. e.g.
+ * -DBLOCK_SIZE=1
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
+__kernel void space_to_depth_nchw(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output))
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, Z_IN);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(output, 0);
+
+ int out_index[4] = {0};
+ int in_index[4] = {0};
+
+ in_index[0] = get_global_id(0); // W
+ in_index[1] = get_global_id(1); // H
+ in_index[2] = get_global_id(2) % Z_IN; // C
+ in_index[3] = get_global_id(2) / Z_IN; // B
+
+ out_index[0] = in_index[0] / BLOCK_SIZE;
+ out_index[1] = in_index[1] / BLOCK_SIZE;
+ out_index[2] =
+ in_index[2] + ((in_index[1] % BLOCK_SIZE) * BLOCK_SIZE + in_index[0] % BLOCK_SIZE) * DEPTH_IN;
+ out_index[3] = in_index[3];
+
+ *((__global DATA_TYPE *)tensor4D_offset(&out, out_index[0], out_index[1], out_index[2],
+ out_index[3])) = *((__global DATA_TYPE *)in.ptr);
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_IN) && defined(BLOCK_SIZE) && defined(Z_IN)
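+
+#if 0 // Host-side C++ sketch (illustration only): the NCHW coordinate mapping above.
+      // e.g. BLOCK_SIZE=2, DEPTH_IN=3: input (x=1, y=1, c=0) maps to
+      // output (x=0, y=0, c=(1 * 2 + 1) * 3 = 9).
+void space_to_depth_coord_nchw(int x, int y, int c, int block, int depth_in, int *ox, int *oy,
+                               int *oc)
+{
+  *ox = x / block;
+  *oy = y / block;
+  *oc = c + ((y % block) * block + x % block) * depth_in;
+}
+#endif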
+
+#if defined(DATA_TYPE) && defined(DEPTH_IN) && defined(BLOCK_SIZE) && defined(Z_IN)
+/** Perform space to depth rearrangement of tensor
+ *
+ * @attention Data type can be passed using the -DDATA_TYPE compile flag, e.g. -DDATA_TYPE=float
+ * @attention Input tensor depth should be given as a preprocessor argument using -DDEPTH_IN=size.
+ * e.g. -DDEPTH_IN=16
+ * @attention The size of the input tensor's z dimension should be given as a preprocessor
+ * argument using -DZ_IN=size. e.g. -DZ_IN=16
+ * @attention Block size should be given as a preprocessor argument using -DBLOCK_SIZE=size. e.g.
+ * -DBLOCK_SIZE=1
+ *
+ * @param[in] input_ptr Pointer to the source image. Supported data
+ * types: U8/S8/QASYMM8/U16/S16/F16/U32/S32/F32
+ * @param[in] input_stride_x Stride of the source image in X dimension (in
+ * bytes)
+ * @param[in] input_step_x input_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_y Stride of the source image in Y dimension (in
+ * bytes)
+ * @param[in] input_step_y input_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] input_stride_z Stride of the source tensor in Z dimension (in
+ * bytes)
+ * @param[in] input_step_z input_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] input_offset_first_element_in_bytes The offset of the first element in the source
+ * image
+ * @param[out] output_ptr Pointer to the destination image. Supported data
+ * types: same as @p input_ptr
+ * @param[in] output_stride_x Stride of the destination image in X dimension
+ * (in bytes)
+ * @param[in] output_step_x output_stride_x * number of elements along X
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_y Stride of the destination image in Y dimension
+ * (in bytes)
+ * @param[in] output_step_y output_stride_y * number of elements along Y
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_z Stride of the destination tensor in Z dimension (in
+ * bytes)
+ * @param[in] output_step_z output_stride_z * number of elements along Z
+ * processed per workitem(in bytes)
+ * @param[in] output_stride_w Stride of the destination tensor in W dimension (in
+ * bytes)
+ * @param[in] output_step_w output_stride_w * number of elements along W
+ * processed per workitem(in bytes)
+ * @param[in] output_offset_first_element_in_bytes The offset of the first element in the
+ * destination image
+ */
+__kernel void space_to_depth_nhwc(TENSOR4D_DECLARATION(input), TENSOR4D_DECLARATION(output))
+{
+ Tensor4D in = CONVERT_TO_TENSOR4D_STRUCT(input, Z_IN);
+ Tensor4D out = CONVERT_TO_TENSOR4D_STRUCT_NO_STEP(output, 0);
+
+ int out_index[4] = {0};
+ int in_index[4] = {0};
+
+ in_index[0] = get_global_id(0); // C
+ in_index[1] = get_global_id(1); // W
+ in_index[2] = get_global_id(2) % Z_IN; // H
+ in_index[3] = get_global_id(2) / Z_IN; // B
+
+ out_index[0] =
+ in_index[0] + ((in_index[2] % BLOCK_SIZE) * BLOCK_SIZE + in_index[1] % BLOCK_SIZE) * DEPTH_IN;
+ out_index[1] = in_index[1] / BLOCK_SIZE;
+ out_index[2] = in_index[2] / BLOCK_SIZE;
+ out_index[3] = in_index[3];
+
+ *((__global DATA_TYPE *)tensor4D_offset(&out, out_index[0], out_index[1], out_index[2],
+ out_index[3])) = *((__global DATA_TYPE *)in.ptr);
+}
+#endif // defined(DATA_TYPE) && defined(DEPTH_IN) && defined(BLOCK_SIZE) && defined(Z_IN)
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl
new file mode 100644
index 000000000..50472e4f9
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2.cl
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "helpers.h"
+
+__kernel void topkv2_init(VECTOR_DECLARATION(input), __global float *in_key_buf,
+ __global int *in_ind_buf, const int n)
+{
+ int gid = get_global_id(0);
+ int lws = get_local_size(0);
+ int groups = get_num_groups(0);
+ int gws = lws * groups;
+ int iter = n / gws;
+
+ Vector input = CONVERT_TO_VECTOR_STRUCT_NO_STEP(input);
+
+ for (int i = 0; i < iter; ++i)
+ {
+ int idx = i * gws + gid;
+ in_key_buf[idx] = *(__global float *)(input.ptr + idx * input.stride_x);
+ in_ind_buf[idx] = idx;
+ }
+}
+
+__kernel void topkv2_find_first_negative(__global float *out_key_buf,
+ __global int *first_negative_idx, int n)
+{
+ int gid = get_global_id(0);
+
+ if (gid == n - 1)
+ {
+ // if the last item is positive, the first negative index is n.
+ if (out_key_buf[gid] > 0.f)
+ *first_negative_idx = n;
+ }
+ else if (gid == 0)
+ {
+    // if the first item is negative, the first negative index is 0.
+ if (out_key_buf[gid] < 0.f)
+ *first_negative_idx = 0;
+ }
+ else
+ {
+    // if its left neighbor is positive and it is negative, it is the first negative item.
+ if (out_key_buf[gid - 1] > 0.f && out_key_buf[gid] < 0.f)
+ *first_negative_idx = gid;
+ }
+}
+
+__kernel void topkv2_reorder_negatives(__global float *in_key_buf, __global float *out_key_buf,
+                                       __global int *in_ind_buf, __global int *out_ind_buf,
+                                       __global int *first_negative_idx, int n)
+{
+ int gid = get_global_id(0);
+
+ int num_negs = n - *first_negative_idx;
+ int in_idx;
+
+ if (gid < num_negs)
+ {
+ in_idx = n - 1 - gid;
+ }
+ else
+ {
+ in_idx = gid - num_negs;
+ }
+
+ out_key_buf[gid] = in_key_buf[in_idx];
+ out_ind_buf[gid] = in_ind_buf[in_idx];
+}
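+
+#if 0 // Host-side C++ sketch (illustration only) of the permutation above, assuming the
+      // preceding radix passes sorted the keys ascending by unsigned bit pattern, which
+      // leaves negative floats in the tail in descending value order. Moving that tail,
+      // reversed, to the front makes the whole array ascending by value.
+      // e.g. n=5, first_negative_idx=3: {1, 2, 3, -0.5, -7} -> {-7, -0.5, 1, 2, 3}
+void reorder_negatives_host(const float *in, float *out, int first_negative_idx, int n)
+{
+  const int num_negs = n - first_negative_idx;
+  for (int gid = 0; gid < n; ++gid)
+  {
+    const int in_idx = (gid < num_negs) ? (n - 1 - gid) : (gid - num_negs);
+    out[gid] = in[in_idx];
+  }
+}
+#endif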
+
+__kernel void topkv2_store(VECTOR_DECLARATION(values), VECTOR_DECLARATION(indices),
+ __global float *out_key_buf, __global int *out_ind_buf, int n)
+{
+ int gid = get_global_id(0);
+
+ Vector values = CONVERT_TO_VECTOR_STRUCT_NO_STEP(values);
+ Vector indices = CONVERT_TO_VECTOR_STRUCT_NO_STEP(indices);
+
+ int idx = n - 1 - gid;
+
+ *(__global float *)(values.ptr + gid * values.stride_x) = out_key_buf[idx];
+ *(__global int *)(indices.ptr + gid * indices.stride_x) = out_ind_buf[idx];
+}
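+
+// The kernels above form a pipeline; a sketch of the assumed dispatch order (the sort
+// itself lives in topkv2_radixsort.cl or topkv2_quicksort.cl):
+//   1. topkv2_init                 copy the input into (in_key_buf, in_ind_buf)
+//   2. radix sort passes           sort keys ascending by unsigned bit pattern,
+//                                  permuting the index buffer alongside
+//   3. topkv2_find_first_negative  locate the positive/negative boundary
+//   4. topkv2_reorder_negatives    rotate the negative tail to the front so the
+//                                  array is ascending by value
+//   5. topkv2_store                read the last k entries, i.e. the k largest values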
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl
new file mode 100644
index 000000000..9594daf19
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_quicksort.cl
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "helpers.h"
+
+inline __global float *get_vec_elem(Vector *vec, int idx)
+{
+ return (__global float *)(vec->ptr + idx * vec->stride_x);
+}
+
+inline __global int *get_vec_elem_int(Vector *vec, int idx)
+{
+ return (__global int *)(vec->ptr + idx * vec->stride_x);
+}
+
+// A utility function to swap two elements
+void swap(__global float *a, __global float *b)
+{
+ float t = *a;
+ *a = *b;
+ *b = t;
+}
+
+void swap_idx(__global int *a, __global int *b)
+{
+ int t = *a;
+ *a = *b;
+ *b = t;
+}
+
+/* This function is the same in both the iterative and recursive versions */
+int partition(Vector *arr, __global int *indices, int l, int h)
+{
+ float x = *get_vec_elem(arr, h);
+ int i = (l - 1);
+
+ for (int j = l; j <= h - 1; j++)
+ {
+ if (*get_vec_elem(arr, j) >= x)
+ {
+ i++;
+ swap(get_vec_elem(arr, i), get_vec_elem(arr, j));
+ swap_idx(&indices[i], &indices[j]);
+ }
+ }
+ swap(get_vec_elem(arr, i + 1), get_vec_elem(arr, h));
+ swap_idx(&indices[i + 1], &indices[h]);
+ return (i + 1);
+}
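+
+#if 0 // Host-side C++ sketch (illustration only): partition() keeps elements >= pivot on
+      // the left, so the resulting quicksort order is descending.
+      // e.g. {3, 9, 1, 7} with pivot 7 becomes {9, 7, 1, 3} and returns 1.
+int partition_desc(float *a, int l, int h)
+{
+  const float x = a[h];
+  int i = l - 1;
+  for (int j = l; j < h; ++j)
+  {
+    if (a[j] >= x) // ">=" sends larger values left: descending order
+    {
+      ++i;
+      const float t = a[i];
+      a[i] = a[j];
+      a[j] = t;
+    }
+  }
+  const float t = a[i + 1];
+  a[i + 1] = a[h];
+  a[h] = t;
+  return i + 1;
+}
+#endif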
+
+/* arr[] --> Array to be sorted,
+   l     --> Starting index,
+   h     --> Ending index */
+void quickSortIterative(Vector *arr, __global int *indices, __global int *stack, int l, int h)
+{
+  // The auxiliary stack is provided by the caller via the stack argument
+
+ // initialize top of stack
+ int top = -1;
+
+ // push initial values of l and h to stack
+ stack[++top] = l;
+ stack[++top] = h;
+
+  // Keep popping from the stack while it is not empty
+ while (top >= 0)
+ {
+ // Pop h and l
+ h = stack[top--];
+ l = stack[top--];
+
+ // Set pivot element at its correct position
+ // in sorted array
+ int p = partition(arr, indices, l, h);
+
+ // If there are elements on left side of pivot,
+ // then push left side to stack
+ if (p - 1 > l)
+ {
+ stack[++top] = l;
+ stack[++top] = p - 1;
+ }
+
+ // If there are elements on right side of pivot,
+ // then push right side to stack
+ if (p + 1 < h)
+ {
+ stack[++top] = p + 1;
+ stack[++top] = h;
+ }
+ }
+}
+
+__kernel void topkv2_quicksort(VECTOR_DECLARATION(input), VECTOR_DECLARATION(topk_values),
+ VECTOR_DECLARATION(topk_indices), __global int *indices,
+ __global int *temp_stack, int k, int n)
+{
+ Vector input = CONVERT_TO_VECTOR_STRUCT_NO_STEP(input);
+ Vector topk_values = CONVERT_TO_VECTOR_STRUCT_NO_STEP(topk_values);
+ Vector topk_indices = CONVERT_TO_VECTOR_STRUCT_NO_STEP(topk_indices);
+
+ for (int i = 0; i < n; ++i)
+ {
+ indices[i] = i;
+ }
+
+ quickSortIterative(&input, indices, temp_stack, 0, n - 1);
+
+  // extract the k largest items (the array is now sorted in descending order).
+ for (int i = 0; i < k; ++i)
+ {
+ *get_vec_elem(&topk_values, i) = *get_vec_elem(&input, i);
+ *get_vec_elem_int(&topk_indices, i) = indices[i];
+ }
+}
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl
new file mode 100644
index 000000000..f6830d229
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/cl_kernels/topkv2_radixsort.cl
@@ -0,0 +1,269 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// reference:
+// https://code.google.com/archive/p/ocl-radix-sort/source/default/source
+// OpenCL kernel sources for the CLRadixSort class
+// the #include does not exist in OpenCL
+// Copyright Philippe Helluy, Université de Strasbourg, France, 2011, helluy@math.unistra.fr
+// licensed under the GNU Lesser General Public License see http://www.gnu.org/copyleft/lesser.html
+// if you find this software useful you can cite the following work in your reports or articles:
+// Philippe HELLUY, A portable implementation of the radix sort algorithm in OpenCL, 2011.
+// http://hal.archives-ouvertes.fr/hal-00596730
+
+// Reference for floating point radix sort:
+// http://www.codercorner.com/RadixSortRevisited.htm
+
+// compute the histogram for each radix and each virtual processor for the pass
+__kernel void radixsort_histogram(__global float *in_key_buf, __global int *d_Histograms,
+ const int pass, __local int *loc_histo, const int n)
+{
+ int it = get_local_id(0); // i local number of the processor
+ int ig = get_global_id(0); // global number = i + g I
+
+ int gr = get_group_id(0); // g group number
+
+ int groups = get_num_groups(0);
+ int items = get_local_size(0);
+
+ // set the local histograms to zero
+ for (int ir = 0; ir < _RADIX; ir++)
+ {
+ loc_histo[ir * items + it] = 0;
+ }
+
+ barrier(CLK_LOCAL_MEM_FENCE);
+
+ // range of keys that are analyzed by the work item
+ int size = n / groups / items; // size of the sub-list
+ int start = ig * size; // beginning of the sub-list
+
+ unsigned int key;
+ int shortkey, k;
+
+ // compute the index
+ // the computation depends on the transposition
+ for (int j = 0; j < size; j++)
+ {
+#ifdef TRANSPOSE
+ k = groups * items * j + ig;
+#else
+ k = j + start;
+#endif
+
+ key = *((__global unsigned int *)(in_key_buf + k));
+
+ // extract the group of _BITS bits of the pass
+ // the result is in the range 0.._RADIX-1
+ shortkey = ((key >> (pass * _BITS)) & (_RADIX - 1));
+
+ // increment the local histogram
+ loc_histo[shortkey * items + it]++;
+ }
+
+ barrier(CLK_LOCAL_MEM_FENCE);
+
+ // copy the local histogram to the global one
+ for (int ir = 0; ir < _RADIX; ir++)
+ {
+ d_Histograms[items * (ir * groups + gr) + it] = loc_histo[ir * items + it];
+ }
+
+ barrier(CLK_GLOBAL_MEM_FENCE);
+}
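+
+#if 0 // Host-side C++ sketch (illustration only): the digit extraction above, assuming
+      // _BITS=4 and _RADIX=16. e.g. key=0x12345678, pass=1 -> (key >> 4) & 15 = 7.
+unsigned int radix_digit(unsigned int key, int pass, int bits, int radix)
+{
+  return (key >> (pass * bits)) & (radix - 1);
+}
+#endif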
+
+// initial transpose of the list for improving
+// coalescent memory access
+__kernel void transpose(const __global int *invect, __global int *outvect, const int nbcol,
+ const int nbrow, const __global int *inperm, __global int *outperm,
+ __local int *blockmat, __local int *blockperm, const int tilesize)
+{
+
+ int i0 = get_global_id(0) * tilesize; // first row index
+ int j = get_global_id(1); // column index
+
+ int jloc = get_local_id(1); // local column index
+
+ // fill the cache
+ for (int iloc = 0; iloc < tilesize; iloc++)
+ {
+ int k = (i0 + iloc) * nbcol + j; // position in the matrix
+ blockmat[iloc * tilesize + jloc] = invect[k];
+#ifdef PERMUT
+ blockperm[iloc * tilesize + jloc] = inperm[k];
+#endif
+ }
+
+ barrier(CLK_LOCAL_MEM_FENCE);
+
+ // first row index in the transpose
+ int j0 = get_group_id(1) * tilesize;
+
+ // put the cache at the good place
+ for (int iloc = 0; iloc < tilesize; iloc++)
+ {
+ int kt = (j0 + iloc) * nbrow + i0 + jloc; // position in the transpose
+ outvect[kt] = blockmat[jloc * tilesize + iloc];
+#ifdef PERMUT
+ outperm[kt] = blockperm[jloc * tilesize + iloc];
+#endif
+ }
+}
+
+// each virtual processor reorders its data using the scanned histogram
+__kernel void radixsort_reorder(__global float *in_key, __global float *out_key,
+ __global int *d_Histograms, const int pass,
+ __global int *indices_in, __global int *indices_out,
+ __local int *loc_histo, const int n)
+{
+
+ int it = get_local_id(0);
+ int ig = get_global_id(0);
+
+ int gr = get_group_id(0);
+ int groups = get_num_groups(0);
+ int items = get_local_size(0);
+
+ int start = ig * (n / groups / items);
+ int size = n / groups / items;
+
+ // take the histogram in the cache
+ for (int ir = 0; ir < _RADIX; ir++)
+ {
+ loc_histo[ir * items + it] = d_Histograms[items * (ir * groups + gr) + it];
+ }
+ barrier(CLK_LOCAL_MEM_FENCE);
+
+ int newpos, shortkey, k, newpost;
+ unsigned int key;
+
+ for (int j = 0; j < size; j++)
+ {
+#ifdef TRANSPOSE
+ k = groups * items * j + ig;
+#else
+ k = j + start;
+#endif
+ float org_value = in_key[k];
+ key = *(__global unsigned int *)(in_key + k);
+ shortkey = ((key >> (pass * _BITS)) & (_RADIX - 1));
+
+ newpos = loc_histo[shortkey * items + it];
+
+#ifdef TRANSPOSE
+ int ignew, jnew;
+ ignew = newpos / (n / groups / items);
+ jnew = newpos % (n / groups / items);
+ newpost = jnew * (groups * items) + ignew;
+#else
+ newpost = newpos;
+#endif
+
+ // d_outKeys[newpost]= key; // killing line !!!
+ out_key[newpost] = org_value;
+
+#ifdef PERMUT
+ indices_out[newpost] = indices_in[k];
+#endif
+
+ newpos++;
+ loc_histo[shortkey * items + it] = newpos;
+ }
+}
+
+// perform a parallel prefix sum (a scan) on the local histograms
+// (see Blelloch 1990) each workitem worries about two memories
+// see also http://http.developer.nvidia.com/GPUGems3/gpugems3_ch39.html
+__kernel void radixsort_scanhistograms(__global int *histo, __local int *temp,
+ __global int *globsum)
+{
+ int it = get_local_id(0);
+ int ig = get_global_id(0);
+ int decale = 1;
+ int n = get_local_size(0) * 2;
+ int gr = get_group_id(0);
+
+ // load input into local memory
+ // up sweep phase
+ temp[2 * it] = histo[2 * ig];
+ temp[2 * it + 1] = histo[2 * ig + 1];
+
+ // parallel prefix sum (algorithm of Blelloch 1990)
+ for (int d = n >> 1; d > 0; d >>= 1)
+ {
+ barrier(CLK_LOCAL_MEM_FENCE);
+ if (it < d)
+ {
+ int ai = decale * (2 * it + 1) - 1;
+ int bi = decale * (2 * it + 2) - 1;
+ temp[bi] += temp[ai];
+ }
+ decale *= 2;
+ }
+
+ // store the last element in the global sum vector
+ // (maybe used in the next step for constructing the global scan)
+ // clear the last element
+ if (it == 0)
+ {
+ globsum[gr] = temp[n - 1];
+ temp[n - 1] = 0;
+ }
+
+ // down sweep phase
+ for (int d = 1; d < n; d *= 2)
+ {
+ decale >>= 1;
+ barrier(CLK_LOCAL_MEM_FENCE);
+
+ if (it < d)
+ {
+ int ai = decale * (2 * it + 1) - 1;
+ int bi = decale * (2 * it + 2) - 1;
+
+ int t = temp[ai];
+ temp[ai] = temp[bi];
+ temp[bi] += t;
+ }
+ }
+ barrier(CLK_LOCAL_MEM_FENCE);
+
+ // write results to device memory
+
+ histo[2 * ig] = temp[2 * it];
+ histo[2 * ig + 1] = temp[2 * it + 1];
+
+ barrier(CLK_GLOBAL_MEM_FENCE);
+}
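+
+#if 0 // Host-side C++ sketch (illustration only): the exclusive (Blelloch) scan the kernel
+      // computes per block. e.g. {3, 1, 7, 0} -> {0, 3, 4, 11}, with the block total 11
+      // stored in globsum for the radixsort_pastehistograms pass below.
+void exclusive_scan(int *a, int n, int *block_sum)
+{
+  int sum = 0;
+  for (int i = 0; i < n; ++i)
+  {
+    const int v = a[i];
+    a[i] = sum; // each slot receives the sum of everything before it
+    sum += v;
+  }
+  *block_sum = sum;
+}
+#endif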
+
+// use the global sum for updating the local histograms
+// each work item updates two values
+__kernel void radixsort_pastehistograms(__global int *histo, __global int *globsum)
+{
+ int ig = get_global_id(0);
+ int gr = get_group_id(0);
+
+ int s;
+
+ s = globsum[gr];
+
+ // write results to device memory
+ histo[2 * ig] += s;
+ histo[2 * ig + 1] += s;
+
+ barrier(CLK_GLOBAL_MEM_FENCE);
+}
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLArgOperationKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLArgOperationKernel.cpp
new file mode 100644
index 000000000..7f4b5b0df
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLArgOperationKernel.cpp
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLArgOperationKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
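+// Shape inference for arg ops: the reduced axis is kept with size 1 rather than dropped,
+// e.g. input shape [2, 3, 4] with axis=1 -> output shape [2, 1, 4].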
+const TensorShape inferOutputShape(const TensorShape &input_shape, const uint32_t axis)
+{
+ TensorShape out_shape{input_shape};
+
+ out_shape.set(axis, 1);
+
+ return out_shape;
+}
+} // namespace
+
+namespace
+{
+Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, const uint32_t axis,
+ ArgOperation /*op*/)
+{
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_NOT_IN(input, DataType::S32, DataType::F32, DataType::U8,
+ DataType::QASYMM8);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_NOT_IN(output, DataType::S32);
+
+  ARM_COMPUTE_RETURN_ERROR_ON_MSG((input->tensor_shape().num_dimensions() - 1) !=
+                                      output->tensor_shape().num_dimensions(),
+                                  "Output's rank should be one less than input's rank");
+
+  ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->tensor_shape().total_size() == 0,
+                                  "Output's shape was not set");
+
+ const TensorShape output_shape = inferOutputShape(input->tensor_shape(), axis);
+  ARM_COMPUTE_RETURN_ERROR_ON_MSG(output_shape.total_size() != output->tensor_shape().total_size(),
+                                  "Output shape's total size does not match the inferred shape");
+
+ const auto num_dimensions = input->tensor_shape().num_dimensions();
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(axis >= num_dimensions, "axis must be less than (input's rank).");
+ return Status{};
+}
+
+} // namespace
+
+CLArgOperationKernel::CLArgOperationKernel() : _input(nullptr), _output(nullptr), _axis() {}
+
+void CLArgOperationKernel::configure(const ICLTensor *input, ICLTensor *output, const uint32_t axis,
+ ArgOperation op)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), axis, op));
+
+ _input = input;
+ _output = output;
+ _axis = axis;
+
+ std::unique_ptr<ITensorInfo> output_info = output->info()->clone();
+ output_info->set_tensor_shape(inferOutputShape(input->info()->tensor_shape(), axis));
+
+  // Select the op_code matching the requested ArgOperation
+ std::string kernel_name = "arg_op";
+ int op_code = 0;
+ if (op == ArgOperation::MAX)
+ {
+ op_code = 1;
+ }
+ else if (op == ArgOperation::MIN)
+ {
+ op_code = 2;
+ }
+ else
+    throw std::runtime_error("Operation not supported yet");
+
+ // Set kernel build options
+ std::set<std::string> build_opts;
+ build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
+ build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output_info->dimension(2)));
+ build_opts.emplace("-DOP_CODE=" + support::cpp11::to_string(op_code));
+
+ // Create kernel
+ _kernel =
+ static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
+
+ // Configure kernel window
+ Window win = calculate_max_window(*output_info, Steps());
+
+ Coordinates coord;
+ coord.set_num_dimensions(output_info->num_dimensions());
+ output->info()->set_valid_region(ValidRegion(coord, output_info->tensor_shape()));
+
+ ICLKernel::configure_internal(win);
+}
+
+Status CLArgOperationKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
+ const uint32_t axis, ArgOperation op)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, output, axis, op));
+
+ return Status{};
+}
+
+void CLArgOperationKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
+
+ const TensorShape &shape_in = _input->info()->tensor_shape();
+
+ unsigned int idx = 2 * num_arguments_per_4D_tensor(); // Skip the input and output parameters
+
+ _kernel.setArg<cl_int>(idx++, _axis);
+ _kernel.setArg<cl_int>(idx++, shape_in[_axis]);
+
+ Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
+
+ // Setup input slice
+ Window slice_in(slice_out);
+ slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
+ slice_in.set(3, Window::Dimension(0, 0, 0));
+
+  // Save the output tensor's shape so it can be restored at the end of this method
+ const TensorShape shape_out = _output->info()->tensor_shape();
+ _output->info()->set_tensor_shape(inferOutputShape(shape_in, _axis));
+
+ do
+ {
+ unsigned int idx = 0;
+ add_4D_tensor_argument(idx, _input, slice_in);
+ add_4D_tensor_argument(idx, _output, slice_out);
+ enqueue(queue, *this, slice_out);
+ } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
+
+  // Restore the output tensor's original shape
+ _output->info()->set_tensor_shape(shape_out);
+}
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp
new file mode 100644
index 000000000..c14e73634
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLBinaryLogicalOpKernel.cpp
@@ -0,0 +1,172 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLBinaryLogicalOpKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
+constexpr unsigned int num_elems_processed_per_iteration = 16;
+
+Status validate_parameters(const ITensorInfo *input1, const ITensorInfo *input2,
+ const ITensorInfo *output)
+{
+ const TensorShape &out_shape =
+ TensorShape::broadcast_shape(input1->tensor_shape(), input2->tensor_shape());
+
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input1, 1, DataType::U8, DataType::QASYMM8);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input2, 1, DataType::U8, DataType::QASYMM8);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
+ "Inputs are not broadcast compatible");
+ // Validate in case of configured output
+ if (output->total_size() > 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8,
+ DataType::QASYMM8);
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(
+ detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
+ "Wrong shape for output");
+ }
+ return Status{};
+}
+} // namespace
+
+CLBinaryLogicalOpKernel::CLBinaryLogicalOpKernel()
+ : _input1(nullptr), _input2(nullptr), _output(nullptr)
+{
+}
+
+void CLBinaryLogicalOpKernel::configure(const ICLTensor *input1, const ICLTensor *input2,
+ ICLTensor *output, BinaryLogicalOperation op)
+{
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input1, input2);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input1, output);
+ ARM_COMPUTE_ERROR_THROW_ON(validate_parameters(input1->info(), input2->info(), output->info()));
+
+ _input1 = input1;
+ _input2 = input2;
+ _output = output;
+
+ // Create kernel
+ std::string kernel_name = "binary_logical_op";
+ std::set<std::string> build_opts;
+ build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input1->info()->data_type())));
+
+ int op_code = 0;
+ switch (op)
+ {
+ case BinaryLogicalOperation::AND:
+ op_code = 1;
+ break;
+ case BinaryLogicalOperation::OR:
+ op_code = 2;
+ break;
+ default:
+      throw std::runtime_error("Operation not supported yet");
+ }
+
+ build_opts.emplace(("-DOP_CODE=" + support::cpp11::to_string(op_code)));
+ build_opts.emplace(
+ ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
+
+ _kernel =
+ static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
+
+ const std::pair<TensorShape, ValidRegion> broadcast_pair =
+ ITensorInfo::broadcast_shape_and_valid_region(*input1->info(), *input2->info());
+
+ const ValidRegion &valid_region = broadcast_pair.second;
+
+ Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
+ Window win_input1 = win.broadcast_if_dimension_le_one(*input1->info());
+ Window win_input2 = win.broadcast_if_dimension_le_one(*input2->info());
+
+ AccessWindowHorizontal input1_access(input1->info(), 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal input2_access(input2->info(), 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
+
+ update_window_and_padding(win_input1, input1_access) ||
+ update_window_and_padding(win_input2, input2_access) ||
+ update_window_and_padding(win, output_access);
+
+ output_access.set_valid_region(win, valid_region);
+
+ ICLKernel::configure_internal(win);
+}
+
+void CLBinaryLogicalOpKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
+
+ const TensorShape &in_shape1 = _input1->info()->tensor_shape();
+ const TensorShape &in_shape2 = _input2->info()->tensor_shape();
+ const TensorShape &out_shape = _output->info()->tensor_shape();
+
+ bool can_collapse = true;
+ if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
+ {
+ can_collapse =
+ (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
+ for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); d++)
+ {
+ can_collapse = (in_shape1[d] == in_shape2[d]);
+ }
+ }
+
+ bool has_collapsed = false;
+ Window collapsed =
+ can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
+ : window;
+
+ const TensorShape &in_shape1_collapsed =
+ has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
+ const TensorShape &in_shape2_collapsed =
+ has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
+
+ Window slice = collapsed.first_slice_window_3D();
+ Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
+ Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
+
+ do
+ {
+ unsigned int idx = 0;
+ add_3D_tensor_argument(idx, _input1, slice_input1);
+ add_3D_tensor_argument(idx, _input2, slice_input2);
+ add_3D_tensor_argument(idx, _output, slice);
+
+ enqueue(queue, *this, slice);
+
+ collapsed.slide_window_slice_3D(slice_input1);
+ collapsed.slide_window_slice_3D(slice_input2);
+ } while (collapsed.slide_window_slice_3D(slice));
+}
+
+BorderSize CLBinaryLogicalOpKernel::border_size() const
+{
+ const unsigned int replicateSize =
+ _output->info()->dimension(0) -
+ std::min(_input1->info()->dimension(0), _input2->info()->dimension(0));
+ const unsigned int border =
+ std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
+ return BorderSize(0, border, 0, 0);
+}
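+
+// border_size() example: with output dimension(0) = 16 broadcast from an input with
+// dimension(0) = 1, replicateSize is 15 and, at 16 elements per iteration, the border
+// is min(15, 15) = 15 columns on the right.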
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp
new file mode 100644
index 000000000..ac2963f38
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLCastKernel.cpp
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLCastKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+CLCastKernel::CLCastKernel() : _input(nullptr), _output(nullptr) {}
+
+void CLCastKernel::configure(const ICLTensor *input, ICLTensor *output)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::S32, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::S32, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_SHAPES(input, output);
+
+ _input = input;
+ _output = output;
+
+ constexpr unsigned int num_elems_processed_per_iteration = 16;
+
+ // Set kernel build options
+ std::set<std::string> build_opts;
+ build_opts.emplace("-DDATA_TYPE_IN=" + get_cl_type_from_data_type(input->info()->data_type()));
+ build_opts.emplace("-DDATA_TYPE_OUT=" + get_cl_type_from_data_type(output->info()->data_type()));
+ build_opts.emplace(
+ ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
+
+ // Create kernel
+ if (is_data_type_quantized_asymmetric(input->info()->data_type()))
+ {
+ const float scale_in = input->info()->quantization_info().scale;
+ const int offset_in = input->info()->quantization_info().offset;
+ build_opts.emplace("-DSCALE=" + float_to_string_with_full_precision(scale_in));
+ build_opts.emplace("-DOFFSET=" + support::cpp11::to_string(offset_in));
+
+ _kernel = static_cast<cl::Kernel>(
+ CLKernelLibraryEx::get().create_kernel("cast_qasymm_in", build_opts));
+ }
+ else if (is_data_type_quantized_asymmetric(output->info()->data_type()))
+ {
+    const float scale_out = output->info()->quantization_info().scale;
+    const int offset_out = output->info()->quantization_info().offset;
+    build_opts.emplace("-DSCALE=" + float_to_string_with_full_precision(scale_out));
+    build_opts.emplace("-DOFFSET=" + support::cpp11::to_string(offset_out));
+
+ _kernel = static_cast<cl::Kernel>(
+ CLKernelLibraryEx::get().create_kernel("cast_qasymm_out", build_opts));
+ }
+ else
+ {
+ _kernel = static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("cast", build_opts));
+ }
+
+ // Configure kernel window
+ Window win = calculate_max_window(*input->info(), Steps(num_elems_processed_per_iteration));
+ AccessWindowHorizontal input_access(input->info(), 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
+ update_window_and_padding(win, input_access, output_access);
+ output_access.set_valid_region(win, input->info()->valid_region());
+
+ ICLKernel::configure_internal(win);
+}
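+
+#if 0 // Host-side C++ sketch (illustration only): the affine (de)quantization that the
+      // cast_qasymm_in/out kernels apply with the SCALE and OFFSET build options above
+      // (rounding and clamping omitted).
+float dequantize(unsigned char q, float scale, int offset) { return scale * ((int)q - offset); }
+unsigned char quantize(float x, float scale, int offset)
+{
+  return (unsigned char)(x / scale + offset);
+}
+#endif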
+
+void CLCastKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
+
+ Window collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ);
+ Window slice = collapsed.first_slice_window_3D();
+
+ do
+ {
+ unsigned int idx = 0;
+ add_3D_tensor_argument(idx, _input, slice);
+ add_3D_tensor_argument(idx, _output, slice);
+ enqueue(queue, *this, slice, lws_hint());
+ } while (collapsed.slide_window_slice_3D(slice));
+}
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp
new file mode 100644
index 000000000..2a3433c2b
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLDepthToSpaceKernel.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLDepthToSpaceKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
+// TODO Use this validation function
+#if 0
+Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
+ const int32_t block_size)
+{
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::S32, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::S32, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size < 1,
+ "Block size should be greater than or equal to 1.");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(0) != input->dimension(0) * block_size,
+ "Output width should be equal to (Input width * block size)");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(1) != input->dimension(1) * block_size,
+ "Output height should be equal to (Input height * block size)");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(2) % (block_size * block_size) != 0,
+ "Input depth should be divisible by (block size * block size)");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(
+ output->dimension(2) != input->dimension(2) / (block_size * block_size),
+ "Output depth should be equal to (Input depth / (block size * block size))");
+
+ return Status{};
+}
+#endif
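+
+// The constraints above pin down the shape relation, e.g. with block_size = 2:
+// input [W = 2, H = 2, C = 8] -> output [W = 4, H = 4, C = 2].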
+} // namespace
+
+CLDepthToSpaceKernel::CLDepthToSpaceKernel() : _input(nullptr), _output(nullptr)
+{
+ // DO NOTHING
+}
+
+void CLDepthToSpaceKernel::configure(const ICLTensor *input, ICLTensor *output,
+ const int32_t block_size)
+{
+ // TODO Add validation of data_layout
+ _input = input;
+ _output = output;
+
+ // Set kernel build options
+ auto layout_out = output->info()->data_layout();
+ std::set<std::string> build_opts;
+ build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
+ build_opts.emplace("-DBLOCK_SIZE=" + support::cpp11::to_string(block_size));
+ auto index_depth = get_data_layout_dimension_index(layout_out, DataLayoutDimension::CHANNEL);
+ auto depth = output->info()->dimension(index_depth);
+ build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(depth));
+ build_opts.emplace("-DZ_OUT=" + support::cpp11::to_string(output->info()->tensor_shape().z()));
+
+ // Create kernel
+ _kernel = static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(
+ "depth_to_space_" + lower_string(string_from_data_layout(layout_out)), build_opts));
+
+ // Configure kernel window
+ Window win = calculate_max_window(*output->info(), Steps());
+
+ Coordinates coord;
+ coord.set_num_dimensions(output->info()->num_dimensions());
+ output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
+
+ ICLKernel::configure_internal(win);
+}
+
+void CLDepthToSpaceKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
+
+ Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
+
+ // Setup input slice
+ Window slice_in(slice_out);
+ slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
+ slice_in.set(3, Window::Dimension(0, 0, 0));
+
+ do
+ {
+ unsigned int idx = 0;
+ add_4D_tensor_argument(idx, _input, slice_in);
+ add_4D_tensor_argument(idx, _output, slice_out);
+ enqueue(queue, *this, slice_out);
+ } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
+}
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLEmbeddingLookupKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLEmbeddingLookupKernel.cpp
index 0862b78bf..0862b78bf 100644
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLEmbeddingLookupKernel.cpp
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLEmbeddingLookupKernel.cpp
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLGatherExKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLGatherExKernel.cpp
new file mode 100644
index 000000000..c83ece0e9
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLGatherExKernel.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLGatherExKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+#include "arm_compute/core/UtilsEx.h"
+
+using namespace arm_compute;
+
+namespace
+{
+
+inline TensorShape compute_gather_shape(const TensorShape &input_shape,
+ const TensorShape &indices_shape, uint32_t actual_axis)
+{
+ ARM_COMPUTE_ERROR_ON(indices_shape.num_dimensions() > 3);
+ ARM_COMPUTE_ERROR_ON(input_shape.num_dimensions() > 4);
+ ARM_COMPUTE_ERROR_ON(input_shape.num_dimensions() + indices_shape.num_dimensions() - 1 > 4);
+ ARM_COMPUTE_ERROR_ON(actual_axis >= input_shape.num_dimensions());
+
+ TensorShape output_shape = input_shape;
+ if (indices_shape.num_dimensions() == 1)
+ {
+ output_shape[actual_axis] = indices_shape[0];
+ }
+ else if (indices_shape.num_dimensions() > 1)
+ {
+ output_shape.shift_right(indices_shape.num_dimensions() - 1);
+
+ for (uint32_t i = 0, o = 0; o < output_shape.num_dimensions(); ++o, ++i)
+ {
+ if (o == actual_axis)
+ {
+ ++i;
+ for (uint32_t in = 0; in < indices_shape.num_dimensions(); ++in, ++o)
+ {
+ output_shape[o] = indices_shape[in];
+ }
+ }
+ else
+ {
+ output_shape[o] = input_shape[i];
+ }
+ }
+ }
+ return output_shape;
+}
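+
+// e.g. input shape [10, 4] gathered along actual_axis = 0 with 2-D indices of shape
+// [3, 2] -> output shape [3, 2, 4]: the gathered axis is replaced by the indices shape.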
+
+/** Wrap-around a number within the range 0 <= x < m
+ *
+ * @param[in] x Input value
+ * @param[in] m Range
+ *
+ * @return the wrapped-around number
+ */
+template <typename T> inline T wrap_around(T x, T m) { return x >= 0 ? x % m : (x % m + m) % m; }
+
+inline Status validate_arguments(const ITensorInfo *input, const ITensorInfo *indices,
+ const ITensorInfo *output, int axis)
+{
+ const uint32_t actual_axis = wrap_around(axis, static_cast<int>(input->num_dimensions()));
+ ARM_COMPUTE_RETURN_ERROR_ON(indices->num_dimensions() > 3);
+ ARM_COMPUTE_RETURN_ERROR_ON(input->num_dimensions() > 4);
+ ARM_COMPUTE_ERROR_ON(input->num_dimensions() + indices->num_dimensions() - 1 > 4);
+ ARM_COMPUTE_RETURN_ERROR_ON(actual_axis >= input->num_dimensions());
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(output);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(
+ input, 1, DataType::U8, DataType::S8, DataType::QASYMM8, DataType::U16, DataType::S16,
+ DataType::U32, DataType::S32, DataType::F16, DataType::F32);
+
+ if (output->total_size() != 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_QUANTIZATION_INFO(input, output);
+ TensorShape output_shape =
+ compute_gather_shape(input->tensor_shape(), indices->tensor_shape(), actual_axis);
+ ARM_COMPUTE_RETURN_ERROR_ON(output_shape.total_size() != output->tensor_shape().total_size());
+ }
+
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(indices, 1, DataType::U32, DataType::S32);
+
+ return Status{};
+}
+
+std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input, ITensorInfo *indices,
+ ITensorInfo *output, int axis)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output, indices);
+ const uint32_t actual_axis = wrap_around(axis, static_cast<int>(input->num_dimensions()));
+ std::unique_ptr<ITensorInfo> output_info = input->clone();
+ output_info->set_tensor_shape(
+ compute_gather_shape(input->tensor_shape(), indices->tensor_shape(), actual_axis));
+ // Output auto initialization if not yet initialized
+ auto_init_if_empty((*output), output_info->tensor_shape(), 1, input->data_type());
+
+ // Create window
+ Window win = calculate_max_window(*output, Steps());
+ output->set_valid_region(ValidRegion(Coordinates(), output->tensor_shape()));
+
+ return std::make_pair(Status{}, win);
+}
+
+} // namespace
+
+CLGatherExKernel::CLGatherExKernel()
+ : _input(nullptr), _indices(nullptr), _output(nullptr), _axis(0)
+{
+}
+
+void CLGatherExKernel::configure(const ICLTensor *input, const ICLTensor *indices,
+ ICLTensor *output, int axis)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output, indices);
+ ARM_COMPUTE_ERROR_THROW_ON(
+ validate_arguments(input->info(), indices->info(), output->info(), axis));
+
+ // Configure kernel window
+ auto win_config =
+ validate_and_configure_window(input->info(), indices->info(), output->info(), axis);
+ ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
+
+ _input = input;
+ _output = output;
+ _indices = indices;
+ _axis = wrap_around(axis, static_cast<int>(input->info()->num_dimensions()));
+
+ // Set build options
+ CLBuildOptions build_opts;
+ build_opts.add_option("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
+ build_opts.add_option("-DOUTPUT_DIM_Z=" +
+ support::cpp11::to_string(output->info()->dimension(2)));
+ build_opts.add_option("-DINPUT_DIM_Z=" + support::cpp11::to_string(input->info()->dimension(2)));
+ build_opts.add_option("-DAXIS=" + support::cpp11::to_string(_axis));
+ build_opts.add_option("-DINDICES_DIM=" +
+ support::cpp11::to_string(indices->info()->num_dimensions()));
+
+ // Create kernel
+ _kernel = static_cast<cl::Kernel>(
+ CLKernelLibraryEx::get().create_kernel("gather_ex", build_opts.options()));
+ ICLKernel::configure_internal(win_config.second);
+}
+
+Status CLGatherExKernel::validate(const ITensorInfo *input, const ITensorInfo *indices,
+ const ITensorInfo *output, int axis)
+{
+ ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, indices, output, axis));
+ ARM_COMPUTE_RETURN_ON_ERROR(validate_and_configure_window(input->clone().get(),
+ indices->clone().get(),
+ output->clone().get(), axis)
+ .first);
+ return Status{};
+}
+
+void CLGatherExKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(IKernel::window(), window);
+
+ Window window_collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ, 4);
+ unsigned int idx = 0;
+ add_4D_tensor_argument(idx, _input, window_collapsed);
+ add_3D_tensor_argument(idx, _indices, window_collapsed);
+ add_4D_tensor_argument(idx, _output, window_collapsed);
+ enqueue(queue, *this, window_collapsed, lws_hint());
+}
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp
new file mode 100644
index 000000000..31e98c9a8
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLHashtableLookupKernel.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLHashtableLookupKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
+constexpr unsigned int num_elems_processed_per_iteration = 16;
+
+std::pair<Status, Window> validate_and_configure_window(ITensorInfo *input, ITensorInfo *output)
+{
+ Window win = calculate_max_window(*output, Steps(num_elems_processed_per_iteration));
+ AccessWindowHorizontal input_access(input, 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal output_access(output, 0, num_elems_processed_per_iteration);
+
+ bool window_changed = update_window_and_padding(win, input_access, output_access);
+ input_access.set_valid_region(win, output->valid_region());
+
+ Status err = (window_changed)
+ ? ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!")
+ : Status{};
+ return std::make_pair(err, win);
+}
+} // namespace
+
+CLHashtableLookupKernel::CLHashtableLookupKernel()
+{
+ // DO NOTHING
+}
+
+Status CLHashtableLookupKernel::validate(const ITensorInfo *lookups, const ITensorInfo *keys,
+ const ITensorInfo *input, const ITensorInfo *output,
+ const ITensorInfo *hits)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(lookups, keys, input, output, hits);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(
+ input, 1, DataType::U8, DataType::S8, DataType::QASYMM8, DataType::U16, DataType::S16,
+ DataType::U32, DataType::S32, DataType::F16, DataType::F32);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(lookups, 1, DataType::S32);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(keys, 1, DataType::S32);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(hits, 1, DataType::U8, DataType::QASYMM8);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->tensor_shape().total_size() == 0,
+ "Output's shape was not set");
+
+ ARM_COMPUTE_ERROR_ON(lookups->dimension(0) != hits->dimension(0) ||
+ output->dimension(output->num_dimensions() - 1) != lookups->dimension(0));
+  ARM_COMPUTE_ERROR_ON(input->num_dimensions() < 2 || input->num_dimensions() > 4);
+ ARM_COMPUTE_ERROR_ON(lookups->num_dimensions() > 1);
+ ARM_COMPUTE_ERROR_ON(keys->num_dimensions() > 1);
+ ARM_COMPUTE_ERROR_ON(hits->num_dimensions() > 1);
+
+ return Status{};
+}
+
+void CLHashtableLookupKernel::configure(const ICLTensor *lookups, const ICLTensor *keys,
+ const ICLTensor *input, ICLTensor *output, ICLTensor *hits)
+{
+ ARM_COMPUTE_ERROR_THROW_ON(
+ validate(lookups->info(), keys->info(), input->info(), output->info(), hits->info()));
+
+ _lookups = lookups;
+ _keys = keys;
+ _input = input;
+ _output = output;
+ _hits = hits;
+
+ // Make _lookup_indices tensor
+ _lookup_indices = arm_compute::support::cpp14::make_unique<CLTensor>();
+ _lookup_indices->allocator()->init(
+ TensorInfo(lookups->info()->tensor_shape(), lookups->info()->num_channels(), DataType::S32));
+ _lookup_indices->allocator()->allocate();
+
+ // Set kernel build options
+ std::stringstream kernel_name;
+ std::set<std::string> build_opts;
+ kernel_name << "hashtable_lookup";
+
+ build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
+ build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
+ build_opts.emplace("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration));
+ build_opts.emplace("-DNUM_DIMS=" + support::cpp11::to_string(_input->info()->num_dimensions()));
+
+ // Create kernel
+ _kernel = static_cast<cl::Kernel>(
+ CLKernelLibraryEx::get().create_kernel(kernel_name.str(), build_opts));
+
+ // Configure kernel window
+ auto win_config = validate_and_configure_window(input->info(), output->info());
+ ARM_COMPUTE_ERROR_THROW_ON(win_config.first);
+ ICLKernel::configure_internal(win_config.second);
+}
+
+void CLHashtableLookupKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(IKernel::window(), window);
+
+ const_cast<ICLTensor *>(_lookups)->map(queue);
+ const_cast<ICLTensor *>(_keys)->map(queue);
+ _hits->map(queue);
+ _lookup_indices->map(queue);
+
+ // Set values of hits
+ const int32_t *lookups_buf =
+ reinterpret_cast<int32_t *>(const_cast<ICLTensor *>(_lookups)->buffer());
+ const int32_t *keys_buf = reinterpret_cast<int32_t *>(const_cast<ICLTensor *>(_keys)->buffer());
+ uint8_t *hits_buf = reinterpret_cast<uint8_t *>(_hits->buffer());
+ int32_t *lookup_indices_buf = reinterpret_cast<int32_t *>(_lookup_indices->buffer());
+
+ std::map<int32_t, size_t> key_map;
+ const size_t keys_num = _keys->info()->dimension(0);
+ for (size_t key_index = 0; key_index < keys_num; key_index++)
+ {
+ key_map[keys_buf[key_index]] = key_index;
+ }
+
+ const size_t lookups_num = _lookups->info()->dimension(0);
+ for (size_t i = 0; i < lookups_num; ++i)
+ {
+ const auto lookup_value = lookups_buf[i];
+ const auto it = key_map.find(lookup_value);
+ if (it != key_map.end())
+ {
+#if defined(ARM_COMPUTE_DEBUG_ENABLED)
+ if (it->second >= lookups_num)
+ ARM_COMPUTE_ERROR("HashTable Lookup: index out of bounds.");
+#endif // defined(ARM_COMPUTE_DEBUG_ENABLED)
+ lookup_indices_buf[i] = static_cast<int32_t>(it->second);
+ hits_buf[i] = static_cast<uint8_t>(1);
+ }
+ else
+ {
+ lookup_indices_buf[i] = -1;
+ hits_buf[i] = static_cast<uint8_t>(0);
+ }
+ }
+
+ const_cast<ICLTensor *>(_lookups)->unmap(queue);
+ const_cast<ICLTensor *>(_keys)->unmap(queue);
+ _hits->unmap(queue);
+ _lookup_indices->unmap(queue);
+
+ Window win = window.collapse(ICLKernel::window(), 2, 4);
+
+ Window win_lookup;
+ win_lookup.set(Window::DimX, Window::Dimension(0, 0, 0));
+
+ do
+ {
+ unsigned int idx = 0;
+ add_4D_tensor_argument(idx, _input, win);
+ add_4D_tensor_argument(idx, _output, win);
+ add_1D_tensor_argument(idx, _lookup_indices.get(), win_lookup);
+
+ enqueue(queue, *this, win);
+ } while (window.slide_window_slice_4D(win) && window.slide_window_slice_1D(win_lookup));
+}
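Note that the hash lookup itself happens on the host inside run() above; the OpenCL kernel only gathers input rows by the precomputed indices. A minimal standalone sketch of that index-resolution step, written against hypothetical std::vector buffers rather than the ICLTensor API:

#include <cstddef>
#include <cstdint>
#include <map>
#include <vector>

// Resolve each lookup key to its row index in the key table; misses get -1 and hit = 0.
void resolve_lookup_indices(const std::vector<int32_t> &keys,
                            const std::vector<int32_t> &lookups,
                            std::vector<int32_t> &indices, std::vector<uint8_t> &hits)
{
  std::map<int32_t, std::size_t> key_map;
  for (std::size_t i = 0; i < keys.size(); ++i)
    key_map[keys[i]] = i; // a duplicated key keeps the last index, as in the kernel above

  indices.resize(lookups.size());
  hits.resize(lookups.size());
  for (std::size_t i = 0; i < lookups.size(); ++i)
  {
    const auto it = key_map.find(lookups[i]);
    indices[i] = (it != key_map.end()) ? static_cast<int32_t>(it->second) : -1;
    hits[i] = (it != key_map.end()) ? uint8_t{1} : uint8_t{0};
  }
}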
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp
new file mode 100644
index 000000000..ecfe05a51
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLNegKernel.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLNegKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
+Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output)
+{
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::S16, DataType::S32,
+ DataType::F16, DataType::F32);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::S16, DataType::S32,
+ DataType::F16, DataType::F32);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DIMENSIONS(input->tensor_shape(), output->tensor_shape());
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+
+ return Status{};
+}
+
+} // namespace
+
+CLNegKernel::CLNegKernel() : _input(nullptr), _output(nullptr) {}
+
+void CLNegKernel::configure(const ICLTensor *input, ICLTensor *output)
+{
+
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info()));
+
+ _input = input;
+ _output = output;
+
+ constexpr unsigned int num_elems_processed_per_iteration = 16;
+
+ // Create kernel
+ std::set<std::string> build_opts;
+ build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type())));
+ build_opts.emplace(
+ ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
+ _kernel =
+ static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel("neg_tensor", build_opts));
+
+ // Configure window
+ Window win = calculate_max_window(*input->info(), Steps(num_elems_processed_per_iteration));
+
+ AccessWindowHorizontal input_access(input->info(), 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
+ update_window_and_padding(win, input_access, output_access);
+ output_access.set_valid_region(win, input->info()->valid_region());
+
+ ICLKernel::configure_internal(win);
+}
+
+void CLNegKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
+
+ Window collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ);
+ Window slice = collapsed.first_slice_window_3D();
+
+ do
+ {
+ unsigned int idx = 0;
+ add_3D_tensor_argument(idx, _input, slice);
+ add_3D_tensor_argument(idx, _output, slice);
+ enqueue(queue, *this, slice, lws_hint());
+ } while (collapsed.slide_window_slice_3D(slice));
+}
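For reference, the "neg_tensor" kernel compiled above is plain element-wise negation; a scalar CPU equivalent, useful as an assumed test-side oracle for the CL path (not an ARMComputeEx API):

#include <cstddef>

// Element-wise negation over identical input/output shapes, i.e. the
// semantics the VEC_SIZE-wide OpenCL kernel vectorizes.
template <typename T> void neg_reference(const T *in, T *out, std::size_t count)
{
  for (std::size_t i = 0; i < count; ++i)
    out[i] = -in[i];
}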
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp
new file mode 100644
index 000000000..e7d587029
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLPReLUKernel.cpp
@@ -0,0 +1,186 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLPReLUKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
+constexpr unsigned int num_elems_processed_per_iteration = 16;
+
+Status validate_info(const ITensorInfo *input, const ITensorInfo *alpha, const ITensorInfo *output)
+{
+ const TensorShape &out_shape =
+ TensorShape::broadcast_shape(input->tensor_shape(), alpha->tensor_shape());
+
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F16, DataType::F32,
+ DataType::QASYMM8);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(alpha, 1, DataType::F16, DataType::F32,
+ DataType::QASYMM8);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
+ "Inputs are not broadcast compatible");
+ // Validate in case of configured output
+ if (output->total_size() > 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::F16, DataType::F32,
+ DataType::QASYMM8);
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(
+ detail::have_different_dimensions(out_shape, output->tensor_shape(), 0),
+ "Wrong shape for output");
+ }
+ return Status{};
+}
+} // namespace
+
+CLPReLUKernel::CLPReLUKernel() : _input(nullptr), _alpha(nullptr), _output(nullptr) {}
+
+void CLPReLUKernel::configure(const ICLTensor *input, const ICLTensor *alpha, ICLTensor *output)
+{
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, alpha);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+ ARM_COMPUTE_ERROR_THROW_ON(validate_info(input->info(), alpha->info(), output->info()));
+
+ _input = input;
+ _alpha = alpha;
+ _output = output;
+
+ // Create kernel
+ std::string kernel_name = "prelu";
+ std::set<std::string> build_opts;
+ build_opts.emplace(("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type())));
+ build_opts.emplace(
+ ("-DVEC_SIZE=" + support::cpp11::to_string(num_elems_processed_per_iteration)));
+
+ if (is_data_type_quantized_asymmetric(input->info()->data_type()))
+ {
+ build_opts.emplace("-DOFF_IN=" +
+ support::cpp11::to_string(input->info()->quantization_info().offset));
+ build_opts.emplace("-DOFF_ALPHA=" +
+ support::cpp11::to_string(alpha->info()->quantization_info().offset));
+ build_opts.emplace("-DOFF_OUT=" +
+ support::cpp11::to_string(output->info()->quantization_info().offset));
+ build_opts.emplace("-DSCALE_IN=" +
+ support::cpp11::to_string(input->info()->quantization_info().scale));
+ build_opts.emplace("-DSCALE_ALPHA=" +
+ support::cpp11::to_string(alpha->info()->quantization_info().scale));
+ build_opts.emplace("-DSCALE_OUT=" +
+ support::cpp11::to_string(output->info()->quantization_info().scale));
+ kernel_name += "_qasymm8";
+ }
+ _kernel =
+ static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
+
+ const std::pair<TensorShape, ValidRegion> broadcast_pair =
+ ITensorInfo::broadcast_shape_and_valid_region(*input->info(), *alpha->info());
+
+ const TensorShape &out_shape = broadcast_pair.first;
+ const ValidRegion &valid_region = broadcast_pair.second;
+
+ // Auto initialize output if not initialized
+ {
+ set_shape_if_empty(*output->info(), out_shape);
+
+ if (input->info()->data_type() == DataType::F16 && alpha->info()->data_type() == DataType::F16)
+ {
+ set_format_if_unknown(*output->info(), Format::F16);
+ }
+ else if (input->info()->data_type() == DataType::F32 ||
+ alpha->info()->data_type() == DataType::F32)
+ {
+ set_format_if_unknown(*output->info(), Format::F32);
+ }
+ }
+
+ Window win = calculate_max_window(valid_region, Steps(num_elems_processed_per_iteration));
+ Window win_input1 = win.broadcast_if_dimension_le_one(*input->info());
+ Window win_input2 = win.broadcast_if_dimension_le_one(*alpha->info());
+
+ AccessWindowHorizontal input1_access(input->info(), 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal input2_access(alpha->info(), 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
+
+ update_window_and_padding(win_input1, input1_access) ||
+ update_window_and_padding(win_input2, input2_access) ||
+ update_window_and_padding(win, output_access);
+
+ output_access.set_valid_region(win, valid_region);
+
+ ICLKernel::configure_internal(win);
+}
+
+void CLPReLUKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
+
+ const TensorShape &in_shape1 = _input->info()->tensor_shape();
+ const TensorShape &in_shape2 = _alpha->info()->tensor_shape();
+ const TensorShape &out_shape = _output->info()->tensor_shape();
+
+ bool can_collapse = true;
+ if (std::min(in_shape1.total_size(), in_shape2.total_size()) > 1)
+ {
+ can_collapse =
+ (std::min(in_shape1.num_dimensions(), in_shape2.num_dimensions()) > Window::DimZ);
+ for (size_t d = Window::DimZ; can_collapse && (d < out_shape.num_dimensions()); d++)
+ {
+ can_collapse = (in_shape1[d] == in_shape2[d]);
+ }
+ }
+
+ bool has_collapsed = false;
+ Window collapsed =
+ can_collapse ? window.collapse_if_possible(ICLKernel::window(), Window::DimZ, &has_collapsed)
+ : window;
+
+ const TensorShape &in_shape1_collapsed =
+ has_collapsed ? in_shape1.collapsed_from(Window::DimZ) : in_shape1;
+ const TensorShape &in_shape2_collapsed =
+ has_collapsed ? in_shape2.collapsed_from(Window::DimZ) : in_shape2;
+
+ Window slice = collapsed.first_slice_window_3D();
+ Window slice_input1 = slice.broadcast_if_dimension_le_one(in_shape1_collapsed);
+ Window slice_input2 = slice.broadcast_if_dimension_le_one(in_shape2_collapsed);
+
+ do
+ {
+ unsigned int idx = 0;
+ add_3D_tensor_argument(idx, _input, slice_input1);
+ add_3D_tensor_argument(idx, _alpha, slice_input2);
+ add_3D_tensor_argument(idx, _output, slice);
+
+ enqueue(queue, *this, slice);
+
+ collapsed.slide_window_slice_3D(slice_input1);
+ collapsed.slide_window_slice_3D(slice_input2);
+ } while (collapsed.slide_window_slice_3D(slice));
+}
+
+BorderSize CLPReLUKernel::border_size() const
+{
+ const unsigned int replicateSize =
+ _output->info()->dimension(0) -
+ std::min(_input->info()->dimension(0), _alpha->info()->dimension(0));
+ const unsigned int border =
+ std::min<unsigned int>(num_elems_processed_per_iteration - 1U, replicateSize);
+ return BorderSize(0, border, 0, 0);
+}
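The broadcast bookkeeping above only decides which windows slide; per element the operation stays simple. A scalar reference for the float path (the QASYMM8 variant dequantizes with the OFF_*/SCALE_* build options, applies the same formula, and requantizes); a hypothetical helper for illustration only:

// PReLU: pass non-negative values through, scale negatives by alpha.
inline float prelu_scalar(float x, float alpha) { return x >= 0.0f ? x : alpha * x; }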
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp
new file mode 100644
index 000000000..24e89db28
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLReduceOperationKernel.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLReduceOperationKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+namespace
+{
+// NOTE This is necessary because it is not guaranteed that the axis positions of input and output
+// are the same.
+const TensorShape inferOutputShape(const TensorShape &input_shape, const uint32_t axis)
+{
+ TensorShape out_shape{input_shape};
+
+ out_shape.set(axis, 1);
+
+ return out_shape;
+}
+} // namespace
+
+namespace
+{
+Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output, const uint32_t axis,
+ ReduceOperation op)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, output);
+
+ if (output->total_size() != 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+ }
+
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::QASYMM8, DataType::F16,
+ DataType::F32, DataType::S32);
+ if (op == ReduceOperation::SUM)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->data_type() == DataType::QASYMM8,
+ "Not support QASYMM8, yet");
+ }
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->tensor_shape().total_size() == 0,
+ "Inputs are not broadcast compatible");
+
+ const auto num_dimensions = input->tensor_shape().num_dimensions();
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(axis >= num_dimensions, "axis must be less than (input's rank).");
+
+ const TensorShape output_shape = inferOutputShape(input->tensor_shape(), axis);
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output_shape.total_size() != output->tensor_shape().total_size(),
+ "output shape's size does not match axis");
+
+ return Status{};
+}
+} // namespace
+
+CLReduceOperationKernel::CLReduceOperationKernel() : _input(nullptr), _output(nullptr), _axis() {}
+
+void CLReduceOperationKernel::configure(const ICLTensor *input, ICLTensor *output,
+ const uint32_t axis, ReduceOperation op)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+
+ ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), axis, op));
+
+ _input = input;
+ _output = output;
+ _axis = axis;
+
+ std::unique_ptr<ITensorInfo> output_info = output->info()->clone();
+ output_info->set_tensor_shape(inferOutputShape(input->info()->tensor_shape(), axis));
+
+ // Construct kernel name
+ std::string kernel_name;
+ int op_code = 0;
+ if (op == ReduceOperation::MAX)
+ {
+ kernel_name = "reduce_min_max";
+ op_code = 1;
+ }
+ else if (op == ReduceOperation::MIN)
+ {
+ kernel_name = "reduce_min_max";
+ op_code = 2;
+ }
+ else if (op == ReduceOperation::SUM)
+ {
+ kernel_name = "reduce_sum_mean";
+ op_code = 3;
+ }
+ else if (op == ReduceOperation::MEAN)
+ {
+ kernel_name = "reduce_sum_mean";
+ op_code = 4;
+ }
+ else
+    throw std::runtime_error("Operation not supported yet");
+
+ // Set kernel build options
+ std::set<std::string> build_opts;
+ build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(output_info->data_type()));
+ build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output_info->dimension(2)));
+ build_opts.emplace("-DOP_CODE=" + support::cpp11::to_string(op_code));
+
+ // Create kernel
+ _kernel =
+ static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
+
+ // Configure kernel window
+ Window win = calculate_max_window(*output_info, Steps());
+
+ Coordinates coord;
+ coord.set_num_dimensions(output_info->num_dimensions());
+ output->info()->set_valid_region(ValidRegion(coord, output_info->tensor_shape()));
+
+ ICLKernel::configure_internal(win);
+}
+
+Status CLReduceOperationKernel::validate(const ITensorInfo *input, const ITensorInfo *output,
+ const uint32_t axis, ReduceOperation op)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(input, output, axis, op));
+
+ return Status{};
+}
+
+void CLReduceOperationKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
+
+ const TensorShape &shape_in = _input->info()->tensor_shape();
+
+ unsigned int idx = 2 * num_arguments_per_4D_tensor(); // Skip the input and output parameters
+
+ _kernel.setArg<cl_int>(idx++, _axis);
+ _kernel.setArg<cl_int>(idx++, shape_in[_axis]);
+
+ // Support dimensions up to 4
+ Window slice_out = window.collapse(ICLKernel::window(), 2, 4);
+
+ // Setup input slice
+ Window slice_in(slice_out);
+ slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
+ slice_in.set(3, Window::Dimension(0, 0, 0));
+
+  // Save the output's shape so it can be restored at the end of this method
+  // TODO Remove changing and restoring the output's shape once it is guaranteed that the axis
+  // positions of input and output are the same
+ const TensorShape shape_out = _output->info()->tensor_shape();
+ _output->info()->set_tensor_shape(inferOutputShape(shape_in, _axis));
+
+ idx = 0;
+ add_4D_tensor_argument(idx, _input, slice_in);
+ add_4D_tensor_argument(idx, _output, slice_out);
+ enqueue(queue, *this, slice_out, lws_hint());
+
+  // Restore the output tensor's original shape
+ _output->info()->set_tensor_shape(shape_out);
+}
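A naive CPU reference for what the reduce kernels compute, assuming a dense 4D buffer with dimension 0 innermost (layout assumed here purely for illustration; only SUM is shown, MEAN additionally divides by the reduced extent):

#include <array>
#include <cstddef>
#include <vector>

std::vector<float> reduce_sum_axis(const std::vector<float> &in,
                                   std::array<std::size_t, 4> shape, std::size_t axis)
{
  std::array<std::size_t, 4> out_shape = shape;
  out_shape[axis] = 1; // mirrors inferOutputShape() above

  // Row-major-from-dim0 strides for input and output.
  std::array<std::size_t, 4> stride{}, out_stride{};
  stride[0] = out_stride[0] = 1;
  for (std::size_t d = 1; d < 4; ++d)
  {
    stride[d] = stride[d - 1] * shape[d - 1];
    out_stride[d] = out_stride[d - 1] * out_shape[d - 1];
  }

  std::vector<float> out(out_stride[3] * out_shape[3], 0.0f);
  for (std::size_t idx = 0; idx < in.size(); ++idx)
  {
    // Decompose the flat index, zero the reduced coordinate, accumulate.
    std::size_t rem = idx, out_idx = 0;
    for (std::size_t d = 4; d-- > 0;)
    {
      const std::size_t coord = rem / stride[d];
      rem %= stride[d];
      out_idx += (d == axis ? 0 : coord) * out_stride[d];
    }
    out[out_idx] += in[idx];
  }
  return out;
}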
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp
new file mode 100644
index 000000000..f7836b6cd
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToBatchNDKernel.cpp
@@ -0,0 +1,241 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLSpaceToBatchNDKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
+constexpr unsigned int num_elems_processed_per_iteration = 16;
+
+Status validate_arguments(const ITensorInfo *input, const ITensorInfo *block_size,
+ const ITensorInfo *padding_size, const ITensorInfo *output)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::F16, DataType::S32,
+ DataType::F32);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(block_size, 1, DataType::S32);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(padding_size, 1, DataType::S32);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::F16, DataType::S32,
+ DataType::F32);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->num_dimensions() != output->num_dimensions(),
+ "The number of dimensions of input should be equal to output");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->data_layout() != output->data_layout(),
+ "The input and output layouts are different!");
+
+ // TODO Support other cases
+ if (input->num_dimensions() == 4 && input->data_layout() == DataLayout::NCHW)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(2) != output->dimension(2),
+ "Input Depth should be equal to Output Depth");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size->dimension(0) != 2 ||
+ padding_size->dimension(1) != 2,
+ "Only 2-dimensional spatial block's size was wrong");
+ }
+ else if (input->num_dimensions() == 4 && input->data_layout() == DataLayout::NHWC)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(0) != output->dimension(0),
+ "Input Depth should be equal to Output Depth");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size->dimension(0) != 2 ||
+ padding_size->dimension(1) != 2,
+ "Only 2-dimensional spatial block's size was wrong");
+ }
+ else
+ {
+ ARM_COMPUTE_RETURN_ERROR_MSG("CLSpaceToBatchNDKernel supports only 4-dimensional input");
+ }
+
+  ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->num_dimensions() < 2 || input->num_dimensions() > 4,
+ "CLSpaceToBatchNDKernel supports dimensions up to 4");
+
+ if (input->data_type() == DataType::QASYMM8)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->quantization_info() != output->quantization_info(),
+ "The input and output quantization info are different!");
+ }
+
+ return Status{};
+}
+
+} // namespace
+
+CLSpaceToBatchNDKernel::CLSpaceToBatchNDKernel()
+{
+ // DO NOTHING
+}
+
+void CLSpaceToBatchNDKernel::configure(const ICLTensor *input, const ICLTensor *block_size,
+ const ICLTensor *padding_size, ICLTensor *output)
+{
+
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_ERROR_THROW_ON(
+ validate_arguments(input->info(), block_size->info(), padding_size->info(), output->info()));
+
+ _input = input;
+ _block_size = block_size;
+ _padding_size = padding_size;
+ _output = output;
+
+ // Set kernel build options
+ // TODO Support other cases
+ std::string kernel_name = "space_to_batch_4d";
+ std::set<std::string> build_opts;
+ Window win;
+
+ if (input->info()->data_layout() == DataLayout::NCHW)
+ {
+ kernel_name += "_nchw";
+ build_opts.emplace("-DDEPTH_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
+ build_opts.emplace("-DHEIGHT_IN=" + support::cpp11::to_string(input->info()->dimension(1)));
+ build_opts.emplace("-DWIDTH_IN=" + support::cpp11::to_string(input->info()->dimension(0)));
+
+ win = calculate_max_window(*output->info(), Steps());
+
+ Coordinates coord;
+ coord.set_num_dimensions(output->info()->num_dimensions());
+ output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
+ }
+ else if (input->info()->data_layout() == DataLayout::NHWC)
+ {
+ kernel_name += "_nhwc";
+ build_opts.emplace("-DHEIGHT_OUT=" + support::cpp11::to_string(output->info()->dimension(2)));
+ build_opts.emplace("-DHEIGHT_IN=" + support::cpp11::to_string(input->info()->dimension(2)));
+ build_opts.emplace("-DWIDTH_IN=" + support::cpp11::to_string(input->info()->dimension(1)));
+ build_opts.emplace("-DVEC_SIZE=" +
+ support::cpp11::to_string(num_elems_processed_per_iteration));
+
+ win = calculate_max_window(*output->info(), Steps(num_elems_processed_per_iteration));
+ AccessWindowHorizontal input_access(input->info(), 0, num_elems_processed_per_iteration);
+ AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
+
+ bool window_changed = update_window_and_padding(win, input_access, output_access);
+ input_access.set_valid_region(win, output->info()->valid_region());
+
+ if (window_changed)
+ {
+ ARM_COMPUTE_CREATE_ERROR(ErrorCode::RUNTIME_ERROR, "Insufficient Padding!");
+ }
+ }
+ else
+ {
+ ARM_COMPUTE_ERROR("Unsupported layout");
+ }
+
+ build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
+ build_opts.emplace("-DBATCH_IN=" + support::cpp11::to_string(input->info()->dimension(3)));
+ if (input->info()->data_type() == DataType::QASYMM8)
+ {
+ build_opts.emplace("-DZERO_VALUE=" +
+ support::cpp11::to_string(input->info()->quantization_info().offset));
+ }
+ else
+ {
+ build_opts.emplace("-DZERO_VALUE=" + support::cpp11::to_string(0));
+ }
+
+ // Create kernel
+ _kernel =
+ static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(kernel_name, build_opts));
+
+ // Configure kernel window
+ ICLKernel::configure_internal(win);
+}
+
+void CLSpaceToBatchNDKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
+
+#if defined(ARM_COMPUTE_DEBUG_ENABLED)
+ const_cast<ICLTensor *>(_block_size)->map(queue);
+ const_cast<ICLTensor *>(_padding_size)->map(queue);
+
+ const size_t num_dimensions = _input->info()->num_dimensions();
+  const size_t num_spatial_dimensions = _block_size->info()->dimension(0);
+  uint32_t batch_size = _input->info()->dimension(num_dimensions - 1);
+  for (size_t i = 0; i < num_spatial_dimensions; ++i)
+ {
+ const int32_t block_size = *reinterpret_cast<int32_t *>(_block_size->ptr_to_element({i}));
+ const int32_t padding_size_pre =
+ *reinterpret_cast<int32_t *>(_padding_size->ptr_to_element({0, i}));
+ const int32_t padding_size_post =
+ *reinterpret_cast<int32_t *>(_padding_size->ptr_to_element({1, i}));
+
+ ARM_COMPUTE_ERROR_ON_MSG(block_size < 1, "Block size should be greater than or equal to 1");
+    ARM_COMPUTE_ERROR_ON_MSG(padding_size_pre < 0 || padding_size_post < 0,
+ "Padding size should be greater than or equal to 0");
+
+ if (num_dimensions == 4 && _input->info()->data_layout() == DataLayout::NCHW)
+ {
+ ARM_COMPUTE_ERROR_ON_MSG(
+ _output->info()->dimension(i) !=
+ (_input->info()->dimension(i) + padding_size_pre + padding_size_post) / block_size,
+ "Dimension value of spatial block does not match output's dimension value");
+ }
+ else
+ {
+ ARM_COMPUTE_ERROR_ON_MSG(
+          _output->info()->dimension(num_dimensions - num_spatial_dimensions - 1 + i) !=
+              (_input->info()->dimension(num_dimensions - num_spatial_dimensions - 1 + i) +
+ padding_size_pre + padding_size_post) /
+ block_size,
+ "Dimension value of spatial block does not match output's dimension value");
+ }
+
+ batch_size *= block_size;
+ }
+ ARM_COMPUTE_ERROR_ON_MSG(
+ _output->info()->dimension(num_dimensions - 1) != batch_size,
+ "Output batch size should be equal to input batch size * (multiplication of all block size)");
+
+ const_cast<ICLTensor *>(_block_size)->unmap(queue);
+ const_cast<ICLTensor *>(_padding_size)->unmap(queue);
+#endif // defined(ARM_COMPUTE_DEBUG_ENABLED)
+
+ Window slice_out = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
+
+  // Setup input slice
+ Window slice_in(slice_out);
+ slice_in.set(Window::DimX, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimY, Window::Dimension(0, 0, 0));
+ slice_in.set(Window::DimZ, Window::Dimension(0, 0, 0));
+ slice_in.set(3, Window::Dimension(0, 0, 0));
+
+ // Set block size window
+ Window win_block = calculate_max_window(*_block_size->info(), Steps());
+
+ // Set padding size window
+ Window win_padding = calculate_max_window(*_padding_size->info(), Steps());
+
+ do
+ {
+ unsigned int idx = 0;
+ add_4D_tensor_argument(idx, _input, slice_in);
+ add_4D_tensor_argument(idx, _output, slice_out);
+ add_1D_tensor_argument(idx, _block_size, win_block);
+ add_2D_tensor_argument(idx, _padding_size, win_padding);
+ enqueue(queue, *this, slice_out);
+ } while (window.slide_window_slice_4D(slice_out) && window.slide_window_slice_4D(slice_in));
+}
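The debug block above verifies the SpaceToBatchND shape contract; stated once in plain code, as an illustrative standalone helper rather than part of the kernel:

#include <cassert>
#include <cstdint>
#include <vector>

struct SpatialDim
{
  int32_t extent, block, pad_pre, pad_post;
};

// Each padded spatial extent must divide evenly by its block size, and the
// output batch is the input batch times the product of all block sizes.
int32_t space_to_batch_out_batch(int32_t in_batch, const std::vector<SpatialDim> &dims)
{
  int32_t out_batch = in_batch;
  for (const auto &d : dims)
  {
    assert(d.block >= 1 && d.pad_pre >= 0 && d.pad_post >= 0);
    assert((d.extent + d.pad_pre + d.pad_post) % d.block == 0);
    out_batch *= d.block;
  }
  return out_batch;
}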
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp
new file mode 100644
index 000000000..b085192a2
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLSpaceToDepthKernel.cpp
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLSpaceToDepthKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+
+using namespace arm_compute;
+
+namespace
+{
+Status validate_arguments(const ITensorInfo *input, const ITensorInfo *output,
+ const int32_t block_size)
+{
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::S32, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(output, 1, DataType::U8, DataType::QASYMM8,
+ DataType::S16, DataType::S32, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(block_size < 1,
+ "Block size should be greater than or equal to 1.");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(input->dimension(3) != output->dimension(3),
+ "Input batch should be equal to Output batch");
+
+ auto layout_out = input->data_layout();
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_LAYOUT(input, output);
+
+ auto index_depth = get_data_layout_dimension_index(layout_out, DataLayoutDimension::CHANNEL);
+ auto index_height = get_data_layout_dimension_index(layout_out, DataLayoutDimension::HEIGHT);
+ auto index_width = get_data_layout_dimension_index(layout_out, DataLayoutDimension::WIDTH);
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(
+ input->dimension(index_depth) * block_size * block_size != output->dimension(index_depth),
+ "Output depth should be equal to (input depth * block size *block size)");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG((input->dimension(index_width) % block_size) ||
+ (input->dimension(index_height) % block_size),
+ "Input height and width should be divisible by block size");
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(
+ (output->dimension(index_width) != (input->dimension(index_width) / block_size)) ||
+ (output->dimension(index_height) != (input->dimension(index_height) / block_size)),
+ "Output height and width should be equal to "
+ "input_height/blocksize and input_width/blocksize respectively");
+
+ return Status{};
+}
+
+} // namespace
+
+CLSpaceToDepthKernel::CLSpaceToDepthKernel() : _input(nullptr), _output(nullptr) {}
+
+void CLSpaceToDepthKernel::configure(const ICLTensor *input, ICLTensor *output,
+ const int32_t block_size)
+{
+
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(input->info(), output->info(), block_size));
+
+ _input = input;
+ _output = output;
+
+ // Set kernel build options
+ auto layout_out = input->info()->data_layout();
+ std::set<std::string> build_opts;
+ build_opts.emplace("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type()));
+ build_opts.emplace("-DBLOCK_SIZE=" + support::cpp11::to_string(block_size));
+ auto index_depth = get_data_layout_dimension_index(layout_out, DataLayoutDimension::CHANNEL);
+ auto depth = input->info()->dimension(index_depth);
+ build_opts.emplace("-DDEPTH_IN=" + support::cpp11::to_string(depth));
+ build_opts.emplace("-DZ_IN=" + support::cpp11::to_string(input->info()->tensor_shape().z()));
+
+ // Create kernel
+ _kernel = static_cast<cl::Kernel>(CLKernelLibraryEx::get().create_kernel(
+ "space_to_depth_" + lower_string(string_from_data_layout(layout_out)), build_opts));
+
+ // Configure kernel window
+ Window win = calculate_max_window(*input->info(), Steps());
+
+ Coordinates coord;
+ coord.set_num_dimensions(output->info()->num_dimensions());
+ output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
+
+ ICLKernel::configure_internal(win);
+}
+
+void CLSpaceToDepthKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_MISMATCHING_WINDOWS(ICLKernel::window(), window);
+
+ Window slice_in = window.first_slice_window_4D().collapse(ICLKernel::window(), 2, 4);
+
+ // Setup output slice
+ Window slice_out(slice_in);
+ slice_out.set(Window::DimX, Window::Dimension(0, 0, 0));
+ slice_out.set(Window::DimY, Window::Dimension(0, 0, 0));
+ slice_out.set(Window::DimZ, Window::Dimension(0, 0, 0));
+ slice_out.set(3, Window::Dimension(0, 0, 0));
+
+ do
+ {
+ unsigned int idx = 0;
+ add_4D_tensor_argument(idx, _input, slice_in);
+ add_4D_tensor_argument(idx, _output, slice_out);
+ enqueue(queue, *this, slice_in);
+ } while (window.slide_window_slice_4D(slice_in) && window.slide_window_slice_4D(slice_out));
+}
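validate_arguments() above pins down the SpaceToDepth shape relation; summarized as a tiny illustrative helper (not part of the kernel API):

#include <cassert>

struct Shape3D
{
  int h, w, c;
};

// H and W shrink by the block size, C grows by block_size^2; batch is unchanged.
Shape3D space_to_depth_shape(Shape3D in, int block)
{
  assert(block >= 1 && in.h % block == 0 && in.w % block == 0);
  return {in.h / block, in.w / block, in.c * block * block};
}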
diff --git a/libs/ARMComputeEx/src/core/CL/kernels/CLTopKV2Kernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLTopKV2Kernel.cpp
index 073c2f7bb..073c2f7bb 100644
--- a/libs/ARMComputeEx/src/core/CL/kernels/CLTopKV2Kernel.cpp
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLTopKV2Kernel.cpp
diff --git a/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.cpp
new file mode 100644
index 000000000..6cc8d9d13
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/CL/kernels/CLTransposeConvLayerUpsampleKernel.h"
+
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibrary.h"
+#include "arm_compute/core/CL/CLValidate.h"
+#include "arm_compute/core/CL/ICLTensor.h"
+#include "arm_compute/core/Error.h"
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/Validate.h"
+#include "arm_compute/core/Window.h"
+
+using namespace arm_compute;
+
+CLTransposeConvLayerUpsampleKernel::CLTransposeConvLayerUpsampleKernel()
+ : _input(nullptr), _output(nullptr), _inner_border(), _info()
+{
+}
+
+Status CLTransposeConvLayerUpsampleKernel::validate(const ITensorInfo *input,
+ const ITensorInfo *output,
+ const BorderSize &inner_border,
+ const PadStrideInfo &info)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_RETURN_ERROR_ON_F16_UNSUPPORTED(input);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::QASYMM8, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_QUANTIZATION_INFO(input, output);
+
+ const DataLayout data_layout = input->data_layout();
+
+ const size_t idx_w = get_data_layout_dimension_index(data_layout, DataLayoutDimension::WIDTH);
+ const size_t idx_h = get_data_layout_dimension_index(data_layout, DataLayoutDimension::HEIGHT);
+ const size_t idx_c = get_data_layout_dimension_index(data_layout, DataLayoutDimension::CHANNEL);
+
+ ARM_COMPUTE_RETURN_ERROR_ON(output->dimension(idx_w) == 0);
+ ARM_COMPUTE_RETURN_ERROR_ON(output->dimension(idx_h) == 0);
+
+ ARM_COMPUTE_RETURN_ERROR_ON(input->dimension(idx_c) != output->dimension(idx_c));
+ for (size_t i = 3; i < Coordinates::num_max_dimensions; ++i)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON(input->dimension(i) != output->dimension(i));
+ }
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(inner_border.right > info.stride().first - 1,
+ "inner_border_right must be smaller that stride_x");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(inner_border.top > info.stride().second - 1,
+ "inner_border_top must be smaller that stride_y");
+
+ return Status{};
+}
+
+void CLTransposeConvLayerUpsampleKernel::configure(const ICLTensor *input, ICLTensor *output,
+ const BorderSize &inner_border,
+ const PadStrideInfo &info)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+
+ _input = input;
+ _output = output;
+ _inner_border = inner_border;
+ _info = info;
+
+ // Perform validation step
+ ARM_COMPUTE_ERROR_THROW_ON(CLTransposeConvLayerUpsampleKernel::validate(
+ input->info(), output->info(), inner_border, info));
+
+ // Create kernel
+ CLBuildOptions build_opts;
+ build_opts.add_option(("-DDATA_TYPE=" + get_cl_type_from_data_type(input->info()->data_type())));
+ _kernel = static_cast<cl::Kernel>(
+ CLKernelLibrary::get().create_kernel("deconvolution_upsample", build_opts.options()));
+
+ constexpr unsigned int num_elems_processed_per_iteration = 1;
+
+ // Configure kernel window
+ Window win = calculate_max_window(*output->info(), Steps(num_elems_processed_per_iteration));
+ AccessWindowHorizontal output_access(output->info(), 0, num_elems_processed_per_iteration);
+ output_access.set_valid_region(win, ValidRegion(Coordinates(), output->info()->tensor_shape()));
+
+ ICLKernel::configure_internal(win);
+}
+
+void CLTransposeConvLayerUpsampleKernel::run(const Window &window, cl::CommandQueue &queue)
+{
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICLKernel::window(), window);
+
+ const DataLayout data_layout = _input->info()->data_layout();
+
+ const size_t idx_w = get_data_layout_dimension_index(data_layout, DataLayoutDimension::WIDTH);
+ const size_t idx_h = get_data_layout_dimension_index(data_layout, DataLayoutDimension::HEIGHT);
+
+ const int out_start_x = _info.pad_left();
+ const int out_end_x = _output->info()->dimension(idx_w) - _inner_border.right -
+ _info.pad_right() + _info.stride().first - 1;
+ const int out_step_x = _info.stride().first;
+
+ const int out_start_y = _inner_border.top + _info.pad_top();
+ const int out_end_y =
+ _output->info()->dimension(idx_h) - _info.pad_bottom() + _info.stride().second - 1;
+ const int out_step_y = _info.stride().second;
+
+ switch (data_layout)
+ {
+ case DataLayout::NCHW:
+ {
+ Window collapsed = window.collapse_if_possible(ICLKernel::window(), Window::DimZ);
+
+ Window slice_out = collapsed.first_slice_window_3D();
+ slice_out.set(Window::DimX, Window::Dimension(out_start_x, out_end_x, out_step_x));
+ slice_out.set(Window::DimY, Window::Dimension(out_start_y, out_end_y, out_step_y));
+
+ Window slice_in = collapsed.first_slice_window_3D();
+
+ do
+ {
+ unsigned int idx = 0;
+ add_3D_tensor_argument(idx, _input, slice_in);
+ add_3D_tensor_argument(idx, _output, slice_out);
+ enqueue(queue, *this, slice_out);
+ } while (collapsed.slide_window_slice_3D(slice_in) &&
+ collapsed.slide_window_slice_3D(slice_out));
+ break;
+ }
+ case DataLayout::NHWC:
+ {
+ // NOTE: not collapsing in NHWC
+ Window slice_out = window.first_slice_window_3D();
+ slice_out.set(Window::DimY, Window::Dimension(out_start_x, out_end_x, out_step_x));
+ slice_out.set(Window::DimZ, Window::Dimension(out_start_y, out_end_y, out_step_y));
+
+ Window slice_in = window.first_slice_window_3D();
+
+ do
+ {
+ unsigned int idx = 0;
+ add_3D_tensor_argument(idx, _input, slice_in);
+ add_3D_tensor_argument(idx, _output, slice_out);
+ enqueue(queue, *this, slice_out);
+ } while (window.slide_window_slice_3D(slice_in) && window.slide_window_slice_3D(slice_out));
+ break;
+ }
+ default:
+ ARM_COMPUTE_ERROR("Unsupported data layout");
+ }
+}
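The start/end/step arithmetic in run() above implements a strided scatter: input pixel (x, y) lands at output (start_x + x * stride_x, start_y + y * stride_y), and every other output element keeps its previous (fill) value. A plain CPU sketch, assuming the caller has already pre-filled and correctly sized the output:

#include <cstddef>
#include <vector>

void upsample_scatter(const std::vector<float> &in, int in_w, int in_h,
                      std::vector<float> &out, int out_w,
                      int stride_x, int stride_y, int start_x, int start_y)
{
  for (int y = 0; y < in_h; ++y)
    for (int x = 0; x < in_w; ++x)
      out[static_cast<std::size_t>(start_y + y * stride_y) * out_w +
          static_cast<std::size_t>(start_x + x * stride_x)] =
          in[static_cast<std::size_t>(y) * in_w + x];
}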
diff --git a/runtimes/libs/ARMComputeEx/src/core/CPP/kernels/CPPUpsampleKernelEx.cpp b/runtimes/libs/ARMComputeEx/src/core/CPP/kernels/CPPUpsampleKernelEx.cpp
new file mode 100644
index 000000000..8ac667ceb
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/CPP/kernels/CPPUpsampleKernelEx.cpp
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/core/CPP/kernels/CPPUpsampleKernelEx.h"
+
+#include "arm_compute/core/Error.h"
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/ITensor.h"
+#include "arm_compute/core/TensorInfo.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/core/Validate.h"
+#include "arm_compute/core/utils/misc/ShapeCalculator.h"
+
+#include <cstddef>
+#include <cstdint>
+
+namespace arm_compute
+{
+CPPUpsampleKernelEx::CPPUpsampleKernelEx() : _input(nullptr), _output(nullptr), _info() {}
+
+bool CPPUpsampleKernelEx::is_parallelisable() const { return false; }
+
+void CPPUpsampleKernelEx::configure(const ITensor *input, ITensor *output,
+ const PadStrideInfo &info)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+
+ _input = input;
+ _output = output;
+ _info = info;
+
+ // Configure kernel window
+ Window win = calculate_max_window(*input->info(), Steps());
+
+ // The CPPUpsampleKernelEx doesn't need padding so update_window_and_padding() can be skipped
+ Coordinates coord;
+ coord.set_num_dimensions(output->info()->num_dimensions());
+ output->info()->set_valid_region(ValidRegion(coord, output->info()->tensor_shape()));
+
+ ICPPKernel::configure(win);
+}
+
+void CPPUpsampleKernelEx::run(const Window &window, const ThreadInfo &info)
+{
+ ARM_COMPUTE_UNUSED(info);
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(ICPPKernel::window(), window);
+
+ // Initialize _scaled_output buffer
+ const int width_scaled = _output->info()->dimension(0);
+ const int height_scaled = _output->info()->dimension(1);
+ const int stride_x = _info.stride().first;
+ const int stride_y = _info.stride().second;
+ const int start_x = _info.pad_left();
+ const int start_y = _info.pad_top();
+ const int end_y = height_scaled - _info.pad_bottom();
+  const int end_x = width_scaled - _info.pad_right();
+ const size_t element_size = _input->info()->element_size();
+
+ // The fill value is normally 0, but for QASYMM8 the '0' corresponds to the offset
+ const uint8_t fill_value =
+ _output->info()->data_type() == DataType::QASYMM8
+ ? utility::clamp<uint8_t>(_output->info()->quantization_info().offset)
+ : 0;
+ // Filling a value different than 0 works only for QASYMM8 datatype since we are filling 1byte
+ // values in a buffer of uint8_ts
+ std::fill_n(_output->buffer(), _output->info()->total_size(), fill_value);
+
+ // Create window
+ Window window_out(window);
+ window_out.set(Window::DimX, Window::Dimension(start_x, end_x, stride_x));
+ window_out.set(Window::DimY, Window::Dimension(start_y, end_y, stride_y));
+
+ // Create iterators
+ Iterator in(_input, window);
+ Iterator out(_output, window_out);
+
+ execute_window_loop(
+ window, [&](const Coordinates &) { memcpy(out.ptr(), in.ptr(), element_size); }, in, out);
+}
+} // namespace arm_compute
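One subtlety above is worth restating: the pre-fill value is not always 0. For QASYMM8 the encoded zero is the quantization offset, so the buffer is filled with that offset clamped into byte range. A minimal sketch of just that selection, as a hypothetical helper:

#include <algorithm>
#include <cstdint>

uint8_t upsample_fill_value(bool is_qasymm8, int offset)
{
  // QASYMM8: real-valued 0 encodes as the offset; other types fill with literal 0.
  return is_qasymm8 ? static_cast<uint8_t>(std::min(std::max(offset, 0), 255)) : uint8_t{0};
}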
diff --git a/runtimes/libs/ARMComputeEx/src/core/NEON/NEElementwiseOperationFuncs.cpp b/runtimes/libs/ARMComputeEx/src/core/NEON/NEElementwiseOperationFuncs.cpp
new file mode 100644
index 000000000..4508f5800
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/NEON/NEElementwiseOperationFuncs.cpp
@@ -0,0 +1,346 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "arm_compute/core/NEON/NEElementwiseOperationFuncs.h"
+
+#include <algorithm>
+#include "arm_compute/core/Types.h"
+#include "arm_compute/core/NEON/NEAsymm.h"
+#include "arm_compute/core/ITensor.h"
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/Window.h"
+
+namespace
+{
+void store_quantized_int32(uint8_t *output_ptr, const int32x4x4_t &out)
+{
+ const uint8x8_t pa = vqmovun_s16(vcombine_s16(vqmovn_s32(out.val[0]), vqmovn_s32(out.val[1])));
+ const uint8x8_t pb = vqmovun_s16(vcombine_s16(vqmovn_s32(out.val[2]), vqmovn_s32(out.val[3])));
+ vst1q_u8(output_ptr, vcombine_u8(pa, pb));
+}
+
+using namespace arm_compute;
+template <typename InputScalarType, typename OutputScalarType, typename InputVectorType>
+void elementwise_op_templ(
+ const ITensor *in1, const ITensor *in2, ITensor *out, const Window &window,
+ OutputScalarType (*scalar_func)(const InputScalarType &, const InputScalarType &),
+ int (*broadcast_func)(int, int, int, const InputScalarType *, const InputScalarType &,
+ OutputScalarType *, const bool),
+ int (*neon_func)(int, int, int, const InputScalarType *, const InputScalarType *,
+ OutputScalarType *))
+{
+ // Create input windows
+ Window input1_win = window.broadcast_if_dimension_le_one(in1->info()->tensor_shape());
+ Window input2_win = window.broadcast_if_dimension_le_one(in2->info()->tensor_shape());
+
+ // Clear X Dimension on execution window as we handle manually
+ Window win = window;
+ win.set(Window::DimX, Window::Dimension(0, 1, 1));
+
+ const int window_step_x = std::min(16 / static_cast<int>(sizeof(OutputScalarType)), 8);
+ const auto window_start_x = static_cast<int>(window.x().start());
+ const auto window_end_x = static_cast<int>(window.x().end());
+ const bool is_broadcast_across_x = (input1_win.x().step() == 0) || (input2_win.x().step() == 0);
+
+ if (is_broadcast_across_x)
+ {
+ const bool is_broadcast_input_2 = input2_win.x().step() == 0;
+ Window broadcast_win = is_broadcast_input_2 ? input2_win : input1_win;
+ Window non_broadcast_win = !is_broadcast_input_2 ? input2_win : input1_win;
+ const ITensor *broadcast_tensor = is_broadcast_input_2 ? in2 : in1;
+ const ITensor *non_broadcast_tensor = !is_broadcast_input_2 ? in2 : in1;
+
+ // Clear X Dimension on execution window as we handle manually
+ non_broadcast_win.set(Window::DimX, Window::Dimension(0, 1, 1));
+
+ Iterator broadcast_input(broadcast_tensor, broadcast_win);
+ Iterator non_broadcast_input(non_broadcast_tensor, non_broadcast_win);
+ Iterator output(out, win);
+
+ execute_window_loop(win,
+ [&](const Coordinates &) {
+ auto output_ptr = reinterpret_cast<OutputScalarType *>(output.ptr());
+ const auto non_broadcast_input_ptr =
+ reinterpret_cast<const InputScalarType *>(non_broadcast_input.ptr());
+ const InputScalarType broadcast_value =
+ *reinterpret_cast<const InputScalarType *>(broadcast_input.ptr());
+
+ int x = (*broadcast_func)(window_start_x, window_end_x, window_step_x,
+ non_broadcast_input_ptr, broadcast_value,
+ output_ptr, !is_broadcast_input_2);
+ for (; x < window_end_x; ++x)
+ {
+ const auto a = *(non_broadcast_input_ptr + x);
+ *(output_ptr + x) =
+ (*scalar_func)(!is_broadcast_input_2 ? broadcast_value : a,
+ !is_broadcast_input_2 ? a : broadcast_value);
+ }
+ },
+ broadcast_input, non_broadcast_input, output);
+ }
+ else
+ {
+ // Clear X Dimension on execution window as we handle manually
+ input1_win.set(Window::DimX, Window::Dimension(0, 1, 1));
+ input2_win.set(Window::DimX, Window::Dimension(0, 1, 1));
+
+ Iterator input1(in1, input1_win);
+ Iterator input2(in2, input2_win);
+ Iterator output(out, win);
+
+ execute_window_loop(win,
+ [&](const Coordinates &) {
+ auto output_ptr = reinterpret_cast<OutputScalarType *>(output.ptr());
+ const auto input1_ptr =
+ reinterpret_cast<const InputScalarType *>(input1.ptr());
+ const auto input2_ptr =
+ reinterpret_cast<const InputScalarType *>(input2.ptr());
+
+ int x = (*neon_func)(window_start_x, window_end_x, window_step_x,
+ input1_ptr, input2_ptr, output_ptr);
+ for (; x < window_end_x; ++x)
+ {
+ const auto a = *(input1_ptr + x);
+ const auto b = *(input2_ptr + x);
+ *(output_ptr + x) = (*scalar_func)(a, b);
+ }
+ },
+ input1, input2, output);
+ }
+}
+
+} // namespace
+
+namespace arm_compute
+{
+
+float32x4x4_t load_quantized(const uint8_t *input1_ptr, const int32x4_t &offset,
+ const float32x4_t &scale)
+{
+ qasymm8x16_t x = vld1q_u8(input1_ptr);
+ const float32x4x4_t out = {{
+ vmulq_f32(
+ vcvtq_f32_s32(vsubq_s32(
+ vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(vmovl_u8(vget_low_u8(x))))), offset)),
+ scale),
+ vmulq_f32(
+ vcvtq_f32_s32(vsubq_s32(
+ vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(vmovl_u8(vget_low_u8(x))))), offset)),
+ scale),
+ vmulq_f32(
+ vcvtq_f32_s32(vsubq_s32(
+ vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(vmovl_u8(vget_high_u8(x))))), offset)),
+ scale),
+ vmulq_f32(
+ vcvtq_f32_s32(vsubq_s32(
+ vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(vmovl_u8(vget_high_u8(x))))), offset)),
+ scale),
+ }};
+ return out;
+}
+
+void store_quantized(uint8_t *output_ptr, const float32x4x4_t &rf, const float32x4_t &offset,
+ const float32x4_t &invscale)
+{
+ int32x4x4_t out = {{
+ vcvtq_s32_f32(vmlaq_f32(offset, rf.val[0], invscale)),
+ vcvtq_s32_f32(vmlaq_f32(offset, rf.val[1], invscale)),
+ vcvtq_s32_f32(vmlaq_f32(offset, rf.val[2], invscale)),
+ vcvtq_s32_f32(vmlaq_f32(offset, rf.val[3], invscale)),
+ }};
+ store_quantized_int32(output_ptr, out);
+}
+
+float32x4x4_t dup_quantized(uint8_t broadcast_value, int offset, float scale)
+{
+ const qasymm8x16_t broadcast_value_vec = vdupq_n_u8(broadcast_value);
+ const int32x4_t voffset = vdupq_n_s32(offset);
+ const float32x4_t vscale = vdupq_n_f32(scale);
+
+ const float32x4x4_t broadcast_vector = {{
+ vmulq_f32(vcvtq_f32_s32(vsubq_s32(vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(
+ vmovl_u8(vget_low_u8(broadcast_value_vec))))),
+ voffset)),
+ vscale),
+ vmulq_f32(vcvtq_f32_s32(vsubq_s32(vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(
+ vmovl_u8(vget_low_u8(broadcast_value_vec))))),
+ voffset)),
+ vscale),
+ vmulq_f32(vcvtq_f32_s32(vsubq_s32(vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(
+ vmovl_u8(vget_high_u8(broadcast_value_vec))))),
+ voffset)),
+ vscale),
+ vmulq_f32(vcvtq_f32_s32(vsubq_s32(vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(
+ vmovl_u8(vget_high_u8(broadcast_value_vec))))),
+ voffset)),
+ vscale),
+ }};
+ return broadcast_vector;
+}
+
+void elementwise_op_quantized(
+ const ITensor *in1, const ITensor *in2, ITensor *out, const Window &window,
+ uint8_t (*scalar_func)(const float &, const float &, QuantizationInfo),
+ int (*broadcast_func)(int, int, int, const uint8_t *, float32x4x4_t, uint8_t *, int32x4_t,
+ float32x4_t, float32x4_t, float32x4_t, const bool),
+ int (*neon_func)(int, int, int, const uint8_t *, const uint8_t *, uint8_t *, int32x4_t,
+ int32x4_t, float32x4_t, float32x4_t, float32x4_t, float32x4_t))
+{
+ // Create input windows
+ Window input1_win = window.broadcast_if_dimension_le_one(in1->info()->tensor_shape());
+ Window input2_win = window.broadcast_if_dimension_le_one(in2->info()->tensor_shape());
+
+ // Clear X Dimension on execution window as we handle manually
+ Window win = window;
+ win.set(Window::DimX, Window::Dimension(0, 1, 1));
+
+ const int window_step_x = 16;
+ const auto window_start_x = static_cast<int>(window.x().start());
+ const auto window_end_x = static_cast<int>(window.x().end());
+ const bool is_broadcast_across_x = (input1_win.x().step() == 0) || (input2_win.x().step() == 0);
+
+ const float output_scale = out->info()->quantization_info().scale;
+ const int output_offset = out->info()->quantization_info().offset;
+
+  // Output quantization info (0.5 is added so that the truncating
+  // float-to-int conversion below rounds to the nearest integer)
+ const float32x4_t voffseto = vdupq_n_f32(output_offset + 0.5f);
+ const float32x4_t invvscaleo = vdupq_n_f32(1.f / output_scale);
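+  // Illustration: with output scale 0.5 and offset 10, a float result of 2.3f
+  // is stored as (int)(2.3f / 0.5f + 10 + 0.5f) = (int)15.1f = 15.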
+
+ if (is_broadcast_across_x)
+ {
+ // Select the broadcast input on the X axis
+ const bool is_broadcast_input_2 = input2_win.x().step() == 0;
+ Window broadcast_win = is_broadcast_input_2 ? input2_win : input1_win;
+ Window non_broadcast_win = !is_broadcast_input_2 ? input2_win : input1_win;
+ const ITensor *broadcast_tensor = is_broadcast_input_2 ? in2 : in1;
+ const ITensor *non_broadcast_tensor = !is_broadcast_input_2 ? in2 : in1;
+
+ const QuantizationInfo broadcast_qinfo = broadcast_tensor->info()->quantization_info();
+ const QuantizationInfo non_broadcast_qinfo = non_broadcast_tensor->info()->quantization_info();
+
+ const int32x4_t voffset_non_broadcast = vdupq_n_s32(non_broadcast_qinfo.offset);
+ const float32x4_t vscale_non_broadcast = vdupq_n_f32(non_broadcast_qinfo.scale);
+
+    // Collapse the X dimension of the execution window; X is iterated manually below
+ non_broadcast_win.set(Window::DimX, Window::Dimension(0, 1, 1));
+
+ Iterator broadcast_input(broadcast_tensor, broadcast_win);
+ Iterator non_broadcast_input(non_broadcast_tensor, non_broadcast_win);
+ Iterator output(out, win);
+
+ execute_window_loop(
+ win,
+ [&](const Coordinates &) {
+ const auto non_broadcast_input_ptr =
+ reinterpret_cast<const uint8_t *>(non_broadcast_input.ptr());
+ const auto output_ptr = reinterpret_cast<uint8_t *>(output.ptr());
+
+ const uint8_t broadcast_value = *reinterpret_cast<const uint8_t *>(broadcast_input.ptr());
+ const float32x4x4_t broadcast_vector =
+ dup_quantized(broadcast_value, broadcast_qinfo.offset, broadcast_qinfo.scale);
+
+ int x = (*broadcast_func)(window_start_x, window_end_x, window_step_x,
+ non_broadcast_input_ptr, broadcast_vector, output_ptr,
+ voffset_non_broadcast, vscale_non_broadcast, voffseto,
+ invvscaleo, !is_broadcast_input_2);
+ for (; x < window_end_x; ++x)
+ {
+ const float afs =
+ scvt_f32_qasymm8(*(non_broadcast_input_ptr + x), non_broadcast_qinfo.scale,
+ non_broadcast_qinfo.offset);
+ const float bfs =
+ scvt_f32_qasymm8(broadcast_value, broadcast_qinfo.scale, broadcast_qinfo.offset);
+ *(output_ptr + x) =
+ (*scalar_func)(!is_broadcast_input_2 ? bfs : afs, !is_broadcast_input_2 ? afs : bfs,
+ out->info()->quantization_info());
+ }
+ },
+ broadcast_input, non_broadcast_input, output);
+ }
+ else
+ {
+ // Input1 quantization info
+ const int32x4_t voffset1 = vdupq_n_s32(in1->info()->quantization_info().offset);
+ const float32x4_t vscale1 = vdupq_n_f32(in1->info()->quantization_info().scale);
+
+ // Input2 quantization info
+ const int32x4_t voffset2 = vdupq_n_s32(in2->info()->quantization_info().offset);
+ const float32x4_t vscale2 = vdupq_n_f32(in2->info()->quantization_info().scale);
+
+    // Collapse the X dimension of the execution window; X is iterated manually below
+ input1_win.set(Window::DimX, Window::Dimension(0, 1, 1));
+ input2_win.set(Window::DimX, Window::Dimension(0, 1, 1));
+
+ const QuantizationInfo input1_qinfo = in1->info()->quantization_info();
+ const QuantizationInfo input2_qinfo = in2->info()->quantization_info();
+
+ Iterator input1(in1, input1_win);
+ Iterator input2(in2, input2_win);
+ Iterator output(out, win);
+
+ execute_window_loop(
+ win,
+ [&](const Coordinates &) {
+ const auto input1_ptr = reinterpret_cast<const uint8_t *>(input1.ptr());
+ const auto input2_ptr = reinterpret_cast<const uint8_t *>(input2.ptr());
+ const auto output_ptr = reinterpret_cast<uint8_t *>(output.ptr());
+
+ int x =
+ (*neon_func)(window_start_x, window_end_x, window_step_x, input1_ptr, input2_ptr,
+ output_ptr, voffset1, voffset2, vscale1, vscale2, voffseto, invvscaleo);
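+          // neon_func processes as many full 16-byte vectors as fit in the
+          // window and returns the first unprocessed index; e.g. for a window
+          // of width 20 it returns 16, and the scalar tail below covers 16..19.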
+ for (; x < window_end_x; ++x)
+ {
+ const float afs =
+ scvt_f32_qasymm8(*(input1_ptr + x), input1_qinfo.scale, input1_qinfo.offset);
+ const float bfs =
+ scvt_f32_qasymm8(*(input2_ptr + x), input2_qinfo.scale, input2_qinfo.offset);
+ *(output_ptr + x) = (*scalar_func)(afs, bfs, out->info()->quantization_info());
+ }
+ },
+ input1, input2, output);
+ }
+}
+
+void elementwise_op(const ITensor *in1, const ITensor *in2, ITensor *out, const Window &window,
+ float (*scalar_func)(const float &, const float &),
+ int (*broadcast_func)(int, int, int, const float *, const float &, float *,
+ const bool),
+ int (*neon_func)(int, int, int, const float *, const float *, float *))
+{
+ elementwise_op_templ<float, float, float32x4_t>(in1, in2, out, window, scalar_func,
+ broadcast_func, neon_func);
+}
+
+void elementwise_op(const ITensor *in1, const ITensor *in2, ITensor *out, const Window &window,
+ uint8_t (*scalar_func)(const uint8_t &, const uint8_t &),
+ int (*broadcast_func)(int, int, int, const uint8_t *, const uint8_t &,
+ uint8_t *, const bool),
+ int (*neon_func)(int, int, int, const uint8_t *, const uint8_t *, uint8_t *))
+{
+ elementwise_op_templ<uint8_t, uint8_t, uint8x16_t>(in1, in2, out, window, scalar_func,
+ broadcast_func, neon_func);
+}
+} // namespace arm_compute
diff --git a/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEBinaryLogicalOperationKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEBinaryLogicalOperationKernel.cpp
new file mode 100644
index 000000000..d2f42de53
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEBinaryLogicalOperationKernel.cpp
@@ -0,0 +1,237 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/core/NEON/kernels/NEBinaryLogicalOperationKernel.h"
+
+#include "arm_compute/core/Error.h"
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/ITensor.h"
+#include "arm_compute/core/NEON/wrapper/wrapper.h"
+#include "arm_compute/core/NEON/NEElementwiseOperationFuncs.h"
+#include "arm_compute/core/TensorInfo.h"
+#include "arm_compute/core/Validate.h"
+
+#include <algorithm>
+#include <arm_neon.h>
+#include <map>
+#include <string>
+
+namespace arm_compute
+{
+class Coordinates;
+} // namespace arm_compute
+
+namespace arm_compute
+{
+
+template <BinaryLogicalOperation op, typename ScalarType>
+inline ScalarType elementwise_logic_op_scalar(const ScalarType &a, const ScalarType &b)
+{
+ auto res = ScalarType(0);
+
+ switch (op)
+ {
+ case BinaryLogicalOperation::AND:
+ res = a & b;
+ break;
+ case BinaryLogicalOperation::OR:
+ res = a | b;
+ break;
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+ return res;
+}
+
+template <BinaryLogicalOperation op, typename VectorType>
+inline VectorType elementwise_logic_op(const VectorType &a, const VectorType &b)
+{
+ VectorType res = {0, 0, 0, 0};
+
+ switch (op)
+ {
+ case BinaryLogicalOperation::AND:
+ res = wrapper::vand(a, b);
+ break;
+ case BinaryLogicalOperation::OR:
+ res = wrapper::vorr(a, b);
+ break;
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+ return res;
+}
+
+template <BinaryLogicalOperation op>
+inline uint8x16x4_t elementwise_logic_op(const uint8x16x4_t &a, const uint8x16x4_t &b)
+{
+ uint8x16x4_t out = {{
+ elementwise_logic_op<op>(a.val[0], b.val[0]), elementwise_logic_op<op>(a.val[1], b.val[1]),
+ elementwise_logic_op<op>(a.val[2], b.val[2]), elementwise_logic_op<op>(a.val[3], b.val[3]),
+ }};
+ return out;
+}
+
+template <BinaryLogicalOperation op, typename ScalarType, typename VectorType>
+inline VectorType elementwise_logic_op_broadcast(const VectorType &a,
+ const ScalarType &broadcast_value,
+ const bool reorder)
+{
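+  // `reorder` selects which operand the broadcast value becomes: when true it
+  // is the left-hand side. AND/OR are commutative, so here this only preserves
+  // the calling convention shared with non-commutative element-wise helpers.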
+ VectorType broadcast_vector = wrapper::vdup_n(broadcast_value, wrapper::traits::vector_128_tag());
+ return elementwise_logic_op<op>(reorder ? broadcast_vector : a, reorder ? a : broadcast_vector);
+}
+
+template <BinaryLogicalOperation op, typename ScalarType, typename VectorType>
+inline int elementwise_logic_op_loop(int window_start_x, int window_end_x, int window_step_x,
+ const ScalarType *input1_ptr, const ScalarType *input2_ptr,
+ ScalarType *output_ptr)
+{
+ int x = window_start_x;
+ for (; x <= (window_end_x - window_step_x); x += window_step_x)
+ {
+ const auto a = wrapper::vloadq(input1_ptr + x);
+ const auto b = wrapper::vloadq(input2_ptr + x);
+ wrapper::vstore(output_ptr + x, elementwise_logic_op<op>(a, b));
+ }
+ return x;
+}
+
+template <BinaryLogicalOperation op, typename ScalarType, typename VectorType>
+inline int elementwise_logic_op_broadcast_loop(int window_start_x, int window_end_x,
+ int window_step_x,
+ const ScalarType *non_broadcast_input_ptr,
+ const ScalarType &broadcast_value,
+ ScalarType *output_ptr, const bool reorder)
+{
+ int x = window_start_x;
+ for (; x <= (window_end_x - window_step_x); x += window_step_x)
+ {
+ const auto a = wrapper::vloadq((non_broadcast_input_ptr + x));
+ wrapper::vstore(output_ptr + x,
+ elementwise_logic_op_broadcast<op>(a, broadcast_value, reorder));
+ }
+ return x;
+}
+
+template <BinaryLogicalOperation op, typename ScalarType, typename VectorType>
+void elementwise_logic_op(const ITensor *in1, const ITensor *in2, ITensor *out,
+ const Window &window)
+{
+ elementwise_op(in1, in2, out, window, &elementwise_logic_op_scalar<op, ScalarType>,
+ &elementwise_logic_op_broadcast_loop<op, ScalarType, VectorType>,
+ &elementwise_logic_op_loop<op, ScalarType, VectorType>);
+}
+
+std::function<void(const ITensor *, const ITensor *, ITensor *, const Window &)> configure_func(
+ const ITensor *input1, const ITensor *input2, ITensor *output,
+ std::map<std::string, NEElementwiseOperationKernel::ElementwiseFunction *> map_function)
+{
+ std::string function_to_call("op_");
+ function_to_call += string_from_data_type(input1->info()->data_type()) + "_";
+ function_to_call += string_from_data_type(input2->info()->data_type()) + "_";
+ function_to_call += string_from_data_type(output->info()->data_type());
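+  // e.g. two U8 inputs and a U8 output yield the key "op_U8_U8_U8", which is
+  // then looked up in map_function.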
+
+ auto it = map_function.find(function_to_call);
+
+ if (it != map_function.end())
+ {
+ auto func = it->second;
+ return [func](const ITensor *input1, const ITensor *input2, ITensor *output,
+ const Window &window) { func(input1, input2, output, window); };
+ }
+ return nullptr;
+}
+
+template <BinaryLogicalOperation op>
+std::function<void(const ITensor *, const ITensor *, ITensor *, const Window &)>
+configure_logic_func(const ITensor *input1, const ITensor *input2, ITensor *output)
+{
+ static std::map<std::string, NEElementwiseOperationKernel::ElementwiseFunction *> map_function = {
+ {"op_U8_U8_U8", &elementwise_logic_op<op, uint8_t, uint8x16_t>},
+ {"op_QASYMM8_QASYMM8_QASYMM8", &elementwise_logic_op<op, uint8_t, uint8x16_t>}};
+
+ return configure_func(input1, input2, output, map_function);
+}
+
+void NEBinaryLogicalOperationKernel::configure(BinaryLogicalOperation op, const ITensor *input1,
+ const ITensor *input2, ITensor *output)
+{
+ ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(*input1->info(), *input2->info(), *output->info()));
+ configure_common(input1, input2, output);
+ switch (op)
+ {
+ case BinaryLogicalOperation::AND:
+ _function = configure_logic_func<BinaryLogicalOperation::AND>(input1, input2, output);
+ break;
+ case BinaryLogicalOperation::OR:
+ _function = configure_logic_func<BinaryLogicalOperation::OR>(input1, input2, output);
+ break;
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+}
+
+Status NEBinaryLogicalOperationKernel::validate_arguments(const ITensorInfo &input1,
+ const ITensorInfo &input2,
+ const ITensorInfo &output)
+{
+ // Validate in case of configured output
+ if (output.total_size() > 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(&output, 1, DataType::U8,
+ DataType::QASYMM8);
+ }
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(&input1, 1, DataType::U8, DataType::QASYMM8);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(&input2, 1, DataType::U8, DataType::QASYMM8);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(&input1, &input2);
+
+ const TensorShape out_shape =
+ TensorShape::broadcast_shape(input1.tensor_shape(), input2.tensor_shape());
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
+ "Inputs are not broadcast compatible");
+
+ // Validate in case of configured output
+ if (output.total_size() > 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(
+ detail::have_different_dimensions(out_shape, output.tensor_shape(), 0),
+ "Wrong shape for output");
+ }
+
+ return Status{};
+}
+
+Status NEBinaryLogicalOperationKernel::validate(BinaryLogicalOperation op,
+ const ITensorInfo *input1,
+ const ITensorInfo *input2,
+ const ITensorInfo *output)
+{
+ ARM_COMPUTE_UNUSED(op);
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input1, input2, output);
+ ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(*input1, *input2, *output));
+ return Status{};
+}
+
+} // namespace arm_compute
diff --git a/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEElementwiseUnaryKernelEx.cpp b/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEElementwiseUnaryKernelEx.cpp
new file mode 100644
index 000000000..cebd614df
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEElementwiseUnaryKernelEx.cpp
@@ -0,0 +1,205 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/core/NEON/kernels/NEElementwiseUnaryKernelEx.h"
+
+#include "arm_compute/core/CPP/Validate.h"
+#include "arm_compute/core/Error.h"
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/IAccessWindow.h"
+#include "arm_compute/core/ITensor.h"
+#include "arm_compute/core/NEON/NEAsymm.h"
+#include "arm_compute/core/NEON/NEFixedPoint.h"
+#include "arm_compute/core/NEON/wrapper/wrapper.h"
+#include "arm_compute/core/TensorInfo.h"
+#include "arm_compute/core/Validate.h"
+
+#include <algorithm>
+#include <arm_neon.h>
+#include <cstdint>
+#include <map>
+#include <string>
+
+namespace arm_compute
+{
+class Coordinates;
+
+namespace
+{
+template <ElementWiseUnaryEx op, typename ScalarType>
+inline ScalarType elementwise_op_scalar(const ScalarType &a)
+{
+ switch (op)
+ {
+ case ElementWiseUnaryEx::NEG:
+ return -a;
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+}
+
+template <ElementWiseUnaryEx op, typename VectorType>
+inline VectorType elementwise_op(const VectorType &a)
+{
+ switch (op)
+ {
+ case ElementWiseUnaryEx::NEG:
+ return wrapper::vneg(a);
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+}
+
+template <ElementWiseUnaryEx op, typename ScalarType>
+void elementwise_op(const ITensor *in, ITensor *out, const Window &window)
+{
+ const int window_step_x = 16 / sizeof(ScalarType);
+ const auto window_start_x = static_cast<int>(window.x().start());
+ const auto window_end_x = static_cast<int>(window.x().end());
+
+ Window win = window;
+ win.set(Window::DimX, Window::Dimension(0, 1, 1));
+
+ Iterator input(in, win);
+ Iterator output(out, win);
+
+ execute_window_loop(win,
+ [&](const Coordinates &) {
+ auto output_ptr = reinterpret_cast<ScalarType *>(output.ptr());
+ const auto input_ptr = reinterpret_cast<const ScalarType *>(input.ptr());
+
+ int x = window_start_x;
+ for (; x <= window_end_x - window_step_x; x += window_step_x)
+ {
+ wrapper::vstore(output_ptr + x,
+ elementwise_op<op>(wrapper::vloadq(input_ptr + x)));
+ }
+ for (; x < window_end_x; ++x)
+ {
+ *(output_ptr + x) = elementwise_op_scalar<op>(*(input_ptr + x));
+ }
+ },
+ input, output);
+}
+
+template <ElementWiseUnaryEx op>
+std::function<void(const ITensor *input, ITensor *output, const Window &window)>
+configure_func(const ITensor *input, ITensor *output)
+{
+ std::string function_to_call("op_");
+ function_to_call += string_from_data_type(input->info()->data_type()) + "_";
+ function_to_call += string_from_data_type(output->info()->data_type());
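+  // e.g. an F32 input and F32 output yield the lookup key "op_F32_F32".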
+
+ static std::map<std::string, NEElementwiseUnaryKernelEx::ElementwiseUnaryFunction *>
+ map_function = {
+ {"op_F32_F32", &elementwise_op<op, float>}, {"op_S32_S32", &elementwise_op<op, int32_t>},
+ };
+#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
+ map_function["op_F16_F16"] = &elementwise_op<op, float16_t>;
+#endif /* __ARM_FEATURE_FP16_VECTOR_ARITHMETIC */
+
+ auto it = map_function.find(function_to_call);
+
+ if (it != map_function.end())
+ {
+ auto func = it->second;
+ return [func](const ITensor *input, ITensor *output, const Window &window) {
+ func(input, output, window);
+ };
+ }
+ return nullptr;
+}
+} // namespace
+
+NEElementwiseUnaryKernelEx::NEElementwiseUnaryKernelEx()
+ : _function(nullptr), _input(nullptr), _output(nullptr)
+{
+}
+
+void NEElementwiseUnaryKernelEx::configure(ElementWiseUnaryEx op, const ITensor *input,
+ ITensor *output)
+{
+ ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(*input->info(), *output->info()));
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+
+ // Configure kernel window
+ const std::pair<TensorShape, ValidRegion> broadcast_pair =
+ ITensorInfo::broadcast_shape_and_valid_region(*input->info());
+ const TensorShape &out_shape = broadcast_pair.first;
+ const ValidRegion &valid_region = broadcast_pair.second;
+
+ // Auto initialize output if not initialized
+ auto_init_if_empty(*output->info(), out_shape, 1, input->info()->data_type());
+
+ Window win = calculate_max_window(valid_region);
+
+ _input = input;
+ _output = output;
+
+ INEKernel::configure(win);
+
+ switch (op)
+ {
+ case ElementWiseUnaryEx::NEG:
+ _function = configure_func<ElementWiseUnaryEx::NEG>(input, output);
+ break;
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+}
+
+Status NEElementwiseUnaryKernelEx::validate_arguments(const ITensorInfo &input,
+ const ITensorInfo &output)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_CPU_F16_UNSUPPORTED(&input);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(&input, 1, DataType::F16, DataType::F32,
+ DataType::S32);
+
+ // Validate in case of configured output
+ if (output.total_size() > 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(&input, &output);
+ }
+
+ return Status{};
+}
+
+Status NEElementwiseUnaryKernelEx::validate(ElementWiseUnaryEx op, const ITensorInfo *input,
+ const ITensorInfo *output)
+{
+ ARM_COMPUTE_UNUSED(op);
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, output);
+ ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(*input, *output));
+ return Status{};
+}
+
+void NEElementwiseUnaryKernelEx::run(const Window &window, const ThreadInfo &info)
+{
+ ARM_COMPUTE_UNUSED(info);
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(INEKernel::window(), window);
+ ARM_COMPUTE_ERROR_ON(_function == nullptr);
+ _function(_input, _output, window);
+}
+} // namespace arm_compute
diff --git a/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEPReLUKernel.cpp b/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEPReLUKernel.cpp
new file mode 100644
index 000000000..ad1bb9051
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/NEON/kernels/NEPReLUKernel.cpp
@@ -0,0 +1,274 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/core/NEON/kernels/NEPReLUKernel.h"
+
+#include "arm_compute/core/ITensor.h"
+#include "arm_compute/core/NEON/NEAsymm.h"
+#include "arm_compute/core/NEON/NEElementwiseOperationFuncs.h"
+#include "arm_compute/core/NEON/wrapper/wrapper.h"
+#include "arm_compute/core/TensorInfo.h"
+#include "arm_compute/core/Window.h"
+
+#include <arm_neon.h>
+
+using namespace arm_compute;
+namespace
+{
+
+/** Conditional element-wise operations */
+enum class ConditionalOperation
+{
+ PRELU, /**< (x * y) for x < 0, x for x >= 0 */
+};
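+// e.g. PRELU(-2.0f, 0.25f) = -0.5f (negative inputs are scaled by alpha),
+// while PRELU(3.0f, 0.25f) = 3.0f (non-negative inputs pass through).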
+
+template <ConditionalOperation op, typename ScalarType>
+inline ScalarType elementwise_conditional_op_scalar(const ScalarType &a, const ScalarType &b)
+{
+ auto res = ScalarType(0);
+
+ switch (op)
+ {
+ case ConditionalOperation::PRELU:
+ res = a < 0 ? a * b : a;
+ break;
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+ return res;
+}
+
+template <ConditionalOperation op>
+inline uint8_t elementwise_conditional_op_quantized_scalar(const float &a, const float &b,
+ QuantizationInfo qinfo)
+{
+ return qinfo.quantize(elementwise_conditional_op_scalar<op>(a, b), RoundingPolicy::TO_NEAREST_UP);
+}
+
+template <ConditionalOperation op, typename VectorType>
+inline VectorType elementwise_conditional_op(const VectorType &a, const VectorType &b)
+{
+ VectorType res = {0, 0, 0, 0};
+ VectorType const_0 = {0, 0, 0, 0};
+
+ switch (op)
+ {
+ case ConditionalOperation::PRELU:
+      res = wrapper::vbsl(wrapper::vcgt(a, const_0), a, wrapper::vmul(a, b));
+ break;
+ default:
+ ARM_COMPUTE_ERROR("NOT_SUPPORTED!");
+ }
+ return res;
+}
+
+template <ConditionalOperation op>
+inline float32x4x4_t elementwise_conditional_op(const float32x4x4_t &a, const float32x4x4_t &b)
+{
+ float32x4x4_t out = {{
+ elementwise_conditional_op<op>(a.val[0], b.val[0]),
+ elementwise_conditional_op<op>(a.val[1], b.val[1]),
+ elementwise_conditional_op<op>(a.val[2], b.val[2]),
+ elementwise_conditional_op<op>(a.val[3], b.val[3]),
+ }};
+ return out;
+}
+
+template <ConditionalOperation op, typename ScalarType, typename VectorType>
+inline VectorType elementwise_conditional_op_broadcast(const VectorType &a,
+ const ScalarType &broadcast_value,
+ const bool reorder)
+{
+ VectorType broadcast_vector = wrapper::vdup_n(broadcast_value, wrapper::traits::vector_128_tag());
+ return elementwise_conditional_op<op>(reorder ? broadcast_vector : a,
+ reorder ? a : broadcast_vector);
+}
+
+template <ConditionalOperation op, typename ScalarType, typename VectorType>
+inline int elementwise_conditional_op_loop(int window_start_x, int window_end_x, int window_step_x,
+ const ScalarType *input1_ptr,
+ const ScalarType *input2_ptr, ScalarType *output_ptr)
+{
+ int x = window_start_x;
+ for (; x <= (window_end_x - window_step_x); x += window_step_x)
+ {
+ const auto a = wrapper::vloadq(input1_ptr + x);
+ const auto b = wrapper::vloadq(input2_ptr + x);
+ wrapper::vstore(output_ptr + x, elementwise_conditional_op<op>(a, b));
+ }
+ return x;
+}
+
+template <ConditionalOperation op>
+inline int elementwise_conditional_op_quantized_loop(int window_start_x, int window_end_x,
+ int window_step_x, const uint8_t *input1_ptr,
+ const uint8_t *input2_ptr, uint8_t *output_ptr,
+ int32x4_t voffset1, int32x4_t voffset2,
+ float32x4_t vscale1, float32x4_t vscale2,
+ float32x4_t voffseto, float32x4_t invvscaleo)
+{
+ int x = window_start_x;
+ for (; x <= (window_end_x - window_step_x); x += window_step_x)
+ {
+ // Get inputs and compute output
+ const float32x4x4_t af = load_quantized(input1_ptr + x, voffset1, vscale1);
+ const float32x4x4_t bf = load_quantized(input2_ptr + x, voffset2, vscale2);
+ const float32x4x4_t rf = elementwise_conditional_op<op>(af, bf);
+ store_quantized(output_ptr + x, rf, voffseto, invvscaleo);
+ }
+ return x;
+}
+
+template <ConditionalOperation op, typename ScalarType, typename VectorType>
+inline int elementwise_conditional_op_broadcast_loop(int window_start_x, int window_end_x,
+ int window_step_x,
+ const ScalarType *non_broadcast_input_ptr,
+ const ScalarType &broadcast_value,
+ ScalarType *output_ptr, const bool reorder)
+{
+ int x = window_start_x;
+ for (; x <= (window_end_x - window_step_x); x += window_step_x)
+ {
+ const auto a = wrapper::vloadq((non_broadcast_input_ptr + x));
+ wrapper::vstore(output_ptr + x,
+ elementwise_conditional_op_broadcast<op>(a, broadcast_value, reorder));
+ }
+ return x;
+}
+
+template <ConditionalOperation op>
+inline int elementwise_conditional_op_quantized_broadcast_loop(
+ int window_start_x, int window_end_x, int window_step_x, const uint8_t *non_broadcast_input_ptr,
+ float32x4x4_t broadcast_vector, uint8_t *output_ptr, int32x4_t voffset_non_broadcast,
+ float32x4_t vscale_non_broadcast, float32x4_t voffseto, float32x4_t invvscaleo, bool reorder)
+{
+ int x = window_start_x;
+ for (; x <= (window_end_x - window_step_x); x += window_step_x)
+ {
+ const float32x4x4_t af =
+ load_quantized(non_broadcast_input_ptr + x, voffset_non_broadcast, vscale_non_broadcast);
+ const float32x4x4_t rf = elementwise_conditional_op<op>(reorder ? broadcast_vector : af,
+ reorder ? af : broadcast_vector);
+ store_quantized(output_ptr + x, rf, voffseto, invvscaleo);
+ }
+ return x;
+}
+
+template <ConditionalOperation op, typename ScalarType, typename VectorType>
+void elementwise_conditional_op(const ITensor *in1, const ITensor *in2, ITensor *out,
+ const Window &window)
+{
+ elementwise_op(in1, in2, out, window, &elementwise_conditional_op_scalar<op, ScalarType>,
+ &elementwise_conditional_op_broadcast_loop<op, ScalarType, VectorType>,
+ &elementwise_conditional_op_loop<op, ScalarType, VectorType>);
+}
+
+template <ConditionalOperation op>
+void elementwise_conditional_op_quantized(const ITensor *in1, const ITensor *in2, ITensor *out,
+ const Window &window)
+{
+ elementwise_op_quantized(in1, in2, out, window, &elementwise_conditional_op_quantized_scalar<op>,
+ &elementwise_conditional_op_quantized_broadcast_loop<op>,
+ &elementwise_conditional_op_quantized_loop<op>);
+}
+} // namespace
+
+NEPReLUKernel::NEPReLUKernel() : _input(nullptr), _alpha(nullptr), _output(nullptr) {}
+
+void NEPReLUKernel::configure(const ITensor *input, const ITensor *alpha, ITensor *output)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, alpha, output);
+ ARM_COMPUTE_ERROR_THROW_ON(validate_arguments(*input->info(), *alpha->info(), *output->info()));
+
+ // Configure kernel window
+ const std::pair<TensorShape, ValidRegion> broadcast_pair =
+ ITensorInfo::broadcast_shape_and_valid_region(*input->info(), *alpha->info());
+ const TensorShape &out_shape = broadcast_pair.first;
+ const ValidRegion &valid_region = broadcast_pair.second;
+
+ // Auto initialize output if not initialized
+ auto_init_if_empty(*output->info(), out_shape, 1, input->info()->data_type());
+
+ Window win = calculate_max_window(valid_region);
+
+ _input = input;
+ _alpha = alpha;
+ _output = output;
+ INEKernel::configure(win);
+}
+
+void NEPReLUKernel::run(const Window &window, const ThreadInfo &info)
+{
+ ARM_COMPUTE_UNUSED(info);
+ ARM_COMPUTE_ERROR_ON_UNCONFIGURED_KERNEL(this);
+ ARM_COMPUTE_ERROR_ON_INVALID_SUBWINDOW(IKernel::window(), window);
+
+ if (_input->info()->data_type() == DataType::F32)
+ {
+ elementwise_conditional_op<ConditionalOperation::PRELU, float, float32x4_t>(_input, _alpha,
+ _output, window);
+ }
+ else if (_input->info()->data_type() == DataType::QASYMM8)
+ {
+ elementwise_conditional_op_quantized<ConditionalOperation::PRELU>(_input, _alpha, _output,
+ window);
+ }
+ else
+ {
+ ARM_COMPUTE_ERROR("Wrong Type");
+ }
+}
+
+Status NEPReLUKernel::validate_arguments(const ITensorInfo &input, const ITensorInfo &alpha,
+ const ITensorInfo &output)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(&input, 1, DataType::QASYMM8, DataType::F32);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(&input, &alpha, &output);
+
+ const TensorShape out_shape =
+ TensorShape::broadcast_shape(input.tensor_shape(), alpha.tensor_shape());
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(out_shape.total_size() == 0,
+ "Inputs are not broadcast compatible");
+
+ // Checks performed when output is configured
+ if (output.total_size() > 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(
+ detail::have_different_dimensions(out_shape, output.tensor_shape(), 0),
+ "Wrong shape for output");
+ }
+
+ return Status{};
+}
+
+Status NEPReLUKernel::validate(const ITensorInfo *input, const ITensorInfo *alpha,
+ const ITensorInfo *output)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, alpha, output);
+ ARM_COMPUTE_RETURN_ON_ERROR(validate_arguments(*input, *alpha, *output));
+
+ return Status{};
+}
diff --git a/runtimes/libs/ARMComputeEx/src/core/UtilsEx.cpp b/runtimes/libs/ARMComputeEx/src/core/UtilsEx.cpp
new file mode 100644
index 000000000..94242b56b
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/core/UtilsEx.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/core/UtilsEx.h"
+#include "arm_compute/core/Error.h"
+
+using namespace arm_compute;
+
+const std::pair<unsigned int, unsigned int>
+arm_compute::transposeconv_output_dimensions(unsigned int in_width, unsigned int in_height,
+ unsigned int kernel_width, unsigned int kernel_height,
+ const PadStrideInfo &info, unsigned int invalid_right,
+ unsigned int invalid_bottom)
+{
+ const unsigned int stride_x = info.stride().first;
+ const unsigned int stride_y = info.stride().second;
+ const unsigned int padx = info.pad_left() + info.pad_right();
+ const unsigned int pady = info.pad_top() + info.pad_bottom();
+
+ ARM_COMPUTE_ERROR_ON(in_width < 1 || in_height < 1);
+ ARM_COMPUTE_ERROR_ON(kernel_width <= padx);
+ ARM_COMPUTE_ERROR_ON(kernel_height <= pady);
+
+  // Find the transpose conv output dimensions:
+  //   tconv_out + pad = (in - 1) * stride + kernel + invalid
+  //   tconv_out       = (in - 1) * stride + kernel + invalid - pad
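+  // e.g. in = 4, stride = 2, kernel = 3, pad = 0, invalid = 0:
+  //   tconv_out = (4 - 1) * 2 + 3 = 9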
+ const int w = stride_x * (in_width - 1) + kernel_width - padx + invalid_right;
+ const int h = stride_y * (in_height - 1) + kernel_height - pady + invalid_bottom;
+
+ return std::make_pair<unsigned int, unsigned int>(w, h);
+}
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/CLFunctionsEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/CLFunctionsEx.cpp
new file mode 100644
index 000000000..158fe0b0c
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/CLFunctionsEx.cpp
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arm_compute/runtime/CL/CLFunctionsEx.h"
+
+// NOTE This intentionally empty translation unit verifies that
+// "CLFunctionsEx.h" compiles on its own.
+// DO NOT REMOVE this file.
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLArgOperation.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLArgOperation.cpp
new file mode 100644
index 000000000..ae64a6edd
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLArgOperation.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/runtime/CL/functions/CLArgOperation.h"
+
+#include "arm_compute/core/CL/kernels/CLArgOperationKernel.h"
+#include "arm_compute/runtime/CL/CLScheduler.h"
+
+namespace arm_compute
+{
+
+CLArgOperation::CLArgOperation()
+{
+ // DO NOTHING
+}
+
+void CLArgOperation::configure(ICLTensor *input, ICLTensor *output, std::vector<uint32_t> axis,
+ ArgOperation op)
+{
+ ARM_COMPUTE_ERROR_THROW_ON(validate(input->info(), axis, output->info(), op));
+ _input = input;
+ _output = output;
+ _axis = axis;
+ _arg_op = op;
+  // NOTE The axis vector must not contain duplicate values.
+ _num_of_kernels = axis.size();
+ const size_t num_of_interm_tensors = _num_of_kernels - 1;
+
+ _interm_tensors = arm_compute::support::cpp14::make_unique<CLTensor[]>(num_of_interm_tensors);
+ _argop_kernels =
+ arm_compute::support::cpp14::make_unique<CLArgOperationKernel[]>(_num_of_kernels);
+
+ TensorShape shape{input->info()->tensor_shape()};
+ for (size_t i = 0; i < num_of_interm_tensors; i++)
+ {
+ shape.set(_axis[i], 1);
+ _interm_tensors[i].allocator()->init(
+ TensorInfo(shape, input->info()->num_channels(), input->info()->data_type())
+ .set_data_layout(input->info()->data_layout()));
+ _interm_tensors[i].allocator()->allocate();
+ }
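+  // e.g. an input of shape [4, 3, 2] with axis = {0, 1} creates a single
+  // intermediate tensor of shape [1, 3, 2]; the second kernel then reduces it
+  // to the final [1, 1, 2] output.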
+
+  // Build a vector of the ICLTensors in execution order.
+ std::vector<ICLTensor *> tensors;
+ tensors.emplace_back(input);
+ for (size_t i = 0; i < num_of_interm_tensors; i++)
+ {
+ tensors.emplace_back(_interm_tensors.get() + i);
+ }
+ tensors.emplace_back(output);
+
+  // Configure an ArgMinMax kernel for each requested axis
+ for (size_t i = 0; i < _num_of_kernels; i++)
+ {
+ _argop_kernels[i].configure(tensors[i], tensors[i + 1], _axis[i], op);
+ }
+}
+
+Status CLArgOperation::validate(const ITensorInfo *input, const std::vector<uint32_t> &axis,
+ const ITensorInfo *output, ArgOperation op)
+{
+ const size_t num_of_kernels = axis.size();
+ const size_t num_of_interm_tensors = num_of_kernels - 1;
+
+ // Create temporary tensor infos
+ auto interm_tensors =
+ arm_compute::support::cpp14::make_unique<TensorInfo[]>(num_of_interm_tensors);
+
+ // Create intermediate tensor info
+ TensorShape shape{input->tensor_shape()};
+
+ for (size_t i = 0; i < num_of_interm_tensors; i++)
+ {
+ shape.set(axis[i], 1);
+ interm_tensors[i].set_data_type(input->data_type());
+ interm_tensors[i].set_tensor_shape(shape);
+ interm_tensors[i].set_num_channels(input->num_channels());
+ }
+
+  // Build a vector of the ITensorInfos in execution order.
+ std::vector<const ITensorInfo *> tensors;
+ tensors.emplace_back(input);
+ for (size_t i = 0; i < num_of_interm_tensors; i++)
+ {
+ tensors.emplace_back(interm_tensors.get() + i);
+ }
+ tensors.emplace_back(output);
+
+  // Validate ArgMinMax for every kernel in the chain
+ for (size_t i = 0; i < num_of_kernels; i++)
+ {
+ ARM_COMPUTE_RETURN_ON_ERROR(
+ CLArgOperationKernel::validate(tensors[i], tensors[i + 1], axis[i], op));
+ }
+
+ return Status{};
+}
+
+void CLArgOperation::run()
+{
+ for (size_t i = 0; i < _num_of_kernels; ++i)
+ {
+ CLScheduler::get().enqueue(_argop_kernels[i]);
+ }
+}
+
+} // namespace arm_compute
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLBinaryLogicalOp.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLBinaryLogicalOp.cpp
index 7c5fe5eda..7c5fe5eda 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLBinaryLogicalOp.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLBinaryLogicalOp.cpp
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLCast.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLCast.cpp
index 8e106737c..8e106737c 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLCast.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLCast.cpp
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLDepthToSpace.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLDepthToSpace.cpp
index c2e4ca9ff..c2e4ca9ff 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLDepthToSpace.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLDepthToSpace.cpp
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLEmbeddingLookup.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLEmbeddingLookup.cpp
index 2781784ca..2781784ca 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLEmbeddingLookup.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLEmbeddingLookup.cpp
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLFullyConnectedReshapingLayer.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLFullyConnectedReshapingLayer.cpp
new file mode 100644
index 000000000..938852e5b
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLFullyConnectedReshapingLayer.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arm_compute/runtime/CL/functions/CLFullyConnectedReshapingLayer.h"
+
+using namespace arm_compute;
+
+void CLFullyConnectedReshapingLayer::configure(const arm_compute::ICLTensor *input,
+ const arm_compute::ICLTensor *weights,
+ const arm_compute::ICLTensor *biases,
+ arm_compute::ICLTensor *output, bool needs_reshape,
+ const arm_compute::TensorShape &reshape)
+{
+ _input = input;
+ _weights = weights;
+ _biases = biases;
+ _output = output;
+ _needs_reshape = needs_reshape;
+
+ if (_needs_reshape)
+ {
+    // Reshape the input to the caller-provided `reshape` shape before the
+    // fully connected layer
+ auto_init_if_empty(*_cl_buffer.info(),
+ _input->info()->clone()->set_tensor_shape(reshape).set_data_layout(
+ _input->info()->data_layout()));
+ _cl_reshape.configure(_input, &_cl_buffer);
+
+ _cl_fc.configure(&_cl_buffer, _weights, _biases, _output);
+
+ // NOTE _cl_buffer is inaccessible from outside, and thus it is safe to invoke allocate here.
+ _cl_buffer.allocator()->allocate();
+ }
+ else
+ {
+ _cl_fc.configure(_input, _weights, _biases, _output);
+ }
+}
+
+void CLFullyConnectedReshapingLayer::run(void)
+{
+ if (_needs_reshape)
+ _cl_reshape.run();
+
+ _cl_fc.run();
+}
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLGatherEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLGatherEx.cpp
new file mode 100644
index 000000000..6cad9bd2e
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLGatherEx.cpp
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2016-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/runtime/CL/functions/CLGatherEx.h"
+
+#include "arm_compute/core/CL/ICLTensor.h"
+#include "arm_compute/core/CL/kernels/CLGatherExKernel.h"
+
+using namespace arm_compute;
+
+void CLGatherEx::configure(const ICLTensor *input, const ICLTensor *indices, ICLTensor *output,
+ int axis)
+{
+ auto k = arm_compute::support::cpp14::make_unique<CLGatherExKernel>();
+ k->configure(input, indices, output, axis);
+ _kernel = std::move(k);
+}
+
+Status CLGatherEx::validate(const ITensorInfo *input, const ITensorInfo *indices,
+ const ITensorInfo *output, int axis)
+{
+ return CLGatherExKernel::validate(input, indices, output, axis);
+}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLHashtableLookup.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLHashtableLookup.cpp
index 7180e9356..7180e9356 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLHashtableLookup.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLHashtableLookup.cpp
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLNeg.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLNeg.cpp
index be35ea732..be35ea732 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLNeg.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLNeg.cpp
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLPReLU.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLPReLU.cpp
index 38adedd10..38adedd10 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLPReLU.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLPReLU.cpp
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLRNNLayerEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLRNNLayerEx.cpp
new file mode 100644
index 000000000..2a34c0664
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLRNNLayerEx.cpp
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/CL/functions/CLRNNLayerEx.h"
+
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/core/Utils.h"
+#include "arm_compute/core/utils/misc/ShapeCalculator.h"
+#include "arm_compute/runtime/CL/CLScheduler.h"
+#include "support/ToolchainSupport.h"
+
+#include <utility>
+
+using namespace arm_compute;
+using namespace arm_compute::misc::shape_calculator;
+
+CLRNNLayerEx::CLRNNLayerEx(std::shared_ptr<IMemoryManager> memory_manager)
+ : _memory_group(std::move(memory_manager)), _gemm_state_f(), _add_kernel(),
+ _activation_kernel(), _fully_connected_kernel(), _copy_kernel(), _fully_connected_out(),
+ _gemm_output(), _add_output(), _is_prepared(false)
+{
+}
+
+Status CLRNNLayerEx::validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *recurrent_weights, const ITensorInfo *bias,
+ const ITensorInfo *hidden_state, const ITensorInfo *output,
+ const ActivationLayerInfo &info)
+{
+ const int idx_width = 0;
+ const int idx_height = 1;
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, weights, recurrent_weights, bias, hidden_state,
+ output);
+ ARM_COMPUTE_RETURN_ERROR_ON(input->dimension(idx_width) != weights->dimension(idx_width));
+ ARM_COMPUTE_RETURN_ERROR_ON(weights->dimension(idx_height) !=
+ recurrent_weights->dimension(idx_width));
+  ARM_COMPUTE_RETURN_ERROR_ON(recurrent_weights->dimension(idx_width) !=
+                              recurrent_weights->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON(bias->num_dimensions() != 1);
+ ARM_COMPUTE_RETURN_ERROR_ON(bias->dimension(idx_width) != weights->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON(hidden_state->dimension(idx_width) != weights->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON(hidden_state->dimension(idx_height) != input->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DIMENSIONS(output->tensor_shape(),
+ hidden_state->tensor_shape());
+
+ auto shape_info =
+ TensorInfo(compute_rnn_shape(recurrent_weights, hidden_state->dimension(idx_height)), 1,
+ input->data_type());
+
+ ARM_COMPUTE_RETURN_ON_ERROR(CLFullyConnectedLayer::validate(input, weights, bias, &shape_info));
+ ARM_COMPUTE_RETURN_ON_ERROR(
+ CLGEMM::validate(hidden_state, recurrent_weights, nullptr, &shape_info, 1.f, 0.f));
+ ARM_COMPUTE_RETURN_ON_ERROR(CLSaturatedArithmeticOperationKernel::validate(
+ ArithmeticOperation::ADD, &shape_info, &shape_info, &shape_info, ConvertPolicy::SATURATE));
+ ARM_COMPUTE_RETURN_ON_ERROR(CLActivationLayerKernel::validate(&shape_info, &shape_info, info));
+
+ return Status{};
+}
+
+void CLRNNLayerEx::configure(const ICLTensor *input, const ICLTensor *weights,
+ const ICLTensor *recurrent_weights, const ICLTensor *bias,
+ ICLTensor *hidden_state, ICLTensor *output, ActivationLayerInfo &info)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, weights, recurrent_weights, bias, hidden_state, output);
+ ARM_COMPUTE_ERROR_THROW_ON(CLRNNLayerEx::validate(input->info(), weights->info(),
+ recurrent_weights->info(), bias->info(),
+ hidden_state->info(), output->info(), info));
+
+ const int idx_height = 1;
+ TensorShape shape =
+ compute_rnn_shape(recurrent_weights->info(), hidden_state->info()->dimension(idx_height));
+
+ _is_prepared = false;
+
+ _fully_connected_out.allocator()->init(TensorInfo(shape, 1, input->info()->data_type()));
+ _gemm_output.allocator()->init(TensorInfo(shape, 1, input->info()->data_type()));
+
+ // Manage intermediate buffers and configure
+ _memory_group.manage(&_fully_connected_out);
+ _fully_connected_kernel.configure(input, weights, bias, &_fully_connected_out);
+
+ _memory_group.manage(&_gemm_output);
+ _gemm_state_f.configure(hidden_state, recurrent_weights, nullptr, &_gemm_output, 1.f, 0.f);
+
+ _add_output.allocator()->init(TensorInfo(shape, 1, input->info()->data_type()));
+ _memory_group.manage(&_add_output);
+
+ _add_kernel.configure(ArithmeticOperation::ADD, &_fully_connected_out, &_gemm_output,
+ &_add_output, ConvertPolicy::SATURATE);
+
+ _fully_connected_out.allocator()->allocate();
+ _gemm_output.allocator()->allocate();
+
+ _activation_kernel.configure(&_add_output, hidden_state, info);
+ _add_output.allocator()->allocate();
+
+ _copy_kernel.configure(hidden_state, output);
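+
+  // The configured pipeline computes, on every run():
+  //   hidden_state = activation(FC(input, weights, bias) + hidden_state * recurrent_weights)
+  //   output       = hidden_state (via the copy kernel)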
+}
+
+void CLRNNLayerEx::run()
+{
+ prepare();
+
+ _memory_group.acquire();
+
+ _fully_connected_kernel.run();
+ _gemm_state_f.run();
+ CLScheduler::get().enqueue(_add_kernel);
+ CLScheduler::get().enqueue(_activation_kernel);
+
+ // copy hidden out to output
+ CLScheduler::get().enqueue(_copy_kernel);
+
+ _memory_group.release();
+}
+
+void CLRNNLayerEx::prepare()
+{
+ if (!_is_prepared)
+ {
+ _fully_connected_kernel.prepare();
+ _gemm_state_f.prepare();
+
+ _is_prepared = true;
+ }
+}
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp
new file mode 100644
index 000000000..1727ec66b
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLReduceOperation.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/runtime/CL/functions/CLReduceOperation.h"
+
+#include "arm_compute/core/CL/kernels/CLReduceOperationKernel.h"
+#include "arm_compute/runtime/CL/CLScheduler.h"
+
+using namespace arm_compute;
+
+CLReduceOperation::CLReduceOperation()
+ : _input(nullptr), _output(nullptr), _axis(), _interm_tensors(), _reduce_kernels()
+{
+}
+
+Status CLReduceOperation::validate(const ITensorInfo *input, const ITensorInfo *output,
+ const std::set<uint32_t> &axis, const ReduceOperation &op)
+{
+ const size_t num_of_kernels = axis.size();
+ const size_t num_of_interm_tensors = num_of_kernels - 1;
+
+ // Create temporary tensor infos
+ auto interm_tensors =
+ arm_compute::support::cpp14::make_unique<TensorInfo[]>(num_of_interm_tensors);
+
+ // Create intermediate tensor info
+ TensorShape shape{input->tensor_shape()};
+
+ auto it = axis.begin();
+ for (size_t i = 0; i < num_of_interm_tensors; ++i, ++it)
+ {
+ shape.set(*it, 1);
+ interm_tensors[i].set_data_type(input->data_type());
+ interm_tensors[i].set_tensor_shape(shape);
+ interm_tensors[i].set_num_channels(input->num_channels());
+ interm_tensors[i].set_data_layout(input->data_layout());
+ }
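+  // e.g. reducing an input of shape [4, 3, 2] over axis = {0, 2} validates the
+  // chain [4, 3, 2] -> [1, 3, 2] -> [1, 3, 1].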
+
+  // Build a vector of the ITensorInfos in execution order.
+ std::vector<const ITensorInfo *> tensors;
+ tensors.emplace_back(input);
+ for (size_t i = 0; i < num_of_interm_tensors; ++i)
+ {
+ tensors.emplace_back(interm_tensors.get() + i);
+ }
+ tensors.emplace_back(output);
+
+  // Validate ReduceOperation for every kernel in the chain
+ it = axis.begin();
+ for (size_t i = 0; i < num_of_kernels; ++i, ++it)
+ {
+ ARM_COMPUTE_RETURN_ON_ERROR(
+ CLReduceOperationKernel::validate(tensors[i], tensors[i + 1], *it, op));
+ }
+
+ return Status{};
+}
+
+void CLReduceOperation::configure(ICLTensor *input, ICLTensor *output,
+ const std::set<uint32_t> &axis, ReduceOperation op)
+{
+ ARM_COMPUTE_ERROR_THROW_ON(validate(input->info(), output->info(), axis, op));
+
+ _axis = axis;
+
+ _input = input;
+ _output = output;
+
+  // NOTE The axis values are guaranteed unique since they come in a std::set.
+ const size_t num_of_kernels = axis.size();
+ const size_t num_of_interm_tensors = num_of_kernels - 1;
+
+ _interm_tensors = arm_compute::support::cpp14::make_unique<CLTensor[]>(num_of_interm_tensors);
+ _reduce_kernels =
+ arm_compute::support::cpp14::make_unique<CLReduceOperationKernel[]>(num_of_kernels);
+
+ TensorShape shape{input->info()->tensor_shape()};
+ auto it = axis.begin();
+ for (size_t i = 0; i < num_of_interm_tensors; ++i, ++it)
+ {
+ shape.set(*it, 1);
+ _interm_tensors[i].allocator()->init(
+ TensorInfo(shape, input->info()->num_channels(), input->info()->data_type())
+ .set_data_layout(input->info()->data_layout()));
+ _interm_tensors[i].allocator()->allocate();
+ }
+
+  // Build a vector of the ICLTensors in execution order.
+ std::vector<ICLTensor *> tensors;
+ tensors.emplace_back(input);
+ for (size_t i = 0; i < num_of_interm_tensors; ++i)
+ {
+ tensors.emplace_back(_interm_tensors.get() + i);
+ }
+ tensors.emplace_back(output);
+
+  // Configure a ReduceOperation kernel for each axis
+ it = axis.begin();
+ for (size_t i = 0; i < num_of_kernels; ++i, ++it)
+ {
+ _reduce_kernels[i].configure(tensors[i], tensors[i + 1], *it, op);
+ }
+}
+
+void CLReduceOperation::run()
+{
+ const size_t num_of_kernels = _axis.size();
+ for (size_t i = 0; i < num_of_kernels; ++i)
+ {
+ CLScheduler::get().enqueue(_reduce_kernels[i]);
+ }
+}
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToBatchND.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToBatchND.cpp
index c03826891..c03826891 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToBatchND.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToBatchND.cpp
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToDepth.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToDepth.cpp
index 0f455f96f..0f455f96f 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToDepth.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLSpaceToDepth.cpp
diff --git a/libs/ARMComputeEx/src/runtime/CL/functions/CLTopKV2.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTopKV2.cpp
index 19177497c..19177497c 100644
--- a/libs/ARMComputeEx/src/runtime/CL/functions/CLTopKV2.cpp
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTopKV2.cpp
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayer.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayer.cpp
new file mode 100644
index 000000000..40e21671d
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayer.cpp
@@ -0,0 +1,238 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2018 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/CL/functions/CLTransposeConvLayer.h"
+#include "arm_compute/core/utils/misc/ShapeCalculatorEx.h"
+
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/Utils.h"
+#include "arm_compute/core/UtilsEx.h"
+#include "arm_compute/core/Validate.h"
+#include "arm_compute/core/utils/misc/ShapeCalculator.h"
+#include "arm_compute/runtime/CL/CLScheduler.h"
+#include "arm_compute/runtime/CPP/CPPScheduler.h"
+
+#include <memory>
+#include <tuple>
+
+using namespace arm_compute;
+using namespace arm_compute::misc::shape_calculator;
+
+CLTransposeConvLayer::CLTransposeConvLayer(std::shared_ptr<IMemoryManager> memory_manager) // NOLINT
+ : _memory_group(std::move(memory_manager)),
+ _scale_f(),
+ _conv_f(),
+ _flip_weights(),
+ _scaled_output(),
+ _original_weights(nullptr),
+ _weights_flipped(),
+ _is_prepared(false)
+{
+}
+
+Status CLTransposeConvLayer::validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *bias, ITensorInfo *output,
+ const PadStrideInfo &info, unsigned int invalid_right,
+ unsigned int invalid_bottom, const WeightsInfo &weights_info)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, weights, output);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::QASYMM8, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_LAYOUT(input, weights);
+
+ const DataLayout data_layout = input->data_layout();
+
+ const size_t idx_w = get_data_layout_dimension_index(data_layout, DataLayoutDimension::WIDTH);
+ const size_t idx_h = get_data_layout_dimension_index(data_layout, DataLayoutDimension::HEIGHT);
+ const size_t idx_c = get_data_layout_dimension_index(data_layout, DataLayoutDimension::CHANNEL);
+
+ ARM_COMPUTE_RETURN_ERROR_ON(weights->dimension(idx_w) != weights->dimension(idx_h));
+ ARM_COMPUTE_RETURN_ERROR_ON(weights->dimension(idx_w) < 1);
+
+ const unsigned int kernel_x = weights->dimension(idx_w);
+ const unsigned int kernel_y = weights->dimension(idx_h);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(invalid_right > kernel_x - 1,
+ "invalid_right must be smaller than kernel_x");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(invalid_bottom > kernel_y - 1,
+ "inner_border_top must be smaller than kernel_y");
+
+ // NOTE invalid_right and invalid_bottom are additions on top of the existing CLDeconvolutionLayer.
+ auto out_dims = transposeconv_output_dimensions(
+ input->dimension(idx_w), input->dimension(idx_h), weights->dimension(idx_w),
+ weights->dimension(idx_h), info, invalid_right, invalid_bottom);
+
+ const TensorShape output_shape = compute_transposeconv_output_shape(out_dims, *input, *weights);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output, weights);
+
+ if (bias != nullptr)
+ {
+ if (is_data_type_quantized_asymmetric(input->data_type()))
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(bias, 1, DataType::S32);
+ }
+ else
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, bias);
+ }
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_LAYOUT(input, bias);
+ }
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(idx_w) != output_shape[idx_w],
+ "Output's width is invalid.");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(idx_h) != output_shape[idx_h],
+ "Output's height is invalid.");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(idx_c) != output_shape[idx_c],
+ "Output's depth is invalid.");
+
+ unsigned int pad_left = 0;
+ unsigned int pad_right = 0;
+ unsigned int pad_top = 0;
+ unsigned int pad_bottom = 0;
+ const TensorShape scale_out_shape = compute_transposeconv_upsampled_shape(
+ *input, *weights, info, out_dims, invalid_right, invalid_bottom, pad_left, pad_right, pad_top,
+ pad_bottom);
+ TensorInfo scale_out_info(input->clone()
+ ->set_is_resizable(true)
+ .reset_padding()
+ .set_tensor_shape(scale_out_shape)
+ .set_data_layout(data_layout));
+ const PadStrideInfo conv_info(1, 1, 0, 0, 0, 0, DimensionRoundingType::CEIL);
+
+ ARM_COMPUTE_RETURN_ON_ERROR(
+ CLTransposeConvLayerUpsample::validate(input, &scale_out_info, BorderSize(0, 0), info));
+ ARM_COMPUTE_RETURN_ON_ERROR(CLConvolutionLayer::validate(&scale_out_info, weights, bias, output,
+ conv_info, weights_info));
+
+ return Status{};
+}
+
+void CLTransposeConvLayer::configure(ICLTensor *input, ICLTensor *weights, const ICLTensor *bias,
+ ICLTensor *output, const PadStrideInfo &info,
+ unsigned int invalid_right, unsigned int invalid_bottom,
+ const WeightsInfo &weights_info)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, weights, output);
+
+ const unsigned int stride_x = info.stride().first;
+ const unsigned int stride_y = info.stride().second;
+
+ const DataLayout data_layout = input->info()->data_layout();
+
+ const size_t idx_w = get_data_layout_dimension_index(data_layout, DataLayoutDimension::WIDTH);
+ const size_t idx_h = get_data_layout_dimension_index(data_layout, DataLayoutDimension::HEIGHT);
+
+ _original_weights = weights;
+ _weights_flipped.allocator()->init(weights->info()->clone()->set_data_layout(data_layout));
+ _flip_weights.configure(weights, &_weights_flipped);
+
+ // NOTE invalid_right and invalid_bottom are additions on top of the existing
+ // CLDeconvolutionLayer.
+ auto out_dims = transposeconv_output_dimensions(
+ input->info()->dimension(idx_w), input->info()->dimension(idx_h),
+ weights->info()->dimension(idx_w), weights->info()->dimension(idx_h), info, invalid_right,
+ invalid_bottom);
+
+ const TensorShape output_shape =
+ compute_transposeconv_output_shape(out_dims, *input->info(), *weights->info());
+
+ // Output auto initialization if not yet initialized
+ auto_init_if_empty(
+ *output->info(),
+ input->info()->clone()->set_tensor_shape(output_shape).set_data_layout(data_layout));
+
+ // Perform validation step
+ ARM_COMPUTE_ERROR_THROW_ON(CLTransposeConvLayer::validate(
+ input->info(), weights->info(), bias == nullptr ? nullptr : bias->info(), output->info(),
+ info, invalid_right, invalid_bottom));
+
+ _is_prepared = weights_info.retain_internal_weights();
+
+ _memory_group.manage(&_scaled_output);
+
+ // Find the upsampled dimensions and the padding needed for the convolution with stride 1 in order
+ // to match output shape
+ unsigned int pad_left = 0;
+ unsigned int pad_right = 0;
+ unsigned int pad_top = 0;
+ unsigned int pad_bottom = 0;
+ const TensorShape scale_out_shape = compute_transposeconv_upsampled_shape(
+ *input->info(), *weights->info(), info, out_dims, invalid_right, invalid_bottom, pad_left,
+ pad_right, pad_top, pad_bottom);
+
+ TensorInfo scale_out_info(scale_out_shape, 1, input->info()->data_type(),
+ input->info()->quantization_info());
+ scale_out_info.set_data_layout(data_layout);
+ _scaled_output.allocator()->init(scale_out_info);
+
+ // Configure the scale function
+ const PadStrideInfo upsample_info(stride_x, stride_y, pad_left, pad_right, pad_top, pad_bottom,
+ DimensionRoundingType::FLOOR);
+ _scale_f.configure(input, &_scaled_output, BorderSize(0, 0), upsample_info);
+
+ // Set up the function to convolve the upscaled output
+ const PadStrideInfo conv_info(1, 1, 0, 0, 0, 0, DimensionRoundingType::CEIL);
+ _conv_f.configure(&_scaled_output, &_weights_flipped, bias, output, conv_info, weights_info);
+ _scaled_output.allocator()->allocate();
+}
+
+void CLTransposeConvLayer::run()
+{
+ prepare();
+
+ _memory_group.acquire();
+
+ _scale_f.run();
+ _conv_f.run();
+
+ _memory_group.release();
+}
+
+void CLTransposeConvLayer::prepare()
+{
+ if (!_is_prepared)
+ {
+ ARM_COMPUTE_ERROR_ON(!_original_weights->is_used());
+
+ // Run weights flipping and mark original weights tensor as unused
+ _weights_flipped.allocator()->allocate();
+ _weights_flipped.map(true);
+ _original_weights->map(CLScheduler::get().queue(), true);
+ CPPScheduler::get().schedule(&_flip_weights, Window::DimZ);
+ _weights_flipped.unmap();
+ _original_weights->unmap(CLScheduler::get().queue());
+ _original_weights->mark_as_unused();
+
+ // Prepare convolution
+ _conv_f.prepare();
+
+ if (!_weights_flipped.is_used())
+ {
+ _weights_flipped.allocator()->free();
+ }
+
+ _is_prepared = true;
+ }
+}
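To make the decomposition above concrete: the layer upsamples the input by inserting zeros according to the stride (via CLTransposeConvLayerUpsample), flips the weights once in prepare(), and then runs a stride-1 convolution. A hedged usage sketch, with tensor setup elided and all parameter values illustrative:

    // Transposed convolution = zero-insertion upsample + stride-1 convolution.
    // invalid_right/invalid_bottom trim extra right/bottom columns/rows that the
    // naive output-size computation would otherwise include.
    arm_compute::CLTransposeConvLayer tconv;
    tconv.configure(input, weights, bias, output,
                    arm_compute::PadStrideInfo(2, 2, 0, 0), // stride 2, zero padding
                    /*invalid_right=*/1, /*invalid_bottom=*/1);
    tconv.run(); // first run() calls prepare(), which flips the weights once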
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayerUpsample.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayerUpsample.cpp
new file mode 100644
index 000000000..0ce3e6700
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CL/functions/CLTransposeConvLayerUpsample.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2018 ARM Limited.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "arm_compute/runtime/CL/functions/CLTransposeConvLayerUpsample.h"
+
+#include "arm_compute/core/CL/OpenCL.h"
+#include "arm_compute/core/Utils.h"
+#include "arm_compute/runtime/CL/CLScheduler.h"
+
+#include <cmath>
+#include <memory>
+#include <tuple>
+
+using namespace arm_compute;
+
+CLTransposeConvLayerUpsample::CLTransposeConvLayerUpsample() // NOLINT
+ : _upsample(),
+ _output(nullptr)
+{
+}
+
+Status CLTransposeConvLayerUpsample::validate(const ITensorInfo *input, const ITensorInfo *output,
+ const BorderSize &inner_border,
+ const PadStrideInfo &info)
+{
+ return CLTransposeConvLayerUpsampleKernel::validate(input, output, inner_border, info);
+}
+
+void CLTransposeConvLayerUpsample::configure(ICLTensor *input, ICLTensor *output,
+ const BorderSize &inner_border,
+ const PadStrideInfo &info)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, output);
+
+ _output = output;
+ _upsample.configure(input, _output, inner_border, info);
+}
+
+void CLTransposeConvLayerUpsample::run()
+{
+ _output->map(CLScheduler::get().queue(), true);
+ if (is_data_type_quantized_asymmetric(_output->info()->data_type()))
+ {
+ const uint8_t quantized_zero = _output->info()->quantization_info().offset;
+ std::fill_n(_output->buffer(), _output->info()->total_size(), quantized_zero);
+ }
+ else
+ {
+ memset(_output->buffer(), 0, _output->info()->total_size());
+ }
+ _output->unmap(CLScheduler::get().queue());
+
+ CLScheduler::get().enqueue(_upsample, false);
+}
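The pre-fill in run() above matters for quantized tensors: in QASYMM8 the real value 0.0 is encoded as the quantization offset (zero point), so clearing the buffer with memset(0) would actually write the most negative representable real value. A worked illustration with assumed example values:

    // QASYMM8 decodes as real = scale * (q - offset).
    // With scale = 0.5 and offset = 128:
    //   q = 128 -> real  0.0   (the correct fill value, as used above)
    //   q = 0   -> real -64.0  (what a plain memset(0) would mean)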
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/CPP/functions/CPPUpsampleEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/CPP/functions/CPPUpsampleEx.cpp
new file mode 100644
index 000000000..f8e0ef8a6
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/CPP/functions/CPPUpsampleEx.cpp
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/CPP/functions/CPPUpsampleEx.h"
+
+#include "arm_compute/core/CPP/kernels/CPPUpsampleKernelEx.h"
+#include "support/ToolchainSupport.h"
+
+using namespace arm_compute;
+
+void CPPUpsampleEx::configure(const ITensor *input, ITensor *output, const PadStrideInfo &info)
+{
+ auto k = arm_compute::support::cpp14::make_unique<CPPUpsampleKernelEx>();
+ k->configure(input, output, info);
+ _kernel = std::move(k);
+}
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/NEFunctionsEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/NEFunctionsEx.cpp
new file mode 100644
index 000000000..80fbf359d
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/NEFunctionsEx.cpp
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arm_compute/runtime/NEON/NEFunctionsEx.h"
+
+// NOTE This intentionally empty file exists to verify that "NEFunctionsEx.h"
+// compiles on its own. DO NOT REMOVE this file.
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEArgMinMax.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEArgMinMax.cpp
new file mode 100644
index 000000000..5ba465b61
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEArgMinMax.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/NEON/functions/NEArgMinMax.h"
+
+#include "arm_compute/core/CPP/Validate.h"
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/runtime/NEON/NEScheduler.h"
+
+namespace arm_compute
+{
+
+template <ReductionOperation OP>
+NEArgMinMaxStatic<OP>::NEArgMinMaxStatic(std::shared_ptr<IMemoryManager> memory_manager)
+ : _memory_group(std::move(memory_manager)), _reduction_kernel(), _reduced_out(), _reshape()
+{
+}
+
+template <ReductionOperation OP>
+Status NEArgMinMaxStatic<OP>::validate(const ITensorInfo *input, int axis,
+ const ITensorInfo *output)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input);
+ ARM_COMPUTE_RETURN_ERROR_ON_CPU_F16_UNSUPPORTED(input);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::QASYMM8, DataType::F16,
+ DataType::F32);
+
+ TensorShape out_shape = input->tensor_shape();
+ const int input_dims = input->num_dimensions();
+ int axis_local = axis;
+
+ // Convert negative axis
+ axis_local = wrap_around(axis_local, input_dims);
+
+ ARM_COMPUTE_RETURN_ERROR_ON(axis_local > 3);
+ ARM_COMPUTE_RETURN_ERROR_ON(static_cast<unsigned int>(axis_local) > input->num_dimensions() - 1);
+ out_shape.remove_dimension(axis_local);
+
+ const TensorInfo out_info = output->clone()->set_tensor_shape(out_shape);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(output, &out_info);
+
+ return Status{};
+}
+
+template <ReductionOperation OP>
+void NEArgMinMaxStatic<OP>::configure(ITensor *input, int axis, ITensor *output)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input);
+
+ int axis_local = axis;
+ const int input_dims = input->info()->num_dimensions();
+
+ // Convert negative axis
+ axis_local = wrap_around(axis_local, input_dims);
+
+ // Perform reduction for axis
+ TensorShape intermediate_shape = input->info()->tensor_shape();
+ intermediate_shape.set(axis_local, 1);
+ auto in = input;
+
+ _reduced_out.allocator()->init(TensorInfo(intermediate_shape, output->info()->num_channels(),
+ output->info()->data_type(),
+ output->info()->quantization_info()));
+ _memory_group.manage(&_reduced_out);
+ _reduction_kernel.configure(in, axis_local, &_reduced_out, OP);
+
+ // Allocate intermediate tensor
+ _reduced_out.allocator()->allocate();
+
+ // Configure reshape layer if we want to drop the dimensions
+ TensorShape out_shape = input->info()->tensor_shape();
+ out_shape.remove_dimension(axis_local);
+ auto_init_if_empty(*output->info(), output->info()->clone()->set_tensor_shape(out_shape));
+ _reshape.configure(&_reduced_out, output);
+}
+
+template <ReductionOperation OP> void NEArgMinMaxStatic<OP>::run()
+{
+ MemoryGroupResourceScope scope_mg(_memory_group);
+
+ _reduction_kernel.run();
+ _reshape.run();
+}
+
+// Supported Specializations
+template class NEArgMinMaxStatic<ReductionOperation::ARG_IDX_MAX>;
+template class NEArgMinMaxStatic<ReductionOperation::ARG_IDX_MIN>;
+} // namespace arm_compute
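The negative-axis handling above follows the NumPy/TFLite convention; assuming wrap_around(a, n) computes ((a % n) + n) % n, a quick trace for a 4-D input:

    // input_dims == 4:
    //   axis  0 -> 0      axis  3 -> 3
    //   axis -1 -> 3      (last dimension)
    //   axis -4 -> 0      (first dimension)
    // The subsequent checks then reject anything above 3 or beyond
    // num_dimensions() - 1.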
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEBinaryLogicalOperation.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEBinaryLogicalOperation.cpp
new file mode 100644
index 000000000..7c15fc453
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEBinaryLogicalOperation.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/NEON/functions/NEBinaryLogicalOperation.h"
+#include <arm_compute/core/NEON/kernels/NEBinaryLogicalOperationKernel.h>
+
+#include "arm_compute/core/ITensor.h"
+#include "support/ToolchainSupport.h"
+
+#include <utility>
+
+namespace arm_compute
+{
+
+template <BinaryLogicalOperation COP>
+void NEBinaryLogicalOperationStatic<COP>::configure(ITensor *input1, ITensor *input2,
+ ITensor *output)
+{
+ auto k = arm_compute::support::cpp14::make_unique<NEBinaryLogicalOperationKernel>();
+ k->configure(COP, input1, input2, output);
+ _kernel = std::move(k);
+}
+
+template <BinaryLogicalOperation COP>
+Status NEBinaryLogicalOperationStatic<COP>::validate(const ITensorInfo *input1,
+ const ITensorInfo *input2,
+ const ITensorInfo *output)
+{
+ return NEBinaryLogicalOperationKernel::validate(COP, input1, input2, output);
+}
+
+void NEBinaryLogicalOperation::configure(ITensor *input1, ITensor *input2, ITensor *output,
+ BinaryLogicalOperation op)
+{
+ auto k = arm_compute::support::cpp14::make_unique<NEBinaryLogicalOperationKernel>();
+ k->configure(op, input1, input2, output);
+ _kernel = std::move(k);
+}
+
+Status NEBinaryLogicalOperation::validate(const ITensorInfo *input1, const ITensorInfo *input2,
+ const ITensorInfo *output, BinaryLogicalOperation op)
+{
+ return NEBinaryLogicalOperationKernel::validate(op, input1, input2, output);
+}
+
+// Supported Specializations
+template class NEBinaryLogicalOperationStatic<BinaryLogicalOperation::AND>;
+template class NEBinaryLogicalOperationStatic<BinaryLogicalOperation::OR>;
+} // namespace arm_compute
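Both entry points above can be exercised as follows; a minimal sketch assuming run() is inherited from the simple-function base class and the tensors are already allocated elsewhere:

    // Operation fixed at compile time via the explicit specializations:
    arm_compute::NEBinaryLogicalOperationStatic<arm_compute::BinaryLogicalOperation::AND> land;
    land.configure(&input1, &input2, &output);
    land.run();

    // Operation selected at run time:
    arm_compute::NEBinaryLogicalOperation logic;
    logic.configure(&input1, &input2, &output, arm_compute::BinaryLogicalOperation::OR);
    logic.run();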
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEElementwiseUnaryLayerEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEElementwiseUnaryLayerEx.cpp
new file mode 100644
index 000000000..a95018a28
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEElementwiseUnaryLayerEx.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/NEON/functions/NEElementwiseUnaryLayerEx.h"
+
+#include "arm_compute/core/NEON/kernels/NEElementwiseUnaryKernelEx.h"
+#include "support/ToolchainSupport.h"
+
+#include <utility>
+
+namespace arm_compute
+{
+void NENegLayer::configure(const ITensor *input, ITensor *output)
+{
+ auto k = arm_compute::support::cpp14::make_unique<NEElementwiseUnaryKernelEx>();
+ k->configure(ElementWiseUnaryEx::NEG, input, output);
+ _kernel = std::move(k);
+}
+Status NENegLayer::validate(const ITensorInfo *input, const ITensorInfo *output)
+{
+ return NEElementwiseUnaryKernelEx::validate(ElementWiseUnaryEx::NEG, input, output);
+}
+} // namespace arm_compute
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEFullyConnectedReshapingLayer.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEFullyConnectedReshapingLayer.cpp
new file mode 100644
index 000000000..6649aa6e9
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEFullyConnectedReshapingLayer.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arm_compute/runtime/NEON/functions/NEFullyConnectedReshapingLayer.h"
+
+using namespace arm_compute;
+
+void NEFullyConnectedReshapingLayer::configure(const arm_compute::ITensor *input,
+ const arm_compute::ITensor *weights,
+ const arm_compute::ITensor *biases,
+ arm_compute::ITensor *output, bool needs_reshape,
+ const arm_compute::TensorShape &reshape)
+{
+ _input = input;
+ _weights = weights;
+ _biases = biases;
+ _output = output;
+ _needs_reshape = needs_reshape;
+
+ if (_needs_reshape)
+ {
+ // Reshape the input to the requested shape before the fully connected layer
+ auto_init_if_empty(*_neon_buffer.info(), _input->info()->clone()->set_tensor_shape(reshape));
+ _neon_reshape.configure(_input, &_neon_buffer);
+
+ _neon_fc.configure(&_neon_buffer, _weights, _biases, _output);
+
+ // NOTE _neon_buffer is inaccessible from outside, and thus it is safe to invoke allocate here.
+ _neon_buffer.allocator()->allocate();
+ }
+ else
+ {
+ _neon_fc.configure(_input, _weights, _biases, _output);
+ }
+}
+
+void NEFullyConnectedReshapingLayer::run(void)
+{
+ if (_needs_reshape)
+ _neon_reshape.run();
+
+ _neon_fc.run();
+}
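A hedged sketch of the needs_reshape path above; the 4-D feature map and its flattened 2-D shape are illustrative assumptions:

    // Flatten a (W=7, H=7, C=64, N=1) feature map to (3136, 1) before the FC layer.
    arm_compute::NEFullyConnectedReshapingLayer fc;
    fc.configure(&input4d, &weights, &biases, &output,
                 /*needs_reshape=*/true,
                 arm_compute::TensorShape(7U * 7U * 64U, 1U));
    fc.run(); // runs the reshape first, then the fully connected layer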
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEPReLU.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEPReLU.cpp
new file mode 100644
index 000000000..1150cef76
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEPReLU.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/NEON/functions/NEPReLU.h"
+
+#include "arm_compute/core/NEON/kernels/NEPReLUKernel.h"
+#include "support/ToolchainSupport.h"
+
+#include <utility>
+
+using namespace arm_compute;
+
+void NEPReLU::configure(const ITensor *input, const ITensor *alpha, ITensor *output)
+{
+ auto k = arm_compute::support::cpp14::make_unique<NEPReLUKernel>();
+ k->configure(input, alpha, output);
+ _kernel = std::move(k);
+}
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NERNNLayerEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NERNNLayerEx.cpp
new file mode 100644
index 000000000..84411c266
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NERNNLayerEx.cpp
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "arm_compute/runtime/NEON/functions/NERNNLayerEx.h"
+
+#include "arm_compute/core/Error.h"
+#include "arm_compute/core/TensorInfo.h"
+#include "arm_compute/core/Types.h"
+#include "arm_compute/core/Validate.h"
+#include "arm_compute/core/utils/misc/ShapeCalculator.h"
+#include "arm_compute/runtime/NEON/NEScheduler.h"
+
+namespace arm_compute
+{
+NERNNLayerEx::NERNNLayerEx(std::shared_ptr<IMemoryManager> memory_manager)
+ : _memory_group(std::move(memory_manager)), _gemm_state_f(), _add_kernel(),
+ _activation_kernel(), _fully_connected_kernel(), _copy_kernel(), _fully_connected_out(),
+ _gemm_output(), _add_output(), _is_prepared(false)
+{
+}
+
+Status NERNNLayerEx::validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *recurrent_weights, const ITensorInfo *bias,
+ const ITensorInfo *hidden_state, const ITensorInfo *output,
+ const ActivationLayerInfo &info)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, weights, recurrent_weights, bias, hidden_state,
+ output);
+
+ const int idx_width = 0;
+ const int idx_height = 1;
+ ARM_COMPUTE_RETURN_ERROR_ON(input->dimension(idx_width) != weights->dimension(idx_width));
+ ARM_COMPUTE_RETURN_ERROR_ON(weights->dimension(idx_height) !=
+ recurrent_weights->dimension(idx_width));
+ ARM_COMPUTE_RETURN_ERROR_ON(recurrent_weights->dimension(idx_width) !=
+ recurrent_weights->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON(bias->num_dimensions() != 1);
+ ARM_COMPUTE_RETURN_ERROR_ON(bias->dimension(idx_width) != weights->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON(hidden_state->dimension(idx_width) != weights->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON(hidden_state->dimension(idx_height) != input->dimension(idx_height));
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DIMENSIONS(output->tensor_shape(),
+ hidden_state->tensor_shape());
+
+ auto shape_info = TensorInfo(misc::shape_calculator::compute_rnn_shape(
+ recurrent_weights, hidden_state->dimension(idx_height)),
+ 1, input->data_type());
+
+ ARM_COMPUTE_RETURN_ON_ERROR(NEFullyConnectedLayer::validate(input, weights, bias, &shape_info));
+ ARM_COMPUTE_RETURN_ON_ERROR(NEArithmeticAdditionKernel::validate(
+ &shape_info, &shape_info, &shape_info, ConvertPolicy::SATURATE));
+ ARM_COMPUTE_RETURN_ON_ERROR(NEActivationLayerKernel::validate(&shape_info, &shape_info, info));
+
+ return Status{};
+}
+
+void NERNNLayerEx::configure(const ITensor *input, const ITensor *weights,
+ const ITensor *recurrent_weights, const ITensor *bias,
+ ITensor *hidden_state, ITensor *output, ActivationLayerInfo &info)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, weights, recurrent_weights, bias, hidden_state, output);
+ ARM_COMPUTE_ERROR_THROW_ON(NERNNLayerEx::validate(input->info(), weights->info(),
+ recurrent_weights->info(), bias->info(),
+ hidden_state->info(), output->info(), info));
+
+ const int idx_height = 1;
+ TensorShape shape = misc::shape_calculator::compute_rnn_shape(
+ recurrent_weights->info(), hidden_state->info()->dimension(idx_height));
+
+ _is_prepared = false;
+
+ // Initialize intermediate buffers
+ _fully_connected_out.allocator()->init(TensorInfo(shape, 1, input->info()->data_type()));
+ _gemm_output.allocator()->init(TensorInfo(shape, 1, input->info()->data_type()));
+
+ // Manage intermediate buffers and configure
+ _memory_group.manage(&_fully_connected_out);
+ _fully_connected_kernel.configure(input, weights, bias, &_fully_connected_out);
+
+ _memory_group.manage(&_gemm_output);
+ _gemm_state_f.configure(hidden_state, recurrent_weights, nullptr, &_gemm_output, 1.f, 0.f);
+
+ _add_output.allocator()->init(TensorInfo(shape, 1, input->info()->data_type()));
+ _memory_group.manage(&_add_output);
+
+ _add_kernel.configure(&_fully_connected_out, &_gemm_output, &_add_output,
+ ConvertPolicy::SATURATE);
+
+ _fully_connected_out.allocator()->allocate();
+ _gemm_output.allocator()->allocate();
+
+ _activation_kernel.configure(&_add_output, hidden_state, info);
+ _add_output.allocator()->allocate();
+
+ _copy_kernel.configure(hidden_state, output);
+}
+
+void NERNNLayerEx::run()
+{
+ prepare();
+
+ MemoryGroupResourceScope scope_mg(_memory_group);
+
+ _fully_connected_kernel.run();
+
+ _gemm_state_f.run();
+
+ NEScheduler::get().schedule(&_add_kernel, Window::DimY);
+ NEScheduler::get().schedule(&_activation_kernel, Window::DimY);
+
+ // Copy the hidden state to the output
+ NEScheduler::get().schedule(&_copy_kernel, Window::DimY);
+}
+
+void NERNNLayerEx::prepare()
+{
+ if (!_is_prepared)
+ {
+ _fully_connected_kernel.prepare();
+ _gemm_state_f.prepare();
+
+ _is_prepared = true;
+ }
+}
+} // namespace arm_compute
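In equation form, the configure/run pair above implements a vanilla RNN cell (notation mine, not from the source):

    h_t = \sigma(W x_t + b + R h_{t-1})

Here _fully_connected_kernel computes W x_t + b, _gemm_state_f computes R h_{t-1}, _add_kernel sums the two, _activation_kernel applies the activation \sigma writing into hidden_state, and _copy_kernel copies the new hidden state to the output tensor.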
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceMeanEx.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceMeanEx.cpp
new file mode 100644
index 000000000..c65e93570
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceMeanEx.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/NEON/functions/NEReduceMeanEx.h"
+
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/runtime/NEON/NEScheduler.h"
+
+using namespace arm_compute;
+
+NEReduceMeanEx::NEReduceMeanEx(std::shared_ptr<IMemoryManager> memory_manager)
+ : _memory_group(std::move(memory_manager)), _reduction_kernels(), _reduced_outs(), _reshape(),
+ _reduction_ops(), _keep_dims()
+{
+}
+
+Status NEReduceMeanEx::validate(const ITensorInfo *input, const Coordinates &reduction_axis,
+ bool keep_dims, const ITensorInfo *output)
+{
+ ARM_COMPUTE_UNUSED(keep_dims);
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input);
+ ARM_COMPUTE_RETURN_ERROR_ON(reduction_axis.num_dimensions() > input->num_dimensions());
+
+ TensorShape out_shape = input->tensor_shape();
+ const unsigned int reduction_ops = reduction_axis.num_dimensions();
+ const int input_dims = input->num_dimensions();
+ Coordinates axis_local = reduction_axis;
+
+ // Convert negative axis
+ for (unsigned int i = 0; i < reduction_ops; ++i)
+ {
+ axis_local[i] = wrap_around(axis_local[i], input_dims);
+ }
+
+ std::sort(axis_local.begin(), axis_local.begin() + reduction_ops);
+ for (unsigned int i = 0; i < reduction_ops; ++i)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON(axis_local[i] > 3);
+ ARM_COMPUTE_RETURN_ERROR_ON(static_cast<unsigned int>(axis_local[i]) >
+ input->num_dimensions() - 1);
+ if (output->total_size() > 0 && keep_dims)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON(output->dimension(axis_local[i]) != 1);
+ }
+ if (keep_dims)
+ {
+ out_shape.set(axis_local[i], 1);
+ }
+ else
+ {
+ out_shape.remove_dimension(axis_local[i] - i);
+ }
+ }
+ const TensorInfo out_info = input->clone()->set_tensor_shape(out_shape);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(output, &out_info);
+
+ return Status{};
+}
+
+void NEReduceMeanEx::configure(ITensor *input, const Coordinates &reduction_axis, bool keep_dims,
+ ITensor *output)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input);
+
+ _reduction_ops = reduction_axis.num_dimensions();
+ _reduction_kernels =
+ arm_compute::support::cpp14::make_unique<NEReductionOperation[]>(_reduction_ops);
+ _reduced_outs =
+ arm_compute::support::cpp14::make_unique<Tensor[]>(_reduction_ops - (keep_dims ? 1 : 0));
+ _keep_dims = keep_dims;
+
+ Coordinates axis_local = reduction_axis;
+ const int input_dims = input->info()->num_dimensions();
+ const unsigned int reduction_ops = reduction_axis.num_dimensions();
+
+ // Convert negative axis
+ for (unsigned int i = 0; i < reduction_ops; ++i)
+ {
+ axis_local[i] = wrap_around(axis_local[i], input_dims);
+ }
+
+ // Perform reduction for every axis
+ for (unsigned int i = 0; i < _reduction_ops; ++i)
+ {
+ TensorShape out_shape = i == 0 ? input->info()->tensor_shape()
+ : (_reduced_outs.get() + i - 1)->info()->tensor_shape();
+ out_shape.set(axis_local[i], 1);
+ auto in = (i == 0) ? input : (_reduced_outs.get() + i - 1);
+
+ if (i == _reduction_ops - 1 && keep_dims)
+ {
+ _reduction_kernels[i].configure(in, output, axis_local[i], ReductionOperation::MEAN_SUM);
+ }
+ else
+ {
+ _reduced_outs[i].allocator()->init(TensorInfo(out_shape, input->info()->num_channels(),
+ input->info()->data_type(),
+ input->info()->quantization_info())
+ .set_data_layout(output->info()->data_layout()));
+ _memory_group.manage(_reduced_outs.get() + i);
+ _reduction_kernels[i].configure(in, _reduced_outs.get() + i, axis_local[i],
+ ReductionOperation::MEAN_SUM);
+ }
+ }
+
+ // Allocate intermediate tensors
+ for (unsigned int i = 0; i < _reduction_ops - (keep_dims ? 1 : 0); ++i)
+ {
+ _reduced_outs[i].allocator()->allocate();
+ }
+
+ // Configure reshape layer if we want to drop the dimensions
+ if (!keep_dims)
+ {
+ TensorShape out_shape = input->info()->tensor_shape();
+
+ // We have to sort the reduction axis vectors in order for remove_dimension
+ // to work properly
+ std::sort(axis_local.begin(), axis_local.begin() + _reduction_ops);
+ for (unsigned int i = 0; i < _reduction_ops; ++i)
+ {
+ out_shape.remove_dimension(axis_local[i] - i);
+ }
+ auto_init_if_empty(*output->info(), input->info()->clone()->set_tensor_shape(out_shape));
+ _reshape.configure(_reduced_outs.get() + _reduction_ops - 1, output);
+ }
+}
+
+void NEReduceMeanEx::run()
+{
+ _memory_group.acquire();
+
+ for (unsigned int i = 0; i < _reduction_ops; ++i)
+ {
+ _reduction_kernels[i].run();
+ }
+
+ if (!_keep_dims)
+ {
+ _reshape.run();
+ }
+ _memory_group.release();
+}
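The remove_dimension(axis_local[i] - i) adjustment above is subtle enough to deserve a trace: each removal shifts every higher dimension index down by one, which is why the axes are sorted first and offset by the number of removals already done. The shapes below are illustrative:

    // Shape [8, 10, 12, 1], reduce axes {1, 2}, keep_dims == false:
    //   i = 0: remove_dimension(1 - 0) -> [8, 12, 1]
    //   i = 1: remove_dimension(2 - 1) -> [8, 1]   (the original axis 2 is now index 1)
    // Without the "- i" offset the second call would remove index 2 and yield
    // [8, 12], keeping the wrong dimension.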
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceSum.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceSum.cpp
new file mode 100644
index 000000000..3c18217ef
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NEReduceSum.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2018-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/NEON/functions/NEReduceSum.h"
+
+#include "arm_compute/core/CPP/Validate.h"
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/runtime/NEON/NEScheduler.h"
+
+using namespace arm_compute;
+
+NEReduceSum::NEReduceSum(std::shared_ptr<IMemoryManager> memory_manager)
+ : _memory_group(std::move(memory_manager)), _reduction_kernels(), _reduced_outs(), _reshape(),
+ _reduction_ops(), _keep_dims()
+{
+}
+
+Status NEReduceSum::validate(const ITensorInfo *input, const Coordinates &reduction_axis,
+ bool keep_dims, const ITensorInfo *output)
+{
+ ARM_COMPUTE_UNUSED(keep_dims);
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input);
+ ARM_COMPUTE_RETURN_ERROR_ON_CPU_F16_UNSUPPORTED(input);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::QASYMM8, DataType::F16,
+ DataType::F32);
+ ARM_COMPUTE_RETURN_ERROR_ON(reduction_axis.num_dimensions() > input->num_dimensions());
+
+ TensorShape out_shape = input->tensor_shape();
+ const unsigned int reduction_ops = reduction_axis.num_dimensions();
+ const int input_dims = input->num_dimensions();
+ Coordinates axis_local = reduction_axis;
+
+ // Convert negative axis
+ for (unsigned int i = 0; i < reduction_ops; ++i)
+ {
+ axis_local[i] = wrap_around(axis_local[i], input_dims);
+ }
+
+ std::sort(axis_local.begin(), axis_local.begin() + reduction_ops);
+ for (unsigned int i = 0; i < reduction_ops; ++i)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON(axis_local[i] > 3);
+ ARM_COMPUTE_RETURN_ERROR_ON(static_cast<unsigned int>(axis_local[i]) >
+ input->num_dimensions() - 1);
+ if (output->total_size() > 0 && keep_dims)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON(output->dimension(axis_local[i]) != 1);
+ }
+ if (keep_dims)
+ {
+ out_shape.set(axis_local[i], 1);
+ }
+ else
+ {
+ out_shape.remove_dimension(axis_local[i] - i);
+ }
+ }
+ const TensorInfo out_info = input->clone()->set_tensor_shape(out_shape);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_SHAPES(output, &out_info);
+
+ return Status{};
+}
+
+void NEReduceSum::configure(ITensor *input, const Coordinates &reduction_axis, bool keep_dims,
+ ITensor *output)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input);
+
+ _reduction_ops = reduction_axis.num_dimensions();
+ _reduction_kernels.resize(_reduction_ops);
+ _reduced_outs.resize(_reduction_ops - (keep_dims ? 1 : 0));
+ _keep_dims = keep_dims;
+
+ Coordinates axis_local = reduction_axis;
+ const int input_dims = input->info()->num_dimensions();
+ const unsigned int reduction_ops = reduction_axis.num_dimensions();
+
+ // Convert negative axis
+ for (unsigned int i = 0; i < reduction_ops; ++i)
+ {
+ axis_local[i] = wrap_around(axis_local[i], input_dims);
+ }
+
+ // Perform reduction for every axis
+ for (unsigned int i = 0; i < _reduction_ops; ++i)
+ {
+ TensorShape out_shape =
+ i == 0 ? input->info()->tensor_shape() : (&_reduced_outs[i - 1])->info()->tensor_shape();
+ out_shape.set(axis_local[i], 1);
+ auto in = (i == 0) ? input : (&_reduced_outs[i - 1]);
+
+ if (i == _reduction_ops - 1 && keep_dims)
+ {
+ _reduction_kernels[i].configure(in, output, axis_local[i], ReductionOperation::SUM);
+ }
+ else
+ {
+ _reduced_outs[i].allocator()->init(TensorInfo(out_shape, input->info()->num_channels(),
+ input->info()->data_type(),
+ input->info()->quantization_info())
+ .set_data_layout(input->info()->data_layout()));
+ _memory_group.manage(&_reduced_outs[i]);
+ _reduction_kernels[i].configure(in, &_reduced_outs[i], axis_local[i],
+ ReductionOperation::SUM);
+ }
+ }
+
+ // Allocate intermediate tensors
+ for (unsigned int i = 0; i < _reduction_ops - (keep_dims ? 1 : 0); ++i)
+ {
+ _reduced_outs[i].allocator()->allocate();
+ }
+
+ // Configure reshape layer if we want to drop the dimensions
+ if (!keep_dims)
+ {
+ TensorShape out_shape = input->info()->tensor_shape();
+
+ // We have to sort the reduction axis vectors in order for remove_dimension
+ // to work properly
+ std::sort(axis_local.begin(), axis_local.begin() + _reduction_ops);
+ for (unsigned int i = 0; i < _reduction_ops; ++i)
+ {
+ out_shape.remove_dimension(axis_local[i] - i);
+ }
+ auto_init_if_empty(*output->info(), input->info()->clone()->set_tensor_shape(out_shape));
+ _reshape.configure(&_reduced_outs[_reduction_ops - 1], output);
+ }
+}
+
+void NEReduceSum::run()
+{
+ MemoryGroupResourceScope scope_mg(_memory_group);
+
+ for (unsigned int i = 0; i < _reduction_ops; ++i)
+ {
+ _reduction_kernels[i].run();
+ }
+
+ if (!_keep_dims)
+ {
+ _reshape.run();
+ }
+}
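A usage sketch for the function above, with tensor allocation elided; the Coordinates constructor is assumed to follow the variadic Dimensions form used elsewhere in ACL:

    // Sum over dimensions 1 and 2, keeping the reduced dims as size 1:
    arm_compute::NEReduceSum reduce_sum;
    reduce_sum.configure(&input, arm_compute::Coordinates(1, 2),
                         /*keep_dims=*/true, &output);
    // Each axis gets its own NEReductionOperation; with keep_dims == true the last
    // reduction writes directly into `output` and the reshape stage is skipped.
    reduce_sum.run();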
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NETransposeConvLayer.cpp b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NETransposeConvLayer.cpp
new file mode 100644
index 000000000..fd15ef05f
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/NEON/functions/NETransposeConvLayer.cpp
@@ -0,0 +1,307 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (c) 2017-2019 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/runtime/NEON/functions/NETransposeConvLayer.h"
+
+#include "arm_compute/core/Helpers.h"
+#include "arm_compute/core/Utils.h"
+#include "arm_compute/core/UtilsEx.h"
+#include "arm_compute/core/Validate.h"
+#include "arm_compute/core/utils/misc/ShapeCalculator.h"
+#include "arm_compute/core/utils/misc/ShapeCalculatorEx.h"
+#include "arm_compute/runtime/NEON/NEScheduler.h"
+
+using namespace arm_compute::misc::shape_calculator;
+
+namespace arm_compute
+{
+NETransposeConvLayer::NETransposeConvLayer(std::shared_ptr<IMemoryManager> memory_manager) // NOLINT
+ : _memory_group(std::move(memory_manager)),
+ _conv_f(),
+ _upsample_f(),
+ _flip_weights(),
+ _permute_input(),
+ _permute_weights(),
+ _permute_output(),
+ _scaled_output(),
+ _weights_flipped(),
+ _permuted_input(),
+ _permuted_weights(),
+ _permuted_output(),
+ _is_nchw(false),
+ _original_weights(nullptr),
+ _input(nullptr),
+ _info(),
+ _is_prepared(false)
+{
+}
+
+Status NETransposeConvLayer::validate(const ITensorInfo *input, const ITensorInfo *weights,
+ const ITensorInfo *bias, const ITensorInfo *output,
+ const PadStrideInfo &info, unsigned int invalid_right,
+ unsigned int invalid_bottom)
+{
+ ARM_COMPUTE_RETURN_ERROR_ON_NULLPTR(input, weights, output);
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::F32, DataType::F16,
+ DataType::QASYMM8);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(weights, input);
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_LAYOUT(weights, input);
+ const unsigned int width_idx =
+ get_data_layout_dimension_index(weights->data_layout(), DataLayoutDimension::WIDTH);
+ const unsigned int height_idx =
+ get_data_layout_dimension_index(weights->data_layout(), DataLayoutDimension::HEIGHT);
+ ARM_COMPUTE_RETURN_ERROR_ON(weights->dimension(width_idx) != weights->dimension(height_idx));
+ ARM_COMPUTE_RETURN_ERROR_ON(weights->dimension(width_idx) < 1);
+
+ auto out_dims = transposeconv_output_dimensions(
+ input->dimension(width_idx), input->dimension(height_idx), weights->dimension(width_idx),
+ weights->dimension(height_idx), info, invalid_right, invalid_bottom);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, weights);
+ if (is_data_type_quantized_asymmetric(input->data_type()) && bias)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(bias, 1, DataType::S32);
+ }
+ else if (bias)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, bias);
+ }
+
+ if (output->tensor_shape().total_size() > 0)
+ {
+ ARM_COMPUTE_RETURN_ERROR_ON_MISMATCHING_DATA_TYPES(input, output);
+
+ const TensorShape output_shape = compute_transposeconv_output_shape(out_dims, *input, *weights);
+
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(Window::DimX) < output_shape.x(),
+ "Output's dim 0 is invalid.");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(Window::DimY) < output_shape.y(),
+ "Output's dim 1 is invalid.");
+ ARM_COMPUTE_RETURN_ERROR_ON_MSG(output->dimension(Window::DimZ) < output_shape.z(),
+ "Output's dim 2 is invalid.");
+ }
+
+ unsigned int pad_left = 0;
+ unsigned int pad_right = 0;
+ unsigned int pad_top = 0;
+ unsigned int pad_bottom = 0;
+ const TensorShape scale_out_shape = compute_transposeconv_upsampled_shape(
+ *input, *weights, info, out_dims, invalid_right, invalid_bottom, pad_left, pad_right, pad_top,
+ pad_bottom);
+ TensorInfo scale_out_info(
+ input->clone()->set_is_resizable(true).reset_padding().set_tensor_shape(scale_out_shape));
+ scale_out_info.set_data_layout(input->data_layout());
+ const PadStrideInfo conv_info(1, 1, 0, 0, 0, 0, DimensionRoundingType::CEIL);
+
+ const unsigned int batches_idx =
+ get_data_layout_dimension_index(weights->data_layout(), DataLayoutDimension::BATCHES);
+ const unsigned int channel_idx =
+ get_data_layout_dimension_index(weights->data_layout(), DataLayoutDimension::CHANNEL);
+ ARM_COMPUTE_RETURN_ERROR_ON(input->dimension(batches_idx) !=
+ scale_out_info.dimension(batches_idx));
+ ARM_COMPUTE_RETURN_ERROR_ON(input->dimension(channel_idx) !=
+ scale_out_info.dimension(channel_idx));
+
+ ARM_COMPUTE_RETURN_ON_ERROR(NEConvolutionLayer::validate(&scale_out_info, weights, bias, output,
+ conv_info, WeightsInfo()));
+
+ return Status{};
+}
+
+void NETransposeConvLayer::configure(ITensor *input, const ITensor *weights, const ITensor *bias,
+ ITensor *output, const PadStrideInfo &info,
+ unsigned int invalid_right, unsigned int invalid_bottom)
+{
+ ARM_COMPUTE_ERROR_ON_NULLPTR(input, weights, output);
+
+ const DataLayout data_layout = input->info()->data_layout();
+
+ _input = input;
+ _original_weights = weights;
+ _info = info;
+ _is_prepared = false;
+ _is_nchw = data_layout == DataLayout::NCHW;
+
+ const unsigned int stride_x = info.stride().first;
+ const unsigned int stride_y = info.stride().second;
+
+ const unsigned int width_idx =
+ get_data_layout_dimension_index(data_layout, DataLayoutDimension::WIDTH);
+ const unsigned int height_idx =
+ get_data_layout_dimension_index(data_layout, DataLayoutDimension::HEIGHT);
+ auto out_dims = transposeconv_output_dimensions(
+ input->info()->dimension(width_idx), input->info()->dimension(height_idx),
+ weights->info()->dimension(width_idx), weights->info()->dimension(height_idx), info,
+ invalid_right, invalid_bottom);
+
+ const TensorShape output_shape =
+ compute_transposeconv_output_shape(out_dims, *input->info(), *weights->info());
+ // Output auto initialization if not yet initialized
+ auto_init_if_empty(*output->info(), output_shape, 1, input->info()->data_type(),
+ input->info()->quantization_info());
+
+ // Perform validation step
+ ARM_COMPUTE_ERROR_THROW_ON(NETransposeConvLayer::validate(
+ input->info(), weights->info(), bias == nullptr ? nullptr : bias->info(), output->info(),
+ info, invalid_right, invalid_bottom));
+
+ _memory_group.manage(&_scaled_output);
+
+ if (!_is_nchw)
+ {
+ _memory_group.manage(&_permuted_input);
+ _memory_group.manage(&_permuted_weights);
+ _memory_group.manage(&_permuted_output);
+
+ // Configure the function to transform the input tensor from NHWC -> NCHW
+ _permuted_input.info()->set_quantization_info(input->info()->quantization_info());
+ _permute_input.configure(input, &_permuted_input, PermutationVector(1U, 2U, 0U));
+ _permuted_input.info()->set_data_layout(DataLayout::NCHW);
+
+ // Configure the function to transform the weights tensor from NHWC -> NCHW
+ _permuted_weights.info()->set_quantization_info(weights->info()->quantization_info());
+ _permute_weights.configure(weights, &_permuted_weights, PermutationVector(1U, 2U, 0U));
+ _permuted_weights.info()->set_data_layout(DataLayout::NCHW);
+
+ // Find the upsampled dimensions and the padding needed for the convolution with stride 1 in
+ // order to match output shape
+
+ unsigned int pad_left = 0;
+ unsigned int pad_right = 0;
+ unsigned int pad_top = 0;
+ unsigned int pad_bottom = 0;
+ const TensorShape scale_out_shape = compute_transposeconv_upsampled_shape(
+ *_permuted_input.info(), *_permuted_weights.info(), info, out_dims, invalid_right,
+ invalid_bottom, pad_left, pad_right, pad_top, pad_bottom);
+
+ TensorInfo scale_out_info(scale_out_shape, 1, _permuted_input.info()->data_type(),
+ _permuted_input.info()->quantization_info());
+ scale_out_info.set_data_layout(DataLayout::NCHW);
+ _scaled_output.allocator()->init(scale_out_info);
+
+ const PadStrideInfo upsample_info(stride_x, stride_y, pad_left, pad_right, pad_top, pad_bottom,
+ DimensionRoundingType::CEIL);
+ _upsample_f.configure(&_permuted_input, &_scaled_output, upsample_info);
+
+ _weights_flipped.allocator()->init(*_permuted_weights.info()->clone());
+ _weights_flipped.info()->set_quantization_info(weights->info()->quantization_info());
+ _flip_weights.configure(&_permuted_weights, &_weights_flipped);
+
+ // Set up the function to convolve the upscaled output
+ const PadStrideInfo conv_info(1, 1, 0, 0, 0, 0, DimensionRoundingType::CEIL);
+
+ const auto out_shape = output->info()->tensor_shape();
+ TensorShape permuted_out_shape{out_shape[1], out_shape[2], out_shape[0], out_shape[3]};
+ TensorInfo permuted_out_info(permuted_out_shape, 1, output->info()->data_type(),
+ output->info()->quantization_info());
+ _permuted_output.allocator()->init(permuted_out_info);
+ _permuted_output.info()->set_data_layout(DataLayout::NCHW);
+ _conv_f.configure(&_scaled_output, &_weights_flipped, bias, &_permuted_output, conv_info);
+
+ // Configure the function to transform the convoluted output to NHWC
+ _permute_output.configure(&_permuted_output, output, PermutationVector(2U, 0U, 1U));
+
+ _permuted_input.allocator()->allocate();
+ _permuted_weights.allocator()->allocate();
+ _permuted_output.allocator()->allocate();
+ }
+ else
+ {
+ // Find the upsampled dimensions and the padding needed for the convolution with stride 1 in
+ // order to match output shape
+ unsigned int pad_left = 0;
+ unsigned int pad_right = 0;
+ unsigned int pad_top = 0;
+ unsigned int pad_bottom = 0;
+ const TensorShape scale_out_shape = compute_transposeconv_upsampled_shape(
+ *input->info(), *weights->info(), info, out_dims, invalid_right, invalid_bottom, pad_left,
+ pad_right, pad_top, pad_bottom);
+
+ TensorInfo scale_out_info(scale_out_shape, 1, input->info()->data_type(),
+ input->info()->quantization_info());
+ _scaled_output.allocator()->init(scale_out_info);
+ const PadStrideInfo upsample_info(stride_x, stride_y, pad_left, pad_right, pad_top, pad_bottom,
+ DimensionRoundingType::FLOOR);
+ _upsample_f.configure(input, &_scaled_output, upsample_info);
+
+ _weights_flipped.allocator()->init(weights->info()->clone()->set_data_layout(data_layout));
+ _flip_weights.configure(weights, &_weights_flipped);
+
+ // Set up the function to convolve the upscaled output
+ const PadStrideInfo conv_info(1, 1, 0, 0, 0, 0, DimensionRoundingType::CEIL);
+ _conv_f.configure(&_scaled_output, &_weights_flipped, bias, output, conv_info);
+ }
+ _scaled_output.allocator()->allocate();
+}
+
+void NETransposeConvLayer::run()
+{
+ prepare();
+
+ // MemoryGroupResourceScope scope_mg(_memory_group);
+
+ // Permute input
+ if (!_is_nchw)
+ {
+ _permute_input.run();
+ }
+
+ _upsample_f.run();
+ _conv_f.run();
+
+ // Permute output
+ if (!_is_nchw)
+ {
+ _permute_output.run();
+ }
+}
+
+void NETransposeConvLayer::prepare()
+{
+ if (!_is_prepared)
+ {
+ ARM_COMPUTE_ERROR_ON(!_original_weights->is_used());
+
+ // Run weights flipping and mark original weights tensor as unused
+ _weights_flipped.allocator()->allocate();
+ // Permute weights
+ if (!_is_nchw)
+ {
+ _permute_weights.run();
+ }
+ NEScheduler::get().schedule(&_flip_weights, Window::DimZ);
+ _original_weights->mark_as_unused();
+
+ // Prepare convolution
+ _conv_f.prepare();
+
+ if (!_weights_flipped.is_used())
+ {
+ _weights_flipped.allocator()->free();
+ }
+
+ _is_prepared = true;
+ }
+}
+} // namespace arm_compute
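
configure() above realizes a transposed convolution as zero-upsampling into _scaled_output followed by a stride-1 convolution against flipped weights, with an NHWC<->NCHW permute sandwich when the backend needs NCHW. A minimal 1-D sketch of that same decomposition, standard library only and with hypothetical names:

#include <cstdio>
#include <vector>

// Transposed convolution == zero-stuff the input by the stride, then run a
// stride-1 full convolution with the kernel (equivalently: correlate with
// the flipped kernel, which is why the layer flips its weights).
std::vector<float> transposed_conv_1d(const std::vector<float> &x,
                                      const std::vector<float> &w, int stride)
{
  const int n = static_cast<int>(x.size());
  const int k = static_cast<int>(w.size());
  // Insert (stride - 1) zeros between samples, like _scaled_output above.
  std::vector<float> up((n - 1) * stride + 1, 0.0f);
  for (int i = 0; i < n; ++i)
    up[i * stride] = x[i];
  // Stride-1 full convolution of the zero-stuffed signal with w.
  std::vector<float> y(up.size() + k - 1, 0.0f);
  for (int t = 0; t < static_cast<int>(y.size()); ++t)
    for (int j = 0; j < k; ++j)
      if (t - j >= 0 && t - j < static_cast<int>(up.size()))
        y[t] += up[t - j] * w[j];
  return y;
}

int main()
{
  // x = {1, 2, 3}, w = {1, 1, 1}, stride 2 -> 1 1 3 2 5 3 3
  for (float v : transposed_conv_1d({1, 2, 3}, {1, 1, 1}, 2))
    std::printf("%.0f ", v);
  std::printf("\n");
}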
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericGather.cpp b/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericGather.cpp
new file mode 100644
index 000000000..67e1bfb02
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericGather.cpp
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arm_compute/runtime/misc/functions/GenericGather.h"
+
+namespace arm_compute
+{
+namespace misc
+{
+
+bool shouldPermute(arm_compute::ITensorInfo *input, arm_compute::ITensorInfo *output)
+{
+ return (input->num_dimensions() != 4 && output->num_dimensions() == 4 &&
+ input->data_layout() == DataLayout::NCHW);
+}
+
+void GenericGather::configure(arm_compute::ITensor *input, arm_compute::ITensor *indices,
+ arm_compute::ITensor *output, int axis)
+{
+ _input = input;
+ _indices = indices;
+ _output = output;
+ _axis = axis;
+
+ arm_compute::PermutationVector pv;
+ if (shouldPermute(input->info(), output->info()))
+ {
+ // NOTE This vector comes from CLPermuteKernel implementation
+ //
+ // This implementation permutes a tensor of shape C / W / H into another tensor of
+ // shape W / H / C
+ //
+ // Original | Permuted
+ // 0 | C | W (from 1)
+ // 1 | W | H (from 2)
+ // 2 | H | C (from 0)
+ //
+ pv = arm_compute::PermutationVector{1, 2, 0};
+ }
+
+ if (utils::isGpuMode())
+ {
+ if (shouldPermute(input->info(), output->info()))
+ {
+ _cl_gather.configure(CAST_CL(input), CAST_CL(indices), &_cl_permuted, axis);
+ _cl_permute.configure(&_cl_permuted, CAST_CL(output), pv);
+
+ // NOTE _cl_permuted is inaccessible from outside, so it is safe to invoke allocate() here.
+ _cl_permuted.allocator()->allocate();
+ }
+ else
+ {
+ _cl_gather.configure(CAST_CL(input), CAST_CL(indices), CAST_CL(output), axis);
+ }
+ }
+ else
+ {
+ throw std::runtime_error("Not supported yet");
+ }
+}
+
+void GenericGather::run(void)
+{
+ if (utils::isGpuMode())
+ {
+ _cl_gather.run();
+ if (shouldPermute(_input->info(), _output->info()))
+ {
+ _cl_permute.run();
+ }
+ }
+ else
+ {
+ throw std::runtime_error("Not supported yet");
+ }
+}
+
+} // namespace misc
+} // namespace arm_compute
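
The NOTE in configure() above states the convention: with pv = {1, 2, 0}, output dimension i takes input dimension pv[i], turning a C / W / H shape into W / H / C. A sketch of just that bookkeeping (shape values hypothetical; the device-side data movement is CLPermute's job):

#include <array>
#include <cstdio>

int main()
{
  const std::array<int, 3> in_shape = {32, 7, 7}; // C, W, H
  const std::array<int, 3> pv = {1, 2, 0};
  std::array<int, 3> out_shape{};
  for (int i = 0; i < 3; ++i)
    out_shape[i] = in_shape[pv[i]]; // out dim i <- in dim pv[i]
  std::printf("%d %d %d\n", out_shape[0], out_shape[1], out_shape[2]); // 7 7 32 (W, H, C)
}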
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericReshapeLayer.cpp b/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericReshapeLayer.cpp
new file mode 100644
index 000000000..8025ae28e
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/GenericReshapeLayer.cpp
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arm_compute/core/ITensorInfo.h"
+#include "arm_compute/runtime/misc/functions/GenericReshapeLayer.h"
+
+namespace arm_compute
+{
+namespace misc
+{
+
+namespace
+{
+
+bool shouldPermute(const arm_compute::ITensorInfo *input, arm_compute::ITensorInfo *output)
+{
+ return (input->num_dimensions() == 4 || output->num_dimensions() == 4) &&
+ (input->num_dimensions() != output->num_dimensions() &&
+ input->data_layout() == DataLayout::NCHW);
+}
+
+} // namespace
+
+void GenericReshapeLayer::configure(const arm_compute::ITensor *input, arm_compute::ITensor *output)
+{
+ _input = input;
+ _output = output;
+
+ arm_compute::PermutationVector pv;
+ if (input->info()->data_layout() == DataLayout::NCHW && input->info()->num_dimensions() == 4 &&
+ output->info()->num_dimensions() != 4)
+ {
+ // NOTE This vector comes from CLPermuteKernel implementation
+ //
+ // This implementation permutes a tensor of shape W / H / C into another tensor of shape
+ // C / W / H
+ //
+ // Original | Permuted
+ // 0 | W | C (from 2)
+ // 1 | H | W (from 0)
+ // 2 | C | H (from 1)
+ //
+ pv = arm_compute::PermutationVector{2, 0, 1};
+ }
+ else if (input->info()->data_layout() == DataLayout::NCHW &&
+ input->info()->num_dimensions() != 4 && output->info()->num_dimensions() == 4)
+ {
+ // NOTE This vector comes from CLPermuteKernel implementation
+ //
+ // This implementation permutes a tensor of shape C / W / H into another tensor of shape
+ // W / H / C
+ //
+ // Original | Permuted
+ // 0 | C | W (from 1)
+ // 1 | W | H (from 2)
+ // 2 | H | C (from 0)
+ //
+ pv = arm_compute::PermutationVector{1, 2, 0};
+ }
+
+ if (utils::isGpuMode())
+ {
+ const auto const_input = CAST_CL(const_cast<arm_compute::ITensor *>(input));
+ if (shouldPermute(input->info(), output->info()))
+ {
+ _cl_permute.configure(const_input, &_cl_permuted, pv);
+ _cl_reshape.configure(&_cl_permuted, CAST_CL(output));
+
+ // NOTE _cl_permuted is inaccessible from outside, so it is safe to invoke allocate() here.
+ _cl_permuted.allocator()->allocate();
+ }
+ else
+ {
+ _cl_reshape.configure(const_input, CAST_CL(output));
+ }
+ }
+ else
+ {
+ if (shouldPermute(input->info(), output->info()))
+ {
+ _neon_permute.configure(input, &_neon_permuted, pv);
+ _neon_reshape.configure(&_neon_permuted, output);
+
+ // NOTE _neon_permuted is inaccessible from outside, so it is safe to invoke allocate() here.
+ _neon_permuted.allocator()->allocate();
+ }
+ else
+ {
+ _neon_reshape.configure(input, output);
+ }
+ }
+}
+
+void GenericReshapeLayer::run(void)
+{
+ if (utils::isGpuMode())
+ {
+ if (shouldPermute(_input->info(), _output->info()))
+ {
+ _cl_permute.run();
+ }
+ _cl_reshape.run();
+ }
+ else
+ {
+ if (shouldPermute(_input->info(), _output->info()))
+ {
+ _neon_permute.run();
+ }
+ _neon_reshape.run();
+ }
+}
+
+} // namespace misc
+} // namespace arm_compute
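
The permute-before-reshape above exists because a reshape is a flat copy in memory order, and NCHW and NHWC place the same logical element at different flat offsets. A self-contained check of that claim (sizes hypothetical):

#include <cstdio>

int main()
{
  const int C = 2, H = 2, W = 2;
  for (int c = 0; c < C; ++c)
    for (int h = 0; h < H; ++h)
      for (int w = 0; w < W; ++w)
      {
        const int nchw = (c * H + h) * W + w;
        const int nhwc = (h * W + w) * C + c;
        if (nchw != nhwc)
          std::printf("(c=%d,h=%d,w=%d): NCHW offset %d vs NHWC offset %d\n", c, h, w, nchw,
                      nhwc);
      }
}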
diff --git a/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/Utils.cpp b/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/Utils.cpp
new file mode 100644
index 000000000..44a4bb9ed
--- /dev/null
+++ b/runtimes/libs/ARMComputeEx/src/runtime/misc/functions/Utils.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arm_compute/runtime/misc/functions/Utils.h"
+
+namespace arm_compute
+{
+namespace misc
+{
+namespace utils
+{
+
+bool isGpuMode()
+{
+ char *neon = std::getenv("NEON");
+ if (neon == nullptr)
+ return true;
+ else if (neon[0] == '1')
+ return false;
+ return true;
+}
+
+} // namespace utils
+} // namespace misc
+} // namespace arm_compute
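
For reference, the selection rule mirrored as standalone code: GPU (OpenCL) mode is the default, and exporting NEON=1 (e.g. `NEON=1 ./app`, binary name hypothetical) forces the CPU branches of the misc functions; any other value of NEON keeps GPU mode.

#include <cstdlib>

// Mirror of isGpuMode() above, for documentation purposes only.
bool is_gpu_mode_mirror()
{
  const char *neon = std::getenv("NEON");
  return neon == nullptr || neon[0] != '1';
}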
diff --git a/libs/ARMComputeEx/src/runtime/topk_v2.h b/runtimes/libs/ARMComputeEx/src/runtime/topk_v2.h
index f94effea1..f94effea1 100644
--- a/libs/ARMComputeEx/src/runtime/topk_v2.h
+++ b/runtimes/libs/ARMComputeEx/src/runtime/topk_v2.h
diff --git a/libs/CMakeLists.txt b/runtimes/libs/CMakeLists.txt
index 99d2028f4..99d2028f4 100644
--- a/libs/CMakeLists.txt
+++ b/runtimes/libs/CMakeLists.txt
diff --git a/runtimes/libs/cker/CMakeLists.txt b/runtimes/libs/cker/CMakeLists.txt
new file mode 100644
index 000000000..16a13f5dd
--- /dev/null
+++ b/runtimes/libs/cker/CMakeLists.txt
@@ -0,0 +1,2 @@
+add_library(nnfw_lib_cker INTERFACE)
+target_include_directories(nnfw_lib_cker INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/include)
diff --git a/runtimes/libs/cker/README.md b/runtimes/libs/cker/README.md
new file mode 100644
index 000000000..149320ffc
--- /dev/null
+++ b/runtimes/libs/cker/README.md
@@ -0,0 +1,7 @@
+# cker
+
+cker - Portable CPU kernel library
+
+__cker__ means `CPU kernel`
+
+Currently, __cker__ is a port of TensorFlow Lite's reference_op kernels (TensorFlow 1.12) and gemmlowp.
diff --git a/runtimes/libs/cker/include/cker/Shape.h b/runtimes/libs/cker/include/cker/Shape.h
new file mode 100644
index 000000000..39449c68f
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/Shape.h
@@ -0,0 +1,286 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_SHAPE_H__
+#define __NNFW_CKER_SHAPE_H__
+
+#include <algorithm>
+#include <cassert>
+#include <cstdint>
+#include <cstring>
+#include <initializer_list>
+#include <vector>
+
+#define UNUSED_RELEASE(a) (void)(a)
+
+namespace nnfw
+{
+namespace cker
+{
+
+class Shape
+{
+public:
+ // Shapes with dimensions up to 4 are stored directly in the structure, while
+ // larger shapes are separately allocated.
+ static constexpr int kMaxSmallSize = 4;
+
+ Shape &operator=(Shape const &) = delete;
+
+ Shape() : _size(0) {}
+
+ explicit Shape(int dimensions_count) : _size(dimensions_count)
+ {
+ if (dimensions_count > kMaxSmallSize)
+ {
+ _dims_pointer = new int32_t[dimensions_count];
+ }
+ }
+
+ Shape(int shape_size, int32_t value) : _size(0)
+ {
+ Resize(shape_size);
+ for (int i = 0; i < shape_size; ++i)
+ {
+ SetDim(i, value);
+ }
+ }
+
+ Shape(int dimensions_count, const int32_t *dims_data) : _size(0)
+ {
+ ReplaceWith(dimensions_count, dims_data);
+ }
+
+ Shape(const std::initializer_list<int> init_list) : _size(0) { BuildFrom(init_list); }
+
+ // Avoid using this constructor. We should be able to delete it when C++17
+ // rolls out.
+ Shape(Shape const &other) : _size(other.DimensionsCount())
+ {
+ if (_size > kMaxSmallSize)
+ {
+ _dims_pointer = new int32_t[_size];
+ }
+ std::memcpy(DimsData(), other.DimsData(), sizeof(int32_t) * _size);
+ }
+
+ bool operator==(const Shape &comp) const
+ {
+ return this->_size == comp._size &&
+ std::memcmp(DimsData(), comp.DimsData(), _size * sizeof(int32_t)) == 0;
+ }
+
+ ~Shape()
+ {
+ if (_size > kMaxSmallSize)
+ {
+ delete[] _dims_pointer;
+ }
+ }
+
+ inline int32_t DimensionsCount() const { return _size; }
+ inline int32_t Dims(int i) const
+ {
+ assert(i >= 0);
+ assert(i < _size);
+ return _size > kMaxSmallSize ? _dims_pointer[i] : _dims[i];
+ }
+ inline void SetDim(int i, int32_t val)
+ {
+ assert(i >= 0);
+ assert(i < _size);
+ if (_size > kMaxSmallSize)
+ {
+ _dims_pointer[i] = val;
+ }
+ else
+ {
+ _dims[i] = val;
+ }
+ }
+
+ inline int32_t *DimsData() { return _size > kMaxSmallSize ? _dims_pointer : _dims; }
+ inline const int32_t *DimsData() const { return _size > kMaxSmallSize ? _dims_pointer : _dims; }
+ // The caller must ensure that the shape is no bigger than 4-D.
+ inline const int32_t *DimsDataUpTo4D() const { return _dims; }
+
+ inline void Resize(int dimensions_count)
+ {
+ if (_size > kMaxSmallSize)
+ {
+ delete[] _dims_pointer;
+ }
+ _size = dimensions_count;
+ if (dimensions_count > kMaxSmallSize)
+ {
+ _dims_pointer = new int32_t[dimensions_count];
+ }
+ }
+
+ inline void ReplaceWith(int dimensions_count, const int32_t *dims_data)
+ {
+ Resize(dimensions_count);
+ int32_t *dst_dims = DimsData();
+ std::memcpy(dst_dims, dims_data, dimensions_count * sizeof(int32_t));
+ }
+
+ template <typename T> inline void BuildFrom(const T &src_iterable)
+ {
+ const int dimensions_count = std::distance(src_iterable.begin(), src_iterable.end());
+ Resize(dimensions_count);
+ int32_t *data = DimsData();
+ for (auto it : src_iterable)
+ {
+ *data = it;
+ ++data;
+ }
+ }
+
+ // This will probably be factored out. Old code made substantial use of 4-D
+ // shapes, and so this function is used to extend smaller shapes. Note that
+ // (a) as Dims<4>-dependent code is eliminated, the reliance on this should be
+ // reduced, and (b) some kernels are strictly 4-D, but then the shapes of their
+ // inputs should already be 4-D, so this function should not be needed.
+ inline static Shape ExtendedShape(int new_shape_size, const Shape &shape)
+ {
+ return Shape(new_shape_size, shape, 1);
+ }
+
+ inline void BuildFrom(const std::initializer_list<int> init_list)
+ {
+ BuildFrom<const std::initializer_list<int>>(init_list);
+ }
+
+ // Returns the total count of elements, that is the size when flattened into a
+ // vector.
+ inline int FlatSize() const
+ {
+ int buffer_size = 1;
+ const int *dims_data = DimsData();
+ for (int i = 0; i < _size; i++)
+ {
+ const int dim = dims_data[i];
+ assert(dim >= 1);
+ buffer_size *= dim;
+ }
+ return buffer_size;
+ }
+
+ bool operator!=(const Shape &comp) const { return !((*this) == comp); }
+
+private:
+ // For use only by ExtendedShape(), written to guarantee (return-value) copy
+ // elision in C++17.
+ // This creates a shape padded to the desired size with the specified value.
+ Shape(int new_shape_size, const Shape &shape, int pad_value) : _size(0)
+ {
+ assert(new_shape_size >= shape.DimensionsCount());
+ assert(new_shape_size <= kMaxSmallSize);
+ Resize(new_shape_size);
+ const int size_increase = new_shape_size - shape.DimensionsCount();
+ for (int i = 0; i < size_increase; ++i)
+ {
+ SetDim(i, pad_value);
+ }
+ std::memcpy(DimsData() + size_increase, shape.DimsData(),
+ sizeof(int32_t) * shape.DimensionsCount());
+ }
+
+ int32_t _size;
+ union {
+ int32_t _dims[kMaxSmallSize];
+ int32_t *_dims_pointer;
+ };
+};
+
+inline int MatchingDim(const Shape &shape1, int index1, const Shape &shape2, int index2)
+{
+ UNUSED_RELEASE(shape2);
+ UNUSED_RELEASE(index2);
+ assert(shape1.Dims(index1) == shape2.Dims(index2));
+ return shape1.Dims(index1);
+}
+
+inline Shape GetShape(const std::vector<int32_t> &data) { return Shape(data.size(), data.data()); }
+
+inline int Offset(const Shape &shape, int i0, int i1, int i2, int i3)
+{
+ assert(shape.DimensionsCount() == 4);
+ const int *dims_data = shape.DimsDataUpTo4D();
+ assert(i0 >= 0 && i0 < dims_data[0]);
+ assert(i1 >= 0 && i1 < dims_data[1]);
+ assert(i2 >= 0 && i2 < dims_data[2]);
+ assert(i3 >= 0 && i3 < dims_data[3]);
+ return ((i0 * dims_data[1] + i1) * dims_data[2] + i2) * dims_data[3] + i3;
+}
+
+inline int FlatSizeSkipDim(const Shape &shape, int skip_dim)
+{
+ const int dims_count = shape.DimensionsCount();
+ assert(skip_dim >= 0 && skip_dim < dims_count);
+ const auto *dims_data = shape.DimsData();
+ int flat_size = 1;
+ for (int i = 0; i < dims_count; ++i)
+ {
+ flat_size *= (i == skip_dim) ? 1 : dims_data[i];
+ }
+ return flat_size;
+}
+
+// Flat size calculation, checking that dimensions match with one or more other
+// arrays.
+inline int MatchingFlatSize(const Shape &shape, const Shape &check_shape_0)
+{
+ UNUSED_RELEASE(check_shape_0);
+ assert(shape.DimensionsCount() == check_shape_0.DimensionsCount());
+ const int dims_count = shape.DimensionsCount();
+ for (int i = 0; i < dims_count; ++i)
+ {
+ assert(shape.Dims(i) == check_shape_0.Dims(i));
+ }
+ return shape.FlatSize();
+}
+
+inline int MatchingFlatSize(const Shape &shape, const Shape &check_shape_0,
+ const Shape &check_shape_1)
+{
+ UNUSED_RELEASE(check_shape_0);
+ assert(shape.DimensionsCount() == check_shape_0.DimensionsCount());
+ const int dims_count = shape.DimensionsCount();
+ for (int i = 0; i < dims_count; ++i)
+ {
+ assert(shape.Dims(i) == check_shape_0.Dims(i));
+ }
+ return MatchingFlatSize(shape, check_shape_1);
+}
+
+inline int MatchingFlatSizeSkipDim(const Shape &shape, int skip_dim, const Shape &check_shape_0)
+{
+ UNUSED_RELEASE(check_shape_0);
+ const int dims_count = shape.DimensionsCount();
+ for (int i = 0; i < dims_count; ++i)
+ {
+ if (i != skip_dim)
+ {
+ assert(shape.Dims(i) == check_shape_0.Dims(i));
+ }
+ }
+ return FlatSizeSkipDim(shape, skip_dim);
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_SHAPE_H__
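
A small usage sketch for Shape (assuming the cker include directory is on the include path): dims of rank <= 4 live inline in the union, Offset() addresses row-major NHWC data, and ExtendedShape() left-pads with 1s.

#include <cassert>

#include "cker/Shape.h"

int main()
{
  nnfw::cker::Shape s{1, 2, 3, 4}; // N, H, W, C
  assert(s.FlatSize() == 24);
  // Offset walks dimensions last-fastest: ((n*H + h)*W + w)*C + c.
  assert(nnfw::cker::Offset(s, 0, 1, 2, 3) == 23);
  // Smaller ranks can be promoted to 4-D by left-padding with 1s.
  nnfw::cker::Shape s2{3, 4};
  nnfw::cker::Shape ext = nnfw::cker::Shape::ExtendedShape(4, s2);
  assert(ext.Dims(0) == 1 && ext.Dims(1) == 1 && ext.Dims(2) == 3 && ext.Dims(3) == 4);
  return 0;
}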
diff --git a/runtimes/libs/cker/include/cker/Types.h b/runtimes/libs/cker/include/cker/Types.h
new file mode 100644
index 000000000..d8dedbd9c
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/Types.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_TYPES_H__
+#define __NNFW_CKER_TYPES_H__
+
+#include <cstdint>
+
+namespace nnfw
+{
+namespace cker
+{
+
+enum class FusedActivationFunctionType
+{
+ kNone = 0,
+ kRelu6 = 1,
+ kRelu1 = 2,
+ kRelu = 3,
+};
+enum class PaddingType
+{
+ kNone = 0,
+ kSame = 1,
+ kValid = 2,
+};
+
+struct PaddingValues
+{
+ int16_t width;
+ int16_t height;
+};
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_TYPES_H__
diff --git a/runtimes/libs/cker/include/cker/Utils.h b/runtimes/libs/cker/include/cker/Utils.h
new file mode 100644
index 000000000..84bbbc3c6
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/Utils.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_UTILS_H__
+#define __NNFW_CKER_UTILS_H__
+
+#include <algorithm>
+#include <cstdint>
+
+#include "cker/gemmlowp/FixedPoint.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+template <typename T>
+inline T ActivationFunctionWithMinMax(T x, T output_activation_min, T output_activation_max)
+{
+ return std::min<T>(std::max<T>(x, output_activation_min), output_activation_max);
+}
+
+inline int32_t MultiplyByQuantizedMultiplier(int32_t x, int32_t quantized_multiplier, int shift)
+{
+ int left_shift = shift > 0 ? shift : 0;
+ int right_shift = shift > 0 ? 0 : -shift;
+ return gemmlowp::RoundingDivideByPOT(
+ gemmlowp::SaturatingRoundingDoublingHighMul(x * (1 << left_shift), quantized_multiplier),
+ right_shift);
+}
+
+inline int32_t MultiplyByQuantizedMultiplierGreaterThanOne(int32_t x, int32_t quantized_multiplier,
+ int left_shift)
+{
+ return gemmlowp::SaturatingRoundingDoublingHighMul(x * (1 << left_shift), quantized_multiplier);
+}
+
+inline int CountLeadingZeros(uint32_t integer_input)
+{
+ const uint32_t one_in_leading_positive = 1U << 31;
+ int leading_zeros = 0;
+ while (integer_input < one_in_leading_positive)
+ {
+ integer_input <<= 1;
+ ++leading_zeros;
+ }
+ return leading_zeros;
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_UTILS_H__
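
MultiplyByQuantizedMultiplier implements the usual TFLite-style requantization: the caller (not shown in this patch) factors a real scale M as

    M ~= (quantized_multiplier / 2^31) * 2^shift,  quantized_multiplier in [2^30, 2^31)

For example, M = 0.375 = 0.75 * 2^-1 gives quantized_multiplier = round(0.75 * 2^31) = 1610612736 and shift = -1, so MultiplyByQuantizedMultiplier(100, 1610612736, -1) = 38 ~= round(0.375 * 100).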
diff --git a/runtimes/libs/cker/include/cker/gemmlowp/FixedPoint.h b/runtimes/libs/cker/include/cker/gemmlowp/FixedPoint.h
new file mode 100644
index 000000000..159e01a22
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/gemmlowp/FixedPoint.h
@@ -0,0 +1,289 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2015 The Gemmlowp Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_GEMMLOWP_FIXED_POINT_H__
+#define __NNFW_CKER_GEMMLOWP_FIXED_POINT_H__
+
+#include <algorithm>
+#include <cassert>
+#include <cstdint>
+#include <limits>
+
+namespace nnfw
+{
+namespace cker
+{
+namespace gemmlowp
+{
+
+inline int32_t RoundingHalfSum(int32_t a, int32_t b)
+{
+ int64_t a64 = a;
+ int64_t b64 = b;
+ int64_t sum = a64 + b64;
+ int64_t sign = sum >= 0 ? 1 : -1;
+ return static_cast<int32_t>((sum + sign) / 2);
+}
+
+inline int32_t SaturatingRoundingDoublingHighMul(int32_t a, int32_t b)
+{
+ bool overflow = a == b && a == std::numeric_limits<int32_t>::min();
+ int64_t a_64(a);
+ int64_t b_64(b);
+ int64_t ab_64 = a_64 * b_64;
+ int32_t nudge = ab_64 >= 0 ? (1 << 30) : (1 - (1 << 30));
+ int32_t ab_x2_high32 = static_cast<int32_t>((ab_64 + nudge) / (1ll << 31));
+ return overflow ? std::numeric_limits<int32_t>::max() : ab_x2_high32;
+}
+
+// Correctly-rounded-to-nearest division by a power-of-two.
+// Also known as a rounding arithmetic right shift.
+inline int32_t RoundingDivideByPOT(int32_t x, int exponent)
+{
+ assert(exponent >= 0);
+ assert(exponent <= 31);
+ const int32_t mask = ((1ll << exponent) - 1);
+ const int32_t zero = 0;
+ const int32_t one = 1;
+ const int32_t remainder = x & mask;
+ const int32_t threshold = (mask >> 1) + ((x < zero) ? one : zero);
+ return ((x >> exponent) + ((remainder > threshold) ? one : zero));
+}
+
+// Returns the product of a run-time integer value by a compile-time power
+// of two, with either a positive exponent (equivalent to an arithmetic
+// left shift, saturating) or a negative exponent (equivalent to an arithmetic
+// right shift, rounding to nearest).
+template <int Exponent, int ExponentSign = (Exponent > 0 ? 1 : Exponent < 0 ? -1 : 0)>
+struct ImplSaturatingRoundingMultiplyByPOT
+{
+};
+
+template <int Exponent> struct ImplSaturatingRoundingMultiplyByPOT<Exponent, 0>
+{
+ static int32_t eval(int32_t x) { return x; }
+};
+
+template <int Exponent> struct ImplSaturatingRoundingMultiplyByPOT<Exponent, 1>
+{
+ static int32_t eval(int32_t x)
+ {
+ const int32_t min = (std::numeric_limits<int32_t>::min());
+ const int32_t max = (std::numeric_limits<int32_t>::max());
+ const int32_t threshold = ((1 << (31 - Exponent)) - 1);
+ const int32_t zero = 0;
+ const int32_t one = 1;
+
+ const int32_t positive_mask = ((x > threshold) ? ~zero : zero);
+ const int32_t negative_mask = ((x < -threshold) ? ~zero : zero);
+
+ int32_t result = (x * (one << Exponent));
+ result = (positive_mask ? max : result);
+ result = (negative_mask ? min : result);
+ return result;
+ }
+};
+
+template <int Exponent> struct ImplSaturatingRoundingMultiplyByPOT<Exponent, -1>
+{
+ static int32_t eval(int32_t x) { return RoundingDivideByPOT(x, -Exponent); }
+};
+
+template <int Exponent> int32_t SaturatingRoundingMultiplyByPOT(int32_t x)
+{
+ return ImplSaturatingRoundingMultiplyByPOT<Exponent>::eval(x);
+}
+
+template <int tIntegerBits> class FixedPoint
+{
+public:
+ static constexpr int kTotalBits = 8 * sizeof(int32_t);
+ static constexpr int kIntegerBits = tIntegerBits;
+ static constexpr int kFractionalBits = kTotalBits - 1 - kIntegerBits;
+ static_assert(kIntegerBits >= 0 && kIntegerBits < kTotalBits, "bad IntegerBits");
+
+ static int32_t ScalarRawMax() { return std::numeric_limits<int32_t>::max(); }
+
+ static FixedPoint FromRaw(int32_t x)
+ {
+ FixedPoint retval;
+ retval.raw() = x;
+ return retval;
+ }
+
+ static FixedPoint FromScalarRaw(int32_t x) { return FromRaw(x); }
+
+ template <int Exponent> static FixedPoint ConstantPOT()
+ {
+ static constexpr int kOffset = kFractionalBits + Exponent;
+ static_assert(kOffset < 31, "Constant not exactly representable in this fixed-point format");
+ return FromScalarRaw((int32_t)1 << kOffset);
+ }
+
+ static FixedPoint Zero() { return FromScalarRaw(0); }
+
+ static FixedPoint One()
+ {
+ return FromScalarRaw(kIntegerBits == 0 ? ScalarRawMax() : ((int32_t)1 << kFractionalBits));
+ }
+
+ int32_t raw() const { return i_; }
+ int32_t &raw() { return i_; }
+
+private:
+ int32_t i_;
+};
+
+// A FixedPoint multiplication is just a
+// SaturatingRoundingDoublingHighMul operation on the underlying
+// raw integer values. The IntegerBits simply add up, as is obvious
+// from the fact that the range is [-2^IntegerBits, 2^IntegerBits).
+template <int tIntegerBits_a, int tIntegerBits_b>
+FixedPoint<tIntegerBits_a + tIntegerBits_b> operator*(FixedPoint<tIntegerBits_a> a,
+ FixedPoint<tIntegerBits_b> b)
+{
+ FixedPoint<tIntegerBits_a + tIntegerBits_b> c;
+ c.raw() = SaturatingRoundingDoublingHighMul(a.raw(), b.raw());
+ return c;
+}
+
+// Tweaking IntegerBits gives exact multiplication by a power of two.
+template <int tExponent, int tIntegerBits>
+FixedPoint<tExponent + tIntegerBits> ExactMulByPot(FixedPoint<tIntegerBits> a)
+{
+ FixedPoint<tExponent + tIntegerBits> c;
+ c.raw() = a.raw();
+ return c;
+}
+
+template <int tIntegerBits>
+FixedPoint<tIntegerBits> operator+(FixedPoint<tIntegerBits> a, FixedPoint<tIntegerBits> b)
+{
+ return FixedPoint<tIntegerBits>::FromRaw((a.raw() + b.raw()));
+}
+template <int tIntegerBits>
+FixedPoint<tIntegerBits> operator-(FixedPoint<tIntegerBits> a, FixedPoint<tIntegerBits> b)
+{
+ return FixedPoint<tIntegerBits>::FromRaw((a.raw() - b.raw()));
+}
+template <int tIntegerBits>
+FixedPoint<tIntegerBits> operator&(FixedPoint<tIntegerBits> a, FixedPoint<tIntegerBits> b)
+{
+ return FixedPoint<tIntegerBits>::FromRaw((a.raw() & b.raw()));
+}
+
+// Rescale changes the number of IntegerBits and updates the underlying
+// raw integer value accordingly.
+template <int tIntegerBitsDst, int tIntegerBitsSrc>
+FixedPoint<tIntegerBitsDst> Rescale(FixedPoint<tIntegerBitsSrc> x)
+{
+ static constexpr int kExponent = tIntegerBitsSrc - tIntegerBitsDst;
+ FixedPoint<tIntegerBitsDst> result;
+ result.raw() = SaturatingRoundingMultiplyByPOT<kExponent>(x.raw());
+ return result;
+}
+
+// Implementation of exponential function.
+
+// Returns exp(x) for x in [-1/4, 0).
+inline FixedPoint<0> exp_on_interval_between_negative_one_quarter_and_0_excl(FixedPoint<0> a)
+{
+ typedef FixedPoint<0> F;
+ const F constant_term = F::FromScalarRaw(RoundingDivideByPOT(1895147668, 0));
+ const F constant_1_over_3 = F::FromScalarRaw(RoundingDivideByPOT(715827883, 0));
+ // We're evaluating a Taylor expansion around -1/8, so we do the change of
+ // variable: x = a + 1/8.
+ // In fixed-point with 0 integer bits, 1/8 is represented by 1 << 28.
+ F x = a + F::template ConstantPOT<-3>();
+ F x2 = x * x;
+ F x3 = x2 * x;
+ F x4 = x2 * x2;
+ F x4_over_4 = F::FromScalarRaw(SaturatingRoundingMultiplyByPOT<-2>(x4.raw()));
+ F x4_over_24_plus_x3_over_6_plus_x2_over_2 = F::FromScalarRaw(
+ SaturatingRoundingMultiplyByPOT<-1>((((x4_over_4 + x3) * constant_1_over_3) + x2).raw()));
+ return (constant_term + constant_term * (x + x4_over_24_plus_x3_over_6_plus_x2_over_2));
+}
+
+// Returns exp(x) for x < 0.
+template <int tIntegerBits> FixedPoint<0> exp_on_negative_values(FixedPoint<tIntegerBits> a)
+{
+ typedef FixedPoint<tIntegerBits> InputF;
+ typedef FixedPoint<0> ResultF;
+ static constexpr int kFractionalBits = InputF::kFractionalBits;
+ static constexpr int kIntegerBits = InputF::kIntegerBits;
+ const InputF kOneQuarter = InputF::template ConstantPOT<-2>();
+ InputF mask = kOneQuarter - InputF::FromScalarRaw(1);
+ InputF a_mod_quarter_minus_one_quarter = (a & mask) - kOneQuarter;
+ ResultF result = exp_on_interval_between_negative_one_quarter_and_0_excl(
+ Rescale<0>(a_mod_quarter_minus_one_quarter));
+ int32_t remainder = (a_mod_quarter_minus_one_quarter - a).raw();
+
+#define GEMMLOWP_EXP_BARREL_SHIFTER(Exponent, FixedPointMultiplier) \
+ if (kIntegerBits > Exponent) \
+ { \
+ const ResultF kMultiplier = \
+ ResultF::FromScalarRaw(RoundingDivideByPOT(FixedPointMultiplier, 0)); \
+ static constexpr int kShiftAmount = \
+ ((kIntegerBits > Exponent) ? (kFractionalBits + Exponent) : 0); \
+ result = ((remainder & (1 << kShiftAmount)) ? (result * kMultiplier) : result); \
+ }
+
+ GEMMLOWP_EXP_BARREL_SHIFTER(-2, 1672461947);
+ GEMMLOWP_EXP_BARREL_SHIFTER(-1, 1302514674);
+ GEMMLOWP_EXP_BARREL_SHIFTER(+0, 790015084);
+ GEMMLOWP_EXP_BARREL_SHIFTER(+1, 290630308);
+ GEMMLOWP_EXP_BARREL_SHIFTER(+2, 39332535);
+ GEMMLOWP_EXP_BARREL_SHIFTER(+3, 720401);
+ GEMMLOWP_EXP_BARREL_SHIFTER(+4, 242);
+
+#undef GEMMLOWP_EXP_BARREL_SHIFTER
+
+ static constexpr int clampB = ((kIntegerBits > 5) ? (36 - kIntegerBits) : 0);
+ if (kIntegerBits > 5)
+ {
+ const InputF clamp = InputF::FromScalarRaw(RoundingDivideByPOT(-(1 << clampB), 0));
+ result.raw() = ((a.raw() < clamp.raw()) ? ResultF::Zero().raw() : result.raw());
+ }
+
+ result.raw() = (a.raw() ? result.raw() : ResultF::One().raw());
+ return result;
+}
+
+// Returns 1 / (1 + x) for x in (0, 1).
+inline FixedPoint<0> one_over_one_plus_x_for_x_in_0_1(FixedPoint<0> a)
+{
+ typedef FixedPoint<0> F0;
+ typedef FixedPoint<2> F2;
+ F0 half_denominator = F0::FromScalarRaw(RoundingHalfSum(a.raw(), F0::One().raw()));
+ // Newton-Raphson division
+ // https://en.wikipedia.org/wiki/Division_algorithm#Newton.E2.80.93Raphson_division
+ // Refer to that page for the logic behind the 48/17 and 32/17 constants.
+ const F2 constant_48_over_17 = F2::FromScalarRaw(RoundingDivideByPOT(1515870810, 0));
+ const F2 constant_neg_32_over_17 = F2::FromScalarRaw(RoundingDivideByPOT(-1010580540, 0));
+ F2 x = constant_48_over_17 + half_denominator * constant_neg_32_over_17;
+ for (int i = 0; i < 3; i++)
+ {
+ F2 half_denominator_times_x = half_denominator * x;
+ F2 one_minus_half_denominator_times_x = F2::One() - half_denominator_times_x;
+ x = x + Rescale<2>(x * one_minus_half_denominator_times_x);
+ }
+ return Rescale<0>(ExactMulByPot<-1>(x));
+}
+
+} // namespace gemmlowp
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_GEMMLOWP_FIXED_POINT_H__
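
The magic constants above are Q-format encodings: 1895147668 ~= exp(-1/8) * 2^31 and 715827883 ~= 2^31 / 3 (Q0.31), and 1515870810 ~= (48/17) * 2^29 (Q2.29, i.e. FixedPoint<2>). A few hand-checked spot tests for the rounding primitives, assuming the cker include directory is on the include path:

#include <cassert>
#include <cstdint>

#include "cker/gemmlowp/FixedPoint.h"

int main()
{
  using namespace nnfw::cker::gemmlowp;
  // Rounding arithmetic right shift: ties round away from zero.
  assert(RoundingDivideByPOT(5, 1) == 3);   //  2.5 ->  3
  assert(RoundingDivideByPOT(-5, 1) == -3); // -2.5 -> -3
  assert(RoundingDivideByPOT(4, 1) == 2);
  // SaturatingRoundingDoublingHighMul(a, b) ~ round(a * b / 2^31):
  // b = 2^30 behaves as multiplication by 0.5.
  assert(SaturatingRoundingDoublingHighMul(100, 1 << 30) == 50);
  return 0;
}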
diff --git a/runtimes/libs/cker/include/cker/operation/Add.h b/runtimes/libs/cker/include/cker/operation/Add.h
new file mode 100644
index 000000000..703d617f8
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/Add.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_ADD_H__
+#define __NNFW_CKER_ADD_H__
+
+#include "cker/Shape.h"
+#include "cker/Types.h"
+#include "cker/Utils.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct AddParam
+{
+ // Shape dependent / common to data / op types.
+ // BroadcastableOpCategory broadcast_category;
+ // uint8 inference params.
+ int32_t input1_offset;
+ int32_t input2_offset;
+ int32_t output_offset;
+ int32_t output_multiplier;
+ int32_t output_shift;
+ // Add / Sub, not Mul, uint8 inference params.
+ int32_t left_shift;
+ int32_t input1_multiplier;
+ int32_t input1_shift;
+ int32_t input2_multiplier;
+ int32_t input2_shift;
+ // uint8, etc, activation params.
+ int32_t quantized_activation_min;
+ int32_t quantized_activation_max;
+ // float activation params.
+ float float_activation_min;
+ float float_activation_max;
+
+ // Processed output dimensions.
+ // Let input "a" be the one that broadcasts in the faster-changing dimension.
+ // Then, after coalescing, for shapes {a0, a1, a2, a3, a4} and
+ // {b0, b1, b2, b3, b4},
+ // broadcast_shape[4] = b0 = a0.
+ // broadcast_shape[3] = b1; a1 = 1.
+ // broadcast_shape[2] = b2 = a2.
+ // broadcast_shape[1] = a3; b3 = 1.
+ // broadcast_shape[0] = b4 = a4.
+ // int broadcast_shape[5];
+};
+
+template <typename T>
+inline void Add(const AddParam &params, const Shape &input1_shape, const T *input1_data,
+ const Shape &input2_shape, const T *input2_data, const Shape &output_shape,
+ T *output_data)
+{
+ const int32_t flat_size = MatchingFlatSize(input1_shape, input2_shape, output_shape);
+ for (int i = 0; i < flat_size; ++i)
+ {
+ output_data[i] = ActivationFunctionWithMinMax(input1_data[i] + input2_data[i],
+ params.quantized_activation_min,
+ params.quantized_activation_max);
+ }
+}
+
+inline void Add(const AddParam &params, const Shape &input1_shape, const float *input1_data,
+ const Shape &input2_shape, const float *input2_data, const Shape &output_shape,
+ float *output_data)
+{
+ const int size = MatchingFlatSize(input1_shape, input2_shape, output_shape);
+ for (int i = 0; i < size; i++)
+ {
+ auto x = input1_data[i] + input2_data[i];
+ output_data[i] =
+ ActivationFunctionWithMinMax(x, params.float_activation_min, params.float_activation_max);
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_ADD_H__
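
Minimal usage of the float Add overload, as a sketch assuming the cker include path; the activation clamp comes from AddParam and is disabled here via infinities:

#include <cassert>
#include <limits>

#include "cker/Shape.h"
#include "cker/operation/Add.h"

int main()
{
  const nnfw::cker::Shape shape{1, 1, 1, 4};
  const float a[] = {1.f, 2.f, 3.f, 4.f};
  const float b[] = {10.f, 20.f, 30.f, 40.f};
  float out[4];
  nnfw::cker::AddParam params{};
  params.float_activation_min = -std::numeric_limits<float>::infinity();
  params.float_activation_max = std::numeric_limits<float>::infinity();
  nnfw::cker::Add(params, shape, a, shape, b, shape, out);
  assert(out[0] == 11.f && out[3] == 44.f);
  return 0;
}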
diff --git a/runtimes/libs/cker/include/cker/operation/AveragePool.h b/runtimes/libs/cker/include/cker/operation/AveragePool.h
new file mode 100644
index 000000000..81e99336f
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/AveragePool.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_AVERAGE_POOL_H__
+#define __NNFW_CKER_AVERAGE_POOL_H__
+
+#include "cker/Shape.h"
+#include "cker/Types.h"
+#include "cker/Utils.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct AveragePoolParams
+{
+ FusedActivationFunctionType activation;
+ PaddingType padding_type;
+ PaddingValues padding_values;
+ int stride_height;
+ int stride_width;
+ int filter_height;
+ int filter_width;
+ // uint8, etc, activation params.
+ int32_t quantized_activation_min;
+ int32_t quantized_activation_max;
+ // float activation params.
+ float float_activation_min;
+ float float_activation_max;
+};
+
+inline void AveragePool(const AveragePoolParams &params, const Shape &input_shape,
+ const float *input_data, const Shape &output_shape, float *output_data)
+{
+ assert(input_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int depth = MatchingDim(input_shape, 3, output_shape, 3);
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ const int stride_height = params.stride_height;
+ const int stride_width = params.stride_width;
+ for (int batch = 0; batch < batches; ++batch)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ const int in_x_origin = (out_x * stride_width) - params.padding_values.width;
+ const int in_y_origin = (out_y * stride_height) - params.padding_values.height;
+ // Compute the boundaries of the filter region clamped so as to
+ // ensure that the filter window fits in the input array.
+ const int filter_x_start = std::max(0, -in_x_origin);
+ const int filter_x_end = std::min(params.filter_width, input_width - in_x_origin);
+ const int filter_y_start = std::max(0, -in_y_origin);
+ const int filter_y_end = std::min(params.filter_height, input_height - in_y_origin);
+ int filter_count = (filter_y_end - filter_y_start) * (filter_x_end - filter_x_start);
+ if (filter_count <= 0)
+ {
+ continue;
+ }
+ for (int channel = 0; channel < depth; ++channel)
+ {
+ float total = 0.f;
+ for (int filter_y = filter_y_start; filter_y < filter_y_end; ++filter_y)
+ {
+ for (int filter_x = filter_x_start; filter_x < filter_x_end; ++filter_x)
+ {
+ const int in_x = in_x_origin + filter_x;
+ const int in_y = in_y_origin + filter_y;
+ total += input_data[Offset(input_shape, batch, in_y, in_x, channel)];
+ }
+ }
+ const float average = total / (float)filter_count;
+ output_data[Offset(output_shape, batch, out_y, out_x, channel)] =
+ ActivationFunctionWithMinMax(average, params.float_activation_min,
+ params.float_activation_max);
+ }
+ }
+ }
+ }
+}
+
+inline void AveragePool(const AveragePoolParams &params, const Shape &input_shape,
+ const uint8_t *input_data, const Shape &output_shape, uint8_t *output_data)
+{
+ assert(params.quantized_activation_min <= params.quantized_activation_max);
+ assert(input_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int depth = MatchingDim(input_shape, 3, output_shape, 3);
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ const int stride_height = params.stride_height;
+ const int stride_width = params.stride_width;
+ for (int batch = 0; batch < batches; ++batch)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ const int in_x_origin = (out_x * stride_width) - params.padding_values.width;
+ const int in_y_origin = (out_y * stride_height) - params.padding_values.height;
+ // Compute the boundaries of the filter region clamped so as to
+ // ensure that the filter window fits in the input array.
+ const int filter_x_start = std::max(0, -in_x_origin);
+ const int filter_x_end = std::min(params.filter_width, input_width - in_x_origin);
+ const int filter_y_start = std::max(0, -in_y_origin);
+ const int filter_y_end = std::min(params.filter_height, input_height - in_y_origin);
+ int filter_count = (filter_y_end - filter_y_start) * (filter_x_end - filter_x_start);
+ if (filter_count <= 0)
+ {
+ continue;
+ }
+ for (int channel = 0; channel < depth; ++channel)
+ {
+ int32_t acc = 0;
+ for (int filter_y = filter_y_start; filter_y < filter_y_end; ++filter_y)
+ {
+ for (int filter_x = filter_x_start; filter_x < filter_x_end; ++filter_x)
+ {
+ const int in_x = in_x_origin + filter_x;
+ const int in_y = in_y_origin + filter_y;
+ acc += input_data[Offset(input_shape, batch, in_y, in_x, channel)];
+ }
+ }
+ acc = (acc + filter_count / 2) / filter_count;
+ acc = std::max(acc, params.quantized_activation_min);
+ acc = std::min(acc, params.quantized_activation_max);
+ output_data[Offset(output_shape, batch, out_y, out_x, channel)] =
+ static_cast<uint8_t>(acc);
+ }
+ }
+ }
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_AVERAGE_POOL_H__
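
Two details worth noting above: the filter window is clamped at the borders, so filter_count shrinks and only in-bounds taps are averaged; and the uint8 path rounds to nearest with the (acc + count / 2) / count idiom, which is exact for the non-negative accumulators that uint8 inputs guarantee. Spot check:

#include <cassert>

int main()
{
  const int acc = 1 + 2 + 2 + 2; // window sum, true mean 1.75
  const int count = 4;
  assert((acc + count / 2) / count == 2); // rounds to nearest: 2
  return 0;
}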
diff --git a/runtimes/libs/cker/include/cker/operation/Concatenation.h b/runtimes/libs/cker/include/cker/operation/Concatenation.h
new file mode 100644
index 000000000..69a179c8c
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/Concatenation.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_CONCATENATION_H__
+#define __NNFW_CKER_CONCATENATION_H__
+
+#include <cstdint>
+
+#include "cker/Shape.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct ConcatenationParams
+{
+ int8_t axis;
+ const int32_t *input_zeropoint;
+ const float *input_scale;
+ uint16_t inputs_count;
+ int32_t output_zeropoint;
+ float output_scale;
+};
+
+template <typename Scalar>
+inline void Concatenation(const ConcatenationParams &params, const Shape *const *input_shapes,
+ const Scalar *const *input_data, const Shape &output_shape,
+ Scalar *output_data)
+{
+ int axis = params.axis;
+ int inputs_count = params.inputs_count;
+ const int concat_dimensions = output_shape.DimensionsCount();
+ assert(axis < concat_dimensions);
+
+ int64_t concat_size = 0;
+ for (int i = 0; i < inputs_count; i++)
+ {
+ assert(input_shapes[i]->DimensionsCount() == concat_dimensions);
+ for (int j = 0; j < concat_dimensions; j++)
+ {
+ if (j != axis)
+ {
+ auto dim_checked = MatchingDim(*input_shapes[i], j, output_shape, j);
+ UNUSED_RELEASE(dim_checked);
+ }
+ }
+ concat_size += input_shapes[i]->Dims(axis);
+ }
+ assert(concat_size == output_shape.Dims(axis));
+ int64_t outer_size = 1;
+ for (int i = 0; i < axis; ++i)
+ {
+ outer_size *= output_shape.Dims(i);
+ }
+ // For all input arrays,
+ // FlatSize() = outer_size * Dims(axis) * base_inner_size;
+ int64_t base_inner_size = 1;
+ for (int i = axis + 1; i < concat_dimensions; ++i)
+ {
+ base_inner_size *= output_shape.Dims(i);
+ }
+
+ Scalar *output_ptr = output_data;
+ for (int k = 0; k < outer_size; k++)
+ {
+ for (int i = 0; i < inputs_count; ++i)
+ {
+ const int copy_size = input_shapes[i]->Dims(axis) * base_inner_size;
+ memcpy(output_ptr, input_data[i] + k * copy_size, copy_size * sizeof(Scalar));
+ output_ptr += copy_size;
+ }
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_CONCATENATION_H__
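
A concatenation sketch along axis 1 of two rank-2 inputs (assuming the cker include path). Per the decomposition above, outer_size = 2 and base_inner_size = 1, so the kernel copies one row slice from each input in turn, giving {1, 2, 5, 6, 7, 3, 4, 8, 9, 10}:

#include <cassert>

#include "cker/Shape.h"
#include "cker/operation/Concatenation.h"

int main()
{
  const nnfw::cker::Shape in0{2, 2}, in1{2, 3}, out{2, 5};
  const float d0[] = {1, 2, 3, 4};        // rows [1 2], [3 4]
  const float d1[] = {5, 6, 7, 8, 9, 10}; // rows [5 6 7], [8 9 10]
  const nnfw::cker::Shape *shapes[] = {&in0, &in1};
  const float *data[] = {d0, d1};
  float result[10];
  nnfw::cker::ConcatenationParams params{};
  params.axis = 1;
  params.inputs_count = 2;
  nnfw::cker::Concatenation(params, shapes, data, out, result);
  assert(result[2] == 5 && result[5] == 3 && result[7] == 8);
  return 0;
}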
diff --git a/runtimes/libs/cker/include/cker/operation/Conv.h b/runtimes/libs/cker/include/cker/operation/Conv.h
new file mode 100644
index 000000000..35b0336fa
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/Conv.h
@@ -0,0 +1,217 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_CONV_H__
+#define __NNFW_CKER_CONV_H__
+
+#include "cker/Types.h"
+#include "cker/Shape.h"
+#include "cker/Utils.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct ConvParams
+{
+ PaddingType padding_type;
+ PaddingValues padding_values;
+ // TODO(starka): This was just "stride", so check that width+height is OK.
+ int16_t stride_width;
+ int16_t stride_height;
+ int16_t dilation_width_factor;
+ int16_t dilation_height_factor;
+ // uint8_t inference params.
+ // TODO(b/65838351): Use smaller types if appropriate.
+ int32_t input_offset;
+ int32_t weights_offset;
+ int32_t output_offset;
+ int32_t output_multiplier;
+ int output_shift;
+ // uint8_t, etc, activation params.
+ int32_t quantized_activation_min;
+ int32_t quantized_activation_max;
+ // float activation params.
+ float float_activation_min;
+ float float_activation_max;
+};
+
+inline void Conv(const ConvParams &params, const Shape &input_shape, const float *input_data,
+ const Shape &filter_shape, const float *filter_data, const Shape &bias_shape,
+ const float *bias_data, const Shape &output_shape, float *output_data)
+{
+ const int stride_width = params.stride_width;
+ const int stride_height = params.stride_height;
+ const int dilation_width_factor = params.dilation_width_factor;
+ const int dilation_height_factor = params.dilation_height_factor;
+ const int pad_width = params.padding_values.width;
+ const int pad_height = params.padding_values.height;
+ const float output_activation_min = params.float_activation_min;
+ const float output_activation_max = params.float_activation_max;
+ assert(input_shape.DimensionsCount() == 4);
+ assert(filter_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+ UNUSED_RELEASE(bias_shape);
+
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
+ const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
+ if (bias_data)
+ {
+ assert(bias_shape.FlatSize() == output_depth);
+ }
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int filter_height = filter_shape.Dims(1);
+ const int filter_width = filter_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ for (int batch = 0; batch < batches; ++batch)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ for (int out_channel = 0; out_channel < output_depth; ++out_channel)
+ {
+ const int in_x_origin = (out_x * stride_width) - pad_width;
+ const int in_y_origin = (out_y * stride_height) - pad_height;
+ float total = 0.f;
+ for (int filter_y = 0; filter_y < filter_height; ++filter_y)
+ {
+ for (int filter_x = 0; filter_x < filter_width; ++filter_x)
+ {
+ const int in_x = in_x_origin + dilation_width_factor * filter_x;
+ const int in_y = in_y_origin + dilation_height_factor * filter_y;
+ // If the location is outside the bounds of the input image,
+ // use zero as a default value.
+ if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) && (in_y < input_height))
+ {
+ const int in_offset = Offset(input_shape, batch, in_y, in_x, 0);
+ const int filter_offset = Offset(filter_shape, out_channel, filter_y, filter_x, 0);
+ for (int in_channel = 0; in_channel < input_depth; ++in_channel)
+ {
+ float input_value = input_data[in_offset + in_channel];
+ float filter_value = filter_data[filter_offset + in_channel];
+ total += (input_value * filter_value);
+ }
+ }
+ }
+ }
+ float bias_value = 0.0f;
+ if (bias_data)
+ {
+ bias_value = bias_data[out_channel];
+ }
+ output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] =
+ ActivationFunctionWithMinMax(total + bias_value, output_activation_min,
+ output_activation_max);
+ }
+ }
+ }
+ }
+}
+
+inline void Conv(const ConvParams &params, const Shape &input_shape, const uint8_t *input_data,
+ const Shape &filter_shape, const uint8_t *filter_data, const Shape &bias_shape,
+ const int32_t *bias_data, const Shape &output_shape, uint8_t *output_data)
+{
+ const int stride_width = params.stride_width;
+ const int stride_height = params.stride_height;
+ const int dilation_width_factor = params.dilation_width_factor;
+ const int dilation_height_factor = params.dilation_height_factor;
+ const int pad_width = params.padding_values.width;
+ const int pad_height = params.padding_values.height;
+ const int32_t input_offset = params.input_offset;
+ const int32_t filter_offset = params.weights_offset;
+ const int32_t output_offset = params.output_offset;
+ const int32_t output_multiplier = params.output_multiplier;
+ const int output_shift = params.output_shift;
+ const int32_t output_activation_min = params.quantized_activation_min;
+ const int32_t output_activation_max = params.quantized_activation_max;
+ assert(output_activation_min <= output_activation_max);
+
+ assert(input_shape.DimensionsCount() == 4);
+ assert(filter_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+ UNUSED_RELEASE(bias_shape);
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
+ const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
+ if (bias_data)
+ {
+ assert(bias_shape.FlatSize() == output_depth);
+ }
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int filter_height = filter_shape.Dims(1);
+ const int filter_width = filter_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ for (int batch = 0; batch < batches; ++batch)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ for (int out_channel = 0; out_channel < output_depth; ++out_channel)
+ {
+ const int in_x_origin = (out_x * stride_width) - pad_width;
+ const int in_y_origin = (out_y * stride_height) - pad_height;
+ int32_t acc = 0;
+ for (int filter_y = 0; filter_y < filter_height; ++filter_y)
+ {
+ for (int filter_x = 0; filter_x < filter_width; ++filter_x)
+ {
+ const int in_x = in_x_origin + dilation_width_factor * filter_x;
+ const int in_y = in_y_origin + dilation_height_factor * filter_y;
+ // If the location is outside the bounds of the input image,
+ // use zero as a default value.
+ const int in_base = Offset(input_shape, batch, in_y, in_x, 0);
+ const int filter_base = Offset(filter_shape, out_channel, filter_y, filter_x, 0);
+ if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) && (in_y < input_height))
+ {
+ for (int in_channel = 0; in_channel < input_depth; in_channel++)
+ {
+ int32_t input_val = input_data[in_channel + in_base];
+ int32_t filter_val = filter_data[in_channel + filter_base];
+ acc += (filter_val + filter_offset) * (input_val + input_offset);
+ }
+ }
+ }
+ }
+ if (bias_data)
+ {
+ acc += bias_data[out_channel];
+ }
+ acc = MultiplyByQuantizedMultiplier(acc, output_multiplier, output_shift);
+ acc += output_offset;
+ acc = std::max(acc, output_activation_min);
+ acc = std::min(acc, output_activation_max);
+ output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] =
+ static_cast<uint8_t>(acc);
+ }
+ }
+ }
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_CONV_H__
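
The uint8 Conv above follows the standard affine-quantization scheme (the offsets here are, by the usual TFLite convention, the pre-negated zero points):

    acc = sum over (filter_y, filter_x, in_channel) of
          (filter_val + weights_offset) * (input_val + input_offset)
    out = clamp(MultiplyByQuantizedMultiplier(acc, output_multiplier, output_shift)
                + output_offset, quantized_activation_min, quantized_activation_max)

where (output_multiplier, output_shift) encode the real scale input_scale * filter_scale / output_scale, factored as noted for MultiplyByQuantizedMultiplier above.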
diff --git a/runtimes/libs/cker/include/cker/operation/DepthwiseConv.h b/runtimes/libs/cker/include/cker/operation/DepthwiseConv.h
new file mode 100644
index 000000000..7d022477d
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/DepthwiseConv.h
@@ -0,0 +1,217 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_DEPTHWISE_CONV_H__
+#define __NNFW_CKER_DEPTHWISE_CONV_H__
+
+#include "cker/Shape.h"
+#include "cker/Types.h"
+#include "cker/Utils.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct DepthwiseConvParams
+{
+ PaddingType padding_type;
+ PaddingValues padding_values;
+ int16_t stride_width;
+ int16_t stride_height;
+ int16_t dilation_width_factor;
+ int16_t dilation_height_factor;
+ int16_t depth_multiplier;
+ // uint8 inference params.
+ // TODO(b/65838351): Use smaller types if appropriate.
+ int32_t input_offset;
+ int32_t weights_offset;
+ int32_t output_offset;
+ int32_t output_multiplier;
+ int output_shift;
+ // uint8, etc, activation params.
+ int32_t quantized_activation_min;
+ int32_t quantized_activation_max;
+ // float activation params.
+ float float_activation_min;
+ float float_activation_max;
+};
+
+inline void DepthwiseConv(const DepthwiseConvParams &params, const Shape &input_shape,
+ const uint8_t *input_data, const Shape &filter_shape,
+ const uint8_t *filter_data, const Shape &bias_shape,
+ const int32_t *bias_data, const Shape &output_shape, uint8_t *output_data)
+{
+ const int stride_width = params.stride_width;
+ const int stride_height = params.stride_height;
+ const int dilation_width_factor = params.dilation_width_factor;
+ const int dilation_height_factor = params.dilation_height_factor;
+ const int pad_width = params.padding_values.width;
+ const int pad_height = params.padding_values.height;
+ const int depth_multiplier = params.depth_multiplier;
+ const int32_t output_activation_min = params.quantized_activation_min;
+ const int32_t output_activation_max = params.quantized_activation_max;
+ const int32_t input_offset = params.input_offset;
+ const int32_t filter_offset = params.weights_offset;
+ const int32_t output_offset = params.output_offset;
+ const int32_t output_multiplier = params.output_multiplier;
+ const int output_shift = params.output_shift;
+ assert(input_shape.DimensionsCount() == 4);
+ assert(filter_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+
+ assert(output_activation_min <= output_activation_max);
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3);
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int input_depth = input_shape.Dims(3);
+ const int filter_height = filter_shape.Dims(1);
+ const int filter_width = filter_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ assert(output_depth == input_depth * depth_multiplier);
+ assert(bias_shape.FlatSize() == output_depth);
+ UNUSED_RELEASE(output_depth);
+ UNUSED_RELEASE(bias_shape);
+
+ for (int b = 0; b < batches; ++b)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ for (int ic = 0; ic < input_depth; ++ic)
+ {
+ for (int m = 0; m < depth_multiplier; m++)
+ {
+ const int oc = m + ic * depth_multiplier;
+ const int in_x_origin = (out_x * stride_width) - pad_width;
+ const int in_y_origin = (out_y * stride_height) - pad_height;
+ int32_t acc = 0;
+ for (int filter_y = 0; filter_y < filter_height; ++filter_y)
+ {
+ for (int filter_x = 0; filter_x < filter_width; ++filter_x)
+ {
+ const int in_x = in_x_origin + dilation_width_factor * filter_x;
+ const int in_y = in_y_origin + dilation_height_factor * filter_y;
+ // If the location is outside the bounds of the input image,
+ // use zero as a default value.
+ if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) && (in_y < input_height))
+ {
+ int32_t input_val = input_data[Offset(input_shape, b, in_y, in_x, ic)];
+ int32_t filter_val = filter_data[Offset(filter_shape, 0, filter_y, filter_x, oc)];
+ acc += (filter_val + filter_offset) * (input_val + input_offset);
+ }
+ }
+ }
+ if (bias_data)
+ {
+ acc += bias_data[oc];
+ }
+ acc = MultiplyByQuantizedMultiplier(acc, output_multiplier, output_shift);
+ acc += output_offset;
+ acc = std::max(acc, output_activation_min);
+ acc = std::min(acc, output_activation_max);
+ output_data[Offset(output_shape, b, out_y, out_x, oc)] = static_cast<uint8_t>(acc);
+ }
+ }
+ }
+ }
+ }
+}
+
+inline void DepthwiseConv(const DepthwiseConvParams &params, const Shape &input_shape,
+ const float *input_data, const Shape &filter_shape,
+ const float *filter_data, const Shape &bias_shape, const float *bias_data,
+ const Shape &output_shape, float *output_data)
+{
+ const int stride_width = params.stride_width;
+ const int stride_height = params.stride_height;
+ const int dilation_width_factor = params.dilation_width_factor;
+ const int dilation_height_factor = params.dilation_height_factor;
+ const int pad_width = params.padding_values.width;
+ const int pad_height = params.padding_values.height;
+ const int depth_multiplier = params.depth_multiplier;
+ const float output_activation_min = params.float_activation_min;
+ const float output_activation_max = params.float_activation_max;
+ assert(input_shape.DimensionsCount() == 4);
+ assert(filter_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3);
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int input_depth = input_shape.Dims(3);
+ const int filter_height = filter_shape.Dims(1);
+ const int filter_width = filter_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ assert(output_depth == input_depth * depth_multiplier);
+ assert(bias_shape.FlatSize() == output_depth);
+ UNUSED_RELEASE(output_depth);
+ UNUSED_RELEASE(bias_shape);
+
+ for (int b = 0; b < batches; ++b)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ for (int ic = 0; ic < input_depth; ++ic)
+ {
+ for (int m = 0; m < depth_multiplier; m++)
+ {
+ const int oc = m + ic * depth_multiplier;
+ const int in_x_origin = (out_x * stride_width) - pad_width;
+ const int in_y_origin = (out_y * stride_height) - pad_height;
+ float total = 0.f;
+ for (int filter_y = 0; filter_y < filter_height; ++filter_y)
+ {
+ for (int filter_x = 0; filter_x < filter_width; ++filter_x)
+ {
+ const int in_x = in_x_origin + dilation_width_factor * filter_x;
+ const int in_y = in_y_origin + dilation_height_factor * filter_y;
+ // If the location is outside the bounds of the input image,
+ // use zero as a default value.
+ if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) && (in_y < input_height))
+ {
+ float input_value = input_data[Offset(input_shape, b, in_y, in_x, ic)];
+ float filter_value = filter_data[Offset(filter_shape, 0, filter_y, filter_x, oc)];
+ total += (input_value * filter_value);
+ }
+ }
+ }
+ float bias_value = 0.0f;
+ if (bias_data)
+ {
+ bias_value = bias_data[oc];
+ }
+ output_data[Offset(output_shape, b, out_y, out_x, oc)] = ActivationFunctionWithMinMax(
+ total + bias_value, output_activation_min, output_activation_max);
+ }
+ }
+ }
+ }
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_DEPTHWISE_CONV_H__
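
For reference (not part of the patch), a minimal sketch of driving the float DepthwiseConv kernel above. It assumes cker::Shape accepts an initializer list, as TFLite's RuntimeShape (which cker mirrors) does; with a 1x1 filter and depth_multiplier 1, every element is simply scaled and biased:

#include "cker/operation/DepthwiseConv.h"

#include <cstdio>
#include <limits>

int main()
{
  nnfw::cker::DepthwiseConvParams params{};
  params.stride_width = 1;
  params.stride_height = 1;
  params.dilation_width_factor = 1;
  params.dilation_height_factor = 1;
  params.padding_values.width = 0;
  params.padding_values.height = 0;
  params.depth_multiplier = 1;
  params.float_activation_min = std::numeric_limits<float>::lowest();
  params.float_activation_max = std::numeric_limits<float>::max();

  // NHWC shapes; Shape from an initializer list is assumed, as in RuntimeShape.
  const nnfw::cker::Shape input_shape{1, 2, 2, 1};
  const nnfw::cker::Shape filter_shape{1, 1, 1, 1}; // [1, H, W, input_depth * multiplier]
  const nnfw::cker::Shape bias_shape{1};
  const nnfw::cker::Shape output_shape{1, 2, 2, 1};

  const float input[] = {1.f, 2.f, 3.f, 4.f};
  const float filter[] = {0.5f};
  const float bias[] = {1.f};
  float output[4];

  nnfw::cker::DepthwiseConv(params, input_shape, input, filter_shape, filter, bias_shape, bias,
                            output_shape, output);

  for (float v : output)
    std::printf("%f\n", v); // 1.5, 2.0, 2.5, 3.0
}
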
diff --git a/runtimes/libs/cker/include/cker/operation/FullyConnected.h b/runtimes/libs/cker/include/cker/operation/FullyConnected.h
new file mode 100644
index 000000000..428fb1b53
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/FullyConnected.h
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_FULLY_CONNECTED_H__
+#define __NNFW_CKER_FULLY_CONNECTED_H__
+
+#include "cker/Shape.h"
+#include "cker/Utils.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct FullyConnectedParams
+{
+ // uint8 inference params.
+ // TODO(b/65838351): Use smaller types if appropriate.
+ int32_t input_offset;
+ int32_t weights_offset;
+ int32_t output_offset;
+ int32_t output_multiplier;
+ int output_shift;
+ // uint8, etc., activation params.
+ int32_t quantized_activation_min;
+ int32_t quantized_activation_max;
+ // float activation params.
+ float float_activation_min;
+ float float_activation_max;
+ // FullyConnectedWeightsFormat weights_format;
+};
+
+inline void FullyConnected(const FullyConnectedParams &params, const Shape &input_shape,
+ const float *input_data, const Shape &weights_shape,
+ const float *weights_data, const Shape &bias_shape,
+ const float *bias_data, const Shape &output_shape, float *output_data)
+{
+ UNUSED_RELEASE(input_shape);
+ UNUSED_RELEASE(bias_shape);
+ const float output_activation_min = params.float_activation_min;
+ const float output_activation_max = params.float_activation_max;
+ // TODO(benoitjacob): This really should be:
+ // const int batches = ArraySize(output_dims, 1);
+ // but the current --variable_batch hack consists in overwriting the 3rd
+ // dimension with the runtime batch size, as we don't keep track for each
+ // array of which dimension is the batch dimension in it.
+ const int output_dims_count = output_shape.DimensionsCount();
+ const int weights_dims_count = weights_shape.DimensionsCount();
+ const int batches = FlatSizeSkipDim(output_shape, output_dims_count - 1);
+ const int output_depth =
+ MatchingDim(weights_shape, weights_dims_count - 2, output_shape, output_dims_count - 1);
+ const int accum_depth = weights_shape.Dims(weights_dims_count - 1);
+ for (int b = 0; b < batches; ++b)
+ {
+ for (int out_c = 0; out_c < output_depth; ++out_c)
+ {
+ float total = 0.f;
+ for (int d = 0; d < accum_depth; ++d)
+ {
+ total += input_data[b * accum_depth + d] * weights_data[out_c * accum_depth + d];
+ }
+ float bias_value = 0.0f;
+ if (bias_data)
+ {
+ bias_value = bias_data[out_c];
+ }
+ output_data[out_c + output_depth * b] = ActivationFunctionWithMinMax(
+ total + bias_value, output_activation_min, output_activation_max);
+ }
+ }
+}
+
+inline void FullyConnected(const FullyConnectedParams &params, const Shape &input_shape,
+ const uint8_t *input_data, const Shape &filter_shape,
+ const uint8_t *filter_data, const Shape &bias_shape,
+ const int32_t *bias_data, const Shape &output_shape,
+ uint8_t *output_data)
+{
+ UNUSED_RELEASE(input_shape);
+ UNUSED_RELEASE(bias_shape);
+ const int32_t input_offset = params.input_offset;
+ const int32_t filter_offset = params.weights_offset;
+ const int32_t output_offset = params.output_offset;
+ const int32_t output_multiplier = params.output_multiplier;
+ const int output_shift = params.output_shift;
+ const int32_t output_activation_min = params.quantized_activation_min;
+ const int32_t output_activation_max = params.quantized_activation_max;
+ assert(filter_shape.DimensionsCount() >= 2);
+ assert(output_shape.DimensionsCount() >= 1);
+
+ assert(output_activation_min <= output_activation_max);
+ // TODO(benoitjacob): This really should be:
+ // const int batches = ArraySize(output_dims, 1);
+ // but the current --variable_batch hack consists in overwriting the 3rd
+ // dimension with the runtime batch size, as we don't keep track for each
+ // array of which dimension is the batch dimension in it.
+ const int output_dim_count = output_shape.DimensionsCount();
+ const int filter_dim_count = filter_shape.DimensionsCount();
+ const int batches = FlatSizeSkipDim(output_shape, output_dim_count - 1);
+ const int output_depth =
+ MatchingDim(filter_shape, filter_dim_count - 2, output_shape, output_dim_count - 1);
+ const int accum_depth = filter_shape.Dims(filter_dim_count - 1);
+ for (int b = 0; b < batches; ++b)
+ {
+ for (int out_c = 0; out_c < output_depth; ++out_c)
+ {
+ int32_t acc = 0;
+ for (int d = 0; d < accum_depth; ++d)
+ {
+ int32_t input_val = input_data[b * accum_depth + d];
+ int32_t filter_val = filter_data[out_c * accum_depth + d];
+ acc += (filter_val + filter_offset) * (input_val + input_offset);
+ }
+ if (bias_data)
+ {
+ acc += bias_data[out_c];
+ }
+ acc = MultiplyByQuantizedMultiplier(acc, output_multiplier, output_shift);
+ acc += output_offset;
+ acc = std::max(acc, output_activation_min);
+ acc = std::min(acc, output_activation_max);
+ output_data[out_c + output_depth * b] = static_cast<uint8_t>(acc);
+ }
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_FULLY_CONNECTED_H__
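
For reference (not part of the patch), a sketch of the float FullyConnected path above. Weights are laid out row-major as [output_depth, accum_depth], so each output is the dot product of one weight row with the input plus its bias; Shape construction from an initializer list is again assumed:

#include "cker/operation/FullyConnected.h"

#include <cstdio>
#include <limits>

int main()
{
  nnfw::cker::FullyConnectedParams params{};
  params.float_activation_min = std::numeric_limits<float>::lowest();
  params.float_activation_max = std::numeric_limits<float>::max();

  const nnfw::cker::Shape input_shape{1, 2};   // [batches, accum_depth]
  const nnfw::cker::Shape weights_shape{2, 2}; // [output_depth, accum_depth], row-major
  const nnfw::cker::Shape bias_shape{2};
  const nnfw::cker::Shape output_shape{1, 2};  // [batches, output_depth]

  const float input[] = {1.f, 2.f};
  const float weights[] = {1.f, 0.f,  // weight row for output 0
                           0.f, 1.f}; // weight row for output 1
  const float bias[] = {10.f, 20.f};
  float output[2];

  nnfw::cker::FullyConnected(params, input_shape, input, weights_shape, weights, bias_shape, bias,
                             output_shape, output);

  std::printf("%f %f\n", output[0], output[1]); // 11.0 22.0
}
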
diff --git a/runtimes/libs/cker/include/cker/operation/MaxPool.h b/runtimes/libs/cker/include/cker/operation/MaxPool.h
new file mode 100644
index 000000000..9619e26c2
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/MaxPool.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_MAX_POOL_H__
+#define __NNFW_CKER_MAX_POOL_H__
+
+#include "cker/Shape.h"
+#include "cker/Types.h"
+#include "cker/Utils.h"
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct MaxPoolParams
+{
+ FusedActivationFunctionType activation;
+ PaddingType padding_type;
+ PaddingValues padding_values;
+ int stride_height;
+ int stride_width;
+ int filter_height;
+ int filter_width;
+ // uint8, etc., activation params.
+ int32_t quantized_activation_min;
+ int32_t quantized_activation_max;
+ // float activation params.
+ float float_activation_min;
+ float float_activation_max;
+};
+
+inline void MaxPool(const MaxPoolParams &params, const Shape &input_shape, const float *input_data,
+ const Shape &output_shape, float *output_data)
+{
+ assert(input_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int depth = MatchingDim(input_shape, 3, output_shape, 3);
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ const int stride_height = params.stride_height;
+ const int stride_width = params.stride_width;
+ for (int batch = 0; batch < batches; ++batch)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ for (int channel = 0; channel < depth; ++channel)
+ {
+ const int in_x_origin = (out_x * stride_width) - params.padding_values.width;
+ const int in_y_origin = (out_y * stride_height) - params.padding_values.height;
+ // Compute the boundaries of the filter region clamped so as to
+ // ensure that the filter window fits in the input array.
+ const int filter_x_start = std::max(0, -in_x_origin);
+ const int filter_x_end = std::min(params.filter_width, input_width - in_x_origin);
+ const int filter_y_start = std::max(0, -in_y_origin);
+ const int filter_y_end = std::min(params.filter_height, input_height - in_y_origin);
+ float max = std::numeric_limits<float>::lowest();
+ for (int filter_y = filter_y_start; filter_y < filter_y_end; ++filter_y)
+ {
+ for (int filter_x = filter_x_start; filter_x < filter_x_end; ++filter_x)
+ {
+ const int in_x = in_x_origin + filter_x;
+ const int in_y = in_y_origin + filter_y;
+ max = std::max(max, input_data[Offset(input_shape, batch, in_y, in_x, channel)]);
+ }
+ }
+ output_data[Offset(output_shape, batch, out_y, out_x, channel)] =
+ ActivationFunctionWithMinMax(max, params.float_activation_min,
+ params.float_activation_max);
+ }
+ }
+ }
+ }
+}
+
+inline void MaxPool(const MaxPoolParams &params, const Shape &input_shape,
+ const uint8_t *input_data, const Shape &output_shape, uint8_t *output_data)
+{
+ assert(params.quantized_activation_min <= params.quantized_activation_max);
+ assert(params.quantized_activation_min >= 0);
+ assert(params.quantized_activation_max <= 255);
+ assert(input_shape.DimensionsCount() == 4);
+ assert(output_shape.DimensionsCount() == 4);
+ const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+ const int depth = MatchingDim(input_shape, 3, output_shape, 3);
+ const int input_height = input_shape.Dims(1);
+ const int input_width = input_shape.Dims(2);
+ const int output_height = output_shape.Dims(1);
+ const int output_width = output_shape.Dims(2);
+ const int stride_height = params.stride_height;
+ const int stride_width = params.stride_width;
+ for (int batch = 0; batch < batches; ++batch)
+ {
+ for (int out_y = 0; out_y < output_height; ++out_y)
+ {
+ for (int out_x = 0; out_x < output_width; ++out_x)
+ {
+ for (int channel = 0; channel < depth; ++channel)
+ {
+ const int in_x_origin = (out_x * stride_width) - params.padding_values.width;
+ const int in_y_origin = (out_y * stride_height) - params.padding_values.height;
+ // Compute the boundaries of the filter region clamped so as to
+ // ensure that the filter window fits in the input array.
+ const int filter_x_start = std::max(0, -in_x_origin);
+ const int filter_x_end = std::min(params.filter_width, input_width - in_x_origin);
+ const int filter_y_start = std::max(0, -in_y_origin);
+ const int filter_y_end = std::min(params.filter_height, input_height - in_y_origin);
+ uint8_t max = 0;
+ for (int filter_y = filter_y_start; filter_y < filter_y_end; ++filter_y)
+ {
+ for (int filter_x = filter_x_start; filter_x < filter_x_end; ++filter_x)
+ {
+ const int in_x = in_x_origin + filter_x;
+ const int in_y = in_y_origin + filter_y;
+ max = std::max(max, input_data[Offset(input_shape, batch, in_y, in_x, channel)]);
+ }
+ }
+ max = std::max<uint8_t>(max, params.quantized_activation_min);
+ max = std::min<uint8_t>(max, params.quantized_activation_max);
+ output_data[Offset(output_shape, batch, out_y, out_x, channel)] =
+ static_cast<uint8_t>(max);
+ }
+ }
+ }
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_MAX_POOL_H__
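
For reference (not part of the patch), a sketch of the float MaxPool above: a 2x2 window with stride 2 and no padding reduces a 1x2x2x1 input to its single maximum. Shape construction from an initializer list is assumed:

#include "cker/operation/MaxPool.h"

#include <cstdio>
#include <limits>

int main()
{
  nnfw::cker::MaxPoolParams params{};
  params.stride_height = 2;
  params.stride_width = 2;
  params.filter_height = 2;
  params.filter_width = 2;
  params.padding_values.width = 0;
  params.padding_values.height = 0;
  params.float_activation_min = std::numeric_limits<float>::lowest();
  params.float_activation_max = std::numeric_limits<float>::max();

  const nnfw::cker::Shape input_shape{1, 2, 2, 1};
  const nnfw::cker::Shape output_shape{1, 1, 1, 1};
  const float input[] = {1.f, 4.f, 2.f, 3.f};
  float output[1];

  nnfw::cker::MaxPool(params, input_shape, input, output_shape, output);
  std::printf("%f\n", output[0]); // 4.0
}
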
diff --git a/runtimes/libs/cker/include/cker/operation/SoftMax.h b/runtimes/libs/cker/include/cker/operation/SoftMax.h
new file mode 100644
index 000000000..322f5d5a2
--- /dev/null
+++ b/runtimes/libs/cker/include/cker/operation/SoftMax.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_CKER_SOFTMAX_H__
+#define __NNFW_CKER_SOFTMAX_H__
+
+#include "cker/Shape.h"
+#include "cker/Utils.h"
+#include "cker/gemmlowp/FixedPoint.h"
+
+#include <cmath>
+
+namespace nnfw
+{
+namespace cker
+{
+
+struct SoftmaxParams
+{
+ // beta is not really used (not a Tensorflow parameter) and not implemented
+ // for LogSoftmax.
+ double beta;
+ // uint8 inference params. Used even when beta defaults to 1.0.
+ int32_t input_multiplier;
+ int32_t input_left_shift;
+ // Reverse scaling is only used by LogSoftmax.
+ int32_t reverse_scaling_divisor;
+ int32_t reverse_scaling_right_shift;
+ int diff_min;
+};
+
+inline void Softmax(const SoftmaxParams &params, const Shape &input_shape, const float *input_data,
+ const Shape &output_shape, float *output_data)
+{
+ const int trailing_dim = input_shape.DimensionsCount() - 1;
+ const int outer_size = MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape);
+ const int depth = MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim);
+
+ for (int i = 0; i < outer_size; ++i)
+ {
+ // Find max element value which we'll use to ensure numerical stability
+ // taking advantage of the following equality:
+ // exp(x[i])/sum(exp(x[i])) == exp(x[i]+C)/sum(exp(x[i]+C))
+ float max = std::numeric_limits<float>::lowest();
+ for (int c = 0; c < depth; ++c)
+ {
+ max = std::max(max, input_data[i * depth + c]);
+ }
+
+ // Compute sum.
+ float sum = 0.f;
+ for (int c = 0; c < depth; ++c)
+ {
+ sum += std::exp((input_data[i * depth + c] - max) * params.beta);
+ }
+
+ // Compute result.
+ for (int c = 0; c < depth; ++c)
+ {
+ output_data[i * depth + c] = std::exp((input_data[i * depth + c] - max) * params.beta) / sum;
+ }
+ }
+}
+
+inline void Softmax(const SoftmaxParams &params, const Shape &input_shape,
+ const uint8_t *input_data, const Shape &output_shape, uint8_t *output_data)
+{
+ const int32_t input_beta_multiplier = params.input_multiplier;
+ const int32_t input_beta_left_shift = params.input_left_shift;
+ const int diff_min = params.diff_min;
+ // The representation chosen for the input to the exp() function is Q5.26.
+ // We need to leave extra space since values that we skip might be as large as
+ // -32 before multiplying by input_beta_multiplier, and therefore as large as
+ // -16 afterwards. Note that exp(-8) is definitely not insignificant to
+ // accumulation, but exp(-16) definitely is.
+ static const int kScaledDiffIntegerBits = 5;
+ static const int kAccumulationIntegerBits = 12;
+ using FixedPointScaledDiff = gemmlowp::FixedPoint<kScaledDiffIntegerBits>;
+ using FixedPointAccum = gemmlowp::FixedPoint<kAccumulationIntegerBits>;
+ using FixedPoint0 = gemmlowp::FixedPoint<0>;
+
+ const int trailing_dim = input_shape.DimensionsCount() - 1;
+ const int outer_size = MatchingFlatSizeSkipDim(input_shape, trailing_dim, output_shape);
+ const int depth = MatchingDim(input_shape, trailing_dim, output_shape, trailing_dim);
+
+ for (int i = 0; i < outer_size; ++i)
+ {
+ uint8_t max_in_row = 0;
+ for (int c = 0; c < depth; ++c)
+ {
+ max_in_row = std::max(max_in_row, input_data[i * depth + c]);
+ }
+
+ FixedPointAccum sum_of_exps = FixedPointAccum::Zero();
+ for (int c = 0; c < depth; ++c)
+ {
+ int32_t input_diff = static_cast<int32_t>(input_data[i * depth + c]) - max_in_row;
+ if (input_diff >= diff_min)
+ {
+ const int32_t input_diff_rescaled = MultiplyByQuantizedMultiplierGreaterThanOne(
+ input_diff, input_beta_multiplier, input_beta_left_shift);
+ const FixedPointScaledDiff scaled_diff_f8 =
+ FixedPointScaledDiff::FromRaw(input_diff_rescaled);
+ sum_of_exps = sum_of_exps + gemmlowp::Rescale<kAccumulationIntegerBits>(
+ exp_on_negative_values(scaled_diff_f8));
+ }
+ }
+
+ int32_t fixed_sum_of_exps = sum_of_exps.raw();
+ int headroom_plus_one = CountLeadingZeros(static_cast<uint32_t>(fixed_sum_of_exps));
+ // This is the number of bits to the left of the binary point above 1.0.
+ // Consider fixed_sum_of_exps=1.25. In that case shifted_scale=0.8 and
+ // no later adjustment will be needed.
+ int num_bits_over_unit = kAccumulationIntegerBits - headroom_plus_one;
+ int32_t shifted_sum_minus_one =
+ static_cast<int32_t>((static_cast<uint32_t>(fixed_sum_of_exps) << headroom_plus_one) -
+ (static_cast<uint32_t>(1) << 31));
+
+ FixedPoint0 shifted_scale =
+ one_over_one_plus_x_for_x_in_0_1(FixedPoint0::FromRaw(shifted_sum_minus_one));
+
+ for (int c = 0; c < depth; ++c)
+ {
+ int32_t input_diff = static_cast<int32_t>(input_data[i * depth + c]) - max_in_row;
+ if (input_diff >= diff_min)
+ {
+ const int32_t input_diff_rescaled = MultiplyByQuantizedMultiplierGreaterThanOne(
+ input_diff, input_beta_multiplier, input_beta_left_shift);
+ const FixedPointScaledDiff scaled_diff_f8 =
+ FixedPointScaledDiff::FromRaw(input_diff_rescaled);
+
+ FixedPoint0 exp_in_0 = exp_on_negative_values(scaled_diff_f8);
+ int32_t unsat_output = gemmlowp::RoundingDivideByPOT((shifted_scale * exp_in_0).raw(),
+ num_bits_over_unit + 31 - 8);
+
+ output_data[i * depth + c] = static_cast<uint8_t>(
+ std::max(std::min(unsat_output, static_cast<int32_t>(255)), static_cast<int32_t>(0)));
+ }
+ else
+ {
+ output_data[i * depth + c] = 0;
+ }
+ }
+ }
+}
+
+} // namespace cker
+} // namespace nnfw
+
+#endif // __NNFW_CKER_SOFTMAX_H__
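
For reference (not part of the patch), a sketch of the float Softmax above over a single row of three logits. With beta = 1.0 it computes exp(x - max) / sum(exp(x - max)); subtracting the row maximum keeps exp() from overflowing without changing the result:

#include "cker/operation/SoftMax.h"

#include <cstdio>

int main()
{
  nnfw::cker::SoftmaxParams params{};
  params.beta = 1.0;

  // One row of three logits; Shape from an initializer list is assumed.
  const nnfw::cker::Shape shape{1, 3};
  const float logits[] = {1.f, 2.f, 3.f};
  float probs[3];

  nnfw::cker::Softmax(params, shape, logits, shape, probs);
  std::printf("%f %f %f\n", probs[0], probs[1], probs[2]); // ~0.090 0.245 0.665
}
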
diff --git a/libs/cpp14/CMakeLists.txt b/runtimes/libs/cpp14/CMakeLists.txt
index bba9e132d..bba9e132d 100644
--- a/libs/cpp14/CMakeLists.txt
+++ b/runtimes/libs/cpp14/CMakeLists.txt
diff --git a/runtimes/libs/cpp14/include/cpp14/memory.h b/runtimes/libs/cpp14/include/cpp14/memory.h
new file mode 100644
index 000000000..7070e1c99
--- /dev/null
+++ b/runtimes/libs/cpp14/include/cpp14/memory.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file memory.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains @c make_unique, which is missing from C++11
+ * @details Implementation is based on http://isocpp.org/files/papers/N3656.txt
+ */
+#ifndef __NNFW_CPP14_MEMORY_H__
+#define __NNFW_CPP14_MEMORY_H__
+
+#include <memory>
+
+namespace nnfw
+{
+namespace cpp14
+{
+
+template <typename T> struct _Unique_if
+{
+ typedef std::unique_ptr<T> _Single_object;
+};
+
+template <typename T> struct _Unique_if<T[]>
+{
+ typedef std::unique_ptr<T[]> _Unknown_bound;
+};
+
+template <typename T, size_t N> struct _Unique_if<T[N]>
+{
+ typedef void _Known_bound;
+};
+
+template <typename T, typename... Args>
+typename _Unique_if<T>::_Single_object make_unique(Args &&... args)
+{
+ return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
+}
+
+template <typename T> typename _Unique_if<T>::_Unknown_bound make_unique(size_t n)
+{
+ typedef typename std::remove_extent<T>::type U;
+ return std::unique_ptr<T>(new U[n]());
+}
+
+template <typename T, typename... Args>
+typename _Unique_if<T>::_Known_bound make_unique(Args &&...) = delete;
+
+} // namespace cpp14
+} // namespace nnfw
+
+#endif // __NNFW_CPP14_MEMORY_H__
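
For reference (not part of the patch), a sketch of nnfw::cpp14::make_unique as a drop-in for std::make_unique when building with -std=c++11. The single-object form forwards its arguments, the unknown-bound array form value-initializes its elements, and the known-bound form is deleted, exactly as in N3656:

#include "cpp14/memory.h"

#include <cstdio>

struct Pair
{
  Pair(int a, int b) : a(a), b(b) {}
  int a, b;
};

int main()
{
  auto p = nnfw::cpp14::make_unique<Pair>(1, 2); // single object, args forwarded
  auto arr = nnfw::cpp14::make_unique<int[]>(4); // array of 4 value-initialized ints
  std::printf("%d %d %d\n", p->a, p->b, arr[0]); // 1 2 0
  // nnfw::cpp14::make_unique<int[4]>();         // ill-formed: known-bound form is deleted
}
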
diff --git a/runtimes/libs/jsoncpp/CMakeLists.txt b/runtimes/libs/jsoncpp/CMakeLists.txt
new file mode 100644
index 000000000..5720cec5b
--- /dev/null
+++ b/runtimes/libs/jsoncpp/CMakeLists.txt
@@ -0,0 +1,6 @@
+file(GLOB_RECURSE SRCS "*.cpp")
+
+add_library(jsoncpp STATIC ${SRCS})
+set_property(TARGET jsoncpp PROPERTY POSITION_INDEPENDENT_CODE ON)
+set_property(TARGET jsoncpp APPEND PROPERTY INTERFACE_INCLUDE_DIRECTORIES
+ $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
diff --git a/runtimes/libs/jsoncpp/README.md b/runtimes/libs/jsoncpp/README.md
new file mode 100644
index 000000000..da5a06d71
--- /dev/null
+++ b/runtimes/libs/jsoncpp/README.md
@@ -0,0 +1,11 @@
+# Origin of source code
+
+This library is based on the JsonCpp amalgamated header and cpp files (https://github.com/open-source-parsers/jsoncpp/wiki/Amalgamated).
+
+# Background
+
+Since jsoncpp on Tizen does not provide a static jsoncpp library, the nnfw project links against this local copy.
+
+# Version
+
+- 1.7.7 : https://github.com/open-source-parsers/jsoncpp/archive/1.7.7.tar.gz
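
For reference (not part of the patch), a sketch of parsing a document with this vendored copy through the 1.7.7-era Json::Reader/Json::Value API; the keys used here are made up for illustration:

#include "json/json.h"

#include <iostream>

int main()
{
  Json::Value root;
  Json::Reader reader;
  // Hypothetical config document, just to exercise the parser.
  if (!reader.parse("{\"backend\": \"acl_cl\", \"threads\": 4}", root))
  {
    std::cerr << reader.getFormattedErrorMessages();
    return 1;
  }
  std::cout << root["backend"].asString() << " " << root["threads"].asInt() << std::endl;
  return 0;
}
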
diff --git a/runtimes/libs/jsoncpp/json/json-forwards.h b/runtimes/libs/jsoncpp/json/json-forwards.h
new file mode 100644
index 000000000..9fe95c055
--- /dev/null
+++ b/runtimes/libs/jsoncpp/json/json-forwards.h
@@ -0,0 +1,315 @@
+/// JsonCpp amalgamated forward header (http://jsoncpp.sourceforge.net/).
+/// It is intended to be used with #include "json/json-forwards.h"
+/// This header provides forward declarations for all JsonCpp types.
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+/*
+The JsonCpp library's source code, including accompanying documentation,
+tests and demonstration applications, are licensed under the following
+conditions...
+
+The author (Baptiste Lepilleur) explicitly disclaims copyright in all
+jurisdictions which recognize such a disclaimer. In such jurisdictions,
+this software is released into the Public Domain.
+
+In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
+2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
+released under the terms of the MIT License (see below).
+
+In jurisdictions which recognize Public Domain property, the user of this
+software may choose to accept it either as 1) Public Domain, 2) under the
+conditions of the MIT License (see below), or 3) under the terms of dual
+Public Domain/MIT License conditions described here, as they choose.
+
+The MIT License is about as close to Public Domain as a license can get, and is
+described in clear, concise terms at:
+
+ http://en.wikipedia.org/wiki/MIT_License
+
+The full text of the MIT License follows:
+
+========================================================================
+Copyright (c) 2007-2010 Baptiste Lepilleur
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+========================================================================
+(END LICENSE TEXT)
+
+The MIT license is compatible with both the GPL and commercial
+software, affording one all of the rights of Public Domain with the
+minor nuisance of being required to keep the above copyright notice
+and license text in the source code. Note also that by accepting the
+Public Domain "license" you can re-license your copy using whatever
+license you like.
+
+*/
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED
+#define JSON_FORWARD_AMALGATED_H_INCLUDED
+/// If defined, indicates that the source file is amalgamated
+/// to prevent private header inclusion.
+#define JSON_IS_AMALGAMATION
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_CONFIG_H_INCLUDED
+#define JSON_CONFIG_H_INCLUDED
+#include <stddef.h>
+#include <string> //typedef String
+#include <stdint.h> //typedef int64_t, uint64_t
+
+/// If defined, indicates that json library is embedded in CppTL library.
+//# define JSON_IN_CPPTL 1
+
+/// If defined, indicates that json may leverage CppTL library
+//# define JSON_USE_CPPTL 1
+/// If defined, indicates that cpptl vector based map should be used instead of
+/// std::map
+/// as Value container.
+//# define JSON_USE_CPPTL_SMALLMAP 1
+
+// If non-zero, the library uses exceptions to report bad input instead of C
+// assertion macros. The default is to use exceptions.
+#ifndef JSON_USE_EXCEPTION
+#define JSON_USE_EXCEPTION 1
+#endif
+
+/// If defined, indicates that the source file is amalgamated
+/// to prevent private header inclusion.
+/// Remarks: it is automatically defined in the generated amalgamated header.
+// #define JSON_IS_AMALGAMATION
+
+#ifdef JSON_IN_CPPTL
+#include <cpptl/config.h>
+#ifndef JSON_USE_CPPTL
+#define JSON_USE_CPPTL 1
+#endif
+#endif
+
+#ifdef JSON_IN_CPPTL
+#define JSON_API CPPTL_API
+#elif defined(JSON_DLL_BUILD)
+#if defined(_MSC_VER) || defined(__MINGW32__)
+#define JSON_API __declspec(dllexport)
+#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#endif // if defined(_MSC_VER)
+#elif defined(JSON_DLL)
+#if defined(_MSC_VER) || defined(__MINGW32__)
+#define JSON_API __declspec(dllimport)
+#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#endif // if defined(_MSC_VER)
+#endif // ifdef JSON_IN_CPPTL
+#if !defined(JSON_API)
+#define JSON_API
+#endif
+
+// If JSON_NO_INT64 is defined, then Json only supports the C++ "int" type for
+// integer storage, and 64-bit integer support is disabled.
+// #define JSON_NO_INT64 1
+
+#if defined(_MSC_VER) // MSVC
+#if _MSC_VER <= 1200 // MSVC 6
+ // Microsoft Visual Studio 6 only supports conversion from __int64 to double
+ // (no conversion from unsigned __int64).
+#define JSON_USE_INT64_DOUBLE_CONVERSION 1
+// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
+// characters in the debug information)
+// All projects I've ever seen with VS6 were using this globally (not bothering
+// with pragma push/pop).
+#pragma warning(disable : 4786)
+#endif // MSVC 6
+
+#if _MSC_VER >= 1500 // MSVC 2008
+ /// Indicates that the following function is deprecated.
+#define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
+#endif
+
+#endif // defined(_MSC_VER)
+
+// In C++11 the override keyword allows you to explicitly state that a function
+// is intended to override the base-class version. This makes the code more
+// manageable and fixes a set of common hard-to-find bugs.
+#if __cplusplus >= 201103L
+#define JSONCPP_OVERRIDE override
+#elif defined(_MSC_VER) && _MSC_VER > 1600
+#define JSONCPP_OVERRIDE override
+#else
+#define JSONCPP_OVERRIDE
+#endif
+
+#ifndef JSON_HAS_RVALUE_REFERENCES
+
+#if defined(_MSC_VER) && _MSC_VER >= 1600 // MSVC >= 2010
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif // MSVC >= 2010
+
+#ifdef __clang__
+#if __has_feature(cxx_rvalue_references)
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif // has_feature
+
+#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
+#if defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L)
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif // GXX_EXPERIMENTAL
+
+#endif // __clang__ || __GNUC__
+
+#endif // not defined JSON_HAS_RVALUE_REFERENCES
+
+#ifndef JSON_HAS_RVALUE_REFERENCES
+#define JSON_HAS_RVALUE_REFERENCES 0
+#endif
+
+#ifdef __clang__
+#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
+#if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
+#define JSONCPP_DEPRECATED(message) __attribute__((deprecated(message)))
+#elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
+#define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
+#endif // GNUC version
+#endif // __clang__ || __GNUC__
+
+#if !defined(JSONCPP_DEPRECATED)
+#define JSONCPP_DEPRECATED(message)
+#endif // if !defined(JSONCPP_DEPRECATED)
+
+#if __GNUC__ >= 6
+#define JSON_USE_INT64_DOUBLE_CONVERSION 1
+#endif
+
+#if !defined(JSON_IS_AMALGAMATION)
+
+#include "version.h"
+
+#if JSONCPP_USING_SECURE_MEMORY
+#include "allocator.h" //typedef Allocator
+#endif
+
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json
+{
+typedef int Int;
+typedef unsigned int UInt;
+#if defined(JSON_NO_INT64)
+typedef int LargestInt;
+typedef unsigned int LargestUInt;
+#undef JSON_HAS_INT64
+#else // if defined(JSON_NO_INT64)
+// For Microsoft Visual Studio, use specific types, as long long is not supported
+#if defined(_MSC_VER) // Microsoft Visual Studio
+typedef __int64 Int64;
+typedef unsigned __int64 UInt64;
+#else // if defined(_MSC_VER) // Other platforms, use long long
+typedef int64_t Int64;
+typedef uint64_t UInt64;
+#endif // if defined(_MSC_VER)
+typedef Int64 LargestInt;
+typedef UInt64 LargestUInt;
+#define JSON_HAS_INT64
+#endif // if defined(JSON_NO_INT64)
+#if JSONCPP_USING_SECURE_MEMORY
+#define JSONCPP_STRING std::basic_string<char, std::char_traits<char>, Json::SecureAllocator<char>>
+#define JSONCPP_OSTRINGSTREAM \
+ std::basic_ostringstream<char, std::char_traits<char>, Json::SecureAllocator<char>>
+#define JSONCPP_OSTREAM std::basic_ostream<char, std::char_traits<char>>
+#define JSONCPP_ISTRINGSTREAM \
+ std::basic_istringstream<char, std::char_traits<char>, Json::SecureAllocator<char>>
+#define JSONCPP_ISTREAM std::istream
+#else
+#define JSONCPP_STRING std::string
+#define JSONCPP_OSTRINGSTREAM std::ostringstream
+#define JSONCPP_OSTREAM std::ostream
+#define JSONCPP_ISTRINGSTREAM std::istringstream
+#define JSONCPP_ISTREAM std::istream
+#endif // if JSONCPP_USING_SECURE_MEMORY
+} // end namespace Json
+
+#endif // JSON_CONFIG_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_FORWARDS_H_INCLUDED
+#define JSON_FORWARDS_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "config.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json
+{
+
+// writer.h
+class FastWriter;
+class StyledWriter;
+
+// reader.h
+class Reader;
+
+// features.h
+class Features;
+
+// value.h
+typedef unsigned int ArrayIndex;
+class StaticString;
+class Path;
+class PathArgument;
+class Value;
+class ValueIteratorBase;
+class ValueIterator;
+class ValueConstIterator;
+
+} // namespace Json
+
+#endif // JSON_FORWARDS_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+#endif // ifndef JSON_FORWARD_AMALGATED_H_INCLUDED
diff --git a/runtimes/libs/jsoncpp/json/json.h b/runtimes/libs/jsoncpp/json/json.h
new file mode 100644
index 000000000..19c591267
--- /dev/null
+++ b/runtimes/libs/jsoncpp/json/json.h
@@ -0,0 +1,2133 @@
+/// JsonCpp amalgamated header (http://jsoncpp.sourceforge.net/).
+/// It is intended to be used with #include "json/json.h"
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+/*
+The JsonCpp library's source code, including accompanying documentation,
+tests and demonstration applications, are licensed under the following
+conditions...
+
+The author (Baptiste Lepilleur) explicitly disclaims copyright in all
+jurisdictions which recognize such a disclaimer. In such jurisdictions,
+this software is released into the Public Domain.
+
+In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
+2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
+released under the terms of the MIT License (see below).
+
+In jurisdictions which recognize Public Domain property, the user of this
+software may choose to accept it either as 1) Public Domain, 2) under the
+conditions of the MIT License (see below), or 3) under the terms of dual
+Public Domain/MIT License conditions described here, as they choose.
+
+The MIT License is about as close to Public Domain as a license can get, and is
+described in clear, concise terms at:
+
+ http://en.wikipedia.org/wiki/MIT_License
+
+The full text of the MIT License follows:
+
+========================================================================
+Copyright (c) 2007-2010 Baptiste Lepilleur
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+========================================================================
+(END LICENSE TEXT)
+
+The MIT license is compatible with both the GPL and commercial
+software, affording one all of the rights of Public Domain with the
+minor nuisance of being required to keep the above copyright notice
+and license text in the source code. Note also that by accepting the
+Public Domain "license" you can re-license your copy using whatever
+license you like.
+
+*/
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+#ifndef JSON_AMALGATED_H_INCLUDED
+#define JSON_AMALGATED_H_INCLUDED
+/// If defined, indicates that the source file is amalgamated
+/// to prevent private header inclusion.
+#define JSON_IS_AMALGAMATION
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/version.h
+// //////////////////////////////////////////////////////////////////////
+
+// DO NOT EDIT. This file (and "version") is generated by CMake.
+// Run CMake configure step to update it.
+#ifndef JSON_VERSION_H_INCLUDED
+#define JSON_VERSION_H_INCLUDED
+
+#define JSONCPP_VERSION_STRING "1.7.7"
+#define JSONCPP_VERSION_MAJOR 1
+#define JSONCPP_VERSION_MINOR 7
+#define JSONCPP_VERSION_PATCH 7
+#define JSONCPP_VERSION_QUALIFIER
+#define JSONCPP_VERSION_HEXA \
+ ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
+
+#ifdef JSONCPP_USING_SECURE_MEMORY
+#undef JSONCPP_USING_SECURE_MEMORY
+#endif
+#define JSONCPP_USING_SECURE_MEMORY 0
+// If non-zero, the library zeroes any memory that it has allocated before
+// it frees its memory.
+
+#endif // JSON_VERSION_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/version.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_CONFIG_H_INCLUDED
+#define JSON_CONFIG_H_INCLUDED
+#include <stddef.h>
+#include <string> //typedef String
+#include <stdint.h> //typedef int64_t, uint64_t
+
+/// If defined, indicates that json library is embedded in CppTL library.
+//# define JSON_IN_CPPTL 1
+
+/// If defined, indicates that json may leverage CppTL library
+//# define JSON_USE_CPPTL 1
+/// If defined, indicates that cpptl vector based map should be used instead of
+/// std::map
+/// as Value container.
+//# define JSON_USE_CPPTL_SMALLMAP 1
+
+// If non-zero, the library uses exceptions to report bad input instead of C
+// assertion macros. The default is to use exceptions.
+#ifndef JSON_USE_EXCEPTION
+#define JSON_USE_EXCEPTION 1
+#endif
+
+/// If defined, indicates that the source file is amalgamated
+/// to prevent private header inclusion.
+/// Remarks: it is automatically defined in the generated amalgamated header.
+// #define JSON_IS_AMALGAMATION
+
+#ifdef JSON_IN_CPPTL
+#include <cpptl/config.h>
+#ifndef JSON_USE_CPPTL
+#define JSON_USE_CPPTL 1
+#endif
+#endif
+
+#ifdef JSON_IN_CPPTL
+#define JSON_API CPPTL_API
+#elif defined(JSON_DLL_BUILD)
+#if defined(_MSC_VER) || defined(__MINGW32__)
+#define JSON_API __declspec(dllexport)
+#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#endif // if defined(_MSC_VER)
+#elif defined(JSON_DLL)
+#if defined(_MSC_VER) || defined(__MINGW32__)
+#define JSON_API __declspec(dllimport)
+#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#endif // if defined(_MSC_VER)
+#endif // ifdef JSON_IN_CPPTL
+#if !defined(JSON_API)
+#define JSON_API
+#endif
+
+// If JSON_NO_INT64 is defined, then Json only supports the C++ "int" type for
+// integer storage, and 64-bit integer support is disabled.
+// #define JSON_NO_INT64 1
+
+#if defined(_MSC_VER) // MSVC
+#if _MSC_VER <= 1200 // MSVC 6
+ // Microsoft Visual Studio 6 only supports conversion from __int64 to double
+ // (no conversion from unsigned __int64).
+#define JSON_USE_INT64_DOUBLE_CONVERSION 1
+// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
+// characters in the debug information)
+// All projects I've ever seen with VS6 were using this globally (not bothering
+// with pragma push/pop).
+#pragma warning(disable : 4786)
+#endif // MSVC 6
+
+#if _MSC_VER >= 1500 // MSVC 2008
+ /// Indicates that the following function is deprecated.
+#define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
+#endif
+
+#endif // defined(_MSC_VER)
+
+// In C++11 the override keyword allows you to explicitly state that a function
+// is intended to override the base-class version. This makes the code more
+// manageable and fixes a set of common hard-to-find bugs.
+#if __cplusplus >= 201103L
+#define JSONCPP_OVERRIDE override
+#elif defined(_MSC_VER) && _MSC_VER > 1600
+#define JSONCPP_OVERRIDE override
+#else
+#define JSONCPP_OVERRIDE
+#endif
+
+#ifndef JSON_HAS_RVALUE_REFERENCES
+
+#if defined(_MSC_VER) && _MSC_VER >= 1600 // MSVC >= 2010
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif // MSVC >= 2010
+
+#ifdef __clang__
+#if __has_feature(cxx_rvalue_references)
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif // has_feature
+
+#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
+#if defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L)
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif // GXX_EXPERIMENTAL
+
+#endif // __clang__ || __GNUC__
+
+#endif // not defined JSON_HAS_RVALUE_REFERENCES
+
+#ifndef JSON_HAS_RVALUE_REFERENCES
+#define JSON_HAS_RVALUE_REFERENCES 0
+#endif
+
+#ifdef __clang__
+#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
+#if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
+#define JSONCPP_DEPRECATED(message) __attribute__((deprecated(message)))
+#elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
+#define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
+#endif // GNUC version
+#endif // __clang__ || __GNUC__
+
+#if !defined(JSONCPP_DEPRECATED)
+#define JSONCPP_DEPRECATED(message)
+#endif // if !defined(JSONCPP_DEPRECATED)
+
+#if __GNUC__ >= 6
+#define JSON_USE_INT64_DOUBLE_CONVERSION 1
+#endif
+
+#if !defined(JSON_IS_AMALGAMATION)
+
+#include "version.h"
+
+#if JSONCPP_USING_SECURE_MEMORY
+#include "allocator.h" //typedef Allocator
+#endif
+
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json
+{
+typedef int Int;
+typedef unsigned int UInt;
+#if defined(JSON_NO_INT64)
+typedef int LargestInt;
+typedef unsigned int LargestUInt;
+#undef JSON_HAS_INT64
+#else // if defined(JSON_NO_INT64)
+// For Microsoft Visual Studio, use specific types, as long long is not supported
+#if defined(_MSC_VER) // Microsoft Visual Studio
+typedef __int64 Int64;
+typedef unsigned __int64 UInt64;
+#else // if defined(_MSC_VER) // Other platforms, use long long
+typedef int64_t Int64;
+typedef uint64_t UInt64;
+#endif // if defined(_MSC_VER)
+typedef Int64 LargestInt;
+typedef UInt64 LargestUInt;
+#define JSON_HAS_INT64
+#endif // if defined(JSON_NO_INT64)
+#if JSONCPP_USING_SECURE_MEMORY
+#define JSONCPP_STRING std::basic_string<char, std::char_traits<char>, Json::SecureAllocator<char>>
+#define JSONCPP_OSTRINGSTREAM \
+ std::basic_ostringstream<char, std::char_traits<char>, Json::SecureAllocator<char>>
+#define JSONCPP_OSTREAM std::basic_ostream<char, std::char_traits<char>>
+#define JSONCPP_ISTRINGSTREAM \
+ std::basic_istringstream<char, std::char_traits<char>, Json::SecureAllocator<char>>
+#define JSONCPP_ISTREAM std::istream
+#else
+#define JSONCPP_STRING std::string
+#define JSONCPP_OSTRINGSTREAM std::ostringstream
+#define JSONCPP_OSTREAM std::ostream
+#define JSONCPP_ISTRINGSTREAM std::istringstream
+#define JSONCPP_ISTREAM std::istream
+#endif // if JSONCPP_USING_SECURE_MEMORY
+} // end namespace Json
+
+#endif // JSON_CONFIG_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_FORWARDS_H_INCLUDED
+#define JSON_FORWARDS_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "config.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json
+{
+
+// writer.h
+class FastWriter;
+class StyledWriter;
+
+// reader.h
+class Reader;
+
+// features.h
+class Features;
+
+// value.h
+typedef unsigned int ArrayIndex;
+class StaticString;
+class Path;
+class PathArgument;
+class Value;
+class ValueIteratorBase;
+class ValueIterator;
+class ValueConstIterator;
+
+} // namespace Json
+
+#endif // JSON_FORWARDS_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/features.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
+#define CPPTL_JSON_FEATURES_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "forwards.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json
+{
+
+/** \brief Configuration passed to reader and writer.
+ * This configuration object can be used to force the Reader or Writer
+ * to behave in a standard conforming way.
+ */
+class JSON_API Features
+{
+public:
+ /** \brief A configuration that allows all features and assumes all strings
+ * are UTF-8.
+ * - C & C++ comments are allowed
+ * - Root object can be any JSON value
+ * - Assumes Value strings are encoded in UTF-8
+ */
+ static Features all();
+
+ /** \brief A configuration that is strictly compatible with the JSON
+ * specification.
+ * - Comments are forbidden.
+ * - Root object must be either an array or an object value.
+ * - Assumes Value strings are encoded in UTF-8
+ */
+ static Features strictMode();
+
+ /** \brief Initialize the configuration like JsonConfig::allFeatures;
+ */
+ Features();
+
+ /// \c true if comments are allowed. Default: \c true.
+ bool allowComments_;
+
+ /// \c true if root must be either an array or an object value. Default: \c
+ /// false.
+ bool strictRoot_;
+
+ /// \c true if dropped null placeholders are allowed. Default: \c false.
+ bool allowDroppedNullPlaceholders_;
+
+ /// \c true if numeric object key are allowed. Default: \c false.
+ bool allowNumericKeys_;
+};
+
+} // namespace Json
+
+#endif // CPPTL_JSON_FEATURES_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/features.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_H_INCLUDED
+#define CPPTL_JSON_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "forwards.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <string>
+#include <vector>
+#include <exception>
+
+#ifndef JSON_USE_CPPTL_SMALLMAP
+#include <map>
+#else
+#include <cpptl/smallmap.h>
+#endif
+#ifdef JSON_USE_CPPTL
+#include <cpptl/forwards.h>
+#endif
+
+// Conditional NORETURN attribute on the throw functions would:
+// a) suppress false positives from static code analysis
+// b) possibly improve optimization opportunities.
+#if !defined(JSONCPP_NORETURN)
+#if defined(_MSC_VER)
+#define JSONCPP_NORETURN __declspec(noreturn)
+#elif defined(__GNUC__)
+#define JSONCPP_NORETURN __attribute__((__noreturn__))
+#else
+#define JSONCPP_NORETURN
+#endif
+#endif
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+/** \brief JSON (JavaScript Object Notation).
+ */
+namespace Json
+{
+
+/** Base class for all exceptions we throw.
+ *
+ * We use nothing but these internally. Of course, STL can throw others.
+ */
+class JSON_API Exception : public std::exception
+{
+public:
+ Exception(JSONCPP_STRING const &msg);
+ ~Exception() throw() JSONCPP_OVERRIDE;
+ char const *what() const throw() JSONCPP_OVERRIDE;
+
+protected:
+ JSONCPP_STRING msg_;
+};
+
+/** Exceptions which the user cannot easily avoid.
+ *
+ * E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
+ *
+ * \remark derived from Json::Exception
+ */
+class JSON_API RuntimeError : public Exception
+{
+public:
+ RuntimeError(JSONCPP_STRING const &msg);
+};
+
+/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
+ *
+ * These are precondition-violations (user bugs) and internal errors (our bugs).
+ *
+ * \remark derived from Json::Exception
+ */
+class JSON_API LogicError : public Exception
+{
+public:
+ LogicError(JSONCPP_STRING const &msg);
+};
+
+/// used internally
+JSONCPP_NORETURN void throwRuntimeError(JSONCPP_STRING const &msg);
+/// used internally
+JSONCPP_NORETURN void throwLogicError(JSONCPP_STRING const &msg);
+
+/** \brief Type of the value held by a Value object.
+ */
+enum ValueType
+{
+ nullValue = 0, ///< 'null' value
+ intValue, ///< signed integer value
+ uintValue, ///< unsigned integer value
+ realValue, ///< double value
+ stringValue, ///< UTF-8 string value
+ booleanValue, ///< bool value
+ arrayValue, ///< array value (ordered list)
+ objectValue ///< object value (collection of name/value pairs).
+};
+
+enum CommentPlacement
+{
+ commentBefore = 0, ///< a comment placed on the line before a value
+ commentAfterOnSameLine, ///< a comment just after a value on the same line
+ commentAfter, ///< a comment on the line after a value (only makes sense for
+ /// root value)
+ numberOfCommentPlacement
+};
+
+//# ifdef JSON_USE_CPPTL
+// typedef CppTL::AnyEnumerator<const char *> EnumMemberNames;
+// typedef CppTL::AnyEnumerator<const Value &> EnumValues;
+//# endif
+
+/** \brief Lightweight wrapper to tag static string.
+ *
+ * Value constructor and objectValue member assignment take advantage of the
+ * StaticString and avoid the cost of string duplication when storing the
+ * string or the member name.
+ *
+ * Example of usage:
+ * \code
+ * Json::Value aValue( StaticString("some text") );
+ * Json::Value object;
+ * static const StaticString code("code");
+ * object[code] = 1234;
+ * \endcode
+ */
+class JSON_API StaticString
+{
+public:
+ explicit StaticString(const char *czstring) : c_str_(czstring) {}
+
+ operator const char *() const { return c_str_; }
+
+ const char *c_str() const { return c_str_; }
+
+private:
+ const char *c_str_;
+};
+
+/** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
+ *
+ * This class is a discriminated union wrapper that can represent a:
+ * - signed integer [range: Value::minInt - Value::maxInt]
+ * - unsigned integer (range: 0 - Value::maxUInt)
+ * - double
+ * - UTF-8 string
+ * - boolean
+ * - 'null'
+ * - an ordered list of Value
+ * - collection of name/value pairs (JavaScript object)
+ *
+ * The type of the held value is represented by a #ValueType and
+ * can be obtained using type().
+ *
+ * Values of an #objectValue or #arrayValue can be accessed using operator[]()
+ * methods.
+ * Non-const methods will automatically create a #nullValue element
+ * if it does not exist.
+ * The sequence of an #arrayValue will be automatically resized and initialized
+ * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
+ *
+ * The get() methods can be used to obtain a default value in case the
+ * required element does not exist.
+ *
+ * It is possible to iterate over the member names of an #objectValue using
+ * the getMemberNames() method.
+ *
+ * \note #Value string-lengths fit in size_t, but keys must be < 2^30.
+ * (The reason is an implementation detail.) A #CharReader will raise an
+ * exception if a bound is exceeded to avoid security holes in your app,
+ * but the Value API does *not* check bounds. That is the responsibility
+ * of the caller.
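+ *
+ * A brief usage sketch (illustrative only, not from the upstream header;
+ * 'root' and the member names are hypothetical):
+ * \code
+ * Json::Value root(Json::objectValue);
+ * root["name"] = "nnfw";                     // creates the member
+ * root["sizes"].append(1);                   // nullValue becomes an arrayValue
+ * Json::Value port = root.get("port", 8080); // default when the key is absent
+ * \endcode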
+ */
+class JSON_API Value
+{
+ friend class ValueIteratorBase;
+
+public:
+ typedef std::vector<JSONCPP_STRING> Members;
+ typedef ValueIterator iterator;
+ typedef ValueConstIterator const_iterator;
+ typedef Json::UInt UInt;
+ typedef Json::Int Int;
+#if defined(JSON_HAS_INT64)
+ typedef Json::UInt64 UInt64;
+ typedef Json::Int64 Int64;
+#endif // defined(JSON_HAS_INT64)
+ typedef Json::LargestInt LargestInt;
+ typedef Json::LargestUInt LargestUInt;
+ typedef Json::ArrayIndex ArrayIndex;
+
+  static const Value &null; ///< We regret this reference to a global instance;
+                            ///< prefer the simpler Value().
+ static const Value &nullRef; ///< just a kludge for binary-compatibility; same as null
+ static Value const &nullSingleton(); ///< Prefer this to null or nullRef.
+
+ /// Minimum signed integer value that can be stored in a Json::Value.
+ static const LargestInt minLargestInt;
+ /// Maximum signed integer value that can be stored in a Json::Value.
+ static const LargestInt maxLargestInt;
+ /// Maximum unsigned integer value that can be stored in a Json::Value.
+ static const LargestUInt maxLargestUInt;
+
+ /// Minimum signed int value that can be stored in a Json::Value.
+ static const Int minInt;
+ /// Maximum signed int value that can be stored in a Json::Value.
+ static const Int maxInt;
+ /// Maximum unsigned int value that can be stored in a Json::Value.
+ static const UInt maxUInt;
+
+#if defined(JSON_HAS_INT64)
+ /// Minimum signed 64 bits int value that can be stored in a Json::Value.
+ static const Int64 minInt64;
+ /// Maximum signed 64 bits int value that can be stored in a Json::Value.
+ static const Int64 maxInt64;
+ /// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
+ static const UInt64 maxUInt64;
+#endif // defined(JSON_HAS_INT64)
+
+private:
+#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+ class CZString
+ {
+ public:
+ enum DuplicationPolicy
+ {
+ noDuplication = 0,
+ duplicate,
+ duplicateOnCopy
+ };
+ CZString(ArrayIndex index);
+ CZString(char const *str, unsigned length, DuplicationPolicy allocate);
+ CZString(CZString const &other);
+#if JSON_HAS_RVALUE_REFERENCES
+ CZString(CZString &&other);
+#endif
+ ~CZString();
+ CZString &operator=(CZString other);
+ bool operator<(CZString const &other) const;
+ bool operator==(CZString const &other) const;
+ ArrayIndex index() const;
+ // const char* c_str() const; ///< \deprecated
+ char const *data() const;
+ unsigned length() const;
+ bool isStaticString() const;
+
+ private:
+ void swap(CZString &other);
+
+ struct StringStorage
+ {
+ unsigned policy_ : 2;
+ unsigned length_ : 30; // 1GB max
+ };
+
+ char const *cstr_; // actually, a prefixed string, unless policy is noDup
+ union {
+ ArrayIndex index_;
+ StringStorage storage_;
+ };
+ };
+
+public:
+#ifndef JSON_USE_CPPTL_SMALLMAP
+ typedef std::map<CZString, Value> ObjectValues;
+#else
+ typedef CppTL::SmallMap<CZString, Value> ObjectValues;
+#endif // ifndef JSON_USE_CPPTL_SMALLMAP
+#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+
+public:
+ /** \brief Create a default Value of the given type.
+
+ This is a very useful constructor.
+ To create an empty array, pass arrayValue.
+ To create an empty object, pass objectValue.
+ Another Value can then be set to this one by assignment.
+  This is useful since clear() and resize() will not alter types.
+
+ Examples:
+\code
+Json::Value null_value; // null
+Json::Value arr_value(Json::arrayValue); // []
+Json::Value obj_value(Json::objectValue); // {}
+\endcode
+ */
+ Value(ValueType type = nullValue);
+ Value(Int value);
+ Value(UInt value);
+#if defined(JSON_HAS_INT64)
+ Value(Int64 value);
+ Value(UInt64 value);
+#endif // if defined(JSON_HAS_INT64)
+ Value(double value);
+  Value(const char *value); ///< Copy until first 0. (NULL causes a seg-fault.)
+ Value(const char *begin, const char *end); ///< Copy all, incl zeroes.
+ /** \brief Constructs a value from a static string.
+
+  * Like the other string-value constructors, but does not duplicate the string
+  * for internal storage. The given string must remain alive after the call to
+  * this constructor.
+ * \note This works only for null-terminated strings. (We cannot change the
+ * size of this class, so we have nowhere to store the length,
+ * which might be computed later for various operations.)
+ *
+ * Example of usage:
+ * \code
+ * static StaticString foo("some text");
+ * Json::Value aValue(foo);
+ * \endcode
+ */
+ Value(const StaticString &value);
+  Value(const JSONCPP_STRING &value); ///< Copy data() until size(). Embedded zeroes too.
+#ifdef JSON_USE_CPPTL
+ Value(const CppTL::ConstString &value);
+#endif
+ Value(bool value);
+ /// Deep copy.
+ Value(const Value &other);
+#if JSON_HAS_RVALUE_REFERENCES
+ /// Move constructor
+ Value(Value &&other);
+#endif
+ ~Value();
+
+ /// Deep copy, then swap(other).
+  /// \note Overwrites existing comments. To preserve comments, use #swapPayload().
+ Value &operator=(Value other);
+ /// Swap everything.
+ void swap(Value &other);
+ /// Swap values but leave comments and source offsets in place.
+ void swapPayload(Value &other);
+
+ ValueType type() const;
+
+ /// Compare payload only, not comments etc.
+ bool operator<(const Value &other) const;
+ bool operator<=(const Value &other) const;
+ bool operator>=(const Value &other) const;
+ bool operator>(const Value &other) const;
+ bool operator==(const Value &other) const;
+ bool operator!=(const Value &other) const;
+ int compare(const Value &other) const;
+
+ const char *asCString() const; ///< Embedded zeroes could cause you trouble!
+#if JSONCPP_USING_SECURE_MEMORY
+ unsigned getCStringLength() const; // Allows you to understand the length of the CString
+#endif
+ JSONCPP_STRING asString() const; ///< Embedded zeroes are possible.
+ /** Get raw char* of string-value.
+ * \return false if !string. (Seg-fault if str or end are NULL.)
+ */
+ bool getString(char const **begin, char const **end) const;
+#ifdef JSON_USE_CPPTL
+ CppTL::ConstString asConstString() const;
+#endif
+ Int asInt() const;
+ UInt asUInt() const;
+#if defined(JSON_HAS_INT64)
+ Int64 asInt64() const;
+ UInt64 asUInt64() const;
+#endif // if defined(JSON_HAS_INT64)
+ LargestInt asLargestInt() const;
+ LargestUInt asLargestUInt() const;
+ float asFloat() const;
+ double asDouble() const;
+ bool asBool() const;
+
+ bool isNull() const;
+ bool isBool() const;
+ bool isInt() const;
+ bool isInt64() const;
+ bool isUInt() const;
+ bool isUInt64() const;
+ bool isIntegral() const;
+ bool isDouble() const;
+ bool isNumeric() const;
+ bool isString() const;
+ bool isArray() const;
+ bool isObject() const;
+
+ bool isConvertibleTo(ValueType other) const;
+
+ /// Number of values in array or object
+ ArrayIndex size() const;
+
+ /// \brief Return true if empty array, empty object, or null;
+ /// otherwise, false.
+ bool empty() const;
+
+ /// Return isNull()
+ bool operator!() const;
+
+ /// Remove all object members and array elements.
+ /// \pre type() is arrayValue, objectValue, or nullValue
+ /// \post type() is unchanged
+ void clear();
+
+ /// Resize the array to size elements.
+ /// New elements are initialized to null.
+ /// May only be called on nullValue or arrayValue.
+ /// \pre type() is arrayValue or nullValue
+ /// \post type() is arrayValue
+ void resize(ArrayIndex size);
+
+  /// Access an array element (zero-based index).
+  /// If the array contains fewer than index+1 elements, then null values are
+  /// inserted into the array so that its size becomes index+1.
+ /// (You may need to say 'value[0u]' to get your compiler to distinguish
+ /// this from the operator[] which takes a string.)
+ Value &operator[](ArrayIndex index);
+
+  /// Access an array element (zero-based index).
+  /// If the array contains fewer than index+1 elements, then null values are
+  /// inserted into the array so that its size becomes index+1.
+ /// (You may need to say 'value[0u]' to get your compiler to distinguish
+ /// this from the operator[] which takes a string.)
+ Value &operator[](int index);
+
+  /// Access an array element (zero-based index).
+ /// (You may need to say 'value[0u]' to get your compiler to distinguish
+ /// this from the operator[] which takes a string.)
+ const Value &operator[](ArrayIndex index) const;
+
+  /// Access an array element (zero-based index).
+ /// (You may need to say 'value[0u]' to get your compiler to distinguish
+ /// this from the operator[] which takes a string.)
+ const Value &operator[](int index) const;
+
+  /// If the array contains at least index+1 elements, returns the element
+  /// value; otherwise returns defaultValue.
+ Value get(ArrayIndex index, const Value &defaultValue) const;
+ /// Return true if index < size().
+ bool isValidIndex(ArrayIndex index) const;
+ /// \brief Append value to array at the end.
+ ///
+ /// Equivalent to jsonvalue[jsonvalue.size()] = value;
+ Value &append(const Value &value);
+
+ /// Access an object value by name, create a null member if it does not exist.
+  /// \note Because of our implementation, keys are limited to 2^30 - 1 chars.
+ /// Exceeding that will cause an exception.
+ Value &operator[](const char *key);
+ /// Access an object value by name, returns null if there is no member with
+ /// that name.
+ const Value &operator[](const char *key) const;
+ /// Access an object value by name, create a null member if it does not exist.
+ /// \param key may contain embedded nulls.
+ Value &operator[](const JSONCPP_STRING &key);
+ /// Access an object value by name, returns null if there is no member with
+ /// that name.
+ /// \param key may contain embedded nulls.
+ const Value &operator[](const JSONCPP_STRING &key) const;
+ /** \brief Access an object value by name, create a null member if it does not
+ exist.
+
+ * If the object has no entry for that name, then the member name used to store
+ * the new entry is not duplicated.
+ * Example of use:
+ * \code
+ * Json::Value object;
+ * static const StaticString code("code");
+ * object[code] = 1234;
+ * \endcode
+ */
+ Value &operator[](const StaticString &key);
+#ifdef JSON_USE_CPPTL
+ /// Access an object value by name, create a null member if it does not exist.
+ Value &operator[](const CppTL::ConstString &key);
+ /// Access an object value by name, returns null if there is no member with
+ /// that name.
+ const Value &operator[](const CppTL::ConstString &key) const;
+#endif
+  /// Return the member named key if it exists, defaultValue otherwise.
+ /// \note deep copy
+ Value get(const char *key, const Value &defaultValue) const;
+  /// Return the member named key if it exists, defaultValue otherwise.
+ /// \note deep copy
+ /// \note key may contain embedded nulls.
+ Value get(const char *begin, const char *end, const Value &defaultValue) const;
+  /// Return the member named key if it exists, defaultValue otherwise.
+ /// \note deep copy
+ /// \param key may contain embedded nulls.
+ Value get(const JSONCPP_STRING &key, const Value &defaultValue) const;
+#ifdef JSON_USE_CPPTL
+  /// Return the member named key if it exists, defaultValue otherwise.
+ /// \note deep copy
+ Value get(const CppTL::ConstString &key, const Value &defaultValue) const;
+#endif
+ /// Most general and efficient version of isMember()const, get()const,
+ /// and operator[]const
+ /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
+ Value const *find(char const *begin, char const *end) const;
+ /// Most general and efficient version of object-mutators.
+ /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
+ /// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
+ Value const *demand(char const *begin, char const *end);
+ /// \brief Remove and return the named member.
+ ///
+  /// Does nothing if it did not exist.
+ /// \return the removed Value, or null.
+ /// \pre type() is objectValue or nullValue
+ /// \post type() is unchanged
+ /// \deprecated
+ Value removeMember(const char *key);
+ /// Same as removeMember(const char*)
+ /// \param key may contain embedded nulls.
+ /// \deprecated
+ Value removeMember(const JSONCPP_STRING &key);
+ /// Same as removeMember(const char* begin, const char* end, Value* removed),
+ /// but 'key' is null-terminated.
+ bool removeMember(const char *key, Value *removed);
+ /** \brief Remove the named map member.
+
+ Update 'removed' iff removed.
+ \param key may contain embedded nulls.
+ \return true iff removed (no exceptions)
+ */
+ bool removeMember(JSONCPP_STRING const &key, Value *removed);
+ /// Same as removeMember(JSONCPP_STRING const& key, Value* removed)
+ bool removeMember(const char *begin, const char *end, Value *removed);
+ /** \brief Remove the indexed array element.
+
+  This is an expensive O(n) operation.
+ Update 'removed' iff removed.
+ \return true iff removed (no exceptions)
+ */
+ bool removeIndex(ArrayIndex i, Value *removed);
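+
+  // A removal sketch (illustrative; 'root' is a hypothetical objectValue):
+  //   Json::Value removed;
+  //   if (root.removeMember("key", &removed))
+  //   { /* 'removed' now holds the old value */ }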
+
+ /// Return true if the object has a member named key.
+ /// \note 'key' must be null-terminated.
+ bool isMember(const char *key) const;
+ /// Return true if the object has a member named key.
+ /// \param key may contain embedded nulls.
+ bool isMember(const JSONCPP_STRING &key) const;
+ /// Same as isMember(JSONCPP_STRING const& key)const
+ bool isMember(const char *begin, const char *end) const;
+#ifdef JSON_USE_CPPTL
+ /// Return true if the object has a member named key.
+ bool isMember(const CppTL::ConstString &key) const;
+#endif
+
+ /// \brief Return a list of the member names.
+ ///
+ /// If null, return an empty list.
+ /// \pre type() is objectValue or nullValue
+ /// \post if type() was nullValue, it remains nullValue
+ Members getMemberNames() const;
+
+ //# ifdef JSON_USE_CPPTL
+ // EnumMemberNames enumMemberNames() const;
+ // EnumValues enumValues() const;
+ //# endif
+
+ /// \deprecated Always pass len.
+ JSONCPP_DEPRECATED("Use setComment(JSONCPP_STRING const&) instead.")
+ void setComment(const char *comment, CommentPlacement placement);
+ /// Comments must be //... or /* ... */
+ void setComment(const char *comment, size_t len, CommentPlacement placement);
+ /// Comments must be //... or /* ... */
+ void setComment(const JSONCPP_STRING &comment, CommentPlacement placement);
+ bool hasComment(CommentPlacement placement) const;
+ /// Include delimiters and embedded newlines.
+ JSONCPP_STRING getComment(CommentPlacement placement) const;
+
+ JSONCPP_STRING toStyledString() const;
+
+ const_iterator begin() const;
+ const_iterator end() const;
+
+ iterator begin();
+ iterator end();
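+
+  // An iteration sketch (illustrative; 'v' is a hypothetical objectValue):
+  //   for (Json::Value::const_iterator it = v.begin(); it != v.end(); ++it)
+  //     std::cout << it.key().asString() << std::endl;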
+
+ // Accessors for the [start, limit) range of bytes within the JSON text from
+ // which this value was parsed, if any.
+ void setOffsetStart(ptrdiff_t start);
+ void setOffsetLimit(ptrdiff_t limit);
+ ptrdiff_t getOffsetStart() const;
+ ptrdiff_t getOffsetLimit() const;
+
+private:
+ void initBasic(ValueType type, bool allocated = false);
+
+ Value &resolveReference(const char *key);
+ Value &resolveReference(const char *key, const char *end);
+
+ struct CommentInfo
+ {
+ CommentInfo();
+ ~CommentInfo();
+
+ void setComment(const char *text, size_t len);
+
+ char *comment_;
+ };
+
+ // struct MemberNamesTransform
+ //{
+ // typedef const char *result_type;
+ // const char *operator()( const CZString &name ) const
+ // {
+ // return name.c_str();
+ // }
+ //};
+
+ union ValueHolder {
+ LargestInt int_;
+ LargestUInt uint_;
+ double real_;
+ bool bool_;
+ char *string_; // actually ptr to unsigned, followed by str, unless !allocated_
+ ObjectValues *map_;
+ } value_;
+ ValueType type_ : 8;
+ unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
+ // If not allocated_, string_ must be null-terminated.
+ CommentInfo *comments_;
+
+ // [start, limit) byte offsets in the source JSON text from which this Value
+ // was extracted.
+ ptrdiff_t start_;
+ ptrdiff_t limit_;
+};
+
+/** \brief Experimental and untested: represents an element of the "path" to
+ * access a node.
+ */
+class JSON_API PathArgument
+{
+public:
+ friend class Path;
+
+ PathArgument();
+ PathArgument(ArrayIndex index);
+ PathArgument(const char *key);
+ PathArgument(const JSONCPP_STRING &key);
+
+private:
+ enum Kind
+ {
+ kindNone = 0,
+ kindIndex,
+ kindKey
+ };
+ JSONCPP_STRING key_;
+ ArrayIndex index_;
+ Kind kind_;
+};
+
+/** \brief Experimental and untested: represents a "path" to access a node.
+ *
+ * Syntax:
+ * - "." => root node
+ * - ".[n]" => elements at index 'n' of root node (an array value)
+ * - ".name" => member named 'name' of root node (an object value)
+ * - ".name1.name2.name3"
+ * - ".[0][1][2].name1[3]"
+ * - ".%" => member name is provided as parameter
+ * - ".[%]" => index is provied as parameter
+ */
+class JSON_API Path
+{
+public:
+ Path(const JSONCPP_STRING &path, const PathArgument &a1 = PathArgument(),
+ const PathArgument &a2 = PathArgument(), const PathArgument &a3 = PathArgument(),
+ const PathArgument &a4 = PathArgument(), const PathArgument &a5 = PathArgument());
+
+ const Value &resolve(const Value &root) const;
+ Value resolve(const Value &root, const Value &defaultValue) const;
+ /// Creates the "path" to access the specified node and returns a reference on
+ /// the node.
+ Value &make(Value &root) const;
+
+private:
+ typedef std::vector<const PathArgument *> InArgs;
+ typedef std::vector<PathArgument> Args;
+
+ void makePath(const JSONCPP_STRING &path, const InArgs &in);
+ void addPathInArg(const JSONCPP_STRING &path, const InArgs &in, InArgs::const_iterator &itInArg,
+ PathArgument::Kind kind);
+ void invalidPath(const JSONCPP_STRING &path, int location);
+
+ Args args_;
+};
+
+/** \brief Base class for Value iterators.
+ *
+ */
+class JSON_API ValueIteratorBase
+{
+public:
+ typedef std::bidirectional_iterator_tag iterator_category;
+ typedef unsigned int size_t;
+ typedef int difference_type;
+ typedef ValueIteratorBase SelfType;
+
+ bool operator==(const SelfType &other) const { return isEqual(other); }
+
+ bool operator!=(const SelfType &other) const { return !isEqual(other); }
+
+ difference_type operator-(const SelfType &other) const { return other.computeDistance(*this); }
+
+ /// Return either the index or the member name of the referenced value as a
+ /// Value.
+ Value key() const;
+
+ /// Return the index of the referenced Value, or -1 if it is not an arrayValue.
+ UInt index() const;
+
+ /// Return the member name of the referenced Value, or "" if it is not an
+ /// objectValue.
+ /// \note Avoid `c_str()` on result, as embedded zeroes are possible.
+ JSONCPP_STRING name() const;
+
+  /// Return the member name of the referenced Value, or "" if it is not an
+ /// objectValue.
+ /// \deprecated This cannot be used for UTF-8 strings, since there can be embedded nulls.
+ JSONCPP_DEPRECATED("Use `key = name();` instead.")
+ char const *memberName() const;
+ /// Return the member name of the referenced Value, or NULL if it is not an
+ /// objectValue.
+ /// \note Better version than memberName(). Allows embedded nulls.
+ char const *memberName(char const **end) const;
+
+protected:
+ Value &deref() const;
+
+ void increment();
+
+ void decrement();
+
+ difference_type computeDistance(const SelfType &other) const;
+
+ bool isEqual(const SelfType &other) const;
+
+ void copy(const SelfType &other);
+
+private:
+ Value::ObjectValues::iterator current_;
+ // Indicates that iterator is for a null value.
+ bool isNull_;
+
+public:
+ // For some reason, BORLAND needs these at the end, rather
+ // than earlier. No idea why.
+ ValueIteratorBase();
+ explicit ValueIteratorBase(const Value::ObjectValues::iterator &current);
+};
+
+/** \brief const iterator for object and array value.
+ *
+ */
+class JSON_API ValueConstIterator : public ValueIteratorBase
+{
+ friend class Value;
+
+public:
+ typedef const Value value_type;
+ // typedef unsigned int size_t;
+ // typedef int difference_type;
+ typedef const Value &reference;
+ typedef const Value *pointer;
+ typedef ValueConstIterator SelfType;
+
+ ValueConstIterator();
+ ValueConstIterator(ValueIterator const &other);
+
+private:
+  /*! \internal Used by Value to create an iterator.
+ */
+ explicit ValueConstIterator(const Value::ObjectValues::iterator &current);
+
+public:
+ SelfType &operator=(const ValueIteratorBase &other);
+
+ SelfType operator++(int)
+ {
+ SelfType temp(*this);
+ ++*this;
+ return temp;
+ }
+
+ SelfType operator--(int)
+ {
+ SelfType temp(*this);
+ --*this;
+ return temp;
+ }
+
+ SelfType &operator--()
+ {
+ decrement();
+ return *this;
+ }
+
+ SelfType &operator++()
+ {
+ increment();
+ return *this;
+ }
+
+ reference operator*() const { return deref(); }
+
+ pointer operator->() const { return &deref(); }
+};
+
+/** \brief Iterator for object and array value.
+ */
+class JSON_API ValueIterator : public ValueIteratorBase
+{
+ friend class Value;
+
+public:
+ typedef Value value_type;
+ typedef unsigned int size_t;
+ typedef int difference_type;
+ typedef Value &reference;
+ typedef Value *pointer;
+ typedef ValueIterator SelfType;
+
+ ValueIterator();
+ explicit ValueIterator(const ValueConstIterator &other);
+ ValueIterator(const ValueIterator &other);
+
+private:
+  /*! \internal Used by Value to create an iterator.
+ */
+ explicit ValueIterator(const Value::ObjectValues::iterator &current);
+
+public:
+ SelfType &operator=(const SelfType &other);
+
+ SelfType operator++(int)
+ {
+ SelfType temp(*this);
+ ++*this;
+ return temp;
+ }
+
+ SelfType operator--(int)
+ {
+ SelfType temp(*this);
+ --*this;
+ return temp;
+ }
+
+ SelfType &operator--()
+ {
+ decrement();
+ return *this;
+ }
+
+ SelfType &operator++()
+ {
+ increment();
+ return *this;
+ }
+
+ reference operator*() const { return deref(); }
+
+ pointer operator->() const { return &deref(); }
+};
+
+} // namespace Json
+
+namespace std
+{
+/// Specialize std::swap() for Json::Value.
+template <> inline void swap(Json::Value &a, Json::Value &b) { a.swap(b); }
+}
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // CPPTL_JSON_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_READER_H_INCLUDED
+#define CPPTL_JSON_READER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "features.h"
+#include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <deque>
+#include <iosfwd>
+#include <stack>
+#include <string>
+#include <istream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+namespace Json
+{
+
+/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
+ * Value.
+ *
+ * \deprecated Use CharReader and CharReaderBuilder.
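+ *
+ * A minimal sketch of this legacy interface (illustrative; 'document' is a
+ * hypothetical UTF-8 string):
+ * \code
+ * Json::Reader reader;
+ * Json::Value root;
+ * if (!reader.parse(document, root))
+ *   std::cerr << reader.getFormattedErrorMessages();
+ * \endcode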
+ */
+class JSON_API Reader
+{
+public:
+ typedef char Char;
+ typedef const Char *Location;
+
+ /** \brief An error tagged with where in the JSON text it was encountered.
+ *
+ * The offsets give the [start, limit) range of bytes within the text. Note
+ * that this is bytes, not codepoints.
+ *
+ */
+ struct StructuredError
+ {
+ ptrdiff_t offset_start;
+ ptrdiff_t offset_limit;
+ JSONCPP_STRING message;
+ };
+
+ /** \brief Constructs a Reader allowing all features
+ * for parsing.
+ */
+ Reader();
+
+ /** \brief Constructs a Reader allowing the specified feature set
+ * for parsing.
+ */
+ Reader(const Features &features);
+
+ /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
+ * document.
+ * \param document UTF-8 encoded string containing the document to read.
+ * \param root [out] Contains the root value of the document if it was
+ * successfully parsed.
+   * \param collectComments \c true to collect comments and allow writing them
+   *                        back during serialization, \c false to discard
+   *                        comments. This parameter is ignored if
+   *                        Features::allowComments_ is \c false.
+ * \return \c true if the document was successfully parsed, \c false if an
+ * error occurred.
+ */
+ bool parse(const std::string &document, Value &root, bool collectComments = true);
+
+  /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
+   * document.
+   * \param beginDoc Pointer to the beginning of the UTF-8 encoded document to
+   *                 read.
+   * \param endDoc Pointer to the end of the UTF-8 encoded document to read.
+   *               Must be >= beginDoc.
+   * \param root [out] Contains the root value of the document if it was
+   *             successfully parsed.
+   * \param collectComments \c true to collect comments and allow writing them
+   *                        back during serialization, \c false to discard
+   *                        comments. This parameter is ignored if
+   *                        Features::allowComments_ is \c false.
+   * \return \c true if the document was successfully parsed, \c false if an
+   *         error occurred.
+ */
+ bool parse(const char *beginDoc, const char *endDoc, Value &root, bool collectComments = true);
+
+ /// \brief Parse from input stream.
+ /// \see Json::operator>>(std::istream&, Json::Value&).
+ bool parse(JSONCPP_ISTREAM &is, Value &root, bool collectComments = true);
+
+  /** \brief Returns a user-friendly string that lists errors in the parsed
+   * document.
+   * \return Formatted error message with the list of errors and their
+   *         locations in the parsed document. An empty string is returned if
+   *         no error occurred during parsing.
+ * \deprecated Use getFormattedErrorMessages() instead (typo fix).
+ */
+ JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.")
+ JSONCPP_STRING getFormatedErrorMessages() const;
+
+  /** \brief Returns a user-friendly string that lists errors in the parsed
+   * document.
+   * \return Formatted error message with the list of errors and their
+   *         locations in the parsed document. An empty string is returned if
+   *         no error occurred during parsing.
+ */
+ JSONCPP_STRING getFormattedErrorMessages() const;
+
+  /** \brief Returns a vector of structured errors encountered while parsing.
+   * \return A (possibly empty) vector of StructuredError objects. Currently
+   *         only one error can be returned, but the caller should tolerate
+   *         multiple errors. This can occur if the parser recovers from a
+   *         non-fatal parse error and then encounters additional errors.
+ */
+ std::vector<StructuredError> getStructuredErrors() const;
+
+ /** \brief Add a semantic error message.
+ * \param value JSON Value location associated with the error
+ * \param message The error message.
+ * \return \c true if the error was successfully added, \c false if the
+ * Value offset exceeds the document size.
+ */
+ bool pushError(const Value &value, const JSONCPP_STRING &message);
+
+ /** \brief Add a semantic error message with extra context.
+ * \param value JSON Value location associated with the error
+ * \param message The error message.
+ * \param extra Additional JSON Value location to contextualize the error
+ * \return \c true if the error was successfully added, \c false if either
+ * Value offset exceeds the document size.
+ */
+ bool pushError(const Value &value, const JSONCPP_STRING &message, const Value &extra);
+
+ /** \brief Return whether there are any errors.
+   * \return \c true if there are no errors to report, \c false if
+   *         errors have occurred.
+ */
+ bool good() const;
+
+private:
+ enum TokenType
+ {
+ tokenEndOfStream = 0,
+ tokenObjectBegin,
+ tokenObjectEnd,
+ tokenArrayBegin,
+ tokenArrayEnd,
+ tokenString,
+ tokenNumber,
+ tokenTrue,
+ tokenFalse,
+ tokenNull,
+ tokenArraySeparator,
+ tokenMemberSeparator,
+ tokenComment,
+ tokenError
+ };
+
+ class Token
+ {
+ public:
+ TokenType type_;
+ Location start_;
+ Location end_;
+ };
+
+ class ErrorInfo
+ {
+ public:
+ Token token_;
+ JSONCPP_STRING message_;
+ Location extra_;
+ };
+
+ typedef std::deque<ErrorInfo> Errors;
+
+ bool readToken(Token &token);
+ void skipSpaces();
+ bool match(Location pattern, int patternLength);
+ bool readComment();
+ bool readCStyleComment();
+ bool readCppStyleComment();
+ bool readString();
+ void readNumber();
+ bool readValue();
+ bool readObject(Token &token);
+ bool readArray(Token &token);
+ bool decodeNumber(Token &token);
+ bool decodeNumber(Token &token, Value &decoded);
+ bool decodeString(Token &token);
+ bool decodeString(Token &token, JSONCPP_STRING &decoded);
+ bool decodeDouble(Token &token);
+ bool decodeDouble(Token &token, Value &decoded);
+ bool decodeUnicodeCodePoint(Token &token, Location &current, Location end, unsigned int &unicode);
+ bool decodeUnicodeEscapeSequence(Token &token, Location &current, Location end,
+ unsigned int &unicode);
+ bool addError(const JSONCPP_STRING &message, Token &token, Location extra = 0);
+ bool recoverFromError(TokenType skipUntilToken);
+ bool addErrorAndRecover(const JSONCPP_STRING &message, Token &token, TokenType skipUntilToken);
+ void skipUntilSpace();
+ Value &currentValue();
+ Char getNextChar();
+ void getLocationLineAndColumn(Location location, int &line, int &column) const;
+ JSONCPP_STRING getLocationLineAndColumn(Location location) const;
+ void addComment(Location begin, Location end, CommentPlacement placement);
+ void skipCommentTokens(Token &token);
+
+ typedef std::stack<Value *> Nodes;
+ Nodes nodes_;
+ Errors errors_;
+ JSONCPP_STRING document_;
+ Location begin_;
+ Location end_;
+ Location current_;
+ Location lastValueEnd_;
+ Value *lastValue_;
+ JSONCPP_STRING commentsBefore_;
+ Features features_;
+ bool collectComments_;
+}; // Reader
+
+/** Interface for reading JSON from a char array.
+ */
+class JSON_API CharReader
+{
+public:
+ virtual ~CharReader() {}
+  /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
+  * document. The document must be UTF-8 encoded.
+  *
+  * \param beginDoc Pointer to the beginning of the UTF-8 encoded document to
+  *                 read.
+  * \param endDoc Pointer to the end of the UTF-8 encoded document to read.
+  *               Must be >= beginDoc.
+  * \param root [out] Contains the root value of the document if it was
+  *             successfully parsed.
+  * \param errs [out] If not NULL, receives a user-friendly string that lists
+  *                   errors in the parsed document.
+  * \return \c true if the document was successfully parsed, \c false if an
+  *         error occurred.
+ */
+ virtual bool parse(char const *beginDoc, char const *endDoc, Value *root,
+ JSONCPP_STRING *errs) = 0;
+
+ class JSON_API Factory
+ {
+ public:
+ virtual ~Factory() {}
+ /** \brief Allocate a CharReader via operator new().
+ * \throw std::exception if something goes wrong (e.g. invalid settings)
+ */
+ virtual CharReader *newCharReader() const = 0;
+ }; // Factory
+}; // CharReader
+
+/** \brief Build a CharReader implementation.
+
+Usage:
+\code
+ using namespace Json;
+ CharReaderBuilder builder;
+ builder["collectComments"] = false;
+ Value value;
+ JSONCPP_STRING errs;
+ bool ok = parseFromStream(builder, std::cin, &value, &errs);
+\endcode
+*/
+class JSON_API CharReaderBuilder : public CharReader::Factory
+{
+public:
+ // Note: We use a Json::Value so that we can add data-members to this class
+ // without a major version bump.
+ /** Configuration of this builder.
+    Available settings (case-sensitive):
+ - `"collectComments": false or true`
+      - true to collect comments and allow writing them
+ back during serialization, false to discard comments.
+ This parameter is ignored if allowComments is false.
+ - `"allowComments": false or true`
+ - true if comments are allowed.
+ - `"strictRoot": false or true`
+ - true if root must be either an array or an object value
+ - `"allowDroppedNullPlaceholders": false or true`
+ - true if dropped null placeholders are allowed. (See StreamWriterBuilder.)
+ - `"allowNumericKeys": false or true`
+ - true if numeric object keys are allowed.
+ - `"allowSingleQuotes": false or true`
+      - true if single quotes ('') are allowed for strings (both keys and values)
+ - `"stackLimit": integer`
+ - Exceeding stackLimit (recursive depth of `readValue()`) will
+ cause an exception.
+ - This is a security issue (seg-faults caused by deeply nested JSON),
+ so the default is low.
+ - `"failIfExtra": false or true`
+ - If true, `parse()` returns false when extra non-whitespace trails
+ the JSON value in the input string.
+ - `"rejectDupKeys": false or true`
+ - If true, `parse()` returns false when a key is duplicated within an object.
+ - `"allowSpecialFloats": false or true`
+ - If true, special float values (NaNs and infinities) are allowed
+      and their values can be restored losslessly.
+
+  You can examine `settings_` yourself
+ to see the defaults. You can also write and read them just like any
+ JSON Value.
+ \sa setDefaults()
+ */
+ Json::Value settings_;
+
+ CharReaderBuilder();
+ ~CharReaderBuilder() JSONCPP_OVERRIDE;
+
+ CharReader *newCharReader() const JSONCPP_OVERRIDE;
+
+ /** \return true if 'settings' are legal and consistent;
+ * otherwise, indicate bad settings via 'invalid'.
+ */
+ bool validate(Json::Value *invalid) const;
+
+ /** A simple way to update a specific setting.
+ */
+ Value &operator[](JSONCPP_STRING key);
+
+ /** Called by ctor, but you can use this to reset settings_.
+ * \pre 'settings' != NULL (but Json::null is fine)
+ * \remark Defaults:
+ * \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
+ */
+ static void setDefaults(Json::Value *settings);
+ /** Same as old Features::strictMode().
+ * \pre 'settings' != NULL (but Json::null is fine)
+ * \remark Defaults:
+ * \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
+ */
+ static void strictMode(Json::Value *settings);
+};
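+
+// A parsing sketch without a stream (illustrative only; 'doc' and 'len' are
+// hypothetical, and std::unique_ptr assumes C++11):
+//   Json::CharReaderBuilder builder;
+//   std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
+//   Json::Value root;
+//   JSONCPP_STRING errs;
+//   bool ok = reader->parse(doc, doc + len, &root, &errs);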
+
+/** Consume entire stream and use its begin/end.
+ * Someday we might have a real StreamReader, but for now this
+ * is convenient.
+ */
+bool JSON_API parseFromStream(CharReader::Factory const &, JSONCPP_ISTREAM &, Value *root,
+ std::string *errs);
+
+/** \brief Read from 'sin' into 'root'.
+
+ Always keep comments from the input JSON.
+
+ This can be used to read a file into a particular sub-object.
+ For example:
+ \code
+ Json::Value root;
+ cin >> root["dir"]["file"];
+ cout << root;
+ \endcode
+ Result:
+ \verbatim
+ {
+ "dir": {
+ "file": {
+ // The input stream JSON would be nested here.
+ }
+ }
+ }
+ \endverbatim
+ \throw std::exception on parse error.
+ \see Json::operator<<()
+*/
+JSON_API JSONCPP_ISTREAM &operator>>(JSONCPP_ISTREAM &, Value &);
+
+} // namespace Json
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // CPPTL_JSON_READER_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/writer.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_WRITER_H_INCLUDED
+#define JSON_WRITER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <vector>
+#include <string>
+#include <ostream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+namespace Json
+{
+
+class Value;
+
+/**
+
+Usage:
+\code
+ using namespace Json;
+ void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
+ std::unique_ptr<StreamWriter> const writer(
+ factory.newStreamWriter());
+ writer->write(value, &std::cout);
+ std::cout << std::endl; // add lf and flush
+ }
+\endcode
+*/
+class JSON_API StreamWriter
+{
+protected:
+ JSONCPP_OSTREAM *sout_; // not owned; will not delete
+public:
+ StreamWriter();
+ virtual ~StreamWriter();
+ /** Write Value into document as configured in sub-class.
+  Does not take ownership of sout, but holds a reference for the duration of the call.
+ \pre sout != NULL
+ \return zero on success (For now, we always return zero, so check the stream instead.)
+ \throw std::exception possibly, depending on configuration
+ */
+ virtual int write(Value const &root, JSONCPP_OSTREAM *sout) = 0;
+
+ /** \brief A simple abstract factory.
+ */
+ class JSON_API Factory
+ {
+ public:
+ virtual ~Factory();
+    /** \brief Allocate a StreamWriter via operator new().
+ * \throw std::exception if something goes wrong (e.g. invalid settings)
+ */
+ virtual StreamWriter *newStreamWriter() const = 0;
+ }; // Factory
+}; // StreamWriter
+
+/** \brief Write into stringstream, then return string, for convenience.
+ * A StreamWriter will be created from the factory, used, and then deleted.
+ */
+JSONCPP_STRING JSON_API writeString(StreamWriter::Factory const &factory, Value const &root);
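+
+// E.g. (an illustrative sketch; 'root' is hypothetical):
+//   Json::StreamWriterBuilder builder;
+//   JSONCPP_STRING doc = Json::writeString(builder, root);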
+
+/** \brief Build a StreamWriter implementation.
+
+Usage:
+\code
+ using namespace Json;
+ Value value = ...;
+ StreamWriterBuilder builder;
+ builder["commentStyle"] = "None";
+ builder["indentation"] = " "; // or whatever you like
+ std::unique_ptr<Json::StreamWriter> writer(
+ builder.newStreamWriter());
+ writer->write(value, &std::cout);
+ std::cout << std::endl; // add lf and flush
+\endcode
+*/
+class JSON_API StreamWriterBuilder : public StreamWriter::Factory
+{
+public:
+ // Note: We use a Json::Value so that we can add data-members to this class
+ // without a major version bump.
+ /** Configuration of this builder.
+ Available settings (case-sensitive):
+ - "commentStyle": "None" or "All"
+ - "indentation": "<anything>"
+ - "enableYAMLCompatibility": false or true
+ - slightly change the whitespace around colons
+ - "dropNullPlaceholders": false or true
+ - Drop the "null" string from the writer's output for nullValues.
+ Strictly speaking, this is not valid JSON. But when the output is being
+ fed to a browser's Javascript, it makes for smaller output and the
+ browser can handle the output just fine.
+ - "useSpecialFloats": false or true
+ - If true, outputs non-finite floating point values in the following way:
+ NaN values as "NaN", positive infinity as "Infinity", and negative infinity
+ as "-Infinity".
+
+  You can examine `settings_` yourself
+ to see the defaults. You can also write and read them just like any
+ JSON Value.
+ \sa setDefaults()
+ */
+ Json::Value settings_;
+
+ StreamWriterBuilder();
+ ~StreamWriterBuilder() JSONCPP_OVERRIDE;
+
+ /**
+ * \throw std::exception if something goes wrong (e.g. invalid settings)
+ */
+ StreamWriter *newStreamWriter() const JSONCPP_OVERRIDE;
+
+ /** \return true if 'settings' are legal and consistent;
+ * otherwise, indicate bad settings via 'invalid'.
+ */
+ bool validate(Json::Value *invalid) const;
+ /** A simple way to update a specific setting.
+ */
+ Value &operator[](JSONCPP_STRING key);
+
+ /** Called by ctor, but you can use this to reset settings_.
+ * \pre 'settings' != NULL (but Json::null is fine)
+ * \remark Defaults:
+ * \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults
+ */
+ static void setDefaults(Json::Value *settings);
+};
+
+/** \brief Abstract class for writers.
+ * \deprecated Use StreamWriter. (And really, this is an implementation detail.)
+ */
+class JSON_API Writer
+{
+public:
+ virtual ~Writer();
+
+ virtual JSONCPP_STRING write(const Value &root) = 0;
+};
+
+/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format
+ *without formatting (not human friendly).
+ *
+ * The JSON document is written on a single line. It is not intended for
+ * 'human' consumption, but may be useful to support features such as RPC
+ * where bandwidth is limited.
+ * \sa Reader, Value
+ * \deprecated Use StreamWriterBuilder.
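+ *
+ * A sketch (illustrative; 'root' is hypothetical):
+ * \code
+ * Json::FastWriter writer;
+ * JSONCPP_STRING doc = writer.write(root); // single line, terminated by '\n'
+ * \endcode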
+ */
+class JSON_API FastWriter : public Writer
+{
+
+public:
+ FastWriter();
+ ~FastWriter() JSONCPP_OVERRIDE {}
+
+ void enableYAMLCompatibility();
+
+ /** \brief Drop the "null" string from the writer's output for nullValues.
+ * Strictly speaking, this is not valid JSON. But when the output is being
+ * fed to a browser's Javascript, it makes for smaller output and the
+ * browser can handle the output just fine.
+ */
+ void dropNullPlaceholders();
+
+ void omitEndingLineFeed();
+
+public: // overridden from Writer
+ JSONCPP_STRING write(const Value &root) JSONCPP_OVERRIDE;
+
+private:
+ void writeValue(const Value &value);
+
+ JSONCPP_STRING document_;
+ bool yamlCompatiblityEnabled_;
+ bool dropNullPlaceholders_;
+ bool omitEndingLineFeed_;
+};
+
+/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
+ * human-friendly way.
+ *
+ * The rules for line breaks and indentation are as follows:
+ * - Object value:
+ *   - if empty, then print {} without indent or line break
+ *   - if not empty, then print '{', line break & indent, print one value per
+ *     line, and then unindent, line break, and print '}'.
+ * - Array value:
+ *   - if empty, then print [] without indent or line break
+ *   - if the array contains no object value, empty array, or some other value
+ *     types, and all the values fit on one line, then print the array on a
+ *     single line.
+ *   - otherwise, if the values do not fit on one line, or the array contains
+ *     an object or a non-empty array, then print one value per line.
+ *
+ * If the Value has comments, then they are output according to their
+ * #CommentPlacement.
+ *
+ * \sa Reader, Value, Value::setComment()
+ * \deprecated Use StreamWriterBuilder.
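+ *
+ * A sketch (illustrative; 'root' is hypothetical):
+ * \code
+ * Json::StyledWriter writer;
+ * JSONCPP_STRING doc = writer.write(root);
+ * \endcode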
+ */
+class JSON_API StyledWriter : public Writer
+{
+public:
+ StyledWriter();
+ ~StyledWriter() JSONCPP_OVERRIDE {}
+
+public: // overridden from Writer
+ /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
+ * \param root Value to serialize.
+ * \return String containing the JSON document that represents the root value.
+ */
+ JSONCPP_STRING write(const Value &root) JSONCPP_OVERRIDE;
+
+private:
+ void writeValue(const Value &value);
+ void writeArrayValue(const Value &value);
+ bool isMultineArray(const Value &value);
+ void pushValue(const JSONCPP_STRING &value);
+ void writeIndent();
+ void writeWithIndent(const JSONCPP_STRING &value);
+ void indent();
+ void unindent();
+ void writeCommentBeforeValue(const Value &root);
+ void writeCommentAfterValueOnSameLine(const Value &root);
+ bool hasCommentForValue(const Value &value);
+ static JSONCPP_STRING normalizeEOL(const JSONCPP_STRING &text);
+
+ typedef std::vector<JSONCPP_STRING> ChildValues;
+
+ ChildValues childValues_;
+ JSONCPP_STRING document_;
+ JSONCPP_STRING indentString_;
+ unsigned int rightMargin_;
+ unsigned int indentSize_;
+ bool addChildValues_;
+};
+
+/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
+ * human-friendly way, to a stream rather than to a string.
+ *
+ * The rules for line breaks and indentation are as follows:
+ * - Object value:
+ *   - if empty, then print {} without indent or line break
+ *   - if not empty, then print '{', line break & indent, print one value per
+ *     line, and then unindent, line break, and print '}'.
+ * - Array value:
+ *   - if empty, then print [] without indent or line break
+ *   - if the array contains no object value, empty array, or some other value
+ *     types, and all the values fit on one line, then print the array on a
+ *     single line.
+ *   - otherwise, if the values do not fit on one line, or the array contains
+ *     an object or a non-empty array, then print one value per line.
+ *
+ * If the Value has comments, then they are output according to their
+ * #CommentPlacement.
+ *
+ * \param indentation Each level will be indented by this amount extra.
+ * \sa Reader, Value, Value::setComment()
+ * \deprecated Use StreamWriterBuilder.
+ */
+class JSON_API StyledStreamWriter
+{
+public:
+ StyledStreamWriter(JSONCPP_STRING indentation = "\t");
+ ~StyledStreamWriter() {}
+
+public:
+ /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
+ * \param out Stream to write to. (Can be ostringstream, e.g.)
+ * \param root Value to serialize.
+ * \note There is no point in deriving from Writer, since write() should not
+ * return a value.
+ */
+ void write(JSONCPP_OSTREAM &out, const Value &root);
+
+private:
+ void writeValue(const Value &value);
+ void writeArrayValue(const Value &value);
+ bool isMultineArray(const Value &value);
+ void pushValue(const JSONCPP_STRING &value);
+ void writeIndent();
+ void writeWithIndent(const JSONCPP_STRING &value);
+ void indent();
+ void unindent();
+ void writeCommentBeforeValue(const Value &root);
+ void writeCommentAfterValueOnSameLine(const Value &root);
+ bool hasCommentForValue(const Value &value);
+ static JSONCPP_STRING normalizeEOL(const JSONCPP_STRING &text);
+
+ typedef std::vector<JSONCPP_STRING> ChildValues;
+
+ ChildValues childValues_;
+ JSONCPP_OSTREAM *document_;
+ JSONCPP_STRING indentString_;
+ unsigned int rightMargin_;
+ JSONCPP_STRING indentation_;
+ bool addChildValues_ : 1;
+ bool indented_ : 1;
+};
+
+#if defined(JSON_HAS_INT64)
+JSONCPP_STRING JSON_API valueToString(Int value);
+JSONCPP_STRING JSON_API valueToString(UInt value);
+#endif // if defined(JSON_HAS_INT64)
+JSONCPP_STRING JSON_API valueToString(LargestInt value);
+JSONCPP_STRING JSON_API valueToString(LargestUInt value);
+JSONCPP_STRING JSON_API valueToString(double value);
+JSONCPP_STRING JSON_API valueToString(bool value);
+JSONCPP_STRING JSON_API valueToQuotedString(const char *value);
+
+/// \brief Output using the StyledStreamWriter.
+/// \see Json::operator>>()
+JSON_API JSONCPP_OSTREAM &operator<<(JSONCPP_OSTREAM &, const Value &root);
+
+} // namespace Json
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // JSON_WRITER_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/writer.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/assertions.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED
+#define CPPTL_JSON_ASSERTIONS_H_INCLUDED
+
+#include <stdlib.h>
+#include <sstream>
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "config.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+/** It should not be possible for a maliciously designed file to
+ * cause an abort() or seg-fault, so these macros are used only
+ * for pre-condition violations and internal logic errors.
+ */
+#if JSON_USE_EXCEPTION
+
+// @todo <= add detail about condition in exception
+#define JSON_ASSERT(condition) \
+ { \
+ if (!(condition)) \
+ { \
+ Json::throwLogicError("assert json failed"); \
+ } \
+ }
+
+#define JSON_FAIL_MESSAGE(message) \
+ { \
+ JSONCPP_OSTRINGSTREAM oss; \
+ oss << message; \
+ Json::throwLogicError(oss.str()); \
+ abort(); \
+ }
+
+#else // JSON_USE_EXCEPTION
+
+#define JSON_ASSERT(condition) assert(condition)
+
+// The call to assert() will show the failure message in debug builds. In
+// release builds we abort, for a core-dump or debugger.
+#define JSON_FAIL_MESSAGE(message) \
+ { \
+ JSONCPP_OSTRINGSTREAM oss; \
+ oss << message; \
+ assert(false && oss.str().c_str()); \
+ abort(); \
+ }
+
+#endif
+
+#define JSON_ASSERT_MESSAGE(condition, message) \
+ if (!(condition)) \
+ { \
+ JSON_FAIL_MESSAGE(message); \
+ }
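+
+// Usage sketch (illustrative): the message is streamed into an ostringstream,
+// so '<<' chaining works, e.g.:
+//   JSON_ASSERT_MESSAGE(value.isArray(), "expected array, got type " << value.type());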
+
+#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/assertions.h
+// //////////////////////////////////////////////////////////////////////
+
+#endif // ifndef JSON_AMALGATED_H_INCLUDED
diff --git a/runtimes/libs/jsoncpp/jsoncpp.cpp b/runtimes/libs/jsoncpp/jsoncpp.cpp
new file mode 100644
index 000000000..5b3cd691d
--- /dev/null
+++ b/runtimes/libs/jsoncpp/jsoncpp.cpp
@@ -0,0 +1,5651 @@
+/// JsonCpp amalgamated source (http://jsoncpp.sourceforge.net/).
+/// It is intended to be used with #include "json/json.h"
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+/*
+The JsonCpp library's source code, including accompanying documentation,
+tests and demonstration applications, are licensed under the following
+conditions...
+
+The author (Baptiste Lepilleur) explicitly disclaims copyright in all
+jurisdictions which recognize such a disclaimer. In such jurisdictions,
+this software is released into the Public Domain.
+
+In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
+2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
+released under the terms of the MIT License (see below).
+
+In jurisdictions which recognize Public Domain property, the user of this
+software may choose to accept it either as 1) Public Domain, 2) under the
+conditions of the MIT License (see below), or 3) under the terms of dual
+Public Domain/MIT License conditions described here, as they choose.
+
+The MIT License is about as close to Public Domain as a license can get, and is
+described in clear, concise terms at:
+
+ http://en.wikipedia.org/wiki/MIT_License
+
+The full text of the MIT License follows:
+
+========================================================================
+Copyright (c) 2007-2010 Baptiste Lepilleur
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+========================================================================
+(END LICENSE TEXT)
+
+The MIT license is compatible with both the GPL and commercial
+software, affording one all of the rights of Public Domain with the
+minor nuisance of being required to keep the above copyright notice
+and license text in the source code. Note also that by accepting the
+Public Domain "license" you can re-license your copy using whatever
+license you like.
+
+*/
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+#include "json/json.h"
+
+#ifndef JSON_IS_AMALGAMATION
+#error "Compile with -I PATH_TO_JSON_DIRECTORY"
+#endif
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_tool.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+#define LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+
+#ifndef NO_LOCALE_SUPPORT
+#include <clocale>
+#endif
+
+/* This header provides common string manipulation support, such as UTF-8,
+ * portable conversion from/to string...
+ *
+ * It is an internal header that must not be exposed.
+ */
+
+namespace Json
+{
+static char getDecimalPoint()
+{
+#ifdef NO_LOCALE_SUPPORT
+ return '\0';
+#else
+ struct lconv *lc = localeconv();
+ return lc ? *(lc->decimal_point) : '\0';
+#endif
+}
+
+/// Converts a unicode code-point to UTF-8.
+static inline JSONCPP_STRING codePointToUTF8(unsigned int cp)
+{
+ JSONCPP_STRING result;
+
+ // based on description from http://en.wikipedia.org/wiki/UTF-8
+
+ if (cp <= 0x7f)
+ {
+ result.resize(1);
+ result[0] = static_cast<char>(cp);
+ }
+ else if (cp <= 0x7FF)
+ {
+ result.resize(2);
+ result[1] = static_cast<char>(0x80 | (0x3f & cp));
+ result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
+ }
+ else if (cp <= 0xFFFF)
+ {
+ result.resize(3);
+ result[2] = static_cast<char>(0x80 | (0x3f & cp));
+ result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
+ result[0] = static_cast<char>(0xE0 | (0xf & (cp >> 12)));
+ }
+ else if (cp <= 0x10FFFF)
+ {
+ result.resize(4);
+ result[3] = static_cast<char>(0x80 | (0x3f & cp));
+ result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
+ result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
+ result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
+ }
+
+ return result;
+}
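+
+// E.g. codePointToUTF8(0x20AC) yields the three bytes 0xE2 0x82 0xAC
+// (the UTF-8 encoding of the EURO SIGN).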
+
+/// Returns true if ch is a control character (in range [1,31]).
+static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }
+
+enum
+{
+  /// Constant that specifies the size of the buffer that must be passed to
+ /// uintToString.
+ uintToStringBufferSize = 3 * sizeof(LargestUInt) + 1
+};
+
+// Defines a char buffer for use with uintToString().
+typedef char UIntToStringBuffer[uintToStringBufferSize];
+
+/** Converts an unsigned integer to string.
+ * @param value Unsigned integer to convert to string
+ * @param current Input/Output string buffer.
+ * Must have at least uintToStringBufferSize chars free.
+ */
+static inline void uintToString(LargestUInt value, char *&current)
+{
+ *--current = 0;
+ do
+ {
+ *--current = static_cast<char>(value % 10U + static_cast<unsigned>('0'));
+ value /= 10;
+ } while (value != 0);
+}
+
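+// Illustrative sketch (editorial note, not part of jsoncpp): uintToString
+// writes digits backwards from the end of the buffer, so callers start with
+// `current` one past the buffer's last byte:
+//   UIntToStringBuffer buffer;
+//   char *current = buffer + sizeof(buffer);
+//   uintToString(1234, current); // current now points at the NUL-terminated "1234"
+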
+/** Change ',' to '.' everywhere in buffer.
+ *
+ * An earlier, more sophisticated implementation did not work on WinCE.
+ * @see https://github.com/open-source-parsers/jsoncpp/pull/9
+ */
+static inline void fixNumericLocale(char *begin, char *end)
+{
+ while (begin < end)
+ {
+ if (*begin == ',')
+ {
+ *begin = '.';
+ }
+ ++begin;
+ }
+}
+
+static inline void fixNumericLocaleInput(char *begin, char *end)
+{
+ char decimalPoint = getDecimalPoint();
+ if (decimalPoint != '\0' && decimalPoint != '.')
+ {
+ while (begin < end)
+ {
+ if (*begin == '.')
+ {
+ *begin = decimalPoint;
+ }
+ ++begin;
+ }
+ }
+}
+
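+// Illustrative sketch (editorial note, not part of jsoncpp): the two helpers
+// above bridge locale-dependent printf/scanf behavior. Under a locale whose
+// decimal point is ',':
+//   char out[] = "3,14";
+//   fixNumericLocale(out, out + 4);    // -> "3.14", for emitting JSON
+//   char in[] = "3.14";
+//   fixNumericLocaleInput(in, in + 4); // -> "3,14", for feeding sscanf
+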
+} // namespace Json
+
+#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_tool.h
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_reader.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2011 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include <json/assertions.h>
+#include <json/reader.h>
+#include <json/value.h>
+#include "json_tool.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <utility>
+#include <cstdio>
+#include <cassert>
+#include <cstring>
+#include <istream>
+#include <sstream>
+#include <memory>
+#include <set>
+#include <limits>
+
+#if defined(_MSC_VER)
+#if _MSC_VER >= 1900 // VC++ 14.0 and above
+#define snprintf std::snprintf
+#elif !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
+#define snprintf sprintf_s
+#else
+#define snprintf _snprintf
+#endif
+#elif defined(__ANDROID__) || defined(__QNXNTO__)
+#define snprintf snprintf
+#elif __cplusplus >= 201103L
+#if !defined(__MINGW32__) && !defined(__CYGWIN__)
+#define snprintf std::snprintf
+#endif
+#endif
+
+#if defined(__QNXNTO__)
+#define sscanf std::sscanf
+#endif
+
+#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
+// Disable warning about strdup being deprecated.
+#pragma warning(disable : 4996)
+#endif
+
+static int const stackLimit_g = 1000;
+static int stackDepth_g = 0; // see readValue()
+
+namespace Json
+{
+
+#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
+typedef std::unique_ptr<CharReader> CharReaderPtr;
+#else
+typedef std::auto_ptr<CharReader> CharReaderPtr;
+#endif
+
+// Implementation of class Features
+// ////////////////////////////////
+
+Features::Features()
+ : allowComments_(true), strictRoot_(false), allowDroppedNullPlaceholders_(false),
+ allowNumericKeys_(false)
+{
+}
+
+Features Features::all() { return Features(); }
+
+Features Features::strictMode()
+{
+ Features features;
+ features.allowComments_ = false;
+ features.strictRoot_ = true;
+ features.allowDroppedNullPlaceholders_ = false;
+ features.allowNumericKeys_ = false;
+ return features;
+}
+
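+// Illustrative usage sketch (editorial note, not part of jsoncpp): the
+// Reader class below takes a Features object at construction, e.g.
+//   Json::Reader lenient;                              // Features::all()
+//   Json::Reader strict(Json::Features::strictMode()); // comments rejected
+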
+// Implementation of class Reader
+// ////////////////////////////////
+
+static bool containsNewLine(Reader::Location begin, Reader::Location end)
+{
+ for (; begin < end; ++begin)
+ if (*begin == '\n' || *begin == '\r')
+ return true;
+ return false;
+}
+
+// Class Reader
+// //////////////////////////////////////////////////////////////////
+
+Reader::Reader()
+ : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), lastValue_(),
+ commentsBefore_(), features_(Features::all()), collectComments_()
+{
+}
+
+Reader::Reader(const Features &features)
+ : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), lastValue_(),
+ commentsBefore_(), features_(features), collectComments_()
+{
+}
+
+bool Reader::parse(const std::string &document, Value &root, bool collectComments)
+{
+  JSONCPP_STRING documentCopy(document.data(), document.data() + document.length());
+ std::swap(documentCopy, document_);
+ const char *begin = document_.c_str();
+ const char *end = begin + document_.length();
+ return parse(begin, end, root, collectComments);
+}
+
+bool Reader::parse(std::istream &sin, Value &root, bool collectComments)
+{
+ // std::istream_iterator<char> begin(sin);
+ // std::istream_iterator<char> end;
+ // Those would allow streamed input from a file, if parse() were a
+ // template function.
+
+  // Read the whole stream into a single string; whether this avoids an
+  // extra copy depends on the string implementation.
+ JSONCPP_STRING doc;
+ std::getline(sin, doc, (char)EOF);
+ return parse(doc.data(), doc.data() + doc.size(), root, collectComments);
+}
+
+bool Reader::parse(const char *beginDoc, const char *endDoc, Value &root, bool collectComments)
+{
+ if (!features_.allowComments_)
+ {
+ collectComments = false;
+ }
+
+ begin_ = beginDoc;
+ end_ = endDoc;
+ collectComments_ = collectComments;
+ current_ = begin_;
+ lastValueEnd_ = 0;
+ lastValue_ = 0;
+ commentsBefore_ = "";
+ errors_.clear();
+ while (!nodes_.empty())
+ nodes_.pop();
+ nodes_.push(&root);
+
+ stackDepth_g = 0; // Yes, this is bad coding, but options are limited.
+ bool successful = readValue();
+ Token token;
+ skipCommentTokens(token);
+ if (collectComments_ && !commentsBefore_.empty())
+ root.setComment(commentsBefore_, commentAfter);
+ if (features_.strictRoot_)
+ {
+ if (!root.isArray() && !root.isObject())
+ {
+ // Set error location to start of doc, ideally should be first token found
+ // in doc
+ token.type_ = tokenError;
+ token.start_ = beginDoc;
+ token.end_ = endDoc;
+ addError("A valid JSON document must be either an array or an object value.", token);
+ return false;
+ }
+ }
+ return successful;
+}
+
+bool Reader::readValue()
+{
+ // This is a non-reentrant way to support a stackLimit. Terrible!
+ // But this deprecated class has a security problem: Bad input can
+ // cause a seg-fault. This seems like a fair, binary-compatible way
+ // to prevent the problem.
+ if (stackDepth_g >= stackLimit_g)
+ throwRuntimeError("Exceeded stackLimit in readValue().");
+ ++stackDepth_g;
+
+ Token token;
+ skipCommentTokens(token);
+ bool successful = true;
+
+ if (collectComments_ && !commentsBefore_.empty())
+ {
+ currentValue().setComment(commentsBefore_, commentBefore);
+ commentsBefore_ = "";
+ }
+
+ switch (token.type_)
+ {
+ case tokenObjectBegin:
+ successful = readObject(token);
+ currentValue().setOffsetLimit(current_ - begin_);
+ break;
+ case tokenArrayBegin:
+ successful = readArray(token);
+ currentValue().setOffsetLimit(current_ - begin_);
+ break;
+ case tokenNumber:
+ successful = decodeNumber(token);
+ break;
+ case tokenString:
+ successful = decodeString(token);
+ break;
+ case tokenTrue:
+ {
+ Value v(true);
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenFalse:
+ {
+ Value v(false);
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenNull:
+ {
+ Value v;
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenArraySeparator:
+ case tokenObjectEnd:
+ case tokenArrayEnd:
+ if (features_.allowDroppedNullPlaceholders_)
+ {
+ // "Un-read" the current token and mark the current value as a null
+ // token.
+ current_--;
+ Value v;
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(current_ - begin_ - 1);
+ currentValue().setOffsetLimit(current_ - begin_);
+ break;
+ } // Else, fall through...
+ default:
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return addError("Syntax error: value, object or array expected.", token);
+ }
+
+ if (collectComments_)
+ {
+ lastValueEnd_ = current_;
+ lastValue_ = &currentValue();
+ }
+
+ --stackDepth_g;
+ return successful;
+}
+
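+// Editorial note: when allowDroppedNullPlaceholders_ is enabled, the
+// tokenArraySeparator/tokenObjectEnd/tokenArrayEnd cases above "un-read" the
+// token and substitute a null, so for example "[1,,3]" parses as [1, null, 3].
+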
+void Reader::skipCommentTokens(Token &token)
+{
+ if (features_.allowComments_)
+ {
+ do
+ {
+ readToken(token);
+ } while (token.type_ == tokenComment);
+ }
+ else
+ {
+ readToken(token);
+ }
+}
+
+bool Reader::readToken(Token &token)
+{
+ skipSpaces();
+ token.start_ = current_;
+ Char c = getNextChar();
+ bool ok = true;
+ switch (c)
+ {
+ case '{':
+ token.type_ = tokenObjectBegin;
+ break;
+ case '}':
+ token.type_ = tokenObjectEnd;
+ break;
+ case '[':
+ token.type_ = tokenArrayBegin;
+ break;
+ case ']':
+ token.type_ = tokenArrayEnd;
+ break;
+ case '"':
+ token.type_ = tokenString;
+ ok = readString();
+ break;
+ case '/':
+ token.type_ = tokenComment;
+ ok = readComment();
+ break;
+ case '0':
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ case '-':
+ token.type_ = tokenNumber;
+ readNumber();
+ break;
+ case 't':
+ token.type_ = tokenTrue;
+ ok = match("rue", 3);
+ break;
+ case 'f':
+ token.type_ = tokenFalse;
+ ok = match("alse", 4);
+ break;
+ case 'n':
+ token.type_ = tokenNull;
+ ok = match("ull", 3);
+ break;
+ case ',':
+ token.type_ = tokenArraySeparator;
+ break;
+ case ':':
+ token.type_ = tokenMemberSeparator;
+ break;
+ case 0:
+ token.type_ = tokenEndOfStream;
+ break;
+ default:
+ ok = false;
+ break;
+ }
+ if (!ok)
+ token.type_ = tokenError;
+ token.end_ = current_;
+ return true;
+}
+
+void Reader::skipSpaces()
+{
+ while (current_ != end_)
+ {
+ Char c = *current_;
+ if (c == ' ' || c == '\t' || c == '\r' || c == '\n')
+ ++current_;
+ else
+ break;
+ }
+}
+
+bool Reader::match(Location pattern, int patternLength)
+{
+ if (end_ - current_ < patternLength)
+ return false;
+ int index = patternLength;
+ while (index--)
+ if (current_[index] != pattern[index])
+ return false;
+ current_ += patternLength;
+ return true;
+}
+
+bool Reader::readComment()
+{
+ Location commentBegin = current_ - 1;
+ Char c = getNextChar();
+ bool successful = false;
+ if (c == '*')
+ successful = readCStyleComment();
+ else if (c == '/')
+ successful = readCppStyleComment();
+ if (!successful)
+ return false;
+
+ if (collectComments_)
+ {
+ CommentPlacement placement = commentBefore;
+ if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin))
+ {
+ if (c != '*' || !containsNewLine(commentBegin, current_))
+ placement = commentAfterOnSameLine;
+ }
+
+ addComment(commentBegin, current_, placement);
+ }
+ return true;
+}
+
+static JSONCPP_STRING normalizeEOL(Reader::Location begin, Reader::Location end)
+{
+ JSONCPP_STRING normalized;
+ normalized.reserve(static_cast<size_t>(end - begin));
+ Reader::Location current = begin;
+ while (current != end)
+ {
+ char c = *current++;
+ if (c == '\r')
+ {
+ if (current != end && *current == '\n')
+ // convert dos EOL
+ ++current;
+ // convert Mac EOL
+ normalized += '\n';
+ }
+ else
+ {
+ normalized += c;
+ }
+ }
+ return normalized;
+}
+
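+// Editorial note: normalizeEOL maps both DOS ("\r\n") and old Mac ("\r")
+// line endings to a single '\n', e.g. "a\r\nb\rc" becomes "a\nb\nc".
+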
+void Reader::addComment(Location begin, Location end, CommentPlacement placement)
+{
+ assert(collectComments_);
+ const JSONCPP_STRING &normalized = normalizeEOL(begin, end);
+ if (placement == commentAfterOnSameLine)
+ {
+ assert(lastValue_ != 0);
+ lastValue_->setComment(normalized, placement);
+ }
+ else
+ {
+ commentsBefore_ += normalized;
+ }
+}
+
+bool Reader::readCStyleComment()
+{
+ while ((current_ + 1) < end_)
+ {
+ Char c = getNextChar();
+ if (c == '*' && *current_ == '/')
+ break;
+ }
+ return getNextChar() == '/';
+}
+
+bool Reader::readCppStyleComment()
+{
+ while (current_ != end_)
+ {
+ Char c = getNextChar();
+ if (c == '\n')
+ break;
+ if (c == '\r')
+ {
+ // Consume DOS EOL. It will be normalized in addComment.
+ if (current_ != end_ && *current_ == '\n')
+ getNextChar();
+      // Break on Mac OS 9 EOL.
+ break;
+ }
+ }
+ return true;
+}
+
+void Reader::readNumber()
+{
+ const char *p = current_;
+ char c = '0'; // stopgap for already consumed character
+ // integral part
+ while (c >= '0' && c <= '9')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ // fractional part
+ if (c == '.')
+ {
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ while (c >= '0' && c <= '9')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ }
+ // exponential part
+ if (c == 'e' || c == 'E')
+ {
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ if (c == '+' || c == '-')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ while (c >= '0' && c <= '9')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ }
+}
+
+bool Reader::readString()
+{
+ Char c = '\0';
+ while (current_ != end_)
+ {
+ c = getNextChar();
+ if (c == '\\')
+ getNextChar();
+ else if (c == '"')
+ break;
+ }
+ return c == '"';
+}
+
+bool Reader::readObject(Token &tokenStart)
+{
+ Token tokenName;
+ JSONCPP_STRING name;
+ Value init(objectValue);
+ currentValue().swapPayload(init);
+ currentValue().setOffsetStart(tokenStart.start_ - begin_);
+ while (readToken(tokenName))
+ {
+ bool initialTokenOk = true;
+ while (tokenName.type_ == tokenComment && initialTokenOk)
+ initialTokenOk = readToken(tokenName);
+ if (!initialTokenOk)
+ break;
+ if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
+ return true;
+ name = "";
+ if (tokenName.type_ == tokenString)
+ {
+ if (!decodeString(tokenName, name))
+ return recoverFromError(tokenObjectEnd);
+ }
+ else if (tokenName.type_ == tokenNumber && features_.allowNumericKeys_)
+ {
+ Value numberName;
+ if (!decodeNumber(tokenName, numberName))
+ return recoverFromError(tokenObjectEnd);
+ name = JSONCPP_STRING(numberName.asCString());
+ }
+ else
+ {
+ break;
+ }
+
+ Token colon;
+ if (!readToken(colon) || colon.type_ != tokenMemberSeparator)
+ {
+ return addErrorAndRecover("Missing ':' after object member name", colon, tokenObjectEnd);
+ }
+ Value &value = currentValue()[name];
+ nodes_.push(&value);
+ bool ok = readValue();
+ nodes_.pop();
+ if (!ok) // error already set
+ return recoverFromError(tokenObjectEnd);
+
+ Token comma;
+ if (!readToken(comma) || (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
+ comma.type_ != tokenComment))
+ {
+ return addErrorAndRecover("Missing ',' or '}' in object declaration", comma, tokenObjectEnd);
+ }
+ bool finalizeTokenOk = true;
+ while (comma.type_ == tokenComment && finalizeTokenOk)
+ finalizeTokenOk = readToken(comma);
+ if (comma.type_ == tokenObjectEnd)
+ return true;
+ }
+ return addErrorAndRecover("Missing '}' or object member name", tokenName, tokenObjectEnd);
+}
+
+bool Reader::readArray(Token &tokenStart)
+{
+ Value init(arrayValue);
+ currentValue().swapPayload(init);
+ currentValue().setOffsetStart(tokenStart.start_ - begin_);
+ skipSpaces();
+ if (current_ != end_ && *current_ == ']') // empty array
+ {
+ Token endArray;
+ readToken(endArray);
+ return true;
+ }
+ int index = 0;
+ for (;;)
+ {
+ Value &value = currentValue()[index++];
+ nodes_.push(&value);
+ bool ok = readValue();
+ nodes_.pop();
+ if (!ok) // error already set
+ return recoverFromError(tokenArrayEnd);
+
+ Token token;
+ // Accept Comment after last item in the array.
+ ok = readToken(token);
+ while (token.type_ == tokenComment && ok)
+ {
+ ok = readToken(token);
+ }
+ bool badTokenType = (token.type_ != tokenArraySeparator && token.type_ != tokenArrayEnd);
+ if (!ok || badTokenType)
+ {
+ return addErrorAndRecover("Missing ',' or ']' in array declaration", token, tokenArrayEnd);
+ }
+ if (token.type_ == tokenArrayEnd)
+ break;
+ }
+ return true;
+}
+
+bool Reader::decodeNumber(Token &token)
+{
+ Value decoded;
+ if (!decodeNumber(token, decoded))
+ return false;
+ currentValue().swapPayload(decoded);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return true;
+}
+
+bool Reader::decodeNumber(Token &token, Value &decoded)
+{
+ // Attempts to parse the number as an integer. If the number is
+ // larger than the maximum supported value of an integer then
+ // we decode the number as a double.
+ Location current = token.start_;
+ bool isNegative = *current == '-';
+ if (isNegative)
+ ++current;
+ // TODO: Help the compiler do the div and mod at compile time or get rid of them.
+ Value::LargestUInt maxIntegerValue =
+ isNegative ? Value::LargestUInt(Value::maxLargestInt) + 1 : Value::maxLargestUInt;
+ Value::LargestUInt threshold = maxIntegerValue / 10;
+ Value::LargestUInt value = 0;
+ while (current < token.end_)
+ {
+ Char c = *current++;
+ if (c < '0' || c > '9')
+ return decodeDouble(token, decoded);
+ Value::UInt digit(static_cast<Value::UInt>(c - '0'));
+ if (value >= threshold)
+ {
+ // We've hit or exceeded the max value divided by 10 (rounded down). If
+ // a) we've only just touched the limit, b) this is the last digit, and
+ // c) it's small enough to fit in that rounding delta, we're okay.
+ // Otherwise treat this number as a double to avoid overflow.
+ if (value > threshold || current != token.end_ || digit > maxIntegerValue % 10)
+ {
+ return decodeDouble(token, decoded);
+ }
+ }
+ value = value * 10 + digit;
+ }
+ if (isNegative && value == maxIntegerValue)
+ decoded = Value::minLargestInt;
+ else if (isNegative)
+ decoded = -Value::LargestInt(value);
+ else if (value <= Value::LargestUInt(Value::maxInt))
+ decoded = Value::LargestInt(value);
+ else
+ decoded = value;
+ return true;
+}
+
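+// Editorial worked example for the threshold test above, assuming a 64-bit
+// LargestUInt: maxIntegerValue = 2^64 - 1 = 18446744073709551615, so
+// threshold = 1844674407370955161 and maxIntegerValue % 10 = 5. Appending a
+// digit is safe while value < threshold; when value == threshold it is safe
+// only for the final digit and only if that digit is <= 5. Anything larger
+// falls back to decodeDouble.
+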
+bool Reader::decodeDouble(Token &token)
+{
+ Value decoded;
+ if (!decodeDouble(token, decoded))
+ return false;
+ currentValue().swapPayload(decoded);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return true;
+}
+
+bool Reader::decodeDouble(Token &token, Value &decoded)
+{
+ double value = 0;
+ JSONCPP_STRING buffer(token.start_, token.end_);
+ JSONCPP_ISTRINGSTREAM is(buffer);
+ if (!(is >> value))
+ return addError("'" + JSONCPP_STRING(token.start_, token.end_) + "' is not a number.", token);
+ decoded = value;
+ return true;
+}
+
+bool Reader::decodeString(Token &token)
+{
+ JSONCPP_STRING decoded_string;
+ if (!decodeString(token, decoded_string))
+ return false;
+ Value decoded(decoded_string);
+ currentValue().swapPayload(decoded);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return true;
+}
+
+bool Reader::decodeString(Token &token, JSONCPP_STRING &decoded)
+{
+ decoded.reserve(static_cast<size_t>(token.end_ - token.start_ - 2));
+ Location current = token.start_ + 1; // skip '"'
+ Location end = token.end_ - 1; // do not include '"'
+ while (current != end)
+ {
+ Char c = *current++;
+ if (c == '"')
+ break;
+ else if (c == '\\')
+ {
+ if (current == end)
+ return addError("Empty escape sequence in string", token, current);
+ Char escape = *current++;
+ switch (escape)
+ {
+ case '"':
+ decoded += '"';
+ break;
+ case '/':
+ decoded += '/';
+ break;
+ case '\\':
+ decoded += '\\';
+ break;
+ case 'b':
+ decoded += '\b';
+ break;
+ case 'f':
+ decoded += '\f';
+ break;
+ case 'n':
+ decoded += '\n';
+ break;
+ case 'r':
+ decoded += '\r';
+ break;
+ case 't':
+ decoded += '\t';
+ break;
+ case 'u':
+ {
+ unsigned int unicode;
+ if (!decodeUnicodeCodePoint(token, current, end, unicode))
+ return false;
+ decoded += codePointToUTF8(unicode);
+ }
+ break;
+ default:
+ return addError("Bad escape sequence in string", token, current);
+ }
+ }
+ else
+ {
+ decoded += c;
+ }
+ }
+ return true;
+}
+
+bool Reader::decodeUnicodeCodePoint(Token &token, Location &current, Location end,
+ unsigned int &unicode)
+{
+
+ if (!decodeUnicodeEscapeSequence(token, current, end, unicode))
+ return false;
+ if (unicode >= 0xD800 && unicode <= 0xDBFF)
+ {
+ // surrogate pairs
+ if (end - current < 6)
+ return addError("additional six characters expected to parse unicode surrogate pair.", token,
+ current);
+ unsigned int surrogatePair;
+ if (*(current++) == '\\' && *(current++) == 'u')
+ {
+ if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair))
+ {
+ unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
+ }
+ else
+ return false;
+ }
+ else
+ return addError("expecting another \\u token to begin the second half of "
+ "a unicode surrogate pair",
+ token, current);
+ }
+ return true;
+}
+
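+// Editorial worked example for the surrogate-pair arithmetic above: the
+// input "\uD83D\uDE00" yields unicode = 0xD83D and surrogatePair = 0xDE00,
+// so 0x10000 + ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF)
+//    = 0x10000 + (0x3D << 10) + 0x200 = 0x1F600, i.e. U+1F600.
+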
+bool Reader::decodeUnicodeEscapeSequence(Token &token, Location &current, Location end,
+ unsigned int &ret_unicode)
+{
+ if (end - current < 4)
+ return addError("Bad unicode escape sequence in string: four digits expected.", token, current);
+ int unicode = 0;
+ for (int index = 0; index < 4; ++index)
+ {
+ Char c = *current++;
+ unicode *= 16;
+ if (c >= '0' && c <= '9')
+ unicode += c - '0';
+ else if (c >= 'a' && c <= 'f')
+ unicode += c - 'a' + 10;
+ else if (c >= 'A' && c <= 'F')
+ unicode += c - 'A' + 10;
+ else
+ return addError("Bad unicode escape sequence in string: hexadecimal digit expected.", token,
+ current);
+ }
+ ret_unicode = static_cast<unsigned int>(unicode);
+ return true;
+}
+
+bool Reader::addError(const JSONCPP_STRING &message, Token &token, Location extra)
+{
+ ErrorInfo info;
+ info.token_ = token;
+ info.message_ = message;
+ info.extra_ = extra;
+ errors_.push_back(info);
+ return false;
+}
+
+bool Reader::recoverFromError(TokenType skipUntilToken)
+{
+ size_t const errorCount = errors_.size();
+ Token skip;
+ for (;;)
+ {
+ if (!readToken(skip))
+ errors_.resize(errorCount); // discard errors caused by recovery
+ if (skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream)
+ break;
+ }
+ errors_.resize(errorCount);
+ return false;
+}
+
+bool Reader::addErrorAndRecover(const JSONCPP_STRING &message, Token &token,
+ TokenType skipUntilToken)
+{
+ addError(message, token);
+ return recoverFromError(skipUntilToken);
+}
+
+Value &Reader::currentValue() { return *(nodes_.top()); }
+
+Reader::Char Reader::getNextChar()
+{
+ if (current_ == end_)
+ return 0;
+ return *current_++;
+}
+
+void Reader::getLocationLineAndColumn(Location location, int &line, int &column) const
+{
+ Location current = begin_;
+ Location lastLineStart = current;
+ line = 0;
+ while (current < location && current != end_)
+ {
+ Char c = *current++;
+ if (c == '\r')
+ {
+ if (*current == '\n')
+ ++current;
+ lastLineStart = current;
+ ++line;
+ }
+ else if (c == '\n')
+ {
+ lastLineStart = current;
+ ++line;
+ }
+ }
+ // column & line start at 1
+ column = int(location - lastLineStart) + 1;
+ ++line;
+}
+
+JSONCPP_STRING Reader::getLocationLineAndColumn(Location location) const
+{
+ int line, column;
+ getLocationLineAndColumn(location, line, column);
+ char buffer[18 + 16 + 16 + 1];
+ snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
+ return buffer;
+}
+
+// Deprecated. Preserved for backward compatibility
+JSONCPP_STRING Reader::getFormatedErrorMessages() const { return getFormattedErrorMessages(); }
+
+JSONCPP_STRING Reader::getFormattedErrorMessages() const
+{
+ JSONCPP_STRING formattedMessage;
+ for (Errors::const_iterator itError = errors_.begin(); itError != errors_.end(); ++itError)
+ {
+ const ErrorInfo &error = *itError;
+ formattedMessage += "* " + getLocationLineAndColumn(error.token_.start_) + "\n";
+ formattedMessage += " " + error.message_ + "\n";
+ if (error.extra_)
+ formattedMessage += "See " + getLocationLineAndColumn(error.extra_) + " for detail.\n";
+ }
+ return formattedMessage;
+}
+
+std::vector<Reader::StructuredError> Reader::getStructuredErrors() const
+{
+ std::vector<Reader::StructuredError> allErrors;
+ for (Errors::const_iterator itError = errors_.begin(); itError != errors_.end(); ++itError)
+ {
+ const ErrorInfo &error = *itError;
+ Reader::StructuredError structured;
+ structured.offset_start = error.token_.start_ - begin_;
+ structured.offset_limit = error.token_.end_ - begin_;
+ structured.message = error.message_;
+ allErrors.push_back(structured);
+ }
+ return allErrors;
+}
+
+bool Reader::pushError(const Value &value, const JSONCPP_STRING &message)
+{
+ ptrdiff_t const length = end_ - begin_;
+ if (value.getOffsetStart() > length || value.getOffsetLimit() > length)
+ return false;
+ Token token;
+ token.type_ = tokenError;
+ token.start_ = begin_ + value.getOffsetStart();
+  token.end_ = begin_ + value.getOffsetLimit();
+ ErrorInfo info;
+ info.token_ = token;
+ info.message_ = message;
+ info.extra_ = 0;
+ errors_.push_back(info);
+ return true;
+}
+
+bool Reader::pushError(const Value &value, const JSONCPP_STRING &message, const Value &extra)
+{
+ ptrdiff_t const length = end_ - begin_;
+ if (value.getOffsetStart() > length || value.getOffsetLimit() > length ||
+ extra.getOffsetLimit() > length)
+ return false;
+ Token token;
+ token.type_ = tokenError;
+ token.start_ = begin_ + value.getOffsetStart();
+ token.end_ = begin_ + value.getOffsetLimit();
+ ErrorInfo info;
+ info.token_ = token;
+ info.message_ = message;
+ info.extra_ = begin_ + extra.getOffsetStart();
+ errors_.push_back(info);
+ return true;
+}
+
+bool Reader::good() const { return !errors_.size(); }
+
+// exact copy of Features
+class OurFeatures
+{
+public:
+ static OurFeatures all();
+ bool allowComments_;
+ bool strictRoot_;
+ bool allowDroppedNullPlaceholders_;
+ bool allowNumericKeys_;
+ bool allowSingleQuotes_;
+ bool failIfExtra_;
+ bool rejectDupKeys_;
+ bool allowSpecialFloats_;
+ int stackLimit_;
+}; // OurFeatures
+
+// exact copy of Implementation of class Features
+// ////////////////////////////////
+
+OurFeatures OurFeatures::all() { return OurFeatures(); }
+
+// Implementation of class Reader
+// ////////////////////////////////
+
+// exact copy of Reader, renamed to OurReader
+class OurReader
+{
+public:
+ typedef char Char;
+ typedef const Char *Location;
+ struct StructuredError
+ {
+ ptrdiff_t offset_start;
+ ptrdiff_t offset_limit;
+ JSONCPP_STRING message;
+ };
+
+ OurReader(OurFeatures const &features);
+ bool parse(const char *beginDoc, const char *endDoc, Value &root, bool collectComments = true);
+ JSONCPP_STRING getFormattedErrorMessages() const;
+ std::vector<StructuredError> getStructuredErrors() const;
+ bool pushError(const Value &value, const JSONCPP_STRING &message);
+ bool pushError(const Value &value, const JSONCPP_STRING &message, const Value &extra);
+ bool good() const;
+
+private:
+ OurReader(OurReader const &); // no impl
+ void operator=(OurReader const &); // no impl
+
+ enum TokenType
+ {
+ tokenEndOfStream = 0,
+ tokenObjectBegin,
+ tokenObjectEnd,
+ tokenArrayBegin,
+ tokenArrayEnd,
+ tokenString,
+ tokenNumber,
+ tokenTrue,
+ tokenFalse,
+ tokenNull,
+ tokenNaN,
+ tokenPosInf,
+ tokenNegInf,
+ tokenArraySeparator,
+ tokenMemberSeparator,
+ tokenComment,
+ tokenError
+ };
+
+ class Token
+ {
+ public:
+ TokenType type_;
+ Location start_;
+ Location end_;
+ };
+
+ class ErrorInfo
+ {
+ public:
+ Token token_;
+ JSONCPP_STRING message_;
+ Location extra_;
+ };
+
+ typedef std::deque<ErrorInfo> Errors;
+
+ bool readToken(Token &token);
+ void skipSpaces();
+ bool match(Location pattern, int patternLength);
+ bool readComment();
+ bool readCStyleComment();
+ bool readCppStyleComment();
+ bool readString();
+ bool readStringSingleQuote();
+ bool readNumber(bool checkInf);
+ bool readValue();
+ bool readObject(Token &token);
+ bool readArray(Token &token);
+ bool decodeNumber(Token &token);
+ bool decodeNumber(Token &token, Value &decoded);
+ bool decodeString(Token &token);
+ bool decodeString(Token &token, JSONCPP_STRING &decoded);
+ bool decodeDouble(Token &token);
+ bool decodeDouble(Token &token, Value &decoded);
+ bool decodeUnicodeCodePoint(Token &token, Location &current, Location end, unsigned int &unicode);
+ bool decodeUnicodeEscapeSequence(Token &token, Location &current, Location end,
+ unsigned int &unicode);
+ bool addError(const JSONCPP_STRING &message, Token &token, Location extra = 0);
+ bool recoverFromError(TokenType skipUntilToken);
+ bool addErrorAndRecover(const JSONCPP_STRING &message, Token &token, TokenType skipUntilToken);
+ void skipUntilSpace();
+ Value &currentValue();
+ Char getNextChar();
+ void getLocationLineAndColumn(Location location, int &line, int &column) const;
+ JSONCPP_STRING getLocationLineAndColumn(Location location) const;
+ void addComment(Location begin, Location end, CommentPlacement placement);
+ void skipCommentTokens(Token &token);
+
+ typedef std::stack<Value *> Nodes;
+ Nodes nodes_;
+ Errors errors_;
+ JSONCPP_STRING document_;
+ Location begin_;
+ Location end_;
+ Location current_;
+ Location lastValueEnd_;
+ Value *lastValue_;
+ JSONCPP_STRING commentsBefore_;
+ int stackDepth_;
+
+ OurFeatures const features_;
+ bool collectComments_;
+}; // OurReader
+
+// complete copy of Reader implementation, for OurReader
+
+OurReader::OurReader(OurFeatures const &features)
+ : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), lastValue_(),
+ commentsBefore_(), stackDepth_(0), features_(features), collectComments_()
+{
+}
+
+bool OurReader::parse(const char *beginDoc, const char *endDoc, Value &root, bool collectComments)
+{
+ if (!features_.allowComments_)
+ {
+ collectComments = false;
+ }
+
+ begin_ = beginDoc;
+ end_ = endDoc;
+ collectComments_ = collectComments;
+ current_ = begin_;
+ lastValueEnd_ = 0;
+ lastValue_ = 0;
+ commentsBefore_ = "";
+ errors_.clear();
+ while (!nodes_.empty())
+ nodes_.pop();
+ nodes_.push(&root);
+
+ stackDepth_ = 0;
+ bool successful = readValue();
+ Token token;
+ skipCommentTokens(token);
+ if (features_.failIfExtra_)
+ {
+ if ((features_.strictRoot_ || token.type_ != tokenError) && token.type_ != tokenEndOfStream)
+ {
+ addError("Extra non-whitespace after JSON value.", token);
+ return false;
+ }
+ }
+ if (collectComments_ && !commentsBefore_.empty())
+ root.setComment(commentsBefore_, commentAfter);
+ if (features_.strictRoot_)
+ {
+ if (!root.isArray() && !root.isObject())
+ {
+ // Set error location to start of doc, ideally should be first token found
+ // in doc
+ token.type_ = tokenError;
+ token.start_ = beginDoc;
+ token.end_ = endDoc;
+ addError("A valid JSON document must be either an array or an object value.", token);
+ return false;
+ }
+ }
+ return successful;
+}
+
+bool OurReader::readValue()
+{
+ if (stackDepth_ >= features_.stackLimit_)
+ throwRuntimeError("Exceeded stackLimit in readValue().");
+ ++stackDepth_;
+ Token token;
+ skipCommentTokens(token);
+ bool successful = true;
+
+ if (collectComments_ && !commentsBefore_.empty())
+ {
+ currentValue().setComment(commentsBefore_, commentBefore);
+ commentsBefore_ = "";
+ }
+
+ switch (token.type_)
+ {
+ case tokenObjectBegin:
+ successful = readObject(token);
+ currentValue().setOffsetLimit(current_ - begin_);
+ break;
+ case tokenArrayBegin:
+ successful = readArray(token);
+ currentValue().setOffsetLimit(current_ - begin_);
+ break;
+ case tokenNumber:
+ successful = decodeNumber(token);
+ break;
+ case tokenString:
+ successful = decodeString(token);
+ break;
+ case tokenTrue:
+ {
+ Value v(true);
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenFalse:
+ {
+ Value v(false);
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenNull:
+ {
+ Value v;
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenNaN:
+ {
+ Value v(std::numeric_limits<double>::quiet_NaN());
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenPosInf:
+ {
+ Value v(std::numeric_limits<double>::infinity());
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenNegInf:
+ {
+ Value v(-std::numeric_limits<double>::infinity());
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ }
+ break;
+ case tokenArraySeparator:
+ case tokenObjectEnd:
+ case tokenArrayEnd:
+ if (features_.allowDroppedNullPlaceholders_)
+ {
+ // "Un-read" the current token and mark the current value as a null
+ // token.
+ current_--;
+ Value v;
+ currentValue().swapPayload(v);
+ currentValue().setOffsetStart(current_ - begin_ - 1);
+ currentValue().setOffsetLimit(current_ - begin_);
+ break;
+ } // else, fall through ...
+ default:
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return addError("Syntax error: value, object or array expected.", token);
+ }
+
+ if (collectComments_)
+ {
+ lastValueEnd_ = current_;
+ lastValue_ = &currentValue();
+ }
+
+ --stackDepth_;
+ return successful;
+}
+
+void OurReader::skipCommentTokens(Token &token)
+{
+ if (features_.allowComments_)
+ {
+ do
+ {
+ readToken(token);
+ } while (token.type_ == tokenComment);
+ }
+ else
+ {
+ readToken(token);
+ }
+}
+
+bool OurReader::readToken(Token &token)
+{
+ skipSpaces();
+ token.start_ = current_;
+ Char c = getNextChar();
+ bool ok = true;
+ switch (c)
+ {
+ case '{':
+ token.type_ = tokenObjectBegin;
+ break;
+ case '}':
+ token.type_ = tokenObjectEnd;
+ break;
+ case '[':
+ token.type_ = tokenArrayBegin;
+ break;
+ case ']':
+ token.type_ = tokenArrayEnd;
+ break;
+ case '"':
+ token.type_ = tokenString;
+ ok = readString();
+ break;
+ case '\'':
+ if (features_.allowSingleQuotes_)
+ {
+ token.type_ = tokenString;
+ ok = readStringSingleQuote();
+ break;
+ } // else continue
+ case '/':
+ token.type_ = tokenComment;
+ ok = readComment();
+ break;
+ case '0':
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ token.type_ = tokenNumber;
+ readNumber(false);
+ break;
+ case '-':
+ if (readNumber(true))
+ {
+ token.type_ = tokenNumber;
+ }
+ else
+ {
+ token.type_ = tokenNegInf;
+ ok = features_.allowSpecialFloats_ && match("nfinity", 7);
+ }
+ break;
+ case 't':
+ token.type_ = tokenTrue;
+ ok = match("rue", 3);
+ break;
+ case 'f':
+ token.type_ = tokenFalse;
+ ok = match("alse", 4);
+ break;
+ case 'n':
+ token.type_ = tokenNull;
+ ok = match("ull", 3);
+ break;
+ case 'N':
+ if (features_.allowSpecialFloats_)
+ {
+ token.type_ = tokenNaN;
+ ok = match("aN", 2);
+ }
+ else
+ {
+ ok = false;
+ }
+ break;
+ case 'I':
+ if (features_.allowSpecialFloats_)
+ {
+ token.type_ = tokenPosInf;
+ ok = match("nfinity", 7);
+ }
+ else
+ {
+ ok = false;
+ }
+ break;
+ case ',':
+ token.type_ = tokenArraySeparator;
+ break;
+ case ':':
+ token.type_ = tokenMemberSeparator;
+ break;
+ case 0:
+ token.type_ = tokenEndOfStream;
+ break;
+ default:
+ ok = false;
+ break;
+ }
+ if (!ok)
+ token.type_ = tokenError;
+ token.end_ = current_;
+ return true;
+}
+
+void OurReader::skipSpaces()
+{
+ while (current_ != end_)
+ {
+ Char c = *current_;
+ if (c == ' ' || c == '\t' || c == '\r' || c == '\n')
+ ++current_;
+ else
+ break;
+ }
+}
+
+bool OurReader::match(Location pattern, int patternLength)
+{
+ if (end_ - current_ < patternLength)
+ return false;
+ int index = patternLength;
+ while (index--)
+ if (current_[index] != pattern[index])
+ return false;
+ current_ += patternLength;
+ return true;
+}
+
+bool OurReader::readComment()
+{
+ Location commentBegin = current_ - 1;
+ Char c = getNextChar();
+ bool successful = false;
+ if (c == '*')
+ successful = readCStyleComment();
+ else if (c == '/')
+ successful = readCppStyleComment();
+ if (!successful)
+ return false;
+
+ if (collectComments_)
+ {
+ CommentPlacement placement = commentBefore;
+ if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin))
+ {
+ if (c != '*' || !containsNewLine(commentBegin, current_))
+ placement = commentAfterOnSameLine;
+ }
+
+ addComment(commentBegin, current_, placement);
+ }
+ return true;
+}
+
+void OurReader::addComment(Location begin, Location end, CommentPlacement placement)
+{
+ assert(collectComments_);
+ const JSONCPP_STRING &normalized = normalizeEOL(begin, end);
+ if (placement == commentAfterOnSameLine)
+ {
+ assert(lastValue_ != 0);
+ lastValue_->setComment(normalized, placement);
+ }
+ else
+ {
+ commentsBefore_ += normalized;
+ }
+}
+
+bool OurReader::readCStyleComment()
+{
+ while ((current_ + 1) < end_)
+ {
+ Char c = getNextChar();
+ if (c == '*' && *current_ == '/')
+ break;
+ }
+ return getNextChar() == '/';
+}
+
+bool OurReader::readCppStyleComment()
+{
+ while (current_ != end_)
+ {
+ Char c = getNextChar();
+ if (c == '\n')
+ break;
+ if (c == '\r')
+ {
+ // Consume DOS EOL. It will be normalized in addComment.
+ if (current_ != end_ && *current_ == '\n')
+ getNextChar();
+      // Break on Mac OS 9 EOL.
+ break;
+ }
+ }
+ return true;
+}
+
+bool OurReader::readNumber(bool checkInf)
+{
+ const char *p = current_;
+ if (checkInf && p != end_ && *p == 'I')
+ {
+ current_ = ++p;
+ return false;
+ }
+ char c = '0'; // stopgap for already consumed character
+ // integral part
+ while (c >= '0' && c <= '9')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ // fractional part
+ if (c == '.')
+ {
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ while (c >= '0' && c <= '9')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ }
+ // exponential part
+ if (c == 'e' || c == 'E')
+ {
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ if (c == '+' || c == '-')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ while (c >= '0' && c <= '9')
+ c = (current_ = p) < end_ ? *p++ : '\0';
+ }
+ return true;
+}
+bool OurReader::readString()
+{
+ Char c = 0;
+ while (current_ != end_)
+ {
+ c = getNextChar();
+ if (c == '\\')
+ getNextChar();
+ else if (c == '"')
+ break;
+ }
+ return c == '"';
+}
+
+bool OurReader::readStringSingleQuote()
+{
+ Char c = 0;
+ while (current_ != end_)
+ {
+ c = getNextChar();
+ if (c == '\\')
+ getNextChar();
+ else if (c == '\'')
+ break;
+ }
+ return c == '\'';
+}
+
+bool OurReader::readObject(Token &tokenStart)
+{
+ Token tokenName;
+ JSONCPP_STRING name;
+ Value init(objectValue);
+ currentValue().swapPayload(init);
+ currentValue().setOffsetStart(tokenStart.start_ - begin_);
+ while (readToken(tokenName))
+ {
+ bool initialTokenOk = true;
+ while (tokenName.type_ == tokenComment && initialTokenOk)
+ initialTokenOk = readToken(tokenName);
+ if (!initialTokenOk)
+ break;
+ if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
+ return true;
+ name = "";
+ if (tokenName.type_ == tokenString)
+ {
+ if (!decodeString(tokenName, name))
+ return recoverFromError(tokenObjectEnd);
+ }
+ else if (tokenName.type_ == tokenNumber && features_.allowNumericKeys_)
+ {
+ Value numberName;
+ if (!decodeNumber(tokenName, numberName))
+ return recoverFromError(tokenObjectEnd);
+ name = numberName.asString();
+ }
+ else
+ {
+ break;
+ }
+
+ Token colon;
+ if (!readToken(colon) || colon.type_ != tokenMemberSeparator)
+ {
+ return addErrorAndRecover("Missing ':' after object member name", colon, tokenObjectEnd);
+ }
+ if (name.length() >= (1U << 30))
+ throwRuntimeError("keylength >= 2^30");
+ if (features_.rejectDupKeys_ && currentValue().isMember(name))
+ {
+ JSONCPP_STRING msg = "Duplicate key: '" + name + "'";
+ return addErrorAndRecover(msg, tokenName, tokenObjectEnd);
+ }
+ Value &value = currentValue()[name];
+ nodes_.push(&value);
+ bool ok = readValue();
+ nodes_.pop();
+ if (!ok) // error already set
+ return recoverFromError(tokenObjectEnd);
+
+ Token comma;
+ if (!readToken(comma) || (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
+ comma.type_ != tokenComment))
+ {
+ return addErrorAndRecover("Missing ',' or '}' in object declaration", comma, tokenObjectEnd);
+ }
+ bool finalizeTokenOk = true;
+ while (comma.type_ == tokenComment && finalizeTokenOk)
+ finalizeTokenOk = readToken(comma);
+ if (comma.type_ == tokenObjectEnd)
+ return true;
+ }
+ return addErrorAndRecover("Missing '}' or object member name", tokenName, tokenObjectEnd);
+}
+
+bool OurReader::readArray(Token &tokenStart)
+{
+ Value init(arrayValue);
+ currentValue().swapPayload(init);
+ currentValue().setOffsetStart(tokenStart.start_ - begin_);
+ skipSpaces();
+ if (current_ != end_ && *current_ == ']') // empty array
+ {
+ Token endArray;
+ readToken(endArray);
+ return true;
+ }
+ int index = 0;
+ for (;;)
+ {
+ Value &value = currentValue()[index++];
+ nodes_.push(&value);
+ bool ok = readValue();
+ nodes_.pop();
+ if (!ok) // error already set
+ return recoverFromError(tokenArrayEnd);
+
+ Token token;
+ // Accept Comment after last item in the array.
+ ok = readToken(token);
+ while (token.type_ == tokenComment && ok)
+ {
+ ok = readToken(token);
+ }
+ bool badTokenType = (token.type_ != tokenArraySeparator && token.type_ != tokenArrayEnd);
+ if (!ok || badTokenType)
+ {
+ return addErrorAndRecover("Missing ',' or ']' in array declaration", token, tokenArrayEnd);
+ }
+ if (token.type_ == tokenArrayEnd)
+ break;
+ }
+ return true;
+}
+
+bool OurReader::decodeNumber(Token &token)
+{
+ Value decoded;
+ if (!decodeNumber(token, decoded))
+ return false;
+ currentValue().swapPayload(decoded);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return true;
+}
+
+bool OurReader::decodeNumber(Token &token, Value &decoded)
+{
+ // Attempts to parse the number as an integer. If the number is
+ // larger than the maximum supported value of an integer then
+ // we decode the number as a double.
+ Location current = token.start_;
+ bool isNegative = *current == '-';
+ if (isNegative)
+ ++current;
+ // TODO: Help the compiler do the div and mod at compile time or get rid of them.
+  Value::LargestUInt maxIntegerValue =
+      isNegative ? Value::LargestUInt(Value::maxLargestInt) + 1 : Value::maxLargestUInt;
+ Value::LargestUInt threshold = maxIntegerValue / 10;
+ Value::LargestUInt value = 0;
+ while (current < token.end_)
+ {
+ Char c = *current++;
+ if (c < '0' || c > '9')
+ return decodeDouble(token, decoded);
+ Value::UInt digit(static_cast<Value::UInt>(c - '0'));
+ if (value >= threshold)
+ {
+ // We've hit or exceeded the max value divided by 10 (rounded down). If
+ // a) we've only just touched the limit, b) this is the last digit, and
+ // c) it's small enough to fit in that rounding delta, we're okay.
+ // Otherwise treat this number as a double to avoid overflow.
+ if (value > threshold || current != token.end_ || digit > maxIntegerValue % 10)
+ {
+ return decodeDouble(token, decoded);
+ }
+ }
+ value = value * 10 + digit;
+ }
+  if (isNegative && value == maxIntegerValue)
+    decoded = Value::minLargestInt;
+  else if (isNegative)
+    decoded = -Value::LargestInt(value);
+ else if (value <= Value::LargestUInt(Value::maxInt))
+ decoded = Value::LargestInt(value);
+ else
+ decoded = value;
+ return true;
+}
+
+bool OurReader::decodeDouble(Token &token)
+{
+ Value decoded;
+ if (!decodeDouble(token, decoded))
+ return false;
+ currentValue().swapPayload(decoded);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return true;
+}
+
+bool OurReader::decodeDouble(Token &token, Value &decoded)
+{
+ double value = 0;
+ const int bufferSize = 32;
+ int count;
+ ptrdiff_t const length = token.end_ - token.start_;
+
+ // Sanity check to avoid buffer overflow exploits.
+ if (length < 0)
+ {
+ return addError("Unable to parse token length", token);
+ }
+ size_t const ulength = static_cast<size_t>(length);
+
+ // Avoid using a string constant for the format control string given to
+ // sscanf, as this can cause hard to debug crashes on OS X. See here for more
+ // info:
+ //
+ // http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html
+ char format[] = "%lf";
+
+ if (length <= bufferSize)
+ {
+ Char buffer[bufferSize + 1];
+ memcpy(buffer, token.start_, ulength);
+ buffer[length] = 0;
+ fixNumericLocaleInput(buffer, buffer + length);
+ count = sscanf(buffer, format, &value);
+ }
+ else
+ {
+ JSONCPP_STRING buffer(token.start_, token.end_);
+ count = sscanf(buffer.c_str(), format, &value);
+ }
+
+ if (count != 1)
+ return addError("'" + JSONCPP_STRING(token.start_, token.end_) + "' is not a number.", token);
+ decoded = value;
+ return true;
+}
+
+bool OurReader::decodeString(Token &token)
+{
+ JSONCPP_STRING decoded_string;
+ if (!decodeString(token, decoded_string))
+ return false;
+ Value decoded(decoded_string);
+ currentValue().swapPayload(decoded);
+ currentValue().setOffsetStart(token.start_ - begin_);
+ currentValue().setOffsetLimit(token.end_ - begin_);
+ return true;
+}
+
+bool OurReader::decodeString(Token &token, JSONCPP_STRING &decoded)
+{
+ decoded.reserve(static_cast<size_t>(token.end_ - token.start_ - 2));
+ Location current = token.start_ + 1; // skip '"'
+ Location end = token.end_ - 1; // do not include '"'
+ while (current != end)
+ {
+ Char c = *current++;
+ if (c == '"')
+ break;
+ else if (c == '\\')
+ {
+ if (current == end)
+ return addError("Empty escape sequence in string", token, current);
+ Char escape = *current++;
+ switch (escape)
+ {
+ case '"':
+ decoded += '"';
+ break;
+ case '/':
+ decoded += '/';
+ break;
+ case '\\':
+ decoded += '\\';
+ break;
+ case 'b':
+ decoded += '\b';
+ break;
+ case 'f':
+ decoded += '\f';
+ break;
+ case 'n':
+ decoded += '\n';
+ break;
+ case 'r':
+ decoded += '\r';
+ break;
+ case 't':
+ decoded += '\t';
+ break;
+ case 'u':
+ {
+ unsigned int unicode;
+ if (!decodeUnicodeCodePoint(token, current, end, unicode))
+ return false;
+ decoded += codePointToUTF8(unicode);
+ }
+ break;
+ default:
+ return addError("Bad escape sequence in string", token, current);
+ }
+ }
+ else
+ {
+ decoded += c;
+ }
+ }
+ return true;
+}
+
+bool OurReader::decodeUnicodeCodePoint(Token &token, Location &current, Location end,
+ unsigned int &unicode)
+{
+
+ if (!decodeUnicodeEscapeSequence(token, current, end, unicode))
+ return false;
+ if (unicode >= 0xD800 && unicode <= 0xDBFF)
+ {
+ // surrogate pairs
+ if (end - current < 6)
+ return addError("additional six characters expected to parse unicode surrogate pair.", token,
+ current);
+ unsigned int surrogatePair;
+ if (*(current++) == '\\' && *(current++) == 'u')
+ {
+ if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair))
+ {
+ unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
+ }
+ else
+ return false;
+ }
+ else
+ return addError("expecting another \\u token to begin the second half of "
+ "a unicode surrogate pair",
+ token, current);
+ }
+ return true;
+}
+
+bool OurReader::decodeUnicodeEscapeSequence(Token &token, Location &current, Location end,
+ unsigned int &ret_unicode)
+{
+ if (end - current < 4)
+ return addError("Bad unicode escape sequence in string: four digits expected.", token, current);
+ int unicode = 0;
+ for (int index = 0; index < 4; ++index)
+ {
+ Char c = *current++;
+ unicode *= 16;
+ if (c >= '0' && c <= '9')
+ unicode += c - '0';
+ else if (c >= 'a' && c <= 'f')
+ unicode += c - 'a' + 10;
+ else if (c >= 'A' && c <= 'F')
+ unicode += c - 'A' + 10;
+ else
+ return addError("Bad unicode escape sequence in string: hexadecimal digit expected.", token,
+ current);
+ }
+ ret_unicode = static_cast<unsigned int>(unicode);
+ return true;
+}
+
+bool OurReader::addError(const JSONCPP_STRING &message, Token &token, Location extra)
+{
+ ErrorInfo info;
+ info.token_ = token;
+ info.message_ = message;
+ info.extra_ = extra;
+ errors_.push_back(info);
+ return false;
+}
+
+bool OurReader::recoverFromError(TokenType skipUntilToken)
+{
+ size_t errorCount = errors_.size();
+ Token skip;
+ for (;;)
+ {
+ if (!readToken(skip))
+ errors_.resize(errorCount); // discard errors caused by recovery
+ if (skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream)
+ break;
+ }
+ errors_.resize(errorCount);
+ return false;
+}
+
+bool OurReader::addErrorAndRecover(const JSONCPP_STRING &message, Token &token,
+ TokenType skipUntilToken)
+{
+ addError(message, token);
+ return recoverFromError(skipUntilToken);
+}
+
+Value &OurReader::currentValue() { return *(nodes_.top()); }
+
+OurReader::Char OurReader::getNextChar()
+{
+ if (current_ == end_)
+ return 0;
+ return *current_++;
+}
+
+void OurReader::getLocationLineAndColumn(Location location, int &line, int &column) const
+{
+ Location current = begin_;
+ Location lastLineStart = current;
+ line = 0;
+ while (current < location && current != end_)
+ {
+ Char c = *current++;
+ if (c == '\r')
+ {
+ if (*current == '\n')
+ ++current;
+ lastLineStart = current;
+ ++line;
+ }
+ else if (c == '\n')
+ {
+ lastLineStart = current;
+ ++line;
+ }
+ }
+ // column & line start at 1
+ column = int(location - lastLineStart) + 1;
+ ++line;
+}
+
+JSONCPP_STRING OurReader::getLocationLineAndColumn(Location location) const
+{
+ int line, column;
+ getLocationLineAndColumn(location, line, column);
+ char buffer[18 + 16 + 16 + 1];
+ snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
+ return buffer;
+}
+
+JSONCPP_STRING OurReader::getFormattedErrorMessages() const
+{
+ JSONCPP_STRING formattedMessage;
+ for (Errors::const_iterator itError = errors_.begin(); itError != errors_.end(); ++itError)
+ {
+ const ErrorInfo &error = *itError;
+ formattedMessage += "* " + getLocationLineAndColumn(error.token_.start_) + "\n";
+ formattedMessage += " " + error.message_ + "\n";
+ if (error.extra_)
+ formattedMessage += "See " + getLocationLineAndColumn(error.extra_) + " for detail.\n";
+ }
+ return formattedMessage;
+}
+
+std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const
+{
+ std::vector<OurReader::StructuredError> allErrors;
+ for (Errors::const_iterator itError = errors_.begin(); itError != errors_.end(); ++itError)
+ {
+ const ErrorInfo &error = *itError;
+ OurReader::StructuredError structured;
+ structured.offset_start = error.token_.start_ - begin_;
+ structured.offset_limit = error.token_.end_ - begin_;
+ structured.message = error.message_;
+ allErrors.push_back(structured);
+ }
+ return allErrors;
+}
+
+bool OurReader::pushError(const Value &value, const JSONCPP_STRING &message)
+{
+ ptrdiff_t length = end_ - begin_;
+ if (value.getOffsetStart() > length || value.getOffsetLimit() > length)
+ return false;
+ Token token;
+ token.type_ = tokenError;
+ token.start_ = begin_ + value.getOffsetStart();
+  token.end_ = begin_ + value.getOffsetLimit();
+ ErrorInfo info;
+ info.token_ = token;
+ info.message_ = message;
+ info.extra_ = 0;
+ errors_.push_back(info);
+ return true;
+}
+
+bool OurReader::pushError(const Value &value, const JSONCPP_STRING &message, const Value &extra)
+{
+ ptrdiff_t length = end_ - begin_;
+ if (value.getOffsetStart() > length || value.getOffsetLimit() > length ||
+ extra.getOffsetLimit() > length)
+ return false;
+ Token token;
+ token.type_ = tokenError;
+ token.start_ = begin_ + value.getOffsetStart();
+ token.end_ = begin_ + value.getOffsetLimit();
+ ErrorInfo info;
+ info.token_ = token;
+ info.message_ = message;
+ info.extra_ = begin_ + extra.getOffsetStart();
+ errors_.push_back(info);
+ return true;
+}
+
+bool OurReader::good() const { return !errors_.size(); }
+
+class OurCharReader : public CharReader
+{
+ bool const collectComments_;
+ OurReader reader_;
+
+public:
+ OurCharReader(bool collectComments, OurFeatures const &features)
+ : collectComments_(collectComments), reader_(features)
+ {
+ }
+ bool parse(char const *beginDoc, char const *endDoc, Value *root,
+ JSONCPP_STRING *errs) JSONCPP_OVERRIDE
+ {
+ bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_);
+ if (errs)
+ {
+ *errs = reader_.getFormattedErrorMessages();
+ }
+ return ok;
+ }
+};
+
+CharReaderBuilder::CharReaderBuilder() { setDefaults(&settings_); }
+CharReaderBuilder::~CharReaderBuilder() {}
+CharReader *CharReaderBuilder::newCharReader() const
+{
+ bool collectComments = settings_["collectComments"].asBool();
+ OurFeatures features = OurFeatures::all();
+ features.allowComments_ = settings_["allowComments"].asBool();
+ features.strictRoot_ = settings_["strictRoot"].asBool();
+ features.allowDroppedNullPlaceholders_ = settings_["allowDroppedNullPlaceholders"].asBool();
+ features.allowNumericKeys_ = settings_["allowNumericKeys"].asBool();
+ features.allowSingleQuotes_ = settings_["allowSingleQuotes"].asBool();
+ features.stackLimit_ = settings_["stackLimit"].asInt();
+ features.failIfExtra_ = settings_["failIfExtra"].asBool();
+ features.rejectDupKeys_ = settings_["rejectDupKeys"].asBool();
+ features.allowSpecialFloats_ = settings_["allowSpecialFloats"].asBool();
+ return new OurCharReader(collectComments, features);
+}
+static void getValidReaderKeys(std::set<JSONCPP_STRING> *valid_keys)
+{
+ valid_keys->clear();
+ valid_keys->insert("collectComments");
+ valid_keys->insert("allowComments");
+ valid_keys->insert("strictRoot");
+ valid_keys->insert("allowDroppedNullPlaceholders");
+ valid_keys->insert("allowNumericKeys");
+ valid_keys->insert("allowSingleQuotes");
+ valid_keys->insert("stackLimit");
+ valid_keys->insert("failIfExtra");
+ valid_keys->insert("rejectDupKeys");
+ valid_keys->insert("allowSpecialFloats");
+}
+bool CharReaderBuilder::validate(Json::Value *invalid) const
+{
+ Json::Value my_invalid;
+ if (!invalid)
+ invalid = &my_invalid; // so we do not need to test for NULL
+ Json::Value &inv = *invalid;
+ std::set<JSONCPP_STRING> valid_keys;
+ getValidReaderKeys(&valid_keys);
+ Value::Members keys = settings_.getMemberNames();
+ size_t n = keys.size();
+ for (size_t i = 0; i < n; ++i)
+ {
+ JSONCPP_STRING const &key = keys[i];
+ if (valid_keys.find(key) == valid_keys.end())
+ {
+ inv[key] = settings_[key];
+ }
+ }
+ return 0u == inv.size();
+}
+Value &CharReaderBuilder::operator[](JSONCPP_STRING key) { return settings_[key]; }
+// static
+void CharReaderBuilder::strictMode(Json::Value *settings)
+{
+ //! [CharReaderBuilderStrictMode]
+ (*settings)["allowComments"] = false;
+ (*settings)["strictRoot"] = true;
+ (*settings)["allowDroppedNullPlaceholders"] = false;
+ (*settings)["allowNumericKeys"] = false;
+ (*settings)["allowSingleQuotes"] = false;
+ (*settings)["stackLimit"] = 1000;
+ (*settings)["failIfExtra"] = true;
+ (*settings)["rejectDupKeys"] = true;
+ (*settings)["allowSpecialFloats"] = false;
+ //! [CharReaderBuilderStrictMode]
+}
+// static
+void CharReaderBuilder::setDefaults(Json::Value *settings)
+{
+ //! [CharReaderBuilderDefaults]
+ (*settings)["collectComments"] = true;
+ (*settings)["allowComments"] = true;
+ (*settings)["strictRoot"] = false;
+ (*settings)["allowDroppedNullPlaceholders"] = false;
+ (*settings)["allowNumericKeys"] = false;
+ (*settings)["allowSingleQuotes"] = false;
+ (*settings)["stackLimit"] = 1000;
+ (*settings)["failIfExtra"] = false;
+ (*settings)["rejectDupKeys"] = false;
+ (*settings)["allowSpecialFloats"] = false;
+ //! [CharReaderBuilderDefaults]
+}
+
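+// Illustrative sketch (editorial note, not part of jsoncpp): settings_ is
+// the builder's public configuration value, so the presets above can be
+// applied directly:
+//   Json::CharReaderBuilder builder;                        // setDefaults()
+//   Json::CharReaderBuilder::strictMode(&builder.settings_);
+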
+//////////////////////////////////
+// global functions
+
+bool parseFromStream(CharReader::Factory const &fact, JSONCPP_ISTREAM &sin, Value *root,
+ JSONCPP_STRING *errs)
+{
+ JSONCPP_OSTRINGSTREAM ssin;
+ ssin << sin.rdbuf();
+ JSONCPP_STRING doc = ssin.str();
+ char const *begin = doc.data();
+ char const *end = begin + doc.size();
+ // Note that we do not actually need a null-terminator.
+ CharReaderPtr const reader(fact.newCharReader());
+ return reader->parse(begin, end, root, errs);
+}
+
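+// Illustrative usage sketch (editorial note, not part of jsoncpp; the input
+// file name is hypothetical):
+//   Json::CharReaderBuilder builder;
+//   builder["collectComments"] = false;
+//   Json::Value root;
+//   JSONCPP_STRING errs;
+//   std::ifstream doc("config.json");
+//   if (!Json::parseFromStream(builder, doc, &root, &errs))
+//     std::cerr << errs << std::endl;
+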
+JSONCPP_ISTREAM &operator>>(JSONCPP_ISTREAM &sin, Value &root)
+{
+ CharReaderBuilder b;
+ JSONCPP_STRING errs;
+ bool ok = parseFromStream(b, sin, &root, &errs);
+ if (!ok)
+ {
+ fprintf(stderr, "Error from reader: %s", errs.c_str());
+
+ throwRuntimeError(errs);
+ }
+ return sin;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_reader.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_valueiterator.inl
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+// included by json_value.cpp
+
+namespace Json
+{
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueIteratorBase
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueIteratorBase::ValueIteratorBase() : current_(), isNull_(true) {}
+
+ValueIteratorBase::ValueIteratorBase(const Value::ObjectValues::iterator &current)
+ : current_(current), isNull_(false)
+{
+}
+
+Value &ValueIteratorBase::deref() const { return current_->second; }
+
+void ValueIteratorBase::increment() { ++current_; }
+
+void ValueIteratorBase::decrement() { --current_; }
+
+ValueIteratorBase::difference_type ValueIteratorBase::computeDistance(const SelfType &other) const
+{
+#ifdef JSON_USE_CPPTL_SMALLMAP
+ return other.current_ - current_;
+#else
+  // Iterators for null values are initialized using the default
+  // constructor, which initializes current_ to the default
+  // std::map::iterator. As begin() and end() are two instances
+  // of the default std::map::iterator, they cannot be compared.
+  // To allow this, we handle this comparison specifically.
+ if (isNull_ && other.isNull_)
+ {
+ return 0;
+ }
+
+  // Usage of std::distance is not portable (it does not compile with the
+  // Sun Studio 12 RogueWave STL, which is the one used by default).
+  // Use a portable hand-made version for non-random-access iterators instead:
+ // return difference_type( std::distance( current_, other.current_ ) );
+ difference_type myDistance = 0;
+ for (Value::ObjectValues::iterator it = current_; it != other.current_; ++it)
+ {
+ ++myDistance;
+ }
+ return myDistance;
+#endif
+}
+
+bool ValueIteratorBase::isEqual(const SelfType &other) const
+{
+ if (isNull_)
+ {
+ return other.isNull_;
+ }
+ return current_ == other.current_;
+}
+
+void ValueIteratorBase::copy(const SelfType &other)
+{
+ current_ = other.current_;
+ isNull_ = other.isNull_;
+}
+
+Value ValueIteratorBase::key() const
+{
+ const Value::CZString czstring = (*current_).first;
+ if (czstring.data())
+ {
+ if (czstring.isStaticString())
+ return Value(StaticString(czstring.data()));
+ return Value(czstring.data(), czstring.data() + czstring.length());
+ }
+ return Value(czstring.index());
+}
+
+UInt ValueIteratorBase::index() const
+{
+ const Value::CZString czstring = (*current_).first;
+ if (!czstring.data())
+ return czstring.index();
+ return Value::UInt(-1);
+}
+
+JSONCPP_STRING ValueIteratorBase::name() const
+{
+ char const *keey;
+ char const *end;
+ keey = memberName(&end);
+ if (!keey)
+ return JSONCPP_STRING();
+ return JSONCPP_STRING(keey, end);
+}
+
+char const *ValueIteratorBase::memberName() const
+{
+ const char *cname = (*current_).first.data();
+ return cname ? cname : "";
+}
+
+char const *ValueIteratorBase::memberName(char const **end) const
+{
+ const char *cname = (*current_).first.data();
+ if (!cname)
+ {
+ *end = NULL;
+ return NULL;
+ }
+ *end = cname + (*current_).first.length();
+ return cname;
+}
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueConstIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueConstIterator::ValueConstIterator() {}
+
+ValueConstIterator::ValueConstIterator(const Value::ObjectValues::iterator &current)
+ : ValueIteratorBase(current)
+{
+}
+
+ValueConstIterator::ValueConstIterator(ValueIterator const &other) : ValueIteratorBase(other) {}
+
+ValueConstIterator &ValueConstIterator::operator=(const ValueIteratorBase &other)
+{
+ copy(other);
+ return *this;
+}
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueIterator::ValueIterator() {}
+
+ValueIterator::ValueIterator(const Value::ObjectValues::iterator &current)
+ : ValueIteratorBase(current)
+{
+}
+
+ValueIterator::ValueIterator(const ValueConstIterator &other) : ValueIteratorBase(other)
+{
+ throwRuntimeError("ConstIterator to Iterator should never be allowed.");
+}
+
+ValueIterator::ValueIterator(const ValueIterator &other) : ValueIteratorBase(other) {}
+
+ValueIterator &ValueIterator::operator=(const SelfType &other)
+{
+ copy(other);
+ return *this;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_valueiterator.inl
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_value.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2011 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include <json/assertions.h>
+#include <json/value.h>
+#include <json/writer.h>
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <math.h>
+#include <sstream>
+#include <utility>
+#include <cstring>
+#include <cassert>
+#ifdef JSON_USE_CPPTL
+#include <cpptl/conststring.h>
+#endif
+#include <cstddef> // size_t
+#include <algorithm> // min()
+
+#define JSON_ASSERT_UNREACHABLE assert(false)
+
+namespace Json
+{
+
+// This is a workaround to avoid the static initialization of Value::null.
+// kNull must be word-aligned to avoid crashing on ARM. We use an alignment of
+// 8 (instead of 4) as a bit of future-proofing.
+#if defined(__ARMEL__)
+#define ALIGNAS(byte_alignment) __attribute__((aligned(byte_alignment)))
+#else
+#define ALIGNAS(byte_alignment)
+#endif
+// static const unsigned char ALIGNAS(8) kNull[sizeof(Value)] = { 0 };
+// const unsigned char& kNullRef = kNull[0];
+// const Value& Value::null = reinterpret_cast<const Value&>(kNullRef);
+// const Value& Value::nullRef = null;
+
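+// nullSingleton() uses a function-local static (a "Meyers singleton") so the
+// shared null Value is constructed on first use, avoiding the
+// static-initialization-order hazards that motivated the commented-out kNull
+// scheme above.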
+// static
+Value const &Value::nullSingleton()
+{
+ static Value const nullStatic;
+ return nullStatic;
+}
+
+// for backwards compatibility, we'll leave these global references around, but DO NOT
+// use them in JSONCPP library code any more!
+Value const &Value::null = Value::nullSingleton();
+Value const &Value::nullRef = Value::nullSingleton();
+
+const Int Value::minInt = Int(~(UInt(-1) / 2));
+const Int Value::maxInt = Int(UInt(-1) / 2);
+const UInt Value::maxUInt = UInt(-1);
+#if defined(JSON_HAS_INT64)
+const Int64 Value::minInt64 = Int64(~(UInt64(-1) / 2));
+const Int64 Value::maxInt64 = Int64(UInt64(-1) / 2);
+const UInt64 Value::maxUInt64 = UInt64(-1);
+// The constant is hard-coded because some compilers have trouble
+// converting Value::maxUInt64 to a double correctly (AIX/xlC).
+// Assumes that UInt64 is a 64-bit integer.
+static const double maxUInt64AsDouble = 18446744073709551615.0;
+#endif // defined(JSON_HAS_INT64)
+const LargestInt Value::minLargestInt = LargestInt(~(LargestUInt(-1) / 2));
+const LargestInt Value::maxLargestInt = LargestInt(LargestUInt(-1) / 2);
+const LargestUInt Value::maxLargestUInt = LargestUInt(-1);
+
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+template <typename T, typename U> static inline bool InRange(double d, T min, U max)
+{
+ // The casts can lose precision, but we are looking only for
+ // an approximate range. Might fail on edge cases though. ~cdunn
+ // return d >= static_cast<double>(min) && d <= static_cast<double>(max);
+ return d >= min && d <= max;
+}
+#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
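+// When a direct UInt64 -> double conversion is unavailable, the value is
+// split: (value / 2) always fits in an Int64, so it converts safely, is
+// doubled back, and the low bit is added separately, reconstructing the
+// magnitude without ever converting a UInt64 directly.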
+static inline double integerToDouble(Json::UInt64 value)
+{
+ return static_cast<double>(Int64(value / 2)) * 2.0 + static_cast<double>(Int64(value & 1));
+}
+
+template <typename T> static inline double integerToDouble(T value)
+{
+ return static_cast<double>(value);
+}
+
+template <typename T, typename U> static inline bool InRange(double d, T min, U max)
+{
+ return d >= integerToDouble(min) && d <= integerToDouble(max);
+}
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+
+/** Duplicates the specified string value.
+ * @param value Pointer to the string to duplicate. Must be zero-terminated if
+ * length is "unknown".
+ * @param length Length of the value. If "unknown", it will be
+ *               computed using strlen(value).
+ * @return Pointer to the duplicated string.
+ */
+static inline char *duplicateStringValue(const char *value, size_t length)
+{
+ // Avoid an integer overflow in the call to malloc below by limiting length
+ // to a sane value.
+ if (length >= static_cast<size_t>(Value::maxInt))
+ length = Value::maxInt - 1;
+
+ char *newString = static_cast<char *>(malloc(length + 1));
+ if (newString == NULL)
+ {
+ throwRuntimeError("in Json::Value::duplicateStringValue(): "
+ "Failed to allocate string value buffer");
+ }
+ memcpy(newString, value, length);
+ newString[length] = 0;
+ return newString;
+}
+
+/* Record the length as a prefix.
+ */
+static inline char *duplicateAndPrefixStringValue(const char *value, unsigned int length)
+{
+ // Avoid an integer overflow in the call to malloc below by limiting length
+ // to a sane value.
+ JSON_ASSERT_MESSAGE(length <= static_cast<unsigned>(Value::maxInt) - sizeof(unsigned) - 1U,
+ "in Json::Value::duplicateAndPrefixStringValue(): "
+ "length too big for prefixing");
+ unsigned actualLength = length + static_cast<unsigned>(sizeof(unsigned)) + 1U;
+ char *newString = static_cast<char *>(malloc(actualLength));
+ if (newString == 0)
+ {
+ throwRuntimeError("in Json::Value::duplicateAndPrefixStringValue(): "
+ "Failed to allocate string value buffer");
+ }
+ *reinterpret_cast<unsigned *>(newString) = length;
+ memcpy(newString + sizeof(unsigned), value, length);
+ newString[actualLength - 1U] = 0; // to avoid buffer over-run accidents by users later
+ return newString;
+}
+inline static void decodePrefixedString(bool isPrefixed, char const *prefixed, unsigned *length,
+ char const **value)
+{
+ if (!isPrefixed)
+ {
+ *length = static_cast<unsigned>(strlen(prefixed));
+ *value = prefixed;
+ }
+ else
+ {
+ *length = *reinterpret_cast<unsigned const *>(prefixed);
+ *value = prefixed + sizeof(unsigned);
+ }
+}
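+// The prefixed layout produced by duplicateAndPrefixStringValue() and decoded
+// above is:
+//   [unsigned length][length bytes of string data]['\0']
+// e.g. duplicating "abc" stores the unsigned value 3, then 'a' 'b' 'c' '\0'.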
+/** Free the string duplicated by duplicateStringValue()/duplicateAndPrefixStringValue().
+ */
+#if JSONCPP_USING_SECURE_MEMORY
+static inline void releasePrefixedStringValue(char *value)
+{
+ unsigned length = 0;
+ char const *valueDecoded;
+ decodePrefixedString(true, value, &length, &valueDecoded);
+ size_t const size = sizeof(unsigned) + length + 1U;
+ memset(value, 0, size);
+ free(value);
+}
+static inline void releaseStringValue(char *value, unsigned length)
+{
+ // length==0 => we allocated the strings memory
+ size_t size = (length == 0) ? strlen(value) : length;
+ memset(value, 0, size);
+ free(value);
+}
+#else // !JSONCPP_USING_SECURE_MEMORY
+static inline void releasePrefixedStringValue(char *value) { free(value); }
+static inline void releaseStringValue(char *value, unsigned) { free(value); }
+#endif // JSONCPP_USING_SECURE_MEMORY
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// ValueInternals...
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+#if !defined(JSON_IS_AMALGAMATION)
+
+#include "json_valueiterator.inl"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json
+{
+
+Exception::Exception(JSONCPP_STRING const &msg) : msg_(msg) {}
+Exception::~Exception() throw() {}
+char const *Exception::what() const throw() { return msg_.c_str(); }
+RuntimeError::RuntimeError(JSONCPP_STRING const &msg) : Exception(msg) {}
+LogicError::LogicError(JSONCPP_STRING const &msg) : Exception(msg) {}
+JSONCPP_NORETURN void throwRuntimeError(JSONCPP_STRING const &msg) { throw RuntimeError(msg); }
+JSONCPP_NORETURN void throwLogicError(JSONCPP_STRING const &msg) { throw LogicError(msg); }
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class Value::CommentInfo
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+Value::CommentInfo::CommentInfo() : comment_(0) {}
+
+Value::CommentInfo::~CommentInfo()
+{
+ if (comment_)
+ releaseStringValue(comment_, 0u);
+}
+
+void Value::CommentInfo::setComment(const char *text, size_t len)
+{
+ if (comment_)
+ {
+ releaseStringValue(comment_, 0u);
+ comment_ = 0;
+ }
+ JSON_ASSERT(text != 0);
+ JSON_ASSERT_MESSAGE(text[0] == '\0' || text[0] == '/',
+ "in Json::Value::setComment(): Comments must start with /");
+ // It seems that /**/ style comments are acceptable as well.
+ comment_ = duplicateStringValue(text, len);
+}
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class Value::CZString
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+// Note: when a string is stored, policy_ indicates whether it was allocated
+// (and therefore must be freed by this CZString).
+
+Value::CZString::CZString(ArrayIndex aindex) : cstr_(0), index_(aindex) {}
+
+Value::CZString::CZString(char const *str, unsigned ulength, DuplicationPolicy allocate)
+ : cstr_(str)
+{
+  // 'allocate' names the duplication policy; the string itself is not copied here.
+  storage_.policy_ = allocate & 0x3;       // policy packed into 2 bits
+  storage_.length_ = ulength & 0x3FFFFFFF; // length packed into 30 bits
+}
+
+Value::CZString::CZString(const CZString &other)
+{
+ cstr_ = (other.storage_.policy_ != noDuplication && other.cstr_ != 0
+ ? duplicateStringValue(other.cstr_, other.storage_.length_)
+ : other.cstr_);
+ storage_.policy_ =
+ static_cast<unsigned>(
+ other.cstr_ ? (static_cast<DuplicationPolicy>(other.storage_.policy_) == noDuplication
+ ? noDuplication
+ : duplicate)
+ : static_cast<DuplicationPolicy>(other.storage_.policy_)) &
+ 3U;
+ storage_.length_ = other.storage_.length_;
+}
+
+#if JSON_HAS_RVALUE_REFERENCES
+Value::CZString::CZString(CZString &&other) : cstr_(other.cstr_), index_(other.index_)
+{
+ other.cstr_ = nullptr;
+}
+#endif
+
+Value::CZString::~CZString()
+{
+ if (cstr_ && storage_.policy_ == duplicate)
+ {
+ releaseStringValue(const_cast<char *>(cstr_), storage_.length_ + 1u); //+1 for null terminating
+ // character for sake of
+ // completeness but not
+ // actually necessary
+ }
+}
+
+void Value::CZString::swap(CZString &other)
+{
+ std::swap(cstr_, other.cstr_);
+ std::swap(index_, other.index_);
+}
+
+Value::CZString &Value::CZString::operator=(CZString other)
+{
+ swap(other);
+ return *this;
+}
+
+bool Value::CZString::operator<(const CZString &other) const
+{
+ if (!cstr_)
+ return index_ < other.index_;
+ // return strcmp(cstr_, other.cstr_) < 0;
+ // Assume both are strings.
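+  // Comparing with a length-bounded memcmp (rather than strcmp) keeps the
+  // ordering correct for strings that contain embedded '\0' characters.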
+ unsigned this_len = this->storage_.length_;
+ unsigned other_len = other.storage_.length_;
+ unsigned min_len = std::min(this_len, other_len);
+ JSON_ASSERT(this->cstr_ && other.cstr_);
+ int comp = memcmp(this->cstr_, other.cstr_, min_len);
+ if (comp < 0)
+ return true;
+ if (comp > 0)
+ return false;
+ return (this_len < other_len);
+}
+
+bool Value::CZString::operator==(const CZString &other) const
+{
+ if (!cstr_)
+ return index_ == other.index_;
+ // return strcmp(cstr_, other.cstr_) == 0;
+ // Assume both are strings.
+ unsigned this_len = this->storage_.length_;
+ unsigned other_len = other.storage_.length_;
+ if (this_len != other_len)
+ return false;
+ JSON_ASSERT(this->cstr_ && other.cstr_);
+ int comp = memcmp(this->cstr_, other.cstr_, this_len);
+ return comp == 0;
+}
+
+ArrayIndex Value::CZString::index() const { return index_; }
+
+// const char* Value::CZString::c_str() const { return cstr_; }
+const char *Value::CZString::data() const { return cstr_; }
+unsigned Value::CZString::length() const { return storage_.length_; }
+bool Value::CZString::isStaticString() const { return storage_.policy_ == noDuplication; }
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class Value::Value
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
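+// Usage sketch (illustrative, not part of the library): a Value behaves like
+// a dynamically typed node; assignment and operator[] convert it in place:
+//
+//   Json::Value obj;                 // starts as nullValue
+//   obj["name"] = "example";         // obj becomes objectValue
+//   obj["sizes"].append(1);          // obj["sizes"] becomes arrayValue
+//   int first = obj["sizes"][0u].asInt();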
+
+/*! \internal Default constructor initialization must be equivalent to:
+ * memset( this, 0, sizeof(Value) )
+ * This optimization is used in ValueInternalMap fast allocator.
+ */
+Value::Value(ValueType vtype)
+{
+ static char const empty[] = "";
+ initBasic(vtype);
+ switch (vtype)
+ {
+ case nullValue:
+ break;
+ case intValue:
+ case uintValue:
+ value_.int_ = 0;
+ break;
+ case realValue:
+ value_.real_ = 0.0;
+ break;
+ case stringValue:
+ // allocated_ == false, so this is safe.
+ value_.string_ = const_cast<char *>(static_cast<char const *>(empty));
+ break;
+ case arrayValue:
+ case objectValue:
+ value_.map_ = new ObjectValues();
+ break;
+ case booleanValue:
+ value_.bool_ = false;
+ break;
+ default:
+ JSON_ASSERT_UNREACHABLE;
+ }
+}
+
+Value::Value(Int value)
+{
+ initBasic(intValue);
+ value_.int_ = value;
+}
+
+Value::Value(UInt value)
+{
+ initBasic(uintValue);
+ value_.uint_ = value;
+}
+#if defined(JSON_HAS_INT64)
+Value::Value(Int64 value)
+{
+ initBasic(intValue);
+ value_.int_ = value;
+}
+Value::Value(UInt64 value)
+{
+ initBasic(uintValue);
+ value_.uint_ = value;
+}
+#endif // defined(JSON_HAS_INT64)
+
+Value::Value(double value)
+{
+ initBasic(realValue);
+ value_.real_ = value;
+}
+
+Value::Value(const char *value)
+{
+ initBasic(stringValue, true);
+ value_.string_ = duplicateAndPrefixStringValue(value, static_cast<unsigned>(strlen(value)));
+}
+
+Value::Value(const char *beginValue, const char *endValue)
+{
+ initBasic(stringValue, true);
+ value_.string_ =
+ duplicateAndPrefixStringValue(beginValue, static_cast<unsigned>(endValue - beginValue));
+}
+
+Value::Value(const JSONCPP_STRING &value)
+{
+ initBasic(stringValue, true);
+ value_.string_ =
+ duplicateAndPrefixStringValue(value.data(), static_cast<unsigned>(value.length()));
+}
+
+Value::Value(const StaticString &value)
+{
+ initBasic(stringValue);
+ value_.string_ = const_cast<char *>(value.c_str());
+}
+
+#ifdef JSON_USE_CPPTL
+Value::Value(const CppTL::ConstString &value)
+{
+ initBasic(stringValue, true);
+ value_.string_ = duplicateAndPrefixStringValue(value, static_cast<unsigned>(value.length()));
+}
+#endif
+
+Value::Value(bool value)
+{
+ initBasic(booleanValue);
+ value_.bool_ = value;
+}
+
+Value::Value(Value const &other)
+ : type_(other.type_), allocated_(false), comments_(0), start_(other.start_),
+ limit_(other.limit_)
+{
+ switch (type_)
+ {
+ case nullValue:
+ case intValue:
+ case uintValue:
+ case realValue:
+ case booleanValue:
+ value_ = other.value_;
+ break;
+ case stringValue:
+ if (other.value_.string_ && other.allocated_)
+ {
+ unsigned len;
+ char const *str;
+ decodePrefixedString(other.allocated_, other.value_.string_, &len, &str);
+ value_.string_ = duplicateAndPrefixStringValue(str, len);
+ allocated_ = true;
+ }
+ else
+ {
+ value_.string_ = other.value_.string_;
+ allocated_ = false;
+ }
+ break;
+ case arrayValue:
+ case objectValue:
+ value_.map_ = new ObjectValues(*other.value_.map_);
+ break;
+ default:
+ JSON_ASSERT_UNREACHABLE;
+ }
+ if (other.comments_)
+ {
+ comments_ = new CommentInfo[numberOfCommentPlacement];
+ for (int comment = 0; comment < numberOfCommentPlacement; ++comment)
+ {
+ const CommentInfo &otherComment = other.comments_[comment];
+ if (otherComment.comment_)
+ comments_[comment].setComment(otherComment.comment_, strlen(otherComment.comment_));
+ }
+ }
+}
+
+#if JSON_HAS_RVALUE_REFERENCES
+// Move constructor
+Value::Value(Value &&other)
+{
+ initBasic(nullValue);
+ swap(other);
+}
+#endif
+
+Value::~Value()
+{
+ switch (type_)
+ {
+ case nullValue:
+ case intValue:
+ case uintValue:
+ case realValue:
+ case booleanValue:
+ break;
+ case stringValue:
+ if (allocated_)
+ releasePrefixedStringValue(value_.string_);
+ break;
+ case arrayValue:
+ case objectValue:
+ delete value_.map_;
+ break;
+ default:
+ JSON_ASSERT_UNREACHABLE;
+ }
+
+ delete[] comments_;
+
+ value_.uint_ = 0;
+}
+
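+// operator= takes its argument by value, implementing the copy-and-swap
+// idiom: the copy (or move) is made in the parameter, and swap() then
+// provides a strong exception-safety guarantee.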
+Value &Value::operator=(Value other)
+{
+ swap(other);
+ return *this;
+}
+
+void Value::swapPayload(Value &other)
+{
+ ValueType temp = type_;
+ type_ = other.type_;
+ other.type_ = temp;
+ std::swap(value_, other.value_);
+ int temp2 = allocated_;
+ allocated_ = other.allocated_;
+ other.allocated_ = temp2 & 0x1;
+}
+
+void Value::swap(Value &other)
+{
+ swapPayload(other);
+ std::swap(comments_, other.comments_);
+ std::swap(start_, other.start_);
+ std::swap(limit_, other.limit_);
+}
+
+ValueType Value::type() const { return type_; }
+
+int Value::compare(const Value &other) const
+{
+ if (*this < other)
+ return -1;
+ if (*this > other)
+ return 1;
+ return 0;
+}
+
+bool Value::operator<(const Value &other) const
+{
+ int typeDelta = type_ - other.type_;
+ if (typeDelta)
+ return typeDelta < 0 ? true : false;
+ switch (type_)
+ {
+ case nullValue:
+ return false;
+ case intValue:
+ return value_.int_ < other.value_.int_;
+ case uintValue:
+ return value_.uint_ < other.value_.uint_;
+ case realValue:
+ return value_.real_ < other.value_.real_;
+ case booleanValue:
+ return value_.bool_ < other.value_.bool_;
+ case stringValue:
+ {
+ if ((value_.string_ == 0) || (other.value_.string_ == 0))
+ {
+ if (other.value_.string_)
+ return true;
+ else
+ return false;
+ }
+ unsigned this_len;
+ unsigned other_len;
+ char const *this_str;
+ char const *other_str;
+ decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+ decodePrefixedString(other.allocated_, other.value_.string_, &other_len, &other_str);
+ unsigned min_len = std::min(this_len, other_len);
+ JSON_ASSERT(this_str && other_str);
+ int comp = memcmp(this_str, other_str, min_len);
+ if (comp < 0)
+ return true;
+ if (comp > 0)
+ return false;
+ return (this_len < other_len);
+ }
+ case arrayValue:
+ case objectValue:
+ {
+ int delta = int(value_.map_->size() - other.value_.map_->size());
+ if (delta)
+ return delta < 0;
+ return (*value_.map_) < (*other.value_.map_);
+ }
+ default:
+ JSON_ASSERT_UNREACHABLE;
+ }
+ return false; // unreachable
+}
+
+bool Value::operator<=(const Value &other) const { return !(other < *this); }
+
+bool Value::operator>=(const Value &other) const { return !(*this < other); }
+
+bool Value::operator>(const Value &other) const { return other < *this; }
+
+bool Value::operator==(const Value &other) const
+{
+ // if ( type_ != other.type_ )
+ // GCC 2.95.3 says:
+ // attempt to take address of bit-field structure member `Json::Value::type_'
+ // Beats me, but a temp solves the problem.
+ int temp = other.type_;
+ if (type_ != temp)
+ return false;
+ switch (type_)
+ {
+ case nullValue:
+ return true;
+ case intValue:
+ return value_.int_ == other.value_.int_;
+ case uintValue:
+ return value_.uint_ == other.value_.uint_;
+ case realValue:
+ return value_.real_ == other.value_.real_;
+ case booleanValue:
+ return value_.bool_ == other.value_.bool_;
+ case stringValue:
+ {
+ if ((value_.string_ == 0) || (other.value_.string_ == 0))
+ {
+ return (value_.string_ == other.value_.string_);
+ }
+ unsigned this_len;
+ unsigned other_len;
+ char const *this_str;
+ char const *other_str;
+ decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+ decodePrefixedString(other.allocated_, other.value_.string_, &other_len, &other_str);
+ if (this_len != other_len)
+ return false;
+ JSON_ASSERT(this_str && other_str);
+ int comp = memcmp(this_str, other_str, this_len);
+ return comp == 0;
+ }
+ case arrayValue:
+ case objectValue:
+ return value_.map_->size() == other.value_.map_->size() &&
+ (*value_.map_) == (*other.value_.map_);
+ default:
+ JSON_ASSERT_UNREACHABLE;
+ }
+ return false; // unreachable
+}
+
+bool Value::operator!=(const Value &other) const { return !(*this == other); }
+
+const char *Value::asCString() const
+{
+ JSON_ASSERT_MESSAGE(type_ == stringValue, "in Json::Value::asCString(): requires stringValue");
+ if (value_.string_ == 0)
+ return 0;
+ unsigned this_len;
+ char const *this_str;
+ decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+ return this_str;
+}
+
+#if JSONCPP_USING_SECURE_MEMORY
+unsigned Value::getCStringLength() const
+{
+ JSON_ASSERT_MESSAGE(type_ == stringValue, "in Json::Value::asCString(): requires stringValue");
+ if (value_.string_ == 0)
+ return 0;
+ unsigned this_len;
+ char const *this_str;
+ decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+ return this_len;
+}
+#endif
+
+bool Value::getString(char const **str, char const **cend) const
+{
+ if (type_ != stringValue)
+ return false;
+ if (value_.string_ == 0)
+ return false;
+ unsigned length;
+ decodePrefixedString(this->allocated_, this->value_.string_, &length, str);
+ *cend = *str + length;
+ return true;
+}
+
+JSONCPP_STRING Value::asString() const
+{
+ switch (type_)
+ {
+ case nullValue:
+ return "";
+ case stringValue:
+ {
+ if (value_.string_ == 0)
+ return "";
+ unsigned this_len;
+ char const *this_str;
+ decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+ return JSONCPP_STRING(this_str, this_len);
+ }
+ case booleanValue:
+ return value_.bool_ ? "true" : "false";
+ case intValue:
+ return valueToString(value_.int_);
+ case uintValue:
+ return valueToString(value_.uint_);
+ case realValue:
+ return valueToString(value_.real_);
+ default:
+ JSON_FAIL_MESSAGE("Type is not convertible to string");
+ }
+}
+
+#ifdef JSON_USE_CPPTL
+CppTL::ConstString Value::asConstString() const
+{
+ unsigned len;
+ char const *str;
+ decodePrefixedString(allocated_, value_.string_, &len, &str);
+ return CppTL::ConstString(str, len);
+}
+#endif
+
+Value::Int Value::asInt() const
+{
+ switch (type_)
+ {
+ case intValue:
+ JSON_ASSERT_MESSAGE(isInt(), "LargestInt out of Int range");
+ return Int(value_.int_);
+ case uintValue:
+ JSON_ASSERT_MESSAGE(isInt(), "LargestUInt out of Int range");
+ return Int(value_.uint_);
+ case realValue:
+ JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt, maxInt), "double out of Int range");
+ return Int(value_.real_);
+ case nullValue:
+ return 0;
+ case booleanValue:
+ return value_.bool_ ? 1 : 0;
+ default:
+ break;
+ }
+ JSON_FAIL_MESSAGE("Value is not convertible to Int.");
+}
+
+Value::UInt Value::asUInt() const
+{
+ switch (type_)
+ {
+ case intValue:
+ JSON_ASSERT_MESSAGE(isUInt(), "LargestInt out of UInt range");
+ return UInt(value_.int_);
+ case uintValue:
+ JSON_ASSERT_MESSAGE(isUInt(), "LargestUInt out of UInt range");
+ return UInt(value_.uint_);
+ case realValue:
+ JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt), "double out of UInt range");
+ return UInt(value_.real_);
+ case nullValue:
+ return 0;
+ case booleanValue:
+ return value_.bool_ ? 1 : 0;
+ default:
+ break;
+ }
+ JSON_FAIL_MESSAGE("Value is not convertible to UInt.");
+}
+
+#if defined(JSON_HAS_INT64)
+
+Value::Int64 Value::asInt64() const
+{
+ switch (type_)
+ {
+ case intValue:
+ return Int64(value_.int_);
+ case uintValue:
+ JSON_ASSERT_MESSAGE(isInt64(), "LargestUInt out of Int64 range");
+ return Int64(value_.uint_);
+ case realValue:
+ JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt64, maxInt64), "double out of Int64 range");
+ return Int64(value_.real_);
+ case nullValue:
+ return 0;
+ case booleanValue:
+ return value_.bool_ ? 1 : 0;
+ default:
+ break;
+ }
+ JSON_FAIL_MESSAGE("Value is not convertible to Int64.");
+}
+
+Value::UInt64 Value::asUInt64() const
+{
+ switch (type_)
+ {
+ case intValue:
+ JSON_ASSERT_MESSAGE(isUInt64(), "LargestInt out of UInt64 range");
+ return UInt64(value_.int_);
+ case uintValue:
+ return UInt64(value_.uint_);
+ case realValue:
+ JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt64), "double out of UInt64 range");
+ return UInt64(value_.real_);
+ case nullValue:
+ return 0;
+ case booleanValue:
+ return value_.bool_ ? 1 : 0;
+ default:
+ break;
+ }
+ JSON_FAIL_MESSAGE("Value is not convertible to UInt64.");
+}
+#endif // if defined(JSON_HAS_INT64)
+
+LargestInt Value::asLargestInt() const
+{
+#if defined(JSON_NO_INT64)
+ return asInt();
+#else
+ return asInt64();
+#endif
+}
+
+LargestUInt Value::asLargestUInt() const
+{
+#if defined(JSON_NO_INT64)
+ return asUInt();
+#else
+ return asUInt64();
+#endif
+}
+
+double Value::asDouble() const
+{
+ switch (type_)
+ {
+ case intValue:
+ return static_cast<double>(value_.int_);
+ case uintValue:
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+ return static_cast<double>(value_.uint_);
+#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+ return integerToDouble(value_.uint_);
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+ case realValue:
+ return value_.real_;
+ case nullValue:
+ return 0.0;
+ case booleanValue:
+ return value_.bool_ ? 1.0 : 0.0;
+ default:
+ break;
+ }
+ JSON_FAIL_MESSAGE("Value is not convertible to double.");
+}
+
+float Value::asFloat() const
+{
+ switch (type_)
+ {
+ case intValue:
+ return static_cast<float>(value_.int_);
+ case uintValue:
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+ return static_cast<float>(value_.uint_);
+#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+      // This can fail (silently?) if the value is bigger than FLT_MAX.
+ return static_cast<float>(integerToDouble(value_.uint_));
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+ case realValue:
+ return static_cast<float>(value_.real_);
+ case nullValue:
+ return 0.0;
+ case booleanValue:
+ return value_.bool_ ? 1.0f : 0.0f;
+ default:
+ break;
+ }
+ JSON_FAIL_MESSAGE("Value is not convertible to float.");
+}
+
+bool Value::asBool() const
+{
+ switch (type_)
+ {
+ case booleanValue:
+ return value_.bool_;
+ case nullValue:
+ return false;
+ case intValue:
+ return value_.int_ ? true : false;
+ case uintValue:
+ return value_.uint_ ? true : false;
+ case realValue:
+ // This is kind of strange. Not recommended.
+ return (value_.real_ != 0.0) ? true : false;
+ default:
+ break;
+ }
+ JSON_FAIL_MESSAGE("Value is not convertible to bool.");
+}
+
+bool Value::isConvertibleTo(ValueType other) const
+{
+ switch (other)
+ {
+ case nullValue:
+ return (isNumeric() && asDouble() == 0.0) ||
+ (type_ == booleanValue && value_.bool_ == false) ||
+ (type_ == stringValue && asString() == "") ||
+ (type_ == arrayValue && value_.map_->size() == 0) ||
+ (type_ == objectValue && value_.map_->size() == 0) || type_ == nullValue;
+ case intValue:
+ return isInt() || (type_ == realValue && InRange(value_.real_, minInt, maxInt)) ||
+ type_ == booleanValue || type_ == nullValue;
+ case uintValue:
+ return isUInt() || (type_ == realValue && InRange(value_.real_, 0, maxUInt)) ||
+ type_ == booleanValue || type_ == nullValue;
+ case realValue:
+ return isNumeric() || type_ == booleanValue || type_ == nullValue;
+ case booleanValue:
+ return isNumeric() || type_ == booleanValue || type_ == nullValue;
+ case stringValue:
+ return isNumeric() || type_ == booleanValue || type_ == stringValue || type_ == nullValue;
+ case arrayValue:
+ return type_ == arrayValue || type_ == nullValue;
+ case objectValue:
+ return type_ == objectValue || type_ == nullValue;
+ }
+ JSON_ASSERT_UNREACHABLE;
+ return false;
+}
+
+/// Number of values in array or object
+ArrayIndex Value::size() const
+{
+ switch (type_)
+ {
+ case nullValue:
+ case intValue:
+ case uintValue:
+ case realValue:
+ case booleanValue:
+ case stringValue:
+ return 0;
+ case arrayValue: // size of the array is highest index + 1
+ if (!value_.map_->empty())
+ {
+ ObjectValues::const_iterator itLast = value_.map_->end();
+ --itLast;
+ return (*itLast).first.index() + 1;
+ }
+ return 0;
+ case objectValue:
+ return ArrayIndex(value_.map_->size());
+ }
+ JSON_ASSERT_UNREACHABLE;
+ return 0; // unreachable;
+}
+
+bool Value::empty() const
+{
+ if (isNull() || isArray() || isObject())
+ return size() == 0u;
+ else
+ return false;
+}
+
+bool Value::operator!() const { return isNull(); }
+
+void Value::clear()
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue || type_ == objectValue,
+ "in Json::Value::clear(): requires complex value");
+ start_ = 0;
+ limit_ = 0;
+ switch (type_)
+ {
+ case arrayValue:
+ case objectValue:
+ value_.map_->clear();
+ break;
+ default:
+ break;
+ }
+}
+
+void Value::resize(ArrayIndex newSize)
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue,
+ "in Json::Value::resize(): requires arrayValue");
+ if (type_ == nullValue)
+ *this = Value(arrayValue);
+ ArrayIndex oldSize = size();
+ if (newSize == 0)
+ clear();
+ else if (newSize > oldSize)
+ (*this)[newSize - 1];
+ else
+ {
+ for (ArrayIndex index = newSize; index < oldSize; ++index)
+ {
+ value_.map_->erase(index);
+ }
+ JSON_ASSERT(size() == newSize);
+ }
+}
+
+Value &Value::operator[](ArrayIndex index)
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue,
+ "in Json::Value::operator[](ArrayIndex): requires arrayValue");
+ if (type_ == nullValue)
+ *this = Value(arrayValue);
+ CZString key(index);
+ ObjectValues::iterator it = value_.map_->lower_bound(key);
+ if (it != value_.map_->end() && (*it).first == key)
+ return (*it).second;
+
+ ObjectValues::value_type defaultValue(key, nullSingleton());
+ it = value_.map_->insert(it, defaultValue);
+ return (*it).second;
+}
+
+Value &Value::operator[](int index)
+{
+ JSON_ASSERT_MESSAGE(index >= 0,
+ "in Json::Value::operator[](int index): index cannot be negative");
+ return (*this)[ArrayIndex(index)];
+}
+
+const Value &Value::operator[](ArrayIndex index) const
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue,
+ "in Json::Value::operator[](ArrayIndex)const: requires arrayValue");
+ if (type_ == nullValue)
+ return nullSingleton();
+ CZString key(index);
+ ObjectValues::const_iterator it = value_.map_->find(key);
+ if (it == value_.map_->end())
+ return nullSingleton();
+ return (*it).second;
+}
+
+const Value &Value::operator[](int index) const
+{
+ JSON_ASSERT_MESSAGE(index >= 0,
+ "in Json::Value::operator[](int index) const: index cannot be negative");
+ return (*this)[ArrayIndex(index)];
+}
+
+void Value::initBasic(ValueType vtype, bool allocated)
+{
+ type_ = vtype;
+ allocated_ = allocated;
+ comments_ = 0;
+ start_ = 0;
+ limit_ = 0;
+}
+
+// Access an object value by name, create a null member if it does not exist.
+// @pre Type of '*this' is object or null.
+// @param key is null-terminated.
+Value &Value::resolveReference(const char *key)
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue,
+ "in Json::Value::resolveReference(): requires objectValue");
+ if (type_ == nullValue)
+ *this = Value(objectValue);
+ CZString actualKey(key, static_cast<unsigned>(strlen(key)), CZString::noDuplication); // NOTE!
+ ObjectValues::iterator it = value_.map_->lower_bound(actualKey);
+ if (it != value_.map_->end() && (*it).first == actualKey)
+ return (*it).second;
+
+ ObjectValues::value_type defaultValue(actualKey, nullSingleton());
+ it = value_.map_->insert(it, defaultValue);
+ Value &value = (*it).second;
+ return value;
+}
+
+// @param key is not null-terminated.
+Value &Value::resolveReference(char const *key, char const *cend)
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue,
+ "in Json::Value::resolveReference(key, end): requires objectValue");
+ if (type_ == nullValue)
+ *this = Value(objectValue);
+ CZString actualKey(key, static_cast<unsigned>(cend - key), CZString::duplicateOnCopy);
+ ObjectValues::iterator it = value_.map_->lower_bound(actualKey);
+ if (it != value_.map_->end() && (*it).first == actualKey)
+ return (*it).second;
+
+ ObjectValues::value_type defaultValue(actualKey, nullSingleton());
+ it = value_.map_->insert(it, defaultValue);
+ Value &value = (*it).second;
+ return value;
+}
+
+Value Value::get(ArrayIndex index, const Value &defaultValue) const
+{
+ const Value *value = &((*this)[index]);
+ return value == &nullSingleton() ? defaultValue : *value;
+}
+
+bool Value::isValidIndex(ArrayIndex index) const { return index < size(); }
+
+Value const *Value::find(char const *key, char const *cend) const
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue,
+ "in Json::Value::find(key, end, found): requires objectValue or nullValue");
+ if (type_ == nullValue)
+ return NULL;
+ CZString actualKey(key, static_cast<unsigned>(cend - key), CZString::noDuplication);
+ ObjectValues::const_iterator it = value_.map_->find(actualKey);
+ if (it == value_.map_->end())
+ return NULL;
+ return &(*it).second;
+}
+const Value &Value::operator[](const char *key) const
+{
+ Value const *found = find(key, key + strlen(key));
+ if (!found)
+ return nullSingleton();
+ return *found;
+}
+Value const &Value::operator[](JSONCPP_STRING const &key) const
+{
+ Value const *found = find(key.data(), key.data() + key.length());
+ if (!found)
+ return nullSingleton();
+ return *found;
+}
+
+Value &Value::operator[](const char *key) { return resolveReference(key, key + strlen(key)); }
+
+Value &Value::operator[](const JSONCPP_STRING &key)
+{
+ return resolveReference(key.data(), key.data() + key.length());
+}
+
+Value &Value::operator[](const StaticString &key) { return resolveReference(key.c_str()); }
+
+#ifdef JSON_USE_CPPTL
+Value &Value::operator[](const CppTL::ConstString &key)
+{
+ return resolveReference(key.c_str(), key.end_c_str());
+}
+Value const &Value::operator[](CppTL::ConstString const &key) const
+{
+ Value const *found = find(key.c_str(), key.end_c_str());
+ if (!found)
+ return nullSingleton();
+ return *found;
+}
+#endif
+
+Value &Value::append(const Value &value) { return (*this)[size()] = value; }
+
+Value Value::get(char const *key, char const *cend, Value const &defaultValue) const
+{
+ Value const *found = find(key, cend);
+ return !found ? defaultValue : *found;
+}
+Value Value::get(char const *key, Value const &defaultValue) const
+{
+ return get(key, key + strlen(key), defaultValue);
+}
+Value Value::get(JSONCPP_STRING const &key, Value const &defaultValue) const
+{
+ return get(key.data(), key.data() + key.length(), defaultValue);
+}
+
+bool Value::removeMember(const char *key, const char *cend, Value *removed)
+{
+ if (type_ != objectValue)
+ {
+ return false;
+ }
+ CZString actualKey(key, static_cast<unsigned>(cend - key), CZString::noDuplication);
+ ObjectValues::iterator it = value_.map_->find(actualKey);
+ if (it == value_.map_->end())
+ return false;
+ *removed = it->second;
+ value_.map_->erase(it);
+ return true;
+}
+bool Value::removeMember(const char *key, Value *removed)
+{
+ return removeMember(key, key + strlen(key), removed);
+}
+bool Value::removeMember(JSONCPP_STRING const &key, Value *removed)
+{
+ return removeMember(key.data(), key.data() + key.length(), removed);
+}
+Value Value::removeMember(const char *key)
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue,
+ "in Json::Value::removeMember(): requires objectValue");
+ if (type_ == nullValue)
+ return nullSingleton();
+
+ Value removed; // null
+ removeMember(key, key + strlen(key), &removed);
+ return removed; // still null if removeMember() did nothing
+}
+Value Value::removeMember(const JSONCPP_STRING &key) { return removeMember(key.c_str()); }
+
+bool Value::removeIndex(ArrayIndex index, Value *removed)
+{
+ if (type_ != arrayValue)
+ {
+ return false;
+ }
+ CZString key(index);
+ ObjectValues::iterator it = value_.map_->find(key);
+ if (it == value_.map_->end())
+ {
+ return false;
+ }
+ *removed = it->second;
+ ArrayIndex oldSize = size();
+  // shift all following items one slot to the left, into the place of the removed one
+ for (ArrayIndex i = index; i < (oldSize - 1); ++i)
+ {
+ CZString keey(i);
+ (*value_.map_)[keey] = (*this)[i + 1];
+ }
+ // erase the last one ("leftover")
+ CZString keyLast(oldSize - 1);
+ ObjectValues::iterator itLast = value_.map_->find(keyLast);
+ value_.map_->erase(itLast);
+ return true;
+}
+
+#ifdef JSON_USE_CPPTL
+Value Value::get(const CppTL::ConstString &key, const Value &defaultValue) const
+{
+ return get(key.c_str(), key.end_c_str(), defaultValue);
+}
+#endif
+
+bool Value::isMember(char const *key, char const *cend) const
+{
+ Value const *value = find(key, cend);
+ return NULL != value;
+}
+bool Value::isMember(char const *key) const { return isMember(key, key + strlen(key)); }
+bool Value::isMember(JSONCPP_STRING const &key) const
+{
+ return isMember(key.data(), key.data() + key.length());
+}
+
+#ifdef JSON_USE_CPPTL
+bool Value::isMember(const CppTL::ConstString &key) const
+{
+ return isMember(key.c_str(), key.end_c_str());
+}
+#endif
+
+Value::Members Value::getMemberNames() const
+{
+ JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue,
+ "in Json::Value::getMemberNames(), value must be objectValue");
+ if (type_ == nullValue)
+ return Value::Members();
+ Members members;
+ members.reserve(value_.map_->size());
+ ObjectValues::const_iterator it = value_.map_->begin();
+ ObjectValues::const_iterator itEnd = value_.map_->end();
+ for (; it != itEnd; ++it)
+ {
+ members.push_back(JSONCPP_STRING((*it).first.data(), (*it).first.length()));
+ }
+ return members;
+}
+//
+//# ifdef JSON_USE_CPPTL
+// EnumMemberNames
+// Value::enumMemberNames() const
+//{
+// if ( type_ == objectValue )
+// {
+// return CppTL::Enum::any( CppTL::Enum::transform(
+// CppTL::Enum::keys( *(value_.map_), CppTL::Type<const CZString &>() ),
+// MemberNamesTransform() ) );
+// }
+// return EnumMemberNames();
+//}
+//
+//
+// EnumValues
+// Value::enumValues() const
+//{
+// if ( type_ == objectValue || type_ == arrayValue )
+// return CppTL::Enum::anyValues( *(value_.map_),
+// CppTL::Type<const Value &>() );
+// return EnumValues();
+//}
+//
+//# endif
+
+static bool IsIntegral(double d)
+{
+ double integral_part;
+ return modf(d, &integral_part) == 0.0;
+}
+
+bool Value::isNull() const { return type_ == nullValue; }
+
+bool Value::isBool() const { return type_ == booleanValue; }
+
+bool Value::isInt() const
+{
+ switch (type_)
+ {
+ case intValue:
+ return value_.int_ >= minInt && value_.int_ <= maxInt;
+ case uintValue:
+ return value_.uint_ <= UInt(maxInt);
+ case realValue:
+ return value_.real_ >= minInt && value_.real_ <= maxInt && IsIntegral(value_.real_);
+ default:
+ break;
+ }
+ return false;
+}
+
+bool Value::isUInt() const
+{
+ switch (type_)
+ {
+ case intValue:
+ return value_.int_ >= 0 && LargestUInt(value_.int_) <= LargestUInt(maxUInt);
+ case uintValue:
+ return value_.uint_ <= maxUInt;
+ case realValue:
+ return value_.real_ >= 0 && value_.real_ <= maxUInt && IsIntegral(value_.real_);
+ default:
+ break;
+ }
+ return false;
+}
+
+bool Value::isInt64() const
+{
+#if defined(JSON_HAS_INT64)
+ switch (type_)
+ {
+ case intValue:
+ return true;
+ case uintValue:
+ return value_.uint_ <= UInt64(maxInt64);
+ case realValue:
+ // Note that maxInt64 (= 2^63 - 1) is not exactly representable as a
+ // double, so double(maxInt64) will be rounded up to 2^63. Therefore we
+ // require the value to be strictly less than the limit.
+ return value_.real_ >= double(minInt64) && value_.real_ < double(maxInt64) &&
+ IsIntegral(value_.real_);
+ default:
+ break;
+ }
+#endif // JSON_HAS_INT64
+ return false;
+}
+
+bool Value::isUInt64() const
+{
+#if defined(JSON_HAS_INT64)
+ switch (type_)
+ {
+ case intValue:
+ return value_.int_ >= 0;
+ case uintValue:
+ return true;
+ case realValue:
+ // Note that maxUInt64 (= 2^64 - 1) is not exactly representable as a
+ // double, so double(maxUInt64) will be rounded up to 2^64. Therefore we
+ // require the value to be strictly less than the limit.
+ return value_.real_ >= 0 && value_.real_ < maxUInt64AsDouble && IsIntegral(value_.real_);
+ default:
+ break;
+ }
+#endif // JSON_HAS_INT64
+ return false;
+}
+
+bool Value::isIntegral() const
+{
+#if defined(JSON_HAS_INT64)
+ return isInt64() || isUInt64();
+#else
+ return isInt() || isUInt();
+#endif
+}
+
+bool Value::isDouble() const { return type_ == realValue || isIntegral(); }
+
+bool Value::isNumeric() const { return isIntegral() || isDouble(); }
+
+bool Value::isString() const { return type_ == stringValue; }
+
+bool Value::isArray() const { return type_ == arrayValue; }
+
+bool Value::isObject() const { return type_ == objectValue; }
+
+void Value::setComment(const char *comment, size_t len, CommentPlacement placement)
+{
+ if (!comments_)
+ comments_ = new CommentInfo[numberOfCommentPlacement];
+ if ((len > 0) && (comment[len - 1] == '\n'))
+ {
+ // Always discard trailing newline, to aid indentation.
+ len -= 1;
+ }
+ comments_[placement].setComment(comment, len);
+}
+
+void Value::setComment(const char *comment, CommentPlacement placement)
+{
+ setComment(comment, strlen(comment), placement);
+}
+
+void Value::setComment(const JSONCPP_STRING &comment, CommentPlacement placement)
+{
+ setComment(comment.c_str(), comment.length(), placement);
+}
+
+bool Value::hasComment(CommentPlacement placement) const
+{
+ return comments_ != 0 && comments_[placement].comment_ != 0;
+}
+
+JSONCPP_STRING Value::getComment(CommentPlacement placement) const
+{
+ if (hasComment(placement))
+ return comments_[placement].comment_;
+ return "";
+}
+
+void Value::setOffsetStart(ptrdiff_t start) { start_ = start; }
+
+void Value::setOffsetLimit(ptrdiff_t limit) { limit_ = limit; }
+
+ptrdiff_t Value::getOffsetStart() const { return start_; }
+
+ptrdiff_t Value::getOffsetLimit() const { return limit_; }
+
+JSONCPP_STRING Value::toStyledString() const
+{
+ StyledWriter writer;
+ return writer.write(*this);
+}
+
+Value::const_iterator Value::begin() const
+{
+ switch (type_)
+ {
+ case arrayValue:
+ case objectValue:
+ if (value_.map_)
+ return const_iterator(value_.map_->begin());
+ break;
+ default:
+ break;
+ }
+ return const_iterator();
+}
+
+Value::const_iterator Value::end() const
+{
+ switch (type_)
+ {
+ case arrayValue:
+ case objectValue:
+ if (value_.map_)
+ return const_iterator(value_.map_->end());
+ break;
+ default:
+ break;
+ }
+ return const_iterator();
+}
+
+Value::iterator Value::begin()
+{
+ switch (type_)
+ {
+ case arrayValue:
+ case objectValue:
+ if (value_.map_)
+ return iterator(value_.map_->begin());
+ break;
+ default:
+ break;
+ }
+ return iterator();
+}
+
+Value::iterator Value::end()
+{
+ switch (type_)
+ {
+ case arrayValue:
+ case objectValue:
+ if (value_.map_)
+ return iterator(value_.map_->end());
+ break;
+ default:
+ break;
+ }
+ return iterator();
+}
+
+// class PathArgument
+// //////////////////////////////////////////////////////////////////
+
+PathArgument::PathArgument() : key_(), index_(), kind_(kindNone) {}
+
+PathArgument::PathArgument(ArrayIndex index) : key_(), index_(index), kind_(kindIndex) {}
+
+PathArgument::PathArgument(const char *key) : key_(key), index_(), kind_(kindKey) {}
+
+PathArgument::PathArgument(const JSONCPP_STRING &key) : key_(key.c_str()), index_(), kind_(kindKey)
+{
+}
+
+// class Path
+// //////////////////////////////////////////////////////////////////
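+// Usage sketch (illustrative; "settings", "layers" and "name" are
+// hypothetical keys): a Path selects a node inside a Value tree, with "%"
+// placeholders filled from the extra PathArgument parameters:
+//
+//   Json::Path path(".settings.layers[%].name", 2);
+//   const Json::Value &name = path.resolve(root);
+//   // equivalent to root["settings"]["layers"][2]["name"]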
+
+Path::Path(const JSONCPP_STRING &path, const PathArgument &a1, const PathArgument &a2,
+ const PathArgument &a3, const PathArgument &a4, const PathArgument &a5)
+{
+ InArgs in;
+ in.push_back(&a1);
+ in.push_back(&a2);
+ in.push_back(&a3);
+ in.push_back(&a4);
+ in.push_back(&a5);
+ makePath(path, in);
+}
+
+void Path::makePath(const JSONCPP_STRING &path, const InArgs &in)
+{
+ const char *current = path.c_str();
+ const char *end = current + path.length();
+ InArgs::const_iterator itInArg = in.begin();
+ while (current != end)
+ {
+ if (*current == '[')
+ {
+ ++current;
+ if (*current == '%')
+ addPathInArg(path, in, itInArg, PathArgument::kindIndex);
+ else
+ {
+ ArrayIndex index = 0;
+ for (; current != end && *current >= '0' && *current <= '9'; ++current)
+ index = index * 10 + ArrayIndex(*current - '0');
+ args_.push_back(index);
+ }
+ if (current == end || *++current != ']')
+ invalidPath(path, int(current - path.c_str()));
+ }
+ else if (*current == '%')
+ {
+ addPathInArg(path, in, itInArg, PathArgument::kindKey);
+ ++current;
+ }
+ else if (*current == '.' || *current == ']')
+ {
+ ++current;
+ }
+ else
+ {
+ const char *beginName = current;
+ while (current != end && !strchr("[.", *current))
+ ++current;
+ args_.push_back(JSONCPP_STRING(beginName, current));
+ }
+ }
+}
+
+void Path::addPathInArg(const JSONCPP_STRING & /*path*/, const InArgs &in,
+ InArgs::const_iterator &itInArg, PathArgument::Kind kind)
+{
+ if (itInArg == in.end())
+ {
+ // Error: missing argument %d
+ }
+ else if ((*itInArg)->kind_ != kind)
+ {
+ // Error: bad argument type
+ }
+ else
+ {
+ args_.push_back(**itInArg++);
+ }
+}
+
+void Path::invalidPath(const JSONCPP_STRING & /*path*/, int /*location*/)
+{
+ // Error: invalid path.
+}
+
+const Value &Path::resolve(const Value &root) const
+{
+ const Value *node = &root;
+ for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it)
+ {
+ const PathArgument &arg = *it;
+ if (arg.kind_ == PathArgument::kindIndex)
+ {
+ if (!node->isArray() || !node->isValidIndex(arg.index_))
+ {
+ // Error: unable to resolve path (array value expected at position...
+ return Value::null;
+ }
+ node = &((*node)[arg.index_]);
+ }
+ else if (arg.kind_ == PathArgument::kindKey)
+ {
+ if (!node->isObject())
+ {
+ // Error: unable to resolve path (object value expected at position...)
+ return Value::null;
+ }
+ node = &((*node)[arg.key_]);
+ if (node == &Value::nullSingleton())
+ {
+ // Error: unable to resolve path (object has no member named '' at
+ // position...)
+ return Value::null;
+ }
+ }
+ }
+ return *node;
+}
+
+Value Path::resolve(const Value &root, const Value &defaultValue) const
+{
+ const Value *node = &root;
+ for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it)
+ {
+ const PathArgument &arg = *it;
+ if (arg.kind_ == PathArgument::kindIndex)
+ {
+ if (!node->isArray() || !node->isValidIndex(arg.index_))
+ return defaultValue;
+ node = &((*node)[arg.index_]);
+ }
+ else if (arg.kind_ == PathArgument::kindKey)
+ {
+ if (!node->isObject())
+ return defaultValue;
+ node = &((*node)[arg.key_]);
+ if (node == &Value::nullSingleton())
+ return defaultValue;
+ }
+ }
+ return *node;
+}
+
+Value &Path::make(Value &root) const
+{
+ Value *node = &root;
+ for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it)
+ {
+ const PathArgument &arg = *it;
+ if (arg.kind_ == PathArgument::kindIndex)
+ {
+ if (!node->isArray())
+ {
+ // Error: node is not an array at position ...
+ }
+ node = &((*node)[arg.index_]);
+ }
+ else if (arg.kind_ == PathArgument::kindKey)
+ {
+ if (!node->isObject())
+ {
+ // Error: node is not an object at position...
+ }
+ node = &((*node)[arg.key_]);
+ }
+ }
+ return *node;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_value.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_writer.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2011 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include <json/writer.h>
+#include "json_tool.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <iomanip>
+#include <memory>
+#include <sstream>
+#include <utility>
+#include <set>
+#include <cassert>
+#include <cstring>
+#include <cstdio>
+
+#if defined(_MSC_VER) && _MSC_VER >= 1200 && _MSC_VER < 1800 // Between VC++ 6.0 and VC++ 11.0
+#include <float.h>
+#define isfinite _finite
+#elif defined(__sun) && defined(__SVR4) // Solaris
+#if !defined(isfinite)
+#include <ieeefp.h>
+#define isfinite finite
+#endif
+#elif defined(_AIX)
+#if !defined(isfinite)
+#include <math.h>
+#define isfinite finite
+#endif
+#elif defined(__hpux)
+#if !defined(isfinite)
+#if defined(__ia64) && !defined(finite)
+#define isfinite(x) ((sizeof(x) == sizeof(float) ? _Isfinitef(x) : _IsFinite(x)))
+#else
+#include <math.h>
+#define isfinite finite
+#endif
+#endif
+#else
+#include <cmath>
+#if !(defined(__QNXNTO__)) // QNX already defines isfinite
+#define isfinite std::isfinite
+#endif
+#endif
+
+#if defined(_MSC_VER)
+#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
+#define snprintf sprintf_s
+#elif _MSC_VER >= 1900 // VC++ 14.0 and above
+#define snprintf std::snprintf
+#else
+#define snprintf _snprintf
+#endif
+#elif defined(__ANDROID__) || defined(__QNXNTO__)
+#define snprintf snprintf
+#elif __cplusplus >= 201103L
+#if !defined(__MINGW32__) && !defined(__CYGWIN__)
+#define snprintf std::snprintf
+#endif
+#endif
+
+#if defined(__BORLANDC__)
+#include <float.h>
+#define isfinite _finite
+#define snprintf _snprintf
+#endif
+
+#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
+// Disable warning about strdup being deprecated.
+#pragma warning(disable : 4996)
+#endif
+
+namespace Json
+{
+
+#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
+typedef std::unique_ptr<StreamWriter> StreamWriterPtr;
+#else
+typedef std::auto_ptr<StreamWriter> StreamWriterPtr;
+#endif
+
+static bool containsControlCharacter(const char *str)
+{
+ while (*str)
+ {
+ if (isControlCharacter(*(str++)))
+ return true;
+ }
+ return false;
+}
+
+static bool containsControlCharacter0(const char *str, unsigned len)
+{
+ char const *end = str + len;
+ while (end != str)
+ {
+ if (isControlCharacter(*str) || 0 == *str)
+ return true;
+ ++str;
+ }
+ return false;
+}
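+// Unlike containsControlCharacter(), this length-bounded variant also treats
+// an embedded '\0' as a control character; valueToQuotedStringN() below
+// relies on that for length-prefixed strings.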
+
+JSONCPP_STRING valueToString(LargestInt value)
+{
+ UIntToStringBuffer buffer;
+ char *current = buffer + sizeof(buffer);
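+  // uintToString() fills the buffer backwards from 'current'. Note that
+  // negating minLargestInt would overflow a LargestInt, so its magnitude is
+  // produced as LargestUInt(maxLargestInt) + 1 instead.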
+ if (value == Value::minLargestInt)
+ {
+ uintToString(LargestUInt(Value::maxLargestInt) + 1, current);
+ *--current = '-';
+ }
+ else if (value < 0)
+ {
+ uintToString(LargestUInt(-value), current);
+ *--current = '-';
+ }
+ else
+ {
+ uintToString(LargestUInt(value), current);
+ }
+ assert(current >= buffer);
+ return current;
+}
+
+JSONCPP_STRING valueToString(LargestUInt value)
+{
+ UIntToStringBuffer buffer;
+ char *current = buffer + sizeof(buffer);
+ uintToString(value, current);
+ assert(current >= buffer);
+ return current;
+}
+
+#if defined(JSON_HAS_INT64)
+
+JSONCPP_STRING valueToString(Int value) { return valueToString(LargestInt(value)); }
+
+JSONCPP_STRING valueToString(UInt value) { return valueToString(LargestUInt(value)); }
+
+#endif // # if defined(JSON_HAS_INT64)
+
+namespace
+{
+JSONCPP_STRING valueToString(double value, bool useSpecialFloats, unsigned int precision)
+{
+ // Allocate a buffer that is more than large enough to store the 16 digits of
+ // precision requested below.
+ char buffer[32];
+ int len = -1;
+
+ char formatString[6];
+ sprintf(formatString, "%%.%dg", precision);
+
+  // Print into the buffer. We need not request the alternative representation
+  // that always has a decimal point because JSON doesn't distinguish the
+  // concepts of reals and integers.
+ if (isfinite(value))
+ {
+ len = snprintf(buffer, sizeof(buffer), formatString, value);
+ }
+ else
+ {
+ // IEEE standard states that NaN values will not compare to themselves
+ if (value != value)
+ {
+ len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "NaN" : "null");
+ }
+ else if (value < 0)
+ {
+ len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "-Infinity" : "-1e+9999");
+ }
+ else
+ {
+ len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "Infinity" : "1e+9999");
+ }
+    // For these strings we do not need to call fixNumericLocale(), but it is fast.
+ }
+ assert(len >= 0);
+ fixNumericLocale(buffer, buffer + len);
+ return buffer;
+}
+}
+
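+// The default precision of 17 significant digits is the smallest count that
+// round-trips every IEEE-754 double exactly.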
+JSONCPP_STRING valueToString(double value) { return valueToString(value, false, 17); }
+
+JSONCPP_STRING valueToString(bool value) { return value ? "true" : "false"; }
+
+JSONCPP_STRING valueToQuotedString(const char *value)
+{
+ if (value == NULL)
+ return "";
+ // Not sure how to handle unicode...
+ if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter(value))
+ return JSONCPP_STRING("\"") + value + "\"";
+ // We have to walk value and escape any special characters.
+ // Appending to JSONCPP_STRING is not efficient, but this should be rare.
+ // (Note: forward slashes are *not* rare, but I am not escaping them.)
+ JSONCPP_STRING::size_type maxsize = strlen(value) * 2 + 3; // allescaped+quotes+NULL
+ JSONCPP_STRING result;
+ result.reserve(maxsize); // to avoid lots of mallocs
+ result += "\"";
+ for (const char *c = value; *c != 0; ++c)
+ {
+ switch (*c)
+ {
+ case '\"':
+ result += "\\\"";
+ break;
+ case '\\':
+ result += "\\\\";
+ break;
+ case '\b':
+ result += "\\b";
+ break;
+ case '\f':
+ result += "\\f";
+ break;
+ case '\n':
+ result += "\\n";
+ break;
+ case '\r':
+ result += "\\r";
+ break;
+ case '\t':
+ result += "\\t";
+ break;
+ // case '/':
+ // Even though \/ is considered a legal escape in JSON, a bare
+ // slash is also legal, so I see no reason to escape it.
+ // (I hope I am not misunderstanding something.)
+ // blep notes: actually escaping \/ may be useful in javascript to avoid </
+ // sequence.
+ // Should add a flag to allow this compatibility mode and prevent this
+ // sequence from occurring.
+ default:
+ if (isControlCharacter(*c))
+ {
+ JSONCPP_OSTRINGSTREAM oss;
+ oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4)
+ << static_cast<int>(*c);
+ result += oss.str();
+ }
+ else
+ {
+ result += *c;
+ }
+ break;
+ }
+ }
+ result += "\"";
+ return result;
+}
+
+// https://github.com/upcaste/upcaste/blob/master/src/upcore/src/cstring/strnpbrk.cpp
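+// Like strpbrk, but scans at most n bytes and is not stopped by embedded NULs.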
+static char const *strnpbrk(char const *s, char const *accept, size_t n)
+{
+ assert((s || !n) && accept);
+
+ char const *const end = s + n;
+ for (char const *cur = s; cur < end; ++cur)
+ {
+ int const c = *cur;
+ for (char const *a = accept; *a; ++a)
+ {
+ if (*a == c)
+ {
+ return cur;
+ }
+ }
+ }
+ return NULL;
+}
+static JSONCPP_STRING valueToQuotedStringN(const char *value, unsigned length)
+{
+ if (value == NULL)
+ return "";
+ // Not sure how to handle unicode...
+ if (strnpbrk(value, "\"\\\b\f\n\r\t", length) == NULL &&
+ !containsControlCharacter0(value, length))
+ return JSONCPP_STRING("\"") + value + "\"";
+ // We have to walk value and escape any special characters.
+ // Appending to JSONCPP_STRING is not efficient, but this should be rare.
+ // (Note: forward slashes are *not* rare, but I am not escaping them.)
+ JSONCPP_STRING::size_type maxsize = length * 2 + 3; // allescaped+quotes+NULL
+ JSONCPP_STRING result;
+ result.reserve(maxsize); // to avoid lots of mallocs
+ result += "\"";
+ char const *end = value + length;
+ for (const char *c = value; c != end; ++c)
+ {
+ switch (*c)
+ {
+ case '\"':
+ result += "\\\"";
+ break;
+ case '\\':
+ result += "\\\\";
+ break;
+ case '\b':
+ result += "\\b";
+ break;
+ case '\f':
+ result += "\\f";
+ break;
+ case '\n':
+ result += "\\n";
+ break;
+ case '\r':
+ result += "\\r";
+ break;
+ case '\t':
+ result += "\\t";
+ break;
+ // case '/':
+ // Even though \/ is considered a legal escape in JSON, a bare
+ // slash is also legal, so I see no reason to escape it.
+ // (I hope I am not misunderstanding something.)
+ // blep notes: actually escaping \/ may be useful in javascript to avoid </
+ // sequence.
+ // Should add a flag to allow this compatibility mode and prevent this
+ // sequence from occurring.
+ default:
+ if ((isControlCharacter(*c)) || (*c == 0))
+ {
+ JSONCPP_OSTRINGSTREAM oss;
+ oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4)
+ << static_cast<int>(*c);
+ result += oss.str();
+ }
+ else
+ {
+ result += *c;
+ }
+ break;
+ }
+ }
+ result += "\"";
+ return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer() {}
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+ : yamlCompatiblityEnabled_(false), dropNullPlaceholders_(false), omitEndingLineFeed_(false)
+{
+}
+
+void FastWriter::enableYAMLCompatibility() { yamlCompatiblityEnabled_ = true; }
+
+void FastWriter::dropNullPlaceholders() { dropNullPlaceholders_ = true; }
+
+void FastWriter::omitEndingLineFeed() { omitEndingLineFeed_ = true; }
+
+JSONCPP_STRING FastWriter::write(const Value &root)
+{
+ document_ = "";
+ writeValue(root);
+ if (!omitEndingLineFeed_)
+ document_ += "\n";
+ return document_;
+}
+
+void FastWriter::writeValue(const Value &value)
+{
+ switch (value.type())
+ {
+ case nullValue:
+ if (!dropNullPlaceholders_)
+ document_ += "null";
+ break;
+ case intValue:
+ document_ += valueToString(value.asLargestInt());
+ break;
+ case uintValue:
+ document_ += valueToString(value.asLargestUInt());
+ break;
+ case realValue:
+ document_ += valueToString(value.asDouble());
+ break;
+ case stringValue:
+ {
+ // Is NULL possible for value.string_? No.
+ char const *str;
+ char const *end;
+ bool ok = value.getString(&str, &end);
+ if (ok)
+ document_ += valueToQuotedStringN(str, static_cast<unsigned>(end - str));
+ break;
+ }
+ case booleanValue:
+ document_ += valueToString(value.asBool());
+ break;
+ case arrayValue:
+ {
+ document_ += '[';
+ ArrayIndex size = value.size();
+ for (ArrayIndex index = 0; index < size; ++index)
+ {
+ if (index > 0)
+ document_ += ',';
+ writeValue(value[index]);
+ }
+ document_ += ']';
+ }
+ break;
+ case objectValue:
+ {
+ Value::Members members(value.getMemberNames());
+ document_ += '{';
+ for (Value::Members::iterator it = members.begin(); it != members.end(); ++it)
+ {
+ const JSONCPP_STRING &name = *it;
+ if (it != members.begin())
+ document_ += ',';
+ document_ += valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length()));
+ document_ += yamlCompatiblityEnabled_ ? ": " : ":";
+ writeValue(value[name]);
+ }
+ document_ += '}';
+ }
+ break;
+ }
+}
+
+// Class StyledWriter
+// //////////////////////////////////////////////////////////////////
+
+StyledWriter::StyledWriter() : rightMargin_(74), indentSize_(3), addChildValues_() {}
+
+JSONCPP_STRING StyledWriter::write(const Value &root)
+{
+ document_ = "";
+ addChildValues_ = false;
+ indentString_ = "";
+ writeCommentBeforeValue(root);
+ writeValue(root);
+ writeCommentAfterValueOnSameLine(root);
+ document_ += "\n";
+ return document_;
+}
+
+void StyledWriter::writeValue(const Value &value)
+{
+ switch (value.type())
+ {
+ case nullValue:
+ pushValue("null");
+ break;
+ case intValue:
+ pushValue(valueToString(value.asLargestInt()));
+ break;
+ case uintValue:
+ pushValue(valueToString(value.asLargestUInt()));
+ break;
+ case realValue:
+ pushValue(valueToString(value.asDouble()));
+ break;
+ case stringValue:
+ {
+ // Is NULL possible for value.string_? No.
+ char const *str;
+ char const *end;
+ bool ok = value.getString(&str, &end);
+ if (ok)
+ pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end - str)));
+ else
+ pushValue("");
+ break;
+ }
+ case booleanValue:
+ pushValue(valueToString(value.asBool()));
+ break;
+ case arrayValue:
+ writeArrayValue(value);
+ break;
+ case objectValue:
+ {
+ Value::Members members(value.getMemberNames());
+ if (members.empty())
+ pushValue("{}");
+ else
+ {
+ writeWithIndent("{");
+ indent();
+ Value::Members::iterator it = members.begin();
+ for (;;)
+ {
+ const JSONCPP_STRING &name = *it;
+ const Value &childValue = value[name];
+ writeCommentBeforeValue(childValue);
+ writeWithIndent(valueToQuotedString(name.c_str()));
+ document_ += " : ";
+ writeValue(childValue);
+ if (++it == members.end())
+ {
+ writeCommentAfterValueOnSameLine(childValue);
+ break;
+ }
+ document_ += ',';
+ writeCommentAfterValueOnSameLine(childValue);
+ }
+ unindent();
+ writeWithIndent("}");
+ }
+ }
+ break;
+ }
+}
+
+void StyledWriter::writeArrayValue(const Value &value)
+{
+ unsigned size = value.size();
+ if (size == 0)
+ pushValue("[]");
+ else
+ {
+ bool isArrayMultiLine = isMultineArray(value);
+ if (isArrayMultiLine)
+ {
+ writeWithIndent("[");
+ indent();
+ bool hasChildValue = !childValues_.empty();
+ unsigned index = 0;
+ for (;;)
+ {
+ const Value &childValue = value[index];
+ writeCommentBeforeValue(childValue);
+ if (hasChildValue)
+ writeWithIndent(childValues_[index]);
+ else
+ {
+ writeIndent();
+ writeValue(childValue);
+ }
+ if (++index == size)
+ {
+ writeCommentAfterValueOnSameLine(childValue);
+ break;
+ }
+ document_ += ',';
+ writeCommentAfterValueOnSameLine(childValue);
+ }
+ unindent();
+ writeWithIndent("]");
+ }
+ else // output on a single line
+ {
+ assert(childValues_.size() == size);
+ document_ += "[ ";
+ for (unsigned index = 0; index < size; ++index)
+ {
+ if (index > 0)
+ document_ += ", ";
+ document_ += childValues_[index];
+ }
+ document_ += " ]";
+ }
+ }
+}
+
+bool StyledWriter::isMultineArray(const Value &value)
+{
+ ArrayIndex const size = value.size();
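+ // Heuristic: at roughly 3 characters per element, a long enough array cannot
+ // fit on a single line anyway.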
+ bool isMultiLine = size * 3 >= rightMargin_;
+ childValues_.clear();
+ for (ArrayIndex index = 0; index < size && !isMultiLine; ++index)
+ {
+ const Value &childValue = value[index];
+ isMultiLine = ((childValue.isArray() || childValue.isObject()) && childValue.size() > 0);
+ }
+ if (!isMultiLine) // check if line length > max line length
+ {
+ childValues_.reserve(size);
+ addChildValues_ = true;
+ ArrayIndex lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
+ for (ArrayIndex index = 0; index < size; ++index)
+ {
+ if (hasCommentForValue(value[index]))
+ {
+ isMultiLine = true;
+ }
+ writeValue(value[index]);
+ lineLength += static_cast<ArrayIndex>(childValues_[index].length());
+ }
+ addChildValues_ = false;
+ isMultiLine = isMultiLine || lineLength >= rightMargin_;
+ }
+ return isMultiLine;
+}
+
+void StyledWriter::pushValue(const JSONCPP_STRING &value)
+{
+ if (addChildValues_)
+ childValues_.push_back(value);
+ else
+ document_ += value;
+}
+
+void StyledWriter::writeIndent()
+{
+ if (!document_.empty())
+ {
+ char last = document_[document_.length() - 1];
+ if (last == ' ') // already indented
+ return;
+ if (last != '\n') // Comments may add new-line
+ document_ += '\n';
+ }
+ document_ += indentString_;
+}
+
+void StyledWriter::writeWithIndent(const JSONCPP_STRING &value)
+{
+ writeIndent();
+ document_ += value;
+}
+
+void StyledWriter::indent() { indentString_ += JSONCPP_STRING(indentSize_, ' '); }
+
+void StyledWriter::unindent()
+{
+ assert(indentString_.size() >= indentSize_);
+ indentString_.resize(indentString_.size() - indentSize_);
+}
+
+void StyledWriter::writeCommentBeforeValue(const Value &root)
+{
+ if (!root.hasComment(commentBefore))
+ return;
+
+ document_ += "\n";
+ writeIndent();
+ const JSONCPP_STRING &comment = root.getComment(commentBefore);
+ JSONCPP_STRING::const_iterator iter = comment.begin();
+ while (iter != comment.end())
+ {
+ document_ += *iter;
+ if (*iter == '\n' && (iter != comment.end() && *(iter + 1) == '/'))
+ writeIndent();
+ ++iter;
+ }
+
+ // Comments are stripped of trailing newlines, so add one here
+ document_ += "\n";
+}
+
+void StyledWriter::writeCommentAfterValueOnSameLine(const Value &root)
+{
+ if (root.hasComment(commentAfterOnSameLine))
+ document_ += " " + root.getComment(commentAfterOnSameLine);
+
+ if (root.hasComment(commentAfter))
+ {
+ document_ += "\n";
+ document_ += root.getComment(commentAfter);
+ document_ += "\n";
+ }
+}
+
+bool StyledWriter::hasCommentForValue(const Value &value)
+{
+ return value.hasComment(commentBefore) || value.hasComment(commentAfterOnSameLine) ||
+ value.hasComment(commentAfter);
+}
+
+// Class StyledStreamWriter
+// //////////////////////////////////////////////////////////////////
+
+StyledStreamWriter::StyledStreamWriter(JSONCPP_STRING indentation)
+ : document_(NULL), rightMargin_(74), indentation_(indentation), addChildValues_()
+{
+}
+
+void StyledStreamWriter::write(JSONCPP_OSTREAM &out, const Value &root)
+{
+ document_ = &out;
+ addChildValues_ = false;
+ indentString_ = "";
+ indented_ = true;
+ writeCommentBeforeValue(root);
+ if (!indented_)
+ writeIndent();
+ indented_ = true;
+ writeValue(root);
+ writeCommentAfterValueOnSameLine(root);
+ *document_ << "\n";
+ document_ = NULL; // Forget the stream, for safety.
+}
+
+void StyledStreamWriter::writeValue(const Value &value)
+{
+ switch (value.type())
+ {
+ case nullValue:
+ pushValue("null");
+ break;
+ case intValue:
+ pushValue(valueToString(value.asLargestInt()));
+ break;
+ case uintValue:
+ pushValue(valueToString(value.asLargestUInt()));
+ break;
+ case realValue:
+ pushValue(valueToString(value.asDouble()));
+ break;
+ case stringValue:
+ {
+ // Is NULL possible for value.string_? No.
+ char const *str;
+ char const *end;
+ bool ok = value.getString(&str, &end);
+ if (ok)
+ pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end - str)));
+ else
+ pushValue("");
+ break;
+ }
+ case booleanValue:
+ pushValue(valueToString(value.asBool()));
+ break;
+ case arrayValue:
+ writeArrayValue(value);
+ break;
+ case objectValue:
+ {
+ Value::Members members(value.getMemberNames());
+ if (members.empty())
+ pushValue("{}");
+ else
+ {
+ writeWithIndent("{");
+ indent();
+ Value::Members::iterator it = members.begin();
+ for (;;)
+ {
+ const JSONCPP_STRING &name = *it;
+ const Value &childValue = value[name];
+ writeCommentBeforeValue(childValue);
+ writeWithIndent(valueToQuotedString(name.c_str()));
+ *document_ << " : ";
+ writeValue(childValue);
+ if (++it == members.end())
+ {
+ writeCommentAfterValueOnSameLine(childValue);
+ break;
+ }
+ *document_ << ",";
+ writeCommentAfterValueOnSameLine(childValue);
+ }
+ unindent();
+ writeWithIndent("}");
+ }
+ }
+ break;
+ }
+}
+
+void StyledStreamWriter::writeArrayValue(const Value &value)
+{
+ unsigned size = value.size();
+ if (size == 0)
+ pushValue("[]");
+ else
+ {
+ bool isArrayMultiLine = isMultineArray(value);
+ if (isArrayMultiLine)
+ {
+ writeWithIndent("[");
+ indent();
+ bool hasChildValue = !childValues_.empty();
+ unsigned index = 0;
+ for (;;)
+ {
+ const Value &childValue = value[index];
+ writeCommentBeforeValue(childValue);
+ if (hasChildValue)
+ writeWithIndent(childValues_[index]);
+ else
+ {
+ if (!indented_)
+ writeIndent();
+ indented_ = true;
+ writeValue(childValue);
+ indented_ = false;
+ }
+ if (++index == size)
+ {
+ writeCommentAfterValueOnSameLine(childValue);
+ break;
+ }
+ *document_ << ",";
+ writeCommentAfterValueOnSameLine(childValue);
+ }
+ unindent();
+ writeWithIndent("]");
+ }
+ else // output on a single line
+ {
+ assert(childValues_.size() == size);
+ *document_ << "[ ";
+ for (unsigned index = 0; index < size; ++index)
+ {
+ if (index > 0)
+ *document_ << ", ";
+ *document_ << childValues_[index];
+ }
+ *document_ << " ]";
+ }
+ }
+}
+
+bool StyledStreamWriter::isMultineArray(const Value &value)
+{
+ ArrayIndex const size = value.size();
+ bool isMultiLine = size * 3 >= rightMargin_;
+ childValues_.clear();
+ for (ArrayIndex index = 0; index < size && !isMultiLine; ++index)
+ {
+ const Value &childValue = value[index];
+ isMultiLine = ((childValue.isArray() || childValue.isObject()) && childValue.size() > 0);
+ }
+ if (!isMultiLine) // check if line length > max line length
+ {
+ childValues_.reserve(size);
+ addChildValues_ = true;
+ ArrayIndex lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
+ for (ArrayIndex index = 0; index < size; ++index)
+ {
+ if (hasCommentForValue(value[index]))
+ {
+ isMultiLine = true;
+ }
+ writeValue(value[index]);
+ lineLength += static_cast<ArrayIndex>(childValues_[index].length());
+ }
+ addChildValues_ = false;
+ isMultiLine = isMultiLine || lineLength >= rightMargin_;
+ }
+ return isMultiLine;
+}
+
+void StyledStreamWriter::pushValue(const JSONCPP_STRING &value)
+{
+ if (addChildValues_)
+ childValues_.push_back(value);
+ else
+ *document_ << value;
+}
+
+void StyledStreamWriter::writeIndent()
+{
+ // blep intended this to look at the so-far-written string
+ // to determine whether we are already indented, but
+ // with a stream we cannot do that. So we rely on some saved state.
+ // The caller checks indented_.
+ *document_ << '\n' << indentString_;
+}
+
+void StyledStreamWriter::writeWithIndent(const JSONCPP_STRING &value)
+{
+ if (!indented_)
+ writeIndent();
+ *document_ << value;
+ indented_ = false;
+}
+
+void StyledStreamWriter::indent() { indentString_ += indentation_; }
+
+void StyledStreamWriter::unindent()
+{
+ assert(indentString_.size() >= indentation_.size());
+ indentString_.resize(indentString_.size() - indentation_.size());
+}
+
+void StyledStreamWriter::writeCommentBeforeValue(const Value &root)
+{
+ if (!root.hasComment(commentBefore))
+ return;
+
+ if (!indented_)
+ writeIndent();
+ const JSONCPP_STRING &comment = root.getComment(commentBefore);
+ JSONCPP_STRING::const_iterator iter = comment.begin();
+ while (iter != comment.end())
+ {
+ *document_ << *iter;
+ if (*iter == '\n' && (iter != comment.end() && *(iter + 1) == '/'))
+ // writeIndent(); // would include newline
+ *document_ << indentString_;
+ ++iter;
+ }
+ indented_ = false;
+}
+
+void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value &root)
+{
+ if (root.hasComment(commentAfterOnSameLine))
+ *document_ << ' ' << root.getComment(commentAfterOnSameLine);
+
+ if (root.hasComment(commentAfter))
+ {
+ writeIndent();
+ *document_ << root.getComment(commentAfter);
+ }
+ indented_ = false;
+}
+
+bool StyledStreamWriter::hasCommentForValue(const Value &value)
+{
+ return value.hasComment(commentBefore) || value.hasComment(commentAfterOnSameLine) ||
+ value.hasComment(commentAfter);
+}
+
+//////////////////////////
+// BuiltStyledStreamWriter
+
+/// Scoped enums are not available until C++11.
+struct CommentStyle
+{
+ /// Decide whether to write comments.
+ enum Enum
+ {
+ None, ///< Drop all comments.
+ Most, ///< Recover odd behavior of previous versions (not implemented yet).
+ All ///< Keep all comments.
+ };
+};
+
+struct BuiltStyledStreamWriter : public StreamWriter
+{
+ BuiltStyledStreamWriter(JSONCPP_STRING const &indentation, CommentStyle::Enum cs,
+ JSONCPP_STRING const &colonSymbol, JSONCPP_STRING const &nullSymbol,
+ JSONCPP_STRING const &endingLineFeedSymbol, bool useSpecialFloats,
+ unsigned int precision);
+ int write(Value const &root, JSONCPP_OSTREAM *sout) JSONCPP_OVERRIDE;
+
+private:
+ void writeValue(Value const &value);
+ void writeArrayValue(Value const &value);
+ bool isMultineArray(Value const &value);
+ void pushValue(JSONCPP_STRING const &value);
+ void writeIndent();
+ void writeWithIndent(JSONCPP_STRING const &value);
+ void indent();
+ void unindent();
+ void writeCommentBeforeValue(Value const &root);
+ void writeCommentAfterValueOnSameLine(Value const &root);
+ static bool hasCommentForValue(const Value &value);
+
+ typedef std::vector<JSONCPP_STRING> ChildValues;
+
+ ChildValues childValues_;
+ JSONCPP_STRING indentString_;
+ unsigned int rightMargin_;
+ JSONCPP_STRING indentation_;
+ CommentStyle::Enum cs_;
+ JSONCPP_STRING colonSymbol_;
+ JSONCPP_STRING nullSymbol_;
+ JSONCPP_STRING endingLineFeedSymbol_;
+ bool addChildValues_ : 1;
+ bool indented_ : 1;
+ bool useSpecialFloats_ : 1;
+ unsigned int precision_;
+};
+BuiltStyledStreamWriter::BuiltStyledStreamWriter(JSONCPP_STRING const &indentation,
+ CommentStyle::Enum cs,
+ JSONCPP_STRING const &colonSymbol,
+ JSONCPP_STRING const &nullSymbol,
+ JSONCPP_STRING const &endingLineFeedSymbol,
+ bool useSpecialFloats, unsigned int precision)
+ : rightMargin_(74), indentation_(indentation), cs_(cs), colonSymbol_(colonSymbol),
+ nullSymbol_(nullSymbol), endingLineFeedSymbol_(endingLineFeedSymbol), addChildValues_(false),
+ indented_(false), useSpecialFloats_(useSpecialFloats), precision_(precision)
+{
+}
+int BuiltStyledStreamWriter::write(Value const &root, JSONCPP_OSTREAM *sout)
+{
+ sout_ = sout;
+ addChildValues_ = false;
+ indented_ = true;
+ indentString_ = "";
+ writeCommentBeforeValue(root);
+ if (!indented_)
+ writeIndent();
+ indented_ = true;
+ writeValue(root);
+ writeCommentAfterValueOnSameLine(root);
+ *sout_ << endingLineFeedSymbol_;
+ sout_ = NULL;
+ return 0;
+}
+void BuiltStyledStreamWriter::writeValue(Value const &value)
+{
+ switch (value.type())
+ {
+ case nullValue:
+ pushValue(nullSymbol_);
+ break;
+ case intValue:
+ pushValue(valueToString(value.asLargestInt()));
+ break;
+ case uintValue:
+ pushValue(valueToString(value.asLargestUInt()));
+ break;
+ case realValue:
+ pushValue(valueToString(value.asDouble(), useSpecialFloats_, precision_));
+ break;
+ case stringValue:
+ {
+ // Is NULL possible for value.string_? No.
+ char const *str;
+ char const *end;
+ bool ok = value.getString(&str, &end);
+ if (ok)
+ pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end - str)));
+ else
+ pushValue("");
+ break;
+ }
+ case booleanValue:
+ pushValue(valueToString(value.asBool()));
+ break;
+ case arrayValue:
+ writeArrayValue(value);
+ break;
+ case objectValue:
+ {
+ Value::Members members(value.getMemberNames());
+ if (members.empty())
+ pushValue("{}");
+ else
+ {
+ writeWithIndent("{");
+ indent();
+ Value::Members::iterator it = members.begin();
+ for (;;)
+ {
+ JSONCPP_STRING const &name = *it;
+ Value const &childValue = value[name];
+ writeCommentBeforeValue(childValue);
+ writeWithIndent(valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length())));
+ *sout_ << colonSymbol_;
+ writeValue(childValue);
+ if (++it == members.end())
+ {
+ writeCommentAfterValueOnSameLine(childValue);
+ break;
+ }
+ *sout_ << ",";
+ writeCommentAfterValueOnSameLine(childValue);
+ }
+ unindent();
+ writeWithIndent("}");
+ }
+ }
+ break;
+ }
+}
+
+void BuiltStyledStreamWriter::writeArrayValue(Value const &value)
+{
+ unsigned size = value.size();
+ if (size == 0)
+ pushValue("[]");
+ else
+ {
+ bool isMultiLine = (cs_ == CommentStyle::All) || isMultineArray(value);
+ if (isMultiLine)
+ {
+ writeWithIndent("[");
+ indent();
+ bool hasChildValue = !childValues_.empty();
+ unsigned index = 0;
+ for (;;)
+ {
+ Value const &childValue = value[index];
+ writeCommentBeforeValue(childValue);
+ if (hasChildValue)
+ writeWithIndent(childValues_[index]);
+ else
+ {
+ if (!indented_)
+ writeIndent();
+ indented_ = true;
+ writeValue(childValue);
+ indented_ = false;
+ }
+ if (++index == size)
+ {
+ writeCommentAfterValueOnSameLine(childValue);
+ break;
+ }
+ *sout_ << ",";
+ writeCommentAfterValueOnSameLine(childValue);
+ }
+ unindent();
+ writeWithIndent("]");
+ }
+ else // output on a single line
+ {
+ assert(childValues_.size() == size);
+ *sout_ << "[";
+ if (!indentation_.empty())
+ *sout_ << " ";
+ for (unsigned index = 0; index < size; ++index)
+ {
+ if (index > 0)
+ *sout_ << ((!indentation_.empty()) ? ", " : ",");
+ *sout_ << childValues_[index];
+ }
+ if (!indentation_.empty())
+ *sout_ << " ";
+ *sout_ << "]";
+ }
+ }
+}
+
+bool BuiltStyledStreamWriter::isMultineArray(Value const &value)
+{
+ ArrayIndex const size = value.size();
+ bool isMultiLine = size * 3 >= rightMargin_;
+ childValues_.clear();
+ for (ArrayIndex index = 0; index < size && !isMultiLine; ++index)
+ {
+ Value const &childValue = value[index];
+ isMultiLine = ((childValue.isArray() || childValue.isObject()) && childValue.size() > 0);
+ }
+ if (!isMultiLine) // check if line length > max line length
+ {
+ childValues_.reserve(size);
+ addChildValues_ = true;
+ ArrayIndex lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
+ for (ArrayIndex index = 0; index < size; ++index)
+ {
+ if (hasCommentForValue(value[index]))
+ {
+ isMultiLine = true;
+ }
+ writeValue(value[index]);
+ lineLength += static_cast<ArrayIndex>(childValues_[index].length());
+ }
+ addChildValues_ = false;
+ isMultiLine = isMultiLine || lineLength >= rightMargin_;
+ }
+ return isMultiLine;
+}
+
+void BuiltStyledStreamWriter::pushValue(JSONCPP_STRING const &value)
+{
+ if (addChildValues_)
+ childValues_.push_back(value);
+ else
+ *sout_ << value;
+}
+
+void BuiltStyledStreamWriter::writeIndent()
+{
+ // blep intended this to look at the so-far-written string
+ // to determine whether we are already indented, but
+ // with a stream we cannot do that. So we rely on some saved state.
+ // The caller checks indented_.
+
+ if (!indentation_.empty())
+ {
+ // In this case, drop newlines too.
+ *sout_ << '\n' << indentString_;
+ }
+}
+
+void BuiltStyledStreamWriter::writeWithIndent(JSONCPP_STRING const &value)
+{
+ if (!indented_)
+ writeIndent();
+ *sout_ << value;
+ indented_ = false;
+}
+
+void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; }
+
+void BuiltStyledStreamWriter::unindent()
+{
+ assert(indentString_.size() >= indentation_.size());
+ indentString_.resize(indentString_.size() - indentation_.size());
+}
+
+void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const &root)
+{
+ if (cs_ == CommentStyle::None)
+ return;
+ if (!root.hasComment(commentBefore))
+ return;
+
+ if (!indented_)
+ writeIndent();
+ const JSONCPP_STRING &comment = root.getComment(commentBefore);
+ JSONCPP_STRING::const_iterator iter = comment.begin();
+ while (iter != comment.end())
+ {
+ *sout_ << *iter;
+ if (*iter == '\n' && (iter != comment.end() && *(iter + 1) == '/'))
+ // writeIndent(); // would write extra newline
+ *sout_ << indentString_;
+ ++iter;
+ }
+ indented_ = false;
+}
+
+void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const &root)
+{
+ if (cs_ == CommentStyle::None)
+ return;
+ if (root.hasComment(commentAfterOnSameLine))
+ *sout_ << " " + root.getComment(commentAfterOnSameLine);
+
+ if (root.hasComment(commentAfter))
+ {
+ writeIndent();
+ *sout_ << root.getComment(commentAfter);
+ }
+}
+
+// static
+bool BuiltStyledStreamWriter::hasCommentForValue(const Value &value)
+{
+ return value.hasComment(commentBefore) || value.hasComment(commentAfterOnSameLine) ||
+ value.hasComment(commentAfter);
+}
+
+///////////////
+// StreamWriter
+
+StreamWriter::StreamWriter() : sout_(NULL) {}
+StreamWriter::~StreamWriter() {}
+StreamWriter::Factory::~Factory() {}
+StreamWriterBuilder::StreamWriterBuilder() { setDefaults(&settings_); }
+StreamWriterBuilder::~StreamWriterBuilder() {}
+StreamWriter *StreamWriterBuilder::newStreamWriter() const
+{
+ JSONCPP_STRING indentation = settings_["indentation"].asString();
+ JSONCPP_STRING cs_str = settings_["commentStyle"].asString();
+ bool eyc = settings_["enableYAMLCompatibility"].asBool();
+ bool dnp = settings_["dropNullPlaceholders"].asBool();
+ bool usf = settings_["useSpecialFloats"].asBool();
+ unsigned int pre = settings_["precision"].asUInt();
+ CommentStyle::Enum cs = CommentStyle::All;
+ if (cs_str == "All")
+ {
+ cs = CommentStyle::All;
+ }
+ else if (cs_str == "None")
+ {
+ cs = CommentStyle::None;
+ }
+ else
+ {
+ throwRuntimeError("commentStyle must be 'All' or 'None'");
+ }
+ JSONCPP_STRING colonSymbol = " : ";
+ if (eyc)
+ {
+ colonSymbol = ": ";
+ }
+ else if (indentation.empty())
+ {
+ colonSymbol = ":";
+ }
+ JSONCPP_STRING nullSymbol = "null";
+ if (dnp)
+ {
+ nullSymbol = "";
+ }
+ if (pre > 17)
+ pre = 17;
+ JSONCPP_STRING endingLineFeedSymbol = "";
+ return new BuiltStyledStreamWriter(indentation, cs, colonSymbol, nullSymbol, endingLineFeedSymbol,
+ usf, pre);
+}
+static void getValidWriterKeys(std::set<JSONCPP_STRING> *valid_keys)
+{
+ valid_keys->clear();
+ valid_keys->insert("indentation");
+ valid_keys->insert("commentStyle");
+ valid_keys->insert("enableYAMLCompatibility");
+ valid_keys->insert("dropNullPlaceholders");
+ valid_keys->insert("useSpecialFloats");
+ valid_keys->insert("precision");
+}
+bool StreamWriterBuilder::validate(Json::Value *invalid) const
+{
+ Json::Value my_invalid;
+ if (!invalid)
+ invalid = &my_invalid; // so we do not need to test for NULL
+ Json::Value &inv = *invalid;
+ std::set<JSONCPP_STRING> valid_keys;
+ getValidWriterKeys(&valid_keys);
+ Value::Members keys = settings_.getMemberNames();
+ size_t n = keys.size();
+ for (size_t i = 0; i < n; ++i)
+ {
+ JSONCPP_STRING const &key = keys[i];
+ if (valid_keys.find(key) == valid_keys.end())
+ {
+ inv[key] = settings_[key];
+ }
+ }
+ return 0u == inv.size();
+}
+Value &StreamWriterBuilder::operator[](JSONCPP_STRING key) { return settings_[key]; }
+// static
+void StreamWriterBuilder::setDefaults(Json::Value *settings)
+{
+ //! [StreamWriterBuilderDefaults]
+ (*settings)["commentStyle"] = "All";
+ (*settings)["indentation"] = "\t";
+ (*settings)["enableYAMLCompatibility"] = false;
+ (*settings)["dropNullPlaceholders"] = false;
+ (*settings)["useSpecialFloats"] = false;
+ (*settings)["precision"] = 17;
+ //! [StreamWriterBuilderDefaults]
+}
+
+JSONCPP_STRING writeString(StreamWriter::Factory const &builder, Value const &root)
+{
+ JSONCPP_OSTRINGSTREAM sout;
+ StreamWriterPtr const writer(builder.newStreamWriter());
+ writer->write(root, &sout);
+ return sout.str();
+}
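+// Illustrative usage of the builder API defined above (a sketch, not part of
+// the original jsoncpp source); assumes a populated Json::Value named `root`:
+//
+//   Json::StreamWriterBuilder builder;
+//   builder["indentation"] = "  ";    // two-space indent instead of the default tab
+//   builder["commentStyle"] = "None"; // drop comments from the output
+//   JSONCPP_STRING text = Json::writeString(builder, root);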
+
+JSONCPP_OSTREAM &operator<<(JSONCPP_OSTREAM &sout, Value const &root)
+{
+ StreamWriterBuilder builder;
+ StreamWriterPtr const writer(builder.newStreamWriter());
+ writer->write(root, &sout);
+ return sout;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_writer.cpp
+// //////////////////////////////////////////////////////////////////////
diff --git a/runtimes/libs/misc/CMakeLists.txt b/runtimes/libs/misc/CMakeLists.txt
new file mode 100644
index 000000000..537c544dc
--- /dev/null
+++ b/runtimes/libs/misc/CMakeLists.txt
@@ -0,0 +1,14 @@
+# Library `nnfw_lib_misc`
+set(NNFW_UTILITY_SRCS src/tensor/Shape.cpp)
+list(APPEND NNFW_UTILITY_SRCS src/tensor/NonIncreasingStride.cpp)
+list(APPEND NNFW_UTILITY_SRCS src/tensor/IndexFormatter.cpp)
+list(APPEND NNFW_UTILITY_SRCS src/tensor/Comparator.cpp)
+
+add_library(nnfw_lib_misc STATIC ${NNFW_UTILITY_SRCS})
+target_include_directories(nnfw_lib_misc PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
+set_target_properties(nnfw_lib_misc PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_link_libraries(nnfw_lib_misc PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_misc PRIVATE nnfw_coverage)
+
+add_executable(nnfw_tensor_index_iterator "examples/tensor_index_iterator.cpp")
+target_link_libraries(nnfw_tensor_index_iterator nnfw_lib_misc)
diff --git a/runtimes/libs/misc/examples/tensor_index_iterator.cpp b/runtimes/libs/misc/examples/tensor_index_iterator.cpp
new file mode 100644
index 000000000..d94da9f49
--- /dev/null
+++ b/runtimes/libs/misc/examples/tensor_index_iterator.cpp
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "misc/tensor/IndexIterator.h"
+
+#include <array>
+
+#include <iostream>
+#include <algorithm>
+
+#include <cassert>
+
+void test_iterate(void)
+{
+ const nnfw::misc::tensor::Shape shape{3, 4, 7};
+
+ std::array<int, 3 * 4 * 7> array;
+
+ array.fill(0);
+
+ using nnfw::misc::tensor::iterate;
+ using nnfw::misc::tensor::Index;
+
+ iterate(shape) << [&](const Index &index) {
+ assert(index.rank() == shape.rank());
+
+ const uint32_t rank = index.rank();
+
+ uint32_t offset = index.at(0);
+
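+ // Row-major flattening: offset = ((i0 * d1 + i1) * d2 + i2) for shape {d0, d1, d2}.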
+ for (uint32_t axis = 1; axis < rank; ++axis)
+ {
+ offset *= shape.dim(axis);
+ offset += index.at(axis);
+ }
+
+ array[offset] += 1;
+ };
+
+ assert(std::all_of(array.begin(), array.end(), [](int num) { return num == 1; }));
+}
+
+int main(int argc, char **argv)
+{
+ test_iterate();
+
+ nnfw::misc::tensor::Shape shape{3, 4, 3, 4};
+
+ std::cout << "Iterate over tensor{3, 4, 3, 4}" << std::endl;
+
+ nnfw::misc::tensor::iterate(shape) << [](const nnfw::misc::tensor::Index &index) {
+ std::cout << "rank: " << index.rank() << std::endl;
+
+ for (uint32_t d = 0; d < index.rank(); ++d)
+ {
+ std::cout << " offset(" << d << ") = " << index.at(d) << std::endl;
+ }
+ };
+
+ return 0;
+}
diff --git a/runtimes/libs/misc/include/misc/EnvVar.h b/runtimes/libs/misc/include/misc/EnvVar.h
new file mode 100644
index 000000000..db28a3c7d
--- /dev/null
+++ b/runtimes/libs/misc/include/misc/EnvVar.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file EnvVar.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains nnfw::misc::EnvVar class
+ */
+
+#ifndef __NNFW_MISC_ENV_VAR__
+#define __NNFW_MISC_ENV_VAR__
+
+#include <algorithm>
+#include <array>
+#include <cstdlib>
+#include <string>
+
+namespace nnfw
+{
+namespace misc
+{
+/**
+ * @brief Class to access environment variable
+ */
+class EnvVar
+{
+public:
+ /**
+ * @brief Construct a new EnvVar object
+ * @param[in] key environment variable
+ */
+ EnvVar(const std::string &key)
+ {
+ const char *value = std::getenv(key.c_str());
+ if (value == nullptr)
+ {
+ // Treat an unset variable the same as an empty value
+ _value = "";
+ }
+ else
+ {
+ _value = value;
+ }
+ }
+
+ /**
+ * @brief Get environment variable of string type
+ * @param[in] def Default value of environment variable
+ * @return Default value passed as a parameter when the environment variable is not set,
+ * otherwise the value of the environment variable passed into the constructor
+ */
+ std::string asString(const std::string &def) const
+ {
+ if (_value.empty())
+ return def;
+ return _value;
+ }
+
+ /**
+ * @brief Get environment variable of boolean type
+ * @param[in] def Default value of environment variable
+ * @return Default value passed as a parameter when the environment variable is not set,
+ * otherwise the value of the environment variable passed into the constructor
+ */
+ bool asBool(bool def) const
+ {
+ if (_value.empty())
+ return def;
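+ // Note: matching is exact and case-sensitive, so lowercase values such as
+ // "off" or "false" are not in the list and therefore read as true.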
+ static const std::array<std::string, 5> false_list{"0", "OFF", "FALSE", "N", "NO"};
+ auto false_found = std::find(false_list.begin(), false_list.end(), _value);
+ return (false_found == false_list.end());
+ }
+
+ /**
+ * @brief Get environment variable of int type
+ * @param[in] def Default value of environment variable
+ * @return Default value passed as a parameter when the environment variable is not set,
+ * otherwise the value of the environment variable passed into the constructor
+ */
+ int asInt(int def) const
+ {
+ if (_value.empty())
+ return def;
+ return std::stoi(_value);
+ }
+
+ /**
+ * @brief Get environment variable of float type
+ * @param[in] def Default value of environment variable
+ * @return Default value passed as a parameter when the environment variable is not set,
+ * otherwise the value of the environment variable passed into the constructor
+ */
+ float asFloat(float def) const
+ {
+ if (_value.empty())
+ return def;
+ return std::stof(_value);
+ }
+
+private:
+ std::string _value;
+};
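+
+// Illustrative usage (a sketch; NNFW_TRACE is a hypothetical variable name):
+//   // false unless e.g. NNFW_TRACE=1 is set in the environment
+//   bool trace = nnfw::misc::EnvVar{"NNFW_TRACE"}.asBool(false);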
+
+} // namespace misc
+} // namespace nnfw
+
+#endif // __NNFW_MISC_ENV_VAR__
diff --git a/libs/misc/include/misc/benchmark.h b/runtimes/libs/misc/include/misc/benchmark.h
index fe5b97585..fe5b97585 100644
--- a/libs/misc/include/misc/benchmark.h
+++ b/runtimes/libs/misc/include/misc/benchmark.h
diff --git a/libs/misc/include/misc/feature/Index.h b/runtimes/libs/misc/include/misc/feature/Index.h
index a361d8dd2..a361d8dd2 100644
--- a/libs/misc/include/misc/feature/Index.h
+++ b/runtimes/libs/misc/include/misc/feature/Index.h
diff --git a/libs/misc/include/misc/feature/IndexIterator.h b/runtimes/libs/misc/include/misc/feature/IndexIterator.h
index 1cf675526..1cf675526 100644
--- a/libs/misc/include/misc/feature/IndexIterator.h
+++ b/runtimes/libs/misc/include/misc/feature/IndexIterator.h
diff --git a/libs/misc/include/misc/feature/Object.h b/runtimes/libs/misc/include/misc/feature/Object.h
index 7af0e28f4..7af0e28f4 100644
--- a/libs/misc/include/misc/feature/Object.h
+++ b/runtimes/libs/misc/include/misc/feature/Object.h
diff --git a/libs/misc/include/misc/feature/Reader.h b/runtimes/libs/misc/include/misc/feature/Reader.h
index b09209789..b09209789 100644
--- a/libs/misc/include/misc/feature/Reader.h
+++ b/runtimes/libs/misc/include/misc/feature/Reader.h
diff --git a/libs/misc/include/misc/feature/Shape.h b/runtimes/libs/misc/include/misc/feature/Shape.h
index 09881f58b..09881f58b 100644
--- a/libs/misc/include/misc/feature/Shape.h
+++ b/runtimes/libs/misc/include/misc/feature/Shape.h
diff --git a/libs/misc/include/misc/feature/TextFormatter.h b/runtimes/libs/misc/include/misc/feature/TextFormatter.h
index e053f1c61..e053f1c61 100644
--- a/libs/misc/include/misc/feature/TextFormatter.h
+++ b/runtimes/libs/misc/include/misc/feature/TextFormatter.h
diff --git a/libs/misc/include/misc/fp32.h b/runtimes/libs/misc/include/misc/fp32.h
index c310402ba..c310402ba 100644
--- a/libs/misc/include/misc/fp32.h
+++ b/runtimes/libs/misc/include/misc/fp32.h
diff --git a/libs/misc/include/misc/kernel/IndexIterator.h b/runtimes/libs/misc/include/misc/kernel/IndexIterator.h
index 59e0f0095..59e0f0095 100644
--- a/libs/misc/include/misc/kernel/IndexIterator.h
+++ b/runtimes/libs/misc/include/misc/kernel/IndexIterator.h
diff --git a/libs/misc/include/misc/kernel/Reader.h b/runtimes/libs/misc/include/misc/kernel/Reader.h
index 019c809ee..019c809ee 100644
--- a/libs/misc/include/misc/kernel/Reader.h
+++ b/runtimes/libs/misc/include/misc/kernel/Reader.h
diff --git a/libs/misc/include/misc/kernel/Shape.h b/runtimes/libs/misc/include/misc/kernel/Shape.h
index 27d6a8bf0..27d6a8bf0 100644
--- a/libs/misc/include/misc/kernel/Shape.h
+++ b/runtimes/libs/misc/include/misc/kernel/Shape.h
diff --git a/libs/misc/include/misc/matrix/IndexIterator.h b/runtimes/libs/misc/include/misc/matrix/IndexIterator.h
index 742ed3a65..742ed3a65 100644
--- a/libs/misc/include/misc/matrix/IndexIterator.h
+++ b/runtimes/libs/misc/include/misc/matrix/IndexIterator.h
diff --git a/libs/misc/include/misc/matrix/Reader.h b/runtimes/libs/misc/include/misc/matrix/Reader.h
index ea222c9d1..ea222c9d1 100644
--- a/libs/misc/include/misc/matrix/Reader.h
+++ b/runtimes/libs/misc/include/misc/matrix/Reader.h
diff --git a/libs/misc/include/misc/matrix/Shape.h b/runtimes/libs/misc/include/misc/matrix/Shape.h
index 8cbcc1e12..8cbcc1e12 100644
--- a/libs/misc/include/misc/matrix/Shape.h
+++ b/runtimes/libs/misc/include/misc/matrix/Shape.h
diff --git a/runtimes/libs/misc/include/misc/polymorphic_downcast.h b/runtimes/libs/misc/include/misc/polymorphic_downcast.h
new file mode 100644
index 000000000..ee885eb70
--- /dev/null
+++ b/runtimes/libs/misc/include/misc/polymorphic_downcast.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_MISC_POLYMORPHIC_DOWNCAST_H__
+#define __NNFW_MISC_POLYMORPHIC_DOWNCAST_H__
+
+#include <cassert>
+#include <memory>
+
+namespace nnfw
+{
+namespace misc
+{
+
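+// Checked downcast: debug builds assert that the dynamic type of x really is
+// DstType (i.e. dynamic_cast agrees with the input), while release builds
+// reduce to a plain static_cast with no runtime cost.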
+template <typename DstType, typename SrcType> inline DstType polymorphic_downcast(SrcType *x)
+{
+ assert(dynamic_cast<DstType>(x) == x);
+ return static_cast<DstType>(x);
+}
+
+template <typename DstType, typename SrcType> inline DstType polymorphic_downcast(SrcType &x)
+{
+ assert(std::addressof(dynamic_cast<DstType>(x)) == std::addressof(x));
+ return static_cast<DstType>(x);
+}
+
+} // namespace misc
+} // namespace nnfw
+
+#endif // __NNFW_MISC_POLYMORPHIC_DOWNCAST_H__
diff --git a/runtimes/libs/misc/include/misc/string_helpers.h b/runtimes/libs/misc/include/misc/string_helpers.h
new file mode 100644
index 000000000..57d0006b0
--- /dev/null
+++ b/runtimes/libs/misc/include/misc/string_helpers.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file string_helpers.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains helper functions for std::string
+ */
+
+#include <string>
+#include <sstream>
+#include <vector>
+
+namespace nnfw
+{
+namespace misc
+{
+
+inline std::vector<std::string> split(const std::string &s, char delim)
+{
+ std::stringstream ss(s);
+ std::string item;
+ std::vector<std::string> elems;
+ while (std::getline(ss, item, delim))
+ {
+ elems.push_back(std::move(item));
+ }
+ return elems;
+}
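+// For example (illustrative): split("1,3,224", ',') yields {"1", "3", "224"}.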
+
+} // namespace misc
+} // namespace nnfw
diff --git a/libs/misc/include/misc/tensor/Comparator.h b/runtimes/libs/misc/include/misc/tensor/Comparator.h
index 80f53043c..80f53043c 100644
--- a/libs/misc/include/misc/tensor/Comparator.h
+++ b/runtimes/libs/misc/include/misc/tensor/Comparator.h
diff --git a/libs/misc/include/misc/tensor/Diff.h b/runtimes/libs/misc/include/misc/tensor/Diff.h
index c41a97987..c41a97987 100644
--- a/libs/misc/include/misc/tensor/Diff.h
+++ b/runtimes/libs/misc/include/misc/tensor/Diff.h
diff --git a/runtimes/libs/misc/include/misc/tensor/Index.h b/runtimes/libs/misc/include/misc/tensor/Index.h
new file mode 100644
index 000000000..a633b4ce0
--- /dev/null
+++ b/runtimes/libs/misc/include/misc/tensor/Index.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Index.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains nnfw::misc::tensor::Index struct
+ */
+#ifndef __NNFW_MISC_TENSOR_INDEX_H__
+#define __NNFW_MISC_TENSOR_INDEX_H__
+
+#include <cstdint>
+#include <cstddef>
+
+#include <vector>
+#include <initializer_list>
+
+namespace nnfw
+{
+namespace misc
+{
+namespace tensor
+{
+
+/**
+ * @brief Struct to represent index of each dimension of a tensor
+ */
+struct Index
+{
+public:
+ /**
+ * @brief Construct a new @c Index object
+ * @param[in] rank Rank of a tensor
+ */
+ Index(uint32_t rank) { _offsets.resize(rank); }
+
+public:
+ /**
+ * @brief Construct a new @c Index object
+ * @param[in] offsets Offset of each dimension, given as @c std::initializer_list<int32_t>
+ */
+ Index(std::initializer_list<int32_t> offsets) : _offsets{offsets}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Get the rank
+ * @return Rank that this @c Index object can handle
+ * @note We can use static_cast\n
+ * because the size of _offsets is decided by the constructor's uint32_t argument
+ */
+ uint32_t rank(void) const { return static_cast<uint32_t>(_offsets.size()); }
+
+public:
+ /**
+ * @brief Get the index of the n'th dimension
+ * @param[in] n Dimension
+ * @return Index of the n'th dimension
+ */
+ int32_t at(uint32_t n) const { return _offsets.at(n); }
+
+ /**
+ * @brief Get a reference to the index of the n'th dimension
+ * @param[in] n Dimension
+ * @return Reference to the index of the n'th dimension
+ */
+ int32_t &at(uint32_t n) { return _offsets.at(n); }
+
+private:
+ std::vector<int32_t> _offsets;
+};
+
+/**
+ * @brief Copy an @c Index with reversed order
+ * @param[in] origin @c Index object to copy
+ * @return an @c Index object with reversed order
+ * @note This is used to convert NNAPI tensor index to ARM tensor index or vice versa
+ */
+inline static Index copy_reverse(const Index &origin)
+{
+ uint32_t rank = origin.rank();
+ Index target(rank);
+ for (uint32_t i = 0; i < rank; i++)
+ target.at(i) = origin.at(rank - 1 - i);
+ return target;
+}
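+// For example (illustrative): given Index{1, 2, 3}, copy_reverse returns
+// Index{3, 2, 1}.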
+
+} // namespace tensor
+} // namespace misc
+} // namespace nnfw
+
+#endif // __NNFW_MISC_TENSOR_INDEX_H__
diff --git a/runtimes/libs/misc/include/misc/tensor/IndexEnumerator.h b/runtimes/libs/misc/include/misc/tensor/IndexEnumerator.h
new file mode 100644
index 000000000..6ce3add77
--- /dev/null
+++ b/runtimes/libs/misc/include/misc/tensor/IndexEnumerator.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file IndexEnumerator.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains nnfw::misc::tensor::IndexEnumerator class
+ */
+
+#ifndef __NNFW_MISC_TENSOR_INDEX_ENUMERATOR_H__
+#define __NNFW_MISC_TENSOR_INDEX_ENUMERATOR_H__
+
+#include "misc/tensor/Shape.h"
+#include "misc/tensor/Index.h"
+
+namespace nnfw
+{
+namespace misc
+{
+namespace tensor
+{
+/**
+ * @brief Class to enumerate the indices of a tensor
+ *
+ */
+class IndexEnumerator
+{
+public:
+ /**
+ * @brief Construct a new @c IndexEnumerator object
+ * @param[in] shape Shape of the tensor whose indices will be enumerated
+ */
+ explicit IndexEnumerator(const Shape &shape) : _shape(shape), _cursor(0), _index(shape.rank())
+ {
+ const uint32_t rank = _shape.rank();
+
+ for (uint32_t axis = 0; axis < rank; ++axis)
+ {
+ _index.at(axis) = 0;
+ }
+
+ for (_cursor = 0; _cursor < rank; ++_cursor)
+ {
+ if (_index.at(_cursor) < _shape.dim(_cursor))
+ {
+ break;
+ }
+ }
+ }
+
+public:
+ /**
+ * @brief Prevent construction of an @c IndexEnumerator by move (deleted move constructor)
+ */
+ IndexEnumerator(IndexEnumerator &&) = delete;
+ /**
+ * @brief Prevent copy constructor
+ */
+ IndexEnumerator(const IndexEnumerator &) = delete;
+
+public:
+ /**
+ * @brief Check if more enumeration is available
+ * @return @c true if the current index is valid and @c advance() may be called, otherwise @c false
+ */
+ bool valid(void) const { return _cursor < _shape.rank(); }
+
+public:
+ /**
+ * @brief Get the current index to enumerate
+ * @return Current index
+ */
+ const Index &curr(void) const { return _index; }
+
+public:
+ /**
+ * @brief Advance index by +1
+ */
+ void advance(void)
+ {
+ const uint32_t rank = _shape.rank();
+
+ // Find axis to be updated
+ while ((_cursor < rank) && !(_index.at(_cursor) + 1 < _shape.dim(_cursor)))
+ {
+ ++_cursor;
+ }
+
+ if (_cursor == rank)
+ {
+ return;
+ }
+
+ // Update index
+ _index.at(_cursor) += 1;
+
+ for (uint32_t axis = 0; axis < _cursor; ++axis)
+ {
+ _index.at(axis) = 0;
+ }
+
+ // Update cursor
+ _cursor = 0;
+ }
+
+public:
+ const Shape _shape; //!< Shape to enumerate
+
+private:
+ uint32_t _cursor;
+ Index _index;
+};
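+
+// Illustrative usage (a sketch, not part of the original header):
+//   Shape shape{2, 3};
+//   for (IndexEnumerator e{shape}; e.valid(); e.advance())
+//   {
+//     const Index &ind = e.curr(); // axis 0 varies fastest: {0,0}, {1,0}, {0,1}, ...
+//   }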
+
+} // namespace tensor
+} // namespace misc
+} // namespace nnfw
+
+#endif // __NNFW_MISC_TENSOR_INDEX_ENUMERATOR_H__
diff --git a/libs/misc/include/misc/tensor/IndexFormatter.h b/runtimes/libs/misc/include/misc/tensor/IndexFormatter.h
index 7ae34eec1..7ae34eec1 100644
--- a/libs/misc/include/misc/tensor/IndexFormatter.h
+++ b/runtimes/libs/misc/include/misc/tensor/IndexFormatter.h
diff --git a/libs/misc/include/misc/tensor/IndexIterator.h b/runtimes/libs/misc/include/misc/tensor/IndexIterator.h
index f6428e19e..f6428e19e 100644
--- a/libs/misc/include/misc/tensor/IndexIterator.h
+++ b/runtimes/libs/misc/include/misc/tensor/IndexIterator.h
diff --git a/libs/misc/include/misc/tensor/NonIncreasingStride.h b/runtimes/libs/misc/include/misc/tensor/NonIncreasingStride.h
index e7ad0857b..e7ad0857b 100644
--- a/libs/misc/include/misc/tensor/NonIncreasingStride.h
+++ b/runtimes/libs/misc/include/misc/tensor/NonIncreasingStride.h
diff --git a/libs/misc/include/misc/tensor/Object.h b/runtimes/libs/misc/include/misc/tensor/Object.h
index 83fbc0bd1..83fbc0bd1 100644
--- a/libs/misc/include/misc/tensor/Object.h
+++ b/runtimes/libs/misc/include/misc/tensor/Object.h
diff --git a/libs/misc/include/misc/tensor/Reader.h b/runtimes/libs/misc/include/misc/tensor/Reader.h
index 9175a913e..9175a913e 100644
--- a/libs/misc/include/misc/tensor/Reader.h
+++ b/runtimes/libs/misc/include/misc/tensor/Reader.h
diff --git a/runtimes/libs/misc/include/misc/tensor/Shape.h b/runtimes/libs/misc/include/misc/tensor/Shape.h
new file mode 100644
index 000000000..bd0eac0a5
--- /dev/null
+++ b/runtimes/libs/misc/include/misc/tensor/Shape.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Shape.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains nnfw::misc::tensor::Shape class
+ */
+
+#ifndef __NNFW_MISC_TENSOR_SHAPE_H__
+#define __NNFW_MISC_TENSOR_SHAPE_H__
+
+#include <cstdint>
+#include <cstddef>
+#include <deque>
+#include <initializer_list>
+#include <ostream>
+#include <string>
+#include <cassert>
+
+namespace nnfw
+{
+namespace misc
+{
+namespace tensor
+{
+
+/**
+ * @brief Class to represent shape of a tensor
+ */
+class Shape
+{
+public:
+ /**
+ * @brief Construct a new Shape object
+ * @param[in] rank Rank of a tensor
+ */
+ Shape(uint32_t rank) { _dimensions.resize(rank); }
+
+public:
+ /**
+ * @brief Construct a new Shape object
+ * @param[in] dimensions @c initializer_list<int32_t> of dimensions of tensor
+ */
+ Shape(const std::initializer_list<int32_t> &dimensions) : _dimensions{dimensions}
+ {
+ // Check for overflow: an initializer_list may contain more elements than uint32_t can represent
+ assert(dimensions.size() <= 0xFFFFFFFF);
+ }
+
+ /**
+ * @brief Construct a new Shape object
+ * @param[in] origin @c Shape object to copy
+ */
+ Shape(const Shape &origin) = default;
+
+public:
+ /**
+ * @brief Add dimension to the beginning
+ * @param[in] d dimension to add to the beginning
+ * @return N/A
+ */
+ void prepend(int32_t d) { _dimensions.emplace_front(d); }
+
+ /**
+ * @brief Add dimension to the back
+ * @param[in] d dimension to add to the back
+ * @return N/A
+ */
+ void append(int32_t d) { _dimensions.emplace_back(d); }
+
+public:
+ /**
+ * @brief Get the rank of this shape
+ * @return rank
+ * @note We can use static_cast\n
+ * because we don't support larger than max of uint32_t on constructor
+ */
+ uint32_t rank(void) const { return static_cast<uint32_t>(_dimensions.size()); }
+
+public:
+ /**
+ * @brief Get specific dimension
+ * @param[in] n Index of dimension
+ * @return n'th dimension
+ */
+ int32_t dim(uint32_t n) const { return _dimensions.at(n); }
+
+ /**
+ * @brief Get the reference of specific dimension
+ * @param[in] n Index of dimension
+ * @return Reference of n'th dimension
+ */
+ int32_t &dim(uint32_t n) { return _dimensions.at(n); }
+
+ const std::deque<int32_t> &dims() const { return _dimensions; }
+
+public:
+ /**
+ * @brief Get the number of elements specified by this shape
+ * @return The number of elements
+ */
+ uint64_t num_elements() const;
+
+private:
+ std::deque<int32_t> _dimensions;
+
+public:
+ /**
+ * @brief Get a @c Shape object after parsing string
+ * @param[in] s String of dimensions. The accepted format is numbers separated by commas.
+ * @return @c Shape object
+ */
+ static Shape from(const std::string &s);
+};
+
+/**
+ * @brief Check equality of two @c Shape
+ * @param[in] lhs First shape to compare
+ * @param[in] rhs Second shape to compare
+ * @return @c true if both shapes are equal, otherwise @c false
+ */
+bool operator==(const Shape &, const Shape &);
+
+/**
+ * @brief Send @c Shape to @c std::ostream
+ * @param[in] os @c std::ostream to process this @c Shape
+ * @param[in] shape @c Shape to send to @c ostream
+ * @return Reference of @c std::ostream
+ */
+std::ostream &operator<<(std::ostream &os, const Shape &shape);
+
+} // namespace tensor
+} // namespace misc
+} // namespace nnfw
+
+#endif // __NNFW_MISC_TENSOR_SHAPE_H__
diff --git a/libs/misc/include/misc/tensor/Zipper.h b/runtimes/libs/misc/include/misc/tensor/Zipper.h
index 8f0ec4ab6..8f0ec4ab6 100644
--- a/libs/misc/include/misc/tensor/Zipper.h
+++ b/runtimes/libs/misc/include/misc/tensor/Zipper.h
diff --git a/libs/misc/include/misc/vector.h b/runtimes/libs/misc/include/misc/vector.h
index 395b08912..395b08912 100644
--- a/libs/misc/include/misc/vector.h
+++ b/runtimes/libs/misc/include/misc/vector.h
diff --git a/libs/misc/include/misc/vector/Object.h b/runtimes/libs/misc/include/misc/vector/Object.h
index 65d4bc613..65d4bc613 100644
--- a/libs/misc/include/misc/vector/Object.h
+++ b/runtimes/libs/misc/include/misc/vector/Object.h
diff --git a/libs/misc/include/misc/vector/Reader.h b/runtimes/libs/misc/include/misc/vector/Reader.h
index eab4c427b..eab4c427b 100644
--- a/libs/misc/include/misc/vector/Reader.h
+++ b/runtimes/libs/misc/include/misc/vector/Reader.h
diff --git a/runtimes/libs/misc/src/tensor/Comparator.cpp b/runtimes/libs/misc/src/tensor/Comparator.cpp
new file mode 100644
index 000000000..e765e77b2
--- /dev/null
+++ b/runtimes/libs/misc/src/tensor/Comparator.cpp
@@ -0,0 +1,38 @@
+#include "misc/tensor/Comparator.h"
+#include "misc/tensor/Zipper.h"
+
+#include "misc/fp32.h"
+
+namespace nnfw
+{
+namespace misc
+{
+namespace tensor
+{
+
+std::vector<Diff<float>> Comparator::compare(const Shape &shape, const Reader<float> &expected,
+ const Reader<float> &obtained,
+ Observer *observer) const
+{
+ std::vector<Diff<float>> res;
+
+ zip(shape, expected, obtained) <<
+ [&](const Index &index, float expected_value, float obtained_value) {
+ if (!_compare_fn(expected_value, obtained_value))
+ {
+ res.emplace_back(index, expected_value, obtained_value);
+ }
+
+ // Notify the observer (if any), which may track e.g. the index of the maximum difference
+ if (observer != nullptr)
+ {
+ observer->notify(index, expected_value, obtained_value);
+ }
+ };
+
+ return res;
+}
+
+} // namespace tensor
+} // namespace misc
+} // namespace nnfw
diff --git a/libs/misc/src/tensor/IndexFormatter.cpp b/runtimes/libs/misc/src/tensor/IndexFormatter.cpp
index c949db7a8..c949db7a8 100644
--- a/libs/misc/src/tensor/IndexFormatter.cpp
+++ b/runtimes/libs/misc/src/tensor/IndexFormatter.cpp
diff --git a/libs/misc/src/tensor/NonIncreasingStride.cpp b/runtimes/libs/misc/src/tensor/NonIncreasingStride.cpp
index c51ad0324..c51ad0324 100644
--- a/libs/misc/src/tensor/NonIncreasingStride.cpp
+++ b/runtimes/libs/misc/src/tensor/NonIncreasingStride.cpp
diff --git a/runtimes/libs/misc/src/tensor/Shape.cpp b/runtimes/libs/misc/src/tensor/Shape.cpp
new file mode 100644
index 000000000..70d3bdfdb
--- /dev/null
+++ b/runtimes/libs/misc/src/tensor/Shape.cpp
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "misc/tensor/Shape.h"
+
+#include <cassert>
+#include <functional>
+#include <numeric>
+
+namespace nnfw
+{
+namespace misc
+{
+namespace tensor
+{
+
+bool operator==(const Shape &lhs, const Shape &rhs)
+{
+ if (lhs.rank() != rhs.rank())
+ {
+ return false;
+ }
+
+ for (uint32_t axis = 0; axis < lhs.rank(); ++axis)
+ {
+ if (lhs.dim(axis) != rhs.dim(axis))
+ {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+Shape Shape::from(const std::string &str)
+{
+ Shape shape(0);
+
+ bool pending = false;
+ int value = 0;
+
+ for (const char *cur = str.c_str(); true; ++cur)
+ {
+ if (*cur == ',' || *cur == '\0')
+ {
+ if (pending)
+ {
+ shape.append(value);
+ }
+
+ if (*cur == '\0')
+ {
+ break;
+ }
+
+ pending = false;
+ value = 0;
+ continue;
+ }
+
+ assert(*cur >= '0' && *cur <= '9');
+
+ pending = true;
+ value *= 10;
+ value += *cur - '0';
+ }
+
+ return shape;
+}
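+
+// NOTE Given the parsing loop above, empty entries are skipped rather than
+// rejected: Shape::from("1,,2") yields {1, 2}, and Shape::from("") yields a
+// rank-0 shape. Characters other than digits and ',' are rejected by the
+// assert, and only in debug builds.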
+
+uint64_t Shape::num_elements() const
+{
+ return std::accumulate(_dimensions.cbegin(), _dimensions.cend(), UINT64_C(1),
+ std::multiplies<uint64_t>());
+}
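+
+// NOTE std::accumulate starts from 1, so a rank-0 shape reports a single
+// element (the empty product): Shape::from("").num_elements() == 1.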
+
+std::ostream &operator<<(std::ostream &os, const Shape &shape)
+{
+ if (shape.rank() > 0)
+ {
+ os << shape.dim(0);
+
+ for (uint32_t axis = 1; axis < shape.rank(); ++axis)
+ {
+ os << "," << shape.dim(axis);
+ }
+ }
+
+ return os;
+}
+
+} // namespace tensor
+} // namespace misc
+} // namespace nnfw
diff --git a/runtimes/libs/nnapi/CMakeLists.txt b/runtimes/libs/nnapi/CMakeLists.txt
new file mode 100644
index 000000000..a5d9490d1
--- /dev/null
+++ b/runtimes/libs/nnapi/CMakeLists.txt
@@ -0,0 +1,3 @@
+add_subdirectories()
+
+add_library(nnfw_lib_nnapi ALIAS nnfw_lib_nnapi_1_2)
diff --git a/runtimes/libs/nnapi/v1.1/CMakeLists.txt b/runtimes/libs/nnapi/v1.1/CMakeLists.txt
new file mode 100644
index 000000000..1bc158f62
--- /dev/null
+++ b/runtimes/libs/nnapi/v1.1/CMakeLists.txt
@@ -0,0 +1,4 @@
+add_library(nnfw_lib_nnapi_1_1 INTERFACE)
+
+target_include_directories(nnfw_lib_nnapi_1_1 INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/include)
+target_link_libraries(nnfw_lib_nnapi_1_1 INTERFACE nnfw-header)
diff --git a/include/NeuralNetworksExShim.h b/runtimes/libs/nnapi/v1.1/include/NeuralNetworksExShim.h
index f684dab90..f684dab90 100644
--- a/include/NeuralNetworksExShim.h
+++ b/runtimes/libs/nnapi/v1.1/include/NeuralNetworksExShim.h
diff --git a/runtimes/libs/nnapi/v1.1/include/NeuralNetworksLoadHelpers.h b/runtimes/libs/nnapi/v1.1/include/NeuralNetworksLoadHelpers.h
new file mode 100644
index 000000000..201465f9c
--- /dev/null
+++ b/runtimes/libs/nnapi/v1.1/include/NeuralNetworksLoadHelpers.h
@@ -0,0 +1,141 @@
+/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This header is derived from part of the following file (in TensorFlow v1.12)
+// 'externals/tensorflow/tensorflow/contrib/lite/nnapi/NeuralNetworksShim.h'
+
+/**
+ * @file NeuralNetworksLoadHelpers.h
+ * @ingroup COM_AI_RUNTIME
+ * @brief This file contains functions to load NN API runtime library
+ */
+
+#ifndef __NEURAL_NETWORKS_LOAD_HELPER_H__
+#define __NEURAL_NETWORKS_LOAD_HELPER_H__
+
+#include <dlfcn.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+/**
+ * @brief Print log data
+ * @param[in] format Format string of @c printf
+ * @param[in] args Argument after format string. (Same with @c printf)
+ */
+#define NNAPI_LOG(format, ...) printf(format "\n", __VA_ARGS__);
+
+/**
+ * @brief Create a function pointer named @c fn after loading NN API library
+ * @param[in] name Name of a function
+ */
+#define LOAD_FUNCTION(name) \
+ static name##_fn fn = reinterpret_cast<name##_fn>(nnfw::loadFunction(#name));
+
+/**
+ * @brief Run @c fn function. @c fn is created by @ref LOAD_FUNCTION
+ * @param[in] args List of arguments for the function @c fn
+ */
+#define EXECUTE_FUNCTION(...) \
+ if (fn != nullptr) { \
+ fn(__VA_ARGS__); \
+ }
+
+/**
+ * @brief Run @c fn function. @c fn is created by @ref LOAD_FUNCTION
+ * @param[in] args List of arguments for the function @c fn
+ * @return the return value of @c fn
+ */
+#define EXECUTE_FUNCTION_RETURN(...) return fn != nullptr ? fn(__VA_ARGS__) : 0;
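+
+// Combined usage sketch (illustrative): a typical shim wrapper expands to
+//
+//   inline int ANeuralNetworksModel_finish(ANeuralNetworksModel *model)
+//   {
+//     LOAD_FUNCTION(ANeuralNetworksModel_finish); // resolve the symbol once
+//     EXECUTE_FUNCTION_RETURN(model);             // forward, or return 0 if unresolved
+//   }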
+
+namespace nnfw
+{
+
+/**
+ * @brief Load NN API library
+ * @param[in] name path of NN API library
+ * @return a symbol table handle of NN API library
+ */
+inline void* loadLibrary(const char* name) {
+ // TODO: change RTLD_LOCAL? Assumes there can be multiple instances of nn
+ // api RT
+ void* handle = nullptr;
+#if 1 //#ifdef __ANDROID__
+ handle = dlopen(name, RTLD_LAZY | RTLD_LOCAL);
+ if (handle == nullptr) {
+ NNAPI_LOG("nnapi error: unable to open library %s", name);
+ NNAPI_LOG(" %s", dlerror());
+ }
+#endif
+ return handle;
+}
+
+/**
+ * @brief Load libneuralnetworks.so and return handle of library
+ * @return a symbol table handle of NN API library
+ */
+inline void* getLibraryHandle() {
+ static void* handle = loadLibrary("libneuralnetworks.so");
+ return handle;
+}
+
+/**
+ * @brief Return function ptr in libneuralnetworks.so
+ * @param[in] name Name of function
+ * @return function pointer
+ */
+inline void* loadFunction(const char* name) {
+ void* fn = nullptr;
+ if (getLibraryHandle() != nullptr) {
+ fn = dlsym(getLibraryHandle(), name);
+ }
+ if (fn == nullptr) {
+ NNAPI_LOG("nnapi error: unable to open function %s", name);
+ NNAPI_LOG(" %s", dlerror());
+ abort();
+ }
+ else {
+#ifdef _GNU_SOURCE
+ Dl_info info;
+ if (dladdr(fn, &info))
+ {
+ NNAPI_LOG("nnapi function '%s' is loaded from '%s' ", name, info.dli_fname);
+ }
+ else
+ {
+ NNAPI_LOG("nnapi function '%s' is failed to load", name);
+ }
+
+#endif // _GNU_SOURCE
+ }
+ return fn;
+}
+
+/**
+ * @brief Check if libneuralnetworks.so can be loaded
+ * @return @c true if loading is successful, otherwise @c false.
+ */
+inline bool NNAPIExists() {
+ static bool nnapi_is_available = getLibraryHandle();
+ return nnapi_is_available;
+}
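+
+// Callers can use this as a cheap runtime guard (illustrative):
+//
+//   if (nnfw::NNAPIExists())
+//   {
+//     // safe to call the shim wrappers; otherwise loadFunction would abort()
+//   }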
+
+} // namespace nnfw
+
+#endif // __NEURAL_NETWORKS_LOAD_HELPER_H__
diff --git a/include/NeuralNetworksShim.h b/runtimes/libs/nnapi/v1.1/include/NeuralNetworksShim.h
index 60b16f766..60b16f766 100644
--- a/include/NeuralNetworksShim.h
+++ b/runtimes/libs/nnapi/v1.1/include/NeuralNetworksShim.h
diff --git a/runtimes/libs/nnapi/v1.2/CMakeLists.txt b/runtimes/libs/nnapi/v1.2/CMakeLists.txt
new file mode 100644
index 000000000..9aef6f1a8
--- /dev/null
+++ b/runtimes/libs/nnapi/v1.2/CMakeLists.txt
@@ -0,0 +1,4 @@
+add_library(nnfw_lib_nnapi_1_2 INTERFACE)
+
+target_include_directories(nnfw_lib_nnapi_1_2 INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/include)
+target_link_libraries(nnfw_lib_nnapi_1_2 INTERFACE nnfw-header)
diff --git a/runtimes/libs/nnapi/v1.2/include/NeuralNetworksExShim.h b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksExShim.h
new file mode 100644
index 000000000..855613241
--- /dev/null
+++ b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksExShim.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file NeuralNetworksExShim.h
+ * @brief This file contains an actual implementation of
+ * ANeuralNetworksModel_addOperationEx function
+ */
+
+#ifndef __NEURAL_NETWORKS_EX_SHIM_H__
+#define __NEURAL_NETWORKS_EX_SHIM_H__
+
+#include "NeuralNetworks.h"
+#include "NeuralNetworksEx.h"
+#include "NeuralNetworksLoadHelpers.h"
+
+typedef int (*ANeuralNetworksModel_addOperationEx_fn)(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationTypeEx type,
+ uint32_t inputCount, const uint32_t *inputs,
+ uint32_t outputCount,
+ const uint32_t *outputs);
+
+/**
+ * @brief Add an extended operation to a model.
+ *
+ * @param[in] model The model to be modified.
+ * @param[in] type The type of extended operation.
+ * @param[in] inputCount The number of entries in the inputs array.
+ * @param[in] inputs An array of indexes identifying each operand.
+ * @param[in] outputCount The number of entries in the outputs array.
+ * @param[in] outputs An array of indexes identifying each operand.
+ *
+ * @note The operands specified by inputs and outputs must have been
+ * previously added by calls to {@link ANeuralNetworksModel_addOperand}.\n
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish}
+ * has been called will return an error.\n
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+
+inline int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationTypeEx type,
+ uint32_t inputCount, const uint32_t *inputs,
+ uint32_t outputCount, const uint32_t *outputs)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_addOperationEx);
+ EXECUTE_FUNCTION_RETURN(model, type, inputCount, inputs, outputCount, outputs);
+}
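+
+// Usage sketch (illustrative; the operand indexes and the extended op type
+// are hypothetical):
+//
+//   const uint32_t inputs[] = {0, 1};
+//   const uint32_t outputs[] = {2};
+//   int rc = ANeuralNetworksModel_addOperationEx(model, some_ex_op_type,
+//                                                2, inputs, 1, outputs);
+//   // rc == ANEURALNETWORKS_NO_ERROR on success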
+
+#endif // __NEURAL_NETWORKS_EX_SHIM_H__
diff --git a/runtimes/libs/nnapi/v1.2/include/NeuralNetworksLoadHelpers.h b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksLoadHelpers.h
new file mode 100644
index 000000000..1c482b54c
--- /dev/null
+++ b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksLoadHelpers.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This header is derived from part of the following file (in TensorFlow v1.12)
+// 'externals/tensorflow/tensorflow/contrib/lite/nnapi/NeuralNetworksShim.h'
+
+/**
+ * @file NeuralNetworksLoadHelpers.h
+ * @brief This file contains functions to load NN API runtime library
+ */
+
+#ifndef __NEURAL_NETWORKS_LOAD_HELPER_H__
+#define __NEURAL_NETWORKS_LOAD_HELPER_H__
+
+#include <dlfcn.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+/**
+ * @brief Print log data
+ * @param[in] format Format string of @c printf
+ * @param[in] args Argument after format string. (Same with @c printf)
+ */
+#define NNAPI_LOG(format, ...) printf(format "\n", __VA_ARGS__);
+
+/**
+ * @brief Create a function pointer named @c fn after loading NN API library
+ * @param[in] name Name of a function
+ */
+#define LOAD_FUNCTION(name) \
+ static name##_fn fn = reinterpret_cast<name##_fn>(nnfw::loadFunction(#name));
+
+/**
+ * @brief Run @c fn function. @c fn is created by @ref LOAD_FUNCTION
+ * @param[in] args List of arguments for the function @c fn
+ */
+#define EXECUTE_FUNCTION(...) \
+ if (fn != nullptr) { \
+ fn(__VA_ARGS__); \
+ }
+
+/**
+ * @brief Run @c fn function. @c fn is created by @ref LOAD_FUNCTION
+ * @param[in] args List of arguments for the function @c fn
+ * @return the return value of @c fn
+ */
+#define EXECUTE_FUNCTION_RETURN(...) return fn != nullptr ? fn(__VA_ARGS__) : 0;
+
+namespace nnfw
+{
+
+/**
+ * @brief Load NN API library
+ * @param[in] name path of NN API library
+ * @return a symbol table handle of NN API library
+ */
+inline void* loadLibrary(const char* name) {
+ // TODO: change RTLD_LOCAL? Assumes there can be multiple instances of nn
+ // api RT
+ void* handle = nullptr;
+#if 1 //#ifdef __ANDROID__
+ handle = dlopen(name, RTLD_LAZY | RTLD_LOCAL);
+ if (handle == nullptr) {
+ NNAPI_LOG("nnapi error: unable to open library %s", name);
+ }
+#endif
+ return handle;
+}
+
+/**
+ * @brief Load libneuralnetworks.so and return handle of library
+ * @return a symbol table handle of NN API library
+ */
+inline void* getLibraryHandle() {
+ static void* handle = loadLibrary("libneuralnetworks.so");
+ return handle;
+}
+
+/**
+ * @brief Return function ptr in libneuralnetworks.so
+ * @param[in] name Name of function
+ * @return function pointer
+ */
+inline void* loadFunction(const char* name) {
+ void* fn = nullptr;
+ if (getLibraryHandle() != nullptr) {
+ fn = dlsym(getLibraryHandle(), name);
+ }
+ if (fn == nullptr) {
+ NNAPI_LOG("nnapi error: unable to open function %s", name);
+ abort();
+ }
+ else {
+#ifdef _GNU_SOURCE
+ Dl_info info;
+ if (dladdr(fn, &info))
+ {
+ NNAPI_LOG("nnapi function '%s' is loaded from '%s' ", name, info.dli_fname);
+ }
+ else
+ {
+ NNAPI_LOG("nnapi function '%s' is failed to load", name);
+ }
+#endif // _GNU_SOURCE
+ }
+ return fn;
+}
+
+/**
+ * @brief Check if libneuralnetworks.so can be loaded
+ * @return @c true if loading is successful, otherwise @c false.
+ */
+inline bool NNAPIExists() {
+ static bool nnapi_is_available = getLibraryHandle();
+ return nnapi_is_available;
+}
+
+} // namespace nnfw
+
+#endif // __NEURAL_NETWORKS_LOAD_HELPER_H__
diff --git a/runtimes/libs/nnapi/v1.2/include/NeuralNetworksShim.h b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksShim.h
new file mode 100644
index 000000000..80082383f
--- /dev/null
+++ b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksShim.h
@@ -0,0 +1,1136 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// NOTE This header is derived from part of the following file
+// https://github.com/tensorflow/tensorflow/blob/a59ad83d06abd38b5e142c41043db8886a92fca8/tensorflow/lite/nnapi/NeuralNetworksShim.h
+
+#ifndef __NEURAL_NETWORKS_SHIM_H__
+#define __NEURAL_NETWORKS_SHIM_H__
+
+#include "NeuralNetworksTypes.h"
+#include "NeuralNetworksLoadHelpers.h"
+
+// This interface is now deprecated. You should use nnapi_implementation
+// instead.
+
+// TODO(b/123017568): Update all current usages of this file.
+
+// NN api types based on NNAPI header file
+// https://developer.android.com/ndk/reference/group/neural-networks
+
+/**
+ * Creates a shared memory object from a file descriptor.
+ *
+ * The shared memory is backed by a file descriptor via mmap.
+ * See {@link ANeuralNetworksMemory} for a description on how to use
+ * this shared memory.
+ *
+ * @param size The requested size in bytes.
+ * Must not be larger than the file size.
+ * @param protect The desired memory protection for the mapping.
+ * It is either PROT_NONE or the bitwise OR of one or
+ * more of the following flags: PROT_READ, PROT_WRITE.
+ * @param fd The requested file descriptor.
+ * The file descriptor has to be mmap-able. The file
+ * descriptor will be duplicated.
+ * @param offset The offset to the beginning of the file of the area to map.
+ * The offset has to be aligned to a page size.
+ * @param memory The memory object to be created.
+ * Set to NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the request completed normally.
+ */
+inline int ANeuralNetworksMemory_createFromFd(size_t size, int protect, int fd, size_t offset,
+ ANeuralNetworksMemory **memory)
+{
+ LOAD_FUNCTION(ANeuralNetworksMemory_createFromFd);
+ EXECUTE_FUNCTION_RETURN(size, protect, fd, offset, memory);
+}
+
+/**
+ * Delete a memory object.
+ *
+ * Destroys the object used by the run time to keep track of the memory.
+ * This will free the underlying actual memory if no other code has open
+ * handles to this memory.
+ *
+ * @param memory The memory object to be freed.
+ */
+inline void ANeuralNetworksMemory_free(ANeuralNetworksMemory *memory)
+{
+ LOAD_FUNCTION(ANeuralNetworksMemory_free);
+ EXECUTE_FUNCTION(memory);
+}
+
+/**
+ * Create an empty {@link ANeuralNetworksModel}.
+ *
+ * <p>This only creates the object. Computation is performed once
+ * {@link ANeuralNetworksExecution_startCompute} is invoked.
+ *
+ * The model should be constructed with calls to
+ * {@link ANeuralNetworksModel_addOperation} and
+ * {@link ANeuralNetworksModel_addOperand}
+ *
+ * <p>{@link ANeuralNetworksModel_finish} should be called once the model
+ * has been fully constructed.</p>
+ *
+ * <p>{@link ANeuralNetworksModel_free} should be called once the model
+ * is no longer needed.</p>
+ *
+ * @param model The {@link ANeuralNetworksModel} to be created.
+ * Set to NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_create(ANeuralNetworksModel **model)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_create);
+ EXECUTE_FUNCTION_RETURN(model);
+}
+
+/**
+ * Destroy a model.
+ *
+ * The model need not have been finished by a call to
+ * {@link ANeuralNetworksModel_finish}.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @param model The model to be destroyed. Passing NULL is acceptable and
+ * results in no operation.
+ */
+inline void ANeuralNetworksModel_free(ANeuralNetworksModel *model)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_free);
+ EXECUTE_FUNCTION(model);
+}
+
+/**
+ * Indicate that we have finished modifying a model. Required before
+ * calling {@link ANeuralNetworksCompilation_create}.
+ *
+ * An application is responsible for ensuring that no other thread uses
+ * the model at the same time.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @param model The model to be finished.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_finish(ANeuralNetworksModel *model)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_finish);
+ EXECUTE_FUNCTION_RETURN(model);
+}
+
+/**
+ * Add an operand to a model.
+ *
+ * The order in which the operands are added is important. The first one added
+ * to a model will have the index value 0, the second 1, etc. These indexes are
+ * used as operand identifiers in {@link ANeuralNetworksModel_addOperation},
+ * {@link ANeuralNetworksExecution_setInput},
+ * {@link ANeuralNetworksExecution_setInputFromMemory},
+ * {@link ANeuralNetworksExecution_setOutput},
+ * {@link ANeuralNetworksExecution_setOutputFromMemory} and
+ * {@link ANeuralNetworksExecution_setOperandValue}.
+ *
+ * To build a model that can accommodate inputs of various sizes, as you may
+ * want to do for a CNN, set the size of the dimensions that will vary at run
+ * time to 0. If you do so, provide the full dimensions when calling
+ * {@link ANeuralNetworksExecution_setInput} or {@link
+ * ANeuralNetworksExecution_setInputFromMemory}.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has
+ * been called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @param model The model to be modified.
+ * @param type The {@link ANeuralNetworksOperandType} that describes the shape
+ * of the operand.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_addOperand(ANeuralNetworksModel *model,
+ const ANeuralNetworksOperandType *type)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_addOperand);
+ EXECUTE_FUNCTION_RETURN(model, type);
+}
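+
+// Usage sketch (illustrative): add a 1x2x2x1 float tensor operand
+//
+//   uint32_t dims[] = {1, 2, 2, 1};
+//   ANeuralNetworksOperandType tensor_type;
+//   tensor_type.type = ANEURALNETWORKS_TENSOR_FLOAT32;
+//   tensor_type.dimensionCount = 4;
+//   tensor_type.dimensions = dims;
+//   tensor_type.scale = 0.f;   // only meaningful for quantized types
+//   tensor_type.zeroPoint = 0; // only meaningful for quantized types
+//   ANeuralNetworksModel_addOperand(model, &tensor_type); // becomes operand 0, 1, ...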
+
+/**
+ * Sets an operand to a constant value.
+ *
+ * For scalar values, the content of buffer is copied into the model.
+ *
+ * For tensor values, a pointer to the buffer is stored within the model.
+ * The application is responsible for not changing the content of this region
+ * until all executions using this model have completed. As the data may
+ * be copied during processing, modifying the data after this call yields
+ * undefined results.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has
+ * been called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @param model The model to be modified.
+ * @param index The index of the model operand we're setting.
+ * @param buffer A pointer to the data to use.
+ * @param length The size in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel *model, int32_t index,
+ const void *buffer, size_t length)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_setOperandValue);
+ EXECUTE_FUNCTION_RETURN(model, index, buffer, length);
+}
+
+/**
+ * Sets an operand's per channel quantization parameters.
+ *
+ * Sets parameters required by a tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL}.
+ * This function must be called for every tensor of type
+ * {@link ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL} before
+ * calling {@link ANeuralNetworksModel_finish}.
+ *
+ * Available since API level 29.
+ *
+ * @param model The model to be modified.
+ * @param index The index of the model operand we're setting.
+ * @param channelQuant The per channel quantization parameters for the operand.
+ * No memory in this struct needs to outlive the call to
+ * this function.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(
+ ANeuralNetworksModel *model, int32_t index,
+ const ANeuralNetworksSymmPerChannelQuantParams *channelQuant)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_setOperandSymmPerChannelQuantParams);
+ EXECUTE_FUNCTION_RETURN(model, index, channelQuant);
+}
+
+/**
+ * Sets an operand to a value stored in a memory object.
+ *
+ * The content of the memory is not copied. A reference to that memory is stored
+ * inside the model. The application is responsible for not changing the content
+ * of the memory region until all executions using this model have completed.
+ * As the data may be copied during processing, modifying the data after this
+ * call yields undefined results.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has
+ * been called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @param model The model to be modified.
+ * @param index The index of the model operand we're setting.
+ * @param buffer A pointer to the data to use.
+ * @param memory The memory containing the data.
+ * @param offset This specifies the location of the data within the memory.
+ * The offset is in bytes from the start of memory.
+ * @param length The size in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel *model,
+ int32_t index,
+ const ANeuralNetworksMemory *memory,
+ size_t offset, size_t length)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_setOperandValueFromMemory);
+ EXECUTE_FUNCTION_RETURN(model, index, memory, offset, length);
+}
+
+/**
+ * Add an operation to a model.
+ *
+ * @param model The model to be modified.
+ * @param type The type of the operation.
+ * @param inputCount The number of entries in the inputs array.
+ * @param inputs An array of indexes identifying each operand.
+ * @param outputCount The number of entries in the outputs array.
+ * @param outputs An array of indexes identifying each operand.
+ *
+ * The operands specified by inputs and outputs must have been
+ * previously added by calls to {@link ANeuralNetworksModel_addOperand}.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has
+ * been called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_addOperation(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationType type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_addOperation);
+ EXECUTE_FUNCTION_RETURN(model, type, inputCount, inputs, outputCount, outputs);
+}
+
+/**
+ * Specifies which operands will be the model's inputs and outputs.
+ *
+ * An operand cannot be used for both input and output. Doing so will
+ * return an error.
+ *
+ * @param model The model to be modified.
+ * @param inputCount The number of entries in the inputs array.
+ * @param inputs An array of indexes identifying the input operands.
+ * @param outputCount The number of entries in the outputs array.
+ * @param outputs An array of indexes identifying the output operands.
+ *
+ * The operands specified by inputs and outputs must have been
+ * previously added by calls to {@link ANeuralNetworksModel_addOperand}.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has
+ * been called will return an error.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel *model,
+ uint32_t inputCount,
+ const uint32_t *inputs,
+ uint32_t outputCount,
+ const uint32_t *outputs)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_identifyInputsAndOutputs);
+ EXECUTE_FUNCTION_RETURN(model, inputCount, inputs, outputCount, outputs);
+}
+
+/**
+ * Specifies whether {@link ANEURALNETWORKS_TENSOR_FLOAT32} is allowed to be
+ * calculated with range and/or precision as low as that of the IEEE 754 16-bit
+ * floating-point format. By default, {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+ * must be calculated using at least the range and precision of the IEEE 754
+ * 32-bit floating-point format.
+ *
+ * @param model The model to be modified.
+ * @param allow 'true' indicates {@link ANEURALNETWORKS_TENSOR_FLOAT32} may be
+ * calculated with range and/or precision as low as that of the
+ * IEEE 754 16-bit floating point format. 'false' indicates
+ * {@link ANEURALNETWORKS_TENSOR_FLOAT32} must be calculated using
+ * at least the range and precision of the IEEE 754 32-bit floating
+ * point format.
+ *
+ * Attempting to modify a model once {@link ANeuralNetworksModel_finish} has
+ * been called will return an error.
+ *
+ * Available since API level 28.
+ *
+ * See {@link ANeuralNetworksModel} for information on multithreaded usage.
+ */
+inline int ANeuralNetworksModel_relaxComputationFloat32toFloat16(ANeuralNetworksModel *model,
+ bool allow)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_relaxComputationFloat32toFloat16);
+ EXECUTE_FUNCTION_RETURN(model, allow);
+}
+
+/**
+ * Create a {@link ANeuralNetworksCompilation} to compile the given model.
+ * This only creates the object. Compilation is only performed once
+ * {@link ANeuralNetworksCompilation_finish} is invoked.
+ *
+ * <p>The provided model must outlive the compilation.</p>
+ *
+ * The model must already have been finished by a call to
+ * {@link ANeuralNetworksModel_finish}.
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded
+ * usage.
+ *
+ * @param model The {@link ANeuralNetworksModel} to be compiled.
+ * @param compilation The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the model is invalid.
+ */
+inline int ANeuralNetworksCompilation_create(ANeuralNetworksModel *model,
+ ANeuralNetworksCompilation **compilation)
+{
+ LOAD_FUNCTION(ANeuralNetworksCompilation_create);
+ EXECUTE_FUNCTION_RETURN(model, compilation);
+}
+
+/**
+ * Destroy a compilation.
+ *
+ * <p>The compilation need not have been finished by a call to
+ * {@link ANeuralNetworksCompilation_finish}.</p>
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded
+ * usage.
+ *
+ * @param compilation The compilation to be destroyed. Passing NULL is
+ * acceptable and results in no operation.
+ */
+inline void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation *compilation)
+{
+ LOAD_FUNCTION(ANeuralNetworksCompilation_free);
+ EXECUTE_FUNCTION(compilation);
+}
+
+/**
+ * Sets the execution preference.
+ *
+ * <p>Provides guidance to the runtime when trade-offs are possible.</p>
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded
+ * usage.
+ *
+ * @param compilation The compilation to be modified.
+ * @param preference Either {@link PREFER_LOW_POWER},
+ * {@link PREFER_SINGLE_FAST_ANSWER}, or
+ * {@link PREFER_SUSTAINED_SPEED}.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation *compilation,
+ int32_t preference)
+{
+ LOAD_FUNCTION(ANeuralNetworksCompilation_setPreference);
+ EXECUTE_FUNCTION_RETURN(compilation, preference);
+}
+
+/**
+ * Indicate that we have finished modifying a compilation. Required before
+ * calling {@link ANeuralNetworksExecution_create}.
+ *
+ * An application is responsible for ensuring that no other thread uses
+ * the compilation at the same time.
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded
+ * usage.
+ *
+ * @param compilation The compilation to be finished.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation *compilation)
+{
+ LOAD_FUNCTION(ANeuralNetworksCompilation_finish);
+ EXECUTE_FUNCTION_RETURN(compilation);
+}
+
+/**
+ * Create a {@link ANeuralNetworksExecution} to apply the given compilation.
+ * This only creates the object. Computation is only performed once
+ * {@link ANeuralNetworksExecution_startCompute} is invoked.
+ *
+ * <p>The provided compilation must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @param compilation The {@link ANeuralNetworksCompilation} to be evaluated.
+ * @param execution The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the compilation is invalid.
+ */
+inline int ANeuralNetworksExecution_create(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksExecution **execution)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_create);
+ EXECUTE_FUNCTION_RETURN(compilation, execution);
+}
+
+/**
+ * Destroy an execution.
+ *
+ * <p>If called on an execution for which
+ * {@link ANeuralNetworksExecution_startCompute} has been called, the
+ * function will return immediately but will mark the execution to be deleted
+ * once the computation completes. {@link ANeuralNetworksEvent_wait}
+ * will then return ANEURALNETWORKS_ERROR_DELETED.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @param execution The execution to be destroyed. Passing NULL is acceptable
+ * and results in no operation.
+ */
+inline void ANeuralNetworksExecution_free(ANeuralNetworksExecution *execution)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_free);
+ EXECUTE_FUNCTION(execution);
+}
+
+/**
+ * Associate a user buffer with an input of the model of the
+ * {@link ANeuralNetworksExecution}.
+ *
+ * <p>The provided buffer must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the input argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link
+ * ANeuralNetworksModel_addOperand}.
+ * @param type The type of the operand. This should be used to specify the
+ * dimensions that were set to 0 when the operand was added to the
+ * model. All other properties of the type must be the same as
+ * specified in the model. If the type is the same as specified
+ * when the model was built, NULL can be passed.
+ * @param buffer The buffer containing the data.
+ * @param length The length in bytes of the buffer.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if
+ * the name is not recognized or the buffer is too small for the input.
+ */
+inline int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const void *buffer, size_t length)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_setInput);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, buffer, length);
+}
+
+/**
+ * Associate part of a memory object with an input of the model of the
+ * {@link ANeuralNetworksExecution}.
+ *
+ * <p>The provided memory must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the input argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link
+ * ANeuralNetworksModel_addOperand}.
+ * @param type The type of the operand. This can be used to specify the
+ * dimensions that were set to 0 when the operand was added to the
+ * model. All other values must be the same as specified in the
+ * model. If the type is the same as specified when the model
+ * was built, NULL can be passed.
+ * @param memory The memory containing the data.
+ * @param offset This specifies the location of the data within the memory.
+ * The offset is in bytes from the start of memory.
+ * @param length The size in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if
+ * the name is not recognized or the buffer is too small for the input.
+ */
+inline int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory,
+ size_t offset, size_t length)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_setInputFromMemory);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, memory, offset, length);
+}
+
+/**
+ * Associate a user buffer with an output of the model of the
+ * {@link ANeuralNetworksExecution}.
+ *
+ * <p>The provided buffer must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the output argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link
+ * ANeuralNetworksModel_addOperand}.
+ * @param type The type of the operand. This can be used to specify the
+ * dimensions that were set to 0 when the operand was added to the
+ * model. All other values must be the same as specified in the
+ * model. If the type is the same as specified when the model
+ * was built, NULL can be passed.
+ * @param buffer The buffer where the data is to be written.
+ * @param length The length in bytes of the buffer.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if
+ * the name is not recognized or the buffer is too small for the output.
+ */
+inline int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, void *buffer,
+ size_t length)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_setOutput);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, buffer, length);
+}
+
+/**
+ * Associate part of a memory object with an output of the model of the
+ * {@link ANeuralNetworksExecution}.
+ *
+ * <p>The provided memory must outlive the execution.</p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @param execution The execution to be modified.
+ * @param index The index of the output argument we are setting. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link
+ * ANeuralNetworksModel_addOperand}.
+ * @param type The type of the operand. This can be used to specify the
+ * dimensions that were set to 0 when the operand was added to the
+ * model. All other values must be the same as specified in the
+ * model. If the type is the same as specified when the model
+ * was built, NULL can be passed.
+ * @param memory The memory where the data is to be stored.
+ * @param offset This specifies the location of the data within the memory.
+ * The offset is in bytes from the start of memory.
+ * @param length The length in bytes of the data value.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA if
+ * the name is not recognized or the buffer is too small for the output.
+ */
+inline int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory,
+ size_t offset, size_t length)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_setOutputFromMemory);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, memory, offset, length);
+}
+
+/**
+ * Schedule evaluation of the execution.
+ *
+ * <p>Schedules evaluation of the execution. Once the model has been
+ * applied and the outputs are ready to be consumed, the execution will be
+ * signaled. Use {@link ANeuralNetworksEvent_wait} to wait for that signal.
+ * </p>
+ *
+ * Multiple executions can be scheduled and evaluated concurrently, and
+ * compilations can be performed concurrently with executions. The runtime makes
+ * no guarantee on the ordering of the completion of compilations and
+ * executions. If it's important to the application, the application should
+ * enforce the ordering by using {@link ANeuralNetworksEvent_wait}.
+ *
+ * {@link ANeuralNetworksEvent_wait} must be called to recuperate the resources
+ * used by the execution.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @param execution The execution to be scheduled and executed.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution *execution,
+ ANeuralNetworksEvent **event)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_startCompute);
+ EXECUTE_FUNCTION_RETURN(execution, event);
+}
+
+/**
+ * Waits until the execution completes.
+ *
+ * More than one thread can wait on an event. When the execution completes,
+ * all threads will be released.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the execution completed normally.
+ */
+inline int ANeuralNetworksEvent_wait(ANeuralNetworksEvent *event)
+{
+ LOAD_FUNCTION(ANeuralNetworksEvent_wait);
+ EXECUTE_FUNCTION_RETURN(event);
+}
+
+/**
+ * Destroys the event.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ */
+inline void ANeuralNetworksEvent_free(ANeuralNetworksEvent *event)
+{
+ LOAD_FUNCTION(ANeuralNetworksEvent_free);
+ EXECUTE_FUNCTION(event);
+}
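+
+// End-to-end usage sketch of the wrappers above (illustrative; error handling
+// elided, 'model' already built and finished via the ANeuralNetworksModel_*
+// calls, buffers hypothetical):
+//
+//   ANeuralNetworksCompilation *compilation = nullptr;
+//   ANeuralNetworksCompilation_create(model, &compilation);
+//   ANeuralNetworksCompilation_finish(compilation);
+//
+//   ANeuralNetworksExecution *execution = nullptr;
+//   ANeuralNetworksExecution_create(compilation, &execution);
+//   ANeuralNetworksExecution_setInput(execution, 0, nullptr, in_buf, in_len);
+//   ANeuralNetworksExecution_setOutput(execution, 0, nullptr, out_buf, out_len);
+//
+//   ANeuralNetworksEvent *event = nullptr;
+//   ANeuralNetworksExecution_startCompute(execution, &event);
+//   ANeuralNetworksEvent_wait(event); // blocks until the outputs are ready
+//   ANeuralNetworksEvent_free(event);
+//   ANeuralNetworksExecution_free(execution);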
+
+/**
+ * Get the number of available devices.
+ *
+ * @param numDevices Used to return the number of devices.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworks_getDeviceCount(uint32_t *numDevices)
+{
+ LOAD_FUNCTION(ANeuralNetworks_getDeviceCount);
+ EXECUTE_FUNCTION_RETURN(numDevices);
+}
+
+/**
+ * Get the representation of the specified device.
+ *
+ * @param devIndex The index of the specified device. Must be less than the
+ * number of available devices.
+ * @param device The representation of the specified device.
+ * The same representation will always be returned for the
+ * specified device.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+
+inline int ANeuralNetworks_getDevice(uint32_t devIndex, ANeuralNetworksDevice **device)
+{
+ LOAD_FUNCTION(ANeuralNetworks_getDevice);
+ EXECUTE_FUNCTION_RETURN(devIndex, device);
+}
+
+/**
+ * Get the name of the specified device.
+ *
+ * @param device The representation of the specified device.
+ * @param name The returned name of the specified device. The name will be in
+ * UTF-8 and will be null-terminated. It will be recognizable as a
+ * known device name rather than a cryptic string. For devices
+ * with API level 29 and above, the format of the name is
+ * {VENDOR}-{DEVICE}, e.g. “google-ipu”. For devices with feature
+ * level 28 or lower, the name will always be “unknown-device”.
+ * The name will remain valid for the duration of the application.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworksDevice_getName(const ANeuralNetworksDevice *device, const char **name)
+{
+ LOAD_FUNCTION(ANeuralNetworksDevice_getName);
+ EXECUTE_FUNCTION_RETURN(device, name);
+}
+
+/**
+ * Get the version of the driver implementation of the specified device.
+ *
+ * It’s the responsibility of the driver implementor to ensure that this version
+ * string uniquely distinguishes this implementation from all previous
+ * implementations.
+ *
+ * This version string must not be confused with the feature level which is
+ * solely defined by {@link ANeuralNetworksDevice_getFeatureLevel}. There is no
+ * implicit ordering of the versions. For example, it is not possible to filter
+ * all drivers older than a certain version.
+ *
+ * Application developers may use this version string to avoid or prefer
+ * specific driver implementations. For example, an application may want to do
+ * so because:
+ * - A specific version of the driver does not provide the required
+ * performance, perhaps because of a performance regression.
+ * - A specific version of the driver has a bug or returns results that
+ * don’t match the minimum precision requirement for the application.
+ *
+ * @param device The representation of the specified device.
+ * @param version The returned version string of the driver for the specified
+ * device. The string will be in UTF-8 and will be
+ * null-terminated. For devices with feature level 28 or lower,
+ * "UNKNOWN" will be returned. The version string will remain
+ * valid for the duration of the application.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworksDevice_getVersion(const ANeuralNetworksDevice *device,
+ const char **version)
+{
+ LOAD_FUNCTION(ANeuralNetworksDevice_getVersion);
+ EXECUTE_FUNCTION_RETURN(device, version);
+}
+
+/**
+ * Get the supported NNAPI version of the specified device.
+ *
+ * Each device has a supported feature level, which is the most advanced feature
+ * this driver implements. For example, if the driver implements the features
+ * introduced in Android P, but does not implement the features introduced after
+ * Android P, the value would be 28. Developers could decide whether or not the
+ * specified device should be used for a Model that has certain feature
+ * requirements.
+ *
+ * @param device The representation of the specified device.
+ * @param featureLevel The API level of the most advanced feature this driver
+ * implements.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworksDevice_getFeatureLevel(const ANeuralNetworksDevice *device,
+ int64_t *featureLevel)
+{
+ LOAD_FUNCTION(ANeuralNetworksDevice_getFeatureLevel);
+ EXECUTE_FUNCTION_RETURN(device, featureLevel);
+}
+
+/**
+ * Get the supported operations for a specified set of devices. If multiple
+ * devices are selected, the supported operation list is a union of supported
+ * operations of all selected devices.
+ *
+ * @param model The model to be queried.
+ * @param devices The set of devices. Must not contain duplicates.
+ * @param numDevices The number of devices in the set.
+ * @param supportedOps The boolean array to be filled. True means supported. The
+ * size of the boolean array must be at least as large as
+ * the number of operations in the model. The order of
+ * elements in the supportedOps array matches the order in
+ * which the corresponding operations were added to the
+ * model.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+inline int
+ANeuralNetworksModel_getSupportedOperationsForDevices(const ANeuralNetworksModel *model,
+ const ANeuralNetworksDevice *const *devices,
+ uint32_t numDevices, bool *supportedOps)
+{
+ LOAD_FUNCTION(ANeuralNetworksModel_getSupportedOperationsForDevices);
+ EXECUTE_FUNCTION_RETURN(model, devices, numDevices, supportedOps);
+}
+
+/**
+ * Create a {@link ANeuralNetworksCompilation} to compile the given model for a
+ * specified set of devices. If more than one device is specified, the
+ * compilation will distribute the workload automatically across the devices.
+ * The model must be fully supported by the specified set of devices. This means
+ * that ANeuralNetworksModel_getSupportedOperationsForDevices() must have
+ * returned true for every operation for that model/devices pair.
+ *
+ * @param model The {@link ANeuralNetworksModel} to be compiled.
+ * @param devices The set of devices. Must not contain duplicates.
+ * @param numDevices The number of devices in the set.
+ * @param compilation The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the model is invalid.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworksCompilation_createForDevices(ANeuralNetworksModel *model,
+ const ANeuralNetworksDevice *const *devices,
+ uint32_t numDevices,
+ ANeuralNetworksCompilation **compilation)
+{
+ LOAD_FUNCTION(ANeuralNetworksCompilation_createForDevices);
+ EXECUTE_FUNCTION_RETURN(model, devices, numDevices, compilation);
+}
+
+/**
+ * Sets the compilation caching signature and the cache directory.
+ *
+ * Provides optional caching information to the runtime for faster repeated
+ * compilation.
+ *
+ * See {@link ANeuralNetworksCompilation} for information on multithreaded
+ * usage.
+ *
+ * @param compilation The compilation to be modified.
+ * @param cacheDir The cache directory to store and retrieve caching data. It is
+ * recommended to use the code_cache provided by the Android
+ * runtime. If not using the code_cache, the user should choose
+ * a directory local to the application, and is responsible to
+ * manage and clean the cache entries.
+ * @param token The token provided by the user to specify a model, must be of
+ * length ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN. The user should
+ * ensure that the token is unique to a model within the
+ *              application. The NNAPI runtime will not detect token
+ *              collisions. If there is a collision, the compilation outcome may
+ *              be incorrect without any notification of the error.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworksCompilation_setCaching(ANeuralNetworksCompilation *compilation,
+ const char *cacheDir, const uint8_t *token)
+{
+ LOAD_FUNCTION(ANeuralNetworksCompilation_setCaching);
+ EXECUTE_FUNCTION_RETURN(compilation, cacheDir, token);
+}
+
+/**
+ * Schedule synchronous evaluation of the execution.
+ *
+ * <p>Schedules synchronous evaluation of the execution. Returns once the
+ * execution has completed and the outputs are ready to be consumed.
+ * </p>
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * See {@link ANeuralNetworksExecution_startCompute} for asynchronous execution.
+ * Synchronous execution incurs lower overhead than asynchronous execution.
+ *
+ * Available since API level 29.
+ *
+ * @param execution The execution to be scheduled and executed.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the execution completed normally.
+ * ANEURALNETWORKS_UNMAPPABLE if the execution input or output memory
+ * cannot be properly mapped.
+ */
+inline int ANeuralNetworksExecution_compute(ANeuralNetworksExecution *execution)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_compute);
+ EXECUTE_FUNCTION_RETURN(execution);
+}
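+
+// Synchronous variant of the flow sketched after ANeuralNetworksEvent_free
+// above (illustrative): the startCompute/wait/free-event triple collapses
+// into a single blocking call.
+//
+//   ANeuralNetworksExecution_compute(execution); // returns once outputs are ready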
+
+/**
+ * Get the dimensional information of the specified output operand of the model
+ * of the
+ * {@link ANeuralNetworksExecution}.
+ *
+ * On asynchronous execution initiated by {@link
+ * ANeuralNetworksExecution_startCompute},
+ * {@link ANeuralNetworksEvent_wait} must be called prior to this function to
+ * recuperate the resources used by the execution.
+ *
+ * @param execution The execution to be queried.
+ * @param index The index of the output argument we are querying. It is
+ * an index into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with {@link
+ * ANeuralNetworksModel_addOperand}.
+ * @param rank The rank of the output operand.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful,
+ * ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE if the target output is provided an
+ * insufficient buffer at execution time, ANEURALNETWORKS_BAD_DATA if the index
+ * is invalid.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworksExecution_getOutputOperandRank(ANeuralNetworksExecution *execution,
+ int32_t index, uint32_t *rank)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_getOutputOperandRank);
+ EXECUTE_FUNCTION_RETURN(execution, index, rank);
+}
+
+/**
+ * Get the dimensional information of the specified output operand of the model
+ * of the
+ * {@link ANeuralNetworksExecution}. The target output operand cannot be a
+ * scalar.
+ *
+ * On asynchronous execution initiated by
+ * {@link ANeuralNetworksExecution_startCompute},
+ * {@link ANeuralNetworksEvent_wait} must be called prior to this function to
+ * recuperate the resources used by the execution.
+ *
+ * @param execution The execution to be queried.
+ * @param index The index of the output argument we are querying. It is an index
+ * into the lists passed to
+ * {@link ANeuralNetworksModel_identifyInputsAndOutputs}. It is not
+ * the index associated with
+ * {@link ANeuralNetworksModel_addOperand}.
+ * @param dimensions The dimension array to be filled. The size of the array
+ * must be exactly as large as the rank of the output operand
+ * to be queried in the model.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful,
+ * ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE if the target output is provided an
+ * insufficient buffer at execution time, ANEURALNETWORKS_BAD_DATA if the index
+ * is invalid or if the target is a scalar.
+ *
+ * Available since API level 29.
+ */
+inline int ANeuralNetworksExecution_getOutputOperandDimensions(ANeuralNetworksExecution *execution,
+ int32_t index, uint32_t *dimensions)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_getOutputOperandDimensions);
+ EXECUTE_FUNCTION_RETURN(execution, index, dimensions);
+}
+
+/**
+ * Create a {@link ANeuralNetworksBurst} to apply the given compilation.
+ * This only creates the burst object. Computation is only performed once
+ * {@link ANeuralNetworksExecution_burstCompute} is invoked with a valid
+ * {@link ANeuralNetworksExecution} and {@link ANeuralNetworksBurst}.
+ *
+ * <p>The provided compilation must outlive the burst object.</p>
+ *
+ * Available since API level 29.
+ *
+ * @param compilation The {@link ANeuralNetworksCompilation} to be evaluated.
+ * @param burst The newly created object or NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful, ANEURALNETWORKS_BAD_DATA
+ * if the compilation is invalid.
+ */
+inline int ANeuralNetworksBurst_create(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksBurst **burst)
+{
+ LOAD_FUNCTION(ANeuralNetworksBurst_create);
+ EXECUTE_FUNCTION_RETURN(compilation, burst);
+}
+
+/**
+ * Destroys the burst object.
+ *
+ * Available since API level 29.
+ *
+ * @param burst The burst object to be destroyed. Passing NULL is acceptable and
+ * results in no operation.
+ */
+inline void ANeuralNetworksBurst_free(ANeuralNetworksBurst *burst)
+{
+ LOAD_FUNCTION(ANeuralNetworksBurst_free);
+ EXECUTE_FUNCTION(burst);
+}
+
+/**
+ * Schedule synchronous evaluation of the execution on a burst object.
+ *
+ * <p>Schedules synchronous evaluation of the execution. Returns once the
+ * execution has completed and the outputs are ready to be consumed.</p>
+ *
+ * <p>There must be at most one {@link ANeuralNetworksExecution} processing at
+ * any given time for any given burst object. Any
+ * {@link ANeuralNetworksExecution} launched before the previous has finished
+ * will result in ANEURALNETWORKS_BAD_STATE.</p>
+ *
+ * Available since API level 29.
+ *
+ * @param burst The burst object to execute on.
+ * @param execution The execution to be scheduled and executed. The execution
+ * must be created from the same {@link
+ * ANeuralNetworksCompilation} as the burst object.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the execution completed normally.
+ */
+inline int ANeuralNetworksExecution_burstCompute(ANeuralNetworksExecution *execution,
+ ANeuralNetworksBurst *burst)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_burstCompute);
+ EXECUTE_FUNCTION_RETURN(execution, burst);
+}
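+
+// Usage sketch (illustrative): reuse one burst object across repeated
+// inferences on the same compilation.
+//
+//   ANeuralNetworksBurst *burst = nullptr;
+//   ANeuralNetworksBurst_create(compilation, &burst);
+//   while (has_next_frame()) // hypothetical loop condition
+//   {
+//     // create and configure an execution from 'compilation', then:
+//     ANeuralNetworksExecution_burstCompute(execution, burst);
+//   }
+//   ANeuralNetworksBurst_free(burst);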
+
+/**
+ * Creates a shared memory object from an AHardwareBuffer handle.
+ *
+ * If the shared memory is backed by an AHardwareBuffer of
+ * AHARDWAREBUFFER_FORMAT_BLOB format, it can be used the same way as shared
+ * memory created from a file handle. See
+ * {@link ANeuralNetworksMemory} for a description on how to use this shared
+ * memory.
+ *
+ * If the shared memory is backed by an AHardwareBuffer of a format other than
+ * AHARDWAREBUFFER_FORMAT_BLOB, it can only be used for Model inputs and
+ * outputs. When calling {@link ANeuralNetworksExecution_setInputFromMemory} or
+ * {@link ANeuralNetworksExecution_setOutputFromMemory} with the shared memory,
+ * both offset and length must be set to zero and the entire memory region will
+ * be associated with the specified input or output operand. There is no
+ * guarantee that an arbitrary AHardwareBuffer_Format and
+ * AHardwareBuffer_UsageFlags combination can be used by arbitrary devices. The
+ * execution will fail if selected set of devices cannot consume the buffer.
+ *
+ * Calling {@link ANeuralNetworksModel_setOperandValueFromMemory} with shared
+ * memory backed by an AHardwareBuffer of a format other than
+ * AHARDWAREBUFFER_FORMAT_BLOB is disallowed.
+ *
+ * TODO(miaowang): add documentation about intended usage with introspection
+ * API.
+ *
+ * Available since API level 29.
+ *
+ * @param ahwb The AHardwareBuffer handle.
+ * @param memory The memory object to be created.
+ * Set to NULL if unsuccessful.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if the request completed normally.
+ *
+ * @see AHardwareBuffer
+ */
+inline int ANeuralNetworksMemory_createFromAHardwareBuffer(const AHardwareBuffer *ahwb,
+ ANeuralNetworksMemory **memory)
+{
+ LOAD_FUNCTION(ANeuralNetworksMemory_createFromAHardwareBuffer);
+ EXECUTE_FUNCTION_RETURN(ahwb, memory);
+}
+
+/**
+ * Specifies whether duration of the {@link ANeuralNetworksExecution} is to be
+ * measured. By default, duration is not measured.
+ *
+ * The {@link ANeuralNetworksExecution} must have been created with
+ * {@link ANeuralNetworksCompilation_createForDevices} with numDevices = 1.
+ *
+ * See {@link ANeuralNetworksExecution} for information on multithreaded usage.
+ *
+ * Available since API level 29.
+ *
+ * @param execution The execution to be modified.
+ * @param measure 'true' if duration is to be measured, 'false' if not.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksExecution_setMeasureTiming(ANeuralNetworksExecution *execution,
+ bool measure)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_setMeasureTiming);
+ EXECUTE_FUNCTION_RETURN(execution, measure);
+}
+
+/**
+ * Get the time spent in the specified {@link ANeuralNetworksExecution}, in
+ * nanoseconds. The execution must have completed.
+ *
+ * @param execution The execution to be queried.
+ * @param durationCode The measurement to be queried, specified by {@link
+ * DurationCode}.
+ * @param duration The returned duration. If no measurement was requested by
+ * {@link ANeuralNetworksExecution_setMeasureTiming}, or for
+ * some other reason the duration is not available, UINT64_MAX will be returned.
+ * A particular device need not support any given measurement.
+ *
+ * @return ANEURALNETWORKS_NO_ERROR if successful.
+ */
+inline int ANeuralNetworksExecution_getDuration(const ANeuralNetworksExecution *execution,
+ int32_t durationCode, uint64_t *duration)
+{
+ LOAD_FUNCTION(ANeuralNetworksExecution_getDuration);
+ EXECUTE_FUNCTION_RETURN(execution, durationCode, duration);
+}
+
+/**/
+
+#endif // __NEURAL_NETWORKS_SHIM_H__
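For orientation, a minimal sketch of how the burst entry points above chain together. It assumes a finished compilation, an execution whose inputs and outputs are already bound, and that this shim header is on the include path; `runWithBurst` is a hypothetical helper, not part of the patch.

```cpp
#include "NeuralNetworksShim.h"

// Run one synchronous inference through a burst object.
int runWithBurst(ANeuralNetworksCompilation *compilation, ANeuralNetworksExecution *execution)
{
  ANeuralNetworksBurst *burst = nullptr;
  int rc = ANeuralNetworksBurst_create(compilation, &burst);
  if (rc != ANEURALNETWORKS_NO_ERROR)
    return rc;

  // Returns once the execution has completed and outputs are ready.
  rc = ANeuralNetworksExecution_burstCompute(execution, burst);

  ANeuralNetworksBurst_free(burst);
  return rc;
}
```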
diff --git a/runtimes/libs/nnapi/v1.2/include/NeuralNetworksTypes.h b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksTypes.h
new file mode 100644
index 000000000..d74402749
--- /dev/null
+++ b/runtimes/libs/nnapi/v1.2/include/NeuralNetworksTypes.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// NOTE This header is derived from part of the following file
+// https://github.com/tensorflow/tensorflow/blob/a59ad83d06abd38b5e142c41043db8886a92fca8/tensorflow/lite/nnapi/NeuralNetworksTypes.h
+
+#ifndef __NEURAL_NETWORKS_TYPES_H__
+#define __NEURAL_NETWORKS_TYPES_H__
+
+#include "NeuralNetworks.h"
+
+// NN api types based on NNAPI header file
+// https://developer.android.com/ndk/reference/group/neural-networks
+
+// nn api function types
+
+typedef int (*ANeuralNetworksMemory_createFromFd_fn)(size_t size, int protect, int fd,
+ size_t offset, ANeuralNetworksMemory **memory);
+
+typedef void (*ANeuralNetworksMemory_free_fn)(ANeuralNetworksMemory *memory);
+
+typedef int (*ANeuralNetworksModel_create_fn)(ANeuralNetworksModel **model);
+
+typedef int (*ANeuralNetworksModel_finish_fn)(ANeuralNetworksModel *model);
+
+typedef void (*ANeuralNetworksModel_free_fn)(ANeuralNetworksModel *model);
+
+typedef int (*ANeuralNetworksCompilation_create_fn)(ANeuralNetworksModel *model,
+ ANeuralNetworksCompilation **compilation);
+
+typedef void (*ANeuralNetworksCompilation_free_fn)(ANeuralNetworksCompilation *compilation);
+
+typedef int (*ANeuralNetworksCompilation_setPreference_fn)(ANeuralNetworksCompilation *compilation,
+ int32_t preference);
+
+typedef int (*ANeuralNetworksCompilation_finish_fn)(ANeuralNetworksCompilation *compilation);
+
+typedef int (*ANeuralNetworksModel_addOperand_fn)(ANeuralNetworksModel *model,
+ const ANeuralNetworksOperandType *type);
+
+typedef int (*ANeuralNetworksModel_setOperandValue_fn)(ANeuralNetworksModel *model, int32_t index,
+ const void *buffer, size_t length);
+
+typedef int (*ANeuralNetworksModel_setOperandSymmPerChannelQuantParams_fn)(
+ ANeuralNetworksModel *model, int32_t index,
+ const ANeuralNetworksSymmPerChannelQuantParams *channelQuant);
+
+typedef int (*ANeuralNetworksModel_setOperandValueFromMemory_fn)(
+ ANeuralNetworksModel *model, int32_t index, const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length);
+
+typedef int (*ANeuralNetworksModel_addOperation_fn)(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationType type,
+ uint32_t inputCount, const uint32_t *inputs,
+ uint32_t outputCount, const uint32_t *outputs);
+
+typedef int (*ANeuralNetworksModel_identifyInputsAndOutputs_fn)(ANeuralNetworksModel *model,
+ uint32_t inputCount,
+ const uint32_t *inputs,
+ uint32_t outputCount,
+ const uint32_t *outputs);
+
+typedef int (*ANeuralNetworksModel_relaxComputationFloat32toFloat16_fn)(ANeuralNetworksModel *model,
+ bool allow);
+
+typedef int (*ANeuralNetworksExecution_create_fn)(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksExecution **execution);
+
+typedef void (*ANeuralNetworksExecution_free_fn)(ANeuralNetworksExecution *execution);
+
+typedef int (*ANeuralNetworksExecution_setInput_fn)(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const void *buffer, size_t length);
+
+typedef int (*ANeuralNetworksExecution_setInputFromMemory_fn)(
+ ANeuralNetworksExecution *execution, int32_t index, const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset, size_t length);
+
+typedef int (*ANeuralNetworksExecution_setOutput_fn)(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ void *buffer, size_t length);
+
+typedef int (*ANeuralNetworksExecution_setOutputFromMemory_fn)(
+ ANeuralNetworksExecution *execution, int32_t index, const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset, size_t length);
+
+typedef int (*ANeuralNetworksExecution_startCompute_fn)(ANeuralNetworksExecution *execution,
+ ANeuralNetworksEvent **event);
+
+typedef int (*ANeuralNetworksEvent_wait_fn)(ANeuralNetworksEvent *event);
+
+typedef void (*ANeuralNetworksEvent_free_fn)(ANeuralNetworksEvent *event);
+
+typedef int (*ASharedMemory_create_fn)(const char *name, size_t size);
+
+typedef int (*ANeuralNetworks_getDeviceCount_fn)(uint32_t *numDevices);
+
+typedef int (*ANeuralNetworks_getDevice_fn)(uint32_t devIndex, ANeuralNetworksDevice **device);
+
+typedef int (*ANeuralNetworksDevice_getName_fn)(const ANeuralNetworksDevice *device,
+ const char **name);
+
+typedef int (*ANeuralNetworksDevice_getType_fn)(const ANeuralNetworksDevice *device, int32_t *type);
+
+typedef int (*ANeuralNetworksDevice_getVersion_fn)(const ANeuralNetworksDevice *device,
+ const char **version);
+
+typedef int (*ANeuralNetworksDevice_getFeatureLevel_fn)(const ANeuralNetworksDevice *device,
+ int64_t *featureLevel);
+
+typedef int (*ANeuralNetworksModel_getSupportedOperationsForDevices_fn)(
+ const ANeuralNetworksModel *model, const ANeuralNetworksDevice *const *devices,
+ uint32_t numDevices, bool *supportedOps);
+
+typedef int (*ANeuralNetworksCompilation_createForDevices_fn)(
+ ANeuralNetworksModel *model, const ANeuralNetworksDevice *const *devices, uint32_t numDevices,
+ ANeuralNetworksCompilation **compilation);
+
+typedef int (*ANeuralNetworksCompilation_setCaching_fn)(ANeuralNetworksCompilation *compilation,
+ const char *cacheDir, const uint8_t *token);
+
+typedef int (*ANeuralNetworksExecution_compute_fn)(ANeuralNetworksExecution *execution);
+
+typedef int (*ANeuralNetworksExecution_getOutputOperandRank_fn)(ANeuralNetworksExecution *execution,
+ int32_t index, uint32_t *rank);
+
+typedef int (*ANeuralNetworksExecution_getOutputOperandDimensions_fn)(
+ ANeuralNetworksExecution *execution, int32_t index, uint32_t *dimensions);
+
+typedef int (*ANeuralNetworksBurst_create_fn)(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksBurst **burst);
+
+typedef void (*ANeuralNetworksBurst_free_fn)(ANeuralNetworksBurst *burst);
+
+typedef int (*ANeuralNetworksExecution_burstCompute_fn)(ANeuralNetworksExecution *execution,
+ ANeuralNetworksBurst *burst);
+
+typedef int (*ANeuralNetworksMemory_createFromAHardwareBuffer_fn)(const AHardwareBuffer *ahwb,
+ ANeuralNetworksMemory **memory);
+
+typedef int (*ANeuralNetworksExecution_setMeasureTiming_fn)(ANeuralNetworksExecution *execution,
+ bool measure);
+
+typedef int (*ANeuralNetworksExecution_getDuration_fn)(const ANeuralNetworksExecution *execution,
+ int32_t durationCode, uint64_t *duration);
+
+#endif // __NEURAL_NETWORKS_TYPES_H__
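These typedefs exist so the NNAPI implementation can be resolved at run time rather than link time. A hypothetical sketch of the intended use follows; the library soname and the choice of error code are assumptions, not taken from this header.

```cpp
#include <dlfcn.h>

#include "NeuralNetworksTypes.h"

// Resolve ANeuralNetworksModel_create from the NNAPI shared object and call it.
inline int createModelDynamically(ANeuralNetworksModel **model)
{
  // "libneuralnetworks.so" is the usual Android soname (an assumption here).
  void *handle = dlopen("libneuralnetworks.so", RTLD_LAZY | RTLD_LOCAL);
  if (handle == nullptr)
    return ANEURALNETWORKS_UNEXPECTED_NULL;

  auto create = reinterpret_cast<ANeuralNetworksModel_create_fn>(
      dlsym(handle, "ANeuralNetworksModel_create"));
  return (create != nullptr) ? create(model) : ANEURALNETWORKS_UNEXPECTED_NULL;
}
```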
diff --git a/runtimes/libs/profiling/CMakeLists.txt b/runtimes/libs/profiling/CMakeLists.txt
new file mode 100644
index 000000000..e0398ce93
--- /dev/null
+++ b/runtimes/libs/profiling/CMakeLists.txt
@@ -0,0 +1,7 @@
+file(GLOB_RECURSE SOURCES "src/*.cpp")
+
+add_library(nnfw_lib_profiling STATIC ${SOURCES})
+set_property(TARGET nnfw_lib_profiling PROPERTY POSITION_INDEPENDENT_CODE ON)
+target_include_directories(nnfw_lib_profiling PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
+target_link_libraries(nnfw_lib_profiling PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_profiling PRIVATE nnfw_coverage)
diff --git a/runtimes/libs/profiling/include/profiling/profile_buffer.h b/runtimes/libs/profiling/include/profiling/profile_buffer.h
new file mode 100644
index 000000000..bc8d75e7c
--- /dev/null
+++ b/runtimes/libs/profiling/include/profiling/profile_buffer.h
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This header is derived from the following file (in TensorFlow v1.12)
+// externals/tensorflow/tensorflow/lite/profiling/profile_buffer.h
+#ifndef TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILE_BUFFER_H_
+#define TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILE_BUFFER_H_
+
+#include <cstddef>
+#include <cstdint>
+
+#include "profiling/time.h"
+
+namespace tflite {
+namespace profiling {
+
+// A profiling event.
+struct ProfileEvent {
+ // Describes the type of event.
+ // The event_metadata field may contain additional data for interpreting
+ // the event.
+ enum class EventType {
+ // Default event type, the metadata field has no special significance.
+ DEFAULT = 0,
+ // The event is an operator invocation and the event_metadata field is the
+ // index of operator node.
+ OPERATOR_INVOKE_EVENT = 1
+ };
+
+ // Label of the event. This usually describes the event.
+ const char* tag;
+ // Timestamp in microseconds when the event began.
+ uint64_t begin_timestamp_us;
+ // Timestamp in microseconds when the event ended.
+ uint64_t end_timestamp_us;
+ // The field containing the type of event. This must be one of the event types
+ // in EventType.
+ EventType event_type;
+ // Extra data describing the details of the event.
+ uint32_t event_metadata;
+};
+} // namespace profiling
+} // namespace tflite
+
+#ifdef TFLITE_PROFILING_ENABLED
+
+#include <sys/time.h>
+#include <vector>
+
+namespace tflite {
+namespace profiling {
+constexpr uint32_t kInvalidEventHandle = static_cast<uint32_t>(~0) - 1;
+
+// A ring buffer of profile events.
+// This class is not thread safe.
+class ProfileBuffer {
+ public:
+ ProfileBuffer(uint32_t max_num_entries, bool enabled)
+ : enabled_(enabled), current_index_(0), event_buffer_(max_num_entries) {}
+
+  // Adds an event to the buffer with its begin timestamp set to the current
+  // time. Returns a handle to the event that can be used to call EndEvent. If
+  // the buffer is disabled this has no effect.
+  // The tag of the event must remain valid as long as the buffer is valid.
+ uint32_t BeginEvent(const char* tag, ProfileEvent::EventType event_type,
+ uint32_t event_metadata) {
+ if (!enabled_) {
+ return kInvalidEventHandle;
+ }
+ uint64_t timestamp = time::NowMicros();
+ int index = current_index_ % event_buffer_.size();
+ event_buffer_[index].tag = tag;
+ event_buffer_[index].event_type = event_type;
+ event_buffer_[index].event_metadata = event_metadata;
+ event_buffer_[index].begin_timestamp_us = timestamp;
+ event_buffer_[index].end_timestamp_us = 0;
+ current_index_++;
+ return index;
+ }
+
+ // Sets the enabled state of buffer to |enabled|
+ void SetEnabled(bool enabled) { enabled_ = enabled; }
+
+  // Sets the end timestamp of the event associated with the handle to the
+  // current time. If the buffer is disabled or the event has already been
+  // overwritten, this operation has no effect.
+ void EndEvent(uint32_t event_handle) {
+ if (!enabled_ || event_handle == kInvalidEventHandle ||
+ event_handle > current_index_) {
+ return;
+ }
+ const uint32_t max_size = event_buffer_.size();
+ if (current_index_ > (max_size + event_handle)) {
+ // Ignore, buffer has already overflowed.
+ return;
+ }
+
+ int event_index = event_handle % max_size;
+ event_buffer_[event_index].end_timestamp_us = time::NowMicros();
+ }
+
+  // Returns the number of valid events in the buffer.
+ size_t Size() const {
+ return (current_index_ >= event_buffer_.size()) ? event_buffer_.size()
+ : current_index_;
+ }
+
+ // Resets the buffer.
+ void Reset() {
+ enabled_ = false;
+ current_index_ = 0;
+ }
+
+ // Returns the profile event at the given index. If the index is invalid a
+ // nullptr is returned. The return event may get overwritten if more events
+ // are added to buffer.
+ const struct ProfileEvent* const At(int index) const {
+ size_t size = Size();
+ if (index >= size) {
+ return nullptr;
+ }
+ const uint32_t max_size = event_buffer_.size();
+ uint32_t start =
+ (current_index_ > max_size) ? current_index_ % max_size : max_size;
+ index = (index + start) % max_size;
+ return &event_buffer_[index];
+ }
+
+ private:
+ bool enabled_;
+ uint32_t current_index_;
+ std::vector<ProfileEvent> event_buffer_;
+};
+} // namespace profiling
+} // namespace tflite
+#endif // TFLITE_PROFILING_ENABLED
+#endif // TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILE_BUFFER_H_
+
+// clang-format on
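A small usage sketch of the ring buffer above. It assumes the translation unit is built with `TFLITE_PROFILING_ENABLED` defined, since `ProfileBuffer` is guarded by that macro; the tag and sizes are illustrative.

```cpp
#include "profiling/profile_buffer.h"

void profileOneRegion()
{
  tflite::profiling::ProfileBuffer buffer(/*max_num_entries=*/64, /*enabled=*/true);

  // BeginEvent records the start timestamp and hands back an event handle.
  uint32_t handle =
      buffer.BeginEvent("my_region", tflite::profiling::ProfileEvent::EventType::DEFAULT, 0);
  // ... work under measurement ...
  buffer.EndEvent(handle);

  // At() returns a pointer into the ring buffer; it may be overwritten later.
  const tflite::profiling::ProfileEvent *ev = buffer.At(0);
  uint64_t elapsed_us = ev->end_timestamp_us - ev->begin_timestamp_us;
  (void)elapsed_us;
}
```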
diff --git a/runtimes/libs/profiling/include/profiling/profiler.h b/runtimes/libs/profiling/include/profiling/profiler.h
new file mode 100644
index 000000000..ed3688140
--- /dev/null
+++ b/runtimes/libs/profiling/include/profiling/profiler.h
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This header is derived from the following file (in TensorFlow v1.12)
+// externals/tensorflow/tensorflow/lite/profiling/profiler.h
+#ifndef TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILER_H_
+#define TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILER_H_
+
+#include <vector>
+
+#include "profiling/profile_buffer.h"
+
+#ifdef TFLITE_PROFILING_ENABLED
+
+namespace tflite {
+namespace profiling {
+class ScopedProfile;
+class ScopedOperatorProfile;
+
+// Controls whether profiling is enabled or disabled and collects profiles.
+// TFLite is used on platforms that don't have posix threads, so the profiler is
+// kept as simple as possible. It is designed to be used only on a single
+// thread.
+//
+// Profiles are collected using Scoped*Profile objects that begin and end a
+// profile event.
+// Example usage is shown below:
+//
+// Say the Worker class has a DoWork method and we are interested in profiling
+// the overall execution time of DoWork and the time spent in the Task1 and
+// Task2 functions.
+//
+// class Worker {
+// public:
+// void DoWork() {
+//      ScopedProfile profile(&profiler, "DoWork");
+// Task1();
+// Task2();
+// .....
+// }
+//
+// void Task1() {
+//      ScopedProfile profile(&profiler, "Task1");
+// ....
+// }
+//
+// void Task2() {
+//      ScopedProfile profile(&profiler, "Task2");
+// }
+//
+// Profiler profiler;
+// }
+//
+// We instrument the functions that need to be profiled.
+//
+// Profiles can be collected by enabling profiling and then getting the
+// profile events.
+//
+// void ProfileWorker() {
+// Worker worker;
+//    worker.profiler.StartProfiling();
+// worker.DoWork();
+//    worker.profiler.StopProfiling();
+// // Profiling is complete, extract profiles.
+//    auto profile_events = worker.profiler.GetProfileEvents();
+// }
+//
+//
+class Profiler {
+ public:
+ Profiler() : buffer_(1024, false) {}
+
+ void StartProfiling() { buffer_.SetEnabled(true); }
+ void StopProfiling() { buffer_.SetEnabled(false); }
+ void Reset() { buffer_.Reset(); }
+ std::vector<const ProfileEvent*> GetProfileEvents() {
+ std::vector<const ProfileEvent*> profile_events;
+ profile_events.reserve(buffer_.Size());
+ for (size_t i = 0; i < buffer_.Size(); i++) {
+ profile_events.push_back(buffer_.At(i));
+ }
+ return profile_events;
+ }
+
+ private:
+ friend class ScopedProfile;
+ friend class ScopedOperatorProfile;
+ ProfileBuffer* GetProfileBuffer() { return &buffer_; }
+ ProfileBuffer buffer_;
+};
+
+class ScopedProfile {
+ public:
+  // Adds a profile event to the profile; the event begins at construction of
+  // this object and ends when the object goes out of scope.
+  // The lifetime of the tag must be at least the lifetime of the profiler.
+
+ ScopedProfile(Profiler* profiler, const char* tag)
+ : buffer_(nullptr), event_handle_(0) {
+ if (profiler) {
+ buffer_ = profiler->GetProfileBuffer();
+ event_handle_ =
+ buffer_->BeginEvent(tag, ProfileEvent::EventType::DEFAULT, 0);
+ }
+ }
+ ~ScopedProfile() {
+ if (buffer_) {
+ buffer_->EndEvent(event_handle_);
+ }
+ }
+
+ private:
+ ProfileBuffer* buffer_;
+ int32_t event_handle_;
+};
+
+class ScopedOperatorProfile {
+ public:
+  // Adds a profile event to the profile; the event begins at construction of
+  // this object and ends when the object goes out of scope.
+  // The lifetime of the tag must be at least the lifetime of the profiler.
+ ScopedOperatorProfile(Profiler* profiler, const char* tag, int node_index)
+ : buffer_(nullptr), event_handle_(0) {
+ if (profiler) {
+ buffer_ = profiler->GetProfileBuffer();
+ event_handle_ = buffer_->BeginEvent(
+ tag, ProfileEvent::EventType::OPERATOR_INVOKE_EVENT, node_index);
+ }
+ }
+
+ ~ScopedOperatorProfile() {
+ if (buffer_) {
+ buffer_->EndEvent(event_handle_);
+ }
+ }
+
+ private:
+ ProfileBuffer* buffer_;
+ int32_t event_handle_;
+};
+
+} // namespace profiling
+} // namespace tflite
+
+#define VARNAME_UNIQ(name, ctr) name##ctr
+
+#define SCOPED_OPERATOR_PROFILE(profiler, node_index) \
+ tflite::profiling::ScopedOperatorProfile VARNAME_UNIQ( \
+ _profile_, __COUNTER__)((profiler), "OpInvoke", (node_index))
+#else
+
+namespace tflite {
+namespace profiling {
+// A noop version of profiler when profiling is disabled.
+class Profiler {
+ public:
+ Profiler() {}
+ void StartProfiling() {}
+ void StopProfiling() {}
+ void Reset() {}
+ std::vector<const ProfileEvent*> GetProfileEvents() { return {}; }
+};
+} // namespace profiling
+} // namespace tflite
+
+#define SCOPED_OPERATOR_PROFILE(profiler, node_index)
+
+#endif // TFLITE_PROFILING_ENABLED
+
+#endif // TENSORFLOW_CONTRIB_LITE_PROFILING_PROFILER_H_
+
+// clang-format on
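The usage comment at the top of the header, restated as a compilable sketch. It is only meaningful when `TFLITE_PROFILING_ENABLED` is defined, since `ScopedProfile` exists only in that branch; the function names are illustrative.

```cpp
#include "profiling/profiler.h"

void DoWork(tflite::profiling::Profiler *profiler)
{
  // Named object: a bare temporary would end the event immediately.
  tflite::profiling::ScopedProfile profile(profiler, "DoWork");
  // ... Task1(); Task2(); ...
}

void ProfileWorker()
{
  tflite::profiling::Profiler profiler;
  profiler.StartProfiling();
  DoWork(&profiler);
  profiler.StopProfiling();
  // Pointers into the profiler's ring buffer; consume before more events land.
  auto events = profiler.GetProfileEvents();
  (void)events;
}
```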
diff --git a/libs/profiling/include/profiling/profiling.h b/runtimes/libs/profiling/include/profiling/profiling.h
index ee0df1338..ee0df1338 100644
--- a/libs/profiling/include/profiling/profiling.h
+++ b/runtimes/libs/profiling/include/profiling/profiling.h
diff --git a/runtimes/libs/profiling/include/profiling/time.h b/runtimes/libs/profiling/include/profiling/time.h
new file mode 100644
index 000000000..200563aa6
--- /dev/null
+++ b/runtimes/libs/profiling/include/profiling/time.h
@@ -0,0 +1,35 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This header is derived from the following file (in TensorFlow v1.12)
+// externals/tensorflow/tensorflow/lite/profiling/time.h
+#ifndef TENSORFLOW_CONTRIB_LITE_PROFILING_TIME_H_
+#define TENSORFLOW_CONTRIB_LITE_PROFILING_TIME_H_
+
+#include <cstdint>
+
+namespace tflite {
+namespace profiling {
+namespace time {
+uint64_t NowMicros();
+} // namespace time
+} // namespace profiling
+} // namespace tflite
+#endif // TENSORFLOW_CONTRIB_LITE_PROFILING_TIME_H_
+
+// clang-format on
diff --git a/runtimes/libs/profiling/src/profiling/time.cpp b/runtimes/libs/profiling/src/profiling/time.cpp
new file mode 100644
index 000000000..761023e6d
--- /dev/null
+++ b/runtimes/libs/profiling/src/profiling/time.cpp
@@ -0,0 +1,55 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This source file is derived from the following file (in TensorFlow v1.12)
+// externals/tensorflow/tensorflow/lite/profiling/time.cpp
+#include "profiling/time.h"
+
+#if defined(_MSC_VER)
+#include <chrono> // NOLINT(build/c++11)
+#else
+#include <sys/time.h>
+#endif
+
+namespace tflite {
+namespace profiling {
+namespace time {
+
+#if defined(_MSC_VER)
+
+uint64_t NowMicros() {
+ return std::chrono::duration_cast<std::chrono::microseconds>(
+ std::chrono::system_clock::now().time_since_epoch())
+ .count();
+}
+
+#else
+
+uint64_t NowMicros() {
+ struct timeval tv;
+ gettimeofday(&tv, nullptr);
+ return static_cast<uint64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
+}
+
+#endif // defined(_MSC_VER)
+
+} // namespace time
+} // namespace profiling
+} // namespace tflite
+
+// clang-format on
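NowMicros is a plain wall-clock microsecond counter (gettimeofday is not monotonic, so long measurements can be skewed by clock adjustments). Typical use is a begin/end pair, as in this small sketch:

```cpp
#include "profiling/time.h"

uint64_t measuredRegionMicros()
{
  const uint64_t begin_us = tflite::profiling::time::NowMicros();
  // ... region under measurement ...
  return tflite::profiling::time::NowMicros() - begin_us;
}
```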
diff --git a/runtimes/libs/rua/CMakeLists.txt b/runtimes/libs/rua/CMakeLists.txt
new file mode 100644
index 000000000..07ad9ea26
--- /dev/null
+++ b/runtimes/libs/rua/CMakeLists.txt
@@ -0,0 +1,4 @@
+add_subdirectory(core)
+add_subdirectory(dyn)
+add_subdirectory(anchor)
+add_subdirectory(shim)
diff --git a/runtimes/libs/rua/README.md b/runtimes/libs/rua/README.md
new file mode 100644
index 000000000..aea4ce033
--- /dev/null
+++ b/runtimes/libs/rua/README.md
@@ -0,0 +1,4 @@
+# rua
+
+_rua_ is a **RU**ntime **A**bstraction layer which allows us to switch between multiple
+Android NN runtimes at execution time (not loading time).
diff --git a/runtimes/libs/rua/anchor/CMakeLists.txt b/runtimes/libs/rua/anchor/CMakeLists.txt
new file mode 100644
index 000000000..6e65641f4
--- /dev/null
+++ b/runtimes/libs/rua/anchor/CMakeLists.txt
@@ -0,0 +1,9 @@
+file(GLOB_RECURSE SOURCES "src/*.cpp")
+
+add_library(nnfw_lib_rua_anchor STATIC ${SOURCES})
+set_target_properties(nnfw_lib_rua_anchor PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(nnfw_lib_rua_anchor PUBLIC include)
+target_link_libraries(nnfw_lib_rua_anchor PUBLIC nnfw_lib_rua_core)
+target_link_libraries(nnfw_lib_rua_anchor PRIVATE nnfw_lib_rua_dyn)
+target_link_libraries(nnfw_lib_rua_anchor PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_rua_anchor PRIVATE nnfw_coverage)
diff --git a/runtimes/libs/rua/anchor/include/rua/Anchor.h b/runtimes/libs/rua/anchor/include/rua/Anchor.h
new file mode 100644
index 000000000..f6056ab4e
--- /dev/null
+++ b/runtimes/libs/rua/anchor/include/rua/Anchor.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_RUA_ANCHOR_H__
+#define __NNFW_RUA_ANCHOR_H__
+
+#include <rua/Service.h>
+
+namespace rua
+{
+
+/**
+ * @brief Global Runtime Abstraction Context
+ *
+ * "set" will have global effect (within each process).
+ */
+struct Anchor
+{
+ static const RuntimeService *get(void);
+ static void set(const RuntimeService *svc);
+};
+
+} // namespace rua
+
+#endif // __NNFW_RUA_ANCHOR_H__
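A sketch of the intended pattern: the anchor resolves to the dynamic binder by default, and a test or embedder can swap in its own RuntimeService process-wide. `my_service` is a placeholder for any implementation of the Service.h interfaces.

```cpp
#include <rua/Anchor.h>

void withCustomRuntime(const rua::RuntimeService *my_service)
{
  const rua::RuntimeService *previous = rua::Anchor::get(); // DynamicBinder by default

  rua::Anchor::set(my_service);
  // ... every shim call below now routes through my_service ...

  rua::Anchor::set(previous); // restore for the rest of the process
}
```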
diff --git a/runtimes/libs/rua/anchor/src/Anchor.cpp b/runtimes/libs/rua/anchor/src/Anchor.cpp
new file mode 100644
index 000000000..a78cca19e
--- /dev/null
+++ b/runtimes/libs/rua/anchor/src/Anchor.cpp
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "rua/Anchor.h"
+#include <rua/DynamicBinder.h>
+
+namespace
+{
+
+const rua::RuntimeService *anchored_service = rua::DynamicBinder::get();
+
+} // namespace
+
+namespace rua
+{
+
+const RuntimeService *Anchor::get(void) { return anchored_service; }
+void Anchor::set(const RuntimeService *service) { anchored_service = service; }
+
+} // namespace rua
diff --git a/runtimes/libs/rua/core/CMakeLists.txt b/runtimes/libs/rua/core/CMakeLists.txt
new file mode 100644
index 000000000..f7d41f657
--- /dev/null
+++ b/runtimes/libs/rua/core/CMakeLists.txt
@@ -0,0 +1,3 @@
+add_library(nnfw_lib_rua_core INTERFACE)
+target_include_directories(nnfw_lib_rua_core INTERFACE include)
+target_link_libraries(nnfw_lib_rua_core INTERFACE nnfw_lib_nnapi)
diff --git a/runtimes/libs/rua/core/include/rua/Service.h b/runtimes/libs/rua/core/include/rua/Service.h
new file mode 100644
index 000000000..a79524a8a
--- /dev/null
+++ b/runtimes/libs/rua/core/include/rua/Service.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Service.h
+ * @brief Core abstraction that RUA depends on.
+ */
+#ifndef __NNFW_RUA_SERVICE_H__
+#define __NNFW_RUA_SERVICE_H__
+
+#include "NeuralNetworks.h"
+
+struct ANeuralNetworksMemory;
+struct ANeuralNetworksEvent;
+
+struct ANeuralNetworksModel;
+struct ANeuralNetworksCompilation;
+struct ANeuralNetworksExecution;
+
+namespace rua
+{
+
+/**
+ * @brief A wrapper for the ANeuralNetworksMemory API
+ */
+struct MemoryService
+{
+ virtual ~MemoryService() = default;
+
+ virtual int createFromFd(size_t size, int protect, int fd, size_t offset,
+ ANeuralNetworksMemory **memory) const = 0;
+
+ virtual void free(ANeuralNetworksMemory *memory) const = 0;
+};
+
+/**
+ * @brief A wrapper for the ANeuralNetworksModel API
+ */
+struct ModelService
+{
+ virtual ~ModelService() = default;
+
+ virtual int create(ANeuralNetworksModel **model) const = 0;
+
+ virtual int addOperand(ANeuralNetworksModel *model,
+ const ANeuralNetworksOperandType *type) const = 0;
+
+ virtual int setOperandValue(ANeuralNetworksModel *model, int32_t index, const void *buffer,
+ size_t length) const = 0;
+
+ virtual int setOperandValueFromMemory(ANeuralNetworksModel *model, int32_t index,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length) const = 0;
+
+ virtual int addOperation(ANeuralNetworksModel *model, ANeuralNetworksOperationType type,
+ uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) const = 0;
+
+ virtual int identifyInputsAndOutputs(ANeuralNetworksModel *model, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) const = 0;
+
+ virtual int relaxComputationFloat32toFloat16(ANeuralNetworksModel *model, bool allow) const = 0;
+
+ virtual int finish(ANeuralNetworksModel *model) const = 0;
+
+ virtual void free(ANeuralNetworksModel *model) const = 0;
+};
+
+/**
+ * @brief A wrapper for the ANeuralNetworksCompilation API
+ */
+struct CompilationService
+{
+ virtual ~CompilationService() = default;
+
+ virtual int create(ANeuralNetworksModel *model,
+ ANeuralNetworksCompilation **compilation) const = 0;
+
+ virtual int setPreference(ANeuralNetworksCompilation *compilation, int32_t preference) const = 0;
+ virtual int finish(ANeuralNetworksCompilation *compilation) const = 0;
+
+ virtual void free(ANeuralNetworksCompilation *compilation) const = 0;
+};
+
+/**
+ * @brief A wrapper for the ANeuralNetworksExecution API
+ */
+struct ExecutionService
+{
+ virtual ~ExecutionService() = default;
+
+ virtual int create(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksExecution **execution) const = 0;
+
+ virtual void free(ANeuralNetworksExecution *execution) const = 0;
+
+ virtual int setInput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, const void *buffer,
+ size_t length) const = 0;
+
+ virtual int setInputFromMemory(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length) const = 0;
+
+ virtual int setOutput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, void *buffer,
+ size_t length) const = 0;
+
+ virtual int setOutputFromMemory(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length) const = 0;
+
+ virtual int startCompute(ANeuralNetworksExecution *execution,
+ ANeuralNetworksEvent **event) const = 0;
+};
+
+/**
+ * @brief A wrapper for the ANeuralNetworksEvent API
+ */
+struct EventService
+{
+  virtual ~EventService() = default;
+
+  virtual int wait(ANeuralNetworksEvent *event) const = 0;
+ virtual void free(ANeuralNetworksEvent *event) const = 0;
+};
+
+/**
+ * @brief A wrapper for the Android NN runtime itself
+ */
+struct RuntimeService
+{
+ virtual ~RuntimeService() = default;
+
+ virtual const MemoryService *memory(void) const = 0;
+ virtual const ModelService *model(void) const = 0;
+ virtual const CompilationService *compilation(void) const = 0;
+ virtual const ExecutionService *execution(void) const = 0;
+ virtual const EventService *event(void) const = 0;
+};
+
+} // namespace rua
+
+#endif // __NNFW_RUA_SERVICE_H__
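Implementing one of these interfaces is mechanical. A minimal illustrative stub (not part of this patch) that treats every event as already signaled, e.g. for unit tests:

```cpp
#include <rua/Service.h>

// Hypothetical no-op backend for EventService.
struct NullEventService final : public rua::EventService
{
  int wait(ANeuralNetworksEvent *) const override { return ANEURALNETWORKS_NO_ERROR; }
  void free(ANeuralNetworksEvent *) const override { /* nothing to release */ }
};
```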
diff --git a/runtimes/libs/rua/dyn/CMakeLists.txt b/runtimes/libs/rua/dyn/CMakeLists.txt
new file mode 100644
index 000000000..3f9ac8928
--- /dev/null
+++ b/runtimes/libs/rua/dyn/CMakeLists.txt
@@ -0,0 +1,8 @@
+file(GLOB_RECURSE SOURCES "src/*.cpp")
+
+add_library(nnfw_lib_rua_dyn STATIC ${SOURCES})
+set_target_properties(nnfw_lib_rua_dyn PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(nnfw_lib_rua_dyn PUBLIC include)
+target_link_libraries(nnfw_lib_rua_dyn PUBLIC nnfw_lib_rua_core)
+target_link_libraries(nnfw_lib_rua_dyn PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_rua_dyn PRIVATE nnfw_coverage)
diff --git a/runtimes/libs/rua/dyn/include/rua/DynamicBinder.h b/runtimes/libs/rua/dyn/include/rua/DynamicBinder.h
new file mode 100644
index 000000000..8ce0c42f8
--- /dev/null
+++ b/runtimes/libs/rua/dyn/include/rua/DynamicBinder.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_RUA_DYNAMIC_BINDER_H__
+#define __NNFW_RUA_DYNAMIC_BINDER_H__
+
+#include <rua/Service.h>
+
+namespace rua
+{
+
+/**
+ * @brief Bind Android NN runtime implementation via dlopen & dlsym
+ */
+struct DynamicBinder
+{
+ static const rua::RuntimeService *get(void);
+};
+
+} // namespace rua
+
+#endif // __NNFW_RUA_DYNAMIC_BINDER_H__
diff --git a/runtimes/libs/rua/dyn/src/DynamicBinder.cpp b/runtimes/libs/rua/dyn/src/DynamicBinder.cpp
new file mode 100644
index 000000000..68dae6262
--- /dev/null
+++ b/runtimes/libs/rua/dyn/src/DynamicBinder.cpp
@@ -0,0 +1,353 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "rua/DynamicBinder.h"
+
+#include "NeuralNetworksLoadHelpers.h"
+
+using namespace rua;
+
+//
+// Memory
+//
+namespace
+{
+
+typedef int (*ANeuralNetworksMemory_createFromFd_fn)(size_t size, int protect, int fd,
+ size_t offset, ANeuralNetworksMemory **memory);
+
+typedef void (*ANeuralNetworksMemory_free_fn)(ANeuralNetworksMemory *memory);
+
+struct MemoryServiceImpl final : public MemoryService
+{
+ int createFromFd(size_t size, int protect, int fd, size_t offset,
+ ANeuralNetworksMemory **memory) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksMemory_createFromFd);
+ EXECUTE_FUNCTION_RETURN(size, protect, fd, offset, memory);
+ }
+
+ void free(ANeuralNetworksMemory *memory) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksMemory_free);
+ EXECUTE_FUNCTION(memory);
+ }
+};
+
+} // namespace
+
+//
+// Event
+//
+namespace
+{
+
+typedef int (*ANeuralNetworksEvent_wait_fn)(ANeuralNetworksEvent *event);
+
+typedef void (*ANeuralNetworksEvent_free_fn)(ANeuralNetworksEvent *event);
+
+struct EventServiceImpl final : public EventService
+{
+
+ int wait(ANeuralNetworksEvent *event) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksEvent_wait);
+ EXECUTE_FUNCTION_RETURN(event);
+ }
+
+ void free(ANeuralNetworksEvent *event) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksEvent_free);
+ EXECUTE_FUNCTION(event);
+ }
+};
+
+} // namespace
+
+//
+// Model
+//
+namespace
+{
+
+typedef int (*ANeuralNetworksModel_create_fn)(ANeuralNetworksModel **model);
+
+typedef int (*ANeuralNetworksModel_finish_fn)(ANeuralNetworksModel *model);
+
+typedef void (*ANeuralNetworksModel_free_fn)(ANeuralNetworksModel *model);
+
+typedef int (*ANeuralNetworksModel_addOperand_fn)(ANeuralNetworksModel *model,
+ const ANeuralNetworksOperandType *type);
+
+typedef int (*ANeuralNetworksModel_setOperandValue_fn)(ANeuralNetworksModel *model, int32_t index,
+ const void *buffer, size_t length);
+
+typedef int (*ANeuralNetworksModel_setOperandValueFromMemory_fn)(
+ ANeuralNetworksModel *model, int32_t index, const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length);
+
+typedef int (*ANeuralNetworksModel_addOperation_fn)(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationType type,
+ uint32_t inputCount, const uint32_t *inputs,
+ uint32_t outputCount, const uint32_t *outputs);
+
+typedef int (*ANeuralNetworksModel_identifyInputsAndOutputs_fn)(ANeuralNetworksModel *model,
+ uint32_t inputCount,
+ const uint32_t *inputs,
+ uint32_t outputCount,
+ const uint32_t *outputs);
+
+typedef int (*ANeuralNetworksModel_relaxComputationFloat32toFloat16_fn)(ANeuralNetworksModel *model,
+ bool allow);
+
+struct ModelServiceImpl final : public ModelService
+{
+ int create(ANeuralNetworksModel **model) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_create);
+ EXECUTE_FUNCTION_RETURN(model);
+ }
+
+ int addOperand(ANeuralNetworksModel *model, const ANeuralNetworksOperandType *type) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_addOperand);
+ EXECUTE_FUNCTION_RETURN(model, type);
+ }
+ int setOperandValue(ANeuralNetworksModel *model, int32_t index, const void *buffer,
+ size_t length) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_setOperandValue);
+ EXECUTE_FUNCTION_RETURN(model, index, buffer, length);
+ }
+
+ int setOperandValueFromMemory(ANeuralNetworksModel *model, int32_t index,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_setOperandValueFromMemory);
+ EXECUTE_FUNCTION_RETURN(model, index, memory, offset, length);
+ }
+
+ int addOperation(ANeuralNetworksModel *model, ANeuralNetworksOperationType type,
+ uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_addOperation);
+ EXECUTE_FUNCTION_RETURN(model, type, inputCount, inputs, outputCount, outputs);
+ }
+
+ int identifyInputsAndOutputs(ANeuralNetworksModel *model, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_identifyInputsAndOutputs);
+ EXECUTE_FUNCTION_RETURN(model, inputCount, inputs, outputCount, outputs);
+ }
+
+ int relaxComputationFloat32toFloat16(ANeuralNetworksModel *model, bool allow) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_relaxComputationFloat32toFloat16);
+ EXECUTE_FUNCTION_RETURN(model, allow);
+ }
+
+ int finish(ANeuralNetworksModel *model) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_finish);
+ EXECUTE_FUNCTION_RETURN(model);
+ }
+
+ void free(ANeuralNetworksModel *model) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksModel_free);
+ EXECUTE_FUNCTION(model);
+ }
+};
+
+} // namespace
+
+//
+// Compilation
+//
+namespace
+{
+
+typedef int (*ANeuralNetworksCompilation_create_fn)(ANeuralNetworksModel *model,
+ ANeuralNetworksCompilation **compilation);
+
+typedef void (*ANeuralNetworksCompilation_free_fn)(ANeuralNetworksCompilation *compilation);
+
+typedef int (*ANeuralNetworksCompilation_setPreference_fn)(ANeuralNetworksCompilation *compilation,
+ int32_t preference);
+
+typedef int (*ANeuralNetworksCompilation_finish_fn)(ANeuralNetworksCompilation *compilation);
+
+struct CompilationServiceImpl : public CompilationService
+{
+
+ int create(ANeuralNetworksModel *model, ANeuralNetworksCompilation **compilation) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksCompilation_create);
+ EXECUTE_FUNCTION_RETURN(model, compilation);
+ }
+
+ int setPreference(ANeuralNetworksCompilation *compilation, int32_t preference) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksCompilation_setPreference);
+ EXECUTE_FUNCTION_RETURN(compilation, preference);
+ }
+
+ int finish(ANeuralNetworksCompilation *compilation) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksCompilation_finish);
+ EXECUTE_FUNCTION_RETURN(compilation);
+ }
+
+ void free(ANeuralNetworksCompilation *compilation) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksCompilation_free);
+ EXECUTE_FUNCTION(compilation);
+ }
+};
+
+} // namespace
+
+//
+// Execution
+//
+namespace
+{
+
+typedef int (*ANeuralNetworksExecution_create_fn)(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksExecution **execution);
+
+typedef void (*ANeuralNetworksExecution_free_fn)(ANeuralNetworksExecution *execution);
+
+typedef int (*ANeuralNetworksExecution_setInput_fn)(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const void *buffer, size_t length);
+
+typedef int (*ANeuralNetworksExecution_setInputFromMemory_fn)(
+ ANeuralNetworksExecution *execution, int32_t index, const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset, size_t length);
+
+typedef int (*ANeuralNetworksExecution_setOutput_fn)(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ void *buffer, size_t length);
+
+typedef int (*ANeuralNetworksExecution_setOutputFromMemory_fn)(
+ ANeuralNetworksExecution *execution, int32_t index, const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset, size_t length);
+
+typedef int (*ANeuralNetworksExecution_startCompute_fn)(ANeuralNetworksExecution *execution,
+ ANeuralNetworksEvent **event);
+
+struct ExecutionServiceImpl : public ExecutionService
+{
+
+ int create(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksExecution **execution) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksExecution_create);
+ EXECUTE_FUNCTION_RETURN(compilation, execution);
+ }
+
+ int setInput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, const void *buffer,
+ size_t length) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksExecution_setInput);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, buffer, length);
+ }
+
+ int setInputFromMemory(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksExecution_setInputFromMemory);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, memory, offset, length);
+ }
+
+ int setOutput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, void *buffer, size_t length) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksExecution_setOutput);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, buffer, length);
+ }
+
+ int setOutputFromMemory(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksExecution_setOutputFromMemory);
+ EXECUTE_FUNCTION_RETURN(execution, index, type, memory, offset, length);
+ }
+
+ int startCompute(ANeuralNetworksExecution *execution, ANeuralNetworksEvent **event) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksExecution_startCompute);
+ EXECUTE_FUNCTION_RETURN(execution, event);
+ }
+
+ void free(ANeuralNetworksExecution *execution) const override
+ {
+ LOAD_FUNCTION(ANeuralNetworksExecution_free);
+ EXECUTE_FUNCTION(execution);
+ }
+};
+
+} // namespace
+
+//
+// Runtime
+//
+namespace
+{
+
+class RuntimeImpl final : public RuntimeService
+{
+public:
+ const MemoryService *memory(void) const override { return &_memory; }
+ const EventService *event(void) const override { return &_event; }
+
+ const ModelService *model(void) const override { return &_model; }
+ const CompilationService *compilation(void) const override { return &_compilation; }
+ const ExecutionService *execution(void) const override { return &_execution; }
+
+private:
+ MemoryServiceImpl _memory;
+ EventServiceImpl _event;
+
+ ModelServiceImpl _model;
+ CompilationServiceImpl _compilation;
+ ExecutionServiceImpl _execution;
+};
+
+} // namespace
+
+namespace rua
+{
+
+const RuntimeService *DynamicBinder::get(void)
+{
+ static RuntimeImpl runtime;
+ return &runtime;
+}
+
+} // namespace rua
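NeuralNetworksLoadHelpers.h is not part of this hunk, so the exact macro bodies are not shown here. A plausible reading of the `LOAD_FUNCTION` / `EXECUTE_FUNCTION_RETURN` pair used throughout this file, with `loadFunction` as an assumed dlsym-backed helper:

```cpp
// Cache the symbol lookup in a function-local static on first call ...
#define LOAD_FUNCTION(name) \
  static name##_fn fn = reinterpret_cast<name##_fn>(loadFunction(#name));

// ... then forward the arguments and propagate the return value.
#define EXECUTE_FUNCTION_RETURN(...) return fn(__VA_ARGS__);
```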
diff --git a/runtimes/libs/rua/shim/CMakeLists.txt b/runtimes/libs/rua/shim/CMakeLists.txt
new file mode 100644
index 000000000..814db5f7f
--- /dev/null
+++ b/runtimes/libs/rua/shim/CMakeLists.txt
@@ -0,0 +1,4 @@
+add_library(nnfw_lib_rua_shim INTERFACE)
+target_include_directories(nnfw_lib_rua_shim INTERFACE include)
+target_link_libraries(nnfw_lib_rua_shim INTERFACE nnfw_lib_rua_core)
+target_link_libraries(nnfw_lib_rua_shim INTERFACE nnfw_lib_rua_anchor)
diff --git a/runtimes/libs/rua/shim/include/rua/Shim.h b/runtimes/libs/rua/shim/include/rua/Shim.h
new file mode 100644
index 000000000..07a4bb2fd
--- /dev/null
+++ b/runtimes/libs/rua/shim/include/rua/Shim.h
@@ -0,0 +1,192 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_RUA_SHIM_H__
+#define __NNFW_RUA_SHIM_H__
+
+#include <rua/Anchor.h>
+
+//
+// Memory
+//
+inline int ANeuralNetworksMemory_createFromFd(size_t size, int protect, int fd, size_t offset,
+ ANeuralNetworksMemory **memory)
+{
+ return rua::Anchor::get()->memory()->createFromFd(size, protect, fd, offset, memory);
+}
+
+inline void ANeuralNetworksMemory_free(ANeuralNetworksMemory *memory)
+{
+ return rua::Anchor::get()->memory()->free(memory);
+}
+
+//
+// Event
+//
+inline int ANeuralNetworksEvent_wait(ANeuralNetworksEvent *event)
+{
+ return rua::Anchor::get()->event()->wait(event);
+}
+
+inline void ANeuralNetworksEvent_free(ANeuralNetworksEvent *event)
+{
+ return rua::Anchor::get()->event()->free(event);
+}
+
+//
+// Model
+//
+inline int ANeuralNetworksModel_create(ANeuralNetworksModel **model)
+{
+ return rua::Anchor::get()->model()->create(model);
+}
+
+inline int ANeuralNetworksModel_addOperand(ANeuralNetworksModel *model,
+ const ANeuralNetworksOperandType *type)
+{
+ return rua::Anchor::get()->model()->addOperand(model, type);
+}
+
+inline int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel *model, int32_t index,
+ const void *buffer, size_t length)
+{
+ return rua::Anchor::get()->model()->setOperandValue(model, index, buffer, length);
+}
+
+inline int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel *model,
+ int32_t index,
+ const ANeuralNetworksMemory *memory,
+ size_t offset, size_t length)
+{
+ return rua::Anchor::get()->model()->setOperandValueFromMemory(model, index, memory, offset,
+ length);
+}
+
+inline int ANeuralNetworksModel_addOperation(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationType type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs)
+{
+ return rua::Anchor::get()->model()->addOperation(model, type, inputCount, inputs, outputCount,
+ outputs);
+}
+
+inline int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel *model,
+ uint32_t inputCount,
+ const uint32_t *inputs,
+ uint32_t outputCount,
+ const uint32_t *outputs)
+{
+ return rua::Anchor::get()->model()->identifyInputsAndOutputs(model, inputCount, inputs,
+ outputCount, outputs);
+}
+
+inline int ANeuralNetworksModel_relaxComputationFloat32toFloat16(ANeuralNetworksModel *model,
+ bool allow)
+{
+ return rua::Anchor::get()->model()->relaxComputationFloat32toFloat16(model, allow);
+}
+
+inline int ANeuralNetworksModel_finish(ANeuralNetworksModel *model)
+{
+ return rua::Anchor::get()->model()->finish(model);
+}
+
+inline void ANeuralNetworksModel_free(ANeuralNetworksModel *model)
+{
+ return rua::Anchor::get()->model()->free(model);
+}
+
+//
+// Compilation
+//
+inline int ANeuralNetworksCompilation_create(ANeuralNetworksModel *model,
+ ANeuralNetworksCompilation **compilation)
+{
+ return rua::Anchor::get()->compilation()->create(model, compilation);
+}
+
+inline int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation *compilation,
+ int32_t preference)
+{
+ return rua::Anchor::get()->compilation()->setPreference(compilation, preference);
+}
+
+inline int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation *compilation)
+{
+ return rua::Anchor::get()->compilation()->finish(compilation);
+}
+
+inline void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation *compilation)
+{
+ return rua::Anchor::get()->compilation()->free(compilation);
+}
+
+//
+// Execution
+//
+inline int ANeuralNetworksExecution_create(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksExecution **execution)
+{
+ return rua::Anchor::get()->execution()->create(compilation, execution);
+}
+
+inline int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const void *buffer, size_t length)
+{
+ return rua::Anchor::get()->execution()->setInput(execution, index, type, buffer, length);
+}
+
+inline int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory,
+ size_t offset, size_t length)
+{
+ return rua::Anchor::get()->execution()->setInputFromMemory(execution, index, type, memory, offset,
+ length);
+}
+
+inline int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, void *buffer,
+ size_t length)
+{
+ return rua::Anchor::get()->execution()->setOutput(execution, index, type, buffer, length);
+}
+
+inline int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution *execution,
+ int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory,
+ size_t offset, size_t length)
+{
+ return rua::Anchor::get()->execution()->setOutputFromMemory(execution, index, type, memory,
+ offset, length);
+}
+
+inline int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution *execution,
+ ANeuralNetworksEvent **event)
+{
+ return rua::Anchor::get()->execution()->startCompute(execution, event);
+}
+
+inline void ANeuralNetworksExecution_free(ANeuralNetworksExecution *execution)
+{
+ return rua::Anchor::get()->execution()->free(execution);
+}
+
+#endif // __NNFW_RUA_SHIM_H__
diff --git a/runtimes/libs/srcn/CMakeLists.txt b/runtimes/libs/srcn/CMakeLists.txt
new file mode 100644
index 000000000..f6226670c
--- /dev/null
+++ b/runtimes/libs/srcn/CMakeLists.txt
@@ -0,0 +1,24 @@
+if(NOT BUILD_SRCN_KERNEL)
+ message(STATUS "SRCN kernel library build: disabled")
+ return()
+else(NOT BUILD_SRCN_KERNEL)
+  message(STATUS "SRCN kernel library build: enabled")
+endif()
+
+# Find and use pre-installed OpenMP
+find_package(OpenMP QUIET)
+if(NOT OpenMP_FOUND)
+ return()
+endif(NOT OpenMP_FOUND)
+
+file(GLOB_RECURSE SOURCES "*.cc")
+file(GLOB_RECURSE TESTS "*_test.cc")
+list(REMOVE_ITEM SOURCES ${TESTS})
+
+add_library(nnfw_lib_srcn STATIC ${SOURCES})
+target_include_directories(nnfw_lib_srcn PUBLIC include)
+target_link_libraries(nnfw_lib_srcn PRIVATE ${OpenMP_CXX_LIBRARIES})
+target_compile_options(nnfw_lib_srcn PRIVATE ${OpenMP_CXX_FLAGS})
+target_compile_definitions(nnfw_lib_srcn PRIVATE TIZEN) # ANDROID or TIZEN
+#target_compile_definitions(nnfw_lib_srcn PRIVATE NCNN) # Enable if ready
+set_target_properties(nnfw_lib_srcn PROPERTIES POSITION_INDEPENDENT_CODE ON)
diff --git a/runtimes/libs/srcn/include/srcn/conv_type.h b/runtimes/libs/srcn/include/srcn/conv_type.h
new file mode 100644
index 000000000..59152a094
--- /dev/null
+++ b/runtimes/libs/srcn/include/srcn/conv_type.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_CONV_TYPE_H__
+#define __NNFW_SRCN_CONV_TYPE_H__
+
+namespace nnfw
+{
+namespace srcn
+{
+
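+// Feature-map layout expected by the kernels: with row_major, data is stored
+// channel-major ([c][h][w], NCHW-like); with col_major, channels are interleaved
+// ([h][w][c], NHWC-like). This follows how the im2col routines index the data.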
+enum convType_t
+{
+ row_major = 0,
+ col_major
+};
+
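+// One convolution operand: width, height, channels, batch, and the float data.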
+struct convMat_t
+{
+ int w;
+ int h;
+ int c;
+ int n;
+ float *data;
+};
+
+struct convParams_t
+{
+ int kernel_w;
+ int kernel_h;
+ int stride_w;
+ int stride_h;
+ int dilation_w;
+ int dilation_h;
+ int padding; // used as an on/off flag by these kernels; pad_w/pad_h hold the amounts
+ int pad_w;
+ int pad_h;
+};
+
+struct winogradParams_t
+{
+ int kernel_w;
+ int kernel_h;
+ int stride_w;
+ int stride_h;
+ int dilation_w;
+ int dilation_h;
+ int batch;
+ int w;
+ int h;
+ int inch;
+ int outch;
+ int num_threads;
+ convType_t conv_type;
+ float *weight_data;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_CONV_TYPE_H__
diff --git a/runtimes/libs/srcn/include/srcn/srcn_conv.h b/runtimes/libs/srcn/include/srcn/srcn_conv.h
new file mode 100644
index 000000000..11130c0db
--- /dev/null
+++ b/runtimes/libs/srcn/include/srcn/srcn_conv.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_CONV_H__
+#define __NNFW_SRCN_CONV_H__
+
+#include "conv_type.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
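+// Convolution entry points. The Winograd path expects weights pre-transformed by
+// trans_weight2winograd() and released via winograd_release(); the sparse path
+// likewise pairs trans_weight2sparse() with sparse_release().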
+int check_winograd(winogradParams_t &params);
+
+float *trans_weight2winograd(winogradParams_t &params, unsigned int *size = NULL);
+
+void winograd_release(float *winograd_weight);
+
+void srcn_convolution2D(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, const float *winograd_weight, int num_threads,
+ convType_t conv_type);
+
+void srcn_deconvolution2D(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, int num_threads, convType_t conv_type);
+
+void *trans_weight2sparse(const convMat_t &weights_mat);
+
+void sparse_release(const int outch, void *ptr);
+
+void srcn_sparse_convolution2D(const convMat_t &in_mat, convMat_t &out_mat,
+ const convParams_t &in_param, const void *sparse_weight,
+ int num_threads, convType_t conv_type);
+
+void srcn_batch_convolution2D(const convMat_t &in_mat, const convMat_t &weights_mat,
+ convMat_t &out_mat, const convParams_t &in_param,
+ const float *winograd_weight, int num_threads, convType_t conv_type);
+
+void srcn_convolution2D_gpu(const convMat_t &in_mat, const convMat_t &weights_mat,
+ convMat_t &out_mat, const convParams_t &in_param, convType_t conv_type);
+
+void srcn_convolution2D_dpu(const convMat_t &in_mat, const convMat_t &weights_mat,
+ convMat_t &out_mat, const convParams_t &in_param, convType_t conv_type);
+
+void srcn_depthwise_conv(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convMat_t &bias, const convParams_t &in_param, int num_threads,
+ convType_t conv_type);
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_CONV_H__
diff --git a/runtimes/libs/srcn/src/common.h b/runtimes/libs/srcn/src/common.h
new file mode 100644
index 000000000..e8abc1440
--- /dev/null
+++ b/runtimes/libs/srcn/src/common.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_COMMON_H__
+#define __NNFW_SRCN_COMMON_H__
+
+#include <string.h>
+#include <limits>
+#include <arm_neon.h>
+
+#include "srcn/conv_type.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
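+// All GEMM operands are 4-byte floats; these sizes feed the cache-blocking math below.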
+#define sizeof_RhsScalar 4
+#define sizeof_LhsScalar 4
+#define sizeof_ResScalar 4
+
+#define MIN(a, b) ((a) > (b) ? (b) : (a))
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
+enum shardType_t
+{
+ shardByCol = 0,
+ shardByRow
+};
+
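+// Per-target cache-size and blocking constants: these model the L1/L2/L3 capacities
+// of the intended SoCs rather than querying the hardware. (The 16536 constants look
+// like they were meant to be 16384, i.e. a 16 KB L1; kept as written.)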
+#ifdef TIZEN
+#define L1_CACHE_SIZE (16536 * 2)
+#define L2_CACHE_SIZE (524288 * 2)
+#define L3_CACHE_SIZE (0) // no L3
+#define MAX_K (512)
+// single-thread
+#define GEN_COL (1440)
+// multi-threads
+#define MAX_COL (90)
+#define MIN_COL (32)
+#elif defined ANDROID
+#define L1_CACHE_SIZE (16536 * 4)
+#define L2_CACHE_SIZE (524288 * 8)
+#define L3_CACHE_SIZE (0) //(524288 * 8) //no L3
+#define MAX_K (512 * 2)
+// single-thread
+#define GEN_COL (1440)
+// multi-threads
+#if __aarch64__
+#define MAX_COL (1024)
+#else
+#define MAX_COL (90)
+#endif
+#define MIN_COL (32)
+#endif
+
+enum
+{
+ USE_COMMON_KERNEL = 0,
+ USE_12BIT_KERNEL,
+ USE_NONZERO_KERNEL
+};
+
+template <typename T> static T divup(const T &x, const T &y)
+{
+ return static_cast<T>((x + y - 1) / y);
+}
+
+#ifdef NCNN
+static inline size_t alignSize(size_t sz, int n) { return (sz + n - 1) / n * n; }
+
+static inline size_t alignBy2(size_t sz) { return (sz + 1) & -2; }
+#endif // NCNN
+
+static inline int32_t BitNot(int32_t a) { return ~a; }
+
+static inline int32_t MaskIfNonZero(int32_t a)
+{
+ static int32_t zero = 0;
+ return a ? BitNot(zero) : zero;
+}
+
+static inline int32_t BitAnd(int32_t a, int32_t b) { return a & b; }
+
+static inline int32_t ShiftRight(int32_t a, int offset) { return a >> offset; }
+
+static inline int32_t MaskIfLessThan(int32_t a, int32_t b) { return MaskIfNonZero(a < b); }
+
+static inline int32_t MaskIfGreaterThan(int32_t a, int32_t b) { return MaskIfNonZero(a > b); }
+
+static inline int32_t Add(int32_t a, int32_t b) { return a + b; }
+
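+// Rounding divide by a power of two: x / 2^exponent, rounded to nearest with ties
+// away from zero (the same scheme gemmlowp uses).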
+static inline int32_t RoundingDivideByPOT(int32_t x, int exponent)
+{
+ const int32_t mask = (1ll << exponent) - 1;
+ const int32_t zero = 0;
+ const int32_t one = 1;
+ const int32_t remainder = BitAnd(x, mask);
+ const int32_t threshold = Add(ShiftRight(mask, 1), BitAnd(MaskIfLessThan(x, zero), one));
+ return Add(ShiftRight(x, exponent), BitAnd(MaskIfGreaterThan(remainder, threshold), one));
+}
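+
+// Returns the high 32 bits of 2*a*b with rounding, saturating the one overflow case
+// (a == b == INT32_MIN). This is the scalar equivalent of ARM's vqrdmulh.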
+static inline int32_t SaturatingRoundingDoublingHighMul(int32_t a, int32_t b)
+{
+ bool overflow = a == b && a == std::numeric_limits<int32_t>::min();
+ int64_t a_64(a);
+ int64_t b_64(b);
+ int64_t ab_64 = a_64 * b_64;
+ int32_t nudge = ab_64 >= 0 ? (1 << 30) : (1 - (1 << 30));
+ int32_t ab_x2_high32 = static_cast<int32_t>((ab_64 + nudge) / (1ll << 31));
+ return overflow ? std::numeric_limits<int32_t>::max() : ab_x2_high32;
+}
+
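+// Applies a fixed-point multiplier: roughly x * quantized_multiplier * 2^shift, with a
+// pre-multiply left shift for positive shifts and a rounding right shift otherwise.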
+static inline int32_t MultiplyByQuantizedMultiplier(int32_t x, int32_t quantized_multiplier,
+ int shift)
+{
+ int left_shift = shift > 0 ? shift : 0;
+ int right_shift = shift > 0 ? 0 : -shift;
+ return RoundingDivideByPOT(
+ SaturatingRoundingDoublingHighMul(x * (1 << left_shift), quantized_multiplier), right_shift);
+}
+
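+// NEON counterparts of the scalar fixed-point helpers above, four lanes at a time.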
+static inline int32x4_t SaturatingRoundingDoublingHighMulV(int32x4_t a, int32x4_t b)
+{
+ return vqrdmulhq_s32(a, b);
+}
+
+static inline int32x4_t RoundingDivideByPOTV(int32x4_t x, int exponent)
+{
+ const int32x4_t shift_vec = vdupq_n_s32(-exponent);
+ const int32x4_t fixup = vshrq_n_s32(vandq_s32(x, shift_vec), 31);
+ const int32x4_t fixed_up_x = vqaddq_s32(x, fixup);
+ return vrshlq_s32(fixed_up_x, shift_vec);
+}
+
+static inline int32x4_t MultiplyByQuantizedMultiplierV(int32x4_t x, int32_t quantized_multiplier,
+ int shift)
+{
+ int left_shift = shift > 0 ? shift : 0;
+ int right_shift = shift > 0 ? 0 : -shift;
+ return RoundingDivideByPOTV(
+ SaturatingRoundingDoublingHighMulV(vrshlq_s32(x, vdupq_n_s32(left_shift)),
+ vdupq_n_s32(quantized_multiplier)),
+ right_shift);
+}
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_COMMON_H__
diff --git a/runtimes/libs/srcn/src/conv_sgemm_multithreads.cc b/runtimes/libs/srcn/src/conv_sgemm_multithreads.cc
new file mode 100644
index 000000000..91a4533bd
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_sgemm_multithreads.cc
@@ -0,0 +1,483 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef _OPENMP
+#include <omp.h>
+#endif
+
+#include <new> // std::nothrow, used for the buffer allocations below
+
+#include "srcn/conv_type.h"
+#include "common.h"
+#include "sgemm_kernel.h"
+#include "sgemm_pack.h"
+#include "conv_sgemm_multithreads.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
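+// Picks the micro-kernel tile (mr_ x nr_) for the architecture and layout, decides
+// whether threads shard the output by rows or columns, then sizes the cache blocks
+// bm_/bn_/bk_ against the L1/L2/L3 budgets. nm_/nn_/nk_ are the resulting block
+// counts and rm_/rn_/rk_ the remainders.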
+void conv_sgemm_multithreads::param_init()
+{
+#if __aarch64__
+ if (conv_type_ == row_major)
+ {
+ mr_ = 8;
+ nr_ = 12;
+ }
+ else if (conv_type_ == col_major)
+ {
+#ifdef BATCH_DILATION_FIX
+ if (out_mat_.n > 1)
+ {
+ mr_ = 24;
+ nr_ = 4;
+ }
+ else
+#endif // BATCH_DILATION_FIX
+ {
+ if (m_ > n_)
+ {
+ mr_ = 24;
+ nr_ = 4;
+ }
+ else
+ {
+ mr_ = 12;
+ nr_ = 8;
+ }
+ }
+ }
+#else // __aarch64__
+ if (conv_type_ == row_major)
+ {
+ mr_ = 6;
+ nr_ = 8;
+ }
+ else if (conv_type_ == col_major)
+ {
+ mr_ = 8;
+ nr_ = 6;
+ }
+#endif // __aarch64__
+ int col = n_;
+
+ if (m_ > n_)
+ {
+ shard_type_ = shardByRow;
+ col = m_;
+ }
+ else
+ {
+ shard_type_ = shardByCol;
+ }
+
+ int th_base = divup(col, num_threads_);
+
+ th_base = MIN(MAX(th_base, MIN_COL), MAX_COL);
+
+ int k_div = (nr_ * sizeof_RhsScalar);
+ int k_sub = (mr_ * nr_ * sizeof_ResScalar);
+
+ const int k_cache = MIN(divup((int)(L1_CACHE_SIZE - k_sub), (int)k_div * 2), MAX_K);
+ bk_ = MIN(k_cache, k_);
+
+ if (shard_type_ == shardByCol)
+ {
+ int m_sub = (bk_ * nr_ * sizeof_RhsScalar);
+ int m_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ if (L3_CACHE_SIZE)
+ m_div = (sizeof_LhsScalar * bk_ * 2);
+ int m_cache = divup((L2_CACHE_SIZE - m_sub), m_div);
+ bm_ = MIN(m_cache, m_);
+
+ bn_ = MIN(th_base, n_);
+ if (L3_CACHE_SIZE)
+ {
+ int n_sub = (bk_ * bm_ * sizeof_RhsScalar);
+ int n_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ int n_cache = divup((L3_CACHE_SIZE - n_sub), n_div);
+ bn_ = MIN(n_cache, bn_);
+ }
+ }
+ else
+ {
+ int n_sub = (bk_ * mr_ * sizeof_LhsScalar);
+ int n_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ if (L3_CACHE_SIZE)
+ n_div = (sizeof_LhsScalar * bk_ * 2);
+ int n_cache = divup((L2_CACHE_SIZE - n_sub), n_div);
+ bn_ = MIN(n_cache, n_);
+
+ bm_ = MIN(th_base, m_);
+ if (L3_CACHE_SIZE)
+ {
+ int m_sub = (bk_ * bn_ * sizeof_RhsScalar);
+ int m_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ int m_cache = divup((L3_CACHE_SIZE - m_sub), m_div);
+ bm_ = MIN(m_cache, bm_);
+ }
+ }
+
+ nm_ = divup(m_, bm_);
+ nn_ = divup(n_, bn_);
+ nk_ = divup(k_, bk_);
+
+ rm_ = m_ % bm_;
+ rn_ = n_ % bn_;
+ rk_ = k_ % bk_;
+}
+
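+// GEMM view of the convolution: m_ = output channels, n_ = output pixels
+// (batch * h * w, aligned when NCNN is defined), k_ = kernel_h * kernel_w * input
+// channels. Under WITH_DPU only half of the output columns are handled here,
+// apparently leaving the other half to the DPU.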
+conv_sgemm_multithreads::conv_sgemm_multithreads(const convMat_t &in_mat,
+ const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, int num_threads,
+ convType_t conv_type)
+    : in_mat_(in_mat), weights_mat_(weights_mat), out_mat_(out_mat), in_param_(in_param),
+      conv_type_(conv_type), num_threads_(num_threads)
+{
+ m_ = out_mat_.c;
+#ifdef NCNN
+#ifdef WITH_DPU
+ np_ = out_mat_.n * alignSize(out_mat_.h * out_mat_.w, 16 / sizeof(float));
+ n_ = (np_ + 1) / 2;
+#else // WITH_DPU
+ n_ = out_mat_.n * alignSize(out_mat_.h * out_mat_.w, 16 / sizeof(float));
+#endif // WITH_DPU
+#else // NCNN
+#ifdef WITH_DPU
+ np_ = out_mat_.n * out_mat_.w * out_mat_.h;
+ n_ = (np_ + 1) / 2;
+#else // WITH_DPU
+ n_ = out_mat_.n * out_mat_.w * out_mat_.h;
+#endif // WITH_DPU
+#endif // NCNN
+ k_ = in_param_.kernel_h * in_param_.kernel_w * in_mat.c;
+
+ param_init();
+
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ error_ = 0;
+
+ // Use nothrow new so the failure check below is meaningful (plain new would throw).
+ if (shard_type_ == shardByCol)
+ {
+ plhs_buffer_ = new (std::nothrow) float[lhs_stride * 1 * nm_];
+ prhs_buffer_ = new (std::nothrow) float[rhs_stride * num_threads_];
+ }
+ else
+ {
+ plhs_buffer_ = new (std::nothrow) float[lhs_stride * num_threads_];
+ prhs_buffer_ = new (std::nothrow) float[rhs_stride * 1 * nn_];
+ }
+
+ if (plhs_buffer_ == NULL || prhs_buffer_ == NULL)
+ {
+ error_ = 1;
+ }
+
+ if (in_param_.kernel_w != 1 || in_param_.kernel_h != 1 || in_param_.stride_w != 1 ||
+ in_param_.stride_h != 1 || in_param_.padding != 0)
+ {
+ need_im2col_ = 1;
+ }
+ else
+ {
+ need_im2col_ = 0;
+ }
+
+ omp_set_num_threads(num_threads_);
+}
+
+conv_sgemm_multithreads::~conv_sgemm_multithreads()
+{
+ if (plhs_buffer_)
+ delete[] plhs_buffer_;
+ if (prhs_buffer_)
+ delete[] prhs_buffer_;
+}
+
+void conv_sgemm_multithreads::run()
+{
+ if (error_)
+ return;
+
+ if (shard_type_ == shardByCol && conv_type_ == col_major)
+ {
+ compute_colmajor_colshard();
+ }
+ else if (shard_type_ == shardByRow && conv_type_ == col_major)
+ {
+ compute_colmajor_rowshard();
+ }
+ else if (shard_type_ == shardByCol && conv_type_ == row_major)
+ {
+ compute_rowmajor_colshard();
+ }
+ else if (shard_type_ == shardByRow && conv_type_ == row_major)
+ {
+ compute_rowmajor_rowshard();
+ }
+}
+
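+// Column-sharded, row-major case: for each k-panel, the weight (LHS) blocks are packed
+// once in parallel and reused, while each thread packs its own im2col (RHS) panel and
+// multiplies it against every packed LHS block.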
+void conv_sgemm_multithreads::compute_rowmajor_colshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &weights_mat_.data[i * bm_ * k_ + l * bk_],
+ &plhs_buffer_[i * lhs_stride]);
+ }
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ int thread_num = omp_get_thread_num();
+ // float *plhs_ptr = &plhs_buffer_[lhs_stride * thread_num];
+ float *prhs_ptr = &prhs_buffer_[rhs_stride * thread_num];
+
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_rowmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ else
+ {
+ _pack_rowmajor_image_rhs_batch(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ }
+ else
+ {
+#ifdef WITH_DPU
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, np_, &in_mat_.data[n_ + l * bk_ * np_ + j * bn_],
+ prhs_ptr);
+#else
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &in_mat_.data[l * bk_ * n_ + j * bn_],
+ prhs_ptr);
+#endif
+ }
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+#ifdef WITH_DPU
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, &plhs_buffer_[i * lhs_stride],
+ prhs_ptr, &out_mat_.data[n_ + i * bm_ * np_ + j * bn_],
+ l, np_, bk);
+#else // WITH_DPU
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, &plhs_buffer_[i * lhs_stride],
+ prhs_ptr, &out_mat_.data[i * bm_ * n_ + j * bn_], l, n_,
+ bk);
+#endif // WITH_DPU
+ }
+ }
+ }
+}
+
+void conv_sgemm_multithreads::compute_rowmajor_rowshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_rowmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_),
+ &prhs_buffer_[j * rhs_stride]);
+ }
+ else
+ {
+ _pack_rowmajor_image_rhs_batch(
+ nr_, bn, bk, l * bk_, j * bn_, const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), &prhs_buffer_[j * rhs_stride]);
+ }
+ }
+ else
+ {
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &in_mat_.data[l * bk_ * n_ + j * bn_],
+ &prhs_buffer_[j * rhs_stride]);
+ }
+ }
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ int thread_num = omp_get_thread_num();
+ float *plhs_ptr = &plhs_buffer_[lhs_stride * thread_num];
+
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &weights_mat_.data[i * bm_ * k_ + l * bk_],
+ plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _sgemm_rowmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr,
+ &prhs_buffer_[j * rhs_stride],
+ &out_mat_.data[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+}
+
+void conv_sgemm_multithreads::compute_colmajor_colshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &weights_mat_.data[l * bk_ * m_ + i * bm_],
+ &plhs_buffer_[i * lhs_stride]);
+ }
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ int thread_num = omp_get_thread_num();
+ float *prhs_ptr = &prhs_buffer_[rhs_stride * thread_num];
+
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_colmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ else
+ {
+ _pack_colmajor_image_rhs_batch(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ }
+ else
+ {
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &in_mat_.data[j * bn_ * k_ + l * bk_],
+ prhs_ptr);
+ }
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _sgemm_colmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, &plhs_buffer_[i * lhs_stride],
+ prhs_ptr, &out_mat_.data[j * bn_ * m_ + i * bm_], l, m_,
+ bk);
+ }
+ }
+ }
+}
+
+void conv_sgemm_multithreads::compute_colmajor_rowshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_colmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_),
+ &prhs_buffer_[j * rhs_stride]);
+ }
+ else
+ {
+ _pack_colmajor_image_rhs_batch(
+ nr_, bn, bk, l * bk_, j * bn_, const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), &prhs_buffer_[j * rhs_stride]);
+ }
+ }
+ else
+ {
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &in_mat_.data[j * bn_ * k_ + l * bk_],
+ &prhs_buffer_[j * rhs_stride]);
+ }
+ }
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ int thread_num = omp_get_thread_num();
+ float *plhs_ptr = &plhs_buffer_[lhs_stride * thread_num];
+
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &weights_mat_.data[l * bk_ * m_ + i * bm_],
+ plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _sgemm_colmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr,
+ &prhs_buffer_[j * rhs_stride],
+ &out_mat_.data[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/conv_sgemm_multithreads.h b/runtimes/libs/srcn/src/conv_sgemm_multithreads.h
new file mode 100644
index 000000000..8cb526916
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_sgemm_multithreads.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_CONV_SGEMM_MULTITHREADS_H__
+#define __NNFW_SRCN_CONV_SGEMM_MULTITHREADS_H__
+
+#include "srcn/conv_type.h"
+#include "common.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+class conv_sgemm_multithreads
+{
+public:
+ conv_sgemm_multithreads(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, int num_threads, convType_t conv_type);
+ ~conv_sgemm_multithreads();
+
+ void run();
+
+private:
+ void param_init();
+
+ void compute_rowmajor_colshard();
+ void compute_rowmajor_rowshard();
+ void compute_colmajor_colshard();
+ void compute_colmajor_rowshard();
+
+ const convMat_t in_mat_;
+ const convMat_t weights_mat_;
+ convMat_t out_mat_;
+ const convParams_t in_param_;
+ convType_t conv_type_;
+ int num_threads_;
+
+ int m_;
+ int n_;
+#ifdef WITH_DPU
+ int np_;
+#endif
+ int k_;
+
+ int bm_;
+ int bn_;
+ int bk_;
+
+ int rm_;
+ int rn_;
+ int rk_;
+
+ int nm_;
+ int nn_;
+ int nk_;
+
+ int mr_;
+ int nr_;
+
+ int need_im2col_;
+ shardType_t shard_type_;
+
+ float *prhs_buffer_;
+ float *plhs_buffer_;
+
+ int error_;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_CONV_SGEMM_MULTITHREADS_H__
diff --git a/runtimes/libs/srcn/src/conv_sgemm_singlethread.cc b/runtimes/libs/srcn/src/conv_sgemm_singlethread.cc
new file mode 100644
index 000000000..4cbbf217f
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_sgemm_singlethread.cc
@@ -0,0 +1,366 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdexcept>
+
+#include "common.h"
+#include "sgemm_kernel.h"
+#include "sgemm_pack.h"
+#include "conv_sgemm_singlethread.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
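+// Shard by rows when the output is much wider than tall (n_ > 3 * m_), otherwise by
+// columns; the micro-kernel tile then follows the architecture and layout, as in the
+// multithreaded path.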
+void conv_sgemm_singlethread::param_init()
+{
+ if (n_ > 3 * m_)
+ {
+ shard_type_ = shardByRow;
+ }
+ else
+ {
+ shard_type_ = shardByCol;
+ }
+
+#if __aarch64__
+ if (conv_type_ == row_major)
+ {
+ if (shard_type_ == shardByRow)
+ {
+ mr_ = 8;
+ nr_ = 12;
+ }
+ else
+ {
+ mr_ = 12;
+ nr_ = 8;
+ }
+ }
+ else if (conv_type_ == col_major)
+ {
+#ifndef BATCH_DILATION_FIX
+ mr_ = 12;
+ nr_ = 8;
+#else // BATCH_DILATION_FIX
+ // TODO: batch(dilation) + inw * inh
+ if (out_mat_.n > 1)
+ {
+ mr_ = 24;
+ nr_ = 4;
+ }
+ else
+ {
+ mr_ = 12;
+ nr_ = 8;
+ }
+#endif // BATCH_DILATION_FIX
+ }
+#else // __aarch64__
+ if (conv_type_ == row_major)
+ {
+ mr_ = 6;
+ nr_ = 8;
+ }
+ else if (conv_type_ == col_major)
+ {
+ mr_ = 8;
+ nr_ = 6;
+ }
+#endif // __aarch64__
+
+ int k_div = (nr_ * sizeof_RhsScalar);
+ int k_sub = (mr_ * nr_ * sizeof_ResScalar);
+
+ const int k_cache = MIN(divup((int)(L1_CACHE_SIZE - k_sub), (int)k_div), MAX_K);
+ bk_ = MIN(k_cache, k_);
+
+ if (shard_type_ == shardByCol)
+ {
+ int m_sub = (bk_ * nr_ * sizeof_RhsScalar);
+ int m_cache = divup((L2_CACHE_SIZE - m_sub), (sizeof_LhsScalar * bk_ * 2));
+ bm_ = MIN(m_cache, m_);
+
+ bn_ = MIN(GEN_COL, n_);
+ if (L3_CACHE_SIZE)
+ {
+ int n_sub = (bk_ * bm_ * sizeof_RhsScalar);
+ int n_cache = divup((L3_CACHE_SIZE - n_sub), (sizeof_LhsScalar * bk_ * 2));
+ bn_ = MIN(n_cache, bn_);
+ }
+ }
+ else
+ {
+ int n_sub = (bk_ * mr_ * sizeof_RhsScalar);
+ int n_cache = divup((L2_CACHE_SIZE - n_sub), (sizeof_LhsScalar * bk_ * 2));
+ bn_ = MIN(n_cache, n_);
+
+ bm_ = MIN(GEN_COL, m_);
+ if (L3_CACHE_SIZE)
+ {
+ int m_sub = (bk_ * bn_ * sizeof_RhsScalar);
+ int m_cache = divup((L3_CACHE_SIZE - m_sub), (sizeof_LhsScalar * bk_ * 2));
+ bm_ = MIN(m_cache, bm_);
+ }
+ }
+
+ nm_ = divup(m_, bm_);
+ nn_ = divup(n_, bn_);
+ nk_ = divup(k_, bk_);
+
+ rm_ = m_ % bm_;
+ rn_ = n_ % bn_;
+ rk_ = k_ % bk_;
+}
+
+conv_sgemm_singlethread::conv_sgemm_singlethread(const convMat_t &in_mat,
+ const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, convType_t conv_type)
+ : in_mat_(in_mat), weights_mat_(weights_mat), out_mat_(out_mat), in_param_(in_param),
+ conv_type_(conv_type)
+{
+ m_ = out_mat_.c;
+#ifdef NCNN
+ n_ = out_mat_.n * alignSize(out_mat_.h * out_mat_.w, 16 / sizeof(float));
+#else
+ n_ = out_mat_.n * out_mat_.w * out_mat_.h;
+#endif
+ k_ = in_param_.kernel_h * in_param_.kernel_w * in_mat.c;
+
+ param_init();
+
+ if (in_param_.kernel_w != 1 || in_param_.kernel_h != 1 || in_param_.stride_w != 1 ||
+ in_param_.stride_h != 1 || in_param_.padding != 0 || out_mat_.n > 1)
+ {
+ need_im2col_ = 1;
+ }
+ else
+ {
+ need_im2col_ = 0;
+ }
+}
+
+conv_sgemm_singlethread::~conv_sgemm_singlethread() {}
+
+void conv_sgemm_singlethread::run()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float *plhs_ptr = new float[mstride * bk_];
+ float *prhs_ptr = new float[nstride * bk_];
+
+ if (conv_type_ == row_major)
+ {
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_rowmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ else
+ {
+ _pack_rowmajor_image_rhs_batch(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ }
+ else
+ {
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &in_mat_.data[l * bk_ * n_ + j * bn_],
+ prhs_ptr);
+ }
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &weights_mat_.data[i * bm_ * k_ + l * bk_],
+ plhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &out_mat_.data[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &weights_mat_.data[i * bm_ * k_ + l * bk_],
+ plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_rowmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ else
+ {
+ _pack_rowmajor_image_rhs_batch(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ }
+ else
+ {
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &in_mat_.data[l * bk_ * n_ + j * bn_],
+ prhs_ptr);
+ }
+
+ _sgemm_rowmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &out_mat_.data[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+ throw std::runtime_error{"Invalid shard type!"};
+ }
+ }
+ else if (conv_type_ == col_major)
+ {
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_colmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ else
+ {
+ _pack_colmajor_image_rhs_batch(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ }
+ else
+ {
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &in_mat_.data[j * bn_ * k_ + l * bk_],
+ prhs_ptr);
+ }
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &weights_mat_.data[l * bk_ * m_ + i * bm_],
+ plhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &out_mat_.data[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &weights_mat_.data[l * bk_ * m_ + i * bm_],
+ plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ if (need_im2col_)
+ {
+ if (out_mat_.n == 1)
+ {
+ _pack_colmajor_image_rhs(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ else
+ {
+ _pack_colmajor_image_rhs_batch(nr_, bn, bk, l * bk_, j * bn_,
+ const_cast<convMat_t *>(&in_mat_), &out_mat_,
+ const_cast<convParams_t *>(&in_param_), prhs_ptr);
+ }
+ }
+ else
+ {
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &in_mat_.data[j * bn_ * k_ + l * bk_],
+ prhs_ptr);
+ }
+
+ _sgemm_colmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &out_mat_.data[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+ throw std::runtime_error{"Invalid shard type!"};
+ }
+ }
+ else
+ {
+ throw std::runtime_error{"Invalid conv type!"};
+ }
+
+ delete[] plhs_ptr;
+ delete[] prhs_ptr;
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/conv_sgemm_singlethread.h b/runtimes/libs/srcn/src/conv_sgemm_singlethread.h
new file mode 100644
index 000000000..06713e604
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_sgemm_singlethread.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_CONV_SGEMM_SINGLETHREAD_H__
+#define __NNFW_SRCN_CONV_SGEMM_SINGLETHREAD_H__
+
+#include "srcn/conv_type.h"
+#include "common.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+class conv_sgemm_singlethread
+{
+public:
+ conv_sgemm_singlethread(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, convType_t conv_type);
+ ~conv_sgemm_singlethread();
+
+ void run();
+
+private:
+ void param_init();
+
+ const convMat_t in_mat_;
+ const convMat_t weights_mat_;
+ convMat_t out_mat_;
+ const convParams_t in_param_;
+ convType_t conv_type_;
+
+ int m_;
+ int n_;
+ int k_;
+
+ int bm_;
+ int bn_;
+ int bk_;
+
+ int rm_;
+ int rn_;
+ int rk_;
+
+ int nm_;
+ int nn_;
+ int nk_;
+
+ int mr_;
+ int nr_;
+
+ int need_im2col_;
+
+ shardType_t shard_type_;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_CONV_SGEMM_SINGLETHREAD_H__
diff --git a/runtimes/libs/srcn/src/conv_sparse.cc b/runtimes/libs/srcn/src/conv_sparse.cc
new file mode 100644
index 000000000..10e2a2b93
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_sparse.cc
@@ -0,0 +1,271 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef _OPENMP
+#include <omp.h>
+#endif
+
+#include <stdexcept>
+
+#include "common.h"
+#include "sgemm_kernel.h"
+#include "sgemm_pack.h"
+#include "conv_sparse.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+void conv_sparse::param_init()
+{
+#ifdef NCNN
+ n_ = alignSize(out_mat_.h * out_mat_.w, 16 / sizeof(float));
+#else
+ n_ = out_mat_.w * out_mat_.h;
+#endif
+
+ bch_ = BCH;
+ nch_ = (out_mat_.c + bch_ - 1) / bch_;
+
+ rch_ = out_mat_.c % bch_;
+
+ bn_ = MIN(n_, L1_CACHE_SIZE / (sizeof(float) * 2));
+ bn_ = MIN(bn_, (L2_CACHE_SIZE / 2 - bch_ * sizeof(weight_data_t)) / ((bch_ + 1) * sizeof(float)) /
+ num_threads_);
+ nn_ = (n_ + bn_ - 1) / bn_;
+ rn_ = n_ % bn_;
+
+ if (in_param_.kernel_w != 1 || in_param_.kernel_h != 1 || in_param_.stride_w != 1 ||
+ in_param_.stride_h != 1 || in_param_.padding != 0)
+ {
+ need_im2col_ = 1;
+ }
+ else
+ {
+ need_im2col_ = 0;
+ }
+}
+
+conv_sparse::conv_sparse(const convMat_t &in_mat, convMat_t &out_mat, const convParams_t &in_param,
+ const sparse_weight_t *weights, int num_threads, convType_t conv_type)
+ : in_mat_(in_mat), out_mat_(out_mat), in_param_(in_param), weights_(weights),
+ num_threads_(num_threads), conv_type_(conv_type)
+{
+ param_init();
+}
+
+conv_sparse::~conv_sparse() {}
+
+void conv_sparse::compute_singlethread()
+{
+ if (need_im2col_)
+ {
+ for (int i = 0; i < nch_; i++)
+ {
+ const sparse_weight_t *weight_ptr = weights_ + i;
+ const int mxk = weight_ptr->mxk;
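+ // NOTE: variable-length arrays such as prhs_ptr[bn_] are a GCC/Clang extension, not standard C++.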
+ float prhs_ptr[bn_];
+
+ for (int j = 0; j < nn_; j++)
+ {
+ int k = -1;
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ weight_data_t *lhs_ptr = weight_ptr->wdata;
+
+ for (int l = 0; l < mxk; l++)
+ {
+ if (k != lhs_ptr->k)
+ {
+ k = lhs_ptr->k;
+ _sparse_pack_rowmajor_image(bn, k, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_),
+ prhs_ptr);
+ }
+
+ // Why is n_ = 64 x 64 so much slower on Tizen?
+ _sparse_sgemm_kernel(bn, lhs_ptr->data, prhs_ptr,
+ &out_mat_.data[lhs_ptr->m * n_ + j * bn_]);
+
+ lhs_ptr++;
+ }
+ }
+ }
+ }
+ else
+ {
+ for (int i = 0; i < nch_; i++)
+ {
+ const sparse_weight_t *weight_ptr = weights_ + i;
+ const int mxk = weight_ptr->mxk;
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ weight_data_t *lhs_ptr = weight_ptr->wdata;
+ float *rhs_ptr = in_mat_.data + j * bn_;
+
+ for (int l = 0; l < mxk; l++)
+ {
+ // Why is n_ = 64 x 64 so much slower on Tizen?
+ _sparse_sgemm_kernel(bn, lhs_ptr->data, rhs_ptr + lhs_ptr->k * n_,
+ &out_mat_.data[lhs_ptr->m * n_ + j * bn_]);
+
+ lhs_ptr++;
+ }
+ }
+ }
+ }
+}
+
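+// Parallelize over channel blocks when they outnumber the threads (or the column
+// blocks); otherwise parallelize over the column blocks inside each channel block.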
+void conv_sparse::compute_multithreads()
+{
+ omp_set_num_threads(num_threads_);
+
+ if (nch_ >= num_threads_ || nch_ >= nn_)
+ {
+ if (need_im2col_)
+ {
+#pragma omp parallel for
+ for (int i = 0; i < nch_; i++)
+ {
+ const sparse_weight_t *weight_ptr = weights_ + i;
+ const int mxk = weight_ptr->mxk;
+ float prhs_ptr[bn_];
+
+ for (int j = 0; j < nn_; j++)
+ {
+ int k = -1;
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ weight_data_t *lhs_ptr = weight_ptr->wdata;
+
+ for (int l = 0; l < mxk; l++)
+ {
+ if (k != lhs_ptr->k)
+ {
+ k = lhs_ptr->k;
+ _sparse_pack_rowmajor_image(bn, k, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_),
+ prhs_ptr);
+ }
+
+ _sparse_sgemm_kernel(bn, lhs_ptr->data, prhs_ptr,
+ &out_mat_.data[lhs_ptr->m * n_ + j * bn_]);
+
+ lhs_ptr++;
+ }
+ }
+ }
+ }
+ else
+ {
+#pragma omp parallel for
+ for (int i = 0; i < nch_; i++)
+ {
+ const sparse_weight_t *weight_ptr = weights_ + i;
+ const int mxk = weight_ptr->mxk;
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ weight_data_t *lhs_ptr = weight_ptr->wdata;
+ float *rhs_ptr = in_mat_.data + j * bn_;
+
+ for (int l = 0; l < mxk; l++)
+ {
+ _sparse_sgemm_kernel(bn, lhs_ptr->data, rhs_ptr + lhs_ptr->k * n_,
+ &out_mat_.data[lhs_ptr->m * n_ + j * bn_]);
+
+ lhs_ptr++;
+ }
+ }
+ }
+ }
+ }
+ else
+ {
+ if (need_im2col_)
+ {
+ for (int i = 0; i < nch_; i++)
+ {
+ const sparse_weight_t *weight_ptr = weights_ + i;
+ const int mxk = weight_ptr->mxk;
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ int k = -1;
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ weight_data_t *lhs_ptr = weight_ptr->wdata;
+ float prhs_ptr[bn];
+
+ for (int l = 0; l < mxk; l++)
+ {
+ if (k != lhs_ptr->k)
+ {
+ k = lhs_ptr->k;
+ _sparse_pack_rowmajor_image(bn, k, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_),
+ prhs_ptr);
+ }
+
+ _sparse_sgemm_kernel(bn, lhs_ptr->data, prhs_ptr,
+ &out_mat_.data[lhs_ptr->m * n_ + j * bn_]);
+
+ lhs_ptr++;
+ }
+ }
+ }
+ }
+ else
+ {
+ for (int i = 0; i < nch_; i++)
+ {
+ const sparse_weight_t *weight_ptr = weights_ + i;
+ const int mxk = weight_ptr->mxk;
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ weight_data_t *lhs_ptr = weight_ptr->wdata;
+ float *rhs_ptr = in_mat_.data + j * bn_;
+
+ for (int l = 0; l < mxk; l++)
+ {
+ _sparse_sgemm_kernel(bn, lhs_ptr->data, rhs_ptr + lhs_ptr->k * n_,
+ &out_mat_.data[lhs_ptr->m * n_ + j * bn_]);
+
+ lhs_ptr++;
+ }
+ }
+ }
+ }
+ }
+}
+
+void conv_sparse::run()
+{
+ if (num_threads_ == 1)
+ compute_singlethread();
+ else if (num_threads_ > 1)
+ compute_multithreads();
+ else
+ throw std::runtime_error{"Invalid thread number."};
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/conv_sparse.h b/runtimes/libs/srcn/src/conv_sparse.h
new file mode 100644
index 000000000..3541ff131
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_sparse.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_CONV_SPARSE_H__
+#define __NNFW_SRCN_CONV_SPARSE_H__
+
+#include "srcn/conv_type.h"
+#include "common.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+#define BCH 128
+
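+// Sparse weight format: each entry holds one nonzero weight together with its output
+// channel (m) and depth (k) coordinates; entries are grouped into blocks of BCH output
+// channels, each block carrying mxk entries.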
+typedef struct
+{
+ short m;
+ short k;
+ float data;
+} weight_data_t;
+
+typedef struct
+{
+ int mxk;
+ weight_data_t *wdata;
+} sparse_weight_t;
+
+class conv_sparse
+{
+public:
+ conv_sparse(const convMat_t &in_mat, convMat_t &out_mat, const convParams_t &in_param,
+ const sparse_weight_t *weights, int num_threads, convType_t conv_type);
+ ~conv_sparse();
+
+ void run();
+
+private:
+ void param_init();
+ void compute_singlethread();
+ void compute_multithreads();
+
+ const convMat_t in_mat_;
+ convMat_t out_mat_;
+ const convParams_t in_param_;
+ const sparse_weight_t *weights_;
+ int num_threads_;
+ convType_t conv_type_;
+
+ int n_;
+ int bn_;
+ int rn_;
+ int nn_;
+
+ int bch_;
+ int rch_;
+ int nch_;
+
+ int need_im2col_;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_CONV_SPARSE_H__
diff --git a/runtimes/libs/srcn/src/conv_winograd.cc b/runtimes/libs/srcn/src/conv_winograd.cc
new file mode 100644
index 000000000..cc114981f
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_winograd.cc
@@ -0,0 +1,341 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common.h"
+#include "conv_winograd.h"
+
+namespace std
+{
+template <typename Dtype> static inline Dtype max(Dtype a, Dtype b)
+{
+ if (a > b)
+ return a;
+ else
+ return b;
+}
+}
+
+namespace nnfw
+{
+namespace srcn
+{
+
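+// The Winograd path only supports square 3x3 or 5x5 kernels with stride 1 and
+// pre-transformed weights. Input tiles are M x M, output tiles (M - N + 1)^2, and
+// ntiles_* are chosen so the tiles cover the padded input.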
+void conv_winograd::param_init()
+{
+ if ((in_param_.kernel_w != in_param_.kernel_h) || (in_param_.stride_w != in_param_.stride_h) ||
+ (in_param_.kernel_w != 3 && in_param_.kernel_w != 5) || (in_param_.stride_w != 1) ||
+ (!winograd_weight_))
+ {
+ error_ = 1;
+ return;
+ }
+
+ int M, N;
+ const int w = in_mat_.w;
+ const int h = in_mat_.h;
+ const int outw = out_mat_.w;
+ const int outh = out_mat_.h;
+ const int pad_w = in_param_.pad_w;
+ const int pad_h = in_param_.pad_h;
+
+ if (in_param_.kernel_w == 3)
+ {
+ M = winograd_para_3x3s1::M;
+ N = winograd_para_3x3s1::N;
+ }
+ else
+ {
+ M = winograd_para_5x5s1::M;
+ N = winograd_para_5x5s1::N;
+ }
+
+ tile_h_in_ = tile_w_in_ = M;
+ tile_h_out_ = tile_h_in_ - N + 1;
+ tile_w_out_ = tile_w_in_ - N + 1;
+ ntiles_h_ = (std::max(h + pad_h - tile_h_in_ + 1, outh) + tile_h_out_ - 1) / tile_h_out_;
+ ntiles_w_ = (std::max(w + pad_w - tile_w_in_ + 1, outw) + tile_w_out_ - 1) / tile_w_out_;
+
+ error_ = 0;
+}
+
+conv_winograd::conv_winograd(const convMat_t &in_mat, convMat_t &out_mat,
+ const convParams_t &in_param, convType_t conv_type,
+ const float *winograd_weight, int num_threads, int inc_stride,
+ int outc_stride, int c_stride)
+ : in_mat_(in_mat), out_mat_(out_mat), in_param_(in_param), conv_type_(conv_type),
+ winograd_weight_(winograd_weight), num_threads_(num_threads), inc_stride_(inc_stride),
+ outc_stride_(outc_stride), c_stride_(c_stride)
+{
+ param_init();
+}
+
+conv_winograd::~conv_winograd() {}
+
+void conv_winograd::compute_sgemm(sgemmType_t major_type, sgemmTrans_t ltrans, sgemmTrans_t rtrans,
+ const int m, const int n, const int k, const float *lhs_data,
+ const float *rhs_data, float *res_data)
+{
+ class sgemm_singlethread sgemm(major_type, ltrans, rtrans, m, n, k, lhs_data, rhs_data, res_data,
+ num_threads_);
+
+ sgemm.run();
+}
+
+void conv_winograd::winograd_input_im2col(float *col_buff)
+{
+ const int w = in_mat_.w;
+ const int h = in_mat_.h;
+ const float *data = in_mat_.data;
+ const int channels = in_mat_.c;
+ const int pad_w = in_param_.pad_w;
+ const int pad_h = in_param_.pad_h;
+
+ if (conv_type_ == row_major)
+ {
+#ifdef NCNN
+ const int n = alignSize(inc_stride_, 16 / sizeof(float));
+#else // NCNN
+ const int n = inc_stride_;
+#endif // NCNN
+ for (int c = 0; c < channels; ++c)
+ {
+ for (int tile_h = 0; tile_h < ntiles_h_; ++tile_h)
+ {
+ for (int tile_w = 0; tile_w < ntiles_w_; ++tile_w)
+ {
+ for (int y = 0; y < tile_h_in_; ++y)
+ {
+ for (int x = 0; x < tile_w_in_; ++x)
+ {
+ int in_y = tile_h * tile_h_out_ + y - pad_h;
+ int in_x = tile_w * tile_w_out_ + x - pad_w;
+
+ if (in_y < 0 || in_x < 0 || in_y >= h || in_x >= w)
+ {
+ col_buff[(((c * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) * tile_h_in_ + y) *
+ tile_w_in_ +
+ x] = 0;
+ }
+ else
+ {
+ col_buff[(((c * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) * tile_h_in_ + y) *
+ tile_w_in_ +
+ x] = data[c * n + in_y * w + in_x];
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ else if (conv_type_ == col_major)
+ {
+ for (int tile_h = 0; tile_h < ntiles_h_; ++tile_h)
+ {
+ for (int tile_w = 0; tile_w < ntiles_w_; ++tile_w)
+ {
+ for (int y = 0; y < tile_h_in_; ++y)
+ {
+ for (int x = 0; x < tile_w_in_; ++x)
+ {
+ for (int c = 0; c < channels; ++c)
+ {
+ int in_y = tile_h * tile_h_out_ + y - pad_h;
+ int in_x = tile_w * tile_w_out_ + x - pad_w;
+
+ if (in_y < 0 || in_x < 0 || in_y >= h || in_x >= w)
+ {
+ col_buff[(((c * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) * tile_h_in_ + y) *
+ tile_w_in_ +
+ x] = 0;
+ }
+ else
+ {
+ col_buff[(((c * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) * tile_h_in_ + y) *
+ tile_w_in_ +
+ x] = data[c + (in_y * w + in_x) * channels];
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+void conv_winograd::winograd_output_col2im(const float *col_buff)
+{
+ int outh = out_mat_.h;
+ int outw = out_mat_.w;
+ float *data = out_mat_.data;
+ int channels = out_mat_.c;
+
+ if (conv_type_ == row_major)
+ {
+#ifdef NCNN
+ const int n = alignSize(outc_stride_, 16 / sizeof(float));
+#else // NCNN
+ const int n = outc_stride_;
+#endif // NCNN
+ for (int c = 0; c < channels; ++c)
+ {
+ for (int tile_h = 0; tile_h < ntiles_h_; ++tile_h)
+ {
+ for (int tile_w = 0; tile_w < ntiles_w_; ++tile_w)
+ {
+ for (int y = 0; y < tile_h_out_; ++y)
+ {
+ for (int x = 0; x < tile_w_out_; ++x)
+ {
+ int out_y = tile_h * tile_h_out_ + y;
+ int out_x = tile_w * tile_w_out_ + x;
+ if (out_y < outh && out_x < outw)
+ {
+ data[c * n + out_y * outw + out_x] =
+ col_buff[(((c * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) * tile_h_out_ + y) *
+ tile_w_out_ +
+ x];
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ else if (conv_type_ == col_major)
+ {
+ for (int tile_h = 0; tile_h < ntiles_h_; ++tile_h)
+ {
+ for (int tile_w = 0; tile_w < ntiles_w_; ++tile_w)
+ {
+ for (int y = 0; y < tile_h_out_; ++y)
+ {
+ for (int x = 0; x < tile_w_out_; ++x)
+ {
+ for (int c = 0; c < channels; ++c)
+ {
+ int out_y = tile_h * tile_h_out_ + y;
+ int out_x = tile_w * tile_w_out_ + x;
+ if (out_y < outh && out_x < outw)
+ {
+ data[c + (out_y * outw + out_x) * c_stride_] =
+ col_buff[(((c * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) * tile_h_out_ + y) *
+ tile_w_out_ +
+ x];
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+void conv_winograd::compute_winograd()
+{
+ const int inch = in_mat_.c;
+ const int outch = out_mat_.c;
+ const int kernel_size = in_param_.kernel_w;
+
+ int M, N;
+ const double *A;
+ const double *B;
+
+ if (kernel_size == 3)
+ {
+ M = winograd_para_3x3s1::M;
+ N = winograd_para_3x3s1::N;
+ B = winograd_para_3x3s1::getB();
+ A = winograd_para_3x3s1::getA();
+ }
+ else
+ {
+ M = winograd_para_5x5s1::M;
+ N = winograd_para_5x5s1::N;
+ B = winograd_para_5x5s1::getB();
+ A = winograd_para_5x5s1::getA();
+ }
+
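+ /*Step 1, the weight transform, is assumed to be done ahead of time (see
+   trans_weight2winograd() in srcn_conv.h)*/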
+ /*Step 2: transform the input image into the Winograd domain*/
+ // Use nothrow allocations so the NULL checks below can actually fire.
+ float *col_buff =
+     new (std::nothrow) float[std::max(outch, inch) * ntiles_h_ * ntiles_w_ * tile_h_in_ * tile_w_in_];
+
+ int temp1_n = inch * ntiles_h_ * ntiles_w_;
+ float *temp1_ =
+     new (std::nothrow) float[tile_h_in_ * tile_w_in_ * std::max(outch, inch) * ntiles_h_ * ntiles_w_];
+
+ float *winograd_b = new (std::nothrow) float[M * M * M * M];
+
+ if ((NULL == col_buff) || (NULL == temp1_) || (NULL == winograd_b))
+ {
+ delete[] col_buff;
+ delete[] temp1_;
+ delete[] winograd_b;
+ return;
+ }
+
+ winograd_input_im2col(col_buff);
+
+ kronecker_product(winograd_b, B, B, M, M, M, M);
+
+ compute_sgemm(rowMajor, trans, trans, tile_h_in_ * tile_w_in_, temp1_n, tile_h_in_ * tile_w_in_,
+ winograd_b, col_buff, temp1_);
+
+ delete[] winograd_b;
+
+ /*Step 3: convolution in the Winograd domain*/
+ for (int j = 0; j < tile_h_in_ * tile_w_in_; ++j)
+ {
+ compute_sgemm(rowMajor, notrans, notrans, outch, ntiles_h_ * ntiles_w_, inch,
+ winograd_weight_ + j * c_stride_ * inch,
+ temp1_ + j * inch * ntiles_h_ * ntiles_w_,
+ col_buff + j * outch * ntiles_h_ * ntiles_w_);
+ }
+
+ /*Step 4: transform back to the spatial domain*/
+ float *winograd_a = new (std::nothrow) float[M * (M - N + 1) * M * (M - N + 1)];
+ if (NULL == winograd_a)
+ {
+ delete[] col_buff;
+ delete[] temp1_;
+ return;
+ }
+ kronecker_product(winograd_a, A, A, M, M - N + 1, M, M - N + 1);
+ compute_sgemm(rowMajor, trans, notrans, outch * ntiles_h_ * ntiles_w_, tile_h_out_ * tile_w_out_,
+ tile_h_in_ * tile_w_in_, col_buff, winograd_a, temp1_);
+ delete[] winograd_a;
+ delete[] col_buff;
+
+ winograd_output_col2im(temp1_);
+
+ delete[] temp1_;
+}
+
+void conv_winograd::run()
+{
+ if (error_)
+ return;
+
+ compute_winograd();
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/conv_winograd.h b/runtimes/libs/srcn/src/conv_winograd.h
new file mode 100644
index 000000000..d478f943c
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_winograd.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_CONV_WINOGRAD_H__
+#define __NNFW_SRCN_CONV_WINOGRAD_H__
+
+#include "srcn/conv_type.h"
+#include "winograd.h"
+#include "sgemm_singlethread.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+class conv_winograd
+{
+public:
+ conv_winograd(const convMat_t &in_mat, convMat_t &out_mat, const convParams_t &in_param,
+ convType_t conv_type, const float *winograd_weight, int num_threads, int inc_stride,
+ int outc_stride, int c_stride);
+ ~conv_winograd();
+
+ void run();
+
+private:
+ void param_init();
+ void compute_sgemm(sgemmType_t major_type, sgemmTrans_t ltrans, sgemmTrans_t rtrans, const int m,
+ const int n, const int k, const float *lhs_data, const float *rhs_data,
+ float *res_data);
+ void winograd_input_im2col(float *col_buff);
+ void winograd_output_col2im(const float *col_buff);
+ void compute_winograd();
+
+ const convMat_t in_mat_;
+ convMat_t out_mat_;
+ const convParams_t in_param_;
+ convType_t conv_type_;
+ const float *winograd_weight_;
+ const int num_threads_;
+
+ int tile_w_in_;
+ int tile_h_in_;
+ int tile_w_out_;
+ int tile_h_out_;
+ int ntiles_w_;
+ int ntiles_h_;
+
+ int inc_stride_;
+ int outc_stride_;
+ int c_stride_;
+
+ int error_;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_CONV_WINOGRAD_H__
diff --git a/runtimes/libs/srcn/src/conv_winograd_batch.cc b/runtimes/libs/srcn/src/conv_winograd_batch.cc
new file mode 100644
index 000000000..7b468db02
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_winograd_batch.cc
@@ -0,0 +1,304 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common.h"
+#include "conv_winograd_batch.h"
+
+namespace std
+{
+template <typename Dtype> static inline Dtype max(Dtype a, Dtype b)
+{
+ if (a > b)
+ return a;
+ else
+ return b;
+}
+}
+
+namespace nnfw
+{
+namespace srcn
+{
+
+void conv_winograd_batch::param_init()
+{
+ if ((in_param_.kernel_w != in_param_.kernel_h) || (in_param_.stride_w != in_param_.stride_h) ||
+ (in_param_.kernel_w != 3 && in_param_.kernel_w != 5) || (in_param_.stride_w != 1) ||
+ (!winograd_weight_))
+ {
+ error_ = 1;
+ return;
+ }
+
+ int M, N;
+ const int w = in_mat_.w;
+ const int h = in_mat_.h;
+ const int outw = out_mat_.w;
+ const int outh = out_mat_.h;
+ const int pad_w = in_param_.pad_w;
+ const int pad_h = in_param_.pad_h;
+
+ if (in_param_.kernel_w == 3)
+ {
+ if (w == 4)
+ {
+ M = winograd_para_3x3s1_2::M;
+ N = winograd_para_3x3s1_2::N;
+ }
+ else
+ {
+ M = winograd_para_3x3s1::M;
+ N = winograd_para_3x3s1::N;
+ }
+ }
+ else
+ {
+ M = winograd_para_5x5s1::M;
+ N = winograd_para_5x5s1::N;
+ }
+
+ tile_h_in_ = tile_w_in_ = M;
+ tile_h_out_ = tile_h_in_ - N + 1;
+ tile_w_out_ = tile_w_in_ - N + 1;
+ ntiles_h_ = (std::max(h + pad_h - tile_h_in_ + 1, outh) + tile_h_out_ - 1) / tile_h_out_;
+ ntiles_w_ = (std::max(w + pad_w - tile_w_in_ + 1, outw) + tile_w_out_ - 1) / tile_w_out_;
+
+ error_ = 0;
+}
+
+conv_winograd_batch::conv_winograd_batch(const convMat_t &in_mat, convMat_t &out_mat,
+ const convParams_t &in_param, convType_t conv_type,
+ const float *winograd_weight, int num_threads)
+ : in_mat_(in_mat), out_mat_(out_mat), in_param_(in_param), conv_type_(conv_type),
+ winograd_weight_(winograd_weight), num_threads_(num_threads)
+{
+ param_init();
+}
+
+conv_winograd_batch::~conv_winograd_batch() {}
+
+void conv_winograd_batch::compute_sgemm(sgemmType_t major_type, sgemmTrans_t ltrans,
+ sgemmTrans_t rtrans, const int m, const int n, const int k,
+ const float *lhs_data, const float *rhs_data,
+ float *res_data)
+{
+ class sgemm_singlethread sgemm(major_type, ltrans, rtrans, m, n, k, lhs_data, rhs_data, res_data,
+ num_threads_);
+
+ sgemm.run();
+}
+
+void conv_winograd_batch::winograd_input_im2col(float *col_buff)
+{
+ const int w = in_mat_.w;
+ const int h = in_mat_.h;
+ const float *data = in_mat_.data;
+ const int channels = in_mat_.c;
+ const int batch = in_mat_.n;
+ const int pad_w = in_param_.pad_w;
+ const int pad_h = in_param_.pad_h;
+
+ // TODO: row_major
+ if (conv_type_ == col_major)
+ {
+ for (int n = 0; n < batch; n++)
+ {
+ for (int tile_h = 0; tile_h < ntiles_h_; ++tile_h)
+ {
+ for (int tile_w = 0; tile_w < ntiles_w_; ++tile_w)
+ {
+ for (int y = 0; y < tile_h_in_; ++y)
+ {
+ for (int x = 0; x < tile_w_in_; ++x)
+ {
+ for (int c = 0; c < channels; ++c)
+ {
+ int in_y = tile_h * tile_h_out_ + y - pad_h;
+ int in_x = tile_w * tile_w_out_ + x - pad_w;
+
+ if (in_y < 0 || in_x < 0 || in_y >= h || in_x >= w)
+ {
+ col_buff[((((c * batch + n) * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) *
+ tile_h_in_ +
+ y) *
+ tile_w_in_ +
+ x] = 0;
+ }
+ else
+ {
+ col_buff[((((c * batch + n) * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) *
+ tile_h_in_ +
+ y) *
+ tile_w_in_ +
+ x] = data[((n * h + in_y) * w + in_x) * channels + c];
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+void conv_winograd_batch::winograd_output_col2im(const float *col_buff)
+{
+ int outh = out_mat_.h;
+ int outw = out_mat_.w;
+ float *data = out_mat_.data;
+ int channels = out_mat_.c;
+ int batch = out_mat_.n;
+
+ // TODO: row_major
+ if (conv_type_ == col_major)
+ {
+ for (int n = 0; n < batch; n++)
+ {
+ for (int tile_h = 0; tile_h < ntiles_h_; ++tile_h)
+ {
+ for (int tile_w = 0; tile_w < ntiles_w_; ++tile_w)
+ {
+ for (int y = 0; y < tile_h_out_; ++y)
+ {
+ for (int x = 0; x < tile_w_out_; ++x)
+ {
+ for (int c = 0; c < channels; ++c)
+ {
+ int out_y = tile_h * tile_h_out_ + y;
+ int out_x = tile_w * tile_w_out_ + x;
+ if (out_y < outh && out_x < outw)
+ {
+ data[((n * outh + out_y) * outw + out_x) * channels + c] =
+ col_buff[((((c * batch + n) * ntiles_h_ + tile_h) * ntiles_w_ + tile_w) *
+ tile_h_out_ +
+ y) *
+ tile_w_out_ +
+ x];
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+void conv_winograd_batch::compute_winograd()
+{
+ const int w = in_mat_.w;
+ const int h = in_mat_.h;
+ const int inch = in_mat_.c;
+ const int outw = out_mat_.w;
+ const int outh = out_mat_.h;
+ const int outch = out_mat_.c;
+ const int kernel_size = in_param_.kernel_w;
+ const int batch = in_mat_.n;
+
+ int M, N;
+ const double *A;
+ const double *B;
+
+ if (kernel_size == 3)
+ {
+ if (w == 4)
+ {
+ M = winograd_para_3x3s1_2::M;
+ N = winograd_para_3x3s1_2::N;
+ B = winograd_para_3x3s1_2::getB();
+ A = winograd_para_3x3s1_2::getA();
+ }
+ else
+ {
+ M = winograd_para_3x3s1::M;
+ N = winograd_para_3x3s1::N;
+ B = winograd_para_3x3s1::getB();
+ A = winograd_para_3x3s1::getA();
+ }
+ }
+ else
+ {
+ M = winograd_para_5x5s1::M;
+ N = winograd_para_5x5s1::N;
+ B = winograd_para_5x5s1::getB();
+ A = winograd_para_5x5s1::getA();
+ }
+
+  /* Step 2: transform the input image into the Winograd domain */
+ float *col_buff =
+ new float[std::max(outch, inch) * batch * ntiles_h_ * ntiles_w_ * tile_h_in_ * tile_w_in_];
+
+ int temp1_n = batch * inch * ntiles_h_ * ntiles_w_;
+ float *temp1_ =
+ new float[batch * tile_h_in_ * tile_w_in_ * std::max(outch, inch) * ntiles_h_ * ntiles_w_];
+
+ float *winograd_b = new float[M * M * M * M];
+
+ if ((NULL == col_buff) || (NULL == temp1_) || (NULL == winograd_b))
+ {
+ delete[] col_buff;
+ delete[] temp1_;
+ delete[] winograd_b;
+ return;
+ }
+
+ winograd_input_im2col(col_buff);
+
+ kronecker_product(winograd_b, B, B, M, M, M, M);
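+  // The 2-D input transform is B (x) B (Kronecker product), an (M*M) x (M*M)
+  // matrix, so one SGEMM applies B^T d B to every gathered tile at once.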
+
+ compute_sgemm(rowMajor, trans, trans, tile_h_in_ * tile_w_in_, temp1_n, tile_h_in_ * tile_w_in_,
+ winograd_b, col_buff, temp1_);
+ delete[] winograd_b;
+
+  /* Step 3: per-frequency products in the Winograd domain */
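+  // Each of the tile_h_in_ * tile_w_in_ Winograd "frequencies" gets its own
+  // (outch x inch) x (inch x batch * ntiles) SGEMM over the transformed data.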
+ for (int j = 0; j < tile_h_in_ * tile_w_in_; ++j)
+ {
+ compute_sgemm(rowMajor, notrans, notrans, outch, batch * ntiles_h_ * ntiles_w_, inch,
+ winograd_weight_ + j * outch * inch,
+ temp1_ + j * batch * inch * ntiles_h_ * ntiles_w_,
+ col_buff + j * batch * outch * ntiles_h_ * ntiles_w_);
+ }
+
+  /* Step 4: transform the result back to the spatial domain */
+ float *winograd_a = new float[M * (M - N + 1) * M * (M - N + 1)];
+ if (NULL == winograd_a)
+ {
+ delete[] col_buff;
+ delete[] temp1_;
+ return;
+ }
+
+ kronecker_product(winograd_a, A, A, M, M - N + 1, M, M - N + 1);
+ compute_sgemm(rowMajor, trans, notrans, batch * outch * ntiles_h_ * ntiles_w_,
+ tile_h_out_ * tile_w_out_, tile_h_in_ * tile_w_in_, col_buff, winograd_a, temp1_);
+ delete[] winograd_a;
+ delete[] col_buff;
+
+ winograd_output_col2im(temp1_);
+
+ delete[] temp1_;
+}
+
+void conv_winograd_batch::run()
+{
+ if (error_)
+ return;
+
+ compute_winograd();
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/conv_winograd_batch.h b/runtimes/libs/srcn/src/conv_winograd_batch.h
new file mode 100644
index 000000000..8cf4428bb
--- /dev/null
+++ b/runtimes/libs/srcn/src/conv_winograd_batch.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_CONV_WINOGRAD_BATCH_H__
+#define __NNFW_SRCN_CONV_WINOGRAD_BATCH_H__
+
+#include "srcn/conv_type.h"
+#include "winograd.h"
+#include "sgemm_singlethread.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
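+// Batched Winograd convolution with weights already transformed into the
+// Winograd domain (winograd_weight). A minimal usage sketch, assuming the
+// matrices and parameters are prepared by the caller:
+//
+//   conv_winograd_batch conv(in, out, params, col_major, winograd_weight,
+//                            /*num_threads=*/1);
+//   conv.run(); // silently does nothing if parameter validation failed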
+class conv_winograd_batch
+{
+public:
+ conv_winograd_batch(const convMat_t &in_mat, convMat_t &out_mat, const convParams_t &in_param,
+ convType_t conv_type, const float *winograd_weight, int num_threads);
+ ~conv_winograd_batch();
+
+ void run();
+
+private:
+ void param_init();
+ void compute_sgemm(sgemmType_t major_type, sgemmTrans_t ltrans, sgemmTrans_t rtrans, const int m,
+ const int n, const int k, const float *lhs_data, const float *rhs_data,
+ float *res_data);
+ void winograd_input_im2col(float *col_buff);
+ void winograd_output_col2im(const float *col_buff);
+ void compute_winograd();
+
+ const convMat_t in_mat_;
+ convMat_t out_mat_;
+ const convParams_t in_param_;
+ convType_t conv_type_;
+ const float *winograd_weight_;
+ const int num_threads_;
+
+ int tile_w_in_;
+ int tile_h_in_;
+ int tile_w_out_;
+ int tile_h_out_;
+ int ntiles_w_;
+ int ntiles_h_;
+
+ int error_;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_CONV_WINOGRAD_BATCH_H__
diff --git a/runtimes/libs/srcn/src/deconv_sgemm_multithreads.cc b/runtimes/libs/srcn/src/deconv_sgemm_multithreads.cc
new file mode 100644
index 000000000..77042bcfa
--- /dev/null
+++ b/runtimes/libs/srcn/src/deconv_sgemm_multithreads.cc
@@ -0,0 +1,387 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef _OPENMP
+#include <omp.h>
+#else  // _OPENMP
+// Minimal no-op fallbacks so this file still builds when OpenMP is disabled
+// (the parallel pragmas below are simply ignored in that case).
+static inline int omp_get_thread_num() { return 0; }
+static inline void omp_set_num_threads(int) {}
+#endif // _OPENMP
+
+#include "common.h"
+#include "sgemm_kernel.h"
+#include "sgemm_pack.h"
+#include "deconv_sgemm_multithreads.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+void deconv_sgemm_multithreads::param_init()
+{
+#if __aarch64__
+ if (conv_type_ == row_major)
+ {
+ mr_ = 8;
+ nr_ = 12;
+ }
+ else if (conv_type_ == col_major)
+ {
+ mr_ = 12;
+ nr_ = 8;
+ }
+#else // __aarch64__
+ if (conv_type_ == row_major)
+ {
+ mr_ = 6;
+ nr_ = 8;
+ }
+ else if (conv_type_ == col_major)
+ {
+ mr_ = 8;
+ nr_ = 6;
+ }
+#endif // __aarch64__
+
+ int col = n_;
+
+ if (m_ > n_)
+ {
+ shard_type_ = shardByRow;
+ col = m_;
+ }
+ else
+ {
+ shard_type_ = shardByCol;
+ }
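+
+  // Shard the parallel loop along the larger of m_ and n_ so each thread gets
+  // a reasonably sized block; th_base below is that per-thread block size,
+  // clamped to [MIN_COL, MAX_COL].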
+
+ int th_base = divup(col, num_threads_);
+
+ th_base = MIN(MAX(th_base, MIN_COL), MAX_COL);
+
+ int k_div = (nr_ * sizeof_RhsScalar);
+ int k_sub = (mr_ * nr_ * sizeof_ResScalar);
+
+ const int k_cache = MIN(divup((int)(L1_CACHE_SIZE - k_sub), (int)k_div * 2), MAX_K);
+ bk_ = MIN(k_cache, k_);
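+
+  // Three-level cache blocking: bk_ sizes the k-panel for L1, bm_/bn_ size
+  // the packed panels for L2, and, when an L3 size is configured, the panel
+  // shared across threads is bounded by L3 as well.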
+
+ if (shard_type_ == shardByCol)
+ {
+ int m_sub = (bk_ * nr_ * sizeof_RhsScalar);
+ int m_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ if (L3_CACHE_SIZE)
+ m_div = (sizeof_LhsScalar * bk_ * 2);
+ int m_cache = divup((L2_CACHE_SIZE - m_sub), m_div);
+ bm_ = MIN(m_cache, m_);
+
+ bn_ = MIN(th_base, n_);
+ if (L3_CACHE_SIZE)
+ {
+ int n_sub = (bk_ * bm_ * sizeof_RhsScalar);
+ int n_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ int n_cache = divup((L3_CACHE_SIZE - n_sub), n_div);
+ bn_ = MIN(n_cache, bn_);
+ }
+ }
+ else
+ {
+ int n_sub = (bk_ * mr_ * sizeof_LhsScalar);
+ int n_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ if (L3_CACHE_SIZE)
+ n_div = (sizeof_LhsScalar * bk_ * 2);
+ int n_cache = divup((L2_CACHE_SIZE - n_sub), n_div);
+ bn_ = MIN(n_cache, n_);
+
+ bm_ = MIN(th_base, m_);
+ if (L3_CACHE_SIZE)
+ {
+ int m_sub = (bk_ * bn_ * sizeof_RhsScalar);
+ int m_div = (sizeof_LhsScalar * bk_ * 2 * num_threads_);
+ int m_cache = divup((L3_CACHE_SIZE - m_sub), m_div);
+ bm_ = MIN(m_cache, bm_);
+ }
+ }
+
+ nm_ = divup(m_, bm_);
+ nn_ = divup(n_, bn_);
+ nk_ = divup(k_, bk_);
+
+ rm_ = m_ % bm_;
+ rn_ = n_ % bn_;
+ rk_ = k_ % bk_;
+}
+
+deconv_sgemm_multithreads::deconv_sgemm_multithreads(const convMat_t &in_mat,
+ const convMat_t &weights_mat,
+ convMat_t &out_mat,
+ const convParams_t &in_param, int num_threads,
+ convType_t conv_type)
+ : in_mat_(in_mat), weights_mat_(weights_mat), out_mat_(out_mat), in_param_(in_param),
+ num_threads_(num_threads), conv_type_(conv_type)
+{
+ m_ = in_param_.kernel_h * in_param_.kernel_w * out_mat_.c;
+#ifdef NCNN
+ n_ = alignSize(in_mat_.h * in_mat_.w, 16 / sizeof(float));
+#else // NCNN
+ n_ = in_mat_.w * in_mat_.h;
+#endif // NCNN
+ k_ = in_mat.c;
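+
+  // Deconvolution as GEMM: m_ = kh * kw * out_c rows of the weight matrix,
+  // n_ = input pixels, and k_ = input channels; col2im later scatters the
+  // resulting columns back into the output image.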
+
+ param_init();
+
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ if (shard_type_ == shardByCol)
+ {
+ plhs_buffer_ = new float[lhs_stride * 1 * nm_];
+ prhs_buffer_ = new float[rhs_stride * num_threads_];
+ }
+ else
+ {
+ plhs_buffer_ = new float[lhs_stride * num_threads_];
+ prhs_buffer_ = new float[rhs_stride * 1 * nn_];
+ }
+
+ pres_buffer_ = new float[bm_ * bn_ * num_threads_];
+
+  error_ = 0;
+  if (plhs_buffer_ == NULL || prhs_buffer_ == NULL || pres_buffer_ == NULL)
+  {
+    error_ = 1;
+  }
+
+ if (in_param_.kernel_w != 1 || in_param_.kernel_h != 1 || in_param_.stride_w != 1 ||
+ in_param_.stride_h != 1 || in_param_.padding != 0)
+ {
+ need_col2im_ = 1;
+ }
+ else
+ {
+ need_col2im_ = 0;
+ }
+
+  omp_set_num_threads(num_threads_);
+}
+
+deconv_sgemm_multithreads::~deconv_sgemm_multithreads()
+{
+ if (plhs_buffer_)
+ delete[] plhs_buffer_;
+ if (prhs_buffer_)
+ delete[] prhs_buffer_;
+ if (pres_buffer_)
+ delete[] pres_buffer_;
+}
+
+void deconv_sgemm_multithreads::run()
+{
+ if (error_)
+ return;
+
+ if (shard_type_ == shardByCol && conv_type_ == col_major)
+ {
+ compute_colmajor_colshard();
+ }
+ else if (shard_type_ == shardByRow && conv_type_ == col_major)
+ {
+ compute_colmajor_rowshard();
+ }
+ else if (shard_type_ == shardByCol && conv_type_ == row_major)
+ {
+ compute_rowmajor_colshard();
+ }
+ else if (shard_type_ == shardByRow && conv_type_ == row_major)
+ {
+ compute_rowmajor_rowshard();
+ }
+}
+
+void deconv_sgemm_multithreads::compute_rowmajor_colshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
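+  // Column sharding: for every k-panel, first pack all LHS blocks once in
+  // parallel, then each thread packs one RHS panel and sweeps the packed LHS
+  // blocks, unpacking results via col2im as it goes.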
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_trans_lhs(mr_, bm, bk, m_, &weights_mat_.data[l * bk_ * m_ + i * bm_],
+ &plhs_buffer_[i * lhs_stride]);
+ }
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ int thread_num = omp_get_thread_num();
+ float *prhs_ptr = &prhs_buffer_[rhs_stride * thread_num];
+ float *pres_ptr = &pres_buffer_[bm_ * bn_ * thread_num];
+
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &in_mat_.data[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, &plhs_buffer_[i * lhs_stride],
+ prhs_ptr, pres_ptr, 0, bn, bk);
+
+ if (need_col2im_)
+ _unpack_rowmajor_image_res(bm, bn, i * bm_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_), pres_ptr);
+ }
+ }
+ }
+}
+
+void deconv_sgemm_multithreads::compute_rowmajor_rowshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &in_mat_.data[l * bk_ * n_ + j * bn_],
+ &prhs_buffer_[j * rhs_stride]);
+ }
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ int thread_num = omp_get_thread_num();
+ float *plhs_ptr = &plhs_buffer_[lhs_stride * thread_num];
+ float *pres_ptr = &pres_buffer_[bm_ * bn_ * thread_num];
+
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_trans_lhs(mr_, bm, bk, m_, &weights_mat_.data[l * bk_ * m_ + i * bm_],
+ plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ _sgemm_rowmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr,
+ &prhs_buffer_[j * rhs_stride], pres_ptr, 0, bn, bk);
+ if (need_col2im_)
+ _unpack_rowmajor_image_res(bm, bn, i * bm_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_), pres_ptr);
+ }
+ }
+ }
+}
+
+void deconv_sgemm_multithreads::compute_colmajor_colshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_trans_lhs(mr_, bm, bk, k_, &weights_mat_.data[i * bm_ * k_ + l * bk_],
+ &plhs_buffer_[i * lhs_stride]);
+ }
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ int thread_num = omp_get_thread_num();
+ float *prhs_ptr = &prhs_buffer_[rhs_stride * thread_num];
+ float *pres_ptr = &pres_buffer_[bm_ * bn_ * thread_num];
+
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &in_mat_.data[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _sgemm_colmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, &plhs_buffer_[i * lhs_stride],
+ prhs_ptr, pres_ptr, 0, bm, bk);
+
+        // TODO: check whether concurrent col2im stores here need a lock.
+ if (need_col2im_)
+ _unpack_colmajor_image_res(bm, bn, i * bm_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_), pres_ptr);
+ }
+ }
+ }
+}
+
+void deconv_sgemm_multithreads::compute_colmajor_rowshard()
+{
+ int lhs_stride = (bm_ + mr_ - 1) / mr_ * mr_ * bk_;
+ int rhs_stride = (bn_ + nr_ - 1) / nr_ * nr_ * bk_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+#pragma omp parallel for
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &in_mat_.data[j * bn_ * k_ + l * bk_],
+ &prhs_buffer_[j * rhs_stride]);
+ }
+
+#pragma omp parallel for
+ for (int i = 0; i < nm_; i++)
+ {
+ int thread_num = omp_get_thread_num();
+ float *plhs_ptr = &plhs_buffer_[lhs_stride * thread_num];
+ float *pres_ptr = &pres_buffer_[bm_ * bn_ * thread_num];
+
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_trans_lhs(mr_, bm, bk, k_, &weights_mat_.data[i * bm_ * k_ + l * bk_],
+ plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ _sgemm_colmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr,
+ &prhs_buffer_[j * rhs_stride], pres_ptr, 0, bm, bk);
+
+ if (need_col2im_)
+ _unpack_colmajor_image_res(bm, bn, i * bm_, j * bn_, const_cast<convMat_t *>(&in_mat_),
+ &out_mat_, const_cast<convParams_t *>(&in_param_), pres_ptr);
+ }
+ }
+ }
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/deconv_sgemm_multithreads.h b/runtimes/libs/srcn/src/deconv_sgemm_multithreads.h
new file mode 100644
index 000000000..0f0e47baa
--- /dev/null
+++ b/runtimes/libs/srcn/src/deconv_sgemm_multithreads.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_DECONV_SGEMM_MULTITHREADS_H__
+#define __NNFW_SRCN_DECONV_SGEMM_MULTITHREADS_H__
+
+#include "srcn/conv_type.h"
+#include "common.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
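+// Transposed convolution (deconvolution) implemented as a cache-blocked,
+// OpenMP-parallel SGEMM followed by col2im. A minimal usage sketch, assuming
+// the matrices are allocated and filled by the caller:
+//
+//   deconv_sgemm_multithreads deconv(in, weights, out, params,
+//                                    /*num_threads=*/4, col_major);
+//   deconv.run(); // dispatches on data layout and sharding direction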
+class deconv_sgemm_multithreads
+{
+public:
+ deconv_sgemm_multithreads(const convMat_t &in_mat, const convMat_t &weights_mat,
+ convMat_t &out_mat, const convParams_t &in_param, int num_threads,
+ convType_t conv_type);
+ ~deconv_sgemm_multithreads();
+
+ void run();
+
+private:
+ void param_init();
+
+ void compute_rowmajor_colshard();
+ void compute_rowmajor_rowshard();
+ void compute_colmajor_colshard();
+ void compute_colmajor_rowshard();
+
+ const convMat_t in_mat_;
+ const convMat_t weights_mat_;
+ convMat_t out_mat_;
+ const convParams_t in_param_;
+ convType_t conv_type_;
+ const int num_threads_;
+
+ int m_;
+ int n_;
+ int k_;
+
+ int bm_;
+ int bn_;
+ int bk_;
+
+ int rm_;
+ int rn_;
+ int rk_;
+
+ int nm_;
+ int nn_;
+ int nk_;
+
+ int mr_;
+ int nr_;
+
+ int need_col2im_;
+ shardType_t shard_type_;
+
+ float *prhs_buffer_;
+ float *plhs_buffer_;
+ float *pres_buffer_;
+
+ int error_;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_DECONV_SGEMM_MULTITHREADS_H__
diff --git a/runtimes/libs/srcn/src/depthwise_conv.cc b/runtimes/libs/srcn/src/depthwise_conv.cc
new file mode 100644
index 000000000..a06ee7050
--- /dev/null
+++ b/runtimes/libs/srcn/src/depthwise_conv.cc
@@ -0,0 +1,2639 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef _OPENMP
+#include <omp.h>
+#endif
+
+#include <arm_neon.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "common.h"
+#include "srcn/conv_type.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+static void depthwise_conv3x3S1_nopad(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convMat_t &bias)
+{
+#if !__aarch64__
+ int w = in_mat.w;
+ int h = in_mat.h;
+ int outw = out_mat.w;
+ int outh = out_mat.h;
+ int channels = in_mat.c;
+
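+  // Depthwise: each channel is filtered independently with its own 3x3
+  // kernel. Two output rows are produced per pass by the NEON block below;
+  // a scalar NEON tail handles the remaining columns and a possible last row.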
+#pragma omp parallel for
+ for (int c = 0; c < channels; c++)
+ {
+ const float *filter = kernel.data + c * 9;
+#ifdef NCNN
+ float *inbuf = in_mat.data + c * alignSize(w * h, 16 / sizeof(float));
+ float *outbuf = out_mat.data + c * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *inbuf = in_mat.data + c * w * h;
+ float *outbuf = out_mat.data + c * outw * outh;
+#endif // NCNN
+ float bias0 = bias.data ? bias.data[c] : 0.0f;
+
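+    // Pin the three kernel rows and the broadcast bias to q4-q7 so the inline
+    // assembly can use them directly while clobbering q0-q3 and q8-q15.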
+ register float32x4_t weight012 asm("q4") = vld1q_f32(filter);
+ register float32x4_t weight345 asm("q5") = vld1q_f32(filter + 3);
+ register float32x4_t weight678 asm("q6") = vld1q_f32(filter + 6);
+ register float32x4_t qbias0 asm("q7") = vdupq_n_f32(bias0);
+
+ float *in_ptr0 = inbuf + 0 * w;
+ float *in_ptr1 = inbuf + 1 * w;
+ float *in_ptr2 = inbuf + 2 * w;
+ float *in_ptr3 = inbuf + 3 * w;
+
+ float *out_ptr0 = outbuf + 0 * outw;
+ float *out_ptr1 = outbuf + 1 * outw;
+
+ int i;
+ for (i = 0; i + 1 < outh; i += 2)
+ {
+      int nn = (outw >> 2) - 1;
+      int remain = (outw & 0x03) + 4;
+
+ if (nn > 0)
+ {
+ __asm __volatile("pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr0], %[in_ptr0], #16\n"
+
+ "1:\n"
+ "add %[in_ptr0], %[in_ptr0], #16\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr1], %[in_ptr1], #16\n"
+
+ "vand q15, %q[qbias0], %q[qbias0]\n"
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q2, %e[weight345][1]\n"
+ "vmul.f32 q12, q0, %e[weight012][0]\n"
+ "vmul.f32 q13, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr2], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vmla.f32 q15, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr2], %[in_ptr2], #16\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q2, %e[weight678][1]\n"
+ "vmla.f32 q12, q0, %e[weight345][0]\n"
+ "vmla.f32 q13, q2, %e[weight345][1]\n"
+
+ "pld [%[in_ptr3], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr3]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vmla.f32 q15, q3, %f[weight345][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr3], %[in_ptr3], #16\n"
+
+ "vmla.f32 q12, q0, %e[weight678][0]\n"
+ "vmla.f32 q13, q2, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vmla.f32 q15, q3, %f[weight678][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+ "vadd.f32 q15, q15, q12\n"
+ "vadd.f32 q15, q15, q13\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[out_ptr1]]!\n"
+
+ "bne 1b\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [in_ptr3] "+r"(in_ptr3),
+
+ [out_ptr0] "+r"(out_ptr0), [out_ptr1] "+r"(out_ptr1), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+
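+      // Scalar tail: each output is a 3x3 dot product built from vector
+      // multiplies; lane 3 of the accumulator is overwritten with the bias
+      // before the horizontal add, so only the three kernel taps contribute.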
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+ float32x4_t input3 = vld1q_f32(in_ptr3);
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ float32x4_t out1 = vmulq_f32(input1, weight012);
+ out1 = vmlaq_f32(out1, input2, weight345);
+ out1 = vmlaq_f32(out1, input3, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+ out1 = vsetq_lane_f32(bias0, out1, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+ float32x2_t out11 = vadd_f32(vget_low_f32(out1), vget_high_f32(out1));
+
+ float32x2_t out01 = vpadd_f32(out00, out11);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+ *out_ptr1 = vget_lane_f32(out01, 1);
+
+ in_ptr0++;
+ in_ptr1++;
+ in_ptr2++;
+ in_ptr3++;
+ out_ptr0++;
+ out_ptr1++;
+ }
+
+ in_ptr0 += w + 2;
+ in_ptr1 += w + 2;
+ in_ptr2 += w + 2;
+ in_ptr3 += w + 2;
+
+ out_ptr0 += outw;
+ out_ptr1 += outw;
+ }
+
+ for (; i < outh; i++)
+ {
+ int nn = outw >> 2;
+ int remain = outw & 0x03;
+
+ if (nn > 0)
+ {
+ __asm __volatile("1:\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr0], %[in_ptr0], #16\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmla.f32 q14, q0, %e[weight012][0]\n"
+ "vmla.f32 q14, q2, %e[weight012][1]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr1], %[in_ptr1], #16\n"
+
+ "vmla.f32 q14, q0, %e[weight345][0]\n"
+ "vmla.f32 q14, q2, %e[weight345][1]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr2], %[in_ptr2], #16\n"
+
+ "vmla.f32 q14, q0, %e[weight678][0]\n"
+ "vmla.f32 q14, q2, %e[weight678][1]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+
+ "bne 1b\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0++;
+ in_ptr1++;
+ in_ptr2++;
+ out_ptr0++;
+ }
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ in_ptr2 += 2;
+ }
+ }
+
+#endif // !__aarch64__
+}
+
+static void depthwise_conv3x3S1_padding(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convMat_t &bias)
+{
+#if !__aarch64__
+ int w = in_mat.w;
+ int h = in_mat.h;
+ int outw = out_mat.w;
+ int outh = out_mat.h;
+ int channels = in_mat.c;
+
+#pragma omp parallel for
+ for (int c = 0; c < channels; c++)
+ {
+ const float *filter = kernel.data + c * 9;
+#ifdef NCNN
+ float *inbuf = in_mat.data + c * alignSize(w * h, 16 / sizeof(float));
+ float *outbuf = out_mat.data + c * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *inbuf = in_mat.data + c * w * h;
+ float *outbuf = out_mat.data + c * outw * outh;
+#endif // NCNN
+ float bias0 = bias.data ? bias.data[c] : 0.0f;
+
+ register float32x4_t weight012 asm("q4") = vld1q_f32(filter);
+ register float32x4_t weight345 asm("q5") = vld1q_f32(filter + 3);
+ register float32x4_t weight678 asm("q6") = vld1q_f32(filter + 6);
+ register float32x4_t qbias0 asm("q7") = vdupq_n_f32(bias0);
+
+ float *in_ptr0 = inbuf + 0 * w;
+ float *in_ptr1 = inbuf + 1 * w;
+ float *in_ptr2 = inbuf + 2 * w;
+ float *in_ptr3 = inbuf + 3 * w;
+
+ float *out_ptr0 = outbuf + 0 * outw;
+ float *out_ptr1 = outbuf + 1 * outw;
+
+ int i;
+ for (i = 0; i + 1 < outh; i += 2)
+ {
+ int nn = (outw >> 2) - 1;
+ int remain = (outw & 0x03) + 4;
+ if (i == 0)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q8, #0\n"
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr0], %[in_ptr0], #12\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vand q15, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q2, %e[weight345][0]\n"
+ "vmul.f32 q11, q0, %e[weight345][1]\n"
+ "vmul.f32 q12, q2, %e[weight012][0]\n"
+ "vmul.f32 q13, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vmla.f32 q15, q3, %f[weight012][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr1], %[in_ptr1], #12\n"
+
+ "vmla.f32 q10, q2, %e[weight678][0]\n"
+ "vmla.f32 q11, q0, %e[weight678][1]\n"
+ "vmla.f32 q12, q2, %e[weight345][0]\n"
+ "vmla.f32 q13, q0, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vmla.f32 q15, q3, %f[weight345][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr2], %[in_ptr2], #12\n"
+
+ "vmla.f32 q12, q2, %e[weight678][0]\n"
+ "vmla.f32 q13, q0, %e[weight678][1]\n"
+ "vmla.f32 q15, q3, %f[weight678][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+ "vadd.f32 q15, q15, q12\n"
+ "vadd.f32 q15, q15, q13\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[out_ptr1]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "1:\n"
+ "add %[in_ptr0], %[in_ptr0], #16\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vand q15, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight345][0]\n"
+ "vmul.f32 q11, q2, %e[weight345][1]\n"
+ "vmul.f32 q12, q0, %e[weight012][0]\n"
+ "vmul.f32 q13, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vmla.f32 q15, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr1], %[in_ptr1], #16\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q2, %e[weight678][1]\n"
+ "vmla.f32 q12, q0, %e[weight345][0]\n"
+ "vmla.f32 q13, q2, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vmla.f32 q15, q3, %f[weight345][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr2], %[in_ptr2], #16\n"
+
+ "vmla.f32 q12, q0, %e[weight678][0]\n"
+ "vmla.f32 q13, q2, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vmla.f32 q15, q3, %f[weight678][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+ "vadd.f32 q15, q15, q12\n"
+ "vadd.f32 q15, q15, q13\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[out_ptr1]]!\n"
+ "bne 1b\n"
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0),
+ [out_ptr1] "+r"(out_ptr1), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, pad_left comes here.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ input2 = vsetq_lane_f32(0.0f, input2, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight345);
+ out0 = vmlaq_f32(out0, input1, weight678);
+
+ float32x4_t out1 = vmulq_f32(input0, weight012);
+ out1 = vmlaq_f32(out1, input1, weight345);
+ out1 = vmlaq_f32(out1, input2, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+ out1 = vsetq_lane_f32(bias0, out1, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+ float32x2_t out11 = vadd_f32(vget_low_f32(out1), vget_high_f32(out1));
+
+ float32x2_t out01 = vpadd_f32(out00, out11);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+ *out_ptr1 = vget_lane_f32(out01, 1);
+
+ in_ptr0++;
+ in_ptr1++;
+ in_ptr2++;
+ out_ptr0++;
+ out_ptr1++;
+ }
+
+ in_ptr0 += 1;
+ in_ptr1 += 1;
+ in_ptr2 += 1;
+ in_ptr3 += w;
+ }
+ else if (i == outh - 2)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q8, #0\n"
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr0], %[in_ptr0], #12\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q2, %e[weight012][0]\n"
+ "vmul.f32 q11, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr1], %[in_ptr1], #12\n"
+
+ "vand q15, %q[qbias0], %q[qbias0]\n"
+ "vmla.f32 q10, q2, %e[weight345][0]\n"
+ "vmla.f32 q11, q0, %e[weight345][1]\n"
+ "vmul.f32 q12, q2, %e[weight012][0]\n"
+ "vmul.f32 q13, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr2], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vmla.f32 q15, q3, %f[weight012][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr2], %[in_ptr2], #12\n"
+
+ "vmla.f32 q10, q2, %e[weight678][0]\n"
+ "vmla.f32 q11, q0, %e[weight678][1]\n"
+ "vmla.f32 q12, q2, %e[weight345][0]\n"
+ "vmla.f32 q13, q0, %e[weight345][1]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+ "vadd.f32 q15, q15, q12\n"
+ "vadd.f32 q15, q15, q13\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[out_ptr1]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "1:\n"
+ "add %[in_ptr0], %[in_ptr0], #16\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr1], %[in_ptr1], #16\n"
+
+ "vand q15, %q[qbias0], %q[qbias0]\n"
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q2, %e[weight345][1]\n"
+ "vmul.f32 q12, q0, %e[weight012][0]\n"
+ "vmul.f32 q13, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr2], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vmla.f32 q15, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr2], %[in_ptr2], #16\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q2, %e[weight678][1]\n"
+ "vmla.f32 q12, q0, %e[weight345][0]\n"
+ "vmla.f32 q13, q2, %e[weight345][1]\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vmla.f32 q15, q3, %f[weight345][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+ "vadd.f32 q15, q15, q12\n"
+ "vadd.f32 q15, q15, q13\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[out_ptr1]]!\n"
+ "bne 1b\n"
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0),
+ [out_ptr1] "+r"(out_ptr1), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, pad_left comes here.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ input2 = vsetq_lane_f32(0.0f, input2, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ float32x4_t out1 = vmulq_f32(input1, weight012);
+ out1 = vmlaq_f32(out1, input2, weight345);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+ out1 = vsetq_lane_f32(bias0, out1, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+ float32x2_t out11 = vadd_f32(vget_low_f32(out1), vget_high_f32(out1));
+
+ float32x2_t out01 = vpadd_f32(out00, out11);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+ *out_ptr1 = vget_lane_f32(out01, 1);
+
+ in_ptr0++;
+ in_ptr1++;
+ in_ptr2++;
+ out_ptr0++;
+ out_ptr1++;
+ }
+ }
+ else
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q8, #0\n"
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr0], %[in_ptr0], #12\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q2, %e[weight012][0]\n"
+ "vmul.f32 q11, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr1], %[in_ptr1], #12\n"
+
+ "vand q15, %q[qbias0], %q[qbias0]\n"
+ "vmla.f32 q10, q2, %e[weight345][0]\n"
+ "vmla.f32 q11, q0, %e[weight345][1]\n"
+ "vmul.f32 q12, q2, %e[weight012][0]\n"
+ "vmul.f32 q13, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr2], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vmla.f32 q15, q3, %f[weight012][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr2], %[in_ptr2], #12\n"
+
+ "vmla.f32 q10, q2, %e[weight678][0]\n"
+ "vmla.f32 q11, q0, %e[weight678][1]\n"
+ "vmla.f32 q12, q2, %e[weight345][0]\n"
+ "vmla.f32 q13, q0, %e[weight345][1]\n"
+
+ "pld [%[in_ptr3], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr3]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vmla.f32 q15, q3, %f[weight345][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr3], %[in_ptr3], #12\n"
+
+ "vmla.f32 q15, q2, %e[weight678][0]\n"
+ "vmla.f32 q15, q0, %e[weight678][1]\n"
+ "vmla.f32 q15, q3, %f[weight678][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+ "vadd.f32 q15, q15, q12\n"
+ "vadd.f32 q15, q15, q13\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[out_ptr1]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "1:\n"
+ "add %[in_ptr0], %[in_ptr0], #16\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr1], %[in_ptr1], #16\n"
+
+ "vand q15, %q[qbias0], %q[qbias0]\n"
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q2, %e[weight345][1]\n"
+ "vmul.f32 q12, q0, %e[weight012][0]\n"
+ "vmul.f32 q13, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr2], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vmla.f32 q15, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr2], %[in_ptr2], #16\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q2, %e[weight678][1]\n"
+ "vmla.f32 q12, q0, %e[weight345][0]\n"
+ "vmla.f32 q13, q2, %e[weight345][1]\n"
+
+ "pld [%[in_ptr3], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr3]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vmla.f32 q15, q3, %f[weight345][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr3], %[in_ptr3], #16\n"
+
+ "vmla.f32 q15, q0, %e[weight678][0]\n"
+ "vmla.f32 q15, q2, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vmla.f32 q15, q3, %f[weight678][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q15, q15, q12\n"
+ "vadd.f32 q14, q14, q11\n"
+ "vadd.f32 q15, q15, q13\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[out_ptr1]]!\n"
+ "bne 1b\n"
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [in_ptr3] "+r"(in_ptr3),
+
+ [out_ptr0] "+r"(out_ptr0), [out_ptr1] "+r"(out_ptr1), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, pad_left comes here.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+ float32x4_t input3 = vld1q_f32(in_ptr3);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ input2 = vsetq_lane_f32(0.0f, input2, 2);
+ input3 = vsetq_lane_f32(0.0f, input3, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ float32x4_t out1 = vmulq_f32(input1, weight012);
+ out1 = vmlaq_f32(out1, input2, weight345);
+ out1 = vmlaq_f32(out1, input3, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+ out1 = vsetq_lane_f32(bias0, out1, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+ float32x2_t out11 = vadd_f32(vget_low_f32(out1), vget_high_f32(out1));
+
+ float32x2_t out01 = vpadd_f32(out00, out11);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+ *out_ptr1 = vget_lane_f32(out01, 1);
+
+ in_ptr0++;
+ in_ptr1++;
+ in_ptr2++;
+ in_ptr3++;
+ out_ptr0++;
+ out_ptr1++;
+ }
+ in_ptr0 += w + 1;
+ in_ptr1 += w + 1;
+ in_ptr2 += w + 1;
+ in_ptr3 += w + 1;
+ }
+
+ out_ptr0 += outw;
+ out_ptr1 += outw;
+ }
+
+ for (; i < outh; i++)
+ {
+      // TODO: if i == 0, pad_top comes here.
+ int nn = (outw >> 2) - 1;
+ int remain = (outw & 0x03) + 4;
+
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q8, #0\n"
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr0], %[in_ptr0], #12\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q2, %e[weight012][0]\n"
+ "vmul.f32 q11, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q2, q8, q0, #3\n"
+ "vext.32 q3, q0, q1, #1\n"
+ "add %[in_ptr1], %[in_ptr1], #12\n"
+
+ "vmla.f32 q10, q2, %e[weight345][0]\n"
+ "vmla.f32 q11, q0, %e[weight345][1]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "1:\n"
+ "add %[in_ptr0], %[in_ptr0], #16\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q2, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+ "add %[in_ptr1], %[in_ptr1], #16\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q2, %e[weight345][1]\n"
+
+ "pld [%[in_ptr0], #192]\n"
+ "vld1.f32 {d0-d2}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q2, q0, q1, #1\n"
+ "vext.32 q3, q0, q1, #2\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, pad_left comes here.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0++;
+ in_ptr1++;
+ out_ptr0++;
+ }
+ }
+ }
+#endif // !__aarch64__
+}
+
+static void depthwise_conv3x3S2_nopad(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convMat_t &bias)
+{
+#if !__aarch64__
+ int w = in_mat.w;
+ int h = in_mat.h;
+ int outw = out_mat.w;
+ int outh = out_mat.h;
+ int channels = in_mat.c;
+
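+  // With stride 2, a row of outputs consumes 2 * outw input columns; tailstep
+  // (= 2 * (w - outw)) skips the leftover columns plus one full input row.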
+ const int tailstep = w - 2 * outw + w;
+
+#pragma omp parallel for
+ for (int c = 0; c < channels; c++)
+ {
+ const float *filter = kernel.data + c * 9;
+#ifdef NCNN
+ float *inbuf = in_mat.data + c * alignSize(w * h, 16 / sizeof(float));
+ float *outbuf = out_mat.data + c * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *inbuf = in_mat.data + c * w * h;
+ float *outbuf = out_mat.data + c * outw * outh;
+#endif // NCNN
+ float bias0 = bias.data ? bias.data[c] : 0.0f;
+
+ register float32x4_t weight012 asm("q4") = vld1q_f32(filter);
+ register float32x4_t weight345 asm("q5") = vld1q_f32(filter + 3);
+ register float32x4_t weight678 asm("q6") = vld1q_f32(filter + 6);
+ register float32x4_t qbias0 asm("q7") = vdupq_n_f32(bias0);
+
+ float *in_ptr0 = inbuf + 0 * w;
+ float *in_ptr1 = inbuf + 1 * w;
+ float *in_ptr2 = inbuf + 2 * w;
+
+ float *out_ptr0 = outbuf + 0 * outw;
+
+ int i;
+ for (i = 0; i < outh; i++)
+ {
+ int nn = outw >> 2;
+ int remain = outw & 0x03;
+
+ if (nn > 0)
+ {
+ __asm __volatile("pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr2]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q1, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ in_ptr2 += 2;
+ out_ptr0++;
+ }
+
+ in_ptr0 += tailstep;
+ in_ptr1 += tailstep;
+ in_ptr2 += tailstep;
+ }
+ }
+#endif // !__aarch64__
+}
+
+static void depthwise_conv3x3S2_padding00(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convMat_t &bias)
+{
+#if !__aarch64__
+ int w = in_mat.w;
+ int h = in_mat.h;
+ int outw = out_mat.w;
+ int outh = out_mat.h;
+ int channels = in_mat.c;
+
+#pragma omp parallel for
+ for (int c = 0; c < channels; c++)
+ {
+ const float *filter = kernel.data + c * 9;
+#ifdef NCNN
+ float *inbuf = in_mat.data + c * alignSize(w * h, 16 / sizeof(float));
+ float *outbuf = out_mat.data + c * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *inbuf = in_mat.data + c * w * h;
+ float *outbuf = out_mat.data + c * outw * outh;
+#endif // NCNN
+ float bias0 = bias.data ? bias.data[c] : 0.0f;
+
+ register float32x4_t weight012 asm("q4") = vld1q_f32(filter);
+ register float32x4_t weight345 asm("q5") = vld1q_f32(filter + 3);
+ register float32x4_t weight678 asm("q6") = vld1q_f32(filter + 6);
+ register float32x4_t qbias0 asm("q7") = vdupq_n_f32(bias0);
+
+ float *in_ptr0 = inbuf + 0 * w;
+ float *in_ptr1 = inbuf + 1 * w;
+ float *in_ptr2 = inbuf + 2 * w;
+
+ float *out_ptr0 = outbuf + 0 * outw;
+
+ int i;
+ for (i = 0; i < outh; i++)
+ {
+ int nn = (outw >> 2) - 1;
+ int remain = (outw & 0x03) + 4;
+
+ if (i == outh - 1)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ out_ptr0++;
+ }
+ }
+ else
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr2]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q1, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ input2 = vsetq_lane_f32(0.0f, input2, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ in_ptr2 += 2;
+ out_ptr0++;
+ }
+
+ in_ptr0 += w;
+ in_ptr1 += w;
+ in_ptr2 += w;
+ }
+ }
+ }
+#endif // !__aarch64__
+}
+
+static void depthwise_conv3x3S2_padding01(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convMat_t &bias)
+{
+#if !__aarch64__
+ int w = in_mat.w;
+ int h = in_mat.h;
+ int outw = out_mat.w;
+ int outh = out_mat.h;
+ int channels = in_mat.c;
+
+#pragma omp parallel for
+ for (int c = 0; c < channels; c++)
+ {
+ const float *filter = kernel.data + c * 9;
+#ifdef NCNN
+ float *inbuf = in_mat.data + c * alignSize(w * h, 16 / sizeof(float));
+ float *outbuf = out_mat.data + c * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *inbuf = in_mat.data + c * w * h;
+ float *outbuf = out_mat.data + c * outw * outh;
+#endif // NCNN
+ float bias0 = bias.data ? bias.data[c] : 0.0f;
+
+ register float32x4_t weight012 asm("q4") = vld1q_f32(filter);
+ register float32x4_t weight345 asm("q5") = vld1q_f32(filter + 3);
+ register float32x4_t weight678 asm("q6") = vld1q_f32(filter + 6);
+ register float32x4_t qbias0 asm("q7") = vdupq_n_f32(bias0);
+
+ float *in_ptr0 = inbuf + 0 * w;
+ float *in_ptr1 = inbuf + 1 * w;
+ float *in_ptr2 = inbuf + 2 * w;
+
+ float *out_ptr0 = outbuf + 0 * outw;
+
+ int i;
+ for (i = 0; i < outh; i++)
+ {
+ int nn = (outw >> 2) - 1;
+ int remain = (outw & 0x03) + 4;
+
+ if (i == outh - 1)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q2, #0\n"
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr0], %[in_ptr0], #28\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q3, %e[weight012][0]\n"
+ "vmul.f32 q11, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q1, %f[weight012][0]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr1], %[in_ptr1], #28\n"
+
+ "vmla.f32 q10, q3, %e[weight345][0]\n"
+ "vmla.f32 q11, q0, %e[weight345][1]\n"
+ "vmla.f32 q14, q1, %f[weight345][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: if nn == 0, pad_left comes here.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ out_ptr0++;
+ }
+ }
+ else
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q2, #0\n"
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr0], %[in_ptr0], #28\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q3, %e[weight012][0]\n"
+ "vmul.f32 q11, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q1, %f[weight012][0]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr1], %[in_ptr1], #28\n"
+
+ "vmla.f32 q10, q3, %e[weight345][0]\n"
+ "vmla.f32 q11, q0, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q1, %f[weight345][0]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr2], %[in_ptr2], #28\n"
+
+ "vmla.f32 q10, q3, %e[weight678][0]\n"
+ "vmla.f32 q11, q0, %e[weight678][1]\n"
+ "vmla.f32 q14, q1, %f[weight678][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr2]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q1, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, the assembly prologue is skipped and this scalar tail must also handle the left padding.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ input2 = vsetq_lane_f32(0.0f, input2, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ in_ptr2 += 2;
+ out_ptr0++;
+ }
+
+ in_ptr0 += w;
+ in_ptr1 += w;
+ in_ptr2 += w;
+ }
+ }
+ }
+#endif // !__aarch64__
+}
+
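+ // 3x3 depthwise convolution, stride 2, pad_h = 1, pad_w = 0 (see the
+ // dispatch in srcn_depthwise_conv below). The first output row reads only
+ // input rows 0-1 against kernel rows 1-2, the last output row reads two
+ // rows against kernel rows 0-1, and interior rows use all three.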
+static void depthwise_conv3x3S2_padding10(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convMat_t &bias)
+{
+#if !__aarch64__
+ int w = in_mat.w;
+ int h = in_mat.h;
+ int outw = out_mat.w;
+ int outh = out_mat.h;
+ int channels = in_mat.c;
+
+#pragma omp parallel for
+ for (int c = 0; c < channels; c++)
+ {
+ const float *filter = kernel.data + c * 9;
+#ifdef NCNN
+ float *inbuf = in_mat.data + c * alignSize(w * h, 16 / sizeof(float));
+ float *outbuf = out_mat.data + c * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *inbuf = in_mat.data + c * w * h;
+ float *outbuf = out_mat.data + c * outw * outh;
+#endif // NCNN
+ float bias0 = bias.data ? bias.data[c] : 0.0f;
+
+ register float32x4_t weight012 asm("q4") = vld1q_f32(filter);
+ register float32x4_t weight345 asm("q5") = vld1q_f32(filter + 3);
+ register float32x4_t weight678 asm("q6") = vld1q_f32(filter + 6);
+ register float32x4_t qbias0 asm("q7") = vdupq_n_f32(bias0);
+
+ float *in_ptr0 = inbuf + 0 * w;
+ float *in_ptr1 = inbuf + 1 * w;
+ float *in_ptr2 = inbuf + 2 * w;
+
+ float *out_ptr0 = outbuf + 0 * outw;
+
+ int i;
+ for (i = 0; i < outh; i++)
+ {
+ int nn = (outw >> 2) - 1;
+ int remain = (outw & 0x03) + 4;
+
+ // TODO: i == 0 && i == outh - 1
+ if (i == 0)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight345][0]\n"
+ "vmul.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q1, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight345);
+ out0 = vmlaq_f32(out0, input1, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ out_ptr0++;
+ }
+
+ in_ptr2 += w;
+ }
+ else if (i == outh - 1)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ out_ptr0++;
+ }
+ }
+ else
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr2]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q1, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ input2 = vsetq_lane_f32(0.0f, input2, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ in_ptr2 += 2;
+ out_ptr0++;
+ }
+
+ in_ptr0 += w;
+ in_ptr1 += w;
+ in_ptr2 += w;
+ }
+ }
+ }
+#endif // !__aarch64__
+}
+
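+ // 3x3 depthwise convolution, stride 2, pad_h = 1, pad_w = 1: the same
+ // top/bottom row handling as the pad_h = 1, pad_w = 0 variant above,
+ // combined with the zero column spliced in on the left by the vmov/vext
+ // prologue.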
+static void depthwise_conv3x3S2_padding11(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convMat_t &bias)
+{
+#if !__aarch64__
+ int w = in_mat.w;
+ int h = in_mat.h;
+ int outw = out_mat.w;
+ int outh = out_mat.h;
+ int channels = in_mat.c;
+
+#pragma omp parallel for
+ for (int c = 0; c < channels; c++)
+ {
+ const float *filter = kernel.data + c * 9;
+#ifdef NCNN
+ float *inbuf = in_mat.data + c * alignSize(w * h, 16 / sizeof(float));
+ float *outbuf = out_mat.data + c * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *inbuf = in_mat.data + c * w * h;
+ float *outbuf = out_mat.data + c * outw * outh;
+#endif // NCNN
+ float bias0 = bias.data ? bias.data[c] : 0.0f;
+
+ register float32x4_t weight012 asm("q4") = vld1q_f32(filter);
+ register float32x4_t weight345 asm("q5") = vld1q_f32(filter + 3);
+ register float32x4_t weight678 asm("q6") = vld1q_f32(filter + 6);
+ register float32x4_t qbias0 asm("q7") = vdupq_n_f32(bias0);
+
+ float *in_ptr0 = inbuf + 0 * w;
+ float *in_ptr1 = inbuf + 1 * w;
+ float *in_ptr2 = inbuf + 2 * w;
+
+ float *out_ptr0 = outbuf + 0 * outw;
+
+ int i;
+ for (i = 0; i < outh; i++)
+ {
+ int nn = (outw >> 2) - 1;
+ int remain = (outw & 0x03) + 4;
+
+ // TODO: i == 0 && i == outh - 1
+ if (i == 0)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q2, #0\n"
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr0], %[in_ptr0], #28\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q3, %e[weight345][0]\n"
+ "vmul.f32 q11, q0, %e[weight345][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q1, %f[weight345][0]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr1], %[in_ptr1], #28\n"
+
+ "vmla.f32 q10, q3, %e[weight678][0]\n"
+ "vmla.f32 q11, q0, %e[weight678][1]\n"
+ "vmla.f32 q14, q1, %f[weight678][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight345][0]\n"
+ "vmul.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q1, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, the assembly prologue is skipped and this scalar tail must also handle the left padding.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight345);
+ out0 = vmlaq_f32(out0, input1, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ out_ptr0++;
+ }
+
+ in_ptr2 += w;
+ }
+ else if (i == outh - 1)
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q2, #0\n"
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr0], %[in_ptr0], #28\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q3, %e[weight012][0]\n"
+ "vmul.f32 q11, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q1, %f[weight012][0]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr1], %[in_ptr1], #28\n"
+
+ "vmla.f32 q10, q3, %e[weight345][0]\n"
+ "vmla.f32 q11, q0, %e[weight345][1]\n"
+ "vmla.f32 q14, q1, %f[weight345][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, the assembly prologue is skipped and this scalar tail must also handle the left padding.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ out_ptr0++;
+ }
+ }
+ else
+ {
+ if (nn > 0)
+ {
+ __asm __volatile("vmov.i32 q2, #0\n"
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr0], %[in_ptr0], #28\n"
+
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q3, %e[weight012][0]\n"
+ "vmul.f32 q11, q0, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q1, %f[weight012][0]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr1], %[in_ptr1], #28\n"
+
+ "vmla.f32 q10, q3, %e[weight345][0]\n"
+ "vmla.f32 q11, q0, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q1, %f[weight345][0]\n"
+ "vext.32 q3, q2, q0, #3\n"
+ "add %[in_ptr2], %[in_ptr2], #28\n"
+
+ "vmla.f32 q10, q3, %e[weight678][0]\n"
+ "vmla.f32 q11, q0, %e[weight678][1]\n"
+ "vmla.f32 q14, q1, %f[weight678][0]\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "beq 2f\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "1:\n"
+ "vand q14, %q[qbias0], %q[qbias0]\n"
+ "vmul.f32 q10, q0, %e[weight012][0]\n"
+ "vmul.f32 q11, q1, %e[weight012][1]\n"
+
+ "pld [%[in_ptr1], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr1]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr1]]\n"
+ "vmla.f32 q14, q3, %f[weight012][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight345][0]\n"
+ "vmla.f32 q11, q1, %e[weight345][1]\n"
+
+ "pld [%[in_ptr2], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr2]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr2]]\n"
+ "vmla.f32 q14, q3, %f[weight345][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vmla.f32 q10, q0, %e[weight678][0]\n"
+ "vmla.f32 q11, q1, %e[weight678][1]\n"
+
+ "pld [%[in_ptr0], #256]\n"
+ "vld2.f32 {d0-d3}, [%[in_ptr0]]!\n"
+ "vld1.f32 {d4[0]}, [%[in_ptr0]]\n"
+ "vmla.f32 q14, q3, %f[weight678][0]\n"
+ "vext.32 q3, q0, q2, #1\n"
+
+ "vadd.f32 q14, q14, q10\n"
+ "vadd.f32 q14, q14, q11\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "vst1.f32 {d28-d29}, [%[out_ptr0]]!\n"
+ "bne 1b\n"
+ "sub %[in_ptr0], %[in_ptr0], #32\n"
+ "2:\n"
+ : [in_ptr0] "+r"(in_ptr0), [in_ptr1] "+r"(in_ptr1),
+ [in_ptr2] "+r"(in_ptr2), [out_ptr0] "+r"(out_ptr0), [nn] "+r"(nn)
+ : [weight012] "w"(weight012), [weight345] "w"(weight345),
+ [weight678] "w"(weight678), [qbias0] "w"(qbias0)
+ : "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q13", "q14",
+ "q15", "cc", "memory");
+ }
+ for (; remain > 0; remain--)
+ {
+ // TODO: when nn == 0, the assembly prologue is skipped and this scalar tail must also handle the left padding.
+ float32x4_t input0 = vld1q_f32(in_ptr0);
+ float32x4_t input1 = vld1q_f32(in_ptr1);
+ float32x4_t input2 = vld1q_f32(in_ptr2);
+
+ if (remain == 1)
+ {
+ input0 = vsetq_lane_f32(0.0f, input0, 2);
+ input1 = vsetq_lane_f32(0.0f, input1, 2);
+ input2 = vsetq_lane_f32(0.0f, input2, 2);
+ }
+
+ float32x4_t out0 = vmulq_f32(input0, weight012);
+ out0 = vmlaq_f32(out0, input1, weight345);
+ out0 = vmlaq_f32(out0, input2, weight678);
+
+ out0 = vsetq_lane_f32(bias0, out0, 3);
+
+ float32x2_t out00 = vadd_f32(vget_low_f32(out0), vget_high_f32(out0));
+
+ float32x2_t out01 = vpadd_f32(out00, out00);
+
+ *out_ptr0 = vget_lane_f32(out01, 0);
+
+ in_ptr0 += 2;
+ in_ptr1 += 2;
+ in_ptr2 += 2;
+ out_ptr0++;
+ }
+
+ in_ptr0 += w;
+ in_ptr1 += w;
+ in_ptr2 += w;
+ }
+ }
+ }
+#endif // !__aarch64__
+}
+
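+ // Depthwise convolution for the column-major (channels-innermost) layout,
+ // aarch64 only: each output row is zeroed once, then every kernel tap
+ // (kh, kw) is accumulated into it, walking output columns in blocks of
+ // 4, 2 and 1 and vectorizing across channels.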
+static void depthwise_conv_colmajor(const convMat_t &in_mat, convMat_t &out_mat,
+ const convMat_t &kernel, const convParams_t &in_param)
+{
+#if __aarch64__
+ const int w = in_mat.w;
+ const int h = in_mat.h;
+ const int outw = out_mat.w;
+ const int outh = out_mat.h;
+ const int channels = out_mat.c;
+ const int stridew = in_param.stride_w;
+ const int strideh = in_param.stride_h;
+ const int padding = in_param.padding;
+ const int padw = in_param.pad_w;
+ const int padh = in_param.pad_h;
+
+#pragma omp parallel for
+ for (int oh = 0; oh < outh; oh++)
+ {
+ const float *input_data0 = in_mat.data + (oh * strideh - padh) * w * channels;
+
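+ // input_data0 may point before the buffer when oh * strideh < padh; the
+ // padding check inside the (kh, kw) loops skips those out-of-range rows
+ // before the pointer is dereferenced.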
+ memset(out_mat.data + oh * outw * channels, 0x00, outw * channels * sizeof(float));
+
+ for (int kh = 0; kh < in_param.kernel_h; kh++)
+ {
+ for (int kw = 0; kw < in_param.kernel_w; kw++)
+ {
+ const float *kernel_data = kernel.data + (kh * in_param.kernel_w + kw) * channels;
+ const float *input_data1 = input_data0 + (kh * w + kw) * channels;
+
+ if (padding && ((oh * strideh + kh < padh) || (oh * strideh + kh >= padh + h)))
+ {
+ continue;
+ }
+
+ int ow = 0;
+ for (; ow + 3 < outw; /*ow += 4*/)
+ {
+ if (((ow + 3) * stridew + kw < padw) || (ow * stridew + kw >= padw + w))
+ {
+ ow += 4;
+ continue;
+ }
+ else if ((ow + 3) * stridew + kw >= padw + w)
+ {
+ break;
+ }
+ else if (ow * stridew + kw < padw)
+ {
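+ // Skip to the first output column that clears the left padding:
+ // delta = ceil((padw - kw) / stridew) - ow.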
+ int delta = (padw - kw) / stridew - ow;
+ delta += (padw - kw) % stridew ? 1 : 0;
+ ow += delta;
+ continue;
+ }
+
+ int nn = channels >> 2;
+ int remain = channels & 0x03;
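+ // Vectorize across channels: groups of 4 in the assembly, a 2-wide NEON
+ // step for the next pair, then a scalar multiply-accumulate for an odd
+ // leftover channel.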
+
+ const float *input_r0 = input_data1 + (ow * stridew - padw) * channels;
+
+ const float *input_r1 = input_r0 + stridew * channels;
+ const float *input_r2 = input_r1 + stridew * channels;
+ const float *input_r3 = input_r2 + stridew * channels;
+ const float *weights_data = kernel_data;
+ float *output_r0 = out_mat.data + (oh * outw + ow) * channels;
+ float *output_r1 = output_r0 + channels;
+ float *output_r2 = output_r1 + channels;
+ float *output_r3 = output_r2 + channels;
+
+ if (nn > 0)
+ {
+ int _n = (nn + 1) >> 1;
+ int oddn = nn & 1;
+
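+ // The assembly consumes two channel groups of 4 per "0:" iteration; _n is
+ // the pair count rounded up, and oddn flags a leftover single group that
+ // is finished after label "1:".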
+ asm volatile("subs %[_n], %[_n], #1\n"
+ "ld1 {v4.4s}, [%[weights_data]], #16\n"
+ "ld1 {v5.4s}, [%[input_r0]], #16\n"
+ "ld1 {v6.4s}, [%[input_r1]], #16\n"
+ "ld1 {v7.4s}, [%[input_r2]], #16\n"
+ "ld1 {v8.4s}, [%[input_r3]], #16\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v24.4s, v25.4s}, [%[output_r0]]\n"
+ "ld1 {v26.4s, v27.4s}, [%[output_r1]]\n"
+ "ld1 {v28.4s, v29.4s}, [%[output_r2]]\n"
+ "ld1 {v30.4s, v31.4s}, [%[output_r3]]\n"
+
+ "ld1 {v9.4s}, [%[weights_data]], #16\n"
+ "ld1 {v10.4s}, [%[input_r0]], #16\n"
+ "ld1 {v11.4s}, [%[input_r1]], #16\n"
+ "ld1 {v12.4s}, [%[input_r2]], #16\n"
+ "ld1 {v13.4s}, [%[input_r3]], #16\n"
+
+ "fmla v24.4s, v4.4s, v5.4s\n"
+ "fmla v26.4s, v4.4s, v6.4s\n"
+
+ "fmla v28.4s, v4.4s, v7.4s\n"
+ "fmla v30.4s, v4.4s, v8.4s\n"
+
+ "ld1 {v4.4s}, [%[weights_data]], #16\n"
+ "ld1 {v5.4s}, [%[input_r0]], #16\n"
+ "ld1 {v6.4s}, [%[input_r1]], #16\n"
+ "ld1 {v7.4s}, [%[input_r2]], #16\n"
+ "ld1 {v8.4s}, [%[input_r3]], #16\n"
+
+ "fmla v25.4s, v9.4s, v10.4s\n"
+ "fmla v27.4s, v9.4s, v11.4s\n"
+
+ "fmla v29.4s, v9.4s, v12.4s\n"
+ "fmla v31.4s, v9.4s, v13.4s\n"
+
+ "st1 {v24.4s, v25.4s}, [%[output_r0]], #32\n"
+ "st1 {v26.4s, v27.4s}, [%[output_r1]], #32\n"
+ "st1 {v28.4s, v29.4s}, [%[output_r2]], #32\n"
+ "st1 {v30.4s, v31.4s}, [%[output_r3]], #32\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v24.4s}, [%[output_r0]]\n"
+ "ld1 {v26.4s}, [%[output_r1]]\n"
+ "ld1 {v28.4s}, [%[output_r2]]\n"
+ "ld1 {v30.4s}, [%[output_r3]]\n"
+ "cmp %[oddn], #1\n"
+
+ "fmla v24.4s, v4.4s, v5.4s\n"
+ "fmla v26.4s, v4.4s, v6.4s\n"
+
+ "fmla v28.4s, v4.4s, v7.4s\n"
+ "fmla v30.4s, v4.4s, v8.4s\n"
+
+ "st1 {v24.4s}, [%[output_r0]], #16\n"
+ "st1 {v26.4s}, [%[output_r1]], #16\n"
+ "st1 {v28.4s}, [%[output_r2]], #16\n"
+ "st1 {v30.4s}, [%[output_r3]], #16\n"
+
+ "beq 2f\n"
+ "ld1 {v25.4s}, [%[output_r0]]\n"
+ "ld1 {v27.4s}, [%[output_r1]]\n"
+ "ld1 {v29.4s}, [%[output_r2]]\n"
+ "ld1 {v31.4s}, [%[output_r3]]\n"
+
+ "ld1 {v9.4s}, [%[weights_data]], #16\n"
+ "ld1 {v10.4s}, [%[input_r0]], #16\n"
+ "ld1 {v11.4s}, [%[input_r1]], #16\n"
+ "ld1 {v12.4s}, [%[input_r2]], #16\n"
+ "ld1 {v13.4s}, [%[input_r3]], #16\n"
+
+ "fmla v25.4s, v9.4s, v10.4s\n"
+ "fmla v27.4s, v9.4s, v11.4s\n"
+
+ "fmla v29.4s, v9.4s, v12.4s\n"
+ "fmla v31.4s, v9.4s, v13.4s\n"
+
+ "st1 {v25.4s}, [%[output_r0]], #16\n"
+ "st1 {v27.4s}, [%[output_r1]], #16\n"
+ "st1 {v29.4s}, [%[output_r2]], #16\n"
+ "st1 {v31.4s}, [%[output_r3]], #16\n"
+ "2:\n"
+ : [weights_data] "+r"(weights_data), [input_r0] "+r"(input_r0),
+ [input_r1] "+r"(input_r1), [input_r2] "+r"(input_r2),
+ [input_r3] "+r"(input_r3), [output_r0] "+r"(output_r0),
+ [output_r1] "+r"(output_r1), [output_r2] "+r"(output_r2),
+ [output_r3] "+r"(output_r3), [_n] "+r"(_n)
+ : [oddn] "r"(oddn)
+ : "cc", "memory", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11", "v12",
+ "v13", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+ }
+ if (remain >= 2)
+ {
+ asm volatile(
+ "ld1 {v24.2s}, [%[output_r0]]\n"
+ "ld1 {v26.2s}, [%[output_r1]]\n"
+ "ld1 {v28.2s}, [%[output_r2]]\n"
+ "ld1 {v30.2s}, [%[output_r3]]\n"
+ "ld1 {v4.2s}, [%[weights_data]], #8\n"
+ "ld1 {v5.2s}, [%[input_r0]], #8\n"
+
+ "ld1 {v6.2s}, [%[input_r1]], #8\n"
+ "ld1 {v7.2s}, [%[input_r2]], #8\n"
+ "ld1 {v8.2s}, [%[input_r3]], #8\n"
+
+ "fmla v24.2s, v4.2s, v5.2s\n"
+ "fmla v26.2s, v4.2s, v6.2s\n"
+
+ "fmla v28.2s, v4.2s, v7.2s\n"
+ "fmla v30.2s, v4.2s, v8.2s\n"
+
+ "st1 {v24.2s}, [%[output_r0]], #8\n"
+ "st1 {v26.2s}, [%[output_r1]], #8\n"
+ "st1 {v28.2s}, [%[output_r2]], #8\n"
+ "st1 {v30.2s}, [%[output_r3]], #8\n"
+ : [weights_data] "+r"(weights_data), [input_r0] "+r"(input_r0),
+ [input_r1] "+r"(input_r1), [input_r2] "+r"(input_r2), [input_r3] "+r"(input_r3),
+ [output_r0] "+r"(output_r0), [output_r1] "+r"(output_r1),
+ [output_r2] "+r"(output_r2), [output_r3] "+r"(output_r3)
+ :
+ : "cc", "memory", "v4", "v5", "v6", "v7", "v8", "v24", "v26", "v28", "v30");
+ remain -= 2;
+ }
+
+ if (remain > 0)
+ {
+ // The single remaining channel shares one weight across all four columns.
+ *output_r0++ += (*weights_data) * (*input_r0++);
+ *output_r1++ += (*weights_data) * (*input_r1++);
+ *output_r2++ += (*weights_data) * (*input_r2++);
+ *output_r3++ += (*weights_data++) * (*input_r3++);
+ }
+ ow += 4;
+ }
+
+ for (; ow + 1 < outw; /*ow += 2*/)
+ {
+ if (padding)
+ {
+ if (((ow + 1) * stridew + kw < padw) || (ow * stridew + kw >= padw + w))
+ {
+ ow += 2;
+ continue;
+ }
+ else if ((ow + 1) * stridew + kw >= padw + w)
+ {
+ break;
+ }
+ else if (ow * stridew + kw < padw)
+ {
+ ow++;
+ continue;
+ }
+ }
+
+ int nn = channels >> 2;
+ int remain = channels & 0x03;
+
+ const float *input_r0 = input_data1 + (ow * stridew - padw) * channels;
+
+ const float *input_r1 = input_r0 + stridew * channels;
+ const float *weights_data = kernel_data;
+ float *output_r0 = out_mat.data + (oh * outw + ow) * channels;
+ float *output_r1 = output_r0 + channels;
+
+ if (nn > 0)
+ {
+ int _n = (nn + 1) >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("subs %[_n], %[_n], #1\n"
+ "ld1 {v4.4s}, [%[weights_data]], #16\n"
+ "ld1 {v5.4s}, [%[input_r0]], #16\n"
+ "ld1 {v6.4s}, [%[input_r1]], #16\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v24.4s, v25.4s}, [%[output_r0]]\n"
+ "ld1 {v26.4s, v27.4s}, [%[output_r1]]\n"
+
+ "ld1 {v9.4s}, [%[weights_data]], #16\n"
+ "ld1 {v10.4s}, [%[input_r0]], #16\n"
+ "ld1 {v11.4s}, [%[input_r1]], #16\n"
+
+ "fmla v24.4s, v4.4s, v5.4s\n"
+ "fmla v26.4s, v4.4s, v6.4s\n"
+
+ "ld1 {v4.4s}, [%[weights_data]], #16\n"
+ "ld1 {v5.4s}, [%[input_r0]], #16\n"
+ "ld1 {v6.4s}, [%[input_r1]], #16\n"
+
+ "fmla v25.4s, v9.4s, v10.4s\n"
+ "fmla v27.4s, v9.4s, v11.4s\n"
+
+ "st1 {v24.4s, v25.4s}, [%[output_r0]], #32\n"
+ "st1 {v26.4s, v27.4s}, [%[output_r1]], #32\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v24.4s}, [%[output_r0]]\n"
+ "ld1 {v26.4s}, [%[output_r1]]\n"
+ "cmp %[oddn], #1\n"
+
+ "fmla v24.4s, v4.4s, v5.4s\n"
+ "fmla v26.4s, v4.4s, v6.4s\n"
+
+ "st1 {v24.4s}, [%[output_r0]], #16\n"
+ "st1 {v26.4s}, [%[output_r1]], #16\n"
+
+ "beq 2f\n"
+ "ld1 {v25.4s}, [%[output_r0]]\n"
+ "ld1 {v27.4s}, [%[output_r1]]\n"
+
+ "ld1 {v9.4s}, [%[weights_data]], #16\n"
+ "ld1 {v10.4s}, [%[input_r0]], #16\n"
+ "ld1 {v11.4s}, [%[input_r1]], #16\n"
+
+ "fmla v25.4s, v9.4s, v10.4s\n"
+ "fmla v27.4s, v9.4s, v11.4s\n"
+
+ "st1 {v25.4s}, [%[output_r0]], #16\n"
+ "st1 {v27.4s}, [%[output_r1]], #16\n"
+ "2:\n"
+ : [weights_data] "+r"(weights_data), [input_r0] "+r"(input_r0),
+ [input_r1] "+r"(input_r1), [output_r0] "+r"(output_r0),
+ [output_r1] "+r"(output_r1), [_n] "+r"(_n)
+ : [oddn] "r"(oddn)
+ : "cc", "memory", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11", "v12",
+ "v13", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+ }
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v24.2s}, [%[output_r0]]\n"
+ "ld1 {v26.2s}, [%[output_r1]]\n"
+ "ld1 {v4.2s}, [%[weights_data]], #8\n"
+ "ld1 {v5.2s}, [%[input_r0]], #8\n"
+
+ "ld1 {v6.2s}, [%[input_r1]], #8\n"
+
+ "fmla v24.2s, v4.2s, v5.2s\n"
+ "fmla v26.2s, v4.2s, v6.2s\n"
+
+ "st1 {v24.2s}, [%[output_r0]], #8\n"
+ "st1 {v26.2s}, [%[output_r1]], #8\n"
+ : [weights_data] "+r"(weights_data), [input_r0] "+r"(input_r0),
+ [input_r1] "+r"(input_r1), [output_r0] "+r"(output_r0),
+ [output_r1] "+r"(output_r1)
+ :
+ : "cc", "memory", "v4", "v5", "v6", "v7", "v8", "v24", "v26", "v28",
+ "v30");
+ remain -= 2;
+ }
+
+ if (remain > 0)
+ {
+ *output_r0++ += (*weights_data) * (*input_r0++);
+ *output_r1++ += (*weights_data++) * (*input_r1++);
+ }
+ ow += 2;
+ }
+
+ for (; ow < outw; ow++)
+ {
+ const float *input_data = input_data1 + (ow * stridew - padw) * channels;
+
+ if (padding && ((ow * stridew + kw < padw) || (ow * stridew + kw >= padw + w)))
+ {
+ continue;
+ }
+
+ int nn = channels >> 2;
+ int remain = channels & 0x03;
+
+ const float *weights_data = kernel_data;
+ float *output_data = out_mat.data + (oh * outw + ow) * channels;
+
+ if (nn > 0)
+ {
+ int _n = (nn + 1) >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("subs %[_n], %[_n], #1\n"
+ "ld1 {v4.4s}, [%[weights_data]], #16\n"
+ "ld1 {v5.4s}, [%[input_data]], #16\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v30.4s, v31.4s}, [%[output_data]]\n"
+ "ld1 {v6.4s}, [%[weights_data]], #16\n"
+ "ld1 {v7.4s}, [%[input_data]], #16\n"
+ "fmla v30.4s, v4.4s, v5.4s\n"
+
+ "ld1 {v4.4s}, [%[weights_data]], #16\n"
+ "ld1 {v5.4s}, [%[input_data]], #16\n"
+ "fmla v31.4s, v6.4s, v7.4s\n"
+
+ "st1 {v30.4s, v31.4s}, [%[output_data]], #32\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v30.4s}, [%[output_data]]\n"
+ "cmp %[oddn], #1\n"
+ "fmla v30.4s, v4.4s, v5.4s\n"
+ "st1 {v30.4s}, [%[output_data]], #16\n"
+ "beq 2f\n"
+ "ld1 {v31.4s}, [%[output_data]]\n"
+ "ld1 {v6.4s}, [%[weights_data]], #16\n"
+ "ld1 {v7.4s}, [%[input_data]], #16\n"
+ "fmla v31.4s, v6.4s, v7.4s\n"
+
+ "st1 {v31.4s}, [%[output_data]], #16\n"
+ "2:\n"
+ : [weights_data] "+r"(weights_data), [input_data] "+r"(input_data),
+ [output_data] "+r"(output_data), [_n] "+r"(_n)
+ : [oddn] "r"(oddn)
+ : "cc", "memory", "v4", "v5", "v30", "v31");
+ }
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v30.2s}, [%[output_data]]\n"
+ "ld1 {v4.2s}, [%[weights_data]], #8\n"
+ "ld1 {v5.2s}, [%[input_data]], #8\n"
+
+ "fmla v30.2s, v4.2s, v5.2s\n"
+
+ "st1 {v30.2s}, [%[output_data]], #8\n"
+ : [weights_data] "+r"(weights_data), [input_data] "+r"(input_data),
+ [output_data] "+r"(output_data)
+ :
+ : "cc", "memory", "v4", "v5", "v30");
+ remain -= 2;
+ }
+
+ if (remain > 0)
+ {
+ *output_data++ += (*weights_data++) * (*input_data++);
+ }
+ }
+ }
+ }
+ }
+#endif // __aarch64__
+}
+
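+ // Entry point: picks a specialized kernel from the data layout, the 3x3
+ // kernel/stride combination and the (pad_h, pad_w) pair. Combinations not
+ // listed below (non-3x3 kernels, dilations > 1, mixed strides) currently
+ // fall through and leave the output untouched.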
+void srcn_depthwise_conv(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convMat_t &bias, const convParams_t &in_param, int num_threads,
+ convType_t conv_type)
+{
+ omp_set_num_threads(num_threads);
+
+ if (conv_type == col_major)
+ {
+ depthwise_conv_colmajor(in_mat, out_mat, weights_mat, in_param);
+ return;
+ }
+  else if (conv_type == row_major)
+ {
+ if (in_param.kernel_w == 3 && in_param.kernel_h == 3 && in_param.dilation_w == 1 &&
+ in_param.dilation_h == 1)
+ {
+ if (in_param.stride_w == 1 && in_param.stride_h == 1)
+ {
+ if (in_param.padding == 0)
+ depthwise_conv3x3S1_nopad(in_mat, out_mat, weights_mat, bias);
+ else
+ depthwise_conv3x3S1_padding(in_mat, out_mat, weights_mat, bias);
+ }
+ else if (in_param.stride_w == 2 && in_param.stride_h == 2)
+ {
+ if (in_param.padding == 0)
+ depthwise_conv3x3S2_nopad(in_mat, out_mat, weights_mat, bias);
+ else
+ {
+ if (in_param.pad_w == 0 && in_param.pad_h == 0)
+ depthwise_conv3x3S2_padding00(in_mat, out_mat, weights_mat, bias);
+ else if (in_param.pad_w == 0 && in_param.pad_h == 1)
+ depthwise_conv3x3S2_padding10(in_mat, out_mat, weights_mat, bias);
+ else if (in_param.pad_w == 1 && in_param.pad_h == 0)
+ depthwise_conv3x3S2_padding01(in_mat, out_mat, weights_mat, bias);
+ else if (in_param.pad_w == 1 && in_param.pad_h == 1)
+ depthwise_conv3x3S2_padding11(in_mat, out_mat, weights_mat, bias);
+ }
+ }
+ }
+ }
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/direct_conv_colmajor.cc b/runtimes/libs/srcn/src/direct_conv_colmajor.cc
new file mode 100644
index 000000000..394ea6d58
--- /dev/null
+++ b/runtimes/libs/srcn/src/direct_conv_colmajor.cc
@@ -0,0 +1,5872 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef _OPENMP
+#include <omp.h>
+#endif
+
+#include <stdlib.h>
+#include <arm_neon.h>
+#include "srcn/conv_type.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+#if __aarch64__
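+ // Direct convolution over column-major blobs: each kernel tap m is
+ // multiply-accumulated into the output in turn, walking output columns in
+ // blocks of 4/2/1, input channels in blocks of 4/2/1, and output channels
+ // in NEON groups of 4, then 2, then a scalar remainder.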
+static void direct_conv_l(const convMat_t &bottom_blob, convMat_t &top_blob,
+ const convMat_t &_kernel, const int _stride, const int padding,
+ const int pad_top, const int pad_left)
+{
+ const int w = bottom_blob.w;
+ const int h = bottom_blob.h;
+ const int inch = bottom_blob.c;
+ const int outw = top_blob.w;
+ const int outh = top_blob.h;
+ const int outch = top_blob.c;
+ const int kernel_w = _kernel.w;
+ const int kernel_h = _kernel.h;
+
+ for (int m = 0; m < kernel_w * kernel_h; m++)
+ {
+ const float *_kernel0 = _kernel.data + m * inch * outch;
+ const float *img0 =
+ bottom_blob.data + (m / kernel_w - pad_top) * w * inch + (m % kernel_w - pad_left) * inch;
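+ // img0 aligns kernel tap m (row m / kernel_w, column m % kernel_w) with
+ // output (0, 0), offset by the padding; taps whose input row falls outside
+ // the image are skipped in the loop below.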
+
+#ifdef _OPENMP
+#pragma omp parallel for
+#endif // _OPENMP
+ for (int p = 0; p < outh; p++)
+ {
+ float *out0 = top_blob.data + p * outw * outch;
+
+ // clear output
+ if (m == 0)
+ {
+ for (int j = 0; j < outw * outch; j++)
+ {
+ *(out0 + j) = 0.f;
+ }
+ }
+
+ if (padding)
+ {
+ if (((p * _stride + m / kernel_w) < pad_top) || (p * _stride + m / kernel_w >= pad_top + h))
+ {
+ continue;
+ }
+ }
+
+ const float *img1 = img0 + p * w * inch * _stride;
+
+ int q = 0;
+ for (; q + 3 < outw; /*q += 4*/)
+ {
+ if (padding)
+ {
+ if (((q + 3) * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w) >= pad_left + w)
+ {
+ out0 += outch * 4;
+ img1 += inch * _stride * 4;
+ q += 4;
+ continue;
+ }
+ else if ((q + 3) * _stride + m % kernel_w >= pad_left + w)
+ {
+ break;
+ }
+ else if (q * _stride + m % kernel_w < pad_left)
+ {
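+ // Skip to the first output column that clears the left padding:
+ // delta = ceil((pad_left - m % kernel_w) / _stride) - q.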
+ int delta = (pad_left - m % kernel_w) / _stride - q;
+ delta += (pad_left - m % kernel_w) % _stride ? 1 : 0;
+ out0 += outch * delta;
+ img1 += inch * _stride * delta;
+ q += delta;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *_x1 = img1 + inch * _stride;
+ const float *_x2 = img1 + inch * _stride * 2;
+ const float *_x3 = img1 + inch * _stride * 3;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("v4") = vld1q_f32(_x0);
+ register float32x4_t rx1 asm("v5") = vld1q_f32(_x1);
+ register float32x4_t rx2 asm("v16") = vld1q_f32(_x2);
+ register float32x4_t rx3 asm("v17") = vld1q_f32(_x3);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+ float *outptr2 = out0 + outch * 2;
+ float *outptr3 = out0 + outch * 3;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v8.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v8.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v9.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v9.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v12.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v12.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v13.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v13.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v8.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v8.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v9.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v9.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v12.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v12.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v13.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v13.4s, %[rx3].s[3]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v8.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v8.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v9.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v9.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n), [outptr2] "+r"(outptr2),
+ [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn),
+ [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13",
+ "v14", "v15", "v30", "v31");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+ "ld1 {v30.2s}, [%[outptr2]]\n"
+ "ld1 {v31.2s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v30.2s, v6.2s, %[rx2].s[0]\n"
+ "fmla v31.2s, v6.2s, %[rx3].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+ "fmla v30.2s, v7.2s, %[rx2].s[1]\n"
+ "fmla v31.2s, v7.2s, %[rx3].s[1]\n"
+ "fmla v14.2s, v8.2s, %[rx0].s[2]\n"
+ "fmla v15.2s, v8.2s, %[rx1].s[2]\n"
+ "fmla v30.2s, v8.2s, %[rx2].s[2]\n"
+ "fmla v31.2s, v8.2s, %[rx3].s[2]\n"
+ "fmla v14.2s, v9.2s, %[rx0].s[3]\n"
+ "fmla v15.2s, v9.2s, %[rx1].s[3]\n"
+ "fmla v30.2s, v9.2s, %[rx2].s[3]\n"
+ "fmla v31.2s, v9.2s, %[rx3].s[3]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+ "st1 {v30.2s}, [%[outptr2]], #8\n"
+ "st1 {v31.2s}, [%[outptr3]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1),
+
+ [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v14", "v15", "v30",
+ "v31");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x1 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x1 + 3));
+
+ *outptr2 += (*kernel0) * (*_x2) + (*(kernel0 + outch)) * (*(_x2 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x2 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x2 + 3));
+
+ *outptr3 += (*kernel0) * (*_x3) + (*(kernel0 + outch)) * (*(_x3 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x3 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x3 + 3));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ outptr2++;
+ outptr3++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ _x1 += 4;
+ _x2 += 4;
+ _x3 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_f32(_x1);
+ register float32x2_t rx2 asm("v16") = vld1_f32(_x2);
+ register float32x2_t rx3 asm("v17") = vld1_f32(_x3);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+ float *outptr2 = out0 + outch * 2;
+ float *outptr3 = out0 + outch * 3;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile(
+ "cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1),
+ [_n] "+r"(_n), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn),
+ [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v10", "v11", "v14", "v15", "v30", "v31");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+ "ld1 {v30.2s}, [%[outptr2]]\n"
+ "ld1 {v31.2s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v30.2s, v6.2s, %[rx2].s[0]\n"
+ "fmla v31.2s, v6.2s, %[rx3].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+ "fmla v30.2s, v7.2s, %[rx2].s[1]\n"
+ "fmla v31.2s, v7.2s, %[rx3].s[1]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+ "st1 {v30.2s}, [%[outptr2]], #8\n"
+ "st1 {v31.2s}, [%[outptr3]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1),
+
+ [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v14", "v15", "v30", "v31");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1));
+ *outptr2 += (*kernel0) * (*_x2) + (*(kernel0 + outch)) * (*(_x2 + 1));
+ *outptr3 += (*kernel0) * (*_x3) + (*(kernel0 + outch)) * (*(_x3 + 1));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ outptr2++;
+ outptr3++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ _x1 += 2;
+ _x2 += 2;
+ _x3 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_dup_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_dup_f32(_x1);
+ register float32x2_t rx2 asm("v16") = vld1_dup_f32(_x2);
+ register float32x2_t rx3 asm("v17") = vld1_dup_f32(_x3);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+ float *outptr2 = out0 + outch * 2;
+ float *outptr3 = out0 + outch * 3;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile(
+ "cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1),
+ [_n] "+r"(_n), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn), [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v10", "v14", "v15", "v30", "v31");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+ "ld1 {v30.2s}, [%[outptr2]]\n"
+ "ld1 {v31.2s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v30.2s, v6.2s, %[rx2].s[0]\n"
+ "fmla v31.2s, v6.2s, %[rx3].s[0]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+ "st1 {v30.2s}, [%[outptr2]], #8\n"
+ "st1 {v31.2s}, [%[outptr3]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1),
+
+ [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v14", "v15", "v30", "v31");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+ *outptr1 += (*kernel0) * (*_x1);
+ *outptr2 += (*kernel0) * (*_x2);
+ *outptr3 += (*kernel0) * (*_x3);
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ outptr2++;
+ outptr3++;
+ }
+
+ _x0 += 1;
+ _x1 += 1;
+ _x2 += 1;
+ _x3 += 1;
+ }
+
+ img1 += inch * 4 * _stride;
+ out0 += outch * 4;
+ q += 4;
+ }
+
+ for (; q + 1 < outw; /*q += 2*/)
+ {
+ if (padding)
+ {
+ if (((q + 1) * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w) >= pad_left + w)
+ {
+ out0 += outch * 2;
+ img1 += inch * _stride * 2;
+ q += 2;
+ continue;
+ }
+ else if ((q + 1) * _stride + m % kernel_w >= pad_left + w)
+ {
+ break;
+ }
+ else if (q * _stride + m % kernel_w < pad_left)
+ {
+ out0 += outch;
+ img1 += inch * _stride;
+ q++;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *_x1 = img1 + inch * _stride;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
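+ // Four input channels per iteration; rx0/rx1 each hold four consecutive input values for one column.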
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("v4") = vld1q_f32(_x0);
+ register float32x4_t rx1 asm("v5") = vld1q_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13",
+ "v14", "v15");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+ "fmla v14.2s, v8.2s, %[rx0].s[2]\n"
+ "fmla v15.2s, v8.2s, %[rx1].s[2]\n"
+ "fmla v14.2s, v9.2s, %[rx0].s[3]\n"
+ "fmla v15.2s, v9.2s, %[rx1].s[3]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v14", "v15");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x1 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x1 + 3));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ _x1 += 4;
+ }
+
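+ // Input-channel pairs: rx0/rx1 carry two values per column.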
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v10", "v11", "v14", "v15");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+ : "cc", "memory", "x0", "v6", "v7", "v14", "v15");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ _x1 += 2;
+ }
+
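+ // Remaining input channel (when inch is odd): one value per column, loaded with vld1_dup so lane 0 carries it.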
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_dup_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_dup_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v10", "v14", "v15");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1)
+ : "cc", "memory", "x0", "v6", "v14", "v15");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+ *outptr1 += (*kernel0) * (*_x1);
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ _x0 += 1;
+ _x1 += 1;
+ }
+
+ img1 += inch * 2 * _stride;
+ out0 += outch * 2;
+ q += 2;
+ }
+
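+ // Remaining output columns, one at a time.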
+ for (; q < outw; q++)
+ {
+ if (padding)
+ {
+ if ((q * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w >= pad_left + w))
+ {
+ img1 += inch * _stride;
+ out0 += outch;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("v4") = vld1q_f32(_x0);
+
+ float *outptr0 = out0;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13",
+ "v14");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v14.2s, v8.2s, %[rx0].s[2]\n"
+ "fmla v14.2s, v9.2s, %[rx0].s[3]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v14");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_f32(_x0);
+
+ float *outptr0 = out0;
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v10", "v11", "v14");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+ : "cc", "memory", "x0", "v6", "v7", "v14");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_dup_f32(_x0);
+
+ float *outptr0 = out0;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v10", "v14");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [rx0] "w"(rx0)
+ : "cc", "memory", "x0", "v6", "v14");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+
+ kernel0++;
+ outptr0++;
+ }
+
+ _x0 += 1;
+ }
+
+ img1 += inch * _stride;
+ out0 += outch;
+ }
+ }
+ }
+}
+
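+ // Direct convolution; the _s variant appears to take the stride as a run-time
+ // parameter. Each output row is cleared, then every kernel tap (m) accumulates
+ // into it, vectorized across output channels in blocks of four, then two,
+ // then a scalar tail.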
+static void direct_conv_s(const convMat_t &bottom_blob, convMat_t &top_blob,
+ const convMat_t &_kernel, const int _stride, const int padding,
+ const int pad_top, const int pad_left)
+{
+ const int w = bottom_blob.w;
+ const int h = bottom_blob.h;
+ const int inch = bottom_blob.c;
+ const int outw = top_blob.w;
+ const int outh = top_blob.h;
+ const int outch = top_blob.c;
+ const int kernel_w = _kernel.w;
+ const int kernel_h = _kernel.h;
+
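+ // Output rows are independent, so the row loop is parallelized when OpenMP is available.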
+#ifdef _OPENMP
+#pragma omp parallel for
+#endif
+ for (int p = 0; p < outh; p++)
+ {
+ const float *img0 = bottom_blob.data + (p * _stride - pad_top) * w * inch;
+ float *out = top_blob.data + p * outw * outch;
+
+ // Zero the output row before accumulating the kernel taps.
+ for (int j = 0; j < outw * outch; j++)
+ {
+ *(out + j) = 0.f;
+ }
+
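+ // Accumulate one kernel tap per pass; taps that fall outside the padded input are skipped.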
+ for (int m = 0; m < kernel_w * kernel_h; m++)
+ {
+ if (padding)
+ {
+ if (((p * _stride + m / kernel_w) < pad_top) || (p * _stride + m / kernel_w >= pad_top + h))
+ {
+ continue;
+ }
+ }
+
+ float *out0 = out;
+ const float *_kernel0 = _kernel.data + m * inch * outch;
+ const float *img1 = img0 + (m / kernel_w) * w * inch + (m % kernel_w - pad_left) * inch;
+
+ int q = 0;
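+ // Main loop: four output columns per pass (q advances by 4 at the end of the body).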
+ for (; q + 3 < outw; /*q += 4*/)
+ {
+ if (padding)
+ {
+ if (((q + 3) * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w) >= pad_left + w)
+ {
+ out0 += outch * 4;
+ img1 += inch * _stride * 4;
+ q += 4;
+ continue;
+ }
+ else if ((q + 3) * _stride + m % kernel_w >= pad_left + w)
+ {
+ break;
+ }
+ else if (q * _stride + m % kernel_w < pad_left)
+ {
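+ // Skip the columns still covered by the left padding; the division is
+ // rounded up when (pad_left - m % kernel_w) is not a multiple of the stride.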
+ int delta = (pad_left - m % kernel_w) / _stride - q;
+ delta += (pad_left - m % kernel_w) % _stride ? 1 : 0;
+ out0 += outch * delta;
+ img1 += inch * _stride * delta;
+ q += delta;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *_x1 = img1 + inch * _stride;
+ const float *_x2 = img1 + inch * _stride * 2;
+ const float *_x3 = img1 + inch * _stride * 3;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
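+ // Pin the input vectors to fixed NEON registers so the inline asm can name them via the %[rx*] operands.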
+ register float32x4_t rx0 asm("v4") = vld1q_f32(_x0);
+ register float32x4_t rx1 asm("v5") = vld1q_f32(_x1);
+ register float32x4_t rx2 asm("v16") = vld1q_f32(_x2);
+ register float32x4_t rx3 asm("v17") = vld1q_f32(_x3);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+ float *outptr2 = out0 + outch * 2;
+ float *outptr3 = out0 + outch * 3;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
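+ // The assembly is unrolled by two: the loop at label 0 runs _n double
+ // iterations, label 1 drains the last software-pipelined pair, and label 2
+ // runs one extra block when nn is odd (checked against %[oddn]).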
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v8.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v8.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v9.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v9.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v12.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v12.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v13.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v13.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v8.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v8.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v9.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v9.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v12.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v12.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v13.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v13.4s, %[rx3].s[3]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v30.4s, v8.4s, %[rx2].s[2]\n"
+ "fmla v31.4s, v8.4s, %[rx3].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+ "fmla v30.4s, v9.4s, %[rx2].s[3]\n"
+ "fmla v31.4s, v9.4s, %[rx3].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n), [outptr2] "+r"(outptr2),
+ [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn),
+ [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13",
+ "v14", "v15", "v30", "v31");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+ "ld1 {v30.2s}, [%[outptr2]]\n"
+ "ld1 {v31.2s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v30.2s, v6.2s, %[rx2].s[0]\n"
+ "fmla v31.2s, v6.2s, %[rx3].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+ "fmla v30.2s, v7.2s, %[rx2].s[1]\n"
+ "fmla v31.2s, v7.2s, %[rx3].s[1]\n"
+ "fmla v14.2s, v8.2s, %[rx0].s[2]\n"
+ "fmla v15.2s, v8.2s, %[rx1].s[2]\n"
+ "fmla v30.2s, v8.2s, %[rx2].s[2]\n"
+ "fmla v31.2s, v8.2s, %[rx3].s[2]\n"
+ "fmla v14.2s, v9.2s, %[rx0].s[3]\n"
+ "fmla v15.2s, v9.2s, %[rx1].s[3]\n"
+ "fmla v30.2s, v9.2s, %[rx2].s[3]\n"
+ "fmla v31.2s, v9.2s, %[rx3].s[3]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+ "st1 {v30.2s}, [%[outptr2]], #8\n"
+ "st1 {v31.2s}, [%[outptr3]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v14", "v15", "v30",
+ "v31");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x1 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x1 + 3));
+
+ *outptr2 += (*kernel0) * (*_x2) + (*(kernel0 + outch)) * (*(_x2 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x2 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x2 + 3));
+
+ *outptr3 += (*kernel0) * (*_x3) + (*(kernel0 + outch)) * (*(_x3 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x3 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x3 + 3));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ outptr2++;
+ outptr3++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ _x1 += 4;
+ _x2 += 4;
+ _x3 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_f32(_x1);
+ register float32x2_t rx2 asm("v16") = vld1_f32(_x2);
+ register float32x2_t rx3 asm("v17") = vld1_f32(_x3);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+ float *outptr2 = out0 + outch * 2;
+ float *outptr3 = out0 + outch * 3;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile(
+ "cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v11.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v11.4s, %[rx3].s[1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v30.4s, v7.4s, %[rx2].s[1]\n"
+ "fmla v31.4s, v7.4s, %[rx3].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1),
+ [_n] "+r"(_n), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn),
+ [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v10", "v11", "v14", "v15", "v30", "v31");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+ "ld1 {v30.2s}, [%[outptr2]]\n"
+ "ld1 {v31.2s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v30.2s, v6.2s, %[rx2].s[0]\n"
+ "fmla v31.2s, v6.2s, %[rx3].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+ "fmla v30.2s, v7.2s, %[rx2].s[1]\n"
+ "fmla v31.2s, v7.2s, %[rx3].s[1]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+ "st1 {v30.2s}, [%[outptr2]], #8\n"
+ "st1 {v31.2s}, [%[outptr3]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v7", "v14", "v15", "v30", "v31");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1));
+ *outptr2 += (*kernel0) * (*_x2) + (*(kernel0 + outch)) * (*(_x2 + 1));
+ *outptr3 += (*kernel0) * (*_x3) + (*(kernel0 + outch)) * (*(_x3 + 1));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ outptr2++;
+ outptr3++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ _x1 += 2;
+ _x2 += 2;
+ _x3 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_dup_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_dup_f32(_x1);
+ register float32x2_t rx2 asm("v16") = vld1_dup_f32(_x2);
+ register float32x2_t rx3 asm("v17") = vld1_dup_f32(_x3);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+ float *outptr2 = out0 + outch * 2;
+ float *outptr3 = out0 + outch * 3;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile(
+ "cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v10.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v10.4s, %[rx3].s[0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+ "ld1 {v30.4s}, [%[outptr2]]\n"
+ "ld1 {v31.4s}, [%[outptr3]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v30.4s, v6.4s, %[rx2].s[0]\n"
+ "fmla v31.4s, v6.4s, %[rx3].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "st1 {v30.4s}, [%[outptr2]], #16\n"
+ "st1 {v31.4s}, [%[outptr3]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1),
+ [_n] "+r"(_n), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn), [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v10", "v14", "v15", "v30", "v31");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+ "ld1 {v30.2s}, [%[outptr2]]\n"
+ "ld1 {v31.2s}, [%[outptr3]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v30.2s, v6.2s, %[rx2].s[0]\n"
+ "fmla v31.2s, v6.2s, %[rx3].s[0]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+ "st1 {v30.2s}, [%[outptr2]], #8\n"
+ "st1 {v31.2s}, [%[outptr3]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [outptr2] "+r"(outptr2), [outptr3] "+r"(outptr3)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1), [rx2] "w"(rx2), [rx3] "w"(rx3)
+ : "cc", "memory", "x0", "v6", "v14", "v15", "v30", "v31");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+ *outptr1 += (*kernel0) * (*_x1);
+ *outptr2 += (*kernel0) * (*_x2);
+ *outptr3 += (*kernel0) * (*_x3);
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ outptr2++;
+ outptr3++;
+ }
+
+ _x0 += 1;
+ _x1 += 1;
+ _x2 += 1;
+ _x3 += 1;
+ }
+
+ img1 += inch * 4 * _stride;
+ out0 += outch * 4;
+ q += 4;
+ }
+
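+ // Column pairs, mirroring the four-column path above.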
+ for (; q + 1 < outw; /*q += 2*/)
+ {
+ if (padding)
+ {
+ if (((q + 1) * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w) >= pad_left + w)
+ {
+ out0 += outch * 2;
+ img1 += inch * _stride * 2;
+ q += 2;
+ continue;
+ }
+ else if ((q + 1) * _stride + m % kernel_w >= pad_left + w)
+ {
+ break;
+ }
+ else if (q * _stride + m % kernel_w < pad_left)
+ {
+ out0 += outch;
+ img1 += inch * _stride;
+ q++;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *_x1 = img1 + inch * _stride;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("v4") = vld1q_f32(_x0);
+ register float32x4_t rx1 asm("v5") = vld1q_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v12.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v13.4s, %[rx1].s[3]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v15.4s, v8.4s, %[rx1].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+ "fmla v15.4s, v9.4s, %[rx1].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13",
+ "v14", "v15");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+ "fmla v14.2s, v8.2s, %[rx0].s[2]\n"
+ "fmla v15.2s, v8.2s, %[rx1].s[2]\n"
+ "fmla v14.2s, v9.2s, %[rx0].s[3]\n"
+ "fmla v15.2s, v9.2s, %[rx1].s[3]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v14", "v15");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x1 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x1 + 3));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ _x1 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v11.4s, %[rx1].s[1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v15.4s, v7.4s, %[rx1].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v10", "v11", "v14", "v15");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v15.2s, v7.2s, %[rx1].s[1]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+ : "cc", "memory", "x0", "v6", "v7", "v14", "v15");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ _x1 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_dup_f32(_x0);
+ register float32x2_t rx1 asm("v5") = vld1_dup_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v10.4s, %[rx1].s[0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+ "ld1 {v15.4s}, [%[outptr1]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v15.4s, v6.4s, %[rx1].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "st1 {v15.4s}, [%[outptr1]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v10", "v14", "v15");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+ "ld1 {v15.2s}, [%[outptr1]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v15.2s, v6.2s, %[rx1].s[0]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ "st1 {v15.2s}, [%[outptr1]], #8\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1)
+ : "cc", "memory", "x0", "v6", "v14", "v15");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+ *outptr1 += (*kernel0) * (*_x1);
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ _x0 += 1;
+ _x1 += 1;
+ }
+
+ img1 += inch * 2 * _stride;
+ out0 += outch * 2;
+ q += 2;
+ }
+
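+        // Single-column tail: covers the last output column when outw is odd,
+        // or when the two-column loop above bailed out at the right padding.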
+ for (; q < outw; q++)
+ {
+ if (padding)
+ {
+ if ((q * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w >= pad_left + w))
+ {
+ img1 += inch * _stride;
+ out0 += outch;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("v4") = vld1q_f32(_x0);
+
+ float *outptr0 = out0;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v12.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v13.4s, %[rx0].s[3]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+ "fmla v14.4s, v8.4s, %[rx0].s[2]\n"
+ "fmla v14.4s, v9.4s, %[rx0].s[3]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13",
+ "v14");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v8.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v9.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+ "fmla v14.2s, v8.2s, %[rx0].s[2]\n"
+ "fmla v14.2s, v9.2s, %[rx0].s[3]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+ : "cc", "memory", "x0", "v6", "v7", "v8", "v9", "v14");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_f32(_x0);
+
+ float *outptr0 = out0;
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v11.4s, %[rx0].s[1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+ "fmla v14.4s, v7.4s, %[rx0].s[1]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v7", "v10", "v11", "v14");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+ "add x0, x0, %[stride]\n"
+ "ld1 {v7.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+ "fmla v14.2s, v7.2s, %[rx0].s[1]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+ : "cc", "memory", "x0", "v6", "v7", "v14");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("v4") = vld1_dup_f32(_x0);
+
+ float *outptr0 = out0;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "beq 1f\n"
+
+ "0:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v10.4s}, [x0]\n"
+
+ "fmla v14.4s, v10.4s, %[rx0].s[0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "ld1 {v6.4s}, [x0]\n"
+
+ "ld1 {v14.4s}, [%[outptr0]]\n"
+
+ "fmla v14.4s, v6.4s, %[rx0].s[0]\n"
+
+ "st1 {v14.4s}, [%[outptr0]], #16\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [oddn] "r"(oddn)
+ : "cc", "memory", "x0", "v6", "v10", "v14");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("ld1 {v14.2s}, [%[outptr0]]\n"
+
+ "mov x0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "ld1 {v6.2s}, [x0]\n"
+
+ "fmla v14.2s, v6.2s, %[rx0].s[0]\n"
+
+ "st1 {v14.2s}, [%[outptr0]], #8\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [rx0] "w"(rx0)
+ : "cc", "memory", "x0", "v6", "v14");
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+
+ kernel0++;
+ outptr0++;
+ }
+
+ _x0 += 1;
+ }
+
+ img1 += inch * _stride;
+ out0 += outch;
+ }
+ }
+ }
+}
+
+#else // __aarch64__
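+// ARMv7 NEON variant of direct_conv_l. It mirrors the AArch64 path above: for
+// each kernel tap m, output rows are processed in parallel (OpenMP), two
+// output columns at a time, with the inner loops blocked over input channels
+// (4/2/1) and over output channels (4-wide NEON plus 2-wide and scalar tails).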
+static void direct_conv_l(const convMat_t &bottom_blob, convMat_t &top_blob,
+ const convMat_t &_kernel, const int _stride, const int padding,
+ const int pad_top, const int pad_left)
+{
+ const int w = bottom_blob.w;
+ const int h = bottom_blob.h;
+ const int inch = bottom_blob.c;
+ const int outw = top_blob.w;
+ const int outh = top_blob.h;
+ const int outch = top_blob.c;
+ const int kernel_w = _kernel.w;
+ const int kernel_h = _kernel.h;
+
+ for (int m = 0; m < kernel_w * kernel_h; m++)
+ {
+ const float *_kernel0 = _kernel.data + m * inch * outch;
+ const float *img0 =
+ bottom_blob.data + (m / kernel_w - pad_top) * w * inch + (m % kernel_w - pad_left) * inch;
+
+#ifdef _OPENMP
+#pragma omp parallel for
+#endif // _OPENMP
+ for (int p = 0; p < outh; p++)
+ {
+ float *out0 = top_blob.data + p * outw * outch;
+ // clear output.
+ if (m == 0)
+ {
+ for (int j = 0; j < outw * outch; j++)
+ {
+ *(out0 + j) = 0.f;
+ }
+ }
+
+ if (padding)
+ {
+ if (((p * _stride + m / kernel_w) < pad_top) || (p * _stride + m / kernel_w >= pad_top + h))
+ {
+ continue;
+ }
+ }
+
+ const float *img1 = img0 + p * w * inch * _stride;
+
+ int q = 0;
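+        // q is advanced inside the body (note the commented-out increment):
+        // fully padded column pairs are skipped two at a time, a half-padded
+        // pair one at a time, and the loop breaks once the pair would cross
+        // the right edge.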
+ for (; q + 1 < outw; /*q += 2*/)
+ {
+ if (padding)
+ {
+ if (((q + 1) * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w) >= pad_left + w)
+ {
+ out0 += outch * 2;
+ img1 += inch * _stride * 2;
+ q += 2;
+ continue;
+ }
+ else if (q * _stride + m % kernel_w < pad_left)
+ {
+ out0 += outch;
+ img1 += inch * _stride;
+ q++;
+ continue;
+ }
+ else if ((q + 1) * _stride + m % kernel_w >= pad_left + w)
+ {
+ break;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *_x1 = img1 + inch * _stride;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
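+            // The broadcast inputs are pinned to q4/q5 with register-asm,
+            // which keeps them clear of the q6-q15 clobbers below while the
+            // asm addresses their d-register halves via %e/%f (and %P in the
+            // 2-lane blocks further down).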
+ register float32x4_t rx0 asm("q4") = vld1q_f32(_x0);
+ register float32x4_t rx1 asm("q5") = vld1q_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
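+              // Software-pipelined over pairs of 4-wide output-channel
+              // groups: _n counts the pairs, oddn flags a leftover group.
+              // Each pass loads the next group's kernel columns while the
+              // current group is multiplied and accumulated (label 0 is the
+              // pipelined loop, 1 drains it, 2 handles the odd leftover
+              // group, 3 exits).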
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q15, q6, %e[rx1][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q15, q7, %e[rx1][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q15, q8, %f[rx1][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+ "vmla.f32 q15, q9, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q15, q10, %e[rx1][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q15, q11, %e[rx1][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q15, q12, %f[rx1][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+ "vmla.f32 q15, q13, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q15, q6, %e[rx1][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q15, q7, %e[rx1][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q15, q8, %f[rx1][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+ "vmla.f32 q15, q9, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q15, q10, %e[rx1][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q15, q11, %e[rx1][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q15, q12, %f[rx1][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+ "vmla.f32 q15, q13, %f[rx1][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q15, q6, %e[rx1][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q15, q7, %e[rx1][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q15, q8, %f[rx1][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+ "vmla.f32 q15, q9, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15");
+ }
+
+ if (remain >= 2)
+ {
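+            // The clobber lists below differ by build: the _OPENMP variant
+            // conservatively clobbers q6-q15, presumably so the compiler
+            // never caches values in those registers across the outlined
+            // parallel loop body.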
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+ "vld1.f32 {d30}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18}, [r0]\n"
+
+ "vmla.f32 d28, d12, %e[rx0][0]\n"
+ "vmla.f32 d30, d12, %e[rx1][0]\n"
+ "vmla.f32 d28, d14, %e[rx0][1]\n"
+ "vmla.f32 d30, d14, %e[rx1][1]\n"
+ "vmla.f32 d28, d16, %f[rx0][0]\n"
+ "vmla.f32 d30, d16, %f[rx1][0]\n"
+ "vmla.f32 d28, d18, %f[rx0][1]\n"
+ "vmla.f32 d30, d18, %f[rx1][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ "vst1.f32 {d30}, [%[outptr1]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+#ifndef _OPENMP
+                     : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x1 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x1 + 3));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ _x1 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_f32(_x0);
+ register float32x2_t rx1 asm("d10") = vld1_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+ "vmla.f32 q15, q7, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+ "vmla.f32 q15, q11, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+ "vmla.f32 q15, q7, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+ "vmla.f32 q15, q11, %P[rx1][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+ "vmla.f32 q15, q7, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q10", "q11", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+ "vld1.f32 {d30}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+ "vmla.f32 d30, d12, %P[rx1][0]\n"
+ "vmla.f32 d28, d14, %P[rx0][1]\n"
+ "vmla.f32 d30, d14, %P[rx1][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ "vst1.f32 {d30}, [%[outptr1]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ _x1 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_dup_f32(_x0);
+ register float32x2_t rx1 asm("d10") = vld1_dup_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q10", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+ "vld1.f32 {d30}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+ "vmla.f32 d30, d12, %P[rx1][0]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ "vst1.f32 {d30}, [%[outptr1]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+ *outptr1 += (*kernel0) * (*_x1);
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ _x0 += 1;
+ _x1 += 1;
+ }
+
+ img1 += inch * 2 * _stride;
+ out0 += outch * 2;
+ q += 2;
+ }
+
+ for (; q < outw; q++)
+ {
+ if (padding)
+ {
+ if ((q * _stride + m % kernel_w < pad_left) ||
+                (q * _stride + m % kernel_w) >= pad_left + w)
+ {
+ img1 += inch * _stride;
+ out0 += outch;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("q4") = vld1q_f32(_x0);
+
+ float *outptr0 = out0;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18}, [r0]\n"
+
+ "vmla.f32 d28, d12, %e[rx0][0]\n"
+ "vmla.f32 d28, d14, %e[rx0][1]\n"
+ "vmla.f32 d28, d16, %f[rx0][0]\n"
+ "vmla.f32 d28, d18, %f[rx0][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_f32(_x0);
+
+ float *outptr0 = out0;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q10", "q11", "q14"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+ "vmla.f32 d28, d14, %P[rx0][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_dup_f32(_x0);
+
+ float *outptr0 = out0;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [oddn] "r"(oddn)
+#ifndef _OPENMP
+                     : "cc", "memory", "r0", "q6", "q10", "q14"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [rx0] "w"(rx0)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+                     );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+
+ kernel0++;
+ outptr0++;
+ }
+
+ _x0 += 1;
+ }
+
+ img1 += inch * _stride;
+ out0 += outch;
+ }
+ }
+ }
+}
+
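+// direct_conv_s: sibling of direct_conv_l (the _s suffix presumably marks the
+// small-tensor case). Here the OpenMP loop over output rows is outermost and
+// the kernel-tap loop m runs inside it, so each thread clears and accumulates
+// its own rows and threads never write the same output row.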
+static void direct_conv_s(const convMat_t &bottom_blob, convMat_t &top_blob,
+ const convMat_t &_kernel, const int _stride, const int padding,
+ const int pad_top, const int pad_left)
+{
+ const int w = bottom_blob.w;
+ const int h = bottom_blob.h;
+ const int inch = bottom_blob.c;
+ const int outw = top_blob.w;
+ const int outh = top_blob.h;
+ const int outch = top_blob.c;
+ const int kernel_w = _kernel.w;
+ const int kernel_h = _kernel.h;
+
+#ifdef _OPENMP
+#pragma omp parallel for
+#endif // _OPENMP
+ for (int p = 0; p < outh; p++)
+ {
+ const float *img0 = bottom_blob.data + (p * _stride - pad_top) * w * inch;
+ float *out = top_blob.data + p * outw * outch;
+
+ // clear output.
+ for (int j = 0; j < outw * outch; j++)
+ {
+ *(out + j) = 0.f;
+ }
+
+ for (int m = 0; m < kernel_w * kernel_h; m++)
+ {
+ if (padding)
+ {
+ if (((p * _stride + m / kernel_w) < pad_top) || (p * _stride + m / kernel_w >= pad_top + h))
+ {
+ continue;
+ }
+ }
+
+ float *out0 = out;
+ const float *_kernel0 = _kernel.data + m * inch * outch;
+ const float *img1 = img0 + (m / kernel_w) * w * inch + (m % kernel_w - pad_left) * inch;
+
+ int q = 0;
+ for (; q + 1 < outw; /*q += 2*/)
+ {
+ if (padding)
+ {
+ if (((q + 1) * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w >= pad_left + w))
+ {
+ out0 += outch * 2;
+ img1 += inch * _stride * 2;
+ q += 2;
+ continue;
+ }
+ else if (q * _stride + m % kernel_w < pad_left)
+ {
+ out0 += outch;
+ img1 += inch * _stride;
+ q++;
+ continue;
+ }
+ else if ((q + 1) * _stride + m % kernel_w >= pad_left + w)
+ {
+ break;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *_x1 = img1 + inch * _stride;
+
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("q4") = vld1q_f32(_x0);
+ register float32x4_t rx1 asm("q5") = vld1q_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q15, q6, %e[rx1][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q15, q7, %e[rx1][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q15, q8, %f[rx1][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+ "vmla.f32 q15, q9, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q15, q10, %e[rx1][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q15, q11, %e[rx1][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q15, q12, %f[rx1][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+ "vmla.f32 q15, q13, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q15, q6, %e[rx1][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q15, q7, %e[rx1][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q15, q8, %f[rx1][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+ "vmla.f32 q15, q9, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q15, q10, %e[rx1][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q15, q11, %e[rx1][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q15, q12, %f[rx1][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+ "vmla.f32 q15, q13, %f[rx1][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q15, q6, %e[rx1][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q15, q7, %e[rx1][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q15, q8, %f[rx1][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+ "vmla.f32 q15, q9, %f[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15");
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+ "vld1.f32 {d30}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18}, [r0]\n"
+
+ "vmla.f32 d28, d12, %e[rx0][0]\n"
+ "vmla.f32 d30, d12, %e[rx1][0]\n"
+ "vmla.f32 d28, d14, %e[rx0][1]\n"
+ "vmla.f32 d30, d14, %e[rx1][1]\n"
+ "vmla.f32 d28, d16, %f[rx0][0]\n"
+ "vmla.f32 d30, d16, %f[rx1][0]\n"
+ "vmla.f32 d28, d18, %f[rx0][1]\n"
+ "vmla.f32 d30, d18, %f[rx1][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ "vst1.f32 {d30}, [%[outptr1]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x1 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x1 + 3));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ _x1 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_f32(_x0);
+ register float32x2_t rx1 asm("d10") = vld1_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+ "vmla.f32 q15, q7, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+ "vmla.f32 q15, q11, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+ "vmla.f32 q15, q7, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+ "vmla.f32 q15, q11, %P[rx1][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+ "vmla.f32 q15, q7, %P[rx1][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q10", "q11", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+ "vld1.f32 {d30}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+ "vmla.f32 d30, d12, %P[rx1][0]\n"
+ "vmla.f32 d28, d14, %P[rx0][1]\n"
+ "vmla.f32 d30, d14, %P[rx1][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ "vst1.f32 {d30}, [%[outptr1]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [rx1] "w"(rx1)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+ *outptr1 += (*kernel0) * (*_x1) + (*(kernel0 + outch)) * (*(_x1 + 1));
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ _x1 += 2;
+ }
+
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_dup_f32(_x0);
+ register float32x2_t rx1 asm("d10") = vld1_dup_f32(_x1);
+
+ float *outptr0 = out0;
+ float *outptr1 = out0 + outch;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q15, q10, %P[rx1][0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+ "vld1.f32 {d30-d31}, [%[outptr1]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q15, q6, %P[rx1][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vst1.f32 {d30-d31}, [%[outptr1]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0),
+ [outptr1] "+r"(outptr1), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q10", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+ "vld1.f32 {d30}, [%[outptr1]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+ "vmla.f32 d30, d12, %P[rx1][0]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ "vst1.f32 {d30}, [%[outptr1]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [outptr1] "+r"(outptr1)
+ : [rx0] "w"(rx0), [rx1] "w"(rx1)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+ *outptr1 += (*kernel0) * (*_x1);
+
+ kernel0++;
+ outptr0++;
+ outptr1++;
+ }
+
+ _x0 += 1;
+ _x1 += 1;
+ }
+
+ img1 += inch * 2 * _stride;
+ out0 += outch * 2;
+ q += 2;
+ }
+
+ for (; q < outw; q++)
+ {
+ if (padding)
+ {
+ if ((q * _stride + m % kernel_w < pad_left) ||
+ (q * _stride + m % kernel_w >= pad_left + w))
+ {
+ img1 += inch * _stride;
+ out0 += outch;
+ continue;
+ }
+ }
+
+ const float *_x0 = img1;
+ const float *kernel0 = _kernel0;
+
+ int i = 0;
+ for (; i + 3 < inch; i += 4)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x4_t rx0 asm("q4") = vld1q_f32(_x0);
+
+ float *outptr0 = out0;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+
+ "vmla.f32 q14, q10, %e[rx0][0]\n"
+ "vmla.f32 q14, q11, %e[rx0][1]\n"
+ "vmla.f32 q14, q12, %f[rx0][0]\n"
+ "vmla.f32 q14, q13, %f[rx0][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %e[rx0][0]\n"
+ "vmla.f32 q14, q7, %e[rx0][1]\n"
+ "vmla.f32 q14, q8, %f[rx0][0]\n"
+ "vmla.f32 q14, q9, %f[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d16}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d18}, [r0]\n"
+
+ "vmla.f32 d28, d12, %e[rx0][0]\n"
+ "vmla.f32 d28, d14, %e[rx0][1]\n"
+ "vmla.f32 d28, d16, %f[rx0][0]\n"
+ "vmla.f32 d28, d18, %f[rx0][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1)) +
+ (*(kernel0 + outch * 2)) * (*(_x0 + 2)) +
+ (*(kernel0 + outch * 3)) * (*(_x0 + 3));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch * 3;
+ _x0 += 4;
+ }
+
+ for (; i + 1 < inch; i += 2)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_f32(_x0);
+
+ float *outptr0 = out0;
+
+ int stride = outch << 2;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+ "vmla.f32 q14, q11, %P[rx0][1]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+ "vmla.f32 q14, q7, %P[rx0][1]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [stride] "r"(stride), [rx0] "w"(rx0), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q10", "q11", "q14"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+ "add r0, r0, %[stride]\n"
+ "vld1.f32 {d14}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+ "vmla.f32 d28, d14, %P[rx0][1]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [stride] "r"(stride), [rx0] "w"(rx0)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0) + (*(kernel0 + outch)) * (*(_x0 + 1));
+
+ kernel0++;
+ outptr0++;
+ }
+
+ kernel0 += outch;
+ _x0 += 2;
+ }
+
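+ // Final tail: at most one input channel is left. vld1_dup_f32 broadcasts the
+ // single input value into both lanes of d8 so the lane-indexed vmla encoding
+ // above can be reused unchanged.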
+ for (; i < inch; i++)
+ {
+ int nn = outch >> 2;
+ int remain = outch & 0x03;
+
+ register float32x2_t rx0 asm("d8") = vld1_dup_f32(_x0);
+
+ float *outptr0 = out0;
+
+ if (nn > 0)
+ {
+ int _n = nn >> 1;
+ int oddn = nn & 1;
+
+ asm volatile("cmp %[_n], #0\n"
+ "beq 2f\n"
+ "subs %[_n], %[_n], #1\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "beq 1f\n"
+
+ "0:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "subs %[_n], %[_n], #1\n"
+ "bne 0b\n"
+
+ "1:\n"
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+
+ "vmla.f32 q14, q10, %P[rx0][0]\n"
+
+ "cmp %[oddn], #1\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+
+ "bne 3f\n"
+
+ "2:\n"
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #16\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+
+ "vld1.f32 {d28-d29}, [%[outptr0]]\n"
+
+ "vmla.f32 q14, q6, %P[rx0][0]\n"
+
+ "vst1.f32 {d28-d29}, [%[outptr0]]!\n"
+ "3:\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0), [_n] "+r"(_n)
+ : [rx0] "w"(rx0), [oddn] "r"(oddn)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q10", "q14"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ }
+
+ if (remain >= 2)
+ {
+ asm volatile("vld1.f32 {d28}, [%[outptr0]]\n"
+
+ "mov r0, %[kernel0]\n"
+ "add %[kernel0], %[kernel0], #8\n"
+ "vld1.f32 {d12}, [r0]\n"
+
+ "vmla.f32 d28, d12, %P[rx0][0]\n"
+
+ "vst1.f32 {d28}, [%[outptr0]]!\n"
+ : [kernel0] "+r"(kernel0), [outptr0] "+r"(outptr0)
+ : [rx0] "w"(rx0)
+#ifndef _OPENMP
+ : "cc", "memory", "r0", "q6", "q14", "q15"
+#else // _OPENMP
+ : "cc", "memory", "r0", "q6", "q7", "q8", "q9", "q10", "q11", "q12", "q13",
+ "q14", "q15"
+#endif // _OPENMP
+ );
+ remain -= 2;
+ }
+
+ if (remain == 1)
+ {
+ *outptr0 += (*kernel0) * (*_x0);
+
+ kernel0++;
+ outptr0++;
+ }
+
+ _x0 += 1;
+ }
+
+ img1 += inch * _stride;
+ out0 += outch;
+ }
+ }
+ }
+}
+#endif // __aarch64__
+
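+ // Entry point: dispatch between the small- and large-tensor direct
+ // convolution paths, using the inch * outch product as a cheap size
+ // heuristic (below 256 * 256 the lighter direct_conv_s variant is chosen).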
+void direct_conv_colmajor(const convMat_t &bottom_blob, convMat_t &top_blob,
+ const convMat_t &kernel, const convParams_t &params, int num_threads)
+{
+ omp_set_num_threads(num_threads);
+
+ if (bottom_blob.c * top_blob.c < 256 * 256)
+ {
+ direct_conv_s(bottom_blob, top_blob, kernel, params.stride_w, params.padding, params.pad_h,
+ params.pad_w);
+ return;
+ }
+
+ direct_conv_l(bottom_blob, top_blob, kernel, params.stride_w, params.padding, params.pad_h,
+ params.pad_w);
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/direct_conv_colmajor.h b/runtimes/libs/srcn/src/direct_conv_colmajor.h
new file mode 100644
index 000000000..e50e03907
--- /dev/null
+++ b/runtimes/libs/srcn/src/direct_conv_colmajor.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_DIRECT_CONV_COLMAJOR_H__
+#define __NNFW_SRCN_DIRECT_CONV_COLMAJOR_H__
+
+#include "srcn/conv_type.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+void direct_conv_colmajor(const convMat_t &, convMat_t &, const convMat_t &, const convParams_t &,
+ int);
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_DIRECT_CONV_COLMAJOR_H__
diff --git a/runtimes/libs/srcn/src/sgemm_kernel.cc b/runtimes/libs/srcn/src/sgemm_kernel.cc
new file mode 100644
index 000000000..90c3641db
--- /dev/null
+++ b/runtimes/libs/srcn/src/sgemm_kernel.cc
@@ -0,0 +1,2508 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <arm_neon.h>
+
+namespace nnfw
+{
+namespace srcn
+{
+
+#if __aarch64__
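+ // Row-major SGEMM micro-kernels. Each sgemm_rowmajor_micro_kernel_MxN
+ // computes an M x N tile of the result: when k0 is non-zero the tile is
+ // first loaded from res_ptr and accumulated into, otherwise the accumulators
+ // start from zero. stride is the row stride of the result, in floats.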
+static void sgemm_rowmajor_micro_kernel_8x12(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
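+ // The k loop is unrolled twice: nk counts the paired iterations, minus the
+ // final pair that is peeled off so its stores can be interleaved with the
+ // last multiplies, and oddk flags a leftover single iteration. nstride
+ // converts the row stride from floats to bytes.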
+ int oddk = (k & 1);
+ int nk = ((k + 1) / 2) - 1;
+
+ const int nstride = stride << 2;
+
+ __asm __volatile("ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov x0, %[res_ptr]\n"
+ "ld1 {v8.4s, v9.4s, v10.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v11.4s, v12.4s, v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v14.4s, v15.4s, v16.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v17.4s, v18.4s, v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v20.4s, v21.4s, v22.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v23.4s, v24.4s, v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v26.4s, v27.4s, v28.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v29.4s, v30.4s, v31.4s}, [x0]\n"
+ "cbz %w[nk], 4f\n"
+ "b 1f\n"
+
+ "0:\n"
+ "movi v8.4s, #0x0\n"
+ "movi v9.4s, #0x0\n"
+ "movi v10.4s, #0x0\n"
+ "movi v11.4s, #0x0\n"
+ "movi v12.4s, #0x0\n"
+ "movi v13.4s, #0x0\n"
+ "movi v14.4s, #0x0\n"
+ "movi v15.4s, #0x0\n"
+ "movi v16.4s, #0x0\n"
+ "movi v17.4s, #0x0\n"
+ "movi v18.4s, #0x0\n"
+ "movi v19.4s, #0x0\n"
+ "movi v20.4s, #0x0\n"
+ "movi v21.4s, #0x0\n"
+ "movi v22.4s, #0x0\n"
+ "movi v23.4s, #0x0\n"
+ "movi v24.4s, #0x0\n"
+ "movi v25.4s, #0x0\n"
+ "movi v26.4s, #0x0\n"
+ "movi v27.4s, #0x0\n"
+ "movi v28.4s, #0x0\n"
+ "movi v29.4s, #0x0\n"
+ "movi v30.4s, #0x0\n"
+ "movi v31.4s, #0x0\n"
+ "cbz %w[nk], 4f\n"
+
+ "1:\n"
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v11.4s, v2.4s, v0.s[1]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v14.4s, v2.4s, v0.s[2]\n"
+ "fmla v17.4s, v2.4s, v0.s[3]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v12.4s, v3.4s, v0.s[1]\n"
+ "fmla v15.4s, v3.4s, v0.s[2]\n"
+ "fmla v18.4s, v3.4s, v0.s[3]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "fmla v13.4s, v4.4s, v0.s[1]\n"
+ "fmla v16.4s, v4.4s, v0.s[2]\n"
+ "fmla v19.4s, v4.4s, v0.s[3]\n"
+
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v20.4s, v2.4s, v1.s[0]\n"
+ "fmla v23.4s, v2.4s, v1.s[1]\n"
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v26.4s, v2.4s, v1.s[2]\n"
+ "fmla v29.4s, v2.4s, v1.s[3]\n"
+ "fmla v21.4s, v3.4s, v1.s[0]\n"
+ "fmla v24.4s, v3.4s, v1.s[1]\n"
+ "fmla v27.4s, v3.4s, v1.s[2]\n"
+ "fmla v30.4s, v3.4s, v1.s[3]\n"
+ "fmla v22.4s, v4.4s, v1.s[0]\n"
+ "fmla v25.4s, v4.4s, v1.s[1]\n"
+ "fmla v28.4s, v4.4s, v1.s[2]\n"
+ "fmla v31.4s, v4.4s, v1.s[3]\n"
+
+ "fmla v8.4s, v5.4s, v0.s[0]\n"
+ "fmla v11.4s, v5.4s, v0.s[1]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v14.4s, v5.4s, v0.s[2]\n"
+ "fmla v17.4s, v5.4s, v0.s[3]\n"
+ "fmla v9.4s, v6.4s, v0.s[0]\n"
+ "fmla v12.4s, v6.4s, v0.s[1]\n"
+ "fmla v15.4s, v6.4s, v0.s[2]\n"
+ "fmla v18.4s, v6.4s, v0.s[3]\n"
+ "fmla v10.4s, v7.4s, v0.s[0]\n"
+ "fmla v13.4s, v7.4s, v0.s[1]\n"
+ "fmla v16.4s, v7.4s, v0.s[2]\n"
+ "fmla v19.4s, v7.4s, v0.s[3]\n"
+
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v20.4s, v5.4s, v1.s[0]\n"
+ "fmla v23.4s, v5.4s, v1.s[1]\n"
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v26.4s, v5.4s, v1.s[2]\n"
+ "fmla v29.4s, v5.4s, v1.s[3]\n"
+ "fmla v21.4s, v6.4s, v1.s[0]\n"
+ "fmla v24.4s, v6.4s, v1.s[1]\n"
+ "fmla v27.4s, v6.4s, v1.s[2]\n"
+ "fmla v30.4s, v6.4s, v1.s[3]\n"
+ "fmla v22.4s, v7.4s, v1.s[0]\n"
+ "fmla v25.4s, v7.4s, v1.s[1]\n"
+ "subs %w[nk], %w[nk], #1\n"
+ "fmla v28.4s, v7.4s, v1.s[2]\n"
+ "fmla v31.4s, v7.4s, v1.s[3]\n"
+ "bne 1b\n"
+
+ "4:\n"
+ "mov x0, %[res_ptr]\n"
+ "cbnz %[oddk], 2f\n"
+
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "fmla v11.4s, v2.4s, v0.s[1]\n"
+ "fmla v12.4s, v3.4s, v0.s[1]\n"
+ "fmla v13.4s, v4.4s, v0.s[1]\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v14.4s, v2.4s, v0.s[2]\n"
+ "fmla v15.4s, v3.4s, v0.s[2]\n"
+ "fmla v16.4s, v4.4s, v0.s[2]\n"
+ "fmla v17.4s, v2.4s, v0.s[3]\n"
+ "fmla v18.4s, v3.4s, v0.s[3]\n"
+ "fmla v19.4s, v4.4s, v0.s[3]\n"
+
+ "fmla v20.4s, v2.4s, v1.s[0]\n"
+ "fmla v21.4s, v3.4s, v1.s[0]\n"
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v22.4s, v4.4s, v1.s[0]\n"
+ "fmla v23.4s, v2.4s, v1.s[1]\n"
+ "fmla v24.4s, v3.4s, v1.s[1]\n"
+ "fmla v25.4s, v4.4s, v1.s[1]\n"
+ "fmla v26.4s, v2.4s, v1.s[2]\n"
+ "fmla v27.4s, v3.4s, v1.s[2]\n"
+ "fmla v28.4s, v4.4s, v1.s[2]\n"
+ "fmla v29.4s, v2.4s, v1.s[3]\n"
+ "fmla v30.4s, v3.4s, v1.s[3]\n"
+ "fmla v31.4s, v4.4s, v1.s[3]\n"
+
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v5.4s, v0.s[0]\n"
+ "fmla v9.4s, v6.4s, v0.s[0]\n"
+ "fmla v10.4s, v7.4s, v0.s[0]\n"
+ "st1 {v8.4s, v9.4s, v10.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v11.4s, v5.4s, v0.s[1]\n"
+ "fmla v12.4s, v6.4s, v0.s[1]\n"
+ "fmla v13.4s, v7.4s, v0.s[1]\n"
+ "st1 {v11.4s, v12.4s, v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v14.4s, v5.4s, v0.s[2]\n"
+ "fmla v15.4s, v6.4s, v0.s[2]\n"
+ "fmla v16.4s, v7.4s, v0.s[2]\n"
+ "st1 {v14.4s, v15.4s, v16.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v17.4s, v5.4s, v0.s[3]\n"
+ "fmla v18.4s, v6.4s, v0.s[3]\n"
+ "fmla v19.4s, v7.4s, v0.s[3]\n"
+ "st1 {v17.4s, v18.4s, v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+
+ "fmla v20.4s, v5.4s, v1.s[0]\n"
+ "fmla v21.4s, v6.4s, v1.s[0]\n"
+ "fmla v22.4s, v7.4s, v1.s[0]\n"
+ "st1 {v20.4s, v21.4s, v22.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v23.4s, v5.4s, v1.s[1]\n"
+ "fmla v24.4s, v6.4s, v1.s[1]\n"
+ "fmla v25.4s, v7.4s, v1.s[1]\n"
+ "st1 {v23.4s, v24.4s, v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v26.4s, v5.4s, v1.s[2]\n"
+ "fmla v27.4s, v6.4s, v1.s[2]\n"
+ "fmla v28.4s, v7.4s, v1.s[2]\n"
+ "st1 {v26.4s, v27.4s, v28.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v29.4s, v5.4s, v1.s[3]\n"
+ "fmla v30.4s, v6.4s, v1.s[3]\n"
+ "fmla v31.4s, v7.4s, v1.s[3]\n"
+ "b 3f\n"
+
+ "2:\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "st1 {v8.4s, v9.4s, v10.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v11.4s, v2.4s, v0.s[1]\n"
+ "fmla v12.4s, v3.4s, v0.s[1]\n"
+ "fmla v13.4s, v4.4s, v0.s[1]\n"
+ "st1 {v11.4s, v12.4s, v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v14.4s, v2.4s, v0.s[2]\n"
+ "fmla v15.4s, v3.4s, v0.s[2]\n"
+ "fmla v16.4s, v4.4s, v0.s[2]\n"
+ "st1 {v14.4s, v15.4s, v16.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v17.4s, v2.4s, v0.s[3]\n"
+ "fmla v18.4s, v3.4s, v0.s[3]\n"
+ "fmla v19.4s, v4.4s, v0.s[3]\n"
+ "st1 {v17.4s, v18.4s, v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+
+ "fmla v20.4s, v2.4s, v1.s[0]\n"
+ "fmla v21.4s, v3.4s, v1.s[0]\n"
+ "fmla v22.4s, v4.4s, v1.s[0]\n"
+ "st1 {v20.4s, v21.4s, v22.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v23.4s, v2.4s, v1.s[1]\n"
+ "fmla v24.4s, v3.4s, v1.s[1]\n"
+ "fmla v25.4s, v4.4s, v1.s[1]\n"
+ "st1 {v23.4s, v24.4s, v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v26.4s, v2.4s, v1.s[2]\n"
+ "fmla v27.4s, v3.4s, v1.s[2]\n"
+ "fmla v28.4s, v4.4s, v1.s[2]\n"
+ "st1 {v26.4s, v27.4s, v28.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v29.4s, v2.4s, v1.s[3]\n"
+ "fmla v30.4s, v3.4s, v1.s[3]\n"
+ "fmla v31.4s, v4.4s, v1.s[3]\n"
+
+ "3:\n"
+ "st1 {v29.4s, v30.4s, v31.4s}, [x0]\n"
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk)
+ : [oddk] "r"(oddk), [k0] "r"(k0), [nstride] "r"(nstride)
+ : "x0", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21", "v22",
+ "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+}
+
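+ // 12x8 variant: the transposed tile shape of the kernel above. Twelve lhs
+ // values (v0-v2) are broadcast against two rhs column vectors (v4, v5), with
+ // one accumulator pair per result row in v8..v31.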
+static void sgemm_rowmajor_micro_kernel_12x8(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
+ int oddk = (k & 1);
+ int nk = ((k + 1) / 2) - 1;
+
+ const int nstride = stride << 2;
+
+ __asm __volatile("ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "ld1 {v4.4s, v5.4s}, [%[rhs_ptr]], #32\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov x0, %[res_ptr]\n"
+ "ld1 {v8.4s, v9.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v10.4s, v11.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v12.4s, v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v14.4s, v15.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v16.4s, v17.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v18.4s, v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v20.4s, v21.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v22.4s, v23.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v24.4s, v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v26.4s, v27.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v28.4s, v29.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v30.4s, v31.4s}, [x0]\n"
+ "cbz %w[nk], 4f\n"
+ "b 1f\n"
+
+ "0:\n"
+ "movi v8.4s, #0x0\n"
+ "movi v9.4s, #0x0\n"
+ "movi v10.4s, #0x0\n"
+ "movi v11.4s, #0x0\n"
+ "movi v12.4s, #0x0\n"
+ "movi v13.4s, #0x0\n"
+ "movi v14.4s, #0x0\n"
+ "movi v15.4s, #0x0\n"
+ "movi v16.4s, #0x0\n"
+ "movi v17.4s, #0x0\n"
+ "movi v18.4s, #0x0\n"
+ "movi v19.4s, #0x0\n"
+ "movi v20.4s, #0x0\n"
+ "movi v21.4s, #0x0\n"
+ "movi v22.4s, #0x0\n"
+ "movi v23.4s, #0x0\n"
+ "movi v24.4s, #0x0\n"
+ "movi v25.4s, #0x0\n"
+ "movi v26.4s, #0x0\n"
+ "movi v27.4s, #0x0\n"
+ "movi v28.4s, #0x0\n"
+ "movi v29.4s, #0x0\n"
+ "movi v30.4s, #0x0\n"
+ "movi v31.4s, #0x0\n"
+ "cbz %w[nk], 4f\n"
+
+ "1:\n"
+ "fmla v8.4s, v4.4s, v0.s[0]\n"
+ "fmla v10.4s, v4.4s, v0.s[1]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v12.4s, v4.4s, v0.s[2]\n"
+ "fmla v14.4s, v4.4s, v0.s[3]\n"
+ "fmla v9.4s, v5.4s, v0.s[0]\n"
+ "fmla v11.4s, v5.4s, v0.s[1]\n"
+ "fmla v13.4s, v5.4s, v0.s[2]\n"
+ "fmla v15.4s, v5.4s, v0.s[3]\n"
+
+ "fmla v16.4s, v4.4s, v1.s[0]\n"
+ "fmla v18.4s, v4.4s, v1.s[1]\n"
+ "ld1 {v2.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v20.4s, v4.4s, v1.s[2]\n"
+ "fmla v22.4s, v4.4s, v1.s[3]\n"
+ "fmla v17.4s, v5.4s, v1.s[0]\n"
+ "fmla v19.4s, v5.4s, v1.s[1]\n"
+ "fmla v21.4s, v5.4s, v1.s[2]\n"
+ "fmla v23.4s, v5.4s, v1.s[3]\n"
+
+ "ld1 {v6.4s, v7.4s}, [%[rhs_ptr]], #32\n"
+
+ "fmla v24.4s, v4.4s, v2.s[0]\n"
+ "fmla v26.4s, v4.4s, v2.s[1]\n"
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v28.4s, v4.4s, v2.s[2]\n"
+ "fmla v30.4s, v4.4s, v2.s[3]\n"
+ "fmla v25.4s, v5.4s, v2.s[0]\n"
+ "fmla v27.4s, v5.4s, v2.s[1]\n"
+ "fmla v29.4s, v5.4s, v2.s[2]\n"
+ "fmla v31.4s, v5.4s, v2.s[3]\n"
+
+ "fmla v8.4s, v6.4s, v0.s[0]\n"
+ "fmla v10.4s, v6.4s, v0.s[1]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v12.4s, v6.4s, v0.s[2]\n"
+ "fmla v14.4s, v6.4s, v0.s[3]\n"
+ "fmla v9.4s, v7.4s, v0.s[0]\n"
+ "fmla v11.4s, v7.4s, v0.s[1]\n"
+ "fmla v13.4s, v7.4s, v0.s[2]\n"
+ "fmla v15.4s, v7.4s, v0.s[3]\n"
+
+ "fmla v16.4s, v6.4s, v1.s[0]\n"
+ "fmla v18.4s, v6.4s, v1.s[1]\n"
+ "ld1 {v2.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v20.4s, v6.4s, v1.s[2]\n"
+ "fmla v22.4s, v6.4s, v1.s[3]\n"
+ "fmla v17.4s, v7.4s, v1.s[0]\n"
+ "fmla v19.4s, v7.4s, v1.s[1]\n"
+ "fmla v21.4s, v7.4s, v1.s[2]\n"
+ "fmla v23.4s, v7.4s, v1.s[3]\n"
+
+ "ld1 {v4.4s, v5.4s}, [%[rhs_ptr]], #32\n"
+
+ "fmla v24.4s, v6.4s, v2.s[0]\n"
+ "fmla v26.4s, v6.4s, v2.s[1]\n"
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v28.4s, v6.4s, v2.s[2]\n"
+ "fmla v30.4s, v6.4s, v2.s[3]\n"
+ "fmla v25.4s, v7.4s, v2.s[0]\n"
+ "fmla v27.4s, v7.4s, v2.s[1]\n"
+ "subs %w[nk], %w[nk], #1\n"
+ "fmla v29.4s, v7.4s, v2.s[2]\n"
+ "fmla v31.4s, v7.4s, v2.s[3]\n"
+ "bne 1b\n"
+
+ "4:\n"
+ "mov x0, %[res_ptr]\n"
+ "cbnz %[oddk], 2f\n"
+
+ "fmla v8.4s, v4.4s, v0.s[0]\n"
+ "fmla v9.4s, v5.4s, v0.s[0]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v10.4s, v4.4s, v0.s[1]\n"
+ "fmla v11.4s, v5.4s, v0.s[1]\n"
+ "fmla v12.4s, v4.4s, v0.s[2]\n"
+ "fmla v13.4s, v5.4s, v0.s[2]\n"
+ "fmla v14.4s, v4.4s, v0.s[3]\n"
+ "fmla v15.4s, v5.4s, v0.s[3]\n"
+
+ "fmla v16.4s, v4.4s, v1.s[0]\n"
+ "fmla v17.4s, v5.4s, v1.s[0]\n"
+ "ld1 {v2.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v18.4s, v4.4s, v1.s[1]\n"
+ "fmla v19.4s, v5.4s, v1.s[1]\n"
+ "fmla v20.4s, v4.4s, v1.s[2]\n"
+ "fmla v21.4s, v5.4s, v1.s[2]\n"
+ "fmla v22.4s, v4.4s, v1.s[3]\n"
+ "fmla v23.4s, v5.4s, v1.s[3]\n"
+
+ "ld1 {v6.4s, v7.4s}, [%[rhs_ptr]], #32\n"
+
+ "fmla v24.4s, v4.4s, v2.s[0]\n"
+ "fmla v25.4s, v5.4s, v2.s[0]\n"
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "fmla v26.4s, v4.4s, v2.s[1]\n"
+ "fmla v27.4s, v5.4s, v2.s[1]\n"
+ "fmla v28.4s, v4.4s, v2.s[2]\n"
+ "fmla v29.4s, v5.4s, v2.s[2]\n"
+ "fmla v30.4s, v4.4s, v2.s[3]\n"
+ "fmla v31.4s, v5.4s, v2.s[3]\n"
+
+ "fmla v8.4s, v6.4s, v0.s[0]\n"
+ "fmla v9.4s, v7.4s, v0.s[0]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "st1 {v8.4s, v9.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v10.4s, v6.4s, v0.s[1]\n"
+ "fmla v11.4s, v7.4s, v0.s[1]\n"
+ "st1 {v10.4s, v11.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v12.4s, v6.4s, v0.s[2]\n"
+ "fmla v13.4s, v7.4s, v0.s[2]\n"
+ "st1 {v12.4s, v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v14.4s, v6.4s, v0.s[3]\n"
+ "fmla v15.4s, v7.4s, v0.s[3]\n"
+ "st1 {v14.4s, v15.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+
+ "fmla v16.4s, v6.4s, v1.s[0]\n"
+ "fmla v17.4s, v7.4s, v1.s[0]\n"
+ "ld1 {v2.4s}, [%[lhs_ptr]], #16\n"
+ "st1 {v16.4s, v17.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v18.4s, v6.4s, v1.s[1]\n"
+ "fmla v19.4s, v7.4s, v1.s[1]\n"
+ "st1 {v18.4s, v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v20.4s, v6.4s, v1.s[2]\n"
+ "fmla v21.4s, v7.4s, v1.s[2]\n"
+ "st1 {v20.4s, v21.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v22.4s, v6.4s, v1.s[3]\n"
+ "fmla v23.4s, v7.4s, v1.s[3]\n"
+ "st1 {v22.4s, v23.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+
+ "fmla v24.4s, v6.4s, v2.s[0]\n"
+ "fmla v25.4s, v7.4s, v2.s[0]\n"
+ "st1 {v24.4s, v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v26.4s, v6.4s, v2.s[1]\n"
+ "fmla v27.4s, v7.4s, v2.s[1]\n"
+ "st1 {v26.4s, v27.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v28.4s, v6.4s, v2.s[2]\n"
+ "fmla v29.4s, v7.4s, v2.s[2]\n"
+ "st1 {v28.4s, v29.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v30.4s, v6.4s, v2.s[3]\n"
+ "fmla v31.4s, v7.4s, v2.s[3]\n"
+ "b 3f\n"
+
+ "2:\n"
+ "fmla v8.4s, v4.4s, v0.s[0]\n"
+ "fmla v9.4s, v5.4s, v0.s[0]\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "st1 {v8.4s, v9.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v10.4s, v4.4s, v0.s[1]\n"
+ "fmla v11.4s, v5.4s, v0.s[1]\n"
+ "st1 {v10.4s, v11.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v12.4s, v4.4s, v0.s[2]\n"
+ "fmla v13.4s, v5.4s, v0.s[2]\n"
+ "st1 {v12.4s, v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v14.4s, v4.4s, v0.s[3]\n"
+ "fmla v15.4s, v5.4s, v0.s[3]\n"
+ "st1 {v14.4s, v15.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+
+ "fmla v16.4s, v4.4s, v1.s[0]\n"
+ "fmla v17.4s, v5.4s, v1.s[0]\n"
+ "ld1 {v2.4s}, [%[lhs_ptr]], #16\n"
+ "st1 {v16.4s, v17.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v18.4s, v4.4s, v1.s[1]\n"
+ "fmla v19.4s, v5.4s, v1.s[1]\n"
+ "st1 {v18.4s, v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v20.4s, v4.4s, v1.s[2]\n"
+ "fmla v21.4s, v5.4s, v1.s[2]\n"
+ "st1 {v20.4s, v21.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v22.4s, v4.4s, v1.s[3]\n"
+ "fmla v23.4s, v5.4s, v1.s[3]\n"
+ "st1 {v22.4s, v23.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+
+ "fmla v24.4s, v4.4s, v2.s[0]\n"
+ "fmla v25.4s, v5.4s, v2.s[0]\n"
+ "st1 {v24.4s, v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v26.4s, v4.4s, v2.s[1]\n"
+ "fmla v27.4s, v5.4s, v2.s[1]\n"
+ "st1 {v26.4s, v27.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v28.4s, v4.4s, v2.s[2]\n"
+ "fmla v29.4s, v5.4s, v2.s[2]\n"
+ "st1 {v28.4s, v29.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v30.4s, v4.4s, v2.s[3]\n"
+ "fmla v31.4s, v5.4s, v2.s[3]\n"
+
+ "3:\n"
+ "st1 {v30.4s, v31.4s}, [x0]\n"
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk)
+ : [oddk] "r"(oddk), [k0] "r"(k0), [nstride] "r"(nstride)
+ : "x0", "v0", "v1", "v2", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21", "v22",
+ "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+}
+
+#ifdef BATCH_DILATION_FIX
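+ // 4x24 variant with a zero-skip fast path: each four-element lhs column is
+ // tested as two 64-bit halves, and when all four values are zero the 24 rhs
+ // floats for that k step are skipped together with their fmla instructions.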
+static void sgemm_rowmajor_micro_kernel_4x24(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
+ int oddk = (k & 1);
+ int nk = ((k + 1) / 2) - 1;
+
+ const int nstride = stride << 2;
+
+ __asm __volatile("ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov x0, %[res_ptr]\n"
+ "mov x1, x0\n"
+ "ld1 {v8.4s, v9.4s, v10.4s}, [x1], #48\n"
+ "ld1 {v11.4s, v12.4s, v13.4s}, [x1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "ld1 {v14.4s, v15.4s, v16.4s}, [x1], #48\n"
+ "ld1 {v17.4s, v18.4s, v19.4s}, [x1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "ld1 {v20.4s, v21.4s, v22.4s}, [x1], #48\n"
+ "ld1 {v23.4s, v24.4s, v25.4s}, [x1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "ld1 {v26.4s, v27.4s, v28.4s}, [x1], #48\n"
+ "ld1 {v29.4s, v30.4s, v31.4s}, [x1]\n"
+ "cbz %w[nk], 4f\n"
+ "b 1f\n"
+
+ "0:\n"
+ "movi v8.4s, #0x0\n"
+ "movi v9.4s, #0x0\n"
+ "movi v10.4s, #0x0\n"
+ "movi v11.4s, #0x0\n"
+ "movi v12.4s, #0x0\n"
+ "movi v13.4s, #0x0\n"
+ "movi v14.4s, #0x0\n"
+ "movi v15.4s, #0x0\n"
+ "movi v16.4s, #0x0\n"
+ "movi v17.4s, #0x0\n"
+ "movi v18.4s, #0x0\n"
+ "movi v19.4s, #0x0\n"
+ "movi v20.4s, #0x0\n"
+ "movi v21.4s, #0x0\n"
+ "movi v22.4s, #0x0\n"
+ "movi v23.4s, #0x0\n"
+ "movi v24.4s, #0x0\n"
+ "movi v25.4s, #0x0\n"
+ "movi v26.4s, #0x0\n"
+ "movi v27.4s, #0x0\n"
+ "movi v28.4s, #0x0\n"
+ "movi v29.4s, #0x0\n"
+ "movi v30.4s, #0x0\n"
+ "movi v31.4s, #0x0\n"
+ "cbz %w[nk], 4f\n"
+
+ "1:\n"
+ "mov x0, v0.d[0]\n"
+ "cmp x0, #0\n"
+ "bne 5f\n"
+ "mov x0, v0.d[1]\n"
+ "cmp x0, #0\n"
+ "bne 5f\n"
+ "add %[rhs_ptr], %[rhs_ptr], #96\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "b 6f\n"
+ "5:\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v14.4s, v2.4s, v0.s[1]\n"
+ "fmla v20.4s, v2.4s, v0.s[2]\n"
+ "fmla v26.4s, v2.4s, v0.s[3]\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v15.4s, v3.4s, v0.s[1]\n"
+ "fmla v21.4s, v3.4s, v0.s[2]\n"
+ "fmla v27.4s, v3.4s, v0.s[3]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "fmla v16.4s, v4.4s, v0.s[1]\n"
+ "fmla v22.4s, v4.4s, v0.s[2]\n"
+ "fmla v28.4s, v4.4s, v0.s[3]\n"
+
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+
+ "fmla v11.4s, v5.4s, v0.s[0]\n"
+ "fmla v17.4s, v5.4s, v0.s[1]\n"
+ "fmla v23.4s, v5.4s, v0.s[2]\n"
+ "fmla v29.4s, v5.4s, v0.s[3]\n"
+ "fmla v12.4s, v6.4s, v0.s[0]\n"
+ "fmla v18.4s, v6.4s, v0.s[1]\n"
+ "fmla v24.4s, v6.4s, v0.s[2]\n"
+ "fmla v30.4s, v6.4s, v0.s[3]\n"
+ "fmla v13.4s, v7.4s, v0.s[0]\n"
+ "fmla v19.4s, v7.4s, v0.s[1]\n"
+ "fmla v25.4s, v7.4s, v0.s[2]\n"
+ "fmla v31.4s, v7.4s, v0.s[3]\n"
+
+ "6:\n"
+ "mov x0, v1.d[0]\n"
+ "cmp x0, #0\n"
+ "bne 7f\n"
+ "mov x0, v1.d[1]\n"
+ "cmp x0, #0\n"
+ "bne 7f\n"
+ "add %[rhs_ptr], %[rhs_ptr], #96\n"
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "b 8f\n"
+ "7:\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v8.4s, v2.4s, v1.s[0]\n"
+ "fmla v14.4s, v2.4s, v1.s[1]\n"
+ "fmla v20.4s, v2.4s, v1.s[2]\n"
+ "fmla v26.4s, v2.4s, v1.s[3]\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v9.4s, v3.4s, v1.s[0]\n"
+ "fmla v15.4s, v3.4s, v1.s[1]\n"
+ "fmla v21.4s, v3.4s, v1.s[2]\n"
+ "fmla v27.4s, v3.4s, v1.s[3]\n"
+ "fmla v10.4s, v4.4s, v1.s[0]\n"
+ "fmla v16.4s, v4.4s, v1.s[1]\n"
+ "fmla v22.4s, v4.4s, v1.s[2]\n"
+ "fmla v28.4s, v4.4s, v1.s[3]\n"
+
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+
+ "fmla v11.4s, v5.4s, v1.s[0]\n"
+ "fmla v17.4s, v5.4s, v1.s[1]\n"
+ "fmla v23.4s, v5.4s, v1.s[2]\n"
+ "fmla v29.4s, v5.4s, v1.s[3]\n"
+ "fmla v12.4s, v6.4s, v1.s[0]\n"
+ "fmla v18.4s, v6.4s, v1.s[1]\n"
+ "fmla v24.4s, v6.4s, v1.s[2]\n"
+ "fmla v30.4s, v6.4s, v1.s[3]\n"
+ "fmla v13.4s, v7.4s, v1.s[0]\n"
+ "fmla v19.4s, v7.4s, v1.s[1]\n"
+ "fmla v25.4s, v7.4s, v1.s[2]\n"
+ "fmla v31.4s, v7.4s, v1.s[3]\n"
+
+ "8:\n"
+ "subs %w[nk], %w[nk], #1\n"
+ "bne 1b\n"
+
+ "4:\n"
+ "mov x0, %[res_ptr]\n"
+ "cbnz %[oddk], 2f\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "fmla v14.4s, v2.4s, v0.s[1]\n"
+ "fmla v15.4s, v3.4s, v0.s[1]\n"
+ "fmla v16.4s, v4.4s, v0.s[1]\n"
+ "fmla v20.4s, v2.4s, v0.s[2]\n"
+ "fmla v21.4s, v3.4s, v0.s[2]\n"
+ "fmla v22.4s, v4.4s, v0.s[2]\n"
+ "fmla v26.4s, v2.4s, v0.s[3]\n"
+ "fmla v27.4s, v3.4s, v0.s[3]\n"
+ "fmla v28.4s, v4.4s, v0.s[3]\n"
+
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v11.4s, v5.4s, v0.s[0]\n"
+ "fmla v12.4s, v6.4s, v0.s[0]\n"
+ "fmla v13.4s, v7.4s, v0.s[0]\n"
+ "fmla v17.4s, v5.4s, v0.s[1]\n"
+ "fmla v18.4s, v6.4s, v0.s[1]\n"
+ "fmla v19.4s, v7.4s, v0.s[1]\n"
+ "fmla v23.4s, v5.4s, v0.s[2]\n"
+ "fmla v24.4s, v6.4s, v0.s[2]\n"
+ "fmla v25.4s, v7.4s, v0.s[2]\n"
+ "fmla v29.4s, v5.4s, v0.s[3]\n"
+ "fmla v30.4s, v6.4s, v0.s[3]\n"
+ "fmla v31.4s, v7.4s, v0.s[3]\n"
+
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v2.4s, v1.s[0]\n"
+ "fmla v9.4s, v3.4s, v1.s[0]\n"
+ "fmla v10.4s, v4.4s, v1.s[0]\n"
+ "mov x1, x0\n"
+ "st1 {v8.4s, v9.4s, v10.4s}, [x1], #48\n"
+ "fmla v11.4s, v5.4s, v1.s[0]\n"
+ "fmla v12.4s, v6.4s, v1.s[0]\n"
+ "fmla v13.4s, v7.4s, v1.s[0]\n"
+ "st1 {v11.4s, v12.4s, v13.4s}, [x1]\n"
+ "fmla v14.4s, v2.4s, v1.s[1]\n"
+ "fmla v15.4s, v3.4s, v1.s[1]\n"
+ "fmla v16.4s, v4.4s, v1.s[1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v14.4s, v15.4s, v16.4s}, [x1], #48\n"
+ "fmla v17.4s, v5.4s, v1.s[1]\n"
+ "fmla v18.4s, v6.4s, v1.s[1]\n"
+ "fmla v19.4s, v7.4s, v1.s[1]\n"
+ "st1 {v17.4s, v18.4s, v19.4s}, [x1]\n"
+ "fmla v20.4s, v2.4s, v1.s[2]\n"
+ "fmla v21.4s, v3.4s, v1.s[2]\n"
+ "fmla v22.4s, v4.4s, v1.s[2]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v20.4s, v21.4s, v22.4s}, [x1], #48\n"
+ "fmla v23.4s, v5.4s, v1.s[2]\n"
+ "fmla v24.4s, v6.4s, v1.s[2]\n"
+ "fmla v25.4s, v7.4s, v1.s[2]\n"
+ "st1 {v23.4s, v24.4s, v25.4s}, [x1]\n"
+ "fmla v26.4s, v2.4s, v1.s[3]\n"
+ "fmla v27.4s, v3.4s, v1.s[3]\n"
+ "fmla v28.4s, v4.4s, v1.s[3]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v26.4s, v27.4s, v28.4s}, [x1], #48\n"
+ "fmla v29.4s, v5.4s, v1.s[3]\n"
+ "fmla v30.4s, v6.4s, v1.s[3]\n"
+ "fmla v31.4s, v7.4s, v1.s[3]\n"
+ "b 3f\n"
+
+ "2:\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "mov x1, x0\n"
+ "st1 {v8.4s, v9.4s, v10.4s}, [x1], #48\n"
+ "fmla v11.4s, v5.4s, v0.s[0]\n"
+ "fmla v12.4s, v6.4s, v0.s[0]\n"
+ "fmla v13.4s, v7.4s, v0.s[0]\n"
+ "st1 {v11.4s, v12.4s, v13.4s}, [x1]\n"
+ "fmla v14.4s, v2.4s, v0.s[1]\n"
+ "fmla v15.4s, v3.4s, v0.s[1]\n"
+ "fmla v16.4s, v4.4s, v0.s[1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v14.4s, v15.4s, v16.4s}, [x1], #48\n"
+ "fmla v17.4s, v5.4s, v0.s[1]\n"
+ "fmla v18.4s, v6.4s, v0.s[1]\n"
+ "fmla v19.4s, v7.4s, v0.s[1]\n"
+ "st1 {v17.4s, v18.4s, v19.4s}, [x1]\n"
+ "fmla v20.4s, v2.4s, v0.s[2]\n"
+ "fmla v21.4s, v3.4s, v0.s[2]\n"
+ "fmla v22.4s, v4.4s, v0.s[2]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v20.4s, v21.4s, v22.4s}, [x1], #48\n"
+ "fmla v23.4s, v5.4s, v0.s[2]\n"
+ "fmla v24.4s, v6.4s, v0.s[2]\n"
+ "fmla v25.4s, v7.4s, v0.s[2]\n"
+ "st1 {v23.4s, v24.4s, v25.4s}, [x1]\n"
+ "fmla v26.4s, v2.4s, v0.s[3]\n"
+ "fmla v27.4s, v3.4s, v0.s[3]\n"
+ "fmla v28.4s, v4.4s, v0.s[3]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v26.4s, v27.4s, v28.4s}, [x1], #48\n"
+ "fmla v29.4s, v5.4s, v0.s[3]\n"
+ "fmla v30.4s, v6.4s, v0.s[3]\n"
+ "fmla v31.4s, v7.4s, v0.s[3]\n"
+ "3:\n"
+ "st1 {v29.4s, v30.4s, v31.4s}, [x1]\n"
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk)
+ : [oddk] "r"(oddk), [k0] "r"(k0), [nstride] "r"(nstride)
+ : "x0", "x1", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10",
+ "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21",
+ "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+}
+#else // BATCH_DILATION_FIX
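+ // Plain 4x24 variant without the zero-skip test: every k step issues the
+ // full set of fmla instructions unconditionally.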
+static void sgemm_rowmajor_micro_kernel_4x24(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
+ int oddk = (k & 1);
+ int nk = ((k + 1) / 2) - 1;
+
+ const int nstride = stride << 2;
+
+ __asm __volatile("ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+ "ld1 {v2.4s}, [%[rhs_ptr]], #16\n"
+ "ld1 {v3.4s}, [%[rhs_ptr]], #16\n"
+ "ld1 {v4.4s}, [%[rhs_ptr]], #16\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov x0, %[res_ptr]\n"
+ "mov x1, x0\n"
+ "ld1 {v8.4s, v9.4s, v10.4s}, [x1], #48\n"
+ "ld1 {v11.4s, v12.4s, v13.4s}, [x1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "ld1 {v14.4s, v15.4s, v16.4s}, [x1], #48\n"
+ "ld1 {v17.4s, v18.4s, v19.4s}, [x1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "ld1 {v20.4s, v21.4s, v22.4s}, [x1], #48\n"
+ "ld1 {v23.4s, v24.4s, v25.4s}, [x1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "ld1 {v26.4s, v27.4s, v28.4s}, [x1], #48\n"
+ "ld1 {v29.4s, v30.4s, v31.4s}, [x1]\n"
+ "cbz %w[nk], 4f\n"
+ "b 1f\n"
+
+ "0:\n"
+ "movi v8.4s, #0x0\n"
+ "movi v9.4s, #0x0\n"
+ "movi v10.4s, #0x0\n"
+ "movi v11.4s, #0x0\n"
+ "movi v12.4s, #0x0\n"
+ "movi v13.4s, #0x0\n"
+ "movi v14.4s, #0x0\n"
+ "movi v15.4s, #0x0\n"
+ "movi v16.4s, #0x0\n"
+ "movi v17.4s, #0x0\n"
+ "movi v18.4s, #0x0\n"
+ "movi v19.4s, #0x0\n"
+ "movi v20.4s, #0x0\n"
+ "movi v21.4s, #0x0\n"
+ "movi v22.4s, #0x0\n"
+ "movi v23.4s, #0x0\n"
+ "movi v24.4s, #0x0\n"
+ "movi v25.4s, #0x0\n"
+ "movi v26.4s, #0x0\n"
+ "movi v27.4s, #0x0\n"
+ "movi v28.4s, #0x0\n"
+ "movi v29.4s, #0x0\n"
+ "movi v30.4s, #0x0\n"
+ "movi v31.4s, #0x0\n"
+ "cbz %w[nk], 4f\n"
+
+ "1:\n"
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v14.4s, v2.4s, v0.s[1]\n"
+ "fmla v20.4s, v2.4s, v0.s[2]\n"
+ "fmla v26.4s, v2.4s, v0.s[3]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v15.4s, v3.4s, v0.s[1]\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v21.4s, v3.4s, v0.s[2]\n"
+ "fmla v27.4s, v3.4s, v0.s[3]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "fmla v16.4s, v4.4s, v0.s[1]\n"
+ "fmla v22.4s, v4.4s, v0.s[2]\n"
+ "fmla v28.4s, v4.4s, v0.s[3]\n"
+
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+
+ "fmla v11.4s, v5.4s, v0.s[0]\n"
+ "fmla v17.4s, v5.4s, v0.s[1]\n"
+ "fmla v23.4s, v5.4s, v0.s[2]\n"
+ "fmla v29.4s, v5.4s, v0.s[3]\n"
+ "fmla v12.4s, v6.4s, v0.s[0]\n"
+ "fmla v18.4s, v6.4s, v0.s[1]\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v24.4s, v6.4s, v0.s[2]\n"
+ "fmla v30.4s, v6.4s, v0.s[3]\n"
+ "fmla v13.4s, v7.4s, v0.s[0]\n"
+ "fmla v19.4s, v7.4s, v0.s[1]\n"
+ "fmla v25.4s, v7.4s, v0.s[2]\n"
+ "fmla v31.4s, v7.4s, v0.s[3]\n"
+
+ "fmla v8.4s, v2.4s, v1.s[0]\n"
+ "fmla v14.4s, v2.4s, v1.s[1]\n"
+ "fmla v20.4s, v2.4s, v1.s[2]\n"
+ "fmla v26.4s, v2.4s, v1.s[3]\n"
+ "fmla v9.4s, v3.4s, v1.s[0]\n"
+ "fmla v15.4s, v3.4s, v1.s[1]\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v21.4s, v3.4s, v1.s[2]\n"
+ "fmla v27.4s, v3.4s, v1.s[3]\n"
+ "fmla v10.4s, v4.4s, v1.s[0]\n"
+ "fmla v16.4s, v4.4s, v1.s[1]\n"
+ "fmla v22.4s, v4.4s, v1.s[2]\n"
+ "fmla v28.4s, v4.4s, v1.s[3]\n"
+
+ "ld1 {v0.4s}, [%[lhs_ptr]], #16\n"
+
+ "fmla v11.4s, v5.4s, v1.s[0]\n"
+ "fmla v17.4s, v5.4s, v1.s[1]\n"
+ "fmla v23.4s, v5.4s, v1.s[2]\n"
+ "fmla v29.4s, v5.4s, v1.s[3]\n"
+ "fmla v12.4s, v6.4s, v1.s[0]\n"
+ "fmla v18.4s, v6.4s, v1.s[1]\n"
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+ "fmla v24.4s, v6.4s, v1.s[2]\n"
+ "fmla v30.4s, v6.4s, v1.s[3]\n"
+ "fmla v13.4s, v7.4s, v1.s[0]\n"
+ "fmla v19.4s, v7.4s, v1.s[1]\n"
+ "subs %w[nk], %w[nk], #1\n"
+ "fmla v25.4s, v7.4s, v1.s[2]\n"
+ "fmla v31.4s, v7.4s, v1.s[3]\n"
+ "bne 1b\n"
+
+ "4:\n"
+ "mov x0, %[res_ptr]\n"
+ "cbnz %[oddk], 2f\n"
+ "ld1 {v1.4s}, [%[lhs_ptr]], #16\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "fmla v14.4s, v2.4s, v0.s[1]\n"
+ "fmla v15.4s, v3.4s, v0.s[1]\n"
+ "fmla v16.4s, v4.4s, v0.s[1]\n"
+ "fmla v20.4s, v2.4s, v0.s[2]\n"
+ "fmla v21.4s, v3.4s, v0.s[2]\n"
+ "fmla v22.4s, v4.4s, v0.s[2]\n"
+ "fmla v26.4s, v2.4s, v0.s[3]\n"
+ "fmla v27.4s, v3.4s, v0.s[3]\n"
+ "fmla v28.4s, v4.4s, v0.s[3]\n"
+
+ "ld1 {v2.4s, v3.4s, v4.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v11.4s, v5.4s, v0.s[0]\n"
+ "fmla v12.4s, v6.4s, v0.s[0]\n"
+ "fmla v13.4s, v7.4s, v0.s[0]\n"
+ "fmla v17.4s, v5.4s, v0.s[1]\n"
+ "fmla v18.4s, v6.4s, v0.s[1]\n"
+ "fmla v19.4s, v7.4s, v0.s[1]\n"
+ "fmla v23.4s, v5.4s, v0.s[2]\n"
+ "fmla v24.4s, v6.4s, v0.s[2]\n"
+ "fmla v25.4s, v7.4s, v0.s[2]\n"
+ "fmla v29.4s, v5.4s, v0.s[3]\n"
+ "fmla v30.4s, v6.4s, v0.s[3]\n"
+ "fmla v31.4s, v7.4s, v0.s[3]\n"
+
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v2.4s, v1.s[0]\n"
+ "fmla v9.4s, v3.4s, v1.s[0]\n"
+ "fmla v10.4s, v4.4s, v1.s[0]\n"
+ "mov x1, x0\n"
+ "st1 {v8.4s, v9.4s, v10.4s}, [x1], #48\n"
+ "fmla v11.4s, v5.4s, v1.s[0]\n"
+ "fmla v12.4s, v6.4s, v1.s[0]\n"
+ "fmla v13.4s, v7.4s, v1.s[0]\n"
+ "st1 {v11.4s, v12.4s, v13.4s}, [x1]\n"
+ "fmla v14.4s, v2.4s, v1.s[1]\n"
+ "fmla v15.4s, v3.4s, v1.s[1]\n"
+ "fmla v16.4s, v4.4s, v1.s[1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v14.4s, v15.4s, v16.4s}, [x1], #48\n"
+ "fmla v17.4s, v5.4s, v1.s[1]\n"
+ "fmla v18.4s, v6.4s, v1.s[1]\n"
+ "fmla v19.4s, v7.4s, v1.s[1]\n"
+ "st1 {v17.4s, v18.4s, v19.4s}, [x1]\n"
+ "fmla v20.4s, v2.4s, v1.s[2]\n"
+ "fmla v21.4s, v3.4s, v1.s[2]\n"
+ "fmla v22.4s, v4.4s, v1.s[2]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v20.4s, v21.4s, v22.4s}, [x1], #48\n"
+ "fmla v23.4s, v5.4s, v1.s[2]\n"
+ "fmla v24.4s, v6.4s, v1.s[2]\n"
+ "fmla v25.4s, v7.4s, v1.s[2]\n"
+ "st1 {v23.4s, v24.4s, v25.4s}, [x1]\n"
+ "fmla v26.4s, v2.4s, v1.s[3]\n"
+ "fmla v27.4s, v3.4s, v1.s[3]\n"
+ "fmla v28.4s, v4.4s, v1.s[3]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v26.4s, v27.4s, v28.4s}, [x1], #48\n"
+ "fmla v29.4s, v5.4s, v1.s[3]\n"
+ "fmla v30.4s, v6.4s, v1.s[3]\n"
+ "fmla v31.4s, v7.4s, v1.s[3]\n"
+ "b 3f\n"
+
+ "2:\n"
+ "ld1 {v5.4s, v6.4s, v7.4s}, [%[rhs_ptr]], #48\n"
+
+ "fmla v8.4s, v2.4s, v0.s[0]\n"
+ "fmla v9.4s, v3.4s, v0.s[0]\n"
+ "fmla v10.4s, v4.4s, v0.s[0]\n"
+ "mov x1, x0\n"
+ "st1 {v8.4s, v9.4s, v10.4s}, [x1], #48\n"
+ "fmla v11.4s, v5.4s, v0.s[0]\n"
+ "fmla v12.4s, v6.4s, v0.s[0]\n"
+ "fmla v13.4s, v7.4s, v0.s[0]\n"
+ "st1 {v11.4s, v12.4s, v13.4s}, [x1]\n"
+ "fmla v14.4s, v2.4s, v0.s[1]\n"
+ "fmla v15.4s, v3.4s, v0.s[1]\n"
+ "fmla v16.4s, v4.4s, v0.s[1]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v14.4s, v15.4s, v16.4s}, [x1], #48\n"
+ "fmla v17.4s, v5.4s, v0.s[1]\n"
+ "fmla v18.4s, v6.4s, v0.s[1]\n"
+ "fmla v19.4s, v7.4s, v0.s[1]\n"
+ "st1 {v17.4s, v18.4s, v19.4s}, [x1]\n"
+ "fmla v20.4s, v2.4s, v0.s[2]\n"
+ "fmla v21.4s, v3.4s, v0.s[2]\n"
+ "fmla v22.4s, v4.4s, v0.s[2]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v20.4s, v21.4s, v22.4s}, [x1], #48\n"
+ "fmla v23.4s, v5.4s, v0.s[2]\n"
+ "fmla v24.4s, v6.4s, v0.s[2]\n"
+ "fmla v25.4s, v7.4s, v0.s[2]\n"
+ "st1 {v23.4s, v24.4s, v25.4s}, [x1]\n"
+ "fmla v26.4s, v2.4s, v0.s[3]\n"
+ "fmla v27.4s, v3.4s, v0.s[3]\n"
+ "fmla v28.4s, v4.4s, v0.s[3]\n"
+ "add x0, x0, %[nstride]\n"
+ "mov x1, x0\n"
+ "st1 {v26.4s, v27.4s, v28.4s}, [x1], #48\n"
+ "fmla v29.4s, v5.4s, v0.s[3]\n"
+ "fmla v30.4s, v6.4s, v0.s[3]\n"
+ "fmla v31.4s, v7.4s, v0.s[3]\n"
+ "3:\n"
+ "st1 {v29.4s, v30.4s, v31.4s}, [x1]\n"
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk)
+ : [oddk] "r"(oddk), [k0] "r"(k0), [nstride] "r"(nstride)
+ : "x0", "x1", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10",
+ "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21",
+ "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+}
+#endif // BATCH_DILATION_FIX
+
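+ // 24x4 variant: twenty-four lhs rows (v0-v5) against one rhs vector of four
+ // columns, one accumulator register per row; the final round of fmla is
+ // interleaved with the row stores.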
+static void sgemm_rowmajor_micro_kernel_24x4(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
+ int oddk = (k & 1);
+ int nk = ((k + 1) / 2) - 1;
+
+ const int nstride = stride << 2;
+
+ __asm __volatile("ld1 {v0.4s, v1.4s, v2.4s}, [%[lhs_ptr]], #48\n"
+ "ld1 {v6.4s}, [%[rhs_ptr]], #16\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov x0, %[res_ptr]\n"
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v14.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v15.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v16.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v17.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v18.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v20.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v21.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v22.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v23.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v24.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v26.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v27.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v28.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v29.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v30.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "ld1 {v31.4s}, [x0]\n"
+ "cbz %w[nk], 4f\n"
+ "b 1f\n"
+
+ "0:\n"
+ "movi v8.4s, #0x0\n"
+ "movi v9.4s, #0x0\n"
+ "movi v10.4s, #0x0\n"
+ "movi v11.4s, #0x0\n"
+ "movi v12.4s, #0x0\n"
+ "movi v13.4s, #0x0\n"
+ "movi v14.4s, #0x0\n"
+ "movi v15.4s, #0x0\n"
+ "movi v16.4s, #0x0\n"
+ "movi v17.4s, #0x0\n"
+ "movi v18.4s, #0x0\n"
+ "movi v19.4s, #0x0\n"
+ "movi v20.4s, #0x0\n"
+ "movi v21.4s, #0x0\n"
+ "movi v22.4s, #0x0\n"
+ "movi v23.4s, #0x0\n"
+ "movi v24.4s, #0x0\n"
+ "movi v25.4s, #0x0\n"
+ "movi v26.4s, #0x0\n"
+ "movi v27.4s, #0x0\n"
+ "movi v28.4s, #0x0\n"
+ "movi v29.4s, #0x0\n"
+ "movi v30.4s, #0x0\n"
+ "movi v31.4s, #0x0\n"
+ "cbz %w[nk], 4f\n"
+
+ "1:\n"
+ "ld1 {v3.4s, v4.4s, v5.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v8.4s, v6.4s, v0.s[0]\n"
+ "fmla v9.4s, v6.4s, v0.s[1]\n"
+ "fmla v10.4s, v6.4s, v0.s[2]\n"
+ "fmla v11.4s, v6.4s, v0.s[3]\n"
+ "fmla v12.4s, v6.4s, v1.s[0]\n"
+ "fmla v13.4s, v6.4s, v1.s[1]\n"
+ "ld1 {v7.4s}, [%[rhs_ptr]], #16\n"
+ "fmla v14.4s, v6.4s, v1.s[2]\n"
+ "fmla v15.4s, v6.4s, v1.s[3]\n"
+ "fmla v16.4s, v6.4s, v2.s[0]\n"
+ "fmla v17.4s, v6.4s, v2.s[1]\n"
+ "fmla v18.4s, v6.4s, v2.s[2]\n"
+ "fmla v19.4s, v6.4s, v2.s[3]\n"
+ "ld1 {v0.4s, v1.4s, v2.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v20.4s, v6.4s, v3.s[0]\n"
+ "fmla v21.4s, v6.4s, v3.s[1]\n"
+ "fmla v22.4s, v6.4s, v3.s[2]\n"
+ "fmla v23.4s, v6.4s, v3.s[3]\n"
+ "fmla v24.4s, v6.4s, v4.s[0]\n"
+ "fmla v25.4s, v6.4s, v4.s[1]\n"
+ "fmla v26.4s, v6.4s, v4.s[2]\n"
+ "fmla v27.4s, v6.4s, v4.s[3]\n"
+ "fmla v28.4s, v6.4s, v5.s[0]\n"
+ "fmla v29.4s, v6.4s, v5.s[1]\n"
+ "fmla v30.4s, v6.4s, v5.s[2]\n"
+ "fmla v31.4s, v6.4s, v5.s[3]\n"
+
+ "ld1 {v3.4s, v4.4s, v5.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v8.4s, v7.4s, v0.s[0]\n"
+ "fmla v9.4s, v7.4s, v0.s[1]\n"
+ "fmla v10.4s, v7.4s, v0.s[2]\n"
+ "fmla v11.4s, v7.4s, v0.s[3]\n"
+ "fmla v12.4s, v7.4s, v1.s[0]\n"
+ "fmla v13.4s, v7.4s, v1.s[1]\n"
+ "ld1 {v6.4s}, [%[rhs_ptr]], #16\n"
+ "fmla v14.4s, v7.4s, v1.s[2]\n"
+ "fmla v15.4s, v7.4s, v1.s[3]\n"
+ "fmla v16.4s, v7.4s, v2.s[0]\n"
+ "fmla v17.4s, v7.4s, v2.s[1]\n"
+ "fmla v18.4s, v7.4s, v2.s[2]\n"
+ "fmla v19.4s, v7.4s, v2.s[3]\n"
+ "ld1 {v0.4s, v1.4s, v2.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v20.4s, v7.4s, v3.s[0]\n"
+ "fmla v21.4s, v7.4s, v3.s[1]\n"
+ "fmla v22.4s, v7.4s, v3.s[2]\n"
+ "fmla v23.4s, v7.4s, v3.s[3]\n"
+ "fmla v24.4s, v7.4s, v4.s[0]\n"
+ "fmla v25.4s, v7.4s, v4.s[1]\n"
+ "fmla v26.4s, v7.4s, v4.s[2]\n"
+ "fmla v27.4s, v7.4s, v4.s[3]\n"
+ "fmla v28.4s, v7.4s, v5.s[0]\n"
+ "fmla v29.4s, v7.4s, v5.s[1]\n"
+ "subs %w[nk], %w[nk], #1\n"
+ "fmla v30.4s, v7.4s, v5.s[2]\n"
+ "fmla v31.4s, v7.4s, v5.s[3]\n"
+ "bne 1b\n"
+
+ "4:\n"
+ "mov x0, %[res_ptr]\n"
+ "cbnz %[oddk], 2f\n"
+
+ "ld1 {v3.4s, v4.4s, v5.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v8.4s, v6.4s, v0.s[0]\n"
+ "fmla v9.4s, v6.4s, v0.s[1]\n"
+ "fmla v10.4s, v6.4s, v0.s[2]\n"
+ "fmla v11.4s, v6.4s, v0.s[3]\n"
+ "fmla v12.4s, v6.4s, v1.s[0]\n"
+ "fmla v13.4s, v6.4s, v1.s[1]\n"
+ "ld1 {v7.4s}, [%[rhs_ptr]], #16\n"
+ "fmla v14.4s, v6.4s, v1.s[2]\n"
+ "fmla v15.4s, v6.4s, v1.s[3]\n"
+ "fmla v16.4s, v6.4s, v2.s[0]\n"
+ "fmla v17.4s, v6.4s, v2.s[1]\n"
+ "fmla v18.4s, v6.4s, v2.s[2]\n"
+ "fmla v19.4s, v6.4s, v2.s[3]\n"
+ "ld1 {v0.4s, v1.4s, v2.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v20.4s, v6.4s, v3.s[0]\n"
+ "fmla v21.4s, v6.4s, v3.s[1]\n"
+ "fmla v22.4s, v6.4s, v3.s[2]\n"
+ "fmla v23.4s, v6.4s, v3.s[3]\n"
+ "fmla v24.4s, v6.4s, v4.s[0]\n"
+ "fmla v25.4s, v6.4s, v4.s[1]\n"
+ "fmla v26.4s, v6.4s, v4.s[2]\n"
+ "fmla v27.4s, v6.4s, v4.s[3]\n"
+ "fmla v28.4s, v6.4s, v5.s[0]\n"
+ "fmla v29.4s, v6.4s, v5.s[1]\n"
+ "fmla v30.4s, v6.4s, v5.s[2]\n"
+ "fmla v31.4s, v6.4s, v5.s[3]\n"
+
+ "ld1 {v3.4s, v4.4s, v5.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v8.4s, v7.4s, v0.s[0]\n"
+ "fmla v9.4s, v7.4s, v0.s[1]\n"
+ "st1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v10.4s, v7.4s, v0.s[2]\n"
+ "st1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v11.4s, v7.4s, v0.s[3]\n"
+ "st1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v12.4s, v7.4s, v1.s[0]\n"
+ "st1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v13.4s, v7.4s, v1.s[1]\n"
+ "st1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v14.4s, v7.4s, v1.s[2]\n"
+ "st1 {v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v15.4s, v7.4s, v1.s[3]\n"
+ "st1 {v14.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v16.4s, v7.4s, v2.s[0]\n"
+ "st1 {v15.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v17.4s, v7.4s, v2.s[1]\n"
+ "st1 {v16.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v18.4s, v7.4s, v2.s[2]\n"
+ "st1 {v17.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v19.4s, v7.4s, v2.s[3]\n"
+ "st1 {v18.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v20.4s, v7.4s, v3.s[0]\n"
+ "st1 {v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v21.4s, v7.4s, v3.s[1]\n"
+ "st1 {v20.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v22.4s, v7.4s, v3.s[2]\n"
+ "st1 {v21.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v23.4s, v7.4s, v3.s[3]\n"
+ "st1 {v22.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v24.4s, v7.4s, v4.s[0]\n"
+ "st1 {v23.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v25.4s, v7.4s, v4.s[1]\n"
+ "st1 {v24.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v26.4s, v7.4s, v4.s[2]\n"
+ "st1 {v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v27.4s, v7.4s, v4.s[3]\n"
+ "st1 {v26.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v28.4s, v7.4s, v5.s[0]\n"
+ "st1 {v27.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v29.4s, v7.4s, v5.s[1]\n"
+ "st1 {v28.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v30.4s, v7.4s, v5.s[2]\n"
+ "st1 {v29.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v31.4s, v7.4s, v5.s[3]\n"
+ "st1 {v30.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "b 3f\n"
+
+ "2:\n"
+ "ld1 {v3.4s, v4.4s, v5.4s}, [%[lhs_ptr]], #48\n"
+ "fmla v8.4s, v6.4s, v0.s[0]\n"
+ "fmla v9.4s, v6.4s, v0.s[1]\n"
+ "st1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v10.4s, v6.4s, v0.s[2]\n"
+ "st1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v11.4s, v6.4s, v0.s[3]\n"
+ "st1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v12.4s, v6.4s, v1.s[0]\n"
+ "st1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v13.4s, v6.4s, v1.s[1]\n"
+ "st1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v14.4s, v6.4s, v1.s[2]\n"
+ "st1 {v13.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v15.4s, v6.4s, v1.s[3]\n"
+ "st1 {v14.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v16.4s, v6.4s, v2.s[0]\n"
+ "st1 {v15.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v17.4s, v6.4s, v2.s[1]\n"
+ "st1 {v16.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v18.4s, v6.4s, v2.s[2]\n"
+ "st1 {v17.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v19.4s, v6.4s, v2.s[3]\n"
+ "st1 {v18.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v20.4s, v6.4s, v3.s[0]\n"
+ "st1 {v19.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v21.4s, v6.4s, v3.s[1]\n"
+ "st1 {v20.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v22.4s, v6.4s, v3.s[2]\n"
+ "st1 {v21.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v23.4s, v6.4s, v3.s[3]\n"
+ "st1 {v22.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v24.4s, v6.4s, v4.s[0]\n"
+ "st1 {v23.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v25.4s, v6.4s, v4.s[1]\n"
+ "st1 {v24.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v26.4s, v6.4s, v4.s[2]\n"
+ "st1 {v25.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v27.4s, v6.4s, v4.s[3]\n"
+ "st1 {v26.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v28.4s, v6.4s, v5.s[0]\n"
+ "st1 {v27.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v29.4s, v6.4s, v5.s[1]\n"
+ "st1 {v28.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v30.4s, v6.4s, v5.s[2]\n"
+ "st1 {v29.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "fmla v31.4s, v6.4s, v5.s[3]\n"
+ "st1 {v30.4s}, [x0]\n"
+ "add x0, x0, %[nstride]\n"
+ "3:\n"
+ "st1 {v31.4s}, [x0]\n"
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk)
+ : [oddk] "r"(oddk), [k0] "r"(k0), [nstride] "r"(nstride)
+ : "x0", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21", "v22",
+ "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+}
+
+#else // __aarch64__
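+ // AArch32 (ARMv7) fallback kernels: same accumulate-or-zero-init contract as
+ // the AArch64 kernels above, implemented with NEON q-registers and vmla. The
+ // 6x8 kernel unrolls the k loop by four, issues pld prefetch hints, and peels
+ // the remainder (rk) through the numbered tail labels.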
+static void sgemm_rowmajor_micro_kernel_6x8(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
+ int nk = k >> 2;
+ int rk = k & 3;
+
+ const int nstride = stride << 2;
+
+ if (rk == 0)
+ {
+ nk--;
+ rk = 4;
+ }
+
+ __asm __volatile("vld1.32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov r0, %[res_ptr]\n"
+
+ "vld1.f32 {d8-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d12-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d16-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d20-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d24-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d28-d31}, [r0]\n"
+ "b 1f\n"
+
+ "0:\n"
+ "vmov.i32 q4, #0\n"
+ "vmov.i32 q5, #0\n"
+ "vmov.i32 q6, #0\n"
+ "pld [%[lhs_ptr], #48]\n"
+ "vmov.i32 q7, #0\n"
+ "pld [%[rhs_ptr], #48]\n"
+ "vmov.i32 q8, #0\n"
+ "pld [%[lhs_ptr], #112]\n"
+ "vmov.i32 q9, #0\n"
+ "pld [%[rhs_ptr], #112]\n"
+ "vmov.i32 q10, #0\n"
+ "vmov.i32 q11, #0\n"
+ "vmov.i32 q12, #0\n"
+ "vmov.i32 q13, #0\n"
+ "pld [%[lhs_ptr], #176]\n"
+ "vmov.i32 q14, #0\n"
+ "pld [%[rhs_ptr], #176]\n"
+ "vmov.i32 q15, #0\n"
+
+ "1:\n"
+ "cmp %[nk], #0\n"
+ "beq 6f\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vld1.32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q6, q2, d0[1]\n"
+ "vmla.f32 q8, q2, d1[0]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q10, q2, d1[1]\n"
+ "vmla.f32 q12, q2, d2[0]\n"
+ "vmla.f32 q14, q2, d2[1]\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q5, q3, d0[0]\n"
+ "vmla.f32 q7, q3, d0[1]\n"
+ "vmla.f32 q9, q3, d1[0]\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vld1.32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q13, q3, d2[0]\n"
+ "vmla.f32 q15, q3, d2[1]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q4, q2, d3[0]\n"
+ "subs %[nk], %[nk], #1\n"
+ "vmla.f32 q6, q2, d3[1]\n"
+ "pld [%[lhs_ptr], #208]\n"
+ "vmla.f32 q8, q2, d0[0]\n"
+ "vmla.f32 q10, q2, d0[1]\n"
+ "pld [%[rhs_ptr], #192]\n"
+ "vmla.f32 q12, q2, d1[0]\n"
+ "vmla.f32 q14, q2, d1[1]\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q5, q3, d3[0]\n"
+ "vmla.f32 q7, q3, d3[1]\n"
+ "vld1.32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q9, q3, d0[0]\n"
+ "vmla.f32 q11, q3, d0[1]\n"
+ "vmla.f32 q13, q3, d1[0]\n"
+ "vmla.f32 q15, q3, d1[1]\n"
+ "vld1.32 {d0-d1}, [%[lhs_ptr]]!\n"
+
+ "vmla.f32 q4, q2, d2[0]\n"
+ "vmla.f32 q6, q2, d2[1]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q8, q2, d3[0]\n"
+ "vmla.f32 q10, q2, d3[1]\n"
+ "pld [%[lhs_ptr], #240]\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vmla.f32 q14, q2, d0[1]\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q5, q3, d2[0]\n"
+ "vmla.f32 q7, q3, d2[1]\n"
+ "pld [%[rhs_ptr], #208]\n"
+ "vmla.f32 q9, q3, d3[0]\n"
+ "vmla.f32 q11, q3, d3[1]\n"
+ "vld1.32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q13, q3, d0[0]\n"
+ "vmla.f32 q15, q3, d0[1]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q4, q2, d1[0]\n"
+ "vmla.f32 q6, q2, d1[1]\n"
+ "vmla.f32 q8, q2, d2[0]\n"
+ "vmla.f32 q10, q2, d2[1]\n"
+ "vmla.f32 q12, q2, d3[0]\n"
+ "vmla.f32 q14, q2, d3[1]\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q5, q3, d1[0]\n"
+ "vmla.f32 q7, q3, d1[1]\n"
+ "vld1.32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q9, q3, d2[0]\n"
+ "vmla.f32 q11, q3, d2[1]\n"
+ "vmla.f32 q13, q3, d3[0]\n"
+ "vmla.f32 q15, q3, d3[1]\n"
+ "bne 1b\n"
+
+ "6:\n"
+ "mov r0, %[res_ptr]\n"
+ "subs %[rk], %[rk], #1\n"
+ "beq 3f\n"
+
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vld1.32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q6, q2, d0[1]\n"
+ "vmla.f32 q8, q2, d1[0]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q10, q2, d1[1]\n"
+ "vmla.f32 q12, q2, d2[0]\n"
+ "subs %[rk], %[rk], #1\n"
+ "vmla.f32 q14, q2, d2[1]\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q5, q3, d0[0]\n"
+ "vmla.f32 q7, q3, d0[1]\n"
+ "vmla.f32 q9, q3, d1[0]\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vld1.32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q13, q3, d2[0]\n"
+ "vmla.f32 q15, q3, d2[1]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "beq 4f\n"
+
+ "vmla.f32 q4, q2, d3[0]\n"
+ "vmla.f32 q6, q2, d3[1]\n"
+ "subs %[rk], %[rk], #1\n"
+ "vmla.f32 q8, q2, d0[0]\n"
+ "vmla.f32 q10, q2, d0[1]\n"
+ "vmla.f32 q12, q2, d1[0]\n"
+ "vmla.f32 q14, q2, d1[1]\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q5, q3, d3[0]\n"
+ "vmla.f32 q7, q3, d3[1]\n"
+ "vld1.32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q9, q3, d0[0]\n"
+ "vmla.f32 q11, q3, d0[1]\n"
+ "vmla.f32 q13, q3, d1[0]\n"
+ "vmla.f32 q15, q3, d1[1]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "beq 5f\n"
+
+ "vld1.32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d2[0]\n"
+ "vmla.f32 q6, q2, d2[1]\n"
+ "vmla.f32 q8, q2, d3[0]\n"
+ "vmla.f32 q10, q2, d3[1]\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vmla.f32 q14, q2, d0[1]\n"
+ "vld1.32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q5, q3, d2[0]\n"
+ "vmla.f32 q7, q3, d2[1]\n"
+ "vmla.f32 q9, q3, d3[0]\n"
+ "vmla.f32 q11, q3, d3[1]\n"
+ "vld1.32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q13, q3, d0[0]\n"
+ "vmla.f32 q15, q3, d0[1]\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q4, q2, d1[0]\n"
+ "vmla.f32 q5, q3, d1[0]\n"
+ "vst1.32 {d8-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q6, q2, d1[1]\n"
+ "vmla.f32 q7, q3, d1[1]\n"
+ "vst1.32 {d12-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q8, q2, d2[0]\n"
+ "vmla.f32 q9, q3, d2[0]\n"
+ "vst1.32 {d16-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q10, q2, d2[1]\n"
+ "vmla.f32 q11, q3, d2[1]\n"
+ "vst1.32 {d20-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q12, q2, d3[0]\n"
+ "vmla.f32 q13, q3, d3[0]\n"
+ "vst1.32 {d24-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q14, q2, d3[1]\n"
+ "vmla.f32 q15, q3, d3[1]\n"
+ "b 2f\n"
+
+ "3:\n"
+ "vld1.32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vmla.f32 q5, q3, d0[0]\n"
+ "vst1.32 {d8-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q6, q2, d0[1]\n"
+ "vmla.f32 q7, q3, d0[1]\n"
+ "vst1.32 {d12-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q8, q2, d1[0]\n"
+ "vld1.32 {d2}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q9, q3, d1[0]\n"
+ "vst1.32 {d16-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q10, q2, d1[1]\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vst1.32 {d20-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q12, q2, d2[0]\n"
+ "vmla.f32 q13, q3, d2[0]\n"
+ "vst1.32 {d24-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q14, q2, d2[1]\n"
+ "vmla.f32 q15, q3, d2[1]\n"
+ "b 2f\n"
+
+ "4:\n"
+ "vmla.f32 q4, q2, d3[0]\n"
+ "vmla.f32 q5, q3, d3[0]\n"
+ "vst1.32 {d8-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q6, q2, d3[1]\n"
+ "vmla.f32 q7, q3, d3[1]\n"
+ "vst1.32 {d12-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q8, q2, d0[0]\n"
+ "vmla.f32 q9, q3, d0[0]\n"
+ "vst1.32 {d16-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q10, q2, d0[1]\n"
+ "vmla.f32 q11, q3, d0[1]\n"
+ "vst1.32 {d20-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q12, q2, d1[0]\n"
+ "vmla.f32 q13, q3, d1[0]\n"
+ "vst1.32 {d24-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q14, q2, d1[1]\n"
+ "vmla.f32 q15, q3, d1[1]\n"
+ "b 2f\n"
+
+ "5:\n"
+ "vld1.32 {d0}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d2[0]\n"
+ "vmla.f32 q5, q3, d2[0]\n"
+ "vst1.32 {d8-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q6, q2, d2[1]\n"
+ "vmla.f32 q7, q3, d2[1]\n"
+ "vst1.32 {d12-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q8, q2, d3[0]\n"
+ "vmla.f32 q9, q3, d3[0]\n"
+ "vst1.32 {d16-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q10, q2, d3[1]\n"
+ "vmla.f32 q11, q3, d3[1]\n"
+ "vst1.32 {d20-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vmla.f32 q13, q3, d0[0]\n"
+ "vst1.32 {d24-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q14, q2, d0[1]\n"
+ "vmla.f32 q15, q3, d0[1]\n"
+ "2:\n"
+ "vst1.32 {d28-d31}, [r0]\n"
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk), [rk] "+r"(rk)
+ : [k0] "r"(k0), [nstride] "r"(nstride)
+ : "r0", "r1", "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", "q8", "q9", "q10",
+ "q11", "q12", "q13", "q14", "q15", "cc");
+}
+
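+// The kernels below share one loop structure: the depth loop is unrolled by
+// two, so nk = (k + 1) / 2 counts double k-steps and rk = k & 1 flags an odd
+// tail step. A non-zero k0 means "accumulate into an existing result tile"
+// (the tile is loaded from res_ptr before the main loop); k0 == 0 instead
+// zero-initializes the accumulator registers with veor/vmov.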
+static void sgemm_rowmajor_micro_kernel_4x12(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
+ int rk = (k & 1);
+ int nk = (k + 1) / 2;
+
+ const int nstride = stride << 2;
+
+ asm volatile("vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov r1, %[res_ptr]\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "mov r0, r1\n"
+ "vld1.f32 {d8-d9}, [r0]!\n"
+ "add r1, %[nstride]\n"
+ "vld1.f32 {d16-d17}, [r0]!\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "mov r0, r1\n"
+ "vld1.f32 {d10-d11}, [r0]!\n"
+ "add r1, %[nstride]\n"
+ "vld1.f32 {d18-d19}, [r0]!\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+ "mov r0, r1\n"
+ "vld1.f32 {d12-d13}, [r0]!\n"
+ "add r1, %[nstride]\n"
+ "vld1.f32 {d20-d21}, [r0]!\n"
+ "vld1.f32 {d28-d29}, [r0]\n"
+ "mov r0, r1\n"
+ "vld1.f32 {d14-d15}, [r0]!\n"
+ "vld1.f32 {d22-d23}, [r0]!\n"
+ "vld1.f32 {d30-d31}, [r0]\n"
+ "beq 2f\n"
+
+ "b 1f\n"
+
+ "0:\n"
+ "veor q4, q4\n"
+ "subs %[nk],%[nk], #1\n"
+ "vmov.f32 q8, q4\n"
+ "vmov.f32 q12, q4\n"
+ "vmov.f32 q5, q4\n"
+ "vmov.f32 q9, q4\n"
+ "vmov.f32 q13, q4\n"
+ "vmov.f32 q6, q4\n"
+ "vmov.f32 q10, q4\n"
+ "vmov.f32 q14, q4\n"
+ "vmov.f32 q7, q4\n"
+ "vmov.f32 q11, q4\n"
+ "vmov.f32 q15, q4\n"
+
+ "beq 2f\n"
+
+ "1:\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vmla.f32 q5, q2, d0[1]\n"
+ "vmla.f32 q6, q2, d1[0]\n"
+ "vmla.f32 q7, q2, d1[1]\n"
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q8, q3, d0[0]\n"
+ "vmla.f32 q9, q3, d0[1]\n"
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q10, q3, d1[0]\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vmla.f32 q13, q2, d0[1]\n"
+ "pld [%[lhs_ptr], #208]\n"
+ "vmla.f32 q14, q2, d1[0]\n"
+ "pld [%[rhs_ptr], #192]\n"
+ "vmla.f32 q15, q2, d1[1]\n"
+
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q4, q3, d2[0]\n"
+ "vmla.f32 q5, q3, d2[1]\n"
+ "vmla.f32 q6, q3, d3[0]\n"
+ "vmla.f32 q7, q3, d3[1]\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q8, q2, d2[0]\n"
+ "vmla.f32 q9, q2, d2[1]\n"
+ "vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q10, q2, d3[0]\n"
+ "vmla.f32 q11, q2, d3[1]\n"
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q12, q3, d2[0]\n"
+ "vmla.f32 q13, q3, d2[1]\n"
+ "subs %[nk],%[nk], #1\n"
+ "pld [%[lhs_ptr], #240]\n"
+ "vmla.f32 q14, q3, d3[0]\n"
+ "pld [%[rhs_ptr], #208]\n"
+ "vmla.f32 q15, q3, d3[1]\n"
+ "bne 1b\n"
+
+ "2:\n"
+ "cmp %[rk], #1\n"
+ "beq 3f\n"
+
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vmla.f32 q5, q2, d0[1]\n"
+ "vmla.f32 q6, q2, d1[0]\n"
+ "vmla.f32 q7, q2, d1[1]\n"
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q8, q3, d0[0]\n"
+ "vmla.f32 q9, q3, d0[1]\n"
+ "vmla.f32 q10, q3, d1[0]\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vmla.f32 q13, q2, d0[1]\n"
+ "vmla.f32 q14, q2, d1[0]\n"
+ "vmla.f32 q15, q2, d1[1]\n"
+
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+ "vld1.f32 {d0-d1}, [%[rhs_ptr]]!\n"
+ "mov r1, %[res_ptr]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q4, q3, d2[0]\n"
+ "vmla.f32 q8, q2, d2[0]\n"
+ "vmla.f32 q12, q0, d2[0]\n"
+ "vst1.f32 {d8-d9}, [r0]!\n"
+ "add r1, %[nstride]\n"
+ "vmla.f32 q5, q3, d2[1]\n"
+ "vst1.f32 {d16-d17}, [r0]!\n"
+ "vmla.f32 q9, q2, d2[1]\n"
+ "vst1.f32 {d24-d25}, [r0]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q13, q0, d2[1]\n"
+ "vst1.f32 {d10-d11}, [r0]!\n"
+ "vmla.f32 q6, q3, d3[0]\n"
+ "add r1, %[nstride]\n"
+ "vst1.f32 {d18-d19}, [r0]!\n"
+ "vmla.f32 q10, q2, d3[0]\n"
+ "vst1.f32 {d26-d27}, [r0]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q14, q0, d3[0]\n"
+ "vst1.f32 {d12-d13}, [r0]!\n"
+ "add r1, %[nstride]\n"
+ "vmla.f32 q7, q3, d3[1]\n"
+ "vst1.f32 {d20-d21}, [r0]!\n"
+ "vmla.f32 q11, q2, d3[1]\n"
+ "vst1.f32 {d28-d29}, [r0]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q15, q0, d3[1]\n"
+ "b 4f\n"
+
+ "3:\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vld1.f32 {d2-d3}, [%[rhs_ptr]]!\n"
+ "mov r1, %[res_ptr]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vmla.f32 q8, q3, d0[0]\n"
+ "vmla.f32 q12, q1, d0[0]\n"
+ "vst1.f32 {d8-d9}, [r0]!\n"
+ "add r1, %[nstride]\n"
+ "vmla.f32 q5, q2, d0[1]\n"
+ "vst1.f32 {d16-d17}, [r0]!\n"
+ "vmla.f32 q9, q3, d0[1]\n"
+ "vst1.f32 {d24-d25}, [r0]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q13, q1, d0[1]\n"
+ "vst1.f32 {d10-d11}, [r0]!\n"
+ "vmla.f32 q6, q2, d1[0]\n"
+ "add r1, %[nstride]\n"
+ "vst1.f32 {d18-d19}, [r0]!\n"
+ "vmla.f32 q10, q3, d1[0]\n"
+ "vst1.f32 {d26-d27}, [r0]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q14, q1, d1[0]\n"
+ "vst1.f32 {d12-d13}, [r0]!\n"
+ "add r1, %[nstride]\n"
+ "vmla.f32 q7, q2, d1[1]\n"
+ "vst1.f32 {d20-d21}, [r0]!\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vst1.f32 {d28-d29}, [r0]\n"
+ "mov r0, r1\n"
+ "vmla.f32 q15, q1, d1[1]\n"
+
+ "4:\n"
+ "vst1.f32 {d14-d15}, [r0]!\n"
+ "vst1.f32 {d22-d23}, [r0]!\n"
+ "vst1.f32 {d30-d31}, [r0]\n"
+
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk), [rk] "+r"(rk)
+ : [k0] "r"(k0), [nstride] "r"(nstride)
+ : "r0", "r1", "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", "q8", "q9", "q10",
+ "q11", "q12", "q13", "q14", "q15", "cc");
+}
+
+static void sgemm_rowmajor_micro_kernel_12x4(const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k, const int k0,
+ const int stride)
+{
+ int rk = (k & 1);
+ int nk = (k + 1) / 2;
+
+ const int nstride = stride << 2;
+
+ asm volatile("vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+
+ "cmp %[k0], #0\n"
+ "beq 0f\n"
+
+ "mov r0, %[res_ptr]\n"
+ "subs %[nk], %[nk], #1\n"
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d28-d29}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d30-d31}, [r0]\n"
+ "beq 2f\n"
+ "b 1f\n"
+
+ "0:\n"
+ "veor q4, q4\n"
+ "subs %[nk],%[nk], #1\n"
+ "vmov.f32 q5, q4\n"
+ "vmov.f32 q6, q4\n"
+ "vmov.f32 q7, q4\n"
+ "vmov.f32 q8, q4\n"
+ "vmov.f32 q9, q4\n"
+ "vmov.f32 q10, q4\n"
+ "vmov.f32 q11, q4\n"
+ "vmov.f32 q12, q4\n"
+ "vmov.f32 q13, q4\n"
+ "vmov.f32 q14, q4\n"
+ "vmov.f32 q15, q4\n"
+
+ "beq 2f\n"
+
+ "1:\n"
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vmla.f32 q5, q2, d0[1]\n"
+ "vmla.f32 q6, q2, d1[0]\n"
+ "vmla.f32 q7, q2, d1[1]\n"
+ "vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q8, q2, d2[0]\n"
+ "vmla.f32 q9, q2, d2[1]\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q10, q2, d3[0]\n"
+ "vmla.f32 q11, q2, d3[1]\n"
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vmla.f32 q13, q2, d0[1]\n"
+ "pld [%[rhs_ptr], #208]\n"
+ "vmla.f32 q14, q2, d1[0]\n"
+ "pld [%[lhs_ptr], #192]\n"
+ "vmla.f32 q15, q2, d1[1]\n"
+
+ "vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q4, q3, d2[0]\n"
+ "vmla.f32 q5, q3, d2[1]\n"
+ "vmla.f32 q6, q3, d3[0]\n"
+ "vmla.f32 q7, q3, d3[1]\n"
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q8, q3, d0[0]\n"
+ "vmla.f32 q9, q3, d0[1]\n"
+ "vld1.f32 {d4-d5}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q10, q3, d1[0]\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q12, q3, d2[0]\n"
+ "vmla.f32 q13, q3, d2[1]\n"
+ "subs %[nk],%[nk], #1\n"
+ "pld [%[rhs_ptr], #240]\n"
+ "vmla.f32 q14, q3, d3[0]\n"
+ "pld [%[lhs_ptr], #208]\n"
+ "vmla.f32 q15, q3, d3[1]\n"
+ "bne 1b\n"
+
+ "2:\n"
+ "cmp %[rk], #1\n"
+ "beq 3f\n"
+
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vmla.f32 q5, q2, d0[1]\n"
+ "vmla.f32 q6, q2, d1[0]\n"
+ "vmla.f32 q7, q2, d1[1]\n"
+ "vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q8, q2, d2[0]\n"
+ "vmla.f32 q9, q2, d2[1]\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vmla.f32 q10, q2, d3[0]\n"
+ "vmla.f32 q11, q2, d3[1]\n"
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vmla.f32 q13, q2, d0[1]\n"
+ "vmla.f32 q14, q2, d1[0]\n"
+ "vmla.f32 q15, q2, d1[1]\n"
+
+ "mov r0, %[res_ptr]\n"
+ "vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q4, q3, d2[0]\n"
+ "vst1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q5, q3, d2[1]\n"
+ "vst1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q6, q3, d3[0]\n"
+ "vst1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q7, q3, d3[1]\n"
+ "vst1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q8, q3, d0[0]\n"
+ "vst1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q9, q3, d0[1]\n"
+ "vst1.f32 {d18-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q10, q3, d1[0]\n"
+ "vst1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q11, q3, d1[1]\n"
+ "vst1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q12, q3, d2[0]\n"
+ "vst1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q13, q3, d2[1]\n"
+ "vst1.f32 {d26-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q14, q3, d3[0]\n"
+ "vst1.f32 {d28-d29}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q15, q3, d3[1]\n"
+ "b 4f\n"
+
+ "3:\n"
+ "mov r0, %[res_ptr]\n"
+ "vld1.f32 {d2-d3}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q4, q2, d0[0]\n"
+ "vst1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q5, q2, d0[1]\n"
+ "vst1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q6, q2, d1[0]\n"
+ "vst1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q7, q2, d1[1]\n"
+ "vst1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vld1.f32 {d0-d1}, [%[lhs_ptr]]!\n"
+ "vmla.f32 q8, q2, d2[0]\n"
+ "vst1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q9, q2, d2[1]\n"
+ "vst1.f32 {d18-d19}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q10, q2, d3[0]\n"
+ "vst1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q11, q2, d3[1]\n"
+ "vst1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q12, q2, d0[0]\n"
+ "vst1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q13, q2, d0[1]\n"
+ "vst1.f32 {d26-d27}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q14, q2, d1[0]\n"
+ "vst1.f32 {d28-d29}, [r0]\n"
+ "add r0, r0, %[nstride]\n"
+ "vmla.f32 q15, q3, d1[1]\n"
+
+ "4:\n"
+ "vst1.f32 {d30-d31}, [r0]\n"
+ : [lhs_ptr] "+r"(lhs_ptr), [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr),
+ [nk] "+r"(nk), [rk] "+r"(rk)
+ : [k0] "r"(k0), [nstride] "r"(nstride)
+ : "r0", "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", "q8", "q9", "q10", "q11",
+ "q12", "q13", "q14", "q15", "cc");
+}
+#endif // __aarch64__
+
+typedef void (*sgemm_rowmajor_micro_kernel_func)(const float *, const float *, float *, const int,
+                                                 const int, const int);
+
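+// Dispatch table for the hand-written micro kernels, indexed as
+// [mr / 2 - 1][nr / 2 - 1]; e.g. the 12x4 kernel sits at [5][1]. Entries are
+// null for tile shapes with no kernel on the current architecture, and the
+// macro kernels below bail out when they hit a null entry.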
+static sgemm_rowmajor_micro_kernel_func sgemm_rowmajor_micro_kernel_table[12][12] = {
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0,
+#if !__aarch64__
+     sgemm_rowmajor_micro_kernel_4x12,
+#else // !__aarch64__
+     0,
+#endif // !__aarch64__
+     0, 0, 0, 0, 0,
+#if __aarch64__
+     sgemm_rowmajor_micro_kernel_4x24
+#else // __aarch64__
+     0
+#endif // __aarch64__
+    },
+    {0, 0, 0,
+#if !__aarch64__
+     sgemm_rowmajor_micro_kernel_6x8,
+#else // !__aarch64__
+     0,
+#endif // !__aarch64__
+     0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0,
+#if __aarch64__
+     sgemm_rowmajor_micro_kernel_8x12,
+#else // __aarch64__
+     0,
+#endif // __aarch64__
+     0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0,
+#if !__aarch64__
+     sgemm_rowmajor_micro_kernel_12x4,
+#else // !__aarch64__
+     0,
+#endif // !__aarch64__
+     0,
+#if __aarch64__
+     sgemm_rowmajor_micro_kernel_12x8,
+#else // __aarch64__
+     0,
+#endif // __aarch64__
+     0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0,
+#if __aarch64__
+     sgemm_rowmajor_micro_kernel_24x4,
+#else // __aarch64__
+     0,
+#endif // __aarch64__
+     0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+};
+
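+// Macro kernels: walk a packed mb x kb LHS block and kb x nb RHS block in
+// mr x nr register tiles. "divnm" keeps the column loop outermost, "divmn"
+// the row loop. Full tiles go straight to the micro kernel; ragged edge
+// tiles are computed into a stack buffer (res_micro) with k0 = 0 and then
+// copied (k0 == 0) or accumulated (k0 != 0) into the real result, so the
+// micro kernels never store out of bounds.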
+void _sgemm_rowmajor_macro_kernel_divnm(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int nstride,
+ const int kstride)
+{
+ const int nm = (mb + mr - 1) / mr;
+ const int nn = (nb + nr - 1) / nr;
+ const int rm = mb % mr;
+ const int rn = nb % nr;
+
+  sgemm_rowmajor_micro_kernel_func sgemm_rowmajor_micro_kernel =
+      sgemm_rowmajor_micro_kernel_table[mr / 2 - 1][nr / 2 - 1];
+  if (!sgemm_rowmajor_micro_kernel)
+ return;
+
+ for (int j = 0; j < nn; j++)
+ {
+ const int _nr = (j != nn - 1 || rn == 0) ? nr : rn;
+ for (int i = 0; i < nm; i++)
+ {
+ const int _mr = (i != nm - 1 || rm == 0) ? mr : rm;
+ if (_mr == mr && _nr == nr)
+ {
+        sgemm_rowmajor_micro_kernel(&lhs_ptr[i * mr * kstride], &rhs_ptr[j * nr * kstride],
+ &res_ptr[i * mr * nstride + j * nr], kb, k0, nstride);
+ }
+ else
+ {
+ float res_micro[mr * nr];
+ float *res = &res_ptr[i * mr * nstride + j * nr];
+
+        sgemm_rowmajor_micro_kernel(&lhs_ptr[i * mr * kstride], &rhs_ptr[j * nr * kstride],
+ res_micro, kb, 0, nr);
+ if (k0 == 0)
+ {
+ for (int pi = 0; pi < _mr; pi++)
+ {
+ for (int pj = 0; pj < _nr; pj++)
+ {
+ res[pi * nstride + pj] = res_micro[pi * nr + pj];
+ }
+ }
+ }
+ else
+ {
+ for (int pi = 0; pi < _mr; pi++)
+ {
+ for (int pj = 0; pj < _nr; pj++)
+ {
+ res[pi * nstride + pj] += res_micro[pi * nr + pj];
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+void _sgemm_rowmajor_macro_kernel_divmn(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int nstride,
+ const int kstride)
+{
+ const int nm = (mb + mr - 1) / mr;
+ const int nn = (nb + nr - 1) / nr;
+ const int rm = mb % mr;
+ const int rn = nb % nr;
+
+  sgemm_rowmajor_micro_kernel_func sgemm_rowmajor_micro_kernel =
+      sgemm_rowmajor_micro_kernel_table[mr / 2 - 1][nr / 2 - 1];
+  if (!sgemm_rowmajor_micro_kernel)
+ return;
+
+ for (int j = 0; j < nm; j++)
+ {
+ const int _mr = (j != nm - 1 || rm == 0) ? mr : rm;
+ for (int i = 0; i < nn; i++)
+ {
+ const int _nr = (i != nn - 1 || rn == 0) ? nr : rn;
+ if (_mr == mr && _nr == nr)
+ {
+        sgemm_rowmajor_micro_kernel(&lhs_ptr[j * mr * kstride], &rhs_ptr[i * nr * kstride],
+ &res_ptr[j * mr * nstride + i * nr], kb, k0, nstride);
+ }
+ else
+ {
+ float res_micro[mr * nr];
+ float *res = &res_ptr[j * mr * nstride + i * nr];
+
+        sgemm_rowmajor_micro_kernel(&lhs_ptr[j * mr * kstride], &rhs_ptr[i * nr * kstride],
+ res_micro, kb, 0, nr);
+ if (k0 == 0)
+ {
+ for (int pi = 0; pi < _mr; pi++)
+ {
+ for (int pj = 0; pj < _nr; pj++)
+ {
+ res[pi * nstride + pj] = res_micro[pi * nr + pj];
+ }
+ }
+ }
+ else
+ {
+ for (int pi = 0; pi < _mr; pi++)
+ {
+ for (int pj = 0; pj < _nr; pj++)
+ {
+ res[pi * nstride + pj] += res_micro[pi * nr + pj];
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
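+// Column-major GEMM reduces to the row-major case via C^T = (A * B)^T =
+// B^T * A^T: swapping the operands and the m/n roles lets the row-major
+// macro kernels above serve both layouts.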
+void _sgemm_colmajor_macro_kernel_divnm(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int mstride,
+ const int kstride)
+{
+ _sgemm_rowmajor_macro_kernel_divmn(nr, mr, nb, mb, kb, rhs_ptr, lhs_ptr, res_ptr, k0, mstride,
+ kstride);
+}
+
+void _sgemm_colmajor_macro_kernel_divmn(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int mstride,
+ const int kstride)
+{
+ _sgemm_rowmajor_macro_kernel_divnm(nr, mr, nb, mb, kb, rhs_ptr, lhs_ptr, res_ptr, k0, mstride,
+ kstride);
+}
+
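+// Sparse kernel: a single-row saxpy, res_ptr[0..nb) += lhs_data * rhs_ptr[0..nb),
+// vectorized in blocks of eight floats with a four-wide and then a scalar tail.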
+#if __aarch64__
+void _sparse_sgemm_kernel(const int nb, float lhs_data, const float *rhs_ptr, float *res_ptr)
+{
+ int nn = nb >> 3;
+ int rn = nb & 7;
+
+ if (nn > 0)
+ {
+ asm volatile("mov x0, %[res_ptr]\n"
+ "dup v0.2d, %[lhs_data]\n"
+ "ld1 {v1.4s}, [%[rhs_ptr]], #16\n"
+ "ld1 {v2.4s}, [x0], #16\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "beq 2f\n"
+
+ "1:\n"
+ "ld1 {v4.4s}, [x0], #16\n"
+ "ld1 {v3.4s}, [%[rhs_ptr]], #16\n"
+
+ "fmla v2.4s, v1.4s, v0.s[0]\n"
+ "st1 {v2.4s}, [%[res_ptr]], #16\n"
+
+ "ld1 {v2.4s}, [x0], #16\n"
+ "ld1 {v1.4s}, [%[rhs_ptr]], #16\n"
+
+ "fmla v4.4s, v3.4s, v0.s[0]\n"
+ "st1 {v4.4s}, [%[res_ptr]], #16\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "bne 1b\n"
+
+ "2:\n"
+ "ld1 {v3.4s}, [%[rhs_ptr]], #16\n"
+ "ld1 {v4.4s}, [x0], #16\n"
+
+ "fmla v2.4s, v1.4s, v0.s[0]\n"
+ "st1 {v2.4s}, [%[res_ptr]], #16\n"
+
+ "fmla v4.4s, v3.4s, v0.s[0]\n"
+ "st1 {v4.4s}, [%[res_ptr]], #16\n"
+ : [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr), [nn] "+r"(nn)
+ : [lhs_data] "r"(lhs_data)
+ : "x0", "v0", "v1", "v2", "v3", "v4", "cc");
+ }
+ if (rn > 0)
+ {
+ int _nn = rn >> 2;
+ int _rn = rn & 3;
+
+ if (_nn > 0)
+ {
+ asm volatile("dup v0.2d, %[lhs_data]\n"
+ "ld1 {v1.4s}, [%[rhs_ptr]], #16\n"
+ "ld1 {v2.4s}, [%[res_ptr]]\n"
+ "fmla v2.4s, v1.4s, v0.s[0]\n"
+ "st1 {v2.4s}, [%[res_ptr]], #16\n"
+ : [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr)
+ : [lhs_data] "r"(lhs_data)
+ : "x0", "x1", "x2", "cc");
+ }
+ if (_rn > 0)
+ {
+ for (int i = 0; i < _rn; i++)
+ {
+ res_ptr[i] += lhs_data * rhs_ptr[i];
+ }
+ }
+ }
+}
+
+#else // __aarch64__
+void _sparse_sgemm_kernel(const int nb, float lhs_data, const float *rhs_ptr, float *res_ptr)
+{
+ int nn = nb >> 3;
+ int rn = nb & 7;
+
+ if (nn > 0)
+ {
+ asm volatile("mov r0, %[res_ptr]\n"
+ "vdup.32 d0, %[lhs_data]\n"
+ "vld1.f32 {d2-d3}, [%[rhs_ptr]]!\n"
+ "vld1.f32 {d4-d5}, [r0]!\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "beq 2f\n"
+
+ "1:\n"
+ "vld1.f32 {d8-d9}, [r0]!\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q2, q1, d0[0]\n"
+ "vst1.f32 {d4-d5}, [%[res_ptr]]!\n"
+
+ "vld1.f32 {d4-d5}, [r0]!\n"
+ "vld1.f32 {d2-d3}, [%[rhs_ptr]]!\n"
+
+ "vmla.f32 q4, q3, d0[0]\n"
+ "vst1.f32 {d8-d9}, [%[res_ptr]]!\n"
+
+ "subs %[nn], %[nn], #1\n"
+ "bne 1b\n"
+
+ "2:\n"
+ "vld1.f32 {d6-d7}, [%[rhs_ptr]]!\n"
+ "vld1.f32 {d8-d9}, [r0]!\n"
+
+ "vmla.f32 q2, q1, d0[0]\n"
+ "vst1.f32 {d4-d5}, [%[res_ptr]]!\n"
+
+ "vmla.f32 q4, q3, d0[0]\n"
+ "vst1.f32 {d8-d9}, [%[res_ptr]]!\n"
+ : [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr), [nn] "+r"(nn)
+ : [lhs_data] "r"(lhs_data)
+ : "r0", "q0", "q1", "q2", "q3", "q4", "cc");
+ }
+ if (rn > 0)
+ {
+ int _nn = rn >> 2;
+ int _rn = rn & 3;
+
+ if (_nn > 0)
+ {
+ asm volatile("vdup.32 d0, %[lhs_data]\n"
+ "vld1.f32 {d2-d3}, [%[rhs_ptr]]!\n"
+ "vld1.f32 {d4-d5}, [%[res_ptr]]\n"
+ "vmla.f32 q2, q1, d0[0]\n"
+ "vst1.f32 {d4-d5}, [%[res_ptr]]!\n"
+ : [rhs_ptr] "+r"(rhs_ptr), [res_ptr] "+r"(res_ptr)
+ : [lhs_data] "r"(lhs_data)
+ : "q0", "q1", "q2", "cc");
+ }
+ if (_rn > 0)
+ {
+ for (int i = 0; i < _rn; i++)
+ {
+ res_ptr[i] += lhs_data * rhs_ptr[i];
+ }
+ }
+ }
+}
+#endif // __aarch64__
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/sgemm_kernel.h b/runtimes/libs/srcn/src/sgemm_kernel.h
new file mode 100644
index 000000000..77d90b136
--- /dev/null
+++ b/runtimes/libs/srcn/src/sgemm_kernel.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_SGEMM_KERNEL_H__
+#define __NNFW_SRCN_SGEMM_KERNEL_H__
+
+#include "srcn/conv_type.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
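+// Parameter convention, as apparently used by the implementations: mr/nr are
+// the register-tile sizes, mb/nb/kb the cache-block sizes, k0 a flag that
+// selects accumulation into an already-initialized result, and the strides
+// are leading dimensions counted in elements.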
+void _sgemm_rowmajor_macro_kernel_divnm(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int nstride,
+ const int kstride);
+
+void _sgemm_rowmajor_macro_kernel_divmn(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int nstride,
+ const int kstride);
+
+void _sgemm_colmajor_macro_kernel_divnm(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int mstride,
+ const int kstride);
+
+void _sgemm_colmajor_macro_kernel_divmn(const int mr, const int nr, const int mb, const int nb,
+ const int kb, const float *lhs_ptr, const float *rhs_ptr,
+ float *res_ptr, const int k0, const int mstride,
+ const int kstride);
+
+void _sparse_sgemm_kernel(const int nb, float lhs_data, const float *rhs_ptr, float *res_ptr);
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_SGEMM_KERNEL_H__
diff --git a/runtimes/libs/srcn/src/sgemm_pack.cc b/runtimes/libs/srcn/src/sgemm_pack.cc
new file mode 100644
index 000000000..83eb6caef
--- /dev/null
+++ b/runtimes/libs/srcn/src/sgemm_pack.cc
@@ -0,0 +1,2316 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdlib.h>
+#include <arm_neon.h>
+
+#include "srcn/conv_type.h"
+#include "common.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
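+// Packs an mb x kb block of the row-major LHS into contiguous mr-wide
+// panels so the micro kernels can stream it linearly. A scalar sketch of the
+// layout (the NEON paths below do the same thing four k-steps at a time with
+// zip-based 4x4 transposes):
+//
+//   for (int i = 0; i < mb / mr; i++)    // one panel of mr rows
+//     for (int j = 0; j < kb; j++)       // depth
+//       for (int r = 0; r < mr; r++)     // rows inside the panel
+//         *plhs_ptr++ = lhs_ptr[(i * mr + r) * stride + j];
+//
+// A ragged final panel (rm = mb % mr rows) is zero-padded up to mr rows.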
+void _pack_rowmajor_notrans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr)
+{
+ const int nm = mb / mr;
+ const int rm = mb % mr;
+
+ switch (mr)
+ {
+#if __aarch64__
+ case 24:
+ for (int i = 0; i < nm; i++)
+ {
+ int nk = kb >> 2;
+ int rk = kb & 0x03;
+
+ const float *lhs_temp = lhs_ptr;
+ const int _stride = stride << 2;
+
+ if (nk > 0)
+ {
+ asm volatile("0:\n"
+ "mov x0, %[lhs_temp]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v8.4s, v10.4s\n"
+ "zip2 v30.4s, v8.4s, v10.4s\n"
+ "zip1 v29.4s, v9.4s, v11.4s\n"
+ "zip2 v31.4s, v9.4s, v11.4s\n"
+ "zip1 v8.4s, v28.4s, v29.4s\n"
+ "zip2 v9.4s, v28.4s, v29.4s\n"
+ "zip1 v10.4s, v30.4s, v31.4s\n"
+ "zip2 v11.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v14.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v15.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v12.4s, v14.4s\n"
+ "zip2 v30.4s, v12.4s, v14.4s\n"
+ "zip1 v29.4s, v13.4s, v15.4s\n"
+ "zip2 v31.4s, v13.4s, v15.4s\n"
+ "zip1 v12.4s, v28.4s, v29.4s\n"
+ "zip2 v13.4s, v28.4s, v29.4s\n"
+ "zip1 v14.4s, v30.4s, v31.4s\n"
+ "zip2 v15.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v16.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v17.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v18.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v19.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v16.4s, v18.4s\n"
+ "zip2 v30.4s, v16.4s, v18.4s\n"
+ "zip1 v29.4s, v17.4s, v19.4s\n"
+ "zip2 v31.4s, v17.4s, v19.4s\n"
+ "zip1 v16.4s, v28.4s, v29.4s\n"
+ "zip2 v17.4s, v28.4s, v29.4s\n"
+ "zip1 v18.4s, v30.4s, v31.4s\n"
+ "zip2 v19.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v20.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v21.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v22.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v23.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v20.4s, v22.4s\n"
+ "zip2 v30.4s, v20.4s, v22.4s\n"
+ "zip1 v29.4s, v21.4s, v23.4s\n"
+ "zip2 v31.4s, v21.4s, v23.4s\n"
+ "zip1 v20.4s, v28.4s, v29.4s\n"
+ "zip2 v21.4s, v28.4s, v29.4s\n"
+ "zip1 v22.4s, v30.4s, v31.4s\n"
+ "zip2 v23.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v24.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v25.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v26.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v27.4s}, [x0]\n"
+
+ "zip1 v28.4s, v24.4s, v26.4s\n"
+ "zip2 v30.4s, v24.4s, v26.4s\n"
+ "zip1 v29.4s, v25.4s, v27.4s\n"
+ "zip2 v31.4s, v25.4s, v27.4s\n"
+ "zip1 v24.4s, v28.4s, v29.4s\n"
+ "zip2 v25.4s, v28.4s, v29.4s\n"
+ "zip1 v26.4s, v30.4s, v31.4s\n"
+ "zip2 v27.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v8.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v12.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v16.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v20.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v24.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v9.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v13.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v17.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v21.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v25.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v10.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v14.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v18.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v22.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v26.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v11.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v15.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v19.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v23.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v27.4s}, [%[plhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21",
+ "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31");
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ plhs_ptr[0] = lhs_temp[0];
+ plhs_ptr[1] = lhs_temp[stride];
+ plhs_ptr[2] = lhs_temp[stride << 1];
+ plhs_ptr[3] = lhs_temp[3 * stride];
+ plhs_ptr[4] = lhs_temp[stride << 2];
+ plhs_ptr[5] = lhs_temp[5 * stride];
+ plhs_ptr[6] = lhs_temp[6 * stride];
+ plhs_ptr[7] = lhs_temp[7 * stride];
+ plhs_ptr[8] = lhs_temp[stride << 3];
+ plhs_ptr[9] = lhs_temp[9 * stride];
+ plhs_ptr[10] = lhs_temp[10 * stride];
+ plhs_ptr[11] = lhs_temp[11 * stride];
+        plhs_ptr[12] = lhs_temp[12 * stride];
+ plhs_ptr[13] = lhs_temp[13 * stride];
+ plhs_ptr[14] = lhs_temp[14 * stride];
+ plhs_ptr[15] = lhs_temp[15 * stride];
+ plhs_ptr[16] = lhs_temp[stride << 4];
+ plhs_ptr[17] = lhs_temp[17 * stride];
+ plhs_ptr[18] = lhs_temp[18 * stride];
+ plhs_ptr[19] = lhs_temp[19 * stride];
+ plhs_ptr[20] = lhs_temp[20 * stride];
+ plhs_ptr[21] = lhs_temp[21 * stride];
+ plhs_ptr[22] = lhs_temp[22 * stride];
+ plhs_ptr[23] = lhs_temp[23 * stride];
+ plhs_ptr += mr;
+ lhs_temp++;
+ }
+
+ lhs_ptr += mr * stride;
+ }
+ break;
+ case 16:
+ for (int i = 0; i < nm; i++)
+ {
+ int nk = kb >> 2;
+ int rk = kb & 0x03;
+
+ const float *lhs_temp = lhs_ptr;
+ const int _stride = stride << 2;
+
+ if (nk > 0)
+ {
+ asm volatile("0:\n"
+ "mov x0, %[lhs_temp]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v8.4s, v10.4s\n"
+ "zip2 v30.4s, v8.4s, v10.4s\n"
+ "zip1 v29.4s, v9.4s, v11.4s\n"
+ "zip2 v31.4s, v9.4s, v11.4s\n"
+ "zip1 v8.4s, v28.4s, v29.4s\n"
+ "zip2 v9.4s, v28.4s, v29.4s\n"
+ "zip1 v10.4s, v30.4s, v31.4s\n"
+ "zip2 v11.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v14.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v15.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v12.4s, v14.4s\n"
+ "zip2 v30.4s, v12.4s, v14.4s\n"
+ "zip1 v29.4s, v13.4s, v15.4s\n"
+ "zip2 v31.4s, v13.4s, v15.4s\n"
+ "zip1 v12.4s, v28.4s, v29.4s\n"
+ "zip2 v13.4s, v28.4s, v29.4s\n"
+ "zip1 v14.4s, v30.4s, v31.4s\n"
+ "zip2 v15.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v16.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v17.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v18.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v19.4s}, [x0]\n"
+
+ "zip1 v28.4s, v16.4s, v18.4s\n"
+ "zip2 v30.4s, v16.4s, v18.4s\n"
+ "zip1 v29.4s, v17.4s, v19.4s\n"
+ "zip2 v31.4s, v17.4s, v19.4s\n"
+ "zip1 v16.4s, v28.4s, v29.4s\n"
+ "zip2 v17.4s, v28.4s, v29.4s\n"
+ "zip1 v18.4s, v30.4s, v31.4s\n"
+ "zip2 v19.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v8.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v12.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v16.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v9.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v13.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v17.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v10.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v14.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v18.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v11.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v15.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v19.4s}, [%[plhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v28", "v29",
+ "v30", "v31");
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ plhs_ptr[0] = lhs_temp[0];
+ plhs_ptr[1] = lhs_temp[stride];
+ plhs_ptr[2] = lhs_temp[stride << 1];
+ plhs_ptr[3] = lhs_temp[3 * stride];
+ plhs_ptr[4] = lhs_temp[stride << 2];
+ plhs_ptr[5] = lhs_temp[5 * stride];
+ plhs_ptr[6] = lhs_temp[6 * stride];
+ plhs_ptr[7] = lhs_temp[7 * stride];
+ plhs_ptr[8] = lhs_temp[stride << 3];
+ plhs_ptr[9] = lhs_temp[9 * stride];
+ plhs_ptr[10] = lhs_temp[10 * stride];
+ plhs_ptr[11] = lhs_temp[11 * stride];
+        plhs_ptr[12] = lhs_temp[12 * stride];
+ plhs_ptr[13] = lhs_temp[13 * stride];
+ plhs_ptr[14] = lhs_temp[14 * stride];
+ plhs_ptr[15] = lhs_temp[15 * stride];
+ plhs_ptr += mr;
+ lhs_temp++;
+ }
+
+ lhs_ptr += mr * stride;
+ }
+ break;
+#endif // __aarch64__
+ case 12:
+ for (int i = 0; i < nm; i++)
+ {
+ int nk = kb >> 2;
+ int rk = kb & 0x03;
+
+ const float *lhs_temp = lhs_ptr;
+ const int _stride = stride << 2;
+
+ if (nk > 0)
+ {
+#if __aarch64__
+ asm volatile("0:\n"
+ "mov x0, %[lhs_temp]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v8.4s, v10.4s\n"
+ "zip2 v30.4s, v8.4s, v10.4s\n"
+ "zip1 v29.4s, v9.4s, v11.4s\n"
+ "zip2 v31.4s, v9.4s, v11.4s\n"
+ "zip1 v8.4s, v28.4s, v29.4s\n"
+ "zip2 v9.4s, v28.4s, v29.4s\n"
+ "zip1 v10.4s, v30.4s, v31.4s\n"
+ "zip2 v11.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v14.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v15.4s}, [x0]\n"
+
+ "zip1 v28.4s, v12.4s, v14.4s\n"
+ "zip2 v30.4s, v12.4s, v14.4s\n"
+ "zip1 v29.4s, v13.4s, v15.4s\n"
+ "zip2 v31.4s, v13.4s, v15.4s\n"
+ "zip1 v12.4s, v28.4s, v29.4s\n"
+ "zip2 v13.4s, v28.4s, v29.4s\n"
+ "zip1 v14.4s, v30.4s, v31.4s\n"
+ "zip2 v15.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v8.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v12.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v9.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v13.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v10.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v14.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v11.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v15.4s}, [%[plhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v12", "v13", "v14", "v15", "v28", "v29", "v30", "v31");
+#else // __aarch64__
+ asm volatile("0:\n"
+ "mov r0, %[lhs_temp]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+
+ "vzip.32 q8, q10\n"
+ "vzip.32 q9, q11\n"
+ "vzip.32 q8, q9\n"
+ "vzip.32 q10, q11\n"
+
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d28-d29}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d30-d31}, [r0]\n"
+
+ "vzip.32 q12, q14\n"
+ "vzip.32 q13, q15\n"
+ "vzip.32 q12, q13\n"
+ "vzip.32 q14, q15\n"
+
+ "vst1.f32 {d8-d9}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d16-d17}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d24-d25}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d18-d19}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d26-d27}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d20-d21}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d28-d29}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d22-d23}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d30-d31}, [%[plhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7", "q8", "q9", "q10", "q11",
+ "q12", "q13", "q14", "q15");
+#endif // __aarch64__
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ plhs_ptr[0] = lhs_temp[0];
+ plhs_ptr[1] = lhs_temp[stride];
+ plhs_ptr[2] = lhs_temp[stride << 1];
+ plhs_ptr[3] = lhs_temp[3 * stride];
+ plhs_ptr[4] = lhs_temp[stride << 2];
+ plhs_ptr[5] = lhs_temp[5 * stride];
+ plhs_ptr[6] = lhs_temp[6 * stride];
+ plhs_ptr[7] = lhs_temp[7 * stride];
+ plhs_ptr[8] = lhs_temp[stride << 3];
+ plhs_ptr[9] = lhs_temp[9 * stride];
+ plhs_ptr[10] = lhs_temp[10 * stride];
+ plhs_ptr[11] = lhs_temp[11 * stride];
+ plhs_ptr += mr;
+ lhs_temp++;
+ }
+
+ lhs_ptr += mr * stride;
+ }
+ break;
+ case 8:
+ for (int i = 0; i < nm; i++)
+ {
+ int nk = kb >> 2;
+ int rk = kb & 0x03;
+
+ const float *lhs_temp = lhs_ptr;
+ const int _stride = stride << 2;
+
+ if (nk > 0)
+ {
+#if __aarch64__
+ asm volatile("0:\n"
+ "mov x0, %[lhs_temp]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "zip1 v28.4s, v8.4s, v10.4s\n"
+ "zip2 v30.4s, v8.4s, v10.4s\n"
+ "zip1 v29.4s, v9.4s, v11.4s\n"
+ "zip2 v31.4s, v9.4s, v11.4s\n"
+ "zip1 v8.4s, v28.4s, v29.4s\n"
+ "zip2 v9.4s, v28.4s, v29.4s\n"
+ "zip1 v10.4s, v30.4s, v31.4s\n"
+ "zip2 v11.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v8.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v9.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v10.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v11.4s}, [%[plhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v28", "v29", "v30", "v31");
+#else // __aarch64__
+ asm volatile("0:\n"
+ "mov r0, %[lhs_temp]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vzip.32 q8, q10\n"
+ "vzip.32 q9, q11\n"
+ "vzip.32 q8, q9\n"
+ "vzip.32 q10, q11\n"
+
+ "vst1.f32 {d8-d9}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d16-d17}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d18-d19}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d20-d21}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d22-d23}, [%[plhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7", "q8", "q9", "q10", "q11");
+#endif // __aarch64__
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ plhs_ptr[0] = lhs_temp[0];
+ plhs_ptr[1] = lhs_temp[stride];
+ plhs_ptr[2] = lhs_temp[stride << 1];
+ plhs_ptr[3] = lhs_temp[3 * stride];
+ plhs_ptr[4] = lhs_temp[stride << 2];
+ plhs_ptr[5] = lhs_temp[5 * stride];
+ plhs_ptr[6] = lhs_temp[6 * stride];
+ plhs_ptr[7] = lhs_temp[7 * stride];
+ plhs_ptr += mr;
+ lhs_temp++;
+ }
+
+ lhs_ptr += mr * stride;
+ }
+ break;
+ case 6:
+ for (int i = 0; i < nm; i++)
+ {
+ int nk = kb >> 2;
+ int rk = kb & 0x03;
+
+ const float *lhs_temp = lhs_ptr;
+ const int _stride = stride << 2;
+
+ if (nk > 0)
+ {
+#if __aarch64__
+        // TODO: extend this 4-row transpose to all 6 rows; v8 is loaded below but never stored.
+ asm volatile("0:\n"
+ "mov x0, %[lhs_temp]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v8.4s}, [x0]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[plhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v28", "v29", "v30", "v31");
+#else // __aarch64__
+ asm volatile("0:\n"
+ "mov r0, %[lhs_temp]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+ "vzip.32 q8, q9\n"
+
+ "vst1.f32 {d8-d9}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d16}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d17}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d18}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d19}, [%[plhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7", "q8", "q9");
+#endif // __aarch64__
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ plhs_ptr[0] = lhs_temp[0];
+ plhs_ptr[1] = lhs_temp[stride];
+ plhs_ptr[2] = lhs_temp[stride << 1];
+ plhs_ptr[3] = lhs_temp[3 * stride];
+ plhs_ptr[4] = lhs_temp[stride << 2];
+ plhs_ptr[5] = lhs_temp[5 * stride];
+ plhs_ptr += mr;
+ lhs_temp++;
+ }
+
+ lhs_ptr += mr * stride;
+ }
+ break;
+ case 4:
+ for (int i = 0; i < nm; i++)
+ {
+ int nk = kb >> 2;
+ int rk = kb & 0x03;
+
+ const float *lhs_temp = lhs_ptr;
+ const int _stride = stride << 2;
+
+ if (nk > 0)
+ {
+#if __aarch64__
+ asm volatile("0:\n"
+ "mov x0, %[lhs_temp]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[plhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[plhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v28", "v29", "v30", "v31");
+#else // __aarch64__
+ asm volatile("0:\n"
+ "mov r0, %[lhs_temp]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+
+ "vst1.f32 {d8-d9}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[plhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[plhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[lhs_temp], %[lhs_temp], #16\n"
+ "bne 0b\n"
+ : [lhs_temp] "+r"(lhs_temp), [plhs_ptr] "+r"(plhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7");
+#endif // __aarch64__
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ plhs_ptr[0] = lhs_temp[0];
+ plhs_ptr[1] = lhs_temp[stride];
+ plhs_ptr[2] = lhs_temp[stride << 1];
+ plhs_ptr[3] = lhs_temp[3 * stride];
+ plhs_ptr += mr;
+ lhs_temp++;
+ }
+
+ lhs_ptr += mr * stride;
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (rm > 0)
+ {
+ for (int j = 0; j < kb; j++)
+ {
+ for (int i = 0; i < rm; i++)
+ {
+ plhs_ptr[i] = lhs_ptr[i * stride];
+ }
+ for (int i = rm; i < mr; i++)
+ {
+ plhs_ptr[i] = 0.f;
+ }
+ plhs_ptr += mr;
+ lhs_ptr++;
+ }
+ }
+}
+
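+// Packs a kb x nb block of the row-major RHS into contiguous nr-wide panels.
+// Each k-step copies nr consecutive floats, so plain vld1q/vst1q moves
+// suffice and no transpose is needed; a ragged final panel (rn = nb % nr) is
+// zero-padded up to nr columns.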
+void _pack_rowmajor_notrans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr)
+{
+ const int nn = nb / nr;
+ const int rn = nb % nr;
+
+ switch (nr)
+ {
+ case 24:
+ for (int j = 0; j < nn; j++)
+ {
+ const float *rhs_temp = rhs_ptr;
+ float32x4_t q0, q1, q2, q3, q4, q5;
+ for (int i = 0; i < kb; i++)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1q_f32(rhs_temp + 4);
+ q2 = vld1q_f32(rhs_temp + 8);
+ q3 = vld1q_f32(rhs_temp + 12);
+ q4 = vld1q_f32(rhs_temp + 16);
+ q5 = vld1q_f32(rhs_temp + 20);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+ vst1q_f32(prhs_ptr + 12, q3);
+ vst1q_f32(prhs_ptr + 16, q4);
+ vst1q_f32(prhs_ptr + 20, q5);
+
+ rhs_temp += stride;
+ prhs_ptr += nr;
+ }
+
+ rhs_ptr += nr;
+ }
+ break;
+ case 16:
+ for (int j = 0; j < nn; j++)
+ {
+ const float *rhs_temp = rhs_ptr;
+ float32x4_t q0, q1, q2, q3;
+ for (int i = 0; i < kb; i++)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1q_f32(rhs_temp + 4);
+ q2 = vld1q_f32(rhs_temp + 8);
+ q3 = vld1q_f32(rhs_temp + 12);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+ vst1q_f32(prhs_ptr + 12, q3);
+
+ rhs_temp += stride;
+ prhs_ptr += nr;
+ }
+
+ rhs_ptr += nr;
+ }
+ break;
+ case 12:
+ for (int j = 0; j < nn; j++)
+ {
+ const float *rhs_temp = rhs_ptr;
+ float32x4_t q0, q1, q2;
+ for (int i = 0; i < kb; i++)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1q_f32(rhs_temp + 4);
+ q2 = vld1q_f32(rhs_temp + 8);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+
+ rhs_temp += stride;
+ prhs_ptr += nr;
+ }
+
+ rhs_ptr += nr;
+ }
+ break;
+ case 8:
+      for (int j = 0; j < nn; j++)
+      {
+ const float *rhs_temp = rhs_ptr;
+ float32x4_t q0, q1, q2, q3;
+
+ int i = 0;
+ for (; i + 1 < kb; i += 2)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1q_f32(rhs_temp + 4);
+ q2 = vld1q_f32(rhs_temp + stride);
+ q3 = vld1q_f32(rhs_temp + stride + 4);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+ vst1q_f32(prhs_ptr + 12, q3);
+
+ rhs_temp += stride << 1;
+ prhs_ptr += nr << 1;
+ }
+
+ for (; i < kb; i++)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1q_f32(rhs_temp + 4);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+
+ rhs_temp += stride;
+ prhs_ptr += nr;
+ }
+
+ rhs_ptr += nr;
+ }
+ break;
+ case 6:
+      for (int j = 0; j < nn; j++)
+      {
+ const float *rhs_temp = rhs_ptr;
+ float32x4_t q0, q2;
+ float32x2_t q1, q3;
+
+ int i = 0;
+ for (; i + 1 < kb; i += 2)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1_f32(rhs_temp + 4);
+
+ q2 = vld1q_f32(rhs_temp + stride);
+ q3 = vld1_f32(rhs_temp + stride + 4);
+ vst1q_f32(prhs_ptr, q0);
+ vst1_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 6, q2);
+ vst1_f32(prhs_ptr + 10, q3);
+
+ rhs_temp += stride << 1;
+ prhs_ptr += nr << 1;
+ }
+
+ for (; i < kb; i++)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1_f32(rhs_temp + 4);
+
+ vst1q_f32(prhs_ptr, q0);
+ vst1_f32(prhs_ptr + 4, q1);
+
+ rhs_temp += stride;
+ prhs_ptr += nr;
+ }
+
+ rhs_ptr += nr;
+ }
+ break;
+ case 4:
+      for (int j = 0; j < nn; j++)
+      {
+ const float *rhs_temp = rhs_ptr;
+ float32x4_t q0, q1, q2, q3;
+
+ int i = 0;
+ for (; i + 3 < kb; i += 4)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1q_f32(rhs_temp + stride);
+ q2 = vld1q_f32(rhs_temp + (stride << 1));
+ q3 = vld1q_f32(rhs_temp + (stride * 3));
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+ vst1q_f32(prhs_ptr + 12, q3);
+
+ rhs_temp += stride << 2;
+ prhs_ptr += nr << 2;
+ }
+ for (; i + 1 < kb; i += 2)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ q1 = vld1q_f32(rhs_temp + stride);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+
+ rhs_temp += stride << 1;
+ prhs_ptr += nr << 1;
+ }
+ for (; i < kb; i++)
+ {
+ q0 = vld1q_f32(rhs_temp);
+ vst1q_f32(prhs_ptr, q0);
+
+ rhs_temp += stride;
+ prhs_ptr += nr;
+ }
+
+ rhs_ptr += nr;
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (rn > 0)
+ {
+ for (int i = 0; i < kb; i++)
+ {
+ for (int j = 0; j < rn; j++)
+ {
+ prhs_ptr[j] = rhs_ptr[j];
+ }
+ for (int j = rn; j < nr; j++)
+ {
+ prhs_ptr[j] = 0.f;
+ }
+ prhs_ptr += nr;
+ rhs_ptr += stride;
+ }
+ }
+}
+
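+// With row-major storage, packing a transposed LHS walks memory in exactly
+// the pattern of packing a not-transposed RHS (and vice versa), so the
+// transposed cases simply forward to the packers above with the roles
+// swapped.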
+void _pack_rowmajor_trans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr)
+{
+ _pack_rowmajor_notrans_rhs(mr, mb, kb, stride, lhs_ptr, plhs_ptr);
+}
+
+void _pack_rowmajor_trans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr)
+{
+ _pack_rowmajor_notrans_lhs(nr, nb, kb, stride, rhs_ptr, prhs_ptr);
+}
+
+static inline void _pack_rowmajor_image_subn(const int nr, const int nb, const int stride,
+ const float *buffer, float *prhs_ptr)
+{
+ const int nn = nb / nr;
+ const int rn = nb % nr;
+
+ switch (nr)
+ {
+ case 24:
+ for (int j = 0; j < nn; j++)
+ {
+ float32x4_t q0, q1, q2, q3, q4, q5;
+ q0 = vld1q_f32(buffer);
+ q1 = vld1q_f32(buffer + 4);
+ q2 = vld1q_f32(buffer + 8);
+ q3 = vld1q_f32(buffer + 12);
+ q4 = vld1q_f32(buffer + 16);
+ q5 = vld1q_f32(buffer + 20);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+ vst1q_f32(prhs_ptr + 12, q3);
+ vst1q_f32(prhs_ptr + 16, q4);
+ vst1q_f32(prhs_ptr + 20, q5);
+ prhs_ptr += stride;
+ buffer += nr;
+ }
+ break;
+ case 16:
+ for (int j = 0; j < nn; j++)
+ {
+ float32x4_t q0, q1, q2, q3;
+ q0 = vld1q_f32(buffer);
+ q1 = vld1q_f32(buffer + 4);
+ q2 = vld1q_f32(buffer + 8);
+ q3 = vld1q_f32(buffer + 12);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+ vst1q_f32(prhs_ptr + 12, q3);
+ prhs_ptr += stride;
+ buffer += nr;
+ }
+ break;
+ case 12:
+ for (int j = 0; j < nn; j++)
+ {
+ float32x4_t q0, q1, q2;
+ q0 = vld1q_f32(buffer);
+ q1 = vld1q_f32(buffer + 4);
+ q2 = vld1q_f32(buffer + 8);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ vst1q_f32(prhs_ptr + 8, q2);
+ prhs_ptr += stride;
+ buffer += nr;
+ }
+ break;
+ case 8:
+ for (int j = 0; j < nn; j++)
+ {
+ float32x4_t q0, q1;
+ q0 = vld1q_f32(buffer);
+ q1 = vld1q_f32(buffer + 4);
+ vst1q_f32(prhs_ptr, q0);
+ vst1q_f32(prhs_ptr + 4, q1);
+ prhs_ptr += stride;
+ buffer += nr;
+ }
+ break;
+ case 6:
+ for (int j = 0; j < nn; j++)
+ {
+ float32x4_t q0;
+ float32x2_t q1;
+ q0 = vld1q_f32(buffer);
+ q1 = vld1_f32(buffer + 4);
+ vst1q_f32(prhs_ptr, q0);
+ vst1_f32(prhs_ptr + 4, q1);
+ prhs_ptr += stride;
+ buffer += nr;
+ }
+ break;
+ case 4:
+ for (int j = 0; j < nn; j++)
+ {
+ float32x4_t q0;
+ q0 = vld1q_f32(buffer);
+ vst1q_f32(prhs_ptr, q0);
+ prhs_ptr += stride;
+ buffer += nr;
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (rn > 0)
+ {
+ for (int j = 0; j < rn; j++)
+ {
+ prhs_ptr[j] = buffer[j];
+ }
+ for (int j = rn; j < nr; j++)
+ {
+ prhs_ptr[j] = 0.f;
+ }
+ }
+}
+
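+// Implicit im2col: each of the kb rows corresponds to one (input channel,
+// kernel offset) pair. For that pair the nb requested output pixels' input
+// samples are gathered into row_data, zero-filled where the receptive field
+// falls into the padding, and the row is then packed into the nr-panel
+// layout the micro kernels expect.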
+void _pack_rowmajor_image_rhs(const int nr, const int nb, const int kb, const int k0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *prhs_ptr)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int outw = output->w;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+
+ const int in_row0 = n0 / outw * stride_h;
+ const int in_col0 = n0 % outw * stride_w;
+ int seg0 = outw - n0 % outw;
+ if (seg0 > nb)
+ seg0 = nb;
+ int rows = (nb - seg0 + outw - 1) / outw;
+ if (seg0)
+ rows++;
+ const int segn = (nb - seg0) % outw;
+
+ float row_data[nb];
+
+ for (int i = k0; i < kb + k0; i++)
+ {
+ const int ic = i / (kernel_w * kernel_h);
+ const int in_row1 = ((i / kernel_w) % kernel_h) * params->dilation_h + in_row0;
+ const int in_col1 = i % kernel_w * params->dilation_w;
+
+#ifdef NCNN
+ const float *input_data = input->data + ic * alignSize(w * h, 16 / sizeof(float));
+#else // NCNN
+ const float *input_data = input->data + ic * w * h;
+#endif // NCNN
+ float *buffer = row_data;
+ int in_row = in_row1 - pad_h;
+
+ for (int out_rows = rows; out_rows; out_rows--)
+ {
+ int cols = (out_rows != 1 || segn == 0) ? outw : segn;
+ int in_col = in_col1 - pad_w;
+ if (out_rows == rows)
+ {
+ cols = seg0;
+ in_col += in_col0;
+ }
+ if ((unsigned int)in_row < (unsigned int)h)
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ if ((unsigned int)in_col < (unsigned int)w)
+ *(buffer++) = input_data[in_row * w + in_col];
+ else
+ *(buffer++) = 0;
+ in_col += stride_w;
+ }
+ }
+ else
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ *(buffer++) = 0;
+ in_col += stride_w;
+ }
+ }
+
+ in_row += stride_h;
+ }
+
+ _pack_rowmajor_image_subn(nr, nb, nr * kb, row_data, prhs_ptr);
+ prhs_ptr += nr;
+ }
+}
+
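+// Batched variant: an nb-wide strip of output pixels may span several images
+// in the batch, so the strip is split at image boundaries (seg_size output
+// pixels per image) and each sub-strip is packed with the per-image routine
+// above.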
+void _pack_rowmajor_image_rhs_batch(const int nr, const int nb, const int kb, const int k0,
+ const int n0, convMat_t *input, convMat_t *output,
+ convParams_t *params, float *prhs_ptr)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int c = input->c;
+
+#ifdef NCNN
+ const int seg_size = alignSize(output->w * output->h, 16 / sizeof(float));
+#else // NCNN
+ const int seg_size = output->w * output->h;
+#endif // NCNN
+
+#ifdef NCNN
+ float *data = input->data + (alignSize(w * h, 16 / sizeof(float)) * c) * (n0 / seg_size);
+#else // NCNN
+ float *data = input->data + (w * h * c) * (n0 / seg_size);
+#endif // NCNN
+
+ int seg0 = seg_size - n0 % seg_size;
+ if (seg0 > nb)
+ seg0 = nb;
+ int nseg = (nb - seg0 + seg_size - 1) / seg_size;
+ if (seg0)
+ nseg++;
+ const int segn = (nb - seg0) % seg_size;
+ convMat_t _input = {w, h, c, 1, data};
+
+ for (int i = 0; i < nseg; i++)
+ {
+ const int _nb = (i == 0 ? seg0 : (i == nseg - 1 ? segn : seg_size));
+ const int _n0 = (i == 0 ? seg_size - seg0 : 0);
+
+ _pack_rowmajor_image_rhs(nr, _nb, kb, k0, _n0, &_input, output, params, prhs_ptr);
+
+#ifdef NCNN
+ _input.data += alignSize(w * h, 16 / sizeof(float)) * c;
+#else // NCNN
+ _input.data += w * h * c;
+#endif // NCNN
+ }
+}
+
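+// Inverse of the im2col packing (a col2im-style scatter): each row of the
+// GEMM result corresponds to one (output channel, kernel offset) pair, and
+// its nb entries are accumulated back into the output image, skipping
+// entries whose target lands in the padding.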
+void _unpack_rowmajor_image_res(const int mb, const int nb, const int m0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *pres_ptr)
+{
+ const int outw = output->w;
+ const int outh = output->h;
+ const int w = input->w;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+
+ const int out_row0 = n0 / w * stride_h;
+ const int out_col0 = n0 % w * stride_w;
+ int seg0 = w - n0 % w;
+ if (seg0 > nb)
+ seg0 = nb;
+ int rows = (nb - seg0 + w - 1) / w;
+ if (seg0)
+ rows++;
+ const int segn = (nb - seg0) % w;
+
+ for (int i = m0; i < mb + m0; i++)
+ {
+ const int oc = i / (kernel_w * kernel_h);
+ const int out_row1 = ((i / kernel_w) % kernel_h) * params->dilation_h + out_row0;
+ const int out_col1 = i % kernel_w * params->dilation_w;
+
+#ifdef NCNN
+ float *output_data = output->data + oc * alignSize(outw * outh, 16 / sizeof(float));
+#else // NCNN
+ float *output_data = output->data + oc * outw * outh;
+#endif // NCNN
+ int out_row = out_row1 - pad_h;
+
+ for (int in_rows = rows; in_rows; in_rows--)
+ {
+ int cols = (in_rows != 1 || segn == 0) ? w : segn;
+ int out_col = out_col1 - pad_w;
+ if (in_rows == rows)
+ {
+ cols = seg0;
+ out_col += out_col0;
+ }
+ if ((unsigned int)out_row < (unsigned int)outh)
+ {
+ for (int in_col = cols; in_col; in_col--)
+ {
+ if ((unsigned int)out_col < (unsigned int)outw)
+ output_data[out_row * outw + out_col] += *pres_ptr++;
+ else
+ pres_ptr++;
+ out_col += stride_w;
+ }
+ }
+ else
+ {
+ pres_ptr += cols;
+ }
+ out_row += stride_h;
+ }
+ }
+}
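+
+// Note: this is the col2im counterpart of the packers above. Contributions
+// from overlapping kernel windows are accumulated with `+=` into the output
+// image, so the output buffer is presumably zero-initialized by the caller.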
+
+// TODO: add ARMv8 (v8) support for the remaining cases and handle other nr values.
+static inline void _pack_colmajor_image_rhs_sub(const int nr, const int k, const float *buffer,
+ float *prhs_ptr)
+{
+ int nk = k >> 2;
+ int rk = k & 0x03;
+
+ const int _stride = k << 2;
+
+ switch (nr)
+ {
+ case 12:
+ if (nk > 0)
+ {
+#if __aarch64__
+ asm volatile("0:\n"
+ "mov x0, %[buffer]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v8.4s, v10.4s\n"
+ "zip2 v30.4s, v8.4s, v10.4s\n"
+ "zip1 v29.4s, v9.4s, v11.4s\n"
+ "zip2 v31.4s, v9.4s, v11.4s\n"
+ "zip1 v8.4s, v28.4s, v29.4s\n"
+ "zip2 v9.4s, v28.4s, v29.4s\n"
+ "zip1 v10.4s, v30.4s, v31.4s\n"
+ "zip2 v11.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v12.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v13.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v14.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v15.4s}, [x0]\n"
+
+ "zip1 v28.4s, v12.4s, v14.4s\n"
+ "zip2 v30.4s, v12.4s, v14.4s\n"
+ "zip1 v29.4s, v13.4s, v15.4s\n"
+ "zip2 v31.4s, v13.4s, v15.4s\n"
+ "zip1 v12.4s, v28.4s, v29.4s\n"
+ "zip2 v13.4s, v28.4s, v29.4s\n"
+ "zip1 v14.4s, v30.4s, v31.4s\n"
+ "zip2 v15.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v8.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v12.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v9.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v13.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v10.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v14.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v11.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v15.4s}, [%[prhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[buffer], %[buffer], #16\n"
+ "bne 0b\n"
+ : [buffer] "+r"(buffer), [prhs_ptr] "+r"(prhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v12", "v13", "v14", "v15", "v28", "v29", "v30", "v31");
+#else // __aarch64__
+ asm volatile("0:\n"
+ "mov r0, %[buffer]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+
+ "vzip.32 q8, q10\n"
+ "vzip.32 q9, q11\n"
+ "vzip.32 q8, q9\n"
+ "vzip.32 q10, q11\n"
+
+ "vld1.f32 {d24-d25}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d26-d27}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d28-d29}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d30-d31}, [r0]\n"
+
+ "vzip.32 q12, q14\n"
+ "vzip.32 q13, q15\n"
+ "vzip.32 q12, q13\n"
+ "vzip.32 q14, q15\n"
+
+ "vst1.f32 {d8-d9}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d16-d17}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d24-d25}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d18-d19}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d26-d27}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d20-d21}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d28-d29}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d22-d23}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d30-d31}, [%[prhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[buffer], %[buffer], #16\n"
+ "bne 0b\n"
+ : [buffer] "+r"(buffer), [prhs_ptr] "+r"(prhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7", "q8", "q9", "q10", "q11",
+ "q12", "q13", "q14", "q15");
+#endif // __aarch64__
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ prhs_ptr[0] = buffer[0];
+ prhs_ptr[1] = buffer[k];
+ prhs_ptr[2] = buffer[k << 1];
+ prhs_ptr[3] = buffer[3 * k];
+ prhs_ptr[4] = buffer[k << 2];
+ prhs_ptr[5] = buffer[5 * k];
+ prhs_ptr[6] = buffer[6 * k];
+ prhs_ptr[7] = buffer[7 * k];
+ prhs_ptr[8] = buffer[k << 3];
+ prhs_ptr[9] = buffer[9 * k];
+ prhs_ptr[10] = buffer[10 * k];
+ prhs_ptr[11] = buffer[11 * k];
+ prhs_ptr += nr;
+ buffer++;
+ }
+ break;
+
+ case 8:
+ if (nk > 0)
+ {
+#if __aarch64__
+ asm volatile("0:\n"
+ "mov x0, %[buffer]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "ld1 {v8.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v9.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v10.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v11.4s}, [x0]\n"
+
+ "zip1 v28.4s, v8.4s, v10.4s\n"
+ "zip2 v30.4s, v8.4s, v10.4s\n"
+ "zip1 v29.4s, v9.4s, v11.4s\n"
+ "zip2 v31.4s, v9.4s, v11.4s\n"
+ "zip1 v8.4s, v28.4s, v29.4s\n"
+ "zip2 v9.4s, v28.4s, v29.4s\n"
+ "zip1 v10.4s, v30.4s, v31.4s\n"
+ "zip2 v11.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v8.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v9.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v10.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v11.4s}, [%[prhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[buffer], %[buffer], #16\n"
+ "bne 0b\n"
+ : [buffer] "+r"(buffer), [prhs_ptr] "+r"(prhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11",
+ "v28", "v29", "v30", "v31");
+#else // __aarch64__
+ asm volatile("0:\n"
+ "mov r0, %[buffer]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d20-d21}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d22-d23}, [r0]\n"
+
+ "vzip.32 q8, q10\n"
+ "vzip.32 q9, q11\n"
+ "vzip.32 q8, q9\n"
+ "vzip.32 q10, q11\n"
+
+ "vst1.f32 {d8-d9}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d16-d17}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d18-d19}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d20-d21}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d22-d23}, [%[prhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[buffer], %[buffer], #16\n"
+ "bne 0b\n"
+ : [buffer] "+r"(buffer), [prhs_ptr] "+r"(prhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7", "q8", "q9", "q10", "q11");
+#endif // __aarch64__
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ prhs_ptr[0] = buffer[0];
+ prhs_ptr[1] = buffer[k];
+ prhs_ptr[2] = buffer[k << 1];
+ prhs_ptr[3] = buffer[3 * k];
+ prhs_ptr[4] = buffer[k << 2];
+ prhs_ptr[5] = buffer[5 * k];
+ prhs_ptr[6] = buffer[6 * k];
+ prhs_ptr[7] = buffer[7 * k];
+ prhs_ptr += nr;
+ buffer++;
+ }
+ break;
+#if !__aarch64__
+ case 6:
+ if (nk > 0)
+ {
+ asm volatile("0:\n"
+ "mov r0, %[buffer]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d16-d17}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d18-d19}, [r0]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+ "vzip.32 q8, q9\n"
+
+ "vst1.f32 {d8-d9}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d16}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d17}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d18}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d19}, [%[prhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[buffer], %[buffer], #16\n"
+ "bne 0b\n"
+ : [buffer] "+r"(buffer), [prhs_ptr] "+r"(prhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7", "q8", "q9");
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ prhs_ptr[0] = buffer[0];
+ prhs_ptr[1] = buffer[k];
+ prhs_ptr[2] = buffer[k << 1];
+ prhs_ptr[3] = buffer[3 * k];
+ prhs_ptr[4] = buffer[k << 2];
+ prhs_ptr[5] = buffer[5 * k];
+ prhs_ptr += nr;
+ buffer++;
+ }
+ break;
+#endif // !__aarch64__
+ case 4:
+ if (nk > 0)
+ {
+#if __aarch64__
+ asm volatile("0:\n"
+ "mov x0, %[buffer]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "add x0, x0, %[_stride]\n"
+ "ld1 {v7.4s}, [x0]\n"
+
+ "zip1 v28.4s, v4.4s, v6.4s\n"
+ "zip2 v30.4s, v4.4s, v6.4s\n"
+ "zip1 v29.4s, v5.4s, v7.4s\n"
+ "zip2 v31.4s, v5.4s, v7.4s\n"
+ "zip1 v4.4s, v28.4s, v29.4s\n"
+ "zip2 v5.4s, v28.4s, v29.4s\n"
+ "zip1 v6.4s, v30.4s, v31.4s\n"
+ "zip2 v7.4s, v30.4s, v31.4s\n"
+
+ "st1 {v4.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v5.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v6.4s}, [%[prhs_ptr]], #16\n"
+ "st1 {v7.4s}, [%[prhs_ptr]], #16\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[buffer], %[buffer], #16\n"
+ "bne 0b\n"
+ : [buffer] "+r"(buffer), [prhs_ptr] "+r"(prhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "x0", "v4", "v5", "v6", "v7", "v28", "v29", "v30", "v31");
+#else // __aarch64__
+ asm volatile("0:\n"
+ "mov r0, %[buffer]\n"
+
+ "vld1.f32 {d8-d9}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d10-d11}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d12-d13}, [r0]\n"
+ "add r0, r0, %[_stride]\n"
+ "vld1.f32 {d14-d15}, [r0]\n"
+
+ "vzip.32 q4, q6\n"
+ "vzip.32 q5, q7\n"
+ "vzip.32 q4, q5\n"
+ "vzip.32 q6, q7\n"
+
+ "vst1.f32 {d8-d9}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d10-d11}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d12-d13}, [%[prhs_ptr]]!\n"
+ "vst1.f32 {d14-d15}, [%[prhs_ptr]]!\n"
+
+ "subs %[nk], %[nk], #1\n"
+ "add %[buffer], %[buffer], #16\n"
+ "bne 0b\n"
+ : [buffer] "+r"(buffer), [prhs_ptr] "+r"(prhs_ptr), [nk] "+r"(nk)
+ : [_stride] "r"(_stride)
+ : "cc", "memory", "r0", "q4", "q5", "q6", "q7");
+#endif // __aarch64__
+ }
+
+ for (int j = 0; j < rk; j++)
+ {
+ prhs_ptr[0] = buffer[0];
+ prhs_ptr[1] = buffer[k];
+ prhs_ptr[2] = buffer[k << 1];
+ prhs_ptr[3] = buffer[3 * k];
+ prhs_ptr += nr;
+ buffer++;
+ }
+ break;
+ default:
+ break;
+ }
+}
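+
+// A minimal scalar sketch of the zip idiom used above: two rounds of
+// zip1/zip2 (or vzip.32 on ARMv7) transpose a 4x4 tile in registers, i.e.
+//
+//   // in:  four rows of 4 consecutive floats, row stride = k floats
+//   // out: the 4x4 tile transposed (each column becomes contiguous)
+//   for (int col = 0; col < 4; col++)
+//     for (int row = 0; row < 4; row++)
+//       out[col * 4 + row] = in[row * k + col];
+//
+// The scalar remainder loops after each asm block gather the k % 4 leftover
+// columns the same way, one column across all nr rows per iteration.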
+
+void _pack_colmajor_notrans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr)
+{
+ _pack_rowmajor_notrans_rhs(mr, mb, kb, stride, lhs_ptr, plhs_ptr);
+}
+
+void _pack_colmajor_notrans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr)
+{
+ _pack_rowmajor_notrans_lhs(nr, nb, kb, stride, rhs_ptr, prhs_ptr);
+}
+
+void _pack_colmajor_trans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr)
+{
+ _pack_rowmajor_notrans_lhs(mr, mb, kb, stride, lhs_ptr, plhs_ptr);
+}
+
+void _pack_colmajor_trans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr)
+{
+ _pack_rowmajor_notrans_rhs(nr, nb, kb, stride, rhs_ptr, prhs_ptr);
+}
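+
+// These four wrappers encode the usual duality: a column-major GEMM C = A * B
+// is the row-major GEMM C^T = B^T * A^T, and a column-major buffer read as
+// row-major is the transpose, so the column-major LHS/RHS packers simply
+// delegate to the row-major ones with the operand roles swapped.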
+
+void _pack_colmajor_image_rhs(const int nr, const int nb, const int kb, const int k0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *prhs_ptr)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int c = input->c;
+ const int outw = output->w;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+ const float *input_data = input->data;
+
+ int c0 = c - k0 % c;
+ if (c0 > kb)
+ c0 = kb;
+ int nc = (kb - c0 + c - 1) / c;
+ if (c0)
+ nc++;
+ const int cn = (kb - c0) % c;
+
+ int seg0 = outw - n0 % outw;
+ if (seg0 > nb)
+ seg0 = nb;
+ int rows = (nb - seg0 + outw - 1) / outw;
+ if (seg0)
+ rows++;
+ const int segn = (nb - seg0) % outw;
+
+ const int in_row0 = n0 / outw * stride_h;
+ const int in_col0 = n0 % outw * stride_w;
+
+ for (int i = 0; i < nc; i++)
+ {
+ const int channels = (i == 0 && c0 != 0) ? c0 : ((i == nc - 1 && cn != 0) ? cn : c);
+ const int c1 = (i == 0) ? k0 % c : 0;
+
+ float tmp_data[channels * nr];
+ int nindex = 0;
+ float *buffer = tmp_data;
+ float *prhs_tmp = prhs_ptr;
+
+ const int in_row1 = (k0 / c + i) / kernel_w % kernel_h * params->dilation_h + in_row0;
+ const int in_col1 = (k0 / c + i) % kernel_w * params->dilation_w;
+
+ int in_row = in_row1 - pad_h;
+
+ for (int out_rows = rows; out_rows; out_rows--)
+ {
+ int cols = (out_rows != 1 || segn == 0) ? outw : segn;
+ int in_col = in_col1 - pad_w;
+ if (out_rows == rows)
+ {
+ cols = seg0;
+ in_col += in_col0;
+ }
+ if ((unsigned int)in_row < (unsigned int)h)
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ if ((unsigned int)in_col < (unsigned int)w)
+ {
+ for (int j = c1; j < c1 + channels; j++)
+ {
+ *(buffer++) = input_data[(in_row * w + in_col) * c + j];
+ }
+ }
+ else
+ {
+ for (int j = 0; j < channels; j++)
+ {
+ *(buffer++) = 0;
+ }
+ }
+ in_col += stride_w;
+
+ nindex++;
+ if (nindex == nr)
+ {
+ nindex = 0;
+ buffer = tmp_data;
+ _pack_colmajor_image_rhs_sub(nr, channels, tmp_data, prhs_tmp);
+ prhs_tmp += kb * nr;
+ }
+ }
+ }
+ else
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ for (int j = 0; j < channels; j++)
+ {
+ *(buffer++) = 0;
+ }
+ in_col += stride_w;
+
+ nindex++;
+ if (nindex == nr)
+ {
+ nindex = 0;
+ buffer = tmp_data;
+ _pack_colmajor_image_rhs_sub(nr, channels, tmp_data, prhs_tmp);
+ prhs_tmp += kb * nr;
+ }
+ }
+ }
+
+ in_row += stride_h;
+ }
+
+ if (nindex > 0)
+ {
+ float *data = tmp_data;
+ for (int i = 0; i < channels; i++)
+ {
+ for (int j = 0; j < nindex; j++)
+ {
+ prhs_tmp[j] = data[j * channels];
+ }
+ for (int j = nindex; j < nr; j++)
+ {
+ prhs_tmp[j] = 0.f;
+ }
+ prhs_tmp += nr;
+ data++;
+ }
+ }
+
+ prhs_ptr += channels * nr;
+ }
+}
+
+void _pack_colmajor_image_rhs_batch(const int nr, const int nb, const int kb, const int k0,
+ const int n0, convMat_t *input, convMat_t *output,
+ convParams_t *params, float *prhs_ptr)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int c = input->c;
+ const int outw = output->w;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+
+ int c0 = c - k0 % c;
+ if (c0 > kb)
+ c0 = kb;
+ int nc = (kb - c0 + c - 1) / c;
+ if (c0)
+ nc++;
+ const int cn = (kb - c0) % c;
+
+ const int seg_size = output->w * output->h;
+
+ const float *indata = input->data + (w * h * c) * (n0 / seg_size);
+
+ int bseg0 = seg_size - n0 % seg_size;
+ if (bseg0 > nb)
+ bseg0 = nb;
+ int bnseg = (nb - bseg0 + seg_size - 1) / seg_size;
+ if (bseg0)
+ bnseg++;
+ const int bsegn = (nb - bseg0) % seg_size;
+
+ for (int ll = 0; ll < nc; ll++)
+ {
+ const float *input_data = indata;
+
+ const int channels = (ll == 0 && c0 != 0) ? c0 : ((ll == nc - 1 && cn != 0) ? cn : c);
+ const int c1 = (ll == 0) ? k0 % c : 0;
+
+ int nindex = 0;
+ float *prhs_tmp = prhs_ptr;
+ float tmp_data[channels * nr];
+ float *buffer = tmp_data;
+
+ for (int i = 0; i < bnseg; i++)
+ {
+ const int _nb =
+ ((i == 0 && bseg0 != 0) ? bseg0 : ((i == bnseg - 1 && bsegn != 0) ? bsegn : seg_size));
+ const int _n0 = (i == 0 ? n0 % seg_size : 0);
+
+ int seg0 = outw - _n0 % outw;
+ if (seg0 > _nb)
+ seg0 = _nb;
+ int rows = (_nb - seg0 + outw - 1) / outw;
+ if (seg0)
+ rows++;
+ const int segn = (_nb - seg0) % outw;
+
+ const int in_row0 = _n0 / outw * stride_h;
+ const int in_col0 = _n0 % outw * stride_w;
+
+ const int in_row1 = (k0 / c + ll) / kernel_w % kernel_h + in_row0;
+ const int in_col1 = (k0 / c + ll) % kernel_w;
+
+ int in_row = in_row1;
+
+ for (int out_rows = rows; out_rows; out_rows--)
+ {
+ int cols = (out_rows != 1 || segn == 0) ? outw : segn;
+ int in_col = in_col1;
+ if (out_rows == rows)
+ {
+ cols = seg0;
+ in_col += in_col0;
+ }
+ if ((unsigned int)in_row < (unsigned int)h)
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ if ((unsigned int)in_col < (unsigned int)w)
+ {
+ for (int j = c1; j < c1 + channels; j++)
+ {
+ *(buffer++) = input_data[(in_row * w + in_col) * c + j];
+ }
+ }
+ else
+ {
+ for (int j = 0; j < channels; j++)
+ {
+ *(buffer++) = 0;
+ }
+ }
+ in_col += stride_w;
+
+ nindex++;
+ if (nindex == nr)
+ {
+ nindex = 0;
+ buffer = tmp_data;
+ _pack_colmajor_image_rhs_sub(nr, channels, tmp_data, prhs_tmp);
+ prhs_tmp += kb * nr;
+ }
+ }
+ }
+ else
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ for (int j = 0; j < channels; j++)
+ {
+ *(buffer++) = 0;
+ }
+ in_col += stride_w;
+
+ nindex++;
+ if (nindex == nr)
+ {
+ nindex = 0;
+ buffer = tmp_data;
+ _pack_colmajor_image_rhs_sub(nr, channels, tmp_data, prhs_tmp);
+ prhs_tmp += kb * nr;
+ }
+ }
+ }
+
+ in_row += stride_h;
+ }
+
+ input_data += w * h * c;
+ }
+
+ if (nindex > 0)
+ {
+ float *data = tmp_data;
+ for (int ii = 0; ii < channels; ii++)
+ {
+ for (int jj = 0; jj < nindex; jj++)
+ {
+ prhs_tmp[jj] = data[jj * channels];
+ }
+ for (int jj = nindex; jj < nr; jj++)
+ {
+ prhs_tmp[jj] = 0.f;
+ }
+ prhs_tmp += nr;
+ data++;
+ }
+ }
+
+ prhs_ptr += channels * nr;
+ }
+}
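+
+// Note: unlike _pack_colmajor_image_rhs above, this batch variant applies
+// neither pad_w/pad_h nor dilation_w/dilation_h when computing in_row/in_col;
+// it appears to assume zero padding and unit dilation for batched inputs.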
+
+void _unpack_colmajor_image_res(const int mb, const int nb, const int m0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *pres_ptr)
+{
+ const int w = input->w;
+ const int outw = output->w;
+ const int outh = output->h;
+ const int outc = output->c;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+ float *output_data = output->data;
+
+ int c0 = outc - m0 % outc;
+ if (c0 > mb)
+ c0 = mb;
+ int nc = (mb - c0 + outc - 1) / outc;
+ if (c0)
+ nc++;
+ const int cn = (mb - c0) % outc;
+
+ int seg0 = w - n0 % w;
+ if (seg0 > nb)
+ seg0 = nb;
+ int rows = (nb - seg0 + w - 1) / w;
+ if (seg0)
+ rows++;
+ const int segn = (nb - seg0) % w;
+
+ const int out_row0 = n0 / w * stride_h;
+ const int out_col0 = n0 % w * stride_w;
+
+ for (int i = 0; i < nc; i++)
+ {
+ const int channels = (i == 0 && c0 != 0) ? c0 : ((i == nc - 1 && cn != 0) ? cn : outc);
+ const int c1 = (i == 0) ? m0 % outc : 0;
+
+ float *buffer = pres_ptr;
+
+ const int out_row1 = (m0 / outc + i) / kernel_w % kernel_h * params->dilation_h + out_row0;
+ const int out_col1 = (m0 / outc + i) % kernel_w * params->dilation_w;
+
+ int out_row = out_row1 - pad_h;
+
+ for (int in_rows = rows; in_rows; in_rows--)
+ {
+ int cols = (in_rows != 1 || segn == 0) ? w : segn;
+ int out_col = out_col1 - pad_w;
+ if (in_rows == rows)
+ {
+ cols = seg0;
+ out_col += out_col0;
+ }
+ if ((unsigned int)out_row < (unsigned int)outh)
+ {
+ for (int in_col = cols; in_col; in_col--)
+ {
+ if ((unsigned int)out_col < (unsigned int)outw)
+ {
+ for (int j = c1; j < c1 + channels; j++)
+ {
+            // Note: data race when run with multiple threads.
+            // #pragma omp atomic // would be correct, but costs too much performance
+ output_data[(out_row * outw + out_col) * outc + j] += *(buffer + j - c1);
+ }
+ }
+ buffer += mb;
+ out_col += stride_w;
+ }
+ }
+ else
+ {
+ buffer += cols * mb;
+ }
+ out_row += stride_h;
+ }
+
+ pres_ptr += channels;
+ }
+}
+
+void _sparse_pack_rowmajor_image(const int nb, const int k0, const int n0, convMat_t *input,
+ convMat_t *output, convParams_t *params, float *prhs_ptr)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int outw = output->w;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+
+ const int in_row0 = n0 / outw * stride_h;
+ const int in_col0 = n0 % outw * stride_w;
+ int seg0 = outw - n0 % outw;
+ if (seg0 > nb)
+ seg0 = nb;
+ int rows = (nb - seg0 + outw - 1) / outw;
+ if (seg0)
+ rows++;
+ const int segn = (nb - seg0) % outw;
+
+ const int ic = k0 / (kernel_w * kernel_h);
+ const int in_row1 = ((k0 / kernel_w) % kernel_h) * params->dilation_h + in_row0;
+ const int in_col1 = k0 % kernel_w * params->dilation_w;
+
+#ifdef NCNN
+ const float *input_data = input->data + ic * alignSize(w * h, 16 / sizeof(float));
+#else // NCNN
+ const float *input_data = input->data + ic * w * h;
+#endif // NCNN
+
+ int in_row = in_row1 - pad_h;
+
+ for (int out_rows = rows; out_rows; out_rows--)
+ {
+ int cols = (out_rows != 1 || segn == 0) ? outw : segn;
+ int in_col = in_col1 - pad_w;
+ if (out_rows == rows)
+ {
+ cols = seg0;
+ in_col += in_col0;
+ }
+ if ((unsigned int)in_row < (unsigned int)h)
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ if ((unsigned int)in_col < (unsigned int)w)
+ *(prhs_ptr++) = input_data[in_row * w + in_col];
+ else
+ *(prhs_ptr++) = 0;
+ in_col += stride_w;
+ }
+ }
+ else
+ {
+ for (int out_col = cols; out_col; out_col--)
+ {
+ *(prhs_ptr++) = 0;
+ in_col += stride_w;
+ }
+ }
+
+ in_row += stride_h;
+ }
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/sgemm_pack.h b/runtimes/libs/srcn/src/sgemm_pack.h
new file mode 100644
index 000000000..6653e7396
--- /dev/null
+++ b/runtimes/libs/srcn/src/sgemm_pack.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_SGEMM_PACK_H__
+#define __NNFW_SRCN_SGEMM_PACK_H__
+
+#include "srcn/conv_type.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+void _pack_rowmajor_notrans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr);
+void _pack_rowmajor_notrans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr);
+void _pack_rowmajor_trans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr);
+void _pack_rowmajor_trans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr);
+void _pack_rowmajor_image_rhs(const int nr, const int nb, const int kb, const int k0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *prhs_ptr);
+void _pack_rowmajor_image_rhs_batch(const int nr, const int nb, const int kb, const int k0,
+ const int n0, convMat_t *input, convMat_t *output,
+ convParams_t *params, float *prhs_ptr);
+
+void _unpack_rowmajor_image_res(const int mb, const int nb, const int m0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *pres_ptr);
+
+void _pack_colmajor_notrans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr);
+void _pack_colmajor_notrans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr);
+void _pack_colmajor_trans_lhs(const int mr, const int mb, const int kb, const int stride,
+ const float *lhs_ptr, float *plhs_ptr);
+void _pack_colmajor_trans_rhs(const int nr, const int nb, const int kb, const int stride,
+ const float *rhs_ptr, float *prhs_ptr);
+
+void _pack_colmajor_image_rhs(const int nr, const int nb, const int kb, const int k0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *prhs_ptr);
+
+void _pack_colmajor_image_rhs_batch(const int nr, const int nb, const int kb, const int k0,
+ const int n0, convMat_t *input, convMat_t *output,
+ convParams_t *params, float *prhs_ptr);
+
+void _unpack_colmajor_image_res(const int mb, const int nb, const int m0, const int n0,
+ convMat_t *input, convMat_t *output, convParams_t *params,
+ float *pres_ptr);
+
+void _sparse_pack_rowmajor_image(const int nb, const int k0, const int n0, convMat_t *input,
+ convMat_t *output, convParams_t *params, float *prhs_ptr);
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_SGEMM_PACK_H__
diff --git a/runtimes/libs/srcn/src/sgemm_singlethread.cc b/runtimes/libs/srcn/src/sgemm_singlethread.cc
new file mode 100644
index 000000000..f9b9f45a9
--- /dev/null
+++ b/runtimes/libs/srcn/src/sgemm_singlethread.cc
@@ -0,0 +1,689 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdexcept>
+
+#include "common.h"
+#include "sgemm_kernel.h"
+#include "sgemm_pack.h"
+#include "sgemm_singlethread.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+void sgemm_singlethread::param_init()
+{
+ if (n_ >= m_)
+ {
+ shard_type_ = shardByRow;
+ }
+ else
+ {
+ shard_type_ = shardByCol;
+ }
+
+#if __aarch64__
+ if (major_type_ == rowMajor)
+ {
+ if (shard_type_ == shardByRow)
+ {
+ mr_ = 8;
+ nr_ = 12;
+ }
+ else
+ {
+ mr_ = 12;
+ nr_ = 8;
+ }
+ }
+ else if (major_type_ == colMajor)
+ {
+ mr_ = 12;
+ nr_ = 8;
+ }
+#else // __aarch64__
+ if (major_type_ == rowMajor)
+ {
+    // FIXME: this special case works around a bug whose cause is not yet known.
+ if (ltrans_ == notrans && rtrans_ == trans)
+ {
+ mr_ = 4;
+ nr_ = 12;
+ }
+ else
+ {
+ mr_ = 6;
+ nr_ = 8;
+ }
+ }
+ else if (major_type_ == colMajor)
+ {
+ mr_ = 8;
+ nr_ = 6;
+ }
+#endif // __aarch64__
+
+ int k_div = (nr_ * sizeof_RhsScalar);
+ int k_sub = (mr_ * nr_ * sizeof_ResScalar);
+
+ int gen_col = GEN_COL / cache_div_;
+ int min_k = MAX_K / cache_div_;
+
+ const int k_cache = MIN(divup((int)(L1_CACHE_SIZE - k_sub), (int)k_div), min_k);
+ bk_ = MIN(k_cache, k_);
+
+ if (shard_type_ == shardByCol)
+ {
+ int m_sub = (bk_ * nr_ * sizeof_RhsScalar);
+ int m_div = (sizeof_LhsScalar * bk_ * 2 * cache_div_);
+ if (L3_CACHE_SIZE)
+ m_div = (sizeof_LhsScalar * bk_ * 2);
+ int m_cache = divup((L2_CACHE_SIZE - m_sub), m_div);
+ bm_ = MIN(m_cache, m_);
+
+ bn_ = MIN(gen_col, n_);
+ if (L3_CACHE_SIZE)
+ {
+ int n_sub = (bk_ * bm_ * sizeof_RhsScalar);
+ int n_cache = divup((L3_CACHE_SIZE - n_sub), (sizeof_LhsScalar * bk_ * 2));
+ bn_ = MIN(n_cache, bn_);
+ }
+ }
+ else
+ {
+ int n_sub = (bk_ * mr_ * sizeof_RhsScalar);
+ int n_div = (sizeof_LhsScalar * bk_ * 2 * cache_div_);
+ if (L3_CACHE_SIZE)
+ n_div = (sizeof_LhsScalar * bk_ * 2);
+ int n_cache = divup((L2_CACHE_SIZE - n_sub), n_div);
+ bn_ = MIN(n_cache, n_);
+
+ bm_ = MIN(gen_col, m_);
+ if (L3_CACHE_SIZE)
+ {
+ int m_sub = (bk_ * bn_ * sizeof_RhsScalar);
+ int m_cache = divup((L3_CACHE_SIZE - m_sub), (sizeof_LhsScalar * bk_ * 2));
+ bm_ = MIN(m_cache, bm_);
+ }
+ }
+
+ nm_ = divup(m_, bm_);
+ nn_ = divup(n_, bn_);
+ nk_ = divup(k_, bk_);
+
+ rm_ = m_ % bm_;
+ rn_ = n_ % bn_;
+ rk_ = k_ % bk_;
+}
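+
+// Worked example (illustrative; the real L1/L2/L3_CACHE_SIZE, GEN_COL and
+// MAX_K values come from common.h). Assuming L1_CACHE_SIZE = 32 KiB,
+// mr_ = 8, nr_ = 12 and 4-byte scalars:
+//
+//   k_sub   = mr_ * nr_ * 4 = 384 bytes       // one result micro-tile
+//   k_div   = nr_ * 4 = 48 bytes per k step   // one packed RHS row
+//   k_cache = divup(32768 - 384, 48) = 675    // then clamped by MAX_K / cache_div_
+//
+// i.e. bk_ is sized so a packed RHS panel plus the accumulator tile fits in
+// L1; bm_ and bn_ are derived the same way from L2 (and L3 when present).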
+
+sgemm_singlethread::sgemm_singlethread(sgemmType_t major_type, sgemmTrans_t ltrans,
+ sgemmTrans_t rtrans, const int m, const int n, const int k,
+ const float *lhs_data, const float *rhs_data,
+ float *res_data, int cache_div)
+ : major_type_(major_type), ltrans_(ltrans), rtrans_(rtrans), m_(m), n_(n), k_(k),
+ lhs_data_(lhs_data), rhs_data_(rhs_data), res_data_(res_data), cache_div_(cache_div)
+{
+ param_init();
+}
+
+sgemm_singlethread::~sgemm_singlethread() {}
+
+void sgemm_singlethread::run()
+{
+ if (major_type_ == rowMajor)
+ {
+ if (ltrans_ == notrans && rtrans_ == notrans)
+ {
+ compute_rowmajor_nn();
+ }
+ else if (ltrans_ == notrans && rtrans_ == trans)
+ {
+ compute_rowmajor_nt();
+ }
+ else if (ltrans_ == trans && rtrans_ == notrans)
+ {
+ compute_rowmajor_tn();
+ }
+ else if (ltrans_ == trans && rtrans_ == trans)
+ {
+ compute_rowmajor_tt();
+ }
+ else
+ {
+      throw std::runtime_error{"unsupported trans type."};
+ }
+ }
+ else if (major_type_ == colMajor)
+ {
+ if (ltrans_ == notrans && rtrans_ == notrans)
+ {
+ compute_colmajor_nn();
+ }
+ else if (ltrans_ == notrans && rtrans_ == trans)
+ {
+ compute_colmajor_nt();
+ }
+ else if (ltrans_ == trans && rtrans_ == notrans)
+ {
+ compute_colmajor_tn();
+ }
+ else if (ltrans_ == trans && rtrans_ == trans)
+ {
+ compute_colmajor_tt();
+ }
+ else
+ {
+      throw std::runtime_error{"unsupported trans type."};
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported major type."};
+ }
+}
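+
+// Usage sketch (hypothetical driver code, for illustration): a single-threaded
+// row-major C = A * B with non-transposed operands.
+//
+//   const int m = 64, n = 64, k = 64;
+//   std::vector<float> A(m * k, 1.f), B(k * n, 1.f), C(m * n, 0.f);
+//   sgemm_singlethread gemm(rowMajor, notrans, notrans, m, n, k,
+//                           A.data(), B.data(), C.data(), 1 /* cache_div */);
+//   gemm.run();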
+
+void sgemm_singlethread::compute_rowmajor_nn()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
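+
+// Loop-order note: with shardByCol the packed RHS panel is built once per
+// (j, l) block and reused across every row block i; with shardByRow the
+// packed LHS is built once per (i, l) block and reused across every column
+// block j. param_init() picks shardByRow when n_ >= m_, so the packed
+// operand is the one reused across the larger loop dimension.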
+
+void sgemm_singlethread::compute_rowmajor_nt()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_trans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_notrans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_rowmajor_trans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
+
+void sgemm_singlethread::compute_rowmajor_tn()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_trans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_trans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_rowmajor_notrans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
+
+void sgemm_singlethread::compute_rowmajor_tt()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_trans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_rowmajor_trans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_rowmajor_trans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_rowmajor_trans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ _sgemm_rowmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[i * bm_ * n_ + j * bn_], l, n_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
+
+void sgemm_singlethread::compute_colmajor_nn()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
+
+void sgemm_singlethread::compute_colmajor_nt()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_trans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_notrans_lhs(mr_, bm, bk, m_, &lhs_data_[l * bk_ * m_ + i * bm_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_colmajor_trans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
+
+void sgemm_singlethread::compute_colmajor_tn()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_trans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_trans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_colmajor_notrans_rhs(nr_, bn, bk, k_, &rhs_data_[j * bn_ * k_ + l * bk_], prhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
+
+void sgemm_singlethread::compute_colmajor_tt()
+{
+ int mstride = (bm_ + mr_ - 1) / mr_ * mr_;
+ int nstride = (bn_ + nr_ - 1) / nr_ * nr_;
+
+ float plhs_ptr[mstride * bk_];
+ float prhs_ptr[nstride * bk_];
+
+ if (shard_type_ == shardByCol)
+ {
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_trans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ _pack_colmajor_trans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divnm(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else if (shard_type_ == shardByRow)
+ {
+ for (int i = 0; i < nm_; i++)
+ {
+ const int bm = (i != nm_ - 1 || rm_ == 0) ? bm_ : rm_;
+
+ for (int l = 0; l < nk_; l++)
+ {
+ const int bk = (l != nk_ - 1 || rk_ == 0) ? bk_ : rk_;
+
+ _pack_colmajor_trans_lhs(mr_, bm, bk, k_, &lhs_data_[i * bm_ * k_ + l * bk_], plhs_ptr);
+
+ for (int j = 0; j < nn_; j++)
+ {
+ const int bn = (j != nn_ - 1 || rn_ == 0) ? bn_ : rn_;
+
+ _pack_colmajor_trans_rhs(nr_, bn, bk, n_, &rhs_data_[l * bk_ * n_ + j * bn_], prhs_ptr);
+
+ _sgemm_colmajor_macro_kernel_divmn(mr_, nr_, bm, bn, bk, plhs_ptr, prhs_ptr,
+ &res_data_[j * bn_ * m_ + i * bm_], l, m_, bk);
+ }
+ }
+ }
+ }
+ else
+ {
+    throw std::runtime_error{"unsupported shard type."};
+ }
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/sgemm_singlethread.h b/runtimes/libs/srcn/src/sgemm_singlethread.h
new file mode 100644
index 000000000..47954e028
--- /dev/null
+++ b/runtimes/libs/srcn/src/sgemm_singlethread.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_SGEMM_SINGLETHREAD_H__
+#define __NNFW_SRCN_SGEMM_SINGLETHREAD_H__
+
+#include "common.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+typedef enum { rowMajor = 0, colMajor } sgemmType_t;
+
+typedef enum { trans = 0, notrans } sgemmTrans_t;
+
+class sgemm_singlethread
+{
+public:
+ sgemm_singlethread(sgemmType_t major_type, sgemmTrans_t ltrans, sgemmTrans_t rtrans, const int m,
+ const int n, const int k, const float *lhs_data, const float *rhs_data,
+ float *res_data, int cache_div);
+ ~sgemm_singlethread();
+
+ void run();
+
+private:
+ void param_init();
+
+ void compute_rowmajor_nn();
+ void compute_rowmajor_nt();
+ void compute_rowmajor_tn();
+ void compute_rowmajor_tt();
+
+ void compute_colmajor_nn();
+ void compute_colmajor_nt();
+ void compute_colmajor_tn();
+ void compute_colmajor_tt();
+
+ const float *lhs_data_;
+ const float *rhs_data_;
+ float *res_data_;
+
+ sgemmType_t major_type_;
+ sgemmTrans_t ltrans_;
+ sgemmTrans_t rtrans_;
+
+ int m_;
+ int n_;
+ int k_;
+
+ int bm_;
+ int bn_;
+ int bk_;
+
+ int rm_;
+ int rn_;
+ int rk_;
+
+ int nm_;
+ int nn_;
+ int nk_;
+
+ int mr_;
+ int nr_;
+
+ shardType_t shard_type_;
+ int cache_div_;
+};
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_SGEMM_SINGLETHREAD_H__
diff --git a/runtimes/libs/srcn/src/sgemm_test.cc b/runtimes/libs/srcn/src/sgemm_test.cc
new file mode 100644
index 000000000..f06f05701
--- /dev/null
+++ b/runtimes/libs/srcn/src/sgemm_test.cc
@@ -0,0 +1,1883 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/time.h>
+#include <unistd.h>
+
+#include "srcn/conv_type.h"
+#include "srcn/srcn_conv.h"
+//#include "srcn_sgemm.h"
+#include "conv_sgemm_singlethread.h"
+#include "conv_sgemm_multithreads.h"
+//#include "conv_sgemm_batch.h"
+#include "sgemm_singlethread.h"
+#include "conv_winograd.h"
+#include "winograd.h"
+
+//#include "conv_gpu.h"
+//#include "convolutiondepthwise_3x3.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+static void direct_conv_rowmajor(convMat_t *input, convMat_t *output, convMat_t *filter,
+ convParams_t *params)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int inch = input->c;
+ const int outw = output->w;
+ const int outh = output->h;
+ const int outch = output->c;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+ const int dilation_w = params->dilation_w;
+ const int dilation_h = params->dilation_h;
+ const float *input_data = input->data;
+ const float *filter_data = filter->data;
+ float *output_data = output->data;
+
+ for (int out_c = 0; out_c < outch; out_c++)
+ {
+ for (int out_row = 0; out_row < outh; out_row++)
+ {
+ for (int out_col = 0; out_col < outw; out_col++)
+ {
+ const int in_col0 = (out_col * stride_w) - pad_w;
+ const int in_row0 = (out_row * stride_h) - pad_h;
+ float sum = 0.f;
+ for (int in_c = 0; in_c < inch; in_c++)
+ {
+ for (int filter_y = 0; filter_y < kernel_h; filter_y++)
+ {
+ for (int filter_x = 0; filter_x < kernel_w; filter_x++)
+ {
+ const int in_col = in_col0 + filter_x * dilation_w;
+ const int in_row = in_row0 + filter_y * dilation_h;
+
+ if (((unsigned int)in_col < (unsigned int)w) &&
+ ((unsigned int)in_row < (unsigned int)h))
+ {
+ float input_value = input_data[(in_c * h + in_row) * w + in_col];
+ float filter_value =
+ filter_data[((out_c * inch + in_c) * kernel_h + filter_y) * kernel_w +
+ filter_x];
+ sum += (input_value * filter_value);
+ }
+ }
+ }
+ }
+ output_data[(out_c * outh + out_row) * outw + out_col] = sum;
+ }
+ }
+ }
+}
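+
+// The reference above performs O(outch * outh * outw * inch * kernel_h *
+// kernel_w) multiply-adds, handling padding and dilation by skipping
+// out-of-bounds taps; it serves as a ground-truth baseline for the optimized
+// GEMM and Winograd paths exercised in this file.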
+
+static void direct_deconv_rowmajor(convMat_t *input, convMat_t *output, convMat_t *filter,
+ convParams_t *params)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int inch = input->c;
+ const int outw = output->w;
+ const int outh = output->h;
+ const int outch = output->c;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+ const int dilation_w = params->dilation_w;
+ const int dilation_h = params->dilation_h;
+ const float *input_data = input->data;
+ const float *filter_data = filter->data;
+ float *output_data = output->data;
+
+ for (int i = 0; i < outw * outh * outch; i++)
+ {
+ output_data[i] = 0;
+ }
+
+ for (int in_c = 0; in_c < inch; in_c++)
+ {
+ for (int in_row = 0; in_row < h; in_row++)
+ {
+ for (int in_col = 0; in_col < w; in_col++)
+ {
+ const int out_col0 = (in_col * stride_w) - pad_w;
+ const int out_row0 = (in_row * stride_h) - pad_h;
+ float in_value = input_data[(in_c * h + in_row) * w + in_col];
+ for (int out_c = 0; out_c < outch; out_c++)
+ {
+ for (int filter_y = 0; filter_y < kernel_h; filter_y++)
+ {
+ for (int filter_x = 0; filter_x < kernel_w; filter_x++)
+ {
+ const int out_col = out_col0 + filter_x * dilation_w;
+ const int out_row = out_row0 + filter_y * dilation_h;
+
+ if (((unsigned int)out_col < (unsigned int)outw) &&
+ ((unsigned int)out_row < (unsigned int)outh))
+ {
+ float filter_value =
+ filter_data[((in_c * outch + out_c) * kernel_h + filter_y) * kernel_w +
+ filter_x];
+ output_data[(out_c * outh + out_row) * outw + out_col] += filter_value * in_value;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
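+// Naive O(m * n * k) triple-loop reference GEMM: transposed operands are
+// first copied into dense row-major temporaries so the inner loops stay simple.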
+static void direct_sgemm_rowmajor(int Atrans, int Btrans, int m, int n, int k, float *A, float *B,
+ float *C)
+{
+ float *aa, *bb;
+
+ if (Atrans == trans)
+ {
+ aa = (float *)malloc(m * k * sizeof(float));
+ if (!aa)
+ return;
+
+ for (int i = 0; i < k; i++)
+ {
+ for (int j = 0; j < m; j++)
+ {
+ aa[j * k + i] = A[i * m + j];
+ }
+ }
+ }
+ else
+ {
+ aa = A;
+ }
+
+ if (Btrans == trans)
+ {
+    bb = (float *)malloc(n * k * sizeof(float));
+    if (!bb)
+    {
+      // Free the LHS copy before bailing out.
+      if (Atrans == trans)
+        free(aa);
+      return;
+    }
+
+ for (int i = 0; i < n; i++)
+ {
+ for (int j = 0; j < k; j++)
+ {
+ bb[j * n + i] = B[i * k + j];
+ }
+ }
+ }
+ else
+ {
+ bb = B;
+ }
+
+ for (int i = 0; i < m; i++)
+ {
+ for (int j = 0; j < n; j++)
+ {
+ float res = 0.f;
+ for (int l = 0; l < k; l++)
+ {
+ res += aa[i * k + l] * bb[l * n + j];
+ }
+ C[i * n + j] = res;
+ }
+ }
+
+  // Release the transposed temporaries.
+  if (Atrans == trans)
+    free(aa);
+  if (Btrans == trans)
+    free(bb);
+}
+
+/*static void direct_sgemm_kernel(const int k, const int lhs_stride, const int rhs_stride,
+                                  const int res_stride, const float *lhs_ptr,
+                                  const float *rhs_ptr, float *res_ptr)
+{
+ int lstride = lhs_stride << 2;
+ int rstride = rhs_stride << 2;
+ int estride = res_stride << 2;
+ int rstep = rstride << 2;
+
+ int nk = (k >> 2) - 1;
+
+ __asm __volatile (
+ "movi v16.4s, #0x0\n"
+ "movi v17.4s, #0x0\n"
+ "movi v18.4s, #0x0\n"
+ "movi v19.4s, #0x0\n"
+ "movi v20.4s, #0x0\n"
+ "movi v21.4s, #0x0\n"
+ "movi v22.4s, #0x0\n"
+ "movi v23.4s, #0x0\n"
+ "movi v24.4s, #0x0\n"
+ "movi v25.4s, #0x0\n"
+ "movi v26.4s, #0x0\n"
+ "movi v27.4s, #0x0\n"
+ "movi v28.4s, #0x0\n"
+ "movi v29.4s, #0x0\n"
+ "movi v30.4s, #0x0\n"
+ "movi v31.4s, #0x0\n"
+
+ "mov x0, %[lhs_ptr]\n"
+ "add %[lhs_ptr], %[lhs_ptr], #16\n"
+ "ld1 {v0.4s}, [x0]\n"
+ "add x0, x0, %[lstride]\n"
+ "ld1 {v1.4s}, [x0]\n"
+ "add x0, x0, %[lstride]\n"
+ "ld1 {v2.4s}, [x0]\n"
+ "add x0, x0, %[lstride]\n"
+ "ld1 {v3.4s}, [x0]\n"
+ "add x0, x0, %[lstride]\n"
+
+ "mov x1, %[rhs_ptr]\n"
+ "add %[rhs_ptr], %[rhs_ptr], %[rstep]\n"
+ "ld1 {v8.4s, v9.4s}, [x1]\n"
+ "add x1, x1, %[rstride]\n"
+ "ld1 {v10.4s, v11.4s}, [x1]\n"
+ "add x1, x1, %[rstride]\n"
+
+ "1:\n"
+ "fmla v16.4s, v8.4s, v0.s[0]\n"
+ "fmla v17.4s, v9.4s, v0.s[0]\n"
+ "fmla v16.4s, v10.4s, v0.s[1]\n"
+ "fmla v17.4s, v11.4s, v0.s[1]\n"
+ "fmla v18.4s, v8.4s, v1.s[0]\n"
+ "fmla v19.4s, v9.4s, v1.s[0]\n"
+ "fmla v18.4s, v10.4s, v1.s[1]\n"
+ "fmla v19.4s, v11.4s, v1.s[1]\n"
+ "ld1 {v12.4s, v13.4s}, [x1]\n"
+ "fmla v20.4s, v8.4s, v2.s[0]\n"
+ "add x1, x1, %[rstride]\n"
+ "fmla v21.4s, v9.4s, v2.s[0]\n"
+ "ld1 {v14.4s, v15.4s}, [x1]\n"
+ "fmla v20.4s, v10.4s, v2.s[1]\n"
+ "add x1, x1, %[rstride]\n"
+ "fmla v21.4s, v11.4s, v2.s[1]\n"
+ "fmla v22.4s, v8.4s, v3.s[0]\n"
+ "fmla v23.4s, v9.4s, v3.s[0]\n"
+ "fmla v22.4s, v10.4s, v3.s[1]\n"
+ "fmla v23.4s, v11.4s, v3.s[1]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "fmla v16.4s, v12.4s, v0.s[2]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v17.4s, v13.4s, v0.s[2]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "fmla v16.4s, v14.4s, v0.s[3]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v17.4s, v15.4s, v0.s[3]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "fmla v18.4s, v12.4s, v1.s[2]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v19.4s, v13.4s, v1.s[2]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "fmla v18.4s, v14.4s, v1.s[3]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v19.4s, v15.4s, v1.s[3]\n"
+ "fmla v20.4s, v12.4s, v2.s[2]\n"
+ "fmla v21.4s, v13.4s, v2.s[2]\n"
+ "fmla v20.4s, v14.4s, v2.s[3]\n"
+ "fmla v21.4s, v15.4s, v2.s[3]\n"
+ "fmla v22.4s, v12.4s, v3.s[2]\n"
+ "fmla v23.4s, v13.4s, v3.s[2]\n"
+ "fmla v22.4s, v14.4s, v3.s[3]\n"
+ "fmla v23.4s, v15.4s, v3.s[3]\n"
+
+ "mov x0, %[lhs_ptr]\n"
+ "add %[lhs_ptr], %[lhs_ptr], #16\n"
+
+ "fmla v24.4s, v8.4s, v4.s[0]\n"
+ "fmla v25.4s, v9.4s, v4.s[0]\n"
+ "ld1 {v0.4s}, [x0]\n"
+ "fmla v24.4s, v10.4s, v4.s[1]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v25.4s, v11.4s, v4.s[1]\n"
+ "ld1 {v1.4s}, [x0]\n"
+ "fmla v26.4s, v8.4s, v5.s[0]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v27.4s, v9.4s, v5.s[0]\n"
+ "ld1 {v2.4s}, [x0]\n"
+ "fmla v26.4s, v10.4s, v5.s[1]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v27.4s, v11.4s, v5.s[1]\n"
+ "ld1 {v3.4s}, [x0]\n"
+ "fmla v28.4s, v8.4s, v6.s[0]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v29.4s, v9.4s, v6.s[0]\n"
+ "fmla v28.4s, v10.4s, v6.s[1]\n"
+ "fmla v29.4s, v11.4s, v6.s[1]\n"
+ "fmla v30.4s, v8.4s, v7.s[0]\n"
+ "fmla v31.4s, v9.4s, v7.s[0]\n"
+ "fmla v30.4s, v10.4s, v7.s[1]\n"
+ "fmla v31.4s, v11.4s, v7.s[1]\n"
+
+ "mov x1, %[rhs_ptr]\n"
+ "add %[rhs_ptr], %[rhs_ptr], %[rstep]\n"
+
+ "fmla v24.4s, v12.4s, v4.s[2]\n"
+ "fmla v25.4s, v13.4s, v4.s[2]\n"
+ "ld1 {v8.4s, v9.4s}, [x1]\n"
+ "fmla v24.4s, v14.4s, v4.s[3]\n"
+ "add x1, x1, %[rstride]\n"
+ "fmla v25.4s, v15.4s, v4.s[3]\n"
+ "ld1 {v10.4s, v11.4s}, [x1]\n"
+ "fmla v26.4s, v12.4s, v5.s[2]\n"
+ "add x1, x1, %[rstride]\n"
+ "fmla v27.4s, v13.4s, v5.s[2]\n"
+ "fmla v26.4s, v14.4s, v5.s[3]\n"
+ "fmla v27.4s, v15.4s, v5.s[3]\n"
+ "fmla v28.4s, v12.4s, v6.s[2]\n"
+ "fmla v29.4s, v13.4s, v6.s[2]\n"
+ "fmla v28.4s, v14.4s, v6.s[3]\n"
+ "fmla v29.4s, v15.4s, v6.s[3]\n"
+ "fmla v30.4s, v12.4s, v7.s[2]\n"
+ "fmla v31.4s, v13.4s, v7.s[2]\n"
+ "subs %w[nk], %w[nk], #1\n"
+ "fmla v30.4s, v14.4s, v7.s[3]\n"
+ "fmla v31.4s, v15.4s, v7.s[3]\n"
+ "bne 1b\n"
+
+ "fmla v16.4s, v8.4s, v0.s[0]\n"
+ "fmla v17.4s, v9.4s, v0.s[0]\n"
+ "fmla v16.4s, v10.4s, v0.s[1]\n"
+ "fmla v17.4s, v11.4s, v0.s[1]\n"
+ "fmla v18.4s, v8.4s, v1.s[0]\n"
+ "fmla v19.4s, v9.4s, v1.s[0]\n"
+ "fmla v18.4s, v10.4s, v1.s[1]\n"
+ "fmla v19.4s, v11.4s, v1.s[1]\n"
+ "ld1 {v12.4s, v13.4s}, [x1]\n"
+ "fmla v20.4s, v8.4s, v2.s[0]\n"
+ "add x1, x1, %[rstride]\n"
+ "fmla v21.4s, v9.4s, v2.s[0]\n"
+ "ld1 {v14.4s, v15.4s}, [x1]\n"
+ "fmla v20.4s, v10.4s, v2.s[1]\n"
+ "add x1, x1, %[rstride]\n"
+ "fmla v21.4s, v11.4s, v2.s[1]\n"
+ "fmla v22.4s, v8.4s, v3.s[0]\n"
+ "fmla v23.4s, v9.4s, v3.s[0]\n"
+ "fmla v22.4s, v10.4s, v3.s[1]\n"
+ "fmla v23.4s, v11.4s, v3.s[1]\n"
+
+ "ld1 {v4.4s}, [x0]\n"
+ "fmla v16.4s, v12.4s, v0.s[2]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v17.4s, v13.4s, v0.s[2]\n"
+ "ld1 {v5.4s}, [x0]\n"
+ "fmla v16.4s, v14.4s, v0.s[3]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v17.4s, v15.4s, v0.s[3]\n"
+ "ld1 {v6.4s}, [x0]\n"
+ "fmla v18.4s, v12.4s, v1.s[2]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v19.4s, v13.4s, v1.s[2]\n"
+ "ld1 {v7.4s}, [x0]\n"
+ "fmla v18.4s, v14.4s, v1.s[3]\n"
+ "add x0, x0, %[lstride]\n"
+ "fmla v19.4s, v15.4s, v1.s[3]\n"
+ "fmla v20.4s, v12.4s, v2.s[2]\n"
+ "fmla v21.4s, v13.4s, v2.s[2]\n"
+ "fmla v20.4s, v14.4s, v2.s[3]\n"
+ "fmla v21.4s, v15.4s, v2.s[3]\n"
+ "fmla v22.4s, v12.4s, v3.s[2]\n"
+ "fmla v23.4s, v13.4s, v3.s[2]\n"
+ "fmla v22.4s, v14.4s, v3.s[3]\n"
+ "fmla v23.4s, v15.4s, v3.s[3]\n"
+
+ "mov x0, %[res_ptr]\n"
+ "fmla v24.4s, v8.4s, v4.s[0]\n"
+ "fmla v25.4s, v9.4s, v4.s[0]\n"
+ "st1 {v16.4s, v17.4s}, [x0]\n"
+ "add x0, x0, %[estride]\n"
+ "fmla v24.4s, v10.4s, v4.s[1]\n"
+ "fmla v25.4s, v11.4s, v4.s[1]\n"
+ "st1 {v18.4s, v19.4s}, [x0]\n"
+ "add x0, x0, %[estride]\n"
+ "fmla v26.4s, v8.4s, v5.s[0]\n"
+ "fmla v27.4s, v9.4s, v5.s[0]\n"
+ "st1 {v20.4s, v21.4s}, [x0]\n"
+ "add x0, x0, %[estride]\n"
+ "fmla v26.4s, v10.4s, v5.s[1]\n"
+ "fmla v27.4s, v11.4s, v5.s[1]\n"
+ "st1 {v22.4s, v23.4s}, [x0]\n"
+ "add x0, x0, %[estride]\n"
+ "fmla v28.4s, v8.4s, v6.s[0]\n"
+ "fmla v29.4s, v9.4s, v6.s[0]\n"
+ "fmla v28.4s, v10.4s, v6.s[1]\n"
+ "fmla v29.4s, v11.4s, v6.s[1]\n"
+ "fmla v30.4s, v8.4s, v7.s[0]\n"
+ "fmla v31.4s, v9.4s, v7.s[0]\n"
+ "fmla v30.4s, v10.4s, v7.s[1]\n"
+ "fmla v31.4s, v11.4s, v7.s[1]\n"
+
+ "fmla v24.4s, v12.4s, v4.s[2]\n"
+ "fmla v25.4s, v13.4s, v4.s[2]\n"
+ "fmla v24.4s, v14.4s, v4.s[3]\n"
+ "fmla v25.4s, v15.4s, v4.s[3]\n"
+ "fmla v26.4s, v12.4s, v5.s[2]\n"
+ "fmla v27.4s, v13.4s, v5.s[2]\n"
+ "st1 {v24.4s, v25.4s}, [x0]\n"
+ "add x0, x0, %[estride]\n"
+ "fmla v26.4s, v14.4s, v5.s[3]\n"
+ "fmla v27.4s, v15.4s, v5.s[3]\n"
+ "fmla v28.4s, v12.4s, v6.s[2]\n"
+ "fmla v29.4s, v13.4s, v6.s[2]\n"
+ "st1 {v26.4s, v27.4s}, [x0]\n"
+ "add x0, x0, %[estride]\n"
+ "fmla v28.4s, v14.4s, v6.s[3]\n"
+ "fmla v29.4s, v15.4s, v6.s[3]\n"
+ "fmla v30.4s, v12.4s, v7.s[2]\n"
+ "fmla v31.4s, v13.4s, v7.s[2]\n"
+ "st1 {v28.4s, v29.4s}, [x0]\n"
+ "add x0, x0, %[estride]\n"
+ "fmla v30.4s, v14.4s, v7.s[3]\n"
+ "fmla v31.4s, v15.4s, v7.s[3]\n"
+ "st1 {v30.4s, v31.4s}, [x0]\n"
+ :[lhs_ptr] "+r" (lhs_ptr), [rhs_ptr] "+r" (rhs_ptr), [res_ptr] "+r" (res_ptr),
+ [nk] "+r" (nk)
+      : [lstride] "r" (lstride), [rstride] "r" (rstride), [estride] "r" (estride),
+        [rstep] "r" (rstep)
+ : "x0", "x1", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7",
+ "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18",
+ "v19", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
+ );
+}*/
+
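+// Reference direct convolution over interleaved-channel ("column-major")
+// data: every output element accumulates input * filter over its receptive
+// field. The unsigned comparisons below fold the two boundary checks
+// (in_col >= 0 and in_col < w) into a single test, skipping padded taps.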
+static void direct_conv_colmajor(convMat_t *input, convMat_t *output, convMat_t *filter,
+ convParams_t *params)
+{
+ const int w = input->w;
+ const int h = input->h;
+ const int inch = input->c;
+ const int outw = output->w;
+ const int outh = output->h;
+ const int outch = output->c;
+ const int kernel_w = params->kernel_w;
+ const int kernel_h = params->kernel_h;
+ const int stride_w = params->stride_w;
+ const int stride_h = params->stride_h;
+ const int pad_w = params->pad_w;
+ const int pad_h = params->pad_h;
+ const int dilation_w = params->dilation_w;
+ const int dilation_h = params->dilation_h;
+ const float *input_data = input->data;
+ const float *filter_data = filter->data;
+ float *output_data = output->data;
+
+ for (int out_row = 0; out_row < outh; out_row++)
+ {
+ for (int out_col = 0; out_col < outw; out_col++)
+ {
+ const int in_col0 = (out_col * stride_w) - pad_w;
+ const int in_row0 = (out_row * stride_h) - pad_h;
+
+ for (int out_c = 0; out_c < outch; out_c++)
+ {
+ float sum = 0.f;
+ for (int filter_y = 0; filter_y < kernel_h; filter_y++)
+ {
+ for (int filter_x = 0; filter_x < kernel_w; filter_x++)
+ {
+ const int in_col = in_col0 + filter_x * dilation_w;
+ const int in_row = in_row0 + filter_y * dilation_h;
+
+ if (((unsigned int)in_col < (unsigned int)w) &&
+ ((unsigned int)in_row < (unsigned int)h))
+ {
+ for (int in_c = 0; in_c < inch; in_c++)
+ {
+ float input_value = input_data[(in_row * w + in_col) * inch + in_c];
+ float filter_value =
+ filter_data[((filter_y * kernel_w + filter_x) * inch + in_c) * outch + out_c];
+ sum += (input_value * filter_value);
+ }
+ }
+ }
+ }
+ output_data[(out_row * outw + out_col) * outch + out_c] = sum;
+ }
+ }
+ }
+}
+
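+// Naive column-major SGEMM kept as a correctness baseline: A and/or B are
+// optionally transposed into temporaries, then C[j * m + i] accumulates the
+// sum over l of aa[l * m + i] * bb[j * k + l].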
+static void direct_sgemm_colmajor(int Atrans, int Btrans, int m, int n, int k, float *A, float *B,
+ float *C)
+{
+ float *aa, *bb;
+
+ if (Atrans)
+ {
+ aa = (float *)malloc(m * k * sizeof(float));
+ if (!aa)
+ return;
+
+ for (int i = 0; i < k; i++)
+ {
+ for (int j = 0; j < m; j++)
+ {
+ aa[i * m + j] = A[j * k + i];
+ }
+ }
+ }
+ else
+ {
+ aa = A;
+ }
+
+ if (Btrans)
+ {
+ bb = (float *)malloc(n * k * sizeof(float));
+    if (!bb)
+    {
+      if (Atrans)
+        free(aa); // release the transposed copy of A before bailing out
+      return;
+    }
+
+ for (int i = 0; i < n; i++)
+ {
+ for (int j = 0; j < k; j++)
+ {
+ bb[i * k + j] = B[j * n + i];
+ }
+ }
+ }
+ else
+ {
+ bb = B;
+ }
+
+ for (int i = 0; i < m; i++)
+ {
+ for (int j = 0; j < n; j++)
+ {
+ float res = 0.f;
+ for (int l = 0; l < k; l++)
+ {
+ res += bb[j * k + l] * aa[l * m + i];
+ }
+ C[j * m + i] = res;
+ }
+ }
+
+  // Release the temporary transposed copies allocated above.
+  if (Atrans)
+    free(aa);
+  if (Btrans)
+    free(bb);
+}
+
+#if 0
+static int test_sgemm(int m, int n, int k, int loops)
+{
+ struct timeval start, end;
+ float total_time = 0.f;
+
+ const int mb = 180;
+ const int nb = 1440;
+ const int kb = 512;
+
+ const int mr = 4;
+ const int nr = 12;
+
+#if 0
+ const int pm = (m + mr - 1) / mr * mr;
+ const int pn = (n + nr - 1) / nr * nr;
+ const int pk = k;
+#else
+ const int pm = (mb + mr - 1) / mr * mr;
+ const int pn = (nb + nr - 1) / nr * nr;
+ const int pk = kb;
+#endif
+ const int nm = (m + mb - 1) / mb;
+ const int nn = (n + nb - 1) / nb;
+ const int nk = (k + kb - 1) / kb;
+
+ const int rm = m % mb;
+ const int rn = n % nb;
+ const int rk = k % kb;
+
+ float *A = (float *)malloc(m * k * sizeof(float));
+ if(!A) return 0;
+
+ for(int i = 0 ; i < m * k; i++)
+ {
+ A[i] = 0.001 + i * 0.000001;
+ }
+
+ float *B = (float *)malloc(k * n * sizeof(float));
+ if(!B) return 0;
+
+ for(int i = 0 ; i < n * k; i++)
+ {
+ B[i] = 0.001 - i * 0.000001;
+ }
+
+ float *C = (float *)malloc(m * n * sizeof(float));
+ if(!C) return 0;
+
+#if 0
+ float *PA = (float *)malloc(pm * pk * sizeof(float));
+ if(!PA) return 0;
+
+ float *PB = (float *)malloc(pk * pn * sizeof(float));
+ if(!PB) return 0;
+#else
+ float PA[pm * pk];
+ float PB[pk * pn];
+#endif
+
+  for(int nloop = 0; nloop < loops; nloop++)
+  {
+ gettimeofday(&start, NULL);
+
+ //pack_rowmajor_notrans_lhs(mr, m, k, k, A, PA);
+ //pack_rowmajor_notrans_rhs(nr, n, k, n, B, PB);
+#if 1
+ for (int j = 0; j < nn; j++)
+ {
+ const int _nb = (j != nn - 1 || rn == 0) ? nb : rn;
+ for (int l = 0; l < nk; l++)
+ {
+ const int _kb = (l != nk - 1 || rk == 0) ? kb : rk;
+ pack_rowmajor_notrans_rhs(nr, _nb, _kb, 1, n, &B[l * kb * n + j * nb], PB);
+ for(int i = 0; i < nm; i++)
+ {
+ const int _mb = (i != nm - 1 || rm == 0) ? mb : rm;
+ pack_rowmajor_notrans_lhs(mr, _mb, _kb, 1, k, &A[i * mb * k + l * kb], PA);
+ sgemm_rowmajor_macro_kernel_divnm(mr, nr, _mb, _nb, _kb, PA, PB, &C[i * mb * n + j * nb], l, n, _kb);
+ //sgemm_rowmajor_macro_kernel_divnm(mr, nr, _mb, _nb, _kb, &PA[i * mb * k + l * kb], &PB[l * kb * pn + j * nb], &C[i * mb * n + j * nb], l, n, pk);
+ }
+ }
+ }
+#else
+ for (int j = 0; j < nm; j++)
+ {
+ const int _mb = (j != nm - 1 || rm == 0) ? mb : rm;
+ for (int l = 0; l < nk; l++)
+ {
+ const int _kb = (l != nk - 1 || rk == 0) ? kb : rk;
+ pack_rowmajor_notrans_lhs(mr, _mb, _kb, 1, k, &A[j * mb * k + l * kb], PA);
+ for(int i = 0; i < nn; i++)
+ {
+ const int _nb = (i != nn - 1 || rn == 0) ? nb : rn;
+ pack_rowmajor_notrans_rhs(nr, _nb, _kb, 1, n, &B[l * kb * n + i * nb], PB);
+ sgemm_rowmajor_macro_kernel_divmn(mr, nr, _mb, _nb, _kb, PA, PB, &C[j * mb * n + i * nb], l, n, _kb);
+ //sgemm_rowmajor_macro_kernel_divmn(mr, nr, _mb, _nb, _kb, &PA[i * mb * k + l * kb], &PB[l * kb * pn + j * nb], &C[i * mb * n + j * nb], l, n, pk);
+ }
+ }
+ }
+#endif
+ gettimeofday(&end, NULL);
+    total_time += ((end.tv_sec * 1000000 + end.tv_usec)
+                   - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+
+ int div = m * n < 16 ? m * n : 16;
+ int num = m * n > 64 ? 64 : m * n;
+
+ float *c_ptr = &C[0];
+ for(int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if((i + 1) % div == 0) printf("\n");
+ }
+
+ printf("\n");
+
+ c_ptr = &C[m * n - num];
+ for(int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if((i + 1) % div == 0) printf("\n");
+ }
+
+ printf("\n");
+
+  long long total_size = (long long)m * n * k * 2;
+  printf("Average time: %.2f ms, total size: %lld, (GFLOPS: %.2f)\n", total_time / loops,
+         total_size, (double)total_size / (total_time / loops) / 1000000);
+
+  free(A);
+  free(B);
+  free(C);
+
+  //free(PA);
+  //free(PB);
+
+  return 0;
+}
+#endif
+
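+// Benchmark harness: runs either the naive column-major SGEMM (type 0) or
+// the packed single-threaded SGEMM class (type 1) `loops` times, then prints
+// the first/last corners of C and the average throughput.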
+static int test_sgemm(int m, int n, int k, int type, int loops)
+{
+ struct timeval start, end;
+ float total_time = 0.f;
+
+ // printf("1.\n");
+
+ float *A = (float *)malloc(m * k * sizeof(float));
+ if (!A)
+ return 0;
+
+ for (int i = 0; i < m * k; i++)
+ {
+ A[i] = 0.001 + i * 0.001; // i * 0.000001;
+ }
+
+ float *B = (float *)malloc(k * n * sizeof(float));
+ if (!B)
+ return 0;
+
+ for (int i = 0; i < n * k; i++)
+ {
+ B[i] = 0.001 - i * 0.001; // - i * 0.000001;
+ }
+
+ float *C = (float *)malloc(m * n * sizeof(float));
+ if (!C)
+ return 0;
+
+  for (int nloop = 0; nloop < loops; nloop++)
+  {
+ gettimeofday(&start, NULL);
+
+ if (type == 0)
+ {
+ // direct_sgemm_rowmajor(notrans, notrans, m, n, k, A, B, C);
+ direct_sgemm_colmajor(notrans, notrans, m, n, k, A, B, C);
+ }
+
+ else if (type == 1)
+ {
+ class sgemm_singlethread my_gemm(colMajor, notrans, notrans, m, n, k, A, B, C, 1);
+ my_gemm.run();
+ }
+
+ /*else if(type == 2)
+ {
+ for(int i = 0; i < m / 8; i++)
+ {
+ for(int j = 0; j < n / 8; j++)
+ {
+ direct_sgemm_kernel(k, k, n, n, A + i * 8 * k, B + j * 8, C + i * 8 * n + j * 8);
+ }
+ }
+ }*/
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+
+ int div = m * n < 16 ? m * n : 16;
+ int num = m * n > 64 ? 64 : m * n;
+
+ float *c_ptr = &C[0];
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+ c_ptr = &C[m * n - num];
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+  long long total_size = (long long)m * n * k * 2;
+  printf("Average time: %.2f ms, total size: %lld, (GFLOPS: %.2f)\n", total_time / loops,
+         total_size, (double)total_size / (total_time / loops) / 1000000);
+
+ free(A);
+ free(B);
+ free(C);
+
+ return 0;
+}
+
+void weight_tensorflow2caffe(float *out, float *in, int H, int W, int C, int N)
+{ // HWCN ---> NCHW
+ for (int h = 0; h < H; ++h)
+ {
+ for (int w = 0; w < W; ++w)
+ {
+ for (int c = 0; c < C; ++c)
+ {
+ for (int n = 0; n < N; ++n)
+ {
+ int index_in = h * W * C * N + w * C * N + c * N + n;
+ int index_out = n * C * H * W + c * H * W + h * W + w;
+ // printf("%3d <--- %3d\n", index_out, index_in);
+ out[index_out] = in[index_in];
+ }
+ }
+ }
+ }
+}
+
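+// Transform the test filter into the winograd domain: build the transform
+// operator G (x) G via a Kronecker product, reorder the weights from HWCN to
+// NCHW, and apply the operator with a single SGEMM, producing one M x M tile
+// per (input channel, output channel) pair.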
+void trans_weight2winograd(const convMat_t &_kernel, float **winograd_weight)
+{
+ const double *G;
+ const int kernel_size = _kernel.h;
+ const int channels = _kernel.c;
+ const int num_output = _kernel.n;
+
+ int tile_h_in_, tile_w_in_;
+ int M, N;
+
+ /*Step 1: transfer weight to winograd domain*/
+ if (kernel_size == 3)
+ {
+ M = winograd_para_3x3s1::M;
+ N = winograd_para_3x3s1::N;
+ G = winograd_para_3x3s1::getG();
+ }
+ else
+ {
+ M = winograd_para_5x5s1::M;
+ N = winograd_para_5x5s1::N;
+ G = winograd_para_5x5s1::getG();
+ }
+
+ tile_h_in_ = tile_w_in_ = M;
+
+ float *winograd_g = new float[M * M * N * N];
+ if (NULL == winograd_g)
+ return;
+ kronecker_product(winograd_g, G, G, M, N, M, N);
+
+ *winograd_weight = new float[tile_h_in_ * tile_w_in_ * channels * num_output];
+
+ if (NULL == *winograd_weight)
+ return;
+
+ float *weight_data_tran = new float[_kernel.h * _kernel.w * _kernel.c * _kernel.n];
+ if (NULL == weight_data_tran)
+ return;
+ weight_tensorflow2caffe(weight_data_tran, _kernel.data, kernel_size, kernel_size, channels,
+ num_output);
+
+ class sgemm_singlethread sgemm(rowMajor, notrans, trans, tile_h_in_ * tile_w_in_,
+ channels * num_output, kernel_size * kernel_size, winograd_g,
+ weight_data_tran, *winograd_weight, 1);
+
+ sgemm.run();
+
+  delete[] weight_data_tran;
+
+  /*Once the weights are in the winograd domain, the transform operator is no
+    longer needed either.*/
+  delete[] winograd_g;
+}
+
+static int test_conv(const int w, const int h, const int kernel_size, const int stride,
+ const int inch, const int outch, const int padding, const int conv_type,
+ const int thread_num, const int loops)
+{
+ struct timeval start, end;
+ float total_time = 0.f;
+
+ struct timeval start1, end1;
+ float total_time1 = 0.f;
+
+ const int dilation = 1;
+
+ const int kernel_dilation = dilation * (kernel_size - 1) + 1;
+
+ convMat_t input;
+ convMat_t output;
+ convMat_t filter;
+ convParams_t params;
+
+ int pad_l, pad_r, pad_t, pad_b;
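+  // TensorFlow-style SAME padding: choose a total pad so the output size
+  // becomes ceil(w / stride), then split it between the low and high sides.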
+ if (padding)
+ {
+ int pad_w = kernel_dilation + (w - 1) / stride * stride - w;
+ int pad_h = kernel_dilation + (h - 1) / stride * stride - h;
+ pad_l = pad_w / 2;
+ pad_r = pad_w - pad_l;
+ pad_t = pad_h / 2;
+ pad_b = pad_h - pad_t;
+ }
+ else
+ {
+ pad_l = pad_r = pad_t = pad_b = 0;
+ }
+
+ input.w = w;
+ input.h = h;
+ input.c = inch;
+ input.n = 1;
+#ifdef NCNN
+ input.data =
+ (float *)malloc(alignSize(input.w * input.h, 16 / sizeof(float)) * input.c * sizeof(float));
+#else
+ input.data = (float *)malloc(input.w * input.h * input.c * sizeof(float));
+#endif
+
+ if (!input.data)
+ return 0;
+
+ output.w = (w + pad_l + pad_r - kernel_dilation) / stride + 1;
+ output.h = (h + pad_t + pad_b - kernel_dilation) / stride + 1;
+ output.c = outch;
+ output.n = 1;
+#ifdef NCNN
+ output.data = (float *)malloc(alignSize(output.w * output.h, 16 / sizeof(float)) * output.c *
+ sizeof(float));
+#else
+ output.data = (float *)malloc(output.w * output.h * output.c * sizeof(float));
+#endif
+
+ if (!output.data)
+ return 0;
+
+ for (int i = 0; i < output.w * output.h * output.c; i++)
+ {
+ output.data[i] = 0;
+ }
+
+ filter.w = kernel_size;
+ filter.h = kernel_size;
+ filter.c = inch;
+ filter.n = outch;
+ filter.data = (float *)malloc(filter.w * filter.h * filter.c * filter.n * sizeof(float));
+ if (!filter.data)
+ return 0;
+
+ for (int i = 0; i < input.w * input.h * input.c; i++)
+ {
+ input.data[i] = 0.001 + i * 0.000001;
+ }
+
+#if 1
+ for (int i = 0; i < filter.w * filter.h * filter.c * filter.n; i++)
+ {
+ filter.data[i] = 0.001 - i * 0.000001;
+ }
+#else
+ for (int i = 0; i < filter.w * filter.h * filter.c * filter.n; i++)
+ {
+ if ((i + 1) % 15 == 0)
+ filter.data[i] = 0.001 - i * 0.000001;
+ else
+ filter.data[i] = 0;
+ }
+#endif
+ params.kernel_w = kernel_size;
+ params.kernel_h = kernel_size;
+ params.stride_w = stride;
+ params.stride_h = stride;
+ params.padding = padding;
+ params.pad_w = pad_l;
+ params.pad_h = pad_t;
+ params.dilation_w = dilation;
+ params.dilation_h = dilation;
+
+ const int m = output.c;
+ const int n = output.w * output.h;
+ const int k = params.kernel_h * params.kernel_w * input.c;
+
+ // ocl_context_t context;
+ size_t local_min[2];
+ /**
+ if(conv_type == 14 || conv_type == 15 || conv_type == 6)
+ {
+ if(init_gpu(&context) < 0) return -1;
+ //if(conv_type ==14 || conv_type == 5) sgemm_ocltune(&context, m, n, (k < 1024 ? k :
+ 1024), local_min);
+ //else if(conv_type == 6)
+ {
+ if(kernel_size == 3) directconv_3x3S1_tune(&context, &input, &filter, &output,
+ local_min);
+ else if(kernel_size == 1) directconv_1x1S1_tune(&context, &input, &filter, &output,
+ local_min);
+ }
+ //local_min[0] = 1; local_min[1] = 1;
+ }
+ **/
+ if (conv_type == 0)
+ {
+ for (int nloop = 0; nloop < loops; nloop++)
+ {
+ gettimeofday(&start, NULL);
+
+ direct_conv_rowmajor(&input, &output, &filter, &params);
+ // direct_conv_colmajor(&input, &output, &filter, &params);
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+ }
+ else if (conv_type == 1)
+ {
+ for (int nloop = 0; nloop < loops; nloop++)
+ {
+ // printf("nloop = %d, thread_num = %d\n", nloop, thread_num);
+ // class srcn_sgemm my_gemm(input, filter, output, params, thread_num, col_major);
+ gettimeofday(&start, NULL);
+
+ /*if(thread_num == 1)
+ {
+ class conv_sgemm_singlethread my_gemm(input, filter, output, params, col_major);
+ my_gemm.run();
+ }
+ else
+ {
+ class conv_sgemm_multithreads my_gemm(input, filter, output, params, thread_num,
+ col_major);
+ my_gemm.run();
+ }*/
+
+ srcn_convolution2D(input, filter, output, params, NULL, thread_num, row_major);
+
+ // printf("sync\n");
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+ }
+ else if (conv_type == 2)
+ {
+ float *winograd_weight;
+
+ // trans_weight2winograd(filter, &winograd_weight);
+
+ winogradParams_t wparams = {params.kernel_w,
+ params.kernel_h,
+ params.stride_w,
+ params.stride_h,
+ params.dilation_w,
+ params.dilation_h,
+ 1,
+ w,
+ h,
+ input.c,
+ output.c,
+ thread_num,
+ col_major,
+ filter.data};
+ winograd_weight = trans_weight2winograd(wparams);
+
+ for (int nloop = 0; nloop < loops; nloop++)
+ {
+ gettimeofday(&start, NULL);
+
+ // class conv_winograd my_sgemm(input, output, params, col_major, winograd_weight, thread_num,
+ // w * h, n);
+ // my_sgemm.run();
+
+ srcn_convolution2D(input, filter, output, params, winograd_weight, thread_num, row_major);
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+ }
+ else if (conv_type == 3)
+ {
+ void *sparse_weight = trans_weight2sparse(filter);
+
+ for (int nloop = 0; nloop < loops; nloop++)
+ {
+ gettimeofday(&start, NULL);
+
+ srcn_sparse_convolution2D(input, output, params, sparse_weight, thread_num, row_major);
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+
+ sparse_release(outch, sparse_weight);
+ } /**
+else if(conv_type == 4)
+{
+#if 0
+ cl_int err;
+ convlib::load_opencl("./libmali.so");
+ const int mpad = (m + 4 - 1) / 4 * 4;
+ const int npad = (n + 4 - 1) / 4 * 4;
+ cl_mem lhs_gpu = convlib::clCreateBuffer(context.context, CL_MEM_READ_WRITE |
+CL_MEM_ALLOC_HOST_PTR, mpad * k * sizeof(float), NULL, &err);
+ if(err != CL_SUCCESS)
+ {
+ printf("err = %d@%s:%d\n", err, __FUNCTION__, __LINE__);
+ return -1;
+ }
+
+ cl_image_format rhs_format = {CL_RGBA, CL_FLOAT};
+ cl_image_desc desc =
+ {
+ CL_MEM_OBJECT_IMAGE2D,
+ (size_t)npad / 4,
+ (size_t)k,
+ 0, 0,
+ 0,
+ 0, 0, 0, 0
+ };
+ cl_mem rhs_gpu = convlib::clCreateImage(context.context, CL_MEM_READ_ONLY |
+CL_MEM_ALLOC_HOST_PTR, &rhs_format, &desc, NULL, &err);
+ if(err != CL_SUCCESS)
+ {
+ printf("err = %d@%s:%d\n", err, __FUNCTION__, __LINE__);
+ return -1;
+ }
+
+ cl_mem rhs_gpu = convlib::clCreateBuffer(context.context, CL_MEM_READ_WRITE |
+CL_MEM_ALLOC_HOST_PTR, npad * k * sizeof(float), NULL, &err);
+ if(err != CL_SUCCESS)
+ {
+ printf("err = %d@%s:%d\n", err, __FUNCTION__, __LINE__);
+ return -1;;
+ }
+
+ cl_mem res_gpu = convlib::clCreateBuffer(context.context, CL_MEM_READ_WRITE |
+CL_MEM_ALLOC_HOST_PTR, mpad * npad * sizeof(float), NULL, &err);
+ if(err != CL_SUCCESS)
+ {
+ printf("err = %d@%s:%d\n", err, __FUNCTION__, __LINE__);
+ return -1;
+ }
+#endif
+ for(int nloop = 0; nloop < loops + 1; nloop++)
+ {
+ gettimeofday(&start, NULL);
+
+ //cl_mem _res_gpu = conv2D_gpu_sgemm(&context, &input, &filter, &output, &params, local_min,
+lhs_gpu, rhs_gpu, res_gpu);
+
+ //get_result_gpu(&context, output.data + gpu_data_off, _res_gpu, m, n);
+ srcn_convolution2D_gpu(input, filter, output, params, row_major);
+
+ gettimeofday(&end, NULL);
+
+ if(nloop > 0) total_time += ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000
++ start.tv_usec))/1000;
+ }
+}
+else if(conv_type == 5)
+{
+
+ for(int nloop = 0; nloop < loops + 1; nloop++)
+ {
+ gettimeofday(&start, NULL);
+
+ //cl_mem res_gpu = conv2D_gpu_sgemm(&context, &input, &filter, &output, &params, local_min);
+
+ //clFlush(context.cmdQueue);
+ gettimeofday(&start1, NULL);
+ #if 1
+ srcn_convolution2D(input, filter, output, params, NULL, thread_num, row_major
+
+ #endif
+ //usleep(80 * 1000);
+ gettimeofday(&end1, NULL);
+ total_time1 += ((end1.tv_sec * 1000000 + end1.tv_usec) - (start1.tv_sec * 1000000 +
+start1.tv_usec))/1000;
+
+ //get_result_gpu(&context, output.data + gpu_data_off, res_gpu, m, n);
+
+ srcn_convolution2D_dpu(input, filter, output, params, row_major);
+
+ gettimeofday(&end, NULL);
+ if(nloop > 0) total_time += ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000
++ start.tv_usec))/1000;
+ }
+}
+else if(conv_type == 6)
+{
+ for(int nloop = 0; nloop < loops; nloop++)
+ {
+ gettimeofday(&start, NULL);
+
+ if(kernel_size == 3 && stride == 1 && padding == 0)
+ {
+ conv2D_gpu_directconv_3x3S1(&context, &input, &filter, &output, &params, local_min);
+ }
+ else if(kernel_size == 1 && stride == 1 && padding == 0)
+ {
+ conv2D_gpu_directconv_1x1S1(&context, &input, &filter, &output, &params, local_min);
+ }
+
+ gettimeofday(&end, NULL);
+ total_time += ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 +
+start.tv_usec))/1000;
+ }
+}**/
+
+ int div = m * n < 16 ? m * n : 16;
+ int num = m * n > 64 ? 64 : m * n;
+
+ if (conv_type < 4)
+ printf("[CPU RESULT]\n");
+ else if (conv_type == 4)
+ printf("[GPU RESULT]\n");
+ else if (conv_type == 5)
+ printf("[DPU RESULT]\n");
+ float *c_ptr = output.data;
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+ c_ptr = &output.data[m * n - num];
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+  long long total_size = (long long)m * n * k * 2;
+  printf(
+      "Average time: %.2f ms, CPU time: %.2f ms, total size: %lld, (GFLOPS: %.2f)\n",
+      total_time / loops, total_time1 / loops, total_size,
+      (double)total_size / (total_time / loops) / 1000000);
+
+ free(input.data);
+ free(output.data);
+ free(filter.data);
+
+ return 0;
+}
+
+static int test_deconv(const int w, const int h, const int kernel_size, const int stride,
+ const int inch, const int outch, const int padding, const int conv_type,
+ const int thread_num, const int loops)
+{
+ struct timeval start, end;
+ float total_time = 0.f;
+
+ const int dilation = 1;
+
+ const int kernel_dilation = dilation * (kernel_size - 1) + 1;
+
+ convMat_t input;
+ convMat_t output;
+ convMat_t filter;
+ convParams_t params;
+
+ int pad_l, pad_r, pad_t, pad_b;
+ if (padding)
+ {
+ int pad_w = kernel_dilation - 1;
+ int pad_h = kernel_dilation - 1;
+ pad_l = pad_w / 2;
+ pad_r = pad_w - pad_l;
+ pad_t = pad_h / 2;
+ pad_b = pad_h - pad_t;
+ }
+ else
+ {
+ pad_l = pad_r = pad_t = pad_b = 0;
+ }
+
+ input.w = w;
+ input.h = h;
+ input.c = inch;
+ input.data = (float *)malloc(input.w * input.h * input.c * sizeof(float));
+ if (!input.data)
+ return 0;
+
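+  // Transposed convolution: the output grows by the stride between input
+  // pixels plus the dilated kernel extent, minus the total padding.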
+ // output.w = (w + pad_l + pad_r - kernel_dilation) / stride + 1;
+ // output.h = (h + pad_t + pad_b - kernel_dilation) / stride + 1;
+ output.w = stride * (w - 1) + kernel_dilation - (pad_l + pad_r);
+ output.h = stride * (h - 1) + kernel_dilation - (pad_t + pad_b);
+ output.c = outch;
+ output.data = (float *)malloc(output.w * output.h * output.c * sizeof(float));
+ if (!output.data)
+ return 0;
+
+ filter.w = kernel_size;
+ filter.h = kernel_size;
+ filter.c = outch;
+ filter.n = inch;
+ filter.data = (float *)malloc(filter.w * filter.h * filter.c * filter.n * sizeof(float));
+ if (!filter.data)
+ return 0;
+
+ for (int i = 0; i < input.w * input.h * input.c; i++)
+ {
+ input.data[i] = 0.001 + i * 0.000001;
+ }
+
+ for (int i = 0; i < filter.w * filter.h * filter.c * filter.n; i++)
+ {
+ filter.data[i] = 0.001 - i * 0.000001;
+ }
+
+ params.kernel_w = kernel_size;
+ params.kernel_h = kernel_size;
+ params.stride_w = stride;
+ params.stride_h = stride;
+ params.padding = padding;
+ params.pad_w = pad_l;
+ params.pad_h = pad_t;
+ params.dilation_w = dilation;
+ params.dilation_h = dilation;
+
+ const int m = params.kernel_h * params.kernel_w * output.c;
+ const int n = input.w * input.h;
+ const int k = input.c;
+
+ if (conv_type == 0)
+ {
+    for (int nloop = 0; nloop < loops; nloop++)
+    {
+ gettimeofday(&start, NULL);
+
+ direct_deconv_rowmajor(&input, &output, &filter, &params);
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+ }
+ else if (conv_type == 1)
+ {
+    for (int nloop = 0; nloop < loops; nloop++)
+    {
+ gettimeofday(&start, NULL);
+
+ for (int i = 0; i < output.w * output.h * output.c; i++)
+ {
+ output.data[i] = 0;
+ }
+
+ srcn_deconvolution2D(input, filter, output, params, thread_num, row_major);
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+ }
+
+ const int output_size = output.w * output.h * output.c;
+
+ int div = output_size < 16 ? output_size : 16;
+ int num = output_size > 64 ? 64 : output_size;
+
+ float *c_ptr = output.data;
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+ c_ptr = &output.data[output_size - num];
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+  long long total_size = (long long)m * n * k * 2;
+  printf("Average time: %.2f ms, total size: %lld, (GFLOPS: %.2f)\n", total_time / loops,
+         total_size, (double)total_size / (total_time / loops) / 1000000);
+
+ free(input.data);
+ free(output.data);
+ free(filter.data);
+
+ return 0;
+}
+
+static int test_batch_conv(const int batch, const int w, const int h, const int kernel_size,
+ const int stride, const int inch, const int outch, const int padding,
+ const int conv_type, const int thread_num, const int loops)
+{
+ struct timeval start, end;
+ float total_time = 0.f;
+
+ const int dilation = 1;
+
+ const int kernel_dilation = dilation * (kernel_size - 1) + 1;
+
+ convMat_t input;
+ convMat_t output;
+ convMat_t filter;
+ convParams_t params;
+
+ int pad_l, pad_r, pad_t, pad_b;
+ if (padding)
+ {
+ int pad_w = kernel_dilation + (w - 1) / stride * stride - w;
+ int pad_h = kernel_dilation + (h - 1) / stride * stride - h;
+ pad_l = pad_w / 2;
+ pad_r = pad_w - pad_l;
+ pad_t = pad_h / 2;
+ pad_b = pad_h - pad_t;
+ }
+ else
+ {
+ pad_l = pad_r = pad_t = pad_b = 0;
+ }
+
+ input.w = w;
+ input.h = h;
+ input.c = inch;
+ input.n = batch;
+ input.data = (float *)malloc(input.n * input.w * input.h * input.c * sizeof(float));
+ if (!input.data)
+ return 0;
+
+ output.w = (w + pad_l + pad_r - kernel_dilation) / stride + 1;
+ output.h = (h + pad_t + pad_b - kernel_dilation) / stride + 1;
+ output.c = outch;
+ output.n = batch;
+ output.data = (float *)malloc(output.n * output.w * output.h * output.c * sizeof(float));
+ if (!output.data)
+ return 0;
+
+ filter.w = kernel_size;
+ filter.h = kernel_size;
+ filter.c = inch;
+ filter.n = outch;
+ filter.data = (float *)malloc(filter.w * filter.h * filter.c * filter.n * sizeof(float));
+ if (!filter.data)
+ return 0;
+
+ for (int i = 0; i < input.w * input.h * input.c * input.n; i++)
+ {
+ input.data[i] = 0.001 + i * 0.000001;
+ }
+
+ for (int i = 0; i < filter.w * filter.h * filter.c * filter.n; i++)
+ {
+ filter.data[i] = 0.001 - i * 0.000001;
+ }
+
+ params.kernel_w = kernel_size;
+ params.kernel_h = kernel_size;
+ params.stride_w = stride;
+ params.stride_h = stride;
+ params.padding = padding;
+ params.pad_w = pad_l;
+ params.pad_h = pad_t;
+ params.dilation_w = dilation;
+ params.dilation_h = dilation;
+
+ const int m = output.c;
+ const int n = output.w * output.h;
+ const int k = params.kernel_h * params.kernel_w * input.c;
+
+ if (conv_type == 1)
+ {
+    for (int nloop = 0; nloop < loops; nloop++)
+    {
+ // printf("nloop = %d, thread_num = %d\n", nloop, thread_num);
+ // class srcn_sgemm my_gemm(input, filter, output, params, thread_num, col_major);
+
+ gettimeofday(&start, NULL);
+
+ srcn_batch_convolution2D(input, filter, output, params, NULL, thread_num, col_major);
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+ }
+ else if (conv_type == 2)
+ {
+ float *winograd_weight;
+
+ // trans_weight2winograd(filter, &winograd_weight);
+
+ winogradParams_t wparams = {params.kernel_w,
+ params.kernel_h,
+ params.stride_w,
+ params.stride_h,
+ params.dilation_w,
+ params.dilation_h,
+ input.n,
+ w,
+ h,
+ input.c,
+ output.c,
+ thread_num,
+ col_major,
+ filter.data};
+ winograd_weight = trans_weight2winograd(wparams);
+
+    for (int nloop = 0; nloop < loops; nloop++)
+    {
+ gettimeofday(&start, NULL);
+
+ srcn_batch_convolution2D(input, filter, output, params, winograd_weight, thread_num,
+ col_major);
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+ }
+ }
+
+ int div = m * n < 16 ? m * n : 16;
+ int num = m * n > 64 ? 64 : m * n;
+
+ float *c_ptr = output.data;
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+ c_ptr = &output.data[m * n * batch - num];
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+ long long total_size = (long long)batch * m * n * k * 2;
+ printf("AVER Time consuming: %.2fms, total size: %lld, (GFLOP: %.2f)\n", total_time / loops,
+ total_size, (double)total_size / (total_time / loops) / 1000000);
+
+ free(input.data);
+ free(output.data);
+ free(filter.data);
+
+ return 0;
+}
+
+static int test_depthwise_conv(const int w, const int h, const int kernel_size, const int stride,
+ const int inch, const int outch, const int padding,
+ const int conv_type, const int thread_num, const int loops)
+{
+ if (outch != inch)
+ return -1;
+ struct timeval start, end;
+ float total_time = 0.f;
+
+ const int dilation = 1;
+
+ const int kernel_dilation = dilation * (kernel_size - 1) + 1;
+
+ convMat_t input;
+ convMat_t output;
+ convMat_t filter;
+ convMat_t bias;
+ convParams_t params;
+
+ int pad_l, pad_r, pad_t, pad_b;
+ if (padding)
+ {
+ int pad_w = kernel_dilation + (w - 1) / stride * stride - w;
+ int pad_h = kernel_dilation + (h - 1) / stride * stride - h;
+ pad_l = pad_w / 2;
+ pad_r = pad_w - pad_l;
+ pad_t = pad_h / 2;
+ pad_b = pad_h - pad_t;
+ }
+ else
+ {
+ pad_l = pad_r = pad_t = pad_b = 0;
+ }
+
+ input.w = w;
+ input.h = h;
+ input.c = inch;
+ input.n = 1;
+#ifdef NCNN
+ input.data =
+ (float *)malloc(alignSize(input.w * input.h, 16 / sizeof(float)) * input.c * sizeof(float));
+#else
+ input.data = (float *)malloc(input.w * input.h * input.c * sizeof(float));
+#endif
+ if (!input.data)
+ return 0;
+
+ output.w = (w + pad_l + pad_r - kernel_dilation) / stride + 1;
+ output.h = (h + pad_t + pad_b - kernel_dilation) / stride + 1;
+ output.c = outch;
+ output.n = 1;
+
+#ifdef NCNN
+ output.data = (float *)malloc(alignSize(output.w * output.h, 16 / sizeof(float)) * output.c *
+ sizeof(float));
+#else
+ output.data = (float *)malloc(output.w * output.h * output.c * sizeof(float));
+#endif
+ const int gpu_data_off = output.w * output.h * output.c;
+ if (!output.data)
+ return 0;
+
+ for (int i = 0; i < output.w * output.h * output.c; i++)
+ {
+ output.data[i] = 1.f;
+ }
+
+ filter.w = kernel_size;
+ filter.h = kernel_size;
+ filter.c = 1;
+ filter.n = outch;
+ filter.data = (float *)malloc(filter.w * filter.h * filter.c * filter.n * sizeof(float));
+ if (!filter.data)
+ return 0;
+
+ for (int i = 0; i < input.w * input.h * input.c; i++)
+ {
+ input.data[i] = 0.001 + i * 0.000001;
+ }
+
+ for (int i = 0; i < filter.w * filter.h * filter.c * filter.n; i++)
+ {
+ filter.data[i] = 0.001 - i * 0.000001;
+ }
+
+ bias.w = outch;
+ bias.data = (float *)malloc(bias.w * sizeof(float));
+ if (!bias.data)
+ return 0;
+ for (int i = 0; i < bias.w; i++)
+ {
+ bias.data[i] = 0.f;
+ }
+
+ params.kernel_w = kernel_size;
+ params.kernel_h = kernel_size;
+ params.stride_w = stride;
+ params.stride_h = stride;
+ params.padding = padding;
+ params.pad_w = pad_l;
+ params.pad_h = pad_t;
+ params.dilation_w = dilation;
+ params.dilation_h = dilation;
+
+ const int m = output.c;
+ const int n = output.w * output.h;
+ const int k = params.kernel_h * params.kernel_w * input.c;
+
+ // ocl_context_t context;
+ size_t local_min[2] = {4, 4};
+ /**
+ if(conv_type == 1)
+ {
+ if(init_gpu(&context) < 0) return -1;
+ depthwise_conv_3x3S1_tune(&context, &input, &filter, &output, local_min);
+ }**/
+
+ gettimeofday(&start, NULL);
+ if (conv_type == 0)
+ srcn_depthwise_conv(input, filter, output, bias, params, 4,
+ row_major); // convdw3x3s1_neon(input, output, filter, filter);
+ // else if(conv_type == 1) depthwise_conv_gpu3x3S1(&context, &input, &filter, &output, &params,
+ // local_min);
+ else if (conv_type == 2)
+ {
+ for (int i = 0; i < input.c; i++)
+ {
+ convMat_t _input;
+ convMat_t _output;
+ convMat_t _filter;
+ convParams_t _params = params;
+
+ _input.w = input.w;
+ _input.h = input.h;
+ _input.c = 1;
+ _input.n = 1;
+#ifdef NCNN
+ _input.data = input.data + i * alignSize(input.w * input.h, 16 / sizeof(float));
+#else
+ _input.data = input.data + i * input.w * input.h;
+#endif
+
+ _output.w = output.w;
+ _output.h = output.h;
+ _output.c = 1;
+ _output.n = 1;
+#ifdef NCNN
+ _output.data = output.data + i * alignSize(output.w * output.h, 16 / sizeof(float));
+#else
+ _output.data = output.data + i * output.w * output.h;
+#endif
+ _filter.w = filter.w;
+ _filter.h = filter.h;
+ _filter.c = 1; // filter.c;
+ _filter.n = 1; // filter.n;
+      _filter.data = filter.data + i * filter.w * filter.h; // step to this channel's kernel
+
+ srcn_convolution2D(_input, _filter, _output, _params, NULL, 1, row_major);
+ // direct_conv_rowmajor(&_input, &_output, &_filter, &_params);
+ }
+ }
+
+ gettimeofday(&end, NULL);
+ total_time +=
+ ((end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec)) / 1000;
+
+ int div = m * n < 16 ? m * n : 16;
+ int num = m * n > 64 ? 64 : m * n;
+
+ if (conv_type == 0)
+ printf("[CPU RESULT]\n");
+ else if (conv_type == 1)
+ printf("[GPU RESULT]\n");
+ float *c_ptr = output.data;
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+ c_ptr = &output.data[m * n - num];
+ for (int i = 0; i < num; i++)
+ {
+ printf("%f ", c_ptr[i]);
+ if ((i + 1) % div == 0)
+ printf("\n");
+ }
+
+ printf("\n");
+
+  long long total_size = (long long)m * n * k * 2;
+  // A single timed run, so report the elapsed time directly instead of an
+  // average over `loops`.
+  printf("Time consumed: %.2f ms, total size: %lld, (GFLOPS: %.2f)\n", total_time, total_size,
+         (double)total_size / total_time / 1000000);
+
+ free(input.data);
+ free(output.data);
+ free(filter.data);
+ free(bias.data);
+
+ return 0;
+}
+
+//#define TEST_SGEMM
+#define TEST_CONV
+//#define TEST_DECONV
+//#define TEST_BATCH_CONV
+//#define TEST_DEPTHWISE_CONV
+
+int main(int argc, char **argv)
+{
+#ifdef TEST_SGEMM
+ if (argc < 6)
+ return 0;
+
+ const int m = atoi(argv[1]);
+ const int n = atoi(argv[2]);
+ const int k = atoi(argv[3]);
+ const int type = atoi(argv[4]);
+ const int loops = atoi(argv[5]);
+
+ test_sgemm(m, n, k, type, loops);
+#elif (defined TEST_CONV)
+ if (argc < 10)
+ return 0;
+ const int w = atoi(argv[1]);
+ const int h = atoi(argv[2]);
+ const int kernel_size = atoi(argv[3]);
+ const int stride = atoi(argv[4]);
+ const int outch = atoi(argv[5]);
+ const int inch = atoi(argv[6]);
+ const int padding = atoi(argv[7]);
+ const int conv_type = atoi(argv[8]);
+ const int thread_num = atoi(argv[9]);
+ int loops = 1;
+ if (argc > 10)
+ loops = atoi(argv[10]);
+ test_conv(w, h, kernel_size, stride, inch, outch, padding, conv_type, thread_num, loops);
+#elif (defined TEST_DECONV)
+ if (argc < 10)
+ return 0;
+ const int w = atoi(argv[1]);
+ const int h = atoi(argv[2]);
+ const int kernel_size = atoi(argv[3]);
+ const int stride = atoi(argv[4]);
+ const int outch = atoi(argv[5]);
+ const int inch = atoi(argv[6]);
+ const int padding = atoi(argv[7]);
+ const int conv_type = atoi(argv[8]);
+ const int thread_num = atoi(argv[9]);
+ int loops = 1;
+ if (argc > 10)
+ loops = atoi(argv[10]);
+ test_deconv(w, h, kernel_size, stride, inch, outch, padding, conv_type, thread_num, loops);
+#elif (defined TEST_BATCH_CONV)
+ if (argc < 11)
+ return 0;
+ const int batch = atoi(argv[1]);
+ const int w = atoi(argv[2]);
+ const int h = atoi(argv[3]);
+ const int kernel_size = atoi(argv[4]);
+ const int stride = atoi(argv[5]);
+ const int outch = atoi(argv[6]);
+ const int inch = atoi(argv[7]);
+ const int padding = atoi(argv[8]);
+ const int conv_type = atoi(argv[9]);
+ const int thread_num = atoi(argv[10]);
+ int loops = 1;
+ if (argc > 11)
+ loops = atoi(argv[11]);
+ test_batch_conv(batch, w, h, kernel_size, stride, inch, outch, padding, conv_type, thread_num,
+ loops);
+#elif (defined TEST_DEPTHWISE_CONV)
+ if (argc < 10)
+ return 0;
+ const int w = atoi(argv[1]);
+ const int h = atoi(argv[2]);
+ const int kernel_size = atoi(argv[3]);
+ const int stride = atoi(argv[4]);
+ const int outch = atoi(argv[5]);
+ const int inch = atoi(argv[6]);
+ const int padding = atoi(argv[7]);
+ const int conv_type = atoi(argv[8]);
+ const int thread_num = atoi(argv[9]);
+ int loops = 1;
+ if (argc > 10)
+ loops = atoi(argv[10]);
+ test_depthwise_conv(w, h, kernel_size, stride, inch, outch, padding, conv_type, thread_num,
+ loops);
+#endif
+
+ return 0;
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/srcn_conv.cc b/runtimes/libs/srcn/src/srcn_conv.cc
new file mode 100644
index 000000000..df2c87116
--- /dev/null
+++ b/runtimes/libs/srcn/src/srcn_conv.cc
@@ -0,0 +1,614 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef _OPENMP
+#include <omp.h>
+#endif
+
+#include "srcn/conv_type.h"
+#include "common.h"
+#include "sgemm_singlethread.h"
+#include "conv_sgemm_singlethread.h"
+#include "conv_sgemm_multithreads.h"
+#include "conv_winograd.h"
+#include "direct_conv_colmajor.h"
+#include "winograd.h"
+
+#include "deconv_sgemm_multithreads.h"
+#include "conv_sparse.h"
+#include "conv_winograd_batch.h"
+
+namespace nnfw
+{
+namespace srcn
+{
+
+static inline void weight_transfer(float *out, float *in, int H, int W, int C, int N)
+{
+ // HWCN ---> NCHW
+ for (int h = 0; h < H; ++h)
+ {
+ for (int w = 0; w < W; ++w)
+ {
+ for (int c = 0; c < C; ++c)
+ {
+ for (int n = 0; n < N; ++n)
+ {
+ int index_in = h * W * C * N + w * C * N + c * N + n;
+ int index_out = n * C * H * W + c * H * W + h * W + w;
+ out[index_out] = in[index_in];
+ }
+ }
+ }
+ }
+}
+
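+// Heuristic gate for the winograd path: only square 3x3 or 5x5 kernels with
+// stride 1 and no dilation qualify, and the channel/image products must be
+// large enough to amortize the transforms (thresholds are raised for
+// multi-threaded Tizen builds).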
+int check_winograd(winogradParams_t &params)
+{
+ int winograd_flag =
+ ((params.kernel_w == params.kernel_h) && (params.stride_w == params.stride_h) &&
+ (params.kernel_w == 3 || params.kernel_w == 5) && (params.stride_w == 1) &&
+ (params.dilation_w == 1) && (params.dilation_h == 1));
+
+ int winograd_channel_cond = 64 * 64;
+ int winograd_image_cond = 10 * 10;
+
+#ifdef TIZEN
+ if (params.num_threads > 1)
+ {
+ winograd_channel_cond = 128 * 128;
+ winograd_image_cond = 20 * 20;
+ }
+#endif // TIZEN
+
+ winograd_flag &= (params.inch * params.outch >= winograd_channel_cond);
+
+ if (params.w > 0 && params.h > 0 && params.batch == 1)
+ {
+ winograd_flag &= (params.w * params.h >= winograd_image_cond);
+ }
+
+ return winograd_flag;
+}
+
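+// Precompute winograd-domain weights (the operator G (x) G applied to the
+// kernels via SGEMM). Returns NULL whenever the winograd path does not
+// apply, so callers fall back to direct or SGEMM convolution; *size, if
+// given, receives the number of elements in the returned buffer.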
+float *trans_weight2winograd(winogradParams_t &params, unsigned int *size = NULL)
+{
+ int M, N;
+ const double *G;
+
+ float *winograd_weight;
+
+ int winograd_channel_cond = 64 * 64;
+ int winograd_image_cond = 10 * 10;
+
+#ifdef TIZEN
+ if (params.num_threads > 1)
+ {
+ winograd_channel_cond = 128 * 128;
+    winograd_image_cond = 20 * 20;
+ }
+#endif // TIZEN
+
+ int winograd_flag =
+ ((params.kernel_w == params.kernel_h) && (params.stride_w == params.stride_h) &&
+ (params.kernel_w == 3 || params.kernel_w == 5) && (params.stride_w == 1) &&
+ (params.dilation_w == 1) && (params.dilation_h == 1));
+ if (!winograd_flag)
+ return NULL;
+
+ winograd_flag = (params.inch * params.outch >= winograd_channel_cond);
+
+ if (!winograd_flag)
+ return NULL;
+
+ if (params.w > 0 && params.h > 0 && params.batch == 1)
+ {
+ winograd_flag &= (params.w * params.h >= winograd_image_cond);
+ if (!winograd_flag)
+ return NULL;
+ }
+
+ const int kernel_size = params.kernel_w;
+ const int inch = params.inch;
+ const int outch = params.outch;
+ float *weight_data = params.weight_data;
+
+ /*Step 1: transfer weight to winograd domain*/
+ if (kernel_size == 3)
+ {
+ if (params.w == 4 && params.batch > 1)
+ {
+ M = winograd_para_3x3s1_2::M;
+ N = winograd_para_3x3s1_2::N;
+ G = winograd_para_3x3s1_2::getG();
+ }
+ else
+ {
+ M = winograd_para_3x3s1::M;
+ N = winograd_para_3x3s1::N;
+ G = winograd_para_3x3s1::getG();
+ }
+ }
+ else
+ {
+ M = winograd_para_5x5s1::M;
+ N = winograd_para_5x5s1::N;
+ G = winograd_para_5x5s1::getG();
+ }
+
+ int tile_h_in_, tile_w_in_;
+ tile_h_in_ = tile_w_in_ = M;
+
+ if (size)
+ *size = tile_h_in_ * tile_w_in_ * inch * outch;
+
+ winograd_weight = new float[tile_h_in_ * tile_w_in_ * inch * outch];
+ if (!winograd_weight)
+ return NULL;
+
+ float *winograd_g = new float[M * M * N * N];
+ if (!winograd_g)
+ {
+ delete[] winograd_weight;
+ return NULL;
+ }
+
+ kronecker_product(winograd_g, G, G, M, N, M, N);
+
+ if (params.conv_type == col_major)
+ {
+ weight_data = new float[kernel_size * kernel_size * inch * outch];
+ if (!weight_data)
+ {
+ delete[] winograd_weight;
+ delete[] winograd_g;
+ return NULL;
+ }
+ weight_transfer(weight_data, params.weight_data, kernel_size, kernel_size, inch, outch);
+ }
+
+ class sgemm_singlethread sgemm(rowMajor, notrans, trans, tile_h_in_ * tile_w_in_, inch * outch,
+ kernel_size * kernel_size, winograd_g, weight_data,
+ winograd_weight, 1);
+
+ sgemm.run();
+
+ if (params.conv_type == col_major)
+ delete[] weight_data;
+
+ delete[] winograd_g;
+
+ return winograd_weight;
+}
+
+void winograd_release(float *winograd_weight)
+{
+ if (winograd_weight)
+ delete[] winograd_weight;
+}
+
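+// Top-level CPU dispatch: pick winograd for large 3x3/5x5 stride-1 problems
+// with pre-transformed weights, direct convolution for very small col_major
+// problems, and im2col + SGEMM otherwise. With multiple threads the winograd
+// work is partitioned over output rows or output channels via OpenMP.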
+void srcn_convolution2D(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, const float *winograd_weight, int num_threads,
+ convType_t conv_type)
+{
+ const int outw = out_mat.w;
+ const int outh = out_mat.h;
+ const int inch = in_mat.c;
+ const int outch = out_mat.c;
+
+ int winograd_flag =
+ ((in_param.kernel_w == in_param.kernel_h) && (in_param.stride_w == in_param.stride_h) &&
+ (in_param.kernel_w == 3 || in_param.kernel_w == 5) && (in_param.stride_w == 1) &&
+ (winograd_weight) && (in_param.dilation_w == 1) && (in_param.dilation_h == 1));
+
+ int direct_flag = ((conv_type == col_major) && (in_param.stride_w == in_param.stride_h) &&
+ (in_param.dilation_w == 1) && (in_param.dilation_h == 1));
+
+ int winograd_image_cond = 10 * 10;
+ int winograd_channel_cond = 64 * 64;
+ int direct_image_cond = 4 * 4;
+ int direct_channel_cond = 16 * 16;
+
+#ifdef TIZEN
+ if (num_threads > 1)
+ {
+ winograd_image_cond = 20 * 20;
+ winograd_channel_cond = 128 * 128;
+ }
+#endif
+
+ winograd_flag &=
+ ((outw * outh >= winograd_image_cond) && (inch * outch >= winograd_channel_cond));
+ direct_flag &= ((outw * outh <= direct_image_cond) || (inch * outch <= direct_channel_cond));
+
+ if (num_threads == 1)
+ {
+ if (winograd_flag)
+ {
+ class conv_winograd conv(in_mat, out_mat, in_param, conv_type, winograd_weight, num_threads,
+ in_mat.w * in_mat.h, outw * outh, outch);
+ conv.run();
+ }
+ else if (direct_flag)
+ {
+ direct_conv_colmajor(in_mat, out_mat, weights_mat, in_param, num_threads);
+ }
+ else
+ {
+ class conv_sgemm_singlethread conv(in_mat, weights_mat, out_mat, in_param, conv_type);
+ conv.run();
+ }
+ }
+ else if (num_threads > 1)
+ {
+ if (winograd_flag)
+ {
+ const int npart = num_threads > 4 ? 4 : num_threads;
+
+ omp_set_num_threads(npart);
+
+ if (conv_type == col_major)
+ {
+ if (outch < 512)
+ {
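+          // Partition the output rows into npart horizontal strips; each
+          // strip derives its own input window (ih rows at bottom_offset)
+          // and its share of the top padding, then runs winograd locally.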
+ const int _H = (outh + npart - 1) / npart;
+
+ if (_H < in_param.pad_h)
+ {
+ class conv_winograd conv(in_mat, out_mat, in_param, conv_type, winograd_weight, 1,
+ in_mat.w * in_mat.h, outw * outh, outch);
+ conv.run();
+ return;
+ }
+
+ const int ih = (_H - 1) * in_param.stride_w + in_param.kernel_w;
+ const int oh = _H;
+ const int nh = (outh + _H - 1) / _H;
+ int rh = outh % _H;
+ if (rh == 0)
+ rh = _H;
+
+#pragma omp parallel for
+ for (int i = 0; i < nh; i++)
+ {
+ int pad_h_part = 0;
+ convMat_t in_part;
+ convMat_t out_part;
+ const int oh = (i != nh - 1 || rh == 0) ? _H : rh;
+ const int ih = (oh - 1) * in_param.stride_w + in_param.kernel_w;
+
+ in_part.w = in_mat.w;
+ in_part.c = inch;
+ out_part.w = outw;
+ out_part.c = outch;
+ in_part.h = ih;
+ out_part.h = oh;
+
+ int bottom_offset = i * _H - in_param.pad_h;
+ if (bottom_offset < 0)
+ {
+ bottom_offset = 0;
+ pad_h_part = in_param.pad_h;
+ }
+ in_part.data = in_mat.data + bottom_offset * in_mat.w * inch * in_param.stride_w;
+ if (ih + bottom_offset > in_mat.h)
+ {
+ in_part.h = in_mat.h - bottom_offset;
+ }
+
+ out_part.data = out_mat.data + i * _H * outw * outch;
+
+ convParams_t params = {
+ in_param.kernel_w, in_param.kernel_h, in_param.stride_w, in_param.stride_h, 1, 1,
+ in_param.padding, in_param.pad_w, pad_h_part};
+
+ class conv_winograd conv(in_part, out_part, params, conv_type, winograd_weight,
+ num_threads, in_mat.w * in_mat.h, outw * outh, outch);
+ conv.run();
+ }
+ }
+ else
+ {
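+          // Otherwise split the output channels: each task convolves the
+          // full image against a contiguous slice of the winograd weights.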
+ const int _OUTC = (outch + npart - 1) / npart;
+
+ const int nc = (outch + _OUTC - 1) / _OUTC;
+ int rc = out_mat.c % _OUTC;
+ if (rc == 0)
+ rc = _OUTC;
+
+#pragma omp parallel for
+ for (int i = 0; i < nc; i++)
+ {
+ const float *weight_part;
+ convMat_t out_part;
+
+ const int oc = (i != nc - 1 || rc == 0) ? _OUTC : rc;
+
+ out_part.w = outw;
+ out_part.h = outh;
+ out_part.c = oc;
+ out_part.data = out_mat.data + i * _OUTC;
+ weight_part = winograd_weight + i * _OUTC * inch;
+ class conv_winograd conv(in_mat, out_part, in_param, conv_type, weight_part,
+ num_threads, in_mat.w * in_mat.h, outw * outh, outch);
+ conv.run();
+ }
+ }
+ }
+ else if (conv_type == row_major)
+ {
+#ifdef TIZEN
+ if (outch < 512)
+#else // TIZEN
+ if (outh >= 20)
+#endif // TIZEN
+ {
+ const int _H = (outh + npart - 1) / npart;
+
+ if (_H < in_param.pad_h)
+ {
+ class conv_winograd conv(in_mat, out_mat, in_param, conv_type, winograd_weight, 1,
+ in_mat.w * in_mat.h, outw * outh, outch);
+ conv.run();
+ return;
+ }
+
+ const int ih = (_H - 1) * in_param.stride_w + in_param.kernel_w;
+ const int oh = _H;
+ const int nh = (outh + _H - 1) / _H;
+ int rh = outh % _H;
+ if (rh == 0)
+ rh = _H;
+
+#pragma omp parallel for
+ for (int i = 0; i < nh; i++)
+ {
+ int pad_h_part = 0;
+ convMat_t in_part;
+ convMat_t out_part;
+ const int oh = (i != nh - 1 || rh == 0) ? _H : rh;
+ const int ih = (oh - 1) * in_param.stride_w + in_param.kernel_w;
+
+ in_part.w = in_mat.w;
+ in_part.c = inch;
+ out_part.w = outw;
+ out_part.c = outch;
+ in_part.h = ih;
+ out_part.h = oh;
+
+ int bottom_offset = i * _H - in_param.pad_h;
+ if (bottom_offset < 0)
+ {
+ bottom_offset = 0;
+ pad_h_part = in_param.pad_h;
+ }
+ in_part.data = in_mat.data + bottom_offset * in_mat.w * in_param.stride_w;
+ if (ih + bottom_offset > in_mat.h)
+ {
+ in_part.h = in_mat.h - bottom_offset;
+ }
+
+ out_part.data = out_mat.data + i * _H * outw;
+
+          convParams_t params = {
+              in_param.kernel_w, in_param.kernel_h, in_param.stride_w, in_param.stride_h, 1, 1,
+              in_param.padding,  in_param.pad_w,    pad_h_part}; // same field order as the col_major branch
+
+ class conv_winograd conv(in_part, out_part, params, conv_type, winograd_weight,
+ num_threads, in_mat.w * in_mat.h, outw * outh, outch);
+ conv.run();
+ }
+ }
+ else
+ {
+ const int _OUTC = (outch + npart - 1) / npart;
+
+ const int nc = (outch + _OUTC - 1) / _OUTC;
+ int rc = out_mat.c % _OUTC;
+ if (rc == 0)
+ rc = _OUTC;
+
+#pragma omp parallel for
+ for (int i = 0; i < nc; i++)
+ {
+ const float *weight_part;
+ convMat_t out_part;
+
+ const int oc = (i != nc - 1 || rc == 0) ? _OUTC : rc;
+
+ out_part.w = outw;
+ out_part.h = outh;
+ out_part.c = oc;
+ out_part.data = out_mat.data + i * _OUTC * outw * outh;
+ weight_part = winograd_weight + i * _OUTC * inch;
+ class conv_winograd conv(in_mat, out_part, in_param, conv_type, weight_part,
+ num_threads, in_mat.w * in_mat.h, outw * outh, outch);
+ conv.run();
+ }
+ }
+ }
+ }
+ else if (direct_flag)
+ {
+ direct_conv_colmajor(in_mat, out_mat, weights_mat, in_param, num_threads);
+ }
+ else
+ {
+ class conv_sgemm_multithreads conv(in_mat, weights_mat, out_mat, in_param, num_threads,
+ conv_type);
+ conv.run();
+ }
+ }
+}
+
+void srcn_deconvolution2D(const convMat_t &in_mat, const convMat_t &weights_mat, convMat_t &out_mat,
+ const convParams_t &in_param, int num_threads, convType_t conv_type)
+{
+ class deconv_sgemm_multithreads deconv(in_mat, weights_mat, out_mat, in_param, num_threads,
+ conv_type);
+ deconv.run();
+}
+
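+// Pack the non-zero filter taps into per-block coordinate lists for the
+// sparse path: output channels are grouped into blocks of BCH, a first pass
+// counts the non-zeros per block, and a second pass records
+// (output channel m, flattened kernel index k, value) triples.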
+void *trans_weight2sparse(const convMat_t &weights_mat)
+{
+ const int kernel_w = weights_mat.w;
+ const int kernel_h = weights_mat.h;
+ const int inch = weights_mat.c;
+ const int outch = weights_mat.n;
+
+ const int nch = (outch + BCH - 1) / BCH;
+ const int rch = outch % BCH;
+
+ const float *data = weights_mat.data;
+ const int klength = inch * kernel_h * kernel_w;
+
+ sparse_weight_t *sparse_weight = new sparse_weight_t[nch];
+ if (!sparse_weight)
+ return NULL;
+
+ for (int i = 0; i < nch; i++)
+ {
+ int _bch = (i != nch - 1 || rch == 0) ? BCH : rch;
+ sparse_weight_t *sparse_weight_n = &sparse_weight[i];
+ sparse_weight_n->mxk = 0;
+
+ for (int j = 0; j < _bch; j++)
+ {
+ for (int l = 0; l < klength; l++)
+ {
+ float val = *(data + (i * BCH + j) * klength + l);
+ if (val != 0)
+ {
+ sparse_weight_n->mxk++;
+ }
+ }
+ }
+ }
+
+ for (int i = 0; i < nch; i++)
+ {
+ int _bch = (i != nch - 1 || rch == 0) ? BCH : rch;
+ sparse_weight_t *sparse_weight_n = &sparse_weight[i];
+ sparse_weight_n->wdata = new weight_data_t[sparse_weight_n->mxk];
+ int index = 0;
+
+ for (int l = 0; l < klength; l++)
+ {
+ for (int j = 0; j < _bch; j++)
+ {
+ float val = *(data + (i * BCH + j) * klength + l);
+ if (val != 0)
+ {
+ sparse_weight_n->wdata[index].m = i * BCH + j;
+ sparse_weight_n->wdata[index].k = l;
+ sparse_weight_n->wdata[index++].data = val;
+ }
+ }
+ }
+ }
+
+ return (void *)sparse_weight;
+}
+
+void sparse_release(const int outch, void *ptr)
+{
+ sparse_weight_t *sparse_weight = (sparse_weight_t *)ptr;
+ const int nch = (outch + BCH - 1) / BCH;
+
+ if (!sparse_weight)
+ return;
+
+ for (int i = 0; i < nch; i++)
+ {
+ sparse_weight_t *sparse_weight_n = &sparse_weight[i];
+ if (sparse_weight_n->wdata)
+ delete[] sparse_weight_n->wdata;
+ }
+
+  delete[] sparse_weight;
+}
+
+void srcn_sparse_convolution2D(const convMat_t &in_mat, convMat_t &out_mat,
+                               const convParams_t &in_param, const void *sparse_weight,
+                               int num_threads, convType_t conv_type)
+{
+  class conv_sparse conv(in_mat, out_mat, in_param, (const sparse_weight_t *)sparse_weight,
+                         num_threads, conv_type);
+
+ for (int i = 0; i < out_mat.c * out_mat.h * out_mat.w; i++)
+ {
+ *(out_mat.data + i) = 0;
+ }
+
+ conv.run();
+}
+
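+// Batched convolution: when winograd weights are supplied and several
+// threads are available, the batch is split into contiguous sub-batches,
+// one per OpenMP task; otherwise fall back to the single- or multi-threaded
+// SGEMM paths.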
+void srcn_batch_convolution2D(const convMat_t &in_mat, const convMat_t &weights_mat,
+ convMat_t &out_mat, const convParams_t &in_param,
+ const float *winograd_weight, int num_threads, convType_t conv_type)
+{
+ int winograd_flag = (winograd_weight != NULL);
+
+ if (winograd_flag)
+ {
+ if (num_threads > 1)
+ {
+ omp_set_num_threads(num_threads);
+ const int batch = in_mat.n;
+ const int npart = (batch + num_threads - 1) / num_threads;
+ const int nn = (batch + npart - 1) / npart;
+ const int rn = batch % npart;
+
+#pragma omp parallel for
+ for (int i = 0; i < nn; i++)
+ {
+ const int pn = (i != nn - 1 || rn == 0) ? npart : rn;
+ convMat_t in_mat_part = {in_mat.w, in_mat.h, in_mat.c, pn,
+ in_mat.data + i * npart * in_mat.w * in_mat.h * in_mat.c};
+ convMat_t out_mat_part = {out_mat.w, out_mat.h, out_mat.c, pn,
+ out_mat.data + i * npart * out_mat.w * out_mat.h * out_mat.c};
+
+ class conv_winograd_batch conv(in_mat_part, out_mat_part, in_param, conv_type,
+ winograd_weight, num_threads);
+ conv.run();
+ }
+ }
+ else
+ {
+ class conv_winograd_batch conv(in_mat, out_mat, in_param, conv_type, winograd_weight,
+ num_threads);
+ conv.run();
+ }
+ }
+ else
+ {
+ if (num_threads == 1)
+ {
+ class conv_sgemm_singlethread conv(in_mat, weights_mat, out_mat, in_param, conv_type);
+ conv.run();
+ }
+ else
+ {
+ class conv_sgemm_multithreads conv(in_mat, weights_mat, out_mat, in_param, num_threads,
+ conv_type);
+ conv.run();
+ }
+ }
+}
+
+} // namespace srcn
+} // namespace nnfw
diff --git a/runtimes/libs/srcn/src/winograd.h b/runtimes/libs/srcn/src/winograd.h
new file mode 100644
index 000000000..5ad8f1126
--- /dev/null
+++ b/runtimes/libs/srcn/src/winograd.h
@@ -0,0 +1,148 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_SRCN_WINOGRAD_H__
+#define __NNFW_SRCN_WINOGRAD_H__
+
+namespace nnfw
+{
+namespace srcn
+{
+
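+// Transform matrices for F(4x4, 3x3): G (6x3) takes a 3x3 kernel into the
+// 6x6 tile domain, B (6x6) transforms input tiles, and A (6x4) maps tile
+// products back to 4x4 outputs, i.e. Y = A^T ((G g G^T) .* (B^T d B)) A.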
+struct winograd_para_3x3s1
+{
+ static const int M = 3 + 4 - 1;
+ static const int N = 3;
+
+ static const double *getG()
+ {
+ static const double G[M * N] = {
+ 1. / 4., 0, 0, -1. / 6., -1. / 6., -1. / 6., -1. / 6., 1. / 6., -1. / 6.,
+ 1. / 24., 1. / 12., 1. / 6., 1. / 24., -1. / 12., 1. / 6., 0, 0, 1,
+ };
+ return G;
+ }
+
+ static const double *getA()
+ {
+ static const double A[M * (M - N + 1)] = {
+ 1, 0, 0, 0, 1, 1, 1, 1, 1, -1, 1, -1, 1, 2, 4, 8, 1, -2, 4, -8, 0, 0, 0, 1,
+ };
+ return A;
+ }
+
+ static const double *getB()
+ {
+ static const double B[M * M] = {
+ 4, 0, 0, 0, 0, 0, 0, -4, 4, -2, 2, 4, -5, -4, -4, -1, -1, 0,
+ 0, 1, -1, 2, -2, -5, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1,
+ };
+ return B;
+ };
+};
+
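+// F(2x2, 3x3) variant with 4x4 tiles, selected for batched inputs of width 4.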
+struct winograd_para_3x3s1_2
+{
+ static const int M = 3 + 2 - 1;
+ static const int N = 3;
+
+ static const double *getG()
+ {
+ static const double G[M * N] = {
+ 1, 0, 0, 1. / 2., 1. / 2., 1. / 2., 1. / 2., -1. / 2., 1. / 2., 0, 0, 1,
+ };
+ return G;
+ }
+
+ static const double *getA()
+ {
+ static const double A[M * (M - N + 1)] = {
+ 1, 0, 1, 1, 1, -1, 0, 1,
+ };
+ return A;
+ }
+
+ static const double *getB()
+ {
+ static const double B[M * M] = {
+ 1, 0, 0, 0, 0, 1, -1, -1, -1, 1, 1, 0, 0, 0, 0, 1,
+ };
+ return B;
+ };
+};
+
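+// Transform matrices for F(4x4, 5x5): 8x8 tiles producing 4x4 outputs.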
+struct winograd_para_5x5s1
+{
+ static const int M = 5 + 4 - 1;
+ static const int N = 5;
+
+ static const double *getG()
+ {
+ static const double G[M * N] = {
+ 1, 0, 0, 0, 0, -2. / 9., -2. / 9., -2. / 9.,
+ -2. / 9., -2. / 9., -2. / 9., 2. / 9., -2. / 9., 2. / 9., -2. / 9., 1. / 90.,
+ 1. / 45., 2. / 45., 4. / 45., 8. / 45., 1. / 90., -1. / 45., 2. / 45., -4. / 45.,
+ 8. / 45., 4. / 45., 2. / 45., 1. / 45., 1. / 90., 1. / 180., 4. / 45., -2. / 45.,
+ 1. / 45., -1. / 90., 1. / 180., 0, 0, 0, 0, 1,
+ };
+ return G;
+ }
+
+ static const double *getA()
+ {
+ static const double A[M * (M - N + 1)] = {1, 0, 0, 0, 1, 1, 1, 1, 1, -1, 1, -1, 1, 2, 4, 8,
+ 1, -2, 4, -8, 8, 4, 2, 1, 8, -4, 2, -1, 0, 0, 0, 1};
+ return A;
+ }
+
+ static const double *getB()
+ {
+ static const double B[M * M] = {
+ 1, 0, 0, 0, 0, 0, 0, 0, 0, 1,
+ -1, 1. / 2, -1. / 2, 2, -2, -1, -21. / 4, 1, 1, 1. / 4,
+ 1. / 4, 4, 4, 0, 0, -17. / 4, 17. / 4, -5. / 2, 5. / 2, -5. / 2,
+ 5. / 2, 21. / 4, 21. / 4, -17. / 4, -17. / 4, -5. / 4, -5. / 4, -5, -5, 0,
+ 0, 1, -1, 2, -2, 1. / 2, -1. / 2, -21. / 4, -1, 1,
+ 1, 1, 1, 1, 1, 0, 0, 0, 0, 0,
+ 0, 0, 0, 1,
+ };
+ return B;
+ }
+};
+
+static void kronecker_product(float *out, const double *in1, const double *in2, int m, int n, int p,
+ int q)
+{
+ for (int i = 0; i < m; ++i)
+ {
+ for (int j = 0; j < n; ++j)
+ {
+ for (int k = 0; k < p; ++k)
+ {
+ for (int l = 0; l < q; ++l)
+ {
+ out[(p * i + k) * n * q + q * j + l] = in1[n * i + j] * in2[k * q + l];
+ /* compute in double precision and convert back to float for accuracy */
+ }
+ }
+ }
+ }
+}
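+
+/* Usage sketch (illustrative; g_block and id4 are hypothetical names, not part of
+ * this header): expand the 6x3 transform G of F(4x4, 3x3) with a 4x4 identity so
+ * that four channels are transformed at once, producing a 24x12 float matrix:
+ *
+ *   float g_block[24 * 12];
+ *   const double id4[16] = {1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1};
+ *   kronecker_product(g_block, winograd_para_3x3s1::getG(), id4,
+ *                     winograd_para_3x3s1::M, winograd_para_3x3s1::N, 4, 4);
+ */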
+
+} // namespace srcn
+} // namespace nnfw
+
+#endif // __NNFW_SRCN_WINOGRAD_H__
diff --git a/runtimes/libs/tflite/CMakeLists.txt b/runtimes/libs/tflite/CMakeLists.txt
new file mode 100644
index 000000000..8b85e7183
--- /dev/null
+++ b/runtimes/libs/tflite/CMakeLists.txt
@@ -0,0 +1,26 @@
+nnfw_find_package(TensorFlowLite REQUIRED)
+
+file(GLOB_RECURSE SOURCES "src/*.cpp")
+file(GLOB_RECURSE TESTS "src/*.test.cpp")
+list(REMOVE_ITEM SOURCES ${TESTS})
+
+# NOTE For now NNFW supports two TFLITE versions: v1.12 for Ubuntu and v1.9 for Tizen,
+# so the TFLITE version must be specified explicitly to distinguish between them.
+if(NOT TFLITE_MAJOR_VER OR NOT TFLITE_MINOR_VER)
+ message(FATAL_ERROR "TFLITE_MAJOR_VER and TFLITE_MINOR_VER should be defined")
+endif(NOT TFLITE_MAJOR_VER OR NOT TFLITE_MINOR_VER)
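+# For example (illustrative): cmake .. -DTFLITE_MAJOR_VER=1 -DTFLITE_MINOR_VER=12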
+
+add_library(nnfw_lib_tflite STATIC ${SOURCES})
+set_target_properties(nnfw_lib_tflite PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(nnfw_lib_tflite PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
+target_link_libraries(nnfw_lib_tflite PUBLIC tensorflow-lite)
+target_link_libraries(nnfw_lib_tflite PUBLIC nnfw_lib_misc nnfw_lib_rua_shim)
+target_link_libraries(nnfw_lib_tflite PRIVATE ${LIB_PTHREAD} dl)
+target_link_libraries(nnfw_lib_tflite PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_tflite PRIVATE nnfw_coverage)
+
+target_compile_definitions(nnfw_lib_tflite PUBLIC TFLITE_MAJOR_VER=${TFLITE_MAJOR_VER}
+ TFLITE_MINOR_VER=${TFLITE_MINOR_VER})
+
+add_executable(nnfw_lib_tflite_test_TensorView src/TensorView.test.cpp)
+target_link_libraries(nnfw_lib_tflite_test_TensorView nnfw_lib_tflite)
diff --git a/runtimes/libs/tflite/include/tflite/Assert.h b/runtimes/libs/tflite/include/tflite/Assert.h
new file mode 100644
index 000000000..148ac7e01
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/Assert.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Assert.h
+ * @brief This file contains helper function of assertion
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_ASSERT_H__
+#define __NNFW_TFLITE_ASSERT_H__
+
+#include "tensorflow/lite/context.h"
+
+#include <sstream>
+#include <stdexcept>
+
+#define STR_DETAIL(value) #value
+#define STR(value) STR_DETAIL(value)
+
+#define TFLITE_ENSURE(exp) \
+ { \
+ const TfLiteStatus status = (exp); \
+ \
+ if (status != kTfLiteOk) \
+ { \
+ std::ostringstream ss; \
+ ss << #exp << " failed (" << __FILE__ << ":" << __LINE__ << ")"; \
+ throw std::runtime_error{ss.str()}; \
+ } \
+ }
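+
+// Usage sketch (illustrative): wrap any call that returns TfLiteStatus, e.g.
+//   TFLITE_ENSURE(interpreter.AllocateTensors());
+// On failure this throws std::runtime_error carrying the failing expression
+// together with the file and line.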
+
+#endif // __NNFW_TFLITE_ASSERT_H__
diff --git a/runtimes/libs/tflite/include/tflite/Diff.h b/runtimes/libs/tflite/include/tflite/Diff.h
new file mode 100644
index 000000000..55b7526ab
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/Diff.h
@@ -0,0 +1,200 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Diff.h
+ * @brief This file contains classes for testing correctness of implementations
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_DIFF_H__
+#define __NNFW_TFLITE_DIFF_H__
+
+#include "tensorflow/lite/interpreter.h"
+
+#include "misc/tensor/Index.h"
+#include "misc/tensor/Diff.h"
+#include "misc/tensor/Shape.h"
+#include "misc/tensor/Comparator.h"
+
+#include "tflite/TensorView.h"
+
+#include <functional>
+#include <vector>
+
+/**
+ * @brief Class to define TfLite interpreter match application
+ */
+class TfLiteInterpMatchApp
+{
+public:
+ /**
+ * @brief Construct a new TfLiteInterpMatchApp object with Comparator
+ * @param[in] comparator Comparator object for tensor comparison
+ */
+ TfLiteInterpMatchApp(const nnfw::misc::tensor::Comparator &comparator)
+ : _verbose{false}, _comparator(comparator)
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Get reference verbose for debugging information
+ * @return Reference of verbose value
+ */
+ int &verbose(void) { return _verbose; }
+
+private:
+ int _verbose;
+
+public:
+ /**
+ * @brief Run two interpreters and compare their outputs
+ * @param[in] pure Interpreter producing the expected results (pure TfLite)
+ * @param[in] nnapi Interpreter producing the obtained results (through NNAPI)
+ * @return @c true if the outputs of the two interpreters match, otherwise @c false
+ */
+ bool run(::tflite::Interpreter &pure, ::tflite::Interpreter &nnapi) const;
+ /**
+ * @brief Compare two TensorView values and return the match result
+ * @param[in] expected TensorView object to read expected values
+ * @param[in] obtained TensorView object to read obtained values
+ * @param[in] id Tensor ID value used for debug message
+ * @return @c true if the two TensorView values are the same, otherwise @c false
+ */
+ template <typename T>
+ bool compareSingleTensorView(const nnfw::tflite::TensorView<T> &expected,
+ const nnfw::tflite::TensorView<T> &obtained, int id) const;
+
+private:
+ const nnfw::misc::tensor::Comparator &_comparator;
+};
+
+#include "tflite/interp/Builder.h"
+#include "tflite/Quantization.h"
+
+#include <random>
+
+/**
+ * @brief Class to generate random values
+ */
+class RandomGenerator
+{
+public:
+ /**
+ * @brief Construct a new RandomGenerator object
+ * @param[in] seed Random seed value
+ * @param[in] mean Mean value of normal random number generation
+ * @param[in] stddev Standard deviation of random number generation
+ * @param[in] quantization TfLiteQuantizationParams type to represent quantization value
+ * (not used yet)
+ */
+ RandomGenerator(int seed, float mean, float stddev,
+ const TfLiteQuantizationParams quantization = make_default_quantization())
+ : _rand{seed}, _dist{mean, stddev}, _quantization{quantization}
+ {
+ (void)_quantization;
+ }
+
+public:
+ /**
+ * @brief Generate random numbers for type T
+ * @param[in] s Shape value
+ * @param[in] i Index value
+ * @return Random generated value
+ * @note This is the same as T generate(void), as the two input parameters are not used
+ */
+ template <typename T>
+ T generate(const ::nnfw::misc::tensor::Shape &, const ::nnfw::misc::tensor::Index &)
+ {
+ return generate<T>();
+ }
+
+ /**
+ * @brief Generate random numbers for type T
+ * @return Random generated value
+ */
+ template <typename T> T generate(void) { return _dist(_rand); }
+
+private:
+ std::minstd_rand _rand;
+ std::normal_distribution<float> _dist;
+ // unused
+ const TfLiteQuantizationParams _quantization;
+};
+
+template <> uint8_t RandomGenerator::generate<uint8_t>(void);
+
+/**
+ * @brief Structure for NNAPI correctness test
+ */
+struct RandomTestParam
+{
+ int verbose; //!< Verbosity of debug information
+ int tolerance; //!< Tolerance of value difference
+ int tensor_logging = 0; //!< Save logging to a file if not 0
+ std::string log_path = ""; //!< Path of log file, meaningful only when tensor_logging is 1
+};
+
+/**
+ * @brief Class to define Random test runner
+ */
+class RandomTestRunner
+{
+public:
+ /**
+ * @brief Construct a new RandomTestRunner object
+ * @param[in] seed Random seed value
+ * @param[in] param RandomTestParam object for test runner
+ * @param[in] quantization TfLiteQuantizationParams type to represent quantization value
+ */
+ RandomTestRunner(int seed, const RandomTestParam &param,
+ const TfLiteQuantizationParams quantization = make_default_quantization())
+ : _randgen{seed, 0.0f, 2.0f, quantization}, _param{param}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Run the random test runner
+ * @param[in] builder Interpreter Builder used to run
+ * @return 0 if test succeeds, otherwise failure
+ */
+ int run(const nnfw::tflite::Builder &builder);
+
+public:
+ /**
+ * @brief Get RandomGenerator reference
+ * @return RandomGenerator reference
+ */
+ RandomGenerator &generator() { return _randgen; }
+
+private:
+ RandomGenerator _randgen;
+ const RandomTestParam _param;
+
+public:
+ /**
+ * @brief Create a RandomTestRunner object
+ * @param[in] seed Random seed value
+ * @return The created RandomTestRunner object
+ */
+ static RandomTestRunner make(int seed);
+};
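+
+// Usage sketch (illustrative): compare pure TfLite against NNAPI on random inputs,
+// given some nnfw::tflite::Builder 'builder':
+//   auto runner = RandomTestRunner::make(0 /* seed */);
+//   const int exitcode = runner.run(builder); // 0 iff all output tensors matched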
+
+#endif // __NNFW_TFLITE_DIFF_H__
diff --git a/runtimes/libs/tflite/include/tflite/FeatureView.h b/runtimes/libs/tflite/include/tflite/FeatureView.h
new file mode 100644
index 000000000..a8f069c40
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/FeatureView.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file FeatureView.h
+ * @brief This file contains FeatureView class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_FEATURE_VIEW_H__
+#define __NNFW_TFLITE_FEATURE_VIEW_H__
+
+#include "tensorflow/lite/interpreter.h"
+
+#include "tflite/InputIndex.h"
+#include "tflite/OutputIndex.h"
+
+#include "misc/feature/Shape.h"
+#include "misc/feature/Reader.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+
+template <typename T> class FeatureView;
+
+/**
+ * @brief Class to support reading element of float type feature
+ */
+template <> class FeatureView<float> : public nnfw::misc::feature::Reader<float>
+{
+public:
+ /**
+ * @brief Construct a new FeatureView object
+ * @param[in] interp Interpreter to read from
+ * @param[in] index InputIndex index of input
+ */
+ FeatureView(::tflite::Interpreter &interp, const InputIndex &index);
+ /**
+ * @brief Construct a new FeatureView object
+ * @param[in] interp Interpreter to read from
+ * @param[in] index OutputIndex index of output
+ */
+ FeatureView(::tflite::Interpreter &interp, const OutputIndex &index);
+
+public:
+ /**
+ * @brief Get value of element using channel, row and column index
+ * @param[in] ch Channel index
+ * @param[in] row Row index
+ * @param[in] col Column index
+ * @return Value of element
+ */
+ float at(uint32_t ch, uint32_t row, uint32_t col) const;
+ /**
+ * @brief Get reference of element using channel, row and column index
+ * @param[in] ch Channel index
+ * @param[in] row Row index
+ * @param[in] col Column index
+ * @return Reference of element
+ */
+ float &at(uint32_t ch, uint32_t row, uint32_t col);
+
+ /**
+  * @brief Get value of element using batch, channel, row and column index
+  */
+ float at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const;
+
+private:
+ /**
+ * @brief Get offset of element from channel, row and column index
+ * @param[in] ch Channel index
+ * @param[in] row Row index
+ * @param[in] col Column index
+ * @return Offset of element
+ */
+ uint32_t getElementOffset(uint32_t ch, uint32_t row, uint32_t col) const
+ {
+ uint32_t res = 0;
+
+ // TensorFlow Lite assumes NHWC ordering for tensors
+ res += row * _shape.W * _shape.C;
+ res += col * _shape.C;
+ res += ch;
+
+ return res;
+ }
+
+private:
+ nnfw::misc::feature::Shape _shape;
+ float *_base;
+};
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_FEATURE_VIEW_H__
diff --git a/libs/tflite/include/tflite/InputIndex.h b/runtimes/libs/tflite/include/tflite/InputIndex.h
index f535b2626..f535b2626 100644
--- a/libs/tflite/include/tflite/InputIndex.h
+++ b/runtimes/libs/tflite/include/tflite/InputIndex.h
diff --git a/libs/tflite/include/tflite/InterpreterSession.h b/runtimes/libs/tflite/include/tflite/InterpreterSession.h
index deaf05a7f..deaf05a7f 100644
--- a/libs/tflite/include/tflite/InterpreterSession.h
+++ b/runtimes/libs/tflite/include/tflite/InterpreterSession.h
diff --git a/libs/tflite/include/tflite/NNAPISession.h b/runtimes/libs/tflite/include/tflite/NNAPISession.h
index b2a999d10..b2a999d10 100644
--- a/libs/tflite/include/tflite/NNAPISession.h
+++ b/runtimes/libs/tflite/include/tflite/NNAPISession.h
diff --git a/libs/tflite/include/tflite/OutputIndex.h b/runtimes/libs/tflite/include/tflite/OutputIndex.h
index dd1ca8d44..dd1ca8d44 100644
--- a/libs/tflite/include/tflite/OutputIndex.h
+++ b/runtimes/libs/tflite/include/tflite/OutputIndex.h
diff --git a/runtimes/libs/tflite/include/tflite/Quantization.h b/runtimes/libs/tflite/include/tflite/Quantization.h
new file mode 100644
index 000000000..8272bcdc0
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/Quantization.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Quantization.h
+ * @brief This file contains the BitwiseIntToFloat union and quantization-related helpers
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_QUANTIZATION_H__
+#define __NNFW_TFLITE_QUANTIZATION_H__
+
+/**
+ * @brief Union to provide bitwise conversion of integer and float
+ */
+union BitwiseIntToFloat {
+ int i;
+ float f;
+};
+
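+// 0x3f7fffff is the bit pattern of the largest IEEE-754 float strictly below 1.0f,
+// i.e. 0.99999994f (1.0f itself is 0x3f800000).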
+static const float FLOAT_NEAREST_TO_1 = BitwiseIntToFloat{0x3f7fffff}.f;
+
+#include "tensorflow/lite/context.h"
+
+/**
+ * @brief Get TfLiteQuantizationParams object with default values
+ * @return TfLiteQuantizationParams object
+ */
+TfLiteQuantizationParams make_default_quantization(void);
+
+#endif // __NNFW_TFLITE_QUANTIZATION_H__
diff --git a/runtimes/libs/tflite/include/tflite/Session.h b/runtimes/libs/tflite/include/tflite/Session.h
new file mode 100644
index 000000000..b653acf61
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/Session.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Session.h
+ * @brief This file contains Session class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_SESSION_H__
+#define __NNFW_TFLITE_SESSION_H__
+
+#include <tensorflow/lite/interpreter.h>
+
+namespace nnfw
+{
+namespace tflite
+{
+
+/**
+ * @brief Structure to provide interface methods of interpreter session
+ */
+struct Session
+{
+ /**
+ * @brief Destruct Session object using default destructor
+ */
+ virtual ~Session() = default;
+
+ /**
+ * @brief Get the Interpreter object pointer
+ * @return The Interpreter object pointer
+ */
+ virtual ::tflite::Interpreter *interp(void) = 0;
+
+ /**
+ * @brief Prepare the session
+ * @return @c true if prepare method succeeded, otherwise @c false
+ */
+ virtual bool prepare(void) = 0;
+ /**
+ * @brief Run the session
+ * @return @c true if run method succeeded, otherwise @c false
+ */
+ virtual bool run(void) = 0;
+ /**
+ * @brief Teardown(release) the session
+ * @return @c true if teardown method succeeded, otherwise @c false
+ */
+ virtual bool teardown(void) = 0;
+};
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_SESSION_H__
diff --git a/runtimes/libs/tflite/include/tflite/TensorLogger.h b/runtimes/libs/tflite/include/tflite/TensorLogger.h
new file mode 100644
index 000000000..7b3363bd5
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/TensorLogger.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file TensorLogger.h
+ * @brief This file contains TensorLogger class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_TENSOR_LOGGER_H__
+#define __NNFW_TFLITE_TENSOR_LOGGER_H__
+
+#include "misc/tensor/IndexIterator.h"
+#include "tflite/TensorView.h"
+
+#include <tensorflow/lite/interpreter.h>
+#include <tensorflow/lite/context.h>
+#include <fstream>
+#include <iomanip>
+
+namespace nnfw
+{
+namespace tflite
+{
+
+/**
+ * @brief Class to write input and output values / shapes into a file in Python form
+ * @note This is a utility to write input and output values / shapes into a file in
+ *       Python form. Any Python app can load them by running:\n
+ *       exec(open(filename).read())\n
+ *       The generated Python code looks like the following:\n
+ * tensor_shape_gen = []\n
+ * tensor_value_gen = []\n\n
+ * tensor_shape_gen.append("{2, 1, 2}")\n
+ * tensor_value_gen.append([1, 2, 3, 4])\n\n
+ * tensor_shape_gen.append("{2}")\n
+ * tensor_value_gen.append([1, 2])\n\n
+ * tensor_shape_gen.append("{2, 1, 2}")\n
+ * tensor_value_gen.append([1, 4, 3, 8])\n
+ */
+class TensorLogger
+{
+private:
+ std::ofstream _outfile;
+
+public:
+ /**
+ * @brief Get TensorLogger instance
+ * @return The TensorLogger instance
+ */
+ static TensorLogger &instance()
+ {
+ static TensorLogger instance;
+ return instance;
+ }
+
+ /**
+ * @brief Save tensor details from the interpreter to a file
+ * @param[in] path The file path to save
+ * @param[in] interp The TfLite interpreter
+ */
+ void save(const std::string &path, ::tflite::Interpreter &interp)
+ {
+ open(path);
+
+ int log_index = 0;
+ for (const auto id : interp.inputs())
+ {
+ _outfile << "# input tensors" << std::endl;
+ printTensor(interp, id, log_index++);
+ }
+ for (const auto id : interp.outputs())
+ {
+ _outfile << "# output tensors" << std::endl;
+ printTensor(interp, id, log_index++);
+ }
+ close();
+ }
+
+private:
+ void open(const std::string &path)
+ {
+ if (!_outfile.is_open())
+ _outfile.open(path, std::ios_base::out);
+
+ _outfile << "# ------ file: " << path << " ------" << std::endl
+ << "tensor_shape_gen = []" << std::endl
+ << "tensor_value_gen = []" << std::endl
+ << std::endl;
+ }
+
+ void printTensor(::tflite::Interpreter &interp, const int id, const int log_index)
+ {
+ const TfLiteTensor *tensor = interp.tensor(id);
+
+ _outfile << "# tensor name: " << tensor->name << std::endl;
+ _outfile << "# tflite::interpreter.tensor(" << id << ") -> "
+ "tensor_value_gen["
+ << log_index << "]" << std::endl;
+
+ if (tensor->type == kTfLiteInt32)
+ {
+ printTensorShape(tensor);
+ printTensorValue<int32_t>(tensor, tensor->data.i32);
+ }
+ else if (interp.tensor(id)->type == kTfLiteUInt8)
+ {
+ printTensorShape(tensor);
+ printTensorValue<uint8_t>(tensor, tensor->data.uint8);
+ }
+ else if (tensor->type == kTfLiteFloat32)
+ {
+ printTensorShape(tensor);
+ printTensorValue<float>(tensor, tensor->data.f);
+ }
+ }
+
+ void printTensorShape(const TfLiteTensor *tensor)
+ {
+ _outfile << "tensor_shape_gen.append('{";
+
+ int r = 0;
+ for (; r < tensor->dims->size - 1; r++)
+ {
+ _outfile << tensor->dims->data[r] << ", ";
+ }
+ _outfile << tensor->dims->data[r];
+
+ _outfile << "}')" << std::endl;
+ }
+
+ template <typename T> void printTensorValue(const TfLiteTensor *tensor, T *tensor_data_ptr)
+ {
+ _outfile << "tensor_value_gen.append([";
+
+ _outfile << std::fixed << std::setprecision(10);
+
+ const T *end = reinterpret_cast<const T *>(tensor->data.raw_const + tensor->bytes);
+ for (T *ptr = tensor_data_ptr; ptr < end; ptr++)
+ _outfile << *ptr << ", ";
+
+ _outfile << "])" << std::endl << std::endl;
+ }
+
+ void close()
+ {
+ _outfile << "# --------- tensor shape and value defined above ---------" << std::endl;
+ _outfile.close();
+ }
+};
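+
+// Usage sketch (illustrative): dump all input/output tensors after an Invoke() call:
+//   nnfw::tflite::TensorLogger::instance().save("tensor_log.py", interp);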
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_TENSOR_LOGGER_H__
diff --git a/libs/tflite/include/tflite/TensorShapeUtils.h b/runtimes/libs/tflite/include/tflite/TensorShapeUtils.h
index ba8687413..ba8687413 100644
--- a/libs/tflite/include/tflite/TensorShapeUtils.h
+++ b/runtimes/libs/tflite/include/tflite/TensorShapeUtils.h
diff --git a/runtimes/libs/tflite/include/tflite/TensorUtils.h b/runtimes/libs/tflite/include/tflite/TensorUtils.h
new file mode 100644
index 000000000..08af1468b
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/TensorUtils.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file TensorUtils.h
+ * @brief This file contains utility functions
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_TENSOR_UTILS_H__
+#define __NNFW_TFLITE_TENSOR_UTILS_H__
+
+#include <tensorflow/lite/context.h>
+
+namespace nnfw
+{
+namespace tflite
+{
+
+/**
+ * @brief Check whether the tensor type is kTfLiteFloat32
+ * @param[in] tensor The tensor object to be checked
+ * @return @c true if tensor type is kTfLiteFloat32, otherwise @c false
+ */
+inline bool isFloatTensor(const TfLiteTensor *tensor) { return tensor->type == kTfLiteFloat32; }
+
+/**
+ * @brief Check whether the tensor is a 4-D tensor whose first dimension length is 1
+ * @param[in] tensor The tensor object to be checked
+ * @return @c true if tensor is 4-D tensor and the first dimension length is 1, otherwise @c false
+ */
+inline bool isFeatureTensor(const TfLiteTensor *tensor)
+{
+ return (tensor->dims->size == 4) && (tensor->dims->data[0] == 1);
+}
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_TENSOR_UTILS_H__
diff --git a/runtimes/libs/tflite/include/tflite/TensorView.h b/runtimes/libs/tflite/include/tflite/TensorView.h
new file mode 100644
index 000000000..ce791a73f
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/TensorView.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file TensorView.h
+ * @brief This file contains TensorView class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_TENSOR_VIEW_H__
+#define __NNFW_TFLITE_TENSOR_VIEW_H__
+
+#include "tensorflow/lite/interpreter.h"
+
+#include "misc/tensor/Shape.h"
+#include "misc/tensor/Index.h"
+#include "misc/tensor/Reader.h"
+#include "misc/tensor/NonIncreasingStride.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+
+/**
+ * @brief Class to define TensorView, which inherits from the nnfw::misc::tensor::Reader<T> class
+ */
+template <typename T> class TensorView final : public nnfw::misc::tensor::Reader<T>
+{
+public:
+ /**
+ * @brief Construct a TensorView object with base address and shape information
+ * @param[in] shape The shape of a tensor
+ * @param[in] base The base address of a tensor
+ */
+ TensorView(const nnfw::misc::tensor::Shape &shape, T *base) : _shape{shape}, _base{base}
+ {
+ // Set 'stride'
+ _stride.init(_shape);
+ }
+
+public:
+ /**
+ * @brief Get shape of tensor
+ * @return Reference of shape
+ */
+ const nnfw::misc::tensor::Shape &shape(void) const { return _shape; }
+
+public:
+ /**
+ * @brief Get value of tensor index
+ * @param[in] index The tensor index
+ * @return The value at the index
+ */
+ T at(const nnfw::misc::tensor::Index &index) const override
+ {
+ const auto offset = _stride.offset(index);
+ return *(_base + offset);
+ }
+
+public:
+ /**
+ * @brief Get reference value of tensor index
+ * @param[in] index The tensor index
+ * @return The reference value at the index
+ */
+ T &at(const nnfw::misc::tensor::Index &index)
+ {
+ const auto offset = _stride.offset(index);
+ return *(_base + offset);
+ }
+
+private:
+ nnfw::misc::tensor::Shape _shape; /**< The tensor shape */
+
+public:
+ T *_base; /**< The base address of tensor */
+ nnfw::misc::tensor::NonIncreasingStride _stride; /**< The NonIncreasingStride object */
+
+public:
+ // TODO Introduce Operand ID class
+ /**
+ * @brief Create TensorView object using given parameters
+ * @param[in] interp The TfLite interpreter
+ * @param[in] tensor_index The tensor index
+ * @return The new TensorView<T> object
+ */
+ static TensorView<T> make(::tflite::Interpreter &interp, int tensor_index)
+ {
+ auto tensor_ptr = interp.tensor(tensor_index);
+
+ // Set 'shape'
+ nnfw::misc::tensor::Shape shape(tensor_ptr->dims->size);
+
+ for (uint32_t axis = 0; axis < shape.rank(); ++axis)
+ {
+ shape.dim(axis) = tensor_ptr->dims->data[axis];
+ }
+
+ return TensorView<T>(shape, interp.typed_tensor<T>(tensor_index));
+ }
+};
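+
+// Usage sketch (illustrative; assumes a rank-4 float output tensor):
+//   auto view = nnfw::tflite::TensorView<float>::make(interp, interp.outputs().at(0));
+//   float first = view.at(nnfw::misc::tensor::Index{0, 0, 0, 0});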
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_TENSOR_VIEW_H__
diff --git a/runtimes/libs/tflite/include/tflite/ext/kernels/Abs.h b/runtimes/libs/tflite/include/tflite/ext/kernels/Abs.h
new file mode 100644
index 000000000..697ba33e9
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/ext/kernels/Abs.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_TFLITE_EXT_KERNELS_ABS_H__
+#define __NNFW_TFLITE_EXT_KERNELS_ABS_H__
+
+#include "tensorflow/lite/context.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace Abs
+{
+
+void *InitAbs(TfLiteContext *context, const char *buffer, size_t length);
+void FreeAbs(TfLiteContext *context, void *buffer);
+TfLiteStatus PrepareAbs(TfLiteContext *context, TfLiteNode *node);
+TfLiteStatus EvalAbs(TfLiteContext *context, TfLiteNode *node);
+
+} // namespace Abs
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_EXT_KERNELS_ABS_H__
diff --git a/runtimes/libs/tflite/include/tflite/ext/kernels/CustomOps.h b/runtimes/libs/tflite/include/tflite/ext/kernels/CustomOps.h
new file mode 100644
index 000000000..3370db778
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/ext/kernels/CustomOps.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file CustomOps.h
+ * @brief This file contains registration of custom operators
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_EXT_KERNELS_CUSTOM_OP_H__
+#define __NNFW_TFLITE_EXT_KERNELS_CUSTOM_OP_H__
+
+#include "tensorflow/lite/context.h"
+#include "tflite/ext/kernels/TensorFlowMax.h"
+#include "tflite/ext/kernels/SquaredDifference.h"
+#include "tflite/ext/kernels/TensorFlowSum.h"
+#include "tflite/ext/kernels/Abs.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+
+#define REGISTER_FUNCTION(Name) \
+ TfLiteRegistration *Register_##Name(void) \
+ { \
+ static TfLiteRegistration r = {}; \
+ r.init = Name::Init##Name; \
+ r.free = Name::Free##Name; \
+ r.prepare = Name::Prepare##Name; \
+ r.invoke = Name::Eval##Name; \
+ r.custom_name = #Name; \
+ return &r; \
+ }
+
+REGISTER_FUNCTION(TensorFlowMax)
+REGISTER_FUNCTION(SquaredDifference)
+REGISTER_FUNCTION(TensorFlowSum)
+REGISTER_FUNCTION(Abs)
+
+#undef REGISTER_FUNCTION
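+
+// Each REGISTER_FUNCTION(Name) above defines a factory Register_Name() that returns
+// a TfLiteRegistration. A resolver can then pick the kernels up (illustrative):
+//   ::tflite::MutableOpResolver resolver;
+//   resolver.AddCustom("Abs", nnfw::tflite::custom::Register_Abs());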
+
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_EXT_KERNELS_CUSTOM_OP_H__
diff --git a/runtimes/libs/tflite/include/tflite/ext/kernels/SquaredDifference.h b/runtimes/libs/tflite/include/tflite/ext/kernels/SquaredDifference.h
new file mode 100644
index 000000000..5512ead78
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/ext/kernels/SquaredDifference.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file SquaredDifference.h
+ * @brief This file contains SquaredDifference namespace and SquaredDifference function
+ * definitions
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_EXT_KERNELS_SQUARED_DIFFERENCE_H__
+#define __NNFW_TFLITE_EXT_KERNELS_SQUARED_DIFFERENCE_H__
+
+#include "tensorflow/lite/context.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace SquaredDifference
+{
+
+/**
+ * @brief Initialize the SquaredDifference operator using the contents of buffer
+ * @param[in] context The TfLite context
+ * @param[in] buffer The buffer with contents
+ * @param[in] length The buffer length
+ * @return The void pointer for user data
+ */
+void *InitSquaredDifference(TfLiteContext *context, const char *buffer, size_t length);
+
+/**
+ * @brief Release any memory that 'InitSquaredDifference' might have allocated
+ * @param[in] context The TfLite context
+ * @param[in] buffer The buffer with contents
+ * @return N/A
+ */
+void FreeSquaredDifference(TfLiteContext *context, void *buffer);
+
+/**
+ * @brief Prepare the SquaredDifference operator for execution
+ * @param[in] context The TfLite context
+ * @param[in] node The operand node
+ * @return The TfLite status
+ */
+TfLiteStatus PrepareSquaredDifference(TfLiteContext *context, TfLiteNode *node);
+
+/**
+ * @brief Evaluate the SquaredDifference operator
+ * @param[in] context The TfLite context
+ * @param[in] node The operand node
+ * @return The TfLite status
+ */
+TfLiteStatus EvalSquaredDifference(TfLiteContext *context, TfLiteNode *node);
+
+} // namespace SquaredDifference
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_EXT_KERNELS_SQUARED_DIFFERENCE_H__
diff --git a/runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h b/runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h
new file mode 100644
index 000000000..d573308ed
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file TensorFlowMax.h
+ * @brief This file contains TensorFlowMax namespace and TensorFlowMax function definitions
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__
+#define __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__
+
+#include "tensorflow/lite/context.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace TensorFlowMax
+{
+
+/**
+ * @brief Initialize the TensorFlowMax operator using the contents of buffer
+ * @param[in] context The TfLite context
+ * @param[in] buffer The buffer with contents
+ * @param[in] length The buffer length
+ * @return The void pointer for user data
+ */
+void *InitTensorFlowMax(TfLiteContext *context, const char *buffer, size_t length);
+
+/**
+ * @brief Release any memory that 'InitTensorFlowMax' might have allocated
+ * @param[in] context The TfLite context
+ * @param[in] buffer The buffer with contents
+ * @return N/A
+ */
+void FreeTensorFlowMax(TfLiteContext *context, void *buffer);
+
+/**
+ * @brief Prepare the TensorFlowMax operator for execution
+ * @param[in] context The TfLite context
+ * @param[in] node The operand node
+ * @return The TfLite status
+ */
+TfLiteStatus PrepareTensorFlowMax(TfLiteContext *context, TfLiteNode *node);
+
+/**
+ * @brief Evaluate the TensorFlowMax operator
+ * @param[in] context The TfLite context
+ * @param[in] node The operand node
+ * @return The TfLite status
+ */
+TfLiteStatus EvalTensorFlowMax(TfLiteContext *context, TfLiteNode *node);
+
+} // namespace TensorFlowMax
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__
diff --git a/runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h b/runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h
new file mode 100644
index 000000000..29455aac5
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowSum.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_SUM_H__
+#define __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_SUM_H__
+
+#include "tensorflow/lite/context.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace TensorFlowSum
+{
+
+void *InitTensorFlowSum(TfLiteContext *context, const char *buffer, size_t length);
+void FreeTensorFlowSum(TfLiteContext *context, void *buffer);
+TfLiteStatus PrepareTensorFlowSum(TfLiteContext *context, TfLiteNode *node);
+TfLiteStatus EvalTensorFlowSum(TfLiteContext *context, TfLiteNode *node);
+
+} // namespace TensorFlowSum
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_SUM_H__
diff --git a/runtimes/libs/tflite/include/tflite/ext/kernels/register.h b/runtimes/libs/tflite/include/tflite/ext/kernels/register.h
new file mode 100644
index 000000000..7d2bd786d
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/ext/kernels/register.h
@@ -0,0 +1,42 @@
+/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This header is derived from the following file (in TensorFlow)
+// 'externals/tensorflow/tensorflow/lite/kernels/register.h'
+#ifndef __NNFW_TFLITE_EXT_KERNELS_REGISTER_H__
+#define __NNFW_TFLITE_EXT_KERNELS_REGISTER_H__
+
+#include <unordered_map>
+#include "tensorflow/lite/context.h"
+#include "tensorflow/lite/model.h"
+
+namespace nnfw {
+namespace tflite {
+
+class BuiltinOpResolver : public ::tflite::MutableOpResolver {
+ public:
+ BuiltinOpResolver();
+};
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_EXT_KERNELS_REGISTER_H__
+
+// clang-format on
diff --git a/runtimes/libs/tflite/include/tflite/ext/nnapi_delegate.h b/runtimes/libs/tflite/include/tflite/ext/nnapi_delegate.h
new file mode 100644
index 000000000..21017b29f
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/ext/nnapi_delegate.h
@@ -0,0 +1,96 @@
+/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This header is derived from the following file (in TensorFlow v1.12)
+// 'externals/tensorflow/tensorflow/lite/nnapi_delegate.h'
+#ifndef __NNFW_TFLITE_EXT_NNAPI_DELEGATE_H__
+#define __NNFW_TFLITE_EXT_NNAPI_DELEGATE_H__
+
+#include "tensorflow/lite/allocation.h"
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 9
+#include "tensorflow/lite/context.h"
+#include "tensorflow/lite/error_reporter.h"
+#else
+#include "tensorflow/lite/c/c_api_internal.h"
+#include "tensorflow/lite/core/api/error_reporter.h"
+#endif
+#include "tensorflow/lite/interpreter.h"
+
+struct ANeuralNetworksModel;
+struct ANeuralNetworksMemory;
+struct ANeuralNetworksCompilation;
+
+namespace nnfw {
+namespace tflite {
+
+class NNAPIAllocation : public ::tflite::MMAPAllocation {
+ public:
+ NNAPIAllocation(const char* filename, ::tflite::ErrorReporter* error_reporter);
+ ~NNAPIAllocation();
+
+ size_t offset(const void* ptr) const {
+ auto signed_offset = reinterpret_cast<const uint8_t*>(ptr) -
+ reinterpret_cast<const uint8_t*>(mmapped_buffer_);
+
+ return static_cast<size_t>(signed_offset);
+ }
+
+ ANeuralNetworksMemory* memory() const { return handle_; }
+ bool valid() const override { return handle_ != nullptr; }
+
+ private:
+ mutable ANeuralNetworksMemory* handle_ = nullptr;
+};
+
+class NNAPIDelegate {
+ public:
+ ~NNAPIDelegate();
+
+ // Convert a tflite graph to NNAPI
+ TfLiteStatus BuildGraph(::tflite::Interpreter* interpreter);
+
+ // Run
+ TfLiteStatus Invoke(::tflite::Interpreter* interpreter);
+
+ // Whether the current platform supports NNAPI delegation.
+ static bool IsSupported();
+
+ private:
+ // The NN API model handle
+ ANeuralNetworksModel* nn_model_ = nullptr;
+ // The NN API compilation handle
+ ANeuralNetworksCompilation* nn_compiled_model_ = nullptr;
+ // Model status
+ TfLiteStatus model_status_ = kTfLiteOk;
+
+ // List of state tensors for LSTM, RNN, SVDF.
+ // NN API does not allow ops to maintain states across multiple
+ // invocations. We need to manually create state input tensors from
+ // corresponding state output tensors of TFLite operations, and map them
+ // correctly.
+ std::vector<int> model_states_inputs_; // holds NNAPI operand ids
+ std::vector<int> model_states_outputs_; // holds TFLite tensor ids
+};
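+
+// Usage sketch (illustrative): build the NNAPI model once, then invoke per run:
+//   nnfw::tflite::NNAPIDelegate delegate;
+//   if (nnfw::tflite::NNAPIDelegate::IsSupported() &&
+//       delegate.BuildGraph(&interpreter) == kTfLiteOk)
+//     delegate.Invoke(&interpreter);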
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_EXT_NNAPI_DELEGATE_H__
+
+// clang-format on
diff --git a/runtimes/libs/tflite/include/tflite/interp/Builder.h b/runtimes/libs/tflite/include/tflite/interp/Builder.h
new file mode 100644
index 000000000..0f54e1779
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/interp/Builder.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Builder.h
+ * @brief This file contains Builder structure
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_INTERP_BUILDER_H__
+#define __NNFW_TFLITE_INTERP_BUILDER_H__
+
+#include <tensorflow/lite/interpreter.h>
+
+namespace nnfw
+{
+namespace tflite
+{
+
+/**
+ * @brief Structure to provide an interface for building TfLite interpreters
+ */
+struct Builder
+{
+ /**
+ * @brief Destroy the Builder object
+ */
+ virtual ~Builder() = default;
+
+ /**
+ * @brief Build a TfLite interpreter
+ * @return The TfLite interpreter object
+ */
+ virtual std::unique_ptr<::tflite::Interpreter> build(void) const = 0;
+};
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_INTERP_BUILDER_H__
diff --git a/runtimes/libs/tflite/include/tflite/interp/FlatBufferBuilder.h b/runtimes/libs/tflite/include/tflite/interp/FlatBufferBuilder.h
new file mode 100644
index 000000000..2d96af50b
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/interp/FlatBufferBuilder.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file FlatBufferBuilder.h
+ * @brief This file contains FlatBufferBuilder class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_INTERP_FLAT_BUFFER_BUILDER_H__
+#define __NNFW_TFLITE_INTERP_FLAT_BUFFER_BUILDER_H__
+
+#include <tensorflow/lite/model.h>
+
+#include "tflite/interp/Builder.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+
+/**
+ * @brief Class to define FlatBufferBuilder, which inherits from Builder
+ */
+class FlatBufferBuilder final : public Builder
+{
+public:
+ /**
+ * @brief Construct a FlatBufferBuilder object with FlatBufferModel of TfLite
+ * @param[in] model The TfLite Flatbuffer model
+ */
+ FlatBufferBuilder(const ::tflite::FlatBufferModel &model) : _model{model}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Build a TfLite interpreter from the FlatBuffer model
+ * @return A unique pointer to the built TfLite interpreter
+ */
+ std::unique_ptr<::tflite::Interpreter> build(void) const override;
+
+private:
+ const ::tflite::FlatBufferModel &_model;
+};
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_INTERP_FLAT_BUFFER_BUILDER_H__
diff --git a/runtimes/libs/tflite/include/tflite/interp/FunctionBuilder.h b/runtimes/libs/tflite/include/tflite/interp/FunctionBuilder.h
new file mode 100644
index 000000000..7bfb8db2d
--- /dev/null
+++ b/runtimes/libs/tflite/include/tflite/interp/FunctionBuilder.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file FunctionBuilder.h
+ * @brief This file contains FunctionBuilder class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NNFW_TFLITE_INTERP_FUNCTION_BUILDER_H__
+#define __NNFW_TFLITE_INTERP_FUNCTION_BUILDER_H__
+
+#include <tensorflow/lite/model.h>
+
+#include "tflite/interp/Builder.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+
+/**
+ * @brief Class to define FunctionBuilder, which inherits from Builder
+ */
+class FunctionBuilder final : public Builder
+{
+public:
+ using SetupFunc = std::function<void(::tflite::Interpreter &)>;
+
+public:
+ /**
+ * @brief Construct a FunctionBuilder object with SetupFunction
+ * @param[in] fn The SetupFunc object
+ */
+ FunctionBuilder(const SetupFunc &fn) : _fn{fn}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Build a TfLite interpreter and apply the setup function to it
+ * @return A unique pointer to the built TfLite interpreter
+ */
+ std::unique_ptr<::tflite::Interpreter> build(void) const override;
+
+private:
+ SetupFunc _fn;
+};
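+
+// Usage sketch (illustrative): build an interpreter through a setup callback:
+//   nnfw::tflite::FunctionBuilder builder{[](::tflite::Interpreter &interp) {
+//     // populate tensors and operators on 'interp' here
+//   }};
+//   auto interp = builder.build();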
+
+} // namespace tflite
+} // namespace nnfw
+
+#endif // __NNFW_TFLITE_INTERP_FUNCTION_BUILDER_H__
diff --git a/runtimes/libs/tflite/src/Diff.cpp b/runtimes/libs/tflite/src/Diff.cpp
new file mode 100644
index 000000000..414aef207
--- /dev/null
+++ b/runtimes/libs/tflite/src/Diff.cpp
@@ -0,0 +1,596 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/Diff.h"
+#include "tflite/ext/nnapi_delegate.h"
+
+#include "misc/fp32.h"
+
+#include "misc/tensor/IndexIterator.h"
+#include "misc/tensor/IndexFormatter.h"
+#include "misc/tensor/Zipper.h"
+#include "misc/tensor/Comparator.h"
+
+#include "misc/EnvVar.h"
+
+#include <iostream>
+#include <cassert>
+
+class DiffSummary : public nnfw::misc::tensor::Comparator::Observer
+{
+public:
+ DiffSummary()
+ : max_abs_diff_index(0), max_abs_diff_expected{0.0f}, max_abs_diff_obtained{0.0f},
+ max_abs_diff_value{0.0f}, max_rel_diff_index(0), max_rel_diff_expected{0.0f},
+ max_rel_diff_obtained{0.0f}, max_rel_diff_value{0.0f}
+ {
+ // DO NOTHING
+ }
+
+public:
+ void notify(const nnfw::misc::tensor::Index &index, float expected, float obtained) override;
+
+public:
+ nnfw::misc::tensor::Index max_abs_diff_index;
+ float max_abs_diff_expected;
+ float max_abs_diff_obtained;
+ float max_abs_diff_value;
+
+ nnfw::misc::tensor::Index max_rel_diff_index;
+ float max_rel_diff_expected;
+ float max_rel_diff_obtained;
+ float max_rel_diff_value;
+};
+
+void DiffSummary::notify(const nnfw::misc::tensor::Index &index, float expected, float obtained)
+{
+ const auto abs_diff_value = std::fabs(expected - obtained);
+
+ if (max_abs_diff_value < abs_diff_value)
+ {
+ max_abs_diff_index = index;
+ max_abs_diff_value = abs_diff_value;
+ max_abs_diff_expected = expected;
+ max_abs_diff_obtained = obtained;
+ }
+
+ const auto rel_diff_value = nnfw::misc::fp32::relative_diff(expected, obtained);
+
+ if (max_rel_diff_value < rel_diff_value)
+ {
+ max_rel_diff_index = index;
+ max_rel_diff_value = rel_diff_value;
+ max_rel_diff_expected = expected;
+ max_rel_diff_obtained = obtained;
+ }
+}
+
+template <typename T>
+bool TfLiteInterpMatchApp::compareSingleTensorView(const nnfw::tflite::TensorView<T> &expected,
+ const nnfw::tflite::TensorView<T> &obtained,
+ int id) const
+{
+ std::vector<nnfw::misc::tensor::Diff<T>> diffs;
+ assert(expected.shape() == obtained.shape());
+
+ using nnfw::misc::tensor::zip;
+ using nnfw::misc::tensor::Index;
+
+ zip(expected.shape(), expected, obtained)
+ << [&](const Index &index, T expected_value, T obtained_value) {
+ if (expected_value != obtained_value)
+ {
+ diffs.emplace_back(index, expected_value, obtained_value);
+ }
+ };
+
+ // TODO Unify summary generation code
+ if (diffs.size() == 0)
+ {
+ std::cout << " Tensor #" << id << ": MATCHED" << std::endl;
+ }
+ else
+ {
+ std::cout << " Tensor #" << id << ": UNMATCHED" << std::endl;
+ std::cout << " " << diffs.size() << " diffs are detected" << std::endl;
+ }
+
+ if (diffs.size() > 0 && _verbose != 0)
+ {
+ std::cout << " ---- Details ---" << std::endl;
+ for (const auto &diff : diffs)
+ {
+ std::cout << " Diff at [" << nnfw::misc::tensor::IndexFormatter(diff.index) << "]"
+ << std::endl;
+ std::cout << " expected: " << diff.expected << std::endl;
+ std::cout << " obtained: " << diff.obtained << std::endl;
+ }
+ }
+
+ return diffs.size() == 0;
+}
+
+template <>
+bool TfLiteInterpMatchApp::compareSingleTensorView<float>(
+ const nnfw::tflite::TensorView<float> &expected,
+ const nnfw::tflite::TensorView<float> &obtained, int id) const
+{
+ DiffSummary summary;
+
+ assert(expected.shape() == obtained.shape());
+ auto diffs = _comparator.compare(expected.shape(), expected, obtained, &summary);
+
+ // TODO Unify summary generation code
+ if (diffs.size() == 0)
+ {
+ std::cout << " Tensor #" << id << ": MATCHED" << std::endl;
+ }
+ else
+ {
+ std::cout << " Tensor #" << id << ": UNMATCHED" << std::endl;
+ std::cout << " " << diffs.size() << " diffs are detected" << std::endl;
+ }
+
+ // Print out max_diff
+ if (summary.max_abs_diff_value > 0)
+ {
+ std::cout << " Max absolute diff at ["
+ << nnfw::misc::tensor::IndexFormatter(summary.max_abs_diff_index) << "]" << std::endl;
+ std::cout << " expected: " << summary.max_abs_diff_expected << std::endl;
+ std::cout << " obtained: " << summary.max_abs_diff_obtained << std::endl;
+ std::cout << " absolute diff: " << summary.max_abs_diff_value << std::endl;
+ }
+
+ if (summary.max_rel_diff_value > 0)
+ {
+ const auto tolerance_level = summary.max_rel_diff_value / FLT_EPSILON;
+
+ std::cout << " Max relative diff at ["
+ << nnfw::misc::tensor::IndexFormatter(summary.max_rel_diff_index) << "]" << std::endl;
+ std::cout << " expected: " << summary.max_rel_diff_expected << std::endl;
+ std::cout << " obtained: " << summary.max_rel_diff_obtained << std::endl;
+ std::cout << " relative diff: " << summary.max_rel_diff_value << std::endl;
+ std::cout << " (tolerance level = " << tolerance_level << ")" << std::endl;
+ }
+
+ if (diffs.size() > 0)
+ {
+ if (_verbose != 0)
+ {
+ std::cout << " ---- Details ---" << std::endl;
+ for (const auto &diff : diffs)
+ {
+ const auto absolute_diff = std::fabs(diff.expected - diff.obtained);
+ const auto relative_diff = nnfw::misc::fp32::relative_diff(diff.expected, diff.obtained);
+ const auto tolerance_level = relative_diff / FLT_EPSILON;
+
+ std::cout << " Diff at [" << nnfw::misc::tensor::IndexFormatter(diff.index) << "]"
+ << std::endl;
+ std::cout << " expected: " << diff.expected << std::endl;
+ std::cout << " obtained: " << diff.obtained << std::endl;
+ std::cout << " absolute diff: " << absolute_diff << std::endl;
+ std::cout << " relative diff: " << relative_diff << std::endl;
+ std::cout << " (tolerance level = " << tolerance_level << ")" << std::endl;
+ }
+ }
+
+ return false;
+ }
+ return true;
+}
+
+#include <map>
+
+bool TfLiteInterpMatchApp::run(::tflite::Interpreter &interp, ::tflite::Interpreter &nnapi) const
+{
+ assert(interp.outputs() == nnapi.outputs());
+
+ bool all_matched = true;
+
+ using Comparator = std::function<bool(int id, ::tflite::Interpreter &, ::tflite::Interpreter &)>;
+
+ std::map<TfLiteType, Comparator> comparators;
+
+ comparators[kTfLiteUInt8] = [this](int id, ::tflite::Interpreter &interp,
+ ::tflite::Interpreter &nnapi) {
+ const auto expected = nnfw::tflite::TensorView<uint8_t>::make(interp, id);
+ const auto obtained = nnfw::tflite::TensorView<uint8_t>::make(nnapi, id);
+
+ return compareSingleTensorView(expected, obtained, id);
+ };
+
+ comparators[kTfLiteInt32] = [this](int id, ::tflite::Interpreter &interp,
+ ::tflite::Interpreter &nnapi) {
+ const auto expected = nnfw::tflite::TensorView<int32_t>::make(interp, id);
+ const auto obtained = nnfw::tflite::TensorView<int32_t>::make(nnapi, id);
+
+ return compareSingleTensorView(expected, obtained, id);
+ };
+
+ comparators[kTfLiteFloat32] = [this](int id, ::tflite::Interpreter &interp,
+ ::tflite::Interpreter &nnapi) {
+ const auto expected = nnfw::tflite::TensorView<float>::make(interp, id);
+ const auto obtained = nnfw::tflite::TensorView<float>::make(nnapi, id);
+
+ return compareSingleTensorView(expected, obtained, id);
+ };
+
+ comparators[kTfLiteBool] = [this](int id, ::tflite::Interpreter &interp,
+ ::tflite::Interpreter &nnapi) {
+ const auto expected = nnfw::tflite::TensorView<bool>::make(interp, id);
+ const auto obtained = nnfw::tflite::TensorView<bool>::make(nnapi, id);
+
+ return compareSingleTensorView(expected, obtained, id);
+ };
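+
+  // Outputs are dispatched on their TfLiteType: any output type without an
+  // entry in this map is rejected below with a runtime_error.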
+
+ for (const auto &id : interp.outputs())
+ {
+ assert(interp.tensor(id)->type == nnapi.tensor(id)->type);
+
+ auto it = comparators.find(interp.tensor(id)->type);
+
+ if (it == comparators.end())
+ {
+ throw std::runtime_error{"Not supported output type"};
+ }
+
+ const auto &comparator = it->second;
+
+ if (!comparator(id, interp, nnapi))
+ {
+ all_matched = false;
+ }
+ }
+
+ return all_matched;
+}
+
+#include "misc/tensor/Object.h"
+
+using namespace std::placeholders;
+
+template <> uint8_t RandomGenerator::generate<uint8_t>(void)
+{
+ // The value of type_range is 255.
+ float type_range = static_cast<float>(std::numeric_limits<uint8_t>::max()) -
+ static_cast<float>(std::numeric_limits<uint8_t>::min());
+ // Most _dist values range from -5.0 to 5.0.
+ float min_range = -5.0f;
+ float max_range = 5.0f;
+ return static_cast<uint8_t>((_dist(_rand) - min_range) * type_range / (max_range - min_range));
+}
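+// For reference, a _dist sample of 0.0f maps above to
+// (0.0f - (-5.0f)) * 255.0f / 10.0f = 127.5f, which truncates to 127,
+// the midpoint of the uint8 range.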
+
+#include "tflite/TensorLogger.h"
+//
+// Random Test Runner
+//
+int RandomTestRunner::run(const nnfw::tflite::Builder &builder)
+{
+ auto tfl_interp = builder.build();
+ auto nnapi = builder.build();
+
+ tfl_interp->UseNNAPI(false);
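+
+  // Two interpreters are built from the same model: 'tfl_interp' stays on the
+  // plain TensorFlow Lite kernels as the reference, while 'nnapi' is routed
+  // through NNAPI below and compared against it.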
+
+ // Allocate Tensors
+ tfl_interp->AllocateTensors();
+ nnapi->AllocateTensors();
+
+ assert(tfl_interp->inputs() == nnapi->inputs());
+
+ using ::tflite::Interpreter;
+ using Initializer = std::function<void(int id, Interpreter *, Interpreter *)>;
+
+ std::map<TfLiteType, Initializer> initializers;
+ std::map<TfLiteType, Initializer> reseters;
+
+  // Generate signed 32-bit integer (s32) input
+ initializers[kTfLiteInt32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteInt32);
+ assert(nnapi->tensor(id)->type == kTfLiteInt32);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<int32_t>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<int32_t>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+ int32_t value = 0;
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ // TODO Generate random values
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ ++value;
+ };
+ };
+
+  // Reset signed 32-bit integer (s32) tensors to zero
+ reseters[kTfLiteInt32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteInt32);
+ assert(nnapi->tensor(id)->type == kTfLiteInt32);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<int32_t>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<int32_t>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+ int32_t value = 0;
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ };
+ };
+
+ initializers[kTfLiteUInt8] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteUInt8);
+ assert(nnapi->tensor(id)->type == kTfLiteUInt8);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<uint8_t>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<uint8_t>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+ auto fp = static_cast<uint8_t (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
+ const ::nnfw::misc::tensor::Index &)>(
+ &RandomGenerator::generate<uint8_t>);
+ const nnfw::misc::tensor::Object<uint8_t> data(tfl_interp_view.shape(),
+ std::bind(fp, _randgen, _1, _2));
+ assert(tfl_interp_view.shape() == data.shape());
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ const auto value = data.at(ind);
+
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ };
+ };
+
+ reseters[kTfLiteUInt8] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteUInt8);
+ assert(nnapi->tensor(id)->type == kTfLiteUInt8);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<uint8_t>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<uint8_t>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+    // NOTE No random data is needed here: outputs are simply cleared to zero.
+
+ uint8_t value = 0;
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ };
+ };
+
+ initializers[kTfLiteFloat32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteFloat32);
+ assert(nnapi->tensor(id)->type == kTfLiteFloat32);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<float>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<float>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+ auto fp = static_cast<float (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
+ const ::nnfw::misc::tensor::Index &)>(
+ &RandomGenerator::generate<float>);
+ const nnfw::misc::tensor::Object<float> data(tfl_interp_view.shape(),
+ std::bind(fp, _randgen, _1, _2));
+
+ assert(tfl_interp_view.shape() == data.shape());
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ const auto value = data.at(ind);
+
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ };
+ };
+
+ reseters[kTfLiteFloat32] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteFloat32);
+ assert(nnapi->tensor(id)->type == kTfLiteFloat32);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<float>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<float>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+    // NOTE No random data is needed here: outputs are cleared to zero.
+
+ float value = 0;
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ };
+ };
+
+ initializers[kTfLiteBool] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteBool);
+ assert(nnapi->tensor(id)->type == kTfLiteBool);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<bool>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<bool>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+ auto fp = static_cast<bool (RandomGenerator::*)(const ::nnfw::misc::tensor::Shape &,
+ const ::nnfw::misc::tensor::Index &)>(
+ &RandomGenerator::generate<bool>);
+ const nnfw::misc::tensor::Object<bool> data(tfl_interp_view.shape(),
+ std::bind(fp, _randgen, _1, _2));
+
+ assert(tfl_interp_view.shape() == data.shape());
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ const auto value = data.at(ind);
+
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ };
+ };
+
+ reseters[kTfLiteBool] = [&](int id, Interpreter *tfl_interp, Interpreter *nnapi) {
+ assert(tfl_interp->tensor(id)->type == kTfLiteBool);
+ assert(nnapi->tensor(id)->type == kTfLiteBool);
+
+ auto tfl_interp_view = nnfw::tflite::TensorView<bool>::make(*tfl_interp, id);
+ auto nnapi_view = nnfw::tflite::TensorView<bool>::make(*nnapi, id);
+
+ assert(tfl_interp_view.shape() == nnapi_view.shape());
+
+    // NOTE No random data is needed here: outputs are cleared to false.
+
+ bool value = false;
+
+ nnfw::misc::tensor::iterate(tfl_interp_view.shape())
+ << [&](const nnfw::misc::tensor::Index &ind) {
+ tfl_interp_view.at(ind) = value;
+ nnapi_view.at(ind) = value;
+ };
+ };
+
+ // Fill IFM with random numbers
+ for (const auto id : tfl_interp->inputs())
+ {
+ assert(tfl_interp->tensor(id)->type == nnapi->tensor(id)->type);
+
+ auto it = initializers.find(tfl_interp->tensor(id)->type);
+
+ if (it == initializers.end())
+ {
+ throw std::runtime_error{"Not supported input type"};
+ }
+
+ it->second(id, tfl_interp.get(), nnapi.get());
+ }
+
+ // Fill OFM with 0
+ for (const auto id : tfl_interp->outputs())
+ {
+ assert(tfl_interp->tensor(id)->type == nnapi->tensor(id)->type);
+
+ auto it = reseters.find(tfl_interp->tensor(id)->type);
+
+ if (it == reseters.end())
+ {
+ throw std::runtime_error{"Not supported input type"};
+ }
+
+ it->second(id, tfl_interp.get(), nnapi.get());
+ }
+
+ std::cout << "[NNAPI TEST] Run T/F Lite Interpreter without NNAPI" << std::endl;
+ tfl_interp->Invoke();
+
+ std::cout << "[NNAPI TEST] Run T/F Lite Interpreter with NNAPI" << std::endl;
+
+ char *env = getenv("UPSTREAM_DELEGATE");
+
+ if (env && !std::string(env).compare("1"))
+ {
+ nnapi->UseNNAPI(true);
+ nnapi->Invoke();
+ }
+ else
+ {
+ nnfw::tflite::NNAPIDelegate d;
+
+ if (d.BuildGraph(nnapi.get()))
+ {
+ throw std::runtime_error{"Failed to BuildGraph"};
+ }
+
+ if (d.Invoke(nnapi.get()))
+ {
+ throw std::runtime_error{"Failed to BuildGraph"};
+ }
+ }
+
+ // Compare OFM
+ std::cout << "[NNAPI TEST] Compare the result" << std::endl;
+
+ const auto tolerance = _param.tolerance;
+
+ auto equals = [tolerance](float lhs, float rhs) {
+ // NOTE Hybrid approach
+ // TODO Allow users to set tolerance for absolute_epsilon_equal
+ if (nnfw::misc::fp32::absolute_epsilon_equal(lhs, rhs))
+ {
+ return true;
+ }
+
+ return nnfw::misc::fp32::epsilon_equal(lhs, rhs, tolerance);
+ };
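+
+  // The hybrid check above first accepts values that are close in absolute
+  // terms (robust near zero, where relative error blows up), then falls back
+  // to a relative comparison scaled by the TOLERANCE-derived 'tolerance'.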
+
+ nnfw::misc::tensor::Comparator comparator(equals);
+ TfLiteInterpMatchApp app(comparator);
+
+ app.verbose() = _param.verbose;
+
+ bool res = app.run(*tfl_interp, *nnapi);
+
+ if (!res)
+ {
+ return 255;
+ }
+
+ std::cout << "[NNAPI TEST] PASSED" << std::endl;
+
+ if (_param.tensor_logging)
+ nnfw::tflite::TensorLogger::instance().save(_param.log_path, *tfl_interp);
+
+ return 0;
+}
+
+RandomTestRunner RandomTestRunner::make(int seed)
+{
+ RandomTestParam param;
+
+ param.verbose = nnfw::misc::EnvVar("VERBOSE").asInt(0);
+  param.tolerance = nnfw::misc::EnvVar("TOLERANCE").asInt(1);
+
+ return RandomTestRunner{seed, param};
+}
diff --git a/libs/tflite/src/FeatureView.cpp b/runtimes/libs/tflite/src/FeatureView.cpp
index fdf5a4b00..fdf5a4b00 100644
--- a/libs/tflite/src/FeatureView.cpp
+++ b/runtimes/libs/tflite/src/FeatureView.cpp
diff --git a/libs/tflite/src/Quantization.cpp b/runtimes/libs/tflite/src/Quantization.cpp
index 9c162c342..9c162c342 100644
--- a/libs/tflite/src/Quantization.cpp
+++ b/runtimes/libs/tflite/src/Quantization.cpp
diff --git a/runtimes/libs/tflite/src/TensorShapeUtils.cpp b/runtimes/libs/tflite/src/TensorShapeUtils.cpp
new file mode 100644
index 000000000..29628cd26
--- /dev/null
+++ b/runtimes/libs/tflite/src/TensorShapeUtils.cpp
@@ -0,0 +1,29 @@
+#include "tflite/TensorShapeUtils.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+
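+// Computes the broadcast shape of two shapes, NumPy-style: ranks are
+// right-aligned, missing leading dimensions count as 1, and each output
+// dimension is the per-axis maximum (dimension compatibility is assumed,
+// not checked).
+// Example: broadcast([3, 1, 5], [4, 1]) aligns the shapes as [3, 1, 5] vs
+// [1, 4, 1] and yields [3, 4, 5].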
+nnfw::misc::tensor::Shape broadcast(const nnfw::misc::tensor::Shape &lhs_shape,
+ const nnfw::misc::tensor::Shape &rhs_shape)
+{
+ const uint32_t lhs_rank = lhs_shape.rank();
+ const uint32_t rhs_rank = rhs_shape.rank();
+ const uint32_t out_rank = std::max(lhs_rank, rhs_rank);
+ const uint32_t lhs_rank_diff = out_rank - lhs_rank;
+ const uint32_t rhs_rank_diff = out_rank - rhs_rank;
+
+ nnfw::misc::tensor::Shape out_shape(out_rank);
+
+ for (uint32_t axis = 0; axis < out_rank; ++axis)
+ {
+ out_shape.dim(axis) = std::max(axis < lhs_rank_diff ? 1 : lhs_shape.dim(axis - lhs_rank_diff),
+ axis < rhs_rank_diff ? 1 : rhs_shape.dim(axis - rhs_rank_diff));
+ }
+
+ return out_shape;
+}
+
+} // namespace tflite
+} // namespace nnfw
diff --git a/libs/tflite/src/TensorView.test.cpp b/runtimes/libs/tflite/src/TensorView.test.cpp
index c710b3c33..c710b3c33 100644
--- a/libs/tflite/src/TensorView.test.cpp
+++ b/runtimes/libs/tflite/src/TensorView.test.cpp
diff --git a/runtimes/libs/tflite/src/ext/kernels/Abs.cpp b/runtimes/libs/tflite/src/ext/kernels/Abs.cpp
new file mode 100644
index 000000000..61181465d
--- /dev/null
+++ b/runtimes/libs/tflite/src/ext/kernels/Abs.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/ext/kernels/Abs.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+
+#include <iostream>
+#include <cmath>
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace Abs
+{
+
+void *InitAbs(TfLiteContext *, const char *, size_t) { return nullptr; }
+
+void FreeAbs(TfLiteContext *, void *) {}
+
+TfLiteStatus PrepareAbs(TfLiteContext *context, TfLiteNode *node)
+{
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 1);
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
+
+ const TfLiteTensor *input = ::tflite::GetInput(context, node, 0);
+ TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
+
+ TF_LITE_ENSURE_EQ(context, input->type, output->type);
+
+ return context->ResizeTensor(context, output, TfLiteIntArrayCopy(input->dims));
+}
+
+TfLiteStatus EvalAbs(TfLiteContext *context, TfLiteNode *node)
+{
+ const TfLiteTensor *input = ::tflite::GetInput(context, node, 0);
+ TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
+ size_t elements = ::tflite::NumElements(input);
+ switch (input->type)
+ {
+ case kTfLiteFloat32:
+ {
+ auto *in = input->data.f;
+ auto *in_end = in + elements;
+ auto *out = output->data.f;
+ for (; in < in_end; in++, out++)
+ *out = std::abs(*in);
+ return kTfLiteOk;
+ }
+ case kTfLiteInt32:
+ {
+ auto *in = input->data.i32;
+ auto *in_end = in + elements;
+ auto *out = output->data.i32;
+ for (; in < in_end; in++, out++)
+ *out = std::abs(*in);
+ return kTfLiteOk;
+ }
+ case kTfLiteInt64:
+ {
+ auto *in = input->data.i64;
+ auto *in_end = in + elements;
+ auto *out = output->data.i64;
+ for (; in < in_end; in++, out++)
+ *out = std::abs(*in);
+ return kTfLiteOk;
+ }
+ case kTfLiteUInt8:
+ {
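+      // uint8 values are already non-negative, so Abs reduces to a plain copy.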
+ auto *in = input->data.uint8;
+ auto *in_end = in + elements;
+ auto *out = output->data.uint8;
+ for (; in < in_end; in++, out++)
+ *out = *in;
+ return kTfLiteOk;
+ }
+ default:
+ {
+ context->ReportError(context, "Input type %d is not supported", input->type);
+ return kTfLiteError;
+ }
+ }
+}
+
+} // namespace Abs
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
diff --git a/runtimes/libs/tflite/src/ext/kernels/SquaredDifference.cpp b/runtimes/libs/tflite/src/ext/kernels/SquaredDifference.cpp
new file mode 100644
index 000000000..615878513
--- /dev/null
+++ b/runtimes/libs/tflite/src/ext/kernels/SquaredDifference.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/ext/kernels/SquaredDifference.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+
+#include <iostream>
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace SquaredDifference
+{
+
+void *InitSquaredDifference(TfLiteContext *, const char *, size_t) { return nullptr; }
+
+void FreeSquaredDifference(TfLiteContext *, void *) {}
+
+TfLiteStatus PrepareSquaredDifference(TfLiteContext *context, TfLiteNode *node)
+{
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 2);
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
+
+ const TfLiteTensor *input1 = ::tflite::GetInput(context, node, 0);
+ const TfLiteTensor *input2 = ::tflite::GetInput(context, node, 1);
+ TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
+
+ TF_LITE_ENSURE_EQ(context, input1->type, input2->type);
+ TF_LITE_ENSURE_EQ(context, input1->type, output->type);
+
+ return context->ResizeTensor(context, output, TfLiteIntArrayCopy(input1->dims));
+}
+
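+// NOTE Both inputs must share one shape: the output is sized from
+// input1->dims in Prepare and Eval walks the inputs in lockstep, so no
+// broadcasting is performed.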
+TfLiteStatus EvalSquaredDifference(TfLiteContext *context, TfLiteNode *node)
+{
+
+ const TfLiteTensor *input1 = ::tflite::GetInput(context, node, 0);
+ const TfLiteTensor *input2 = ::tflite::GetInput(context, node, 1);
+
+ TfLiteTensor *output = ::tflite::GetOutput(context, node, 0);
+
+ size_t elements = ::tflite::NumElements(input1);
+
+ switch (input1->type)
+ {
+ case kTfLiteFloat32:
+ {
+ const float *in1 = input1->data.f;
+ const float *in2 = input2->data.f;
+ const float *in_end1 = in1 + elements;
+ float *out = output->data.f;
+
+ for (; in1 < in_end1; in1++, in2++, out++)
+ *out = ((*in1 - *in2) * (*in1 - *in2));
+
+ return kTfLiteOk;
+ }
+ case kTfLiteInt32:
+ {
+ const int *in1 = input1->data.i32;
+ const int *in2 = input2->data.i32;
+ const int *in_end1 = in1 + elements;
+ int *out = output->data.i32;
+
+ for (; in1 < in_end1; in1++, in2++, out++)
+ *out = ((*in1 - *in2) * (*in1 - *in2));
+
+ return kTfLiteOk;
+ }
+ case kTfLiteInt64:
+ {
+ const int64_t *in1 = input1->data.i64;
+      const int64_t *in2 = input2->data.i64;
+ const int64_t *in_end1 = in1 + elements;
+ int64_t *out = output->data.i64;
+
+ for (; in1 < in_end1; in1++, in2++, out++)
+ *out = ((*in1 - *in2) * (*in1 - *in2));
+
+ return kTfLiteOk;
+ }
+ default:
+ {
+ context->ReportError(context, "InputType is %d Unsupported", input1->type);
+ return kTfLiteError;
+ }
+ }
+}
+
+} // namespace SquaredDifference
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
diff --git a/runtimes/libs/tflite/src/ext/kernels/TensorFlowMax.cpp b/runtimes/libs/tflite/src/ext/kernels/TensorFlowMax.cpp
new file mode 100644
index 000000000..207de98f5
--- /dev/null
+++ b/runtimes/libs/tflite/src/ext/kernels/TensorFlowMax.cpp
@@ -0,0 +1,405 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/ext/kernels/TensorFlowMax.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+
+#include <iostream>
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace TensorFlowMax
+{
+
+struct TensorFlowMaxOp
+{
+ TensorFlowMaxOp(TfLiteContext *context, TfLiteNode *node)
+ {
+ input = ::tflite::GetInput(context, node, 0);
+ axis = ::tflite::GetInput(context, node, 1);
+ output = ::tflite::GetOutput(context, node, 0);
+ }
+ const TfLiteTensor *input;
+ const TfLiteTensor *axis;
+ TfLiteTensor *output;
+};
+
+void *InitTensorFlowMax(TfLiteContext *context, const char *, size_t)
+{
+ // Creates two temp tensors to store index and axis for internal
+ // implementation only.
+ auto *scratch_tensor_index = new int;
+ context->AddTensors(context, 2, scratch_tensor_index);
+ return scratch_tensor_index;
+}
+
+void FreeTensorFlowMax(TfLiteContext *, void *buffer)
+{
+  // InitTensorFlowMax allocates this buffer as an int (the scratch tensor
+  // index), so it must be deleted as one.
+  delete static_cast<int *>(buffer);
+}
+
+// Resizes the temp tensor that stores resolved axis.
+TfLiteStatus ResizeTempAxis(TfLiteContext *context, TensorFlowMaxOp *op_context,
+ TfLiteTensor *resolved_axis)
+{
+ TfLiteIntArray *axis_size = TfLiteIntArrayCreate(1);
+ axis_size->data[0] = static_cast<int>(::tflite::NumElements(op_context->axis));
+ return context->ResizeTensor(context, resolved_axis, axis_size);
+}
+
+// Resizes output array based on the input size and resolved axis.
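+// Example: reducing a [2, 3, 4] input over axis {1} resizes the output to
+// [2, 1, 4] when the reduced dimension is kept, or to [2, 4] when it is
+// squeezed away; both layouts are accepted here.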
+TfLiteStatus ResizeOutputTensor(TfLiteContext *context, TensorFlowMaxOp *op_context)
+{
+ int64_t num_axis = ::tflite::NumElements(op_context->axis);
+ TfLiteIntArray *input_dims = op_context->input->dims;
+ int input_num_dims = ::tflite::NumDimensions(op_context->input);
+ const int *axis = op_context->axis->data.i32;
+
+ {
+ // Calculates size of reducing axis.
+ int64_t num_reduce_axis = num_axis;
+ for (int64_t i = 0; i < num_axis; ++i)
+ {
+ int current = axis[i];
+ if (current < 0)
+ {
+ current += input_num_dims;
+ }
+ TF_LITE_ENSURE(context, current >= 0 && current < input_num_dims);
+ for (int64_t j = 0; j < i; ++j)
+ {
+ int previous = axis[j];
+ if (previous < 0)
+ {
+ previous += input_num_dims;
+ }
+ if (current == previous)
+ {
+ --num_reduce_axis;
+ break;
+ }
+ }
+ }
+ // Determines output dimensions.
+ int output_num_dims = ::tflite::NumDimensions(op_context->output);
+ TF_LITE_ENSURE(context, (input_num_dims == output_num_dims) ||
+ (input_num_dims - num_reduce_axis == output_num_dims));
+
+ if (input_num_dims == output_num_dims)
+ {
+ TfLiteIntArray *output_dims = TfLiteIntArrayCopy(input_dims);
+ for (int64_t axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ int current = axis[axis_idx];
+ output_dims->data[current] = 1;
+ }
+ return context->ResizeTensor(context, op_context->output, output_dims);
+ }
+ else
+ {
+ TfLiteIntArray *output_dims = TfLiteIntArrayCreate(output_num_dims);
+ int num_skip_axis = 0;
+ for (int idx = 0; idx < input_num_dims; ++idx)
+ {
+ bool is_axis = false;
+ for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ if (axis[axis_idx] == idx || axis[axis_idx] + input_num_dims == idx)
+ {
+ ++num_skip_axis;
+ is_axis = true;
+ break;
+ }
+ }
+ if (!is_axis)
+ {
+ output_dims->data[idx - num_skip_axis] = input_dims->data[idx];
+ }
+ }
+ return context->ResizeTensor(context, op_context->output, output_dims);
+ }
+ }
+}
+
+// Initializes temp tensors to store index and resolved axis.
+TfLiteStatus InitializeTemporaries(TfLiteContext *context, TfLiteNode *node,
+ TensorFlowMaxOp *op_context)
+{
+ // Creates a temp index to iterate through input data.
+ int *scratch_tensor_index = reinterpret_cast<int *>(node->user_data);
+ TfLiteIntArrayFree(node->temporaries);
+ node->temporaries = TfLiteIntArrayCreate(2);
+ node->temporaries->data[0] = *scratch_tensor_index;
+ TfLiteTensor *scratch_tensor = &context->tensors[node->temporaries->data[0]];
+ scratch_tensor->type = kTfLiteInt32;
+ scratch_tensor->allocation_type = kTfLiteArenaRw;
+ TfLiteIntArray *index_size = TfLiteIntArrayCreate(1);
+ index_size->data[0] = ::tflite::NumDimensions(op_context->input);
+ TF_LITE_ENSURE_OK(context, context->ResizeTensor(context, scratch_tensor, index_size));
+
+ // Creates a temp tensor to store resolved axis given input data.
+ node->temporaries->data[1] = *scratch_tensor_index + 1;
+ TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
+ resolved_axis->type = kTfLiteInt32;
+ return kTfLiteOk;
+}
+
+TfLiteStatus PrepareTensorFlowMax(TfLiteContext *context, TfLiteNode *node)
+{
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 2);
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
+
+ TensorFlowMaxOp op_context(context, node);
+ TF_LITE_ENSURE_OK(context, InitializeTemporaries(context, node, &op_context));
+
+ TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
+ // Leaves work to Eval if axis is not constant; else resizes output.
+ if (!::tflite::IsConstantTensor(op_context.axis))
+ {
+ ::tflite::SetTensorToDynamic(op_context.output);
+ ::tflite::SetTensorToDynamic(resolved_axis);
+ return kTfLiteOk;
+ }
+ resolved_axis->allocation_type = kTfLiteArenaRw;
+ TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
+ return ResizeOutputTensor(context, &op_context);
+}
+
+// Gets offset of index if expanded on axis. When expanded, the flattened offset
+// will not change, if the output index changes on the given axis. For example,
+// if you have a 2D tensor and you are expanding to 3D on axis 0,
+// then index (0, 1, 2) and index (1, 1, 2) will map from the same flattened
+// offset.
+inline size_t ExpandedInputOffset(const int num_dims, const int *dims, const int *index,
+ const int num_axis, const int *axis)
+{
+ size_t offset = 0;
+ int out_idx = 0;
+ for (int in_idx = 0; in_idx < num_dims; ++in_idx)
+ {
+ // if we need to expand this axis
+ bool is_axis = false;
+ if (axis != nullptr)
+ {
+ for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ if (in_idx == axis[axis_idx])
+ {
+ is_axis = true;
+ break;
+ }
+ }
+ }
+ if (!is_axis)
+ {
+ offset = offset * static_cast<size_t>(dims[in_idx]) + static_cast<size_t>(index[out_idx]);
+ out_idx++;
+ }
+ else
+ {
+ offset = offset * static_cast<size_t>(dims[in_idx]);
+ }
+ }
+ return offset;
+}
+
+// Gets offset of index if reducing on axis. When reducing, the flattened offset
+// will not change, if the input index changes on the given axis. For example,
+// if you have a 3D tensor and you are reducing to 2D by eliminating axis 0,
+// then index (0, 1, 2) and index (1, 1, 2) will map to the same flattened
+// offset.
+// TODO(kanlig): uses Dims to represent dimensions.
+inline size_t ReducedOutputOffset(const int num_dims, const int *dims, const int *index,
+ const int num_axis, const int *axis)
+{
+ size_t offset = 0;
+ for (int idx = 0; idx < num_dims; ++idx)
+ {
+ // if we need to skip this axis
+ bool is_axis = false;
+ if (axis != nullptr)
+ {
+ for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ if (idx == axis[axis_idx])
+ {
+ is_axis = true;
+ break;
+ }
+ }
+ }
+ if (!is_axis)
+ {
+ offset = offset * static_cast<size_t>(dims[idx]) + static_cast<size_t>(index[idx]);
+ }
+ }
+ return offset;
+}
+
+// Gets next index to iterate through a multidimensional array.
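+// Example: with dims = {2, 3}, successive calls advance {0, 0} -> {0, 1} ->
+// {0, 2} -> {1, 0} -> ... -> {1, 2}, and the call after the last index
+// returns false once the carry propagates past the leading dimension.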
+inline bool NextIndex(TfLiteContext *context, const int num_dims, const int *dims, int *current)
+{
+ int carry = 1;
+ for (int idx = num_dims - 1; idx >= 0; --idx)
+ {
+ int current_val = current[idx] + carry;
+ TF_LITE_ENSURE(context, (dims[idx] >= current_val));
+ if (dims[idx] == current_val)
+ {
+ current[idx] = 0;
+ }
+ else
+ {
+ current[idx] = current_val;
+ carry = 0;
+ break;
+ }
+ }
+ return (carry == 0);
+}
+
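+// CustomMax reduces in two passes: the first seeds every output cell with one
+// representative input element (via ExpandedInputOffset), and the second
+// sweeps the whole input, folding each element into its reduced output cell
+// with a running max.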
+template <typename T>
+inline TfLiteStatus
+CustomMax(TfLiteContext *context, T *input_data, const int *input_dims, const int input_num_dims,
+ T *output_data, const int *output_dims, const int output_num_dims, const int *axis,
+ const int num_axis_dimensions, bool /*keep_dims*/, int *temp_index, int *resolved_axis)
+{
+ // resolves axis.
+ int num_resolved_axis = 0;
+ for (int idx = 0; idx < num_axis_dimensions; ++idx)
+ {
+ int current = axis[idx];
+ TF_LITE_ENSURE(context, (current < input_num_dims && current + input_num_dims >= 0));
+ if (current < 0)
+ {
+ current += input_num_dims;
+ }
+ bool is_dup = false;
+ for (int j = 0; j < num_resolved_axis; ++j)
+ {
+ if (resolved_axis[j] == current)
+ {
+ is_dup = true;
+ break;
+ }
+ }
+ if (!is_dup)
+ {
+ resolved_axis[num_resolved_axis++] = current;
+ }
+ }
+
+ TF_LITE_ENSURE(context, (input_num_dims > 0));
+ TF_LITE_ENSURE(context, (input_dims != nullptr));
+ TF_LITE_ENSURE(context, (temp_index != nullptr));
+
+ // resets output data.
+ for (int idx = 0; idx < output_num_dims; ++idx)
+ {
+ temp_index[idx] = 0;
+ }
+ for (bool has_next = true; has_next;
+ has_next = NextIndex(context, output_num_dims, output_dims, temp_index))
+ {
+ size_t output_offset =
+ ReducedOutputOffset(output_num_dims, output_dims, temp_index, 0, nullptr);
+ size_t input_offset = ExpandedInputOffset(input_num_dims, input_dims, temp_index,
+ num_resolved_axis, resolved_axis);
+ output_data[output_offset] = input_data[input_offset];
+ }
+
+ // resets temp index.
+ for (int idx = 0; idx < input_num_dims; ++idx)
+ {
+ temp_index[idx] = 0;
+ }
+
+ // iterates through input_data.
+ for (bool has_next = true; has_next;
+ has_next = NextIndex(context, input_num_dims, input_dims, temp_index))
+ {
+ size_t input_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index, 0, nullptr);
+ size_t output_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index,
+ num_resolved_axis, resolved_axis);
+ if (output_data[output_offset] < input_data[input_offset])
+ {
+ output_data[output_offset] = input_data[input_offset];
+ }
+ }
+
+ return kTfLiteOk;
+}
+
+TfLiteStatus EvalTensorFlowMax(TfLiteContext *context, TfLiteNode *node)
+{
+
+ TensorFlowMaxOp op_context(context, node);
+ int num_axis = static_cast<int>(::tflite::NumElements(op_context.axis));
+ TfLiteTensor *temp_index = &context->tensors[node->temporaries->data[0]];
+ TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
+ // Resize the output tensor if the output tensor is dynamic.
+ if (::tflite::IsDynamicTensor(op_context.output))
+ {
+ TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
+ TF_LITE_ENSURE_OK(context, ResizeOutputTensor(context, &op_context));
+ }
+
+ TfLiteStatus returnStatus = kTfLiteOk;
+ switch (op_context.input->type)
+ {
+ case kTfLiteFloat32:
+ returnStatus = CustomMax<float>(
+ context, op_context.input->data.f, op_context.input->dims->data,
+ op_context.input->dims->size, op_context.output->data.f, op_context.output->dims->data,
+ op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
+ temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ case kTfLiteInt32:
+ returnStatus = CustomMax<int>(context, op_context.input->data.i32,
+ op_context.input->dims->data, op_context.input->dims->size,
+ op_context.output->data.i32, op_context.output->dims->data,
+ op_context.output->dims->size, op_context.axis->data.i32,
+ num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ case kTfLiteUInt8:
+ returnStatus = CustomMax<uint8_t>(
+ context, op_context.input->data.uint8, op_context.input->dims->data,
+ op_context.input->dims->size, op_context.output->data.uint8,
+ op_context.output->dims->data, op_context.output->dims->size, op_context.axis->data.i32,
+ num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ case kTfLiteInt64:
+ returnStatus = CustomMax<int64_t>(
+ context, op_context.input->data.i64, op_context.input->dims->data,
+ op_context.input->dims->size, op_context.output->data.i64, op_context.output->dims->data,
+ op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
+ temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ default:
+ returnStatus = kTfLiteError;
+ }
+
+ return returnStatus;
+}
+
+} // namespace TensorFlowMax
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
diff --git a/runtimes/libs/tflite/src/ext/kernels/TensorFlowSum.cpp b/runtimes/libs/tflite/src/ext/kernels/TensorFlowSum.cpp
new file mode 100644
index 000000000..40f266baa
--- /dev/null
+++ b/runtimes/libs/tflite/src/ext/kernels/TensorFlowSum.cpp
@@ -0,0 +1,400 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/ext/kernels/TensorFlowSum.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+
+#include <iostream>
+
+namespace nnfw
+{
+namespace tflite
+{
+namespace custom
+{
+namespace TensorFlowSum
+{
+
+struct TensorFlowSumOp
+{
+ TensorFlowSumOp(TfLiteContext *context, TfLiteNode *node)
+ {
+ input = ::tflite::GetInput(context, node, 0);
+ axis = ::tflite::GetInput(context, node, 1);
+ output = ::tflite::GetOutput(context, node, 0);
+ }
+ const TfLiteTensor *input;
+ const TfLiteTensor *axis;
+ TfLiteTensor *output;
+};
+
+void *InitTensorFlowSum(TfLiteContext *context, const char *, size_t)
+{
+ // Creates two temp tensors to store index and axis for internal
+ // implementation only.
+ auto *scratch_tensor_index = new int;
+ context->AddTensors(context, 2, scratch_tensor_index);
+ return scratch_tensor_index;
+}
+
+void FreeTensorFlowSum(TfLiteContext *, void *buffer)
+{
+  // InitTensorFlowSum allocates this buffer as an int (the scratch tensor
+  // index), so it must be deleted as one.
+  delete static_cast<int *>(buffer);
+}
+
+// Resizes the temp tensor that stores resolved axis.
+TfLiteStatus ResizeTempAxis(TfLiteContext *context, TensorFlowSumOp *op_context,
+ TfLiteTensor *resolved_axis)
+{
+ TfLiteIntArray *axis_size = TfLiteIntArrayCreate(1);
+ axis_size->data[0] = static_cast<int>(::tflite::NumElements(op_context->axis));
+ return context->ResizeTensor(context, resolved_axis, axis_size);
+}
+
+// Resizes output array based on the input size and resolved axis.
+TfLiteStatus ResizeOutputTensor(TfLiteContext *context, TensorFlowSumOp *op_context)
+{
+ int64_t num_axis = ::tflite::NumElements(op_context->axis);
+ TfLiteIntArray *input_dims = op_context->input->dims;
+ int input_num_dims = ::tflite::NumDimensions(op_context->input);
+ const int *axis = op_context->axis->data.i32;
+
+ {
+ // Calculates size of reducing axis.
+ int64_t num_reduce_axis = num_axis;
+ for (int64_t i = 0; i < num_axis; ++i)
+ {
+ int current = axis[i];
+ if (current < 0)
+ {
+ current += input_num_dims;
+ }
+ TF_LITE_ENSURE(context, current >= 0 && current < input_num_dims);
+ for (int64_t j = 0; j < i; ++j)
+ {
+ int previous = axis[j];
+ if (previous < 0)
+ {
+ previous += input_num_dims;
+ }
+ if (current == previous)
+ {
+ --num_reduce_axis;
+ break;
+ }
+ }
+ }
+ // Determines output dimensions.
+ int output_num_dims = ::tflite::NumDimensions(op_context->output);
+ TF_LITE_ENSURE(context, (input_num_dims == output_num_dims) ||
+ (input_num_dims - num_reduce_axis == output_num_dims));
+
+ if (input_num_dims == output_num_dims)
+ {
+ TfLiteIntArray *output_dims = TfLiteIntArrayCopy(input_dims);
+ for (int64_t axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ int current = axis[axis_idx];
+ output_dims->data[current] = 1;
+ }
+ return context->ResizeTensor(context, op_context->output, output_dims);
+ }
+ else
+ {
+ TfLiteIntArray *output_dims = TfLiteIntArrayCreate(output_num_dims);
+ int num_skip_axis = 0;
+ for (int idx = 0; idx < input_num_dims; ++idx)
+ {
+ bool is_axis = false;
+ for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ if (axis[axis_idx] == idx || axis[axis_idx] + input_num_dims == idx)
+ {
+ ++num_skip_axis;
+ is_axis = true;
+ break;
+ }
+ }
+ if (!is_axis)
+ {
+ output_dims->data[idx - num_skip_axis] = input_dims->data[idx];
+ }
+ }
+ return context->ResizeTensor(context, op_context->output, output_dims);
+ }
+ }
+}
+
+// Initializes temp tensors to store index and resolved axis.
+TfLiteStatus InitializeTemporaries(TfLiteContext *context, TfLiteNode *node,
+ TensorFlowSumOp *op_context)
+{
+ // Creates a temp index to iterate through input data.
+ int *scratch_tensor_index = reinterpret_cast<int *>(node->user_data);
+ TfLiteIntArrayFree(node->temporaries);
+ node->temporaries = TfLiteIntArrayCreate(2);
+ node->temporaries->data[0] = *scratch_tensor_index;
+ TfLiteTensor *scratch_tensor = &context->tensors[node->temporaries->data[0]];
+ scratch_tensor->type = kTfLiteInt32;
+ scratch_tensor->allocation_type = kTfLiteArenaRw;
+ TfLiteIntArray *index_size = TfLiteIntArrayCreate(1);
+ index_size->data[0] = ::tflite::NumDimensions(op_context->input);
+ TF_LITE_ENSURE_OK(context, context->ResizeTensor(context, scratch_tensor, index_size));
+
+ // Creates a temp tensor to store resolved axis given input data.
+ node->temporaries->data[1] = *scratch_tensor_index + 1;
+ TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
+ resolved_axis->type = kTfLiteInt32;
+ return kTfLiteOk;
+}
+
+TfLiteStatus PrepareTensorFlowSum(TfLiteContext *context, TfLiteNode *node)
+{
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumInputs(node), 2);
+ TF_LITE_ENSURE_EQ(context, ::tflite::NumOutputs(node), 1);
+
+ TensorFlowSumOp op_context(context, node);
+ TF_LITE_ENSURE_OK(context, InitializeTemporaries(context, node, &op_context));
+
+ TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
+ // Leaves work to Eval if axis is not constant; else resizes output.
+ if (!::tflite::IsConstantTensor(op_context.axis))
+ {
+ ::tflite::SetTensorToDynamic(op_context.output);
+ ::tflite::SetTensorToDynamic(resolved_axis);
+ return kTfLiteOk;
+ }
+ resolved_axis->allocation_type = kTfLiteArenaRw;
+ TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
+ return ResizeOutputTensor(context, &op_context);
+}
+
+// Gets offset of index if expanded on axis. When expanded, the flattened offset
+// will not change, if the output index changes on the given axis. For example,
+// if you have a 2D tensor and you are expanding to 3D on axis 0,
+// then index (0, 1, 2) and index (1, 1, 2) will map from the same flattened
+// offset.
+inline size_t ExpandedInputOffset(const int num_dims, const int *dims, const int *index,
+ const int num_axis, const int *axis)
+{
+ size_t offset = 0;
+ int out_idx = 0;
+ for (int in_idx = 0; in_idx < num_dims; ++in_idx)
+ {
+ // if we need to expand this axis
+ bool is_axis = false;
+ if (axis != nullptr)
+ {
+ for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ if (in_idx == axis[axis_idx])
+ {
+ is_axis = true;
+ break;
+ }
+ }
+ }
+ if (!is_axis)
+ {
+ offset = offset * static_cast<size_t>(dims[in_idx]) + static_cast<size_t>(index[out_idx]);
+ out_idx++;
+ }
+ else
+ {
+ offset = offset * static_cast<size_t>(dims[in_idx]);
+ }
+ }
+ return offset;
+}
+
+// Gets offset of index if reducing on axis. When reducing, the flattened offset
+// will not change, if the input index changes on the given axis. For example,
+// if you have a 3D tensor and you are reducing to 2D by eliminating axis 0,
+// then index (0, 1, 2) and index (1, 1, 2) will map to the same flattened
+// offset.
+// TODO(kanlig): uses Dims to represent dimensions.
+inline size_t ReducedOutputOffset(const int num_dims, const int *dims, const int *index,
+ const int num_axis, const int *axis)
+{
+ size_t offset = 0;
+ for (int idx = 0; idx < num_dims; ++idx)
+ {
+ // if we need to skip this axis
+ bool is_axis = false;
+ if (axis != nullptr)
+ {
+ for (int axis_idx = 0; axis_idx < num_axis; ++axis_idx)
+ {
+ if (idx == axis[axis_idx])
+ {
+ is_axis = true;
+ break;
+ }
+ }
+ }
+ if (!is_axis)
+ {
+ offset = offset * static_cast<size_t>(dims[idx]) + static_cast<size_t>(index[idx]);
+ }
+ }
+ return offset;
+}
+
+// Gets next index to iterate through a multidimensional array.
+inline bool NextIndex(TfLiteContext *context, const int num_dims, const int *dims, int *current)
+{
+ int carry = 1;
+ for (int idx = num_dims - 1; idx >= 0; --idx)
+ {
+ int current_val = current[idx] + carry;
+ TF_LITE_ENSURE(context, (dims[idx] >= current_val));
+ if (dims[idx] == current_val)
+ {
+ current[idx] = 0;
+ }
+ else
+ {
+ current[idx] = current_val;
+ carry = 0;
+ break;
+ }
+ }
+ return (carry == 0);
+}
+
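+// CustomSum mirrors CustomMax in TensorFlowMax.cpp, except the first pass
+// zeroes the output cells and the second pass accumulates with += instead of
+// taking a running max.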
+template <typename T>
+inline TfLiteStatus
+CustomSum(TfLiteContext *context, T *input_data, const int *input_dims, const int input_num_dims,
+ T *output_data, const int *output_dims, const int output_num_dims, const int *axis,
+ const int num_axis_dimensions, bool /*keep_dims*/, int *temp_index, int *resolved_axis)
+{
+ // resolves axis.
+ int num_resolved_axis = 0;
+ for (int idx = 0; idx < num_axis_dimensions; ++idx)
+ {
+ int current = axis[idx];
+ TF_LITE_ENSURE(context, (current < input_num_dims && current + input_num_dims >= 0));
+ if (current < 0)
+ {
+ current += input_num_dims;
+ }
+ bool is_dup = false;
+ for (int j = 0; j < num_resolved_axis; ++j)
+ {
+ if (resolved_axis[j] == current)
+ {
+ is_dup = true;
+ break;
+ }
+ }
+ if (!is_dup)
+ {
+ resolved_axis[num_resolved_axis++] = current;
+ }
+ }
+
+ TF_LITE_ENSURE(context, (input_num_dims > 0));
+ TF_LITE_ENSURE(context, (input_dims != nullptr));
+ TF_LITE_ENSURE(context, (temp_index != nullptr));
+
+ // resets output data.
+ for (int idx = 0; idx < output_num_dims; ++idx)
+ {
+ temp_index[idx] = 0;
+ }
+ for (bool has_next = true; has_next;
+ has_next = NextIndex(context, output_num_dims, output_dims, temp_index))
+ {
+ size_t output_offset =
+ ReducedOutputOffset(output_num_dims, output_dims, temp_index, 0, nullptr);
+ output_data[output_offset] = 0;
+ }
+
+ // resets temp index.
+ for (int idx = 0; idx < input_num_dims; ++idx)
+ {
+ temp_index[idx] = 0;
+ }
+
+ // iterates through input_data.
+ for (bool has_next = true; has_next;
+ has_next = NextIndex(context, input_num_dims, input_dims, temp_index))
+ {
+ size_t input_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index, 0, nullptr);
+ size_t output_offset = ReducedOutputOffset(input_num_dims, input_dims, temp_index,
+ num_resolved_axis, resolved_axis);
+ output_data[output_offset] += input_data[input_offset];
+ }
+
+ return kTfLiteOk;
+}
+
+TfLiteStatus EvalTensorFlowSum(TfLiteContext *context, TfLiteNode *node)
+{
+
+ TensorFlowSumOp op_context(context, node);
+ int num_axis = static_cast<int>(::tflite::NumElements(op_context.axis));
+ TfLiteTensor *temp_index = &context->tensors[node->temporaries->data[0]];
+ TfLiteTensor *resolved_axis = &context->tensors[node->temporaries->data[1]];
+ // Resize the output tensor if the output tensor is dynamic.
+ if (::tflite::IsDynamicTensor(op_context.output))
+ {
+ TF_LITE_ENSURE_OK(context, ResizeTempAxis(context, &op_context, resolved_axis));
+ TF_LITE_ENSURE_OK(context, ResizeOutputTensor(context, &op_context));
+ }
+
+ TfLiteStatus returnStatus = kTfLiteOk;
+ switch (op_context.input->type)
+ {
+ case kTfLiteFloat32:
+ returnStatus = CustomSum<float>(
+ context, op_context.input->data.f, op_context.input->dims->data,
+ op_context.input->dims->size, op_context.output->data.f, op_context.output->dims->data,
+ op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
+ temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ case kTfLiteInt32:
+ returnStatus = CustomSum<int>(context, op_context.input->data.i32,
+ op_context.input->dims->data, op_context.input->dims->size,
+ op_context.output->data.i32, op_context.output->dims->data,
+ op_context.output->dims->size, op_context.axis->data.i32,
+ num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ case kTfLiteUInt8:
+ returnStatus = CustomSum<uint8_t>(
+ context, op_context.input->data.uint8, op_context.input->dims->data,
+ op_context.input->dims->size, op_context.output->data.uint8,
+ op_context.output->dims->data, op_context.output->dims->size, op_context.axis->data.i32,
+ num_axis, false, temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ case kTfLiteInt64:
+ returnStatus = CustomSum<int64_t>(
+ context, op_context.input->data.i64, op_context.input->dims->data,
+ op_context.input->dims->size, op_context.output->data.i64, op_context.output->dims->data,
+ op_context.output->dims->size, op_context.axis->data.i32, num_axis, false,
+ temp_index->data.i32, resolved_axis->data.i32);
+ break;
+ default:
+ returnStatus = kTfLiteError;
+ }
+
+ return returnStatus;
+}
+
+} // namespace TensorFlowSum
+} // namespace custom
+} // namespace tflite
+} // namespace nnfw
diff --git a/runtimes/libs/tflite/src/ext/kernels/register.cpp b/runtimes/libs/tflite/src/ext/kernels/register.cpp
new file mode 100644
index 000000000..a99899a80
--- /dev/null
+++ b/runtimes/libs/tflite/src/ext/kernels/register.cpp
@@ -0,0 +1,247 @@
+/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This code is derived from the following file (in TensorFlow)
+// 'externals/tensorflow/tensorflow/lite/kernels/register.cc'
+#include "tflite/ext/kernels/register.h"
+#include "tflite/ext/kernels/CustomOps.h"
+
+namespace tflite {
+namespace ops {
+namespace custom {
+
+TfLiteRegistration* Register_DETECTION_POSTPROCESS();
+
+} // namespace custom
+} // namespace ops
+} // namespace tflite
+
+namespace tflite {
+namespace ops {
+namespace builtin {
+
+TfLiteRegistration *Register_RELU();
+TfLiteRegistration *Register_RELU_N1_TO_1();
+TfLiteRegistration *Register_RELU6();
+TfLiteRegistration *Register_TANH();
+TfLiteRegistration *Register_LOGISTIC();
+TfLiteRegistration *Register_AVERAGE_POOL_2D();
+TfLiteRegistration *Register_MAX_POOL_2D();
+TfLiteRegistration *Register_L2_POOL_2D();
+TfLiteRegistration *Register_CONV_2D();
+TfLiteRegistration *Register_DEPTHWISE_CONV_2D();
+TfLiteRegistration *Register_SVDF();
+TfLiteRegistration *Register_RNN();
+TfLiteRegistration *Register_BIDIRECTIONAL_SEQUENCE_RNN();
+TfLiteRegistration *Register_UNIDIRECTIONAL_SEQUENCE_RNN();
+TfLiteRegistration *Register_EMBEDDING_LOOKUP();
+TfLiteRegistration *Register_EMBEDDING_LOOKUP_SPARSE();
+TfLiteRegistration *Register_FULLY_CONNECTED();
+TfLiteRegistration *Register_LSH_PROJECTION();
+TfLiteRegistration *Register_HASHTABLE_LOOKUP();
+TfLiteRegistration *Register_SOFTMAX();
+TfLiteRegistration *Register_CONCATENATION();
+TfLiteRegistration *Register_ADD();
+TfLiteRegistration *Register_SPACE_TO_BATCH_ND();
+TfLiteRegistration *Register_DIV();
+TfLiteRegistration *Register_SUB();
+TfLiteRegistration *Register_BATCH_TO_SPACE_ND();
+TfLiteRegistration *Register_MUL();
+TfLiteRegistration *Register_L2_NORMALIZATION();
+TfLiteRegistration *Register_LOCAL_RESPONSE_NORMALIZATION();
+TfLiteRegistration *Register_LSTM();
+TfLiteRegistration *Register_BIDIRECTIONAL_SEQUENCE_LSTM();
+TfLiteRegistration *Register_UNIDIRECTIONAL_SEQUENCE_LSTM();
+TfLiteRegistration *Register_PAD();
+TfLiteRegistration *Register_PADV2();
+TfLiteRegistration *Register_RESHAPE();
+TfLiteRegistration *Register_RESIZE_BILINEAR();
+TfLiteRegistration *Register_SKIP_GRAM();
+TfLiteRegistration *Register_SPACE_TO_DEPTH();
+TfLiteRegistration *Register_GATHER();
+TfLiteRegistration *Register_TRANSPOSE();
+TfLiteRegistration *Register_MEAN();
+TfLiteRegistration *Register_SPLIT();
+TfLiteRegistration *Register_SQUEEZE();
+TfLiteRegistration *Register_STRIDED_SLICE();
+TfLiteRegistration *Register_EXP();
+TfLiteRegistration *Register_TOPK_V2();
+TfLiteRegistration *Register_LOG_SOFTMAX();
+TfLiteRegistration *Register_CAST();
+TfLiteRegistration *Register_DEQUANTIZE();
+TfLiteRegistration *Register_PRELU();
+TfLiteRegistration *Register_MAXIMUM();
+TfLiteRegistration *Register_MINIMUM();
+TfLiteRegistration *Register_ARG_MAX();
+TfLiteRegistration *Register_GREATER();
+TfLiteRegistration *Register_GREATER_EQUAL();
+TfLiteRegistration *Register_LESS();
+TfLiteRegistration *Register_LESS_EQUAL();
+TfLiteRegistration *Register_FLOOR();
+TfLiteRegistration *Register_NEG();
+TfLiteRegistration *Register_SELECT();
+TfLiteRegistration *Register_SLICE();
+TfLiteRegistration *Register_SIN();
+TfLiteRegistration *Register_TRANSPOSE_CONV();
+TfLiteRegistration *Register_SPARSE_TO_DENSE();
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+TfLiteRegistration *Register_SUM();
+TfLiteRegistration *Register_REDUCE_MAX();
+TfLiteRegistration *Register_REDUCE_MIN();
+TfLiteRegistration *Register_EQUAL();
+TfLiteRegistration *Register_NOT_EQUAL();
+TfLiteRegistration *Register_SQRT();
+TfLiteRegistration *Register_RSQRT();
+TfLiteRegistration *Register_SHAPE();
+TfLiteRegistration *Register_POW();
+TfLiteRegistration *Register_FAKE_QUANT();
+TfLiteRegistration *Register_PACK();
+TfLiteRegistration *Register_ONE_HOT();
+TfLiteRegistration *Register_LOGICAL_OR();
+TfLiteRegistration *Register_LOGICAL_AND();
+TfLiteRegistration *Register_LOGICAL_NOT();
+TfLiteRegistration *Register_UNPACK();
+TfLiteRegistration *Register_FLOOR_DIV();
+TfLiteRegistration *Register_SQUARE();
+TfLiteRegistration *Register_ZEROS_LIKE();
+TfLiteRegistration* Register_FLOOR_MOD();
+TfLiteRegistration* Register_RANGE();
+TfLiteRegistration* Register_LEAKY_RELU();
+TfLiteRegistration* Register_SQUARED_DIFFERENCE();
+TfLiteRegistration* Register_FILL();
+TfLiteRegistration* Register_MIRROR_PAD();
+#endif // TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+
+} // namespace builtin
+} // namespace ops
+} // namespace tflite
+
+namespace nnfw {
+namespace tflite {
+
+BuiltinOpResolver::BuiltinOpResolver()
+{
+ // Using namespace directive to minimize diff with upstream tensorflow
+ using namespace ::tflite::ops::custom;
+ using namespace ::tflite::ops::builtin;
+ using namespace ::tflite;
+
+ AddBuiltin(BuiltinOperator_RELU, Register_RELU());
+ AddBuiltin(BuiltinOperator_RELU_N1_TO_1, Register_RELU_N1_TO_1());
+ AddBuiltin(BuiltinOperator_RELU6, Register_RELU6());
+ AddBuiltin(BuiltinOperator_TANH, Register_TANH());
+ AddBuiltin(BuiltinOperator_LOGISTIC, Register_LOGISTIC());
+ AddBuiltin(BuiltinOperator_AVERAGE_POOL_2D, Register_AVERAGE_POOL_2D());
+ AddBuiltin(BuiltinOperator_MAX_POOL_2D, Register_MAX_POOL_2D());
+ AddBuiltin(BuiltinOperator_L2_POOL_2D, Register_L2_POOL_2D());
+ AddBuiltin(BuiltinOperator_CONV_2D, Register_CONV_2D());
+ AddBuiltin(BuiltinOperator_DEPTHWISE_CONV_2D, Register_DEPTHWISE_CONV_2D());
+ AddBuiltin(BuiltinOperator_SVDF, Register_SVDF());
+ AddBuiltin(BuiltinOperator_RNN, Register_RNN());
+ AddBuiltin(BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN, Register_BIDIRECTIONAL_SEQUENCE_RNN());
+ AddBuiltin(BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN, Register_UNIDIRECTIONAL_SEQUENCE_RNN());
+ AddBuiltin(BuiltinOperator_EMBEDDING_LOOKUP, Register_EMBEDDING_LOOKUP());
+ AddBuiltin(BuiltinOperator_EMBEDDING_LOOKUP_SPARSE, Register_EMBEDDING_LOOKUP_SPARSE());
+ AddBuiltin(BuiltinOperator_FULLY_CONNECTED, Register_FULLY_CONNECTED());
+ AddBuiltin(BuiltinOperator_LSH_PROJECTION, Register_LSH_PROJECTION());
+ AddBuiltin(BuiltinOperator_HASHTABLE_LOOKUP, Register_HASHTABLE_LOOKUP());
+ AddBuiltin(BuiltinOperator_SOFTMAX, Register_SOFTMAX());
+ AddBuiltin(BuiltinOperator_CONCATENATION, Register_CONCATENATION());
+ AddBuiltin(BuiltinOperator_ADD, Register_ADD());
+ AddBuiltin(BuiltinOperator_SPACE_TO_BATCH_ND, Register_SPACE_TO_BATCH_ND());
+ AddBuiltin(BuiltinOperator_BATCH_TO_SPACE_ND, Register_BATCH_TO_SPACE_ND());
+ AddBuiltin(BuiltinOperator_MUL, Register_MUL());
+ AddBuiltin(BuiltinOperator_L2_NORMALIZATION, Register_L2_NORMALIZATION());
+ AddBuiltin(BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION, Register_LOCAL_RESPONSE_NORMALIZATION());
+ AddBuiltin(BuiltinOperator_LSTM, Register_LSTM());
+ AddBuiltin(BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM, Register_BIDIRECTIONAL_SEQUENCE_LSTM());
+ AddBuiltin(BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM, Register_UNIDIRECTIONAL_SEQUENCE_LSTM());
+ AddBuiltin(BuiltinOperator_PAD, Register_PAD());
+ AddBuiltin(BuiltinOperator_PADV2, Register_PADV2());
+ AddBuiltin(BuiltinOperator_RESHAPE, Register_RESHAPE());
+ AddBuiltin(BuiltinOperator_RESIZE_BILINEAR, Register_RESIZE_BILINEAR());
+ AddBuiltin(BuiltinOperator_SKIP_GRAM, Register_SKIP_GRAM());
+ AddBuiltin(BuiltinOperator_SPACE_TO_DEPTH, Register_SPACE_TO_DEPTH());
+ AddBuiltin(BuiltinOperator_GATHER, Register_GATHER());
+ AddBuiltin(BuiltinOperator_TRANSPOSE, Register_TRANSPOSE());
+ AddBuiltin(BuiltinOperator_MEAN, Register_MEAN());
+ AddBuiltin(BuiltinOperator_DIV, Register_DIV());
+ AddBuiltin(BuiltinOperator_SUB, Register_SUB());
+ AddBuiltin(BuiltinOperator_SPLIT, Register_SPLIT());
+ AddBuiltin(BuiltinOperator_SQUEEZE, Register_SQUEEZE());
+ AddBuiltin(BuiltinOperator_STRIDED_SLICE, Register_STRIDED_SLICE());
+ AddBuiltin(BuiltinOperator_EXP, Register_EXP());
+ AddBuiltin(BuiltinOperator_TOPK_V2, Register_TOPK_V2());
+ AddBuiltin(BuiltinOperator_LOG_SOFTMAX, Register_LOG_SOFTMAX());
+ AddBuiltin(BuiltinOperator_CAST, Register_CAST());
+ AddBuiltin(BuiltinOperator_DEQUANTIZE, Register_DEQUANTIZE());
+ AddBuiltin(BuiltinOperator_PRELU, Register_PRELU());
+ AddBuiltin(BuiltinOperator_MAXIMUM, Register_MAXIMUM());
+ AddBuiltin(BuiltinOperator_MINIMUM, Register_MINIMUM());
+ AddBuiltin(BuiltinOperator_ARG_MAX, Register_ARG_MAX());
+ AddBuiltin(BuiltinOperator_GREATER, Register_GREATER());
+ AddBuiltin(BuiltinOperator_GREATER_EQUAL, Register_GREATER_EQUAL());
+ AddBuiltin(BuiltinOperator_LESS, Register_LESS());
+ AddBuiltin(BuiltinOperator_LESS_EQUAL, Register_LESS_EQUAL());
+ AddBuiltin(BuiltinOperator_FLOOR, Register_FLOOR());
+ AddBuiltin(BuiltinOperator_NEG, Register_NEG());
+ AddBuiltin(BuiltinOperator_SELECT, Register_SELECT());
+ AddBuiltin(BuiltinOperator_SLICE, Register_SLICE());
+ AddBuiltin(BuiltinOperator_SIN, Register_SIN());
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+ AddBuiltin(BuiltinOperator_SUM, Register_SUM());
+ AddBuiltin(BuiltinOperator_REDUCE_MAX, Register_REDUCE_MAX());
+ AddBuiltin(BuiltinOperator_REDUCE_MIN, Register_REDUCE_MIN());
+ AddBuiltin(BuiltinOperator_TRANSPOSE_CONV, Register_TRANSPOSE_CONV());
+ AddBuiltin(BuiltinOperator_SPARSE_TO_DENSE, Register_SPARSE_TO_DENSE());
+ AddBuiltin(BuiltinOperator_EQUAL, Register_EQUAL());
+ AddBuiltin(BuiltinOperator_NOT_EQUAL, Register_NOT_EQUAL());
+ AddBuiltin(BuiltinOperator_SQRT, Register_SQRT());
+ AddBuiltin(BuiltinOperator_RSQRT, Register_RSQRT());
+ AddBuiltin(BuiltinOperator_SHAPE, Register_SHAPE());
+ AddBuiltin(BuiltinOperator_POW, Register_POW());
+ AddBuiltin(BuiltinOperator_FAKE_QUANT, Register_FAKE_QUANT(), 1, 2);
+ AddBuiltin(BuiltinOperator_PACK, Register_PACK());
+ AddBuiltin(BuiltinOperator_ONE_HOT, Register_ONE_HOT());
+ AddBuiltin(BuiltinOperator_LOGICAL_OR, Register_LOGICAL_OR());
+ AddBuiltin(BuiltinOperator_LOGICAL_AND, Register_LOGICAL_AND());
+ AddBuiltin(BuiltinOperator_LOGICAL_NOT, Register_LOGICAL_NOT());
+ AddBuiltin(BuiltinOperator_UNPACK, Register_UNPACK());
+ AddBuiltin(BuiltinOperator_FLOOR_DIV, Register_FLOOR_DIV());
+ AddBuiltin(BuiltinOperator_SQUARE, Register_SQUARE());
+ AddBuiltin(BuiltinOperator_ZEROS_LIKE, Register_ZEROS_LIKE());
+ AddBuiltin(BuiltinOperator_FLOOR_MOD, Register_FLOOR_MOD());
+ AddBuiltin(BuiltinOperator_RANGE, Register_RANGE());
+ AddBuiltin(BuiltinOperator_LEAKY_RELU, Register_LEAKY_RELU());
+ AddBuiltin(BuiltinOperator_SQUARED_DIFFERENCE, Register_SQUARED_DIFFERENCE());
+ AddBuiltin(BuiltinOperator_FILL, Register_FILL());
+ AddBuiltin(BuiltinOperator_MIRROR_PAD, Register_MIRROR_PAD());
+#endif // TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+
+ AddCustom("TensorFlowMax", nnfw::tflite::custom::Register_TensorFlowMax());
+ AddCustom("SquaredDifference", nnfw::tflite::custom::Register_SquaredDifference());
+ AddCustom("TensorFlowSum", nnfw::tflite::custom::Register_TensorFlowSum());
+ AddCustom("Abs", nnfw::tflite::custom::Register_Abs());
+ AddCustom("TFLite_Detection_PostProcess",
+ ::tflite::ops::custom::Register_DETECTION_POSTPROCESS());
+
+}
+
+} // namespace tflite
+} // namespace nnfw
diff --git a/runtimes/libs/tflite/src/ext/nnapi_delegate.cpp b/runtimes/libs/tflite/src/ext/nnapi_delegate.cpp
new file mode 100644
index 000000000..55bdb0cd5
--- /dev/null
+++ b/runtimes/libs/tflite/src/ext/nnapi_delegate.cpp
@@ -0,0 +1,1238 @@
+/* Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// NOTE To minimize diff with upstream tensorflow, disable clang-format
+// clang-format off
+
+// NOTE This code is derived from the following file (in TensorFlow v1.12)
+// 'externals/tensorflow/tensorflow/lite/nnapi_delegate.cc'
+#include "tflite/ext/nnapi_delegate.h"
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <cstdlib>       // std::strtol
+#include <unordered_map> // boolean tensor buffers in NNAPIDelegate::Invoke
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 9
+#include "tensorflow/lite/builtin_op_data.h"
+#include "tensorflow/lite/error_reporter.h"
+#else
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/core/api/error_reporter.h"
+#endif
+#include "tensorflow/lite/model.h"
+#include <rua/Shim.h>
+#include "NeuralNetworksExShim.h"
+
+#ifdef __ANDROID__
+#include <android/log.h>
+#include <sys/system_properties.h>
+#endif
+
+namespace nnfw {
+namespace tflite {
+
+void logError(const char* format, ...) {
+ // stderr is convenient for native tests, but is not captured for apps
+ va_list args_for_stderr;
+ va_start(args_for_stderr, format);
+ vfprintf(stderr, format, args_for_stderr);
+ va_end(args_for_stderr);
+ fprintf(stderr, "\n");
+ fflush(stderr);
+#ifdef __ANDROID__
+ // produce logcat output for general consumption
+ va_list args_for_log;
+ va_start(args_for_log, format);
+ __android_log_vprint(ANDROID_LOG_ERROR, "tflite", format, args_for_log);
+ va_end(args_for_log);
+#endif
+}
+
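+// NOTE FATAL expands to two statements, so call sites must wrap it in
+// braces (as the CHECK_* macros below do) for it to behave after an 'if'.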
+#define FATAL(...) \
+ logError(__VA_ARGS__); \
+ exit(1);
+
+// TODO(aselle): Change the error model to use status codes.
+#define CHECK_TFLITE_SUCCESS(x) \
+ if (x != kTfLiteOk) { \
+ FATAL("Aborting since tflite returned failure nnapi_delegate.cc:%d.", \
+ __LINE__); \
+ }
+
+#define CHECK_NN(x) \
+ if (x != ANEURALNETWORKS_NO_ERROR) { \
+ FATAL("Aborting since NNAPI returned failure nnapi_delegate.cc:%d", \
+ __LINE__); \
+ }
+
+#define RETURN_ERROR_IF_TFLITE_FAILED(x) \
+ if (x != kTfLiteOk) { \
+ logError( \
+ "Returning error since TFLite returned failure nnapi_delegate.cc:%d.", \
+ __LINE__); \
+ return kTfLiteError; \
+ }
+
+#define RETURN_ERROR_IF_NN_FAILED(x) \
+ if (x != ANEURALNETWORKS_NO_ERROR) { \
+ logError( \
+ "Returning error since NNAPI returned failure nnapi_delegate.cc:%d.", \
+ __LINE__); \
+ return kTfLiteError; \
+ }
+
+// Tracking of NNAPI operand ids
+static const int64_t kOperandIdNotSet = -1;
+static const int64_t kOperandNotNeeded = -2;
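+// kOperandIdNotSet marks tensors that still need an NNAPI operand id;
+// kOperandNotNeeded marks tensors (e.g. temporaries and RNN back-edges)
+// that are never registered with NNAPI.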
+
+namespace {
+
+int32_t GetAndroidSdkVersion() {
+#ifdef __ANDROID__
+ const char* sdkProp = "ro.build.version.sdk";
+ char sdkVersion[PROP_VALUE_MAX];
+ int length = __system_property_get(sdkProp, sdkVersion);
+ if (length != 0) {
+ for (int i = 0; i < length; ++i) {
+ int digit = sdkVersion[i] - '0';
+ if (digit < 0 || digit > 9) {
+        // Non-numeric SDK version; assume it's higher than expected.
+        return 0xFFFF;
+ }
+ }
+ return std::strtol(sdkVersion, NULL, 0);
+ }
+ FATAL("No %s prop", sdkProp);
+#endif // __ANDROID__
+ return 0;
+}
+
+int32_t GetAndroidSdkVersionCached() {
+ static int32_t androidSdkVersion = GetAndroidSdkVersion();
+ return androidSdkVersion;
+}
+
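+// Used to present rank-0 (scalar) tensors to NNAPI as rank-1 tensors of size 1.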
+static const uint32_t dimension_for_scalar[1] = {1};
+
+} // namespace
+
+NNAPIAllocation::NNAPIAllocation(const char* filename,
+ ::tflite::ErrorReporter* error_reporter)
+ : MMAPAllocation(filename, error_reporter) {
+ if (mmapped_buffer_ != MAP_FAILED)
+ CHECK_NN(ANeuralNetworksMemory_createFromFd(buffer_size_bytes_, PROT_READ,
+ mmap_fd_, 0, &handle_));
+}
+
+NNAPIAllocation::~NNAPIAllocation() {
+ if (handle_) {
+ ANeuralNetworksMemory_free(handle_);
+ }
+}
+
+NNAPIDelegate::~NNAPIDelegate() {
+ if (nn_compiled_model_) {
+ ANeuralNetworksCompilation_free(nn_compiled_model_);
+ nn_compiled_model_ = nullptr;
+ }
+ if (nn_model_) {
+ ANeuralNetworksModel_free(nn_model_);
+ nn_model_ = nullptr;
+ // TODO(aselle): Is this thread-safe and callable multiple times?
+ }
+ // ANeuralNetworksShutdown();
+}
+
+// Adds the tensors of the interpreter to the NN API model.
+TfLiteStatus addTensorOperands(::tflite::Interpreter* interpreter,
+ ANeuralNetworksModel* nn_model,
+ uint32_t* no_of_operands_added,
+ std::vector<int64_t>* nnapi_ids) {
+ uint32_t next_id = 0;
+ for (size_t i = 0; i < interpreter->tensors_size(); i++) {
+ // Skip temporaries and RNN back-edges.
+ if ((*nnapi_ids)[i] == kOperandNotNeeded) continue;
+
+ (*nnapi_ids)[i] = int64_t(next_id);
+
+ int32_t nn_type = 0;
+ // NNAPI requires 32-bit float scale to be zero, tflite doesn't care
+ float scale = 0.0f;
+ int32_t zeroPoint = 0;
+ TfLiteTensor* tensor = interpreter->tensor(i);
+ switch (tensor->type) {
+ case kTfLiteNoType:
+ // Tensors added during initialization of Ops don't have a type yet and
+ // should not be registered with the NNAPI.
+ continue;
+ case kTfLiteFloat32:
+ nn_type = ANEURALNETWORKS_TENSOR_FLOAT32;
+ break;
+ case kTfLiteUInt8:
+ nn_type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM;
+ scale = tensor->params.scale;
+        // FIXME The next line is a workaround because a zero scale is
+        // currently passed down from TFLite. Note that the latest
+        // NeuralNetworks.h (see
+        // https://android.googlesource.com/platform/frameworks/ml/+/master/nn/runtime/include/NeuralNetworks.h)
+        // requires the scale to be greater than zero. Remove this workaround
+        // once the scale value is passed down correctly.
+ scale = (scale == 0.0f) ? 1.0f : scale;
+ zeroPoint = tensor->params.zero_point;
+ break;
+ case kTfLiteInt32:
+ nn_type = ANEURALNETWORKS_TENSOR_INT32;
+ scale = tensor->params.scale;
+ zeroPoint = tensor->params.zero_point;
+ break;
+ case kTfLiteBool:
+ // Workaround to pass bool type under NNAPI
+ // Use bool type using ANEURALNETWORKS_TENSOR_QUANT8_ASYMM with scale = 1.0f and zero_point = 0
+ nn_type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM;
+ scale = 1.0f;
+ zeroPoint = 0;
+ break;
+ default:
+ logError("Unsupported tensor type %d", tensor->type);
+ return kTfLiteError;
+ }
+ if (tensor->dims->size == 0) {
+      // WORKAROUND Some models have tensors with dimension zero
+ switch (tensor->type) {
+ case kTfLiteFloat32:
+ nn_type = ANEURALNETWORKS_TENSOR_FLOAT32;
+ break;
+ case kTfLiteInt32:
+ nn_type = ANEURALNETWORKS_TENSOR_INT32;
+ break;
+ default:
+ logError("NNAPI doesn't support tensors with rank 0 (index %d name %s)",
+ i, tensor->name);
+ return kTfLiteError;
+ }
+ }
+ if (tensor->dims->size > 4) {
+ logError("NNAPI doesn't support tensors with rank > 4 (index %d name %s)",
+ i, tensor->name);
+ return kTfLiteError;
+ }
+    // TODO(aselle): Note, many of these are intermediate results. Do I ever
+    // need to specify these sizes? I am currently calling setValue on all of
+    // them below, but I shouldn't have to in the future.
+ // Answer(jeanluc): If all the operators can set the dimension correctly,
+ // you won't need to.
+ ANeuralNetworksOperandType operand_type{
+ nn_type, static_cast<uint32_t>(tensor->dims->size),
+ reinterpret_cast<uint32_t*>(tensor->dims->data), scale, zeroPoint};
+ if (tensor->dims->size == 0) {
+      // WORKAROUND Some models have tensors with dimension zero
+      // Treat a scalar as a vector of size 1
+ operand_type.dimensions = dimension_for_scalar;
+ operand_type.dimensionCount = 1;
+ }
+ RETURN_ERROR_IF_NN_FAILED(
+ ANeuralNetworksModel_addOperand(nn_model, &operand_type));
+ // TODO(aselle): Based on Michael's suggestion, limiting this to read
+ // only memory
+ if (tensor->allocation_type == kTfLiteMmapRo) {
+ if (const NNAPIAllocation* alloc = dynamic_cast<const NNAPIAllocation*>(
+ static_cast<const ::tflite::Allocation*>(tensor->allocation))) {
+ RETURN_ERROR_IF_NN_FAILED(
+ ANeuralNetworksModel_setOperandValueFromMemory(
+ nn_model, next_id, alloc->memory(),
+ alloc->offset(tensor->data.raw), tensor->bytes));
+ } else {
+ RETURN_ERROR_IF_NN_FAILED(ANeuralNetworksModel_setOperandValue(
+ nn_model, next_id, tensor->data.raw, tensor->bytes));
+ }
+ } else if (tensor->bytes == 0) {
+      // These size-0 tensors are reserved optional tensors.
+ RETURN_ERROR_IF_NN_FAILED(
+ ANeuralNetworksModel_setOperandValue(nn_model, next_id, nullptr, 0));
+ }
+
+ ++next_id;
+ }
+ *no_of_operands_added = next_id;
+ return kTfLiteOk;
+}
+
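+// Maps TFLite tensor ids to NNAPI operand ids using 'map'; kOptionalTensor
+// entries are passed through unchanged.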
+void MapAndAddTensorIds(const int* from_ids_buf, size_t from_ids_count,
+ std::vector<uint32_t>* into,
+ const std::vector<int64_t>& map) {
+ for (size_t i = 0; i < from_ids_count; i++) {
+ int from_id = from_ids_buf[i];
+ if (from_id == kOptionalTensor) {
+ into->push_back(from_id);
+ } else {
+ into->push_back(map[from_id]);
+ }
+ }
+}
+
+// Adds the operations and their parameters to the NN API model.
+// 'next_id' is the ID of the next operand to be added to the model.
+TfLiteStatus AddOpsAndParams(
+ ::tflite::Interpreter* interpreter, ANeuralNetworksModel* nn_model,
+ uint32_t next_id, std::vector<int>* model_state_inputs,
+ std::vector<int>* model_state_outputs,
+ const std::vector<int64_t>& tensor_id_to_nnapi_id) {
+ for (size_t i = 0; i < interpreter->nodes_size(); i++) {
+ const auto* node_and_registration = interpreter->node_and_registration(i);
+ const TfLiteNode& node = node_and_registration->first;
+ const TfLiteRegistration& registration = node_and_registration->second;
+ ::tflite::BuiltinOperator builtin =
+ static_cast<::tflite::BuiltinOperator>(registration.builtin_code);
+
+ // Add the parameters.
+ std::vector<uint32_t> augmented_inputs, augmented_outputs;
+ MapAndAddTensorIds(node.inputs->data, node.inputs->size, &augmented_inputs,
+ tensor_id_to_nnapi_id);
+ MapAndAddTensorIds(node.outputs->data, node.outputs->size,
+ &augmented_outputs, tensor_id_to_nnapi_id);
+
+ auto add_scalar_int32 = [&nn_model, &augmented_inputs,
+ &next_id](int value) {
+      ANeuralNetworksOperandType operand_type{};
+      operand_type.type = ANEURALNETWORKS_INT32;
+ CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
+ CHECK_NN(ANeuralNetworksModel_setOperandValue(nn_model, next_id, &value,
+ sizeof(int32_t)))
+ augmented_inputs.push_back(next_id++);
+ };
+
+ auto add_scalar_float32 = [&nn_model, &augmented_inputs,
+ &next_id](float value) {
+      ANeuralNetworksOperandType operand_type{};
+      operand_type.type = ANEURALNETWORKS_FLOAT32;
+ CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
+ CHECK_NN(ANeuralNetworksModel_setOperandValue(nn_model, next_id, &value,
+ sizeof(float)))
+ augmented_inputs.push_back(next_id++);
+ };
+
+ auto add_vector_int32 = [&](const int* values, uint32_t num_values) {
+ ANeuralNetworksOperandType operand_type{};
+ operand_type.type = ANEURALNETWORKS_TENSOR_INT32;
+ operand_type.dimensionCount = 1;
+ operand_type.dimensions = &num_values;
+ CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
+ CHECK_NN(ANeuralNetworksModel_setOperandValue(
+ nn_model, next_id, values, sizeof(int32_t) * num_values));
+ augmented_inputs.push_back(next_id++);
+ };
+
+ // Handle state tensors of RNN, LSTM, SVDF.
+ // For each state_out tensor, a corresponding state_in operand needs to be
+ // created for NNAPI.
+ auto duplicate_state_tensor_float32 =
+ [interpreter, &nn_model, &next_id, &augmented_inputs,
+ &model_state_inputs, &model_state_outputs](int tensor_id) {
+ const TfLiteTensor* tensor = interpreter->tensor(tensor_id);
+ ANeuralNetworksOperandType operand_type{
+ ANEURALNETWORKS_TENSOR_FLOAT32,
+ static_cast<uint32_t>(tensor->dims->size),
+ reinterpret_cast<uint32_t*>(tensor->dims->data),
+ tensor->params.scale, tensor->params.zero_point};
+ CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type));
+ augmented_inputs.push_back(next_id);
+ model_state_inputs->push_back(next_id);
+ model_state_outputs->push_back(tensor_id);
+ next_id++;
+ };
+ auto check_and_add_activation = [&add_scalar_int32](int activation) {
+ if (activation > kTfLiteActRelu6) {
+ logError("NNAPI only supports RELU, RELU1 and RELU6 activations");
+ return kTfLiteError;
+ }
+ add_scalar_int32(activation);
+ return kTfLiteOk;
+ };
+
+ auto add_add_params = [&add_scalar_int32](void* data) {
+ auto* builtin = reinterpret_cast<TfLiteAddParams*>(data);
+ if (builtin->activation > kTfLiteActRelu6) {
+ logError("NNAPI only supports RELU, RELU1 and RELU6 activations");
+ return kTfLiteError;
+ }
+ add_scalar_int32(builtin->activation);
+ return kTfLiteOk;
+ };
+
+ auto add_pooling_params = [&add_scalar_int32,
+ &check_and_add_activation](void* data) {
+ auto builtin = reinterpret_cast<TfLitePoolParams*>(data);
+ add_scalar_int32(builtin->padding);
+ add_scalar_int32(builtin->stride_width);
+ add_scalar_int32(builtin->stride_height);
+ add_scalar_int32(builtin->filter_width);
+ add_scalar_int32(builtin->filter_height);
+ return check_and_add_activation(builtin->activation);
+ };
+
+ auto add_convolution_params = [&add_scalar_int32,
+ &check_and_add_activation](void* data) {
+ auto builtin = reinterpret_cast<TfLiteConvParams*>(data);
+ add_scalar_int32(builtin->padding);
+ add_scalar_int32(builtin->stride_width);
+ add_scalar_int32(builtin->stride_height);
+ return check_and_add_activation(builtin->activation);
+ };
+
+ auto add_depthwise_conv_params = [&add_scalar_int32,
+ &check_and_add_activation](void* data) {
+ auto builtin = reinterpret_cast<TfLiteDepthwiseConvParams*>(data);
+ add_scalar_int32(builtin->padding);
+ add_scalar_int32(builtin->stride_width);
+ add_scalar_int32(builtin->stride_height);
+ add_scalar_int32(builtin->depth_multiplier);
+ return check_and_add_activation(builtin->activation);
+ };
+
+ auto add_fully_connected_params = [&check_and_add_activation](void* data) {
+ auto builtin = reinterpret_cast<TfLiteFullyConnectedParams*>(data);
+ return check_and_add_activation(builtin->activation);
+ };
+
+ auto add_concatenation_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteConcatenationParams*>(data);
+ add_scalar_int32(builtin->axis);
+ if (builtin->activation != kTfLiteActNone) {
+ logError("Concatenation does not support fused activation in NNAPI");
+ return kTfLiteError;
+ }
+ return kTfLiteOk;
+ };
+
+ auto add_softmax_params = [&add_scalar_float32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteSoftmaxParams*>(data);
+ add_scalar_float32(builtin->beta);
+ };
+
+ auto add_space_to_depth_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteSpaceToDepthParams*>(data);
+ add_scalar_int32(builtin->block_size);
+ };
+
+ auto add_lstm_params = [&add_scalar_int32,
+ &add_scalar_float32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteLSTMParams*>(data);
+ add_scalar_int32(builtin->activation);
+ add_scalar_float32(builtin->cell_clip);
+ add_scalar_float32(builtin->proj_clip);
+ };
+
+    // LSTM in NNAPI requires a scratch tensor as an output operand.
+ auto add_lstm_scratch_tensor_float32 = [interpreter, &node, &nn_model,
+ &next_id, &augmented_outputs]() {
+ if (node.temporaries->size == 0) return;
+ int scratch_buffer_index = node.temporaries->data[0];
+ const TfLiteTensor* tensor = interpreter->tensor(scratch_buffer_index);
+ ANeuralNetworksOperandType operand_type{
+ ANEURALNETWORKS_TENSOR_FLOAT32,
+ static_cast<uint32_t>(tensor->dims->size),
+ reinterpret_cast<uint32_t*>(tensor->dims->data), tensor->params.scale,
+ tensor->params.zero_point};
+ CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type));
+ augmented_outputs.insert(augmented_outputs.begin(), next_id++);
+ };
+
+ auto add_mean_params = [&add_scalar_int32](void* data) {
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 9
+ auto builtin = reinterpret_cast<TfLiteMeanParams*>(data);
+#else
+ auto builtin = reinterpret_cast<TfLiteReducerParams*>(data);
+#endif
+ add_scalar_int32(builtin->keep_dims);
+ };
+
+ auto add_svdf_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteSVDFParams*>(data);
+ add_scalar_int32(builtin->rank);
+ add_scalar_int32(builtin->activation);
+ };
+
+ auto add_rnn_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteRNNParams*>(data);
+ add_scalar_int32(builtin->activation);
+ };
+
+ auto add_squeeze_params = [&](void* data) {
+ const auto* builtin = reinterpret_cast<TfLiteSqueezeParams*>(data);
+ // Note that we add the squeeze dimensions even if the dimensions were
+ // unspecified (empty), as NNAPI requires the operand.
+ add_vector_int32(builtin->squeeze_dims,
+ static_cast<uint32_t>(builtin->num_squeeze_dims));
+ };
+
+ // Handle optional input tensors.
+ auto add_optional_tensors = [&nn_model, &augmented_inputs,
+ &next_id](int nn_type) {
+ for (size_t idx = 0; idx < augmented_inputs.size(); idx++) {
+ if (augmented_inputs[idx] == static_cast<uint32_t>(kOptionalTensor)) {
+ const std::vector<uint32_t> dim = {0, 0};
+ ANeuralNetworksOperandType operand_type{nn_type, 2, dim.data(), 0, 0};
+ CHECK_NN(ANeuralNetworksModel_addOperand(nn_model, &operand_type))
+ CHECK_NN(ANeuralNetworksModel_setOperandValue(nn_model, next_id,
+ nullptr, 0))
+ augmented_inputs[idx] = next_id++;
+ }
+ }
+ };
+
+ int nnapi_version = 10;
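+    // The include below defines additional lambdas (nnfw extensions to
+    // upstream TFLite) that are used by the operator cases that follow.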
+#include "nnapi_delegate_ex_AddOpsAndParams_lambda.inc"
+
+ ANeuralNetworksOperationType nn_op_type = -1;
+
+    // Namespace alias to minimize diff with upstream tensorflow
+ namespace tflite = ::tflite;
+
+ switch (builtin) {
+ case tflite::BuiltinOperator_ADD:
+ nn_op_type = ANEURALNETWORKS_ADD;
+ RETURN_ERROR_IF_TFLITE_FAILED(add_add_params(node.builtin_data));
+ break;
+ case tflite::BuiltinOperator_MUL:
+ nn_op_type = ANEURALNETWORKS_MUL;
+ RETURN_ERROR_IF_TFLITE_FAILED(add_add_params(node.builtin_data));
+ break;
+ case tflite::BuiltinOperator_AVERAGE_POOL_2D:
+ RETURN_ERROR_IF_TFLITE_FAILED(add_pooling_params(node.builtin_data));
+ nn_op_type = ANEURALNETWORKS_AVERAGE_POOL_2D;
+ break;
+ case tflite::BuiltinOperator_MAX_POOL_2D:
+ RETURN_ERROR_IF_TFLITE_FAILED(add_pooling_params(node.builtin_data));
+ nn_op_type = ANEURALNETWORKS_MAX_POOL_2D;
+ break;
+ case tflite::BuiltinOperator_L2_POOL_2D:
+ RETURN_ERROR_IF_TFLITE_FAILED(add_pooling_params(node.builtin_data));
+ nn_op_type = ANEURALNETWORKS_L2_POOL_2D;
+ break;
+ case tflite::BuiltinOperator_CONV_2D: {
+ auto builtin = reinterpret_cast<TfLiteConvParams*>(node.builtin_data);
+ if (builtin->dilation_width_factor != 1 ||
+ builtin->dilation_height_factor != 1 || node.inputs->size != 3) {
+ logError("NNAPI does not support dilated Conv2D.");
+ return kTfLiteError;
+ }
+ }
+ RETURN_ERROR_IF_TFLITE_FAILED(
+ add_convolution_params(node.builtin_data));
+ nn_op_type = ANEURALNETWORKS_CONV_2D;
+ break;
+ case tflite::BuiltinOperator_RELU:
+ nn_op_type = ANEURALNETWORKS_RELU;
+ break;
+ case tflite::BuiltinOperator_RELU_N1_TO_1:
+ nn_op_type = ANEURALNETWORKS_RELU1;
+ break;
+ case tflite::BuiltinOperator_RELU6:
+ nn_op_type = ANEURALNETWORKS_RELU6;
+ break;
+ case tflite::BuiltinOperator_TANH:
+ nn_op_type = ANEURALNETWORKS_TANH;
+ break;
+ case tflite::BuiltinOperator_FLOOR:
+ nn_op_type = ANEURALNETWORKS_FLOOR;
+ break;
+ case tflite::BuiltinOperator_LOGISTIC:
+ nn_op_type = ANEURALNETWORKS_LOGISTIC;
+ break;
+ case tflite::BuiltinOperator_DEPTHWISE_CONV_2D:
+ RETURN_ERROR_IF_TFLITE_FAILED(
+ add_depthwise_conv_params(node.builtin_data));
+ nn_op_type = ANEURALNETWORKS_DEPTHWISE_CONV_2D;
+ break;
+ case tflite::BuiltinOperator_CONCATENATION:
+ RETURN_ERROR_IF_TFLITE_FAILED(
+ add_concatenation_params(node.builtin_data));
+ nn_op_type = ANEURALNETWORKS_CONCATENATION;
+ break;
+ case tflite::BuiltinOperator_SOFTMAX:
+ add_softmax_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_SOFTMAX;
+ break;
+ case tflite::BuiltinOperator_FULLY_CONNECTED:
+ RETURN_ERROR_IF_TFLITE_FAILED(
+ add_fully_connected_params(node.builtin_data));
+ nn_op_type = ANEURALNETWORKS_FULLY_CONNECTED;
+ break;
+ case tflite::BuiltinOperator_RESHAPE:
+ if (node.inputs->size != 2) {
+ logError("NNAPI only supports 2-input RESHAPE");
+ return kTfLiteError;
+ }
+ nn_op_type = ANEURALNETWORKS_RESHAPE;
+ // add_reshape_params(node.builtin_data);
+ break;
+ case tflite::BuiltinOperator_RESIZE_BILINEAR:
+ add_resize_bilinear_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_RESIZE_BILINEAR;
+ break;
+ case tflite::BuiltinOperator_SPACE_TO_DEPTH:
+ add_space_to_depth_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_SPACE_TO_DEPTH;
+ break;
+ case tflite::BuiltinOperator_LSTM: {
+ if (node.inputs->size + /* no of params */ 3 != 21) {
+ logError("NNAPI only supports 21-input LSTMs");
+ return kTfLiteError;
+ }
+ duplicate_state_tensor_float32(
+ node.outputs->data[/*kOutputStateTensor*/ 0]);
+ duplicate_state_tensor_float32(
+ node.outputs->data[/*kCellStateTensor*/ 1]);
+ add_lstm_params(node.builtin_data);
+ add_lstm_scratch_tensor_float32();
+ add_optional_tensors(ANEURALNETWORKS_TENSOR_FLOAT32);
+ nn_op_type = ANEURALNETWORKS_LSTM;
+ break;
+ }
+ case tflite::BuiltinOperator_DEQUANTIZE:
+ nn_op_type = ANEURALNETWORKS_DEQUANTIZE;
+ break;
+ case tflite::BuiltinOperator_SVDF: {
+ duplicate_state_tensor_float32(node.outputs->data[/*kStateTensor*/ 0]);
+ add_svdf_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_SVDF;
+ break;
+ }
+ case tflite::BuiltinOperator_RNN: {
+ duplicate_state_tensor_float32(
+ node.outputs->data[/*kHiddenStateTensor*/ 0]);
+ add_rnn_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_RNN;
+ break;
+ }
+ case tflite::BuiltinOperator_EMBEDDING_LOOKUP:
+ nn_op_type = ANEURALNETWORKS_EMBEDDING_LOOKUP;
+ break;
+ case tflite::BuiltinOperator_PAD:
+ nnapi_version = 11; // require NNAPI 1.1
+ nn_op_type = ANEURALNETWORKS_PAD;
+ break;
+ case tflite::BuiltinOperator_MEAN:
+ nnapi_version = 11; // require NNAPI 1.1
+ add_mean_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_MEAN;
+ break;
+ case tflite::BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION:
+ nn_op_type = ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION;
+ add_lrn_params(node.builtin_data);
+ break;
+ case tflite::BuiltinOperator_DIV:
+ nnapi_version = 11; // require NNAPI 1.1
+ nn_op_type = ANEURALNETWORKS_DIV;
+ RETURN_ERROR_IF_TFLITE_FAILED(check_and_add_activation(
+ reinterpret_cast<TfLiteDivParams*>(node.builtin_data)->activation));
+ break;
+ case tflite::BuiltinOperator_SUB:
+ nnapi_version = 11; // require NNAPI 1.1
+ nn_op_type = ANEURALNETWORKS_SUB;
+ RETURN_ERROR_IF_TFLITE_FAILED(check_and_add_activation(
+ reinterpret_cast<TfLiteSubParams*>(node.builtin_data)->activation));
+ break;
+ case tflite::BuiltinOperator_SQUEEZE:
+ nnapi_version = 11; // requires NNAPI 1.1
+ add_squeeze_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_SQUEEZE;
+ break;
+ case tflite::BuiltinOperator_TRANSPOSE:
+ // The permutation input tensor value dictates the output dimensions.
+ // TODO(b/110888333): Support dynamically-sized tensors in delegates.
+ if ((node.inputs->size > 1) &&
+ (interpreter->tensor(node.inputs->data[1])->allocation_type !=
+ kTfLiteMmapRo)) {
+ logError("NNAPI does not yet support dynamic tensors.");
+ return kTfLiteError;
+ }
+ nnapi_version = 11; // require NNAPI 1.1
+ nn_op_type = ANEURALNETWORKS_TRANSPOSE;
+ break;
+ case tflite::BuiltinOperator_L2_NORMALIZATION:
+ nn_op_type = ANEURALNETWORKS_L2_NORMALIZATION;
+ if (reinterpret_cast<TfLiteL2NormParams*>(node.builtin_data)
+ ->activation != kTfLiteActNone) {
+ logError(
+ "NNAPI does not support L2Normalization with fused activations");
+ return kTfLiteError;
+ }
+ if ((node.inputs->size > 0) &&
+ (interpreter->tensor(node.inputs->data[0])->dims->size != 4)) {
+ logError("NNAPI only supports input rank 4 for L2Normalization");
+ return kTfLiteError;
+ }
+ break;
+ case tflite::BuiltinOperator_HASHTABLE_LOOKUP:
+ if (interpreter->tensor(node.outputs->data[0])->type !=
+ kTfLiteFloat32) {
+          logError("NNAPI only supports HASHTABLE_LOOKUP with float32 output");
+ return kTfLiteError;
+ }
+ nn_op_type = ANEURALNETWORKS_HASHTABLE_LOOKUP;
+ break;
+ case tflite::BuiltinOperator_STRIDED_SLICE:
+ add_strided_slice_params(node.builtin_data);
+ nn_op_type = ANEURALNETWORKS_STRIDED_SLICE;
+ break;
+ case tflite::BuiltinOperator_SPACE_TO_BATCH_ND:
+ nnapi_version = 11; // require NNAPI 1.1
+ nn_op_type = ANEURALNETWORKS_SPACE_TO_BATCH_ND;
+ break;
+ case tflite::BuiltinOperator_BATCH_TO_SPACE_ND:
+ nnapi_version = 11; // require NNAPI 1.1
+ nn_op_type = ANEURALNETWORKS_BATCH_TO_SPACE_ND;
+ check_batch_to_space_params();
+ break;
+ case tflite::BuiltinOperator_CAST:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_CAST_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_TOPK_V2:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_TOPK_V2_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_GATHER:
+ add_gather_ex_params(node.builtin_data);
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_GATHER_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_SPLIT:
+ add_split_params(node.builtin_data);
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_SPLIT_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_NEG:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_NEG_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_EXP:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_EXP_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_TRANSPOSE_CONV:
+ add_transpose_conv_params(node.builtin_data);
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_TRANSPOSE_CONV_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_PRELU:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_PRELU_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_ARG_MAX:
+ check_arg_max_input(node.builtin_data);
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_ARGMAX_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+ case tflite::BuiltinOperator_PACK:
+ add_pack_ex_params(node.builtin_data);
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_PACK_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_UNPACK:
+ add_unpack_ex_params(node.builtin_data);
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_UNPACK_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_SQRT:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_SQRT_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_RSQRT:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_RSQRT_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_EQUAL:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_EQUAL_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_NOT_EQUAL:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_NOT_EQUAL_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(), static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_SUM:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_REDUCE_SUM_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_REDUCE_MAX:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_REDUCE_MAX_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_REDUCE_MIN:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_REDUCE_MIN_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_LOGICAL_AND:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_LOGICAL_AND_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_LOGICAL_OR:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_LOGICAL_OR_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_LOGICAL_NOT:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_LOGICAL_NOT_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ case tflite::BuiltinOperator_SQUARED_DIFFERENCE:
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_SQUARED_DIFFERENCE_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+
+#endif
+ case tflite::BuiltinOperator_CONCAT_EMBEDDINGS:
+ case tflite::BuiltinOperator_LSH_PROJECTION:
+ case tflite::BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN:
+ case tflite::BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN:
+ case tflite::BuiltinOperator_EMBEDDING_LOOKUP_SPARSE:
+ case tflite::BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM:
+ case tflite::BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM:
+ //case tflite::BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION:
+ case tflite::BuiltinOperator_PADV2:
+ //case tflite::BuiltinOperator_RESIZE_BILINEAR:
+ case tflite::BuiltinOperator_CALL:
+ case tflite::BuiltinOperator_SKIP_GRAM:
+ //case tflite::BuiltinOperator_RELU_N1_TO_1:
+ //case tflite::BuiltinOperator_GATHER:
+ //case tflite::BuiltinOperator_SPACE_TO_BATCH_ND:
+ //case tflite::BuiltinOperator_BATCH_TO_SPACE_ND:
+ //case tflite::BuiltinOperator_TOPK_V2:
+ //case tflite::BuiltinOperator_SPLIT:
+ //case tflite::BuiltinOperator_STRIDED_SLICE:
+ //case tflite::BuiltinOperator_EXP:
+ case tflite::BuiltinOperator_LOG_SOFTMAX:
+ //case tflite::BuiltinOperator_DEQUANTIZE:
+ case tflite::BuiltinOperator_DELEGATE:
+ //case tflite::BuiltinOperator_CAST:
+ //case tflite::BuiltinOperator_PRELU:
+ case tflite::BuiltinOperator_MAXIMUM:
+ case tflite::BuiltinOperator_MINIMUM:
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+ case tflite::BuiltinOperator_ARG_MIN:
+#endif
+ case tflite::BuiltinOperator_GREATER:
+ case tflite::BuiltinOperator_GREATER_EQUAL:
+ case tflite::BuiltinOperator_LESS:
+ case tflite::BuiltinOperator_LESS_EQUAL:
+ //case tflite::BuiltinOperator_NEG:
+ case tflite::BuiltinOperator_SELECT:
+ case tflite::BuiltinOperator_SLICE:
+ case tflite::BuiltinOperator_SIN:
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+ case tflite::BuiltinOperator_LOG:
+#endif
+ //case tflite::BuiltinOperator_TRANSPOSE_CONV:
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+ case tflite::BuiltinOperator_TILE:
+ case tflite::BuiltinOperator_EXPAND_DIMS:
+ case tflite::BuiltinOperator_SPARSE_TO_DENSE:
+ //case tflite::BuiltinOperator_EQUAL:
+ //case tflite::BuiltinOperator_NOT_EQUAL:
+ //case tflite::BuiltinOperator_SUM:
+ //case tflite::BuiltinOperator_REDUCE_MAX:
+ //case tflite::BuiltinOperator_REDUCE_MIN:
+ case tflite::BuiltinOperator_REDUCE_PROD:
+ //case tflite::BuiltinOperator_SQRT:
+ //case tflite::BuiltinOperator_RSQRT:
+ case tflite::BuiltinOperator_SHAPE:
+ case tflite::BuiltinOperator_POW:
+ case tflite::BuiltinOperator_FAKE_QUANT:
+ //case tflite::BuiltinOperator_PACK:
+ //case tflite::BuiltinOperator_LOGICAL_OR:
+ case tflite::BuiltinOperator_ONE_HOT:
+ //case tflite::BuiltinOperator_LOGICAL_AND:
+ //case tflite::BuiltinOperator_LOGICAL_NOT:
+ //case tflite::BuiltinOperator_UNPACK:
+ case tflite::BuiltinOperator_FLOOR_DIV:
+ case tflite::BuiltinOperator_REDUCE_ANY:
+ case tflite::BuiltinOperator_SQUARE:
+ case tflite::BuiltinOperator_ZEROS_LIKE:
+ case tflite::BuiltinOperator_FILL:
+#endif
+ logError("Op code %d is currently not delegated to NNAPI", builtin);
+ return kTfLiteError;
+ break;
+ case tflite::BuiltinOperator_CUSTOM: {
+ std::string custom_name(registration.custom_name);
+ if (custom_name.compare("TensorFlowMax") == 0) {
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_REDUCE_MAX_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ }
+ else if (custom_name.compare("SquaredDifference") == 0) {
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_SQUARED_DIFFERENCE_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ }
+ else if (custom_name.compare("TensorFlowSum") == 0) {
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_REDUCE_SUM_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ }
+ else if (custom_name.compare("Abs") == 0) {
+ CHECK_NN(ANeuralNetworksModel_addOperationEx(
+ nn_model, ANEURALNETWORKS_ABS_EX,
+ static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(node.outputs->size),
+ reinterpret_cast<uint32_t*>(node.outputs->data)));
+ continue;
+ }
+ logError("Custom operations are not supported when using NNAPI.");
+ return kTfLiteError;
+ break;
+ }
+ default:
+ logError("Op code %d is currently not delegated to NNAPI", builtin);
+ return kTfLiteError;
+ break;
+ }
+
+ if (nnapi_version == 11 && GetAndroidSdkVersionCached() < 28) {
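+      // NOTE The check appears intentionally disabled; the commented-out
+      // lines below show the upstream behavior of rejecting NNAPI 1.1 ops
+      // on SDKs older than 28.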
+ //logError("Op %d needs NNAPI1.1", builtin);
+ //return kTfLiteError;
+ }
+
+ // Add the operation.
+ RETURN_ERROR_IF_NN_FAILED(ANeuralNetworksModel_addOperation(
+ nn_model, nn_op_type, static_cast<uint32_t>(augmented_inputs.size()),
+ augmented_inputs.data(),
+ static_cast<uint32_t>(augmented_outputs.size()),
+ reinterpret_cast<uint32_t*>(augmented_outputs.data())));
+ }
+ return kTfLiteOk;
+}
+
+TfLiteStatus NNAPIDelegate::BuildGraph(::tflite::Interpreter* interpreter) {
+ if (nn_model_ && nn_compiled_model_) return model_status_;
+
+  // TODO(aselle): This is not correct; we need to handle resize invalidation.
+ if (!nn_model_) {
+ CHECK_NN(ANeuralNetworksModel_create(&nn_model_));
+
+ // Find which tensors should be added to NNAPI. TFLite has temporaries
+    // and RNN back-edges which are not valid for NNAPI. We look through all
+ // inputs and outputs and mark the mapping in tensor_id_to_nnapi_id with
+ // kOperandIdNotSet. addTensorOperands will replace those with the
+ // corresponding NNAPI operand ids and skip kOperandNotNeeded entries.
+ std::vector<int64_t> tensor_id_to_nnapi_id(interpreter->tensors_size(),
+ kOperandNotNeeded);
+ auto set_ids_to_not_set = [&tensor_id_to_nnapi_id](const int* buf,
+ int count) {
+ for (int j = 0; j < count; j++) {
+ auto tensor_id = buf[j];
+ if (tensor_id != kOptionalTensor) {
+ tensor_id_to_nnapi_id[tensor_id] = kOperandIdNotSet;
+ }
+ }
+ };
+ for (size_t i = 0; i < interpreter->nodes_size(); i++) {
+ const auto* node_and_registration = interpreter->node_and_registration(i);
+ const TfLiteNode& node = node_and_registration->first;
+ set_ids_to_not_set(node.inputs->data, node.inputs->size);
+ set_ids_to_not_set(node.outputs->data, node.outputs->size);
+ }
+ set_ids_to_not_set(interpreter->inputs().data(),
+ interpreter->inputs().size());
+ set_ids_to_not_set(interpreter->outputs().data(),
+ interpreter->outputs().size());
+
+ uint32_t next_id = 0;
+ RETURN_ERROR_IF_TFLITE_FAILED(addTensorOperands(
+ interpreter, nn_model_, &next_id, &tensor_id_to_nnapi_id));
+ RETURN_ERROR_IF_TFLITE_FAILED(
+ AddOpsAndParams(interpreter, nn_model_, next_id, &model_states_inputs_,
+ &model_states_outputs_, tensor_id_to_nnapi_id));
+
+ std::vector<uint32_t> augmented_inputs;
+ MapAndAddTensorIds(interpreter->inputs().data(),
+ interpreter->inputs().size(), &augmented_inputs,
+ tensor_id_to_nnapi_id);
+ augmented_inputs.insert(augmented_inputs.end(),
+ model_states_inputs_.begin(),
+ model_states_inputs_.end());
+ std::vector<uint32_t> augmented_outputs;
+ MapAndAddTensorIds(interpreter->outputs().data(),
+ interpreter->outputs().size(), &augmented_outputs,
+ tensor_id_to_nnapi_id);
+ MapAndAddTensorIds(model_states_outputs_.data(),
+ model_states_outputs_.size(), &augmented_outputs,
+ tensor_id_to_nnapi_id);
+
+ CHECK_NN(ANeuralNetworksModel_identifyInputsAndOutputs(
+ nn_model_, static_cast<uint32_t>(augmented_inputs.size()),
+ reinterpret_cast<const uint32_t*>(augmented_inputs.data()),
+ static_cast<uint32_t>(augmented_outputs.size()),
+ reinterpret_cast<const uint32_t*>(augmented_outputs.data())));
+
+ // TODO Support ANeuralNetworksModel_relaxComputationFloat32toFloat16
+ //if (GetAndroidSdkVersionCached() >= 28) {
+ // CHECK_NN(ANeuralNetworksModel_relaxComputationFloat32toFloat16(
+ // nn_model_, interpreter->GetAllowFp16PrecisionForFp32()));
+ //}
+ CHECK_NN(ANeuralNetworksModel_finish(nn_model_));
+ }
+ if (!nn_compiled_model_) {
+ CHECK_NN(ANeuralNetworksCompilation_create(nn_model_, &nn_compiled_model_));
+ CHECK_NN(ANeuralNetworksCompilation_finish(nn_compiled_model_));
+ }
+ return kTfLiteOk;
+}
+
+TfLiteStatus NNAPIDelegate::Invoke(::tflite::Interpreter* interpreter) {
+ if (!nn_model_) {
+ model_status_ = BuildGraph(interpreter);
+ if (model_status_ != kTfLiteOk) {
+ logError("Failed to build graph for NNAPI");
+ }
+ }
+ if (model_status_ != kTfLiteOk) {
+ return model_status_;
+ }
+
+ ANeuralNetworksExecution* execution = nullptr;
+ CHECK_NN(ANeuralNetworksExecution_create(nn_compiled_model_, &execution));
+
+  // Allocate temporary buffers for boolean tensors, which NNAPI receives
+  // through the QUANT8_ASYMM workaround (see addTensorOperands).
+ std::unordered_map<size_t, uint8_t*> input_boolean_tensors;
+ std::unordered_map<size_t, uint8_t*> output_boolean_tensors;
+ for (size_t i = 0; i < interpreter->inputs().size(); i++)
+ {
+ int input = interpreter->inputs()[i];
+ TfLiteTensor* tensor = interpreter->tensor(input);
+ if (tensor->type == kTfLiteBool)
+ {
+      size_t elements = tensor->bytes / sizeof(bool);
+      uint8_t* temp_tensor = new uint8_t[elements];
+      input_boolean_tensors[i] = temp_tensor;
+      for (size_t idx = 0; idx < elements; idx++)
+      {
+        // Encode so the decode in the output loop (nonzero => true) round-trips.
+        temp_tensor[idx] = (tensor->data.b[idx] ? 0xff : 0x00);
+      }
+ }
+ }
+ for (size_t i = 0; i < interpreter->outputs().size(); i++)
+ {
+ int output = interpreter->outputs()[i];
+ TfLiteTensor* tensor = interpreter->tensor(output);
+ if (tensor->type == kTfLiteBool)
+ {
+ uint8_t* temp_tensor = new uint8_t[tensor->bytes / sizeof(bool)];
+ output_boolean_tensors[i] = temp_tensor;
+ }
+ }
+
+  // Currently we perform a deep copy of the input buffers.
+ for (size_t i = 0; i < interpreter->inputs().size(); i++) {
+ int input = interpreter->inputs()[i];
+ // TODO(aselle): Is this what we want or do we want input instead?
+    // TODO(aselle): This should be called setInputValue maybe, to be consistent.
+ TfLiteTensor* tensor = interpreter->tensor(input);
+ if (tensor->type == kTfLiteBool)
+ {
+ CHECK_NN(ANeuralNetworksExecution_setInput(
+ execution, i, nullptr, input_boolean_tensors[i], tensor->bytes * sizeof(uint8_t) / sizeof(bool)));
+ }
+ else
+ {
+ CHECK_NN(ANeuralNetworksExecution_setInput(
+ execution, i, nullptr, tensor->data.raw, tensor->bytes));
+ }
+ }
+
+ // Tell nn api where to place final data.
+ for (size_t i = 0; i < interpreter->outputs().size(); i++) {
+ int output = interpreter->outputs()[i];
+ TfLiteTensor* tensor = interpreter->tensor(output);
+
+ if (tensor->type == kTfLiteBool)
+ {
+ CHECK_NN(ANeuralNetworksExecution_setOutput(
+ execution, i, nullptr, output_boolean_tensors[i], tensor->bytes * sizeof(uint8_t) / sizeof(bool)));
+ }
+ else
+ {
+ CHECK_NN(ANeuralNetworksExecution_setOutput(
+ execution, i, nullptr, tensor->data.raw, tensor->bytes));
+ }
+ }
+
+  // The state_out of the previous invocation needs to be mapped to the
+  // state_in of the current invocation.
+ for (size_t i = 0; i < model_states_outputs_.size(); i++) {
+ int state_tensor_idx = model_states_outputs_[i];
+ TfLiteTensor* tensor = interpreter->tensor(state_tensor_idx);
+ // Here we are using a deep copy for state_in tensors so that we are not
+    // reading and writing into the same buffer during an invocation.
+ // TODO(miaowang): using double shared buffer to minimize the copies.
+ CHECK_NN(ANeuralNetworksExecution_setInput(
+ execution, i + interpreter->inputs().size(), nullptr, tensor->data.raw,
+ tensor->bytes));
+ // Tell NNAPI where to output the state_out.
+ CHECK_NN(ANeuralNetworksExecution_setOutput(
+ execution, i + interpreter->outputs().size(), nullptr, tensor->data.raw,
+ tensor->bytes));
+ }
+
+ // Currently use blocking compute.
+ ANeuralNetworksEvent* event = nullptr;
+ CHECK_NN(ANeuralNetworksExecution_startCompute(execution, &event));
+ CHECK_NN(ANeuralNetworksEvent_wait(event));
+ ANeuralNetworksEvent_free(event);
+ ANeuralNetworksExecution_free(execution);
+
+  // Release the temporary buffers that were used for boolean inputs.
+ for (size_t i = 0; i < interpreter->inputs().size(); i++) {
+ int input = interpreter->inputs()[i];
+ TfLiteTensor* tensor = interpreter->tensor(input);
+
+ if (tensor->type == kTfLiteBool)
+ {
+ uint8_t* temp_tensor = input_boolean_tensors[i];
+ input_boolean_tensors[i] = nullptr;
+      delete[] temp_tensor;
+ }
+ }
+ for (size_t i = 0; i < interpreter->outputs().size(); i++) {
+ int output = interpreter->outputs()[i];
+ TfLiteTensor* tensor = interpreter->tensor(output);
+
+ if (tensor->type == kTfLiteBool)
+ {
+ uint8_t* temp_tensor = output_boolean_tensors[i];
+ size_t elements = tensor->bytes / sizeof(bool);
+ for (size_t idx = 0; idx < elements; idx++)
+ {
+ tensor->data.b[idx] = ((temp_tensor[idx] == 0x00) ? false : true);
+ }
+ output_boolean_tensors[i] = nullptr;
+      delete[] temp_tensor;
+ }
+ }
+
+#if 0
+ printf("From the NN API:\n");
+ TfLiteTensor* tensor = interpreter->tensor(interpreter->outputs()[0]);
+ if (float* data =
+ interpreter->typed_tensor<float>(interpreter->outputs()[0])) {
+ size_t num = tensor->bytes / sizeof(float);
+ for (float* p = data; p < data + num; p++) {
+ printf(" %f", *p);
+ }
+ printf("\n");
+ }
+#endif
+
+ return kTfLiteOk;
+}
+
+bool NNAPIDelegate::IsSupported() { return nnfw::NNAPIExists(); }
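+
+// Usage sketch (hypothetical caller; 'interpreter' is an already-built
+// ::tflite::Interpreter):
+//   nnfw::tflite::NNAPIDelegate delegate;
+//   if (delegate.IsSupported() && delegate.Invoke(interpreter.get()) == kTfLiteOk) {
+//     // outputs are now populated in the interpreter's tensors
+//   }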
+
+} // namespace tflite
+} // namespace nnfw
+
+// clang-format on
diff --git a/runtimes/libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc b/runtimes/libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc
new file mode 100644
index 000000000..a3e99ee7a
--- /dev/null
+++ b/runtimes/libs/tflite/src/ext/nnapi_delegate_ex_AddOpsAndParams_lambda.inc
@@ -0,0 +1,123 @@
+// This file is included from AddOpsAndParams defined in nnapi_delegate.cc
+// and contains lambdas that extend the original TensorFlow Lite implementation.
+ auto add_resize_bilinear_params = [&add_scalar_int32, &interpreter, &augmented_inputs](void* data) {
+ auto builtin = reinterpret_cast<TfLiteResizeBilinearParams*>(data);
+ if (builtin->align_corners) {
+ FATAL("Resize bilinear does not support align corners in NNAPI");
+ }
+
+ TfLiteTensor* tensor = interpreter->tensor(augmented_inputs.back());
+ assert(tensor->type == kTfLiteInt32);
+ assert(tensor->bytes == sizeof(int)*2);
+ augmented_inputs.pop_back();
+
+    // TFLite stores the new size as [height, width]; NNAPI takes width, then height.
+    int height = ((int*)(tensor->data.raw))[0];
+    int width = ((int*)(tensor->data.raw))[1];
+    add_scalar_int32(width);
+    add_scalar_int32(height);
+ };
+
+ auto add_transpose_conv_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteTransposeConvParams*>(data);
+ add_scalar_int32(builtin->padding);
+ add_scalar_int32(builtin->stride_width);
+ add_scalar_int32(builtin->stride_height);
+ };
+
+ auto add_lrn_params = [&add_scalar_int32,
+ &add_scalar_float32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteLocalResponseNormParams*>(data);
+ add_scalar_int32(builtin->radius);
+ add_scalar_float32(builtin->bias);
+ add_scalar_float32(builtin->alpha);
+ add_scalar_float32(builtin->beta);
+ };
+
+ auto add_strided_slice_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteStridedSliceParams*>(data);
+ add_scalar_int32(builtin->begin_mask);
+ add_scalar_int32(builtin->end_mask);
+    // ellipsis_mask and new_axis_mask are not supported by the NN runtime
+    // (cf. the TFLite interpreter supports both operations)
+ if (builtin->ellipsis_mask) {
+ FATAL("STRIDE_SLICE does not support ellipsis_mask in NNAPI");
+ }
+ if (builtin->new_axis_mask) {
+ FATAL("STRIDE_SLICE does not support new_axis_mask in NNAPI");
+ }
+ add_scalar_int32(builtin->shrink_axis_mask);
+ };
+
+ auto add_gather_ex_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteGatherParams*>(data);
+ add_scalar_int32(builtin->axis);
+ if (builtin->axis != 0) {
+ FATAL("GATHER does not support axis>0 in NNAPI");
+ }
+ };
+
+#if TFLITE_MAJOR_VER == 1 && TFLITE_MINOR_VER == 13
+ auto add_pack_ex_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLitePackParams*>(data);
+ add_scalar_int32(builtin->values_count);
+ add_scalar_int32(builtin->axis);
+ };
+
+ auto add_unpack_ex_params = [&add_scalar_int32](void* data) {
+ auto builtin = reinterpret_cast<TfLiteUnpackParams*>(data);
+ add_scalar_int32(builtin->num);
+ add_scalar_int32(builtin->axis);
+ };
+#endif
+
+ auto check_batch_to_space_params = [interpreter, &node, &augmented_inputs]() {
+
+    // If there are 3 inputs, check whether crops holds the default values {0, 0, 0, 0};
+    // otherwise the operation is unsupported by NNAPI.
+
+ if(augmented_inputs.size() == 3)
+ {
+ const uint32_t crops_buffer_index = node.inputs->data[2];
+ const TfLiteTensor* crops = interpreter->tensor(crops_buffer_index);
+ const int *crops_value = crops->data.i32;
+
+      // Check whether crops holds the default values {0, 0, 0, 0}
+ if(crops_value[0] != 0 || crops_value[1] != 0 || crops_value[2] != 0 || crops_value[3] != 0)
+ {
+ FATAL("BATCH_TO_SPACE_ND does not support Explicit crops in NNAPI");
+ }
+ else
+ {
+        // Drop the crops input and pass only the other two inputs
+ augmented_inputs.pop_back();
+ }
+ }
+ };
+
+ auto add_split_params = [&add_scalar_int32, &augmented_inputs](void* data) {
+ // swap 1st and 2nd operand order
+ auto input_tensor = augmented_inputs[1];
+ auto axis = augmented_inputs[0];
+ augmented_inputs[0] = input_tensor;
+ augmented_inputs[1] = axis;
+
+ auto builtin = reinterpret_cast<TfLiteSplitParams*>(data);
+ add_scalar_int32(builtin->num_splits);
+ };
+
+ auto check_arg_max_input = [&interpreter, &augmented_inputs](void *data) {
+ auto params = reinterpret_cast<TfLiteArgMaxParams*>(data);
+ if (params->output_type != kTfLiteInt32)
+ {
+ FATAL("Cannot handle output type in NNAPI");
+ }
+
+ TfLiteTensor* axis_tensor = interpreter->tensor(augmented_inputs.back());
+ assert(axis_tensor->type == kTfLiteInt32);
+
+ int64_t count = 1;
+ for (int i = 0; i < axis_tensor->dims->size; ++i) {
+ count *= axis_tensor->dims->data[i];
+ }
+ assert(count == 1);
+ };
diff --git a/runtimes/libs/tflite/src/interp/FlatBufferBuilder.cpp b/runtimes/libs/tflite/src/interp/FlatBufferBuilder.cpp
new file mode 100644
index 000000000..f54e67202
--- /dev/null
+++ b/runtimes/libs/tflite/src/interp/FlatBufferBuilder.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/interp/FlatBufferBuilder.h"
+
+#include "tflite/ext/kernels/register.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+
+std::unique_ptr<::tflite::Interpreter> FlatBufferBuilder::build(void) const
+{
+ std::unique_ptr<::tflite::Interpreter> interpreter;
+
+ nnfw::tflite::BuiltinOpResolver resolver;
+
+ ::tflite::InterpreterBuilder builder(_model, resolver);
+
+ builder(&interpreter);
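+
+  // NOTE InterpreterBuilder resets 'interpreter' to null on failure, so
+  // callers should check the returned pointer.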
+
+ return interpreter;
+}
+
+} // namespace tflite
+} // namespace nnfw
diff --git a/runtimes/libs/tflite/src/interp/FunctionBuilder.cpp b/runtimes/libs/tflite/src/interp/FunctionBuilder.cpp
new file mode 100644
index 000000000..599a4f393
--- /dev/null
+++ b/runtimes/libs/tflite/src/interp/FunctionBuilder.cpp
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/interp/FunctionBuilder.h"
+
+namespace nnfw
+{
+namespace tflite
+{
+
+std::unique_ptr<::tflite::Interpreter> FunctionBuilder::build(void) const
+{
+ auto res = std::unique_ptr<::tflite::Interpreter>{new ::tflite::Interpreter};
+
+ _fn(*res);
+
+ return res;
+}
+
+} // namespace tflite
+} // namespace nnfw
diff --git a/runtimes/libs/xdata/CMakeLists.txt b/runtimes/libs/xdata/CMakeLists.txt
new file mode 100644
index 000000000..23a08303b
--- /dev/null
+++ b/runtimes/libs/xdata/CMakeLists.txt
@@ -0,0 +1,7 @@
+add_library(nnfw_lib_xdata SHARED src/trace.cpp)
+target_include_directories(nnfw_lib_xdata PUBLIC include)
+target_link_libraries(nnfw_lib_xdata PUBLIC nnfw_lib_xray_event)
+target_link_libraries(nnfw_lib_xdata PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_xdata PRIVATE nnfw_coverage)
+
+install(TARGETS nnfw_lib_xdata LIBRARY DESTINATION lib)
diff --git a/runtimes/libs/xdata/README.md b/runtimes/libs/xdata/README.md
new file mode 100644
index 000000000..76b100936
--- /dev/null
+++ b/runtimes/libs/xdata/README.md
@@ -0,0 +1,3 @@
+# xdata
+
+_xdata_ is a collection of XRay data structures for performance analysis.
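+
+A minimal usage sketch (hypothetical caller; assumes an XRay consumer reads the
+trace category, and note that the `region` must outlive the stored `info`):
+
+```cpp
+#include <xdata.h>
+
+#include <memory>
+
+void traced_step()
+{
+  xdata::trace::region rgn{"Conv2D"}; // arbitrary example region name
+  xdata::trace::cat()->set(std::make_unique<xdata::trace::info>(&rgn, xdata::trace::enter));
+  // ... timed work ...
+  xdata::trace::cat()->set(std::make_unique<xdata::trace::info>(&rgn, xdata::trace::leave));
+}
+```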
diff --git a/runtimes/libs/xdata/include/xdata.h b/runtimes/libs/xdata/include/xdata.h
new file mode 100644
index 000000000..4f291ef6b
--- /dev/null
+++ b/runtimes/libs/xdata/include/xdata.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XDATA_H__
+#define __NNFW_XDATA_H__
+
+#include <xdata/trace.h>
+
+#endif // __NNFW_XDATA_H__
diff --git a/runtimes/libs/xdata/include/xdata/trace.h b/runtimes/libs/xdata/include/xdata/trace.h
new file mode 100644
index 000000000..700c39aaa
--- /dev/null
+++ b/runtimes/libs/xdata/include/xdata/trace.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XDATA_TRACE_H__
+#define __NNFW_XDATA_TRACE_H__
+
+#include <xray/event_category.h>
+#include <xray/event_code.h>
+
+#include <memory>
+#include <string>
+
+namespace xdata
+{
+namespace trace
+{
+
+class region
+{
+public:
+ region(const char *s) : _name{s}
+ {
+ // DO NOTHING
+ }
+
+public:
+ const std::string &name(void) const { return _name; }
+
+private:
+ std::string _name;
+};
+
+enum action
+{
+ enter,
+ leave
+};
+
+class info final
+{
+public:
+ info(const trace::region *rgn, const trace::action &act) : _region{rgn}, _action{act}
+ {
+ // DO NOTHING
+ }
+
+public:
+ const trace::region *region(void) const { return _region; }
+ const trace::action &action(void) const { return _action; }
+
+private:
+ const trace::region *_region;
+ trace::action _action;
+};
+
+// WARN! This implementation is not thread-safe.
+// TODO Make this thread-safe
+class category final : public xray::event_category
+{
+private:
+ category() = default;
+
+public:
+ xray::event_code set(std::unique_ptr<trace::info> &&info);
+ void reset(void);
+
+public:
+ const trace::info *info(void) const { return _info.get(); }
+
+private:
+ std::unique_ptr<trace::info> _info;
+
+public:
+ static category *get(void);
+};
+
+static inline category *cat(void) { return category::get(); }
+
+} // namespace trace
+} // namespace xdata
+
+#endif // __NNFW_XDATA_TRACE_H__
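
Since the category above is a process-wide singleton, a tracer that receives an xray::event can recover the trace payload by comparing the event's category pointer against it. A minimal sketch of that lookup, assuming only the headers introduced in this diff (dump_trace_event is a hypothetical helper, not part of xdata):

    #include <xdata.h>
    #include <xray/event.h>

    #include <iostream>

    // Hypothetical tracer-side helper: if the event belongs to the trace category,
    // print the region name and whether it is being entered or left.
    static void dump_trace_event(const xray::event *e)
    {
      if (e->cat() != xdata::trace::cat())
        return; // some other category; not a trace event

      const auto *info = xdata::trace::cat()->info();
      if (info == nullptr)
        return; // category currently carries no payload

      const char *dir = (info->action() == xdata::trace::enter) ? "enter" : "leave";
      std::cout << dir << ": " << info->region()->name() << std::endl;
    }
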
diff --git a/runtimes/libs/xdata/src/trace.cpp b/runtimes/libs/xdata/src/trace.cpp
new file mode 100644
index 000000000..6f11847ae
--- /dev/null
+++ b/runtimes/libs/xdata/src/trace.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "xdata/trace.h"
+
+#include <cassert>
+
+namespace xdata
+{
+namespace trace
+{
+
+xray::event_code category::set(std::unique_ptr<trace::info> &&info)
+{
+ assert(info != nullptr);
+ _info = std::move(info);
+ return xray::event_code{0};
+}
+
+void category::reset(void) { _info.reset(); } // reset() destroys the stored info; release() would leak it
+
+category *category::get(void)
+{
+ static category cat;
+ return &cat;
+}
+
+} // namespace trace
+} // namespace xdata
diff --git a/runtimes/libs/xprobe/CMakeLists.txt b/runtimes/libs/xprobe/CMakeLists.txt
new file mode 100644
index 000000000..006899b91
--- /dev/null
+++ b/runtimes/libs/xprobe/CMakeLists.txt
@@ -0,0 +1,9 @@
+add_library(nnfw_lib_xprobe SHARED src/trace.cpp)
+target_include_directories(nnfw_lib_xprobe PUBLIC include)
+target_link_libraries(nnfw_lib_xprobe PUBLIC nnfw_lib_xdata)
+target_link_libraries(nnfw_lib_xprobe PUBLIC nnfw_lib_xray_pipe)
+target_link_libraries(nnfw_lib_xprobe PRIVATE nnfw_lib_cpp14)
+target_link_libraries(nnfw_lib_xprobe PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_xprobe PRIVATE nnfw_coverage)
+
+install(TARGETS nnfw_lib_xprobe LIBRARY DESTINATION lib)
diff --git a/runtimes/libs/xprobe/include/xprobe.h b/runtimes/libs/xprobe/include/xprobe.h
new file mode 100644
index 000000000..df7a6188e
--- /dev/null
+++ b/runtimes/libs/xprobe/include/xprobe.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __XPROBE_H__
+#define __XPROBE_H__
+
+#include <xprobe/trace.h>
+
+/**
+ * HOW TO USE
+ *
+ * void f(args)
+ * {
+ * XPROBE_INSTRUMENT_FUNCTION();
+ *
+ * ...
+ * }
+ */
+// Indirect concatenation: expands __LINE__ before pasting, so each probe gets a unique
+// identifier (a direct "__xprobe__##__LINE__" would paste the token __LINE__ itself).
+#define XPROBE_CONCAT_IMPL(a, b) a##b
+#define XPROBE_CONCAT(a, b) XPROBE_CONCAT_IMPL(a, b)
+
+#define XPROBE_INSTRUMENT_FUNCTION() \
+  ::xprobe::trace::logger XPROBE_CONCAT(__xprobe__, __LINE__) { __FUNCTION__ }
+
+/**
+ * HOW TO USE
+ *
+ * void f(args)
+ * {
+ * if(cond)
+ * {
+ * XPROBE_INSTRUMENT_REGION("if branch");
+ * }
+ * }
+ */
+#define XPROBE_INSTRUMENT_REGION(name) \
+  ::xprobe::trace::logger XPROBE_CONCAT(__xprobe__, __LINE__) { (name) }
+
+#endif // __XPROBE_H__
diff --git a/runtimes/libs/xprobe/include/xprobe/trace.h b/runtimes/libs/xprobe/include/xprobe/trace.h
new file mode 100644
index 000000000..4c466f31f
--- /dev/null
+++ b/runtimes/libs/xprobe/include/xprobe/trace.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XPROBE_TRACE_LOGGER_H__
+#define __NNFW_XPROBE_TRACE_LOGGER_H__
+
+#include <xdata.h>
+
+#include <memory>
+
+namespace xprobe
+{
+namespace trace
+{
+
+class logger final
+{
+public:
+ logger(const char *s);
+
+public:
+ ~logger();
+
+private:
+ std::unique_ptr<xdata::trace::region> _region;
+};
+
+} // namespace trace
+} // namespace xprobe
+
+#endif // __NNFW_XPROBE_TRACE_LOGGER_H__
diff --git a/runtimes/libs/xprobe/src/trace.cpp b/runtimes/libs/xprobe/src/trace.cpp
new file mode 100644
index 000000000..600ef69fa
--- /dev/null
+++ b/runtimes/libs/xprobe/src/trace.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "xprobe/trace.h"
+
+#include <xray/pipe.h>
+
+#include <cpp14/memory.h>
+
+#include <cassert>
+
+static bool enabled = (xray::pipe::post != nullptr); // true only when a tracer links in a strong post()
+
+namespace xprobe
+{
+namespace trace
+{
+
+logger::logger(const char *s)
+{
+ if (enabled)
+ {
+ _region = nnfw::cpp14::make_unique<xdata::trace::region>(s);
+
+ auto xprobe_duration_cat = xdata::trace::category::get();
+
+ auto evt_info =
+ nnfw::cpp14::make_unique<xdata::trace::info>(_region.get(), xdata::trace::enter);
+ auto evt_cat = xprobe_duration_cat;
+ auto evt_idx = xprobe_duration_cat->set(std::move(evt_info));
+
+ const xray::event evt{evt_cat, evt_idx};
+ xray::pipe::post(&evt);
+ xprobe_duration_cat->reset();
+ }
+}
+
+logger::~logger()
+{
+ if (enabled)
+ {
+ assert(_region != nullptr);
+
+ auto xprobe_duration_cat = xdata::trace::category::get();
+
+ auto evt_info =
+ nnfw::cpp14::make_unique<xdata::trace::info>(_region.get(), xdata::trace::leave);
+ auto evt_cat = xprobe_duration_cat;
+ auto evt_idx = xprobe_duration_cat->set(std::move(evt_info));
+
+ const xray::event evt{evt_cat, evt_idx};
+ xray::pipe::post(&evt);
+ xprobe_duration_cat->reset();
+ }
+}
+
+} // namespace trace
+} // namespace xprobe
diff --git a/runtimes/libs/xray/CMakeLists.txt b/runtimes/libs/xray/CMakeLists.txt
new file mode 100644
index 000000000..5d46daa52
--- /dev/null
+++ b/runtimes/libs/xray/CMakeLists.txt
@@ -0,0 +1,3 @@
+add_subdirectory(event)
+add_subdirectory(pipe)
+add_subdirectory(mux)
diff --git a/runtimes/libs/xray/event/CMakeLists.txt b/runtimes/libs/xray/event/CMakeLists.txt
new file mode 100644
index 000000000..a5c44c099
--- /dev/null
+++ b/runtimes/libs/xray/event/CMakeLists.txt
@@ -0,0 +1,2 @@
+add_library(nnfw_lib_xray_event INTERFACE)
+target_include_directories(nnfw_lib_xray_event INTERFACE include)
diff --git a/runtimes/libs/xray/event/include/xray/event.h b/runtimes/libs/xray/event/include/xray/event.h
new file mode 100644
index 000000000..3c6fbba5b
--- /dev/null
+++ b/runtimes/libs/xray/event/include/xray/event.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XRAY_EVENT_H__
+#define __NNFW_XRAY_EVENT_H__
+
+#include "xray/event_category.h"
+#include "xray/event_code.h"
+
+namespace xray
+{
+
+/**
+ * @brief Abstract event
+ *
+ * The "event" class describes an abstract event that XRay recognizes; an event consists of a
+ * category and a code.
+ *
+ * Its interpretation depends on the "category" it belongs to.
+ */
+class event final
+{
+public:
+ event(const event_category *cat, const event_code &code) : _cat{cat}, _code{code}
+ {
+ // DO NOTHING
+ }
+
+public:
+ const event_category *cat(void) const { return _cat; }
+ const event_code &code(void) const { return _code; }
+
+private:
+ const event_category *_cat;
+ event_code _code;
+};
+
+} // namespace xray
+
+#endif // __NNFW_XRAY_EVENT_H__
diff --git a/runtimes/libs/xray/event/include/xray/event_category.h b/runtimes/libs/xray/event/include/xray/event_category.h
new file mode 100644
index 000000000..4984adeab
--- /dev/null
+++ b/runtimes/libs/xray/event/include/xray/event_category.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XRAY_EVENT_CATEGORY_H__
+#define __NNFW_XRAY_EVENT_CATEGORY_H__
+
+namespace xray
+{
+
+/**
+ * Each derived category is expected to provide a static "get" method.
+ */
+struct event_category
+{
+ // Non-virtual
+};
+
+} // namespace xray
+
+#endif // __NNFW_XRAY_EVENT_CATEGORY_H__
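
Putting the two headers together: an event is just a (category, code) pair, and each category is reached through its static get() singleton accessor. A hedged sketch of a custom category and an event built from it (my_category is illustrative, not part of this diff):

    #include <xray/event.h>

    // Hypothetical category with the expected static "get" accessor.
    struct my_category : xray::event_category
    {
      static my_category *get(void)
      {
        static my_category c;
        return &c;
      }
    };

    // The meaning of the code value (0 here) is defined entirely by the owning category.
    xray::event make_sample_event(void)
    {
      return xray::event{my_category::get(), xray::event_code{0}};
    }
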
diff --git a/runtimes/libs/xray/event/include/xray/event_code.h b/runtimes/libs/xray/event/include/xray/event_code.h
new file mode 100644
index 000000000..6879e5455
--- /dev/null
+++ b/runtimes/libs/xray/event/include/xray/event_code.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XRAY_EVENT_CODE_H__
+#define __NNFW_XRAY_EVENT_CODE_H__
+
+#include <cstdint>
+
+namespace xray
+{
+
+class event_code final
+{
+public:
+ explicit event_code(uint64_t value) : _value{value}
+ {
+ // DO NOTHING
+ }
+
+public:
+ const uint64_t &value(void) const { return _value; }
+
+private:
+ uint64_t _value;
+};
+
+} // namespace xray
+
+#endif // __NNFW_XRAY_EVENT_CODE_H__
diff --git a/runtimes/libs/xray/mux/CMakeLists.txt b/runtimes/libs/xray/mux/CMakeLists.txt
new file mode 100644
index 000000000..e020cd877
--- /dev/null
+++ b/runtimes/libs/xray/mux/CMakeLists.txt
@@ -0,0 +1,9 @@
+add_library(nnfw_lib_xray_mux SHARED src/mux.cc)
+set_target_properties(nnfw_lib_xray_mux PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(nnfw_lib_xray_mux PUBLIC include)
+target_link_libraries(nnfw_lib_xray_mux PUBLIC nnfw_lib_xray_event)
+target_link_libraries(nnfw_lib_xray_mux PUBLIC nnfw_lib_xray_pipe)
+target_link_libraries(nnfw_lib_xray_mux PRIVATE nnfw_common)
+target_link_libraries(nnfw_lib_xray_mux PRIVATE nnfw_coverage)
+
+install(TARGETS nnfw_lib_xray_mux LIBRARY DESTINATION lib)
diff --git a/runtimes/libs/xray/mux/include/xray/mux.h b/runtimes/libs/xray/mux/include/xray/mux.h
new file mode 100644
index 000000000..4cdfc1482
--- /dev/null
+++ b/runtimes/libs/xray/mux/include/xray/mux.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XRAY_MUX_H__
+#define __NNFW_XRAY_MUX_H__
+
+#include <xray/event.h>
+
+#include <set>
+
+namespace xray
+{
+
+struct listener
+{
+ virtual ~listener() = default;
+
+ virtual void notify(const event *) = 0;
+};
+
+class mux
+{
+private:
+ // Use "get()" below
+ mux() = default;
+
+public:
+ void attach(listener *l) { _listeners.insert(l); }
+ void detach(listener *l) { _listeners.erase(l); }
+
+public:
+ void notify(const event *e) const
+ {
+ for (auto listener : _listeners)
+ {
+ listener->notify(e);
+ }
+ }
+
+private:
+ std::set<listener *> _listeners;
+
+public:
+ static mux &get(void);
+};
+
+} // namespace xray
+
+#endif // __NNFW_XRAY_MUX_H__
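
mux is the tracer-side fan-out point: anything that wants to observe posted events derives from listener and attaches itself. A minimal sketch, assuming only the header above (counting_listener is hypothetical):

    #include <xray/mux.h>

    #include <cstdint>

    // Hypothetical listener that just counts events; a real tracer would decode
    // them through their category instead.
    class counting_listener final : public xray::listener
    {
    public:
      void notify(const xray::event *) override { ++_count; }
      uint64_t count(void) const { return _count; }

    private:
      uint64_t _count = 0;
    };

    void install(counting_listener *l)
    {
      // Every event posted through xray::pipe::post() now reaches this listener.
      xray::mux::get().attach(l);
    }
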
diff --git a/runtimes/libs/xray/mux/src/mux.cc b/runtimes/libs/xray/mux/src/mux.cc
new file mode 100644
index 000000000..a224294dd
--- /dev/null
+++ b/runtimes/libs/xray/mux/src/mux.cc
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "xray/mux.h"
+
+#define XRAY_TRACER
+#include <xray/pipe.h>
+#undef XRAY_TRACER
+
+namespace xray
+{
+
+mux &mux::get(void)
+{
+ static mux m;
+ return m;
+}
+
+void pipe::post(const event *e) { mux::get().notify(e); }
+
+} // namespace xray
diff --git a/runtimes/libs/xray/pipe/CMakeLists.txt b/runtimes/libs/xray/pipe/CMakeLists.txt
new file mode 100644
index 000000000..19f8e507e
--- /dev/null
+++ b/runtimes/libs/xray/pipe/CMakeLists.txt
@@ -0,0 +1,3 @@
+add_library(nnfw_lib_xray_pipe INTERFACE)
+target_include_directories(nnfw_lib_xray_pipe INTERFACE include)
+target_link_libraries(nnfw_lib_xray_pipe INTERFACE nnfw_lib_xray_event)
diff --git a/runtimes/libs/xray/pipe/include/xray/pipe.h b/runtimes/libs/xray/pipe/include/xray/pipe.h
new file mode 100644
index 000000000..3a9eb5c96
--- /dev/null
+++ b/runtimes/libs/xray/pipe/include/xray/pipe.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNFW_XRAY_PIPE_H__
+#define __NNFW_XRAY_PIPE_H__
+
+#include <xray/event.h>
+
+/**
+ * Define XRAY_TRACER before including this file when building a tracer implementation
+ */
+#ifndef XRAY_ATTR
+#ifdef XRAY_TRACER
+#define XRAY_ATTR __attribute__(())
+#else
+#define XRAY_ATTR __attribute__((weak))
+#endif // XRAY_TRACER
+#endif // XRAY_ATTR
+
+namespace xray
+{
+
+/**
+ * @brief A lightweight communication channel between tracee and tracer
+ *
+ * NOTE 1.
+ *
+ * "pipe" intentionally begins with a lowercase to use the following code pattern:
+ *
+ * xray::pipe::post(...)
+ *
+ * NOTE 2.
+ *
+ * The use of a static member method here enables early error detection.
+ *
+ * For example, a C++ compiler cannot detect the following error at compile-time.
+ *
+ * "Method.h"
+ * void meth(T);
+ *
+ * "Method.cpp"
+ * void meth(U) { ... }
+ *
+ * The following error, on the other hand, is detected at compile-time.
+ *
+ * "Method.h"
+ * struct K { void meth(T); }
+ *
+ * "Method.cpp"
+ * void K::meth(U) { ... }
+ *
+ */
+struct pipe
+{
+  /**
+   * @brief Post an event through the underlying pipe
+   *
+   * WARNING FOR TRACER IMPLEMENTORS
+   *
+   * There is no guarantee that the event remains valid after the post call.
+   * Use the event only inside the post method implementation, or copy its content if necessary.
+   *
+   * WARNING FOR TRACEE IMPLEMENTORS
+   *
+   * The xray framework does not release any resources. Be careful about resource leaks.
+   */
+ static XRAY_ATTR void post(const event *);
+};
+
+} // namespace xray
+
+#endif // __NNFW_XRAY_PIPE_H__
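
The weak attribute is what lets tracees link without any tracer present: post() resolves to null and the xprobe-side "enabled" check above turns probing off. A tracer supplies the strong definition, exactly as mux.cc does; a stand-alone sketch under the same contract:

    // tracer.cc -- hypothetical minimal tracer. Defining XRAY_TRACER drops the weak
    // attribute, so this definition is the one the linker binds every tracee to.
    #define XRAY_TRACER
    #include <xray/pipe.h>
    #undef XRAY_TRACER

    #include <cstdio>

    namespace xray
    {

    void pipe::post(const event *e)
    {
      // The event may not outlive this call, so read everything needed right away.
      std::printf("xray event code: %llu\n",
                  static_cast<unsigned long long>(e->code().value()));
    }

    } // namespace xray
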
diff --git a/runtimes/logging/CMakeLists.txt b/runtimes/logging/CMakeLists.txt
index 788b94372..956890b30 100644
--- a/runtimes/logging/CMakeLists.txt
+++ b/runtimes/logging/CMakeLists.txt
@@ -1,5 +1,8 @@
file(GLOB_RECURSE NNAPI_LOGGING_SRCS "src/*.cc")
+nnfw_find_package(Boost REQUIRED)
+
add_library(neuralnetworks SHARED ${NNAPI_LOGGING_SRCS})
-target_include_directories(neuralnetworks PUBLIC ${NNFW_INCLUDE_DIR})
target_include_directories(neuralnetworks PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/include)
+target_include_directories(neuralnetworks PRIVATE ${Boost_INCLUDE_DIRS})
+target_link_libraries(neuralnetworks PUBLIC nnfw-header)
diff --git a/runtimes/logging/src/nnapi_logging.cc b/runtimes/logging/src/nnapi_logging.cc
index d3c7b0fec..14f2369ec 100644
--- a/runtimes/logging/src/nnapi_logging.cc
+++ b/runtimes/logging/src/nnapi_logging.cc
@@ -118,15 +118,9 @@ struct ANeuralNetworksEvent
{
};
-int ANeuralNetworksEvent_wait(ANeuralNetworksEvent *event)
-{
- return ANEURALNETWORKS_NO_ERROR;
-}
+int ANeuralNetworksEvent_wait(ANeuralNetworksEvent *event) { return ANEURALNETWORKS_NO_ERROR; }
-void ANeuralNetworksEvent_free(ANeuralNetworksEvent *event)
-{
- delete event;
-}
+void ANeuralNetworksEvent_free(ANeuralNetworksEvent *event) { delete event; }
//
// Memory
@@ -149,8 +143,8 @@ int ANeuralNetworksMemory_createFromFd(size_t size, int protect, int fd, size_t
void ANeuralNetworksMemory_free(ANeuralNetworksMemory *memory)
{
- delete memory;
std::cout << __FUNCTION__ << "(" << memory << ")" << std::endl;
+ delete memory;
}
//
@@ -216,8 +210,7 @@ int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel *model, int32_t in
return ANEURALNETWORKS_NO_ERROR;
}
-int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel *model,
- int32_t index,
+int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel *model, int32_t index,
const ANeuralNetworksMemory *memory,
size_t offset, size_t length)
{
@@ -257,8 +250,7 @@ int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
const uint32_t *inputs, uint32_t outputCount,
const uint32_t *outputs)
{
- std::cout << __FUNCTION__ << "(model: " << model
- << ", type: " << type
+ std::cout << __FUNCTION__ << "(model: " << model << ", type: " << type
<< ", inputCount: " << inputCount << ", outputCount: " << outputCount << ")"
<< std::endl;
@@ -274,11 +266,8 @@ int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
return ANEURALNETWORKS_NO_ERROR;
}
-
-int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel *model,
- uint32_t inputCount,
- const uint32_t *inputs,
- uint32_t outputCount,
+int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel *model, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
const uint32_t *outputs)
{
std::cout << __FUNCTION__ << "(model: " << model << ")" << std::endl;
@@ -355,8 +344,8 @@ int ANeuralNetworksExecution_create(ANeuralNetworksCompilation *compilation,
// ANeuralNetworksExecution_setInput and ANeuralNetworksExecution_setOutput specify HOST buffer for
// input/output
int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution *execution, int32_t index,
- const ANeuralNetworksOperandType *type,
- const void *buffer, size_t length)
+ const ANeuralNetworksOperandType *type, const void *buffer,
+ size_t length)
{
std::cout << __FUNCTION__ << "(execution: " << execution << ", type: ";
@@ -402,3 +391,9 @@ void ANeuralNetworksExecution_free(ANeuralNetworksExecution *execution)
delete execution;
}
+
+void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation *compilation)
+{
+ std::cout << __FUNCTION__ << "(compilation: " << compilation << ")" << std::endl;
+ delete compilation;
+}
diff --git a/runtimes/neurun/CMakeLists.txt b/runtimes/neurun/CMakeLists.txt
index 92547da2c..6d3822c2f 100644
--- a/runtimes/neurun/CMakeLists.txt
+++ b/runtimes/neurun/CMakeLists.txt
@@ -1,59 +1,10 @@
# Add cpu
-set(NEURUN_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src)
-
-# Build kernels
-set(LIB_NEURUN_KERNEL_CPU neurun_kernel_cpu)
-set(LIB_NEURUN_KERNEL_ACL_CL neurun_kernel_acl_cl)
-add_subdirectory(src/kernel)
-
-# Build backends
-set(LIB_NEURUN_BACKEND_CPU neurun_backend_cpu)
-set(LIB_NEURUN_BACKEND_ACL_CL neurun_backend_acl_cl)
-add_subdirectory(src/backend)
-
-file(GLOB SOURCES "src/*.cc")
-file(GLOB_RECURSE SOURCES_FRONTEND "src/frontend/*.cc")
-file(GLOB SOURCES_BACKEND "src/backend/*.cc")
-file(GLOB_RECURSE SOURCES_INTERNAL "src/internal/*.cc")
-file(GLOB_RECURSE SOURCES_GRAPH "src/graph/*.cc")
-file(GLOB_RECURSE SOURCES_LINEAR "src/linear/*.cc")
-file(GLOB_RECURSE SOURCES_DUMPER "src/dumper/*.cc")
-file(GLOB_RECURSE SOURCES_COMPILER "src/compiler/*.cc")
-file(GLOB_RECURSE SOURCES_VERIFIER "src/verifier/*.cc")
-file(GLOB_RECURSE SOURCES_UTIL "src/util/*.cc")
-file(GLOB_RECURSE SOURCES_MODEL "src/model/*.cc")
-
-set(SOURCES ${SOURCES} ${SOURCES_FRONTEND} ${SOURCES_BACKEND} ${SOURCES_INTERNAL} ${SOURCES_GRAPH} ${SOURCES_LINEAR} ${SOURCES_DUMPER} ${SOURCES_COMPILER} ${SOURCES_VERIFIER} ${SOURCES_UTIL} ${SOURCES_MODEL})
-
-add_library(${LIB_NEURUN} SHARED ${SOURCES})
-target_include_directories(${LIB_NEURUN} PUBLIC ${NNFW_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN} PUBLIC ${NEURUN_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN} PUBLIC ${CMAKE_SOURCE_DIR}/externals/tensorflow)
-target_link_libraries(${LIB_NEURUN} tensorflow-lite)
-target_link_libraries(${LIB_NEURUN} nnfw_lib_misc)
-target_link_libraries(${LIB_NEURUN} nnfw_lib_cpp14)
-
-target_compile_options(${LIB_NEURUN} PRIVATE -Wall -Wextra -Werror -Wno-unused-parameter)
-
-set_target_properties(${LIB_NEURUN} PROPERTIES OUTPUT_NAME neuralnetworks)
-
-install(TARGETS ${LIB_NEURUN} DESTINATION lib/neurun)
-
-
-# Unit Tests
-
-set(TEST_NEURUN test_neurun)
-
-file(GLOB_RECURSE TESTS "test/*.cc")
-
-add_executable(${TEST_NEURUN} ${TESTS})
-target_link_libraries(${TEST_NEURUN} ${LIB_NEURUN})
-target_link_libraries(${TEST_NEURUN} gtest)
-target_link_libraries(${TEST_NEURUN} gtest_main)
-target_link_libraries(${TEST_NEURUN} ${LIB_PTHREAD})
-target_link_libraries(${TEST_NEURUN} ${LIB_NEURUN_BACKEND_CPU})
-target_link_libraries(${TEST_NEURUN} ${LIB_NEURUN_BACKEND_ACL_CL})
-add_test(${TEST_NEURUN} ${TEST_NEURUN})
-
-install(TARGETS ${TEST_NEURUN} DESTINATION unittest)
+# TODO Remove this variable once include dirs are added via target_link_libraries
+# (it is currently used by the cpu/acl_cl kernel modules, which is not proper)
+set(NEURUN_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/core/include)
+
+add_subdirectory(backend)
+add_subdirectory(frontend)
+add_subdirectory(core)
+add_subdirectory(test)
diff --git a/runtimes/neurun/backend/CMakeLists.txt b/runtimes/neurun/backend/CMakeLists.txt
new file mode 100644
index 000000000..27e1ae25f
--- /dev/null
+++ b/runtimes/neurun/backend/CMakeLists.txt
@@ -0,0 +1,8 @@
+set(LIB_NEURUN_BACKEND_ACL_COMMON neurun_backend_acl_common)
+
+add_subdirectory(cpu)
+add_subdirectory(acl_cl)
+add_subdirectory(acl_neon)
+add_subdirectory(acl_common)
+add_subdirectory(hi_perf_cpu)
+add_subdirectory(srcn)
diff --git a/runtimes/neurun/backend/acl_cl/Backend.h b/runtimes/neurun/backend/acl_cl/Backend.h
new file mode 100644
index 000000000..7c69d7b40
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/Backend.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_BACKEND_H__
+#define __NEURUN_BACKEND_ACL_CL_BACKEND_H__
+
+#include <memory>
+#include <backend/Backend.h>
+#include <model/Operands.h>
+
+#include "Config.h"
+#include "ConstantInitializer.h"
+#include "KernelGenerator.h"
+#include "ShapeFixer.h"
+#include "TensorManager.h"
+#include "backend/CustomKernelRegistry.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+class Backend : public ::neurun::backend::Backend
+{
+public:
+ Backend() : _config{std::make_shared<Config>()} {}
+
+ std::shared_ptr<IConfig> config() const override { return _config; }
+
+ std::unique_ptr<BackendContext>
+ newContext(const model::Operands &operands,
+ const std::shared_ptr<custom::KernelRegistry> &) const override
+ {
+ auto tensor_builder = std::make_shared<TensorBuilder>(createTensorManager());
+ return std::unique_ptr<BackendContext>{new BackendContext{
+ this, tensor_builder, std::make_shared<ConstantInitializer>(operands, tensor_builder),
+ std::make_shared<KernelGenerator>(operands, tensor_builder),
+ std::make_shared<ShapeFixer>(operands, tensor_builder)}};
+ }
+
+private:
+ std::shared_ptr<IConfig> _config;
+};
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_BACKEND_H__
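
A hedged sketch of how a caller might wire this up: construct the backend, let its config initialize the CL runtime, then obtain a fresh per-model context (make_acl_cl_context is illustrative; the real compiler-side wiring lives elsewhere in neurun):

    #include "Backend.h"

    // Hypothetical wiring, following the signatures shown in Backend.h above.
    std::unique_ptr<neurun::backend::BackendContext>
    make_acl_cl_context(const neurun::model::Operands &operands,
                        const std::shared_ptr<neurun::backend::custom::KernelRegistry> &registry)
    {
      neurun::backend::acl_cl::Backend backend;
      backend.config()->initialize(); // sets up CLScheduler and CLKernelLibraryEx (see Config.cc below)
      return backend.newContext(operands, registry);
    }
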
diff --git a/runtimes/neurun/backend/acl_cl/CLTimer.h b/runtimes/neurun/backend/acl_cl/CLTimer.h
new file mode 100644
index 000000000..3939ee722
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/CLTimer.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_CLTIMER_H__
+#define __NEURUN_BACKEND_ACL_CL_CLTIMER_H__
+
+#include <util/ITimer.h>
+#include <arm_compute/core/CL/OpenCL.h>
+#include <arm_compute/runtime/CL/CLScheduler.h>
+#include <chrono>
+#include <list>
+#include <sstream>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+/**
+ * @brief Class to measure CL kernels execution time
+ */
+class CLTimer : public util::ITimer
+{
+public:
+ /**
+   * @brief Replaces the CL function that enqueues a kernel-execution command with a
+   * wrapper that remembers the enqueued kernels
+   */
+ void handleBegin() override
+ {
+ _measured_events.clear();
+
+ _origin_enqueue_function = arm_compute::CLSymbols::get().clEnqueueNDRangeKernel_ptr;
+
+ auto _timer_enqueue_function = [this](cl_command_queue command_queue, cl_kernel kernel,
+ cl_uint work_dim, const size_t *gwo, const size_t *gws,
+ const size_t *lws, cl_uint num_events_in_wait_list,
+ const cl_event *event_wait_list, cl_event *usr_event) {
+ cl_event event;
+ cl_int enqueue_res =
+ this->_origin_enqueue_function(command_queue, kernel, work_dim, gwo, gws, lws,
+ num_events_in_wait_list, event_wait_list, &event);
+ this->_measured_events.emplace_back(event);
+
+      // According to the spec, if NULL was provided as usr_event, the event shouldn't be returned
+ if (usr_event != nullptr)
+ {
+ clRetainEvent(event);
+ *usr_event = event;
+ }
+ return enqueue_res;
+ };
+ arm_compute::CLSymbols::get().clEnqueueNDRangeKernel_ptr = _timer_enqueue_function;
+
+ // Set CL_QUEUE_PROFILING_ENABLE flag for the CL command-queue, if it isn't already set
+ auto &cl_scheduler = arm_compute::CLScheduler::get();
+ auto props = cl_scheduler.queue().getInfo<CL_QUEUE_PROPERTIES>();
+ if ((props & CL_QUEUE_PROFILING_ENABLE) == 0)
+ {
+ cl_scheduler.set_queue(
+ cl::CommandQueue(cl_scheduler.context(), props | CL_QUEUE_PROFILING_ENABLE));
+ }
+ };
+
+ /**
+   * @brief Compute the timer result by summing the durations of the executed CL kernels
+ */
+ void handleEnd() override
+ {
+ _timer_res = 0;
+ for (auto const &event : _measured_events)
+ {
+ cl_ulong start;
+ cl_ulong end;
+ event.getProfilingInfo(CL_PROFILING_COMMAND_START, &start);
+ event.getProfilingInfo(CL_PROFILING_COMMAND_END, &end);
+ _timer_res += (end - start) / 1000.f; // nanoseconds -> microseconds
+ }
+
+ // Restore origin CL enqueue function
+ arm_compute::CLSymbols::get().clEnqueueNDRangeKernel_ptr = _origin_enqueue_function;
+ };
+
+private:
+ std::function<decltype(clEnqueueNDRangeKernel)> _origin_enqueue_function;
+ std::list<::cl::Event> _measured_events;
+};
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_CLTIMER_H__
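
In use, the timer brackets whatever work enqueues CL kernels: handleBegin() installs the recording wrapper and handleEnd() restores the original function and sums the per-kernel durations. A hedged sketch (run_inference is a placeholder, and the getTime() accessor is assumed to come from util::ITimer):

    #include "CLTimer.h"

    #include <iostream>

    void run_inference(); // placeholder: any work that enqueues CL kernels

    void profile_once(void)
    {
      neurun::backend::acl_cl::CLTimer timer;

      timer.handleBegin(); // hook clEnqueueNDRangeKernel, enable queue profiling
      run_inference();
      arm_compute::CLScheduler::get().sync(); // events must finish before profiling info is read
      timer.handleEnd(); // restore the original enqueue function, accumulate durations

      std::cout << "CL kernel time: " << timer.getTime() << " us" << std::endl;
    }
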
diff --git a/runtimes/neurun/backend/acl_cl/CMakeLists.txt b/runtimes/neurun/backend/acl_cl/CMakeLists.txt
new file mode 100644
index 000000000..7d7b50cf0
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/CMakeLists.txt
@@ -0,0 +1,21 @@
+# Unsupported architecture
+nnfw_find_package(ARMCompute QUIET)
+if(NOT ARMCompute_FOUND)
+ return()
+endif(NOT ARMCompute_FOUND)
+
+set(LIB_NEURUN_BACKEND_ACL_CL neurun_backend_acl_cl)
+
+file(GLOB_RECURSE SOURCES "*.cc")
+
+add_library(${LIB_NEURUN_BACKEND_ACL_CL} SHARED ${SOURCES})
+
+target_include_directories(${LIB_NEURUN_BACKEND_ACL_CL} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_CL} PRIVATE neurun_core)
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_CL} PRIVATE ${LIB_NEURUN_BACKEND_ACL_COMMON})
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_CL} PRIVATE nnfw_common)
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_CL} PRIVATE nnfw_coverage)
+
+set_target_properties(${LIB_NEURUN_BACKEND_ACL_CL} PROPERTIES OUTPUT_NAME backend_acl_cl)
+
+install(TARGETS ${LIB_NEURUN_BACKEND_ACL_CL} DESTINATION lib)
diff --git a/runtimes/neurun/backend/acl_cl/Config.cc b/runtimes/neurun/backend/acl_cl/Config.cc
new file mode 100644
index 000000000..0c0769184
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/Config.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// For CLKernelLibraryEx initialization
+#include "arm_compute/core/CL/CLHelpers.h"
+#include "arm_compute/core/CL/CLKernelLibrary.h"
+#include "arm_compute/core/CL/CLKernelLibraryEx.h"
+
+#include <arm_compute/runtime/CL/CLScheduler.h>
+
+#include "Config.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+void Config::initialize()
+{
+ arm_compute::CLScheduler::get().default_init();
+ // NOTE CLKernelLibraryEx must use the same context as CLScheduler
+  // This does not check whether another device is available.
+ arm_compute::CLKernelLibraryEx::get().init(
+ "./cl_kernels/", arm_compute::CLScheduler::get().context(), cl::Device::getDefault());
+}
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_cl/Config.h b/runtimes/neurun/backend/acl_cl/Config.h
new file mode 100644
index 000000000..185765161
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/Config.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_CONFIG_H__
+#define __NEURUN_BACKEND_ACL_CL_CONFIG_H__
+
+#include "CLTimer.h"
+#include <cpp14/memory.h>
+#include <backend/IConfig.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+class Config : public IConfig
+{
+public:
+ std::string id() override { return "acl_cl"; }
+ void initialize() override;
+ bool SupportSubTensorAlloc() override { return true; }
+ std::unique_ptr<util::ITimer> timer() override { return nnfw::cpp14::make_unique<CLTimer>(); }
+};
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_CONFIG_H__
diff --git a/runtimes/neurun/backend/acl_cl/ConstantInitializer.cc b/runtimes/neurun/backend/acl_cl/ConstantInitializer.cc
new file mode 100644
index 000000000..0a8f536ec
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/ConstantInitializer.cc
@@ -0,0 +1,214 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConstantInitializer.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+ConstantInitializer::ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _operands{operands}, _tensor_builder{tensor_builder}
+{
+ // DO NOTHING
+}
+
+void ConstantInitializer::run()
+{
+ for (const auto &it : _init_map)
+ {
+ const auto &ind = it.first;
+ const auto &fn = it.second;
+
+ const auto &model_obj = _operands.at(ind);
+ auto tensor_obj = _tensor_builder->wrapTensor(ind);
+    fn(model_obj, *tensor_obj); // copy (or permute) the constant operand into its backend tensor
+ }
+
+  _init_map.clear(); // initialization runs once; drop the registrations afterwards
+}
+
+void ConstantInitializer::visit(const model::operation::Conv2DNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::Conv2DNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerPermuteInitializer(kernel_index, kernel_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::Conv2DNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::DepthwiseConv2DNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerPermuteInitializer(kernel_index, kernel_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::EmbeddingLookupNode &node)
+{
+ const auto &lookups_index = node.getInputs().at(model::operation::EmbeddingLookupNode::LOOKUPS);
+ const auto &lookups_obj = _operands.at(lookups_index);
+ registerCopyInitializer(lookups_index, lookups_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::FullyConnectedNode &node)
+{
+ const auto &weight_index = node.getInputs().at(model::operation::FullyConnectedNode::WEIGHT);
+ const auto &weight_obj = _operands.at(weight_index);
+ registerCopyInitializer(weight_index, weight_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::FullyConnectedNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::GatherNode &node)
+{
+ const auto &indices_index = node.getInputs().at(model::operation::GatherNode::INDICES);
+ const auto &indices_obj = _operands.at(indices_index);
+ registerCopyInitializer(indices_index, indices_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::HashtableLookupNode &node)
+{
+ const auto &lookups_index = node.getInputs().at(model::operation::HashtableLookupNode::LOOKUPS);
+ const auto &lookups_obj = _operands.at(lookups_index);
+ registerCopyInitializer(lookups_index, lookups_obj);
+
+ const auto &keys_index = node.getInputs().at(model::operation::HashtableLookupNode::KEYS);
+ const auto &keys_obj = _operands.at(keys_index);
+ registerCopyInitializer(keys_index, keys_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::LSTMNode &node)
+{
+ const auto &input_to_input_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_INPUT_WEIGHTS);
+ const auto &input_to_input_weights_obj = _operands.at(input_to_input_weights_index);
+ registerCopyInitializer(input_to_input_weights_index, input_to_input_weights_obj);
+
+ const auto &input_to_forget_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_FORGET_WEIGHTS);
+ const auto &input_to_forget_weights_obj = _operands.at(input_to_forget_weights_index);
+ registerCopyInitializer(input_to_forget_weights_index, input_to_forget_weights_obj);
+
+ const auto &input_to_cell_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_CELL_WEIGHTS);
+ const auto &input_to_cell_weights_obj = _operands.at(input_to_cell_weights_index);
+ registerCopyInitializer(input_to_cell_weights_index, input_to_cell_weights_obj);
+
+ const auto &input_to_output_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_OUTPUT_WEIGHTS);
+ const auto &input_to_output_weights_obj = _operands.at(input_to_output_weights_index);
+ registerCopyInitializer(input_to_output_weights_index, input_to_output_weights_obj);
+
+ const auto &recurrent_to_input_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_INPUT_WEIGHTS);
+ const auto &recurrent_to_input_weights_obj = _operands.at(recurrent_to_input_weights_index);
+ registerCopyInitializer(recurrent_to_input_weights_index, recurrent_to_input_weights_obj);
+
+ const auto &recurrent_to_forget_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_FORGET_WEIGHTS);
+ const auto &recurrent_to_forget_weights_obj = _operands.at(recurrent_to_forget_weights_index);
+ registerCopyInitializer(recurrent_to_forget_weights_index, recurrent_to_forget_weights_obj);
+
+ const auto &recurrent_to_cell_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_CELL_WEIGHTS);
+ const auto &recurrent_to_cell_weights_obj = _operands.at(recurrent_to_cell_weights_index);
+ registerCopyInitializer(recurrent_to_cell_weights_index, recurrent_to_cell_weights_obj);
+
+ const auto &recurrent_to_output_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_OUTPUT_WEIGHTS);
+ const auto &recurrent_to_output_weights_obj = _operands.at(recurrent_to_output_weights_index);
+ registerCopyInitializer(recurrent_to_output_weights_index, recurrent_to_output_weights_obj);
+
+ const auto &cell_to_input_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::CELL_TO_INPUT_WEIGHTS);
+ const auto &cell_to_input_weights_obj = _operands.at(cell_to_input_weights_index);
+ registerCopyInitializer(cell_to_input_weights_index, cell_to_input_weights_obj);
+
+ const auto &cell_to_forget_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::CELL_TO_FORGET_WEIGHTS);
+ const auto &cell_to_forget_weights_obj = _operands.at(cell_to_forget_weights_index);
+ registerCopyInitializer(cell_to_forget_weights_index, cell_to_forget_weights_obj);
+
+ const auto &cell_to_output_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::CELL_TO_OUTPUT_WEIGHTS);
+ const auto &cell_to_output_weights_obj = _operands.at(cell_to_output_weights_index);
+ registerCopyInitializer(cell_to_output_weights_index, cell_to_output_weights_obj);
+
+ const auto &input_gate_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_GATE_BIAS);
+ const auto &input_gate_bias_obj = _operands.at(input_gate_bias_index);
+ registerCopyInitializer(input_gate_bias_index, input_gate_bias_obj);
+
+ const auto &forget_gate_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::FORGET_GATE_BIAS);
+ const auto &forget_gate_bias_obj = _operands.at(forget_gate_bias_index);
+ registerCopyInitializer(forget_gate_bias_index, forget_gate_bias_obj);
+
+ const auto &output_gate_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::OUTPUT_GATE_BIAS);
+ const auto &output_gate_bias_obj = _operands.at(output_gate_bias_index);
+ registerCopyInitializer(output_gate_bias_index, output_gate_bias_obj);
+
+ const auto &projection_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::PROJECTION_WEIGHTS);
+ const auto &projection_weights_obj = _operands.at(projection_weights_index);
+ registerCopyInitializer(projection_weights_index, projection_weights_obj);
+
+ const auto &projection_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::PROJECTION_BIAS);
+ const auto &projection_bias_obj = _operands.at(projection_bias_index);
+ registerCopyInitializer(projection_bias_index, projection_bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::RNNNode &node)
+{
+ const auto &weights_index = node.getInputs().at(model::operation::RNNNode::WEIGHTS);
+ const auto &weights_obj = _operands.at(weights_index);
+ registerCopyInitializer(weights_index, weights_obj);
+
+ const auto &recurrent_weights_index =
+ node.getInputs().at(model::operation::RNNNode::RECURRENT_WEIGHTS);
+ const auto &recurrent_weights_obj = _operands.at(recurrent_weights_index);
+ registerCopyInitializer(recurrent_weights_index, recurrent_weights_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::RNNNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::TransposeConvNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::TransposeConvNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerPermuteInitializer(kernel_index, kernel_obj);
+}
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_cl/ConstantInitializer.h b/runtimes/neurun/backend/acl_cl/ConstantInitializer.h
new file mode 100644
index 000000000..59772e0f7
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/ConstantInitializer.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_ACL_CL_CONSTANT_INITIALIZER_H__
+#define __NEURUN_COMPILER_ACL_CL_CONSTANT_INITIALIZER_H__
+
+#include <backend/IConstantInitializer.h>
+#include <model/Operands.h>
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+class ConstantInitializer : public IConstantInitializer
+{
+public:
+ ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+public:
+ void run() override;
+
+public:
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::EmbeddingLookupNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::GatherNode &) override;
+ void visit(const model::operation::HashtableLookupNode &) override;
+ void visit(const model::operation::LSTMNode &) override;
+ void visit(const model::operation::RNNNode &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+
+private:
+ const model::Operands &_operands;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_ACL_CL_CONSTANT_INITIALIZER_H__
diff --git a/runtimes/neurun/backend/acl_cl/KernelGenerator.cc b/runtimes/neurun/backend/acl_cl/KernelGenerator.cc
new file mode 100644
index 000000000..8b019a45a
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/KernelGenerator.cc
@@ -0,0 +1,2034 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "KernelGenerator.h"
+
+#include <arm_compute/runtime/CL/CLFunctions.h> // Include all ARM Compute CL functions
+#include <arm_compute/runtime/CL/CLFunctionsEx.h> // Include all ARM Compute EX CL functions
+#include <arm_compute/runtime/misc/functions/GenericGather.h>
+#include <arm_compute/runtime/misc/functions/GenericReshapeLayer.h>
+
+#include <AclFunction.h>
+#include <Convert.h>
+#include <Swizzle.h>
+
+#include "kernel/ConcatLayer.h"
+#include "model/Index.h"
+#include "model/DataType.h"
+#include "model/InternalType.h"
+#include "compiler/IExecutionBuilder.h"
+#include "exec/NopFunction.h"
+#include "util/logging.h"
+#include "util/Utils.h"
+#include "util/Padding.h"
+
+using ::neurun::compiler::IExecutionBuilder;
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+using ::neurun::backend::acl_common::asAclFunction;
+
+//
+// ActivationBuilder
+//
+class ActivationBuilder
+{
+public:
+ explicit ActivationBuilder(IExecutionBuilder &builder) : _builder(builder)
+ {
+ // DO NOTHING
+ }
+
+private:
+ void appendReLU(::arm_compute::ICLTensor *ifm_alloc);
+ void appendReLU1(::arm_compute::ICLTensor *ifm_alloc);
+ void appendReLU6(::arm_compute::ICLTensor *ifm_alloc);
+
+public:
+ void append(model::Activation code, ::arm_compute::ICLTensor *ifm_alloc);
+
+private:
+ IExecutionBuilder &_builder;
+};
+
+void ActivationBuilder::appendReLU(::arm_compute::ICLTensor *ifm_alloc)
+{
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::RELU};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(ifm_alloc, nullptr, act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _builder.append(std::move(acl_fn));
+}
+
+void ActivationBuilder::appendReLU1(::arm_compute::ICLTensor *ifm_alloc)
+{
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1.0f, -1.0f};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(ifm_alloc, nullptr, act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _builder.append(std::move(acl_fn));
+}
+
+void ActivationBuilder::appendReLU6(::arm_compute::ICLTensor *ifm_alloc)
+{
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.0f, 0.0f};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(ifm_alloc, nullptr, act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _builder.append(std::move(acl_fn));
+}
+
+void ActivationBuilder::append(model::Activation code, ::arm_compute::ICLTensor *ifm_alloc)
+{
+ switch (code)
+ {
+ case model::Activation::NONE:
+ {
+ // DO NOTHING
+ break;
+ }
+ case model::Activation::RELU:
+ {
+ appendReLU(ifm_alloc);
+ break;
+ }
+ case model::Activation::RELU1:
+ {
+ appendReLU1(ifm_alloc);
+ break;
+ }
+ case model::Activation::RELU6:
+ {
+ appendReLU6(ifm_alloc);
+ break;
+ }
+ default:
+ {
+      throw std::runtime_error("Not supported yet");
+ }
+ }
+}
+
+//
+// KernelGenerator
+//
+KernelGenerator::KernelGenerator(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _ctx(ctx), _tensor_builder(tensor_builder), _current_subg_layout(model::Layout::UNKNOWN)
+{
+ // DO NOTHING
+}
+
+void KernelGenerator::visit(const model::Subgraph &subgraph)
+{
+ _current_subg_layout = subgraph.getLayout();
+ for (const auto &e : subgraph.operations())
+ {
+ const auto &node = *(e.node);
+ _tensor_builder->preVisit(node);
+ node.accept(*this);
+ _tensor_builder->postVisit(node);
+ }
+}
+
+void KernelGenerator::visit(const model::operation::CastNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::CastNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLCast>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::Conv2DNode &node)
+{
+ using model::operation::Conv2DNode;
+
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(Conv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ // Kernel format is [depth_out, kernel_height, kernel_width, depth_in].
+ const auto &ker_shape = _ctx.at(ker_index).shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+
+ const auto stride = node.param().stride;
+ const auto padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
+ stride, ker_width, ker_height);
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto ker_alloc = _tensor_builder->at(ker_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+
+ const auto conv_info = acl_common::asPadStrideInfo(padding, stride);
+ const auto act_info = acl_common::asActivationLayerInfo(activation);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLConvolutionLayer>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), bias_alloc->handle(), ofm_alloc->handle(),
+ conv_info, ::arm_compute::WeightsInfo(), ::arm_compute::Size2D(1U, 1U), act_info);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::DepthwiseConv2DNode &node)
+{
+ using model::operation::DepthwiseConv2DNode;
+
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(DepthwiseConv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(DepthwiseConv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(DepthwiseConv2DNode::Input::BIAS)};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ // Kernel format is [1, kernel_height, kernel_width, depth_out].
+ const auto &ker_shape = _ctx.at(ker_index).shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+
+ const auto stride = node.param().stride;
+ const auto padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
+ stride, ker_width, ker_height);
+ const auto multiplier = node.param().multiplier;
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto ker_alloc = _tensor_builder->at(ker_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+
+ const auto conv_info = acl_common::asPadStrideInfo(padding, stride);
+ // TODO Use `activation` instead of `model::Activation::NONE`. See below.
+ const auto act_info = acl_common::asActivationLayerInfo(model::Activation::NONE);
+
+ if (ker_height == 3 && ker_width == 3)
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLDepthwiseConvolutionLayer3x3>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), bias_alloc->handle(),
+ ofm_alloc->handle(), conv_info, multiplier, act_info);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+ }
+ else
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLDepthwiseConvolutionLayer>();
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), bias_alloc->handle(),
+ ofm_alloc->handle(), conv_info, multiplier, act_info);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+ }
+
+ // TODO Use fused activation instead of separate layer after switching to ACL version >= v19.05.
+ // Prior versions had a bug due to which the fused activation did not apply in some cases.
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::MaxPool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::MaxPool2DNode::Input::INPUT)};
+
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+
+ const auto kh = node.param().kh;
+ const auto kw = node.param().kw;
+ const auto stride = node.param().stride;
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ VERBOSE(MaxPool2D) << "IFM_H: " << ifm_shape.H << std::endl;
+ VERBOSE(MaxPool2D) << "IFM_W: " << ifm_shape.W << std::endl;
+ VERBOSE(MaxPool2D) << "OFM_H: " << ofm_shape.H << std::endl;
+ VERBOSE(MaxPool2D) << "OFM_W: " << ofm_shape.W << std::endl;
+ VERBOSE(MaxPool2D) << "KER_H: " << kh << std::endl;
+ VERBOSE(MaxPool2D) << "KER_W: " << kw << std::endl;
+ VERBOSE(MaxPool2D) << "STRIDE_H: " << stride.vertical << std::endl;
+ VERBOSE(MaxPool2D) << "STRIDE_W: " << stride.horizontal << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(T): " << padding.top << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(B): " << padding.bottom << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(L): " << padding.left << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(R): " << padding.right << std::endl;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ ::arm_compute::PoolingLayerInfo info{::arm_compute::PoolingType::MAX,
+ ::arm_compute::Size2D{kw, kh},
+ acl_common::asPadStrideInfo(padding, stride)};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPoolingLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append((std::move(acl_fn)));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::AvgPool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::AvgPool2DNode::Input::INPUT)};
+
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+
+ const auto kh = node.param().kh;
+ const auto kw = node.param().kw;
+ const auto stride = node.param().stride;
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ VERBOSE(AvgPool2D) << "IFM_H: " << ifm_shape.H << std::endl;
+ VERBOSE(AvgPool2D) << "IFM_W: " << ifm_shape.W << std::endl;
+ VERBOSE(AvgPool2D) << "OFM_H: " << ofm_shape.H << std::endl;
+ VERBOSE(AvgPool2D) << "OFM_W: " << ofm_shape.W << std::endl;
+ VERBOSE(AvgPool2D) << "KER_H: " << kh << std::endl;
+ VERBOSE(AvgPool2D) << "KER_W: " << kw << std::endl;
+ VERBOSE(AvgPool2D) << "STRIDE_H: " << stride.vertical << std::endl;
+ VERBOSE(AvgPool2D) << "STRIDE_W: " << stride.horizontal << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(T): " << padding.top << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(B): " << padding.bottom << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(L): " << padding.left << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(R): " << padding.right << std::endl;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
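+ // exclude_padding = true keeps padded elements out of the averaging window, matching the
+ // usual NNAPI/TFLite AVERAGE_POOL_2D semantics (stated as an assumption, not verified here).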
+ ::arm_compute::PoolingLayerInfo info{
+ ::arm_compute::PoolingType::AVG, ::arm_compute::Size2D{kw, kh},
+ acl_common::asPadStrideInfo(padding, stride), true /* exclude_padding */};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPoolingLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append((std::move(acl_fn)));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::ConcatNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+
+ std::vector<model::OperandIndex> input_indexes;
+
+ for (const auto &input : node.getInputs())
+ input_indexes.emplace_back(input);
+
+ const auto axis = node.param().axis;
+
+ // If the tensor allocator allocated every input as a sub-tensor of the output, the concat
+ // can be eliminated entirely.
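+ // (e.g. every input already occupies its slice of the output buffer, so no copy kernel
+ // is required)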
+ bool canEliminate = true;
+ for (auto &ifm_ind : input_indexes)
+ {
+ if (!_tensor_builder->isSubTensorOf(ofm_index, ifm_ind))
+ {
+ canEliminate = false;
+ break;
+ }
+ }
+ if (canEliminate)
+ {
+ // The concat is eliminated; append a no-op IFunction and return
+ _execution_builder->append(nnfw::cpp14::make_unique<exec::NopFunction>());
+ return;
+ }
+
+ auto output_alloc = static_cast<::neurun::backend::acl_cl::operand::Object *>(
+ _tensor_builder->wrapTensor(ofm_index).get());
+
+ std::vector<::neurun::backend::acl_cl::operand::Object *> input_allocs;
+ for (auto &ifm_ind : input_indexes)
+ input_allocs.emplace_back(static_cast<::neurun::backend::acl_cl::operand::Object *>(
+ _tensor_builder->wrapTensor(ifm_ind).get()));
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::acl_cl::kernel::ConcatLayer>();
+
+ const auto rank = _ctx.at(ofm_index).shape().rank();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = output_alloc->ptr()->layout();
+ const auto fixed_axis =
+ acl_common::ToARMComputeAxis(rank, axis, frontend_layout, backend_layout).value();
+
+ fn->configure(input_allocs, fixed_axis, output_alloc);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::FullyConnectedNode &node)
+{
+ using model::operation::FullyConnectedNode;
+
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
+ const auto weight_index{node.getInputs().at(FullyConnectedNode::Input::WEIGHT)};
+ const auto bias_index{node.getInputs().at(FullyConnectedNode::Input::BIAS)};
+
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+ // TODO Currently the case where the input's rank is 3 is not handled.
+ //      Support for it should be added in the future.
+ assert(input_rank != 3);
+
+ const auto output_size = _ctx.at(output_index).shape().dim(1);
+ UNUSED_RELEASE(output_size);
+ assert(_ctx.at(bias_index).shape().dim(0) == output_size);
+ assert(_ctx.at(weight_index).shape().dim(0) == output_size);
+ const auto batch_size = _ctx.at(output_index).shape().dim(0);
+ const auto input_size = _ctx.at(weight_index).shape().dim(1);
+
+ // Check for reshaping input's shape into rank-2
+ bool needs_reshape = false;
+ neurun::model::Shape reshape(2);
+ if (input_rank == 4)
+ {
+ const auto feature_size = _ctx.at(input_index).shape().num_elements();
+
+ UNUSED_RELEASE(feature_size);
+ assert((batch_size * input_size) >= 0);
+ assert(feature_size == static_cast<uint64_t>(batch_size * input_size));
+
+ // Reshape the rank-4 input into a rank-2 [batch_size, input_size] matrix
+ needs_reshape = true;
+ reshape.dim(0) = batch_size; /* H */
+ reshape.dim(1) = input_size; /* W */
+ }
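+ // Illustrative example with hypothetical sizes: a rank-4 input of shape [1, 7, 7, 64] and
+ // a weight of shape [out, 3136] are matched by flattening the input to [1, 3136].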
+
+ const auto activation = node.param().activation;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ auto weight_alloc = _tensor_builder->at(weight_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+ auto acl_layout = output_alloc->handle()->info()->data_layout();
+
+ auto fn = nnfw::cpp14::make_unique<arm_compute::CLFullyConnectedReshapingLayer>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(
+ input_alloc->handle(), weight_alloc->handle(), bias_alloc->handle(), output_alloc->handle(),
+ needs_reshape,
+ ::neurun::backend::acl_common::asTensorShape(/* TODO Support NCHW frontend */
+ reshape, model::Layout::NHWC,
+ ::neurun::backend::acl_common::asRuntimeLayout(
+ acl_layout)));
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, output_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::MulNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::MulNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::MulNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPixelWiseMultiplication>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle(), 1.0, // scale
+ arm_compute::ConvertPolicy::SATURATE, arm_compute::RoundingPolicy::TO_NEAREST_EVEN);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::ReduceSumNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReduceSumNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ const auto axis_base = _ctx.at(axis_index).data().base();
+ const auto axis_size = _ctx.at(axis_index).shape().num_elements();
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = input_alloc->layout();
+ // The axis data must be given as constant values
+ assert(axis_base != nullptr);
+ std::set<uint32_t> axes;
+ for (size_t n = 0; n < axis_size; ++n)
+ {
+ int32_t axis_value = *(reinterpret_cast<const int32_t *>(axis_base) + n);
+ if (axis_value < 0)
+ {
+ axis_value += input_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(input_rank, axis_value,
+ frontend_layout, backend_layout)
+ .value());
+ }
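+ // e.g. with a rank-4 input, a frontend axis of -1 is first normalized to 3 and then, when
+ // the layouts agree, mapped to ARM Compute's reversed axis order as axis 0.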
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLReduceOperation>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), axes,
+ ::arm_compute::ReduceOperation::SUM);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReshapeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReshapeNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ // NOTE This operation must not change the layout from frontend to backend.
+ //      However, this runtime may currently change the layout of this operation from NHWC to NCHW.
+ // TODO Make the frontend and backend layouts the same and switch the layer to CLReshapeLayer
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::misc::GenericReshapeLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SqueezeNode &node)
+{
+ // Squeeze is identical to reshape except that it has an optional dimensions input.
+ // The optional dims_index is ignored since the output tensor already has the squeezed shape
+ // produced by the freezer and toco.
+ // TODO Support multi-layout for frontend and backend
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SqueezeNode::Input::INPUT)};
+ const auto dims_index{node.param().dims};
+ (void)dims_index;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ auto fn = nnfw::cpp14::make_unique<arm_compute::CLReshapeLayer>();
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+ auto acl_fn = asAclFunction(std::move(fn));
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::TanhNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::TanhNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<arm_compute::CLActivationLayer>();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::TANH, 1.0f, 1.0f};
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SoftmaxNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SoftmaxNode::Input::INPUT)};
+
+ const auto beta = node.param().beta;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLSoftmaxLayer>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), beta);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::StridedSliceNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::StridedSliceNode::Input::INPUT)};
+ const auto startData_index{node.param().startData_index};
+ const auto endData_index{node.param().endData_index};
+ const auto stridesData_index{node.param().stridesData_index};
+ const auto beginMask_index{node.param().beginMask_index};
+ const auto endMask_index{node.param().endMask_index};
+ const auto shrinkAxisMask_index{node.param().shrinkAxisMask_index};
+
+ auto outputData_alloc = _tensor_builder->at(output_index).get();
+ auto inputData_alloc = _tensor_builder->at(input_index).get();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = inputData_alloc->layout();
+
+ // Initialize the start/end/stride index data following the axis order of inputData
+ int input_rank = _ctx.at(input_index).shape().rank();
+ std::vector<int32_t> starts;
+ std::vector<int32_t> ends;
+ std::vector<int32_t> strides;
+ starts.resize(input_rank, 0);
+ ends.resize(input_rank, 0);
+ strides.resize(input_rank, 0);
+ {
+ auto input_shape = _ctx.at(input_index).shape();
+ auto startData_base = _ctx.at(startData_index).data().base();
+ auto endData_base = _ctx.at(endData_index).data().base();
+ auto stridesData_base = _ctx.at(stridesData_index).data().base();
+ const int startData_size = _ctx.at(startData_index).shape().num_elements();
+ const int endData_size = _ctx.at(endData_index).shape().num_elements();
+ const int stridesData_size = _ctx.at(stridesData_index).shape().num_elements();
+
+ using neurun::model::DataType;
+
+ UNUSED_RELEASE(startData_size);
+ UNUSED_RELEASE(endData_size);
+ UNUSED_RELEASE(stridesData_size);
+
+ assert(_ctx.at(startData_index).typeInfo().type() == DataType::INT32);
+ assert(_ctx.at(endData_index).typeInfo().type() == DataType::INT32);
+ assert(_ctx.at(stridesData_index).typeInfo().type() == DataType::INT32);
+ assert(startData_size == input_rank);
+ assert(endData_size == input_rank);
+ assert(stridesData_size == input_rank);
+
+ assert(startData_base != nullptr);
+ for (int n = 0; n < input_rank; ++n)
+ {
+ auto axis = ::neurun::backend::acl_common::ToARMComputeAxis(input_rank, n, frontend_layout,
+ backend_layout)
+ .value();
+
+ int32_t start_value = *(reinterpret_cast<const int32_t *>(startData_base) + n);
+ starts[axis] = start_value;
+
+ int32_t end_value = *(reinterpret_cast<const int32_t *>(endData_base) + n);
+ ends[axis] = end_value;
+
+ int32_t strides_value = *(reinterpret_cast<const int32_t *>(stridesData_base) + n);
+ strides[axis] = strides_value;
+ }
+ }
+
+ // Reorder the mask bits to follow the axis order of inputData
+ const auto beginMask = ::neurun::backend::acl_common::ReorderBits<int32_t>(
+ _ctx.at(beginMask_index).asScalar<int32_t>(), input_rank, frontend_layout, backend_layout);
+ const auto endMask = ::neurun::backend::acl_common::ReorderBits<int32_t>(
+ _ctx.at(endMask_index).asScalar<int32_t>(), input_rank, frontend_layout, backend_layout);
+ const auto shrinkAxisMask = ::neurun::backend::acl_common::ReorderBits<int32_t>(
+ _ctx.at(shrinkAxisMask_index).asScalar<int32_t>(), input_rank, frontend_layout,
+ backend_layout);
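+ // Each mask is a per-axis bitfield (bit i set means starts/ends[i] is ignored), so the
+ // bits must be permuted exactly like the index arrays above.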
+
+ ::arm_compute::Coordinates starts_set;
+ ::arm_compute::Coordinates ends_set;
+ ::arm_compute::BiStrides strides_set;
+
+ for (size_t i = 0; i < starts.size(); ++i)
+ {
+ starts_set.set(i, starts[i]);
+ ends_set.set(i, ends[i]);
+ strides_set.set(i, strides[i]);
+ }
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLStridedSlice>();
+
+ fn->configure(inputData_alloc->handle(), outputData_alloc->handle(), starts_set, ends_set,
+ strides_set, beginMask, endMask, shrinkAxisMask);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::TransposeNode &node)
+{
+ const auto ofm_idx{node.getOutputs().at(0)};
+ const auto ifm_idx{node.getInputs().at(model::operation::TransposeNode::Input::INPUT)};
+ const auto perm{node.param().perm};
+
+ const auto rank = _ctx.at(ifm_idx).shape().rank();
+ std::vector<int32_t> pv;
+ const auto perm_base = _ctx.at(perm).data().base();
+ const int perm_size = _ctx.at(perm).shape().num_elements();
+
+ assert(perm_base != nullptr);
+ for (int32_t n = 0; n < perm_size; ++n)
+ {
+ int32_t perm_value = *(reinterpret_cast<const int32_t *>(perm_base) + n);
+ assert(perm_value < rank);
+ pv.emplace_back(perm_value);
+ }
+
+ auto ofm_alloc = _tensor_builder->at(ofm_idx).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_idx).get();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+ // Converted to ARM Compute's reversed axis order
+ auto backend_pv = ::neurun::backend::acl_common::getARMComputePermutationVector(
+ rank, pv, frontend_layout, backend_layout);
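+ // e.g. a frontend permutation {0, 3, 1, 2} (NHWC -> NCHW) is rewritten as the equivalent
+ // permutation over ARM Compute's reversed axis numbering.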
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPermute>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), backend_pv);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::AddNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArithmeticAddition>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle(),
+ arm_compute::ConvertPolicy::SATURATE);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::SubNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::SubNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SubNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArithmeticSubtraction>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle(),
+ arm_compute::ConvertPolicy::SATURATE);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::DivNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::DivNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::DivNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArithmeticDivision>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::ExpNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ExpNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLExpLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogisticNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::LogisticNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LOGISTIC};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogicalAndNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input0_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT1)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input0_alloc = _tensor_builder->at(input0_index).get();
+ auto input1_alloc = _tensor_builder->at(input1_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLBinaryLogicalOp>();
+
+ fn->configure(input0_alloc->handle(), input1_alloc->handle(), output_alloc->handle(),
+ ::arm_compute::BinaryLogicalOperation::AND);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LSTMNode &node)
+{
+ // TODO Support dynamic rnn
+ // TODO Fix a subtle error in the non-CIFG, non-peephole, no-projection case.
+ const auto scratch_buffer_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::SCRATCH_BUFFER)};
+ const auto output_state_out_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::OUTPUT_STATE_OUT)};
+ const auto cell_state_out_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::CELL_STATE_OUT)};
+ const auto output_index{node.getOutputs().at(model::operation::LSTMNode::Output::OUTPUT)};
+
+ const auto input_index{node.getInputs().at(model::operation::LSTMNode::Input::INPUT)};
+ const auto input_to_input_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_INPUT_WEIGHTS)}; // optional
+ const auto input_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_FORGET_WEIGHTS)};
+ const auto input_to_cell_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_CELL_WEIGHTS)};
+ const auto input_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_OUTPUT_WEIGHTS)};
+ const auto recurrent_to_input_weights_index{node.getInputs().at(
+ model::operation::LSTMNode::Input::RECURRENT_TO_INPUT_WEIGHTS)}; // optional
+ const auto recurrent_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_FORGET_WEIGHTS)};
+ const auto recurrent_to_cell_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_CELL_WEIGHTS)};
+ const auto recurrent_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_OUTPUT_WEIGHTS)};
+ const auto cell_to_input_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_INPUT_WEIGHTS)}; // optional
+ const auto cell_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_FORGET_WEIGHTS)}; // optional
+ const auto cell_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_OUTPUT_WEIGHTS)}; // optional
+ const auto input_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_GATE_BIAS)};
+ const auto forget_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::FORGET_GATE_BIAS)};
+ const auto cell_bias_index{node.getInputs().at(model::operation::LSTMNode::Input::CELL_BIAS)};
+ const auto output_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::OUTPUT_GATE_BIAS)};
+ const auto projection_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::PROJECTION_WEIGHTS)}; // optional
+ const auto projection_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::PROJECTION_BIAS)}; // optional
+ const auto output_state_in_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::OUTPUT_STATE_IN)};
+ const auto cell_state_in_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_STATE_IN)};
+ const auto cell_threshold = node.param().cell_threshold;
+ const auto projection_threshold = node.param().projection_threshold;
+
+ bool has_input_to_input_weights = _ctx.at(input_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(input_to_input_weights_index).shape().dim(1) != 0;
+ bool has_recurrent_to_input_weights =
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(1) != 0;
+ bool has_cell_to_forget_weights = _ctx.at(cell_to_forget_weights_index).shape().dim(0) != 0;
+ bool has_cell_to_output_weights = _ctx.at(cell_to_output_weights_index).shape().dim(0) != 0;
+ bool has_projection_weights = _ctx.at(projection_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(projection_weights_index).shape().dim(1) != 0;
+ bool has_projection_bias = _ctx.at(projection_bias_index).shape().dim(0) != 0;
+
+ // NOTE The input_to_input_weights and the recurrent_to_input_weights do not exist in CIFG.
+ // true: no CIFG
+ // false: CIFG
+ // NOTE cell_to_input_weights does not exist in the non-peephole case, even for a regular (non-CIFG) LSTM.
+ bool has_cifg_param = has_input_to_input_weights && has_recurrent_to_input_weights;
+
+ // NOTE cell_to_forget_weights and cell_to_output_weights exist in the peephole case.
+ //      However, cell_to_input_weights does not exist in CIFG even with peephole.
+ // true: peephole
+ // false: no peephole
+ bool has_peephole_param = has_cell_to_forget_weights && has_cell_to_output_weights;
+
+ // NOTE Even when the projection weights have data, the projection bias may not.
+ bool has_projection_param = has_projection_weights;
+
+ const auto activation = node.param().activation;
+ const auto cell_clip = cell_threshold;
+ const auto projection_clip = projection_threshold;
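+ // Following NNAPI semantics, a clip value of 0.0 disables clipping, so the thresholds
+ // only need to be non-negative.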
+ assert(cell_clip >= 0.f && projection_clip >= 0.f);
+
+ auto scratch_buffer_alloc = _tensor_builder->at(scratch_buffer_index).get();
+ auto output_state_out_alloc = _tensor_builder->at(output_state_out_index).get();
+ auto cell_state_out_alloc = _tensor_builder->at(cell_state_out_index).get();
+ auto output_alloc = _tensor_builder->at(output_index).get();
+
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto input_to_forget_weights_alloc = _tensor_builder->at(input_to_forget_weights_index).get();
+ auto input_to_cell_weights_alloc = _tensor_builder->at(input_to_cell_weights_index).get();
+ auto input_to_output_weights_alloc = _tensor_builder->at(input_to_output_weights_index).get();
+ auto recurrent_to_forget_weights_alloc =
+ _tensor_builder->at(recurrent_to_forget_weights_index).get();
+ auto recurrent_to_cell_weights_alloc = _tensor_builder->at(recurrent_to_cell_weights_index).get();
+ auto recurrent_to_output_weights_alloc =
+ _tensor_builder->at(recurrent_to_output_weights_index).get();
+
+ auto forget_gate_bias_alloc = _tensor_builder->at(forget_gate_bias_index).get();
+ auto cell_bias_alloc = _tensor_builder->at(cell_bias_index).get();
+ auto output_gate_bias_alloc = _tensor_builder->at(output_gate_bias_index).get();
+ auto output_state_in_alloc = _tensor_builder->at(output_state_in_index).get();
+ auto cell_state_in_alloc = _tensor_builder->at(cell_state_in_index).get();
+
+ auto act_info = ::neurun::backend::acl_common::asActivationLayerInfo(activation);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLLSTMLayer>();
+
+ ::arm_compute::LSTMParams<::arm_compute::ICLTensor> lstm_params{};
+ if (has_cifg_param)
+ {
+ auto input_to_input_weights_alloc =
+ _tensor_builder->at(input_to_input_weights_index).get(); // optional
+ auto recurrent_to_input_weights_alloc =
+ _tensor_builder->at(recurrent_to_input_weights_index).get(); // optional
+ auto cell_to_input_weights_handle =
+ has_peephole_param ? _tensor_builder->at(cell_to_input_weights_index).get()->handle()
+ : nullptr; // optional (non-cifg && peephole)
+ auto input_gate_bias_alloc = _tensor_builder->at(input_gate_bias_index).get(); // optional
+ lstm_params.set_cifg_params(input_to_input_weights_alloc->handle(),
+ recurrent_to_input_weights_alloc->handle(),
+ cell_to_input_weights_handle, input_gate_bias_alloc->handle());
+ }
+ if (has_peephole_param)
+ {
+ auto cell_to_forget_weights_alloc =
+ _tensor_builder->at(cell_to_forget_weights_index).get(); // optional
+ auto cell_to_output_weights_alloc =
+ _tensor_builder->at(cell_to_output_weights_index).get(); // optional
+ lstm_params.set_peephole_params(cell_to_forget_weights_alloc->handle(),
+ cell_to_output_weights_alloc->handle());
+ }
+ if (has_projection_param)
+ {
+ auto projection_weights_alloc = _tensor_builder->at(projection_weights_index).get(); // optional
+ auto projection_bias_handle = has_projection_bias
+ ? _tensor_builder->at(projection_bias_index).get()->handle()
+ : nullptr; // optional
+ lstm_params.set_projection_params(projection_weights_alloc->handle(), projection_bias_handle);
+ }
+
+ fn->configure(
+ input_alloc->handle(), input_to_forget_weights_alloc->handle(),
+ input_to_cell_weights_alloc->handle(), input_to_output_weights_alloc->handle(),
+ recurrent_to_forget_weights_alloc->handle(), recurrent_to_cell_weights_alloc->handle(),
+ recurrent_to_output_weights_alloc->handle(), forget_gate_bias_alloc->handle(),
+ cell_bias_alloc->handle(), output_gate_bias_alloc->handle(), output_state_in_alloc->handle(),
+ cell_state_in_alloc->handle(), scratch_buffer_alloc->handle(),
+ output_state_out_alloc->handle(), cell_state_out_alloc->handle(), output_alloc->handle(),
+ lstm_params, act_info, cell_clip, projection_clip);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReduceMaxNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReduceMaxNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ auto input_shape = _ctx.at(input_index).shape();
+ auto axis_shape = _ctx.at(axis_index).shape();
+
+ auto ofm_alloc = _tensor_builder->at(output_index).get();
+ auto ifm_alloc = _tensor_builder->at(input_index).get();
+ std::set<uint32_t> axes;
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+ {
+ const auto ifm_rank = input_shape.rank();
+ switch (axis_shape.rank())
+ {
+ case 0: // scalar
+ {
+ int32_t axis_value = _ctx.at(axis_index).asScalar<int32_t>();
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(ifm_rank, axis_value,
+ frontend_layout, backend_layout)
+ .value());
+ break;
+ }
+ case 1: // vector
+ {
+ const auto axis_base = _ctx.at(axis_index).data().base();
+ const int axis_size = axis_shape.num_elements();
+
+ // If the axis data is not constant but arrives as input data, we would need a way to
+ // infer the output shape when sinking the output.
+ assert(axis_base != nullptr);
+ for (int32_t n = 0; n < axis_size; ++n)
+ {
+ int32_t axis_value = *(reinterpret_cast<const int32_t *>(axis_base) + n);
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(
+ ifm_rank, axis_value, frontend_layout, backend_layout)
+ .value());
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error("Not supported");
+ break;
+ }
+ }
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLReduceOperation>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), axes, arm_compute::ReduceOperation::MAX);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ComparisonNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input0_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT1)};
+
+ const auto comparison_type = node.param().comparison_type;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input0_alloc = _tensor_builder->at(input0_index).get();
+ auto input1_alloc = _tensor_builder->at(input1_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLComparison>();
+
+ fn->configure(input0_alloc->handle(), input1_alloc->handle(), output_alloc->handle(),
+ (arm_compute::ComparisonOperation)comparison_type);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::RSQRTNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::RSQRTNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLRsqrtLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::ReLUNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReLUNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<arm_compute::CLActivationLayer>();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::RELU};
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ResizeBilinearNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+
+ const auto ifm_index{node.getInputs().at(model::operation::ResizeBilinearNode::Input::INPUT)};
+ const auto height_index{node.param().height_index};
+ const auto width_index{node.param().width_index};
+ (void)height_index;
+ (void)width_index;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLScale>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(),
+ ::arm_compute::InterpolationPolicy::BILINEAR, ::arm_compute::BorderMode::REPLICATE,
+ ::arm_compute::PixelValue(0.f), ::arm_compute::SamplingPolicy::TOP_LEFT);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReLU1Node &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ReLU1Node::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1.0f, -1.0f};
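+ // LU_BOUNDED_RELU(a, b) computes min(a, max(b, x)); with a = 1 and b = -1 this clamps the
+ // input to [-1, 1], which is exactly ReLU1.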
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReLU6Node &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ReLU6Node::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.0f};
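+ // BOUNDED_RELU(a) computes min(a, max(0, x)); with a = 6 this clamps to [0, 6], i.e. ReLU6.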
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::RNNNode &node)
+{
+ const auto output_index{node.getOutputs().at(model::operation::RNNNode::Output::OUTPUT)};
+ const auto hidden_state_out_index{
+ node.getOutputs().at(model::operation::RNNNode::Output::HIDDEN_STATE_OUT)};
+
+ const auto input_index{node.getInputs().at(model::operation::RNNNode::Input::INPUT)};
+ const auto weights_index{node.getInputs().at(model::operation::RNNNode::Input::WEIGHTS)};
+ const auto recurrent_weights_index{
+ node.getInputs().at(model::operation::RNNNode::Input::RECURRENT_WEIGHTS)};
+ const auto bias_index{node.getInputs().at(model::operation::RNNNode::Input::BIAS)};
+ const auto hidden_state_in_index{
+ node.getInputs().at(model::operation::RNNNode::Input::HIDDEN_STATE_IN)};
+
+ const auto activation = node.param().activation;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto hidden_state_out_alloc = _tensor_builder->at(hidden_state_out_index).get();
+
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ auto weights_alloc = _tensor_builder->at(weights_index).get();
+ auto recurrent_weights_alloc = _tensor_builder->at(recurrent_weights_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+ auto hidden_state_in_alloc = _tensor_builder->at(hidden_state_in_index).get();
+ auto act_info = ::neurun::backend::acl_common::asActivationLayerInfo(activation);
+
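+ // The previous hidden state is copied into hidden_state_out first; the RNN kernel below
+ // then reads and updates the recurrent state through that single buffer (a reading of
+ // this code, not a documented ACL contract).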
+ auto copy_layer = nnfw::cpp14::make_unique<::arm_compute::CLCopy>();
+ copy_layer->configure(hidden_state_in_alloc->handle(), hidden_state_out_alloc->handle());
+ _execution_builder->append(asAclFunction(std::move(copy_layer)));
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLRNNLayerEx>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+ fn->configure(input_alloc->handle(), weights_alloc->handle(), recurrent_weights_alloc->handle(),
+ bias_alloc->handle(), hidden_state_out_alloc->handle(), output_alloc->handle(),
+ act_info);
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::FloorNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::FloorNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLFloor>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SpaceToDepthNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::SpaceToDepthNode::Input::INPUT)};
+ const auto block_size_index{node.param().block_size_index};
+
+ auto block_size = _ctx.at(block_size_index).asScalar<int32_t>();
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLSpaceToDepth>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), block_size);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::L2Pool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::L2Pool2DNode::Input::INPUT)};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+
+ uint32_t kw = node.param().kw;
+ uint32_t kh = node.param().kh;
+ const auto stride = node.param().stride;
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ ::arm_compute::PoolingLayerInfo info{
+ ::arm_compute::PoolingType::L2, ::arm_compute::Size2D{kw, kh},
+ ::neurun::backend::acl_common::asPadStrideInfo(padding, stride)};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPoolingLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::EmbeddingLookupNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto lookups_index{
+ node.getInputs().at(model::operation::EmbeddingLookupNode::Input::LOOKUPS)};
+ const auto values_index{
+ node.getInputs().at(model::operation::EmbeddingLookupNode::Input::VALUES)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto lookups_alloc = _tensor_builder->at(lookups_index).get();
+ auto values_alloc = _tensor_builder->at(values_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLEmbeddingLookup>();
+
+ fn->configure(values_alloc->handle(), output_alloc->handle(), lookups_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::L2NormalizationNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::L2NormalizationNode::Input::INPUT)};
+
+ // {CL|Neon}L2Normalization performs the reduction only along dimension 0
+ // L2 Normalization always performs the reduction along the depth axis
+ // Thus, we repurpose {CL|Neon}NormalizationLayers to act as depthwise L2 normalizations by
+ // choosing normalization parameters as below
+
+ const auto &ifm_shape = _ctx.at(ifm_index).shape();
+ // TODO Support an optional constant axis along which the normalization is performed
+ const auto normalization_axis = ifm_shape.rank() - 1;
+ int32_t radius =
+ 2 * ifm_shape.dim(normalization_axis) + 1; // normSize = depth(last dimension) * 2 + 1
+ float alpha = 1.0f; // Chosen so that alpha_ in the implementation becomes 1
+ float beta = 0.5f; // pow(reduction, -0.5) = 1 / sqrt(reduction)
+ float bias = 0.0f; // Don't offset the reduction.
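+ // With these parameters the LRN formula x / (bias + alpha * sum(x^2))^beta reduces to
+ // x / sqrt(sum(x^2)) over the whole depth, i.e. a depthwise L2 normalization (a sketch of
+ // the intent; the exact ACL formula also depends on the is_scaled flag passed below).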
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const auto norm_info = ::arm_compute::NormalizationLayerInfo(::arm_compute::NormType::CROSS_MAP,
+ radius, alpha, beta, bias, false);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNormalizationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), norm_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::HashtableLookupNode &node)
+{
+ const auto output_index{
+ node.getOutputs().at(model::operation::HashtableLookupNode::Output::OUTPUT)};
+ const auto hits_index{node.getOutputs().at(model::operation::HashtableLookupNode::Output::HITS)};
+
+ const auto lookups_index{
+ node.getInputs().at(model::operation::HashtableLookupNode::Input::LOOKUPS)};
+ const auto keys_index{node.getInputs().at(model::operation::HashtableLookupNode::Input::KEYS)};
+ const auto values_index{
+ node.getInputs().at(model::operation::HashtableLookupNode::Input::VALUES)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto hits_alloc = _tensor_builder->at(hits_index).get();
+
+ auto lookups_alloc = _tensor_builder->at(lookups_index).get();
+ auto keys_alloc = _tensor_builder->at(keys_index).get();
+ auto values_alloc = _tensor_builder->at(values_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLHashtableLookup>();
+
+ fn->configure(lookups_alloc->handle(), keys_alloc->handle(), values_alloc->handle(),
+ output_alloc->handle(), hits_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::PReLUNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::PReLUNode::Input::INPUT)};
+ const auto alpha_index{node.getInputs().at(model::operation::PReLUNode::Input::ALPHA)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto alpha_alloc = _tensor_builder->at(alpha_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPReLU>();
+
+ fn->configure(ifm_alloc->handle(), alpha_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::TransposeConvNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto output_shape_index{
+ node.getInputs().at(model::operation::TransposeConvNode::Input::OUTPUT_SHAPE)};
+ const auto ker_index{node.getInputs().at(model::operation::TransposeConvNode::Input::KERNEL)};
+ const auto ifm_index{node.getInputs().at(model::operation::TransposeConvNode::Input::INPUT)};
+
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ker_shape = _ctx.at(ker_index).shape().asFeature(_current_subg_layout);
+
+ const auto stride = node.param().stride;
+
+ assert((node.param().padding.type == model::PaddingType::SAME) ||
+ (node.param().padding.type == model::PaddingType::VALID));
+ auto padding = neurun::util::calculatePadding(node.param().padding, ofm_shape, ifm_shape, stride,
+ ker_shape.W, ker_shape.H);
+
+ uint32_t invalid_horizontal = 0;
+ uint32_t invalid_vertical = 0;
+ if (node.param().padding.type == model::PaddingType::VALID)
+ {
+ invalid_horizontal =
+ ofm_shape.W - (1 + (ifm_shape.W - 1) * stride.horizontal) - (ker_shape.W - 1);
+ invalid_vertical = ofm_shape.H - (1 + (ifm_shape.H - 1) * stride.vertical) - (ker_shape.H - 1);
+ }
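+ // Hypothetical example: ifm.W = 4, stride = 2, ker.W = 3 gives a full VALID output width
+ // of 1 + 3 * 2 + 2 = 9; if the requested ofm.W is 10, one trailing column is marked
+ // invalid (invalid_horizontal = 1) and excluded from the computation.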
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto ker_alloc = _tensor_builder->at(ker_index).get();
+
+ const auto tconv_info = acl_common::asPadStrideInfo(padding, stride);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLTransposeConvLayer>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), nullptr, ofm_alloc->handle(), tconv_info,
+ invalid_horizontal, invalid_vertical);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SQRTNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SQRTNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::SQRT};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogicalOrNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input0_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT1)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input0_alloc = _tensor_builder->at(input0_index).get();
+ auto input1_alloc = _tensor_builder->at(input1_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLBitwiseOr>();
+
+ fn->configure(input0_alloc->handle(), input1_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogicalNotNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::LogicalNotNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLBitwiseNot>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SquaredDifferenceNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::RHS)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLElementwiseSquaredDiff>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::TopKV2Node &node)
+{
+ const auto outputValues_index{
+ node.getOutputs().at(model::operation::TopKV2Node::Output::OUTPUT_VALUES)};
+ const auto outputIndices_index{
+ node.getOutputs().at(model::operation::TopKV2Node::Output::OUTPUT_INDICES)};
+
+ const auto inputData_index{node.getInputs().at(model::operation::TopKV2Node::Input::INPUT)};
+ const auto k_index{node.param().k_index};
+
+ // Currently, only rank-1 (vector) and rank-2 inputs are supported.
+ assert(_ctx.at(inputData_index).shape().rank() == 1 ||
+ _ctx.at(inputData_index).shape().rank() == 2);
+
+ const auto k = _ctx.at(k_index).asScalar<int32_t>();
+
+ auto values_alloc = _tensor_builder->at(outputValues_index).get();
+ auto indices_alloc = _tensor_builder->at(outputIndices_index).get();
+ auto input_alloc = _tensor_builder->at(inputData_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLTopKV2>();
+
+ fn->configure(input_alloc->handle(), k, values_alloc->handle(), indices_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::GatherNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+
+ const auto ifm_index{node.getInputs().at(model::operation::GatherNode::Input::INPUT)};
+ const auto indices_index{node.getInputs().at(model::operation::GatherNode::Input::INDICES)};
+
+ const auto axis_index{node.param().axis_index};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape();
+
+ const auto axis_value = static_cast<int>(_ctx.at(axis_index).asScalar<int32_t>());
+ // Converting in reverse order
+ const int axis =
+ ::neurun::backend::acl_common::ToARMComputeAxis(ifm_shape.rank(), axis_value).value();
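+ // e.g. for a rank-4 tensor, frontend axis 1 becomes ARM Compute axis 2 (reversed order,
+ // rank - axis - 1).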
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto indices_alloc = _tensor_builder->at(indices_index).get();
+ auto acl_layout = ofm_alloc->handle()->info()->data_layout();
+ UNUSED_RELEASE(acl_layout);
+
+ // NOTE The frontend layout and backend layout must be the same for this operation.
+ //      If they differ, we would have to add a stage to permute the output tensor, which
+ //      is not efficient even if it works. It is therefore better to give these backend
+ //      tensors the same layout.
+ //      There is one more thing to consider: this operation depends on the layout of the
+ //      model. For example, if an NHWC model has this operation with output rank == 4,
+ //      indices rank == 2 and axis == 2, the operation works on the W and C axes, but W
+ //      and C are not contiguous in NCHW, so an NCHW backend cannot handle this case.
+ // TODO Remove this workaround
+ //      The workaround is to set the layout of these backend tensors to the frontend
+ //      layout when creating them.
+ // TODO Support frontend in NCHW
+ // TODO Change the layout of frontend and backend to be the same
+ // assert(::arm_compute::DataLayout::NHWC == acl_layout);
+ assert(acl_layout == ifm_alloc->handle()->info()->data_layout());
+ assert(acl_layout == indices_alloc->handle()->info()->data_layout());
+
+ // TODO Change to CLGather
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::misc::GenericGather>();
+
+ fn->configure(ifm_alloc->handle(), indices_alloc->handle(), ofm_alloc->handle(), axis);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::NegNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::NegNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNeg>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::AbsNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::AbsNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::ABS};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ArgMaxNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ArgMaxNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ auto ifm_shape = _ctx.at(ifm_index).shape();
+ auto ofm_shape = _ctx.at(ofm_index).shape();
+ auto axis_shape = _ctx.at(axis_index).shape();
+
+ assert(_ctx.at(axis_index).isConstant());
+ // The axis operand is always a 1-D tensor (a vector).
+ assert(axis_shape.rank() == 1);
+ assert((ifm_shape.rank() - 1) == ofm_shape.rank());
+
+ const int axis_size = axis_shape.num_elements();
+ auto axis_base = _ctx.at(axis_index).data().base();
+ // TODO Should support axis size > 1.
+ assert(axis_size == 1);
+ // The axis operand is constant, so its data must be available here.
+ assert(axis_base != nullptr);
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ const auto ifm_rank = ifm_shape.rank();
+ auto frontend_layout = _current_subg_layout;
+ auto backend_layout = ifm_alloc->layout();
+ std::set<uint32_t> axes;
+ for (int32_t n = 0; n < axis_size; ++n)
+ {
+ int32_t axis_value = *(reinterpret_cast<const int32_t *>(axis_base) + n);
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(acl_common::ToARMComputeAxis(ifm_rank, axis_value, frontend_layout, backend_layout)
+ .value());
+ }
+ std::vector<uint32_t> fixed_axes(axes.begin(), axes.end());
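+ // Illustrative note: negative axis values are wrapped by the input rank first, so for
+ // ifm_rank == 4 an axis value of -1 becomes 3 before being converted to the backend
+ // axis order above.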
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArgOperation>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), fixed_axes,
+ ::arm_compute::ArgOperation::MAX);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::DequantizeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::DequantizeNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLCast>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::MeanNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::MeanNode::Input::INPUT)};
+
+ const auto axis_index{node.param().axis_index};
+ const auto keep_dims{node.param().keep_dims};
+ (void)keep_dims;
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape();
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ std::set<uint32_t> axes;
+ {
+ const auto ifm_rank = ifm_shape.rank();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+ const auto axis_shape = _ctx.at(axis_index).shape();
+ switch (axis_shape.rank())
+ {
+ case 0: // scalar
+ {
+ auto axis_value = _ctx.at(axis_index).asScalar<int32_t>();
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(ifm_rank, axis_value,
+ frontend_layout, backend_layout)
+ .value());
+ break;
+ }
+ case 1: // vector
+ {
+ const auto axis_base = _ctx.at(axis_index).data().base();
+ const int axis_size = axis_shape.num_elements();
+
+ // The axis data must be available as constant values here; if it were only available
+ // as runtime input, we would need a way to infer the output shape when sinking output.
+ assert(axis_base != nullptr);
+ for (int32_t n = 0; n < axis_size; ++n)
+ {
+ int32_t axis_value = *(reinterpret_cast<const int32_t *>(axis_base) + n);
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(
+ ifm_rank, axis_value, frontend_layout, backend_layout)
+ .value());
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error("Not supported");
+ }
+ }
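+ // Illustrative sketch (assumed example): an NHWC mean over H and W (axis values 1
+ // and 2) with ifm rank == 4 would fill 'axes' with the corresponding backend axes,
+ // e.g. {1, 2} under a plain reversal (rank - axis - 1).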
+
+ // NOTE CLReduceMean has a bug: it does not support the NHWC layout, and its
+ // intermediate tensors are always in NCHW layout, so CLReduceOperation is used instead.
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLReduceOperation>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), axes,
+ ::arm_compute::ReduceOperation::MEAN);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LocalResponseNormalizationNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{
+ node.getInputs().at(model::operation::LocalResponseNormalizationNode::Input::INPUT)};
+ const auto radius_index{node.param().radius_index};
+ const auto bias_index{node.param().bias_index};
+ const auto alpha_index{node.param().alpha_index};
+ const auto beta_index{node.param().beta_index};
+
+ auto radius = _ctx.at(radius_index).asScalar<int32_t>();
+ auto alpha = _ctx.at(alpha_index).asScalar<float>();
+ auto beta = _ctx.at(beta_index).asScalar<float>();
+ auto bias = _ctx.at(bias_index).asScalar<float>();
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const auto norm_info = ::arm_compute::NormalizationLayerInfo(
+ ::arm_compute::NormType::CROSS_MAP, radius * 2 + 1, alpha, beta, bias, false);
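+ // Illustrative note: NormalizationLayerInfo takes the full window size while the
+ // radius counts neighbours on one side only, so a radius of 2 yields a norm_size
+ // of 2 * 2 + 1 == 5.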
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNormalizationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), norm_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::DepthToSpaceNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::DepthToSpaceNode::Input::INPUT)};
+ const auto block_size_index{node.param().block_size_index};
+
+ auto block_size = _ctx.at(block_size_index).asScalar<int32_t>();
+ assert(block_size > 0);
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLDepthToSpace>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), block_size);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReduceMinNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ReduceMinNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ auto ifm_shape = _ctx.at(ifm_index).shape();
+ auto ofm_shape = _ctx.at(ofm_index).shape();
+ auto axis_shape = _ctx.at(axis_index).shape();
+
+ const auto ifm_rank = ifm_shape.rank();
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ std::set<uint32_t> axes;
+ {
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+ switch (axis_shape.rank())
+ {
+ case 0: // scalar
+ {
+ auto axis_value = _ctx.at(axis_index).asScalar<int32_t>();
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(ifm_rank, axis_value,
+ frontend_layout, backend_layout)
+ .value());
+ break;
+ }
+ case 1: // vector
+ {
+ const auto axis_base = _ctx.at(axis_index).data().base();
+ const int axis_size = axis_shape.num_elements();
+
+ // The axis data must be available as constant values here; if it were only available
+ // as runtime input, we would need a way to infer the output shape when sinking output.
+ assert(axis_base != nullptr);
+ for (int32_t n = 0; n < axis_size; ++n)
+ {
+ int32_t axis_value = *(reinterpret_cast<const int32_t *>(axis_base) + n);
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(
+ ifm_rank, axis_value, frontend_layout, backend_layout)
+ .value());
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error("Not supported");
+ }
+ }
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLReduceOperation>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), axes,
+ ::arm_compute::ReduceOperation::MIN);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SplitNode &node)
+{
+ const auto ifm_index{node.getInputs().at(model::operation::SplitNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+ const auto num_of_splits_index{node.param().num_of_splits_index};
+
+ assert(_ctx.at(num_of_splits_index).asScalar<unsigned int>() == node.getOutputs().size());
+
+ const auto ifm_rank = _ctx.at(ifm_index).shape().rank();
+ std::vector<model::OperandIndex> output_indexes;
+ for (const auto &output : node.getOutputs())
+ output_indexes.emplace_back(output);
+
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ std::vector<arm_compute::ICLTensor *> output_allocs;
+ for (const auto &ofm_ind : output_indexes)
+ output_allocs.emplace_back(_tensor_builder->at(ofm_ind).get()->handle());
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+ auto axis = _ctx.at(axis_index).asScalar<int32_t>();
+ if (axis < 0)
+ axis += ifm_rank;
+ axis = acl_common::ToARMComputeAxis(ifm_rank, axis, frontend_layout, backend_layout).value();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLSplit>();
+
+ fn->configure(ifm_alloc->handle(), output_allocs, axis);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::UnpackNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::UnpackNode::Input::INPUT)};
+ auto axis{node.param().axis};
+
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+
+ std::vector<model::OperandIndex> output_indexes;
+ for (const auto &output_index : node.getOutputs())
+ output_indexes.emplace_back(output_index);
+
+ auto input = _tensor_builder->at(input_index).get()->handle();
+ std::vector<arm_compute::ICLTensor *> outputs;
+ for (const auto &output_index : output_indexes)
+ outputs.emplace_back(_tensor_builder->at(output_index)->handle());
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = _tensor_builder->at(input_index).get()->layout();
+ if (axis < 0)
+ axis += input_rank;
+ axis = acl_common::ToARMComputeAxis(input_rank, axis, frontend_layout, backend_layout).value();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLUnstack>();
+
+ fn->configure(input, outputs, axis);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::PadNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::PadNode::Input::INPUT)};
+ const auto pad_index{node.getInputs().at(model::operation::PadNode::Input::PAD)};
+ const auto output_index{node.getOutputs().at(0)};
+ assert(_ctx.at(pad_index).isConstant());
+
+ auto rank = _ctx.at(pad_index).shape().dim(0);
+ auto pad_base = _ctx.at(pad_index).data().base();
+
+ auto input_type = _ctx.at(input_index).typeInfo();
+ auto data_type = acl_common::asDataType(input_type.type());
+ auto quant_info = ::arm_compute::QuantizationInfo(input_type.scale(), input_type.offset());
+ const auto pixel_value = ::arm_compute::PixelValue(0, data_type, quant_info);
+
+ auto input = _tensor_builder->at(input_index).get()->handle();
+ auto output = _tensor_builder->at(output_index).get()->handle();
+
+ ::arm_compute::PaddingList padding_list;
+ padding_list.resize(rank);
+ for (int32_t n = 0; n < rank; ++n)
+ {
+ const int32_t *from = reinterpret_cast<const int32_t *>(pad_base) + (n * 2);
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = _tensor_builder->at(input_index).get()->layout();
+ const auto axis =
+ acl_common::ToARMComputeAxis(rank, n, frontend_layout, backend_layout).value();
+ padding_list[axis] = ::arm_compute::PaddingInfo{from[0], from[1]};
+ }
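+ // Illustrative sketch, assuming the PAD operand holds rank x 2 int32 pairs of
+ // {before, after}: pad data [[1, 1], [2, 2]] for a 2-D input adds one cell of
+ // padding on each side of dimension 0 and two on each side of dimension 1, after
+ // each dimension is remapped to the backend axis order.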
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPadLayer>();
+ fn->configure(input, output, padding_list, pixel_value);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_cl/KernelGenerator.h b/runtimes/neurun/backend/acl_cl/KernelGenerator.h
new file mode 100644
index 000000000..db9bf4199
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/KernelGenerator.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_KERNEL_GENERATOR_H__
+#define __NEURUN_BACKEND_ACL_CL_KERNEL_GENERATOR_H__
+
+#include <backend/IKernelGenerator.h>
+
+#include "model/Operands.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+class KernelGenerator : public IKernelGenerator
+{
+public:
+ KernelGenerator(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+ void visit(const model::Subgraph &) override;
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::MaxPool2DNode &) override;
+ void visit(const model::operation::AvgPool2DNode &) override;
+ void visit(const model::operation::ConcatNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::MulNode &) override;
+ void visit(const model::operation::ReduceSumNode &) override;
+ void visit(const model::operation::ReshapeNode &) override;
+ void visit(const model::operation::SqueezeNode &) override;
+ void visit(const model::operation::TanhNode &) override;
+ void visit(const model::operation::SoftmaxNode &) override;
+ void visit(const model::operation::StridedSliceNode &) override;
+ void visit(const model::operation::TransposeNode &) override;
+ void visit(const model::operation::AddNode &) override;
+ void visit(const model::operation::SubNode &) override;
+ void visit(const model::operation::CastNode &) override;
+ void visit(const model::operation::DivNode &) override;
+ void visit(const model::operation::ExpNode &) override;
+ void visit(const model::operation::LogisticNode &) override;
+ void visit(const model::operation::ReduceMaxNode &) override;
+ void visit(const model::operation::ComparisonNode &) override;
+ void visit(const model::operation::LogicalAndNode &) override;
+ void visit(const model::operation::LSTMNode &) override;
+ void visit(const model::operation::RSQRTNode &) override;
+ void visit(const model::operation::ReLUNode &) override;
+ void visit(const model::operation::ResizeBilinearNode &) override;
+ void visit(const model::operation::ReLU1Node &) override;
+ void visit(const model::operation::ReLU6Node &) override;
+ void visit(const model::operation::RNNNode &) override;
+ void visit(const model::operation::FloorNode &) override;
+ void visit(const model::operation::SpaceToDepthNode &) override;
+ void visit(const model::operation::L2Pool2DNode &) override;
+ void visit(const model::operation::EmbeddingLookupNode &) override;
+ void visit(const model::operation::L2NormalizationNode &) override;
+ void visit(const model::operation::HashtableLookupNode &) override;
+ void visit(const model::operation::PReLUNode &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+ void visit(const model::operation::SQRTNode &) override;
+ void visit(const model::operation::LogicalOrNode &) override;
+ void visit(const model::operation::LogicalNotNode &) override;
+ void visit(const model::operation::SquaredDifferenceNode &) override;
+ void visit(const model::operation::TopKV2Node &) override;
+ void visit(const model::operation::GatherNode &) override;
+ void visit(const model::operation::NegNode &) override;
+ void visit(const model::operation::AbsNode &) override;
+ void visit(const model::operation::ArgMaxNode &) override;
+ void visit(const model::operation::DequantizeNode &) override;
+ void visit(const model::operation::MeanNode &) override;
+ void visit(const model::operation::LocalResponseNormalizationNode &) override;
+ void visit(const model::operation::DepthToSpaceNode &) override;
+ void visit(const model::operation::ReduceMinNode &) override;
+ void visit(const model::operation::SplitNode &) override;
+ void visit(const model::operation::UnpackNode &) override;
+ void visit(const model::operation::PadNode &) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+ model::Layout _current_subg_layout;
+};
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_KERNEL_GENERATOR_H__
diff --git a/runtimes/neurun/backend/acl_cl/PluginClassesAllocator.cc b/runtimes/neurun/backend/acl_cl/PluginClassesAllocator.cc
new file mode 100644
index 000000000..ac3f0acff
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/PluginClassesAllocator.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <util/logging.h>
+
+#include "Backend.h"
+
+extern "C" {
+neurun::backend::Backend *neurun_backend_create()
+{
+ VERBOSE(neurun_backend_create) << "'acl_cl' loaded\n";
+ return new neurun::backend::acl_cl::Backend;
+}
+
+void neurun_backend_destroy(neurun::backend::Backend *backend)
+{
+ VERBOSE(neurun_backend_destroy) << "'acl_cl' unloaded\n";
+ delete backend;
+}
+}
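+
+// NOTE (illustrative assumption): these C symbols are presumably resolved by the
+// runtime's backend loader via dlopen()/dlsym(), which is why they need unmangled
+// names and C linkage.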
diff --git a/runtimes/neurun/backend/acl_cl/ShapeFixer.cc b/runtimes/neurun/backend/acl_cl/ShapeFixer.cc
new file mode 100644
index 000000000..e6744cc24
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/ShapeFixer.cc
@@ -0,0 +1,361 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ShapeFixer.h"
+
+#include <arm_compute/runtime/CL/CLFunctions.h> // Include all ARM Compute CL functions
+#include <arm_compute/runtime/CL/CLFunctionsEx.h> // Include all ARM Compute EX CL functions
+#include <arm_compute/runtime/misc/functions/GenericGather.h>
+#include <arm_compute/runtime/misc/functions/GenericReshapeLayer.h>
+
+#include <AclFunction.h>
+#include <Convert.h>
+#include <Swizzle.h>
+
+#include "kernel/ConcatLayer.h"
+#include "model/Index.h"
+#include "model/DataType.h"
+#include "model/InternalType.h"
+#include "compiler/IExecutionBuilder.h"
+#include "exec/NopFunction.h"
+#include "util/logging.h"
+#include "util/Utils.h"
+#include "util/Padding.h"
+
+using ::neurun::compiler::IExecutionBuilder;
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+using ::neurun::backend::acl_common::asAclFunction;
+
+ShapeFixer::ShapeFixer(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _ctx(ctx), _tensor_builder(tensor_builder)
+{
+ assert(tensor_builder);
+}
+
+void ShapeFixer::visit(const model::operation::CastNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::Conv2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::DepthwiseConv2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::MaxPool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::AvgPool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ConcatNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ _tensor_builder->dimCorrection(ofm_index, false);
+ for (const auto &input : node.getInputs())
+ _tensor_builder->dimCorrection(input, false);
+}
+
+void ShapeFixer::visit(const model::operation::FullyConnectedNode &node)
+{
+ using model::operation::FullyConnectedNode;
+ const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+ if (input_rank == 4)
+ _tensor_builder->dimCorrection(input_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::MulNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::MulNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::MulNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
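+ // Illustrative sketch: for lhs shape (2, 3) and rhs shape (3), broadcast_rank is 2
+ // and the rhs shape is extended to rank 2 (presumably (1, 3)), which the CL kernel
+ // can then broadcast element-wise.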
+ }
+}
+
+void ShapeFixer::visit(const model::operation::ReduceSumNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReshapeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReshapeNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(input_index, false);
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::SqueezeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SqueezeNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(input_index, false);
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::TanhNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SoftmaxNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::StridedSliceNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::TransposeNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::AddNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::SubNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::SubNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SubNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::DivNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::DivNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::DivNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::ExpNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LogisticNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LogicalAndNode &node)
+{
+ const auto input0_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT1)};
+
+ if (!(_ctx.at(input0_index).shape() == _ctx.at(input1_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(input0_index).shape().rank(), _ctx.at(input1_index).shape().rank());
+
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(input0_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(input1_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::LSTMNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReduceMaxNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ComparisonNode &node)
+{
+ const auto input0_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT1)};
+
+ if (!(_ctx.at(input0_index).shape() == _ctx.at(input1_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(input0_index).shape().rank(), _ctx.at(input1_index).shape().rank());
+
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(input0_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(input1_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::RSQRTNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReLUNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ResizeBilinearNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReLU1Node &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReLU6Node &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::RNNNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::FloorNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SpaceToDepthNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::SpaceToDepthNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(ofm_index, false);
+ _tensor_builder->dimCorrection(ifm_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::L2Pool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::EmbeddingLookupNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto values_index{
+ node.getInputs().at(model::operation::EmbeddingLookupNode::Input::VALUES)};
+ _tensor_builder->dimCorrection(values_index, false);
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::L2NormalizationNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::HashtableLookupNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::PReLUNode &node)
+{
+ const auto ifm_index{node.getInputs().at(model::operation::PReLUNode::Input::INPUT)};
+ const auto alpha_index{node.getInputs().at(model::operation::PReLUNode::Input::ALPHA)};
+
+ if (!(_ctx.at(ifm_index).shape() == _ctx.at(alpha_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(ifm_index).shape().rank(), _ctx.at(alpha_index).shape().rank());
+ const_cast<::neurun::model::Shape &>(_ctx.at(ifm_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(alpha_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::TransposeConvNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SQRTNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LogicalOrNode &node)
+{
+ const auto input0_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT1)};
+
+ if (!(_ctx.at(input0_index).shape() == _ctx.at(input1_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(input0_index).shape().rank(), _ctx.at(input1_index).shape().rank());
+ const_cast<::neurun::model::Shape &>(_ctx.at(input0_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(input1_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::LogicalNotNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SquaredDifferenceNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+ const_cast<neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::TopKV2Node &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::GatherNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::GatherNode::Input::INPUT)};
+ const auto indices_index{node.getInputs().at(model::operation::GatherNode::Input::INDICES)};
+ _tensor_builder->dimCorrection(ofm_index, false);
+ _tensor_builder->dimCorrection(ifm_index, false);
+ _tensor_builder->dimCorrection(indices_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::NegNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::AbsNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ArgMaxNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ArgMaxNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(ofm_index, false);
+ _tensor_builder->dimCorrection(ifm_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::DequantizeNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::MeanNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LocalResponseNormalizationNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::DepthToSpaceNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReduceMinNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SplitNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::SplitNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(input_index, false);
+ for (const auto &output : node.getOutputs())
+ _tensor_builder->dimCorrection(output, false);
+}
+
+void ShapeFixer::visit(const model::operation::UnpackNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::UnpackNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(input_index, false);
+ for (const auto &output_index : node.getOutputs())
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::PadNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::PadNode::Input::INPUT)};
+ const auto output_index{node.getOutputs().at(0)};
+ _tensor_builder->dimCorrection(input_index, false);
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_cl/ShapeFixer.h b/runtimes/neurun/backend/acl_cl/ShapeFixer.h
new file mode 100644
index 000000000..519d1bafb
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/ShapeFixer.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_SHAPE_FIXER_H__
+#define __NEURUN_BACKEND_ACL_CL_SHAPE_FIXER_H__
+
+#include <backend/IShapeFixer.h>
+
+#include "model/Operands.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+class ShapeFixer : public IShapeFixer
+{
+public:
+ ShapeFixer(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+ std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
+
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::MaxPool2DNode &) override;
+ void visit(const model::operation::AvgPool2DNode &) override;
+ void visit(const model::operation::ConcatNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::MulNode &) override;
+ void visit(const model::operation::ReduceSumNode &) override;
+ void visit(const model::operation::ReshapeNode &) override;
+ void visit(const model::operation::SqueezeNode &) override;
+ void visit(const model::operation::TanhNode &) override;
+ void visit(const model::operation::SoftmaxNode &) override;
+ void visit(const model::operation::StridedSliceNode &) override;
+ void visit(const model::operation::TransposeNode &) override;
+ void visit(const model::operation::AddNode &) override;
+ void visit(const model::operation::SubNode &) override;
+ void visit(const model::operation::CastNode &) override;
+ void visit(const model::operation::DivNode &) override;
+ void visit(const model::operation::ExpNode &) override;
+ void visit(const model::operation::LogisticNode &) override;
+ void visit(const model::operation::ReduceMaxNode &) override;
+ void visit(const model::operation::ComparisonNode &) override;
+ void visit(const model::operation::LogicalAndNode &) override;
+ void visit(const model::operation::LSTMNode &) override;
+ void visit(const model::operation::RSQRTNode &) override;
+ void visit(const model::operation::ReLUNode &) override;
+ void visit(const model::operation::ResizeBilinearNode &) override;
+ void visit(const model::operation::ReLU1Node &) override;
+ void visit(const model::operation::ReLU6Node &) override;
+ void visit(const model::operation::RNNNode &) override;
+ void visit(const model::operation::FloorNode &) override;
+ void visit(const model::operation::SpaceToDepthNode &) override;
+ void visit(const model::operation::L2Pool2DNode &) override;
+ void visit(const model::operation::EmbeddingLookupNode &) override;
+ void visit(const model::operation::L2NormalizationNode &) override;
+ void visit(const model::operation::HashtableLookupNode &) override;
+ void visit(const model::operation::PReLUNode &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+ void visit(const model::operation::SQRTNode &) override;
+ void visit(const model::operation::LogicalOrNode &) override;
+ void visit(const model::operation::LogicalNotNode &) override;
+ void visit(const model::operation::SquaredDifferenceNode &) override;
+ void visit(const model::operation::TopKV2Node &) override;
+ void visit(const model::operation::GatherNode &) override;
+ void visit(const model::operation::NegNode &) override;
+ void visit(const model::operation::AbsNode &) override;
+ void visit(const model::operation::ArgMaxNode &) override;
+ void visit(const model::operation::DequantizeNode &) override;
+ void visit(const model::operation::MeanNode &) override;
+ void visit(const model::operation::LocalResponseNormalizationNode &) override;
+ void visit(const model::operation::DepthToSpaceNode &) override;
+ void visit(const model::operation::ReduceMinNode &) override;
+ void visit(const model::operation::SplitNode &) override;
+ void visit(const model::operation::UnpackNode &) override;
+ void visit(const model::operation::PadNode &) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_SHAPE_FIXER_H__
diff --git a/runtimes/neurun/backend/acl_cl/TensorBuilder.h b/runtimes/neurun/backend/acl_cl/TensorBuilder.h
new file mode 100644
index 000000000..8ce69a6c2
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/TensorBuilder.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_TENSOR_BUILDER_H__
+#define __NEURUN_BACKEND_ACL_CL_TENSOR_BUILDER_H__
+
+#include <TemplTensorBuilder.h>
+
+#include "operand/CLTensor.h"
+#include "operand/CLSubTensor.h"
+#include "operand/Object.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+using TensorBuilder =
+ ::neurun::backend::acl_common::TemplTensorBuilder<::neurun::backend::acl_cl::operand::ICLTensor,
+ operand::CLTensor, operand::CLSubTensor,
+ operand::Object>;
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/backend/acl_cl/TensorManager.h b/runtimes/neurun/backend/acl_cl/TensorManager.h
new file mode 100644
index 000000000..10145bb9a
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/TensorManager.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_TENSOR_MANAGER_H__
+#define __NEURUN_BACKEND_ACL_CL_TENSOR_MANAGER_H__
+
+#include <arm_compute/runtime/CL/CLBufferAllocator.h>
+#include <arm_compute/runtime/PoolManager.h>
+#include <arm_compute/runtime/BlobLifetimeManager.h>
+#include <arm_compute/runtime/MemoryManagerOnDemand.h>
+#include <arm_compute/runtime/CL/CLMemoryGroup.h>
+
+#include <AclMemoryManager.h>
+#include <AclLinearMemoryManager.h>
+#include <AclInternalBufferManager.h>
+#include <AclTensorManager.h>
+
+#include "operand/CLTensor.h"
+#include "operand/CLSubTensor.h"
+#include "operand/Object.h"
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+
+using MemoryManager =
+ ::neurun::backend::acl_common::AclMemoryManager<operand::ICLTensor, operand::CLTensor,
+ operand::CLSubTensor, operand::Object>;
+
+using LinearMemoryManager = ::neurun::backend::acl_common::AclLinearMemoryManager<
+ operand::ICLTensor, operand::CLTensor, operand::CLSubTensor, operand::Object,
+ ::arm_compute::MemoryManagerOnDemand, ::arm_compute::PoolManager,
+ ::arm_compute::BlobLifetimeManager, ::arm_compute::CLBufferAllocator,
+ ::arm_compute::CLMemoryGroup>;
+
+using InternalBufferManager = ::neurun::backend::acl_common::AclInternalBufferManager<
+ ::arm_compute::MemoryManagerOnDemand, ::arm_compute::PoolManager,
+ ::arm_compute::BlobLifetimeManager, ::arm_compute::CLBufferAllocator>;
+
+using TensorManager =
+ ::neurun::backend::acl_common::AclTensorManager<::neurun::backend::acl_cl::operand::ICLTensor,
+ operand::CLTensor, operand::CLSubTensor,
+ operand::Object>;
+
+TensorManager *createTensorManager()
+{
+ const std::string executor_str = util::getConfigString(util::config::EXECUTOR);
+
+ if (executor_str == "Linear")
+ {
+ VERBOSE(acl_cl_createTensorManager) << "AclTensorManager as Linear" << std::endl;
+ return new TensorManager(new MemoryManager(), new LinearMemoryManager(),
+ new InternalBufferManager());
+ }
+ else
+ {
+ VERBOSE(acl_cl_createTensorManager) << "AclTensorManager" << std::endl;
+ return new TensorManager(new MemoryManager(), new MemoryManager(), new InternalBufferManager());
+ }
+}
+
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_TENSOR_MANAGER_H__
diff --git a/runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.cc b/runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.cc
new file mode 100644
index 000000000..aa1fd9aed
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.cc
@@ -0,0 +1,165 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConcatLayer.h"
+
+#include <arm_compute/runtime/CL/CLScheduler.h>
+
+#include "util/feature/nchw/View.h"
+#include "util/logging.h"
+
+namespace
+{
+
+inline bool matchSizeExceptAxis(const ::neurun::backend::acl_cl::operand::ICLTensor *t1,
+ const ::neurun::backend::acl_cl::operand::ICLTensor *t2,
+ uint32_t axis)
+{
+ assert(t1->num_dimensions() <= 4);
+ assert(t2->num_dimensions() <= 4);
+
+ for (uint32_t i = 0; i < 4; i++)
+ {
+ if (axis == i)
+ continue;
+ if (t1->info()->dimension(i) != t2->info()->dimension(i))
+ return false;
+ }
+ return true;
+}
+
+} // namespace {anonymous}
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace kernel
+{
+
+ConcatLayer::ConcatLayer()
+ : _input_allocs(), _output_alloc(nullptr), _axis(0), _input_type(arm_compute::DataType::F32)
+{
+ // DO NOTHING
+}
+
+template <typename T> bool ConcatLayer::concatenate()
+{
+ // Input and output size check
+ {
+ // NOTE Only tensors with 4 or fewer dimensions are supported
+
+ uint32_t axis_sum = 0;
+
+ for (auto input : _input_allocs)
+ {
+ assert(_output_alloc->ptr()->layout() == input->ptr()->layout());
+ assert(matchSizeExceptAxis(_output_alloc->ptr(), input->ptr(), _axis));
+ axis_sum += input->ptr()->info()->dimension(_axis);
+ }
+
+ assert(_output_alloc->ptr()->info()->dimension(_axis) == axis_sum);
+ }
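+ // Illustrative sketch (assumed shapes): concatenating (2, 3, H, W) and (2, 5, H, W)
+ // along _axis == 1 requires an output of shape (2, 8, H, W); axis_sum above
+ // accumulates 3 + 5 == 8 for that check.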
+
+ VERBOSE(Concat_RUN) << "START Concat" << std::endl;
+
+ // Perform operation
+ {
+ uint32_t axis_offset = 0;
+
+ auto output_fn = [&](::neurun::backend::operand::ITensor &out_tensor) {
+ for (auto input : _input_allocs)
+ {
+ auto &out_cl_tensor =
+ static_cast<::neurun::backend::acl_cl::operand::ICLTensor &>(out_tensor);
+ auto input_fn = [&](::neurun::backend::operand::ITensor &in_tensor) {
+ auto &in_cl_tensor =
+ static_cast<::neurun::backend::acl_cl::operand::ICLTensor &>(in_tensor);
+ for (uint32_t i = 0; i < in_cl_tensor.info()->dimension(0); i++)
+ {
+ for (uint32_t j = 0; j < in_cl_tensor.info()->dimension(1); j++)
+ {
+ for (uint32_t k = 0; k < in_cl_tensor.info()->dimension(2); k++)
+ {
+ for (uint32_t l = 0; l < in_cl_tensor.info()->dimension(3); l++)
+ {
+ int32_t io = (_axis == 0) ? axis_offset : 0;
+ int32_t jo = (_axis == 1) ? axis_offset : 0;
+ int32_t ko = (_axis == 2) ? axis_offset : 0;
+ int32_t lo = (_axis == 3) ? axis_offset : 0;
+ T value =
+ *reinterpret_cast<T *>(in_cl_tensor.handle()->ptr_to_element({i, j, k, l}));
+ *reinterpret_cast<T *>(out_cl_tensor.handle()->ptr_to_element(
+ {i + io, j + jo, k + ko, l + lo})) = value;
+ }
+ }
+ }
+ }
+ if (_axis == 0)
+ axis_offset += in_cl_tensor.info()->dimension(0);
+ if (_axis == 1)
+ axis_offset += in_cl_tensor.info()->dimension(1);
+ if (_axis == 2)
+ axis_offset += in_cl_tensor.info()->dimension(2);
+ if (_axis == 3)
+ axis_offset += in_cl_tensor.info()->dimension(3);
+ };
+ input->access(input_fn);
+ }
+ };
+ _output_alloc->access(output_fn);
+ }
+
+ VERBOSE(Concat_RUN) << "END Concat" << std::endl;
+
+ return true;
+}
+
+void ConcatLayer::configure(
+ const std::vector<::neurun::backend::acl_cl::operand::Object *> &input_allocs, int32_t axis,
+ ::neurun::backend::acl_cl::operand::Object *output_alloc)
+{
+ _input_allocs = input_allocs;
+ _output_alloc = output_alloc;
+
+ assert(axis < 4);
+
+ // TODO Handle when axis is negative
+ assert(axis >= 0);
+
+ _axis = axis;
+
+ _input_type = input_allocs[0]->ptr()->data_type();
+}
+
+void ConcatLayer::run()
+{
+ if (_input_type == arm_compute::DataType::F32)
+ {
+ concatenate<float>();
+ }
+ else if (_input_type == arm_compute::DataType::QASYMM8)
+ {
+ concatenate<uint8_t>();
+ }
+}
+
+} // namespace kernel
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.h b/runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.h
new file mode 100644
index 000000000..ed273e297
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/kernel/ConcatLayer.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_KERNEL_CONCAT_LAYER_H__
+#define __NEURUN_BACKEND_ACL_CL_KERNEL_CONCAT_LAYER_H__
+
+#include <arm_compute/runtime/IFunction.h>
+#include <arm_compute/core/Types.h>
+
+#include "operand/Object.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace kernel
+{
+
+//
+// neurun::backend::acl_cl::kernel::ConcatLayer
+// A naive implementation of ConcatLayer for ACL
+//
+
+class ConcatLayer : public ::arm_compute::IFunction
+{
+public:
+ ConcatLayer();
+
+public:
+ void configure(const std::vector<::neurun::backend::acl_cl::operand::Object *> &input_allocs,
+ int32_t axis /* NNAPI tensor axis from NHWC order */,
+ ::neurun::backend::acl_cl::operand::Object *output_alloc);
+
+ void run();
+
+private:
+ template <typename T> bool concatenate();
+
+private:
+ std::vector<::neurun::backend::acl_cl::operand::Object *> _input_allocs;
+ ::neurun::backend::acl_cl::operand::Object *_output_alloc;
+ int32_t _axis;
+ arm_compute::DataType _input_type;
+};
+
+} // namespace kernel
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_KERNEL_CONCAT_LAYER_H__
diff --git a/runtimes/neurun/backend/acl_cl/operand/CLSubTensor.cc b/runtimes/neurun/backend/acl_cl/operand/CLSubTensor.cc
new file mode 100644
index 000000000..70c8829d9
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/operand/CLSubTensor.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CLSubTensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace operand
+{
+
+CLSubTensor::CLSubTensor(ICLTensor *parent, const arm_compute::TensorShape &tensor_shape,
+ const arm_compute::Coordinates &coords, size_t rank, bool extend_parent)
+ : _cl_sub_tensor(std::make_shared<arm_compute::CLSubTensor>(parent->handle(), tensor_shape,
+ coords, extend_parent)),
+ _rank{rank}
+{
+ // DO NOTHING
+}
+
+const arm_compute::CLSubTensor *CLSubTensor::handle() const { return _cl_sub_tensor.get(); }
+
+arm_compute::CLSubTensor *CLSubTensor::handle() { return _cl_sub_tensor.get(); }
+
+} // namespace operand
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_cl/operand/CLSubTensor.h b/runtimes/neurun/backend/acl_cl/operand/CLSubTensor.h
new file mode 100644
index 000000000..8eba3760f
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/operand/CLSubTensor.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_CL_SUB_TENSOR_H__
+#define __NEURUN_BACKEND_ACL_CL_OPERAND_CL_SUB_TENSOR_H__
+
+#include <arm_compute/runtime/CL/CLSubTensor.h>
+#include "ICLTensor.h"
+#include "compiler/SubTensorInfo.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace operand
+{
+
+class CLSubTensor : public ICLTensor
+{
+public:
+ CLSubTensor() = delete;
+
+public:
+ CLSubTensor(ICLTensor *parent, const arm_compute::TensorShape &tensor_shape,
+ const arm_compute::Coordinates &coords, size_t rank, bool extend_parent = false);
+
+public:
+ size_t num_dimensions() const final { return _rank; }
+
+public:
+ const arm_compute::CLSubTensor *handle() const override;
+ arm_compute::CLSubTensor *handle() override;
+
+public:
+ // Always report padding so that direct memcpy access is never used for a SubTensor
+ bool has_padding() const override { return true; }
+
+private:
+ std::shared_ptr<arm_compute::CLSubTensor> _cl_sub_tensor;
+ size_t _rank;
+};
+
+} // namespace operand
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_CL_SUB_TENSOR_H__
diff --git a/runtimes/neurun/backend/acl_cl/operand/CLTensor.cc b/runtimes/neurun/backend/acl_cl/operand/CLTensor.cc
new file mode 100644
index 000000000..6153fc2e4
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/operand/CLTensor.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CLTensor.h"
+
+#include <arm_compute/runtime/CL/CLScheduler.h>
+#include <arm_compute/runtime/CL/CLMemory.h>
+#include <arm_compute/runtime/CL/CLMemoryRegion.h>
+
+#include <Convert.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace operand
+{
+
+CLTensor::CLTensor(const arm_compute::TensorInfo &info, size_t rank)
+ : _cl_tensor(std::make_shared<arm_compute::CLTensor>()), _rank{rank}
+{
+ allocator()->init(info);
+}
+
+const arm_compute::CLTensor *CLTensor::handle() const { return _cl_tensor.get(); }
+
+arm_compute::CLTensor *CLTensor::handle() { return _cl_tensor.get(); }
+
+arm_compute::CLTensorAllocator *CLTensor::allocator() { return _cl_tensor->allocator(); }
+
+void CLTensor::map(bool blocking) { _cl_tensor->map(blocking); }
+
+void CLTensor::unmap() { _cl_tensor->unmap(); }
+
+void CLTensor::setBuffer(void *host_ptr)
+{
+  // Construct a cl::Buffer backed by the user-supplied host memory
+ auto buffer = cl::Buffer(arm_compute::CLScheduler::get().context(),
+ CL_MEM_USE_HOST_PTR | CL_MEM_READ_WRITE, info()->total_size(), host_ptr);
+  // Import the buffer as this tensor's backing memory
+ allocator()->import_memory(buffer);
+}
+
+} // namespace operand
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_cl/operand/CLTensor.h b/runtimes/neurun/backend/acl_cl/operand/CLTensor.h
new file mode 100644
index 000000000..952851623
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/operand/CLTensor.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_CL_TENSOR_H__
+#define __NEURUN_BACKEND_ACL_CL_OPERAND_CL_TENSOR_H__
+
+#include <arm_compute/core/TensorInfo.h>
+#include <arm_compute/runtime/CL/CLTensor.h>
+#include <arm_compute/runtime/CL/CLScheduler.h>
+#include "arm_compute/runtime/CL/CLTensorAllocator.h"
+#include "ICLTensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace operand
+{
+
+class CLTensor : public ICLTensor
+{
+public:
+ CLTensor() = delete;
+
+public:
+ CLTensor(const arm_compute::TensorInfo &info, size_t rank);
+
+public:
+ size_t num_dimensions() const final { return _rank; }
+
+public:
+ const arm_compute::CLTensor *handle() const override;
+ arm_compute::CLTensor *handle() override;
+
+public:
+ arm_compute::CLTensorAllocator *allocator();
+ void map(bool blocking = true);
+ void unmap();
+ /** Set given buffer as the buffer of the tensor
+ *
+ * @note Ownership of the memory is not transferred to this object.
+ * Thus management (allocate/free) should be done by the client.
+ *
+ * @param[in] host_ptr Storage to be used.
+ */
+ void setBuffer(void *host_ptr);
+
+private:
+ std::shared_ptr<arm_compute::CLTensor> _cl_tensor;
+ size_t _rank;
+};
+
+} // namespace operand
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_CL_TENSOR_H__
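A minimal sketch of the calling pattern setBuffer() is designed for (illustrative only; since ownership is never transferred, the host allocation must outlive every use of the tensor):

#include <vector>
#include <arm_compute/runtime/CL/CLScheduler.h>
#include "operand/CLTensor.h"

void set_buffer_sketch()
{
  arm_compute::CLScheduler::get().default_init();

  arm_compute::TensorInfo info(arm_compute::TensorShape(8U), 1,
                               arm_compute::DataType::F32);
  neurun::backend::acl_cl::operand::CLTensor tensor(info, /*rank=*/1);

  // Client-owned storage, wrapped via CL_MEM_USE_HOST_PTR; it must outlive
  // the tensor's use and is never freed through the tensor.
  std::vector<float> host(8, 0.0f);
  tensor.setBuffer(host.data());

  // ... enqueue kernels that read or write `tensor` ...
}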
diff --git a/runtimes/neurun/backend/acl_cl/operand/ICLTensor.h b/runtimes/neurun/backend/acl_cl/operand/ICLTensor.h
new file mode 100644
index 000000000..022cec6e3
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/operand/ICLTensor.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_I_CL_TENSOR_H__
+#define __NEURUN_BACKEND_ACL_CL_OPERAND_I_CL_TENSOR_H__
+
+#include <arm_compute/core/CL/ICLTensor.h>
+
+#include <IACLTensor.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace operand
+{
+
+class ICLTensor : public acl_common::IACLTensor
+{
+public:
+ const arm_compute::ICLTensor *handle() const override = 0;
+ arm_compute::ICLTensor *handle() override = 0;
+
+public:
+ void map(cl::CommandQueue &q, bool blocking = true) { return handle()->map(q, blocking); }
+ void unmap(cl::CommandQueue &q) { return handle()->unmap(q); }
+};
+
+} // namespace operand
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_I_CL_TENSOR_H__
diff --git a/runtimes/neurun/backend/acl_cl/operand/Object.cc b/runtimes/neurun/backend/acl_cl/operand/Object.cc
new file mode 100644
index 000000000..8f9b2a181
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/operand/Object.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Object.h"
+
+#include <arm_compute/runtime/CL/CLScheduler.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace operand
+{
+
+void Object::access(const std::function<void(backend::operand::ITensor &tensor)> &fn) const
+{
+ auto &queue = ::arm_compute::CLScheduler::get().queue();
+
+  // An unspecified optional input has total_size() == 0; nothing to access, so skip it
+ if (_tensor->total_size() == 0)
+ return;
+
+ _tensor->map(queue);
+ fn(*_tensor);
+ _tensor->unmap(queue);
+}
+
+} // namespace operand
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
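A minimal sketch of a caller filling a wrapped tensor through access() (illustrative only; the callback runs between map() and unmap(), so buffer() is valid only inside it, and unset optional operands are skipped entirely):

#include <cstddef>
#include <cstdint>
#include "operand/Object.h"

void fill_with_zeros(const neurun::backend::acl_cl::operand::Object &obj)
{
  obj.access([](neurun::backend::operand::ITensor &tensor) {
    uint8_t *base = tensor.buffer(); // valid only while mapped
    for (size_t i = 0; i < tensor.total_size(); ++i)
      base[i] = 0;
  });
}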
diff --git a/runtimes/neurun/backend/acl_cl/operand/Object.h b/runtimes/neurun/backend/acl_cl/operand/Object.h
new file mode 100644
index 000000000..a4308feed
--- /dev/null
+++ b/runtimes/neurun/backend/acl_cl/operand/Object.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_OBJECT_H__
+#define __NEURUN_BACKEND_ACL_CL_OPERAND_OBJECT_H__
+
+#include <memory>
+
+#include <backend/operand/IObject.h>
+#include "operand/ICLTensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_cl
+{
+namespace operand
+{
+
+class Object : public backend::operand::IObject
+{
+public:
+ Object() = default;
+
+public:
+ Object(const std::shared_ptr<acl_cl::operand::ICLTensor> &tensor) : _tensor{tensor}
+ {
+ // DO NOTHING
+ }
+
+public:
+ acl_cl::operand::ICLTensor *ptr(void) const override { return _tensor.get(); }
+
+private:
+ std::shared_ptr<acl_cl::operand::ICLTensor> _tensor;
+
+public:
+ void access(const std::function<void(backend::operand::ITensor &tensor)> &fn) const override;
+};
+
+} // namespace operand
+} // namespace acl_cl
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_OBJECT_H__
diff --git a/runtimes/neurun/backend/acl_common/AclFunction.h b/runtimes/neurun/backend/acl_common/AclFunction.h
new file mode 100644
index 000000000..a63f3807b
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/AclFunction.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_KERNEL_ACL_FUNCTION_H__
+#define __NEURUN_BACKEND_ACL_COMMON_KERNEL_ACL_FUNCTION_H__
+
+#include <exec/IFunction.h>
+#include <arm_compute/runtime/IFunction.h>
+#include <arm_compute/runtime/CL/CLScheduler.h>
+#include <memory>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+class AclFunction : public ::neurun::exec::IFunction
+{
+public:
+ AclFunction() = delete;
+
+public:
+ AclFunction(std::unique_ptr<::arm_compute::IFunction> &&func) : _func(std::move(func))
+ {
+ // DO NOTHING
+ }
+
+public:
+ void run() override { _func->run(); }
+ void runSync() override
+ {
+ run();
+ arm_compute::CLScheduler::get().sync();
+ }
+ void prepare() override { _func->prepare(); }
+
+private:
+ std::unique_ptr<::arm_compute::IFunction> _func;
+};
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_KERNEL_ACL_FUNCTION_H__
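A minimal sketch of wrapping an ACL function (illustrative only; CLActivationLayer is just a stand-in kernel). Since ACL's run() merely enqueues work on the CL command queue, runSync() is what a caller uses when it needs results visible on the host:

#include <arm_compute/runtime/CL/functions/CLActivationLayer.h>
#include <cpp14/memory.h>
#include "AclFunction.h"

std::unique_ptr<neurun::backend::acl_common::AclFunction>
make_relu(arm_compute::ICLTensor *in, arm_compute::ICLTensor *out)
{
  auto fn = nnfw::cpp14::make_unique<arm_compute::CLActivationLayer>();
  fn->configure(in, out,
                arm_compute::ActivationLayerInfo{
                    arm_compute::ActivationLayerInfo::ActivationFunction::RELU});
  return nnfw::cpp14::make_unique<neurun::backend::acl_common::AclFunction>(std::move(fn));
}

// Usage: make_relu(x, y)->runSync(); // enqueue, then CLScheduler::get().sync()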
diff --git a/runtimes/neurun/backend/acl_common/AclInternalBufferManager.h b/runtimes/neurun/backend/acl_common/AclInternalBufferManager.h
new file mode 100644
index 000000000..fe964bd85
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/AclInternalBufferManager.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_INTERNAL_BUFFER_MANAGER_H__
+#define __NEURUN_BACKEND_ACL_COMMON_INTERNAL_BUFFER_MANAGER_H__
+
+#include <arm_compute/runtime/IMemoryManager.h>
+#include <cassert>
+#include <memory>
+#include <backend/IMemoryManager.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+// NOTE If other backends turn out to need something like InternalBufferManager,
+// this interface can be moved to core/include/backend/
+/**
+ * @brief Interface for InternalBufferManager which holds a ::arm_compute::IMemoryManager pointer
+ */
+struct IInternalBufferManager : public backend::IMemoryManager
+{
+ virtual ~IInternalBufferManager() = default;
+
+ /**
+ * @brief Get shared_ptr of ::arm_compute::IMemoryManager
+ */
+ virtual std::shared_ptr<::arm_compute::IMemoryManager> internal_buffer_manager(void) = 0;
+};
+
+/**
+ * @brief Class for InternalBufferManager which holds a ::arm_compute::IMemoryManager pointer
+ */
+template <typename T_MemoryManager, typename T_PoolManager, typename T_LifetimeManager,
+ typename T_Allocator>
+class AclInternalBufferManager : public IInternalBufferManager
+{
+public:
+ AclInternalBufferManager() : _allocator{nullptr}
+ {
+ std::shared_ptr<T_LifetimeManager> lifetime_mgr = std::make_shared<T_LifetimeManager>();
+ std::shared_ptr<T_PoolManager> pool_mgr = std::make_shared<T_PoolManager>();
+
+ _internal_manager = std::make_shared<T_MemoryManager>(lifetime_mgr, pool_mgr);
+ assert(_internal_manager);
+ }
+
+ virtual ~AclInternalBufferManager() = default;
+
+ /**
+ * @brief Allocate the internal buffer manager on acl
+ */
+ virtual void allocate(void) override
+ {
+ _allocator = std::make_shared<T_Allocator>();
+ _internal_manager->populate(*_allocator, 1);
+ }
+
+ /**
+ * @brief Deallocate the internal buffer manager on acl
+ */
+ virtual void deallocate(void) override { _internal_manager->clear(); }
+
+ /**
+ * @brief Get shared_ptr of ::arm_compute::IMemoryManager
+ */
+ virtual std::shared_ptr<::arm_compute::IMemoryManager> internal_buffer_manager(void) override
+ {
+ return _internal_manager;
+ }
+
+private:
+ std::shared_ptr<T_Allocator> _allocator;
+ std::shared_ptr<T_MemoryManager> _internal_manager;
+};
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_INTERNAL_BUFFER_MANAGER_H__
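A plausible CL instantiation of this template, mirroring ACL's standard memory-manager stack (illustrative only; the concrete typedef a backend actually uses may differ):

#include <arm_compute/runtime/BlobLifetimeManager.h>
#include <arm_compute/runtime/PoolManager.h>
#include <arm_compute/runtime/MemoryManagerOnDemand.h>
#include <arm_compute/runtime/CL/CLBufferAllocator.h>
#include "AclInternalBufferManager.h"

// Parameter order: <T_MemoryManager, T_PoolManager, T_LifetimeManager, T_Allocator>
using CLInternalBufferManager = neurun::backend::acl_common::AclInternalBufferManager<
    arm_compute::MemoryManagerOnDemand, arm_compute::PoolManager,
    arm_compute::BlobLifetimeManager, arm_compute::CLBufferAllocator>;

// allocate() populates a single pool through the allocator;
// internal_buffer_manager() is what ACL functions needing scratch memory receive.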
diff --git a/runtimes/neurun/backend/acl_common/AclLinearMemoryManager.h b/runtimes/neurun/backend/acl_common/AclLinearMemoryManager.h
new file mode 100644
index 000000000..793c7b2c4
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/AclLinearMemoryManager.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_LINEAR_MEMORY_MANAGER_H__
+#define __NEURUN_BACKEND_ACL_COMMON_LINEAR_MEMORY_MANAGER_H__
+
+#include <cassert>
+
+#include "AclMemoryManager.h"
+#include "model/OperandIndexMap.h"
+#include "util/logging.h"
+
+namespace
+{
+
+template <typename T_MemoryManager, typename T_PoolManager, typename T_LifetimeManager>
+std::shared_ptr<T_MemoryManager> createMemoryManager()
+{
+ std::shared_ptr<T_LifetimeManager> lifetime_mgr = std::make_shared<T_LifetimeManager>();
+ std::shared_ptr<T_PoolManager> pool_mgr = std::make_shared<T_PoolManager>();
+
+ std::shared_ptr<T_MemoryManager> mem_mgr =
+ std::make_shared<T_MemoryManager>(lifetime_mgr, pool_mgr);
+ return mem_mgr;
+}
+
+} // namespace anonymous
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object,
+ typename T_MemoryManager, typename T_PoolManager, typename T_LifetimeManager,
+ typename T_Allocator, typename T_MemoryGroup>
+class AclLinearMemoryManager : public AclMemoryManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>
+{
+public:
+ AclLinearMemoryManager()
+ : _allocator{nullptr},
+ _io_manager{createMemoryManager<T_MemoryManager, T_PoolManager, T_LifetimeManager>()},
+ _io_group{std::make_shared<T_MemoryGroup>(_io_manager)}
+ {
+ // DO NOTHING
+ }
+
+ virtual ~AclLinearMemoryManager() = default;
+
+ virtual void allocate(void) override
+ {
+ _allocator = std::make_shared<T_Allocator>();
+ _io_manager->populate(*_allocator, 1);
+ _io_group->acquire();
+ }
+
+ virtual void deallocate(void) override
+ {
+ _io_group->release();
+ _io_manager->clear();
+ }
+
+ virtual void startLifetime(const model::OperandIndex &ind) override
+ {
+ auto &tensors = this->tensors();
+ assert(tensors.find(ind) != tensors.end());
+
+ auto tensor = tensors[ind];
+ assert(tensor->handle());
+
+ _io_group->manage(tensor->handle());
+ }
+
+ virtual void finishLifetime(const model::OperandIndex &ind) override
+ {
+ auto &tensors = this->tensors();
+ assert(tensors.find(ind) != tensors.end());
+
+ auto tensor = tensors[ind];
+ assert(tensor->allocator());
+
+ tensor->allocator()->allocate();
+ }
+
+private:
+ std::shared_ptr<T_Allocator> _allocator;
+ std::shared_ptr<T_MemoryManager> _io_manager;
+ std::shared_ptr<T_MemoryGroup> _io_group;
+};
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_LINEAR_MEMORY_MANAGER_H__
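A comment-style walkthrough of the intended call order (illustrative only). Note the slightly surprising naming: startLifetime() registers the tensor with the memory group, while finishLifetime() calls allocate(), which is how ACL's lifetime managers learn that a usage interval has closed:

// Sketch of the expected call order for a linearly executed model:
//
//   mgr.buildTensor(ind, info, rank);  // inherited from AclMemoryManager
//   mgr.startLifetime(ind);            // first use: _io_group->manage(handle)
//   ...                                // uses of other operands interleave
//   mgr.finishLifetime(ind);           // last use: allocator()->allocate()
//   mgr.allocate();                    // populate the pool, acquire the group
//   ...                                // run the model
//   mgr.deallocate();                  // release the group, clear the pool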
diff --git a/runtimes/neurun/backend/acl_common/AclMemoryManager.h b/runtimes/neurun/backend/acl_common/AclMemoryManager.h
new file mode 100644
index 000000000..076a3f490
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/AclMemoryManager.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_MEMORY_MANAGER_H__
+#define __NEURUN_BACKEND_ACL_COMMON_MEMORY_MANAGER_H__
+
+#include <arm_compute/core/Types.h>
+#include <arm_compute/runtime/IMemoryManager.h>
+#include <cassert>
+
+#include "backend/IMemoryManager.h"
+#include "model/OperandIndexMap.h"
+#include "Convert.h"
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+class AclMemoryManager : public backend::IMemoryManager
+{
+public:
+ AclMemoryManager()
+ {
+ // DO NOTHING
+ }
+
+ virtual ~AclMemoryManager() = default;
+
+ virtual void allocate(void) override
+ {
+ for (const auto &tensor_entry : _tensors)
+ {
+ auto tensor = tensor_entry.second;
+ tensor->allocator()->allocate();
+ }
+ }
+
+ virtual void deallocate(void) override
+ {
+ for (const auto &tensor_entry : _tensors)
+ {
+ auto tensor = tensor_entry.second;
+ tensor->allocator()->free();
+ }
+ }
+
+  virtual void startLifetime(const model::OperandIndex &) { /* DO NOTHING */ }
+  virtual void finishLifetime(const model::OperandIndex &) { /* DO NOTHING */ }
+
+ void buildTensor(const model::OperandIndex &ind, const ::arm_compute::TensorInfo &info,
+ size_t rank)
+ {
+ auto tensor = std::make_shared<T_Tensor>(info, rank);
+ _tensors[ind] = tensor;
+ }
+
+  void buildSubtensor(std::shared_ptr<T_ITensor> parent_tensor,
+                      const model::OperandIndex &child_ind, const ::arm_compute::TensorShape &shape,
+                      const ::arm_compute::Coordinates &coordinates, size_t rank,
+                      bool extend_parent)
+  {
+    auto subtensor =
+        std::make_shared<T_SubTensor>(parent_tensor.get(), shape, coordinates, rank, extend_parent);
+    _subtensors[child_ind] = subtensor;
+  }
+
+ std::shared_ptr<T_Object> wrapTensor(const model::OperandIndex &ind)
+ {
+ if (_objects.find(ind) != _objects.end())
+ {
+ return _objects.at(ind);
+ }
+ else
+ {
+ if (_tensors.find(ind) != _tensors.end())
+ {
+ return _objects[ind] = std::make_shared<T_Object>(_tensors.at(ind));
+ }
+ else
+ {
+ return _objects[ind] = std::make_shared<T_Object>(_subtensors.at(ind));
+ }
+ }
+ }
+
+ model::OperandIndexMap<std::shared_ptr<T_Tensor>> &tensors(void) { return _tensors; }
+
+ model::OperandIndexMap<std::shared_ptr<T_SubTensor>> &subtensors(void) { return _subtensors; }
+
+ model::OperandIndexMap<std::shared_ptr<T_Object>> &objects(void) { return _objects; }
+
+private:
+ model::OperandIndexMap<std::shared_ptr<T_Tensor>> _tensors;
+ model::OperandIndexMap<std::shared_ptr<T_SubTensor>> _subtensors;
+ model::OperandIndexMap<std::shared_ptr<T_Object>> _objects;
+};
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_MEMORY_MANAGER_H__
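A minimal sketch of the base manager instantiated with the acl_cl operand types from this patch (illustrative only): buildTensor() creates only the wrapper, device memory appears at allocate(), and wrapTensor() memoizes one IObject facade per operand:

#include "AclMemoryManager.h"
#include "operand/CLTensor.h"    // acl_cl headers from this patch
#include "operand/CLSubTensor.h"
#include "operand/Object.h"

using CLMemoryManager = neurun::backend::acl_common::AclMemoryManager<
    neurun::backend::acl_cl::operand::ICLTensor,
    neurun::backend::acl_cl::operand::CLTensor,
    neurun::backend::acl_cl::operand::CLSubTensor,
    neurun::backend::acl_cl::operand::Object>;

void memory_manager_sketch(const neurun::model::OperandIndex &ind,
                           const arm_compute::TensorInfo &info)
{
  CLMemoryManager mgr;
  mgr.buildTensor(ind, info, /*rank=*/4); // wrapper only, no device memory yet
  mgr.allocate();                         // backs every registered tensor
  auto obj = mgr.wrapTensor(ind);         // memoized Object facade
  obj->access([](neurun::backend::operand::ITensor &) {
    // tensor is mapped here; fill or read it
  });
  mgr.deallocate();
}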
diff --git a/runtimes/neurun/backend/acl_common/AclTensorManager.h b/runtimes/neurun/backend/acl_common/AclTensorManager.h
new file mode 100644
index 000000000..63918ff09
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/AclTensorManager.h
@@ -0,0 +1,315 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_TENSOR_MANAGER_H__
+#define __NEURUN_BACKEND_ACL_COMMON_TENSOR_MANAGER_H__
+
+#include <arm_compute/runtime/IMemoryManager.h>
+
+#include "backend/ITensorManager.h"
+#include "AclMemoryManager.h"
+#include "AclInternalBufferManager.h"
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+class AclTensorManager : public backend::ITensorManager
+{
+public:
+ using T_AclMemoryManager = AclMemoryManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>;
+
+ AclTensorManager(T_AclMemoryManager *const_mgr, T_AclMemoryManager *nonconst_mgr,
+ IInternalBufferManager *inter_mgr);
+
+ virtual ~AclTensorManager() = default;
+
+ void allocateConsts(void) override;
+ void allocateNonconsts(void) override;
+ void deallocateConsts(void) override;
+ void deallocateNonconsts(void) override;
+
+ void allocateInternalBufferManager(void);
+ void deallocateInternalBufferManager(void);
+
+ void buildTensor(const model::OperandIndex &ind, const ::arm_compute::TensorInfo &info,
+ size_t rank, bool as_const);
+  void buildSubtensor(const model::OperandIndex &parent, const model::OperandIndex &child,
+                      const ::arm_compute::TensorShape &shape,
+                      const ::arm_compute::Coordinates &coordinates, size_t rank,
+                      bool extend_parent);
+
+ std::shared_ptr<T_ITensor> findTensorAsParent(const model::OperandIndex &ind);
+
+ void startLifetime(const model::OperandIndex &ind);
+ void finishLifetime(const model::OperandIndex &ind);
+
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind);
+ std::shared_ptr<T_ITensor> at(const ::neurun::model::OperandIndex &ind);
+
+ model::OperandIndexMap<std::shared_ptr<T_Tensor>> &constTensors(void);
+ model::OperandIndexMap<std::shared_ptr<T_Tensor>> &nonconstTensors(void);
+ model::OperandIndexMap<std::shared_ptr<T_SubTensor>> &nonconstSubtensors(void);
+
+ std::shared_ptr<::arm_compute::IMemoryManager> internal_buffer_manager(void);
+
+ void iterate(const std::function<void(const model::OperandIndex &)> &fn);
+
+ void tryDeallocConstants(void);
+
+private:
+ std::unique_ptr<T_AclMemoryManager> _const_mgr;
+ std::unique_ptr<T_AclMemoryManager> _nonconst_mgr;
+ std::unique_ptr<IInternalBufferManager> _inter_mgr;
+ model::OperandIndexMap<T_AclMemoryManager &> _ind_to_mgr;
+};
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#include <cassert>
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::AclTensorManager(
+ T_AclMemoryManager *const_mgr, T_AclMemoryManager *nonconst_mgr,
+ IInternalBufferManager *inter_mgr)
+ : _const_mgr{const_mgr}, _nonconst_mgr{nonconst_mgr}, _inter_mgr{inter_mgr}
+{
+ // DO NOTHING
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::allocateConsts(void)
+{
+ _const_mgr->allocate();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::allocateNonconsts(void)
+{
+ _nonconst_mgr->allocate();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::deallocateConsts(void)
+{
+ _const_mgr->deallocate();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::deallocateNonconsts(void)
+{
+ _nonconst_mgr->deallocate();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::allocateInternalBufferManager(
+ void)
+{
+ _inter_mgr->allocate();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::deallocateInternalBufferManager(
+ void)
+{
+ _inter_mgr->deallocate();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::buildTensor(
+ const model::OperandIndex &ind, const ::arm_compute::TensorInfo &info, size_t rank,
+ bool as_const)
+{
+ assert(_ind_to_mgr.find(ind) == _ind_to_mgr.end());
+ if (as_const)
+ {
+ _const_mgr->buildTensor(ind, info, rank);
+ _ind_to_mgr.insert({ind, *_const_mgr});
+ }
+ else
+ {
+ _nonconst_mgr->buildTensor(ind, info, rank);
+ _ind_to_mgr.insert({ind, *_nonconst_mgr});
+ }
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::buildSubtensor(
+ const model::OperandIndex &parent, const model::OperandIndex &child,
+ const ::arm_compute::TensorShape &shape, const ::arm_compute::Coordinates &coordinates,
+    size_t rank, bool extend_parent)
+{
+  assert(_ind_to_mgr.find(child) == _ind_to_mgr.end());
+  std::shared_ptr<T_ITensor> parent_tensor = findTensorAsParent(parent);
+  assert(parent_tensor);
+  _nonconst_mgr->buildSubtensor(parent_tensor, child, shape, coordinates, rank, extend_parent);
+ _ind_to_mgr.insert({child, *_nonconst_mgr});
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::shared_ptr<T_ITensor>
+AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::findTensorAsParent(
+ const model::OperandIndex &ind)
+{
+ auto &tensors = _nonconst_mgr->tensors();
+ auto &subtensors = _nonconst_mgr->subtensors();
+ if (tensors.find(ind) != tensors.end())
+ {
+ // Parent is allocated as tensor
+ return tensors[ind];
+ }
+ else if (subtensors.find(ind) != subtensors.end())
+ {
+ // Parent is allocated as subtensor
+ return subtensors[ind];
+ }
+ else
+ {
+ return nullptr;
+ }
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::startLifetime(
+ const model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ _ind_to_mgr.at(ind).startLifetime(ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::finishLifetime(
+ const model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ _ind_to_mgr.at(ind).finishLifetime(ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::shared_ptr<backend::operand::IObject>
+AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::wrapTensor(
+ const model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ return _ind_to_mgr.at(ind).wrapTensor(ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::shared_ptr<T_ITensor> AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::at(
+ const ::neurun::model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+
+ auto &tensors = _ind_to_mgr.at(ind).tensors();
+ if (tensors.find(ind) != tensors.end())
+ {
+ return tensors.at(ind);
+ }
+ else
+ {
+ return _ind_to_mgr.at(ind).subtensors().at(ind);
+ }
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+model::OperandIndexMap<std::shared_ptr<T_Tensor>> &
+AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::constTensors(void)
+{
+ return _const_mgr->tensors();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+model::OperandIndexMap<std::shared_ptr<T_Tensor>> &
+AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::nonconstTensors(void)
+{
+ return _nonconst_mgr->tensors();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+model::OperandIndexMap<std::shared_ptr<T_SubTensor>> &
+AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::nonconstSubtensors(void)
+{
+ return _nonconst_mgr->subtensors();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::shared_ptr<::arm_compute::IMemoryManager>
+AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::internal_buffer_manager(void)
+{
+ return _inter_mgr->internal_buffer_manager();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::iterate(
+ const std::function<void(const model::OperandIndex &)> &fn)
+{
+ for (auto it : _nonconst_mgr->tensors())
+ fn(it.first);
+
+ for (auto it : _nonconst_mgr->subtensors())
+ fn(it.first);
+
+ for (auto it : _const_mgr->tensors())
+ fn(it.first);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>::tryDeallocConstants(void)
+{
+ auto &tensors = _const_mgr->tensors();
+ auto &objects = _const_mgr->objects();
+
+ for (auto it = tensors.begin(); it != tensors.end();)
+ {
+ const auto &ind = it->first;
+ auto tensor = it->second;
+ if (tensor->handle() && !tensor->handle()->is_used())
+ {
+ VERBOSE(AclTensorManager) << "Tensor #" << ind.value()
+ << " will be deallocated as an unused constant tensor" << std::endl;
+ tensor->allocator()->free();
+ tensor.reset();
+ it = tensors.erase(it);
+ objects.erase(ind);
+ }
+ else
+ {
+ ++it;
+ }
+ }
+}
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_TENSOR_MANAGER_H__
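A construction sketch (illustrative only; the CL*MemoryManager names are hypothetical stand-ins for a backend's concrete instantiations, and ownership of the three raw pointers passes to the manager via unique_ptr). _ind_to_mgr routes each operand to the const or nonconst manager:

// Construction sketch:
//
//   auto *tensor_mgr =
//       new AclTensorManager<ICLTensor, CLTensor, CLSubTensor, Object>(
//           new CLConstMemoryManager,     // constants
//           new CLLinearMemoryManager,    // non-constants
//           new CLInternalBufferManager); // ACL scratch memory
//
//   tensor_mgr->buildTensor(ind, info, rank, /*as_const=*/true);
//   tensor_mgr->allocateConsts();
//   ...
//   tensor_mgr->tryDeallocConstants(); // drop constants unused after configure()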
diff --git a/runtimes/neurun/backend/acl_common/CMakeLists.txt b/runtimes/neurun/backend/acl_common/CMakeLists.txt
new file mode 100644
index 000000000..0dff1d68a
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/CMakeLists.txt
@@ -0,0 +1,21 @@
+# Skip this backend when ARMCompute is unavailable (unsupported architecture)
+nnfw_find_package(ARMCompute QUIET)
+if(NOT ARMCompute_FOUND)
+ return()
+endif(NOT ARMCompute_FOUND)
+
+file(GLOB SOURCES "*.cc")
+
+add_library(${LIB_NEURUN_BACKEND_ACL_COMMON} STATIC ${SOURCES})
+
+target_include_directories(${LIB_NEURUN_BACKEND_ACL_COMMON} PUBLIC ${NEURUN_INCLUDE_DIR})
+target_include_directories(${LIB_NEURUN_BACKEND_ACL_COMMON} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_COMMON} PUBLIC arm_compute arm_compute_ex)
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_COMMON} PUBLIC nnfw_lib_misc nnfw_lib_cpp14)
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_COMMON} PRIVATE nnfw_common)
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_COMMON} PRIVATE nnfw_coverage)
+
+set_target_properties(${LIB_NEURUN_BACKEND_ACL_COMMON} PROPERTIES POSITION_INDEPENDENT_CODE ON)
+set_target_properties(${LIB_NEURUN_BACKEND_ACL_COMMON} PROPERTIES OUTPUT_NAME backend_acl_common)
+
+install(TARGETS ${LIB_NEURUN_BACKEND_ACL_COMMON} DESTINATION lib)
diff --git a/runtimes/neurun/backend/acl_common/Convert.cc b/runtimes/neurun/backend/acl_common/Convert.cc
new file mode 100644
index 000000000..b814587cc
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/Convert.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Convert.h"
+
+#include "Swizzle.h"
+#include "model/DataType.h"
+#include <cpp14/memory.h>
+
+namespace
+{
+
+::arm_compute::DataLayout asDataLayout(::neurun::model::Layout layout)
+{
+ switch (layout)
+ {
+ case ::neurun::model::Layout::NHWC:
+ return ::arm_compute::DataLayout::NHWC;
+ case ::neurun::model::Layout::NCHW:
+ return ::arm_compute::DataLayout::NCHW;
+ default:
+ return ::arm_compute::DataLayout::UNKNOWN;
+ }
+}
+
+} // namespace
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+::arm_compute::TensorShape asTensorShape(const ::neurun::model::Shape &shape,
+ ::neurun::model::Layout frontend_layout,
+ ::neurun::model::Layout backend_layout,
+ bool apply_dim_correction)
+{
+ const uint32_t rank = shape.rank();
+
+ ::arm_compute::TensorShape res{};
+
+ res.set_num_dimensions(rank);
+
+ for (uint32_t axis = 0; axis < rank; ++axis)
+ {
+    // NOTE In some cases, keeping a nominally redundant dimension is required.
+    // For example, input_size is 1 in LSTM. The input-to-input weights
+    // ([num_units, input_size]) of LSTM are used as the weights of a FullyConnected
+    // layer, and a FullyConnected weight tensor must have at least 2 dimensions.
+    // However, if dimension correction were applied to input_to_input_weights with
+    // input_size equal to 1, the shape would collapse to 1-D, making it unusable
+    // as the FullyConnected weights. apply_dim_correction lets callers disable that.
+ res.set(ToARMComputeAxis(rank, axis, frontend_layout, backend_layout).value(), shape.dim(axis),
+ apply_dim_correction);
+ }
+
+ return res;
+}
+
+::arm_compute::Coordinates asTensorCoordinate(const ::neurun::util::Coordinates &coord,
+ ::neurun::model::Layout frontend_layout,
+ ::neurun::model::Layout backend_layout)
+{
+ const uint32_t rank = coord.size();
+
+ ::arm_compute::Coordinates res{};
+
+ res.set_num_dimensions(rank);
+
+ for (uint32_t axis = 0; axis < rank; ++axis)
+ {
+ res.set(ToARMComputeAxis(rank, axis, frontend_layout, backend_layout).value(), coord[axis]);
+ }
+
+ return res;
+}
+
+::arm_compute::DataType asDataType(const ::neurun::model::DataType &type)
+{
+ switch (type)
+ {
+ case ::neurun::model::DataType::FLOAT32:
+ return ::arm_compute::DataType::F32;
+ case ::neurun::model::DataType::INT32:
+ return ::arm_compute::DataType::S32;
+ case ::neurun::model::DataType::UINT32:
+ return ::arm_compute::DataType::U32;
+ case ::neurun::model::DataType::QUANT8_ASYMM:
+ return ::arm_compute::DataType::QASYMM8;
+ case ::neurun::model::DataType::BOOL8:
+ return ::arm_compute::DataType::U8;
+ default:
+ throw std::runtime_error("Not supported, yet");
+ break;
+ }
+}
+
+::arm_compute::QuantizationInfo asQuantizationInfo(const float scale, const int32_t offset)
+{
+ return ::arm_compute::QuantizationInfo(scale, offset);
+}
+
+::arm_compute::TensorInfo asTensorInfo(const ::neurun::model::Shape &shape,
+ const ::neurun::model::TypeInfo &typeInfo,
+ ::neurun::model::Layout frontend_layout,
+ ::neurun::model::Layout backend_layout,
+ bool apply_dim_correction)
+{
+ ::arm_compute::TensorInfo info(
+ asTensorShape(shape, frontend_layout, backend_layout, apply_dim_correction), 1,
+ asDataType(typeInfo.type()), asQuantizationInfo(typeInfo.scale(), typeInfo.offset()));
+ info.set_data_layout(asDataLayout(backend_layout));
+ return info;
+}
+
+::arm_compute::PadStrideInfo asPadStrideInfo(const model::ExplicitPadding &padding,
+ const model::Stride &stride)
+{
+ return ::arm_compute::PadStrideInfo{stride.horizontal,
+ stride.vertical,
+ padding.left,
+ padding.right,
+ padding.top,
+ padding.bottom,
+ ::arm_compute::DimensionRoundingType::FLOOR};
+}
+
+::arm_compute::ActivationLayerInfo
+asActivationLayerInfo(const ::neurun::model::Activation &act_code)
+{
+ switch (act_code)
+ {
+ case ::neurun::model::Activation::NONE:
+ return ::arm_compute::ActivationLayerInfo{};
+ case ::neurun::model::Activation::RELU:
+ return ::arm_compute::ActivationLayerInfo{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::RELU};
+ case ::neurun::model::Activation::RELU1:
+ return ::arm_compute::ActivationLayerInfo{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1.0f, -1.0f};
+ case ::neurun::model::Activation::RELU6:
+ return ::arm_compute::ActivationLayerInfo{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.0f, 0.0f};
+ // Cases for activation of LSTM.
+ case ::neurun::model::Activation::TANH:
+ return ::arm_compute::ActivationLayerInfo{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::TANH, 1.0f, 1.0f};
+ case ::neurun::model::Activation::SIGMOID:
+      // NOTE The sigmoid function is a special case of the Logistic function when L=1, k=1, x0=0.
+      // TODO In the ACL and NNAPI specs, Logistic currently always uses L=1, k=1, x0=0
+      // (i.e. always sigmoid) regardless of the parameter values.
+      // If ACL ever supports a non-sigmoid Logistic, fix the parameter values here.
+ return ::arm_compute::ActivationLayerInfo{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LOGISTIC, 0.0f, 0.0f};
+ default:
+ throw std::runtime_error{"Not supported, yet"};
+ break;
+ }
+}
+
+std::unique_ptr<AclFunction> asAclFunction(std::unique_ptr<::arm_compute::IFunction> &&layer)
+{
+ return nnfw::cpp14::make_unique<AclFunction>(std::move(layer));
+}
+
+::neurun::model::Layout asRuntimeLayout(::arm_compute::DataLayout data_layout)
+{
+ switch (data_layout)
+ {
+ case ::arm_compute::DataLayout::NHWC:
+ return ::neurun::model::Layout::NHWC;
+ case ::arm_compute::DataLayout::NCHW:
+ return ::neurun::model::Layout::NCHW;
+ default:
+ return ::neurun::model::Layout::UNKNOWN;
+ }
+}
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
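A worked example of asTensorShape() (illustrative only): a rank-4 NHWC frontend shape {1, 224, 224, 3} converted for an NCHW backend, tracing ToARMComputeAxis():

//   axis 0 (N = 1)   -> reversed 3 -> ACL dim 3
//   axis 1 (H = 224) -> reversed 2 -> ACL dim 1
//   axis 2 (W = 224) -> reversed 1 -> ACL dim 0
//   axis 3 (C = 3)   -> reversed 0 -> ACL dim 2
//
// yielding arm_compute::TensorShape(224, 224, 3, 1), i.e. (W, H, C, N) --
// exactly an NCHW tensor in ACL's lowest-dimension-first notation.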
diff --git a/runtimes/neurun/backend/acl_common/Convert.h b/runtimes/neurun/backend/acl_common/Convert.h
new file mode 100644
index 000000000..37bb2965b
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/Convert.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_CONVERT_H__
+#define __NEURUN_BACKEND_ACL_COMMON_CONVERT_H__
+
+#include <arm_compute/core/TensorInfo.h>
+#include <arm_compute/core/SubTensorInfo.h>
+#include <arm_compute/core/TensorShape.h>
+
+#include "model/Layout.h"
+#include "model/InternalType.h"
+#include "model/Operand.h"
+#include "model/Shape.h"
+#include "model/TypeInfo.h"
+#include "misc/feature/Shape.h"
+#include "misc/kernel/Shape.h"
+
+#include "util/Padding.h"
+#include "util/Coordinates.h"
+
+#include "AclFunction.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+::arm_compute::TensorShape asTensorShape(const ::neurun::model::Shape &shape,
+ ::neurun::model::Layout frontend_layout,
+ ::neurun::model::Layout backend_layout,
+ bool apply_dim_correction = true);
+::arm_compute::Coordinates asTensorCoordinate(const ::neurun::util::Coordinates &coord,
+ ::neurun::model::Layout frontend_layout,
+ ::neurun::model::Layout backend_layout);
+::arm_compute::DataType asDataType(const ::neurun::model::DataType &type);
+::arm_compute::TensorInfo asTensorInfo(const ::neurun::model::Shape &shape,
+ const ::neurun::model::TypeInfo &typeInfo,
+ ::neurun::model::Layout frontend_layout,
+ ::neurun::model::Layout backend_layout,
+ bool apply_dim_correction = true);
+
+::arm_compute::PadStrideInfo asPadStrideInfo(const model::ExplicitPadding &padding,
+ const model::Stride &stride);
+
+::arm_compute::ActivationLayerInfo
+asActivationLayerInfo(const ::neurun::model::Activation &act_code);
+
+std::unique_ptr<AclFunction> asAclFunction(std::unique_ptr<::arm_compute::IFunction> &&layer);
+
+::neurun::model::Layout asRuntimeLayout(::arm_compute::DataLayout data_layout);
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_CONVERT_H__
diff --git a/runtimes/neurun/backend/acl_common/IACLTensor.cc b/runtimes/neurun/backend/acl_common/IACLTensor.cc
new file mode 100644
index 000000000..89972e267
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/IACLTensor.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IACLTensor.h"
+#include "Convert.h"
+#include "Swizzle.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+size_t IACLTensor::num_dimensions() const
+{
+ throw std::runtime_error("No definition of num_dimensions()");
+ return 0;
+}
+
+size_t IACLTensor::dimension(size_t index) const
+{
+  // Assume that the front is the higher dimension.
+  // e.g. N: 0, C: 1, H: 2, W: 3 for NCHW layout
+  // NOTE Dim correction must not have been applied to this tensor
+ assert(num_dimensions() > index);
+ const ARMComputeAxis reversed{(static_cast<uint32_t>(num_dimensions() - index) - 1)};
+ return info()->dimension(reversed.value());
+}
+
+size_t IACLTensor::calcOffset(const neurun::util::Coordinates &coords) const
+{
+ const auto rank = coords.size();
+ ::arm_compute::Coordinates acl_coords;
+ for (size_t i = 0; i < rank; ++i)
+ {
+ acl_coords.set(acl_common::ToARMComputeAxis(rank, i).value(), coords[i]);
+ }
+
+ return info()->offset_element_in_bytes(acl_coords);
+}
+
+model::Layout IACLTensor::layout() const
+{
+ return acl_common::asRuntimeLayout(info()->data_layout());
+}
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
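A worked example of the index reversal in dimension() for a rank-4 tensor (illustrative only):

//   dimension(0) -> info()->dimension(3) // outermost runtime dim, e.g. N
//   dimension(3) -> info()->dimension(0) // innermost runtime dim, e.g. W
//
// matching ACL's lowest-dimension-first convention. num_dimensions() must be
// overridden by a concrete tensor (CLTensor/CLSubTensor store the rank);
// otherwise the base implementation above throws.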
diff --git a/runtimes/neurun/backend/acl_common/IACLTensor.h b/runtimes/neurun/backend/acl_common/IACLTensor.h
new file mode 100644
index 000000000..1dc79f480
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/IACLTensor.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_I_ACL_TENSOR_H__
+#define __NEURUN_BACKEND_ACL_COMMON_I_ACL_TENSOR_H__
+
+#include <backend/operand/ITensor.h>
+#include <arm_compute/core/ITensor.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+class IACLTensor : public operand::ITensor
+{
+public:
+ IACLTensor() = default;
+ IACLTensor(const IACLTensor &) = delete;
+ IACLTensor &operator=(const IACLTensor &) = delete;
+ IACLTensor(IACLTensor &&) = default;
+ IACLTensor &operator=(IACLTensor &&) = default;
+
+public:
+ uint8_t *buffer() const final { return handle()->buffer(); }
+ size_t total_size() const final { return info()->total_size(); }
+ size_t dimension(size_t index) const final;
+ size_t num_dimensions() const override;
+ size_t calcOffset(const neurun::util::Coordinates &coords) const final;
+ model::Layout layout() const final;
+ bool has_padding() const override { return info()->has_padding(); }
+
+public:
+ virtual const arm_compute::ITensor *handle() const = 0;
+ virtual arm_compute::ITensor *handle() = 0;
+
+ const arm_compute::ITensorInfo *info() const { return handle()->info(); }
+ arm_compute::ITensorInfo *info() { return handle()->info(); }
+
+ arm_compute::DataType data_type() const { return info()->data_type(); }
+};
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_I_ACL_TENSOR_H__
diff --git a/runtimes/neurun/backend/acl_common/Swizzle.h b/runtimes/neurun/backend/acl_common/Swizzle.h
new file mode 100644
index 000000000..dfa5daba9
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/Swizzle.h
@@ -0,0 +1,161 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_SWIZZLE_H__
+#define __NEURUN_BACKEND_ACL_COMMON_SWIZZLE_H__
+
+#include <cassert>
+#include <model/Layout.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+class ARMComputeAxis
+{
+public:
+ ARMComputeAxis() = default;
+
+public:
+ explicit ARMComputeAxis(uint32_t value) : _value{value}
+ {
+ // DO NOTHING
+ }
+
+public:
+ uint32_t value(void) const { return _value; }
+
+private:
+ uint32_t _value;
+};
+
+// Convert an axis index to ACL order
+inline ARMComputeAxis ToARMComputeAxis(uint32_t rank, uint32_t axis,
+ const model::Layout org_layout = model::Layout::UNKNOWN,
+ const model::Layout acl_layout = model::Layout::UNKNOWN)
+{
+ assert(rank > axis);
+
+ const ARMComputeAxis reversed{(rank - axis) - 1};
+
+ if (rank >= 4 && org_layout == model::Layout::NHWC && acl_layout == model::Layout::NCHW)
+ {
+ // NHWC -> WHCN
+ // DEPTH
+ if (0 == reversed.value())
+ {
+ return ARMComputeAxis{2};
+ }
+ // WIDTH
+ if (1 == reversed.value())
+ {
+ return ARMComputeAxis{0};
+ }
+ // HEIGHT
+ if (2 == reversed.value())
+ {
+ return ARMComputeAxis{1};
+ }
+ }
+ if (rank >= 4 && org_layout == model::Layout::NCHW && acl_layout == model::Layout::NHWC)
+ {
+ // NCHW -> CWHN
+ // WIDTH
+ if (0 == reversed.value())
+ {
+ return ARMComputeAxis{1};
+ }
+ // HEIGHT
+ if (1 == reversed.value())
+ {
+ return ARMComputeAxis{2};
+ }
+ // DEPTH
+ if (2 == reversed.value())
+ {
+ return ARMComputeAxis{0};
+ }
+ }
+
+ return reversed;
+}
+
+inline ::arm_compute::Coordinates
+getARMComputeAxises(uint32_t rank, const model::Layout org_layout = model::Layout::UNKNOWN,
+ const model::Layout acl_layout = model::Layout::UNKNOWN)
+{
+ ::arm_compute::Coordinates res{};
+
+ res.set_num_dimensions(rank);
+
+ for (uint32_t axis = 0; axis < rank; ++axis)
+ {
+ res.set(axis, ToARMComputeAxis(rank, axis, org_layout, acl_layout).value());
+ }
+
+ return res;
+}
+
+// Restructure a runtime permutation vector into an ACL permutation vector
+inline ::arm_compute::PermutationVector
+getARMComputePermutationVector(uint32_t rank, const std::vector<int32_t> runtime_pv,
+ const model::Layout org_layout = model::Layout::UNKNOWN,
+ const model::Layout acl_layout = model::Layout::UNKNOWN)
+{
+  // Ranks up to 4 are supported
+ assert(rank <= 4);
+ assert(runtime_pv.size() > 0);
+
+ int new_pv[4] = {0};
+ ::arm_compute::Coordinates axises = getARMComputeAxises(rank, org_layout, acl_layout);
+
+ for (uint32_t i = 0; i < rank; ++i)
+ {
+ new_pv[axises[i]] = ToARMComputeAxis(rank, runtime_pv[i], org_layout, acl_layout).value();
+ }
+
+ ::arm_compute::PermutationVector ACL_PV =
+ ::arm_compute::PermutationVector{new_pv[0], new_pv[1], new_pv[2], new_pv[3]};
+ ACL_PV.set_num_dimensions(rank);
+
+ return ACL_PV;
+}
+
+template <typename T>
+inline T ReorderBits(T in, size_t numOfBits,
+ const model::Layout org_layout = model::Layout::UNKNOWN,
+ const model::Layout acl_layout = model::Layout::UNKNOWN)
+{
+ assert(numOfBits > 0);
+ T out = 0;
+ for (int32_t i = numOfBits - 1; i >= 0; --i)
+ {
+ const uint32_t toShift =
+ numOfBits - ToARMComputeAxis(numOfBits, i, org_layout, acl_layout).value() - 1;
+ out += ((in & 1) << toShift);
+ in >>= 1;
+ }
+ return out;
+}
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_SWIZZLE_H__
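A worked example of ToARMComputeAxis() for rank 4 with an NCHW frontend layout and an NHWC backend layout (illustrative only):

//   axis 0 (N) -> reversed 3 -> ACL dim 3
//   axis 1 (C) -> reversed 2 -> ACL dim 0
//   axis 2 (H) -> reversed 1 -> ACL dim 2
//   axis 3 (W) -> reversed 0 -> ACL dim 1
//
// i.e. the frontend NCHW tensor lands as (C, W, H, N) in ACL's
// lowest-dimension-first notation -- the "NCHW -> CWHN" mapping noted in
// the function body.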
diff --git a/runtimes/neurun/backend/acl_common/TemplTensorBuilder.h b/runtimes/neurun/backend/acl_common/TemplTensorBuilder.h
new file mode 100644
index 000000000..df9fa8c2a
--- /dev/null
+++ b/runtimes/neurun/backend/acl_common/TemplTensorBuilder.h
@@ -0,0 +1,617 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_COMMON_TEMPL_TENSOR_BUILDER_H__
+#define __NEURUN_BACKEND_ACL_COMMON_TEMPL_TENSOR_BUILDER_H__
+
+#include <memory>
+#include <queue>
+
+#include <arm_compute/core/Types.h>
+#include <backend/ITensorBuilder.h>
+#include "model/OperandIndexMap.h"
+#include "AclTensorManager.h"
+#include "cpp14/memory.h"
+#include <util/Utils.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+enum class UsesType
+{
+ FIRST,
+ LAST
+};
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+class TemplTensorBuilder : public ITensorBuilder
+{
+public:
+ using T_AclTensorManager = AclTensorManager<T_ITensor, T_Tensor, T_SubTensor, T_Object>;
+
+ TemplTensorBuilder(T_AclTensorManager *tensor_mgr);
+
+ /**
+   * @brief Register tensor information to allocate on the ACL backend
+   * @param[in] ind             Operand index
+   * @param[in] info            Tensor information
+   * @param[in] frontend_layout Data layout of the tensor on the frontend
+   * @param[in] backend_layout  Data layout of the tensor on this backend
+   * @param[in] as_const        Whether the operand is a constant
+ */
+ void registerTensorInfo(const model::OperandIndex &ind, const model::OperandInfo &info,
+ model::Layout frontend_layout, model::Layout backend_layout,
+ bool as_const) override;
+ /**
+   * @brief Register subtensor information to allocate on the ACL backend
+   * @param[in] ind Operand index
+   * @param[in] info Subtensor information
+ */
+ void registerSubTensorInfo(const model::OperandIndex &ind,
+ const compiler::SubTensorInfo &info) override;
+
+ void notifyFirstUse(const model::OperandIndex &) override;
+ void notifyLastUse(const model::OperandIndex &) override;
+
+ void prepare(void) override;
+ void allocate(void) override; // TODO Remove this
+ void allocateConsts() override;
+ void allocateNonconsts() override;
+ void postFunctionPrepare() override;
+ void finalize() override;
+
+ std::shared_ptr<::neurun::backend::operand::ITensor>
+ tensorAt(const model::OperandIndex &ind) override;
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind) override;
+ void iterate(const IterateFunction &fn) override;
+
+ void preVisit(const model::Operation &node) override;
+ void postVisit(const model::Operation &node) override;
+
+ std::unique_ptr<ITensorManager> releaseTensorManager(void) override;
+
+ std::shared_ptr<T_ITensor> at(const ::neurun::model::OperandIndex &ind);
+ /**
+ * @brief Check child tensor is allocated as subtensor of parent tensor
+ * @param[in] parent Index of parent
+ * @param[in] child Index of child
+ * @return @c true if child is allocated as subtensor of parent, otherwise @c false
+ */
+ bool isSubTensorOf(const model::OperandIndex &parent, const model::OperandIndex &child);
+
+ void dimCorrection(const model::OperandIndex &index, bool apply_dim_correction);
+
+ T_AclTensorManager *acl_tensor_manager(void) { return _tensor_mgr.get(); }
+
+private:
+ void buildTensors(void);
+ void buildSubtensors(void);
+ void validate(void);
+ model::OperandIndex findRootParent(model::OperandIndex index);
+
+private:
+ model::OperandIndexMap<model::OperandInfo> _tensor_info_map;
+ model::OperandIndexMap<compiler::SubTensorInfo> _subtensor_info_map;
+ model::OperandIndexMap<bool> _apply_dim_correction_map;
+ model::OperandIndexMap<std::pair<model::Layout, model::Layout>> _tensor_layouts_map;
+
+ std::unique_ptr<T_AclTensorManager> _tensor_mgr;
+ model::OperandIndexSequence _constants;
+
+ // TODO Consider dividing TensorBuilder into Linear and others
+ const std::string _executor_str;
+
+ // for linear executor
+ std::queue<std::pair<UsesType, model::OperandIndex>> _uses_queue;
+ uint32_t _first_uses_num;
+ model::OperandIndexMap<bool> _first_uses_visit;
+
+ // for subtensors
+ model::OperandIndexMap<uint32_t> _parent_def;
+ model::OperandIndexMap<uint32_t> _parent_uses;
+};
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#include <cassert>
+#include <stack>
+
+#include "Convert.h"
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_common
+{
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::TemplTensorBuilder(
+ T_AclTensorManager *tensor_mgr)
+ : _tensor_mgr{tensor_mgr}, _executor_str(util::getConfigString(util::config::EXECUTOR)),
+ _first_uses_num(0)
+{
+ assert(_tensor_mgr);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::registerTensorInfo(
+ const model::OperandIndex &ind, const model::OperandInfo &info, model::Layout frontend_layout,
+ model::Layout backend_layout, bool as_const)
+{
+ assert(_tensor_mgr->constTensors().size() == 0);
+ assert(_tensor_mgr->nonconstTensors().size() == 0);
+
+ _tensor_info_map.emplace(ind, info);
+ _apply_dim_correction_map.emplace(ind, true);
+ _tensor_layouts_map.insert({ind, std::make_pair(frontend_layout, backend_layout)});
+ if (as_const)
+ _constants.append(ind);
+
+ assert(_first_uses_visit.find(ind) == _first_uses_visit.end());
+ _first_uses_visit[ind] = false;
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::registerSubTensorInfo(
+ const model::OperandIndex &ind, const compiler::SubTensorInfo &info)
+{
+ assert(_tensor_mgr->constTensors().size() == 0);
+ assert(_tensor_mgr->nonconstTensors().size() == 0);
+
+ _subtensor_info_map.emplace(ind, info);
+ _apply_dim_correction_map.emplace(ind, true);
+
+ assert(_first_uses_visit.find(ind) == _first_uses_visit.end());
+ _first_uses_visit[ind] = false;
+
+ const auto &parent_ind = info.parent();
+
+ // parent_def
+ _parent_def[parent_ind] = 1;
+
+ // parent_use
+ if (_parent_uses.find(parent_ind) == _parent_uses.end())
+    _parent_uses[parent_ind] = 1; // 1 means the parent itself is counted
+ _parent_uses[parent_ind]++;
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::notifyFirstUse(
+ const model::OperandIndex &ind)
+{
+ _first_uses_num++;
+ _uses_queue.emplace(UsesType::FIRST, ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::notifyLastUse(
+ const model::OperandIndex &ind)
+{
+ _uses_queue.emplace(UsesType::LAST, ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::prepare(void)
+{
+ buildTensors();
+ buildSubtensors();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::allocate(void)
+{
+ allocateConsts();
+ allocateNonconsts();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::allocateConsts(void)
+{
+ assert(_constants.size() == _tensor_mgr->constTensors().size());
+ _tensor_mgr->allocateConsts();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::allocateNonconsts(void)
+{
+ assert(_tensor_info_map.size() == _tensor_mgr->nonconstTensors().size() + _constants.size());
+ _tensor_mgr->allocateNonconsts();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::postFunctionPrepare(void)
+{
+ _tensor_mgr->tryDeallocConstants();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::finalize(void)
+{
+ validate();
+ _tensor_mgr->allocateInternalBufferManager();
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::shared_ptr<::neurun::backend::operand::ITensor>
+TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::tensorAt(
+ const model::OperandIndex &ind)
+{
+ return _tensor_mgr->at(ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::shared_ptr<backend::operand::IObject>
+TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::wrapTensor(
+ const model::OperandIndex &ind)
+{
+ return _tensor_mgr->wrapTensor(ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::iterate(
+ const IterateFunction &fn)
+{
+ _tensor_mgr->iterate(fn);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::shared_ptr<T_ITensor> TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::at(
+ const ::neurun::model::OperandIndex &ind)
+{
+ return _tensor_mgr->at(ind);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+bool TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::isSubTensorOf(
+ const model::OperandIndex &parent, const model::OperandIndex &child)
+{
+ if (_subtensor_info_map.find(child) == _subtensor_info_map.end())
+ {
+ return false;
+ }
+
+ auto &subtensors = _tensor_mgr->nonconstSubtensors();
+ if (subtensors.find(child) == subtensors.end())
+ {
+ return false;
+ }
+
+ if (_subtensor_info_map.at(child).parent() != parent)
+ {
+ return false;
+ }
+
+ return true;
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::dimCorrection(
+ const model::OperandIndex &index, bool apply_dim_correction)
+{
+ _apply_dim_correction_map[index] = apply_dim_correction;
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+std::unique_ptr<ITensorManager>
+TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::releaseTensorManager(void)
+{
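+  // NOTE Ownership of the tensor manager is transferred to the caller, so
+  //      this builder can no longer access its tensors afterwards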
+ return std::move(_tensor_mgr);
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::buildTensors(void)
+{
+ assert(_tensor_mgr->constTensors().size() == 0);
+ assert(_tensor_mgr->nonconstTensors().size() == 0);
+
+ for (auto &entry : _tensor_info_map)
+ {
+ auto ind = entry.first;
+ const auto &info = entry.second;
+    // NOTE A SubTensor's layout must be the same as the layout of its parent tensor
+ const auto &root_parent = findRootParent(ind);
+ const auto &frontend_layout = _tensor_layouts_map[root_parent].first;
+ const auto &backend_layout = _tensor_layouts_map[root_parent].second;
+ auto tensor_info = asTensorInfo(info.shape(), info.typeInfo(), frontend_layout, backend_layout,
+ _apply_dim_correction_map[ind]);
+ _tensor_mgr->buildTensor(ind, tensor_info, info.shape().rank(), _constants.contains(ind));
+ }
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::buildSubtensors(void)
+{
+  // TODO Handle SubTensor(subsumption)
+  //      Currently this TemplTensorBuilder does not have subsumption info yet.
+  //      An allocated subtensor is mapped into _subtensors instead of _tensors.
+ assert(_tensor_mgr->nonconstSubtensors().size() == 0);
+
+  // To make a subtensor, its parent tensor must be made first.
+  // A stack is used to enforce this ordering:
+  // 1) Push one subtensor index onto the stack (iterating over subtensors)
+  // 2) If the tensor at the top of the stack is already made, pop it and go to 4)
+  // 3) Otherwise, check its parent tensor
+  //    3-1) If the parent tensor is already made, make the child tensor,
+  //         pop it, and go to 4)
+  //    3-2) If the parent tensor is not made yet, push the parent tensor
+  //         index onto the stack and go to 4)
+  // 4) If the stack is empty, go back to 1); otherwise go back to 2)
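+  //
+  // For example, given a chain c -> p -> r where c is a subtensor of p, p is
+  // a subtensor of the root tensor r, and r was already built by buildTensors():
+  // push c; p is not built yet, so push p; r is found, so p is built and
+  // popped; now p is found, so c is built and popped, emptying the stack.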
+ auto &subtensors = _tensor_mgr->nonconstSubtensors();
+ for (auto &entry : _subtensor_info_map)
+ {
+ model::OperandIndex ind = entry.first;
+
+ std::stack<model::OperandIndex> stack;
+ stack.push(ind);
+
+ while (!stack.empty())
+ {
+ const auto current = stack.top();
+ const auto &info = _subtensor_info_map.at(current);
+
+ // Already generated SubTensor
+ if (subtensors.find(current) != subtensors.end())
+ {
+ stack.pop();
+ continue;
+ }
+
+ auto parent = info.parent();
+ std::shared_ptr<T_ITensor> parent_tensor = _tensor_mgr->findTensorAsParent(parent);
+ if (!parent_tensor)
+ {
+ // Cannot find allocated parent tensor: allocate parent first
+ assert(_subtensor_info_map.find(parent) != _subtensor_info_map.end());
+ stack.push(parent);
+ continue;
+ }
+ assert(parent_tensor != nullptr);
+
+      // The child's type must be the same as its parent's
+ assert(info.type().offset() == parent_tensor->info()->quantization_info().offset);
+ assert(info.type().scale() == parent_tensor->info()->quantization_info().scale);
+ assert(asDataType(info.type().type()) == parent_tensor->info()->data_type());
+
+      // NOTE A SubTensor's layout must be the same as the layout of its parent tensor
+ const auto &root_parent = findRootParent(parent);
+ const auto &frontend_layout = _tensor_layouts_map[root_parent].first;
+ const auto &backend_layout = _tensor_layouts_map[root_parent].second;
+
+ auto shape = asTensorShape(info.shape(), frontend_layout, backend_layout,
+ _apply_dim_correction_map[current]);
+ ::arm_compute::Coordinates coordinates =
+ asTensorCoordinate(info.offset(), frontend_layout, backend_layout);
+ _tensor_mgr->buildSubtensor(parent, current, shape, coordinates, info.shape().rank(), true);
+ stack.pop();
+ }
+ }
+}
+
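+// NOTE preVisit() and postVisit() below consume the FIFO queue filled by
+// notifyFirstUse()/notifyLastUse(): a FIRST entry starts an operand's
+// lifetime and a LAST entry finishes it. For subtensors, the lifetime of the
+// parent operand is managed instead, since a subtensor shares its parent's
+// memory.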
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::preVisit(
+ const model::Operation &node)
+{
+  // For now, executors other than Linear do not need this step
+ if (_executor_str != "Linear")
+ {
+ return;
+ }
+
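+  // Starts the lifetime of the given operand; for a subtensor, the request
+  // is forwarded (recursively) to its parent so that a parent's lifetime
+  // starts before any of its subtensors is defined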
+ std::function<void(const model::OperandIndex &ind)> def_handler =
+ [this, &def_handler](const model::OperandIndex &ind) {
+ bool is_subtensor = _subtensor_info_map.find(ind) != _subtensor_info_map.end();
+ bool is_parent = _parent_def.find(ind) != _parent_def.end();
+ if (!is_subtensor && !is_parent)
+ {
+ _tensor_mgr->startLifetime(ind);
+ return;
+ }
+
+ if (is_parent)
+ {
+ if (_parent_def[ind] == 0)
+ return;
+
+ _parent_def[ind] = 0;
+
+ if (is_subtensor)
+ {
+ const auto &it = _parent_def.find(ind);
+ _parent_def.erase(it);
+ def_handler(ind);
+ }
+ else
+ {
+ _tensor_mgr->startLifetime(ind);
+ }
+ }
+ else if (is_subtensor)
+ {
+ const model::OperandIndex &parent_ind = _subtensor_info_map.at(ind).parent();
+ if (_parent_def[parent_ind] == 0)
+ return;
+ def_handler(parent_ind);
+ }
+ };
+
+ // See #5642
+ model::OperandIndexMap<bool> outputs_map;
+ for (const auto &ind : node.getOutputs())
+ {
+ assert(_first_uses_visit.find(ind) != _first_uses_visit.end());
+ outputs_map[ind] = _first_uses_visit[ind];
+ }
+
+  // Check whether all elements of outputs_map are true
+ auto outputs_map_all_check = [&outputs_map]() {
+ return std::all_of(outputs_map.begin(), outputs_map.end(),
+ [](std::pair<const model::OperandIndex, bool> it) { return it.second; });
+ };
+
+ std::pair<UsesType, model::OperandIndex> peak;
+ while (!outputs_map_all_check() && (peak = _uses_queue.front()).first == UsesType::FIRST)
+ {
+ _uses_queue.pop();
+ _first_uses_num--;
+
+ const auto &popped_idx = peak.second;
+ def_handler(popped_idx);
+
+ outputs_map[popped_idx] = true;
+ _first_uses_visit[popped_idx] = true;
+ }
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::postVisit(
+ const model::Operation &node)
+{
+  // For now, executors other than Linear do not need this step
+ if (_executor_str != "Linear")
+ {
+ return;
+ }
+
+ std::function<void(const model::OperandIndex &ind)> use_handler =
+ [this, &use_handler](const model::OperandIndex &ind) {
+ bool is_subtensor = _subtensor_info_map.find(ind) != _subtensor_info_map.end();
+ bool is_parent = _parent_uses.find(ind) != _parent_uses.end();
+ if (!is_subtensor && !is_parent)
+ {
+ _tensor_mgr->finishLifetime(ind);
+ return;
+ }
+
+      // This handler is executed by the linear executor, so the parent
+      // operand is always finished after all of its subtensors
+ if (is_parent)
+ {
+ --_parent_uses[ind];
+ assert(_parent_uses[ind] == 0);
+
+ if (is_subtensor)
+ {
+ const auto &it = _parent_uses.find(ind);
+ _parent_uses.erase(it);
+ use_handler(ind);
+ }
+ else
+ {
+ _tensor_mgr->finishLifetime(ind);
+ }
+ }
+ else if (is_subtensor)
+ {
+ const model::OperandIndex &parent_ind = _subtensor_info_map.at(ind).parent();
+ --_parent_uses[parent_ind];
+ assert(_parent_uses[parent_ind] > 0);
+ }
+ };
+
+ // See #5642
+ const auto &inputs = node.getInputs();
+ std::pair<UsesType, model::OperandIndex> peak;
+ while ((peak = _uses_queue.front()).first == UsesType::LAST)
+ {
+ const auto &popped_idx = peak.second;
+ if (inputs.contains(popped_idx))
+ {
+ _uses_queue.pop();
+ use_handler(popped_idx);
+ }
+ else
+ {
+ break;
+ }
+ }
+
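+  // Once all FIRST uses have been consumed, only LAST uses can remain in the
+  // queue; flush them all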
+ if (_first_uses_num == 0)
+ {
+ while (!_uses_queue.empty())
+ {
+ peak = _uses_queue.front();
+ assert(peak.first == UsesType::LAST);
+
+ _uses_queue.pop();
+
+ use_handler(peak.second);
+ }
+ }
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+void TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::validate(void)
+{
+  // For now, executors other than Linear do not need this step
+ if (_executor_str != "Linear")
+ {
+ return;
+ }
+
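+  // The checks below are assertions only, so this function is effectively a
+  // no-op in release builds (UNUSED_RELEASE silences unused-variable warnings)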
+  for (auto it : _tensor_info_map)
+  {
+    assert(_first_uses_visit.find(it.first) != _first_uses_visit.end());
+    assert(_first_uses_visit[it.first]);
+    UNUSED_RELEASE(it);
+  }
+
+  for (auto it : _subtensor_info_map)
+  {
+    assert(_first_uses_visit.find(it.first) != _first_uses_visit.end());
+    assert(_first_uses_visit[it.first]);
+    UNUSED_RELEASE(it);
+  }
+
+  for (auto it : _tensor_layouts_map)
+  {
+    assert(_first_uses_visit.find(it.first) != _first_uses_visit.end());
+    assert(_first_uses_visit[it.first]);
+    UNUSED_RELEASE(it);
+  }
+
+ assert(_uses_queue.size() == 0);
+ assert(_first_uses_num == 0);
+
+ assert(std::all_of(
+ _parent_def.begin(), _parent_def.end(),
+ [](std::pair<const model::OperandIndex, uint32_t> it) { return it.second == 0; }));
+
+ assert(std::all_of(
+ _parent_uses.begin(), _parent_uses.end(),
+ [](std::pair<const model::OperandIndex, uint32_t> it) { return it.second == 0; }));
+}
+
+template <typename T_ITensor, typename T_Tensor, typename T_SubTensor, typename T_Object>
+model::OperandIndex TemplTensorBuilder<T_ITensor, T_Tensor, T_SubTensor, T_Object>::findRootParent(
+ model::OperandIndex ind)
+{
+ if (_subtensor_info_map.find(ind) == _subtensor_info_map.end())
+ return ind;
+
+ const auto &parent_ind = _subtensor_info_map.at(ind).parent();
+ return findRootParent(parent_ind);
+}
+
+} // namespace acl_common
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_COMMON_TEMPL_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/backend/acl_neon/Backend.h b/runtimes/neurun/backend/acl_neon/Backend.h
new file mode 100644
index 000000000..38769e5d4
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/Backend.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_BACKEND_H__
+#define __NEURUN_BACKEND_ACL_NEON_BACKEND_H__
+
+#include <memory>
+#include <backend/Backend.h>
+#include <model/Operands.h>
+
+#include "Config.h"
+#include "ConstantInitializer.h"
+#include "KernelGenerator.h"
+#include "ShapeFixer.h"
+#include "TensorManager.h"
+#include "backend/CustomKernelRegistry.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+class Backend : public ::neurun::backend::Backend
+{
+public:
+ Backend() : _config{std::make_shared<Config>()} {}
+
+ std::shared_ptr<IConfig> config() const override { return _config; }
+
+ std::unique_ptr<BackendContext>
+ newContext(const model::Operands &operands,
+ const std::shared_ptr<custom::KernelRegistry> &) const override
+ {
+ auto tensor_builder = std::make_shared<TensorBuilder>(createTensorManager());
+ return std::unique_ptr<BackendContext>{new BackendContext{
+ this, tensor_builder, std::make_shared<ConstantInitializer>(operands, tensor_builder),
+ std::make_shared<KernelGenerator>(operands, tensor_builder),
+ std::make_shared<ShapeFixer>(operands, tensor_builder)}};
+ }
+
+private:
+ std::shared_ptr<IConfig> _config;
+};
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_BACKEND_H__
diff --git a/runtimes/neurun/backend/acl_neon/CMakeLists.txt b/runtimes/neurun/backend/acl_neon/CMakeLists.txt
new file mode 100644
index 000000000..bb2334366
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/CMakeLists.txt
@@ -0,0 +1,21 @@
+# Skip this backend on architectures where ARM Compute Library is not found
+nnfw_find_package(ARMCompute QUIET)
+if(NOT ARMCompute_FOUND)
+ return()
+endif(NOT ARMCompute_FOUND)
+
+set(LIB_NEURUN_BACKEND_ACL_NEON neurun_backend_acl_neon)
+
+file(GLOB_RECURSE SOURCES "*.cc")
+
+add_library(${LIB_NEURUN_BACKEND_ACL_NEON} SHARED ${SOURCES})
+
+target_include_directories(${LIB_NEURUN_BACKEND_ACL_NEON} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_NEON} PRIVATE neurun_core)
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_NEON} PRIVATE ${LIB_NEURUN_BACKEND_ACL_COMMON})
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_NEON} PRIVATE nnfw_common)
+target_link_libraries(${LIB_NEURUN_BACKEND_ACL_NEON} PRIVATE nnfw_coverage)
+
+set_target_properties(${LIB_NEURUN_BACKEND_ACL_NEON} PROPERTIES OUTPUT_NAME backend_acl_neon)
+
+install(TARGETS ${LIB_NEURUN_BACKEND_ACL_NEON} DESTINATION lib)
diff --git a/runtimes/neurun/backend/acl_neon/Config.cc b/runtimes/neurun/backend/acl_neon/Config.cc
new file mode 100644
index 000000000..8f4a8e7f8
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/Config.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Config.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+void Config::initialize()
+{
+ // DO NOTHING
+}
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_neon/Config.h b/runtimes/neurun/backend/acl_neon/Config.h
new file mode 100644
index 000000000..0656fa46b
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/Config.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_CONFIG_H__
+#define __NEURUN_BACKEND_ACL_NEON_CONFIG_H__
+
+#include <backend/IConfig.h>
+#include <cpp14/memory.h>
+#include <util/ITimer.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+class Config : public IConfig
+{
+public:
+ std::string id() override { return "acl_neon"; }
+ void initialize() override;
+ bool SupportSubTensorAlloc() override { return true; }
+
+ std::unique_ptr<util::ITimer> timer() override
+ {
+ return nnfw::cpp14::make_unique<util::CPUTimer>();
+ }
+};
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_CONFIG_H__
diff --git a/runtimes/neurun/backend/acl_neon/ConstantInitializer.cc b/runtimes/neurun/backend/acl_neon/ConstantInitializer.cc
new file mode 100644
index 000000000..d83b7c01b
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/ConstantInitializer.cc
@@ -0,0 +1,189 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConstantInitializer.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+ConstantInitializer::ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _operands{operands}, _tensor_builder{tensor_builder}
+{
+ // DO NOTHING
+}
+
+void ConstantInitializer::run()
+{
+ for (const auto &it : _init_map)
+ {
+ const auto &ind = it.first;
+ const auto &fn = it.second;
+
+ const auto &model_obj = _operands.at(ind);
+ auto tensor_obj = _tensor_builder->wrapTensor(ind);
+ fn(model_obj, *tensor_obj);
+ }
+
+ _init_map.clear();
+}
+
+void ConstantInitializer::visit(const model::operation::Conv2DNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::Conv2DNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerPermuteInitializer(kernel_index, kernel_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::Conv2DNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::DepthwiseConv2DNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerPermuteInitializer(kernel_index, kernel_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::FullyConnectedNode &node)
+{
+ const auto &weight_index = node.getInputs().at(model::operation::FullyConnectedNode::WEIGHT);
+ const auto &weight_obj = _operands.at(weight_index);
+ registerCopyInitializer(weight_index, weight_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::FullyConnectedNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::LSTMNode &node)
+{
+ const auto &input_to_input_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_INPUT_WEIGHTS);
+ const auto &input_to_input_weights_obj = _operands.at(input_to_input_weights_index);
+ registerCopyInitializer(input_to_input_weights_index, input_to_input_weights_obj);
+
+ const auto &input_to_forget_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_FORGET_WEIGHTS);
+ const auto &input_to_forget_weights_obj = _operands.at(input_to_forget_weights_index);
+ registerCopyInitializer(input_to_forget_weights_index, input_to_forget_weights_obj);
+
+ const auto &input_to_cell_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_CELL_WEIGHTS);
+ const auto &input_to_cell_weights_obj = _operands.at(input_to_cell_weights_index);
+ registerCopyInitializer(input_to_cell_weights_index, input_to_cell_weights_obj);
+
+ const auto &input_to_output_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_TO_OUTPUT_WEIGHTS);
+ const auto &input_to_output_weights_obj = _operands.at(input_to_output_weights_index);
+ registerCopyInitializer(input_to_output_weights_index, input_to_output_weights_obj);
+
+ const auto &recurrent_to_input_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_INPUT_WEIGHTS);
+ const auto &recurrent_to_input_weights_obj = _operands.at(recurrent_to_input_weights_index);
+ registerCopyInitializer(recurrent_to_input_weights_index, recurrent_to_input_weights_obj);
+
+ const auto &recurrent_to_forget_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_FORGET_WEIGHTS);
+ const auto &recurrent_to_forget_weights_obj = _operands.at(recurrent_to_forget_weights_index);
+ registerCopyInitializer(recurrent_to_forget_weights_index, recurrent_to_forget_weights_obj);
+
+ const auto &recurrent_to_cell_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_CELL_WEIGHTS);
+ const auto &recurrent_to_cell_weights_obj = _operands.at(recurrent_to_cell_weights_index);
+ registerCopyInitializer(recurrent_to_cell_weights_index, recurrent_to_cell_weights_obj);
+
+ const auto &recurrent_to_output_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::RECURRENT_TO_OUTPUT_WEIGHTS);
+ const auto &recurrent_to_output_weights_obj = _operands.at(recurrent_to_output_weights_index);
+ registerCopyInitializer(recurrent_to_output_weights_index, recurrent_to_output_weights_obj);
+
+ const auto &cell_to_input_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::CELL_TO_INPUT_WEIGHTS);
+ const auto &cell_to_input_weights_obj = _operands.at(cell_to_input_weights_index);
+ registerCopyInitializer(cell_to_input_weights_index, cell_to_input_weights_obj);
+
+ const auto &cell_to_forget_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::CELL_TO_FORGET_WEIGHTS);
+ const auto &cell_to_forget_weights_obj = _operands.at(cell_to_forget_weights_index);
+ registerCopyInitializer(cell_to_forget_weights_index, cell_to_forget_weights_obj);
+
+ const auto &cell_to_output_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::CELL_TO_OUTPUT_WEIGHTS);
+ const auto &cell_to_output_weights_obj = _operands.at(cell_to_output_weights_index);
+ registerCopyInitializer(cell_to_output_weights_index, cell_to_output_weights_obj);
+
+ const auto &input_gate_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::INPUT_GATE_BIAS);
+ const auto &input_gate_bias_obj = _operands.at(input_gate_bias_index);
+ registerCopyInitializer(input_gate_bias_index, input_gate_bias_obj);
+
+ const auto &forget_gate_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::FORGET_GATE_BIAS);
+ const auto &forget_gate_bias_obj = _operands.at(forget_gate_bias_index);
+ registerCopyInitializer(forget_gate_bias_index, forget_gate_bias_obj);
+
+ const auto &output_gate_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::OUTPUT_GATE_BIAS);
+ const auto &output_gate_bias_obj = _operands.at(output_gate_bias_index);
+ registerCopyInitializer(output_gate_bias_index, output_gate_bias_obj);
+
+ const auto &projection_weights_index =
+ node.getInputs().at(model::operation::LSTMNode::PROJECTION_WEIGHTS);
+ const auto &projection_weights_obj = _operands.at(projection_weights_index);
+ registerCopyInitializer(projection_weights_index, projection_weights_obj);
+
+ const auto &projection_bias_index =
+ node.getInputs().at(model::operation::LSTMNode::PROJECTION_BIAS);
+ const auto &projection_bias_obj = _operands.at(projection_bias_index);
+ registerCopyInitializer(projection_bias_index, projection_bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::RNNNode &node)
+{
+ const auto &weights_index = node.getInputs().at(model::operation::RNNNode::WEIGHTS);
+ const auto &weights_obj = _operands.at(weights_index);
+ registerCopyInitializer(weights_index, weights_obj);
+
+ const auto &recurrent_weights_index =
+ node.getInputs().at(model::operation::RNNNode::RECURRENT_WEIGHTS);
+ const auto &recurrent_weights_obj = _operands.at(recurrent_weights_index);
+ registerCopyInitializer(recurrent_weights_index, recurrent_weights_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::RNNNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::TransposeConvNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::TransposeConvNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerPermuteInitializer(kernel_index, kernel_obj);
+}
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_neon/ConstantInitializer.h b/runtimes/neurun/backend/acl_neon/ConstantInitializer.h
new file mode 100644
index 000000000..cdd94f795
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/ConstantInitializer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_ACL_NEON_CONSTANT_INITIALIZER_H__
+#define __NEURUN_COMPILER_ACL_NEON_CONSTANT_INITIALIZER_H__
+
+#include <backend/IConstantInitializer.h>
+#include <model/Operands.h>
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+class ConstantInitializer : public IConstantInitializer
+{
+public:
+ ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+public:
+ void run() override;
+
+public:
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::LSTMNode &) override;
+ void visit(const model::operation::RNNNode &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+
+private:
+ const model::Operands &_operands;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_ACL_NEON_CONSTANT_INITIALIZER_H__
diff --git a/runtimes/neurun/backend/acl_neon/KernelGenerator.cc b/runtimes/neurun/backend/acl_neon/KernelGenerator.cc
new file mode 100644
index 000000000..52300030f
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/KernelGenerator.cc
@@ -0,0 +1,1726 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "KernelGenerator.h"
+
+#include <arm_compute/runtime/NEON/NEFunctions.h> // Include all ARM Compute NEON functions
+#include <arm_compute/runtime/NEON/NEFunctionsEx.h> // Include all ARM Compute EX NEON functions
+
+#include <Convert.h>
+#include <Swizzle.h>
+
+#include "kernel/ConcatLayer.h"
+#include "util/Padding.h"
+#include "model/Index.h"
+#include "model/DataType.h"
+#include "model/InternalType.h"
+#include "compiler/IExecutionBuilder.h"
+#include "exec/NopFunction.h"
+#include "util/logging.h"
+#include "util/Utils.h"
+
+using ::neurun::compiler::IExecutionBuilder;
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+using ::neurun::backend::acl_common::asAclFunction;
+
+//
+// ActivationBuilder
+//
+class ActivationBuilder
+{
+public:
+ ActivationBuilder(IExecutionBuilder &builder) : _builder(builder)
+ {
+ // DO NOTHING
+ }
+
+private:
+ void appendReLU(::arm_compute::ITensor *ifm_alloc);
+ void appendReLU1(::arm_compute::ITensor *ifm_alloc);
+ void appendReLU6(::arm_compute::ITensor *ifm_alloc);
+
+public:
+ void append(model::Activation act, ::arm_compute::ITensor *ifm_alloc);
+
+private:
+ IExecutionBuilder &_builder;
+};
+
+void ActivationBuilder::appendReLU(::arm_compute::ITensor *ifm_alloc)
+{
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::RELU};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(ifm_alloc, nullptr, act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _builder.append(std::move(acl_fn));
+}
+
+void ActivationBuilder::appendReLU1(::arm_compute::ITensor *ifm_alloc)
+{
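+  // LU_BOUNDED_RELU(a, b) computes min(a, max(b, x)), so ReLU1 clamps the
+  // input to the range [-1, 1]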
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1.0f, -1.0f};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(ifm_alloc, nullptr, act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _builder.append(std::move(acl_fn));
+}
+
+void ActivationBuilder::appendReLU6(::arm_compute::ITensor *ifm_alloc)
+{
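+  // ReLU6 clamps the input to the range [0, 6]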
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.0f, 0.0f};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(ifm_alloc, nullptr, act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _builder.append(std::move(acl_fn));
+}
+
+void ActivationBuilder::append(model::Activation act, ::arm_compute::ITensor *ifm_alloc)
+{
+ switch (act)
+ {
+ case model::Activation::NONE:
+ {
+ // DO NOTHING
+ break;
+ }
+ case model::Activation::RELU:
+ {
+ appendReLU(ifm_alloc);
+ break;
+ }
+ case model::Activation::RELU1:
+ {
+ appendReLU1(ifm_alloc);
+ break;
+ }
+ case model::Activation::RELU6:
+ {
+ appendReLU6(ifm_alloc);
+ break;
+ }
+ default:
+ {
+ throw std::runtime_error("Not supported, yet");
+ }
+ }
+}
+
+//
+// KernelGenerator
+//
+KernelGenerator::KernelGenerator(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _ctx(ctx), _tensor_builder(tensor_builder), _current_subg_layout(model::Layout::UNKNOWN)
+{
+ // DO NOTHING
+}
+
+void KernelGenerator::visit(const model::Subgraph &subgraph)
+{
+ _current_subg_layout = subgraph.getLayout();
+ for (const auto &e : subgraph.operations())
+ {
+ const auto &node = *(e.node);
+ _tensor_builder->preVisit(node);
+ node.accept(*this);
+ _tensor_builder->postVisit(node);
+ }
+}
+
+void KernelGenerator::visit(const model::operation::AbsNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::AbsNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::ABS};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ArgMaxNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ArgMaxNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ auto ifm_shape = _ctx.at(ifm_index).shape();
+ auto ofm_shape = _ctx.at(ofm_index).shape();
+ auto axis_shape = _ctx.at(axis_index).shape();
+
+ assert(_ctx.at(axis_index).isConstant());
+ // Axis rank is always 1.
+ assert(axis_shape.rank() == 1);
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ const auto ifm_rank = ifm_shape.rank();
+ auto frontend_layout = _current_subg_layout;
+ auto backend_layout = ifm_alloc->layout();
+ int32_t axis_value = _ctx.at(axis_index).asScalar<int32_t>();
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ assert(axis_value >= 0 && axis_value < ifm_rank);
+ const auto fixed_axis =
+ acl_common::ToARMComputeAxis(ifm_rank, axis_value, frontend_layout, backend_layout).value();
+
+  auto fn = nnfw::cpp14::make_unique<::arm_compute::NEArgMax>();
+
+  // NOTE Force the output data type to U32; the frontend may declare the
+  //      output as S32, but the index output of NEArgMax is handled as U32 here
+  ofm_alloc->info()->set_data_type(arm_compute::DataType::U32);
+  fn->configure(ifm_alloc->handle(), fixed_axis, ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::Conv2DNode &node)
+{
+ using model::operation::Conv2DNode;
+
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(Conv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
+
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ // Kernel format is [depth_out, kernel_height, kernel_width, depth_in].
+ const auto &ker_shape = _ctx.at(ker_index).shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+
+ const auto stride = node.param().stride;
+ const auto padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
+ stride, ker_width, ker_height);
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto ker_alloc = _tensor_builder->at(ker_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+
+ const auto conv_info = acl_common::asPadStrideInfo(padding, stride);
+ const auto act_info = acl_common::asActivationLayerInfo(activation);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEConvolutionLayer>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), bias_alloc->handle(), ofm_alloc->handle(),
+ conv_info, ::arm_compute::WeightsInfo(), ::arm_compute::Size2D(1U, 1U), act_info);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::DepthwiseConv2DNode &node)
+{
+ using model::operation::DepthwiseConv2DNode;
+
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(DepthwiseConv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(DepthwiseConv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(DepthwiseConv2DNode::Input::BIAS)};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ // Kernel format is [1, kernel_height, kernel_width, depth_out].
+ const auto &ker_shape = _ctx.at(ker_index).shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+
+ const auto stride = node.param().stride;
+ const auto padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
+ stride, ker_width, ker_height);
+ const auto multiplier = node.param().multiplier;
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto ker_alloc = _tensor_builder->at(ker_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+
+ const auto conv_info = acl_common::asPadStrideInfo(padding, stride);
+ const auto act_info = acl_common::asActivationLayerInfo(activation);
+
+ if (ker_height == 3 && ker_width == 3)
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEDepthwiseConvolutionLayer3x3>();
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), bias_alloc->handle(),
+ ofm_alloc->handle(), conv_info, multiplier, act_info);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+ }
+ else
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEDepthwiseConvolutionLayer>();
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), bias_alloc->handle(),
+ ofm_alloc->handle(), conv_info, multiplier, act_info);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+ }
+}
+
+void KernelGenerator::visit(const model::operation::DequantizeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::DequantizeNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEDequantizationLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::MaxPool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::MaxPool2DNode::Input::INPUT)};
+
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+
+ const auto kh = node.param().kh;
+ const auto kw = node.param().kw;
+ const auto stride = node.param().stride;
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ VERBOSE(MaxPool2D) << "IFM_H: " << ifm_shape.H << std::endl;
+ VERBOSE(MaxPool2D) << "IFM_W: " << ifm_shape.W << std::endl;
+ VERBOSE(MaxPool2D) << "OFM_H: " << ofm_shape.H << std::endl;
+ VERBOSE(MaxPool2D) << "OFM_W: " << ofm_shape.W << std::endl;
+ VERBOSE(MaxPool2D) << "KER_H: " << kh << std::endl;
+ VERBOSE(MaxPool2D) << "KER_W: " << kw << std::endl;
+ VERBOSE(MaxPool2D) << "STRIDE_H: " << stride.vertical << std::endl;
+ VERBOSE(MaxPool2D) << "STRIDE_W: " << stride.horizontal << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(T): " << padding.top << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(B): " << padding.bottom << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(L): " << padding.left << std::endl;
+ VERBOSE(MaxPool2D) << "PAD(R): " << padding.right << std::endl;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ ::arm_compute::PoolingLayerInfo info{::arm_compute::PoolingType::MAX,
+ ::arm_compute::Size2D{kw, kh},
+ acl_common::asPadStrideInfo(padding, stride)};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEPoolingLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append((std::move(acl_fn)));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::MeanNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::MeanNode::Input::INPUT)};
+
+ const auto axis_index{node.param().axis_index};
+ const auto keep_dims{node.param().keep_dims};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape();
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ std::set<uint32_t> axes;
+ {
+ const auto ifm_rank = ifm_shape.rank();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+ const auto axis_shape = _ctx.at(axis_index).shape();
+ switch (axis_shape.rank())
+ {
+ case 0: // scalar
+ {
+ auto axis_value = _ctx.at(axis_index).asScalar<int32_t>();
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(ifm_rank, axis_value,
+ frontend_layout, backend_layout)
+ .value());
+ break;
+ }
+ case 1: // vector
+ {
+ const auto axis_base = _ctx.at(axis_index).data().base();
+ const int axis_size = axis_shape.num_elements();
+
+      // If the axis data is not constant but provided as runtime input, we would
+      // need a way to infer the output shape when sinking the output.
+ assert(axis_base != nullptr);
+ for (int32_t n = 0; n < axis_size; ++n)
+ {
+ int32_t axis_value = *(reinterpret_cast<const int32_t *>(axis_base) + n);
+ if (axis_value < 0)
+ {
+ axis_value += ifm_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(
+ ifm_rank, axis_value, frontend_layout, backend_layout)
+ .value());
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error("Not supported");
+ }
+ }
+
+ arm_compute::Coordinates fixed_axis;
+ for (auto a : axes)
+ {
+ fixed_axis.set(fixed_axis.num_dimensions(), a);
+ }
+
+  // NOTE NEReduceMean has a bug: it does not support NHWC layout
+  //      (its intermediate tensors are always NCHW), so NEReduceMeanEx is used instead
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEReduceMeanEx>();
+
+ fn->configure(ifm_alloc->handle(), fixed_axis, keep_dims, ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::AvgPool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::AvgPool2DNode::Input::INPUT)};
+
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+
+ const auto kh = node.param().kh;
+ const auto kw = node.param().kw;
+ const auto stride = node.param().stride;
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ VERBOSE(AvgPool2D) << "IFM_H: " << ifm_shape.H << std::endl;
+ VERBOSE(AvgPool2D) << "IFM_W: " << ifm_shape.W << std::endl;
+ VERBOSE(AvgPool2D) << "OFM_H: " << ofm_shape.H << std::endl;
+ VERBOSE(AvgPool2D) << "OFM_W: " << ofm_shape.W << std::endl;
+ VERBOSE(AvgPool2D) << "KER_H: " << kh << std::endl;
+ VERBOSE(AvgPool2D) << "KER_W: " << kw << std::endl;
+ VERBOSE(AvgPool2D) << "STRIDE_H: " << stride.vertical << std::endl;
+ VERBOSE(AvgPool2D) << "STRIDE_W: " << stride.horizontal << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(T): " << padding.top << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(B): " << padding.bottom << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(L): " << padding.left << std::endl;
+ VERBOSE(AvgPool2D) << "PAD(R): " << padding.right << std::endl;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ ::arm_compute::PoolingLayerInfo info{
+ ::arm_compute::PoolingType::AVG, ::arm_compute::Size2D{kw, kh},
+ acl_common::asPadStrideInfo(padding, stride), true /* exclude_padding */};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEPoolingLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append((std::move(acl_fn)));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::ConcatNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+
+ std::vector<model::OperandIndex> input_indexes;
+ for (const auto &input : node.getInputs())
+ input_indexes.emplace_back(input);
+
+ const auto axis = node.param().axis;
+
+  // The concat can be eliminated if every input is allocated as a subtensor of the output
+ bool canEliminate = true;
+ for (auto ifm_ind : input_indexes)
+ {
+ if (!_tensor_builder->isSubTensorOf(ofm_index, ifm_ind))
+ {
+ canEliminate = false;
+ break;
+ }
+ }
+ if (canEliminate)
+ {
+    // The concat is eliminated, so append a no-op IFunction
+ _execution_builder->append(nnfw::cpp14::make_unique<exec::NopFunction>());
+ return;
+ }
+
+ auto output_alloc = _tensor_builder->at(ofm_index).get();
+
+ std::vector<::neurun::backend::acl_neon::operand::INETensor *> input_allocs;
+ for (const auto &ifm_ind : input_indexes)
+ input_allocs.emplace_back(_tensor_builder->at(ifm_ind).get());
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::acl_neon::kernel::ConcatLayer>();
+
+ const auto rank = _ctx.at(ofm_index).shape().rank();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = output_alloc->layout();
+ const auto fixed_axis =
+ acl_common::ToARMComputeAxis(rank, axis, frontend_layout, backend_layout).value();
+
+ fn->configure(input_allocs, fixed_axis, output_alloc);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::FloorNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::FloorNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEFloor>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::FullyConnectedNode &node)
+{
+ using model::operation::FullyConnectedNode;
+
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
+ const auto weight_index{node.getInputs().at(FullyConnectedNode::Input::WEIGHT)};
+ const auto bias_index{node.getInputs().at(FullyConnectedNode::Input::BIAS)};
+
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+  // TODO Currently we do not handle the case where the input's rank is 3;
+  //      support should be added in the future.
+ assert(input_rank != 3);
+
+ const auto output_size = _ctx.at(output_index).shape().dim(1);
+ UNUSED_RELEASE(output_size);
+ assert(_ctx.at(bias_index).shape().dim(0) == output_size);
+ assert(_ctx.at(weight_index).shape().dim(0) == output_size);
+ const auto batch_size = _ctx.at(output_index).shape().dim(0);
+ const auto input_size = _ctx.at(weight_index).shape().dim(1);
+
+  // Check whether the input needs to be reshaped into a rank-2 tensor
+ bool needs_reshape = false;
+ neurun::model::Shape reshape(2);
+ if (input_rank == 4)
+ {
+ model::FeatureShape ifm_shape_feature =
+ _ctx.at(input_index).shape().asFeature(_current_subg_layout);
+ auto feature_size =
+ ifm_shape_feature.N * ifm_shape_feature.C * ifm_shape_feature.H * ifm_shape_feature.W;
+
+ UNUSED_RELEASE(feature_size);
+ assert(feature_size == batch_size * input_size);
+
+ // for reshaping
+ needs_reshape = true;
+ reshape.dim(0) = batch_size; /* H */
+ reshape.dim(1) = input_size; /* W */
+ }
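+  // i.e. a rank-4 NHWC input is collapsed into a rank-2 [batch_size, input_size]
+  // matrix before the fully-connected multiplication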
+
+ const auto activation = node.param().activation;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ auto weight_alloc = _tensor_builder->at(weight_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+ auto acl_layout = output_alloc->handle()->info()->data_layout();
+
+ auto fn = nnfw::cpp14::make_unique<arm_compute::NEFullyConnectedReshapingLayer>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(
+ input_alloc->handle(), weight_alloc->handle(), bias_alloc->handle(), output_alloc->handle(),
+ needs_reshape,
+ ::neurun::backend::acl_common::asTensorShape(/* TODO Support NCHW frontend */
+ reshape, model::Layout::NHWC,
+ ::neurun::backend::acl_common::asRuntimeLayout(
+ acl_layout)));
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, output_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::L2NormalizationNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::L2NormalizationNode::Input::INPUT)};
+
+  // {CL|Neon}L2Normalization performs the reduction only along dimension 0,
+  // whereas L2 Normalization always performs the reduction along the depth axis.
+  // Thus, we repurpose {CL|Neon}NormalizationLayers to act as depthwise L2
+  // normalizations by choosing the normalization parameters as follows.
+
+ const auto &ifm_shape = _ctx.at(ifm_index).shape();
+ // TODO Support optional constant dimension that normalization would be performed on
+ const auto normalization_axis = ifm_shape.rank() - 1;
+ int32_t radius =
+ 2 * ifm_shape.dim(normalization_axis) + 1; // normSize = depth(last dimension) * 2 + 1
+ float alpha = 1.0f; // In the implementation to make alpha_ become 1
+ float beta = 0.5f; // pow(reduction, -0.5) = 1 / sqrt(reduction)
+ float bias = 0.0f; // Don't offset the reduction.
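+  // With these parameters the layer computes x / (bias + alpha * sum(x^2))^beta
+  // = x / sqrt(sum(x^2)) over the cross-map (depth) window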
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const auto norm_info = ::arm_compute::NormalizationLayerInfo(::arm_compute::NormType::CROSS_MAP,
+ radius, alpha, beta, bias, false);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NENormalizationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), norm_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::L2Pool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::L2Pool2DNode::Input::INPUT)};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+
+ uint32_t kw = node.param().kw;
+ uint32_t kh = node.param().kh;
+ const auto stride = node.param().stride;
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ ::arm_compute::PoolingLayerInfo info{
+ ::arm_compute::PoolingType::L2, ::arm_compute::Size2D{kw, kh},
+ ::neurun::backend::acl_common::asPadStrideInfo(padding, stride)};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEPoolingLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::LocalResponseNormalizationNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{
+ node.getInputs().at(model::operation::LocalResponseNormalizationNode::Input::INPUT)};
+ const auto radius_index{node.param().radius_index};
+ const auto bias_index{node.param().bias_index};
+ const auto alpha_index{node.param().alpha_index};
+ const auto beta_index{node.param().beta_index};
+
+ auto radius = _ctx.at(radius_index).asScalar<int32_t>();
+ auto alpha = _ctx.at(alpha_index).asScalar<float>();
+ auto beta = _ctx.at(beta_index).asScalar<float>();
+ auto bias = _ctx.at(bias_index).asScalar<float>();
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const auto norm_info = ::arm_compute::NormalizationLayerInfo(
+ ::arm_compute::NormType::CROSS_MAP, radius * 2 + 1, alpha, beta, bias, false);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NENormalizationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), norm_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogicalAndNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input0_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT1)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input0_alloc = _tensor_builder->at(input0_index).get();
+ auto input1_alloc = _tensor_builder->at(input1_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NELogicalAnd>();
+
+ fn->configure(input0_alloc->handle(), input1_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogicalNotNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::LogicalNotNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEBitwiseNot>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogicalOrNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input0_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT1)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input0_alloc = _tensor_builder->at(input0_index).get();
+ auto input1_alloc = _tensor_builder->at(input1_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NELogicalOr>();
+
+ fn->configure(input0_alloc->handle(), input1_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LogisticNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::LogisticNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LOGISTIC};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::LSTMNode &node)
+{
+ // TODO Support dynamic RNN
+ // TODO Fix a subtle error in the non-CIFG, non-peephole, no-projection case.
+ const auto scratch_buffer_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::SCRATCH_BUFFER)};
+ const auto output_state_out_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::OUTPUT_STATE_OUT)};
+ const auto cell_state_out_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::CELL_STATE_OUT)};
+ const auto output_index{node.getOutputs().at(model::operation::LSTMNode::Output::OUTPUT)};
+
+ const auto input_index{node.getInputs().at(model::operation::LSTMNode::Input::INPUT)};
+ const auto input_to_input_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_INPUT_WEIGHTS)}; // optional
+ const auto input_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_FORGET_WEIGHTS)};
+ const auto input_to_cell_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_CELL_WEIGHTS)};
+ const auto input_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_OUTPUT_WEIGHTS)};
+ const auto recurrent_to_input_weights_index{node.getInputs().at(
+ model::operation::LSTMNode::Input::RECURRENT_TO_INPUT_WEIGHTS)}; // optional
+ const auto recurrent_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_FORGET_WEIGHTS)};
+ const auto recurrent_to_cell_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_CELL_WEIGHTS)};
+ const auto recurrent_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_OUTPUT_WEIGHTS)};
+ const auto cell_to_input_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_INPUT_WEIGHTS)}; // optional
+ const auto cell_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_FORGET_WEIGHTS)}; // optional
+ const auto cell_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_OUTPUT_WEIGHTS)}; // optional
+ const auto input_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_GATE_BIAS)};
+ const auto forget_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::FORGET_GATE_BIAS)};
+ const auto cell_bias_index{node.getInputs().at(model::operation::LSTMNode::Input::CELL_BIAS)};
+ const auto output_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::OUTPUT_GATE_BIAS)};
+ const auto projection_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::PROJECTION_WEIGHTS)}; // optional
+ const auto projection_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::PROJECTION_BIAS)}; // optional
+ const auto output_state_in_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::OUTPUT_STATE_IN)};
+ const auto cell_state_in_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_STATE_IN)};
+ const auto cell_threshold = node.param().cell_threshold;
+ const auto projection_threshold = node.param().projection_threshold;
+
+ bool has_input_to_input_weights = _ctx.at(input_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(input_to_input_weights_index).shape().dim(1) != 0;
+ bool has_recurrent_to_input_weights =
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(1) != 0;
+ bool has_cell_to_forget_weights = _ctx.at(cell_to_forget_weights_index).shape().dim(0) != 0;
+ bool has_cell_to_output_weights = _ctx.at(cell_to_output_weights_index).shape().dim(0) != 0;
+ bool has_projection_weights = _ctx.at(projection_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(projection_weights_index).shape().dim(1) != 0;
+ bool has_projection_bias = _ctx.at(projection_bias_index).shape().dim(0) != 0;
+
+ // NOTE input_to_input_weights and recurrent_to_input_weights do not exist in CIFG.
+ // true: no CIFG
+ // false: CIFG
+ // NOTE cell_to_input_weights does not exist in a non-peephole model, even for a
+ // regular (non-CIFG) LSTM.
+ bool has_cifg_param = has_input_to_input_weights && has_recurrent_to_input_weights;
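+ // For example, a CIFG model typically ships input_to_input_weights with shape
+ // {0, 0}, so has_input_to_input_weights evaluates to false and the CIFG-only
+ // parameters are skipped when populating lstm_params below.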
+
+ // NOTE cell_to_forget_weights and cell_to_output_weights exist in a peephole model,
+ // but cell_to_input_weights does not exist in a CIFG model even with peephole.
+ // true: peephole
+ // false: no peephole
+ bool has_peephole_param = has_cell_to_forget_weights && has_cell_to_output_weights;
+
+ // NOTE Even when the projection weights have data, the projection bias may not.
+ bool has_projection_param = has_projection_weights;
+
+ const auto activation = node.param().activation;
+ const auto cell_clip = cell_threshold;
+ const auto projection_clip = projection_threshold;
+ assert(cell_clip >= 0.f && projection_clip >= 0.f);
+
+ auto scratch_buffer_alloc = _tensor_builder->at(scratch_buffer_index).get();
+ auto output_state_out_alloc = _tensor_builder->at(output_state_out_index).get();
+ auto cell_state_out_alloc = _tensor_builder->at(cell_state_out_index).get();
+ auto output_alloc = _tensor_builder->at(output_index).get();
+
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto input_to_forget_weights_alloc = _tensor_builder->at(input_to_forget_weights_index).get();
+ auto input_to_cell_weights_alloc = _tensor_builder->at(input_to_cell_weights_index).get();
+ auto input_to_output_weights_alloc = _tensor_builder->at(input_to_output_weights_index).get();
+ auto recurrent_to_forget_weights_alloc =
+ _tensor_builder->at(recurrent_to_forget_weights_index).get();
+ auto recurrent_to_cell_weights_alloc = _tensor_builder->at(recurrent_to_cell_weights_index).get();
+ auto recurrent_to_output_weights_alloc =
+ _tensor_builder->at(recurrent_to_output_weights_index).get();
+
+ auto forget_gate_bias_alloc = _tensor_builder->at(forget_gate_bias_index).get();
+ auto cell_bias_alloc = _tensor_builder->at(cell_bias_index).get();
+ auto output_gate_bias_alloc = _tensor_builder->at(output_gate_bias_index).get();
+ auto output_state_in_alloc = _tensor_builder->at(output_state_in_index).get();
+ auto cell_state_in_alloc = _tensor_builder->at(cell_state_in_index).get();
+
+ auto act_info = ::neurun::backend::acl_common::asActivationLayerInfo(activation);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NELSTMLayer>();
+
+ ::arm_compute::LSTMParams<::arm_compute::ITensor> lstm_params{};
+ if (has_cifg_param)
+ {
+ auto input_to_input_weights_alloc =
+ _tensor_builder->at(input_to_input_weights_index).get(); // optional
+ auto recurrent_to_input_weights_alloc =
+ _tensor_builder->at(recurrent_to_input_weights_index).get(); // optional
+ auto cell_to_input_weights_handle =
+ has_peephole_param ? _tensor_builder->at(cell_to_input_weights_index).get()->handle()
+ : nullptr; // optional (non-cifg && peephole)
+ auto input_gate_bias_alloc = _tensor_builder->at(input_gate_bias_index).get(); // optional
+ lstm_params.set_cifg_params(input_to_input_weights_alloc->handle(),
+ recurrent_to_input_weights_alloc->handle(),
+ cell_to_input_weights_handle, input_gate_bias_alloc->handle());
+ }
+ if (has_peephole_param)
+ {
+ auto cell_to_forget_weights_alloc =
+ _tensor_builder->at(cell_to_forget_weights_index).get(); // optional
+ auto cell_to_output_weights_alloc =
+ _tensor_builder->at(cell_to_output_weights_index).get(); // optional
+ lstm_params.set_peephole_params(cell_to_forget_weights_alloc->handle(),
+ cell_to_output_weights_alloc->handle());
+ }
+ if (has_projection_param)
+ {
+ auto projection_weights_alloc = _tensor_builder->at(projection_weights_index).get(); // optional
+ auto projection_bias_handle = has_projection_bias
+ ? _tensor_builder->at(projection_bias_index).get()->handle()
+ : nullptr; // optional
+ lstm_params.set_projection_params(projection_weights_alloc->handle(), projection_bias_handle);
+ }
+
+ fn->configure(
+ input_alloc->handle(), input_to_forget_weights_alloc->handle(),
+ input_to_cell_weights_alloc->handle(), input_to_output_weights_alloc->handle(),
+ recurrent_to_forget_weights_alloc->handle(), recurrent_to_cell_weights_alloc->handle(),
+ recurrent_to_output_weights_alloc->handle(), forget_gate_bias_alloc->handle(),
+ cell_bias_alloc->handle(), output_gate_bias_alloc->handle(), output_state_in_alloc->handle(),
+ cell_state_in_alloc->handle(), scratch_buffer_alloc->handle(),
+ output_state_out_alloc->handle(), cell_state_out_alloc->handle(), output_alloc->handle(),
+ lstm_params, act_info, cell_clip, projection_clip);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::MulNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::MulNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::MulNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEPixelWiseMultiplication>();
+
+ // For scale 1.0, the only allowed RoundingPolicy is RoundingPolicy::TO_ZERO
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle(), 1.0, // scale
+ arm_compute::ConvertPolicy::SATURATE, arm_compute::RoundingPolicy::TO_ZERO);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::NegNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::NegNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NENegLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::PadNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::PadNode::Input::INPUT)};
+ const auto pad_index{node.getInputs().at(model::operation::PadNode::Input::PAD)};
+ const auto output_index{node.getOutputs().at(0)};
+ assert(_ctx.at(pad_index).isConstant());
+
+ auto rank = _ctx.at(pad_index).shape().dim(0);
+ auto pad_base = _ctx.at(pad_index).data().base();
+
+ auto input = _tensor_builder->at(input_index).get()->handle();
+ auto output = _tensor_builder->at(output_index).get()->handle();
+
+ ::arm_compute::PaddingList padding_list;
+ padding_list.resize(rank);
+ for (int32_t n = 0; n < rank; ++n)
+ {
+ const int32_t *from = reinterpret_cast<const int32_t *>(pad_base) + (n * 2);
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = _tensor_builder->at(input_index).get()->layout();
+ const auto axis =
+ acl_common::ToARMComputeAxis(rank, n, frontend_layout, backend_layout).value();
+ padding_list[axis] = ::arm_compute::PaddingInfo{from[0], from[1]};
+ }
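+ // Each row n of the pad tensor holds a {front, back} pair for frontend axis n;
+ // ToARMComputeAxis above maps each pair onto the corresponding backend axis.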
+
+ const auto input_type = _ctx.at(input_index).typeInfo();
+ UNUSED_RELEASE(input_type);
+ assert(input->info()->data_type() == acl_common::asDataType(input_type.type()));
+ assert(input->info()->quantization_info() ==
+ ::arm_compute::QuantizationInfo(input_type.scale(), input_type.offset()));
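+ // Build the padding value from the input's data type and quantization info so
+ // that, for a quantized input, the padded area holds the zero point rather
+ // than a raw 0.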
+ const auto pixel_value =
+ ::arm_compute::PixelValue(0, input->info()->data_type(), input->info()->quantization_info());
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEPadLayer>();
+ fn->configure(input, output, padding_list, pixel_value);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::PReLUNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::PReLUNode::Input::INPUT)};
+ const auto alpha_index{node.getInputs().at(model::operation::PReLUNode::Input::ALPHA)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto alpha_alloc = _tensor_builder->at(alpha_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEPReLU>();
+
+ fn->configure(ifm_alloc->handle(), alpha_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReduceSumNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReduceSumNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ const auto axis_base = _ctx.at(axis_index).data().base();
+ const auto axis_size = _ctx.at(axis_index).shape().num_elements();
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = input_alloc->layout();
+ // The axis's data must exist as constant values
+ assert(axis_base != nullptr);
+ std::set<int32_t> axes;
+ for (size_t n = 0; n < axis_size; ++n)
+ {
+ int32_t axis_value = *(reinterpret_cast<const int32_t *>(axis_base) + n);
+ if (axis_value < 0)
+ {
+ axis_value += input_rank;
+ }
+ axes.insert(::neurun::backend::acl_common::ToARMComputeAxis(input_rank, axis_value,
+ frontend_layout, backend_layout)
+ .value());
+ }
+ arm_compute::Coordinates fixed_axes;
+ for (const auto &a : axes)
+ {
+ fixed_axes.set(fixed_axes.num_dimensions(), a);
+ }
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEReduceSum>();
+
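+ // The third argument (keep_dims) is false: reduced axes are dropped from the
+ // output shape.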
+ fn->configure(input_alloc->handle(), fixed_axes, false, output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReLUNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReLUNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<arm_compute::NEActivationLayer>();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::RELU};
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReLU1Node &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ReLU1Node::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
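+ // LU_BOUNDED_RELU computes min(a, max(b, x)); with a = 1.0 and b = -1.0 this
+ // clamps the input to [-1, 1], i.e. ReLU1.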
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1.0f, -1.0f};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReLU6Node &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ReLU6Node::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
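+ // BOUNDED_RELU computes min(a, max(0, x)); with a = 6.0 this is ReLU6.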
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.0f};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReshapeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReshapeNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<arm_compute::NEReshapeLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ResizeBilinearNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+
+ const auto ifm_index{node.getInputs().at(model::operation::ResizeBilinearNode::Input::INPUT)};
+ const auto height_index{node.param().height_index};
+ const auto width_index{node.param().width_index};
+ (void)height_index;
+ (void)width_index;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEScale>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle(),
+ ::arm_compute::InterpolationPolicy::BILINEAR, ::arm_compute::BorderMode::REPLICATE,
+ ::arm_compute::PixelValue(0.f), ::arm_compute::SamplingPolicy::TOP_LEFT);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::RNNNode &node)
+{
+ const auto output_index{node.getOutputs().at(model::operation::RNNNode::Output::OUTPUT)};
+ const auto hidden_state_out_index{
+ node.getOutputs().at(model::operation::RNNNode::Output::HIDDEN_STATE_OUT)};
+
+ const auto input_index{node.getInputs().at(model::operation::RNNNode::Input::INPUT)};
+ const auto weights_index{node.getInputs().at(model::operation::RNNNode::Input::WEIGHTS)};
+ const auto recurrent_weights_index{
+ node.getInputs().at(model::operation::RNNNode::Input::RECURRENT_WEIGHTS)};
+ const auto bias_index{node.getInputs().at(model::operation::RNNNode::Input::BIAS)};
+ const auto hidden_state_in_index{
+ node.getInputs().at(model::operation::RNNNode::Input::HIDDEN_STATE_IN)};
+
+ const auto activation = node.param().activation;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto hidden_state_out_alloc = _tensor_builder->at(hidden_state_out_index).get();
+
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ auto weights_alloc = _tensor_builder->at(weights_index).get();
+ auto recurrent_weights_alloc = _tensor_builder->at(recurrent_weights_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+ auto hidden_state_in_alloc = _tensor_builder->at(hidden_state_in_index).get();
+ auto act_info = ::neurun::backend::acl_common::asActivationLayerInfo(activation);
+
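+ // Seed the output hidden state with the input hidden state first; the RNN
+ // layer below reads and updates its state through hidden_state_out only.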
+ auto copy_layer = nnfw::cpp14::make_unique<::arm_compute::NECopy>();
+ copy_layer->configure(hidden_state_in_alloc->handle(), hidden_state_out_alloc->handle());
+ _execution_builder->append(asAclFunction(std::move(copy_layer)));
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NERNNLayerEx>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+ fn->configure(input_alloc->handle(), weights_alloc->handle(), recurrent_weights_alloc->handle(),
+ bias_alloc->handle(), hidden_state_out_alloc->handle(), output_alloc->handle(),
+ act_info);
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::RSQRTNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::RSQRTNode::Input::INPUT)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NERsqrtLayer>();
+
+ fn->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::SqueezeNode &node)
+{
+ // Squeeze is identical to Reshape except that it has an optional dimensions input.
+ // The optional dims_index is ignored here since the output tensor already has the
+ // squeezed shape, produced by the freezer and toco.
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SqueezeNode::Input::INPUT)};
+ const auto dims_index{node.param().dims};
+ (void)dims_index;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ auto fn = nnfw::cpp14::make_unique<arm_compute::NEReshapeLayer>();
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+ auto acl_fn = asAclFunction(std::move(fn));
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::TanhNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::TanhNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<arm_compute::NEActivationLayer>();
+
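+ // TANH computes a * tanh(b * x); with a = b = 1.0 this is plain tanh(x).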
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::TANH, 1.0f, 1.0f};
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SoftmaxNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SoftmaxNode::Input::INPUT)};
+ const auto beta = node.param().beta;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NESoftmaxLayer>(
+ _tensor_builder->acl_tensor_manager()->internal_buffer_manager());
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), beta);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SplitNode &node)
+{
+ // TODO Support this op by SubTensor
+ const auto ifm_index{node.getInputs().at(model::operation::SplitNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+ const auto num_of_splits_index{node.param().num_of_splits_index};
+
+ assert(_ctx.at(num_of_splits_index).asScalar<unsigned int>() == node.getOutputs().size());
+
+ const auto ifm_rank = _ctx.at(ifm_index).shape().rank();
+ std::vector<model::OperandIndex> output_indexes;
+ for (const auto &output : node.getOutputs())
+ output_indexes.emplace_back(output);
+
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ std::vector<arm_compute::ITensor *> output_allocs;
+ for (const auto &ofm_ind : output_indexes)
+ output_allocs.emplace_back(_tensor_builder->at(ofm_ind).get()->handle());
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+ auto axis = _ctx.at(axis_index).asScalar<int32_t>();
+ if (axis < 0)
+ axis += ifm_rank;
+ axis = acl_common::ToARMComputeAxis(ifm_rank, axis, frontend_layout, backend_layout).value();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NESplit>();
+
+ fn->configure(ifm_alloc->handle(), output_allocs, axis);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SQRTNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SQRTNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::SQRT};
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle(), act_info);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SquaredDifferenceNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::RHS)};
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEElementwiseSquaredDiff>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::SubNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::SubNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SubNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEArithmeticSubtraction>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle(),
+ arm_compute::ConvertPolicy::SATURATE);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::StridedSliceNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::StridedSliceNode::Input::INPUT)};
+ const auto startData_index{node.param().startData_index};
+ const auto endData_index{node.param().endData_index};
+ const auto stridesData_index{node.param().stridesData_index};
+ const auto beginMask_index{node.param().beginMask_index};
+ const auto endMask_index{node.param().endMask_index};
+ const auto shrinkAxisMask_index{node.param().shrinkAxisMask_index};
+
+ // Set the start/end/stride index data following the axis order of the input data
+ int input_rank = _ctx.at(input_index).shape().rank();
+ std::vector<int32_t> starts;
+ std::vector<int32_t> ends;
+ std::vector<int32_t> strides;
+ starts.resize(input_rank, 0);
+ ends.resize(input_rank, 0);
+ strides.resize(input_rank, 0);
+ {
+ auto input_shape = _ctx.at(input_index).shape();
+ auto startData_base = _ctx.at(startData_index).data().base();
+ auto endData_base = _ctx.at(endData_index).data().base();
+ auto stridesData_base = _ctx.at(stridesData_index).data().base();
+ const int startData_size = _ctx.at(startData_index).shape().num_elements();
+ const int endData_size = _ctx.at(endData_index).shape().num_elements();
+ const int stridesData_size = _ctx.at(stridesData_index).shape().num_elements();
+
+ using neurun::model::DataType;
+
+ UNUSED_RELEASE(startData_size);
+ UNUSED_RELEASE(endData_size);
+ UNUSED_RELEASE(stridesData_size);
+
+ assert(_ctx.at(startData_index).typeInfo().type() == DataType::INT32);
+ assert(_ctx.at(endData_index).typeInfo().type() == DataType::INT32);
+ assert(_ctx.at(stridesData_index).typeInfo().type() == DataType::INT32);
+ assert(startData_size == input_rank);
+ assert(endData_size == input_rank);
+ assert(stridesData_size == input_rank);
+
+ assert(startData_base != nullptr);
+ for (int n = 0; n < input_rank; ++n)
+ {
+ auto axis = ::neurun::backend::acl_common::ToARMComputeAxis(input_rank, n).value();
+
+ int32_t start_value = *(reinterpret_cast<const int32_t *>(startData_base) + n);
+ starts[axis] = start_value;
+
+ int32_t end_value = *(reinterpret_cast<const int32_t *>(endData_base) + n);
+ ends[axis] = end_value;
+
+ int32_t strides_value = *(reinterpret_cast<const int32_t *>(stridesData_base) + n);
+ strides[axis] = strides_value;
+ }
+ }
+
+ // Reorder the mask bits to follow the axis order of the input data
+ const auto beginMask = ::neurun::backend::acl_common::ReorderBits<int32_t>(
+ _ctx.at(beginMask_index).asScalar<int32_t>(), input_rank);
+ const auto endMask = ::neurun::backend::acl_common::ReorderBits<int32_t>(
+ _ctx.at(endMask_index).asScalar<int32_t>(), input_rank);
+ const auto shrinkAxisMask = ::neurun::backend::acl_common::ReorderBits<int32_t>(
+ _ctx.at(shrinkAxisMask_index).asScalar<int32_t>(), input_rank);
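+ // For instance, with input_rank == 4 a frontend mask 0b0001 (axis 0) becomes
+ // 0b1000, since frontend axis 0 maps to the highest ARM Compute axis.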
+
+ auto outputData_alloc = _tensor_builder->at(output_index).get();
+ auto inputData_alloc = _tensor_builder->at(input_index).get();
+
+ ::arm_compute::Coordinates starts_set;
+ ::arm_compute::Coordinates ends_set;
+ ::arm_compute::BiStrides strides_set;
+
+ for (size_t i = 0; i < starts.size(); ++i)
+ {
+ starts_set.set(i, starts[i]);
+ ends_set.set(i, ends[i]);
+ strides_set.set(i, strides[i]);
+ }
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEStridedSlice>();
+
+ fn->configure(inputData_alloc->handle(), outputData_alloc->handle(), starts_set, ends_set,
+ strides_set, beginMask, endMask, shrinkAxisMask);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::TransposeConvNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto output_shape_index{
+ node.getInputs().at(model::operation::TransposeConvNode::Input::OUTPUT_SHAPE)};
+ const auto ker_index{node.getInputs().at(model::operation::TransposeConvNode::Input::KERNEL)};
+ const auto ifm_index{node.getInputs().at(model::operation::TransposeConvNode::Input::INPUT)};
+
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ker_shape = _ctx.at(ker_index).shape().asFeature(_current_subg_layout);
+
+ const auto stride = node.param().stride;
+
+ assert((node.param().padding.type == model::PaddingType::SAME) ||
+ (node.param().padding.type == model::PaddingType::VALID));
+ auto padding = neurun::util::calculatePadding(node.param().padding, ofm_shape, ifm_shape, stride,
+ ker_shape.W, ker_shape.H);
+
+ uint32_t invalid_horizontal = 0;
+ uint32_t invalid_vertical = 0;
+ if (node.param().padding.type == model::PaddingType::VALID)
+ {
+ invalid_horizontal =
+ ofm_shape.W - (1 + (ifm_shape.W - 1) * stride.horizontal) - (ker_shape.W - 1);
+ invalid_vertical = ofm_shape.H - (1 + (ifm_shape.H - 1) * stride.vertical) - (ker_shape.H - 1);
+ }
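+ // Illustrative numbers: ifm.W = 4, stride.horizontal = 2 and ker.W = 3 yield a
+ // full deconvolution width of (4 - 1) * 2 + 3 = 9; an ofm.W of 10 would then
+ // give invalid_horizontal = 10 - (1 + 3 * 2) - 2 = 1 uncovered border column.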
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+ auto ker_alloc = _tensor_builder->at(ker_index).get();
+
+ const auto tconv_info = acl_common::asPadStrideInfo(padding, stride);
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NETransposeConvLayer>();
+
+ fn->configure(ifm_alloc->handle(), ker_alloc->handle(), nullptr, ofm_alloc->handle(), tconv_info,
+ invalid_horizontal, invalid_vertical);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::TransposeNode &node)
+{
+ const auto ofm_idx{node.getOutputs().at(0)};
+ const auto ifm_idx{node.getInputs().at(model::operation::TransposeNode::Input::INPUT)};
+ const auto perm{node.param().perm};
+
+ const auto rank = _ctx.at(ifm_idx).shape().rank();
+ std::vector<int32_t> pv;
+ const auto perm_base = _ctx.at(perm).data().base();
+ const int perm_size = _ctx.at(perm).shape().num_elements();
+
+ assert(perm_base != nullptr);
+ for (int32_t n = 0; n < perm_size; ++n)
+ {
+ const int32_t perm_value = *(reinterpret_cast<const int32_t *>(perm_base) + n);
+ assert(perm_value < rank);
+ pv.emplace_back(perm_value);
+ }
+
+ auto ofm_alloc = _tensor_builder->at(ofm_idx).get();
+ const auto ifm_alloc = _tensor_builder->at(ifm_idx).get();
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = ifm_alloc->layout();
+
+ auto backend_pv = ::neurun::backend::acl_common::getARMComputePermutationVector(
+ rank, pv, frontend_layout, backend_layout);
+
+ std::unique_ptr<::arm_compute::IFunction> fn;
+
+ if (ifm_alloc->num_dimensions() <= 2 && ofm_alloc->num_dimensions() <= 2)
+ {
+ auto l = nnfw::cpp14::make_unique<::arm_compute::NETranspose>();
+
+ l->configure(ifm_alloc->handle(), ofm_alloc->handle());
+
+ fn = std::move(l);
+ }
+ else
+ {
+ auto l = nnfw::cpp14::make_unique<::arm_compute::NEPermute>();
+
+ l->configure(ifm_alloc->handle(), ofm_alloc->handle(), backend_pv);
+
+ fn = std::move(l);
+ }
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::UnpackNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::UnpackNode::Input::INPUT)};
+ auto axis{node.param().axis};
+
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+
+ std::vector<model::OperandIndex> output_indexes;
+ for (const auto &output_index : node.getOutputs())
+ output_indexes.emplace_back(output_index);
+
+ auto input = _tensor_builder->at(input_index).get()->handle();
+ std::vector<arm_compute::ITensor *> outputs;
+ for (const auto &output_index : output_indexes)
+ outputs.emplace_back(_tensor_builder->at(output_index)->handle());
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto backend_layout = _tensor_builder->at(input_index).get()->layout();
+ if (axis < 0)
+ axis += input_rank;
+ axis = acl_common::ToARMComputeAxis(input_rank, axis, frontend_layout, backend_layout).value();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEUnstack>();
+
+ fn->configure(input, outputs, axis);
+
+ _execution_builder->append(asAclFunction(std::move(fn)));
+}
+
+void KernelGenerator::visit(const model::operation::AddNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEArithmeticAddition>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle(),
+ arm_compute::ConvertPolicy::SATURATE);
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::DivNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::DivNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::DivNode::Input::RHS)};
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEElementwiseDivision>();
+
+ fn->configure(lhs_alloc->handle(), rhs_alloc->handle(), ofm_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+
+ ActivationBuilder{*_execution_builder}.append(activation, ofm_alloc->handle());
+}
+
+void KernelGenerator::visit(const model::operation::ExpNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ExpNode::Input::INPUT)};
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEExpLayer>();
+
+ fn->configure(input_alloc->handle(), output_alloc->handle());
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+void KernelGenerator::visit(const model::operation::ReduceMaxNode &node)
+{
+ (void)node;
+ throw std::runtime_error("Not supported, yet");
+}
+
+void KernelGenerator::visit(const model::operation::ComparisonNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input0_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT1)};
+
+ const auto comparison_type = node.param().comparison_type;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input0_alloc = _tensor_builder->at(input0_index).get();
+ auto input1_alloc = _tensor_builder->at(input1_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEElementwiseComparison>();
+
+ fn->configure(input0_alloc->handle(), input1_alloc->handle(), output_alloc->handle(),
+ static_cast<arm_compute::ComparisonOperation>(comparison_type));
+
+ auto acl_fn = asAclFunction(std::move(fn));
+
+ _execution_builder->append(std::move(acl_fn));
+}
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_neon/KernelGenerator.h b/runtimes/neurun/backend/acl_neon/KernelGenerator.h
new file mode 100644
index 000000000..1d839eade
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/KernelGenerator.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_KERNEL_GENERATOR_H__
+#define __NEURUN_BACKEND_ACL_NEON_KERNEL_GENERATOR_H__
+
+#include <backend/IKernelGenerator.h>
+
+#include "model/Operands.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+class KernelGenerator : public IKernelGenerator
+{
+public:
+ KernelGenerator(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+ void visit(const model::Subgraph &) override;
+ void visit(const model::operation::AbsNode &) override;
+ void visit(const model::operation::ArgMaxNode &) override;
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::DequantizeNode &) override;
+ void visit(const model::operation::MaxPool2DNode &) override;
+ void visit(const model::operation::MeanNode &) override;
+ void visit(const model::operation::AvgPool2DNode &) override;
+ void visit(const model::operation::ConcatNode &) override;
+ void visit(const model::operation::FloorNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::L2NormalizationNode &) override;
+ void visit(const model::operation::L2Pool2DNode &) override;
+ void visit(const model::operation::LocalResponseNormalizationNode &) override;
+ void visit(const model::operation::LogicalAndNode &) override;
+ void visit(const model::operation::LogicalNotNode &) override;
+ void visit(const model::operation::LogicalOrNode &) override;
+ void visit(const model::operation::LogisticNode &) override;
+ void visit(const model::operation::LSTMNode &) override;
+ void visit(const model::operation::MulNode &) override;
+ void visit(const model::operation::NegNode &) override;
+ void visit(const model::operation::PadNode &) override;
+ void visit(const model::operation::PReLUNode &) override;
+ void visit(const model::operation::ReduceSumNode &) override;
+ void visit(const model::operation::ReLUNode &) override;
+ void visit(const model::operation::ReLU1Node &) override;
+ void visit(const model::operation::ReLU6Node &) override;
+ void visit(const model::operation::ReshapeNode &) override;
+ void visit(const model::operation::ResizeBilinearNode &) override;
+ void visit(const model::operation::RNNNode &) override;
+ void visit(const model::operation::RSQRTNode &) override;
+ void visit(const model::operation::SqueezeNode &) override;
+ void visit(const model::operation::TanhNode &) override;
+ void visit(const model::operation::SoftmaxNode &) override;
+ void visit(const model::operation::SplitNode &) override;
+ void visit(const model::operation::SQRTNode &) override;
+ void visit(const model::operation::SquaredDifferenceNode &) override;
+ void visit(const model::operation::SubNode &) override;
+ void visit(const model::operation::StridedSliceNode &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+ void visit(const model::operation::TransposeNode &) override;
+ void visit(const model::operation::UnpackNode &) override;
+ void visit(const model::operation::AddNode &) override;
+ void visit(const model::operation::DivNode &) override;
+ void visit(const model::operation::ExpNode &) override;
+ void visit(const model::operation::ReduceMaxNode &) override;
+ void visit(const model::operation::ComparisonNode &) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+ model::Layout _current_subg_layout;
+};
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_KERNEL_GENERATOR_H__
diff --git a/runtimes/neurun/backend/acl_neon/PluginClassesAllocator.cc b/runtimes/neurun/backend/acl_neon/PluginClassesAllocator.cc
new file mode 100644
index 000000000..75f2e9797
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/PluginClassesAllocator.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <util/logging.h>
+
+#include "Backend.h"
+
+extern "C" {
+neurun::backend::Backend *neurun_backend_create()
+{
+ VERBOSE(neurun_backend_create) << "'acl_neon' loaded\n";
+ return new neurun::backend::acl_neon::Backend;
+}
+
+void neurun_backend_destroy(neurun::backend::Backend *backend)
+{
+ VERBOSE(neurun_backend_create) << "'acl_neon' unloaded\n";
+ delete backend;
+}
+}
diff --git a/runtimes/neurun/backend/acl_neon/ShapeFixer.cc b/runtimes/neurun/backend/acl_neon/ShapeFixer.cc
new file mode 100644
index 000000000..e7cdbea4c
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/ShapeFixer.cc
@@ -0,0 +1,332 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ShapeFixer.h"
+
+#include <arm_compute/runtime/NEON/functions/NESoftmaxLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEArithmeticAddition.h>
+#include <arm_compute/runtime/NEON/functions/NEArithmeticSubtraction.h>
+#include <arm_compute/runtime/NEON/functions/NEPixelWiseMultiplication.h>
+#include <arm_compute/runtime/NEON/functions/NEPoolingLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEActivationLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEConvolutionLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEDepthwiseConvolutionLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEReshapeLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEFullyConnectedLayer.h>
+#include <arm_compute/runtime/NEON/functions/NEFullyConnectedReshapingLayer.h>
+
+#include <Convert.h>
+#include <Swizzle.h>
+
+#include "kernel/ConcatLayer.h"
+#include "util/Padding.h"
+#include "model/Index.h"
+#include "model/DataType.h"
+#include "model/InternalType.h"
+#include "compiler/IExecutionBuilder.h"
+#include "exec/NopFunction.h"
+#include "util/logging.h"
+#include "util/Utils.h"
+
+using ::neurun::compiler::IExecutionBuilder;
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+using ::neurun::backend::acl_common::asAclFunction;
+
+ShapeFixer::ShapeFixer(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _ctx(ctx), _tensor_builder(tensor_builder)
+{
+ assert(tensor_builder);
+}
+
+void ShapeFixer::visit(const model::operation::AbsNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ArgMaxNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::Conv2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::DepthwiseConv2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::DequantizeNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::MaxPool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::MeanNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::AvgPool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ConcatNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ _tensor_builder->dimCorrection(ofm_index, false);
+ for (const auto &input : node.getInputs())
+ _tensor_builder->dimCorrection(input, false);
+}
+
+void ShapeFixer::visit(const model::operation::ExpNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::FloorNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::FullyConnectedNode &node)
+{
+ using model::operation::FullyConnectedNode;
+ const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
+ const auto input_rank = _ctx.at(input_index).shape().rank();
+ // TODO The case where the input's rank is 3 is not handled yet.
+ // Add that handling in the future.
+ assert(input_rank != 3);
+ // Check for reshaping input's shape into rank-2
+ if (input_rank == 4)
+ _tensor_builder->dimCorrection(input_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::L2NormalizationNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::L2Pool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LocalResponseNormalizationNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LogicalAndNode &node)
+{
+ const auto input0_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalAndNode::Input::INPUT1)};
+
+ if (!(_ctx.at(input0_index).shape() == _ctx.at(input1_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(input0_index).shape().rank(), _ctx.at(input1_index).shape().rank());
+
+ // TODO Remove const_cast later. For example, _ctx may need to be a non-const variable, or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(input0_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(input1_index).shape()).extendRank(broadcast_rank);
+ }
+}
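+ // e.g. comparing shapes {2, 3} and {3}: broadcast_rank is 2 and the rank-1
+ // operand is extended to {1, 3}, so both inputs present equal ranks to ACL.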
+
+void ShapeFixer::visit(const model::operation::LogicalNotNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LogicalOrNode &node)
+{
+ const auto input0_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::LogicalOrNode::Input::INPUT1)};
+
+ if (!(_ctx.at(input0_index).shape() == _ctx.at(input1_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(input0_index).shape().rank(), _ctx.at(input1_index).shape().rank());
+
+ // TODO Remove const_cast later. For example, _ctx may need to be a non-const variable, or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(input0_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(input1_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::LogisticNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::LSTMNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::PadNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::PadNode::Input::INPUT)};
+ const auto output_index{node.getOutputs().at(0)};
+ _tensor_builder->dimCorrection(input_index, false);
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::MulNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::MulNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::MulNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+
+ // TODO Remove const_cast later. For example, _ctx may need to be a non-const variable, or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::NegNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::PReLUNode &node)
+{
+ const auto ifm_index{node.getInputs().at(model::operation::PReLUNode::Input::INPUT)};
+ const auto alpha_index{node.getInputs().at(model::operation::PReLUNode::Input::ALPHA)};
+
+ if (!(_ctx.at(ifm_index).shape() == _ctx.at(alpha_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(ifm_index).shape().rank(), _ctx.at(alpha_index).shape().rank());
+ const_cast<::neurun::model::Shape &>(_ctx.at(ifm_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(alpha_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::ReduceSumNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReLUNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReLU1Node &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReLU6Node &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ReshapeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReshapeNode::Input::INPUT)};
+
+ _tensor_builder->dimCorrection(input_index, false);
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::ResizeBilinearNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::RNNNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ComparisonNode &node)
+{
+ const auto input0_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT0)};
+ const auto input1_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT1)};
+
+ if (!(_ctx.at(input0_index).shape() == _ctx.at(input1_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(input0_index).shape().rank(), _ctx.at(input1_index).shape().rank());
+
+ // TODO Remove const_cast later. For example, _ctx may need to be a non-const variable, or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(input0_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(input1_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::RSQRTNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SqueezeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SqueezeNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(input_index, false);
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::TanhNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::StridedSliceNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SoftmaxNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SplitNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::SplitNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(input_index, false);
+ for (const auto &output : node.getOutputs())
+ _tensor_builder->dimCorrection(output, false);
+}
+
+void ShapeFixer::visit(const model::operation::SQRTNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SquaredDifferenceNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SquaredDifferenceNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::SubNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::SubNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::SubNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::TransposeConvNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::TransposeNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::UnpackNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::UnpackNode::Input::INPUT)};
+ _tensor_builder->dimCorrection(input_index, false);
+ for (const auto &output_index : node.getOutputs())
+ _tensor_builder->dimCorrection(output_index, false);
+}
+
+void ShapeFixer::visit(const model::operation::AddNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+void ShapeFixer::visit(const model::operation::DivNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::DivNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::DivNode::Input::RHS)};
+
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(lhs_index).shape().rank(), _ctx.at(rhs_index).shape().rank());
+
+ // TODO remove const_cast later. For example, _ctx may need to be a non const variable or
+ // a node to extend shape may be inserted in front of this operation
+ const_cast<::neurun::model::Shape &>(_ctx.at(lhs_index).shape()).extendRank(broadcast_rank);
+ const_cast<::neurun::model::Shape &>(_ctx.at(rhs_index).shape()).extendRank(broadcast_rank);
+ }
+}
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_neon/ShapeFixer.h b/runtimes/neurun/backend/acl_neon/ShapeFixer.h
new file mode 100644
index 000000000..394f9d932
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/ShapeFixer.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_SHAPE_FIXER_H__
+#define __NEURUN_BACKEND_ACL_NEON_SHAPE_FIXER_H__
+
+#include <backend/IShapeFixer.h>
+
+#include "model/Operands.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+class ShapeFixer : public IShapeFixer
+{
+public:
+ ShapeFixer(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+ std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
+
+ void visit(const model::operation::AbsNode &) override;
+ void visit(const model::operation::ArgMaxNode &) override;
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::DequantizeNode &) override;
+ void visit(const model::operation::MaxPool2DNode &) override;
+ void visit(const model::operation::MeanNode &) override;
+ void visit(const model::operation::AvgPool2DNode &) override;
+ void visit(const model::operation::ConcatNode &) override;
+ void visit(const model::operation::ExpNode &) override;
+ void visit(const model::operation::FloorNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::L2NormalizationNode &) override;
+ void visit(const model::operation::L2Pool2DNode &) override;
+ void visit(const model::operation::LocalResponseNormalizationNode &) override;
+ void visit(const model::operation::LogicalAndNode &) override;
+ void visit(const model::operation::LogicalNotNode &) override;
+ void visit(const model::operation::LogicalOrNode &) override;
+ void visit(const model::operation::LogisticNode &) override;
+ void visit(const model::operation::LSTMNode &) override;
+ void visit(const model::operation::MulNode &) override;
+ void visit(const model::operation::NegNode &) override;
+ void visit(const model::operation::PadNode &) override;
+ void visit(const model::operation::PReLUNode &) override;
+ void visit(const model::operation::ReduceSumNode &) override;
+ void visit(const model::operation::ReLUNode &) override;
+ void visit(const model::operation::ReLU1Node &) override;
+ void visit(const model::operation::ReLU6Node &) override;
+ void visit(const model::operation::ReshapeNode &) override;
+ void visit(const model::operation::ResizeBilinearNode &) override;
+ void visit(const model::operation::RNNNode &) override;
+ void visit(const model::operation::RSQRTNode &) override;
+ void visit(const model::operation::SqueezeNode &) override;
+ void visit(const model::operation::TanhNode &) override;
+ void visit(const model::operation::SoftmaxNode &) override;
+ void visit(const model::operation::SplitNode &) override;
+ void visit(const model::operation::SQRTNode &) override;
+ void visit(const model::operation::SquaredDifferenceNode &) override;
+ void visit(const model::operation::SubNode &) override;
+ void visit(const model::operation::StridedSliceNode &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+ void visit(const model::operation::TransposeNode &) override;
+ void visit(const model::operation::UnpackNode &) override;
+ void visit(const model::operation::AddNode &) override;
+ void visit(const model::operation::DivNode &) override;
+ void visit(const model::operation::ComparisonNode &) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_SHAPE_FIXER_H__
diff --git a/runtimes/neurun/backend/acl_neon/TensorBuilder.h b/runtimes/neurun/backend/acl_neon/TensorBuilder.h
new file mode 100644
index 000000000..3da30dfe9
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/TensorBuilder.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_TENSOR_BUILDER_H__
+#define __NEURUN_BACKEND_ACL_NEON_TENSOR_BUILDER_H__
+
+#include <TemplTensorBuilder.h>
+
+#include <backend/operand/Object.h>
+
+#include "operand/NETensor.h"
+#include "operand/NESubTensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
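+// TensorBuilder for this backend is the shared ACL template instantiated with
+// the NEON tensor, subtensor and object types.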
+using TensorBuilder = ::neurun::backend::acl_common::TemplTensorBuilder<
+ ::neurun::backend::acl_neon::operand::INETensor, operand::NETensor, operand::NESubTensor,
+ ::neurun::backend::operand::Object>;
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/backend/acl_neon/TensorManager.h b/runtimes/neurun/backend/acl_neon/TensorManager.h
new file mode 100644
index 000000000..3d5c40b5b
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/TensorManager.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_TENSOR_MANAGER_H__
+#define __NEURUN_BACKEND_ACL_NEON_TENSOR_MANAGER_H__
+
+#include <arm_compute/runtime/Allocator.h>
+#include <arm_compute/runtime/PoolManager.h>
+#include <arm_compute/runtime/OffsetLifetimeManager.h>
+#include <arm_compute/runtime/MemoryManagerOnDemand.h>
+#include <arm_compute/runtime/MemoryGroup.h>
+
+#include <AclMemoryManager.h>
+#include <AclLinearMemoryManager.h>
+#include <AclInternalBufferManager.h>
+#include <AclTensorManager.h>
+
+#include "operand/NETensor.h"
+#include "operand/NESubTensor.h"
+#include <backend/operand/Object.h>
+
+#include "util/logging.h"
+#include "util/ConfigSource.h" // for util::getConfigString
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+
+using MemoryManager =
+ ::neurun::backend::acl_common::AclMemoryManager<operand::INETensor, operand::NETensor,
+ operand::NESubTensor,
+ ::neurun::backend::operand::Object>;
+
+using LinearMemoryManager = ::neurun::backend::acl_common::AclLinearMemoryManager<
+ operand::INETensor, operand::NETensor, operand::NESubTensor, ::neurun::backend::operand::Object,
+ ::arm_compute::MemoryManagerOnDemand, ::arm_compute::PoolManager,
+ ::arm_compute::OffsetLifetimeManager, ::arm_compute::Allocator, ::arm_compute::MemoryGroup>;
+
+using InternalBufferManager = ::neurun::backend::acl_common::AclInternalBufferManager<
+ ::arm_compute::MemoryManagerOnDemand, ::arm_compute::PoolManager,
+ ::arm_compute::OffsetLifetimeManager, ::arm_compute::Allocator>;
+
+using TensorManager =
+ ::neurun::backend::acl_common::AclTensorManager<::neurun::backend::acl_neon::operand::INETensor,
+ operand::NETensor, operand::NESubTensor,
+ ::neurun::backend::operand::Object>;
+
+inline TensorManager *createTensorManager()
+{
+ const std::string executor_str = util::getConfigString(util::config::EXECUTOR);
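+  // With the Linear executor the execution order is fixed, so tensor lifetimes
+  // are known up front and an offset/lifetime-based manager can reuse memory.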
+ if (executor_str == "Linear")
+ {
+ VERBOSE(acl_neon_createTensorManager) << "AclTensorManager as Linear" << std::endl;
+ return new TensorManager(new MemoryManager(), new LinearMemoryManager(),
+ new InternalBufferManager());
+ }
+ else
+ {
+ VERBOSE(acl_neon_createTensorManager) << "AclTensorManager" << std::endl;
+ return new TensorManager(new MemoryManager(), new MemoryManager(), new InternalBufferManager());
+ }
+}
+
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_TENSOR_MANAGER_H__
diff --git a/runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.cc b/runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.cc
new file mode 100644
index 000000000..f2d88fa91
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.cc
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConcatLayer.h"
+
+#include "util/logging.h"
+
+namespace
+{
+
+inline bool matchSizeExceptAxis(const ::neurun::backend::acl_neon::operand::INETensor *t1,
+ const ::neurun::backend::acl_neon::operand::INETensor *t2,
+ uint32_t axis)
+{
+ assert(t1->num_dimensions() <= 4);
+ assert(t2->num_dimensions() <= 4);
+
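+  // ACL reports 1 for dimensions beyond num_dimensions(), so comparing a fixed
+  // four dimensions is safe for tensors of rank 4 or less.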
+ for (uint32_t i = 0; i < 4; i++)
+ {
+ if (axis == i)
+ continue;
+ if (t1->info()->dimension(i) != t2->info()->dimension(i))
+ return false;
+ }
+ return true;
+}
+
+} // namespace {anonymous}
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+namespace kernel
+{
+
+ConcatLayer::ConcatLayer()
+ : _input_allocs(), _output_alloc(nullptr), _axis(0), _input_type(arm_compute::DataType::F32)
+{
+ // DO NOTHING
+}
+
+template <typename T> bool ConcatLayer::concatenate()
+{
+ // Input and output size check
+ {
+    // NOTE Supports only tensors with rank 4 or less
+
+ uint32_t axis_sum = 0;
+
+ for (auto input : _input_allocs)
+ {
+ assert(_output_alloc->layout() == input->layout());
+ assert(matchSizeExceptAxis(_output_alloc, input, _axis));
+ axis_sum += input->info()->dimension(_axis);
+ }
+
+ assert(_output_alloc->info()->dimension(_axis) == axis_sum);
+ }
+
+ VERBOSE(Concat_RUN) << "START Concat" << std::endl;
+
+ // Perform operation
+ {
+ uint32_t axis_offset = 0;
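+    // Copy each input element-wise into the output, shifted along the concat
+    // axis by the accumulated size of the inputs processed so far.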
+
+ for (auto input : _input_allocs)
+ {
+ for (uint32_t i = 0; i < input->info()->dimension(0); i++)
+ {
+ for (uint32_t j = 0; j < input->info()->dimension(1); j++)
+ {
+ for (uint32_t k = 0; k < input->info()->dimension(2); k++)
+ {
+ for (uint32_t l = 0; l < input->info()->dimension(3); l++)
+ {
+ uint32_t io = (_axis == 0) ? axis_offset : 0;
+ uint32_t jo = (_axis == 1) ? axis_offset : 0;
+ uint32_t ko = (_axis == 2) ? axis_offset : 0;
+ uint32_t lo = (_axis == 3) ? axis_offset : 0;
+ T value = *reinterpret_cast<T *>(input->handle()->ptr_to_element({i, j, k, l}));
+ *reinterpret_cast<T *>(_output_alloc->handle()->ptr_to_element(
+ {i + io, j + jo, k + ko, l + lo})) = value;
+ }
+ }
+ }
+ }
+ if (_axis == 0)
+ axis_offset += input->info()->dimension(0);
+ if (_axis == 1)
+ axis_offset += input->info()->dimension(1);
+ if (_axis == 2)
+ axis_offset += input->info()->dimension(2);
+ if (_axis == 3)
+ axis_offset += input->info()->dimension(3);
+ }
+ }
+
+  VERBOSE(Concat_RUN) << "END Concat" << std::endl;
+
+ return true;
+}
+
+void ConcatLayer::configure(
+ const std::vector<::neurun::backend::acl_neon::operand::INETensor *> &input_allocs,
+ int32_t axis, ::neurun::backend::acl_neon::operand::INETensor *output_alloc)
+{
+ _input_allocs = input_allocs;
+ _output_alloc = output_alloc;
+
+ assert(axis < 4);
+
+ // TODO Handle when axis is negative
+ assert(axis >= 0);
+
+ _axis = axis;
+
+ _input_type = input_allocs[0]->data_type();
+}
+
+void ConcatLayer::run()
+{
+ if (_input_type == arm_compute::DataType::F32)
+ {
+ concatenate<float>();
+ }
+ else if (_input_type == arm_compute::DataType::QASYMM8)
+ {
+ concatenate<uint8_t>();
+ }
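+  // NOTE Types other than F32 and QASYMM8 are not handled yet; run() is a
+  //      silent no-op for them.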
+}
+
+} // namespace kernel
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.h b/runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.h
new file mode 100644
index 000000000..ee57ceb10
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/kernel/ConcatLayer.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_KERNEL_CONCAT_LAYER_H__
+#define __NEURUN_BACKEND_ACL_NEON_KERNEL_CONCAT_LAYER_H__
+
+#include <arm_compute/runtime/IFunction.h>
+#include <arm_compute/core/Types.h>
+
+#include "operand/INETensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+namespace kernel
+{
+
+//
+// neurun::backend::acl_neon::kernel::ConcatLayer
+// A naive implementation of ConcatLayer for ACL NEON
+//
+
+class ConcatLayer : public ::arm_compute::IFunction
+{
+public:
+ ConcatLayer();
+
+public:
+ void configure(const std::vector<::neurun::backend::acl_neon::operand::INETensor *> &input_allocs,
+ int32_t axis /* NNAPI tensor axis from NHWC order */,
+ ::neurun::backend::acl_neon::operand::INETensor *output_alloc);
+
+  void run() override;
+
+private:
+ template <typename T> bool concatenate();
+
+private:
+ std::vector<::neurun::backend::acl_neon::operand::INETensor *> _input_allocs;
+ ::neurun::backend::acl_neon::operand::INETensor *_output_alloc;
+ int32_t _axis;
+ arm_compute::DataType _input_type;
+};
+
+} // namespace kernel
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_KERNEL_CONCAT_LAYER_H__
diff --git a/runtimes/neurun/backend/acl_neon/operand/INETensor.h b/runtimes/neurun/backend/acl_neon/operand/INETensor.h
new file mode 100644
index 000000000..256806a61
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/operand/INETensor.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_OPERAND_I_NE_TENSOR_H__
+#define __NEURUN_BACKEND_ACL_NEON_OPERAND_I_NE_TENSOR_H__
+
+#include <arm_compute/core/ITensor.h>
+
+#include <IACLTensor.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+namespace operand
+{
+
+class INETensor : public acl_common::IACLTensor
+{
+public:
+ const arm_compute::ITensor *handle() const override = 0;
+ arm_compute::ITensor *handle() override = 0;
+};
+
+} // namespace operand
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_OPERAND_I_NE_TENSOR_H__
diff --git a/runtimes/neurun/backend/acl_neon/operand/NESubTensor.cc b/runtimes/neurun/backend/acl_neon/operand/NESubTensor.cc
new file mode 100644
index 000000000..a36af609c
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/operand/NESubTensor.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "NESubTensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+namespace operand
+{
+
+NESubTensor::NESubTensor(INETensor *parent, const arm_compute::TensorShape &tensor_shape,
+ const arm_compute::Coordinates &coords, size_t rank, bool extend_parent)
+ : _ne_sub_tensor(std::make_shared<arm_compute::SubTensor>(parent->handle(), tensor_shape,
+ coords, extend_parent)),
+ _rank{rank}
+{
+ // DO NOTHING
+}
+
+const arm_compute::SubTensor *NESubTensor::handle() const { return _ne_sub_tensor.get(); }
+
+arm_compute::SubTensor *NESubTensor::handle() { return _ne_sub_tensor.get(); }
+
+} // namespace operand
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_neon/operand/NESubTensor.h b/runtimes/neurun/backend/acl_neon/operand/NESubTensor.h
new file mode 100644
index 000000000..010e4deda
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/operand/NESubTensor.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_OPERAND_NE_SUB_TENSOR_H__
+#define __NEURUN_BACKEND_ACL_NEON_OPERAND_NE_SUB_TENSOR_H__
+
+#include <arm_compute/runtime/SubTensor.h>
+#include "INETensor.h"
+#include "compiler/SubTensorInfo.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+namespace operand
+{
+
+class NESubTensor : public INETensor
+{
+public:
+ NESubTensor() = delete;
+
+public:
+ NESubTensor(INETensor *parent, const arm_compute::TensorShape &tensor_shape,
+ const arm_compute::Coordinates &coords, size_t rank, bool extend_parent = false);
+
+public:
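+  // Return the frontend rank; the underlying ACL shape may have been extended,
+  // so the rank is kept separately.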
+ size_t num_dimensions() const final { return _rank; }
+
+public:
+ const arm_compute::SubTensor *handle() const override;
+ arm_compute::SubTensor *handle() override;
+
+public:
+ // This method is used to prevent the use of memcpy for SubTensor
+ bool has_padding() const override { return true; }
+
+private:
+ std::shared_ptr<arm_compute::SubTensor> _ne_sub_tensor;
+ size_t _rank;
+};
+
+} // namespace operand
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_OPERAND_NE_SUB_TENSOR_H__
diff --git a/runtimes/neurun/backend/acl_neon/operand/NETensor.cc b/runtimes/neurun/backend/acl_neon/operand/NETensor.cc
new file mode 100644
index 000000000..756403ef1
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/operand/NETensor.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <arm_compute/runtime/Memory.h>
+#include <arm_compute/runtime/MemoryRegion.h>
+#include "NETensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+namespace operand
+{
+
+NETensor::NETensor(const arm_compute::TensorInfo &info, size_t rank)
+ : _ne_tensor(std::make_shared<arm_compute::Tensor>()), _rank{rank}
+{
+ allocator()->init(info);
+}
+
+const arm_compute::Tensor *NETensor::handle() const { return _ne_tensor.get(); }
+
+arm_compute::Tensor *NETensor::handle() { return _ne_tensor.get(); }
+
+arm_compute::TensorAllocator *NETensor::allocator() { return _ne_tensor->allocator(); }
+
+} // namespace operand
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/acl_neon/operand/NETensor.h b/runtimes/neurun/backend/acl_neon/operand/NETensor.h
new file mode 100644
index 000000000..298a82054
--- /dev/null
+++ b/runtimes/neurun/backend/acl_neon/operand/NETensor.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ACL_NEON_OPERAND_NE_TENSOR_H__
+#define __NEURUN_BACKEND_ACL_NEON_OPERAND_NE_TENSOR_H__
+
+#include <arm_compute/core/TensorInfo.h>
+#include <arm_compute/runtime/Tensor.h>
+#include <arm_compute/runtime/TensorAllocator.h>
+#include "INETensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace acl_neon
+{
+namespace operand
+{
+
+class NETensor : public INETensor
+{
+public:
+ NETensor() = delete;
+
+public:
+ NETensor(const arm_compute::TensorInfo &info, size_t rank);
+
+public:
+ size_t num_dimensions() const final { return _rank; }
+
+public:
+ const arm_compute::Tensor *handle() const override;
+ arm_compute::Tensor *handle() override;
+
+public:
+ arm_compute::TensorAllocator *allocator();
+
+private:
+ std::shared_ptr<arm_compute::Tensor> _ne_tensor;
+ size_t _rank;
+};
+
+} // namespace operand
+} // namespace acl_neon
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ACL_NEON_OPERAND_NE_TENSOR_H__
diff --git a/runtimes/neurun/backend/cpu/Backend.h b/runtimes/neurun/backend/cpu/Backend.h
new file mode 100644
index 000000000..94624ee6d
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/Backend.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_BACKEND_H__
+#define __NEURUN_BACKEND_CPU_BACKEND_H__
+
+#include <memory>
+#include <backend/Backend.h>
+#include <model/Operands.h>
+
+#include "Config.h"
+#include "ConstantInitializer.h"
+#include "KernelGenerator.h"
+#include "ShapeFixer.h"
+#include "TensorBuilder.h"
+#include "backend/CustomKernelRegistry.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class Backend : public ::neurun::backend::Backend
+{
+public:
+ Backend() : _config{std::make_shared<Config>()} {}
+
+ std::shared_ptr<IConfig> config() const override { return _config; }
+
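+  // A BackendContext bundles what the compiler needs from this backend:
+  // tensor builder, constant initializer, kernel generator and shape fixer.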
+ std::unique_ptr<BackendContext>
+ newContext(const model::Operands &operands,
+ const std::shared_ptr<custom::KernelRegistry> &registry) const override
+ {
+ auto tensor_builder = std::make_shared<TensorBuilder>();
+ return std::unique_ptr<BackendContext>{new BackendContext{
+ this, tensor_builder, std::make_shared<ConstantInitializer>(operands, tensor_builder),
+ std::make_shared<KernelGenerator>(operands, tensor_builder, registry),
+ std::make_shared<ShapeFixer>(operands, tensor_builder)}};
+ }
+
+private:
+ std::shared_ptr<IConfig> _config;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_BACKEND_H__
diff --git a/runtimes/neurun/backend/cpu/CMakeLists.txt b/runtimes/neurun/backend/cpu/CMakeLists.txt
new file mode 100644
index 000000000..c7b9b4d6b
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/CMakeLists.txt
@@ -0,0 +1,29 @@
+set(LIB_NEURUN_BACKEND_CPU neurun_backend_cpu)
+
+file(GLOB_RECURSE SOURCES "*.cc")
+file(GLOB_RECURSE TESTS "*.test.cc")
+list(REMOVE_ITEM SOURCES ${TESTS})
+
+add_library(${LIB_NEURUN_BACKEND_CPU} SHARED ${SOURCES})
+
+target_link_libraries(${LIB_NEURUN_BACKEND_CPU} PUBLIC nnfw_lib_cpp14)
+target_link_libraries(${LIB_NEURUN_BACKEND_CPU} PRIVATE nnfw_lib_misc nnfw_lib_cker)
+target_link_libraries(${LIB_NEURUN_BACKEND_CPU} PRIVATE neurun_core)
+target_link_libraries(${LIB_NEURUN_BACKEND_CPU} PRIVATE nnfw_common)
+target_link_libraries(${LIB_NEURUN_BACKEND_CPU} PRIVATE nnfw_coverage)
+
+set_target_properties(${LIB_NEURUN_BACKEND_CPU} PROPERTIES OUTPUT_NAME backend_cpu)
+
+install(TARGETS ${LIB_NEURUN_BACKEND_CPU} DESTINATION lib)
+
+# Unit Tests
+set(TEST_NEURUN_BACKEND_CPU test_neurun_backend_cpu)
+
+add_executable(${TEST_NEURUN_BACKEND_CPU} ${TESTS})
+
+target_link_libraries(${TEST_NEURUN_BACKEND_CPU} ${LIB_NEURUN_BACKEND_CPU})
+target_link_libraries(${TEST_NEURUN_BACKEND_CPU} gtest gtest_main ${LIB_PTHREAD})
+target_include_directories(${TEST_NEURUN_BACKEND_CPU} PRIVATE ${NEURUN_INCLUDE_DIR})
+
+add_test(${TEST_NEURUN_BACKEND_CPU} ${TEST_NEURUN_BACKEND_CPU})
+install(TARGETS ${TEST_NEURUN_BACKEND_CPU} DESTINATION unittest)
diff --git a/runtimes/neurun/backend/cpu/Config.cc b/runtimes/neurun/backend/cpu/Config.cc
new file mode 100644
index 000000000..1a487f724
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/Config.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Config.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+void Config::initialize()
+{
+ // DO NOTHING
+}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/Config.h b/runtimes/neurun/backend/cpu/Config.h
new file mode 100644
index 000000000..ac55d987e
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/Config.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_CONFIG_H__
+#define __NEURUN_BACKEND_CPU_CONFIG_H__
+
+#include <backend/IConfig.h>
+#include <cpp14/memory.h>
+#include <util/ITimer.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class Config : public IConfig
+{
+public:
+ std::string id() override { return "cpu"; }
+ void initialize() override;
+ bool SupportSubTensorAlloc() override
+ {
+ // NOTE CPU allocator cannot support subtensor allocation yet
+ return false;
+ }
+
+ std::unique_ptr<util::ITimer> timer() override
+ {
+ return nnfw::cpp14::make_unique<util::CPUTimer>();
+ }
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_CONFIG_H__
diff --git a/runtimes/neurun/backend/cpu/ConstantInitializer.cc b/runtimes/neurun/backend/cpu/ConstantInitializer.cc
new file mode 100644
index 000000000..cff9fcfa2
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/ConstantInitializer.cc
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConstantInitializer.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+ConstantInitializer::ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _operands{operands}, _tensor_builder{tensor_builder}
+{
+ // DO NOTHING
+}
+
+void ConstantInitializer::run()
+{
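+  // Apply each registered initializer to its operand once, then drop the
+  // registrations so constants are not re-initialized by a later run.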
+ for (const auto &it : _init_map)
+ {
+ const auto &ind = it.first;
+ const auto &fn = it.second;
+
+ const auto &model_obj = _operands.at(ind);
+ auto tensor_obj = _tensor_builder->wrapTensor(ind);
+ fn(model_obj, *tensor_obj);
+ }
+
+ _init_map.clear();
+}
+
+void ConstantInitializer::visit(const model::operation::Conv2DNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::Conv2DNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerCopyInitializer(kernel_index, kernel_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::Conv2DNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::DepthwiseConv2DNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerCopyInitializer(kernel_index, kernel_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+void ConstantInitializer::visit(const model::operation::FullyConnectedNode &node)
+{
+ const auto &weight_index = node.getInputs().at(model::operation::FullyConnectedNode::WEIGHT);
+ const auto &weight_obj = _operands.at(weight_index);
+ registerCopyInitializer(weight_index, weight_obj);
+
+ const auto &bias_index = node.getInputs().at(model::operation::FullyConnectedNode::BIAS);
+ const auto &bias_obj = _operands.at(bias_index);
+ registerCopyInitializer(bias_index, bias_obj);
+}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/ConstantInitializer.h b/runtimes/neurun/backend/cpu/ConstantInitializer.h
new file mode 100644
index 000000000..91f1d50b9
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/ConstantInitializer.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_CONSTANT_INITIALIZER_H__
+#define __NEURUN_BACKEND_CPU_CONSTANT_INITIALIZER_H__
+
+#include <backend/IConstantInitializer.h>
+#include <model/Operands.h>
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class ConstantInitializer : public IConstantInitializer
+{
+public:
+ ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+public:
+ void run() override;
+
+public:
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+
+private:
+ const model::Operands &_operands;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_CONSTANT_INITIALIZER_H__
diff --git a/runtimes/neurun/backend/cpu/KernelGenerator.cc b/runtimes/neurun/backend/cpu/KernelGenerator.cc
new file mode 100644
index 000000000..61de75493
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/KernelGenerator.cc
@@ -0,0 +1,455 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "KernelGenerator.h"
+
+#include <stdexcept>
+
+#include "cpp14/memory.h"
+#include "util/Padding.h"
+#include "kernel/OperationUtils.h"
+#include "kernel/ConvolutionLayer.h"
+#include "kernel/AvgPoolLayer.h"
+#include "kernel/MaxPoolLayer.h"
+#include "kernel/ConcatLayer.h"
+#include "kernel/FullyConnectedLayer.h"
+#include "kernel/ReshapeLayer.h"
+#include "kernel/SoftMaxLayer.h"
+#include "kernel/PermuteLayer.h"
+#include "kernel/DepthwiseConvolutionLayer.h"
+#include "kernel/AddLayer.h"
+
+#include <backend/Backend.h>
+#include <backend/IConfig.h>
+
+#include "util/logging.h"
+
+#include "util/Utils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+KernelGenerator::KernelGenerator(const neurun::model::Operands &operand_ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder,
+ const std::shared_ptr<custom::KernelRegistry> &kernel_registry)
+ : _ctx(operand_ctx), _tensor_builder(tensor_builder), _kernel_registry(kernel_registry),
+ _current_subg_layout(model::Layout::UNKNOWN)
+{
+ // DO NOTHING
+}
+
+void KernelGenerator::visit(const model::Subgraph &subgraph)
+{
+ _current_subg_layout = subgraph.getLayout();
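+  // Let the tensor builder hook in before and after each operation (e.g. to
+  // track operand use for memory planning) while kernels are generated.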
+ for (const auto &e : subgraph.operations())
+ {
+ const auto &node = *(e.node);
+ _tensor_builder->preVisit(node);
+ node.accept(*this);
+ _tensor_builder->postVisit(node);
+ }
+}
+
+void KernelGenerator::visit(const model::operation::Conv2DNode &node)
+{
+ using model::operation::Conv2DNode;
+
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(Conv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
+
+ const auto stride = node.param().stride;
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ // Kernel format is [depth_out, kernel_height, kernel_width, depth_in].
+ const auto &ker_shape = _ctx.at(ker_index).shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+ const auto padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
+ stride, ker_width, ker_height);
+ const auto activation = node.param().activation;
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ofm_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ifm_index), _current_subg_layout);
+ const auto ker_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ker_index), model::Layout::UNKNOWN);
+ const auto bias_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(bias_index), model::Layout::UNKNOWN);
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index);
+ auto ifm_alloc = _tensor_builder->at(ifm_index);
+ auto ker_alloc = _tensor_builder->at(ker_index);
+ auto bias_alloc = _tensor_builder->at(bias_index);
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::ConvolutionLayer>();
+
+ fn->configure(ifm_alloc->buffer(), ifm_backend_shape, ker_alloc->buffer(), ker_backend_shape,
+ bias_alloc->buffer(), bias_backend_shape, padding.left, padding.right, padding.top,
+ padding.bottom, stride.horizontal, stride.vertical, activation, ofm_alloc->buffer(),
+ ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::DepthwiseConv2DNode &node)
+{
+ using model::operation::DepthwiseConv2DNode;
+
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(DepthwiseConv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(DepthwiseConv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(DepthwiseConv2DNode::Input::BIAS)};
+
+ const auto stride = node.param().stride;
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ // Kernel format is [1, kernel_height, kernel_width, depth_out].
+ const auto &ker_shape = _ctx.at(ker_index).shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+ const auto padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
+ stride, ker_width, ker_height);
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ofm_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ifm_index), _current_subg_layout);
+ const auto ker_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ker_index), model::Layout::UNKNOWN);
+ const auto bias_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(bias_index), model::Layout::UNKNOWN);
+
+ const auto multiplier = node.param().multiplier;
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index);
+ auto ifm_alloc = _tensor_builder->at(ifm_index);
+ auto ker_alloc = _tensor_builder->at(ker_index);
+ auto bias_alloc = _tensor_builder->at(bias_index);
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::DepthwiseConvolutionLayer>();
+
+ fn->configure(ifm_alloc->buffer(), ifm_backend_shape, ker_alloc->buffer(), ker_backend_shape,
+ bias_alloc->buffer(), bias_backend_shape, padding.left, padding.right, padding.top,
+ padding.bottom, stride.horizontal, stride.vertical, multiplier, activation,
+ ofm_alloc->buffer(), ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::MaxPool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::MaxPool2DNode::Input::INPUT)};
+
+ const auto kh = node.param().kh;
+ const auto kw = node.param().kw;
+
+ const auto stride = node.param().stride;
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ofm_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ifm_index), _current_subg_layout);
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::MaxPoolLayer>();
+
+ fn->configure(ifm_alloc->buffer(), ifm_backend_shape, padding.left, padding.right, padding.top,
+ padding.bottom, stride.horizontal, stride.vertical, kw, kh, activation,
+ ofm_alloc->buffer(), ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::AvgPool2DNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::AvgPool2DNode::Input::INPUT)};
+
+ const auto kh = node.param().kh;
+ const auto kw = node.param().kw;
+ const auto stride = node.param().stride;
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ const auto padding =
+ neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape, stride, kw, kh);
+ const auto activation = node.param().activation;
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ofm_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ifm_index), _current_subg_layout);
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto ifm_alloc = _tensor_builder->at(ifm_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::AvgPoolLayer>();
+
+ fn->configure(ifm_alloc->buffer(), ifm_backend_shape, padding.left, padding.right, padding.top,
+ padding.bottom, stride.horizontal, stride.vertical, kw, kh, activation,
+ ofm_alloc->buffer(), ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::ConcatNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+
+ const auto rank = _ctx.at(ofm_index).shape().rank();
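+  // Normalize the frontend concat axis (defined against the frontend layout)
+  // to the axis the kernel uses for the current subgraph layout.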
+ const auto axis =
+ ::neurun::backend::cpu::kernel::getAxis(rank, node.param().axis, _current_subg_layout);
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ofm_index), _current_subg_layout);
+ std::vector<::neurun::backend::cpu::kernel::Shape> ifm_backend_shapes;
+ for (auto &in_idx : node.getInputs())
+ ifm_backend_shapes.emplace_back(
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(in_idx), _current_subg_layout));
+
+ auto output_alloc = _tensor_builder->at(ofm_index).get();
+
+ std::vector<const uint8_t *> input_buffers;
+ for (auto &ifm_idx : node.getInputs())
+ input_buffers.emplace_back(_tensor_builder->at(ifm_idx).get()->buffer());
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::ConcatLayer>();
+
+ fn->configure(input_buffers, ifm_backend_shapes, axis, output_alloc->buffer(), ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::FullyConnectedNode &node)
+{
+ using model::operation::FullyConnectedNode;
+
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
+ const auto weight_index{node.getInputs().at(FullyConnectedNode::Input::WEIGHT)};
+ const auto bias_index{node.getInputs().at(FullyConnectedNode::Input::BIAS)};
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(output_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(input_index), _current_subg_layout);
+ const auto weight_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(weight_index), model::Layout::UNKNOWN);
+ const auto bias_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(bias_index), model::Layout::UNKNOWN);
+
+ const auto activation = node.param().activation;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+ auto weight_alloc = _tensor_builder->at(weight_index).get();
+ auto bias_alloc = _tensor_builder->at(bias_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::FullyConnectedLayer>();
+
+ fn->configure(input_alloc->buffer(), ifm_backend_shape, weight_alloc->buffer(),
+ weight_backend_shape, bias_alloc->buffer(), bias_backend_shape, activation,
+ output_alloc->buffer(), ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::MulNode &) { throw std::runtime_error("NYI"); }
+
+void KernelGenerator::visit(const model::operation::ReshapeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReshapeNode::Input::INPUT)};
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(output_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(input_index), _current_subg_layout);
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::ReshapeLayer>();
+
+ fn->configure(input_alloc->buffer(), ifm_backend_shape, output_alloc->buffer(),
+ ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::SoftmaxNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::SoftmaxNode::Input::INPUT)};
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(output_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(input_index), _current_subg_layout);
+
+ const auto beta = node.param().beta;
+
+ auto output_alloc = _tensor_builder->at(output_index).get();
+ auto input_alloc = _tensor_builder->at(input_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::SoftMaxLayer>();
+
+ fn->configure(input_alloc->buffer(), ifm_backend_shape, beta, output_alloc->buffer(),
+ ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::AddNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(ofm_index), _current_subg_layout);
+ const auto lhs_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(lhs_index), _current_subg_layout);
+ const auto rhs_backend_shape =
+ ::neurun::backend::cpu::kernel::getShape(_ctx.at(rhs_index), _current_subg_layout);
+
+ const auto activation = node.param().activation;
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index).get();
+ auto lhs_alloc = _tensor_builder->at(lhs_index).get();
+ auto rhs_alloc = _tensor_builder->at(rhs_index).get();
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::AddLayer>();
+
+ fn->configure(lhs_alloc->buffer(), lhs_backend_shape, rhs_alloc->buffer(), rhs_backend_shape,
+ activation, ofm_alloc->buffer(), ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::PermuteNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(0)};
+
+ const auto &shape = _ctx.at(output_index).shape();
+ const auto input_backend_ctx = node.param().input_backend_ctx;
+ const auto output_backend_ctx = node.param().output_backend_ctx;
+ const auto data_type = node.getDataType();
+
+ output_backend_ctx->tensor_builder->preVisit(node);
+
+ auto output_object = output_backend_ctx->tensor_builder->wrapTensor(output_index);
+ auto input_object = input_backend_ctx->tensor_builder->wrapTensor(input_index);
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::cpu::kernel::PermuteLayer>();
+
+ // TODO Support NCHW frontend
+ auto out_shape = shape;
+ if (shape.rank() == 4 && output_object->ptr()->layout() == model::Layout::NCHW)
+ {
+ out_shape.dim(1) = shape.dim(3);
+ out_shape.dim(2) = shape.dim(1);
+ out_shape.dim(3) = shape.dim(2);
+ }
+
+ const auto permute_type = node.getPermuteType();
+  // Infer the permutation type from the actual layouts to validate the type recorded in the node
+ const auto inferPermuteType = [&]() {
+ if (input_object->ptr()->layout() == model::Layout::NHWC &&
+ output_object->ptr()->layout() == model::Layout::NCHW)
+ {
+ return model::operation::PermuteNode::Type::NHWC_TO_NCHW;
+ }
+ else if (input_object->ptr()->layout() == model::Layout::NCHW &&
+ output_object->ptr()->layout() == model::Layout::NHWC)
+ {
+ return model::operation::PermuteNode::Type::NCHW_TO_NHWC;
+ }
+ else
+ {
+ return model::operation::PermuteNode::Type::COPY;
+ }
+ }();
+ UNUSED_RELEASE(inferPermuteType);
+ assert(permute_type == inferPermuteType);
+
+ fn->configure(input_object, output_object, out_shape, permute_type, data_type);
+
+ input_backend_ctx->tensor_builder->postVisit(node);
+
+ _execution_builder->append(std::move(fn));
+}
+
+void KernelGenerator::visit(const model::operation::CustomNode &node)
+{
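+  // Gather shape/type info and raw buffers for every input and output, then
+  // build the kernel registered for this custom op id and hand them over.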
+ auto get_type_info = [this](const model::Operand &operand) -> custom::TypeInfo {
+ auto backendShape = ::neurun::backend::cpu::kernel::getShape(operand, _current_subg_layout);
+
+ custom::Shape shape(backendShape.dimensions.size());
+ for (size_t d = 0; d < backendShape.dimensions.size(); ++d)
+ {
+ shape.dim(d) = backendShape.dimensions[d];
+ }
+
+ return {shape, backendShape.type};
+ };
+
+ auto fill_op_info = [&](const model::OperandIndexSequence &opSeq,
+ std::vector<custom::TypeInfo> &types, std::vector<void *> &allocs) {
+ for (auto &idx : opSeq)
+ {
+ const auto &operand = _ctx.at(idx);
+ // TODO make sure using `_current_subg_layout` is correct for custom operations
+ types.emplace_back(get_type_info(operand));
+ auto in_alloc = _tensor_builder->at(idx)->buffer();
+ allocs.emplace_back(in_alloc);
+ }
+ };
+
+ custom::Kernel::CustomKernelConfigParams params{};
+
+ fill_op_info(node.getInputs(), params.input_types, params.input_allocations);
+ fill_op_info(node.getOutputs(), params.output_types, params.output_allocations);
+
+ params.userdata = node.userdata().data;
+ params.userdata_size = node.userdata().size;
+
+ auto fn = _kernel_registry->buildKernelForOp(node.id());
+
+ fn->configure(std::move(params));
+
+ _execution_builder->append(std::move(fn));
+}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/KernelGenerator.h b/runtimes/neurun/backend/cpu/KernelGenerator.h
new file mode 100644
index 000000000..178540e3e
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/KernelGenerator.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_GENERATOR_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_GENERATOR_H__
+
+#include "backend/IKernelGenerator.h"
+#include "model/Operands.h"
+#include "operand/Tensor.h"
+#include "backend/CustomKernelRegistry.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class KernelGenerator : public IKernelGenerator
+{
+public:
+ KernelGenerator(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder,
+ const std::shared_ptr<custom::KernelRegistry> &kernel_registry);
+
+ using IKernelGenerator::visit;
+
+ void visit(const model::Subgraph &) override;
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::MaxPool2DNode &) override;
+ void visit(const model::operation::AvgPool2DNode &) override;
+ void visit(const model::operation::ConcatNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::MulNode &) override;
+ void visit(const model::operation::ReshapeNode &) override;
+ void visit(const model::operation::SoftmaxNode &) override;
+ void visit(const model::operation::AddNode &) override;
+ void visit(const model::operation::PermuteNode &) override;
+ void visit(const model::operation::CustomNode &node) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+ std::shared_ptr<custom::KernelRegistry> _kernel_registry;
+ model::Layout _current_subg_layout;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_GENERATOR_H__
diff --git a/runtimes/neurun/backend/cpu/MemoryManager.cc b/runtimes/neurun/backend/cpu/MemoryManager.cc
new file mode 100644
index 000000000..192a6db36
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/MemoryManager.cc
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MemoryManager.h"
+
+#include <cassert>
+
+#include "MemoryPlannerFactory.h"
+#include <backend/operand/Object.h>
+#include "util/logging.h"
+#include "util/ConfigSource.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+MemoryManager::MemoryManager() : _mem_planner{createMemoryPlanner()}
+{
+ // DO NOTHING
+}
+
+IMemoryPlanner *MemoryManager::createMemoryPlanner()
+{
+ auto planner_id = util::getConfigString(util::config::CPU_MEMORY_PLANNER);
+ return MemoryPlannerFactory::instance().create(planner_id);
+}
+
+void MemoryManager::buildTensor(const model::OperandIndex &ind, const model::OperandInfo &info)
+{
+ auto tensor = std::make_shared<operand::Tensor>(info);
+ _tensors[ind] = tensor;
+}
+
+void MemoryManager::claimPlan(const model::OperandIndex &ind, uint32_t size)
+{
+ _mem_planner->claim(ind, size);
+}
+
+void MemoryManager::releasePlan(const model::OperandIndex &ind) { _mem_planner->release(ind); }
+
+void MemoryManager::allocate(void)
+{
+ _mem_alloc = std::make_shared<Allocator>(_mem_planner->capacity());
+ assert(_mem_alloc->base());
+
+ for (auto &mem_plan : _mem_planner->memory_plans())
+ {
+ auto ind = mem_plan.first;
+ auto mem_blk = mem_plan.second;
+
+ uint8_t *buffer = _mem_alloc->base() + mem_blk.offset;
+ auto tensor = _tensors[ind];
+ tensor->setBuffer(buffer);
+
+ VERBOSE(CPU_MEMORYMANAGER) << "TENSOR(#" << ind.value() << "): " << static_cast<void *>(buffer)
+ << std::endl;
+
+    // NOTE Buffers must be assigned to the tensors here, before kernel generation;
+    // otherwise kernel generation would cause a segmentation fault.
+    // See also: comments in TensorBuilder::allocate.
+ }
+}
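+
+// Illustrative layout (hypothetical plans): if the planner produced
+//   #0 -> {offset 0, size 10} and #1 -> {offset 10, size 20},
+// then tensor #1's buffer above becomes _mem_alloc->base() + 10.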
+
+std::shared_ptr<backend::operand::IObject> MemoryManager::wrapTensor(const model::OperandIndex &ind)
+{
+ if (_objects.find(ind) != _objects.end())
+ {
+ return _objects.at(ind);
+ }
+ else
+ {
+ return _objects[ind] = std::make_shared<::neurun::backend::operand::Object>(_tensors.at(ind));
+ }
+}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/MemoryManager.h b/runtimes/neurun/backend/cpu/MemoryManager.h
new file mode 100644
index 000000000..6b225edcb
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/MemoryManager.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_MEMORY_MANAGER_H__
+#define __NEURUN_BACKEND_CPU_MEMORY_MANAGER_H__
+
+#include "backend/IMemoryManager.h"
+#include "MemoryPlanner.h"
+#include "operand/Tensor.h"
+#include <backend/operand/Object.h>
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class MemoryManager : public backend::IMemoryManager
+{
+public:
+ MemoryManager();
+ virtual ~MemoryManager() = default;
+
+ void allocate(void) override;
+ void deallocate(void) override { _mem_alloc->release(); }
+
+ void buildTensor(const model::OperandIndex &ind, const model::OperandInfo &info);
+ void claimPlan(const model::OperandIndex &ind, uint32_t size);
+ void releasePlan(const model::OperandIndex &ind);
+
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &tensors(void) { return _tensors; }
+
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind);
+
+private:
+ IMemoryPlanner *createMemoryPlanner();
+
+private:
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> _tensors;
+ model::OperandIndexMap<std::shared_ptr<::neurun::backend::operand::Object>> _objects;
+ model::OperandIndexMap<Block> _tensor_mem_map;
+ std::shared_ptr<IMemoryPlanner> _mem_planner;
+ std::shared_ptr<Allocator> _mem_alloc;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_MEMORY_MANAGER_H__
diff --git a/runtimes/neurun/backend/cpu/MemoryPlanner.cc b/runtimes/neurun/backend/cpu/MemoryPlanner.cc
new file mode 100644
index 000000000..8eaf7bb9e
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/MemoryPlanner.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MemoryPlanner.h"
+#include "util/logging.h"
+#include <cassert>
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+Allocator::Allocator(uint32_t capacity)
+{
+ _base = nnfw::cpp14::make_unique<uint8_t[]>(capacity);
+
+ VERBOSE(ALLOC) << "allocation capacity: " << capacity << std::endl;
+ VERBOSE(ALLOC) << "base pointer: " << static_cast<void *>(_base.get()) << std::endl;
+}
+
+void BumpPlanner::claim(const model::OperandIndex &ind, size_t size)
+{
+ assert(size != 0);
+
+ Block blk{_capacity, size};
+ _mem_plans[ind] = blk;
+ _capacity += size;
+
+ VERBOSE(BP_PLANNER) << "CLAIM(#" << ind.value() << "): " << blk.offset << ", " << blk.size
+ << std::endl;
+}
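+
+// Illustrative sequence (mirrors MemoryPlanner.test.cc): claims are simply
+// appended, so offsets and capacity only ever grow, and release() reclaims
+// nothing:
+//   claim(#0, 10) -> offset  0, capacity 10
+//   claim(#1, 20) -> offset 10, capacity 30
+//   claim(#2, 30) -> offset 30, capacity 60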
+
+void BumpPlanner::release(const model::OperandIndex &ind)
+{
+ VERBOSE(BP_PLANNER) << "RELEASE(#" << ind.value() << "): "
+                      << "does NOTHING" << std::endl;
+}
+
+// There are some assumptions for claiming memory (i.e., making a memory reservation):
+// 1. About _claim_table (std::map).
+//   - The table is a std::map, so its values (model::OperandIndex) are always
+//     sorted by key (base_offset).
+//   - claim() inserts a key/value pair into _claim_table, and release() removes
+//     that key/value pair from _claim_table.
+//   - _claim_table reflects the memory status at a certain point in time. Therefore:
+//     - If _claim_table holds an offset with a certain size at that point in time,
+//       the place at that offset has already been claimed (it cannot be claimed
+//       now; a new place must be found).
+//     - If _claim_table has no element covering an offset at that point in time,
+//       the place at that offset can be claimed.
+// 2. In the loop over _claim_table, the current claim_base_offset is always greater
+//    than the previous claim_base_offset.
+void FirstFitPlanner::claim(const model::OperandIndex &ind, size_t size)
+{
+ assert(size != 0);
+
+ // Find the right position for claiming
+ uint32_t next_offset = 0;
+ for (auto &mem_claim : _claim_table)
+ {
+ auto claimed_base_offset = mem_claim.first;
+ auto claimed_size = _mem_plans[mem_claim.second].size;
+ if (next_offset + size <= claimed_base_offset)
+ {
+ break;
+ }
+ else
+ {
+ next_offset = claimed_base_offset + claimed_size;
+ }
+ }
+
+ // Now next_offset is set to the proper offset
+ _claim_table[next_offset] = ind;
+ _mem_plans[ind] = {next_offset, size};
+
+ VERBOSE(FF_PLANNER) << "claim(#" << ind.value() << "): [+" << next_offset << ", " << size << "sz]"
+ << std::endl;
+
+ if (_capacity < next_offset + size)
+ {
+ _capacity = next_offset + size;
+ }
+}
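+
+// Illustrative sequence (mirrors MemoryPlanner.test.cc): a freed hole is reused
+// when the requested size fits; otherwise the claim is appended at the end:
+//   claim(#0, 10) -> [ 0, 10)   claim(#1, 20) -> [10, 30)   claim(#2, 30) -> [30, 60)
+//   release(#0)   -> hole at [ 0, 10)
+//   claim(#3, 20) -> [60, 80)   // does not fit the 10-byte hole
+//   claim(#4,  5) -> [ 0,  5)   // fits into the hole left by #0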
+
+void FirstFitPlanner::release(const model::OperandIndex &ind)
+{
+ for (auto it = _claim_table.cbegin(); it != _claim_table.cend(); ++it)
+ {
+ if (it->second == ind)
+ {
+ uint32_t offset = it->first;
+ uint32_t index = ind.value();
+ uint32_t size = _mem_plans[ind].size;
+
+ _claim_table.erase(it);
+
+ VERBOSE(FF_PLANNER) << "release(#" << index << "): [+" << offset << ", " << size << "sz]"
+ << std::endl;
+ return;
+ }
+ }
+ assert(!"Cannot release for given index. It has been not claimed or released already.");
+}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/MemoryPlanner.h b/runtimes/neurun/backend/cpu/MemoryPlanner.h
new file mode 100644
index 000000000..eaa4299ea
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/MemoryPlanner.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file        MemoryPlanner.h
+ * @brief       This file contains Memory Planning related classes
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_MEMORY_PLANNER_H__
+#define __NEURUN_BACKEND_CPU_MEMORY_PLANNER_H__
+
+#include <map>
+#include <cpp14/memory.h>
+
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+/**
+ * @brief Structure to have memory offset and size
+ */
+struct Block
+{
+ uint32_t offset;
+ size_t size;
+};
+
+/**
+ * @brief Class to allocate memory
+ */
+class Allocator
+{
+public:
+ Allocator(uint32_t capacity);
+ /**
+ * @brief Get memory base pointer
+ * @return base pointer
+ */
+ uint8_t *base() const { return _base.get(); }
+ void release() { _base.reset(); }
+
+private:
+ std::unique_ptr<uint8_t[]> _base;
+};
+
+/**
+ * @brief Interface to plan memory
+ */
+struct IMemoryPlanner
+{
+ using MemoryPlans = model::OperandIndexMap<Block>;
+
+ /**
+ * @brief Claim memory for operand
+ * @param[in] index The operand index
+ * @param[in] size The size of the memory
+ */
+ virtual void claim(const model::OperandIndex &, size_t) = 0;
+ /**
+ * @brief Release memory for operand
+ * @param[in] index The operand index
+ */
+ virtual void release(const model::OperandIndex &) = 0;
+ /**
+ * @brief Get capacity for memory planning
+ * @return The value of capacity
+ */
+ virtual uint32_t capacity() = 0;
+ /**
+ * @brief Get MemoryPlans
+ * @return MemoryPlans
+ */
+ virtual MemoryPlans &memory_plans() = 0;
+
+ virtual ~IMemoryPlanner() = default;
+};
+
+/**
+ * @brief Class to plan memory in a bump (append-only) fashion
+ */
+class BumpPlanner : public IMemoryPlanner
+{
+public:
+ /**
+   * @brief Claim memory for an operand in a bump fashion
+ * @param[in] index The operand index
+ * @param[in] size The size of the memory
+ */
+ void claim(const model::OperandIndex &, size_t) override;
+ /**
+   * @brief Release memory for an operand in a bump fashion
+ * @param[in] index The operand index
+ */
+ void release(const model::OperandIndex &) override;
+ /**
+ * @brief Get capacity for memory planning
+ * @return The value of capacity
+ */
+ uint32_t capacity() override { return _capacity; }
+ /**
+ * @brief Get MemoryPlans
+ * @return MemoryPlans
+ */
+ MemoryPlans &memory_plans() override { return _mem_plans; }
+
+private:
+ uint32_t _capacity = 0;
+ MemoryPlans _mem_plans;
+};
+
+/**
+ * @brief Class to plan memory using the first-fit strategy
+ */
+class FirstFitPlanner : public IMemoryPlanner
+{
+public:
+ /**
+   * @brief Claim memory for an operand using the first-fit strategy
+ * @param[in] index The operand index
+ * @param[in] size The size of the memory
+ */
+ void claim(const model::OperandIndex &, size_t) override;
+ /**
+   * @brief Release memory for an operand using the first-fit strategy
+ * @param[in] index The operand index
+ */
+ void release(const model::OperandIndex &) override;
+ /**
+ * @brief Get capacity for memory planning
+ * @return The value of capacity
+ */
+ uint32_t capacity() override { return _capacity; }
+ /**
+ * @brief Get MemoryPlans
+ * @return MemoryPlans
+ */
+ MemoryPlans &memory_plans() override { return _mem_plans; }
+
+private:
+ uint32_t _capacity = 0;
+ MemoryPlans _mem_plans;
+  // Use std::map because claim() assumes that _claim_table is sorted by its key (base_offset)
+ std::map<uint32_t, model::OperandIndex> _claim_table;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_MEMORY_PLANNER_H__
diff --git a/runtimes/neurun/backend/cpu/MemoryPlanner.test.cc b/runtimes/neurun/backend/cpu/MemoryPlanner.test.cc
new file mode 100644
index 000000000..39e0f0d0c
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/MemoryPlanner.test.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "MemoryPlanner.h"
+#include "model/Index.h"
+
+TEST(Allocator, allocate_test)
+{
+ ::neurun::backend::cpu::Allocator allocator(1024);
+ ASSERT_NE(allocator.base(), nullptr);
+}
+
+TEST(BumpPlanner, claim_test)
+{
+ ::neurun::backend::cpu::BumpPlanner planner;
+
+ auto claim = [&planner](uint32_t index, size_t size, uint32_t expected_offset) {
+ ::neurun::model::OperandIndex mem_idx(index);
+ planner.claim(mem_idx, size);
+ auto mem_blk = planner.memory_plans()[mem_idx];
+ ASSERT_EQ(mem_blk.offset, expected_offset);
+ ASSERT_EQ(mem_blk.size, size);
+ };
+
+ claim(0, 10, 0);
+ claim(1, 20, 10);
+ claim(2, 30, 30);
+}
+
+TEST(FirstFitPlanner, claim_release_test)
+{
+ ::neurun::backend::cpu::FirstFitPlanner planner;
+
+ auto claim = [&planner](uint32_t index, size_t size, uint32_t expected_offset) {
+ ::neurun::model::OperandIndex mem_idx(index);
+ planner.claim(mem_idx, size);
+ auto mem_blk = planner.memory_plans()[mem_idx];
+ ASSERT_EQ(mem_blk.offset, expected_offset);
+ ASSERT_EQ(mem_blk.size, size);
+ };
+
+ auto release = [&planner](uint32_t index) {
+ ::neurun::model::OperandIndex mem_idx(index);
+ planner.release(mem_idx);
+ };
+
+ // 0 CLAIM - 10
+ claim(0, 10, 0);
+
+ // 1 CLAIM - 20
+ claim(1, 20, 10);
+
+ // 2 CLAIM - 30
+ claim(2, 30, 30);
+
+ // 0 RELEASE - 10
+ release(0);
+
+ // 3 CLAIM - 20
+ claim(3, 20, 60);
+
+ // 4 CLAIM - 5
+ claim(4, 5, 0);
+
+ // 5 CLAIM - 10
+ claim(5, 10, 80);
+
+ // 6 CLAIM - 5
+ claim(6, 5, 5);
+
+ // 2 RELEASE - 30
+ release(2);
+
+ // 7 CLAIM - 35
+ claim(7, 35, 90);
+
+ // 8 CLAIM - 10
+ claim(8, 10, 30);
+
+ // 4 RELEASE - 5
+ release(4);
+
+ // 9 CLAIM - 10
+ claim(9, 10, 40);
+
+ // 10 CLAIM - 10
+ claim(10, 10, 50);
+
+ // 6 RELEASE
+ release(6);
+
+ // 1 RELEASE
+ release(1);
+
+ // 8 RELEASE
+ release(8);
+
+ // 9 RELEASE
+ release(9);
+
+ // 10 RELEASE
+ release(10);
+
+ // 3 RELEASE
+ release(3);
+
+ // 5 RELEASE
+ release(5);
+
+ // 7 RELEASE
+ release(7);
+}
diff --git a/runtimes/neurun/backend/cpu/MemoryPlannerFactory.cc b/runtimes/neurun/backend/cpu/MemoryPlannerFactory.cc
new file mode 100644
index 000000000..25ac4392f
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/MemoryPlannerFactory.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MemoryPlannerFactory.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+MemoryPlannerFactory &MemoryPlannerFactory::instance()
+{
+ static MemoryPlannerFactory instance;
+ return instance;
+}
+
+IMemoryPlanner *MemoryPlannerFactory::create(const std::string &key)
+{
+ if (key == "FirstFit")
+ {
+ return new FirstFitPlanner;
+ }
+ else if (key == "Bump")
+ {
+ return new BumpPlanner;
+ }
+ return new FirstFitPlanner; // Default Planner
+}
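+
+// Illustrative usage (a sketch; the actual call site is
+// MemoryManager::createMemoryPlanner(), which passes the CPU_MEMORY_PLANNER
+// config string):
+//
+//   IMemoryPlanner *planner = MemoryPlannerFactory::instance().create("Bump");
+//
+// Any key other than "FirstFit" or "Bump" falls back to FirstFitPlanner.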
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/MemoryPlannerFactory.h b/runtimes/neurun/backend/cpu/MemoryPlannerFactory.h
new file mode 100644
index 000000000..610cc9a15
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/MemoryPlannerFactory.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_MEMORY_PLANNER_FACTORY_H__
+#define __NEURUN_BACKEND_CPU_MEMORY_PLANNER_FACTORY_H__
+
+#include "MemoryPlanner.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class MemoryPlannerFactory
+{
+public:
+ static MemoryPlannerFactory &instance();
+
+private:
+ MemoryPlannerFactory() = default;
+
+public:
+ IMemoryPlanner *create(const std::string &key);
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_MEMORY_PLANNER_FACTORY_H__
diff --git a/runtimes/neurun/backend/cpu/PluginClassesAllocator.cc b/runtimes/neurun/backend/cpu/PluginClassesAllocator.cc
new file mode 100644
index 000000000..caba78447
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/PluginClassesAllocator.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <util/logging.h>
+
+#include "Backend.h"
+
+extern "C" {
+neurun::backend::Backend *neurun_backend_create()
+{
+ VERBOSE(neurun_backend_create) << "'cpu' loaded\n";
+ return new neurun::backend::cpu::Backend;
+}
+
+void neurun_backend_destroy(neurun::backend::Backend *backend)
+{
+ VERBOSE(neurun_backend_create) << "'cpu' unloaded\n";
+ delete backend;
+}
+}
diff --git a/runtimes/neurun/backend/cpu/ShapeFixer.cc b/runtimes/neurun/backend/cpu/ShapeFixer.cc
new file mode 100644
index 000000000..741f07d5e
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/ShapeFixer.cc
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ShapeFixer.h"
+
+#include <stdexcept>
+
+#include "cpp14/memory.h"
+#include "util/Padding.h"
+#include "kernel/OperationUtils.h"
+#include "kernel/ConvolutionLayer.h"
+#include "kernel/AvgPoolLayer.h"
+#include "kernel/MaxPoolLayer.h"
+#include "kernel/ConcatLayer.h"
+#include "kernel/FullyConnectedLayer.h"
+#include "kernel/ReshapeLayer.h"
+#include "kernel/SoftMaxLayer.h"
+#include "kernel/PermuteLayer.h"
+#include "kernel/DepthwiseConvolutionLayer.h"
+#include "kernel/AddLayer.h"
+
+#include <backend/Backend.h>
+#include <backend/IConfig.h>
+#include "compiler/IExecutionBuilder.h"
+
+#include "util/logging.h"
+
+#include "util/Utils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+ShapeFixer::ShapeFixer(const neurun::model::Operands &operand_ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _ctx(operand_ctx), _tensor_builder(tensor_builder)
+{
+ assert(tensor_builder);
+}
+
+void ShapeFixer::visit(const model::operation::Conv2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::DepthwiseConv2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::MaxPool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::AvgPool2DNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::ConcatNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::FullyConnectedNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::MulNode &) { throw std::runtime_error("NYI"); }
+
+void ShapeFixer::visit(const model::operation::ReshapeNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::SoftmaxNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::AddNode &node)
+{
+ const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
+
+  // Broadcasting and quantized inputs are not supported yet
+ if (!(_ctx.at(lhs_index).shape() == _ctx.at(rhs_index).shape()) ||
+ _ctx.at(lhs_index).typeInfo().type() == model::DataType::QUANT8_ASYMM)
+ {
+ throw std::runtime_error{"NYI"};
+ }
+}
+
+void ShapeFixer::visit(const model::operation::PermuteNode &) { /* DO NOTHING */}
+
+void ShapeFixer::visit(const model::operation::CustomNode &) { /* DO NOTHING */}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/ShapeFixer.h b/runtimes/neurun/backend/cpu/ShapeFixer.h
new file mode 100644
index 000000000..34a001797
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/ShapeFixer.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_SHAPE_FIXER_H__
+#define __NEURUN_BACKEND_CPU_SHAPE_FIXER_H__
+
+#include <backend/IShapeFixer.h>
+
+#include "model/Operands.h"
+#include "operand/Tensor.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class ShapeFixer : public IShapeFixer
+{
+public:
+ ShapeFixer(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+ std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
+
+ void visit(const model::operation::Conv2DNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &) override;
+ void visit(const model::operation::MaxPool2DNode &) override;
+ void visit(const model::operation::AvgPool2DNode &) override;
+ void visit(const model::operation::ConcatNode &) override;
+ void visit(const model::operation::FullyConnectedNode &) override;
+ void visit(const model::operation::MulNode &) override;
+ void visit(const model::operation::ReshapeNode &) override;
+ void visit(const model::operation::SoftmaxNode &) override;
+ void visit(const model::operation::AddNode &) override;
+ void visit(const model::operation::PermuteNode &) override;
+ void visit(const model::operation::CustomNode &) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_SHAPE_FIXER_H__
diff --git a/runtimes/neurun/backend/cpu/TensorBuilder.cc b/runtimes/neurun/backend/cpu/TensorBuilder.cc
new file mode 100644
index 000000000..cf91e5e61
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/TensorBuilder.cc
@@ -0,0 +1,141 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TensorBuilder.h"
+
+#include <cassert>
+
+#include "util/logging.h"
+
+namespace
+{
+
+using namespace neurun;
+
+// NOTE This backend supports only NHWC for now
+model::OperandInfo asTensorInfo(const model::OperandInfo &info, model::Layout frontend_layout)
+{
+ const auto &shape = info.shape();
+ const auto &rank = shape.rank();
+ assert(rank <= 4);
+
+ auto ret = info;
+ if (frontend_layout == model::Layout::NCHW && rank == 4)
+ {
+ // NCHW -> NHWC
+ uint32_t permutation[4] = {0, 2, 3, 1};
+ ret = model::OperandInfo{{shape.dim(permutation[0]), shape.dim(permutation[1]),
+ shape.dim(permutation[2]), shape.dim(permutation[3])},
+ info.typeInfo()};
+ }
+ return ret;
+}
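+
+// Illustrative example (hypothetical dims): an NCHW frontend shape
+// {1, 3, 224, 224} becomes the NHWC shape {1, 224, 224, 3} via the
+// permutation {0, 2, 3, 1} above.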
+
+} // namespace
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+TensorBuilder::TensorBuilder() : _tensor_mgr{new TensorManager()}
+{
+ // DO NOTHING
+}
+
+void TensorBuilder::registerTensorInfo(const model::OperandIndex &ind,
+ const model::OperandInfo &info,
+ model::Layout frontend_layout, model::Layout backend_layout,
+ bool as_const)
+{
+ _tensor_info_map.emplace(ind, info);
+ _tensor_layouts_map.insert({ind, std::make_pair(frontend_layout, backend_layout)});
+
+ if (as_const)
+ _constants.append(ind);
+}
+
+void TensorBuilder::registerSubTensorInfo(const model::OperandIndex &,
+ const compiler::SubTensorInfo &)
+{
+ // Not supported yet
+ assert(false);
+}
+
+void TensorBuilder::notifyFirstUse(const model::OperandIndex &ind)
+{
+ assert(_tensor_info_map.find(ind) != _tensor_info_map.end());
+ const auto tensor_info = asTensorInfo(_tensor_info_map.at(ind), _tensor_layouts_map[ind].first);
+ const auto size = tensor_info.total_size();
+ _tensor_mgr->buildTensor(ind, tensor_info, _constants.contains(ind));
+ _tensor_mgr->claimPlan(ind, size);
+}
+
+void TensorBuilder::notifyLastUse(const model::OperandIndex &ind) { _tensor_mgr->releasePlan(ind); }
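+
+// NOTE A sketch of the expected per-operand protocol, inferred from this file:
+//   registerTensorInfo -> notifyFirstUse (build tensor + claim plan)
+//   -> notifyLastUse (release plan) -> prepare (allocate all claimed memory)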
+
+void TensorBuilder::prepare(void)
+{
+ _tensor_mgr->allocateConsts();
+ _tensor_mgr->allocateNonconsts();
+}
+
+// TODO Remove this
+void TensorBuilder::allocate(void)
+{
+  // NOTE For now there is nothing to do here. Allocation is done in the prepare stage, which is
+  // not appropriate. This is because CPU kernels require `ITensor`s to be allocated before
+  // kernel generation.
+}
+
+void TensorBuilder::allocateConsts()
+{
+  // NOTE For now there is nothing to do here. Allocation is done in the prepare stage, which is
+  // not appropriate. This is because CPU kernels require `ITensor`s to be allocated before
+  // kernel generation.
+}
+
+void TensorBuilder::allocateNonconsts()
+{
+  // NOTE For now there is nothing to do here. Allocation is done in the prepare stage, which is
+  // not appropriate. This is because CPU kernels require `ITensor`s to be allocated before
+  // kernel generation.
+}
+
+std::shared_ptr<::neurun::backend::operand::ITensor>
+TensorBuilder::tensorAt(const model::OperandIndex &ind)
+{
+ return _tensor_mgr->at(ind);
+}
+
+std::shared_ptr<backend::operand::IObject> TensorBuilder::wrapTensor(const model::OperandIndex &ind)
+{
+ return _tensor_mgr->wrapTensor(ind);
+}
+
+void TensorBuilder::iterate(const IterateFunction &fn) { _tensor_mgr->iterate(fn); }
+
+std::shared_ptr<operand::Tensor> TensorBuilder::at(const ::neurun::model::OperandIndex &ind)
+{
+ return _tensor_mgr->at(ind);
+}
+
+std::unique_ptr<ITensorManager> TensorBuilder::releaseTensorManager(void)
+{
+ return std::move(_tensor_mgr);
+}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/TensorBuilder.h b/runtimes/neurun/backend/cpu/TensorBuilder.h
new file mode 100644
index 000000000..efafbd97b
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/TensorBuilder.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_TENSOR_BUILDER_H__
+#define __NEURUN_BACKEND_CPU_TENSOR_BUILDER_H__
+
+#include <unordered_map>
+
+#include <backend/ITensorBuilder.h>
+#include <backend/operand/Object.h>
+#include "operand/Tensor.h"
+#include "model/OperandIndexMap.h"
+#include "TensorManager.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class TensorBuilder : public ITensorBuilder
+{
+public:
+ TensorBuilder();
+
+ /**
+ * @brief Register tensor information to allocate on CPU backend
+   * @param[in] ind Operand index
+   * @param[in] info Operand information
+   * @param[in] frontend_layout Frontend data layout of the operand
+   * @param[in] backend_layout Data layout of the operand on this backend
+   * @param[in] as_const Whether the operand is a constant
+ */
+ void registerTensorInfo(const model::OperandIndex &ind, const model::OperandInfo &info,
+ model::Layout frontend_layout, model::Layout backend_layout,
+ bool as_const) override;
+ /**
+ * @brief Register subtensor information to allocate on CPU backend
+ * @param[in] ind Operand index
+ * @param[in] info Tensor information
+ */
+ void registerSubTensorInfo(const model::OperandIndex &ind,
+ const compiler::SubTensorInfo &info) override;
+
+ void notifyFirstUse(const model::OperandIndex &) override;
+ void notifyLastUse(const model::OperandIndex &) override;
+
+ void prepare(void) override;
+ void allocate(void) override; // TODO Remove this
+ void allocateConsts() override;
+ void allocateNonconsts() override;
+ void postFunctionPrepare() override { /* DO NOTHING */}
+ void finalize() override { /* DO NOTHING */}
+
+ std::shared_ptr<::neurun::backend::operand::ITensor>
+ tensorAt(const model::OperandIndex &ind) override;
+
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind) override;
+
+ void iterate(const IterateFunction &fn) override;
+
+ void preVisit(const model::Operation &) override { /* DO NOTHING */}
+ void postVisit(const model::Operation &) override { /* DO NOTHING */}
+
+ std::unique_ptr<ITensorManager> releaseTensorManager(void) override;
+
+ std::shared_ptr<operand::Tensor> at(const ::neurun::model::OperandIndex &ind);
+
+private:
+ std::unique_ptr<TensorManager> _tensor_mgr;
+ model::OperandIndexMap<model::OperandInfo> _tensor_info_map;
+ model::OperandIndexMap<std::pair<model::Layout, model::Layout>> _tensor_layouts_map;
+ model::OperandIndexSequence _constants;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/backend/cpu/TensorManager.cc b/runtimes/neurun/backend/cpu/TensorManager.cc
new file mode 100644
index 000000000..22d874bf3
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/TensorManager.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TensorManager.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+TensorManager::TensorManager() : _const_mgr{new MemoryManager()}, _nonconst_mgr{new MemoryManager()}
+{
+ // DO NOTHING
+}
+
+void TensorManager::allocateConsts(void) { _const_mgr->allocate(); }
+
+void TensorManager::allocateNonconsts(void) { _nonconst_mgr->allocate(); }
+
+void TensorManager::deallocateConsts(void) { _const_mgr->deallocate(); }
+
+void TensorManager::deallocateNonconsts(void) { _nonconst_mgr->deallocate(); }
+
+void TensorManager::buildTensor(const model::OperandIndex &ind,
+ const model::OperandInfo &tensor_info, bool as_const)
+{
+ assert(_ind_to_mgr.find(ind) == _ind_to_mgr.end());
+ if (as_const)
+ {
+ _const_mgr->buildTensor(ind, tensor_info);
+ _ind_to_mgr.insert({ind, *_const_mgr});
+ }
+ else
+ {
+ _nonconst_mgr->buildTensor(ind, tensor_info);
+ _ind_to_mgr.insert({ind, *_nonconst_mgr});
+ }
+}
+
+void TensorManager::claimPlan(const model::OperandIndex &ind, uint32_t size)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ _ind_to_mgr.at(ind).claimPlan(ind, size);
+}
+
+void TensorManager::releasePlan(const model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ _ind_to_mgr.at(ind).releasePlan(ind);
+}
+
+std::shared_ptr<backend::operand::IObject> TensorManager::wrapTensor(const model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ return _ind_to_mgr.at(ind).wrapTensor(ind);
+}
+
+std::shared_ptr<operand::Tensor> TensorManager::at(const ::neurun::model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ return _ind_to_mgr.at(ind).tensors().at(ind);
+}
+
+model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &TensorManager::constTensors(void)
+{
+ return _const_mgr->tensors();
+}
+
+model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &TensorManager::nonconstTensors(void)
+{
+ return _nonconst_mgr->tensors();
+}
+
+void TensorManager::iterate(const std::function<void(const model::OperandIndex &)> &fn)
+{
+ for (auto it : _nonconst_mgr->tensors())
+ fn(it.first);
+
+ for (auto it : _const_mgr->tensors())
+ fn(it.first);
+}
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/TensorManager.h b/runtimes/neurun/backend/cpu/TensorManager.h
new file mode 100644
index 000000000..c1f4a0072
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/TensorManager.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_TENSOR_MANAGER_H__
+#define __NEURUN_BACKEND_CPU_TENSOR_MANAGER_H__
+
+#include "backend/ITensorManager.h"
+#include "MemoryManager.h"
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+
+class TensorManager : public backend::ITensorManager
+{
+public:
+ TensorManager();
+ virtual ~TensorManager() = default;
+
+ void allocateConsts(void) override;
+ void allocateNonconsts(void) override;
+ void deallocateConsts(void) override;
+ void deallocateNonconsts(void) override;
+
+ void buildTensor(const model::OperandIndex &ind, const model::OperandInfo &tensor_info,
+ bool as_const);
+
+ void claimPlan(const model::OperandIndex &ind, uint32_t size);
+ void releasePlan(const model::OperandIndex &ind);
+
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind);
+ std::shared_ptr<operand::Tensor> at(const ::neurun::model::OperandIndex &ind);
+
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &constTensors(void);
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &nonconstTensors(void);
+
+ void iterate(const std::function<void(const model::OperandIndex &)> &fn);
+
+private:
+ std::unique_ptr<MemoryManager> _const_mgr;
+ std::unique_ptr<MemoryManager> _nonconst_mgr;
+ model::OperandIndexMap<MemoryManager &> _ind_to_mgr;
+};
+
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_TENSOR_MANAGER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/AddLayer.cc b/runtimes/neurun/backend/cpu/kernel/AddLayer.cc
new file mode 100644
index 000000000..14e2afec8
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/AddLayer.cc
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AddLayer.h"
+
+#include <cker/operation/Add.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+void AddLayer::addFloat32()
+{
+ float output_activation_min, output_activation_max;
+ CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
+ nnfw::cker::AddParam op_params;
+ op_params.float_activation_max = output_activation_max;
+ op_params.float_activation_min = output_activation_min;
+
+ nnfw::cker::Add(op_params, convertShapeToCkerShape(_lhsShape), _lhsData.f,
+ convertShapeToCkerShape(_rhsShape), _rhsData.f,
+ convertShapeToCkerShape(_outputShape), _outputData.f);
+}
+
+void AddLayer::addQuant8()
+{
+ int32_t output_activation_min, output_activation_max;
+ CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
+ &output_activation_max);
+ // nnfw::cker::AddParam op_params;
+ // op_params.quantized_activation_max = output_activation_max;
+ // op_params.quantized_activation_min = output_activation_min;
+
+ // cker quant8 add is not implemented yet
+ throw std::runtime_error{"NYI"};
+}
+
+void AddLayer::configure(uint8_t *lhsData, const Shape &lhsShape, uint8_t *rhsData,
+ const Shape &rhsShape, const model::Activation activation,
+ uint8_t *outputData, const Shape &outputShape)
+{
+ _lhsData.u8 = lhsData;
+ _lhsShape = lhsShape;
+ _rhsData.u8 = rhsData;
+ _rhsShape = rhsShape;
+ _inputType = lhsShape.type;
+ _activation = activation;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
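+
+// Illustrative usage (hypothetical buffers/shapes; mirrors
+// KernelGenerator::visit(AddNode)):
+//
+//   AddLayer fn;
+//   fn.configure(lhs_buf, lhs_shape, rhs_buf, rhs_shape,
+//                model::Activation::NONE, out_buf, out_shape);
+//   fn.run(); // dispatches on the lhs element type recorded in configure()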
+
+void AddLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ addFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ addQuant8();
+ }
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/AddLayer.h b/runtimes/neurun/backend/cpu/kernel/AddLayer.h
new file mode 100644
index 000000000..e7ac39115
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/AddLayer.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_ADDLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_ADDLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class AddLayer : public ::neurun::exec::IFunction
+{
+public:
+ AddLayer() : _lhsData(), _rhsData(), _outputData(), _lhsShape(), _rhsShape(), _outputShape()
+ {
+ // DO NOTHING
+ }
+
+public:
+ void addFloat32();
+
+ void addQuant8();
+
+ void configure(uint8_t *lhsData, const Shape &lhsShape, uint8_t *rhsData, const Shape &rhsShape,
+ const model::Activation activation, uint8_t *outputData, const Shape &outputShape);
+
+ void run();
+ void runSync()
+ {
+    // This method exists just for profiling; see backend::acl_common::AclFunction.
+ run();
+ }
+
+private:
+ DataPtr _lhsData;
+ DataPtr _rhsData;
+ DataPtr _outputData;
+
+ Shape _lhsShape;
+ Shape _rhsShape;
+ Shape _outputShape;
+
+ model::Activation _activation{model::Activation::NONE};
+
+ OperandType _inputType{OperandType::FLOAT32};
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_ADDLAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.cc b/runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.cc
new file mode 100644
index 000000000..de43aae76
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AvgPoolLayer.h"
+
+#include "OperationUtils.h"
+
+#include <cker/operation/AveragePool.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+#define AVGPOOLING_PARAMETERS \
+ nnfw::cker::AveragePoolParams op_params; \
+ op_params.stride_height = _strideHeight; \
+ op_params.stride_width = _strideWidth; \
+ op_params.filter_height = _kernelHeight; \
+ op_params.filter_width = _kernelWidth; \
+ op_params.padding_values.height = (int8_t)_paddingTop; \
+ op_params.padding_values.width = (int8_t)_paddingLeft;
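+
+// The macro above declares `op_params` and fills its stride/filter/padding
+// fields; each entry point below completes it with activation bounds before
+// calling into nnfw::cker.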
+
+AvgPoolLayer::AvgPoolLayer()
+ : _inputData(), _outputData(), _inputShape(), _outputShape(), _paddingLeft(0), _paddingTop(0),
+ _paddingRight(0), _paddingBottom(0), _strideWidth(0), _strideHeight(0), _kernelWidth(0),
+ _kernelHeight(0), _activation(model::Activation::NONE), _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+void AvgPoolLayer::averagePoolFloat32()
+{
+ AVGPOOLING_PARAMETERS
+ float output_activation_min, output_activation_max;
+ CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
+ op_params.float_activation_min = output_activation_min;
+ op_params.float_activation_max = output_activation_max;
+
+ nnfw::cker::AveragePool(op_params, convertShapeToCkerShape(_inputShape), _inputData.f,
+ convertShapeToCkerShape(_outputShape), _outputData.f);
+}
+
+void AvgPoolLayer::averagePoolQuant8()
+{
+ AVGPOOLING_PARAMETERS
+ int32_t output_activation_min = 0;
+ int32_t output_activation_max = 0;
+ CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
+ &output_activation_max);
+ op_params.quantized_activation_min = output_activation_min;
+ op_params.quantized_activation_max = output_activation_max;
+
+ nnfw::cker::AveragePool(op_params, convertShapeToCkerShape(_inputShape), _inputData.u8,
+ convertShapeToCkerShape(_outputShape), _outputData.u8);
+}
+
+void AvgPoolLayer::configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
+ const uint32_t paddingRight, const uint32_t paddingTop,
+ const uint32_t paddingBottom, const uint32_t strideWidth,
+ const uint32_t strideHeight, const uint32_t kernelWidth,
+ const uint32_t kernelHeight, const model::Activation activation,
+ uint8_t *outputData, const Shape outputShape)
+{
+ _inputData.u8 = inputData;
+ _inputShape = inputShape;
+ _inputType = inputShape.type;
+ _paddingLeft = paddingLeft;
+ _paddingRight = paddingRight;
+ _paddingTop = paddingTop;
+ _paddingBottom = paddingBottom;
+ _strideWidth = strideWidth;
+ _strideHeight = strideHeight;
+ _kernelWidth = kernelWidth;
+ _kernelHeight = kernelHeight;
+ _activation = activation;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void AvgPoolLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ averagePoolFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ averagePoolQuant8();
+ }
+}
+
+#undef AVGPOOLING_PARAMETERS
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.h b/runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.h
new file mode 100644
index 000000000..c18b9f9a6
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/AvgPoolLayer.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_AVGPOOLLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_AVGPOOLLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class AvgPoolLayer : public ::neurun::exec::IFunction
+{
+public:
+ AvgPoolLayer();
+
+public:
+ void averagePoolFloat32();
+
+ void averagePoolQuant8();
+
+ void configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
+ const uint32_t paddingRight, const uint32_t paddingTop,
+ const uint32_t paddingBottom, const uint32_t strideWidth,
+ const uint32_t strideHeight, const uint32_t kernelWidth,
+ const uint32_t kernelHeight, const model::Activation activation,
+ uint8_t *outputData, const Shape outputShape);
+
+ void run();
+ void runSync()
+ {
+    // This method exists just for profiling; see backend::acl_common::AclFunction.
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _outputData;
+
+ Shape _inputShape;
+ Shape _outputShape;
+
+ uint32_t _paddingLeft;
+ uint32_t _paddingTop;
+ uint32_t _paddingRight;
+ uint32_t _paddingBottom;
+
+ uint32_t _strideWidth;
+ uint32_t _strideHeight;
+ uint32_t _kernelWidth;
+ uint32_t _kernelHeight;
+
+ model::Activation _activation;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_AVGPOOLLAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/ConcatLayer.cc b/runtimes/neurun/backend/cpu/kernel/ConcatLayer.cc
new file mode 100644
index 000000000..c390436a0
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/ConcatLayer.cc
@@ -0,0 +1,136 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConcatLayer.h"
+
+#include "OperationUtils.h"
+
+#include <cker/operation/Concatenation.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+ConcatLayer::ConcatLayer()
+ : _inputDataPtrs(), _outputData(), _axis(0), _inputShapes(), _outputShape(),
+ _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+void ConcatLayer::concatenationFloat32()
+{
+ uint32_t num_inputs = _inputShapes.size();
+
+ nnfw::cker::ConcatenationParams op_params;
+ op_params.axis = _axis;
+ op_params.inputs_count = num_inputs;
+
+ std::vector<nnfw::cker::Shape *> inputDimsPtr;
+ std::vector<nnfw::cker::Shape> inputDims;
+ inputDimsPtr.reserve(num_inputs);
+ inputDims.reserve(num_inputs);
+
+ for (uint32_t i = 0; i < num_inputs; i++)
+ {
+ inputDims.push_back(convertShapeToCkerShape(_inputShapes[i]));
+ inputDimsPtr.push_back(&inputDims[i]);
+ }
+
+ std::vector<const float *> inputFloatPtrs;
+
+ for (auto ptr : _inputDataPtrs)
+ {
+ inputFloatPtrs.emplace_back(reinterpret_cast<const float *>(ptr));
+ }
+
+ nnfw::cker::Concatenation<float>(op_params, inputDimsPtr.data(), inputFloatPtrs.data(),
+ convertShapeToCkerShape(_outputShape), _outputData.f);
+}
+
+void ConcatLayer::concatenationQuant8()
+{
+ uint32_t num_inputs = _inputShapes.size();
+
+ std::vector<int32_t> input_zeropoints(num_inputs);
+ std::vector<float> input_scales(num_inputs);
+ for (uint32_t i = 0; i < num_inputs; i++)
+ {
+ input_zeropoints[i] = _inputShapes[i].offset;
+ input_scales[i] = _inputShapes[i].scale;
+ }
+
+ nnfw::cker::ConcatenationParams op_params;
+ op_params.axis = _axis;
+ op_params.inputs_count = num_inputs;
+ op_params.input_zeropoint = input_zeropoints.data();
+ op_params.input_scale = input_scales.data();
+ op_params.output_zeropoint = _outputShape.offset;
+ op_params.output_scale = _outputShape.scale;
+
+ std::vector<nnfw::cker::Shape *> inputDimsPtr;
+ std::vector<nnfw::cker::Shape> inputDims;
+ inputDimsPtr.reserve(num_inputs);
+ inputDims.reserve(num_inputs);
+ for (uint32_t i = 0; i < num_inputs; i++)
+ {
+ inputDims.push_back(convertShapeToCkerShape(_inputShapes[i]));
+ inputDimsPtr.push_back(&inputDims[i]);
+ }
+
+ nnfw::cker::Concatenation<uint8_t>(op_params, inputDimsPtr.data(), _inputDataPtrs.data(),
+ convertShapeToCkerShape(_outputShape), _outputData.u8);
+}
+
+void ConcatLayer::configure(const std::vector<const uint8_t *> &inputDataPtrs,
+ const std::vector<Shape> &inputShapes, int32_t axis,
+ uint8_t *outputData, const Shape outputShape)
+{
+ _inputDataPtrs = inputDataPtrs;
+
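+ // All inputs are expected to share one operand type; the loop below simply
+ // keeps the type of the last shape it sees.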
+ for (auto shape : inputShapes)
+ {
+ _inputShapes.emplace_back(shape);
+ _inputType = shape.type;
+ }
+
+ _axis = axis;
+
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void ConcatLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ concatenationFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ concatenationQuant8();
+ }
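+ // Any other operand type is silently ignored; callers are expected to pass
+ // only FLOAT32 or QUANT8_ASYMM here.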
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/ConcatLayer.h b/runtimes/neurun/backend/cpu/kernel/ConcatLayer.h
new file mode 100644
index 000000000..5469179e0
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/ConcatLayer.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_CONCATLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_CONCATLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class ConcatLayer : public ::neurun::exec::IFunction
+{
+public:
+ ConcatLayer();
+
+public:
+ void concatenationFloat32();
+
+ void concatenationQuant8();
+
+ void configure(const std::vector<const uint8_t *> &inputDataPtrs,
+ const std::vector<Shape> &inputShapes, int32_t axis, uint8_t *outputData,
+ const Shape outputShape);
+
+ void run();
+ void runSync()
+ {
+ // runSync() exists for profiling; this CPU kernel is synchronous, so it
+ // simply forwards to run() (cf. backend::acl_common::AclFunction).
+ run();
+ }
+
+private:
+ std::vector<const uint8_t *> _inputDataPtrs;
+ DataPtr _outputData;
+
+ int32_t _axis;
+
+ std::vector<Shape> _inputShapes;
+ Shape _outputShape;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_CONCATLAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.cc b/runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.cc
new file mode 100644
index 000000000..efeabbbae
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.cc
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConvolutionLayer.h"
+
+#include <cker/operation/Conv.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+ConvolutionLayer::ConvolutionLayer()
+ : _inputData(), _kernelData(), _outputData(), _biasData(), _inputShape(), _kernelShape(),
+ _outputShape(), _biasShape(), _paddingLeft(0), _paddingTop(0), _paddingRight(0),
+ _paddingBottom(0), _strideWidth(0), _strideHeight(0), _activation(model::Activation::NONE),
+ _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+void ConvolutionLayer::convFloat32()
+{
+ float output_activation_min, output_activation_max;
+ CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
+
+ nnfw::cker::ConvParams op_params;
+ op_params.padding_values.width = _paddingLeft;
+ op_params.padding_values.height = _paddingTop;
+ op_params.stride_width = _strideWidth;
+ op_params.stride_height = _strideHeight;
+ op_params.dilation_width_factor = 1;
+ op_params.dilation_height_factor = 1;
+ op_params.float_activation_min = output_activation_min;
+ op_params.float_activation_max = output_activation_max;
+
+ nnfw::cker::Conv(op_params, convertShapeToCkerShape(_inputShape), _inputData.f,
+ convertShapeToCkerShape(_kernelShape), _kernelData.f,
+ convertShapeToCkerShape(_biasShape), _biasData.f,
+ convertShapeToCkerShape(_outputShape), _outputData.f);
+}
+
+void ConvolutionLayer::convQuant8()
+{
+ int32_t output_activation_min = 0;
+ int32_t output_activation_max = 0;
+ CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
+ &output_activation_max);
+
+ float real_multiplier = 0.0;
+ int32_t output_multiplier = 0;
+ int32_t output_shift = 0;
+ GetQuantizedConvolutionMultiplier(_inputShape, _kernelShape, _biasShape, _outputShape,
+ &real_multiplier);
+ QuantizeMultiplier(real_multiplier, &output_multiplier, &output_shift);
+
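+ // cker follows the TFLite convention: the input/weights zero-points are
+ // passed negated so the kernel can add them, while the output offset is
+ // applied as-is after requantization.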
+ nnfw::cker::ConvParams op_params;
+ op_params.stride_width = _strideWidth;
+ op_params.stride_height = _strideHeight;
+ op_params.dilation_width_factor = 1;
+ op_params.dilation_height_factor = 1;
+ op_params.padding_values.width = _paddingLeft;
+ op_params.padding_values.height = _paddingTop;
+ op_params.input_offset = -_inputShape.offset;
+ op_params.weights_offset = -_kernelShape.offset;
+ op_params.output_offset = _outputShape.offset;
+ op_params.output_multiplier = output_multiplier;
+ op_params.output_shift = output_shift;
+ op_params.quantized_activation_min = output_activation_min;
+ op_params.quantized_activation_max = output_activation_max;
+
+ nnfw::cker::Conv(op_params, convertShapeToCkerShape(_inputShape), _inputData.u8,
+ convertShapeToCkerShape(_kernelShape), _kernelData.u8,
+ convertShapeToCkerShape(_biasShape), _biasData.i32,
+ convertShapeToCkerShape(_outputShape), _outputData.u8);
+}
+
+void ConvolutionLayer::configure(uint8_t *inputData, const Shape inputShape, uint8_t *kernelData,
+ const Shape kernelShape, uint8_t *biasData, const Shape biasShape,
+ const uint32_t paddingLeft, const uint32_t paddingRight,
+ const uint32_t paddingTop, const uint32_t paddingBottom,
+ const uint32_t strideWidth, const uint32_t strideHeight,
+ const model::Activation activation, uint8_t *outputData,
+ const Shape outputShape)
+{
+ _inputData.u8 = inputData;
+ _inputShape = inputShape;
+ _inputType = inputShape.type;
+ _kernelData.u8 = kernelData;
+ _kernelShape = kernelShape;
+ _biasData.u8 = biasData;
+ _biasShape = biasShape;
+ _paddingLeft = paddingLeft;
+ _paddingRight = paddingRight;
+ _paddingTop = paddingTop;
+ _paddingBottom = paddingBottom;
+ _strideWidth = strideWidth;
+ _strideHeight = strideHeight;
+ _activation = activation;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void ConvolutionLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ convFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ convQuant8();
+ }
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.h b/runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.h
new file mode 100644
index 000000000..868d08a14
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/ConvolutionLayer.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_CONVOLUTIONLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_CONVOLUTIONLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class ConvolutionLayer : public ::neurun::exec::IFunction
+{
+public:
+ ConvolutionLayer();
+
+public:
+ void convFloat32();
+
+ void convQuant8();
+
+ void configure(uint8_t *inputData, const Shape inputShape, uint8_t *kernelData,
+ const Shape kernelShape, uint8_t *biasData, const Shape biasShape,
+ const uint32_t paddingLeft, const uint32_t paddingRight, const uint32_t paddingTop,
+ const uint32_t paddingBottom, const uint32_t strideW, const uint32_t strideH,
+ const model::Activation activation, uint8_t *outputData, const Shape outputShape);
+
+ void run();
+ void runSync()
+ {
+ // runSync() exists for profiling; this CPU kernel is synchronous, so it
+ // simply forwards to run() (cf. backend::acl_common::AclFunction).
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _kernelData;
+ DataPtr _outputData;
+ DataPtr _biasData;
+
+ Shape _inputShape;
+ Shape _kernelShape;
+ Shape _outputShape;
+ Shape _biasShape;
+
+ uint32_t _paddingLeft;
+ uint32_t _paddingTop;
+ uint32_t _paddingRight;
+ uint32_t _paddingBottom;
+
+ uint32_t _strideWidth;
+ uint32_t _strideHeight;
+
+ model::Activation _activation;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_CONVOLUTIONLAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.cc b/runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.cc
new file mode 100644
index 000000000..1c750e0e1
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.cc
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "DepthwiseConvolutionLayer.h"
+
+#include <cker/operation/DepthwiseConv.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+DepthwiseConvolutionLayer::DepthwiseConvolutionLayer()
+ : _inputData(), _kernelData(), _outputData(), _biasData(), _inputShape(), _kernelShape(),
+ _outputShape(), _biasShape(), _paddingLeft(0), _paddingTop(0), _paddingRight(0),
+ _paddingBottom(0), _strideWidth(0), _strideHeight(0), _multiplier(0),
+ _activation(model::Activation::NONE), _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+void DepthwiseConvolutionLayer::convFloat32()
+{
+ float output_activation_min, output_activation_max;
+ CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
+
+ nnfw::cker::DepthwiseConvParams op_params;
+ op_params.stride_width = _strideWidth;
+ op_params.stride_height = _strideHeight;
+ op_params.dilation_width_factor = 1;
+ op_params.dilation_height_factor = 1;
+ op_params.padding_values.width = _paddingLeft;
+ op_params.padding_values.height = _paddingTop;
+ op_params.depth_multiplier = _multiplier;
+ op_params.float_activation_min = output_activation_min;
+ op_params.float_activation_max = output_activation_max;
+
+ nnfw::cker::DepthwiseConv(op_params, convertShapeToCkerShape(_inputShape), _inputData.f,
+ convertShapeToCkerShape(_kernelShape), _kernelData.f,
+ convertShapeToCkerShape(_biasShape), _biasData.f,
+ convertShapeToCkerShape(_outputShape), _outputData.f);
+}
+
+void DepthwiseConvolutionLayer::convQuant8()
+{
+ int32_t output_activation_min = 0;
+ int32_t output_activation_max = 0;
+ CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
+ &output_activation_max);
+
+ float real_multiplier = 0.0;
+ int32_t output_multiplier = 0;
+ int32_t output_shift = 0;
+ GetQuantizedConvolutionMultiplier(_inputShape, _kernelShape, _biasShape, _outputShape,
+ &real_multiplier);
+ QuantizeMultiplier(real_multiplier, &output_multiplier, &output_shift);
+
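+ // Zero-point handling mirrors ConvolutionLayer::convQuant8: input/weights
+ // offsets are negated for cker, the output offset is not.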
+ nnfw::cker::DepthwiseConvParams op_params;
+ op_params.stride_width = _strideWidth;
+ op_params.stride_height = _strideHeight;
+ op_params.dilation_width_factor = 1;
+ op_params.dilation_height_factor = 1;
+ op_params.padding_values.width = _paddingLeft;
+ op_params.padding_values.height = _paddingTop;
+ op_params.depth_multiplier = _multiplier;
+ op_params.input_offset = -_inputShape.offset;
+ op_params.weights_offset = -_kernelShape.offset;
+ op_params.output_offset = _outputShape.offset;
+ op_params.output_multiplier = output_multiplier;
+ op_params.output_shift = output_shift;
+ op_params.quantized_activation_min = output_activation_min;
+ op_params.quantized_activation_max = output_activation_max;
+
+ nnfw::cker::DepthwiseConv(op_params, convertShapeToCkerShape(_inputShape), _inputData.u8,
+ convertShapeToCkerShape(_kernelShape), _kernelData.u8,
+ convertShapeToCkerShape(_biasShape), _biasData.i32,
+ convertShapeToCkerShape(_outputShape), _outputData.u8);
+}
+
+void DepthwiseConvolutionLayer::configure(
+ uint8_t *inputData, const Shape inputShape, uint8_t *kernelData, const Shape kernelShape,
+ uint8_t *biasData, const Shape biasShape, const uint32_t paddingLeft,
+ const uint32_t paddingRight, const uint32_t paddingTop, const uint32_t paddingBottom,
+ const uint32_t strideWidth, const uint32_t strideHeight, const uint32_t multiplier,
+ const model::Activation activation, uint8_t *outputData, const Shape outputShape)
+{
+ _inputData.u8 = inputData;
+ _inputShape = inputShape;
+ _inputType = inputShape.type;
+ _kernelData.u8 = kernelData;
+ _kernelShape = kernelShape;
+ _biasData.u8 = biasData;
+ _biasShape = biasShape;
+ _paddingLeft = paddingLeft;
+ _paddingRight = paddingRight;
+ _paddingTop = paddingTop;
+ _paddingBottom = paddingBottom;
+ _strideWidth = strideWidth;
+ _strideHeight = strideHeight;
+ _multiplier = multiplier;
+ _activation = activation;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void DepthwiseConvolutionLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ convFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ convQuant8();
+ }
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.h b/runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.h
new file mode 100644
index 000000000..b031bc8eb
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/DepthwiseConvolutionLayer.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_DEPTHWISECONVOLUTIONLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_DEPTHWISECONVOLUTIONLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class DepthwiseConvolutionLayer : public ::neurun::exec::IFunction
+{
+public:
+ DepthwiseConvolutionLayer();
+
+public:
+ void convFloat32();
+
+ void convQuant8();
+
+ void configure(uint8_t *inputData, const Shape inputShape, uint8_t *kernelData,
+ const Shape kernelShape, uint8_t *biasData, const Shape biasShape,
+ const uint32_t paddingLeft, const uint32_t paddingRight, const uint32_t paddingTop,
+ const uint32_t paddingBottom, const uint32_t strideW, const uint32_t strideH,
+ const uint32_t multiplier, const model::Activation activation, uint8_t *outputData,
+ const Shape outputShape);
+
+ void run();
+ void runSync()
+ {
+ // runSync() exists for profiling; this CPU kernel is synchronous, so it
+ // simply forwards to run() (cf. backend::acl_common::AclFunction).
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _kernelData;
+ DataPtr _outputData;
+ DataPtr _biasData;
+
+ Shape _inputShape;
+ Shape _kernelShape;
+ Shape _outputShape;
+ Shape _biasShape;
+
+ uint32_t _paddingLeft;
+ uint32_t _paddingTop;
+ uint32_t _paddingRight;
+ uint32_t _paddingBottom;
+
+ uint32_t _strideWidth;
+ uint32_t _strideHeight;
+
+ uint32_t _multiplier;
+
+ model::Activation _activation;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_DEPTHWISECONVOLUTIONLAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.cc b/runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.cc
new file mode 100644
index 000000000..cbd36929f
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FullyConnectedLayer.h"
+
+#include <cker/operation/FullyConnected.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+FullyConnectedLayer::FullyConnectedLayer()
+ : _inputData(), _weightsData(), _biasData(), _outputData(), _inputShape(), _weightsShape(),
+ _biasShape(), _outputShape(), _activation(model::Activation::NONE),
+ _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+void FullyConnectedLayer::fullyConnectedFloat32()
+{
+ float output_activation_min, output_activation_max;
+ CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
+
+ nnfw::cker::FullyConnectedParams op_params;
+ op_params.float_activation_min = output_activation_min;
+ op_params.float_activation_max = output_activation_max;
+
+ nnfw::cker::FullyConnected(op_params, convertToExtendedCkerShape(_inputShape), _inputData.f,
+ convertToExtendedCkerShape(_weightsShape), _weightsData.f,
+ convertToExtendedCkerShape(_biasShape), _biasData.f,
+ convertToExtendedCkerShape(_outputShape), _outputData.f);
+}
+
+// In the Android NN original, an executionMutex serialized access to
+// non-threadsafe resources such as gemmlowp::GemmContext; this port has no
+// such lock, so concurrent invocations must be synchronized by the caller.
+void FullyConnectedLayer::fullyConnectedQuant8()
+{
+ float real_multiplier = 0.0;
+ int32_t output_multiplier = 0;
+ int32_t output_shift = 0;
+ int32_t output_activation_min = 0;
+ int32_t output_activation_max = 0;
+ GetQuantizedConvolutionMultiplier(_inputShape, _weightsShape, _biasShape, _outputShape,
+ &real_multiplier);
+ QuantizeMultiplier(real_multiplier, &output_multiplier, &output_shift);
+ CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
+ &output_activation_max);
+
+ nnfw::cker::FullyConnectedParams op_params;
+ op_params.input_offset = -_inputShape.offset;
+ op_params.weights_offset = -_weightsShape.offset;
+ op_params.output_offset = _outputShape.offset;
+ op_params.output_multiplier = output_multiplier;
+ op_params.output_shift = output_shift;
+ op_params.quantized_activation_min = output_activation_min;
+ op_params.quantized_activation_max = output_activation_max;
+
+ nnfw::cker::FullyConnected(op_params, convertToExtendedCkerShape(_inputShape), _inputData.u8,
+ convertToExtendedCkerShape(_weightsShape), _weightsData.u8,
+ convertToExtendedCkerShape(_biasShape), _biasData.i32,
+ convertToExtendedCkerShape(_outputShape), _outputData.u8);
+}
+
+void FullyConnectedLayer::configure(uint8_t *inputData, const Shape inputShape,
+ uint8_t *weightsData, const Shape weightsShape,
+ uint8_t *biasData, const Shape biasShape,
+ model::Activation activation, uint8_t *outputData,
+ const Shape outputShape)
+{
+ _inputData.u8 = inputData;
+ _inputShape = inputShape;
+ _inputType = inputShape.type;
+ _weightsData.u8 = weightsData;
+ _weightsShape = weightsShape;
+ _biasData.u8 = biasData;
+ _biasShape = biasShape;
+ _activation = activation;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void FullyConnectedLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ fullyConnectedFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ fullyConnectedQuant8();
+ }
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.h b/runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.h
new file mode 100644
index 000000000..635239f98
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/FullyConnectedLayer.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_FULLYCONNECTEDLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_FULLYCONNECTEDLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class FullyConnectedLayer : public ::neurun::exec::IFunction
+{
+public:
+ FullyConnectedLayer();
+
+public:
+ void fullyConnectedFloat32();
+
+ void fullyConnectedQuant8();
+
+ void configure(uint8_t *inputData, const Shape inputShape, uint8_t *weightsData,
+ const Shape weightsShape, uint8_t *biasData, const Shape biasShape,
+ model::Activation activation, uint8_t *outputData, const Shape outputShape);
+
+ void run();
+ void runSync()
+ {
+ // runSync() exists for profiling; this CPU kernel is synchronous, so it
+ // simply forwards to run() (cf. backend::acl_common::AclFunction).
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _weightsData;
+ DataPtr _biasData;
+ DataPtr _outputData;
+
+ Shape _inputShape;
+ Shape _weightsShape;
+ Shape _biasShape;
+ Shape _outputShape;
+
+ model::Activation _activation;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_FULLYCONNECTEDLAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.cc b/runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.cc
new file mode 100644
index 000000000..0aaaf9282
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MaxPoolLayer.h"
+
+#include <cker/operation/MaxPool.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
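+// Shared parameter setup for the float32 and quant8 paths below; the paddings
+// are narrowed to cker's integer padding fields.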
+#define MAXPOOLING_PARAMETERS \
+ nnfw::cker::MaxPoolParams op_params; \
+ op_params.stride_height = _strideHeight; \
+ op_params.stride_width = _strideWidth; \
+ op_params.filter_height = _kernelHeight; \
+ op_params.filter_width = _kernelWidth; \
+ op_params.padding_values.height = (int16_t)_paddingTop; \
+ op_params.padding_values.width = (int16_t)_paddingLeft;
+
+MaxPoolLayer::MaxPoolLayer()
+ : _inputData(), _outputData(), _inputShape(), _outputShape(), _paddingLeft(0), _paddingTop(0),
+ _paddingRight(0), _paddingBottom(0), _strideWidth(0), _strideHeight(0), _kernelWidth(0),
+ _kernelHeight(0), _activation(model::Activation::NONE), _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+void MaxPoolLayer::maxPoolFloat32()
+{
+ MAXPOOLING_PARAMETERS
+ float output_activation_min, output_activation_max;
+ CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
+ op_params.float_activation_min = output_activation_min;
+ op_params.float_activation_max = output_activation_max;
+
+ nnfw::cker::MaxPool(op_params, convertShapeToCkerShape(_inputShape), _inputData.f,
+ convertShapeToCkerShape(_outputShape), _outputData.f);
+}
+
+void MaxPoolLayer::maxPoolQuant8()
+{
+ MAXPOOLING_PARAMETERS
+ int32_t output_activation_min = 0;
+ int32_t output_activation_max = 0;
+ CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
+ &output_activation_max);
+ op_params.quantized_activation_min = output_activation_min;
+ op_params.quantized_activation_max = output_activation_max;
+
+ nnfw::cker::MaxPool(op_params, convertShapeToCkerShape(_inputShape), _inputData.u8,
+ convertShapeToCkerShape(_outputShape), _outputData.u8);
+}
+
+void MaxPoolLayer::configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
+ const uint32_t paddingRight, const uint32_t paddingTop,
+ const uint32_t paddingBottom, const uint32_t strideWidth,
+ const uint32_t strideHeight, const uint32_t kernelWidth,
+ const uint32_t kernelHeight, const model::Activation activation,
+ uint8_t *outputData, const Shape outputShape)
+{
+ _inputData.u8 = inputData;
+
+ _inputShape = inputShape;
+ _inputType = inputShape.type;
+ _paddingLeft = paddingLeft;
+ _paddingRight = paddingRight;
+ _paddingTop = paddingTop;
+ _paddingBottom = paddingBottom;
+ _strideWidth = strideWidth;
+ _strideHeight = strideHeight;
+ _kernelWidth = kernelWidth;
+ _kernelHeight = kernelHeight;
+ _activation = activation;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void MaxPoolLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ maxPoolFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ maxPoolQuant8();
+ }
+}
+
+#undef MAXPOOLING_PARAMETERS
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.h b/runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.h
new file mode 100644
index 000000000..2f4d2fb74
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/MaxPoolLayer.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_MAXPOOLLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_MAXPOOLLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class MaxPoolLayer : public ::neurun::exec::IFunction
+{
+public:
+ MaxPoolLayer();
+
+public:
+ void maxPoolFloat32();
+
+ void maxPoolQuant8();
+
+ void configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
+ const uint32_t paddingRight, const uint32_t paddingTop,
+ const uint32_t paddingBottom, const uint32_t strideWidth,
+ const uint32_t strideHeight, const uint32_t kernelWidth,
+ const uint32_t kernelHeight, const model::Activation activation,
+ uint8_t *outputData, const Shape outputShape);
+
+ void run();
+ void runSync()
+ {
+ // runSync() exists for profiling; this CPU kernel is synchronous, so it
+ // simply forwards to run() (cf. backend::acl_common::AclFunction).
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _outputData;
+
+ Shape _inputShape;
+ Shape _outputShape;
+
+ uint32_t _paddingLeft;
+ uint32_t _paddingTop;
+ uint32_t _paddingRight;
+ uint32_t _paddingBottom;
+
+ uint32_t _strideWidth;
+ uint32_t _strideHeight;
+ uint32_t _kernelWidth;
+ uint32_t _kernelHeight;
+
+ model::Activation _activation;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_MAXPOOLLAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/OperationUtils.cc b/runtimes/neurun/backend/cpu/kernel/OperationUtils.cc
new file mode 100644
index 000000000..40b7ef3d6
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/OperationUtils.cc
@@ -0,0 +1,239 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "OperationUtils.h"
+
+#include <cmath>
+#include <algorithm>
+#include <cassert>
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+uint32_t getNumberOfDimensions(const Shape &shape) { return shape.dimensions.size(); }
+
+uint32_t getNumberOfElements(const Shape &shape)
+{
+ uint32_t count = 1;
+ for (size_t i = 0; i < shape.dimensions.size(); i++)
+ {
+ count *= shape.dimensions[i];
+ }
+ return count;
+}
+
+uint32_t getSizeOfDimension(const Shape &shape, uint32_t dimensionIdx)
+{
+ if (dimensionIdx >= shape.dimensions.size())
+ {
+ // TODO Log the error
+ return 0;
+ }
+ return shape.dimensions[dimensionIdx];
+}
+
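+// Decomposes a real multiplier into a Q31 fixed-point value and a power-of-two
+// shift: double_multiplier ~= quantized_multiplier * 2^(shift - 31).
+// For example, 0.25 yields quantized_multiplier = 1 << 30 and shift = -1.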
+void QuantizeMultiplier(double double_multiplier, int32_t *quantized_multiplier, int *shift)
+{
+ if (double_multiplier == 0.)
+ {
+ *quantized_multiplier = 0;
+ *shift = 0;
+ return;
+ }
+ const double q = std::frexp(double_multiplier, shift);
+ auto q_fixed = static_cast<int64_t>(std::round(q * (1ll << 31)));
+
+ assert(q_fixed <= (1ll << 31));
+ if (q_fixed == (1ll << 31))
+ {
+ q_fixed /= 2;
+ ++*shift;
+ }
+ assert(q_fixed <= std::numeric_limits<int32_t>::max());
+ *quantized_multiplier = static_cast<int32_t>(q_fixed);
+}
+
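+// The effective requantization scale of a convolution is
+// (input_scale * filter_scale) / output_scale; the training pipeline is
+// expected to make the bias scale equal to input_scale * filter_scale.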
+void GetQuantizedConvolutionMultiplier(const Shape &inputShape, const Shape &filterShape,
+ const Shape &biasShape, const Shape &outputShape,
+ float *multiplier)
+{
+ const float input_product_scale = inputShape.scale * filterShape.scale;
+ const float bias_scale = biasShape.scale;
+ const float output_scale = outputShape.scale;
+ // The following conditions must be guaranteed by the training pipeline.
+ UNUSED_RELEASE(bias_scale);
+ assert(std::abs(input_product_scale - bias_scale) <=
+ 1e-6 * std::min(input_product_scale, bias_scale));
+ assert(input_product_scale >= 0);
+ assert(input_product_scale < output_scale);
+ *multiplier = input_product_scale / output_scale;
+}
+
+void QuantizeMultiplierGreaterThanOne(double double_multiplier, int32_t *quantized_multiplier,
+ int *left_shift)
+{
+ assert(double_multiplier > 1.);
+ const double q = std::frexp(double_multiplier, left_shift);
+ int64_t q_fixed = static_cast<int64_t>(std::round(q * (1ll << 31)));
+ assert(q_fixed <= (1ll << 31));
+ if (q_fixed == (1ll << 31))
+ {
+ q_fixed /= 2;
+ ++*left_shift;
+ }
+ assert(*left_shift >= 0);
+ assert(q_fixed <= std::numeric_limits<int32_t>::max());
+ *quantized_multiplier = static_cast<int32_t>(q_fixed);
+}
+
+void CalculateActivationRangeFloat(model::Activation activation, float *activation_min,
+ float *activation_max)
+{
+ if (activation == model::Activation::RELU)
+ {
+ *activation_min = 0.f;
+ *activation_max = std::numeric_limits<float>::max();
+ }
+ else if (activation == model::Activation::RELU6)
+ {
+ *activation_min = 0.f;
+ *activation_max = 6.f;
+ }
+ else if (activation == model::Activation::RELU1)
+ {
+ *activation_min = -1.f;
+ *activation_max = 1.f;
+ }
+ else if (activation == model::Activation::NONE)
+ {
+ *activation_min = std::numeric_limits<float>::lowest();
+ *activation_max = std::numeric_limits<float>::max();
+ }
+ else
+ {
+ std::cout << "Unsupported fused activation function." << std::endl;
+ }
+}
+
+void CalculateActivationRangeUint8(model::Activation activation, const Shape &outputShape,
+ int32_t *act_min, int32_t *act_max)
+{
+ const int32_t qmin = std::numeric_limits<uint8_t>::min();
+ const int32_t qmax = std::numeric_limits<uint8_t>::max();
+ const auto scale = outputShape.scale;
+ const auto zero_point = outputShape.offset;
+ auto quantize = [scale, zero_point](float f) {
+ return zero_point + static_cast<int32_t>(std::round(f / scale));
+ };
+ if (activation == model::Activation::RELU)
+ {
+ *act_min = std::max(qmin, quantize(0.0));
+ *act_max = qmax;
+ }
+ else if (activation == model::Activation::RELU6)
+ {
+ *act_min = std::max(qmin, quantize(0.0));
+ *act_max = std::min(qmax, quantize(6.0));
+ }
+ else if (activation == model::Activation::RELU1)
+ {
+ *act_min = std::max(qmin, quantize(-1.0));
+ *act_max = std::min(qmax, quantize(1.0));
+ }
+ else if (activation == model::Activation::NONE)
+ {
+ *act_min = qmin;
+ *act_max = qmax;
+ }
+ else
+ {
+ std::cout << "Unsupported fused activation function." << std::endl;
+ }
+}
+
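+// Largest rescaled input difference the fixed-point softmax can represent;
+// differences beyond this radius underflow to zero probability.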
+int32_t CalculateInputRadius(int input_integer_bits, int input_left_shift)
+{
+ const double max_input_rescaled = 1.0 * ((1 << input_integer_bits) - 1) *
+ (1ll << (31 - input_integer_bits)) / (1ll << input_left_shift);
+ // Tighten bound using floor. Suppose that we could use the exact value.
+ // After scaling the difference, the result would be at the maximum. Thus we
+ // must ensure that our value has lower magnitude.
+ return static_cast<int32_t>(std::floor(max_input_rescaled));
+}
+
+Shape getShape(const ::neurun::model::Operand &o, ::neurun::model::Layout frontend_layout)
+{
+ Shape shape;
+
+ auto dims = o.shape().dims();
+ if (frontend_layout == ::neurun::model::Layout::NCHW && o.shape().rank() == 4)
+ {
+ // NCHW -> NHWC
+ uint32_t permutation[4] = {0, 2, 3, 1};
+ for (int i = 0; i < o.shape().rank(); ++i)
+ {
+ dims.at(i) = o.shape().dim(permutation[i]);
+ }
+ }
+ shape.dimensions = std::vector<uint32_t>(dims.begin(), dims.end());
+ shape.type = static_cast<OperandType>(static_cast<int32_t>(o.typeInfo().type()));
+ shape.scale = o.typeInfo().scale();
+ shape.offset = o.typeInfo().offset();
+
+ // The CPU backend assumes that neurun's internal shape rank is always 4 or less
+ assert(shape.dimensions.size() <= 4);
+
+ return shape;
+}
+
+uint32_t sizeOfData(OperandType type, const std::vector<uint32_t> &dimensions)
+{
+ uint32_t size = 4;
+
+ switch (type)
+ {
+ case OperandType::FLOAT32:
+ case OperandType::INT32:
+ case OperandType::UINT32:
+ size = 4;
+ break;
+ case OperandType::BOOL8:
+ case OperandType::QUANT8_ASYMM:
+ size = 1;
+ break;
+ default:
+ throw std::runtime_error("Not supported operand type.");
+ break;
+ }
+
+ for (auto d : dimensions)
+ {
+ size *= d;
+ }
+
+ return size;
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/OperationUtils.h b/runtimes/neurun/backend/cpu/kernel/OperationUtils.h
new file mode 100644
index 000000000..dc5bab90a
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/OperationUtils.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_OPERATION_UTILS_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_OPERATION_UTILS_H__
+
+#include <iostream>
+#include <limits>
+#include <vector>
+
+#include <cker/Shape.h>
+
+#include "model/Operand.h"
+#include "model/DataType.h"
+#include "model/InternalType.h"
+
+using OperandType = neurun::model::DataType;
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+struct Shape
+{
+ OperandType type;
+ std::vector<uint32_t> dimensions;
+ float scale;
+ int32_t offset;
+};
+
+union DataPtr {
+ uint8_t *u8;
+ int8_t *i8;
+ int32_t *i32;
+ float *f;
+ void *v;
+};
+
+uint32_t getNumberOfDimensions(const Shape &shape);
+
+uint32_t getNumberOfElements(const Shape &shape);
+
+uint32_t getSizeOfDimension(const Shape &shape, uint32_t dimensionIdx);
+
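+// Right-aligns the dimensions into a rank-4 cker shape by prepending 1s,
+// e.g. {3, 4} -> {1, 1, 3, 4} (the "extended shape" convention used by
+// cker's FullyConnected).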
+inline nnfw::cker::Shape convertToExtendedCkerShape(const Shape &shape)
+{
+ std::vector<int32_t> raw_shape;
+ raw_shape.resize(4);
+
+ uint32_t src = 4 - shape.dimensions.size();
+ for (uint32_t i = 0; i < 4; ++i)
+ {
+ if (i < src)
+ {
+ raw_shape[i] = 1;
+ }
+ else
+ {
+ raw_shape[i] = shape.dimensions[i - src];
+ }
+ }
+
+ return nnfw::cker::GetShape(raw_shape);
+}
+
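+// Left-aligns the dimensions and pads the tail with 1s instead,
+// e.g. {3, 4} -> {3, 4, 1, 1}.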
+inline nnfw::cker::Shape convertShapeToCkerShape(const Shape &shape)
+{
+ std::vector<int32_t> raw_shape;
+ raw_shape.resize(4);
+
+ for (uint32_t i = 0; i < 4; ++i)
+ {
+ if (i >= shape.dimensions.size())
+ {
+ raw_shape[i] = 1;
+ }
+ else
+ {
+ raw_shape[i] = shape.dimensions[i];
+ }
+ }
+
+ return nnfw::cker::GetShape(raw_shape);
+}
+
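+// Normalizes a (possibly negative) axis and, for NCHW frontends, remaps it to
+// the NHWC layout the backend uses internally, e.g. NCHW axis 1 (channels)
+// becomes axis 3.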
+inline int32_t getAxis(uint32_t rank, int32_t axis, ::neurun::model::Layout frontend_layout)
+{
+ auto ret = axis;
+
+ if (axis < 0)
+ {
+ ret += rank;
+ }
+
+ // NCHW -> NHWC
+ if (frontend_layout == ::neurun::model::Layout::NCHW)
+ {
+ int32_t permutation[4] = {0, 3, 1, 2};
+ ret = permutation[ret];
+ }
+
+ return ret;
+}
+
+void QuantizeMultiplier(double double_multiplier, int32_t *quantized_multiplier, int *shift);
+
+void GetQuantizedConvolutionMultiplier(const Shape &inputShape, const Shape &filterShape,
+ const Shape &biasShape, const Shape &outputShape,
+ float *multiplier);
+
+void QuantizeMultiplierGreaterThanOne(double double_multiplier, int32_t *quantized_multiplier,
+ int *left_shift);
+
+void CalculateActivationRangeFloat(model::Activation activation, float *activation_min,
+ float *activation_max);
+
+void CalculateActivationRangeUint8(model::Activation activation, const Shape &outputShape,
+ int32_t *act_min, int32_t *act_max);
+
+int32_t CalculateInputRadius(int input_integer_bits, int input_left_shift);
+
+Shape getShape(const ::neurun::model::Operand &o, ::neurun::model::Layout frontend_layout);
+
+uint32_t sizeOfData(OperandType type, const std::vector<uint32_t> &dimensions);
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_OPERATION_UTILS_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/PermuteLayer.cc b/runtimes/neurun/backend/cpu/kernel/PermuteLayer.cc
new file mode 100644
index 000000000..3be904351
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/PermuteLayer.cc
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PermuteLayer.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+using Type = model::operation::PermuteNode::Type;
+
+void PermuteLayer::configure(std::shared_ptr<::neurun::backend::operand::IObject> input,
+ std::shared_ptr<::neurun::backend::operand::IObject> output,
+ const model::Shape &output_shape, Type type, model::DataType dataType)
+{
+ _input = input;
+ _output = output;
+ _output_shape = output_shape;
+ _type = type;
+ _dataType = dataType;
+}
+
+void PermuteLayer::run()
+{
+ using ::neurun::model::DataType;
+ switch (_dataType)
+ {
+ case DataType::FLOAT32:
+ runTempl<float>();
+ break;
+ case DataType::INT32:
+ runTempl<int32_t>();
+ break;
+ case DataType::UINT32:
+ runTempl<uint32_t>();
+ break;
+ case DataType::BOOL8:
+ case DataType::QUANT8_ASYMM:
+ runTempl<uint8_t>();
+ break;
+ default:
+ throw std::runtime_error("NYI");
+ break;
+ }
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/PermuteLayer.h b/runtimes/neurun/backend/cpu/kernel/PermuteLayer.h
new file mode 100644
index 000000000..3acb2cc1f
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/PermuteLayer.h
@@ -0,0 +1,211 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_PERMUTE_LAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_PERMUTE_LAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "util/feature/nhwc/View.h"
+#include "OperationUtils.h"
+#include <backend/operand/IObject.h>
+#include "model/operation/PermuteNode.h"
+#include "util/feature/nhwc/Reader.h"
+#include "util/feature/nchw/View.h"
+#include "util/Coordinates.h"
+
+#include <misc/feature/IndexIterator.h>
+#include <cstring> // memcpy
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class PermuteLayer : public ::neurun::exec::IFunction
+{
+public:
+ PermuteLayer() = default;
+
+public:
+ void configure(std::shared_ptr<::neurun::backend::operand::IObject> input,
+ std::shared_ptr<::neurun::backend::operand::IObject> output,
+ const model::Shape &output_shape, model::operation::PermuteNode::Type type,
+ model::DataType dataType);
+ void run();
+ void runSync()
+ {
+ // runSync() exists for profiling; this CPU kernel is synchronous, so it
+ // simply forwards to run() (cf. backend::acl_common::AclFunction).
+ run();
+ }
+
+private:
+ template <class T> void runTempl()
+ {
+ auto rank = _output_shape.rank();
+ auto fn = [&](::neurun::backend::operand::ITensor &in_tensor) {
+ _output->access([&](::neurun::backend::operand::ITensor &out_tensor) {
+ auto input_buffer = in_tensor.buffer();
+ auto input_size = in_tensor.total_size();
+ auto output_buffer = out_tensor.buffer();
+ if (_type == model::operation::PermuteNode::Type::COPY)
+ {
+ assert(in_tensor.layout() == out_tensor.layout());
+ if (!in_tensor.has_padding() && !out_tensor.has_padding())
+ {
+ assert(input_size == out_tensor.total_size());
+ memcpy(output_buffer, input_buffer, input_size);
+ return;
+ }
+ }
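+ // Padded tensors (and layout-changing permutes) fall through to the
+ // rank-dispatched copies below, which go through calcOffset().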
+ switch (rank)
+ {
+ case 0:
+ case 1:
+ {
+ // Rank 0 is a scalar; rank 1 copies dim(0) elements of type T.
+ const int32_t copy_len = rank == 0 ? 1 : _output_shape.dim(0);
+
+ memcpy(output_buffer, input_buffer, copy_len * sizeof(T));
+ break;
+ }
+ case 2:
+ {
+ const int32_t copy_len = _output_shape.dim(1);
+
+ for (auto i = 0; i < _output_shape.dim(0); ++i)
+ {
+ neurun::util::Coordinates coords{i, 0};
+ memcpy(output_buffer + out_tensor.calcOffset(coords),
+ input_buffer + in_tensor.calcOffset(coords), copy_len * sizeof(T));
+ }
+ break;
+ }
+ case 3:
+ {
+ const int32_t copy_len = _output_shape.dim(2);
+
+ for (auto i = 0; i < _output_shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < _output_shape.dim(1); ++j)
+ {
+ neurun::util::Coordinates coords{i, j, 0};
+ memcpy(output_buffer + out_tensor.calcOffset(coords),
+ input_buffer + in_tensor.calcOffset(coords), copy_len * sizeof(T));
+ }
+ }
+ break;
+ }
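+ // For rank 4, coordinates are built in the source layout and translated
+ // with convertCoordinates(), so each calcOffset() call sees its own
+ // tensor's layout.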
+ case 4:
+ {
+ // TODO Unify permute type and remove switch case
+ switch (_type)
+ {
+ case model::operation::PermuteNode::Type::NHWC_TO_NCHW:
+ {
+ for (auto n = 0; n < _output_shape.dim(0); ++n)
+ {
+ for (auto c = 0; c < _output_shape.dim(1); ++c)
+ {
+ for (auto h = 0; h < _output_shape.dim(2); ++h)
+ {
+ for (auto w = 0; w < _output_shape.dim(3); ++w)
+ {
+ const neurun::util::Coordinates in_coords{n, h, w, c};
+ const auto out_coords =
+ convertCoordinates(in_coords, in_tensor.layout(), out_tensor.layout());
+ const auto value =
+ *reinterpret_cast<T *>(input_buffer + in_tensor.calcOffset(in_coords));
+ *reinterpret_cast<T *>(output_buffer + out_tensor.calcOffset(out_coords)) =
+ value;
+ }
+ }
+ }
+ }
+ break;
+ }
+ case model::operation::PermuteNode::Type::NCHW_TO_NHWC:
+ {
+ for (auto n = 0; n < _output_shape.dim(0); ++n)
+ {
+ for (auto h = 0; h < _output_shape.dim(1); ++h)
+ {
+ for (auto w = 0; w < _output_shape.dim(2); ++w)
+ {
+ for (auto c = 0; c < _output_shape.dim(3); ++c)
+ {
+ const neurun::util::Coordinates in_coords{n, c, h, w};
+ const auto out_coords =
+ convertCoordinates(in_coords, in_tensor.layout(), out_tensor.layout());
+ const auto value =
+ *reinterpret_cast<T *>(input_buffer + in_tensor.calcOffset(in_coords));
+ *reinterpret_cast<T *>(output_buffer + out_tensor.calcOffset(out_coords)) =
+ value;
+ }
+ }
+ }
+ }
+ break;
+ }
+ case model::operation::PermuteNode::Type::COPY:
+ {
+ const int32_t copy_len = _output_shape.dim(3);
+
+ for (auto i = 0; i < _output_shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < _output_shape.dim(1); ++j)
+ {
+ for (auto k = 0; k < _output_shape.dim(2); ++k)
+ {
+ neurun::util::Coordinates coords{i, j, k, 0};
+ memcpy(output_buffer + out_tensor.calcOffset(coords),
+ input_buffer + in_tensor.calcOffset(coords), copy_len * sizeof(T));
+ }
+ }
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error("NYI");
+ break;
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error("NYI");
+ break;
+ }
+ });
+ };
+ _input->access(fn);
+ }
+
+private:
+ std::shared_ptr<::neurun::backend::operand::IObject> _input{nullptr};
+ std::shared_ptr<::neurun::backend::operand::IObject> _output{nullptr};
+ model::Shape _output_shape{};
+ model::operation::PermuteNode::Type _type{model::operation::PermuteNode::Type::COPY};
+ model::DataType _dataType{model::DataType::FLOAT32};
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_PERMUTE_LAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/ReshapeLayer.cc b/runtimes/neurun/backend/cpu/kernel/ReshapeLayer.cc
new file mode 100644
index 000000000..3cf3ae3c2
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/ReshapeLayer.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ReshapeLayer.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+ReshapeLayer::ReshapeLayer() : _inputData(), _outputData(), _inputShape(), _outputShape()
+{
+ // DO NOTHING
+}
+
+void ReshapeLayer::reshapeGeneric()
+{
+ size_t count = sizeOfData(_inputShape.type, _inputShape.dimensions);
+ memcpy(_outputData.v, _inputData.v, count);
+}
+
+void ReshapeLayer::configure(uint8_t *inputData, const Shape &inputShape, uint8_t *outputData,
+ const Shape &outputShape)
+{
+ _inputData.u8 = inputData;
+ _inputShape = inputShape;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void ReshapeLayer::run() { reshapeGeneric(); }
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/cpu/kernel/ReshapeLayer.h b/runtimes/neurun/backend/cpu/kernel/ReshapeLayer.h
new file mode 100644
index 000000000..dccd82cd0
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/ReshapeLayer.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_RESHAPELAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_RESHAPELAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class ReshapeLayer : public ::neurun::exec::IFunction
+{
+public:
+ ReshapeLayer();
+
+public:
+ void reshapeGeneric();
+
+ void configure(uint8_t *inputData, const Shape &inputShape, uint8_t *outputData,
+ const Shape &outputShape);
+
+ void run();
+ void runSync()
+ {
+ // runSync() exists for profiling; this CPU kernel is synchronous, so it
+ // simply forwards to run() (cf. backend::acl_common::AclFunction).
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _outputData;
+
+ Shape _inputShape;
+ Shape _outputShape;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_RESHAPELAYER_H__
diff --git a/runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.cc b/runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.cc
new file mode 100644
index 000000000..f71a779bd
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.cc
@@ -0,0 +1,171 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SoftMaxLayer.h"
+
+#include <cker/operation/SoftMax.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+SoftMaxLayer::SoftMaxLayer()
+ : _inputData(), _outputData(), _beta(0.0), _inputShape(), _outputShape(),
+ _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+// Performs softmax along the input of size (input_size * batch_size).
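+// The running max is subtracted before exponentiation for numerical
+// stability: softmax(x) == softmax(x - max(x)) mathematically, but the
+// shifted form cannot overflow.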
+void Softmax(const float *in, const int input_size, const int batch_size, const float beta,
+ float *out)
+{
+ assert(input_size > 0);
+
+ // For each batch
+ for (int b = 0; b < batch_size; b++)
+ {
+ // Find the max coeff.
+ float max_coeff = in[0];
+ for (int i = 1; i < input_size; i++)
+ {
+ if (in[i] > max_coeff)
+ max_coeff = in[i];
+ }
+
+ // Compute the normalized sum of exps.
+ float exp_sum = 0.0;
+ for (int i = 0; i < input_size; i++)
+ {
+ out[i] = std::exp((in[i] - max_coeff) * beta);
+ exp_sum += out[i];
+ }
+
+ // Divide by the sum of exps.
+ float reciprocal_sum_exp = 1.f / exp_sum;
+ for (int i = 0; i < input_size; i++)
+ {
+ out[i] *= reciprocal_sum_exp;
+ }
+
+ // Advance in and out pointers for the next batch.
+ in += input_size;
+ out += input_size;
+ }
+}
+
+void SoftMaxLayer::softmaxFloat32()
+{
+ if (getNumberOfDimensions(_inputShape) == 2)
+ {
+ uint32_t batch_size = getSizeOfDimension(_inputShape, 0);
+ if (batch_size == 0)
+ throw std::runtime_error("batch_size should not be 0");
+
+ uint32_t input_size = getNumberOfElements(_inputShape) / batch_size;
+ Softmax(_inputData.f, input_size, batch_size, _beta, _outputData.f);
+ }
+ else if (getNumberOfDimensions(_inputShape) == 4)
+ {
+ nnfw::cker::SoftmaxParams op_params;
+ op_params.beta = _beta;
+ nnfw::cker::Softmax(op_params, convertShapeToCkerShape(_inputShape), _inputData.f,
+ convertShapeToCkerShape(_outputShape), _outputData.f);
+ }
+ else
+ {
+ throw std::runtime_error{"only 2D and 4D tensors supported"};
+ }
+}
+
+void SoftMaxLayer::softmaxQuant8()
+{
+ Shape shapeIn4D = _inputShape;
+
+ if (getNumberOfDimensions(_inputShape) == 2)
+ {
+ uint32_t batch_size = getSizeOfDimension(_inputShape, 0);
+ if (batch_size == 0)
+ throw std::runtime_error("batch_size should not be 0");
+
+ uint32_t input_size = getNumberOfElements(_inputShape) / batch_size;
+ shapeIn4D.dimensions = {batch_size, 1, 1, input_size};
+ }
+ else if (getNumberOfDimensions(_inputShape) == 4)
+ {
+ shapeIn4D = _inputShape;
+ }
+ else
+ {
+ throw std::runtime_error{"only 2D and 4D tensors supported"};
+ }
+ if (_outputShape.offset != 0 || _outputShape.scale != 1.f / 256)
+ {
+ throw std::runtime_error{"incorrect scale / offset for output"};
+ }
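+ // The NN API fixes quantized softmax output to scale 1/256, zero point 0
+ // (checked above). beta * input_scale is then folded into a fixed-point
+ // multiplier with kScaledDiffIntegerBits integer bits, and diff_min bounds
+ // the (input - max) differences that still produce a nonzero probability.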
+ static const int32_t kScaledDiffIntegerBits = 5;
+ const double input_beta_real_multiplier = std::min(
+ 1.0 * _beta * _inputShape.scale * (1 << (31 - kScaledDiffIntegerBits)), (1ll << 31) - 1.0);
+ int32_t input_multiplier = 0;
+ int32_t input_left_shift = 0;
+ QuantizeMultiplierGreaterThanOne(input_beta_real_multiplier, &input_multiplier,
+ &input_left_shift);
+ float diff_min = -1.0f * CalculateInputRadius(kScaledDiffIntegerBits, input_left_shift);
+
+ nnfw::cker::SoftmaxParams op_params;
+ op_params.input_multiplier = input_multiplier;
+ op_params.input_left_shift = input_left_shift;
+ op_params.diff_min = diff_min;
+ nnfw::cker::Softmax(op_params, convertShapeToCkerShape(shapeIn4D), _inputData.u8,
+ convertShapeToCkerShape(shapeIn4D), _outputData.u8);
+}
+
+void SoftMaxLayer::configure(uint8_t *inputData, const Shape &inputShape, const float beta,
+ uint8_t *outputData, const Shape &outputShape)
+{
+ _inputData.u8 = inputData;
+ _inputShape = inputShape;
+ _inputType = inputShape.type;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+ _beta = beta;
+}
+
+void SoftMaxLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ softmaxFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ softmaxQuant8();
+ }
+}
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
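
The float path above guards against overflow by subtracting each row's maximum before exponentiating: softmax is invariant under a constant shift of its inputs, so the largest exponent becomes zero. A minimal standalone sketch of the same trick, independent of the neurun types (softmax_row is illustrative, not part of this patch):

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Reference softmax for a single row, using the same max-subtraction trick
// as Softmax() above; beta scales the logits before exponentiation.
std::vector<float> softmax_row(const std::vector<float> &in, float beta)
{
  const float max_coeff = *std::max_element(in.begin(), in.end());
  std::vector<float> out(in.size());
  float exp_sum = 0.0f;
  for (std::size_t i = 0; i < in.size(); ++i)
  {
    out[i] = std::exp((in[i] - max_coeff) * beta); // exponent <= 0, no overflow
    exp_sum += out[i];
  }
  for (float &v : out)
    v /= exp_sum; // normalize so the row sums to 1
  return out;
}

The shift changes nothing numerically: the common factor exp(-max_coeff * beta) cancels between numerator and denominator.
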
diff --git a/runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.h b/runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.h
new file mode 100644
index 000000000..097b3dd5a
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/kernel/SoftMaxLayer.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_KERNEL_SOFTMAXLAYER_H__
+#define __NEURUN_BACKEND_CPU_KERNEL_SOFTMAXLAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace kernel
+{
+
+class SoftMaxLayer : public ::neurun::exec::IFunction
+{
+public:
+ SoftMaxLayer();
+
+public:
+ void softmaxFloat32();
+
+ void softmaxQuant8();
+
+ void configure(uint8_t *inputData, const Shape &inputShape, const float beta, uint8_t *outputData,
+ const Shape &outputShape);
+
+ void run();
+ void runSync()
+ {
+    // This method is used only for profiling; it is the entry point invoked
+    // for backend::acl_common::AclFunction.
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _outputData;
+
+ float _beta;
+
+ Shape _inputShape;
+ Shape _outputShape;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_KERNEL_SOFTMAXLAYER_H__
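
softmaxQuant8() above folds beta and the input scale into a fixed-point multiplier via QuantizeMultiplierGreaterThanOne(), which lives in OperationUtils and is not part of this diff. A plausible sketch of that decomposition, assuming the common convention of a Q31 significand plus a left shift (the helper's real definition may differ):

#include <cassert>
#include <cmath>
#include <cstdint>

// Hypothetical re-implementation: represent a real multiplier >= 1 as
// (quantized_multiplier / 2^31) * 2^left_shift.
void QuantizeMultiplierGreaterThanOneSketch(double real_multiplier,
                                            int32_t *quantized_multiplier,
                                            int32_t *left_shift)
{
  assert(real_multiplier >= 1.0);
  int exponent = 0;
  const double significand = std::frexp(real_multiplier, &exponent); // in [0.5, 1)
  *left_shift = exponent;
  auto q = static_cast<int64_t>(std::round(significand * (1ll << 31)));
  if (q == (1ll << 31)) // rounding overflowed the Q31 range
  {
    q /= 2;
    ++*left_shift;
  }
  *quantized_multiplier = static_cast<int32_t>(q);
}

For example, real_multiplier = 1.0 yields quantized_multiplier = 2^30 and left_shift = 1, i.e. (2^30 / 2^31) * 2^1 = 1.0.
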
diff --git a/runtimes/neurun/backend/cpu/operand/Tensor.cc b/runtimes/neurun/backend/cpu/operand/Tensor.cc
new file mode 100644
index 000000000..29e6eb846
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/operand/Tensor.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Tensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace operand
+{
+
+size_t Tensor::calcOffset(const neurun::util::Coordinates &coords) const
+{
+ size_t rank = num_dimensions();
+ size_t offset = 0;
+ for (size_t i = 0; i < rank; ++i)
+ {
+ offset = offset * dimension(i) + coords[i];
+ }
+ offset *= sizeOfDataType(data_type());
+ return offset;
+}
+
+} // namespace operand
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
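
calcOffset() is ordinary row-major arithmetic: each loop iteration folds in one more coordinate, and the final multiply converts an element index into a byte offset. A standalone check of the same folding, using made-up dimensions:

#include <cassert>
#include <cstddef>
#include <vector>

// Same folding as Tensor::calcOffset, written against plain vectors.
std::size_t rowMajorByteOffset(const std::vector<std::size_t> &dims,
                               const std::vector<std::size_t> &coords,
                               std::size_t elem_size)
{
  std::size_t offset = 0;
  for (std::size_t i = 0; i < dims.size(); ++i)
    offset = offset * dims[i] + coords[i];
  return offset * elem_size;
}

int main()
{
  // NHWC tensor of shape {2, 3, 4, 5}, float elements (4 bytes).
  // Coordinates (1, 2, 3, 4) -> ((1*3 + 2)*4 + 3)*5 + 4 = 119 elements.
  assert(rowMajorByteOffset({2, 3, 4, 5}, {1, 2, 3, 4}, 4) == 119 * 4);
  return 0;
}

For NHWC this means the channel coordinate varies fastest, matching the layout() the class reports.
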
diff --git a/runtimes/neurun/backend/cpu/operand/Tensor.h b/runtimes/neurun/backend/cpu/operand/Tensor.h
new file mode 100644
index 000000000..d0bfbf340
--- /dev/null
+++ b/runtimes/neurun/backend/cpu/operand/Tensor.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CPU_OPERAND_TENSOR_H__
+#define __NEURUN_BACKEND_CPU_OPERAND_TENSOR_H__
+
+#include <backend/operand/ITensor.h>
+#include "model/OperandInfo.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace cpu
+{
+namespace operand
+{
+
+class Tensor : public ::neurun::backend::operand::ITensor
+{
+public:
+ Tensor() = delete;
+
+public:
+ Tensor(const model::OperandInfo &info) : _info(info)
+ {
+ // DO NOTHING
+ }
+
+public:
+ void setBuffer(uint8_t *buffer) { _buffer = buffer; }
+ ::neurun::model::DataType data_type() const { return _info.typeInfo().type(); }
+
+public:
+ uint8_t *buffer() const override { return _buffer; }
+ /**
+ * @brief Get dimension by index
+ *
+   * @param index Index to get dimension
+ * @return size_t Dimension at index
+ * @note N : dimension(0)
+ * H : dimension(1)
+ * W : dimension(2)
+ * C : dimension(3)
+ */
+ size_t dimension(size_t index) const override { return _info.shape().dim(index); }
+ size_t num_dimensions() const override { return _info.shape().rank(); }
+ size_t total_size() const override { return _info.total_size(); }
+ size_t calcOffset(const neurun::util::Coordinates &coords) const override;
+ model::Layout layout() const override { return model::Layout::NHWC; }
+ bool has_padding() const override { return false; }
+
+private:
+ model::OperandInfo _info;
+ uint8_t *_buffer = nullptr;
+};
+
+} // namespace operand
+} // namespace cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CPU_OPERAND_TENSOR_H__
diff --git a/runtimes/neurun/backend/hi_perf_cpu/CMakeLists.txt b/runtimes/neurun/backend/hi_perf_cpu/CMakeLists.txt
new file mode 100644
index 000000000..816edba5e
--- /dev/null
+++ b/runtimes/neurun/backend/hi_perf_cpu/CMakeLists.txt
@@ -0,0 +1,44 @@
+set(LIB_NEURUN_BACKEND_HI_PERF_CPU neurun_backend_hi_perf)
+
+nnfw_find_package(NNPACK QUIET)
+
+option(BUILD_NEURUN_HI_PERF_CPU_BACKEND
+ "Build neurun HI_PERF_CPU backend"
+ ${NNPACK_FOUND} # Default value when there is no explicit user request
+)
+
+message(STATUS "Build neurun HI_PERF_CPU backend: ${BUILD_NEURUN_HI_PERF_CPU_BACKEND}")
+
+if(NOT BUILD_NEURUN_HI_PERF_CPU_BACKEND)
+ return()
+endif(NOT BUILD_NEURUN_HI_PERF_CPU_BACKEND)
+
+file(GLOB_RECURSE SOURCES "*.cc")
+file(GLOB_RECURSE TESTS "*.test.cc")
+list(REMOVE_ITEM SOURCES ${TESTS})
+
+add_library(${LIB_NEURUN_BACKEND_HI_PERF_CPU} SHARED ${SOURCES})
+
+target_link_libraries(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PRIVATE nnfw_lib_misc)
+target_link_libraries(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PRIVATE nnfw_lib_cpp14)
+target_link_libraries(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PRIVATE neurun_core)
+target_link_libraries(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PRIVATE nnfw_common)
+target_link_libraries(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PRIVATE nnpack pthreadpool cpuinfo)
+target_link_libraries(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PRIVATE nnfw_coverage)
+target_include_directories(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PRIVATE ${NNPACK_INCLUDE_DIRS})
+
+set_target_properties(${LIB_NEURUN_BACKEND_HI_PERF_CPU} PROPERTIES OUTPUT_NAME backend_NNPACK)
+
+install(TARGETS ${LIB_NEURUN_BACKEND_HI_PERF_CPU} DESTINATION lib)
+
+# Unit Tests
+set(TEST_NEURUN_BACKEND_HI_PERF_CPU test_neurun_backend_hi_perf)
+
+add_executable(${TEST_NEURUN_BACKEND_HI_PERF_CPU} ${TESTS})
+
+target_link_libraries(${TEST_NEURUN_BACKEND_HI_PERF_CPU} ${LIB_NEURUN_BACKEND_HI_PERF_CPU})
+target_link_libraries(${TEST_NEURUN_BACKEND_HI_PERF_CPU} gtest gtest_main ${LIB_PTHREAD})
+target_link_libraries(${TEST_NEURUN_BACKEND_HI_PERF_CPU} nnpack)
+
+add_test(${TEST_NEURUN_BACKEND_HI_PERF_CPU} ${TEST_NEURUN_BACKEND_HI_PERF_CPU})
+install(TARGETS ${TEST_NEURUN_BACKEND_HI_PERF_CPU} DESTINATION unittest)
diff --git a/runtimes/neurun/backend/hi_perf_cpu/HighPerformanceBackend.test.cc b/runtimes/neurun/backend/hi_perf_cpu/HighPerformanceBackend.test.cc
new file mode 100644
index 000000000..625fe1c36
--- /dev/null
+++ b/runtimes/neurun/backend/hi_perf_cpu/HighPerformanceBackend.test.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "nnpack.h"
+
+TEST(High_performance_backend, NNPACK_Test)
+{
+ // Check that it is possible to import
+ const enum nnp_status init_status = nnp_initialize();
+
+ // One of the allowed nnp status codes
+ ASSERT_GE(init_status, 0);
+ ASSERT_LE(init_status, 54);
+
+ // If it is possible to test, test relu
+ if (init_status == nnp_status_success)
+ {
+ float in[] = {-1, 1, -1, 1};
+ float out[4];
+ nnp_relu_output(1, 4, in, out, 0, nullptr);
+ for (int i = 0; i < 4; i++)
+ {
+ ASSERT_EQ(out[i], in[i] >= 0 ? in[i] : 0);
+ }
+ }
+ nnp_deinitialize();
+}
diff --git a/runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.cc b/runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.cc
new file mode 100644
index 000000000..a13fe12b9
--- /dev/null
+++ b/runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.cc
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "KernelGenerator.h"
+// This translation unit exists only to force compilation of the header.
diff --git a/runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.h b/runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.h
new file mode 100644
index 000000000..71322e743
--- /dev/null
+++ b/runtimes/neurun/backend/hi_perf_cpu/KernelGenerator.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_HI_PERF_CPU_KERNEL_GENERATOR_H__
+#define __NEURUN_BACKEND_HI_PERF_CPU_KERNEL_GENERATOR_H__
+
+#include <backend/IKernelGenerator.h>
+
+#include "model/Operands.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace hi_perf_cpu
+{
+
+class KernelGenerator : public IKernelGenerator
+{
+public:
+ KernelGenerator(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+ // TODO add more ops
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace hi_perf_cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_HI_PERF_CPU_KERNEL_GENERATOR_H__
diff --git a/runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.cc b/runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.cc
new file mode 100644
index 000000000..e6ebf5f0b
--- /dev/null
+++ b/runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.cc
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TensorBuilder.h"
+// This translation unit exists only to force compilation of the header.
diff --git a/runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.h b/runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.h
new file mode 100644
index 000000000..344d80432
--- /dev/null
+++ b/runtimes/neurun/backend/hi_perf_cpu/TensorBuilder.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_HI_PERF_CPU_TENSOR_BUILDER_H__
+#define __NEURUN_BACKEND_HI_PERF_CPU_TENSOR_BUILDER_H__
+
+#include <unordered_map>
+
+#include <backend/ITensorBuilder.h>
+#include <backend/operand/Object.h>
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace hi_perf_cpu
+{
+
+class TensorBuilder : public ITensorBuilder
+{
+public:
+ TensorBuilder();
+
+private:
+};
+
+} // namespace hi_perf_cpu
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_HI_PERF_CPU_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/backend/srcn/Backend.h b/runtimes/neurun/backend/srcn/Backend.h
new file mode 100644
index 000000000..6d7da689f
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/Backend.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_BACKEND_H__
+#define __NEURUN_BACKEND_SRCN_BACKEND_H__
+
+#include <memory>
+#include <backend/Backend.h>
+#include <model/Operands.h>
+
+#include "Config.h"
+#include "ConstantInitializer.h"
+#include "KernelGenerator.h"
+#include "ShapeFixer.h"
+#include "backend/CustomKernelRegistry.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class Backend : public ::neurun::backend::Backend
+{
+public:
+ Backend() : _config{std::make_shared<Config>()} {}
+
+ std::shared_ptr<IConfig> config() const override { return _config; }
+
+ std::unique_ptr<BackendContext>
+ newContext(const model::Operands &operands,
+ const std::shared_ptr<custom::KernelRegistry> &registry) const override
+ {
+ auto tensor_builder = std::make_shared<TensorBuilder>();
+ return std::unique_ptr<BackendContext>{new BackendContext{
+ this, tensor_builder, std::make_shared<ConstantInitializer>(operands, tensor_builder),
+ std::make_shared<KernelGenerator>(operands, tensor_builder, registry),
+ std::make_shared<ShapeFixer>(operands, tensor_builder)}};
+ }
+
+private:
+ std::shared_ptr<IConfig> _config;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_BACKEND_H__
diff --git a/runtimes/neurun/backend/srcn/CMakeLists.txt b/runtimes/neurun/backend/srcn/CMakeLists.txt
new file mode 100644
index 000000000..b51b95133
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/CMakeLists.txt
@@ -0,0 +1,20 @@
+if(NOT BUILD_SRCN_KERNEL)
+  message(STATUS "Skip building SRCN backend: SRCN kernel library is not built")
+ return()
+endif()
+
+set(LIB_NEURUN_BACKEND_SRCN neurun_backend_srcn)
+
+file(GLOB_RECURSE SOURCES "*.cc")
+
+add_library(${LIB_NEURUN_BACKEND_SRCN} SHARED ${SOURCES})
+
+target_link_libraries(${LIB_NEURUN_BACKEND_SRCN} PUBLIC nnfw_lib_cpp14)
+target_link_libraries(${LIB_NEURUN_BACKEND_SRCN} PRIVATE nnfw_lib_srcn)
+target_link_libraries(${LIB_NEURUN_BACKEND_SRCN} PRIVATE neurun_core)
+target_link_libraries(${LIB_NEURUN_BACKEND_SRCN} PRIVATE nnfw_common)
+target_link_libraries(${LIB_NEURUN_BACKEND_SRCN} PRIVATE nnfw_coverage)
+
+set_target_properties(${LIB_NEURUN_BACKEND_SRCN} PROPERTIES OUTPUT_NAME backend_srcn)
+
+install(TARGETS ${LIB_NEURUN_BACKEND_SRCN} DESTINATION lib)
diff --git a/runtimes/neurun/backend/srcn/Config.cc b/runtimes/neurun/backend/srcn/Config.cc
new file mode 100644
index 000000000..e69136fd9
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/Config.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Config.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+void Config::initialize()
+{
+ // DO NOTHING
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/srcn/Config.h b/runtimes/neurun/backend/srcn/Config.h
new file mode 100644
index 000000000..bffcbf245
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/Config.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_CONFIG_H__
+#define __NEURUN_BACKEND_SRCN_CONFIG_H__
+
+#include <backend/IConfig.h>
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class Config : public IConfig
+{
+public:
+ std::string id() override { return "srcn"; }
+ void initialize() override;
+ bool SupportSubTensorAlloc() override
+ {
+ // NOTE srcn allocator cannot support subtensor allocation yet
+ return false;
+ }
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_CONFIG_H__
diff --git a/runtimes/neurun/backend/srcn/ConstantInitializer.cc b/runtimes/neurun/backend/srcn/ConstantInitializer.cc
new file mode 100644
index 000000000..f37ebe9a4
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/ConstantInitializer.cc
@@ -0,0 +1,145 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConstantInitializer.h"
+
+#include "kernel/OperationUtils.h"
+
+namespace
+{
+
+template <typename T>
+static void
+PermuteKernel(const neurun::model::Operand &model_obj, neurun::backend::operand::IObject &obj,
+ const neurun::model::Layout frontend_layout = neurun::model::Layout::UNKNOWN)
+{
+ const auto shape = model_obj.shape();
+ auto base = reinterpret_cast<const T *>(model_obj.data().base());
+
+ assert(shape.rank() == 4);
+
+ // TODO Support frontend layout
+ UNUSED_RELEASE(frontend_layout);
+
+ obj.access([&](::neurun::backend::operand::ITensor &tensor) {
+ // NOTE The srcn takes a HWOI layout as kernel filter even though image layout is NHWC.
+ // This policy is the same with the tensorflow policy.
+ // So using srcn library, we need to change kernel layout to HWOI from OHWI.
+ const int32_t outch = shape.dim(0);
+ const int32_t height = shape.dim(1);
+ const int32_t width = shape.dim(2);
+ const int32_t inch = shape.dim(3);
+ const auto to_dim = ::neurun::backend::srcn::kernel::convertCoordinates(
+ {outch, height, width, inch}, ::neurun::backend::srcn::kernel::FilterLayout::OHWI,
+ ::neurun::backend::srcn::kernel::FilterLayout::HWOI);
+ for (auto i = 0; i < outch; ++i)
+ {
+ for (auto j = 0; j < height; ++j)
+ {
+ for (auto k = 0; k < width; ++k)
+ {
+ for (auto l = 0; l < inch; ++l)
+ {
+ const auto coords = ::neurun::backend::srcn::kernel::convertCoordinates(
+ {i, j, k, l}, ::neurun::backend::srcn::kernel::FilterLayout::OHWI,
+ ::neurun::backend::srcn::kernel::FilterLayout::HWOI);
+ const size_t offset = coords[0] * to_dim[1] * to_dim[2] * to_dim[3] +
+ coords[1] * to_dim[2] * to_dim[3] + coords[2] * to_dim[3] +
+ coords[3];
+ T *into = reinterpret_cast<T *>(tensor.buffer() + offset * sizeof(T));
+ T value = *(base + i * height * width * inch + j * width * inch + k * inch + l);
+ *into = value;
+ }
+ }
+ }
+ }
+ });
+}
+} // namespace
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+ConstantInitializer::ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _operands{operands}, _tensor_builder{tensor_builder}
+{
+ // DO NOTHING
+}
+
+void ConstantInitializer::run()
+{
+ for (const auto &it : _init_map)
+ {
+ const auto &ind = it.first;
+ const auto &fn = it.second;
+
+ const auto &model_obj = _operands.at(ind);
+ auto tensor_obj = _tensor_builder->wrapTensor(ind);
+ fn(model_obj, *tensor_obj);
+ }
+
+ _init_map.clear();
+}
+
+void ConstantInitializer::registerPermuteKernelInitializer(const model::OperandIndex &index,
+ const model::Operand &obj)
+{
+ // For only CONSTANTS
+ if (!obj.isConstant())
+ return;
+
+ VERBOSE(FillOperandData) << "[SRCN] Fill data for operand " << index.value() << std::endl;
+
+ const auto type = obj.typeInfo().type();
+ using neurun::model::DataType;
+ using namespace std::placeholders;
+
+ switch (type)
+ {
+ case DataType::FLOAT32:
+ _init_map[index] = std::bind(PermuteKernel<float>, _1, _2, _current_subg_layout);
+ break;
+ case DataType::INT32:
+ _init_map[index] = std::bind(PermuteKernel<int32_t>, _1, _2, _current_subg_layout);
+ break;
+ case DataType::UINT32:
+ _init_map[index] = std::bind(PermuteKernel<uint32_t>, _1, _2, _current_subg_layout);
+ break;
+ case DataType::BOOL8:
+ case DataType::QUANT8_ASYMM:
+ _init_map[index] = std::bind(PermuteKernel<uint8_t>, _1, _2, _current_subg_layout);
+ break;
+ default:
+ throw std::runtime_error("Not supported, yet");
+ break;
+ }
+}
+
+void ConstantInitializer::visit(const model::operation::TransposeConvNode &node)
+{
+ const auto &kernel_index = node.getInputs().at(model::operation::TransposeConvNode::KERNEL);
+ const auto &kernel_obj = _operands.at(kernel_index);
+ registerPermuteKernelInitializer(kernel_index, kernel_obj);
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
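
PermuteKernel above repacks a constant filter from OHWI, the frontend layout, into HWOI, which the srcn kernels consume. Stripped of the neurun tensor machinery, the destination offset for one element reduces to the arithmetic below (the dimension values are illustrative):

#include <cassert>
#include <cstddef>

// Linear offset of filter element (o, h, w, i) in a row-major HWOI buffer
// with dims {H, W, O, I}. H itself never enters the offset formula.
std::size_t hwoiOffset(std::size_t o, std::size_t h, std::size_t w, std::size_t i,
                       std::size_t O, std::size_t W, std::size_t I)
{
  return ((h * W + w) * O + o) * I + i;
}

int main()
{
  // For a 2x2 filter with O=2, I=3: element (o=1, h=0, w=1, i=2)
  // lands at ((0*2 + 1)*2 + 1)*3 + 2 = 11 in the HWOI buffer.
  assert(hwoiOffset(1, 0, 1, 2, 2, 2, 3) == 11);
  return 0;
}
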
diff --git a/runtimes/neurun/backend/srcn/ConstantInitializer.h b/runtimes/neurun/backend/srcn/ConstantInitializer.h
new file mode 100644
index 000000000..9865084c0
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/ConstantInitializer.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_SRCN_CONSTANT_INITIALIZER_H__
+#define __NEURUN_COMPILER_SRCN_CONSTANT_INITIALIZER_H__
+
+#include <backend/IConstantInitializer.h>
+#include <model/Operands.h>
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class ConstantInitializer : public IConstantInitializer
+{
+public:
+ ConstantInitializer(const model::Operands &operands,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+public:
+ void run() override;
+
+public:
+ void registerPermuteKernelInitializer(const model::OperandIndex &index,
+ const model::Operand &obj);
+
+public:
+ void visit(const model::operation::TransposeConvNode &) override;
+
+private:
+ const model::Operands &_operands;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_SRCN_CONSTANT_INITIALIZER_H__
diff --git a/runtimes/neurun/backend/srcn/KernelGenerator.cc b/runtimes/neurun/backend/srcn/KernelGenerator.cc
new file mode 100644
index 000000000..c0cd8b43c
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/KernelGenerator.cc
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "KernelGenerator.h"
+
+#include <stdexcept>
+
+#include "cpp14/memory.h"
+#include "util/Padding.h"
+#include "kernel/TransposeConvLayer.h"
+
+#include <backend/Backend.h>
+#include <backend/IConfig.h>
+
+#include "util/logging.h"
+
+#include "util/Utils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+KernelGenerator::KernelGenerator(const neurun::model::Operands &operand_ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder,
+ const std::shared_ptr<custom::KernelRegistry> &kernel_registry)
+ : _ctx(operand_ctx), _tensor_builder(tensor_builder), _kernel_registry(kernel_registry),
+ _current_subg_layout(model::Layout::UNKNOWN)
+{
+ // DO NOTHING
+}
+
+void KernelGenerator::visit(const model::Subgraph &subgraph)
+{
+ _current_subg_layout = subgraph.getLayout();
+ for (const auto &e : subgraph.operations())
+ {
+ const auto &node = *(e.node);
+ _tensor_builder->preVisit(node);
+ node.accept(*this);
+ _tensor_builder->postVisit(node);
+ }
+}
+
+void KernelGenerator::visit(const model::operation::TransposeConvNode &node)
+{
+ using model::operation::TransposeConvNode;
+
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(TransposeConvNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(TransposeConvNode::Input::KERNEL)};
+ const auto output_shape_index{node.getInputs().at(TransposeConvNode::Input::OUTPUT_SHAPE)};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(_current_subg_layout);
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(_current_subg_layout);
+ // Kernel format is [depth_out, kernel_height, kernel_width, depth_in].
+ const auto &ker_shape = _ctx.at(ker_index).shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+ const auto stride = node.param().stride;
+ const int padding_type = (node.param().padding.type == model::PaddingType::SAME);
+ const auto padding = neurun::util::calculatePadding(node.param().padding, ofm_shape, ifm_shape,
+ stride, ker_width, ker_height);
+
+ const auto ofm_backend_shape =
+ ::neurun::backend::srcn::kernel::getShape(_ctx.at(ofm_index), _current_subg_layout);
+ const auto ifm_backend_shape =
+ ::neurun::backend::srcn::kernel::getShape(_ctx.at(ifm_index), _current_subg_layout);
+ const auto ker_backend_shape =
+ ::neurun::backend::srcn::kernel::getShape(_ctx.at(ker_index), model::Layout::UNKNOWN);
+
+ auto ofm_alloc = _tensor_builder->at(ofm_index);
+ auto ifm_alloc = _tensor_builder->at(ifm_index);
+ auto ker_alloc = _tensor_builder->at(ker_index);
+
+ auto fn = nnfw::cpp14::make_unique<::neurun::backend::srcn::kernel::TransposeConvLayer>();
+
+ fn->configure(ifm_alloc->buffer(), ifm_backend_shape, ker_alloc->buffer(), ker_backend_shape,
+ padding_type, padding.left, padding.right, padding.top, padding.bottom,
+ stride.horizontal, stride.vertical, ofm_alloc->buffer(), ofm_backend_shape);
+
+ _execution_builder->append(std::move(fn));
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/srcn/KernelGenerator.h b/runtimes/neurun/backend/srcn/KernelGenerator.h
new file mode 100644
index 000000000..f3e92e465
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/KernelGenerator.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_KERNEL_GENERATOR_H__
+#define __NEURUN_BACKEND_SRCN_KERNEL_GENERATOR_H__
+
+#include "backend/IKernelGenerator.h"
+#include "model/Operands.h"
+#include "operand/Tensor.h"
+#include "backend/CustomKernelRegistry.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class KernelGenerator : public IKernelGenerator
+{
+public:
+ KernelGenerator(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder,
+ const std::shared_ptr<custom::KernelRegistry> &kernel_registry);
+
+ using IKernelGenerator::visit;
+
+ void visit(const model::Subgraph &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+ std::shared_ptr<custom::KernelRegistry> _kernel_registry;
+ model::Layout _current_subg_layout;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_KERNEL_GENERATOR_H__
diff --git a/runtimes/neurun/backend/srcn/MemoryManager.cc b/runtimes/neurun/backend/srcn/MemoryManager.cc
new file mode 100644
index 000000000..ad3f639f6
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/MemoryManager.cc
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MemoryManager.h"
+
+#include <cassert>
+
+#include "MemoryPlannerFactory.h"
+#include <backend/operand/Object.h>
+#include "util/logging.h"
+#include "util/ConfigSource.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+MemoryManager::MemoryManager() : _mem_planner{createMemoryPlanner()}
+{
+ // DO NOTHING
+}
+
+IMemoryPlanner *MemoryManager::createMemoryPlanner()
+{
+ auto planner_id = util::getConfigString(util::config::CPU_MEMORY_PLANNER);
+ return MemoryPlannerFactory::instance().create(planner_id);
+}
+
+void MemoryManager::buildTensor(const model::OperandIndex &ind, const model::OperandInfo &info)
+{
+ auto tensor = std::make_shared<operand::Tensor>(info);
+ _tensors[ind] = tensor;
+}
+
+void MemoryManager::claimPlan(const model::OperandIndex &ind, uint32_t size)
+{
+ _mem_planner->claim(ind, size);
+}
+
+void MemoryManager::releasePlan(const model::OperandIndex &ind) { _mem_planner->release(ind); }
+
+void MemoryManager::allocate(void)
+{
+ _mem_alloc = std::make_shared<Allocator>(_mem_planner->capacity());
+ assert(_mem_alloc->base());
+
+ for (auto &mem_plan : _mem_planner->memory_plans())
+ {
+ auto ind = mem_plan.first;
+ auto mem_blk = mem_plan.second;
+
+ uint8_t *buffer = _mem_alloc->base() + mem_blk.offset;
+ auto tensor = _tensors[ind];
+ tensor->setBuffer(buffer);
+
+ VERBOSE(CPU_MEMORYMANAGER) << "TENSOR(#" << ind.value() << "): " << static_cast<void *>(buffer)
+ << std::endl;
+
+    // If the tensor were not created by this point, kernel generation would
+    // crash with a segmentation fault.
+    // See also the comments in the `allocate` method.
+ }
+}
+
+std::shared_ptr<backend::operand::IObject> MemoryManager::wrapTensor(const model::OperandIndex &ind)
+{
+ if (_objects.find(ind) != _objects.end())
+ {
+ return _objects.at(ind);
+ }
+ else
+ {
+ return _objects[ind] = std::make_shared<::neurun::backend::operand::Object>(_tensors.at(ind));
+ }
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
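
allocate() resolves every planned block to base + offset inside a single arena, so one allocation backs all tensors. The pointer arithmetic in isolation, with hypothetical plan data standing in for the planner:

#include <cstdint>
#include <iostream>
#include <map>
#include <memory>
#include <utility>

int main()
{
  // Hypothetical finished plans: operand index -> {offset, size}.
  const std::map<int, std::pair<uint32_t, uint32_t>> plans = {{0, {0, 16}}, {1, {16, 8}}};
  const uint32_t capacity = 24; // what IMemoryPlanner::capacity() would report

  // One allocation backs every tensor; each buffer is base + offset.
  auto arena = std::make_unique<uint8_t[]>(capacity);
  for (const auto &p : plans)
    std::cout << "TENSOR(#" << p.first << "): "
              << static_cast<void *>(arena.get() + p.second.first) << "\n";
  return 0;
}
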
diff --git a/runtimes/neurun/backend/srcn/MemoryManager.h b/runtimes/neurun/backend/srcn/MemoryManager.h
new file mode 100644
index 000000000..f0cf8f0ba
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/MemoryManager.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_MEMORY_MANAGER_H__
+#define __NEURUN_BACKEND_SRCN_MEMORY_MANAGER_H__
+
+#include "backend/IMemoryManager.h"
+#include "MemoryPlanner.h"
+#include "operand/Tensor.h"
+#include <backend/operand/Object.h>
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class MemoryManager : public backend::IMemoryManager
+{
+public:
+ MemoryManager();
+ virtual ~MemoryManager() = default;
+
+ void allocate(void) override;
+ void deallocate(void) override { _mem_alloc->release(); }
+
+ void buildTensor(const model::OperandIndex &ind, const model::OperandInfo &info);
+ void claimPlan(const model::OperandIndex &ind, uint32_t size);
+ void releasePlan(const model::OperandIndex &ind);
+
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &tensors(void) { return _tensors; }
+
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind);
+
+private:
+ IMemoryPlanner *createMemoryPlanner();
+
+private:
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> _tensors;
+ model::OperandIndexMap<std::shared_ptr<::neurun::backend::operand::Object>> _objects;
+ model::OperandIndexMap<Block> _tensor_mem_map;
+ std::shared_ptr<IMemoryPlanner> _mem_planner;
+ std::shared_ptr<Allocator> _mem_alloc;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_MEMORY_MANAGER_H__
diff --git a/runtimes/neurun/backend/srcn/MemoryPlanner.cc b/runtimes/neurun/backend/srcn/MemoryPlanner.cc
new file mode 100644
index 000000000..96ce27bd8
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/MemoryPlanner.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MemoryPlanner.h"
+#include "util/logging.h"
+#include <cassert>
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+Allocator::Allocator(uint32_t capacity)
+{
+ _base = nnfw::cpp14::make_unique<uint8_t[]>(capacity);
+
+ VERBOSE(ALLOC) << "allocation capacity: " << capacity << std::endl;
+ VERBOSE(ALLOC) << "base pointer: " << static_cast<void *>(_base.get()) << std::endl;
+}
+
+void BumpPlanner::claim(const model::OperandIndex &ind, size_t size)
+{
+ assert(size != 0);
+
+ Block blk{_capacity, size};
+ _mem_plans[ind] = blk;
+ _capacity += size;
+
+ VERBOSE(BP_PLANNER) << "CLAIM(#" << ind.value() << "): " << blk.offset << ", " << blk.size
+ << std::endl;
+}
+
+void BumpPlanner::release(const model::OperandIndex &ind)
+{
+ VERBOSE(BP_PLANNER) << "RELEASE(#" << ind.value() << "): "
+ << "NOTHING does" << std::endl;
+}
+
+// There are some assumptions for claiming memory(== making a reservation for memory).
+// 1. About _claim_table(std::map).
+// - The table's data structure is std::map so that it always sorts
+// value(model::OperandIndex) by key(base_offset).
+// - This claim() inserts key/value into _claim_table and the release() removes the key/value from
+// _claim_table.
+// - _claim_table shows the memory status at a certain point in time. Therefore,
+// - If _claim_table has an offset and a certain size at a certain point in time,
+// it means the place at the offset has been already claimed(== can't claim now. need to find
+// someplace new).
+// - If _claim_table doesn't have any element for an offset and a certain size at a certain
+// point in time, it means the place at the offset can be claimed.
+// 2. In the loop for _claim_table, we can assume the current claim_base_offset value is bigger than
+// the previous claim_base_offset.
+void FirstFitPlanner::claim(const model::OperandIndex &ind, size_t size)
+{
+ assert(size != 0);
+
+ // Find the right position for claiming
+ uint32_t next_offset = 0;
+ for (auto &mem_claim : _claim_table)
+ {
+ auto claimed_base_offset = mem_claim.first;
+ auto claimed_size = _mem_plans[mem_claim.second].size;
+ if (next_offset + size <= claimed_base_offset)
+ {
+ break;
+ }
+ else
+ {
+ next_offset = claimed_base_offset + claimed_size;
+ }
+ }
+
+ // Now next_offset is set to the proper offset
+ _claim_table[next_offset] = ind;
+ _mem_plans[ind] = {next_offset, size};
+
+ VERBOSE(FF_PLANNER) << "claim(#" << ind.value() << "): [+" << next_offset << ", " << size << "sz]"
+ << std::endl;
+
+ if (_capacity < next_offset + size)
+ {
+ _capacity = next_offset + size;
+ }
+}
+
+void FirstFitPlanner::release(const model::OperandIndex &ind)
+{
+ for (auto it = _claim_table.cbegin(); it != _claim_table.cend(); ++it)
+ {
+ if (it->second == ind)
+ {
+ uint32_t offset = it->first;
+ uint32_t index = ind.value();
+ uint32_t size = _mem_plans[ind].size;
+
+ _claim_table.erase(it);
+
+ VERBOSE(FF_PLANNER) << "release(#" << index << "): [+" << offset << ", " << size << "sz]"
+ << std::endl;
+ return;
+ }
+ }
+  assert(!"Cannot release for given index. It has not been claimed, or it was already released.");
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
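
The two planners trade speed for reuse: BumpPlanner only ever appends, while FirstFitPlanner scans the claim table (sorted by offset) for the first gap that fits. A condensed, self-contained reproduction of the first-fit claim; for the trace below, bump planning would need 16 bytes, first-fit only 12:

#include <cassert>
#include <cstdint>
#include <map>

// Condensed first-fit claim over a table sorted by base offset, mirroring
// FirstFitPlanner::claim above (operand-index bookkeeping omitted).
uint32_t firstFitClaim(std::map<uint32_t, uint32_t> &claims /* offset -> size */,
                       uint32_t size)
{
  uint32_t next_offset = 0;
  for (const auto &c : claims)
  {
    if (next_offset + size <= c.first)
      break; // the gap before this claim is big enough
    next_offset = c.first + c.second;
  }
  claims[next_offset] = size;
  return next_offset;
}

int main()
{
  std::map<uint32_t, uint32_t> claims;
  assert(firstFitClaim(claims, 4) == 0); // A: [0, 4)
  assert(firstFitClaim(claims, 8) == 4); // B: [4, 12)
  claims.erase(0);                       // release A
  assert(firstFitClaim(claims, 4) == 0); // C reuses A's slot; capacity stays 12
  return 0;
}
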
diff --git a/runtimes/neurun/backend/srcn/MemoryPlanner.h b/runtimes/neurun/backend/srcn/MemoryPlanner.h
new file mode 100644
index 000000000..c66efec2f
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/MemoryPlanner.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file        MemoryPlanner.h
+ * @brief       This file contains Memory Planning related classes
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_MEMORY_PLANNER_H__
+#define __NEURUN_BACKEND_SRCN_MEMORY_PLANNER_H__
+
+#include <map>
+#include <cpp14/memory.h>
+
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+/**
+ * @brief Structure to have memory offset and size
+ */
+struct Block
+{
+ uint32_t offset;
+ size_t size;
+};
+
+/**
+ * @brief Class to allocate memory
+ */
+class Allocator
+{
+public:
+ Allocator(uint32_t capacity);
+ /**
+ * @brief Get memory base pointer
+ * @return base pointer
+ */
+ uint8_t *base() const { return _base.get(); }
+ void release() { _base.reset(); }
+
+private:
+ std::unique_ptr<uint8_t[]> _base;
+};
+
+/**
+ * @brief Interface to plan memory
+ */
+struct IMemoryPlanner
+{
+ using MemoryPlans = model::OperandIndexMap<Block>;
+
+ /**
+ * @brief Claim memory for operand
+ * @param[in] index The operand index
+ * @param[in] size The size of the memory
+ */
+ virtual void claim(const model::OperandIndex &, size_t) = 0;
+ /**
+ * @brief Release memory for operand
+ * @param[in] index The operand index
+ */
+ virtual void release(const model::OperandIndex &) = 0;
+ /**
+ * @brief Get capacity for memory planning
+ * @return The value of capacity
+ */
+ virtual uint32_t capacity() = 0;
+ /**
+ * @brief Get MemoryPlans
+ * @return MemoryPlans
+ */
+ virtual MemoryPlans &memory_plans() = 0;
+
+ virtual ~IMemoryPlanner() = default;
+};
+
+/**
+ * @brief Class to plan memory by bump way
+ */
+class BumpPlanner : public IMemoryPlanner
+{
+public:
+ /**
+ * @brief Claim memory for operand by bump way
+ * @param[in] index The operand index
+ * @param[in] size The size of the memory
+ */
+ void claim(const model::OperandIndex &, size_t) override;
+ /**
+ * @brief Release memory for operand by bump way
+ * @param[in] index The operand index
+ */
+ void release(const model::OperandIndex &) override;
+ /**
+ * @brief Get capacity for memory planning
+ * @return The value of capacity
+ */
+ uint32_t capacity() override { return _capacity; }
+ /**
+ * @brief Get MemoryPlans
+ * @return MemoryPlans
+ */
+ MemoryPlans &memory_plans() override { return _mem_plans; }
+
+private:
+ uint32_t _capacity = 0;
+ MemoryPlans _mem_plans;
+};
+
+/**
+ * @brief Class to plan memory by firstfit way
+ */
+class FirstFitPlanner : public IMemoryPlanner
+{
+public:
+ /**
+ * @brief Claim memory for operand by firstfit way
+ * @param[in] index The operand index
+ * @param[in] size The size of the memory
+ */
+ void claim(const model::OperandIndex &, size_t) override;
+ /**
+ * @brief Release memory for operand by firstfit way
+ * @param[in] index The operand index
+ */
+ void release(const model::OperandIndex &) override;
+ /**
+ * @brief Get capacity for memory planning
+ * @return The value of capacity
+ */
+ uint32_t capacity() override { return _capacity; }
+ /**
+ * @brief Get MemoryPlans
+ * @return MemoryPlans
+ */
+ MemoryPlans &memory_plans() override { return _mem_plans; }
+
+private:
+ uint32_t _capacity = 0;
+ MemoryPlans _mem_plans;
+ // Use std::map because claim() assumes that _claim_table is sorted by uint32_t(base_offset)
+ std::map<uint32_t, model::OperandIndex> _claim_table;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_MEMORY_PLANNER_H__
diff --git a/runtimes/neurun/backend/srcn/MemoryPlannerFactory.cc b/runtimes/neurun/backend/srcn/MemoryPlannerFactory.cc
new file mode 100644
index 000000000..0029c38cf
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/MemoryPlannerFactory.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MemoryPlannerFactory.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+MemoryPlannerFactory &MemoryPlannerFactory::instance()
+{
+ static MemoryPlannerFactory instance;
+ return instance;
+}
+
+IMemoryPlanner *MemoryPlannerFactory::create(const std::string &key)
+{
+ if (key == "FirstFit")
+ {
+ return new FirstFitPlanner;
+ }
+ else if (key == "Bump")
+ {
+ return new BumpPlanner;
+ }
+ return new FirstFitPlanner; // Default Planner
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/srcn/MemoryPlannerFactory.h b/runtimes/neurun/backend/srcn/MemoryPlannerFactory.h
new file mode 100644
index 000000000..79cb264a6
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/MemoryPlannerFactory.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_MEMORY_PLANNER_FACTORY_H__
+#define __NEURUN_BACKEND_SRCN_MEMORY_PLANNER_FACTORY_H__
+
+#include "MemoryPlanner.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class MemoryPlannerFactory
+{
+public:
+ static MemoryPlannerFactory &instance();
+
+private:
+ MemoryPlannerFactory() = default;
+
+public:
+ IMemoryPlanner *create(const std::string &key);
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_MEMORY_PLANNER_FACTORY_H__
diff --git a/runtimes/neurun/backend/srcn/PluginClassesAllocator.cc b/runtimes/neurun/backend/srcn/PluginClassesAllocator.cc
new file mode 100644
index 000000000..9efc6aaaa
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/PluginClassesAllocator.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <util/logging.h>
+
+#include "Backend.h"
+
+extern "C" {
+neurun::backend::Backend *neurun_backend_create()
+{
+ VERBOSE(neurun_backend_create) << "'srcn' loaded\n";
+ return new neurun::backend::srcn::Backend;
+}
+
+void neurun_backend_destroy(neurun::backend::Backend *backend)
+{
+  VERBOSE(neurun_backend_destroy) << "'srcn' unloaded\n";
+ delete backend;
+}
+}
diff --git a/runtimes/neurun/backend/srcn/ShapeFixer.cc b/runtimes/neurun/backend/srcn/ShapeFixer.cc
new file mode 100644
index 000000000..38f0d9252
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/ShapeFixer.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ShapeFixer.h"
+
+#include <stdexcept>
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+ShapeFixer::ShapeFixer(const neurun::model::Operands &operand_ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder)
+ : _ctx(operand_ctx), _tensor_builder(tensor_builder)
+{
+ assert(tensor_builder);
+}
+
+void ShapeFixer::visit(const model::operation::TransposeConvNode &) { /* DO NOTHING */}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/srcn/ShapeFixer.h b/runtimes/neurun/backend/srcn/ShapeFixer.h
new file mode 100644
index 000000000..c0a127a34
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/ShapeFixer.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_SHAPE_FIXER_H__
+#define __NEURUN_BACKEND_SRCN_SHAPE_FIXER_H__
+
+#include <backend/IShapeFixer.h>
+
+#include "model/Operands.h"
+#include "operand/Tensor.h"
+#include "TensorBuilder.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class ShapeFixer : public IShapeFixer
+{
+public:
+ ShapeFixer(const neurun::model::Operands &ctx,
+ const std::shared_ptr<TensorBuilder> &tensor_builder);
+
+ std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
+
+ void visit(const model::operation::TransposeConvNode &) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ std::shared_ptr<TensorBuilder> _tensor_builder;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_SHAPE_FIXER_H__
diff --git a/runtimes/neurun/backend/srcn/TensorBuilder.cc b/runtimes/neurun/backend/srcn/TensorBuilder.cc
new file mode 100644
index 000000000..52c11ad37
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/TensorBuilder.cc
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TensorBuilder.h"
+
+#include <cassert>
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+TensorBuilder::TensorBuilder() : _tensor_mgr{new TensorManager()}
+{
+ // DO NOTHING
+}
+
+void TensorBuilder::registerTensorInfo(const model::OperandIndex &ind,
+ const model::OperandInfo &info,
+ model::Layout /*frontend_layout*/,
+ model::Layout /*backend_layout*/, bool as_const)
+{
+ _tensor_info_map.emplace(ind, info);
+
+ // TODO set the layout
+
+ if (as_const)
+ _constants.append(ind);
+}
+
+void TensorBuilder::registerSubTensorInfo(const model::OperandIndex &,
+ const compiler::SubTensorInfo &)
+{
+ // Not supported yet
+ assert(false);
+}
+
+void TensorBuilder::notifyFirstUse(const model::OperandIndex &ind)
+{
+ assert(_tensor_info_map.find(ind) != _tensor_info_map.end());
+ const auto &info = _tensor_info_map.at(ind);
+ const auto size = info.total_size();
+ _tensor_mgr->buildTensor(ind, info, _constants.contains(ind));
+ _tensor_mgr->claimPlan(ind, size);
+}
+
+void TensorBuilder::notifyLastUse(const model::OperandIndex &ind) { _tensor_mgr->releasePlan(ind); }
+
+void TensorBuilder::prepare(void)
+{
+ _tensor_mgr->allocateConsts();
+ _tensor_mgr->allocateNonconsts();
+}
+
+// TODO Remove this
+void TensorBuilder::allocate(void)
+{
+ // NOTE For now nothing to do. Allocation is done in prepare stage, which is not appropriate
+ // This is because SRCN kernels require `ITensor`s to be allocated before Kernel Generation.
+}
+
+void TensorBuilder::allocateConsts()
+{
+ // NOTE For now nothing to do. Allocation is done in prepare stage, which is not appropriate
+ // This is because SRCN kernels require `ITensor`s to be allocated before Kernel Generation.
+}
+
+void TensorBuilder::allocateNonconsts()
+{
+ // NOTE For now nothing to do. Allocation is done in prepare stage, which is not appropriate
+ // This is because SRCN kernels require `ITensor`s to be allocated before Kernel Generation.
+}
+
+std::shared_ptr<::neurun::backend::operand::ITensor>
+TensorBuilder::tensorAt(const model::OperandIndex &ind)
+{
+ return _tensor_mgr->at(ind);
+}
+
+std::shared_ptr<backend::operand::IObject> TensorBuilder::wrapTensor(const model::OperandIndex &ind)
+{
+ return _tensor_mgr->wrapTensor(ind);
+}
+
+void TensorBuilder::iterate(const IterateFunction &fn) { _tensor_mgr->iterate(fn); }
+
+std::shared_ptr<operand::Tensor> TensorBuilder::at(const ::neurun::model::OperandIndex &ind)
+{
+ return _tensor_mgr->at(ind);
+}
+
+std::unique_ptr<ITensorManager> TensorBuilder::releaseTensorManager(void)
+{
+ return std::move(_tensor_mgr);
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
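+
+// Editorial sketch (not part of the original sources) of the call order this builder
+// expects; `ind` and `info` stand for a hypothetical model::OperandIndex and
+// model::OperandInfo:
+//
+//   TensorBuilder tb;
+//   tb.registerTensorInfo(ind, info, model::Layout::NHWC, model::Layout::NHWC, false);
+//   tb.notifyFirstUse(ind); // builds the tensor and claims its memory plan
+//   tb.notifyLastUse(ind);  // releases the memory plan
+//   tb.prepare();           // allocates const and non-const memory
+//   auto tensor = tb.tensorAt(ind);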
diff --git a/runtimes/neurun/backend/srcn/TensorBuilder.h b/runtimes/neurun/backend/srcn/TensorBuilder.h
new file mode 100644
index 000000000..98b45b64f
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/TensorBuilder.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_TENSOR_BUILDER_H__
+#define __NEURUN_BACKEND_SRCN_TENSOR_BUILDER_H__
+
+#include <unordered_map>
+
+#include <backend/ITensorBuilder.h>
+#include <backend/operand/Object.h>
+#include "operand/Tensor.h"
+#include "model/OperandIndexMap.h"
+#include "TensorManager.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class TensorBuilder : public ITensorBuilder
+{
+public:
+ TensorBuilder();
+
+ /**
+   * @brief Register tensor information to allocate on the SRCN backend
+   * @param[in] ind              Operand index
+   * @param[in] info             Operand information
+   * @param[in] frontend_layout  Frontend data layout of the operand
+   * @param[in] backend_layout   Backend data layout of the operand
+   * @param[in] as_const         Whether the operand is a constant
+ */
+ void registerTensorInfo(const model::OperandIndex &ind, const model::OperandInfo &info,
+ model::Layout frontend_layout, model::Layout backend_layout,
+ bool as_const) override;
+ /**
+   * @brief Register subtensor information to allocate on the SRCN backend
+   * @param[in] ind Operand index
+   * @param[in] info Subtensor information
+ */
+ void registerSubTensorInfo(const model::OperandIndex &ind,
+ const compiler::SubTensorInfo &info) override;
+
+ void notifyFirstUse(const model::OperandIndex &) override;
+ void notifyLastUse(const model::OperandIndex &) override;
+
+ void prepare(void) override;
+ void allocate(void) override; // TODO Remove this
+ void allocateConsts() override;
+ void allocateNonconsts() override;
+ void postFunctionPrepare() override { /* DO NOTHING */}
+ void finalize() override { /* DO NOTHING */}
+
+ std::shared_ptr<::neurun::backend::operand::ITensor>
+ tensorAt(const model::OperandIndex &ind) override;
+
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind) override;
+
+ void iterate(const IterateFunction &fn) override;
+
+ void preVisit(const model::Operation &) override { /* DO NOTHING */}
+ void postVisit(const model::Operation &) override { /* DO NOTHING */}
+
+ std::unique_ptr<ITensorManager> releaseTensorManager(void) override;
+
+ std::shared_ptr<operand::Tensor> at(const ::neurun::model::OperandIndex &ind);
+
+private:
+ std::unique_ptr<TensorManager> _tensor_mgr;
+ model::OperandIndexMap<model::OperandInfo> _tensor_info_map;
+ model::OperandIndexMap<std::pair<model::Layout, model::Layout>> _tensor_layouts_map;
+ model::OperandIndexSequence _constants;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/backend/srcn/TensorManager.cc b/runtimes/neurun/backend/srcn/TensorManager.cc
new file mode 100644
index 000000000..d0c80273e
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/TensorManager.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TensorManager.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+TensorManager::TensorManager() : _const_mgr{new MemoryManager()}, _nonconst_mgr{new MemoryManager()}
+{
+ // DO NOTHING
+}
+
+void TensorManager::allocateConsts(void) { _const_mgr->allocate(); }
+
+void TensorManager::allocateNonconsts(void) { _nonconst_mgr->allocate(); }
+
+void TensorManager::deallocateConsts(void) { _const_mgr->deallocate(); }
+
+void TensorManager::deallocateNonconsts(void) { _nonconst_mgr->deallocate(); }
+
+void TensorManager::buildTensor(const model::OperandIndex &ind,
+ const model::OperandInfo &tensor_info, bool as_const)
+{
+ assert(_ind_to_mgr.find(ind) == _ind_to_mgr.end());
+ if (as_const)
+ {
+ _const_mgr->buildTensor(ind, tensor_info);
+ _ind_to_mgr.insert({ind, *_const_mgr});
+ }
+ else
+ {
+ _nonconst_mgr->buildTensor(ind, tensor_info);
+ _ind_to_mgr.insert({ind, *_nonconst_mgr});
+ }
+}
+
+void TensorManager::claimPlan(const model::OperandIndex &ind, uint32_t size)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ _ind_to_mgr.at(ind).claimPlan(ind, size);
+}
+
+void TensorManager::releasePlan(const model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ _ind_to_mgr.at(ind).releasePlan(ind);
+}
+
+std::shared_ptr<backend::operand::IObject> TensorManager::wrapTensor(const model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ return _ind_to_mgr.at(ind).wrapTensor(ind);
+}
+
+std::shared_ptr<operand::Tensor> TensorManager::at(const ::neurun::model::OperandIndex &ind)
+{
+ assert(_ind_to_mgr.find(ind) != _ind_to_mgr.end());
+ return _ind_to_mgr.at(ind).tensors().at(ind);
+}
+
+model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &TensorManager::constTensors(void)
+{
+ return _const_mgr->tensors();
+}
+
+model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &TensorManager::nonconstTensors(void)
+{
+ return _nonconst_mgr->tensors();
+}
+
+void TensorManager::iterate(const std::function<void(const model::OperandIndex &)> &fn)
+{
+ for (auto it : _nonconst_mgr->tensors())
+ fn(it.first);
+
+ for (auto it : _const_mgr->tensors())
+ fn(it.first);
+}
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
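+
+// Editorial sketch (index and info values are hypothetical) of how the manager routes
+// tensors: constants go to _const_mgr, the rest to _nonconst_mgr, and _ind_to_mgr
+// records which MemoryManager owns each index:
+//
+//   TensorManager mgr;
+//   mgr.buildTensor(weight_ind, weight_info, /*as_const=*/true); // -> _const_mgr
+//   mgr.buildTensor(act_ind, act_info, /*as_const=*/false);      // -> _nonconst_mgr
+//   mgr.claimPlan(act_ind, act_info.total_size()); // routed via _ind_to_mgr
+//   mgr.allocateConsts();
+//   mgr.allocateNonconsts();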
diff --git a/runtimes/neurun/backend/srcn/TensorManager.h b/runtimes/neurun/backend/srcn/TensorManager.h
new file mode 100644
index 000000000..61a10d255
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/TensorManager.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_TENSOR_MANAGER_H__
+#define __NEURUN_BACKEND_SRCN_TENSOR_MANAGER_H__
+
+#include "backend/ITensorManager.h"
+#include "MemoryManager.h"
+#include "model/OperandIndexMap.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+
+class TensorManager : public backend::ITensorManager
+{
+public:
+ TensorManager();
+ virtual ~TensorManager() = default;
+
+ void allocateConsts(void) override;
+ void allocateNonconsts(void) override;
+ void deallocateConsts(void) override;
+ void deallocateNonconsts(void) override;
+
+ void buildTensor(const model::OperandIndex &ind, const model::OperandInfo &tensor_info,
+ bool as_const);
+
+ void claimPlan(const model::OperandIndex &ind, uint32_t size);
+ void releasePlan(const model::OperandIndex &ind);
+
+ std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind);
+ std::shared_ptr<operand::Tensor> at(const ::neurun::model::OperandIndex &ind);
+
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &constTensors(void);
+ model::OperandIndexMap<std::shared_ptr<operand::Tensor>> &nonconstTensors(void);
+
+ void iterate(const std::function<void(const model::OperandIndex &)> &fn);
+
+private:
+ std::unique_ptr<MemoryManager> _const_mgr;
+ std::unique_ptr<MemoryManager> _nonconst_mgr;
+ model::OperandIndexMap<MemoryManager &> _ind_to_mgr;
+};
+
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_TENSOR_MANAGER_H__
diff --git a/runtimes/neurun/backend/srcn/kernel/OperationUtils.cc b/runtimes/neurun/backend/srcn/kernel/OperationUtils.cc
new file mode 100644
index 000000000..0df0f7b33
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/kernel/OperationUtils.cc
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "OperationUtils.h"
+
+#include <cmath>
+#include <algorithm>
+#include <cassert>
+
+#include "util/Utils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+namespace kernel
+{
+
+uint32_t MatchingDim(const Shape &shape1, int index1, const Shape &shape2, int index2)
+{
+ UNUSED_RELEASE(shape2);
+ UNUSED_RELEASE(index2);
+ assert(shape1.dimensions[index1] == shape2.dimensions[index2]);
+ return shape1.dimensions[index1];
+}
+
+Coordinates convertCoordinates(const Coordinates &from_coordinates, FilterLayout from_layout,
+ FilterLayout to_layout)
+{
+ assert(from_coordinates.size() == 4);
+ Coordinates to{from_coordinates};
+ if (from_layout == FilterLayout::OHWI && to_layout == FilterLayout::HWOI)
+ {
+ to.set(0, from_coordinates[1]);
+ to.set(1, from_coordinates[2]);
+ to.set(2, from_coordinates[0]);
+ to.set(3, from_coordinates[3]);
+ }
+ else
+ {
+ throw std::runtime_error{"NYI"};
+ }
+
+ return to;
+}
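+
+// Worked example (editorial note): with from_layout == OHWI and to_layout == HWOI, a
+// coordinate (o, h, w, i) maps to (h, w, o, i), e.g. {1, 2, 0, 5} -> {2, 0, 1, 5};
+// every other layout pair currently throws "NYI".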
+
+Shape getShape(const ::neurun::model::Operand &o, ::neurun::model::Layout frontend_layout)
+{
+ Shape shape;
+
+ auto dims = o.shape().dims();
+ if (frontend_layout == ::neurun::model::Layout::NCHW && o.shape().rank() == 4)
+ {
+ // NCHW -> NHWC
+ uint32_t permutation[4] = {0, 2, 3, 1};
+ for (int i = 0; i < o.shape().rank(); ++i)
+ {
+ dims.at(i) = o.shape().dim(permutation[i]);
+ }
+ }
+ shape.dimensions = std::vector<uint32_t>(dims.begin(), dims.end());
+ shape.type = static_cast<OperandType>(static_cast<int32_t>(o.typeInfo().type()));
+ shape.scale = o.typeInfo().scale();
+ shape.offset = o.typeInfo().offset();
+
+  // This backend assumes that neurun's internal shape rank is always 4 or less
+ assert(shape.dimensions.size() <= 4);
+
+ return shape;
+}
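+
+// Worked example (editorial note): for an NCHW rank-4 operand with dims
+// {1, 32, 224, 224} = (N, C, H, W), the permutation {0, 2, 3, 1} produces NHWC dims
+// {1, 224, 224, 32}; for other layouts or ranks the dims are used as-is.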
+
+} // namespace kernel
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/srcn/kernel/OperationUtils.h b/runtimes/neurun/backend/srcn/kernel/OperationUtils.h
new file mode 100644
index 000000000..e9c833565
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/kernel/OperationUtils.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_OPERATION_UTILS_H__
+#define __NEURUN_BACKEND_SRCN_OPERATION_UTILS_H__
+
+#include <iostream>
+#include <limits>
+#include <vector>
+
+#include "model/Operand.h"
+#include "model/DataType.h"
+#include <model/InternalType.h>
+
+using OperandType = neurun::model::DataType;
+using neurun::util::Coordinates;
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+namespace kernel
+{
+
+struct Shape
+{
+ OperandType type;
+ std::vector<uint32_t> dimensions;
+ float scale;
+ int32_t offset;
+};
+
+union DataPtr {
+ uint8_t *u8;
+ int8_t *i8;
+ int32_t *i32;
+ float *f;
+ void *v;
+};
+
+enum FilterLayout
+{
+ OHWI = 0, // TfLite Kernel Layout when using NHWC image layout
+ HWOI, // SRCN Transpose Conv Kernel Layout when using NHWC image layout
+ OIHW, // SRCN Transpose Conv Kernel Layout when using NCHW image layout
+};
+
+uint32_t MatchingDim(const Shape &shape1, int index1, const Shape &shape2, int index2);
+
+Coordinates convertCoordinates(const Coordinates &from_coordinates, FilterLayout from_layout,
+ FilterLayout to_layout);
+
+Shape getShape(const ::neurun::model::Operand &o, ::neurun::model::Layout frontend_layout);
+
+} // namespace kernel
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_OPERATION_UTILS_H__
diff --git a/runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.cc b/runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.cc
new file mode 100644
index 000000000..59332ab6d
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TransposeConvLayer.h"
+
+#include "OperationUtils.h"
+#include "srcn/srcn_conv.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+namespace kernel
+{
+
+TransposeConvLayer::TransposeConvLayer()
+ : _inputData(), _kernelData(), _outputData(), _inputShape(), _kernelShape(), _outputShape(),
+ _paddingType(0), _paddingLeft(0), _paddingTop(0), _paddingRight(0), _paddingBottom(0),
+ _strideWidth(0), _strideHeight(0), _inputType(OperandType::FLOAT32)
+{
+ // DO NOTHING
+}
+
+void TransposeConvLayer::convFloat32()
+{
+ nnfw::srcn::convMat_t in_mat, out_mat, kernel_mat;
+ nnfw::srcn::convParams_t in_param;
+
+ const int batches = MatchingDim(_inputShape, 0, _outputShape, 0);
+ const int input_height = _inputShape.dimensions[1];
+ const int input_width = _inputShape.dimensions[2];
+ const int input_depth = MatchingDim(_inputShape, 3, _kernelShape, 3);
+ in_mat.c = input_depth;
+ in_mat.w = input_width;
+ in_mat.h = input_height;
+ in_mat.n = batches;
+ in_mat.data = _inputData.f;
+
+ const int output_height = _outputShape.dimensions[1];
+ const int output_width = _outputShape.dimensions[2];
+ const int output_depth = MatchingDim(_kernelShape, 0, _outputShape, 3);
+ out_mat.c = output_depth;
+ out_mat.w = output_width;
+ out_mat.h = output_height;
+ out_mat.n = batches;
+ out_mat.data = _outputData.f;
+
+ const int ker_height = _kernelShape.dimensions[1];
+ const int ker_width = _kernelShape.dimensions[2];
+ kernel_mat.c = output_depth;
+ kernel_mat.w = ker_width;
+ kernel_mat.h = ker_height;
+ kernel_mat.n = input_depth;
+ kernel_mat.data = _kernelData.f;
+
+ in_param.kernel_w = ker_width;
+ in_param.kernel_h = ker_height;
+ in_param.stride_w = _strideWidth;
+ in_param.stride_h = _strideHeight;
+ in_param.padding = _paddingType;
+ in_param.pad_w = _paddingLeft;
+ in_param.pad_h = _paddingTop;
+ in_param.dilation_w = 1;
+ in_param.dilation_h = 1;
+
+ nnfw::srcn::srcn_deconvolution2D(in_mat, kernel_mat, out_mat, in_param, 4, nnfw::srcn::col_major);
+}
+
+void TransposeConvLayer::configure(uint8_t *inputData, const Shape inputShape, uint8_t *kernelData,
+ const Shape kernelShape, const uint32_t paddingType,
+ const uint32_t paddingLeft, const uint32_t paddingRight,
+ const uint32_t paddingTop, const uint32_t paddingBottom,
+ const uint32_t strideWidth, const uint32_t strideHeight,
+ uint8_t *outputData, const Shape outputShape)
+{
+ _inputData.u8 = inputData;
+ _inputShape = inputShape;
+ _inputType = inputShape.type;
+ _kernelData.u8 = kernelData;
+ _kernelShape = kernelShape;
+ _paddingType = paddingType;
+ _paddingLeft = paddingLeft;
+ _paddingRight = paddingRight;
+ _paddingTop = paddingTop;
+ _paddingBottom = paddingBottom;
+ _strideWidth = strideWidth;
+ _strideHeight = strideHeight;
+ _outputData.u8 = outputData;
+ _outputShape = outputShape;
+}
+
+void TransposeConvLayer::run()
+{
+ if (_inputType == OperandType::FLOAT32)
+ {
+ convFloat32();
+ }
+ else if (_inputType == OperandType::QUANT8_ASYMM)
+ {
+ throw std::runtime_error("NYI");
+ }
+}
+
+} // namespace kernel
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
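+
+// Usage sketch (editorial; the buffers and shapes below are hypothetical):
+//
+//   TransposeConvLayer layer;
+//   layer.configure(in_buf, in_shape, ker_buf, ker_shape,
+//                   /*paddingType=*/0, /*paddingLeft=*/0, /*paddingRight=*/0,
+//                   /*paddingTop=*/0, /*paddingBottom=*/0,
+//                   /*strideWidth=*/2, /*strideHeight=*/2, out_buf, out_shape);
+//   layer.run(); // dispatches to convFloat32() for FLOAT32 input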
diff --git a/runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.h b/runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.h
new file mode 100644
index 000000000..db9006c22
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/kernel/TransposeConvLayer.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_KERNEL_TRANSPOSECONV_LAYER_H__
+#define __NEURUN_BACKEND_SRCN_KERNEL_TRANSPOSECONV_LAYER_H__
+
+#include <exec/IFunction.h>
+
+#include "OperationUtils.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+namespace kernel
+{
+
+class TransposeConvLayer : public ::neurun::exec::IFunction
+{
+public:
+ TransposeConvLayer();
+
+public:
+ void convFloat32();
+ void configure(uint8_t *inputData, const Shape inputShape, uint8_t *kernelData,
+ const Shape kernelShape, const uint32_t paddingType, const uint32_t paddingLeft,
+ const uint32_t paddingRight, const uint32_t paddingTop,
+ const uint32_t paddingBottom, const uint32_t strideW, const uint32_t strideH,
+ uint8_t *outputData, const Shape outputShape);
+
+ void run();
+ void runSync()
+ {
+    // This method is used just for profiling; see backend::acl_common::AclFunction.
+ run();
+ }
+
+private:
+ DataPtr _inputData;
+ DataPtr _kernelData;
+ DataPtr _outputData;
+
+ Shape _inputShape;
+ Shape _kernelShape;
+ Shape _outputShape;
+
+ uint32_t _paddingType;
+ uint32_t _paddingLeft;
+ uint32_t _paddingTop;
+ uint32_t _paddingRight;
+ uint32_t _paddingBottom;
+
+ uint32_t _strideWidth;
+ uint32_t _strideHeight;
+
+ OperandType _inputType;
+};
+
+} // namespace kernel
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_KERNEL_TRANSPOSECONV_LAYER_H__
diff --git a/runtimes/neurun/backend/srcn/operand/Tensor.cc b/runtimes/neurun/backend/srcn/operand/Tensor.cc
new file mode 100644
index 000000000..ef5f67512
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/operand/Tensor.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Tensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+namespace operand
+{
+
+size_t Tensor::calcOffset(const neurun::util::Coordinates &coords) const
+{
+ size_t rank = num_dimensions();
+ size_t offset = 0;
+ for (size_t i = 0; i < rank; ++i)
+ {
+ offset = offset * dimension(i) + coords[i];
+ }
+ offset *= sizeOfDataType(data_type());
+ return offset;
+}
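+
+// Worked example (editorial note): for NHWC dims {1, 3, 4, 2} and coords {0, 1, 2, 1}
+// the loop yields ((0 * 3 + 1) * 4 + 2) * 2 + 1 = 13 elements, i.e. a byte offset of
+// 13 * sizeof(float) = 52 for FLOAT32 data.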
+
+} // namespace operand
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/backend/srcn/operand/Tensor.h b/runtimes/neurun/backend/srcn/operand/Tensor.h
new file mode 100644
index 000000000..762f73837
--- /dev/null
+++ b/runtimes/neurun/backend/srcn/operand/Tensor.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_SRCN_OPERAND_TENSOR_H__
+#define __NEURUN_BACKEND_SRCN_OPERAND_TENSOR_H__
+
+#include <backend/operand/ITensor.h>
+#include "model/OperandInfo.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace srcn
+{
+namespace operand
+{
+
+class Tensor : public ::neurun::backend::operand::ITensor
+{
+public:
+ Tensor() = delete;
+
+public:
+ Tensor(const model::OperandInfo &info) : _info(info)
+ {
+ // DO NOTHING
+ }
+
+public:
+ void setBuffer(uint8_t *buffer) { _buffer = buffer; }
+ ::neurun::model::DataType data_type() const { return _info.typeInfo().type(); }
+
+public:
+ uint8_t *buffer() const override { return _buffer; }
+ /**
+ * @brief Get dimension by index
+ *
+   * @param index Index of the dimension to get
+ * @return size_t Dimension at index
+ * @note N : dimension(0)
+ * H : dimension(1)
+ * W : dimension(2)
+ * C : dimension(3)
+ */
+ size_t dimension(size_t index) const override { return _info.shape().dim(index); }
+ size_t num_dimensions() const override { return _info.shape().rank(); }
+ size_t total_size() const override { return _info.total_size(); }
+ size_t calcOffset(const neurun::util::Coordinates &coords) const override;
+ model::Layout layout() const override { return model::Layout::NHWC; }
+ bool has_padding() const override { return false; }
+
+private:
+ model::OperandInfo _info;
+ uint8_t *_buffer = nullptr;
+};
+
+} // namespace operand
+} // namespace srcn
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_SRCN_OPERAND_TENSOR_H__
diff --git a/runtimes/neurun/core/CMakeLists.txt b/runtimes/neurun/core/CMakeLists.txt
new file mode 100644
index 000000000..d5449a987
--- /dev/null
+++ b/runtimes/neurun/core/CMakeLists.txt
@@ -0,0 +1,18 @@
+file(GLOB_RECURSE SOURCES "src/*.cc")
+
+add_library(neurun_core STATIC ${SOURCES})
+set_target_properties(neurun_core PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(neurun_core PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
+target_include_directories(neurun_core PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src)
+target_link_libraries(neurun_core PUBLIC nnfw_lib_misc)
+target_link_libraries(neurun_core PUBLIC nnfw_lib_cpp14)
+target_link_libraries(neurun_core PRIVATE nnfw_lib_cker)
+target_link_libraries(neurun_core PRIVATE nnfw_common)
+target_link_libraries(neurun_core PRIVATE nnfw_coverage)
+target_link_libraries(neurun_core PRIVATE dl)
+
+if(ENVVAR_NEURUN_CONFIG)
+ target_compile_definitions(neurun_core PRIVATE ENVVAR_FOR_DEFAULT_CONFIG)
+endif(ENVVAR_NEURUN_CONFIG)
+
+target_link_libraries(neurun_core PUBLIC nnfw-header) # To be removed later
diff --git a/runtimes/neurun/core/include/backend/Backend.h b/runtimes/neurun/core/include/backend/Backend.h
new file mode 100644
index 000000000..e8bfac25c
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/Backend.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_BACKEND_H__
+#define __NEURUN_BACKEND_BACKEND_H__
+
+#include <memory>
+
+#include "model/Operands.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+class Backend;
+struct IConfig;
+class IConstantInitializer;
+class IKernelGenerator;
+class IShapeFixer;
+struct ITensorBuilder;
+
+namespace custom
+{
+class KernelRegistry;
+}
+
+class BackendContext
+{
+public:
+ const Backend *backend;
+ std::shared_ptr<ITensorBuilder> tensor_builder;
+ std::shared_ptr<IConstantInitializer> constant_initializer;
+ std::shared_ptr<IKernelGenerator> kernel_gen;
+ std::shared_ptr<IShapeFixer> shape_fixer;
+};
+
+class Backend
+{
+public:
+ virtual ~Backend() = default;
+ virtual std::shared_ptr<neurun::backend::IConfig> config() const = 0;
+
+ virtual std::unique_ptr<BackendContext>
+ newContext(const model::Operands &operands,
+ const std::shared_ptr<custom::KernelRegistry> &registry) const = 0;
+};
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_BACKEND_H__
diff --git a/runtimes/neurun/core/include/backend/CustomKernel.h b/runtimes/neurun/core/include/backend/CustomKernel.h
new file mode 100644
index 000000000..db0c91e46
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/CustomKernel.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CUSTOM_KERNEL_H__
+#define __NEURUN_BACKEND_CUSTOM_KERNEL_H__
+
+#include "nnfw_dev.h"
+
+#include "exec/IFunction.h"
+
+#include "misc/tensor/Shape.h"
+#include "model/DataType.h"
+
+#include <vector>
+
+namespace neurun
+{
+namespace backend
+{
+namespace custom
+{
+
+using Shape = nnfw::misc::tensor::Shape;
+
+struct TypeInfo
+{
+ Shape shape;
+ model::DataType dtype;
+};
+
+class Kernel : public ::neurun::exec::IFunction
+{
+public:
+ explicit Kernel(nnfw_custom_eval evalFunction);
+
+ nnfw_custom_kernel_params _params;
+ char *_userdata;
+ size_t _userdata_size;
+
+ nnfw_custom_eval _evalFunction;
+ // nnfw_custom_type_infer _type_infer_function; //Unused for now
+
+ struct CustomKernelConfigParams
+ {
+ std::vector<void *> input_allocations;
+ std::vector<TypeInfo> input_types;
+
+ std::vector<void *> output_allocations;
+ std::vector<TypeInfo> output_types;
+
+ char *userdata;
+ size_t userdata_size;
+ };
+
+ /**
+   * Fills the _params field used later by the user-specified eval function
+ * @param inParams custom kernel parameters
+ */
+ virtual void configure(CustomKernelConfigParams &&inParams);
+
+ void run() override;
+ void runSync() override { run(); }
+};
+
+} // namespace custom
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CUSTOM_KERNEL_H__
diff --git a/runtimes/neurun/core/include/backend/CustomKernelRegistry.h b/runtimes/neurun/core/include/backend/CustomKernelRegistry.h
new file mode 100644
index 000000000..3eb218e11
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/CustomKernelRegistry.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_CUSTOM_KERNEL_REGISTRY_H__
+#define __NEURUN_BACKEND_CUSTOM_KERNEL_REGISTRY_H__
+
+#include "CustomKernel.h"
+
+#include <unordered_map>
+#include <functional>
+#include <memory>
+
+#include <iostream>
+
+namespace neurun
+{
+namespace backend
+{
+
+namespace custom
+{
+
+class KernelRegistry
+{
+public:
+ void registerKernel(const std::string &id, nnfw_custom_eval evalFunction);
+ std::unique_ptr<Kernel> buildKernelForOp(const std::string &id);
+
+private:
+ std::unordered_map<std::string, nnfw_custom_eval> _storage;
+};
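+
+// Usage sketch (editorial; `my_eval` stands for a hypothetical nnfw_custom_eval
+// function provided by the user):
+//
+//   KernelRegistry registry;
+//   registry.registerKernel("MyCustomOp", my_eval);
+//   std::unique_ptr<Kernel> kernel = registry.buildKernelForOp("MyCustomOp");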
+
+} // namespace custom
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_CUSTOM_KERNEL_REGISTRY_H__
diff --git a/runtimes/neurun/core/include/backend/ExecTime.h b/runtimes/neurun/core/include/backend/ExecTime.h
new file mode 100644
index 000000000..4eaf49fab
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/ExecTime.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_EXEC_TIME_H__
+#define __NEURUN_BACKEND_EXEC_TIME_H__
+
+#include "backend/Backend.h"
+#include "backend/IConfig.h"
+#include "JSONExecTime.h"
+#include <memory>
+#include <limits>
+#include <map>
+#include <unordered_map>
+#include <vector>
+
+namespace neurun
+{
+namespace backend
+{
+class ExecTime
+{
+public:
+ explicit ExecTime(const std::vector<const Backend *> &backends) : _json(backends, _measurements)
+ {
+ }
+
+public:
+ /**
+   * @brief Get the execution time of an operation for a given input size,
+   * or a linearly interpolated value if there is no record for that exact size
+ *
+ * @param[in] backend id of a backend
+ * @param[in] operation name of an operation
+   * @param[in] quant whether the input type is quantized
+   * @param[in] op_size sum of the operation's flattened input and output sizes
+   * @return execution time for the given input size, or
+   * -1 if there is no record for the given parameters (backend, op, quantization).
+ */
+ int64_t getOperationExecTime(const Backend *backend, const std::string &operation, bool quant,
+ uint32_t op_size) const;
+ /**
+   * @brief Update the execution time of an operation on a backend for the given input
+   * size, or add a new entry if none exists.
+ *
+ * @param[in] backend id of a backend
+ * @param[in] operation name of an operation
+   * @param[in] quant whether the input type is quantized
+ * @param[in] op_size sum of operation's flattened sizes of inputs and outputs
+ * @param[in] time real measured value
+ */
+ void updateOperationExecTime(const Backend *backend, const std::string &operation, bool quant,
+ uint32_t op_size, int64_t time);
+ /**
+ * @brief Get the permute time from one backend to another
+ *
+   * @param[in] from_backend source backend
+   * @param[in] to_backend destination backend
+   * @param[in] quant whether the input type is quantized
+   * @param[in] op_size sum of operation's flattened sizes of inputs and outputs
+   * @return permutation time for the given operation size
+ */
+ int64_t getPermuteTime(const Backend *from_backend, const Backend *to_backend, bool quant,
+ uint32_t op_size) const;
+ /**
+ * @brief Update permute time from one backend to another
+ *
+   * @param[in] from_backend source backend
+   * @param[in] to_backend destination backend
+   * @param[in] quant whether the input type is quantized
+   * @param[in] op_size sum of operation's flattened sizes of inputs and outputs
+   * @param[in] time measured permutation time
+ */
+ void updatePermuteTime(const Backend *from_backend, const Backend *to_backend, bool quant,
+ uint32_t op_size, int64_t time);
+ /**
+ * @brief Get the max value of int32_t in int64_t
+ * @return max value
+ */
+ static int64_t getMax() { return _MAX; }
+ /**
+ * @brief Update metrics file with new data.
+ */
+ void uploadOperationsExecTime() const { _json.uploadOperationsExecTime(); }
+ static const int64_t NOT_FOUND = -1;
+
+private:
+ /// @brief Measurement data, which is shared with serializer
+ MeasurementData _measurements;
+ // int64_t::max may cause integer overflow
+ static const int64_t _MAX = std::numeric_limits<int32_t>::max();
+ /// @brief Serializer
+ JSON _json;
+};
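+
+// Editorial sketch of the linear interpolation mentioned in getOperationExecTime's
+// documentation, assuming recorded times t1 at size s1 and t2 at size s2 with
+// s1 < op_size < s2 (the real logic lives in the implementation file, not here):
+//
+//   int64_t t = t1 + (t2 - t1) * int64_t(op_size - s1) / int64_t(s2 - s1);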
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_EXEC_TIME_H__
diff --git a/runtimes/neurun/core/include/backend/IConfig.h b/runtimes/neurun/core/include/backend/IConfig.h
new file mode 100644
index 000000000..0e9572033
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/IConfig.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ICONFIG_H__
+#define __NEURUN_BACKEND_ICONFIG_H__
+
+#include "util/ITimer.h"
+#include <memory>
+#include <string>
+
+namespace neurun
+{
+namespace backend
+{
+
+struct IConfig
+{
+ virtual ~IConfig() = default;
+
+ virtual std::string id() = 0;
+ virtual void initialize() = 0;
+  // Whether the backend supports subtensor allocation
+ virtual bool SupportSubTensorAlloc() = 0;
+
+  // Timer is used for backend profiling. With the default (nullptr) timer, the profiler won't work.
+ virtual std::unique_ptr<util::ITimer> timer() { return nullptr; }
+};
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ICONFIG_H__
diff --git a/runtimes/neurun/core/include/backend/IConstantInitializer.h b/runtimes/neurun/core/include/backend/IConstantInitializer.h
new file mode 100644
index 000000000..8393e0fd8
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/IConstantInitializer.h
@@ -0,0 +1,260 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ICONSTANT_INITIALIZER_H__
+#define __NEURUN_BACKEND_ICONSTANT_INITIALIZER_H__
+
+#include <unordered_map>
+#include <functional>
+
+#include "ITensorBuilder.h"
+#include "model/Layout.h"
+#include "model/Operand.h"
+#include "model/Operands.h"
+#include "model/OperationVisitor.h"
+#include "model/Subgraph.h"
+#include "util/logging.h"
+#include "util/Utils.h"
+
+namespace
+{
+template <typename T>
+static void Init(const neurun::model::Operand &model_obj, neurun::backend::operand::IObject &obj,
+ const bool copy,
+ const neurun::model::Layout frontend_layout = neurun::model::Layout::UNKNOWN)
+{
+ const auto shape = model_obj.shape();
+ auto base = reinterpret_cast<const T *>(model_obj.data().base());
+
+ obj.access([&](::neurun::backend::operand::ITensor &tensor) {
+ switch (shape.rank())
+ {
+ case 1:
+ {
+ auto vec_size = shape.dim(0);
+ for (int32_t n = 0; n < vec_size; ++n)
+ {
+ const T *from = reinterpret_cast<const T *>(base) + n;
+ const auto value = *from;
+
+ T *into = reinterpret_cast<T *>(tensor.buffer()) + n;
+
+ *into = value;
+ }
+ break;
+ }
+ case 2:
+ {
+ const int32_t copy_len = shape.dim(1);
+
+ for (auto i = 0; i < shape.dim(0); ++i)
+ {
+ neurun::util::Coordinates coords{i, 0};
+ memcpy(tensor.buffer() + tensor.calcOffset(coords), base + i * copy_len,
+ copy_len * sizeof(T));
+ }
+ break;
+ }
+ case 3:
+ {
+ const int32_t width = shape.dim(1);
+ const int32_t copy_len = shape.dim(2);
+
+ for (auto i = 0; i < shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < shape.dim(1); ++j)
+ {
+ neurun::util::Coordinates coords{i, j, 0};
+ memcpy(tensor.buffer() + tensor.calcOffset(coords),
+ base + i * width * copy_len + j * copy_len, copy_len * sizeof(T));
+ }
+ }
+ break;
+ }
+ case 4:
+ {
+ const int32_t height = shape.dim(1);
+ const int32_t width = shape.dim(2);
+ const int32_t copy_len = shape.dim(3);
+ for (auto i = 0; i < shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < shape.dim(1); ++j)
+ {
+ for (auto k = 0; k < shape.dim(2); ++k)
+ {
+ if (copy)
+ {
+ neurun::util::Coordinates coords{i, j, k, 0};
+ memcpy(tensor.buffer() + tensor.calcOffset(coords),
+ base + i * height * width * copy_len + j * width * copy_len + k * copy_len,
+ copy_len * sizeof(T));
+ }
+ else
+ {
+ for (auto l = 0; l < shape.dim(3); ++l)
+ {
+ const auto coords = neurun::util::convertCoordinates(
+ {i, j, k, l}, frontend_layout, tensor.layout());
+ T *into = reinterpret_cast<T *>(tensor.buffer() + tensor.calcOffset(coords));
+ T value = *(base + i * height * width * copy_len + j * width * copy_len +
+ k * copy_len + l);
+ *into = value;
+ }
+ }
+ }
+ }
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error{"Not yet supported"};
+ }
+ });
+}
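+
+// Editorial note on Init() above: the rank 2-4 paths compute each row's destination
+// via tensor.calcOffset() and memcpy one innermost row at a time; only the rank-4
+// path honors `copy == false`, permuting each element's coordinates from the frontend
+// layout to the tensor's layout instead of copying rows verbatim.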
+
+template <typename T>
+void copyInit(const neurun::model::Operand &model_obj, neurun::backend::operand::IObject &obj)
+{
+ Init<T>(model_obj, obj, true);
+}
+
+template <typename T>
+void permuteInit(const neurun::model::Operand &model_obj, neurun::backend::operand::IObject &obj,
+ const neurun::model::Layout frontend_layout)
+{
+ Init<T>(model_obj, obj, false, frontend_layout);
+}
+
+} // namespace
+
+namespace neurun
+{
+namespace backend
+{
+
+class IConstantInitializer : model::OperationVisitor
+{
+public:
+ virtual ~IConstantInitializer() = default;
+
+public:
+ virtual void run() = 0;
+
+public:
+ using Initializer = std::function<void(const model::Operand &, backend::operand::IObject &)>;
+
+ void generate(const model::Subgraph &subg, const model::Operands &operands)
+ {
+ _current_subg_layout = subg.getLayout();
+ subg.accept(*this);
+ for (const auto &e : subg.operations())
+ {
+ for (const auto &ind : e.node->getInputs())
+ {
+ const auto &obj = operands.at(ind);
+ if (obj.isConstant() && !exist(ind))
+ {
+ registerPermuteInitializer(ind, obj);
+ }
+ }
+ }
+ }
+
+protected:
+#define OP(InternalName, IsNnApi) \
+ virtual void visit(const model::operation::InternalName &) override { /* DO NOTHING */}
+#include "model/Operations.lst"
+#undef OP
+
+protected:
+ void registerCopyInitializer(const model::OperandIndex &index, const model::Operand &obj)
+ {
+    // Only for constant operands
+ if (!obj.isConstant())
+ return;
+
+ VERBOSE(FillOperandData) << "Fill data for operand " << index.value() << std::endl;
+
+ const auto type = obj.typeInfo().type();
+ using neurun::model::DataType;
+
+ switch (type)
+ {
+ case DataType::FLOAT32:
+ _init_map[index] = copyInit<float>;
+ break;
+ case DataType::INT32:
+ _init_map[index] = copyInit<int32_t>;
+ break;
+ case DataType::UINT32:
+ _init_map[index] = copyInit<uint32_t>;
+ break;
+ case DataType::BOOL8:
+ case DataType::QUANT8_ASYMM:
+ _init_map[index] = copyInit<uint8_t>;
+ break;
+ default:
+ throw std::runtime_error("Not supported, yet");
+ break;
+ }
+ }
+
+protected:
+ void registerPermuteInitializer(const model::OperandIndex &index, const model::Operand &obj)
+ {
+    // Only for constant operands
+ if (!obj.isConstant())
+ return;
+
+ VERBOSE(FillOperandData) << "Fill data for operand " << index.value() << std::endl;
+
+ const auto type = obj.typeInfo().type();
+ using neurun::model::DataType;
+ using namespace std::placeholders;
+
+ switch (type)
+ {
+ case DataType::FLOAT32:
+ _init_map[index] = std::bind(permuteInit<float>, _1, _2, _current_subg_layout);
+ break;
+ case DataType::INT32:
+ _init_map[index] = std::bind(permuteInit<int32_t>, _1, _2, _current_subg_layout);
+ break;
+ case DataType::UINT32:
+ _init_map[index] = std::bind(permuteInit<uint32_t>, _1, _2, _current_subg_layout);
+ break;
+ case DataType::BOOL8:
+ case DataType::QUANT8_ASYMM:
+ _init_map[index] = std::bind(permuteInit<uint8_t>, _1, _2, _current_subg_layout);
+ break;
+ default:
+ throw std::runtime_error("Not supported, yet");
+ break;
+ }
+ }
+
+private:
+ bool exist(const model::OperandIndex &ind) { return _init_map.find(ind) != _init_map.end(); }
+
+protected:
+ std::unordered_map<model::OperandIndex, Initializer> _init_map;
+ model::Layout _current_subg_layout;
+};
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ICONSTANT_INITIALIZER_H__
diff --git a/runtimes/neurun/core/include/backend/IKernelGenerator.h b/runtimes/neurun/core/include/backend/IKernelGenerator.h
new file mode 100644
index 000000000..542a55338
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/IKernelGenerator.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_IKERNEL_GENERATOR_H__
+#define __NEURUN_BACKEND_IKERNEL_GENERATOR_H__
+
+#include <memory>
+#include <functional>
+
+#include "ITensorBuilder.h"
+#include "compiler/IExecutionBuilder.h"
+#include "model/OperationVisitor.h"
+#include "model/Subgraph.h"
+#include "cpp14/memory.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+class IKernelGenerator : public model::OperationVisitor
+{
+public:
+ virtual ~IKernelGenerator() = default;
+
+ void generate(const model::Operation &node, neurun::compiler::IExecutionBuilder *executionBuilder)
+ {
+ _execution_builder = executionBuilder;
+ node.accept(*this);
+ }
+
+protected:
+ using model::OperationVisitor::visit;
+
+#define OP(InternalName, IsNnApi) \
+ virtual void visit(const model::operation::InternalName &) override \
+ { \
+ throw std::runtime_error("NYI"); \
+ }
+#include "model/Operations.lst"
+#undef OP
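+
+  // Editorial note: assuming Operations.lst contains an entry such as
+  // OP(AddNode, true), the macro above expands to
+  //
+  //   virtual void visit(const model::operation::AddNode &) override
+  //   {
+  //     throw std::runtime_error("NYI");
+  //   }
+  //
+  // so a concrete kernel generator only overrides the operations its backend supports.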
+
+protected:
+ neurun::compiler::IExecutionBuilder *_execution_builder;
+};
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_IKERNEL_GENERATOR_H__
diff --git a/runtimes/neurun/core/include/backend/IMemoryManager.h b/runtimes/neurun/core/include/backend/IMemoryManager.h
new file mode 100644
index 000000000..b06bab872
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/IMemoryManager.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_IMEMORY_MANAGER_H__
+#define __NEURUN_BACKEND_IMEMORY_MANAGER_H__
+
+namespace neurun
+{
+namespace backend
+{
+
+struct IMemoryManager
+{
+ virtual ~IMemoryManager() = default;
+
+ virtual void allocate(void) = 0;
+ virtual void deallocate(void) = 0;
+};
+
+} // namespace backend
+} // namespace neurun
+
+#include <unordered_set>
+#include <memory>
+
+namespace neurun
+{
+namespace backend
+{
+
+using MemoryManagerSet = std::unordered_set<std::unique_ptr<backend::IMemoryManager>>;
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_IMEMORY_MANAGER_H__
diff --git a/runtimes/neurun/core/include/backend/IShapeFixer.h b/runtimes/neurun/core/include/backend/IShapeFixer.h
new file mode 100644
index 000000000..ad137942c
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/IShapeFixer.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ISHAPE_FIXER_H__
+#define __NEURUN_BACKEND_ISHAPE_FIXER_H__
+
+#include <memory>
+#include <functional>
+
+#include "ITensorBuilder.h"
+#include "model/OperationVisitor.h"
+#include "model/Subgraph.h"
+#include "cpp14/memory.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+class IShapeFixer : model::OperationVisitor
+{
+public:
+ virtual ~IShapeFixer() = default;
+
+ virtual std::shared_ptr<ITensorBuilder> tensor_builder() = 0;
+
+protected:
+#define OP(InternalName, IsNnApi) \
+ virtual void visit(const model::operation::InternalName &) override \
+ { \
+ throw std::runtime_error("NYI"); \
+ }
+#include "model/Operations.lst"
+#undef OP
+
+public:
+ void fix(const model::Operation &node) { node.accept(*this); }
+};
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ISHAPE_FIXER_H__
diff --git a/runtimes/neurun/core/include/backend/ITensorBuilder.h b/runtimes/neurun/core/include/backend/ITensorBuilder.h
new file mode 100644
index 000000000..72079a2fb
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/ITensorBuilder.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ITENSOR_BUILDER_H__
+#define __NEURUN_BACKEND_ITENSOR_BUILDER_H__
+
+#include <map>
+
+#include "model/Index.h"
+#include "model/OperandInfo.h"
+#include "model/Operation.h"
+#include "model/Layout.h"
+#include "operand/IObject.h"
+#include "operand/ITensor.h"
+#include "compiler/SubTensorInfo.h"
+#include "ITensorManager.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+struct ITensorBuilder
+{
+ using IterateFunction = std::function<void(const model::OperandIndex &)>;
+
+ virtual ~ITensorBuilder(void) = default;
+
+ // TODO Merge registerTensorInfo and registerSubTensorInfo using abstraction by internal class
+ /**
+ * @brief Register tensor information to allocate on backend
+ */
+ virtual void registerTensorInfo(const model::OperandIndex &, const model::OperandInfo &,
+ model::Layout frontend_layout, model::Layout backend_layout,
+ bool as_const) = 0;
+ /**
+ * @brief Register subtensor information to allocate on backend
+ */
+ virtual void registerSubTensorInfo(const model::OperandIndex &,
+ const compiler::SubTensorInfo &) = 0;
+
+ virtual void notifyFirstUse(const model::OperandIndex &) = 0;
+ virtual void notifyLastUse(const model::OperandIndex &) = 0;
+
+ virtual void prepare(void) = 0;
+ // TODO Remove after all of apis appended land
+ virtual void allocate(void) = 0;
+
+ virtual void allocateConsts() = 0;
+ virtual void allocateNonconsts() = 0;
+ virtual void postFunctionPrepare() = 0;
+ virtual void finalize() = 0;
+
+ virtual std::shared_ptr<::neurun::backend::operand::ITensor>
+ tensorAt(const model::OperandIndex &ind) = 0;
+ virtual std::shared_ptr<backend::operand::IObject> wrapTensor(const model::OperandIndex &ind) = 0;
+ virtual void iterate(const IterateFunction &fn) = 0;
+
+ virtual void preVisit(const model::Operation &) = 0;
+ virtual void postVisit(const model::Operation &) = 0;
+
+ virtual std::unique_ptr<ITensorManager> releaseTensorManager(void) = 0;
+};
+
+} // namespace backend
+} // namespace neurun
+
+#include <unordered_set>
+#include <memory>
+
+namespace neurun
+{
+namespace backend
+{
+
+using TensorBuilderSet = std::unordered_set<std::shared_ptr<backend::ITensorBuilder>>;
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ITENSOR_BUILDER_H__
diff --git a/runtimes/neurun/core/include/backend/ITensorManager.h b/runtimes/neurun/core/include/backend/ITensorManager.h
new file mode 100644
index 000000000..74506ef59
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/ITensorManager.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_ITENSOR_MANAGER_H__
+#define __NEURUN_BACKEND_ITENSOR_MANAGER_H__
+
+namespace neurun
+{
+namespace backend
+{
+
+// NOTE Whether ITensorManager is a proper name for this interface is still under
+// discussion; suggestions for a better name are welcome.
+/**
+ * @brief Interface as an abstract tensor manager which has MemoryManager
+ */
+struct ITensorManager
+{
+ virtual ~ITensorManager() = default;
+
+ virtual void allocateConsts(void) = 0;
+ virtual void allocateNonconsts(void) = 0;
+ virtual void deallocateConsts(void) = 0;
+ virtual void deallocateNonconsts(void) = 0;
+};
+
+} // namespace backend
+} // namespace neurun
+
+#include <unordered_set>
+#include <memory>
+
+namespace neurun
+{
+namespace backend
+{
+
+using TensorManagerSet = std::unordered_set<std::unique_ptr<backend::ITensorManager>>;
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_ITENSOR_MANAGER_H__
diff --git a/runtimes/neurun/core/include/backend/JSONExecTime.h b/runtimes/neurun/core/include/backend/JSONExecTime.h
new file mode 100644
index 000000000..84505e10f
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/JSONExecTime.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_JSON_EXEC_TIME_H__
+#define __NEURUN_BACKEND_JSON_EXEC_TIME_H__
+
+#include <fstream>
+#include <unordered_map>
+#include <map>
+#include <vector>
+#include "backend/Backend.h"
+#include "backend/IConfig.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+/**
+ * @brief Table that contains the execution time of an operation on some backend for
+ * different input sizes, and the transfer time from one backend to another for various
+ * input sizes (permutation time)
+ *
+ * backend -> op -> quant -> size -> time
+ * _measurements[Backend*]["string"][bool][uint32_t] = int64_t
+ */
+using MeasurementData = std::unordered_map<
+ const Backend *,
+ std::unordered_map<std::string, std::unordered_map<bool, std::map<uint32_t, int64_t>>>>;
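+
+// Editorial example of the indexing (values hypothetical): a non-quantized "Conv2D"
+// on backend `b` with flattened operand size 1024 that took 500 us would be stored as
+//
+//   measurements[b]["Conv2D"][false][1024] = 500;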
+
+class JSON
+{
+public:
+ explicit JSON(const std::vector<const Backend *> &backends, MeasurementData &measurements)
+ : _measurement_file("exec_time.json"), _backends(), _measurements(measurements)
+ {
+ for (const auto b : backends)
+ {
+ _backends.emplace(b->config()->id(), b);
+ }
+ loadOperationsExecTime();
+  }
+ /**
+   * @brief Update _measurement_file with new data.
+ */
+ void uploadOperationsExecTime() const;
+
+private:
+  /// @brief File containing measurements
+ std::string _measurement_file;
+ std::unordered_map<std::string, const Backend *> _backends;
+ std::unordered_map<
+ const Backend *,
+ std::unordered_map<std::string, std::unordered_map<bool, std::map<uint32_t, int64_t>>>>
+ &_measurements;
+ /**
+ * @brief Helper function for inserting data to OperationExecTimes
+ *
+ * @param backend String name of backend
+ * @param operation String name of operation
+ * @param quant if input type quantized
+ * @param stream File stream
+ */
+ void readOperation(const std::string &backend, const std::string &operation, bool quant,
+ std::ifstream &stream);
+
+ /**
+ * @brief Helper function for writing OperationExecTimes to stream
+ *
+ * @param operation_info Map of operations execution information
+ * @param stream File stream
+ */
+ void printOperation(const std::map<uint32_t, int64_t> &operation_info,
+ std::ofstream &stream) const;
+ /**
+ * @brief Parse and load operation execution times from _measurement_file.
+ */
+ void loadOperationsExecTime();
+};
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_JSON_EXEC_TIME_H__
diff --git a/runtimes/neurun/core/include/backend/operand/IObject.h b/runtimes/neurun/core/include/backend/operand/IObject.h
new file mode 100644
index 000000000..56eea34a8
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/operand/IObject.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_OPERAND_I_OBJECT_H__
+#define __NEURUN_BACKEND_OPERAND_I_OBJECT_H__
+
+#include <functional>
+
+#include "ITensor.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace operand
+{
+
+struct IObject
+{
+ virtual ~IObject() = default;
+ virtual operand::ITensor *ptr(void) const = 0;
+ virtual void access(const std::function<void(operand::ITensor &tensor)> &fn) const = 0;
+};
+
+} // namespace operand
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_OPERAND_I_OBJECT_H__
diff --git a/runtimes/neurun/core/include/backend/operand/ITensor.h b/runtimes/neurun/core/include/backend/operand/ITensor.h
new file mode 100644
index 000000000..f762ad03c
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/operand/ITensor.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_OPERAND_I_TENSOR_H__
+#define __NEURUN_BACKEND_OPERAND_I_TENSOR_H__
+
+#include <cstring>
+#include <cstdint>
+
+#include "model/Layout.h"
+#include "util/Coordinates.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace operand
+{
+
+class ITensor
+{
+public:
+ virtual ~ITensor() = default;
+
+public:
+ virtual uint8_t *buffer() const = 0;
+ virtual size_t total_size() const = 0;
+ virtual size_t dimension(size_t index) const = 0;
+ virtual size_t num_dimensions() const = 0;
+ virtual size_t calcOffset(const neurun::util::Coordinates &coords) const = 0;
+ virtual model::Layout layout() const = 0;
+ virtual bool has_padding() const = 0;
+};
+
+} // namespace operand
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_OPERAND_I_TENSOR_H__
diff --git a/runtimes/neurun/core/include/backend/operand/Object.h b/runtimes/neurun/core/include/backend/operand/Object.h
new file mode 100644
index 000000000..e6f6d926d
--- /dev/null
+++ b/runtimes/neurun/core/include/backend/operand/Object.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_OPERAND_OBJECT_H__
+#define __NEURUN_BACKEND_OPERAND_OBJECT_H__
+
+#include <memory>
+#include "ITensor.h"
+
+#include "IObject.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace operand
+{
+
+class Object : public IObject
+{
+public:
+ Object() = default;
+
+public:
+ Object(const std::shared_ptr<ITensor> &tensor) : _tensor{tensor}
+ {
+ // DO NOTHING
+ }
+
+public:
+ ITensor *ptr(void) const override { return _tensor.get(); }
+
+private:
+ std::shared_ptr<ITensor> _tensor;
+
+public:
+ void access(const std::function<void(ITensor &tensor)> &fn) const override { fn(*_tensor); }
+};
+
+} // namespace operand
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_OPERAND_OBJECT_H__
diff --git a/runtimes/neurun/core/include/compiler/Compiler.h b/runtimes/neurun/core/include/compiler/Compiler.h
new file mode 100644
index 000000000..094ffe853
--- /dev/null
+++ b/runtimes/neurun/core/include/compiler/Compiler.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Compiler.h
+ * @brief This file contains Compiler class to define and run compilation phase
+ */
+
+#ifndef __NEURUN_COMPILER_COMPILE_H_
+#define __NEURUN_COMPILER_COMPILE_H_
+
+#include "graph/Graph.h"
+#include "exec/IExecutor.h"
+
+namespace neurun
+{
+
+namespace compiler
+{
+
+enum class State
+{
+ CREATED, // Before compilation
+ STARTED, // Compilation has started
+ LOWERED, // Backends are decided
+ COMPILED // Compilation succeeded
+};
+
+/**
+ * @brief Class to compile graph model
+ */
+class Compiler
+{
+public:
+ /**
+ * @brief Construct a new Compiler object
+ * @param[in] graph Graph model to compile
+ */
+ Compiler(const std::shared_ptr<graph::Graph> &graph)
+ : _graph{graph}, _executor{nullptr}, _state{State::CREATED}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Run compilation. The compilation result will be saved in _executor
+ */
+ void compile(void);
+ /**
+ * @brief Pass the executor reference
+ * @param[out] executor Executor reference to return\n
+ * Set to nullptr if compile() has not been run yet
+ */
+ void release(std::shared_ptr<exec::IExecutor> &executor) { executor = _executor; }
+
+ void state(State state) { _state = state; }
+ State state(void) const { return _state; }
+
+ /**
+ * @brief Check if the model can be compiled
+ * @return @c true if the model can be compiled, otherwise @c false
+ * @note This method does not check model correctness,\n
+ * so model verification should be done before calling this method
+ */
+ bool checkCompilable();
+
+private:
+ std::shared_ptr<graph::Graph> _graph;
+ std::shared_ptr<exec::IExecutor> _executor;
+ State _state;
+};
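+
+// Typical usage (illustrative sketch; `graph` is a std::shared_ptr<graph::Graph>
+// holding a finished model):
+//   compiler::Compiler compiler{graph};
+//   compiler.compile();
+//   std::shared_ptr<exec::IExecutor> executor;
+//   compiler.release(executor); // executor stays nullptr if compile() was not run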
+
+} // namespace compiler
+
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_COMPILE_H_
diff --git a/runtimes/neurun/core/include/compiler/IExecutionBuilder.h b/runtimes/neurun/core/include/compiler/IExecutionBuilder.h
new file mode 100644
index 000000000..c5a06fec0
--- /dev/null
+++ b/runtimes/neurun/core/include/compiler/IExecutionBuilder.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_I_EXECUTION_BUILDER_H__
+#define __NEURUN_COMPILER_I_EXECUTION_BUILDER_H__
+
+#include <memory>
+
+#include "exec/IFunction.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+struct IExecutionBuilder
+{
+ virtual ~IExecutionBuilder() = default;
+
+ virtual void append(std::unique_ptr<::neurun::exec::IFunction> &&f) = 0;
+};
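+
+// During code generation each backend appends its kernels in execution order,
+// e.g. (sketch; KernelFn stands for a hypothetical IFunction implementation):
+//   builder.append(nnfw::cpp14::make_unique<KernelFn>());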
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_I_EXECUTION_BUILDER_H__
diff --git a/runtimes/neurun/core/include/compiler/SubTensorInfo.h b/runtimes/neurun/core/include/compiler/SubTensorInfo.h
new file mode 100644
index 000000000..92b2759ea
--- /dev/null
+++ b/runtimes/neurun/core/include/compiler/SubTensorInfo.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file SubTensorInfo.h
+ * @brief This file contains SubTensorInfo to represent subsumption between tensors
+ * for backend tensor allocation
+ */
+#ifndef __NEURUN_COMPILER_SUBTENSOR_INFO_H__
+#define __NEURUN_COMPILER_SUBTENSOR_INFO_H__
+
+#include "model/Operand.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+/**
+ * @brief Class to represent information of subtensor
+ */
+class SubTensorInfo
+{
+public:
+ SubTensorInfo() = delete;
+
+ /**
+ * @brief Construct a new SubTensorInfo object
+ * @param[in] obj Operand that carries parent (sub-tensor) information
+ */
+ SubTensorInfo(const model::Operand &obj)
+ : _parent{obj.parent_info()->parent()}, _shape{obj.shape()}, _type{obj.typeInfo()},
+ _offset{obj.parent_info()->offset()}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Return parent tensor index
+ * @return Parent tensor index
+ */
+ const model::OperandIndex parent(void) const { return _parent; }
+ /**
+ * @brief Return tensor shape
+ * @return Tensor shape
+ */
+ const model::Shape shape(void) const { return _shape; }
+ /**
+ * @brief Return tensor type
+ * @return Tensor type
+ */
+ const model::TypeInfo type(void) const { return _type; }
+ /**
+ * @brief Return tensor's offset in parent tensor
+ * @return Tensor offset
+ */
+ const neurun::util::Coordinates offset(void) const { return _offset; }
+
+private:
+ const model::OperandIndex _parent;
+ const model::Shape _shape;
+ const model::TypeInfo _type;
+ const neurun::util::Coordinates _offset;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_SUBTENSOR_INFO_H__
diff --git a/runtimes/neurun/core/include/exec/Execution.h b/runtimes/neurun/core/include/exec/Execution.h
new file mode 100644
index 000000000..a5b47f039
--- /dev/null
+++ b/runtimes/neurun/core/include/exec/Execution.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Execution.h
+ * @brief This file defines execution
+ */
+#ifndef __NEURUN_EXEC_EXECUTION_H__
+#define __NEURUN_EXEC_EXECUTION_H__
+
+#include "exec/IExecutor.h"
+#include "IODescription.h"
+
+#include <memory>
+#include <thread>
+
+namespace neurun
+{
+namespace exec
+{
+
+/**
+ * @brief Class to define an execution instance that collects input/output information
+ * for inference and prepares the executor to run (TODO)
+ */
+class Execution
+{
+
+public:
+ /**
+ * @brief Construct a new Execution object
+ * @param[in] executor Model executor
+ */
+ Execution(const std::shared_ptr<IExecutor> &executor);
+
+public:
+ /**
+ * @brief Returns model object
+ * @return Model object
+ */
+ const model::Model &model() const { return _executor->model(); }
+ /**
+ * @brief Set input data's information
+ * @param[in] index Input index
+ * @param[in] buffer Input data's buffer pointer
+ * @param[in] length Input data's length
+ */
+ void setInput(const model::IOIndex &index, const void *buffer, size_t length);
+ /**
+ * @brief Set input data's information, especially to specify dimensions that were
+ * unknown at model build time.
+ * @param[in] index Input index
+ * @param[in] type Input data's type info
+ * @param[in] shape Input data's shape
+ * @param[in] buffer Input data's buffer pointer
+ * @param[in] length Input data's length
+ */
+ void setInput(const model::IOIndex &index, const model::TypeInfo &type, const model::Shape &shape,
+ const void *buffer, size_t length);
+ /**
+ * @brief Set output data's information
+ * @param[in] index Output index
+ * @param[in] buffer Output data's buffer pointer
+ * @param[in] length Output data's length
+ */
+ void setOutput(const model::IOIndex &index, void *buffer, size_t length);
+ /**
+ * @brief Set output data's information, especially to specify dimensions that were
+ * unknown at model build time.
+ * @param[in] index Output index
+ * @param[in] type Output data's type info
+ * @param[in] shape Output data's shape
+ * @param[in] buffer Output data's buffer pointer
+ * @param[in] length Output data's length
+ */
+ void setOutput(const model::IOIndex &index, const model::TypeInfo &type,
+ const model::Shape &shape, void *buffer, size_t length);
+ /**
+ * @brief Run execution synchronously
+ * @note It should be called after setting input and output buffers
+ */
+ void execute();
+
+ /**
+ * @brief Start asynchronous execution
+ * @note It returns after execution thread is started
+ * It should be called after setting input and output buffers
+ */
+ void startExecute(void);
+
+ /**
+ * @brief Return when execution is finished
+ * @note It waits until execution is finished
+ */
+ void waitFinish(void);
+
+private:
+ const std::shared_ptr<IExecutor> _executor;
+ IODescription _io_desc;
+ std::unique_ptr<std::thread> _exec_thread;
+};
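+
+// Typical synchronous usage (illustrative sketch; buffers and lengths are assumed
+// to be prepared by the caller):
+//   exec::Execution execution{executor};
+//   execution.setInput(model::IOIndex{0}, input_buffer, input_length);
+//   execution.setOutput(model::IOIndex{0}, output_buffer, output_length);
+//   execution.execute();
+// For the asynchronous variant, call startExecute() and then waitFinish().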
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_EXECUTION_H__
diff --git a/runtimes/neurun/core/include/exec/ExecutionObservers.h b/runtimes/neurun/core/include/exec/ExecutionObservers.h
new file mode 100644
index 000000000..61c8bf1b2
--- /dev/null
+++ b/runtimes/neurun/core/include/exec/ExecutionObservers.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_OBSERVERS_H__
+#define __NEURUN_EXEC_OBSERVERS_H__
+
+#include "exec/IFunction.h"
+#include "model/Operation.h"
+#include "backend/ExecTime.h"
+#include "util/ITimer.h"
+#include "IExecutor.h"
+
+namespace neurun
+{
+namespace exec
+{
+class IExecutionObserver
+{
+public:
+ /// @brief Invoked just before model (not individual operation) execution begins
+ virtual void handleBegin(IExecutor *) { return; }
+
+ virtual void handleBegin(IExecutor *, const model::Operation *, const backend::Backend *) = 0;
+ virtual void handleEnd(IExecutor *, const model::Operation *, const backend::Backend *) = 0;
+
+ /// @brief Invoked just after model (not individual operation) execution ends
+ virtual void handleEnd(IExecutor *) { return; }
+
+ virtual ~IExecutionObserver() = default;
+};
+
+class ProfileObserver : public IExecutionObserver
+{
+public:
+ explicit ProfileObserver(std::shared_ptr<backend::ExecTime> et) : _et(std::move(et)) {}
+ void handleBegin(IExecutor *, const model::Operation *, const backend::Backend *) override;
+ void handleEnd(IExecutor *, const model::Operation *, const backend::Backend *) override;
+
+ void handleEnd(IExecutor *) override { uploadExecTime(); }
+
+private:
+ void uploadExecTime() { _et->uploadOperationsExecTime(); }
+
+private:
+ std::unique_ptr<util::ITimer> _timer;
+ std::shared_ptr<backend::ExecTime> _et;
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_OBSERVERS_H__
diff --git a/runtimes/neurun/core/include/exec/IExecutor.h b/runtimes/neurun/core/include/exec/IExecutor.h
new file mode 100644
index 000000000..eb4f5e302
--- /dev/null
+++ b/runtimes/neurun/core/include/exec/IExecutor.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file IExecutor.h
+ * @brief This file defines interface of Executor
+ */
+#ifndef __NEURUN_EXEC_I_EXECUTOR_H_
+#define __NEURUN_EXEC_I_EXECUTOR_H_
+
+#include "model/Model.h"
+#include "IFunction.h"
+#include "IODescription.h"
+#include "model/OperationIndexMap.h"
+
+namespace neurun
+{
+namespace exec
+{
+class IExecutionObserver;
+/**
+ * @brief Struct to define interface of Executor
+ */
+struct IExecutor
+{
+ /**
+ * @brief Construct a new IExecutor object
+ */
+ IExecutor() = default;
+ /**
+ * @brief Destroy the IExecutor object
+ */
+ virtual ~IExecutor() = default;
+
+ /**
+ * @brief Returns model object
+ *
+ * @return Model object
+ */
+ virtual const model::Model &model() = 0;
+
+ /**
+ * @brief Set an ordering on operations
+ * @param[in] ranks The table encoding the ordering
+ */
+ virtual void setIndexedRanks(std::shared_ptr<model::OperationIndexMap<int64_t>>) = 0;
+
+ /**
+ * @brief Start execution
+ * @param[in] desc Input and output description
+ * @note This method should be thread-safe
+ */
+ virtual void execute(const IODescription &desc) = 0;
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_I_EXECUTOR_H_
diff --git a/runtimes/neurun/core/include/exec/IFunction.h b/runtimes/neurun/core/include/exec/IFunction.h
new file mode 100644
index 000000000..5cc29ea75
--- /dev/null
+++ b/runtimes/neurun/core/include/exec/IFunction.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_I_FUNCTION_H__
+#define __NEURUN_EXEC_I_FUNCTION_H__
+
+namespace neurun
+{
+namespace exec
+{
+
+class IFunction
+{
+public:
+ virtual ~IFunction() = default;
+ virtual void run() = 0;
+ virtual void runSync() = 0;
+ virtual void prepare() {}
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_I_FUNCTION_H__
diff --git a/runtimes/neurun/core/include/exec/IODescription.h b/runtimes/neurun/core/include/exec/IODescription.h
new file mode 100644
index 000000000..4809f34fe
--- /dev/null
+++ b/runtimes/neurun/core/include/exec/IODescription.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_IO_DESCRIPTION_H__
+#define __NEURUN_EXEC_IO_DESCRIPTION_H__
+
+#include <memory>
+#include <vector>
+
+#include "model/OperandInfo.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+struct InputDesc
+{
+ const model::OperandInfo info;
+ const void *buffer;
+ const size_t size;
+
+ InputDesc(void) = delete;
+ InputDesc(const model::OperandInfo &info, const void *buffer, const size_t size)
+ : info(info), buffer(buffer), size(size)
+ {
+ }
+};
+
+struct OutputDesc
+{
+ const model::OperandInfo info;
+ void *buffer;
+ const size_t size;
+
+ OutputDesc(void) = delete;
+ OutputDesc(const model::OperandInfo &info, void *buffer, const size_t size)
+ : info(info), buffer(buffer), size(size)
+ {
+ }
+};
+
+struct IODescription
+{
+ std::vector<std::unique_ptr<InputDesc>> inputs;
+ std::vector<std::unique_ptr<OutputDesc>> outputs;
+};
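+
+// Normally populated via Execution::setInput/setOutput; an equivalent manual
+// population would look like this (sketch; info/buffer/size prepared by the caller):
+//   IODescription desc;
+//   desc.inputs.emplace_back(nnfw::cpp14::make_unique<InputDesc>(info, buffer, size));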
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_IO_DESCRIPTION_H__
diff --git a/runtimes/neurun/core/include/exec/NopFunction.h b/runtimes/neurun/core/include/exec/NopFunction.h
new file mode 100644
index 000000000..5cbd7e5ce
--- /dev/null
+++ b/runtimes/neurun/core/include/exec/NopFunction.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file NopFunction.h
+ * @brief This file defines NopFunction
+ */
+#ifndef __NEURUN_EXEC_NOP_FUNCTION_H_
+#define __NEURUN_EXEC_NOP_FUNCTION_H_
+
+#include "IFunction.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+/**
+ * @brief A derivative of IFunction that does nothing
+ *
+ */
+class NopFunction : public IFunction
+{
+public:
+ NopFunction() = default;
+ void run() override
+ {
+ // DO NOTHING
+ }
+ void runSync() override
+ {
+ // runSync is used only for profiling; the distinction matters for
+ // backend::acl_common::AclFunction
+ run();
+ }
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_NOP_FUNCTION_H_
diff --git a/runtimes/neurun/core/include/graph/BackendSet.h b/runtimes/neurun/core/include/graph/BackendSet.h
new file mode 100644
index 000000000..a3a508697
--- /dev/null
+++ b/runtimes/neurun/core/include/graph/BackendSet.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_OPERAND_BACKEND_SET_H__
+#define __NEURUN_GRAPH_OPERAND_BACKEND_SET_H__
+
+#include "util/Set.h"
+
+namespace neurun
+{
+namespace backend
+{
+class Backend;
+} // namespace backend
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+
+using BackendSet = util::Set<const backend::Backend *>;
+
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_OPERAND_BACKEND_SET_H__
diff --git a/runtimes/neurun/core/include/graph/Graph.h b/runtimes/neurun/core/include/graph/Graph.h
new file mode 100644
index 000000000..b3e6d54ff
--- /dev/null
+++ b/runtimes/neurun/core/include/graph/Graph.h
@@ -0,0 +1,204 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_GRAPH_H__
+#define __NEURUN_GRAPH_GRAPH_H__
+
+#include <functional>
+
+#include "model/Operation.h"
+#include "model/Model.h"
+#include "graph/LowerInfoMap.h"
+#include "model/Subgraph.h"
+#include "model/Subgraphs.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace operand
+{
+class LowerInfo;
+} // namespace operand
+} // namespace graph
+} // namespace neurun
+
+namespace neurun
+{
+namespace compiler
+{
+class Linear;
+} // namespace compiler
+} // namespace neurun
+
+namespace neurun
+{
+namespace compiler
+{
+class BackendResolver;
+} // namespace compiler
+} // namespace neurun
+
+namespace neurun
+{
+namespace backend
+{
+namespace custom
+{
+class KernelRegistry;
+} // namespace custom
+} // namespace backend
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+
+class Graph
+{
+private:
+ enum class Phase
+ {
+ BUILDING,
+ MODEL
+ };
+
+public:
+ template <bool is_const> class Iterator
+ {
+ public:
+ using GraphRef = typename std::conditional<is_const, const Graph &, Graph &>::type;
+ using IndexRef = const model::OperationIndex &;
+ using NodeRef =
+ typename std::conditional<is_const, const model::Operation &, model::Operation &>::type;
+ using IterFn = std::function<void(IndexRef, NodeRef)>;
+
+ public:
+ virtual ~Iterator() = default;
+ virtual void iterate(GraphRef graph, const IterFn &fn) const = 0;
+ };
+
+ template <bool is_const = false> class DefaultIterator final : public Iterator<is_const>
+ {
+ public:
+ using GraphRef = typename Iterator<is_const>::GraphRef;
+ using IndexRef = typename Iterator<is_const>::IndexRef;
+ using NodeRef = typename Iterator<is_const>::NodeRef;
+ using IterFn = typename Iterator<is_const>::IterFn;
+
+ public:
+ void iterate(GraphRef graph, const IterFn &fn) const;
+ };
+ using DefaultConstIterator = DefaultIterator<true>;
+
+ template <bool is_const = false> class PostDfsIterator final : public Iterator<is_const>
+ {
+ public:
+ using GraphRef = typename Iterator<is_const>::GraphRef;
+ using IndexRef = typename Iterator<is_const>::IndexRef;
+ using NodeRef = typename Iterator<is_const>::NodeRef;
+ using IterFn = typename Iterator<is_const>::IterFn;
+
+ public:
+ void iterate(GraphRef graph, const IterFn &fn) const;
+ };
+ using PostDfsConstIterator = PostDfsIterator<true>;
+
+public:
+ Graph(void) = delete;
+ Graph(std::unique_ptr<model::Model> &&model);
+ ~Graph(void);
+
+ // Graph Building
+public:
+ model::OperandIndex addOperand(const model::Shape &shape, const model::TypeInfo &type);
+ model::OperationIndex addOperation(std::unique_ptr<model::Operation> &&node);
+ void setOperandValue(const model::OperandIndex &ind, std::unique_ptr<model::Data> &&data);
+ void addInput(const model::OperandIndex &ind);
+ void addOutput(const model::OperandIndex &ind);
+ void finishBuilding(void);
+ void lower(void);
+ void removeOperand(const model::OperandIndex &ind) { _model->operands.remove(ind); }
+ std::unique_ptr<compiler::Linear> linearize(void);
+ bool isBuildingPhase(void) const { return _phase == Phase::BUILDING; }
+ std::shared_ptr<const model::Model> shareModel() { return _model; }
+ std::unique_ptr<graph::LowerInfoMap> releaseLowerInfo() { return std::move(_lower_info_map); }
+ std::unique_ptr<model::Subgraphs> releaseSubgraphs() { return std::move(_subgraphs); }
+
+private:
+ void initializeUseDef();
+
+ // Custom operations support
+public:
+ void bindKernelRegistry(const std::shared_ptr<backend::custom::KernelRegistry> &registry)
+ {
+ _kernel_registry = registry;
+ }
+
+ const std::shared_ptr<backend::custom::KernelRegistry> &getKernelRegistry() const
+ {
+ return _kernel_registry;
+ }
+
+private:
+ std::shared_ptr<backend::custom::KernelRegistry> _kernel_registry;
+
+ // Accessors
+public:
+ const model::OperandIndexSequence &getInputs() const { return _model->inputs; }
+ model::OperandIndexSequence &getInputs() { return _model->inputs; }
+ const model::OperandIndexSequence &getOutputs() const { return _model->outputs; }
+ model::OperandIndexSequence &getOutputs() { return _model->outputs; }
+ const model::Operands &operands() const { return _model->operands; }
+ model::Operands &operands() { return _model->operands; } // TODO Remove this non-const accessor
+ const model::Operations &operations() const { return _model->operations; }
+ model::Operations &operations() { return _model->operations; }
+ const compiler::BackendResolver *backend_resolver() const { return _backend_resolver.get(); }
+
+private:
+ Phase _phase{Phase::BUILDING};
+ std::shared_ptr<model::Model> _model;
+
+ // For LOWERED phase
+public:
+ const operation::LowerInfo *getLowerInfo(const model::SubgraphIndex &subg_index) const;
+ void setLowerInfo(const model::SubgraphIndex &subg_index,
+ std::unique_ptr<operation::LowerInfo> &&lower_info);
+ const operand::LowerInfo *getLowerInfo(const model::OperandIndex &index) const;
+ operand::LowerInfo *getLowerInfo(const model::OperandIndex &index);
+ void setLowerInfo(const model::OperandIndex &index,
+ std::unique_ptr<operand::LowerInfo> &&lower_info);
+ model::Subgraphs &subgraphs()
+ {
+ assert(_subgraphs);
+ return *_subgraphs;
+ }
+ const model::Subgraphs *subgraphs() const { return _subgraphs.get(); }
+ void setBackendResolver(std::unique_ptr<compiler::BackendResolver> &&br);
+ std::unique_ptr<compiler::BackendResolver> releaseBackendResolver();
+
+private:
+ std::unique_ptr<compiler::BackendResolver> _backend_resolver;
+ std::unique_ptr<LowerInfoMap> _lower_info_map;
+ // A Pass (e.g. for Perm) can only accept a Graph, so Graph holds Subgraphs as a member
+ std::unique_ptr<model::Subgraphs> _subgraphs;
+};
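+
+// Typical build-and-lower flow (illustrative sketch; shape/type prepared by the caller):
+//   graph::Graph graph{nnfw::cpp14::make_unique<model::Model>()};
+//   auto index = graph.addOperand(shape, type);
+//   graph.addInput(index);
+//   // ... add more operands, operations and outputs ...
+//   graph.finishBuilding(); // Phase::BUILDING -> Phase::MODEL
+//   graph.lower(); // decide a backend for each operation
+//   auto linear = graph.linearize();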
+
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_GRAPH_H__
diff --git a/runtimes/neurun/core/include/graph/LowerInfoMap.h b/runtimes/neurun/core/include/graph/LowerInfoMap.h
new file mode 100644
index 000000000..5b755ead3
--- /dev/null
+++ b/runtimes/neurun/core/include/graph/LowerInfoMap.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_LOWER_INFO_MAP_H__
+#define __NEURUN_GRAPH_LOWER_INFO_MAP_H__
+
+#include <memory>
+#include <unordered_map>
+
+#include "graph/operand/LowerInfo.h"
+#include "graph/operation/LowerInfo.h"
+#include "model/OperandIndexMap.h"
+#include "model/Index.h"
+
+namespace neurun
+{
+namespace graph
+{
+
+struct LowerInfoMap
+{
+ std::unordered_map<model::SubgraphIndex, std::unique_ptr<operation::LowerInfo>> operation;
+ model::OperandIndexMap<std::unique_ptr<operand::LowerInfo>> operand;
+};
+
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_LOWER_INFO_MAP_H__
diff --git a/runtimes/neurun/core/include/graph/operand/LowerInfo.h b/runtimes/neurun/core/include/graph/operand/LowerInfo.h
new file mode 100644
index 000000000..3558f6cc2
--- /dev/null
+++ b/runtimes/neurun/core/include/graph/operand/LowerInfo.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_OPERAND_LOWER_INFO_H__
+#define __NEURUN_GRAPH_OPERAND_LOWER_INFO_H__
+
+#include <functional>
+#include <stdint.h>
+
+#include "graph/operand/PermuteFactor.h"
+#include "util/Set.h"
+
+namespace neurun
+{
+namespace backend
+{
+class Backend;
+} // namespace backend
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+namespace operand
+{
+using PermuteFactorSet = util::Set<PermuteFactor>;
+
+class LowerInfo
+{
+public:
+ class Shape4D
+ {
+ public:
+ Shape4D(uint32_t n, uint32_t h, uint32_t w, uint32_t c) : _n{n}, _h{h}, _w{w}, _c{c}
+ {
+ // DO NOTHING
+ }
+
+ public:
+ uint32_t n(void) const { return _n; }
+ uint32_t h(void) const { return _h; }
+ uint32_t w(void) const { return _w; }
+ uint32_t c(void) const { return _c; }
+
+ private:
+ uint32_t _n;
+ uint32_t _h;
+ uint32_t _w;
+ uint32_t _c;
+ };
+
+public:
+ LowerInfo(const Shape4D &shape) : _shape{shape}
+ {
+ // DO NOTHING
+ }
+
+public:
+ const Shape4D &shape(void) const { return _shape; }
+ const PermuteFactorSet &def_factors(void) const { return _def_factors; }
+ const PermuteFactorSet &use_factors(void) const { return _use_factors; }
+
+public:
+ void addDefPermuteFactor(const PermuteFactor &factor) { _def_factors.add(factor); }
+ void addUsePermuteFactor(const PermuteFactor &factor) { _use_factors.add(factor); }
+ void removeDefPermuteFactor(const PermuteFactor &factor) { _def_factors.remove(factor); }
+ void removeUsePermuteFactor(const PermuteFactor &factor) { _use_factors.remove(factor); }
+
+private:
+ Shape4D _shape;
+ PermuteFactorSet _def_factors;
+ PermuteFactorSet _use_factors;
+};
+
+} // namespace operand
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_OPERAND_LOWER_INFO_H__
diff --git a/runtimes/neurun/core/include/graph/operand/ParentInfo.h b/runtimes/neurun/core/include/graph/operand/ParentInfo.h
new file mode 100644
index 000000000..024925d90
--- /dev/null
+++ b/runtimes/neurun/core/include/graph/operand/ParentInfo.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file ParentInfo.h
+ * @brief This file contains the ParentInfo class, which represents subsumption
+ * between operands
+ */
+
+#ifndef __NEURUN_GRAPH_OPERAND_PARENT_INFO_H__
+#define __NEURUN_GRAPH_OPERAND_PARENT_INFO_H__
+
+#include <stdint.h>
+
+#include "model/Index.h"
+#include "util/Coordinates.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace operand
+{
+
+using neurun::util::Coordinates;
+
+/**
+ * @brief Class to represent parent operand in child operand
+ */
+class ParentInfo
+{
+public:
+ /**
+ * @brief Construct a new ParentInfo object
+ * @param[in] parent Index of parent operand
+ * @param[in] coordinate Offset of child operand in parent operand
+ */
+ ParentInfo(const model::OperandIndex parent, const Coordinates &coordinate)
+ : _parent{parent}, _coordinate{coordinate}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Return parent index
+ * @return Parent index
+ */
+ model::OperandIndex parent(void) const { return _parent; }
+ /**
+ * @brief Return offset in parent
+ * @return Offset
+ */
+ Coordinates offset(void) const { return _coordinate; }
+
+private:
+ model::OperandIndex _parent;
+ Coordinates _coordinate;
+};
+
+} // namespace operand
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_OPERAND_PARENT_INFO_H__
diff --git a/runtimes/neurun/core/include/graph/operand/PermuteFactor.h b/runtimes/neurun/core/include/graph/operand/PermuteFactor.h
new file mode 100644
index 000000000..480e95c15
--- /dev/null
+++ b/runtimes/neurun/core/include/graph/operand/PermuteFactor.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file PermuteFactor.h
+ * @brief This file contains neurun::graph::operand::PermuteFactor class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NEURUN_GRAPH_OPERAND_PERMUTE_FACTOR_H__
+#define __NEURUN_GRAPH_OPERAND_PERMUTE_FACTOR_H__
+
+#include <functional>
+
+#include "model/Layout.h"
+
+namespace neurun
+{
+namespace backend
+{
+class Backend;
+} // namespace backend
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+namespace operand
+{
+
+/**
+ * @brief Class that has factors of permutation
+ */
+class PermuteFactor
+{
+public:
+ /**
+ * @brief Construct PermuteFactor object.
+ * @param backend The backend factor
+ * @param layout The layout factor
+ */
+ PermuteFactor(const backend::Backend *backend, model::Layout layout)
+ : _backend{backend}, _layout{layout}
+ {
+ // DO NOTHING
+ }
+ /**
+ * @brief Construct PermuteFactor object by copy semantics.
+ */
+ PermuteFactor(const PermuteFactor &f) : _backend{f._backend}, _layout{f._layout}
+ {
+ // DO NOTHING
+ }
+ /**
+ * @brief Construct PermuteFactor object by move semantics.
+ */
+ PermuteFactor(PermuteFactor &&) = default;
+
+public:
+ /**
+ * @brief Get backend
+ *
+ * @return Backend factor
+ */
+ const backend::Backend *backend() const { return _backend; }
+ /**
+ * @brief Get layout
+ *
+ * @return Layout factor
+ */
+ model::Layout layout() const { return _layout; }
+
+public:
+ /**
+ * @brief operator overloading function for `==`
+ *
+ * @return Whether two PermuteFactors are the same
+ */
+ bool operator==(const PermuteFactor &other) const
+ {
+ return _backend == other.backend() && _layout == other.layout();
+ }
+ /**
+ * @brief operator overloading function for `!=`
+ *
+ * @return Whether two PermuteFactors are different
+ */
+ bool operator!=(const PermuteFactor &other) const { return !(*this == other); }
+
+private:
+ const backend::Backend *_backend{nullptr};
+ model::Layout _layout{model::Layout::UNKNOWN};
+};
+
+} // namespace operand
+} // namespace graph
+} // namespace neurun
+
+namespace std
+{
+
+using PermuteFactor = ::neurun::graph::operand::PermuteFactor;
+
+/**
+ * @brief Structure that provides hash value of PermuteFactor
+ */
+template <> struct hash<PermuteFactor>
+{
+ size_t operator()(const PermuteFactor &factor) const noexcept
+ {
+ hash<const ::neurun::backend::Backend *> b_hash{};
+ hash<::neurun::model::Layout> l_hash{};
+ return b_hash(factor.backend()) ^ (l_hash(factor.layout()) << 1);
+ }
+};
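+
+// With this specialization PermuteFactor works as a key in unordered containers,
+// e.g. (sketch; `backend` is a hypothetical const Backend *):
+//   std::unordered_set<PermuteFactor> factors;
+//   factors.emplace(backend, ::neurun::model::Layout::NHWC);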
+
+} // namespace std
+
+#endif // __NEURUN_GRAPH_OPERAND_PERMUTE_FACTOR_H__
diff --git a/runtimes/neurun/core/include/graph/operation/LowerInfo.h b/runtimes/neurun/core/include/graph/operation/LowerInfo.h
new file mode 100644
index 000000000..fb9f5206c
--- /dev/null
+++ b/runtimes/neurun/core/include/graph/operation/LowerInfo.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_OPERATION_LOWER_INFO_H__
+#define __NEURUN_GRAPH_OPERATION_LOWER_INFO_H__
+
+#include <string>
+
+#include <graph/operand/PermuteFactor.h>
+
+namespace neurun
+{
+namespace backend
+{
+class Backend;
+} // namespace backend
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+namespace operation
+{
+
+class LowerInfo
+{
+public:
+ LowerInfo(const backend::Backend *backend, model::Layout layout);
+ const backend::Backend *backend() const { return _permute_factor.backend(); }
+ model::Layout layout() const { return _permute_factor.layout(); }
+
+private:
+ graph::operand::PermuteFactor _permute_factor;
+};
+
+} // namespace operation
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_OPERATION_LOWER_INFO_H__
diff --git a/runtimes/neurun/core/include/model/Data.h b/runtimes/neurun/core/include/model/Data.h
new file mode 100644
index 000000000..3316ad874
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Data.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_DATA_H__
+#define __NEURUN_MODEL_DATA_H__
+
+#include <algorithm>
+#include <cstdint>
+
+namespace neurun
+{
+namespace model
+{
+
+struct Data
+{
+ virtual ~Data() = default;
+
+ virtual size_t size(void) const = 0;
+ virtual const uint8_t *base(void) const = 0;
+};
+
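+/// @brief Data that owns a deep copy of the source buffer (copied at construction,
+/// freed on destruction)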
+class CachedData final : public Data
+{
+public:
+ CachedData(const uint8_t *base, size_t size) : _base{new uint8_t[size]}, _size{size}
+ {
+ std::copy(base, base + size, _base);
+ }
+
+public:
+ ~CachedData() { delete[] _base; }
+
+public:
+ size_t size(void) const override { return _size; }
+ const uint8_t *base(void) const override { return _base; }
+
+private:
+ uint8_t *_base;
+ size_t _size;
+};
+
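+/// @brief Data that only references caller-owned memory: no copy is made, so the
+/// caller must keep the buffer alive for the lifetime of this object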
+class ExternalData final : public Data
+{
+public:
+ ExternalData(const uint8_t *base, size_t size) : _base{base}, _size{size}
+ {
+ // DO NOTHING
+ }
+
+public:
+ size_t size(void) const override { return _size; }
+ const uint8_t *base(void) const override { return _base; }
+
+private:
+ const uint8_t *_base;
+ const size_t _size;
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_DATA_H__
diff --git a/runtimes/neurun/core/include/model/DataType.h b/runtimes/neurun/core/include/model/DataType.h
new file mode 100644
index 000000000..7b68dabea
--- /dev/null
+++ b/runtimes/neurun/core/include/model/DataType.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_DATATYPE_H__
+#define __NEURUN_MODEL_DATATYPE_H__
+
+#include <cstdint>
+#include <stdexcept>
+
+namespace neurun
+{
+namespace model
+{
+
+enum class DataType
+{
+ FLOAT32 = 0,
+ INT32 = 1,
+ UINT32 = 2,
+ QUANT8_ASYMM = 3,
+ BOOL8 = 4,
+};
+
+inline size_t sizeOfDataType(DataType data_type)
+{
+ switch (data_type)
+ {
+ case DataType::FLOAT32:
+ return sizeof(float);
+ case DataType::INT32:
+ return sizeof(int32_t);
+ case DataType::UINT32:
+ return sizeof(uint32_t);
+ case DataType::BOOL8:
+ case DataType::QUANT8_ASYMM:
+ return sizeof(uint8_t);
+ default:
+ throw std::runtime_error{"Unsupported type size"};
+ }
+}
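+
+// Example (sketch): total byte size of a tensor buffer from its element count:
+//   size_t bytes = num_elements * sizeOfDataType(DataType::FLOAT32); // num_elements * 4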
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_DATATYPE_H__
diff --git a/runtimes/neurun/core/include/model/Index.h b/runtimes/neurun/core/include/model/Index.h
new file mode 100644
index 000000000..e4218d51d
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Index.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERAND_INDEX_H__
+#define __NEURUN_MODEL_OPERAND_INDEX_H__
+
+#include "util/Index.h"
+
+namespace neurun
+{
+namespace model
+{
+
+struct OperationIndexTag;
+using OperationIndex = ::neurun::util::Index<uint32_t, OperationIndexTag>;
+
+struct OperandIndexTag;
+using OperandIndex = ::neurun::util::Index<uint32_t, OperandIndexTag>;
+
+struct IOIndexTag;
+using IOIndex = ::neurun::util::Index<uint32_t, IOIndexTag>;
+
+struct SubgraphIndexTag;
+using SubgraphIndex = ::neurun::util::Index<uint32_t, SubgraphIndexTag>;
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERAND_INDEX_H__
diff --git a/runtimes/neurun/core/include/model/InternalType.h b/runtimes/neurun/core/include/model/InternalType.h
new file mode 100644
index 000000000..fccf2fe04
--- /dev/null
+++ b/runtimes/neurun/core/include/model/InternalType.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_INTERNAL_TYPE_H__
+#define __NEURUN_MODEL_INTERNAL_TYPE_H__
+
+#include <cstdint>
+
+namespace neurun
+{
+namespace model
+{
+
+enum class Activation
+{
+ NONE = 0,
+ RELU = 1,
+ RELU1 = 2,
+ RELU6 = 3,
+ TANH = 4,
+ SIGMOID = 5
+};
+
+enum class PaddingType
+{
+ EXPLICIT = 0,
+ SAME = 1,
+ VALID = 2
+};
+
+struct ExplicitPadding
+{
+ uint32_t left;
+ uint32_t right;
+ uint32_t top;
+ uint32_t bottom;
+};
+
+// TODO Resolve explicit padding param at frontend and save in value field
+struct Padding
+{
+ PaddingType type;
+ ExplicitPadding param;
+};
+
+struct Stride
+{
+ uint32_t vertical;
+ uint32_t horizontal;
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_INTERNAL_TYPE_H__
diff --git a/runtimes/neurun/core/include/model/Layout.h b/runtimes/neurun/core/include/model/Layout.h
new file mode 100644
index 000000000..db46f42de
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Layout.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_LAYOUT_H__
+#define __NEURUN_MODEL_LAYOUT_H__
+
+#include <functional>
+#include <stdexcept>
+#include <string>
+
+namespace neurun
+{
+namespace model
+{
+
+enum class Layout
+{
+ UNKNOWN = 0,
+ NHWC,
+ NCHW
+};
+
+inline std::string to_string(model::Layout layout)
+{
+ switch (layout)
+ {
+ case Layout::NHWC:
+ return std::string{"NHWC"};
+ case model::Layout::NCHW:
+ return std::string{"NCHW"};
+ case model::Layout::UNKNOWN:
+ return std::string{"UNKNOWN"};
+ default:
+ throw std::runtime_error("WRONG LAYOUT");
+ }
+}
+
+} // namespace model
+} // namespace neurun
+
+namespace std
+{
+
+template <> struct hash<::neurun::model::Layout>
+{
+ size_t operator()(::neurun::model::Layout value) const noexcept
+ {
+ using type = typename std::underlying_type<::neurun::model::Layout>::type;
+ return hash<type>()(static_cast<type>(value));
+ }
+};
+
+} // namespace std
+
+#endif // __NEURUN_MODEL_LAYOUT_H__
diff --git a/runtimes/neurun/core/include/model/Model.h b/runtimes/neurun/core/include/model/Model.h
new file mode 100644
index 000000000..365bef198
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Model.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_MODEL_H__
+#define __NEURUN_MODEL_MODEL_H__
+
+#include "model/Operations.h"
+#include "model/OperandIndexSequence.h"
+#include "model/Operands.h"
+
+namespace neurun
+{
+namespace model
+{
+
+struct Model
+{
+ model::Operations operations;
+ model::Operands operands;
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_MODEL_H__
diff --git a/runtimes/neurun/core/include/model/Operand.h b/runtimes/neurun/core/include/model/Operand.h
new file mode 100644
index 000000000..6cfe40cb9
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Operand.h
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERAND_H__
+#define __NEURUN_MODEL_OPERAND_H__
+
+#include <cassert>
+#include <cstdint>
+#include <cpp14/memory.h>
+#include <algorithm>
+
+#include "Data.h"
+#include "DataType.h"
+#include "OperandInfo.h"
+#include "graph/operand/ParentInfo.h" // TODO Remove this dependency
+#include "model/OperationIndexList.h"
+
+namespace neurun
+{
+namespace model
+{
+
+class Operand
+{
+public:
+ explicit Operand(const Shape &shape, const TypeInfo &type) : _info{shape, type}
+ {
+ // DO NOTHING
+ }
+
+public:
+ const Shape &shape(void) const { return _info.shape(); }
+ const TypeInfo &typeInfo(void) const { return _info.typeInfo(); }
+ Layout layout() const { return _info.layout(); }
+ const OperandInfo &info(void) const { return _info; }
+ size_t operandSize(void) const;
+
+ const OperationIndexList &getUses() const { return _uses; }
+ const OperationIndexList &getDef() const { return _def; }
+ void appendUse(const OperationIndex &idx);
+ void removeUse(const OperationIndex &idx);
+ void appendDef(const OperationIndex &idx);
+ void removeDef(const OperationIndex &idx);
+
+public:
+ void type(const DataType &type) { _info.type(type); }
+
+public:
+ void data(std::unique_ptr<Data> &&data) { _data = std::move(data); }
+ const Data &data(void) const
+ {
+ assert(_data);
+ return *_data;
+ }
+
+ /**
+ * @brief Check if the Operand has data
+ * @return @c true if the Operand has data, otherwise @c false
+ */
+ bool isConstant(void) const { return _data != nullptr; }
+
+public:
+ template <typename T, typename... Args> void data(Args &&... args)
+ {
+ data(nnfw::cpp14::make_unique<T>(std::forward<Args>(args)...));
+ }
+
+public:
+ template <typename T> T asScalar(void) const
+ {
+ assert((shape().rank() == 0) || ((shape().rank() == 1) && (shape().dim(0) == 1)));
+ assert(_data != nullptr);
+ assert((_data->base() != nullptr) && (_data->size() == sizeof(T)));
+
+ return *(reinterpret_cast<const T *>(_data->base()));
+ }
+
+public:
+ /**
+ * @brief Set parent information
+ * @param[in] parent_info Parent information
+ */
+ void parent_info(std::unique_ptr<graph::operand::ParentInfo> &&parent_info);
+ /**
+ * @brief Return parent information pointer as constant
+ * @return Parent information pointer
+ */
+ const graph::operand::ParentInfo *parent_info() const;
+ /**
+ * @brief Return parent information pointer
+ * @return Parent information pointer
+ */
+ graph::operand::ParentInfo *parent_info();
+
+private:
+ OperandInfo _info;
+ std::unique_ptr<Data> _data;
+
+ OperationIndexList _uses;
+ OperationIndexList _def; // size is 0 (constant) or 1 (from def operation)
+
+ std::unique_ptr<graph::operand::ParentInfo> _parent_info;
+};
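+
+// Usage sketch (illustrative only; `CachedData` stands in for any concrete
+// Data implementation constructible from a (base, size) pair, which is an
+// assumption of this example):
+//
+//   Operand axis{Shape{}, TypeInfo{DataType::INT32}}; // rank-0 scalar
+//   int32_t v = 1;
+//   axis.data<CachedData>(reinterpret_cast<const uint8_t *>(&v), sizeof(v));
+//   assert(axis.isConstant() && axis.asScalar<int32_t>() == 1);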
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERAND_H__
diff --git a/runtimes/neurun/core/include/model/OperandConstraint.h b/runtimes/neurun/core/include/model/OperandConstraint.h
new file mode 100644
index 000000000..c3145d20d
--- /dev/null
+++ b/runtimes/neurun/core/include/model/OperandConstraint.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERAND_CONSTRAINT_H__
+#define __NEURUN_MODEL_OPERAND_CONSTRAINT_H__
+
+#include <stdint.h>
+#include <limits>
+#include <set>
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class OperandConstraint
+{
+private:
+ static const uint32_t INF = std::numeric_limits<uint32_t>::max();
+
+public:
+ static OperandConstraint createAny() { return OperandConstraint{0u, INF}; }
+ static OperandConstraint createExact(uint32_t exact) { return OperandConstraint{exact, exact}; }
+ static OperandConstraint createAtMost(uint32_t end) { return OperandConstraint{0u, end}; }
+ static OperandConstraint createAtLeast(uint32_t begin) { return OperandConstraint{begin, INF}; }
+ static OperandConstraint createInRange(uint32_t begin, uint32_t end)
+ {
+ return OperandConstraint{begin, end};
+ }
+
+private:
+ OperandConstraint(uint32_t begin, uint32_t end) : _begin{begin}, _end{end} {}
+
+public:
+ bool check(uint32_t ind) const { return _begin <= ind && ind <= _end; }
+
+private:
+ uint32_t _begin;
+ uint32_t _end;
+};
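+
+// Usage sketch: constraints are created through the factory functions above
+// and queried with check(). For instance, an operation accepting two or
+// three inputs could be validated as follows:
+//
+//   auto constr = OperandConstraint::createInRange(2u, 3u);
+//   assert(constr.check(2u) && constr.check(3u) && !constr.check(4u));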
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERAND_CONSTRAINT_H__
diff --git a/runtimes/neurun/core/include/model/OperandIndexMap.h b/runtimes/neurun/core/include/model/OperandIndexMap.h
new file mode 100644
index 000000000..c3492d4d0
--- /dev/null
+++ b/runtimes/neurun/core/include/model/OperandIndexMap.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERAND_INDEX_MAP_H__
+#define __NEURUN_MODEL_OPERAND_INDEX_MAP_H__
+
+#include <unordered_map>
+
+#include "Index.h"
+
+namespace neurun
+{
+namespace model
+{
+
+template <typename T> using OperandIndexMap = std::unordered_map<model::OperandIndex, T>;
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERAND_INDEX_MAP_H__
diff --git a/runtimes/neurun/core/include/model/OperandIndexSequence.h b/runtimes/neurun/core/include/model/OperandIndexSequence.h
new file mode 100644
index 000000000..2cf060df2
--- /dev/null
+++ b/runtimes/neurun/core/include/model/OperandIndexSequence.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERAND_INDEX_SEQUENCE_H__
+#define __NEURUN_MODEL_OPERAND_INDEX_SEQUENCE_H__
+
+#include <initializer_list>
+#include <vector>
+
+#include "Index.h"
+
+namespace neurun
+{
+namespace model
+{
+
+class OperandIndexSequence
+{
+public:
+ OperandIndexSequence(void) = default;
+ OperandIndexSequence(std::initializer_list<OperandIndex> list);
+ OperandIndexSequence(std::initializer_list<int32_t> list);
+ OperandIndexSequence(std::initializer_list<uint32_t> list);
+
+public:
+ void append(const OperandIndex &index) { _set.emplace_back(index); }
+
+public:
+ uint32_t size() const { return static_cast<uint32_t>(_set.size()); }
+ const OperandIndex &at(IOIndex set_index) const { return _set.at(set_index.value()); }
+ const OperandIndex &at(uint32_t index) const { return _set.at(index); }
+ bool contains(const OperandIndex &index) const;
+ void replace(const OperandIndex &from, const OperandIndex &to);
+
+public:
+ std::vector<OperandIndex>::const_iterator begin(void) const { return _set.begin(); }
+ std::vector<OperandIndex>::const_iterator end(void) const { return _set.end(); }
+
+private:
+ std::vector<OperandIndex> _set;
+};
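+
+// Usage sketch (illustrative; assumes OperandIndex is explicitly
+// constructible from uint32_t, as the constructors above suggest):
+//
+//   OperandIndexSequence seq{0u, 1u, 2u};
+//   seq.append(OperandIndex{3u});
+//   assert(seq.size() == 4 && seq.contains(OperandIndex{3u}));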
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERAND_INDEX_SEQUENCE_H__
diff --git a/runtimes/neurun/core/include/model/OperandInfo.h b/runtimes/neurun/core/include/model/OperandInfo.h
new file mode 100644
index 000000000..036306e76
--- /dev/null
+++ b/runtimes/neurun/core/include/model/OperandInfo.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file OperandInfo.h
+ * @brief This file contains OperandInfo class
+ */
+#ifndef __NEURUN_MODEL_OPERAND_INFO_H__
+#define __NEURUN_MODEL_OPERAND_INFO_H__
+
+#include "Shape.h"
+#include "TypeInfo.h"
+#include "Layout.h"
+
+namespace neurun
+{
+namespace model
+{
+
+/**
+ * @brief Class to save tensor's shape and type
+ */
+class OperandInfo
+{
+public:
+ /**
+ * @brief Construct a new OperandInfo object (deleted)
+ */
+ OperandInfo() = delete;
+ /**
+ * @brief Construct a new OperandInfo object
+ * @param[in] shape Tensor shape
+ * @param[in] typeInfo Tensor data type
+ */
+ OperandInfo(const Shape &shape, const TypeInfo &typeInfo, Layout layout = Layout::NHWC)
+ : _shape(shape), _typeInfo(typeInfo), _layout(layout)
+ {
+ // DO NOTHING
+ }
+ /**
+ * @brief Construct a new OperandInfo object
+ * @param[in] origin info for copy
+ */
+ OperandInfo(const OperandInfo &origin)
+ : _shape(origin.shape()), _typeInfo(origin.typeInfo()), _layout(origin.layout())
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Return tensor shape
+ * @return Tensor shape
+ */
+ const Shape &shape() const { return _shape; }
+ /**
+ * @brief Return tensor data type info
+ * @return Tensor data type
+ */
+ const TypeInfo &typeInfo() const { return _typeInfo; }
+ /**
+ * @brief Return operand shape layout in model
+ * @return Tensor shape layout
+ */
+ Layout layout() const { return _layout; }
+ /**
+ * @brief Set tensor data type
+ */
+ void type(const DataType &type) { _typeInfo.type(type); }
+ /**
+ * @brief Return size of tensor (bytes)
+ * @return Tensor size
+ */
+ size_t total_size() const { return _shape.num_elements() * sizeOfDataType(_typeInfo.type()); }
+
+private:
+ Shape _shape;
+ TypeInfo _typeInfo;
+ Layout _layout;
+};
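+
+// Example of total_size(): for a FLOAT32 tensor of shape {1, 224, 224, 3},
+// num_elements() is 150528, so with sizeOfDataType(FLOAT32) == 4 bytes
+// total_size() returns 602112 bytes.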
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERAND_INFO_H__
diff --git a/runtimes/neurun/core/include/model/Operands.h b/runtimes/neurun/core/include/model/Operands.h
new file mode 100644
index 000000000..517d2ff2b
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Operands.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERANDS_H__
+#define __NEURUN_MODEL_OPERANDS_H__
+
+#include <memory>
+#include <unordered_map>
+
+#include "Operand.h"
+#include "Index.h"
+#include "util/ObjectManager.h"
+
+namespace neurun
+{
+namespace model
+{
+
+class Operands : public util::ObjectManager<OperandIndex, Operand>
+{
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERANDS_H__
diff --git a/runtimes/neurun/core/include/model/Operation.h b/runtimes/neurun/core/include/model/Operation.h
new file mode 100644
index 000000000..029684dbd
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Operation.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_H__
+#define __NEURUN_MODEL_OPERATION_H__
+
+#include <memory>
+
+#include "model/Operand.h"
+#include "model/OperandIndexSequence.h"
+#include "model/OperandConstraint.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace operation
+{
+class LowerInfo;
+} // namespace operation
+} // namespace graph
+} // namespace neurun
+
+namespace neurun
+{
+namespace model
+{
+struct OperationVisitor;
+} // namespace model
+} // namespace neurun
+
+namespace neurun
+{
+namespace model
+{
+
+using OperandConstraint = ::neurun::model::operation::OperandConstraint;
+
+class Operation
+{
+public:
+ Operation(OperandConstraint input_constr, const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs);
+ explicit Operation(OperandConstraint input_constr);
+
+ Operation(const Operation &) = delete;
+ Operation(Operation &&) = default;
+ Operation &operator=(const Operation &) = delete;
+ Operation &operator=(Operation &&) = default;
+
+ virtual ~Operation();
+
+public:
+ virtual void accept(OperationVisitor &v) const = 0;
+ virtual std::string getName() const = 0;
+
+public:
+ void replaceInput(const OperandIndex &from, const OperandIndex &to);
+ void replaceOutput(const OperandIndex &from, const OperandIndex &to);
+ const OperandIndexSequence &getInputs() const { return _inputs; }
+ const OperandIndexSequence &getOutputs() const { return _outputs; }
+ // These setters are for input/output tensors only, not for constant data.
+ void setInputs(const OperandIndexSequence &indexes);
+ void setOutputs(const OperandIndexSequence &indexes);
+
+private:
+ OperandConstraint _input_constr;
+ OperandIndexSequence _inputs;
+ OperandIndexSequence _outputs;
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_H__
diff --git a/runtimes/neurun/core/include/model/OperationIndexList.h b/runtimes/neurun/core/include/model/OperationIndexList.h
new file mode 100644
index 000000000..924af7925
--- /dev/null
+++ b/runtimes/neurun/core/include/model/OperationIndexList.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_INDEX_LIST_H__
+#define __NEURUN_MODEL_OPERATION_INDEX_LIST_H__
+
+#include <initializer_list>
+#include <list>
+
+#include "model/Index.h"
+
+namespace neurun
+{
+namespace model
+{
+
+class OperationIndexList
+{
+public:
+ OperationIndexList(void) = default;
+ OperationIndexList(std::initializer_list<OperationIndex> list);
+
+public:
+ void append(const OperationIndex &index) { _list.push_back(index); }
+ void remove(const OperationIndex &index) { _list.remove(index); }
+
+public:
+ uint32_t size() const { return static_cast<uint32_t>(_list.size()); }
+ const std::list<OperationIndex> &list() const { return _list; }
+ bool contains(const OperationIndex &index) const;
+
+private:
+ std::list<OperationIndex> _list;
+};
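+
+// Usage sketch: a def/use bookkeeping list as used by Operand
+// (illustrative; assumes OperationIndex is constructible from uint32_t):
+//
+//   OperationIndexList uses;
+//   uses.append(OperationIndex{5u});
+//   assert(uses.contains(OperationIndex{5u}) && uses.size() == 1);
+//   uses.remove(OperationIndex{5u});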
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_INDEX_LIST_H__
diff --git a/runtimes/neurun/core/include/model/OperationIndexMap.h b/runtimes/neurun/core/include/model/OperationIndexMap.h
new file mode 100644
index 000000000..e0399ef3c
--- /dev/null
+++ b/runtimes/neurun/core/include/model/OperationIndexMap.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_INDEX_MAP_H__
+#define __NEURUN_MODEL_OPERATION_INDEX_MAP_H__
+
+#include <unordered_map>
+
+#include "Index.h"
+
+namespace neurun
+{
+namespace model
+{
+
+template <typename T> using OperationIndexMap = std::unordered_map<model::OperationIndex, T>;
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_INDEX_MAP_H__
diff --git a/runtimes/neurun/core/include/model/OperationVisitor.h b/runtimes/neurun/core/include/model/OperationVisitor.h
new file mode 100644
index 000000000..200e62dd6
--- /dev/null
+++ b/runtimes/neurun/core/include/model/OperationVisitor.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_VISITOR_H__
+#define __NEURUN_MODEL_OPERATION_VISITOR_H__
+
+#include "Operations.Include.h"
+#include "Subgraph.h"
+
+namespace neurun
+{
+namespace model
+{
+
+struct OperationVisitor
+{
+ virtual ~OperationVisitor() = default;
+
+#define OP(InternalName, IsNnApi) \
+ virtual void visit(const operation::InternalName &) {}
+#include "model/Operations.lst"
+#undef OP
+
+ // The Subgraph node is handled explicitly here because
+ // Operations.lst intentionally does not include Subgraph
+ virtual void visit(const Subgraph &subgraph)
+ {
+ for (const auto &e : subgraph.operations())
+ {
+ e.node->accept(*this);
+ }
+ }
+};
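+
+// A concrete visitor only overrides the nodes it cares about; the OP-macro
+// expansion above supplies empty default visit()s for every other node.
+// Sketch (illustrative):
+//
+//   struct ConvCounter : public OperationVisitor
+//   {
+//     int count = 0;
+//     void visit(const operation::Conv2DNode &) override { ++count; }
+//   };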
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_VISITOR_H__
diff --git a/runtimes/neurun/core/include/model/Operations.Include.h b/runtimes/neurun/core/include/model/Operations.Include.h
new file mode 100644
index 000000000..e6790c93b
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Operations.Include.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file has no ifdef guard intentionally
+
+#include "operation/Conv2DNode.h"
+#include "operation/MaxPool2DNode.h"
+#include "operation/AvgPool2DNode.h"
+#include "operation/ConcatNode.h"
+#include "operation/ReshapeNode.h"
+#include "operation/FullyConnectedNode.h"
+#include "operation/SoftmaxNode.h"
+#include "operation/TransposeNode.h"
+#include "operation/PermuteNode.h"
+#include "operation/ReduceSumNode.h"
+#include "operation/AddNode.h"
+#include "operation/SubNode.h"
+#include "operation/DepthwiseConv2DNode.h"
+#include "operation/StridedSliceNode.h"
+#include "operation/MulNode.h"
+#include "operation/SqueezeNode.h"
+#include "operation/TanhNode.h"
+#include "operation/LogisticNode.h"
+#include "operation/CastNode.h"
+#include "operation/DivNode.h"
+#include "operation/ExpNode.h"
+#include "operation/ReduceMaxNode.h"
+#include "operation/ComparisonNode.h"
+#include "operation/LogicalAndNode.h"
+#include "operation/LogicalOrNode.h"
+#include "operation/LogicalNotNode.h"
+#include "operation/LSTMNode.h"
+#include "operation/RSQRTNode.h"
+#include "operation/ReLUNode.h"
+#include "operation/ResizeBilinearNode.h"
+#include "operation/ReLU1Node.h"
+#include "operation/ReLU6Node.h"
+#include "operation/RNNNode.h"
+#include "operation/FloorNode.h"
+#include "operation/SpaceToDepthNode.h"
+#include "operation/L2Pool2DNode.h"
+#include "operation/EmbeddingLookupNode.h"
+#include "operation/L2NormalizationNode.h"
+#include "operation/HashtableLookupNode.h"
+#include "operation/PReLUNode.h"
+#include "operation/TransposeConvNode.h"
+#include "operation/SQRTNode.h"
+#include "operation/SquaredDifferenceNode.h"
+#include "operation/TopKV2Node.h"
+#include "operation/GatherNode.h"
+#include "operation/NegNode.h"
+#include "operation/AbsNode.h"
+#include "operation/ArgMaxNode.h"
+#include "operation/DequantizeNode.h"
+#include "operation/MeanNode.h"
+#include "operation/LocalResponseNormalizationNode.h"
+#include "operation/DepthToSpaceNode.h"
+#include "operation/ReduceMinNode.h"
+#include "operation/SplitNode.h"
+#include "operation/UnpackNode.h"
+#include "operation/PadNode.h"
+#include "operation/CustomNode.h"
diff --git a/runtimes/neurun/core/include/model/Operations.h b/runtimes/neurun/core/include/model/Operations.h
new file mode 100644
index 000000000..4a1b2ca8d
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Operations.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATIONS_H__
+#define __NEURUN_MODEL_OPERATIONS_H__
+
+#include "model/Index.h"
+#include "model/Operation.h"
+#include "util/ObjectManager.h"
+
+namespace neurun
+{
+namespace model
+{
+
+class Operations : public util::ObjectManager<OperationIndex, Operation>
+{
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATIONS_H__
diff --git a/runtimes/neurun/core/include/model/Operations.lst b/runtimes/neurun/core/include/model/Operations.lst
new file mode 100644
index 000000000..ef645dd35
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Operations.lst
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OP
+#error Define OP before including this file
+#endif
+
+// NOTE The relation between "Internal Name" and "NN API" is "1 : N".
+
+// Internal Name | NN API?
+OP(AddNode , true)
+OP(SubNode , true)
+OP(CastNode , true)
+OP(Conv2DNode , true)
+OP(DepthwiseConv2DNode , true)
+OP(AvgPool2DNode , true)
+OP(MaxPool2DNode , true)
+OP(ConcatNode , true)
+OP(FullyConnectedNode , true)
+OP(ReduceSumNode , true)
+OP(ReshapeNode , true)
+OP(MulNode , true)
+OP(SoftmaxNode , true)
+OP(SqueezeNode , true)
+OP(StridedSliceNode , true)
+OP(TanhNode , true)
+OP(LogisticNode , true)
+OP(DivNode , true)
+OP(TransposeNode , true)
+OP(ExpNode , true)
+OP(ReduceMaxNode , true)
+OP(ComparisonNode , true)
+OP(LogicalAndNode , true)
+OP(LogicalOrNode , true)
+OP(LogicalNotNode , true)
+OP(LSTMNode , true)
+OP(RSQRTNode , true)
+OP(ReLUNode , true)
+OP(ResizeBilinearNode , true)
+OP(ReLU1Node , true)
+OP(ReLU6Node , true)
+OP(RNNNode , true)
+OP(FloorNode , true)
+OP(SpaceToDepthNode , true)
+OP(L2Pool2DNode , true)
+OP(EmbeddingLookupNode , true)
+OP(L2NormalizationNode , true)
+OP(HashtableLookupNode , true)
+OP(PReLUNode , true)
+OP(TransposeConvNode , true)
+OP(SQRTNode , true)
+OP(SquaredDifferenceNode , true)
+OP(TopKV2Node , true)
+OP(GatherNode , true)
+OP(NegNode , true)
+OP(AbsNode , true)
+OP(ArgMaxNode , true)
+OP(DequantizeNode , true)
+OP(MeanNode , true)
+OP(LocalResponseNormalizationNode , true)
+OP(DepthToSpaceNode , true)
+OP(ReduceMinNode , true)
+OP(SplitNode , true)
+OP(UnpackNode , true)
+OP(PadNode , true)
+OP(CustomNode , true)
+OP(PermuteNode , false)
+
diff --git a/runtimes/neurun/core/include/model/Shape.h b/runtimes/neurun/core/include/model/Shape.h
new file mode 100644
index 000000000..c8d986633
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Shape.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_SHAPE_H__
+#define __NEURUN_MODEL_SHAPE_H__
+
+#include "Layout.h"
+#include "misc/feature/Shape.h"
+
+#include <cstdint>
+#include <vector>
+
+namespace neurun
+{
+namespace model
+{
+
+// TODO Remove this dependency.
+using FeatureShape = nnfw::misc::feature::Shape;
+
+struct Shape
+{
+public:
+ Shape() = default;
+
+ explicit Shape(int rank) : _dimensions(rank) {}
+
+ Shape(std::initializer_list<int32_t> dimensions) : _dimensions(dimensions) {}
+
+ int rank() const { return static_cast<int>(_dimensions.size()); }
+
+ const std::vector<int32_t> &dims() const { return _dimensions; }
+
+ int32_t dim(int i) const { return _dimensions.at(i); }
+
+ int32_t &dim(int i) { return _dimensions.at(i); }
+
+ uint64_t num_elements() const;
+
+public:
+ FeatureShape asFeature(Layout layout) const;
+
+ /**
+ * @brief Add dimension to the beginning
+ * @param[in] d dimension to add to the beginning
+ */
+ void prepend(int32_t d) { _dimensions.insert(_dimensions.cbegin(), d); }
+
+ /**
+ * @brief Add dimension to the end
+ * @param[in] d dimension to add to the end
+ */
+ void append(int32_t d) { _dimensions.emplace_back(d); }
+
+ /**
+ * @brief Extend the rank of this Shape to the given value
+ * @param[in] to_rank The rank value to be extended to
+ */
+ void extendRank(int to_rank);
+
+private:
+ std::vector<int32_t> _dimensions;
+};
+
+inline bool operator==(const Shape &lhs, const Shape &rhs) { return lhs.dims() == rhs.dims(); }
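+
+// Usage sketch (illustrative; assumes num_elements() is the product of all
+// dimensions, matching total_size() in OperandInfo):
+//
+//   Shape s{2, 3};   // rank 2
+//   s.append(4);     // {2, 3, 4}
+//   assert(s.rank() == 3 && s.num_elements() == 24);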
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_SHAPE_H__
diff --git a/runtimes/neurun/core/include/model/Subgraph.h b/runtimes/neurun/core/include/model/Subgraph.h
new file mode 100644
index 000000000..70abf6a1c
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Subgraph.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_SUBGRAPH_H__
+#define __NEURUN_MODEL_SUBGRAPH_H__
+
+#include <vector>
+#include <string>
+#include <memory>
+
+#include "Layout.h"
+#include "Index.h"
+#include "Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+
+// To keep Element ValueSwappable, it holds no class-type members by value
+// (or holds only class-type members whose types themselves support
+// Swappable)
+struct Element
+{
+ OperationIndex index;
+ const Operation *node;
+
+ Element(const OperationIndex *i, const Operation *n) : index{*i}, node{n}
+ {
+ // DO NOTHING
+ }
+};
+
+class Subgraph : public Operation
+{
+public:
+ explicit Subgraph(model::Layout layout);
+ Subgraph(const Subgraph &) = delete;
+
+public:
+ void accept(OperationVisitor &v) const override;
+
+ std::string getName(void) const override { return "Subgraph"; }
+
+public:
+ void appendOperation(const OperationIndex &index, const Operation &node)
+ {
+ _operations.emplace_back(&index, &node);
+ }
+
+ std::vector<Element> &operations(void) { return _operations; }
+
+ const std::vector<Element> &operations(void) const { return _operations; }
+
+ uint32_t size(void) const { return static_cast<uint32_t>(_operations.size()); }
+
+ // TODO: Implement a Dumper instead of this method
+ std::string getStr(void) const;
+
+public:
+ Layout getLayout() const { return _layout; }
+
+private:
+ std::vector<Element> _operations;
+
+private:
+ Layout _layout;
+};
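+
+// Usage sketch: operations are appended in execution order. Note that the
+// Subgraph copies the index but stores only a pointer to the node, so the
+// referenced Operation must outlive the Subgraph (illustrative; assumes the
+// ObjectManager-style at() accessor of Operations):
+//
+//   Subgraph subg{Layout::NHWC};
+//   subg.appendOperation(op_index, operations.at(op_index));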
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_SUBGRAPH_H__
diff --git a/runtimes/neurun/core/include/model/Subgraphs.h b/runtimes/neurun/core/include/model/Subgraphs.h
new file mode 100644
index 000000000..13bc549be
--- /dev/null
+++ b/runtimes/neurun/core/include/model/Subgraphs.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_SUBGRAPHS_H__
+#define __NEURUN_MODEL_SUBGRAPHS_H__
+
+#include "model/Index.h"
+#include "model/Subgraph.h"
+#include "util/ObjectManager.h"
+
+namespace neurun
+{
+namespace model
+{
+
+/**
+ * @brief Class that manages Subgraph objects
+ */
+class Subgraphs : public util::ObjectManager<SubgraphIndex, Subgraph>
+{
+public:
+ /**
+ * @brief Create an instance of Subgraph with given op and push it to objects
+ *
+ * @param[in] op_index Operation index that is emplaced
+ * @param[in] op Operation that is emplaced
+ * @param[in] layout Subgraph's layout
+ * @return SubgraphIndex
+ */
+ SubgraphIndex emplace(const OperationIndex &op_index, const Operation &op, Layout layout);
+
+ /**
+ * @brief Push an instance of Subgraph to objects
+ *
+ * @param[in] subg An instance of Subgraph
+ * @return SubgraphIndex
+ */
+ SubgraphIndex emplace(std::unique_ptr<Subgraph> &&subg);
+
+ /**
+ * @brief Check whether an operation exists in any subgraph
+ *
+ * @param operation_index Operation index to find
+ * @return @c true if such an operation exists in any subgraph, otherwise @c false
+ */
+ bool containsOperation(const OperationIndex &operation_index) const;
+ /**
+ * @brief Find an operation from all subgraphs
+ *
+ * @param operation_index Operation index to find
+ * @return SubgraphIndex Index of Subgraph that contains given operation index
+ */
+ SubgraphIndex getOperation(const OperationIndex &operation_index) const;
+ /**
+ * @brief Dump subgraphs
+ *
+ * @param msg Message that will be displayed
+ */
+ void dump(const std::string &msg) const;
+
+private:
+ SubgraphIndex findOperation(const OperationIndex &operation_index) const;
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_SUBGRAPHS_H__
diff --git a/runtimes/neurun/core/include/model/TypeInfo.h b/runtimes/neurun/core/include/model/TypeInfo.h
new file mode 100644
index 000000000..4d6a5458b
--- /dev/null
+++ b/runtimes/neurun/core/include/model/TypeInfo.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_TYPEINFO_H__
+#define __NEURUN_MODEL_TYPEINFO_H__
+
+#include <cstdint>
+
+#include "DataType.h"
+
+namespace neurun
+{
+namespace model
+{
+
+class TypeInfo
+{
+public:
+ TypeInfo() = delete;
+
+ explicit TypeInfo(DataType type, float scale = 0, int32_t offset = 0)
+ : _type(type), _scale(scale), _offset(offset)
+ {
+ }
+
+public:
+ DataType type() const { return _type; }
+ float scale() const { return _scale; }
+ int32_t offset() const { return _offset; }
+
+public:
+ void type(const DataType &type) { _type = type; }
+
+private:
+ DataType _type;
+ float _scale;
+ int32_t _offset;
+};
+
+bool operator==(const TypeInfo &lhs, const TypeInfo &rhs);
+bool operator!=(const TypeInfo &lhs, const TypeInfo &rhs);
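+
+// NOTE For quantized types, scale/offset are conventionally interpreted as
+// in the NN API: real_value = scale * (quantized_value - offset). A plain
+// float tensor would typically use TypeInfo{DataType::FLOAT32} with the
+// default scale = 0 and offset = 0.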
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_TYPEINFO_H__
diff --git a/runtimes/neurun/core/include/model/operation/AbsNode.h b/runtimes/neurun/core/include/model/operation/AbsNode.h
new file mode 100644
index 000000000..a081d05ba
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/AbsNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_ABS_NODE_H__
+#define __NEURUN_MODEL_OPERATION_ABS_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class AbsNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ AbsNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Abs"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_ABS_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/AddNode.h b/runtimes/neurun/core/include/model/operation/AddNode.h
new file mode 100644
index 000000000..4310cb231
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/AddNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_ADD_NODE_H__
+#define __NEURUN_MODEL_OPERATION_ADD_NODE_H__
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class AddNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ LHS = 0,
+ RHS
+ };
+
+ struct Param
+ {
+ Activation activation;
+ };
+
+public:
+ AddNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Add"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
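+
+// Construction sketch (illustrative; `lhs`, `rhs` and `out` are assumed
+// OperandIndex values, and Activation::NONE is assumed to be an enumerator
+// from model/InternalType.h):
+//
+//   AddNode::Param param{Activation::NONE};
+//   AddNode add{{lhs, rhs}, {out}, param};
+//   assert(add.getInputs().size() == 2);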
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_ADD_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ArgMaxNode.h b/runtimes/neurun/core/include/model/operation/ArgMaxNode.h
new file mode 100644
index 000000000..1123509ae
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ArgMaxNode.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_ARG_MAX_NODE_H__
+#define __NEURUN_MODEL_OPERATION_ARG_MAX_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ArgMaxNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT
+ };
+
+ struct Param
+ {
+ OperandIndex axis_index;
+ };
+
+public:
+ ArgMaxNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ArgMax"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_ARG_MAX_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/AvgPool2DNode.h b/runtimes/neurun/core/include/model/operation/AvgPool2DNode.h
new file mode 100644
index 000000000..eb219308e
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/AvgPool2DNode.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_AVGPOOL2D_NODE_H__
+#define __NEURUN_MODEL_OPERATION_AVGPOOL2D_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class AvgPool2DNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ uint32_t kh;
+ uint32_t kw;
+
+ Stride stride;
+ Padding padding;
+ Activation activation;
+ };
+
+public:
+ AvgPool2DNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "AvgPool2D"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_AVGPOOL2D_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/CastNode.h b/runtimes/neurun/core/include/model/operation/CastNode.h
new file mode 100644
index 000000000..7d774dfca
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/CastNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_CAST_NODE_H__
+#define __NEURUN_MODEL_OPERATION_CAST_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class CastNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ CastNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Cast"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_CAST_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ComparisonNode.h b/runtimes/neurun/core/include/model/operation/ComparisonNode.h
new file mode 100644
index 000000000..b8f3074a4
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ComparisonNode.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_COMPARISON_NODE_H__
+#define __NEURUN_MODEL_OPERATION_COMPARISON_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ComparisonNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT0 = 0,
+ INPUT1
+ };
+
+ enum class ComparisonType
+ {
+ Equal,
+ NotEqual,
+ Greater,
+ GreaterEqual,
+ Less,
+ LessEqual
+ };
+
+ struct Param
+ {
+ ComparisonType comparison_type;
+ };
+
+public:
+ ComparisonNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Comparison"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_COMPARISON_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ConcatNode.h b/runtimes/neurun/core/include/model/operation/ConcatNode.h
new file mode 100644
index 000000000..63965f243
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ConcatNode.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_CONCAT_NODE_H__
+#define __NEURUN_MODEL_OPERATION_CONCAT_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ConcatNode : public model::Operation
+{
+public:
+ struct Param
+ {
+ int32_t axis;
+ };
+
+public:
+ ConcatNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Concat"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_CONCAT_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/Conv2DNode.h b/runtimes/neurun/core/include/model/operation/Conv2DNode.h
new file mode 100644
index 000000000..0e7e5b7fb
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/Conv2DNode.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_CONV2D_NODE_H__
+#define __NEURUN_MODEL_OPERATION_CONV2D_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class Conv2DNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ KERNEL,
+ BIAS
+ };
+
+ struct Param
+ {
+ Stride stride;
+ Padding padding;
+ Activation activation;
+ };
+
+public:
+ Conv2DNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Conv2D"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_CONV2D_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/CustomNode.h b/runtimes/neurun/core/include/model/operation/CustomNode.h
new file mode 100644
index 000000000..ea51b9f3e
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/CustomNode.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __NEURUN_MODEL_OPERATION_CUSTOM_NODE_H__
+#define __NEURUN_MODEL_OPERATION_CUSTOM_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class CustomNode : public model::Operation
+{
+public:
+ struct Userdata
+ {
+ char *data;
+ size_t size;
+ };
+
+ CustomNode(OperandConstraint input_constr, const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, std::string id, const Userdata &userdata);
+
+ void accept(OperationVisitor &v) const override;
+
+public:
+ /**
+ * @return unique operation identifier
+ */
+ const std::string &id() const;
+
+ std::string getName() const override;
+
+ /**
+ * @return user-provided data
+ */
+ const Userdata &userdata() const;
+
+ ~CustomNode() override;
+
+private:
+ std::string _id;
+ Userdata _userdata;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+#endif // __NEURUN_MODEL_OPERATION_CUSTOM_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/DepthToSpaceNode.h b/runtimes/neurun/core/include/model/operation/DepthToSpaceNode.h
new file mode 100644
index 000000000..eee6ab7a5
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/DepthToSpaceNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_DEPTH_TO_SPACE_NODE_H__
+#define __NEURUN_MODEL_OPERATION_DEPTH_TO_SPACE_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class DepthToSpaceNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex block_size_index;
+ };
+
+public:
+ DepthToSpaceNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "DepthToSpace"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_DEPTH_TO_SPACE_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/DepthwiseConv2DNode.h b/runtimes/neurun/core/include/model/operation/DepthwiseConv2DNode.h
new file mode 100644
index 000000000..45122fa2c
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/DepthwiseConv2DNode.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_DEPTHWISECONV2D_NODE_H__
+#define __NEURUN_MODEL_OPERATION_DEPTHWISECONV2D_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class DepthwiseConv2DNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ KERNEL,
+ BIAS
+ };
+
+ struct Param
+ {
+ Stride stride;
+ Padding padding;
+ uint32_t multiplier;
+ Activation activation;
+ };
+
+public:
+ DepthwiseConv2DNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "DepthwiseConv2D"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_DEPTHWISECONV2D_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/DequantizeNode.h b/runtimes/neurun/core/include/model/operation/DequantizeNode.h
new file mode 100644
index 000000000..1536c0f09
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/DequantizeNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_DEQUANTIZE_NODE_H__
+#define __NEURUN_MODEL_OPERATION_DEQUANTIZE_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class DequantizeNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ DequantizeNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Dequantize"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_DEQUANTIZE_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/DivNode.h b/runtimes/neurun/core/include/model/operation/DivNode.h
new file mode 100644
index 000000000..d30efe116
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/DivNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_DIV_NODE_H__
+#define __NEURUN_MODEL_OPERATION_DIV_NODE_H__
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class DivNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ LHS = 0,
+ RHS
+ };
+
+ struct Param
+ {
+ Activation activation;
+ };
+
+public:
+ DivNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Div"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_DIV_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/EmbeddingLookupNode.h b/runtimes/neurun/core/include/model/operation/EmbeddingLookupNode.h
new file mode 100644
index 000000000..9b61884db
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/EmbeddingLookupNode.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_EMBEDDING_LOOKUP_NODE_H__
+#define __NEURUN_MODEL_OPERATION_EMBEDDING_LOOKUP_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class EmbeddingLookupNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ LOOKUPS = 0,
+ VALUES = 1
+ };
+
+public:
+ EmbeddingLookupNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "EmbeddingLookup"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_EMBEDDING_LOOKUP_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ExpNode.h b/runtimes/neurun/core/include/model/operation/ExpNode.h
new file mode 100644
index 000000000..fa7aa1d68
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ExpNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_EXP_NODE_H__
+#define __NEURUN_MODEL_OPERATION_EXP_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ExpNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ ExpNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Exp"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_EXP_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/FloorNode.h b/runtimes/neurun/core/include/model/operation/FloorNode.h
new file mode 100644
index 000000000..13d87e8a7
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/FloorNode.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_FLOOR_NODE_H__
+#define __NEURUN_MODEL_OPERATION_FLOOR_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class FloorNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ FloorNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Floor"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_FLOOR_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/FullyConnectedNode.h b/runtimes/neurun/core/include/model/operation/FullyConnectedNode.h
new file mode 100644
index 000000000..61809b660
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/FullyConnectedNode.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_FULLYCONNECTED_NODE_H__
+#define __NEURUN_MODEL_OPERATION_FULLYCONNECTED_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class FullyConnectedNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ WEIGHT,
+ BIAS
+ };
+
+ struct Param
+ {
+ Activation activation;
+ };
+
+public:
+ FullyConnectedNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "FullyConnected"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_FULLYCONNECTED_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/GatherNode.h b/runtimes/neurun/core/include/model/operation/GatherNode.h
new file mode 100644
index 000000000..fddeefcf0
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/GatherNode.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_GATHER_NODE_H__
+#define __NEURUN_MODEL_OPERATION_GATHER_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class GatherNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ INDICES,
+ };
+
+ struct Param
+ {
+ OperandIndex axis_index;
+ };
+
+public:
+ GatherNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Gather"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_GATHER_NODE_H__
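
GatherNode illustrates the other attribute convention in this patch: the axis is not stored by value but referenced as an OperandIndex into the operand table. A sketch under the same constructibility assumptions as above (the index 3 is hypothetical and would name a constant scalar operand registered elsewhere):

    #include "model/operation/GatherNode.h"

    void buildGather() // hypothetical helper, illustration only
    {
      using namespace neurun::model;
      operation::GatherNode::Param param;
      param.axis_index = OperandIndex{3}; // assumed constant scalar operand holding the axis
      operation::GatherNode gather{OperandIndexSequence{OperandIndex{0}, OperandIndex{1}}, // INPUT, INDICES
                                   OperandIndexSequence{OperandIndex{2}}, param};
      (void)gather;
    }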
diff --git a/runtimes/neurun/core/include/model/operation/HashtableLookupNode.h b/runtimes/neurun/core/include/model/operation/HashtableLookupNode.h
new file mode 100644
index 000000000..bbf3d309b
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/HashtableLookupNode.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_HASHTABLE_LOOKUP_NODE_H__
+#define __NEURUN_MODEL_OPERATION_HASHTABLE_LOOKUP_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class HashtableLookupNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ LOOKUPS = 0,
+ KEYS = 1,
+ VALUES = 2
+ };
+
+ enum Output
+ {
+ OUTPUT = 0,
+ HITS = 1
+ };
+
+public:
+ HashtableLookupNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "HashTableLookup"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_HASHTABLE_LOOKUP_NODE_H__
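
Nodes with several results use an Output enum symmetrically to Input: the enumerators are positions in the outputs sequence. For example (illustrative only, same assumptions as the sketches above):

    #include "model/operation/HashtableLookupNode.h"

    void buildHashtableLookup() // hypothetical helper, illustration only
    {
      using namespace neurun::model;
      using HL = operation::HashtableLookupNode;
      OperandIndexSequence inputs{OperandIndex{0}, OperandIndex{1}, OperandIndex{2}}; // LOOKUPS, KEYS, VALUES
      OperandIndexSequence outputs{OperandIndex{3}, OperandIndex{4}};                 // OUTPUT, HITS
      HL node{inputs, outputs};
      static_assert(HL::HITS == 1, "Output enumerators are positions in the outputs sequence");
    }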
diff --git a/runtimes/neurun/core/include/model/operation/L2NormalizationNode.h b/runtimes/neurun/core/include/model/operation/L2NormalizationNode.h
new file mode 100644
index 000000000..3c126de45
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/L2NormalizationNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_L2_NORMALIZATION_NODE_H__
+#define __NEURUN_MODEL_OPERATION_L2_NORMALIZATION_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class L2NormalizationNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ L2NormalizationNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "L2Normalization"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_L2_NORMALIZATION_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/L2Pool2DNode.h b/runtimes/neurun/core/include/model/operation/L2Pool2DNode.h
new file mode 100644
index 000000000..76e80d35a
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/L2Pool2DNode.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_L2_POOL_2D_NODE_H__
+#define __NEURUN_MODEL_OPERATION_L2_POOL_2D_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class L2Pool2DNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ };
+
+ struct Param
+ {
+ Padding padding;
+ Stride stride;
+ uint32_t kw;
+ uint32_t kh;
+ Activation activation;
+ };
+
+public:
+ L2Pool2DNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "L2Pool2D"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_L2_POOL_2D_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/LSTMNode.h b/runtimes/neurun/core/include/model/operation/LSTMNode.h
new file mode 100644
index 000000000..e453aed6b
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/LSTMNode.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __NEURUN_MODEL_OPERATION_LSTM_NODE_H__
+#define __NEURUN_MODEL_OPERATION_LSTM_NODE_H__
+
+#include "model/InternalType.h"
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class LSTMNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ INPUT_TO_INPUT_WEIGHTS = 1,
+ INPUT_TO_FORGET_WEIGHTS = 2,
+ INPUT_TO_CELL_WEIGHTS = 3,
+ INPUT_TO_OUTPUT_WEIGHTS = 4,
+ RECURRENT_TO_INPUT_WEIGHTS = 5,
+ RECURRENT_TO_FORGET_WEIGHTS = 6,
+ RECURRENT_TO_CELL_WEIGHTS = 7,
+ RECURRENT_TO_OUTPUT_WEIGHTS = 8,
+ CELL_TO_INPUT_WEIGHTS = 9,
+ CELL_TO_FORGET_WEIGHTS = 10,
+ CELL_TO_OUTPUT_WEIGHTS = 11,
+ INPUT_GATE_BIAS = 12,
+ FORGET_GATE_BIAS = 13,
+ CELL_BIAS = 14,
+ OUTPUT_GATE_BIAS = 15,
+ PROJECTION_WEIGHTS = 16,
+ PROJECTION_BIAS = 17,
+ OUTPUT_STATE_IN = 18,
+ CELL_STATE_IN = 19,
+ };
+
+ enum Output
+ {
+ SCRATCH_BUFFER = 0,
+ OUTPUT_STATE_OUT = 1,
+ CELL_STATE_OUT = 2,
+ OUTPUT = 3
+ };
+
+ struct Param
+ {
+ Activation activation;
+ float cell_threshold;
+ float projection_threshold;
+ };
+
+public:
+ LSTMNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "LSTM"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_LSTM_NODE_H__
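
The 20-entry Input enum encodes the positional contract for the inputs sequence, mirroring the NN API operand ordering; the numbering itself is part of the interface. A compile-time check makes that explicit:

    #include "model/operation/LSTMNode.h"

    using neurun::model::operation::LSTMNode;
    static_assert(LSTMNode::RECURRENT_TO_CELL_WEIGHTS == 7,
                  "Input enumerators are positional indices into the inputs sequence");
    static_assert(LSTMNode::OUTPUT == 3,
                  "Output enumerators are positional indices into the outputs sequence");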
diff --git a/runtimes/neurun/core/include/model/operation/LocalResponseNormalizationNode.h b/runtimes/neurun/core/include/model/operation/LocalResponseNormalizationNode.h
new file mode 100644
index 000000000..a7c1cd382
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/LocalResponseNormalizationNode.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_LOCAL_RESPONSE_NORMALIZATION_NODE_H__
+#define __NEURUN_MODEL_OPERATION_LOCAL_RESPONSE_NORMALIZATION_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class LocalResponseNormalizationNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex radius_index;
+ OperandIndex bias_index;
+ OperandIndex alpha_index;
+ OperandIndex beta_index;
+ };
+
+public:
+ LocalResponseNormalizationNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "LocalResponseNormalization"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_LOCAL_RESPONSE_NORMALIZATION_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/LogicalAndNode.h b/runtimes/neurun/core/include/model/operation/LogicalAndNode.h
new file mode 100644
index 000000000..058f457d2
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/LogicalAndNode.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_LOGICAL_AND_NODE_H__
+#define __NEURUN_MODEL_OPERATION_LOGICAL_AND_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class LogicalAndNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT0 = 0,
+ INPUT1 = 1,
+ };
+
+public:
+ LogicalAndNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "LogicalAnd"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_LOGICAL_AND_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/LogicalNotNode.h b/runtimes/neurun/core/include/model/operation/LogicalNotNode.h
new file mode 100644
index 000000000..d694510d7
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/LogicalNotNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_LOGICAL_NOT_NODE_H__
+#define __NEURUN_MODEL_OPERATION_LOGICAL_NOT_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class LogicalNotNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ };
+
+public:
+ LogicalNotNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "LogicalNot"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_LOGICAL_NOT_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/LogicalOrNode.h b/runtimes/neurun/core/include/model/operation/LogicalOrNode.h
new file mode 100644
index 000000000..220aea2c7
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/LogicalOrNode.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_LOGICAL_OR_NODE_H__
+#define __NEURUN_MODEL_OPERATION_LOGICAL_OR_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class LogicalOrNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT0 = 0,
+ INPUT1 = 1,
+ };
+
+public:
+ LogicalOrNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "LogicalOr"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_LOGICAL_OR_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/LogisticNode.h b/runtimes/neurun/core/include/model/operation/LogisticNode.h
new file mode 100644
index 000000000..03577143b
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/LogisticNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_LOGISTIC_NODE_H__
+#define __NEURUN_MODEL_OPERATION_LOGISTIC_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class LogisticNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ LogisticNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Logistic"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_LOGISTIC_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/MaxPool2DNode.h b/runtimes/neurun/core/include/model/operation/MaxPool2DNode.h
new file mode 100644
index 000000000..e8afe863d
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/MaxPool2DNode.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_MAXPOOL2D_NODE_H__
+#define __NEURUN_MODEL_OPERATION_MAXPOOL2D_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class MaxPool2DNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ uint32_t kh;
+ uint32_t kw;
+ Stride stride;
+ Padding padding;
+ Activation activation;
+ };
+
+public:
+ MaxPool2DNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "MaxPool2D"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_MAXPOOL2D_NODE_H__
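
Pooling nodes carry their window size, stride, padding, and fused activation in Param. A sketch (Stride and Padding are defined in model/InternalType.h, outside this hunk, so they are left value-initialized here purely for illustration):

    #include "model/operation/MaxPool2DNode.h"

    void buildMaxPool() // hypothetical helper, illustration only
    {
      using namespace neurun::model;
      operation::MaxPool2DNode::Param param{}; // value-initialize stride/padding/activation
      param.kh = 2; // 2x2 pooling window
      param.kw = 2;
      operation::MaxPool2DNode pool{OperandIndexSequence{OperandIndex{0}},
                                    OperandIndexSequence{OperandIndex{1}}, param};
      (void)pool;
    }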
diff --git a/runtimes/neurun/core/include/model/operation/MeanNode.h b/runtimes/neurun/core/include/model/operation/MeanNode.h
new file mode 100644
index 000000000..9d142545d
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/MeanNode.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_MEAN_NODE_H__
+#define __NEURUN_MODEL_OPERATION_MEAN_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class MeanNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex axis_index;
+ bool keep_dims;
+ };
+
+public:
+ MeanNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Mean"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_MEAN_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/MulNode.h b/runtimes/neurun/core/include/model/operation/MulNode.h
new file mode 100644
index 000000000..e76155256
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/MulNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_MUL_NODE_H__
+#define __NEURUN_MODEL_OPERATION_MUL_NODE_H__
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class MulNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ LHS = 0,
+ RHS
+ };
+
+ struct Param
+ {
+ Activation activation;
+ };
+
+public:
+ MulNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Mul"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_MUL_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/NegNode.h b/runtimes/neurun/core/include/model/operation/NegNode.h
new file mode 100644
index 000000000..07f27eab7
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/NegNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_NEG_NODE_H__
+#define __NEURUN_MODEL_OPERATION_NEG_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class NegNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ NegNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Neg"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_NEG_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/PReLUNode.h b/runtimes/neurun/core/include/model/operation/PReLUNode.h
new file mode 100644
index 000000000..e31805d7f
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/PReLUNode.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_PRELU_NODE_H__
+#define __NEURUN_MODEL_OPERATION_PRELU_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class PReLUNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ ALPHA = 1
+ };
+
+public:
+ PReLUNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "PReLU"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_PRELU_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/PadNode.h b/runtimes/neurun/core/include/model/operation/PadNode.h
new file mode 100644
index 000000000..c4cc18c39
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/PadNode.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_PAD_NODE_H__
+#define __NEURUN_MODEL_OPERATION_PAD_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class PadNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ PAD = 1,
+ // VALUE = 2 (a padding value operand is not yet supported)
+ };
+
+public:
+ PadNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Pad"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_PAD_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/PermuteNode.h b/runtimes/neurun/core/include/model/operation/PermuteNode.h
new file mode 100644
index 000000000..2339f35ee
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/PermuteNode.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_PERMUTE_NODE_H__
+#define __NEURUN_MODEL_OPERATION_PERMUTE_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace backend
+{
+class BackendContext;
+} // namespace backend
+} // namespace neurun
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class PermuteNode : public model::Operation
+{
+public:
+ enum class Type
+ {
+ NHWC_TO_NCHW,
+ NCHW_TO_NHWC,
+ COPY
+ };
+
+ struct Param
+ {
+ const backend::BackendContext *input_backend_ctx;
+ const backend::BackendContext *output_backend_ctx;
+ };
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Permute"; }
+
+public:
+ PermuteNode(const OperandIndex &input, const OperandIndex &output,
+ const backend::BackendContext *input_backend_ctx,
+ const backend::BackendContext *output_backend_ctx, Type type,
+ model::DataType data_type = model::DataType::FLOAT32);
+
+public:
+ const Param &param() const { return _param; }
+ model::DataType getDataType() const { return _dataType; }
+ Type getPermuteType() const { return _type; }
+
+private:
+ Param _param;
+ Type _type;
+ model::DataType _dataType;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_PERMUTE_NODE_H__
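
PermuteNode departs from the common constructor shape: it takes single operand indices plus the backend contexts between which data moves. A sketch (the nullptr contexts are purely illustrative to keep the snippet self-contained; real code would obtain BackendContext pointers from backend resolution):

    #include "model/operation/PermuteNode.h"

    void buildPermute() // hypothetical helper, illustration only
    {
      using namespace neurun::model;
      operation::PermuteNode permute{OperandIndex{0}, OperandIndex{1},
                                     /*input_backend_ctx=*/nullptr,
                                     /*output_backend_ctx=*/nullptr,
                                     operation::PermuteNode::Type::NHWC_TO_NCHW};
      (void)permute; // data type defaults to model::DataType::FLOAT32
    }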
diff --git a/runtimes/neurun/core/include/model/operation/RNNNode.h b/runtimes/neurun/core/include/model/operation/RNNNode.h
new file mode 100644
index 000000000..fb4c9b325
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/RNNNode.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __NEURUN_MODEL_OPERATION_RNN_NODE_H__
+#define __NEURUN_MODEL_OPERATION_RNN_NODE_H__
+
+#include "model/InternalType.h"
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class RNNNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0,
+ WEIGHTS = 1,
+ RECURRENT_WEIGHTS = 2,
+ BIAS = 3,
+ HIDDEN_STATE_IN = 4
+ };
+
+ enum Output
+ {
+ OUTPUT = 0,
+ HIDDEN_STATE_OUT = 1
+ };
+
+ struct Param
+ {
+ Activation activation;
+ };
+
+public:
+ RNNNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "RNN"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_RNN_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/RSQRTNode.h b/runtimes/neurun/core/include/model/operation/RSQRTNode.h
new file mode 100644
index 000000000..bd3fe2227
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/RSQRTNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_RSQRT_NODE_H__
+#define __NEURUN_MODEL_OPERATION_RSQRT_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class RSQRTNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ RSQRTNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "RSQRT"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_RSQRT_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ReLU1Node.h b/runtimes/neurun/core/include/model/operation/ReLU1Node.h
new file mode 100644
index 000000000..d8a325f21
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ReLU1Node.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_RELU1_NODE_H__
+#define __NEURUN_MODEL_OPERATION_RELU1_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ReLU1Node : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ ReLU1Node(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ReLU1"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_RELU1_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ReLU6Node.h b/runtimes/neurun/core/include/model/operation/ReLU6Node.h
new file mode 100644
index 000000000..437f1e07b
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ReLU6Node.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_RELU6_NODE_H__
+#define __NEURUN_MODEL_OPERATION_RELU6_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ReLU6Node : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ ReLU6Node(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ReLU6"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_RELU6_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ReLUNode.h b/runtimes/neurun/core/include/model/operation/ReLUNode.h
new file mode 100644
index 000000000..848ca1b5c
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ReLUNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_RELU_NODE_H__
+#define __NEURUN_MODEL_OPERATION_RELU_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ReLUNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ ReLUNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ReLU"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_RELU_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ReduceMaxNode.h b/runtimes/neurun/core/include/model/operation/ReduceMaxNode.h
new file mode 100644
index 000000000..3886ff481
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ReduceMaxNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_REDUCEMAX_NODE_H__
+#define __NEURUN_MODEL_OPERATION_REDUCEMAX_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ReduceMaxNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex axis_index;
+ };
+
+public:
+ ReduceMaxNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ReduceMax"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_REDUCEMAX_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ReduceMinNode.h b/runtimes/neurun/core/include/model/operation/ReduceMinNode.h
new file mode 100644
index 000000000..f0de17c07
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ReduceMinNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_REDUCEMIN_NODE_H__
+#define __NEURUN_MODEL_OPERATION_REDUCEMIN_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ReduceMinNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex axis_index;
+ };
+
+public:
+ ReduceMinNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ReduceMin"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_REDUCEMIN_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ReduceSumNode.h b/runtimes/neurun/core/include/model/operation/ReduceSumNode.h
new file mode 100644
index 000000000..b70c83cff
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ReduceSumNode.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_REDUCE_SUM_NODE_H__
+#define __NEURUN_MODEL_OPERATION_REDUCE_SUM_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ReduceSumNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex axis_index;
+ };
+
+public:
+ ReduceSumNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ReduceSum"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_REDUCE_SUM_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ReshapeNode.h b/runtimes/neurun/core/include/model/operation/ReshapeNode.h
new file mode 100644
index 000000000..735aa30e0
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ReshapeNode.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_RESHAPE_NODE_H__
+#define __NEURUN_MODEL_OPERATION_RESHAPE_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ReshapeNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ ReshapeNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Reshape"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_RESHAPE_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/ResizeBilinearNode.h b/runtimes/neurun/core/include/model/operation/ResizeBilinearNode.h
new file mode 100644
index 000000000..76f0341cc
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/ResizeBilinearNode.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_RESIZE_BILINEAR_NODE_H__
+#define __NEURUN_MODEL_OPERATION_RESIZE_BILINEAR_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class ResizeBilinearNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex height_index;
+ OperandIndex width_index;
+ };
+
+public:
+ ResizeBilinearNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "ResizeBilinear"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_RESIZE_BILINEAR_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/SQRTNode.h b/runtimes/neurun/core/include/model/operation/SQRTNode.h
new file mode 100644
index 000000000..b693dab94
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/SQRTNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_SQRT_NODE_H__
+#define __NEURUN_MODEL_OPERATION_SQRT_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class SQRTNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ SQRTNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "SQRT"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_SQRT_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/SoftmaxNode.h b/runtimes/neurun/core/include/model/operation/SoftmaxNode.h
new file mode 100644
index 000000000..0810526f3
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/SoftmaxNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_SOFTMAX_NODE_H__
+#define __NEURUN_MODEL_OPERATION_SOFTMAX_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class SoftmaxNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ float beta;
+ };
+
+public:
+ SoftmaxNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "SoftMax"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_SOFTMAX_NODE_H__
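
SoftmaxNode shows the by-value attribute style: beta is stored directly in Param rather than referenced through an OperandIndex. For example (illustrative only, same assumptions as the earlier sketches):

    #include "model/operation/SoftmaxNode.h"

    void buildSoftmax() // hypothetical helper, illustration only
    {
      using namespace neurun::model;
      operation::SoftmaxNode::Param param;
      param.beta = 1.0f; // stored by value, unlike the OperandIndex-based params above
      operation::SoftmaxNode softmax{OperandIndexSequence{OperandIndex{0}},
                                     OperandIndexSequence{OperandIndex{1}}, param};
      (void)softmax;
    }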
diff --git a/runtimes/neurun/core/include/model/operation/SpaceToDepthNode.h b/runtimes/neurun/core/include/model/operation/SpaceToDepthNode.h
new file mode 100644
index 000000000..bbf6732f1
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/SpaceToDepthNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_SPACE_TO_DEPTH_NODE_H__
+#define __NEURUN_MODEL_OPERATION_SPACE_TO_DEPTH_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class SpaceToDepthNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex block_size_index;
+ };
+
+public:
+ SpaceToDepthNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "SpaceToDepth"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_SPACE_TO_DEPTH_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/SplitNode.h b/runtimes/neurun/core/include/model/operation/SplitNode.h
new file mode 100644
index 000000000..eee2c4f84
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/SplitNode.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __NEURUN_MODEL_OPERATION_SPLIT_NODE_H__
+#define __NEURUN_MODEL_OPERATION_SPLIT_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+class SplitNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex axis_index;
+ OperandIndex num_of_splits_index;
+ };
+
+public:
+ SplitNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Split"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+} // namespace operation
+} // namespace model
+} // namespace neurun
+#endif // __NEURUN_MODEL_OPERATION_SPLIT_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/SquaredDifferenceNode.h b/runtimes/neurun/core/include/model/operation/SquaredDifferenceNode.h
new file mode 100644
index 000000000..180c68731
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/SquaredDifferenceNode.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_SQUARED_DIFFERENCE_NODE_H__
+#define __NEURUN_MODEL_OPERATION_SQUARED_DIFFERENCE_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class SquaredDifferenceNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ LHS = 0,
+ RHS
+ };
+
+public:
+ SquaredDifferenceNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "SquaredDifference"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_SQUARED_DIFFERENCE_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/SqueezeNode.h b/runtimes/neurun/core/include/model/operation/SqueezeNode.h
new file mode 100644
index 000000000..aa2386b94
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/SqueezeNode.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_SQUEEZE_NODE_H__
+#define __NEURUN_MODEL_OPERATION_SQUEEZE_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class SqueezeNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ OperandIndex dims;
+ };
+
+public:
+ SqueezeNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Squeeze"; }
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_SQUEEZE_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/StridedSliceNode.h b/runtimes/neurun/core/include/model/operation/StridedSliceNode.h
new file mode 100644
index 000000000..4de5bc9df
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/StridedSliceNode.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_STRIDED_SLICE_H__
+#define __NEURUN_MODEL_OPERATION_STRIDED_SLICE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class StridedSliceNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+    OperandIndex startData_index;      //!< index of the operand where slicing starts
+    OperandIndex endData_index;        //!< index of the operand where slicing ends
+    OperandIndex stridesData_index;    //!< index of the stride values
+    OperandIndex beginMask_index;      //!< index of the begin mask
+    OperandIndex endMask_index;        //!< index of the end mask
+    OperandIndex shrinkAxisMask_index; //!< index of the shrink-axis mask
+ };
+
+public:
+ StridedSliceNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "StridedSlice"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_STRIDED_SLICE_H__
diff --git a/runtimes/neurun/core/include/model/operation/SubNode.h b/runtimes/neurun/core/include/model/operation/SubNode.h
new file mode 100644
index 000000000..cb930fd95
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/SubNode.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_SUB_NODE_H__
+#define __NEURUN_MODEL_OPERATION_SUB_NODE_H__
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class SubNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ LHS = 0,
+ RHS
+ };
+
+ struct Param
+ {
+ Activation activation;
+ };
+
+public:
+ SubNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Sub"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_SUB_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/TanhNode.h b/runtimes/neurun/core/include/model/operation/TanhNode.h
new file mode 100644
index 000000000..5af480ab7
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/TanhNode.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_TANH_NODE_H__
+#define __NEURUN_MODEL_OPERATION_TANH_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class TanhNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+public:
+ TanhNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Tanh"; }
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_TANH_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/TopKV2Node.h b/runtimes/neurun/core/include/model/operation/TopKV2Node.h
new file mode 100644
index 000000000..675c19c58
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/TopKV2Node.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_TOPK_V2_H__
+#define __NEURUN_MODEL_OPERATION_TOPK_V2_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class TopKV2Node : public model::Operation
+{
+public:
+ enum Input
+ {
+    INPUT = 0
+ };
+
+ enum Output
+ {
+ OUTPUT_VALUES = 0,
+ OUTPUT_INDICES,
+ };
+
+ struct Param
+ {
+ OperandIndex k_index;
+ };
+
+public:
+ TopKV2Node(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "TopKV2"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_TOPK_V2_H__
diff --git a/runtimes/neurun/core/include/model/operation/TransposeConvNode.h b/runtimes/neurun/core/include/model/operation/TransposeConvNode.h
new file mode 100644
index 000000000..72443c810
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/TransposeConvNode.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_TRANSPOSE_CONV_NODE_H__
+#define __NEURUN_MODEL_OPERATION_TRANSPOSE_CONV_NODE_H__
+
+#include <memory>
+
+#include "model/Operation.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class TransposeConvNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ OUTPUT_SHAPE = 0,
+ KERNEL,
+ INPUT
+ };
+
+ struct Param
+ {
+ Padding padding;
+ Stride stride;
+ };
+
+public:
+ TransposeConvNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "TransposeConv"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_TRANSPOSE_CONV_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/TransposeNode.h b/runtimes/neurun/core/include/model/operation/TransposeNode.h
new file mode 100644
index 000000000..1a42212e8
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/TransposeNode.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_OPERATION_TRANSPOSE_NODE_H__
+#define __NEURUN_MODEL_OPERATION_TRANSPOSE_NODE_H__
+
+#include "model/Operation.h"
+
+#include <utility>
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+class TransposeNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0, // for an n-D tensor, specifying the tensor to be transposed.
+ };
+
+ struct Param
+ {
+    // The permutation vector is optional.
+    // When it is not provided, `perm` is left as an invalid (undefined) index;
+    // callers can check this with perm.valid().
+ OperandIndex perm;
+ };
+
+public:
+ TransposeNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Transpose"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_OPERATION_TRANSPOSE_NODE_H__
diff --git a/runtimes/neurun/core/include/model/operation/UnpackNode.h b/runtimes/neurun/core/include/model/operation/UnpackNode.h
new file mode 100644
index 000000000..08d8979bf
--- /dev/null
+++ b/runtimes/neurun/core/include/model/operation/UnpackNode.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __NEURUN_MODEL_OPERATION_UNPACK_NODE_H__
+#define __NEURUN_MODEL_OPERATION_UNPACK_NODE_H__
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+class UnpackNode : public model::Operation
+{
+public:
+ enum Input
+ {
+ INPUT = 0
+ };
+
+ struct Param
+ {
+ int32_t num;
+ int32_t axis;
+ };
+
+public:
+ UnpackNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param);
+
+public:
+ void accept(OperationVisitor &v) const override;
+ std::string getName() const override { return "Unpack"; }
+
+public:
+ const Param &param() const { return _param; }
+
+private:
+ Param _param;
+};
+} // namespace operation
+} // namespace model
+} // namespace neurun
+#endif // __NEURUN_MODEL_OPERATION_UNPACK_NODE_H__
diff --git a/runtimes/neurun/core/include/util/Config.lst b/runtimes/neurun/core/include/util/Config.lst
new file mode 100644
index 000000000..c17ac147e
--- /dev/null
+++ b/runtimes/neurun/core/include/util/Config.lst
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CONFIG
+#error Define CONFIG before including this file
+#endif
+
+// Name | Type | Default
+CONFIG(GRAPH_DOT_DUMP , int , "0")
+CONFIG(BACKENDS , std::string , "cpu;acl_cl;acl_neon;srcn")
+CONFIG(OP_BACKEND_ALLOPS , std::string , "acl_cl")
+CONFIG(OP_BACKEND_MAP , std::string , "")
+CONFIG(DISABLE_COMPILE , bool , "0")
+CONFIG(NEURUN_LOG_ENABLE , bool , "0")
+CONFIG(CPU_MEMORY_PLANNER , std::string , "FirstFit")
+CONFIG(EXECUTOR , std::string , "Linear")
+CONFIG(ACL_LAYOUT , std::string , "none")
+CONFIG(PROFILING_MODE , bool , "0")
+CONFIG(USE_SCHEDULER , bool , "0")
+
+// Auto-generate all operations
+
+#define OP(InternalName, IsNnApi) \
+ CONFIG(OP_BACKEND_ ## InternalName, std::string, "")
+#include "model/Operations.lst"
+#undef OP
+
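Config.lst is an X-macro list: a consumer defines CONFIG, includes the file, and gets one expansion per row (plus one OP_BACKEND_* row per operation via Operations.lst). ConfigSource.h below uses exactly this trick to declare the key names. A sketch of another typical expansion, collecting the default values into a map; the function name and include path are illustrative:

  #include <string>
  #include <unordered_map>

  std::unordered_map<std::string, std::string> collectDefaults()
  {
    std::unordered_map<std::string, std::string> defaults;
  #define CONFIG(Name, Type, Default) defaults.emplace(#Name, Default);
  #include "util/Config.lst"
  #undef CONFIG
    return defaults;
  }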
diff --git a/runtimes/neurun/core/include/util/ConfigSource.h b/runtimes/neurun/core/include/util/ConfigSource.h
new file mode 100644
index 000000000..b1fa9a87d
--- /dev/null
+++ b/runtimes/neurun/core/include/util/ConfigSource.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_CONFIG_SOURCE_H__
+#define __NEURUN_UTIL_CONFIG_SOURCE_H__
+
+#include <memory>
+
+#include "IConfigSource.h"
+
+namespace neurun
+{
+namespace util
+{
+
+void config_source(std::unique_ptr<IConfigSource> &&source);
+
+bool getConfigBool(const std::string &key);
+int getConfigInt(const std::string &key);
+std::string getConfigString(const std::string &key);
+
+} // namespace util
+} // namespace neurun
+
+namespace neurun
+{
+namespace util
+{
+namespace config
+{
+
+#define CONFIG(Name, Type, Default) extern const char *Name;
+
+#include "Config.lst"
+
+#undef CONFIG
+
+} // namespace config
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_CONFIG_SOURCE_H__
diff --git a/runtimes/neurun/core/include/util/Coordinates.h b/runtimes/neurun/core/include/util/Coordinates.h
new file mode 100644
index 000000000..67947138f
--- /dev/null
+++ b/runtimes/neurun/core/include/util/Coordinates.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_COORDINATES_H__
+#define __NEURUN_UTIL_COORDINATES_H__
+
+#include <cassert>
+#include <stdint.h>
+#include <vector>
+
+namespace neurun
+{
+namespace util
+{
+
+/**
+ * @brief Class to represent the position (offset) of a tensor.\n
+ *        Assume that the front is the higher dimension,
+ *        e.g. N: 0, C: 1, H: 2, W: 3 for NCHW layout
+ */
+class Coordinates final
+{
+public:
+ static constexpr size_t num_max_dimensions = 4;
+
+public:
+ /**
+ * @brief Construct a new Coordinates object
+   * @param[in] init The initializer_list with coordinates
+ * @return
+ */
+ Coordinates(std::initializer_list<int32_t> init) : _coordinates{init}
+ {
+ assert(init.size() <= num_max_dimensions);
+ }
+
+public:
+ /**
+   * @brief Set the coordinate of one of the dimensions.
+   *
+   * @param[in] dimension  Dimension for which the coordinate is set.
+   * @param[in] coordinate Coordinate to be set for the dimension.
+ */
+ void set(size_t dimension, int32_t coordinate)
+ {
+ assert(dimension < num_max_dimensions);
+ if (dimension >= _coordinates.size())
+ {
+ _coordinates.resize(dimension + 1, 0);
+ }
+ _coordinates[dimension] = coordinate;
+ }
+
+public:
+ /**
+ * @brief Return size of coordinates
+ *
+ * @return size of coordinates
+ */
+ size_t size() const { return _coordinates.size(); }
+
+public:
+ int32_t operator[](size_t dimension) const
+ {
+ assert(dimension < _coordinates.size());
+ return _coordinates[dimension];
+ }
+
+public:
+ /**
+ * @brief begin() of const_iterator for this class
+ *
+ * @return The first iterator of the coordinates
+ */
+ std::vector<int32_t>::const_iterator begin() const { return _coordinates.begin(); }
+ /**
+ * @brief end() of const_iterator for this class
+ *
+ * @return The last iterator of the coordinates
+ */
+ std::vector<int32_t>::const_iterator end() const { return _coordinates.end(); }
+
+private:
+ std::vector<int32_t> _coordinates;
+};
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_COORDINATES_H__
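A short sketch of the class above: the coordinate vector grows on demand up to num_max_dimensions, and unset dimensions default to zero:

  #include "util/Coordinates.h"

  neurun::util::Coordinates coord{0, 2, 1}; // N, C, H for NCHW, per the class comment
  coord.set(3, 5);                          // resizes to rank 4 and sets W = 5
  int32_t w = coord[3];                     // 5
  size_t rank = coord.size();               // 4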
diff --git a/runtimes/neurun/core/include/util/GeneralConfigSource.h b/runtimes/neurun/core/include/util/GeneralConfigSource.h
new file mode 100644
index 000000000..04e3332b3
--- /dev/null
+++ b/runtimes/neurun/core/include/util/GeneralConfigSource.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_GLOBAL_CONFIG_SOURCE_H__
+#define __NEURUN_UTIL_GLOBAL_CONFIG_SOURCE_H__
+
+#include <unordered_map>
+
+#include "util/IConfigSource.h"
+
+namespace neurun
+{
+namespace util
+{
+
+class GeneralConfigSource : public IConfigSource
+{
+public:
+ GeneralConfigSource() = default;
+
+ std::string get(const std::string &key) const override;
+ void set(const std::string &key, const std::string &val);
+
+private:
+ std::unordered_map<std::string, std::string> _map;
+};
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_GLOBAL_CONFIG_SOURCE_H__
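A usage sketch tying ConfigSource.h and GeneralConfigSource together: install a source process-wide, then read typed values back. The key value set here is illustrative, and nnfw::cpp14::make_unique is assumed available as it is used elsewhere in this patch:

  #include <utility>

  #include "util/ConfigSource.h"
  #include "util/GeneralConfigSource.h"

  void setupConfig()
  {
    auto source = nnfw::cpp14::make_unique<neurun::util::GeneralConfigSource>();
    source->set("NEURUN_LOG_ENABLE", "1");
    neurun::util::config_source(std::move(source));

    // keys are looked up by name; config::NEURUN_LOG_ENABLE holds the key string
    bool log_on = neurun::util::getConfigBool(neurun::util::config::NEURUN_LOG_ENABLE);
    (void)log_on;
  }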
diff --git a/runtimes/neurun/core/include/util/IConfigSource.h b/runtimes/neurun/core/include/util/IConfigSource.h
new file mode 100644
index 000000000..a52d87097
--- /dev/null
+++ b/runtimes/neurun/core/include/util/IConfigSource.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_I_CONFIG_SOURCE_H__
+#define __NEURUN_UTIL_I_CONFIG_SOURCE_H__
+
+#include <string>
+
+namespace neurun
+{
+namespace util
+{
+
+struct IConfigSource
+{
+ /**
+ * @brief Destroy the IConfigSource object
+ */
+ virtual ~IConfigSource() = default;
+
+ /**
+ * @brief get the value for the matching key
+ *
+ * @param key string key to search
+ * @return string value associated with the key
+ */
+ virtual std::string get(const std::string &key) const = 0;
+};
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_I_CONFIG_SOURCE_H__
diff --git a/runtimes/neurun/core/include/util/ITimer.h b/runtimes/neurun/core/include/util/ITimer.h
new file mode 100644
index 000000000..79ecdd0ca
--- /dev/null
+++ b/runtimes/neurun/core/include/util/ITimer.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_ITIMER_H__
+#define __NEURUN_UTIL_ITIMER_H__
+
+#include <chrono>
+
+namespace neurun
+{
+namespace util
+{
+
+class ITimer
+{
+public:
+ virtual void handleBegin() = 0;
+ virtual void handleEnd() = 0;
+  int getTime() { return _timer_res; }
+
+ virtual ~ITimer() = default;
+
+protected:
+ int _timer_res{0};
+};
+
+class CPUTimer : public ITimer
+{
+public:
+  void handleBegin() override { _start_time = std::chrono::steady_clock::now(); }
+
+ void handleEnd() override
+ {
+ const auto end_time = std::chrono::steady_clock::now();
+ _timer_res =
+ std::chrono::duration_cast<std::chrono::microseconds>(end_time - _start_time).count();
+  }
+
+private:
+  std::chrono::steady_clock::time_point _start_time;
+};
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_ITIMER_H__
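A minimal sketch of the timer above:

  #include "util/ITimer.h"

  neurun::util::CPUTimer timer;
  timer.handleBegin();
  // ... workload under measurement ...
  timer.handleEnd();
  int elapsed_us = timer.getTime(); // microseconds, per the duration_cast above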
diff --git a/runtimes/neurun/core/include/util/Index.h b/runtimes/neurun/core/include/util/Index.h
new file mode 100644
index 000000000..d1fdc237c
--- /dev/null
+++ b/runtimes/neurun/core/include/util/Index.h
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_INDEX_H__
+#define __NEURUN_UTIL_INDEX_H__
+
+#include <functional>
+#include <limits>
+#include <stdint.h>
+
+namespace neurun
+{
+namespace util
+{
+
+/**
+ * @brief A wrapper class for unsigned integral Index
+ * NOTE : Max value of the underlying type is used as the invalid value
+ *
+ * @tparam T Underlying type. Must be unsigned integral type otherwise its behavior is undefined.
+ * @tparam DummyTag Dummy type to distinguish types with a same underlying type. Using an opaque
+ * type is recommended.
+ */
+template <typename T, typename DummyTag> class Index
+{
+private:
+ static const T UNDEFINED = std::numeric_limits<T>::max();
+
+public:
+ /**
+ * @brief Construct a new Index object
+ */
+ explicit Index(void) : _index{UNDEFINED} {}
+ /**
+ * @brief Construct a new Index object with a value in the underlying type
+ *
+ * @param o Value in the underlying type
+ */
+ explicit Index(T o) : _index{o} {}
+ /**
+ * @brief Copy Constructor
+ *
+ * @param o Object to be copied
+ */
+ Index(const Index &o) : _index{o._index} {}
+
+ /**
+   * @brief Assign a value in the underlying type
+ *
+ * @param o Value in the underlying type
+   * @return Index& Reference to this object
+ */
+ Index &operator=(T o)
+ {
+ _index = o;
+ return *this;
+ }
+
+ /**
+ * @brief Copy assignment operator
+ *
+ * @param o Object to be copied
+   * @return Index& Reference to this object
+   */
+  Index &operator=(const Index &o)
+ {
+ _index = o._index;
+ return *this;
+ }
+
+ /**
+ * @brief Equality operator
+ *
+ * @param o The other value in the underlying type to compare
+ * @return true if underlying value is the same, false otherwise
+ */
+ bool operator==(T o) const { return _index == o; }
+ /**
+ * @brief Equality operator
+ *
+ * @param o The other object to compare
+ * @return true if underlying value is the same, false otherwise
+ */
+ bool operator==(const Index &o) const { return _index == o._index; }
+ /**
+   * @brief Inequality operator
+   *
+   * @param o The other value in the underlying type to compare
+   * @return true if underlying values differ, false otherwise
+ */
+ bool operator!=(T o) const { return !(*this == o); }
+ /**
+   * @brief Inequality operator
+   *
+   * @param o The other object to compare
+   * @return true if underlying values differ, false otherwise
+ */
+ bool operator!=(const Index &o) const { return !(*this == o); }
+
+ /**
+ * @brief Post increment operator
+ *
+ * @return Index Index before increment
+ */
+ Index operator++(int)
+ {
+ Index temp = *this;
+ _index++;
+ return temp;
+ }
+
+ /**
+ * @brief Check whether the value is valid or not
+ *
+ * @return true if valid, false otherwise
+ */
+ bool valid() const { return _index != UNDEFINED; }
+ /**
+ * @brief Return underlying value
+ *
+ * @return T Underlying value
+ */
+ T value() const { return _index; }
+
+private:
+ T _index;
+};
+
+} // namespace util
+} // namespace neurun
+
+namespace std
+{
+
+template <typename T, typename Tag> struct hash<::neurun::util::Index<T, Tag>>
+{
+ size_t operator()(const ::neurun::util::Index<T, Tag> &index) const noexcept
+ {
+ return hash<T>()(index.value());
+ }
+};
+
+} // namespace std
+
+#endif // __NEURUN_UTIL_INDEX_H__
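A sketch of the tag pattern the class above is designed for; the tag and map contents are illustrative:

  #include <cassert>
  #include <string>
  #include <unordered_map>

  #include "util/Index.h"

  struct ExampleTag; // opaque tag, never defined
  using ExampleIndex = neurun::util::Index<uint32_t, ExampleTag>;

  void example()
  {
    ExampleIndex idx; // default-constructed to the UNDEFINED sentinel
    assert(!idx.valid());

    idx = 3u; // assignment from the underlying type
    assert(idx.valid() && idx.value() == 3u);

    // the std::hash specialization above makes it a valid unordered_map key
    std::unordered_map<ExampleIndex, std::string> names;
    names.emplace(idx, "conv1");
  }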
diff --git a/runtimes/neurun/core/include/util/ObjectManager.h b/runtimes/neurun/core/include/util/ObjectManager.h
new file mode 100644
index 000000000..fd2c3f295
--- /dev/null
+++ b/runtimes/neurun/core/include/util/ObjectManager.h
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_OBJECT_MANAGER_H__
+#define __NEURUN_UTIL_OBJECT_MANAGER_H__
+
+#include <unordered_map>
+#include <memory>
+#include <list>
+#include <functional>
+
+#include "cpp14/memory.h" // for nnfw::cpp14::make_unique used below (include path assumed)
+
+namespace neurun
+{
+namespace util
+{
+
+/**
+ * @brief Class that owns objects and maps each of them to an index that serves as its handle
+ *
+ */
+template <typename Index, typename Object> class ObjectManager
+{
+public:
+ ObjectManager() : _index_count{0u} {}
+
+public:
+ /**
+ * @brief Create an object with args and put it in the container with a new Index for that
+ *
+   * @param[in] args Arguments for creating the Object
+ * @return Created index that is associated to the object
+ */
+ template <class... Args> Index emplace(Args &&... args)
+ {
+ auto index = generateIndex();
+ _objects.emplace(index, nnfw::cpp14::make_unique<Object>(std::forward<Args>(args)...));
+ return index;
+ }
+
+ /**
+ * @brief Put object in the container with a new Index for that
+ *
+ * @param[in] object Object to be pushed
+ * @return Created index that is associated to the object
+ */
+ Index push(std::unique_ptr<Object> &&object)
+ {
+ auto index = generateIndex();
+ _objects.emplace(index, std::move(object));
+ return index;
+ }
+
+ /**
+ * @brief Remove the object that is associated with the given index
+ *
+ * @param[in] index Index of the object to be removed
+ * @return N/A
+ */
+  void remove(const Index &index) { _objects.erase(index); }
+
+ /**
+ * @brief Get the object that is associated with the given index
+ *
+ * @param[in] index Index of the object to be returned
+ * @return Object
+ */
+ const Object &at(const Index &index) const { return *(_objects.at(index)); }
+ /**
+ * @brief Get the object that is associated with the given index
+ *
+ * @param[in] index Index of the object to be returned
+ * @return Object
+ */
+ Object &at(const Index &index) { return *(_objects.at(index)); }
+ /**
+   * @brief Check whether an object is associated with the given index
+   *
+   * @param[in] index Index of the object to look up
+   * @return true if such an entry exists, false otherwise
+ */
+ bool exist(const Index &index) const
+ {
+ auto it = _objects.find(index);
+ return it != _objects.end();
+ }
+ /**
+ * @brief Iterate over the container with given function
+ *
+ * @param[in] fn Function to be run for every container entry
+ * @return N/A
+ */
+ void iterate(const std::function<void(const Index &, const Object &)> &fn) const
+ {
+ for (const auto &e : _objects)
+ {
+ fn(e.first, *e.second);
+ }
+ }
+ /**
+ * @brief Iterate over the container with given function
+ *
+ * @param[in] fn Function to be run for every container entry
+ * @return N/A
+ */
+ void iterate(const std::function<void(const Index &, Object &)> &fn)
+ {
+ // TODO Remove this workaround
+  // This implementation is a workaround for the case where operands are added during iteration
+ std::list<Index> l;
+
+ for (auto &e : _objects)
+ {
+ l.push_back(e.first);
+ }
+
+ for (auto index : l)
+ {
+ fn(index, *_objects[index]);
+ }
+ }
+
+private:
+ Index generateIndex() { return Index{_index_count++}; }
+
+private:
+ std::unordered_map<Index, std::unique_ptr<Object>> _objects;
+ uint32_t _index_count;
+};
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_OBJECT_MANAGER_H__
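A sketch of the manager above with a hypothetical index/object pair:

  #include "util/Index.h"
  #include "util/ObjectManager.h"

  struct OpTag; // illustrative tag type
  using OpIndex = neurun::util::Index<uint32_t, OpTag>;

  struct Op
  {
    int kind = 0;
  };

  void example()
  {
    neurun::util::ObjectManager<OpIndex, Op> mgr;
    OpIndex idx = mgr.emplace(); // creates an Op, returns its handle
    mgr.at(idx).kind = 7;
    mgr.iterate([](const OpIndex &i, Op &op) {
      (void)i;
      (void)op; // visit every (index, object) pair
    });
    mgr.remove(idx);
  }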
diff --git a/runtimes/neurun/core/include/util/Padding.h b/runtimes/neurun/core/include/util/Padding.h
new file mode 100644
index 000000000..230013238
--- /dev/null
+++ b/runtimes/neurun/core/include/util/Padding.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_PADDING_H__
+#define __NEURUN_UTIL_PADDING_H__
+
+#include <stdint.h>
+
+#include "model/Shape.h"
+#include "model/InternalType.h"
+
+namespace neurun
+{
+namespace util
+{
+
+model::ExplicitPadding validPadding(void);
+model::ExplicitPadding samePadding(const model::FeatureShape &ifm_shape,
+ const model::FeatureShape &ofm_shape,
+ const model::Stride &stride, uint32_t kw, uint32_t kh);
+model::ExplicitPadding calculatePadding(const model::Padding &padding,
+ const model::FeatureShape &ifm_shape,
+ const model::FeatureShape &ofm_shape,
+ const model::Stride &stride, uint32_t kw, uint32_t kh);
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_PADDING_H__
diff --git a/runtimes/neurun/core/include/util/Set.h b/runtimes/neurun/core/include/util/Set.h
new file mode 100644
index 000000000..13213511d
--- /dev/null
+++ b/runtimes/neurun/core/include/util/Set.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Set.h
+ * @brief This file contains neurun::util::Set class
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NEURUN_UTIL_SET_H__
+#define __NEURUN_UTIL_SET_H__
+
+#include <cassert>
+#include <stdint.h>
+#include <unordered_set>
+
+namespace neurun
+{
+namespace util
+{
+
+/**
+ * @brief Class for a set of custom elements
+ * @tparam Element Key type of the Set
+ */
+template <typename Element> class Set
+{
+public:
+ /**
+ * @brief Construct default Set object.
+ */
+ Set() = default;
+ /**
+ * @brief Construct Set object by copy semantics.
+ */
+ Set(const Set<Element> &) = default;
+ /**
+   * @brief Construct Set object by move semantics.
+ */
+ Set(Set<Element> &&) = default;
+
+public:
+ /**
+ * @brief Add a given element to the set
+ *
+ * @param e Element added
+ */
+ void add(const Element &e) { _set.insert(e); }
+ /**
+   * @brief Remove a given element from the set
+ *
+ * @param e Element removed
+ */
+ void remove(const Element &e) { _set.erase(e); }
+ /**
+ * @brief Get size of the set
+ *
+ * @return The size of the set
+ */
+ uint32_t size() const { return static_cast<uint32_t>(_set.size()); }
+ /**
+ * @brief Get whether the set is empty
+ *
+ * @return Whether the set is empty
+ */
+ bool empty() const { return _set.empty(); }
+ /**
+ * @brief Get whether a given element exists in the set
+ *
+ * @param e A given element
+ *
+ * @return Whether a given element exists in the set
+ */
+ bool contains(const Element &e) const { return _set.find(e) != _set.end(); }
+ /**
+   * @brief Get the only element of the set (asserts that the set has exactly one element)
+   *
+   * @return The only element of the set
+ */
+ const Element &getOnlyElement() const
+ {
+ assert(_set.size() == 1u);
+ return *_set.begin();
+ }
+
+public:
+ /**
+ * @brief operator overloading function for `|`
+ *
+   * @return The union of the two sets
+ */
+ Set<Element> operator|(const Set<Element> &other) const // Union
+ {
+ auto ret = *this;
+ for (auto e : other)
+ {
+ ret.add(e);
+ }
+ return ret;
+ }
+ /**
+ * @brief operator overloading function for `&`
+ *
+   * @return The intersection of the two sets
+ */
+ Set<Element> operator&(const Set<Element> &other) const // Intersect
+ {
+ Set<Element> ret;
+ for (auto e : other)
+ {
+ if (contains(e))
+ {
+ ret.add(e);
+ }
+ }
+ return ret;
+ }
+ /**
+ * @brief operator overloading function for `-`
+ *
+   * @return The set difference (elements of this set not in the other)
+ */
+ Set<Element> operator-(const Set<Element> &other) const // Minus
+ {
+ auto ret = *this;
+ for (auto e : other)
+ {
+ ret.remove(e);
+ }
+ return ret;
+ }
+
+public:
+ /**
+ * @brief begin() of const_iterator for this class
+ *
+ * @return The first iterator of the set
+ */
+ typename std::unordered_set<Element>::const_iterator begin() const { return _set.begin(); }
+ /**
+ * @brief end() of const_iterator for this class
+ *
+ * @return The last iterator of the set
+ */
+ typename std::unordered_set<Element>::const_iterator end() const { return _set.end(); }
+
+private:
+ std::unordered_set<Element> _set;
+};
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_SET_H__
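The operator overloads above give plain set algebra; a short sketch:

  #include <cassert>

  #include "util/Set.h"

  void example()
  {
    neurun::util::Set<int> a, b;
    a.add(1);
    a.add(2);
    b.add(2);
    b.add(3);

    auto u = a | b; // union: {1, 2, 3}
    auto n = a & b; // intersection: {2}
    auto d = a - b; // difference: {1}
    assert(n.getOnlyElement() == 2);
    (void)u;
    (void)d;
  }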
diff --git a/runtimes/neurun/core/include/util/ShapeInference.h b/runtimes/neurun/core/include/util/ShapeInference.h
new file mode 100644
index 000000000..54076199b
--- /dev/null
+++ b/runtimes/neurun/core/include/util/ShapeInference.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_SHAPE_INFERENCE_H__
+#define __NEURUN_GRAPH_SHAPE_INFERENCE_H__
+
+#include "model/operation/AvgPool2DNode.h"
+#include "model/operation/ConcatNode.h"
+#include "model/operation/MaxPool2DNode.h"
+#include "model/operation/Conv2DNode.h"
+#include "model/operation/DepthwiseConv2DNode.h"
+#include "model/Operands.h"
+#include "model/Index.h"
+#include "model/Layout.h"
+
+namespace neurun
+{
+namespace shape_inference
+{
+
+using Shapes = std::vector<model::Shape>;
+
+Shapes inferEltwiseShape(const model::Shape &lhs_shape, const model::Shape &rhs_shape);
+
+Shapes inferAvgPoolShape(const model::Shape &in_shape,
+ const model::operation::AvgPool2DNode::Param &param,
+ model::Layout layout = model::Layout::NHWC);
+
+Shapes inferConcatShape(const Shapes &in_shapes, const model::operation::ConcatNode::Param &param);
+
+Shapes inferMaxPoolShape(const model::Shape &in_shape,
+ const model::operation::MaxPool2DNode::Param &param,
+ model::Layout layout = model::Layout::NHWC);
+
+Shapes inferConv2DShape(const model::Shape &in_shape, const model::Shape &ker_shape,
+ const model::operation::Conv2DNode::Param &param,
+ model::Layout layout = model::Layout::NHWC);
+
+Shapes inferDepthwiseConv2DShape(const model::Shape &in_shape, const model::Shape &ker_shape,
+ const model::operation::DepthwiseConv2DNode::Param &param,
+ model::Layout layout = model::Layout::NHWC);
+
+Shapes inferFullyConnectedShape(const model::Shape &in_shape, const model::Shape &ker_shape);
+
+} // namespace shape_inference
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_SHAPE_INFERENCE_H__
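These are pure shape-to-shape functions, so they can run before any backend is chosen. A hedged sketch; model::Shape's constructor is outside this excerpt and is assumed here to take a dimension list:

  #include "util/ShapeInference.h"

  // hypothetical 1x4x4x3 NHWC shapes
  neurun::model::Shape lhs{1, 4, 4, 3};
  neurun::model::Shape rhs{1, 4, 4, 3};

  // one entry per output; eltwise ops like Add/Sub/Mul have a single output
  auto out = neurun::shape_inference::inferEltwiseShape(lhs, rhs);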
diff --git a/runtimes/neurun/core/include/util/Utils.h b/runtimes/neurun/core/include/util/Utils.h
new file mode 100644
index 000000000..c472dd7c8
--- /dev/null
+++ b/runtimes/neurun/core/include/util/Utils.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Utils.h
+ * @brief This file contains utility functions
+ * @ingroup COM_AI_RUNTIME
+ */
+
+#ifndef __NEURUN_UTIL_UTILS_H__
+#define __NEURUN_UTIL_UTILS_H__
+
+#include "model/InternalType.h"
+#include "model/Layout.h"
+#include "model/Operand.h"
+#include "util/Coordinates.h"
+#include "backend/operand/IObject.h"
+
+#define UNUSED_RELEASE(a) (void)(a)
+
+namespace neurun
+{
+namespace util
+{
+
+/**
+ * @brief Converts an internal padding type to const char*
+ * @param[in] type Padding type to be converted
+ * @return A string holding the converted value
+ */
+const char *to_string(const model::PaddingType &type);
+
+Coordinates convertCoordinates(const Coordinates &from_coordinates, model::Layout from_layout,
+ model::Layout to_layout);
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_UTILS_H__
diff --git a/runtimes/neurun/core/include/util/feature/Coordinate4D.h b/runtimes/neurun/core/include/util/feature/Coordinate4D.h
new file mode 100644
index 000000000..b020ed239
--- /dev/null
+++ b/runtimes/neurun/core/include/util/feature/Coordinate4D.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_FEATURE_COORDINATE_4D_H__
+#define __NEURUN_UTIL_FEATURE_COORDINATE_4D_H__
+
+#include <stdint.h>
+
+namespace neurun
+{
+namespace util
+{
+namespace feature
+{
+
+/**
+ * @brief Class to represent the position (offset) of a subtensor.\n
+ * Assume that parent and child are already lowered (can get Shape4D).
+ */
+class Coordinate4D
+{
+public:
+ /**
+ * @brief Construct a new Coordinate4D object
+ */
+ Coordinate4D(void) : _n{0}, _h{0}, _w{0}, _c{0}
+ {
+ // DO NOTHING
+ }
+ /**
+ * @brief Construct a new Coordinate4D object
+ * @param[in] n Batch offset
+ * @param[in] h Height offset
+ * @param[in] w Width offset
+ * @param[in] c Channel offset
+ * @return
+ */
+ Coordinate4D(int32_t n, int32_t h, int32_t w, int32_t c) : _n{n}, _h{h}, _w{w}, _c{c}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Set batch offset
+ * @param[in] n Batch offset
+ */
+ void n(int32_t n) { _n = n; }
+ /**
+ * @brief Set height offset
+ * @param[in] h Height offset
+ */
+ void h(int32_t h) { _h = h; }
+ /**
+ * @brief Set width offset
+ * @param[in] w Width offset
+ */
+ void w(int32_t w) { _w = w; }
+ /**
+ * @brief Set channel offset
+ * @param[in] c Channel offset
+ */
+ void c(int32_t c) { _c = c; }
+
+public:
+ /**
+ * @brief Return batch offset
+ * @return Batch offset
+ */
+ int32_t n(void) const { return _n; }
+ /**
+ * @brief Return height offset
+ * @return Height offset
+ */
+ int32_t h(void) const { return _h; }
+ /**
+ * @brief Return width offset
+ * @return Width offset
+ */
+ int32_t w(void) const { return _w; }
+ /**
+ * @brief Return channel offset
+ * @return Channel offset
+ */
+ int32_t c(void) const { return _c; }
+
+private:
+ int32_t _n;
+ int32_t _h;
+ int32_t _w;
+ int32_t _c;
+};
+
+} // namespace feature
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_FEATURE_COORDINATE_4D_H__
diff --git a/runtimes/neurun/core/include/util/feature/nchw/View.h b/runtimes/neurun/core/include/util/feature/nchw/View.h
new file mode 100644
index 000000000..37ee8e398
--- /dev/null
+++ b/runtimes/neurun/core/include/util/feature/nchw/View.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_FEATURE_NCHW_VIEW_H__
+#define __NEURUN_UTIL_FEATURE_NCHW_VIEW_H__
+
+#include "misc/feature/Reader.h"
+#include "misc/feature/Shape.h"
+
+#include "backend/operand/ITensor.h"
+#include "util/Coordinates.h"
+
+#include <cassert>
+
+namespace neurun
+{
+namespace util
+{
+namespace feature
+{
+namespace nchw
+{
+
+template <typename T> class View final : public nnfw::misc::feature::Reader<T>
+{
+public:
+ View(::neurun::backend::operand::ITensor *tensor) : _tensor{tensor}
+ {
+ assert(tensor->num_dimensions() == 4 && tensor->layout() == model::Layout::NCHW);
+ _shape.N = tensor->dimension(0);
+ _shape.C = tensor->dimension(1);
+ _shape.H = tensor->dimension(2);
+ _shape.W = tensor->dimension(3);
+ }
+
+public:
+ const ::nnfw::misc::feature::Shape &shape(void) const { return _shape; }
+
+public:
+ T at(uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ const auto offset = feature_index_to_byte_offset(0, ch, row, col);
+
+ T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
+
+ return *ptr;
+ }
+ T at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ const auto offset = feature_index_to_byte_offset(batch, ch, row, col);
+
+ T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
+
+ return *ptr;
+ }
+
+public:
+ T &at(uint32_t ch, uint32_t row, uint32_t col)
+ {
+ const auto offset = feature_index_to_byte_offset(0, ch, row, col);
+
+ T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
+
+ return *ptr;
+ }
+ T &at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col)
+ {
+ const auto offset = feature_index_to_byte_offset(batch, ch, row, col);
+
+ T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
+
+ return *ptr;
+ }
+
+private:
+ size_t feature_index_to_byte_offset(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const
+ {
+ return _tensor->calcOffset(
+ neurun::util::Coordinates{static_cast<int32_t>(batch), static_cast<int32_t>(ch),
+ static_cast<int32_t>(row), static_cast<int32_t>(col)});
+ }
+
+private:
+ ::nnfw::misc::feature::Shape _shape;
+ ::neurun::backend::operand::ITensor *_tensor;
+};
+
+} // namespace nchw
+} // namespace feature
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_FEATURE_NCHW_VIEW_H__
diff --git a/runtimes/neurun/core/include/util/feature/nhwc/Reader.h b/runtimes/neurun/core/include/util/feature/nhwc/Reader.h
new file mode 100644
index 000000000..471f62a4b
--- /dev/null
+++ b/runtimes/neurun/core/include/util/feature/nhwc/Reader.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_FEATURE_NHWC_READER_H__
+#define __NEURUN_UTIL_FEATURE_NHWC_READER_H__
+
+#include "util/Utils.h"
+#include "Utils.h"
+
+#include "misc/feature/Reader.h"
+
+namespace neurun
+{
+namespace util
+{
+namespace feature
+{
+namespace nhwc
+{
+
+template <typename T> class Reader final : public nnfw::misc::feature::Reader<T>
+{
+public:
+ Reader(const ::nnfw::misc::feature::Shape &shape, const T *ptr, size_t len)
+ : _shape{shape}, _ptr{ptr}
+ {
+ UNUSED_RELEASE(len); // Workaround for unused variable in release mode
+ assert(shape.N * shape.C * shape.H * shape.W * sizeof(T) == len);
+ }
+
+public:
+ const nnfw::misc::feature::Shape &shape(void) const { return _shape; }
+
+public:
+ T at(uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ uint32_t index = index_of(_shape, ch, row, col);
+
+ return _ptr[index];
+ }
+ T at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ uint32_t index = index_of(_shape, batch, ch, row, col);
+
+ return _ptr[index];
+ }
+
+private:
+ nnfw::misc::feature::Shape _shape;
+
+private:
+ const T *_ptr;
+};
+
+} // namespace nhwc
+} // namespace feature
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_FEATURE_NHWC_READER_H__
diff --git a/runtimes/neurun/src/util/feature/nhwc/Utils.h b/runtimes/neurun/core/include/util/feature/nhwc/Utils.h
index 3dab4261c..3dab4261c 100644
--- a/runtimes/neurun/src/util/feature/nhwc/Utils.h
+++ b/runtimes/neurun/core/include/util/feature/nhwc/Utils.h
diff --git a/runtimes/neurun/core/include/util/feature/nhwc/View.h b/runtimes/neurun/core/include/util/feature/nhwc/View.h
new file mode 100644
index 000000000..cfaab8ea4
--- /dev/null
+++ b/runtimes/neurun/core/include/util/feature/nhwc/View.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_FEATURE_NHWC_VIEW_H__
+#define __NEURUN_UTIL_FEATURE_NHWC_VIEW_H__
+
+#include <cassert>
+#include <cstddef>
+
+#include "Utils.h"
+#include "util/Utils.h"
+
+#include "misc/feature/Reader.h"
+
+namespace neurun
+{
+namespace util
+{
+namespace feature
+{
+namespace nhwc
+{
+
+// This class is for CPU buffers only, and it does not support padding.
+template <typename T> class View final : public nnfw::misc::feature::Reader<T>
+{
+public:
+ View(const ::nnfw::misc::feature::Shape &shape, T *ptr, size_t len) : _shape{shape}, _ptr{ptr}
+ {
+ UNUSED_RELEASE(len); // Workaround for unused variable in release mode
+ assert(shape.N * shape.C * shape.H * shape.W * sizeof(T) == len);
+ }
+
+public:
+ const nnfw::misc::feature::Shape &shape(void) const { return _shape; }
+
+public:
+ T at(uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ uint32_t index = index_of(_shape, ch, row, col);
+
+ return _ptr[index];
+ }
+
+ T at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const override
+ {
+ uint32_t index = index_of(_shape, batch, ch, row, col);
+
+ return _ptr[index];
+ }
+
+ T &at(uint32_t ch, uint32_t row, uint32_t col)
+ {
+ uint32_t index = index_of(_shape, ch, row, col);
+
+ return _ptr[index];
+ }
+
+ T &at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col)
+ {
+ uint32_t index = index_of(_shape, batch, ch, row, col);
+
+ return _ptr[index];
+ }
+
+private:
+ nnfw::misc::feature::Shape _shape;
+
+private:
+ T *_ptr;
+};
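+
+// Note: index_of() is provided by Utils.h; for this packed buffer it is expected to compute the
+// standard NHWC linearization (a sketch, assuming no padding):
+//   index = ((batch * shape.H + row) * shape.W + col) * shape.C + ch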
+
+} // namespace nhwc
+} // namespace feature
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_FEATURE_NHWC_VIEW_H__
diff --git a/runtimes/neurun/core/include/util/logging.h b/runtimes/neurun/core/include/util/logging.h
new file mode 100644
index 000000000..a2fdbdd59
--- /dev/null
+++ b/runtimes/neurun/core/include/util/logging.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_LOGGING_H__
+#define __NEURUN_UTIL_LOGGING_H__
+
+#include <iostream>
+
+#include "util/ConfigSource.h"
+
+namespace neurun
+{
+namespace util
+{
+namespace logging
+{
+
+class Context
+{
+public:
+ Context() : _enabled{false}
+ {
+ const auto env = util::getConfigBool(util::config::NEURUN_LOG_ENABLE);
+
+ if (env)
+ {
+ _enabled = true;
+ }
+ }
+
+public:
+ bool enabled(void) const { return _enabled; }
+
+private:
+ bool _enabled;
+};
+
+static Context ctx;
+
+} // namespace logging
+} // namespace util
+} // namespace neurun
+
+#define VERBOSE(name) \
+ if (::neurun::util::logging::ctx.enabled()) \
+ std::cout << "[" << #name << "] "
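+
+// Usage sketch: the chained stream insertion becomes the body of the `if`, so the message is
+// built and emitted only when NEURUN_LOG_ENABLE is set (tag and operands are illustrative):
+//   VERBOSE(MyPass) << "visiting operation " << index.value() << std::endl;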
+
+#endif // __NEURUN_UTIL_LOGGING_H__
diff --git a/runtimes/neurun/core/src/backend/Backend.cc b/runtimes/neurun/core/src/backend/Backend.cc
new file mode 100644
index 000000000..c2f745f8f
--- /dev/null
+++ b/runtimes/neurun/core/src/backend/Backend.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "backend/Backend.h"
+
+#include "backend/IConfig.h"
+#include "backend/ITensorBuilder.h"
+#include "backend/IKernelGenerator.h"
+#include "backend/IShapeFixer.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/backend/BackendManager.cc b/runtimes/neurun/core/src/backend/BackendManager.cc
new file mode 100644
index 000000000..155f7f51a
--- /dev/null
+++ b/runtimes/neurun/core/src/backend/BackendManager.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <memory>
+#include <dlfcn.h>
+#include "BackendManager.h"
+
+#include "backend/Backend.h"
+#include "backend/IConfig.h"
+#include "util/logging.h"
+#include "util/ConfigSource.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+BackendManager &BackendManager::instance()
+{
+ static BackendManager object;
+ return object;
+}
+
+template <typename T, class... Types>
+void BackendManager::loadObjectFromPlugin(std::shared_ptr<T> &object_of_plugin_class,
+ const std::string obj_creator_func_name, void *handle,
+ Types &&... args)
+{
+ T *(*allocate_obj)(Types && ... Args);
+ // load object creator function
+ allocate_obj = (T * (*)(Types && ... Args))dlsym(handle, obj_creator_func_name.c_str());
+ if (allocate_obj == nullptr)
+ {
+ fprintf(stderr, "BackendManager: unable to open function %s: %s\n",
+ obj_creator_func_name.c_str(), dlerror());
+ abort();
+ }
+
+ object_of_plugin_class.reset(allocate_obj(args...));
+}
+
+void BackendManager::loadBackend(const std::string &backend)
+{
+ const std::string backend_plugin = "libbackend_" + backend + ".so";
+ void *handle = dlopen(backend_plugin.c_str(), RTLD_LAZY | RTLD_LOCAL);
+ if (handle == nullptr)
+ {
+ fprintf(stderr, "BackendManager::loadBackend failed to load plugin of %s backend: %s\n",
+ backend.c_str(), dlerror());
+ abort();
+ }
+ VERBOSE(BackendManager::loadBackend) << "loaded " << backend_plugin << " as a plugin of "
+ << backend << " backend\n";
+
+ {
+ // load object creator function
+ auto backend_create = (backend_create_t)dlsym(handle, "neurun_backend_create");
+ if (backend_create == nullptr)
+ {
+ fprintf(stderr, "BackendManager: unable to open function neurun_backend_create : %s\n",
+ dlerror());
+ abort();
+ }
+
+    // load object destroy function
+ auto backend_destroy = (backend_destroy_t)dlsym(handle, "neurun_backend_destroy");
+ if (backend_destroy == nullptr)
+ {
+ fprintf(stderr, "BackendManager: unable to open function neurun_backend_destroy : %s\n",
+ dlerror());
+ abort();
+ }
+
+ auto backend_object =
+ std::unique_ptr<backend::Backend, backend_destroy_t>(backend_create(), backend_destroy);
+ auto backend_object_raw = backend_object.get();
+ backend_object->config()->initialize(); // Call initialize here?
+ _gen_map.emplace(backend_object->config()->id(), std::move(backend_object));
+ _available_backends.push_back(backend_object_raw);
+ }
+
+ // Save backend handle (avoid warning by handle lost without dlclose())
+ _handle_map.emplace(backend, handle);
+}
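+
+// For reference, a backend plugin is expected to export the two entry points resolved above with
+// C linkage. A sketch (MyBackend is a hypothetical Backend subclass):
+//   extern "C" neurun::backend::Backend *neurun_backend_create() { return new MyBackend; }
+//   extern "C" void neurun_backend_destroy(neurun::backend::Backend *b) { delete b; }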
+
+BackendManager::BackendManager()
+{
+ const auto backends = util::getConfigString(util::config::BACKENDS);
+ size_t prev_pos = 0;
+ auto pos = backends.find(";");
+ while (pos != std::string::npos)
+ {
+ loadBackend(backends.substr(prev_pos, pos - prev_pos));
+ prev_pos = pos + 1;
+ pos = backends.find(";", prev_pos);
+ }
+  // load the last backend if the list does not end with ";"
+ if (prev_pos < backends.size())
+ {
+ loadBackend(backends.substr(prev_pos));
+ }
+}
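+
+// Parsing sketch: with BACKENDS="acl_neon;cpu" the loop above loads "acl_neon" first, and the
+// trailing check then loads "cpu" since the list does not end with ";".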
+
+Backend *BackendManager::get(const std::string &key) { return _gen_map.at(key).get(); }
+
+const Backend *BackendManager::get(const std::string &key) const { return _gen_map.at(key).get(); }
+
+const Backend *BackendManager::getDefault() const { return get("cpu"); }
+
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/backend/BackendManager.h b/runtimes/neurun/core/src/backend/BackendManager.h
new file mode 100644
index 000000000..ef102123c
--- /dev/null
+++ b/runtimes/neurun/core/src/backend/BackendManager.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_BACKEND_BACKEND_MANAGER_H__
+#define __NEURUN_BACKEND_BACKEND_MANAGER_H__
+
+#include <memory>
+#include <map>
+
+#include "model/Operands.h"
+#include "backend/Backend.h"
+
+namespace neurun
+{
+namespace backend
+{
+
+class BackendManager
+{
+public:
+ using backend_create_t = Backend *(*)();
+ using backend_destroy_t = void (*)(Backend *);
+
+ static BackendManager &instance();
+
+public:
+ Backend *get(const std::string &key);
+ const Backend *get(const std::string &key) const;
+ const Backend *getDefault() const;
+ const std::vector<const Backend *> &getAll() const { return _available_backends; };
+
+private:
+ BackendManager();
+
+private:
+ std::vector<const Backend *> _available_backends;
+ std::map<std::string, std::unique_ptr<Backend, backend_destroy_t>> _gen_map;
+ std::map<std::string, void *> _handle_map;
+ /**
+   * @brief Allocate an object of a plugin class by loading the plugin function that performs
+   *        the allocation and calling it
+ *
+ * @param object_of_plugin_class target object
+ * @param obj_creator_func_name name of the plugin function, that allocates an object
+ * @param handle handle of the plugin
+ * @param args arguments to pass to constructor of the plugin class
+ *
+ * @return
+ */
+ template <typename T, class... Types>
+ void loadObjectFromPlugin(std::shared_ptr<T> &object_of_plugin_class,
+ const std::string obj_creator_func_name, void *handle,
+ Types &&... args);
+
+ /**
+ * @brief load backend plugin
+ *
+ * @param backend backend to be loaded
+ *
+ * @return
+ */
+ void loadBackend(const std::string &backend);
+};
+
+} // namespace backend
+} // namespace neurun
+
+#endif // __NEURUN_BACKEND_BACKEND_MANAGER_H__
diff --git a/runtimes/neurun/core/src/backend/CustomKernel.cc b/runtimes/neurun/core/src/backend/CustomKernel.cc
new file mode 100644
index 000000000..198e223cf
--- /dev/null
+++ b/runtimes/neurun/core/src/backend/CustomKernel.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "backend/CustomKernel.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace custom
+{
+
+// TODO move this elsewhere
+class APIConverter
+{
+public:
+ static nnfw_operand convertOperand(void *alloc, const TypeInfo &type)
+ {
+ nnfw_operand api_operand;
+ api_operand.allocation = alloc;
+ api_operand.type = convertType(type);
+ return api_operand;
+ }
+
+ static nnfw_tensorinfo convertType(const TypeInfo &type)
+ {
+ nnfw_tensorinfo api_type;
+ api_type.rank = type.shape.rank();
+ assert(type.shape.rank() <= 6);
+ std::copy(type.shape.dims().begin(), type.shape.dims().end(), std::begin(api_type.dims));
+
+ switch (type.dtype)
+ {
+ case model::DataType::FLOAT32:
+ api_type.dtype = NNFW_TYPE_TENSOR_FLOAT32;
+ break;
+ case model::DataType::INT32:
+ api_type.dtype = NNFW_TYPE_TENSOR_INT32;
+ break;
+ case model::DataType::QUANT8_ASYMM:
+ api_type.dtype = NNFW_TYPE_TENSOR_QUANT8_ASYMM;
+ break;
+ case model::DataType::BOOL8:
+ api_type.dtype = NNFW_TYPE_TENSOR_BOOL;
+ break;
+ default:
+ throw std::runtime_error("Unsupported tensor datatype");
+ }
+ return api_type;
+ }
+};
+
+Kernel::Kernel(const nnfw_custom_eval evalFunction)
+ : _params(), _userdata(nullptr), _userdata_size(0), _evalFunction(evalFunction)
+{
+}
+
+void Kernel::configure(Kernel::CustomKernelConfigParams &&inParams)
+{
+ _userdata = inParams.userdata;
+ _userdata_size = inParams.userdata_size;
+
+ _params.ninputs = inParams.input_allocations.size();
+ _params.inputs = new nnfw_operand[_params.ninputs];
+ for (size_t i = 0; i < _params.ninputs; ++i)
+ {
+ _params.inputs[i] =
+ APIConverter::convertOperand(inParams.input_allocations[i], inParams.input_types[i]);
+ }
+
+ _params.noutputs = inParams.output_allocations.size();
+ _params.outputs = new nnfw_operand[_params.noutputs];
+ for (size_t i = 0; i < _params.noutputs; ++i)
+ {
+ _params.outputs[i] =
+ APIConverter::convertOperand(inParams.output_allocations[i], inParams.output_types[i]);
+ }
+}
+
+void Kernel::run() { _evalFunction(&_params, _userdata, _userdata_size); }
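+
+// The registered eval function receives a pointer to the converted params struct together with
+// the opaque userdata blob. A user kernel is therefore shaped roughly like the sketch below
+// (illustrative; the exact parameter types are those of the nnfw_custom_eval typedef):
+//   void my_custom_eval(nnfw_custom_kernel_params *params, char *userdata, size_t userdata_size);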
+
+} // namespace custom
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/backend/CustomKernelRegistry.cc b/runtimes/neurun/core/src/backend/CustomKernelRegistry.cc
new file mode 100644
index 000000000..4acab70a9
--- /dev/null
+++ b/runtimes/neurun/core/src/backend/CustomKernelRegistry.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "backend/CustomKernelRegistry.h"
+
+#include "cpp14/memory.h"
+
+namespace neurun
+{
+namespace backend
+{
+namespace custom
+{
+
+void KernelRegistry::registerKernel(const std::string &id, nnfw_custom_eval evalFunction)
+{
+ _storage.emplace(id, evalFunction);
+}
+
+std::unique_ptr<Kernel> KernelRegistry::buildKernelForOp(const std::string &id)
+{
+ auto it = _storage.find(id);
+ if (it == _storage.end())
+ {
+ throw std::runtime_error("Unable to find associated kernel for op");
+ }
+
+ return nnfw::cpp14::make_unique<custom::Kernel>(it->second);
+}
+
+} // namespace custom
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/backend/ExecTime.cc b/runtimes/neurun/core/src/backend/ExecTime.cc
new file mode 100644
index 000000000..d5aa679d7
--- /dev/null
+++ b/runtimes/neurun/core/src/backend/ExecTime.cc
@@ -0,0 +1,133 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "backend/ExecTime.h"
+
+#include <fstream>
+#include <cassert>
+#include <limits>
+#include <algorithm>
+
+namespace neurun
+{
+namespace backend
+{
+
+int64_t ExecTime::getOperationExecTime(const Backend *backend, const std::string &operation,
+ bool quant, uint32_t op_size) const
+{
+ auto found_backend = _measurements.find(backend);
+ if (found_backend == _measurements.end())
+ return NOT_FOUND; // no execution time for this backend
+
+ auto found_operation_with_type = found_backend->second.find(operation);
+ if (found_operation_with_type == found_backend->second.end())
+ // no execution time for this operation
+ return NOT_FOUND;
+
+ auto found_operation = found_operation_with_type->second.find(quant);
+ if (found_operation == found_operation_with_type->second.end())
+ // no execution time for this operation
+ return NOT_FOUND;
+
+ auto found_size = found_operation->second.find(op_size);
+ if (found_size != found_operation->second.end())
+ return found_size->second; // found execution time
+
+ // Try to interpolate
+ if (found_operation->second.size() < 2)
+ // not possible to do linear interpolation
+ return found_operation->second.begin()->second;
+
+  // If we reach here, there is no record whose size equals op_size
+ auto upper_bound = found_operation->second.upper_bound(op_size); // > op_size
+ auto lower_bound = upper_bound;
+
+ if (upper_bound == found_operation->second.end()) // all values <= op_size
+ {
+ upper_bound--;
+ lower_bound = upper_bound;
+ lower_bound--;
+ }
+ else if (upper_bound == found_operation->second.begin()) // all values > op_size
+ {
+ upper_bound++;
+ }
+ else // op_size between
+ {
+ lower_bound--;
+ }
+
+ // Linear interpolation
+ const auto x0 = static_cast<int64_t>(lower_bound->first); // size
+ const auto x1 = static_cast<int64_t>(upper_bound->first); // size
+ const int64_t y0 = lower_bound->second; // time
+ const int64_t y1 = upper_bound->second; // time
+ const auto x = static_cast<int64_t>(op_size);
+
+ int64_t interpolated_value = y0 + (x - x0) * (y1 - y0) / (x1 - x0);
+
+  // In some cases ops with smaller inputs are executed slower than ones
+  // with larger inputs, most likely because of a difference in backend load
+ if (interpolated_value < 0 && x > x1)
+ {
+ return y0;
+ }
+  // The interpolated value can be non-positive ONLY if op_size is less than both recorded sizes
+ assert(interpolated_value > 0 || x < x0);
+
+ // execution time must be non-negative
+ return std::max<int64_t>(interpolated_value, 1);
+}
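+
+// Interpolation sketch: with recorded points (size 100 -> 10) and (size 300 -> 30), a query for
+// op_size 200 yields 10 + (200 - 100) * (30 - 10) / (300 - 100) = 20.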
+
+void ExecTime::updateOperationExecTime(const Backend *backend, const std::string &operation,
+ bool quant, uint32_t op_size, int64_t time)
+{
+ // If the op is not implemented for some input, it should not be scheduled
+ const auto &recs = _measurements[backend][operation][quant];
+ if (time == getMax() ||
+ std::any_of(recs.begin(), recs.end(),
+ [](std::pair<const uint32_t, const int64_t> p) { return p.second == getMax(); }))
+ {
+ _measurements[backend][operation][quant].clear();
+ _measurements[backend][operation][quant].emplace(op_size, getMax());
+ }
+ else
+ {
+ auto it = _measurements[backend][operation][quant].emplace(op_size, time);
+ if (!it.second)
+ {
+      // The effect of the last measurement is bigger than that of the previous ones:
+      // this prefers new metrics over older ones, so it adapts to backend changes
+ it.first->second = (it.first->second + time) / 2;
+ }
+ }
+}
+
+void ExecTime::updatePermuteTime(const Backend *from_backend, const Backend *to_backend, bool quant,
+ uint32_t op_size, int64_t time)
+{
+ updateOperationExecTime(from_backend, to_backend->config()->id(), quant, op_size, time);
+}
+
+int64_t ExecTime::getPermuteTime(const Backend *from_backend, const Backend *to_backend, bool quant,
+ uint32_t op_size) const
+{
+ return getOperationExecTime(from_backend, to_backend->config()->id(), quant, op_size);
+}
+
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/backend/JSONExecTime.cc b/runtimes/neurun/core/src/backend/JSONExecTime.cc
new file mode 100644
index 000000000..e2404b2c8
--- /dev/null
+++ b/runtimes/neurun/core/src/backend/JSONExecTime.cc
@@ -0,0 +1,231 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "backend/JSONExecTime.h"
+#include "backend/IConfig.h"
+#include <fstream>
+
+namespace neurun
+{
+namespace backend
+{
+/**
+ * @brief Helper function for reading string from stream
+ *
+ * @param str Output string
+ * @param stream File stream
+ */
+void readString(std::string &str, std::ifstream &stream)
+{
+ str.clear();
+ char buf;
+ while (stream.good())
+ {
+ stream.get(buf);
+ if (buf == '"')
+ break;
+ str.push_back(buf);
+ }
+}
+
+/**
+ * @brief Helper function for reading bool from stream
+ *
+ * @param quant Output bool
+ * @param stream File stream
+ */
+void readBool(bool &quant, std::ifstream &stream)
+{
+ char buf;
+ stream.get(buf);
+ quant = (buf == '1');
+ stream.get(buf);
+}
+
+void printString(const std::string &str, std::ofstream &stream) { stream << "\"" << str << "\""; }
+
+void printBool(bool quant, std::ofstream &stream) { stream << "\"" << quant << "\""; }
+
+void JSON::readOperation(const std::string &backend, const std::string &operation, bool quant,
+ std::ifstream &stream)
+{
+ uint32_t size = 0;
+ int64_t time = 0;
+
+ std::string int_buf;
+ char buf;
+ int number_of_closed_braces = 0;
+ int number_of_commas = 0;
+
+ while (stream.good())
+ {
+ stream.get(buf);
+
+ switch (buf)
+ {
+ case ']':
+ {
+ number_of_closed_braces++;
+ break;
+ }
+ case '[':
+ {
+ number_of_closed_braces--;
+ break;
+ }
+ default:
+ {
+ if (std::isdigit(buf))
+ {
+ int_buf.push_back(buf);
+ }
+ break;
+ }
+ }
+
+ if (number_of_closed_braces == 1)
+ break;
+
+ if ((buf == ']' && number_of_closed_braces == 0) ||
+ (buf == ',' && number_of_closed_braces == -1))
+ {
+ switch (number_of_commas % 2)
+ {
+ case 0:
+ {
+ size = static_cast<uint32_t>(std::atoi(int_buf.c_str()));
+ break;
+ }
+ case 1:
+ {
+ time = static_cast<int64_t>(std::atol(int_buf.c_str()));
+ auto bf = _backends.find(backend);
+ if (bf != _backends.end())
+ {
+ _measurements[bf->second][operation][quant][size] = time;
+ } // we ignore the records for unsupported backends
+ break;
+ }
+ }
+ number_of_commas++;
+ int_buf.clear();
+ }
+ }
+}
+void JSON::printOperation(const std::map<uint32_t, int64_t> &operation_info,
+ std::ofstream &stream) const
+{
+ for (const auto &items : operation_info)
+ {
+ stream << "[" << items.first << ", " << items.second << "], ";
+ }
+ stream.seekp(-2, std::ofstream::end);
+}
+
+void JSON::uploadOperationsExecTime() const
+{
+ std::ofstream stream(_measurement_file);
+ if (!stream.is_open())
+ {
+ throw std::runtime_error("Failed to save backend config file");
+ }
+ else
+ {
+ stream << "{";
+ for (const auto &backend : _measurements)
+ {
+ printString(backend.first->config()->id(), stream);
+ stream << ": {";
+ for (const auto &operation : backend.second)
+ {
+ printString(operation.first, stream);
+ stream << ": {";
+ for (const auto &type : operation.second)
+ {
+ printBool(type.first, stream);
+ stream << ": [";
+ printOperation(type.second, stream);
+ stream << "], ";
+ }
+ stream.seekp(-2, std::ofstream::end);
+ stream << "}, ";
+ }
+ stream.seekp(-2, std::ofstream::end);
+ stream << "}, ";
+ }
+ stream.seekp(-2, std::ofstream::end);
+ stream << "}";
+ stream.close();
+ }
+}
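+
+// The emitted file is a nested JSON-like map; an illustrative shape (backend/operation names are
+// examples only):
+//   {"cpu": {"Add": {"0": [[<op size>, <time>], ...], "1": [[<op size>, <time>], ...]}}}
+// where "0"/"1" is the quantization flag written by printBool() above.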
+
+void JSON::loadOperationsExecTime()
+{
+ std::ifstream stream(_measurement_file);
+ if (stream.is_open())
+ {
+ std::string backend;
+ std::string operation;
+ bool quant = false;
+ char buf;
+ int number_of_open_braces = 0;
+
+ while (stream.good())
+ {
+ stream.get(buf);
+ switch (buf)
+ {
+ case '{':
+ number_of_open_braces++;
+ break;
+ case '}':
+ number_of_open_braces--;
+ break;
+ case '"':
+ {
+ if (number_of_open_braces == 1)
+ {
+ // read backend string
+ readString(backend, stream);
+ }
+ if (number_of_open_braces == 2)
+ {
+ // read operation string
+ readString(operation, stream);
+ }
+ if (number_of_open_braces == 3)
+ {
+            // read quantization flag
+ readBool(quant, stream);
+ }
+ break;
+ }
+ case '[':
+ {
+ // reading and creating all info for operation
+ readOperation(backend, operation, quant, stream);
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ stream.close();
+ }
+}
+
+} // namespace backend
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/BackendResolver.cc b/runtimes/neurun/core/src/compiler/BackendResolver.cc
new file mode 100644
index 000000000..0c544190c
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/BackendResolver.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "BackendResolver.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+BackendResolver::BackendResolver(const BackendResolver &obj)
+ : _context_manager{}, _gen_map{obj._gen_map}
+{
+ for (const auto &e : obj._context_manager)
+ {
+ _context_manager.emplace(e.first, nnfw::cpp14::make_unique<backend::BackendContext>(*e.second));
+ }
+}
+
+BackendResolver &BackendResolver::operator=(const BackendResolver &obj)
+{
+ _gen_map = obj._gen_map;
+
+ _context_manager.clear();
+ for (const auto &e : obj._context_manager)
+ {
+ _context_manager.emplace(e.first, nnfw::cpp14::make_unique<backend::BackendContext>(*e.second));
+ }
+
+ return *this;
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/BackendResolver.h b/runtimes/neurun/core/src/compiler/BackendResolver.h
new file mode 100644
index 000000000..248ef2f2e
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/BackendResolver.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_BACKEND_RESOLVER_H__
+#define __NEURUN_COMPILER_BACKEND_RESOLVER_H__
+
+#include <unordered_map>
+#include <typeindex>
+
+#include "util/logging.h"
+#include "backend/Backend.h"
+#include "backend/BackendManager.h"
+#include "backend/ITensorBuilder.h"
+#include "model/OperationIndexMap.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+class BackendResolver
+{
+public:
+ BackendResolver(const model::Operands &operands,
+ const std::vector<const backend::Backend *> &backends,
+ const std::shared_ptr<backend::custom::KernelRegistry> &registry)
+ {
+ for (const auto backend : backends)
+ {
+ _context_manager.emplace(backend, backend->newContext(operands, registry));
+ }
+ }
+
+ ~BackendResolver() = default;
+ BackendResolver(const BackendResolver &obj);
+ BackendResolver(BackendResolver &&obj) = default;
+ BackendResolver &operator=(const BackendResolver &obj);
+ BackendResolver &operator=(BackendResolver &&obj) = default;
+
+public:
+ const backend::BackendContext *getBackendContext(const model::OperationIndex &index) const
+ {
+ return _context_manager.at(_gen_map.at(index)).get();
+ }
+
+ const backend::BackendContext *getBackendContext(const backend::Backend *backend) const
+ {
+ return _context_manager.at(backend).get();
+ }
+
+ backend::TensorBuilderSet tensor_builders() const
+ {
+ backend::TensorBuilderSet ret;
+ for (const auto &e : _context_manager)
+ {
+ ret.insert(e.second->tensor_builder);
+ }
+ return ret;
+ }
+
+ const backend::Backend *getBackend(const model::OperationIndex &index) const
+ {
+ return getBackendContext(index)->backend;
+ }
+
+ void setBackend(const model::OperationIndex &index, const backend::Backend *backend)
+ {
+ _gen_map[index] = backend;
+ }
+
+ void iterate(const std::function<void(const model::OperationIndex &,
+ const backend::BackendContext &)> &fn) const
+ {
+ for (const auto &e : _gen_map)
+ {
+ fn(e.first, *_context_manager.at(e.second));
+ }
+ }
+
+private:
+ std::unordered_map<const backend::Backend *, std::unique_ptr<backend::BackendContext>>
+ _context_manager;
+ model::OperationIndexMap<const backend::Backend *> _gen_map;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_BACKEND_RESOLVER_H__
diff --git a/runtimes/neurun/core/src/compiler/Compiler.cc b/runtimes/neurun/core/src/compiler/Compiler.cc
new file mode 100644
index 000000000..6a378faa9
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/Compiler.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "compiler/Compiler.h"
+
+#include "BackendResolver.h"
+#include "ParamChecker.h"
+#include "ExecutorFactory.h"
+
+#include "compiler/IScheduler.h"
+#include "compiler/ManualScheduler.h"
+#include "compiler/HEScheduler.h"
+#include "backend/ExecTime.h"
+#include "graph/operation/LowerInfo.h"
+#include "dumper/dot/DotDumper.h"
+#include "compiler/Linear.h"
+#include "exec/interp/ExecManager.h"
+#include "backend/ExecTime.h"
+#include "util/ConfigSource.h"
+
+namespace neurun
+{
+
+namespace compiler
+{
+
+void Compiler::compile(void)
+{
+ _state = State::STARTED;
+
+ if (!checkCompilable())
+ {
+ _executor = std::make_shared<exec::interp::ExecManager>(_graph->shareModel());
+ return;
+ }
+
+ /***************************************************
+ * Backend independent analysis & optimization phase
+ ***************************************************/
+ // Schedule
+ std::unique_ptr<BackendResolver> br;
+ std::shared_ptr<model::OperationIndexMap<int64_t>> indexed_ranks;
+ if (util::getConfigBool(util::config::USE_SCHEDULER))
+ {
+ auto scheduler =
+ compiler::HEScheduler(_graph->operands(), backend::BackendManager::instance().getAll(),
+ _graph->getKernelRegistry());
+ br = scheduler.schedule(*_graph);
+ indexed_ranks = scheduler.getIndexedRanks();
+ }
+ else
+ {
+ auto scheduler = compiler::ManualScheduler();
+ br = scheduler.schedule(*_graph);
+ }
+ _graph->setBackendResolver(std::move(br));
+ /*************************************************************
+ * Backend independent analysis & optimization phase finished
+ *************************************************************/
+
+ // dump graph to .dot
+ auto dump_level =
+ static_cast<dumper::dot::DotDumper::Level>(util::getConfigInt(util::config::GRAPH_DOT_DUMP));
+ neurun::dumper::dot::DotDumper dot_dumper(*_graph, dump_level);
+ dot_dumper.dump("before_lower");
+
+ // Lower: decide backend
+ _graph->lower();
+ _state = State::LOWERED;
+
+ dot_dumper.dump("after_lower");
+
+ const std::string executor_str = util::getConfigString(util::config::EXECUTOR);
+
+ _executor =
+ std::shared_ptr<exec::IExecutor>{ExecutorFactory::instance().create(executor_str, *_graph)};
+ _executor->setIndexedRanks(indexed_ranks);
+ /********************************
+ * Code generation phase finished
+ ********************************/
+ _state = State::COMPILED;
+}
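+
+// Pipeline summary (as implemented above): STARTED -> schedule (HEScheduler or ManualScheduler)
+// -> lower -> LOWERED -> build executor via ExecutorFactory -> COMPILED. If checkCompilable()
+// fails, the interpreter ExecManager is used instead and the state stays STARTED.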
+
+bool Compiler::checkCompilable()
+{
+ // Disable compile phase
+ // When ready to use interpreter backend, remove this config and use backend setting
+ const auto env_disable_compile = util::getConfigBool(util::config::DISABLE_COMPILE);
+ if (env_disable_compile)
+ {
+ return false;
+ }
+
+ // TODO check unspecified operand shape
+
+ // Check compilable parameter
+ ParamChecker paramChecker{_graph};
+ paramChecker();
+ if (paramChecker.haveNoneConstParam())
+ {
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace compiler
+
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/ExecutorFactory.cc b/runtimes/neurun/core/src/compiler/ExecutorFactory.cc
new file mode 100644
index 000000000..2ff32a57e
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/ExecutorFactory.cc
@@ -0,0 +1,351 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ExecutorFactory.h"
+
+#include <functional>
+#include "exec/ExecutionObservers.h"
+#include "exec/LinearExecutor.h"
+#include "exec/DataflowExecutor.h"
+#include "exec/ParallelExecutor.h"
+#include "compiler/BackendResolver.h"
+#include "backend/ExecTime.h"
+#include "compiler/Linear.h"
+#include "graph/dumper/Dumper.h"
+#include "OperationValidator.h"
+#include "SubTensorAnalyzer.h"
+#include "backend/IConstantInitializer.h"
+#include "backend/IKernelGenerator.h"
+#include "backend/IShapeFixer.h"
+#include "cpp14/memory.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+ExecutorFactory &ExecutorFactory::instance()
+{
+ static ExecutorFactory singleton;
+ return singleton;
+}
+
+ExecutorFactory::ExecutorFactory()
+{
+ _map["Linear"] = createLinearExecutor;
+ _map["Dataflow"] = std::bind(createDataflowExecutor, std::placeholders::_1, false);
+ _map["Parallel"] = std::bind(createDataflowExecutor, std::placeholders::_1, true);
+}
+
+exec::IExecutor *ExecutorFactory::create(const std::string &id, graph::Graph &graph)
+{
+ return _map.at(id)(graph);
+}
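+
+// Usage sketch: the EXECUTOR config string selects one of the factories registered in the
+// constructor, e.g.:
+//   auto *executor = ExecutorFactory::instance().create("Parallel", graph);
+// An unknown id throws std::out_of_range from _map.at().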
+
+exec::IExecutor *ExecutorFactory::createLinearExecutor(graph::Graph &graph)
+{
+ auto operand_context = std::make_shared<OperandContext>();
+ const auto &operands = graph.operands();
+
+  // Compilation results will be filled into operand_context and function_sequence
+ auto function_sequence = std::make_shared<exec::FunctionSequence>();
+
+ // linearize
+ auto linear = graph.linearize();
+
+ // Dump ops
+ linear->accept(neurun::graph::dumper::Dumper{});
+
+ linear->accept(OperationValidator{operands});
+
+ /*************************************************
+ * Backend dependent analysis & optimization phase
+ *************************************************/
+
+  // SubTensorInfo should be generated after lowering and before shape correction and finalize,
+  // because SubTensorAnalyzer assumes that permutation insertion is already finished.
+  //   lower: decide backends and insert permutations
+  //   fix shapes: prepare codegen for optimization
+  //   generate tensor objects: generate tensors using subtensor info
+  //   generate kernels
+  //   allocate tensor memory
+  //   constant initialization: fill the constants with values
+  // The generated SubTensorInfo is stored in the operand (Object)
+  // so that it can easily be passed to the plan builder and the tensor builder.
+ linear->accept(SubTensorAnalyzer{graph.operands()});
+
+ /**********************************************************
+ * Backend dependent analysis & optimization phase finished
+ **********************************************************/
+
+ /***********************
+ * Code generation phase
+ ***********************/
+
+ // Fix shapes
+ linear->iterate([&](const compiler::Linear::Element &element) {
+ auto backend = element.lower_info->backend();
+ auto shape_fixer = linear->getBackendContext(backend)->shape_fixer;
+ shape_fixer->fix(*element.subgraph);
+ });
+
+ linear->planTensors();
+
+ auto tensor_builders = linear->backend_resolver()->tensor_builders();
+
+ // Prepare tensors
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->prepare();
+ }
+
+ // Generate initializers
+ linear->generateConstantInitializers();
+
+ class ExecutionBuilder final : public IExecutionBuilder
+ {
+ public:
+ ExecutionBuilder(exec::FunctionSequence &functions) : _functions{functions}
+ {
+ // DO NOTHING
+ }
+
+ public:
+ void append(std::unique_ptr<::neurun::exec::IFunction> &&f) override
+ {
+ _functions.append(std::move(f));
+ }
+
+ private:
+ exec::FunctionSequence &_functions;
+ };
+
+ auto execution_builder = nnfw::cpp14::make_unique<ExecutionBuilder>(*function_sequence);
+
+ // Generate kernels
+ linear->iterate([&](const compiler::Linear::Element &element) {
+ auto backend = element.lower_info->backend();
+ auto kernel_gen = linear->getBackendContext(backend)->kernel_gen;
+ kernel_gen->generate(*element.subgraph, execution_builder.get());
+ });
+
+ // Allocate Tensor Memory
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->allocate();
+ }
+
+ // TODO Add optimization passes
+
+ // Initialize constant tensors
+ for (const auto backend : backend::BackendManager::instance().getAll())
+ {
+ linear->getBackendContext(backend)->constant_initializer->run();
+ }
+
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->finalize();
+ }
+
+ // Wrap tensors as Object and store them to plan
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->iterate([&](const model::OperandIndex &index) {
+ auto object = tensor_builder->wrapTensor(index);
+ operand_context->set(index, object);
+ });
+ }
+
+ // Prepare each TensorManager on each backend
+ auto tensor_mgrs = nnfw::cpp14::make_unique<backend::TensorManagerSet>();
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_mgrs->insert(tensor_builder->releaseTensorManager());
+ }
+
+ return new exec::LinearExecutor{graph.shareModel(), linear->releaseSubgraphs(),
+ operand_context, linear->releaseLowerInfo(),
+ std::move(tensor_mgrs), linear->releaseElements(),
+ function_sequence};
+}
+
+exec::IExecutor *ExecutorFactory::createDataflowExecutor(graph::Graph &graph, bool parallel)
+{
+ auto operand_context = std::make_shared<OperandContext>();
+
+ graph.subgraphs().iterate([&](const model::SubgraphIndex &, const model::Subgraph &subg) {
+ auto subtensor_analyzer = SubTensorAnalyzer{graph.operands()};
+ subg.accept(subtensor_analyzer);
+ });
+
+ // Fix shapes
+ graph.subgraphs().iterate(
+ [&](const model::SubgraphIndex &subg_index, const model::Subgraph &subg) {
+ auto backend = graph.getLowerInfo(subg_index)->backend();
+ auto shape_fixer = graph.backend_resolver()->getBackendContext(backend)->shape_fixer;
+ shape_fixer->fix(subg);
+ });
+
+ graph.operands().iterate([&](const model::OperandIndex &ind, const model::Operand &obj) {
+ const auto lower_info = graph.getLowerInfo(ind);
+ for (auto factor : lower_info->def_factors())
+ {
+ bool isSubTensor = false;
+ auto backend = factor.backend();
+ auto tensor_builder = graph.backend_resolver()->getBackendContext(backend)->tensor_builder;
+
+ if (backend->config()->SupportSubTensorAlloc())
+ {
+ const auto parentInfo = obj.parent_info();
+ if (parentInfo != nullptr)
+ {
+ isSubTensor = true;
+ }
+ }
+
+ if (isSubTensor)
+ {
+ const compiler::SubTensorInfo info(obj);
+ tensor_builder->registerSubTensorInfo(ind, info);
+ }
+ else
+ {
+ const auto info = obj.info();
+ // NOTE This assumes an operand can have one layout, and only PermutateNode can have
+ // different layouts for input and output
+ const auto &def = *obj.getDef().list().cbegin();
+ auto frontend_layout =
+ graph.subgraphs().at(graph.subgraphs().getOperation(def)).getLayout();
+ if (frontend_layout == model::Layout::UNKNOWN)
+ {
+ const auto &use = *obj.getUses().list().cbegin();
+ frontend_layout = graph.subgraphs().at(graph.subgraphs().getOperation(use)).getLayout();
+ }
+ const auto backend_layout = lower_info->def_factors().getOnlyElement().layout();
+ tensor_builder->registerTensorInfo(ind, info, frontend_layout, backend_layout,
+ obj.isConstant());
+        // Workaround for the static memory planner: notify first use here so that this tensor is never deallocated
+ tensor_builder->notifyFirstUse(ind);
+ }
+ }
+ });
+
+ auto tensor_builders = graph.backend_resolver()->tensor_builders();
+
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->prepare();
+ }
+
+ class ExecutionBuilder : public IExecutionBuilder
+ {
+ public:
+ void append(std::unique_ptr<exec::IFunction> &&fn) override
+ {
+ auto itr = _code_map.find(_next_index);
+ if (itr == _code_map.end())
+ {
+ _code_map[_next_index] = nnfw::cpp14::make_unique<exec::FunctionSequence>();
+ }
+ _code_map[_next_index]->append(std::move(fn));
+ };
+
+    // TODO Remove this method and make `append` take the index value as an argument
+ void setNextIndex(const model::SubgraphIndex next_index) { _next_index = next_index; }
+
+ exec::DataflowExecutor::CodeMap &&releaseCodeMap() { return std::move(_code_map); }
+
+ private:
+ model::SubgraphIndex _next_index;
+ exec::DataflowExecutor::CodeMap _code_map;
+ };
+
+ auto execution_builder = nnfw::cpp14::make_unique<ExecutionBuilder>();
+
+ // Generate kernels
+ graph.subgraphs().iterate(
+ [&](const model::SubgraphIndex &subg_index, const model::Subgraph &subg) {
+ auto backend = graph.getLowerInfo(subg_index)->backend();
+ auto constant_initializer =
+ graph.backend_resolver()->getBackendContext(backend)->constant_initializer;
+ constant_initializer->generate(subg, graph.operands());
+ // TODO This approach is temporal. See declaration of `setNextIndex`.
+ execution_builder->setNextIndex(subg_index);
+ auto kernel_gen = graph.backend_resolver()->getBackendContext(backend)->kernel_gen;
+ kernel_gen->generate(subg, execution_builder.get());
+ });
+
+ for (const auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->allocate();
+ }
+
+ // Initialize constant tensors
+ for (const auto backend : backend::BackendManager::instance().getAll())
+ {
+ graph.backend_resolver()->getBackendContext(backend)->constant_initializer->run();
+ }
+
+ auto lower_info = graph.releaseLowerInfo();
+
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->finalize();
+ }
+
+ // Wrap tensors as Object and store them to plan
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_builder->iterate([&](const model::OperandIndex &index) {
+ auto object = tensor_builder->wrapTensor(index);
+ operand_context->set(index, object);
+ });
+ }
+
+ // Prepare each TensorManager on each backend
+ auto tensor_mgrs = nnfw::cpp14::make_unique<backend::TensorManagerSet>();
+ for (auto &tensor_builder : tensor_builders)
+ {
+ tensor_mgrs->insert(tensor_builder->releaseTensorManager());
+ }
+
+ if (parallel)
+ {
+ return new exec::ParallelExecutor{
+ graph.shareModel(), graph.releaseSubgraphs(),
+ operand_context, std::move(lower_info),
+ std::move(tensor_mgrs), std::move(execution_builder->releaseCodeMap())};
+ }
+ else
+ {
+ auto exec = new exec::DataflowExecutor{
+ graph.shareModel(), graph.releaseSubgraphs(),
+ operand_context, std::move(lower_info),
+ std::move(tensor_mgrs), std::move(execution_builder->releaseCodeMap())};
+ if (util::getConfigBool(util::config::PROFILING_MODE))
+ {
+ auto et = std::make_shared<backend::ExecTime>(backend::BackendManager::instance().getAll());
+ std::unique_ptr<exec::IExecutionObserver> obs =
+ nnfw::cpp14::make_unique<exec::ProfileObserver>(et);
+ exec->addObserver(std::move(obs));
+ }
+ return exec;
+ }
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/ExecutorFactory.h b/runtimes/neurun/core/src/compiler/ExecutorFactory.h
new file mode 100644
index 000000000..894fec1b5
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/ExecutorFactory.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_EXECUTOR_FACTORY_H__
+#define __NEURUN_COMPILER_EXECUTOR_FACTORY_H__
+
+#include <unordered_map>
+
+#include "exec/IExecutor.h"
+#include "graph/Graph.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+class ExecutorFactory
+{
+public:
+ static ExecutorFactory &instance();
+
+public:
+ exec::IExecutor *create(const std::string &id, graph::Graph &graph);
+
+private:
+ ExecutorFactory();
+
+private:
+ static exec::IExecutor *createLinearExecutor(graph::Graph &graph);
+ static exec::IExecutor *createDataflowExecutor(graph::Graph &graph, bool parallel);
+
+private:
+ std::unordered_map<std::string, std::function<exec::IExecutor *(graph::Graph &)>> _map;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_EXECUTOR_FACTORY_H__
diff --git a/runtimes/neurun/core/src/compiler/HEScheduler.cc b/runtimes/neurun/core/src/compiler/HEScheduler.cc
new file mode 100644
index 000000000..a3d1a5990
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/HEScheduler.cc
@@ -0,0 +1,577 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/Operand.h"
+#include "compiler/HEScheduler.h"
+#include "graph/Graph.h"
+#include "util/ConfigSource.h"
+#include "compiler/IExecutionBuilder.h"
+#include "compiler/BackendResolver.h"
+#include "backend/IShapeFixer.h"
+#include "util/logging.h"
+#include "util/Utils.h"
+#include "exec/FunctionSequence.h"
+#include <cassert>
+#include <cmath>
+#include <chrono>
+
+namespace neurun
+{
+
+namespace compiler
+{
+static uint32_t getOperationsFlattenedIOSize(const graph::Graph &graph,
+ const model::Operation &node)
+{
+ uint32_t size = 0;
+ for (const auto &input : node.getInputs())
+ {
+ size += graph.operands().at(input).info().total_size();
+ }
+ for (const auto &output : node.getOutputs())
+ {
+ size += graph.operands().at(output).info().total_size();
+ }
+ return size;
+}
+
+static bool isQuant(const graph::Graph &graph, const model::Operation &node)
+{
+ for (const auto &input : node.getInputs())
+ {
+ const auto &obj = graph.operands().at(input);
+ if (obj.typeInfo().type() == model::DataType::QUANT8_ASYMM)
+ {
+ return true;
+ }
+ }
+ return false;
+}
+
+static bool isWorkaroundSkip(const graph::Graph &graph, const backend::Backend *backend,
+ const model::Operation &node, bool quant)
+{
+  /* TODO: this is a workaround; come up with a better solution if one exists.
+    Adding an exception in the stage doesn't help, because if there is a record for Add without
+    broadcast, scheduling will select it, since it doesn't distinguish broadcast from
+    non-broadcast the way it distinguishes quantized from non-quantized */
+ if (backend->config()->id() == "cpu" && node.getName() == "Add")
+ {
+ const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
+    /* Broadcasting isn't supported on CPU: there is no way to tell an existing exec_time record
+     * of Add with broadcasting apart from one without it */
+    /* Quant is also unsupported: it throws an exception in run(); when scheduling without warm-up
+       it isn't caught by tryBackend() */
+ if (quant ||
+ !(graph.operands().at(lhs_index).shape() == graph.operands().at(rhs_index).shape()))
+ {
+ return true;
+ }
+ }
+  /* TODO: this is a workaround; come up with a better solution if one exists.
+        Adding an exception in the stage doesn't help, because if there is a record for Mul without
+        broadcast, scheduling will select it, since it doesn't distinguish broadcast from
+        non-broadcast the way it distinguishes quantized from non-quantized */
+ else if (backend->config()->id() == "acl_neon" && node.getName() == "Mul")
+ {
+ const auto lhs_index{node.getInputs().at(model::operation::MulNode::Input::LHS)};
+ const auto rhs_index{node.getInputs().at(model::operation::MulNode::Input::RHS)};
+
+ // Nontrivial broadcasting isn't supported yet
+ if (quant ||
+ !(graph.operands().at(lhs_index).shape() == graph.operands().at(rhs_index).shape()))
+ {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Checks whether a node can be merged into a subgraph
+static bool isMergable(const graph::Graph &graph, const model::Operation &node)
+{
+ size_t prev_op_cnt = 0;
+ for (const auto &input : node.getInputs())
+ {
+ // only valid_inputs
+ const auto &operand = graph.operands().at(input);
+ if (operand.isConstant())
+ continue;
+
+    // This operand is an input of an operation, not a weight or a bias
+ if (operand.getDef().list().size() > 0)
+ ++prev_op_cnt;
+
+    // The current node has multiple inputs (e.g. concat) or sits at the beginning of a separate branch
+ if (prev_op_cnt > 1 || operand.getUses().list().size() > 1)
+ {
+ return false;
+ }
+ }
+ return true;
+}
+
+void HEScheduler::scheduleShufflingBackends()
+{
+ VERBOSE(HEScheduler::scheduleNode)
+ << "Started task scheduling: uses all backends to get more metrics for data transfer"
+ << std::endl;
+ size_t backend_ind = 0;
+ for (const auto &rank : _rank_to_op)
+ {
+ VERBOSE(HEScheduler::scheduleNode) << "scheduling (" << rank.second.value() << ")" << std::endl;
+ const auto &node = _graph->operations().at(rank.second);
+ const bool quant = isQuant(*_graph, node);
+ const auto size = getOperationsFlattenedIOSize(*_graph, node);
+ for (size_t i = 0;; ++i)
+ {
+ if (i == _all_backends.size())
+ {
+ // wasn't able to find backend
+ assert(false);
+ break;
+ }
+ if (backend_ind == _all_backends.size())
+ {
+ backend_ind = 0;
+ }
+ if (isWorkaroundSkip(*_graph, _all_backends[backend_ind], node, quant))
+ {
+ ++backend_ind;
+ continue;
+ }
+ const auto exec_time =
+ _exec_time->getOperationExecTime(_all_backends[backend_ind], node.getName(), quant, size);
+ // Scheduling to measure data transfer must be done after measuring all backends separately
+ assert(exec_time != _exec_time->NOT_FOUND);
+ if (exec_time == _exec_time->getMax())
+ {
+ ++backend_ind;
+ continue;
+ }
+ _backend_resolver->setBackend(rank.second, _all_backends[backend_ind]);
+ VERBOSE(HEScheduler::schedule) << "backend for " << node.getName() << " is "
+ << _all_backends[backend_ind]->config()->id() << std::endl;
+ ++backend_ind;
+ break;
+ }
+ }
+}
+
+bool HEScheduler::isNodeProfiled(const model::Operation &node)
+{
+ const bool quant = isQuant(*_graph, node);
+ const auto size = getOperationsFlattenedIOSize(*_graph, node);
+ for (const auto *backend : _all_backends)
+ {
+ const auto exec_time = _exec_time->getOperationExecTime(backend, node.getName(), quant, size);
+ if (exec_time == _exec_time->NOT_FOUND)
+ return false;
+ }
+ return true;
+}
+
+std::unique_ptr<compiler::BackendResolver> HEScheduler::schedule(const graph::Graph &graph)
+{
+ _graph = &graph;
+ VERBOSE(HEScheduler::schedule) << "task scheduling started" << std::endl;
+ // Make ranks and save in descending order
+ makeRank();
+
+ for (const auto *backend : _all_backends)
+ {
+ _backends_avail_time.emplace(backend, std::map<int64_t, int64_t>{{0, 0}});
+ }
+
+ const bool is_profiling = util::getConfigBool(util::config::PROFILING_MODE);
+ if (is_profiling)
+ {
+ // Check if profiling info about all backend/node pairs already exists
+ bool all_nodes_are_profiled = true;
+ _graph->operations().iterate([&](const model::OperationIndex &, const model::Operation &op) {
+ if (all_nodes_are_profiled)
+ all_nodes_are_profiled = isNodeProfiled(op);
+ });
+
+    // If all nodes are already profiled, schedule backends in an order that allows collecting
+    // more profiling information about data transfer between backends
+ if (all_nodes_are_profiled)
+ {
+ scheduleShufflingBackends();
+ VERBOSE(HEScheduler::schedule) << "task scheduling finished" << std::endl;
+ return std::move(_backend_resolver);
+ }
+ }
+
+  // For each task, select the backend with the smallest earliest finish time (EFT)
+ for (const auto &rank : _rank_to_op)
+ {
+ scheduleNode(rank.second);
+ }
+ VERBOSE(HEScheduler::schedule) << "task scheduling finished" << std::endl;
+ return std::move(_backend_resolver);
+}
+
+int64_t HEScheduler::getOpTime(const backend::Backend *backend, const std::string &operation,
+ bool quant, uint32_t size)
+{
+ const auto time = _exec_time->getOperationExecTime(backend, operation, quant, size);
+ if (time != _exec_time->NOT_FOUND)
+ return time;
+
+ return _is_supported.at(backend).at(operation) ? 1 : _exec_time->getMax();
+}
+
+int64_t HEScheduler::getPermuteTime(const backend::Backend *src_backend,
+ const backend::Backend *dst_backend, bool quant, uint32_t size)
+{
+ const auto time = _exec_time->getPermuteTime(src_backend, dst_backend, quant, size);
+ if (time != _exec_time->NOT_FOUND)
+ return time;
+
+ // Makes the scheduler prefer keeping computations on one backend
+ return size / 200;
+}
+
+int64_t HEScheduler::tryBackend(const model::Operation &node, const backend::Backend *backend)
+{
+ auto iter = _is_supported.find(backend);
+ if (iter != _is_supported.end())
+ {
+ auto it2 = iter->second.find(node.getName());
+ if (it2 != iter->second.end())
+ {
+ return _is_supported[backend][node.getName()] ? 1 : _exec_time->getMax();
+ }
+ }
+ try
+ {
+ _backend_resolver->getBackendContext(backend)->shape_fixer->fix(node);
+
+ if (!util::getConfigBool(util::config::PROFILING_MODE))
+ throw std::runtime_error("You are trying to run heterogeneous scheduler with disabled "
+ "profiling mode, while there is no profiling information about some "
+ "nodes. Run scheduler with enabled profiling mode first.");
+
+ _is_supported[backend][node.getName()] = true;
+ }
+ catch (std::runtime_error &e)
+ {
+ _is_supported[backend][node.getName()] = false;
+ }
+ return _is_supported[backend][node.getName()] ? 1 : _exec_time->getMax();
+}
+
+void HEScheduler::makeRank()
+{
+ VERBOSE(HEScheduler::makeRank) << "task prioritizing" << std::endl;
+
+ _graph->operations().iterate(
+ [&](const model::OperationIndex &index, const model::Operation &) { DFSMaxRank(index); });
+
+ // Check that ranks are calculated for all operations (nodes)
+ _graph->operations().iterate([&](const model::OperationIndex &index, const model::Operation &) {
+ UNUSED_RELEASE(index);
+ assert(_op_to_rank->find(index) != _op_to_rank->end());
+ });
+ VERBOSE(HEScheduler::makeRank) << "task prioritizing finished" << std::endl;
+}
+
+int64_t HEScheduler::DFSMaxRank(const model::OperationIndex &index)
+{
+ auto op_to_rank_it = _op_to_rank->find(index);
+ if (op_to_rank_it != _op_to_rank->end())
+ return op_to_rank_it->second;
+
+ const auto &node = _graph->operations().at(index);
+ int64_t rank = 0;
+ const bool quant = isQuant(*_graph, node);
+ const auto size = getOperationsFlattenedIOSize(*_graph, node);
+ auto supported_backends_quantity = static_cast<int64_t>(_all_backends.size());
+
+ const auto max_child_rank = DFSChildrenMaxRank(index);
+
+ // get average exec time of this op
+ for (const auto &backend : _all_backends)
+ {
+ auto exec_time = _exec_time->getOperationExecTime(backend, node.getName(), quant, size);
+ if (exec_time == _exec_time->NOT_FOUND)
+ {
+ exec_time = tryBackend(node, backend);
+ }
+ if (exec_time < _exec_time->getMax())
+ {
+ rank += exec_time;
+ }
+ else
+ {
+ // this operation isn't supported on this backend
+ --supported_backends_quantity;
+ }
+ }
+ assert((supported_backends_quantity > 0) && "Encountered unsupported op");
+ rank /= supported_backends_quantity;
+
+ // get standard deviation
+ int64_t std = 0;
+ for (const auto backend : _all_backends)
+ {
+ const auto exec_time = getOpTime(backend, node.getName(), quant, size);
+ if (exec_time < _exec_time->getMax())
+ {
+ std += (exec_time - rank) * (exec_time - rank);
+ }
+ }
+ std /= supported_backends_quantity;
+ if (std > 0)
+ {
+ std = static_cast<int>(std::sqrt(std));
+ rank *= std;
+ }
+ rank += max_child_rank;
+
+ assert(rank >= 0);
+ _rank_to_op.emplace(rank, index);
+ _op_to_rank->emplace(index, rank);
+ VERBOSE(HEScheduler::DFSMaxRank) << "rank of operation (" << index.value() << ") "
+ << node.getName() << " is " << rank << std::endl;
+
+ return rank;
+}
+
+int64_t HEScheduler::DFSChildrenMaxRank(const model::OperationIndex &index)
+{
+ const auto &node = _graph->operations().at(index);
+ int64_t max_child_rank = 0;
+ for (const auto &output : node.getOutputs())
+ {
+ const auto &operand = _graph->operands().at(output);
+ const bool quant = operand.typeInfo().type() == model::DataType::QUANT8_ASYMM;
+ // average data transfer cost of this operand's data
+ int64_t avg_transfer_cost = 1;
+ for (const auto *backend : _all_backends)
+ {
+ for (const auto *other_backend : _all_backends)
+ {
+ if (backend == other_backend)
+ {
+ continue;
+ }
+ auto transfer_cost =
+ _exec_time->getPermuteTime(backend, other_backend, quant, operand.info().total_size());
+ if (transfer_cost == _exec_time->NOT_FOUND)
+ {
+ // Makes the scheduler prefer keeping computations on one backend
+ transfer_cost = operand.info().total_size() / 100;
+ }
+ avg_transfer_cost += transfer_cost;
+ }
+ }
+ avg_transfer_cost /= _all_backends.size();
+ for (const auto &use : operand.getUses().list())
+ {
+ const auto cur_child_rank = DFSMaxRank(use);
+ max_child_rank = std::max(max_child_rank, cur_child_rank + avg_transfer_cost);
+ }
+ }
+ return max_child_rank;
+}
+
+int64_t HEScheduler::backendAvailableTime(const backend::Backend *backend,
+ const int64_t &starting_time, const int64_t &time_amount)
+{
+ const auto backend_times = _backends_avail_time.at(backend);
+ // finishing and starting times of the op that will come after the current op
+ auto next_op_fst = backend_times.upper_bound(starting_time);
+ // finishing time of the op that will come before the current op
+ auto prev_op_ft = starting_time;
+ // advance until we reach a hole/gap that is big enough to run this op
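+ // e.g. (illustration, hypothetical numbers) with backend_times = {{0,0},{10,5},{30,20}},
+ // starting_time = 0 and time_amount = 7: the gap before the op starting at 5 is too small,
+ // so the search moves just past that op's finishing time (10) and returns 11; the new op
+ // would then occupy [11, 18], which fits before the op that starts at 20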
+ while (next_op_fst != backend_times.end() && next_op_fst->second - prev_op_ft <= time_amount)
+ {
+ prev_op_ft = next_op_fst->first + 1;
+ ++next_op_fst;
+ }
+ return prev_op_ft;
+}
+
+void HEScheduler::scheduleNode(const model::OperationIndex &index)
+{
+ VERBOSE(HEScheduler::scheduleNode) << "scheduling (" << index.value() << ")" << std::endl;
+ int64_t eft = std::numeric_limits<int64_t>::max(), selected_exec_time = 0;
+ const auto &node = _graph->operations().at(index);
+
+ std::multimap<int64_t, int64_t> selected_transfer_st_exec_time;
+ // select the backend with the smallest eft of this task
+ const backend::Backend *chosen_backend = nullptr;
+ for (const auto *backend : _all_backends)
+ {
+ std::multimap<int64_t, int64_t> transfer_st_exec_time;
+ const auto est_and_et = ESTAndExecTime(backend, index, transfer_st_exec_time);
+
+ if (eft > est_and_et.first + est_and_et.second)
+ {
+ eft = est_and_et.first + est_and_et.second;
+ selected_exec_time = est_and_et.second;
+ chosen_backend = backend;
+ selected_transfer_st_exec_time = transfer_st_exec_time;
+ }
+ }
+
+ if (chosen_backend == nullptr)
+ {
+ throw std::runtime_error{"Fail to choose backend on scheduler"};
+ }
+
+ for (const auto &it : selected_transfer_st_exec_time)
+ {
+ auto prev_op_ft = backendAvailableTime(_cpu_backend, it.first, it.second);
+ _backends_avail_time[_cpu_backend].insert({prev_op_ft + it.second, prev_op_ft});
+ }
+
+ _ops_eft[index] = eft;
+ _backends_avail_time[chosen_backend].emplace(eft, eft - selected_exec_time);
+ _backend_resolver->setBackend(index, chosen_backend);
+
+ VERBOSE(HEScheduler::scheduleNode) << "backend for " << node.getName() << " is "
+ << chosen_backend->config()->id() << ". Its eft: " << eft
+ << std::endl;
+}
+
+std::pair<int64_t, int64_t>
+HEScheduler::ESTAndExecTime(const backend::Backend *backend, const model::OperationIndex &index,
+ std::multimap<int64_t, int64_t> &transfer_st_exec_time)
+{
+ const bool is_linear_exec = "Linear" == util::getConfigString(util::config::EXECUTOR);
+ const bool is_parallel_exec = "Parallel" == util::getConfigString(util::config::EXECUTOR);
+ // Permutation will cause a separate subgraph to be created, containing just this permutation
+ // node. This isn't needed for the Linear executor since it doesn't use subgraphs
+ // The value of 1 ms was picked experimentally
+ int64_t permute_fine = 1000;
+ // Multiply cpu operations' exec time by 2 because in the parallel executor the cpu might be
+ // busy with permutations on other branches or with non-nnfw tasks, and this op has to wait.
+ // The factor of 2 was picked experimentally
+ const int64_t CPU_DELAY = 2;
+ const auto &node = _graph->operations().at(index);
+ const bool quant = isQuant(*_graph, node);
+ const auto size = getOperationsFlattenedIOSize(*_graph, node);
+ // if this node can be part of a subgraph, then assigning a different backend will cause
+ // another subgraph to be created
+ if (isMergable(*_graph, node))
+ {
+ permute_fine *= 2;
+ }
+ if (isWorkaroundSkip(*_graph, backend, node, quant))
+ {
+ return {_exec_time->getMax(), _exec_time->getMax()};
+ }
+ // get average exec time of the op on this backend
+ auto exec_time = getOpTime(backend, node.getName(), quant, size);
+ if (backend->config()->id() == "cpu" && is_parallel_exec)
+ {
+ exec_time *= CPU_DELAY;
+ }
+
+ // get max eft of direct (one level above) predecessors
+ auto max_pred_eft = predMaxEFT(backend, node, transfer_st_exec_time);
+
+ int64_t total_transfer_cost = 0;
+ std::vector<std::multimap<int64_t, int64_t>::iterator> inserted_permutations;
+ // Find free time for data transfers and insert it into the backend's taskset. This ensures:
+ // 1. Time for multiple permutations of this node's inputs is found correctly
+ // 2. If backend == cpu, then the free time for this node comes after the permutations
+ for (auto &it : transfer_st_exec_time)
+ {
+ if (is_parallel_exec)
+ {
+ it.second *= CPU_DELAY;
+ }
+ if (!is_linear_exec)
+ {
+ it.second += permute_fine;
+ }
+ total_transfer_cost += it.second;
+
+ const auto prev_op_ft = backendAvailableTime(_cpu_backend, it.first, it.second);
+
+ max_pred_eft = std::max(max_pred_eft, prev_op_ft + it.second);
+
+ const auto tmp = _backends_avail_time[_cpu_backend].emplace(prev_op_ft + it.second, prev_op_ft);
+ inserted_permutations.push_back(tmp.first);
+ }
+ // find the hole/gap where this op can be put, or the finishing time of the last assigned op
+ auto prev_op_ft = backendAvailableTime(backend, max_pred_eft, exec_time);
+
+ // Remove inserted permutation from cpu's task set
+ for (const auto &it : inserted_permutations)
+ {
+ _backends_avail_time[_cpu_backend].erase(it);
+ }
+
+ /* For a non-parallel executor, measure just exec time and data transfer time,
+ * because EFT (prev_op_ft) is the same for all backends: since two operations
+ * can't run simultaneously, the running operation must finish first. When an
+ * operation starts, all backends are free, so they only need time for the
+ * data transfer.*/
+ if (!is_parallel_exec)
+ {
+ VERBOSE(HEScheduler::ESTAndExecTime)
+ << "exec_time of (" << index.value() << ") " << node.getName() << " quant==" << quant
+ << " on " << backend->config()->id() << " is " << exec_time
+ << " microseconds. Data transfer cost: " << total_transfer_cost << std::endl;
+
+ return {total_transfer_cost, exec_time};
+ }
+ VERBOSE(HEScheduler::ESTAndExecTime)
+ << "exec_time of (" << index.value() << ") " << node.getName() << " quant==" << quant
+ << " on " << backend->config()->id() << ": " << exec_time
+ << " microseconds. Backend available time: " << prev_op_ft
+ << " Parent's max eft: " << max_pred_eft - total_transfer_cost
+ << " data transfer cost: " << total_transfer_cost << std::endl;
+
+ return {prev_op_ft, exec_time};
+}
+
+int64_t HEScheduler::predMaxEFT(const backend::Backend *backend, const model::Operation &node,
+ std::multimap<int64_t, int64_t> &transfer_st_exec_time)
+{
+ int64_t max_pred_eft = 0;
+ for (const auto &input_operand_idx : node.getInputs())
+ {
+ const auto &input_operand = _graph->operands().at(input_operand_idx);
+ const bool quant = input_operand.typeInfo().type() == model::DataType::QUANT8_ASYMM;
+
+ for (const auto &input_node_idx : input_operand.getDef().list())
+ {
+ // Data transfer cost from parent's node backend to current node's backend:
+ auto parent_backend = _backend_resolver->getBackend(input_node_idx);
+
+ max_pred_eft = std::max(max_pred_eft, _ops_eft.at(input_node_idx));
+ if (parent_backend != backend)
+ {
+ // Multiply operand size by 2 because size must describe input+output size
+ int64_t transfer_cost =
+ getPermuteTime(parent_backend, backend, quant, input_operand.info().total_size() * 2);
+ transfer_st_exec_time.emplace(_ops_eft.at(input_node_idx), transfer_cost);
+ }
+ }
+ }
+ return max_pred_eft;
+}
+
+} // namespace compiler
+
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/HEScheduler.h b/runtimes/neurun/core/src/compiler/HEScheduler.h
new file mode 100644
index 000000000..2b818f248
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/HEScheduler.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file HEScheduler.h
+ * @brief This file contains the HEScheduler class, which defines and runs the Heterogeneous
+ * Execution task Scheduler
+ */
+
+#ifndef __NEURUN_COMPILER_H_E_SCHEDULER_H_
+#define __NEURUN_COMPILER_H_E_SCHEDULER_H_
+
+#include "compiler/IScheduler.h"
+#include "graph/Graph.h"
+#include "backend/ExecTime.h"
+#include "backend/Backend.h"
+#include "cpp14/memory.h"
+#include "model/OperationIndexMap.h"
+#include <map>
+#include <memory>
+
+namespace neurun
+{
+
+namespace compiler
+{
+/**
+ * @brief Class to schedule tasks
+ */
+class HEScheduler : IScheduler
+{
+public:
+ /**
+ * @brief Construct a new Heterogeneous Execution Scheduler object
+ * @param[in] operands Operands of the graph model
+ * @param[in] backends List of backends to schedule across
+ * @param[in] registry Custom kernel registry
+ */
+ HEScheduler(const neurun::model::Operands &operands,
+ std::vector<const backend::Backend *> backends,
+ const std::shared_ptr<backend::custom::KernelRegistry> &registry)
+ : _is_supported{}, _backends_avail_time{}, _ops_eft{},
+ _op_to_rank{std::make_shared<model::OperationIndexMap<int64_t>>()},
+ _all_backends(std::move(backends))
+ {
+ _backend_resolver =
+ nnfw::cpp14::make_unique<compiler::BackendResolver>(operands, _all_backends, registry);
+ _exec_time = nnfw::cpp14::make_unique<backend::ExecTime>(_all_backends);
+
+ // Find cpu backend
+ auto cpu_backend_it = std::find_if(
+ _all_backends.begin(), _all_backends.end(),
+ [](const backend::Backend *backend) { return backend->config()->id() == "cpu"; });
+ if (cpu_backend_it == _all_backends.end())
+ throw std::runtime_error("HEScheduler could be used only if 'cpu' backend is available");
+ _cpu_backend = *cpu_backend_it;
+ }
+
+public:
+ /**
+ * @brief Task scheduling
+ *
+ * @note The main idea is taken from HSIP algo:
+ * https://www.hindawi.com/journals/sp/2016/3676149/
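+ * In short (a sketch of what DFSMaxRank computes): each node's rank is roughly
+ * mean_exec_time * stddev_exec_time + max_child_rank, with both statistics taken
+ * over the backends that support the node, and child ranks padded with an average
+ * data-transfer cost.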
+ */
+ std::unique_ptr<compiler::BackendResolver> schedule(const graph::Graph &graph) final;
+ std::shared_ptr<model::OperationIndexMap<int64_t>> getIndexedRanks() { return _op_to_rank; }
+
+private:
+ bool isNodeProfiled(const model::Operation &);
+
+ void scheduleNode(const model::OperationIndex &);
+ /**
+ * @brief Get earliest starting time and execution time of an operation on a backend.
+ *
+ * @note Returns the time when the operation's inputs are ready and the backend is available.
+ * It also returns exec time. If this is the "cpu" backend, exec_time is multiplied by CPU_DELAY
+ *
+ * @param[in] backend: backend, for which to return the time
+ * @param[in] index: index of an operation
+ * @param[out] transfer_st_exec_time: est and exec time of data transfer operation
+ *
+ * @return earliest starting time and execution time
+ */
+ std::pair<int64_t, int64_t>
+ ESTAndExecTime(const backend::Backend *backend, const model::OperationIndex &index,
+ std::multimap<int64_t, int64_t> &transfer_st_exec_time);
+ /**
+ * @brief Returns the latest finishing time of parents of a node.
+ *
+ * @param[in] backend: backend, for which to return the time
+ * @param[in] node: node to get eft of parents
+ * @param[out] transfer_st_exec_time: est and exec time of data transfer operation
+ *
+ * @return earliest finishing time of parent nodes
+ */
+ int64_t predMaxEFT(const backend::Backend *backend, const model::Operation &node,
+ std::multimap<int64_t, int64_t> &transfer_st_exec_time);
+
+ void makeRank();
+
+ int64_t DFSMaxRank(const model::OperationIndex &index);
+
+ int64_t DFSChildrenMaxRank(const model::OperationIndex &index);
+ /**
+ * @brief Returns the time when the backend is available for at least the given amount of time.
+ *
+ * @note Returns either a hole/gap between two already scheduled operations,
+ * or the finishing time of the last scheduled operation
+ *
+ * @param[in] backend backend, for which to return the time
+ * @param[in] starting_time the time from which to look for a gap
+ * @param[in] time_amount the amount of free time to look for
+ *
+ * @return the time when the backend has at least time_amount of free time
+ */
+ int64_t backendAvailableTime(const backend::Backend *backend, const int64_t &starting_time,
+ const int64_t &time_amount);
+
+ int64_t getOpTime(const backend::Backend *backend, const std::string &operation, bool quant,
+ uint32_t size);
+
+ int64_t getPermuteTime(const backend::Backend *src_backend, const backend::Backend *dst_backend,
+ bool quant, uint32_t size);
+
+ void scheduleShufflingBackends();
+
+ int64_t tryBackend(const model::Operation &node, const backend::Backend *backend);
+
+private:
+ // This variable stores backend/node pairs with unknown execution time, and hints the
+ // scheduler whether it should assign these backends to these nodes:
+ // * It stores false for unsupported nodes
+ // * During rank calculation with profiling mode enabled, it stores true for supported nodes
+ std::unordered_map<const backend::Backend *, std::unordered_map<std::string, bool>> _is_supported;
+ // For each backend, a map of {finishing time -> starting time} of its scheduled ops
+ std::unordered_map<const backend::Backend *, std::map<int64_t, int64_t>> _backends_avail_time;
+ model::OperationIndexMap<int64_t> _ops_eft;
+ std::multimap<int64_t, model::OperationIndex, std::greater<int64_t>> _rank_to_op;
+ std::shared_ptr<model::OperationIndexMap<int64_t>> _op_to_rank;
+ std::unique_ptr<compiler::BackendResolver> _backend_resolver;
+ std::unique_ptr<backend::ExecTime> _exec_time;
+ const graph::Graph *_graph{nullptr};
+ const std::vector<const backend::Backend *> _all_backends;
+ const backend::Backend *_cpu_backend{nullptr};
+};
+
+} // namespace compiler
+
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_H_E_SCHEDULER_H_
diff --git a/runtimes/neurun/core/src/compiler/IScheduler.h b/runtimes/neurun/core/src/compiler/IScheduler.h
new file mode 100644
index 000000000..5b425bf45
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/IScheduler.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_CORE_COMPILER_I_SCHEDULER_H__
+#define __NEURUN_CORE_COMPILER_I_SCHEDULER_H__
+
+#include "BackendResolver.h"
+#include "graph/Graph.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+struct IScheduler
+{
+ virtual ~IScheduler() = default;
+
+ virtual std::unique_ptr<BackendResolver> schedule(const graph::Graph &graph) = 0;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_CORE_COMPILER_I_SCHEDULER_H__
diff --git a/runtimes/neurun/core/src/compiler/Linear.cc b/runtimes/neurun/core/src/compiler/Linear.cc
new file mode 100644
index 000000000..72d0fdb8f
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/Linear.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+
+#include "Linear.h"
+
+#include "graph/operation/LowerInfo.h"
+#include "graph/operand/LowerInfo.h"
+#include "backend/IShapeFixer.h"
+#include "backend/IConfig.h"
+#include "backend/IConstantInitializer.h"
+#include "backend/Backend.h"
+#include "compiler/SubTensorInfo.h"
+#include "model/OperandInfo.h"
+#include "model/OperandIndexMap.h"
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+Linear::Linear(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ std::unique_ptr<graph::LowerInfoMap> lower_info_map,
+ std::unique_ptr<compiler::BackendResolver> backend_resolver)
+ : _model(model), _subgraphs{std::move(subgraphs)}, _lower_info_map{std::move(lower_info_map)},
+ _backend_resolver{std::move(backend_resolver)}
+{
+ assert(_model && _subgraphs && _lower_info_map);
+
+ // Get SubgraphSequence by topological sorting
+ {
+ // _subgraphs can't look up a subgraph by operand, so input_to_subgs provides that mapping
+ std::unordered_map<model::OperandIndex, std::list<model::SubgraphIndex>> input_to_subgs;
+
+ // Get the input-to-subgraph relations to be used for DFS post-order iteration
+ //
+ // [0] # input -> input_to_subgs[0] = {SUBG0}
+ // |
+ // [SUBG0]
+ // |
+ // [1]-----. # input -> input_to_subgs[1] = {SUBG1, SUBG2}
+ // | |
+ // [SUBG1] [SUBG2]
+ // | |
+ // [2] [3] # input -> input_to_subgs[2] = {SUBG3}
+ // \ / # input -> input_to_subgs[3] = {SUBG3}
+ // [SUBG3]
+ // |
+ // [4]
+ _subgraphs->iterate([&](const model::SubgraphIndex &subg_idx, model::Subgraph &subg) {
+ for (auto input : subg.getInputs())
+ {
+ // only consider valid (non-constant) inputs
+ const auto &operand = _model->operands.at(input);
+ if (operand.isConstant())
+ continue;
+
+ auto it = input_to_subgs.find(input);
+ if (it == input_to_subgs.end())
+ {
+ std::list<model::SubgraphIndex> list{subg_idx};
+ input_to_subgs[input] = list;
+ }
+ else
+ {
+ it->second.push_back(subg_idx);
+ }
+ }
+ });
+
+ std::unordered_map<model::SubgraphIndex, bool> visited;
+ _subgraphs->iterate([&](const model::SubgraphIndex &index, const model::Subgraph &) {
+ visited[index] = false;
+ });
+
+ std::function<void(const model::SubgraphIndex &, model::Subgraph &)> dfs_recursive =
+ [&](const model::SubgraphIndex &index, model::Subgraph &subg) -> void {
+ if (visited[index])
+ return;
+ visited[index] = true;
+
+ // The outputs should not be constants
+ for (auto output : subg.getOutputs())
+ {
+ const auto it = input_to_subgs.find(output);
+ if (it != input_to_subgs.end())
+ {
+ const auto &subg_index_list = it->second;
+ for (const auto &index : subg_index_list)
+ {
+ auto &subg = _subgraphs->at(index);
+ dfs_recursive(index, subg);
+ }
+ }
+ }
+
+ _elements.emplace_back(&_subgraphs->at(index), getLowerInfo(index));
+ };
+
+ _subgraphs->iterate(dfs_recursive);
+
+ // All of the nodes must have been visited.
+ assert(
+ std::all_of(visited.begin(), visited.end(),
+ [](const std::pair<const model::SubgraphIndex, bool> &v) { return v.second; }));
+
+ // NOTE Now these subgraphs are in reverse order
+ std::reverse(_elements.begin(), _elements.end());
+ }
+
+ {
+ const auto &backendToString = [](const neurun::backend::Backend *backend) {
+ assert(backend);
+ std::string str;
+ str += backend->config()->id();
+ str += " ";
+ return "{ " + str + "}";
+ };
+
+ VERBOSE(Linear) << "Final SubgraphSequence" << std::endl;
+ for (const auto &element : _elements)
+ {
+ const auto subg = element.subgraph;
+ const auto lower_info = element.lower_info;
+ VERBOSE(Linear) << "* SUBG"
+ << " " << backendToString(lower_info->backend()) << " " << subg->getStr()
+ << std::endl;
+ }
+ }
+}
+
+void Linear::accept(model::OperationVisitor &&visitor) const
+{
+ for (const auto &e : _elements)
+ {
+ e.subgraph->accept(visitor);
+ }
+}
+
+void Linear::planTensors()
+{
+ model::OperandIndexMap<std::shared_ptr<backend::ITensorBuilder>> tensor_builder_map;
+
+ // NOTE
+ // While the current ITensorBuilder exposes registerSubTensorInfo() for subtensors,
+ // this stage uses registerSubTensorInfo() and notify{First|Last}Use(),
+ // but subtensor handling should be processed by each backend. See #5726.
+ model::OperandIndexMap<uint32_t> uses_map;
+ model::OperandIndexMap<uint32_t> def_map;
+ model::OperandIndexSequence constants;
+
+ // Prepare scanning
+ _model->operands.iterate([&](const model::OperandIndex &ind, const model::Operand &obj) {
+ const auto lower_info = getLowerInfo(ind);
+ // TODO Remove this once neurun no longer supports cases such as
+ // GeneratedTests.reshape_quant8_weights_as_inputs
+ if (lower_info->def_factors().size() == 0 && lower_info->use_factors().size() == 0 &&
+ _model->inputs.contains(ind) == false)
+ {
+ VERBOSE(LINEAR) << "Operand #" << ind.value() << " will be not used. no more process."
+ << std::endl;
+ return;
+ }
+
+ uses_map[ind] = obj.getUses().size();
+ def_map[ind] = obj.getDef().size(); // should be 1 or 0
+
+ bool is_const = obj.isConstant();
+ if (is_const)
+ {
+ constants.append(ind);
+ }
+
+ for (auto factor : lower_info->def_factors())
+ {
+ bool isSubTensor = false;
+ auto backend = factor.backend();
+ auto tensor_builder = _backend_resolver->getBackendContext(backend)->tensor_builder;
+
+ if (backend->config()->SupportSubTensorAlloc())
+ {
+ const auto parentInfo = obj.parent_info();
+ if (parentInfo != nullptr)
+ {
+ isSubTensor = true;
+ }
+ }
+
+ if (isSubTensor)
+ {
+ const compiler::SubTensorInfo info(obj);
+ tensor_builder->registerSubTensorInfo(ind, info);
+ }
+ else
+ {
+ const auto info = obj.info();
+
+ // NOTE This assumes an operand can have one layout, and only PermutateNode can have
+ // different layouts for input and output
+ const auto &def = *obj.getDef().list().cbegin();
+ auto frontend_layout = _subgraphs->at(_subgraphs->getOperation(def)).getLayout();
+ if (frontend_layout == model::Layout::UNKNOWN)
+ {
+ const auto &use = *obj.getUses().list().cbegin();
+ frontend_layout = _subgraphs->at(_subgraphs->getOperation(use)).getLayout();
+ }
+ const auto backend_layout = lower_info->def_factors().getOnlyElement().layout();
+ tensor_builder->registerTensorInfo(ind, info, frontend_layout, backend_layout, is_const);
+ }
+
+ tensor_builder_map[ind] = tensor_builder;
+ }
+ });
+
+ // If a tensor is a model output, increase the use count of the tensor.
+ // The intent is the same as the use counting above: prevent premature deallocation.
+ for (const auto &ind : _model->outputs)
+ {
+ uses_map[ind]++;
+ }
+
+ // Start scanning to do notify{First|Last}Use for each tensor
+
+ // If a tensor is a constant, increase its use count so that it is not deallocated;
+ // this means constants will be deallocated last.
+ // Also, allocate constant operands first
+ VERBOSE(LINEAR) << "TENSORS as CONSTANT" << std::endl;
+ for (const auto &ind : constants)
+ {
+ uses_map[ind]++;
+ tensor_builder_map[ind]->notifyFirstUse(ind);
+ }
+
+ // Allocate Model's inputs
+ VERBOSE(LINEAR) << "TENSORS as MODEL INPUT" << std::endl;
+ for (const auto &ind : _model->inputs)
+ {
+ auto tensor_builder = tensor_builder_map[ind];
+ if (!tensor_builder) // for GeneratedTests.xxx_weights_as_inputs
+ continue;
+ tensor_builder->notifyFirstUse(ind);
+ }
+
+ // At each operation,
+ // 1. Scan the DEF of the outputs; if an output is defined here, allocate it
+ // 2. Scan the USE of the inputs; decrease the use count and deallocate when it reaches 0
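+ // e.g. (illustration) an operand with uses_map[ind] == 3 gets notifyLastUse() right after
+ // the third consuming operation is visited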
+ VERBOSE(LINEAR) << "TENSORS" << std::endl;
+ for (const auto &e : _elements)
+ {
+ for (const auto &op : e.subgraph->operations())
+ {
+ for (const auto &ind : op.node->getOutputs())
+ {
+ assert(def_map.find(ind) != def_map.end());
+ if (def_map[ind])
+ {
+ def_map[ind] = 0;
+ tensor_builder_map[ind]->notifyFirstUse(ind);
+ }
+ }
+
+ for (const auto &ind : op.node->getInputs())
+ {
+ assert(uses_map.find(ind) != uses_map.end());
+ assert(uses_map[ind] > 0);
+ uses_map[ind]--;
+ if (uses_map[ind] == 0)
+ {
+ tensor_builder_map[ind]->notifyLastUse(ind);
+ }
+ }
+ }
+ }
+
+ // Dispose and validate
+ for (const auto &ind : _model->outputs)
+ {
+ --uses_map[ind];
+ assert(uses_map[ind] == 0);
+ tensor_builder_map[ind]->notifyLastUse(ind);
+ }
+
+ for (const auto &ind : constants)
+ {
+ --uses_map[ind];
+ assert(uses_map[ind] == 0);
+ tensor_builder_map[ind]->notifyLastUse(ind);
+ }
+
+ assert(std::all_of(
+ uses_map.begin(), uses_map.end(),
+ [](std::pair<const model::OperandIndex, uint32_t> it) { return it.second == 0; }));
+
+ assert(std::all_of(
+ def_map.begin(), def_map.end(),
+ [](std::pair<const model::OperandIndex, uint32_t> it) { return it.second == 0; }));
+}
+
+void Linear::iterate(const std::function<void(const Element &element)> &fn) const
+{
+ for (const auto &e : _elements)
+ {
+ fn(e);
+ }
+}
+
+void Linear::generateConstantInitializers(void) const
+{
+ iterate([&](const compiler::Linear::Element &element) {
+ auto backend = element.lower_info->backend();
+
+ auto constant_initializer = _backend_resolver->getBackendContext(backend)->constant_initializer;
+ constant_initializer->generate(*element.subgraph, _model->operands);
+ });
+}
+
+const graph::operation::LowerInfo *Linear::getLowerInfo(const model::SubgraphIndex &index) const
+{
+ if (!_lower_info_map)
+ return nullptr;
+ auto itr = _lower_info_map->operation.find(index);
+ if (itr == _lower_info_map->operation.end())
+ return nullptr;
+ return itr->second.get();
+}
+
+const graph::operand::LowerInfo *Linear::getLowerInfo(const model::OperandIndex &index) const
+{
+ if (!_lower_info_map)
+ return nullptr;
+ auto itr = _lower_info_map->operand.find(index);
+ if (itr == _lower_info_map->operand.end())
+ return nullptr;
+ return itr->second.get();
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/Linear.h b/runtimes/neurun/core/src/compiler/Linear.h
new file mode 100644
index 000000000..78c782a78
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/Linear.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_LINEAR_H__
+#define __NEURUN_COMPILER_LINEAR_H__
+
+#include <vector>
+#include <memory>
+
+#include "model/Model.h"
+#include "model/Subgraphs.h"
+#include "backend/ITensorBuilder.h"
+#include "graph/LowerInfoMap.h"
+#include "compiler/BackendResolver.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace operation
+{
+struct OperationVisitor;
+} // namespace operation
+} // namespace graph
+} // namespace neurun
+
+namespace neurun
+{
+namespace compiler
+{
+
+class Linear
+{
+public:
+ struct Element
+ {
+ const model::Subgraph *subgraph;
+ const graph::operation::LowerInfo *lower_info;
+
+ Element(const model::Subgraph *subgraph, const graph::operation::LowerInfo *lower_info)
+ : subgraph{subgraph}, lower_info{lower_info}
+ {
+ // DO NOTHING
+ }
+ };
+
+public:
+ Linear(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ std::unique_ptr<graph::LowerInfoMap> lower_info_map,
+ std::unique_ptr<compiler::BackendResolver> backend_resolver);
+
+public:
+ Linear(const Linear &linear) = delete;
+
+public:
+ void accept(model::OperationVisitor &&visitor) const;
+
+ void planTensors();
+
+ void iterate(const std::function<void(const Element &element)> &fn) const;
+
+ void generateConstantInitializers(void) const;
+
+ std::unique_ptr<graph::LowerInfoMap> releaseLowerInfo() { return std::move(_lower_info_map); }
+ graph::LowerInfoMap *getLowerInfo() { return _lower_info_map.get(); }
+
+ std::unique_ptr<model::Subgraphs> releaseSubgraphs() { return std::move(_subgraphs); }
+
+ std::vector<Element> &&releaseElements() { return std::move(_elements); }
+
+ const backend::BackendContext *getBackendContext(const backend::Backend *backend)
+ {
+ return _backend_resolver->getBackendContext(backend);
+ }
+
+ const compiler::BackendResolver *backend_resolver() const { return _backend_resolver.get(); }
+
+private:
+ // TODO Replace these getLowerInfo methods with ones of LowerInfoMap in the future
+ const graph::operation::LowerInfo *getLowerInfo(const model::SubgraphIndex &index) const;
+ const graph::operand::LowerInfo *getLowerInfo(const model::OperandIndex &index) const;
+
+private:
+ std::shared_ptr<const model::Model> _model;
+ std::unique_ptr<model::Subgraphs> _subgraphs;
+ std::unique_ptr<graph::LowerInfoMap> _lower_info_map;
+ std::vector<Element> _elements;
+ std::unique_ptr<compiler::BackendResolver> _backend_resolver;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_LINEAR_H__
diff --git a/runtimes/neurun/core/src/compiler/ManualScheduler.cc b/runtimes/neurun/core/src/compiler/ManualScheduler.cc
new file mode 100644
index 000000000..efd5ccc31
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/ManualScheduler.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ManualScheduler.h"
+#include "model/Operations.Include.h"
+#include "backend/Backend.h"
+#include "backend/BackendManager.h"
+#include "backend/IConfig.h"
+#include "util/ConfigSource.h"
+#include "misc/string_helpers.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+std::unique_ptr<BackendResolver> ManualScheduler::schedule(const graph::Graph &graph)
+{
+ auto backend_resolver = nnfw::cpp14::make_unique<compiler::BackendResolver>(
+ graph.operands(), backend::BackendManager::instance().getAll(), graph.getKernelRegistry());
+
+ // 1. Backend for all operations
+ const auto backend_all_str = util::getConfigString(util::config::OP_BACKEND_ALLOPS);
+ auto backend_all = backend::BackendManager::instance().get(backend_all_str);
+
+ VERBOSE(ManualScheduler) << "Default backend for all ops: " << backend_all_str << std::endl;
+
+ graph.operations().iterate([&](const model::OperationIndex &index, const model::Operation &) {
+ backend_resolver->setBackend(index, backend_all);
+ });
+
+ // 2. Backend per operation type
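+ // Each OP_BACKEND_<OpName> config value, when set, overrides the default backend
+ // for that operation type (the per-type keys are expanded from model/Operations.lst below)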
+ std::unordered_map<std::type_index, backend::Backend *> op_type_map;
+ // By default, CustomNode uses cpu backend
+ op_type_map[typeid(model::operation::CustomNode)] =
+ backend::BackendManager::instance().get("cpu");
+#define OP(InternalName, IsNnApi) \
+ if (IsNnApi) \
+ { \
+ const auto &backend_str = util::getConfigString(util::config::OP_BACKEND_##InternalName); \
+ if (!backend_str.empty()) \
+ { \
+ auto backend = backend::BackendManager::instance().get(backend_str); \
+ VERBOSE(Lower) << "backend for " << #InternalName << ": " << backend_str << std::endl; \
+ op_type_map[typeid(model::operation::InternalName)] = backend; \
+ } \
+ }
+#include "model/Operations.lst"
+#undef OP
+ graph.operations().iterate(
+ [&](const model::OperationIndex &index, const model::Operation &operation) {
+ auto itr = op_type_map.find(typeid(operation));
+ if (itr != op_type_map.end())
+ {
+ backend_resolver->setBackend(index, itr->second);
+ }
+ });
+
+ // 3. Backend per operation
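+ // OP_BACKEND_MAP is parsed as a ';'-separated list of '<operation index>=<backend id>'
+ // pairs, e.g. "0=cpu;3=acl_cl" (hypothetical values)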
+ try
+ {
+ auto map_str = util::getConfigString(util::config::OP_BACKEND_MAP);
+ auto key_val_list = nnfw::misc::split(map_str, ';');
+ for (const auto &key_val_str : key_val_list)
+ {
+ if (key_val_str.empty())
+ {
+ continue;
+ }
+
+ auto key_val = nnfw::misc::split(key_val_str, '=');
+ const auto &key_str = key_val.at(0);
+ const auto &val = key_val.at(1);
+ auto key = static_cast<uint32_t>(std::stoi(key_str));
+
+ graph.operations().at(model::OperationIndex{key}); // Check that it exists; otherwise this will throw
+ backend_resolver->setBackend(model::OperationIndex{key},
+ backend::BackendManager::instance().get(val));
+ }
+ }
+ catch (...)
+ {
+ VERBOSE(ManualScheduler) << "Invalid value from " << util::config::OP_BACKEND_MAP
+ << ". Some of the given values are ignored" << std::endl;
+ }
+
+ // Dump final assignment
+ backend_resolver->iterate(
+ [&](const model::OperationIndex &index, const backend::BackendContext &backend_ctx) {
+ VERBOSE(ManualScheduler) << "backend for operation #" << index.value() << ": "
+ << backend_ctx.backend->config()->id() << std::endl;
+ });
+
+ return backend_resolver;
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/ManualScheduler.h b/runtimes/neurun/core/src/compiler/ManualScheduler.h
new file mode 100644
index 000000000..c40318a70
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/ManualScheduler.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_CORE_COMPILER_MANUAL_SCHEDULER_H__
+#define __NEURUN_CORE_COMPILER_MANUAL_SCHEDULER_H__
+
+#include "IScheduler.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+class ManualScheduler : public IScheduler
+{
+public:
+ std::unique_ptr<BackendResolver> schedule(const graph::Graph &graph) override;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_CORE_COMPILER_MANUAL_SCHEDULER_H__
diff --git a/runtimes/neurun/core/src/compiler/OperandContext.cc b/runtimes/neurun/core/src/compiler/OperandContext.cc
new file mode 100644
index 000000000..77adc556c
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/OperandContext.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "OperandContext.h"
+
+#include <cassert>
+
+namespace neurun
+{
+namespace compiler
+{
+
+OperandContext &OperandContext::set(const model::OperandIndex &id,
+ const std::shared_ptr<backend::operand::IObject> &object)
+{
+ // Only one object for an id
+ assert(_objects.find(id) == _objects.end());
+ _objects[id] = object;
+ return (*this);
+}
+
+void OperandContext::iterate(
+ const std::function<void(const model::OperandIndex &, backend::operand::IObject &)> &fn)
+{
+ for (auto &e : _objects)
+ {
+ fn(e.first, *e.second);
+ }
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/OperandContext.h b/runtimes/neurun/core/src/compiler/OperandContext.h
new file mode 100644
index 000000000..169122500
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/OperandContext.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_OPERAND_CONTEXT_H__
+#define __NEURUN_COMPILER_OPERAND_CONTEXT_H__
+
+#include "backend/operand/IObject.h"
+#include "model/OperandIndexMap.h"
+#include <unordered_map>
+#include <memory>
+
+namespace neurun
+{
+namespace compiler
+{
+
+class OperandContext
+{
+public:
+ OperandContext &set(const model::OperandIndex &ind,
+ const std::shared_ptr<backend::operand::IObject> &object);
+
+public:
+ bool exist(const ::neurun::model::OperandIndex &ind) const
+ {
+ return _objects.find(ind) != _objects.end();
+ }
+
+public:
+ std::shared_ptr<backend::operand::IObject> at(const model::OperandIndex &ind) const
+ {
+ return _objects.at(ind);
+ }
+
+ std::shared_ptr<backend::operand::IObject> &at(const model::OperandIndex &ind)
+ {
+ return _objects.at(ind);
+ }
+
+ void
+ iterate(const std::function<void(const model::OperandIndex &, backend::operand::IObject &)> &fn);
+
+private:
+ model::OperandIndexMap<std::shared_ptr<backend::operand::IObject>> _objects;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_OPERAND_CONTEXT_H__
diff --git a/runtimes/neurun/core/src/compiler/OperationValidator.cc b/runtimes/neurun/core/src/compiler/OperationValidator.cc
new file mode 100644
index 000000000..0be680941
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/OperationValidator.cc
@@ -0,0 +1,879 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "OperationValidator.h"
+
+#include <typeinfo>
+
+#include "model/Operands.h"
+#include "graph/operation/LowerInfo.h"
+
+#include "util/logging.h"
+#include "util/Utils.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+void OperationValidator::visit(const model::Subgraph &subgraph)
+{
+ _current_subg_layout = subgraph.getLayout();
+ for (const auto &e : subgraph.operations())
+ {
+ const auto &node = *(e.node);
+ node.accept(*this);
+ }
+}
+
+void OperationValidator::visit(const model::operation::CastNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(0)};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(input_index);
+
+ assert(_ctx.at(output_index).shape() == _ctx.at(input_index).shape());
+}
+
+void OperationValidator::visit(const model::operation::ComparisonNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto lhs_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT0)};
+ const auto rhs_index{node.getInputs().at(model::operation::ComparisonNode::Input::INPUT1)};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(lhs_index);
+ UNUSED_RELEASE(rhs_index);
+
+ assert(_ctx.at(lhs_index).typeInfo().type() == _ctx.at(rhs_index).typeInfo().type());
+ assert(_ctx.at(output_index).typeInfo().type() == model::DataType::BOOL8);
+}
+
+void OperationValidator::visit(const model::operation::SoftmaxNode &node)
+{
+ VERBOSE(Softmax) << "Configure SOFTMAX operation" << std::endl;
+
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(0)};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(input_index);
+
+ assert(_ctx.at(output_index).shape().rank() == _ctx.at(input_index).shape().rank());
+}
+
+void OperationValidator::visit(const model::operation::PermuteNode &node)
+{
+ VERBOSE(Permute) << "Configure Permute operation" << std::endl;
+
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(0)};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(input_index);
+
+ assert(_ctx.at(output_index).shape().rank() == _ctx.at(input_index).shape().rank());
+}
+
+void OperationValidator::visit(const model::operation::ReduceSumNode &node)
+{
+ VERBOSE(Permute) << "Configure ReduceSum operation" << std::endl;
+
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReduceSumNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(input_index);
+ UNUSED_RELEASE(axis_index);
+
+ const auto input_shape = _ctx.at(input_index).shape();
+ const auto output_shape = _ctx.at(output_index).shape();
+ const auto axis_shape = _ctx.at(axis_index).shape();
+
+ UNUSED_RELEASE(output_shape);
+ UNUSED_RELEASE(input_shape);
+ UNUSED_RELEASE(axis_shape);
+
+ assert(input_shape.rank() <= 4);
+ assert(output_shape.rank() <= input_shape.rank());
+ assert(_ctx.at(axis_index).isConstant());
+ assert(axis_shape.rank() == 0 || axis_shape.rank() == 1);
+
+ // NOTE For 4 dimensions, if the ranks of input and output differ, this runtime only supports
+ // cases that reduce height and width, or reduce depth.
+ // TODO We have to support all cases of dimensions up to 4.
+ // For correct permuting, we would have to set the output's shape so that each dimension keeps
+ // the position it has in the input. But the positions of the same dimensions in the input and
+ // output may be set differently. For example, {2,3,4,5} (input shape) can be reduced to {3,5}
+ // (output shape). The original output shape should be {1,3,1,5}, but the real output shape may
+ // be {3,5}. If you simply extend it to 4 dimensions, it becomes {1,1,3,5}.
+ // Even if the output shape is changed to {1,3,1,5}, there is another problem: the shape of the
+ // output tensor used by the next operation also becomes {1,3,1,5} after this operation, even if
+ // the next operation does not expect that.
+ if (input_shape.rank() == 4 && input_shape.rank() != output_shape.rank())
+ {
+ if (output_shape.rank() == 2)
+ {
+ // Reducing HW
+ assert(input_shape.dim(0) == output_shape.dim(0) &&
+ input_shape.dim(3) == output_shape.dim(1));
+ }
+ else if (output_shape.rank() == 3)
+ {
+ // Reducing C or
+ // (Reducing H and C(input and output) == 1) or (Reducing W and C(input and output) == 1)
+ assert((input_shape.dim(0) == output_shape.dim(0) &&
+ input_shape.dim(1) == output_shape.dim(1) &&
+ input_shape.dim(2) == output_shape.dim(2)) ||
+ (input_shape.dim(0) == output_shape.dim(0) &&
+ (input_shape.dim(1) == output_shape.dim(1) ||
+ input_shape.dim(2) == output_shape.dim(1)) &&
+ input_shape.dim(3) == 1 && output_shape.dim(2) == 1));
+ }
+ }
+}
+
+void OperationValidator::visit(const model::operation::TransposeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::TransposeNode::Input::INPUT)};
+ const auto perm_idx{node.param().perm};
+
+ const auto &output_shape = _ctx.at(output_index).shape();
+ const auto &input_shape = _ctx.at(input_index).shape();
+ const auto &perm_shape = _ctx.at(perm_idx).shape();
+
+ UNUSED_RELEASE(output_shape);
+ UNUSED_RELEASE(input_shape);
+ UNUSED_RELEASE(perm_shape);
+
+ assert(perm_shape.rank() == 1);
+ assert(input_shape.rank() == perm_shape.dim(0));
+ assert(input_shape.rank() == output_shape.rank());
+}
+
+void OperationValidator::visit(const model::operation::ReduceMaxNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ReduceMaxNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ auto output_shape = _ctx.at(output_index).shape();
+ auto input_shape = _ctx.at(input_index).shape();
+ auto axis_shape = _ctx.at(axis_index).shape();
+
+ UNUSED_RELEASE(output_shape);
+ UNUSED_RELEASE(input_shape);
+ UNUSED_RELEASE(axis_shape);
+
+ assert(input_shape.rank() <= 4);
+ assert(output_shape.rank() <= input_shape.rank());
+ assert(_ctx.at(axis_index).isConstant());
+ assert(axis_shape.rank() == 0 || axis_shape.rank() == 1);
+
+ // NOTE For 4 dimensions, if the ranks of input and output differ, this runtime only supports
+ // cases that reduce height and width, or reduce depth.
+ // TODO We have to support all cases of dimensions up to 4.
+ // For correct permuting, we would have to set the output's shape so that each dimension keeps
+ // the position it has in the input. But the positions of the same dimensions in the input and
+ // output may be set differently. For example, {2,3,4,5} (input shape) can be reduced to {3,5}
+ // (output shape). The original output shape should be {1,3,1,5}, but the real output shape may
+ // be {3,5}. If you simply extend it to 4 dimensions, it becomes {1,1,3,5}.
+ // Even if the output shape is changed to {1,3,1,5}, there is another problem: the shape of the
+ // output tensor used by the next operation also becomes {1,3,1,5} after this operation, even if
+ // the next operation does not expect that.
+ if (input_shape.rank() == 4 && input_shape.rank() != output_shape.rank())
+ {
+ if (output_shape.rank() == 2)
+ {
+ // Reducing HW
+ assert(input_shape.dim(0) == output_shape.dim(0) &&
+ input_shape.dim(3) == output_shape.dim(1));
+ }
+ else if (output_shape.rank() == 3)
+ {
+ // Reducing C or
+ // (Reducing H and C(ifm and ofm) == 1) or (Reducing W and C(ifm and ofm) == 1)
+ assert((input_shape.dim(0) == output_shape.dim(0) &&
+ input_shape.dim(1) == output_shape.dim(1) &&
+ input_shape.dim(2) == output_shape.dim(2)) ||
+ (input_shape.dim(0) == output_shape.dim(0) &&
+ (input_shape.dim(1) == output_shape.dim(1) ||
+ input_shape.dim(2) == output_shape.dim(1)) &&
+ input_shape.dim(3) == 1 && output_shape.dim(2) == 1));
+ }
+ }
+}
+
+void OperationValidator::visit(const model::operation::RNNNode &node)
+{
+ // NOTE This validation is for static rnn(non-dynamic shape), but not for dynamic rnn
+ // TODO Support dynamic rnn
+ const auto output_index{node.getOutputs().at(model::operation::RNNNode::Output::OUTPUT)};
+ const auto hidden_state_out_index{
+ node.getOutputs().at(model::operation::RNNNode::Output::HIDDEN_STATE_OUT)};
+
+ const auto input_index{node.getInputs().at(model::operation::RNNNode::Input::INPUT)};
+ const auto weights_index{node.getInputs().at(model::operation::RNNNode::Input::WEIGHTS)};
+ const auto recurrent_weights_index{
+ node.getInputs().at(model::operation::RNNNode::Input::RECURRENT_WEIGHTS)};
+ const auto bias_index{node.getInputs().at(model::operation::RNNNode::Input::BIAS)};
+ const auto hidden_state_in_index{
+ node.getInputs().at(model::operation::RNNNode::Input::HIDDEN_STATE_IN)};
+
+ const auto batch_size = _ctx.at(output_index).shape().dim(0);
+ const auto num_units = _ctx.at(output_index).shape().dim(1);
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(hidden_state_out_index);
+ UNUSED_RELEASE(input_index);
+ UNUSED_RELEASE(weights_index);
+ UNUSED_RELEASE(recurrent_weights_index);
+ UNUSED_RELEASE(bias_index);
+ UNUSED_RELEASE(hidden_state_in_index);
+ UNUSED_RELEASE(batch_size);
+ UNUSED_RELEASE(num_units);
+
+ assert(_ctx.at(output_index).shape().rank() == 2 &&
+ _ctx.at(hidden_state_out_index).shape().rank() == 2 &&
+ _ctx.at(input_index).shape().rank() == 2 && _ctx.at(weights_index).shape().rank() == 2 &&
+ _ctx.at(recurrent_weights_index).shape().rank() == 2 &&
+ _ctx.at(hidden_state_in_index).shape().rank() == 2);
+ assert(_ctx.at(bias_index).shape().rank() == 1);
+
+ assert(batch_size == _ctx.at(input_index).shape().dim(0) &&
+ batch_size == _ctx.at(hidden_state_in_index).shape().dim(0) &&
+ batch_size == _ctx.at(hidden_state_out_index).shape().dim(0));
+ assert(_ctx.at(input_index).shape().dim(1) == _ctx.at(weights_index).shape().dim(1));
+
+ assert(num_units == _ctx.at(weights_index).shape().dim(0) &&
+ num_units == _ctx.at(recurrent_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(bias_index).shape().dim(0));
+ assert(num_units == _ctx.at(output_index).shape().dim(1) &&
+ num_units == _ctx.at(recurrent_weights_index).shape().dim(1) &&
+ num_units == _ctx.at(hidden_state_in_index).shape().dim(1) &&
+ num_units == _ctx.at(hidden_state_out_index).shape().dim(1));
+}
+
+void OperationValidator::visit(const model::operation::SpaceToDepthNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::SpaceToDepthNode::Input::INPUT)};
+ const auto block_size_index{node.param().block_size_index};
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto input_shape = _ctx.at(ifm_index).shape().asFeature(frontend_layout);
+ const auto output_shape = _ctx.at(ofm_index).shape().asFeature(frontend_layout);
+ const auto block_size = _ctx.at(block_size_index).asScalar<int32_t>();
+
+ UNUSED_RELEASE(input_shape);
+ UNUSED_RELEASE(output_shape);
+ UNUSED_RELEASE(block_size);
+
+ // All assertions as per NNAPI specification.
+ assert(_ctx.at(ifm_index).shape().rank() == 4);
+ assert(_ctx.at(ofm_index).shape().rank() == 4);
+ assert((block_size >= 1) && (input_shape.H % block_size == 0) &&
+ (input_shape.W % block_size == 0));
+ assert(input_shape.N == output_shape.N);
+ assert(input_shape.C * block_size * block_size == output_shape.C);
+}
+
+void OperationValidator::visit(const model::operation::EmbeddingLookupNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto lookups_index{
+ node.getInputs().at(model::operation::EmbeddingLookupNode::Input::LOOKUPS)};
+ const auto values_index{
+ node.getInputs().at(model::operation::EmbeddingLookupNode::Input::VALUES)};
+
+ const auto &output_obj = _ctx.at(output_index);
+ const auto &lookups_obj = _ctx.at(lookups_index);
+ const auto &values_obj = _ctx.at(values_index);
+
+ UNUSED_RELEASE(output_obj);
+ UNUSED_RELEASE(lookups_obj);
+ UNUSED_RELEASE(values_obj);
+
+ // Verify the operand here, not in SimpleEmbeddingLookup::configure(), to avoid ACL sometimes
+ // modifying TensorShape (Issue: https://github.sec.samsung.net/STAR/nnfw/issues/729)
+ {
+ assert(lookups_obj.typeInfo().type() == neurun::model::DataType::INT32);
+
+ const auto &output_shape = output_obj.shape();
+ const auto &lookups_shape = lookups_obj.shape();
+ const auto &values_shape = values_obj.shape();
+
+ UNUSED_RELEASE(output_shape);
+ UNUSED_RELEASE(lookups_shape);
+ UNUSED_RELEASE(values_shape);
+
+ assert(lookups_shape.rank() == 1);
+ assert(values_shape.rank() >= 2);
+
+ // The output should be an n-D tensor with the same rank and shape as the values tensor, except
+ // for the first dimension, which has the same size as lookups' only dimension.
+ assert(output_shape.rank() == values_shape.rank());
+ assert(output_shape.dim(0) == lookups_shape.dim(0));
+ for (int n = 1; n < output_shape.rank(); ++n)
+ {
+ assert(output_shape.dim(n) == values_shape.dim(n));
+ }
+ }
+}
+
+void OperationValidator::visit(const model::operation::ExpNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::ExpNode::Input::INPUT)};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(input_index);
+
+ assert(_ctx.at(output_index).shape() == _ctx.at(input_index).shape());
+ assert(_ctx.at(output_index).typeInfo().type() == _ctx.at(input_index).typeInfo().type());
+}
+
+void OperationValidator::visit(const model::operation::FloorNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::FloorNode::Input::INPUT)};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(input_index);
+
+ assert(_ctx.at(output_index).shape() == _ctx.at(input_index).shape());
+ assert(_ctx.at(output_index).typeInfo().type() == _ctx.at(input_index).typeInfo().type());
+}
+
+void OperationValidator::visit(const model::operation::HashtableLookupNode &node)
+{
+ const auto output_index{
+ node.getOutputs().at(model::operation::HashtableLookupNode::Output::OUTPUT)};
+ const auto hits_index{node.getOutputs().at(model::operation::HashtableLookupNode::Output::HITS)};
+
+ const auto lookups_index{
+ node.getInputs().at(model::operation::HashtableLookupNode::Input::LOOKUPS)};
+ const auto keys_index{node.getInputs().at(model::operation::HashtableLookupNode::Input::KEYS)};
+ const auto values_index{
+ node.getInputs().at(model::operation::HashtableLookupNode::Input::VALUES)};
+
+ const auto &output_obj = _ctx.at(output_index);
+ const auto &hits_obj = _ctx.at(hits_index);
+
+ const auto &lookups_obj = _ctx.at(lookups_index);
+ const auto &keys_obj = _ctx.at(keys_index);
+ const auto &values_obj = _ctx.at(values_index);
+
+ assert(lookups_obj.typeInfo().type() == neurun::model::DataType::INT32);
+ assert(keys_obj.typeInfo().type() == neurun::model::DataType::INT32);
+ assert(hits_obj.typeInfo().type() == neurun::model::DataType::QUANT8_ASYMM);
+
+ const auto &output_shape = output_obj.shape();
+ const auto &hits_shape = hits_obj.shape();
+
+ const auto &lookups_shape = lookups_obj.shape();
+ const auto &keys_shape = keys_obj.shape();
+ const auto &values_shape = values_obj.shape();
+
+ UNUSED_RELEASE(output_shape);
+ UNUSED_RELEASE(hits_shape);
+ UNUSED_RELEASE(lookups_shape);
+ UNUSED_RELEASE(keys_shape);
+ UNUSED_RELEASE(values_shape);
+
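+ // Illustrative example consistent with the checks below: keys {10}, values {10, 4},
+ // lookups {3} -> output {3, 4}; per NNAPI semantics hits would be a {3} QUANT8_ASYMM
+ // tensor, though its shape is not asserted here.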
+ assert(values_shape.rank() == output_shape.rank());
+ assert(lookups_shape.rank() == 1);
+ assert(keys_shape.rank() == 1);
+ assert(values_shape.dim(0) == keys_shape.dim(0));
+ assert(lookups_shape.dim(0) == output_shape.dim(0));
+}
+
+void OperationValidator::visit(const model::operation::TransposeConvNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto out_shape_index{
+ node.getInputs().at(model::operation::TransposeConvNode::Input::OUTPUT_SHAPE)};
+ const auto ifm_index{node.getInputs().at(model::operation::TransposeConvNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(model::operation::TransposeConvNode::Input::KERNEL)};
+
+ // Only 4D tensors are supported
+ assert(_ctx.at(ofm_index).shape().rank() == 4);
+ assert(_ctx.at(ofm_index).shape().rank() == _ctx.at(ifm_index).shape().rank());
+ assert(_ctx.at(ofm_index).shape().rank() == _ctx.at(ker_index).shape().rank());
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature(frontend_layout);
+ const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature(frontend_layout);
+ // The kernel is always given in IHWO layout on the frontend, so ker_shape below is read
+ // with the following mapping:
+ // I -> N
+ // H -> H
+ // W -> W
+ // O -> C
+ const auto ker_shape = _ctx.at(ker_index).shape().asFeature(model::Layout::NHWC);
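+ // e.g. with an NHWC ifm of {1, 4, 4, 3} and an IHWO kernel of {8, 3, 3, 3}, the checks
+ // below require ker O (3) == ifm channels and ofm channels == ker I (8).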
+
+ UNUSED_RELEASE(ofm_shape);
+ UNUSED_RELEASE(ifm_shape);
+ UNUSED_RELEASE(ker_shape);
+
+ assert((node.param().padding.type == model::PaddingType::SAME) ||
+ (node.param().padding.type == model::PaddingType::VALID));
+ assert(ifm_shape.N == ofm_shape.N);
+ assert(ifm_shape.C == ker_shape.C);
+ assert(ker_shape.N == ofm_shape.C);
+}
+
+void OperationValidator::visit(const model::operation::GatherNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+
+ const auto ifm_index{node.getInputs().at(model::operation::GatherNode::Input::INPUT)};
+ const auto indices_index{node.getInputs().at(model::operation::GatherNode::Input::INDICES)};
+
+ const auto axis_index{node.param().axis_index};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape();
+ const auto indices_shape = _ctx.at(indices_index).shape();
+ const auto axis_shape = _ctx.at(axis_index).shape();
+ const auto ofm_shape = _ctx.at(ofm_index).shape();
+
+ UNUSED_RELEASE(ifm_shape);
+ UNUSED_RELEASE(indices_shape);
+ UNUSED_RELEASE(axis_shape);
+ UNUSED_RELEASE(ofm_shape);
+
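+ // e.g. gathering with an ifm of {5, 8}, indices of {3}, and a constant scalar axis of 0
+ // is expected to produce an ofm of {3, 8}; only the rank bounds are asserted here.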
+ assert(ifm_shape.rank() <= 4);
+ assert(indices_shape.rank() <= 3);
+ assert(ofm_shape.rank() <= 4);
+ assert(_ctx.at(axis_index).isConstant());
+ assert(axis_shape.rank() == 0);
+}
+
+void OperationValidator::visit(const model::operation::DequantizeNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::DequantizeNode::Input::INPUT)};
+
+ UNUSED_RELEASE(output_index);
+ UNUSED_RELEASE(input_index);
+
+ assert(_ctx.at(input_index).shape().rank() <= 4);
+ assert(_ctx.at(input_index).shape() == _ctx.at(output_index).shape());
+ assert(_ctx.at(input_index).typeInfo().type() == neurun::model::DataType::QUANT8_ASYMM);
+ assert(_ctx.at(output_index).typeInfo().type() == neurun::model::DataType::FLOAT32);
+}
+
+void OperationValidator::visit(const model::operation::MeanNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::MeanNode::Input::INPUT)};
+
+ const auto ifm_shape = _ctx.at(ifm_index).shape();
+ const auto ofm_shape = _ctx.at(ofm_index).shape();
+
+ // NOTE For 4-dimensional input, if the ranks of input and output differ, this runtime only
+ // supports reducing height and width, or reducing depth.
+ // TODO We have to support all cases of dimensions up to 4.
+ // For correct permutation, the output's shape should keep the same dimension positions as
+ // the input, but the positions of matching dimensions may differ between input and output.
+ // For example, {2,3,4,5} (input shape) can be reduced to {3,5} (output shape). The canonical
+ // output shape would be {1,3,1,5}, but the actual output shape may be {3,5}; naively
+ // extending it to 4 dimensions gives {1,1,3,5} instead.
+ // Even if the output shape were corrected to {1,3,1,5}, another problem remains: the shape
+ // of the output tensor consumed by the next operation would then become {1,3,1,5}, even
+ // when the next operation does not expect it.
+ if (ifm_shape.rank() == 4 && ifm_shape.rank() != ofm_shape.rank())
+ {
+ if (ofm_shape.rank() == 2)
+ {
+ // Reducing HW
+ assert(ifm_shape.dim(0) == ofm_shape.dim(0) && ifm_shape.dim(3) == ofm_shape.dim(1));
+ }
+ else if (ofm_shape.rank() == 3)
+ {
+ // Reducing C or
+ // (Reducing H and C(ifm and ofm) == 1) or (Reducing W and C(ifm and ofm) == 1)
+ assert((ifm_shape.dim(0) == ofm_shape.dim(0) && ifm_shape.dim(1) == ofm_shape.dim(1) &&
+ ifm_shape.dim(2) == ofm_shape.dim(2)) ||
+ (ifm_shape.dim(0) == ofm_shape.dim(0) &&
+ (ifm_shape.dim(1) == ofm_shape.dim(1) || ifm_shape.dim(2) == ofm_shape.dim(1)) &&
+ ifm_shape.dim(3) == 1 && ofm_shape.dim(2) == 1));
+ }
+ }
+}
+
+void OperationValidator::visit(const model::operation::DepthToSpaceNode &node)
+{
+ const auto output_index{node.getOutputs().at(0)};
+ const auto input_index{node.getInputs().at(model::operation::DepthToSpaceNode::Input::INPUT)};
+ const auto block_size_index{node.param().block_size_index};
+
+ const auto frontend_layout = _current_subg_layout;
+ const auto output_shape = _ctx.at(output_index).shape().asFeature(frontend_layout);
+ const auto input_shape = _ctx.at(input_index).shape().asFeature(frontend_layout);
+
+ UNUSED_RELEASE(output_shape);
+ UNUSED_RELEASE(input_shape);
+
+ assert(_ctx.at(input_index).shape().rank() == 4);
+ assert(_ctx.at(output_index).shape().rank() == 4);
+
+ int32_t block_size = _ctx.at(block_size_index).asScalar<int32_t>();
+
+ UNUSED_RELEASE(block_size);
+
+ assert(block_size > 0);
+
+ { // assertions block
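+ // e.g. with block_size = 2, an NHWC input of {1, 2, 2, 12} must map to {1, 4, 4, 3}.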
+ assert(output_shape.N == input_shape.N);
+ assert(output_shape.H == input_shape.H * block_size);
+ assert(output_shape.W == input_shape.W * block_size);
+ assert(input_shape.C % (block_size * block_size) == 0);
+ assert(output_shape.C == input_shape.C / (block_size * block_size));
+ }
+}
+
+void OperationValidator::visit(const model::operation::ReduceMinNode &node)
+{
+ const auto ofm_index{node.getOutputs().at(0)};
+ const auto ifm_index{node.getInputs().at(model::operation::ReduceMinNode::Input::INPUT)};
+ const auto axis_index{node.param().axis_index};
+
+ auto ifm_shape = _ctx.at(ifm_index).shape();
+ auto ofm_shape = _ctx.at(ofm_index).shape();
+ auto axis_shape = _ctx.at(axis_index).shape();
+
+ UNUSED_RELEASE(ifm_shape);
+ UNUSED_RELEASE(ofm_shape);
+ UNUSED_RELEASE(axis_shape);
+
+ assert(ifm_shape.rank() <= 4);
+ assert(ofm_shape.rank() <= ifm_shape.rank());
+ assert(_ctx.at(axis_index).isConstant());
+ assert(axis_shape.rank() == 0 || axis_shape.rank() == 1);
+
+ // NOTE For 4-dimensional input, if the ranks of input and output differ, this runtime only
+ // supports reducing height and width, or reducing depth.
+ // TODO We have to support all cases of dimensions up to 4.
+ // For correct permutation, the output's shape should keep the same dimension positions as
+ // the input, but the positions of matching dimensions may differ between input and output.
+ // For example, {2,3,4,5} (input shape) can be reduced to {3,5} (output shape). The canonical
+ // output shape would be {1,3,1,5}, but the actual output shape may be {3,5}; naively
+ // extending it to 4 dimensions gives {1,1,3,5} instead.
+ // Even if the output shape were corrected to {1,3,1,5}, another problem remains: the shape
+ // of the output tensor consumed by the next operation would then become {1,3,1,5}, even
+ // when the next operation does not expect it.
+ if (ifm_shape.rank() == 4 && ifm_shape.rank() != ofm_shape.rank())
+ {
+ if (ofm_shape.rank() == 2)
+ {
+ // Reducing HW
+ assert(ifm_shape.dim(0) == ofm_shape.dim(0) && ifm_shape.dim(3) == ofm_shape.dim(1));
+ }
+ else if (ofm_shape.rank() == 3)
+ {
+ // Reducing C or
+ // (Reducing H and C(ifm and ofm) == 1) or (Reducing W and C(ifm and ofm) == 1)
+ assert((ifm_shape.dim(0) == ofm_shape.dim(0) && ifm_shape.dim(1) == ofm_shape.dim(1) &&
+ ifm_shape.dim(2) == ofm_shape.dim(2)) ||
+ (ifm_shape.dim(0) == ofm_shape.dim(0) &&
+ (ifm_shape.dim(1) == ofm_shape.dim(1) || ifm_shape.dim(2) == ofm_shape.dim(1)) &&
+ ifm_shape.dim(3) == 1 && ofm_shape.dim(2) == 1));
+ }
+ }
+}
+
+void OperationValidator::visit(const model::operation::LSTMNode &node)
+{
+ // NOTE This validation covers static RNN (non-dynamic shape) only, not dynamic RNN
+ // TODO Support dynamic RNN
+ const auto scratch_buffer_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::SCRATCH_BUFFER)};
+ const auto output_state_out_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::OUTPUT_STATE_OUT)};
+ const auto cell_state_out_index{
+ node.getOutputs().at(model::operation::LSTMNode::Output::CELL_STATE_OUT)};
+ const auto output_index{node.getOutputs().at(model::operation::LSTMNode::Output::OUTPUT)};
+
+ const auto input_index{node.getInputs().at(model::operation::LSTMNode::Input::INPUT)};
+ const auto input_to_input_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_INPUT_WEIGHTS)};
+ const auto input_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_FORGET_WEIGHTS)};
+ const auto input_to_cell_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_CELL_WEIGHTS)};
+ const auto input_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_TO_OUTPUT_WEIGHTS)};
+ const auto recurrent_to_input_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_INPUT_WEIGHTS)};
+ const auto recurrent_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_FORGET_WEIGHTS)};
+ const auto recurrent_to_cell_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_CELL_WEIGHTS)};
+ const auto recurrent_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::RECURRENT_TO_OUTPUT_WEIGHTS)};
+ const auto cell_to_input_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_INPUT_WEIGHTS)};
+ const auto cell_to_forget_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_FORGET_WEIGHTS)};
+ const auto cell_to_output_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_TO_OUTPUT_WEIGHTS)};
+ const auto input_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::INPUT_GATE_BIAS)};
+ const auto forget_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::FORGET_GATE_BIAS)};
+ const auto cell_bias_index{node.getInputs().at(model::operation::LSTMNode::Input::CELL_BIAS)};
+ const auto output_gate_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::OUTPUT_GATE_BIAS)};
+ const auto projection_weights_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::PROJECTION_WEIGHTS)};
+ const auto projection_bias_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::PROJECTION_BIAS)};
+ const auto output_state_in_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::OUTPUT_STATE_IN)};
+ const auto cell_state_in_index{
+ node.getInputs().at(model::operation::LSTMNode::Input::CELL_STATE_IN)};
+
+ UNUSED_RELEASE(scratch_buffer_index);
+ UNUSED_RELEASE(output_state_out_index);
+ UNUSED_RELEASE(cell_state_out_index);
+ UNUSED_RELEASE(output_index);
+
+ UNUSED_RELEASE(input_index);
+ UNUSED_RELEASE(input_to_input_weights_index);
+ UNUSED_RELEASE(input_to_forget_weights_index);
+ UNUSED_RELEASE(input_to_cell_weights_index);
+ UNUSED_RELEASE(input_to_output_weights_index);
+ UNUSED_RELEASE(recurrent_to_input_weights_index);
+ UNUSED_RELEASE(recurrent_to_forget_weights_index);
+ UNUSED_RELEASE(recurrent_to_cell_weights_index);
+ UNUSED_RELEASE(recurrent_to_output_weights_index);
+ UNUSED_RELEASE(cell_to_input_weights_index);
+ UNUSED_RELEASE(cell_to_forget_weights_index);
+ UNUSED_RELEASE(cell_to_output_weights_index);
+ UNUSED_RELEASE(input_gate_bias_index);
+ UNUSED_RELEASE(forget_gate_bias_index);
+ UNUSED_RELEASE(cell_bias_index);
+ UNUSED_RELEASE(output_gate_bias_index);
+ UNUSED_RELEASE(projection_weights_index);
+ UNUSED_RELEASE(projection_bias_index);
+ UNUSED_RELEASE(output_state_in_index);
+ UNUSED_RELEASE(cell_state_in_index);
+
+ assert(_ctx.at(scratch_buffer_index).shape().rank() == 2 &&
+ _ctx.at(output_state_out_index).shape().rank() == 2 &&
+ _ctx.at(cell_state_out_index).shape().rank() == 2 &&
+ _ctx.at(output_index).shape().rank() == 2 && _ctx.at(input_index).shape().rank() == 2 &&
+ _ctx.at(input_to_input_weights_index).shape().rank() == 2 &&
+ _ctx.at(input_to_forget_weights_index).shape().rank() == 2 &&
+ _ctx.at(input_to_cell_weights_index).shape().rank() == 2 &&
+ _ctx.at(input_to_output_weights_index).shape().rank() == 2 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().rank() == 2 &&
+ _ctx.at(recurrent_to_forget_weights_index).shape().rank() == 2 &&
+ _ctx.at(recurrent_to_cell_weights_index).shape().rank() == 2 &&
+ _ctx.at(recurrent_to_output_weights_index).shape().rank() == 2 &&
+ _ctx.at(projection_weights_index).shape().rank() == 2 &&
+ _ctx.at(output_state_in_index).shape().rank() == 2 &&
+ _ctx.at(cell_state_in_index).shape().rank() == 2);
+
+ assert(_ctx.at(cell_to_input_weights_index).shape().rank() == 1 &&
+ _ctx.at(cell_to_forget_weights_index).shape().rank() == 1 &&
+ _ctx.at(cell_to_output_weights_index).shape().rank() == 1 &&
+ _ctx.at(input_gate_bias_index).shape().rank() == 1 &&
+ _ctx.at(forget_gate_bias_index).shape().rank() == 1 &&
+ _ctx.at(cell_bias_index).shape().rank() == 1 &&
+ _ctx.at(output_gate_bias_index).shape().rank() == 1 &&
+ _ctx.at(projection_bias_index).shape().rank() == 1);
+
+ // CIFG assertion
+ assert((_ctx.at(input_to_input_weights_index).shape().dim(0) == 0 &&
+ _ctx.at(input_to_input_weights_index).shape().dim(1) == 0 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(0) == 0 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(1) == 0 &&
+ _ctx.at(input_gate_bias_index).shape().dim(0) == 0 &&
+ _ctx.at(cell_to_input_weights_index).shape().dim(0) == 0) ||
+ (_ctx.at(input_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(input_to_input_weights_index).shape().dim(1) != 0 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(1) != 0 &&
+ _ctx.at(input_gate_bias_index).shape().dim(0) != 0));
+
+ // Peephole assertion
+ assert((_ctx.at(cell_to_forget_weights_index).shape().dim(0) == 0 &&
+ _ctx.at(cell_to_output_weights_index).shape().dim(0) == 0) ||
+ (_ctx.at(cell_to_forget_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(cell_to_output_weights_index).shape().dim(0) != 0));
+
+ bool has_input_to_input_weights = _ctx.at(input_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(input_to_input_weights_index).shape().dim(1) != 0;
+ bool has_recurrent_to_input_weights =
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(recurrent_to_input_weights_index).shape().dim(1) != 0;
+ bool has_input_gate_bias = _ctx.at(input_gate_bias_index).shape().dim(0) != 0;
+ bool has_cell_to_input_weights = _ctx.at(cell_to_input_weights_index).shape().dim(0) != 0;
+ bool has_cell_to_forget_weights = _ctx.at(cell_to_forget_weights_index).shape().dim(0) != 0;
+ bool has_cell_to_output_weights = _ctx.at(cell_to_output_weights_index).shape().dim(0) != 0;
+ bool has_projection_weights = _ctx.at(projection_weights_index).shape().dim(0) != 0 &&
+ _ctx.at(projection_weights_index).shape().dim(1) != 0;
+ bool has_projection_bias = _ctx.at(projection_bias_index).shape().dim(0) != 0;
+
+ // NOTE The cell_to_input_weights may be absent even in a regular (non-CIFG) LSTM when
+ // peephole connections are not used.
+ // true: non-CIFG (input gate present)
+ // false: CIFG
+ bool has_cifg_param = has_input_to_input_weights && has_recurrent_to_input_weights;
+
+ // NOTE The cell_to_input_weights do not exist in CIFG mode even when peephole connections
+ // are used.
+ // true: peephole
+ // false: no peephole
+ bool has_peephole_param = has_cell_to_forget_weights && has_cell_to_output_weights;
+
+ // NOTE The projection weights may have data but the projection bias may not.
+ bool has_projection_param = has_projection_weights;
+
+ UNUSED_RELEASE(has_input_to_input_weights);
+ UNUSED_RELEASE(has_recurrent_to_input_weights);
+ UNUSED_RELEASE(has_input_gate_bias);
+ UNUSED_RELEASE(has_cell_to_input_weights);
+ UNUSED_RELEASE(has_cell_to_forget_weights);
+ UNUSED_RELEASE(has_cell_to_output_weights);
+ UNUSED_RELEASE(has_projection_weights);
+ UNUSED_RELEASE(has_projection_bias);
+ UNUSED_RELEASE(has_cifg_param);
+ UNUSED_RELEASE(has_peephole_param);
+ UNUSED_RELEASE(has_projection_param);
+
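+ // Illustrative consequence of the size checks below: a non-CIFG LSTM needs a
+ // scratch_buffer of {batch_size, num_units * 4}, while a CIFG LSTM needs
+ // {batch_size, num_units * 3}.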
+ const auto batch_size = _ctx.at(input_index).shape().dim(0);
+ UNUSED_RELEASE(batch_size);
+ assert(batch_size == _ctx.at(output_state_in_index).shape().dim(0) &&
+ batch_size == _ctx.at(cell_state_in_index).shape().dim(0) &&
+ batch_size == _ctx.at(scratch_buffer_index).shape().dim(0) &&
+ batch_size == _ctx.at(output_state_out_index).shape().dim(0) &&
+ batch_size == _ctx.at(cell_state_out_index).shape().dim(0) &&
+ batch_size == _ctx.at(output_index).shape().dim(0));
+
+ const auto input_size = _ctx.at(input_index).shape().dim(1);
+ UNUSED_RELEASE(input_size);
+ assert(input_size == _ctx.at(input_to_forget_weights_index).shape().dim(1) &&
+ input_size == _ctx.at(input_to_cell_weights_index).shape().dim(1) &&
+ input_size == _ctx.at(input_to_output_weights_index).shape().dim(1));
+
+ const auto num_units = _ctx.at(cell_state_out_index).shape().dim(1);
+ UNUSED_RELEASE(num_units);
+ assert(num_units == _ctx.at(input_to_forget_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(input_to_cell_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(input_to_output_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(recurrent_to_forget_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(recurrent_to_cell_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(recurrent_to_output_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(forget_gate_bias_index).shape().dim(0) &&
+ num_units == _ctx.at(cell_bias_index).shape().dim(0) &&
+ num_units == _ctx.at(output_gate_bias_index).shape().dim(0) &&
+ num_units == _ctx.at(cell_state_in_index).shape().dim(1) &&
+ (((num_units * 3) == _ctx.at(scratch_buffer_index).shape().dim(1)) ||
+ ((num_units * 4) == _ctx.at(scratch_buffer_index).shape().dim(1))));
+
+ const auto output_size = _ctx.at(output_index).shape().dim(1);
+ UNUSED_RELEASE(output_size);
+ assert(output_size == _ctx.at(recurrent_to_forget_weights_index).shape().dim(1) &&
+ output_size == _ctx.at(recurrent_to_cell_weights_index).shape().dim(1) &&
+ output_size == _ctx.at(recurrent_to_output_weights_index).shape().dim(1) &&
+ output_size == _ctx.at(output_state_in_index).shape().dim(1) &&
+ output_size == _ctx.at(output_state_out_index).shape().dim(1));
+
+ if (has_cifg_param)
+ {
+ assert(input_size == _ctx.at(input_to_input_weights_index).shape().dim(1));
+ assert(num_units == _ctx.at(input_to_input_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(recurrent_to_input_weights_index).shape().dim(0) &&
+ (num_units == _ctx.at(cell_to_input_weights_index).shape().dim(0) ||
+ _ctx.at(cell_to_input_weights_index).shape().dim(0) == 0 /* non-peephole */) &&
+ num_units == _ctx.at(input_gate_bias_index).shape().dim(0));
+ assert(output_size == _ctx.at(recurrent_to_input_weights_index).shape().dim(1));
+ assert(has_input_to_input_weights && has_recurrent_to_input_weights && has_input_gate_bias);
+ if (has_cell_to_input_weights)
+ {
+ // NOTE The cell_to_input_weights exist only in case of non-CIFG and peephole.
+ assert(has_peephole_param);
+ }
+ assert(_ctx.at(scratch_buffer_index).shape().dim(1) == num_units * 4);
+ }
+ else
+ {
+ assert(_ctx.at(scratch_buffer_index).shape().dim(1) == num_units * 3);
+ }
+
+ if (has_peephole_param)
+ {
+ assert(num_units == _ctx.at(cell_to_forget_weights_index).shape().dim(0) &&
+ num_units == _ctx.at(cell_to_output_weights_index).shape().dim(0) &&
+ (num_units == _ctx.at(cell_to_input_weights_index).shape().dim(0) ||
+ _ctx.at(cell_to_input_weights_index).shape().dim(0) == 0 /* CIFG */));
+ }
+
+ if (has_projection_param)
+ {
+ assert(num_units == _ctx.at(projection_weights_index).shape().dim(1));
+ assert(output_size == _ctx.at(projection_weights_index).shape().dim(0));
+ if (has_projection_bias)
+ {
+ assert(output_size == _ctx.at(projection_bias_index).shape().dim(0));
+ }
+ }
+}
+
+void OperationValidator::visit(const model::operation::UnpackNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::UnpackNode::Input::INPUT)};
+ const auto num{node.param().num};
+ const auto axis{node.param().axis};
+
+ const auto &input_shape = _ctx.at(input_index).shape();
+ const auto input_rank = static_cast<int32_t>(input_shape.rank());
+
+ UNUSED_RELEASE(num);
+ UNUSED_RELEASE(axis);
+ UNUSED_RELEASE(input_rank);
+
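+ // e.g. unpacking a {3, 2, 4} input with num = 3 along axis 0 is expected to produce
+ // three outputs of shape {2, 4}; only the output count and axis range are asserted here.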
+ assert(num == static_cast<int32_t>(node.getOutputs().size()));
+ assert(axis >= -input_rank && axis < input_rank);
+}
+
+void OperationValidator::visit(const model::operation::PadNode &node)
+{
+ const auto input_index{node.getInputs().at(model::operation::PadNode::Input::INPUT)};
+ const auto pad_index{node.getInputs().at(model::operation::PadNode::Input::PAD)};
+ const auto output_index{node.getOutputs().at(0)};
+
+ const auto &pad_shape = _ctx.at(pad_index).shape();
+ const auto input_rank = static_cast<int32_t>(_ctx.at(input_index).shape().rank());
+
+ UNUSED_RELEASE(pad_shape);
+ UNUSED_RELEASE(input_rank);
+ UNUSED_RELEASE(output_index);
+
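+ // e.g. a rank-2 input requires an INT32 paddings tensor of shape {2, 2}, such as
+ // {{1, 1}, {2, 2}} for one row of padding on each side and two columns on each side.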
+ assert(pad_shape.rank() == 2);
+ assert(pad_shape.dim(0) == input_rank);
+ assert(pad_shape.dim(1) == 2);
+ assert(_ctx.at(pad_index).typeInfo().type() == model::DataType::INT32);
+ assert(_ctx.at(input_index).shape().rank() == _ctx.at(output_index).shape().rank());
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/OperationValidator.h b/runtimes/neurun/core/src/compiler/OperationValidator.h
new file mode 100644
index 000000000..76774daeb
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/OperationValidator.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_COMPILER_OPERATION_VALIDATOR_H__
+#define __NEURUN_COMPILER_OPERATION_VALIDATOR_H__
+
+#include "model/Layout.h"
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+class Operands;
+} // namespace model
+} // namespace neurun
+
+namespace neurun
+{
+namespace compiler
+{
+
+class OperationValidator : public model::OperationVisitor
+{
+public:
+ OperationValidator(const neurun::model::Operands &ctx)
+ : _ctx{ctx}, _current_subg_layout{model::Layout::UNKNOWN}
+ {
+ }
+
+public:
+ void visit(const model::Subgraph &node) override;
+ void visit(const model::operation::CastNode &node) override;
+ void visit(const model::operation::ComparisonNode &node) override;
+ void visit(const model::operation::SoftmaxNode &node) override;
+ void visit(const model::operation::PermuteNode &node) override;
+ void visit(const model::operation::ReduceSumNode &node) override;
+ void visit(const model::operation::TransposeNode &node) override;
+ void visit(const model::operation::ReduceMaxNode &node) override;
+ void visit(const model::operation::RNNNode &node) override;
+ void visit(const model::operation::SpaceToDepthNode &node) override;
+ void visit(const model::operation::EmbeddingLookupNode &node) override;
+ void visit(const model::operation::ExpNode &node) override;
+ void visit(const model::operation::FloorNode &node) override;
+ void visit(const model::operation::HashtableLookupNode &node) override;
+ void visit(const model::operation::TransposeConvNode &node) override;
+ void visit(const model::operation::GatherNode &node) override;
+ void visit(const model::operation::DequantizeNode &node) override;
+ void visit(const model::operation::MeanNode &node) override;
+ void visit(const model::operation::DepthToSpaceNode &node) override;
+ void visit(const model::operation::ReduceMinNode &node) override;
+ void visit(const model::operation::LSTMNode &node) override;
+ void visit(const model::operation::UnpackNode &node) override;
+ void visit(const model::operation::PadNode &node) override;
+
+private:
+ const neurun::model::Operands &_ctx;
+ model::Layout _current_subg_layout;
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_OPERATION_VALIDATOR_H__
diff --git a/runtimes/neurun/core/src/compiler/ParamChecker.cc b/runtimes/neurun/core/src/compiler/ParamChecker.cc
new file mode 100644
index 000000000..10bfa1ea3
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/ParamChecker.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParamChecker.h"
+
+#include "graph/Graph.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+void ParamChecker::operator()()
+{
+ _model->operations().iterate(
+ [&](const model::OperationIndex &, const model::Operation &node) { node.accept(*this); });
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/ParamChecker.h b/runtimes/neurun/core/src/compiler/ParamChecker.h
new file mode 100644
index 000000000..82f46692d
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/ParamChecker.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file ParamChecker.h
+ * @brief This file contains ParamChecker, which checks\n
+ *        whether operations' parameters are compilable at the machine-independent phase\n
+ *        ex) Check whether a param is constant
+ */
+#ifndef __NEURUN_COMPILER_PARAM_CHECKER_H__
+#define __NEURUN_COMPILER_PARAM_CHECKER_H__
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace graph
+{
+class Graph;
+} // namespace graph
+} // namespace neurun
+
+namespace neurun
+{
+namespace compiler
+{
+
+class ParamChecker : public model::OperationVisitor
+{
+public:
+ /**
+ * @brief Construct a new Param Checker object (deleted)
+ */
+ ParamChecker(void) = delete;
+ /**
+ * @brief Construct a new Param Checker object
+ * @param[in] model Graph model to check
+ */
+ ParamChecker(std::shared_ptr<graph::Graph> model) : _model{model} {}
+
+public:
+ /**
+ * @brief Run parameter analysis
+ */
+ void operator()();
+ /**
+ * @brief Return whether the model has a non-const parameter
+ * @return @c true if there is a non-const parameter, otherwise @c false
+ */
+ bool haveNoneConstParam(void) { return _nonConstParam; }
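+
+ // Illustrative usage (given a std::shared_ptr<graph::Graph> named graph):
+ //   ParamChecker checker{graph};
+ //   checker();
+ //   if (checker.haveNoneConstParam()) { /* the model cannot be compiled as-is */ }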
+
+private:
+ const std::shared_ptr<graph::Graph> _model;
+ bool _nonConstParam{false};
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_PARAM_CHECKER_H__
diff --git a/runtimes/neurun/core/src/compiler/SubTensorAnalyzer.cc b/runtimes/neurun/core/src/compiler/SubTensorAnalyzer.cc
new file mode 100644
index 000000000..c2c6da290
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/SubTensorAnalyzer.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SubTensorAnalyzer.h"
+
+#include <typeinfo>
+
+#include "cpp14/memory.h"
+#include "model/OperandIndexSequence.h"
+#include "util/logging.h"
+#include "util/Coordinates.h"
+
+namespace neurun
+{
+namespace compiler
+{
+
+void SubTensorAnalyzer::visit(const model::operation::ConcatNode &node)
+{
+ // If the operator is concat (or another operator involving subsumption), fill in the
+ // subsumption info
+ // TODO: handle the case where one tensor is a subset of multiple parents or of a model input
+ // Solution 1. Handle the 1st parent only, ignore others (need to invert for other children)
+ // Solution 2. Insert copy operation for other parents
+ int32_t axis_raw = node.param().axis;
+
+ auto &output_index = node.getOutputs().at(0);
+ auto &inputs = node.getInputs();
+
+ int32_t axis_point = 0;
+ const auto rank = _ctx.at(output_index).shape().rank();
+ int32_t axis = axis_raw < 0 ? (axis_raw + rank) : axis_raw;
+ assert(rank > axis);
+
+ // NOTE Multiple parent tensors are not supported yet
+ for (auto &input_index : inputs)
+ {
+ if (_ctx.at(input_index).parent_info() != nullptr)
+ {
+ return;
+ }
+ }
+
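+ // e.g. concatenating inputs of {2, 3} and {2, 5} along axis 1 yields an output of {2, 8};
+ // the loop below records that the sub-tensors start at offsets 0 and 3 along that axis.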
+ for (auto &input_index : inputs)
+ {
+ auto input_shape = _ctx.at(input_index).shape();
+ assert(rank == input_shape.rank());
+
+ neurun::util::Coordinates coordinate_info{};
+ for (int i = 0; i < rank; i++)
+ {
+ coordinate_info.set(i, 0);
+ }
+ coordinate_info.set(axis, axis_point);
+
+ std::unique_ptr<graph::operand::ParentInfo> parentInfo =
+ nnfw::cpp14::make_unique<graph::operand::ParentInfo>(output_index, coordinate_info);
+
+ _ctx.at(input_index).parent_info(std::move(parentInfo));
+
+ axis_point += input_shape.dim(axis);
+ }
+}
+
+} // namespace compiler
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/compiler/SubTensorAnalyzer.h b/runtimes/neurun/core/src/compiler/SubTensorAnalyzer.h
new file mode 100644
index 000000000..606d755b7
--- /dev/null
+++ b/runtimes/neurun/core/src/compiler/SubTensorAnalyzer.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file SubTensorAnalyzer.h
+ * @brief This file contains SubTensorAnalyzer to analyze tensor subsumption
+ * using operation visitor
+ */
+
+#ifndef __NEURUN_COMPILER_SUBTENSOR_ANALYZER_H__
+#define __NEURUN_COMPILER_SUBTENSOR_ANALYZER_H__
+
+#include "model/OperationVisitor.h"
+#include "graph/Graph.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operand
+{
+class Set;
+} // namespace operand
+} // namespace model
+} // namespace neurun
+
+namespace neurun
+{
+namespace compiler
+{
+
+/**
+ * @brief Class to analyze tensor subsumption
+ */
+class SubTensorAnalyzer : public model::OperationVisitor
+{
+public:
+ /**
+ * @brief Construct a new SubTensorAnalyzer object
+ * @param[in] ctx Graph operand set
+ */
+ SubTensorAnalyzer(neurun::model::Operands &ctx) : _ctx{ctx}
+ {
+ // DO NOTHING
+ }
+
+public:
+ void visit(const model::operation::ConcatNode &) override;
+
+private:
+ neurun::model::Operands &_ctx; // TODO Refactor : Do not update Operands
+};
+
+} // namespace compiler
+} // namespace neurun
+
+#endif // __NEURUN_COMPILER_SUBTENSOR_ANALYZER_H__
diff --git a/runtimes/neurun/core/src/dumper/dot/DotBuilder.cc b/runtimes/neurun/core/src/dumper/dot/DotBuilder.cc
new file mode 100644
index 000000000..8563b4cf0
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/DotBuilder.cc
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "DotBuilder.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+// DotBuilder
+DotBuilder::DotBuilder() {}
+
+void DotBuilder::update(const Node &node_info)
+{
+ addNode(node_info);
+ for (auto edge : node_info.edges())
+ {
+ addEdge(node_info, *edge);
+ }
+}
+
+void DotBuilder::addSubgraph(const DotSubgraphInfo &subgraph_info)
+{
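+ // Emits, for example:
+ //   subgraph cluster_0 {
+ //     label="subgraph label";
+ //     style=filled;
+ //     color=lightgrey;
+ //     operation1; operand2;
+ //   }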
+ _dot << "subgraph cluster_" << subgraph_info.index().value() << " {\n";
+ _dot << " label=\"" << subgraph_info.label() << "\";\n";
+ _dot << " style=filled;\n";
+ _dot << " color=lightgrey;\n";
+ _dot << " ";
+ for (auto op : subgraph_info.operations())
+ {
+ _dot << "operation" << op.value() << "; ";
+ }
+ for (auto op : subgraph_info.operands())
+ {
+ _dot << "operand" << op.value() << "; ";
+ }
+ _dot << "\n";
+ _dot << "}\n";
+}
+
+void DotBuilder::writeDot(std::ostream &os)
+{
+ os << "digraph D {\n"
+ << _dot.str() << "\n"
+ << "}\n";
+}
+
+void DotBuilder::addNode(const Node &node)
+{
+ _dot << node.id();
+ _dot << "[";
+ for (auto attr : node.attributes())
+ {
+ _dot << attr.first << "=\"" << attr.second << "\" ";
+ }
+ _dot << "];\n";
+}
+
+void DotBuilder::addEdge(const Node &node1, const Node &node2)
+{
+ _dot << node1.id() << " -> " << node2.id() << ";\n";
+}
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/dumper/dot/DotBuilder.h b/runtimes/neurun/core/src/dumper/dot/DotBuilder.h
new file mode 100644
index 000000000..b78fd4469
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/DotBuilder.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_DUMPER_DOT_DOT_BUILDER_H__
+#define __NEURUN_DUMPER_DOT_DOT_BUILDER_H__
+
+#include <sstream>
+
+#include "model/Index.h"
+#include "model/Operation.h"
+#include "model/Operand.h"
+
+#include "OperationNode.h"
+#include "OperandNode.h"
+#include "DotSubgraphInfo.h"
+
+using Operation = neurun::model::Operation;
+using Object = neurun::model::Operand;
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+class DotBuilder
+{
+public:
+ DotBuilder();
+
+public:
+ void update(const Node &dotinfo);
+ void addSubgraph(const DotSubgraphInfo &subgraph_info);
+
+ void writeDot(std::ostream &os);
+
+private:
+ void addNode(const Node &dotinfo);
+ void addEdge(const Node &dotinfo1, const Node &dotinfo2);
+
+ std::stringstream _dot;
+};
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
+
+#endif // __NEURUN_DUMPER_DOT_DOT_BUILDER_H__
diff --git a/runtimes/neurun/core/src/dumper/dot/DotDumper.cc b/runtimes/neurun/core/src/dumper/dot/DotDumper.cc
new file mode 100644
index 000000000..d01b472c3
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/DotDumper.cc
@@ -0,0 +1,198 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <unordered_map>
+
+#include "DotDumper.h"
+#include "DotBuilder.h"
+#include "DotSubgraphInfo.h"
+#include "model/Subgraph.h"
+#include "model/OperationIndexMap.h"
+#include "backend/Backend.h"
+#include "backend/BackendManager.h"
+#include "backend/IConfig.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+using namespace neurun::graph;
+
+void DotDumper::dump(const std::string &tag)
+{
+ if (_level == Level::OFF)
+ {
+ return;
+ }
+
+ neurun::dumper::dot::DotBuilder dot_builder;
+
+ auto &operations = _graph.operations();
+ auto &operands = _graph.operands();
+
+ model::OperationIndexMap<std::unique_ptr<OperationNode>> operation_nodes;
+ std::unordered_map<model::OperandIndex, std::unique_ptr<OperandNode>> operand_nodes;
+
+ operations.iterate([&](const model::OperationIndex &index, const model::Operation &op) {
+ auto node = nnfw::cpp14::make_unique<OperationNode>(index, op);
+
+ for (auto output : op.getOutputs())
+ {
+ using neurun::dumper::dot::OperandNode;
+ auto child = std::make_shared<OperandNode>(output, OperandNode::Type::MODEL_OUTPUT);
+ node->addEdge(child);
+ }
+
+ operation_nodes.emplace(index, std::move(node));
+ });
+
+ auto backend_to_fillcolor = [](const backend::Backend *backend) {
+ static const auto map = []() {
+ std::unordered_map<const backend::Backend *, std::string> ret;
+ uint32_t index = 1; // Start from 1 to avoid 0(red) which is too dark :(
+ for (const auto backend : backend::BackendManager::instance().getAll())
+ {
+ ret.emplace(backend, Node::BG_COLORS[index]);
+ index = (index + 1) % (sizeof(Node::BG_COLORS) / sizeof(Node::BG_COLORS[0]));
+ }
+ return ret;
+ }();
+
+ auto itr = map.find(backend);
+ if (itr == map.end())
+ {
+ return Node::DEFAULT_FILLCOLOR;
+ }
+ else
+ {
+ return itr->second;
+ }
+ };
+
+ util::Set<model::OperandIndex> shown_operand_set;
+
+ operands.iterate([&](const model::OperandIndex &index, const model::Operand &object) {
+ bool showing_cond = false;
+ if (_level == Level::ALL)
+ {
+ showing_cond = true;
+ }
+ else
+ {
+ showing_cond = !object.isConstant();
+ }
+ if (object.isConstant() || _graph.getInputs().contains(index))
+ {
+ showing_cond = showing_cond && (object.getUses().size() > 0);
+ }
+ if (showing_cond)
+ {
+ shown_operand_set.add(index);
+
+ auto type = [&]() {
+ using neurun::dumper::dot::OperandNode;
+ if (_graph.getInputs().contains(index))
+ return OperandNode::Type::MODEL_INPUT;
+ if (_graph.getOutputs().contains(index))
+ return OperandNode::Type::MODEL_OUTPUT;
+ return OperandNode::Type::INTERNAL;
+ }();
+
+ auto lower_info = _graph.getLowerInfo(index);
+ auto node = nnfw::cpp14::make_unique<OperandNode>(index, type);
+
+ {
+ // Display LowerInfo attributes
+ std::string label = std::to_string(index.value());
+ std::string fillcolor = "";
+ if (lower_info)
+ {
+ const auto &def_factors = lower_info->def_factors();
+ label += "\\n[";
+ label += def_factors.getOnlyElement().backend()->config()->id();
+ label += "]";
+
+ fillcolor = backend_to_fillcolor(lower_info->def_factors().getOnlyElement().backend());
+ }
+ node->setAttribute("label", label);
+ node->setAttribute("fillcolor", fillcolor);
+ }
+
+ for (auto operation_index : object.getUses().list())
+ {
+ auto &operation = operations.at(operation_index);
+ auto child = std::make_shared<OperationNode>(operation_index, operation);
+ node->addEdge(child);
+ }
+
+ operand_nodes.emplace(index, std::move(node));
+ }
+ });
+
+ const auto subgraphs = _graph.subgraphs();
+ if (subgraphs)
+ {
+ subgraphs->iterate([&](const model::SubgraphIndex &index, const model::Subgraph &subgraph) {
+ const auto lower_info = _graph.getLowerInfo(index);
+ auto fillcolor = backend_to_fillcolor(lower_info->backend());
+ std::string label =
+ std::to_string(index.value()) + " [" + lower_info->backend()->config()->id() + "]";
+ DotSubgraphInfo subgraph_info{index, subgraph, shown_operand_set};
+ subgraph_info.label(label);
+ subgraph_info.fillcolor(fillcolor);
+ dot_builder.addSubgraph(subgraph_info);
+
+ // Set fillcolor of all operations in the subgraph
+ for (const auto &op : subgraph.operations())
+ {
+ auto found = operation_nodes.find(op.index);
+ if (found != operation_nodes.end())
+ {
+ auto &&op = found->second;
+ op->setAttribute("fillcolor", fillcolor);
+ }
+ }
+ });
+ }
+
+ for (const auto &e : operation_nodes)
+ dot_builder.update(*e.second);
+ for (const auto &e : operand_nodes)
+ dot_builder.update(*e.second);
+
+ // Dump to file
+ {
+ std::string file_name;
+ file_name += tag;
+ file_name += ".dot";
+ std::filebuf fb;
+
+ fb.open(file_name, std::ios::out);
+ std::ostream os(&fb);
+
+ dot_builder.writeDot(os);
+
+ fb.close();
+ }
+}
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/dumper/dot/DotDumper.h b/runtimes/neurun/core/src/dumper/dot/DotDumper.h
new file mode 100644
index 000000000..4ccaac882
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/DotDumper.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "graph/Graph.h"
+
+#ifndef __NEURUN_DUMPER_DOT_DOT_DUMPER_H__
+#define __NEURUN_DUMPER_DOT_DOT_DUMPER_H__
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+class DotDumper
+{
+public:
+ enum Level
+ {
+ OFF = 0,               ///< Do not dump
+ ALL_BUT_CONSTANTS = 1, ///< Emit all operations and operands but constants
+ ALL = 2                ///< Emit all operations and operands
+ };
+
+public:
+ DotDumper(const neurun::graph::Graph &graph, Level level) : _graph(graph), _level{level} {}
+
+public:
+ /**
+ * @brief Dump the graph to a dot file named after the tag, if "GRAPH_DOT_DUMP" is set
+ *
+ * @param[in] tag The name of dot file that would be created
+ * @return N/A
+ */
+ void dump(const std::string &tag);
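+
+ // Illustrative usage (tag name is the caller's choice):
+ //   DotDumper dumper{graph, DotDumper::Level::ALL};
+ //   dumper.dump("after_lowering"); // writes "after_lowering.dot"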
+
+private:
+ const neurun::graph::Graph &_graph;
+ Level _level;
+};
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
+
+#endif // __NEURUN_DUMPER_DOT_DOT_DUMPER_H__
diff --git a/runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.cc b/runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.cc
new file mode 100644
index 000000000..1ea681bdb
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.cc
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "DotSubgraphInfo.h"
+
+#include <sstream>
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+DotSubgraphInfo::DotSubgraphInfo(const model::SubgraphIndex &index, const model::Subgraph &subgraph,
+ const util::Set<model::OperandIndex> &shown_operands)
+ : _index{index}
+{
+ for (const auto &element : subgraph.operations())
+ {
+ _operations.insert(element.index);
+ for (auto o : element.node->getInputs())
+ {
+ // Must be a shown operand, not subgraph's inputs
+ if (shown_operands.contains(o) && !subgraph.getInputs().contains(o))
+ {
+ _operands.insert(o);
+ }
+ }
+ for (auto o : element.node->getOutputs())
+ {
+ // Must be a shown operand, not subgraph's outputs
+ if (shown_operands.contains(o) && !subgraph.getOutputs().contains(o))
+ {
+ _operands.insert(o);
+ }
+ }
+ }
+}
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.h b/runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.h
new file mode 100644
index 000000000..771c5552e
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/DotSubgraphInfo.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_CORE_DUMPER_DOT_DOT_SUBGRAPH_INFO_H__
+#define __NEURUN_CORE_DUMPER_DOT_DOT_SUBGRAPH_INFO_H__
+
+#include <unordered_set>
+
+#include "model/Index.h"
+#include "model/Subgraph.h"
+#include "util/Set.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+class DotSubgraphInfo
+{
+public:
+ DotSubgraphInfo(const model::SubgraphIndex &index, const model::Subgraph &subgraph,
+ const util::Set<model::OperandIndex> &shown_operands);
+
+ model::SubgraphIndex index() const { return _index; }
+ std::string label() const { return _label; }
+ void label(const std::string &val) { _label = val; }
+ std::string fillcolor() const { return _fillcolor; }
+ void fillcolor(const std::string &val) { _fillcolor = val; }
+ const std::unordered_set<model::OperationIndex> &operations() const { return _operations; }
+ const std::unordered_set<model::OperandIndex> &operands() const { return _operands; }
+
+private:
+ model::SubgraphIndex _index;
+ std::string _label;
+ std::string _fillcolor;
+ std::unordered_set<model::OperationIndex> _operations;
+ std::unordered_set<model::OperandIndex> _operands;
+};
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
+
+#endif // __NEURUN_CORE_DUMPER_DOT_DOT_SUBGRAPH_INFO_H__
diff --git a/runtimes/neurun/core/src/dumper/dot/Node.cc b/runtimes/neurun/core/src/dumper/dot/Node.cc
new file mode 100644
index 000000000..166f0f40f
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/Node.cc
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Node.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+const std::string Node::DEFAULT_COLORSCHEME = "x11";
+const std::string Node::DEFAULT_FILLCOLOR = "white";
+// RED, BLUE, GREEN, PURPLE, ORANGE, YELLOW, BROWN, PINK
+const std::string Node::BG_COLORS[8] = {"1", "2", "3", "4", "5", "6", "7", "8"};
+
+Node::Node(const std::string &id) : _id{id}
+{
+ // Set default values
+ _attributes["style"] = "filled";
+ _attributes["colorscheme"] = DEFAULT_COLORSCHEME;
+ _attributes["fillcolor"] = DEFAULT_FILLCOLOR;
+}
+
+void Node::setAttribute(const std::string &key, const std::string &val) { _attributes[key] = val; }
+
+std::string Node::getAttribute(const std::string &key)
+{
+ auto itr = _attributes.find(key);
+ if (itr == _attributes.end())
+ {
+ return "";
+ }
+ else
+ {
+ return itr->second;
+ }
+}
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/dumper/dot/Node.h b/runtimes/neurun/core/src/dumper/dot/Node.h
new file mode 100644
index 000000000..364cb08a4
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/Node.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Node.h
+ * @brief This file contains Node class
+ * @ingroup COM_AI_RUNTIME
+ *
+ */
+
+#ifndef __NEURUN_DUMPER_DOT_NODE_H__
+#define __NEURUN_DUMPER_DOT_NODE_H__
+
+#include <string>
+#include <memory>
+#include <vector>
+#include <unordered_map>
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+enum BGCOLORS : int
+{
+ RED,
+ BLUE,
+ GREEN,
+ PURPLE,
+ ORANGE,
+ YELLOW,
+ BROWN,
+ PINK
+};
+
+/**
+ * @brief Class that represents a Node in "dot" format
+ *
+ */
+class Node
+{
+public:
+ const static std::string DEFAULT_FILLCOLOR;
+ const static std::string DEFAULT_COLORSCHEME;
+ const static std::string BG_COLORS[8];
+
+public:
+ /**
+ * @brief Destroy the Node object
+ *
+ */
+ virtual ~Node() = default;
+
+ /**
+ * @brief Construct a new Node object
+ *
+ * @param id
+ */
+ Node(const std::string &id);
+
+ /**
+ * @brief return id
+ *
+ * @return id
+ */
+ std::string id() const { return _id; }
+
+ /**
+ * @brief return attributes
+ *
+ * @return const reference of attributes object
+ */
+ const std::unordered_map<std::string, std::string> &attributes() const { return _attributes; }
+ /**
+ * @brief Store an attribute with key-value pair
+ *
+ * @param[in] key attribute's key
+ * @param[in] val attribute's value that is associated with the key
+ */
+ void setAttribute(const std::string &key, const std::string &val);
+ /**
+ * @brief Get the attribute value that is associated with key
+ *
+ * @param[in] key key of the attribute
+ * @return value that is associated with the key
+ */
+ std::string getAttribute(const std::string &key);
+
+ /**
+ * @brief Add an edge in the graph, which is an outgoing edge
+ *
+ * @param[in] dotinfo A node that the new edge will be connected to
+ */
+ void addEdge(std::shared_ptr<Node> dotinfo) { _children.emplace_back(dotinfo); }
+ /**
+ * @brief Return list of edges
+ *
+ * @return Edges
+ */
+ const std::vector<std::shared_ptr<Node>> &edges() const { return _children; }
+
+private:
+ std::string _id;
+ std::unordered_map<std::string, std::string> _attributes;
+ std::vector<std::shared_ptr<Node>> _children;
+};
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
+
+#endif // __NEURUN_DUMPER_DOT_NODE_H__
diff --git a/runtimes/neurun/core/src/dumper/dot/OperandNode.cc b/runtimes/neurun/core/src/dumper/dot/OperandNode.cc
new file mode 100644
index 000000000..338dfc4b6
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/OperandNode.cc
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sstream>
+
+#include "OperandNode.h"
+#include "graph/Graph.h"
+#include "graph/operand/LowerInfo.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+const std::string OperandNode::INPUT_SHAPE = "doublecircle";
+const std::string OperandNode::OUTPUT_SHAPE = "doublecircle";
+const std::string OperandNode::OPERAND_SHAPE = "ellipse";
+const std::string OperandNode::BG_COLOR_SCHEME = "set18";
+
+OperandNode::OperandNode(const neurun::model::OperandIndex &index, Type type)
+ : Node{"operand" + std::to_string(index.value())}
+{
+ {
+ auto type_to_shape = [](Type type) {
+ switch (type)
+ {
+ case Type::MODEL_INPUT:
+ return INPUT_SHAPE;
+ case Type::MODEL_OUTPUT:
+ return OUTPUT_SHAPE;
+ case Type::UNDEFINED:
+ case Type::INTERNAL:
+ default:
+ return OPERAND_SHAPE;
+ }
+ };
+ setAttribute("shape", type_to_shape(type));
+ }
+
+ setAttribute("colorscheme", BG_COLOR_SCHEME);
+}
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/dumper/dot/OperandNode.h b/runtimes/neurun/core/src/dumper/dot/OperandNode.h
new file mode 100644
index 000000000..40f715eac
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/OperandNode.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file OperandNode.h
+ * @brief This file contains OperandNode
+ * @ingroup COM_AI_RUNTIME
+ *
+ */
+
+#ifndef __NEURUN_DUMPER_DOT_DOT_OPERAND_INFO_H__
+#define __NEURUN_DUMPER_DOT_DOT_OPERAND_INFO_H__
+
+#include <vector>
+
+#include "Node.h"
+#include "model/Operand.h"
+#include "model/Index.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+/**
+ * @brief Class that represents an Operand
+ *
+ */
+class OperandNode : public Node
+{
+public:
+ enum class Type
+ {
+ UNDEFINED,
+ MODEL_INPUT,
+ MODEL_OUTPUT,
+ INTERNAL
+ };
+
+public:
+ static const std::string INPUT_SHAPE;
+ static const std::string OUTPUT_SHAPE;
+ static const std::string OPERAND_SHAPE;
+ static const std::string BG_COLOR_SCHEME;
+
+public:
+ /**
+ * @brief Construct a new Operand Node object
+ *
+ * @param[in] index Operand index
+ * @param[in] type Operand type
+ */
+ OperandNode(const neurun::model::OperandIndex &index, Type type);
+
+private:
+ void addBackendLabel();
+};
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
+
+#endif // __NEURUN_DUMPER_DOT_DOT_OPERAND_INFO_H__
diff --git a/runtimes/neurun/core/src/dumper/dot/OperationNode.cc b/runtimes/neurun/core/src/dumper/dot/OperationNode.cc
new file mode 100644
index 000000000..040241daa
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/OperationNode.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sstream>
+
+#include "OperationNode.h"
+#include "graph/Graph.h"
+#include "graph/operation/LowerInfo.h"
+#include "backend/IConfig.h"
+#include "backend/Backend.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+const std::string OperationNode::OPERATION_SHAPE = "rect";
+const std::string OperationNode::BG_COLOR_SCHEME = "pastel18";
+
+OperationNode::OperationNode(const neurun::model::OperationIndex &index,
+ const neurun::model::Operation &node)
+ : Node{"operation" + std::to_string(index.value())}
+{
+ setAttribute("label", std::to_string(index.value()) + " : " + node.getName());
+ setAttribute("shape", OPERATION_SHAPE);
+ setAttribute("colorscheme", BG_COLOR_SCHEME);
+ setAttribute("fillcolor", DEFAULT_FILLCOLOR);
+}
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/dumper/dot/OperationNode.h b/runtimes/neurun/core/src/dumper/dot/OperationNode.h
new file mode 100644
index 000000000..6b8dede2d
--- /dev/null
+++ b/runtimes/neurun/core/src/dumper/dot/OperationNode.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file OperationNode.h
+ * @brief This file contains OperationNode
+ * @ingroup COM_AI_RUNTIME
+ *
+ */
+
+#ifndef __NEURUN_DUMPER_DOT_DOT_NODE_INFO_H__
+#define __NEURUN_DUMPER_DOT_DOT_NODE_INFO_H__
+
+#include "Node.h"
+#include "model/Operation.h"
+#include "model/Index.h"
+
+namespace neurun
+{
+namespace dumper
+{
+namespace dot
+{
+
+/**
+ * @brief Class that represents an Operation
+ *
+ */
+class OperationNode : public Node
+{
+public:
+ static const std::string OPERATION_SHAPE;
+ static const std::string BG_COLOR_SCHEME;
+
+public:
+ /**
+ * @brief Construct a new Operation Node object
+ *
+ * @param[in] index operation index
+ * @param[in] node operation object
+ */
+ OperationNode(const neurun::model::OperationIndex &index, const neurun::model::Operation &node);
+};
+
+} // namespace dot
+} // namespace dumper
+} // namespace neurun
+
+#endif // __NEURUN_DUMPER_DOT_DOT_NODE_INFO_H__
diff --git a/runtimes/neurun/core/src/exec/DataflowExecutor.cc b/runtimes/neurun/core/src/exec/DataflowExecutor.cc
new file mode 100644
index 000000000..75d616131
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/DataflowExecutor.cc
@@ -0,0 +1,206 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "DataflowExecutor.h"
+
+#include <cassert>
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+int64_t DataflowExecutor::calculateRank(const std::vector<model::Element> &operations)
+{
+ int64_t rank = 0;
+ if (!_indexed_ranks)
+ {
+ return rank;
+ }
+ for (const auto &element : operations)
+ {
+ auto it = _indexed_ranks->find(element.index);
+ if (it == _indexed_ranks->end())
+ {
+ assert(element.node->getName() == "Permute");
+ // Assign int32_t::max (rather than int64_t::max) so that summing into the int64_t rank cannot overflow
+ rank += std::numeric_limits<int32_t>::max();
+ }
+ else
+ {
+ rank += it->second;
+ }
+ }
+ return rank;
+}
+
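+// Worked example (hypothetical ranks): a subgraph whose operations have ranks {3, 5}
+// is inserted with key 8; a Permute operation has no indexed rank and contributes
+// int32_t::max, so permute-heavy jobs sort first in the descending-rank _ready_jobs.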
+void DataflowExecutor::emplaceToReadyJobs(const uint32_t &id)
+{
+ auto &job = _waiting_jobs[id];
+ assert(job != nullptr);
+ auto &subg = _subgraphs->at(_job_to_subgraph[job->index()]);
+ auto rank = calculateRank(subg.operations());
+ _ready_jobs.emplace(rank, std::move(job));
+}
+
+void DataflowExecutor::notify(uint32_t finished_job_id)
+{
+ for (auto id : _output_info[finished_job_id])
+ {
+ assert(_input_info[id] > 0);
+ auto count = --_input_info[id];
+ if (count == 0) // No dependent jobs left, ready for execution
+ {
+ emplaceToReadyJobs(id);
+ }
+ }
+}
+
+bool DataflowExecutor::noWaitingJobs()
+{
+ return std::all_of(_waiting_jobs.begin(), _waiting_jobs.end(),
+ [](const std::unique_ptr<Job> &job) { return job == nullptr; });
+}
+
+DataflowExecutor::DataflowExecutor(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ const std::shared_ptr<compiler::OperandContext> &operand_context,
+ std::unique_ptr<graph::LowerInfoMap> lower_info,
+ std::unique_ptr<backend::TensorManagerSet> tensor_mgrs,
+ CodeMap &&code_map)
+ : ExecutorBase{model, std::move(subgraphs), operand_context, std::move(lower_info),
+ std::move(tensor_mgrs)},
+ _code_map{std::move(code_map)}
+{
+ VERBOSE(DataflowExecutor) << "Constructing Dataflow Executor" << std::endl;
+
+ assert(_subgraphs);
+ // Create jobs, converting each SubgraphIndex to a job index (uint32_t)
+ uint32_t next_job_index = 0;
+ std::unordered_map<model::SubgraphIndex, uint32_t> subgraph_to_job;
+ _subgraphs->iterate([&](const model::SubgraphIndex &subg_index, const model::Subgraph &) {
+ VERBOSE(DataflowExecutor) << "Create a job #" << next_job_index << " with SubgraphIndex "
+ << subg_index.value() << std::endl;
+ _finished_jobs.emplace_back(
+ nnfw::cpp14::make_unique<Job>(next_job_index, _code_map.at(subg_index).get(),
+ _lower_info->operation.at(subg_index)->backend()));
+ subgraph_to_job[subg_index] = next_job_index++;
+ });
+
+ _waiting_jobs.resize(next_job_index);
+ _output_info.resize(next_job_index);
+ _initial_input_info.resize(next_job_index, 0);
+
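+ // Wire job dependencies: if an output of job A is an input of job B, B is appended
+ // to _output_info[A] and _initial_input_info[B] is incremented.
+ // Worked example (hypothetical indices): a chain of jobs 0 -> 1 -> 2 yields
+ // _output_info = {0: [1], 1: [2]} and _initial_input_info = {0, 1, 1}.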
+ _subgraphs->iterate([&](const model::SubgraphIndex &subg_index, const model::Subgraph &subg) {
+ auto job_index = subgraph_to_job[subg_index];
+ for (auto output : subg.getOutputs())
+ {
+ // Update output and input info
+ _subgraphs->iterate(
+ [&](const model::SubgraphIndex &subg_cur_index, const model::Subgraph &subg_cur) {
+ if (subg_cur.getInputs().contains(output))
+ {
+ auto dep_index = subgraph_to_job[subg_cur_index];
+ ++_initial_input_info[dep_index];
+ _output_info[job_index].push_back(dep_index);
+ }
+ });
+ }
+ });
+ for (const auto &s : subgraph_to_job)
+ _job_to_subgraph.emplace(s.second, s.first);
+
+ _input_info = _initial_input_info;
+}
+
+void DataflowExecutor::executeImpl()
+{
+ assert(noWaitingJobs());
+
+ // Execution setup
+ _waiting_jobs.swap(_finished_jobs); // Move finished jobs to waiting jobs
+
+ for (uint32_t i = 0; i < _waiting_jobs.size(); ++i)
+ {
+ if (_input_info[i] == 0)
+ {
+ emplaceToReadyJobs(i);
+ }
+ }
+ assert(!_ready_jobs.empty()); // Cannot begin if there are no initial jobs
+ bool is_profiling = util::getConfigBool(util::config::PROFILING_MODE);
+ // Notify execution begin
+ for (auto &o : _observers)
+ {
+ o->handleBegin(this);
+ }
+
+ while (!_ready_jobs.empty())
+ {
+ auto job = std::move((_ready_jobs.begin())->second);
+ _ready_jobs.erase(_ready_jobs.begin());
+ auto job_index = job->index();
+ VERBOSE(DataflowExecutor) << "Run job #" << job_index << std::endl;
+ notifyJobBegin(job_index);
+ if (is_profiling)
+ job->fn()->runSync();
+ else
+ job->run();
+ notifyJobEnd(job_index);
+ notify(job_index);
+ _finished_jobs[job_index] = std::move(job);
+ }
+ assert(noWaitingJobs());
+
+ for (auto &o : _observers)
+ {
+ o->handleEnd(this);
+ }
+
+ // Reset input info for the next execution
+ _input_info = _initial_input_info;
+}
+
+void DataflowExecutor::notifyJobBegin(uint32_t job_index)
+{
+ auto subgraph_index = _job_to_subgraph[job_index];
+ // Workaround - assumes only one operation
+ auto node = _subgraphs->at(subgraph_index).operations().at(0).node;
+ const backend::Backend *backend = _lower_info->operation.at(subgraph_index)->backend();
+ for (auto &o : _observers)
+ {
+ o->handleBegin(this, node, backend);
+ }
+}
+
+void DataflowExecutor::notifyJobEnd(uint32_t job_index)
+{
+ auto subgraph_index = _job_to_subgraph[job_index];
+ // Workaround - assumes only one operation
+ auto node = _subgraphs->at(subgraph_index).operations().at(0).node;
+ const backend::Backend *backend = _lower_info->operation.at(subgraph_index)->backend();
+ for (auto &o : _observers)
+ {
+ o->handleEnd(this, node, backend);
+ }
+}
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/DataflowExecutor.h b/runtimes/neurun/core/src/exec/DataflowExecutor.h
new file mode 100644
index 000000000..935f9976d
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/DataflowExecutor.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_DATAFLOW_EXECUTOR_H__
+#define __NEURUN_EXEC_DATAFLOW_EXECUTOR_H__
+
+#include <list>
+#include <map>
+#include <unordered_map>
+
+#include "FunctionSequence.h"
+#include "Job.h"
+#include "model/OperandIndexSequence.h"
+#include "model/Index.h"
+#include "model/Model.h"
+#include "cpp14/memory.h"
+#include "exec/ExecutorBase.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+class DataflowExecutor : public ExecutorBase
+{
+public:
+ using CodeMap = std::unordered_map<model::SubgraphIndex, std::unique_ptr<FunctionSequence>>;
+
+protected:
+ virtual void notify(uint32_t finished_job_id);
+ bool noWaitingJobs();
+
+public:
+ /**
+ * @brief Constructs a DataflowExecutor object
+ *
+ * @param model Model object
+ * @param subgraphs Subgraphs to be executed as jobs
+ * @param operand_context Operand context (only for input/output operand data access)
+ * @param lower_info LowerInfo object (only to know input/output operand layouts)
+ * @param tensor_mgrs TensorManagerSet object
+ * @param code_map Compiled code map
+ */
+ DataflowExecutor(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ const std::shared_ptr<compiler::OperandContext> &operand_context,
+ std::unique_ptr<graph::LowerInfoMap> lower_info,
+ std::unique_ptr<backend::TensorManagerSet> tensor_mgrs, CodeMap &&code_map);
+
+ void executeImpl() override;
+
+ void notifyJobEnd(uint32_t job_index);
+ void notifyJobBegin(uint32_t job_index);
+
+ void addObserver(std::unique_ptr<IExecutionObserver> ref)
+ {
+ _observers.emplace_back(std::move(ref));
+ }
+ void removeObserver(std::unique_ptr<IExecutionObserver> ref) { _observers.remove(ref); }
+
+protected:
+ int64_t calculateRank(const std::vector<model::Element> &operations);
+ void emplaceToReadyJobs(const uint32_t &id);
+
+protected:
+ CodeMap _code_map;
+ /**
+ * @brief A vector of finished jobs for the current execution
+ * After a run, it holds all the jobs of this execution, ready for the next run
+ */
+ std::vector<std::unique_ptr<Job>> _finished_jobs;
+ /**
+ * @brief A vector of waiting jobs for the current execution
+ * All jobs are moved here from #_finished_jobs when a run starts
+ */
+ std::vector<std::unique_ptr<Job>> _waiting_jobs;
+ /**
+ * @brief Jobs' output info
+ * Used for notifying after finishing a job
+ */
+ std::vector<std::list<uint32_t>> _output_info;
+ std::vector<uint32_t> _initial_input_info;
+ std::vector<uint32_t> _input_info;
+ /**
+ * @brief A collection of jobs that are ready for execution
+ * Jobs in it are ready to be scheduled.
+ * Ordered by priority from `_indexed_ranks`
+ */
+ std::multimap<int64_t, std::unique_ptr<Job>, std::greater<int64_t>> _ready_jobs;
+
+ /// @brief Maps a job index to the subgraph that the job executes
+ std::unordered_map<uint32_t, model::SubgraphIndex> _job_to_subgraph;
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_DATAFLOW_EXECUTOR_H__
diff --git a/runtimes/neurun/core/src/exec/Execution.cc b/runtimes/neurun/core/src/exec/Execution.cc
new file mode 100644
index 000000000..01114d8c8
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/Execution.cc
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "exec/Execution.h"
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+Execution::Execution(const std::shared_ptr<IExecutor> &executor) : _executor{executor}
+{
+ _io_desc.inputs.resize(_executor->model().inputs.size());
+ _io_desc.outputs.resize(_executor->model().outputs.size());
+}
+
+void Execution::setInput(const model::IOIndex &index, const void *buffer, size_t length)
+{
+ const auto input_index = model().inputs.at(index);
+ const auto info = model().operands.at(input_index).info();
+
+ if (length < info.total_size())
+ {
+ throw std::runtime_error{"Too small length"};
+ }
+
+ _io_desc.inputs.at(index.value()) = nnfw::cpp14::make_unique<InputDesc>(info, buffer, length);
+}
+
+void Execution::setInput(const model::IOIndex &index, const model::TypeInfo &type,
+ const model::Shape &shape, const void *buffer, size_t length)
+{
+ const model::OperandInfo info{shape, type};
+
+ if (length < info.total_size())
+ {
+ throw std::runtime_error{"Too small length"};
+ }
+
+ _io_desc.inputs.at(index.value()) = nnfw::cpp14::make_unique<InputDesc>(info, buffer, length);
+}
+
+void Execution::setOutput(const model::IOIndex &index, void *buffer, size_t length)
+{
+ const auto output_index = model().outputs.at(index);
+ const auto info = model().operands.at(output_index).info();
+
+ if (length < info.total_size())
+ {
+ throw std::runtime_error{"Too small length"};
+ }
+
+ _io_desc.outputs.at(index.value()) = nnfw::cpp14::make_unique<OutputDesc>(info, buffer, length);
+}
+
+void Execution::setOutput(const model::IOIndex &index, const model::TypeInfo &type,
+ const model::Shape &shape, void *buffer, size_t length)
+{
+ const model::OperandInfo info{shape, type};
+
+ if (length < info.total_size())
+ {
+ throw std::runtime_error{"Too small length"};
+ }
+
+ _io_desc.outputs.at(index.value()) = nnfw::cpp14::make_unique<OutputDesc>(info, buffer, length);
+}
+
+void Execution::execute()
+{
+ VERBOSE(Execution) << "Start execution" << std::endl;
+
+ _executor->execute(_io_desc);
+
+ VERBOSE(Execution) << "Execution finished" << std::endl;
+}
+
+void Execution::startExecute()
+{
+ VERBOSE(Execution) << "Create asynchronous execution thread" << std::endl;
+
+ _exec_thread = nnfw::cpp14::make_unique<std::thread>(&Execution::execute, this);
+}
+
+void Execution::waitFinish()
+{
+ VERBOSE(Execution) << "Wait to finish execution" << std::endl;
+
+ _exec_thread->join();
+}
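+
+// A minimal usage sketch (hypothetical executor and buffers; error handling omitted):
+//   Execution exec{executor};
+//   exec.setInput(model::IOIndex{0}, input_buf, input_len);
+//   exec.setOutput(model::IOIndex{0}, output_buf, output_len);
+//   exec.startExecute(); // or exec.execute() for a synchronous run
+//   exec.waitFinish();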
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/ExecutionObservers.cc b/runtimes/neurun/core/src/exec/ExecutionObservers.cc
new file mode 100644
index 000000000..e6561fe5c
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/ExecutionObservers.cc
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "exec/ExecutionObservers.h"
+#include "util/logging.h"
+#include "model/operation/PermuteNode.h"
+#include "exec/IExecutor.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+
+namespace exec
+{
+
+void ProfileObserver::handleBegin(neurun::exec::IExecutor *, const neurun::model::Operation *,
+ const neurun::backend::Backend *backend)
+{
+ _timer = backend->config()->timer();
+ if (_timer == nullptr)
+ throw std::runtime_error("To profile backend timer() method must be implemented");
+ _timer->handleBegin();
+}
+
+void ProfileObserver::handleEnd(IExecutor *exec, const model::Operation *node,
+ const backend::Backend *backend)
+{
+ _timer->handleEnd();
+ const auto timer_res = _timer->getTime();
+
+ auto node_name = node->getName();
+ VERBOSE(ProfileInfo) << "Time for " << node_name << " : " << timer_res << std::endl;
+
+ // fill ExecTime:
+ bool is_quantized = exec->model().operands.at(node->getInputs().at(0)).typeInfo().type() ==
+ model::DataType::QUANT8_ASYMM;
+
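+ // Timings are bucketed by backend, operation name, quantization flag, and total
+ // I/O byte size, so `size` computed below selects which ExecTime entry is updated.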
+ uint32_t size = 0;
+ for (const auto &input : node->getInputs())
+ {
+ size += exec->model().operands.at(input).info().total_size();
+ }
+ for (const auto &output : node->getOutputs())
+ {
+ size += exec->model().operands.at(output).info().total_size();
+ }
+ if (node_name == "Permute")
+ {
+ auto *permute_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::PermuteNode *>(node);
+ assert(permute_node != nullptr);
+ _et->updatePermuteTime(permute_node->param().input_backend_ctx->backend,
+ permute_node->param().output_backend_ctx->backend, is_quantized, size,
+ timer_res);
+ }
+ else
+ {
+ _et->updateOperationExecTime(backend, node_name, is_quantized, size, timer_res);
+ }
+}
+
+} // namespace exec
+
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/ExecutorBase.cc b/runtimes/neurun/core/src/exec/ExecutorBase.cc
new file mode 100644
index 000000000..827d4dc8b
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/ExecutorBase.cc
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ExecutorBase.h"
+#include "util/logging.h"
+namespace neurun
+{
+namespace exec
+{
+
+ExecutorBase::ExecutorBase(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ const std::shared_ptr<compiler::OperandContext> &operand_context,
+ std::unique_ptr<graph::LowerInfoMap> lower_info,
+ std::unique_ptr<backend::TensorManagerSet> tensor_mgrs)
+ : _observers(), _model{model}, _subgraphs{std::move(subgraphs)},
+ _operand_context{operand_context}, _lower_info{std::move(lower_info)},
+ _tensor_mgrs{std::move(tensor_mgrs)}, _mutex()
+{
+ // DO NOTHING
+}
+
+std::unique_ptr<ISource> ExecutorBase::source(const model::IOIndex &index,
+ const model::TypeInfo &type, const void *buffer,
+ size_t length)
+{
+ using ::neurun::model::DataType;
+ switch (type.type())
+ {
+ case DataType::FLOAT32:
+ return source<float>(index, buffer, length);
+ case DataType::INT32:
+ return source<int32_t>(index, buffer, length);
+ case DataType::UINT32:
+ return source<uint32_t>(index, buffer, length);
+ case DataType::BOOL8:
+ case DataType::QUANT8_ASYMM:
+ return source<uint8_t>(index, buffer, length);
+ default:
+ throw std::runtime_error("Not supported yet");
+ }
+}
+
+std::unique_ptr<ISink> ExecutorBase::sink(const model::IOIndex &index, const model::TypeInfo &type,
+ void *buffer, size_t length)
+{
+ using ::neurun::model::DataType;
+ switch (type.type())
+ {
+ case DataType::FLOAT32:
+ return sink<float>(index, buffer, length);
+ case DataType::INT32:
+ return sink<int32_t>(index, buffer, length);
+ case DataType::UINT32:
+ return sink<uint32_t>(index, buffer, length);
+ case DataType::BOOL8:
+ case DataType::QUANT8_ASYMM:
+ return sink<uint8_t>(index, buffer, length);
+ default:
+ throw std::runtime_error("Not supported yet");
+ }
+}
+
+void ExecutorBase::execute(const IODescription &desc)
+{
+ // Use a mutex so that this executor is thread-safe
+ // TODO If all backends used by this executor are thread-safe, the mutex is unnecessary
+ std::lock_guard<std::mutex> lock(_mutex);
+
+ std::vector<std::unique_ptr<ISource>> sources{_model->inputs.size()};
+ std::vector<std::unique_ptr<ISink>> sinks{_model->outputs.size()};
+
+ // Set input(s)
+ for (uint32_t n = 0; n < _model->inputs.size(); ++n)
+ {
+ model::IOIndex input_index{n};
+ model::OperandIndex index{_model->inputs.at(input_index)};
+
+ if (desc.inputs.at(n) == nullptr)
+ {
+ // Optional input
+ continue;
+ }
+
+ const auto operand_li = _lower_info->operand.at(index).get();
+ if (operand_li->def_factors().empty())
+ {
+ // This input is not used by any backend (e.g. a constant such as reshape's axis)
+ continue;
+ }
+
+ const auto &input = *desc.inputs.at(n);
+ sources.at(n) = source(input_index, input.info.typeInfo(), input.buffer, input.size);
+
+ auto setter = [&](::neurun::backend::operand::ITensor &tensor) { sources.at(n)->push(tensor); };
+
+ auto object = _operand_context->at(index);
+
+ object->access(setter);
+ }
+
+ executeImpl();
+
+ // Get output(s)
+ for (uint32_t n = 0; n < _model->outputs.size(); ++n)
+ {
+ neurun::model::IOIndex output_index{n};
+ // Optional output
+ if (desc.outputs.at(n) == nullptr)
+ {
+ continue;
+ }
+ const auto &output = *desc.outputs.at(n);
+ sinks.at(n) = sink(output_index, output.info.typeInfo(), output.buffer, output.size);
+
+ auto getter = [&](::neurun::backend::operand::ITensor &tensor) { sinks.at(n)->pull(tensor); };
+
+ ::neurun::model::OperandIndex index{_model->outputs.at(output_index)};
+ auto object = _operand_context->at(index);
+
+ object->access(getter);
+ }
+}
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/ExecutorBase.h b/runtimes/neurun/core/src/exec/ExecutorBase.h
new file mode 100644
index 000000000..c283e7f61
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/ExecutorBase.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_EXECUTOR_BASE_H__
+#define __NEURUN_EXEC_EXECUTOR_BASE_H__
+
+#include <mutex>
+
+#include "Source.h"
+#include "exec/ExecutionObservers.h"
+#include "Sink.h"
+#include "exec/IExecutor.h"
+#include "model/Model.h"
+#include "graph/LowerInfoMap.h"
+#include "backend/IConfig.h"
+#include "model/OperandInfo.h"
+#include "backend/Backend.h"
+#include "compiler/OperandContext.h"
+#include "model/Subgraphs.h"
+#include "model/Subgraph.h"
+#include "backend/ExecTime.h"
+#include "exec/IFunction.h"
+#include "backend/ITensorManager.h"
+#include <list>
+
+namespace neurun
+{
+namespace exec
+{
+
+class ExecutorBase : public IExecutor
+{
+public:
+ ExecutorBase(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ const std::shared_ptr<compiler::OperandContext> &operand_context,
+ std::unique_ptr<graph::LowerInfoMap> lower_info,
+ std::unique_ptr<backend::TensorManagerSet> tensor_mgrs);
+
+ virtual ~ExecutorBase() = default;
+
+ const model::Model &model() override { return *_model; }
+
+ void execute(const IODescription &desc) final;
+
+ // Used only in Dataflow and Parallel Executors
+ void setIndexedRanks(std::shared_ptr<model::OperationIndexMap<int64_t>> ranks) final
+ {
+ _indexed_ranks = std::move(ranks);
+ }
+
+ virtual void executeImpl(void) = 0;
+
+private:
+ std::unique_ptr<ISource> source(const model::IOIndex &index, const model::TypeInfo &type,
+ const void *buffer, size_t length);
+ std::unique_ptr<ISink> sink(const model::IOIndex &index, const model::TypeInfo &type,
+ void *buffer, size_t length);
+
+ template <typename T>
+ std::unique_ptr<ISource> source(const model::IOIndex &index, const void *buffer, size_t length)
+ {
+ const auto operand_index = _model->inputs.at(index);
+ const auto &operand = _model->operands.at(operand_index);
+
+ const auto tensor = _operand_context->at(operand_index)->ptr();
+ const auto output_layout = tensor->layout();
+ // TODO Set input_layout as frontend model's input layout
+ auto input_layout = model::Layout::NHWC;
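+ // Example: an NHWC frontend input feeding an NCHW backend tensor takes the
+ // PermutateSource path below (reorder while copying); matching layouts fall
+ // through to the plain CopySource.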
+ if ((input_layout == model::Layout::NHWC) && (output_layout == model::Layout::NCHW))
+ {
+ return nnfw::cpp14::make_unique<PermutateSource<T>>(buffer, length, operand.shape());
+ }
+ // TODO Support NCHW -> NHWC
+
+ return nnfw::cpp14::make_unique<CopySource<T>>(buffer, length, operand.shape());
+ }
+
+ template <typename T>
+ std::unique_ptr<ISink> sink(const model::IOIndex &index, void *buffer, size_t length)
+ {
+ const auto operand_index = _model->outputs.at(index);
+ const auto &operand = _model->operands.at(operand_index);
+ const auto tensor = _operand_context->at(operand_index)->ptr();
+ const auto input_layout = tensor->layout();
+ // TODO Set output_layout as frontend model's output layout
+ auto output_layout = model::Layout::NHWC;
+ if ((input_layout == model::Layout::NCHW) && (output_layout == model::Layout::NHWC))
+ {
+ return nnfw::cpp14::make_unique<PermutateSink<T>>(buffer, length, operand.shape());
+ }
+ // TODO Support NHWC -> NCHW
+
+ return nnfw::cpp14::make_unique<CopySink<T>>(buffer, length, operand.shape());
+ }
+
+protected:
+ std::list<std::unique_ptr<IExecutionObserver>> _observers;
+ std::shared_ptr<model::OperationIndexMap<int64_t>> _indexed_ranks;
+ std::shared_ptr<const model::Model> _model;
+ std::unique_ptr<model::Subgraphs> _subgraphs;
+ std::shared_ptr<compiler::OperandContext> _operand_context;
+ std::unique_ptr<graph::LowerInfoMap> _lower_info;
+ std::unique_ptr<backend::TensorManagerSet> _tensor_mgrs;
+ std::mutex _mutex;
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_EXECUTOR_BASE_H__
diff --git a/runtimes/neurun/core/src/exec/FunctionSequence.cc b/runtimes/neurun/core/src/exec/FunctionSequence.cc
new file mode 100644
index 000000000..00214fcfa
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/FunctionSequence.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FunctionSequence.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+void FunctionSequence::run()
+{
+ for (const auto &function : _functions)
+ {
+ function->run();
+ }
+}
+
+void FunctionSequence::runSync()
+{
+ for (const auto &function : _functions)
+ {
+ function->runSync();
+ }
+}
+
+void FunctionSequence::prepare()
+{
+ for (const auto &function : _functions)
+ {
+ function->prepare();
+ }
+}
+
+void FunctionSequence::append(std::unique_ptr<IFunction> &&function)
+{
+ _functions.push_back(std::move(function));
+}
+
+void FunctionSequence::iterate(const std::function<void(IFunction &)> &fn)
+{
+ for (const auto &func : _functions)
+ {
+ fn(*func);
+ }
+}
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/FunctionSequence.h b/runtimes/neurun/core/src/exec/FunctionSequence.h
new file mode 100644
index 000000000..2ba5c0b08
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/FunctionSequence.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_FUNCTION_SEQUENCE_H__
+#define __NEURUN_EXEC_FUNCTION_SEQUENCE_H__
+
+#include <memory>
+#include <vector>
+#include <functional>
+
+#include "exec/IFunction.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+class FunctionSequence : public IFunction
+{
+public:
+ virtual ~FunctionSequence() = default;
+
+ void run() override;
+ void runSync() override;
+ void prepare() override;
+
+ /**
+ * @brief Appends an IFunction object to the function sequence
+ *
+ * @param function IFunction object to be appended
+ */
+ void append(std::unique_ptr<IFunction> &&function);
+
+ void iterate(const std::function<void(IFunction &)> &fn);
+
+private:
+ std::vector<std::unique_ptr<IFunction>> _functions;
+};
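+
+// A minimal usage sketch (hypothetical IFunction implementation `MyFn`):
+//   FunctionSequence seq;
+//   seq.append(nnfw::cpp14::make_unique<MyFn>());
+//   seq.prepare(); // optional one-time setup
+//   seq.run();     // runs the appended functions in order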
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_FUNCTION_SEQUENCE_H__
diff --git a/runtimes/neurun/core/src/exec/Job.cc b/runtimes/neurun/core/src/exec/Job.cc
new file mode 100644
index 000000000..6ce3a84f9
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/Job.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Job.h"
+
+#include <cassert>
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+Job::Job(uint32_t index, IFunction *fn, const backend::Backend *backend)
+ : _index{index}, _fn{fn}, _backend{backend}
+{
+}
+
+void Job::run() { _fn->run(); }
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/Job.h b/runtimes/neurun/core/src/exec/Job.h
new file mode 100644
index 000000000..108f39e99
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/Job.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_JOB_H__
+#define __NEURUN_EXEC_JOB_H__
+
+#include <unordered_set>
+
+#include "exec/IFunction.h"
+#include "model/Index.h"
+#include "model/OperandIndexSequence.h"
+#include "backend/Backend.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+class Job
+{
+public:
+ /**
+ * @brief Constructs a Job object
+ *
+ * @param index Job index
+ * @param fn Compiled code to run for this job
+ * @param backend Backend that this job runs on
+ */
+ Job(uint32_t index, IFunction *fn, const backend::Backend *backend);
+ /**
+ * @brief Execute the compiled code
+ */
+ void run();
+ /**
+ * @brief Return job index
+ *
+ * @return Job index
+ */
+ uint32_t index() const { return _index; }
+ /**
+ * @brief Return the function to be executed
+ *
+ * @return Pointer of the function
+ */
+ IFunction *fn() { return _fn; }
+
+ /**
+ * @brief Return the backend
+ *
+ * @return Backend
+ */
+ const backend::Backend *backend() { return _backend; }
+
+private:
+ uint32_t _index;
+ IFunction *_fn;
+ const backend::Backend *_backend;
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_JOB_H__
diff --git a/runtimes/neurun/core/src/exec/LinearExecutor.cc b/runtimes/neurun/core/src/exec/LinearExecutor.cc
new file mode 100644
index 000000000..35197a257
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/LinearExecutor.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "LinearExecutor.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+void LinearExecutor::executeImpl() { _fn_seq->run(); }
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/LinearExecutor.h b/runtimes/neurun/core/src/exec/LinearExecutor.h
new file mode 100644
index 000000000..58c1ea9ae
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/LinearExecutor.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file LinearExecutor.h
+ * @brief This file contains LinearExecutor class to define and run execution phase
+ */
+
+#ifndef __NEURUN_EXEC_EXECUTOR_H_
+#define __NEURUN_EXEC_EXECUTOR_H_
+
+#include "ExecutorBase.h"
+#include "compiler/Linear.h"
+#include "exec/FunctionSequence.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+/**
+ * @brief Class to handle the execution phase. Simply runs the sequence of operations, which is
+ * sorted in topological order
+ */
+class LinearExecutor final : public ExecutorBase
+{
+public:
+ /**
+ * @brief Construct a new LinearExecutor object
+ * @param[in] elements Operation elements sorted in topological order
+ * @param[in] fn_seq Function sequence to be executed (generated from the compiled result)
+ */
+ LinearExecutor(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ const std::shared_ptr<compiler::OperandContext> &operand_context,
+ std::unique_ptr<graph::LowerInfoMap> lower_info,
+ std::unique_ptr<backend::TensorManagerSet> tensor_mgrs,
+ std::vector<compiler::Linear::Element> &&elements,
+ const std::shared_ptr<exec::FunctionSequence> &fn_seq)
+ : ExecutorBase{model, std::move(subgraphs), operand_context, std::move(lower_info),
+ std::move(tensor_mgrs)},
+ _fn_seq{fn_seq}, _elements{std::move(elements)}
+ {
+ }
+
+public:
+ void executeImpl(void) override;
+
+private:
+ std::shared_ptr<exec::FunctionSequence> _fn_seq;
+ std::vector<compiler::Linear::Element> _elements;
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_EXECUTOR_H_
diff --git a/runtimes/neurun/core/src/exec/ParallelExecutor.cc b/runtimes/neurun/core/src/exec/ParallelExecutor.cc
new file mode 100644
index 000000000..81d4ac03f
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/ParallelExecutor.cc
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParallelExecutor.h"
+
+#include <cassert>
+#include <stdexcept>
+
+#include "util/logging.h"
+#include "exec/IFunction.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+class HookFunction : public IFunction
+{
+public:
+ HookFunction(IFunction *fn, std::function<void()> teardown) : _fn{fn}, _teardown{teardown} {}
+
+public:
+ void run() override
+ {
+ // TODO Introduce and call setup() function here
+ _fn->run();
+ _teardown();
+ }
+ void runSync() override { throw std::runtime_error{"runSync is needed just for profiling in Dataflow executor"}; }
+
+private:
+ IFunction *_fn;
+ std::function<void()> _teardown;
+};
+
+void ParallelExecutor::notify(uint32_t finished_job_id)
+{
+ std::unique_lock<std::mutex> lock{_mu_jobs};
+
+ DataflowExecutor::notify(finished_job_id);
+
+ lock.unlock();
+ _cv_jobs.notify_all();
+}
+
+ParallelExecutor::ParallelExecutor(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ const std::shared_ptr<compiler::OperandContext> &operand_context,
+ std::unique_ptr<graph::LowerInfoMap> lower_info,
+ std::unique_ptr<backend::TensorManagerSet> tensor_mgrs,
+ CodeMap &&code_map)
+ : DataflowExecutor{model,
+ std::move(subgraphs),
+ operand_context,
+ std::move(lower_info),
+ std::move(tensor_mgrs),
+ std::move(code_map)}
+{
+ VERBOSE(ParallelExecutor) << "Constructing Parallel Executor" << std::endl;
+}
+
+void ParallelExecutor::executeImpl()
+{
+ // Init scheduler
+ // TODO Consider to have distinct backend set in LowerInfoMap
+ graph::BackendSet backends;
+ for (auto &itr : _lower_info->operation)
+ {
+ backends.add(itr.second->backend());
+ }
+ _scheduler = nnfw::cpp14::make_unique<ParallelScheduler>(backends);
+
+ assert(noWaitingJobs());
+
+ // Execution setup
+ _waiting_jobs.swap(_finished_jobs); // Move finished jobs to waiting jobs
+
+ for (uint32_t i = 0; i < _waiting_jobs.size(); ++i)
+ {
+ VERBOSE(ParallelExecutor) << i << ": " << _input_info[i] << std::endl;
+ if (_input_info[i] == 0)
+ {
+ emplaceToReadyJobs(i);
+ }
+ }
+ assert(!_ready_jobs.empty()); // Cannot begin if there are no initial jobs
+
+ VERBOSE(ParallelExecutor) << "INITIAL JOBS : " << _ready_jobs.size() << std::endl;
+
+ while (true)
+ {
+ std::unique_lock<std::mutex> lock{_mu_jobs};
+
+ if (_ready_jobs.empty())
+ {
+ _cv_jobs.wait(lock, [this] { return !_ready_jobs.empty() || noWaitingJobs(); });
+ // Check finish condition
+ if (_ready_jobs.empty() && noWaitingJobs())
+ {
+ break;
+ }
+ }
+
+ auto job = std::move(_ready_jobs.begin()->second);
+ _ready_jobs.erase(_ready_jobs.begin());
+
+ lock.unlock();
+
+ VERBOSE(ParallelExecutor) << "Assigning fn #" << job->index() << std::endl;
+
+ auto job_index = job->index();
+ auto teardown = [&, job_index]() { notify(job_index); };
+
+ _scheduler->assign(nnfw::cpp14::make_unique<HookFunction>(job->fn(), teardown), job->backend());
+ _finished_jobs[job_index] = std::move(job);
+ }
+
+ assert(noWaitingJobs());
+
+ // Wait for all the jobs done
+ _scheduler->finish();
+
+ // Reset input info for the next execution
+ _input_info = _initial_input_info;
+}
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/ParallelExecutor.h b/runtimes/neurun/core/src/exec/ParallelExecutor.h
new file mode 100644
index 000000000..7a4673b9c
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/ParallelExecutor.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_PARALLEL_EXECUTOR_H__
+#define __NEURUN_EXEC_PARALLEL_EXECUTOR_H__
+
+#include <list>
+#include <queue>
+#include <unordered_map>
+
+#include "FunctionSequence.h"
+#include "Job.h"
+#include "model/OperandIndexSequence.h"
+#include "model/Index.h"
+#include "model/Model.h"
+#include "cpp14/memory.h"
+#include "exec/DataflowExecutor.h"
+#include "ParallelScheduler.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+/**
+ * @brief Class to execute Graph in parallel
+ */
+class ParallelExecutor : public DataflowExecutor
+{
+protected:
+ void notify(uint32_t finished_job_id) override;
+
+public:
+ /**
+ * @brief Constructs a ParallelExecutor object
+ *
+ * @param model Model object
+ * @param subgraphs Subgraphs to be executed as jobs
+ * @param operand_context Operand context (only for input/output operand data access)
+ * @param lower_info LowerInfo object (only to know input/output operand layouts)
+ * @param tensor_mgrs TensorManagerSet object
+ * @param code_map Compiled code map
+ */
+ ParallelExecutor(const std::shared_ptr<const model::Model> &model,
+ std::unique_ptr<model::Subgraphs> subgraphs,
+ const std::shared_ptr<compiler::OperandContext> &operand_context,
+ std::unique_ptr<graph::LowerInfoMap> lower_info,
+ std::unique_ptr<backend::TensorManagerSet> tensor_mgrs, CodeMap &&code_map);
+
+ void executeImpl() override;
+
+private:
+ std::condition_variable _cv_jobs;
+ std::mutex _mu_jobs;
+ std::unique_ptr<ParallelScheduler> _scheduler;
+};
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_PARALLEL_EXECUTOR_H__
diff --git a/runtimes/neurun/core/src/exec/ParallelScheduler.cc b/runtimes/neurun/core/src/exec/ParallelScheduler.cc
new file mode 100644
index 000000000..44f1a5f08
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/ParallelScheduler.cc
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParallelScheduler.h"
+
+#include <cassert>
+
+#include "cpp14/memory.h"
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+WorkQueue::~WorkQueue()
+{
+ {
+ std::unique_lock<std::mutex> lock(_mu);
+ _state = State::FORCE_FINISHING;
+ }
+ _cv.notify_all();
+}
+
+void WorkQueue::operator()()
+{
+ while (true)
+ {
+ std::unique_ptr<IFunction> fn = nullptr;
+
+ {
+ std::unique_lock<std::mutex> lock{_mu};
+ _cv.wait(lock, [this] {
+ return (_state == State::FORCE_FINISHING) || (_state == State::FINISHING) ||
+ (_state == State::ONLINE && !_functions.empty());
+ });
+
+ if (_state == State::FORCE_FINISHING)
+ {
+ assert(_functions.empty() && "Terminating with unfinished jobs");
+ return;
+ }
+ else if (_state == State::FINISHING && _functions.empty())
+ {
+ return;
+ }
+ else
+ {
+ assert(((_state == State::FINISHING) || (_state == State::ONLINE)) && !_functions.empty());
+ fn = std::move(_functions.front());
+ _functions.pop();
+ }
+ }
+
+ assert(fn);
+ fn->run();
+ }
+}
+
+void WorkQueue::enqueue(std::unique_ptr<IFunction> &&fn)
+{
+ {
+ std::unique_lock<std::mutex> lock{_mu};
+ _functions.emplace(std::move(fn));
+ }
+ _cv.notify_one();
+}
+
+void WorkQueue::terminate()
+{
+ {
+ std::unique_lock<std::mutex> lock{_mu};
+ _state = State::FORCE_FINISHING;
+ }
+ _cv.notify_all();
+}
+
+void WorkQueue::finish()
+{
+ {
+ std::unique_lock<std::mutex> lock{_mu};
+ _state = State::FINISHING;
+ }
+ _cv.notify_all();
+}
+
+uint32_t WorkQueue::numJobsInQueue()
+{
+ std::unique_lock<std::mutex> lock{_mu};
+ return _functions.size();
+}
+
+ThreadPool::ThreadPool(uint32_t num_threads)
+{
+ assert(num_threads >= 1);
+
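+ // All threads share the single WorkQueue (_worker); WorkQueue::operator()() is
+ // each thread's entry function, so jobs are pulled from one common queue.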
+ for (uint32_t i = 0; i < num_threads; i++)
+ {
+ _threads.emplace_back(std::ref(_worker));
+ }
+}
+
+ThreadPool::~ThreadPool()
+{
+ if (!_threads.empty())
+ {
+ _worker.terminate();
+ join();
+ }
+}
+
+void ThreadPool::enqueue(std::unique_ptr<IFunction> &&fn) { _worker.enqueue(std::move(fn)); }
+
+uint32_t ThreadPool::numJobsInQueue() { return _worker.numJobsInQueue(); }
+
+void ThreadPool::join()
+{
+ for (auto &thread : _threads)
+ {
+ thread.join();
+ }
+ _threads.clear();
+}
+
+void ThreadPool::finish()
+{
+ _worker.finish();
+ join();
+}
+
+ParallelScheduler::ParallelScheduler(const graph::BackendSet &backends)
+{
+ assert(!backends.empty());
+
+ for (auto backend : backends)
+ {
+ _thread_pools[backend] = nnfw::cpp14::make_unique<ThreadPool>();
+ }
+}
+
+void ParallelScheduler::assign(std::unique_ptr<IFunction> &&fn, const backend::Backend *backend)
+{
+ assert(!_thread_pools.empty());
+
+ _thread_pools.at(backend)->enqueue(std::move(fn));
+}
+
+void ParallelScheduler::finish()
+{
+ for (auto &itr : _thread_pools)
+ {
+ itr.second->finish();
+ }
+}
+
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/ParallelScheduler.h b/runtimes/neurun/core/src/exec/ParallelScheduler.h
new file mode 100644
index 000000000..9660478e8
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/ParallelScheduler.h
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_PARALLEL_SCHEDULER_H__
+#define __NEURUN_EXEC_PARALLEL_SCHEDULER_H__
+
+#include <unordered_map>
+#include <thread>
+#include <mutex>
+#include <condition_variable>
+#include <memory>
+#include <queue>
+#include <vector>
+#include <unordered_set>
+
+#include "exec/IFunction.h"
+#include "graph/BackendSet.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+// TODO Extract this class to a separate file
+class WorkQueue
+{
+public:
+ enum class State
+ {
+ ONLINE,
+ FINISHING,
+ FORCE_FINISHING
+ };
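+
+ // State transitions, as implemented in ParallelScheduler.cc: ONLINE accepts and
+ // runs jobs; finish() switches to FINISHING, where workers drain the queue and
+ // exit; terminate() or destruction switches to FORCE_FINISHING, where workers
+ // exit immediately (asserting the queue is already empty).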
+
+public:
+ /**
+ * @brief Create WorkQueue object
+ */
+ WorkQueue() = default;
+ /**
+ * @brief Destroy WorkQueue object
+ */
+ ~WorkQueue();
+ /**
+ * @brief Thread entry function
+ */
+ void operator()();
+ /**
+ * @brief Push the given Task to the job queue
+ *
+ * @param fn Function to be executed (a job)
+ */
+ void enqueue(std::unique_ptr<IFunction> &&fn);
+ /**
+ * @brief Flag as terminating so all the worker threads can terminate
+ */
+ void terminate();
+ /**
+ * @brief Flag as finishing so the worker threads terminate once the remaining jobs are drained
+ */
+ void finish();
+ /**
+ * @brief Get the number of pending jobs. Even if this returns 0, WorkQueue threads may still be
+ * running
+ *
+ * @return Number of jobs in the queue
+ */
+ uint32_t numJobsInQueue();
+
+private:
+ State _state{State::ONLINE};
+ std::queue<std::unique_ptr<IFunction>> _functions;
+ std::mutex _mu;
+ std::condition_variable _cv;
+};
+
+// TODO Extract this class to a separate file
+class ThreadPool
+{
+public:
+ /**
+ * @brief Construct ThreadPool object
+ *
+ * @param num_threads Number of threads
+ */
+ ThreadPool(uint32_t num_threads = 1);
+ /**
+ * @brief Destroy ThreadPool object
+ */
+ ~ThreadPool();
+ /**
+ * @brief Enqueue a function
+ *
+ * @param fn A function to be queued
+ */
+ void enqueue(std::unique_ptr<IFunction> &&fn);
+ /**
+ * @brief Get number of jobs in worker's queue
+ *
+ * @return Number of jobs
+ */
+ uint32_t numJobsInQueue();
+
+ /**
+ * @brief Block until all jobs are finished
+ */
+ void finish();
+
+private:
+ void join();
+
+private:
+ WorkQueue _worker;
+ std::vector<std::thread> _threads;
+};
+
+class ParallelScheduler
+{
+public:
+ /**
+ * @brief Constructs ParallelScheduler object
+ *
+ * @param backends Backend set
+ */
+ ParallelScheduler(const graph::BackendSet &backends);
+ /**
+ * @brief Assign a task to the given backend
+ *
+ * @param[in] fn Function to be assigned
+ * @param[in] backend Target backend
+ */
+ void assign(std::unique_ptr<IFunction> &&fn, const backend::Backend *backend);
+ /**
+ * @brief Block until all jobs are finished
+ */
+ void finish();
+
+private:
+ std::unordered_map<const backend::Backend *, std::unique_ptr<ThreadPool>> _thread_pools;
+};
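+
+// A minimal usage sketch (hypothetical backend set and function object):
+//   ParallelScheduler scheduler{backends};         // one ThreadPool per backend
+//   scheduler.assign(std::move(fn), some_backend); // enqueue on that backend's pool
+//   scheduler.finish();                            // block until every job is done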
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_PARALLEL_SCHEDULER_H__
diff --git a/runtimes/neurun/core/src/exec/Sink.h b/runtimes/neurun/core/src/exec/Sink.h
new file mode 100644
index 000000000..7ec3efa22
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/Sink.h
@@ -0,0 +1,182 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_SINK_H__
+#define __NEURUN_EXEC_SINK_H__
+
+#include <cassert>
+
+#include "cpp14/memory.h"
+#include "util/feature/nhwc/View.h"
+#include "util/feature/nchw/View.h"
+#include "util/Utils.h"
+#include <misc/feature/IndexIterator.h>
+
+namespace neurun
+{
+namespace exec
+{
+struct ISink
+{
+ virtual ~ISink() = default;
+
+ virtual void pull(::neurun::backend::operand::ITensor &tensor) const = 0;
+};
+
+// Create second-level inheritance: the first level is used as a reference type at use sites
+template <typename T> class ITemplSink : public ISink
+{
+public:
+ ITemplSink(void *output_buffer, const size_t &output_size, const model::Shape &shape,
+ const bool copy)
+ : _output_buffer{reinterpret_cast<T *>(output_buffer)}, _output_size{output_size},
+ _shape{shape}, _copy{copy}
+ {
+ }
+
+protected:
+ void pullUnif(neurun::backend::operand::ITensor &tensor) const
+ {
+ auto input_buffer = tensor.buffer();
+ auto rank = _shape.rank();
+
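+ // Fast path: an unpadded tensor of rank <= 3 (or rank <= 4 when plain copying,
+ // i.e. _copy is true) is contiguous in the requested layout, so one memcpy suffices.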
+ if (!tensor.has_padding() && rank < 4 + _copy)
+ {
+ memcpy(_output_buffer, input_buffer, _output_size);
+ return;
+ }
+
+ switch (rank)
+ {
+ case 0:
+ case 1:
+ {
+ memcpy(_output_buffer, input_buffer, _output_size);
+ break;
+ }
+ case 2:
+ {
+ const int32_t copy_len = _shape.dim(1);
+
+ for (auto i = 0; i < _shape.dim(0); ++i)
+ {
+ neurun::util::Coordinates coords{i, 0};
+ memcpy(_output_buffer + i * copy_len, input_buffer + tensor.calcOffset(coords),
+ copy_len * sizeof(T));
+ }
+ break;
+ }
+ case 3:
+ {
+ const int32_t dim1 = _shape.dim(1);
+ const int32_t dim2 = _shape.dim(2);
+
+ for (auto i = 0; i < _shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < _shape.dim(1); ++j)
+ {
+ neurun::util::Coordinates coords{i, j, 0};
+ memcpy(_output_buffer + i * dim1 * dim2 + j * dim2,
+ input_buffer + tensor.calcOffset(coords), dim2 * sizeof(T));
+ }
+ }
+ break;
+ }
+ case 4:
+ {
+ if (_copy)
+ {
+ const int32_t dim1 = _shape.dim(1);
+ const int32_t dim2 = _shape.dim(2);
+ const int32_t dim3 = _shape.dim(3);
+
+ for (auto i = 0; i < _shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < _shape.dim(1); ++j)
+ {
+ for (auto k = 0; k < _shape.dim(2); ++k)
+ {
+ neurun::util::Coordinates coords{i, j, k, 0};
+ memcpy(_output_buffer + i * dim1 * dim2 * dim3 + j * dim2 * dim3 + k * dim3,
+ input_buffer + tensor.calcOffset(coords), dim3 * sizeof(T));
+ }
+ }
+ }
+ }
+ else
+ {
+ // TODO Support from nhwc to nchw
+ auto feature = _shape.asFeature(model::Layout::NHWC);
+
+ const util::feature::nchw::View<T> from{&tensor};
+ util::feature::nhwc::View<T> into{feature, _output_buffer, _output_size};
+
+ ::nnfw::misc::feature::iterate(feature)
+ << [&](uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) {
+ const auto value = from.at(batch, ch, row, col);
+ into.at(batch, ch, row, col) = value;
+ };
+ }
+ break;
+ }
+ default:
+ throw std::runtime_error("NYI");
+ break;
+ }
+ }
+
+private:
+ T *_output_buffer;
+ const size_t _output_size;
+ const model::Shape _shape;
+ const bool _copy;
+};
+
+template <typename T> class PermutateSink final : public ITemplSink<T>
+{
+public:
+ PermutateSink(void *output_buffer, const size_t &output_size, const model::Shape &shape)
+ : ITemplSink<T>(output_buffer, output_size, shape, false)
+ {
+ }
+
+public:
+ void pull(neurun::backend::operand::ITensor &tensor) const override
+ {
+ ITemplSink<T>::pullUnif(tensor);
+ }
+};
+
+// Only supports an NHWC-format front-end (NNAPI) for now
+template <typename T> class CopySink final : public ITemplSink<T>
+{
+public:
+ CopySink(void *output_buffer, const size_t &output_size, const model::Shape &shape)
+ : ITemplSink<T>(output_buffer, output_size, shape, true)
+ {
+ }
+
+public:
+ void pull(neurun::backend::operand::ITensor &tensor) const override
+ {
+ ITemplSink<T>::pullUnif(tensor);
+ }
+};
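+
+// Usage sketch (illustrative only; `output_tensor` and the user-side buffer are
+// assumed to come from the execution layer):
+//
+//   // Backend tensor is NCHW: permute back into the NHWC user buffer
+//   PermutateSink<float> sink{user_buffer, user_size, output_shape};
+//   sink.pull(output_tensor);
+//
+//   // Backend tensor already matches the user layout: plain copy
+//   CopySink<float> copy_sink{user_buffer, user_size, output_shape};
+//   copy_sink.pull(output_tensor);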
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_SINK_H__
diff --git a/runtimes/neurun/core/src/exec/Source.h b/runtimes/neurun/core/src/exec/Source.h
new file mode 100644
index 000000000..5b914f714
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/Source.h
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_SOURCE_H__
+#define __NEURUN_EXEC_SOURCE_H__
+
+#include <cassert>
+
+#include "cpp14/memory.h"
+#include "util/feature/nchw/View.h"
+#include "util/feature/nhwc/Reader.h"
+#include "util/Utils.h"
+#include <misc/feature/IndexIterator.h>
+#include "model/Shape.h"
+
+namespace neurun
+{
+namespace exec
+{
+
+struct ISource
+{
+ virtual ~ISource() = default;
+
+ virtual void push(::neurun::backend::operand::ITensor &tensor) const = 0;
+};
+
+// Second level of inheritance: the first level (ISource) is used as the reference type at use sites
+template <typename T> class ITemplSource : public ISource
+{
+public:
+ ITemplSource(const void *input_buffer, const size_t &input_size, const model::Shape &shape,
+ const bool copy)
+ : _input_buffer{reinterpret_cast<const T *>(input_buffer)}, _input_size{input_size},
+ _shape{shape}, _copy(copy)
+ {
+ }
+
+ virtual void push(::neurun::backend::operand::ITensor &tensor) const = 0;
+
+protected:
+ void pushUnif(neurun::backend::operand::ITensor &tensor) const
+ {
+ auto output_buffer = tensor.buffer();
+ auto rank = _shape.rank();
+
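+ // Mirror of the Sink fast path: with `_copy`, an unpadded rank-4 tensor may be
+ // filled by a single memcpy; without it only ranks below 4 qualify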
+ if (!tensor.has_padding() && rank < 4 + _copy)
+ {
+ memcpy(output_buffer, _input_buffer, _input_size);
+ return;
+ }
+
+ switch (rank)
+ {
+ case 0:
+ case 1:
+ {
+ memcpy(output_buffer, _input_buffer, _input_size);
+ break;
+ }
+ case 2:
+ {
+ const int32_t copy_len = _shape.dim(1);
+
+ for (auto i = 0; i < _shape.dim(0); ++i)
+ {
+ neurun::util::Coordinates coords{i, 0};
+ memcpy(output_buffer + tensor.calcOffset(coords), _input_buffer + i * copy_len,
+ copy_len * sizeof(T));
+ }
+ break;
+ }
+ case 3:
+ {
+ const int32_t dim1 = _shape.dim(1);
+ const int32_t dim2 = _shape.dim(2);
+
+ for (auto i = 0; i < _shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < _shape.dim(1); ++j)
+ {
+ neurun::util::Coordinates coords{i, j, 0};
+ memcpy(output_buffer + tensor.calcOffset(coords),
+ _input_buffer + i * dim1 * dim2 + j * dim2, dim2 * sizeof(T));
+ }
+ }
+ break;
+ }
+ case 4:
+ {
+ if (_copy)
+ {
+ const int32_t dim1 = _shape.dim(1);
+ const int32_t dim2 = _shape.dim(2);
+ const int32_t dim3 = _shape.dim(3);
+ for (auto i = 0; i < _shape.dim(0); ++i)
+ {
+ for (auto j = 0; j < _shape.dim(1); ++j)
+ {
+ for (auto k = 0; k < _shape.dim(2); ++k)
+ {
+ neurun::util::Coordinates coords{i, j, k, 0};
+ memcpy(output_buffer + tensor.calcOffset(coords),
+ _input_buffer + i * dim1 * dim2 * dim3 + j * dim2 * dim3 + k * dim3,
+ dim3 * sizeof(T));
+ }
+ }
+ }
+ }
+ else
+ {
+ auto feature = _shape.asFeature(model::Layout::NHWC);
+
+ const util::feature::nhwc::Reader<T> from{feature, _input_buffer, _input_size};
+ util::feature::nchw::View<T> into{&tensor};
+
+ ::nnfw::misc::feature::iterate(feature)
+ << [&](uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) {
+ const auto value = from.at(batch, ch, row, col);
+ into.at(batch, ch, row, col) = value;
+ };
+ }
+
+ break;
+ }
+ default:
+ throw std::runtime_error("NYI");
+ break;
+ }
+ }
+
+private:
+ const T *_input_buffer;
+ const size_t _input_size;
+ const model::Shape _shape;
+ const bool _copy;
+};
+
+template <typename T> class PermutateSource final : public ITemplSource<T>
+{
+public:
+ PermutateSource(const void *input_buffer, const size_t &input_size, const model::Shape &shape)
+ : ITemplSource<T>(input_buffer, input_size, shape, false)
+ {
+ }
+
+public:
+ void push(neurun::backend::operand::ITensor &tensor) const override
+ {
+ // do NHWC_TO_NCHW permutation
+ ITemplSource<T>::pushUnif(tensor);
+ }
+};
+
+// Only supports an NHWC-format front-end (NNAPI) for now
+template <typename T> class CopySource final : public ITemplSource<T>
+{
+public:
+ CopySource(const void *input_buffer, const size_t &input_size, const model::Shape &shape)
+ : ITemplSource<T>(input_buffer, input_size, shape, true)
+ {
+ }
+
+public:
+ void push(neurun::backend::operand::ITensor &tensor) const override
+ {
+ ITemplSource<T>::pushUnif(tensor);
+ }
+};
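+
+// Usage sketch (illustrative only; `input_tensor` and the user buffer come from
+// the execution layer):
+//
+//   // NHWC user input pushed into an NCHW backend tensor
+//   PermutateSource<float> source{user_buffer, user_size, input_shape};
+//   source.push(input_tensor);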
+
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_SOURCE_H__
diff --git a/runtimes/neurun/core/src/exec/interp/Buffer.h b/runtimes/neurun/core/src/exec/interp/Buffer.h
new file mode 100644
index 000000000..3528e0819
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/Buffer.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Buffer.h
+ * @brief This file contains Buffer interface and InternalBuffer, ExternalBuffer class
+ */
+#ifndef __NEURUN_EXEC_INTERP_BUFFER_H__
+#define __NEURUN_EXEC_INTERP_BUFFER_H__
+
+#include <cpp14/memory.h>
+
+#include "model/Data.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+/**
+ * @brief Interface for writable data area
+ */
+class Buffer : public model::Data
+{
+public:
+ /**
+ * @brief Return writable pointer for data area
+ * @return Writable pointer
+ */
+ virtual uint8_t *baseWritable(void) const = 0;
+};
+
+/**
+ * @brief Class for internally allocated data area
+ */
+class InternalBuffer final : public Buffer
+{
+public:
+ InternalBuffer(size_t size) : _base{nnfw::cpp14::make_unique<uint8_t[]>(size)}, _size{size}
+ {
+ // DO NOTHING
+ }
+
+public:
+ size_t size(void) const override { return _size; }
+ const uint8_t *base(void) const override { return _base.get(); }
+ uint8_t *baseWritable(void) const override { return _base.get(); }
+
+private:
+ std::unique_ptr<uint8_t[]> _base;
+ size_t _size;
+};
+
+/**
+ * @brief Class for data area from outside
+ */
+class ExternalBuffer final : public Buffer
+{
+public:
+ ExternalBuffer(uint8_t *base, size_t size) : _base{base}, _size{size}
+ {
+ // DO NOTHING
+ }
+
+public:
+ size_t size(void) const override { return _size; }
+ const uint8_t *base(void) const override { return _base; }
+ uint8_t *baseWritable(void) const override { return _base; }
+
+private:
+ uint8_t *_base;
+ size_t _size;
+};
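+
+// Usage sketch (illustrative only; `user_ptr` and `user_size` are hypothetical):
+//
+//   auto owned = std::make_shared<InternalBuffer>(64); // allocates 64 bytes internally
+//   auto wrapped = std::make_shared<ExternalBuffer>(user_ptr, user_size); // no ownership
+//   owned->baseWritable()[0] = 0; // both expose a writable base pointer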
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_INTERP_BUFFER_H__
diff --git a/runtimes/neurun/core/src/exec/interp/ExecEnv.h b/runtimes/neurun/core/src/exec/interp/ExecEnv.h
new file mode 100644
index 000000000..c270d723c
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/ExecEnv.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file ExecEnv.h
+ * @brief This file contains ExecEnv to access interpreter tensor and execution status
+ */
+#ifndef __NEURUN_EXEC_INTERP_EXEC_ENV_H_
+#define __NEURUN_EXEC_INTERP_EXEC_ENV_H_
+
+#include <unordered_set>
+
+#include "model/Model.h"
+#include "Tensor.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+/**
+ * @brief Class to gather interpreter execution environment
+ * Each interpreter instance owns its own execution environment
+ */
+class ExecEnv
+{
+public:
+ /**
+ * @brief Construct a new Exec Env object (deleted)
+ */
+ ExecEnv(void) = delete;
+ /**
+ * @brief Construct a new ExecEnv object
+ * @param[in] model Model to execute by interpreter
+ */
+ ExecEnv(const std::shared_ptr<const model::Model> &model) : _model{model}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Return model to execute
+ * @return Model
+ */
+ const model::Model &model(void) const { return *_model; }
+ /**
+ * @brief Assign a tensor that has an allocated or assigned buffer to the environment
+ * @param[in] index Tensor index
+ * @param[in] tensor Tensor
+ */
+ void assignTensor(const model::OperandIndex index, std::shared_ptr<ITensor> tensor)
+ {
+ assert(tensor->bufferRO() != nullptr);
+ _tensors.emplace(index, tensor);
+ }
+
+ /**
+ * @brief Return tensor pointer in environment
+ * @param[in] index Tensor index
+ * @return Tensor pointer
+ */
+ const ITensor *tensorAt(const model::OperandIndex index) const
+ {
+ return _tensors.at(index).get();
+ }
+
+ /**
+ * @brief Check environment contains tensor
+ * @param[in] index Tensor index
+ * @return @c true if the environment contains the tensor, otherwise @c false
+ */
+ bool contains(const model::OperandIndex index) const
+ {
+ return (_tensors.find(index) != _tensors.end());
+ }
+
+ /**
+ * @brief Allocate tensor using operand info
+ * @param[in] index Tensor index
+ * @param[in] info Operand info
+ * @note If already allocated, just return
+ * @TODO Smarter allocation policy
+ */
+ void allocateIfNeeded(const model::OperandIndex index, const model::OperandInfo &info)
+ {
+ // already allocated, or constant
+ if (contains(index))
+ {
+ return;
+ }
+
+ auto tensor = std::make_shared<Tensor>(info);
+ tensor->setBuffer(std::make_shared<InternalBuffer>(tensor->total_size()));
+ assignTensor(index, tensor);
+ _buffers.insert(index);
+ }
+
+ /**
+ * @brief Allocate read-only tensor and share data with other tensor
+ * @param[in] index Tensor index
+ * @param[in] info Operand info
+ * @param[in] index_to_share Tensor index that has the data to share
+ */
+ void allocateAndShareIfNeeded(const model::OperandIndex index, const model::OperandInfo &info,
+ const model::OperandIndex index_to_share)
+ {
+ if (!contains(index_to_share))
+ {
+ throw std::runtime_error{"Cannot find tensor to share data"};
+ }
+
+ // already allocated
+ if (contains(index))
+ {
+ return;
+ }
+ else
+ {
+ auto tensor = std::make_shared<ROTensor>(info);
+ tensor->setData(tensorAt(index_to_share)->shareData());
+ assignTensor(index, tensor);
+ _buffers.insert(index);
+ }
+ }
+
+ /**
+ * @brief Free buffer if allocated by allocateIfNeeded
+ * @param[in] index Tensor index
+ * @note If allocated by outside, just return
+ */
+ void freeIfAllocated(const model::OperandIndex index)
+ {
+ if (_buffers.find(index) != _buffers.end())
+ {
+ _tensors.at(index)->releaseData();
+ }
+ }
+
+private:
+ std::shared_ptr<const model::Model> _model;
+ // Tensor map to use in interpreter
+ // It should map tensors that have allocated or assigned buffer pointer
+ std::unordered_map<model::OperandIndex, std::shared_ptr<ITensor>> _tensors;
+ // Tensors allocated by allocateIfNeed (buffer)
+ std::unordered_set<model::OperandIndex> _buffers;
+};
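+
+// Usage sketch (illustrative only; `model`, `index`, and `info` are assumed to
+// come from the caller):
+//
+//   ExecEnv env{model};
+//   env.allocateIfNeeded(index, info);      // owns an InternalBuffer for `index`
+//   const ITensor *t = env.tensorAt(index); // read access during interpretation
+//   env.freeIfAllocated(index);             // released only if allocated above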
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_INTERP_EXEC_ENV_H_
diff --git a/runtimes/neurun/core/src/exec/interp/ExecManager.cc b/runtimes/neurun/core/src/exec/interp/ExecManager.cc
new file mode 100644
index 000000000..96f503eea
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/ExecManager.cc
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ExecManager.h"
+#include "ExecEnv.h"
+#include "Interpreter.h"
+
+#include "util/logging.h"
+
+#include <cpp14/memory.h>
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+void ExecManager::execute(const IODescription &desc)
+{
+ /************************************************************************
+ * Prepare execution model (submodel)
+ * The interpreter may eventually execute a divided model,
+ * but for now assume the whole model is inferred by the interpreter
+ ***********************************************************************/
+ model::OperandIndexMap<std::shared_ptr<ITensor>> tensor_map;
+
+ for (uint32_t n = 0; n < _model->inputs.size(); n++)
+ {
+ neurun::model::IOIndex index{n};
+ const auto input_index = _model->inputs.at(index);
+ const auto &input = *desc.inputs.at(n);
+
+ auto input_tensor = std::make_shared<ROTensor>(input.info);
+ input_tensor->setData(std::make_shared<const model::ExternalData>(
+ reinterpret_cast<const uint8_t *>(input.buffer), input.size));
+ tensor_map[input_index] = input_tensor;
+ }
+
+ for (uint32_t n = 0; n < _model->outputs.size(); n++)
+ {
+ neurun::model::IOIndex index{n};
+ const auto output_index = _model->outputs.at(index);
+ const auto &output = *desc.outputs.at(n);
+
+ auto output_tensor = std::make_shared<Tensor>(output.info);
+ output_tensor->setBuffer(
+ std::make_shared<ExternalBuffer>(reinterpret_cast<uint8_t *>(output.buffer), output.size));
+ tensor_map[output_index] = output_tensor;
+ }
+
+ /************************************************************************
+ * Prepare execution environment
+ * The execution environment will be handed to the invoked interpreter instance
+ ***********************************************************************/
+
+ std::unique_ptr<ExecEnv> interp_env = nnfw::cpp14::make_unique<ExecEnv>(_model);
+
+ // Assign input tensor into interpreter execution environment
+ for (auto index : _model->inputs)
+ {
+ if (tensor_map.find(index) != tensor_map.end())
+ {
+ VERBOSE(INTERPRETER) << "Assign input tensor. operand index:" << index.value() << std::endl;
+ interp_env->assignTensor(index, tensor_map.at(index));
+ }
+ }
+
+ // Assign output tensor into interpreter execution environment
+ for (auto index : _model->outputs)
+ {
+ if (tensor_map.find(index) != tensor_map.end())
+ {
+ VERBOSE(INTERPRETER) << "Assign output tensor. operand index: " << index.value() << std::endl;
+ interp_env->assignTensor(index, tensor_map.at(index));
+ }
+ }
+
+ // Allocate constant tensor
+ _model->operands.iterate([&](const model::OperandIndex &ind, const model::Operand &obj) {
+ if (obj.isConstant())
+ {
+ VERBOSE(INTERPRETER) << "Allocate and assign constant tensor. operand index:" << ind.value()
+ << std::endl;
+
+ auto const_tensor = std::make_shared<ROTensor>(obj.info());
+ // Assume that the interpreter's tensor layout is the same as the model's (NHWC)
+ const_tensor->setData(
+ std::make_shared<model::ExternalData>(obj.data().base(), obj.info().total_size()));
+ interp_env->assignTensor(ind, const_tensor);
+ }
+ });
+
+ /*****************************************************************************
+ * Invoke interpreter
+ ****************************************************************************/
+
+ Interpreter interp(std::move(interp_env));
+ interp.run();
+
+ /*****************************************************************************
+ * Invoked interpreter run is finished
+ ****************************************************************************/
+
+ // If the interpreter executed a submodel:
+ // 1. Copy the submodel's output tensors into tensor_map to save the result
+ // 2. Generate a new ExecEnv for the next interpretation
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/ExecManager.h b/runtimes/neurun/core/src/exec/interp/ExecManager.h
new file mode 100644
index 000000000..77486dcaf
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/ExecManager.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file ExecManager.h
+ * @brief This file contains ExecManager class\n
+ * to manage interpreter execution and environment
+ */
+#ifndef __NEURUN_EXEC_INTERP_EXEC_MANAGER_H_
+#define __NEURUN_EXEC_INTERP_EXEC_MANAGER_H_
+
+#include "model/OperandIndexMap.h"
+#include "model/OperationIndexMap.h"
+#include "exec/IExecutor.h"
+#include "Tensor.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+/**
+ * @brief Class to execute model using interpreter
+ */
+class ExecManager final : public IExecutor
+{
+public:
+ ExecManager(const std::shared_ptr<const model::Model> &model) : _model{model}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Return graph model
+ * @return Graph model
+ */
+ const model::Model &model() override { return *_model; }
+ void setIndexedRanks(std::shared_ptr<model::OperationIndexMap<int64_t>>) override
+ {
+ // Not implemented
+ }
+ /**
+ * @brief Start execution
+ * @note It should be called after setting input and output buffer
+ */
+ void execute(const IODescription &desc) final;
+
+private:
+ std::shared_ptr<const model::Model> _model;
+ model::OperandIndexMap<std::shared_ptr<ITensor>> _tensor_map;
+};
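+
+// Usage sketch (illustrative only; `model` and a filled IODescription `desc` are
+// assumed to be prepared by the frontend):
+//
+//   ExecManager manager{model};
+//   manager.execute(desc); // wraps desc buffers as tensors and runs the interpreter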
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_INTERP_EXEC_MANAGER_H_
diff --git a/runtimes/neurun/core/src/exec/interp/Interpreter.cc b/runtimes/neurun/core/src/exec/interp/Interpreter.cc
new file mode 100644
index 000000000..81de27c36
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/Interpreter.cc
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Interpreter.h"
+
+#include <stack>
+#include <unordered_set>
+
+#include "Registration.h"
+
+#include "model/OperandIndexMap.h"
+#include "util/logging.h"
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+// TODO more structured execution kernel implementation
+// TODO use cker for execution
+// TODO divide tensor prepare and execution
+// TODO introduce memory manager (buffer allocate and free)
+class OperationExecutor : model::OperationVisitor
+{
+public:
+#define OP(InternalName, IsNnApi) InternalName,
+ enum class NodeName
+ {
+#include "model/Operations.lst"
+ };
+#undef OP
+
+public:
+ OperationExecutor(ExecEnv *env) : _env{env}
+ {
+ _kernels[NodeName::AddNode] = getAddNode();
+ _kernels[NodeName::Conv2DNode] = getConv2DNode();
+ _kernels[NodeName::MaxPool2DNode] = getMaxPool2DNode();
+ _kernels[NodeName::ConcatNode] = getConcatNode();
+ _kernels[NodeName::AvgPool2DNode] = getAvgPool2DNode();
+ _kernels[NodeName::FullyConnectedNode] = getFullyConnectedNode();
+ _kernels[NodeName::SoftmaxNode] = getSoftMaxNode();
+ _kernels[NodeName::ReshapeNode] = getReshapeNode();
+ _kernels[NodeName::DepthwiseConv2DNode] = getDepthwiseConvNode();
+ }
+
+ void execute(const model::OperationIndex &idx)
+ {
+ const auto nodeName = _env->model().operations.at(idx).getName();
+ VERBOSE(INTERPRETER) << "Prepare output operands and execute " << nodeName
+ << " operation (id: " << idx.value() << ")" << std::endl;
+ _env->model().operations.at(idx).accept(*this);
+ }
+
+private:
+#define OP(InternalName, IsNnApi) \
+ virtual void visit(const model::operation::InternalName &node) override \
+ { \
+ if (_kernels[NodeName::InternalName]->prepare != nullptr) \
+ { \
+ _kernels[NodeName::InternalName]->prepare(_env, node); \
+ } \
+ _kernels[NodeName::InternalName]->invoke(_env, node); \
+ }
+#include "model/Operations.lst"
+#undef OP
+
+private:
+ ExecEnv *_env;
+ std::unordered_map<NodeName, OpKernel *> _kernels;
+};
+
+void Interpreter::run()
+{
+ VERBOSE(INTERPRETER) << "Interpreter is invoked " << std::endl;
+
+ // operand_stack: save operands prepared to use
+ std::stack<model::OperandIndex> operand_stack;
+
+ // Note: We should push inputs first, then constants.
+ // We use use-def chains to find operations ready for execution,
+ // but use-def cannot handle parameters (usually constants, but not always).
+ // Note: If all model inputs are constant, this may not work (it depends on tensor order),
+ // but that scenario is unlikely in practice.
+ for (auto ind : _env->model().inputs)
+ {
+ VERBOSE(INTERPRETER) << "Input: Push to operand stack " << ind.value() << std::endl;
+
+ operand_stack.push(ind);
+ }
+
+ _env->model().operands.iterate([&](const model::OperandIndex &ind, const model::Operand &obj) {
+ if (obj.isConstant())
+ {
+ VERBOSE(INTERPRETER) << "Constant: Push to operand stack " << ind.value() << std::endl;
+
+ operand_stack.push(ind);
+ }
+ });
+
+ // Execution
+ std::unordered_set<model::OperandIndex> ready_check;
+ std::unordered_set<model::OperationIndex> executed;
+ OperationExecutor executor{_env.get()};
+ while (!operand_stack.empty())
+ {
+ const auto current_operand_index = operand_stack.top();
+ operand_stack.pop();
+ VERBOSE(INTERPRETER) << "Poped operand " << current_operand_index.value()
+ << " is checked ready to use" << std::endl;
+
+ assert(ready_check.find(current_operand_index) == ready_check.end());
+ ready_check.insert(current_operand_index);
+
+ // Find ready operations by scanning the uses of the current operand
+ std::stack<model::OperationIndex> operation_stack;
+ const auto use_operators = _env->model().operands.at(current_operand_index).getUses();
+ for (auto use_operator : use_operators.list())
+ {
+ // Assumption: all parameters are ready to use
+ bool operator_ready = true;
+ for (auto input_index : _env->model().operations.at(use_operator).getInputs())
+ {
+ if (ready_check.find(input_index) == ready_check.end())
+ {
+ operator_ready = false;
+ break;
+ }
+ }
+
+ if (operator_ready)
+ {
+ VERBOSE(INTERPRETER) << "Ready to execute operation " << use_operator.value() << std::endl;
+ operation_stack.push(use_operator);
+ }
+ }
+
+ while (!operation_stack.empty())
+ {
+ const auto current_operation_index = operation_stack.top();
+ operation_stack.pop();
+ VERBOSE(INTERPRETER) << "Poped operation: " << current_operation_index.value() << "("
+ << _env->model().operations.at(current_operation_index).getName() << ")"
+ << std::endl;
+
+ // execution
+ // 1. Prepare output tensor
+ // 2. Call operation kernel
+ executor.execute(current_operation_index);
+ executed.insert(current_operation_index);
+
+ // 3. Push each output into operand stack
+ const auto def_operands = _env->model().operations.at(current_operation_index).getOutputs();
+ for (auto def_operand : def_operands)
+ {
+ VERBOSE(INTERPRETER) << "Buffer: Push to operand stack " << def_operand.value()
+ << std::endl;
+ operand_stack.push(def_operand);
+ }
+
+ // 4. Free input buffers whose last user has now executed
+ for (auto input_index : _env->model().operations.at(current_operation_index).getInputs())
+ {
+ const auto use_operators = _env->model().operands.at(input_index).getUses();
+ bool dead_buffer = true;
+ for (auto use_operator : use_operators.list())
+ {
+ if (executed.find(use_operator) == executed.end())
+ {
+ dead_buffer = false;
+ break;
+ }
+ }
+
+ if (dead_buffer)
+ {
+ _env->freeIfAllocated(input_index);
+ }
+ }
+ }
+ }
+}
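+
+// Worked example (illustrative): for a model `input -> Conv2D -> Softmax -> output`,
+// the input operand is pushed first, then the constant kernel/bias operands.
+// Popping the last of those marks Conv2D ready; Conv2D runs and pushes its output
+// operand, which in turn makes Softmax ready. After Softmax runs, the Conv2D output
+// buffer has no unexecuted users left and is freed.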
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/Interpreter.h b/runtimes/neurun/core/src/exec/interp/Interpreter.h
new file mode 100644
index 000000000..1b73592b3
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/Interpreter.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Interpreter.h
+ * @brief This file contains Interpreter class for interpretation
+ */
+#ifndef __NEURUN_EXEC_INTERP_INTERPRETER_H__
+#define __NEURUN_EXEC_INTERP_INTERPRETER_H__
+
+#include "ExecEnv.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+/**
+ * @brief Class for interpretation
+ */
+class Interpreter
+{
+
+public:
+ /**
+ * @brief Construct a new Interpreter object (deleted)
+ */
+ Interpreter() = delete;
+ /**
+ * @brief Construct a new Interpreter object
+ * @param[in] env Execution environment variable for interpreter object
+ */
+ Interpreter(std::unique_ptr<ExecEnv> env) : _env{std::move(env)}
+ {
+ // DO NOTHING
+ }
+
+public:
+ /**
+ * @brief Run interpreter until there is no operation to execute
+ */
+ void run();
+
+private:
+ std::unique_ptr<ExecEnv> _env;
+};
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_INTERP_INTERPRETER_H__
diff --git a/runtimes/neurun/core/src/exec/interp/Registration.h b/runtimes/neurun/core/src/exec/interp/Registration.h
new file mode 100644
index 000000000..37c591f9d
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/Registration.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_INTERP_REGISTRATION_H__
+#define __NEURUN_EXEC_INTERP_REGISTRATION_H__
+
+#include "ExecEnv.h"
+
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+struct OpKernel
+{
+ std::function<void(ExecEnv *, const model::Operation &)> prepare;
+ std::function<void(const ExecEnv *, const model::Operation &)> invoke;
+};
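+
+// Sketch of how a kernel pairs with this struct (illustrative; `Foo` is hypothetical,
+// but the pattern mirrors the real registrations in operations/, e.g. getAddNode()):
+//
+//   void prepareFoo(ExecEnv *env, const model::Operation &node);      // allocate outputs
+//   void invokeFoo(const ExecEnv *env, const model::Operation &node); // compute
+//
+//   OpKernel *getFooNode()
+//   {
+//     static OpKernel kernel = {prepareFoo, invokeFoo};
+//     return &kernel;
+//   }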
+
+// Defined in operations/ directory
+OpKernel *getAddNode();
+OpKernel *getConv2DNode();
+OpKernel *getMaxPool2DNode();
+OpKernel *getConcatNode();
+OpKernel *getAvgPool2DNode();
+OpKernel *getFullyConnectedNode();
+OpKernel *getSoftMaxNode();
+OpKernel *getDepthwiseConvNode();
+OpKernel *getReshapeNode();
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_INTERP_REGISTRATION_H__
diff --git a/runtimes/neurun/core/src/exec/interp/Tensor.cc b/runtimes/neurun/core/src/exec/interp/Tensor.cc
new file mode 100644
index 000000000..becb73786
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/Tensor.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Tensor.h"
+
+#define NO_USE(a) (void)(a)
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+size_t ROTensor::calcOffset(const neurun::util::Coordinates &coords) const
+{
+ NO_USE(coords);
+ throw std::runtime_error("offset_element_in_bytes is not supported for cpu::Tensor now.");
+}
+
+size_t Tensor::calcOffset(const neurun::util::Coordinates &coords) const
+{
+ NO_USE(coords);
+ throw std::runtime_error("offset_element_in_bytes is not supported for cpu::Tensor now.");
+}
+
+model::Layout ROTensor::layout() const
+{
+ // TODO Change to return the frontend layout
+ return model::Layout::NHWC;
+}
+
+model::Layout Tensor::layout() const
+{
+ // TODO Change to return the frontend layout
+ return model::Layout::NHWC;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/Tensor.h b/runtimes/neurun/core/src/exec/interp/Tensor.h
new file mode 100644
index 000000000..c8237de1e
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/Tensor.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Tensor.h
+ * @brief This file contains ITensor interface, ROTensor class, and Tensor class
+ */
+#ifndef __NEURUN_EXEC_INTERP_TENSOR_H__
+#define __NEURUN_EXEC_INTERP_TENSOR_H__
+
+#include "Buffer.h"
+
+#include "model/OperandInfo.h"
+#include "backend/operand/ITensor.h"
+#include "model/Layout.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+/**
+ * @brief Interface to handle Tensor in interpreter
+ */
+class ITensor : public backend::operand::ITensor
+{
+public:
+ virtual ~ITensor() = default;
+
+public:
+ virtual uint8_t *buffer() const = 0;
+ /**
+ * @brief Return shared pointer for buffer
+ * @return Buffer shared pointer
+ */
+ virtual std::shared_ptr<const Buffer> shareBuffer() const = 0;
+ /**
+ * @brief Return read-only buffer pointer
+ * @return Read-only buffer pointer
+ */
+ virtual const uint8_t *bufferRO() const = 0;
+ /**
+ * @brief Return shared pointer for data
+ * @return Data shared pointer
+ */
+ virtual std::shared_ptr<const model::Data> shareData() const = 0;
+ /**
+ * @brief Set internal/external buffer
+ * @param[in] buffer Buffer pointer
+ */
+ virtual void setBuffer(std::shared_ptr<const Buffer> buffer) = 0;
+ /**
+ * @brief Set data reference (including constant, input)
+ * @param[in] data Data pointer
+ */
+ virtual void setData(std::shared_ptr<const model::Data> data) = 0;
+ virtual void releaseData() = 0;
+
+ virtual size_t total_size() const = 0;
+ virtual size_t dimension(size_t index) const = 0;
+ virtual size_t num_dimensions() const = 0;
+ virtual size_t calcOffset(const util::Coordinates &coords) const = 0;
+
+ virtual bool has_padding() const = 0;
+ /**
+ * @brief Return data type of tensor
+ * @return Data type of tensor
+ */
+ virtual model::DataType data_type() const = 0;
+ /**
+ * @brief Return TensorInfo
+ * @return TensorInfo
+ */
+ virtual const model::OperandInfo &tensorInfo() const = 0;
+ /**
+ * @brief Return number of elements
+ * @return Number of elements
+ */
+ virtual uint64_t num_elements() const = 0;
+};
+
+/**
+ * @brief Class to handle tensor in interpreter as read-only
+ */
+class ROTensor final : public ITensor
+{
+public:
+ ROTensor() = delete;
+ ROTensor(const model::OperandInfo &info) : _info(info)
+ {
+ // DO NOTHING
+ }
+
+public:
+ uint8_t *buffer() const override { throw std::runtime_error{"Read only tensor"}; }
+ std::shared_ptr<const Buffer> shareBuffer() const override
+ {
+ throw std::runtime_error{"Read only tensor"};
+ }
+ const uint8_t *bufferRO() const override { return _data->base(); }
+ std::shared_ptr<const model::Data> shareData() const override { return _data; }
+ void setBuffer(std::shared_ptr<const Buffer> buffer) override { _data = buffer; }
+ void setData(std::shared_ptr<const model::Data> data) override { _data = data; }
+ void releaseData() override { _data = nullptr; }
+
+ size_t total_size() const override { return _info.total_size(); }
+ size_t dimension(size_t index) const override { return _info.shape().dim(index); }
+ size_t num_dimensions() const override { return _info.shape().rank(); }
+ size_t calcOffset(const util::Coordinates &coords) const override;
+ model::Layout layout() const override;
+ bool has_padding() const override { return false; }
+ model::DataType data_type() const override { return _info.typeInfo().type(); }
+ const model::OperandInfo &tensorInfo() const override { return _info; }
+ uint64_t num_elements() const override { return _info.shape().num_elements(); }
+
+private:
+ const model::OperandInfo _info;
+ std::shared_ptr<const model::Data> _data{nullptr};
+};
+
+/**
+ * @brief Class to handle tensor in interpreter as writable
+ */
+class Tensor final : public ITensor
+{
+public:
+ Tensor() = delete;
+ Tensor(const model::OperandInfo &info) : _info(info)
+ {
+ // DO NOTHING
+ }
+
+public:
+ uint8_t *buffer() const override { return _buffer->baseWritable(); }
+ std::shared_ptr<const Buffer> shareBuffer() const override { return _buffer; }
+ const uint8_t *bufferRO() const override { return _buffer->base(); }
+ std::shared_ptr<const model::Data> shareData() const override { return _buffer; }
+ void setBuffer(std::shared_ptr<const Buffer> buffer) override { _buffer = buffer; }
+ void setData(std::shared_ptr<const model::Data>) override
+ {
+ throw std::runtime_error{"Passed data may read-only"};
+ }
+ void releaseData() override { _buffer = nullptr; }
+
+ size_t total_size() const override { return _info.total_size(); }
+ size_t dimension(size_t index) const override { return _info.shape().dim(index); }
+ size_t num_dimensions() const override { return _info.shape().rank(); }
+ size_t calcOffset(const util::Coordinates &coords) const override;
+ model::Layout layout() const override;
+ bool has_padding() const override { return false; }
+ model::DataType data_type() const override { return _info.typeInfo().type(); }
+ const model::OperandInfo &tensorInfo() const override { return _info; }
+ uint64_t num_elements() const override { return _info.shape().num_elements(); }
+
+private:
+ const model::OperandInfo _info;
+ std::shared_ptr<const Buffer> _buffer{nullptr};
+};
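+
+// Usage sketch (illustrative only; `info` is an OperandInfo prepared by the caller):
+//
+//   auto writable = std::make_shared<Tensor>(info);
+//   writable->setBuffer(std::make_shared<InternalBuffer>(writable->total_size()));
+//
+//   auto read_only = std::make_shared<ROTensor>(info);
+//   read_only->setData(writable->shareData()); // share data without copying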
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_INTERP_TENSOR_H__
diff --git a/runtimes/neurun/core/src/exec/interp/operations/Add.cc b/runtimes/neurun/core/src/exec/interp/operations/Add.cc
new file mode 100644
index 000000000..666c3cba6
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/Add.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/Add.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/AddNode.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace add
+{
+
+void prepareAdd(ExecEnv *env, const model::Operation &node)
+{
+ const auto &add_node = nnfw::misc::polymorphic_downcast<const model::operation::AddNode &>(node);
+
+ const auto lhs_index = node.getInputs().at(add_node.LHS);
+ const auto rhs_index = node.getInputs().at(add_node.RHS);
+ const auto out_index = node.getOutputs().at(0);
+
+ // Fetch operand tensors to validate shapes and types
+ const auto lhs_tensor = env->tensorAt(lhs_index);
+ const auto rhs_tensor = env->tensorAt(rhs_index);
+ UNUSED_RELEASE(rhs_tensor);
+
+ // Check that lhs shape and type are the same as rhs
+ // TODO Util function to compare TensorInfo
+ // TODO Handle broadcasting
+ assert(lhs_tensor->data_type() == rhs_tensor->data_type());
+ assert(lhs_tensor->num_dimensions() == rhs_tensor->num_dimensions());
+ for (uint32_t i = 0; i < lhs_tensor->num_dimensions(); i++)
+ {
+ assert(lhs_tensor->dimension(i) == rhs_tensor->dimension(i));
+ }
+
+ // Output shape and type should be the same as the input's (broadcast not considered)
+ auto output_info = lhs_tensor->tensorInfo();
+ // We can handle already allocated (ex. model output)
+ env->allocateIfNeeded(out_index, output_info);
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Check that lhs shape and type are the same as the output
+ // TODO Util function to compare TensorInfo
+ // TODO Handle broadcasting
+ assert(lhs_tensor->data_type() == out_tensor->data_type());
+ assert(lhs_tensor->num_dimensions() == out_tensor->num_dimensions());
+ for (uint32_t i = 0; i < lhs_tensor->num_dimensions(); i++)
+ {
+ assert(lhs_tensor->dimension(i) == out_tensor->dimension(i));
+ }
+}
+
+inline void setActivationParams(float min, float max, nnfw::cker::AddParam *params)
+{
+ params->float_activation_min = min;
+ params->float_activation_max = max;
+}
+
+inline void setActivationParams(int32_t min, int32_t max, nnfw::cker::AddParam *params)
+{
+ params->quantized_activation_min = min;
+ params->quantized_activation_max = max;
+}
+
+template <typename raw_type>
+void invoke(const ITensor *lhs_tensor, const ITensor *rhs_tensor, const ITensor *out_tensor,
+ const model::operation::AddNode::Param &param)
+{
+ const auto lhs_buffer = lhs_tensor->bufferRO();
+ const auto rhs_buffer = rhs_tensor->bufferRO();
+ auto out_buffer = out_tensor->buffer();
+
+ nnfw::cker::AddParam cker_param;
+ raw_type activation_min, activation_max;
+ calculateActivationRange(param.activation, &activation_min, &activation_max);
+ setActivationParams(activation_min, activation_max, &cker_param);
+ const auto lhs_shape = convertShape(lhs_tensor->tensorInfo().shape());
+ const auto rhs_shape = convertShape(rhs_tensor->tensorInfo().shape());
+ const auto out_shape = convertShape(out_tensor->tensorInfo().shape());
+ const raw_type *lhs_ptr = reinterpret_cast<const raw_type *>(lhs_buffer);
+ const raw_type *rhs_ptr = reinterpret_cast<const raw_type *>(rhs_buffer);
+ raw_type *out_ptr = reinterpret_cast<raw_type *>(out_buffer);
+
+ // Calculate
+ nnfw::cker::Add(cker_param, lhs_shape, lhs_ptr, rhs_shape, rhs_ptr, out_shape, out_ptr);
+}
+
+void invokeAdd(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &add_node = nnfw::misc::polymorphic_downcast<const model::operation::AddNode &>(node);
+
+ const auto lhs_index = node.getInputs().at(add_node.LHS);
+ const auto rhs_index = node.getInputs().at(add_node.RHS);
+ const auto out_index = node.getOutputs().at(0);
+ const auto lhs_tensor = env->tensorAt(lhs_index);
+ const auto rhs_tensor = env->tensorAt(rhs_index);
+ const auto out_tensor = env->tensorAt(out_index);
+ const auto data_type = lhs_tensor->data_type();
+
+ if (data_type == model::DataType::INT32)
+ {
+ invoke<int32_t>(lhs_tensor, rhs_tensor, out_tensor, add_node.param());
+ }
+ else if (data_type == model::DataType::FLOAT32)
+ {
+ invoke<float>(lhs_tensor, rhs_tensor, out_tensor, add_node.param());
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Unsupported data type"};
+ }
+}
+} // namespace add
+
+OpKernel *getAddNode()
+{
+ static OpKernel kernel = {add::prepareAdd, add::invokeAdd};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/AvgPool2D.cc b/runtimes/neurun/core/src/exec/interp/operations/AvgPool2D.cc
new file mode 100644
index 000000000..b6dfba85c
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/AvgPool2D.cc
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/AveragePool.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/AvgPool2DNode.h"
+#include "util/Utils.h"
+#include "util/Padding.h"
+#include "util/ShapeInference.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace avgpool2d
+{
+
+void prepareAvgPool2D(ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+ UNUSED_RELEASE(in_tensor);
+
+ assert(in_tensor->num_dimensions() == 4);
+
+ const auto output_info = env->model().operands.at(out_index).info();
+ if (output_info.total_size() == 0)
+ {
+ // Handle unspecified output shape
+ const auto &avgpool_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::AvgPool2DNode &>(node);
+ const auto infered_output_shapes =
+ shape_inference::inferAvgPoolShape(in_tensor->tensorInfo().shape(), avgpool_node.param());
+ env->allocateIfNeeded(out_index, {infered_output_shapes[0], output_info.typeInfo()});
+ }
+ else
+ {
+ env->allocateIfNeeded(out_index, output_info);
+ }
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Handle same ifm & ofm data type only
+ assert(in_tensor->data_type() == out_tensor->data_type());
+ assert(out_tensor->num_dimensions() == 4);
+}
+
+void invoke(const ITensor *in_tensor, const ITensor *out_tensor,
+ const model::operation::AvgPool2DNode::Param &param)
+{
+ // TODO Support NCHW frontend
+ const auto ifm_shape = in_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ const auto ofm_shape = out_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ const auto padding = neurun::util::calculatePadding(param.padding, ifm_shape, ofm_shape,
+ param.stride, param.kw, param.kh);
+ // Calculate
+ nnfw::cker::AveragePoolParams cker_param;
+ calculateActivationRange(param.activation, &cker_param.float_activation_min,
+ &cker_param.float_activation_max);
+ cker_param.filter_width = param.kw;
+ cker_param.filter_height = param.kh;
+ cker_param.padding_values.width = padding.left;
+ cker_param.padding_values.height = padding.top;
+ cker_param.stride_width = param.stride.horizontal;
+ cker_param.stride_height = param.stride.vertical;
+
+ const auto in_shape = convertShape(in_tensor->tensorInfo().shape());
+ const auto out_shape = convertShape(out_tensor->tensorInfo().shape());
+ const float *in_ptr = reinterpret_cast<const float *>(in_tensor->bufferRO());
+ float *out_ptr = reinterpret_cast<float *>(out_tensor->buffer());
+
+ nnfw::cker::AveragePool(cker_param, in_shape, in_ptr, out_shape, out_ptr);
+}
+
+void invokeAvgPool2D(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &avgpool_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::AvgPool2DNode &>(node);
+
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ // Fetch input and output tensors
+ const auto in_tensor = env->tensorAt(in_index);
+ const auto out_tensor = env->tensorAt(out_index);
+
+ const auto data_type = in_tensor->data_type();
+ if (data_type == model::DataType::FLOAT32)
+ {
+ invoke(in_tensor, out_tensor, avgpool_node.param());
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Support float only"};
+ }
+}
+} // namespace avgpool2d
+
+OpKernel *getAvgPool2DNode()
+{
+ static OpKernel kernel = {avgpool2d::prepareAvgPool2D, avgpool2d::invokeAvgPool2D};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/Concat.cc b/runtimes/neurun/core/src/exec/interp/operations/Concat.cc
new file mode 100644
index 000000000..09a86c179
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/Concat.cc
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/Concatenation.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/ConcatNode.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace concat
+{
+
+void prepareConcat(ExecEnv *env, const model::Operation &node)
+{
+ const auto &concat_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::ConcatNode &>(node);
+
+ const auto first_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto first_tensor = env->tensorAt(first_index);
+ uint32_t out_axis_dimension = 0;
+ const int32_t axis_raw = concat_node.param().axis;
+ const uint32_t axis = (axis_raw < 0) ? (axis_raw + first_tensor->num_dimensions()) : axis_raw;
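+ // e.g. axis_raw == -1 on a rank-4 input resolves to axis == 3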
+
+ // All input shapes should be the same except for the axis dimension
+ // All input types should be the same
+ for (auto input : node.getInputs())
+ {
+ assert(first_tensor->num_dimensions() == env->tensorAt(input)->num_dimensions());
+ assert(first_tensor->data_type() == env->tensorAt(input)->data_type());
+ for (uint32_t i = 0; i < first_tensor->num_dimensions(); i++)
+ {
+ if (i == axis)
+ {
+ out_axis_dimension += env->tensorAt(input)->dimension(i);
+ continue;
+ }
+ assert(first_tensor->dimension(i) == env->tensorAt(input)->dimension(i));
+ }
+ }
+
+ // Build the output tensor info from the first input's info and the accumulated axis dimension
+ auto out_shape = first_tensor->tensorInfo().shape();
+ out_shape.dim(axis) = out_axis_dimension;
+ env->allocateIfNeeded(out_index,
+ model::OperandInfo{out_shape, first_tensor->tensorInfo().typeInfo()});
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Output shape should be the same as the input's except for the axis dimension
+ // Output type should be the same as the input's
+ assert(first_tensor->data_type() == out_tensor->data_type());
+ for (uint32_t i = 0; i < first_tensor->num_dimensions(); i++)
+ {
+ if (i == axis)
+ {
+ continue;
+ }
+ assert(first_tensor->dimension(i) == out_tensor->dimension(i));
+ }
+}
+
+void invoke(const std::vector<const ITensor *> in_tensors, const ITensor *out_tensor, uint32_t axis)
+{
+ const uint32_t count = in_tensors.size();
+
+ // Calculate
+ nnfw::cker::ConcatenationParams cker_param;
+ cker_param.axis = (int8_t)axis;
+ cker_param.inputs_count = count;
+
+ const auto out_shape = convertShape(out_tensor->tensorInfo().shape());
+
+ std::vector<nnfw::cker::Shape> in_shapes;
+ std::vector<const nnfw::cker::Shape *> in_shape_ptrs;
+ in_shapes.reserve(count);
+ in_shape_ptrs.reserve(count);
+ std::vector<const float *> in_ptrs;
+ for (uint32_t i = 0; i < count; i++)
+ {
+ in_shapes.push_back(convertShape(in_tensors[i]->tensorInfo().shape()));
+ in_shape_ptrs.push_back(&in_shapes[i]);
+ in_ptrs.push_back(reinterpret_cast<const float *>(in_tensors[i]->bufferRO()));
+ }
+
+ auto out_buffer = out_tensor->buffer();
+ float *out_ptr = reinterpret_cast<float *>(out_buffer);
+
+ nnfw::cker::Concatenation<float>(cker_param, in_shape_ptrs.data(), in_ptrs.data(), out_shape,
+ out_ptr);
+}
+
+void invokeConcat(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &concat_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::ConcatNode &>(node);
+ const int32_t axis_raw = concat_node.param().axis;
+
+ std::vector<const ITensor *> in_tensors;
+ for (const auto &e : concat_node.getInputs())
+ {
+ in_tensors.emplace_back(env->tensorAt(e));
+ }
+
+ const auto out_index = node.getOutputs().at(0);
+ const auto out_tensor = env->tensorAt(out_index);
+ const uint32_t axis = (axis_raw < 0) ? (axis_raw + out_tensor->num_dimensions()) : axis_raw;
+
+ const auto data_type = in_tensors[0]->data_type();
+ if (data_type == model::DataType::FLOAT32)
+ {
+ invoke(in_tensors, out_tensor, axis);
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Support float32 only"};
+ }
+}
+} // namespace concat
+
+OpKernel *getConcatNode()
+{
+ static OpKernel kernel = {concat::prepareConcat, concat::invokeConcat};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/Conv2D.cc b/runtimes/neurun/core/src/exec/interp/operations/Conv2D.cc
new file mode 100644
index 000000000..92f4f6415
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/Conv2D.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/Conv.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/Conv2DNode.h"
+#include "util/Utils.h"
+#include "util/Padding.h"
+#include "util/ShapeInference.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace conv2d
+{
+
+void prepareConv2D(ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(model::operation::Conv2DNode::INPUT);
+ const auto kernel_index = node.getInputs().at(model::operation::Conv2DNode::KERNEL);
+ const auto bias_index = node.getInputs().at(model::operation::Conv2DNode::BIAS);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+ const auto kernel_tensor = env->tensorAt(kernel_index);
+ const auto bias_tensor = env->tensorAt(bias_index);
+
+ assert(in_tensor->num_dimensions() == 4);
+ assert(kernel_tensor->num_dimensions() == 4);
+ assert(bias_tensor->num_dimensions() == 1);
+
+ UNUSED_RELEASE(in_tensor);
+ UNUSED_RELEASE(kernel_tensor);
+ UNUSED_RELEASE(bias_tensor);
+
+ const auto output_info = env->model().operands.at(out_index).info();
+ if (output_info.total_size() == 0)
+ {
+ // Handle unspecified output shape
+ const auto &conv_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::Conv2DNode &>(node);
+ const auto infered_output_shapes = shape_inference::inferConv2DShape(
+ in_tensor->tensorInfo().shape(), kernel_tensor->tensorInfo().shape(), conv_node.param());
+ env->allocateIfNeeded(out_index, {infered_output_shapes[0], output_info.typeInfo()});
+ }
+ else
+ {
+ env->allocateIfNeeded(out_index, output_info);
+ }
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Handle same ifm & ofm data type only
+ assert(in_tensor->data_type() == out_tensor->data_type());
+ assert(out_tensor->num_dimensions() == 4);
+}
+
+void invoke(const ITensor *ifm_tensor, const ITensor *ker_tensor, const ITensor *bias_tensor,
+ const ITensor *ofm_tensor, const model::operation::Conv2DNode::Param &param)
+{
+ // TODO Support NCHW frontend
+ const auto ifm_shape = ifm_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ const auto ofm_shape = ofm_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ // Kernel format is [depth_out, kernel_height, kernel_width, depth_in].
+ const auto &ker_shape = ker_tensor->tensorInfo().shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+ const auto padding = neurun::util::calculatePadding(param.padding, ifm_shape, ofm_shape,
+ param.stride, ker_width, ker_height);
+
+ // Calculate activation range
+ float activation_min, activation_max;
+ calculateActivationRange(param.activation, &activation_min, &activation_max);
+
+ nnfw::cker::ConvParams cker_param;
+ cker_param.padding_values.width = padding.left;
+ cker_param.padding_values.height = padding.top;
+ cker_param.stride_width = param.stride.horizontal;
+ cker_param.stride_height = param.stride.vertical;
+ cker_param.dilation_width_factor = 1;
+ cker_param.dilation_height_factor = 1;
+ cker_param.float_activation_min = activation_min;
+ cker_param.float_activation_max = activation_max;
+
+ const auto cker_ifm_shape = convertShape(ifm_tensor->tensorInfo().shape());
+ const auto cker_ker_shape = convertShape(ker_tensor->tensorInfo().shape());
+ const auto cker_bias_shape = convertShape(bias_tensor->tensorInfo().shape());
+ const auto cker_ofm_shape = convertShape(ofm_tensor->tensorInfo().shape());
+ const float *ifm_ptr = reinterpret_cast<const float *>(ifm_tensor->bufferRO());
+ const float *ker_ptr = reinterpret_cast<const float *>(ker_tensor->bufferRO());
+ const float *bias_ptr = reinterpret_cast<const float *>(bias_tensor->bufferRO());
+ float *ofm_ptr = reinterpret_cast<float *>(ofm_tensor->buffer());
+
+ nnfw::cker::Conv(cker_param, cker_ifm_shape, ifm_ptr, cker_ker_shape, ker_ptr, cker_bias_shape,
+ bias_ptr, cker_ofm_shape, ofm_ptr);
+}
+
+void invokeConv2D(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &conv_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::Conv2DNode &>(node);
+
+ const auto ifm_index = node.getInputs().at(model::operation::Conv2DNode::INPUT);
+ const auto ker_index = node.getInputs().at(model::operation::Conv2DNode::KERNEL);
+ const auto bias_index = node.getInputs().at(model::operation::Conv2DNode::BIAS);
+ const auto ofm_index = node.getOutputs().at(0);
+
+ const auto ifm_tensor = env->tensorAt(ifm_index);
+ const auto ker_tensor = env->tensorAt(ker_index);
+ const auto bias_tensor = env->tensorAt(bias_index);
+ const auto ofm_tensor = env->tensorAt(ofm_index);
+
+ const auto data_type = ifm_tensor->data_type();
+ if (data_type == model::DataType::FLOAT32)
+ {
+ invoke(ifm_tensor, ker_tensor, bias_tensor, ofm_tensor, conv_node.param());
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Support float32 only"};
+ }
+}
+} // namespace conv2d
+
+OpKernel *getConv2DNode()
+{
+ static OpKernel kernel = {conv2d::prepareConv2D, conv2d::invokeConv2D};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/DepthwiseConv.cc b/runtimes/neurun/core/src/exec/interp/operations/DepthwiseConv.cc
new file mode 100644
index 000000000..e1e7c0674
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/DepthwiseConv.cc
@@ -0,0 +1,159 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/DepthwiseConv.h>
+#include <misc/polymorphic_downcast.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/DepthwiseConv2DNode.h"
+#include "util/Padding.h"
+#include "util/Utils.h"
+#include "util/ShapeInference.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
+namespace
+{
+
+void prepareDepthwiseConv(ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::INPUT);
+ const auto kernel_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::KERNEL);
+ const auto bias_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::BIAS);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+ const auto kernel_tensor = env->tensorAt(kernel_index);
+ const auto bias_tensor = env->tensorAt(bias_index);
+
+ assert(in_tensor->num_dimensions() == 4);
+ assert(kernel_tensor->num_dimensions() == 4);
+ assert(bias_tensor->num_dimensions() == 1);
+
+ UNUSED_RELEASE(in_tensor);
+ UNUSED_RELEASE(kernel_tensor);
+ UNUSED_RELEASE(bias_tensor);
+
+ const auto output_info = env->model().operands.at(out_index).info();
+ if (output_info.total_size() == 0)
+ {
+ // Handle unspecified output shape
+ const auto &depth_conv_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::DepthwiseConv2DNode &>(node);
+ const auto infered_output_shapes = shape_inference::inferDepthwiseConv2DShape(
+ in_tensor->tensorInfo().shape(), kernel_tensor->tensorInfo().shape(),
+ depth_conv_node.param());
+ env->allocateIfNeeded(out_index, {infered_output_shapes[0], output_info.typeInfo()});
+ }
+ else
+ {
+ env->allocateIfNeeded(out_index, output_info);
+ }
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Handle same ifm & ofm data type only
+ assert(in_tensor->data_type() == out_tensor->data_type());
+ assert(out_tensor->num_dimensions() == 4);
+}
+
+void invoke(const ITensor *ifm_tensor, const ITensor *ker_tensor, const ITensor *bias_tensor,
+ const ITensor *ofm_tensor, const model::operation::DepthwiseConv2DNode::Param &param)
+{
+ // TODO Support NCHW frontend
+ const auto ifm_shape = ifm_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ const auto ofm_shape = ofm_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ // Kernel format is [1, kernel_height, kernel_width, depth_out].
+ const auto &ker_shape = ker_tensor->tensorInfo().shape();
+ const auto ker_height = ker_shape.dim(1);
+ const auto ker_width = ker_shape.dim(2);
+ const auto padding = neurun::util::calculatePadding(param.padding, ifm_shape, ofm_shape,
+ param.stride, ker_width, ker_height);
+
+ // Calculate activation range
+ float activation_min, activation_max;
+ calculateActivationRange(param.activation, &activation_min, &activation_max);
+
+ nnfw::cker::DepthwiseConvParams cker_param;
+ cker_param.padding_values.width = padding.left;
+ cker_param.padding_values.height = padding.top;
+ cker_param.depth_multiplier = param.multiplier;
+ cker_param.stride_width = param.stride.horizontal;
+ cker_param.stride_height = param.stride.vertical;
+ cker_param.dilation_width_factor = 1;
+ cker_param.dilation_height_factor = 1;
+ cker_param.float_activation_min = activation_min;
+ cker_param.float_activation_max = activation_max;
+
+ const auto cker_ifm_shape = convertShape(ifm_tensor->tensorInfo().shape());
+ const auto cker_ker_shape = convertShape(ker_tensor->tensorInfo().shape());
+ const auto cker_bias_shape = convertShape(bias_tensor->tensorInfo().shape());
+ const auto cker_ofm_shape = convertShape(ofm_tensor->tensorInfo().shape());
+ const float *ifm_ptr = reinterpret_cast<const float *>(ifm_tensor->bufferRO());
+ const float *ker_ptr = reinterpret_cast<const float *>(ker_tensor->bufferRO());
+ const float *bias_ptr = reinterpret_cast<const float *>(bias_tensor->bufferRO());
+ float *ofm_ptr = reinterpret_cast<float *>(ofm_tensor->buffer());
+
+ nnfw::cker::DepthwiseConv(cker_param, cker_ifm_shape, ifm_ptr, cker_ker_shape, ker_ptr,
+ cker_bias_shape, bias_ptr, cker_ofm_shape, ofm_ptr);
+}
+
+void invokeDepthwiseConv(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &conv_node =
+     nnfw::misc::polymorphic_downcast<const model::operation::DepthwiseConv2DNode &>(node);
+
+ const auto ifm_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::INPUT);
+ const auto ker_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::KERNEL);
+ const auto bias_index = node.getInputs().at(model::operation::DepthwiseConv2DNode::BIAS);
+ const auto ofm_index = node.getOutputs().at(0);
+
+ const auto ifm_tensor = env->tensorAt(ifm_index);
+ const auto ker_tensor = env->tensorAt(ker_index);
+ const auto bias_tensor = env->tensorAt(bias_index);
+ const auto ofm_tensor = env->tensorAt(ofm_index);
+
+ const auto data_type = ifm_tensor->data_type();
+ if (data_type == model::DataType::FLOAT32)
+ {
+ invoke(ifm_tensor, ker_tensor, bias_tensor, ofm_tensor, conv_node.param());
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Support float32 only"};
+ }
+}
+
+} // namespace
+
+OpKernel *getDepthwiseConvNode()
+{
+ static OpKernel kernel = {prepareDepthwiseConv, invokeDepthwiseConv};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/FullyConnected.cc b/runtimes/neurun/core/src/exec/interp/operations/FullyConnected.cc
new file mode 100644
index 000000000..466c220b1
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/FullyConnected.cc
@@ -0,0 +1,137 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/FullyConnected.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/FullyConnectedNode.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace fc
+{
+
+void prepareFC(ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(model::operation::FullyConnectedNode::INPUT);
+ const auto kernel_index = node.getInputs().at(model::operation::FullyConnectedNode::WEIGHT);
+ const auto bias_index = node.getInputs().at(model::operation::FullyConnectedNode::BIAS);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+ const auto kernel_tensor = env->tensorAt(kernel_index);
+ const auto bias_tensor = env->tensorAt(bias_index);
+
+ UNUSED_RELEASE(in_tensor);
+ UNUSED_RELEASE(kernel_tensor);
+ UNUSED_RELEASE(bias_tensor);
+
+ assert(in_tensor->num_dimensions() >= 2);
+ assert(kernel_tensor->num_dimensions() == 2);
+ assert(bias_tensor->num_dimensions() == 1);
+
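+ // Treat all leading dimensions as batch: with a weight matrix of shape
+ // [num_units, input_size], batch_size = total input elements / input_size.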
+ const auto input_size_with_batch = in_tensor->num_elements();
+ const auto num_units = kernel_tensor->dimension(0);
+ const auto input_size = kernel_tensor->dimension(1);
+ const auto batch_size = input_size_with_batch / input_size;
+ assert(input_size_with_batch % input_size == 0);
+ assert(num_units == bias_tensor->dimension(0));
+
+ // Make output tensor info
+ model::Shape output_shape(2);
+ output_shape.dim(0) = batch_size;
+ output_shape.dim(1) = num_units;
+ const model::OperandInfo out_info{output_shape, in_tensor->tensorInfo().typeInfo()};
+ env->allocateIfNeeded(out_index, out_info);
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Handle same ifm & ofm data type only
+ assert(in_tensor->data_type() == out_tensor->data_type());
+ assert(out_tensor->num_dimensions() == 2);
+ assert(out_tensor->dimension(0) == batch_size);
+ assert(out_tensor->dimension(1) == num_units);
+}
+
+void invoke(const ITensor *ifm_tensor, const ITensor *ker_tensor, const ITensor *bias_tensor,
+ const ITensor *ofm_tensor, const model::operation::FullyConnectedNode::Param &param)
+{
+ const auto ifm_buffer = ifm_tensor->bufferRO();
+ const auto ker_buffer = ker_tensor->bufferRO();
+ const auto bias_buffer = bias_tensor->bufferRO();
+ auto ofm_buffer = ofm_tensor->buffer();
+
+ // Set up calculation parameters
+ nnfw::cker::FullyConnectedParams cker_param;
+ calculateActivationRange(param.activation, &cker_param.float_activation_min,
+ &cker_param.float_activation_max);
+ const auto cker_ifm_shape = convertExtendShape(ifm_tensor->tensorInfo().shape());
+ const auto cker_ker_shape = convertExtendShape(ker_tensor->tensorInfo().shape());
+ const auto cker_bias_shape = convertExtendShape(bias_tensor->tensorInfo().shape());
+ const auto cker_ofm_shape = convertExtendShape(ofm_tensor->tensorInfo().shape());
+ const float *ifm_ptr = reinterpret_cast<const float *>(ifm_buffer);
+ const float *ker_ptr = reinterpret_cast<const float *>(ker_buffer);
+ const float *bias_ptr = reinterpret_cast<const float *>(bias_buffer);
+ float *ofm_ptr = reinterpret_cast<float *>(ofm_buffer);
+
+ nnfw::cker::FullyConnected(cker_param, cker_ifm_shape, ifm_ptr, cker_ker_shape, ker_ptr,
+ cker_bias_shape, bias_ptr, cker_ofm_shape, ofm_ptr);
+}
+
+void invokeFC(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &conv_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::FullyConnectedNode &>(node);
+
+ const auto ifm_index = node.getInputs().at(model::operation::FullyConnectedNode::INPUT);
+ const auto ker_index = node.getInputs().at(model::operation::FullyConnectedNode::WEIGHT);
+ const auto bias_index = node.getInputs().at(model::operation::FullyConnectedNode::BIAS);
+ const auto ofm_index = node.getOutputs().at(0);
+
+ const auto ifm_tensor = env->tensorAt(ifm_index);
+ const auto ker_tensor = env->tensorAt(ker_index);
+ const auto bias_tensor = env->tensorAt(bias_index);
+ const auto ofm_tensor = env->tensorAt(ofm_index);
+
+ const auto data_type = ifm_tensor->data_type();
+ if (data_type == model::DataType::FLOAT32)
+ {
+ invoke(ifm_tensor, ker_tensor, bias_tensor, ofm_tensor, conv_node.param());
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Support float only"};
+ }
+}
+} // namespace fc
+
+OpKernel *getFullyConnectedNode()
+{
+ static OpKernel kernel = {fc::prepareFC, fc::invokeFC};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/MaxPool2D.cc b/runtimes/neurun/core/src/exec/interp/operations/MaxPool2D.cc
new file mode 100644
index 000000000..e53fa1473
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/MaxPool2D.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/MaxPool.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/MaxPool2DNode.h"
+#include "util/Utils.h"
+#include "util/Padding.h"
+#include "util/ShapeInference.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace
+{
+
+void prepareMaxPool2D(ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+
+ assert(in_tensor->num_dimensions() == 4);
+ UNUSED_RELEASE(in_tensor);
+
+ const auto output_info = env->model().operands.at(out_index).info();
+ if (output_info.total_size() == 0)
+ {
+ // Handle unspecified output shape
+ const auto &maxpool_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::MaxPool2DNode &>(node);
+ const auto infered_output_shapes =
+ shape_inference::inferMaxPoolShape(in_tensor->tensorInfo().shape(), maxpool_node.param());
+ env->allocateIfNeeded(out_index, {infered_output_shapes[0], output_info.typeInfo()});
+ }
+ else
+ {
+ env->allocateIfNeeded(out_index, output_info);
+ }
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Handle same ifm & ofm data type only
+ assert(in_tensor->data_type() == out_tensor->data_type());
+ assert(out_tensor->num_dimensions() == 4);
+}
+
+void invoke(const ITensor *in_tensor, const ITensor *out_tensor,
+ const model::operation::MaxPool2DNode::Param &param)
+{
+ // TODO support NCHW frontend
+ const auto ifm_shape = in_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ const auto ofm_shape = out_tensor->tensorInfo().shape().asFeature(model::Layout::NHWC);
+ const auto padding = neurun::util::calculatePadding(param.padding, ifm_shape, ofm_shape,
+ param.stride, param.kw, param.kh);
+ // Set up calculation parameters
+ nnfw::cker::MaxPoolParams cker_param;
+ calculateActivationRange(param.activation, &cker_param.float_activation_min,
+ &cker_param.float_activation_max);
+ cker_param.filter_width = param.kw;
+ cker_param.filter_height = param.kh;
+ cker_param.padding_values.width = padding.left;
+ cker_param.padding_values.height = padding.top;
+ cker_param.stride_width = param.stride.horizontal;
+ cker_param.stride_height = param.stride.vertical;
+
+ const auto in_shape = convertShape(in_tensor->tensorInfo().shape());
+ const auto out_shape = convertShape(out_tensor->tensorInfo().shape());
+ const float *in_ptr = reinterpret_cast<const float *>(in_tensor->bufferRO());
+ float *out_ptr = reinterpret_cast<float *>(out_tensor->buffer());
+
+ nnfw::cker::MaxPool(cker_param, in_shape, in_ptr, out_shape, out_ptr);
+}
+
+void invokeMaxPool2D(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &maxpool_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::MaxPool2DNode &>(node);
+
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+ const auto out_tensor = env->tensorAt(out_index);
+
+ const auto data_type = in_tensor->data_type();
+ if (data_type == model::DataType::FLOAT32)
+ {
+ invoke(in_tensor, out_tensor, maxpool_node.param());
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Support float32 only"};
+ }
+}
+} // namespace
+
+OpKernel *getMaxPool2DNode()
+{
+ static OpKernel kernel = {prepareMaxPool2D, invokeMaxPool2D};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/OperationUtil.h b/runtimes/neurun/core/src/exec/interp/operations/OperationUtil.h
new file mode 100644
index 000000000..4d2b4e1d8
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/OperationUtil.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_EXEC_INTERP_OPERATIONS_OPERATION_UTILS_H_
+#define __NEURUN_EXEC_INTERP_OPERATIONS_OPERATION_UTILS_H_
+
+#include "model/Shape.h"
+#include "model/InternalType.h"
+
+#include <cker/Shape.h>
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+
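+// Convert a model::Shape to a 4D cker Shape by padding missing trailing
+// dimensions with 1 (e.g. {2, 3} becomes {2, 3, 1, 1}).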
+inline nnfw::cker::Shape convertShape(const model::Shape &shape)
+{
+ auto dimensions = std::vector<uint32_t>(shape.dims().begin(), shape.dims().end());
+
+ std::vector<int32_t> raw_shape;
+ raw_shape.resize(4);
+
+ for (uint32_t i = 0; i < 4; ++i)
+ {
+ if (i >= dimensions.size())
+ {
+ raw_shape[i] = 1;
+ }
+ else
+ {
+ raw_shape[i] = dimensions[i];
+ }
+ }
+
+ return nnfw::cker::GetShape(raw_shape);
+}
+
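+// Convert a model::Shape to a 4D cker Shape by padding missing leading
+// dimensions with 1 (e.g. {2, 3} becomes {1, 1, 2, 3}).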
+inline nnfw::cker::Shape convertExtendShape(const model::Shape &shape)
+{
+ auto dimensions = std::vector<uint32_t>(shape.dims().begin(), shape.dims().end());
+
+ std::vector<int32_t> raw_shape;
+ raw_shape.resize(4);
+ uint32_t start = 4 - dimensions.size();
+
+ for (uint32_t i = 0; i < 4; ++i)
+ {
+ if (i < start)
+ {
+ raw_shape[i] = 1;
+ }
+ else
+ {
+ raw_shape[i] = dimensions[i - start];
+ }
+ }
+
+ return nnfw::cker::GetShape(raw_shape);
+}
+
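+// Map a fused activation to the clamping range applied by the kernels:
+// NONE -> [lowest, max], RELU -> [0, max], RELU1 -> [-1, 1], RELU6 -> [0, 6].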
+template <typename T>
+void calculateActivationRange(model::Activation activation, T *activation_min, T *activation_max)
+{
+ if (activation == model::Activation::RELU)
+ {
+ *activation_min = 0;
+ *activation_max = std::numeric_limits<T>::max();
+ }
+ else if (activation == model::Activation::RELU6)
+ {
+ *activation_min = 0;
+ *activation_max = 6;
+ }
+ else if (activation == model::Activation::RELU1)
+ {
+ *activation_min = -1;
+ *activation_max = 1;
+ }
+ else if (activation == model::Activation::NONE)
+ {
+ *activation_min = std::numeric_limits<T>::lowest();
+ *activation_max = std::numeric_limits<T>::max();
+ }
+ else
+ {
+ throw std::runtime_error{"Unsupported activation type"};
+ }
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
+
+#endif // __NEURUN_EXEC_INTERP_OPERATIONS_OPERATION_UTILS_H_
diff --git a/runtimes/neurun/core/src/exec/interp/operations/Reshape.cc b/runtimes/neurun/core/src/exec/interp/operations/Reshape.cc
new file mode 100644
index 000000000..a45c3b3f2
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/Reshape.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "exec/interp/Registration.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace
+{
+
+void prepare(ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ // Unspecified output shape is not supported in the operation node spec for now
+ const auto output_info = env->model().operands.at(out_index).info();
+ env->allocateAndShareIfNeeded(out_index, output_info, in_index);
+
+ assert(output_info.total_size() == env->model().operands.at(in_index).info().total_size());
+}
+
+void invoke(const ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
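+ // If prepare() shared the input buffer with the output, the reshape is a
+ // no-op; otherwise copy the raw bytes (a reshape never changes the data).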
+ if (env->tensorAt(in_index)->bufferRO() == env->tensorAt(out_index)->bufferRO())
+ {
+ // Same data
+ return;
+ }
+
+ const auto output_info = env->model().operands.at(out_index).info();
+ memcpy(env->tensorAt(out_index)->buffer(), env->tensorAt(in_index)->bufferRO(),
+ output_info.total_size());
+}
+
+} // namespace
+
+OpKernel *getReshapeNode()
+{
+ static OpKernel kernel = {prepare, invoke};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/exec/interp/operations/SoftMax.cc b/runtimes/neurun/core/src/exec/interp/operations/SoftMax.cc
new file mode 100644
index 000000000..07865969b
--- /dev/null
+++ b/runtimes/neurun/core/src/exec/interp/operations/SoftMax.cc
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cker/operation/SoftMax.h>
+
+#include "OperationUtil.h"
+
+#include "exec/interp/Registration.h"
+#include "model/operation/SoftmaxNode.h"
+#include "misc/polymorphic_downcast.h"
+
+namespace neurun
+{
+namespace exec
+{
+namespace interp
+{
+namespace
+{
+
+void Softmax2D(const float *in, const int input_size, const int batch_size, const float beta,
+ float *out)
+{
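+ // Computes out[i] = exp((in[i] - max) * beta) / sum_j exp((in[j] - max) * beta)
+ // for each batch row; subtracting the max first keeps exp() numerically stable.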
+ assert(input_size > 0);
+
+ // For each batch
+ for (int b = 0; b < batch_size; b++)
+ {
+ // Find the max coeff.
+ float max_coeff = in[0];
+ for (int i = 1; i < input_size; i++)
+ {
+ if (in[i] > max_coeff)
+ max_coeff = in[i];
+ }
+
+ // Compute the normalized sum of exps.
+ float exp_sum = 0.0;
+ for (int i = 0; i < input_size; i++)
+ {
+ out[i] = std::exp((in[i] - max_coeff) * beta);
+ exp_sum += out[i];
+ }
+
+ // Divide by the sum of exps.
+ float reciprocal_sum_exp = 1.f / exp_sum;
+ for (int i = 0; i < input_size; i++)
+ {
+ out[i] *= reciprocal_sum_exp;
+ }
+
+ // Advance in and out pointers for the next batch.
+ in += input_size;
+ out += input_size;
+ }
+}
+
+void prepareSoftMax(ExecEnv *env, const model::Operation &node)
+{
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+ UNUSED_RELEASE(in_tensor);
+
+ assert((in_tensor->num_dimensions() == 4) || (in_tensor->num_dimensions() == 2));
+
+ // The output shape should be the same as the input shape
+ // The output type is pre-defined in the model
+ const auto output_shape = env->model().operands.at(in_index).info().shape();
+ const auto output_type = env->model().operands.at(out_index).info().typeInfo();
+
+ const model::OperandInfo output_info{output_shape, output_type};
+ env->allocateIfNeeded(out_index, output_info);
+
+ auto out_tensor = env->tensorAt(out_index);
+ UNUSED_RELEASE(out_tensor);
+
+ // Check that the output shape is the same as the input shape
+ assert(in_tensor->num_dimensions() == out_tensor->num_dimensions());
+ for (uint32_t i = 0; i < in_tensor->num_dimensions(); i++)
+ {
+ assert(in_tensor->dimension(i) == out_tensor->dimension(i));
+ }
+}
+
+void invoke(const ITensor *in_tensor, const ITensor *out_tensor,
+ const model::operation::SoftmaxNode::Param &param)
+{
+ const float *in_ptr = reinterpret_cast<const float *>(in_tensor->bufferRO());
+ float *out_ptr = reinterpret_cast<float *>(out_tensor->buffer());
+
+ float beta = param.beta;
+
+ if (in_tensor->num_dimensions() == 2)
+ {
+ uint32_t batch_size = in_tensor->dimension(0);
+ uint32_t input_size = in_tensor->dimension(1);
+
+ Softmax2D(in_ptr, input_size, batch_size, beta, out_ptr);
+ }
+ else if (in_tensor->num_dimensions() == 4)
+ {
+ const auto in_shape = convertShape(in_tensor->tensorInfo().shape());
+ const auto out_shape = convertShape(out_tensor->tensorInfo().shape());
+
+ nnfw::cker::SoftmaxParams cker_param;
+ cker_param.beta = beta;
+
+ nnfw::cker::Softmax(cker_param, in_shape, in_ptr, out_shape, out_ptr);
+ }
+ else
+ {
+ throw std::runtime_error{"Unsuported input dimension: support 2D or 4D"};
+ }
+}
+
+void invokeSoftMax(const ExecEnv *env, const model::Operation &node)
+{
+ const auto &softmax_node =
+ nnfw::misc::polymorphic_downcast<const model::operation::SoftmaxNode &>(node);
+
+ const auto in_index = node.getInputs().at(0);
+ const auto out_index = node.getOutputs().at(0);
+
+ const auto in_tensor = env->tensorAt(in_index);
+ const auto out_tensor = env->tensorAt(out_index);
+
+ const auto in_data_type = in_tensor->data_type();
+ const auto out_data_type = out_tensor->data_type();
+ if ((in_data_type == model::DataType::FLOAT32) && (out_data_type == model::DataType::FLOAT32))
+ {
+ invoke(in_tensor, out_tensor, softmax_node.param());
+ }
+ else
+ {
+ throw std::runtime_error{"NYI: Support float32 only"};
+ }
+}
+
+} // namespace
+
+OpKernel *getSoftMaxNode()
+{
+ static OpKernel kernel = {prepareSoftMax, invokeSoftMax};
+ return &kernel;
+}
+
+} // namespace interp
+} // namespace exec
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/Graph.cc b/runtimes/neurun/core/src/graph/Graph.cc
new file mode 100644
index 000000000..4264b1a8a
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/Graph.cc
@@ -0,0 +1,589 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "graph/Graph.h"
+
+#include <algorithm>
+#include <bitset>
+
+#include "util/logging.h"
+#include "verifier/Verifier.h"
+#include "cpp14/memory.h"
+#include "compiler/Linear.h"
+#include "graph/operation/LowerInfo.h"
+#include "graph/operand/LowerInfo.h"
+#include "graph/operand/PermuteFactor.h"
+#include "operand/Shape4DConvert.h"
+#include "compiler/BackendResolver.h"
+#include "backend/IConfig.h"
+#include "pass/PermutationInsertionPass.h"
+#include "pass/PermutationEliminationPass.h"
+
+namespace neurun
+{
+namespace graph
+{
+
+Graph::Graph(std::unique_ptr<model::Model> &&model) : _model{std::move(model)}
+{
+ // DO NOTHING
+}
+
+Graph::~Graph(void) = default;
+
+model::OperandIndex Graph::addOperand(const model::Shape &shape, const model::TypeInfo &type)
+{
+ return _model->operands.emplace(shape, type);
+}
+
+model::OperationIndex Graph::addOperation(std::unique_ptr<model::Operation> &&node)
+{
+ assert(isBuildingPhase());
+ return _model->operations.push(std::move(node));
+}
+
+void Graph::setOperandValue(const model::OperandIndex &ind, std::unique_ptr<model::Data> &&data)
+{
+ assert(isBuildingPhase());
+ assert(_model->operands.exist(ind));
+ _model->operands.at(ind).data(std::move(data));
+}
+
+void Graph::addInput(const model::OperandIndex &ind)
+{
+ assert(isBuildingPhase());
+ _model->inputs.append(ind);
+}
+
+void Graph::addOutput(const model::OperandIndex &ind)
+{
+ assert(isBuildingPhase());
+ _model->outputs.append(ind);
+}
+
+void Graph::finishBuilding(void)
+{
+ assert(isBuildingPhase());
+ _phase = Phase::MODEL;
+
+ // Initialize operand use-def
+ initializeUseDef();
+
+ // Call graph verifications for the MODEL phase
+ {
+ assert(verifier::DAGChecker().verify(*this));
+ assert(verifier::EdgeConsistencyChecker().verify(*this));
+ }
+}
+
+void Graph::lower(void)
+{
+ assert(_phase == Phase::MODEL);
+
+ _subgraphs = nnfw::cpp14::make_unique<model::Subgraphs>();
+ bool is_profiling = util::getConfigBool(util::config::PROFILING_MODE);
+
+ // Lower
+ {
+ // operand::LowerInfo holder
+ model::OperandIndexMap<std::unique_ptr<operand::LowerInfo>> operands_lower_info;
+
+ _model->operands.iterate([&](const model::OperandIndex &index, const model::Operand &object) {
+ operands_lower_info[index] =
+ nnfw::cpp14::make_unique<operand::LowerInfo>(graph::operand::asShape4D(object.shape()));
+ });
+
+ _lower_info_map = nnfw::cpp14::make_unique<LowerInfoMap>();
+
+ // Can the node be merged into the subgraph?
+ // 1. Do they have the same backend id and layout?
+ // 2. If so, are the subgraph and the node connected?
+ auto mergeable = [&](const model::SubgraphIndex &subg_index,
+ const model::OperationIndex &node_index, model::Layout layout) {
+ const auto &subg = _subgraphs->at(subg_index);
+ const auto &node = _model->operations.at(node_index);
+
+ // The same backend id and layout?
+ {
+ const auto subg_backend_layout = getLowerInfo(subg_index)->layout();
+ const auto &subg_backend_id = getLowerInfo(subg_index)->backend()->config()->id();
+ const auto &node_backend_id = _backend_resolver->getBackend(node_index)->config()->id();
+ VERBOSE(Lower) << "SUBG#" << subg_index.value() << " { " << subg_backend_id << "("
+ << model::to_string(subg_backend_layout) << ") } "
+ << " NODE#" << node_index.value() << " (" << node.getName() << ") { "
+ << node_backend_id << "(" << model::to_string(layout) << ") } " << std::endl;
+ if (subg_backend_id != node_backend_id || subg_backend_layout != layout)
+ return false;
+ }
+
+ // Connected?
+ // Is an input of one node an output of the other, or vice versa?
+ {
+ const auto &node_inputs = node.getInputs();
+ const auto &node_outputs = node.getOutputs();
+
+ // subg's operations are ordered, so checking the first and the last is enough
+ std::vector<model::Element> subg_ops{subg.operations()[0]};
+ if (subg.operations().size() > 1)
+ subg_ops.emplace_back(subg.operations()[subg.operations().size() - 1]);
+
+ for (const auto &elem : subg_ops)
+ {
+ const auto &n_index = elem.index;
+ const auto &n = *elem.node;
+
+ // node's output == subg's input?
+ const auto &n_inputs = n.getInputs();
+ for (auto input : n_inputs)
+ {
+ if (node_outputs.contains(input))
+ {
+ VERBOSE(Lower) << "SUBG#" << subg_index.value() << " 's NODE#" << n_index.value()
+ << "(" << n.getName() << ") is connected to NODE#"
+ << node_index.value() << "(" << node.getName() << ")" << std::endl;
+ return true;
+ }
+ }
+
+ // node's input == subg's output?
+ const auto &n_outputs = n.getOutputs();
+ for (auto output : n_outputs)
+ {
+ if (node_inputs.contains(output))
+ {
+ VERBOSE(Lower) << "SUBG#" << subg_index.value() << " 's NODE#" << n_index.value()
+ << " (" << n.getName() << ") is connected to NODE#"
+ << node_index.value() << std::endl;
+ return true;
+ }
+ }
+ }
+
+ VERBOSE(Lower) << "SUBG#" << subg_index.value() << " is not connected to NODE#"
+ << node_index.value() << "(" << node.getName() << ")" << std::endl;
+ }
+
+ return false;
+ };
+
+ // Create a fresh subgraph with one operation, and append it to subgraphs
+ auto append_fresh_single_op_subgraph = [&](const model::OperationIndex &node_index,
+ const model::Operation &node, model::Layout layout) {
+ // Create a fresh subgraph
+ auto subg = nnfw::cpp14::make_unique<model::Subgraph>(layout);
+
+ // Add an operation
+ subg->appendOperation(node_index, node);
+
+ // Update input/output
+ subg->setOutputs(node.getOutputs());
+ subg->setInputs(node.getInputs());
+
+ return _subgraphs->emplace(std::move(subg));
+ };
+
+ model::Subgraph *subg = nullptr;
+ model::SubgraphIndex subg_index;
+
+ // Make subgraphs while checking whether a node can be merged into a subgraph.
+ // NOTE: The code below appends nodes to the current subgraph, creating a new one when needed.
+ // If there is a better way, we would be happy to update this code.
+ Graph::PostDfsConstIterator().iterate(*this, [&](const model::OperationIndex &node_index,
+ const model::Operation &node) {
+ // LowerInfo for in/output operands
+ auto backend = _backend_resolver->getBackend(node_index);
+ // TODO How to get layout of this node from IR
+ auto frontend_layout = model::Layout::NHWC;
+ auto backend_layout = frontend_layout;
+ const std::string acl_layout_str = util::getConfigString(util::config::ACL_LAYOUT);
+ if (acl_layout_str == "NHWC")
+ {
+ backend_layout = model::Layout::NHWC;
+ }
+ else if (acl_layout_str == "NCHW")
+ {
+ backend_layout = model::Layout::NCHW;
+ }
+
+ // CPU supports only NHWC now
+ if (backend->config()->id() == "cpu")
+ {
+ backend_layout = model::Layout::NHWC;
+ }
+
+ for (auto operand : node.getInputs())
+ {
+ auto &&lower_info = operands_lower_info.at(operand);
+ lower_info->addUsePermuteFactor(operand::PermuteFactor{backend, backend_layout});
+ }
+ for (auto operand : node.getOutputs())
+ {
+ auto &&lower_info = operands_lower_info.at(operand);
+ lower_info->addDefPermuteFactor(operand::PermuteFactor{backend, backend_layout});
+ }
+ // For profiling, each subgraph must contain just one node
+ // so that each node can be measured separately
+ if (!subg || is_profiling || !mergeable(subg_index, node_index, backend_layout))
+ {
+ auto new_subg_index = append_fresh_single_op_subgraph(node_index, node, frontend_layout);
+
+ // Subgraph LowerInfo
+ setLowerInfo(new_subg_index, nnfw::cpp14::make_unique<graph::operation::LowerInfo>(
+ backend, backend_layout));
+
+ subg_index = new_subg_index;
+ subg = &(_subgraphs->at(new_subg_index));
+
+ VERBOSE(Lower) << "SUBG#" << subg_index.value() << " is created for "
+ << "NODE#" << node_index.value() << "(" << node.getName() << ")"
+ << std::endl;
+ }
+ else
+ {
+ subg->appendOperation(node_index, node);
+ subg->setInputs(node.getInputs());
+
+ VERBOSE(Lower) << "SUBG#" << subg_index.value() << " merges "
+ << "NODE#" << node_index.value() << "(" << node.getName() << ")"
+ << std::endl;
+ }
+
+ bool finish = false;
+ {
+ size_t prev_op_cnt = 0;
+ for (auto input : node.getInputs())
+ {
+ // Consider valid (non-constant) inputs only
+ const auto &operand = _model->operands.at(input);
+ if (operand.isConstant())
+ continue;
+
+ // This operand is an input of an operation, not a weight or bias
+ if (operand.getDef().list().size() > 0)
+ ++prev_op_cnt;
+
+ // Test whether the node is a Concat or a BeginningBranch
+ // (1) isConcat: Does the current node merge multiple inputs, like a concat?
+ //   - Does the current node have two or more preceding operations?
+ //
+ // [CONV] [CONV] [CONV] [MAX_POOL]
+ // | | | |
+ // [0] [1] [2] [3]
+ // \ | | /
+ // [ C O N C A T ] # current node
+ //
+ // (2) isBeginningBranch: Is the current node at the start of a separate branch?
+ //   - Is the current node's input operand used by two or more operations?
+ //
+ // [CONV]
+ // |
+ // [0]----.
+ // | |
+ // [CONV] [CONV] # current node
+ // | |
+ // [1] [2]
+ // \ /
+ // [CONCAT]
+ if (prev_op_cnt > 1 || operand.getUses().list().size() > 1)
+ {
+ finish = true;
+ break;
+ }
+ }
+ }
+
+ if (finish)
+ subg = nullptr;
+ });
+
+ _subgraphs->iterate([&](const model::SubgraphIndex &, model::Subgraph &subg) {
+ assert(subg.operations().size() > 0);
+ std::reverse(std::begin(subg.operations()), std::end(subg.operations()));
+ });
+
+ _subgraphs->dump("merged and sorted operations without permutation");
+
+// NOTE This is the desired way to handle model inputs and outputs; however, getDefaultBackend()
+// is cpu-backend dependent, so for now we cannot use it.
+#if 0
+ // Add def backend to model input/output operand as default backend
+ for (auto index : getInputs())
+ {
+ auto &&lower_info = operands_lower_info.at(index);
+ lower_info->addDefBackend(_backend_resolver->getDefaultBackend());
+ }
+
+ for (auto index : getOutputs())
+ {
+ auto &&lower_info = operands_lower_info.at(index);
+ lower_info->addUseBackend(_backend_resolver->getDefaultBackend());
+ }
+#endif
+
+ // Add DefFactor constants same as UseFactor
+ // NOTE This assumes a constant operand is used by only one operation
+ _model->operations.iterate([&](const model::OperationIndex &, model::Operation &node) {
+ // LowerInfo for input operands
+ for (auto operand : node.getInputs())
+ {
+ auto &&lower_info = operands_lower_info.at(operand);
+ if (lower_info->def_factors().empty())
+ {
+ // NOTE Handling model inputs here is not ideal. See above NOTE comment.
+ // If it is a model input, not a constant
+ if (_model->inputs.contains(operand))
+ {
+ // If one or more elements then any PermuteFactor is OK so pick first one
+ if (!lower_info->use_factors().empty())
+ {
+ lower_info->addDefPermuteFactor(*lower_info->use_factors().begin());
+ }
+ }
+ // If it is a constant
+ else
+ {
+ lower_info->addDefPermuteFactor(lower_info->use_factors().getOnlyElement());
+ }
+ }
+ }
+ });
+
+ // Set LowerInfo for each operand from the operand::LowerInfo holder
+ _model->operands.iterate([&](const model::OperandIndex &index, model::Operand &object) {
+ setLowerInfo(index, std::move(operands_lower_info[index]));
+
+ // Dump operand LowerInfo
+ // TODO Extract this dumping procedure to be reusable
+ if (!getLowerInfo(index)->def_factors().empty() ||
+ !getLowerInfo(index)->use_factors().empty())
+ {
+ auto factors_to_string = [](const operand::PermuteFactorSet &factors) {
+ std::string str;
+ for (auto factor : factors)
+ {
+ str += factor.backend()->config()->id();
+ str += "(" + model::to_string(factor.layout()) + ")";
+ str += " ";
+ }
+ return "{ " + str + "}";
+ };
+
+ auto operation_index_to_string = [](const model::OperationIndexList &operations) {
+ std::string str;
+ for (auto op : operations.list())
+ {
+ str += std::to_string(op.value());
+ str += " ";
+ }
+ return "{ " + str + "}";
+ };
+
+ const auto lower_info = getLowerInfo(index);
+ const auto &shape = object.shape();
+ const auto &lower_shape = lower_info->shape();
+ std::string def_ops = operation_index_to_string(object.getDef());
+ std::string use_ops = operation_index_to_string(object.getUses());
+ std::string def_layouts = factors_to_string(lower_info->def_factors());
+ std::string use_layouts = factors_to_string(lower_info->use_factors());
+ VERBOSE(Lower) << "* Operand #" << index.value() << " LowerInfo" << std::endl;
+ VERBOSE(Lower) << " - Shape : { " << shape.dim(0) << " "
+ << (shape.rank() > 1 ? shape.dim(1) : 0) << " "
+ << (shape.rank() > 2 ? shape.dim(2) : 0) << " "
+ << (shape.rank() > 3 ? shape.dim(3) : 0) << " "
+ << "}" << std::endl;
+ VERBOSE(Lower) << " - Def Operations : " << def_ops << std::endl;
+ VERBOSE(Lower) << " - Use Operations : " << use_ops << std::endl;
+ VERBOSE(Lower) << " - Lower Info" << std::endl;
+ VERBOSE(Lower) << " - 4D Shape (NHWC) : { " << lower_shape.n() << " " << lower_shape.h()
+ << " " << lower_shape.w() << " " << lower_shape.c() << " "
+ << "}" << std::endl;
+ VERBOSE(Lower) << " - Def Backends : " << def_layouts << std::endl;
+ VERBOSE(Lower) << " - Use Backends : " << use_layouts << std::endl;
+ }
+ });
+ }
+
+ // Run PermutationInsertionPass
+ {
+ pass::PermutationInsertionPass pi_pass(*this);
+ pi_pass.run();
+ // The implemented pass below no longer works, so it is disabled.
+ // pass::PermutationEliminationPass pe_pass(*this);
+ // pe_pass.run();
+
+ // TODO merge perm subgraphs if possible
+ _subgraphs->dump("merged and sorted operations with permutation");
+ }
+
+ // Graph verifications for the LOWERED phase
+ {
+ assert(verifier::DAGChecker().verify(*this));
+ assert(verifier::EdgeConsistencyChecker().verify(*this));
+ }
+}
+
+std::unique_ptr<compiler::Linear> Graph::linearize(void)
+{
+ assert(_phase == Phase::MODEL);
+
+ auto linear = nnfw::cpp14::make_unique<compiler::Linear>(
+ shareModel(), releaseSubgraphs(), releaseLowerInfo(), releaseBackendResolver());
+
+ // TODO Move the operations and operands to linear object
+ return linear;
+}
+
+void Graph::initializeUseDef()
+{
+ operations().iterate(
+ [&](const model::OperationIndex &index, const model::Operation &node) -> void {
+ auto outputs = node.getOutputs();
+ for (auto output : outputs)
+ {
+ operands().at(output).appendDef(index);
+ }
+
+ auto inputs = node.getInputs();
+ for (auto input : inputs)
+ {
+ operands().at(input).appendUse(index);
+ }
+ });
+}
+
+const operation::LowerInfo *Graph::getLowerInfo(const model::SubgraphIndex &subg_index) const
+{
+ if (!_lower_info_map)
+ return nullptr;
+ auto itr = _lower_info_map->operation.find(subg_index);
+ if (itr == _lower_info_map->operation.end())
+ return nullptr;
+ return itr->second.get();
+}
+
+void Graph::setLowerInfo(const model::SubgraphIndex &subg_index,
+ std::unique_ptr<operation::LowerInfo> &&lower_info)
+{
+ assert(_lower_info_map);
+ _lower_info_map->operation.insert(std::make_pair(subg_index, std::move(lower_info)));
+}
+
+const operand::LowerInfo *Graph::getLowerInfo(const model::OperandIndex &index) const
+{
+ if (!_lower_info_map)
+ return nullptr;
+ auto itr = _lower_info_map->operand.find(index);
+ if (itr == _lower_info_map->operand.end())
+ return nullptr;
+ return itr->second.get();
+}
+
+operand::LowerInfo *Graph::getLowerInfo(const model::OperandIndex &index)
+{
+ if (!_lower_info_map)
+ return nullptr;
+ auto itr = _lower_info_map->operand.find(index);
+ if (itr == _lower_info_map->operand.end())
+ return nullptr;
+ return itr->second.get();
+}
+
+void Graph::setLowerInfo(const model::OperandIndex &index,
+ std::unique_ptr<operand::LowerInfo> &&lower_info)
+{
+ assert(_lower_info_map);
+ _lower_info_map->operand.insert(std::make_pair(index, std::move(lower_info)));
+}
+
+} // namespace graph
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+
+// Explicit instantiations to have implementation in the source file.
+
+template class Graph::DefaultIterator<true>;
+template class Graph::DefaultIterator<false>;
+
+template class Graph::PostDfsIterator<true>;
+template class Graph::PostDfsIterator<false>;
+
+//
+// Graph::DefaultIterator
+//
+
+template <bool is_const>
+void Graph::DefaultIterator<is_const>::iterate(GraphRef graph, const IterFn &fn) const
+{
+ graph.operations().iterate(
+ [&](const model::OperationIndex &index, NodeRef node) -> void { fn(index, node); });
+}
+
+//
+// Graph::PostDfsIterator
+//
+
+template <bool is_const>
+void Graph::PostDfsIterator<is_const>::iterate(GraphRef graph, const IterFn &fn) const
+{
+ assert(!graph.isBuildingPhase()); // Restrict iteration condition
+
+ model::OperationIndexMap<bool> visited;
+ graph.operations().iterate(
+ [&](const model::OperationIndex &index, NodeRef) { visited[index] = false; });
+
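+ // fn is called on a node only after all of its users have been visited, so
+ // nodes are emitted in reverse topological order (consumers before producers).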
+ std::function<void(const model::OperationIndex &, NodeRef)> dfs_recursive =
+ [&](const model::OperationIndex &index, NodeRef node) -> void {
+ if (visited[index])
+ return;
+ visited[index] = true;
+
+ for (auto output : node.getOutputs())
+ {
+ const auto &operand = graph.operands().at(output);
+ for (const auto &use : operand.getUses().list())
+ {
+ dfs_recursive(use, graph.operations().at(use));
+ }
+ }
+
+ fn(index, node);
+ };
+
+ graph.operations().iterate(dfs_recursive);
+
+ // All of the operations (nodes) must have been visited.
+ assert(
+ std::all_of(visited.begin(), visited.end(),
+ [](const std::pair<const model::OperationIndex, bool> &v) { return v.second; }));
+}
+
+void Graph::setBackendResolver(std::unique_ptr<compiler::BackendResolver> &&br)
+{
+ _backend_resolver = std::move(br);
+}
+
+std::unique_ptr<compiler::BackendResolver> Graph::releaseBackendResolver()
+{
+ return std::move(_backend_resolver);
+}
+
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/dumper/Dumper.cc b/runtimes/neurun/core/src/graph/dumper/Dumper.cc
new file mode 100644
index 000000000..315e2cebf
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/dumper/Dumper.cc
@@ -0,0 +1,583 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Dumper.h"
+
+#include <string>
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace dumper
+{
+
+using namespace neurun::model::operation;
+
+void Dumper::visit(const AbsNode &node)
+{
+ VERBOSE(LIR) << "* Abs" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(AbsNode::Input::INPUT).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const AddNode &node)
+{
+ VERBOSE(LIR) << "* Add" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(AddNode::Input::LHS).value() << ", "
+ << node.getInputs().at(AddNode::Input::RHS).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ArgMaxNode &node)
+{
+ VERBOSE(LIR) << "* ArgMax" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ArgMaxNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const AvgPool2DNode &node)
+{
+ VERBOSE(LIR) << "* AvgPool2D(Implicit)" << std::endl;
+ VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(AvgPool2DNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const CastNode &node)
+{
+ VERBOSE(LIR) << "* Cast" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(CastNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ComparisonNode &node)
+{
+ VERBOSE(LIR) << "* Comparison" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(ComparisonNode::Input::INPUT0).value() << ", "
+ << node.getInputs().at(ComparisonNode::Input::INPUT1).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ConcatNode &node)
+{
+ VERBOSE(LIR) << "* Concat" << std::endl;
+ std::string inputs;
+ for (auto i : node.getInputs())
+ {
+ inputs += std::to_string(i.value()) + ",";
+ }
+ VERBOSE(LIR) << " - Inputs : IFM(" << inputs << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const Conv2DNode &node)
+{
+ std::string padding_type =
+ node.param().padding.type == model::PaddingType::EXPLICIT ? "Explicit" : "Implicit";
+ VERBOSE(LIR) << "* Conv2D(" << padding_type << ")" << std::endl;
+ VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(Conv2DNode::Input::INPUT).value()
+ << ") Kernel(" << node.getInputs().at(Conv2DNode::Input::KERNEL).value() << ") Bias("
+ << node.getInputs().at(Conv2DNode::Input::BIAS).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const DepthToSpaceNode &node)
+{
+ VERBOSE(LIR) << "* DepthToSpace" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(DepthToSpaceNode::Input::INPUT).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const DepthwiseConv2DNode &node)
+{
+ std::string padding_type =
+ node.param().padding.type == model::PaddingType::EXPLICIT ? "Explicit" : "Implicit";
+ VERBOSE(LIR) << "* DepthwiseConv2D(" << padding_type << ")" << std::endl;
+ VERBOSE(LIR) << " - Inputs : IFM("
+ << node.getInputs().at(DepthwiseConv2DNode::Input::INPUT).value() << ") Kernel("
+ << node.getInputs().at(DepthwiseConv2DNode::Input::KERNEL).value() << ") Bias("
+ << node.getInputs().at(DepthwiseConv2DNode::Input::BIAS).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const DequantizeNode &node)
+{
+ VERBOSE(LIR) << "* Dequantize" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(DequantizeNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const DivNode &node)
+{
+ VERBOSE(LIR) << "* Div" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(DivNode::Input::LHS).value() << ", "
+ << node.getInputs().at(DivNode::Input::RHS).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const EmbeddingLookupNode &node)
+{
+ VERBOSE(LIR) << "* EmbeddingLookup" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Lookups("
+ << node.getInputs().at(EmbeddingLookupNode::Input::LOOKUPS).value() << ") VALUES("
+ << node.getInputs().at(EmbeddingLookupNode::Input::VALUES).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ExpNode &node)
+{
+ VERBOSE(LIR) << "* Exp" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ExpNode::Input::INPUT).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const FloorNode &node)
+{
+ VERBOSE(LIR) << "* Floor" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(FloorNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const FullyConnectedNode &node)
+{
+ VERBOSE(LIR) << "* FullyConnected" << std::endl;
+ VERBOSE(LIR) << " - Inputs : IFM("
+ << node.getInputs().at(FullyConnectedNode::Input::INPUT).value() << ") Weight("
+ << node.getInputs().at(FullyConnectedNode::Input::WEIGHT).value() << ") Bias("
+ << node.getInputs().at(FullyConnectedNode::Input::BIAS).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const GatherNode &node)
+{
+ VERBOSE(LIR) << "* Gather" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(GatherNode::Input::INPUT).value()
+ << ") Indices(" << node.getInputs().at(GatherNode::Input::INDICES).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const HashtableLookupNode &node)
+{
+ VERBOSE(LIR) << "* HashTableLookup" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Lookups("
+ << node.getInputs().at(HashtableLookupNode::Input::LOOKUPS).value() << ") Keys("
+ << node.getInputs().at(HashtableLookupNode::Input::KEYS).value() << ") Values("
+ << node.getInputs().at(HashtableLookupNode::Input::VALUES).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Outputs : Output("
+ << node.getInputs().at(HashtableLookupNode::Output::OUTPUT).value() << ") Hits("
+ << node.getInputs().at(HashtableLookupNode::Output::HITS).value() << ")"
+ << std::endl;
+}
+
+void Dumper::visit(const L2NormalizationNode &node)
+{
+ VERBOSE(LIR) << "* L2Normalization" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(L2NormalizationNode::Input::INPUT).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const L2Pool2DNode &node)
+{
+ VERBOSE(LIR) << "* L2Pool2D" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(L2Pool2DNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const LocalResponseNormalizationNode &node)
+{
+ VERBOSE(LIR) << "* LocalResponseNormalization" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(LocalResponseNormalizationNode::Input::INPUT).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const LSTMNode &node)
+{
+ VERBOSE(LIR) << "* LSTM" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(LSTMNode::Input::INPUT).value()
+ << ") Input To Input Weights("
+ << node.getInputs().at(LSTMNode::Input::INPUT_TO_INPUT_WEIGHTS).value()
+ << ") Input To Forget Weights("
+ << node.getInputs().at(LSTMNode::Input::INPUT_TO_FORGET_WEIGHTS).value()
+ << ") Input To Cell Weights("
+ << node.getInputs().at(LSTMNode::Input::INPUT_TO_CELL_WEIGHTS).value()
+ << ") Input To Output Weights("
+ << node.getInputs().at(LSTMNode::Input::INPUT_TO_OUTPUT_WEIGHTS).value()
+ << ") Recurrent To Input Weights("
+ << node.getInputs().at(LSTMNode::Input::RECURRENT_TO_INPUT_WEIGHTS).value()
+ << ") Recurrent To Forget Weights("
+ << node.getInputs().at(LSTMNode::Input::RECURRENT_TO_FORGET_WEIGHTS).value()
+ << ") Recurrent To Cell Weights("
+ << node.getInputs().at(LSTMNode::Input::RECURRENT_TO_CELL_WEIGHTS).value()
+ << ") Recurrent To Output Weights("
+ << node.getInputs().at(LSTMNode::Input::RECURRENT_TO_OUTPUT_WEIGHTS).value()
+ << ") Cell To Input Weights("
+ << node.getInputs().at(LSTMNode::Input::CELL_TO_INPUT_WEIGHTS).value()
+ << ") Cell To Forget Weights("
+ << node.getInputs().at(LSTMNode::Input::CELL_TO_FORGET_WEIGHTS).value()
+ << ") Cell To OUTPUT Weights("
+ << node.getInputs().at(LSTMNode::Input::CELL_TO_OUTPUT_WEIGHTS).value()
+ << ") Input Gate Bias("
+ << node.getInputs().at(LSTMNode::Input::INPUT_GATE_BIAS).value()
+ << ") Forget Gate Bias("
+ << node.getInputs().at(LSTMNode::Input::FORGET_GATE_BIAS).value() << ") Cell Bias("
+ << node.getInputs().at(LSTMNode::Input::CELL_BIAS).value() << ") Output Gate Bias("
+ << node.getInputs().at(LSTMNode::Input::OUTPUT_GATE_BIAS).value()
+ << ") Projection Weights("
+ << node.getInputs().at(LSTMNode::Input::PROJECTION_WEIGHTS).value()
+ << ") Projection Bias("
+ << node.getInputs().at(LSTMNode::Input::PROJECTION_BIAS).value()
+ << ") Output State In("
+ << node.getInputs().at(LSTMNode::Input::OUTPUT_STATE_IN).value()
+ << ") Cell State In(" << node.getInputs().at(LSTMNode::Input::CELL_STATE_IN).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Scratch Buffer("
+ << node.getOutputs().at(LSTMNode::Output::SCRATCH_BUFFER).value()
+ << ") Output State Out("
+ << node.getInputs().at(LSTMNode::Output::OUTPUT_STATE_OUT).value()
+ << ") Cell State Out("
+ << node.getInputs().at(LSTMNode::Output::CELL_STATE_OUT).value() << ") Output("
+ << node.getInputs().at(LSTMNode::Output::OUTPUT).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const LogicalAndNode &node)
+{
+ VERBOSE(LIR) << "* LogicalAnd" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(LogicalAndNode::Input::INPUT0).value() << ", "
+ << node.getInputs().at(LogicalAndNode::Input::INPUT1).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const LogicalNotNode &node)
+{
+ VERBOSE(LIR) << "* LogicalNot" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(LogicalNotNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const LogicalOrNode &node)
+{
+ VERBOSE(LIR) << "* LogicalOr" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(LogicalOrNode::Input::INPUT0).value()
+ << ", " << node.getInputs().at(LogicalOrNode::Input::INPUT1).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const LogisticNode &node)
+{
+ VERBOSE(LIR) << "* Logistic" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(LogisticNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const MaxPool2DNode &node)
+{
+ std::string padding_type =
+ node.param().padding.type == model::PaddingType::EXPLICIT ? "Explicit" : "Implicit";
+ VERBOSE(LIR) << "* MaxPool2D(" << padding_type << ")" << std::endl;
+ VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(MaxPool2DNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const MeanNode &node)
+{
+ VERBOSE(LIR) << "* Mean" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(MeanNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const MulNode &node)
+{
+ VERBOSE(LIR) << "* Mul" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(MulNode::Input::LHS).value() << ", "
+ << node.getInputs().at(MulNode::Input::RHS).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const NegNode &node)
+{
+ VERBOSE(LIR) << "* Neg" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(NegNode::Input::INPUT).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const PermuteNode &node)
+{
+ std::string permute_type = "Unknown";
+ switch (node.getPermuteType())
+ {
+ case PermuteNode::Type::COPY:
+ permute_type = "Copy";
+ break;
+ case PermuteNode::Type::NHWC_TO_NCHW:
+ permute_type = "NHWC to NCHW";
+ break;
+ case PermuteNode::Type::NCHW_TO_NHWC:
+ permute_type = "NCHW to NHWC";
+ break;
+ }
+
+ VERBOSE(LIR) << "* Permute(" + permute_type + ")" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(0).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const PReLUNode &node)
+{
+ VERBOSE(LIR) << "* PReLU" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(PReLUNode::Input::INPUT).value()
+ << ") Alpha(" << node.getInputs().at(PReLUNode::Input::ALPHA).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ReduceMaxNode &node)
+{
+ VERBOSE(LIR) << "* ReduceMax" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ReduceMaxNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ReduceMinNode &node)
+{
+ VERBOSE(LIR) << "* ReduceMin" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ReduceMinNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ReduceSumNode &node)
+{
+ VERBOSE(LIR) << "* ReduceSum" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ReduceSumNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ReLUNode &node)
+{
+ VERBOSE(LIR) << "* ReLU" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ReLUNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ReLU1Node &node)
+{
+ VERBOSE(LIR) << "* ReLU1" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ReLU1Node::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ReLU6Node &node)
+{
+ VERBOSE(LIR) << "* ReLU6" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ReLU6Node::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ReshapeNode &node)
+{
+ VERBOSE(LIR) << "* Reshape" << std::endl;
+ // TODO The shape index should be "node.getInputs().at(1).value()", but it is not valid for now
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(ReshapeNode::Input::INPUT).value()
+ << ") Shape("
+ << "?"
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const ResizeBilinearNode &node)
+{
+ VERBOSE(LIR) << "* ResizeBilinear" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(ResizeBilinearNode::Input::INPUT).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const RNNNode &node)
+{
+ VERBOSE(LIR) << "* RNN" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(RNNNode::Input::INPUT).value()
+ << ") Weights" << node.getInputs().at(RNNNode::Input::WEIGHTS).value()
+ << ") Recurrent Weights"
+ << node.getInputs().at(RNNNode::Input::RECURRENT_WEIGHTS).value() << ") Bias"
+ << node.getInputs().at(RNNNode::Input::BIAS).value() << ") Hidden State"
+ << node.getInputs().at(RNNNode::Input::HIDDEN_STATE_IN).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(RNNNode::Output::OUTPUT).value()
+ << ") Hidden State" << node.getInputs().at(RNNNode::Output::HIDDEN_STATE_OUT).value()
+ << ")" << std::endl;
+}
+
+void Dumper::visit(const RSQRTNode &node)
+{
+ VERBOSE(LIR) << "* RSQRT" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(RSQRTNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const SoftmaxNode &node)
+{
+ VERBOSE(LIR) << "* Softmax" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(SoftmaxNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const SpaceToDepthNode &node)
+{
+ VERBOSE(LIR) << "* SpaceToDepth" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(SpaceToDepthNode::Input::INPUT).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const SplitNode &node)
+{
+ VERBOSE(LIR) << "* Split" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(SplitNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const SQRTNode &node)
+{
+ VERBOSE(LIR) << "* SQRT" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(SQRTNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const SquaredDifferenceNode &node)
+{
+ VERBOSE(LIR) << "* SquaredDifference" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(SquaredDifferenceNode::Input::LHS).value() << ", "
+ << node.getInputs().at(SquaredDifferenceNode::Input::RHS).value() << ")"
+ << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const SqueezeNode &node)
+{
+ VERBOSE(LIR) << "* Squeeze" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(SqueezeNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const StridedSliceNode &node)
+{
+ VERBOSE(LIR) << "* StridedSlice" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input("
+ << node.getInputs().at(StridedSliceNode::Input::INPUT).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const SubNode &node)
+{
+ VERBOSE(LIR) << "* Sub" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(SubNode::Input::LHS).value() << ", "
+ << node.getInputs().at(SubNode::Input::RHS).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const TanhNode &node)
+{
+ VERBOSE(LIR) << "* TanH" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(TanhNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const TopKV2Node &node)
+{
+ VERBOSE(LIR) << "* TopKV2" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(TopKV2Node::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Outputs : Values("
+ << node.getOutputs().at(TopKV2Node::Output::OUTPUT_VALUES).value() << ") Indices("
+ << node.getOutputs().at(TopKV2Node::Output::OUTPUT_INDICES).value() << ")"
+ << std::endl;
+}
+
+void Dumper::visit(const TransposeConvNode &node)
+{
+ std::string padding_type =
+ node.param().padding.type == model::PaddingType::EXPLICIT ? "Explicit" : "Implicit";
+ VERBOSE(LIR) << "* TransposeConv(" << padding_type << ")" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Output Shape("
+ << node.getInputs().at(TransposeConvNode::Input::OUTPUT_SHAPE).value() << ") KERNEL("
+ << node.getInputs().at(TransposeConvNode::Input::KERNEL).value() << ") IFM("
+ << node.getInputs().at(TransposeConvNode::Input::INPUT).value() << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const TransposeNode &node)
+{
+ VERBOSE(LIR) << "* Transpose" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(TransposeNode::Input::INPUT).value()
+ << ")" << std::endl;
+ VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
+}
+
+void Dumper::visit(const model::operation::UnpackNode &node)
+{
+ VERBOSE(LIR) << "* Unpack" << std::endl;
+ VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(UnpackNode::Input::INPUT).value()
+ << ")" << std::endl;
+ std::string outputs;
+ const auto &output_indices = node.getOutputs();
+ for (auto it = std::begin(output_indices); it != std::end(output_indices); ++it)
+ {
+ outputs += std::to_string(it->value());
+ if (std::next(it) != std::end(output_indices))
+ outputs += ", ";
+ }
+ VERBOSE(LIR) << " - Outputs : Outputs(" << outputs << ")" << std::endl;
+}
+
+} // namespace dumper
+} // namespace graph
+} // namespace neurun
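
A minimal usage sketch for the dumper above, assuming a built neurun::graph::Graph and that
model::Operation::accept() dispatches to the OperationVisitor interface Dumper implements
(both assumptions follow the APIs used elsewhere in this patch):

    #include "Dumper.h"
    #include "graph/Graph.h"

    void dumpGraph(const neurun::graph::Graph &graph)
    {
      neurun::graph::dumper::Dumper dumper;
      graph.operations().iterate(
          [&](const neurun::model::OperationIndex &, const neurun::model::Operation &op) {
            op.accept(dumper); // assumed: accept() calls the matching visit() overload
          });
    }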
diff --git a/runtimes/neurun/core/src/graph/dumper/Dumper.h b/runtimes/neurun/core/src/graph/dumper/Dumper.h
new file mode 100644
index 000000000..882108ad7
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/dumper/Dumper.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_DUMPER_H__
+#define __NEURUN_GRAPH_DUMPER_H__
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace dumper
+{
+
+class Dumper : public model::OperationVisitor
+{
+public:
+ Dumper() = default;
+
+public:
+ void visit(const model::operation::AbsNode &) override;
+ void visit(const model::operation::AddNode &node) override;
+ void visit(const model::operation::ArgMaxNode &) override;
+ void visit(const model::operation::AvgPool2DNode &node) override;
+ void visit(const model::operation::CastNode &) override;
+ void visit(const model::operation::ComparisonNode &) override;
+ void visit(const model::operation::ConcatNode &node) override;
+ void visit(const model::operation::Conv2DNode &node) override;
+ void visit(const model::operation::DepthToSpaceNode &) override;
+ void visit(const model::operation::DepthwiseConv2DNode &node) override;
+ void visit(const model::operation::DequantizeNode &) override;
+ void visit(const model::operation::DivNode &) override;
+ void visit(const model::operation::EmbeddingLookupNode &) override;
+ void visit(const model::operation::ExpNode &) override;
+ void visit(const model::operation::FloorNode &) override;
+ void visit(const model::operation::FullyConnectedNode &node) override;
+ void visit(const model::operation::GatherNode &) override;
+ void visit(const model::operation::HashtableLookupNode &) override;
+ void visit(const model::operation::L2NormalizationNode &) override;
+ void visit(const model::operation::L2Pool2DNode &) override;
+ void visit(const model::operation::LocalResponseNormalizationNode &) override;
+ void visit(const model::operation::LogicalAndNode &) override;
+ void visit(const model::operation::LogicalNotNode &) override;
+ void visit(const model::operation::LogicalOrNode &) override;
+ void visit(const model::operation::LogisticNode &) override;
+ void visit(const model::operation::LSTMNode &) override;
+ void visit(const model::operation::MaxPool2DNode &node) override;
+ void visit(const model::operation::MeanNode &) override;
+ void visit(const model::operation::MulNode &) override;
+ void visit(const model::operation::NegNode &) override;
+ void visit(const model::operation::PermuteNode &node) override;
+ void visit(const model::operation::PReLUNode &) override;
+ void visit(const model::operation::ReduceMaxNode &) override;
+ void visit(const model::operation::ReduceMinNode &) override;
+ void visit(const model::operation::ReduceSumNode &) override;
+ void visit(const model::operation::ReLUNode &) override;
+ void visit(const model::operation::ReLU1Node &) override;
+ void visit(const model::operation::ReLU6Node &) override;
+ void visit(const model::operation::ReshapeNode &node) override;
+ void visit(const model::operation::ResizeBilinearNode &) override;
+ void visit(const model::operation::RNNNode &) override;
+ void visit(const model::operation::RSQRTNode &) override;
+ void visit(const model::operation::SoftmaxNode &node) override;
+ void visit(const model::operation::SpaceToDepthNode &) override;
+ void visit(const model::operation::SplitNode &) override;
+ void visit(const model::operation::SQRTNode &) override;
+ void visit(const model::operation::SquaredDifferenceNode &) override;
+ void visit(const model::operation::SqueezeNode &) override;
+ void visit(const model::operation::StridedSliceNode &) override;
+ void visit(const model::operation::SubNode &) override;
+ void visit(const model::operation::TanhNode &) override;
+ void visit(const model::operation::TopKV2Node &) override;
+ void visit(const model::operation::TransposeConvNode &) override;
+ void visit(const model::operation::TransposeNode &) override;
+ void visit(const model::operation::UnpackNode &) override;
+};
+
+} // namespace dumper
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_DUMPER_H__
diff --git a/runtimes/neurun/core/src/graph/operand/LowerInfo.cc b/runtimes/neurun/core/src/graph/operand/LowerInfo.cc
new file mode 100644
index 000000000..e8a4fe553
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/operand/LowerInfo.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "graph/operand/LowerInfo.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace operand
+{
+
+// NO IMPLEMENTATION YET
+
+} // namespace operand
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/operand/Shape4DConvert.h b/runtimes/neurun/core/src/graph/operand/Shape4DConvert.h
new file mode 100644
index 000000000..9b8d44e1f
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/operand/Shape4DConvert.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_OPERAND_SHAPE4D_CONVERT_H__
+#define __NEURUN_GRAPH_OPERAND_SHAPE4D_CONVERT_H__
+
+#include "graph/operand/LowerInfo.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace operand
+{
+
+inline LowerInfo::Shape4D asShape4D(const model::Shape &shape)
+{
+ switch (shape.rank())
+ {
+ case 0u:
+ return LowerInfo::Shape4D(1, 1, 1, 1);
+
+ case 1u:
+ return LowerInfo::Shape4D(1, 1, 1, shape.dim(0));
+
+ case 2u:
+ return LowerInfo::Shape4D(1, 1, shape.dim(0), shape.dim(1));
+
+ case 3u:
+ return LowerInfo::Shape4D(1, shape.dim(0), shape.dim(1), shape.dim(2));
+
+ case 4u:
+ return LowerInfo::Shape4D(shape.dim(0), shape.dim(1), shape.dim(2), shape.dim(3));
+
+ default:
+ throw "Unsupported rank > 4";
+ }
+}
+
+} // namespace operand
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_OPERAND_SHAPE4D_CONVERT_H__
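
asShape4D() extends lower-rank shapes to rank 4 by filling the leading dimensions with 1;
a small sketch (the brace construction of model::Shape is an assumption):

    #include "graph/operand/Shape4DConvert.h"

    void shape4DExample()
    {
      // A rank-2 shape with dim(0) == 3 and dim(1) == 4 (assumed constructor):
      const neurun::model::Shape shape{3, 4};
      const auto s4d = neurun::graph::operand::asShape4D(shape); // Shape4D(1, 1, 3, 4)
      (void)s4d;
    }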
diff --git a/runtimes/neurun/core/src/graph/operation/LowerInfo.cc b/runtimes/neurun/core/src/graph/operation/LowerInfo.cc
new file mode 100644
index 000000000..507dcc7d0
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/operation/LowerInfo.cc
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "graph/operation/LowerInfo.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace operation
+{
+
+LowerInfo::LowerInfo(const backend::Backend *backend, model::Layout layout)
+ : _permute_factor{backend, layout}
+{
+ // DO NOTHING
+}
+
+} // namespace operation
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/pass/OperandPass.cc b/runtimes/neurun/core/src/graph/pass/OperandPass.cc
new file mode 100644
index 000000000..237833cf4
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/OperandPass.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "OperandPass.h"
+
+#include "graph/Graph.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+
+void OperandPass::run()
+{
+ _graph.operands().iterate(
+ [&](const model::OperandIndex &index, model::Operand &object) { callback(index, object); });
+}
+
+} // namespace pass
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/pass/OperandPass.h b/runtimes/neurun/core/src/graph/pass/OperandPass.h
new file mode 100644
index 000000000..4b25929c5
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/OperandPass.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_PASS_OPERAND_PASS_H__
+#define __NEURUN_GRAPH_PASS_OPERAND_PASS_H__
+
+#include "Pass.h"
+#include "model/Index.h"
+
+namespace neurun
+{
+namespace model
+{
+class Operand;
+} // namespace model
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+
+class OperandPass : public Pass
+{
+public:
+ using Pass::Pass;
+
+public:
+ std::string id() override = 0;
+ void run() override final;
+ virtual void callback(const model::OperandIndex &i, model::Operand &o) = 0;
+};
+
+} // namespace pass
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_PASS_OPERAND_PASS_H__
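
A concrete pass only has to supply id() and callback(); a sketch with a hypothetical
ConstantCounterPass (Operand::isConstant() is the predicate also asserted on in Operand.cc
later in this patch, and the inherited constructor is assumed to take the Graph):

    #include <cstdint>

    #include "OperandPass.h"
    #include "model/Operand.h"

    class ConstantCounterPass : public neurun::graph::pass::OperandPass
    {
    public:
      using OperandPass::OperandPass; // assumed Pass(Graph &) constructor

      std::string id() override { return "ConstantCounterPass"; }

      void callback(const neurun::model::OperandIndex &,
                    neurun::model::Operand &object) override
      {
        if (object.isConstant())
          ++_count; // counted while OperandPass::run() iterates every operand
      }

    private:
      uint32_t _count = 0;
    };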
diff --git a/runtimes/neurun/core/src/graph/pass/OperationPass.cc b/runtimes/neurun/core/src/graph/pass/OperationPass.cc
new file mode 100644
index 000000000..8e6709873
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/OperationPass.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "OperationPass.h"
+
+#include "graph/Graph.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+
+void OperationPass::run()
+{
+ _graph.operations().iterate(
+ [&](const model::OperationIndex &index, model::Operation &node) { callback(index, node); });
+}
+
+} // namespace pass
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/pass/OperationPass.h b/runtimes/neurun/core/src/graph/pass/OperationPass.h
new file mode 100644
index 000000000..ac6a85345
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/OperationPass.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file OperationPass.h
+ * @brief This file contains OperationPass class
+ */
+
+#ifndef __NEURUN_GRAPH_PASS_OPERATION_PASS_H__
+#define __NEURUN_GRAPH_PASS_OPERATION_PASS_H__
+
+#include "Pass.h"
+
+#include "model/Index.h"
+#include "model/Operation.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+
+/**
+ * @brief Class that iterates over all operations and calls callback() for each one
+ */
+class OperationPass : public Pass
+{
+public:
+ using Pass::Pass;
+
+public:
+ /**
+ * @brief Returns the string id for this pass. Same as the class name.
+ *
+ * @return string id
+ */
+ std::string id() override = 0;
+
+ /**
+ * @brief Run the pass
+ */
+ void run() override final;
+
+ /**
+ * @brief The function that will be executed for each operation
+ *
+ * @param[in] i Index of the operation node
+ * @param[in] n The operation node
+ */
+ virtual void callback(const model::OperationIndex &i, model::Operation &n) = 0;
+};
+
+} // namespace pass
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_PASS_OPERATION_PASS_H__
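
OperationPass mirrors this on the operation side; a sketch of a hypothetical logging
subclass (VERBOSE comes from util/logging.h, as used throughout this patch):

    #include "OperationPass.h"
    #include "util/logging.h"

    class OperationLoggerPass : public neurun::graph::pass::OperationPass
    {
    public:
      using OperationPass::OperationPass; // assumed Pass(Graph &) constructor

      std::string id() override { return "OperationLoggerPass"; }

      void callback(const neurun::model::OperationIndex &i, neurun::model::Operation &) override
      {
        VERBOSE(OperationLoggerPass) << "operation #" << i.value() << std::endl;
      }
    };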
diff --git a/runtimes/neurun/src/graph/pass/Pass.cc b/runtimes/neurun/core/src/graph/pass/Pass.cc
index 4c3436961..4c3436961 100644
--- a/runtimes/neurun/src/graph/pass/Pass.cc
+++ b/runtimes/neurun/core/src/graph/pass/Pass.cc
diff --git a/runtimes/neurun/src/graph/pass/Pass.h b/runtimes/neurun/core/src/graph/pass/Pass.h
index 4200936d1..4200936d1 100644
--- a/runtimes/neurun/src/graph/pass/Pass.h
+++ b/runtimes/neurun/core/src/graph/pass/Pass.h
diff --git a/runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.cc b/runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.cc
new file mode 100644
index 000000000..1fc9b69cf
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.cc
@@ -0,0 +1,199 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PermutationEliminationPass.h"
+
+#include "model/Operand.h"
+#include "graph/operand/LowerInfo.h"
+#include "graph/Graph.h"
+#include "backend/IConfig.h"
+#include "util/logging.h"
+#include "compiler/BackendResolver.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+void PermutationEliminationPass::callback(const model::OperandIndex &inp_index,
+ model::Operand &object)
+{
+ if (_graph.getInputs().contains(inp_index))
+ {
+ eliminateInput(inp_index, object);
+ }
+ else if (_graph.getOutputs().contains(inp_index))
+ {
+ eliminateOutput(inp_index, object);
+ }
+}
+
+void PermutationEliminationPass::eliminateInput(const model::OperandIndex &inp_index,
+ model::Operand &object)
+{
+ auto &model_inputs = _graph.getInputs();
+
+ // get uses of the model's given input
+ auto uses = object.getUses();
+
+ // the input must have exactly one use, and it must be the permutation
+ if (uses.size() != 1)
+ {
+ return;
+ }
+
+ for (auto input_use : uses.list())
+ {
+ auto &perm_operation = _graph.operations().at(input_use);
+ auto perm_inputs = perm_operation.getInputs();
+
+ auto perm_outputs = perm_operation.getOutputs();
+
+ if (!isPermuteLayerToEliminate(perm_inputs, perm_outputs, true))
+ {
+ return;
+ }
+
+ assert(perm_inputs.at(0) == inp_index);
+
+ VERBOSE(PermutationEliminationPass::EliminateInput) << "remove NHWC_TO_NCHW permutation\n";
+
+ // set model's new input, which was output of permutation
+ model_inputs.replace(inp_index, perm_outputs.at(0));
+
+ // remove model's input, which is also input of permutation
+ _graph.removeOperand(inp_index);
+
+ // remove permutation operation
+ assert(_graph.subgraphs().containsOperation(input_use));
+ auto subg_idx = _graph.subgraphs().getOperation(input_use);
+ _graph.subgraphs().remove(subg_idx);
+ _graph.operations().remove(input_use);
+
+ VERBOSE(PermutationEliminationPass::EliminateInput)
+ << inp_index.value() << " is model's input and is removed. New input is "
+ << perm_outputs.at(0).value() << "\n"
+ << input_use.value() << " is removed permutation operation\n";
+ }
+}
+
+void PermutationEliminationPass::eliminateOutput(const model::OperandIndex &out_index,
+ model::Operand &object)
+{
+ auto &model_outputs = _graph.getOutputs();
+
+ // get defs of the model's given output
+ auto defs = object.getDef();
+
+ // the output must have exactly one def, and it must be the permutation
+ if (defs.size() != 1)
+ {
+ return;
+ }
+
+ for (auto output_def : defs.list())
+ {
+ auto &perm_operation = _graph.operations().at(output_def);
+ auto perm_outputs = perm_operation.getOutputs();
+
+ auto perm_inputs = perm_operation.getInputs();
+ if (!isPermuteLayerToEliminate(perm_inputs, perm_outputs, false))
+ {
+ return;
+ }
+
+ assert(perm_outputs.at(0) == out_index);
+
+ VERBOSE(PermutationEliminationPass::EliminateOutput) << "remove NCHW_TO_NHWC permutation\n";
+
+ // Remove the permutation from the use list of each of its input operands
+ for (auto perm_input_index : perm_inputs)
+ {
+ auto &perm_input_operand = _graph.operands().at(perm_input_index);
+ perm_input_operand.removeUse(output_def);
+ }
+
+ // set model's new output, which was input of permutation
+ model_outputs.replace(out_index, perm_inputs.at(0));
+
+ // remove model's output, which is also output of permutation
+ _graph.removeOperand(out_index);
+
+ // remove permutation operation
+ assert(_graph.subgraphs().containsOperation(output_def));
+ auto subg_idx = _graph.subgraphs().getOperation(output_def);
+ _graph.subgraphs().remove(subg_idx);
+ _graph.operations().remove(output_def);
+
+ VERBOSE(PermutationEliminationPass::EliminateOutput)
+ << out_index.value() << " is model's output and is removed. New output is "
+ << perm_inputs.at(0).value() << "\n"
+ << output_def.value() << " is removed permutation operation\n";
+ }
+}
+
+bool PermutationEliminationPass::isPermuteLayerToEliminate(
+ const model::OperandIndexSequence &inp_indexes, const model::OperandIndexSequence &out_indexes,
+ bool is_for_model_input)
+{
+ auto input_def_factors = _graph.getLowerInfo(inp_indexes.at(0))->def_factors();
+ auto output_def_factors = _graph.getLowerInfo(out_indexes.at(0))->def_factors();
+
+ // Check the factor counts before calling getOnlyElement(), which assumes a single element
+ if (input_def_factors.size() != 1 || output_def_factors.size() != 1)
+ {
+ return false;
+ }
+
+ auto input_layout = input_def_factors.getOnlyElement().layout();
+ auto output_layout = output_def_factors.getOnlyElement().layout();
+
+ // all operands' factor must be the same
+ for (auto index : inp_indexes)
+ {
+ auto op_factor_set = _graph.getLowerInfo(index)->def_factors();
+ if (op_factor_set.size() != 1 ||
+ input_layout != _graph.getLowerInfo(index)->def_factors().getOnlyElement().layout())
+ {
+ return false;
+ }
+ }
+ // all operands' factor must be the same
+ for (auto index : out_indexes)
+ {
+ auto op_factor_set = _graph.getLowerInfo(index)->def_factors();
+ if (op_factor_set.size() != 1 ||
+ output_layout != _graph.getLowerInfo(index)->def_factors().getOnlyElement().layout())
+ {
+ return false;
+ }
+ }
+
+ if (is_for_model_input)
+ {
+ // check if this is NHWC_TO_NCHW permutation: must have single input, which is model's input
+ return (inp_indexes.size() == 1 && input_layout == model::Layout::NHWC &&
+ output_layout == model::Layout::NCHW);
+ }
+
+ // check if this is NCHW_TO_NHWC permutation: must have single output, which is model's output
+ return (out_indexes.size() == 1 && input_layout == model::Layout::NCHW &&
+ output_layout == model::Layout::NHWC);
+}
+
+} // namespace pass
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.h b/runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.h
new file mode 100644
index 000000000..332eeb6f4
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/PermutationEliminationPass.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_PASS_PERMUTATION_ELIMINATION_PASS_H__
+#define __NEURUN_GRAPH_PASS_PERMUTATION_ELIMINATION_PASS_H__
+
+#include "OperandPass.h"
+#include "model/Operand.h"
+#include "model/OperandIndexSequence.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+
+class PermutationEliminationPass : public OperandPass
+{
+public:
+ using OperandPass::OperandPass;
+
+public:
+ std::string id() override { return "PermutationEliminationPass"; }
+
+ void callback(const model::OperandIndex &index, model::Operand &object) override;
+
+private:
+ /**
+ * @brief Remove a Permute operation that permutes a model input
+ *
+ * Note: This function also removes the model's input operand and
+ * sets the output of the permutation as the model's new input
+ *
+ * @param inp_index is the target operand index for the elimination
+ * @param object is the target operand object for the elimination
+ */
+ void eliminateInput(const model::OperandIndex &inp_index, model::Operand &object);
+
+ /**
+ * @brief Remove a Permute operation that permutes a model output
+ *
+ * Note: This function also removes the model's output operand and
+ * sets the input of the permutation as the model's new output
+ *
+ * @param out_index is the target operand index for the elimination
+ * @param object is the target operand object for the elimination
+ */
+ void eliminateOutput(const model::OperandIndex &out_index, model::Operand &object);
+
+ /**
+ * @brief Determine whether the given operands are a Permute layer's inputs and outputs
+ * that must be eliminated
+ *
+ * @param inp_indexes indexes of the operation's input operands
+ * @param out_indexes indexes of the operation's output operands
+ * @param is_for_model_input true when checking a model input, false when checking a model output
+ *
+ * @return true if it is a permutation layer to eliminate
+ */
+ bool isPermuteLayerToEliminate(const model::OperandIndexSequence &inp_indexes,
+ const model::OperandIndexSequence &out_indexes,
+ bool is_for_model_input);
+};
+
+} // namespace pass
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_PASS_PERMUTATION_ELIMINATION_PASS_H__
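
Running the pass is construct-and-run; a sketch assuming the Pass base constructor takes a
Graph reference (consistent with the _graph member used in the .cc above):

    #include "PermutationEliminationPass.h"
    #include "graph/Graph.h"

    void eliminatePermutations(neurun::graph::Graph &graph)
    {
      neurun::graph::pass::PermutationEliminationPass pass{graph};
      pass.run(); // callback() fires per operand and prunes boundary Permute operations
    }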
diff --git a/runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.cc b/runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.cc
new file mode 100644
index 000000000..0f07b47fe
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.cc
@@ -0,0 +1,210 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PermutationInsertionPass.h"
+
+#include <cassert>
+#include <utility>
+#include <unordered_map>
+
+#include "model/Operand.h"
+#include "graph/operation/LowerInfo.h"
+#include "graph/Graph.h"
+#include "backend/IConfig.h"
+#include "util/logging.h"
+#include "cpp14/memory.h"
+#include "model/operation/PermuteNode.h"
+#include "graph/operand/Shape4DConvert.h"
+#include "compiler/BackendResolver.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+
+void PermutationInsertionPass::callback(const model::OperandIndex &index, model::Operand &object)
+{
+ auto &&operand_li = _graph.getLowerInfo(index);
+ assert(operand_li);
+
+ // NOTE Later, constants also will have Def
+ // Ignore constants
+ if (operand_li->def_factors().size() == 0)
+ {
+ return;
+ }
+
+ std::list<model::OperationIndex> permute_indexes;
+
+ // Build a map for all necessary type of operands
+ std::unordered_map<operand::PermuteFactor, model::OperandIndex> factor_to_index;
+ {
+ assert(operand_li->def_factors().size() == 1);
+ for (auto factor : operand_li->def_factors())
+ {
+ factor_to_index.emplace(factor, index);
+ }
+
+ auto insert_set = operand_li->use_factors() - operand_li->def_factors();
+ for (auto factor : insert_set)
+ {
+ const auto permute_operation_index = insertPermute(index, factor);
+ permute_indexes.push_back(permute_operation_index);
+ VERBOSE(PermutationInsertionPass) << "Insert 'Permute' operation for operand "
+ << index.value() << std::endl;
+ const auto &permute_operation = _graph.operations().at(permute_operation_index);
+ const auto permuted_operand_index = permute_operation.getOutputs().at(0);
+ factor_to_index.emplace(factor, permuted_operand_index);
+ }
+ }
+
+ // Update operations' input that uses this operand
+ {
+ std::list<model::OperationIndex> remove_list;
+
+ auto uses = object.getUses();
+ for (auto use : uses.list())
+ {
+ // If permute operation, ignore it
+ if (std::find(permute_indexes.begin(), permute_indexes.end(), use) != permute_indexes.end())
+ continue;
+
+ auto &operation = _graph.operations().at(use);
+ assert(_graph.subgraphs().containsOperation(use));
+ auto subg_index = _graph.subgraphs().getOperation(use);
+ auto subg_li = _graph.getLowerInfo(subg_index);
+ assert(subg_li);
+ const auto subg_layout = subg_li->layout();
+ const backend::Backend *backend = subg_li->backend();
+ assert(backend);
+ auto use_node_inputs = operation.getInputs();
+ assert(use_node_inputs.contains(index));
+
+ auto new_index = factor_to_index.at({backend, subg_layout});
+ if (index != new_index)
+ {
+ // Update from subgraph
+ _graph.subgraphs().at(subg_index).replaceInput(index, new_index);
+
+ // Update from operation
+ operation.replaceInput(index, new_index);
+
+ // Update from operand
+ // Defer the removal: we are still iterating over this operand's use list
+ remove_list.push_back(use);
+ _graph.operands().at(new_index).appendUse(use);
+ }
+ }
+
+ for (auto &operation : remove_list)
+ {
+ object.removeUse(operation);
+ }
+ }
+}
+
+model::OperationIndex
+PermutationInsertionPass::insertPermute(const model::OperandIndex &operand_index,
+ const operand::PermuteFactor &factor)
+{
+ assert(!_graph.isBuildingPhase());
+
+ auto &operand = _graph.operands().at(operand_index);
+
+ // Generate output operand and permute operation
+ auto out_operand_index = _graph.addOperand(operand.shape(), operand.typeInfo());
+ // change model output if operand_index is model output index
+ auto &model_outputs = _graph.getOutputs();
+ if (model_outputs.contains(operand_index))
+ {
+ model_outputs.replace(operand_index, out_operand_index);
+ }
+
+ // Find PermuteNode information
+ auto input_backend = _graph.getLowerInfo(operand_index)->def_factors().getOnlyElement().backend();
+ auto output_backend = factor.backend();
+ // NOTE PermuteNode may not have specific layout because the layout of input and output may be
+ // different.
+ const auto permute_node_layout = model::Layout::UNKNOWN;
+ const auto permute_node_backend = backend::BackendManager::instance().getDefault();
+ const operand::PermuteFactor permute_node_factor{permute_node_backend, permute_node_layout};
+
+ // Update LowerInfo of input operand
+ auto operand_lower_info = _graph.getLowerInfo(operand_index);
+ operand_lower_info->removeUsePermuteFactor(factor);
+ operand_lower_info->addUsePermuteFactor(permute_node_factor);
+
+ // Update LowerInfo of output operand
+ auto out_operand_li =
+ nnfw::cpp14::make_unique<operand::LowerInfo>(operand::asShape4D(operand.shape()));
+
+ // The input and output factors of all nodes will be the same except PermuteNode. So Tensor's
+ // allocators allocates memory using only the information of def permutation factor now.
+ // TODO Change param to permute_node_factor
+ out_operand_li->addDefPermuteFactor(factor);
+ out_operand_li->addUsePermuteFactor(factor);
+ _graph.setLowerInfo(out_operand_index, std::move(out_operand_li));
+
+ auto input_backend_ctx = _graph.backend_resolver()->getBackendContext(input_backend);
+ auto output_backend_ctx = _graph.backend_resolver()->getBackendContext(output_backend);
+
+ // Insert permute operation to the graph
+ const auto input_layout =
+ _graph.getLowerInfo(operand_index)->def_factors().getOnlyElement().layout();
+ const auto output_layout = factor.layout();
+ using PermuteNode = model::operation::PermuteNode;
+ const auto permute_type = [&]() {
+ if (input_layout == model::Layout::NHWC && output_layout == model::Layout::NCHW)
+ {
+ return PermuteNode::Type::NHWC_TO_NCHW;
+ }
+ else if (input_layout == model::Layout::NCHW && output_layout == model::Layout::NHWC)
+ {
+ return PermuteNode::Type::NCHW_TO_NHWC;
+ }
+ else
+ {
+ return PermuteNode::Type::COPY;
+ }
+ }();
+ auto insert_node = nnfw::cpp14::make_unique<PermuteNode>(
+ operand_index, out_operand_index, input_backend_ctx, output_backend_ctx, permute_type);
+
+ auto node_index = _graph.operations().push(std::move(insert_node));
+ const auto &node = _graph.operations().at(node_index);
+
+ // Subgraph
+ {
+ auto subg_index = _graph.subgraphs().emplace(node_index, node, permute_node_layout);
+ auto &subg = _graph.subgraphs().at(subg_index);
+ subg.setInputs(node.getInputs());
+ subg.setOutputs(node.getOutputs());
+ _graph.setLowerInfo(subg_index, nnfw::cpp14::make_unique<graph::operation::LowerInfo>(
+ permute_node_backend, permute_node_layout));
+ }
+
+ // Update Use/Def info
+ {
+ _graph.operands().at(operand_index).appendUse(node_index);
+ _graph.operands().at(out_operand_index).appendDef(node_index);
+ }
+ return node_index;
+}
+} // namespace pass
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.h b/runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.h
new file mode 100644
index 000000000..b430be8b3
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/pass/PermutationInsertionPass.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_PASS_PERMUTATION_INSERTION_PASS_H__
+#define __NEURUN_GRAPH_PASS_PERMUTATION_INSERTION_PASS_H__
+
+#include "OperandPass.h"
+#include "model/Operand.h" //for model::OperationIndex
+#include "backend/BackendManager.h"
+#include "graph/operand/PermuteFactor.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace pass
+{
+
+class PermutationInsertionPass : public OperandPass
+{
+public:
+ using OperandPass::OperandPass;
+
+public:
+ std::string id() override { return "PermutationInsertionPass"; }
+ void callback(const model::OperandIndex &index, model::Operand &object) override;
+
+ /**
+ * @brief Insert Permute operation that has given operand as input
+ *
+ * @param operand_index is the target operand index for the insertion
+ * @param factor is the output operand's backend type and layout
+ *
+ * @return The operation index of the inserted Permute operation
+ */
+ model::OperationIndex insertPermute(const model::OperandIndex &operand_index,
+ const operand::PermuteFactor &factor);
+};
+
+} // namespace pass
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_PASS_PERMUTATION_INSERTION_PASS_H__
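
One plausible ordering of the two permutation passes, under the same constructor assumption:
insertion first materializes the Permute operations that differing backend layouts require,
then elimination strips the redundant ones at the model boundary:

    #include "PermutationEliminationPass.h"
    #include "PermutationInsertionPass.h"
    #include "graph/Graph.h"

    void applyPermutationPasses(neurun::graph::Graph &graph)
    {
      neurun::graph::pass::PermutationInsertionPass{graph}.run();
      neurun::graph::pass::PermutationEliminationPass{graph}.run();
    }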
diff --git a/runtimes/neurun/core/src/graph/verifier/Verifier.cc b/runtimes/neurun/core/src/graph/verifier/Verifier.cc
new file mode 100644
index 000000000..46e2ead55
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/verifier/Verifier.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Verifier.h"
+
+#include "graph/Graph.h"
+#include "model/OperationIndexMap.h"
+
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace graph
+{
+namespace verifier
+{
+
+//
+// DAGChecker
+//
+
+bool DAGChecker::verify(const Graph &graph) const
+{
+ auto &operations = graph.operations();
+ bool cyclic = false;
+
+ model::OperationIndexMap<bool> visited;
+ operations.iterate([&](const model::OperationIndex &index, const model::Operation &) {
+ visited[index] = false;
+ });
+ model::OperationIndexMap<bool> on_stack = visited; // Copy from visited
+
+ std::function<void(const model::OperationIndex &index, const model::Operation &)> dfs_recursive =
+ [&](const model::OperationIndex &index, const model::Operation &node) -> void {
+ if (on_stack[index])
+ cyclic = true;
+ if (visited[index])
+ return;
+ visited[index] = true;
+ on_stack[index] = true;
+
+ for (auto output : node.getOutputs())
+ {
+ const auto &operand = graph.operands().at(output);
+ for (const auto &use : operand.getUses().list())
+ {
+ dfs_recursive(use, graph.operations().at(use));
+ }
+ }
+
+ on_stack[index] = false;
+ };
+
+ operations.iterate(dfs_recursive);
+
+ return !cyclic;
+}
+
+//
+// EdgeConsistencyChecker
+//
+
+bool EdgeConsistencyChecker::verify(const Graph &graph) const
+{
+ auto &operations = graph.operations();
+ uint32_t mismatches = 0;
+ operations.iterate([&](const model::OperationIndex &index, const model::Operation &node) {
+ for (auto operand_index : node.getInputs())
+ {
+ auto &operand = graph.operands().at(operand_index);
+ mismatches += (operand.getUses().contains(index) ? 0 : 1);
+ }
+ for (auto operand_index : node.getOutputs())
+ {
+ auto &operand = graph.operands().at(operand_index);
+ mismatches += (operand.getDef().contains(index) ? 0 : 1);
+ }
+ });
+ return mismatches == 0;
+}
+
+} // namespace verifier
+} // namespace graph
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/graph/verifier/Verifier.h b/runtimes/neurun/core/src/graph/verifier/Verifier.h
new file mode 100644
index 000000000..ebd908832
--- /dev/null
+++ b/runtimes/neurun/core/src/graph/verifier/Verifier.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_GRAPH_VERIFIER_VERIFIER_H__
+#define __NEURUN_GRAPH_VERIFIER_VERIFIER_H__
+
+namespace neurun
+{
+namespace graph
+{
+class Graph;
+} // namespace graph
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+namespace verifier
+{
+
+struct IVerifier
+{
+ virtual ~IVerifier() = default;
+ virtual bool verify(const Graph &graph) const = 0;
+};
+
+} // namespace verifier
+} // namespace graph
+} // namespace neurun
+
+namespace neurun
+{
+namespace graph
+{
+namespace verifier
+{
+
+class DAGChecker : public IVerifier
+{
+public:
+ bool verify(const Graph &graph) const override;
+};
+
+class EdgeConsistencyChecker : public IVerifier
+{
+public:
+ bool verify(const Graph &graph) const override;
+};
+
+} // namespace verifier
+} // namespace graph
+} // namespace neurun
+
+#endif // __NEURUN_GRAPH_VERIFIER_VERIFIER_H__
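
A sketch of running both checkers on a finished graph (nnfw::cpp14::make_unique is the helper
used elsewhere in this patch):

    #include <cassert>
    #include <memory>
    #include <vector>

    #include "Verifier.h"
    #include "cpp14/memory.h"

    void verifyGraph(const neurun::graph::Graph &graph)
    {
      std::vector<std::unique_ptr<neurun::graph::verifier::IVerifier>> verifiers;
      verifiers.push_back(nnfw::cpp14::make_unique<neurun::graph::verifier::DAGChecker>());
      verifiers.push_back(
          nnfw::cpp14::make_unique<neurun::graph::verifier::EdgeConsistencyChecker>());

      for (const auto &v : verifiers)
      {
        // DAGChecker: no cycles reachable via Use edges;
        // EdgeConsistencyChecker: every input/output is mirrored in Use/Def info
        assert(v->verify(graph));
      }
    }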
diff --git a/runtimes/neurun/core/src/library_info.cc b/runtimes/neurun/core/src/library_info.cc
new file mode 100644
index 000000000..601d09185
--- /dev/null
+++ b/runtimes/neurun/core/src/library_info.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+volatile const char info[] = "library information : runtime=neurun";
diff --git a/runtimes/neurun/core/src/model/LayoutSet.cc b/runtimes/neurun/core/src/model/LayoutSet.cc
new file mode 100644
index 000000000..fec6138a7
--- /dev/null
+++ b/runtimes/neurun/core/src/model/LayoutSet.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "LayoutSet.h"
+
+namespace neurun
+{
+namespace model
+{
+
+LayoutSet::LayoutSet(std::initializer_list<Layout> layouts)
+{
+ for (auto layout : layouts)
+ {
+ _set.insert(layout);
+ }
+}
+
+LayoutSet LayoutSet::operator|(const LayoutSet &other) const
+{
+ auto ret = *this;
+ for (auto layout : other)
+ {
+ ret.add(layout);
+ }
+ return ret;
+}
+
+LayoutSet LayoutSet::operator&(const LayoutSet &other) const
+{
+ LayoutSet ret;
+ for (auto layout : other)
+ {
+ if (contains(layout))
+ {
+ ret.add(layout);
+ }
+ }
+ return ret;
+}
+
+LayoutSet LayoutSet::operator-(const LayoutSet &other) const
+{
+ auto ret = *this;
+ for (auto layout : other)
+ {
+ ret.remove(layout);
+ }
+ return ret;
+}
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/LayoutSet.h b/runtimes/neurun/core/src/model/LayoutSet.h
new file mode 100644
index 000000000..be75c8ee5
--- /dev/null
+++ b/runtimes/neurun/core/src/model/LayoutSet.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_MODEL_LAYOUT_SET_H__
+#define __NEURUN_MODEL_LAYOUT_SET_H__
+
+#include <initializer_list>
+#include <unordered_set>
+
+#include "model/Layout.h"
+
+namespace neurun
+{
+namespace model
+{
+
+class LayoutSet
+{
+public:
+ LayoutSet() = default;
+ LayoutSet(std::initializer_list<Layout> layouts);
+
+public:
+ void add(const Layout &layout) { _set.insert(layout); }
+ void remove(const Layout &layout) { _set.erase(layout); }
+ uint32_t size() const { return static_cast<uint32_t>(_set.size()); }
+ bool contains(const Layout &layout) const { return _set.find(layout) != _set.end(); }
+
+public:
+ LayoutSet operator|(const LayoutSet &other) const; // Union
+ LayoutSet operator&(const LayoutSet &other) const; // Intersect
+ LayoutSet operator-(const LayoutSet &other) const; // Minus
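+ // A minimal usage sketch (illustrative only):
+ //   LayoutSet a{Layout::NHWC};
+ //   LayoutSet b{Layout::NHWC, Layout::NCHW};
+ //   (a | b) -> {NHWC, NCHW}; (a & b) -> {NHWC}; (b - a) -> {NCHW}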
+
+public:
+ std::unordered_set<Layout>::const_iterator begin() const { return _set.begin(); }
+ std::unordered_set<Layout>::const_iterator end() const { return _set.end(); }
+
+private:
+ std::unordered_set<Layout> _set;
+};
+
+} // namespace model
+} // namespace neurun
+
+#endif // __NEURUN_MODEL_LAYOUT_SET_H__
diff --git a/runtimes/neurun/core/src/model/Operand.cc b/runtimes/neurun/core/src/model/Operand.cc
new file mode 100644
index 000000000..4d72fac8c
--- /dev/null
+++ b/runtimes/neurun/core/src/model/Operand.cc
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/Operand.h"
+
+namespace neurun
+{
+namespace model
+{
+
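+// Total byte size of the operand's data. For example, a FLOAT32 operand of
+// shape {2, 3, 4} yields 4 (element size) * 24 (elements) = 96 bytes.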
+size_t Operand::operandSize(void) const
+{
+ const uint32_t ranks = shape().rank();
+ int32_t elements = 1;
+
+ for (uint32_t rank = 0; rank < ranks; rank++)
+ {
+ elements *= shape().dim(rank);
+ }
+
+ DataType type = typeInfo().type();
+ size_t element_size = sizeOfDataType(type);
+
+ // The value of 'type' matches the OperandCode enum in NeuralNetworks.h
+ return element_size * elements;
+}
+
+void Operand::appendUse(const ::neurun::model::OperationIndex &idx)
+{
+ assert(!_uses.contains(idx));
+
+ _uses.append(idx);
+}
+
+void Operand::removeUse(const ::neurun::model::OperationIndex &idx)
+{
+ assert(_uses.contains(idx));
+
+ _uses.remove(idx);
+}
+
+void Operand::appendDef(const ::neurun::model::OperationIndex &idx)
+{
+ assert(!isConstant());
+ assert(_def.size() == 0);
+
+ _def.append(idx);
+}
+
+void Operand::removeDef(const ::neurun::model::OperationIndex &idx)
+{
+ assert(_def.contains(idx));
+
+ _def.remove(idx);
+}
+
+void Operand::parent_info(std::unique_ptr<graph::operand::ParentInfo> &&parent_info)
+{
+ _parent_info = std::move(parent_info);
+}
+
+const graph::operand::ParentInfo *Operand::parent_info() const { return _parent_info.get(); }
+
+graph::operand::ParentInfo *Operand::parent_info() { return _parent_info.get(); }
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/OperandConstraint.cc b/runtimes/neurun/core/src/model/OperandConstraint.cc
new file mode 100644
index 000000000..2730f712a
--- /dev/null
+++ b/runtimes/neurun/core/src/model/OperandConstraint.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/OperandConstraint.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
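+// Intentionally empty: OperandConstraint is presumably header-only for now,
+// and this translation unit only keeps the header in the build.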
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/OperandIndexSequence.cc b/runtimes/neurun/core/src/model/OperandIndexSequence.cc
new file mode 100644
index 000000000..a9454df24
--- /dev/null
+++ b/runtimes/neurun/core/src/model/OperandIndexSequence.cc
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/OperandIndexSequence.h"
+
+#include <algorithm>
+
+namespace neurun
+{
+namespace model
+{
+
+OperandIndexSequence::OperandIndexSequence(std::initializer_list<OperandIndex> list) : _set(list)
+{
+ // DO NOTHING
+}
+
+OperandIndexSequence::OperandIndexSequence(std::initializer_list<int32_t> list)
+{
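+ // Assumption: NN API operand indexes are non-negative, so the
+ // int32_t -> uint32_t cast below is value-preserving.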
+ for (auto val : list)
+ {
+ _set.emplace_back(static_cast<uint32_t>(val));
+ }
+}
+
+OperandIndexSequence::OperandIndexSequence(std::initializer_list<uint32_t> list)
+{
+ for (auto val : list)
+ {
+ _set.emplace_back(val);
+ }
+}
+
+bool OperandIndexSequence::contains(const OperandIndex &index) const
+{
+ return std::find(_set.begin(), _set.end(), index) != _set.end();
+}
+
+void OperandIndexSequence::replace(const OperandIndex &from, const OperandIndex &to)
+{
+ std::replace(_set.begin(), _set.end(), from, to);
+}
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/Operation.cc b/runtimes/neurun/core/src/model/Operation.cc
new file mode 100644
index 000000000..fc1bd599e
--- /dev/null
+++ b/runtimes/neurun/core/src/model/Operation.cc
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/Operation.h"
+
+#include <cassert>
+
+#include "graph/operation/LowerInfo.h"
+
+namespace neurun
+{
+namespace model
+{
+
+Operation::Operation(OperandConstraint input_constr, const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : _input_constr{input_constr}, _inputs{inputs}, _outputs{outputs}
+{
+}
+
+Operation::Operation(OperandConstraint input_constr) : _input_constr{input_constr} {}
+
+Operation::~Operation() = default;
+
+void Operation::setInputs(const OperandIndexSequence &indexes)
+{
+ assert(_input_constr.check(indexes.size()));
+ _inputs = indexes;
+}
+
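+// Note: output counts are not constrained; only inputs are checked above.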
+void Operation::setOutputs(const OperandIndexSequence &indexes) { _outputs = indexes; }
+
+void Operation::replaceInput(const OperandIndex &from, const OperandIndex &to)
+{
+ _inputs.replace(from, to);
+}
+
+void Operation::replaceOutput(const OperandIndex &from, const OperandIndex &to)
+{
+ _outputs.replace(from, to);
+}
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/OperationIndexList.cc b/runtimes/neurun/core/src/model/OperationIndexList.cc
new file mode 100644
index 000000000..e2c077ed4
--- /dev/null
+++ b/runtimes/neurun/core/src/model/OperationIndexList.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/OperationIndexList.h"
+
+#include <algorithm>
+
+namespace neurun
+{
+namespace model
+{
+
+OperationIndexList::OperationIndexList(std::initializer_list<OperationIndex> list) : _list(list)
+{
+ // DO NOTHING
+}
+
+bool OperationIndexList::contains(const ::neurun::model::OperationIndex &index) const
+{
+ return std::find(_list.begin(), _list.end(), index) != _list.end();
+}
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/Shape.cc b/runtimes/neurun/core/src/model/Shape.cc
new file mode 100644
index 000000000..b7f7bff68
--- /dev/null
+++ b/runtimes/neurun/core/src/model/Shape.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/Shape.h"
+#include "util/Utils.h"
+
+#include <algorithm>
+#include <cassert>
+#include <functional>
+#include <numeric>
+
+namespace neurun
+{
+namespace model
+{
+
+FeatureShape Shape::asFeature(Layout layout) const
+{
+ assert(rank() == 4);
+ assert(layout == Layout::NHWC || layout == Layout::NCHW);
+
+ if (layout == Layout::NHWC)
+ {
+ // Feature Map in NHWC layout
+ // - Dimension(0) -> Batch
+ // - Dimension(1) -> Height
+ // - Dimension(2) -> Width
+ // - Dimension(3) -> Depth
+ const auto batch = dim(0);
+ const auto depth = dim(3);
+ const auto height = dim(1);
+ const auto width = dim(2);
+
+ return {batch, depth, height, width};
+ }
+ else if (layout == Layout::NCHW)
+ {
+ // Feature Map in NCHW layout
+ // - Dimension(0) -> Batch
+ // - Dimension(1) -> Depth
+ // - Dimension(2) -> Height
+ // - Dimension(3) -> Width
+ const auto batch = dim(0);
+ const auto depth = dim(1);
+ const auto height = dim(2);
+ const auto width = dim(3);
+
+ return {batch, depth, height, width};
+ }
+ else
+ {
+ throw std::runtime_error("Wrong Layout");
+ }
+}
+
+// Extended dimension is filled with 1.
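+// e.g. extendRank(4) on a shape {3, 4} yields {1, 1, 3, 4}.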
+void Shape::extendRank(int to_rank)
+{
+ assert(to_rank >= static_cast<int>(rank()));
+ _dimensions.insert(_dimensions.cbegin(), to_rank - rank(), 1);
+}
+
+uint64_t Shape::num_elements() const
+{
+ // All dimensions must be non-negative
+ assert(std::all_of(_dimensions.begin(), _dimensions.end(),
+ [](const int32_t &v) { return (v >= 0); }));
+
+ return std::accumulate(_dimensions.cbegin(), _dimensions.cend(), UINT64_C(1),
+ std::multiplies<uint64_t>());
+}
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/Subgraph.cc b/runtimes/neurun/core/src/model/Subgraph.cc
new file mode 100644
index 000000000..4b8402720
--- /dev/null
+++ b/runtimes/neurun/core/src/model/Subgraph.cc
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/Subgraph.h"
+#include "model/OperationVisitor.h"
+#include <sstream>
+
+namespace neurun
+{
+namespace model
+{
+
+Subgraph::Subgraph(Layout layout) : Operation{OperandConstraint::createAny()}, _layout{layout}
+{
+ // DO NOTHING
+}
+
+void Subgraph::accept(OperationVisitor &v) const { v.visit(*this); }
+
+// TODO: Implement a Dumper instead of this method
+std::string Subgraph::getStr() const
+{
+ // " subgraph IN(xx,xx,xx) -> { op0, op1, op2 } -> OUT(yy,yy,yy)"
+ std::stringstream ss;
+ ss << " subgraph IN(";
+ for (const auto &index : getInputs())
+ {
+ ss << " " << index.value();
+ }
+ ss << " ) -> {";
+ for (const auto &elem : _operations)
+ {
+ ss << " " << elem.index.value() << "(" << elem.node->getName() << ")";
+ }
+ ss << " } -> OUT(";
+ for (const auto &index : getOutputs())
+ {
+ ss << " " << index.value();
+ }
+ ss << " )";
+ return ss.str();
+}
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/Subgraphs.cc b/runtimes/neurun/core/src/model/Subgraphs.cc
new file mode 100644
index 000000000..64d806dfa
--- /dev/null
+++ b/runtimes/neurun/core/src/model/Subgraphs.cc
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/Subgraphs.h"
+#include "util/logging.h"
+#include "cpp14/memory.h"
+
+#include <cassert>
+#include <string>
+
+namespace neurun
+{
+namespace model
+{
+
+SubgraphIndex Subgraphs::emplace(const OperationIndex &index, const Operation &node, Layout layout)
+{
+ std::unique_ptr<Subgraph> subg = nnfw::cpp14::make_unique<model::Subgraph>(layout);
+ subg->appendOperation(index, node);
+ return push(std::move(subg));
+}
+
+SubgraphIndex Subgraphs::emplace(std::unique_ptr<Subgraph> &&subg) { return push(std::move(subg)); }
+
+bool Subgraphs::containsOperation(const OperationIndex &operation_index) const
+{
+ return findOperation(operation_index).valid();
+}
+
+SubgraphIndex Subgraphs::getOperation(const OperationIndex &operation_index) const
+{
+ SubgraphIndex ret = findOperation(operation_index);
+ assert(ret.valid());
+ return ret;
+}
+
+// TODO: Extract this into an external helper function
+void Subgraphs::dump(const std::string &msg) const
+{
+ VERBOSE(Subgraphs) << "Subgraphs(" << msg << ")" << std::endl;
+ iterate([&](const SubgraphIndex &idx, const model::Subgraph &subg) {
+ VERBOSE(Subgraphs) << idx.value() << "] " << subg.getStr() << std::endl;
+ });
+}
+
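+// Linear scan over all subgraphs; if an operation ever belonged to more than
+// one subgraph, the last match would win (callers assume at most one).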
+SubgraphIndex Subgraphs::findOperation(const OperationIndex &operation_index) const
+{
+ SubgraphIndex ret;
+ iterate([&](const SubgraphIndex &index, const Subgraph &object) {
+ for (const auto &elem : object.operations())
+ {
+ if (elem.index == operation_index)
+ ret = index;
+ }
+ });
+ return ret;
+}
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/TypeInfo.cc b/runtimes/neurun/core/src/model/TypeInfo.cc
new file mode 100644
index 000000000..46ac2d4de
--- /dev/null
+++ b/runtimes/neurun/core/src/model/TypeInfo.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/TypeInfo.h"
+
+namespace neurun
+{
+namespace model
+{
+
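+// TypeInfos are equal iff type, zero-point offset and scale all match; the
+// quantization fields are compared even for non-quantized types.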
+bool operator==(const TypeInfo &lhs, const TypeInfo &rhs)
+{
+ if (lhs.type() != rhs.type())
+ {
+ return false;
+ }
+
+ if (lhs.offset() != rhs.offset())
+ {
+ return false;
+ }
+
+ if (lhs.scale() != rhs.scale())
+ {
+ return false;
+ }
+
+ return true;
+}
+
+bool operator!=(const TypeInfo &lhs, const TypeInfo &rhs) { return !(lhs == rhs); }
+
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/AbsNode.cc b/runtimes/neurun/core/src/model/operation/AbsNode.cc
new file mode 100644
index 000000000..dd9566da9
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/AbsNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/AbsNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void AbsNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+AbsNode::AbsNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/AddNode.cc b/runtimes/neurun/core/src/model/operation/AddNode.cc
new file mode 100644
index 000000000..43ad7241f
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/AddNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/AddNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void AddNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+AddNode::AddNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ArgMaxNode.cc b/runtimes/neurun/core/src/model/operation/ArgMaxNode.cc
new file mode 100644
index 000000000..2486f54b0
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ArgMaxNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ArgMaxNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ArgMaxNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ArgMaxNode::ArgMaxNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/AvgPool2DNode.cc b/runtimes/neurun/core/src/model/operation/AvgPool2DNode.cc
new file mode 100644
index 000000000..4c625f973
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/AvgPool2DNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/AvgPool2DNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void AvgPool2DNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+AvgPool2DNode::AvgPool2DNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/CastNode.cc b/runtimes/neurun/core/src/model/operation/CastNode.cc
new file mode 100644
index 000000000..85d11e3d1
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/CastNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/CastNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void CastNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+CastNode::CastNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ComparisonNode.cc b/runtimes/neurun/core/src/model/operation/ComparisonNode.cc
new file mode 100644
index 000000000..598e61969
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ComparisonNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ComparisonNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ComparisonNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ComparisonNode::ComparisonNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ConcatNode.cc b/runtimes/neurun/core/src/model/operation/ConcatNode.cc
new file mode 100644
index 000000000..195952637
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ConcatNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ConcatNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ConcatNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ConcatNode::ConcatNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createAtLeast(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/Conv2DNode.cc b/runtimes/neurun/core/src/model/operation/Conv2DNode.cc
new file mode 100644
index 000000000..218c5d193
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/Conv2DNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/Conv2DNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void Conv2DNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+Conv2DNode::Conv2DNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(3u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/CustomNode.cc b/runtimes/neurun/core/src/model/operation/CustomNode.cc
new file mode 100644
index 000000000..059786218
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/CustomNode.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/CustomNode.h"
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void CustomNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+CustomNode::CustomNode(OperandConstraint input_constr, const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, std::string id,
+ const Userdata &userdata)
+ : model::Operation{input_constr, inputs, outputs}, _id(std::move(id)), _userdata(userdata)
+{
+}
+
+const std::string &CustomNode::id() const { return _id; }
+
+const CustomNode::Userdata &CustomNode::userdata() const { return _userdata; }
+
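+// The node owns the user-provided buffer and releases it here, so a CustomNode
+// is assumed not to be copied (a shallow Userdata copy would double-free).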
+CustomNode::~CustomNode() { delete[] _userdata.data; }
+
+std::string CustomNode::getName() const { return id(); }
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/DepthToSpaceNode.cc b/runtimes/neurun/core/src/model/operation/DepthToSpaceNode.cc
new file mode 100644
index 000000000..ec3e5433e
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/DepthToSpaceNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/DepthToSpaceNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void DepthToSpaceNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+DepthToSpaceNode::DepthToSpaceNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/DepthwiseConv2DNode.cc b/runtimes/neurun/core/src/model/operation/DepthwiseConv2DNode.cc
new file mode 100644
index 000000000..70d107aa7
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/DepthwiseConv2DNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/DepthwiseConv2DNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void DepthwiseConv2DNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+DepthwiseConv2DNode::DepthwiseConv2DNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(3u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/DequantizeNode.cc b/runtimes/neurun/core/src/model/operation/DequantizeNode.cc
new file mode 100644
index 000000000..634d36b26
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/DequantizeNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/DequantizeNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void DequantizeNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+DequantizeNode::DequantizeNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/DivNode.cc b/runtimes/neurun/core/src/model/operation/DivNode.cc
new file mode 100644
index 000000000..814491aa7
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/DivNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/DivNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void DivNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+DivNode::DivNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/EmbeddingLookupNode.cc b/runtimes/neurun/core/src/model/operation/EmbeddingLookupNode.cc
new file mode 100644
index 000000000..d49ca19b1
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/EmbeddingLookupNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/EmbeddingLookupNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void EmbeddingLookupNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+EmbeddingLookupNode::EmbeddingLookupNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ExpNode.cc b/runtimes/neurun/core/src/model/operation/ExpNode.cc
new file mode 100644
index 000000000..3f420f8b5
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ExpNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ExpNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ExpNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ExpNode::ExpNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/FloorNode.cc b/runtimes/neurun/core/src/model/operation/FloorNode.cc
new file mode 100644
index 000000000..47b56fbaa
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/FloorNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/FloorNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void FloorNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+FloorNode::FloorNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/FullyConnectedNode.cc b/runtimes/neurun/core/src/model/operation/FullyConnectedNode.cc
new file mode 100644
index 000000000..42f18c72d
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/FullyConnectedNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/FullyConnectedNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void FullyConnectedNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+FullyConnectedNode::FullyConnectedNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(3u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/GatherNode.cc b/runtimes/neurun/core/src/model/operation/GatherNode.cc
new file mode 100644
index 000000000..1ecb6f8ec
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/GatherNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/GatherNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void GatherNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+GatherNode::GatherNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/HashtableLookupNode.cc b/runtimes/neurun/core/src/model/operation/HashtableLookupNode.cc
new file mode 100644
index 000000000..d2c144e71
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/HashtableLookupNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/HashtableLookupNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void HashtableLookupNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+HashtableLookupNode::HashtableLookupNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(3u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/L2NormalizationNode.cc b/runtimes/neurun/core/src/model/operation/L2NormalizationNode.cc
new file mode 100644
index 000000000..1169785c6
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/L2NormalizationNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/L2NormalizationNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void L2NormalizationNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+L2NormalizationNode::L2NormalizationNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/L2Pool2DNode.cc b/runtimes/neurun/core/src/model/operation/L2Pool2DNode.cc
new file mode 100644
index 000000000..fb53f52c6
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/L2Pool2DNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/L2Pool2DNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void L2Pool2DNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+L2Pool2DNode::L2Pool2DNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/LSTMNode.cc b/runtimes/neurun/core/src/model/operation/LSTMNode.cc
new file mode 100644
index 000000000..31443e8ae
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/LSTMNode.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/LSTMNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void LSTMNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
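+// The 23-input constraint mirrors the NN API LSTM operand list.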
+LSTMNode::LSTMNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(23u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/LocalResponseNormalizationNode.cc b/runtimes/neurun/core/src/model/operation/LocalResponseNormalizationNode.cc
new file mode 100644
index 000000000..4b1dded76
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/LocalResponseNormalizationNode.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/LocalResponseNormalizationNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void LocalResponseNormalizationNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+LocalResponseNormalizationNode::LocalResponseNormalizationNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/LogicalAndNode.cc b/runtimes/neurun/core/src/model/operation/LogicalAndNode.cc
new file mode 100644
index 000000000..9e9a3dbbf
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/LogicalAndNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/LogicalAndNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void LogicalAndNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+LogicalAndNode::LogicalAndNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/LogicalNotNode.cc b/runtimes/neurun/core/src/model/operation/LogicalNotNode.cc
new file mode 100644
index 000000000..1a3c324a5
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/LogicalNotNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/LogicalNotNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void LogicalNotNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+LogicalNotNode::LogicalNotNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/LogicalOrNode.cc b/runtimes/neurun/core/src/model/operation/LogicalOrNode.cc
new file mode 100644
index 000000000..53fa305ae
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/LogicalOrNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/LogicalOrNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void LogicalOrNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+LogicalOrNode::LogicalOrNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/LogisticNode.cc b/runtimes/neurun/core/src/model/operation/LogisticNode.cc
new file mode 100644
index 000000000..358ce6acd
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/LogisticNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/LogisticNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void LogisticNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+LogisticNode::LogisticNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/MaxPool2DNode.cc b/runtimes/neurun/core/src/model/operation/MaxPool2DNode.cc
new file mode 100644
index 000000000..596aa2df9
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/MaxPool2DNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/MaxPool2DNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void MaxPool2DNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+MaxPool2DNode::MaxPool2DNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/MeanNode.cc b/runtimes/neurun/core/src/model/operation/MeanNode.cc
new file mode 100644
index 000000000..22b23b27e
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/MeanNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/MeanNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void MeanNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+MeanNode::MeanNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/MulNode.cc b/runtimes/neurun/core/src/model/operation/MulNode.cc
new file mode 100644
index 000000000..23a66848a
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/MulNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/MulNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void MulNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+MulNode::MulNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/NegNode.cc b/runtimes/neurun/core/src/model/operation/NegNode.cc
new file mode 100644
index 000000000..6f3cf5a0a
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/NegNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/NegNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void NegNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+NegNode::NegNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/PReLUNode.cc b/runtimes/neurun/core/src/model/operation/PReLUNode.cc
new file mode 100644
index 000000000..aa8aecdd5
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/PReLUNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/PReLUNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void PReLUNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+PReLUNode::PReLUNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/PadNode.cc b/runtimes/neurun/core/src/model/operation/PadNode.cc
new file mode 100644
index 000000000..9947c9e71
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/PadNode.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/PadNode.h"
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void PadNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+PadNode::PadNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/PermuteNode.cc b/runtimes/neurun/core/src/model/operation/PermuteNode.cc
new file mode 100644
index 000000000..8affca184
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/PermuteNode.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/PermuteNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void PermuteNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
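+// Note: unlike the other nodes in this commit, PermuteNode receives single
+// operand indices plus backend contexts, so the constructor below passes only
+// the operand constraint to model::Operation and wires its input and output
+// explicitly through setInputs()/setOutputs().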
+PermuteNode::PermuteNode(const OperandIndex &input, const OperandIndex &output,
+ const backend::BackendContext *input_backend_ctx,
+ const backend::BackendContext *output_backend_ctx, Type type,
+ model::DataType data_type)
+ : model::Operation{OperandConstraint::createExact(1u)},
+ _param{input_backend_ctx, output_backend_ctx}, _type{type}, _dataType{data_type}
+{
+ setInputs({input});
+ setOutputs({output});
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/RNNNode.cc b/runtimes/neurun/core/src/model/operation/RNNNode.cc
new file mode 100644
index 000000000..fa32059d0
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/RNNNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/RNNNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void RNNNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+RNNNode::RNNNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(5u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/RSQRTNode.cc b/runtimes/neurun/core/src/model/operation/RSQRTNode.cc
new file mode 100644
index 000000000..faed11663
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/RSQRTNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/RSQRTNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void RSQRTNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+RSQRTNode::RSQRTNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ReLU1Node.cc b/runtimes/neurun/core/src/model/operation/ReLU1Node.cc
new file mode 100644
index 000000000..b1fe14e09
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ReLU1Node.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ReLU1Node.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ReLU1Node::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ReLU1Node::ReLU1Node(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ReLU6Node.cc b/runtimes/neurun/core/src/model/operation/ReLU6Node.cc
new file mode 100644
index 000000000..de7c35e1a
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ReLU6Node.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ReLU6Node.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ReLU6Node::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ReLU6Node::ReLU6Node(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ReLUNode.cc b/runtimes/neurun/core/src/model/operation/ReLUNode.cc
new file mode 100644
index 000000000..d79819d7f
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ReLUNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ReLUNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ReLUNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ReLUNode::ReLUNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ReduceMaxNode.cc b/runtimes/neurun/core/src/model/operation/ReduceMaxNode.cc
new file mode 100644
index 000000000..486646ac9
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ReduceMaxNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ReduceMaxNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ReduceMaxNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ReduceMaxNode::ReduceMaxNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ReduceMinNode.cc b/runtimes/neurun/core/src/model/operation/ReduceMinNode.cc
new file mode 100644
index 000000000..9f55251c4
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ReduceMinNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ReduceMinNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ReduceMinNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ReduceMinNode::ReduceMinNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ReduceSumNode.cc b/runtimes/neurun/core/src/model/operation/ReduceSumNode.cc
new file mode 100644
index 000000000..5a06ef81b
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ReduceSumNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ReduceSumNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ReduceSumNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ReduceSumNode::ReduceSumNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ReshapeNode.cc b/runtimes/neurun/core/src/model/operation/ReshapeNode.cc
new file mode 100644
index 000000000..cfd987ffb
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ReshapeNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ReshapeNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ReshapeNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ReshapeNode::ReshapeNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/ResizeBilinearNode.cc b/runtimes/neurun/core/src/model/operation/ResizeBilinearNode.cc
new file mode 100644
index 000000000..263668a3c
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/ResizeBilinearNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/ResizeBilinearNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void ResizeBilinearNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+ResizeBilinearNode::ResizeBilinearNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/SQRTNode.cc b/runtimes/neurun/core/src/model/operation/SQRTNode.cc
new file mode 100644
index 000000000..835aa3f97
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/SQRTNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/SQRTNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void SQRTNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+SQRTNode::SQRTNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/SoftmaxNode.cc b/runtimes/neurun/core/src/model/operation/SoftmaxNode.cc
new file mode 100644
index 000000000..39e6d2bd8
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/SoftmaxNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/SoftmaxNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void SoftmaxNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+SoftmaxNode::SoftmaxNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/SpaceToDepthNode.cc b/runtimes/neurun/core/src/model/operation/SpaceToDepthNode.cc
new file mode 100644
index 000000000..2622881f4
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/SpaceToDepthNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/SpaceToDepthNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void SpaceToDepthNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+SpaceToDepthNode::SpaceToDepthNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/SplitNode.cc b/runtimes/neurun/core/src/model/operation/SplitNode.cc
new file mode 100644
index 000000000..9a542f418
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/SplitNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "model/operation/SplitNode.h"
+#include <cassert>
+#include "model/OperationVisitor.h"
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+void SplitNode::accept(OperationVisitor &v) const { v.visit(*this); }
+SplitNode::SplitNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/SquaredDifferenceNode.cc b/runtimes/neurun/core/src/model/operation/SquaredDifferenceNode.cc
new file mode 100644
index 000000000..6672e08c1
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/SquaredDifferenceNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/SquaredDifferenceNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void SquaredDifferenceNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+SquaredDifferenceNode::SquaredDifferenceNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/SqueezeNode.cc b/runtimes/neurun/core/src/model/operation/SqueezeNode.cc
new file mode 100644
index 000000000..1a82d65b3
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/SqueezeNode.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/SqueezeNode.h"
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void SqueezeNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+SqueezeNode::SqueezeNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param(param)
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/StridedSliceNode.cc b/runtimes/neurun/core/src/model/operation/StridedSliceNode.cc
new file mode 100644
index 000000000..9d60645a8
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/StridedSliceNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/StridedSliceNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void StridedSliceNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+StridedSliceNode::StridedSliceNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/SubNode.cc b/runtimes/neurun/core/src/model/operation/SubNode.cc
new file mode 100644
index 000000000..6a64c4b76
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/SubNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/SubNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void SubNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+SubNode::SubNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/TanhNode.cc b/runtimes/neurun/core/src/model/operation/TanhNode.cc
new file mode 100644
index 000000000..6372b4c73
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/TanhNode.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/TanhNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void TanhNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+TanhNode::TanhNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/TopKV2Node.cc b/runtimes/neurun/core/src/model/operation/TopKV2Node.cc
new file mode 100644
index 000000000..6ebcd50b3
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/TopKV2Node.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/TopKV2Node.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void TopKV2Node::accept(OperationVisitor &v) const { v.visit(*this); }
+
+TopKV2Node::TopKV2Node(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/TransposeConvNode.cc b/runtimes/neurun/core/src/model/operation/TransposeConvNode.cc
new file mode 100644
index 000000000..7ad2d1dca
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/TransposeConvNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/TransposeConvNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void TransposeConvNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+TransposeConvNode::TransposeConvNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(3u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/TransposeNode.cc b/runtimes/neurun/core/src/model/operation/TransposeNode.cc
new file mode 100644
index 000000000..73542a04d
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/TransposeNode.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "model/operation/TransposeNode.h"
+
+#include <cassert>
+
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+
+void TransposeNode::accept(OperationVisitor &v) const { v.visit(*this); }
+
+TransposeNode::TransposeNode(const OperandIndexSequence &inputs,
+ const OperandIndexSequence &outputs, const Param &param)
+ : model::Operation{OperandConstraint::createExact(2u), inputs, outputs}, _param{param}
+{
+}
+
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/model/operation/UnpackNode.cc b/runtimes/neurun/core/src/model/operation/UnpackNode.cc
new file mode 100644
index 000000000..7717a017a
--- /dev/null
+++ b/runtimes/neurun/core/src/model/operation/UnpackNode.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "model/operation/UnpackNode.h"
+#include "model/OperationVisitor.h"
+
+namespace neurun
+{
+namespace model
+{
+namespace operation
+{
+void UnpackNode::accept(OperationVisitor &v) const { v.visit(*this); }
+UnpackNode::UnpackNode(const OperandIndexSequence &inputs, const OperandIndexSequence &outputs,
+ const Param &param)
+ : model::Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
+{
+}
+} // namespace operation
+} // namespace model
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/util/ConfigSource.cc b/runtimes/neurun/core/src/util/ConfigSource.cc
new file mode 100644
index 000000000..f84e95566
--- /dev/null
+++ b/runtimes/neurun/core/src/util/ConfigSource.cc
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "util/ConfigSource.h"
+#include "util/GeneralConfigSource.h"
+
+#include <algorithm>
+#include <array>
+#include <cassert>
+#include <unordered_map>
+
+#include "cpp14/memory.h"
+#include "EnvConfigSource.h"
+
+namespace neurun
+{
+namespace util
+{
+
+static std::unique_ptr<IConfigSource> _source;
+
+void config_source(std::unique_ptr<IConfigSource> &&source) { _source = std::move(source); }
+
+static IConfigSource *config_source()
+{
+ if (!_source)
+ {
+#ifdef ENVVAR_FOR_DEFAULT_CONFIG
+ // Default ConfigSource is EnvConfigSource
+ _source = nnfw::cpp14::make_unique<EnvConfigSource>();
+#else
+ _source = nnfw::cpp14::make_unique<GeneralConfigSource>();
+#endif // ENVVAR_FOR_DEFAULT_CONFIG
+ }
+ return _source.get();
+}
+
+static std::string getConfigOrDefault(const std::string &key)
+{
+ static std::unordered_map<std::string, std::string> defaults;
+ if (defaults.empty())
+ {
+#define CONFIG(Name, Type, Default) \
+ { \
+ auto name = std::string{#Name}; \
+ defaults.emplace(name, std::string{Default}); \
+ }
+
+#include "util/Config.lst"
+
+#undef CONFIG
+ }
+
+ // Treat an empty string and an absent value as the same
+ auto ret = config_source()->get(key);
+ if (ret.empty())
+ {
+ auto itr = defaults.find(key);
+ if (itr != defaults.end())
+ {
+ // Return the default value if one exists
+ ret = itr->second;
+ }
+ }
+
+ return ret;
+}
+
+bool getConfigBool(const std::string &key)
+{
+ auto raw = getConfigOrDefault(key);
+ static const std::array<std::string, 5> false_list{"0", "OFF", "FALSE", "N", "NO"};
+ auto false_found = std::find(false_list.begin(), false_list.end(), raw);
+
+ return (false_found == false_list.end());
+}
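+// Note that the matching above is case-sensitive and defaults to true: a
+// value such as "off" or "false" (lowercase) is not in false_list, so
+// getConfigBool() reports it as true.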
+
+int getConfigInt(const std::string &key)
+{
+ auto raw = getConfigOrDefault(key);
+ return std::stoi(raw);
+}
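+// std::stoi throws std::invalid_argument here if the key resolves to an empty
+// string, i.e. it is unset and Config.lst provides no default for it.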
+
+std::string getConfigString(const std::string &key) { return getConfigOrDefault(key); }
+
+} // namespace util
+} // namespace neurun
+
+namespace neurun
+{
+namespace util
+{
+namespace config
+{
+
+#define CONFIG(Name, Type, Default) const char *Name = #Name;
+
+#include "util/Config.lst"
+
+#undef CONFIG
+
+} // namespace config
+} // namespace util
+} // namespace neurun
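ConfigSource.cc above relies on the X-macro idiom: util/Config.lst is included twice with different definitions of CONFIG, once to emit one string constant per key and once to seed the defaults map. Since Config.lst itself is not visible in this diff, here is a self-contained sketch of the same idiom with invented entries:

    #include <iostream>
    #include <string>
    #include <unordered_map>

    // Stand-in for util/Config.lst; these two entries are invented for illustration.
    #define CONFIG_LIST                       \
      CONFIG(TRACE_FILEPATH, std::string, "") \
      CONFIG(GRAPH_DOT_DUMP, int, "0")

    // Expansion #1: one string constant per key (cf. namespace neurun::util::config).
    #define CONFIG(Name, Type, Default) const char *Name = #Name;
    CONFIG_LIST
    #undef CONFIG

    int main()
    {
      // Expansion #2: seed the defaults map (cf. getConfigOrDefault()).
      std::unordered_map<std::string, std::string> defaults;
    #define CONFIG(Name, Type, Default) defaults.emplace(#Name, Default);
      CONFIG_LIST
    #undef CONFIG

      // Prints: GRAPH_DOT_DUMP defaults to "0"
      std::cout << GRAPH_DOT_DUMP << " defaults to \"" << defaults[GRAPH_DOT_DUMP] << "\"\n";
      return 0;
    }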
diff --git a/runtimes/neurun/core/src/util/EnvConfigSource.cc b/runtimes/neurun/core/src/util/EnvConfigSource.cc
new file mode 100644
index 000000000..be8239b9f
--- /dev/null
+++ b/runtimes/neurun/core/src/util/EnvConfigSource.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EnvConfigSource.h"
+
+#include <cstdlib>
+
+namespace neurun
+{
+namespace util
+{
+
+std::string EnvConfigSource::get(const std::string &key) const
+{
+ const char *value = std::getenv(key.c_str());
+ if (value != nullptr)
+ {
+ return value;
+ }
+ else
+ {
+ return "";
+ }
+}
+
+} // namespace util
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/util/EnvConfigSource.h b/runtimes/neurun/core/src/util/EnvConfigSource.h
new file mode 100644
index 000000000..b187ec772
--- /dev/null
+++ b/runtimes/neurun/core/src/util/EnvConfigSource.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NEURUN_UTIL_ENV_CONFIG_SOURCE_H__
+#define __NEURUN_UTIL_ENV_CONFIG_SOURCE_H__
+
+#include <unordered_map>
+
+#include "util/IConfigSource.h"
+
+namespace neurun
+{
+namespace util
+{
+
+class EnvConfigSource final : public IConfigSource
+{
+public:
+ std::string get(const std::string &key) const override;
+
+private:
+ std::unordered_map<std::string, std::string> _default_attributes;
+};
+
+} // namespace util
+} // namespace neurun
+
+#endif // __NEURUN_UTIL_ENV_CONFIG_SOURCE_H__
diff --git a/runtimes/neurun/core/src/util/GeneralConfigSource.cc b/runtimes/neurun/core/src/util/GeneralConfigSource.cc
new file mode 100644
index 000000000..084e4c109
--- /dev/null
+++ b/runtimes/neurun/core/src/util/GeneralConfigSource.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "util/GeneralConfigSource.h"
+#include "util/logging.h"
+
+namespace neurun
+{
+namespace util
+{
+
+std::string GeneralConfigSource::get(const std::string &key) const
+{
+ auto itr = _map.find(key);
+ if (itr == _map.end())
+ {
+ return "";
+ }
+ else
+ {
+ return itr->second;
+ }
+}
+
+void GeneralConfigSource::set(const std::string &key, const std::string &val)
+{
+ VERBOSE(GeneralConfigSource) << key << " : " << val << std::endl;
+ _map[key] = val;
+}
+
+} // namespace util
+} // namespace neurun
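Taken together, a frontend can inject a GeneralConfigSource and read values back through the free functions in ConfigSource.cc. A sketch, under the assumption that util/ConfigSource.h and util/GeneralConfigSource.h (headers not shown in this portion of the diff) declare exactly what the definitions above suggest:

    #include "util/ConfigSource.h"        // config_source(), getConfigString()
    #include "util/GeneralConfigSource.h" // GeneralConfigSource

    #include <memory>
    #include <string>
    #include <utility>

    void configure_runtime()
    {
      auto source = std::make_unique<neurun::util::GeneralConfigSource>();
      source->set("EXECUTOR", "Linear"); // hypothetical key/value pair
      neurun::util::config_source(std::move(source));

      // Lookups now consult the injected source first, then the Config.lst defaults.
      const std::string executor = neurun::util::getConfigString("EXECUTOR");
    }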
diff --git a/runtimes/neurun/core/src/util/Padding.cc b/runtimes/neurun/core/src/util/Padding.cc
new file mode 100644
index 000000000..dd5a3b502
--- /dev/null
+++ b/runtimes/neurun/core/src/util/Padding.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "util/Padding.h"
+#include "util/Utils.h"
+
+#include <algorithm>
+#include <stdexcept>
+
+namespace neurun
+{
+namespace util
+{
+
+model::ExplicitPadding validPadding(void)
+{
+ //
+ // ANEURALNETWORKS_PADDING_VALID
+ //
+ // VALID padding. No padding.
+ //
+ // When the input size is not evenly divisible by the filter size,
+ // the input at the end that could not fill the whole filter tile
+ // will simply be ignored.
+ //
+ model::ExplicitPadding padding;
+
+ padding.top = 0;
+ padding.bottom = 0;
+ padding.left = 0;
+ padding.right = 0;
+
+ return padding;
+}
+
+model::ExplicitPadding samePaddingUsingIFM(const model::FeatureShape &ifm_shape,
+ const model::Stride &stride, uint32_t kw, uint32_t kh)
+{
+ model::ExplicitPadding padding;
+
+ // ANEURALNETWORKS_PADDING_SAME (from NNAPI spec)
+ //
+ // SAME padding. Padding on both ends are the "same":
+ //
+ // padding_to_beginning = total_padding / 2
+ // padding_to_end = (total_padding + 1)/2.
+ //
+ const int32_t vertical_expected_output = (ifm_shape.H + stride.vertical - 1) / stride.vertical;
+ const int32_t horizontal_expected_output =
+ (ifm_shape.W + stride.horizontal - 1) / stride.horizontal;
+
+ const int32_t vertical_needed_input = (vertical_expected_output - 1) * stride.vertical + kh;
+ const int32_t vertical_total_padding = std::max(0, vertical_needed_input - ifm_shape.H);
+
+ const int32_t horizontal_needed_input = (horizontal_expected_output - 1) * stride.horizontal + kw;
+ const int32_t horizontal_total_padding = std::max(0, horizontal_needed_input - ifm_shape.W);
+
+ padding.top = vertical_total_padding / 2;
+ padding.bottom = (vertical_total_padding + 1) / 2;
+ padding.left = horizontal_total_padding / 2;
+ padding.right = (horizontal_total_padding + 1) / 2;
+
+ return padding;
+}
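+// Worked example: for ifm H = 7, stride.vertical = 2, kh = 3 the expected
+// output height is ceil(7 / 2) = 4, the needed input is (4 - 1) * 2 + 3 = 9,
+// so vertical_total_padding = 9 - 7 = 2 and padding is {top = 1, bottom = 1}.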
+
+model::ExplicitPadding samePadding(const model::FeatureShape &ifm_shape,
+ const model::FeatureShape &ofm_shape,
+ const model::Stride &stride, uint32_t kw, uint32_t kh)
+{
+ const int32_t vertical_expected_output = (ifm_shape.H + stride.vertical - 1) / stride.vertical;
+ const int32_t horizontal_expected_output =
+ (ifm_shape.W + stride.horizontal - 1) / stride.horizontal;
+ assert(vertical_expected_output == ofm_shape.H);
+ assert(horizontal_expected_output == ofm_shape.W);
+
+ UNUSED_RELEASE(ofm_shape);
+ UNUSED_RELEASE(vertical_expected_output);
+ UNUSED_RELEASE(horizontal_expected_output);
+
+ return samePaddingUsingIFM(ifm_shape, stride, kw, kh);
+}
+
+model::ExplicitPadding calculatePadding(const model::Padding &padding,
+ const model::FeatureShape &ifm_shape,
+ const model::FeatureShape &ofm_shape,
+ const model::Stride &stride, uint32_t kw, uint32_t kh)
+{
+ if (padding.type == model::PaddingType::EXPLICIT)
+ {
+ return padding.param;
+ }
+ else if (padding.type == model::PaddingType::SAME)
+ {
+ return samePadding(ifm_shape, ofm_shape, stride, kw, kh);
+ }
+ else if (padding.type == model::PaddingType::VALID)
+ {
+ return validPadding();
+ }
+ else
+ {
+ throw std::runtime_error{"Cannot handle padding type"};
+ }
+}
+
+} // namespace util
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/util/ShapeInference.cc b/runtimes/neurun/core/src/util/ShapeInference.cc
new file mode 100644
index 000000000..5a7bfde41
--- /dev/null
+++ b/runtimes/neurun/core/src/util/ShapeInference.cc
@@ -0,0 +1,207 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "util/Utils.h"
+#include "model/InternalType.h"
+#include "model/Shape.h"
+#include "model/operation/AvgPool2DNode.h"
+#include "model/operation/MaxPool2DNode.h"
+#include "util/ShapeInference.h"
+
+namespace neurun
+{
+namespace shape_inference
+{
+
+//
+// Helper functions
+//
+
+namespace
+{
+
+template <typename T, typename U>
+typename std::enable_if<std::is_integral<T>::value && std::is_integral<U>::value,
+ typename std::common_type<T, U>::type>::type
+ceil_div(T dividend, U divisor)
+{
+ assert(dividend > 0 && divisor > 0 && "this implementation is for positive numbers only");
+ return (dividend + divisor - 1) / divisor;
+}
+
+// Calculate the result of broadcast of two shapes
+model::Shape broadcastShapes(const model::Shape &lhs_shape, const model::Shape &rhs_shape)
+{
+ model::Shape out_shape;
+ auto max_rank = std::max(lhs_shape.rank(), rhs_shape.rank());
+
+ for (int idx = 0; idx < max_rank; ++idx)
+ {
+ // Go over operand dimensions from right to left
+ int lhs_idx = lhs_shape.rank() - idx - 1;
+ int rhs_idx = rhs_shape.rank() - idx - 1;
+
+ int32_t lhs_dim = lhs_idx >= 0 ? lhs_shape.dim(lhs_idx) : 1;
+ int32_t rhs_dim = rhs_idx >= 0 ? rhs_shape.dim(rhs_idx) : 1;
+
+ if (lhs_dim != 1 && rhs_dim != 1 && lhs_dim != rhs_dim)
+ throw std::runtime_error("Incompatible shapes for broadcast");
+
+ out_shape.prepend(std::max(lhs_dim, rhs_dim));
+ }
+
+ return out_shape;
+}
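+
+// Illustrative example (shapes assumed for exposition): broadcasting
+// lhs = [3, 1, 5] with rhs = [4, 1] aligns dimensions from the right,
+// treats missing and size-1 dimensions as repeatable, and yields [3, 4, 5].
+// Incompatible pairs such as [3] and [4] throw std::runtime_error.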
+
+// Calculate output height and width of convolution-like operation
+std::pair<int, int> calcConvLikeHeightAndWidth(const int in_h, const int in_w, const int ker_h,
+ const int ker_w, const model::Padding pad,
+ const model::Stride stride)
+{
+ int32_t out_h = 0, out_w = 0;
+
+ switch (pad.type)
+ {
+ case model::PaddingType::SAME:
+ out_h = ceil_div(in_h, stride.vertical);
+ out_w = ceil_div(in_w, stride.horizontal);
+ break;
+ case model::PaddingType::VALID:
+ out_h = ceil_div(in_h - ker_h + 1, stride.vertical);
+ out_w = ceil_div(in_w - ker_w + 1, stride.horizontal);
+ break;
+ case model::PaddingType::EXPLICIT:
+ out_h = (in_h + pad.param.top + pad.param.bottom - ker_h) / stride.vertical + 1;
+ out_w = (in_w + pad.param.left + pad.param.right - ker_w) / stride.horizontal + 1;
+ break;
+ default:
+ assert(false);
+ }
+
+ return {out_h, out_w};
+}
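+
+// Worked examples (values assumed for exposition), height axis only:
+// SAME: in_h = 224, stride = 2 -> out_h = ceil_div(224, 2) = 112
+// VALID: in_h = 28, ker_h = 3, stride = 1 -> out_h = ceil_div(26, 1) = 26
+// EXPLICIT: in_h = 5, top = bottom = 1, ker_h = 3, stride = 1
+// -> out_h = (5 + 1 + 1 - 3) / 1 + 1 = 5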
+
+} // namespace
+
+//
+// Shape inference
+//
+
+Shapes inferEltwiseShape(const model::Shape &lhs_shape, const model::Shape &rhs_shape)
+{
+ return {broadcastShapes(lhs_shape, rhs_shape)};
+}
+
+Shapes inferAvgPoolShape(const model::Shape &in_shape,
+ const model::operation::AvgPool2DNode::Param &param,
+ const model::Layout layout)
+{
+ assert(layout == model::Layout::NHWC);
+ auto ifm_shape = in_shape.asFeature(layout);
+ const auto out_h_w = calcConvLikeHeightAndWidth(ifm_shape.H, ifm_shape.W, param.kh, param.kw,
+ param.padding, param.stride);
+ // Pooling does not change the number of channels or the batch size
+ return {model::Shape{ifm_shape.N, out_h_w.first, out_h_w.second, ifm_shape.C}};
+}
+
+Shapes inferConcatShape(const Shapes &in_shapes, const model::operation::ConcatNode::Param &param)
+{
+ const int32_t concat_axis = param.axis;
+ const auto &first_in_shape = in_shapes[0];
+
+ // Check that all shapes are equal except for concat axis dimension
+ for (const auto &in_shape : in_shapes)
+ {
+ assert(in_shape.rank() == first_in_shape.rank());
+ for (int64_t dim_idx = 0; dim_idx < in_shape.rank(); ++dim_idx)
+ assert(dim_idx == concat_axis || in_shape.dim(dim_idx) == first_in_shape.dim(dim_idx));
+ }
+
+ // Calculate output shape
+ model::Shape out_shape(first_in_shape);
+ out_shape.dim(concat_axis) = 0;
+ for (const auto &in_shape : in_shapes)
+ out_shape.dim(concat_axis) += in_shape.dim(concat_axis);
+ return {out_shape};
+}
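+
+// For example (shapes assumed for exposition), concatenating [1, 2, 3] and
+// [1, 5, 3] along axis 1 yields [1, 7, 3]: every dimension except the concat
+// axis must match, and the concat axis is the sum of the input dimensions.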
+
+Shapes inferMaxPoolShape(const model::Shape &in_shape,
+ const model::operation::MaxPool2DNode::Param &param,
+ const model::Layout layout)
+{
+ assert(layout == model::Layout::NHWC);
+ auto ifm_shape = in_shape.asFeature(layout);
+ const auto out_h_w = calcConvLikeHeightAndWidth(ifm_shape.H, ifm_shape.W, param.kh, param.kw,
+ param.padding, param.stride);
+ // Pooling does not change the number of channels or the batch size
+ return {model::Shape{ifm_shape.N, out_h_w.first, out_h_w.second, ifm_shape.C}};
+}
+
+Shapes inferConv2DShape(const model::Shape &in_shape, const model::Shape &ker_shape,
+ const model::operation::Conv2DNode::Param &param, model::Layout layout)
+{
+ assert(layout == model::Layout::NHWC);
+ auto ifm_shape = in_shape.asFeature(layout);
+
+ // Kernel format is [depth_out, kernel_height, kernel_width, depth_in]
+ auto kf_shape = ker_shape.asFeature(layout);
+ assert(ifm_shape.C == kf_shape.C);
+
+ const auto out_h_w = calcConvLikeHeightAndWidth(ifm_shape.H, ifm_shape.W, kf_shape.H, kf_shape.W,
+ param.padding, param.stride);
+
+ return {model::Shape{ifm_shape.N, out_h_w.first, out_h_w.second, kf_shape.N}};
+}
+
+Shapes inferDepthwiseConv2DShape(const model::Shape &in_shape, const model::Shape &ker_shape,
+ const model::operation::DepthwiseConv2DNode::Param &param,
+ model::Layout layout)
+{
+ assert(layout == model::Layout::NHWC);
+ auto ifm_shape = in_shape.asFeature(layout);
+
+ // Kernel format is [1, kernel_height, kernel_width, depth_out]
+ auto kf_shape = ker_shape.asFeature(layout);
+ assert(kf_shape.C == static_cast<int32_t>(ifm_shape.C * param.multiplier));
+ assert(kf_shape.N == 1);
+
+ const auto out_h_w = calcConvLikeHeightAndWidth(ifm_shape.H, ifm_shape.W, kf_shape.H, kf_shape.W,
+ param.padding, param.stride);
+
+ return {model::Shape{ifm_shape.N, out_h_w.first, out_h_w.second, kf_shape.C}};
+}
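+
+// For example (values assumed for exposition): an NHWC input [1, 32, 32, 8]
+// with multiplier 2 expects a kernel of shape [1, kh, kw, 16] and, with SAME
+// padding and stride 1, produces an output of [1, 32, 32, 16].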
+
+Shapes inferFullyConnectedShape(const model::Shape &in_shape, const model::Shape &ker_shape)
+{
+ assert(in_shape.rank() >= 2);
+ assert(ker_shape.rank() == 2);
+
+ const auto input_size_with_batch = in_shape.num_elements();
+ const auto num_units = ker_shape.dim(0);
+ const auto input_size = ker_shape.dim(1);
+ const auto batch_size = input_size_with_batch / input_size;
+ assert(input_size_with_batch % input_size == 0);
+
+ return {{model::Shape({static_cast<int32_t>(batch_size), num_units})}};
+}
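+
+// For example (shapes assumed for exposition): in_shape [2, 5, 20] has 200
+// elements; with ker_shape [10, 100], input_size = 100, so batch_size =
+// 200 / 100 = 2 and the inferred output shape is [2, 10].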
+
+} // namespace shape_inference
+} // namespace neurun
diff --git a/runtimes/neurun/core/src/util/Utils.cc b/runtimes/neurun/core/src/util/Utils.cc
new file mode 100644
index 000000000..cd912a810
--- /dev/null
+++ b/runtimes/neurun/core/src/util/Utils.cc
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "util/Utils.h"
+
+#include <cassert>
+
+namespace neurun
+{
+namespace util
+{
+
+const char *to_string(const model::PaddingType &type)
+{
+ assert((type == model::PaddingType::EXPLICIT) || (type == model::PaddingType::SAME) ||
+ (type == model::PaddingType::VALID));
+
+ switch (type)
+ {
+ case model::PaddingType::EXPLICIT:
+ return "Padding::EXPLICIT";
+ case model::PaddingType::SAME:
+ return "Padding::SAME";
+ case model::PaddingType::VALID:
+ return "Padding::VALID";
+ }
+
+ return nullptr;
+}
+
+Coordinates convertCoordinates(const Coordinates &from_coordinates, model::Layout from_layout,
+ model::Layout to_layout)
+{
+ assert(from_coordinates.size() == 4);
+ Coordinates to{from_coordinates};
+ if (from_layout == model::Layout::NHWC && to_layout == model::Layout::NCHW)
+ {
+ to.set(0, from_coordinates[0]);
+ to.set(1, from_coordinates[3]);
+ to.set(2, from_coordinates[1]);
+ to.set(3, from_coordinates[2]);
+ }
+ else if (from_layout == model::Layout::NCHW && to_layout == model::Layout::NHWC)
+ {
+ to.set(0, from_coordinates[0]);
+ to.set(1, from_coordinates[2]);
+ to.set(2, from_coordinates[3]);
+ to.set(3, from_coordinates[1]);
+ }
+
+ return to;
+}
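+
+// For example, NHWC coordinates (n, h, w, c) = (0, 1, 2, 3) map to NCHW
+// coordinates (n, c, h, w) = (0, 3, 1, 2); any other layout pair is
+// returned unchanged.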
+
+} // namespace util
+} // namespace neurun
diff --git a/runtimes/neurun/frontend/CMakeLists.txt b/runtimes/neurun/frontend/CMakeLists.txt
new file mode 100644
index 000000000..5ea6cdadd
--- /dev/null
+++ b/runtimes/neurun/frontend/CMakeLists.txt
@@ -0,0 +1 @@
+add_subdirectories()
diff --git a/runtimes/neurun/frontend/api/CMakeLists.txt b/runtimes/neurun/frontend/api/CMakeLists.txt
new file mode 100644
index 000000000..10a33a85e
--- /dev/null
+++ b/runtimes/neurun/frontend/api/CMakeLists.txt
@@ -0,0 +1,12 @@
+file(GLOB_RECURSE API_SRC "*.cc")
+
+set(NEURUN_DEV nnfw-dev)
+add_library(${NEURUN_DEV} SHARED ${API_SRC})
+
+target_link_libraries(${NEURUN_DEV} PUBLIC nnfw-header)
+target_link_libraries(${NEURUN_DEV} PUBLIC neurun_core) # TODO Link PRIVATE neurun_core
+target_link_libraries(${NEURUN_DEV} PRIVATE jsoncpp tflite_loader ${LIB_PTHREAD})
+target_link_libraries(${NEURUN_DEV} PRIVATE nnfw_common)
+target_link_libraries(${NEURUN_DEV} PRIVATE nnfw_coverage)
+
+install(TARGETS ${NEURUN_DEV} DESTINATION lib)
diff --git a/runtimes/neurun/frontend/api/nnfw_dev.cc b/runtimes/neurun/frontend/api/nnfw_dev.cc
new file mode 100644
index 000000000..ddb6dd572
--- /dev/null
+++ b/runtimes/neurun/frontend/api/nnfw_dev.cc
@@ -0,0 +1,228 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "wrapper/nnfw_api.hpp"
+
+/*
+ * Create a new session instance
+ *
+ * @param session the session to be created
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_create_session(nnfw_session **session)
+{
+ *session = new nnfw_session();
+
+ return NNFW_STATUS_NO_ERROR;
+}
+
+/*
+ * Close a session instance
+ *
+ * @param session the session to be closed
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_close_session(nnfw_session *session)
+{
+ delete session;
+ return NNFW_STATUS_NO_ERROR;
+}
+
+#define NNFW_RETURN_ERROR_IF_NULL(p) \
+ do \
+ { \
+ if ((p) == NULL) \
+ return NNFW_STATUS_ERROR; \
+ } while (0)
+
+/*
+ * Load model from nnpackage file or directory
+ *
+ * @param session nnfw_session loading the given nnpackage file/dir
+ * @param package_file_path path to the nnpackage file or unzipped directory to be loaded
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_load_model_from_file(nnfw_session *session, const char *package_file_path)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->load_model_from_file(package_file_path);
+}
+
+/*
+ * Prepare session to be ready for inference
+ * This phase may finalize model compilation, scheduling, and additional settings.
+ *
+ * @param session the session to be prepared
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_prepare(nnfw_session *session)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->prepare();
+}
+
+/*
+ * Run inference
+ *
+ * @param session the session to run inference
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_run(nnfw_session *session)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->run();
+}
+
+/*
+ * Set input
+ *
+ * @param session session to which the input is to be set
+ * @param index index of input to be set (0-indexed)
+ * @param type type of the input
+ * @param buffer raw buffer for input
+ * @param length size of the input buffer in bytes
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+
+NNFW_STATUS nnfw_set_input(nnfw_session *session, uint32_t index, NNFW_TYPE type,
+ const void *buffer, size_t length)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->set_input(index, type, buffer, length);
+}
+
+/*
+ * Set output
+ *
+ * @param session session from which the inference output is to be extracted
+ * @param index index of output to be set (0-indexed)
+ * @param type type of the output
+ * @param buffer raw buffer for output
+ * @param length size of the output buffer in bytes
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+
+NNFW_STATUS nnfw_set_output(nnfw_session *session, uint32_t index, NNFW_TYPE type, void *buffer,
+ size_t length)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->set_output(index, type, buffer, length);
+}
+
+/*
+ * Get the number of inputs
+ *
+ * @param[in] session session from which input information is extracted
+ * @param[out] number variable into which the number of inputs is written
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+
+NNFW_STATUS nnfw_input_size(nnfw_session *session, uint32_t *number)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->input_size(number);
+}
+
+/*
+ * Get the number of outputs
+ *
+ * @param[in] session session from which output information is extracted
+ * @param[out] number variable into which the number of outputs is written
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_output_size(nnfw_session *session, uint32_t *number)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->output_size(number);
+}
+
+/*
+ * Get i-th input tensor info
+ *
+ * @param[in] session session from which input information is extracted
+ * @param[in] index index of input
+ * @param[out] tensor_info nnfw_tensor_info
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_input_tensorinfo(nnfw_session *session, uint32_t index,
+ nnfw_tensorinfo *tensor_info)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->input_tensorinfo(index, tensor_info);
+}
+
+/*
+ * Get i-th output tensor info
+ *
+ * @param[in] session session from which output information is extracted
+ * @param[in] index index of output
+ * @param[out] tensor_info nnfw_tensor_info
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_output_tensorinfo(nnfw_session *session, uint32_t index,
+ nnfw_tensorinfo *tensor_info)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->output_tensorinfo(index, tensor_info);
+}
+
+/*
+ * Register custom operation
+ * @param session session to register this operation
+ * @param id operation id
+ * @param info registration info (eval function, etc.)
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_register_custom_op_info(nnfw_session *session, const char *id,
+ custom_kernel_registration_info *info)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->register_custom_operation(id, info->eval_function);
+}
+
+/*
+ * Set default backend
+ *
+ * @param[in] session session to which a default backend is set
+ * @param[in] backend default backend
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_set_default_backend(nnfw_session *session, const char *backend)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->set_default_backend(backend);
+}
+
+/*
+ * Set the operation's backend
+ *
+ * @param[in] session session to be modified
+ * @param[in] op operation to be set
+ * @param[in] backend backend on which the operation runs
+ *
+ * @return NNFW_STATUS_NO_ERROR if successful
+ */
+NNFW_STATUS nnfw_set_op_backend(nnfw_session *session, const char *op, const char *backend)
+{
+ NNFW_RETURN_ERROR_IF_NULL(session);
+ return session->set_op_backend(op, backend);
+}
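+
+/*
+ * Minimal usage sketch of this API (illustrative only; the nnpackage path
+ * and the buffer sizes below are hypothetical):
+ *
+ * nnfw_session *session = nullptr;
+ * nnfw_create_session(&session);
+ * nnfw_load_model_from_file(session, "path/to/nnpackage");
+ * nnfw_prepare(session);
+ *
+ * float input[INPUT_ELEMS]; // hypothetical buffer sizes
+ * float output[OUTPUT_ELEMS];
+ * nnfw_set_input(session, 0, NNFW_TYPE_TENSOR_FLOAT32, input, sizeof(input));
+ * nnfw_set_output(session, 0, NNFW_TYPE_TENSOR_FLOAT32, output, sizeof(output));
+ *
+ * nnfw_run(session);
+ * nnfw_close_session(session);
+ */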
diff --git a/runtimes/neurun/frontend/api/wrapper/nnfw_api.cc b/runtimes/neurun/frontend/api/wrapper/nnfw_api.cc
new file mode 100644
index 000000000..a6021f0da
--- /dev/null
+++ b/runtimes/neurun/frontend/api/wrapper/nnfw_api.cc
@@ -0,0 +1,366 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "nnfw_api.hpp"
+#include "backend/CustomKernelRegistry.h"
+#include "compiler/Compiler.h"
+#include "exec/Execution.h"
+#include "loader.h"
+#include "json/json.h"
+#include <iostream>
+#include <string>
+#include <dirent.h>
+#include <limits.h>
+#include <stdint.h>
+#include <util/ConfigSource.h>
+
+/*
+ * The API does not accept string arguments longer than the max lengths below
+ */
+#define MAX_BACKEND_NAME_LENGTH 32
+#define MAX_OP_NAME_LENGTH 64
+
+// Is the string null-terminated within the given length?
+static bool null_terminating(const char *str, uint32_t length)
+{
+ for (uint32_t i = 0; i < length; i++)
+ {
+ if (*(str + i) == '\0')
+ {
+ return true;
+ }
+ }
+ return false;
+}
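+
+// For example, null_terminating("acl_cl", MAX_BACKEND_NAME_LENGTH) is true,
+// while a 32-byte buffer containing no '\0' at all yields false.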
+
+nnfw_session::nnfw_session()
+ : _graph{nullptr}, _execution{nullptr},
+ _kernel_registry{std::make_shared<neurun::backend::custom::KernelRegistry>()},
+ _source{nnfw::cpp14::make_unique<neurun::util::GeneralConfigSource>()}
+{
+ // DO NOTHING
+}
+
+NNFW_STATUS nnfw_session::load_model_from_file(const char *package_dir)
+{
+ // TODO: Add support for loading a zipped package file
+ DIR *dir;
+ if (!(dir = opendir(package_dir)))
+ {
+ std::cerr << "invalid nnpackge directory: " << package_dir << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ closedir(dir);
+
+ try
+ {
+ std::string manifest_file_name(package_dir);
+ manifest_file_name += "/metadata/MANIFEST";
+ std::ifstream mfs(manifest_file_name);
+
+ // extract the filename of the first (index 0) model
+ // e.g. in the MANIFEST file: { "models" : [ "firstmodel.tflite", "2nd.tflite" ] }
+ Json::Value root;
+ mfs >> root;
+ Json::Value models = root["models"];
+
+ auto model = nnfw::cpp14::make_unique<neurun::model::Model>();
+ _graph = std::make_shared<neurun::graph::Graph>(std::move(model));
+ _graph->bindKernelRegistry(_kernel_registry);
+ tflite_loader::Loader loader(*_graph);
+ auto model_file_path = package_dir + std::string("/") + models[0].asString(); // first model
+ loader.loadFromFile(model_file_path.c_str());
+ }
+ catch (...)
+ {
+ std::cerr << "Error during model loading" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::prepare()
+{
+ // TODO: Add additional setting routines (executor type, backend)
+ // Note that we assume the acl_cl backend
+
+ try
+ {
+ // config_source setting
+ using neurun::util::config_source;
+ config_source(std::move(_source));
+
+ auto compiler = nnfw::cpp14::make_unique<neurun::compiler::Compiler>(_graph);
+ compiler->compile();
+ std::shared_ptr<neurun::exec::IExecutor> executor;
+ compiler->release(executor);
+ _execution = std::make_shared<neurun::exec::Execution>(executor);
+ }
+ catch (...)
+ {
+ std::cerr << "Error during model prepare" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::run()
+{
+ try
+ {
+ _execution->execute();
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::run" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::set_input(uint32_t index, NNFW_TYPE /*type*/, const void *buffer,
+ size_t length)
+{
+ try
+ {
+ _execution->setInput(neurun::model::IOIndex(index), buffer, length);
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::set_input" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::set_output(uint32_t index, NNFW_TYPE /*type*/, void *buffer,
+ size_t length)
+{
+ try
+ {
+ _execution->setOutput(neurun::model::IOIndex(index), buffer, length);
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::set_output" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::input_size(uint32_t *number)
+{
+ try
+ {
+ if (number == nullptr)
+ {
+ std::cerr << "Error during nnfw_session::input_size, number is null pointer." << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ *number = _graph->getInputs().size();
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::input_size" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::output_size(uint32_t *number)
+{
+ try
+ {
+ if (number == nullptr)
+ {
+ std::cerr << "Error during nnfw_session::output_size, number is null pointer." << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ *number = _graph->getOutputs().size();
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::output_size" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+static NNFW_TYPE datatype_to_nnfw_dtype(neurun::model::DataType dt)
+{
+ using neurun::model::DataType;
+ switch (dt)
+ {
+ case DataType::FLOAT32:
+ return NNFW_TYPE_TENSOR_FLOAT32;
+ case DataType::INT32:
+ return NNFW_TYPE_TENSOR_INT32;
+ case DataType::QUANT8_ASYMM:
+ return NNFW_TYPE_TENSOR_QUANT8_ASYMM;
+ case DataType::BOOL8:
+ return NNFW_TYPE_TENSOR_BOOL;
+ case DataType::UINT32:
+ default:
+ std::cerr << "Error: Model has type that runtime API does not support." << std::endl;
+ exit(-1);
+ }
+}
+
+NNFW_STATUS nnfw_session::input_tensorinfo(uint32_t index, nnfw_tensorinfo *ti)
+{
+ try
+ {
+ if (ti == nullptr)
+ {
+ std::cerr << "Error during nnfw_session::input_tensorinfo, tensorinfo is null pointer."
+ << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ if (index >= _graph->getInputs().size())
+ {
+ std::cerr << "Error during nnfw_session::input_tensorinfo, index is out of range."
+ << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ auto opidx = _graph->getInputs().at(index);
+ auto shape = _graph->operands().at(opidx).shape();
+ ti->rank = shape.rank();
+ for (int j = 0; j < ti->rank; ++j)
+ {
+ ti->dims[j] = shape.dim(j);
+ }
+ ti->dtype = datatype_to_nnfw_dtype(_graph->operands().at(opidx).typeInfo().type());
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::input_tensorinfo." << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::output_tensorinfo(uint32_t index, nnfw_tensorinfo *ti)
+{
+ try
+ {
+ if (ti == nullptr)
+ {
+ std::cerr << "Error during nnfw_session::output_tensorinfo, tensorinfo is null pointer."
+ << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ if (index >= _graph->getOutputs().size())
+ {
+ std::cerr << "Error during nnfw_session::output_tensorinfo, index is out of range."
+ << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ auto opidx = _graph->getOutputs().at(index);
+ auto shape = _graph->operands().at(opidx).shape();
+ ti->rank = shape.rank();
+ for (int j = 0; j < ti->rank; ++j)
+ {
+ ti->dims[j] = shape.dim(j);
+ }
+ ti->dtype = datatype_to_nnfw_dtype(_graph->operands().at(opidx).typeInfo().type());
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::output_tensorinfo." << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::register_custom_operation(const std::string &id,
+ nnfw_custom_eval eval_func)
+{
+ _kernel_registry->registerKernel(id, eval_func);
+ return NNFW_STATUS_NO_ERROR;
+}
+
+static std::string get_op_backend_string(std::string op)
+{
+ // TODO: Provide complete set of operations
+ static std::unordered_map<std::string, std::string> operation_map = {
+ {"TRANSPOSE_CONV", "OP_BACKEND_TransposeConvNode"},
+ {"CONV_2D", "OP_BACKEND_Conv2DNode"},
+ {"DEPTHWISE_CONV_2D", "OP_BACKEND_DepthwiseConv2DNode"},
+ {"MEAN", "OP_BACKEND_MeanNode"},
+ {"AVERAGE_POOL_2D", "OP_BACKEND_AvgPool2DNode"},
+ {"MAX_POOL_2D", "OP_BACKEND_MaxPool2DNode"},
+ };
+
+ auto n = operation_map.find(op);
+
+ if (n == operation_map.end())
+ {
+ // an empty return value is handled by the caller, which returns an error code
+ return std::string("");
+ }
+ else
+ {
+ return n->second;
+ }
+}
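+
+// For example, set_op_backend("CONV_2D", "acl_neon") resolves the key
+// "OP_BACKEND_Conv2DNode" via the map above and stores the pair in the
+// config source ("acl_neon" is an illustrative backend name).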
+
+NNFW_STATUS nnfw_session::set_default_backend(const char *backend)
+{
+ try
+ {
+ if (!backend || null_terminating(backend, MAX_BACKEND_NAME_LENGTH) == false)
+ {
+ return NNFW_STATUS_ERROR;
+ }
+
+ _source->set("OP_BACKEND_ALLOPS", backend);
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::set_default_backend" << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
+
+NNFW_STATUS nnfw_session::set_op_backend(const char *op, const char *backend)
+{
+ try
+ {
+ if (!op || !null_terminating(op, MAX_OP_NAME_LENGTH) || !backend ||
+ !null_terminating(backend, MAX_BACKEND_NAME_LENGTH))
+ {
+ return NNFW_STATUS_ERROR;
+ }
+
+ auto key = get_op_backend_string(op);
+
+ if (key.empty())
+ {
+ return NNFW_STATUS_ERROR;
+ }
+
+ _source->set(key, backend);
+ }
+ catch (...)
+ {
+ std::cerr << "Error during nnfw_session::set_op_backend." << std::endl;
+ return NNFW_STATUS_ERROR;
+ }
+ return NNFW_STATUS_NO_ERROR;
+}
diff --git a/runtimes/neurun/frontend/api/wrapper/nnfw_api.hpp b/runtimes/neurun/frontend/api/wrapper/nnfw_api.hpp
new file mode 100644
index 000000000..84514161e
--- /dev/null
+++ b/runtimes/neurun/frontend/api/wrapper/nnfw_api.hpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __API_NNFW_INTERNAL_HPP__
+#define __API_NNFW_INTERNAL_HPP__
+
+#include "nnfw.h"
+#include "nnfw_dev.h"
+
+#include <util/GeneralConfigSource.h>
+
+#include <string>
+#include <memory>
+
+namespace neurun
+{
+namespace backend
+{
+namespace custom
+{
+class KernelRegistry;
+}
+}
+namespace exec
+{
+class Execution;
+}
+namespace graph
+{
+class Graph;
+}
+}
+
+struct nnfw_session
+{
+public:
+ nnfw_session();
+
+ NNFW_STATUS load_model_from_file(const char *package_file_path);
+ NNFW_STATUS prepare();
+ NNFW_STATUS run();
+
+ NNFW_STATUS set_input(uint32_t index, NNFW_TYPE type, const void *buffer, size_t length);
+ NNFW_STATUS set_output(uint32_t index, NNFW_TYPE type, void *buffer, size_t length);
+
+ NNFW_STATUS input_size(uint32_t *number);
+ NNFW_STATUS output_size(uint32_t *number);
+
+ NNFW_STATUS input_tensorinfo(uint32_t index, nnfw_tensorinfo *ti);
+ NNFW_STATUS output_tensorinfo(uint32_t index, nnfw_tensorinfo *ti);
+
+ NNFW_STATUS register_custom_operation(const std::string &id, nnfw_custom_eval eval_func);
+
+ NNFW_STATUS set_default_backend(const char *backend);
+ NNFW_STATUS set_op_backend(const char *op, const char *backend);
+
+private:
+ std::shared_ptr<neurun::graph::Graph> _graph;
+ std::shared_ptr<neurun::exec::Execution> _execution;
+ std::shared_ptr<neurun::backend::custom::KernelRegistry> _kernel_registry;
+ std::unique_ptr<neurun::util::GeneralConfigSource> _source;
+};
+
+#endif // __API_NNFW_INTERNAL_HPP__
diff --git a/runtimes/neurun/frontend/nnapi/ANeuralNetworksModel.test.cc b/runtimes/neurun/frontend/nnapi/ANeuralNetworksModel.test.cc
new file mode 100644
index 000000000..15a279a7e
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/ANeuralNetworksModel.test.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "wrapper/ANeuralNetworksModel.h"
+
+TEST(MODEL, model_build)
+{
+ ANeuralNetworksModel model;
+ ASSERT_EQ(model.isFinished(), false);
+}
diff --git a/runtimes/neurun/frontend/nnapi/CMakeLists.txt b/runtimes/neurun/frontend/nnapi/CMakeLists.txt
new file mode 100644
index 000000000..bd58de91b
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/CMakeLists.txt
@@ -0,0 +1,21 @@
+file(GLOB_RECURSE SOURCES_FRONTEND "*.cc")
+file(GLOB_RECURSE TESTS_FRONTEND "*.test.cc")
+list(REMOVE_ITEM SOURCES_FRONTEND ${TESTS_FRONTEND})
+
+add_library(${LIB_NEURUN} SHARED ${SOURCES_FRONTEND})
+target_link_libraries(${LIB_NEURUN} PUBLIC nnfw-header)
+target_link_libraries(${LIB_NEURUN} PUBLIC neurun_core) # TODO Link PRIVATE neurun_core
+target_link_libraries(${LIB_NEURUN} PRIVATE nnfw_common)
+target_link_libraries(${LIB_NEURUN} PRIVATE nnfw_coverage)
+
+set_target_properties(${LIB_NEURUN} PROPERTIES OUTPUT_NAME neuralnetworks)
+
+install(TARGETS ${LIB_NEURUN} DESTINATION lib)
+
+add_executable(test_neurun_frontend_nnapi ${TESTS_FRONTEND})
+
+target_link_libraries(test_neurun_frontend_nnapi PRIVATE ${LIB_NEURUN})
+target_link_libraries(test_neurun_frontend_nnapi PRIVATE gtest)
+target_link_libraries(test_neurun_frontend_nnapi PRIVATE gtest_main)
+
+install(TARGETS test_neurun_frontend_nnapi DESTINATION unittest)
diff --git a/runtimes/neurun/frontend/nnapi/compilation.cc b/runtimes/neurun/frontend/nnapi/compilation.cc
new file mode 100644
index 000000000..c35314989
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/compilation.cc
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <NeuralNetworks.h>
+
+#include <new>
+
+#include "wrapper/ANeuralNetworksModel.h"
+#include "wrapper/ANeuralNetworksCompilation.h"
+#include "util/logging.h"
+
+//
+// NNAPI Implementation
+//
+int ANeuralNetworksCompilation_create(ANeuralNetworksModel *model,
+ ANeuralNetworksCompilation **compilation)
+{
+ if ((model == nullptr) || (compilation == nullptr))
+ {
+ VERBOSE(NNAPI::Compilation) << "create: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (!model->isFinished())
+ {
+ VERBOSE(NNAPI::Compilation) << "create: Model define is not finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ std::shared_ptr<neurun::graph::Graph> internal;
+
+ model->release(internal);
+
+ *compilation = new (std::nothrow) ANeuralNetworksCompilation(internal);
+ if (*compilation == nullptr)
+ {
+ VERBOSE(NNAPI::Compilation) << "create: ail to create compilation object" << std::endl;
+ return ANEURALNETWORKS_OUT_OF_MEMORY;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation *compilation)
+{
+ if (compilation == nullptr)
+ {
+ VERBOSE(NNAPI::Compilation) << "finish: Incorrect null pointer parameter" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (compilation->state() != ::neurun::compiler::State::CREATED)
+ {
+ VERBOSE(NNAPI::Compilation) << "finish: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ if (!compilation->finish())
+ {
+ VERBOSE(NNAPI::Compilation) << "finish: Fail to compile" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation *compilation)
+{
+ delete compilation;
+}
+
+int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation *compilation,
+ int32_t preference)
+{
+ if (compilation == nullptr)
+ {
+ VERBOSE(NNAPI::Compilation) << "setPreference: Incorrect null pointer parameter" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (compilation->state() != ::neurun::compiler::State::CREATED)
+ {
+ VERBOSE(NNAPI::Compilation) << "setPreference: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ const PreferenceCode FIRST_PREFERENCE_CODE = ANEURALNETWORKS_PREFER_LOW_POWER;
+ const PreferenceCode LAST_PREFERENCE_CODE = ANEURALNETWORKS_PREFER_SUSTAINED_SPEED;
+ if ((preference < FIRST_PREFERENCE_CODE) || (preference > LAST_PREFERENCE_CODE))
+ {
+ VERBOSE(NNAPI::Compilation) << "setPreference: Incorrect preference code" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ // NYI: nothing to set
+ return ANEURALNETWORKS_NO_ERROR;
+}
diff --git a/runtimes/neurun/frontend/nnapi/event.cc b/runtimes/neurun/frontend/nnapi/event.cc
new file mode 100644
index 000000000..593b74e90
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/event.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <NeuralNetworks.h>
+
+#include "wrapper/ANeuralNetworksEvent.h"
+
+int ANeuralNetworksEvent_wait(ANeuralNetworksEvent *event)
+{
+ if (event == nullptr)
+ {
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (!event->waitFinish())
+ {
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+void ANeuralNetworksEvent_free(ANeuralNetworksEvent *event) { delete event; }
diff --git a/runtimes/neurun/frontend/nnapi/execution.cc b/runtimes/neurun/frontend/nnapi/execution.cc
new file mode 100644
index 000000000..837cab0fa
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/execution.cc
@@ -0,0 +1,411 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <NeuralNetworks.h>
+
+#include <new>
+
+#include "wrapper/ANeuralNetworksCompilation.h"
+#include "wrapper/ANeuralNetworksExecution.h"
+#include "wrapper/ANeuralNetworksMemory.h"
+#include "wrapper/ANeuralNetworksEvent.h"
+#include "wrapper/NNAPIConvert.h"
+#include "util/logging.h"
+
+//
+// NNAPI Implementation
+//
+int ANeuralNetworksExecution_create(ANeuralNetworksCompilation *compilation,
+ ANeuralNetworksExecution **execution)
+{
+ if ((compilation == nullptr) || (execution == nullptr))
+ {
+ VERBOSE(NNAPI::Execution) << "create: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ std::shared_ptr<neurun::exec::IExecutor> executor;
+
+ compilation->publish(executor);
+
+ if (executor == nullptr)
+ {
+ VERBOSE(NNAPI::Execution) << "create: Never compiled yet" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ *execution = new (std::nothrow) ANeuralNetworksExecution{executor};
+ if (*execution == nullptr)
+ {
+ VERBOSE(NNAPI::Execution) << "create: Fail to create execution object" << std::endl;
+ return ANEURALNETWORKS_OUT_OF_MEMORY;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+// NOTE Handle optional input
+// Unspecified shape on model build
+// Optional and omitted input on execution: skip input setting (workaround for LSTM)
+// Optional but not omitted input on execution: cannot handle
+// Normal input on execution: cannot handle
+// Fully specified shape on model build
+// Optional input on execution: cannot handle
+// Normal input: handle normally
+int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, const void *buffer,
+ size_t length)
+{
+ // Don't check type
+ // Comment about ANeuralNetworksOperandType in NeuralNetworks.h:
+ // If the input or output is optional and omitted then it need not have a fully specified tensor
+ // operand type
+ if ((execution == nullptr) || ((buffer == nullptr) && (length != 0)))
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if ((buffer != nullptr) && (length == 0))
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Zero length input" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ const auto operand_index = execution->getInputOperandIndex(index);
+ if (!operand_index.valid())
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Invalid input index" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ // Omitted optional input
+ // Some of the LSTM operation's inputs can be optional
+ if ((buffer == nullptr) && (length == 0))
+ {
+ if (execution->haveUnspecifiedDims(operand_index))
+ {
+ return ANEURALNETWORKS_NO_ERROR;
+ }
+ else
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Cannot handle fully-specified shape on model build "
+ "but omitted input on execution"
+ << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ if (type != nullptr)
+ {
+ if (!execution->compareDataType(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Data type mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!execution->compareShape(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Shape mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (NNAPIConvert::calculateSizeFromType(type) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+ else
+ {
+ if (execution->haveUnspecifiedDims(operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Unspecified dimension value" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (execution->getOperandSize(operand_index) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ if (!execution->setInput(index, type, buffer, length))
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Fail to set input" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type, void *buffer,
+ size_t length)
+{
+ // Don't check type
+ // Comment about ANeuralNetworksOperandType in NeuralNetworks.h:
+ // If the input or output is optional and omitted then it need not have a fully specified tensor
+ // operand type
+ if ((execution == nullptr) || ((buffer == nullptr) && (length != 0)))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if ((buffer != nullptr) && (length == 0))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Zero length output" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ // Handle optional output
+ if (buffer == nullptr)
+ {
+ return ANEURALNETWORKS_NO_ERROR;
+ }
+
+ const auto operand_index = execution->getOutputOperandIndex(index);
+ if (!operand_index.valid())
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Invalid output index" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (type != nullptr)
+ {
+ if (!execution->compareDataType(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Data type mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!execution->compareShape(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Shape mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (NNAPIConvert::calculateSizeFromType(type) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+ else
+ {
+ if (execution->haveUnspecifiedDims(operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Unspecified dimension value" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (execution->getOperandSize(operand_index) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setInput: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ if (!execution->setOutput(index, type, buffer, length))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutput: Fail to set output" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution *execution,
+ ANeuralNetworksEvent **event)
+{
+ if ((execution == nullptr) || (event == nullptr))
+ {
+ VERBOSE(NNAPI::Execution) << "startCompute: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ // TODO: Handle event
+ auto instance = execution->instance();
+ *event = new (std::nothrow) ANeuralNetworksEvent{instance};
+ if (*event == nullptr)
+ {
+ VERBOSE(NNAPI::Execution) << "startCompute: Fail to create event" << std::endl;
+ return ANEURALNETWORKS_OUT_OF_MEMORY;
+ }
+
+ if (!execution->startExecute())
+ {
+ VERBOSE(NNAPI::Execution) << "startCompute: Fail to start execution" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+void ANeuralNetworksExecution_free(ANeuralNetworksExecution *execution) { delete execution; }
+
+int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length)
+{
+ if ((execution == nullptr) || (memory == nullptr))
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Incorrect null pointer parameter(s)"
+ << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (length == 0)
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Zero length input" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ const auto operand_index = execution->getInputOperandIndex(index);
+ if (!operand_index.valid())
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Invalid input index" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (type != nullptr)
+ {
+ if (!execution->compareDataType(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Data type mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!execution->compareShape(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Shape mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (NNAPIConvert::calculateSizeFromType(type) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+ else
+ {
+ if (execution->haveUnspecifiedDims(operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Unspecified dimension value" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (execution->getOperandSize(operand_index) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ if (!memory->vaildAccess(offset, length))
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Invalid memory access" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!execution->setInput(index, type, reinterpret_cast<const void *>(memory->base() + offset),
+ length))
+ {
+ VERBOSE(NNAPI::Execution) << "setInputFromMemory: Fail to set input" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution *execution, int32_t index,
+ const ANeuralNetworksOperandType *type,
+ const ANeuralNetworksMemory *memory, size_t offset,
+ size_t length)
+{
+ if ((execution == nullptr) || (memory == nullptr))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Incorrect null pointer parameter(s)"
+ << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (length == 0)
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Zero length input" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ const auto operand_index = execution->getOutputOperandIndex(index);
+ if (!operand_index.valid())
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Invalid output index" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (type != nullptr)
+ {
+ if (!execution->compareDataType(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Data type mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!execution->compareShape(type, operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Shape mismatch" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (NNAPIConvert::calculateSizeFromType(type) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+ else
+ {
+ if (execution->haveUnspecifiedDims(operand_index))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Unspecified dimension value" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (execution->getOperandSize(operand_index) != length)
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Invalid length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ if (!memory->vaildAccess(offset, length))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Invalid memory access" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!execution->setOutput(index, type, reinterpret_cast<void *>(memory->base() + offset), length))
+ {
+ VERBOSE(NNAPI::Execution) << "setOutputFromMemory: Fail to set input" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
diff --git a/runtimes/neurun/frontend/nnapi/memory.cc b/runtimes/neurun/frontend/nnapi/memory.cc
new file mode 100644
index 000000000..fbe1a48e8
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/memory.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <NeuralNetworks.h>
+#include <sys/mman.h>
+#include <new>
+#include <memory>
+
+#include "cpp14/memory.h"
+#include "wrapper/ANeuralNetworksMemory.h"
+
+int ANeuralNetworksMemory_createFromFd(size_t size, int protect, int fd, size_t offset,
+ ANeuralNetworksMemory **memory)
+{
+ if (memory == nullptr)
+ {
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ *memory = new (std::nothrow) ANeuralNetworksMemory{size, protect, fd, offset};
+ if (*memory == nullptr)
+ {
+ return ANEURALNETWORKS_OUT_OF_MEMORY;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+void ANeuralNetworksMemory_free(ANeuralNetworksMemory *memory) { delete memory; }
diff --git a/runtimes/neurun/frontend/nnapi/model.cc b/runtimes/neurun/frontend/nnapi/model.cc
new file mode 100644
index 000000000..e854b1694
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/model.cc
@@ -0,0 +1,396 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <NeuralNetworks.h>
+#include <NeuralNetworksEx.h>
+
+#include <new>
+
+#include "wrapper/ANeuralNetworksModel.h"
+#include "wrapper/ANeuralNetworksMemory.h"
+#include "util/logging.h"
+
+int ANeuralNetworksModel_create(ANeuralNetworksModel **model)
+{
+ if (model == nullptr)
+ {
+ VERBOSE(NNAPI::Model) << "create: Incorrect null pointer parameter" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ *model = new (std::nothrow) ANeuralNetworksModel{};
+ if (*model == nullptr)
+ {
+ VERBOSE(NNAPI::Model) << "create: Fail to create model object" << std::endl;
+ return ANEURALNETWORKS_OUT_OF_MEMORY;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+void ANeuralNetworksModel_free(ANeuralNetworksModel *model) { delete model; }
+
+int ANeuralNetworksModel_addOperand(ANeuralNetworksModel *model,
+ const ANeuralNetworksOperandType *type)
+{
+ if ((model == nullptr) || (type == nullptr))
+ {
+ VERBOSE(NNAPI::Model) << "addOperand: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (model->isFinished())
+ {
+ VERBOSE(NNAPI::Model) << "addOperand: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ // scale and zeroPoint should be zero for scalars and non-fixed point tensors
+ // Quantized:
+ // scale: a 32 bit floating point value greater than zero
+ // zeroPoint: a 32 bit integer, in range [0, 255]
+ if (type->type == ANEURALNETWORKS_TENSOR_QUANT8_ASYMM)
+ {
+ if (!(type->scale > 0.0f))
+ {
+ VERBOSE(NNAPI::Model) << "addOperand: Incorrect scale value for quantization" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if ((type->zeroPoint < 0) || (type->zeroPoint > 255))
+ {
+ VERBOSE(NNAPI::Model) << "addOperand: Incorrect zeroPoint value for quantization"
+ << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+ // NOTE Validation of scale and zeroPoint would be skipped for a while.
+ // We do not know whether scalar type can have scale and zeroPoint.
+ // To pass ValidationTest and GeneratedTest, this validation code
+ // would not be implemented until we can define this issue clearly.
+ //
+ // scale and zeroPoint should be zero for scalars and non-fixed point tensors
+ // else if ((type->scale != 0.0f) || (type->zeroPoint != 0))
+ // {
+ // return ANEURALNETWORKS_BAD_DATA;
+ // }
+
+ // dimensionCount should be zero for scalars
+ if ((type->dimensionCount != 0) &&
+ ((type->type == ANEURALNETWORKS_FLOAT32) || (type->type == ANEURALNETWORKS_INT32) ||
+ (type->type == ANEURALNETWORKS_UINT32)))
+ {
+ VERBOSE(NNAPI::Model) << "addOperand: Incorrect data type" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!model->addOperand(type))
+ {
+ VERBOSE(NNAPI::Model) << "addOperand: Fail to add operand" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel *model, int32_t index,
+ const void *buffer, size_t length)
+{
+ const bool optional_operand = ((buffer == nullptr) && (length == 0));
+
+ if ((model == nullptr) || ((buffer == nullptr) && (length != 0)))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValue: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (model->isFinished())
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValue: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ // Negative index value is not allowed
+ if (index < 0)
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValue: Invalid index value (negative)" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ // NOTE ::neurun::model::OperandIndex uses uint32_t as its underlying type as various NNAPI
+ // functions such as ANeuralNetworksModel_addOperation use uint32_t to represent operand
+ // index
+ // ANeuralNetworksModel_setOperandValue, however, uses int32_t to represent operand index.
+ //
+ // Below, static_cast<uint32_t>(...) is introduced to eliminate compiler warning.
+ uint32_t ind = static_cast<uint32_t>(index);
+
+ if (!model->isExistOperand(ind))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValue: Invalid index value (not exist)" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!optional_operand && (model->operandSize(ind) != length))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValue: Invalid data length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (model->isUsageSet(ind))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValue: Already set operand" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ // NNAPI spec in NeuralNetworks.h
+ // For values of length greater than ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES,
+ // the application is responsible for not changing the content of this region
+ // until all executions using this model have completed
+ bool copy_value = false;
+ if (length <= ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES)
+ {
+ copy_value = true;
+ }
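+ // For example, a 4-byte INT32 scalar (well under the 128-byte NNAPI limit)
+ // is copied immediately, while a large constant weight tensor is kept by
+ // reference and must stay alive until all executions using this model
+ // have completed.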
+
+ if (!model->setOperandValue(ind, buffer, length, optional_operand, copy_value))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValue: Fail to set operand value" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel *model, int32_t index,
+ const ANeuralNetworksMemory *memory,
+ size_t offset, size_t length)
+{
+ if ((model == nullptr) || (memory == nullptr))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValueFromMemory: Incorrect null pointer parameter(s)"
+ << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (model->isFinished())
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValueFromMemory: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ // Negative index value is not allowed
+ if (index < 0)
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValueFromMemory: Invalid index value (negative)"
+ << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ // NOTE ::neurun::model::OperandIndex uses uint32_t as its underlying type as various NNAPI
+ // functions such as ANeuralNetworksModel_addOperation use uint32_t to represent operand
+ // index
+ // ANeuralNetworksModel_setOperandValue, however, uses int32_t to represent operand index.
+ //
+ // Below, static_cast<uint32_t>(...) is introduced to eliminate compiler warning.
+ uint32_t ind = static_cast<uint32_t>(index);
+
+ if (!model->isExistOperand(ind))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValueFromMemory: Invalid index value (not exist)"
+ << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if ((model->operandSize(ind) != length) || (memory->size() < (offset + length)))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValueFromMemory: Invalid data length" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (model->isUsageSet(ind))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValueFromMemory: Already set operand" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!model->setOperandValue(ind, memory->base() + offset, length))
+ {
+ VERBOSE(NNAPI::Model) << "setOperandValueFromMemory: Fail to set operand value" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksModel_addOperation(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationType type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs)
+{
+ if ((model == nullptr) || (inputs == nullptr) || (outputs == nullptr))
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (model->isFinished())
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ const ANeuralNetworksOperationType FIRST_OPERATION = ANEURALNETWORKS_ADD;
+ const ANeuralNetworksOperationType LAST_OPERATION = ANEURALNETWORKS_TRANSPOSE;
+ if ((type < FIRST_OPERATION) || (type > LAST_OPERATION))
+ {
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ for (uint32_t i = 0; i < outputCount; i++)
+ {
+ if (model->isUsageSet(outputs[i]))
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Already set output operand" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ if (!model->addOperation(type, inputCount, inputs, outputCount, outputs))
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Fail to add operation" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
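+// Usage sketch (illustrative only): an ADD operation takes the two tensor
+// operands plus an INT32 operand holding the FuseCode, and produces one
+// output. Every index refers to an operand added earlier with
+// ANeuralNetworksModel_addOperand.
+//
+//   uint32_t add_inputs[3] = {lhs_index, rhs_index, activation_index};
+//   uint32_t add_outputs[1] = {sum_index};
+//   ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, add_inputs,
+//                                     1, add_outputs);
+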
+int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
+ ANeuralNetworksOperationTypeEx type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs)
+{
+ if ((model == nullptr) || (inputs == nullptr) || (outputs == nullptr))
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Incorrect null pointer parameter(s)" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (model->isFinished())
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ const ANeuralNetworksOperationTypeEx FIRST_OPERATION = ANEURALNETWORKS_CAST_EX;
+ const ANeuralNetworksOperationTypeEx LAST_OPERATION = ANEURALNETWORKS_LESS_EX;
+ if ((type < FIRST_OPERATION) || (type > LAST_OPERATION))
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Invalid operation type" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ for (uint32_t i = 0; i < outputCount; i++)
+ {
+ if (model->isUsageSet(outputs[i]))
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Already set output operand" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ if (!model->addOperationEx(type, inputCount, inputs, outputCount, outputs))
+ {
+ VERBOSE(NNAPI::Model) << "addOperation: Fail to add operation" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
+int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel *model, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs)
+{
+ if ((model == nullptr) || (inputs == nullptr) || (outputs == nullptr))
+ {
+ VERBOSE(NNAPI::Model) << "identifyInputsAndOutputs: Incorrect null pointer parameter(s)"
+ << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (model->isFinished())
+ {
+ VERBOSE(NNAPI::Model) << "identifyInputsAndOutputs: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ for (uint32_t n = 0; n < inputCount; ++n)
+ {
+ uint32_t ind = inputs[n];
+ if (model->isUsageSet(ind))
+ {
+ VERBOSE(NNAPI::Model) << "identifyInputsAndOutputs: Already set input operand" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!model->addModelInput(ind))
+ {
+ VERBOSE(NNAPI::Model) << "identifyInputsAndOutputs: Fail to add input" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ for (uint32_t n = 0; n < outputCount; ++n)
+ {
+ uint32_t ind = outputs[n];
+
+ if (!model->isOperationOutput(ind))
+ {
+ VERBOSE(NNAPI::Model) << "identifyInputsAndOutputs: Need to set output operand" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+
+ if (!model->addModelOutput(ind))
+ {
+ VERBOSE(NNAPI::Model) << "identifyInputsAndOutputs: Fail to add output" << std::endl;
+ return ANEURALNETWORKS_BAD_DATA;
+ }
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
+
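+// Usage sketch (illustrative only): the indices passed here become the model's
+// external interface; note that every declared output must already be produced
+// by some operation.
+//
+//   uint32_t model_inputs[1] = {input_index};
+//   uint32_t model_outputs[1] = {sum_index};
+//   ANeuralNetworksModel_identifyInputsAndOutputs(model, 1, model_inputs, 1,
+//                                                 model_outputs);
+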
+int ANeuralNetworksModel_finish(ANeuralNetworksModel *model)
+{
+ if (model == nullptr)
+ {
+ VERBOSE(NNAPI::Model) << "finish: Incorrect null pointer parameter" << std::endl;
+ return ANEURALNETWORKS_UNEXPECTED_NULL;
+ }
+
+ if (model->isFinished())
+ {
+ VERBOSE(NNAPI::Model) << "finish: Already finished" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ if (!model->finish())
+ {
+ VERBOSE(NNAPI::Model) << "finish: Fail to generate internal graph" << std::endl;
+ return ANEURALNETWORKS_BAD_STATE;
+ }
+
+ return ANEURALNETWORKS_NO_ERROR;
+}
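+
+// Putting it together (illustrative only), the call order that the state
+// checks above enforce is:
+//
+//   ANeuralNetworksModel *model = nullptr;
+//   ANeuralNetworksModel_create(&model);
+//   // ... addOperand / setOperandValue / addOperation calls ...
+//   ANeuralNetworksModel_identifyInputsAndOutputs(model, 1, ins, 1, outs);
+//   ANeuralNetworksModel_finish(model); // any later mutation fails with
+//                                       // ANEURALNETWORKS_BAD_STATE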
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.cc b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.cc
new file mode 100644
index 000000000..96a3e918e
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ANeuralNetworksCompilation.h"
+
+#include "util/logging.h"
+
+ANeuralNetworksCompilation::ANeuralNetworksCompilation(
+ const std::shared_ptr<neurun::graph::Graph> &model) noexcept
+ : _compiler{new neurun::compiler::Compiler{model}}
+{
+ // DO NOTHING
+}
+
+bool ANeuralNetworksCompilation::finish() noexcept
+{
+ try
+ {
+ _compiler->compile();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
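+
+// Usage sketch (illustrative only): this wrapper backs the public NNAPI
+// compilation entry points, roughly:
+//
+//   ANeuralNetworksCompilation *compilation = nullptr;
+//   ANeuralNetworksCompilation_create(model, &compilation);
+//   ANeuralNetworksCompilation_finish(compilation); // drives finish() above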
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.h b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.h
new file mode 100644
index 000000000..7a4ee98f9
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksCompilation.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __COMPILATION_H__
+#define __COMPILATION_H__
+
+#include "compiler/Compiler.h"
+#include "graph/Graph.h"
+#include "exec/IExecutor.h"
+
+struct ANeuralNetworksCompilation
+{
+public:
+ ANeuralNetworksCompilation(const std::shared_ptr<neurun::graph::Graph> &graph) noexcept;
+
+public:
+ bool finish() noexcept;
+
+ neurun::compiler::State state(void) noexcept { return _compiler->state(); }
+ void publish(std::shared_ptr<neurun::exec::IExecutor> &executor) noexcept
+ {
+ _compiler->release(executor);
+ }
+
+private:
+ std::shared_ptr<neurun::compiler::Compiler> _compiler;
+};
+
+#endif // __COMPILATION_H__
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.cc b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.cc
new file mode 100644
index 000000000..b09f9abe6
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ANeuralNetworksEvent.h"
+
+#include "exec/Execution.h"
+#include "util/logging.h"
+
+ANeuralNetworksEvent::ANeuralNetworksEvent(
+ const std::shared_ptr<neurun::exec::Execution> &execution)
+ : _execution{execution}
+{
+ // DO NOTHING
+}
+
+bool ANeuralNetworksEvent::waitFinish(void) noexcept
+{
+ try
+ {
+ _execution->waitFinish();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
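+
+// Usage sketch (illustrative only): waitFinish() backs the public blocking
+// wait on an asynchronous execution, roughly:
+//
+//   ANeuralNetworksEvent *event = nullptr;
+//   ANeuralNetworksExecution_startCompute(execution, &event);
+//   ANeuralNetworksEvent_wait(event); // blocks via waitFinish()
+//   ANeuralNetworksEvent_free(event);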
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.h b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.h
new file mode 100644
index 000000000..e499bab77
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksEvent.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __EVENT_H__
+#define __EVENT_H__
+
+#include <NeuralNetworks.h>
+
+#include <memory>
+
+namespace neurun
+{
+namespace exec
+{
+class Execution;
+} // namespace exec
+} // namespace neurun
+
+struct ANeuralNetworksEvent
+{
+public:
+ ANeuralNetworksEvent(const std::shared_ptr<neurun::exec::Execution> &execution);
+
+public:
+ bool waitFinish(void) noexcept;
+
+private:
+ const std::shared_ptr<neurun::exec::Execution> _execution;
+};
+
+#endif // __EVENT_H__
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.cc b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.cc
new file mode 100644
index 000000000..ee621e31d
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.cc
@@ -0,0 +1,190 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ANeuralNetworksExecution.h"
+#include "NNAPIConvert.h"
+#include "util/logging.h"
+
+const neurun::model::OperandIndex
+ANeuralNetworksExecution::getInputOperandIndex(int32_t index) noexcept
+{
+ if (index < 0)
+ {
+ // Negative index: return invalid index
+ return neurun::model::OperandIndex{};
+ }
+
+ uint32_t cast_index = static_cast<uint32_t>(index);
+ if (cast_index >= _execution->model().inputs.size())
+ {
+ // Return invalid index
+ return neurun::model::OperandIndex{};
+ }
+
+ neurun::model::IOIndex input_index{cast_index};
+ const auto operand_index = _execution->model().inputs.at(input_index);
+ return operand_index;
+}
+
+const neurun::model::OperandIndex
+ANeuralNetworksExecution::getOutputOperandIndex(int32_t index) noexcept
+{
+ if (index < 0)
+ {
+ // Negative index: return invalid index
+ return neurun::model::OperandIndex{};
+ }
+
+ uint32_t cast_index = static_cast<uint32_t>(index);
+ if (cast_index >= _execution->model().outputs.size())
+ {
+ // Return invalid index
+ return neurun::model::OperandIndex{};
+ }
+
+ neurun::model::IOIndex output_index{cast_index};
+ const auto operand_index = _execution->model().outputs.at(output_index);
+ return operand_index;
+}
+
+bool ANeuralNetworksExecution::compareDataType(const ANeuralNetworksOperandType *type,
+ const neurun::model::OperandIndex index) noexcept
+{
+ try
+ {
+ const auto operand_type = _execution->model().operands.at(index).typeInfo();
+ const auto typeInfo = NNAPIConvert::getTypeInfo(type);
+
+ if (operand_type != typeInfo)
+ {
+ // Data type mismatch
+ return false;
+ }
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksExecution::compareShape(const ANeuralNetworksOperandType *type,
+ const neurun::model::OperandIndex index) noexcept
+{
+ // Passed shape should be specified
+ if (haveUnspecifiedDims(index))
+ {
+ return false;
+ }
+
+ const auto &operand_shape = _execution->model().operands.at(index).shape();
+ const auto &shape_from_type = NNAPIConvert::getShape(type);
+
+ return operand_shape == shape_from_type;
+}
+
+bool ANeuralNetworksExecution::haveUnspecifiedDims(const neurun::model::OperandIndex index) noexcept
+{
+ const auto operand_shape = _execution->model().operands.at(index).shape();
+
+ return operand_shape.num_elements() == 0;
+}
+
+size_t ANeuralNetworksExecution::getOperandSize(const neurun::model::OperandIndex index) noexcept
+{
+ try
+ {
+ return _execution->model().operands.at(index).operandSize();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return 0;
+ }
+}
+
+bool ANeuralNetworksExecution::setInput(uint32_t index, const ANeuralNetworksOperandType *type,
+ const void *buffer, size_t length) noexcept
+{
+ try
+ {
+ neurun::model::IOIndex input_index{index};
+ const auto operand_index = getInputOperandIndex(index);
+
+ const auto type_info = _execution->model().operands.at(operand_index).typeInfo();
+ const auto shape = ((type != nullptr) ? NNAPIConvert::getShape(type)
+ : _execution->model().operands.at(operand_index).shape());
+
+ _execution->setInput(input_index, type_info, shape, buffer, length);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksExecution::setOutput(uint32_t index, const ANeuralNetworksOperandType *type,
+ void *buffer, size_t length) noexcept
+{
+ try
+ {
+ neurun::model::IOIndex output_index{index};
+ const auto operand_index = getOutputOperandIndex(index);
+
+ const auto type_info = _execution->model().operands.at(operand_index).typeInfo();
+ const auto shape = ((type != nullptr) ? NNAPIConvert::getShape(type)
+ : _execution->model().operands.at(operand_index).shape());
+
+ _execution->setOutput(output_index, type_info, shape, buffer, length);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksExecution::startExecute(void) noexcept
+{
+ try
+ {
+ _execution->startExecute();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+const std::shared_ptr<neurun::exec::Execution> ANeuralNetworksExecution::instance(void) noexcept
+{
+ return _execution;
+}
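+
+// Usage sketch (illustrative only): passing nullptr as the type keeps the
+// operand type recorded in the model, as setInput()/setOutput() above show;
+// a non-null ANeuralNetworksOperandType overrides the shape instead.
+//
+//   ANeuralNetworksExecution *execution = nullptr;
+//   ANeuralNetworksExecution_create(compilation, &execution);
+//   ANeuralNetworksExecution_setInput(execution, 0, nullptr, in_buf, in_bytes);
+//   ANeuralNetworksExecution_setOutput(execution, 0, nullptr, out_buf, out_bytes);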
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.h b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.h
new file mode 100644
index 000000000..946a12db8
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksExecution.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __EXECUTION_H__
+#define __EXECUTION_H__
+
+#include <NeuralNetworks.h>
+
+#include <memory>
+
+#include "exec/Execution.h"
+
+struct ANeuralNetworksExecution
+{
+public:
+ ANeuralNetworksExecution(const std::shared_ptr<neurun::exec::IExecutor> &executor)
+ : _execution{std::make_shared<neurun::exec::Execution>(executor)}
+ {
+ // DO NOTHING
+ }
+
+public:
+ bool setInput(uint32_t index, const ANeuralNetworksOperandType *type, const void *buffer,
+ size_t length) noexcept;
+ bool setOutput(uint32_t index, const ANeuralNetworksOperandType *type, void *buffer,
+ size_t length) noexcept;
+ bool startExecute(void) noexcept;
+
+ const neurun::model::OperandIndex getInputOperandIndex(int32_t index) noexcept;
+ const neurun::model::OperandIndex getOutputOperandIndex(int32_t index) noexcept;
+ bool compareDataType(const ANeuralNetworksOperandType *type,
+ const neurun::model::OperandIndex index) noexcept;
+ bool compareShape(const ANeuralNetworksOperandType *type,
+ const neurun::model::OperandIndex index) noexcept;
+ bool haveUnspecifiedDims(const neurun::model::OperandIndex index) noexcept;
+ size_t getOperandSize(const neurun::model::OperandIndex index) noexcept;
+ const std::shared_ptr<neurun::exec::Execution> instance(void) noexcept;
+
+private:
+ std::shared_ptr<neurun::exec::Execution> _execution;
+};
+
+#endif // __EXECUTION_H__
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.cc b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.cc
new file mode 100644
index 000000000..9cc100585
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <NeuralNetworks.h>
+#include <sys/mman.h>
+
+#include "ANeuralNetworksMemory.h"
+
+//
+// ANeuralNetworksMemory
+//
+ANeuralNetworksMemory::ANeuralNetworksMemory(size_t size, int protect, int fd, size_t offset)
+{
+ _base = reinterpret_cast<uint8_t *>(mmap(nullptr, size, protect, MAP_PRIVATE, fd, offset));
+ _size = size;
+}
+
+ANeuralNetworksMemory::~ANeuralNetworksMemory() { munmap(reinterpret_cast<void *>(_base), _size); }
+
+bool ANeuralNetworksMemory::vaildAccess(size_t offset, size_t length) const
+{
+ if ((offset >= _size) || (length > _size))
+ {
+ return false;
+ }
+
+ // A region ending exactly at _size is still inside the mapping
+ if ((offset + length) > _size)
+ {
+ return false;
+ }
+
+ return true;
+}
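+
+// Worked example (illustrative): with _size == 8, an access at offset == 4
+// with length == 4 covers bytes [4, 8) and is accepted since 4 + 4 <= 8,
+// while offset == 5 with length == 4 would extend one byte past the mapping
+// and is rejected.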
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.h b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.h
new file mode 100644
index 000000000..48a1bc5fc
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksMemory.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MEMORY_H__
+#define __MEMORY_H__
+
+#include <cstdint>
+
+struct ANeuralNetworksMemory
+{
+public:
+ ANeuralNetworksMemory(size_t size, int protect, int fd, size_t offset);
+ ~ANeuralNetworksMemory();
+
+public:
+ size_t size(void) const { return _size; }
+ uint8_t *base(void) { return _base; }
+ uint8_t *base(void) const { return _base; }
+ bool vaildAccess(size_t offset, size_t length) const;
+
+private:
+ size_t _size;
+ uint8_t *_base;
+};
+
+#endif // __MEMORY_H__
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc
new file mode 100644
index 000000000..e3bb29161
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc
@@ -0,0 +1,257 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ANeuralNetworksModel.h"
+#include "OperationFactory.h"
+#include "NNAPIConvert.h"
+
+#include "model/Operations.Include.h"
+#include "util/logging.h"
+
+#include "cpp14/memory.h"
+
+//
+// ANeuralNetworksModel
+//
+ANeuralNetworksModel::ANeuralNetworksModel() noexcept
+ : _model{new neurun::model::Model}, _graph{nullptr}, _optional_operands{}, _operand_usages{}
+{
+ // DO NOTHING
+}
+
+bool ANeuralNetworksModel::addOperand(const ANeuralNetworksOperandType *type) noexcept
+{
+ try
+ {
+ const auto shape = NNAPIConvert::getShape(type);
+ const auto typeInfo = NNAPIConvert::getTypeInfo(type);
+ _model->operands.emplace(shape, typeInfo);
+ _operand_usages.emplace_back(OperandUsage::NOT_DEFINED);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::setOperandValue(uint32_t index, const void *buffer, size_t length,
+ bool optional, bool copy) noexcept
+{
+ const neurun::model::OperandIndex ind{index};
+
+ try
+ {
+ _operand_usages[index] = OperandUsage::CONSTANT;
+
+ // Leave operands.at(ind).data()->base() as nullptr for an optional operand;
+ // it will be filled in when the model is finished
+ if (optional)
+ {
+ setOptionalOperand(ind);
+ }
+
+ using ::neurun::model::CachedData;
+ using ::neurun::model::ExternalData;
+ if (copy)
+ {
+ _model->operands.at(ind).data(
+ nnfw::cpp14::make_unique<CachedData>(reinterpret_cast<const uint8_t *>(buffer), length));
+ }
+ else
+ {
+ _model->operands.at(ind).data(nnfw::cpp14::make_unique<ExternalData>(
+ reinterpret_cast<const uint8_t *>(buffer), length));
+ }
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::addOperation(ANeuralNetworksOperationType type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) noexcept
+{
+ try
+ {
+ for (uint32_t i = 0; i < outputCount; i++)
+ {
+ _operand_usages[outputs[i]] = OperandUsage::OPERATION_OUTPUT;
+ }
+
+ auto &factory = OperationFactory::instance();
+ OperationFactory::Param param{inputCount, inputs, outputCount, outputs};
+
+ auto node = factory.create(type, param, _model->operands);
+ _model->operations.push(std::unique_ptr<neurun::model::Operation>{node});
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::addOperationEx(ANeuralNetworksOperationTypeEx type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) noexcept
+{
+ try
+ {
+ for (uint32_t i = 0; i < outputCount; i++)
+ {
+ _operand_usages[outputs[i]] = OperandUsage::OPERATION_OUTPUT;
+ }
+
+ auto &factory = OperationFactory::instance();
+ OperationFactory::Param param{inputCount, inputs, outputCount, outputs};
+
+ auto node = factory.create(type, param, _model->operands);
+ _model->operations.push(std::unique_ptr<neurun::model::Operation>{node});
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::addModelInput(uint32_t index) noexcept
+{
+ try
+ {
+ _operand_usages[index] = OperandUsage::MODEL_INPUT;
+
+ const neurun::model::OperandIndex ind{index};
+ _model->inputs.append(ind);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::addModelOutput(uint32_t index) noexcept
+{
+ try
+ {
+ const neurun::model::OperandIndex ind{index};
+
+ // Duplicated output is not allowed
+ if (_model->outputs.contains(ind))
+ {
+ return false;
+ }
+
+ _model->outputs.append(ind);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::finish() noexcept
+{
+ try
+ {
+ fillOptionalOperand();
+
+ _graph = std::make_shared<neurun::graph::Graph>(std::move(_model));
+
+ _graph->finishBuilding();
+
+ _operand_usages.clear();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << '\n';
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::isFinished() noexcept { return _graph != nullptr; }
+
+bool ANeuralNetworksModel::isExistOperand(uint32_t index) noexcept
+{
+ return _model->operands.exist(neurun::model::OperandIndex{index});
+}
+
+size_t ANeuralNetworksModel::operandSize(uint32_t index) noexcept
+{
+ try
+ {
+ return _model->operands.at(neurun::model::OperandIndex{index}).operandSize();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << '\n';
+
+ return 0;
+ }
+}
+
+bool ANeuralNetworksModel::isUsageSet(uint32_t index) noexcept
+{
+ return (_operand_usages[index] != OperandUsage::NOT_DEFINED);
+}
+
+bool ANeuralNetworksModel::isOperationOutput(uint32_t index) noexcept
+{
+ return (_operand_usages[index] == OperandUsage::OPERATION_OUTPUT);
+}
+
+void ANeuralNetworksModel::setOptionalOperand(const neurun::model::OperandIndex idx)
+{
+ _optional_operands.insert(idx);
+}
+
+void ANeuralNetworksModel::fillOptionalOperand(void)
+{
+ _model->operations.iterate(
+ [&](const ::neurun::model::OperationIndex &, ::neurun::model::Operation &node) {
+ for (auto input : node.getInputs())
+ {
+ // TODO fill default value for optional operands
+ if (_optional_operands.find(input) != _optional_operands.end())
+ {
+ throw std::runtime_error{"Optional operand is not supported yet"};
+ }
+ }
+ });
+}
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.h b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.h
new file mode 100644
index 000000000..1cd1e2699
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MODEL_H__
+#define __MODEL_H__
+
+#include <unordered_set>
+#include <NeuralNetworks.h>
+#include <NeuralNetworksEx.h>
+
+#include "graph/Graph.h"
+
+struct ANeuralNetworksModel
+{
+public:
+ enum class OperandUsage
+ {
+ NOT_DEFINED = 0,
+ MODEL_INPUT,
+ CONSTANT,
+ OPERATION_OUTPUT,
+ };
+
+public:
+ ANeuralNetworksModel() noexcept;
+
+public:
+ bool addOperand(const ANeuralNetworksOperandType *type) noexcept;
+ bool setOperandValue(uint32_t index, const void *buffer, size_t length, bool optional = false,
+ bool copy = false) noexcept;
+ bool addOperation(ANeuralNetworksOperationType type, uint32_t inputCount, const uint32_t *inputs,
+ uint32_t outputCount, const uint32_t *outputs) noexcept;
+ bool addOperationEx(ANeuralNetworksOperationTypeEx type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) noexcept;
+ bool addModelInput(uint32_t index) noexcept;
+ bool addModelOutput(uint32_t index) noexcept;
+ bool finish() noexcept;
+
+ neurun::graph::Graph &deref(void) { return *_graph; }
+ bool isFinished() noexcept;
+ bool isExistOperand(uint32_t index) noexcept;
+ size_t operandSize(uint32_t index) noexcept;
+ bool isUsageSet(uint32_t index) noexcept;
+ bool isOperationOutput(uint32_t index) noexcept;
+ void release(std::shared_ptr<neurun::graph::Graph> &graph) { graph = _graph; }
+
+private:
+ void setOptionalOperand(const neurun::model::OperandIndex idx);
+ void fillOptionalOperand(void);
+
+private:
+ std::unique_ptr<neurun::model::Model> _model;
+ std::shared_ptr<neurun::graph::Graph> _graph;
+ std::unordered_set<neurun::model::OperandIndex> _optional_operands;
+ std::vector<OperandUsage> _operand_usages;
+};
+
+#endif // __MODEL_H__
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.cc b/runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.cc
new file mode 100644
index 000000000..a44c279cc
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.cc
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "NNAPIConvert.h"
+
+#include <numeric>
+
+using namespace ::neurun::model;
+
+DataType NNAPIConvert::getDataType(OperandCode type)
+{
+ switch (type)
+ {
+ case ANEURALNETWORKS_FLOAT32:
+ case ANEURALNETWORKS_TENSOR_FLOAT32:
+ return DataType::FLOAT32;
+ case ANEURALNETWORKS_INT32:
+ case ANEURALNETWORKS_TENSOR_INT32:
+ return DataType::INT32;
+ case ANEURALNETWORKS_UINT32:
+ return DataType::UINT32;
+ case ANEURALNETWORKS_TENSOR_QUANT8_ASYMM:
+ return DataType::QUANT8_ASYMM;
+ default:
+ throw std::runtime_error("Unsupported type");
+ }
+}
+
+TypeInfo NNAPIConvert::getTypeInfo(const ANeuralNetworksOperandType *type)
+{
+ return TypeInfo(getDataType((OperandCode)(type->type)), type->scale, type->zeroPoint);
+}
+
+Shape NNAPIConvert::getShape(const ANeuralNetworksOperandType *type)
+{
+ Shape shape(type->dimensionCount);
+
+ for (uint32_t axis = 0; axis < type->dimensionCount; ++axis)
+ {
+ shape.dim(axis) = type->dimensions[axis];
+ }
+
+ return shape;
+}
+
+size_t NNAPIConvert::calculateSizeFromType(const ANeuralNetworksOperandType *type)
+{
+ auto shape = getShape(type);
+ auto data_type = getDataType((OperandCode)(type->type));
+
+ return shape.num_elements() * sizeOfDataType(data_type);
+}
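+
+// Worked example (illustrative): an ANEURALNETWORKS_TENSOR_FLOAT32 operand
+// with dimensions {2, 3} has 2 * 3 = 6 elements of 4 bytes each, so
+// calculateSizeFromType() returns 24.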
+
+Activation NNAPIConvert::getFusedActivation(FuseCode act)
+{
+ switch (act)
+ {
+ case ANEURALNETWORKS_FUSED_NONE:
+ return Activation::NONE;
+ case ANEURALNETWORKS_FUSED_RELU:
+ return Activation::RELU;
+ case ANEURALNETWORKS_FUSED_RELU1:
+ return Activation::RELU1;
+ case ANEURALNETWORKS_FUSED_RELU6:
+ return Activation::RELU6;
+ default:
+ throw std::runtime_error("Unsupported activation type");
+ }
+}
+
+PaddingType NNAPIConvert::getPaddingType(PaddingCode type)
+{
+ switch (type)
+ {
+ case ANEURALNETWORKS_PADDING_SAME:
+ return PaddingType::SAME;
+ case ANEURALNETWORKS_PADDING_VALID:
+ return PaddingType::VALID;
+ default:
+ throw std::runtime_error("Unsupported type");
+ }
+}
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.h b/runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.h
new file mode 100644
index 000000000..bd7fc6cdd
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/NNAPIConvert.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file NNAPIConvert.h
+ * @brief This file contains converter(s)\n
+ * from NNAPI frontend's struct to neurun's internal struct
+ */
+#ifndef __NEURUN_NNAPI_CONVERT_H__
+#define __NEURUN_NNAPI_CONVERT_H__
+
+#include <NeuralNetworks.h>
+
+#include <model/TypeInfo.h>
+#include <model/Shape.h>
+#include <model/InternalType.h>
+
+class NNAPIConvert
+{
+
+public:
+ /**
+ * @brief Convert data type from NNAPI to internal data type
+ * @param[in] type NNAPI's data type
+ * @return neurun's internal data type
+ * @note Currently neurun::model::DataType shares the same enum values\n
+ *       with OperandCode in NeuralNetworks.h.\n
+ *       If the two ever diverge, this mapping function must be fixed.
+ */
+ static ::neurun::model::DataType getDataType(OperandCode type);
+
+ /**
+ * @brief Convert operand type info from NNAPI to internal operand type info
+ * @param[in] type NNAPI's operand type
+ * @return neurun's internal operand type info
+ */
+ static ::neurun::model::TypeInfo getTypeInfo(const ANeuralNetworksOperandType *type);
+
+ /**
+ * @brief Convert operand shape info from NNAPI to internal operand shape
+ * @param[in] type NNAPI's operand type
+ * @return neurun's internal operand shape
+ */
+ static ::neurun::model::Shape getShape(const ANeuralNetworksOperandType *type);
+
+ /**
+ * @brief Calculate operand size from NNAPI type
+ * @param[in] type NNAPI's operand type
+ * @return Operand size
+ */
+ static size_t calculateSizeFromType(const ANeuralNetworksOperandType *type);
+
+ /**
+ * @brief Convert NNAPI FuseCode to internal activation type
+ * @param[in] act NNAPI's FuseCode type
+ * @return neurun's internal activation type
+ */
+ static ::neurun::model::Activation getFusedActivation(FuseCode act);
+
+ /**
+ * @brief Convert NNAPI PaddingCode to internal padding type
+ * @param[in] type NNAPI's PaddingCode type
+ * @return neurun's internal padding type
+ */
+ static ::neurun::model::PaddingType getPaddingType(PaddingCode type);
+};
+
+#endif // __NEURUN_NNAPI_CONVERT_H__
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.cc b/runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.cc
new file mode 100644
index 000000000..cd9d869a0
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.cc
@@ -0,0 +1,1524 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "OperationFactory.h"
+#include "NNAPIConvert.h"
+
+#include <model/Operations.Include.h>
+
+namespace
+{
+using namespace neurun::model;
+
+void replaceDataType(Operands &operands, const OperandIndex &index, const DataType &type)
+{
+ assert(operands.exist(index));
+ operands.at(index).type(type);
+}
+
+ExplicitPadding makeExplicitPadding(Operands &operands, const OperandIndex &left_index,
+ const OperandIndex &right_index, const OperandIndex &top_index,
+ const OperandIndex &bottom_index)
+{
+ auto left = operands.at(left_index).asScalar<int32_t>();
+ auto right = operands.at(right_index).asScalar<int32_t>();
+ auto top = operands.at(top_index).asScalar<int32_t>();
+ auto bottom = operands.at(bottom_index).asScalar<int32_t>();
+
+ if (left < 0 || right < 0 || top < 0 || bottom < 0)
+ {
+ throw std::runtime_error{"Cannot handle negative explicit padding value"};
+ }
+
+ ExplicitPadding param;
+ param.left = static_cast<uint32_t>(left);
+ param.right = static_cast<uint32_t>(right);
+ param.top = static_cast<uint32_t>(top);
+ param.bottom = static_cast<uint32_t>(bottom);
+
+ return param;
+}
+
+Stride makeStride(Operands &operands, const OperandIndex &horizontal_index,
+ const OperandIndex &vertical_index)
+{
+ auto horizontal = operands.at(horizontal_index).asScalar<int32_t>();
+ auto vertical = operands.at(vertical_index).asScalar<int32_t>();
+
+ if (vertical < 0 || horizontal < 0)
+ {
+ throw std::runtime_error{"Cannot handle negative stride value"};
+ }
+
+ Stride stride;
+ stride.horizontal = static_cast<uint32_t>(horizontal);
+ stride.vertical = static_cast<uint32_t>(vertical);
+
+ return stride;
+}
+
+uint32_t getUint32Scalar(Operands &operands, const OperandIndex index)
+{
+ auto int32_value = operands.at(index).asScalar<int32_t>();
+ if (int32_value < 0)
+ {
+ throw std::runtime_error{"Cannot handle negative value"};
+ }
+
+ return static_cast<uint32_t>(int32_value);
+}
+
+} // namespace
+
+OperationFactory &OperationFactory::instance()
+{
+ static OperationFactory factory;
+ return factory;
+}
+
+OperationFactory::OperationFactory()
+{
+ using namespace neurun::model;
+
+ _map[ANEURALNETWORKS_DEPTHWISE_CONV_2D] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ assert((init_param.input_count == 8 || init_param.input_count == 11) &&
+ init_param.output_count == 1);
+
+ // In common
+ // 0 -> IFM Tensor Index
+ // 1 -> Kernel Tensor Index
+ // 2 -> Bias Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::DepthwiseConv2DNode::Param param;
+ if (init_param.input_count == 8)
+ {
+ // Implicit Padding case
+ // Each input should be interpreted as follows:
+ //
+ // 3 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
+ // 4 -> Stride (width) Index
+ // 5 -> Stride (height) Index
+ // 6 -> Depthwise multiplier
+ // 7 -> Activation Index
+
+ const auto padding_index = OperandIndex{init_param.inputs[3]};
+ const auto hstride_index = OperandIndex{init_param.inputs[4]};
+ const auto vstride_index = OperandIndex{init_param.inputs[5]};
+ const auto multiplier_index = OperandIndex{init_param.inputs[6]};
+ const auto activation_index = OperandIndex{init_param.inputs[7]};
+
+ param.padding.type =
+ NNAPIConvert::getPaddingType(operands.at(padding_index).asScalar<PaddingCode>());
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.multiplier = getUint32Scalar(operands, multiplier_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+ else
+ {
+ // Explicit Padding case
+ // Each input should be interpreted as follows:
+ //
+ // 3 -> Padding On the Left
+ // 4 -> Padding On the Right
+ // 5 -> Padding On the Top
+ // 6 -> Padding On the Bottom
+ // 7 -> Stride (width) Index
+ // 8 -> Stride (height) Index
+ // 9 -> Depthwise multiplier
+ // 10 -> Activation Index
+
+ const auto padding_left_index = OperandIndex{init_param.inputs[3]};
+ const auto padding_right_index = OperandIndex{init_param.inputs[4]};
+ const auto padding_top_index = OperandIndex{init_param.inputs[5]};
+ const auto padding_bottom_index = OperandIndex{init_param.inputs[6]};
+ const auto hstride_index = OperandIndex{init_param.inputs[7]};
+ const auto vstride_index = OperandIndex{init_param.inputs[8]};
+ const auto multiplier_index = OperandIndex{init_param.inputs[9]};
+ const auto activation_index = OperandIndex{init_param.inputs[10]};
+
+ param.padding.type = PaddingType::EXPLICIT;
+ param.padding.param = makeExplicitPadding(operands, padding_left_index, padding_right_index,
+ padding_top_index, padding_bottom_index);
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.multiplier = getUint32Scalar(operands, multiplier_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+
+ return new operation::DepthwiseConv2DNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_MAX_POOL_2D] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ assert(init_param.input_count == 7 || init_param.input_count == 10);
+ assert(init_param.output_count == 1);
+
+ // In common
+ // 0 -> IFM Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::MaxPool2DNode::Param param;
+ if (init_param.input_count == 7) // support implicit padding
+ {
+ // Each input should be interpreted as follows:
+ //
+ // 1 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
+ // 2 -> Horizontal (over width) Stride Index
+ // 3 -> Vertical (over height) Stride Index
+ // 4 -> Filter Width Index
+ // 5 -> Filter Height Index
+ // 6 -> FuseCode (activation) Index
+
+ const auto padding_index = OperandIndex{init_param.inputs[1]};
+ const auto hstride_index = OperandIndex{init_param.inputs[2]};
+ const auto vstride_index = OperandIndex{init_param.inputs[3]};
+ const auto kw_index = OperandIndex{init_param.inputs[4]};
+ const auto kh_index = OperandIndex{init_param.inputs[5]};
+ const auto activation_index = OperandIndex{init_param.inputs[6]};
+
+ param.padding.type =
+ NNAPIConvert::getPaddingType(operands.at(padding_index).asScalar<PaddingCode>());
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.kw = getUint32Scalar(operands, kw_index);
+ param.kh = getUint32Scalar(operands, kh_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+ else if (init_param.input_count == 10) // support explicit padding
+ {
+ // Each input should be interpreted as follows:
+ //
+ // 1 -> Padding_left index
+ // 2 -> Padding_right index
+ // 3 -> Padding_top index
+ // 4 -> Padding_bottom index
+ // 5 -> Horizontal (over width) Stride Index
+ // 6 -> Vertical (over height) Stride Index
+ // 7 -> Filter Width Index
+ // 8 -> Filter Height Index
+ // 9 -> FuseCode (activation) Index
+
+ const auto padding_left_index = OperandIndex{init_param.inputs[1]};
+ const auto padding_right_index = OperandIndex{init_param.inputs[2]};
+ const auto padding_top_index = OperandIndex{init_param.inputs[3]};
+ const auto padding_bottom_index = OperandIndex{init_param.inputs[4]};
+ const auto hstride_index = OperandIndex{init_param.inputs[5]};
+ const auto vstride_index = OperandIndex{init_param.inputs[6]};
+ const auto kw_index = OperandIndex{init_param.inputs[7]};
+ const auto kh_index = OperandIndex{init_param.inputs[8]};
+ const auto activation_index = OperandIndex{init_param.inputs[9]};
+
+ param.padding.type = PaddingType::EXPLICIT;
+ param.padding.param = makeExplicitPadding(operands, padding_left_index, padding_right_index,
+ padding_top_index, padding_bottom_index);
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.kw = getUint32Scalar(operands, kw_index);
+ param.kh = getUint32Scalar(operands, kh_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+ return new operation::MaxPool2DNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_AVERAGE_POOL_2D] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ // TODO We may reuse code here for MAX_POOL_2D. Seems like these two are identical
+ assert(init_param.input_count == 7 || init_param.input_count == 10);
+ assert(init_param.output_count == 1);
+
+ // In common
+ // 0 -> IFM Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::AvgPool2DNode::Param param;
+ if (init_param.input_count == 7) // support implicit padding
+ {
+ // Each input should be interpreted as follows:
+ //
+ // 1 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
+ // 2 -> Horizontal (over width) Stride Index
+ // 3 -> Vertical (over height) Stride Index
+ // 4 -> Filter Width Index
+ // 5 -> Filter Height Index
+ // 6 -> FuseCode (activation) Index
+
+ const auto padding_index = OperandIndex{init_param.inputs[1]};
+ const auto hstride_index = OperandIndex{init_param.inputs[2]};
+ const auto vstride_index = OperandIndex{init_param.inputs[3]};
+ const auto kw_index = OperandIndex{init_param.inputs[4]};
+ const auto kh_index = OperandIndex{init_param.inputs[5]};
+ const auto activation_index = OperandIndex{init_param.inputs[6]};
+
+ param.padding.type =
+ NNAPIConvert::getPaddingType(operands.at(padding_index).asScalar<PaddingCode>());
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.kw = getUint32Scalar(operands, kw_index);
+ param.kh = getUint32Scalar(operands, kh_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+ else if (init_param.input_count == 10) // support explicit padding
+ {
+ // Each input should be interpreted as follows:
+ //
+ // 1 -> Padding_left index
+ // 2 -> Padding_right index
+ // 3 -> Padding_top index
+ // 4 -> Padding_bottom index
+ // 5 -> Horizontal (over width) Stride Index
+ // 6 -> Vertical (over height) Stride Index
+ // 7 -> Filter Width Index
+ // 8 -> Filter Height Index
+ // 9 -> FuseCode (activation) Index
+
+ const auto padding_left_index = OperandIndex{init_param.inputs[1]};
+ const auto padding_right_index = OperandIndex{init_param.inputs[2]};
+ const auto padding_top_index = OperandIndex{init_param.inputs[3]};
+ const auto padding_bottom_index = OperandIndex{init_param.inputs[4]};
+ const auto hstride_index = OperandIndex{init_param.inputs[5]};
+ const auto vstride_index = OperandIndex{init_param.inputs[6]};
+ const auto kw_index = OperandIndex{init_param.inputs[7]};
+ const auto kh_index = OperandIndex{init_param.inputs[8]};
+ const auto activation_index = OperandIndex{init_param.inputs[9]};
+
+ param.padding.type = PaddingType::EXPLICIT;
+ param.padding.param = makeExplicitPadding(operands, padding_left_index, padding_right_index,
+ padding_top_index, padding_bottom_index);
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.kw = getUint32Scalar(operands, kw_index);
+ param.kh = getUint32Scalar(operands, kh_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+
+ return new operation::AvgPool2DNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_CONCATENATION] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count >= 2); // At least one input tensor and the axis
+ assert(init_param.output_count == 1);
+
+ // When there are N + 1 inputs, each input should be interpreted as follows:
+ //
+ // [0, N) -> Input tensors
+ // N -> Axis
+ //
+
+ OperandIndexSequence inputs;
+ for (uint32_t n = 0; n < init_param.input_count - 1; ++n)
+ {
+ inputs.append(OperandIndex{init_param.inputs[n]});
+ }
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::ConcatNode::Param param;
+ const OperandIndex axis_index{init_param.inputs[init_param.input_count - 1]};
+ param.axis = operands.at(axis_index).asScalar<int32_t>();
+
+ return new operation::ConcatNode{inputs, outputs, param};
+ };
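+
+ // Worked example (illustrative): with inputs {t0, t1, t2, axis_scalar}, the
+ // first three indices name the tensors to concatenate and the trailing
+ // operand supplies the axis, e.g. an axis value of 1 concatenates along
+ // dimension 1.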
+
+ _map[ANEURALNETWORKS_RESHAPE] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> A tensor, specifying the tensor to be reshaped.
+ // 1 -> A 1-D tensor of type ANEURALNETWORKS_TENSOR_INT32, defining the shape of the output
+ // tensor
+
+ // TODO Use the second input as the shape tensor (init_param.inputs[1]);
+ // it is currently unused on the assumption that it matches the output tensor size
+ OperandIndexSequence inputs{init_param.inputs[0] /* , init_param.inputs[1] */};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ return new operation::ReshapeNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_FULLY_CONNECTED] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ assert(init_param.input_count == 4 && init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> A tensor, specifying the input.
+ // 1 -> A 2-D tensor, specifying the weights
+ // 2 -> A 1-D tensor, specifying the bias
+ // 3 -> An INT32 value, and has to be one of the FuseCode values
+
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::FullyConnectedNode::Param param;
+ const auto activation_index = OperandIndex{init_param.inputs[3]};
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+
+ return new operation::FullyConnectedNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_SOFTMAX] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> A 2-D or 4-D tensor, specifying the tensor to be reshaped.
+ // 1 -> FLOAT32 value, specifying the positive scaling factor for the exponent, beta.
+
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ const auto beta_index = OperandIndex{init_param.inputs[1]};
+
+ operation::SoftmaxNode::Param param;
+ param.beta = operands.at(beta_index).asScalar<float>();
+
+ return new operation::SoftmaxNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_CAST_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ // 0 -> input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::CastNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_CONV_2D] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ using neurun::model::operation::Conv2DNode;
+
+ // inputCount is either 7 or 10 according to the NN API specification.
+ // - Padding is implicit when inputCount is 7
+ // - Padding is explicit when inputCount is 10
+ assert(init_param.input_count == 7 || init_param.input_count == 10);
+ assert(init_param.output_count == 1);
+
+ // 0 -> IFM Tensor Index
+ // 1 -> Kernel Tensor Index
+ // 2 -> Bias Tensor Index
+
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ Conv2DNode::Param param;
+
+ if (init_param.input_count == 7) // support implicit padding
+ {
+ // Each input should be interpreted as follows:
+ //
+ // 3 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
+ // 4 -> Stride (width) Index
+ // 5 -> Stride (height) Index
+ // 6 -> Activation Index
+
+ const auto padding_index = OperandIndex{init_param.inputs[3]};
+ const auto hstride_index = OperandIndex{init_param.inputs[4]};
+ const auto vstride_index = OperandIndex{init_param.inputs[5]};
+ const auto activation_index = OperandIndex{init_param.inputs[6]};
+
+ param.padding.type =
+ NNAPIConvert::getPaddingType(operands.at(padding_index).asScalar<PaddingCode>());
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+ else if (init_param.input_count == 10) // support explicit padding
+ {
+ // Each input should be interpreted as follows:
+ //
+ // 3 -> Padding_left index
+ // 4 -> Padding_right index
+ // 5 -> Padding_top index
+ // 6 -> Padding_bottom index
+ // 7 -> Stride (width) Index
+ // 8 -> Stride (height) Index
+ // 9 -> Activation Index
+
+ const auto padding_left_index = OperandIndex{init_param.inputs[3]};
+ const auto padding_right_index = OperandIndex{init_param.inputs[4]};
+ const auto padding_top_index = OperandIndex{init_param.inputs[5]};
+ const auto padding_bottom_index = OperandIndex{init_param.inputs[6]};
+ const auto hstride_index = OperandIndex{init_param.inputs[7]};
+ const auto vstride_index = OperandIndex{init_param.inputs[8]};
+ const auto activation_index = OperandIndex{init_param.inputs[9]};
+
+ param.padding.type = PaddingType::EXPLICIT;
+ param.padding.param = makeExplicitPadding(operands, padding_left_index, padding_right_index,
+ padding_top_index, padding_bottom_index);
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+
+ return new Conv2DNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_ADD] = [](const OperationFactory::Param &init_param, Operands &operands) {
+ assert(init_param.input_count == 3);
+ assert(init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Lefthand side operand
+ // 1 -> Righthand side operand
+
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::AddNode::Param param;
+
+ const auto activation_index = OperandIndex{init_param.inputs[2]};
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+
+ return new operation::AddNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_REDUCE_SUM_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2);
+ assert(init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Reduced Axes Tensor Index
+
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::ReduceSumNode::Param param;
+
+ param.axis_index = OperandIndex{init_param.inputs[1]};
+
+ return new operation::ReduceSumNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_SUB] = [](const OperationFactory::Param &init_param, Operands &operands) {
+ assert(init_param.input_count == 3);
+ assert(init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Lefthand side operand
+ // 1 -> Righthand side operand
+
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::SubNode::Param param;
+
+ const auto activation_index = OperandIndex{init_param.inputs[2]};
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+
+ return new operation::SubNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_STRIDED_SLICE] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 7 && init_param.output_count == 1);
+
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 1 -> A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the starts of
+ // the dimensions of the input tensor to be sliced. The length must be
+ // of rank(input0).
+ // 2 -> A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the ends of
+ // the dimensions of the input tensor to be sliced. The length must be
+ // of rank(input0).
+ // 3 -> A 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32}, the strides of
+ // the dimensions of the input tensor to be sliced. The length must be
+ // of rank(input0).
+ // 4 -> An {@link ANEURALNETWORKS_INT32} scalar, begin_mask. If the ith bit
+ // of begin_mask is set, begin[i] is ignored and the fullest possible
+ // range in that dimension is used instead.
+ // 5 -> An {@link ANEURALNETWORKS_INT32} scalar, end_mask. If the ith bit of
+ // end_mask is set, end[i] is ignored and the fullest possible range in
+ // that dimension is used instead.
+ // 6 -> An {@link ANEURALNETWORKS_INT32} scalar, shrink_axis_mask. An int32
+ // mask. If the ith bit of shrink_axis_mask is set, it implies that the
+ // ith specification shrinks the dimensionality by 1. A slice of size 1
+ // starting from begin[i] in the dimension must be preserved.
+
+ operation::StridedSliceNode::Param param;
+
+ param.startData_index = OperandIndex{init_param.inputs[1]};
+ param.endData_index = OperandIndex{init_param.inputs[2]};
+ param.stridesData_index = OperandIndex{init_param.inputs[3]};
+ param.beginMask_index = OperandIndex{init_param.inputs[4]};
+ param.endMask_index = OperandIndex{init_param.inputs[5]};
+ param.shrinkAxisMask_index = OperandIndex{init_param.inputs[6]};
+
+ return new operation::StridedSliceNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_TRANSPOSE] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ // TODO make this work with init_param.input_count == 1 (when permutation vector is optional)
+
+ // Inputs
+ // 0: An n-D tensor, specifying the tensor to be transposed.
+ // 1: An optional 1-D Tensor of {@link ANEURALNETWORKS_TENSOR_INT32},
+ // the permutation of the dimensions of the input tensor.
+ // The returned tensor's dimension i corresponds to the input dimension
+ // perm[i]. If perm is not given, it is set to (n-1...0), where n is the
+ // rank of the input tensor. Hence by default, this operation performs a
+ // regular matrix transpose on 2-D input Tensors.
+ assert(init_param.input_count == 2);
+ assert(init_param.output_count == 1);
+
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ operation::TransposeNode::Param param;
+ param.perm = OperandIndex{init_param.inputs[1]};
+
+ return new operation::TransposeNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_MUL] = [](const OperationFactory::Param &init_param, Operands &operands) {
+ assert(init_param.input_count == 3 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> LHS Tensor Index
+ // 1 -> RHS Tensor Index
+ // 2 -> Activation Index
+
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ operation::MulNode::Param param;
+
+ const auto activation_index = OperandIndex{init_param.inputs[2]};
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+
+ return new operation::MulNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_SQUEEZE] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 || init_param.input_count == 2);
+ assert(init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> An n-D tensor, the tensor to be squeezed.
+ // 1 -> An optional 1-D tensor of ANEURALNETWORKS_TENSOR_INT32. The dimensions to squeeze.
+ // If specified only squeezes the dimensions listed. Otherwise, squeezes all dimensions.
+ // The dimension index starts at 0. An error must be reported if squeezing a dimension that
+ // is not 1.
+
+ // Add mandatory input index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ // Add dims index if specified
+ operation::SqueezeNode::Param param;
+ if (init_param.input_count == 2)
+ {
+ param.dims = OperandIndex{init_param.inputs[1]};
+ }
+
+ return new operation::SqueezeNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_TANH] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::TanhNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_LOGISTIC] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::LogisticNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_DIV] = [](const OperationFactory::Param &init_param, Operands &operands) {
+ assert(init_param.input_count == 3 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> LHS Tensor Index
+ // 1 -> RHS Tensor Index
+ // 2 -> Activation Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ operation::DivNode::Param param;
+
+ const auto activation_index = OperandIndex{init_param.inputs[2]};
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+
+ return new operation::DivNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_EXP_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::ExpNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_GREATER_EQUAL_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input0 Tensor Index
+ // 1 -> input1 Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ operation::ComparisonNode::Param param;
+ param.comparison_type = operation::ComparisonNode::ComparisonType::GreaterEqual;
+
+ // Output operand type must be boolean
+ replaceDataType(operands, outputs.at(0), DataType::BOOL8);
+
+ return new operation::ComparisonNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_LESS_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input0 Tensor Index
+ // 1 -> input1 Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ operation::ComparisonNode::Param param;
+ param.comparison_type = operation::ComparisonNode::ComparisonType::Less;
+
+ // Output operand type must be boolean
+ replaceDataType(operands, outputs.at(0), DataType::BOOL8);
+
+ return new operation::ComparisonNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_REDUCE_MAX_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Axis Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::ReduceMaxNode::Param param;
+ param.axis_index = OperandIndex{init_param.inputs[1]};
+
+ return new operation::ReduceMaxNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_NOT_EQUAL_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+    //  0 -> input0 Tensor Index
+    //  1 -> input1 Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ operation::ComparisonNode::Param param;
+ param.comparison_type = operation::ComparisonNode::ComparisonType::NotEqual;
+
+ // Output operand type must be boolean
+ replaceDataType(operands, outputs.at(0), DataType::BOOL8);
+
+ return new operation::ComparisonNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_LOGICAL_AND_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input0 Tensor Index
+ // 1 -> input1 Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ // This operation's operands must be boolean type.
+ replaceDataType(operands, inputs.at(0), DataType::BOOL8);
+ replaceDataType(operands, inputs.at(1), DataType::BOOL8);
+ replaceDataType(operands, outputs.at(0), DataType::BOOL8);
+
+ return new operation::LogicalAndNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_RSQRT_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::RSQRTNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_RELU] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::ReLUNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_RESIZE_BILINEAR] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 3 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> IFM Index
+ // 1 -> Height Index
+ // 2 -> Width Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::ResizeBilinearNode::Param param;
+ param.height_index = OperandIndex{init_param.inputs[1]};
+ param.width_index = OperandIndex{init_param.inputs[2]};
+
+ return new operation::ResizeBilinearNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_RELU1] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::ReLU1Node{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_RELU6] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::ReLU6Node{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_RNN] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 6 && init_param.output_count == 2);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Weights Tensor Index
+ // 2 -> Recurrent Weights Tensor Index
+ // 3 -> Bias Tensor Index
+ // 4 -> Hidden state (in) Index
+ // 5 -> Activation Index
+
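+    // Inputs 0..4 become graph operands below; input 5 (activation) is folded
+    // into the operation's Param instead, hence the `input_count - 1` loop bound.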
+ OperandIndexSequence inputs;
+ for (uint32_t n = 0; n < init_param.input_count - 1; ++n)
+ {
+ inputs.append(OperandIndex{init_param.inputs[n]});
+ }
+ OperandIndexSequence outputs;
+ for (uint32_t n = 0; n < init_param.output_count; ++n)
+ {
+ outputs.append(OperandIndex{init_param.outputs[n]});
+ }
+
+ operation::RNNNode::Param param;
+ const auto activation_index = OperandIndex{init_param.inputs[5]};
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+
+ return new operation::RNNNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_FLOOR] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ // 0 -> input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::FloorNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_SPACE_TO_DEPTH] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Block size Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::SpaceToDepthNode::Param param;
+ param.block_size_index = OperandIndex{init_param.inputs[1]};
+
+ return new operation::SpaceToDepthNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_L2_POOL_2D] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ assert(init_param.input_count == 10 || init_param.input_count == 7);
+ assert(init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> IFM Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::L2Pool2DNode::Param param;
+
+    if (init_param.input_count == 7) // Implicit Padding case
+ {
+ // 1 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
+ // 2 -> Horizontal (over width) Stride Index
+    //  3 -> Vertical (over height) Stride Index
+ // 4 -> Filter Width Index
+ // 5 -> Filter Height Index
+ // 6 -> FuseCode (activation) Index
+ const auto padding_index = OperandIndex{init_param.inputs[1]};
+ const auto hstride_index = OperandIndex{init_param.inputs[2]};
+ const auto vstride_index = OperandIndex{init_param.inputs[3]};
+ const auto kw_index = OperandIndex{init_param.inputs[4]};
+ const auto kh_index = OperandIndex{init_param.inputs[5]};
+ const auto activation_index = OperandIndex{init_param.inputs[6]};
+
+ param.padding.type =
+ NNAPIConvert::getPaddingType(operands.at(padding_index).asScalar<PaddingCode>());
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.kw = getUint32Scalar(operands, kw_index);
+ param.kh = getUint32Scalar(operands, kh_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+ else // Explicit Padding case
+ {
+ // 1 -> Padding_left index
+ // 2 -> Padding_right index
+ // 3 -> Padding_top index
+ // 4 -> Padding_bottom index
+ // 5 -> Horizontal (over width) Stride Index
+    //  6 -> Vertical (over height) Stride Index
+ // 7 -> Filter Width Index
+ // 8 -> Filter Height Index
+ // 9 -> FuseCode (activation) Index
+ const auto padding_left_index = OperandIndex{init_param.inputs[1]};
+ const auto padding_right_index = OperandIndex{init_param.inputs[2]};
+ const auto padding_top_index = OperandIndex{init_param.inputs[3]};
+ const auto padding_bottom_index = OperandIndex{init_param.inputs[4]};
+ const auto hstride_index = OperandIndex{init_param.inputs[5]};
+ const auto vstride_index = OperandIndex{init_param.inputs[6]};
+ const auto kw_index = OperandIndex{init_param.inputs[7]};
+ const auto kh_index = OperandIndex{init_param.inputs[8]};
+ const auto activation_index = OperandIndex{init_param.inputs[9]};
+
+ param.padding.type = PaddingType::EXPLICIT;
+ param.padding.param = makeExplicitPadding(operands, padding_left_index, padding_right_index,
+ padding_top_index, padding_bottom_index);
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+ param.kw = getUint32Scalar(operands, kw_index);
+ param.kh = getUint32Scalar(operands, kh_index);
+ param.activation =
+ NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
+ }
+
+ return new operation::L2Pool2DNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_EMBEDDING_LOOKUP] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Lookups Index
+ // 1 -> Values Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ return new operation::EmbeddingLookupNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_L2_NORMALIZATION] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ // 0 -> input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::L2NormalizationNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_HASHTABLE_LOOKUP] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 3 && init_param.output_count == 2);
+
+ // Each output should be interpreted as follows:
+ //
+ // 0 -> Output Index
+ // 1 -> Hits Index
+ OperandIndexSequence outputs{init_param.outputs[0], init_param.outputs[1]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Lookups Index
+ // 1 -> Keys Index
+ // 2 -> Values Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]};
+
+ return new operation::HashtableLookupNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_PRELU_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input Tensor Index
+ // 1 -> alpha Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ return new operation::PReLUNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_TRANSPOSE_CONV_EX] = [](const OperationFactory::Param &init_param,
+ Operands &operands) {
+ assert(init_param.input_count == 6 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Output Shape Index
+ // 1 -> Weights Index
+ // 2 -> Input Tensor Index
+ // 3 -> Padding Type
+ // 4 -> Stride width
+ // 5 -> Stride height
+
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]};
+
+ operation::TransposeConvNode::Param param;
+
+ const auto padding_index = OperandIndex{init_param.inputs[3]};
+ const auto hstride_index = OperandIndex{init_param.inputs[4]};
+ const auto vstride_index = OperandIndex{init_param.inputs[5]};
+
+ param.padding.type =
+ NNAPIConvert::getPaddingType(operands.at(padding_index).asScalar<PaddingCode>());
+ param.stride = makeStride(operands, hstride_index, vstride_index);
+
+ return new operation::TransposeConvNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_SQRT_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ // 0 -> input Tensor Index
+
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ return new operation::SQRTNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_LOGICAL_OR_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input0 Tensor Index
+ // 1 -> input1 Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ // This operation's operands must be boolean type.
+ replaceDataType(operands, inputs.at(0), DataType::BOOL8);
+ replaceDataType(operands, inputs.at(1), DataType::BOOL8);
+ replaceDataType(operands, outputs.at(0), DataType::BOOL8);
+
+ return new operation::LogicalOrNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_LOGICAL_NOT_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ // This operation's operands must be boolean type.
+ replaceDataType(operands, inputs.at(0), DataType::BOOL8);
+ replaceDataType(operands, outputs.at(0), DataType::BOOL8);
+
+ return new operation::LogicalNotNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_LSTM] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 23 && init_param.output_count == 4);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Input to Input Tensor Index
+ // 2 -> Input to Forget Tensor Index
+ // 3 -> Input to Cell Tensor Index
+ // 4 -> Input to Output Tensor Index
+ // 5 -> Recurrent to Input Weights Tensor Index
+ // 6 -> Recurrent to Forget Weights Tensor Index
+ // 7 -> Recurrent to Cell Weights Tensor Index
+ // 8 -> Recurrent to Output Weights Tensor Index
+ // 9 -> Cell to Input Weights Tensor Index
+ // 10 -> Cell to Forget Weights Tensor Index
+ // 11 -> Cell to Output Weights Tensor Index
+ // 12 -> Input Gate Bias Tensor Index
+ // 13 -> Forget Gate Bias Tensor Index
+ // 14 -> Cell Bias Tensor Index
+ // 15 -> Output Gate Bias Tensor Index
+ // 16 -> Projection Weights Tensor Index
+ // 17 -> Projection Bias Tensor Index
+ // 18 -> Output State In Tensor Index
+ // 19 -> Cell State In Tensor Index
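+    //
+    // Inputs 20..22 (activation, cell clip threshold, projection clip threshold)
+    // are scalars folded into the Param below, hence the `input_count - 3` bound.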
+ OperandIndexSequence inputs;
+ for (uint32_t n = 0; n < init_param.input_count - 3; ++n)
+ {
+ inputs.append(OperandIndex{init_param.inputs[n]});
+ }
+
+ // Each output should be interpreted as follows:
+ //
+ // 0 -> Scratch Buffer Tensor Index
+ // 1 -> Output State Out Tensor Index
+ // 2 -> Cell State Out Tensor Index
+ // 3 -> Output Tensor Index
+ OperandIndexSequence outputs;
+ for (uint32_t n = 0; n < init_param.output_count; ++n)
+ {
+ outputs.append(OperandIndex{init_param.outputs[n]});
+ }
+
+ operation::LSTMNode::Param param;
+ const auto activation_index = OperandIndex{init_param.inputs[20]};
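+    // The integer codes below appear to follow TFLite's fused-activation
+    // encoding; note that code 5 has no mapping and falls through to the error.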
+ switch (operands.at(activation_index).asScalar<int32_t>())
+ {
+ case 0:
+ param.activation = Activation::NONE;
+ break;
+ case 1:
+ param.activation = Activation::RELU;
+ break;
+ case 2:
+ param.activation = Activation::RELU1;
+ break;
+ case 3:
+ param.activation = Activation::RELU6;
+ break;
+ case 4:
+ param.activation = Activation::TANH;
+ break;
+ case 6:
+ param.activation = Activation::SIGMOID;
+ break;
+ default:
+ throw std::runtime_error("Unsupported activation type");
+ break;
+ }
+ param.cell_threshold = operands.at(OperandIndex{init_param.inputs[21]}).asScalar<float>();
+ param.projection_threshold = operands.at(OperandIndex{init_param.inputs[22]}).asScalar<float>();
+
+ return new operation::LSTMNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_EQUAL_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input0 Tensor Index
+ // 1 -> input1 Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ operation::ComparisonNode::Param param;
+ param.comparison_type = operation::ComparisonNode::ComparisonType::Equal;
+
+ // Output operand type must be boolean
+ replaceDataType(operands, outputs.at(0), DataType::BOOL8);
+
+ return new operation::ComparisonNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_SQUARED_DIFFERENCE_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> LHS Tensor Index
+ // 1 -> RHS Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ return new operation::SquaredDifferenceNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_TOPK_V2_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 2);
+
+ // Each output should be interpreted as follows:
+ //
+ // 0 -> Index for Output Values
+ // 1 -> Index for Output Indices
+ OperandIndexSequence outputs{init_param.outputs[0], init_param.outputs[1]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Index for Input Data
+ // 1 -> Index for K
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::TopKV2Node::Param param;
+ param.k_index = OperandIndex{init_param.inputs[1]};
+
+ return new operation::TopKV2Node{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_GATHER_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 3 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> input Tensor Index
+ // 1 -> indices Tensor Index
+ // 2 -> axis Index
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+
+ operation::GatherNode::Param param;
+ param.axis_index = OperandIndex{init_param.inputs[2]};
+
+ return new operation::GatherNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_NEG_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::NegNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_ABS_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::AbsNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_ARGMAX_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Axis Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::ArgMaxNode::Param param;
+ param.axis_index = OperandIndex{init_param.inputs[1]};
+
+ return new operation::ArgMaxNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_DEQUANTIZE] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 1 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ return new operation::DequantizeNode{inputs, outputs};
+ };
+
+ _map[ANEURALNETWORKS_MEAN] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 3 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> ifm Tensor Index
+ // 1 -> axis Tensor Index
+ // 2 -> keep_dims Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::MeanNode::Param param;
+ param.axis_index = OperandIndex{init_param.inputs[1]};
+ param.keep_dims = operands.at(OperandIndex{init_param.inputs[2]}).asScalar<int32_t>() != 0;
+
+ return new operation::MeanNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 5 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
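+    // Each input should be interpreted as follows:
+    //
+    // 0 -> Input Tensor Index
+    // 1 -> Radius Index
+    // 2 -> Bias Index
+    // 3 -> Alpha Index
+    // 4 -> Beta Index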
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::LocalResponseNormalizationNode::Param param;
+ param.radius_index = OperandIndex{init_param.inputs[1]};
+ param.bias_index = OperandIndex{init_param.inputs[2]};
+ param.alpha_index = OperandIndex{init_param.inputs[3]};
+ param.beta_index = OperandIndex{init_param.inputs[4]};
+
+ return new operation::LocalResponseNormalizationNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_DEPTH_TO_SPACE] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Block size Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::DepthToSpaceNode::Param param;
+ param.block_size_index = OperandIndex{init_param.inputs[1]};
+
+ return new operation::DepthToSpaceNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_REDUCE_MIN_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> Input Tensor Index
+ // 1 -> Axis Tensor Index
+ OperandIndexSequence inputs{init_param.inputs[0]};
+
+ operation::ReduceMinNode::Param param;
+ param.axis_index = OperandIndex{init_param.inputs[1]};
+
+ return new operation::ReduceMinNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_SPLIT_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &) {
+ assert(init_param.input_count == 3);
+    assert(init_param.output_count >= 1); // At least one output tensor
+
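+    // Each input should be interpreted as follows:
+    //
+    // 0 -> Input Tensor Index
+    // 1 -> Axis Index
+    // 2 -> Number of Splits Index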
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs;
+ for (uint32_t n = 0; n < init_param.output_count; ++n)
+ {
+ outputs.append(OperandIndex{init_param.outputs[n]});
+ }
+
+ operation::SplitNode::Param param;
+ param.axis_index = OperandIndex{init_param.inputs[1]};
+ param.num_of_splits_index = OperandIndex{init_param.inputs[2]};
+
+ return new operation::SplitNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_UNPACK_EX] = [](const OperationFactory::Param &init_param,
+ neurun::model::Operands &operands) {
+ assert(init_param.input_count == 3 && init_param.output_count >= 1);
+
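+    // Each input should be interpreted as follows:
+    //
+    // 0 -> Input Tensor Index
+    // 1 -> Number Index
+    // 2 -> Axis Index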
+ OperandIndexSequence inputs{init_param.inputs[0]};
+ OperandIndexSequence outputs;
+ for (uint32_t n = 0; n < init_param.output_count; ++n)
+ {
+ outputs.append(OperandIndex{init_param.outputs[n]});
+ }
+
+ operation::UnpackNode::Param param;
+ const auto num_index = OperandIndex{init_param.inputs[1]};
+ const auto axis_index = OperandIndex{init_param.inputs[2]};
+ param.num = operands.at(num_index).asScalar<int32_t>();
+ param.axis = operands.at(axis_index).asScalar<int32_t>();
+
+ return new operation::UnpackNode{inputs, outputs, param};
+ };
+
+ _map[ANEURALNETWORKS_PAD] = [](const OperationFactory::Param &init_param, Operands &) {
+ assert(init_param.input_count == 2 && init_param.output_count >= 1);
+
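+    // Each input should be interpreted as follows:
+    //
+    // 0 -> Input Tensor Index
+    // 1 -> Paddings Index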
+ OperandIndexSequence inputs{init_param.inputs[0], init_param.inputs[1]};
+ OperandIndexSequence outputs{init_param.outputs[0]};
+
+ return new operation::PadNode{inputs, outputs};
+ };
+}
+
+neurun::model::Operation *OperationFactory::create(ANeuralNetworksOperationType type,
+ const OperationFactory::Param &param,
+ neurun::model::Operands &operands)
+{
+ auto it = _map.find(type);
+ if (it == _map.end())
+ {
+ throw std::runtime_error("Unsupported operation type: " + std::to_string(type));
+ }
+ return it->second(param, operands);
+}
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.h b/runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.h
new file mode 100644
index 000000000..ae2a53088
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __OPERATION_FACTORY_H__
+#define __OPERATION_FACTORY_H__
+
+#include <unordered_map>
+
+#include "model/Operands.h"
+#include "model/Operation.h"
+#include "NeuralNetworks.h"
+#include "NeuralNetworksEx.h"
+
+/**
+ * @brief A class to create a neurun operation object from NN API input parameters
+ */
+class OperationFactory
+{
+public:
+ struct Param
+ {
+ uint32_t input_count;
+ const uint32_t *inputs;
+ uint32_t output_count;
+ const uint32_t *outputs;
+ };
+
+public:
+ using Generator = std::function<neurun::model::Operation *(const OperationFactory::Param &,
+ neurun::model::Operands &)>;
+
+public:
+ static OperationFactory &instance();
+
+private:
+ OperationFactory();
+
+public:
+ neurun::model::Operation *create(ANeuralNetworksOperationType,
+ const OperationFactory::Param &param,
+ neurun::model::Operands &operands);
+ // TODO add "register" method for separating registration, possibly supporting custom-ops
+
+private:
+ std::unordered_map<ANeuralNetworksOperationType, Generator> _map;
+};
+
+#endif // __OPERATION_FACTORY_H__
diff --git a/runtimes/neurun/frontend/tflite/CMakeLists.txt b/runtimes/neurun/frontend/tflite/CMakeLists.txt
new file mode 100644
index 000000000..738a692dc
--- /dev/null
+++ b/runtimes/neurun/frontend/tflite/CMakeLists.txt
@@ -0,0 +1,17 @@
+if(NOT BUILD_TFLITE_LOADER)
+ return()
+endif(NOT BUILD_TFLITE_LOADER)
+
+file(GLOB_RECURSE SOURCES_TFLITE_LOADER "*.cc")
+
+# tflite_loader lib
+add_library(tflite_loader SHARED ${SOURCES_TFLITE_LOADER})
+nnfw_find_package(FlatBuffersSource REQUIRED)
+target_include_directories(tflite_loader PUBLIC ${FlatBuffersSource_DIR}/include .)
+
+target_link_libraries(tflite_loader PUBLIC neurun_core) # TODO Link PRIVATE neurun_core
+target_link_libraries(tflite_loader PUBLIC nnfw_lib_misc nnfw_lib_cpp14)
+target_link_libraries(tflite_loader PRIVATE nnfw_common)
+target_link_libraries(tflite_loader PRIVATE nnfw_coverage)
+
+install(TARGETS tflite_loader DESTINATION lib)
diff --git a/runtimes/neurun/frontend/tflite/loader.cc b/runtimes/neurun/frontend/tflite/loader.cc
new file mode 100644
index 000000000..14acbfc6d
--- /dev/null
+++ b/runtimes/neurun/frontend/tflite/loader.cc
@@ -0,0 +1,700 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "loader.h"
+
+#include "model/Operations.Include.h"
+
+#include "cpp14/memory.h"
+
+namespace
+{
+
+using namespace tflite;
+using namespace neurun;
+
+model::Activation convertActivation(const ActivationFunctionType type)
+{
+ return static_cast<model::Activation>(static_cast<uint32_t>(type));
+}
+
+} // namespace anonymous
+
+namespace tflite_loader
+{
+using namespace tflite;
+using namespace neurun;
+
+void Loader::loadFromFile(const char *file_path)
+{
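+  // Read the whole flatbuffer file into _buffer, then parse it in loadModel()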
+ std::ifstream stream(file_path, std::fstream::in | std::fstream::binary);
+
+ stream.seekg(0, stream.end);
+ auto size = stream.tellg();
+ stream.seekg(0, stream.beg);
+
+ _buffer.resize(size);
+ stream.read(_buffer.data(), size);
+
+ stream.close();
+ loadModel();
+}
+
+void Loader::loadFromBuffer(std::pair<const char *, const char *> ref)
+{
+  // resize (not reserve) so that std::copy writes into valid elements
+  _buffer.resize(ref.second - ref.first);
+  std::copy(ref.first, ref.second, _buffer.begin());
+ loadModel();
+}
+
+model::DataType tensorTypeToDataType(const TensorType &type)
+{
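+  // Map the subset of TFLite tensor types the loader currently understands;
+  // anything else is reported as not-yet-implemented below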
+ switch (type)
+ {
+ case TensorType::TensorType_FLOAT32:
+ return model::DataType::FLOAT32;
+ case TensorType::TensorType_INT32:
+ return model::DataType::INT32;
+ case TensorType::TensorType_BOOL:
+ return model::DataType::BOOL8;
+ case TensorType::TensorType_INT8:
+ return model::DataType::QUANT8_ASYMM;
+ default:
+ throw std::runtime_error("NYI");
+ }
+}
+
+void Loader::loadOperand(const tflite::Tensor *tensor)
+{
+ model::Shape shape;
+ std::unique_ptr<model::Data> data_ptr;
+ // Shape
+ const auto *tensor_shape = tensor->shape();
+ for (const auto &dim : *tensor_shape)
+ {
+ shape.append(dim);
+ }
+ // Type
+ model::DataType data_type = tensorTypeToDataType(tensor->type());
+ // Create TypeInfo
+ model::TypeInfo type_info(data_type);
+ // Create operand
+ const auto &operand_index = _graph.addOperand(shape, type_info);
+  // Buffer index (0 is TFLite's designated empty buffer, so skip it)
+ if (tensor->buffer() != 0)
+ _tensor_to_operand[tensor->buffer()] = operand_index;
+ // Name unused
+ // auto name = tensor->name();
+ // Quantization
+ auto quantization = tensor->quantization();
+ if (quantization != nullptr)
+ {
+ auto scale = quantization->scale();
+ auto zero_point = quantization->zero_point();
+ if (scale != nullptr || zero_point != nullptr)
+ throw std::runtime_error("Quantization is not supported!");
+
+ auto details = quantization->details_as_CustomQuantization();
+ if (details != nullptr)
+ throw std::runtime_error("Custom Quantization is not supported");
+ }
+  // Variable
+ if (tensor->is_variable())
+ throw std::runtime_error("Variable tensor not supported!");
+}
+
+void loadOperationIO(const tflite::Operator *op, model::OperandIndexSequence &inputs,
+ model::OperandIndexSequence &outputs)
+{
+ for (const auto &idx : *op->inputs())
+ {
+ inputs.append(model::OperandIndex(idx));
+ }
+
+ for (const auto &idx : *op->outputs())
+ {
+ outputs.append(model::OperandIndex(idx));
+ }
+}
+
+template <typename T>
+neurun::model::OperandIndex Loader::createOperand(const uint8_t *ptr,
+ const neurun::model::Shape &shape,
+ const neurun::model::TypeInfo &type_info)
+{
+ const auto &operand_index = _graph.addOperand(shape, type_info);
+ if (ptr != nullptr)
+ {
+ size_t size = shape.num_elements();
+ auto data = nnfw::cpp14::make_unique<model::CachedData>(ptr, size * sizeof(T));
+ auto &created_operand = _graph.operands().at(operand_index);
+ created_operand.data(std::move(data));
+ }
+ return operand_index;
+}
+
+template <typename Param, typename OptionsType>
+void Loader::loadStridesAndPaddings(Param &param, const OptionsType *options)
+{
+ // Strides
+  param.stride.vertical = options->stride_h();
+  param.stride.horizontal = options->stride_w();
+ // Paddings
+ if (options->padding() == Padding::Padding_SAME)
+ param.padding.type = neurun::model::PaddingType::SAME;
+ if (options->padding() == Padding::Padding_VALID)
+ param.padding.type = neurun::model::PaddingType::VALID;
+ // param paddings indexes unused
+}
+
+template <typename Param> void Loader::loadPool2D(Param &param, const Pool2DOptions *options)
+{
+ // Strides and Paddings
+ loadStridesAndPaddings(param, options);
+  // Filter width and height
+  param.kw = options->filter_width();
+  param.kh = options->filter_height();
+ // Activation
+ param.activation = convertActivation(options->fused_activation_function());
+}
+
+void Loader::loadConv2D(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::Conv2DNode::Param param;
+ const auto *options = op->builtin_options_as_Conv2DOptions();
+ param.activation = convertActivation(options->fused_activation_function());
+ loadStridesAndPaddings(param, options);
+ // Dilation h/w factor unused
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::Conv2DNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadDepthwiseConv2D(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::DepthwiseConv2DNode::Param param;
+ const auto *options = op->builtin_options_as_DepthwiseConv2DOptions();
+ param.activation = convertActivation(options->fused_activation_function());
+ loadStridesAndPaddings(param, options);
+ // Multiplier
+ param.multiplier = options->depth_multiplier();
+ // Dilation h/w factor unused
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::DepthwiseConv2DNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadTransposeConv(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::TransposeConvNode::Param param;
+ const auto *options = op->builtin_options_as_TransposeConvOptions();
+ loadStridesAndPaddings(param, options);
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::TransposeConvNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadAvgPool2D(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ neurun::model::operation::AvgPool2DNode::Param param;
+ const auto *options = op->builtin_options_as_Pool2DOptions();
+
+ loadPool2D(param, options);
+
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::AvgPool2DNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadReshape(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ // const auto *options = op->builtin_options_as_ReshapeOptions();
+ // No params
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::ReshapeNode(inputs, outputs));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadSoftmax(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::SoftmaxNode::Param param;
+ const auto *options = op->builtin_options_as_SoftmaxOptions();
+ // Beta
+ param.beta = options->beta();
+
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::SoftmaxNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadMaxPool2D(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::MaxPool2DNode::Param param;
+ const auto *options = op->builtin_options_as_Pool2DOptions();
+
+ loadPool2D(param, options);
+
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::MaxPool2DNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadConcatenation(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::ConcatNode::Param param;
+ const auto *options = op->builtin_options_as_ConcatenationOptions();
+ // Axis
+ param.axis = options->axis();
+ // activation unused
+
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::ConcatNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadFC(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::FullyConnectedNode::Param param;
+ const auto *options = op->builtin_options_as_FullyConnectedOptions();
+
+ param.activation = convertActivation(options->fused_activation_function());
+ // weights_format unused
+
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::FullyConnectedNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadAdd(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::AddNode::Param param;
+ const auto *options = op->builtin_options_as_AddOptions();
+
+ param.activation = convertActivation(options->fused_activation_function());
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::AddNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadSub(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::SubNode::Param param;
+ const auto *options = op->builtin_options_as_SubOptions();
+
+ param.activation = convertActivation(options->fused_activation_function());
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::SubNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadMul(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+  model::operation::MulNode::Param param;
+  const auto *options = op->builtin_options_as_MulOptions();
+
+  param.activation = convertActivation(options->fused_activation_function());
+
+  std::unique_ptr<model::Operation> new_op(new model::operation::MulNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadDiv(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ model::operation::DivNode::Param param;
+ const auto *options = op->builtin_options_as_DivOptions();
+
+ param.activation = convertActivation(options->fused_activation_function());
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::DivNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadRelu(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::ReLUNode(inputs, outputs));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadRelu6(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::ReLU6Node(inputs, outputs));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadRsqrt(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::RSQRTNode(inputs, outputs));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadSqrt(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::SQRTNode(inputs, outputs));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadSquaredDifference(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::SquaredDifferenceNode(inputs, outputs));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadTanh(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ loadOperationIO(op, inputs, outputs);
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::TanhNode(inputs, outputs));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadTranspose(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ const auto input_index = (*op->inputs())[0];
+ inputs.append(model::OperandIndex(input_index));
+ const auto output_index = (*op->outputs())[0];
+ outputs.append(model::OperandIndex(output_index));
+
+ model::operation::TransposeNode::Param param;
+ if (op->inputs()->size() == 2)
+ {
+ const auto perm_index = (*op->inputs())[1];
+ param.perm = model::OperandIndex(perm_index);
+ }
+
+ std::unique_ptr<model::Operation> new_op(
+ new model::operation::TransposeNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadMean(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ const auto input_index = (*op->inputs())[0];
+ inputs.append(model::OperandIndex(input_index));
+ const auto output_index = (*op->outputs())[0];
+ outputs.append(model::OperandIndex(output_index));
+
+ model::operation::MeanNode::Param param;
+ param.axis_index = model::OperandIndex((*op->inputs())[1]);
+ param.keep_dims = op->builtin_options_as_ReducerOptions()->keep_dims();
+
+ std::unique_ptr<model::Operation> new_op(new model::operation::MeanNode(inputs, outputs, param));
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadCustom(const tflite::Operator *op)
+{
+ model::OperandIndexSequence inputs;
+ model::OperandIndexSequence outputs;
+
+ for (auto in_idx : *op->inputs())
+ {
+ inputs.append(model::OperandIndex(in_idx));
+ }
+
+ for (auto out_idx : *op->outputs())
+ {
+ outputs.append(model::OperandIndex(out_idx));
+ }
+
+ auto custom_op_id = _opcode_index_to_custom_opcode.at(op->opcode_index());
+
+ auto constraint = model::operation::OperandConstraint::createExact(inputs.size());
+
+ assert(op->custom_options_format() == CustomOptionsFormat_FLEXBUFFERS &&
+ "Unsupported custom operation options format");
+
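+  // Copy the serialized FlexBuffers options; ownership of this buffer is
+  // presumably handed over to the CustomNode through userdata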
+ size_t custom_op_data_size = op->custom_options()->size();
+ auto custom_op_data = new char[custom_op_data_size];
+ std::copy(op->custom_options()->begin(), op->custom_options()->end(), custom_op_data);
+
+ model::operation::CustomNode::Userdata userdata{};
+ userdata.data = custom_op_data;
+ userdata.size = custom_op_data_size;
+
+ auto new_op = nnfw::cpp14::make_unique<model::operation::CustomNode>(constraint, inputs, outputs,
+ custom_op_id, userdata);
+
+ _graph.addOperation(std::move(new_op));
+}
+
+void Loader::loadOperation(const tflite::Operator *op)
+{
+ switch (_op_code_to_builtin_op[op->opcode_index()])
+ {
+ case BuiltinOperator_CONV_2D:
+ loadConv2D(op);
+ return;
+ case BuiltinOperator_AVERAGE_POOL_2D:
+ loadAvgPool2D(op);
+ return;
+ case BuiltinOperator_DEPTHWISE_CONV_2D:
+ loadDepthwiseConv2D(op);
+ return;
+ case BuiltinOperator_TRANSPOSE_CONV:
+ loadTransposeConv(op);
+ return;
+ case BuiltinOperator_RESHAPE:
+ loadReshape(op);
+ return;
+ case BuiltinOperator_SOFTMAX:
+ loadSoftmax(op);
+ return;
+ case BuiltinOperator_MAX_POOL_2D:
+ loadMaxPool2D(op);
+ return;
+ case BuiltinOperator_CONCATENATION:
+ loadConcatenation(op);
+ return;
+ case BuiltinOperator_FULLY_CONNECTED:
+ loadFC(op);
+ return;
+ case BuiltinOperator_ADD:
+ loadAdd(op);
+ return;
+ case BuiltinOperator_SUB:
+ loadSub(op);
+ return;
+ case BuiltinOperator_MUL:
+ loadMul(op);
+ return;
+ case BuiltinOperator_DIV:
+ loadDiv(op);
+ return;
+ case BuiltinOperator_RELU:
+ loadRelu(op);
+ return;
+ case BuiltinOperator_RELU6:
+ loadRelu6(op);
+ return;
+ case BuiltinOperator_RSQRT:
+ loadRsqrt(op);
+ return;
+ case BuiltinOperator_SQRT:
+ loadSqrt(op);
+ return;
+ case BuiltinOperator_SQUARED_DIFFERENCE:
+ loadSquaredDifference(op);
+ return;
+ case BuiltinOperator_TANH:
+ loadTanh(op);
+ return;
+ case BuiltinOperator_TRANSPOSE:
+ loadTranspose(op);
+ return;
+ case BuiltinOperator_MEAN:
+ loadMean(op);
+ return;
+ case BuiltinOperator_CUSTOM:
+ loadCustom(op);
+ return;
+ default:
+ auto *names = EnumNamesBuiltinOperator();
+ int enum_value = static_cast<int>(_op_code_to_builtin_op[op->opcode_index()]);
+ throw std::runtime_error(std::string("Unsupported operation: ").append(names[enum_value]));
+ }
+}
+
+void Loader::loadSubgraph(const SubGraph *subgraph)
+{
+ // Load tensors
+ for (const auto *tensor : *subgraph->tensors())
+ {
+ loadOperand(tensor);
+ }
+ // Set inputs
+ for (const auto &input_ind : *subgraph->inputs())
+ {
+ _graph.addInput(model::OperandIndex(input_ind));
+ }
+ // Set outputs
+ for (const auto &output_ind : *subgraph->outputs())
+ {
+ _graph.addOutput(model::OperandIndex(output_ind));
+ }
+ // Create operations
+ for (const auto *op : *subgraph->operators())
+ {
+ loadOperation(op);
+ }
+ // Name unused
+}
+
+void Loader::loadConstantTensor(const Buffer *buffer, const uint32_t &index)
+{
+ const auto *data = buffer->data();
+ if (data != nullptr)
+ {
+ auto ptr = nnfw::cpp14::make_unique<model::CachedData>(data->data(), data->size());
+ const auto &operand_index = _tensor_to_operand[index];
+ auto &operand = _graph.operands().at(operand_index);
+ operand.data(std::move(ptr));
+ }
+}
+
+void Loader::loadModel()
+{
+ flatbuffers::Verifier verifier(reinterpret_cast<const std::uint8_t *>(_buffer.data()),
+ _buffer.size());
+ if (!tflite::VerifyModelBuffer(verifier))
+ throw std::runtime_error{"Invalid tflite model"};
+
+ const auto *model = GetModel(_buffer.data());
+ // Version unused
+ // const auto version = model->version();
+ const auto *op_codes = model->operator_codes();
+ const auto *subgraphs = model->subgraphs();
+ // Description unused
+ // const auto *description = model->description();
+ const auto *buffers = model->buffers();
+  // Metadata buffer unused
+ // const auto *metadata_buffer = model->metadata_buffer();
+ // Use operator codes
+ for (const auto *op_code : *op_codes)
+ {
+ _op_code_to_builtin_op.push_back(op_code->builtin_code());
+
+ if (op_code->builtin_code() == BuiltinOperator_CUSTOM)
+ {
+ auto id = op_code->custom_code()->str();
+ _opcode_index_to_custom_opcode[_op_code_to_builtin_op.size() - 1] = id;
+ }
+    // Custom code is consumed above for CUSTOM ops
+    // Version unused
+ }
+ // Load subgraphs
+ for (const auto *subgraph : *subgraphs)
+ {
+ loadSubgraph(subgraph);
+ }
+ // Load buffers with constant tensors
+ for (uint32_t ind = 0; ind < buffers->size(); ind++)
+ {
+ loadConstantTensor(buffers->Get(ind), ind);
+ }
+
+ _graph.finishBuilding();
+}
+
+} // namespace tflite_loader
diff --git a/runtimes/neurun/frontend/tflite/loader.h b/runtimes/neurun/frontend/tflite/loader.h
new file mode 100644
index 000000000..c398cbc00
--- /dev/null
+++ b/runtimes/neurun/frontend/tflite/loader.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TFLITE_LOADER_LOADER_H__
+#define __TFLITE_LOADER_LOADER_H__
+
+#include "schema_generated.h"
+
+#include "graph/Graph.h"
+
+#include <fstream>
+#include <map>
+
+namespace tflite_loader
+{
+/**
+ * @brief TFLite model loader
+ */
+class Loader
+{
+public:
+ /**
+ * @brief Construct a new Loader object
+ *
+   * @param graph Reference to the Graph to populate
+ */
+  explicit Loader(neurun::graph::Graph &graph) : _graph(graph) {}
+ ~Loader() = default;
+ /**
+ * @brief Load a model from file
+ *
+   * @param file_path Path to the TFLite model file
+ */
+ void loadFromFile(const char *file_path);
+ /**
+ * @brief Load a model from buffer
+ *
+   * @param ref Pair of pointers to the beginning and end of the buffer
+ */
+ void loadFromBuffer(std::pair<const char *, const char *> ref);
+
+private:
+ void loadModel();
+ // Helper functions
+ // Load subgraphs
+ void loadSubgraph(const tflite::SubGraph *subgraph);
+  // Load data from a buffer into the tensor at the given index
+  void loadConstantTensor(const tflite::Buffer *buffer, uint32_t index);
+  // Create operands from tflite::Tensor
+ void loadOperand(const tflite::Tensor *tensor);
+ // Create operations from tflite::Operator
+ void loadOperation(const tflite::Operator *op);
+  // Load strides and paddings from options into param
+ template <typename Param, typename OptionsType>
+ void loadStridesAndPaddings(Param &param, const OptionsType *options);
+ // Load Pool2D param
+ template <typename Param> void loadPool2D(Param &param, const tflite::Pool2DOptions *options);
+ // Create new Operand from Shape and TypeInfo
+ template <typename T>
+ neurun::model::OperandIndex createOperand(const uint8_t *ptr, const neurun::model::Shape &shape,
+ const neurun::model::TypeInfo &type_info);
+
+ // Operations
+ void loadConv2D(const tflite::Operator *op);
+ void loadDepthwiseConv2D(const tflite::Operator *op);
+ void loadTransposeConv(const tflite::Operator *op);
+ void loadAvgPool2D(const tflite::Operator *op);
+ void loadReshape(const tflite::Operator *op);
+ void loadSoftmax(const tflite::Operator *op);
+ void loadMaxPool2D(const tflite::Operator *op);
+ void loadConcatenation(const tflite::Operator *op);
+ void loadFC(const tflite::Operator *op);
+ void loadAdd(const tflite::Operator *op);
+ void loadSub(const tflite::Operator *op);
+ void loadMul(const tflite::Operator *op);
+ void loadDiv(const tflite::Operator *op);
+ void loadRelu(const tflite::Operator *op);
+ void loadRelu6(const tflite::Operator *op);
+ void loadRsqrt(const tflite::Operator *op);
+ void loadSqrt(const tflite::Operator *op);
+ void loadSquaredDifference(const tflite::Operator *op);
+ void loadTanh(const tflite::Operator *op);
+ void loadTranspose(const tflite::Operator *op);
+ void loadMean(const tflite::Operator *op);
+
+ void loadCustom(const tflite::Operator *op);
+
+private:
+ // Buffer for loading (if needed)
+ std::vector<char> _buffer;
+  // Reference to the Graph being loaded
+ neurun::graph::Graph &_graph;
+ // Mapping from tflite tensor index to Graph OperandIndex
+ std::map<uint32_t, neurun::model::OperandIndex> _tensor_to_operand;
+  // Mapping from opcode index to BuiltinOperator
+ std::vector<tflite::BuiltinOperator> _op_code_to_builtin_op;
+ std::unordered_map<uint32_t, std::string> _opcode_index_to_custom_opcode;
+};
+
+} // namespace tflite_loader
+
+#endif //__TFLITE_LOADER_LOADER_H__
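
For context, a minimal usage sketch of this new frontend follows. It is illustrative only: the `main` wrapper, the `model.tflite` path, and the assumption that `Graph` is default-constructible are not part of this patch; only `Loader`, `loadFromFile`, and the `std::runtime_error` thrown for an invalid model come from the code above.

#include "loader.h"

#include "graph/Graph.h"

#include <iostream>
#include <stdexcept>

int main()
{
  // The Loader populates a caller-owned Graph.
  neurun::graph::Graph graph;
  tflite_loader::Loader loader{graph};
  try
  {
    // Verifies the flatbuffer, then creates operands, operations,
    // and constant tensor data in `graph`.
    loader.loadFromFile("model.tflite");
  }
  catch (const std::runtime_error &e)
  {
    // e.g. "Invalid tflite model" or "Unsupported operation: ..."
    std::cerr << e.what() << std::endl;
    return 1;
  }
  return 0;
}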
diff --git a/runtimes/neurun/frontend/tflite/schema.fbs b/runtimes/neurun/frontend/tflite/schema.fbs
new file mode 100644
index 000000000..980f13b19
--- /dev/null
+++ b/runtimes/neurun/frontend/tflite/schema.fbs
@@ -0,0 +1,794 @@
+// Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Revision History
+// Version 0: Initial version.
+// Version 1: Add subgraphs to schema.
+// Version 2: Rename operators to conform to NN API.
+// Version 3: Move buffer data from Model.Subgraph.Tensors to Model.Buffers.
+
+namespace tflite;
+
+// This identifier corresponds to schema version 3.
+file_identifier "TFL3";
+// File extension of any written files.
+file_extension "tflite";
+
+// IMPORTANT: All new members of tables, enums and unions must be added at the
+// end to ensure backwards compatibility.
+
+// The type of data stored in a tensor.
+enum TensorType : byte {
+ FLOAT32 = 0,
+ FLOAT16 = 1,
+ INT32 = 2,
+ UINT8 = 3,
+ INT64 = 4,
+ STRING = 5,
+ BOOL = 6,
+ INT16 = 7,
+ COMPLEX64 = 8,
+ INT8 = 9,
+}
+
+// Custom quantization parameters for experimenting with new quantization
+// techniques.
+table CustomQuantization {
+ custom:[ubyte] (force_align: 16);
+}
+
+// Represents a specific quantization technique's parameters.
+union QuantizationDetails {
+ CustomQuantization,
+}
+
+// Parameters for converting a quantized tensor back to float.
+table QuantizationParameters {
+ // These four parameters are the asymmetric linear quantization parameters.
+ // Given a quantized value q, the corresponding float value f should be:
+ // f = scale * (q - zero_point)
+ // For other quantization types, the QuantizationDetails below is used.
+ min:[float]; // For importing back into tensorflow.
+ max:[float]; // For importing back into tensorflow.
+ scale:[float]; // For dequantizing the tensor's values.
+ zero_point:[long];
+
+ // If this is not none, the quantization parameters above are ignored and the
+ // value of the QuantizationDetails union below should be used.
+ details:QuantizationDetails;
+}
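+// Worked example (illustrative values, not from the schema): with scale = 0.5
+// and zero_point = 128, a quantized value q = 130 dequantizes to
+// f = 0.5 * (130 - 128) = 1.0.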
+
+table Tensor {
+ // The tensor shape. The meaning of each entry is operator-specific but
+ // builtin ops use: [batch size, height, width, number of channels] (That's
+  // TensorFlow's NHWC).
+ shape:[int];
+ type:TensorType;
+ // An index that refers to the buffers table at the root of the model. Or,
+ // if there is no data buffer associated (i.e. intermediate results), then
+ // this is 0 (which refers to an always existent empty buffer).
+ //
+ // The data_buffer itself is an opaque container, with the assumption that the
+ // target device is little-endian. In addition, all builtin operators assume
+ // the memory is ordered such that if `shape` is [4, 3, 2], then index
+ // [i, j, k] maps to data_buffer[i*3*2 + j*2 + k].
+ buffer:uint;
+ name:string; // For debugging and importing back into tensorflow.
+ quantization:QuantizationParameters; // Optional.
+
+ is_variable:bool = false;
+}
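+// Worked example (illustrative): with shape [4, 3, 2], the element at index
+// [2, 1, 0] maps to data_buffer[2*3*2 + 1*2 + 0] = data_buffer[14].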
+
+// A list of builtin operators. Builtin operators are slightly faster than custom
+// ones, but not by much. Moreover, while custom operators accept an opaque
+// object containing configuration parameters, builtins have a predetermined
+// set of acceptable options.
+enum BuiltinOperator : byte {
+ ADD = 0,
+ AVERAGE_POOL_2D = 1,
+ CONCATENATION = 2,
+ CONV_2D = 3,
+ DEPTHWISE_CONV_2D = 4,
+ // DEPTH_TO_SPACE = 5,
+ DEQUANTIZE = 6,
+ EMBEDDING_LOOKUP = 7,
+ FLOOR = 8,
+ FULLY_CONNECTED = 9,
+ HASHTABLE_LOOKUP = 10,
+ L2_NORMALIZATION = 11,
+ L2_POOL_2D = 12,
+ LOCAL_RESPONSE_NORMALIZATION = 13,
+ LOGISTIC = 14,
+ LSH_PROJECTION = 15,
+ LSTM = 16,
+ MAX_POOL_2D = 17,
+ MUL = 18,
+ RELU = 19,
+ // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
+ // since different model developers use RELU1 in different ways. Never
+ // create another op called RELU1.
+ RELU_N1_TO_1 = 20,
+ RELU6 = 21,
+ RESHAPE = 22,
+ RESIZE_BILINEAR = 23,
+ RNN = 24,
+ SOFTMAX = 25,
+ SPACE_TO_DEPTH = 26,
+ SVDF = 27,
+ TANH = 28,
+ // TODO(aselle): Consider rename to CONCATENATE_EMBEDDINGS
+ CONCAT_EMBEDDINGS = 29,
+ SKIP_GRAM = 30,
+ CALL = 31,
+ CUSTOM = 32,
+ EMBEDDING_LOOKUP_SPARSE = 33,
+ PAD = 34,
+ UNIDIRECTIONAL_SEQUENCE_RNN = 35,
+ GATHER = 36,
+ BATCH_TO_SPACE_ND = 37,
+ SPACE_TO_BATCH_ND = 38,
+ TRANSPOSE = 39,
+ MEAN = 40,
+ SUB = 41,
+ DIV = 42,
+ SQUEEZE = 43,
+ UNIDIRECTIONAL_SEQUENCE_LSTM = 44,
+ STRIDED_SLICE = 45,
+ BIDIRECTIONAL_SEQUENCE_RNN = 46,
+ EXP = 47,
+ TOPK_V2 = 48,
+ SPLIT = 49,
+ LOG_SOFTMAX = 50,
+ // DELEGATE is a special op type for the operations which are delegated to
+ // other backends.
+ // WARNING: Experimental interface, subject to change
+ DELEGATE = 51,
+ BIDIRECTIONAL_SEQUENCE_LSTM = 52,
+ CAST = 53,
+ PRELU = 54,
+ MAXIMUM = 55,
+ ARG_MAX = 56,
+ MINIMUM = 57,
+ LESS = 58,
+ NEG = 59,
+ PADV2 = 60,
+ GREATER = 61,
+ GREATER_EQUAL = 62,
+ LESS_EQUAL = 63,
+ SELECT = 64,
+ SLICE = 65,
+ SIN = 66,
+ TRANSPOSE_CONV = 67,
+ SPARSE_TO_DENSE = 68,
+ TILE = 69,
+ EXPAND_DIMS = 70,
+ EQUAL = 71,
+ NOT_EQUAL = 72,
+ LOG = 73,
+ SUM = 74,
+ SQRT = 75,
+ RSQRT = 76,
+ SHAPE = 77,
+ POW = 78,
+ ARG_MIN = 79,
+ FAKE_QUANT = 80,
+ REDUCE_PROD = 81,
+ REDUCE_MAX = 82,
+ PACK = 83,
+ LOGICAL_OR = 84,
+ ONE_HOT = 85,
+ LOGICAL_AND = 86,
+ LOGICAL_NOT = 87,
+ UNPACK = 88,
+ REDUCE_MIN = 89,
+ FLOOR_DIV = 90,
+ REDUCE_ANY = 91,
+ SQUARE = 92,
+ ZEROS_LIKE = 93,
+ FILL = 94,
+ FLOOR_MOD = 95,
+ RANGE = 96,
+ RESIZE_NEAREST_NEIGHBOR = 97,
+ LEAKY_RELU = 98,
+ SQUARED_DIFFERENCE = 99,
+ MIRROR_PAD = 100,
+ ABS = 101,
+ SPLIT_V = 102,
+}
+
+// Options for the builtin operators.
+union BuiltinOptions {
+ Conv2DOptions,
+ DepthwiseConv2DOptions,
+ ConcatEmbeddingsOptions,
+ LSHProjectionOptions,
+ Pool2DOptions,
+ SVDFOptions,
+ RNNOptions,
+ FullyConnectedOptions,
+ SoftmaxOptions,
+ ConcatenationOptions,
+ AddOptions,
+ L2NormOptions,
+ LocalResponseNormalizationOptions,
+ LSTMOptions,
+ ResizeBilinearOptions,
+ CallOptions,
+ ReshapeOptions,
+ SkipGramOptions,
+ SpaceToDepthOptions,
+ EmbeddingLookupSparseOptions,
+ MulOptions,
+ PadOptions,
+ GatherOptions,
+ BatchToSpaceNDOptions,
+ SpaceToBatchNDOptions,
+ TransposeOptions,
+ ReducerOptions,
+ SubOptions,
+ DivOptions,
+ SqueezeOptions,
+ SequenceRNNOptions,
+ StridedSliceOptions,
+ ExpOptions,
+ TopKV2Options,
+ SplitOptions,
+ LogSoftmaxOptions,
+ CastOptions,
+ DequantizeOptions,
+ MaximumMinimumOptions,
+ ArgMaxOptions,
+ LessOptions,
+ NegOptions,
+ PadV2Options,
+ GreaterOptions,
+ GreaterEqualOptions,
+ LessEqualOptions,
+ SelectOptions,
+ SliceOptions,
+ TransposeConvOptions,
+ SparseToDenseOptions,
+ TileOptions,
+ ExpandDimsOptions,
+ EqualOptions,
+ NotEqualOptions,
+ ShapeOptions,
+ PowOptions,
+ ArgMinOptions,
+ FakeQuantOptions,
+ PackOptions,
+ LogicalOrOptions,
+ OneHotOptions,
+ LogicalAndOptions,
+ LogicalNotOptions,
+ UnpackOptions,
+ FloorDivOptions,
+ SquareOptions,
+ ZerosLikeOptions,
+ FillOptions,
+ BidirectionalSequenceLSTMOptions,
+ BidirectionalSequenceRNNOptions,
+ UnidirectionalSequenceLSTMOptions,
+ FloorModOptions,
+ RangeOptions,
+ ResizeNearestNeighborOptions,
+ LeakyReluOptions,
+ SquaredDifferenceOptions,
+ MirrorPadOptions,
+ AbsOptions,
+ SplitVOptions,
+}
+
+enum Padding : byte { SAME, VALID }
+
+enum ActivationFunctionType : byte {
+ NONE = 0,
+ RELU = 1,
+ RELU_N1_TO_1 = 2,
+ RELU6 = 3,
+ TANH = 4,
+ SIGN_BIT = 5,
+}
+
+table Conv2DOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ fused_activation_function:ActivationFunctionType;
+ dilation_w_factor:int = 1;
+ dilation_h_factor:int = 1;
+}
+
+table Pool2DOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ filter_width:int;
+ filter_height:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+table DepthwiseConv2DOptions {
+ // Parameters for DepthwiseConv version 1 or above.
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ depth_multiplier:int;
+ fused_activation_function:ActivationFunctionType;
+ // Parameters for DepthwiseConv version 2 or above.
+ dilation_w_factor:int = 1;
+ dilation_h_factor:int = 1;
+}
+
+table ConcatEmbeddingsOptions {
+ num_channels:int;
+ num_columns_per_channel:[int];
+ embedding_dim_per_channel:[int]; // This could be inferred from parameters.
+}
+
+enum LSHProjectionType: byte {
+ UNKNOWN = 0,
+ SPARSE = 1,
+ DENSE = 2,
+}
+
+table LSHProjectionOptions {
+ type: LSHProjectionType;
+}
+
+table SVDFOptions {
+ rank:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow RNNCell.
+table RNNOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow dynamic_rnn with RNNCell.
+table SequenceRNNOptions {
+ time_major:bool;
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow bidirectional_dynamic_rnn with RNNCell.
+table BidirectionalSequenceRNNOptions {
+ time_major:bool;
+ fused_activation_function:ActivationFunctionType;
+ merge_outputs: bool;
+}
+
+enum FullyConnectedOptionsWeightsFormat: byte {
+ DEFAULT = 0,
+ SHUFFLED4x16INT8 = 1,
+}
+
+// An implementation of TensorFlow fully_connected (a.k.a Dense) layer.
+table FullyConnectedOptions {
+ // Parameters for FullyConnected version 1 or above.
+ fused_activation_function:ActivationFunctionType;
+
+ // Parameters for FullyConnected version 2 or above.
+ weights_format:FullyConnectedOptionsWeightsFormat = DEFAULT;
+}
+
+table SoftmaxOptions {
+ beta: float;
+}
+
+// An implementation of TensorFlow concat.
+table ConcatenationOptions {
+ axis:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+table AddOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table MulOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table L2NormOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table LocalResponseNormalizationOptions {
+ radius:int;
+ bias:float;
+ alpha:float;
+ beta:float;
+}
+
+enum LSTMKernelType : byte {
+ // Full LSTM kernel which supports peephole and projection.
+ FULL = 0,
+ // Basic LSTM kernels. Equivalent to TensorFlow BasicLSTMCell.
+ BASIC = 1,
+}
+
+// An implementation of TensorFlow LSTMCell and CoupledInputForgetGateLSTMCell
+table LSTMOptions {
+ // Parameters for LSTM version 1 or above.
+ fused_activation_function:ActivationFunctionType;
+ cell_clip: float; // Optional, 0.0 means no clipping
+ proj_clip: float; // Optional, 0.0 means no clipping
+
+ // Parameters for LSTM version 2 or above.
+ // Basic kernel is only supported in version 2 or above.
+ kernel_type: LSTMKernelType = FULL;
+}
+
+// An implementation of TensorFlow dynamic_rnn with LSTMCell.
+table UnidirectionalSequenceLSTMOptions {
+ fused_activation_function:ActivationFunctionType;
+ cell_clip: float; // Optional, 0.0 means no clipping
+ proj_clip: float; // Optional, 0.0 means no clipping
+
+ // If true then first dimension is sequence, otherwise batch.
+ time_major:bool;
+}
+
+table BidirectionalSequenceLSTMOptions {
+ fused_activation_function:ActivationFunctionType;
+ cell_clip: float; // Optional, 0.0 means no clipping
+ proj_clip: float; // Optional, 0.0 means no clipping
+
+ // If true, store the outputs of both directions into the first output.
+ merge_outputs: bool;
+}
+
+table ResizeBilinearOptions {
+ new_height: int (deprecated);
+ new_width: int (deprecated);
+ align_corners: bool;
+}
+
+table ResizeNearestNeighborOptions {
+ align_corners: bool;
+}
+
+// Options for a call operation.
+table CallOptions {
+ // The subgraph index that needs to be called.
+ subgraph:uint;
+}
+
+table PadOptions {
+}
+
+table PadV2Options {
+}
+
+table ReshapeOptions {
+ new_shape:[int];
+}
+
+table SpaceToBatchNDOptions {
+}
+
+table BatchToSpaceNDOptions {
+}
+
+table SkipGramOptions {
+ ngram_size: int;
+ max_skip_size: int;
+ include_all_ngrams: bool;
+}
+
+table SpaceToDepthOptions {
+ block_size: int;
+}
+
+table SubOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table DivOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table TopKV2Options {
+}
+
+enum CombinerType : byte {
+ SUM = 0,
+ MEAN = 1,
+ SQRTN = 2,
+}
+
+table EmbeddingLookupSparseOptions {
+ combiner:CombinerType;
+}
+
+table GatherOptions {
+ axis: int;
+}
+
+table TransposeOptions {
+}
+
+table ExpOptions {
+}
+
+table ReducerOptions {
+ keep_dims: bool;
+}
+
+table SqueezeOptions {
+ squeeze_dims:[int];
+}
+
+table SplitOptions {
+ num_splits: int;
+}
+
+table SplitVOptions {
+ num_splits: int;
+}
+
+table StridedSliceOptions {
+ begin_mask: int;
+ end_mask: int;
+ ellipsis_mask: int;
+ new_axis_mask: int;
+ shrink_axis_mask: int;
+}
+
+table LogSoftmaxOptions {
+}
+
+table CastOptions {
+ in_data_type: TensorType;
+ out_data_type: TensorType;
+}
+
+table DequantizeOptions {
+}
+
+table MaximumMinimumOptions {
+}
+
+table TileOptions {
+}
+
+table ArgMaxOptions {
+ output_type : TensorType;
+}
+
+table ArgMinOptions {
+ output_type : TensorType;
+}
+
+table GreaterOptions {
+}
+
+table GreaterEqualOptions {
+}
+
+table LessOptions {
+}
+
+table LessEqualOptions {
+}
+
+table NegOptions {
+}
+
+table SelectOptions {
+}
+
+table SliceOptions {
+}
+
+table TransposeConvOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+}
+
+table ExpandDimsOptions {
+}
+
+table SparseToDenseOptions {
+ validate_indices:bool;
+}
+
+table EqualOptions {
+}
+
+table NotEqualOptions {
+}
+
+table ShapeOptions {
+ // Optional output type of the operation (int32 or int64). Defaults to int32.
+ out_type : TensorType;
+}
+
+table PowOptions {
+}
+
+table FakeQuantOptions {
+ // Parameters supported by version 1:
+ min:float;
+ max:float;
+ num_bits:int;
+
+ // Parameters supported by version 2:
+ narrow_range:bool;
+}
+
+table PackOptions {
+ values_count:int;
+ axis:int;
+}
+
+table LogicalOrOptions {
+}
+
+table OneHotOptions {
+ axis:int;
+}
+
+table AbsOptions {
+}
+
+table LogicalAndOptions {
+}
+
+table LogicalNotOptions {
+}
+
+table UnpackOptions {
+ num:int;
+ axis:int;
+}
+
+table FloorDivOptions {
+}
+
+table SquareOptions {
+}
+
+table ZerosLikeOptions {
+}
+
+table FillOptions {
+}
+
+table FloorModOptions {
+}
+
+table RangeOptions {
+}
+
+table LeakyReluOptions {
+ alpha:float;
+}
+
+table SquaredDifferenceOptions {
+}
+
+enum MirrorPadMode : byte {
+ // Doesn't include borders.
+ REFLECT = 0,
+ // Includes borders.
+ SYMMETRIC = 1,
+}
+
+table MirrorPadOptions {
+ mode:MirrorPadMode;
+}
+
+// An OperatorCode can be an enum value (BuiltinOperator) if the operator is a
+// builtin, or a string if the operator is custom.
+table OperatorCode {
+ builtin_code:BuiltinOperator;
+ custom_code:string;
+
+  // The version of the operator. The version needs to be bumped whenever new
+ // parameters are introduced into an op.
+ version:int = 1;
+}
+
+enum CustomOptionsFormat : byte {
+ FLEXBUFFERS = 0,
+}
+
+// An operator takes tensors as inputs and outputs. The type of operation being
+// performed is determined by an index into the list of valid OperatorCodes,
+// while the specifics of each operation are configured using builtin_options
+// or custom_options.
+table Operator {
+ // Index into the operator_codes array. Using an integer here avoids
+  // complicated map lookups.
+ opcode_index:uint;
+
+ // Optional input and output tensors are indicated by -1.
+ inputs:[int];
+ outputs:[int];
+
+ builtin_options:BuiltinOptions;
+ custom_options:[ubyte];
+ custom_options_format:CustomOptionsFormat;
+
+ // A list of booleans indicating the input tensors which are being mutated by
+  // this operator (e.g. used by RNN and LSTM).
+ // For example, if the "inputs" array refers to 5 tensors and the second and
+ // fifth are mutable variables, then this list will contain
+ // [false, true, false, false, true].
+ //
+ // If the list is empty, no variable is mutated in this operator.
+ // The list either has the same length as `inputs`, or is empty.
+ mutating_variable_inputs:[bool];
+}
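+// Illustrative example (values are hypothetical): an Operator with
+// opcode_index = 2 runs the builtin or custom op described by
+// Model.operator_codes[2], and inputs:[0, -1, 3] reads SubGraph.tensors[0]
+// and [3] while omitting the optional second input.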
+
+// The root type, defining a subgraph, which typically represents an entire
+// model.
+table SubGraph {
+ // A list of all tensors used in this subgraph.
+ tensors:[Tensor];
+
+ // Indices of the tensors that are inputs into this subgraph. Note this is
+ // the list of non-static tensors that feed into the subgraph for inference.
+ inputs:[int];
+
+ // Indices of the tensors that are outputs out of this subgraph. Note this is
+ // the list of output tensors that are considered the product of the
+ // subgraph's inference.
+ outputs:[int];
+
+ // All operators, in execution order.
+ operators:[Operator];
+
+ // Name of this subgraph (used for debugging).
+ name:string;
+}
+
+// Table of raw data buffers (used for constant tensors). Referenced by tensors
+// by index. The generous alignment accommodates mmap-friendly data structures.
+table Buffer {
+ data:[ubyte] (force_align: 16);
+}
+
+table Model {
+ // Version of the schema.
+ version:uint;
+
+ // A list of all operator codes used in this model. This is
+ // kept in order because operators carry an index into this
+ // vector.
+ operator_codes:[OperatorCode];
+
+ // All the subgraphs of the model. The 0th is assumed to be the main
+ // model.
+ subgraphs:[SubGraph];
+
+ // A description of the model.
+ description:string;
+
+ // Buffers of the model.
+ // Note the 0th entry of this array must be an empty buffer (sentinel).
+ // This is a convention so that tensors without a buffer can provide 0 as
+ // their buffer.
+ buffers:[Buffer];
+
+  // Metadata about the model. Indirects into the existing buffers list.
+ metadata_buffer:[int];
+}
+
+root_type Model;
diff --git a/runtimes/neurun/frontend/tflite/schema_generated.h b/runtimes/neurun/frontend/tflite/schema_generated.h
new file mode 100644
index 000000000..4a11f9a7a
--- /dev/null
+++ b/runtimes/neurun/frontend/tflite/schema_generated.h
@@ -0,0 +1,7272 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// automatically generated by the FlatBuffers compiler, do not modify
+
+#ifndef FLATBUFFERS_GENERATED_SCHEMA_TFLITE_H_
+#define FLATBUFFERS_GENERATED_SCHEMA_TFLITE_H_
+
+#include "flatbuffers/flatbuffers.h"
+
+namespace tflite
+{
+
+struct CustomQuantization;
+
+struct QuantizationParameters;
+
+struct Tensor;
+
+struct Conv2DOptions;
+
+struct Pool2DOptions;
+
+struct DepthwiseConv2DOptions;
+
+struct ConcatEmbeddingsOptions;
+
+struct LSHProjectionOptions;
+
+struct SVDFOptions;
+
+struct RNNOptions;
+
+struct SequenceRNNOptions;
+
+struct BidirectionalSequenceRNNOptions;
+
+struct FullyConnectedOptions;
+
+struct SoftmaxOptions;
+
+struct ConcatenationOptions;
+
+struct AddOptions;
+
+struct MulOptions;
+
+struct L2NormOptions;
+
+struct LocalResponseNormalizationOptions;
+
+struct LSTMOptions;
+
+struct UnidirectionalSequenceLSTMOptions;
+
+struct BidirectionalSequenceLSTMOptions;
+
+struct ResizeBilinearOptions;
+
+struct ResizeNearestNeighborOptions;
+
+struct CallOptions;
+
+struct PadOptions;
+
+struct PadV2Options;
+
+struct ReshapeOptions;
+
+struct SpaceToBatchNDOptions;
+
+struct BatchToSpaceNDOptions;
+
+struct SkipGramOptions;
+
+struct SpaceToDepthOptions;
+
+struct SubOptions;
+
+struct DivOptions;
+
+struct TopKV2Options;
+
+struct EmbeddingLookupSparseOptions;
+
+struct GatherOptions;
+
+struct TransposeOptions;
+
+struct ExpOptions;
+
+struct ReducerOptions;
+
+struct SqueezeOptions;
+
+struct SplitOptions;
+
+struct SplitVOptions;
+
+struct StridedSliceOptions;
+
+struct LogSoftmaxOptions;
+
+struct CastOptions;
+
+struct DequantizeOptions;
+
+struct MaximumMinimumOptions;
+
+struct TileOptions;
+
+struct ArgMaxOptions;
+
+struct ArgMinOptions;
+
+struct GreaterOptions;
+
+struct GreaterEqualOptions;
+
+struct LessOptions;
+
+struct LessEqualOptions;
+
+struct NegOptions;
+
+struct SelectOptions;
+
+struct SliceOptions;
+
+struct TransposeConvOptions;
+
+struct ExpandDimsOptions;
+
+struct SparseToDenseOptions;
+
+struct EqualOptions;
+
+struct NotEqualOptions;
+
+struct ShapeOptions;
+
+struct PowOptions;
+
+struct FakeQuantOptions;
+
+struct PackOptions;
+
+struct LogicalOrOptions;
+
+struct OneHotOptions;
+
+struct AbsOptions;
+
+struct LogicalAndOptions;
+
+struct LogicalNotOptions;
+
+struct UnpackOptions;
+
+struct FloorDivOptions;
+
+struct SquareOptions;
+
+struct ZerosLikeOptions;
+
+struct FillOptions;
+
+struct FloorModOptions;
+
+struct RangeOptions;
+
+struct LeakyReluOptions;
+
+struct SquaredDifferenceOptions;
+
+struct MirrorPadOptions;
+
+struct OperatorCode;
+
+struct Operator;
+
+struct SubGraph;
+
+struct Buffer;
+
+struct Model;
+
+enum TensorType
+{
+ TensorType_FLOAT32 = 0,
+ TensorType_FLOAT16 = 1,
+ TensorType_INT32 = 2,
+ TensorType_UINT8 = 3,
+ TensorType_INT64 = 4,
+ TensorType_STRING = 5,
+ TensorType_BOOL = 6,
+ TensorType_INT16 = 7,
+ TensorType_COMPLEX64 = 8,
+ TensorType_INT8 = 9,
+ TensorType_MIN = TensorType_FLOAT32,
+ TensorType_MAX = TensorType_INT8
+};
+
+inline const TensorType (&EnumValuesTensorType())[10]
+{
+ static const TensorType values[] = {TensorType_FLOAT32, TensorType_FLOAT16, TensorType_INT32,
+ TensorType_UINT8, TensorType_INT64, TensorType_STRING,
+ TensorType_BOOL, TensorType_INT16, TensorType_COMPLEX64,
+ TensorType_INT8};
+ return values;
+}
+
+inline const char *const *EnumNamesTensorType()
+{
+ static const char *const names[] = {"FLOAT32", "FLOAT16", "INT32", "UINT8", "INT64", "STRING",
+ "BOOL", "INT16", "COMPLEX64", "INT8", nullptr};
+ return names;
+}
+
+inline const char *EnumNameTensorType(TensorType e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesTensorType()[index];
+}
+
+enum QuantizationDetails
+{
+ QuantizationDetails_NONE = 0,
+ QuantizationDetails_CustomQuantization = 1,
+ QuantizationDetails_MIN = QuantizationDetails_NONE,
+ QuantizationDetails_MAX = QuantizationDetails_CustomQuantization
+};
+
+inline const QuantizationDetails (&EnumValuesQuantizationDetails())[2]
+{
+ static const QuantizationDetails values[] = {QuantizationDetails_NONE,
+ QuantizationDetails_CustomQuantization};
+ return values;
+}
+
+inline const char *const *EnumNamesQuantizationDetails()
+{
+ static const char *const names[] = {"NONE", "CustomQuantization", nullptr};
+ return names;
+}
+
+inline const char *EnumNameQuantizationDetails(QuantizationDetails e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesQuantizationDetails()[index];
+}
+
+template <typename T> struct QuantizationDetailsTraits
+{
+ static const QuantizationDetails enum_value = QuantizationDetails_NONE;
+};
+
+template <> struct QuantizationDetailsTraits<CustomQuantization>
+{
+ static const QuantizationDetails enum_value = QuantizationDetails_CustomQuantization;
+};
+
+bool VerifyQuantizationDetails(flatbuffers::Verifier &verifier, const void *obj,
+ QuantizationDetails type);
+bool VerifyQuantizationDetailsVector(flatbuffers::Verifier &verifier,
+ const flatbuffers::Vector<flatbuffers::Offset<void>> *values,
+ const flatbuffers::Vector<uint8_t> *types);
+
+enum BuiltinOperator
+{
+ BuiltinOperator_ADD = 0,
+ BuiltinOperator_AVERAGE_POOL_2D = 1,
+ BuiltinOperator_CONCATENATION = 2,
+ BuiltinOperator_CONV_2D = 3,
+ BuiltinOperator_DEPTHWISE_CONV_2D = 4,
+ BuiltinOperator_DEQUANTIZE = 6,
+ BuiltinOperator_EMBEDDING_LOOKUP = 7,
+ BuiltinOperator_FLOOR = 8,
+ BuiltinOperator_FULLY_CONNECTED = 9,
+ BuiltinOperator_HASHTABLE_LOOKUP = 10,
+ BuiltinOperator_L2_NORMALIZATION = 11,
+ BuiltinOperator_L2_POOL_2D = 12,
+ BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION = 13,
+ BuiltinOperator_LOGISTIC = 14,
+ BuiltinOperator_LSH_PROJECTION = 15,
+ BuiltinOperator_LSTM = 16,
+ BuiltinOperator_MAX_POOL_2D = 17,
+ BuiltinOperator_MUL = 18,
+ BuiltinOperator_RELU = 19,
+ BuiltinOperator_RELU_N1_TO_1 = 20,
+ BuiltinOperator_RELU6 = 21,
+ BuiltinOperator_RESHAPE = 22,
+ BuiltinOperator_RESIZE_BILINEAR = 23,
+ BuiltinOperator_RNN = 24,
+ BuiltinOperator_SOFTMAX = 25,
+ BuiltinOperator_SPACE_TO_DEPTH = 26,
+ BuiltinOperator_SVDF = 27,
+ BuiltinOperator_TANH = 28,
+ BuiltinOperator_CONCAT_EMBEDDINGS = 29,
+ BuiltinOperator_SKIP_GRAM = 30,
+ BuiltinOperator_CALL = 31,
+ BuiltinOperator_CUSTOM = 32,
+ BuiltinOperator_EMBEDDING_LOOKUP_SPARSE = 33,
+ BuiltinOperator_PAD = 34,
+ BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN = 35,
+ BuiltinOperator_GATHER = 36,
+ BuiltinOperator_BATCH_TO_SPACE_ND = 37,
+ BuiltinOperator_SPACE_TO_BATCH_ND = 38,
+ BuiltinOperator_TRANSPOSE = 39,
+ BuiltinOperator_MEAN = 40,
+ BuiltinOperator_SUB = 41,
+ BuiltinOperator_DIV = 42,
+ BuiltinOperator_SQUEEZE = 43,
+ BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM = 44,
+ BuiltinOperator_STRIDED_SLICE = 45,
+ BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN = 46,
+ BuiltinOperator_EXP = 47,
+ BuiltinOperator_TOPK_V2 = 48,
+ BuiltinOperator_SPLIT = 49,
+ BuiltinOperator_LOG_SOFTMAX = 50,
+ BuiltinOperator_DELEGATE = 51,
+ BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM = 52,
+ BuiltinOperator_CAST = 53,
+ BuiltinOperator_PRELU = 54,
+ BuiltinOperator_MAXIMUM = 55,
+ BuiltinOperator_ARG_MAX = 56,
+ BuiltinOperator_MINIMUM = 57,
+ BuiltinOperator_LESS = 58,
+ BuiltinOperator_NEG = 59,
+ BuiltinOperator_PADV2 = 60,
+ BuiltinOperator_GREATER = 61,
+ BuiltinOperator_GREATER_EQUAL = 62,
+ BuiltinOperator_LESS_EQUAL = 63,
+ BuiltinOperator_SELECT = 64,
+ BuiltinOperator_SLICE = 65,
+ BuiltinOperator_SIN = 66,
+ BuiltinOperator_TRANSPOSE_CONV = 67,
+ BuiltinOperator_SPARSE_TO_DENSE = 68,
+ BuiltinOperator_TILE = 69,
+ BuiltinOperator_EXPAND_DIMS = 70,
+ BuiltinOperator_EQUAL = 71,
+ BuiltinOperator_NOT_EQUAL = 72,
+ BuiltinOperator_LOG = 73,
+ BuiltinOperator_SUM = 74,
+ BuiltinOperator_SQRT = 75,
+ BuiltinOperator_RSQRT = 76,
+ BuiltinOperator_SHAPE = 77,
+ BuiltinOperator_POW = 78,
+ BuiltinOperator_ARG_MIN = 79,
+ BuiltinOperator_FAKE_QUANT = 80,
+ BuiltinOperator_REDUCE_PROD = 81,
+ BuiltinOperator_REDUCE_MAX = 82,
+ BuiltinOperator_PACK = 83,
+ BuiltinOperator_LOGICAL_OR = 84,
+ BuiltinOperator_ONE_HOT = 85,
+ BuiltinOperator_LOGICAL_AND = 86,
+ BuiltinOperator_LOGICAL_NOT = 87,
+ BuiltinOperator_UNPACK = 88,
+ BuiltinOperator_REDUCE_MIN = 89,
+ BuiltinOperator_FLOOR_DIV = 90,
+ BuiltinOperator_REDUCE_ANY = 91,
+ BuiltinOperator_SQUARE = 92,
+ BuiltinOperator_ZEROS_LIKE = 93,
+ BuiltinOperator_FILL = 94,
+ BuiltinOperator_FLOOR_MOD = 95,
+ BuiltinOperator_RANGE = 96,
+ BuiltinOperator_RESIZE_NEAREST_NEIGHBOR = 97,
+ BuiltinOperator_LEAKY_RELU = 98,
+ BuiltinOperator_SQUARED_DIFFERENCE = 99,
+ BuiltinOperator_MIRROR_PAD = 100,
+ BuiltinOperator_ABS = 101,
+ BuiltinOperator_SPLIT_V = 102,
+ BuiltinOperator_MIN = BuiltinOperator_ADD,
+ BuiltinOperator_MAX = BuiltinOperator_SPLIT_V
+};
+
+inline const BuiltinOperator (&EnumValuesBuiltinOperator())[102]
+{
+ static const BuiltinOperator values[] = {BuiltinOperator_ADD,
+ BuiltinOperator_AVERAGE_POOL_2D,
+ BuiltinOperator_CONCATENATION,
+ BuiltinOperator_CONV_2D,
+ BuiltinOperator_DEPTHWISE_CONV_2D,
+ BuiltinOperator_DEQUANTIZE,
+ BuiltinOperator_EMBEDDING_LOOKUP,
+ BuiltinOperator_FLOOR,
+ BuiltinOperator_FULLY_CONNECTED,
+ BuiltinOperator_HASHTABLE_LOOKUP,
+ BuiltinOperator_L2_NORMALIZATION,
+ BuiltinOperator_L2_POOL_2D,
+ BuiltinOperator_LOCAL_RESPONSE_NORMALIZATION,
+ BuiltinOperator_LOGISTIC,
+ BuiltinOperator_LSH_PROJECTION,
+ BuiltinOperator_LSTM,
+ BuiltinOperator_MAX_POOL_2D,
+ BuiltinOperator_MUL,
+ BuiltinOperator_RELU,
+ BuiltinOperator_RELU_N1_TO_1,
+ BuiltinOperator_RELU6,
+ BuiltinOperator_RESHAPE,
+ BuiltinOperator_RESIZE_BILINEAR,
+ BuiltinOperator_RNN,
+ BuiltinOperator_SOFTMAX,
+ BuiltinOperator_SPACE_TO_DEPTH,
+ BuiltinOperator_SVDF,
+ BuiltinOperator_TANH,
+ BuiltinOperator_CONCAT_EMBEDDINGS,
+ BuiltinOperator_SKIP_GRAM,
+ BuiltinOperator_CALL,
+ BuiltinOperator_CUSTOM,
+ BuiltinOperator_EMBEDDING_LOOKUP_SPARSE,
+ BuiltinOperator_PAD,
+ BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_RNN,
+ BuiltinOperator_GATHER,
+ BuiltinOperator_BATCH_TO_SPACE_ND,
+ BuiltinOperator_SPACE_TO_BATCH_ND,
+ BuiltinOperator_TRANSPOSE,
+ BuiltinOperator_MEAN,
+ BuiltinOperator_SUB,
+ BuiltinOperator_DIV,
+ BuiltinOperator_SQUEEZE,
+ BuiltinOperator_UNIDIRECTIONAL_SEQUENCE_LSTM,
+ BuiltinOperator_STRIDED_SLICE,
+ BuiltinOperator_BIDIRECTIONAL_SEQUENCE_RNN,
+ BuiltinOperator_EXP,
+ BuiltinOperator_TOPK_V2,
+ BuiltinOperator_SPLIT,
+ BuiltinOperator_LOG_SOFTMAX,
+ BuiltinOperator_DELEGATE,
+ BuiltinOperator_BIDIRECTIONAL_SEQUENCE_LSTM,
+ BuiltinOperator_CAST,
+ BuiltinOperator_PRELU,
+ BuiltinOperator_MAXIMUM,
+ BuiltinOperator_ARG_MAX,
+ BuiltinOperator_MINIMUM,
+ BuiltinOperator_LESS,
+ BuiltinOperator_NEG,
+ BuiltinOperator_PADV2,
+ BuiltinOperator_GREATER,
+ BuiltinOperator_GREATER_EQUAL,
+ BuiltinOperator_LESS_EQUAL,
+ BuiltinOperator_SELECT,
+ BuiltinOperator_SLICE,
+ BuiltinOperator_SIN,
+ BuiltinOperator_TRANSPOSE_CONV,
+ BuiltinOperator_SPARSE_TO_DENSE,
+ BuiltinOperator_TILE,
+ BuiltinOperator_EXPAND_DIMS,
+ BuiltinOperator_EQUAL,
+ BuiltinOperator_NOT_EQUAL,
+ BuiltinOperator_LOG,
+ BuiltinOperator_SUM,
+ BuiltinOperator_SQRT,
+ BuiltinOperator_RSQRT,
+ BuiltinOperator_SHAPE,
+ BuiltinOperator_POW,
+ BuiltinOperator_ARG_MIN,
+ BuiltinOperator_FAKE_QUANT,
+ BuiltinOperator_REDUCE_PROD,
+ BuiltinOperator_REDUCE_MAX,
+ BuiltinOperator_PACK,
+ BuiltinOperator_LOGICAL_OR,
+ BuiltinOperator_ONE_HOT,
+ BuiltinOperator_LOGICAL_AND,
+ BuiltinOperator_LOGICAL_NOT,
+ BuiltinOperator_UNPACK,
+ BuiltinOperator_REDUCE_MIN,
+ BuiltinOperator_FLOOR_DIV,
+ BuiltinOperator_REDUCE_ANY,
+ BuiltinOperator_SQUARE,
+ BuiltinOperator_ZEROS_LIKE,
+ BuiltinOperator_FILL,
+ BuiltinOperator_FLOOR_MOD,
+ BuiltinOperator_RANGE,
+ BuiltinOperator_RESIZE_NEAREST_NEIGHBOR,
+ BuiltinOperator_LEAKY_RELU,
+ BuiltinOperator_SQUARED_DIFFERENCE,
+ BuiltinOperator_MIRROR_PAD,
+ BuiltinOperator_ABS,
+ BuiltinOperator_SPLIT_V};
+ return values;
+}
+
+inline const char *const *EnumNamesBuiltinOperator()
+{
+ static const char *const names[] = {"ADD",
+ "AVERAGE_POOL_2D",
+ "CONCATENATION",
+ "CONV_2D",
+ "DEPTHWISE_CONV_2D",
+ "",
+ "DEQUANTIZE",
+ "EMBEDDING_LOOKUP",
+ "FLOOR",
+ "FULLY_CONNECTED",
+ "HASHTABLE_LOOKUP",
+ "L2_NORMALIZATION",
+ "L2_POOL_2D",
+ "LOCAL_RESPONSE_NORMALIZATION",
+ "LOGISTIC",
+ "LSH_PROJECTION",
+ "LSTM",
+ "MAX_POOL_2D",
+ "MUL",
+ "RELU",
+ "RELU_N1_TO_1",
+ "RELU6",
+ "RESHAPE",
+ "RESIZE_BILINEAR",
+ "RNN",
+ "SOFTMAX",
+ "SPACE_TO_DEPTH",
+ "SVDF",
+ "TANH",
+ "CONCAT_EMBEDDINGS",
+ "SKIP_GRAM",
+ "CALL",
+ "CUSTOM",
+ "EMBEDDING_LOOKUP_SPARSE",
+ "PAD",
+ "UNIDIRECTIONAL_SEQUENCE_RNN",
+ "GATHER",
+ "BATCH_TO_SPACE_ND",
+ "SPACE_TO_BATCH_ND",
+ "TRANSPOSE",
+ "MEAN",
+ "SUB",
+ "DIV",
+ "SQUEEZE",
+ "UNIDIRECTIONAL_SEQUENCE_LSTM",
+ "STRIDED_SLICE",
+ "BIDIRECTIONAL_SEQUENCE_RNN",
+ "EXP",
+ "TOPK_V2",
+ "SPLIT",
+ "LOG_SOFTMAX",
+ "DELEGATE",
+ "BIDIRECTIONAL_SEQUENCE_LSTM",
+ "CAST",
+ "PRELU",
+ "MAXIMUM",
+ "ARG_MAX",
+ "MINIMUM",
+ "LESS",
+ "NEG",
+ "PADV2",
+ "GREATER",
+ "GREATER_EQUAL",
+ "LESS_EQUAL",
+ "SELECT",
+ "SLICE",
+ "SIN",
+ "TRANSPOSE_CONV",
+ "SPARSE_TO_DENSE",
+ "TILE",
+ "EXPAND_DIMS",
+ "EQUAL",
+ "NOT_EQUAL",
+ "LOG",
+ "SUM",
+ "SQRT",
+ "RSQRT",
+ "SHAPE",
+ "POW",
+ "ARG_MIN",
+ "FAKE_QUANT",
+ "REDUCE_PROD",
+ "REDUCE_MAX",
+ "PACK",
+ "LOGICAL_OR",
+ "ONE_HOT",
+ "LOGICAL_AND",
+ "LOGICAL_NOT",
+ "UNPACK",
+ "REDUCE_MIN",
+ "FLOOR_DIV",
+ "REDUCE_ANY",
+ "SQUARE",
+ "ZEROS_LIKE",
+ "FILL",
+ "FLOOR_MOD",
+ "RANGE",
+ "RESIZE_NEAREST_NEIGHBOR",
+ "LEAKY_RELU",
+ "SQUARED_DIFFERENCE",
+ "MIRROR_PAD",
+ "ABS",
+ "SPLIT_V",
+ nullptr};
+ return names;
+}
+
+inline const char *EnumNameBuiltinOperator(BuiltinOperator e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesBuiltinOperator()[index];
+}
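+// Editorial note: BuiltinOperator skips value 5 (DEPTH_TO_SPACE is commented
+// out in the schema), so EnumNamesBuiltinOperator() keeps an empty string at
+// index 5 and the index lookup above stays aligned; e.g.
+// EnumNameBuiltinOperator(BuiltinOperator_DEQUANTIZE) yields "DEQUANTIZE".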
+
+enum BuiltinOptions
+{
+ BuiltinOptions_NONE = 0,
+ BuiltinOptions_Conv2DOptions = 1,
+ BuiltinOptions_DepthwiseConv2DOptions = 2,
+ BuiltinOptions_ConcatEmbeddingsOptions = 3,
+ BuiltinOptions_LSHProjectionOptions = 4,
+ BuiltinOptions_Pool2DOptions = 5,
+ BuiltinOptions_SVDFOptions = 6,
+ BuiltinOptions_RNNOptions = 7,
+ BuiltinOptions_FullyConnectedOptions = 8,
+ BuiltinOptions_SoftmaxOptions = 9,
+ BuiltinOptions_ConcatenationOptions = 10,
+ BuiltinOptions_AddOptions = 11,
+ BuiltinOptions_L2NormOptions = 12,
+ BuiltinOptions_LocalResponseNormalizationOptions = 13,
+ BuiltinOptions_LSTMOptions = 14,
+ BuiltinOptions_ResizeBilinearOptions = 15,
+ BuiltinOptions_CallOptions = 16,
+ BuiltinOptions_ReshapeOptions = 17,
+ BuiltinOptions_SkipGramOptions = 18,
+ BuiltinOptions_SpaceToDepthOptions = 19,
+ BuiltinOptions_EmbeddingLookupSparseOptions = 20,
+ BuiltinOptions_MulOptions = 21,
+ BuiltinOptions_PadOptions = 22,
+ BuiltinOptions_GatherOptions = 23,
+ BuiltinOptions_BatchToSpaceNDOptions = 24,
+ BuiltinOptions_SpaceToBatchNDOptions = 25,
+ BuiltinOptions_TransposeOptions = 26,
+ BuiltinOptions_ReducerOptions = 27,
+ BuiltinOptions_SubOptions = 28,
+ BuiltinOptions_DivOptions = 29,
+ BuiltinOptions_SqueezeOptions = 30,
+ BuiltinOptions_SequenceRNNOptions = 31,
+ BuiltinOptions_StridedSliceOptions = 32,
+ BuiltinOptions_ExpOptions = 33,
+ BuiltinOptions_TopKV2Options = 34,
+ BuiltinOptions_SplitOptions = 35,
+ BuiltinOptions_LogSoftmaxOptions = 36,
+ BuiltinOptions_CastOptions = 37,
+ BuiltinOptions_DequantizeOptions = 38,
+ BuiltinOptions_MaximumMinimumOptions = 39,
+ BuiltinOptions_ArgMaxOptions = 40,
+ BuiltinOptions_LessOptions = 41,
+ BuiltinOptions_NegOptions = 42,
+ BuiltinOptions_PadV2Options = 43,
+ BuiltinOptions_GreaterOptions = 44,
+ BuiltinOptions_GreaterEqualOptions = 45,
+ BuiltinOptions_LessEqualOptions = 46,
+ BuiltinOptions_SelectOptions = 47,
+ BuiltinOptions_SliceOptions = 48,
+ BuiltinOptions_TransposeConvOptions = 49,
+ BuiltinOptions_SparseToDenseOptions = 50,
+ BuiltinOptions_TileOptions = 51,
+ BuiltinOptions_ExpandDimsOptions = 52,
+ BuiltinOptions_EqualOptions = 53,
+ BuiltinOptions_NotEqualOptions = 54,
+ BuiltinOptions_ShapeOptions = 55,
+ BuiltinOptions_PowOptions = 56,
+ BuiltinOptions_ArgMinOptions = 57,
+ BuiltinOptions_FakeQuantOptions = 58,
+ BuiltinOptions_PackOptions = 59,
+ BuiltinOptions_LogicalOrOptions = 60,
+ BuiltinOptions_OneHotOptions = 61,
+ BuiltinOptions_LogicalAndOptions = 62,
+ BuiltinOptions_LogicalNotOptions = 63,
+ BuiltinOptions_UnpackOptions = 64,
+ BuiltinOptions_FloorDivOptions = 65,
+ BuiltinOptions_SquareOptions = 66,
+ BuiltinOptions_ZerosLikeOptions = 67,
+ BuiltinOptions_FillOptions = 68,
+ BuiltinOptions_BidirectionalSequenceLSTMOptions = 69,
+ BuiltinOptions_BidirectionalSequenceRNNOptions = 70,
+ BuiltinOptions_UnidirectionalSequenceLSTMOptions = 71,
+ BuiltinOptions_FloorModOptions = 72,
+ BuiltinOptions_RangeOptions = 73,
+ BuiltinOptions_ResizeNearestNeighborOptions = 74,
+ BuiltinOptions_LeakyReluOptions = 75,
+ BuiltinOptions_SquaredDifferenceOptions = 76,
+ BuiltinOptions_MirrorPadOptions = 77,
+ BuiltinOptions_AbsOptions = 78,
+ BuiltinOptions_SplitVOptions = 79,
+ BuiltinOptions_MIN = BuiltinOptions_NONE,
+ BuiltinOptions_MAX = BuiltinOptions_SplitVOptions
+};
+
+inline const BuiltinOptions (&EnumValuesBuiltinOptions())[80]
+{
+ static const BuiltinOptions values[] = {BuiltinOptions_NONE,
+ BuiltinOptions_Conv2DOptions,
+ BuiltinOptions_DepthwiseConv2DOptions,
+ BuiltinOptions_ConcatEmbeddingsOptions,
+ BuiltinOptions_LSHProjectionOptions,
+ BuiltinOptions_Pool2DOptions,
+ BuiltinOptions_SVDFOptions,
+ BuiltinOptions_RNNOptions,
+ BuiltinOptions_FullyConnectedOptions,
+ BuiltinOptions_SoftmaxOptions,
+ BuiltinOptions_ConcatenationOptions,
+ BuiltinOptions_AddOptions,
+ BuiltinOptions_L2NormOptions,
+ BuiltinOptions_LocalResponseNormalizationOptions,
+ BuiltinOptions_LSTMOptions,
+ BuiltinOptions_ResizeBilinearOptions,
+ BuiltinOptions_CallOptions,
+ BuiltinOptions_ReshapeOptions,
+ BuiltinOptions_SkipGramOptions,
+ BuiltinOptions_SpaceToDepthOptions,
+ BuiltinOptions_EmbeddingLookupSparseOptions,
+ BuiltinOptions_MulOptions,
+ BuiltinOptions_PadOptions,
+ BuiltinOptions_GatherOptions,
+ BuiltinOptions_BatchToSpaceNDOptions,
+ BuiltinOptions_SpaceToBatchNDOptions,
+ BuiltinOptions_TransposeOptions,
+ BuiltinOptions_ReducerOptions,
+ BuiltinOptions_SubOptions,
+ BuiltinOptions_DivOptions,
+ BuiltinOptions_SqueezeOptions,
+ BuiltinOptions_SequenceRNNOptions,
+ BuiltinOptions_StridedSliceOptions,
+ BuiltinOptions_ExpOptions,
+ BuiltinOptions_TopKV2Options,
+ BuiltinOptions_SplitOptions,
+ BuiltinOptions_LogSoftmaxOptions,
+ BuiltinOptions_CastOptions,
+ BuiltinOptions_DequantizeOptions,
+ BuiltinOptions_MaximumMinimumOptions,
+ BuiltinOptions_ArgMaxOptions,
+ BuiltinOptions_LessOptions,
+ BuiltinOptions_NegOptions,
+ BuiltinOptions_PadV2Options,
+ BuiltinOptions_GreaterOptions,
+ BuiltinOptions_GreaterEqualOptions,
+ BuiltinOptions_LessEqualOptions,
+ BuiltinOptions_SelectOptions,
+ BuiltinOptions_SliceOptions,
+ BuiltinOptions_TransposeConvOptions,
+ BuiltinOptions_SparseToDenseOptions,
+ BuiltinOptions_TileOptions,
+ BuiltinOptions_ExpandDimsOptions,
+ BuiltinOptions_EqualOptions,
+ BuiltinOptions_NotEqualOptions,
+ BuiltinOptions_ShapeOptions,
+ BuiltinOptions_PowOptions,
+ BuiltinOptions_ArgMinOptions,
+ BuiltinOptions_FakeQuantOptions,
+ BuiltinOptions_PackOptions,
+ BuiltinOptions_LogicalOrOptions,
+ BuiltinOptions_OneHotOptions,
+ BuiltinOptions_LogicalAndOptions,
+ BuiltinOptions_LogicalNotOptions,
+ BuiltinOptions_UnpackOptions,
+ BuiltinOptions_FloorDivOptions,
+ BuiltinOptions_SquareOptions,
+ BuiltinOptions_ZerosLikeOptions,
+ BuiltinOptions_FillOptions,
+ BuiltinOptions_BidirectionalSequenceLSTMOptions,
+ BuiltinOptions_BidirectionalSequenceRNNOptions,
+ BuiltinOptions_UnidirectionalSequenceLSTMOptions,
+ BuiltinOptions_FloorModOptions,
+ BuiltinOptions_RangeOptions,
+ BuiltinOptions_ResizeNearestNeighborOptions,
+ BuiltinOptions_LeakyReluOptions,
+ BuiltinOptions_SquaredDifferenceOptions,
+ BuiltinOptions_MirrorPadOptions,
+ BuiltinOptions_AbsOptions,
+ BuiltinOptions_SplitVOptions};
+ return values;
+}
+
+inline const char *const *EnumNamesBuiltinOptions()
+{
+ static const char *const names[] = {"NONE",
+ "Conv2DOptions",
+ "DepthwiseConv2DOptions",
+ "ConcatEmbeddingsOptions",
+ "LSHProjectionOptions",
+ "Pool2DOptions",
+ "SVDFOptions",
+ "RNNOptions",
+ "FullyConnectedOptions",
+ "SoftmaxOptions",
+ "ConcatenationOptions",
+ "AddOptions",
+ "L2NormOptions",
+ "LocalResponseNormalizationOptions",
+ "LSTMOptions",
+ "ResizeBilinearOptions",
+ "CallOptions",
+ "ReshapeOptions",
+ "SkipGramOptions",
+ "SpaceToDepthOptions",
+ "EmbeddingLookupSparseOptions",
+ "MulOptions",
+ "PadOptions",
+ "GatherOptions",
+ "BatchToSpaceNDOptions",
+ "SpaceToBatchNDOptions",
+ "TransposeOptions",
+ "ReducerOptions",
+ "SubOptions",
+ "DivOptions",
+ "SqueezeOptions",
+ "SequenceRNNOptions",
+ "StridedSliceOptions",
+ "ExpOptions",
+ "TopKV2Options",
+ "SplitOptions",
+ "LogSoftmaxOptions",
+ "CastOptions",
+ "DequantizeOptions",
+ "MaximumMinimumOptions",
+ "ArgMaxOptions",
+ "LessOptions",
+ "NegOptions",
+ "PadV2Options",
+ "GreaterOptions",
+ "GreaterEqualOptions",
+ "LessEqualOptions",
+ "SelectOptions",
+ "SliceOptions",
+ "TransposeConvOptions",
+ "SparseToDenseOptions",
+ "TileOptions",
+ "ExpandDimsOptions",
+ "EqualOptions",
+ "NotEqualOptions",
+ "ShapeOptions",
+ "PowOptions",
+ "ArgMinOptions",
+ "FakeQuantOptions",
+ "PackOptions",
+ "LogicalOrOptions",
+ "OneHotOptions",
+ "LogicalAndOptions",
+ "LogicalNotOptions",
+ "UnpackOptions",
+ "FloorDivOptions",
+ "SquareOptions",
+ "ZerosLikeOptions",
+ "FillOptions",
+ "BidirectionalSequenceLSTMOptions",
+ "BidirectionalSequenceRNNOptions",
+ "UnidirectionalSequenceLSTMOptions",
+ "FloorModOptions",
+ "RangeOptions",
+ "ResizeNearestNeighborOptions",
+ "LeakyReluOptions",
+ "SquaredDifferenceOptions",
+ "MirrorPadOptions",
+ "AbsOptions",
+ "SplitVOptions",
+ nullptr};
+ return names;
+}
+
+inline const char *EnumNameBuiltinOptions(BuiltinOptions e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesBuiltinOptions()[index];
+}
+
+template <typename T> struct BuiltinOptionsTraits
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_NONE;
+};
+
+template <> struct BuiltinOptionsTraits<Conv2DOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_Conv2DOptions;
+};
+
+template <> struct BuiltinOptionsTraits<DepthwiseConv2DOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_DepthwiseConv2DOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ConcatEmbeddingsOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ConcatEmbeddingsOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LSHProjectionOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LSHProjectionOptions;
+};
+
+template <> struct BuiltinOptionsTraits<Pool2DOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_Pool2DOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SVDFOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SVDFOptions;
+};
+
+template <> struct BuiltinOptionsTraits<RNNOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_RNNOptions;
+};
+
+template <> struct BuiltinOptionsTraits<FullyConnectedOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_FullyConnectedOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SoftmaxOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SoftmaxOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ConcatenationOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ConcatenationOptions;
+};
+
+template <> struct BuiltinOptionsTraits<AddOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_AddOptions;
+};
+
+template <> struct BuiltinOptionsTraits<L2NormOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_L2NormOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LocalResponseNormalizationOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LocalResponseNormalizationOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LSTMOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LSTMOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ResizeBilinearOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ResizeBilinearOptions;
+};
+
+template <> struct BuiltinOptionsTraits<CallOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_CallOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ReshapeOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ReshapeOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SkipGramOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SkipGramOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SpaceToDepthOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SpaceToDepthOptions;
+};
+
+template <> struct BuiltinOptionsTraits<EmbeddingLookupSparseOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_EmbeddingLookupSparseOptions;
+};
+
+template <> struct BuiltinOptionsTraits<MulOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_MulOptions;
+};
+
+template <> struct BuiltinOptionsTraits<PadOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_PadOptions;
+};
+
+template <> struct BuiltinOptionsTraits<GatherOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_GatherOptions;
+};
+
+template <> struct BuiltinOptionsTraits<BatchToSpaceNDOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_BatchToSpaceNDOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SpaceToBatchNDOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SpaceToBatchNDOptions;
+};
+
+template <> struct BuiltinOptionsTraits<TransposeOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_TransposeOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ReducerOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ReducerOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SubOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SubOptions;
+};
+
+template <> struct BuiltinOptionsTraits<DivOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_DivOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SqueezeOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SqueezeOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SequenceRNNOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SequenceRNNOptions;
+};
+
+template <> struct BuiltinOptionsTraits<StridedSliceOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_StridedSliceOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ExpOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ExpOptions;
+};
+
+template <> struct BuiltinOptionsTraits<TopKV2Options>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_TopKV2Options;
+};
+
+template <> struct BuiltinOptionsTraits<SplitOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SplitOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LogSoftmaxOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LogSoftmaxOptions;
+};
+
+template <> struct BuiltinOptionsTraits<CastOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_CastOptions;
+};
+
+template <> struct BuiltinOptionsTraits<DequantizeOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_DequantizeOptions;
+};
+
+template <> struct BuiltinOptionsTraits<MaximumMinimumOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_MaximumMinimumOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ArgMaxOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ArgMaxOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LessOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LessOptions;
+};
+
+template <> struct BuiltinOptionsTraits<NegOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_NegOptions;
+};
+
+template <> struct BuiltinOptionsTraits<PadV2Options>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_PadV2Options;
+};
+
+template <> struct BuiltinOptionsTraits<GreaterOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_GreaterOptions;
+};
+
+template <> struct BuiltinOptionsTraits<GreaterEqualOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_GreaterEqualOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LessEqualOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LessEqualOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SelectOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SelectOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SliceOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SliceOptions;
+};
+
+template <> struct BuiltinOptionsTraits<TransposeConvOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_TransposeConvOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SparseToDenseOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SparseToDenseOptions;
+};
+
+template <> struct BuiltinOptionsTraits<TileOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_TileOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ExpandDimsOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ExpandDimsOptions;
+};
+
+template <> struct BuiltinOptionsTraits<EqualOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_EqualOptions;
+};
+
+template <> struct BuiltinOptionsTraits<NotEqualOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_NotEqualOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ShapeOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ShapeOptions;
+};
+
+template <> struct BuiltinOptionsTraits<PowOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_PowOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ArgMinOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ArgMinOptions;
+};
+
+template <> struct BuiltinOptionsTraits<FakeQuantOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_FakeQuantOptions;
+};
+
+template <> struct BuiltinOptionsTraits<PackOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_PackOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LogicalOrOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LogicalOrOptions;
+};
+
+template <> struct BuiltinOptionsTraits<OneHotOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_OneHotOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LogicalAndOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LogicalAndOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LogicalNotOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LogicalNotOptions;
+};
+
+template <> struct BuiltinOptionsTraits<UnpackOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_UnpackOptions;
+};
+
+template <> struct BuiltinOptionsTraits<FloorDivOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_FloorDivOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SquareOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SquareOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ZerosLikeOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ZerosLikeOptions;
+};
+
+template <> struct BuiltinOptionsTraits<FillOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_FillOptions;
+};
+
+template <> struct BuiltinOptionsTraits<BidirectionalSequenceLSTMOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_BidirectionalSequenceLSTMOptions;
+};
+
+template <> struct BuiltinOptionsTraits<BidirectionalSequenceRNNOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_BidirectionalSequenceRNNOptions;
+};
+
+template <> struct BuiltinOptionsTraits<UnidirectionalSequenceLSTMOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_UnidirectionalSequenceLSTMOptions;
+};
+
+template <> struct BuiltinOptionsTraits<FloorModOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_FloorModOptions;
+};
+
+template <> struct BuiltinOptionsTraits<RangeOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_RangeOptions;
+};
+
+template <> struct BuiltinOptionsTraits<ResizeNearestNeighborOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_ResizeNearestNeighborOptions;
+};
+
+template <> struct BuiltinOptionsTraits<LeakyReluOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_LeakyReluOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SquaredDifferenceOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SquaredDifferenceOptions;
+};
+
+template <> struct BuiltinOptionsTraits<MirrorPadOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_MirrorPadOptions;
+};
+
+template <> struct BuiltinOptionsTraits<AbsOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_AbsOptions;
+};
+
+template <> struct BuiltinOptionsTraits<SplitVOptions>
+{
+ static const BuiltinOptions enum_value = BuiltinOptions_SplitVOptions;
+};
+
+bool VerifyBuiltinOptions(flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions type);
+bool VerifyBuiltinOptionsVector(flatbuffers::Verifier &verifier,
+ const flatbuffers::Vector<flatbuffers::Offset<void>> *values,
+ const flatbuffers::Vector<uint8_t> *types);
+
+enum Padding
+{
+ Padding_SAME = 0,
+ Padding_VALID = 1,
+ Padding_MIN = Padding_SAME,
+ Padding_MAX = Padding_VALID
+};
+
+inline const Padding (&EnumValuesPadding())[2]
+{
+ static const Padding values[] = {Padding_SAME, Padding_VALID};
+ return values;
+}
+
+inline const char *const *EnumNamesPadding()
+{
+ static const char *const names[] = {"SAME", "VALID", nullptr};
+ return names;
+}
+
+inline const char *EnumNamePadding(Padding e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesPadding()[index];
+}
+
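+// Usage note (illustrative, not part of the generated header): EnumNamePadding()
+// indexes the name table without a range check, so the argument must lie in
+// [Padding_MIN, Padding_MAX]; anything else is undefined behavior.
+//
+//   tflite::Padding p = tflite::Padding_VALID;
+//   if (p >= tflite::Padding_MIN && p <= tflite::Padding_MAX)
+//     printf("padding = %s\n", tflite::EnumNamePadding(p)); // prints "VALID"
+//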
+enum ActivationFunctionType
+{
+ ActivationFunctionType_NONE = 0,
+ ActivationFunctionType_RELU = 1,
+ ActivationFunctionType_RELU_N1_TO_1 = 2,
+ ActivationFunctionType_RELU6 = 3,
+ ActivationFunctionType_TANH = 4,
+ ActivationFunctionType_SIGN_BIT = 5,
+ ActivationFunctionType_MIN = ActivationFunctionType_NONE,
+ ActivationFunctionType_MAX = ActivationFunctionType_SIGN_BIT
+};
+
+inline const ActivationFunctionType (&EnumValuesActivationFunctionType())[6]
+{
+ static const ActivationFunctionType values[] = {
+ ActivationFunctionType_NONE, ActivationFunctionType_RELU,
+ ActivationFunctionType_RELU_N1_TO_1, ActivationFunctionType_RELU6,
+ ActivationFunctionType_TANH, ActivationFunctionType_SIGN_BIT};
+ return values;
+}
+
+inline const char *const *EnumNamesActivationFunctionType()
+{
+ static const char *const names[] = {"NONE", "RELU", "RELU_N1_TO_1", "RELU6",
+ "TANH", "SIGN_BIT", nullptr};
+ return names;
+}
+
+inline const char *EnumNameActivationFunctionType(ActivationFunctionType e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesActivationFunctionType()[index];
+}
+
+enum LSHProjectionType
+{
+ LSHProjectionType_UNKNOWN = 0,
+ LSHProjectionType_SPARSE = 1,
+ LSHProjectionType_DENSE = 2,
+ LSHProjectionType_MIN = LSHProjectionType_UNKNOWN,
+ LSHProjectionType_MAX = LSHProjectionType_DENSE
+};
+
+inline const LSHProjectionType (&EnumValuesLSHProjectionType())[3]
+{
+ static const LSHProjectionType values[] = {LSHProjectionType_UNKNOWN, LSHProjectionType_SPARSE,
+ LSHProjectionType_DENSE};
+ return values;
+}
+
+inline const char *const *EnumNamesLSHProjectionType()
+{
+ static const char *const names[] = {"UNKNOWN", "SPARSE", "DENSE", nullptr};
+ return names;
+}
+
+inline const char *EnumNameLSHProjectionType(LSHProjectionType e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesLSHProjectionType()[index];
+}
+
+enum FullyConnectedOptionsWeightsFormat
+{
+ FullyConnectedOptionsWeightsFormat_DEFAULT = 0,
+ FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8 = 1,
+ FullyConnectedOptionsWeightsFormat_MIN = FullyConnectedOptionsWeightsFormat_DEFAULT,
+ FullyConnectedOptionsWeightsFormat_MAX = FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8
+};
+
+inline const FullyConnectedOptionsWeightsFormat (&EnumValuesFullyConnectedOptionsWeightsFormat())[2]
+{
+ static const FullyConnectedOptionsWeightsFormat values[] = {
+ FullyConnectedOptionsWeightsFormat_DEFAULT,
+ FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8};
+ return values;
+}
+
+inline const char *const *EnumNamesFullyConnectedOptionsWeightsFormat()
+{
+ static const char *const names[] = {"DEFAULT", "SHUFFLED4x16INT8", nullptr};
+ return names;
+}
+
+inline const char *EnumNameFullyConnectedOptionsWeightsFormat(FullyConnectedOptionsWeightsFormat e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesFullyConnectedOptionsWeightsFormat()[index];
+}
+
+enum LSTMKernelType
+{
+ LSTMKernelType_FULL = 0,
+ LSTMKernelType_BASIC = 1,
+ LSTMKernelType_MIN = LSTMKernelType_FULL,
+ LSTMKernelType_MAX = LSTMKernelType_BASIC
+};
+
+inline const LSTMKernelType (&EnumValuesLSTMKernelType())[2]
+{
+ static const LSTMKernelType values[] = {LSTMKernelType_FULL, LSTMKernelType_BASIC};
+ return values;
+}
+
+inline const char *const *EnumNamesLSTMKernelType()
+{
+ static const char *const names[] = {"FULL", "BASIC", nullptr};
+ return names;
+}
+
+inline const char *EnumNameLSTMKernelType(LSTMKernelType e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesLSTMKernelType()[index];
+}
+
+enum CombinerType
+{
+ CombinerType_SUM = 0,
+ CombinerType_MEAN = 1,
+ CombinerType_SQRTN = 2,
+ CombinerType_MIN = CombinerType_SUM,
+ CombinerType_MAX = CombinerType_SQRTN
+};
+
+inline const CombinerType (&EnumValuesCombinerType())[3]
+{
+ static const CombinerType values[] = {CombinerType_SUM, CombinerType_MEAN, CombinerType_SQRTN};
+ return values;
+}
+
+inline const char *const *EnumNamesCombinerType()
+{
+ static const char *const names[] = {"SUM", "MEAN", "SQRTN", nullptr};
+ return names;
+}
+
+inline const char *EnumNameCombinerType(CombinerType e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesCombinerType()[index];
+}
+
+enum MirrorPadMode
+{
+ MirrorPadMode_REFLECT = 0,
+ MirrorPadMode_SYMMETRIC = 1,
+ MirrorPadMode_MIN = MirrorPadMode_REFLECT,
+ MirrorPadMode_MAX = MirrorPadMode_SYMMETRIC
+};
+
+inline const MirrorPadMode (&EnumValuesMirrorPadMode())[2]
+{
+ static const MirrorPadMode values[] = {MirrorPadMode_REFLECT, MirrorPadMode_SYMMETRIC};
+ return values;
+}
+
+inline const char *const *EnumNamesMirrorPadMode()
+{
+ static const char *const names[] = {"REFLECT", "SYMMETRIC", nullptr};
+ return names;
+}
+
+inline const char *EnumNameMirrorPadMode(MirrorPadMode e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesMirrorPadMode()[index];
+}
+
+enum CustomOptionsFormat
+{
+ CustomOptionsFormat_FLEXBUFFERS = 0,
+ CustomOptionsFormat_MIN = CustomOptionsFormat_FLEXBUFFERS,
+ CustomOptionsFormat_MAX = CustomOptionsFormat_FLEXBUFFERS
+};
+
+inline const CustomOptionsFormat (&EnumValuesCustomOptionsFormat())[1]
+{
+ static const CustomOptionsFormat values[] = {CustomOptionsFormat_FLEXBUFFERS};
+ return values;
+}
+
+inline const char *const *EnumNamesCustomOptionsFormat()
+{
+ static const char *const names[] = {"FLEXBUFFERS", nullptr};
+ return names;
+}
+
+inline const char *EnumNameCustomOptionsFormat(CustomOptionsFormat e)
+{
+ const size_t index = static_cast<int>(e);
+ return EnumNamesCustomOptionsFormat()[index];
+}
+
+struct CustomQuantization FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_CUSTOM = 4
+ };
+ const flatbuffers::Vector<uint8_t> *custom() const
+ {
+ return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_CUSTOM);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_CUSTOM) &&
+ verifier.VerifyVector(custom()) && verifier.EndTable();
+ }
+};
+
+struct CustomQuantizationBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_custom(flatbuffers::Offset<flatbuffers::Vector<uint8_t>> custom)
+ {
+ fbb_.AddOffset(CustomQuantization::VT_CUSTOM, custom);
+ }
+ explicit CustomQuantizationBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ CustomQuantizationBuilder &operator=(const CustomQuantizationBuilder &);
+ flatbuffers::Offset<CustomQuantization> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<CustomQuantization>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<CustomQuantization>
+CreateCustomQuantization(flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<uint8_t>> custom = 0)
+{
+ CustomQuantizationBuilder builder_(_fbb);
+ builder_.add_custom(custom);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<CustomQuantization>
+CreateCustomQuantizationDirect(flatbuffers::FlatBufferBuilder &_fbb,
+ const std::vector<uint8_t> *custom = nullptr)
+{
+ return tflite::CreateCustomQuantization(_fbb, custom ? _fbb.CreateVector<uint8_t>(*custom) : 0);
+}
+
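+// Usage sketch (illustrative only; the payload bytes are hypothetical):
+//
+//   flatbuffers::FlatBufferBuilder fbb;
+//   std::vector<uint8_t> payload = {0x01, 0x02, 0x03};
+//   auto cq = tflite::CreateCustomQuantizationDirect(fbb, &payload);
+//   // `cq` is then stored as QuantizationParameters.details with
+//   // details_type = QuantizationDetails_CustomQuantization.
+//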
+struct QuantizationParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_MIN = 4,
+ VT_MAX = 6,
+ VT_SCALE = 8,
+ VT_ZERO_POINT = 10,
+ VT_DETAILS_TYPE = 12,
+ VT_DETAILS = 14
+ };
+ const flatbuffers::Vector<float> *min() const
+ {
+ return GetPointer<const flatbuffers::Vector<float> *>(VT_MIN);
+ }
+ const flatbuffers::Vector<float> *max() const
+ {
+ return GetPointer<const flatbuffers::Vector<float> *>(VT_MAX);
+ }
+ const flatbuffers::Vector<float> *scale() const
+ {
+ return GetPointer<const flatbuffers::Vector<float> *>(VT_SCALE);
+ }
+ const flatbuffers::Vector<int64_t> *zero_point() const
+ {
+ return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_ZERO_POINT);
+ }
+ QuantizationDetails details_type() const
+ {
+ return static_cast<QuantizationDetails>(GetField<uint8_t>(VT_DETAILS_TYPE, 0));
+ }
+ const void *details() const { return GetPointer<const void *>(VT_DETAILS); }
+ template <typename T> const T *details_as() const;
+ const CustomQuantization *details_as_CustomQuantization() const
+ {
+ return details_type() == QuantizationDetails_CustomQuantization
+ ? static_cast<const CustomQuantization *>(details())
+ : nullptr;
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_MIN) &&
+ verifier.VerifyVector(min()) && VerifyOffset(verifier, VT_MAX) &&
+ verifier.VerifyVector(max()) && VerifyOffset(verifier, VT_SCALE) &&
+ verifier.VerifyVector(scale()) && VerifyOffset(verifier, VT_ZERO_POINT) &&
+ verifier.VerifyVector(zero_point()) && VerifyField<uint8_t>(verifier, VT_DETAILS_TYPE) &&
+ VerifyOffset(verifier, VT_DETAILS) &&
+ VerifyQuantizationDetails(verifier, details(), details_type()) && verifier.EndTable();
+ }
+};
+
+template <>
+inline const CustomQuantization *QuantizationParameters::details_as<CustomQuantization>() const
+{
+ return details_as_CustomQuantization();
+}
+
+struct QuantizationParametersBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_min(flatbuffers::Offset<flatbuffers::Vector<float>> min)
+ {
+ fbb_.AddOffset(QuantizationParameters::VT_MIN, min);
+ }
+ void add_max(flatbuffers::Offset<flatbuffers::Vector<float>> max)
+ {
+ fbb_.AddOffset(QuantizationParameters::VT_MAX, max);
+ }
+ void add_scale(flatbuffers::Offset<flatbuffers::Vector<float>> scale)
+ {
+ fbb_.AddOffset(QuantizationParameters::VT_SCALE, scale);
+ }
+ void add_zero_point(flatbuffers::Offset<flatbuffers::Vector<int64_t>> zero_point)
+ {
+ fbb_.AddOffset(QuantizationParameters::VT_ZERO_POINT, zero_point);
+ }
+ void add_details_type(QuantizationDetails details_type)
+ {
+ fbb_.AddElement<uint8_t>(QuantizationParameters::VT_DETAILS_TYPE,
+ static_cast<uint8_t>(details_type), 0);
+ }
+ void add_details(flatbuffers::Offset<void> details)
+ {
+ fbb_.AddOffset(QuantizationParameters::VT_DETAILS, details);
+ }
+ explicit QuantizationParametersBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ QuantizationParametersBuilder &operator=(const QuantizationParametersBuilder &);
+ flatbuffers::Offset<QuantizationParameters> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<QuantizationParameters>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<QuantizationParameters>
+CreateQuantizationParameters(flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<float>> min = 0,
+ flatbuffers::Offset<flatbuffers::Vector<float>> max = 0,
+ flatbuffers::Offset<flatbuffers::Vector<float>> scale = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int64_t>> zero_point = 0,
+ QuantizationDetails details_type = QuantizationDetails_NONE,
+ flatbuffers::Offset<void> details = 0)
+{
+ QuantizationParametersBuilder builder_(_fbb);
+ builder_.add_details(details);
+ builder_.add_zero_point(zero_point);
+ builder_.add_scale(scale);
+ builder_.add_max(max);
+ builder_.add_min(min);
+ builder_.add_details_type(details_type);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<QuantizationParameters> CreateQuantizationParametersDirect(
+ flatbuffers::FlatBufferBuilder &_fbb, const std::vector<float> *min = nullptr,
+ const std::vector<float> *max = nullptr, const std::vector<float> *scale = nullptr,
+ const std::vector<int64_t> *zero_point = nullptr,
+ QuantizationDetails details_type = QuantizationDetails_NONE,
+ flatbuffers::Offset<void> details = 0)
+{
+ return tflite::CreateQuantizationParameters(
+ _fbb, min ? _fbb.CreateVector<float>(*min) : 0, max ? _fbb.CreateVector<float>(*max) : 0,
+ scale ? _fbb.CreateVector<float>(*scale) : 0,
+ zero_point ? _fbb.CreateVector<int64_t>(*zero_point) : 0, details_type, details);
+}
+
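+// Usage sketch (illustrative only; the scale/zero-point values are hypothetical):
+// per-tensor affine quantization carries a single scale and a single zero point.
+//
+//   std::vector<float> scale = {0.0078125f};
+//   std::vector<int64_t> zero_point = {128};
+//   auto q = tflite::CreateQuantizationParametersDirect(
+//       fbb, /*min=*/nullptr, /*max=*/nullptr, &scale, &zero_point);
+//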
+struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_SHAPE = 4,
+ VT_TYPE = 6,
+ VT_BUFFER = 8,
+ VT_NAME = 10,
+ VT_QUANTIZATION = 12,
+ VT_IS_VARIABLE = 14
+ };
+ const flatbuffers::Vector<int32_t> *shape() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_SHAPE);
+ }
+ TensorType type() const { return static_cast<TensorType>(GetField<int8_t>(VT_TYPE, 0)); }
+ uint32_t buffer() const { return GetField<uint32_t>(VT_BUFFER, 0); }
+ const flatbuffers::String *name() const
+ {
+ return GetPointer<const flatbuffers::String *>(VT_NAME);
+ }
+ const QuantizationParameters *quantization() const
+ {
+ return GetPointer<const QuantizationParameters *>(VT_QUANTIZATION);
+ }
+ bool is_variable() const { return GetField<uint8_t>(VT_IS_VARIABLE, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_SHAPE) &&
+ verifier.VerifyVector(shape()) && VerifyField<int8_t>(verifier, VT_TYPE) &&
+ VerifyField<uint32_t>(verifier, VT_BUFFER) && VerifyOffset(verifier, VT_NAME) &&
+ verifier.VerifyString(name()) && VerifyOffset(verifier, VT_QUANTIZATION) &&
+ verifier.VerifyTable(quantization()) && VerifyField<uint8_t>(verifier, VT_IS_VARIABLE) &&
+ verifier.EndTable();
+ }
+};
+
+struct TensorBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_shape(flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape)
+ {
+ fbb_.AddOffset(Tensor::VT_SHAPE, shape);
+ }
+ void add_type(TensorType type)
+ {
+ fbb_.AddElement<int8_t>(Tensor::VT_TYPE, static_cast<int8_t>(type), 0);
+ }
+ void add_buffer(uint32_t buffer) { fbb_.AddElement<uint32_t>(Tensor::VT_BUFFER, buffer, 0); }
+ void add_name(flatbuffers::Offset<flatbuffers::String> name)
+ {
+ fbb_.AddOffset(Tensor::VT_NAME, name);
+ }
+ void add_quantization(flatbuffers::Offset<QuantizationParameters> quantization)
+ {
+ fbb_.AddOffset(Tensor::VT_QUANTIZATION, quantization);
+ }
+ void add_is_variable(bool is_variable)
+ {
+ fbb_.AddElement<uint8_t>(Tensor::VT_IS_VARIABLE, static_cast<uint8_t>(is_variable), 0);
+ }
+ explicit TensorBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ TensorBuilder &operator=(const TensorBuilder &);
+ flatbuffers::Offset<Tensor> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<Tensor>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<Tensor>
+CreateTensor(flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape = 0,
+ TensorType type = TensorType_FLOAT32, uint32_t buffer = 0,
+ flatbuffers::Offset<flatbuffers::String> name = 0,
+ flatbuffers::Offset<QuantizationParameters> quantization = 0, bool is_variable = false)
+{
+ TensorBuilder builder_(_fbb);
+ builder_.add_quantization(quantization);
+ builder_.add_name(name);
+ builder_.add_buffer(buffer);
+ builder_.add_shape(shape);
+ builder_.add_is_variable(is_variable);
+ builder_.add_type(type);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<Tensor> CreateTensorDirect(
+ flatbuffers::FlatBufferBuilder &_fbb, const std::vector<int32_t> *shape = nullptr,
+ TensorType type = TensorType_FLOAT32, uint32_t buffer = 0, const char *name = nullptr,
+ flatbuffers::Offset<QuantizationParameters> quantization = 0, bool is_variable = false)
+{
+ return tflite::CreateTensor(_fbb, shape ? _fbb.CreateVector<int32_t>(*shape) : 0, type, buffer,
+ name ? _fbb.CreateString(name) : 0, quantization, is_variable);
+}
+
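+// Usage sketch (illustrative only, reusing `fbb` and `q` from the sketches
+// above; the shape, buffer index, and name are hypothetical):
+//
+//   std::vector<int32_t> shape = {1, 224, 224, 3};
+//   auto tensor = tflite::CreateTensorDirect(fbb, &shape, tflite::TensorType_UINT8,
+//                                            /*buffer=*/1, "input", q);
+//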
+struct Conv2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_PADDING = 4,
+ VT_STRIDE_W = 6,
+ VT_STRIDE_H = 8,
+ VT_FUSED_ACTIVATION_FUNCTION = 10,
+ VT_DILATION_W_FACTOR = 12,
+ VT_DILATION_H_FACTOR = 14
+ };
+ Padding padding() const { return static_cast<Padding>(GetField<int8_t>(VT_PADDING, 0)); }
+ int32_t stride_w() const { return GetField<int32_t>(VT_STRIDE_W, 0); }
+ int32_t stride_h() const { return GetField<int32_t>(VT_STRIDE_H, 0); }
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ int32_t dilation_w_factor() const { return GetField<int32_t>(VT_DILATION_W_FACTOR, 1); }
+ int32_t dilation_h_factor() const { return GetField<int32_t>(VT_DILATION_H_FACTOR, 1); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_PADDING) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_W) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_H) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) &&
+ VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR) &&
+ VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR) && verifier.EndTable();
+ }
+};
+
+struct Conv2DOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_padding(Padding padding)
+ {
+ fbb_.AddElement<int8_t>(Conv2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
+ }
+ void add_stride_w(int32_t stride_w)
+ {
+ fbb_.AddElement<int32_t>(Conv2DOptions::VT_STRIDE_W, stride_w, 0);
+ }
+ void add_stride_h(int32_t stride_h)
+ {
+ fbb_.AddElement<int32_t>(Conv2DOptions::VT_STRIDE_H, stride_h, 0);
+ }
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(Conv2DOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ void add_dilation_w_factor(int32_t dilation_w_factor)
+ {
+ fbb_.AddElement<int32_t>(Conv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+ }
+ void add_dilation_h_factor(int32_t dilation_h_factor)
+ {
+ fbb_.AddElement<int32_t>(Conv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
+ }
+ explicit Conv2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ Conv2DOptionsBuilder &operator=(const Conv2DOptionsBuilder &);
+ flatbuffers::Offset<Conv2DOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<Conv2DOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<Conv2DOptions>
+CreateConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, Padding padding = Padding_SAME,
+ int32_t stride_w = 0, int32_t stride_h = 0,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE,
+ int32_t dilation_w_factor = 1, int32_t dilation_h_factor = 1)
+{
+ Conv2DOptionsBuilder builder_(_fbb);
+ builder_.add_dilation_h_factor(dilation_h_factor);
+ builder_.add_dilation_w_factor(dilation_w_factor);
+ builder_.add_stride_h(stride_h);
+ builder_.add_stride_w(stride_w);
+ builder_.add_fused_activation_function(fused_activation_function);
+ builder_.add_padding(padding);
+ return builder_.Finish();
+}
+
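+// Usage sketch (illustrative only; the strides are hypothetical). Note that
+// dilation_w_factor/dilation_h_factor default to 1, and AddElement skips any
+// field whose value equals its schema default (unless force_defaults is set),
+// so passing 1 writes nothing into the buffer.
+//
+//   auto conv = tflite::CreateConv2DOptions(fbb, tflite::Padding_SAME,
+//                                           /*stride_w=*/2, /*stride_h=*/2,
+//                                           tflite::ActivationFunctionType_RELU6);
+//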
+struct Pool2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_PADDING = 4,
+ VT_STRIDE_W = 6,
+ VT_STRIDE_H = 8,
+ VT_FILTER_WIDTH = 10,
+ VT_FILTER_HEIGHT = 12,
+ VT_FUSED_ACTIVATION_FUNCTION = 14
+ };
+ Padding padding() const { return static_cast<Padding>(GetField<int8_t>(VT_PADDING, 0)); }
+ int32_t stride_w() const { return GetField<int32_t>(VT_STRIDE_W, 0); }
+ int32_t stride_h() const { return GetField<int32_t>(VT_STRIDE_H, 0); }
+ int32_t filter_width() const { return GetField<int32_t>(VT_FILTER_WIDTH, 0); }
+ int32_t filter_height() const { return GetField<int32_t>(VT_FILTER_HEIGHT, 0); }
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_PADDING) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_W) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_H) &&
+ VerifyField<int32_t>(verifier, VT_FILTER_WIDTH) &&
+ VerifyField<int32_t>(verifier, VT_FILTER_HEIGHT) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct Pool2DOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_padding(Padding padding)
+ {
+ fbb_.AddElement<int8_t>(Pool2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
+ }
+ void add_stride_w(int32_t stride_w)
+ {
+ fbb_.AddElement<int32_t>(Pool2DOptions::VT_STRIDE_W, stride_w, 0);
+ }
+ void add_stride_h(int32_t stride_h)
+ {
+ fbb_.AddElement<int32_t>(Pool2DOptions::VT_STRIDE_H, stride_h, 0);
+ }
+ void add_filter_width(int32_t filter_width)
+ {
+ fbb_.AddElement<int32_t>(Pool2DOptions::VT_FILTER_WIDTH, filter_width, 0);
+ }
+ void add_filter_height(int32_t filter_height)
+ {
+ fbb_.AddElement<int32_t>(Pool2DOptions::VT_FILTER_HEIGHT, filter_height, 0);
+ }
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(Pool2DOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit Pool2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ Pool2DOptionsBuilder &operator=(const Pool2DOptionsBuilder &);
+ flatbuffers::Offset<Pool2DOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<Pool2DOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<Pool2DOptions>
+CreatePool2DOptions(flatbuffers::FlatBufferBuilder &_fbb, Padding padding = Padding_SAME,
+ int32_t stride_w = 0, int32_t stride_h = 0, int32_t filter_width = 0,
+ int32_t filter_height = 0,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ Pool2DOptionsBuilder builder_(_fbb);
+ builder_.add_filter_height(filter_height);
+ builder_.add_filter_width(filter_width);
+ builder_.add_stride_h(stride_h);
+ builder_.add_stride_w(stride_w);
+ builder_.add_fused_activation_function(fused_activation_function);
+ builder_.add_padding(padding);
+ return builder_.Finish();
+}
+
+struct DepthwiseConv2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_PADDING = 4,
+ VT_STRIDE_W = 6,
+ VT_STRIDE_H = 8,
+ VT_DEPTH_MULTIPLIER = 10,
+ VT_FUSED_ACTIVATION_FUNCTION = 12,
+ VT_DILATION_W_FACTOR = 14,
+ VT_DILATION_H_FACTOR = 16
+ };
+ Padding padding() const { return static_cast<Padding>(GetField<int8_t>(VT_PADDING, 0)); }
+ int32_t stride_w() const { return GetField<int32_t>(VT_STRIDE_W, 0); }
+ int32_t stride_h() const { return GetField<int32_t>(VT_STRIDE_H, 0); }
+ int32_t depth_multiplier() const { return GetField<int32_t>(VT_DEPTH_MULTIPLIER, 0); }
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ int32_t dilation_w_factor() const { return GetField<int32_t>(VT_DILATION_W_FACTOR, 1); }
+ int32_t dilation_h_factor() const { return GetField<int32_t>(VT_DILATION_H_FACTOR, 1); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_PADDING) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_W) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_H) &&
+ VerifyField<int32_t>(verifier, VT_DEPTH_MULTIPLIER) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) &&
+ VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR) &&
+ VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR) && verifier.EndTable();
+ }
+};
+
+struct DepthwiseConv2DOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_padding(Padding padding)
+ {
+ fbb_.AddElement<int8_t>(DepthwiseConv2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
+ }
+ void add_stride_w(int32_t stride_w)
+ {
+ fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_STRIDE_W, stride_w, 0);
+ }
+ void add_stride_h(int32_t stride_h)
+ {
+ fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_STRIDE_H, stride_h, 0);
+ }
+ void add_depth_multiplier(int32_t depth_multiplier)
+ {
+ fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DEPTH_MULTIPLIER, depth_multiplier, 0);
+ }
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(DepthwiseConv2DOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ void add_dilation_w_factor(int32_t dilation_w_factor)
+ {
+ fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+ }
+ void add_dilation_h_factor(int32_t dilation_h_factor)
+ {
+ fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
+ }
+ explicit DepthwiseConv2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ DepthwiseConv2DOptionsBuilder &operator=(const DepthwiseConv2DOptionsBuilder &);
+ flatbuffers::Offset<DepthwiseConv2DOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<DepthwiseConv2DOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(
+ flatbuffers::FlatBufferBuilder &_fbb, Padding padding = Padding_SAME, int32_t stride_w = 0,
+ int32_t stride_h = 0, int32_t depth_multiplier = 0,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE,
+ int32_t dilation_w_factor = 1, int32_t dilation_h_factor = 1)
+{
+ DepthwiseConv2DOptionsBuilder builder_(_fbb);
+ builder_.add_dilation_h_factor(dilation_h_factor);
+ builder_.add_dilation_w_factor(dilation_w_factor);
+ builder_.add_depth_multiplier(depth_multiplier);
+ builder_.add_stride_h(stride_h);
+ builder_.add_stride_w(stride_w);
+ builder_.add_fused_activation_function(fused_activation_function);
+ builder_.add_padding(padding);
+ return builder_.Finish();
+}
+
+struct ConcatEmbeddingsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_NUM_CHANNELS = 4,
+ VT_NUM_COLUMNS_PER_CHANNEL = 6,
+ VT_EMBEDDING_DIM_PER_CHANNEL = 8
+ };
+ int32_t num_channels() const { return GetField<int32_t>(VT_NUM_CHANNELS, 0); }
+ const flatbuffers::Vector<int32_t> *num_columns_per_channel() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_NUM_COLUMNS_PER_CHANNEL);
+ }
+ const flatbuffers::Vector<int32_t> *embedding_dim_per_channel() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_EMBEDDING_DIM_PER_CHANNEL);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_NUM_CHANNELS) &&
+ VerifyOffset(verifier, VT_NUM_COLUMNS_PER_CHANNEL) &&
+ verifier.VerifyVector(num_columns_per_channel()) &&
+ VerifyOffset(verifier, VT_EMBEDDING_DIM_PER_CHANNEL) &&
+ verifier.VerifyVector(embedding_dim_per_channel()) && verifier.EndTable();
+ }
+};
+
+struct ConcatEmbeddingsOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_num_channels(int32_t num_channels)
+ {
+ fbb_.AddElement<int32_t>(ConcatEmbeddingsOptions::VT_NUM_CHANNELS, num_channels, 0);
+ }
+ void add_num_columns_per_channel(
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> num_columns_per_channel)
+ {
+ fbb_.AddOffset(ConcatEmbeddingsOptions::VT_NUM_COLUMNS_PER_CHANNEL, num_columns_per_channel);
+ }
+ void add_embedding_dim_per_channel(
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> embedding_dim_per_channel)
+ {
+ fbb_.AddOffset(ConcatEmbeddingsOptions::VT_EMBEDDING_DIM_PER_CHANNEL,
+ embedding_dim_per_channel);
+ }
+ explicit ConcatEmbeddingsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ConcatEmbeddingsOptionsBuilder &operator=(const ConcatEmbeddingsOptionsBuilder &);
+ flatbuffers::Offset<ConcatEmbeddingsOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ConcatEmbeddingsOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptions(
+ flatbuffers::FlatBufferBuilder &_fbb, int32_t num_channels = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> num_columns_per_channel = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> embedding_dim_per_channel = 0)
+{
+ ConcatEmbeddingsOptionsBuilder builder_(_fbb);
+ builder_.add_embedding_dim_per_channel(embedding_dim_per_channel);
+ builder_.add_num_columns_per_channel(num_columns_per_channel);
+ builder_.add_num_channels(num_channels);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<ConcatEmbeddingsOptions>
+CreateConcatEmbeddingsOptionsDirect(flatbuffers::FlatBufferBuilder &_fbb, int32_t num_channels = 0,
+ const std::vector<int32_t> *num_columns_per_channel = nullptr,
+ const std::vector<int32_t> *embedding_dim_per_channel = nullptr)
+{
+ return tflite::CreateConcatEmbeddingsOptions(
+ _fbb, num_channels,
+ num_columns_per_channel ? _fbb.CreateVector<int32_t>(*num_columns_per_channel) : 0,
+ embedding_dim_per_channel ? _fbb.CreateVector<int32_t>(*embedding_dim_per_channel) : 0);
+}
+
+struct LSHProjectionOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_TYPE = 4
+ };
+ LSHProjectionType type() const
+ {
+ return static_cast<LSHProjectionType>(GetField<int8_t>(VT_TYPE, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_TYPE) &&
+ verifier.EndTable();
+ }
+};
+
+struct LSHProjectionOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_type(LSHProjectionType type)
+ {
+ fbb_.AddElement<int8_t>(LSHProjectionOptions::VT_TYPE, static_cast<int8_t>(type), 0);
+ }
+ explicit LSHProjectionOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LSHProjectionOptionsBuilder &operator=(const LSHProjectionOptionsBuilder &);
+ flatbuffers::Offset<LSHProjectionOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LSHProjectionOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LSHProjectionOptions>
+CreateLSHProjectionOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ LSHProjectionType type = LSHProjectionType_UNKNOWN)
+{
+ LSHProjectionOptionsBuilder builder_(_fbb);
+ builder_.add_type(type);
+ return builder_.Finish();
+}
+
+struct SVDFOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_RANK = 4,
+ VT_FUSED_ACTIVATION_FUNCTION = 6
+ };
+ int32_t rank() const { return GetField<int32_t>(VT_RANK, 0); }
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_RANK) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct SVDFOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_rank(int32_t rank) { fbb_.AddElement<int32_t>(SVDFOptions::VT_RANK, rank, 0); }
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(SVDFOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit SVDFOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SVDFOptionsBuilder &operator=(const SVDFOptionsBuilder &);
+ flatbuffers::Offset<SVDFOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SVDFOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SVDFOptions>
+CreateSVDFOptions(flatbuffers::FlatBufferBuilder &_fbb, int32_t rank = 0,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ SVDFOptionsBuilder builder_(_fbb);
+ builder_.add_rank(rank);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct RNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct RNNOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(RNNOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit RNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ RNNOptionsBuilder &operator=(const RNNOptionsBuilder &);
+ flatbuffers::Offset<RNNOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<RNNOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<RNNOptions>
+CreateRNNOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ RNNOptionsBuilder builder_(_fbb);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct SequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_TIME_MAJOR = 4,
+ VT_FUSED_ACTIVATION_FUNCTION = 6
+ };
+ bool time_major() const { return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0; }
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint8_t>(verifier, VT_TIME_MAJOR) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct SequenceRNNOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_time_major(bool time_major)
+ {
+ fbb_.AddElement<uint8_t>(SequenceRNNOptions::VT_TIME_MAJOR, static_cast<uint8_t>(time_major),
+ 0);
+ }
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(SequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit SequenceRNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SequenceRNNOptionsBuilder &operator=(const SequenceRNNOptionsBuilder &);
+ flatbuffers::Offset<SequenceRNNOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SequenceRNNOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(
+ flatbuffers::FlatBufferBuilder &_fbb, bool time_major = false,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ SequenceRNNOptionsBuilder builder_(_fbb);
+ builder_.add_fused_activation_function(fused_activation_function);
+ builder_.add_time_major(time_major);
+ return builder_.Finish();
+}
+
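+// Usage note (illustrative only): time_major = true lays inputs out as
+// [sequence, batch, feature] instead of the default [batch, sequence, feature].
+//
+//   auto rnn = tflite::CreateSequenceRNNOptions(fbb, /*time_major=*/true,
+//                                               tflite::ActivationFunctionType_RELU);
+//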
+struct BidirectionalSequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_TIME_MAJOR = 4,
+ VT_FUSED_ACTIVATION_FUNCTION = 6,
+ VT_MERGE_OUTPUTS = 8
+ };
+ bool time_major() const { return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0; }
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool merge_outputs() const { return GetField<uint8_t>(VT_MERGE_OUTPUTS, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint8_t>(verifier, VT_TIME_MAJOR) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) &&
+ VerifyField<uint8_t>(verifier, VT_MERGE_OUTPUTS) && verifier.EndTable();
+ }
+};
+
+struct BidirectionalSequenceRNNOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_time_major(bool time_major)
+ {
+ fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_TIME_MAJOR,
+ static_cast<uint8_t>(time_major), 0);
+ }
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(BidirectionalSequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ void add_merge_outputs(bool merge_outputs)
+ {
+ fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_MERGE_OUTPUTS,
+ static_cast<uint8_t>(merge_outputs), 0);
+ }
+ explicit BidirectionalSequenceRNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ BidirectionalSequenceRNNOptionsBuilder &operator=(const BidirectionalSequenceRNNOptionsBuilder &);
+ flatbuffers::Offset<BidirectionalSequenceRNNOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<BidirectionalSequenceRNNOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalSequenceRNNOptions(
+ flatbuffers::FlatBufferBuilder &_fbb, bool time_major = false,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE,
+ bool merge_outputs = false)
+{
+ BidirectionalSequenceRNNOptionsBuilder builder_(_fbb);
+ builder_.add_merge_outputs(merge_outputs);
+ builder_.add_fused_activation_function(fused_activation_function);
+ builder_.add_time_major(time_major);
+ return builder_.Finish();
+}
+
+struct FullyConnectedOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4,
+ VT_WEIGHTS_FORMAT = 6
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ FullyConnectedOptionsWeightsFormat weights_format() const
+ {
+ return static_cast<FullyConnectedOptionsWeightsFormat>(GetField<int8_t>(VT_WEIGHTS_FORMAT, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) &&
+ VerifyField<int8_t>(verifier, VT_WEIGHTS_FORMAT) && verifier.EndTable();
+ }
+};
+
+struct FullyConnectedOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(FullyConnectedOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ void add_weights_format(FullyConnectedOptionsWeightsFormat weights_format)
+ {
+ fbb_.AddElement<int8_t>(FullyConnectedOptions::VT_WEIGHTS_FORMAT,
+ static_cast<int8_t>(weights_format), 0);
+ }
+ explicit FullyConnectedOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ FullyConnectedOptionsBuilder &operator=(const FullyConnectedOptionsBuilder &);
+ flatbuffers::Offset<FullyConnectedOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<FullyConnectedOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<FullyConnectedOptions> CreateFullyConnectedOptions(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE,
+ FullyConnectedOptionsWeightsFormat weights_format = FullyConnectedOptionsWeightsFormat_DEFAULT)
+{
+ FullyConnectedOptionsBuilder builder_(_fbb);
+ builder_.add_weights_format(weights_format);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct SoftmaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_BETA = 4
+ };
+ float beta() const { return GetField<float>(VT_BETA, 0.0f); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<float>(verifier, VT_BETA) &&
+ verifier.EndTable();
+ }
+};
+
+struct SoftmaxOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_beta(float beta) { fbb_.AddElement<float>(SoftmaxOptions::VT_BETA, beta, 0.0f); }
+ explicit SoftmaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SoftmaxOptionsBuilder &operator=(const SoftmaxOptionsBuilder &);
+ flatbuffers::Offset<SoftmaxOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SoftmaxOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SoftmaxOptions>
+CreateSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, float beta = 0.0f)
+{
+ SoftmaxOptionsBuilder builder_(_fbb);
+ builder_.add_beta(beta);
+ return builder_.Finish();
+}
+
+struct ConcatenationOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_AXIS = 4,
+ VT_FUSED_ACTIVATION_FUNCTION = 6
+ };
+ int32_t axis() const { return GetField<int32_t>(VT_AXIS, 0); }
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_AXIS) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct ConcatenationOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_axis(int32_t axis) { fbb_.AddElement<int32_t>(ConcatenationOptions::VT_AXIS, axis, 0); }
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(ConcatenationOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit ConcatenationOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ConcatenationOptionsBuilder &operator=(const ConcatenationOptionsBuilder &);
+ flatbuffers::Offset<ConcatenationOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ConcatenationOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ConcatenationOptions> CreateConcatenationOptions(
+ flatbuffers::FlatBufferBuilder &_fbb, int32_t axis = 0,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ ConcatenationOptionsBuilder builder_(_fbb);
+ builder_.add_axis(axis);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct AddOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct AddOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(AddOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit AddOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ AddOptionsBuilder &operator=(const AddOptionsBuilder &);
+ flatbuffers::Offset<AddOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<AddOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<AddOptions>
+CreateAddOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ AddOptionsBuilder builder_(_fbb);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct MulOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct MulOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(MulOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit MulOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ MulOptionsBuilder &operator=(const MulOptionsBuilder &);
+ flatbuffers::Offset<MulOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<MulOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<MulOptions>
+CreateMulOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ MulOptionsBuilder builder_(_fbb);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct L2NormOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct L2NormOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(L2NormOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit L2NormOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ L2NormOptionsBuilder &operator=(const L2NormOptionsBuilder &);
+ flatbuffers::Offset<L2NormOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<L2NormOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<L2NormOptions>
+CreateL2NormOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ L2NormOptionsBuilder builder_(_fbb);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct LocalResponseNormalizationOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_RADIUS = 4,
+ VT_BIAS = 6,
+ VT_ALPHA = 8,
+ VT_BETA = 10
+ };
+ int32_t radius() const { return GetField<int32_t>(VT_RADIUS, 0); }
+ float bias() const { return GetField<float>(VT_BIAS, 0.0f); }
+ float alpha() const { return GetField<float>(VT_ALPHA, 0.0f); }
+ float beta() const { return GetField<float>(VT_BETA, 0.0f); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_RADIUS) &&
+ VerifyField<float>(verifier, VT_BIAS) && VerifyField<float>(verifier, VT_ALPHA) &&
+ VerifyField<float>(verifier, VT_BETA) && verifier.EndTable();
+ }
+};
+
+struct LocalResponseNormalizationOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_radius(int32_t radius)
+ {
+ fbb_.AddElement<int32_t>(LocalResponseNormalizationOptions::VT_RADIUS, radius, 0);
+ }
+ void add_bias(float bias)
+ {
+ fbb_.AddElement<float>(LocalResponseNormalizationOptions::VT_BIAS, bias, 0.0f);
+ }
+ void add_alpha(float alpha)
+ {
+ fbb_.AddElement<float>(LocalResponseNormalizationOptions::VT_ALPHA, alpha, 0.0f);
+ }
+ void add_beta(float beta)
+ {
+ fbb_.AddElement<float>(LocalResponseNormalizationOptions::VT_BETA, beta, 0.0f);
+ }
+ explicit LocalResponseNormalizationOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+ : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LocalResponseNormalizationOptionsBuilder &
+ operator=(const LocalResponseNormalizationOptionsBuilder &);
+ flatbuffers::Offset<LocalResponseNormalizationOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LocalResponseNormalizationOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LocalResponseNormalizationOptions>
+CreateLocalResponseNormalizationOptions(flatbuffers::FlatBufferBuilder &_fbb, int32_t radius = 0,
+ float bias = 0.0f, float alpha = 0.0f, float beta = 0.0f)
+{
+ LocalResponseNormalizationOptionsBuilder builder_(_fbb);
+ builder_.add_beta(beta);
+ builder_.add_alpha(alpha);
+ builder_.add_bias(bias);
+ builder_.add_radius(radius);
+ return builder_.Finish();
+}
+
+struct LSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4,
+ VT_CELL_CLIP = 6,
+ VT_PROJ_CLIP = 8,
+ VT_KERNEL_TYPE = 10
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ float cell_clip() const { return GetField<float>(VT_CELL_CLIP, 0.0f); }
+ float proj_clip() const { return GetField<float>(VT_PROJ_CLIP, 0.0f); }
+ LSTMKernelType kernel_type() const
+ {
+ return static_cast<LSTMKernelType>(GetField<int8_t>(VT_KERNEL_TYPE, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) &&
+ VerifyField<float>(verifier, VT_CELL_CLIP) &&
+ VerifyField<float>(verifier, VT_PROJ_CLIP) &&
+ VerifyField<int8_t>(verifier, VT_KERNEL_TYPE) && verifier.EndTable();
+ }
+};
+
+struct LSTMOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(LSTMOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ void add_cell_clip(float cell_clip)
+ {
+ fbb_.AddElement<float>(LSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f);
+ }
+ void add_proj_clip(float proj_clip)
+ {
+ fbb_.AddElement<float>(LSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f);
+ }
+ void add_kernel_type(LSTMKernelType kernel_type)
+ {
+ fbb_.AddElement<int8_t>(LSTMOptions::VT_KERNEL_TYPE, static_cast<int8_t>(kernel_type), 0);
+ }
+ explicit LSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LSTMOptionsBuilder &operator=(const LSTMOptionsBuilder &);
+ flatbuffers::Offset<LSTMOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LSTMOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LSTMOptions>
+CreateLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE,
+ float cell_clip = 0.0f, float proj_clip = 0.0f,
+ LSTMKernelType kernel_type = LSTMKernelType_FULL)
+{
+ LSTMOptionsBuilder builder_(_fbb);
+ builder_.add_proj_clip(proj_clip);
+ builder_.add_cell_clip(cell_clip);
+ builder_.add_kernel_type(kernel_type);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
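+// Usage sketch (illustrative only): in the TFLite schema a clip value of 0.0f
+// disables cell/projection clipping, and FULL selects the full LSTM kernel.
+//
+//   auto lstm = tflite::CreateLSTMOptions(fbb, tflite::ActivationFunctionType_TANH,
+//                                         /*cell_clip=*/0.0f, /*proj_clip=*/0.0f,
+//                                         tflite::LSTMKernelType_FULL);
+//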
+struct UnidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4,
+ VT_CELL_CLIP = 6,
+ VT_PROJ_CLIP = 8,
+ VT_TIME_MAJOR = 10
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ float cell_clip() const { return GetField<float>(VT_CELL_CLIP, 0.0f); }
+ float proj_clip() const { return GetField<float>(VT_PROJ_CLIP, 0.0f); }
+ bool time_major() const { return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) &&
+ VerifyField<float>(verifier, VT_CELL_CLIP) &&
+ VerifyField<float>(verifier, VT_PROJ_CLIP) &&
+ VerifyField<uint8_t>(verifier, VT_TIME_MAJOR) && verifier.EndTable();
+ }
+};
+
+struct UnidirectionalSequenceLSTMOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(UnidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ void add_cell_clip(float cell_clip)
+ {
+ fbb_.AddElement<float>(UnidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f);
+ }
+ void add_proj_clip(float proj_clip)
+ {
+ fbb_.AddElement<float>(UnidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f);
+ }
+ void add_time_major(bool time_major)
+ {
+ fbb_.AddElement<uint8_t>(UnidirectionalSequenceLSTMOptions::VT_TIME_MAJOR,
+ static_cast<uint8_t>(time_major), 0);
+ }
+ explicit UnidirectionalSequenceLSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+ : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ UnidirectionalSequenceLSTMOptionsBuilder &
+ operator=(const UnidirectionalSequenceLSTMOptionsBuilder &);
+ flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<UnidirectionalSequenceLSTMOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<UnidirectionalSequenceLSTMOptions>
+CreateUnidirectionalSequenceLSTMOptions(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE,
+ float cell_clip = 0.0f, float proj_clip = 0.0f, bool time_major = false)
+{
+ UnidirectionalSequenceLSTMOptionsBuilder builder_(_fbb);
+ builder_.add_proj_clip(proj_clip);
+ builder_.add_cell_clip(cell_clip);
+ builder_.add_time_major(time_major);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct BidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4,
+ VT_CELL_CLIP = 6,
+ VT_PROJ_CLIP = 8,
+ VT_MERGE_OUTPUTS = 10
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ float cell_clip() const { return GetField<float>(VT_CELL_CLIP, 0.0f); }
+ float proj_clip() const { return GetField<float>(VT_PROJ_CLIP, 0.0f); }
+ bool merge_outputs() const { return GetField<uint8_t>(VT_MERGE_OUTPUTS, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) &&
+ VerifyField<float>(verifier, VT_CELL_CLIP) &&
+ VerifyField<float>(verifier, VT_PROJ_CLIP) &&
+ VerifyField<uint8_t>(verifier, VT_MERGE_OUTPUTS) && verifier.EndTable();
+ }
+};
+
+struct BidirectionalSequenceLSTMOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(BidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ void add_cell_clip(float cell_clip)
+ {
+ fbb_.AddElement<float>(BidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f);
+ }
+ void add_proj_clip(float proj_clip)
+ {
+ fbb_.AddElement<float>(BidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f);
+ }
+ void add_merge_outputs(bool merge_outputs)
+ {
+ fbb_.AddElement<uint8_t>(BidirectionalSequenceLSTMOptions::VT_MERGE_OUTPUTS,
+ static_cast<uint8_t>(merge_outputs), 0);
+ }
+ explicit BidirectionalSequenceLSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+ : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ BidirectionalSequenceLSTMOptionsBuilder &
+ operator=(const BidirectionalSequenceLSTMOptionsBuilder &);
+ flatbuffers::Offset<BidirectionalSequenceLSTMOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<BidirectionalSequenceLSTMOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<BidirectionalSequenceLSTMOptions> CreateBidirectionalSequenceLSTMOptions(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE,
+ float cell_clip = 0.0f, float proj_clip = 0.0f, bool merge_outputs = false)
+{
+ BidirectionalSequenceLSTMOptionsBuilder builder_(_fbb);
+ builder_.add_proj_clip(proj_clip);
+ builder_.add_cell_clip(cell_clip);
+ builder_.add_merge_outputs(merge_outputs);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
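+// ResizeBilinearOptions: align_corners sits at vtable slot 8 (field id 2),
+// apparently because ids 0 and 1 were taken by fields since deprecated in the
+// upstream TFLite schema; ResizeNearestNeighborOptions below starts at slot 4.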
+struct ResizeBilinearOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_ALIGN_CORNERS = 8
+ };
+ bool align_corners() const { return GetField<uint8_t>(VT_ALIGN_CORNERS, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint8_t>(verifier, VT_ALIGN_CORNERS) &&
+ verifier.EndTable();
+ }
+};
+
+struct ResizeBilinearOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_align_corners(bool align_corners)
+ {
+ fbb_.AddElement<uint8_t>(ResizeBilinearOptions::VT_ALIGN_CORNERS,
+ static_cast<uint8_t>(align_corners), 0);
+ }
+ explicit ResizeBilinearOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ResizeBilinearOptionsBuilder &operator=(const ResizeBilinearOptionsBuilder &);
+ flatbuffers::Offset<ResizeBilinearOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ResizeBilinearOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ResizeBilinearOptions>
+CreateResizeBilinearOptions(flatbuffers::FlatBufferBuilder &_fbb, bool align_corners = false)
+{
+ ResizeBilinearOptionsBuilder builder_(_fbb);
+ builder_.add_align_corners(align_corners);
+ return builder_.Finish();
+}
+
+struct ResizeNearestNeighborOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_ALIGN_CORNERS = 4
+ };
+ bool align_corners() const { return GetField<uint8_t>(VT_ALIGN_CORNERS, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint8_t>(verifier, VT_ALIGN_CORNERS) &&
+ verifier.EndTable();
+ }
+};
+
+struct ResizeNearestNeighborOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_align_corners(bool align_corners)
+ {
+ fbb_.AddElement<uint8_t>(ResizeNearestNeighborOptions::VT_ALIGN_CORNERS,
+ static_cast<uint8_t>(align_corners), 0);
+ }
+ explicit ResizeNearestNeighborOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ResizeNearestNeighborOptionsBuilder &operator=(const ResizeNearestNeighborOptionsBuilder &);
+ flatbuffers::Offset<ResizeNearestNeighborOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ResizeNearestNeighborOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ResizeNearestNeighborOptions>
+CreateResizeNearestNeighborOptions(flatbuffers::FlatBufferBuilder &_fbb, bool align_corners = false)
+{
+ ResizeNearestNeighborOptionsBuilder builder_(_fbb);
+ builder_.add_align_corners(align_corners);
+ return builder_.Finish();
+}
+
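+// CallOptions selects, by index, which subgraph of the model a CALL operator
+// invokes.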
+struct CallOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_SUBGRAPH = 4
+ };
+ uint32_t subgraph() const { return GetField<uint32_t>(VT_SUBGRAPH, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint32_t>(verifier, VT_SUBGRAPH) &&
+ verifier.EndTable();
+ }
+};
+
+struct CallOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_subgraph(uint32_t subgraph)
+ {
+ fbb_.AddElement<uint32_t>(CallOptions::VT_SUBGRAPH, subgraph, 0);
+ }
+ explicit CallOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ CallOptionsBuilder &operator=(const CallOptionsBuilder &);
+ flatbuffers::Offset<CallOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<CallOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<CallOptions> CreateCallOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ uint32_t subgraph = 0)
+{
+ CallOptionsBuilder builder_(_fbb);
+ builder_.add_subgraph(subgraph);
+ return builder_.Finish();
+}
+
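+// The following option tables (PadOptions, PadV2Options, SpaceToBatchNDOptions,
+// BatchToSpaceNDOptions, and the other empty tables below) carry no fields;
+// they exist so every operator has an options entry, and their Verify only
+// checks the table frame.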
+struct PadOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct PadOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit PadOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ PadOptionsBuilder &operator=(const PadOptionsBuilder &);
+ flatbuffers::Offset<PadOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<PadOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<PadOptions> CreatePadOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ PadOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct PadV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct PadV2OptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit PadV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ PadV2OptionsBuilder &operator=(const PadV2OptionsBuilder &);
+ flatbuffers::Offset<PadV2Options> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<PadV2Options>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<PadV2Options> CreatePadV2Options(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ PadV2OptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
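+// ReshapeOptions stores the target shape as an int32 vector; the *Direct
+// helper below copies a std::vector into the buffer first.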
+struct ReshapeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_NEW_SHAPE = 4
+ };
+ const flatbuffers::Vector<int32_t> *new_shape() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_NEW_SHAPE);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_NEW_SHAPE) &&
+ verifier.VerifyVector(new_shape()) && verifier.EndTable();
+ }
+};
+
+struct ReshapeOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_new_shape(flatbuffers::Offset<flatbuffers::Vector<int32_t>> new_shape)
+ {
+ fbb_.AddOffset(ReshapeOptions::VT_NEW_SHAPE, new_shape);
+ }
+ explicit ReshapeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ReshapeOptionsBuilder &operator=(const ReshapeOptionsBuilder &);
+ flatbuffers::Offset<ReshapeOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ReshapeOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ReshapeOptions>
+CreateReshapeOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> new_shape = 0)
+{
+ ReshapeOptionsBuilder builder_(_fbb);
+ builder_.add_new_shape(new_shape);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<ReshapeOptions>
+CreateReshapeOptionsDirect(flatbuffers::FlatBufferBuilder &_fbb,
+ const std::vector<int32_t> *new_shape = nullptr)
+{
+ return tflite::CreateReshapeOptions(_fbb, new_shape ? _fbb.CreateVector<int32_t>(*new_shape) : 0);
+}
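+
+// Minimal usage sketch (assumes a live FlatBufferBuilder named fbb):
+//   std::vector<int32_t> shape{1, 2, 8};
+//   auto opts = tflite::CreateReshapeOptionsDirect(fbb, &shape);
+// CreateReshapeOptionsDirect serializes the vector and forwards the resulting
+// offset to CreateReshapeOptions.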
+
+struct SpaceToBatchNDOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct SpaceToBatchNDOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit SpaceToBatchNDOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SpaceToBatchNDOptionsBuilder &operator=(const SpaceToBatchNDOptionsBuilder &);
+ flatbuffers::Offset<SpaceToBatchNDOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SpaceToBatchNDOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SpaceToBatchNDOptions>
+CreateSpaceToBatchNDOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ SpaceToBatchNDOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct BatchToSpaceNDOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct BatchToSpaceNDOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit BatchToSpaceNDOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ BatchToSpaceNDOptionsBuilder &operator=(const BatchToSpaceNDOptionsBuilder &);
+ flatbuffers::Offset<BatchToSpaceNDOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<BatchToSpaceNDOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<BatchToSpaceNDOptions>
+CreateBatchToSpaceNDOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ BatchToSpaceNDOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct SkipGramOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_NGRAM_SIZE = 4,
+ VT_MAX_SKIP_SIZE = 6,
+ VT_INCLUDE_ALL_NGRAMS = 8
+ };
+ int32_t ngram_size() const { return GetField<int32_t>(VT_NGRAM_SIZE, 0); }
+ int32_t max_skip_size() const { return GetField<int32_t>(VT_MAX_SKIP_SIZE, 0); }
+ bool include_all_ngrams() const { return GetField<uint8_t>(VT_INCLUDE_ALL_NGRAMS, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_NGRAM_SIZE) &&
+ VerifyField<int32_t>(verifier, VT_MAX_SKIP_SIZE) &&
+ VerifyField<uint8_t>(verifier, VT_INCLUDE_ALL_NGRAMS) && verifier.EndTable();
+ }
+};
+
+struct SkipGramOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_ngram_size(int32_t ngram_size)
+ {
+ fbb_.AddElement<int32_t>(SkipGramOptions::VT_NGRAM_SIZE, ngram_size, 0);
+ }
+ void add_max_skip_size(int32_t max_skip_size)
+ {
+ fbb_.AddElement<int32_t>(SkipGramOptions::VT_MAX_SKIP_SIZE, max_skip_size, 0);
+ }
+ void add_include_all_ngrams(bool include_all_ngrams)
+ {
+ fbb_.AddElement<uint8_t>(SkipGramOptions::VT_INCLUDE_ALL_NGRAMS,
+ static_cast<uint8_t>(include_all_ngrams), 0);
+ }
+ explicit SkipGramOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SkipGramOptionsBuilder &operator=(const SkipGramOptionsBuilder &);
+ flatbuffers::Offset<SkipGramOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SkipGramOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SkipGramOptions>
+CreateSkipGramOptions(flatbuffers::FlatBufferBuilder &_fbb, int32_t ngram_size = 0,
+ int32_t max_skip_size = 0, bool include_all_ngrams = false)
+{
+ SkipGramOptionsBuilder builder_(_fbb);
+ builder_.add_max_skip_size(max_skip_size);
+ builder_.add_ngram_size(ngram_size);
+ builder_.add_include_all_ngrams(include_all_ngrams);
+ return builder_.Finish();
+}
+
+struct SpaceToDepthOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_BLOCK_SIZE = 4
+ };
+ int32_t block_size() const { return GetField<int32_t>(VT_BLOCK_SIZE, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_BLOCK_SIZE) &&
+ verifier.EndTable();
+ }
+};
+
+struct SpaceToDepthOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_block_size(int32_t block_size)
+ {
+ fbb_.AddElement<int32_t>(SpaceToDepthOptions::VT_BLOCK_SIZE, block_size, 0);
+ }
+ explicit SpaceToDepthOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SpaceToDepthOptionsBuilder &operator=(const SpaceToDepthOptionsBuilder &);
+ flatbuffers::Offset<SpaceToDepthOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SpaceToDepthOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SpaceToDepthOptions>
+CreateSpaceToDepthOptions(flatbuffers::FlatBufferBuilder &_fbb, int32_t block_size = 0)
+{
+ SpaceToDepthOptionsBuilder builder_(_fbb);
+ builder_.add_block_size(block_size);
+ return builder_.Finish();
+}
+
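+// SubOptions and DivOptions each carry only a fused activation function,
+// matching the pattern used by the other binary-op options in this schema.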
+struct SubOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct SubOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(SubOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit SubOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SubOptionsBuilder &operator=(const SubOptionsBuilder &);
+ flatbuffers::Offset<SubOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SubOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SubOptions>
+CreateSubOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ SubOptionsBuilder builder_(_fbb);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct DivOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_FUSED_ACTIVATION_FUNCTION = 4
+ };
+ ActivationFunctionType fused_activation_function() const
+ {
+ return static_cast<ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) &&
+ VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION) && verifier.EndTable();
+ }
+};
+
+struct DivOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_fused_activation_function(ActivationFunctionType fused_activation_function)
+ {
+ fbb_.AddElement<int8_t>(DivOptions::VT_FUSED_ACTIVATION_FUNCTION,
+ static_cast<int8_t>(fused_activation_function), 0);
+ }
+ explicit DivOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ DivOptionsBuilder &operator=(const DivOptionsBuilder &);
+ flatbuffers::Offset<DivOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<DivOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<DivOptions>
+CreateDivOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ ActivationFunctionType fused_activation_function = ActivationFunctionType_NONE)
+{
+ DivOptionsBuilder builder_(_fbb);
+ builder_.add_fused_activation_function(fused_activation_function);
+ return builder_.Finish();
+}
+
+struct TopKV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct TopKV2OptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit TopKV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ TopKV2OptionsBuilder &operator=(const TopKV2OptionsBuilder &);
+ flatbuffers::Offset<TopKV2Options> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<TopKV2Options>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<TopKV2Options> CreateTopKV2Options(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ TopKV2OptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct EmbeddingLookupSparseOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_COMBINER = 4
+ };
+ CombinerType combiner() const
+ {
+ return static_cast<CombinerType>(GetField<int8_t>(VT_COMBINER, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_COMBINER) &&
+ verifier.EndTable();
+ }
+};
+
+struct EmbeddingLookupSparseOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_combiner(CombinerType combiner)
+ {
+ fbb_.AddElement<int8_t>(EmbeddingLookupSparseOptions::VT_COMBINER,
+ static_cast<int8_t>(combiner), 0);
+ }
+ explicit EmbeddingLookupSparseOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ EmbeddingLookupSparseOptionsBuilder &operator=(const EmbeddingLookupSparseOptionsBuilder &);
+ flatbuffers::Offset<EmbeddingLookupSparseOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<EmbeddingLookupSparseOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<EmbeddingLookupSparseOptions>
+CreateEmbeddingLookupSparseOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ CombinerType combiner = CombinerType_SUM)
+{
+ EmbeddingLookupSparseOptionsBuilder builder_(_fbb);
+ builder_.add_combiner(combiner);
+ return builder_.Finish();
+}
+
+struct GatherOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_AXIS = 4
+ };
+ int32_t axis() const { return GetField<int32_t>(VT_AXIS, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_AXIS) &&
+ verifier.EndTable();
+ }
+};
+
+struct GatherOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_axis(int32_t axis) { fbb_.AddElement<int32_t>(GatherOptions::VT_AXIS, axis, 0); }
+ explicit GatherOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ GatherOptionsBuilder &operator=(const GatherOptionsBuilder &);
+ flatbuffers::Offset<GatherOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<GatherOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<GatherOptions> CreateGatherOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ int32_t axis = 0)
+{
+ GatherOptionsBuilder builder_(_fbb);
+ builder_.add_axis(axis);
+ return builder_.Finish();
+}
+
+struct TransposeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct TransposeOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit TransposeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ TransposeOptionsBuilder &operator=(const TransposeOptionsBuilder &);
+ flatbuffers::Offset<TransposeOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<TransposeOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<TransposeOptions>
+CreateTransposeOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ TransposeOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct ExpOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct ExpOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit ExpOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ExpOptionsBuilder &operator=(const ExpOptionsBuilder &);
+ flatbuffers::Offset<ExpOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ExpOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ExpOptions> CreateExpOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ ExpOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
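+// ReducerOptions is shared by the reduction operators (e.g. MEAN, SUM,
+// REDUCE_MAX); keep_dims controls whether reduced axes are retained with
+// size 1.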
+struct ReducerOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_KEEP_DIMS = 4
+ };
+ bool keep_dims() const { return GetField<uint8_t>(VT_KEEP_DIMS, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint8_t>(verifier, VT_KEEP_DIMS) &&
+ verifier.EndTable();
+ }
+};
+
+struct ReducerOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_keep_dims(bool keep_dims)
+ {
+ fbb_.AddElement<uint8_t>(ReducerOptions::VT_KEEP_DIMS, static_cast<uint8_t>(keep_dims), 0);
+ }
+ explicit ReducerOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ReducerOptionsBuilder &operator=(const ReducerOptionsBuilder &);
+ flatbuffers::Offset<ReducerOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ReducerOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ReducerOptions>
+CreateReducerOptions(flatbuffers::FlatBufferBuilder &_fbb, bool keep_dims = false)
+{
+ ReducerOptionsBuilder builder_(_fbb);
+ builder_.add_keep_dims(keep_dims);
+ return builder_.Finish();
+}
+
+struct SqueezeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_SQUEEZE_DIMS = 4
+ };
+ const flatbuffers::Vector<int32_t> *squeeze_dims() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_SQUEEZE_DIMS);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_SQUEEZE_DIMS) &&
+ verifier.VerifyVector(squeeze_dims()) && verifier.EndTable();
+ }
+};
+
+struct SqueezeOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_squeeze_dims(flatbuffers::Offset<flatbuffers::Vector<int32_t>> squeeze_dims)
+ {
+ fbb_.AddOffset(SqueezeOptions::VT_SQUEEZE_DIMS, squeeze_dims);
+ }
+ explicit SqueezeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SqueezeOptionsBuilder &operator=(const SqueezeOptionsBuilder &);
+ flatbuffers::Offset<SqueezeOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SqueezeOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SqueezeOptions>
+CreateSqueezeOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> squeeze_dims = 0)
+{
+ SqueezeOptionsBuilder builder_(_fbb);
+ builder_.add_squeeze_dims(squeeze_dims);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<SqueezeOptions>
+CreateSqueezeOptionsDirect(flatbuffers::FlatBufferBuilder &_fbb,
+ const std::vector<int32_t> *squeeze_dims = nullptr)
+{
+ return tflite::CreateSqueezeOptions(_fbb,
+ squeeze_dims ? _fbb.CreateVector<int32_t>(*squeeze_dims) : 0);
+}
+
+struct SplitOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_NUM_SPLITS = 4
+ };
+ int32_t num_splits() const { return GetField<int32_t>(VT_NUM_SPLITS, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_NUM_SPLITS) &&
+ verifier.EndTable();
+ }
+};
+
+struct SplitOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_num_splits(int32_t num_splits)
+ {
+ fbb_.AddElement<int32_t>(SplitOptions::VT_NUM_SPLITS, num_splits, 0);
+ }
+ explicit SplitOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SplitOptionsBuilder &operator=(const SplitOptionsBuilder &);
+ flatbuffers::Offset<SplitOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SplitOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SplitOptions> CreateSplitOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ int32_t num_splits = 0)
+{
+ SplitOptionsBuilder builder_(_fbb);
+ builder_.add_num_splits(num_splits);
+ return builder_.Finish();
+}
+
+struct SplitVOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_NUM_SPLITS = 4
+ };
+ int32_t num_splits() const { return GetField<int32_t>(VT_NUM_SPLITS, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_NUM_SPLITS) &&
+ verifier.EndTable();
+ }
+};
+
+struct SplitVOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_num_splits(int32_t num_splits)
+ {
+ fbb_.AddElement<int32_t>(SplitVOptions::VT_NUM_SPLITS, num_splits, 0);
+ }
+ explicit SplitVOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SplitVOptionsBuilder &operator=(const SplitVOptionsBuilder &);
+ flatbuffers::Offset<SplitVOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SplitVOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SplitVOptions> CreateSplitVOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ int32_t num_splits = 0)
+{
+ SplitVOptionsBuilder builder_(_fbb);
+ builder_.add_num_splits(num_splits);
+ return builder_.Finish();
+}
+
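+// StridedSliceOptions: each mask is a per-dimension bit field with
+// TensorFlow's strided_slice semantics, e.g. bit i of begin_mask means
+// "ignore begin[i] and start from the beginning of axis i".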
+struct StridedSliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_BEGIN_MASK = 4,
+ VT_END_MASK = 6,
+ VT_ELLIPSIS_MASK = 8,
+ VT_NEW_AXIS_MASK = 10,
+ VT_SHRINK_AXIS_MASK = 12
+ };
+ int32_t begin_mask() const { return GetField<int32_t>(VT_BEGIN_MASK, 0); }
+ int32_t end_mask() const { return GetField<int32_t>(VT_END_MASK, 0); }
+ int32_t ellipsis_mask() const { return GetField<int32_t>(VT_ELLIPSIS_MASK, 0); }
+ int32_t new_axis_mask() const { return GetField<int32_t>(VT_NEW_AXIS_MASK, 0); }
+ int32_t shrink_axis_mask() const { return GetField<int32_t>(VT_SHRINK_AXIS_MASK, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_BEGIN_MASK) &&
+ VerifyField<int32_t>(verifier, VT_END_MASK) &&
+ VerifyField<int32_t>(verifier, VT_ELLIPSIS_MASK) &&
+ VerifyField<int32_t>(verifier, VT_NEW_AXIS_MASK) &&
+ VerifyField<int32_t>(verifier, VT_SHRINK_AXIS_MASK) && verifier.EndTable();
+ }
+};
+
+struct StridedSliceOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_begin_mask(int32_t begin_mask)
+ {
+ fbb_.AddElement<int32_t>(StridedSliceOptions::VT_BEGIN_MASK, begin_mask, 0);
+ }
+ void add_end_mask(int32_t end_mask)
+ {
+ fbb_.AddElement<int32_t>(StridedSliceOptions::VT_END_MASK, end_mask, 0);
+ }
+ void add_ellipsis_mask(int32_t ellipsis_mask)
+ {
+ fbb_.AddElement<int32_t>(StridedSliceOptions::VT_ELLIPSIS_MASK, ellipsis_mask, 0);
+ }
+ void add_new_axis_mask(int32_t new_axis_mask)
+ {
+ fbb_.AddElement<int32_t>(StridedSliceOptions::VT_NEW_AXIS_MASK, new_axis_mask, 0);
+ }
+ void add_shrink_axis_mask(int32_t shrink_axis_mask)
+ {
+ fbb_.AddElement<int32_t>(StridedSliceOptions::VT_SHRINK_AXIS_MASK, shrink_axis_mask, 0);
+ }
+ explicit StridedSliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ StridedSliceOptionsBuilder &operator=(const StridedSliceOptionsBuilder &);
+ flatbuffers::Offset<StridedSliceOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<StridedSliceOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<StridedSliceOptions>
+CreateStridedSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, int32_t begin_mask = 0,
+ int32_t end_mask = 0, int32_t ellipsis_mask = 0,
+ int32_t new_axis_mask = 0, int32_t shrink_axis_mask = 0)
+{
+ StridedSliceOptionsBuilder builder_(_fbb);
+ builder_.add_shrink_axis_mask(shrink_axis_mask);
+ builder_.add_new_axis_mask(new_axis_mask);
+ builder_.add_ellipsis_mask(ellipsis_mask);
+ builder_.add_end_mask(end_mask);
+ builder_.add_begin_mask(begin_mask);
+ return builder_.Finish();
+}
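+
+// Usage sketch (assumes a live FlatBufferBuilder named fbb): take the full
+// range on axis 0 and collapse axis 1.
+//   auto opts = tflite::CreateStridedSliceOptions(fbb, /*begin_mask=*/1,
+//                                                 /*end_mask=*/1,
+//                                                 /*ellipsis_mask=*/0,
+//                                                 /*new_axis_mask=*/0,
+//                                                 /*shrink_axis_mask=*/2);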
+
+struct LogSoftmaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct LogSoftmaxOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit LogSoftmaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LogSoftmaxOptionsBuilder &operator=(const LogSoftmaxOptionsBuilder &);
+ flatbuffers::Offset<LogSoftmaxOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LogSoftmaxOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LogSoftmaxOptions>
+CreateLogSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ LogSoftmaxOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct CastOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_IN_DATA_TYPE = 4,
+ VT_OUT_DATA_TYPE = 6
+ };
+ TensorType in_data_type() const
+ {
+ return static_cast<TensorType>(GetField<int8_t>(VT_IN_DATA_TYPE, 0));
+ }
+ TensorType out_data_type() const
+ {
+ return static_cast<TensorType>(GetField<int8_t>(VT_OUT_DATA_TYPE, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_IN_DATA_TYPE) &&
+ VerifyField<int8_t>(verifier, VT_OUT_DATA_TYPE) && verifier.EndTable();
+ }
+};
+
+struct CastOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_in_data_type(TensorType in_data_type)
+ {
+ fbb_.AddElement<int8_t>(CastOptions::VT_IN_DATA_TYPE, static_cast<int8_t>(in_data_type), 0);
+ }
+ void add_out_data_type(TensorType out_data_type)
+ {
+ fbb_.AddElement<int8_t>(CastOptions::VT_OUT_DATA_TYPE, static_cast<int8_t>(out_data_type), 0);
+ }
+ explicit CastOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ CastOptionsBuilder &operator=(const CastOptionsBuilder &);
+ flatbuffers::Offset<CastOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<CastOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<CastOptions>
+CreateCastOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ TensorType in_data_type = TensorType_FLOAT32,
+ TensorType out_data_type = TensorType_FLOAT32)
+{
+ CastOptionsBuilder builder_(_fbb);
+ builder_.add_out_data_type(out_data_type);
+ builder_.add_in_data_type(in_data_type);
+ return builder_.Finish();
+}
+
+struct DequantizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct DequantizeOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit DequantizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ DequantizeOptionsBuilder &operator=(const DequantizeOptionsBuilder &);
+ flatbuffers::Offset<DequantizeOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<DequantizeOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<DequantizeOptions>
+CreateDequantizeOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ DequantizeOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct MaximumMinimumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct MaximumMinimumOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit MaximumMinimumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ MaximumMinimumOptionsBuilder &operator=(const MaximumMinimumOptionsBuilder &);
+ flatbuffers::Offset<MaximumMinimumOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<MaximumMinimumOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<MaximumMinimumOptions>
+CreateMaximumMinimumOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ MaximumMinimumOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct TileOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct TileOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit TileOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ TileOptionsBuilder &operator=(const TileOptionsBuilder &);
+ flatbuffers::Offset<TileOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<TileOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<TileOptions> CreateTileOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ TileOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
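+// ArgMaxOptions/ArgMinOptions: output_type selects the dtype of the returned
+// index tensor. The TensorType_FLOAT32 default in the Create helpers is just
+// the enum's zero value; callers normally pass TensorType_INT32 or INT64.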
+struct ArgMaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_OUTPUT_TYPE = 4
+ };
+ TensorType output_type() const
+ {
+ return static_cast<TensorType>(GetField<int8_t>(VT_OUTPUT_TYPE, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_OUTPUT_TYPE) &&
+ verifier.EndTable();
+ }
+};
+
+struct ArgMaxOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_output_type(TensorType output_type)
+ {
+ fbb_.AddElement<int8_t>(ArgMaxOptions::VT_OUTPUT_TYPE, static_cast<int8_t>(output_type), 0);
+ }
+ explicit ArgMaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ArgMaxOptionsBuilder &operator=(const ArgMaxOptionsBuilder &);
+ flatbuffers::Offset<ArgMaxOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ArgMaxOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ArgMaxOptions>
+CreateArgMaxOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ TensorType output_type = TensorType_FLOAT32)
+{
+ ArgMaxOptionsBuilder builder_(_fbb);
+ builder_.add_output_type(output_type);
+ return builder_.Finish();
+}
+
+struct ArgMinOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_OUTPUT_TYPE = 4
+ };
+ TensorType output_type() const
+ {
+ return static_cast<TensorType>(GetField<int8_t>(VT_OUTPUT_TYPE, 0));
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_OUTPUT_TYPE) &&
+ verifier.EndTable();
+ }
+};
+
+struct ArgMinOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_output_type(TensorType output_type)
+ {
+ fbb_.AddElement<int8_t>(ArgMinOptions::VT_OUTPUT_TYPE, static_cast<int8_t>(output_type), 0);
+ }
+ explicit ArgMinOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ArgMinOptionsBuilder &operator=(const ArgMinOptionsBuilder &);
+ flatbuffers::Offset<ArgMinOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ArgMinOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ArgMinOptions>
+CreateArgMinOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ TensorType output_type = TensorType_FLOAT32)
+{
+ ArgMinOptionsBuilder builder_(_fbb);
+ builder_.add_output_type(output_type);
+ return builder_.Finish();
+}
+
+struct GreaterOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct GreaterOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit GreaterOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ GreaterOptionsBuilder &operator=(const GreaterOptionsBuilder &);
+ flatbuffers::Offset<GreaterOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<GreaterOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<GreaterOptions>
+CreateGreaterOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ GreaterOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct GreaterEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct GreaterEqualOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit GreaterEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ GreaterEqualOptionsBuilder &operator=(const GreaterEqualOptionsBuilder &);
+ flatbuffers::Offset<GreaterEqualOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<GreaterEqualOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<GreaterEqualOptions>
+CreateGreaterEqualOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ GreaterEqualOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct LessOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct LessOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit LessOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LessOptionsBuilder &operator=(const LessOptionsBuilder &);
+ flatbuffers::Offset<LessOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LessOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LessOptions> CreateLessOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ LessOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct LessEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct LessEqualOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit LessEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LessEqualOptionsBuilder &operator=(const LessEqualOptionsBuilder &);
+ flatbuffers::Offset<LessEqualOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LessEqualOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LessEqualOptions>
+CreateLessEqualOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ LessEqualOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct NegOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct NegOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit NegOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ NegOptionsBuilder &operator=(const NegOptionsBuilder &);
+ flatbuffers::Offset<NegOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<NegOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<NegOptions> CreateNegOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ NegOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct SelectOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct SelectOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit SelectOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SelectOptionsBuilder &operator=(const SelectOptionsBuilder &);
+ flatbuffers::Offset<SelectOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SelectOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SelectOptions> CreateSelectOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ SelectOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct SliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct SliceOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit SliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SliceOptionsBuilder &operator=(const SliceOptionsBuilder &);
+ flatbuffers::Offset<SliceOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SliceOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SliceOptions> CreateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ SliceOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct TransposeConvOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_PADDING = 4,
+ VT_STRIDE_W = 6,
+ VT_STRIDE_H = 8
+ };
+ Padding padding() const { return static_cast<Padding>(GetField<int8_t>(VT_PADDING, 0)); }
+ int32_t stride_w() const { return GetField<int32_t>(VT_STRIDE_W, 0); }
+ int32_t stride_h() const { return GetField<int32_t>(VT_STRIDE_H, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_PADDING) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_W) &&
+ VerifyField<int32_t>(verifier, VT_STRIDE_H) && verifier.EndTable();
+ }
+};
+
+struct TransposeConvOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_padding(Padding padding)
+ {
+ fbb_.AddElement<int8_t>(TransposeConvOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
+ }
+ void add_stride_w(int32_t stride_w)
+ {
+ fbb_.AddElement<int32_t>(TransposeConvOptions::VT_STRIDE_W, stride_w, 0);
+ }
+ void add_stride_h(int32_t stride_h)
+ {
+ fbb_.AddElement<int32_t>(TransposeConvOptions::VT_STRIDE_H, stride_h, 0);
+ }
+ explicit TransposeConvOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ TransposeConvOptionsBuilder &operator=(const TransposeConvOptionsBuilder &);
+ flatbuffers::Offset<TransposeConvOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<TransposeConvOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<TransposeConvOptions>
+CreateTransposeConvOptions(flatbuffers::FlatBufferBuilder &_fbb, Padding padding = Padding_SAME,
+ int32_t stride_w = 0, int32_t stride_h = 0)
+{
+ TransposeConvOptionsBuilder builder_(_fbb);
+ builder_.add_stride_h(stride_h);
+ builder_.add_stride_w(stride_w);
+ builder_.add_padding(padding);
+ return builder_.Finish();
+}
+
+struct ExpandDimsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct ExpandDimsOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit ExpandDimsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ExpandDimsOptionsBuilder &operator=(const ExpandDimsOptionsBuilder &);
+ flatbuffers::Offset<ExpandDimsOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ExpandDimsOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ExpandDimsOptions>
+CreateExpandDimsOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ ExpandDimsOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct SparseToDenseOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_VALIDATE_INDICES = 4
+ };
+ bool validate_indices() const { return GetField<uint8_t>(VT_VALIDATE_INDICES, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint8_t>(verifier, VT_VALIDATE_INDICES) &&
+ verifier.EndTable();
+ }
+};
+
+struct SparseToDenseOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_validate_indices(bool validate_indices)
+ {
+ fbb_.AddElement<uint8_t>(SparseToDenseOptions::VT_VALIDATE_INDICES,
+ static_cast<uint8_t>(validate_indices), 0);
+ }
+ explicit SparseToDenseOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SparseToDenseOptionsBuilder &operator=(const SparseToDenseOptionsBuilder &);
+ flatbuffers::Offset<SparseToDenseOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SparseToDenseOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SparseToDenseOptions>
+CreateSparseToDenseOptions(flatbuffers::FlatBufferBuilder &_fbb, bool validate_indices = false)
+{
+ SparseToDenseOptionsBuilder builder_(_fbb);
+ builder_.add_validate_indices(validate_indices);
+ return builder_.Finish();
+}
+
+struct EqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct EqualOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit EqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ EqualOptionsBuilder &operator=(const EqualOptionsBuilder &);
+ flatbuffers::Offset<EqualOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<EqualOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<EqualOptions> CreateEqualOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ EqualOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct NotEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct NotEqualOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit NotEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ NotEqualOptionsBuilder &operator=(const NotEqualOptionsBuilder &);
+ flatbuffers::Offset<NotEqualOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<NotEqualOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<NotEqualOptions>
+CreateNotEqualOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ NotEqualOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct ShapeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_OUT_TYPE = 4
+ };
+ TensorType out_type() const { return static_cast<TensorType>(GetField<int8_t>(VT_OUT_TYPE, 0)); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_OUT_TYPE) &&
+ verifier.EndTable();
+ }
+};
+
+struct ShapeOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_out_type(TensorType out_type)
+ {
+ fbb_.AddElement<int8_t>(ShapeOptions::VT_OUT_TYPE, static_cast<int8_t>(out_type), 0);
+ }
+ explicit ShapeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ShapeOptionsBuilder &operator=(const ShapeOptionsBuilder &);
+ flatbuffers::Offset<ShapeOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ShapeOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ShapeOptions>
+CreateShapeOptions(flatbuffers::FlatBufferBuilder &_fbb, TensorType out_type = TensorType_FLOAT32)
+{
+ ShapeOptionsBuilder builder_(_fbb);
+ builder_.add_out_type(out_type);
+ return builder_.Finish();
+}
+
+struct PowOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct PowOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit PowOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ PowOptionsBuilder &operator=(const PowOptionsBuilder &);
+ flatbuffers::Offset<PowOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<PowOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<PowOptions> CreatePowOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ PowOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
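+// FakeQuantOptions mirrors TensorFlow's fake_quant_with_min_max_args: [min,
+// max] is the quantization range, num_bits the precision, and narrow_range
+// excludes the lowest quantized value from the range.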
+struct FakeQuantOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_MIN = 4,
+ VT_MAX = 6,
+ VT_NUM_BITS = 8,
+ VT_NARROW_RANGE = 10
+ };
+ float min() const { return GetField<float>(VT_MIN, 0.0f); }
+ float max() const { return GetField<float>(VT_MAX, 0.0f); }
+ int32_t num_bits() const { return GetField<int32_t>(VT_NUM_BITS, 0); }
+ bool narrow_range() const { return GetField<uint8_t>(VT_NARROW_RANGE, 0) != 0; }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<float>(verifier, VT_MIN) &&
+ VerifyField<float>(verifier, VT_MAX) && VerifyField<int32_t>(verifier, VT_NUM_BITS) &&
+ VerifyField<uint8_t>(verifier, VT_NARROW_RANGE) && verifier.EndTable();
+ }
+};
+
+struct FakeQuantOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_min(float min) { fbb_.AddElement<float>(FakeQuantOptions::VT_MIN, min, 0.0f); }
+ void add_max(float max) { fbb_.AddElement<float>(FakeQuantOptions::VT_MAX, max, 0.0f); }
+ void add_num_bits(int32_t num_bits)
+ {
+ fbb_.AddElement<int32_t>(FakeQuantOptions::VT_NUM_BITS, num_bits, 0);
+ }
+ void add_narrow_range(bool narrow_range)
+ {
+ fbb_.AddElement<uint8_t>(FakeQuantOptions::VT_NARROW_RANGE, static_cast<uint8_t>(narrow_range),
+ 0);
+ }
+ explicit FakeQuantOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ FakeQuantOptionsBuilder &operator=(const FakeQuantOptionsBuilder &);
+ flatbuffers::Offset<FakeQuantOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<FakeQuantOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<FakeQuantOptions>
+CreateFakeQuantOptions(flatbuffers::FlatBufferBuilder &_fbb, float min = 0.0f, float max = 0.0f,
+ int32_t num_bits = 0, bool narrow_range = false)
+{
+ FakeQuantOptionsBuilder builder_(_fbb);
+ builder_.add_num_bits(num_bits);
+ builder_.add_max(max);
+ builder_.add_min(min);
+ builder_.add_narrow_range(narrow_range);
+ return builder_.Finish();
+}
+
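+// PackOptions: values_count is the number of input tensors stacked along the
+// new dimension given by axis (UnpackOptions below is the inverse).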
+struct PackOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_VALUES_COUNT = 4,
+ VT_AXIS = 6
+ };
+ int32_t values_count() const { return GetField<int32_t>(VT_VALUES_COUNT, 0); }
+ int32_t axis() const { return GetField<int32_t>(VT_AXIS, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_VALUES_COUNT) &&
+ VerifyField<int32_t>(verifier, VT_AXIS) && verifier.EndTable();
+ }
+};
+
+struct PackOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_values_count(int32_t values_count)
+ {
+ fbb_.AddElement<int32_t>(PackOptions::VT_VALUES_COUNT, values_count, 0);
+ }
+ void add_axis(int32_t axis) { fbb_.AddElement<int32_t>(PackOptions::VT_AXIS, axis, 0); }
+ explicit PackOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ PackOptionsBuilder &operator=(const PackOptionsBuilder &);
+ flatbuffers::Offset<PackOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<PackOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<PackOptions>
+CreatePackOptions(flatbuffers::FlatBufferBuilder &_fbb, int32_t values_count = 0, int32_t axis = 0)
+{
+ PackOptionsBuilder builder_(_fbb);
+ builder_.add_axis(axis);
+ builder_.add_values_count(values_count);
+ return builder_.Finish();
+}
+
+struct LogicalOrOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct LogicalOrOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit LogicalOrOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LogicalOrOptionsBuilder &operator=(const LogicalOrOptionsBuilder &);
+ flatbuffers::Offset<LogicalOrOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LogicalOrOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LogicalOrOptions>
+CreateLogicalOrOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ LogicalOrOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct OneHotOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_AXIS = 4
+ };
+ int32_t axis() const { return GetField<int32_t>(VT_AXIS, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_AXIS) &&
+ verifier.EndTable();
+ }
+};
+
+struct OneHotOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_axis(int32_t axis) { fbb_.AddElement<int32_t>(OneHotOptions::VT_AXIS, axis, 0); }
+ explicit OneHotOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ OneHotOptionsBuilder &operator=(const OneHotOptionsBuilder &);
+ flatbuffers::Offset<OneHotOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<OneHotOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<OneHotOptions> CreateOneHotOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ int32_t axis = 0)
+{
+ OneHotOptionsBuilder builder_(_fbb);
+ builder_.add_axis(axis);
+ return builder_.Finish();
+}
+
+struct AbsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct AbsOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit AbsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ AbsOptionsBuilder &operator=(const AbsOptionsBuilder &);
+ flatbuffers::Offset<AbsOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<AbsOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<AbsOptions> CreateAbsOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ AbsOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct LogicalAndOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct LogicalAndOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit LogicalAndOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LogicalAndOptionsBuilder &operator=(const LogicalAndOptionsBuilder &);
+ flatbuffers::Offset<LogicalAndOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LogicalAndOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LogicalAndOptions>
+CreateLogicalAndOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ LogicalAndOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct LogicalNotOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct LogicalNotOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit LogicalNotOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LogicalNotOptionsBuilder &operator=(const LogicalNotOptionsBuilder &);
+ flatbuffers::Offset<LogicalNotOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LogicalNotOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LogicalNotOptions>
+CreateLogicalNotOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ LogicalNotOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
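+// UnpackOptions configures UNPACK, the inverse of PACK: the input tensor is
+// split into num output tensors along axis.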
+struct UnpackOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_NUM = 4,
+ VT_AXIS = 6
+ };
+ int32_t num() const { return GetField<int32_t>(VT_NUM, 0); }
+ int32_t axis() const { return GetField<int32_t>(VT_AXIS, 0); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int32_t>(verifier, VT_NUM) &&
+ VerifyField<int32_t>(verifier, VT_AXIS) && verifier.EndTable();
+ }
+};
+
+struct UnpackOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_num(int32_t num) { fbb_.AddElement<int32_t>(UnpackOptions::VT_NUM, num, 0); }
+ void add_axis(int32_t axis) { fbb_.AddElement<int32_t>(UnpackOptions::VT_AXIS, axis, 0); }
+ explicit UnpackOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ UnpackOptionsBuilder &operator=(const UnpackOptionsBuilder &);
+ flatbuffers::Offset<UnpackOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<UnpackOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ int32_t num = 0, int32_t axis = 0)
+{
+ UnpackOptionsBuilder builder_(_fbb);
+ builder_.add_axis(axis);
+ builder_.add_num(num);
+ return builder_.Finish();
+}
+
+struct FloorDivOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct FloorDivOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit FloorDivOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ FloorDivOptionsBuilder &operator=(const FloorDivOptionsBuilder &);
+ flatbuffers::Offset<FloorDivOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<FloorDivOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<FloorDivOptions>
+CreateFloorDivOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ FloorDivOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct SquareOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct SquareOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit SquareOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SquareOptionsBuilder &operator=(const SquareOptionsBuilder &);
+ flatbuffers::Offset<SquareOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SquareOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SquareOptions> CreateSquareOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ SquareOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct ZerosLikeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct ZerosLikeOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit ZerosLikeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ZerosLikeOptionsBuilder &operator=(const ZerosLikeOptionsBuilder &);
+ flatbuffers::Offset<ZerosLikeOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<ZerosLikeOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<ZerosLikeOptions>
+CreateZerosLikeOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ ZerosLikeOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct FillOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct FillOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit FillOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ FillOptionsBuilder &operator=(const FillOptionsBuilder &);
+ flatbuffers::Offset<FillOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<FillOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<FillOptions> CreateFillOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ FillOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct FloorModOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct FloorModOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit FloorModOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ FloorModOptionsBuilder &operator=(const FloorModOptionsBuilder &);
+ flatbuffers::Offset<FloorModOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<FloorModOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<FloorModOptions>
+CreateFloorModOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ FloorModOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
+struct RangeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct RangeOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit RangeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ RangeOptionsBuilder &operator=(const RangeOptionsBuilder &);
+ flatbuffers::Offset<RangeOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<RangeOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<RangeOptions> CreateRangeOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ RangeOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
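+// LeakyReluOptions: alpha is the slope applied to negative inputs
+// (f(x) = alpha * x for x < 0), defaulting to 0.0f.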
+struct LeakyReluOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_ALPHA = 4
+ };
+ float alpha() const { return GetField<float>(VT_ALPHA, 0.0f); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<float>(verifier, VT_ALPHA) &&
+ verifier.EndTable();
+ }
+};
+
+struct LeakyReluOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_alpha(float alpha) { fbb_.AddElement<float>(LeakyReluOptions::VT_ALPHA, alpha, 0.0f); }
+ explicit LeakyReluOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ LeakyReluOptionsBuilder &operator=(const LeakyReluOptionsBuilder &);
+ flatbuffers::Offset<LeakyReluOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<LeakyReluOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<LeakyReluOptions>
+CreateLeakyReluOptions(flatbuffers::FlatBufferBuilder &_fbb, float alpha = 0.0f)
+{
+ LeakyReluOptionsBuilder builder_(_fbb);
+ builder_.add_alpha(alpha);
+ return builder_.Finish();
+}
+
+struct SquaredDifferenceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && verifier.EndTable();
+ }
+};
+
+struct SquaredDifferenceOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ explicit SquaredDifferenceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SquaredDifferenceOptionsBuilder &operator=(const SquaredDifferenceOptionsBuilder &);
+ flatbuffers::Offset<SquaredDifferenceOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SquaredDifferenceOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SquaredDifferenceOptions>
+CreateSquaredDifferenceOptions(flatbuffers::FlatBufferBuilder &_fbb)
+{
+ SquaredDifferenceOptionsBuilder builder_(_fbb);
+ return builder_.Finish();
+}
+
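+// MirrorPadOptions: mode is stored as an int8_t and re-cast to MirrorPadMode
+// on read; MirrorPadMode_REFLECT (0) is the schema default.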
+struct MirrorPadOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_MODE = 4
+ };
+ MirrorPadMode mode() const { return static_cast<MirrorPadMode>(GetField<int8_t>(VT_MODE, 0)); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_MODE) &&
+ verifier.EndTable();
+ }
+};
+
+struct MirrorPadOptionsBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_mode(MirrorPadMode mode)
+ {
+ fbb_.AddElement<int8_t>(MirrorPadOptions::VT_MODE, static_cast<int8_t>(mode), 0);
+ }
+ explicit MirrorPadOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ MirrorPadOptionsBuilder &operator=(const MirrorPadOptionsBuilder &);
+ flatbuffers::Offset<MirrorPadOptions> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<MirrorPadOptions>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<MirrorPadOptions>
+CreateMirrorPadOptions(flatbuffers::FlatBufferBuilder &_fbb,
+ MirrorPadMode mode = MirrorPadMode_REFLECT)
+{
+ MirrorPadOptionsBuilder builder_(_fbb);
+ builder_.add_mode(mode);
+ return builder_.Finish();
+}
+
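+// OperatorCode identifies an operator implementation: builtin_code selects a
+// BuiltinOperator, custom_code names the op when builtin_code is
+// BuiltinOperator_CUSTOM, and version (default 1) tracks the kernel version
+// the model was authored against.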
+struct OperatorCode FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_BUILTIN_CODE = 4,
+ VT_CUSTOM_CODE = 6,
+ VT_VERSION = 8
+ };
+ BuiltinOperator builtin_code() const
+ {
+ return static_cast<BuiltinOperator>(GetField<int8_t>(VT_BUILTIN_CODE, 0));
+ }
+ const flatbuffers::String *custom_code() const
+ {
+ return GetPointer<const flatbuffers::String *>(VT_CUSTOM_CODE);
+ }
+ int32_t version() const { return GetField<int32_t>(VT_VERSION, 1); }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<int8_t>(verifier, VT_BUILTIN_CODE) &&
+ VerifyOffset(verifier, VT_CUSTOM_CODE) && verifier.VerifyString(custom_code()) &&
+ VerifyField<int32_t>(verifier, VT_VERSION) && verifier.EndTable();
+ }
+};
+
+struct OperatorCodeBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_builtin_code(BuiltinOperator builtin_code)
+ {
+ fbb_.AddElement<int8_t>(OperatorCode::VT_BUILTIN_CODE, static_cast<int8_t>(builtin_code), 0);
+ }
+ void add_custom_code(flatbuffers::Offset<flatbuffers::String> custom_code)
+ {
+ fbb_.AddOffset(OperatorCode::VT_CUSTOM_CODE, custom_code);
+ }
+ void add_version(int32_t version)
+ {
+ fbb_.AddElement<int32_t>(OperatorCode::VT_VERSION, version, 1);
+ }
+ explicit OperatorCodeBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ OperatorCodeBuilder &operator=(const OperatorCodeBuilder &);
+ flatbuffers::Offset<OperatorCode> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<OperatorCode>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<OperatorCode>
+CreateOperatorCode(flatbuffers::FlatBufferBuilder &_fbb,
+ BuiltinOperator builtin_code = BuiltinOperator_ADD,
+ flatbuffers::Offset<flatbuffers::String> custom_code = 0, int32_t version = 1)
+{
+ OperatorCodeBuilder builder_(_fbb);
+ builder_.add_version(version);
+ builder_.add_custom_code(custom_code);
+ builder_.add_builtin_code(builtin_code);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<OperatorCode>
+CreateOperatorCodeDirect(flatbuffers::FlatBufferBuilder &_fbb,
+ BuiltinOperator builtin_code = BuiltinOperator_ADD,
+ const char *custom_code = nullptr, int32_t version = 1)
+{
+ return tflite::CreateOperatorCode(_fbb, builtin_code,
+ custom_code ? _fbb.CreateString(custom_code) : 0, version);
+}
+
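+// Operator is a single node in a subgraph. opcode_index refers into
+// Model::operator_codes, inputs/outputs are indices into SubGraph::tensors,
+// and builtin_options is a FlatBuffers union discriminated by
+// builtin_options_type; the builtin_options_as_*() accessors below return
+// nullptr unless the stored union type matches. custom_options carries opaque
+// bytes for custom operators.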
+struct Operator FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_OPCODE_INDEX = 4,
+ VT_INPUTS = 6,
+ VT_OUTPUTS = 8,
+ VT_BUILTIN_OPTIONS_TYPE = 10,
+ VT_BUILTIN_OPTIONS = 12,
+ VT_CUSTOM_OPTIONS = 14,
+ VT_CUSTOM_OPTIONS_FORMAT = 16,
+ VT_MUTATING_VARIABLE_INPUTS = 18
+ };
+ uint32_t opcode_index() const { return GetField<uint32_t>(VT_OPCODE_INDEX, 0); }
+ const flatbuffers::Vector<int32_t> *inputs() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_INPUTS);
+ }
+ const flatbuffers::Vector<int32_t> *outputs() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_OUTPUTS);
+ }
+ BuiltinOptions builtin_options_type() const
+ {
+ return static_cast<BuiltinOptions>(GetField<uint8_t>(VT_BUILTIN_OPTIONS_TYPE, 0));
+ }
+ const void *builtin_options() const { return GetPointer<const void *>(VT_BUILTIN_OPTIONS); }
+ template <typename T> const T *builtin_options_as() const;
+ const Conv2DOptions *builtin_options_as_Conv2DOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_Conv2DOptions
+ ? static_cast<const Conv2DOptions *>(builtin_options())
+ : nullptr;
+ }
+ const DepthwiseConv2DOptions *builtin_options_as_DepthwiseConv2DOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_DepthwiseConv2DOptions
+ ? static_cast<const DepthwiseConv2DOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ConcatEmbeddingsOptions *builtin_options_as_ConcatEmbeddingsOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ConcatEmbeddingsOptions
+ ? static_cast<const ConcatEmbeddingsOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LSHProjectionOptions *builtin_options_as_LSHProjectionOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LSHProjectionOptions
+ ? static_cast<const LSHProjectionOptions *>(builtin_options())
+ : nullptr;
+ }
+ const Pool2DOptions *builtin_options_as_Pool2DOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_Pool2DOptions
+ ? static_cast<const Pool2DOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SVDFOptions *builtin_options_as_SVDFOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SVDFOptions
+ ? static_cast<const SVDFOptions *>(builtin_options())
+ : nullptr;
+ }
+ const RNNOptions *builtin_options_as_RNNOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_RNNOptions
+ ? static_cast<const RNNOptions *>(builtin_options())
+ : nullptr;
+ }
+ const FullyConnectedOptions *builtin_options_as_FullyConnectedOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_FullyConnectedOptions
+ ? static_cast<const FullyConnectedOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SoftmaxOptions *builtin_options_as_SoftmaxOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SoftmaxOptions
+ ? static_cast<const SoftmaxOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ConcatenationOptions *builtin_options_as_ConcatenationOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ConcatenationOptions
+ ? static_cast<const ConcatenationOptions *>(builtin_options())
+ : nullptr;
+ }
+ const AddOptions *builtin_options_as_AddOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_AddOptions
+ ? static_cast<const AddOptions *>(builtin_options())
+ : nullptr;
+ }
+ const L2NormOptions *builtin_options_as_L2NormOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_L2NormOptions
+ ? static_cast<const L2NormOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LocalResponseNormalizationOptions *
+ builtin_options_as_LocalResponseNormalizationOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LocalResponseNormalizationOptions
+ ? static_cast<const LocalResponseNormalizationOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LSTMOptions *builtin_options_as_LSTMOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LSTMOptions
+ ? static_cast<const LSTMOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ResizeBilinearOptions *builtin_options_as_ResizeBilinearOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ResizeBilinearOptions
+ ? static_cast<const ResizeBilinearOptions *>(builtin_options())
+ : nullptr;
+ }
+ const CallOptions *builtin_options_as_CallOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_CallOptions
+ ? static_cast<const CallOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ReshapeOptions *builtin_options_as_ReshapeOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ReshapeOptions
+ ? static_cast<const ReshapeOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SkipGramOptions *builtin_options_as_SkipGramOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SkipGramOptions
+ ? static_cast<const SkipGramOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SpaceToDepthOptions *builtin_options_as_SpaceToDepthOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SpaceToDepthOptions
+ ? static_cast<const SpaceToDepthOptions *>(builtin_options())
+ : nullptr;
+ }
+ const EmbeddingLookupSparseOptions *builtin_options_as_EmbeddingLookupSparseOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_EmbeddingLookupSparseOptions
+ ? static_cast<const EmbeddingLookupSparseOptions *>(builtin_options())
+ : nullptr;
+ }
+ const MulOptions *builtin_options_as_MulOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_MulOptions
+ ? static_cast<const MulOptions *>(builtin_options())
+ : nullptr;
+ }
+ const PadOptions *builtin_options_as_PadOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_PadOptions
+ ? static_cast<const PadOptions *>(builtin_options())
+ : nullptr;
+ }
+ const GatherOptions *builtin_options_as_GatherOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_GatherOptions
+ ? static_cast<const GatherOptions *>(builtin_options())
+ : nullptr;
+ }
+ const BatchToSpaceNDOptions *builtin_options_as_BatchToSpaceNDOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_BatchToSpaceNDOptions
+ ? static_cast<const BatchToSpaceNDOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SpaceToBatchNDOptions *builtin_options_as_SpaceToBatchNDOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SpaceToBatchNDOptions
+ ? static_cast<const SpaceToBatchNDOptions *>(builtin_options())
+ : nullptr;
+ }
+ const TransposeOptions *builtin_options_as_TransposeOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_TransposeOptions
+ ? static_cast<const TransposeOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ReducerOptions *builtin_options_as_ReducerOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ReducerOptions
+ ? static_cast<const ReducerOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SubOptions *builtin_options_as_SubOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SubOptions
+ ? static_cast<const SubOptions *>(builtin_options())
+ : nullptr;
+ }
+ const DivOptions *builtin_options_as_DivOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_DivOptions
+ ? static_cast<const DivOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SqueezeOptions *builtin_options_as_SqueezeOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SqueezeOptions
+ ? static_cast<const SqueezeOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SequenceRNNOptions *builtin_options_as_SequenceRNNOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SequenceRNNOptions
+ ? static_cast<const SequenceRNNOptions *>(builtin_options())
+ : nullptr;
+ }
+ const StridedSliceOptions *builtin_options_as_StridedSliceOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_StridedSliceOptions
+ ? static_cast<const StridedSliceOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ExpOptions *builtin_options_as_ExpOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ExpOptions
+ ? static_cast<const ExpOptions *>(builtin_options())
+ : nullptr;
+ }
+ const TopKV2Options *builtin_options_as_TopKV2Options() const
+ {
+ return builtin_options_type() == BuiltinOptions_TopKV2Options
+ ? static_cast<const TopKV2Options *>(builtin_options())
+ : nullptr;
+ }
+ const SplitOptions *builtin_options_as_SplitOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SplitOptions
+ ? static_cast<const SplitOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LogSoftmaxOptions *builtin_options_as_LogSoftmaxOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LogSoftmaxOptions
+ ? static_cast<const LogSoftmaxOptions *>(builtin_options())
+ : nullptr;
+ }
+ const CastOptions *builtin_options_as_CastOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_CastOptions
+ ? static_cast<const CastOptions *>(builtin_options())
+ : nullptr;
+ }
+ const DequantizeOptions *builtin_options_as_DequantizeOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_DequantizeOptions
+ ? static_cast<const DequantizeOptions *>(builtin_options())
+ : nullptr;
+ }
+ const MaximumMinimumOptions *builtin_options_as_MaximumMinimumOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_MaximumMinimumOptions
+ ? static_cast<const MaximumMinimumOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ArgMaxOptions *builtin_options_as_ArgMaxOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ArgMaxOptions
+ ? static_cast<const ArgMaxOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LessOptions *builtin_options_as_LessOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LessOptions
+ ? static_cast<const LessOptions *>(builtin_options())
+ : nullptr;
+ }
+ const NegOptions *builtin_options_as_NegOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_NegOptions
+ ? static_cast<const NegOptions *>(builtin_options())
+ : nullptr;
+ }
+ const PadV2Options *builtin_options_as_PadV2Options() const
+ {
+ return builtin_options_type() == BuiltinOptions_PadV2Options
+ ? static_cast<const PadV2Options *>(builtin_options())
+ : nullptr;
+ }
+ const GreaterOptions *builtin_options_as_GreaterOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_GreaterOptions
+ ? static_cast<const GreaterOptions *>(builtin_options())
+ : nullptr;
+ }
+ const GreaterEqualOptions *builtin_options_as_GreaterEqualOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_GreaterEqualOptions
+ ? static_cast<const GreaterEqualOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LessEqualOptions *builtin_options_as_LessEqualOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LessEqualOptions
+ ? static_cast<const LessEqualOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SelectOptions *builtin_options_as_SelectOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SelectOptions
+ ? static_cast<const SelectOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SliceOptions *builtin_options_as_SliceOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SliceOptions
+ ? static_cast<const SliceOptions *>(builtin_options())
+ : nullptr;
+ }
+ const TransposeConvOptions *builtin_options_as_TransposeConvOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_TransposeConvOptions
+ ? static_cast<const TransposeConvOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SparseToDenseOptions *builtin_options_as_SparseToDenseOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SparseToDenseOptions
+ ? static_cast<const SparseToDenseOptions *>(builtin_options())
+ : nullptr;
+ }
+ const TileOptions *builtin_options_as_TileOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_TileOptions
+ ? static_cast<const TileOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ExpandDimsOptions *builtin_options_as_ExpandDimsOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ExpandDimsOptions
+ ? static_cast<const ExpandDimsOptions *>(builtin_options())
+ : nullptr;
+ }
+ const EqualOptions *builtin_options_as_EqualOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_EqualOptions
+ ? static_cast<const EqualOptions *>(builtin_options())
+ : nullptr;
+ }
+ const NotEqualOptions *builtin_options_as_NotEqualOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_NotEqualOptions
+ ? static_cast<const NotEqualOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ShapeOptions *builtin_options_as_ShapeOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ShapeOptions
+ ? static_cast<const ShapeOptions *>(builtin_options())
+ : nullptr;
+ }
+ const PowOptions *builtin_options_as_PowOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_PowOptions
+ ? static_cast<const PowOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ArgMinOptions *builtin_options_as_ArgMinOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ArgMinOptions
+ ? static_cast<const ArgMinOptions *>(builtin_options())
+ : nullptr;
+ }
+ const FakeQuantOptions *builtin_options_as_FakeQuantOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_FakeQuantOptions
+ ? static_cast<const FakeQuantOptions *>(builtin_options())
+ : nullptr;
+ }
+ const PackOptions *builtin_options_as_PackOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_PackOptions
+ ? static_cast<const PackOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LogicalOrOptions *builtin_options_as_LogicalOrOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LogicalOrOptions
+ ? static_cast<const LogicalOrOptions *>(builtin_options())
+ : nullptr;
+ }
+ const OneHotOptions *builtin_options_as_OneHotOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_OneHotOptions
+ ? static_cast<const OneHotOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LogicalAndOptions *builtin_options_as_LogicalAndOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LogicalAndOptions
+ ? static_cast<const LogicalAndOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LogicalNotOptions *builtin_options_as_LogicalNotOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LogicalNotOptions
+ ? static_cast<const LogicalNotOptions *>(builtin_options())
+ : nullptr;
+ }
+ const UnpackOptions *builtin_options_as_UnpackOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_UnpackOptions
+ ? static_cast<const UnpackOptions *>(builtin_options())
+ : nullptr;
+ }
+ const FloorDivOptions *builtin_options_as_FloorDivOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_FloorDivOptions
+ ? static_cast<const FloorDivOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SquareOptions *builtin_options_as_SquareOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SquareOptions
+ ? static_cast<const SquareOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ZerosLikeOptions *builtin_options_as_ZerosLikeOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ZerosLikeOptions
+ ? static_cast<const ZerosLikeOptions *>(builtin_options())
+ : nullptr;
+ }
+ const FillOptions *builtin_options_as_FillOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_FillOptions
+ ? static_cast<const FillOptions *>(builtin_options())
+ : nullptr;
+ }
+ const BidirectionalSequenceLSTMOptions *
+ builtin_options_as_BidirectionalSequenceLSTMOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_BidirectionalSequenceLSTMOptions
+ ? static_cast<const BidirectionalSequenceLSTMOptions *>(builtin_options())
+ : nullptr;
+ }
+ const BidirectionalSequenceRNNOptions *builtin_options_as_BidirectionalSequenceRNNOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_BidirectionalSequenceRNNOptions
+ ? static_cast<const BidirectionalSequenceRNNOptions *>(builtin_options())
+ : nullptr;
+ }
+ const UnidirectionalSequenceLSTMOptions *
+ builtin_options_as_UnidirectionalSequenceLSTMOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_UnidirectionalSequenceLSTMOptions
+ ? static_cast<const UnidirectionalSequenceLSTMOptions *>(builtin_options())
+ : nullptr;
+ }
+ const FloorModOptions *builtin_options_as_FloorModOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_FloorModOptions
+ ? static_cast<const FloorModOptions *>(builtin_options())
+ : nullptr;
+ }
+ const RangeOptions *builtin_options_as_RangeOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_RangeOptions
+ ? static_cast<const RangeOptions *>(builtin_options())
+ : nullptr;
+ }
+ const ResizeNearestNeighborOptions *builtin_options_as_ResizeNearestNeighborOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_ResizeNearestNeighborOptions
+ ? static_cast<const ResizeNearestNeighborOptions *>(builtin_options())
+ : nullptr;
+ }
+ const LeakyReluOptions *builtin_options_as_LeakyReluOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_LeakyReluOptions
+ ? static_cast<const LeakyReluOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SquaredDifferenceOptions *builtin_options_as_SquaredDifferenceOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SquaredDifferenceOptions
+ ? static_cast<const SquaredDifferenceOptions *>(builtin_options())
+ : nullptr;
+ }
+ const MirrorPadOptions *builtin_options_as_MirrorPadOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_MirrorPadOptions
+ ? static_cast<const MirrorPadOptions *>(builtin_options())
+ : nullptr;
+ }
+ const AbsOptions *builtin_options_as_AbsOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_AbsOptions
+ ? static_cast<const AbsOptions *>(builtin_options())
+ : nullptr;
+ }
+ const SplitVOptions *builtin_options_as_SplitVOptions() const
+ {
+ return builtin_options_type() == BuiltinOptions_SplitVOptions
+ ? static_cast<const SplitVOptions *>(builtin_options())
+ : nullptr;
+ }
+ const flatbuffers::Vector<uint8_t> *custom_options() const
+ {
+ return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_CUSTOM_OPTIONS);
+ }
+ CustomOptionsFormat custom_options_format() const
+ {
+ return static_cast<CustomOptionsFormat>(GetField<int8_t>(VT_CUSTOM_OPTIONS_FORMAT, 0));
+ }
+ const flatbuffers::Vector<uint8_t> *mutating_variable_inputs() const
+ {
+ return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_MUTATING_VARIABLE_INPUTS);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint32_t>(verifier, VT_OPCODE_INDEX) &&
+ VerifyOffset(verifier, VT_INPUTS) && verifier.VerifyVector(inputs()) &&
+ VerifyOffset(verifier, VT_OUTPUTS) && verifier.VerifyVector(outputs()) &&
+ VerifyField<uint8_t>(verifier, VT_BUILTIN_OPTIONS_TYPE) &&
+ VerifyOffset(verifier, VT_BUILTIN_OPTIONS) &&
+ VerifyBuiltinOptions(verifier, builtin_options(), builtin_options_type()) &&
+ VerifyOffset(verifier, VT_CUSTOM_OPTIONS) && verifier.VerifyVector(custom_options()) &&
+ VerifyField<int8_t>(verifier, VT_CUSTOM_OPTIONS_FORMAT) &&
+ VerifyOffset(verifier, VT_MUTATING_VARIABLE_INPUTS) &&
+ verifier.VerifyVector(mutating_variable_inputs()) && verifier.EndTable();
+ }
+};
+
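+// The specializations below let callers write op->builtin_options_as<T>()
+// instead of naming the per-type accessor; each one simply forwards to the
+// matching builtin_options_as_*() method above. For example:
+//
+//   if (const auto *conv = op->builtin_options_as<Conv2DOptions>())
+//     { /* Conv2D-specific fields, e.g. conv->stride_w() */ }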
+template <> inline const Conv2DOptions *Operator::builtin_options_as<Conv2DOptions>() const
+{
+ return builtin_options_as_Conv2DOptions();
+}
+
+template <>
+inline const DepthwiseConv2DOptions *Operator::builtin_options_as<DepthwiseConv2DOptions>() const
+{
+ return builtin_options_as_DepthwiseConv2DOptions();
+}
+
+template <>
+inline const ConcatEmbeddingsOptions *Operator::builtin_options_as<ConcatEmbeddingsOptions>() const
+{
+ return builtin_options_as_ConcatEmbeddingsOptions();
+}
+
+template <>
+inline const LSHProjectionOptions *Operator::builtin_options_as<LSHProjectionOptions>() const
+{
+ return builtin_options_as_LSHProjectionOptions();
+}
+
+template <> inline const Pool2DOptions *Operator::builtin_options_as<Pool2DOptions>() const
+{
+ return builtin_options_as_Pool2DOptions();
+}
+
+template <> inline const SVDFOptions *Operator::builtin_options_as<SVDFOptions>() const
+{
+ return builtin_options_as_SVDFOptions();
+}
+
+template <> inline const RNNOptions *Operator::builtin_options_as<RNNOptions>() const
+{
+ return builtin_options_as_RNNOptions();
+}
+
+template <>
+inline const FullyConnectedOptions *Operator::builtin_options_as<FullyConnectedOptions>() const
+{
+ return builtin_options_as_FullyConnectedOptions();
+}
+
+template <> inline const SoftmaxOptions *Operator::builtin_options_as<SoftmaxOptions>() const
+{
+ return builtin_options_as_SoftmaxOptions();
+}
+
+template <>
+inline const ConcatenationOptions *Operator::builtin_options_as<ConcatenationOptions>() const
+{
+ return builtin_options_as_ConcatenationOptions();
+}
+
+template <> inline const AddOptions *Operator::builtin_options_as<AddOptions>() const
+{
+ return builtin_options_as_AddOptions();
+}
+
+template <> inline const L2NormOptions *Operator::builtin_options_as<L2NormOptions>() const
+{
+ return builtin_options_as_L2NormOptions();
+}
+
+template <>
+inline const LocalResponseNormalizationOptions *
+Operator::builtin_options_as<LocalResponseNormalizationOptions>() const
+{
+ return builtin_options_as_LocalResponseNormalizationOptions();
+}
+
+template <> inline const LSTMOptions *Operator::builtin_options_as<LSTMOptions>() const
+{
+ return builtin_options_as_LSTMOptions();
+}
+
+template <>
+inline const ResizeBilinearOptions *Operator::builtin_options_as<ResizeBilinearOptions>() const
+{
+ return builtin_options_as_ResizeBilinearOptions();
+}
+
+template <> inline const CallOptions *Operator::builtin_options_as<CallOptions>() const
+{
+ return builtin_options_as_CallOptions();
+}
+
+template <> inline const ReshapeOptions *Operator::builtin_options_as<ReshapeOptions>() const
+{
+ return builtin_options_as_ReshapeOptions();
+}
+
+template <> inline const SkipGramOptions *Operator::builtin_options_as<SkipGramOptions>() const
+{
+ return builtin_options_as_SkipGramOptions();
+}
+
+template <>
+inline const SpaceToDepthOptions *Operator::builtin_options_as<SpaceToDepthOptions>() const
+{
+ return builtin_options_as_SpaceToDepthOptions();
+}
+
+template <>
+inline const EmbeddingLookupSparseOptions *
+Operator::builtin_options_as<EmbeddingLookupSparseOptions>() const
+{
+ return builtin_options_as_EmbeddingLookupSparseOptions();
+}
+
+template <> inline const MulOptions *Operator::builtin_options_as<MulOptions>() const
+{
+ return builtin_options_as_MulOptions();
+}
+
+template <> inline const PadOptions *Operator::builtin_options_as<PadOptions>() const
+{
+ return builtin_options_as_PadOptions();
+}
+
+template <> inline const GatherOptions *Operator::builtin_options_as<GatherOptions>() const
+{
+ return builtin_options_as_GatherOptions();
+}
+
+template <>
+inline const BatchToSpaceNDOptions *Operator::builtin_options_as<BatchToSpaceNDOptions>() const
+{
+ return builtin_options_as_BatchToSpaceNDOptions();
+}
+
+template <>
+inline const SpaceToBatchNDOptions *Operator::builtin_options_as<SpaceToBatchNDOptions>() const
+{
+ return builtin_options_as_SpaceToBatchNDOptions();
+}
+
+template <> inline const TransposeOptions *Operator::builtin_options_as<TransposeOptions>() const
+{
+ return builtin_options_as_TransposeOptions();
+}
+
+template <> inline const ReducerOptions *Operator::builtin_options_as<ReducerOptions>() const
+{
+ return builtin_options_as_ReducerOptions();
+}
+
+template <> inline const SubOptions *Operator::builtin_options_as<SubOptions>() const
+{
+ return builtin_options_as_SubOptions();
+}
+
+template <> inline const DivOptions *Operator::builtin_options_as<DivOptions>() const
+{
+ return builtin_options_as_DivOptions();
+}
+
+template <> inline const SqueezeOptions *Operator::builtin_options_as<SqueezeOptions>() const
+{
+ return builtin_options_as_SqueezeOptions();
+}
+
+template <>
+inline const SequenceRNNOptions *Operator::builtin_options_as<SequenceRNNOptions>() const
+{
+ return builtin_options_as_SequenceRNNOptions();
+}
+
+template <>
+inline const StridedSliceOptions *Operator::builtin_options_as<StridedSliceOptions>() const
+{
+ return builtin_options_as_StridedSliceOptions();
+}
+
+template <> inline const ExpOptions *Operator::builtin_options_as<ExpOptions>() const
+{
+ return builtin_options_as_ExpOptions();
+}
+
+template <> inline const TopKV2Options *Operator::builtin_options_as<TopKV2Options>() const
+{
+ return builtin_options_as_TopKV2Options();
+}
+
+template <> inline const SplitOptions *Operator::builtin_options_as<SplitOptions>() const
+{
+ return builtin_options_as_SplitOptions();
+}
+
+template <> inline const LogSoftmaxOptions *Operator::builtin_options_as<LogSoftmaxOptions>() const
+{
+ return builtin_options_as_LogSoftmaxOptions();
+}
+
+template <> inline const CastOptions *Operator::builtin_options_as<CastOptions>() const
+{
+ return builtin_options_as_CastOptions();
+}
+
+template <> inline const DequantizeOptions *Operator::builtin_options_as<DequantizeOptions>() const
+{
+ return builtin_options_as_DequantizeOptions();
+}
+
+template <>
+inline const MaximumMinimumOptions *Operator::builtin_options_as<MaximumMinimumOptions>() const
+{
+ return builtin_options_as_MaximumMinimumOptions();
+}
+
+template <> inline const ArgMaxOptions *Operator::builtin_options_as<ArgMaxOptions>() const
+{
+ return builtin_options_as_ArgMaxOptions();
+}
+
+template <> inline const LessOptions *Operator::builtin_options_as<LessOptions>() const
+{
+ return builtin_options_as_LessOptions();
+}
+
+template <> inline const NegOptions *Operator::builtin_options_as<NegOptions>() const
+{
+ return builtin_options_as_NegOptions();
+}
+
+template <> inline const PadV2Options *Operator::builtin_options_as<PadV2Options>() const
+{
+ return builtin_options_as_PadV2Options();
+}
+
+template <> inline const GreaterOptions *Operator::builtin_options_as<GreaterOptions>() const
+{
+ return builtin_options_as_GreaterOptions();
+}
+
+template <>
+inline const GreaterEqualOptions *Operator::builtin_options_as<GreaterEqualOptions>() const
+{
+ return builtin_options_as_GreaterEqualOptions();
+}
+
+template <> inline const LessEqualOptions *Operator::builtin_options_as<LessEqualOptions>() const
+{
+ return builtin_options_as_LessEqualOptions();
+}
+
+template <> inline const SelectOptions *Operator::builtin_options_as<SelectOptions>() const
+{
+ return builtin_options_as_SelectOptions();
+}
+
+template <> inline const SliceOptions *Operator::builtin_options_as<SliceOptions>() const
+{
+ return builtin_options_as_SliceOptions();
+}
+
+template <>
+inline const TransposeConvOptions *Operator::builtin_options_as<TransposeConvOptions>() const
+{
+ return builtin_options_as_TransposeConvOptions();
+}
+
+template <>
+inline const SparseToDenseOptions *Operator::builtin_options_as<SparseToDenseOptions>() const
+{
+ return builtin_options_as_SparseToDenseOptions();
+}
+
+template <> inline const TileOptions *Operator::builtin_options_as<TileOptions>() const
+{
+ return builtin_options_as_TileOptions();
+}
+
+template <> inline const ExpandDimsOptions *Operator::builtin_options_as<ExpandDimsOptions>() const
+{
+ return builtin_options_as_ExpandDimsOptions();
+}
+
+template <> inline const EqualOptions *Operator::builtin_options_as<EqualOptions>() const
+{
+ return builtin_options_as_EqualOptions();
+}
+
+template <> inline const NotEqualOptions *Operator::builtin_options_as<NotEqualOptions>() const
+{
+ return builtin_options_as_NotEqualOptions();
+}
+
+template <> inline const ShapeOptions *Operator::builtin_options_as<ShapeOptions>() const
+{
+ return builtin_options_as_ShapeOptions();
+}
+
+template <> inline const PowOptions *Operator::builtin_options_as<PowOptions>() const
+{
+ return builtin_options_as_PowOptions();
+}
+
+template <> inline const ArgMinOptions *Operator::builtin_options_as<ArgMinOptions>() const
+{
+ return builtin_options_as_ArgMinOptions();
+}
+
+template <> inline const FakeQuantOptions *Operator::builtin_options_as<FakeQuantOptions>() const
+{
+ return builtin_options_as_FakeQuantOptions();
+}
+
+template <> inline const PackOptions *Operator::builtin_options_as<PackOptions>() const
+{
+ return builtin_options_as_PackOptions();
+}
+
+template <> inline const LogicalOrOptions *Operator::builtin_options_as<LogicalOrOptions>() const
+{
+ return builtin_options_as_LogicalOrOptions();
+}
+
+template <> inline const OneHotOptions *Operator::builtin_options_as<OneHotOptions>() const
+{
+ return builtin_options_as_OneHotOptions();
+}
+
+template <> inline const LogicalAndOptions *Operator::builtin_options_as<LogicalAndOptions>() const
+{
+ return builtin_options_as_LogicalAndOptions();
+}
+
+template <> inline const LogicalNotOptions *Operator::builtin_options_as<LogicalNotOptions>() const
+{
+ return builtin_options_as_LogicalNotOptions();
+}
+
+template <> inline const UnpackOptions *Operator::builtin_options_as<UnpackOptions>() const
+{
+ return builtin_options_as_UnpackOptions();
+}
+
+template <> inline const FloorDivOptions *Operator::builtin_options_as<FloorDivOptions>() const
+{
+ return builtin_options_as_FloorDivOptions();
+}
+
+template <> inline const SquareOptions *Operator::builtin_options_as<SquareOptions>() const
+{
+ return builtin_options_as_SquareOptions();
+}
+
+template <> inline const ZerosLikeOptions *Operator::builtin_options_as<ZerosLikeOptions>() const
+{
+ return builtin_options_as_ZerosLikeOptions();
+}
+
+template <> inline const FillOptions *Operator::builtin_options_as<FillOptions>() const
+{
+ return builtin_options_as_FillOptions();
+}
+
+template <>
+inline const BidirectionalSequenceLSTMOptions *
+Operator::builtin_options_as<BidirectionalSequenceLSTMOptions>() const
+{
+ return builtin_options_as_BidirectionalSequenceLSTMOptions();
+}
+
+template <>
+inline const BidirectionalSequenceRNNOptions *
+Operator::builtin_options_as<BidirectionalSequenceRNNOptions>() const
+{
+ return builtin_options_as_BidirectionalSequenceRNNOptions();
+}
+
+template <>
+inline const UnidirectionalSequenceLSTMOptions *
+Operator::builtin_options_as<UnidirectionalSequenceLSTMOptions>() const
+{
+ return builtin_options_as_UnidirectionalSequenceLSTMOptions();
+}
+
+template <> inline const FloorModOptions *Operator::builtin_options_as<FloorModOptions>() const
+{
+ return builtin_options_as_FloorModOptions();
+}
+
+template <> inline const RangeOptions *Operator::builtin_options_as<RangeOptions>() const
+{
+ return builtin_options_as_RangeOptions();
+}
+
+template <>
+inline const ResizeNearestNeighborOptions *
+Operator::builtin_options_as<ResizeNearestNeighborOptions>() const
+{
+ return builtin_options_as_ResizeNearestNeighborOptions();
+}
+
+template <> inline const LeakyReluOptions *Operator::builtin_options_as<LeakyReluOptions>() const
+{
+ return builtin_options_as_LeakyReluOptions();
+}
+
+template <>
+inline const SquaredDifferenceOptions *
+Operator::builtin_options_as<SquaredDifferenceOptions>() const
+{
+ return builtin_options_as_SquaredDifferenceOptions();
+}
+
+template <> inline const MirrorPadOptions *Operator::builtin_options_as<MirrorPadOptions>() const
+{
+ return builtin_options_as_MirrorPadOptions();
+}
+
+template <> inline const AbsOptions *Operator::builtin_options_as<AbsOptions>() const
+{
+ return builtin_options_as_AbsOptions();
+}
+
+template <> inline const SplitVOptions *Operator::builtin_options_as<SplitVOptions>() const
+{
+ return builtin_options_as_SplitVOptions();
+}
+
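+// OperatorBuilder: note that CreateOperator (below) calls the add_* setters in
+// decreasing field-size order (offsets and 32-bit scalars before the 8-bit
+// enums), the usual generated-code layout optimization; the call order does
+// not affect the logical schema.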
+struct OperatorBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_opcode_index(uint32_t opcode_index)
+ {
+ fbb_.AddElement<uint32_t>(Operator::VT_OPCODE_INDEX, opcode_index, 0);
+ }
+ void add_inputs(flatbuffers::Offset<flatbuffers::Vector<int32_t>> inputs)
+ {
+ fbb_.AddOffset(Operator::VT_INPUTS, inputs);
+ }
+ void add_outputs(flatbuffers::Offset<flatbuffers::Vector<int32_t>> outputs)
+ {
+ fbb_.AddOffset(Operator::VT_OUTPUTS, outputs);
+ }
+ void add_builtin_options_type(BuiltinOptions builtin_options_type)
+ {
+ fbb_.AddElement<uint8_t>(Operator::VT_BUILTIN_OPTIONS_TYPE,
+ static_cast<uint8_t>(builtin_options_type), 0);
+ }
+ void add_builtin_options(flatbuffers::Offset<void> builtin_options)
+ {
+ fbb_.AddOffset(Operator::VT_BUILTIN_OPTIONS, builtin_options);
+ }
+ void add_custom_options(flatbuffers::Offset<flatbuffers::Vector<uint8_t>> custom_options)
+ {
+ fbb_.AddOffset(Operator::VT_CUSTOM_OPTIONS, custom_options);
+ }
+ void add_custom_options_format(CustomOptionsFormat custom_options_format)
+ {
+ fbb_.AddElement<int8_t>(Operator::VT_CUSTOM_OPTIONS_FORMAT,
+ static_cast<int8_t>(custom_options_format), 0);
+ }
+ void add_mutating_variable_inputs(
+ flatbuffers::Offset<flatbuffers::Vector<uint8_t>> mutating_variable_inputs)
+ {
+ fbb_.AddOffset(Operator::VT_MUTATING_VARIABLE_INPUTS, mutating_variable_inputs);
+ }
+ explicit OperatorBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ OperatorBuilder &operator=(const OperatorBuilder &);
+ flatbuffers::Offset<Operator> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<Operator>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<Operator>
+CreateOperator(flatbuffers::FlatBufferBuilder &_fbb, uint32_t opcode_index = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> inputs = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> outputs = 0,
+ BuiltinOptions builtin_options_type = BuiltinOptions_NONE,
+ flatbuffers::Offset<void> builtin_options = 0,
+ flatbuffers::Offset<flatbuffers::Vector<uint8_t>> custom_options = 0,
+ CustomOptionsFormat custom_options_format = CustomOptionsFormat_FLEXBUFFERS,
+ flatbuffers::Offset<flatbuffers::Vector<uint8_t>> mutating_variable_inputs = 0)
+{
+ OperatorBuilder builder_(_fbb);
+ builder_.add_mutating_variable_inputs(mutating_variable_inputs);
+ builder_.add_custom_options(custom_options);
+ builder_.add_builtin_options(builtin_options);
+ builder_.add_outputs(outputs);
+ builder_.add_inputs(inputs);
+ builder_.add_opcode_index(opcode_index);
+ builder_.add_custom_options_format(custom_options_format);
+ builder_.add_builtin_options_type(builtin_options_type);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<Operator>
+CreateOperatorDirect(flatbuffers::FlatBufferBuilder &_fbb, uint32_t opcode_index = 0,
+ const std::vector<int32_t> *inputs = nullptr,
+ const std::vector<int32_t> *outputs = nullptr,
+ BuiltinOptions builtin_options_type = BuiltinOptions_NONE,
+ flatbuffers::Offset<void> builtin_options = 0,
+ const std::vector<uint8_t> *custom_options = nullptr,
+ CustomOptionsFormat custom_options_format = CustomOptionsFormat_FLEXBUFFERS,
+ const std::vector<uint8_t> *mutating_variable_inputs = nullptr)
+{
+ return tflite::CreateOperator(
+ _fbb, opcode_index, inputs ? _fbb.CreateVector<int32_t>(*inputs) : 0,
+ outputs ? _fbb.CreateVector<int32_t>(*outputs) : 0, builtin_options_type, builtin_options,
+ custom_options ? _fbb.CreateVector<uint8_t>(*custom_options) : 0, custom_options_format,
+ mutating_variable_inputs ? _fbb.CreateVector<uint8_t>(*mutating_variable_inputs) : 0);
+}
+
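+// SubGraph is one computation graph: tensors owns the tensor metadata,
+// inputs/outputs list the graph's boundary tensors by index, operators holds
+// the nodes in execution order, and name is an optional label.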
+struct SubGraph FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_TENSORS = 4,
+ VT_INPUTS = 6,
+ VT_OUTPUTS = 8,
+ VT_OPERATORS = 10,
+ VT_NAME = 12
+ };
+ const flatbuffers::Vector<flatbuffers::Offset<Tensor>> *tensors() const
+ {
+ return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Tensor>> *>(VT_TENSORS);
+ }
+ const flatbuffers::Vector<int32_t> *inputs() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_INPUTS);
+ }
+ const flatbuffers::Vector<int32_t> *outputs() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_OUTPUTS);
+ }
+ const flatbuffers::Vector<flatbuffers::Offset<Operator>> *operators() const
+ {
+ return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Operator>> *>(VT_OPERATORS);
+ }
+ const flatbuffers::String *name() const
+ {
+ return GetPointer<const flatbuffers::String *>(VT_NAME);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_TENSORS) &&
+ verifier.VerifyVector(tensors()) && verifier.VerifyVectorOfTables(tensors()) &&
+ VerifyOffset(verifier, VT_INPUTS) && verifier.VerifyVector(inputs()) &&
+ VerifyOffset(verifier, VT_OUTPUTS) && verifier.VerifyVector(outputs()) &&
+ VerifyOffset(verifier, VT_OPERATORS) && verifier.VerifyVector(operators()) &&
+ verifier.VerifyVectorOfTables(operators()) && VerifyOffset(verifier, VT_NAME) &&
+ verifier.VerifyString(name()) && verifier.EndTable();
+ }
+};
+
+struct SubGraphBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_tensors(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Tensor>>> tensors)
+ {
+ fbb_.AddOffset(SubGraph::VT_TENSORS, tensors);
+ }
+ void add_inputs(flatbuffers::Offset<flatbuffers::Vector<int32_t>> inputs)
+ {
+ fbb_.AddOffset(SubGraph::VT_INPUTS, inputs);
+ }
+ void add_outputs(flatbuffers::Offset<flatbuffers::Vector<int32_t>> outputs)
+ {
+ fbb_.AddOffset(SubGraph::VT_OUTPUTS, outputs);
+ }
+ void
+ add_operators(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Operator>>> operators)
+ {
+ fbb_.AddOffset(SubGraph::VT_OPERATORS, operators);
+ }
+ void add_name(flatbuffers::Offset<flatbuffers::String> name)
+ {
+ fbb_.AddOffset(SubGraph::VT_NAME, name);
+ }
+ explicit SubGraphBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ SubGraphBuilder &operator=(const SubGraphBuilder &);
+ flatbuffers::Offset<SubGraph> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<SubGraph>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<SubGraph> CreateSubGraph(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Tensor>>> tensors = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> inputs = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> outputs = 0,
+ flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Operator>>> operators = 0,
+ flatbuffers::Offset<flatbuffers::String> name = 0)
+{
+ SubGraphBuilder builder_(_fbb);
+ builder_.add_name(name);
+ builder_.add_operators(operators);
+ builder_.add_outputs(outputs);
+ builder_.add_inputs(inputs);
+ builder_.add_tensors(tensors);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<SubGraph>
+CreateSubGraphDirect(flatbuffers::FlatBufferBuilder &_fbb,
+ const std::vector<flatbuffers::Offset<Tensor>> *tensors = nullptr,
+ const std::vector<int32_t> *inputs = nullptr,
+ const std::vector<int32_t> *outputs = nullptr,
+ const std::vector<flatbuffers::Offset<Operator>> *operators = nullptr,
+ const char *name = nullptr)
+{
+ return tflite::CreateSubGraph(
+ _fbb, tensors ? _fbb.CreateVector<flatbuffers::Offset<Tensor>>(*tensors) : 0,
+ inputs ? _fbb.CreateVector<int32_t>(*inputs) : 0,
+ outputs ? _fbb.CreateVector<int32_t>(*outputs) : 0,
+ operators ? _fbb.CreateVector<flatbuffers::Offset<Operator>>(*operators) : 0,
+ name ? _fbb.CreateString(name) : 0);
+}
+
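+// Buffer wraps raw tensor data as a byte vector. By convention, buffer 0 of a
+// model is left empty and acts as the sentinel for tensors that have no
+// constant data.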
+struct Buffer FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_DATA = 4
+ };
+ const flatbuffers::Vector<uint8_t> *data() const
+ {
+ return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_DATA);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyOffset(verifier, VT_DATA) &&
+ verifier.VerifyVector(data()) && verifier.EndTable();
+ }
+};
+
+struct BufferBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_data(flatbuffers::Offset<flatbuffers::Vector<uint8_t>> data)
+ {
+ fbb_.AddOffset(Buffer::VT_DATA, data);
+ }
+ explicit BufferBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ BufferBuilder &operator=(const BufferBuilder &);
+ flatbuffers::Offset<Buffer> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<Buffer>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<Buffer>
+CreateBuffer(flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<uint8_t>> data = 0)
+{
+ BufferBuilder builder_(_fbb);
+ builder_.add_data(data);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<Buffer> CreateBufferDirect(flatbuffers::FlatBufferBuilder &_fbb,
+ const std::vector<uint8_t> *data = nullptr)
+{
+ return tflite::CreateBuffer(_fbb, data ? _fbb.CreateVector<uint8_t>(*data) : 0);
+}
+
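+// Model is the root table of a .tflite file: version is the schema version,
+// operator_codes and buffers are shared across all subgraphs, subgraphs[0] is
+// taken as the main graph, and metadata_buffer indexes into buffers.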
+struct Model FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table
+{
+ enum
+ {
+ VT_VERSION = 4,
+ VT_OPERATOR_CODES = 6,
+ VT_SUBGRAPHS = 8,
+ VT_DESCRIPTION = 10,
+ VT_BUFFERS = 12,
+ VT_METADATA_BUFFER = 14
+ };
+ uint32_t version() const { return GetField<uint32_t>(VT_VERSION, 0); }
+ const flatbuffers::Vector<flatbuffers::Offset<OperatorCode>> *operator_codes() const
+ {
+ return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<OperatorCode>> *>(
+ VT_OPERATOR_CODES);
+ }
+ const flatbuffers::Vector<flatbuffers::Offset<SubGraph>> *subgraphs() const
+ {
+ return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<SubGraph>> *>(VT_SUBGRAPHS);
+ }
+ const flatbuffers::String *description() const
+ {
+ return GetPointer<const flatbuffers::String *>(VT_DESCRIPTION);
+ }
+ const flatbuffers::Vector<flatbuffers::Offset<Buffer>> *buffers() const
+ {
+ return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Buffer>> *>(VT_BUFFERS);
+ }
+ const flatbuffers::Vector<int32_t> *metadata_buffer() const
+ {
+ return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_METADATA_BUFFER);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const
+ {
+ return VerifyTableStart(verifier) && VerifyField<uint32_t>(verifier, VT_VERSION) &&
+ VerifyOffset(verifier, VT_OPERATOR_CODES) && verifier.VerifyVector(operator_codes()) &&
+ verifier.VerifyVectorOfTables(operator_codes()) &&
+ VerifyOffset(verifier, VT_SUBGRAPHS) && verifier.VerifyVector(subgraphs()) &&
+ verifier.VerifyVectorOfTables(subgraphs()) && VerifyOffset(verifier, VT_DESCRIPTION) &&
+ verifier.VerifyString(description()) && VerifyOffset(verifier, VT_BUFFERS) &&
+ verifier.VerifyVector(buffers()) && verifier.VerifyVectorOfTables(buffers()) &&
+ VerifyOffset(verifier, VT_METADATA_BUFFER) && verifier.VerifyVector(metadata_buffer()) &&
+ verifier.EndTable();
+ }
+};
+
+struct ModelBuilder
+{
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_version(uint32_t version) { fbb_.AddElement<uint32_t>(Model::VT_VERSION, version, 0); }
+ void add_operator_codes(
+ flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<OperatorCode>>> operator_codes)
+ {
+ fbb_.AddOffset(Model::VT_OPERATOR_CODES, operator_codes);
+ }
+ void
+ add_subgraphs(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<SubGraph>>> subgraphs)
+ {
+ fbb_.AddOffset(Model::VT_SUBGRAPHS, subgraphs);
+ }
+ void add_description(flatbuffers::Offset<flatbuffers::String> description)
+ {
+ fbb_.AddOffset(Model::VT_DESCRIPTION, description);
+ }
+ void add_buffers(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Buffer>>> buffers)
+ {
+ fbb_.AddOffset(Model::VT_BUFFERS, buffers);
+ }
+ void add_metadata_buffer(flatbuffers::Offset<flatbuffers::Vector<int32_t>> metadata_buffer)
+ {
+ fbb_.AddOffset(Model::VT_METADATA_BUFFER, metadata_buffer);
+ }
+ explicit ModelBuilder(flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb)
+ {
+ start_ = fbb_.StartTable();
+ }
+ ModelBuilder &operator=(const ModelBuilder &);
+ flatbuffers::Offset<Model> Finish()
+ {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<Model>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<Model> CreateModel(
+ flatbuffers::FlatBufferBuilder &_fbb, uint32_t version = 0,
+ flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<OperatorCode>>> operator_codes = 0,
+ flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<SubGraph>>> subgraphs = 0,
+ flatbuffers::Offset<flatbuffers::String> description = 0,
+ flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Buffer>>> buffers = 0,
+ flatbuffers::Offset<flatbuffers::Vector<int32_t>> metadata_buffer = 0)
+{
+ ModelBuilder builder_(_fbb);
+ builder_.add_metadata_buffer(metadata_buffer);
+ builder_.add_buffers(buffers);
+ builder_.add_description(description);
+ builder_.add_subgraphs(subgraphs);
+ builder_.add_operator_codes(operator_codes);
+ builder_.add_version(version);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<Model>
+CreateModelDirect(flatbuffers::FlatBufferBuilder &_fbb, uint32_t version = 0,
+ const std::vector<flatbuffers::Offset<OperatorCode>> *operator_codes = nullptr,
+ const std::vector<flatbuffers::Offset<SubGraph>> *subgraphs = nullptr,
+ const char *description = nullptr,
+ const std::vector<flatbuffers::Offset<Buffer>> *buffers = nullptr,
+ const std::vector<int32_t> *metadata_buffer = nullptr)
+{
+ return tflite::CreateModel(
+ _fbb, version,
+ operator_codes ? _fbb.CreateVector<flatbuffers::Offset<OperatorCode>>(*operator_codes) : 0,
+ subgraphs ? _fbb.CreateVector<flatbuffers::Offset<SubGraph>>(*subgraphs) : 0,
+ description ? _fbb.CreateString(description) : 0,
+ buffers ? _fbb.CreateVector<flatbuffers::Offset<Buffer>>(*buffers) : 0,
+ metadata_buffer ? _fbb.CreateVector<int32_t>(*metadata_buffer) : 0);
+}
+
+inline bool VerifyQuantizationDetails(flatbuffers::Verifier &verifier, const void *obj,
+ QuantizationDetails type)
+{
+ switch (type)
+ {
+ case QuantizationDetails_NONE:
+ {
+ return true;
+ }
+ case QuantizationDetails_CustomQuantization:
+ {
+ auto ptr = reinterpret_cast<const CustomQuantization *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ default:
+ return false;
+ }
+}
+
+inline bool
+VerifyQuantizationDetailsVector(flatbuffers::Verifier &verifier,
+ const flatbuffers::Vector<flatbuffers::Offset<void>> *values,
+ const flatbuffers::Vector<uint8_t> *types)
+{
+ if (!values || !types)
+ return !values && !types;
+ if (values->size() != types->size())
+ return false;
+ for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i)
+ {
+ if (!VerifyQuantizationDetails(verifier, values->Get(i),
+ types->GetEnum<QuantizationDetails>(i)))
+ {
+ return false;
+ }
+ }
+ return true;
+}
+
+inline bool VerifyBuiltinOptions(flatbuffers::Verifier &verifier, const void *obj,
+ BuiltinOptions type)
+{
+ switch (type)
+ {
+ case BuiltinOptions_NONE:
+ {
+ return true;
+ }
+ case BuiltinOptions_Conv2DOptions:
+ {
+ auto ptr = reinterpret_cast<const Conv2DOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_DepthwiseConv2DOptions:
+ {
+ auto ptr = reinterpret_cast<const DepthwiseConv2DOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ConcatEmbeddingsOptions:
+ {
+ auto ptr = reinterpret_cast<const ConcatEmbeddingsOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LSHProjectionOptions:
+ {
+ auto ptr = reinterpret_cast<const LSHProjectionOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_Pool2DOptions:
+ {
+ auto ptr = reinterpret_cast<const Pool2DOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SVDFOptions:
+ {
+ auto ptr = reinterpret_cast<const SVDFOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_RNNOptions:
+ {
+ auto ptr = reinterpret_cast<const RNNOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_FullyConnectedOptions:
+ {
+ auto ptr = reinterpret_cast<const FullyConnectedOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SoftmaxOptions:
+ {
+ auto ptr = reinterpret_cast<const SoftmaxOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ConcatenationOptions:
+ {
+ auto ptr = reinterpret_cast<const ConcatenationOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_AddOptions:
+ {
+ auto ptr = reinterpret_cast<const AddOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_L2NormOptions:
+ {
+ auto ptr = reinterpret_cast<const L2NormOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LocalResponseNormalizationOptions:
+ {
+ auto ptr = reinterpret_cast<const LocalResponseNormalizationOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LSTMOptions:
+ {
+ auto ptr = reinterpret_cast<const LSTMOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ResizeBilinearOptions:
+ {
+ auto ptr = reinterpret_cast<const ResizeBilinearOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_CallOptions:
+ {
+ auto ptr = reinterpret_cast<const CallOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ReshapeOptions:
+ {
+ auto ptr = reinterpret_cast<const ReshapeOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SkipGramOptions:
+ {
+ auto ptr = reinterpret_cast<const SkipGramOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SpaceToDepthOptions:
+ {
+ auto ptr = reinterpret_cast<const SpaceToDepthOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_EmbeddingLookupSparseOptions:
+ {
+ auto ptr = reinterpret_cast<const EmbeddingLookupSparseOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_MulOptions:
+ {
+ auto ptr = reinterpret_cast<const MulOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_PadOptions:
+ {
+ auto ptr = reinterpret_cast<const PadOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_GatherOptions:
+ {
+ auto ptr = reinterpret_cast<const GatherOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_BatchToSpaceNDOptions:
+ {
+ auto ptr = reinterpret_cast<const BatchToSpaceNDOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SpaceToBatchNDOptions:
+ {
+ auto ptr = reinterpret_cast<const SpaceToBatchNDOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_TransposeOptions:
+ {
+ auto ptr = reinterpret_cast<const TransposeOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ReducerOptions:
+ {
+ auto ptr = reinterpret_cast<const ReducerOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SubOptions:
+ {
+ auto ptr = reinterpret_cast<const SubOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_DivOptions:
+ {
+ auto ptr = reinterpret_cast<const DivOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SqueezeOptions:
+ {
+ auto ptr = reinterpret_cast<const SqueezeOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SequenceRNNOptions:
+ {
+ auto ptr = reinterpret_cast<const SequenceRNNOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_StridedSliceOptions:
+ {
+ auto ptr = reinterpret_cast<const StridedSliceOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ExpOptions:
+ {
+ auto ptr = reinterpret_cast<const ExpOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_TopKV2Options:
+ {
+ auto ptr = reinterpret_cast<const TopKV2Options *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SplitOptions:
+ {
+ auto ptr = reinterpret_cast<const SplitOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LogSoftmaxOptions:
+ {
+ auto ptr = reinterpret_cast<const LogSoftmaxOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_CastOptions:
+ {
+ auto ptr = reinterpret_cast<const CastOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_DequantizeOptions:
+ {
+ auto ptr = reinterpret_cast<const DequantizeOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_MaximumMinimumOptions:
+ {
+ auto ptr = reinterpret_cast<const MaximumMinimumOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ArgMaxOptions:
+ {
+ auto ptr = reinterpret_cast<const ArgMaxOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LessOptions:
+ {
+ auto ptr = reinterpret_cast<const LessOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_NegOptions:
+ {
+ auto ptr = reinterpret_cast<const NegOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_PadV2Options:
+ {
+ auto ptr = reinterpret_cast<const PadV2Options *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_GreaterOptions:
+ {
+ auto ptr = reinterpret_cast<const GreaterOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_GreaterEqualOptions:
+ {
+ auto ptr = reinterpret_cast<const GreaterEqualOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LessEqualOptions:
+ {
+ auto ptr = reinterpret_cast<const LessEqualOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SelectOptions:
+ {
+ auto ptr = reinterpret_cast<const SelectOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SliceOptions:
+ {
+ auto ptr = reinterpret_cast<const SliceOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_TransposeConvOptions:
+ {
+ auto ptr = reinterpret_cast<const TransposeConvOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SparseToDenseOptions:
+ {
+ auto ptr = reinterpret_cast<const SparseToDenseOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_TileOptions:
+ {
+ auto ptr = reinterpret_cast<const TileOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ExpandDimsOptions:
+ {
+ auto ptr = reinterpret_cast<const ExpandDimsOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_EqualOptions:
+ {
+ auto ptr = reinterpret_cast<const EqualOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_NotEqualOptions:
+ {
+ auto ptr = reinterpret_cast<const NotEqualOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ShapeOptions:
+ {
+ auto ptr = reinterpret_cast<const ShapeOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_PowOptions:
+ {
+ auto ptr = reinterpret_cast<const PowOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ArgMinOptions:
+ {
+ auto ptr = reinterpret_cast<const ArgMinOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_FakeQuantOptions:
+ {
+ auto ptr = reinterpret_cast<const FakeQuantOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_PackOptions:
+ {
+ auto ptr = reinterpret_cast<const PackOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LogicalOrOptions:
+ {
+ auto ptr = reinterpret_cast<const LogicalOrOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_OneHotOptions:
+ {
+ auto ptr = reinterpret_cast<const OneHotOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LogicalAndOptions:
+ {
+ auto ptr = reinterpret_cast<const LogicalAndOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LogicalNotOptions:
+ {
+ auto ptr = reinterpret_cast<const LogicalNotOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_UnpackOptions:
+ {
+ auto ptr = reinterpret_cast<const UnpackOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_FloorDivOptions:
+ {
+ auto ptr = reinterpret_cast<const FloorDivOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SquareOptions:
+ {
+ auto ptr = reinterpret_cast<const SquareOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ZerosLikeOptions:
+ {
+ auto ptr = reinterpret_cast<const ZerosLikeOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_FillOptions:
+ {
+ auto ptr = reinterpret_cast<const FillOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_BidirectionalSequenceLSTMOptions:
+ {
+ auto ptr = reinterpret_cast<const BidirectionalSequenceLSTMOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_BidirectionalSequenceRNNOptions:
+ {
+ auto ptr = reinterpret_cast<const BidirectionalSequenceRNNOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_UnidirectionalSequenceLSTMOptions:
+ {
+ auto ptr = reinterpret_cast<const UnidirectionalSequenceLSTMOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_FloorModOptions:
+ {
+ auto ptr = reinterpret_cast<const FloorModOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_RangeOptions:
+ {
+ auto ptr = reinterpret_cast<const RangeOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_ResizeNearestNeighborOptions:
+ {
+ auto ptr = reinterpret_cast<const ResizeNearestNeighborOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_LeakyReluOptions:
+ {
+ auto ptr = reinterpret_cast<const LeakyReluOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SquaredDifferenceOptions:
+ {
+ auto ptr = reinterpret_cast<const SquaredDifferenceOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_MirrorPadOptions:
+ {
+ auto ptr = reinterpret_cast<const MirrorPadOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_AbsOptions:
+ {
+ auto ptr = reinterpret_cast<const AbsOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ case BuiltinOptions_SplitVOptions:
+ {
+ auto ptr = reinterpret_cast<const SplitVOptions *>(obj);
+ return verifier.VerifyTable(ptr);
+ }
+ default:
+ return false;
+ }
+}
+
+inline bool VerifyBuiltinOptionsVector(flatbuffers::Verifier &verifier,
+ const flatbuffers::Vector<flatbuffers::Offset<void>> *values,
+ const flatbuffers::Vector<uint8_t> *types)
+{
+ if (!values || !types)
+ return !values && !types;
+ if (values->size() != types->size())
+ return false;
+ for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i)
+ {
+ if (!VerifyBuiltinOptions(verifier, values->Get(i), types->GetEnum<BuiltinOptions>(i)))
+ {
+ return false;
+ }
+ }
+ return true;
+}
+
+inline const tflite::Model *GetModel(const void *buf)
+{
+ return flatbuffers::GetRoot<tflite::Model>(buf);
+}
+
+inline const tflite::Model *GetSizePrefixedModel(const void *buf)
+{
+ return flatbuffers::GetSizePrefixedRoot<tflite::Model>(buf);
+}
+
+inline const char *ModelIdentifier() { return "TFL3"; }
+
+inline bool ModelBufferHasIdentifier(const void *buf)
+{
+ return flatbuffers::BufferHasIdentifier(buf, ModelIdentifier());
+}
+
+inline bool VerifyModelBuffer(flatbuffers::Verifier &verifier)
+{
+ return verifier.VerifyBuffer<tflite::Model>(ModelIdentifier());
+}
+
+inline bool VerifySizePrefixedModelBuffer(flatbuffers::Verifier &verifier)
+{
+ return verifier.VerifySizePrefixedBuffer<tflite::Model>(ModelIdentifier());
+}
+
+inline const char *ModelExtension() { return "tflite"; }
+
+inline void FinishModelBuffer(flatbuffers::FlatBufferBuilder &fbb,
+ flatbuffers::Offset<tflite::Model> root)
+{
+ fbb.Finish(root, ModelIdentifier());
+}
+
+inline void FinishSizePrefixedModelBuffer(flatbuffers::FlatBufferBuilder &fbb,
+ flatbuffers::Offset<tflite::Model> root)
+{
+ fbb.FinishSizePrefixed(root, ModelIdentifier());
+}
+
+} // namespace tflite
+
+#endif // FLATBUFFERS_GENERATED_SCHEMA_TFLITE_H_
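
Taken together, the helpers above give the full round trip for a .tflite buffer: build the tables with the Create*Direct functions, stamp the "TFL3" identifier with FinishModelBuffer, then verify and read the buffer back. A minimal sketch, assuming the generated header above is available as "schema_generated.h" (the include name is an assumption) and that version 3 matches the TFL3 identifier convention:

#include "schema_generated.h" // assumed include name for the header above

void roundtrip_empty_model()
{
  flatbuffers::FlatBufferBuilder fbb;
  // An empty model: no operator codes, subgraphs, or buffers.
  auto model = tflite::CreateModelDirect(fbb, /*version=*/3, nullptr, nullptr, "empty model");
  tflite::FinishModelBuffer(fbb, model); // appends the "TFL3" file identifier

  flatbuffers::Verifier verifier(fbb.GetBufferPointer(), fbb.GetSize());
  if (tflite::VerifyModelBuffer(verifier))
  {
    const tflite::Model *read_back = tflite::GetModel(fbb.GetBufferPointer());
    (void)read_back; // read_back->version() == 3 at this point
  }
}
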
diff --git a/runtimes/neurun/src/backend/BackendManager.cc b/runtimes/neurun/src/backend/BackendManager.cc
deleted file mode 100644
index 5d19d4015..000000000
--- a/runtimes/neurun/src/backend/BackendManager.cc
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <dlfcn.h>
-#include "BackendManager.h"
-
-#include "backend/interface/IConfig.h"
-#include "backend/interface/ITensorBuilder.h"
-#include "backend/interface/IStageGenerator.h"
-#include "util/logging.h"
-#include "util/config/ConfigManager.h"
-
-namespace neurun
-{
-namespace backend
-{
-
-Backend::Backend(const std::shared_ptr<neurun::backend::IConfig> &backend_config,
- const std::shared_ptr<neurun::backend::IStageGenerator> &stage_gen)
- : _config(backend_config), _stage_gen(stage_gen)
-{
- backend_config->initialize();
-}
-
-const std::shared_ptr<neurun::backend::IConfig> Backend::config() const { return _config; }
-
-const std::shared_ptr<neurun::backend::IStageGenerator> Backend::stage_gen() const
-{
- return _stage_gen;
-}
-
-const std::shared_ptr<neurun::backend::ITensorBuilder> Backend::tensor_builder() const
-{
- return _stage_gen->tensor_builder();
-}
-
-template <typename T, class... Types>
-void BackendManager::loadObjectFromPlugin(std::shared_ptr<T> &object_of_plugin_class,
- const std::string obj_creator_func_name, void *handle,
- Types &&... args)
-{
- T *(*allocate_obj)(Types && ... Args);
- // load object creator function
- allocate_obj = (T * (*)(Types && ... Args))dlsym(handle, obj_creator_func_name.c_str());
- if (allocate_obj == nullptr)
- {
- fprintf(stderr, "BackendManager: unable to open function %s: %s\n",
- obj_creator_func_name.c_str(), dlerror());
- abort();
- }
-
- object_of_plugin_class.reset(allocate_obj(args...));
-}
-
-void BackendManager::loadBackend(const std::string &backend,
- const neurun::model::operand::Set &operands)
-{
- const std::string backend_plugin = "libbackend_" + backend + ".so";
- void *handle = dlopen(backend_plugin.c_str(), RTLD_LAZY | RTLD_LOCAL);
- if (handle == nullptr)
- {
- fprintf(stderr, "BackendManager::loadBackend failed to load plugin of %s backend: %s\n",
- backend.c_str(), dlerror());
- abort();
- }
- VERBOSE(BackendManager::loadBackend) << "loaded " << backend_plugin << " as a plugin of "
- << backend << " backend\n";
-
- // load Config
- std::shared_ptr<neurun::backend::IConfig> config;
- loadObjectFromPlugin(config, std::string("allocate_Config"), handle);
-
- // load TensorBuilder
- std::shared_ptr<neurun::backend::ITensorBuilder> tensor_builder;
- loadObjectFromPlugin(tensor_builder, std::string("allocate_TensorBuilder"), handle);
-
- // load StageGenerator
- std::shared_ptr<neurun::backend::IStageGenerator> stage_gen;
- loadObjectFromPlugin(stage_gen, std::string("allocate_StageGenerator"), handle, operands,
- tensor_builder);
- _gen_map[config->id()] = {config, stage_gen};
-}
-
-BackendManager::BackendManager(const neurun::model::operand::Set &operands)
-{
- const auto backends = config::ConfigManager::instance().get<std::string>("BACKENDS");
- size_t prev_pos = 0;
- auto pos = backends.find(";");
- while (pos != std::string::npos)
- {
- loadBackend(backends.substr(prev_pos, pos - prev_pos), operands);
- prev_pos = pos + 1;
- pos = backends.find(";", prev_pos);
- }
-  // handle the last backend when the BACKENDS string does not end with ";"
- if (prev_pos < backends.size())
- {
- loadBackend(backends.substr(prev_pos), operands);
- }
-}
-
-Backend *BackendManager::get(const std::string &key) { return &_gen_map.at(key); }
-
-} // namespace backend
-} // namespace neurun
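
The deleted loader encodes two conventions worth keeping in mind: the BACKENDS config value is a ";"-separated list (e.g. "cpu;acl_cl" loads libbackend_cpu.so and libbackend_acl_cl.so), and each plugin must export extern "C" factory functions so dlsym can find them by unmangled name. A minimal sketch of that lookup contract (illustrative only; the real factories appear in PluginClassesAllocator.cc below):

#include <dlfcn.h>
#include <cstdio>
#include <cstdlib>

void *load_config_factory()
{
  void *handle = dlopen("libbackend_cpu.so", RTLD_LAZY | RTLD_LOCAL);
  if (handle == nullptr)
  {
    fprintf(stderr, "dlopen failed: %s\n", dlerror());
    abort();
  }
  // The symbol must be extern "C" in the plugin; a mangled C++ name
  // would not be found by this lookup.
  return dlsym(handle, "allocate_Config");
}
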
diff --git a/runtimes/neurun/src/backend/BackendManager.h b/runtimes/neurun/src/backend/BackendManager.h
deleted file mode 100644
index 428542b1e..000000000
--- a/runtimes/neurun/src/backend/BackendManager.h
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_BACKEND_MANAGER_H__
-#define __NEURUN_BACKEND_BACKEND_MANAGER_H__
-
-#include <memory>
-#include <map>
-
-#include "model/operand/Set.h"
-
-namespace neurun
-{
-namespace backend
-{
-
-struct IConfig;
-struct IStageGenerator;
-struct ITensorBuilder;
-
-class Backend
-{
-public:
- Backend(const std::shared_ptr<neurun::backend::IConfig> &backend_config,
- const std::shared_ptr<neurun::backend::IStageGenerator> &stage_gen);
-
- Backend(void) : _config(nullptr), _stage_gen(nullptr)
- {
- // DO NOTHING
- }
-
-public:
- const std::shared_ptr<neurun::backend::IConfig> config() const;
- const std::shared_ptr<neurun::backend::IStageGenerator> stage_gen() const;
- const std::shared_ptr<neurun::backend::ITensorBuilder> tensor_builder() const;
-
-private:
- std::shared_ptr<neurun::backend::IConfig> _config;
- std::shared_ptr<neurun::backend::IStageGenerator> _stage_gen;
-};
-
-class BackendManager
-{
-public:
- BackendManager(const neurun::model::operand::Set &operands);
-
- Backend *get(const std::string &key);
-
-private:
- std::map<std::string, Backend> _gen_map;
- /**
-   * @brief Allocate an object of a plugin class by looking up the plugin's factory
-   *        function and calling it
- *
- * @param object_of_plugin_class target object
-   * @param obj_creator_func_name name of the plugin function that allocates an object
- * @param handle handle of the plugin
- * @param args arguments to pass to constructor of the plugin class
- *
- * @return
- */
- template <typename T, class... Types>
- void loadObjectFromPlugin(std::shared_ptr<T> &object_of_plugin_class,
- const std::string obj_creator_func_name, void *handle,
- Types &&... args);
-
- /**
- * @brief load backend plugin
- *
- * @param backend backend to be loaded
- * @param operands operands to construct StageGenerator
- *
- * @return
- */
- void loadBackend(const std::string &backend, const neurun::model::operand::Set &operands);
-};
-
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_BACKEND_MANAGER_H__
diff --git a/runtimes/neurun/src/backend/CMakeLists.txt b/runtimes/neurun/src/backend/CMakeLists.txt
deleted file mode 100644
index a39823102..000000000
--- a/runtimes/neurun/src/backend/CMakeLists.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-add_subdirectory(cpu)
-add_subdirectory(acl_cl)
diff --git a/runtimes/neurun/src/backend/acl_cl/CMakeLists.txt b/runtimes/neurun/src/backend/acl_cl/CMakeLists.txt
deleted file mode 100644
index f1ea22bc5..000000000
--- a/runtimes/neurun/src/backend/acl_cl/CMakeLists.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-file(GLOB_RECURSE SOURCES "*.cc")
-
-add_library(${LIB_NEURUN_BACKEND_ACL_CL} SHARED ${SOURCES})
-
-target_include_directories(${LIB_NEURUN_BACKEND_ACL_CL} PUBLIC ${NNFW_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN_BACKEND_ACL_CL} PUBLIC ${NEURUN_INCLUDE_DIR})
-
-target_link_libraries(${LIB_NEURUN_BACKEND_ACL_CL} arm_compute)
-target_link_libraries(${LIB_NEURUN_BACKEND_ACL_CL} ${LIB_NEURUN_KERNEL_ACL_CL})
-target_link_libraries(${LIB_NEURUN_BACKEND_ACL_CL} ${LIB_NEURUN})
-
-target_compile_options(${LIB_NEURUN_BACKEND_ACL_CL} PRIVATE -Wall -Wextra -Werror -Wno-unused-parameter)
-
-set_target_properties(${LIB_NEURUN_BACKEND_ACL_CL} PROPERTIES OUTPUT_NAME backend_acl_cl)
-install(TARGETS ${LIB_NEURUN_BACKEND_ACL_CL} DESTINATION lib/neurun)
diff --git a/runtimes/neurun/src/backend/acl_cl/Config.cc b/runtimes/neurun/src/backend/acl_cl/Config.cc
deleted file mode 100644
index cad9b8988..000000000
--- a/runtimes/neurun/src/backend/acl_cl/Config.cc
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-#include "backend/acl_cl/Config.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-void Config::initialize() { arm_compute::CLScheduler::get().default_init(); }
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/acl_cl/Config.h b/runtimes/neurun/src/backend/acl_cl/Config.h
deleted file mode 100644
index cb43bfbe0..000000000
--- a/runtimes/neurun/src/backend/acl_cl/Config.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_CONFIG_H__
-#define __NEURUN_BACKEND_ACL_CL_CONFIG_H__
-
-#include "backend/interface/IConfig.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-class Config : public IConfig
-{
-public:
- Config()
- {
- // DO NOTHING
- }
-
- virtual std::string id() override { return "acl_cl"; }
- virtual void initialize() override;
- virtual graph::operand::Layout getOperandLayout() { return graph::operand::Layout::NCHW; }
- virtual bool SupportSubTensorAlloc() override { return true; }
-};
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_CONFIG_H__
diff --git a/runtimes/neurun/src/backend/acl_cl/Convert.cc b/runtimes/neurun/src/backend/acl_cl/Convert.cc
deleted file mode 100644
index ed0a089c4..000000000
--- a/runtimes/neurun/src/backend/acl_cl/Convert.cc
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Convert.h"
-
-#include "Swizzle.h"
-#include "model/operand/DataType.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-::arm_compute::TensorShape asTensorShape(const ::neurun::model::operand::Shape &shape,
- bool apply_dim_correction)
-{
- const uint32_t rank = shape.rank();
-
- ::arm_compute::TensorShape res{};
-
- res.set_num_dimensions(rank);
-
- for (uint32_t axis = 0; axis < rank; ++axis)
- {
-    // NOTE In some cases, an incorrect number of dimensions is required.
-    // For example, input_size is 1 in LSTM. The input-to-input weights
-    // ([num_units, input_size]) of LSTM are used as the weight of a FullyConnected layer,
-    // and a FullyConnected weight must have at least 2 dimensions.
-    // However, if dimension correction were applied to input_to_input_weights with
-    // input_size equal to 1, the shape would collapse to 1-D, so the correction is
-    // skipped (apply_dim_correction = false) when those weights feed a FullyConnected.
- res.set(ToARMComputeAxis(rank, axis).value(), shape.dim(axis), apply_dim_correction);
- }
-
- return res;
-}
-
-::arm_compute::DataType asDataType(const ::neurun::model::operand::DataType &type)
-{
- switch (type)
- {
- case ::neurun::model::operand::DataType::SCALAR_FLOAT32:
- case ::neurun::model::operand::DataType::TENSOR_FLOAT32:
- return ::arm_compute::DataType::F32;
- case ::neurun::model::operand::DataType::SCALAR_INT32:
- case ::neurun::model::operand::DataType::TENSOR_INT32:
- return ::arm_compute::DataType::S32;
- case ::neurun::model::operand::DataType::SCALAR_UINT32:
- return ::arm_compute::DataType::U32;
- case ::neurun::model::operand::DataType::TENSOR_QUANT8_ASYMM:
- return ::arm_compute::DataType::QASYMM8;
- default:
- throw std::runtime_error("Not supported, yet");
- break;
- }
-}
-
-::arm_compute::QuantizationInfo asQuantizationInfo(const float scale, const int32_t offset)
-{
- return ::arm_compute::QuantizationInfo(scale, offset);
-}
-
-::arm_compute::TensorInfo asTensorInfo(const ::neurun::model::operand::Shape &shape,
- const ::neurun::model::operand::TypeInfo &typeInfo)
-{
- return ::arm_compute::TensorInfo(asTensorShape(shape), 1, asDataType(typeInfo.type()),
- asQuantizationInfo(typeInfo.scale(), typeInfo.offset()));
-}
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
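
A worked example of the swizzle performed by asTensorShape (using ToARMComputeAxis from Swizzle.h, which appears later in this patch), for a hypothetical rank-4 NHWC shape:

// neurun shape (NHWC)  : {N=1, H=2, W=3, C=4}
// asTensorShape result : TensorShape{3, 2, 4, 1} // indices 0..3 hold W, H, C, N
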
diff --git a/runtimes/neurun/src/backend/acl_cl/Convert.h b/runtimes/neurun/src/backend/acl_cl/Convert.h
deleted file mode 100644
index 1a233fb87..000000000
--- a/runtimes/neurun/src/backend/acl_cl/Convert.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_CONVERT_H__
-#define __NEURUN_BACKEND_ACL_CL_CONVERT_H__
-
-#include <arm_compute/core/TensorInfo.h>
-#include <arm_compute/core/SubTensorInfo.h>
-#include <arm_compute/core/TensorShape.h>
-
-#include "model/operand/Object.h"
-#include "model/operand/Shape.h"
-#include "model/operand/TypeInfo.h"
-#include "misc/feature/Shape.h"
-#include "misc/kernel/Shape.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-::arm_compute::TensorShape asTensorShape(const ::neurun::model::operand::Shape &shape,
- bool apply_dim_correction = true);
-::arm_compute::DataType asDataType(const ::neurun::model::operand::DataType &type);
-::arm_compute::TensorInfo asTensorInfo(const ::neurun::model::operand::Shape &shape,
- const ::neurun::model::operand::TypeInfo &typeInfo);
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_CONVERT_H__
diff --git a/runtimes/neurun/src/backend/acl_cl/PluginClassesAllocator.cc b/runtimes/neurun/src/backend/acl_cl/PluginClassesAllocator.cc
deleted file mode 100644
index f33e71d33..000000000
--- a/runtimes/neurun/src/backend/acl_cl/PluginClassesAllocator.cc
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <memory>
-#include "TensorBuilder.h"
-#include "StageGenerator.h"
-#include "Config.h"
-#include "util/logging.h"
-
-extern "C" {
-neurun::backend::acl_cl::TensorBuilder *allocate_TensorBuilder()
-{
- VERBOSE(allocate_TensorBuilder) << "loaded from acl_cl\n";
- return new neurun::backend::acl_cl::TensorBuilder;
-}
-
-neurun::backend::acl_cl::StageGenerator *allocate_StageGenerator(
- const neurun::model::operand::Set &operand_ctx,
- const std::shared_ptr<neurun::backend::acl_cl::TensorBuilder> &tensor_builder)
-{
- VERBOSE(allocate_StageGenerator) << "loaded from acl_cl\n";
- return new neurun::backend::acl_cl::StageGenerator(operand_ctx, tensor_builder);
-}
-
-neurun::backend::acl_cl::Config *allocate_Config()
-{
- VERBOSE(allocate_Config) << "loaded from acl_cl\n";
- return new neurun::backend::acl_cl::Config;
-}
-}
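
These are the extern "C" factories that BackendManager::loadObjectFromPlugin resolves by name (allocate_Config, allocate_TensorBuilder, allocate_StageGenerator); the VERBOSE lines make a successful plugin load visible in the logs.
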
diff --git a/runtimes/neurun/src/backend/acl_cl/StageGenerator.cc b/runtimes/neurun/src/backend/acl_cl/StageGenerator.cc
deleted file mode 100644
index 89bbd7bd2..000000000
--- a/runtimes/neurun/src/backend/acl_cl/StageGenerator.cc
+++ /dev/null
@@ -1,593 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "backend/acl_cl/StageGenerator.h"
-
-#include "kernel/acl_cl/CLFunction.h"
-
-#include <arm_compute/runtime/CL/functions/CLConvolutionLayer.h>
-#include <arm_compute/runtime/CL/functions/CLPoolingLayer.h>
-#include <arm_compute/runtime/CL/functions/CLActivationLayer.h>
-#include <arm_compute/runtime/CL/functions/CLReshapeLayer.h>
-#include <arm_compute/runtime/CL/functions/CLFullyConnectedLayer.h>
-#include <arm_compute/runtime/CL/functions/CLSoftmaxLayer.h>
-
-#include "kernel/acl_cl/ConcatLayer.h"
-
-#include "util/Padding.h"
-
-#include "model/operand/Index.h"
-
-#include "util/logging.h"
-
-#include "NeuralNetworks.h"
-
-#include "util/Utils.h"
-
-template <typename T> std::unique_ptr<T> make_layer(void) { return std::unique_ptr<T>{new T}; }
-
-std::unique_ptr<::neurun::kernel::acl_cl::CLFunction>
-make_cl_function(std::unique_ptr<::arm_compute::IFunction> &&layer)
-{
- return std::unique_ptr<::neurun::kernel::acl_cl::CLFunction>(
- new ::neurun::kernel::acl_cl::CLFunction(std::move(layer)));
-}
-
-::arm_compute::PadStrideInfo asPadStringInfo(const neurun::util::Padding &padding,
- const neurun::util::Stride &stride)
-{
- return ::arm_compute::PadStrideInfo{stride.horizontal,
- stride.vertical,
- padding.left,
- padding.right,
- padding.top,
- padding.bottom,
- ::arm_compute::DimensionRoundingType::FLOOR};
-}
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-//
-// ActivationBuilder
-//
-class ActivationBuilder
-{
-public:
- ActivationBuilder(IExecutionBuilder &builder) : _builder(builder)
- {
- // DO NOTHING
- }
-
-private:
- void appendReLU(::arm_compute::ICLTensor *tensor);
-
-public:
- void append(FuseCode code, ::arm_compute::ICLTensor *tensor);
-
-private:
- IExecutionBuilder &_builder;
-};
-
-void ActivationBuilder::appendReLU(::arm_compute::ICLTensor *ifm_alloc)
-{
- const ::arm_compute::ActivationLayerInfo act_info{
- ::arm_compute::ActivationLayerInfo::ActivationFunction::RELU};
-
- auto fn = make_layer<::arm_compute::CLActivationLayer>();
-
- fn->configure(ifm_alloc, nullptr, act_info);
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- _builder.append(std::move(acl_fn));
-}
-
-void ActivationBuilder::append(FuseCode code, ::arm_compute::ICLTensor *ifm_alloc)
-{
- switch (code)
- {
- case ANEURALNETWORKS_FUSED_NONE:
- {
- // DO NOTHING
- break;
- }
- case ANEURALNETWORKS_FUSED_RELU:
- {
- appendReLU(ifm_alloc);
- break;
- }
- default:
- {
- throw std::runtime_error("Not supported, yet");
- }
- }
-}
-
-//
-// StageGenerator
-//
-StageGenerator::StageGenerator(const neurun::model::operand::Set &ctx,
- const std::shared_ptr<TensorBuilder> &tensor_builder)
- : _ctx(ctx), _tensor_builder(tensor_builder)
-{
- // DO NOTHING
-}
-
-void StageGenerator::visit(const model::operation::Conv2DNode &node)
-{
- using model::operation::Conv2DNode;
-
- const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(Conv2DNode::Input::INPUT)};
- const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
- const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
-
- const auto vstride_index{node.param().vstride_index};
- const auto hstride_index{node.param().hstride_index};
-
- const auto padding_index{node.param().padding_index};
- const auto activation_index{node.param().activation_index};
-
- const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
- const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
- const auto ker_shape = _ctx.at(ker_index).shape().asKernel();
-
- const PaddingCode padding_type =
- static_cast<PaddingCode>(_ctx.at(padding_index).asScalar<int32_t>());
-
- assert((ANEURALNETWORKS_PADDING_SAME == padding_type) ||
- (ANEURALNETWORKS_PADDING_VALID == padding_type));
-
- neurun::util::Stride stride;
-
- stride.vertical = _ctx.at(vstride_index).asScalar<int32_t>();
- stride.horizontal = _ctx.at(hstride_index).asScalar<int32_t>();
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index ofm_index;
- model::operand::Index ifm_index;
- model::operand::Index ker_index;
- model::operand::Index bias_index;
-
- neurun::util::Padding padding;
- neurun::util::Stride stride;
-
- FuseCode activation;
- };
-
- Param param;
-
- param.ofm_index = ofm_index;
- param.ifm_index = ifm_index;
- param.ker_index = ker_index;
- param.bias_index = bias_index;
-
- param.stride = stride;
- param.padding =
- (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? neurun::util::same_padding(ifm_shape, ofm_shape, stride, ker_shape.W, ker_shape.H)
- : neurun::util::valid_padding();
-
- param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto ofm_alloc = tensors->at(param.ofm_index).get();
- auto ifm_alloc = tensors->at(param.ifm_index).get();
- auto ker_alloc = tensors->at(param.ker_index).get();
- auto bias_alloc = tensors->at(param.bias_index).get();
-
- const auto conv_info = asPadStringInfo(param.padding, param.stride);
-
- std::unique_ptr<::arm_compute::CLConvolutionLayer> fn{new ::arm_compute::CLConvolutionLayer};
-
- fn->configure(ifm_alloc->handle(), ker_alloc->handle(), bias_alloc->handle(),
- ofm_alloc->handle(), conv_info);
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- builder.append(std::move(acl_fn));
-
- ActivationBuilder{builder}.append(param.activation, ofm_alloc->handle());
- });
-}
-
-void StageGenerator::visit(const model::operation::MaxPool2DNode &node)
-{
- const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(model::operation::MaxPool2DNode::Input::INPUT)};
-
- const auto kh_index{node.param().kh_index};
- const auto kw_index{node.param().kw_index};
-
- const auto vstride_index{node.param().vstride_index};
- const auto hstride_index{node.param().hstride_index};
-
- const auto padding_index{node.param().padding_index};
-
- const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
- const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
-
- const int32_t kh = _ctx.at(kh_index).asScalar<int32_t>();
- const int32_t kw = _ctx.at(kw_index).asScalar<int32_t>();
-
- const int32_t vstride = _ctx.at(vstride_index).asScalar<int32_t>();
- const int32_t hstride = _ctx.at(hstride_index).asScalar<int32_t>();
-
- const PaddingCode padding_type =
- static_cast<PaddingCode>(_ctx.at(padding_index).asScalar<int32_t>());
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index ofm_index;
- model::operand::Index ifm_index;
-
- uint32_t kw;
- uint32_t kh;
-
- neurun::util::Padding padding;
- neurun::util::Stride stride;
-
- // TODO Add 'activation' field
- };
-
- Param param;
-
- param.ofm_index = ofm_index;
- param.ifm_index = ifm_index;
-
- param.kh = kh;
- param.kw = kw;
-
- param.stride.vertical = vstride;
- param.stride.horizontal = hstride;
-
- param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? neurun::util::same_padding(ifm_shape, ofm_shape, param.stride, kw, kh)
- : neurun::util::valid_padding();
-
- VERBOSE(MaxPool2D) << "IFM_H: " << ifm_shape.H << std::endl;
- VERBOSE(MaxPool2D) << "IFM_W: " << ifm_shape.W << std::endl;
- VERBOSE(MaxPool2D) << "OFM_H: " << ofm_shape.H << std::endl;
- VERBOSE(MaxPool2D) << "OFM_W: " << ofm_shape.W << std::endl;
- VERBOSE(MaxPool2D) << "KER_H: " << kh << std::endl;
- VERBOSE(MaxPool2D) << "KER_W: " << kw << std::endl;
- VERBOSE(MaxPool2D) << "STRIDE_H: " << vstride << std::endl;
- VERBOSE(MaxPool2D) << "STRIDE_W: " << hstride << std::endl;
- VERBOSE(MaxPool2D) << "PAD(T): " << param.padding.top << std::endl;
- VERBOSE(MaxPool2D) << "PAD(B): " << param.padding.bottom << std::endl;
- VERBOSE(MaxPool2D) << "PAD(L): " << param.padding.left << std::endl;
- VERBOSE(MaxPool2D) << "PAD(R): " << param.padding.right << std::endl;
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto ofm_alloc = tensors->at(param.ofm_index).get();
- auto ifm_alloc = tensors->at(param.ifm_index).get();
-
- ::arm_compute::PoolingLayerInfo info{::arm_compute::PoolingType::MAX,
- ::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride)};
-
- std::unique_ptr<::arm_compute::CLPoolingLayer> fn{new ::arm_compute::CLPoolingLayer};
-
- fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- builder.append((std::move(acl_fn)));
- });
-}
-
-void StageGenerator::visit(const model::operation::AvgPool2DNode &node)
-{
- const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(model::operation::AvgPool2DNode::Input::INPUT)};
-
- const auto kh_index{node.param().kh_index};
- const auto kw_index{node.param().kw_index};
-
- const auto vstride_index{node.param().vstride_index};
- const auto hstride_index{node.param().hstride_index};
-
- const auto padding_index{node.param().padding_index};
-
- const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
- const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
-
- const int32_t kh = _ctx.at(kh_index).asScalar<int32_t>();
- const int32_t kw = _ctx.at(kw_index).asScalar<int32_t>();
-
- const int32_t vstride = _ctx.at(vstride_index).asScalar<int32_t>();
- const int32_t hstride = _ctx.at(hstride_index).asScalar<int32_t>();
-
- const PaddingCode padding_type =
- static_cast<PaddingCode>(_ctx.at(padding_index).asScalar<int32_t>());
-
- assert((ANEURALNETWORKS_PADDING_SAME == padding_type) ||
- (ANEURALNETWORKS_PADDING_VALID == padding_type));
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index ofm_index;
- model::operand::Index ifm_index;
-
- uint32_t kw;
- uint32_t kh;
-
- neurun::util::Padding padding;
- neurun::util::Stride stride;
-
- // TODO Add 'activation' field
- };
-
- Param param;
-
- param.ofm_index = ofm_index;
- param.ifm_index = ifm_index;
-
- param.kh = kh;
- param.kw = kw;
-
- param.stride.vertical = vstride;
- param.stride.horizontal = hstride;
-
- param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? neurun::util::same_padding(ifm_shape, ofm_shape, param.stride, kw, kh)
- : neurun::util::valid_padding();
-
- VERBOSE(AvgPool2D) << "IFM_H: " << ifm_shape.H << std::endl;
- VERBOSE(AvgPool2D) << "IFM_W: " << ifm_shape.W << std::endl;
- VERBOSE(AvgPool2D) << "OFM_H: " << ofm_shape.H << std::endl;
- VERBOSE(AvgPool2D) << "OFM_W: " << ofm_shape.W << std::endl;
- VERBOSE(AvgPool2D) << "KER_H: " << kh << std::endl;
- VERBOSE(AvgPool2D) << "KER_W: " << kw << std::endl;
- VERBOSE(AvgPool2D) << "STRIDE_H: " << vstride << std::endl;
- VERBOSE(AvgPool2D) << "STRIDE_W: " << hstride << std::endl;
- VERBOSE(AvgPool2D) << "PAD: " << neurun::util::to_string(padding_type) << std::endl;
- VERBOSE(AvgPool2D) << "PAD(T): " << param.padding.top << std::endl;
- VERBOSE(AvgPool2D) << "PAD(B): " << param.padding.bottom << std::endl;
- VERBOSE(AvgPool2D) << "PAD(L): " << param.padding.left << std::endl;
- VERBOSE(AvgPool2D) << "PAD(R): " << param.padding.right << std::endl;
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto ofm_alloc = tensors->at(param.ofm_index).get();
- auto ifm_alloc = tensors->at(param.ifm_index).get();
-
- ::arm_compute::PoolingLayerInfo info{
- ::arm_compute::PoolingType::AVG, ::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride), true /* exclude_padding */};
-
- std::unique_ptr<::arm_compute::CLPoolingLayer> fn{new ::arm_compute::CLPoolingLayer};
-
- fn->configure(ifm_alloc->handle(), ofm_alloc->handle(), info);
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- builder.append((std::move(acl_fn)));
- });
-}
-
-void StageGenerator::visit(const model::operation::ConcatNode &node)
-{
- const auto ofm_index{node.getOutputs().at(0)};
- const auto axis_index{node.param().axis_index};
-
- struct Param
- {
- model::operand::Index output_index;
- std::vector<model::operand::Index> input_indexes;
-
- int32_t axis;
- };
-
- Param param;
-
- param.output_index = ofm_index;
- for (const auto &e : node.getInputs())
- {
- param.input_indexes.emplace_back(e);
- }
- param.axis = _ctx.at(axis_index).asScalar<int32_t>();
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- // If tensor allocator allocate as subtensor
-    // If the tensor allocator allocated every input as a subtensor of the output,
-    // this concat is a no-op and the stage can be eliminated
- for (auto ifm_ind : param.input_indexes)
- {
- if (!tensors->isSubTensorOf(param.output_index, ifm_ind))
- {
- canEliminate = false;
- break;
- }
- }
- if (canEliminate)
- {
-      // Concat was eliminated as a no-op; return with nothing to do
- return;
- }
-
- auto output_alloc = tensors->at(param.output_index).get();
-
- std::vector<::neurun::backend::acl_cl::operand::ICLTensor *> input_allocs;
- for (auto ifm_ind : param.input_indexes)
- {
- input_allocs.emplace_back(
- dynamic_cast<::neurun::backend::acl_cl::operand::CLTensor *>(tensors->at(ifm_ind).get()));
- }
-
- std::unique_ptr<::neurun::kernel::acl_cl::ConcatLayer> fn{
- new ::neurun::kernel::acl_cl::ConcatLayer};
-
- fn->configure(input_allocs, param.axis,
- dynamic_cast<::neurun::backend::acl_cl::operand::CLTensor *>(output_alloc));
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- builder.append(std::move(acl_fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::FullyConnectedNode &node)
-{
- using model::operation::FullyConnectedNode;
-
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
- const auto weight_index{node.getInputs().at(FullyConnectedNode::Input::WEIGHT)};
- const auto bias_index{node.getInputs().at(FullyConnectedNode::Input::BIAS)};
- const auto activation_index{node.param().activation_index};
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index output_index;
-
- model::operand::Index input_index;
- model::operand::Index weight_index;
- model::operand::Index bias_index;
-
- FuseCode activation;
- };
-
- Param param;
-
- param.output_index = output_index;
- param.input_index = input_index;
- param.weight_index = weight_index;
- param.bias_index = bias_index;
-
- param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto output_alloc = tensors->at(param.output_index).get();
- auto input_alloc = tensors->at(param.input_index).get();
- auto weight_alloc = tensors->at(param.weight_index).get();
- auto bias_alloc = tensors->at(param.bias_index).get();
-
- auto fn = make_layer<::arm_compute::CLFullyConnectedLayer>();
-
- fn->configure(input_alloc->handle(), weight_alloc->handle(), bias_alloc->handle(),
- output_alloc->handle());
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- builder.append(std::move(acl_fn));
-
- ActivationBuilder{builder}.append(param.activation, output_alloc->handle());
- });
-}
-
-void StageGenerator::visit(const model::operation::ReshapeNode &node)
-{
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(model::operation::ReshapeNode::Input::INPUT)};
-
- struct Param
- {
- model::operand::Index output_index;
- model::operand::Index input_index;
- };
-
- Param param;
-
- param.output_index = output_index;
- param.input_index = input_index;
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto output_alloc = tensors->at(param.output_index).get();
- auto input_alloc = tensors->at(param.input_index).get();
-
- auto fn = make_layer<::arm_compute::CLReshapeLayer>();
-
- fn->configure(input_alloc->handle(), output_alloc->handle());
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- builder.append(std::move(acl_fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::SoftmaxNode &node)
-{
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(model::operation::SoftmaxNode::Input::INPUT)};
- const auto scale_index{node.param().scale_index};
-
- assert(_ctx.at(scale_index).shape().rank() == 0);
-
- struct Param
- {
- model::operand::Index output_index;
- model::operand::Index input_index;
- float scale;
- };
-
- Param param;
-
- param.output_index = output_index;
- param.input_index = input_index;
- param.scale = _ctx.at(scale_index).asScalar<float>();
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto output_alloc = tensors->at(param.output_index).get();
- auto input_alloc = tensors->at(param.input_index).get();
-
- auto fn = make_layer<::arm_compute::CLSoftmaxLayer>();
-
- fn->configure(input_alloc->handle(), output_alloc->handle(), param.scale);
-
- auto acl_fn = make_cl_function(std::move(fn));
-
- builder.append(std::move(acl_fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::PermuteNode & /* node */)
-{
- throw "Unsupported";
-}
-
-void StageGenerator::visit(const model::operation::AddNode &)
-{
- VERBOSE(Add) << "generate CPU Add" << std::endl;
-
- throw std::runtime_error("NYI");
-}
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
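
Every visit() above follows the same recipe: copy operand indices and scalar parameters into a plain Param struct, then return a stage lambda that resolves the tensors and configures the ACL function at execution-build time. A sketch of that recipe for a hypothetical unary operation (UnaryNode and CLUnaryFunction are placeholders, not real types):

void StageGenerator::visit(const model::operation::UnaryNode &node) // hypothetical node
{
  struct Param
  {
    model::operand::Index ofm_index;
    model::operand::Index ifm_index;
  };

  Param param{node.getOutputs().at(0), node.getInputs().at(0)};
  auto tensors = _tensor_builder; // captured by value so the builder outlives this visit

  returnStage([tensors, param](IExecutionBuilder &builder) {
    auto fn = make_layer<::arm_compute::CLUnaryFunction>(); // placeholder layer type
    fn->configure(tensors->at(param.ifm_index).get()->handle(),
                  tensors->at(param.ofm_index).get()->handle());
    builder.append(make_cl_function(std::move(fn)));
  });
}
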
diff --git a/runtimes/neurun/src/backend/acl_cl/StageGenerator.h b/runtimes/neurun/src/backend/acl_cl/StageGenerator.h
deleted file mode 100644
index 1dac2592b..000000000
--- a/runtimes/neurun/src/backend/acl_cl/StageGenerator.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_STAGE_GENERATOR_H__
-#define __NEURUN_BACKEND_ACL_CL_STAGE_GENERATOR_H__
-
-#include "backend/interface/IStageGenerator.h"
-
-#include "model/operand/Set.h"
-#include "backend/acl_cl/TensorBuilder.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-class StageGenerator : public IStageGenerator
-{
-public:
- StageGenerator(const neurun::model::operand::Set &ctx,
- const std::shared_ptr<TensorBuilder> &tensor_builder);
-
- virtual std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
-
-#define OP(InternalName, IsNnApi, NnApiName) \
- virtual void visit(const model::operation::InternalName &) override;
-#include "model/operation/Op.lst"
-#undef OP
-
-private:
- const neurun::model::operand::Set &_ctx;
- std::shared_ptr<TensorBuilder> _tensor_builder;
-};
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_STAGE_GENERATOR_H__
diff --git a/runtimes/neurun/src/backend/acl_cl/Swizzle.h b/runtimes/neurun/src/backend/acl_cl/Swizzle.h
deleted file mode 100644
index 838e57162..000000000
--- a/runtimes/neurun/src/backend/acl_cl/Swizzle.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_SWIZZLE_H__
-#define __NEURUN_BACKEND_ACL_CL_SWIZZLE_H__
-
-#include <cassert>
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-class ARMComputeAxis
-{
-public:
- ARMComputeAxis() = default;
-
-public:
- explicit ARMComputeAxis(uint32_t value) : _value{value}
- {
- // DO NOTHING
- }
-
-public:
- uint32_t value(void) const { return _value; }
-
-private:
- uint32_t _value;
-};
-
-// Convert T/F Lite / NNAPI axis (based on ...NHWC) to ARMCompute axis (WHCN...)
-inline ARMComputeAxis ToARMComputeAxis(uint32_t rank, uint32_t axis)
-{
- assert(rank > axis);
- const ARMComputeAxis reversed{(rank - axis) - 1};
-
- if (rank < 4)
- {
- return reversed;
- }
-
- // DEPTH
- if (0 == reversed.value())
- {
- return ARMComputeAxis{2};
- }
- // WIDTH
- if (1 == reversed.value())
- {
- return ARMComputeAxis{0};
- }
- // HEIGHT
- if (2 == reversed.value())
- {
- return ARMComputeAxis{1};
- }
-
- // ELSE
- return reversed;
-}
-
-template <typename T> inline T ReorderBits(T in, size_t numOfBits)
-{
- assert(numOfBits > 0);
- T out = 0;
- for (int32_t i = numOfBits - 1; i >= 0; --i)
- {
- const uint32_t toShift = numOfBits - ToARMComputeAxis(numOfBits, i).value() - 1;
- out += ((in & 1) << toShift);
- in >>= 1;
- }
- return out;
-}
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_SWIZZLE_H__
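To make the NHWC-to-WHCN conversion above concrete, here is a standalone check of the ToARMComputeAxis() logic. The function body is restated from Swizzle.h (returning a bare uint32_t instead of ARMComputeAxis for brevity); the expected values in main() were derived by hand from that code and are an illustration, not an authoritative test.

// swizzle_check.cc: illustration only.
#include <cassert>
#include <cstdint>

// Restated from Swizzle.h above.
inline uint32_t ToARMComputeAxis(uint32_t rank, uint32_t axis)
{
  assert(rank > axis);
  const uint32_t reversed = (rank - axis) - 1;
  if (rank < 4)
    return reversed;
  if (reversed == 0) return 2; // DEPTH
  if (reversed == 1) return 0; // WIDTH
  if (reversed == 2) return 1; // HEIGHT
  return reversed;
}

int main()
{
  // NHWC (N=0, H=1, W=2, C=3) lands on ARMCompute WHCN (W=0, H=1, C=2, N=3).
  assert(ToARMComputeAxis(4, 0) == 3); // N
  assert(ToARMComputeAxis(4, 1) == 1); // H
  assert(ToARMComputeAxis(4, 2) == 0); // W
  assert(ToARMComputeAxis(4, 3) == 2); // C
  assert(ToARMComputeAxis(2, 0) == 1); // below rank 4, plain reversal
  return 0;
}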
diff --git a/runtimes/neurun/src/backend/acl_cl/TensorBuilder.cc b/runtimes/neurun/src/backend/acl_cl/TensorBuilder.cc
deleted file mode 100644
index b5c038200..000000000
--- a/runtimes/neurun/src/backend/acl_cl/TensorBuilder.cc
+++ /dev/null
@@ -1,246 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "backend/acl_cl/TensorBuilder.h"
-
-#include <cassert>
-#include <stack>
-
-#include "operand/Object.h"
-#include "Convert.h"
-
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-TensorBuilder::TensorBuilder()
-{
- // DO NOTHING
-}
-
-void TensorBuilder::registerTensorInfo(const model::operand::Index &ind,
- const compiler::TensorInfo &info)
-{
- assert(_tensors.size() == 0);
-
- _tensor_info_map.insert({ind, info});
-}
-
-void TensorBuilder::registerSubTensorInfo(const model::operand::Index &ind,
- const compiler::SubTensorInfo &info)
-{
- assert(_tensors.size() == 0);
-
- _subtensor_info_map.insert({ind, info});
-}
-
-void TensorBuilder::notifyFirstUse(const model::operand::Index &)
-{
- // DO NOTHING
-}
-
-void TensorBuilder::notifyLastUse(const model::operand::Index &)
-{
- // DO NOTHING
-}
-
-void TensorBuilder::prepare(void)
-{
- assert(_tensors.size() == 0);
-
- // TODO Handle SubTensor (subsumption)
- // Currently this TensorBuilder does not have subsumption info,
- // so allocated subtensors are mapped to _subtensors instead of _tensors
- assert(_subtensors.size() == 0);
-
- for (auto &entry : _tensor_info_map)
- {
- auto ind = entry.first;
- const auto &info = entry.second;
- auto tensor = std::make_shared<::neurun::backend::acl_cl::operand::CLTensor>(info);
- _tensors[ind] = tensor;
- }
-
- // To make a subtensor, its parent tensor must be made first.
- // A stack enforces this ordering (a standalone walk-through follows this file's diff):
- // 1) Push one subtensor index onto the stack (iterating over all subtensors)
- // 2) If the tensor at the stack top is already made, pop it and go to 4)
- // 3) Otherwise, check its parent tensor
- // 3-1) If the parent tensor is already made, the child can be made:
- // make the child tensor, pop, and go to 4)
- // 3-2) If the parent tensor is not made, the child cannot be made yet:
- // push the parent tensor index onto the stack and go to 4)
- // 4) If the stack is empty, return to 1); otherwise go to 2)
- for (auto &entry : _subtensor_info_map)
- {
- model::operand::Index ind = entry.first;
-
- std::stack<model::operand::Index> stack;
- stack.push(ind);
-
- while (!stack.empty())
- {
- const auto current = stack.top();
- const auto &info = _subtensor_info_map.at(current);
-
- // Already generated CLSubTensor
- if (_subtensors.find(current) != _subtensors.end())
- {
- stack.pop();
- continue;
- }
-
- auto parent = info.parent();
- std::shared_ptr<::neurun::backend::acl_cl::operand::ICLTensor> parent_tensor;
-
- if (_tensors.find(parent) != _tensors.end())
- {
- // Parent is allocated as tensor
- parent_tensor = _tensors[parent];
- }
- else if (_subtensors.find(parent) != _subtensors.end())
- {
- // Parent is allocated as subtensor
- parent_tensor = _subtensors[parent];
- }
- else
- {
- // Cannot find allocated parent tensor: allocate parent first
- assert(_subtensor_info_map.find(parent) != _subtensor_info_map.end());
- stack.push(parent);
- continue;
- }
- assert(parent_tensor != nullptr);
-
- // Child's type should be the same as its parent's
- assert(info.type().offset() == parent_tensor->info()->quantization_info().offset);
- assert(info.type().scale() == parent_tensor->info()->quantization_info().scale);
- assert(asDataType(info.type().type()) == parent_tensor->info()->data_type());
- auto shape = asTensorShape(info.shape());
-
- // Only a channel offset is supported (NHWC axis 3 == ARMCompute dimension 2)
- ::arm_compute::Coordinates coordinates;
- coordinates.set_num_dimensions(4);
- assert(info.offset().h() == 0);
- assert(info.offset().n() == 0);
- assert(info.offset().w() == 0);
- coordinates[2] = info.offset().c();
- auto tensor = std::make_shared<::neurun::backend::acl_cl::operand::CLSubTensor>(
- parent_tensor.get(), shape, coordinates, true);
- _subtensors[current] = tensor;
- stack.pop();
- }
- }
-}
-
-void TensorBuilder::allocate(void)
-{
- assert(_tensor_info_map.size() == _tensors.size());
-
- for (const auto &tensor_entry : _tensors)
- {
- auto tensor = tensor_entry.second;
- tensor->allocator()->allocate();
- }
-}
-
-std::shared_ptr<::neurun::backend::operand::ITensor>
-TensorBuilder::tensorAt(const model::operand::Index &ind)
-{
- if (_tensors.find(ind) != _tensors.end())
- {
- return _tensors.at(ind);
- }
- else
- {
- return _subtensors.at(ind);
- }
-}
-
-std::shared_ptr<backend::operand::IObject>
-TensorBuilder::wrapTensor(const model::operand::Index &ind)
-{
- if (_objects.find(ind) != _objects.end())
- {
- return _objects.at(ind);
- }
- else
- {
- if (_tensors.find(ind) != _tensors.end())
- {
- return _objects[ind] = std::make_shared<operand::Object>(_tensors.at(ind));
- }
- else
- {
- return _objects[ind] = std::make_shared<operand::Object>(_subtensors.at(ind));
- }
- }
-}
-
-void TensorBuilder::iterate(const IterateFunction &fn)
-{
- for (auto it : _tensors)
- {
- fn(it.first);
- }
- for (auto it : _subtensors)
- {
- fn(it.first);
- }
-}
-
-std::shared_ptr<::neurun::backend::acl_cl::operand::ICLTensor>
-TensorBuilder::at(const ::neurun::model::operand::Index &ind)
-{
- if (_tensors.find(ind) != _tensors.end())
- {
- return _tensors.at(ind);
- }
- else
- {
- return _subtensors.at(ind);
- }
-}
-
-bool TensorBuilder::isSubTensorOf(const model::operand::Index &parent,
- const model::operand::Index &child)
-{
- if (_subtensor_info_map.find(child) == _subtensor_info_map.end())
- {
- return false;
- }
-
- if (_subtensors.find(child) == _subtensors.end())
- {
- return false;
- }
-
- if (_subtensor_info_map.at(child).parent() != parent)
- {
- return false;
- }
-
- return true;
-}
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
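The stack discipline in TensorBuilder::prepare() above is a general "create parents before children" traversal. Below is a stripped-down walk-through with plain ints standing in for operand indices; the parent map and the indices are invented for illustration.

// parent_first_sketch.cc: illustration of the stack ordering used in
// TensorBuilder::prepare(); the data here is made up.
#include <cassert>
#include <map>
#include <set>
#include <stack>

int main()
{
  // child -> parent; operand 0 is a top-level tensor, created up front.
  std::map<int, int> parent{{1, 0}, {2, 1}, {3, 0}};
  std::set<int> made{0};

  for (const auto &entry : parent)
  {
    std::stack<int> stack;
    stack.push(entry.first);
    while (!stack.empty())
    {
      const int current = stack.top();
      if (made.count(current)) // already created: nothing to do
      {
        stack.pop();
        continue;
      }
      const int p = parent.at(current);
      if (made.count(p))
      {
        made.insert(current); // parent exists, so the child can be created
        stack.pop();
      }
      else
      {
        stack.push(p); // create the parent first, then revisit the child
      }
    }
  }
  assert(made.count(2)); // chain 0 -> 1 -> 2 was created parent-first
  return 0;
}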
diff --git a/runtimes/neurun/src/backend/acl_cl/TensorBuilder.h b/runtimes/neurun/src/backend/acl_cl/TensorBuilder.h
deleted file mode 100644
index 64d81721a..000000000
--- a/runtimes/neurun/src/backend/acl_cl/TensorBuilder.h
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_TENSOR_BUILDER_H__
-#define __NEURUN_BACKEND_ACL_CL_TENSOR_BUILDER_H__
-
-#include "backend/interface/ITensorBuilder.h"
-#include "backend/acl_cl/operand/CLTensor.h"
-#include "backend/acl_cl/operand/CLSubTensor.h"
-#include "backend/acl_cl/operand/Object.h"
-
-#include <unordered_map>
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-
-class TensorBuilder : public ITensorBuilder
-{
-public:
- TensorBuilder();
-
- /**
- * @brief Register tensor information to allocate on ACL-CL backend
- * @param[in] ind Operand index
- * @param[in] info Tensor information
- */
- virtual void registerTensorInfo(const model::operand::Index &ind,
- const compiler::TensorInfo &info) override;
- /**
- * @brief Register subtensor information to allocate on ACL-CL backend
- * @param[in] ind Operand index
- * @param[in] info Tensor information
- */
- virtual void registerSubTensorInfo(const model::operand::Index &ind,
- const compiler::SubTensorInfo &info) override;
-
- virtual void notifyFirstUse(const model::operand::Index &) override;
- virtual void notifyLastUse(const model::operand::Index &) override;
-
- virtual void prepare(void) override;
- virtual void allocate(void) override;
-
- virtual std::shared_ptr<::neurun::backend::operand::ITensor>
- tensorAt(const model::operand::Index &ind) override;
- virtual std::shared_ptr<backend::operand::IObject>
- wrapTensor(const model::operand::Index &ind) override;
- virtual void iterate(const IterateFunction &fn) override;
-
- std::shared_ptr<::neurun::backend::acl_cl::operand::ICLTensor>
- at(const ::neurun::model::operand::Index &ind);
- /**
- * @brief Check child tensor is allocated as subtensor of parent tensor
- * @param[in] parent Index of parent
- * @param[in] child Index of child
- * @return @c true if child is allocated as subtensor of parent, otherwise @c false
- */
- bool isSubTensorOf(const model::operand::Index &parent, const model::operand::Index &child);
-
-private:
- std::unordered_map<model::operand::Index, compiler::TensorInfo> _tensor_info_map;
- std::unordered_map<model::operand::Index, compiler::SubTensorInfo> _subtensor_info_map;
- std::unordered_map<model::operand::Index,
- std::shared_ptr<::neurun::backend::acl_cl::operand::CLTensor>>
- _tensors;
- std::unordered_map<model::operand::Index,
- std::shared_ptr<::neurun::backend::acl_cl::operand::CLSubTensor>>
- _subtensors;
- std::unordered_map<model::operand::Index,
- std::shared_ptr<::neurun::backend::acl_cl::operand::Object>>
- _objects;
-};
-
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.cc b/runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.cc
deleted file mode 100644
index f64b521dd..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.cc
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "CLSubTensor.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-CLSubTensor::CLSubTensor(ICLTensor *parent, const arm_compute::TensorShape &tensor_shape,
- const arm_compute::Coordinates &coords, bool extend_parent)
- : _cl_sub_tensor(std::make_shared<arm_compute::CLSubTensor>(parent->handle(), tensor_shape,
- coords, extend_parent))
-{
- // DO NOTHING
-}
-
-arm_compute::CLSubTensor *CLSubTensor::handle() const { return _cl_sub_tensor.get(); }
-
-arm_compute::CLSubTensor *CLSubTensor::handle() { return _cl_sub_tensor.get(); }
-
-void CLSubTensor::map(bool blocking) { _cl_sub_tensor->map(blocking); }
-
-void CLSubTensor::unmap() { _cl_sub_tensor->unmap(); }
-
-uint8_t *CLSubTensor::doMap(cl::CommandQueue &q, bool blocking)
-{
- assert(buffer() == nullptr); // not mapped yet
- return static_cast<uint8_t *>(q.enqueueMapBuffer(cl_buffer(), blocking ? CL_TRUE : CL_FALSE,
- CL_MAP_READ | CL_MAP_WRITE, 0,
- info()->total_size()));
-}
-
-void CLSubTensor::doUnmap(cl::CommandQueue &q)
-{
- assert(buffer() != nullptr); // must currently be mapped
- q.enqueueUnmapMemObject(cl_buffer(), buffer());
-}
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.h b/runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.h
deleted file mode 100644
index cef78c196..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/CLSubTensor.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_CL_SUB_TENSOR_H__
-#define __NEURUN_BACKEND_ACL_CL_OPERAND_CL_SUB_TENSOR_H__
-
-#include <arm_compute/runtime/CL/CLSubTensor.h>
-#include "ICLTensor.h"
-#include "compiler/SubTensorInfo.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-class CLSubTensor : public ICLTensor
-{
-public:
- CLSubTensor() = delete;
-
-public:
- CLSubTensor(ICLTensor *parent, const arm_compute::TensorShape &tensor_shape,
- const arm_compute::Coordinates &coords, bool extend_parent = false);
-
-public:
- arm_compute::CLSubTensor *handle() const override;
- arm_compute::CLSubTensor *handle() override;
-
-public:
- void map(bool blocking = true);
- void unmap();
-
-protected:
- uint8_t *doMap(cl::CommandQueue &q, bool blocking) override;
- virtual void doUnmap(cl::CommandQueue &q) override;
-
-private:
- std::shared_ptr<arm_compute::CLSubTensor> _cl_sub_tensor;
-};
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_CL_SUB_TENSOR_H__
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/CLTensor.cc b/runtimes/neurun/src/backend/acl_cl/operand/CLTensor.cc
deleted file mode 100644
index e7b718df3..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/CLTensor.cc
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-#include <arm_compute/runtime/CL/CLMemory.h>
-#include <arm_compute/runtime/CL/CLMemoryRegion.h>
-#include "CLTensor.h"
-
-#include "backend/acl_cl/Convert.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-CLTensor::CLTensor(const compiler::TensorInfo &info)
- : _cl_tensor(std::make_shared<arm_compute::CLTensor>())
-{
- auto acl_cl_info = asTensorInfo(info.shape(), info.typeInfo());
- allocator()->init(acl_cl_info);
-}
-
-arm_compute::CLTensor *CLTensor::handle() const { return _cl_tensor.get(); }
-
-arm_compute::CLTensor *CLTensor::handle() { return _cl_tensor.get(); }
-
-arm_compute::CLTensorAllocator *CLTensor::allocator() { return _cl_tensor->allocator(); }
-
-void CLTensor::map(bool blocking) { _cl_tensor->map(blocking); }
-
-void CLTensor::unmap() { _cl_tensor->unmap(); }
-
-uint8_t *CLTensor::doMap(cl::CommandQueue &q, bool blocking)
-{
- return allocator()->map(q, blocking);
-}
-
-void CLTensor::doUnmap(cl::CommandQueue &q) { allocator()->unmap(q, buffer()); }
-
- // handle() is deprecated as of ACL v18.11
- // TODO Update this code accordingly
-#if 0
-void CLTensor::setBuffer(void *host_ptr)
-{
- // Create an empty MemoryRegion with just the context. The flag is not used here,
- // so it does not matter which flag we pass
- auto memory = arm_compute::CLMemory(std::make_shared<arm_compute::CLBufferMemoryRegion>(
- arm_compute::CLScheduler::get().context(), CL_MEM_USE_HOST_PTR | CL_MEM_READ_WRITE, 0));
-
- // set buffer
- auto mem = reinterpret_cast<cl::Buffer *>(memory.region()->handle());
- *mem = cl::Buffer(arm_compute::CLScheduler::get().context(),
- CL_MEM_USE_HOST_PTR | CL_MEM_READ_WRITE, info()->total_size(), host_ptr);
- // set correct buffer size
- memory.region()->set_size(info()->total_size());
- // import memory
- allocator()->import_memory(memory);
-}
-#endif
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/CLTensor.h b/runtimes/neurun/src/backend/acl_cl/operand/CLTensor.h
deleted file mode 100644
index 31c96e201..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/CLTensor.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_CL_TENSOR_H__
-#define __NEURUN_BACKEND_ACL_CL_OPERAND_CL_TENSOR_H__
-
-#include <arm_compute/core/TensorInfo.h>
-#include <arm_compute/runtime/CL/CLTensor.h>
-#include <arm_compute/runtime/CL/CLScheduler.h>
-#include "arm_compute/runtime/CL/CLTensorAllocator.h"
-#include "ICLTensor.h"
-#include "compiler/TensorInfo.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-class CLTensor : public ICLTensor
-{
-public:
- CLTensor() = delete;
-
-public:
- CLTensor(const compiler::TensorInfo &info);
-
-public:
- arm_compute::CLTensor *handle() const override;
- arm_compute::CLTensor *handle() override;
-
-public:
- arm_compute::CLTensorAllocator *allocator();
- void map(bool blocking = true);
- void unmap();
- void setBuffer(void *host_ptr);
-
-protected:
- uint8_t *doMap(cl::CommandQueue &q, bool blocking) override;
- void doUnmap(cl::CommandQueue &q) override;
-
-private:
- std::shared_ptr<arm_compute::CLTensor> _cl_tensor;
-};
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_CL_TENSOR_H__
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.cc b/runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.cc
deleted file mode 100644
index 23d723de4..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.cc
+++ /dev/null
@@ -1,48 +0,0 @@
-#include "ICLTensor.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-size_t ICLTensor::total_size() const { return info()->total_size(); }
-
-size_t ICLTensor::dimension(size_t index) const { return info()->dimension(index); }
-
-size_t ICLTensor::num_dimensions() const { return info()->num_dimensions(); }
-
-size_t ICLTensor::calcOffset(const neurun::util::feature::Coordinate4D &coords)
-{
- int32_t N = coords.n();
- int32_t C = coords.c();
- int32_t H = coords.h();
- int32_t W = coords.w();
-
- ::arm_compute::Coordinates coordinates{W, H, C, N};
- return info()->offset_element_in_bytes(coordinates);
-}
-
-arm_compute::DataType ICLTensor::data_type() const { return info()->data_type(); }
-
-uint8_t *ICLTensor::buffer() const { return handle()->buffer(); }
-
-const cl::Buffer &ICLTensor::cl_buffer() const { return handle()->cl_buffer(); }
-
-arm_compute::ITensorInfo *ICLTensor::info() const { return handle()->info(); }
-
-arm_compute::ITensorInfo *ICLTensor::info() { return handle()->info(); }
-
-void ICLTensor::map(cl::CommandQueue &q, bool blocking) { return handle()->map(q, blocking); }
-
-void ICLTensor::unmap(cl::CommandQueue &q) { return handle()->unmap(q); }
-
-void ICLTensor::clear(cl::CommandQueue &q) { return handle()->clear(q); }
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
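ICLTensor::calcOffset() above hands ARMCompute a Coordinates object in WHCN order and lets offset_element_in_bytes() apply the tensor's strides. Assuming a dense WHCN layout with no padding (real ACL tensors may be padded, so this is an approximation), the arithmetic reduces to the following:

// offset_sketch.cc: approximation of offset_element_in_bytes() for a dense,
// padding-free WHCN tensor; real ACL strides may differ.
#include <cassert>
#include <cstddef>

// Dimension 0 (W) is innermost, N is outermost.
std::size_t offset_bytes(std::size_t W, std::size_t H, std::size_t C,
                         std::size_t n, std::size_t c, std::size_t h,
                         std::size_t w, std::size_t elem_size)
{
  return elem_size * (w + W * (h + H * (c + C * n)));
}

int main()
{
  // float32 tensor with W=4, H=3, C=2, N=1; element (n=0, c=1, h=2, w=3).
  assert(offset_bytes(4, 3, 2, 0, 1, 2, 3, 4) == 92);
  return 0;
}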
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.h b/runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.h
deleted file mode 100644
index 226fbf814..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/ICLTensor.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_I_CL_TENSOR_H__
-#define __NEURUN_BACKEND_ACL_CL_OPERAND_I_CL_TENSOR_H__
-
-#include <arm_compute/core/ITensorInfo.h>
-#include <arm_compute/core/CL/ICLTensor.h>
-#include "backend/interface/operand/ITensor.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-class ICLTensor : public ::neurun::backend::operand::ITensor
-{
-public:
- ICLTensor() = default;
- ICLTensor(const ICLTensor &) = delete;
- ICLTensor &operator=(const ICLTensor &) = delete;
- ICLTensor(ICLTensor &&) = default;
- ICLTensor &operator=(ICLTensor &&) = default;
- virtual ~ICLTensor() = default;
-
-public:
- virtual arm_compute::ICLTensor *handle() = 0;
- virtual arm_compute::ICLTensor *handle() const = 0;
-
-public:
- uint8_t *buffer() const override;
- size_t total_size() const override;
- size_t dimension(size_t index) const override;
- size_t num_dimensions() const override;
- size_t calcOffset(const neurun::util::feature::Coordinate4D &coords) override;
-
-public:
- arm_compute::DataType data_type() const;
- const cl::Buffer &cl_buffer() const;
- arm_compute::ITensorInfo *info() const;
- arm_compute::ITensorInfo *info();
- void map(cl::CommandQueue &q, bool blocking = true);
- void unmap(cl::CommandQueue &q);
- void clear(cl::CommandQueue &q);
-
-protected:
- virtual uint8_t *doMap(cl::CommandQueue &q, bool blocking) = 0;
- virtual void doUnmap(cl::CommandQueue &q) = 0;
-};
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_I_CL_TENSOR_H__
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/Object.cc b/runtimes/neurun/src/backend/acl_cl/operand/Object.cc
deleted file mode 100644
index a84fa2366..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/Object.cc
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Object.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-void Object::access(
- const std::function<void(::neurun::backend::operand::ITensor &tensor)> &fn) const
-{
- auto &queue = ::arm_compute::CLScheduler::get().queue();
-
- _tensor->map(queue);
- fn(*_tensor);
- _tensor->unmap(queue);
-}
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
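Object::access() above brackets every host-side touch of a CL tensor with a map()/unmap() pair, so the callback only ever sees mapped memory. A sketch of the same discipline follows; MockTensor is hypothetical and stands in for ICLTensor, whose real map()/unmap() go through the ACL command queue.

// access_sketch.cc: illustration of the map/run/unmap pattern in Object::access().
#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

// Hypothetical stand-in for ICLTensor; map()/unmap() mimic the CL calls.
struct MockTensor
{
  std::vector<uint8_t> storage;
  bool mapped = false;
  void map() { mapped = true; }
  void unmap() { mapped = false; }
};

// Mirrors Object::access(): the tensor is only touched while mapped.
void access(MockTensor &tensor, const std::function<void(MockTensor &)> &fn)
{
  tensor.map();
  fn(tensor);
  tensor.unmap();
}

int main()
{
  MockTensor t;
  t.storage.resize(4);
  access(t, [](MockTensor &mt) { mt.storage[0] = 42; }); // safe: mapped here
  std::cout << static_cast<int>(t.storage[0]) << std::endl; // prints 42
  return 0;
}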
diff --git a/runtimes/neurun/src/backend/acl_cl/operand/Object.h b/runtimes/neurun/src/backend/acl_cl/operand/Object.h
deleted file mode 100644
index 4ba22b269..000000000
--- a/runtimes/neurun/src/backend/acl_cl/operand/Object.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ACL_CL_OPERAND_OBJECT_H__
-#define __NEURUN_BACKEND_ACL_CL_OPERAND_OBJECT_H__
-
-#include <memory>
-
-#include "backend/interface/operand/IObject.h"
-#include "backend/acl_cl/operand/ICLTensor.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace acl_cl
-{
-namespace operand
-{
-
-class Object : public backend::operand::IObject
-{
-public:
- Object() = default;
-
-public:
- Object(const std::shared_ptr<::neurun::backend::acl_cl::operand::ICLTensor> &tensor)
- : _tensor{tensor}
- {
- // DO NOTHING
- }
-
-public:
- ::neurun::backend::acl_cl::operand::ICLTensor *ptr(void) const override { return _tensor.get(); }
-
-private:
- std::shared_ptr<::neurun::backend::acl_cl::operand::ICLTensor> _tensor;
-
-public:
- void
- access(const std::function<void(::neurun::backend::operand::ITensor &tensor)> &fn) const override;
-};
-
-} // namespace operand
-} // namespace acl_cl
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ACL_CL_OPERAND_OBJECT_H__
diff --git a/runtimes/neurun/src/backend/cpu/CMakeLists.txt b/runtimes/neurun/src/backend/cpu/CMakeLists.txt
deleted file mode 100644
index dc4406a65..000000000
--- a/runtimes/neurun/src/backend/cpu/CMakeLists.txt
+++ /dev/null
@@ -1,18 +0,0 @@
-file(GLOB_RECURSE SOURCES "*.cc")
-
-add_library(${LIB_NEURUN_BACKEND_CPU} SHARED ${SOURCES})
-
-target_include_directories(${LIB_NEURUN_BACKEND_CPU} PUBLIC ${NNFW_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN_BACKEND_CPU} PUBLIC ${NEURUN_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN_BACKEND_CPU} PUBLIC ${CMAKE_SOURCE_DIR}/externals/tensorflow)
-
-target_link_libraries(${LIB_NEURUN_BACKEND_CPU} tensorflow-lite)
-target_link_libraries(${LIB_NEURUN_BACKEND_CPU} nnfw_lib_misc)
-target_link_libraries(${LIB_NEURUN_BACKEND_CPU} nnfw_lib_cpp14)
-target_link_libraries(${LIB_NEURUN_BACKEND_CPU} ${LIB_NEURUN_KERNEL_CPU})
-target_link_libraries(${LIB_NEURUN_BACKEND_CPU} ${LIB_NEURUN})
-
-target_compile_options(${LIB_NEURUN_BACKEND_CPU} PRIVATE -Wall -Wextra -Werror)
-
-set_target_properties(${LIB_NEURUN_BACKEND_CPU} PROPERTIES OUTPUT_NAME backend_cpu)
-install(TARGETS ${LIB_NEURUN_BACKEND_CPU} DESTINATION lib/neurun)
diff --git a/runtimes/neurun/src/backend/cpu/Config.cc b/runtimes/neurun/src/backend/cpu/Config.cc
deleted file mode 100644
index 001ba9d02..000000000
--- a/runtimes/neurun/src/backend/cpu/Config.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "backend/cpu/Config.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-void Config::initialize()
-{
- // DO NOTHING
-}
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/cpu/Config.h b/runtimes/neurun/src/backend/cpu/Config.h
deleted file mode 100644
index ad9ca0ee8..000000000
--- a/runtimes/neurun/src/backend/cpu/Config.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_CPU_CONFIG_H__
-#define __NEURUN_BACKEND_CPU_CONFIG_H__
-
-#include "backend/interface/IConfig.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-class Config : public IConfig
-{
-public:
- Config()
- {
- // DO NOTHING
- }
-
- virtual std::string id() override { return "cpu"; }
- virtual void initialize() override;
- virtual graph::operand::Layout getOperandLayout() override { return graph::operand::Layout::NHWC; }
- virtual bool SupportSubTensorAlloc() override
- {
- // NOTE CPU allocator cannot support subtensor allocation yet
- return false;
- }
-};
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_CPU_CONFIG_H__
diff --git a/runtimes/neurun/src/backend/cpu/MemoryPlanner.cc b/runtimes/neurun/src/backend/cpu/MemoryPlanner.cc
deleted file mode 100644
index 2d0995b8a..000000000
--- a/runtimes/neurun/src/backend/cpu/MemoryPlanner.cc
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "MemoryPlanner.h"
-#include "util/logging.h"
-#include <cassert>
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-Allocator::Allocator(uint32_t capacity)
-{
- assert(!_base && capacity != 0);
-
- _base = new uint8_t[capacity];
-
- VERBOSE(ALLOC) << "allocation capacity: " << capacity << std::endl;
- VERBOSE(ALLOC) << "base pointer: " << static_cast<void *>(_base) << std::endl;
-}
-
-Allocator::~Allocator() { delete[] _base; }
-
-void BumpPlanner::claim(const model::operand::Index &ind, size_t size)
-{
- assert(size != 0);
-
- Block blk{_capacity, size};
- _mem_plans[ind] = blk;
- _capacity += size;
-
- VERBOSE(BP_PLANNER) << "CLAIM(#" << ind.value() << "): " << blk.offset << ", " << blk.size
- << std::endl;
-}
-
-void BumpPlanner::release(const model::operand::Index &ind)
-{
- VERBOSE(BP_PLANNER) << "RELEASE(#" << ind.value() << "): "
- << "does NOTHING" << std::endl;
-}
-
- // There are some assumptions for claiming memory (== making a reservation of memory).
- // 1. About _claim_table (std::map):
- // - std::map keeps its entries sorted, so values (model::operand::Index) are always
- // ordered by key (base_offset).
- // - claim() inserts a key/value pair into _claim_table and release() removes it.
- // - _claim_table therefore reflects the memory status at a given point in time:
- // - If _claim_table holds an offset (with a certain size) at that point in time,
- // the place at that offset is already claimed, so a new claim must look
- // somewhere else.
- // - If _claim_table has no entry covering an offset at that point in time,
- // the place at that offset can be claimed.
- // 2. Inside the loop over _claim_table, each claimed_base_offset is therefore guaranteed
- // to be bigger than the previous one.
- // (A worked example with concrete numbers follows this file's diff.)
-void FirstFitPlanner::claim(const model::operand::Index &ind, size_t size)
-{
- assert(size != 0);
-
- // Find the right position for claiming
- uint32_t next_offset = 0;
- for (auto &mem_claim : _claim_table)
- {
- auto claimed_base_offset = mem_claim.first;
- auto claimed_size = _mem_plans[mem_claim.second].size;
- if (next_offset + size <= claimed_base_offset)
- {
- break;
- }
- else
- {
- next_offset = claimed_base_offset + claimed_size;
- }
- }
-
- // Now next_offset is set to the proper offset
- _claim_table[next_offset] = ind;
- _mem_plans[ind] = {next_offset, size};
-
- VERBOSE(FF_PLANNER) << "claim(#" << ind.value() << "): [+" << next_offset << ", " << size << "sz]"
- << std::endl;
-
- if (_capacity < next_offset + size)
- {
- _capacity = next_offset + size;
- }
-}
-
-void FirstFitPlanner::release(const model::operand::Index &ind)
-{
- for (auto it = _claim_table.cbegin(); it != _claim_table.cend(); ++it)
- {
- if (it->second == ind)
- {
- uint32_t offset = it->first;
- uint32_t index = ind.value();
- uint32_t size = _mem_plans[ind].size;
-
- _claim_table.erase(it);
-
- VERBOSE(FF_PLANNER) << "release(#" << index << "): [+" << offset << ", " << size << "sz]"
- << std::endl;
- return;
- }
- }
- assert(!"Cannot release for given index. It has been not claimed or released already.");
-}
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
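The first-fit behavior of claim()/release() above is easiest to see with concrete numbers. This condensed restatement (plain uint32_t operand indices and invented sizes) shows how a release() opens a hole that a later claim() reuses:

// first_fit_sketch.cc: condensed restatement of FirstFitPlanner's logic.
#include <cassert>
#include <cstdint>
#include <map>

struct Block { uint32_t offset; uint32_t size; };

std::map<uint32_t, uint32_t> claim_table; // base offset -> operand index
std::map<uint32_t, Block> plans;          // operand index -> block

uint32_t claim(uint32_t ind, uint32_t size)
{
  uint32_t next = 0;
  for (const auto &c : claim_table) // sorted by offset, as in the real planner
  {
    if (next + size <= c.first) break;     // the hole before c is big enough
    next = c.first + plans[c.second].size; // otherwise skip past this claim
  }
  claim_table[next] = ind;
  plans[ind] = {next, size};
  return next;
}

void release(uint32_t ind)
{
  for (auto it = claim_table.begin(); it != claim_table.end(); ++it)
    if (it->second == ind) { claim_table.erase(it); return; }
}

int main()
{
  assert(claim(0, 10) == 0); // A occupies [0, 10)
  assert(claim(1, 5) == 10); // B occupies [10, 15)
  release(0);                // opens a hole at [0, 10)
  assert(claim(2, 8) == 0);  // C reuses the hole: first fit
  return 0;
}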
diff --git a/runtimes/neurun/src/backend/cpu/MemoryPlanner.h b/runtimes/neurun/src/backend/cpu/MemoryPlanner.h
deleted file mode 100644
index 4b2661223..000000000
--- a/runtimes/neurun/src/backend/cpu/MemoryPlanner.h
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file        MemoryPlanner.h
- * @brief       This file contains Memory Planning related classes
- */
-
-#ifndef __NEURUN_BACKEND_CPU_MEMORY_PLANNER_H__
-#define __NEURUN_BACKEND_CPU_MEMORY_PLANNER_H__
-
-#include <map>
-#include <unordered_map>
-
-#include "model/operand/Index.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-/**
- * @brief Structure to have memory offset and size
- */
-struct Block
-{
- uint32_t offset;
- uint32_t size;
-};
-
-/**
- * @brief Class to allocate memory
- */
-class Allocator
-{
-public:
- Allocator(uint32_t capacity);
- ~Allocator();
- /**
- * @brief Get memory base pointer
- * @return base pointer
- */
- uint8_t *base() const { return _base; }
-
-private:
- uint8_t *_base = nullptr;
-};
-
-/**
- * @brief Interface to plan memory
- */
-struct IMemoryPlanner
-{
- using MemoryPlans = std::unordered_map<model::operand::Index, Block>;
-
- /**
- * @brief Claim memory for operand
- * @param[in] index The operand index
- * @param[in] size The size of the memory
- */
- virtual void claim(const model::operand::Index &, size_t) = 0;
- /**
- * @brief Release memory for operand
- * @param[in] index The operand index
- */
- virtual void release(const model::operand::Index &) = 0;
- /**
- * @brief Get capacity for memory planning
- * @return The value of capacity
- */
- virtual uint32_t capacity() = 0;
- /**
- * @brief Get MemoryPlans
- * @return MemoryPlans
- */
- virtual MemoryPlans &memory_plans() = 0;
-};
-
-/**
- * @brief Class to plan memory by bump way
- */
-class BumpPlanner : public IMemoryPlanner
-{
-public:
- /**
- * @brief Claim memory for operand by bump way
- * @param[in] index The operand index
- * @param[in] size The size of the memory
- */
- virtual void claim(const model::operand::Index &, size_t) override;
- /**
- * @brief Release memory for operand by bump way
- * @param[in] index The operand index
- */
- virtual void release(const model::operand::Index &) override;
- /**
- * @brief Get capacity for memory planning
- * @return The value of capacity
- */
- virtual uint32_t capacity() override { return _capacity; }
- /**
- * @brief Get MemoryPlans
- * @return MemoryPlans
- */
- virtual MemoryPlans &memory_plans() override { return _mem_plans; }
-
-private:
- uint32_t _capacity = 0;
- MemoryPlans _mem_plans;
-};
-
-/**
- * @brief Class to plan memory by firstfit way
- */
-class FirstFitPlanner : public IMemoryPlanner
-{
-public:
- /**
- * @brief Claim memory for operand by firstfit way
- * @param[in] index The operand index
- * @param[in] size The size of the memory
- */
- virtual void claim(const model::operand::Index &, size_t) override;
- /**
- * @brief Release memory for operand by firstfit way
- * @param[in] index The operand index
- */
- virtual void release(const model::operand::Index &) override;
- /**
- * @brief Get capacity for memory planning
- * @return The value of capacity
- */
- virtual uint32_t capacity() override { return _capacity; }
- /**
- * @brief Get MemoryPlans
- * @return MemoryPlans
- */
- virtual MemoryPlans &memory_plans() override { return _mem_plans; }
-
-private:
- uint32_t _capacity = 0;
- MemoryPlans _mem_plans;
- // Use std::map because claim() assumes that _claim_table is sorted by uint32_t(base_offset)
- std::map<uint32_t, model::operand::Index> _claim_table;
-};
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_CPU_MEMORY_PLANNER_H__
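The flow these interfaces imply (inferred from the declarations above; the actual call sites live in the cpu TensorBuilder elsewhere in this diff) is claim/release during planning, then one Allocator sized from capacity(), with tensor pointers derived as base() + offset. A sketch with BumpPlanner's append-only logic restated inline:

// planner_flow_sketch.cc: plan-then-allocate flow implied by
// IMemoryPlanner/Allocator; sizes and indices are made up.
#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <vector>

struct Block { uint32_t offset; uint32_t size; };

int main()
{
  // Planning phase: append-only claims, as in BumpPlanner::claim().
  std::unordered_map<uint32_t, Block> plans;
  uint32_t capacity = 0;
  for (uint32_t ind : {0u, 1u, 2u})
  {
    const uint32_t size = 16 * (ind + 1); // made-up operand sizes
    plans[ind] = {capacity, size};
    capacity += size;
  }

  // Allocation phase: one arena, pointers derived from the plan
  // (compare Allocator::base() plus Block::offset).
  std::vector<uint8_t> arena(capacity);
  uint8_t *tensor1 = arena.data() + plans[1].offset;
  tensor1[0] = 0; // write through the derived pointer
  std::cout << "capacity=" << capacity << " tensor1 offset=" << plans[1].offset
            << std::endl; // capacity=96 tensor1 offset=16
  return 0;
}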
diff --git a/runtimes/neurun/src/backend/cpu/PluginClassesAllocator.cc b/runtimes/neurun/src/backend/cpu/PluginClassesAllocator.cc
deleted file mode 100644
index 26d4d8858..000000000
--- a/runtimes/neurun/src/backend/cpu/PluginClassesAllocator.cc
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <memory>
-#include "TensorBuilder.h"
-#include "StageGenerator.h"
-#include "Config.h"
-#include "util/logging.h"
-
-extern "C" {
-neurun::backend::cpu::TensorBuilder *allocate_TensorBuilder()
-{
- VERBOSE(allocate_TensorBuilder) << "loaded from CPU\n";
- return new neurun::backend::cpu::TensorBuilder;
-}
-
-neurun::backend::cpu::StageGenerator *
-allocate_StageGenerator(const neurun::model::operand::Set &operand_ctx,
- const std::shared_ptr<neurun::backend::cpu::TensorBuilder> &tensor_builder)
-{
- VERBOSE(allocate_StageGenerator) << "loaded from CPU\n";
- return new neurun::backend::cpu::StageGenerator(operand_ctx, tensor_builder);
-}
-
-neurun::backend::cpu::Config *allocate_Config()
-{
- VERBOSE(allocate_Config) << "loaded from CPU\n";
- return new neurun::backend::cpu::Config;
-}
-}
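These extern "C" allocate_* functions are the backend's plugin entry points: unmangled symbols a loader can resolve at runtime. A hedged sketch of how such a symbol is typically resolved with POSIX dlopen/dlsym follows; the BackendManager's actual loading code is not shown in this diff, and the library name is assumed from the CMake rules above (OUTPUT_NAME backend_cpu, installed under lib/neurun).

// plugin_load_sketch.cc: illustration of resolving the factory symbols above.
// Build with -ldl on Linux.
#include <dlfcn.h>
#include <iostream>

int main()
{
  // Path assumed from the CMake rules; adjust for a real installation.
  void *handle = dlopen("libbackend_cpu.so", RTLD_LAZY);
  if (handle == nullptr)
  {
    std::cerr << "dlopen failed: " << dlerror() << std::endl;
    return 1;
  }

  // The symbol name is unmangled because it was declared inside extern "C".
  using AllocConfigFn = void *(*)();
  auto allocate_Config =
      reinterpret_cast<AllocConfigFn>(dlsym(handle, "allocate_Config"));
  if (allocate_Config == nullptr)
  {
    std::cerr << "dlsym failed: " << dlerror() << std::endl;
    dlclose(handle);
    return 1;
  }

  void *config = allocate_Config(); // really a neurun::backend::cpu::Config *
  std::cout << "Config allocated at " << config << std::endl;
  dlclose(handle);
  return 0;
}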
diff --git a/runtimes/neurun/src/backend/cpu/StageGenerator.cc b/runtimes/neurun/src/backend/cpu/StageGenerator.cc
deleted file mode 100644
index c53b320a4..000000000
--- a/runtimes/neurun/src/backend/cpu/StageGenerator.cc
+++ /dev/null
@@ -1,547 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "StageGenerator.h"
-
-#include <stdexcept>
-
-#include "cpp14/memory.h"
-#include "util/Padding.h"
-#include "kernel/cpu/OperationUtils.h"
-#include "kernel/cpu/ConvolutionLayer.h"
-#include "kernel/cpu/AvgPoolLayer.h"
-#include "kernel/cpu/MaxPoolLayer.h"
-#include "kernel/cpu/ConcatLayer.h"
-#include "kernel/cpu/FullyConnectedLayer.h"
-#include "kernel/cpu/ReshapeLayer.h"
-#include "kernel/cpu/SoftMaxLayer.h"
-#include "kernel/cpu/PermuteLayer.h"
-#include "backend/BackendManager.h"
-#include "backend/interface/IConfig.h"
-
-#include "util/logging.h"
-
-#include "util/Utils.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-StageGenerator::StageGenerator(const neurun::model::operand::Set &operand_ctx,
- const std::shared_ptr<TensorBuilder> &tensor_builder)
- : _ctx(operand_ctx), _tensor_builder(tensor_builder)
-{
- // DO NOTHING
-}
-
-void StageGenerator::visit(const model::operation::Conv2DNode &node)
-{
- using model::operation::Conv2DNode;
-
- const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(Conv2DNode::Input::INPUT)};
- const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
- const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
-
- const auto vstride_index{node.param().vstride_index};
- const auto hstride_index{node.param().hstride_index};
-
- const auto padding_index{node.param().padding_index};
- const auto activation_index{node.param().activation_index};
-
- const PaddingCode padding_type =
- static_cast<PaddingCode>(_ctx.at(padding_index).asScalar<int32_t>());
-
- assert((ANEURALNETWORKS_PADDING_SAME == padding_type) ||
- (ANEURALNETWORKS_PADDING_VALID == padding_type));
-
- util::Stride stride;
-
- stride.vertical = _ctx.at(vstride_index).asScalar<int32_t>();
- stride.horizontal = _ctx.at(hstride_index).asScalar<int32_t>();
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index ofm_index;
- model::operand::Index ifm_index;
- model::operand::Index ker_index;
- model::operand::Index bias_index;
-
- ::neurun::kernel::cpu::Shape ofm_shape;
- ::neurun::kernel::cpu::Shape ifm_shape;
- ::neurun::kernel::cpu::Shape ker_shape;
- ::neurun::kernel::cpu::Shape bias_shape;
-
- util::Padding padding;
- util::Stride stride;
-
- FuseCode activation;
- };
-
- Param param;
-
- param.ofm_index = ofm_index;
- param.ifm_index = ifm_index;
- param.ker_index = ker_index;
- param.bias_index = bias_index;
-
- param.ofm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ofm_index));
- param.ifm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ifm_index));
- param.ker_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ker_index));
- param.bias_shape = ::neurun::kernel::cpu::getShape(_ctx.at(bias_index));
-
- param.stride = stride;
- param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? util::same_padding(_ctx.at(ifm_index).shape().asFeature(),
- _ctx.at(ofm_index).shape().asFeature(), stride,
- _ctx.at(ker_index).shape().asKernel().W,
- _ctx.at(ker_index).shape().asKernel().H)
- : util::valid_padding();
-
- param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto ofm_alloc = tensors->at(param.ofm_index);
- auto ifm_alloc = tensors->at(param.ifm_index);
- auto ker_alloc = tensors->at(param.ker_index);
- auto bias_alloc = tensors->at(param.bias_index);
-
- std::unique_ptr<::neurun::kernel::cpu::ConvolutionLayer> fn{
- new ::neurun::kernel::cpu::ConvolutionLayer};
-
- fn->configure(ifm_alloc->buffer(), param.ifm_shape, ker_alloc->buffer(), param.ker_shape,
- bias_alloc->buffer(), param.bias_shape, param.padding.left, param.padding.right,
- param.padding.top, param.padding.bottom, param.stride.horizontal,
- param.stride.vertical, param.activation, ofm_alloc->buffer(), param.ofm_shape);
-
- builder.append(std::move(fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::MaxPool2DNode &node)
-{
- const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(model::operation::MaxPool2DNode::Input::INPUT)};
-
- const auto kh_index{node.param().kh_index};
- const auto kw_index{node.param().kw_index};
-
- const auto vstride_index{node.param().vstride_index};
- const auto hstride_index{node.param().hstride_index};
-
- const auto padding_index{node.param().padding_index};
- const auto activation_index{node.param().activation_index};
-
- const int32_t kh = _ctx.at(kh_index).asScalar<int32_t>();
- const int32_t kw = _ctx.at(kw_index).asScalar<int32_t>();
-
- const int32_t vstride = _ctx.at(vstride_index).asScalar<int32_t>();
- const int32_t hstride = _ctx.at(hstride_index).asScalar<int32_t>();
-
- const PaddingCode padding_type =
- static_cast<PaddingCode>(_ctx.at(padding_index).asScalar<int32_t>());
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index ofm_index;
- model::operand::Index ifm_index;
-
- uint32_t kw;
- uint32_t kh;
-
- ::neurun::kernel::cpu::Shape ofm_shape;
- ::neurun::kernel::cpu::Shape ifm_shape;
-
- util::Padding padding;
- util::Stride stride;
-
- FuseCode activation;
- };
-
- Param param;
-
- param.ofm_index = ofm_index;
- param.ifm_index = ifm_index;
-
- param.kh = kh;
- param.kw = kw;
-
- param.ofm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ofm_index));
- param.ifm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ifm_index));
-
- param.stride.vertical = vstride;
- param.stride.horizontal = hstride;
-
- param.padding =
- (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? util::same_padding(_ctx.at(ifm_index).shape().asFeature(),
- _ctx.at(ofm_index).shape().asFeature(), param.stride, kw, kh)
- : util::valid_padding();
-
- param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto ofm_alloc = tensors->at(param.ofm_index).get();
- auto ifm_alloc = tensors->at(param.ifm_index).get();
-
- std::unique_ptr<::neurun::kernel::cpu::MaxPoolLayer> fn{
- new ::neurun::kernel::cpu::MaxPoolLayer};
-
- fn->configure(ifm_alloc->buffer(), param.ifm_shape, param.padding.left, param.padding.right,
- param.padding.top, param.padding.bottom, param.stride.horizontal,
- param.stride.vertical, param.kw, param.kh, param.activation, ofm_alloc->buffer(),
- param.ofm_shape);
-
- builder.append(std::move(fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::AvgPool2DNode &node)
-{
- const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(model::operation::AvgPool2DNode::Input::INPUT)};
-
- const auto kh_index{node.param().kh_index};
- const auto kw_index{node.param().kw_index};
-
- const auto vstride_index{node.param().vstride_index};
- const auto hstride_index{node.param().hstride_index};
-
- const auto padding_index{node.param().padding_index};
- const auto activation_index{node.param().activation_index};
-
- const int32_t kh = _ctx.at(kh_index).asScalar<int32_t>();
- const int32_t kw = _ctx.at(kw_index).asScalar<int32_t>();
-
- const int32_t vstride = _ctx.at(vstride_index).asScalar<int32_t>();
- const int32_t hstride = _ctx.at(hstride_index).asScalar<int32_t>();
-
- const PaddingCode padding_type =
- static_cast<PaddingCode>(_ctx.at(padding_index).asScalar<int32_t>());
-
- assert((ANEURALNETWORKS_PADDING_SAME == padding_type) ||
- (ANEURALNETWORKS_PADDING_VALID == padding_type));
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index ofm_index;
- model::operand::Index ifm_index;
-
- uint32_t kw;
- uint32_t kh;
-
- ::neurun::kernel::cpu::Shape ofm_shape;
- ::neurun::kernel::cpu::Shape ifm_shape;
-
- util::Padding padding;
- util::Stride stride;
-
- FuseCode activation;
- };
-
- Param param;
-
- param.ofm_index = ofm_index;
- param.ifm_index = ifm_index;
-
- param.kh = kh;
- param.kw = kw;
-
- param.ofm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ofm_index));
- param.ifm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ifm_index));
-
- param.stride.vertical = vstride;
- param.stride.horizontal = hstride;
-
- param.padding =
- (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? util::same_padding(_ctx.at(ifm_index).shape().asFeature(),
- _ctx.at(ofm_index).shape().asFeature(), param.stride, kw, kh)
- : util::valid_padding();
-
- param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto ofm_alloc = tensors->at(param.ofm_index).get();
- auto ifm_alloc = tensors->at(param.ifm_index).get();
-
- std::unique_ptr<::neurun::kernel::cpu::AvgPoolLayer> fn{
- new ::neurun::kernel::cpu::AvgPoolLayer};
-
- fn->configure(ifm_alloc->buffer(), param.ifm_shape, param.padding.left, param.padding.right,
- param.padding.top, param.padding.bottom, param.stride.horizontal,
- param.stride.vertical, param.kw, param.kh, param.activation, ofm_alloc->buffer(),
- param.ofm_shape);
-
- builder.append(std::move(fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::ConcatNode &node)
-{
- const auto ofm_index{node.getOutputs().at(0)};
- const auto axis_index{node.param().axis_index};
-
- struct Param
- {
- model::operand::Index output_index;
- std::vector<model::operand::Index> input_indexes;
-
- int32_t axis;
-
- ::neurun::kernel::cpu::Shape ofm_shape;
- std::vector<::neurun::kernel::cpu::Shape> ifm_shapes;
- };
-
- Param param;
-
- param.output_index = ofm_index;
- for (const auto &e : node.getInputs())
- {
- param.input_indexes.emplace_back(e);
- }
- param.axis = _ctx.at(axis_index).asScalar<int32_t>();
-
- param.ofm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(ofm_index));
-
- for (auto e : node.getInputs())
- {
- param.ifm_shapes.emplace_back(::neurun::kernel::cpu::getShape(_ctx.at(e)));
- }
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto output_alloc = tensors->at(param.output_index).get();
-
- std::vector<const uint8_t *> input_buffers;
- for (auto ifm_ind : param.input_indexes)
- {
- input_buffers.emplace_back(tensors->at(ifm_ind).get()->buffer());
- }
-
- std::unique_ptr<::neurun::kernel::cpu::ConcatLayer> fn{new ::neurun::kernel::cpu::ConcatLayer};
-
- fn->configure(input_buffers, param.ifm_shapes, param.axis, output_alloc->buffer(),
- param.ofm_shape);
-
- builder.append(std::move(fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::FullyConnectedNode &node)
-{
- using model::operation::FullyConnectedNode;
-
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
- const auto weight_index{node.getInputs().at(FullyConnectedNode::Input::WEIGHT)};
- const auto bias_index{node.getInputs().at(FullyConnectedNode::Input::BIAS)};
- const auto activation_index{node.param().activation_index};
-
- // Construct operation parameters
- struct Param
- {
- model::operand::Index output_index;
- model::operand::Index input_index;
- model::operand::Index weight_index;
- model::operand::Index bias_index;
-
- ::neurun::kernel::cpu::Shape ofm_shape;
- ::neurun::kernel::cpu::Shape ifm_shape;
- ::neurun::kernel::cpu::Shape weight_shape;
- ::neurun::kernel::cpu::Shape bias_shape;
-
- FuseCode activation;
- };
-
- Param param;
-
- param.output_index = output_index;
- param.input_index = input_index;
- param.weight_index = weight_index;
- param.bias_index = bias_index;
-
- param.ofm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(output_index));
- param.ifm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(input_index));
- param.weight_shape = ::neurun::kernel::cpu::getShape(_ctx.at(weight_index));
- param.bias_shape = ::neurun::kernel::cpu::getShape(_ctx.at(bias_index));
-
- param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto output_alloc = tensors->at(param.output_index).get();
- auto input_alloc = tensors->at(param.input_index).get();
- auto weight_alloc = tensors->at(param.weight_index).get();
- auto bias_alloc = tensors->at(param.bias_index).get();
-
- std::unique_ptr<::neurun::kernel::cpu::FullyConnectedLayer> fn{
- new ::neurun::kernel::cpu::FullyConnectedLayer};
-
- fn->configure(input_alloc->buffer(), param.ifm_shape, weight_alloc->buffer(),
- param.weight_shape, bias_alloc->buffer(), param.bias_shape, param.activation,
- output_alloc->buffer(), param.ofm_shape);
-
- builder.append(std::move(fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::ReshapeNode &node)
-{
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(model::operation::ReshapeNode::Input::INPUT)};
-
- struct Param
- {
- model::operand::Index output_index;
- model::operand::Index input_index;
-
- ::neurun::kernel::cpu::Shape ofm_shape;
- ::neurun::kernel::cpu::Shape ifm_shape;
- };
-
- Param param;
-
- param.output_index = output_index;
- param.input_index = input_index;
-
- param.ofm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(output_index));
- param.ifm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(input_index));
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto output_alloc = tensors->at(param.output_index).get();
- auto input_alloc = tensors->at(param.input_index).get();
-
- std::unique_ptr<::neurun::kernel::cpu::ReshapeLayer> fn{
- new ::neurun::kernel::cpu::ReshapeLayer};
-
- fn->configure(input_alloc->buffer(), param.ifm_shape, output_alloc->buffer(), param.ofm_shape);
-
- builder.append(std::move(fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::SoftmaxNode &node)
-{
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(model::operation::SoftmaxNode::Input::INPUT)};
- const auto scale_index{node.param().scale_index};
-
- struct Param
- {
- model::operand::Index output_index;
- model::operand::Index input_index;
-
- ::neurun::kernel::cpu::Shape ofm_shape;
- ::neurun::kernel::cpu::Shape ifm_shape;
-
- float scale;
- };
-
- Param param;
-
- param.output_index = output_index;
- param.input_index = input_index;
-
- param.ofm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(output_index));
- param.ifm_shape = ::neurun::kernel::cpu::getShape(_ctx.at(input_index));
-
- param.scale = _ctx.at(scale_index).asScalar<float>();
-
- auto tensors = _tensor_builder;
-
- returnStage([tensors, param](IExecutionBuilder &builder) {
- auto output_alloc = tensors->at(param.output_index).get();
- auto input_alloc = tensors->at(param.input_index).get();
-
- std::unique_ptr<::neurun::kernel::cpu::SoftMaxLayer> fn{
- new ::neurun::kernel::cpu::SoftMaxLayer};
-
- fn->configure(input_alloc->buffer(), param.ifm_shape, param.scale, output_alloc->buffer(),
- param.ofm_shape);
-
- builder.append(std::move(fn));
- });
-}
-
-void StageGenerator::visit(const model::operation::PermuteNode &node)
-{
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(0)};
-
- using PermuteType = model::operation::PermuteNode::Type;
-
- struct Param
- {
- model::operand::Index output_index;
- model::operand::Index input_index;
-
- model::operand::Shape shape;
-
- PermuteType type{PermuteType::COPY};
- };
-
- Param param;
-
- param.output_index = output_index;
- param.input_index = input_index;
-
- param.shape = _ctx.at(output_index).shape();
- param.type = node.param().type;
-
- // assert(param.shape == _ctx.at(input_index));
-
- const auto &input_li = _ctx.at(input_index).lower_info();
- const auto &output_li = _ctx.at(output_index).lower_info();
- const auto input_backend = input_li->def_backends().getOnlyElement();
- const auto output_backend = output_li->def_backends().getOnlyElement();
-
- const auto input_tensors = input_backend->tensor_builder();
- const auto output_tensors = output_backend->tensor_builder();
-
- returnStage([input_tensors, output_tensors, param](IExecutionBuilder &builder) {
- auto output_object = output_tensors->wrapTensor(param.output_index);
- auto input_object = input_tensors->wrapTensor(param.input_index);
-
- auto fn = nnfw::cpp14::make_unique<::neurun::kernel::cpu::PermuteLayer>();
-
- fn->configure(input_object, output_object, param.shape, param.type);
-
- builder.append(std::move(fn));
- });
-}
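-
-// NOTE (illustrative): unlike the other stages, Permute pulls its tensor
-// builders from the producing and consuming backends' lower_info, since it
-// copies data across backend boundaries.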
-
-void StageGenerator::visit(const model::operation::AddNode &) { throw std::runtime_error("NYI"); }
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/cpu/StageGenerator.h b/runtimes/neurun/src/backend/cpu/StageGenerator.h
deleted file mode 100644
index 6a0e387da..000000000
--- a/runtimes/neurun/src/backend/cpu/StageGenerator.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_CPU_STAGE_GENERATOR_H__
-#define __NEURUN_BACKEND_CPU_STAGE_GENERATOR_H__
-
-#include "backend/interface/IStageGenerator.h"
-
-#include "model/operand/Set.h"
-#include "backend/cpu/operand/Tensor.h"
-#include "TensorBuilder.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-class StageGenerator : public IStageGenerator
-{
-public:
- StageGenerator(const neurun::model::operand::Set &ctx,
- const std::shared_ptr<TensorBuilder> &tensor_builder);
-
- virtual std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
-
-#define OP(InternalName, IsNnApi, NnApiName) \
- virtual void visit(const model::operation::InternalName &) override;
-#include "model/operation/Op.lst"
-#undef OP
-
-private:
- const neurun::model::operand::Set &_ctx;
- std::shared_ptr<TensorBuilder> _tensor_builder;
-};
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_CPU_STAGE_GENERATOR_H__
diff --git a/runtimes/neurun/src/backend/cpu/TensorBuilder.cc b/runtimes/neurun/src/backend/cpu/TensorBuilder.cc
deleted file mode 100644
index 9c39b9c00..000000000
--- a/runtimes/neurun/src/backend/cpu/TensorBuilder.cc
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "TensorBuilder.h"
-
-#include <cassert>
-
-#include "operand/Object.h"
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-TensorBuilder::TensorBuilder() : _mem_planner(std::make_shared<FirstFitPlanner>())
-{
- // DO NOTHING
-}
-
-void TensorBuilder::registerTensorInfo(const model::operand::Index &ind,
- const compiler::TensorInfo &info)
-{
- _tensor_info_map.insert({ind, info});
-}
-
-void TensorBuilder::registerSubTensorInfo(const model::operand::Index &,
- const compiler::SubTensorInfo &)
-{
- // Not supported yet
- assert(false);
-}
-
-void TensorBuilder::notifyFirstUse(const model::operand::Index &ind)
-{
- assert(_tensor_info_map.find(ind) != _tensor_info_map.end());
- const auto &info = _tensor_info_map.at(ind);
-
- const auto size = info.total_size();
- _mem_planner->claim(ind, size);
-}
-
-void TensorBuilder::notifyLastUse(const model::operand::Index &ind) { _mem_planner->release(ind); }
-
-void TensorBuilder::prepare(void)
-{
- assert(_tensors.size() == 0);
-
- _mem_alloc = std::make_shared<Allocator>(_mem_planner->capacity());
- assert(_mem_alloc->base());
-
- for (auto &mem_plan : _mem_planner->memory_plans())
- {
- auto ind = mem_plan.first;
- auto mem_blk = mem_plan.second;
- const auto &info = _tensor_info_map[ind];
-
- uint8_t *buffer = _mem_alloc->base() + mem_blk.offset;
- auto tensor = std::make_shared<operand::Tensor>(info);
- tensor->setBuffer(buffer);
- _tensors[ind] = tensor;
-
- VERBOSE(CPU_TENSORBUILDER) << "TENSOR(#" << ind.value() << "): " << static_cast<void *>(buffer)
- << std::endl;
-
-    // If tensors are not created here, stages would cause a segmentation fault
- }
-}
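-
-// Illustrative lifecycle (assumed sizes): if operand #0 claims 64 bytes, #1 claims
-// 32 bytes, and #0 is released before #2 claims 16 bytes, a first-fit planner may
-// reuse #0's block for #2, so capacity() can be 96 rather than 112 bytes. prepare()
-// then binds each Tensor's buffer to base() plus the planned offset of its operand.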
-
-void TensorBuilder::allocate(void)
-{
-  // NOTE Nothing to do for now. Allocation is done in the prepare stage, which is wrong
-}
-
-std::shared_ptr<::neurun::backend::operand::ITensor>
-TensorBuilder::tensorAt(const model::operand::Index &ind)
-{
- return _tensors.at(ind);
-}
-
-std::shared_ptr<backend::operand::IObject>
-TensorBuilder::wrapTensor(const model::operand::Index &ind)
-{
- if (_objects.find(ind) != _objects.end())
- {
- return _objects.at(ind);
- }
- else
- {
- return _objects[ind] = std::make_shared<operand::Object>(_tensors.at(ind));
- }
-}
-
-void TensorBuilder::iterate(const IterateFunction &fn)
-{
- for (auto it : _tensors)
- {
- fn(it.first);
- }
-}
-
-std::shared_ptr<operand::Tensor> TensorBuilder::at(const ::neurun::model::operand::Index &ind)
-{
- return _tensors.at(ind);
-}
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/cpu/TensorBuilder.h b/runtimes/neurun/src/backend/cpu/TensorBuilder.h
deleted file mode 100644
index 2715d57f0..000000000
--- a/runtimes/neurun/src/backend/cpu/TensorBuilder.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_CPU_TENSOR_BUILDER_H__
-#define __NEURUN_BACKEND_CPU_TENSOR_BUILDER_H__
-
-#include <unordered_map>
-
-#include "backend/interface/ITensorBuilder.h"
-#include "backend/cpu/operand/Tensor.h"
-#include "backend/cpu/operand/Object.h"
-#include "model/operand/Index.h"
-#include "MemoryPlanner.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-
-class TensorBuilder : public ITensorBuilder
-{
-public:
- TensorBuilder();
-
- /**
- * @brief Register tensor information to allocate on CPU backend
- * @param[in] ind Operand index
- * @param[in] info Tensor information
- */
- virtual void registerTensorInfo(const model::operand::Index &ind,
- const compiler::TensorInfo &info) override;
- /**
- * @brief Register subtensor information to allocate on CPU backend
- * @param[in] ind Operand index
- * @param[in] info Tensor information
- */
- virtual void registerSubTensorInfo(const model::operand::Index &ind,
- const compiler::SubTensorInfo &info) override;
-
- virtual void notifyFirstUse(const model::operand::Index &) override;
- virtual void notifyLastUse(const model::operand::Index &) override;
-
- virtual void prepare(void) override;
- virtual void allocate(void) override;
-
- virtual std::shared_ptr<::neurun::backend::operand::ITensor>
- tensorAt(const model::operand::Index &ind) override;
- virtual std::shared_ptr<backend::operand::IObject>
- wrapTensor(const model::operand::Index &ind) override;
- virtual void iterate(const IterateFunction &fn) override;
-
- std::shared_ptr<operand::Tensor> at(const ::neurun::model::operand::Index &ind);
-
-private:
- std::unordered_map<model::operand::Index, compiler::TensorInfo> _tensor_info_map;
- std::unordered_map<model::operand::Index, std::shared_ptr<operand::Tensor>> _tensors;
- std::unordered_map<model::operand::Index, std::shared_ptr<operand::Object>> _objects;
- std::unordered_map<model::operand::Index, Block> _tensor_mem_map;
- std::shared_ptr<IMemoryPlanner> _mem_planner;
- std::shared_ptr<Allocator> _mem_alloc;
-};
-
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_CPU_TENSOR_BUILDER_H__
diff --git a/runtimes/neurun/src/backend/cpu/operand/Object.cc b/runtimes/neurun/src/backend/cpu/operand/Object.cc
deleted file mode 100644
index 011747a8c..000000000
--- a/runtimes/neurun/src/backend/cpu/operand/Object.cc
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Object.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-namespace operand
-{
-
-void Object::access(
- const std::function<void(::neurun::backend::operand::ITensor &tensor)> &fn) const
-{
- fn(*_tensor);
-}
-
-} // namespace operand
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/cpu/operand/Object.h b/runtimes/neurun/src/backend/cpu/operand/Object.h
deleted file mode 100644
index 5ef7c4fbf..000000000
--- a/runtimes/neurun/src/backend/cpu/operand/Object.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_CPU_OPERAND_OBJECT_H__
-#define __NEURUN_BACKEND_CPU_OPERAND_OBJECT_H__
-
-#include <memory>
-#include "backend/interface/operand/ITensor.h"
-
-#include "backend/interface/operand/IObject.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-namespace operand
-{
-
-class Object : public backend::operand::IObject
-{
-public:
- Object() = default;
-
-public:
- Object(const std::shared_ptr<::neurun::backend::operand::ITensor> &tensor) : _tensor{tensor}
- {
- // DO NOTHING
- }
-
-public:
- ::neurun::backend::operand::ITensor *ptr(void) const override { return _tensor.get(); }
-
-private:
- std::shared_ptr<::neurun::backend::operand::ITensor> _tensor;
-
-public:
- void
- access(const std::function<void(::neurun::backend::operand::ITensor &tensor)> &fn) const override;
-};
-
-} // namespace operand
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_CPU_OPERAND_OBJECT_H__
diff --git a/runtimes/neurun/src/backend/cpu/operand/Tensor.cc b/runtimes/neurun/src/backend/cpu/operand/Tensor.cc
deleted file mode 100644
index a5251292e..000000000
--- a/runtimes/neurun/src/backend/cpu/operand/Tensor.cc
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Tensor.h"
-
-#define NO_USE(a) (void)(a)
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-namespace operand
-{
-
-size_t Tensor::calcOffset(const neurun::util::feature::Coordinate4D &coords)
-{
- NO_USE(coords);
- throw std::runtime_error("offset_element_in_bytes is not supported for cpu::Tensor now.");
-}
-
-} // namespace operand
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
diff --git a/runtimes/neurun/src/backend/cpu/operand/Tensor.h b/runtimes/neurun/src/backend/cpu/operand/Tensor.h
deleted file mode 100644
index 7500f890f..000000000
--- a/runtimes/neurun/src/backend/cpu/operand/Tensor.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_CPU_OPERAND_TENSOR_H__
-#define __NEURUN_BACKEND_CPU_OPERAND_TENSOR_H__
-
-#include "backend/interface/operand/ITensor.h"
-#include "compiler/TensorInfo.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace cpu
-{
-namespace operand
-{
-
-class Tensor : public ::neurun::backend::operand::ITensor
-{
-public:
- Tensor() = delete;
-
-public:
- Tensor(const compiler::TensorInfo &info) : _info(info)
- {
- // DO NOTHING
- }
-
-public:
- void setBuffer(uint8_t *buffer) { _buffer = buffer; }
- ::neurun::model::operand::DataType data_type() const { return _info.typeInfo().type(); }
-
-public:
- uint8_t *buffer() const override { return _buffer; }
- /**
- * @brief Get dimension by index
- *
-   * @param index Index of the dimension to get
- * @return size_t Dimension at index
- * @note N : dimension(0)
- * H : dimension(1)
- * W : dimension(2)
- * C : dimension(3)
- */
- size_t dimension(size_t index) const override { return _info.shape().dim(index); }
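-  // Illustrative mapping (assumed NHWC shape {1, 3, 4, 2}): dimension(0) == 1 (N),
-  // dimension(1) == 3 (H), dimension(2) == 4 (W), and dimension(3) == 2 (C).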
- size_t num_dimensions() const override { return _info.shape().dims().size(); }
- size_t total_size() const override { return _info.total_size(); }
- size_t calcOffset(const neurun::util::feature::Coordinate4D &coords) override;
-
-private:
- compiler::TensorInfo _info;
- uint8_t *_buffer = nullptr;
-};
-
-} // namespace operand
-} // namespace cpu
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_CPU_OPERAND_TENSOR_H__
diff --git a/runtimes/neurun/src/backend/interface/IConfig.h b/runtimes/neurun/src/backend/interface/IConfig.h
deleted file mode 100644
index 82789d0ff..000000000
--- a/runtimes/neurun/src/backend/interface/IConfig.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ICONFIG_H__
-#define __NEURUN_BACKEND_ICONFIG_H__
-
-#include <string>
-
-#include "graph/operand/Layout.h"
-
-namespace neurun
-{
-namespace backend
-{
-
-struct IConfig
-{
- virtual ~IConfig() = default;
-
- virtual std::string id() = 0;
- virtual void initialize() = 0;
- // NOTE Assume backend has only one type of operand layout
- virtual graph::operand::Layout getOperandLayout() = 0;
- // Support subtensor allocation
- virtual bool SupportSubTensorAlloc() = 0;
-};
-
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ICONFIG_H__
diff --git a/runtimes/neurun/src/backend/interface/IStageGenerator.h b/runtimes/neurun/src/backend/interface/IStageGenerator.h
deleted file mode 100644
index 878a50e3f..000000000
--- a/runtimes/neurun/src/backend/interface/IStageGenerator.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ISTAGE_GENERATOR_H__
-#define __NEURUN_BACKEND_ISTAGE_GENERATOR_H__
-
-#include <memory>
-#include <functional>
-
-#include "exec/interface/IFunction.h"
-
-#include "backend/interface/ITensorBuilder.h"
-#include "model/operation/NodeVisitor.h"
-
-struct IExecutionBuilder
-{
- virtual ~IExecutionBuilder() = default;
-
- virtual void append(std::unique_ptr<::neurun::exec::IFunction> &&f) = 0;
-};
-
-using Stage = std::function<void(IExecutionBuilder &)>;
-
-namespace neurun
-{
-namespace backend
-{
-
-class IStageGenerator : model::operation::NodeVisitor
-{
-public:
- virtual ~IStageGenerator() = default;
-
- virtual std::shared_ptr<ITensorBuilder> tensor_builder() = 0;
-
-protected:
-#define OP(InternalName, IsNnApi, NnApiName) \
- virtual void visit(const model::operation::InternalName &) override {}
-#include "model/operation/Op.lst"
-#undef OP
-
-protected:
- void returnStage(const Stage &stage) { _return = stage; }
-
-public:
- Stage generate(const model::operation::Node &node)
- {
- node.accept(std::move(*this));
- return _return;
- }
-
-private:
- Stage _return = nullptr;
-};
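-
-// Illustrative flow (assumed caller, e.g. a compiler pass):
-//
-//   Stage stage = stage_gen->generate(node); // visit() stores a lambda via returnStage()
-//   stage(execution_builder);                // the lambda appends IFunction kernels
-//
-// Each generate() call overwrites _return, so the returned Stage should be
-// consumed before the next node is visited.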
-
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ISTAGE_GENERATOR_H__
diff --git a/runtimes/neurun/src/backend/interface/ITensorBuilder.h b/runtimes/neurun/src/backend/interface/ITensorBuilder.h
deleted file mode 100644
index 354a270e6..000000000
--- a/runtimes/neurun/src/backend/interface/ITensorBuilder.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_ITENSOR_BUILDER_H__
-#define __NEURUN_BACKEND_ITENSOR_BUILDER_H__
-
-#include <map>
-
-#include "model/operand/Index.h"
-#include "operand/IObject.h"
-#include "compiler/SubTensorInfo.h"
-#include "compiler/TensorInfo.h"
-#include "backend/interface/operand/ITensor.h"
-
-namespace neurun
-{
-namespace backend
-{
-
-struct ITensorBuilder
-{
- using IterateFunction = std::function<void(const model::operand::Index &)>;
-
- virtual ~ITensorBuilder(void) = default;
-
-  // TODO Merge registerTensorInfo and registerSubTensorInfo by abstracting them into an internal class
- /**
- * @brief Register tensor information to allocate on backend
- */
- virtual void registerTensorInfo(const model::operand::Index &, const compiler::TensorInfo &) = 0;
- /**
- * @brief Register subtensor information to allocate on backend
- */
- virtual void registerSubTensorInfo(const model::operand::Index &,
- const compiler::SubTensorInfo &) = 0;
-
- virtual void notifyFirstUse(const model::operand::Index &) = 0;
- virtual void notifyLastUse(const model::operand::Index &) = 0;
-
- virtual void prepare(void) = 0;
- virtual void allocate(void) = 0;
-
- virtual std::shared_ptr<::neurun::backend::operand::ITensor>
- tensorAt(const model::operand::Index &ind) = 0;
- virtual std::shared_ptr<backend::operand::IObject>
- wrapTensor(const model::operand::Index &ind) = 0;
- virtual void iterate(const IterateFunction &fn) = 0;
-};
-
-} // namespace backend
-} // namespace neurun
-
-#include <set>
-#include <memory>
-
-namespace neurun
-{
-namespace backend
-{
-
-using TensorBuilderSet = std::set<std::shared_ptr<backend::ITensorBuilder>>;
-
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_ITENSOR_BUILDER_H__
diff --git a/runtimes/neurun/src/backend/interface/operand/IObject.h b/runtimes/neurun/src/backend/interface/operand/IObject.h
deleted file mode 100644
index 44b33b080..000000000
--- a/runtimes/neurun/src/backend/interface/operand/IObject.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_OPERAND_I_OBJECT_H__
-#define __NEURUN_BACKEND_OPERAND_I_OBJECT_H__
-
-#include <functional>
-
-#include "ITensor.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace operand
-{
-
-struct IObject
-{
- virtual ~IObject() = default;
- virtual ::neurun::backend::operand::ITensor *ptr(void) const = 0;
- virtual void
- access(const std::function<void(::neurun::backend::operand::ITensor &tensor)> &fn) const = 0;
-};
-
-} // namespace operand
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_OPERAND_I_OBJECT_H__
diff --git a/runtimes/neurun/src/backend/interface/operand/ITensor.h b/runtimes/neurun/src/backend/interface/operand/ITensor.h
deleted file mode 100644
index 8bc3ff465..000000000
--- a/runtimes/neurun/src/backend/interface/operand/ITensor.h
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_BACKEND_OPERAND_I_TENSOR_H__
-#define __NEURUN_BACKEND_OPERAND_I_TENSOR_H__
-
-#include <cstring>
-#include <cstdint>
-
-#include "util/feature/Coordinate4D.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace operand
-{
-
-class ITensor
-{
-public:
- virtual ~ITensor() = default;
-
-public:
- virtual uint8_t *buffer() const = 0;
- virtual size_t total_size() const = 0;
- virtual size_t dimension(size_t index) const = 0;
- virtual size_t num_dimensions() const = 0;
- virtual size_t calcOffset(const neurun::util::feature::Coordinate4D &coords) = 0;
-};
-
-} // namespace operand
-} // namespace backend
-} // namespace neurun
-
-#endif // __NEURUN_BACKEND_OPERAND_I_TENSOR_H__
diff --git a/runtimes/neurun/src/compiler/BackendResolver.cc b/runtimes/neurun/src/compiler/BackendResolver.cc
deleted file mode 100644
index 6c1f32603..000000000
--- a/runtimes/neurun/src/compiler/BackendResolver.cc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BackendResolver.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-// NOT IMPLEMENTED
-
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/BackendResolver.h b/runtimes/neurun/src/compiler/BackendResolver.h
deleted file mode 100644
index 4742b2d94..000000000
--- a/runtimes/neurun/src/compiler/BackendResolver.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_COMPILER_BACKEND_RESOLVER_H__
-#define __NEURUN_COMPILER_BACKEND_RESOLVER_H__
-
-#include <set>
-#include <unordered_map>
-#include <typeindex>
-
-#include "util/logging.h"
-#include "util/config/ConfigManager.h"
-#include "backend/BackendManager.h"
-#include "backend/interface/IStageGenerator.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-class BackendResolver
-{
-public:
- BackendResolver(const neurun::model::operand::Set &operands)
- {
- _backend_manager = std::make_shared<backend::BackendManager>(operands);
-
- const auto backend_all_str =
- config::ConfigManager::instance().get<std::string>("OP_BACKEND_ALLOPS");
- if (backend_all_str.compare("none") != 0)
- {
- VERBOSE(BackendResolver) << "Use backend for all ops: " << backend_all_str << std::endl;
-#define OP(InternalName, IsNnApi, NnApiName) \
- if (IsNnApi) \
- { \
- auto backend = _backend_manager->get(backend_all_str); \
- _gen_map[typeid(model::operation::InternalName)] = backend; \
- }
-#include "model/operation/Op.lst"
-#undef OP
- }
- else
- {
-#define OP(InternalName, IsNnApi, NnApiName) \
- if (IsNnApi) \
- { \
- const auto &backend_str = \
- config::ConfigManager::instance().get<std::string>("OP_BACKEND_" #NnApiName); \
- auto backend = _backend_manager->get(backend_str); \
- VERBOSE(BackendResolver) << "backend for " << #NnApiName << ": " << backend_str << std::endl; \
- _gen_map[typeid(model::operation::InternalName)] = backend; \
- }
-
-#include "model/operation/Op.lst"
-#undef OP
- }
- }
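-
-  // Illustrative resolution (assumed config values): with OP_BACKEND_ALLOPS=none and
-  // OP_BACKEND_CONV_2D=acl_cl, getBackend(typeid(model::operation::Conv2DNode)) would
-  // return the "acl_cl" backend, while getDefaultBackend() always returns "cpu".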
-
-public:
- const backend::Backend *getBackend(const std::type_index &type) { return _gen_map[type]; }
- const backend::Backend *getDefaultBackend() const
- {
- backend::Backend *default_backend = _backend_manager->get("cpu");
- return default_backend;
- }
-
-private:
- std::unordered_map<std::type_index, backend::Backend *> _gen_map;
- std::shared_ptr<backend::BackendManager> _backend_manager;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_BACKEND_RESOLVER_H__
diff --git a/runtimes/neurun/src/compiler/Compiler.cc b/runtimes/neurun/src/compiler/Compiler.cc
deleted file mode 100644
index 92ec69afb..000000000
--- a/runtimes/neurun/src/compiler/Compiler.cc
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Compiler.h"
-
-#include "OperationValidator.h"
-#include "SubTensorAnalyzer.h"
-#include "PlanBuilder.h"
-#include "ConstantInitializer.h"
-
-#include "graph/dumper/Dumper.h"
-#include "graph/operation/LowerInfo.h"
-#include "dumper/dot/DotDumper.h"
-#include "linear/Linear.h"
-
-namespace neurun
-{
-
-namespace compiler
-{
-
-void Compiler::compile(void)
-{
- auto &plan = this->plan();
- auto &graph = plan.model();
- const auto &operands = graph.operands();
-
- // Disable compile phase
- // When ready to use interpreter backend, remove this config and use backend setting
- const auto env_disable_compile = config::ConfigManager::instance().get<bool>("DISABLE_COMPILE");
- if (env_disable_compile)
- {
- plan.state(State::NOT_COMPILED);
- return;
- }
-
- /***************************************************
- * Backend independent analysis & optimization phase
- ***************************************************/
-
- /*************************************************************
- * Backend independent analysis & optimization phase finished
- *************************************************************/
-
- // dump graph to .dot
- neurun::dumper::dot::DotDumper dot_dumper(graph);
- dot_dumper.dumpIfNeeded("before_lower");
-
- // Lower: decide backend
- graph.lower();
- plan.state(State::LOWERED);
-
- dot_dumper.dumpIfNeeded("after_lower");
-
- auto linear = graph.linearize();
- plan.state(State::LINEARIZED);
-
- // Dump ops
- linear->accept(neurun::graph::dumper::Dumper{});
-
- linear->accept(OperationValidator{operands});
-
- /*************************************************
- * Backend dependent analysis & optimization phase
- *************************************************/
-
-  // SubTensorInfo should be generated after lowering, but before stage generation and finalize,
-  // because SubTensorAnalyzer assumes that permutation insertion is already finished
-  //   lower: decide backend and insert permutation
-  //   stage generation: prepare codegen for optimization
-  //   finalize: generate tensors using subtensor info, then execute stages
-  // Generated SubTensorInfo is stored in the operand (Object)
-  //   so that it can easily be passed to the plan builder and tensor builder
- linear->accept(SubTensorAnalyzer{graph.operands()});
-
- /**********************************************************
- * Backend dependent analysis & optimization phase finished
- **********************************************************/
-
- /***********************
- * Code generation phase
- ***********************/
-
- PlanBuilder plan_builder{plan};
-
- // Plan building
- linear->iterate([&](const linear::Element &element) {
- auto backend = element.lower_info->backend();
-
- // Generate Stage
- auto stage_gen = backend->stage_gen();
- plan_builder.addStage(stage_gen->generate(*element.node));
- });
-
- auto tensor_builders = linear->planTensors();
-
- // TODO Add optimization passes
- plan_builder.finalize(tensor_builders);
-
- ConstantInitializer{graph, plan}();
-
- /********************************
- * Code generation phase finished
- ********************************/
-
- plan.state(State::COMPILED);
-}
-
-} // namespace compiler
-
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/Compiler.h b/runtimes/neurun/src/compiler/Compiler.h
deleted file mode 100644
index d8f620a10..000000000
--- a/runtimes/neurun/src/compiler/Compiler.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Compiler.h
- * @brief This file contains Compiler class to define and run compilation phase
- */
-
-#ifndef __NEURUN_COMPILER_COMPILE_H_
-#define __NEURUN_COMPILER_COMPILE_H_
-
-#include "graph/Graph.h"
-#include "Plan.h"
-
-namespace neurun
-{
-
-namespace compiler
-{
-
-/**
- * @brief Class to compile graph model
- */
-class Compiler
-{
-public:
- /**
- * @brief Construct a new Compiler object
- * @param[in] model Graph model
- */
- Compiler(const std::shared_ptr<graph::Graph> &model) : _plan{new Plan{model}}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Return plan
- * @return Plan
- */
- Plan &plan(void) { return *_plan; }
- /**
- * @brief Run compilation
- */
- void compile(void);
- /**
- * @brief Pass plan reference
- * @param[out] plan Plan reference to return
- */
- void release(std::shared_ptr<const Plan> &plan) { plan = _plan; }
-
-private:
- std::shared_ptr<Plan> _plan;
-};
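-
-// Minimal usage sketch (illustrative; assumes an already-built graph::Graph):
-//
-//   Compiler compiler{graph};
-//   compiler.compile();
-//   std::shared_ptr<const Plan> plan;
-//   compiler.release(plan); // plan->state() is State::COMPILED on success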
-
-} // namespace compiler
-
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_COMPILE_H_
diff --git a/runtimes/neurun/src/compiler/ConstantInitializer.cc b/runtimes/neurun/src/compiler/ConstantInitializer.cc
deleted file mode 100644
index d6d58e273..000000000
--- a/runtimes/neurun/src/compiler/ConstantInitializer.cc
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ConstantInitializer.h"
-
-#include "backend/interface/operand/IObject.h"
-#include "backend/interface/IConfig.h"
-#include "backend/BackendManager.h"
-#include "model/operation/FullyConnectedNode.h"
-#include "util/feature/nhwc/Reader.h"
-#include "util/feature/nhwc/View.h"
-#include "util/feature/nchw/View.h"
-#include "misc/feature/IndexIterator.h"
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-ConstantInitializer::ConstantInitializer(const graph::Graph &graph, Plan &plan)
- : _graph{graph}, _plan{plan}
-{
-}
-
-void ConstantInitializer::operator()()
-{
- // Fill operand data
- _plan.operands().iterate([&](int ind, neurun::backend::operand::IObject &obj) {
- neurun::model::operand::Index index(ind);
- const auto &model_obj = _graph.operands().at(index);
-
-    // Only for CONSTANT operands
- if (model_obj.getUsage() != neurun::model::operand::OperandUsage::CONSTANT)
- return;
-
- // Only float32 is supported
- auto type = model_obj.typeInfo().type();
- if (type != ::neurun::model::operand::DataType::TENSOR_FLOAT32)
- throw std::runtime_error{"Unsupported data type. Only TENSOR_FLOAT32 is supported."};
-
- VERBOSE(FillOperandData) << "Fill data for operand " << ind << std::endl;
-
- auto layout =
- model_obj.lower_info()->def_backends().getOnlyElement()->config()->getOperandLayout();
- const auto shape = model_obj.shape();
- auto base = reinterpret_cast<const float *>(model_obj.data().base());
- auto size = model_obj.data().size();
-
- obj.access([&](::neurun::backend::operand::ITensor &tensor) {
- switch (shape.rank())
- {
- case 1:
- {
- auto vec_size = shape.asVector();
- for (int32_t n = 0; n < vec_size; ++n)
- {
- const float *from = reinterpret_cast<const float *>(base) + n;
- const auto value = *from;
-
- float *into = reinterpret_cast<float *>(tensor.buffer()) + n;
-
- *into = value;
- }
- break;
- }
- case 2:
- {
-        // NOTE This is a WORKAROUND which supports FullyConnected weights only
-        // For FullyConnected, we must know the IFM shape to deduce the 2D weight shape
-        // from the 4D IFM, because the mapping order differs between the NHWC and NCHW
-        // layouts.
- // TODO Support general case - explicitly insert Reshape op for IFM as 2D
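-        // Illustrative check (assumed shapes): with IFM C=8, H=2, W=2 and the NHWC
-        // ordering used below, weight element (nth=1, ch=3, row=0, col=1) lands at
-        // offset = 1*2*2*8 + 0*2*8 + 1*8 + 3 = 43 floats from the buffer base.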
-
- // Find corresponding FullyConnected IFM
- auto operation_index = _graph.operands().at(index).getUses().list().front();
- auto operation = &_graph.operations().at(operation_index);
- auto fc_operation =
- dynamic_cast<const neurun::model::operation::FullyConnectedNode *>(operation);
-
- if (fc_operation == nullptr)
- break;
-
- auto ifm_index = fc_operation->getInputs().at(
- neurun::model::operation::FullyConnectedNode::Input::INPUT);
- const auto &ifm = _graph.operands().at(ifm_index);
- const auto ifm_shape = ifm.shape().asFeature();
- const auto num_output = shape.dim(0);
-
- const ::nnfw::misc::feature::Shape ker_shape{num_output, ifm_shape.C, ifm_shape.H,
- ifm_shape.W};
- const util::feature::nhwc::Reader<float> from{ker_shape, base, size};
-
- if (layout == neurun::graph::operand::Layout::NHWC)
- {
- ::nnfw::misc::feature::iterate(ker_shape)
- << [&](uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(nth, ch, row, col);
-
- uint32_t offset = 0;
-
- // NNAPI uses NHWC ordering
- offset += nth * ifm_shape.H * ifm_shape.W * ifm_shape.C;
- offset += row * ifm_shape.W * ifm_shape.C;
- offset += col * ifm_shape.C;
- offset += ch;
-
- float *into = reinterpret_cast<float *>(tensor.buffer()) + offset;
-
- *into = value;
- };
- }
- else
- {
- assert(layout == neurun::graph::operand::Layout::NCHW);
-
- ::nnfw::misc::feature::iterate(ker_shape)
- << [&](uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(nth, ch, row, col);
-
- uint32_t offset = 0;
-
- // 'NCHW' ordering
- offset += nth * ifm_shape.C * ifm_shape.H * ifm_shape.W;
- offset += ch * ifm_shape.H * ifm_shape.W;
- offset += row * ifm_shape.W;
- offset += col;
-
- float *into = reinterpret_cast<float *>(tensor.buffer()) + offset;
-
- *into = value;
- };
- }
-
- break;
- }
- case 4:
- {
- auto ker_shape = shape.asFeature();
- auto from = util::feature::nhwc::Reader<float>{ker_shape, base, size};
-
- if (layout == neurun::graph::operand::Layout::NHWC)
- {
- auto into = util::feature::nhwc::View<float>{
- ker_shape, reinterpret_cast<float *>(tensor.buffer()), size};
-
- ::nnfw::misc::feature::iterate(ker_shape)
- << [&](uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(nth, ch, row, col);
- into.at(nth, ch, row, col) = value;
- };
- }
- else
- {
- assert(layout == neurun::graph::operand::Layout::NCHW);
-
- auto into = util::feature::nchw::View<float>{&tensor};
-
- ::nnfw::misc::feature::iterate(ker_shape)
- << [&](uint32_t nth, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(nth, ch, row, col);
- into.at(nth, ch, row, col) = value;
- };
- }
- break;
- }
- default:
- throw std::runtime_error{"Not yet supported"};
- }
- });
- });
-}
-
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/ConstantInitializer.h b/runtimes/neurun/src/compiler/ConstantInitializer.h
deleted file mode 100644
index 7d9231908..000000000
--- a/runtimes/neurun/src/compiler/ConstantInitializer.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_COMPILER_CONSTANT_INITIALIZER_H__
-#define __NEURUN_COMPILER_CONSTANT_INITIALIZER_H__
-
-#include "graph/Graph.h"
-#include "Plan.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-class ConstantInitializer
-{
-public:
- ConstantInitializer(const graph::Graph &graph, Plan &plan);
-
- void operator()();
-
-private:
- const graph::Graph &_graph;
- Plan &_plan;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_CONSTANT_INITIALIZER_H__
diff --git a/runtimes/neurun/src/compiler/OperationValidator.cc b/runtimes/neurun/src/compiler/OperationValidator.cc
deleted file mode 100644
index 0110eccb8..000000000
--- a/runtimes/neurun/src/compiler/OperationValidator.cc
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "OperationValidator.h"
-
-#include <typeinfo>
-
-#include "model/operand/Set.h"
-#include "graph/operation/LowerInfo.h"
-
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-void OperationValidator::visit(const model::operation::Conv2DNode &)
-{
- // DO NOTHING
-}
-
-void OperationValidator::visit(const model::operation::MaxPool2DNode &)
-{
- // DO NOTHING
-}
-
-void OperationValidator::visit(const model::operation::AvgPool2DNode &)
-{
- // DO NOTHING
-}
-
-void OperationValidator::visit(const model::operation::ConcatNode &node)
-{
-  (void)node; // NOTE To prevent an unused-variable warning
-
- // NOTE This implementation assumes concat over feature depth
- // TODO Remove this assumption
- assert(_ctx.at(::neurun::model::operand::Index{node.param().axis_index}).asScalar<int32_t>() ==
- 3);
-}
-
-void OperationValidator::visit(const model::operation::FullyConnectedNode &)
-{
- // DO NOTHING
-}
-
-void OperationValidator::visit(const model::operation::ReshapeNode &node)
-{
-  (void)node; // NOTE To prevent an unused-variable warning
-
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(0)};
-
- // NOTE The content of a tensor specified by shape_index should be aligned with
- // output tensor shape
-  // TODO Check consistency of output shape
-
- // 'Feature Map' to 'Vector' reshape
- assert(_ctx.at(input_index).shape().rank() == 4);
- assert(_ctx.at(output_index).shape().rank() == 2);
- assert(_ctx.at(output_index).shape().dim(0) == 1);
-
- // NOTE Vector element ordering issue arises when H or W is not 1
- assert(_ctx.at(input_index).shape().dim(1) == 1); // H
- assert(_ctx.at(input_index).shape().dim(2) == 1); // W
- // input(4D)'s C * H * W == output(2D)'s W
- assert((_ctx.at(input_index).shape().dim(3) * _ctx.at(input_index).shape().dim(1) *
- _ctx.at(input_index).shape().dim(2)) == _ctx.at(output_index).shape().dim(1));
-}
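-
-// Illustrative case (assumed shapes) for the Reshape checks above: a 1x1x1x120
-// feature map reshaped to a 1x120 vector passes, since 120 * 1 * 1 == 120.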
-
-void OperationValidator::visit(const model::operation::SoftmaxNode &node)
-{
-  (void)node; // NOTE To prevent an unused-variable warning
-
- VERBOSE(Softmax) << "Configure SOFTMAX operation" << std::endl;
-
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(0)};
-
- assert(_ctx.at(output_index).shape().rank() == _ctx.at(input_index).shape().rank());
-
- // TODO Support 'feature map' input
- assert(_ctx.at(input_index).shape().rank() == 2);
- assert(_ctx.at(input_index).shape().dim(0) == 1);
- assert(_ctx.at(input_index).shape().dim(0) == _ctx.at(output_index).shape().dim(0));
- assert(_ctx.at(input_index).shape().dim(1) == _ctx.at(output_index).shape().dim(1));
-}
-
-void OperationValidator::visit(const model::operation::PermuteNode &node)
-{
-  (void)node; // NOTE To prevent an unused-variable warning
-
- VERBOSE(Permute) << "Configure Permute operation" << std::endl;
-
- const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(0)};
-
- assert(_ctx.at(output_index).shape().rank() == _ctx.at(input_index).shape().rank());
-}
-
-void OperationValidator::visit(const model::operation::AddNode &)
-{
- // DO NOTHING
-}
-
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/OperationValidator.h b/runtimes/neurun/src/compiler/OperationValidator.h
deleted file mode 100644
index f4ed533ed..000000000
--- a/runtimes/neurun/src/compiler/OperationValidator.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_COMPILER_OPERATION_VALIDATOR_H__
-#define __NEURUN_COMPILER_OPERATION_VALIDATOR_H__
-
-#include "model/operation/NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-class Set;
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-namespace neurun
-{
-namespace compiler
-{
-
-class OperationValidator : public model::operation::NodeVisitor
-{
-public:
- OperationValidator(const neurun::model::operand::Set &ctx) : _ctx{ctx} {}
-
-public:
-#define OP(InternalName, IsNnApi, NnApiName) \
- virtual void visit(const model::operation::InternalName &) override;
-#include "model/operation/Op.lst"
-#undef OP
-
-private:
- const neurun::model::operand::Set &_ctx;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_OPERATION_VALIDATOR_H__
diff --git a/runtimes/neurun/src/compiler/Plan.cc b/runtimes/neurun/src/compiler/Plan.cc
deleted file mode 100644
index b7637b189..000000000
--- a/runtimes/neurun/src/compiler/Plan.cc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Plan.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-// NO IMPLEMENTATION YET
-
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/Plan.h b/runtimes/neurun/src/compiler/Plan.h
deleted file mode 100644
index f2a526e0e..000000000
--- a/runtimes/neurun/src/compiler/Plan.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_CODEGEN_PLAN_H__
-#define __NEURUN_CODEGEN_PLAN_H__
-
-#include "graph/Graph.h"
-#include "compiler/operand/Context.h"
-#include "compiler/operation/Sequence.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-enum class State
-{
- NONE, // Initial state
- LOWERED, // Backend is decided
-  LINEARIZED,  // Everything is moved to the Linear object, so this Graph object is no longer effective
-  COMPILED,    // Compilation succeeded
- NOT_COMPILED // Not compiled by environment or graph status
-};
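-
-// Typical transitions (illustrative): NONE -> LOWERED -> LINEARIZED -> COMPILED,
-// or NONE -> NOT_COMPILED when the DISABLE_COMPILE config is set.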
-
-class Plan
-{
-public:
- Plan(const std::shared_ptr<neurun::graph::Graph> &model) : _model(model), _state(State::NONE)
- {
- // DO NOTHING
- }
-
-public:
- neurun::graph::Graph &model(void) { return *_model; }
- const neurun::graph::Graph &model(void) const { return *_model; }
-
- void state(State state) { _state = state; }
- State state(void) const { return _state; }
-
-public:
- operand::Context &operands(void) { return _operands; }
- const operand::Context &operands(void) const { return _operands; }
-
-public:
- operation::Sequence &operations(void) { return _ops; }
- const operation::Sequence &operations(void) const { return _ops; }
-
-private:
- std::shared_ptr<neurun::graph::Graph> _model;
- operand::Context _operands;
- operation::Sequence _ops;
- State _state;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_CODEGEN_PLAN_H__
diff --git a/runtimes/neurun/src/compiler/PlanBuilder.cc b/runtimes/neurun/src/compiler/PlanBuilder.cc
deleted file mode 100644
index 8ef3fedbf..000000000
--- a/runtimes/neurun/src/compiler/PlanBuilder.cc
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "PlanBuilder.h"
-
-#include "backend/interface/operand/IObject.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-void PlanBuilder::addStage(const Stage &stage) { _stages.emplace_back(stage); }
-
-void PlanBuilder::finalize(const backend::TensorBuilderSet &tensor_builders)
-{
- auto &operands = _plan.operands();
-
- // Prepare tensors
- for (auto &tensor_builder : tensor_builders)
- {
- tensor_builder->prepare();
-
- // Wrap tensors as Object and store them to plan
- tensor_builder->iterate([&](const model::operand::Index &index) {
- auto object = tensor_builder->wrapTensor(index);
- operands.set(index, object);
- });
- }
-
- // Process Stage
- ExecutionBuilder execution_builder{_plan};
-
- for (const auto &stage : _stages)
- {
- stage(execution_builder);
- }
-
- // Allocate Tensor Memory for cl_tensors
- for (auto &tensor_builder : tensor_builders)
- {
- tensor_builder->allocate();
- }
-}
-
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/PlanBuilder.h b/runtimes/neurun/src/compiler/PlanBuilder.h
deleted file mode 100644
index 3231906d2..000000000
--- a/runtimes/neurun/src/compiler/PlanBuilder.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_COMPILER_PLAN_BUILDER_H__
-#define __NEURUN_COMPILER_PLAN_BUILDER_H__
-
-#include "Plan.h"
-#include "backend/interface/IStageGenerator.h"
-#include "backend/interface/ITensorBuilder.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-class ExecutionBuilder final : public IExecutionBuilder
-{
-public:
- ExecutionBuilder(Plan &plan) : _plan{plan}
- {
- // DO NOTHING
- }
-
-public:
- void append(std::unique_ptr<::neurun::exec::IFunction> &&f) override
- {
- _plan.operations().append(std::move(f));
- }
-
-private:
- Plan &_plan;
-};
-
-class PlanBuilder
-{
-public:
- PlanBuilder(Plan &plan) : _plan{plan}
- {
- // DO NOTHING
- }
-
-public:
- void addStage(const Stage &stage);
-
-public:
- // TODO Remove the argument `tensor_builders`
- void finalize(const backend::TensorBuilderSet &tensor_builders);
-
-private:
- Plan &_plan;
-
-private:
- std::vector<Stage> _stages;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_PLAN_BUILDER_H__
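For reference, the PlanBuilder removed above was meant to be driven as in the following minimal sketch; plan, stage, and tensor_builders are placeholder names, not a verbatim call site from this tree:

    neurun::compiler::Plan plan{model};                 // hypothetical; owns operands/operations
    neurun::compiler::PlanBuilder plan_builder{plan};

    // One Stage per lowered operation, produced by a backend IStageGenerator
    plan_builder.addStage(stage);

    // finalize() prepares and wraps tensors, replays every stage into an
    // ExecutionBuilder, then allocates tensor memory
    plan_builder.finalize(tensor_builders);             // backend::TensorBuilderSet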
diff --git a/runtimes/neurun/src/compiler/SubTensorAnalyzer.cc b/runtimes/neurun/src/compiler/SubTensorAnalyzer.cc
deleted file mode 100644
index 0851b7991..000000000
--- a/runtimes/neurun/src/compiler/SubTensorAnalyzer.cc
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "SubTensorAnalyzer.h"
-
-#include <typeinfo>
-
-#include "cpp14/memory.h"
-#include "model/operand/Set.h"
-#include "graph/operation/LowerInfo.h"
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-void SubTensorAnalyzer::visit(const model::operation::ConcatNode &node)
-{
-  // If the operator is concat (or another operator involved in subsumption), fill in subsumption info
-  // TODO: Handle the case where one tensor is a subset of multiple parents or of a model input
-  //       Solution 1. Handle the 1st parent only and ignore the others (need to invert for the other children)
-  //       Solution 2. Insert a copy operation for the other parents
- auto axis_index = node.param().axis_index;
-
- // To prepare concat elimination, axis should be constant
- if (_ctx.at(axis_index).getUsage() != model::operand::OperandUsage::CONSTANT)
- {
- VERBOSE(SUBTENSOR) << "Cannot handle non-constant axis" << std::endl;
- return;
- }
-
- // NOTE This implementation assumes concat over feature depth
- // TODO Remove this assumption
- int32_t axis = _ctx.at(axis_index).asScalar<int32_t>();
- if (axis != 3)
- {
-    VERBOSE(SUBTENSOR) << "Cannot handle an axis other than channel" << std::endl;
- return;
- }
-
- auto &output_index = node.getOutputs().at(0);
- auto &inputs = node.getInputs();
-
- int32_t axis_point = 0;
- for (auto &input_index : inputs)
- {
- auto input_shape_4D = _ctx.at(input_index).lower_info()->shape();
- std::vector<int32_t> offset = {0, 0, 0, 0};
- offset[axis] = axis_point;
- neurun::util::feature::Coordinate4D coordinate_info(offset[0], offset[1], offset[2], offset[3]);
- std::unique_ptr<graph::operand::ParentInfo> parentInfo =
- nnfw::cpp14::make_unique<graph::operand::ParentInfo>(output_index, coordinate_info);
-
-    // NOTE Multiple parent tensors are not supported yet
- assert(_ctx.at(input_index).parent_info() == nullptr);
- _ctx.at(input_index).parent_info(std::move(parentInfo));
-
-    // NOTE Only supported when the axis is 3 (channel)
- axis_point += input_shape_4D.c();
- }
-}
-
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/SubTensorAnalyzer.h b/runtimes/neurun/src/compiler/SubTensorAnalyzer.h
deleted file mode 100644
index ddfd10263..000000000
--- a/runtimes/neurun/src/compiler/SubTensorAnalyzer.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file SubTensorAnalyzer.h
- * @brief This file contains SubTensorAnalyzer to analyze tensor subsumption
- *        using an operation visitor
- */
-
-#ifndef __NEURUN_COMPILER_SUBTENSOR_ANALYZER_H__
-#define __NEURUN_COMPILER_SUBTENSOR_ANALYZER_H__
-
-#include "model/operation/NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-class Set;
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-namespace neurun
-{
-namespace compiler
-{
-
-/**
- * @brief Class to analyze tensor subsumption
- */
-class SubTensorAnalyzer : public model::operation::NodeVisitor
-{
-public:
- /**
- * @brief Construct a new SubTensorAnalyzer object
- * @param[in] ctx Graph operand set
- */
- SubTensorAnalyzer(neurun::model::operand::Set &ctx) : _ctx{ctx} {}
-
-public:
- virtual void visit(const model::operation::ConcatNode &) override;
-
-private:
- neurun::model::operand::Set &_ctx;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_SUBTENSOR_ANALYZER_H__
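As a visitor, the removed SubTensorAnalyzer is applied by iterating the graph's operations; a sketch assuming Node exposes the usual accept(NodeVisitor &) entry point (an assumption here, since the accept signature is not shown in this patch) and mirroring the iterate signature used in DotDumper.cc further below:

    neurun::compiler::SubTensorAnalyzer analyzer{graph.operands()};
    graph.operations().iterate(
        [&](const neurun::model::operation::Index &, const neurun::model::operation::Node &node) {
          node.accept(analyzer); // records ParentInfo on each concat input along axis 3
        });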
diff --git a/runtimes/neurun/src/compiler/SubTensorInfo.h b/runtimes/neurun/src/compiler/SubTensorInfo.h
deleted file mode 100644
index c0fb857d0..000000000
--- a/runtimes/neurun/src/compiler/SubTensorInfo.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file SubTensorInfo.h
- * @brief This file contains SubTensorInfo to represent subsumption between tensors
- * for backend tensor allocation
- */
-#ifndef __NEURUN_COMPILER_SUBTENSOR_INFO_H__
-#define __NEURUN_COMPILER_SUBTENSOR_INFO_H__
-
-#include "model/operand/Object.h"
-#include "util/feature/Coordinate4D.h"
-
-namespace neurun
-{
-namespace compiler
-{
-
-/**
- * @brief Class to represent information of subtensor
- */
-class SubTensorInfo
-{
-public:
- SubTensorInfo() = delete;
-
- /**
- * @brief Construct a new SubTensorInfo object
- * @param[in] obj SubTensor object
- */
- SubTensorInfo(const model::operand::Object &obj)
- : _parent{obj.parent_info()->parent()}, _shape{obj.shape()}, _type{obj.typeInfo()},
- _offset{obj.parent_info()->offset()}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Return parent tensor index
- * @return Parent tensor index
- */
- const model::operand::Index parent(void) const { return _parent; }
- /**
- * @brief Return tensor shape
- * @return Tensor shape
- */
- const model::operand::Shape shape(void) const { return _shape; }
- /**
- * @brief Return tensor type
- * @return Tensor type
- */
- const model::operand::TypeInfo type(void) const { return _type; }
- /**
- * @brief Return tensor's offset in parent tensor
- * @return Tensor offset
- */
- const neurun::util::feature::Coordinate4D offset(void) const { return _offset; }
-
-private:
- const model::operand::Index _parent;
- const model::operand::Shape _shape;
- const model::operand::TypeInfo _type;
- const neurun::util::feature::Coordinate4D _offset;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_SUBTENSOR_INFO_H__
diff --git a/runtimes/neurun/src/compiler/TensorInfo.h b/runtimes/neurun/src/compiler/TensorInfo.h
deleted file mode 100644
index 787c433e5..000000000
--- a/runtimes/neurun/src/compiler/TensorInfo.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_COMPILER_TENSOR_INFO_H__
-#define __NEURUN_COMPILER_TENSOR_INFO_H__
-
-#include "model/operand/Shape.h"
-#include "model/operand/TypeInfo.h"
-
-#include <numeric>
-
-namespace neurun
-{
-namespace compiler
-{
-
-class TensorInfo
-{
-public:
- TensorInfo() = default;
-
-public:
- TensorInfo(const ::neurun::model::operand::Shape &shape,
- const ::neurun::model::operand::TypeInfo &typeInfo)
- : _shape(shape), _typeInfo(typeInfo)
- {
- // DO NOTHING
- }
-
-public:
- const ::neurun::model::operand::Shape &shape() const { return _shape; }
- const ::neurun::model::operand::TypeInfo &typeInfo() const { return _typeInfo; }
- size_t total_size() const
- {
- const auto &dims = _shape.dims();
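-    // NOTE The seed 4 is the element size in bytes: total_size() assumes 4-byte (e.g. FLOAT32) elements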
- return std::accumulate(dims.begin(), dims.end(), 4, std::multiplies<size_t>());
- }
-
-private:
- ::neurun::model::operand::Shape _shape;
- ::neurun::model::operand::TypeInfo _typeInfo;
-};
-
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_TENSOR_INFO_H__
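total_size() multiplies every dimension into that 4-byte seed; a worked example for a hypothetical 1x3x2 FLOAT32 shape, with shape and float32_type as placeholders:

    // 1 * 3 * 2 elements * 4 bytes = 24 bytes
    neurun::compiler::TensorInfo info{shape, float32_type};
    assert(info.total_size() == 24);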
diff --git a/runtimes/neurun/src/compiler/operand/Context.cc b/runtimes/neurun/src/compiler/operand/Context.cc
deleted file mode 100644
index 3fa529995..000000000
--- a/runtimes/neurun/src/compiler/operand/Context.cc
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Context.h"
-
-#include <cassert>
-
-namespace neurun
-{
-namespace compiler
-{
-namespace operand
-{
-
-Context &Context::set(const model::operand::Index &id,
- const std::shared_ptr<backend::operand::IObject> &object)
-{
- // Only one object for an id
- assert(_objects.find(id.value()) == _objects.end());
- _objects[id.value()] = object;
- return (*this);
-}
-
-void Context::iterate(const std::function<void(int, backend::operand::IObject &)> &fn)
-{
- for (auto &e : _objects)
- {
- fn(e.first, *e.second);
- }
-}
-
-} // namespace operand
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/operand/Context.h b/runtimes/neurun/src/compiler/operand/Context.h
deleted file mode 100644
index bc558404e..000000000
--- a/runtimes/neurun/src/compiler/operand/Context.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_COMPILER_OPERAND_CONTEXT_H__
-#define __NEURUN_COMPILER_OPERAND_CONTEXT_H__
-
-#include "backend/interface/operand/IObject.h"
-#include "model/operand/Index.h"
-
-#include <map>
-#include <memory>
-
-namespace neurun
-{
-namespace compiler
-{
-namespace operand
-{
-
-class Context
-{
-public:
- Context &set(const model::operand::Index &ind,
- const std::shared_ptr<backend::operand::IObject> &object);
-
-public:
- bool exist(const ::neurun::model::operand::Index &ind) const
- {
- return _objects.find(ind.asInt()) != _objects.end();
- }
-
-public:
- std::shared_ptr<backend::operand::IObject> at(const model::operand::Index &ind) const
- {
- return _objects.at(ind.asInt());
- }
-
- std::shared_ptr<backend::operand::IObject> &at(const model::operand::Index &ind)
- {
- return _objects.at(ind.asInt());
- }
-
- void iterate(const std::function<void(int, backend::operand::IObject &)> &fn);
-
-private:
- std::map<int, std::shared_ptr<backend::operand::IObject>> _objects;
-};
-
-} // namespace operand
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_OPERAND_CONTEXT_H__
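A minimal sketch of how the removed operand Context is populated and consumed, with index and object as placeholders:

    neurun::compiler::operand::Context ctx;
    ctx.set(index, object); // exactly one IObject per operand index (asserted in Context.cc)

    if (ctx.exist(index))
    {
      auto obj = ctx.at(index); // std::shared_ptr<backend::operand::IObject>
    }

    ctx.iterate([](int id, neurun::backend::operand::IObject &obj) {
      // visits every registered operand in index order (std::map)
    });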
diff --git a/runtimes/neurun/src/compiler/operation/Sequence.cc b/runtimes/neurun/src/compiler/operation/Sequence.cc
deleted file mode 100644
index 3160e04b6..000000000
--- a/runtimes/neurun/src/compiler/operation/Sequence.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Sequence.h"
-
-namespace neurun
-{
-namespace compiler
-{
-namespace operation
-{
-
-// NO IMPLEMENTATION YET
-
-} // namespace operation
-} // namespace compiler
-} // namespace neurun
diff --git a/runtimes/neurun/src/compiler/operation/Sequence.h b/runtimes/neurun/src/compiler/operation/Sequence.h
deleted file mode 100644
index d69cfcfe3..000000000
--- a/runtimes/neurun/src/compiler/operation/Sequence.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_COMPILER_OPERATION_SEQUENCE_H__
-#define __NEURUN_COMPILER_OPERATION_SEQUENCE_H__
-#include <stdint.h>
-#include "exec/interface/IFunction.h"
-#include <memory>
-#include <vector>
-
-namespace neurun
-{
-namespace compiler
-{
-namespace operation
-{
-
-class Sequence
-{
-public:
- uint32_t size(void) const { return _functions.size(); }
-
-public:
- Sequence &append(std::unique_ptr<::neurun::exec::IFunction> &&func)
- {
- _functions.emplace_back(std::move(func));
- return (*this);
- }
-
-public:
- ::neurun::exec::IFunction &at(uint32_t n) const { return *(_functions.at(n)); }
-
-private:
- std::vector<std::unique_ptr<::neurun::exec::IFunction>> _functions;
-};
-
-} // namespace operation
-} // namespace compiler
-} // namespace neurun
-
-#endif // __NEURUN_COMPILER_OPERATION_SEQUENCE_H__
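Usage of the removed Sequence is a straightforward append-then-run loop; fn is a placeholder std::unique_ptr<neurun::exec::IFunction>:

    neurun::compiler::operation::Sequence seq;
    seq.append(std::move(fn)); // returns *this, so appends can be chained

    for (uint32_t n = 0; n < seq.size(); ++n)
    {
      seq.at(n).run(); // execute in insertion order
    }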
diff --git a/runtimes/neurun/src/dumper/dot/DotBuilder.cc b/runtimes/neurun/src/dumper/dot/DotBuilder.cc
deleted file mode 100644
index d694323b4..000000000
--- a/runtimes/neurun/src/dumper/dot/DotBuilder.cc
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "DotBuilder.h"
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-// NodeAttr
-NodeAttr &NodeAttr::addAttr(const std::string &name, const std::string &attr)
-{
- _attrs.emplace_back(name, attr);
-
- return *this;
-}
-
-void NodeAttr::finish()
-{
- _attr_stream << "[";
-  for (const auto &attr : _attrs)
- {
- _attr_stream << attr.first << "="
- << "\"" << attr.second << "\" ";
- }
- _attr_stream << "];\n";
-}
-
-// DotDumper
-DotBuilder::DotBuilder() {}
-
-void DotBuilder::update(const IDotInfo &node_info)
-{
- addNode(node_info);
-  for (const auto &child : node_info.children())
- {
- addEdge(node_info, *child);
- }
-}
-
-void DotBuilder::writeDot(std::ostream &os)
-{
- os << "digraph D {\n"
- << _dot.str() << "\n"
- << "}\n";
-}
-
-void DotBuilder::addNode(const IDotInfo &dotinfo)
-{
- NodeAttr attr;
- attr.addAttr("shape", dotinfo.dot_shape())
- .addAttr("label", dotinfo.label())
- .addAttr("style", "filled")
- .addAttr("colorscheme", dotinfo.bg_color_scheme())
- .addAttr("fillcolor", dotinfo.bg_color());
-
- attr.finish();
-
- _dot << dotinfo.index_str() << attr.attr_stream();
-}
-
-void DotBuilder::addEdge(const IDotInfo &dotinfo1, const IDotInfo &dotinfo2)
-{
- _dot << dotinfo1.index_str() << " -> " << dotinfo2.index_str() << ";\n";
-}
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
diff --git a/runtimes/neurun/src/dumper/dot/DotBuilder.h b/runtimes/neurun/src/dumper/dot/DotBuilder.h
deleted file mode 100644
index 783e92b80..000000000
--- a/runtimes/neurun/src/dumper/dot/DotBuilder.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_DUMPER_DOT_DOT_BUILDER_H__
-#define __NEURUN_DUMPER_DOT_DOT_BUILDER_H__
-
-#include <sstream>
-
-#include "model/operation/Index.h"
-#include "model/operand/Index.h"
-
-#include "model/operation/Node.h"
-#include "model/operand/Object.h"
-
-#include "DotNodeInfo.h"
-#include "DotOperandInfo.h"
-
-using Node = neurun::model::operation::Node;
-using Object = neurun::model::operand::Object;
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-class NodeAttr
-{
-public:
- NodeAttr() = default;
-
-public:
- void finish();
- NodeAttr &addAttr(const std::string &name, const std::string &attr);
-
-public:
- std::stringbuf *attr_stream() { return _attr_stream.rdbuf(); }
-
-private:
- std::vector<std::pair<std::string, std::string>> _attrs;
- std::stringstream _attr_stream;
-};
-
-class DotBuilder
-{
-public:
- DotBuilder();
-
-public:
- void update(const IDotInfo &dotinfo);
-
- void writeDot(std::ostream &os);
-
-private:
- void addNode(const IDotInfo &dotinfo);
- void addEdge(const IDotInfo &dotinfo1, const IDotInfo &dotinfo2);
-
- std::stringstream _dot;
-};
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
-
-#endif // __NEURUN_DUMPER_DOT_DOT_BUILDER_H__
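A sketch of the removed DotBuilder's intended flow, assuming node_info and operand_info are IDotInfo instances built as in DotDumper.cc below:

    neurun::dumper::dot::DotBuilder builder;
    builder.update(node_info);    // adds the node plus one edge per appended child
    builder.update(operand_info);
    builder.writeDot(std::cout);  // prints "digraph D { ... }"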
diff --git a/runtimes/neurun/src/dumper/dot/DotDumper.cc b/runtimes/neurun/src/dumper/dot/DotDumper.cc
deleted file mode 100644
index 1e53ece19..000000000
--- a/runtimes/neurun/src/dumper/dot/DotDumper.cc
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <fstream>
-
-#include "DotDumper.h"
-#include "DotBuilder.h"
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-using namespace neurun::graph;
-
-void DotDumper::dumpIfNeeded(const std::string &tag)
-{
- if (_option == OPTIONS::DUMP_OFF)
- {
- return;
- }
- neurun::dumper::dot::DotBuilder dot_builder;
-
- auto &operations = _graph.operations();
- auto &operands = _graph.operands();
-
- operations.iterate([&](const model::operation::Index &index, const model::operation::Node &node) {
- neurun::dumper::dot::DotNodeInfo node_info(_graph, index, node);
-
- for (auto output : node.getOutputs())
- {
- using neurun::dumper::dot::DotOperandInfo;
- auto child = std::make_shared<DotOperandInfo>(output, operands.at(output),
- DotOperandInfo::Type::MODEL_OUTPUT);
- node_info.appendChild(child);
- }
-
- dot_builder.update(node_info);
- });
-
- operands.iterate([&](const model::operand::Index &index, const model::operand::Object &object) {
- bool showing_cond = false;
- auto usage = object.getUsage();
- if (_option == OPTIONS::SHOW_CONSTANTS)
- {
- showing_cond = object.usageIsDefined();
- }
- else
- {
- showing_cond = (usage == model::operand::OperandUsage::MODEL_INPUT) ||
- (usage == model::operand::OperandUsage::OPERATION_OUTPUT);
- }
- if (usage != model::operand::OperandUsage::OPERATION_OUTPUT)
- {
- showing_cond = showing_cond && (object.getUses().size() > 0);
- }
- if (showing_cond)
- {
- auto type = [&]() {
- using neurun::dumper::dot::DotOperandInfo;
- if (_graph.getInputs().contains(index))
- return DotOperandInfo::Type::MODEL_INPUT;
- if (_graph.getOutputs().contains(index))
- return DotOperandInfo::Type::MODEL_OUTPUT;
- return DotOperandInfo::Type::INTERNAL;
- }();
-
- neurun::dumper::dot::DotOperandInfo operand_info(index, object, type);
-
- for (auto operation_index : object.getUses().list())
- {
- auto &node = operations.at(operation_index);
- auto child =
- std::make_shared<neurun::dumper::dot::DotNodeInfo>(_graph, operation_index, node);
- operand_info.appendChild(child);
- }
-
- dot_builder.update(operand_info);
- }
- });
-
- // Dump to file
- {
- std::string file_name;
- file_name += tag;
- file_name += ".dot";
- std::filebuf fb;
-
- fb.open(file_name, std::ios::out);
- std::ostream os(&fb);
-
- dot_builder.writeDot(os);
-
- fb.close();
- }
-}
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
diff --git a/runtimes/neurun/src/dumper/dot/DotDumper.h b/runtimes/neurun/src/dumper/dot/DotDumper.h
deleted file mode 100644
index 0c0a9b8df..000000000
--- a/runtimes/neurun/src/dumper/dot/DotDumper.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "graph/Graph.h"
-#include "util/config/ConfigManager.h"
-
-#ifndef __NEURUN_DUMPER_DOT_DOT_DUMPER_H__
-#define __NEURUN_DUMPER_DOT_DOT_DUMPER_H__
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-enum OPTIONS
-{
- DUMP_OFF = 0, // Don't dump
- DEFAULT = 1, // Show default dot graph
- SHOW_CONSTANTS // Show dot graph with input constants
-};
-
-class DotDumper
-{
-public:
- DotDumper(const neurun::graph::Graph &graph) : _graph(graph)
- {
- _option = config::ConfigManager::instance().get<int>("GRAPH_DOT_DUMP");
- }
-
-public:
- /**
-  * @brief Dump the graph to a dot file named after the tag if "GRAPH_DOT_DUMP" is set
-  *
-  * @param[in] tag The name of the dot file to be created
- * @return N/A
- */
- void dumpIfNeeded(const std::string &tag);
-
-private:
- const neurun::graph::Graph &_graph;
- uint32_t _option;
-};
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
-
-#endif // __NEURUN_DUMPER_DOT_DOT_DUMPER_H__
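The removed dumper was switched on via the "GRAPH_DOT_DUMP" config key, matching the OPTIONS enum above (0 off, 1 default, 2 with constants); a sketch with a hypothetical tag name:

    // e.g. run with GRAPH_DOT_DUMP=2 in the environment/config
    neurun::dumper::dot::DotDumper dumper{graph};
    dumper.dumpIfNeeded("after_lowering"); // writes after_lowering.dot, or nothing when DUMP_OFF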
diff --git a/runtimes/neurun/src/dumper/dot/DotNodeInfo.cc b/runtimes/neurun/src/dumper/dot/DotNodeInfo.cc
deleted file mode 100644
index aefe12e2a..000000000
--- a/runtimes/neurun/src/dumper/dot/DotNodeInfo.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <sstream>
-
-#include "DotNodeInfo.h"
-#include "graph/Graph.h"
-#include "graph/operation/LowerInfo.h"
-#include "backend/interface/IConfig.h"
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-const std::string DotNodeInfo::NODE_SHAPE = "rect";
-const std::string DotNodeInfo::BG_COLOR_SCHEME = "pastel18";
-// RED BLUE ORANGE YELLOW GREEN PURPLE CYAN PINK
-const std::string DotNodeInfo::BG_COLORS[8] = {"1", "2", "5", "6", "3", "4", "7", "8"};
-
-DotNodeInfo::DotNodeInfo(const neurun::graph::Graph &graph,
- const neurun::model::operation::Index &index,
- const neurun::model::operation::Node &node)
- : _index(index), _node(node), _lower_info(graph.getLowerInfo(index))
-{
- addBackendLabel();
-}
-
-std::string DotNodeInfo::index_str() const
-{
- std::stringstream ss;
- ss << "node" << _index.value();
-
- return ss.str();
-}
-
-std::string DotNodeInfo::label() const
-{
- std::stringstream ss;
- ss << _index.value() << " : " << _node.getName() << std::endl;
-  for (const auto &label : _labels)
- {
- ss << label << std::endl;
- }
-
- return ss.str();
-}
-
-std::string DotNodeInfo::dot_shape() const { return NODE_SHAPE; }
-
-std::string DotNodeInfo::bg_color_scheme() const { return BG_COLOR_SCHEME; }
-
-std::string DotNodeInfo::bg_color() const
-{
- if (!_lower_info)
- return DEFAULT_BG_COLOR;
- assert(_lower_info != nullptr);
- const auto &backend = _lower_info->backend();
- assert(backend != nullptr);
-
- std::string backend_id = backend->config()->id();
-  // TODO: This is just a workaround; it can be made more efficient.
- if (backend_id == "acl_cl")
- {
- return BG_COLORS[RED];
- }
- else if (backend_id == "cpu")
- {
- return BG_COLORS[BLUE];
- }
- else
- {
- return DEFAULT_BG_COLOR;
- }
-}
-
-void DotNodeInfo::addBackendLabel()
-{
- if (!_lower_info)
- return;
-
- std::string label;
- const auto &backend = _lower_info->backend();
- assert(backend != nullptr);
-
- label += "[Backend] : ";
- label += backend->config()->id();
- _labels.emplace_back(label);
-}
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
diff --git a/runtimes/neurun/src/dumper/dot/DotNodeInfo.h b/runtimes/neurun/src/dumper/dot/DotNodeInfo.h
deleted file mode 100644
index 656a05af6..000000000
--- a/runtimes/neurun/src/dumper/dot/DotNodeInfo.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_DUMPER_DOT_DOT_NODE_INFO_H__
-#define __NEURUN_DUMPER_DOT_DOT_NODE_INFO_H__
-
-#include "IDotInfo.h"
-#include "model/operation/Node.h"
-#include "model/operation/Index.h"
-
-namespace neurun
-{
-namespace graph
-{
-class Graph;
-} // namespace graph
-} // namespace neurun
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-class DotNodeInfo : public IDotInfo
-{
-public:
- static const std::string NODE_SHAPE;
- static const std::string BG_COLOR_SCHEME;
- static const std::string BG_COLORS[8];
-
-public:
- DotNodeInfo(const neurun::graph::Graph &graph, const neurun::model::operation::Index &index,
- const neurun::model::operation::Node &node);
-
-public:
- virtual std::string index_str() const override;
- virtual std::string label() const override;
- virtual std::string dot_shape() const override;
- virtual std::string bg_color_scheme() const override;
- virtual std::string bg_color() const override;
-
-private:
- void addBackendLabel();
-
-private:
- neurun::model::operation::Index _index;
- const neurun::model::operation::Node &_node;
- const neurun::graph::operation::LowerInfo *_lower_info;
- std::vector<std::string> _labels;
-};
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
-
-#endif // __NEURUN_DUMPER_DOT_DOT_NODE_INFO_H__
diff --git a/runtimes/neurun/src/dumper/dot/DotOperandInfo.cc b/runtimes/neurun/src/dumper/dot/DotOperandInfo.cc
deleted file mode 100644
index 8f5905020..000000000
--- a/runtimes/neurun/src/dumper/dot/DotOperandInfo.cc
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <sstream>
-
-#include "DotOperandInfo.h"
-#include "graph/operand/LowerInfo.h"
-#include "backend/interface/IConfig.h"
-#include "backend/BackendManager.h"
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-const std::string DotOperandInfo::INPUT_SHAPE = "doublecircle";
-const std::string DotOperandInfo::OUTPUT_SHAPE = "doublecircle";
-const std::string DotOperandInfo::OPERAND_SHAPE = "ellipse";
-const std::string DotOperandInfo::BG_COLOR_SCHEME = "set38";
-// RED BLUE ORANGE YELLOW GREEN PURPLE CYAN PINK
-const std::string DotOperandInfo::BG_COLORS[8] = {"4", "5", "6", "2", "7", "3", "1", "8"};
-
-DotOperandInfo::DotOperandInfo(const neurun::model::operand::Index &index,
- const neurun::model::operand::Object &object, Type type)
- : _index(index), _object(object), _type(type)
-{
- const auto &lower_info = object.lower_info();
- if (lower_info)
- {
- addBackendLabel();
- }
-}
-
-std::string DotOperandInfo::index_str() const
-{
- std::stringstream ss;
- ss << "obj" << _index.value();
-
- return ss.str();
-}
-
-std::string DotOperandInfo::label() const
-{
- std::stringstream ss;
- ss << _index.value() << std::endl;
-  for (const auto &label : _labels)
- {
- ss << label << std::endl;
- }
-
- return ss.str();
-}
-
-std::string DotOperandInfo::dot_shape() const
-{
- switch (_type)
- {
- case Type::MODEL_INPUT:
- return INPUT_SHAPE;
-
- case Type::MODEL_OUTPUT:
- return OUTPUT_SHAPE;
-
- case Type::UNDEFINED:
- case Type::INTERNAL:
- default:
- return OPERAND_SHAPE;
- }
-}
-
-std::string DotOperandInfo::bg_color_scheme() const { return BG_COLOR_SCHEME; }
-
-std::string DotOperandInfo::bg_color() const
-{
- const auto &lower_info = _object.lower_info();
- if (!lower_info)
- return DEFAULT_BG_COLOR;
- assert(lower_info != nullptr);
- const auto &def_backends = lower_info->def_backends();
- assert(def_backends.size() == 1);
-
- std::string backend_id = def_backends.getOnlyElement()->config()->id();
-  // TODO: This is just a workaround; it can be made more efficient.
- if (backend_id == "acl_cl")
- {
- return BG_COLORS[RED];
- }
- else if (backend_id == "cpu")
- {
- return BG_COLORS[BLUE];
- }
- else
- {
- return DEFAULT_BG_COLOR;
- }
-}
-
-void DotOperandInfo::addBackendLabel()
-{
- std::string label;
- const auto &lower_info = _object.lower_info();
- assert(lower_info != nullptr);
- const auto &def_backends = lower_info->def_backends();
- assert(def_backends.size() == 1);
-
- label += "[";
- label += def_backends.getOnlyElement()->config()->id();
- label += "]";
- _labels.emplace_back(label);
-}
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
diff --git a/runtimes/neurun/src/dumper/dot/DotOperandInfo.h b/runtimes/neurun/src/dumper/dot/DotOperandInfo.h
deleted file mode 100644
index c54da444d..000000000
--- a/runtimes/neurun/src/dumper/dot/DotOperandInfo.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_DUMPER_DOT_DOT_OPERAND_INFO_H__
-#define __NEURUN_DUMPER_DOT_DOT_OPERAND_INFO_H__
-
-#include <vector>
-
-#include "IDotInfo.h"
-#include "model/operand/Object.h"
-#include "model/operand/Index.h"
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-class DotOperandInfo : public IDotInfo
-{
-public:
- enum class Type
- {
- UNDEFINED,
- MODEL_INPUT,
- MODEL_OUTPUT,
- INTERNAL
- };
-
-public:
- static const std::string INPUT_SHAPE;
- static const std::string OUTPUT_SHAPE;
- static const std::string OPERAND_SHAPE;
- static const std::string BG_COLOR_SCHEME;
- static const std::string BG_COLORS[8];
-
-public:
- DotOperandInfo(const neurun::model::operand::Index &index,
- const neurun::model::operand::Object &object, Type type);
-
-public:
- virtual std::string index_str() const override;
- virtual std::string label() const override;
- virtual std::string dot_shape() const override;
- virtual std::string bg_color_scheme() const override;
- virtual std::string bg_color() const override;
-
-private:
- void addBackendLabel();
-
-private:
- const neurun::model::operand::Index &_index;
- const neurun::model::operand::Object &_object;
- Type _type;
-
- std::vector<std::string> _labels;
-};
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
-
-#endif // __NEURUN_DUMPER_DOT_DOT_OPERAND_INFO_H__
diff --git a/runtimes/neurun/src/dumper/dot/IDotInfo.h b/runtimes/neurun/src/dumper/dot/IDotInfo.h
deleted file mode 100644
index d507e724a..000000000
--- a/runtimes/neurun/src/dumper/dot/IDotInfo.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_DUMPER_DOT_IDOTINFO_H__
-#define __NEURUN_DUMPER_DOT_IDOTINFO_H__
-
-#include <string>
-#include <memory>
-#include <vector>
-
-namespace neurun
-{
-namespace dumper
-{
-namespace dot
-{
-
-#define DEFAULT_BG_COLOR_SCHEME "x11"
-#define DEFAULT_BG_COLOR "white"
-
-enum BGCOLORS : int
-{
- RED,
- BLUE,
- ORANGE,
- YELLOW,
- GREEN,
-  PURPLE,
- CYAN,
- PINK
-};
-
-struct IDotInfo
-{
- virtual ~IDotInfo() = default;
-
- virtual std::string index_str() const = 0;
- virtual std::string label() const = 0;
- virtual std::string dot_shape() const = 0;
- virtual std::string bg_color_scheme() const { return DEFAULT_BG_COLOR_SCHEME; }
- virtual std::string bg_color() const { return DEFAULT_BG_COLOR; }
-
- void appendChild(std::shared_ptr<IDotInfo> dotinfo) { _children.emplace_back(dotinfo); }
- const std::vector<std::shared_ptr<IDotInfo>> &children() const { return _children; }
-
-private:
- std::vector<std::shared_ptr<IDotInfo>> _children;
-};
-
-} // namespace dot
-} // namespace dumper
-} // namespace neurun
-
-#endif // __NEURUN_DUMPER_DOT_IDOTINFO_H__
diff --git a/runtimes/neurun/src/exec/Sink.h b/runtimes/neurun/src/exec/Sink.h
deleted file mode 100644
index fe23e8ac3..000000000
--- a/runtimes/neurun/src/exec/Sink.h
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_EXEC_SINK_H__
-#define __NEURUN_EXEC_SINK_H__
-
-#include <cassert>
-#include <cstring>
-#include <stdexcept>
-
-#include "cpp14/memory.h"
-#include "util/feature/nhwc/View.h"
-#include "util/feature/nchw/View.h"
-#include <misc/feature/IndexIterator.h>
-
-namespace neurun
-{
-namespace exec
-{
-
-struct ISink
-{
- virtual ~ISink() = default;
-
- virtual void pull(::neurun::backend::operand::ITensor &tensor) const = 0;
-};
-
-template <typename T> class Sink final : public ISink
-{
-public:
- Sink(T *base, const size_t size) : _base{base}, _size{size} {}
-
-public:
- void pull(::neurun::backend::operand::ITensor &tensor) const override
- {
- memcpy(_base, tensor.buffer(), _size);
- }
-
-private:
- T *const _base;
- const size_t _size;
-};
-
-class PermutateSink final : public ISink
-{
-public:
- PermutateSink(void *output_buffer, const size_t &output_size, const model::operand::Shape &shape)
- : _output_buffer{(uint8_t *)output_buffer}, _output_size{output_size}, _shape{shape}
- {
- }
-
-public:
- void pull(neurun::backend::operand::ITensor &tensor) const override
- {
- // do NCHW_TO_NHWC permutation
- auto input_buffer = tensor.buffer();
- auto rank = _shape.rank();
-
- switch (rank)
- {
- case 0:
- case 1:
- {
- memcpy(_output_buffer, input_buffer, _output_size);
- break;
- }
- case 2:
- {
- auto matrix_shape = _shape.asMatrix();
-
- for (auto h = 0; h < matrix_shape.H; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, 0};
- memcpy(_output_buffer + h * matrix_shape.W, input_buffer + tensor.calcOffset(coord),
- matrix_shape.W * sizeof(float));
- }
- break;
- }
- case 3:
- {
- const int32_t depth = _shape.dim(0);
- const int32_t height = _shape.dim(1);
- const int32_t width = _shape.dim(2);
-
- for (auto c = 0; c < depth; ++c)
- {
- for (auto h = 0; h < height; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, c};
- memcpy(_output_buffer + c * height * width + h * width,
- input_buffer + tensor.calcOffset(coord), width * sizeof(float));
- }
- }
- break;
- }
- case 4:
- {
- auto feature = _shape.asFeature();
-
- const util::feature::nchw::View<float> from{&tensor};
- util::feature::nhwc::View<float> into{feature, reinterpret_cast<float *>(_output_buffer),
- _output_size};
-
- ::nnfw::misc::feature::iterate(feature)
- << [&](uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(batch, ch, row, col);
- into.at(batch, ch, row, col) = value;
- };
- break;
- }
- default:
- throw "NYI";
- break;
- }
- }
-
-private:
- uint8_t *_output_buffer;
- const size_t _output_size;
- const model::operand::Shape _shape;
-};
-
-} // namespace exec
-} // namespace neurun
-
-#endif // __NEURUN_EXEC_SINK_H__
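For the removed sinks, a plain Sink<T> is a byte copy out of the backend tensor, while PermutateSink additionally converts NCHW to NHWC for ranks 2 to 4; a sketch with hypothetical buffer sizes, where tensor is a neurun::backend::operand::ITensor:

    std::vector<float> out(6);
    neurun::exec::Sink<float> sink{out.data(), out.size() * sizeof(float)};
    sink.pull(tensor); // memcpy from tensor.buffer() into out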
diff --git a/runtimes/neurun/src/exec/Source.h b/runtimes/neurun/src/exec/Source.h
deleted file mode 100644
index 169f8b386..000000000
--- a/runtimes/neurun/src/exec/Source.h
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_EXEC_SOURCE_H__
-#define __NEURUN_EXEC_SOURCE_H__
-
-#include <cassert>
-#include <cstring>
-#include <stdexcept>
-
-#include "cpp14/memory.h"
-#include "util/feature/nchw/View.h"
-#include "util/feature/nhwc/Reader.h"
-#include "util/feature/Coordinate4D.h"
-#include <misc/feature/IndexIterator.h>
-
-namespace neurun
-{
-namespace exec
-{
-
-struct ISource
-{
- virtual ~ISource() = default;
-
- virtual void push(::neurun::backend::operand::ITensor &tensor) const = 0;
-};
-
-template <typename T> class Source final : public ISource
-{
-public:
- Source(const T *base, const size_t size) : _base{base}, _size{size} {}
-
-public:
- void push(::neurun::backend::operand::ITensor &tensor) const override
- {
- memcpy(tensor.buffer(), _base, _size);
- }
-
-private:
- const T *const _base;
- const size_t _size;
-};
-
-class PermutateSource final : public ISource
-{
-public:
- PermutateSource(const void *input_buffer, const size_t &input_size,
- const model::operand::Shape &shape)
- : _input_buffer{(uint8_t *)input_buffer}, _input_size{input_size}, _shape{shape}
- {
- }
-
-public:
- void push(neurun::backend::operand::ITensor &tensor) const override
- {
- // do NHWC_TO_NCHW permutation
- auto output_buffer = tensor.buffer();
- auto rank = _shape.rank();
-
- switch (rank)
- {
- case 0:
- case 1:
- {
- memcpy(output_buffer, _input_buffer, _input_size);
- break;
- }
- case 2:
- {
- auto matrix_shape = _shape.asMatrix();
-
- for (auto h = 0; h < matrix_shape.H; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, 0};
- memcpy(output_buffer + tensor.calcOffset(coord), _input_buffer + h * matrix_shape.W,
- matrix_shape.W * sizeof(float));
- }
- break;
- }
- case 3:
- {
- const int32_t depth = _shape.dim(0);
- const int32_t height = _shape.dim(1);
- const int32_t width = _shape.dim(2);
-
- for (auto c = 0; c < depth; ++c)
- {
- for (auto h = 0; h < height; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, c};
- memcpy(output_buffer + tensor.calcOffset(coord),
- _input_buffer + c * height * width + h * width, width * sizeof(float));
- }
- }
- break;
- }
- case 4:
- {
- auto feature = _shape.asFeature();
-
- const util::feature::nhwc::Reader<float> from{
- feature, reinterpret_cast<const float *>(_input_buffer), _input_size};
- util::feature::nchw::View<float> into{&tensor};
-
- ::nnfw::misc::feature::iterate(feature)
- << [&](uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(batch, ch, row, col);
- into.at(batch, ch, row, col) = value;
- };
- break;
- }
- default:
- throw "NYI";
- break;
- }
- }
-
-private:
- const uint8_t *_input_buffer;
- const size_t _input_size;
- const model::operand::Shape _shape;
-};
-
-} // namespace exec
-} // namespace neurun
-
-#endif // __NEURUN_EXEC_SOURCE_H__
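Source is the mirror of Sink in the previous file: push() copies user input into the backend tensor, and PermutateSource performs the NHWC to NCHW layout change; a sketch with hypothetical sizes:

    std::vector<float> in(6, 0.0f);
    neurun::exec::Source<float> source{in.data(), in.size() * sizeof(float)};
    source.push(tensor); // memcpy into tensor.buffer()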
diff --git a/runtimes/neurun/src/exec/interface/IFunction.h b/runtimes/neurun/src/exec/interface/IFunction.h
deleted file mode 100644
index b7a721d1d..000000000
--- a/runtimes/neurun/src/exec/interface/IFunction.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_EXEC_I_FUNCTION_H__
-#define __NEURUN_EXEC_I_FUNCTION_H__
-
-namespace neurun
-{
-namespace exec
-{
-
-class IFunction
-{
-public:
- virtual ~IFunction() = default;
- virtual void run() = 0;
- virtual void prepare() {}
-};
-
-} // namespace exec
-} // namespace neurun
-
-#endif // __NEURUN_EXEC_I_FUNCTION_H__
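Implementing the removed IFunction interface only requires run(); a toy sketch, appended with the nnfw::cpp14::make_unique helper already used in this tree:

    #include <iostream>

    class PrintFunction : public neurun::exec::IFunction
    {
    public:
      void run() override { std::cout << "stage executed\n"; }
      // prepare() has an empty default body; override it only when setup is needed
    };

    seq.append(nnfw::cpp14::make_unique<PrintFunction>()); // e.g. into a compiler::operation::Sequence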
diff --git a/runtimes/neurun/src/frontend/compilation.cc b/runtimes/neurun/src/frontend/compilation.cc
deleted file mode 100644
index 9b0719f46..000000000
--- a/runtimes/neurun/src/frontend/compilation.cc
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <NeuralNetworks.h>
-
-#include <new>
-
-#include "frontend/wrapper/model.h"
-#include "frontend/wrapper/compilation.h"
-
-//
-// NNAPI Implementation
-//
-int ANeuralNetworksCompilation_create(ANeuralNetworksModel *model,
- ANeuralNetworksCompilation **compilation)
-{
- if ((model == nullptr) || (compilation == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- if (!model->isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- std::shared_ptr<neurun::graph::Graph> internal;
-
- model->release(internal);
-
- *compilation = new (std::nothrow) ANeuralNetworksCompilation(internal);
- if (*compilation == nullptr)
- {
- return ANEURALNETWORKS_OUT_OF_MEMORY;
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation *compilation)
-{
- if (compilation == nullptr)
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- return compilation->finish();
-}
-
-void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation *compilation)
-{
- delete compilation;
-}
-
-int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation *compilation,
- int32_t /* preference */)
-{
- if (compilation == nullptr)
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
-  // NYI
- return ANEURALNETWORKS_NO_ERROR;
-}
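For reference, the removed functions above follow the standard NNAPI compilation sequence; model is a placeholder finished ANeuralNetworksModel and error checks are elided:

    ANeuralNetworksCompilation *compilation = nullptr;
    ANeuralNetworksCompilation_create(model, &compilation); // model must be finished first
    ANeuralNetworksCompilation_setPreference(compilation,
                                             ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER); // a no-op here
    ANeuralNetworksCompilation_finish(compilation);
    // ... create and run an execution ...
    ANeuralNetworksCompilation_free(compilation);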
diff --git a/runtimes/neurun/src/frontend/event.cc b/runtimes/neurun/src/frontend/event.cc
deleted file mode 100644
index cd47cc691..000000000
--- a/runtimes/neurun/src/frontend/event.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <NeuralNetworks.h>
-
-#include "frontend/wrapper/event.h"
-
-int ANeuralNetworksEvent_wait(ANeuralNetworksEvent *event)
-{
- if (event == nullptr)
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-void ANeuralNetworksEvent_free(ANeuralNetworksEvent *event) { delete event; }
diff --git a/runtimes/neurun/src/frontend/execution.cc b/runtimes/neurun/src/frontend/execution.cc
deleted file mode 100644
index 5f1729b30..000000000
--- a/runtimes/neurun/src/frontend/execution.cc
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <NeuralNetworks.h>
-
-#include <new>
-
-#include "frontend/wrapper/compilation.h"
-#include "frontend/wrapper/execution.h"
-#include "frontend/wrapper/event.h"
-
-#include "model/operand/DataType.h"
-#include "model/operand/Index.h"
-#include "graph/operand/Layout.h"
-#include "backend/BackendManager.h"
-#include "backend/interface/IConfig.h"
-#include "compiler/BackendResolver.h"
-#include "compiler/TensorInfo.h"
-#include "backend/interface/operand/ITensor.h"
-
-inline void source(ANeuralNetworksExecution *execution,
- const ::neurun::model::operand::DataType &type, int32_t index,
- const void *buffer, size_t length)
-{
- const auto &operands = execution->plan().model().operands();
- neurun::model::operand::IO::Index input_index{index};
-
- const auto operand_index = execution->plan().model().getInputs().at(input_index);
- auto operand = &operands.at(operand_index);
- auto operand_li = operand->lower_info();
- const auto output_backend = operand_li->def_backends().getOnlyElement();
- const auto output_layout = output_backend->config()->getOperandLayout();
- auto input_layout = execution->plan()
- .model()
- .backend_resolver()
- ->getDefaultBackend()
- ->config()
- ->getOperandLayout();
- if (input_layout == neurun::graph::operand::Layout::NHWC &&
- output_layout == neurun::graph::operand::Layout::NCHW)
- {
- const auto tensor_info = neurun::compiler::TensorInfo(operand->shape(), operand->typeInfo());
-
- execution->source<::neurun::exec::PermutateSource>(index, buffer, tensor_info.total_size(),
- operand->shape());
- return;
- }
- using ::neurun::model::operand::DataType;
- switch (type)
- {
- case DataType::SCALAR_FLOAT32:
- case DataType::TENSOR_FLOAT32:
- execution->source<::neurun::exec::Source<float>>(
- index, reinterpret_cast<const float *>(buffer), length);
- break;
- case DataType::SCALAR_INT32:
- case DataType::TENSOR_INT32:
- execution->source<::neurun::exec::Source<int32_t>>(
- index, reinterpret_cast<const int32_t *>(buffer), length);
- break;
- case DataType::SCALAR_UINT32:
- execution->source<::neurun::exec::Source<uint32_t>>(
- index, reinterpret_cast<const uint32_t *>(buffer), length);
- break;
- case DataType::TENSOR_QUANT8_ASYMM:
- execution->source<::neurun::exec::Source<uint8_t>>(
- index, reinterpret_cast<const uint8_t *>(buffer), length);
- break;
- default:
- throw std::runtime_error("Not supported, yet");
- break;
- }
-}
-
-inline void sink(ANeuralNetworksExecution *execution,
- const ::neurun::model::operand::DataType &type, int32_t index, void *buffer,
- size_t length)
-{
- const auto &operands = execution->plan().model().operands();
- neurun::model::operand::IO::Index input_index{index};
-
- const auto operand_index = execution->plan().model().getOutputs().at(input_index);
- auto operand = &operands.at(operand_index);
- auto operand_li = operand->lower_info();
- const auto input_backend = operand_li->def_backends().getOnlyElement();
- const auto input_layout = input_backend->config()->getOperandLayout();
- auto output_layout = execution->plan()
- .model()
- .backend_resolver()
- ->getDefaultBackend()
- ->config()
- ->getOperandLayout();
- if (input_layout == neurun::graph::operand::Layout::NCHW &&
- output_layout == neurun::graph::operand::Layout::NHWC)
- {
- const auto tensor_info = neurun::compiler::TensorInfo(operand->shape(), operand->typeInfo());
-
- execution->sink<::neurun::exec::PermutateSink>(index, buffer, tensor_info.total_size(),
- operand->shape());
- return;
- }
- using ::neurun::model::operand::DataType;
- switch (type)
- {
- case DataType::SCALAR_FLOAT32:
- case DataType::TENSOR_FLOAT32:
- execution->sink<::neurun::exec::Sink<float>>(index, reinterpret_cast<float *>(buffer),
- length);
- break;
- case DataType::SCALAR_INT32:
- case DataType::TENSOR_INT32:
- execution->sink<::neurun::exec::Sink<int32_t>>(index, reinterpret_cast<int32_t *>(buffer),
- length);
- break;
- case DataType::SCALAR_UINT32:
- execution->sink<::neurun::exec::Sink<uint32_t>>(index, reinterpret_cast<uint32_t *>(buffer),
- length);
- break;
- case DataType::TENSOR_QUANT8_ASYMM:
- execution->sink<::neurun::exec::Sink<uint8_t>>(index, reinterpret_cast<uint8_t *>(buffer),
- length);
- break;
- default:
- throw std::runtime_error("Not supported, yet");
- break;
- }
-}
-
-//
-// NNAPI Implementation
-//
-int ANeuralNetworksExecution_create(ANeuralNetworksCompilation *compilation,
- ANeuralNetworksExecution **execution)
-{
- if ((compilation == nullptr) || (execution == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- // Can handle compiled state only
- if (compilation->plan().state() != neurun::compiler::State::COMPILED)
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- std::shared_ptr<const neurun::compiler::Plan> plan;
-
- compilation->publish(plan);
-
- *execution = new (std::nothrow) ANeuralNetworksExecution{plan};
- if (*execution == nullptr)
- {
- return ANEURALNETWORKS_OUT_OF_MEMORY;
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution *execution, int32_t index,
- const ANeuralNetworksOperandType * /* type */,
- const void *buffer, size_t length)
-{
- // Don't check type
- // Comment about ANeuralNetworksOperandType in NeuralNetworks.h:
- // If the input or output is optional and omitted then it need not have a fully specified tensor
- // operand type
- if ((execution == nullptr) || ((buffer == nullptr) && (length != 0)))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- // TODO Handle optional input
- if (buffer == nullptr)
- {
- throw std::runtime_error("Not supported optional input, yet");
- }
-
- const auto &operands = execution->plan().model().operands();
-
- // TODO Check type conflicts
-
- neurun::model::operand::IO::Index input_index{index};
-
- const auto operand_index = execution->plan().model().getInputs().at(input_index);
- const auto data_type = operands.at(operand_index).typeInfo().type();
- const auto operand_shape = operands.at(operand_index).shape();
-
- source(execution, data_type, index, buffer, length);
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution *execution, int32_t index,
- const ANeuralNetworksOperandType * /* type */, void *buffer,
- size_t length)
-{
- // Don't check type
- // Comment about ANeuralNetworksOperandType in NeuralNetworks.h:
- // If the input or output is optional and omitted then it need not have a fully specified tensor
- // operand type
- if ((execution == nullptr) || ((buffer == nullptr) && (length != 0)))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- // Handle optional output
- if (buffer == nullptr)
- {
- return ANEURALNETWORKS_NO_ERROR;
- }
-
- const auto &operands = execution->plan().model().operands();
-
- // TODO Check type conflicts
-
- neurun::model::operand::IO::Index output_index{index};
-
- const auto operand_index = execution->plan().model().getOutputs().at(output_index);
- const auto data_type = operands.at(operand_index).typeInfo().type();
- const auto operand_shape = operands.at(operand_index).shape();
-
- sink(execution, data_type, index, buffer, length);
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution *execution,
- ANeuralNetworksEvent **event)
-{
- if ((execution == nullptr) || (event == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- // TODO: Handle event
- *event = new (std::nothrow) ANeuralNetworksEvent{};
- if (*event == nullptr)
- {
- return ANEURALNETWORKS_OUT_OF_MEMORY;
- }
-
- const auto &plan = execution->plan();
- const auto &model = plan.model();
-
- // Set input(s)
- for (uint32_t n = 0; n < model.getInputs().size(); ++n)
- {
- auto setter = [&](::neurun::backend::operand::ITensor &tensor) {
- execution->source(n).push(tensor);
- };
-
- neurun::model::operand::IO::Index input_index{n};
-
- ::neurun::model::operand::Index index{model.getInputs().at(input_index)};
- auto object = plan.operands().at(index);
-
- object->access(setter);
- }
-
- const auto &operations = execution->plan().operations();
-
- for (uint32_t n = 0; n < operations.size(); ++n)
- {
- operations.at(n).run();
- }
-
- // Get output(s)
- for (uint32_t n = 0; n < model.getOutputs().size(); ++n)
- {
- auto getter = [&](::neurun::backend::operand::ITensor &tensor) {
- execution->sink(n).pull(tensor);
- };
-
- neurun::model::operand::IO::Index output_index{n};
-
- ::neurun::model::operand::Index index{model.getOutputs().at(output_index)};
- auto object = plan.operands().at(index);
-
- object->access(getter);
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
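-
-// A minimal usage sketch of the execution API above (illustrative only; error
-// handling and event teardown are omitted, and `compilation`, `input` and
-// `output` are assumed to exist):
-//
-//   ANeuralNetworksExecution *execution = nullptr;
-//   ANeuralNetworksEvent *event = nullptr;
-//   ANeuralNetworksExecution_create(compilation, &execution);
-//   ANeuralNetworksExecution_setInput(execution, 0, nullptr, input, sizeof(input));
-//   ANeuralNetworksExecution_setOutput(execution, 0, nullptr, output, sizeof(output));
-//   ANeuralNetworksExecution_startCompute(execution, &event);
-//   // NOTE In this implementation the computation runs synchronously inside
-//   // startCompute, so the result is already in `output` at this point.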
-
-void ANeuralNetworksExecution_free(ANeuralNetworksExecution * /* execution */) {}
-
-int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution *execution,
- int32_t /* index */,
- const ANeuralNetworksOperandType * /* type */,
- const ANeuralNetworksMemory *memory,
- size_t /* offset */, size_t /* length */)
-{
- if ((execution == nullptr) || (memory == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- // NYI
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution *execution,
- int32_t /* index */,
- const ANeuralNetworksOperandType * /* type */,
- const ANeuralNetworksMemory *memory,
- size_t /* offset */, size_t /* length */)
-{
- if ((execution == nullptr) || (memory == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- // NYI
- return ANEURALNETWORKS_NO_ERROR;
-}
diff --git a/runtimes/neurun/src/frontend/memory.cc b/runtimes/neurun/src/frontend/memory.cc
deleted file mode 100644
index b2f6ab2d0..000000000
--- a/runtimes/neurun/src/frontend/memory.cc
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <NeuralNetworks.h>
-#include <sys/mman.h>
-#include <new>
-#include <memory>
-
-#include "cpp14/memory.h"
-#include "frontend/wrapper/memory.h"
-
-int ANeuralNetworksMemory_createFromFd(size_t size, int protect, int fd, size_t offset,
- ANeuralNetworksMemory **memory)
-{
- if (memory == nullptr)
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
-  // Use a unique pointer to avoid a memory leak
- std::unique_ptr<ANeuralNetworksMemory> memory_ptr =
- nnfw::cpp14::make_unique<ANeuralNetworksMemory>(size, protect, fd, offset);
- if (memory_ptr == nullptr)
- {
- return ANEURALNETWORKS_OUT_OF_MEMORY;
- }
- *memory = memory_ptr.release();
-
- return ANEURALNETWORKS_NO_ERROR;
-}
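-
-// Illustrative usage sketch (names and sizes are hypothetical):
-//
-//   int fd = open("weights.bin", O_RDONLY);
-//   ANeuralNetworksMemory *memory = nullptr;
-//   ANeuralNetworksMemory_createFromFd(file_size, PROT_READ, fd, 0, &memory);
-//   ANeuralNetworksModel_setOperandValueFromMemory(model, index, memory, 0, length);
-//   ANeuralNetworksMemory_free(memory);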
-
-void ANeuralNetworksMemory_free(ANeuralNetworksMemory *memory) { delete memory; }
diff --git a/runtimes/neurun/src/frontend/model.cc b/runtimes/neurun/src/frontend/model.cc
deleted file mode 100644
index 3aa2aa2ff..000000000
--- a/runtimes/neurun/src/frontend/model.cc
+++ /dev/null
@@ -1,480 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <NeuralNetworks.h>
-#include <NeuralNetworksEx.h>
-
-#include <cassert>
-#include <stdexcept>
-#include <new>
-
-#include "cpp14/memory.h"
-
-#include "graph/Graph.h"
-#include "frontend/wrapper/model.h"
-#include "frontend/wrapper/memory.h"
-#include "model/operation/Node.Include.h"
-
-int ANeuralNetworksModel_create(ANeuralNetworksModel **model)
-{
- if (model == nullptr)
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- *model = new (std::nothrow) ANeuralNetworksModel{};
- if (*model == nullptr)
- {
- return ANEURALNETWORKS_OUT_OF_MEMORY;
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-void ANeuralNetworksModel_free(ANeuralNetworksModel *model) { delete model; }
-
-int ANeuralNetworksModel_addOperand(ANeuralNetworksModel *model,
- const ANeuralNetworksOperandType *type)
-{
- if ((model == nullptr) || (type == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- if (model->isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- // scale and zeroPoint should be zero for scalars and non-fixed point tensors
- // Quantized:
- // scale: a 32 bit floating point value greater than zero
- // zeroPoint: a 32 bit integer, in range [0, 255]
- if (type->type == ANEURALNETWORKS_TENSOR_QUANT8_ASYMM)
- {
- if (!(type->scale > 0.0f))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- if ((type->zeroPoint < 0) || (type->zeroPoint > 255))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- }
- else if ((type->scale != 0.0f) || (type->zeroPoint != 0))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- // dimensionCount should be zero for scalars
- if ((type->dimensionCount != 0) &&
- ((type->type == ANEURALNETWORKS_FLOAT32) || (type->type == ANEURALNETWORKS_INT32) ||
- (type->type == ANEURALNETWORKS_UINT32)))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
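-
-  // Illustrative examples of operand types accepted above (field order follows
-  // ANeuralNetworksOperandType in NeuralNetworks.h: type, dimensionCount,
-  // dimensions, scale, zeroPoint):
-  //
-  //   uint32_t dims[4] = {1, 28, 28, 1};
-  //   ANeuralNetworksOperandType quant8 = {ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
-  //                                        4, dims, 0.0078431f, 128};
-  //   ANeuralNetworksOperandType scalar = {ANEURALNETWORKS_INT32, 0, nullptr,
-  //                                        0.0f, 0};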
-
- ::neurun::model::operand::Shape shape(type->dimensionCount);
- ::neurun::model::operand::TypeInfo typeInfo((OperandCode)(type->type), type->scale,
- type->zeroPoint);
-
- for (uint32_t axis = 0; axis < type->dimensionCount; ++axis)
- {
- shape.dim(axis) = type->dimensions[axis];
- }
-
- model->deref().addOperand(shape, typeInfo);
-
-  // NOTE We do NOT allocate CLTensor here as we do not know how to interpret this one.
- // TensorFlow Lite may interpret a rank-4 tensor either as a feature map (with batch) or
- // a convolution kernel.
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel *model, int32_t index,
- const void *buffer, size_t length)
-{
- const bool isOptional = ((buffer == nullptr) && (length == 0));
-
- if ((model == nullptr) || ((buffer == nullptr) && (length != 0)))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- if (model->isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- // Negative index value is not allowed
- if (index < 0)
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- const neurun::model::operand::Index ind{static_cast<uint32_t>(index)};
-
- if (!model->deref().operands().exist(ind))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- auto &obj = model->deref().operands().at(ind);
- if ((obj.operandSize() != length) && !isOptional)
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- if (!obj.setAsConstant())
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- using ::neurun::model::operand::CachedData;
- using ::neurun::model::operand::ExternalData;
-
-  // Leave operands.at(ind).data()->base() as nullptr for an optional operand.
-  // It will be filled in when the model is finished.
- if (isOptional)
- {
- model->setOptionalOperand(ind);
- }
-
- // NNAPI spec in NeuralNetworks.h
- // For values of length greater than ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES,
- // the application is responsible for not changing the content of this region
- // until all executions using this model have completed
- if (length <= ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES)
- {
- model->deref().setOperandValue(ind, nnfw::cpp14::make_unique<CachedData>(
- reinterpret_cast<const uint8_t *>(buffer), length));
- }
- else
- {
- model->deref().setOperandValue(ind, nnfw::cpp14::make_unique<ExternalData>(
- reinterpret_cast<const uint8_t *>(buffer), length));
- }
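-
-  // Illustrative consequence of the two paths above: a small value (e.g. a
-  // 4-byte scalar) is copied into the model (CachedData), so the caller's
-  // buffer may be released right away, whereas a large weight tensor is only
-  // referenced (ExternalData), so the caller must keep the buffer alive until
-  // every execution of this model has completed. The threshold is 128 bytes
-  // in the NNAPI headers (an assumption; check NeuralNetworks.h).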
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel *model, int32_t index,
- const ANeuralNetworksMemory *memory,
- size_t offset, size_t length)
-{
- if ((model == nullptr) || (memory == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- if (model->isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- // Negative index value is not allowed
- if (index < 0)
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- const neurun::model::operand::Index ind{static_cast<uint32_t>(index)};
-
- if (!model->deref().operands().exist(ind))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- auto &obj = model->deref().operands().at(ind);
- if ((obj.operandSize() != length) || (memory->size() < (offset + length)))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- if (!obj.setAsConstant())
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- using ::neurun::model::operand::ExternalData;
-
- model->deref().setOperandValue(
- ind, nnfw::cpp14::make_unique<ExternalData>(
- reinterpret_cast<const uint8_t *>(memory->base() + offset), length));
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksModel_addOperation(ANeuralNetworksModel *model,
- ANeuralNetworksOperationType type, uint32_t inputCount,
- const uint32_t *inputs, uint32_t outputCount,
- const uint32_t *outputs)
-{
- if ((model == nullptr) || (inputs == nullptr) || (outputs == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- const ANeuralNetworksOperationType FIRST_OPERATION = ANEURALNETWORKS_ADD;
- const ANeuralNetworksOperationType LAST_OPERATION = ANEURALNETWORKS_TRANSPOSE;
- if ((type < FIRST_OPERATION) || (type > LAST_OPERATION))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- if (model->isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- for (uint32_t i = 0; i < outputCount; i++)
- {
- const ::neurun::model::operand::Index ind{outputs[i]};
- auto &obj = model->deref().operands().at(ind);
-
- if (!obj.setAsOperationOutput())
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- }
-
- auto &graph = model->deref();
-
- auto node_param =
- neurun::model::operation::Node::InitParam{inputCount, inputs, outputCount, outputs};
-
- try
- {
- switch (type)
- {
- case ANEURALNETWORKS_CONV_2D:
- {
-      // inputCount is either 7 or 10 according to the NN API specification.
- // - Padding is implicit when inputCount is 7
- // - Padding is explicit when inputCount is 10
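-      // For reference, the 7 implicit-padding inputs are, per the NN API spec
-      // (listed here for illustration): 0: input, 1: filter, 2: bias,
-      // 3: implicit padding scheme (ANEURALNETWORKS_PADDING_SAME/VALID),
-      // 4: stride width, 5: stride height, 6: fused activation (FuseCode)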
- assert(inputCount == 7 || inputCount == 10);
- assert(outputCount == 1);
-
- if (inputCount == 7)
- {
- using GraphNode = neurun::model::operation::Conv2DNode;
-
- graph.addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
- }
- else
- {
- throw std::runtime_error{"Explicit padding in Conv2D is not supported, yet"};
- }
-
- break;
- }
- case ANEURALNETWORKS_MAX_POOL_2D:
- {
-      // inputCount is either 7 or 10 according to the NN API specification.
- // - Padding is implicit when inputCount is 7
- // - Padding is explicit when inputCount is 10
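-      // For reference, per the NN API spec the 7 implicit-padding inputs are:
-      // 0: input, 1: implicit padding scheme, 2: stride width, 3: stride
-      // height, 4: filter width, 5: filter height, 6: fused activation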
- assert(inputCount == 7 || inputCount == 10);
- assert(outputCount == 1);
-
- if (inputCount == 7)
- {
- using GraphNode = neurun::model::operation::MaxPool2DNode;
-
- graph.addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
- }
- else
- {
- throw std::runtime_error{"Explicit padding in MaxPool2D is not supported, yet"};
- }
-
- break;
- }
- case ANEURALNETWORKS_AVERAGE_POOL_2D:
- {
-      // inputCount is either 7 or 10 according to the NN API specification.
- // - Padding is implicit when inputCount is 7
- // - Padding is explicit when inputCount is 10
- assert(inputCount == 7 || inputCount == 10);
- assert(outputCount == 1);
-
- if (inputCount == 7)
- {
- using GraphNode = neurun::model::operation::AvgPool2DNode;
-
- graph.addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
- }
- else
- {
- throw std::runtime_error{"Explicit padding in AvgPool2D is not supported, yet"};
- }
-
- break;
- }
- case ANEURALNETWORKS_CONCATENATION:
- {
- using GraphNode = neurun::model::operation::ConcatNode;
-
- graph.addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
-
- break;
- }
- case ANEURALNETWORKS_RESHAPE:
- {
- using GraphNode = neurun::model::operation::ReshapeNode;
-
- graph.addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
-
- break;
- }
- case ANEURALNETWORKS_FULLY_CONNECTED:
- {
- using GraphNode = neurun::model::operation::FullyConnectedNode;
-
- graph.addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
-
- break;
- }
- case ANEURALNETWORKS_SOFTMAX:
- {
- using GraphNode = neurun::model::operation::SoftmaxNode;
-
- graph.addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
-
- break;
- }
- default:
- throw std::runtime_error{"Not supported operation"};
- };
- }
- catch (const std::exception &e)
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
- ANeuralNetworksOperationTypeEx type, uint32_t inputCount,
- const uint32_t *inputs, uint32_t outputCount,
- const uint32_t *outputs)
-{
- if ((model == nullptr) || (inputs == nullptr) || (outputs == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- if (model->isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- const ANeuralNetworksOperationTypeEx FIRST_OPERATION = ANEURALNETWORKS_GATHER_EX;
- const ANeuralNetworksOperationTypeEx LAST_OPERATION = ANEURALNETWORKS_PRELU_EX;
- if ((type < FIRST_OPERATION) || (type > LAST_OPERATION))
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- for (uint32_t i = 0; i < outputCount; i++)
- {
- const ::neurun::model::operand::Index ind{outputs[i]};
- auto &obj = model->deref().operands().at(ind);
-
- if (!obj.setAsOperationOutput())
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- }
-
-  // Workaround: use inputCount to avoid an unused-parameter compile error
- if (inputCount == 0)
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
-
- try
- {
- switch (type)
- {
- default:
- throw std::runtime_error{"Not supported operation"};
- }
- }
- catch (const std::exception &e)
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel *model, uint32_t inputCount,
- const uint32_t *inputs, uint32_t outputCount,
- const uint32_t *outputs)
-{
- if ((model == nullptr) || (inputs == nullptr) || (outputs == nullptr))
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- if (model->isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
-  // NOTE ::neurun::model::operand::Index uses int as its underlying type because various
-  // NNAPI functions, such as ANeuralNetworksModel_setOperandValue, use int to represent an
-  // operand index.
-  //
-  // ANeuralNetworksModel_identifyInputsAndOutputs, however, uses uint32_t to represent an
-  // operand index.
-  //
-  // Below, static_cast<uint32_t>(...) is introduced to eliminate a compiler warning.
- for (uint32_t n = 0; n < inputCount; ++n)
- {
- const neurun::model::operand::Index ind{static_cast<uint32_t>(inputs[n])};
- model->deref().addInput(ind);
-
- auto &obj = model->deref().operands().at(ind);
- if (!obj.setAsModelInput())
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- }
-
- for (uint32_t n = 0; n < outputCount; ++n)
- {
- const neurun::model::operand::Index ind{static_cast<uint32_t>(outputs[n])};
- model->deref().addOutput(ind);
-
- auto &obj = model->deref().operands().at(ind);
- // Model output cannot become model input
- if (obj.isModelInput())
- {
- return ANEURALNETWORKS_BAD_DATA;
- }
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-int ANeuralNetworksModel_finish(ANeuralNetworksModel *model)
-{
- if (model == nullptr)
- {
- return ANEURALNETWORKS_UNEXPECTED_NULL;
- }
-
- return model->finish();
-}
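-
-// A minimal model-building sketch using the functions above (illustrative
-// only; operand types, indices and the operation's full input list are
-// elided):
-//
-//   ANeuralNetworksModel *model = nullptr;
-//   ANeuralNetworksModel_create(&model);
-//   ANeuralNetworksModel_addOperand(model, &input_type);   // operand 0
-//   ANeuralNetworksModel_addOperand(model, &output_type);  // operand 1
-//   ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_SOFTMAX, ...);
-//   uint32_t in = 0, out = 1;
-//   ANeuralNetworksModel_identifyInputsAndOutputs(model, 1, &in, 1, &out);
-//   ANeuralNetworksModel_finish(model);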
diff --git a/runtimes/neurun/src/frontend/wrapper/compilation.cc b/runtimes/neurun/src/frontend/wrapper/compilation.cc
deleted file mode 100644
index e4aa99f7a..000000000
--- a/runtimes/neurun/src/frontend/wrapper/compilation.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "compilation.h"
-
-int ANeuralNetworksCompilation::finish()
-{
- try
- {
- _compiler->compile();
- }
- catch (const std::exception &e)
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- return ANEURALNETWORKS_NO_ERROR;
-}
diff --git a/runtimes/neurun/src/frontend/wrapper/compilation.h b/runtimes/neurun/src/frontend/wrapper/compilation.h
deleted file mode 100644
index d4ba32ea5..000000000
--- a/runtimes/neurun/src/frontend/wrapper/compilation.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __COMPILATION_H__
-#define __COMPILATION_H__
-
-#include "compiler/Compiler.h"
-#include "graph/Graph.h"
-
-struct ANeuralNetworksCompilation
-{
-public:
- ANeuralNetworksCompilation(const std::shared_ptr<neurun::graph::Graph> &model)
- : _compiler{new neurun::compiler::Compiler{model}}
- {
- // DO NOTHING
- }
-
-public:
- neurun::compiler::Plan &plan(void) { return _compiler->plan(); }
-
-public:
- void publish(std::shared_ptr<const neurun::compiler::Plan> &plan) { _compiler->release(plan); }
- int finish();
-
-private:
- std::shared_ptr<neurun::compiler::Compiler> _compiler;
-};
-
-#endif
diff --git a/runtimes/neurun/src/frontend/wrapper/event.h b/runtimes/neurun/src/frontend/wrapper/event.h
deleted file mode 100644
index d144b7c07..000000000
--- a/runtimes/neurun/src/frontend/wrapper/event.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __EVENT_H__
-#define __EVENT_H__
-
-struct ANeuralNetworksEvent
-{
-};
-
-#endif
diff --git a/runtimes/neurun/src/frontend/wrapper/execution.h b/runtimes/neurun/src/frontend/wrapper/execution.h
deleted file mode 100644
index b68a7b967..000000000
--- a/runtimes/neurun/src/frontend/wrapper/execution.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __EXECUTION_H__
-#define __EXECUTION_H__
-
-#include "compiler/Plan.h"
-#include "exec/Source.h"
-#include "exec/Sink.h"
-
-struct ANeuralNetworksExecution
-{
-public:
- ANeuralNetworksExecution(const std::shared_ptr<const neurun::compiler::Plan> &plan) : _plan{plan}
- {
- _sources.resize(_plan->model().getInputs().size());
- _sinks.resize(_plan->model().getOutputs().size());
- }
-
-public:
- const neurun::compiler::Plan &plan(void) const { return *_plan; }
-
-private:
- std::shared_ptr<const neurun::compiler::Plan> _plan;
-
-public:
- // TODO Use InputIndex instead of int
- void source(int n, std::unique_ptr<neurun::exec::ISource> &&source)
- {
- _sources.at(n) = std::move(source);
- }
- template <typename T, typename... Args> void source(int n, Args &&... args)
- {
- source(n, std::unique_ptr<T>{new T{std::forward<Args>(args)...}});
- }
-
-public:
- const neurun::exec::ISource &source(int n) const { return *(_sources.at(n)); }
-
-public:
- // TODO Use OutputIndex instead of int
- void sink(int n, std::unique_ptr<neurun::exec::ISink> &&sink) { _sinks.at(n) = std::move(sink); }
- template <typename T, typename... Args> void sink(int n, Args &&... args)
- {
- sink(n, std::unique_ptr<T>{new T{std::forward<Args>(args)...}});
- }
-
-public:
- const neurun::exec::ISink &sink(int n) const { return *(_sinks.at(n)); }
-
-private:
- std::vector<std::unique_ptr<neurun::exec::ISource>> _sources;
- std::vector<std::unique_ptr<neurun::exec::ISink>> _sinks;
-};
-
-#endif
diff --git a/runtimes/neurun/src/frontend/wrapper/memory.cc b/runtimes/neurun/src/frontend/wrapper/memory.cc
deleted file mode 100644
index 456015123..000000000
--- a/runtimes/neurun/src/frontend/wrapper/memory.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <NeuralNetworks.h>
-#include <sys/mman.h>
-
-#include "memory.h"
-
-//
-// ANeuralNetworksMemory
-//
-ANeuralNetworksMemory::ANeuralNetworksMemory(size_t size, int protect, int fd, size_t offset)
-{
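-  // NOTE mmap() returns MAP_FAILED (not nullptr) on failure; the result is not
-  // checked here, so a failed mapping surfaces only when the memory is used.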
- _base = reinterpret_cast<uint8_t *>(mmap(nullptr, size, protect, MAP_PRIVATE, fd, offset));
- _size = size;
-}
-
-ANeuralNetworksMemory::~ANeuralNetworksMemory() { munmap(reinterpret_cast<void *>(_base), _size); }
diff --git a/runtimes/neurun/src/frontend/wrapper/memory.h b/runtimes/neurun/src/frontend/wrapper/memory.h
deleted file mode 100644
index a430bcf49..000000000
--- a/runtimes/neurun/src/frontend/wrapper/memory.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __MEMORY_H__
-#define __MEMORY_H__
-
-#include <cstdint>
-
-struct ANeuralNetworksMemory
-{
-public:
- ANeuralNetworksMemory(size_t size, int protect, int fd, size_t offset);
- ~ANeuralNetworksMemory();
-
-public:
- size_t size(void) const { return _size; }
- uint8_t *base(void) { return _base; }
- const uint8_t *base(void) const { return _base; }
-
-private:
- size_t _size;
- uint8_t *_base;
-};
-
-#endif // __MEMORY_H__
diff --git a/runtimes/neurun/src/frontend/wrapper/model.cc b/runtimes/neurun/src/frontend/wrapper/model.cc
deleted file mode 100644
index a7a9275fc..000000000
--- a/runtimes/neurun/src/frontend/wrapper/model.cc
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "model.h"
-
-#include "graph/Graph.h"
-
-//
-// ANeuralNetworksModel
-//
-ANeuralNetworksModel::ANeuralNetworksModel()
- : _model{new neurun::graph::Graph}, _optional_operands{}
-{
- // DO NOTHING
-}
-
-ResultCode ANeuralNetworksModel::finish()
-{
- // This function must only be called once for a given model
- if (isFinished())
- {
- return ANEURALNETWORKS_BAD_STATE;
- }
-
- fillOptionalOperand();
-
- _model->finishBuilding();
-
- return ANEURALNETWORKS_NO_ERROR;
-}
-
-void ANeuralNetworksModel::fillOptionalOperand(void)
-{
- _model->operations().iterate(
- [&](const ::neurun::model::operation::Index &, ::neurun::model::operation::Node &node) {
- for (auto input : node.getInputs())
- {
- // TODO fill default value for optional operands
- if (_optional_operands.find(input) != _optional_operands.end())
- {
- throw std::runtime_error{"Optional operand is not supported yet"};
- }
- }
- });
-}
diff --git a/runtimes/neurun/src/frontend/wrapper/model.h b/runtimes/neurun/src/frontend/wrapper/model.h
deleted file mode 100644
index 2386a648d..000000000
--- a/runtimes/neurun/src/frontend/wrapper/model.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __MODEL_H__
-#define __MODEL_H__
-
-#include <NeuralNetworks.h>
-
-#include "graph/Graph.h"
-
-struct ANeuralNetworksModel
-{
-public:
- ANeuralNetworksModel();
-
-public:
- neurun::graph::Graph &deref(void) { return *_model; }
- ResultCode finish();
- bool isFinished() { return !_model->isBuildingPhase(); }
- void release(std::shared_ptr<neurun::graph::Graph> &model) { model = _model; }
- void setOptionalOperand(const neurun::model::operand::Index idx)
- {
- _optional_operands.insert(idx);
- }
-
-private:
- void fillOptionalOperand(void);
-
-private:
- std::shared_ptr<neurun::graph::Graph> _model;
- std::unordered_set<neurun::model::operand::Index> _optional_operands;
-};
-
-#endif // __MODEL_H__
diff --git a/runtimes/neurun/src/graph/Graph.cc b/runtimes/neurun/src/graph/Graph.cc
deleted file mode 100644
index 832e2b887..000000000
--- a/runtimes/neurun/src/graph/Graph.cc
+++ /dev/null
@@ -1,334 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Graph.h"
-
-#include <algorithm>
-#include <bitset>
-
-#include "util/logging.h"
-#include "verifier/Verifier.h"
-#include "cpp14/memory.h"
-#include "linear/Linear.h"
-#include "operation/LowerInfo.h"
-#include "operand/LowerInfo.h"
-#include "operand/Shape4DConvert.h"
-#include "compiler/BackendResolver.h"
-#include "backend/interface/IConfig.h"
-#include "pass/PermutationInsertionPass.h"
-#include "pass/PermutationEliminationPass.h"
-
-namespace neurun
-{
-namespace graph
-{
-
-Graph::Graph(void) = default;
-
-Graph::~Graph(void) = default;
-
-model::operand::Index Graph::addOperand(const model::operand::Shape &shape,
- const model::operand::TypeInfo &type)
-{
- return _model->operands.append(shape, type);
-}
-
-model::operation::Index Graph::addOperation(std::unique_ptr<model::operation::Node> &&node)
-{
- assert(isBuildingPhase());
- return _model->operations.append(std::move(node));
-}
-
-void Graph::setOperandValue(const model::operand::Index &ind,
- std::unique_ptr<model::operand::Data> &&data)
-{
- assert(isBuildingPhase());
- assert(_model->operands.exist(ind));
- _model->operands.at(ind).data(std::move(data));
-}
-
-void Graph::addInput(const model::operand::Index &ind)
-{
- assert(isBuildingPhase());
- _model->inputs.append(ind);
-}
-
-void Graph::addOutput(const model::operand::Index &ind)
-{
- assert(isBuildingPhase());
- _model->outputs.append(ind);
-}
-
-void Graph::finishBuilding(void)
-{
- assert(isBuildingPhase());
- _phase = Phase::MODEL;
-
- // Initialize operand use-def
- initializeUseDef();
-
- // Call graph verifications for the MODEL phase
- {
- assert(verifier::DAGChecker().verify(*this));
- assert(verifier::EdgeConsistencyChecker().verify(*this));
- }
-}
-
-void Graph::lower(void)
-{
- assert(_phase == Phase::MODEL);
-
- // Lower
- {
- // operand::LowerInfo holder
- std::unordered_map<model::operand::Index, std::unique_ptr<operand::LowerInfo>>
- operands_lower_info;
-
- _model->operands.iterate([&](const model::operand::Index &index,
- const model::operand::Object &object) {
- operands_lower_info[index] =
- nnfw::cpp14::make_unique<operand::LowerInfo>(graph::operand::asShape4D(object.shape()));
- });
-
- _backend_resolver = nnfw::cpp14::make_unique<compiler::BackendResolver>(_model->operands);
-
- _model->operations.iterate(
- [&](const model::operation::Index &index, model::operation::Node &node) {
- auto backend = _backend_resolver->getBackend(typeid(node));
-
- // Operation LowerInfo
- setLowerInfo(index, nnfw::cpp14::make_unique<graph::operation::LowerInfo>(backend));
-
- // LowerInfo for in/output operands
- for (auto operand : node.getInputs())
- {
- auto &&lower_info = operands_lower_info.at(operand);
- lower_info->addUseBackend(backend);
- }
- for (auto operand : node.getOutputs())
- {
- auto &&lower_info = operands_lower_info.at(operand);
- lower_info->addDefBackend(backend);
- }
- });
-
-    // Register the default backend as the def backend for model inputs and as
-    // the use backend for model outputs
- for (auto index : getInputs())
- {
- auto &&lower_info = operands_lower_info.at(index);
- lower_info->addDefBackend(_backend_resolver->getDefaultBackend());
- }
-
- for (auto index : getOutputs())
- {
- auto &&lower_info = operands_lower_info.at(index);
- lower_info->addUseBackend(_backend_resolver->getDefaultBackend());
- }
-
-    // For constant operands, add a def backend identical to the use backend
- // NOTE This assumes a constant operand is used by only one operation
- _model->operations.iterate([&](const model::operation::Index &, model::operation::Node &node) {
- // LowerInfo for input operands
- for (auto operand : node.getInputs())
- {
- auto &&lower_info = operands_lower_info.at(operand);
- if (lower_info->def_backends().empty())
- {
- lower_info->addDefBackend(lower_info->use_backends().getOnlyElement());
- }
- }
- });
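-
-    // Illustrative example of the rule above: if a Conv2D kernel constant is
-    // consumed only by a Conv2D node lowered to backend B, its use backend set
-    // is { B }, so B is recorded as its def backend as well.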
-
- // Set LowerInfo for each operand from the operand::LowerInfo holder
- _model->operands.iterate([&](const model::operand::Index &index,
- model::operand::Object &object) {
- object.lower_info(std::move(operands_lower_info[index]));
-
- // Dump operand LowerInfo
- // TODO Extract this dumping procedure to be reusable
- if (!object.lower_info()->def_backends().empty() ||
- !object.lower_info()->use_backends().empty())
- {
- auto backends_to_string = [](const operand::BackendSet &backends) {
- std::string str;
- for (auto backend : backends)
- {
- str += backend->config()->id();
- str += " ";
- }
- return "{ " + str + "}";
- };
-
- auto operation_index_to_string = [](const model::operation::IndexList &operations) {
- std::string str;
- for (auto op : operations.list())
- {
- str += std::to_string(op.value());
- str += " ";
- }
- return "{ " + str + "}";
- };
-
- const auto &lower_info = object.lower_info();
- const auto &shape = object.shape();
- const auto &lower_shape = lower_info->shape();
- std::string def_ops = operation_index_to_string(object.getDef());
- std::string use_ops = operation_index_to_string(object.getUses());
- std::string def_layouts = backends_to_string(lower_info->def_backends());
- std::string use_layouts = backends_to_string(lower_info->use_backends());
- VERBOSE(Lower) << "* Operand #" << index.value() << " LowerInfo" << std::endl;
- VERBOSE(Lower) << " - Shape : { " << shape.dim(0) << " "
- << (shape.rank() > 1 ? shape.dim(1) : 0) << " "
- << (shape.rank() > 2 ? shape.dim(2) : 0) << " "
- << (shape.rank() > 3 ? shape.dim(3) : 0) << " "
- << "}" << std::endl;
- VERBOSE(Lower) << " - Def Operations : " << def_ops << std::endl;
- VERBOSE(Lower) << " - Use Operations : " << use_ops << std::endl;
- VERBOSE(Lower) << " - Lower Info" << std::endl;
- VERBOSE(Lower) << " - 4D Shape (NHWC) : { " << lower_shape.n() << " " << lower_shape.h()
- << " " << lower_shape.w() << " " << lower_shape.c() << " "
- << "}" << std::endl;
- VERBOSE(Lower) << " - Def Backends : " << def_layouts << std::endl;
- VERBOSE(Lower) << " - Use Backends : " << use_layouts << std::endl;
- }
- });
- }
-
- // Run PermutationInsertionPass
- {
- pass::PermutationInsertionPass pi_pass(*this);
- pi_pass.run();
- pass::PermutationEliminationPass pe_pass(*this);
- pe_pass.run();
- }
-
- // Graph verifications for the LOWERED phase
- {
- assert(verifier::DAGChecker().verify(*this));
- assert(verifier::EdgeConsistencyChecker().verify(*this));
- }
-}
-
-std::unique_ptr<linear::Linear> Graph::linearize(void)
-{
- assert(_phase == Phase::MODEL);
-
- auto linear = nnfw::cpp14::make_unique<linear::Linear>(*this);
-
- // TODO Move the operations and operands to linear object
-  return linear;
-}
-
-void Graph::initializeUseDef()
-{
- operations().iterate(
- [&](const model::operation::Index &index, const model::operation::Node &node) -> void {
- auto outputs = node.getOutputs();
- for (auto output : outputs)
- {
- operands().at(output).appendDef(index);
- }
-
- auto inputs = node.getInputs();
- for (auto input : inputs)
- {
- operands().at(input).appendUse(index);
- }
- });
-}
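-
-// Illustrative example: if operation #0 writes operand %2 and operation #1
-// reads it, initializeUseDef() records def(%2) = { #0 } and uses(%2) = { #1 }
-// (the #/% notation is used here for illustration only).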
-
-const operation::LowerInfo *Graph::getLowerInfo(const model::operation::Index &index) const
-{
- auto itr = _operation_lower_info.find(index);
- if (itr == _operation_lower_info.end())
- return nullptr;
- return itr->second.get();
-}
-
-void Graph::setLowerInfo(const model::operation::Index &index,
- std::unique_ptr<operation::LowerInfo> &&lower_info)
-{
- _operation_lower_info.insert(std::make_pair(index, std::move(lower_info)));
-}
-
-} // namespace graph
-} // namespace neurun
-
-namespace neurun
-{
-namespace graph
-{
-
-// Explicit instantiations to have implementation in the source file.
-
-template class Graph::DefaultIterator<true>;
-template class Graph::DefaultIterator<false>;
-
-template class Graph::PostDfsIterator<true>;
-template class Graph::PostDfsIterator<false>;
-
-//
-// Graph::DefaultIterator
-//
-
-template <bool is_const>
-void Graph::DefaultIterator<is_const>::iterate(GraphRef graph, const IterFn &fn) const
-{
- graph.operations().iterate(
- [&](const model::operation::Index &index, NodeRef node) -> void { fn(index, node); });
-}
-
-//
-// Graph::PostDfsIterator
-//
-
-template <bool is_const>
-void Graph::PostDfsIterator<is_const>::iterate(GraphRef graph, const IterFn &fn) const
-{
- assert(!graph.isBuildingPhase()); // Restrict iteration condition
-
- std::unordered_map<model::operation::Index, bool> visited;
- graph.operations().iterate(
- [&](const model::operation::Index &index, NodeRef) { visited[index] = false; });
-
- std::function<void(const model::operation::Index &, NodeRef)> dfs_recursive =
- [&](const model::operation::Index &index, NodeRef node) -> void {
- if (visited[index])
- return;
- visited[index] = true;
-
- for (auto output : node.getOutputs())
- {
- const auto &operand = graph.operands().at(output);
- for (const auto &use : operand.getUses().list())
- {
- dfs_recursive(use, graph.operations().at(use));
- }
- }
-
- fn(index, node);
- };
-
- graph.operations().iterate(dfs_recursive);
-
-  // All of the operations (nodes) must have been visited.
- assert(std::all_of(
- visited.begin(), visited.end(),
- [](const std::pair<const model::operation::Index, bool> &v) { return v.second; }));
-}
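-
-// Illustrative ordering: for a chain A -> B -> C (connected through operands),
-// the recursion above descends into users first, so fn is invoked on C, then
-// B, then A, i.e. in post-order (reverse topological order).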
-
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/Graph.h b/runtimes/neurun/src/graph/Graph.h
deleted file mode 100644
index afcfdce12..000000000
--- a/runtimes/neurun/src/graph/Graph.h
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_GRAPH_H__
-#define __NEURUN_GRAPH_GRAPH_H__
-
-#include <functional>
-
-#include "model/operation/Node.h"
-#include "graph/Model.h"
-
-namespace neurun
-{
-namespace linear
-{
-class Linear;
-} // namespace linear
-} // namespace neurun
-
-namespace neurun
-{
-namespace compiler
-{
-class BackendResolver;
-} // namespace compiler
-} // namespace neurun
-
-namespace neurun
-{
-namespace graph
-{
-
-class Graph
-{
-private:
- enum class Phase
- {
- BUILDING,
- MODEL
- };
-
-public:
- template <bool is_const> class Iterator
- {
- public:
- using GraphRef = typename std::conditional<is_const, const Graph &, Graph &>::type;
- using IndexRef = const model::operation::Index &;
- using NodeRef = typename std::conditional<is_const, const model::operation::Node &,
- model::operation::Node &>::type;
- using IterFn = std::function<void(IndexRef, NodeRef)>;
-
- public:
- virtual ~Iterator() = default;
- virtual void iterate(GraphRef graph, const IterFn &fn) const = 0;
- };
-
- template <bool is_const = false> class DefaultIterator final : public Iterator<is_const>
- {
- public:
- using GraphRef = typename Iterator<is_const>::GraphRef;
- using IndexRef = typename Iterator<is_const>::IndexRef;
- using NodeRef = typename Iterator<is_const>::NodeRef;
- using IterFn = typename Iterator<is_const>::IterFn;
-
- public:
- void iterate(GraphRef graph, const IterFn &fn) const;
- };
- using DefaultConstIterator = DefaultIterator<true>;
-
- template <bool is_const = false> class PostDfsIterator final : public Iterator<is_const>
- {
- public:
- using GraphRef = typename Iterator<is_const>::GraphRef;
- using IndexRef = typename Iterator<is_const>::IndexRef;
- using NodeRef = typename Iterator<is_const>::NodeRef;
- using IterFn = typename Iterator<is_const>::IterFn;
-
- public:
- void iterate(GraphRef graph, const IterFn &fn) const;
- };
- using PostDfsConstIterator = PostDfsIterator<true>;
-
-public:
- Graph(void);
- ~Graph(void);
-
- // Graph Building
-public:
- model::operand::Index addOperand(const model::operand::Shape &shape,
- const model::operand::TypeInfo &type);
- model::operation::Index addOperation(std::unique_ptr<model::operation::Node> &&node);
- void setOperandValue(const model::operand::Index &ind,
- std::unique_ptr<model::operand::Data> &&data);
- void addInput(const model::operand::Index &ind);
- void addOutput(const model::operand::Index &ind);
- void finishBuilding(void);
- void lower(void);
- void removeOperand(const model::operand::Index &ind) { _model->operands.remove(ind); }
- std::unique_ptr<linear::Linear> linearize(void);
- bool isBuildingPhase(void) const { return _phase == Phase::BUILDING; }
-
-private:
- void initializeUseDef();
-
- // Accessors
-public:
- const model::operand::IndexSet &getInputs() const { return _model->inputs; }
- model::operand::IndexSet &getInputs() { return _model->inputs; }
- const model::operand::IndexSet &getOutputs() const { return _model->outputs; }
- model::operand::IndexSet &getOutputs() { return _model->outputs; }
- const model::operand::Set &operands() const { return _model->operands; }
- model::operand::Set &operands()
- {
- return _model->operands;
- } // TODO Remove this non-const accessor
- const model::operation::Set &operations() const { return _model->operations; }
- model::operation::Set &operations() { return _model->operations; }
- const compiler::BackendResolver *backend_resolver() const { return _backend_resolver.get(); }
-
-private:
- Phase _phase{Phase::BUILDING};
- std::unique_ptr<Model> _model{new Model};
-
- // For LOWERED phase
-public:
- const operation::LowerInfo *getLowerInfo(const model::operation::Index &index) const;
- void setLowerInfo(const model::operation::Index &index,
- std::unique_ptr<operation::LowerInfo> &&lower_info);
-
-private:
- std::unique_ptr<compiler::BackendResolver> _backend_resolver;
- std::unordered_map<model::operation::Index, std::unique_ptr<operation::LowerInfo>>
- _operation_lower_info;
-};
-
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_GRAPH_H__
diff --git a/runtimes/neurun/src/graph/Index.h b/runtimes/neurun/src/graph/Index.h
deleted file mode 100644
index 3263d12ad..000000000
--- a/runtimes/neurun/src/graph/Index.h
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_INDEX_H__
-#define __NEURUN_GRAPH_INDEX_H__
-
-#include <functional>
-#include <limits>
-#include <stdint.h>
-
-namespace neurun
-{
-namespace graph
-{
-
-template <typename T, typename DummyTag> class Index
-{
-private:
- static const T UNDEFINED = std::numeric_limits<T>::max();
-
-public:
- explicit Index(void) : _index{UNDEFINED} {}
- explicit Index(T o) : _index{o} {}
- explicit Index(int32_t o) : _index{static_cast<T>(o)} {} // For legacy code compatibility
- Index(const Index &o) : _index{o._index} {}
-
- Index &operator=(T o)
- {
- _index = o;
- return *this;
- }
-
-  Index &operator=(const Index &o)
- {
- _index = o._index;
- return *this;
- }
-
- bool operator==(T o) const { return _index == o; }
- bool operator==(const Index &o) const { return _index == o._index; }
- bool operator!=(T o) const { return !(*this == o); }
- bool operator!=(const Index &o) const { return !(*this == o); }
-
- T value() const { return _index; }
- int32_t asInt() const { return static_cast<int32_t>(_index); } // For legacy code compatibility
-
-private:
- T _index;
-};
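-
-// Illustrative use of the phantom DummyTag parameter (alias names below are
-// hypothetical): distinct tags make otherwise-identical index types
-// incompatible at compile time.
-//
-//   using OperandIndex = Index<uint32_t, struct OperandIndexTag>;
-//   using OperationIndex = Index<uint32_t, struct OperationIndexTag>;
-//   // OperandIndex{3u} == OperationIndex{3u} does not compile.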
-
-} // namespace graph
-} // namespace neurun
-
-namespace std
-{
-
-template <typename T, typename Tag> struct hash<::neurun::graph::Index<T, Tag>>
-{
- size_t operator()(const ::neurun::graph::Index<T, Tag> &index) const noexcept
- {
- return hash<T>()(index.value());
- }
-};
-
-} // namespace std
-
-#endif // __NEURUN_GRAPH_INDEX_H__
diff --git a/runtimes/neurun/src/graph/Model.h b/runtimes/neurun/src/graph/Model.h
deleted file mode 100644
index 20bb713af..000000000
--- a/runtimes/neurun/src/graph/Model.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_MODEL_H__
-#define __NEURUN_GRAPH_MODEL_H__
-
-#include "model/operation/Set.h"
-#include "model/operand/IndexSet.h"
-#include "model/operand/Set.h"
-
-namespace neurun
-{
-namespace graph
-{
-
-struct Model
-{
- model::operation::Set operations;
- model::operand::Set operands;
- model::operand::IndexSet inputs;
- model::operand::IndexSet outputs;
-};
-
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_MODEL_H__
diff --git a/runtimes/neurun/src/graph/dumper/Dumper.cc b/runtimes/neurun/src/graph/dumper/Dumper.cc
deleted file mode 100644
index efffc5849..000000000
--- a/runtimes/neurun/src/graph/dumper/Dumper.cc
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Dumper.h"
-
-#include <string>
-
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace dumper
-{
-
-using namespace neurun::model::operation;
-
-void Dumper::visit(const Conv2DNode &node)
-{
- VERBOSE(LIR) << "* Conv2D(Implicit)" << std::endl;
- VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ") Kernel("
- << node.getInputs().at(1).value() << ") Bias(" << node.getInputs().at(2).value()
- << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const MaxPool2DNode &node)
-{
- VERBOSE(LIR) << "* MaxPool2D(Implicit)" << std::endl;
- VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const AvgPool2DNode &node)
-{
- VERBOSE(LIR) << "* AvgPool2D(Implicit)" << std::endl;
- VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const ConcatNode &node)
-{
- VERBOSE(LIR) << "* Concat" << std::endl;
- std::string inputs;
- for (auto i : node.getInputs())
- {
- inputs += std::to_string(i.value()) + ",";
- }
- VERBOSE(LIR) << " - Inputs : IFM(" << inputs << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const FullyConnectedNode &node)
-{
- VERBOSE(LIR) << "* FullyConnected" << std::endl;
- VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ") Weight("
- << node.getInputs().at(1).value() << ") Bias(" << node.getInputs().at(2).value()
- << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const ReshapeNode &node)
-{
- VERBOSE(LIR) << "* Reshape" << std::endl;
-  // TODO The shape index should be "node.getInputs().at(1).value()", but it is not valid for now
- VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ") Shape("
- << "?"
- << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const SoftmaxNode &node)
-{
- VERBOSE(LIR) << "* Softmax" << std::endl;
- VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const PermuteNode &node)
-{
- VERBOSE(LIR) << "* Permute" << std::endl;
- VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
- VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-void Dumper::visit(const AddNode &node)
-{
- VERBOSE(LIR) << "* Add" << std::endl;
- VERBOSE(LIR) << " - Inputs : Input(" << node.getInputs().at(0).value() << ", "
- << node.getInputs().at(1).value() << ")" << std::endl;
- VERBOSE(LIR) << " - Output : Output(" << node.getOutputs().at(0).value() << ")" << std::endl;
-}
-
-} // namespace dumper
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/dumper/Dumper.h b/runtimes/neurun/src/graph/dumper/Dumper.h
deleted file mode 100644
index 8c079a11d..000000000
--- a/runtimes/neurun/src/graph/dumper/Dumper.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_DUMPER_H__
-#define __NEURUN_GRAPH_DUMPER_H__
-
-#include "model/operation/NodeVisitor.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace dumper
-{
-
-class Dumper : public model::operation::NodeVisitor
-{
-public:
- Dumper() = default;
-
-public:
- void visit(const model::operation::Conv2DNode &node) override;
- void visit(const model::operation::MaxPool2DNode &node) override;
- void visit(const model::operation::AvgPool2DNode &node) override;
- void visit(const model::operation::ConcatNode &node) override;
- void visit(const model::operation::FullyConnectedNode &node) override;
- void visit(const model::operation::ReshapeNode &node) override;
- void visit(const model::operation::SoftmaxNode &node) override;
- void visit(const model::operation::PermuteNode &node) override;
- void visit(const model::operation::AddNode &node) override;
-};
-
-} // namespace dumper
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_DUMPER_H__
diff --git a/runtimes/neurun/src/graph/operand/BackendSet.cc b/runtimes/neurun/src/graph/operand/BackendSet.cc
deleted file mode 100644
index 9a284d722..000000000
--- a/runtimes/neurun/src/graph/operand/BackendSet.cc
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BackendSet.h"
-
-#include <cassert>
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-BackendSet::BackendSet(std::initializer_list<const backend::Backend *> backends)
-{
- for (auto backend : backends)
- {
- _set.insert(backend);
- }
-}
-
-const backend::Backend *BackendSet::getOnlyElement() const
-{
- assert(_set.size() == 1u);
- return *_set.begin();
-}
-
-BackendSet BackendSet::operator|(const BackendSet &other) const
-{
- auto ret = *this;
- for (auto backend : other)
- {
- ret.add(backend);
- }
- return ret;
-}
-
-BackendSet BackendSet::operator&(const BackendSet &other) const
-{
- BackendSet ret;
- for (auto backend : other)
- {
- if (contains(backend))
- {
- ret.add(backend);
- }
- }
- return ret;
-}
-
-BackendSet BackendSet::operator-(const BackendSet &other) const
-{
- auto ret = *this;
- for (auto backend : other)
- {
- ret.remove(backend);
- }
- return ret;
-}
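-
-// Illustrative set algebra (backend names are hypothetical):
-//
-//   {cpu, acl} | {acl, srcn}  ->  {cpu, acl, srcn}  // union
-//   {cpu, acl} & {acl, srcn}  ->  {acl}             // intersection
-//   {cpu, acl} - {acl, srcn}  ->  {cpu}             // difference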
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/operand/BackendSet.h b/runtimes/neurun/src/graph/operand/BackendSet.h
deleted file mode 100644
index 8b457a084..000000000
--- a/runtimes/neurun/src/graph/operand/BackendSet.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_OPERAND_BACKEND_SET_H__
-#define __NEURUN_GRAPH_OPERAND_BACKEND_SET_H__
-
-#include <initializer_list>
-#include <unordered_set>
-
-namespace neurun
-{
-namespace backend
-{
-class Backend;
-} // namespace backend
-} // namespace neurun
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-class BackendSet
-{
-public:
- BackendSet() = default;
- BackendSet(std::initializer_list<const backend::Backend *> backends);
-
-public:
- void add(const backend::Backend *backend) { _set.insert(backend); }
- void remove(const backend::Backend *backend) { _set.erase(backend); }
- uint32_t size() const { return static_cast<uint32_t>(_set.size()); }
- bool empty() const { return _set.empty(); }
- bool contains(const backend::Backend *backend) const { return _set.find(backend) != _set.end(); }
- const backend::Backend *getOnlyElement() const;
-
-public:
- BackendSet operator|(const BackendSet &other) const; // Union
- BackendSet operator&(const BackendSet &other) const; // Intersect
- BackendSet operator-(const BackendSet &other) const; // Minus
-
-public:
- std::unordered_set<const backend::Backend *>::const_iterator begin() const
- {
- return _set.begin();
- }
- std::unordered_set<const backend::Backend *>::const_iterator end() const { return _set.end(); }
-
-private:
- std::unordered_set<const backend::Backend *> _set;
-};
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_OPERAND_BACKEND_SET_H__
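For reference, the three set operators above implement plain set algebra over backend pointers; a standalone sketch with a stub Backend type:

#include <cassert>
#include <unordered_set>

// Stub backend type; only pointer identity matters to the set
struct Backend {};

using Set = std::unordered_set<const Backend *>;

// Union, intersection and difference, matching operator|, & and - above
Set setUnion(const Set &a, const Set &b)
{
  Set ret = a;
  ret.insert(b.begin(), b.end());
  return ret;
}

Set setIntersect(const Set &a, const Set &b)
{
  Set ret;
  for (auto backend : b)
    if (a.count(backend))
      ret.insert(backend);
  return ret;
}

Set setMinus(const Set &a, const Set &b)
{
  Set ret = a;
  for (auto backend : b)
    ret.erase(backend);
  return ret;
}

int main()
{
  Backend cpu, acl;
  Set def{&cpu}, use{&cpu, &acl};
  assert(setUnion(def, use).size() == 2);
  assert(setIntersect(def, use).size() == 1);
  // use - def is exactly the set PermutationInsertionPass iterates later
  assert(setMinus(use, def).size() == 1);
  return 0;
}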
diff --git a/runtimes/neurun/src/graph/operand/Layout.h b/runtimes/neurun/src/graph/operand/Layout.h
deleted file mode 100644
index 023ecbdad..000000000
--- a/runtimes/neurun/src/graph/operand/Layout.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_OPERAND_LAYOUT_H__
-#define __NEURUN_GRAPH_OPERAND_LAYOUT_H__
-
-#include <functional>
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-enum class Layout
-{
- UNKNOWN = 0,
- NHWC,
- NCHW
-};
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
-
-namespace std
-{
-
-template <> struct hash<::neurun::graph::operand::Layout>
-{
- size_t operator()(const ::neurun::graph::operand::Layout &value) const noexcept
- {
- using type = typename std::underlying_type<::neurun::graph::operand::Layout>::type;
- return hash<type>()(static_cast<type>(value));
- }
-};
-
-} // namespace std
-
-#endif // __NEURUN_GRAPH_OPERAND_LAYOUT_H__
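As an aside, the std::hash specialization above is the standard recipe for keying unordered containers with an enum class; a self-contained version (newer standard libraries enable std::hash for enums directly, so this mainly matters for older toolchains):

#include <functional>
#include <type_traits>
#include <unordered_set>

enum class Layout
{
  UNKNOWN = 0,
  NHWC,
  NCHW
};

namespace std
{
template <> struct hash<Layout>
{
  size_t operator()(const Layout &value) const noexcept
  {
    // Hash the enum through its underlying integer type
    using type = typename std::underlying_type<Layout>::type;
    return hash<type>()(static_cast<type>(value));
  }
};
} // namespace std

int main()
{
  std::unordered_set<Layout> layouts{Layout::NHWC, Layout::NCHW};
  return layouts.count(Layout::NHWC) == 1 ? 0 : 1;
}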
diff --git a/runtimes/neurun/src/graph/operand/LayoutSet.cc b/runtimes/neurun/src/graph/operand/LayoutSet.cc
deleted file mode 100644
index 47bb5900a..000000000
--- a/runtimes/neurun/src/graph/operand/LayoutSet.cc
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "LayoutSet.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-LayoutSet::LayoutSet(std::initializer_list<Layout> layouts)
-{
- for (auto layout : layouts)
- {
- _set.insert(layout);
- }
-}
-
-LayoutSet LayoutSet::operator|(const LayoutSet &other) const
-{
- auto ret = *this;
- for (auto layout : other)
- {
- ret.add(layout);
- }
- return ret;
-}
-
-LayoutSet LayoutSet::operator&(const LayoutSet &other) const
-{
- LayoutSet ret;
- for (auto layout : other)
- {
- if (contains(layout))
- {
- ret.add(layout);
- }
- }
- return ret;
-}
-
-LayoutSet LayoutSet::operator-(const LayoutSet &other) const
-{
- auto ret = *this;
- for (auto layout : other)
- {
- ret.remove(layout);
- }
- return ret;
-}
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/operand/LayoutSet.h b/runtimes/neurun/src/graph/operand/LayoutSet.h
deleted file mode 100644
index 928259c87..000000000
--- a/runtimes/neurun/src/graph/operand/LayoutSet.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_OPERAND_LAYOUT_SET_H__
-#define __NEURUN_GRAPH_OPERAND_LAYOUT_SET_H__
-
-#include <initializer_list>
-#include <unordered_set>
-
-#include "Layout.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-class LayoutSet
-{
-public:
- LayoutSet() = default;
- LayoutSet(std::initializer_list<Layout> layouts);
-
-public:
- void add(const Layout &layout) { _set.insert(layout); }
- void remove(const Layout &layout) { _set.erase(layout); }
- uint32_t size() const { return static_cast<uint32_t>(_set.size()); }
- bool contains(const Layout &layout) const { return _set.find(layout) != _set.end(); }
-
-public:
- LayoutSet operator|(const LayoutSet &other) const; // Union
- LayoutSet operator&(const LayoutSet &other) const; // Intersect
- LayoutSet operator-(const LayoutSet &other) const; // Minus
-
-public:
- std::unordered_set<Layout>::const_iterator begin() const { return _set.begin(); }
- std::unordered_set<Layout>::const_iterator end() const { return _set.end(); }
-
-private:
- std::unordered_set<Layout> _set;
-};
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_OPERAND_LAYOUT_SET_H__
diff --git a/runtimes/neurun/src/graph/operand/LowerInfo.cc b/runtimes/neurun/src/graph/operand/LowerInfo.cc
deleted file mode 100644
index c26965911..000000000
--- a/runtimes/neurun/src/graph/operand/LowerInfo.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "LowerInfo.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-// NO IMPLEMENTATION YET
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/operand/LowerInfo.h b/runtimes/neurun/src/graph/operand/LowerInfo.h
deleted file mode 100644
index 7900e54d9..000000000
--- a/runtimes/neurun/src/graph/operand/LowerInfo.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_OPERAND_LOWER_INFO_H__
-#define __NEURUN_GRAPH_OPERAND_LOWER_INFO_H__
-
-#include <stdint.h>
-
-#include "BackendSet.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-class LowerInfo
-{
-public:
- class Shape4D
- {
- public:
- Shape4D(uint32_t n, uint32_t h, uint32_t w, uint32_t c) : _n{n}, _h{h}, _w{w}, _c{c}
- {
- // DO NOTHING
- }
-
- public:
- uint32_t n(void) const { return _n; }
- uint32_t h(void) const { return _h; }
- uint32_t w(void) const { return _w; }
- uint32_t c(void) const { return _c; }
-
- private:
- uint32_t _n;
- uint32_t _h;
- uint32_t _w;
- uint32_t _c;
- };
-
-public:
- LowerInfo(const Shape4D &shape) : _shape{shape}
- {
- // DO NOTHING
- }
-
-public:
- const Shape4D &shape(void) const { return _shape; }
- const BackendSet &def_backends(void) const { return _def_backends; }
- const BackendSet &use_backends(void) const { return _use_backends; }
-
-public:
- void addDefBackend(const backend::Backend *backend) { _def_backends.add(backend); }
- void addUseBackend(const backend::Backend *backend) { _use_backends.add(backend); }
- void removeDefBackend(const backend::Backend *backend) { _def_backends.remove(backend); }
- void removeUseBackend(const backend::Backend *backend) { _use_backends.remove(backend); }
-
-private:
- Shape4D _shape;
- BackendSet _def_backends;
- BackendSet _use_backends;
-};
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_OPERAND_LOWER_INFO_H__
diff --git a/runtimes/neurun/src/graph/operand/ParentInfo.h b/runtimes/neurun/src/graph/operand/ParentInfo.h
deleted file mode 100644
index 5e6f56237..000000000
--- a/runtimes/neurun/src/graph/operand/ParentInfo.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file ParentInfo.h
- * @brief This file contains the ParentInfo class, which uses Coordinate4D
- * to represent subsumption between operands
- */
-
-#ifndef __NEURUN_GRAPH_OPERAND_PARENT_INFO_H__
-#define __NEURUN_GRAPH_OPERAND_PARENT_INFO_H__
-
-#include <stdint.h>
-
-#include "model/operand/Index.h"
-#include "util/feature/Coordinate4D.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-using neurun::util::feature::Coordinate4D;
-
-/**
- * @brief Class to represent parent operand in child operand
- */
-class ParentInfo
-{
-public:
- /**
- * @brief Construct a new ParentInfo object
- * @param[in] parent Index of parent operand
- * @param[in] coordinate Offset of child operand in parent operand
- * @return
- */
- ParentInfo(const model::operand::Index parent, const Coordinate4D &coordinate)
- : _parent{parent}, _coordinate{coordinate}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Return parent index
- * @return Parent index
- */
- model::operand::Index parent(void) const { return _parent; }
- /**
- * @brief Return offset in parent
- * @return Offset
- */
- Coordinate4D offset(void) const { return _coordinate; }
-
-private:
- model::operand::Index _parent;
- Coordinate4D _coordinate;
-};
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_OPERAND_PARENT_INFO_H__
diff --git a/runtimes/neurun/src/graph/operand/Shape4DConvert.h b/runtimes/neurun/src/graph/operand/Shape4DConvert.h
deleted file mode 100644
index 73cf0903a..000000000
--- a/runtimes/neurun/src/graph/operand/Shape4DConvert.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_OPERAND_SHAPE4D_CONVERT_H__
-#define __NEURUN_GRAPH_OPERAND_SHAPE4D_CONVERT_H__
-
-#include "LowerInfo.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operand
-{
-
-inline LowerInfo::Shape4D asShape4D(const model::operand::Shape &shape)
-{
- switch (shape.rank())
- {
- case 0u:
- return LowerInfo::Shape4D(1, 1, 1, 1);
-
- case 1u:
- return LowerInfo::Shape4D(shape.dim(0), 1, 1, 1);
-
- case 2u:
- return LowerInfo::Shape4D(shape.dim(0), shape.dim(1), 1, 1);
-
- case 3u:
- return LowerInfo::Shape4D(shape.dim(0), shape.dim(1), shape.dim(2), 1);
-
- case 4u:
- return LowerInfo::Shape4D(shape.dim(0), shape.dim(1), shape.dim(2), shape.dim(3));
-
- default:
- throw std::runtime_error{"Unsupported rank > 4"};
- }
-}
-
-} // namespace operand
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_OPERAND_SHAPE4D_CONVERT_H__
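The conversion above right-pads lower-rank shapes with 1s up to rank 4; the same idea over a plain dimension vector (hypothetical helper, not part of the runtime):

#include <array>
#include <cstdint>
#include <stdexcept>
#include <vector>

// Right-pad a shape of rank <= 4 with 1s, mirroring asShape4D above
std::array<uint32_t, 4> toShape4D(const std::vector<uint32_t> &dims)
{
  if (dims.size() > 4)
    throw std::runtime_error("Unsupported rank > 4");
  std::array<uint32_t, 4> out{{1, 1, 1, 1}};
  for (size_t i = 0; i < dims.size(); ++i)
    out[i] = dims[i];
  return out;
}

int main()
{
  auto s = toShape4D({3, 5}); // rank-2 {3, 5} becomes {3, 5, 1, 1}
  return (s[0] == 3 && s[1] == 5 && s[2] == 1 && s[3] == 1) ? 0 : 1;
}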
diff --git a/runtimes/neurun/src/graph/operation/LowerInfo.cc b/runtimes/neurun/src/graph/operation/LowerInfo.cc
deleted file mode 100644
index 7862fd0c9..000000000
--- a/runtimes/neurun/src/graph/operation/LowerInfo.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "LowerInfo.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operation
-{
-
-LowerInfo::LowerInfo(const backend::Backend *backend) : _backend(backend)
-{
- // DO NOTHING
-}
-
-} // namespace operation
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/operation/LowerInfo.h b/runtimes/neurun/src/graph/operation/LowerInfo.h
deleted file mode 100644
index e920b0eb9..000000000
--- a/runtimes/neurun/src/graph/operation/LowerInfo.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_OPERATION_LOWER_INFO_H__
-#define __NEURUN_GRAPH_OPERATION_LOWER_INFO_H__
-
-#include <string>
-
-#include "backend/BackendManager.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operation
-{
-
-class LowerInfo
-{
-public:
- LowerInfo(const backend::Backend *backend);
- const backend::Backend *backend() const { return _backend; }
-
-private:
- const backend::Backend *_backend;
-};
-
-} // namespace operation
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_OPERATION_LOWER_INFO_H__
diff --git a/runtimes/neurun/src/graph/pass/OperandPass.cc b/runtimes/neurun/src/graph/pass/OperandPass.cc
deleted file mode 100644
index 3c24d3830..000000000
--- a/runtimes/neurun/src/graph/pass/OperandPass.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "OperandPass.h"
-
-#include "graph/Graph.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-
-void OperandPass::run()
-{
- _graph.operands().iterate([&](const model::operand::Index &index,
- model::operand::Object &object) { callback(index, object); });
-}
-
-} // namespace pass
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/pass/OperandPass.h b/runtimes/neurun/src/graph/pass/OperandPass.h
deleted file mode 100644
index b84391082..000000000
--- a/runtimes/neurun/src/graph/pass/OperandPass.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_PASS_OPERAND_PASS_H__
-#define __NEURUN_GRAPH_PASS_OPERAND_PASS_H__
-
-#include "Pass.h"
-#include "model/operand/Index.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-class Object;
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-
-class OperandPass : public Pass
-{
-public:
- using Pass::Pass;
-
-public:
- virtual std::string id() = 0;
- virtual void run() override final;
- virtual void callback(const model::operand::Index &i, model::operand::Object &o) = 0;
-};
-
-} // namespace pass
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_PASS_OPERAND_PASS_H__
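OperandPass (and OperationPass below) follow the template-method pattern: run() owns the iteration and subclasses supply only id() and callback(). A standalone sketch with the graph reduced to a vector of ints:

#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// The "graph" is reduced to a vector of int operands
struct Graph
{
  std::vector<int> operands;
  void iterate(const std::function<void(uint32_t, int &)> &fn)
  {
    for (uint32_t i = 0; i < operands.size(); ++i)
      fn(i, operands[i]);
  }
};

// run() owns iteration; subclasses provide id() and the per-operand callback()
class OperandPass
{
public:
  OperandPass(Graph &graph) : _graph{graph} {}
  virtual ~OperandPass() = default;
  virtual std::string id() = 0;
  void run() { _graph.iterate([&](uint32_t i, int &o) { callback(i, o); }); }
  virtual void callback(uint32_t index, int &operand) = 0;

protected:
  Graph &_graph;
};

class PrintPass : public OperandPass
{
public:
  using OperandPass::OperandPass;
  std::string id() override { return "PrintPass"; }
  void callback(uint32_t index, int &operand) override
  {
    std::cout << id() << ": operand " << index << " = " << operand << "\n";
  }
};

int main()
{
  Graph g{{7, 11}};
  PrintPass pass{g};
  pass.run();
}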
diff --git a/runtimes/neurun/src/graph/pass/OperationPass.cc b/runtimes/neurun/src/graph/pass/OperationPass.cc
deleted file mode 100644
index e71f79188..000000000
--- a/runtimes/neurun/src/graph/pass/OperationPass.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "OperationPass.h"
-
-#include "graph/Graph.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-
-void OperationPass::run()
-{
- _graph.operations().iterate([&](const model::operation::Index &index,
- model::operation::Node &node) { callback(index, node); });
-}
-
-} // namespace pass
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/pass/OperationPass.h b/runtimes/neurun/src/graph/pass/OperationPass.h
deleted file mode 100644
index e86f1aa57..000000000
--- a/runtimes/neurun/src/graph/pass/OperationPass.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file OperationPass.h
- * @brief This file contains OperationPass class
- */
-
-#ifndef __NEURUN_GRAPH_PASS_OPERATION_PASS_H__
-#define __NEURUN_GRAPH_PASS_OPERATION_PASS_H__
-
-#include "Pass.h"
-
-#include "model/operation/Index.h"
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-
-/**
- * @brief Class that iterates over operations and calls the callback() method
- */
-class OperationPass : public Pass
-{
-public:
- using Pass::Pass;
-
-public:
- /**
- * @brief Returns string id for this pass. Same as the class name.
- *
- * @return string id
- */
- virtual std::string id() = 0;
-
- /**
- * @brief Run the pass
- */
- virtual void run() override final;
-
- /**
- * @brief The function that will be executed for each operation
- *
- * @param[in] i Index of the operation node
- * @param[in] n The operation node
- */
- virtual void callback(const model::operation::Index &i, model::operation::Node &n) = 0;
-};
-
-} // namespace pass
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_PASS_OPERATION_PASS_H__
diff --git a/runtimes/neurun/src/graph/pass/PermutationEliminationPass.cc b/runtimes/neurun/src/graph/pass/PermutationEliminationPass.cc
deleted file mode 100644
index 848f6b574..000000000
--- a/runtimes/neurun/src/graph/pass/PermutationEliminationPass.cc
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "PermutationEliminationPass.h"
-
-#include "model/operand/Object.h"
-#include "graph/Graph.h"
-#include "backend/interface/IConfig.h"
-#include "util/logging.h"
-#include "compiler/BackendResolver.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-void PermutationEliminationPass::callback(const model::operand::Index &inp_index,
- model::operand::Object &object)
-{
- if (_graph.getInputs().contains(inp_index))
- {
- eliminateInput(inp_index, object);
- }
- else if (_graph.getOutputs().contains(inp_index))
- {
- eliminateOutput(inp_index, object);
- }
-}
-
-void PermutationEliminationPass::eliminateInput(const model::operand::Index &inp_index,
- model::operand::Object &object)
-{
- auto &model_inputs = _graph.getInputs();
-
- // get uses of the model's given input
- auto uses = object.getUses();
-
- // the input must have exactly one use (the permutation)
- if (uses.size() != 1)
- {
- return;
- }
-
- for (auto input_use : uses.list())
- {
- auto &perm_operation = _graph.operations().at(input_use);
- auto perm_inputs = perm_operation.getInputs();
-
- auto perm_outputs = perm_operation.getOutputs();
-
- if (!isPermuteLayerToEliminate(perm_inputs, perm_outputs, true))
- {
- return;
- }
-
- assert(perm_inputs.at(0) == inp_index);
-
- VERBOSE(PermutationEliminationPass::EliminateInput) << "remove NHWC_TO_NCHW permutation\n";
-
- // set model's new input, which was output of permutation
- model_inputs.replace(inp_index, perm_outputs.at(0));
-
- // remove model's input, which is also input of permutation
- _graph.removeOperand(inp_index);
-
- // remove permutation operation
- _graph.operations().remove(input_use);
-
- VERBOSE(PermutationEliminationPass::EliminateInput)
- << inp_index.value() << " is model's input and is removed. New input is "
- << perm_outputs.at(0).value() << "\n"
- << input_use.value() << " is removed permutation operation\n";
- }
-}
-
-void PermutationEliminationPass::eliminateOutput(const model::operand::Index &out_index,
- model::operand::Object &object)
-{
- auto &model_outputs = _graph.getOutputs();
-
- // get defs of the model's given output
- auto defs = object.getDef();
-
- // the output must have exactly one def (the permutation)
- if (defs.size() != 1)
- {
- return;
- }
-
- for (auto output_def : defs.list())
- {
- auto &perm_operation = _graph.operations().at(output_def);
- auto perm_outputs = perm_operation.getOutputs();
-
- auto perm_inputs = perm_operation.getInputs();
- if (!isPermuteLayerToEliminate(perm_inputs, perm_outputs, false))
- {
- return;
- }
-
- assert(perm_outputs.at(0) == out_index);
-
- VERBOSE(PermutationEliminationPass::EliminateOutput) << "remove NCHW_TO_NHWC permutation\n";
-
- // Update operations' output that is used by permute operand
- for (auto perm_input_index : perm_inputs)
- {
- auto &perm_input_operand = _graph.operands().at(perm_input_index);
- perm_input_operand.removeUse(output_def);
- }
-
- // set model's new output, which was input of permutation
- model_outputs.replace(out_index, perm_inputs.at(0));
-
- // remove model's output, which is also output of permutation
- _graph.removeOperand(out_index);
-
- // remove permutation operation
- _graph.operations().remove(output_def);
-
- VERBOSE(PermutationEliminationPass::EliminateOutput)
- << out_index.value() << " is model's output and is removed. New output is "
- << perm_inputs.at(0).value() << "\n"
- << output_def.value() << " is removed permutation operation\n";
- }
-}
-
-bool PermutationEliminationPass::isPermuteLayerToEliminate(
- const model::operand::IndexSet &inp_indexes, const model::operand::IndexSet &out_indexes,
- bool is_for_model_input)
-{
- auto input_def_backends = _graph.operands().at(inp_indexes.at(0)).lower_info()->def_backends();
- auto output_def_backends = _graph.operands().at(out_indexes.at(0)).lower_info()->def_backends();
-
- // Bail out before calling getOnlyElement(), which asserts exactly one element
- if (input_def_backends.size() != 1 || output_def_backends.size() != 1)
- {
- return false;
- }
-
- auto input_layout = input_def_backends.getOnlyElement()->config()->getOperandLayout();
- auto output_layout = output_def_backends.getOnlyElement()->config()->getOperandLayout();
-
- // all operands' backend must be the same
- for (auto index : inp_indexes)
- {
- auto op_backend_set = _graph.operands().at(index).lower_info()->def_backends();
- if (op_backend_set.size() != 1 ||
- input_layout != op_backend_set.getOnlyElement()->config()->getOperandLayout())
- {
- return false;
- }
- }
- // all operands' backend must be the same
- for (auto index : out_indexes)
- {
- auto op_backend_set = _graph.operands().at(index).lower_info()->def_backends();
- if (op_backend_set.size() != 1 ||
- output_layout != op_backend_set.getOnlyElement()->config()->getOperandLayout())
- {
- return false;
- }
- }
-
- if (is_for_model_input)
- {
- // check if this is NHWC_TO_NCHW permutation: must have single input, which is model's input
- return (inp_indexes.size() == 1 && input_layout == graph::operand::Layout::NHWC &&
- output_layout == graph::operand::Layout::NCHW);
- }
-
- // check if this is NCHW_TO_NHWC permutation: must have single output, which is model's output
- return (out_indexes.size() == 1 && input_layout == graph::operand::Layout::NCHW &&
- output_layout == graph::operand::Layout::NHWC);
-}
-
-} // namespace pass
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/pass/PermutationEliminationPass.h b/runtimes/neurun/src/graph/pass/PermutationEliminationPass.h
deleted file mode 100644
index 2b528c479..000000000
--- a/runtimes/neurun/src/graph/pass/PermutationEliminationPass.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_PASS_PERMUTATION_ELIMINATION_PASS_H__
-#define __NEURUN_GRAPH_PASS_PERMUTATION_ELIMINATION_PASS_H__
-
-#include "OperandPass.h"
-#include "model/operand/Object.h"
-#include "model/operand/IndexSet.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-
-class PermutationEliminationPass : public OperandPass
-{
-public:
- using OperandPass::OperandPass;
-
-public:
- virtual std::string id() override { return "PermutationEliminationPass"; }
-
- virtual void callback(const model::operand::Index &index, model::operand::Object &object);
-
-private:
- /**
- * @brief Remove a Permute operation that permutes a model input
- *
- * Note: This function also removes the model's input and
- * sets the output of the permutation as the model's new input
- *
- * @param inp_index is the target operand index for the elimination
- * @param object is the target operand object for the elimination
- *
- * @return
- */
- void eliminateInput(const model::operand::Index &inp_index, model::operand::Object &object);
-
- /**
- * @brief Remove a Permute operation that permutes a model output
- *
- * Note: This function also removes the model's output and
- * sets the input of the permutation as the model's new output
- *
- * @param out_index is the target operand index for the elimination
- * @param object is the target operand object for the elimination
- *
- * @return
- */
- void eliminateOutput(const model::operand::Index &out_index, model::operand::Object &object);
-
- /**
- * @brief Determine whether the given operands are the input and output of a permute
- * layer that must be eliminated
- *
- * @param inp_indexes indexes of the operation's input operands
- * @param out_indexes indexes of the operation's output operands
- * @param is_for_model_input true when checking a model input, false for a model output
- *
- * @return true if the operands belong to a permute layer that must be eliminated
- */
- bool isPermuteLayerToEliminate(const model::operand::IndexSet &inp_indexes,
- const model::operand::IndexSet &out_indexes,
- bool is_for_model_input);
-};
-
-} // namespace pass
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_PASS_PERMUTATION_ELIMINATION_PASS_H__
diff --git a/runtimes/neurun/src/graph/pass/PermutationInsertionPass.cc b/runtimes/neurun/src/graph/pass/PermutationInsertionPass.cc
deleted file mode 100644
index 9b833b8c5..000000000
--- a/runtimes/neurun/src/graph/pass/PermutationInsertionPass.cc
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "PermutationInsertionPass.h"
-
-#include <cassert>
-#include <utility>
-#include <unordered_map>
-
-#include "model/operand/Object.h"
-#include "graph/operation/LowerInfo.h"
-#include "graph/Graph.h"
-#include "backend/interface/IConfig.h"
-#include "util/logging.h"
-#include "cpp14/memory.h"
-#include "model/operation/PermuteNode.h"
-#include "graph/operand/Shape4DConvert.h"
-#include "compiler/BackendResolver.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-
-void PermutationInsertionPass::callback(const model::operand::Index &index,
- model::operand::Object &object)
-{
- auto &&operand_li = object.lower_info();
- assert(operand_li);
-
- // NOTE Later, constants also will have Def
- // Ignore constants
- if (operand_li->def_backends().size() == 0)
- {
- return;
- }
-
- std::list<model::operation::Index> permute_indexes;
-
- // Build a map for all necessary types of operands
- std::unordered_map<const backend::Backend *, model::operand::Index> backend_to_index;
- {
- assert(operand_li->def_backends().size() == 1);
- for (auto backend : operand_li->def_backends())
- {
- backend_to_index.insert({backend, index});
- }
-
- auto insert_set = operand_li->use_backends() - operand_li->def_backends();
- for (auto backend : insert_set)
- {
- const auto permute_operation_index = insertPermute(index, backend);
- permute_indexes.push_back(permute_operation_index);
- VERBOSE(PermutationInsertionPass) << "Insert 'Permute' operation for operand "
- << index.value() << std::endl;
- const auto &permute_operation = _graph.operations().at(permute_operation_index);
- const auto permuted_operand_index = permute_operation.getOutputs().at(0);
- backend_to_index.insert({backend, permuted_operand_index});
- }
- }
-
- // Update operations' input that uses this operand
- {
- std::list<model::operation::Index> remove_list;
-
- auto uses = object.getUses();
- for (auto use : uses.list())
- {
- // If permute operation, ignore it
- if (std::find(permute_indexes.begin(), permute_indexes.end(), use) != permute_indexes.end())
- continue;
-
- auto &operation = _graph.operations().at(use);
- auto operation_li = _graph.getLowerInfo(use);
- assert(operation_li);
- auto backend = operation_li->backend();
-
- auto use_node_inputs = operation.getInputs();
- assert(use_node_inputs.contains(index));
-
- auto new_index = backend_to_index.at(backend);
- if (index != new_index)
- {
- // Update from operation
- operation.replaceInput(index, new_index);
-
- // Update from operand
- // Removal is deferred; we are still iterating over this operand's use list
- remove_list.push_back(use);
- _graph.operands().at(new_index).appendUse(use);
- }
- }
-
- for (auto &operation : remove_list)
- {
- object.removeUse(operation);
- }
- }
-}
-
-model::operation::Index
-PermutationInsertionPass::insertPermute(const model::operand::Index &operand_index,
- const backend::Backend *backend)
-{
- assert(!_graph.isBuildingPhase());
-
- auto &operand = _graph.operands().at(operand_index);
-
- // Generate output operand and permute operation
- auto out_operand_index = _graph.addOperand(operand.shape(), operand.typeInfo());
- auto &out_operand = _graph.operands().at(out_operand_index);
- out_operand.setAsOperationOutput();
- // change model output if operand_index is model output index
- auto &model_outputs = _graph.getOutputs();
- if (model_outputs.contains(operand_index))
- {
- model_outputs.replace(operand_index, out_operand_index);
- }
- out_operand.setAsOperationOutput();
- auto out_operand_li =
- nnfw::cpp14::make_unique<operand::LowerInfo>(operand::asShape4D(operand.shape()));
- out_operand_li->addDefBackend(backend);
- out_operand_li->addUseBackend(backend);
- out_operand.lower_info(std::move(out_operand_li));
-
- // Update LowerInfo of input operand
- operand.lower_info()->removeUseBackend(backend);
- operand.lower_info()->addUseBackend(operand.lower_info()->def_backends().getOnlyElement());
-
- using PermuteNode = model::operation::PermuteNode;
-
- // Find Permutation Type
- auto type = [&]() {
- auto input_layout =
- operand.lower_info()->def_backends().getOnlyElement()->config()->getOperandLayout();
- auto output_layout =
- out_operand.lower_info()->def_backends().getOnlyElement()->config()->getOperandLayout();
-
- if (input_layout == graph::operand::Layout::NHWC &&
- output_layout == graph::operand::Layout::NCHW)
- {
- return PermuteNode::Type::NHWC_TO_NCHW;
- }
- else if (input_layout == graph::operand::Layout::NCHW &&
- output_layout == graph::operand::Layout::NHWC)
- {
- return PermuteNode::Type::NCHW_TO_NHWC;
- }
- else
- {
- return PermuteNode::Type::COPY;
- }
- }();
-
- // Insert permute operation to the graph
- auto insert_node = nnfw::cpp14::make_unique<PermuteNode>(operand_index, out_operand_index, type);
-
- auto node_index = _graph.operations().append(std::move(insert_node));
- const auto &node = _graph.operations().at(node_index);
-
- _graph.setLowerInfo(node_index, nnfw::cpp14::make_unique<graph::operation::LowerInfo>(
- _graph.backend_resolver()->getDefaultBackend()));
-
- // Update Use/Def info
- {
- _graph.operands().at(operand_index).appendUse(node_index);
-
- auto node_out_indexes = node.getOutputs();
- auto node_out_index = node_out_indexes.at(model::operand::IO::Index{0});
- _graph.operands().at(node_out_index).appendDef(node_index);
- }
- return node_index;
-}
-} // namespace pass
-} // namespace graph
-} // namespace neurun
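The layout-pair dispatch inside insertPermute() reduces to a small pure function; restated standalone with stand-in enums:

#include <cassert>

// Stand-in enums mirroring graph::operand::Layout and PermuteNode::Type
enum class Layout { UNKNOWN, NHWC, NCHW };
enum class PermuteType { NHWC_TO_NCHW, NCHW_TO_NHWC, COPY };

// Same selection logic as the lambda in insertPermute() above
PermuteType permuteTypeFor(Layout in, Layout out)
{
  if (in == Layout::NHWC && out == Layout::NCHW)
    return PermuteType::NHWC_TO_NCHW;
  if (in == Layout::NCHW && out == Layout::NHWC)
    return PermuteType::NCHW_TO_NHWC;
  return PermuteType::COPY;
}

int main()
{
  assert(permuteTypeFor(Layout::NHWC, Layout::NCHW) == PermuteType::NHWC_TO_NCHW);
  assert(permuteTypeFor(Layout::NHWC, Layout::NHWC) == PermuteType::COPY);
  return 0;
}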
diff --git a/runtimes/neurun/src/graph/pass/PermutationInsertionPass.h b/runtimes/neurun/src/graph/pass/PermutationInsertionPass.h
deleted file mode 100644
index b2d417e82..000000000
--- a/runtimes/neurun/src/graph/pass/PermutationInsertionPass.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_PASS_PERMUTATION_INSERTION_PASS_H__
-#define __NEURUN_GRAPH_PASS_PERMUTATION_INSERTION_PASS_H__
-
-#include "OperandPass.h"
-#include "model/operand/Object.h" //for model::operation::Index
-
-namespace neurun
-{
-namespace graph
-{
-namespace pass
-{
-
-class PermutationInsertionPass : public OperandPass
-{
-public:
- using OperandPass::OperandPass;
-
-public:
- virtual std::string id() override { return "PermutationInsertionPass"; }
- virtual void callback(const model::operand::Index &index, model::operand::Object &object);
-
- /**
- * @brief Insert Permute operation that has given operand as input
- *
- * @param operand_index is the target operand index for the insertion
- * @param backend is the output operand's backend type
- *
- * @return Index of the inserted Permute operation
- */
- model::operation::Index insertPermute(const model::operand::Index &operand_index,
- const backend::Backend *backend);
-
-private:
-};
-
-} // namespace pass
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_PASS_PERMUTATION_INSERTION_PASS_H__
diff --git a/runtimes/neurun/src/graph/verifier/Verifier.cc b/runtimes/neurun/src/graph/verifier/Verifier.cc
deleted file mode 100644
index a5b53af85..000000000
--- a/runtimes/neurun/src/graph/verifier/Verifier.cc
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Verifier.h"
-
-#include "graph/Graph.h"
-
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace verifier
-{
-
-//
-// DAGChecker
-//
-
-bool DAGChecker::verify(const Graph &graph) const
-{
- auto &operations = graph.operations();
- bool cyclic = false;
-
- std::unordered_map<model::operation::Index, bool> visited;
- operations.iterate([&](const model::operation::Index &index, const model::operation::Node &) {
- visited[index] = false;
- });
- std::unordered_map<model::operation::Index, bool> on_stack = visited; // Copy from visited
-
- std::function<void(const model::operation::Index &index, const model::operation::Node &)>
- dfs_recursive =
- [&](const model::operation::Index &index, const model::operation::Node &node) -> void {
- if (on_stack[index])
- cyclic = true;
- if (visited[index])
- return;
- visited[index] = true;
- on_stack[index] = true;
-
- for (auto output : node.getOutputs())
- {
- const auto &operand = graph.operands().at(output);
- for (const auto &use : operand.getUses().list())
- {
- dfs_recursive(use, graph.operations().at(use));
- }
- }
-
- on_stack[index] = false;
- };
-
- operations.iterate(dfs_recursive);
-
- return !cyclic;
-}
-
-//
-// EdgeConsistencyChecker
-//
-
-bool EdgeConsistencyChecker::verify(const Graph &graph) const
-{
- auto &operations = graph.operations();
- uint32_t mismatches = 0;
- operations.iterate([&](const model::operation::Index &index, const model::operation::Node &node) {
- for (auto operand_index : node.getInputs())
- {
- auto &operand = graph.operands().at(operand_index);
- mismatches += (operand.getUses().contains(index) ? 0 : 1);
- }
- for (auto operand_index : node.getOutputs())
- {
- auto &operand = graph.operands().at(operand_index);
- mismatches += (operand.getDef().contains(index) ? 0 : 1);
- }
- });
- return mismatches == 0;
-}
-
-} // namespace verifier
-} // namespace graph
-} // namespace neurun
diff --git a/runtimes/neurun/src/graph/verifier/Verifier.h b/runtimes/neurun/src/graph/verifier/Verifier.h
deleted file mode 100644
index 5f1f79ee6..000000000
--- a/runtimes/neurun/src/graph/verifier/Verifier.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_GRAPH_VERIFIER_VERIFIER_H__
-#define __NEURUN_GRAPH_VERIFIER_VERIFIER_H__
-
-namespace neurun
-{
-namespace graph
-{
-class Graph;
-} // namespace graph
-} // namespace neurun
-
-namespace neurun
-{
-namespace graph
-{
-namespace verifier
-{
-
-struct IVerifier
-{
- virtual ~IVerifier() = default;
- virtual bool verify(const Graph &graph) const = 0;
-};
-
-} // namespace verifier
-} // namespace graph
-} // namespace neurun
-
-namespace neurun
-{
-namespace graph
-{
-namespace verifier
-{
-
-class DAGChecker : public IVerifier
-{
-public:
- virtual bool verify(const Graph &graph) const override;
-};
-
-class EdgeConsistencyChecker : public IVerifier
-{
-public:
- virtual bool verify(const Graph &graph) const override;
-};
-
-} // namespace verifier
-} // namespace graph
-} // namespace neurun
-
-#endif // __NEURUN_GRAPH_VERIFIER_VERIFIER_H__
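DAGChecker::verify above is a depth-first search with an on-stack marker; the same acyclicity check over a plain adjacency list:

#include <functional>
#include <vector>

// Returns true when the directed graph has no cycle, mirroring DAGChecker
bool isAcyclic(const std::vector<std::vector<int>> &adj)
{
  const int n = static_cast<int>(adj.size());
  std::vector<bool> visited(n, false), on_stack(n, false);
  bool cyclic = false;

  std::function<void(int)> dfs = [&](int u) {
    if (on_stack[u]) // back edge: we reached a node still on the DFS stack
      cyclic = true;
    if (visited[u])
      return;
    visited[u] = true;
    on_stack[u] = true;
    for (int v : adj[u])
      dfs(v);
    on_stack[u] = false;
  };

  for (int u = 0; u < n; ++u)
    dfs(u);
  return !cyclic;
}

int main()
{
  // 0 -> 1 -> 2 is a DAG; adding 2 -> 0 introduces a cycle
  std::vector<std::vector<int>> dag{{1}, {2}, {}};
  std::vector<std::vector<int>> cycle{{1}, {2}, {0}};
  return (isAcyclic(dag) && !isAcyclic(cycle)) ? 0 : 1;
}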
diff --git a/runtimes/neurun/src/kernel/CMakeLists.txt b/runtimes/neurun/src/kernel/CMakeLists.txt
deleted file mode 100644
index a39823102..000000000
--- a/runtimes/neurun/src/kernel/CMakeLists.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-add_subdirectory(cpu)
-add_subdirectory(acl_cl)
diff --git a/runtimes/neurun/src/kernel/acl_cl/CLFunction.h b/runtimes/neurun/src/kernel/acl_cl/CLFunction.h
deleted file mode 100644
index f34210c8a..000000000
--- a/runtimes/neurun/src/kernel/acl_cl/CLFunction.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_ACL_CL_CL_FUNCTION_H__
-#define __NEURUN_KERNEL_ACL_CL_CL_FUNCTION_H__
-
-#include "exec/interface/IFunction.h"
-#include <arm_compute/runtime/IFunction.h>
-#include <memory>
-
-namespace neurun
-{
-namespace kernel
-{
-namespace acl_cl
-{
-
-class CLFunction : public ::neurun::exec::IFunction
-{
-public:
- CLFunction() = delete;
-
-public:
- CLFunction(std::unique_ptr<::arm_compute::IFunction> &&func)
- : _func(std::forward<std::unique_ptr<::arm_compute::IFunction>>(func))
- {
- // DO NOTHING
- }
-
-public:
- void run() override { _func->run(); }
- void prepare() override { _func->prepare(); }
-
-private:
- std::unique_ptr<::arm_compute::IFunction> _func;
-};
-
-} // namespace acl_cl
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_ACL_CL_CL_FUNCTION_H__
diff --git a/runtimes/neurun/src/kernel/acl_cl/CMakeLists.txt b/runtimes/neurun/src/kernel/acl_cl/CMakeLists.txt
deleted file mode 100644
index 0658effea..000000000
--- a/runtimes/neurun/src/kernel/acl_cl/CMakeLists.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-file(GLOB SOURCES "*.cc")
-
-add_library(${LIB_NEURUN_KERNEL_ACL_CL} STATIC ${SOURCES})
-
-target_include_directories(${LIB_NEURUN_KERNEL_ACL_CL} PUBLIC ${NNFW_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN_KERNEL_ACL_CL} PUBLIC ${NEURUN_INCLUDE_DIR})
-
-target_link_libraries(${LIB_NEURUN_KERNEL_ACL_CL} arm_compute)
-target_link_libraries(${LIB_NEURUN_KERNEL_ACL_CL} nnfw_lib_misc)
-
-set_target_properties(${LIB_NEURUN_KERNEL_ACL_CL} PROPERTIES POSITION_INDEPENDENT_CODE ON)
-set_target_properties(${LIB_NEURUN_KERNEL_ACL_CL} PROPERTIES OUTPUT_NAME kernel_acl_cl)
-install(TARGETS ${LIB_NEURUN_KERNEL_ACL_CL} DESTINATION lib/neurun)
diff --git a/runtimes/neurun/src/kernel/acl_cl/ConcatLayer.cc b/runtimes/neurun/src/kernel/acl_cl/ConcatLayer.cc
deleted file mode 100644
index 3844317ab..000000000
--- a/runtimes/neurun/src/kernel/acl_cl/ConcatLayer.cc
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ConcatLayer.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-#include "util/feature/nchw/View.h"
-#include "util/logging.h"
-
-namespace
-{
-
-bool matchSizeExceptAxis(const ::neurun::backend::acl_cl::operand::ICLTensor *t1,
- const ::neurun::backend::acl_cl::operand::ICLTensor *t2, uint32_t axis)
-{
- assert(t1->num_dimensions() <= 4);
- assert(t2->num_dimensions() <= 4);
-
- for (uint32_t i = 0; i < 4; i++)
- {
- if (axis == i)
- continue;
- if (t1->dimension(i) != t2->dimension(i))
- return false;
- }
- return true;
-}
-
-} // namespace {anonymous}
-
-namespace neurun
-{
-namespace kernel
-{
-namespace acl_cl
-{
-
-ConcatLayer::ConcatLayer()
- : _input_allocs(), _output_alloc(nullptr), _axis(0), _input_type(OperandType::SCALAR_FLOAT32)
-{
- // DO NOTHING
-}
-
-bool ConcatLayer::concatenationFloat32()
-{
- // Input and output size check
- {
- // NOTE Supports only tensors of rank 4 or less
-
- uint32_t axis_sum = 0;
-
- for (auto input : _input_allocs)
- {
- assert(matchSizeExceptAxis(_output_alloc, input, _axis));
- axis_sum += input->dimension(_axis);
- }
-
- assert(_output_alloc->dimension(_axis) == axis_sum);
- }
-
- VERBOSE(Concat_RUN) << "START Concat" << std::endl;
-
- // Perform operation
- {
- uint32_t axis_offset = 0;
-
- auto &queue = ::arm_compute::CLScheduler::get().queue();
-
- _output_alloc->map(queue);
- util::feature::nchw::View<float> output_view{_output_alloc};
-
- for (auto input : _input_allocs)
- {
- input->map(queue);
- const util::feature::nchw::View<float> input_reader{input};
-
- for (uint32_t n = 0; n < input_reader.shape().N; n++)
- {
- for (uint32_t c = 0; c < input_reader.shape().C; c++)
- {
- for (uint32_t h = 0; h < input_reader.shape().H; h++)
- {
- for (uint32_t w = 0; w < input_reader.shape().W; w++)
- {
- uint32_t no = (_axis == 3) ? axis_offset : 0;
- uint32_t co = (_axis == 2) ? axis_offset : 0;
- uint32_t ho = (_axis == 1) ? axis_offset : 0;
- uint32_t wo = (_axis == 0) ? axis_offset : 0;
- output_view.at(n + no, c + co, h + ho, w + wo) = input_reader.at(n, c, h, w);
- }
- }
- }
- }
- if (_axis == 3)
- axis_offset += input_reader.shape().N;
- if (_axis == 2)
- axis_offset += input_reader.shape().C;
- if (_axis == 1)
- axis_offset += input_reader.shape().H;
- if (_axis == 0)
- axis_offset += input_reader.shape().W;
-
- input->unmap(queue);
- }
- _output_alloc->unmap(queue);
- }
-
- VERBOSE(Concat_RUN) << "End Concat" << std::endl;
-
- return true;
-}
-
-void ConcatLayer::configure(
- const std::vector<::neurun::backend::acl_cl::operand::ICLTensor *> &input_allocs, int32_t axis,
- ::neurun::backend::acl_cl::operand::ICLTensor *output_alloc)
-{
- _input_allocs = input_allocs;
- _output_alloc = output_alloc;
-
- assert(axis < 4);
-
- // This map converts an NNAPI axis (NHWC order) to the reversed-NCHW
- // (WHCN) numbering used in run(): N->3, H->1, W->0, C->2
- static const uint32_t axis_map[] = {3, 1, 0, 2};
- _axis = axis_map[axis];
-
- // TODO Support Quant8
- _input_type = OperandType::TENSOR_FLOAT32;
-}
-
-void ConcatLayer::run()
-{
- if (_input_type == OperandType::TENSOR_FLOAT32)
- {
- concatenationFloat32();
- }
- else if (_input_type == OperandType::TENSOR_QUANT8_ASYMM)
- {
- throw std::runtime_error("NYI - concatenationQuant8()");
- }
-}
-
-} // namespace acl_cl
-} // namespace kernel
-} // namespace neurun
diff --git a/runtimes/neurun/src/kernel/acl_cl/ConcatLayer.h b/runtimes/neurun/src/kernel/acl_cl/ConcatLayer.h
deleted file mode 100644
index d468a6dfb..000000000
--- a/runtimes/neurun/src/kernel/acl_cl/ConcatLayer.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_ACL_CL_CONCAT_LAYER_H__
-#define __NEURUN_KERNEL_ACL_CL_CONCAT_LAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include <arm_compute/runtime/IFunction.h>
-
-#include "model/operand/DataType.h"
-#include "backend/acl_cl/operand/ICLTensor.h"
-
-using OperandType = neurun::model::operand::DataType;
-
-namespace neurun
-{
-namespace kernel
-{
-namespace acl_cl
-{
-
-//
-// neurun::kernel::acl_cl::ConcatLayer
-// A naive implementation of ConcatLayer for ACL
-//
-
-class ConcatLayer : public ::arm_compute::IFunction
-{
-public:
- ConcatLayer();
-
-public:
- void configure(const std::vector<::neurun::backend::acl_cl::operand::ICLTensor *> &input_allocs,
- int32_t axis /* NNAPI tensor axis from NHWC order */,
- ::neurun::backend::acl_cl::operand::ICLTensor *output_alloc);
-
- void run();
-
-private:
- bool concatenationFloat32();
-
-private:
- std::vector<::neurun::backend::acl_cl::operand::ICLTensor *> _input_allocs;
- ::neurun::backend::acl_cl::operand::ICLTensor *_output_alloc;
- int32_t _axis;
- OperandType _input_type;
-};
-
-} // namespace acl_cl
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_ACL_CL_CONCAT_LAYER_H__
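The axis remapping in ConcatLayer::configure translates an NNAPI axis given in NHWC order into the reversed-NCHW (WHCN) numbering its run loop uses; isolated as a sketch:

#include <cassert>
#include <cstdint>

// Mirrors the axis_map in ConcatLayer::configure above
uint32_t toWHCNAxis(int32_t nhwc_axis)
{
  assert(nhwc_axis >= 0 && nhwc_axis < 4);
  static const uint32_t axis_map[] = {3, 1, 0, 2}; // N->3, H->1, W->0, C->2
  return axis_map[nhwc_axis];
}

int main()
{
  assert(toWHCNAxis(0) == 3); // batch axis
  assert(toWHCNAxis(3) == 2); // channel axis
  return 0;
}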
diff --git a/runtimes/neurun/src/kernel/cpu/AvgPoolLayer.cc b/runtimes/neurun/src/kernel/cpu/AvgPoolLayer.cc
deleted file mode 100644
index f434a6dec..000000000
--- a/runtimes/neurun/src/kernel/cpu/AvgPoolLayer.cc
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "AvgPoolLayer.h"
-
-#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-#define AVGPOOLING_PARAMETERS \
- tflite::PoolParams op_params; \
- op_params.stride_height = _strideHeight; \
- op_params.stride_width = _strideWidth; \
- op_params.filter_height = _kernelHeight; \
- op_params.filter_width = _kernelWidth; \
- op_params.padding_values.height = (int8_t)_paddingTop; \
- op_params.padding_values.width = (int8_t)_paddingLeft;
-
-AvgPoolLayer::AvgPoolLayer()
- : _inputData(nullptr), _outputData(nullptr), _inputShape(), _outputShape(), _paddingLeft(0),
- _paddingTop(0), _paddingRight(0), _paddingBottom(0), _strideWidth(0), _strideHeight(0),
- _kernelWidth(0), _kernelHeight(0), _activation(ANEURALNETWORKS_FUSED_NONE),
- _inputType(OperandType::SCALAR_FLOAT32)
-{
- // DO NOTHING
-}
-
-bool AvgPoolLayer::averagePoolFloat32()
-{
- AVGPOOLING_PARAMETERS
- float output_activation_min, output_activation_max;
- CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
- op_params.float_activation_min = output_activation_min;
- op_params.float_activation_max = output_activation_max;
-
- ::tflite::optimized_ops::AveragePool(op_params, convertShapeToTFLiteShape(_inputShape),
- reinterpret_cast<const float *>(_inputData),
- convertShapeToTFLiteShape(_outputShape),
- reinterpret_cast<float *>(_outputData));
- return true;
-}
-bool AvgPoolLayer::averagePoolQuant8()
-{
- AVGPOOLING_PARAMETERS
- int32_t output_activation_min = 0;
- int32_t output_activation_max = 0;
- CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
- &output_activation_max);
- op_params.quantized_activation_min = output_activation_min;
- op_params.quantized_activation_max = output_activation_max;
-
- ::tflite::optimized_ops::AveragePool(op_params, convertShapeToTFLiteShape(_inputShape),
- _inputData, convertShapeToTFLiteShape(_outputShape),
- _outputData);
- return true;
-}
-
-void AvgPoolLayer::configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
- const uint32_t paddingRight, const uint32_t paddingTop,
- const uint32_t paddingBottom, const uint32_t strideWidth,
- const uint32_t strideHeight, const uint32_t kernelWidth,
- const uint32_t kernelHeight, const FuseCode activation,
- uint8_t *outputData, const Shape outputShape)
-{
- _inputData = inputData;
- _inputShape = inputShape;
- _inputType = inputShape.type;
- _paddingLeft = paddingLeft;
- _paddingRight = paddingRight;
- _paddingTop = paddingTop;
- _paddingBottom = paddingBottom;
- _strideWidth = strideWidth;
- _strideHeight = strideHeight;
- _kernelWidth = kernelWidth;
- _kernelHeight = kernelHeight;
- _activation = activation;
- _outputData = outputData;
- _outputShape = outputShape;
-}
-
-void AvgPoolLayer::run()
-{
- if (_inputType == OperandType::TENSOR_FLOAT32)
- {
- averagePoolFloat32();
- }
- else if (_inputType == OperandType::TENSOR_QUANT8_ASYMM)
- {
- throw std::runtime_error{"AvgPoolLayer : Not tested for TENSOR_QUANT8_ASYMM"};
- // averagePoolQuant8();
- }
-}
-
-#undef AVGPOOLING_PARAMETERS
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
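configure() above receives explicit per-side paddings, strides, and the kernel extent, so the spatial output size is fully determined before run() is called. A stand-alone check of that geometry, assuming the floor-division convention TFLite applies to explicit padding:

    #include <cstdint>
    #include <cstdio>

    // Sanity-check helper (not part of the runtime): the spatial output
    // extent implied by the explicit-padding parameters that
    // AvgPoolLayer::configure() receives.
    static uint32_t pooled_extent(uint32_t in, uint32_t pad_lo, uint32_t pad_hi,
                                  uint32_t kernel, uint32_t stride)
    {
      return (in + pad_lo + pad_hi - kernel) / stride + 1;
    }

    int main()
    {
      // e.g. a 112x112 input, 2x2 kernel, stride 2, no padding -> 56x56
      std::printf("%u\n", pooled_extent(112, 0, 0, 2, 2)); // prints 56
    }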
diff --git a/runtimes/neurun/src/kernel/cpu/AvgPoolLayer.h b/runtimes/neurun/src/kernel/cpu/AvgPoolLayer.h
deleted file mode 100644
index 280f7ae5f..000000000
--- a/runtimes/neurun/src/kernel/cpu/AvgPoolLayer.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_AVGPOOLLAYER_H__
-#define __NEURUN_KERNEL_CPU_AVGPOOLLAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class AvgPoolLayer : public ::neurun::exec::IFunction
-{
-public:
- AvgPoolLayer();
-
-public:
- bool averagePoolFloat32();
-
- bool averagePoolQuant8();
-
- void configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
- const uint32_t paddingRight, const uint32_t paddingTop,
- const uint32_t paddingBottom, const uint32_t strideWidth,
- const uint32_t strideHeight, const uint32_t kernelWidth,
- const uint32_t kernelHeight, const FuseCode activation, uint8_t *outputData,
- const Shape outputShape);
-
- void run();
-
-private:
- uint8_t *_inputData;
- uint8_t *_outputData;
-
- Shape _inputShape;
- Shape _outputShape;
-
- uint32_t _paddingLeft;
- uint32_t _paddingTop;
- uint32_t _paddingRight;
- uint32_t _paddingBottom;
-
- uint32_t _strideWidth;
- uint32_t _strideHeight;
- uint32_t _kernelWidth;
- uint32_t _kernelHeight;
-
- FuseCode _activation;
-
- OperandType _inputType;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_AVGPOOLLAYER_H__
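The header shows the two-phase contract the cpu kernels in this tree follow: configure() captures raw buffers plus shapes, and run() dispatches on the operand type. A usage sketch under that contract; it assumes the headers above are on the include path and that in, out, in_shape, and out_shape were prepared by the caller for TENSOR_FLOAT32 data:

    // Sketch, not a full program: 'in' and 'out' are caller-owned uint8_t
    // buffers holding float data; 'in_shape' and 'out_shape' are cpu::Shape
    // values with type == OperandType::TENSOR_FLOAT32.
    neurun::kernel::cpu::AvgPoolLayer layer;
    layer.configure(in, in_shape,
                    /*paddingLeft=*/0, /*paddingRight=*/0,
                    /*paddingTop=*/0, /*paddingBottom=*/0,
                    /*strideWidth=*/2, /*strideHeight=*/2,
                    /*kernelWidth=*/2, /*kernelHeight=*/2,
                    ANEURALNETWORKS_FUSED_NONE, out, out_shape);
    layer.run(); // float path runs; the quant8 path above currently throws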
diff --git a/runtimes/neurun/src/kernel/cpu/CMakeLists.txt b/runtimes/neurun/src/kernel/cpu/CMakeLists.txt
deleted file mode 100644
index 436cb898c..000000000
--- a/runtimes/neurun/src/kernel/cpu/CMakeLists.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-file(GLOB SOURCES "*.cc")
-
-add_library(${LIB_NEURUN_KERNEL_CPU} STATIC ${SOURCES})
-
-target_include_directories(${LIB_NEURUN_KERNEL_CPU} PUBLIC ${NNFW_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN_KERNEL_CPU} PUBLIC ${NEURUN_INCLUDE_DIR})
-target_include_directories(${LIB_NEURUN_KERNEL_CPU} PUBLIC ${CMAKE_SOURCE_DIR}/externals/tensorflow)
-
-target_link_libraries(${LIB_NEURUN_KERNEL_CPU} tensorflow-lite)
-target_link_libraries(${LIB_NEURUN_KERNEL_CPU} nnfw_lib_misc)
-
-set_target_properties(${LIB_NEURUN_KERNEL_CPU} PROPERTIES POSITION_INDEPENDENT_CODE ON)
-set_target_properties(${LIB_NEURUN_KERNEL_CPU} PROPERTIES OUTPUT_NAME kernel_cpu)
-install(TARGETS ${LIB_NEURUN_KERNEL_CPU} DESTINATION lib/neurun)
diff --git a/runtimes/neurun/src/kernel/cpu/ConcatLayer.cc b/runtimes/neurun/src/kernel/cpu/ConcatLayer.cc
deleted file mode 100644
index be093b437..000000000
--- a/runtimes/neurun/src/kernel/cpu/ConcatLayer.cc
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ConcatLayer.h"
-
-#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-
-namespace cpu
-{
-
-ConcatLayer::ConcatLayer()
- : _inputDataPtrs(), _outputData(nullptr), _axis(0), _inputShapes(), _outputShape(),
- _inputType(OperandType::SCALAR_FLOAT32)
-{
- // DO NOTHING
-}
-
-bool ConcatLayer::concatenationFloat32()
-{
- uint32_t num_inputs = _inputShapes.size();
-
- tflite::ConcatenationParams op_params;
- op_params.axis = _axis;
- op_params.inputs_count = num_inputs;
-
- std::vector<::tflite::RuntimeShape *> inputDimsPtr;
- std::vector<::tflite::RuntimeShape> inputDims;
- inputDimsPtr.reserve(num_inputs);
- inputDims.reserve(num_inputs);
-
- for (uint32_t i = 0; i < num_inputs; i++)
- {
- inputDims.push_back(convertShapeToTFLiteShape(_inputShapes[i]));
- inputDimsPtr.push_back(&inputDims[i]);
- }
-
- std::vector<const float *> inputFloatPtrs;
-
- for (auto ptr : _inputDataPtrs)
- {
- inputFloatPtrs.emplace_back(reinterpret_cast<const float *>(ptr));
- }
-
- ::tflite::optimized_ops::Concatenation<float>(
- op_params, inputDimsPtr.data(), inputFloatPtrs.data(),
- convertShapeToTFLiteShape(_outputShape), reinterpret_cast<float *>(_outputData));
- return true;
-}
-bool ConcatLayer::concatenationQuant8()
-{
-  uint32_t num_inputs = _inputShapes.size();
-
- std::vector<int32_t> input_zeropoints(num_inputs);
- std::vector<float> input_scales(num_inputs);
- for (uint32_t i = 0; i < num_inputs; i++)
- {
- input_zeropoints[i] = _inputShapes[i].offset;
- input_scales[i] = _inputShapes[i].scale;
- }
-
- tflite::ConcatenationParams op_params;
- op_params.axis = _axis;
- op_params.inputs_count = num_inputs;
- op_params.input_zeropoint = input_zeropoints.data();
- op_params.input_scale = input_scales.data();
- op_params.output_zeropoint = _outputShape.offset;
- op_params.output_scale = _outputShape.scale;
-
- std::vector<::tflite::RuntimeShape *> inputDimsPtr;
- std::vector<::tflite::RuntimeShape> inputDims;
- inputDimsPtr.reserve(num_inputs);
- inputDims.reserve(num_inputs);
- for (uint32_t i = 0; i < num_inputs; i++)
- {
- inputDims.push_back(convertShapeToTFLiteShape(_inputShapes[i]));
- inputDimsPtr.push_back(&inputDims[i]);
- }
-
- ::tflite::optimized_ops::Concatenation<uint8_t>(
- op_params, inputDimsPtr.data(), _inputDataPtrs.data(),
- convertShapeToTFLiteShape(_outputShape), _outputData);
- return true;
-}
-
-void ConcatLayer::configure(const std::vector<const uint8_t *> &inputDataPtrs,
- const std::vector<Shape> &inputShapes, int32_t axis,
- uint8_t *outputData, const Shape outputShape)
-{
- _inputDataPtrs = inputDataPtrs;
-
- for (auto shape : inputShapes)
- {
- _inputShapes.emplace_back(shape);
- _inputType = shape.type;
- }
-
- _axis = axis;
-
- _outputData = outputData;
- _outputShape = outputShape;
-}
-
-void ConcatLayer::run()
-{
- if (_inputType == OperandType::TENSOR_FLOAT32)
- {
- concatenationFloat32();
- }
- else if (_inputType == OperandType::TENSOR_QUANT8_ASYMM)
- {
- throw std::runtime_error{"ConcatLayer : Not tested for TENSOR_QUANT8_ASYMM"};
- // concatenationQuant8();
- }
-}
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
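Both concatenation paths above assume the shapes were validated upstream. The rule they depend on is simple: every dimension of every input must match the output, except the concatenation axis, whose extents sum. A small illustration of that shape rule:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Illustration of the shape rule concatenationFloat32() relies on:
    // all dimensions match across inputs except 'axis', which is summed.
    static std::vector<uint32_t>
    concat_shape(const std::vector<std::vector<uint32_t>> &ins, size_t axis)
    {
      std::vector<uint32_t> out = ins.front();
      out[axis] = 0;
      for (const auto &s : ins)
      {
        for (size_t d = 0; d < s.size(); ++d)
          assert(d == axis || s[d] == out[d]);
        out[axis] += s[axis];
      }
      return out;
    }

    int main()
    {
      auto out = concat_shape({{1, 2, 2, 3}, {1, 2, 2, 5}}, 3);
      assert((out == std::vector<uint32_t>{1, 2, 2, 8}));
    }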
diff --git a/runtimes/neurun/src/kernel/cpu/ConcatLayer.h b/runtimes/neurun/src/kernel/cpu/ConcatLayer.h
deleted file mode 100644
index 64f813508..000000000
--- a/runtimes/neurun/src/kernel/cpu/ConcatLayer.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_CONCATLAYER_H__
-#define __NEURUN_KERNEL_CPU_CONCATLAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class ConcatLayer : public ::neurun::exec::IFunction
-{
-public:
- ConcatLayer();
-
-public:
- bool concatenationFloat32();
-
- bool concatenationQuant8();
-
- void configure(const std::vector<const uint8_t *> &inputDataPtrs,
- const std::vector<Shape> &inputShapes, int32_t axis, uint8_t *outputData,
- const Shape outputShape);
-
- void run();
-
-private:
- std::vector<const uint8_t *> _inputDataPtrs;
- uint8_t *_outputData;
-
- int32_t _axis;
-
- std::vector<Shape> _inputShapes;
- Shape _outputShape;
-
- OperandType _inputType;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_CONCATLAYER_H__
diff --git a/runtimes/neurun/src/kernel/cpu/ConvolutionLayer.cc b/runtimes/neurun/src/kernel/cpu/ConvolutionLayer.cc
deleted file mode 100644
index c694fa75f..000000000
--- a/runtimes/neurun/src/kernel/cpu/ConvolutionLayer.cc
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ConvolutionLayer.h"
-
-#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
-#include "kernel/cpu/OperationUtils.h"
-
-#include <mutex>
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-// If possible, we will use this static buffer for the tensor.
-static constexpr int kStaticBufferSize = 1605632;
-static char static_scratch_buffer[kStaticBufferSize];
-static std::mutex executionMutex;
-
-#define ANDROID_NN_CONV_PARAMETERS(Type) \
- uint32_t height = getSizeOfDimension(_inputShape, 1); \
- uint32_t width = getSizeOfDimension(_inputShape, 2); \
- uint32_t kernelHeight = getSizeOfDimension(_kernelShape, 1); \
- uint32_t kernelWidth = getSizeOfDimension(_kernelShape, 2); \
- uint32_t outHeight = getSizeOfDimension(_outputShape, 1); \
- uint32_t outWidth = getSizeOfDimension(_outputShape, 2); \
- uint32_t inDepth = getSizeOfDimension(_inputShape, 3); \
- \
- uint32_t paddingHeight = (uint32_t)_paddingTop; \
- uint32_t paddingWidth = (uint32_t)_paddingLeft; \
- \
- Shape im2colShape; \
- im2colShape.dimensions.resize(4); \
- im2colShape.dimensions[0] = getSizeOfDimension(_outputShape, 0); \
- im2colShape.dimensions[1] = getSizeOfDimension(_outputShape, 1); \
- im2colShape.dimensions[2] = getSizeOfDimension(_outputShape, 2); \
- im2colShape.dimensions[3] = inDepth * kernelHeight * kernelWidth; \
- \
- Type *im2colData = nullptr; \
- uint64_t im2colByteSize = sizeof(Type); \
- std::unique_ptr<Type[]> im2colGuard; \
- for (int i = 0; i < 4; i++) \
- { \
- im2colByteSize *= im2colShape.dimensions[i]; \
- } \
- /* http://b/77982879, tflite::optimized_ops::Conv uses int for offsets */ \
- if (im2colByteSize >= 0x7fffffff) \
- { \
- std::cout << "Conv size is too large, not enough memory" << std::endl; \
- return false; \
- } \
- if (im2colByteSize <= kStaticBufferSize) \
- { \
- im2colData = reinterpret_cast<Type *>(static_scratch_buffer); \
- } \
- else \
- { \
- im2colData = new (std::nothrow) Type[im2colByteSize / sizeof(Type)]; \
- if (im2colData == nullptr) \
- { \
- std::cout << "Conv size is too large, not enough memory" << std::endl; \
- return false; \
- } \
- im2colGuard.reset(im2colData); \
- }
-
-ConvolutionLayer::ConvolutionLayer()
- : _inputData(nullptr), _kernelData(nullptr), _outputData(nullptr), _biasData(nullptr),
- _inputShape(), _kernelShape(), _outputShape(), _biasShape(), _paddingLeft(0), _paddingTop(0),
- _paddingRight(0), _paddingBottom(0), _strideWidth(0), _strideHeight(0),
- _activation(ANEURALNETWORKS_FUSED_NONE), _inputType(OperandType::SCALAR_FLOAT32)
-{
- // DO NOTHING
-}
-
-bool ConvolutionLayer::convFloat32()
-{
- ANDROID_NN_CONV_PARAMETERS(float)
-
- const ::tflite::Dims<4> &kernel_dim = convertShapeToDims(_kernelShape);
- const int kernel_width = ArraySize(kernel_dim, 1);
- const int kernel_height = ArraySize(kernel_dim, 2);
- const bool need_im2col =
- _strideWidth != 1 || _strideHeight != 1 || kernel_width != 1 || kernel_height != 1;
-
- float *im2colDataToPass = nullptr;
- if (need_im2col)
- {
- im2colDataToPass = im2colData;
- }
-
- float output_activation_min, output_activation_max;
- CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
- int32_t dilationWidthFactor = 1, dilationHeightFactor = 1;
-
- ::tflite::ConvParams op_params;
- op_params.padding_type = ::tflite::PaddingType::kSame;
- op_params.padding_values.width = paddingWidth;
- op_params.padding_values.height = paddingHeight;
- op_params.stride_width = _strideWidth;
- op_params.stride_height = _strideHeight;
- op_params.dilation_width_factor = dilationWidthFactor;
- op_params.dilation_height_factor = dilationHeightFactor;
- op_params.float_activation_min = output_activation_min;
- op_params.float_activation_max = output_activation_max;
-
- ::tflite::optimized_ops::Conv(
- op_params, convertShapeToTFLiteShape(_inputShape),
- reinterpret_cast<const float *>(_inputData), convertShapeToTFLiteShape(_kernelShape),
- reinterpret_cast<const float *>(_kernelData), convertShapeToTFLiteShape(_biasShape),
- reinterpret_cast<const float *>(_biasData), convertShapeToTFLiteShape(_outputShape),
- reinterpret_cast<float *>(_outputData), convertShapeToTFLiteShape(im2colShape),
- im2colDataToPass);
- return true;
-}
-
-bool ConvolutionLayer::convQuant8()
-{
- ANDROID_NN_CONV_PARAMETERS(uint8_t)
-
- int32_t inputOffset = -_inputShape.offset;
- int32_t kernelOffset = -_kernelShape.offset;
- int32_t outputOffset = _outputShape.offset;
- float real_multiplier = 0.0;
- int32_t output_multiplier = 0;
- int32_t output_shift = 0;
- int32_t output_activation_min = 0;
- int32_t output_activation_max = 0;
- if (!GetQuantizedConvolutionMultipler(_inputShape, _kernelShape, _biasShape, _outputShape,
- &real_multiplier) ||
- !QuantizeMultiplierSmallerThanOne(real_multiplier, &output_multiplier, &output_shift))
- {
- return false;
- }
- CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
- &output_activation_max);
- int32_t dilationWidthFactor = 1, dilationHeightFactor = 1;
-
- ::tflite::ConvParams op_params;
- op_params.padding_type = ::tflite::PaddingType::kSame;
- op_params.padding_values.width = paddingWidth;
- op_params.padding_values.height = paddingHeight;
- op_params.stride_width = _strideWidth;
- op_params.stride_height = _strideHeight;
- op_params.dilation_width_factor = dilationWidthFactor;
- op_params.dilation_height_factor = dilationHeightFactor;
- op_params.input_offset = inputOffset;
- op_params.weights_offset = kernelOffset;
- op_params.output_offset = outputOffset;
- op_params.output_multiplier = output_multiplier;
- op_params.output_shift = output_shift;
- op_params.quantized_activation_min = output_activation_min;
- op_params.quantized_activation_max = output_activation_max;
-
- static gemmlowp::GemmContext gemm_context;
- // Prevent concurrent executions that may access the scratch buffer and
- // gemm_context.
- std::unique_lock<std::mutex> lock(executionMutex);
-  // Allow gemmlowp to decide automatically how many threads to use.
- gemm_context.set_max_num_threads(0);
- ::tflite::optimized_ops::Conv(
- op_params, convertShapeToTFLiteShape(_inputShape), _inputData,
- convertShapeToTFLiteShape(_kernelShape), _kernelData, convertShapeToTFLiteShape(_biasShape),
- reinterpret_cast<const int32_t *>(_biasData), convertShapeToTFLiteShape(_outputShape),
- _outputData, convertShapeToTFLiteShape(im2colShape), im2colData, &gemm_context);
- return true;
-}
-
-void ConvolutionLayer::configure(uint8_t *inputData, const Shape inputShape, uint8_t *kernelData,
- const Shape kernelShape, uint8_t *biasData, const Shape biasShape,
- const uint32_t paddingLeft, const uint32_t paddingRight,
- const uint32_t paddingTop, const uint32_t paddingBottom,
- const uint32_t strideWidth, const uint32_t strideHeight,
- const FuseCode activation, uint8_t *outputData,
- const Shape outputShape)
-{
- _inputData = inputData;
- _inputShape = inputShape;
- _inputType = inputShape.type;
- _kernelData = kernelData;
- _kernelShape = kernelShape;
- _biasData = biasData;
- _biasShape = biasShape;
- _paddingLeft = paddingLeft;
- _paddingRight = paddingRight;
- _paddingTop = paddingTop;
- _paddingBottom = paddingBottom;
- _strideWidth = strideWidth;
- _strideHeight = strideHeight;
- _activation = activation;
- _outputData = outputData;
- _outputShape = outputShape;
-}
-
-void ConvolutionLayer::run()
-{
- if (_inputType == OperandType::TENSOR_FLOAT32)
- {
- convFloat32();
- }
- else if (_inputType == OperandType::TENSOR_QUANT8_ASYMM)
- {
- throw std::runtime_error{"ConvolutionLayer : Not tested for TENSOR_QUANT8_ASYMM"};
- // convQuant8();
- }
-}
-
-#undef ANDROID_NN_CONV_PARAMETERS
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
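The ANDROID_NN_CONV_PARAMETERS macro above sizes an im2col scratch tensor of {N, outH, outW, inDepth * kH * kW}, falls back from the 1,605,632-byte static buffer to the heap, and refuses sizes at or above 2^31 bytes because tflite::optimized_ops::Conv indexes the buffer with int offsets. A stand-alone mirror of that sizing logic:

    #include <cstdint>
    #include <cstdio>

    // Mirror of the im2col sizing in ANDROID_NN_CONV_PARAMETERS: computes
    // the scratch byte count and whether it fits in int offsets.
    static bool im2col_bytes(uint64_t n, uint64_t out_h, uint64_t out_w,
                             uint64_t in_depth, uint64_t k_h, uint64_t k_w,
                             uint64_t elem_size, uint64_t *bytes)
    {
      *bytes = elem_size * n * out_h * out_w * (in_depth * k_h * k_w);
      return *bytes < 0x7fffffff;
    }

    int main()
    {
      uint64_t bytes = 0;
      // float conv, 1x56x56 output, 64 input channels, 3x3 kernel
      im2col_bytes(1, 56, 56, 64, 3, 3, sizeof(float), &bytes);
      // ~7.2 MB: larger than the 1,605,632-byte static buffer, so the
      // heap path guarded by im2colGuard would be taken.
      std::printf("%llu\n", static_cast<unsigned long long>(bytes));
    }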
diff --git a/runtimes/neurun/src/kernel/cpu/ConvolutionLayer.h b/runtimes/neurun/src/kernel/cpu/ConvolutionLayer.h
deleted file mode 100644
index 9b7f55ff1..000000000
--- a/runtimes/neurun/src/kernel/cpu/ConvolutionLayer.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_CONVOLUTIONLAYER_H__
-#define __NEURUN_KERNEL_CPU_CONVOLUTIONLAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class ConvolutionLayer : public ::neurun::exec::IFunction
-{
-public:
- ConvolutionLayer();
-
-public:
- bool convFloat32();
-
- bool convQuant8();
-
- void configure(uint8_t *inputData, const Shape inputShape, uint8_t *kernelData,
- const Shape kernelShape, uint8_t *biasData, const Shape biasShape,
- const uint32_t paddingLeft, const uint32_t paddingRight, const uint32_t paddingTop,
- const uint32_t paddingBottom, const uint32_t strideW, const uint32_t strideH,
- const FuseCode activation, uint8_t *outputData, const Shape outputShape);
-
- void run();
-
-private:
- uint8_t *_inputData;
- uint8_t *_kernelData;
- uint8_t *_outputData;
- uint8_t *_biasData;
-
- Shape _inputShape;
- Shape _kernelShape;
- Shape _outputShape;
- Shape _biasShape;
-
- uint32_t _paddingLeft;
- uint32_t _paddingTop;
- uint32_t _paddingRight;
- uint32_t _paddingBottom;
-
- uint32_t _strideWidth;
- uint32_t _strideHeight;
-
- FuseCode _activation;
-
- OperandType _inputType;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_CONVOLUTIONLAYER_H__
diff --git a/runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.cc b/runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.cc
deleted file mode 100644
index abe82db5e..000000000
--- a/runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.cc
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "FullyConnectedLayer.h"
-
-#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
-#include "tensorflow/contrib/lite/kernels/internal/reference/reference_ops.h"
-#include "kernel/cpu/OperationUtils.h"
-
-#include <mutex>
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-FullyConnectedLayer::FullyConnectedLayer()
- : _inputData(nullptr), _weightsData(nullptr), _biasData(nullptr), _outputData(nullptr),
- _inputShape(), _weightsShape(), _biasShape(), _outputShape(),
- _activation(ANEURALNETWORKS_FUSED_NONE), _inputType(OperandType::SCALAR_FLOAT32)
-{
- // DO NOTHING
-}
-
-// executionMutex protects non-thread-safe resources such as
-// gemmlowp::GemmContext from concurrent access.
-// std::mutex is safe for pthreads on Android.
-static std::mutex executionMutex;
-bool FullyConnectedLayer::fullyConnectedFloat32()
-{
- int total_input_size = 1;
-  for (size_t i = 0; i < _inputShape.dimensions.size(); i++)
- {
- total_input_size *= _inputShape.dimensions[i];
- }
-
- int input_size = _weightsShape.dimensions[1];
- const int batch_size = total_input_size / input_size;
- const int num_units = _weightsShape.dimensions[0];
-
- TfLiteFusedActivation act = convertFusedActivation(_activation);
-
- ::tflite::tensor_utils::VectorBatchVectorAssign(reinterpret_cast<const float *>(_biasData),
- num_units, batch_size,
- reinterpret_cast<float *>(_outputData));
-
- // Compute output += weight * input
- ::tflite::tensor_utils::MatrixBatchVectorMultiplyAccumulate(
- reinterpret_cast<const float *>(_weightsData), num_units, input_size,
- reinterpret_cast<const float *>(_inputData), batch_size,
- reinterpret_cast<float *>(_outputData), /*result_stride=*/1);
-
- // Apply activation function
- ::tflite::tensor_utils::ApplyActivationToVector(reinterpret_cast<float *>(_outputData),
- batch_size * num_units, act,
- reinterpret_cast<float *>(_outputData));
-
- return true;
-}
-
-bool FullyConnectedLayer::fullyConnectedQuant8()
-{
- throw std::runtime_error{"FullyConnectedLayer : Not tested for TENSOR_QUANT8_ASYMM"};
-}
-
-void FullyConnectedLayer::configure(uint8_t *inputData, const Shape inputShape,
- uint8_t *weightsData, const Shape weightsShape,
- uint8_t *biasData, const Shape biasShape, FuseCode activation,
- uint8_t *outputData, const Shape outputShape)
-{
- _inputData = inputData;
- _inputShape = inputShape;
- _inputType = inputShape.type;
- _weightsData = weightsData;
- _weightsShape = weightsShape;
- _biasData = biasData;
- _biasShape = biasShape;
- _activation = activation;
- _outputData = outputData;
- _outputShape = outputShape;
-}
-
-void FullyConnectedLayer::run()
-{
- if (_inputType == OperandType::TENSOR_FLOAT32)
- {
- fullyConnectedFloat32();
- }
- else if (_inputType == OperandType::TENSOR_QUANT8_ASYMM)
- {
- throw std::runtime_error{"FullyConnectedLayer : Not tested for TENSOR_QUANT8_ASYMM"};
- // fullyConnectedQuant8();
- }
-}
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
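fullyConnectedFloat32() above never sees an explicit batch dimension; it folds all leading input dimensions into the batch and reads num_units and input_size from the {num_units, input_size} weights shape. A compact restatement of that bookkeeping:

    #include <cstdio>
    #include <functional>
    #include <numeric>
    #include <vector>

    // Shape bookkeeping behind fullyConnectedFloat32(): weights are
    // {num_units, input_size}; leading input dims fold into the batch,
    // so the output is {batch_size, num_units}.
    struct FcDims
    {
      int batch_size, input_size, num_units;
    };

    static FcDims fc_dims(const std::vector<int> &input_dims,
                          const std::vector<int> &weights_dims)
    {
      const int total = std::accumulate(input_dims.begin(), input_dims.end(), 1,
                                        std::multiplies<int>());
      const int input_size = weights_dims[1];
      return {total / input_size, input_size, weights_dims[0]};
    }

    int main()
    {
      const FcDims d = fc_dims({4, 1, 1, 128}, {10, 128});
      std::printf("%d %d %d\n", d.batch_size, d.input_size, d.num_units); // 4 128 10
    }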
diff --git a/runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.h b/runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.h
deleted file mode 100644
index 20a388349..000000000
--- a/runtimes/neurun/src/kernel/cpu/FullyConnectedLayer.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_FULLYCONNECTEDLAYER_H__
-#define __NEURUN_KERNEL_CPU_FULLYCONNECTEDLAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class FullyConnectedLayer : public ::neurun::exec::IFunction
-{
-public:
- FullyConnectedLayer();
-
-public:
- bool fullyConnectedFloat32();
-
- bool fullyConnectedQuant8();
-
- void configure(uint8_t *inputData, const Shape inputShape, uint8_t *weightsData,
- const Shape weightsShape, uint8_t *biasData, const Shape biasShape,
- FuseCode activation, uint8_t *outputData, const Shape outputShape);
-
- void run();
-
-private:
- uint8_t *_inputData;
- uint8_t *_weightsData;
- uint8_t *_biasData;
- uint8_t *_outputData;
-
- Shape _inputShape;
- Shape _weightsShape;
- Shape _biasShape;
- Shape _outputShape;
-
- FuseCode _activation;
-
- OperandType _inputType;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_FULLYCONNECTEDLAYER_H__
diff --git a/runtimes/neurun/src/kernel/cpu/MaxPoolLayer.cc b/runtimes/neurun/src/kernel/cpu/MaxPoolLayer.cc
deleted file mode 100644
index c4a288b07..000000000
--- a/runtimes/neurun/src/kernel/cpu/MaxPoolLayer.cc
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "MaxPoolLayer.h"
-
-#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-#define MAXPOOLING_PARAMETERS \
- tflite::PoolParams op_params; \
- op_params.stride_height = _strideHeight; \
- op_params.stride_width = _strideWidth; \
- op_params.filter_height = _kernelHeight; \
- op_params.filter_width = _kernelWidth; \
-  op_params.padding_values.height = (int16_t)_paddingTop;  \
-  op_params.padding_values.width = (int16_t)_paddingLeft;
-
-MaxPoolLayer::MaxPoolLayer()
- : _inputData(nullptr), _outputData(nullptr), _inputShape(), _outputShape(), _paddingLeft(0),
- _paddingTop(0), _paddingRight(0), _paddingBottom(0), _strideWidth(0), _strideHeight(0),
- _kernelWidth(0), _kernelHeight(0), _activation(ANEURALNETWORKS_FUSED_NONE),
- _inputType(OperandType::SCALAR_FLOAT32)
-{
- // DO NOTHING
-}
-
-bool MaxPoolLayer::maxPoolFloat32()
-{
- MAXPOOLING_PARAMETERS
- float output_activation_min, output_activation_max;
- CalculateActivationRangeFloat(_activation, &output_activation_min, &output_activation_max);
- op_params.float_activation_min = output_activation_min;
- op_params.float_activation_max = output_activation_max;
-
- ::tflite::optimized_ops::MaxPool(op_params, convertShapeToTFLiteShape(_inputShape),
- reinterpret_cast<const float *>(_inputData),
- convertShapeToTFLiteShape(_outputShape),
- reinterpret_cast<float *>(_outputData));
- return true;
-}
-bool MaxPoolLayer::maxPoolQuant8()
-{
- MAXPOOLING_PARAMETERS
- int32_t output_activation_min = 0;
- int32_t output_activation_max = 0;
- CalculateActivationRangeUint8(_activation, _outputShape, &output_activation_min,
- &output_activation_max);
- op_params.quantized_activation_min = output_activation_min;
- op_params.quantized_activation_max = output_activation_max;
-
- ::tflite::optimized_ops::MaxPool(op_params, convertShapeToTFLiteShape(_inputShape), _inputData,
- convertShapeToTFLiteShape(_outputShape), _outputData);
- return true;
-}
-
-void MaxPoolLayer::configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
- const uint32_t paddingRight, const uint32_t paddingTop,
- const uint32_t paddingBottom, const uint32_t strideWidth,
- const uint32_t strideHeight, const uint32_t kernelWidth,
- const uint32_t kernelHeight, const FuseCode activation,
- uint8_t *outputData, const Shape outputShape)
-{
- _inputData = inputData;
-
- _inputShape = inputShape;
- _inputType = inputShape.type;
- _paddingLeft = paddingLeft;
- _paddingRight = paddingRight;
- _paddingTop = paddingTop;
- _paddingBottom = paddingBottom;
- _strideWidth = strideWidth;
- _strideHeight = strideHeight;
- _kernelWidth = kernelWidth;
- _kernelHeight = kernelHeight;
- _activation = activation;
- _outputData = outputData;
- _outputShape = outputShape;
-}
-
-void MaxPoolLayer::run()
-{
- if (_inputType == OperandType::TENSOR_FLOAT32)
- {
- maxPoolFloat32();
- }
- else if (_inputType == OperandType::TENSOR_QUANT8_ASYMM)
- {
- throw std::runtime_error{"MaxPoolLayer : Not tested for TENSOR_QUANT8_ASYMM"};
- // maxPoolQuant8();
- }
-}
-
-#undef MAXPOOLING_PARAMETERS
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
diff --git a/runtimes/neurun/src/kernel/cpu/MaxPoolLayer.h b/runtimes/neurun/src/kernel/cpu/MaxPoolLayer.h
deleted file mode 100644
index 2b185550b..000000000
--- a/runtimes/neurun/src/kernel/cpu/MaxPoolLayer.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_MAXPOOLLAYER_H__
-#define __NEURUN_KERNEL_CPU_MAXPOOLLAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class MaxPoolLayer : public ::neurun::exec::IFunction
-{
-public:
- MaxPoolLayer();
-
-public:
- bool maxPoolFloat32();
-
- bool maxPoolQuant8();
-
- void configure(uint8_t *inputData, const Shape inputShape, const uint32_t paddingLeft,
- const uint32_t paddingRight, const uint32_t paddingTop,
- const uint32_t paddingBottom, const uint32_t strideWidth,
- const uint32_t strideHeight, const uint32_t kernelWidth,
- const uint32_t kernelHeight, const FuseCode activation, uint8_t *outputData,
- const Shape outputShape);
-
- void run();
-
-private:
- uint8_t *_inputData;
- uint8_t *_outputData;
-
- Shape _inputShape;
- Shape _outputShape;
-
- uint32_t _paddingLeft;
- uint32_t _paddingTop;
- uint32_t _paddingRight;
- uint32_t _paddingBottom;
-
- uint32_t _strideWidth;
- uint32_t _strideHeight;
- uint32_t _kernelWidth;
- uint32_t _kernelHeight;
-
- FuseCode _activation;
-
- OperandType _inputType;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_MAXPOOLLAYER_H__
diff --git a/runtimes/neurun/src/kernel/cpu/OperationUtils.cc b/runtimes/neurun/src/kernel/cpu/OperationUtils.cc
deleted file mode 100644
index b28508c27..000000000
--- a/runtimes/neurun/src/kernel/cpu/OperationUtils.cc
+++ /dev/null
@@ -1,230 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "kernel/cpu/OperationUtils.h"
-
-#include <cmath>
-#include <algorithm>
-#include <cassert>
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-uint32_t getNumberOfDimensions(const Shape &shape) { return shape.dimensions.size(); }
-
-uint32_t getNumberOfElements(const Shape &shape)
-{
- uint32_t count = 1;
- for (size_t i = 0; i < shape.dimensions.size(); i++)
- {
- count *= shape.dimensions[i];
- }
- return count;
-}
-
-uint32_t getSizeOfDimension(const Shape &shape, uint32_t dimensionIdx)
-{
- if (dimensionIdx >= shape.dimensions.size())
- {
-    // TODO: log the error
- return 0;
- }
- return shape.dimensions[dimensionIdx];
-}
-
-bool QuantizeMultiplierSmallerThanOne(double double_multiplier, int32_t *quantized_multiplier,
- int32_t *right_shift)
-{
- assert(double_multiplier >= 0.);
- assert(double_multiplier < 1.);
- if (double_multiplier == 0.)
- {
- *quantized_multiplier = 0;
- *right_shift = 0;
- return true;
- }
- assert(double_multiplier > 0.);
- const double q = std::frexp(double_multiplier, right_shift);
- *right_shift *= -1;
- int64_t q_fixed = static_cast<int64_t>(std::round(q * (1ll << 31)));
- assert(q_fixed <= (1ll << 31));
- if (q_fixed == (1ll << 31))
- {
- q_fixed /= 2;
- --*right_shift;
- }
- assert(*right_shift >= 0);
- assert(q_fixed <= std::numeric_limits<int32_t>::max());
- *quantized_multiplier = static_cast<int32_t>(q_fixed);
- return true;
-}
-
-bool GetQuantizedConvolutionMultipler(const Shape &inputShape, const Shape &filterShape,
- const Shape &biasShape, const Shape &outputShape,
- float *multiplier)
-{
- const float input_product_scale = inputShape.scale * filterShape.scale;
- const float bias_scale = biasShape.scale;
- const float output_scale = outputShape.scale;
- // The following conditions must be guaranteed by the training pipeline.
- assert(std::abs(input_product_scale - bias_scale) <=
- 1e-6 * std::min(input_product_scale, bias_scale));
- assert(input_product_scale >= 0);
- assert(input_product_scale < output_scale);
- *multiplier = input_product_scale / output_scale;
- return true;
-}
-
-bool QuantizeMultiplierGreaterThanOne(double double_multiplier, int32_t *quantized_multiplier,
- int *left_shift)
-{
- assert(double_multiplier > 1.);
- const double q = std::frexp(double_multiplier, left_shift);
- int64_t q_fixed = static_cast<int64_t>(std::round(q * (1ll << 31)));
- assert(q_fixed <= (1ll << 31));
- if (q_fixed == (1ll << 31))
- {
- q_fixed /= 2;
- ++*left_shift;
- }
- assert(*left_shift >= 0);
- assert(q_fixed <= std::numeric_limits<int32_t>::max());
- *quantized_multiplier = static_cast<int32_t>(q_fixed);
- return true;
-}
-
-void CalculateActivationRangeFloat(int32_t activation, float *activation_min, float *activation_max)
-{
- if (activation == ANEURALNETWORKS_FUSED_RELU)
- {
- *activation_min = 0.f;
- *activation_max = std::numeric_limits<float>::max();
- }
- else if (activation == ANEURALNETWORKS_FUSED_RELU6)
- {
- *activation_min = 0.f;
- *activation_max = 6.f;
- }
- else if (activation == ANEURALNETWORKS_FUSED_RELU1)
- {
- *activation_min = -1.f;
- *activation_max = 1.f;
- }
- else if (activation == ANEURALNETWORKS_FUSED_NONE)
- {
- *activation_min = std::numeric_limits<float>::lowest();
- *activation_max = std::numeric_limits<float>::max();
- }
- else
- {
- std::cout << "Unsupported fused activation function." << std::endl;
- }
-}
-
-void CalculateActivationRangeUint8(int32_t activation, const Shape &outputShape, int32_t *act_min,
- int32_t *act_max)
-{
- const int32_t qmin = std::numeric_limits<uint8_t>::min();
- const int32_t qmax = std::numeric_limits<uint8_t>::max();
- const auto scale = outputShape.scale;
- const auto zero_point = outputShape.offset;
- auto quantize = [scale, zero_point](float f) {
- return zero_point + static_cast<int32_t>(std::round(f / scale));
- };
- if (activation == ANEURALNETWORKS_FUSED_RELU)
- {
- *act_min = std::max(qmin, quantize(0.0));
- *act_max = qmax;
- }
- else if (activation == ANEURALNETWORKS_FUSED_RELU6)
- {
- *act_min = std::max(qmin, quantize(0.0));
- *act_max = std::min(qmax, quantize(6.0));
- }
- else if (activation == ANEURALNETWORKS_FUSED_RELU1)
- {
- *act_min = std::max(qmin, quantize(-1.0));
- *act_max = std::min(qmax, quantize(1.0));
- }
- else if (activation == ANEURALNETWORKS_FUSED_NONE)
- {
- *act_min = qmin;
- *act_max = qmax;
- }
- else
- {
- std::cout << "Unsupported fused activation function." << std::endl;
- }
-}
-
-int32_t CalculateInputRadius(int input_integer_bits, int input_left_shift)
-{
- const double max_input_rescaled = 1.0 * ((1 << input_integer_bits) - 1) *
- (1ll << (31 - input_integer_bits)) / (1ll << input_left_shift);
- // Tighten bound using floor. Suppose that we could use the exact value.
- // After scaling the difference, the result would be at the maximum. Thus we
- // must ensure that our value has lower magnitude.
- return static_cast<int32_t>(std::floor(max_input_rescaled));
-}
-
-Shape getShape(const ::neurun::model::operand::Object &o)
-{
- Shape shape;
-
- shape.type = static_cast<OperandType>(static_cast<int32_t>(o.typeInfo().type()));
- shape.dimensions = std::vector<uint32_t>(o.shape().dims().begin(), o.shape().dims().end());
- shape.scale = o.typeInfo().scale();
- // shape.offset = _offset;
-
- return shape;
-}
-
-size_t sizeOfData(OperandType type, const std::vector<uint32_t> &dimensions)
-{
- size_t size = 4;
-
- switch (type)
- {
- case OperandType::SCALAR_FLOAT32:
- case OperandType::SCALAR_INT32:
- case OperandType::SCALAR_UINT32:
- case OperandType::TENSOR_FLOAT32:
- case OperandType::TENSOR_INT32:
- size = 4;
- break;
- case OperandType::TENSOR_QUANT8_ASYMM:
- size = 1;
- break;
- default:
- throw std::runtime_error("Not supported operand type.");
- break;
- }
-
- for (auto d : dimensions)
- {
- size *= d;
- }
-
- return size;
-}
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
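QuantizeMultiplierSmallerThanOne() above splits a real multiplier in [0, 1) into a Q0.31 fixed-point mantissa and a right shift via std::frexp. A round-trip check of that encoding, omitting the q_fixed == 2^31 carry case the function also handles:

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // The pair (quantized_multiplier, right_shift) encodes
    //   real ~= quantized_multiplier * 2^-31 * 2^-right_shift.
    int main()
    {
      const double real = 0.3;
      int shift = 0;
      const double q = std::frexp(real, &shift); // 0.3 = 0.6 * 2^-1
      const int right_shift = -shift;            // -> 1
      const int64_t q_fixed =
          static_cast<int64_t>(std::round(q * (1ll << 31)));
      std::printf("%f\n", // prints ~0.300000
                  static_cast<double>(q_fixed) * std::pow(2.0, -31 - right_shift));
    }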
diff --git a/runtimes/neurun/src/kernel/cpu/OperationUtils.h b/runtimes/neurun/src/kernel/cpu/OperationUtils.h
deleted file mode 100644
index 3610990a5..000000000
--- a/runtimes/neurun/src/kernel/cpu/OperationUtils.h
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NNFW_SUPPORT_NNAPI_OPERATION_UTILS_H__
-#define __NNFW_SUPPORT_NNAPI_OPERATION_UTILS_H__
-
-#include <NeuralNetworks.h>
-
-#include <iostream>
-#include <limits>
-#include <vector>
-
-#include "tensorflow/contrib/lite/c/builtin_op_data.h"
-#include "tensorflow/contrib/lite/kernels/internal/types.h"
-#include "tensorflow/contrib/lite/kernels/internal/tensor.h"
-#include "model/operand/Object.h"
-#include "model/operand/DataType.h"
-
-using OperandType = neurun::model::operand::DataType;
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-struct Shape
-{
- OperandType type;
- std::vector<uint32_t> dimensions;
- float scale;
- int32_t offset;
-};
-
-uint32_t getNumberOfDimensions(const Shape &shape);
-
-uint32_t getNumberOfElements(const Shape &shape);
-
-uint32_t getSizeOfDimension(const Shape &shape, uint32_t dimensionIdx);
-
-inline ::tflite::Dims<4> convertShapeToDims(const Shape &shape)
-{
- // nnAssert(shape.dimensions.size() <= 4);
- ::tflite::Dims<4> dims;
- // The dimensions are reversed in Dims<4>.
- for (int i = 0; i < 4; ++i)
- {
- int src = static_cast<int>(shape.dimensions.size()) - i - 1;
- if (src >= 0)
- {
- dims.sizes[i] = static_cast<int>(getSizeOfDimension(shape, src));
- }
- else
- {
- dims.sizes[i] = 1;
- }
- }
- dims.strides[0] = 1;
- for (int i = 1; i < 4; i++)
- {
- dims.strides[i] = dims.strides[i - 1] * dims.sizes[i - 1];
- }
- return dims;
-}
-
-inline ::tflite::RuntimeShape convertShapeToTFLiteShape(const Shape &shape)
-{
- std::vector<int32_t> raw_shape;
- raw_shape.resize(4);
-
- for (uint32_t i = 0; i < 4; ++i)
- {
- if (i >= shape.dimensions.size())
- {
- raw_shape[i] = 1;
- }
- else
- {
- raw_shape[i] = shape.dimensions[i];
- }
- }
-
- return ::tflite::GetTensorShape(raw_shape);
-}
-
-inline TfLiteFusedActivation convertFusedActivation(FuseCode act)
-{
- if (act == ANEURALNETWORKS_FUSED_NONE)
- {
- return kTfLiteActNone;
- }
-
- if (act == ANEURALNETWORKS_FUSED_RELU)
- {
- return kTfLiteActRelu;
- }
-
- if (act == ANEURALNETWORKS_FUSED_RELU1)
- {
- return kTfLiteActRelu1;
- }
-
- if (act == ANEURALNETWORKS_FUSED_RELU6)
- {
- return kTfLiteActRelu6;
- }
-
- return kTfLiteActNone;
-}
-
-__wur bool QuantizeMultiplierSmallerThanOne(double double_multiplier, int32_t *quantized_multiplier,
- int32_t *right_shift);
-
-__wur bool GetQuantizedConvolutionMultipler(const Shape &inputShape, const Shape &filterShape,
- const Shape &biasShape, const Shape &outputShape,
- float *multiplier);
-__wur bool QuantizeMultiplierGreaterThanOne(double double_multiplier, int32_t *quantized_multiplier,
- int *left_shift);
-
-void CalculateActivationRangeFloat(int32_t activation, float *activation_min,
- float *activation_max);
-
-void CalculateActivationRangeUint8(int32_t activation, const Shape &outputShape, int32_t *act_min,
- int32_t *act_max);
-
-int32_t CalculateInputRadius(int input_integer_bits, int input_left_shift);
-
-Shape getShape(const ::neurun::model::operand::Object &o);
-
-size_t sizeOfData(OperandType type, const std::vector<uint32_t> &dimensions);
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NNFW_SUPPORT_NNAPI_OPERATION_UTILS_H__
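convertShapeToDims() above stores sizes innermost-first, so an NHWC shape comes out reversed, and the strides are running products of those reversed sizes. A stand-alone demonstration for the shape {1, 2, 3, 4}:

    #include <array>
    #include <cstdio>

    // What convertShapeToDims() produces for NHWC {1, 2, 3, 4}: sizes are
    // reversed (innermost first) and strides are cumulative products.
    int main()
    {
      std::array<int, 4> sizes = {4, 3, 2, 1}; // reversed {1, 2, 3, 4}
      std::array<int, 4> strides{};
      strides[0] = 1;
      for (int i = 1; i < 4; ++i)
        strides[i] = strides[i - 1] * sizes[i - 1];
      std::printf("%d %d %d %d\n", strides[0], strides[1], strides[2],
                  strides[3]); // 1 4 12 24
    }

Shapes with rank below 4 are padded with size-1 dimensions on the slow-varying side of the reversed order.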
diff --git a/runtimes/neurun/src/kernel/cpu/PermuteLayer.cc b/runtimes/neurun/src/kernel/cpu/PermuteLayer.cc
deleted file mode 100644
index ba8c5ab92..000000000
--- a/runtimes/neurun/src/kernel/cpu/PermuteLayer.cc
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "PermuteLayer.h"
-
-#include "util/feature/nhwc/Reader.h"
-#include "util/feature/nhwc/View.h"
-#include "util/feature/nchw/View.h"
-#include "util/feature/Coordinate4D.h"
-
-#include <misc/feature/IndexIterator.h>
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-using Type = model::operation::PermuteNode::Type;
-
-void PermuteLayer::configure(std::shared_ptr<::neurun::backend::operand::IObject> input,
- std::shared_ptr<::neurun::backend::operand::IObject> output,
- const model::operand::Shape &shape, Type type)
-{
- _input = input;
- _output = output;
- _shape = shape;
- _type = type;
-}
-
-void PermuteLayer::run()
-{
- auto rank = _shape.rank();
-
- switch (_type)
- {
- case Type::NHWC_TO_NCHW:
- {
- auto fn = [&](::neurun::backend::operand::ITensor &tensor) {
- auto input_tensor = _input->ptr();
-
- auto input_buffer = input_tensor->buffer();
- auto input_size = input_tensor->total_size();
-
- auto output_buffer = tensor.buffer();
- auto output_size = tensor.total_size();
- switch (rank)
- {
- case 0:
- case 1:
- {
- memcpy(output_buffer, input_buffer, input_size);
- break;
- }
- case 2:
- {
- auto matrix_shape = _shape.asMatrix();
-
- for (auto h = 0; h < matrix_shape.H; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, 0};
- memcpy(output_buffer + tensor.calcOffset(coord), input_buffer + h * matrix_shape.W,
- matrix_shape.W * sizeof(float));
- }
- break;
- }
- case 3:
- {
- const int32_t depth = _shape.dim(0);
- const int32_t height = _shape.dim(1);
- const int32_t width = _shape.dim(2);
-
- for (auto c = 0; c < depth; ++c)
- {
- for (auto h = 0; h < height; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, c};
- memcpy(output_buffer + tensor.calcOffset(coord),
- input_buffer + c * height * width + h * width, width * sizeof(float));
- }
- }
- break;
- }
- case 4:
- {
- auto feature = _shape.asFeature();
-
- const util::feature::nhwc::Reader<float> from{
- feature, reinterpret_cast<const float *>(input_buffer), input_size};
- util::feature::nchw::View<float> into{&tensor};
-
- ::nnfw::misc::feature::iterate(feature)
- << [&](uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(batch, ch, row, col);
- into.at(batch, ch, row, col) = value;
- };
- break;
- }
- default:
- throw "NYI";
- break;
- }
- };
- _output->access(fn);
- break;
- }
- case Type::NCHW_TO_NHWC:
- {
- auto fn = [&](::neurun::backend::operand::ITensor &tensor) {
- auto input_buffer = tensor.buffer();
- auto input_size = tensor.total_size();
-
- auto output_tensor = _output->ptr();
-
- auto output_buffer = output_tensor->buffer();
- auto output_size = output_tensor->total_size();
-
- switch (rank)
- {
- case 0:
- case 1:
- {
- memcpy(output_buffer, input_buffer, output_size);
- break;
- }
- case 2:
- {
- auto matrix_shape = _shape.asMatrix();
-
- for (auto h = 0; h < matrix_shape.H; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, 0};
- memcpy(output_buffer + h * matrix_shape.W, input_buffer + tensor.calcOffset(coord),
- matrix_shape.W * sizeof(float));
- }
- break;
- }
- case 3:
- {
- const int32_t depth = _shape.dim(0);
- const int32_t height = _shape.dim(1);
- const int32_t width = _shape.dim(2);
-
- for (auto c = 0; c < depth; ++c)
- {
- for (auto h = 0; h < height; ++h)
- {
- neurun::util::feature::Coordinate4D coord{0, h, 0, c};
- memcpy(output_buffer + c * height * width + h * width,
- input_buffer + tensor.calcOffset(coord), width * sizeof(float));
- }
- }
- break;
- }
- case 4:
- {
- auto feature = _shape.asFeature();
-
- const util::feature::nchw::View<float> from{&tensor};
- util::feature::nhwc::View<float> into{feature, reinterpret_cast<float *>(output_buffer),
- output_size};
-
- ::nnfw::misc::feature::iterate(feature)
- << [&](uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) {
- const auto value = from.at(batch, ch, row, col);
- into.at(batch, ch, row, col) = value;
- };
- break;
- }
- default:
- throw "NYI";
- break;
- }
- };
- _input->access(fn);
- break;
- }
- case Type::COPY:
-      // This is needed when two different backends use the same tensor layout.
- throw "NYI";
- break;
- }
-}
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
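For the rank-4 cases above, the element-wise copy between the nhwc Reader/View and the nchw View boils down to two linear offset formulas; the real code routes one side through ITensor::calcOffset() so padded or strided backend tensors also work. The formulas, for reference:

    #include <cstdint>
    #include <cstdio>

    // Linear offsets behind the rank-4 NHWC <-> NCHW copy.
    static uint32_t nhwc_offset(uint32_t b, uint32_t c, uint32_t h, uint32_t w,
                                uint32_t C, uint32_t H, uint32_t W)
    {
      return ((b * H + h) * W + w) * C + c;
    }

    static uint32_t nchw_offset(uint32_t b, uint32_t c, uint32_t h, uint32_t w,
                                uint32_t C, uint32_t H, uint32_t W)
    {
      return ((b * C + c) * H + h) * W + w;
    }

    int main()
    {
      // dst[nchw] = src[nhwc] for every (b, c, h, w) is the whole permute.
      std::printf("%u %u\n", nhwc_offset(0, 1, 2, 3, 4, 5, 6),
                  nchw_offset(0, 1, 2, 3, 4, 5, 6));
    }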
diff --git a/runtimes/neurun/src/kernel/cpu/PermuteLayer.h b/runtimes/neurun/src/kernel/cpu/PermuteLayer.h
deleted file mode 100644
index d9e1709bc..000000000
--- a/runtimes/neurun/src/kernel/cpu/PermuteLayer.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_PERMUTE_LAYER_H__
-#define __NEURUN_KERNEL_CPU_PERMUTE_LAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "util/feature/nhwc/View.h"
-#include "OperationUtils.h"
-#include "backend/interface/operand/IObject.h"
-#include "model/operation/PermuteNode.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class PermuteLayer : public ::neurun::exec::IFunction
-{
-public:
- PermuteLayer() = default;
-
-public:
- void configure(std::shared_ptr<::neurun::backend::operand::IObject> input,
- std::shared_ptr<::neurun::backend::operand::IObject> output,
- const model::operand::Shape &shape, model::operation::PermuteNode::Type type);
- void run();
-
-private:
- std::shared_ptr<::neurun::backend::operand::IObject> _input;
- std::shared_ptr<::neurun::backend::operand::IObject> _output;
- model::operand::Shape _shape;
- model::operation::PermuteNode::Type _type;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_PERMUTE_LAYER_H__
diff --git a/runtimes/neurun/src/kernel/cpu/ReshapeLayer.cc b/runtimes/neurun/src/kernel/cpu/ReshapeLayer.cc
deleted file mode 100644
index 377f783e0..000000000
--- a/runtimes/neurun/src/kernel/cpu/ReshapeLayer.cc
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ReshapeLayer.h"
-
-#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
-#include "tensorflow/contrib/lite/kernels/internal/reference/reference_ops.h"
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-ReshapeLayer::ReshapeLayer()
- : _inputData(nullptr), _outputData(nullptr), _inputShape(), _outputShape()
-{
- // DO NOTHING
-}
-
-bool ReshapeLayer::reshapeGeneric()
-{
- size_t count = sizeOfData(_inputShape.type, _inputShape.dimensions);
- memcpy(reinterpret_cast<void *>(_outputData), reinterpret_cast<const void *>(_inputData), count);
- return true;
-}
-
-void ReshapeLayer::configure(uint8_t *inputData, const Shape &inputShape, uint8_t *outputData,
- const Shape &outputShape)
-{
- _inputData = inputData;
- _inputShape = inputShape;
- _outputData = outputData;
- _outputShape = outputShape;
-}
-
-void ReshapeLayer::run() { reshapeGeneric(); }
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
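reshapeGeneric() above can be a single memcpy because, for contiguous tensors, a reshape only reinterprets dimensions; the element storage never moves, so input and output occupy the same number of bytes. A small check of that invariant:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Byte count of a contiguous tensor: reshape preserves it exactly,
    // which is what lets the kernel copy the raw bytes unchanged.
    static size_t bytes_of(size_t elem_size, const std::vector<uint32_t> &dims)
    {
      size_t n = elem_size;
      for (uint32_t d : dims)
        n *= d;
      return n;
    }

    int main()
    {
      std::printf("%zu %zu\n", bytes_of(4, {1, 2, 3, 4}),
                  bytes_of(4, {6, 4})); // 96 96
    }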
diff --git a/runtimes/neurun/src/kernel/cpu/ReshapeLayer.h b/runtimes/neurun/src/kernel/cpu/ReshapeLayer.h
deleted file mode 100644
index 51d0bacee..000000000
--- a/runtimes/neurun/src/kernel/cpu/ReshapeLayer.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_RESHAPELAYER_H__
-#define __NEURUN_KERNEL_CPU_RESHAPELAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class ReshapeLayer : public ::neurun::exec::IFunction
-{
-public:
- ReshapeLayer();
-
-public:
- bool reshapeGeneric();
-
- void configure(uint8_t *inputData, const Shape &inputShape, uint8_t *outputData,
- const Shape &outputShape);
-
- void run();
-
-private:
- uint8_t *_inputData;
- uint8_t *_outputData;
-
- Shape _inputShape;
- Shape _outputShape;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_RESHAPELAYER_H__
diff --git a/runtimes/neurun/src/kernel/cpu/SoftMaxLayer.cc b/runtimes/neurun/src/kernel/cpu/SoftMaxLayer.cc
deleted file mode 100644
index c998c65f6..000000000
--- a/runtimes/neurun/src/kernel/cpu/SoftMaxLayer.cc
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "SoftMaxLayer.h"
-
-#include "tensorflow/contrib/lite/kernels/internal/optimized/optimized_ops.h"
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-SoftMaxLayer::SoftMaxLayer()
- : _inputData(nullptr), _outputData(nullptr), _beta(0.0), _inputShape(), _outputShape(),
- _inputType(OperandType::SCALAR_FLOAT32)
-{
- // DO NOTHING
-}
-
-// Performs softmax along the input of size (input_size * batch_size).
-void Softmax(const float *in, const int input_size, const int batch_size, const float beta,
- float *out)
-{
- TF_LITE_ASSERT(input_size > 0);
-
- // For each batch
- for (int b = 0; b < batch_size; b++)
- {
- // Find the max coeff.
- float max_coeff = in[0];
- for (int i = 1; i < input_size; i++)
- {
- if (in[i] > max_coeff)
- max_coeff = in[i];
- }
-
- // Compute the normalized sum of exps.
- float exp_sum = 0.0;
- for (int i = 0; i < input_size; i++)
- {
- out[i] = std::exp((in[i] - max_coeff) * beta);
- exp_sum += out[i];
- }
-
- // Divide by the sum of exps.
- float reciprocal_sum_exp = 1.f / exp_sum;
- for (int i = 0; i < input_size; i++)
- {
- out[i] *= reciprocal_sum_exp;
- }
-
- // Advance in and out pointers for the next batch.
- in += input_size;
- out += input_size;
- }
-}
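A worked example of the scalar path above, with beta = 1 and a single batch of {1, 2, 3}: max_coeff = 3, the shifted exponentials are {e^-2, e^-1, e^0} ≈ {0.135, 0.368, 1.0}, exp_sum ≈ 1.503, and the output is ≈ {0.090, 0.245, 0.665}, which sums to 1. Subtracting max_coeff before exponentiating leaves the result unchanged (the common factor cancels in the normalization) but keeps std::exp from overflowing on large inputs.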
-
-bool SoftMaxLayer::softmaxFloat32()
-{
- Shape shapeIn4D;
-
- if (getNumberOfDimensions(_inputShape) == 2)
- {
- uint32_t batch_size = getSizeOfDimension(_inputShape, 0);
- uint32_t input_size = getNumberOfElements(_inputShape) / batch_size;
- Softmax(reinterpret_cast<const float *>(_inputData), input_size, batch_size, _beta,
- reinterpret_cast<float *>(_outputData));
- }
- else if (getNumberOfDimensions(_inputShape) == 4)
- {
- ::tflite::SoftmaxParams op_params;
- op_params.beta = _beta;
- ::tflite::optimized_ops::Softmax(op_params, convertShapeToTFLiteShape(_inputShape),
- reinterpret_cast<const float *>(_inputData),
- convertShapeToTFLiteShape(_outputShape),
- reinterpret_cast<float *>(_outputData));
- }
- else
- {
- std::cout << "only 2D and 4D tensors supported" << std::endl;
- return false;
- }
-
- return true;
-}
-
-bool SoftMaxLayer::softmaxQuant8()
-{
- Shape shapeIn4D = _inputShape;
-
- if (getNumberOfDimensions(_inputShape) == 2)
- {
- uint32_t batch_size = getSizeOfDimension(_inputShape, 0);
- uint32_t input_size = getNumberOfElements(_inputShape) / batch_size;
- shapeIn4D.dimensions = {batch_size, 1, 1, input_size};
- }
- else if (getNumberOfDimensions(_inputShape) == 4)
- {
- shapeIn4D = _inputShape;
- }
- else
- {
- std::cout << "only 2D and 4D tensors supported" << std::endl;
- return false;
- }
- if (_outputShape.offset != 0 || _outputShape.scale != 1.f / 256)
- {
- std::cout << "incorrect scale / offset for output" << std::endl;
- return false;
- }
- static const int32_t kScaledDiffIntegerBits = 5;
- const double input_beta_real_multiplier = std::min(
- 1.0 * _beta * _inputShape.scale * (1 << (31 - kScaledDiffIntegerBits)), (1ll << 31) - 1.0);
- int32_t input_multiplier = 0;
- int32_t input_left_shift = 0;
- if (!QuantizeMultiplierGreaterThanOne(input_beta_real_multiplier, &input_multiplier,
- &input_left_shift))
- {
- return false;
- }
- float diff_min = -1.0f * CalculateInputRadius(kScaledDiffIntegerBits, input_left_shift);
-
- ::tflite::SoftmaxParams op_params;
- op_params.input_multiplier = input_multiplier;
- op_params.input_left_shift = input_left_shift;
- op_params.diff_min = diff_min;
- ::tflite::optimized_ops::Softmax(op_params, convertShapeToTFLiteShape(shapeIn4D), _inputData,
- convertShapeToTFLiteShape(shapeIn4D), _outputData);
- return true;
-}
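The quantized path folds beta and the input scale into one fixed-point multiplier: real_multiplier = beta * input_scale * 2^(31 - kScaledDiffIntegerBits), clamped just below 2^31, which QuantizeMultiplierGreaterThanOne decomposes as real_multiplier ≈ multiplier * 2^left_shift / 2^31 with the int32 multiplier in [2^30, 2^31). For instance, with beta = 1 and input_scale = 1/256: real_multiplier = 2^26 / 2^8 = 2^18, so multiplier = 2^30 and input_left_shift = 19, since 2^30 * 2^19 / 2^31 = 2^18.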
-
-void SoftMaxLayer::configure(uint8_t *inputData, const Shape &inputShape, const float beta,
- uint8_t *outputData, const Shape &outputShape)
-{
- _inputData = inputData;
- _inputShape = inputShape;
- _inputType = inputShape.type;
- _outputData = outputData;
- _outputShape = outputShape;
- _beta = beta;
-}
-
-void SoftMaxLayer::run()
-{
- if (_inputType == OperandType::TENSOR_FLOAT32)
- {
- softmaxFloat32();
- }
- else if (_inputType == OperandType::TENSOR_QUANT8_ASYMM)
- {
- throw std::runtime_error{"SoftMaxLayer : Not tested for TENSOR_QUANT8_ASYMM"};
- // softmaxQuant8();
- }
-}
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
diff --git a/runtimes/neurun/src/kernel/cpu/SoftMaxLayer.h b/runtimes/neurun/src/kernel/cpu/SoftMaxLayer.h
deleted file mode 100644
index df1aa4044..000000000
--- a/runtimes/neurun/src/kernel/cpu/SoftMaxLayer.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_KERNEL_CPU_SOFTMAXLAYER_H__
-#define __NEURUN_KERNEL_CPU_SOFTMAXLAYER_H__
-
-#include <NeuralNetworks.h>
-
-#include "exec/interface/IFunction.h"
-
-#include "kernel/cpu/OperationUtils.h"
-
-namespace neurun
-{
-namespace kernel
-{
-namespace cpu
-{
-
-class SoftMaxLayer : public ::neurun::exec::IFunction
-{
-public:
- SoftMaxLayer();
-
-public:
- bool softmaxFloat32();
-
- bool softmaxQuant8();
-
- void configure(uint8_t *inputData, const Shape &inputShape, const float beta, uint8_t *outputData,
- const Shape &outputShape);
-
- void run();
-
-private:
- uint8_t *_inputData;
- uint8_t *_outputData;
-
- float _beta;
-
- Shape _inputShape;
- Shape _outputShape;
-
- OperandType _inputType;
-};
-
-} // namespace cpu
-} // namespace kernel
-} // namespace neurun
-
-#endif // __NEURUN_KERNEL_CPU_SOFTMAXLAYER_H__
diff --git a/runtimes/neurun/src/library_info.cc b/runtimes/neurun/src/library_info.cc
deleted file mode 100644
index 4adf70465..000000000
--- a/runtimes/neurun/src/library_info.cc
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-volatile static const char info[] = "library information : runtime=neurun";
diff --git a/runtimes/neurun/src/linear/Linear.cc b/runtimes/neurun/src/linear/Linear.cc
deleted file mode 100644
index 6452bbd49..000000000
--- a/runtimes/neurun/src/linear/Linear.cc
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <algorithm>
-
-#include "Linear.h"
-
-#include "graph/Graph.h"
-
-#include "graph/operation/LowerInfo.h"
-#include "backend/interface/IStageGenerator.h"
-#include "backend/interface/IConfig.h"
-#include "compiler/SubTensorInfo.h"
-#include "compiler/TensorInfo.h"
-
-#include "util/logging.h"
-
-namespace neurun
-{
-namespace linear
-{
-
-Linear::Linear(const graph::Graph &graph) : _graph(graph)
-{
- // Linearize with topological sort
- //
- // Topological sort algorithm
- // 1. Iterate with DFS
- // 2. Append the node to the vector when DFS for the node finishes (post order)
- // 3. Reverse the order of nodes
-
- graph::Graph::PostDfsConstIterator().iterate(
- graph, [&](const model::operation::Index &index, const model::operation::Node &node) {
- const auto lower_info = graph.getLowerInfo(index);
- _operations.emplace_back(&node, lower_info);
- });
-
- std::reverse(std::begin(_operations), std::end(_operations));
-}
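Taken on its own, the linearization above is the classic post-order-DFS-then-reverse topological sort. A minimal self-contained sketch of the same idea, using a hypothetical adjacency-list graph in place of graph::Graph and its iterator:

    #include <algorithm>
    #include <functional>
    #include <vector>

    // Returns node indices ordered so that every node precedes its successors.
    std::vector<int> topologicalOrder(const std::vector<std::vector<int>> &succ)
    {
      std::vector<int> order;
      std::vector<bool> visited(succ.size(), false);
      std::function<void(int)> dfs = [&](int n) {
        visited[n] = true;
        for (int s : succ[n])
          if (!visited[s])
            dfs(s);
        order.push_back(n); // post order: appended only after all successors
      };
      for (int n = 0; n < static_cast<int>(succ.size()); ++n)
        if (!visited[n])
          dfs(n);
      std::reverse(order.begin(), order.end()); // producers now come first
      return order;
    }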
-
-void Linear::accept(model::operation::NodeVisitor &&visitor) const
-{
- for (const auto op : _operations)
- {
- op.node->accept(std::move(visitor));
- }
-}
-
-backend::TensorBuilderSet Linear::planTensors()
-{
- using ITensorBuilderPtr = std::shared_ptr<backend::ITensorBuilder>;
- using FnOnTensorBuilder =
- std::function<void(const model::operand::Index &ind, ITensorBuilderPtr)>;
-
- const auto &operands = _graph.operands();
- auto iterTensorBuilders = [&operands](const model::operand::Index &ind, FnOnTensorBuilder fn) {
- const auto &obj = operands.at(ind);
- for (auto backend : obj.lower_info()->def_backends())
- {
- auto tensor_builder = backend->tensor_builder();
- fn(ind, tensor_builder);
- }
- };
-
- backend::TensorBuilderSet tensor_builders;
-
- std::unordered_map<model::operand::Index, uint32_t> uses_map;
- std::vector<model::operand::Index> constants;
-
- _graph.operands().iterate(
- [&](const model::operand::Index &ind, const model::operand::Object &obj) {
- uses_map[ind] = obj.getUses().size();
-
- // If a tensor is a constant, increase its use count.
- // This keeps the tensor from being deallocated.
- if (obj.getUsage() == model::operand::OperandUsage::CONSTANT)
- {
- constants.push_back(ind);
- uses_map[ind]++;
- }
-
- for (auto backend : obj.lower_info()->def_backends())
- {
- bool isSubTensor = false;
- auto tensor_builder = backend->tensor_builder();
-
- if (backend->config()->SupportSubTensorAlloc())
- {
- const auto parentInfo = obj.parent_info();
- if (parentInfo != nullptr)
- {
- isSubTensor = true;
- }
- }
-
- if (isSubTensor)
- {
- const compiler::SubTensorInfo info(obj);
- tensor_builder->registerSubTensorInfo(ind, info);
- }
- else
- {
- const auto info = compiler::TensorInfo(obj.shape(), obj.typeInfo());
- tensor_builder->registerTensorInfo(ind, info);
- }
-
- // Prepare tensor builders to be returned
- tensor_builders.insert(tensor_builder);
- }
- });
-
- // If a tensor is a model output, increase its use count.
- // The aim is the same as above.
- for (const auto &ind : _graph.getOutputs())
- {
- uses_map[ind]++;
- }
-
- // Allocate constant operands first
- VERBOSE(LINEAR) << "TENSORS as CONSTANT" << std::endl;
- for (const auto &ind : constants)
- {
- iterTensorBuilders(ind, [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
- tensor_builder->notifyFirstUse(ind);
- });
- }
-
- // Allocate Model's inputs
- VERBOSE(LINEAR) << "TENSORS as MODEL INPUT" << std::endl;
- for (const auto &ind : _graph.getInputs())
- {
- iterTensorBuilders(ind, [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
- tensor_builder->notifyFirstUse(ind);
- });
- }
-
- // At each operation,
- // 1. Scan USE of inputs. Decrease the USE and deallocate if the USE is 0
- // 2. Scan DEF of outputs. If the operation defines the output, allocate it
- VERBOSE(LINEAR) << "TENSORS" << std::endl;
- for (const auto op : _operations)
- {
- for (const auto &ind : op.node->getOutputs())
- {
- const auto &obj = operands.at(ind);
- if (obj.getDef().size())
- {
- iterTensorBuilders(ind,
- [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
- tensor_builder->notifyFirstUse(ind);
- });
- }
- }
-
- for (const auto &ind : op.node->getInputs())
- {
- uses_map[ind]--;
- if (uses_map[ind] == 0)
- {
- iterTensorBuilders(ind,
- [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
- tensor_builder->notifyLastUse(ind);
- });
- }
- }
- }
-
- // By this point, model outputs must not have been deallocated
- assert(std::all_of(_graph.getOutputs().begin(), _graph.getOutputs().end(),
- [&uses_map](const model::operand::Index &ind) { return uses_map[ind] > 0; }));
-
- // Set subtensor information
- // TODO: Move this phase outside as an optimization phase
- return tensor_builders;
-}
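Stripped of backends and sub-tensors, planTensors() is a reference-counting liveness walk: each operand starts with its use count, constants and model outputs receive one extra count so they never reach zero, each output is allocated at its defining operation, and each input is released when its count drops to zero. A minimal sketch of that core loop, with hypothetical allocate/release hooks standing in for notifyFirstUse/notifyLastUse:

    #include <functional>
    #include <unordered_map>
    #include <vector>

    struct Op
    {
      std::vector<int> inputs;
      std::vector<int> outputs;
    };

    // 'uses' maps operand index -> remaining use count, pre-incremented for
    // constants and model outputs so they are never released by this walk.
    void planLifetimes(const std::vector<Op> &ops, std::unordered_map<int, int> uses,
                       const std::function<void(int)> &allocate,
                       const std::function<void(int)> &release)
    {
      for (const Op &op : ops)
      {
        for (int out : op.outputs)
          allocate(out); // DEF: the tensor comes alive at its defining operation
        for (int in : op.inputs)
          if (--uses[in] == 0)
            release(in); // last USE: the tensor may be freed afterwards
      }
    }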
-
-void Linear::iterate(const std::function<void(const Element &element)> &fn) const
-{
- for (const auto op : _operations)
- {
- fn(op);
- }
-}
-
-} // namespace linear
-} // namespace neurun
diff --git a/runtimes/neurun/src/linear/Linear.h b/runtimes/neurun/src/linear/Linear.h
deleted file mode 100644
index fb3f539d4..000000000
--- a/runtimes/neurun/src/linear/Linear.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_LINEAR_LINEAR_H__
-#define __NEURUN_LINEAR_LINEAR_H__
-
-#include <vector>
-
-#include "model/operation/Node.h"
-#include "backend/interface/ITensorBuilder.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operation
-{
-struct NodeVisitor;
-} // namespace operation
-} // namespace graph
-} // namespace neurun
-
-namespace neurun
-{
-namespace graph
-{
-class Graph;
-} // namespace graph
-} // namespace neurun
-
-namespace neurun
-{
-namespace linear
-{
-
-struct Element
-{
- const model::operation::Node *node;
- const graph::operation::LowerInfo *lower_info;
-
- Element(const model::operation::Node *node, const graph::operation::LowerInfo *lower_info)
- : node{node}, lower_info{lower_info}
- {
- }
-};
-
-class Linear
-{
-public:
- Linear(const graph::Graph &graph);
-
-public:
- Linear(const Linear &linear) = delete;
-
-public:
- void accept(model::operation::NodeVisitor &&visitor) const;
-
- // TODO Should not return TensorBuilderSet
- backend::TensorBuilderSet planTensors();
-
- void iterate(const std::function<void(const Element &element)> &fn) const;
-
-private:
- const graph::Graph &_graph;
- std::vector<Element> _operations;
-};
-
-} // namespace linear
-} // namespace neurun
-
-#endif // __NEURUN_LINEAR_LINEAR_H__
diff --git a/runtimes/neurun/src/model/operand/Data.h b/runtimes/neurun/src/model/operand/Data.h
deleted file mode 100644
index 506cb185a..000000000
--- a/runtimes/neurun/src/model/operand/Data.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_DATA_H__
-#define __NEURUN_MODEL_OPERAND_DATA_H__
-
-#include <algorithm>
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-struct Data
-{
- virtual ~Data() = default;
-
- virtual size_t size(void) const = 0;
- virtual const uint8_t *base(void) const = 0;
-};
-
-class CachedData final : public Data
-{
-public:
- CachedData(const uint8_t *base, size_t size) : _base{new uint8_t[size]}, _size{size}
- {
- std::copy(base, base + size, _base);
- }
-
-public:
- ~CachedData() { delete[] _base; }
-
-public:
- size_t size(void) const override { return _size; }
- const uint8_t *base(void) const override { return _base; }
-
-private:
- uint8_t *_base;
- size_t _size;
-};
-
-class ExternalData final : public Data
-{
-public:
- ExternalData(const uint8_t *base, size_t size) : _base{base}, _size{size}
- {
- // DO NOTHING
- }
-
-public:
- size_t size(void) const override { return _size; }
- const uint8_t *base(void) const override { return _base; }
-
-private:
- const uint8_t *_base;
- const size_t _size;
-};
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_DATA_H__
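The two Data implementations differ only in ownership: CachedData copies the bytes into a heap buffer it owns, while ExternalData merely aliases caller-owned memory, which therefore has to outlive the object. A usage sketch:

    uint8_t weights[16] = {0};
    // Safe even after 'weights' goes away: the bytes are copied.
    neurun::model::operand::CachedData cached{weights, sizeof(weights)};
    // Zero-copy, but 'weights' must outlive 'external'.
    neurun::model::operand::ExternalData external{weights, sizeof(weights)};

Since CachedData keeps a raw owning pointer and does not delete its copy operations, copying one would double-delete the buffer; it is only safe held through a unique_ptr, which is exactly how Object stores its Data below.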
diff --git a/runtimes/neurun/src/model/operand/DataType.h b/runtimes/neurun/src/model/operand/DataType.h
deleted file mode 100644
index d75a0dbf1..000000000
--- a/runtimes/neurun/src/model/operand/DataType.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_DATATYPE_H__
-#define __NEURUN_MODEL_OPERAND_DATATYPE_H__
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-enum class DataType
-{
- SCALAR_FLOAT32 = 0,
- SCALAR_INT32 = 1,
- SCALAR_UINT32 = 2,
-
- TENSOR_FLOAT32 = 3,
- TENSOR_INT32 = 4,
-
- TENSOR_QUANT8_ASYMM = 5,
-};
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_DATATYPE_H__
diff --git a/runtimes/neurun/src/model/operand/Index.h b/runtimes/neurun/src/model/operand/Index.h
deleted file mode 100644
index 1c84ba451..000000000
--- a/runtimes/neurun/src/model/operand/Index.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_INDEX_H__
-#define __NEURUN_MODEL_OPERAND_INDEX_H__
-
-#include "graph/Index.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-using Index = ::neurun::graph::Index<uint32_t, struct IndexTag>;
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-namespace IO
-{
-
-using Index = ::neurun::graph::Index<uint32_t, struct IndexTag>;
-
-} // namespace IO
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_INDEX_H__
diff --git a/runtimes/neurun/src/model/operand/IndexSet.cc b/runtimes/neurun/src/model/operand/IndexSet.cc
deleted file mode 100644
index b83d314e4..000000000
--- a/runtimes/neurun/src/model/operand/IndexSet.cc
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "IndexSet.h"
-
-#include <algorithm>
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-IndexSet::IndexSet(std::initializer_list<Index> list) : _set(list)
-{
- // DO NOTHING
-}
-
-IndexSet::IndexSet(std::initializer_list<int32_t> list)
-{
- for (auto val : list)
- {
- _set.emplace_back(static_cast<uint32_t>(val));
- }
-}
-
-IndexSet::IndexSet(std::initializer_list<uint32_t> list)
-{
- for (auto val : list)
- {
- _set.emplace_back(val);
- }
-}
-
-bool IndexSet::contains(const Index &index) const
-{
- return std::find(_set.begin(), _set.end(), index) != _set.end();
-}
-
-void IndexSet::replace(const Index &from, const Index &to)
-{
- std::replace(_set.begin(), _set.end(), from, to);
-}
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operand/IndexSet.h b/runtimes/neurun/src/model/operand/IndexSet.h
deleted file mode 100644
index e8827de9c..000000000
--- a/runtimes/neurun/src/model/operand/IndexSet.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_INDEX_SET_H__
-#define __NEURUN_MODEL_OPERAND_INDEX_SET_H__
-
-#include <initializer_list>
-#include <vector>
-
-#include "Index.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-class IndexSet
-{
-public:
- IndexSet(void) = default;
- IndexSet(std::initializer_list<Index> list);
- IndexSet(std::initializer_list<int32_t> list);
- IndexSet(std::initializer_list<uint32_t> list);
-
-public:
- void append(const Index &index) { _set.emplace_back(index); }
-
-public:
- uint32_t size() const { return static_cast<uint32_t>(_set.size()); }
- const Index &at(IO::Index set_index) const { return _set.at(set_index.asInt()); }
- const Index &at(uint32_t index) const { return _set.at(index); }
- bool contains(const Index &index) const;
- void replace(const Index &from, const Index &to);
-
-public:
- std::vector<Index>::const_iterator begin(void) const { return _set.begin(); }
- std::vector<Index>::const_iterator end(void) const { return _set.end(); }
-
-private:
- std::vector<Index> _set;
-};
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_INDEX_SET_H__
diff --git a/runtimes/neurun/src/model/operand/Object.cc b/runtimes/neurun/src/model/operand/Object.cc
deleted file mode 100644
index 63cf29bd3..000000000
--- a/runtimes/neurun/src/model/operand/Object.cc
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Object.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-size_t Object::operandSize(void) const
-{
- const uint32_t ranks = _shape.rank();
- int32_t elements = 1;
-
- for (uint32_t rank = 0; rank < ranks; rank++)
- {
- elements *= _shape.dim(rank);
- }
-
- DataType type = _type.type();
- size_t element_size = 0;
-
- // The value of type matches the OperandCode enum in NeuralNetworks.h
- switch (type)
- {
- case DataType::SCALAR_FLOAT32:
- case DataType::TENSOR_FLOAT32:
- element_size = sizeof(float);
- break;
- case DataType::SCALAR_INT32:
- case DataType::TENSOR_INT32:
- element_size = sizeof(int32_t);
- break;
- case DataType::SCALAR_UINT32:
- element_size = sizeof(uint32_t);
- break;
- case DataType::TENSOR_QUANT8_ASYMM:
- element_size = sizeof(uint8_t);
- break;
- default:
- throw std::runtime_error{"Unsupported type size"};
- }
-
- return element_size * elements;
-}
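For example, a TENSOR_FLOAT32 operand of shape {2, 3} gives elements = 2 * 3 = 6 and element_size = sizeof(float) = 4, so operandSize() returns 24 bytes; for a rank-0 SCALAR_INT32 operand the loop never runs, elements stays 1, and the size is 4 bytes.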
-
-bool Object::setUsage(const OperandUsage usage)
-{
- if (usageIsDefined() && (_usage != usage))
- {
- // Already set as different type
- return false;
- }
-
- _usage = usage;
-
- return true;
-}
-
-void Object::appendUse(const ::neurun::model::operation::Index &idx)
-{
- assert(_usage != OperandUsage::NOT_DEFINED);
- assert(!_uses.contains(idx));
-
- _uses.append(idx);
-}
-
-void Object::removeUse(const ::neurun::model::operation::Index &idx)
-{
- assert(_usage != OperandUsage::NOT_DEFINED);
- assert(_uses.contains(idx));
-
- _uses.remove(idx);
-}
-
-void Object::appendDef(const ::neurun::model::operation::Index &idx)
-{
- assert(_usage != OperandUsage::NOT_DEFINED && _usage != OperandUsage::CONSTANT);
- assert(_def.size() == 0);
-
- _def.append(idx);
-}
-
-void Object::removeDef(const ::neurun::model::operation::Index &idx)
-{
- assert(_usage != OperandUsage::NOT_DEFINED);
- assert(_def.contains(idx));
-
- _def.remove(idx);
-}
-
-void Object::lower_info(std::unique_ptr<graph::operand::LowerInfo> &&lower_info)
-{
- _lower_info = std::move(lower_info);
-}
-
-const graph::operand::LowerInfo *Object::lower_info() const { return _lower_info.get(); }
-
-graph::operand::LowerInfo *Object::lower_info() { return _lower_info.get(); }
-
-void Object::parent_info(std::unique_ptr<graph::operand::ParentInfo> &&parent_info)
-{
- _parent_info = std::move(parent_info);
-}
-
-const graph::operand::ParentInfo *Object::parent_info() const { return _parent_info.get(); }
-
-graph::operand::ParentInfo *Object::parent_info() { return _parent_info.get(); }
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operand/Object.h b/runtimes/neurun/src/model/operand/Object.h
deleted file mode 100644
index eb5f6275e..000000000
--- a/runtimes/neurun/src/model/operand/Object.h
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_OBJECT_H__
-#define __NEURUN_MODEL_OPERAND_OBJECT_H__
-
-#include <cassert>
-#include <cstdint>
-#include <memory>
-#include <algorithm>
-
-#include "Shape.h"
-#include "Data.h"
-#include "TypeInfo.h"
-#include "graph/operand/LowerInfo.h" // TODO Remove this dependency
-#include "graph/operand/ParentInfo.h" // TODO Remove this dependency
-#include "model/operation/IndexList.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
- // Operand usage should be exactly one of these
-enum class OperandUsage
-{
- NOT_DEFINED,
- MODEL_INPUT,
- CONSTANT,
- OPERATION_OUTPUT,
-};
-
-class Object
-{
-public:
- explicit Object(const Shape &shape, const TypeInfo &type)
- : _shape{shape}, _type{type}, _usage{OperandUsage::NOT_DEFINED}
- {
- // DO NOTHING
- }
-
-public:
- const Shape &shape(void) const { return _shape; }
- const TypeInfo &typeInfo(void) const { return _type; }
- size_t operandSize(void) const;
- bool setAsConstant() { return setUsage(OperandUsage::CONSTANT); }
- bool setAsModelInput() { return setUsage(OperandUsage::MODEL_INPUT); }
- bool setAsOperationOutput() { return setUsage(OperandUsage::OPERATION_OUTPUT); }
- bool usageIsDefined(void) const { return _usage != OperandUsage::NOT_DEFINED; }
- bool isModelInput(void) const { return _usage == OperandUsage::MODEL_INPUT; }
- OperandUsage getUsage() const { return _usage; }
-
- const operation::IndexList &getUses() const { return _uses; }
- const operation::IndexList &getDef() const { return _def; }
- void appendUse(const operation::Index &idx);
- void removeUse(const operation::Index &idx);
- void appendDef(const operation::Index &idx);
- void removeDef(const operation::Index &idx);
-
-private:
- bool setUsage(OperandUsage usage);
-
-public:
- void data(std::unique_ptr<Data> &&data) { _data = std::move(data); }
- const Data &data(void) const { return *_data; }
-
-public:
- template <typename T, typename... Args> void data(Args &&... args)
- {
- data(std::unique_ptr<T>(new T{std::forward<Args>(args)...}));
- }
-
-public:
- template <typename T> T asScalar(void) const
- {
- assert((_shape.rank() == 0) || ((_shape.rank() == 1) && (_shape.dim(0) == 1)));
- assert(_data != nullptr);
- assert((_data->base() != nullptr) && (_data->size() == sizeof(T)));
-
- return *(reinterpret_cast<const T *>(_data->base()));
- }
-
-public:
- void lower_info(std::unique_ptr<graph::operand::LowerInfo> &&lower_info);
- const graph::operand::LowerInfo *lower_info() const;
- graph::operand::LowerInfo *lower_info();
- /**
- * @brief Set parent information
- * @param[in] parent_info Parent information
- */
- void parent_info(std::unique_ptr<graph::operand::ParentInfo> &&parent_info);
- /**
- * @brief Return parent information pointer as constant
- * @return Parent information pointer
- */
- const graph::operand::ParentInfo *parent_info() const;
- /**
- * @brief Return parent information pointer
- * @return Parent information pointer
- */
- graph::operand::ParentInfo *parent_info();
-
-private:
- const Shape _shape;
- const TypeInfo _type;
- std::unique_ptr<Data> _data;
- OperandUsage _usage;
-
- operation::IndexList _uses;
- operation::IndexList _def; // size is 0 (constant) or 1 (from def operation)
-
- std::unique_ptr<graph::operand::LowerInfo> _lower_info;
- std::unique_ptr<graph::operand::ParentInfo> _parent_info;
-};
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_OBJECT_H__
diff --git a/runtimes/neurun/src/model/operand/Set.cc b/runtimes/neurun/src/model/operand/Set.cc
deleted file mode 100644
index d93c21514..000000000
--- a/runtimes/neurun/src/model/operand/Set.cc
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Set.h"
-
-#include "cpp14/memory.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-const Index Set::generateIndex()
-{
- assert((_index_count) <= 0x7fffffff);
-
- return Index{_index_count++};
-}
-
-Index Set::append(const Shape &shape, const TypeInfo &type)
-{
- auto index = generateIndex();
-
- _objects[index] = nnfw::cpp14::make_unique<Object>(shape, type);
-
- return index;
-}
-
-const Object &Set::at(const Index &index) const { return *(_objects.at(index)); }
-
-Object &Set::at(const Index &index) { return *(_objects.at(index)); }
-
-bool Set::exist(const Index &index) const { return index.value() < _objects.size(); }
-
-void Set::iterate(const std::function<void(const Index &, const Object &)> &fn) const
-{
- for (const auto &e : _objects)
- {
- fn(e.first, *e.second);
- }
-}
-
-void Set::iterate(const std::function<void(const Index &, Object &)> &fn)
-{
- // TODO Remove this workaround
- // This implementation is a workaround for the case where operands are added during iteration
- //
- // // Original Implementation (We probably should be back to this)
- // for (auto &e : _objects)
- // {
- // fn(e.first, *e.second);
- // }
-
- std::list<Index> l;
-
- for (auto &e : _objects)
- {
- l.push_back(e.first);
- }
-
- for (auto index : l)
- {
- fn(index, *_objects[index]);
- }
-}
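The snapshot makes it safe for fn to append operands (the new ones are simply not visited in this pass). Removal is another matter: _objects[index] uses operator[], so if fn erased an operand still present in the snapshot, the map would default-construct a null unique_ptr and the dereference would be undefined behavior; as the comment says, the workaround only covers additions.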
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operand/Set.h b/runtimes/neurun/src/model/operand/Set.h
deleted file mode 100644
index 9dff7ec3c..000000000
--- a/runtimes/neurun/src/model/operand/Set.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_SET_H__
-#define __NEURUN_MODEL_OPERAND_SET_H__
-
-#include <memory>
-#include <unordered_map>
-
-#include "Object.h"
-#include "Index.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-class Set
-{
-public:
- Set() : _index_count(0) {}
-
-public:
- Index append(const Shape &, const TypeInfo &);
- void remove(const Index &index) { _objects.erase(index); }
-
-public:
- const Object &at(const Index &) const;
- Object &at(const Index &);
- bool exist(const Index &) const;
- void iterate(const std::function<void(const Index &, const Object &)> &fn) const;
- void iterate(const std::function<void(const Index &, Object &)> &fn);
-
-private:
- const Index generateIndex();
-
-private:
- std::unordered_map<Index, std::unique_ptr<Object>> _objects;
- uint32_t _index_count;
-};
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_SET_H__
diff --git a/runtimes/neurun/src/model/operand/Shape.cc b/runtimes/neurun/src/model/operand/Shape.cc
deleted file mode 100644
index f74c48d88..000000000
--- a/runtimes/neurun/src/model/operand/Shape.cc
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <cassert>
-
-#include "Shape.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-Shape::Shape(uint32_t rank) { _dims.resize(rank); }
-
-int32_t Shape::asVector(void) const
-{
- assert(rank() == 1);
-
- return dim(0);
-}
-
-nnfw::misc::matrix::Shape Shape::asMatrix(void) const
-{
- assert(rank() == 2);
-
- const auto height = dim(0);
- const auto width = dim(1);
-
- return nnfw::misc::matrix::Shape(height, width);
-}
-
-nnfw::misc::feature::Shape Shape::asFeature(void) const
-{
- assert(rank() == 4);
-
- // Feature Map in NNAPI
- // - Dimension(0) -> Batch
- // - Dimension(1) -> Height
- // - Dimension(2) -> Width
- // - Dimension(3) -> Depth
- const auto batch = dim(0);
- const auto depth = dim(3);
- const auto height = dim(1);
- const auto width = dim(2);
-
- return nnfw::misc::feature::Shape(batch, depth, height, width);
-}
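For example, a rank-4 shape {1, 224, 224, 3} is read as batch = 1, height = 224, width = 224, depth = 3, i.e. the NHWC layout NNAPI uses for feature maps; asKernel() below applies the same permutation with dim(0) reinterpreted as the kernel count.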
-
-nnfw::misc::kernel::Shape Shape::asKernel(void) const
-{
- assert(rank() == 4);
-
- // Convolution Kernel in NNAPI
- // - Dimension(0) -> Count
- // - Dimension(1) -> Height
- // - Dimension(2) -> Width
- // - Dimension(3) -> Depth
- const auto count = dim(0);
- const auto depth = dim(3);
- const auto height = dim(1);
- const auto width = dim(2);
-
- return nnfw::misc::kernel::Shape(count, depth, height, width);
-}
-
-nnfw::misc::tensor::Shape Shape::asTensor(void) const
-{
- nnfw::misc::tensor::Shape shape{};
- for (uint32_t i = 0; i < rank(); ++i)
- {
- shape.append(dim(i));
- }
-
- return shape; // this shape represents the NNAPI shape
-}
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operand/Shape.h b/runtimes/neurun/src/model/operand/Shape.h
deleted file mode 100644
index b80f647d5..000000000
--- a/runtimes/neurun/src/model/operand/Shape.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_SHAPE_H__
-#define __NEURUN_MODEL_OPERAND_SHAPE_H__
-
-#include <vector>
-#include <cstdint>
-
-#include "misc/feature/Shape.h"
-#include "misc/kernel/Shape.h"
-#include "misc/matrix/Shape.h"
-#include "misc/tensor/Shape.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-struct Shape
-{
-public:
- Shape(uint32_t rank = 0);
-
-public:
- uint32_t rank(void) const { return _dims.size(); }
-
-public:
- int32_t dim(uint32_t n) const { return _dims.at(n); }
- int32_t &dim(uint32_t n) { return _dims.at(n); }
- const std::vector<int32_t> &dims() const { return _dims; }
-
-public:
- int32_t asVector(void) const;
- nnfw::misc::matrix::Shape asMatrix(void) const;
- nnfw::misc::feature::Shape asFeature(void) const;
- nnfw::misc::kernel::Shape asKernel(void) const;
- nnfw::misc::tensor::Shape asTensor(void) const;
-
-private:
- std::vector<int32_t> _dims;
-};
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_SHAPE_H__
diff --git a/runtimes/neurun/src/model/operand/TypeInfo.cc b/runtimes/neurun/src/model/operand/TypeInfo.cc
deleted file mode 100644
index 0b9f63c93..000000000
--- a/runtimes/neurun/src/model/operand/TypeInfo.cc
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "TypeInfo.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-DataType TypeInfo::typeFromOperandCode(OperandCode type)
-{
- // For now, neurun::model::operand::DataType shares the same enum values as OperandCode
- // in NeuralNetworks.h.
- return static_cast<DataType>(static_cast<uint32_t>(type));
-}
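For example, ANEURALNETWORKS_TENSOR_FLOAT32 is 3 in NeuralNetworks.h and DataType::TENSOR_FLOAT32 is likewise 3 in DataType.h above, so the static_cast is an identity mapping; it stays correct only as long as the two enums remain value-compatible.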
-
-} // namespace operand
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operand/TypeInfo.h b/runtimes/neurun/src/model/operand/TypeInfo.h
deleted file mode 100644
index d16172a09..000000000
--- a/runtimes/neurun/src/model/operand/TypeInfo.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERAND_TYPEINFO_H__
-#define __NEURUN_MODEL_OPERAND_TYPEINFO_H__
-
-#include <cstdint>
-
-#include <NeuralNetworks.h>
-
-#include "DataType.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operand
-{
-
-class TypeInfo
-{
-public:
- TypeInfo() = default;
-
- TypeInfo(OperandCode type, float scale, int32_t offset)
- : _type(typeFromOperandCode(type)), _scale(scale), _offset(offset)
- {
- // DO NOTHING
- }
-
-public:
- DataType type() const { return _type; }
- float scale() const { return _scale; }
- int32_t offset() const { return _offset; }
-
-private:
- // For now, neurun::model::operand::DataType shares the same enum values as OperandCode
- // in NeuralNetworks.h.
- // If the values ever diverge, this mapping function must be fixed.
- DataType typeFromOperandCode(OperandCode type);
-
-private:
- DataType _type;
- float _scale;
- int32_t _offset;
-};
-} // namespace operand
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERAND_TYPEINFO_H__
diff --git a/runtimes/neurun/src/model/operation/AddNode.cc b/runtimes/neurun/src/model/operation/AddNode.cc
deleted file mode 100644
index 0c9d4e09b..000000000
--- a/runtimes/neurun/src/model/operation/AddNode.cc
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "AddNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void AddNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-AddNode::AddNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(2u)}
-{
- assert(init_param.input_count == 2);
- assert(init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> Lefthand side operand
- // 1 -> Righthand side operand
-
- setInputs({init_param.inputs[0], init_param.inputs[1]});
- setOutputs({init_param.outputs[0]});
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/AddNode.h b/runtimes/neurun/src/model/operation/AddNode.h
deleted file mode 100644
index 533fb0ab3..000000000
--- a/runtimes/neurun/src/model/operation/AddNode.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_ADD_NODE_H__
-#define __NEURUN_MODEL_OPERATION_ADD_NODE_H__
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class AddNode : public model::operation::Node
-{
-public:
- AddNode(const model::operation::Node::InitParam &init_param);
-
- enum Input
- {
- LHS = 0,
- RHS
- };
-
- struct Param
- {
- operand::Index activation_index;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "Add"; }
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_ADD_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/AvgPool2DNode.cc b/runtimes/neurun/src/model/operation/AvgPool2DNode.cc
deleted file mode 100644
index 8c688e60a..000000000
--- a/runtimes/neurun/src/model/operation/AvgPool2DNode.cc
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "AvgPool2DNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void AvgPool2DNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-AvgPool2DNode::AvgPool2DNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(1u)}
-{
- assert(init_param.input_count == 7);
- assert(init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> IFM Tensor Index
- // 1 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
- // 2 -> Horizontal (over width) Stride Index
- // 3 -> Vertical (over height) Stride Index
- // 4 -> Filter Width Index
- // 5 -> Filter Height Index
- // 6 -> FuseCode (activation) Index
-
- setInputs({init_param.inputs[0]});
- setOutputs({init_param.outputs[0]});
-
- _param.padding_index = operand::Index{init_param.inputs[1]};
- _param.hstride_index = operand::Index{init_param.inputs[2]};
- _param.vstride_index = operand::Index{init_param.inputs[3]};
-
- _param.kw_index = operand::Index{init_param.inputs[4]};
- _param.kh_index = operand::Index{init_param.inputs[5]};
- _param.activation_index = operand::Index{init_param.inputs[6]};
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/AvgPool2DNode.h b/runtimes/neurun/src/model/operation/AvgPool2DNode.h
deleted file mode 100644
index e66e6146e..000000000
--- a/runtimes/neurun/src/model/operation/AvgPool2DNode.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_AVGPOOL2D_NODE_H__
-#define __NEURUN_MODEL_OPERATION_AVGPOOL2D_NODE_H__
-
-#include <memory>
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class AvgPool2DNode : public model::operation::Node
-{
-public:
- AvgPool2DNode(const model::operation::Node::InitParam &init_param);
-
- enum Input
- {
- INPUT = 0
- };
-
- struct Param
- {
- operand::Index kw_index;
- operand::Index kh_index;
-
- operand::Index hstride_index;
- operand::Index vstride_index;
-
- operand::Index padding_index;
- operand::Index activation_index;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "AvgPool2D"; }
-
-public:
- const Param &param() const { return _param; }
-
-private:
- Param _param;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_AVGPOOL2D_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/ConcatNode.cc b/runtimes/neurun/src/model/operation/ConcatNode.cc
deleted file mode 100644
index 23cfef294..000000000
--- a/runtimes/neurun/src/model/operation/ConcatNode.cc
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ConcatNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void ConcatNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-ConcatNode::ConcatNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createAtLeast(2u)}
-{
- assert(init_param.input_count >= 2); // At least one input tensor and an axis
- assert(init_param.output_count == 1);
-
- // When there are N + 1 inputs, each input should be interpreted as follows:
- //
- // [0, N) -> Input tensors
- // N -> Axis
- //
-
- {
- operand::IndexSet inds;
- for (uint32_t n = 0; n < init_param.input_count - 1; ++n)
- {
- inds.append(operand::Index{init_param.inputs[n]});
- }
- setInputs(inds);
- }
- setOutputs({init_param.outputs[0]});
-
- _param.axis_index = operand::Index{init_param.inputs[init_param.input_count - 1]};
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/ConcatNode.h b/runtimes/neurun/src/model/operation/ConcatNode.h
deleted file mode 100644
index b69ee2f23..000000000
--- a/runtimes/neurun/src/model/operation/ConcatNode.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_CONCAT_NODE_H__
-#define __NEURUN_MODEL_OPERATION_CONCAT_NODE_H__
-
-#include <memory>
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class ConcatNode : public model::operation::Node
-{
-public:
- ConcatNode(const model::operation::Node::InitParam &init_param);
-
- struct Param
- {
- operand::Index axis_index;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "Concat"; }
-
-public:
- const Param &param() const { return _param; }
-
-private:
- Param _param;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_CONCAT_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/Conv2DNode.cc b/runtimes/neurun/src/model/operation/Conv2DNode.cc
deleted file mode 100644
index 7eb2b183d..000000000
--- a/runtimes/neurun/src/model/operation/Conv2DNode.cc
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Conv2DNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void Conv2DNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-Conv2DNode::Conv2DNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(3u)}
-{
- assert(init_param.input_count == 7 && init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- //
- // 0 -> IFM Tensor Index
- // 1 -> Kernel Tensor Index
- // 2 -> Bias Tensor Index
- // 3 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
- // 4 -> Stride (width) Index
- // 5 -> Stride (height) Index
- // 6 -> Activation Index
-
- setInputs({init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]});
- setOutputs({init_param.outputs[0]});
-
- _param.padding_index = operand::Index{init_param.inputs[3]};
- _param.hstride_index = operand::Index{init_param.inputs[4]};
- _param.vstride_index = operand::Index{init_param.inputs[5]};
- _param.activation_index = operand::Index{init_param.inputs[6]};
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/Conv2DNode.h b/runtimes/neurun/src/model/operation/Conv2DNode.h
deleted file mode 100644
index 34a95f0d9..000000000
--- a/runtimes/neurun/src/model/operation/Conv2DNode.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_CONV2D_NODE_H__
-#define __NEURUN_MODEL_OPERATION_CONV2D_NODE_H__
-
-#include <memory>
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class Conv2DNode : public model::operation::Node
-{
-public:
- Conv2DNode(const model::operation::Node::InitParam &);
-
- enum Input
- {
- INPUT = 0,
- KERNEL,
- BIAS
- };
-
- struct Param
- {
- operand::Index hstride_index;
- operand::Index vstride_index;
-
- operand::Index padding_index;
- operand::Index activation_index;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "Conv2D"; }
-
-public:
- const Param &param() const { return _param; }
-
-private:
- Param _param;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_CONV2D_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/FullyConnectedNode.cc b/runtimes/neurun/src/model/operation/FullyConnectedNode.cc
deleted file mode 100644
index 0fde5182d..000000000
--- a/runtimes/neurun/src/model/operation/FullyConnectedNode.cc
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "FullyConnectedNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void FullyConnectedNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-FullyConnectedNode::FullyConnectedNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(3u)}
-{
- assert(init_param.input_count == 4 && init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> A tensor, specifying the input.
- // 1 -> A 2-D tensor, specifying the weights
- // 2 -> A 1-D tensor, specifying the bias
- // 3 -> An INT32 value, and has to be one of the FuseCode values
-
- setInputs({init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]});
- setOutputs({init_param.outputs[0]});
-
- _param.activation_index = operand::Index{init_param.inputs[3]};
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/FullyConnectedNode.h b/runtimes/neurun/src/model/operation/FullyConnectedNode.h
deleted file mode 100644
index 9820ddc8c..000000000
--- a/runtimes/neurun/src/model/operation/FullyConnectedNode.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_FULLYCONNECTED_NODE_H__
-#define __NEURUN_MODEL_OPERATION_FULLYCONNECTED_NODE_H__
-
-#include <memory>
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class FullyConnectedNode : public model::operation::Node
-{
-public:
- FullyConnectedNode(const model::operation::Node::InitParam &init_param);
-
- enum Input
- {
- INPUT = 0,
- WEIGHT,
- BIAS
- };
-
- struct Param
- {
- operand::Index activation_index;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "FullyConnected"; }
-
-public:
- const Param &param() const { return _param; }
-
-private:
- Param _param;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_FULLYCONNECTED_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/Index.h b/runtimes/neurun/src/model/operation/Index.h
deleted file mode 100644
index e03dd74d6..000000000
--- a/runtimes/neurun/src/model/operation/Index.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_INDEX_H__
-#define __NEURUN_MODEL_OPERATION_INDEX_H__
-
-#include "graph/Index.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-using Index = ::neurun::graph::Index<uint32_t, struct IndexTag>;
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_INDEX_H__
diff --git a/runtimes/neurun/src/model/operation/IndexList.cc b/runtimes/neurun/src/model/operation/IndexList.cc
deleted file mode 100644
index e46987036..000000000
--- a/runtimes/neurun/src/model/operation/IndexList.cc
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "IndexList.h"
-
-#include <algorithm>
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-IndexList::IndexList(std::initializer_list<Index> list) : _list(list)
-{
- // DO NOTHING
-}
-
-bool IndexList::contains(const ::neurun::model::operation::Index &index) const
-{
- return std::find(_list.begin(), _list.end(), index) != _list.end();
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/IndexList.h b/runtimes/neurun/src/model/operation/IndexList.h
deleted file mode 100644
index c0af29829..000000000
--- a/runtimes/neurun/src/model/operation/IndexList.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_INDEX_LIST_H__
-#define __NEURUN_MODEL_OPERATION_INDEX_LIST_H__
-
-#include <initializer_list>
-#include <list>
-
-#include "Index.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class IndexList
-{
-public:
- IndexList(void) = default;
- IndexList(std::initializer_list<Index> list);
-
-public:
- void append(const Index &index) { _list.push_back(index); }
- void remove(const Index &index) { _list.remove(index); }
-
-public:
- uint32_t size() const { return static_cast<uint32_t>(_list.size()); }
- const std::list<Index> &list() const { return _list; }
- bool contains(const Index &index) const;
-
-private:
- std::list<Index> _list;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_INDEX_LIST_H__
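A short usage sketch of IndexList; the index values are arbitrary, and Index{...} construction from a raw uint32_t follows the operand::Index usage seen elsewhere in this diff:

    using neurun::model::operation::Index;
    using neurun::model::operation::IndexList;

    IndexList uses{Index{1u}, Index{2u}};
    uses.append(Index{5u});
    uses.remove(Index{1u});
    bool has_five = uses.contains(Index{5u}); // linear scan over the std::list -> true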
diff --git a/runtimes/neurun/src/model/operation/MaxPool2DNode.cc b/runtimes/neurun/src/model/operation/MaxPool2DNode.cc
deleted file mode 100644
index 3d3686b0e..000000000
--- a/runtimes/neurun/src/model/operation/MaxPool2DNode.cc
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "MaxPool2DNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void MaxPool2DNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-MaxPool2DNode::MaxPool2DNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(1u)}
-{
- assert(init_param.input_count == 7);
- assert(init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> IFM Tensor Index
- // 1 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
- // 2 -> Horizontal (over width) Stride Index
- // 3 -> Vertical (over height) Stride Index
- // 4 -> Filter Width Index
- // 5 -> Filter Height Index
- // 6 -> FuseCode (activation) Index
-
- setInputs({init_param.inputs[0]});
- setOutputs({init_param.outputs[0]});
-
- _param.padding_index = operand::Index{init_param.inputs[1]};
- _param.hstride_index = operand::Index{init_param.inputs[2]};
- _param.vstride_index = operand::Index{init_param.inputs[3]};
-
- _param.kw_index = operand::Index{init_param.inputs[4]};
- _param.kh_index = operand::Index{init_param.inputs[5]};
- _param.activation_index = operand::Index{init_param.inputs[6]};
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/MaxPool2DNode.h b/runtimes/neurun/src/model/operation/MaxPool2DNode.h
deleted file mode 100644
index 96d1210a7..000000000
--- a/runtimes/neurun/src/model/operation/MaxPool2DNode.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_MAXPOOL2D_NODE_H__
-#define __NEURUN_MODEL_OPERATION_MAXPOOL2D_NODE_H__
-
-#include <memory>
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class MaxPool2DNode : public model::operation::Node
-{
-public:
- MaxPool2DNode(const model::operation::Node::InitParam &init_param);
-
- enum Input
- {
- INPUT = 0
- };
-
- struct Param
- {
- operand::Index kw_index;
- operand::Index kh_index;
-
- operand::Index hstride_index;
- operand::Index vstride_index;
-
- operand::Index padding_index;
- operand::Index activation_index;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "MaxPool2D"; }
-
-public:
- const Param &param() const { return _param; }
-
-private:
- Param _param;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_MAXPOOL2D_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/Node.Include.h b/runtimes/neurun/src/model/operation/Node.Include.h
deleted file mode 100644
index 95e78c7b5..000000000
--- a/runtimes/neurun/src/model/operation/Node.Include.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// This file intentionally has no include guard
-
-#include "Conv2DNode.h"
-#include "MaxPool2DNode.h"
-#include "AvgPool2DNode.h"
-#include "ConcatNode.h"
-#include "ReshapeNode.h"
-#include "FullyConnectedNode.h"
-#include "SoftmaxNode.h"
-#include "PermuteNode.h"
-#include "AddNode.h"
diff --git a/runtimes/neurun/src/model/operation/Node.cc b/runtimes/neurun/src/model/operation/Node.cc
deleted file mode 100644
index 76397afde..000000000
--- a/runtimes/neurun/src/model/operation/Node.cc
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Node.h"
-
-#include <cassert>
-
-#include "graph/operation/LowerInfo.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-Node::Node(OperandConstraint input_constr) : _input_constr{input_constr} {}
-
-Node::~Node() = default;
-
-void Node::setInputs(const operand::IndexSet &indexes)
-{
- assert(_input_constr.check(indexes.size()));
- _inputs = indexes;
-}
-
-void Node::setOutputs(const operand::IndexSet &indexes) { _outputs = indexes; }
-
-void Node::replaceInput(const operand::Index &from, const operand::Index &to)
-{
- _inputs.replace(from, to);
-}
-
-void Node::replaceOutput(const operand::Index &from, const operand::Index &to)
-{
- _outputs.replace(from, to);
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/Node.h b/runtimes/neurun/src/model/operation/Node.h
deleted file mode 100644
index 76f0d2d00..000000000
--- a/runtimes/neurun/src/model/operation/Node.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_NODE_H__
-#define __NEURUN_MODEL_OPERATION_NODE_H__
-
-#include <memory>
-
-#include "model/operand/Object.h"
-#include "model/operand/IndexSet.h"
-#include "OperandConstraint.h"
-
-namespace neurun
-{
-namespace graph
-{
-namespace operation
-{
-class LowerInfo;
-} // namespace operation
-} // namespace graph
-} // namespace neurun
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-struct NodeVisitor;
-
-class Node
-{
-public:
- struct InitParam
- {
- uint32_t input_count;
- const uint32_t *inputs;
- uint32_t output_count;
- const uint32_t *outputs;
- };
-
-public:
- Node(OperandConstraint input_constr);
- virtual ~Node();
-
-public:
- virtual void accept(NodeVisitor &&) const = 0;
- virtual std::string getName() const = 0;
-
-public:
- void replaceInput(const operand::Index &from, const operand::Index &to);
- void replaceOutput(const operand::Index &from, const operand::Index &to);
- const operand::IndexSet &getInputs() const { return _inputs; }
- const operand::IndexSet &getOutputs() const { return _outputs; }
- // These are for input/output tensors only, not for constant data.
- void setInputs(const operand::IndexSet &indexes);
- void setOutputs(const operand::IndexSet &indexes);
-
-private:
- operand::IndexSet _inputs;
- operand::IndexSet _outputs;
- OperandConstraint _input_constr;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_NODE_H__
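A sketch of what a hypothetical new operation would look like against this base class (inside namespace neurun::model::operation); SquareNode is invented for illustration, and a real operation would also need an entry in Op.lst so that NodeVisitor learns about it:

    class SquareNode : public Node // hypothetical, for illustration only
    {
    public:
      SquareNode(const InitParam &init_param) : Node{OperandConstraint::createExact(1u)}
      {
        setInputs({init_param.inputs[0]});
        setOutputs({init_param.outputs[0]});
      }

    public:
      // Defined out of line, as the existing nodes do, because NodeVisitor.h
      // in turn includes every node header via Node.Include.h.
      virtual void accept(NodeVisitor &&) const override;
      virtual std::string getName() const override { return "Square"; }
    };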
diff --git a/runtimes/neurun/src/model/operation/NodeVisitor.h b/runtimes/neurun/src/model/operation/NodeVisitor.h
deleted file mode 100644
index 8420de998..000000000
--- a/runtimes/neurun/src/model/operation/NodeVisitor.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_NODE_VISITOR_H__
-#define __NEURUN_MODEL_OPERATION_NODE_VISITOR_H__
-
-#include "Node.Include.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-struct NodeVisitor
-{
- virtual ~NodeVisitor() = default;
-
-#define OP(InternalName, IsNnApi, NnApiName) \
- virtual void visit(const InternalName &) {}
-#include "model/operation/Op.lst"
-#undef OP
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_NODE_VISITOR_H__
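A sketch of a concrete visitor against this interface; only the overridden visit() does anything, while every other handler keeps the empty default generated from Op.lst:

    #include <iostream>

    struct NameDumper : public neurun::model::operation::NodeVisitor
    {
      void visit(const neurun::model::operation::Conv2DNode &node) override
      {
        std::cout << "visited " << node.getName() << std::endl;
      }
    };

    // Dispatch goes through Node::accept, which takes the visitor by rvalue
    // reference: node.accept(NameDumper{});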
diff --git a/runtimes/neurun/src/model/operation/Op.lst b/runtimes/neurun/src/model/operation/Op.lst
deleted file mode 100644
index 23f4b5118..000000000
--- a/runtimes/neurun/src/model/operation/Op.lst
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OP
-#error Define OP before including this file
-#endif
-
-// NOTE The relation between "Internal Name" and "NN API Name" is "1 : N".
-
-// Internal Name | NN API? | NN API Name
-OP(AddNode , true , ADD)
-OP(Conv2DNode , true , CONV_2D)
-OP(AvgPool2DNode , true , AVERAGE_POOL_2D)
-OP(MaxPool2DNode , true , MAX_POOL_2D)
-OP(ConcatNode , true , CONCATENATION)
-OP(FullyConnectedNode , true , FULLY_CONNECTED)
-OP(ReshapeNode , true , RESHAPE)
-OP(SoftmaxNode , true , SOFTMAX)
-OP(PermuteNode , false , NOT_AVAILABLE)
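For illustration, how this X-macro table is consumed: with the OP definition from NodeVisitor.h above, each row becomes one empty default handler, and Config.lst (later in this diff) expands the same rows into per-operation backend keys:

    // OP(Conv2DNode , true , CONV_2D) expands in NodeVisitor to:
    virtual void visit(const Conv2DNode &) {}

    // and in Config.lst to:
    //   CONFIG(OP_BACKEND_CONV_2D, std::string, "acl_cl")
    // so the per-operation config keys stay in sync with the operation list.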
diff --git a/runtimes/neurun/src/model/operation/OperandConstraint.cc b/runtimes/neurun/src/model/operation/OperandConstraint.cc
deleted file mode 100644
index 5c69de928..000000000
--- a/runtimes/neurun/src/model/operation/OperandConstraint.cc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "OperandConstraint.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/OperandConstraint.h b/runtimes/neurun/src/model/operation/OperandConstraint.h
deleted file mode 100644
index d1cd8aa2c..000000000
--- a/runtimes/neurun/src/model/operation/OperandConstraint.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_OPERAND_CONSTRAINT_H__
-#define __NEURUN_MODEL_OPERATION_OPERAND_CONSTRAINT_H__
-
-#include <stdint.h>
-#include <limits>
-#include <set>
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class OperandConstraint
-{
-private:
- static const uint32_t INF = std::numeric_limits<uint32_t>::max();
-
-public:
- static OperandConstraint createAny() { return OperandConstraint{0u, INF}; }
- static OperandConstraint createExact(uint32_t exact) { return OperandConstraint{exact, exact}; }
- static OperandConstraint createAtMost(uint32_t end) { return OperandConstraint{0u, end}; }
- static OperandConstraint createAtLeast(uint32_t begin) { return OperandConstraint{begin, INF}; }
- static OperandConstraint createInRange(uint32_t begin, uint32_t end)
- {
- return OperandConstraint{begin, end};
- }
-
-private:
- OperandConstraint(uint32_t begin, uint32_t end) : _begin{begin}, _end{end} {}
-
-public:
- bool check(uint32_t ind) const { return _begin <= ind && ind <= _end; }
-
-private:
- uint32_t _begin;
- uint32_t _end;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_OPERAND_CONSTRAINT_H__
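A few usage examples of the factory methods above; Node's constructor stores one of these, and setInputs() asserts check(indexes.size()) before accepting an operand set:

    using neurun::model::operation::OperandConstraint;

    auto exact = OperandConstraint::createExact(3u);
    bool ok1 = exact.check(3u); // true
    bool ok2 = exact.check(4u); // false

    auto at_least = OperandConstraint::createAtLeast(2u);
    bool ok3 = at_least.check(100u); // true - the upper bound is INF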
diff --git a/runtimes/neurun/src/model/operation/PermuteNode.cc b/runtimes/neurun/src/model/operation/PermuteNode.cc
deleted file mode 100644
index 174d2a86b..000000000
--- a/runtimes/neurun/src/model/operation/PermuteNode.cc
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "PermuteNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void PermuteNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-PermuteNode::PermuteNode(const operand::Index &input, const operand::Index &output, Type type)
- : model::operation::Node{OperandConstraint::createExact(1u)}, _param{type}
-{
- setInputs({input});
- setOutputs({output});
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/PermuteNode.h b/runtimes/neurun/src/model/operation/PermuteNode.h
deleted file mode 100644
index b589975be..000000000
--- a/runtimes/neurun/src/model/operation/PermuteNode.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_PERMUTE_NODE_H__
-#define __NEURUN_MODEL_OPERATION_PERMUTE_NODE_H__
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class PermuteNode : public model::operation::Node
-{
-public:
- enum class Type
- {
- NHWC_TO_NCHW,
- NCHW_TO_NHWC,
- COPY
- };
-
- struct Param
- {
- Type type;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "Permute"; }
-
-public:
- PermuteNode(const operand::Index &input, const operand::Index &output, Type type);
-
-public:
- const Param &param() const { return _param; }
-
-private:
- Param _param;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_PERMUTE_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/ReshapeNode.cc b/runtimes/neurun/src/model/operation/ReshapeNode.cc
deleted file mode 100644
index 616b8cd65..000000000
--- a/runtimes/neurun/src/model/operation/ReshapeNode.cc
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ReshapeNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void ReshapeNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-ReshapeNode::ReshapeNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(1u)}
-{
- assert(init_param.input_count == 2 && init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> A tensor, specifying the tensor to be reshaped.
- // 1 -> A 1-D tensor of type ANEURALNETWORKS_TENSOR_INT32, defining the shape of the output
- // tensor
-
- // TODO Second input should be shape tensor (init_param.inputs[1])
- setInputs({init_param.inputs[0] /* , init_param.inputs[1] */});
- setOutputs({init_param.outputs[0]});
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/ReshapeNode.h b/runtimes/neurun/src/model/operation/ReshapeNode.h
deleted file mode 100644
index 1758e9ec8..000000000
--- a/runtimes/neurun/src/model/operation/ReshapeNode.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_RESHAPE_NODE_H__
-#define __NEURUN_MODEL_OPERATION_RESHAPE_NODE_H__
-
-#include <memory>
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class ReshapeNode : public model::operation::Node
-{
-public:
- ReshapeNode(const model::operation::Node::InitParam &init_param);
-
- enum Input
- {
- INPUT = 0
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "Reshape"; }
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_RESHAPE_NODE_H__
diff --git a/runtimes/neurun/src/model/operation/Set.cc b/runtimes/neurun/src/model/operation/Set.cc
deleted file mode 100644
index 14bd4f584..000000000
--- a/runtimes/neurun/src/model/operation/Set.cc
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Set.h"
-
-#include <cassert>
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-const Index Set::generateIndex()
-{
- assert((_index_count) <= 0x7fffffff);
-
- return Index{_index_count++};
-}
-
-Index Set::append(std::unique_ptr<Node> &&node)
-{
- auto index = generateIndex();
-
- _nodes[index] = std::move(node);
- return index;
-}
-
-const Node &Set::at(const Index &index) const { return *(_nodes.at(index)); }
-
-Node &Set::at(const Index &index) { return *(_nodes.at(index)); }
-
-bool Set::exist(const Index &index) const { return _nodes.find(index) != _nodes.end(); }
-
-void Set::iterate(const std::function<void(const Index &, const Node &)> &fn) const
-{
- for (auto it = _nodes.begin(); it != _nodes.end(); ++it)
- {
- fn(it->first, *it->second);
- }
-}
-
-void Set::iterate(const std::function<void(const Index &, Node &)> &fn)
-{
- for (auto it = _nodes.begin(); it != _nodes.end(); ++it)
- {
- fn(it->first, *it->second);
- }
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/Set.h b/runtimes/neurun/src/model/operation/Set.h
deleted file mode 100644
index eebf91e65..000000000
--- a/runtimes/neurun/src/model/operation/Set.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_SET_H__
-#define __NEURUN_MODEL_OPERATION_SET_H__
-
-#include <memory>
-
-#include "model/operation/Index.h"
-#include "Node.h"
-
-#include <functional>
-#include <unordered_map>
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class Set
-{
-public:
- Set() : _index_count(0) {}
-
-public:
- Index append(std::unique_ptr<Node> &&node);
- void remove(const Index &index) { _nodes.erase(index); }
-
-public:
- const Node &at(const Index &) const;
- Node &at(const Index &);
- bool exist(const Index &) const;
- uint32_t size() const { return static_cast<uint32_t>(_nodes.size()); }
- void iterate(const std::function<void(const Index &, const Node &)> &fn) const;
- void iterate(const std::function<void(const Index &, Node &)> &fn);
-
-private:
- const Index generateIndex();
-
-private:
- std::unordered_map<Index, std::unique_ptr<Node>> _nodes;
- uint32_t _index_count;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_SET_H__
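A usage sketch of this container, mirroring how the graph appends and walks operations; init_param is assumed to be built as in the Conv2DNode sketch earlier:

    using namespace neurun::model::operation;

    Set ops;
    // append() takes ownership and returns a fresh, monotonically
    // increasing Index.
    Index idx = ops.append(std::unique_ptr<Node>{new Conv2DNode{init_param}});

    ops.iterate([](const Index &index, const Node &node) {
      (void)index;
      std::cout << node.getName() << std::endl; // requires <iostream>
    });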
diff --git a/runtimes/neurun/src/model/operation/SoftmaxNode.cc b/runtimes/neurun/src/model/operation/SoftmaxNode.cc
deleted file mode 100644
index d157aa4a7..000000000
--- a/runtimes/neurun/src/model/operation/SoftmaxNode.cc
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "SoftmaxNode.h"
-
-#include <cassert>
-
-#include "NodeVisitor.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-void SoftmaxNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-
-SoftmaxNode::SoftmaxNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(1u)}
-{
- assert(init_param.input_count == 2 && init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> A 2-D or 4-D tensor, specifying the input tensor.
- // 1 -> FLOAT32 value, specifying the positive scaling factor for the exponent, beta.
-
- setInputs({init_param.inputs[0]});
- setOutputs({init_param.outputs[0]});
-
- _param.scale_index = operand::Index{init_param.inputs[1]};
-}
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
diff --git a/runtimes/neurun/src/model/operation/SoftmaxNode.h b/runtimes/neurun/src/model/operation/SoftmaxNode.h
deleted file mode 100644
index 4a5a72e5a..000000000
--- a/runtimes/neurun/src/model/operation/SoftmaxNode.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_MODEL_OPERATION_SOFTMAX_NODE_H__
-#define __NEURUN_MODEL_OPERATION_SOFTMAX_NODE_H__
-
-#include <memory>
-
-#include "model/operation/Node.h"
-
-namespace neurun
-{
-namespace model
-{
-namespace operation
-{
-
-class SoftmaxNode : public model::operation::Node
-{
-public:
- SoftmaxNode(const model::operation::Node::InitParam &init_param);
- enum Input
- {
- INPUT = 0
- };
-
- struct Param
- {
- operand::Index scale_index;
- };
-
-public:
- virtual void accept(NodeVisitor &&) const override;
- virtual std::string getName() const override { return "SoftMax"; }
-
-public:
- const Param &param() const { return _param; }
-
-private:
- Param _param;
-};
-
-} // namespace operation
-} // namespace model
-} // namespace neurun
-
-#endif // __NEURUN_MODEL_OPERATION_SOFTMAX_NODE_H__
diff --git a/runtimes/neurun/src/util/Padding.cc b/runtimes/neurun/src/util/Padding.cc
deleted file mode 100644
index a24c9ddf7..000000000
--- a/runtimes/neurun/src/util/Padding.cc
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "util/Padding.h"
-
-#include <algorithm>
-
-namespace neurun
-{
-namespace util
-{
-
-Padding valid_padding(void)
-{
- //
- // ANEURALNETWORKS_PADDING_VALID
- //
- // VALID padding. No padding.
- //
- // When the input size is not evenly divisible by the filter size,
- // the input at the end that could not fill the whole filter tile
- // will simply be ignored.
- //
- Padding padding;
-
- padding.top = 0;
- padding.bottom = 0;
- padding.left = 0;
- padding.right = 0;
-
- return padding;
-}
-
-Padding same_padding(const nnfw::misc::feature::Shape &ifm_shape,
- const nnfw::misc::feature::Shape &ofm_shape, const Stride &stride, uint32_t kw,
- uint32_t kh)
-{
- Padding padding;
-
- // ANEURALNETWORKS_PADDING_SAME (from NNAPI spec)
- //
- // SAME padding. Padding on both ends are the "same":
- //
- // padding_to_beginning = total_padding / 2
- // padding_to_end = (total_padding + 1)/2.
- //
- const int32_t vertical_needed_input = (ofm_shape.H - 1) * stride.vertical + kh;
- const int32_t vertical_total_padding = std::max(0, vertical_needed_input - ifm_shape.H);
-
- const int32_t horizontal_needed_input = (ofm_shape.W - 1) * stride.horizontal + kw;
- const int32_t horizontal_total_padding = std::max(0, horizontal_needed_input - ifm_shape.W);
-
- padding.top = vertical_total_padding / 2;
- padding.bottom = (vertical_total_padding + 1) / 2;
- padding.left = horizontal_total_padding / 2;
- padding.right = (horizontal_total_padding + 1) / 2;
-
- return padding;
-}
-
-} // namespace util
-} // namespace neurun
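A worked example of the SAME-padding arithmetic above, with illustrative numbers: a 224x224 input, a 3x3 kernel, and stride 2 producing a 112x112 output:

    // vertical_needed_input  = (112 - 1) * 2 + 3 = 225
    // vertical_total_padding = max(0, 225 - 224) = 1
    //   -> padding.top = 1 / 2 = 0, padding.bottom = (1 + 1) / 2 = 1
    // The horizontal side is identical, so padding.left = 0, padding.right = 1.
    // When the total padding is odd, the extra row/column goes to the
    // bottom/right, matching the NN API definition quoted above.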
diff --git a/runtimes/neurun/src/util/Padding.h b/runtimes/neurun/src/util/Padding.h
deleted file mode 100644
index 05a14eb31..000000000
--- a/runtimes/neurun/src/util/Padding.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_UTIL_PADDING_H__
-#define __NEURUN_UTIL_PADDING_H__
-
-#include <stdint.h>
-
-#include <misc/feature/Shape.h>
-
-namespace neurun
-{
-namespace util
-{
-
-struct Padding
-{
- uint32_t top;
- uint32_t bottom;
- uint32_t left;
- uint32_t right;
-};
-
-struct Stride
-{
- uint32_t vertical;
- uint32_t horizontal;
-};
-
-Padding valid_padding(void);
-Padding same_padding(const nnfw::misc::feature::Shape &ifm_shape,
- const nnfw::misc::feature::Shape &ofm_shape, const Stride &stride, uint32_t kw,
- uint32_t kh);
-
-} // namespace util
-} // namespace neurun
-
-#endif // __NEURUN_UTIL_PADDING_H__
diff --git a/runtimes/neurun/src/util/Utils.cc b/runtimes/neurun/src/util/Utils.cc
deleted file mode 100644
index def02db69..000000000
--- a/runtimes/neurun/src/util/Utils.cc
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Utils.h"
-
-#include <cassert>
-
-namespace neurun
-{
-namespace util
-{
-
-const char *to_string(const PaddingCode &code)
-{
- assert((ANEURALNETWORKS_PADDING_SAME == code) || (ANEURALNETWORKS_PADDING_VALID == code));
-
- switch (code)
- {
- case ANEURALNETWORKS_PADDING_SAME:
- return "ANEURALNETWORKS_PADDING_SAME";
- case ANEURALNETWORKS_PADDING_VALID:
- return "ANEURALNETWORKS_PADDING_VALID";
- }
-
- return nullptr;
-}
-
-} // namespace util
-} // namespace neurun
diff --git a/runtimes/neurun/src/util/Utils.h b/runtimes/neurun/src/util/Utils.h
deleted file mode 100644
index a1e5bf0ba..000000000
--- a/runtimes/neurun/src/util/Utils.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file Utils.h
- * @brief This file contains utility functions
- * @ingroup COM_AI_RUNTIME
- */
-
-#ifndef __NEURUN_UTIL_UTILS_H__
-#define __NEURUN_UTIL_UTILS_H__
-
-#include "NeuralNetworks.h"
-
-namespace neurun
-{
-namespace util
-{
-
-/**
- * @brief Converts a PaddingCode to const char*
- * @param[in] code The PaddingCode to be converted
- * @return A string holding the converted value
- */
-const char *to_string(const PaddingCode &code);
-
-} // namespace util
-} // namespace neurun
-
-#endif // __NEURUN_UTIL_UTILS_H__
diff --git a/runtimes/neurun/src/util/config/Config.lst b/runtimes/neurun/src/util/config/Config.lst
deleted file mode 100644
index e029ebe37..000000000
--- a/runtimes/neurun/src/util/config/Config.lst
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CONFIG
-#error Define CONFIG before including this file
-#endif
-
-// Name | Type | Default
-CONFIG(GRAPH_DOT_DUMP , int , "0")
-CONFIG(BACKENDS , std::string , "cpu;acl_cl")
-CONFIG(OP_BACKEND_ALLOPS , std::string , "none")
-CONFIG(DISABLE_COMPILE , bool , "0")
-
-
-// Auto-generate all operations
-
-#define OP(InternalName, IsNnApi, NnApiName) \
- CONFIG(OP_BACKEND_ ## NnApiName, std::string, "acl_cl")
-#include "model/operation/Op.lst"
-#undef OP
-
diff --git a/runtimes/neurun/src/util/config/ConfigManager.cc b/runtimes/neurun/src/util/config/ConfigManager.cc
deleted file mode 100644
index 46b80311c..000000000
--- a/runtimes/neurun/src/util/config/ConfigManager.cc
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ConfigManager.h"
-
-#include <array>
-#include <cstdlib>
-
-namespace neurun
-{
-namespace config
-{
-
-ConfigManager &ConfigManager::instance()
-{
- static ConfigManager manager;
- return manager;
-}
-
-ConfigManager::ConfigManager()
-{
- auto fetch_from_env_var = [&](const std::string &key) {
- const char *value = std::getenv(key.c_str());
- if (value != nullptr)
- {
- _map[key] = value;
- }
- };
-
-#define CONFIG(Name, Type, Default) \
- _map.insert({std::string{#Name}, std::string{Default}}); \
- fetch_from_env_var(#Name);
-
-#include "Config.lst"
-
-#undef CONFIG
-}
-
-template <> bool ConfigManager::get<bool>(const std::string &key) const
-{
- auto raw = _map.at(key);
-
- static const std::array<std::string, 5> false_list{"0", "OFF", "FALSE", "N", "NO"};
- auto false_found = std::find(false_list.begin(), false_list.end(), raw);
-
- return (false_found == false_list.end());
-}
-
-template <> int ConfigManager::get<int>(const std::string &key) const
-{
- auto raw = _map.at(key);
- return std::stoi(raw);
-}
-
-template <> std::string ConfigManager::get<std::string>(const std::string &key) const
-{
- auto raw = _map.at(key);
- return raw;
-}
-
-} // namespace config
-} // namespace neurun
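A usage sketch against the keys declared in Config.lst; the environment variable name is identical to the key, so defaults can be overridden from the shell (e.g. GRAPH_DOT_DUMP=1 ./nnfw_app, where nnfw_app stands in for any binary linking this runtime):

    auto &cfg = neurun::config::ConfigManager::instance();

    int dot_dump = cfg.get<int>("GRAPH_DOT_DUMP");           // 1 if set as above, else 0
    std::string backends = cfg.get<std::string>("BACKENDS"); // default "cpu;acl_cl"
    bool disabled = cfg.get<bool>("DISABLE_COMPILE");        // "0"/"OFF"/"FALSE"/"N"/"NO" -> false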
diff --git a/runtimes/neurun/src/util/config/ConfigManager.h b/runtimes/neurun/src/util/config/ConfigManager.h
deleted file mode 100644
index 78db03dc1..000000000
--- a/runtimes/neurun/src/util/config/ConfigManager.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_CONFIG_CONFIG_MANAGER_H__
-#define __NEURUN_CONFIG_CONFIG_MANAGER_H__
-
-#include <algorithm>
-#include <string>
-#include <unordered_map>
-
-/**
- * @file ConfigManager.h
- * @brief This file contains neurun::config::ConfigManager class
- */
-
-namespace neurun
-{
-namespace config
-{
-
-/**
- * @brief Class that manages configurations
- */
-
-class ConfigManager
-{
-public:
- static ConfigManager &instance();
-
-private:
- /**
- * @brief Construct a new ConfigManager object, fetching overrides from environment variables.
- */
- ConfigManager();
-
-public:
- /**
- * @brief Return the configuration value of the given key
- *
- * @tparam T Type of the config
- * @param key String key value
- *
- * @return The configuration value of the given key
- */
- template <typename T> T get(const std::string &key) const;
-
-private:
- std::unordered_map<std::string, std::string> _map;
-};
-
-template <> bool ConfigManager::get<bool>(const std::string &key) const;
-template <> int ConfigManager::get<int>(const std::string &key) const;
-template <> std::string ConfigManager::get<std::string>(const std::string &key) const;
-
-} // namespace config
-} // namespace neurun
-
-#endif // __NEURUN_CONFIG_CONFIG_MANAGER_H__
diff --git a/runtimes/neurun/src/util/feature/Coordinate4D.h b/runtimes/neurun/src/util/feature/Coordinate4D.h
deleted file mode 100644
index 27d6f7b9e..000000000
--- a/runtimes/neurun/src/util/feature/Coordinate4D.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_UTIL_FEATURE_COORDINATE_4D_H__
-#define __NEURUN_UTIL_FEATURE_COORDINATE_4D_H__
-
-#include <stdint.h>
-
-namespace neurun
-{
-namespace util
-{
-namespace feature
-{
-
-/**
- * @brief Class to represent the position (offset) of a subtensor.\n
- * Assume that parent and child are already lowered (can get Shape4D).
- */
-class Coordinate4D
-{
-public:
- /**
- * @brief Construct a new Coordinate4D object
- */
- Coordinate4D(void) : _n{0}, _h{0}, _w{0}, _c{0}
- {
- // DO NOTHING
- }
- /**
- * @brief Construct a new Coordinate4D object
- * @param[in] n Batch offset
- * @param[in] h Height offset
- * @param[in] w Width offset
- * @param[in] c Channel offset
- * @return
- */
- Coordinate4D(int32_t n, int32_t h, int32_t w, int32_t c) : _n{n}, _h{h}, _w{w}, _c{c}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Return batch offset
- * @return Batch offset
- */
- int32_t n(void) const { return _n; }
- /**
- * @brief Return height offset
- * @return Height offset
- */
- int32_t h(void) const { return _h; }
- /**
- * @brief Return width offset
- * @return Width offset
- */
- int32_t w(void) const { return _w; }
- /**
- * @brief Return channel offset
- * @return Channel offset
- */
- int32_t c(void) const { return _c; }
-
-private:
- int32_t _n;
- int32_t _h;
- int32_t _w;
- int32_t _c;
-};
-
-} // namespace feature
-} // namespace util
-} // namespace neurun
-
-#endif // __NEURUN_UTIL_FEATURE_COORDINATE_4D_H__
diff --git a/runtimes/neurun/src/util/feature/nchw/View.h b/runtimes/neurun/src/util/feature/nchw/View.h
deleted file mode 100644
index 048fdecd8..000000000
--- a/runtimes/neurun/src/util/feature/nchw/View.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_UTIL_FEATURE_NCHW_VIEW_H__
-#define __NEURUN_UTIL_FEATURE_NCHW_VIEW_H__
-
-#include "misc/feature/Reader.h"
-#include "misc/feature/Shape.h"
-
-#include "backend/interface/operand/ITensor.h"
-#include "util/feature/Coordinate4D.h"
-
-#include <cassert>
-
-namespace neurun
-{
-namespace util
-{
-namespace feature
-{
-namespace nchw
-{
-
-template <typename T> class View final : public nnfw::misc::feature::Reader<T>
-{
-public:
- View(::neurun::backend::operand::ITensor *tensor) : _tensor{tensor}
- {
- // TODO Validate whether tensor is a feature map, or not
- _shape.N = tensor->dimension(3);
- _shape.C = tensor->dimension(2);
- _shape.H = tensor->dimension(1);
- _shape.W = tensor->dimension(0);
- }
-
-public:
- const ::nnfw::misc::feature::Shape &shape(void) const { return _shape; }
-
-public:
- T at(uint32_t ch, uint32_t row, uint32_t col) const override
- {
- const auto offset = feature_index_to_byte_offset(0, ch, row, col);
-
- T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
-
- return *ptr;
- }
- T at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const override
- {
- const auto offset = feature_index_to_byte_offset(batch, ch, row, col);
-
- T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
-
- return *ptr;
- }
-
-public:
- T &at(uint32_t ch, uint32_t row, uint32_t col)
- {
- const auto offset = feature_index_to_byte_offset(0, ch, row, col);
-
- T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
-
- return *ptr;
- }
- T &at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col)
- {
- const auto offset = feature_index_to_byte_offset(batch, ch, row, col);
-
- T *ptr = reinterpret_cast<T *>(_tensor->buffer() + offset);
-
- return *ptr;
- }
-
-private:
- size_t feature_index_to_byte_offset(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const
- {
- return _tensor->calcOffset(
- neurun::util::feature::Coordinate4D{static_cast<int32_t>(batch), static_cast<int32_t>(row),
- static_cast<int32_t>(col), static_cast<int32_t>(ch)});
- }
-
-private:
- ::nnfw::misc::feature::Shape _shape;
- ::neurun::backend::operand::ITensor *_tensor;
-};
-
-} // namespace nchw
-} // namespace feature
-} // namespace util
-} // namespace neurun
-
-#endif // __NEURUN_UTIL_FEATURE_NCHW_VIEW_H__
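A sketch of reading and writing through this view; tensor is assumed to be an already-allocated backend ITensor whose dimension() ordering matches what the constructor expects:

    neurun::util::feature::nchw::View<float> view{tensor};

    float v = view.at(0 /* batch */, 2 /* ch */, 3 /* row */, 4 /* col */);
    view.at(0, 2, 3, 4) = v * 2.0f; // the non-const at() returns a writable reference

    // Every access goes through ITensor::calcOffset with a Coordinate4D,
    // so the same code works whatever the backend's internal layout is.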
diff --git a/runtimes/neurun/src/util/feature/nhwc/Reader.h b/runtimes/neurun/src/util/feature/nhwc/Reader.h
deleted file mode 100644
index 85b8cab74..000000000
--- a/runtimes/neurun/src/util/feature/nhwc/Reader.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_UTIL_FEATURE_NHWC_READER_H__
-#define __NEURUN_UTIL_FEATURE_NHWC_READER_H__
-
-#include "Utils.h"
-
-#include "misc/feature/Reader.h"
-
-namespace neurun
-{
-namespace util
-{
-namespace feature
-{
-namespace nhwc
-{
-
-template <typename T> class Reader final : public nnfw::misc::feature::Reader<T>
-{
-public:
- Reader(const ::nnfw::misc::feature::Shape &shape, const T *ptr, size_t len)
- : _shape{shape}, _ptr{ptr}
- {
- (void)len; // Workaround for unused variable in release mode
- assert(shape.N * shape.C * shape.H * shape.W * sizeof(T) == len);
- }
-
-public:
- const nnfw::misc::feature::Shape &shape(void) const { return _shape; }
-
-public:
- T at(uint32_t ch, uint32_t row, uint32_t col) const override
- {
- uint32_t index = index_of(_shape, ch, row, col);
-
- return _ptr[index];
- }
- T at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const override
- {
- uint32_t index = index_of(_shape, batch, ch, row, col);
-
- return _ptr[index];
- }
-
-private:
- nnfw::misc::feature::Shape _shape;
-
-private:
- const T *_ptr;
-};
-
-} // namespace nhwc
-} // namespace feature
-} // namespace util
-} // namespace neurun
-
-#endif // __NEURUN_UTIL_FEATURE_NHWC_READER_H__
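The index_of helper used by the NHWC Reader above is defined in Utils.h (not shown in this diff); a sketch of the flattening it is expected to perform, assuming a densely packed NHWC tensor:

    #include <cstdint>

    // Dense NHWC flattening: channels vary fastest, then columns, rows, batches.
    inline uint32_t nhwc_index(uint32_t batch, uint32_t row, uint32_t col, uint32_t ch,
                               uint32_t H, uint32_t W, uint32_t C)
    {
      return ((batch * H + row) * W + col) * C + ch;
    }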
diff --git a/runtimes/neurun/src/util/feature/nhwc/View.h b/runtimes/neurun/src/util/feature/nhwc/View.h
deleted file mode 100644
index 1b9be9e1d..000000000
--- a/runtimes/neurun/src/util/feature/nhwc/View.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_UTIL_FEATURE_NHWC_VIEW_H__
-#define __NEURUN_UTIL_FEATURE_NHWC_VIEW_H__
-
-#include <cassert>
-
-#include "Utils.h"
-
-#include "misc/feature/Reader.h"
-
-namespace neurun
-{
-namespace util
-{
-namespace feature
-{
-namespace nhwc
-{
-
-template <typename T> class View final : public nnfw::misc::feature::Reader<T>
-{
-public:
- View(const ::nnfw::misc::feature::Shape &shape, T *ptr, size_t len) : _shape{shape}, _ptr{ptr}
- {
- (void)len; // Silence the unused-parameter warning in release builds, where the assert compiles out
- assert(shape.N * shape.C * shape.H * shape.W * sizeof(T) == len);
- }
-
-public:
- const nnfw::misc::feature::Shape &shape(void) const { return _shape; }
-
-public:
- T at(uint32_t ch, uint32_t row, uint32_t col) const override
- {
- uint32_t index = index_of(_shape, ch, row, col);
-
- return _ptr[index];
- }
-
- T at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col) const override
- {
- uint32_t index = index_of(_shape, batch, ch, row, col);
-
- return _ptr[index];
- }
-
- T &at(uint32_t ch, uint32_t row, uint32_t col)
- {
- uint32_t index = index_of(_shape, ch, row, col);
-
- return _ptr[index];
- }
-
- T &at(uint32_t batch, uint32_t ch, uint32_t row, uint32_t col)
- {
- uint32_t index = index_of(_shape, batch, ch, row, col);
-
- return _ptr[index];
- }
-
-private:
- nnfw::misc::feature::Shape _shape;
-
-private:
- T *_ptr;
-};
-
-} // namespace nhwc
-} // namespace feature
-} // namespace util
-} // namespace neurun
-
-#endif // __NEURUN_UTIL_FEATURE_NHWC_VIEW_H__
diff --git a/runtimes/neurun/src/util/logging.h b/runtimes/neurun/src/util/logging.h
deleted file mode 100644
index 62d563967..000000000
--- a/runtimes/neurun/src/util/logging.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __NEURUN_UTIL_LOGGING_H__
-#define __NEURUN_UTIL_LOGGING_H__
-
-#include <iostream>
-
-namespace neurun
-{
-namespace util
-{
-namespace logging
-{
-
-class Context
-{
-public:
- Context() : _enabled{false}
- {
- auto env = std::getenv("NEURUN_LOG_ENABLE");
-
- if (env && std::atoi(env) > 0)
- {
- _enabled = true;
- }
- }
-
-public:
- bool enabled(void) const { return _enabled; }
-
-private:
- bool _enabled;
-};
-
-static Context ctx;
-
-} // namespace logging
-} // namespace util
-} // namespace neurun
-
-#define VERBOSE(name) \
- if (::neurun::util::logging::ctx.enabled()) \
- std::cout << "[" << #name << "] "
-
-#endif // __NEURUN_UTIL_LOGGING_H__
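Because VERBOSE expands to an unbraced if guarding a std::cout expression, call sites stream into it directly and pay almost nothing when logging is off; note that an else placed immediately after a VERBOSE statement would bind to the hidden if. A usage sketch (the tag name is arbitrary):

    // Emits "[Scheduler] assigning backend" only when NEURUN_LOG_ENABLE is a positive integer.
    VERBOSE(Scheduler) << "assigning backend" << std::endl;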
diff --git a/runtimes/neurun/test/CMakeLists.txt b/runtimes/neurun/test/CMakeLists.txt
new file mode 100644
index 000000000..f21caabc3
--- /dev/null
+++ b/runtimes/neurun/test/CMakeLists.txt
@@ -0,0 +1,15 @@
+set(TEST_NEURUN test_neurun)
+
+file(GLOB_RECURSE TESTS "*.cc")
+
+add_executable(${TEST_NEURUN} ${TESTS})
+
+target_include_directories(${TEST_NEURUN} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/../core/src)
+
+target_link_libraries(${TEST_NEURUN} neurun_core)
+target_link_libraries(${TEST_NEURUN} gtest)
+target_link_libraries(${TEST_NEURUN} gtest_main)
+target_link_libraries(${TEST_NEURUN} ${LIB_PTHREAD})
+add_test(${TEST_NEURUN} ${TEST_NEURUN})
+
+install(TARGETS ${TEST_NEURUN} DESTINATION unittest)
diff --git a/runtimes/neurun/test/backend/cpu/MemoryPlanner.cc b/runtimes/neurun/test/backend/cpu/MemoryPlanner.cc
deleted file mode 100644
index 04f2e5da4..000000000
--- a/runtimes/neurun/test/backend/cpu/MemoryPlanner.cc
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <gtest/gtest.h>
-
-#include "backend/cpu/MemoryPlanner.h"
-#include "model/operand/Index.h"
-
-TEST(Allocator, allocate_test)
-{
- ::neurun::backend::cpu::Allocator allocator(1024);
- ASSERT_NE(allocator.base(), nullptr);
-}
-
-TEST(BumpPlanner, claim_test)
-{
- ::neurun::backend::cpu::BumpPlanner planner;
-
- auto claim = [&planner](uint32_t index, size_t size, uint32_t expected_offset) {
- ::neurun::model::operand::Index mem_idx(index);
- planner.claim(mem_idx, size);
- auto mem_blk = planner.memory_plans()[mem_idx];
- ASSERT_EQ(mem_blk.offset, expected_offset);
- ASSERT_EQ(mem_blk.size, size);
- };
-
- claim(0, 10, 0);
- claim(1, 20, 10);
- claim(2, 30, 30);
-}
-
-TEST(FirstFitPlanner, claim_release_test)
-{
- ::neurun::backend::cpu::FirstFitPlanner planner;
-
- auto claim = [&planner](uint32_t index, size_t size, uint32_t expected_offset) {
- ::neurun::model::operand::Index mem_idx(index);
- planner.claim(mem_idx, size);
- auto mem_blk = planner.memory_plans()[mem_idx];
- ASSERT_EQ(mem_blk.offset, expected_offset);
- ASSERT_EQ(mem_blk.size, size);
- };
-
- auto release = [&planner](uint32_t index) {
- ::neurun::model::operand::Index mem_idx(index);
- planner.release(mem_idx);
- };
-
- // 0 CLAIM - 10
- claim(0, 10, 0);
-
- // 1 CLAIM - 20
- claim(1, 20, 10);
-
- // 2 CLAIM - 30
- claim(2, 30, 30);
-
- // 0 RELEASE - 10
- release(0);
-
- // 3 CLAIM - 20
- claim(3, 20, 60);
-
- // 4 CLAIM - 5
- claim(4, 5, 0);
-
- // 5 CLAIM - 10
- claim(5, 10, 80);
-
- // 6 CLAIM - 5
- claim(6, 5, 5);
-
- // 2 RELEASE - 30
- release(2);
-
- // 7 CLAIM - 35
- claim(7, 35, 90);
-
- // 8 CLAIM - 10
- claim(8, 10, 30);
-
- // 4 RELEASE - 5
- release(4);
-
- // 9 CLAIM - 10
- claim(9, 10, 40);
-
- // 10 CLAIM - 10
- claim(10, 10, 50);
-
- // 6 RELEASE
- release(6);
-
- // 1 RELEASE
- release(1);
-
- // 8 RELEASE
- release(8);
-
- // 9 RELEASE
- release(9);
-
- // 10 RELEASE
- release(10);
-
- // 3 RELEASE
- release(3);
-
- // 5 RELEASE
- release(5);
-
- // 7 RELEASE
- release(7);
-}
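The offsets asserted in claim_release_test follow the first-fit discipline: each claim is placed at the lowest offset whose gap is large enough, and release returns the block to the pool. A compact, hypothetical sketch of that policy (independent of the neurun classes) that reproduces the offsets above:

    #include <cstdint>
    #include <map>

    class FirstFit
    {
    public:
      // Scan allocated blocks in offset order and place the new block in the
      // first gap large enough to hold it.
      uint32_t claim(uint32_t key, uint32_t size)
      {
        uint32_t offset = 0;
        for (const auto &blk : _blocks)
        {
          if (blk.first - offset >= size)
            break; // the gap before this block fits
          offset = blk.first + blk.second;
        }
        _blocks.emplace(offset, size);
        _keys[key] = offset;
        return offset;
      }

      void release(uint32_t key)
      {
        _blocks.erase(_keys[key]);
        _keys.erase(key);
      }

    private:
      std::map<uint32_t, uint32_t> _blocks; // offset -> size, ordered by offset
      std::map<uint32_t, uint32_t> _keys;   // claim key -> offset
    };

For example, after claim(0,10), claim(1,20), claim(2,30) and release(0), claim(3,20) lands at offset 60 and the later claim(4,5) reuses offset 0, matching the assertions in the deleted test.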
diff --git a/runtimes/neurun/test/core/backend/ExecTime.test.cc b/runtimes/neurun/test/core/backend/ExecTime.test.cc
new file mode 100644
index 000000000..0409c0752
--- /dev/null
+++ b/runtimes/neurun/test/core/backend/ExecTime.test.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "backend/ExecTime.h"
+#include "backend/IConfig.h"
+#include "backend/Backend.h"
+#include <gtest/gtest.h>
+#include <string>
+
+namespace
+{
+using namespace neurun;
+using namespace backend;
+
+struct MockConfig : public IConfig
+{
+ std::string id() override { return "b1"; }
+ void initialize() override{};
+ bool SupportSubTensorAlloc() override { return false; }
+};
+
+struct MockBackend : public ::neurun::backend::Backend
+{
+ std::shared_ptr<neurun::backend::IConfig> config() const override
+ {
+ return std::make_shared<MockConfig>();
+ }
+ std::unique_ptr<BackendContext>
+ newContext(const model::Operands &,
+ const std::shared_ptr<backend::custom::KernelRegistry> &) const override
+ {
+ return nullptr;
+ }
+};
+
+TEST(ExecTime, roundtrip_ok)
+{
+ const auto *b = new MockBackend();
+ std::vector<const Backend *> bs = {b};
+ {
+ ExecTime et(bs);
+ et.updateOperationExecTime(b, "op1", true, 100, 100);
+ et.updateOperationExecTime(b, "op1", true, 200, 200);
+ et.updateOperationExecTime(b, "op1", false, 100, 888);
+ et.uploadOperationsExecTime();
+ }
+ {
+ ExecTime et(bs);
+ auto time = et.getOperationExecTime(b, "op1", true, 100);
+ ASSERT_EQ(time, 100);
+ // Check interpolation
+ time = et.getOperationExecTime(b, "op1", true, 150);
+ ASSERT_EQ(time, 150);
+ time = et.getOperationExecTime(b, "op1", false, 100);
+ ASSERT_EQ(time, 888);
+ et.uploadOperationsExecTime();
+ }
+ // clean up
+ EXPECT_EQ(remove("exec_time.json"), 0);
+}
+
+TEST(ExecTime, structure)
+{
+
+ const auto *b = new MockBackend();
+ std::vector<const Backend *> bs = {b};
+ {
+ ExecTime et(bs);
+ et.updateOperationExecTime(b, "op1", true, 100, 100);
+ et.updateOperationExecTime(b, "op1", true, 200, 200);
+ et.uploadOperationsExecTime();
+ }
+ {
+ ExecTime et(bs);
+ auto time = et.getOperationExecTime(b, "op1", true, 100);
+ ASSERT_EQ(time, 100);
+ // Check interpolation
+ time = et.getOperationExecTime(b, "op1", true, 200);
+ ASSERT_EQ(time, 200);
+ et.uploadOperationsExecTime();
+ }
+ // clean up
+ EXPECT_EQ(remove("exec_time.json"), 0);
+}
+} // unnamed namespace
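The interpolation assertion above (150 expected for size 150) is plain linear interpolation between the two nearest recorded measurements, (100, 100) and (200, 200). A hypothetical standalone version of that estimate:

    #include <cstdint>

    // Linear interpolation between two (size, time) measurements; assumes s0 <= size <= s1.
    inline int64_t interpolate(uint32_t size, uint32_t s0, int64_t t0, uint32_t s1, int64_t t1)
    {
      return t0 + (t1 - t0) * static_cast<int64_t>(size - s0) / static_cast<int64_t>(s1 - s0);
    }

    // interpolate(150, 100, 100, 200, 200) == 150, matching the assertion above.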
diff --git a/runtimes/neurun/test/core/compiler/Scheduler.cc b/runtimes/neurun/test/core/compiler/Scheduler.cc
new file mode 100644
index 000000000..8c5424a9d
--- /dev/null
+++ b/runtimes/neurun/test/core/compiler/Scheduler.cc
@@ -0,0 +1,554 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <compiler/HEScheduler.h>
+#include <backend/ExecTime.h>
+#include <backend/IShapeFixer.h>
+
+#include <model/Model.h>
+#include <model/Shape.h>
+#include <model/InternalType.h>
+#include <model/TypeInfo.h>
+#include <model/DataType.h>
+
+#include <model/operation/AddNode.h>
+#include <model/operation/SubNode.h>
+#include <model/operation/MulNode.h>
+#include <model/operation/FullyConnectedNode.h>
+
+#include <gtest/gtest.h>
+
+namespace
+{
+using namespace neurun;
+using namespace model;
+using namespace backend;
+using namespace operation;
+
+//
+// Mock backends classes
+//
+
+// A backend can be created without a ShapeFixer, but the scheduler uses the ShapeFixer
+// to detect which operations a backend supports.
+struct MockShapeFixer : IShapeFixer
+{
+ void visit(const model::operation::AddNode &) override{};
+ void visit(const model::operation::SubNode &) override{};
+ void visit(const model::operation::MulNode &) override{};
+ void visit(const model::operation::FullyConnectedNode &) override{};
+ std::shared_ptr<ITensorBuilder> tensor_builder() override { return nullptr; };
+};
+
+struct MockConfigCPU : public IConfig
+{
+ std::string id() override { return "cpu"; }
+ void initialize() override{};
+ bool SupportSubTensorAlloc() override { return false; }
+};
+
+struct MockBackendCPU : public Backend
+{
+ std::shared_ptr<IConfig> config() const override { return std::make_shared<MockConfigCPU>(); }
+ std::unique_ptr<BackendContext>
+ newContext(const Operands &,
+ const std::shared_ptr<backend::custom::KernelRegistry> &) const override
+ {
+ return std::unique_ptr<BackendContext>(
+ new BackendContext{this, nullptr, nullptr, nullptr, std::make_shared<MockShapeFixer>()});
+ }
+};
+
+struct MockConfigGPU : public IConfig
+{
+ std::string id() override { return "gpu"; }
+ void initialize() override{};
+ bool SupportSubTensorAlloc() override { return false; }
+};
+
+struct MockBackendGPU : public Backend
+{
+ std::shared_ptr<IConfig> config() const override { return std::make_shared<MockConfigGPU>(); }
+ std::unique_ptr<BackendContext>
+ newContext(const Operands &,
+ const std::shared_ptr<backend::custom::KernelRegistry> &) const override
+ {
+ return std::unique_ptr<BackendContext>(
+ new BackendContext{this, nullptr, nullptr, nullptr, std::make_shared<MockShapeFixer>()});
+ }
+};
+
+struct MockConfigNPU : public IConfig
+{
+ std::string id() override { return "npu"; }
+ void initialize() override{};
+ bool SupportSubTensorAlloc() override { return false; }
+};
+
+struct MockBackendNPU : public Backend
+{
+ std::shared_ptr<IConfig> config() const override { return std::make_shared<MockConfigNPU>(); }
+ std::unique_ptr<BackendContext>
+ newContext(const Operands &,
+ const std::shared_ptr<backend::custom::KernelRegistry> &) const override
+ {
+ return std::unique_ptr<BackendContext>(
+ new BackendContext{this, nullptr, nullptr, nullptr, std::make_shared<MockShapeFixer>()});
+ }
+};
+
+//
+// Constants
+//
+
+const int OPERAND_ELEMS = 268203;
+const int OPERAND_SIZE = OPERAND_ELEMS * 4;
+const int OPERATION_SIZE = OPERAND_SIZE * 3;
+
+const std::string LINEAR("Linear");
+const std::string DATAFLOW("Dataflow");
+const std::string PARALLEL("Parallel");
+
+//
+// Helper functions
+//
+
+// Set executor through environment variable
+void setExecutor(const std::string &executor) { setenv("EXECUTOR", executor.c_str(), true); }
+
+// Set profiling mode through environment variable
+void setProfilingMode(const bool value) { setenv("PROFILING_MODE", value ? "1" : "0", true); }
+
+// Calculate operation size by adding the sizes of all input and output operands
+uint32_t calcOpSize(const std::shared_ptr<graph::Graph> &graph, const OperationIndex &op_idx)
+{
+ uint32_t size = 0;
+ for (const auto &input : graph->operations().at(op_idx).getInputs())
+ size += graph->operands().at(input).info().total_size();
+ for (const auto &output : graph->operations().at(op_idx).getOutputs())
+ size += graph->operands().at(output).info().total_size();
+ return size;
+}
+
+// Set operation execution time. This helper is needed since ExecTime exposes only an
+// 'updateOperationExecTime' method.
+void setOperationExecTime(ExecTime &et, const Backend *backend, const std::string &operation,
+ bool quant, uint32_t op_size, int64_t time)
+{
+ // You shouldn't set a negative time with this method since the nnfw JSON deserializer can't read it
+ assert(time > 0);
+ int64_t prev_time = et.getOperationExecTime(backend, operation, quant, op_size);
+ int64_t time_to_set = prev_time == ExecTime::NOT_FOUND ? time : 2 * time - prev_time;
+ et.updateOperationExecTime(backend, operation, quant, op_size, time_to_set);
+ assert(et.getOperationExecTime(backend, operation, quant, op_size) == time);
+}
+
+// Set same execution time for all given backends/operations
+void setOperationsExecutionTime(const std::vector<const Backend *> &backends,
+ const std::vector<std::string> &op_names,
+ const std::vector<uint32_t> &op_sizes, int64_t exec_time)
+{
+ assert(op_names.size() == op_sizes.size());
+ ExecTime et(backends);
+ for (int i = 0; i < op_names.size(); ++i)
+ {
+ for (auto &backend : backends)
+ setOperationExecTime(et, backend, op_names[i], false, op_sizes[i], exec_time);
+ }
+ et.uploadOperationsExecTime();
+}
+
+// Set permute time from one backend to another. This helper is needed since ExecTime exposes only an
+// 'updatePermuteTime' method.
+void setPermutationTime(ExecTime &et, const Backend *from_backend, const Backend *to_backend,
+ bool quant, uint32_t op_size, int64_t time)
+{
+ // You shouldn't set a negative time with this method since the nnfw JSON deserializer can't read it
+ assert(time > 0);
+ int64_t prev_time = et.getPermuteTime(from_backend, to_backend, quant, op_size);
+ int64_t time_to_set = prev_time == ExecTime::NOT_FOUND ? time : 2 * time - prev_time;
+ et.updatePermuteTime(from_backend, to_backend, quant, op_size, time_to_set);
+ assert(et.getPermuteTime(from_backend, to_backend, quant, op_size) == time);
+}
+
+// Set same permutation time between all given backends
+void setPermutationsExecutionTime(const std::vector<const Backend *> &backends,
+ const int operand_size, const int64_t exec_time)
+{
+ ExecTime et(backends);
+ for (const auto &backend : backends)
+ {
+ for (auto &other_backend : backends)
+ {
+ if (backend == other_backend)
+ continue;
+ setPermutationTime(et, backend, other_backend, false, operand_size, exec_time);
+ }
+ }
+ et.uploadOperationsExecTime();
+}
+
+//
+// Functions for creating graphs
+//
+
+using OIS = OperandIndexSequence;
+
+template <typename NodeT, typename... Types>
+model::OperationIndex createNode(std::shared_ptr<graph::Graph> graph, Types &&... args)
+{
+ typename NodeT::Param op_params{Activation::NONE};
+ auto op = nnfw::cpp14::make_unique<NodeT>(std::forward<Types>(args)..., op_params);
+ auto op_idx = graph->addOperation(std::move(op));
+ // For now, all operations in the tested graphs have the same size (for simplicity)
+ assert(calcOpSize(graph, op_idx) == OPERATION_SIZE);
+ return op_idx;
+}
+
+// Create straight graph: Add->Sub->Mul
+std::shared_ptr<graph::Graph> createStraightGraph()
+{
+ auto graph = std::make_shared<graph::Graph>(nnfw::cpp14::make_unique<Model>());
+ const TypeInfo float_op(DataType::FLOAT32);
+
+ // Create add node
+ auto add_lhs_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto add_rhs_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto add_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<AddNode>(graph, OIS{add_lhs_idx, add_rhs_idx}, OIS{add_out_idx});
+
+ // Create sub node
+ auto sub_const_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto sub_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<SubNode>(graph, OIS{add_out_idx, sub_const_idx}, OIS{sub_out_idx});
+
+ // Create mul node
+ auto mul_const_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto mul_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<MulNode>(graph, OIS{sub_out_idx, mul_const_idx}, OIS{mul_out_idx});
+
+ graph->finishBuilding();
+ return graph;
+}
+
+/* Create branched graph:
+ *         [Add]
+ *        //   \\
+ *   [Mul1]     [FC1]
+ *     ||         ||
+ *   [Mul2]     [FC2]
+ *        \\   //
+ *         [Sub]
+ */
+std::shared_ptr<graph::Graph> createBranchedGraph()
+{
+ auto graph = std::make_shared<graph::Graph>(nnfw::cpp14::make_unique<Model>());
+ const TypeInfo float_op(DataType::FLOAT32);
+
+ // Create add node
+ auto add_lhs_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto add_rhs_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto add_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<AddNode>(graph, OIS{add_lhs_idx, add_rhs_idx}, OIS{add_out_idx});
+
+ // Create mul1 node
+ auto mul1_const_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto mul1_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<MulNode>(graph, OIS{add_out_idx, mul1_const_idx}, OIS{mul1_out_idx});
+
+ // Create mul2 node
+ auto mul2_const_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto mul2_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<MulNode>(graph, OIS{mul1_out_idx, mul2_const_idx}, OIS{mul2_out_idx});
+
+ // Create fc1 node
+ auto fc1_const_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto fc1_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<FullyConnectedNode>(graph, OIS{add_out_idx, fc1_const_idx}, OIS{fc1_out_idx});
+
+ // Create fc2 node
+ auto fc2_const_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ auto fc2_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<FullyConnectedNode>(graph, OIS{fc1_out_idx, fc2_const_idx}, OIS{fc2_out_idx});
+
+ // Create sub node
+ auto sub_out_idx = graph->addOperand(Shape{OPERAND_ELEMS}, float_op);
+ createNode<SubNode>(graph, OIS{mul2_out_idx, fc2_out_idx}, OIS{sub_out_idx});
+
+ graph->finishBuilding();
+ return graph;
+}
+
+//
+// Tests setup/teardown
+//
+
+// SetUp/TearDown methods run before/after each test and perform actions common to every test
+class SchedulerTest : public ::testing::Test
+{
+protected:
+ void SetUp() override
+ {
+ // Initialize mock backends
+ _cpu_backend = new MockBackendCPU();
+ _gpu_backend = new MockBackendGPU();
+ _npu_backend = new MockBackendNPU();
+ _mock_backends = {_cpu_backend, _gpu_backend, _npu_backend};
+
+ // Remove previous profile data if it exists
+ if (!remove("exec_time.json"))
+ {
+ // DO NOTHING (no profile data)
+ }
+
+ // Remember original value of 'EXECUTOR' environment variable
+ char *executor = std::getenv("EXECUTOR");
+ _original_executor = executor == nullptr ? "" : executor;
+
+ // Remember original value of 'PROFILING_MODE' environment variable
+ char *profiling_mode = std::getenv("PROFILING_MODE");
+ _original_profiling_mode = profiling_mode == nullptr ? "" : profiling_mode;
+ }
+
+ void TearDown() override
+ {
+ delete _cpu_backend;
+ delete _gpu_backend;
+ delete _npu_backend;
+ EXPECT_EQ(remove("exec_time.json"), 0);
+ setenv("EXECUTOR", _original_executor.c_str(), true);
+ setenv("PROFILING_MODE", _original_profiling_mode.c_str(), true);
+ }
+
+ const MockBackendCPU *_cpu_backend{nullptr};
+ const MockBackendGPU *_gpu_backend{nullptr};
+ const MockBackendNPU *_npu_backend{nullptr};
+ std::vector<const Backend *> _mock_backends;
+
+ std::string _original_executor;
+ std::string _original_profiling_mode;
+};
+
+class SchedulerTestWithExecutorParam : public SchedulerTest,
+ public testing::WithParamInterface<std::string>
+{
+};
+
+//
+// HEScheduler tests
+//
+
+// Test scheduler behavior for straight graph with known execution time of all nodes and permutes.
+TEST_P(SchedulerTestWithExecutorParam, straight_graph_known_exec_time)
+{
+ setExecutor(GetParam());
+
+ // Prepare graph
+ auto graph(createStraightGraph());
+ OperationIndex add_op_idx(0), sub_op_idx(1), mul_op_idx(2);
+
+ // Set default execution and transfer time
+ setPermutationsExecutionTime(_mock_backends, OPERAND_SIZE, 1);
+ setOperationsExecutionTime(_mock_backends, {"Add", "Sub", "Mul"},
+ {OPERATION_SIZE, OPERATION_SIZE, OPERATION_SIZE}, 1e4);
+
+ // Test 1
+ // Expected behaviour: scheduler assigns different backend to each node
+ {
+ // For each backend reduce execution time of one node
+ ExecTime et(_mock_backends);
+ setOperationExecTime(et, _cpu_backend, "Add", false, OPERATION_SIZE, 1);
+ setOperationExecTime(et, _gpu_backend, "Sub", false, OPERATION_SIZE, 1);
+ setOperationExecTime(et, _npu_backend, "Mul", false, OPERATION_SIZE, 1);
+ et.uploadOperationsExecTime();
+
+ // Test scheduler
+ auto scheduler = compiler::HEScheduler(graph->operands(), _mock_backends, nullptr);
+ const auto br = scheduler.schedule(*graph);
+ ASSERT_EQ(br->getBackend(add_op_idx)->config()->id(), "cpu");
+ ASSERT_EQ(br->getBackend(sub_op_idx)->config()->id(), "gpu");
+ ASSERT_EQ(br->getBackend(mul_op_idx)->config()->id(), "npu");
+ }
+
+ // Test 2
+ // Expected behaviour: scheduler assigns single backend to all nodes because of big transfer time
+ {
+ // Increase transfer time
+ setPermutationsExecutionTime(_mock_backends, OPERAND_SIZE, 1e5);
+
+ // Test scheduler
+ auto scheduler = compiler::HEScheduler(graph->operands(), _mock_backends, nullptr);
+ const auto br = scheduler.schedule(*graph);
+ ASSERT_EQ(br->getBackend(add_op_idx)->config()->id(), "cpu");
+ ASSERT_EQ(br->getBackend(sub_op_idx)->config()->id(), "cpu");
+ ASSERT_EQ(br->getBackend(mul_op_idx)->config()->id(), "cpu");
+ }
+}
+
+// Test scheduler behavior for branched graph with known execution time of all nodes and permutes
+TEST_P(SchedulerTestWithExecutorParam, branched_graph_known_exec_time)
+{
+ const int64_t NPU_ET = 5000;
+ setExecutor(GetParam());
+
+ // Prepare graph
+ auto graph(createBranchedGraph());
+ OperationIndex add_op_idx(0), mul1_op_idx(1), mul2_op_idx(2), fc1_op_idx(3), fc2_op_idx(4),
+ sub_op_idx(5);
+
+ // Set default execution and transfer time
+ setPermutationsExecutionTime(_mock_backends, OPERAND_SIZE, 1000);
+ setOperationsExecutionTime(_mock_backends, {"Add", "Sub", "Mul", "FullyConnected"},
+ {OPERATION_SIZE, OPERATION_SIZE, OPERATION_SIZE, OPERATION_SIZE}, 1e4);
+
+ // Test 1
+ // Expected behaviour: for the dataflow and linear executors the scheduler assigns the fastest
+ // backend to all nodes; for the parallel executor it assigns different backends to the branches.
+ {
+ // Reduce execution time
+ ExecTime et(_mock_backends);
+ setOperationExecTime(et, _npu_backend, "Add", false, OPERATION_SIZE, NPU_ET);
+ setOperationExecTime(et, _npu_backend, "Mul", false, OPERATION_SIZE, NPU_ET);
+ setOperationExecTime(et, _npu_backend, "Sub", false, OPERATION_SIZE, NPU_ET);
+ setOperationExecTime(et, _npu_backend, "FullyConnected", false, OPERATION_SIZE, NPU_ET);
+ setOperationExecTime(et, _gpu_backend, "Mul", false, OPERATION_SIZE, NPU_ET + 1000);
+ setOperationExecTime(et, _gpu_backend, "FullyConnected", false, OPERATION_SIZE, NPU_ET + 1000);
+ et.uploadOperationsExecTime();
+
+ // Test scheduler
+ auto scheduler = compiler::HEScheduler(graph->operands(), _mock_backends, nullptr);
+ const auto br = scheduler.schedule(*graph);
+
+ std::string branch1_expected_backend("npu"), branch2_expected_backend("npu");
+ if (GetParam() == PARALLEL)
+ {
+ branch1_expected_backend =
+ br->getBackend(mul1_op_idx)->config()->id() == "npu" ? "npu" : "gpu";
+ branch2_expected_backend = branch1_expected_backend == "npu" ? "gpu" : "npu";
+ }
+
+ ASSERT_EQ(br->getBackend(add_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(mul1_op_idx)->config()->id(), branch1_expected_backend);
+ ASSERT_EQ(br->getBackend(mul2_op_idx)->config()->id(), branch1_expected_backend);
+ ASSERT_EQ(br->getBackend(fc1_op_idx)->config()->id(), branch2_expected_backend);
+ ASSERT_EQ(br->getBackend(fc2_op_idx)->config()->id(), branch2_expected_backend);
+ ASSERT_EQ(br->getBackend(sub_op_idx)->config()->id(), "npu");
+ }
+
+ // Test 2
+ // Expected behaviour: scheduler assigns single backend to all nodes
+ {
+ // Increase execution time for GPU backend
+ ExecTime et(_mock_backends);
+ /* For the parallel executor: set a time larger than branches_cnt * npu_exec_time
+ so that npu is preferred: the (i+1)-th level node of the first branch will wait for npu
+ until it finishes the i-th nodes of all other branches in BFS order */
+ setOperationExecTime(et, _gpu_backend, "Mul", false, OPERATION_SIZE, NPU_ET * 2 + 1);
+ /* For the parallel executor: set the FC execution time larger than Mul's to make the choice
+ deterministic: if they were equal and scheduling were done in the order mul1->FC1->FC2->mul2,
+ gpu would be selected for mul2 since NPU_ET * 3 > GPU_ET (which is NPU_ET * 2 + 1) */
+ setOperationExecTime(et, _gpu_backend, "FullyConnected", false, OPERATION_SIZE, NPU_ET * 2 + 2);
+ et.uploadOperationsExecTime();
+
+ // Test scheduler
+ auto scheduler = compiler::HEScheduler(graph->operands(), _mock_backends, nullptr);
+ const auto br = scheduler.schedule(*graph);
+ ASSERT_EQ(br->getBackend(add_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(mul1_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(mul2_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(fc1_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(fc2_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(sub_op_idx)->config()->id(), "npu");
+ }
+}
+
+// SchedulerTestWithExecutorParam tests are parameterized with the executor name and run three
+// times, once for each executor
+INSTANTIATE_TEST_CASE_P(AllExecutors, SchedulerTestWithExecutorParam,
+ testing::Values(LINEAR, DATAFLOW, PARALLEL));
+
+// Test scheduler behavior for branched graph and enabled profiling mode
+TEST_F(SchedulerTest, branched_graph_profiling_mode)
+{
+ const int ET = 1e5;
+
+ // Turn on profiling mode
+ setProfilingMode(true);
+ setExecutor(DATAFLOW);
+
+ // Prepare graph
+ auto graph(createBranchedGraph());
+ OperationIndex add_op_idx(0), mul1_op_idx(1), mul2_op_idx(2), fc1_op_idx(3), fc2_op_idx(4),
+ sub_op_idx(5);
+
+ // Test 1
+ // Expected behaviour: scheduler assigns backends to nodes with unknown execution time
+ {
+ // Set execution time for all backends/nodes except for cpu/Sub, npu/Mul, gpu/FC
+ ExecTime et(_mock_backends);
+ setOperationExecTime(et, _cpu_backend, "Add", false, OPERATION_SIZE, ET);
+ setOperationExecTime(et, _cpu_backend, "Mul", false, OPERATION_SIZE, ET + 1);
+ setOperationExecTime(et, _cpu_backend, "FullyConnected", false, OPERATION_SIZE, ET);
+ setOperationExecTime(et, _npu_backend, "Add", false, OPERATION_SIZE, ET);
+ setOperationExecTime(et, _npu_backend, "FullyConnected", false, OPERATION_SIZE, ET);
+ setOperationExecTime(et, _npu_backend, "Sub", false, OPERATION_SIZE, ET);
+ setOperationExecTime(et, _gpu_backend, "Add", false, OPERATION_SIZE, ET);
+ setOperationExecTime(et, _gpu_backend, "Mul", false, OPERATION_SIZE, ET + 1);
+ setOperationExecTime(et, _gpu_backend, "Sub", false, OPERATION_SIZE, ET);
+ et.uploadOperationsExecTime();
+
+ // Test scheduler
+ auto scheduler = compiler::HEScheduler(graph->operands(), _mock_backends, nullptr);
+ const auto br = scheduler.schedule(*graph);
+ ASSERT_EQ(br->getBackend(mul1_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(mul2_op_idx)->config()->id(), "npu");
+ ASSERT_EQ(br->getBackend(fc1_op_idx)->config()->id(), "gpu");
+ ASSERT_EQ(br->getBackend(fc2_op_idx)->config()->id(), "gpu");
+ ASSERT_EQ(br->getBackend(sub_op_idx)->config()->id(), "cpu");
+ }
+
+ // Test 2
+ // Expected behaviour: scheduler shuffles backends so that different backends are assigned to
+ // neighboring nodes
+ {
+ // Set execution time for rest backends/nodes (cpu/Sub, npu/Mul, gpu/FC)
+ ExecTime et(_mock_backends);
+ setOperationExecTime(et, _cpu_backend, "Sub", false, OPERATION_SIZE, ET);
+ setOperationExecTime(et, _npu_backend, "Mul", false, OPERATION_SIZE, ET + 1);
+ setOperationExecTime(et, _gpu_backend, "FullyConnected", false, OPERATION_SIZE, ET);
+ et.uploadOperationsExecTime();
+
+ // Test scheduler
+ auto scheduler = compiler::HEScheduler(graph->operands(), _mock_backends, nullptr);
+ const auto br = scheduler.schedule(*graph);
+ ASSERT_NE(br->getBackend(add_op_idx)->config()->id(),
+ br->getBackend(mul1_op_idx)->config()->id());
+ ASSERT_NE(br->getBackend(add_op_idx)->config()->id(),
+ br->getBackend(fc1_op_idx)->config()->id());
+ ASSERT_NE(br->getBackend(mul1_op_idx)->config()->id(),
+ br->getBackend(mul2_op_idx)->config()->id());
+ ASSERT_NE(br->getBackend(fc1_op_idx)->config()->id(),
+ br->getBackend(fc2_op_idx)->config()->id());
+ ASSERT_NE(br->getBackend(mul2_op_idx)->config()->id(),
+ br->getBackend(sub_op_idx)->config()->id());
+ ASSERT_NE(br->getBackend(fc2_op_idx)->config()->id(),
+ br->getBackend(sub_op_idx)->config()->id());
+ }
+}
+
+// TODO: Add tests with unknown execution and permutation time
+
+} // unnamed namespace
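A note on the setOperationExecTime helper above: the 2 * time - prev_time update assumes updateOperationExecTime stores a running average of the previous value and the new measurement (an assumption about ExecTime's internals that the trailing assert double-checks). Under that assumption the arithmetic pins the stored value to exactly the target:

    #include <cstdint>

    // Assumed update rule inside ExecTime (hypothetical): stored' = (stored + measurement) / 2.
    // Feeding measurement = 2 * target - stored gives stored' = target exactly.
    inline int64_t pin_measurement(int64_t target, int64_t stored) { return 2 * target - stored; }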
diff --git a/runtimes/neurun/test/core/exec/ExecInstance.cc b/runtimes/neurun/test/core/exec/ExecInstance.cc
new file mode 100644
index 000000000..2e962a4b2
--- /dev/null
+++ b/runtimes/neurun/test/core/exec/ExecInstance.cc
@@ -0,0 +1,312 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include <thread>
+
+#include "graph/Graph.h"
+#include "model/Model.h"
+#include "compiler/Compiler.h"
+#include "exec/Execution.h"
+#include "model/operation/AddNode.h"
+
+namespace
+{
+
+using namespace neurun::model;
+using DataType = neurun::model::DataType;
+using Model = neurun::model::Model;
+
+class CompiledMockUpModel
+{
+public:
+ CompiledMockUpModel()
+ {
+ // Model: two elementwise add operations
+ // model input: lhs, rhs1
+ // model output: second add result (result2)
+ // constant: rhs2
+ // result1 <= (lhs + rhs1)
+ // result2 <= (result1 + rhs2)
+ // lhs, rhs1, rhs2, result1, result2 shape: {1, 2, 2, 1}
+ // activation: none (constant)
+ std::unique_ptr<neurun::model::Model> model = nnfw::cpp14::make_unique<neurun::model::Model>();
+ // 1st add operands (result1 <= lhs + rhs1)
+ Shape shape{1, 2, 2, 1};
+ TypeInfo type{DataType::FLOAT32};
+ static float rhs2_data[4] = {3, 1, -1, 5};
+ auto operand_lhs = model->operands.emplace(shape, type);
+ auto operand_rhs1 = model->operands.emplace(shape, type);
+ auto operand_result1 = model->operands.emplace(shape, type);
+ auto operand_rhs2 = model->operands.emplace(shape, type);
+ auto operand_result2 = model->operands.emplace(shape, type);
+ model->operands.at(operand_rhs2)
+ .data(nnfw::cpp14::make_unique<CachedData>(reinterpret_cast<const uint8_t *>(&rhs2_data),
+ 16));
+ // Add operations (result1 <= lhs + rhs1, then result2 <= result1 + rhs2)
+ operation::AddNode::Param param1;
+ param1.activation = neurun::model::Activation::NONE;
+ auto input_set1 = OperandIndexSequence{operand_lhs, operand_rhs1};
+ auto output_set1 = OperandIndexSequence{operand_result1};
+ model->operations.push(
+ nnfw::cpp14::make_unique<operation::AddNode>(input_set1, output_set1, param1));
+ operation::AddNode::Param param2;
+ param2.activation = neurun::model::Activation::NONE;
+ auto input_set2 = OperandIndexSequence{operand_result1, operand_rhs2};
+ auto output_set2 = OperandIndexSequence{operand_result2};
+ model->operations.push(
+ nnfw::cpp14::make_unique<operation::AddNode>(input_set2, output_set2, param2));
+ // Identify model inputs and outputs
+ model->inputs.append(operand_lhs);
+ model->inputs.append(operand_rhs1);
+ model->outputs.append(operand_result2);
+ graph = std::make_shared<::neurun::graph::Graph>(std::move(model));
+ graph->finishBuilding();
+
+ // Compile
+ auto compiler = new neurun::compiler::Compiler{graph};
+ compiler->compile();
+ compiler->release(executor);
+ delete compiler;
+ }
+
+public:
+ std::shared_ptr<::neurun::graph::Graph> graph;
+ std::shared_ptr<neurun::exec::IExecutor> executor;
+};
+
+TEST(ExecInstance, simple)
+{
+ auto mockup = CompiledMockUpModel();
+ auto graph = mockup.graph;
+ auto executor = mockup.executor;
+
+ auto input1 = IOIndex{0};
+ auto input2 = IOIndex{1};
+ auto output = IOIndex{0};
+
+ const float input1_buffer[4] = {1, 0, -1, -2};
+ const float input2_buffer[4] = {1, -3, 2, -4};
+ float output_buffer[4] = {};
+ const float output_expected[4] = {5, -2, 0, -1};
+
+ auto execution = new neurun::exec::Execution(executor);
+
+ execution->setInput(input1, reinterpret_cast<const void *>(input1_buffer), 16);
+ execution->setInput(input2, reinterpret_cast<const void *>(input2_buffer), 16);
+ execution->setOutput(output, reinterpret_cast<void *>(output_buffer), 16);
+ execution->execute();
+
+ for (auto i = 0; i < 4; i++)
+ {
+ EXPECT_EQ(output_buffer[i], output_expected[i]);
+ }
+
+ delete execution;
+}
+
+TEST(ExecInstance, twoCompile)
+{
+ auto mockup = CompiledMockUpModel();
+ auto graph = mockup.graph;
+ auto executor1 = mockup.executor;
+ auto execution1 = new neurun::exec::Execution(executor1);
+
+ auto input1 = IOIndex{0};
+ auto input2 = IOIndex{1};
+ auto output = IOIndex{0};
+
+ const float exe1_input1_buffer[4] = {1, 0, -1, -2};
+ const float exe1_input2_buffer[4] = {1, -3, 2, -4};
+ float exe1_output_buffer[4] = {};
+ const float exe1_output_expected[4] = {5, -2, 0, -1};
+
+ execution1->setInput(input1, reinterpret_cast<const void *>(exe1_input1_buffer), 16);
+ execution1->setInput(input2, reinterpret_cast<const void *>(exe1_input2_buffer), 16);
+ execution1->setOutput(output, reinterpret_cast<void *>(exe1_output_buffer), 16);
+
+ // Make new executor: compile again
+ auto compiler = new neurun::compiler::Compiler{graph};
+ compiler->compile();
+ std::shared_ptr<neurun::exec::IExecutor> executor2;
+ compiler->release(executor2);
+ auto execution2 = new neurun::exec::Execution(executor2);
+
+ const float exe2_input1_buffer[4] = {2, 1, -2, 0};
+ const float exe2_input2_buffer[4] = {-3, 3, 1, 2};
+ float exe2_output_buffer[4] = {};
+ const float exe2_output_expected[4] = {2, 5, -2, 7};
+
+ execution2->setInput(input1, reinterpret_cast<const void *>(exe2_input1_buffer), 16);
+ execution2->setInput(input2, reinterpret_cast<const void *>(exe2_input2_buffer), 16);
+ execution2->setOutput(output, reinterpret_cast<void *>(exe2_output_buffer), 16);
+
+ execution1->execute();
+ execution2->execute();
+
+ for (auto i = 0; i < 4; i++)
+ {
+ EXPECT_EQ(exe1_output_buffer[i], exe1_output_expected[i]);
+ EXPECT_EQ(exe2_output_buffer[i], exe2_output_expected[i]);
+ }
+
+ delete compiler;
+ delete execution1;
+ delete execution2;
+}
+
+// Support two initialized execution instances, executed in order
+TEST(ExecInstance, twoExecution)
+{
+ auto mockup = CompiledMockUpModel();
+ auto executor = mockup.executor;
+ auto input1 = IOIndex{0};
+ auto input2 = IOIndex{1};
+ auto output1 = IOIndex{0};
+
+ const float exe1_input1_buffer[4] = {1, 0, -1, -2};
+ const float exe1_input2_buffer[4] = {1, -3, 2, -4};
+ float exe1_output_buffer[4] = {};
+ const float exe1_output_expected[4] = {5, -2, 0, -1};
+ const float exe2_output_expected[4] = {2, 5, -2, 7};
+
+ auto execution1 = new neurun::exec::Execution(executor);
+ execution1->setInput(input1, reinterpret_cast<const void *>(exe1_input1_buffer), 16);
+ execution1->setInput(input2, reinterpret_cast<const void *>(exe1_input2_buffer), 16);
+ execution1->setOutput(output1, reinterpret_cast<void *>(exe1_output_buffer), 16);
+
+ const float exe2_input1_buffer[4] = {2, 1, -2, 0};
+ const float exe2_input2_buffer[4] = {-3, 3, 1, 2};
+ float exe2_output_buffer[4] = {};
+
+ // Make new execution
+ auto execution2 = new neurun::exec::Execution(executor);
+ execution2->setInput(input1, reinterpret_cast<const void *>(exe2_input1_buffer), 16);
+ execution2->setInput(input2, reinterpret_cast<const void *>(exe2_input2_buffer), 16);
+ execution2->setOutput(output1, reinterpret_cast<void *>(exe2_output_buffer), 16);
+
+ execution1->execute();
+ execution2->execute();
+
+ for (auto i = 0; i < 4; i++)
+ {
+ EXPECT_EQ(exe1_output_buffer[i], exe1_output_expected[i]);
+ EXPECT_EQ(exe2_output_buffer[i], exe2_output_expected[i]);
+ }
+
+ delete execution1;
+ delete execution2;
+}
+
+class Inference
+{
+public:
+ Inference(const float (&input1)[4], const float (&input2)[4], float (&output)[4],
+ std::shared_ptr<neurun::exec::IExecutor> &executor)
+ : _input1{input1}, _input2{input2}, _output{output}, _executor{executor}
+ {
+ // DO NOTHING
+ }
+
+ void inference(void)
+ {
+ auto input1 = IOIndex{0};
+ auto input2 = IOIndex{1};
+ auto output1 = IOIndex{0};
+
+ auto execution = new neurun::exec::Execution(_executor);
+ execution->setInput(input1, reinterpret_cast<const void *>(_input1), 16);
+ execution->setInput(input2, reinterpret_cast<const void *>(_input2), 16);
+ execution->setOutput(output1, reinterpret_cast<void *>(_output), 16);
+
+ execution->execute();
+
+ delete execution;
+ }
+
+private:
+ const float (&_input1)[4];
+ const float (&_input2)[4];
+ float (&_output)[4];
+ std::shared_ptr<neurun::exec::IExecutor> &_executor;
+};
+
+// Support multi-thread execution
+TEST(ExecInstance, twoThreads)
+{
+ auto mockup = CompiledMockUpModel();
+ auto executor = mockup.executor;
+
+ const float exe1_input1_buffer[4] = {1, 0, -1, -2};
+ const float exe1_input2_buffer[4] = {1, -3, 2, -4};
+ float exe1_output_buffer[4] = {};
+ const float exe1_output_expected[4] = {5, -2, 0, -1};
+
+ Inference execution1{exe1_input1_buffer, exe1_input2_buffer, exe1_output_buffer, executor};
+
+ const float exe2_input1_buffer[4] = {2, 1, -2, 0};
+ const float exe2_input2_buffer[4] = {-3, 3, 1, 2};
+ float exe2_output_buffer[4] = {};
+ const float exe2_output_expected[4] = {2, 5, -2, 7};
+
+ Inference execution2{exe2_input1_buffer, exe2_input2_buffer, exe2_output_buffer, executor};
+
+ std::thread t1{&Inference::inference, &execution1};
+ std::thread t2{&Inference::inference, &execution2};
+
+ t1.join();
+ t2.join();
+
+ for (auto i = 0; i < 4; i++)
+ {
+ EXPECT_EQ(exe1_output_buffer[i], exe1_output_expected[i]);
+ EXPECT_EQ(exe2_output_buffer[i], exe2_output_expected[i]);
+ }
+}
+
+// Support asynchronous execution
+TEST(ExecInstance, async)
+{
+ auto mockup = CompiledMockUpModel();
+ auto graph = mockup.graph;
+ auto executor = mockup.executor;
+
+ auto input1 = IOIndex{0};
+ auto input2 = IOIndex{1};
+ auto output = IOIndex{0};
+
+ const float input1_buffer[4] = {1, 0, -1, -2};
+ const float input2_buffer[4] = {1, -3, 2, -4};
+ float output_buffer[4] = {};
+ const float output_expected[4] = {5, -2, 0, -1};
+
+ auto execution = new neurun::exec::Execution(executor);
+
+ execution->setInput(input1, reinterpret_cast<const void *>(input1_buffer), 16);
+ execution->setInput(input2, reinterpret_cast<const void *>(input2_buffer), 16);
+ execution->setOutput(output, reinterpret_cast<void *>(output_buffer), 16);
+ execution->startExecute();
+ execution->waitFinish();
+
+ for (auto i = 0; i < 4; i++)
+ {
+ EXPECT_EQ(output_buffer[i], output_expected[i]);
+ }
+
+ delete execution;
+}
+
+} // namespace
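The tests above manage Execution objects with raw new/delete; the same single-run flow reads a little safer with RAII. A sketch under the assumption that the setInput/setOutput/execute calls behave as used above (runOnce is a hypothetical helper, not part of neurun):

    #include <cpp14/memory.h>
    #include "exec/Execution.h"

    // Run one inference on a compiled executor; cleanup is automatic.
    void runOnce(const std::shared_ptr<neurun::exec::IExecutor> &executor,
                 const float (&in1)[4], const float (&in2)[4], float (&out)[4])
    {
      auto execution = nnfw::cpp14::make_unique<neurun::exec::Execution>(executor);
      execution->setInput(neurun::model::IOIndex{0}, reinterpret_cast<const void *>(in1), 16);
      execution->setInput(neurun::model::IOIndex{1}, reinterpret_cast<const void *>(in2), 16);
      execution->setOutput(neurun::model::IOIndex{0}, reinterpret_cast<void *>(out), 16);
      execution->execute(); // released automatically when 'execution' leaves scope
    }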
diff --git a/runtimes/neurun/test/core/exec/interp/ExecManager.cc b/runtimes/neurun/test/core/exec/interp/ExecManager.cc
new file mode 100644
index 000000000..501f2b827
--- /dev/null
+++ b/runtimes/neurun/test/core/exec/interp/ExecManager.cc
@@ -0,0 +1,338 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include <cpp14/memory.h>
+
+#include "graph/Graph.h"
+#include "exec/interp/ExecManager.h"
+#include "exec/Execution.h"
+#include "model/Model.h"
+#include "model/operation/AddNode.h"
+
+namespace
+{
+
+using namespace neurun::model;
+using DataType = neurun::model::DataType;
+using ExecManager = neurun::exec::interp::ExecManager;
+using Execution = neurun::exec::Execution;
+using Model = neurun::model::Model;
+
+class InterpExecManagerTest : public ::testing::Test
+{
+protected:
+ virtual void SetUp() {}
+ void CreateSimpleModel()
+ {
+ // Model: one elementwise add operation
+ // model input: lhs, rhs
+ // model output: add result
+ // lhs, rhs, result shape: {1, 2, 2, 1}
+ // activation: none (constant)
+ std::unique_ptr<neurun::model::Model> model = nnfw::cpp14::make_unique<neurun::model::Model>();
+
+ // Add operands
+
+ Shape shape{1, 2, 2, 1};
+ TypeInfo type{DataType::INT32};
+ Shape shape_scalar(0);
+ TypeInfo type_scalar{DataType::INT32};
+
+ auto operand_lhs = model->operands.emplace(shape, type);
+ auto operand_rhs = model->operands.emplace(shape, type);
+ auto operand_result = model->operands.emplace(shape, type);
+
+ // Add operations
+
+ operation::AddNode::Param param;
+ param.activation = neurun::model::Activation::NONE;
+ auto input_set = OperandIndexSequence{operand_lhs, operand_rhs};
+ auto output_set = OperandIndexSequence{operand_result};
+ model->operations.push(
+ nnfw::cpp14::make_unique<operation::AddNode>(input_set, output_set, param));
+
+ // Identify model inputs and outputs
+
+ model->inputs.append(operand_lhs);
+ model->inputs.append(operand_rhs);
+ model->outputs.append(operand_result);
+
+ _graph = nnfw::cpp14::make_unique<::neurun::graph::Graph>(std::move(model));
+ _graph->finishBuilding();
+
+ _executor = nnfw::cpp14::make_unique<ExecManager>(_graph->shareModel());
+ }
+
+ void CreateTwoStepModel()
+ {
+ // Model: two elementwise add operations
+ // model input: lhs, rhs1
+ // model output: second add result (result2)
+ // constant: rhs2
+ // result1 <= (lhs + rhs1)
+ // result2 <= (result1 + rhs2)
+ // lhs, rhs1, rhs2, result1, result2 shape: {1, 2, 2, 1}
+ // activation: none (constant)
+ std::unique_ptr<neurun::model::Model> model = nnfw::cpp14::make_unique<neurun::model::Model>();
+
+ // 1st add operands (result1 <= lhs + rhs1)
+
+ Shape shape{1, 2, 2, 1};
+ TypeInfo type{DataType::INT32};
+ Shape shape_scalar(0);
+ TypeInfo type_scalar{DataType::INT32};
+
+ static int32_t rhs2_data[4] = {3, 1, -1, 5};
+
+ auto operand_lhs = model->operands.emplace(shape, type);
+ auto operand_rhs1 = model->operands.emplace(shape, type);
+ auto operand_result1 = model->operands.emplace(shape, type);
+ auto operand_rhs2 = model->operands.emplace(shape, type);
+ auto operand_result2 = model->operands.emplace(shape, type);
+ model->operands.at(operand_rhs2)
+ .data(nnfw::cpp14::make_unique<CachedData>(reinterpret_cast<const uint8_t *>(&rhs2_data),
+ 16));
+
+ // 2nd add operations (result2 <= result1 + rhs2)
+
+ operation::AddNode::Param param1;
+ param1.activation = neurun::model::Activation::NONE;
+ auto input_set1 = OperandIndexSequence{operand_lhs, operand_rhs1};
+ auto output_set1 = OperandIndexSequence{operand_result1};
+ model->operations.push(
+ nnfw::cpp14::make_unique<operation::AddNode>(input_set1, output_set1, param1));
+
+ operation::AddNode::Param param2;
+ param2.activation = neurun::model::Activation::NONE;
+ auto input_set2 = OperandIndexSequence{operand_result1, operand_rhs2};
+ auto output_set2 = OperandIndexSequence{operand_result2};
+ model->operations.push(
+ nnfw::cpp14::make_unique<operation::AddNode>(input_set2, output_set2, param2));
+
+ // Identify model inputs and outputs
+
+ model->inputs.append(operand_lhs);
+ model->inputs.append(operand_rhs1);
+ model->outputs.append(operand_result2);
+
+ _graph = nnfw::cpp14::make_unique<::neurun::graph::Graph>(std::move(model));
+ _graph->finishBuilding();
+
+ _executor = nnfw::cpp14::make_unique<ExecManager>(_graph->shareModel());
+ }
+
+ void CreateUnspecifiedDimensionsModel()
+ {
+ // Model: one elementwise add operation
+ // model input: lhs, rhs
+ // model output: add result
+ // lhs, rhs, result shape: {1, unknown, 2, 1}
+ // activation: none (constant)
+ std::unique_ptr<neurun::model::Model> model = nnfw::cpp14::make_unique<neurun::model::Model>();
+
+ // Add operands
+
+ Shape shape{1, 0, 2, 1};
+ TypeInfo type{DataType::INT32};
+ Shape shape_scalar(0);
+ TypeInfo type_scalar{DataType::INT32};
+
+ auto operand_lhs = model->operands.emplace(shape, type);
+ auto operand_rhs = model->operands.emplace(shape, type);
+
+ auto operand_activation = model->operands.emplace(shape_scalar, type_scalar);
+ model->operands.at(operand_activation)
+ .data(nnfw::cpp14::make_unique<CachedData>(
+ reinterpret_cast<const uint8_t *>(&_activation_value), 4));
+
+ auto operand_result = model->operands.emplace(shape, type);
+
+ // Add operations
+
+ operation::AddNode::Param param;
+ param.activation = neurun::model::Activation::NONE;
+ auto input_set = OperandIndexSequence{operand_lhs, operand_rhs};
+ auto output_set = OperandIndexSequence{operand_result};
+
+ // Identify model inputs and outputs
+
+ model->inputs.append(operand_lhs);
+ model->inputs.append(operand_rhs);
+ model->outputs.append(operand_result);
+
+ _graph = nnfw::cpp14::make_unique<::neurun::graph::Graph>(std::move(model));
+ _graph->finishBuilding();
+
+ _executor = nnfw::cpp14::make_unique<ExecManager>(_graph->shareModel());
+ }
+
+ void createExecution() { _execution = nnfw::cpp14::make_unique<Execution>(_executor); }
+
+ virtual void TearDown() { _executor = nullptr; }
+
+ std::unique_ptr<::neurun::graph::Graph> _graph{nullptr};
+ std::shared_ptr<ExecManager> _executor{nullptr};
+ std::unique_ptr<Execution> _execution{nullptr};
+ const int32_t _activation_value{0};
+};
+
+TEST_F(InterpExecManagerTest, create_empty)
+{
+ _executor = nnfw::cpp14::make_unique<ExecManager>(std::make_shared<Model>());
+ ASSERT_NE(_executor, nullptr);
+}
+
+TEST_F(InterpExecManagerTest, create_simple)
+{
+ CreateSimpleModel();
+ ASSERT_NE(_executor, nullptr);
+}
+
+TEST_F(InterpExecManagerTest, setInput)
+{
+ CreateSimpleModel();
+ createExecution();
+
+ auto input1 = IOIndex{0};
+ const int32_t input1_buffer[4] = {1, 0, -1, -2};
+
+ EXPECT_THROW(_execution->setInput(input1, reinterpret_cast<const void *>(input1_buffer), 4),
+ std::runtime_error);
+ EXPECT_THROW(_execution->setInput(input1, reinterpret_cast<const void *>(input1_buffer), 12),
+ std::runtime_error);
+ EXPECT_NO_THROW(_execution->setInput(input1, reinterpret_cast<const void *>(input1_buffer), 16));
+}
+
+TEST_F(InterpExecManagerTest, setOutput)
+{
+ CreateSimpleModel();
+ createExecution();
+
+ auto output = IOIndex{0};
+ auto output_idx = _graph->getOutputs().at(output);
+
+ int32_t output_buffer[4] = {};
+
+ EXPECT_THROW(_execution->setOutput(output, reinterpret_cast<void *>(output_buffer), 4),
+ std::runtime_error);
+ EXPECT_THROW(_execution->setOutput(output, reinterpret_cast<void *>(output_buffer), 12),
+ std::runtime_error);
+ EXPECT_NO_THROW(_execution->setOutput(output, reinterpret_cast<void *>(output_buffer), 16));
+}
+
+TEST_F(InterpExecManagerTest, setInputForUnspecifiedDimensions)
+{
+ CreateUnspecifiedDimensionsModel();
+ createExecution();
+
+ auto input1 = IOIndex{0};
+ const int32_t input1_buffer[4] = {1, 0, -1, -2};
+
+ TypeInfo operand_type{DataType::INT32};
+ Shape operand_shape{1, 2, 2, 1};
+
+ EXPECT_THROW(_execution->setInput(input1, operand_type, operand_shape,
+ reinterpret_cast<const void *>(input1_buffer), 4),
+ std::runtime_error);
+ EXPECT_THROW(_execution->setInput(input1, operand_type, operand_shape,
+ reinterpret_cast<const void *>(input1_buffer), 12),
+ std::runtime_error);
+ EXPECT_NO_THROW(_execution->setInput(input1, operand_type, operand_shape,
+ reinterpret_cast<const void *>(input1_buffer), 16));
+}
+
+TEST_F(InterpExecManagerTest, setOutputForUnspecifiedDimensions)
+{
+ CreateUnspecifiedDimensionsModel();
+ createExecution();
+
+ auto output = IOIndex{0};
+ auto output_idx = _graph->getOutputs().at(output);
+
+ TypeInfo operand_type{DataType::INT32};
+ Shape operand_shape{1, 2, 2, 1};
+
+ int32_t output_buffer[4] = {};
+
+ EXPECT_THROW(_execution->setOutput(output, operand_type, operand_shape,
+ reinterpret_cast<void *>(output_buffer), 4),
+ std::runtime_error);
+ EXPECT_THROW(_execution->setOutput(output, operand_type, operand_shape,
+ reinterpret_cast<void *>(output_buffer), 12),
+ std::runtime_error);
+ EXPECT_NO_THROW(_execution->setOutput(output, operand_type, operand_shape,
+ reinterpret_cast<void *>(output_buffer), 16));
+}
+
+TEST_F(InterpExecManagerTest, execute)
+{
+ CreateSimpleModel();
+ createExecution();
+
+ auto input1 = IOIndex{0};
+ auto input2 = IOIndex{1};
+ auto input1_idx = _graph->getInputs().at(input1);
+ auto input2_idx = _graph->getInputs().at(input2);
+
+ const int32_t input1_buffer[4] = {1, 0, -1, -2};
+ const int32_t input2_buffer[4] = {1, -3, 2, -4};
+
+ auto output = IOIndex{0};
+ auto output_idx = _graph->getOutputs().at(output);
+
+ int32_t output_buffer[4] = {};
+
+ EXPECT_NO_THROW(_execution->setInput(input1, reinterpret_cast<const void *>(input1_buffer), 16));
+ EXPECT_NO_THROW(_execution->setInput(input2, reinterpret_cast<const void *>(input2_buffer), 16));
+ EXPECT_NO_THROW(_execution->setOutput(output, reinterpret_cast<void *>(output_buffer), 16));
+ EXPECT_NO_THROW(_execution->execute());
+ EXPECT_EQ(output_buffer[0], 2);
+ EXPECT_EQ(output_buffer[1], -3);
+ EXPECT_EQ(output_buffer[2], 1);
+ EXPECT_EQ(output_buffer[3], -6);
+}
+
+TEST_F(InterpExecManagerTest, executeTwoStep)
+{
+ CreateTwoStepModel();
+ createExecution();
+
+ auto input1 = IOIndex{0};
+ auto input2 = IOIndex{1};
+ auto input1_idx = _graph->getInputs().at(input1);
+ auto input2_idx = _graph->getInputs().at(input2);
+
+ const int32_t input1_buffer[4] = {1, 0, -1, -2};
+ const int32_t input2_buffer[4] = {1, -3, 2, -4};
+
+ auto output = IOIndex{0};
+ auto output_idx = _graph->getOutputs().at(output);
+
+ int32_t output_buffer[4] = {};
+
+ EXPECT_NO_THROW(_execution->setInput(input1, reinterpret_cast<const void *>(input1_buffer), 16));
+ EXPECT_NO_THROW(_execution->setInput(input2, reinterpret_cast<const void *>(input2_buffer), 16));
+ EXPECT_NO_THROW(_execution->setOutput(output, reinterpret_cast<void *>(output_buffer), 16));
+ EXPECT_NO_THROW(_execution->execute());
+ EXPECT_EQ(output_buffer[0], 5);
+ EXPECT_EQ(output_buffer[1], -2);
+ EXPECT_EQ(output_buffer[2], 0);
+ EXPECT_EQ(output_buffer[3], -1);
+}
+
+} // namespace
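The expected values in executeTwoStep follow from evaluating the two adds by hand: output[i] = (input1[i] + input2[i]) + rhs2[i]. A quick hypothetical check of that arithmetic:

    #include <cassert>
    #include <cstdint>

    int main()
    {
      const int32_t in1[4] = {1, 0, -1, -2};
      const int32_t in2[4] = {1, -3, 2, -4};
      const int32_t rhs2[4] = {3, 1, -1, 5};
      const int32_t expected[4] = {5, -2, 0, -1};
      for (int i = 0; i < 4; ++i)
        assert(in1[i] + in2[i] + rhs2[i] == expected[i]); // (lhs + rhs1) + rhs2
      return 0;
    }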
diff --git a/runtimes/neurun/test/graph/Graph.cc b/runtimes/neurun/test/graph/Graph.cc
index e6db3fe49..ba5271668 100644
--- a/runtimes/neurun/test/graph/Graph.cc
+++ b/runtimes/neurun/test/graph/Graph.cc
@@ -16,37 +16,38 @@
#include <gtest/gtest.h>
-#include "graph/Graph.h"
+#include "model/Model.h"
+// TODO Change name to Model
TEST(Graph, inputs_and_outputs)
{
- ::neurun::graph::Graph graph;
+ ::neurun::model::Model model;
- ::neurun::model::operand::Index index0{0u};
- ::neurun::model::operand::Index index1{1u};
+ ::neurun::model::OperandIndex index0{0u};
+ ::neurun::model::OperandIndex index1{1u};
- graph.addInput({index0});
- graph.addInput({index1});
+ model.inputs.append({index0});
+ model.inputs.append({index1});
- ::neurun::model::operand::Index index10{10u};
- ::neurun::model::operand::Index index11{11u};
- ::neurun::model::operand::Index index12{12u};
+ ::neurun::model::OperandIndex index10{10u};
+ ::neurun::model::OperandIndex index11{11u};
+ ::neurun::model::OperandIndex index12{12u};
- graph.addOutput({index10});
- graph.addOutput({index11});
- graph.addOutput({index12});
+ model.outputs.append({index10});
+ model.outputs.append({index11});
+ model.outputs.append({index12});
- ASSERT_EQ(graph.getInputs().size(), 2);
- ASSERT_EQ(graph.getOutputs().size(), 3);
+ ASSERT_EQ(model.inputs.size(), 2);
+ ASSERT_EQ(model.outputs.size(), 3);
- ::neurun::model::operand::IO::Index io_index0{0};
- ::neurun::model::operand::IO::Index io_index1{1};
- ::neurun::model::operand::IO::Index io_index2{2};
+ ::neurun::model::IOIndex io_index0{0};
+ ::neurun::model::IOIndex io_index1{1};
+ ::neurun::model::IOIndex io_index2{2};
- ASSERT_EQ(graph.getInputs().at(io_index0), 0);
- ASSERT_EQ(graph.getInputs().at(io_index1), 1);
+ ASSERT_EQ(model.inputs.at(io_index0), 0);
+ ASSERT_EQ(model.inputs.at(io_index1), 1);
- ASSERT_EQ(graph.getOutputs().at(io_index0), 10);
- ASSERT_EQ(graph.getOutputs().at(io_index1), 11);
- ASSERT_EQ(graph.getOutputs().at(io_index2), 12);
+ ASSERT_EQ(model.outputs.at(io_index0), 10);
+ ASSERT_EQ(model.outputs.at(io_index1), 11);
+ ASSERT_EQ(model.outputs.at(io_index2), 12);
}
diff --git a/runtimes/neurun/test/graph/Index.cc b/runtimes/neurun/test/graph/Index.cc
index c605cc4aa..c24c2dd5e 100644
--- a/runtimes/neurun/test/graph/Index.cc
+++ b/runtimes/neurun/test/graph/Index.cc
@@ -16,9 +16,9 @@
#include <gtest/gtest.h>
-#include "graph/Index.h"
+#include "util/Index.h"
-using Index = ::neurun::graph::Index<uint32_t, struct TestTag>;
+using Index = ::neurun::util::Index<uint32_t, struct TestTag>;
TEST(Index, index_test)
{
diff --git a/runtimes/neurun/test/graph/MockNode.h b/runtimes/neurun/test/graph/MockNode.h
index 46a6274dd..9a5df7360 100644
--- a/runtimes/neurun/test/graph/MockNode.h
+++ b/runtimes/neurun/test/graph/MockNode.h
@@ -17,28 +17,28 @@
#ifndef __NEURUN_TEST_GRAPH_MOCK_NODE_H__
#define __NEURUN_TEST_GRAPH_MOCK_NODE_H__
-#include "model/operation/Node.h"
-#include "model/operand/IndexSet.h"
+#include "model/Operation.h"
+#include "model/OperandIndexSequence.h"
namespace neurun_test
{
namespace graph
{
-class SimpleMockNode : public neurun::model::operation::Node
+class SimpleMockNode : public neurun::model::Operation
{
public:
- SimpleMockNode(const neurun::model::operand::IndexSet &inputs,
- const neurun::model::operand::IndexSet &outputs)
- : neurun::model::operation::Node{neurun::model::operation::OperandConstraint::createAny()}
+ SimpleMockNode(const neurun::model::OperandIndexSequence &inputs,
+ const neurun::model::OperandIndexSequence &outputs)
+ : neurun::model::Operation{neurun::model::operation::OperandConstraint::createAny()}
{
setInputs(inputs);
setOutputs(outputs);
}
public:
- virtual void accept(neurun::model::operation::NodeVisitor &&) const override {}
- virtual std::string getName() const override { return "SimpleMockNode"; }
+ void accept(neurun::model::OperationVisitor &) const override {}
+ std::string getName() const override { return "SimpleMockNode"; }
};
} // namespace graph
diff --git a/runtimes/neurun/test/graph/operand/IndexSet.cc b/runtimes/neurun/test/graph/operand/IndexSet.cc
index de4768cda..73e7fd8ac 100644
--- a/runtimes/neurun/test/graph/operand/IndexSet.cc
+++ b/runtimes/neurun/test/graph/operand/IndexSet.cc
@@ -16,37 +16,37 @@
#include <gtest/gtest.h>
-#include "model/operand/IndexSet.h"
+#include "model/OperandIndexSequence.h"
-using neurun::model::operand::Index;
-using neurun::model::operand::IndexSet;
+using neurun::model::OperandIndex;
+using neurun::model::OperandIndexSequence;
-TEST(graph_operand_IndexSet, append)
+TEST(graph_OperandIndexSequence, append)
{
- IndexSet iset{0, 2, 4, 8};
+ OperandIndexSequence iset{0, 2, 4, 8};
ASSERT_EQ(iset.size(), 4);
- iset.append(Index{10});
+ iset.append(OperandIndex{10});
ASSERT_EQ(iset.size(), 5);
- neurun::model::operand::IO::Index index1{1};
- neurun::model::operand::IO::Index index2{4};
+ neurun::model::IOIndex index1{1};
+ neurun::model::IOIndex index2{4};
ASSERT_EQ(iset.at(index1), 2);
ASSERT_EQ(iset.at(index2), 10);
- ASSERT_TRUE(iset.contains(Index{2}));
- ASSERT_TRUE(iset.contains(Index{10}));
- ASSERT_FALSE(iset.contains(Index{11}));
+ ASSERT_TRUE(iset.contains(OperandIndex{2}));
+ ASSERT_TRUE(iset.contains(OperandIndex{10}));
+ ASSERT_FALSE(iset.contains(OperandIndex{11}));
}
-TEST(graph_operand_IndexSet, replace)
+TEST(graph_OperandIndexSequence, replace)
{
- IndexSet iset{0, 1, 2, 3};
+ OperandIndexSequence iset{0, 1, 2, 3};
- iset.replace(Index{1}, Index{9});
- ASSERT_FALSE(iset.contains(Index{1}));
- ASSERT_TRUE(iset.contains(Index{9}));
+ iset.replace(OperandIndex{1}, OperandIndex{9});
+ ASSERT_FALSE(iset.contains(OperandIndex{1}));
+ ASSERT_TRUE(iset.contains(OperandIndex{9}));
}
diff --git a/runtimes/neurun/test/graph/operand/LayoutSet.cc b/runtimes/neurun/test/graph/operand/LayoutSet.cc
index f83e76e30..b79fd13a3 100644
--- a/runtimes/neurun/test/graph/operand/LayoutSet.cc
+++ b/runtimes/neurun/test/graph/operand/LayoutSet.cc
@@ -16,10 +16,10 @@
#include <gtest/gtest.h>
-#include "graph/operand/LayoutSet.h"
+#include "model/LayoutSet.h"
-using neurun::graph::operand::Layout;
-using neurun::graph::operand::LayoutSet;
+using neurun::model::Layout;
+using neurun::model::LayoutSet;
TEST(graph_operand_LayoutSet, layout_set_operators)
{
diff --git a/runtimes/neurun/test/graph/operand/Set.cc b/runtimes/neurun/test/graph/operand/Set.cc
index 00b6a7222..fce422398 100644
--- a/runtimes/neurun/test/graph/operand/Set.cc
+++ b/runtimes/neurun/test/graph/operand/Set.cc
@@ -16,33 +16,30 @@
#include <gtest/gtest.h>
-#include "model/operand/Set.h"
+#include "model/Operands.h"
TEST(graph_operand_Set, set_test)
{
- neurun::model::operand::Set set;
+ neurun::model::Operands set;
- ::neurun::model::operand::Shape shape0{3};
- shape0.dim(0) = 1;
- shape0.dim(1) = 2;
- shape0.dim(2) = 3;
+ ::neurun::model::Shape shape0{1, 2, 3};
- ::neurun::model::operand::Shape shape1{4};
+ ::neurun::model::Shape shape1(4);
shape1.dim(0) = 10;
shape1.dim(1) = 20;
shape1.dim(2) = 30;
shape1.dim(3) = 40;
- ::neurun::model::operand::TypeInfo type{ANEURALNETWORKS_TENSOR_INT32, 0, 0};
+ ::neurun::model::TypeInfo type{neurun::model::DataType::INT32};
- set.append(shape0, type);
- set.append(shape1, type);
+ set.emplace(shape0, type);
+ set.emplace(shape1, type);
- ASSERT_EQ(set.exist(neurun::model::operand::Index{0u}), true);
- ASSERT_EQ(set.exist(neurun::model::operand::Index{1u}), true);
- ASSERT_EQ(set.exist(neurun::model::operand::Index{2u}), false);
+ ASSERT_EQ(set.exist(neurun::model::OperandIndex{0u}), true);
+ ASSERT_EQ(set.exist(neurun::model::OperandIndex{1u}), true);
+ ASSERT_EQ(set.exist(neurun::model::OperandIndex{2u}), false);
- ASSERT_EQ(set.at(neurun::model::operand::Index{0u}).shape().dim(0), 1);
- ASSERT_EQ(set.at(neurun::model::operand::Index{0u}).shape().dim(1), 2);
- ASSERT_EQ(set.at(neurun::model::operand::Index{0u}).shape().dim(2), 3);
+ ASSERT_EQ(set.at(neurun::model::OperandIndex{0u}).shape().dim(0), 1);
+ ASSERT_EQ(set.at(neurun::model::OperandIndex{0u}).shape().dim(1), 2);
+ ASSERT_EQ(set.at(neurun::model::OperandIndex{0u}).shape().dim(2), 3);
}
diff --git a/runtimes/neurun/test/graph/operand/UseDef.cc b/runtimes/neurun/test/graph/operand/UseDef.cc
index e3792f746..3afd6daa8 100644
--- a/runtimes/neurun/test/graph/operand/UseDef.cc
+++ b/runtimes/neurun/test/graph/operand/UseDef.cc
@@ -20,56 +20,52 @@
#include "graph/verifier/Verifier.h"
#include "cpp14/memory.h"
#include "../MockNode.h"
+#include "model/Model.h"
#include <typeindex>
namespace
{
-using IndexSet = neurun::model::operand::IndexSet;
+using IndexSet = neurun::model::OperandIndexSequence;
using MockNode = neurun_test::graph::SimpleMockNode;
} // namespace anonymous
TEST(graph_operand_usedef, usedef_test)
{
- neurun::graph::Graph graph;
+ std::unique_ptr<neurun::model::Model> model = nnfw::cpp14::make_unique<neurun::model::Model>();
neurun::graph::verifier::DAGChecker verifier;
- neurun::model::operand::Shape shape{1u};
- neurun::model::operand::TypeInfo type{ANEURALNETWORKS_TENSOR_INT32, 0, 0};
- shape.dim(0) = 3;
+ neurun::model::Shape shape(3);
+ neurun::model::TypeInfo type{neurun::model::DataType::INT32};
// Model Input/Output
- auto input_operand = graph.addOperand(shape, type);
- auto output_operand = graph.addOperand(shape, type);
+ auto input_operand = model->operands.emplace(shape, type);
+ auto output_operand = model->operands.emplace(shape, type);
- graph.addInput(input_operand);
- graph.operands().at(input_operand).setAsModelInput();
- graph.addOutput(output_operand);
- graph.operands().at(output_operand).setAsOperationOutput();
+ model->inputs.append(input_operand);
+ model->outputs.append(output_operand);
// MockNode1
- auto operand_index1 = graph.addOperand(shape, type);
- graph.operands().at(operand_index1).setAsOperationOutput();
- auto mocknode_index1 = graph.addOperation(
+ auto operand_index1 = model->operands.emplace(shape, type);
+ auto mocknode_index1 = model->operations.push(
nnfw::cpp14::make_unique<MockNode>(IndexSet{input_operand}, IndexSet{operand_index1}));
// MockNode2
- auto operand_index2 = graph.addOperand(shape, type);
- graph.operands().at(operand_index2).setAsOperationOutput();
- auto mocknode_index2 = graph.addOperation(
+ auto operand_index2 = model->operands.emplace(shape, type);
+ auto mocknode_index2 = model->operations.push(
nnfw::cpp14::make_unique<MockNode>(IndexSet{input_operand}, IndexSet{operand_index2}));
// MockNode3(two input)
- auto multiinput_index = graph.addOperation(nnfw::cpp14::make_unique<MockNode>(
+ auto multiinput_index = model->operations.push(nnfw::cpp14::make_unique<MockNode>(
IndexSet{operand_index1, operand_index2}, IndexSet{output_operand}));
+ neurun::graph::Graph graph{std::move(model)};
graph.finishBuilding();
ASSERT_EQ(verifier.verify(graph), true);
- const auto &operations = graph.operations();
// Check def
ASSERT_EQ(graph.operands().at(operand_index1).getDef().contains(mocknode_index1), true);
ASSERT_EQ(graph.operands().at(operand_index2).getDef().contains(mocknode_index2), true);
diff --git a/runtimes/neurun/test/graph/operation/Set.cc b/runtimes/neurun/test/graph/operation/Set.cc
index 3560482ee..3c5fd8345 100644
--- a/runtimes/neurun/test/graph/operation/Set.cc
+++ b/runtimes/neurun/test/graph/operation/Set.cc
@@ -17,18 +17,18 @@
#include <gtest/gtest.h>
#include "../MockNode.h"
-#include "model/operation/Set.h"
+#include "model/Operations.h"
-using neurun::model::operation::Set;
-using neurun::model::operation::Node;
-using neurun::model::operation::Index;
+using neurun::model::Operations;
+using neurun::model::Operation;
+using neurun::model::OperationIndex;
TEST(graph_operation_Set, operation_test)
{
- Set set;
- set.append(
- std::unique_ptr<Node>(new neurun_test::graph::SimpleMockNode({1, 2, 3, 4}, {5, 6, 7})));
- Index idx{0u};
- ASSERT_EQ(set.at(idx).getInputs().size(), 4);
- ASSERT_EQ(set.at(idx).getOutputs().size(), 3);
+ Operations ops;
+ ops.push(
+ std::unique_ptr<Operation>(new neurun_test::graph::SimpleMockNode({1, 2, 3, 4}, {5, 6, 7})));
+ OperationIndex idx{0u};
+ ASSERT_EQ(ops.at(idx).getInputs().size(), 4);
+ ASSERT_EQ(ops.at(idx).getOutputs().size(), 3);
}
diff --git a/runtimes/neurun/test/graph/operation/SetIO.cc b/runtimes/neurun/test/graph/operation/SetIO.cc
index a475bdcc9..88e111e97 100644
--- a/runtimes/neurun/test/graph/operation/SetIO.cc
+++ b/runtimes/neurun/test/graph/operation/SetIO.cc
@@ -17,72 +17,84 @@
#include <gtest/gtest.h>
#include "graph/Graph.h"
-#include "cpp14/memory.h"
+#include "model/Model.h"
+#include "model/Index.h"
+#include "model/OperandIndexSequence.h"
#include "model/operation/Conv2DNode.h"
#include "model/operation/ConcatNode.h"
-#include "model/operand/Index.h"
-#include "model/operand/IndexSet.h"
+
+#include <cpp14/memory.h>
#include <stdexcept>
-using Index = neurun::model::operand::IO::Index;
-using IndexSet = neurun::model::operand::IndexSet;
-using GraphNodeInitParam = neurun::model::operation::Node::InitParam;
+using Index = neurun::model::IOIndex;
+using IndexSet = neurun::model::OperandIndexSequence;
TEST(graph_operation_setIO, operation_setIO_conv)
{
- neurun::graph::Graph graph;
+ neurun::model::Model model;
- neurun::model::operand::Shape shape{1u};
- neurun::model::operand::TypeInfo type{ANEURALNETWORKS_TENSOR_INT32, 0, 0};
- shape.dim(0) = 3;
+ neurun::model::Shape shape{3};
+ neurun::model::TypeInfo type{neurun::model::DataType::INT32};
// Add Conv
- std::vector<uint32_t> params;
- for (int i = 0; i < 7; ++i)
- {
- params.emplace_back(graph.addOperand(shape, type).asInt());
- }
- uint32_t outoperand = graph.addOperand(shape, type).asInt();
-
using GraphNode = neurun::model::operation::Conv2DNode;
- auto conv =
- nnfw::cpp14::make_unique<GraphNode>(GraphNodeInitParam{7, params.data(), 1, &outoperand});
- ASSERT_EQ(conv->getInputs().at(Index{0}).asInt(), params[0]);
+ auto input_operand = model.operands.emplace(shape, type);
+ auto kernel_operand = model.operands.emplace(shape, type);
+ auto bias_operand = model.operands.emplace(shape, type);
+ IndexSet inputs{input_operand, kernel_operand, bias_operand};
+
+ GraphNode::Param conv_params;
+ conv_params.padding.type = neurun::model::PaddingType::SAME;
+ conv_params.stride.horizontal = 1;
+ conv_params.stride.vertical = 1;
+ conv_params.activation = neurun::model::Activation::NONE;
+
+ auto output_operand = model.operands.emplace(shape, type).value();
+ IndexSet outputs{output_operand};
+
+ auto conv = nnfw::cpp14::make_unique<GraphNode>(inputs, outputs, conv_params);
+
+ ASSERT_NE(conv, nullptr);
+ ASSERT_EQ(conv->getInputs().at(Index{0}).value(), inputs.at(0).value());
conv->setInputs({8, 9, 10});
- ASSERT_NE(conv->getInputs().at(Index{0}).asInt(), params[0]);
- ASSERT_EQ(conv->getInputs().at(Index{0}).asInt(), 8);
+ ASSERT_NE(conv->getInputs().at(Index{0}).value(), inputs.at(0).value());
+ ASSERT_EQ(conv->getInputs().at(Index{0}).value(), 8);
}
TEST(graph_operation_setIO, operation_setIO_concat)
{
- neurun::graph::Graph graph;
+ neurun::model::Model model;
+
+ neurun::model::Shape shape{3};
+
+ neurun::model::TypeInfo type{neurun::model::DataType::INT32};
- neurun::model::operand::Shape shape{1u};
- neurun::model::operand::TypeInfo type{ANEURALNETWORKS_TENSOR_INT32, 0, 0};
- shape.dim(0) = 3;
+ using GraphNode = neurun::model::operation::ConcatNode;
// Add Concat
- std::vector<uint32_t> params;
- for (int i = 0; i < 7; ++i)
+ IndexSet inputs;
+ for (int i = 0; i < 6; ++i)
{
- params.emplace_back(graph.addOperand(shape, type).asInt());
+ inputs.append(model.operands.emplace(shape, type));
}
- uint32_t outoperand = graph.addOperand(shape, type).asInt();
- using GraphNode = neurun::model::operation::ConcatNode;
+ GraphNode::Param concat_params{0};
+
+ auto output_operand = model.operands.emplace(shape, type).value();
+ IndexSet outputs{output_operand};
- auto concat =
- nnfw::cpp14::make_unique<GraphNode>(GraphNodeInitParam{7, params.data(), 1, &outoperand});
+ auto concat = nnfw::cpp14::make_unique<GraphNode>(inputs, outputs, concat_params);
+ ASSERT_NE(concat, nullptr);
ASSERT_EQ(concat->getInputs().size(), 6);
- ASSERT_EQ(concat->getInputs().at(Index{0}).asInt(), params[0]);
+ ASSERT_EQ(concat->getInputs().at(Index{0}).value(), inputs.at(0).value());
concat->setInputs({80, 6, 9, 11});
ASSERT_EQ(concat->getInputs().size(), 4);
- ASSERT_NE(concat->getInputs().at(Index{0}).asInt(), params[0]);
- ASSERT_EQ(concat->getInputs().at(Index{0}).asInt(), 80);
- ASSERT_EQ(concat->getInputs().at(Index{2}).asInt(), 9);
+ ASSERT_NE(concat->getInputs().at(Index{0}).value(), inputs.at(0).value());
+ ASSERT_EQ(concat->getInputs().at(Index{0}).value(), 80);
+ ASSERT_EQ(concat->getInputs().at(Index{2}).value(), 9);
ASSERT_THROW(concat->getInputs().at(Index{5}), std::out_of_range);
}
diff --git a/runtimes/neurun/test/graph/verifier/Verifier.cc b/runtimes/neurun/test/graph/verifier/Verifier.cc
index a37b0ac1f..45e4d727b 100644
--- a/runtimes/neurun/test/graph/verifier/Verifier.cc
+++ b/runtimes/neurun/test/graph/verifier/Verifier.cc
@@ -16,36 +16,37 @@
#include <gtest/gtest.h>
-#include "model/operation/Node.h"
+#include "model/Operation.h"
#include "graph/Graph.h"
#include "graph/verifier/Verifier.h"
#include "cpp14/memory.h"
-#include "model/operand/Object.h"
+#include "model/Model.h"
+#include "model/Operand.h"
#include "../MockNode.h"
-using IndexSet = neurun::model::operand::IndexSet;
+using IndexSet = neurun::model::OperandIndexSequence;
using MockNode = neurun_test::graph::SimpleMockNode;
TEST(Verifier, dag_checker)
{
- neurun::graph::Graph graph;
- neurun::graph::verifier::DAGChecker verifier;
+ std::unique_ptr<neurun::model::Model> model = nnfw::cpp14::make_unique<neurun::model::Model>();
- ::neurun::model::operand::Shape shape{1u};
- ::neurun::model::operand::TypeInfo type{ANEURALNETWORKS_TENSOR_INT32, 0, 0};
- shape.dim(0) = 3;
+ ::neurun::model::Shape shape{3};
+ ::neurun::model::TypeInfo type{neurun::model::DataType::INT32};
- auto operand1 = graph.addOperand(shape, type);
- auto operand2 = graph.addOperand(shape, type);
+ auto operand1 = model->operands.emplace(shape, type);
+ auto operand2 = model->operands.emplace(shape, type);
- graph.addInput(operand1);
- graph.operands().at(operand1).setAsModelInput();
- graph.addOutput(operand2);
- graph.operands().at(operand2).setAsOperationOutput();
+ model->inputs.append(operand1);
+ model->outputs.append(operand2);
- graph.addOperation(nnfw::cpp14::make_unique<MockNode>(IndexSet{operand1}, IndexSet{operand2}));
+ model->operations.push(
+ nnfw::cpp14::make_unique<MockNode>(IndexSet{operand1}, IndexSet{operand2}));
+ neurun::graph::Graph graph{std::move(model)};
graph.finishBuilding();
+ neurun::graph::verifier::DAGChecker verifier;
+
ASSERT_EQ(verifier.verify(graph), true);
}
diff --git a/runtimes/neurun/test/model.cc b/runtimes/neurun/test/model.cc
deleted file mode 100644
index 2ba22a204..000000000
--- a/runtimes/neurun/test/model.cc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <gtest/gtest.h>
-
-#include "frontend/wrapper/model.h"
-
-TEST(MODEL, model_build)
-{
- ANeuralNetworksModel model;
- ASSERT_EQ(model.isFinished(), false);
-}
diff --git a/runtimes/neurun/test/util/ShapeInference.cc b/runtimes/neurun/test/util/ShapeInference.cc
new file mode 100644
index 000000000..a2b8cb9eb
--- /dev/null
+++ b/runtimes/neurun/test/util/ShapeInference.cc
@@ -0,0 +1,233 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "model/Layout.h"
+#include "util/ShapeInference.h"
+
+using namespace neurun::model;
+
+TEST(ShapeInference, ElementwiseNode)
+{
+ Shape lhs_shape{1, 299, 299, 3};
+ Shape rhs_shape{3};
+ auto infered_shapes = neurun::shape_inference::inferEltwiseShape(lhs_shape, rhs_shape);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.dim(0), 1);
+ ASSERT_EQ(infered_out_shape.dim(1), 299);
+ ASSERT_EQ(infered_out_shape.dim(2), 299);
+ ASSERT_EQ(infered_out_shape.dim(3), 3);
+}
+
+TEST(ShapeInference, IncorrectElementwiseNode)
+{
+ Shape lhs_shape{1, 299, 299, 3};
+ Shape rhs_shape{5, 3};
+ ASSERT_THROW(neurun::shape_inference::inferEltwiseShape(lhs_shape, rhs_shape),
+ std::runtime_error);
+}
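
These two cases exercise NumPy-style broadcasting: shapes are aligned from the trailing dimension, and each pair of extents must match or be 1. A sketch of the rule (a hypothetical helper, not the runtime's inferEltwiseShape itself):

    #include <cstddef>
    #include <vector>
    bool broadcastable(std::vector<int> a, std::vector<int> b)
    {
      // Left-pad the shorter shape with 1s, then compare per dimension.
      while (a.size() < b.size()) a.insert(a.begin(), 1);
      while (b.size() < a.size()) b.insert(b.begin(), 1);
      for (std::size_t i = 0; i < a.size(); ++i)
        if (a[i] != b[i] && a[i] != 1 && b[i] != 1) return false;
      return true;
    }
    // broadcastable({1,299,299,3}, {3})   -> true  ({3} expands to {1,1,1,3})
    // broadcastable({1,299,299,3}, {5,3}) -> false (299 vs 5), hence the std::runtime_error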
+
+TEST(ShapeInference, Pool2DNodeSame)
+{
+ Shape in_shape{10, 6, 12, 20};
+ Stride stride{3, 7};
+ Padding padding{PaddingType::SAME};
+
+ operation::AvgPool2DNode::Param avg_pool_param{3, 6, stride, padding, Activation::NONE};
+ auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, avg_pool_param);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 20);
+
+ operation::MaxPool2DNode::Param max_pool_param{3, 6, stride, padding, Activation::NONE};
+ infered_shapes = neurun::shape_inference::inferMaxPoolShape(in_shape, max_pool_param);
+ infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 20);
+}
+
+TEST(ShapeInference, Pool2DNodeValid)
+{
+ Shape in_shape{10, 6, 12, 20};
+ Stride stride{3, 7};
+ Padding padding{PaddingType::VALID};
+
+ operation::AvgPool2DNode::Param avg_pool_param{3, 6, stride, padding, Activation::NONE};
+ auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, avg_pool_param);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 1);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 20);
+
+ operation::MaxPool2DNode::Param max_pool_param{3, 6, stride, padding, Activation::NONE};
+ infered_shapes = neurun::shape_inference::inferMaxPoolShape(in_shape, max_pool_param);
+ infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 1);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 20);
+}
+
+TEST(ShapeInference, Pool2DNodeExplicit)
+{
+ Shape in_shape{10, 3, 5, 20};
+
+ Stride stride{3, 7};
+ Padding padding{PaddingType::EXPLICIT, {4, 3, 2, 1}};
+
+ operation::AvgPool2DNode::Param avg_pool_param{3, 6, stride, padding, Activation::NONE};
+ auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, avg_pool_param);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 1);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 20);
+
+ operation::MaxPool2DNode::Param max_pool_param{3, 6, stride, padding, Activation::NONE};
+ infered_shapes = neurun::shape_inference::inferMaxPoolShape(in_shape, max_pool_param);
+ infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 1);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 20);
+}
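
The expected H/W values in these pooling tests (and in the Conv2D/DepthwiseConv2D tests below) follow the standard output-size arithmetic. A worked sketch, assuming Stride{vertical, horizontal}, kernel height/width (kh, kw) as the first two Param fields, and explicit padding ordered {left, right, top, bottom}; these orderings are inferred from the numbers above, not from the headers:

    #include <cstdint>
    int32_t out_same(int32_t in, int32_t stride) { return (in + stride - 1) / stride; }
    int32_t out_valid(int32_t in, int32_t k, int32_t stride) { return (in - k) / stride + 1; }
    int32_t out_explicit(int32_t in, int32_t k, int32_t pad_front, int32_t pad_back, int32_t stride)
    {
      return (in + pad_front + pad_back - k) / stride + 1;
    }
    // SAME:     H = out_same(6, 3) = 2              W = out_same(12, 7) = 2
    // VALID:    H = out_valid(6, 3, 3) = 2          W = out_valid(12, 6, 7) = 1
    // EXPLICIT: H = out_explicit(3, 3, 2, 1, 3) = 2 W = out_explicit(5, 6, 4, 3, 7) = 1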
+
+TEST(ShapeInference, Conv2DNode)
+{
+ Shape in_shape{10, 6, 12, 20};
+ Shape ker_shape{30, 3, 6, 20};
+
+ operation::Conv2DNode::Param param{Stride{3, 7}, Padding{PaddingType::VALID}, Activation::NONE};
+ auto infered_shapes = neurun::shape_inference::inferConv2DShape(in_shape, ker_shape, param);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 1);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 30);
+
+ param = operation::Conv2DNode::Param{Stride{3, 7}, Padding{PaddingType::SAME}, Activation::NONE};
+ infered_shapes = neurun::shape_inference::inferConv2DShape(in_shape, ker_shape, param);
+ infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 30);
+
+ param = operation::Conv2DNode::Param{Stride{3, 7}, Padding{PaddingType::EXPLICIT, {4, 3, 2, 1}},
+ Activation::NONE};
+ infered_shapes = neurun::shape_inference::inferConv2DShape(in_shape, ker_shape, param);
+ infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 3);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 30);
+}
+
+TEST(ShapeInference, DepthwiseConv2DNode)
+{
+ Shape in_shape{10, 6, 12, 20};
+ Shape ker_shape{1, 3, 6, 60};
+
+ operation::DepthwiseConv2DNode::Param param{Stride{3, 7}, Padding{PaddingType::VALID}, 3,
+ Activation::NONE};
+ auto infered_shapes =
+ neurun::shape_inference::inferDepthwiseConv2DShape(in_shape, ker_shape, param);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 1);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 60);
+
+ param = operation::DepthwiseConv2DNode::Param{Stride{3, 7}, Padding{PaddingType::SAME}, 3,
+ Activation::NONE};
+ infered_shapes = neurun::shape_inference::inferDepthwiseConv2DShape(in_shape, ker_shape, param);
+ infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 60);
+
+ param = operation::DepthwiseConv2DNode::Param{
+ Stride{3, 7}, Padding{PaddingType::EXPLICIT, {4, 3, 2, 1}}, 3, Activation::NONE};
+ infered_shapes = neurun::shape_inference::inferDepthwiseConv2DShape(in_shape, ker_shape, param);
+ infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 4);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).N, 10);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).H, 3);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).W, 2);
+ ASSERT_EQ(infered_out_shape.asFeature(Layout::NHWC).C, 60);
+}
+
+TEST(ShapeInference, ConcatNode)
+{
+ Shape in1{10, 20, 30, 3, 50};
+ Shape in2{10, 20, 30, 2, 50};
+ Shape in3{10, 20, 30, 2, 50};
+
+ operation::ConcatNode::Param param{3};
+ auto infered_shapes = neurun::shape_inference::inferConcatShape({in1, in2, in3}, param);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 5);
+ ASSERT_EQ(infered_out_shape.dim(0), 10);
+ ASSERT_EQ(infered_out_shape.dim(1), 20);
+ ASSERT_EQ(infered_out_shape.dim(2), 30);
+ ASSERT_EQ(infered_out_shape.dim(3), 7);
+ ASSERT_EQ(infered_out_shape.dim(4), 50);
+}
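
The dim(3) expectation is simply the sum of the inputs' extents along the concat axis, with every other dimension required to match:

    // axis 3: 3 + 2 + 2 == 7; dims {10, 20, 30, _, 50} agree across all inputs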
+
+TEST(ShapeInference, FullyConnectedNode)
+{
+ Shape in_shape{3, 4, 5, 6};
+ Shape ker_shape{3, 10};
+ auto infered_shapes = neurun::shape_inference::inferFullyConnectedShape(in_shape, ker_shape);
+ auto infered_out_shape = infered_shapes[0];
+
+ ASSERT_EQ(infered_out_shape.rank(), 2);
+ ASSERT_EQ(infered_out_shape.dim(0), 36);
+ ASSERT_EQ(infered_out_shape.dim(1), 3);
+}
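
The FullyConnected expectation follows from flattening the input, assuming the kernel shape is laid out as {num_units, input_size} = {3, 10}:

    #include <cstdint>
    const int32_t num_elems  = 3 * 4 * 5 * 6;          // 360 elements in in_shape
    const int32_t input_size = 10;                     // ker_shape.dim(1)
    const int32_t num_units  = 3;                      // ker_shape.dim(0)
    const int32_t batch      = num_elems / input_size; // 36
    // inferred output shape: {batch, num_units} == {36, 3}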
diff --git a/runtimes/pure_arm_compute/CMakeLists.txt b/runtimes/pure_arm_compute/CMakeLists.txt
index 2a26a7bf2..32a0ca7ac 100644
--- a/runtimes/pure_arm_compute/CMakeLists.txt
+++ b/runtimes/pure_arm_compute/CMakeLists.txt
@@ -9,11 +9,11 @@ file(GLOB_RECURSE SOURCES "src/*.cc")
add_library(nnapi_pure_arm_compute SHARED ${SOURCES})
# To ignore compiler warnings from ARM Compute Library
target_compile_options(nnapi_pure_arm_compute PRIVATE -Wno-ignored-attributes)
-target_include_directories(nnapi_pure_arm_compute PUBLIC ${NNFW_INCLUDE_DIR})
target_include_directories(nnapi_pure_arm_compute PUBLIC src)
if(BUILD_TFLITE_BENCHMARK_MODEL)
target_compile_definitions(nnapi_pure_arm_compute PUBLIC "TFLITE_PROFILING_ENABLED")
endif()
+target_link_libraries(nnapi_pure_arm_compute nnfw-header)
target_link_libraries(nnapi_pure_arm_compute arm_compute arm_compute_ex)
target_link_libraries(nnapi_pure_arm_compute nnfw_lib_cpp14 nnfw_lib_misc nnfw_lib_profiling)
@@ -23,7 +23,6 @@ endif()
set_target_properties(nnapi_pure_arm_compute PROPERTIES OUTPUT_NAME neuralnetworks)
install(TARGETS nnapi_pure_arm_compute DESTINATION lib/pureacl RENAME neuralnetworks)
-install(TARGETS nnapi_pure_arm_compute DESTINATION lib RENAME neuralnetworks)
# To prevent undefined references
add_executable(pure_arm_compute_symbolcheck symbolcheck.cpp)
diff --git a/runtimes/pure_arm_compute/src/compilation.cc b/runtimes/pure_arm_compute/src/compilation.cc
index bed42529b..8cc86ebae 100644
--- a/runtimes/pure_arm_compute/src/compilation.cc
+++ b/runtimes/pure_arm_compute/src/compilation.cc
@@ -30,56 +30,12 @@
#include <arm_compute/runtime/IFunction.h>
#include <arm_compute/runtime/CL/CLScheduler.h>
#include <arm_compute/runtime/CL/CLSubTensor.h>
-#include <arm_compute/runtime/CL/functions/CLArithmeticAddition.h>
-#include <arm_compute/runtime/CL/functions/CLArithmeticSubtractionEx.h>
-#include <arm_compute/runtime/CL/functions/CLPadLayerEx.h>
-#include <arm_compute/runtime/CL/functions/CLPixelWiseMultiplication.h>
-#include <arm_compute/runtime/CL/functions/CLPixelWiseDivision.h>
-#include <arm_compute/runtime/CL/functions/CLPoolingLayer.h>
-#include <arm_compute/runtime/CL/functions/CLActivationLayer.h>
-#include <arm_compute/runtime/CL/functions/CLActivationLayerEx.h>
-#include <arm_compute/runtime/CL/functions/CLScale.h>
-#include <arm_compute/runtime/CL/functions/CLSpaceToBatchND.h>
-#include <arm_compute/runtime/CL/functions/CLSpaceToDepth.h>
-#include <arm_compute/runtime/CL/functions/CLReshapeLayer.h>
-#include <arm_compute/runtime/CL/functions/CLStridedSliceEx.h>
-#include <arm_compute/runtime/CL/functions/CLSoftmaxLayer.h>
-#include <arm_compute/runtime/CL/functions/CLGather.h>
-#include <arm_compute/runtime/CL/functions/CLHashtableLookup.h>
-#include <arm_compute/runtime/CL/functions/CLTopKV2.h>
-#include <arm_compute/runtime/CL/functions/CLArgMinMax.h>
-#include <arm_compute/runtime/CL/functions/CLCast.h>
-#include <arm_compute/runtime/CL/functions/CLConvolutionLayer.h>
-#include <arm_compute/runtime/CL/functions/CLDepthwiseConvolutionLayer.h>
-#include <arm_compute/runtime/CL/functions/CLDequantizationLayer.h>
-#include <arm_compute/runtime/CL/functions/CLDepthToSpace.h>
-#include <arm_compute/runtime/CL/functions/CLPermuteEx.h>
-#include <arm_compute/runtime/CL/functions/CLReduceOperation.h>
-#include <arm_compute/runtime/CL/functions/CLRNNLayer.h>
-#include <arm_compute/runtime/CL/functions/CLFloor.h>
-#include <arm_compute/runtime/CL/functions/CLCopy.h>
-#include <arm_compute/runtime/CL/functions/CLNormalizationLayerEx.h>
-#include <arm_compute/runtime/CL/functions/CLExp.h>
-#include <arm_compute/runtime/CL/functions/CLBatchToSpaceND.h>
-#include <arm_compute/runtime/CL/functions/CLEmbeddingLookup.h>
-#include <arm_compute/runtime/CL/functions/CLSquaredDifference.h>
-#include <arm_compute/runtime/CL/functions/CLNeg.h>
-#include <arm_compute/runtime/CL/functions/CLPReLU.h>
-#include <arm_compute/runtime/CL/functions/CLBinaryLogicalOp.h>
-#include <arm_compute/runtime/CL/functions/CLComparisonOp.h>
+#include <arm_compute/runtime/CL/CLFunctions.h> // Include all ARM Compute CL functions
+#include <arm_compute/runtime/CL/CLFunctionsEx.h> // Include all ARM Compute EX CL functions
#include <arm_compute/runtime/SubTensor.h>
-#include <arm_compute/runtime/NEON/functions/NESoftmaxLayer.h>
-#include <arm_compute/runtime/NEON/functions/NEArithmeticAddition.h>
-#include <arm_compute/runtime/NEON/functions/NEArithmeticSubtraction.h>
-#include <arm_compute/runtime/NEON/functions/NEPixelWiseMultiplication.h>
-#include <arm_compute/runtime/NEON/functions/NEPoolingLayer.h>
-#include <arm_compute/runtime/NEON/functions/NEActivationLayer.h>
-#include <arm_compute/runtime/NEON/functions/NEConvolutionLayer.h>
-#include <arm_compute/runtime/NEON/functions/NEDepthwiseConvolutionLayer.h>
-#include <arm_compute/runtime/NEON/functions/NEFloor.h>
-#include <arm_compute/runtime/NEON/functions/NENormalizationLayerEx.h>
-#include <arm_compute/runtime/NEON/functions/NEReshapeLayer.h>
+#include <arm_compute/runtime/NEON/NEFunctions.h> // Include all ARM Compute NEON functions
+#include <arm_compute/runtime/NEON/NEFunctionsEx.h> // Include all ARM Compute EX NEON functions
#include "internal/arm_compute.h"
#include "internal/arm_compute/Cast.h"
@@ -92,23 +48,9 @@
#include "internal/nnapi/tensor/Reader.h"
#include "internal/arm_compute/feature/View.h"
#include "internal/arm_compute/tensor/View.h"
-#include "internal/layers/GenericReshapeLayer.h"
-#include "internal/layers/SimpleArithmeticAddition.h"
-#include "internal/layers/SimplePadLayer.h"
-#include "internal/layers/SimpleCastLayer.h"
-#include "internal/layers/SimpleTransposeConv.h"
-#include "internal/layers/GenericFullyConnectedLayer.h"
-#include "internal/layers/SimpleSpaceToDepth.h"
-#include "internal/layers/SimpleEmbeddingLookup.h"
-#include "internal/layers/SimpleDepthToSpace.h"
-#include "internal/layers/SimpleBatchToSpaceNd.h"
-#include "internal/layers/SimpleHashtableLookupLayer.h"
-#include "internal/layers/SimplePackLayer.h"
-#include "internal/layers/SimpleSpaceToBatchND.h"
-#include "internal/layers/SimpleNeg.h"
-#include "internal/layers/SimpleUnpackLayer.h"
-#include "internal/layers/SimpleSQRT.h"
-#include "internal/layers/SimpleArgMinMax.h"
+
+#include <arm_compute/runtime/misc/functions/GenericReshapeLayer.h>
+#include <arm_compute/runtime/misc/functions/GenericGather.h>
#include "misc/matrix/IndexIterator.h"
#include "misc/kernel/IndexIterator.h"
@@ -121,6 +63,8 @@
#include "model.h"
#include "logging.h"
+using namespace arm_compute::misc;
+
template <typename T> T from_env(const char *);
template <> bool from_env(const char *s)
@@ -183,7 +127,8 @@ Padding valid_padding(void)
return padding;
}
-Padding same_padding(const nnfw::misc::feature::Shape &ifm_shape, const Stride &stride, uint32_t kw,
+Padding same_padding(const nnfw::misc::feature::Shape &ifm_shape,
+ const nnfw::misc::feature::Shape &ofm_shape, const Stride &stride, uint32_t kw,
uint32_t kh)
{
Padding padding;
@@ -195,13 +140,10 @@ Padding same_padding(const nnfw::misc::feature::Shape &ifm_shape, const Stride &
// padding_to_beginning = total_padding / 2
// padding_to_end = (total_padding + 1)/2.
//
- const int32_t out_size_height = (ifm_shape.H + stride.vertical - 1) / stride.vertical;
- const int32_t out_size_width = (ifm_shape.W + stride.horizontal - 1) / stride.horizontal;
-
- const int32_t vertical_needed_input = (out_size_height - 1) * stride.vertical + kh;
+ const int32_t vertical_needed_input = (ofm_shape.H - 1) * stride.vertical + kh;
const int32_t vertical_total_padding = std::max(0, vertical_needed_input - ifm_shape.H);
- const int32_t horizontal_needed_input = (out_size_width - 1) * stride.horizontal + kw;
+ const int32_t horizontal_needed_input = (ofm_shape.W - 1) * stride.horizontal + kw;
const int32_t horizontal_total_padding = std::max(0, horizontal_needed_input - ifm_shape.W);
padding.top = vertical_total_padding / 2;
@@ -212,7 +154,7 @@ Padding same_padding(const nnfw::misc::feature::Shape &ifm_shape, const Stride &
return padding;
}
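
The rewritten same_padding derives the needed input extent from the actual output shape instead of recomputing it as ceil(input/stride), so callers whose OFM differs from that estimate get correct padding. A minimal sketch of the computation, following the TF Lite SAME convention quoted in the comment above:

    #include <algorithm>
    #include <cstdint>
    struct Pad { int32_t top, bottom, left, right; };
    Pad same_pad(int32_t ifm_h, int32_t ifm_w, int32_t ofm_h, int32_t ofm_w,
                 int32_t stride_v, int32_t stride_h, int32_t kh, int32_t kw)
    {
      const int32_t total_v = std::max(0, (ofm_h - 1) * stride_v + kh - ifm_h);
      const int32_t total_h = std::max(0, (ofm_w - 1) * stride_h + kw - ifm_w);
      // padding_to_beginning gets total/2, padding_to_end gets (total + 1)/2
      return {total_v / 2, (total_v + 1) / 2, total_h / 2, (total_h + 1) / 2};
    }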
-::arm_compute::PadStrideInfo asPadStringInfo(const Padding &padding, const Stride &stride)
+::arm_compute::PadStrideInfo asPadStrideInfo(const Padding &padding, const Stride &stride)
{
return ::arm_compute::PadStrideInfo{stride.horizontal,
stride.vertical,
@@ -223,6 +165,33 @@ Padding same_padding(const nnfw::misc::feature::Shape &ifm_shape, const Stride &
::arm_compute::DimensionRoundingType::FLOOR};
}
+::arm_compute::ActivationLayerInfo asActInfo(FuseCode act)
+{
+ if (act == ANEURALNETWORKS_FUSED_NONE)
+ {
+ return ::arm_compute::ActivationLayerInfo();
+ }
+ else if (act == ANEURALNETWORKS_FUSED_RELU)
+ {
+ return ::arm_compute::ActivationLayerInfo(
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::RELU);
+ }
+ else if (act == ANEURALNETWORKS_FUSED_RELU1)
+ {
+ return ::arm_compute::ActivationLayerInfo(
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 1.0f, -1.0f);
+ }
+ else if (act == ANEURALNETWORKS_FUSED_RELU6)
+ {
+ return ::arm_compute::ActivationLayerInfo(
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.0f, 0.0f);
+ }
+ else
+ {
+ throw std::runtime_error("Not supported, yet");
+ }
+}
+
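
asActInfo lets the activation be fused into the layer itself; the Conv2D stages below drop their trailing ActivationBuilder step accordingly. For instance, RELU6 maps onto ARM Compute's bounded ReLU:

    // ANEURALNETWORKS_FUSED_RELU6 -> clamp(x, 0, 6)
    const auto info = asActInfo(ANEURALNETWORKS_FUSED_RELU6);
    // same as ActivationLayerInfo(LU_BOUNDED_RELU, /*a=*/6.0f, /*b=*/0.0f)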
struct IAllocationContext
{
virtual ~IAllocationContext() = default;
@@ -641,19 +610,6 @@ void Planner::visit(const ::internal::tflite::op::Add::Node &node)
std::unique_ptr<::arm_compute::IFunction> fn;
- // NOTE SimpleArithmeticAddition is quite slow, but may be useful for debugging
- if (from_env<bool>(std::getenv("USE_SIMPLE_ARITHMETIC_ADDITION")))
- {
- // NOTE SimpleArithmeticAddition does not support broadcasting
- assert(lhs_shape == rhs_shape);
-
- auto l = nnfw::cpp14::make_unique<SimpleArithmeticAddition>();
-
- l->configure(lhs_alloc, rhs_alloc, ofm_alloc);
-
- fn = std::move(l);
- }
- else
{
if (::internal::arm_compute::isGpuMode())
{
@@ -737,7 +693,7 @@ void Planner::visit(const ::internal::tflite::op::Sub::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArithmeticSubtractionEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArithmeticSubtraction>();
// TODO Decide ConvertPolicy (WARP? SATURATE?) according to NN API specification
fn->configure(CAST_CL(lhs_alloc), CAST_CL(rhs_alloc), CAST_CL(ofm_alloc),
@@ -900,11 +856,9 @@ void Planner::visit(const ::internal::tflite::op::Div::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPixelWiseDivision>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArithmeticDivision>();
- fn->configure(CAST_CL(lhs_alloc), CAST_CL(rhs_alloc), CAST_CL(ofm_alloc),
- 1.0, // scale
- arm_compute::ConvertPolicy::SATURATE, arm_compute::RoundingPolicy::TO_ZERO);
+ fn->configure(CAST_CL(lhs_alloc), CAST_CL(rhs_alloc), CAST_CL(ofm_alloc));
builder.append("Div", std::move(fn));
}
@@ -1039,7 +993,7 @@ void Planner::visit(const ::internal::tflite::op::Conv2D::Implicit::Node &node)
param.stride = stride;
param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? same_padding(ifm_shape, stride, ker_shape.W, ker_shape.H)
+ ? same_padding(ifm_shape, ofm_shape, stride, ker_shape.W, ker_shape.H)
: valid_padding();
param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
@@ -1050,14 +1004,18 @@ void Planner::visit(const ::internal::tflite::op::Conv2D::Implicit::Node &node)
auto ker_alloc = ctx.at(::internal::tflite::operand::Index{param.ker_index});
auto bias_alloc = ctx.at(::internal::tflite::operand::Index{param.bias_index});
- const auto conv_info = asPadStringInfo(param.padding, param.stride);
+ const auto conv_info = asPadStrideInfo(param.padding, param.stride);
+ const auto fused_act = asActInfo(param.activation);
if (::internal::arm_compute::isGpuMode())
{
std::unique_ptr<::arm_compute::CLConvolutionLayer> fn{new ::arm_compute::CLConvolutionLayer};
+ // To reach the fused_act parameter, the intervening defaulted arguments, WeightsInfo()
+ // and Size2D(1U, 1U) (dilation), are passed explicitly with their default values.
fn->configure(CAST_CL(ifm_alloc), CAST_CL(ker_alloc), CAST_CL(bias_alloc), CAST_CL(ofm_alloc),
- conv_info);
+ conv_info, ::arm_compute::WeightsInfo(), ::arm_compute::Size2D(1U, 1U),
+ fused_act);
builder.append("Conv2D", std::move(fn));
}
@@ -1065,12 +1023,13 @@ void Planner::visit(const ::internal::tflite::op::Conv2D::Implicit::Node &node)
{
std::unique_ptr<::arm_compute::NEConvolutionLayer> fn{new ::arm_compute::NEConvolutionLayer};
- fn->configure(ifm_alloc, ker_alloc, bias_alloc, ofm_alloc, conv_info);
+ // To reach the fused_act parameter, the intervening defaulted arguments, WeightsInfo()
+ // and Size2D(1U, 1U) (dilation), are passed explicitly with their default values.
+ fn->configure(ifm_alloc, ker_alloc, bias_alloc, ofm_alloc, conv_info,
+ ::arm_compute::WeightsInfo(), ::arm_compute::Size2D(1U, 1U), fused_act);
builder.append("Conv2D", std::move(fn));
}
-
- ActivationBuilder{builder}.append(param.activation, ofm_alloc);
};
_builder.addStage(stage);
@@ -1215,14 +1174,18 @@ void Planner::visit(const ::internal::tflite::op::Conv2D::Explicit::Node &node)
auto ker_alloc = ctx.at(::internal::tflite::operand::Index{param.ker_index});
auto bias_alloc = ctx.at(::internal::tflite::operand::Index{param.bias_index});
- const auto conv_info = asPadStringInfo(param.padding, param.stride);
+ const auto conv_info = asPadStrideInfo(param.padding, param.stride);
+ const auto fused_act = asActInfo(param.activation);
if (::internal::arm_compute::isGpuMode())
{
std::unique_ptr<::arm_compute::CLConvolutionLayer> fn{new ::arm_compute::CLConvolutionLayer};
+ // To reach the fused_act parameter, the intervening defaulted arguments, WeightsInfo()
+ // and Size2D(1U, 1U) (dilation), are passed explicitly with their default values.
fn->configure(CAST_CL(ifm_alloc), CAST_CL(ker_alloc), CAST_CL(bias_alloc), CAST_CL(ofm_alloc),
- conv_info);
+ conv_info, ::arm_compute::WeightsInfo(), ::arm_compute::Size2D(1U, 1U),
+ fused_act);
builder.append("Conv2D", std::move(fn));
}
@@ -1230,12 +1193,13 @@ void Planner::visit(const ::internal::tflite::op::Conv2D::Explicit::Node &node)
{
std::unique_ptr<::arm_compute::NEConvolutionLayer> fn{new ::arm_compute::NEConvolutionLayer};
- fn->configure(ifm_alloc, ker_alloc, bias_alloc, ofm_alloc, conv_info);
+ // To reach the fused_act parameter, the intervening defaulted arguments, WeightsInfo()
+ // and Size2D(1U, 1U) (dilation), are passed explicitly with their default values.
+ fn->configure(ifm_alloc, ker_alloc, bias_alloc, ofm_alloc, conv_info,
+ ::arm_compute::WeightsInfo(), ::arm_compute::Size2D(1U, 1U), fused_act);
builder.append("Conv2D", std::move(fn));
}
-
- ActivationBuilder{builder}.append(param.activation, ofm_alloc);
};
_builder.addStage(stage);
@@ -1253,7 +1217,7 @@ void Planner::visit(const ::internal::tflite::op::DepthwiseConv2D::Implicit::Nod
const ::internal::tflite::operand::Index hstride_index{node.param().hstride_index};
const ::internal::tflite::operand::Index padding_index{node.param().padding_index};
- const ::internal::tflite::operand::Index multipler_index{node.param().multipler_index};
+ const ::internal::tflite::operand::Index multiplier_index{node.param().multiplier_index};
const ::internal::tflite::operand::Index activation_index{node.param().activation_index};
const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
@@ -1262,7 +1226,7 @@ void Planner::visit(const ::internal::tflite::op::DepthwiseConv2D::Implicit::Nod
const auto ker_shape = _ctx.at(ker_index).shape().asFeature();
const auto bias_size = _ctx.at(bias_index).shape().asVector();
- auto multiplier = _ctx.at(multipler_index).asScalar<int>();
+ auto multiplier = _ctx.at(multiplier_index).asScalar<int>();
assert(ker_shape.C == bias_size);
assert(ker_shape.C == ifm_shape.C * multiplier);
@@ -1319,7 +1283,7 @@ void Planner::visit(const ::internal::tflite::op::DepthwiseConv2D::Implicit::Nod
param.stride = stride;
param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? same_padding(ifm_shape, stride, ker_shape.W, ker_shape.H)
+ ? same_padding(ifm_shape, ofm_shape, stride, ker_shape.W, ker_shape.H)
: valid_padding();
param.multipler = multiplier;
@@ -1353,7 +1317,7 @@ void Planner::visit(const ::internal::tflite::op::DepthwiseConv2D::Implicit::Nod
auto ker_alloc = ctx.at(::internal::tflite::operand::Index{param.ker_index});
auto bias_alloc = ctx.at(::internal::tflite::operand::Index{param.bias_index});
- const auto conv_info = asPadStringInfo(param.padding, param.stride);
+ const auto conv_info = asPadStrideInfo(param.padding, param.stride);
if (::internal::arm_compute::isGpuMode())
{
@@ -1395,7 +1359,7 @@ void Planner::visit(const ::internal::tflite::op::DepthwiseConv2D::Explicit::Nod
const ::internal::tflite::operand::Index padding_top_index{node.param().padding_top_index};
const ::internal::tflite::operand::Index padding_bottom_index{node.param().padding_bottom_index};
- const ::internal::tflite::operand::Index multipler_index{node.param().multipler_index};
+ const ::internal::tflite::operand::Index multiplier_index{node.param().multiplier_index};
const ::internal::tflite::operand::Index activation_index{node.param().activation_index};
const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
@@ -1404,7 +1368,7 @@ void Planner::visit(const ::internal::tflite::op::DepthwiseConv2D::Explicit::Nod
const auto ker_shape = _ctx.at(ker_index).shape().asFeature();
const auto bias_size = _ctx.at(bias_index).shape().asVector();
- auto multiplier = _ctx.at(multipler_index).asScalar<int>();
+ auto multiplier = _ctx.at(multiplier_index).asScalar<int>();
assert(ker_shape.C == bias_size);
assert(ker_shape.C == ifm_shape.C * multiplier);
@@ -1496,7 +1460,7 @@ void Planner::visit(const ::internal::tflite::op::DepthwiseConv2D::Explicit::Nod
auto ker_alloc = ctx.at(::internal::tflite::operand::Index{param.ker_index});
auto bias_alloc = ctx.at(::internal::tflite::operand::Index{param.bias_index});
- const auto conv_info = asPadStringInfo(param.padding, param.stride);
+ const auto conv_info = asPadStrideInfo(param.padding, param.stride);
if (::internal::arm_compute::isGpuMode())
{
@@ -1560,15 +1524,6 @@ void Planner::visit(const ::internal::tflite::op::Dequantize::Node &node)
std::unique_ptr<::arm_compute::IFunction> fn;
- if (from_env<bool>(std::getenv("USE_SIMPLE_CAST")))
- {
- // Use the CPU version of CAST operation
- auto l = nnfw::cpp14::make_unique<SimpleCastLayer>();
-
- l->configure(input_alloc, output_alloc);
- fn = std::move(l);
- }
- else // Use the OpenCL version of CAST operation
{
if (::internal::arm_compute::isGpuMode())
{
@@ -1652,7 +1607,7 @@ void Planner::visit(const ::internal::tflite::op::MaxPool2D::Implicit::Node &nod
param.stride.horizontal = hstride;
param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? same_padding(ifm_shape, param.stride, kw, kh)
+ ? same_padding(ifm_shape, ofm_shape, param.stride, kw, kh)
: valid_padding();
param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
@@ -1675,7 +1630,7 @@ void Planner::visit(const ::internal::tflite::op::MaxPool2D::Implicit::Node &nod
::arm_compute::PoolingLayerInfo info{::arm_compute::PoolingType::MAX,
::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride)};
+ asPadStrideInfo(param.padding, param.stride)};
if (::internal::arm_compute::isGpuMode())
{
@@ -1793,7 +1748,7 @@ void Planner::visit(const ::internal::tflite::op::MaxPool2D::Explicit::Node &nod
::arm_compute::PoolingLayerInfo info{::arm_compute::PoolingType::MAX,
::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride)};
+ asPadStrideInfo(param.padding, param.stride)};
if (::internal::arm_compute::isGpuMode())
{
@@ -1883,7 +1838,7 @@ void Planner::visit(const ::internal::tflite::op::AvgPool2D::Implicit::Node &nod
param.stride.horizontal = hstride;
param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? same_padding(ifm_shape, param.stride, kw, kh)
+ ? same_padding(ifm_shape, ofm_shape, param.stride, kw, kh)
: valid_padding();
param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
@@ -1908,7 +1863,7 @@ void Planner::visit(const ::internal::tflite::op::AvgPool2D::Implicit::Node &nod
::arm_compute::PoolingLayerInfo info{
::arm_compute::PoolingType::AVG, ::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride), true /* exclude_padding */};
+ asPadStrideInfo(param.padding, param.stride), true /* exclude_padding */};
if (::internal::arm_compute::isGpuMode())
{
@@ -2026,7 +1981,7 @@ void Planner::visit(const ::internal::tflite::op::AvgPool2D::Explicit::Node &nod
::arm_compute::PoolingLayerInfo info{
::arm_compute::PoolingType::AVG, ::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride), true /* exclude_padding */};
+ asPadStrideInfo(param.padding, param.stride), true /* exclude_padding */};
if (::internal::arm_compute::isGpuMode())
{
@@ -2137,7 +2092,7 @@ void Planner::visit(const ::internal::tflite::op::FullyConnected::Node &node)
assert(feature_size == batch_size * input_size);
_builder.addShapeConstr(input_index,
- asTensorInfo(asTensorShape(_ctx.at(input_index).shape()),
+ asTensorInfo(asTensorShape(_ctx.at(input_index).shape(), false),
_ctx.at(input_index).type(), _ctx.at(input_index).scale(),
_ctx.at(input_index).zeroPoint()));
@@ -2202,10 +2157,10 @@ void Planner::visit(const ::internal::tflite::op::FullyConnected::Node &node)
auto weight_alloc = ctx.at(::internal::tflite::operand::Index{param.weight_index});
auto bias_alloc = ctx.at(::internal::tflite::operand::Index{param.bias_index});
- auto fn = nnfw::cpp14::make_unique<GenericFullyConnectedLayer>();
+ auto fn = nnfw::cpp14::make_unique<arm_compute::CLFullyConnectedReshapingLayer>();
- fn->configure(input_alloc, weight_alloc, bias_alloc, output_alloc, needs_reshape,
- asTensorShape(reshape));
+ fn->configure(CAST_CL(input_alloc), CAST_CL(weight_alloc), CAST_CL(bias_alloc),
+ CAST_CL(output_alloc), needs_reshape, asTensorShape(reshape));
builder.append("FullyConnected", std::move(fn));
@@ -2275,8 +2230,8 @@ void Planner::visit(const ::internal::tflite::op::Reshape::Node &node)
const ::internal::tflite::operand::Index output_index{node.param().output_index};
const ::internal::tflite::operand::Index input_index{node.param().input_index};
- auto input_shape = asTensorShape(_ctx.at(input_index).shape());
- auto output_shape = asTensorShape(_ctx.at(output_index).shape());
+ auto input_shape = asTensorShape(_ctx.at(input_index).shape(), false);
+ auto output_shape = asTensorShape(_ctx.at(output_index).shape(), false);
assert(input_shape[0] * input_shape[1] * input_shape[2] * input_shape[3] ==
output_shape[0] * output_shape[1] * output_shape[2] * output_shape[3]);
@@ -2481,7 +2436,15 @@ void Planner::visit(const ::internal::tflite::op::StridedSlice::Node &node)
_ctx.at(stridesData_index).scale(), _ctx.at(stridesData_index).zeroPoint()));
// Set initializers for indices data such as order of inputData
- {
+ int input_rank = _ctx.at(inputData_index).shape().rank();
+ std::vector<int32_t> starts;
+ std::vector<int32_t> ends;
+ std::vector<int32_t> strides;
+ starts.resize(input_rank, 0);
+ ends.resize(input_rank, 0);
+ strides.resize(input_rank, 0);
+ {
+ auto input_shape = _ctx.at(inputData_index).shape();
auto startData_base = _ctx.at(startData_index).data().base();
auto endData_base = _ctx.at(endData_index).data().base();
auto stridesData_base = _ctx.at(stridesData_index).data().base();
@@ -2490,19 +2453,26 @@ void Planner::visit(const ::internal::tflite::op::StridedSlice::Node &node)
const auto stridesData_size = _ctx.at(stridesData_index).shape().asVector();
assert(_ctx.at(startData_index).type() == ANEURALNETWORKS_TENSOR_INT32);
- auto startData_initializer =
- std::bind(initReorderVectorTensor<int32_t>, _1, startData_base, startData_size);
- _builder.addInitializer(startData_index, startData_initializer);
-
assert(_ctx.at(endData_index).type() == ANEURALNETWORKS_TENSOR_INT32);
- auto endData_initializer =
- std::bind(initReorderVectorTensor<int32_t>, _1, endData_base, endData_size);
- _builder.addInitializer(endData_index, endData_initializer);
-
assert(_ctx.at(stridesData_index).type() == ANEURALNETWORKS_TENSOR_INT32);
- auto stridesData_initializer =
- std::bind(initReorderVectorTensor<int32_t>, _1, stridesData_base, stridesData_size);
- _builder.addInitializer(stridesData_index, stridesData_initializer);
+ assert(startData_size == input_rank);
+ assert(endData_size == input_rank);
+ assert(stridesData_size == input_rank);
+
+ assert(startData_base != nullptr);
+ for (uint32_t n = 0; n < input_rank; ++n)
+ {
+ auto axis = ToARMComputeAxis(input_rank, n).value();
+
+ int32_t start_value = *(reinterpret_cast<const int32_t *>(startData_base) + n);
+ starts[axis] = start_value;
+
+ int32_t end_value = *(reinterpret_cast<const int32_t *>(endData_base) + n);
+ ends[axis] = end_value;
+
+ int32_t strides_value = *(reinterpret_cast<const int32_t *>(stridesData_base) + n);
+ strides[axis] = strides_value;
+ }
}
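
ToARMComputeAxis reverses the axis index, since ARM Compute stores dimensions fastest-moving first (W, H, C, N) while NNAPI indexes them as (N, H, W, C). A sketch, assuming the helper is the plain reversal used elsewhere in this runtime:

    #include <cstdint>
    // For rank 4: n = 0 (N) -> 3, n = 1 (H) -> 2, n = 2 (W) -> 1, n = 3 (C) -> 0
    inline uint32_t to_acl_axis(uint32_t rank, uint32_t axis) { return rank - axis - 1; }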
struct Param
@@ -2510,9 +2480,9 @@ void Planner::visit(const ::internal::tflite::op::StridedSlice::Node &node)
int32_t outputData_index;
int32_t inputData_index;
- int32_t startData_index;
- int32_t endData_index;
- int32_t stridesData_index;
+ std::vector<int32_t> starts;
+ std::vector<int32_t> ends;
+ std::vector<int32_t> strides;
int32_t beginMask;
int32_t endMask;
@@ -2523,31 +2493,35 @@ void Planner::visit(const ::internal::tflite::op::StridedSlice::Node &node)
param.outputData_index = outputData_index.asInt();
param.inputData_index = inputData_index.asInt();
- param.startData_index = startData_index.asInt();
- param.endData_index = endData_index.asInt();
- param.stridesData_index = stridesData_index.asInt();
+ param.starts = starts;
+ param.ends = ends;
+ param.strides = strides;
// Set mask bits such as order of inputData
- const auto inputData_rank = _ctx.at(inputData_index).shape().rank();
- param.beginMask = _ctx.at(beginMask_index).asReorderBits<int32_t>(inputData_rank);
- param.endMask = _ctx.at(endMask_index).asReorderBits<int32_t>(inputData_rank);
- param.shrinkAxisMask = _ctx.at(shrinkAxisMask_index).asReorderBits<int32_t>(inputData_rank);
+ param.beginMask = _ctx.at(beginMask_index).asReorderBits<int32_t>(input_rank);
+ param.endMask = _ctx.at(endMask_index).asReorderBits<int32_t>(input_rank);
+ param.shrinkAxisMask = _ctx.at(shrinkAxisMask_index).asReorderBits<int32_t>(input_rank);
auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
auto outputData_alloc = ctx.at(::internal::tflite::operand::Index{param.outputData_index});
auto inputData_alloc = ctx.at(::internal::tflite::operand::Index{param.inputData_index});
- auto startData_alloc = ctx.at(::internal::tflite::operand::Index{param.startData_index});
- auto endData_alloc = ctx.at(::internal::tflite::operand::Index{param.endData_index});
- auto stridesData_alloc = ctx.at(::internal::tflite::operand::Index{param.stridesData_index});
+ ::arm_compute::Coordinates starts;
+ ::arm_compute::Coordinates ends;
+ ::arm_compute::BiStrides strides;
+ for (int i = 0; i < param.starts.size(); ++i)
+ {
+ starts.set(i, param.starts[i]);
+ ends.set(i, param.ends[i]);
+ strides.set(i, param.strides[i]);
+ }
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLStridedSliceEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLStridedSlice>();
- fn->configure(CAST_CL(inputData_alloc), CAST_CL(outputData_alloc), CAST_CL(startData_alloc),
- CAST_CL(endData_alloc), CAST_CL(stridesData_alloc), param.beginMask,
- param.endMask, param.shrinkAxisMask);
+ fn->configure(CAST_CL(inputData_alloc), CAST_CL(outputData_alloc), starts, ends, strides,
+ param.beginMask, param.endMask, param.shrinkAxisMask);
builder.append("StridedSlice", std::move(fn));
}
@@ -2846,15 +2820,6 @@ void Planner::visit(const ::internal::tflite::op::Cast::Node &node)
std::unique_ptr<::arm_compute::IFunction> fn;
- if (from_env<bool>(std::getenv("USE_SIMPLE_CAST")))
- {
- // Use the CPU version of CAST operation
- auto l = nnfw::cpp14::make_unique<SimpleCastLayer>();
-
- l->configure(input_alloc, output_alloc);
- fn = std::move(l);
- }
- else // Use the OpenCL version of CAST operation
{
if (::internal::arm_compute::isGpuMode())
{
@@ -2944,32 +2909,45 @@ void Planner::visit(const ::internal::tflite::op::Gather::Node &node)
{
const ::internal::tflite::operand::Index ofm_index{node.param().ofm_index};
- const ::internal::tflite::operand::Index lhs_index{node.param().lhs_index};
- const ::internal::tflite::operand::Index rhs_index{node.param().rhs_index};
+ const ::internal::tflite::operand::Index ifm_index{node.param().ifm_index};
+ const ::internal::tflite::operand::Index indices_index{node.param().indices_index};
const ::internal::tflite::operand::Index axis_index{node.param().axis_index};
- // Currently, 1D-input and 2D-input are supported.
- assert(_ctx.at(lhs_index).shape().rank() == 1 || _ctx.at(lhs_index).shape().rank() == 2);
- assert(_ctx.at(rhs_index).shape().rank() == 1);
+ const auto ifm_shape = _ctx.at(ifm_index).shape();
+ const auto indices_shape = _ctx.at(indices_index).shape();
+ const auto axis_shape = _ctx.at(axis_index).shape();
+ const auto ofm_shape = _ctx.at(ofm_index).shape();
+
+ assert(ifm_shape.rank() <= 4);
+ assert(indices_shape.rank() <= 3);
+ assert(ofm_shape.rank() <= 4);
+ assert(_ctx.at(axis_index).hasData());
+ assert(axis_shape.rank() == 0);
// Set Shape Constraints
+ _builder.addShapeConstr(ofm_index,
+ asTensorInfo(asTensorShape(_ctx.at(ofm_index).shape(), false),
+ _ctx.at(ofm_index).type(), _ctx.at(ofm_index).scale(),
+ _ctx.at(ofm_index).zeroPoint()));
+ _builder.addShapeConstr(ifm_index,
+ asTensorInfo(asTensorShape(_ctx.at(ifm_index).shape(), false),
+ _ctx.at(ifm_index).type(), _ctx.at(ifm_index).scale(),
+ _ctx.at(ifm_index).zeroPoint()));
_builder.addShapeConstr(
- ofm_index, asTensorInfo(asTensorShape(_ctx.at(ofm_index).shape()), _ctx.at(ofm_index).type(),
- _ctx.at(ofm_index).scale(), _ctx.at(ofm_index).zeroPoint()));
- _builder.addShapeConstr(
- lhs_index, asTensorInfo(asTensorShape(_ctx.at(lhs_index).shape()), _ctx.at(lhs_index).type(),
- _ctx.at(lhs_index).scale(), _ctx.at(lhs_index).zeroPoint()));
- _builder.addShapeConstr(
- rhs_index, asTensorInfo(asTensorShape(_ctx.at(rhs_index).shape()), _ctx.at(rhs_index).type(),
- _ctx.at(rhs_index).scale(), _ctx.at(ofm_index).zeroPoint()));
+ indices_index, asTensorInfo(asTensorShape(_ctx.at(indices_index).shape(), false),
+ _ctx.at(indices_index).type(), _ctx.at(indices_index).scale(),
+ _ctx.at(indices_index).zeroPoint()));
+
+ const int32_t axis_value = static_cast<int>(_ctx.at(axis_index).asScalar<int32_t>());
+ const int axis = ToARMComputeAxis(ifm_shape.rank(), axis_value).value();
// Construct operation parameters
struct Param
{
int ofm_index;
- int lhs_index;
- int rhs_index;
+ int ifm_index;
+ int indices_index;
int axis;
};
@@ -2977,22 +2955,22 @@ void Planner::visit(const ::internal::tflite::op::Gather::Node &node)
Param param;
param.ofm_index = ofm_index.asInt();
- param.lhs_index = lhs_index.asInt();
- param.rhs_index = rhs_index.asInt();
+ param.ifm_index = ifm_index.asInt();
+ param.indices_index = indices_index.asInt();
- param.axis = static_cast<int>(_ctx.at(axis_index).asScalar<int32_t>());
+ param.axis = axis;
auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
- auto lhs_alloc = ctx.at(::internal::tflite::operand::Index{param.lhs_index});
- auto rhs_alloc = ctx.at(::internal::tflite::operand::Index{param.rhs_index});
+ auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
+ auto indices_alloc = ctx.at(::internal::tflite::operand::Index{param.indices_index});
if (::internal::arm_compute::isGpuMode())
{
std::unique_ptr<::arm_compute::IFunction> fn;
- auto l = nnfw::cpp14::make_unique<::arm_compute::CLGather>();
- l->configure(CAST_CL(lhs_alloc), CAST_CL(rhs_alloc), CAST_CL(ofm_alloc));
+ auto l = nnfw::cpp14::make_unique<GenericGather>();
+ l->configure(CAST_CL(ifm_alloc), CAST_CL(indices_alloc), CAST_CL(ofm_alloc), param.axis);
fn = std::move(l);
builder.append("Gather", std::move(fn));
}
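The new Gather path converts the NNAPI axis to ACL's layout before configuring GenericGather. Assuming ToARMComputeAxis mirrors the index (the outermost NNAPI axis becomes the innermost ACL axis), a standalone sketch of the mapping:

    #include <cassert>
    #include <cstdint>

    // Assumed ToARMComputeAxis behaviour: the axis index is mirrored.
    uint32_t to_acl_axis(uint32_t rank, uint32_t axis) { return rank - axis - 1; }

    int main()
    {
      // Gathering along axis 1 (H) of a rank-4 NHWC tensor uses ACL axis 2.
      assert(to_acl_axis(4, 1) == 2);
      return 0;
    }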
@@ -3011,10 +2989,20 @@ void Planner::visit(const ::internal::tflite::op::PReLU::Node &node)
const ::internal::tflite::operand::Index ifm_index{node.param().ifm_index};
const ::internal::tflite::operand::Index alpha_index{node.param().alpha_index};
- // Set shape constraints
+ // Set Shape Constraints and TensorInfo
_builder.addShapeConstr(
ofm_index, asTensorInfo(asTensorShape(_ctx.at(ofm_index).shape()), _ctx.at(ofm_index).type(),
_ctx.at(ofm_index).scale(), _ctx.at(ofm_index).zeroPoint()));
+
+ if (!(_ctx.at(ifm_index).shape() == _ctx.at(alpha_index).shape()))
+ {
+ const auto broadcast_rank =
+ std::max(_ctx.at(ifm_index).shape().rank(), _ctx.at(alpha_index).shape().rank());
+ const_cast<::internal::tflite::operand::Shape &>(_ctx.at(ifm_index).shape())
+ .extendRank(broadcast_rank);
+ const_cast<::internal::tflite::operand::Shape &>(_ctx.at(alpha_index).shape())
+ .extendRank(broadcast_rank);
+ }
_builder.addShapeConstr(
ifm_index, asTensorInfo(asTensorShape(_ctx.at(ifm_index).shape()), _ctx.at(ifm_index).type(),
_ctx.at(ifm_index).scale(), _ctx.at(ifm_index).zeroPoint()));
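The PReLU broadcast handling above equalises the ranks of ifm and alpha before their shape constraints are added. Assuming extendRank prepends size-1 dimensions (the usual broadcast convention), a sketch of the effect:

    #include <cassert>
    #include <vector>

    // Prepend size-1 dimensions until the requested rank is reached
    // (assumed extendRank semantics; illustration only).
    std::vector<int> extend_rank(std::vector<int> shape, size_t rank)
    {
      shape.insert(shape.begin(), rank - shape.size(), 1);
      return shape;
    }

    int main()
    {
      // ifm [2,3,4] with a rank-1 alpha [4]: alpha is extended to [1,1,4].
      auto alpha = extend_rank({4}, 3);
      assert((alpha == std::vector<int>{1, 1, 4}));
      return 0;
    }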
@@ -3430,6 +3418,7 @@ void Planner::visit(const ::internal::tflite::op::Mean::Node &node)
{
int ofm_index;
int ifm_index;
+ bool keep_dims;
std::set<uint32_t> axis;
};
@@ -3437,18 +3426,25 @@ void Planner::visit(const ::internal::tflite::op::Mean::Node &node)
param.ofm_index = ofm_index.asInt();
param.ifm_index = ifm_index.asInt();
+ param.keep_dims = keep_dims > 0;
param.axis = axis;
auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
+ ::arm_compute::Coordinates reduction_axis;
+ size_t i = 0;
+ for (auto index : param.axis)
+ {
+ reduction_axis.set(i++, index);
+ }
+
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLReduceOperation>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLReduceMean>();
- fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc), param.axis,
- ::arm_compute::ReduceOperation::MEAN);
+ fn->configure(CAST_CL(ifm_alloc), reduction_axis, param.keep_dims, CAST_CL(ofm_alloc));
builder.append("Mean", std::move(fn));
}
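The switch to CLReduceMean makes keep_dims explicit instead of folding the mean into a generic reduce operation. A standalone illustration of what keep_dims means for the output shape (not the runtime's code):

    #include <cassert>
    #include <set>
    #include <vector>

    std::vector<int> reduced_shape(const std::vector<int> &in, const std::set<int> &axes,
                                   bool keep_dims)
    {
      std::vector<int> out;
      for (int i = 0; i < static_cast<int>(in.size()); ++i)
      {
        if (axes.count(i))
        {
          if (keep_dims)
            out.push_back(1); // reduced axis kept as size 1
        }
        else
          out.push_back(in[i]);
      }
      return out;
    }

    int main()
    {
      // Mean over H and W of a [1,16,16,8] tensor.
      assert((reduced_shape({1, 16, 16, 8}, {1, 2}, true) == std::vector<int>{1, 1, 1, 8}));
      assert((reduced_shape({1, 16, 16, 8}, {1, 2}, false) == std::vector<int>{1, 8}));
      return 0;
    }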
@@ -3640,7 +3636,7 @@ void Planner::visit(const ::internal::tflite::op::Transpose::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPermuteEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPermute>();
fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc),
getARMComputePermutationVector(param.rank, param.pv));
@@ -3723,7 +3719,7 @@ void Planner::visit(const ::internal::tflite::op::ArgMax::Node &node)
assert(_ctx.at(axis_index).hasData());
// Axis dimension is always 1.
assert(axis_shape.rank() == 1);
- assert(ifm_shape.rank() == ofm_shape.rank());
+ assert((ifm_shape.rank() - 1) == ofm_shape.rank());
_builder.addShapeConstr(ofm_index, asTensorInfo(asTensorShape(_ctx.at(ofm_index).shape(), false),
_ctx.at(ofm_index).type()));
@@ -3766,21 +3762,11 @@ void Planner::visit(const ::internal::tflite::op::ArgMax::Node &node)
auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
- if (from_env<bool>(std::getenv("USE_SIMPLE_ARGMINMAX")))
- {
- // USE CPU VERSION OF ARGMAX
- auto fn = nnfw::cpp14::make_unique<SimpleArgMinMax>();
-
- fn->configure(ifm_alloc, ofm_alloc, param.axis, ::arm_compute::ArgOperation::MAX);
-
- builder.append("ArgMax", std::move(fn));
- }
- else
{
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArgMinMax>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLArgOperation>();
fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc), param.axis,
::arm_compute::ArgOperation::MAX);
@@ -3830,16 +3816,6 @@ void Planner::visit(const ::internal::tflite::op::SQRT::Node &node)
const ::arm_compute::ActivationLayerInfo act_info{
::arm_compute::ActivationLayerInfo::ActivationFunction::SQRT};
- if (from_env<bool>(std::getenv("USE_SIMPLE_SQRT")))
- {
- // USE CPU VERSION OF SQRT
- auto fn = nnfw::cpp14::make_unique<SimpleSQRT>();
-
- fn->configure(input_alloc, output_alloc);
-
- builder.append("SQRT", std::move(fn));
- }
- else
{
if (::internal::arm_compute::isGpuMode())
{
@@ -3895,14 +3871,11 @@ void Planner::visit(const ::internal::tflite::op::RSQRT::Node &node)
auto output_alloc = ctx.at(::internal::tflite::operand::Index{param.output_index});
auto input_alloc = ctx.at(::internal::tflite::operand::Index{param.input_index});
- const ::arm_compute::ActivationLayerInfoEx act_info{
- ::arm_compute::ActivationLayerInfoEx::ActivationFunction::RSQRT};
-
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayerEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLRsqrtLayer>();
- fn->configure(CAST_CL(input_alloc), CAST_CL(output_alloc), act_info);
+ fn->configure(CAST_CL(input_alloc), CAST_CL(output_alloc));
builder.append("RSQRT", std::move(fn));
}
@@ -3963,10 +3936,10 @@ void Planner::visit(const ::internal::tflite::op::Equal::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLComparisonOp>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLComparison>();
fn->configure(CAST_CL(input1_alloc), CAST_CL(input2_alloc), CAST_CL(output_alloc),
- ::arm_compute::ComparisonOperation::EQUAL);
+ ::arm_compute::ComparisonOperation::Equal);
builder.append("Equal", std::move(fn));
}
@@ -4037,6 +4010,8 @@ void Planner::visit(const ::internal::tflite::op::TransposeConv::Node &node)
int ker_index;
Padding padding;
Stride stride;
+ uint32_t invalid_horizontal;
+ uint32_t invalid_vertical;
};
Param param;
@@ -4049,24 +4024,41 @@ void Planner::visit(const ::internal::tflite::op::TransposeConv::Node &node)
param.stride.vertical = vstride;
param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? same_padding(ifm_shape, param.stride, ker_shape.W, ker_shape.H)
+ ? same_padding(ofm_shape, ifm_shape, param.stride, ker_shape.W, ker_shape.H)
: valid_padding();
+ param.invalid_horizontal =
+ (padding_type == ANEURALNETWORKS_PADDING_SAME)
+ ? 0
+ : ofm_shape.W - (1 + (ifm_shape.W - 1) * hstride) - (ker_shape.W - 1);
+ param.invalid_vertical =
+ (padding_type == ANEURALNETWORKS_PADDING_SAME)
+ ? 0
+ : ofm_shape.H - (1 + (ifm_shape.H - 1) * param.stride.vertical) - (ker_shape.H - 1);
+
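The invalid_horizontal/invalid_vertical values measure how far the requested output extends beyond the minimal transposed-convolution extent under VALID padding. A numeric check of that formula:

    #include <cassert>
    #include <cstdint>

    uint32_t invalid_border(uint32_t ofm, uint32_t ifm, uint32_t stride, uint32_t ker)
    {
      // ofm - (minimal extent) - (kernel overhang), as in the hunk above
      return ofm - (1 + (ifm - 1) * stride) - (ker - 1);
    }

    int main()
    {
      // ifm W=4, stride 2, kernel W=3: the minimal extent is 1+(4-1)*2 = 7 plus
      // an overhang of 2, so an output width of 10 leaves 1 invalid column.
      assert(invalid_border(10, 4, 2, 3) == 1);
      return 0;
    }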
auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
auto ker_alloc = ctx.at(::internal::tflite::operand::Index{param.ker_index});
- auto fn = nnfw::cpp14::make_unique<SimpleTransposeConv>();
-
// Only rank 4 is supported
const int rank = 4;
- auto tconv_info = asPadStringInfo(param.padding, param.stride);
+ if (::internal::arm_compute::isGpuMode())
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLTransposeConvLayer>();
- fn->configure(ifm_alloc, ker_alloc, ofm_alloc, tconv_info, getARMComputeAxises(rank));
+ auto symmetric_tconv_info = asPadStrideInfo(param.padding, param.stride);
- builder.append("TransposeConv", std::move(fn));
+ // TODO Support WeightInfo in some cases in order to improve performance
+ fn->configure(CAST_CL(ifm_alloc), CAST_CL(ker_alloc), nullptr, CAST_CL(ofm_alloc),
+ symmetric_tconv_info, param.invalid_horizontal, param.invalid_vertical);
+ builder.append("TransposeConv", std::move(fn));
+ }
+ else
+ {
+ throw std::runtime_error("Not supported, yet");
+ }
};
_builder.addStage(stage);
}
@@ -4119,7 +4111,7 @@ void Planner::visit(const ::internal::tflite::op::SquaredDifference::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLSquaredDifference>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLElementwiseSquaredDiff>();
fn->configure(CAST_CL(lhs_alloc), CAST_CL(rhs_alloc), CAST_CL(ofm_alloc));
builder.append("SquaredDifference", std::move(fn));
@@ -4204,62 +4196,58 @@ void Planner::visit(const ::internal::tflite::op::Pad::Node &node)
_ctx.at(paddings_index).type(), _ctx.at(paddings_index).scale(),
_ctx.at(paddings_index).zeroPoint()));
- // initializer for padding
- {
- auto pad_type = _ctx.at(paddings_index).type();
-
- if (pad_type == ANEURALNETWORKS_TENSOR_INT32)
- {
- auto pad_base = _ctx.at(paddings_index).data().base();
- auto pad_size = _ctx.at(paddings_index).data().size();
- auto pad_shape = _ctx.at(paddings_index).shape().asMatrix();
-
- // Supported padding for height and width only.
- auto initializer = std::bind(initMatrixTensor<int32_t>, _1, pad_shape, pad_base, pad_size);
- _builder.addInitializer(paddings_index, initializer);
- }
- else
- {
- throw std::runtime_error("Only Int32 datatype is supported for Pad values");
- }
- }
-
// Construct operation parameters
struct Param
{
int ofm_index;
int ifm_index;
- int padding_index;
+ ::arm_compute::PixelValue pixel_value;
+ ::arm_compute::PaddingList padding_list;
};
Param param;
param.ofm_index = ofm_index.asInt();
param.ifm_index = ifm_index.asInt();
- param.padding_index = paddings_index.asInt();
- auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
- auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
- auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
- auto pad_alloc = ctx.at(::internal::tflite::operand::Index{param.padding_index});
+ // initializer for padding
+ auto rank = _ctx.at(ifm_index).shape().rank();
+ auto pad_type = _ctx.at(paddings_index).type();
- if (from_env<bool>(std::getenv("USE_SIMPLE_PAD")))
- {
- // USE CPU VERSION OF PADLAYER
- auto rank = 4;
- auto fn = nnfw::cpp14::make_unique<SimplePadLayer>();
+ if (pad_type == ANEURALNETWORKS_TENSOR_INT32)
+ {
+ auto pad_base = _ctx.at(paddings_index).data().base();
+ auto pad_shape = _ctx.at(paddings_index).shape();
- fn->configure(ifm_alloc, ofm_alloc, pad_alloc, getARMComputeAxises(rank));
+ param.padding_list.resize(rank);
+ for (int32_t n = 0; n < rank; ++n)
+ {
+ const int32_t *from = reinterpret_cast<const int32_t *>(pad_base) + (n * pad_shape.dim(1));
+ auto axis = ToARMComputeAxis(rank, n).value();
- builder.append("PAD", std::move(fn));
+ param.padding_list[axis] = ::arm_compute::PaddingInfo{from[0], from[1]};
}
- else
+ auto data_type = asDataType(_ctx.at(ifm_index).type());
+ auto quant_info =
+ asQuantizationInfo(_ctx.at(ifm_index).scale(), _ctx.at(ifm_index).zeroPoint());
+ param.pixel_value = ::arm_compute::PixelValue{0, data_type, quant_info};
+ }
+ else
+ {
+ throw std::runtime_error("Only Int32 datatype is supported for Pad values");
+ }
+
+ auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
+ auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
+ auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
+
{
if (::internal::arm_compute::isGpuMode()) // GPU
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPadLayerEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLPadLayer>();
- fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc), CAST_CL(pad_alloc));
+ fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc), param.padding_list,
+ param.pixel_value);
builder.append("PAD", std::move(fn));
}
@@ -4323,17 +4311,6 @@ void Planner::visit(const ::internal::tflite::op::SpaceToDepth::Node &node)
auto output_alloc = ctx.at(::internal::tflite::operand::Index{param.output_index});
auto input_alloc = ctx.at(::internal::tflite::operand::Index{param.input_index});
- if (from_env<bool>(std::getenv("USE_SIMPLE_SPACETODEPTH")))
- {
- // USE CPU VERSION OF SPACETODEPTH
- auto rank = 4;
- auto fn = nnfw::cpp14::make_unique<SimpleSpaceToDepth>();
-
- fn->configure(input_alloc, output_alloc, param.block_size, getARMComputeAxises(rank));
-
- builder.append("SpaceToDepth", std::move(fn));
- }
- else
{
if (::internal::arm_compute::isGpuMode()) // GPU
{
@@ -4361,25 +4338,39 @@ void Planner::visit(const ::internal::tflite::op::SpaceToBatchND::Node &node)
const ::internal::tflite::operand::Index block_size_index{node.param().block_size_index};
const ::internal::tflite::operand::Index padding_size_index{node.param().padding_size_index};
+ const auto &output_shape = _ctx.at(output_index).shape();
+ const auto &input_shape = _ctx.at(input_index).shape();
+ const auto &padding_size_shape = _ctx.at(padding_size_index).shape();
+ auto block_size_base = reinterpret_cast<const int32_t *>(_ctx.at(block_size_index).data().base());
+ auto padding_size_base =
+ reinterpret_cast<const int32_t *>(_ctx.at(padding_size_index).data().base());
+
{ // New block for assertions
+ const auto &block_size_shape = _ctx.at(block_size_index).shape();
// Currently, only 4D NHWC input/output op_context are supported.
// The 4D array need to have exactly 2 spatial dimensions.
// TODO: Support arbitrary dimension in SpaceToBatchND.
- assert(_ctx.at(input_index).shape().rank() == 4);
- assert(_ctx.at(output_index).shape().rank() == 4);
- assert(_ctx.at(block_size_index).shape().rank() == 1);
- assert(_ctx.at(padding_size_index).shape().rank() == 2);
-
- const auto &output_shape = _ctx.at(output_index).shape();
- const auto &input_shape = _ctx.at(input_index).shape();
- const auto &block_size_shape = _ctx.at(block_size_index).shape();
- const auto &padding_size_shape = _ctx.at(padding_size_index).shape();
+ assert(input_shape.rank() == 4);
+ assert(output_shape.rank() == 4);
+ assert(block_size_shape.rank() == 1);
+ assert(padding_size_shape.rank() == 2);
assert(output_shape.dim(3) == input_shape.dim(3));
assert(block_size_shape.dim(0) == 2);
assert(padding_size_shape.dim(0) == 2);
assert(padding_size_shape.dim(1) == 2);
+
+ assert(_ctx.at(block_size_index).hasData() && _ctx.at(padding_size_index).hasData());
+ assert(_ctx.at(block_size_index).type() == ANEURALNETWORKS_TENSOR_INT32);
+ assert(_ctx.at(padding_size_index).type() == ANEURALNETWORKS_TENSOR_INT32);
+
+ assert(block_size_base[0] > 0 && block_size_base[1] > 0);
+ assert(output_shape.dim(0) == input_shape.dim(0) * block_size_base[0] * block_size_base[1]);
+ assert(output_shape.dim(1) ==
+ (input_shape.dim(1) + padding_size_base[0] + padding_size_base[1]) / block_size_base[0]);
+ assert(output_shape.dim(2) ==
+ (input_shape.dim(2) + padding_size_base[2] + padding_size_base[3]) / block_size_base[1]);
}
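A numeric check of the new SpaceToBatchND shape assertions, spelling out how the batch grows and the spatial dimensions shrink:

    #include <cassert>

    int main()
    {
      const int in[4] = {1, 4, 6, 8};  // N, H, W, C
      const int block[2] = {2, 3};     // block sizes for H and W
      const int pad[4] = {0, 0, 0, 0}; // (top, bottom, left, right)
      assert(in[0] * block[0] * block[1] == 6);          // output N
      assert((in[1] + pad[0] + pad[1]) / block[0] == 2); // output H
      assert((in[2] + pad[2] + pad[3]) / block[1] == 2); // output W
      return 0;
    }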
// Set Shape Constraints and TensorInfo
@@ -4404,72 +4395,31 @@ void Planner::visit(const ::internal::tflite::op::SpaceToBatchND::Node &node)
_ctx.at(padding_size_index).scale(),
_ctx.at(padding_size_index).zeroPoint()));
- if (_ctx.at(block_size_index).hasData())
- {
- const auto rank = _ctx.at(input_index).shape().rank();
- const auto num_of_block_size = _ctx.at(block_size_index).shape().asVector();
- auto block_size_base = _ctx.at(block_size_index).data().base();
- auto block_size_type = _ctx.at(block_size_index).type();
-
- switch (block_size_type)
- {
- case ANEURALNETWORKS_TENSOR_INT32:
- {
- auto initializer = [block_size_base, num_of_block_size,
- rank](::arm_compute::ITensor &tensor) {
- assert(num_of_block_size < 4);
- for (size_t n = 0; n < num_of_block_size; ++n)
- {
- const int32_t *from = reinterpret_cast<const int32_t *>(block_size_base) + n;
- int32_t *into = reinterpret_cast<int32_t *>(
- tensor.ptr_to_element({ToARMComputeAxis(rank, n + 1).value()}));
- *into = *from;
- }
- };
- _builder.addInitializer(block_size_index, initializer);
+ { // Append block_size initializer
+ auto initializer = [block_size_base](::arm_compute::ITensor &tensor) {
+ const auto block_size_y = block_size_base[0];
+ const auto block_size_x = block_size_base[1];
- break;
- }
- default:
- {
- throw std::runtime_error("Not supported");
- }
- }
+ auto into = reinterpret_cast<int32_t *>(tensor.ptr_to_element({0}));
+ into[0] = block_size_x;
+ into[1] = block_size_y;
+ };
+ _builder.addInitializer(block_size_index, initializer);
}
- if (_ctx.at(padding_size_index).hasData())
- {
- const auto padding_size_shape = _ctx.at(padding_size_index).shape();
- const auto rank = _ctx.at(input_index).shape().rank();
- auto padding_size_base = _ctx.at(padding_size_index).data().base();
- auto padding_size_type = _ctx.at(padding_size_index).type();
-
- switch (padding_size_type)
- {
- case ANEURALNETWORKS_TENSOR_INT32:
- {
- auto initializer = [padding_size_base, padding_size_shape,
- rank](::arm_compute::ITensor &tensor) {
- assert(padding_size_shape.dim(1) == 2);
- assert(padding_size_shape.dim(0) < 4);
- for (size_t n = 0; n < padding_size_shape.dim(0); ++n)
- {
- const int32_t *from = reinterpret_cast<const int32_t *>(padding_size_base) +
- (n * padding_size_shape.dim(1));
- int32_t *into = reinterpret_cast<int32_t *>(
- tensor.ptr_to_element({0, ToARMComputeAxis(rank, n + 1).value()}));
- into[0] = from[0];
- into[1] = from[1];
- }
- };
- _builder.addInitializer(padding_size_index, initializer);
- break;
- }
- default:
+ { // Append padding_size initializer
+ auto initializer = [padding_size_base, padding_size_shape](::arm_compute::ITensor &tensor) {
+ // If n == 0, then the axis is the height
+ // If n == 1, then the axis is the width
+ for (size_t n = 0; n < padding_size_shape.dim(0); ++n)
{
- throw std::runtime_error("Not supported");
+ const auto from = padding_size_base + (n * padding_size_shape.dim(1));
+ auto into = reinterpret_cast<int32_t *>(tensor.ptr_to_element({0, 1 - n}));
+ into[0] = from[0];
+ into[1] = from[1];
}
- }
+ };
+ _builder.addInitializer(padding_size_index, initializer);
}
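Both initializers write in ACL element order rather than NNAPI order: the block sizes arrive as {H, W} but are stored as {W, H}, and padding row n (0 = height, 1 = width) goes to tensor row 1 - n. A minimal sketch of the swap:

    #include <cassert>
    #include <cstdint>

    int main()
    {
      const int32_t block_size_base[2] = {2, 3}; // {block_h, block_w} from NNAPI
      int32_t into[2];                           // tensor storage, W-major
      into[0] = block_size_base[1];
      into[1] = block_size_base[0];
      assert(into[0] == 3 && into[1] == 2);
      return 0;
    }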
// Construct operation parameters
@@ -4496,15 +4446,7 @@ void Planner::visit(const ::internal::tflite::op::SpaceToBatchND::Node &node)
auto block_size_alloc = ctx.at(::internal::tflite::operand::Index{param.block_size_index});
auto padding_size_alloc = ctx.at(::internal::tflite::operand::Index{param.padding_size_index});
- // NOTE SimpleSpaceToBatchND is quite slow
- if (from_env<bool>(std::getenv("USE_SIMPLE_SPACE_TO_BATCH_ND")))
- {
- auto fn = nnfw::cpp14::make_unique<SimpleSpaceToBatchND>();
-
- fn->configure(input_alloc, block_size_alloc, padding_size_alloc, output_alloc);
- builder.append("SpaceToBatchND", std::move(fn));
- }
- else if (::internal::arm_compute::isGpuMode())
+ if (::internal::arm_compute::isGpuMode())
{
auto fn = nnfw::cpp14::make_unique<::arm_compute::CLSpaceToBatchND>();
@@ -4530,6 +4472,7 @@ void Planner::visit(const ::internal::tflite::op::BatchToSpaceNd::Node &node)
assert(_ctx.at(input_index).shape().rank() == 4);
assert(_ctx.at(output_index).shape().rank() == 4);
+ assert(_ctx.at(block_size_index).shape().rank() == 1);
assert(_ctx.at(block_size_index).hasData() == true);
const int32_t *block_size =
@@ -4537,15 +4480,7 @@ void Planner::visit(const ::internal::tflite::op::BatchToSpaceNd::Node &node)
const auto &output_shape = _ctx.at(output_index).shape();
const auto &input_shape = _ctx.at(input_index).shape();
-
- assert((_ctx.at(block_size_index).data().size() / sizeof(int32_t)) == 2 && block_size[0] > 0 &&
- block_size[1] > 0);
- {
- assert(output_shape.dim(3) == input_shape.dim(3));
- assert(output_shape.dim(1) == input_shape.dim(1) * block_size[0]);
- assert(output_shape.dim(2) == input_shape.dim(2) * block_size[1]);
- assert(output_shape.dim(0) == input_shape.dim(0) / (block_size[0] * block_size[1]));
- }
+ const auto &block_size_shape = _ctx.at(block_size_index).shape();
// Set Shape Constraints and TensorInfo
_builder.addShapeConstr(
@@ -4555,11 +4490,40 @@ void Planner::visit(const ::internal::tflite::op::BatchToSpaceNd::Node &node)
input_index, asTensorInfo(asTensorShape(input_shape, false), _ctx.at(input_index).type(),
_ctx.at(input_index).scale(), _ctx.at(input_index).zeroPoint()));
+ _builder.addShapeConstr(block_size_index, asTensorInfo(asTensorShape(block_size_shape),
+ _ctx.at(block_size_index).type(),
+ _ctx.at(block_size_index).scale(),
+ _ctx.at(block_size_index).zeroPoint()));
+
+ // initializer for block_size
+ {
+ const auto block_size_base =
+ reinterpret_cast<const int32_t *>(_ctx.at(block_size_index).data().base());
+
+ assert(output_shape.dim(3) == input_shape.dim(3));
+ assert(output_shape.dim(1) == input_shape.dim(1) * block_size_base[0]);
+ assert(output_shape.dim(2) == input_shape.dim(2) * block_size_base[1]);
+ assert(output_shape.dim(0) == input_shape.dim(0) / (block_size_base[0] * block_size_base[1]));
+ assert(_ctx.at(block_size_index).type() == ANEURALNETWORKS_TENSOR_INT32);
+
+ assert((_ctx.at(block_size_index).data().size() / sizeof(int32_t)) == 2 &&
+ block_size_base[0] > 0 && block_size_base[1] > 0);
+
+ auto initializer = [block_size_base](::arm_compute::ITensor &tensor) {
+ const int32_t *from = reinterpret_cast<const int32_t *>(block_size_base);
+ int32_t *into = reinterpret_cast<int32_t *>(tensor.ptr_to_element({0}));
+ into[0] = from[1];
+ into[1] = from[0];
+ };
+ _builder.addInitializer(block_size_index, initializer);
+ }
+
// Construct operation parameters
struct Param
{
int output_index;
int input_index;
+ int block_size_index;
const int32_t *block_size;
int32_t rank;
};
@@ -4568,26 +4532,20 @@ void Planner::visit(const ::internal::tflite::op::BatchToSpaceNd::Node &node)
param.output_index = output_index.asInt();
param.input_index = input_index.asInt();
+ param.block_size_index = block_size_index.asInt();
param.block_size = block_size;
param.rank = _ctx.at(input_index).shape().rank();
auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
auto output_alloc = ctx.at(::internal::tflite::operand::Index{param.output_index});
auto input_alloc = ctx.at(::internal::tflite::operand::Index{param.input_index});
+ auto block_size_alloc = ctx.at(::internal::tflite::operand::Index{param.block_size_index});
- // NOTE SimpleBatchToSpaceND is quite slow, but may be useful for debugging
- if (from_env<bool>(std::getenv("USE_SIMPLE_BATCH_TO_SPACE_ND")))
- {
- auto fn = nnfw::cpp14::make_unique<SimpleBatchToSpaceND>();
-
- fn->configure(input_alloc, output_alloc, param.block_size, getARMComputeAxises(param.rank));
- builder.append("BatchToSpaceND", std::move(fn));
- }
- else if (::internal::arm_compute::isGpuMode())
+ if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLBatchToSpaceND>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLBatchToSpaceLayer>();
- fn->configure(CAST_CL(input_alloc), CAST_CL(output_alloc), param.block_size);
+ fn->configure(CAST_CL(input_alloc), CAST_CL(block_size_alloc), CAST_CL(output_alloc));
builder.append("BatchToSpaceND", std::move(fn));
}
else
@@ -4595,7 +4553,6 @@ void Planner::visit(const ::internal::tflite::op::BatchToSpaceNd::Node &node)
// TODO Enable NEON Support
throw std::runtime_error("Not supported, yet");
}
-
};
_builder.addStage(stage);
@@ -4651,7 +4608,7 @@ void Planner::visit(const ::internal::tflite::op::L2Normalization::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNormalizationLayerEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNormalizationLayer>();
fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc), norm_info);
@@ -4659,7 +4616,7 @@ void Planner::visit(const ::internal::tflite::op::L2Normalization::Node &node)
}
else
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::NENormalizationLayerEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NENormalizationLayer>();
fn->configure(ifm_alloc, ofm_alloc, norm_info);
@@ -4733,7 +4690,7 @@ void Planner::visit(const ::internal::tflite::op::L2Pool2D::Implicit::Node &node
param.stride.horizontal = hstride;
param.padding = (padding_type == ANEURALNETWORKS_PADDING_SAME)
- ? same_padding(ifm_shape, param.stride, kw, kh)
+ ? same_padding(ifm_shape, ofm_shape, param.stride, kw, kh)
: valid_padding();
param.activation = static_cast<FuseCode>(_ctx.at(activation_index).asScalar<int32_t>());
@@ -4743,7 +4700,7 @@ void Planner::visit(const ::internal::tflite::op::L2Pool2D::Implicit::Node &node
::arm_compute::PoolingLayerInfo info{::arm_compute::PoolingType::L2,
::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride)};
+ asPadStrideInfo(param.padding, param.stride)};
if (::internal::arm_compute::isGpuMode())
{
@@ -4843,7 +4800,7 @@ void Planner::visit(const ::internal::tflite::op::L2Pool2D::Explicit::Node &node
::arm_compute::PoolingLayerInfo info{::arm_compute::PoolingType::L2,
::arm_compute::Size2D{param.kw, param.kh},
- asPadStringInfo(param.padding, param.stride)};
+ asPadStrideInfo(param.padding, param.stride)};
if (::internal::arm_compute::isGpuMode())
{
@@ -4878,7 +4835,7 @@ void Planner::visit(const ::internal::tflite::op::EmbeddingLookup::Node &node)
const auto &lookups_obj = _ctx.at(lookups_index);
const auto &values_obj = _ctx.at(values_index);
- // Verify operand here, not at SimpleEmbeddingLookup::configure() to avoid acl's modifying
+ // Verify operand here, not in configure(), to avoid ACL occasionally modifying
// TensorShape (Issue: https://github.sec.samsung.net/STAR/nnfw/issues/729)
{
assert(lookups_obj.type() == ANEURALNETWORKS_TENSOR_INT32);
@@ -4930,15 +4887,7 @@ void Planner::visit(const ::internal::tflite::op::EmbeddingLookup::Node &node)
auto lookups_alloc = ctx.at(::internal::tflite::operand::Index{param.lookups_index});
auto values_alloc = ctx.at(::internal::tflite::operand::Index{param.values_index});
- if (from_env<bool>(std::getenv("USE_SIMPLE_EMBEDDINGLOOKUP")))
- {
- auto fn = nnfw::cpp14::make_unique<SimpleEmbeddingLookup>();
-
- fn->configure(lookups_alloc, values_alloc, output_alloc);
-
- builder.append("EmbeddingLookup", std::move(fn));
- }
- else if (::internal::arm_compute::isGpuMode())
+ if (::internal::arm_compute::isGpuMode())
{
auto fn = nnfw::cpp14::make_unique<::arm_compute::CLEmbeddingLookup>();
@@ -5034,15 +4983,7 @@ void Planner::visit(const ::internal::tflite::op::HashtableLookup::Node &node)
auto values_alloc = ctx.at(::internal::tflite::operand::Index{param.values_index});
auto keys_alloc = ctx.at(::internal::tflite::operand::Index{param.keys_index});
- if (from_env<bool>(std::getenv("USE_SIMPLE_HASHTABLELOOKUP")))
- {
- auto fn = nnfw::cpp14::make_unique<SimpleHashtableLookupLayer>();
-
- fn->configure(lookups_alloc, keys_alloc, values_alloc, output_alloc, hits_alloc);
-
- builder.append("HashtableLookup", std::move(fn));
- }
- else if (::internal::arm_compute::isGpuMode()) // GPU
+ if (::internal::arm_compute::isGpuMode()) // GPU
{
auto fn = nnfw::cpp14::make_unique<::arm_compute::CLHashtableLookup>();
@@ -5103,12 +5044,12 @@ void Planner::visit(const ::internal::tflite::op::LocalResponseNormalization::No
auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
- const auto norm_info =
- ::arm_compute::NormalizationLayerInfo(::arm_compute::NormType::CROSS_MAP, param.radius,
- param.alpha, param.beta, param.bias, false);
+ const auto norm_info = ::arm_compute::NormalizationLayerInfo(::arm_compute::NormType::CROSS_MAP,
+ param.radius * 2 + 1, param.alpha,
+ param.beta, param.bias, false);
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNormalizationLayerEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNormalizationLayer>();
fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc), norm_info);
@@ -5116,7 +5057,7 @@ void Planner::visit(const ::internal::tflite::op::LocalResponseNormalization::No
}
else
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::NENormalizationLayerEx>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NENormalizationLayer>();
fn->configure(ifm_alloc, ofm_alloc, norm_info);
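NNAPI describes local response normalization by its radius, while ACL's NormalizationLayerInfo expects the full window size, hence the param.radius * 2 + 1 above. A one-line check:

    #include <cassert>

    int main()
    {
      const int radius = 2;
      const int norm_size = radius * 2 + 1; // window covers channels [c-2, c+2]
      assert(norm_size == 5);
      return 0;
    }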
@@ -5177,17 +5118,6 @@ void Planner::visit(const ::internal::tflite::op::DepthToSpace::Node &node)
auto output_alloc = ctx.at(::internal::tflite::operand::Index{param.output_index});
auto input_alloc = ctx.at(::internal::tflite::operand::Index{param.input_index});
- if (from_env<bool>(std::getenv("USE_SIMPLE_DEPTHTOSPACE")))
- {
- // USE CPU VERSION OF DEPTHTOSPACE
- auto rank = 4;
- auto fn = nnfw::cpp14::make_unique<SimpleDepthToSpace>();
-
- fn->configure(input_alloc, output_alloc, param.block_size, getARMComputeAxises(rank));
-
- builder.append("DepthToSpace", std::move(fn));
- }
- else
{
if (::internal::arm_compute::isGpuMode()) // GPU
{
@@ -5220,6 +5150,13 @@ void Planner::visit(const ::internal::tflite::op::Unpack::Node &node)
int32_t axis =
_ctx.at(::internal::tflite::operand::Index{node.param().axis_index}).asScalar<int32_t>();
+ // Negative axis is supported; -1 implies axis R-1, where R is the input rank
+ if (axis < 0)
+ {
+ axis += input_rank;
+ assert(axis >= 0);
+ }
+ uint32_t axis_uint = ToARMComputeAxis(input_rank, axis).value();
// int32_t num_split =
// _ctx.at(::internal::tflite::operand::Index{node.param().num_split_index}).asScalar<int32_t>();
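Unpack now normalises a negative axis and then mirrors it into ACL's axis order before configuring CLUnstack. A standalone walk-through of both steps, with the mirroring assumed to match ToARMComputeAxis:

    #include <cassert>
    #include <cstdint>

    int main()
    {
      const int32_t input_rank = 3;
      int32_t axis = -1; // the last axis
      if (axis < 0)
        axis += input_rank;
      assert(axis == 2);
      const int32_t acl_axis = input_rank - axis - 1; // assumed mirroring
      assert(acl_axis == 0); // CLUnstack splits along ACL axis 0
      return 0;
    }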
@@ -5234,14 +5171,19 @@ void Planner::visit(const ::internal::tflite::op::Unpack::Node &node)
{
std::vector<int32_t> ofm_indexes;
int ifm_index;
- int axis;
+ uint32_t axis;
};
if (input_rank == 4)
{
+ // TODO: generate a test case for this and generalize the 4D method to all cases.
+ throw std::runtime_error("UNPACK_4D not implemented");
+ }
+ else if (input_rank == 3)
+ {
Param param;
param.ifm_index = ifm_index.asInt();
- param.axis = axis;
+ param.axis = axis_uint;
for (const auto &index : node.param().ofm_indexes)
{
param.ofm_indexes.push_back(index);
@@ -5252,7 +5194,7 @@ void Planner::visit(const ::internal::tflite::op::Unpack::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<SimpleUnpackLayer>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLUnstack>();
std::vector<::arm_compute::ICLTensor *> outputs;
for (const auto &index : param.ofm_indexes)
{
@@ -5269,11 +5211,6 @@ void Planner::visit(const ::internal::tflite::op::Unpack::Node &node)
_builder.addStage(stage);
}
- else if (input_rank == 3)
- {
- // TODO: generate test case for this and generalize 4D method all cases.
- throw std::runtime_error("UNPACK_3D not implemented");
- }
else if (input_rank == 2)
{
throw std::runtime_error("UNPACK_2D not implemented");
@@ -5307,25 +5244,33 @@ void Planner::visit(const ::internal::tflite::op::Pack::Node &node)
int32_t axis =
_ctx.at(::internal::tflite::operand::Index{node.param().axis_index}).asScalar<int32_t>();
+ // A negative axis implies an axis counted from the end.
+ // For example, axis = -1 implies the first axis from the end, i.e. axis = Rank - 1.
+ // Similarly, axis = -2 implies the second axis from the end, i.e. axis = Rank - 2.
+ if (axis < 0)
+ {
+ axis += output_rank;
+ assert(axis >= 0);
+ }
+ uint32_t axis_uint = ToARMComputeAxis(output_rank, axis).value();
struct Param
{
std::vector<int32_t> ifm_indexes;
int ofm_index;
- int axis;
+ uint32_t axis;
};
if (input_rank == 3)
{
+ // TODO: generate a test case for this and generalize the 4D method to all cases.
+ throw std::runtime_error("PACK_3D not implemented");
+ }
+ else if (input_rank == 2)
+ {
Param param;
param.ofm_index = ofm_index.asInt();
- param.axis = axis;
-
- // TODO: Fix this once all permutations are present.
- if (param.axis != 0)
- {
- throw std::runtime_error("This axis not supported, some 4D permutations are missing");
- }
+ param.axis = axis_uint;
for (const auto &index : node.param().ifm_indexes)
{
@@ -5337,14 +5282,14 @@ void Planner::visit(const ::internal::tflite::op::Pack::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<SimplePackLayer>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLStackLayer>();
std::vector<::arm_compute::ICLTensor *> inputs;
for (const auto &index : param.ifm_indexes)
{
auto input_alloc = ctx.at(::internal::tflite::operand::Index{index});
inputs.push_back(CAST_CL(input_alloc));
}
- fn->configure(inputs, CAST_CL(output_alloc), param.axis);
+ fn->configure(inputs, param.axis, CAST_CL(output_alloc));
builder.append("Pack", std::move(fn));
}
@@ -5354,11 +5299,6 @@ void Planner::visit(const ::internal::tflite::op::Pack::Node &node)
_builder.addStage(stage);
}
- else if (input_rank == 2)
- {
- // TODO: generate test case for this and generalize 4D method all cases.
- throw std::runtime_error("PACK_2D not implemented");
- }
else if (input_rank == 1)
{
throw std::runtime_error("PACK_1D not implemented");
@@ -5398,15 +5338,7 @@ void Planner::visit(const ::internal::tflite::op::Neg::Node &node)
auto ofm_alloc = ctx.at(::internal::tflite::operand::Index{param.ofm_index});
auto ifm_alloc = ctx.at(::internal::tflite::operand::Index{param.ifm_index});
- // NOTE SimpleNeg is quite slow, but may be useful for debugging
- if (from_env<bool>(std::getenv("USE_SIMPLE_NEG")))
- {
- auto fn = nnfw::cpp14::make_unique<SimpleNeg>();
-
- fn->configure(ifm_alloc, ofm_alloc);
- builder.append("Neg", std::move(fn));
- }
- else if (::internal::arm_compute::isGpuMode())
+ if (::internal::arm_compute::isGpuMode())
{
auto fn = nnfw::cpp14::make_unique<::arm_compute::CLNeg>();
@@ -5455,7 +5387,7 @@ void Planner::visit(const ::internal::tflite::op::Exp::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLExp>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLExpLayer>();
fn->configure(CAST_CL(ifm_alloc), CAST_CL(ofm_alloc));
@@ -5596,8 +5528,58 @@ void Planner::visit(const ::internal::tflite::op::ReduceSum::Node &node)
void Planner::visit(const ::internal::tflite::op::Abs::Node &node)
{
- // TODO Implement Abs op
- throw std::runtime_error("Not supported yet");
+ VERBOSE(Abs) << "Configure Abs operation" << std::endl;
+
+ const ::internal::tflite::operand::Index output_index{node.param().output_index};
+ const ::internal::tflite::operand::Index input_index{node.param().input_index};
+
+ // Set shape constraints
+ _builder.addShapeConstr(output_index,
+ asTensorInfo(asTensorShape(_ctx.at(output_index).shape()),
+ _ctx.at(output_index).type(), _ctx.at(output_index).scale(),
+ _ctx.at(output_index).zeroPoint()));
+ _builder.addShapeConstr(input_index,
+ asTensorInfo(asTensorShape(_ctx.at(input_index).shape()),
+ _ctx.at(input_index).type(), _ctx.at(input_index).scale(),
+ _ctx.at(input_index).zeroPoint()));
+
+ struct Param
+ {
+ int output_index;
+ int input_index;
+ };
+
+ Param param;
+
+ param.output_index = output_index.asInt();
+ param.input_index = input_index.asInt();
+
+ auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
+ auto output_alloc = ctx.at(::internal::tflite::operand::Index{param.output_index});
+ auto input_alloc = ctx.at(::internal::tflite::operand::Index{param.input_index});
+
+ const ::arm_compute::ActivationLayerInfo act_info{
+ ::arm_compute::ActivationLayerInfo::ActivationFunction::ABS};
+
+ if (::internal::arm_compute::isGpuMode())
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLActivationLayer>();
+
+ fn->configure(CAST_CL(input_alloc), CAST_CL(output_alloc), act_info);
+
+ builder.append("Abs", std::move(fn));
+ }
+ else
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::NEActivationLayer>();
+
+ fn->configure(input_alloc, output_alloc, act_info);
+
+ builder.append("Abs", std::move(fn));
+ }
+ };
+
+ _builder.addStage(stage);
}
void Planner::visit(const ::internal::tflite::op::NotEqual::Node &node)
@@ -5650,10 +5632,10 @@ void Planner::visit(const ::internal::tflite::op::NotEqual::Node &node)
if (::internal::arm_compute::isGpuMode())
{
- auto fn = nnfw::cpp14::make_unique<::arm_compute::CLComparisonOp>();
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLComparison>();
fn->configure(CAST_CL(input1_alloc), CAST_CL(input2_alloc), CAST_CL(output_alloc),
- ::arm_compute::ComparisonOperation::NOT_EQUAL);
+ ::arm_compute::ComparisonOperation::NotEqual);
builder.append("NotEqual", std::move(fn));
}
@@ -5738,8 +5720,50 @@ void Planner::visit(const ::internal::tflite::op::LogicalAnd::Node &node)
void Planner::visit(const ::internal::tflite::op::LogicalNot::Node &node)
{
- // TODO Implement LogicalNot op
- throw std::runtime_error("Not supported yet");
+ const ::internal::tflite::operand::Index output_index{node.param().output_index};
+ const ::internal::tflite::operand::Index input_index{node.param().input_index};
+
+ // Set Shape Constraints and TensorInfo
+ _builder.addShapeConstr(output_index,
+ asTensorInfo(asTensorShape(_ctx.at(output_index).shape()),
+ ::arm_compute::DataType::U8, _ctx.at(output_index).scale(),
+ _ctx.at(output_index).zeroPoint()));
+
+ _builder.addShapeConstr(input_index,
+ asTensorInfo(asTensorShape(_ctx.at(input_index).shape()),
+ ::arm_compute::DataType::U8, _ctx.at(input_index).scale(),
+ _ctx.at(input_index).zeroPoint()));
+
+ // Construct operation parameters
+ struct Param
+ {
+ int output_index;
+ int input_index;
+ };
+
+ Param param;
+
+ param.output_index = output_index.asInt();
+ param.input_index = input_index.asInt();
+ auto stage = [param](const IAllocationContext &ctx, IExecutionBuilder &builder) {
+ auto output_alloc = ctx.at(::internal::tflite::operand::Index{param.output_index});
+ auto input_alloc = ctx.at(::internal::tflite::operand::Index{param.input_index});
+ if (::internal::arm_compute::isGpuMode())
+ {
+ auto fn = nnfw::cpp14::make_unique<::arm_compute::CLBitwiseNot>();
+
+ fn->configure(CAST_CL(input_alloc), CAST_CL(output_alloc));
+
+ builder.append("LogicalNot", std::move(fn));
+ }
+ else
+ {
+ // TODO Add NEON support
+
+ throw std::runtime_error("Not supported yet");
+ }
+ };
+ _builder.addStage(stage);
}
void Planner::visit(const ::internal::tflite::op::LogicalOr::Node &node)
diff --git a/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.cc b/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.cc
index ff2f79309..1a5c735ee 100644
--- a/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.cc
+++ b/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.cc
@@ -55,36 +55,16 @@
int new_pv[4] = {0};
::arm_compute::Coordinates axises = getARMComputeAxises(rank);
- if (rank == 4)
+ for (uint32_t i = 0; i < rank; ++i)
{
- /**
- axises = {3,1,0,2}
- NNAPI PermutationVector
- N 0 3
- H 1 1
- W 2 0
- C 3 2
- **/
- new_pv[0] = axises[runtime_pv[2]];
- new_pv[1] = axises[runtime_pv[1]];
- new_pv[2] = axises[runtime_pv[3]];
- new_pv[3] = axises[runtime_pv[0]];
- }
- else
- {
- /**
- mapping/axises = {rank-1 to 0}
- CHW --------> WHC
- or
- WH ----------> HW
- **/
- for (int id = 0; id < rank; ++id)
- {
- new_pv[id] = axises[runtime_pv[rank - id - 1]];
- }
+ new_pv[axises[i]] = ToARMComputeAxis(rank, runtime_pv[i]).value();
}
- return ::arm_compute::PermutationVector{new_pv[0], new_pv[1], new_pv[2], new_pv[3]};
+ ::arm_compute::PermutationVector ACL_PV =
+ ::arm_compute::PermutationVector{new_pv[0], new_pv[1], new_pv[2], new_pv[3]};
+ ACL_PV.set_num_dimensions(rank);
+
+ return ACL_PV;
}
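The rewritten loop collapses the old rank-4 special case: every NNAPI permutation entry is translated once via getARMComputeAxises and ToARMComputeAxis. A standalone sketch, assuming getARMComputeAxises(4) yields {3, 1, 0, 2} and ToARMComputeAxis mirrors the index:

    #include <cassert>
    #include <cstdint>

    int main()
    {
      const uint32_t rank = 4;
      const uint32_t axises[4] = {3, 1, 0, 2};     // assumed getARMComputeAxises(4)
      const uint32_t runtime_pv[4] = {0, 2, 3, 1}; // an arbitrary NNAPI permutation
      uint32_t new_pv[4] = {0};
      for (uint32_t i = 0; i < rank; ++i)
        new_pv[axises[i]] = rank - runtime_pv[i] - 1; // assumed ToARMComputeAxis
      assert(new_pv[3] == 3); // entry 0 (N) lands in ACL slot 3
      return 0;
    }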
::arm_compute::TensorShape asTensorShape(const internal::tflite::operand::Shape &shape,
@@ -163,3 +143,10 @@
return ::arm_compute::TensorInfo(shape, 1, asDataType(type),
asQuantizationInfo(scale, zeroPoint));
}
+
+::arm_compute::TensorInfo asTensorInfo(const ::arm_compute::TensorShape &shape,
+ const ::arm_compute::DataType &type, const float scale,
+ const int32_t zeroPoint)
+{
+ return ::arm_compute::TensorInfo(shape, 1, type, asQuantizationInfo(scale, zeroPoint));
+}
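The new overload lets callers force an ACL element type instead of deriving it from the NNAPI operand; the LogicalNot hunk above uses it to request DataType::U8. A hedged usage sketch (the helper name is hypothetical):

    #include <cstdint>
    #include <arm_compute/core/TensorInfo.h>
    #include <arm_compute/core/Types.h>
    #include "internal/arm_compute/Cast.h"

    // U8 with the operand's quantisation stands in for an 8-bit boolean tensor.
    ::arm_compute::TensorInfo makeBoolInfo(const ::arm_compute::TensorShape &shape,
                                           float scale, int32_t zero_point)
    {
      return asTensorInfo(shape, ::arm_compute::DataType::U8, scale, zero_point);
    }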
diff --git a/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.h b/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.h
index 42b547feb..211a6ac87 100644
--- a/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.h
+++ b/runtimes/pure_arm_compute/src/internal/arm_compute/Cast.h
@@ -100,6 +100,18 @@
const float scale = 0.0f, const int32_t zeroPoint = 0);
/**
+ * @brief Cast from internal tensor info to tensor info object of arm compute
+ * @param[in] shape Tensor shape
+ * @param[in] type Tensor type of arm compute
+ * @param[in] scale Scale of tensor quantization
+ * @param[in] zeroPoint Zeropoint of tensor quantization
+ * @return TensorInfo object of arm compute
+ */
+::arm_compute::TensorInfo asTensorInfo(const ::arm_compute::TensorShape &shape,
+ const ::arm_compute::DataType &type, const float scale,
+ const int32_t zeroPoint);
+
+/**
* @brief Set value to arm compute tensor with casting
* @param[in] value Value to set
* @param[out] to Target tensor of arm compute
diff --git a/runtimes/pure_arm_compute/src/internal/layers/FeatureLoggingLayer.h b/runtimes/pure_arm_compute/src/internal/layers/FeatureLoggingLayer.h
deleted file mode 100644
index 83ae7c17b..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/FeatureLoggingLayer.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file        FeatureLoggingLayer.h
- * @brief       This file contains FeatureLoggingLayer class
- * @ingroup     COM_AI_RUNTIME
- */
-
-#ifndef __FEATURE_LOGGING_LAYER_H__
-#define __FEATURE_LOGGING_LAYER_H__
-
-#include <arm_compute/core/ITensor.h>
-#include <arm_compute/runtime/IFunction.h>
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-#include <iostream>
-#include <iomanip>
-#include <limits>
-
-#include "internal/arm_compute.h"
-
-/**
- * @brief Class to run FeatureLogging Layer
- */
-class FeatureLoggingLayer : public ::arm_compute::IFunction
-{
-public:
- FeatureLoggingLayer(void) : _tag(""), _target(nullptr)
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Configure the layer
- * @param[in] tag Text tag for this layer
- * @param[in] target The feature tensor to be printed
- * @return N/A
- */
- void configure(const std::string &tag, ::arm_compute::ITensor *target)
- {
- _tag = tag;
- _target = target;
- }
-
-public:
- /**
- * @brief Run the operation. Must be called after configure().
- * @return N/A
- */
- void run(void) override
- {
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
- CAST_CL(_target)->map(q);
- }
-
- const size_t W = _target->info()->dimension(0);
- const size_t H = _target->info()->dimension(1);
- const size_t C = _target->info()->dimension(2);
-
- std::cout << _tag << std::endl;
-
- for (size_t ch = 0; ch < C; ++ch)
- {
- std::cout << "Channel #" << ch << std::endl;
- for (size_t row = 0; row < H; ++row)
- {
- for (size_t col = 0; col < W; ++col)
- {
- const arm_compute::Coordinates id{col, row, ch};
- const auto value = *reinterpret_cast<float *>(_target->ptr_to_element(id));
-
- // TODO Generalize this to integer types
- std::cout << std::setprecision(2);
- std::cout << std::setw(7);
- std::cout << std::setfill(' ');
- std::cout << std::fixed;
- std::cout << value << " ";
- }
- std::cout << std::endl;
- }
- std::cout << std::endl;
- }
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
- CAST_CL(_target)->unmap(q);
- }
- }
-
-private:
- std::string _tag;
- ::arm_compute::ITensor *_target;
-};
-
-#endif // __FEATURE_LOGGING_LAYER_H__
diff --git a/runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.cc b/runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.cc
deleted file mode 100644
index 28789a801..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.cc
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "GenericFullyConnectedLayer.h"
-#include "internal/arm_compute.h"
-
-void GenericFullyConnectedLayer::configure(::arm_compute::ITensor *input,
- ::arm_compute::ITensor *weights,
- ::arm_compute::ITensor *biases,
- ::arm_compute::ITensor *output, bool needs_reshape,
- ::arm_compute::TensorShape reshape)
-{
- _input = input;
- _weights = weights;
- _biases = biases;
- _output = output;
- _needs_reshape = needs_reshape;
-
- // TODO Too many duplicated code. Revise below code.
- if (::internal::arm_compute::isGpuMode())
- {
- if (_needs_reshape)
- {
- // reshape
- auto_init_if_empty(*_cl_buffer.info(), _input->info()->clone()->set_tensor_shape(reshape));
- _generic_reshape.configure(CAST_CL(_input), &_cl_buffer);
-
- _cl_fc.configure(&_cl_buffer, CAST_CL(_weights), CAST_CL(_biases), CAST_CL(_output));
-
- // NOTE _cl_buffer is inaccessible from outside, and thus it is safe to invoke allocate here.
- _cl_buffer.allocator()->allocate();
- }
- else
- {
- _cl_fc.configure(CAST_CL(_input), CAST_CL(_weights), CAST_CL(_biases), CAST_CL(_output));
- }
- }
- else
- {
- if (_needs_reshape)
- {
- // reshape
- auto_init_if_empty(*_neon_buffer.info(), _input->info()->clone()->set_tensor_shape(reshape));
- _generic_reshape.configure(_input, &_neon_buffer);
-
- _neon_fc.configure(&_neon_buffer, _weights, _biases, _output);
-
- // NOTE _neon_buffer is inaccessible from outside, and thus it is safe to invoke allocate
- // here.
- _neon_buffer.allocator()->allocate();
- }
- else
- {
- _neon_fc.configure(_input, _weights, _biases, _output);
- }
- }
-}
-
-void GenericFullyConnectedLayer::run(void)
-{
- if (::internal::arm_compute::isGpuMode())
- {
- if (_needs_reshape)
- _generic_reshape.run();
-
- _cl_fc.run();
- }
- else
- {
- if (_needs_reshape)
- _generic_reshape.run();
-
- _neon_fc.run();
- }
-}
diff --git a/runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.h b/runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.h
deleted file mode 100644
index f1519f54d..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/GenericFullyConnectedLayer.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file        GenericFullyConnectedLayer.h
- * @brief       This file contains GenericFullyConnectedLayer class
- * @ingroup     COM_AI_RUNTIME
- */
-
-#ifndef __GENERIC_FULLY_CONNECTED_LAYER_H__
-#define __GENERIC_FULLY_CONNECTED_LAYER_H__
-
-#include <arm_compute/runtime/CL/functions/CLFullyConnectedLayer.h>
-#include <arm_compute/runtime/NEON/functions/NEFullyConnectedLayer.h>
-#include "internal/layers/GenericReshapeLayer.h"
-
-/**
- * @brief Class to run FullyConnected Layer with both CPU and GPU
- */
-class GenericFullyConnectedLayer : public ::arm_compute::IFunction
-{
-public:
- GenericFullyConnectedLayer(void)
- : _input(nullptr), _weights(nullptr), _biases(nullptr), _output(nullptr), _cl_buffer{},
- _neon_buffer{}, _cl_fc{}, _neon_fc{}, _generic_reshape{}, _needs_reshape(false)
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Configure the layer
- * @param[in] input The source tensor
- * @param[in] weights The tensor that is filled with weight values
- * @param[in] biases The tensor that is filled with biase values
- * @param[in] output The destination tensor
- * @param[in] needs_reshape Whether it needs to be reshaped or not
- * @param[in] reshape The tensor shape to be reshaped. Only valid when needs_reshape is true.
- * @return N/A
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *weights,
- ::arm_compute::ITensor *biases, ::arm_compute::ITensor *output, bool needs_reshape,
- ::arm_compute::TensorShape reshape);
-
-public:
- /**
- * @brief Run the operation. Must be called after configure().
- * @return N/A
- */
- void run(void) override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_weights;
- ::arm_compute::ITensor *_biases;
- ::arm_compute::ITensor *_output;
-
- // buffer for reshaping input tensor
- ::arm_compute::CLTensor _cl_buffer;
- ::arm_compute::Tensor _neon_buffer;
-
-private:
- ::arm_compute::CLFullyConnectedLayer _cl_fc;
- ::arm_compute::NEFullyConnectedLayer _neon_fc;
- GenericReshapeLayer _generic_reshape;
- bool _needs_reshape;
-};
-
-#endif // __GENERIC_FULLY_CONNECTED_LAYER_H__
diff --git a/runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.cc b/runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.cc
deleted file mode 100644
index c38c2e9e3..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.cc
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "GenericReshapeLayer.h"
-#include "internal/arm_compute.h"
-
-void GenericReshapeLayer::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output)
-{
- _input = input;
- _output = output;
-
- // NOTE This vector comes from CLPermuteKernel implementation
- //
- // This implementation permutes a tensor of shape W / H / C into another tensor of shape C / W / H
- //
- // Original | Permuted
- // 0 | W | C (from 2)
- // 1 | H | W (from 0)
- // 2 | C | H (from 1)
- //
- const ::arm_compute::PermutationVector pv{2, 0, 1};
-
- if (::internal::arm_compute::isGpuMode())
- {
- _cl_permute.configure(CAST_CL(input), &_cl_permuted, pv);
- _cl_reshape.configure(&_cl_permuted, CAST_CL(output));
-
- // NOTE _permuted is inaccessible from outside, and thus it is safe to invoke allocate here.
- _cl_permuted.allocator()->allocate();
- }
- else
- {
- _neon_permute.configure(input, &_neon_permuted, pv);
- _neon_reshape.configure(&_neon_permuted, output);
-
- // NOTE _permuted is inaccessible from outside, and thus it is safe to invoke allocate here.
- _neon_permuted.allocator()->allocate();
- }
-}
-
-void GenericReshapeLayer::run(void)
-{
- if (::internal::arm_compute::isGpuMode())
- {
- _cl_permute.run();
- _cl_reshape.run();
- }
- else
- {
- _neon_permute.run();
- _neon_reshape.run();
- }
-}
diff --git a/runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.h b/runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.h
deleted file mode 100644
index a22c14c8b..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/GenericReshapeLayer.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file        GenericReshapeLayer.h
- * @brief       This file contains GenericReshapeLayer class
- * @ingroup     COM_AI_RUNTIME
- */
-
-#ifndef __GENERIC_RESHAPE_LAYER_H__
-#define __GENERIC_RESHAPE_LAYER_H__
-
-#include <arm_compute/runtime/Tensor.h>
-#include <arm_compute/runtime/CL/CLTensor.h>
-
-#include <arm_compute/runtime/CL/functions/CLPermute.h>
-#include <arm_compute/runtime/CL/functions/CLReshapeLayer.h>
-#include <arm_compute/runtime/NEON/functions/NEPermute.h>
-#include <arm_compute/runtime/NEON/functions/NEReshapeLayer.h>
-
-/**
- * @brief Class to run Reshape Layer with both CPU and GPU
- */
-class GenericReshapeLayer : public ::arm_compute::IFunction
-{
-public:
- GenericReshapeLayer(void)
- : _input(nullptr), _output(nullptr), _cl_permuted{}, _neon_permuted{}, _cl_permute{},
- _cl_reshape{}, _neon_permute{}, _neon_reshape{}
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Configure the layer
- * @param[in] input The source tensor
- * @param[in] output The destination tensor
- * @return N/A
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output);
-
-public:
- /**
- * @brief Run the operation. Must be called after configure().
- * @return N/A
- */
- void run(void) override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
- ::arm_compute::CLTensor _cl_permuted;
- ::arm_compute::Tensor _neon_permuted;
-
-private:
- ::arm_compute::CLPermute _cl_permute;
- ::arm_compute::CLReshapeLayer _cl_reshape;
-
- ::arm_compute::NEPermute _neon_permute;
- ::arm_compute::NEReshapeLayer _neon_reshape;
-};
-
-#endif // __GENERIC_RESHAPE_LAYER_H__
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.cc
deleted file mode 100644
index 6d348e814..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.cc
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleArgMinMax.h"
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleArgMinMax::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- std::vector<uint32_t> axis, ::arm_compute::ArgOperation op)
-{
- _input = input;
- _output = output;
- _axis = axis;
- _input_rank = input->info()->num_dimensions();
- _op_type = op;
-}
-
-inline const ::arm_compute::TensorShape
-inferOutputShape(const ::arm_compute::TensorShape &input_shape, const std::vector<uint32_t> &axis,
- int input_rank)
-{
- ::arm_compute::TensorShape out_shape{};
- size_t dim = 1;
- for (int i = 0; i < input_rank; ++i)
- {
- dim = input_shape[i];
- out_shape.set(i, dim);
- }
-
- for (int i = 0; i < axis.size(); ++i)
- {
- out_shape.set(axis[i], 1);
- }
-
- return out_shape;
-}
-
-template <typename T>
-inline T getArgMinMaxEle(const ::arm_compute::ITensor *input,
- const ::arm_compute::TensorShape &input_shape,
- const ::arm_compute::TensorShape &output_shape, const size_t b,
- const size_t d, const size_t h, const size_t w, const int axis,
- const ::arm_compute::ArgOperation op_type)
-{
- // If output[dimension] == 1, every value along that dimension is scanned, because the
- // dimension is being reduced.
- // Otherwise only the single value at the given coordinate is checked.
- const size_t start_b = output_shape[3] == 1 ? 0 : b;
- const size_t start_d = output_shape[2] == 1 ? 0 : d;
- const size_t start_h = output_shape[1] == 1 ? 0 : h;
- const size_t start_w = output_shape[0] == 1 ? 0 : w;
- const size_t stop_b = output_shape[3] == 1 ? input_shape[3] - 1 : b;
- const size_t stop_d = output_shape[2] == 1 ? input_shape[2] - 1 : d;
- const size_t stop_h = output_shape[1] == 1 ? input_shape[1] - 1 : h;
- const size_t stop_w = output_shape[0] == 1 ? input_shape[0] - 1 : w;
-
- ::arm_compute::Coordinates id{w, h, d, b};
- ::arm_compute::Coordinates min_max_id{w, h, d, b};
-
- T value = *reinterpret_cast<T *>(input->ptr_to_element(id));
- T tval = *reinterpret_cast<T *>(input->ptr_to_element(id));
-
- for (size_t in_b = start_b; in_b <= stop_b; ++in_b)
- {
- id.set(3, in_b);
- for (size_t in_d = start_d; in_d <= stop_d; ++in_d)
- {
- id.set(2, in_d);
- for (size_t in_h = start_h; in_h <= stop_h; ++in_h)
- {
- id.set(1, in_h);
- for (size_t in_w = start_w; in_w <= stop_w; ++in_w)
- {
- id.set(0, in_w);
- if (op_type == ::arm_compute::ArgOperation::MIN)
- {
- value = std::min<T>(value, *reinterpret_cast<T *>(input->ptr_to_element(id)));
- }
- else if (op_type == ::arm_compute::ArgOperation::MAX)
- {
- value = std::max<T>(value, *reinterpret_cast<T *>(input->ptr_to_element(id)));
- }
- else
- throw std::runtime_error("This Arg operation is not supported, yet");
-
- if (tval != value)
- {
- min_max_id = id;
- tval = value;
- }
- }
- }
- }
- }
-
- return min_max_id[axis];
-}
-
-template <typename T>
-inline void
-getArgMinMax(const ::arm_compute::ITensor *input, const ::arm_compute::TensorShape &input_shape,
- const ::arm_compute::TensorShape &output_shape, ::arm_compute::ITensor *output,
- const int axis, const ::arm_compute::ArgOperation op_type)
-{
- ::arm_compute::Coordinates id;
- for (size_t out_b = 0; out_b < output_shape[3]; ++out_b)
- {
- id.set(3, out_b);
- for (size_t out_d = 0; out_d < output_shape[2]; ++out_d)
- {
- id.set(2, out_d);
- for (size_t out_h = 0; out_h < output_shape[1]; ++out_h)
- {
- id.set(1, out_h);
- for (size_t out_w = 0; out_w < output_shape[0]; ++out_w)
- {
- id.set(0, out_w);
- *reinterpret_cast<int *>(output->ptr_to_element(id)) = getArgMinMaxEle<T>(
- input, input_shape, output_shape, out_b, out_d, out_h, out_w, axis, op_type);
- }
- }
- }
- }
-}
-
-void SimpleArgMinMax::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->map(q);
- CAST_CL(_output)->map(q);
- }
-
- ::arm_compute::TensorShape input_shape = _input->info()->tensor_shape();
-
- // Only a single reduction axis is handled for now.
- // TODO Support more than one axis.
- int axis_val = _axis[0];
- ::arm_compute::TensorShape output_shape = inferOutputShape(input_shape, _axis, _input_rank);
-
- _output->info()->set_tensor_shape(output_shape);
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::QASYMM8:
- getArgMinMax<uint8_t>(_input, input_shape, output_shape, _output, axis_val, _op_type);
- break;
- case ::arm_compute::DataType::S32:
- getArgMinMax<int32_t>(_input, input_shape, output_shape, _output, axis_val, _op_type);
- break;
- case ::arm_compute::DataType::F32:
- getArgMinMax<float>(_input, input_shape, output_shape, _output, axis_val, _op_type);
- break;
- default:
- ARM_COMPUTE_ERROR("DataType not supported");
- break;
- }
-
- _output->info()->set_tensor_shape(output_shape);
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->unmap(q);
- CAST_CL(_output)->unmap(q);
- }
-}
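
The deleted SimpleArgMinMax combines two ideas: reduced axes collapse to size 1 in the output shape, and the reduction records the coordinate at which the extremum occurs. A compact standalone sketch (plain C++; inferShape() and argmax() are hypothetical stand-ins, not the runtime's API):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Collapse each reduced axis to size 1, as inferOutputShape() above did.
    std::vector<size_t> inferShape(std::vector<size_t> shape, const std::vector<unsigned> &axes)
    {
      for (unsigned a : axes)
        shape[a] = 1;
      return shape;
    }

    // 1-D argmax: return the index of the largest element.
    size_t argmax(const std::vector<float> &v)
    {
      size_t best = 0;
      for (size_t i = 1; i < v.size(); ++i)
        if (v[i] > v[best])
          best = i;
      return best;
    }

    int main()
    {
      assert((inferShape({2, 3, 4}, {1}) == std::vector<size_t>{2, 1, 4}));
      assert(argmax({0.5f, 2.0f, 1.5f}) == 1);
      return 0;
    }
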
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.h
deleted file mode 100644
index b90e74579..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleArgMinMax.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_ARG_MIN_MAX_H__
-#define __SIMPLE_ARG_MIN_MAX_H__
-
-#include "internal/arm_compute.h"
-#include "arm_compute/core/TypesEx.h"
-
-class SimpleArgMinMax : public ::arm_compute::IFunction
-{
-public:
- SimpleArgMinMax(void) : _input(nullptr), _output(nullptr), _axis(), _input_rank(0)
- {
- // DO NOTHING
- }
-
-public:
- /** Initialise input and output
- *
- * @param[in] input First tensor input.
- * @param[out] output Output tensor.
- * @param[in] axis Dimension along which to find Min or Max Index.
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- std::vector<uint32_t> axis, ::arm_compute::ArgOperation op);
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
- std::vector<uint32_t> _axis;
- int _input_rank;
- ::arm_compute::ArgOperation _op_type;
-};
-
-#endif /*__SIMPLE_ARG_MIN_MAX_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleArithmeticAddition.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleArithmeticAddition.h
deleted file mode 100644
index aed9ae286..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleArithmeticAddition.h
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file        SimpleArithmeticAddition.h
- * @brief       This file contains SimpleArithmeticAddition class
- * @ingroup     COM_AI_RUNTIME
- */
-
-#ifndef __SIMPLE_ARITHMETIC_ADDITION_H__
-#define __SIMPLE_ARITHMETIC_ADDITION_H__
-
-#include "internal/arm_compute.h"
-#include <arm_compute/core/ITensor.h>
-
-/**
- * @brief Class to run SimpleArithmeticAddition Layer
- */
-class SimpleArithmeticAddition : public ::arm_compute::IFunction
-{
-public:
- SimpleArithmeticAddition(void) : _lhs(nullptr), _rhs(nullptr), _out(nullptr)
- {
- // DO NOTHING
- }
-
- /**
- * @brief Configure the layer
- * @param[in] lhs Lefthand-side operand
- * @param[in] rhs Righthand-side operand
- * @param[in] out The destination tensor (result operand)
- * @return N/A
- */
- void configure(::arm_compute::ITensor *lhs, ::arm_compute::ITensor *rhs,
- ::arm_compute::ITensor *out)
- {
- _lhs = lhs;
- _rhs = rhs;
- _out = out;
- }
-
-public:
- /**
- * @brief Run the operation. Must be called after configure().
- * @return N/A
- */
- void run(void) override
- {
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_lhs)->map(q);
- CAST_CL(_rhs)->map(q);
- CAST_CL(_out)->map(q);
- }
-
- arm_compute::Window window;
- window.use_tensor_dimensions(_out->info()->tensor_shape());
-
- execute_window_loop(window, [this](const arm_compute::Coordinates &id) {
- // NOTE The two input tensors must have identical types, and the output
- // tensor must have the same type as the inputs.
- assert(_lhs->info()->data_type() == _rhs->info()->data_type());
- assert(_lhs->info()->data_type() == _out->info()->data_type());
-
- switch (_lhs->info()->data_type())
- {
- case ::arm_compute::DataType::F32:
- {
- const auto lhs_value = *reinterpret_cast<float *>(_lhs->ptr_to_element(id));
- const auto rhs_value = *reinterpret_cast<float *>(_rhs->ptr_to_element(id));
- *reinterpret_cast<float *>(_out->ptr_to_element(id)) = lhs_value + rhs_value;
- break;
- }
- case ::arm_compute::DataType::S32:
- {
- const auto lhs_value = *reinterpret_cast<int32_t *>(_lhs->ptr_to_element(id));
- const auto rhs_value = *reinterpret_cast<int32_t *>(_rhs->ptr_to_element(id));
- *reinterpret_cast<int32_t *>(_out->ptr_to_element(id)) = lhs_value + rhs_value;
- break;
- }
- case ::arm_compute::DataType::U32:
- {
- const auto lhs_value = *reinterpret_cast<uint32_t *>(_lhs->ptr_to_element(id));
- const auto rhs_value = *reinterpret_cast<uint32_t *>(_rhs->ptr_to_element(id));
- *reinterpret_cast<uint32_t *>(_out->ptr_to_element(id)) = lhs_value + rhs_value;
- break;
- }
- case ::arm_compute::DataType::QASYMM8:
- {
- const auto lhs_value = *reinterpret_cast<uint8_t *>(_lhs->ptr_to_element(id));
- const auto rhs_value = *reinterpret_cast<uint8_t *>(_rhs->ptr_to_element(id));
- // TODO Decide how overflow should be handled; uint8_t addition currently wraps.
- *reinterpret_cast<uint8_t *>(_out->ptr_to_element(id)) = lhs_value + rhs_value;
- break;
- }
- default:
- throw std::runtime_error("Not supported, yet");
- break;
- }
- });
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_out)->unmap(q);
- CAST_CL(_rhs)->unmap(q);
- CAST_CL(_lhs)->unmap(q);
- }
- }
-
-private:
- ::arm_compute::ITensor *_lhs;
- ::arm_compute::ITensor *_rhs;
- ::arm_compute::ITensor *_out;
-};
-
-#endif // __SIMPLE_ARITHMETIC_ADDITION_H__
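
Stripped of the tensor mapping, the run() loop above is an element-wise add. A standalone sketch (plain C++; add() is a hypothetical helper over flat buffers) showing the same per-element arithmetic, including the uint8_t wraparound that the QASYMM8 TODO refers to:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Element-wise addition over flat buffers; for uint8_t the sum wraps
    // modulo 256, which is the overflow question flagged above.
    template <typename T>
    std::vector<T> add(const std::vector<T> &lhs, const std::vector<T> &rhs)
    {
      assert(lhs.size() == rhs.size());
      std::vector<T> out(lhs.size());
      for (size_t i = 0; i < lhs.size(); ++i)
        out[i] = static_cast<T>(lhs[i] + rhs[i]);
      return out;
    }

    int main()
    {
      const std::vector<uint8_t> a{250, 1}, b{10, 2};
      const auto c = add(a, b);
      assert(c[0] == 4); // 260 wraps modulo 256
      assert(c[1] == 3);
      return 0;
    }
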
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.cc
deleted file mode 100644
index 87175ee1a..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.cc
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleBatchToSpaceNd.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleBatchToSpaceND::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- const int32_t *block_size,
- const ::arm_compute::Coordinates &axises)
-{
- const auto rank = axises.num_dimensions();
- assert(rank == 4);
-
- for (int i = 0; i < rank; ++i)
- assert(axises[i] >= 0 && axises[i] < rank);
-
- _input = input;
- _output = output;
- _block_size = block_size;
- _axises = axises;
-}
-
-template <typename T>
-inline void BatchToSpaceND(const ::arm_compute::ITensor *input,
- const ::arm_compute::TensorShape &input_shape,
- const int32_t *block_size_data, ::arm_compute::ITensor *output,
- const ::arm_compute::TensorShape &output_shape,
- const ::arm_compute::Coordinates &axises)
-{
- const int output_batch = output_shape[axises[0]];
- const int output_height = output_shape[axises[1]];
- const int output_width = output_shape[axises[2]];
- const int depth = output_shape[axises[3]];
-
- for (int out_b = 0; out_b < output_batch; ++out_b)
- {
- for (int out_h = 0; out_h < output_height; ++out_h)
- {
- for (int out_w = 0; out_w < output_width; ++out_w)
- {
- for (int out_d = 0; out_d < depth; ++out_d)
- {
- const int in_d = out_d;
- const int in_h = out_h / block_size_data[0];
- const int in_w = out_w / block_size_data[1];
- const int in_b =
- out_b +
- ((out_h % block_size_data[0]) * block_size_data[1] + out_w % block_size_data[1]) *
- output_batch;
-
- auto input_id =
- asARMComputeCoordinates(::arm_compute::Coordinates{in_b, in_h, in_w, in_d}, axises);
- auto output_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{out_b, out_h, out_w, out_d}, axises);
-
- *reinterpret_cast<T *>(output->ptr_to_element(output_id)) =
- *reinterpret_cast<T *>(input->ptr_to_element(input_id));
- }
- }
- }
- }
-}
-void SimpleBatchToSpaceND::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->map(q);
- CAST_CL(_output)->map(q);
- }
-
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::U8:
- case ::arm_compute::DataType::QASYMM8:
- BatchToSpaceND<uint8_t>(_input, _input->info()->tensor_shape(), _block_size, _output,
- _output->info()->tensor_shape(), _axises);
- break;
- case ::arm_compute::DataType::F32:
- BatchToSpaceND<float>(_input, _input->info()->tensor_shape(), _block_size, _output,
- _output->info()->tensor_shape(), _axises);
- break;
- default:
- ARM_COMPUTE_ERROR("DataType not supported");
- break;
- }
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->unmap(q);
- CAST_CL(_output)->unmap(q);
- }
-}
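
A worked instance of the BatchToSpaceND index arithmetic above, as a standalone check (plain C++; the concrete sizes are made up for illustration):

    #include <cassert>
    #include <cstdint>

    int main()
    {
      // block_size = {2, 2} and an output batch of 1: each 2x2 spatial block
      // of the output pulls its four pixels from four different input batches.
      const int32_t bs0 = 2, bs1 = 2;
      const int out_batch = 1;

      const int out_b = 0, out_h = 3, out_w = 2;
      const int in_h = out_h / bs0; // 1
      const int in_w = out_w / bs1; // 1
      const int in_b = out_b + ((out_h % bs0) * bs1 + out_w % bs1) * out_batch; // 2

      assert(in_h == 1 && in_w == 1 && in_b == 2);
      return 0;
    }
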
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.h
deleted file mode 100644
index 5695d9719..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleBatchToSpaceNd.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_BATCH_TO_SPACE_ND_H__
-#define __SIMPLE_BATCH_TO_SPACE_ND_H__
-
-#include "internal/arm_compute.h"
-#include "internal/arm_compute/Cast.h"
-
-class SimpleBatchToSpaceND : public ::arm_compute::IFunction
-{
-public:
- SimpleBatchToSpaceND(void) : _input(nullptr), _output(nullptr), _block_size(nullptr), _axises{}
- {
- // DO NOTHING
- }
-
- /** Initialise input and output
- *
- * @param[in] input First tensor input.
- * @param[out] output Output tensor.
- * @param[in] block_size Block size.
- * @param[in] axises Axises of rank 4
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- const int32_t *block_size,
- const ::arm_compute::Coordinates &axises = getARMComputeAxises(4));
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
- const int32_t *_block_size;
- ::arm_compute::Coordinates _axises;
-};
-
-#endif /*__SIMPLE_BATCH_TO_SPACE_ND_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.cc
deleted file mode 100644
index 7c7706a78..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.cc
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "internal/layers/SimpleCastLayer.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleCastLayer::castData(::arm_compute::ITensor *in, ::arm_compute::ITensor *out,
- const arm_compute::Coordinates &id)
-{
- switch (in->info()->data_type())
- {
- case ::arm_compute::DataType::F32:
- {
- copyCast(*reinterpret_cast<float *>(in->ptr_to_element(id)), out, id);
- break;
- }
- case ::arm_compute::DataType::S32:
- {
- copyCast(*reinterpret_cast<int32_t *>(in->ptr_to_element(id)), out, id);
- break;
- }
- case ::arm_compute::DataType::U32:
- {
- copyCast(*reinterpret_cast<uint32_t *>(in->ptr_to_element(id)), out, id);
- break;
- }
- case ::arm_compute::DataType::QASYMM8:
- {
- const uint8_t quantizedValue = *(in->ptr_to_element(id));
- copyCast(in->info()->quantization_info().dequantize(quantizedValue), out, id);
- break;
- }
- default:
- throw std::runtime_error("Not supported, yet");
- break;
- }
-}
-
-void SimpleCastLayer::configure(::arm_compute::ITensor *in, ::arm_compute::ITensor *out)
-{
- _in = in;
- _out = out;
-}
-
-void SimpleCastLayer::run(void)
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
- CAST_CL(_in)->map(q);
- CAST_CL(_out)->map(q);
- }
-
- arm_compute::Window window;
- window.use_tensor_dimensions(_out->info()->tensor_shape());
-
- execute_window_loop(window,
- [this](const arm_compute::Coordinates &id) { castData(_in, _out, id); });
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
- CAST_CL(_out)->unmap(q);
- CAST_CL(_in)->unmap(q);
- }
-}
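
The QASYMM8 branch above relies on affine dequantization. A standalone sketch under the usual assumption real = scale * (q - zero_point); whether this matches that ACL version's quantization_info().dequantize() bit-for-bit is not verified here:

    #include <cassert>
    #include <cstdint>

    // Affine dequantization commonly used for QASYMM8 tensors:
    // real_value = scale * (quantized_value - zero_point).
    float dequantize(uint8_t q, float scale, int32_t zero_point)
    {
      return scale * (static_cast<int32_t>(q) - zero_point);
    }

    int main()
    {
      // scale = 0.5, zero_point = 128: quantized value 130 maps to 1.0f.
      assert(dequantize(130, 0.5f, 128) == 1.0f);
      return 0;
    }
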
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.h
deleted file mode 100644
index f9a48b481..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleCastLayer.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file        SimpleCastLayer.h
- * @brief       This file contains SimpleCastLayer class
- * @ingroup     COM_AI_RUNTIME
- */
-
-#ifndef __SIMPLE_CAST_LAYER_H__
-#define __SIMPLE_CAST_LAYER_H__
-
-#include "internal/arm_compute.h"
-#include "internal/arm_compute/Cast.h"
-
-/**
- * @brief Class to run SimpleCast Layer
- */
-class SimpleCastLayer : public ::arm_compute::IFunction
-{
-public:
- SimpleCastLayer(void) : _in(nullptr), _out(nullptr)
- {
- // DO NOTHING
- }
-
- /**
- * @brief Configure the layer
- * @param[in] in The source tensor
- * @param[in] out The destination tensor
- * @return N/A
- */
- void configure(::arm_compute::ITensor *in, ::arm_compute::ITensor *out);
-
- /**
- * @brief Run the operation. Must be called after configure().
- * @return N/A
- */
- void run(void) override;
-
-private:
- /**
- * @brief Cast and copy data from one tensor to another
- *
- * @param[in] in The source tensor
- * @param[out] out The destination tensor
- * @param[in] id Coordinates to copy
- * @return N/A
- */
- void castData(::arm_compute::ITensor *in, ::arm_compute::ITensor *out,
- const arm_compute::Coordinates &id);
-
- ::arm_compute::ITensor *_in;
- ::arm_compute::ITensor *_out;
-};
-
-#endif // __SIMPLE_CAST_LAYER_H__
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.cc
deleted file mode 100644
index d62a8321b..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.cc
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleDepthToSpace.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleDepthToSpace::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- int32_t block_size, const ::arm_compute::Coordinates &axises)
-{
- const auto rank = axises.num_dimensions();
- assert(rank == 4);
- for (int i = 0; i < rank; ++i)
- {
- assert(axises[i] >= 0);
- assert(axises[i] < rank);
- }
-
- _input = input;
- _output = output;
- _block_size = block_size;
- _axises = axises;
-}
-
-template <typename T>
-inline void DepthToSpace(const ::arm_compute::ITensor *input,
- const ::arm_compute::TensorShape &input_shape, int32_t block_size,
- ::arm_compute::ITensor *output,
- const ::arm_compute::TensorShape &output_shape,
- const ::arm_compute::Coordinates &axises)
-{
- const int output_batch = output_shape[axises[0]];
- const int output_height = output_shape[axises[1]];
- const int output_width = output_shape[axises[2]];
- const int output_depth = output_shape[axises[3]];
-
- for (int out_b = 0; out_b < output_batch; ++out_b)
- {
- for (int out_h = 0; out_h < output_height; ++out_h)
- {
- for (int out_w = 0; out_w < output_width; ++out_w)
- {
- for (int out_d = 0; out_d < output_depth; ++out_d)
- {
- const int in_b = out_b;
- const int in_h = out_h / block_size;
- const int in_w = out_w / block_size;
- const int in_d =
- out_d + ((out_h % block_size) * block_size + out_w % block_size) * output_depth;
-
- auto input_id =
- asARMComputeCoordinates(::arm_compute::Coordinates{in_b, in_h, in_w, in_d}, axises);
- auto output_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{out_b, out_h, out_w, out_d}, axises);
-
- *reinterpret_cast<T *>(output->ptr_to_element(output_id)) =
- *reinterpret_cast<T *>(input->ptr_to_element(input_id));
- }
- }
- }
- }
-}
-
-void SimpleDepthToSpace::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->map(q);
- CAST_CL(_output)->map(q);
- }
-
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::U8:
- case ::arm_compute::DataType::QASYMM8:
- DepthToSpace<uint8_t>(_input, _input->info()->tensor_shape(), _block_size, _output,
- _output->info()->tensor_shape(), _axises);
- break;
- case ::arm_compute::DataType::F32:
- DepthToSpace<float>(_input, _input->info()->tensor_shape(), _block_size, _output,
- _output->info()->tensor_shape(), _axises);
- break;
- default:
- ARM_COMPUTE_ERROR("DataType not supported");
- break;
- }
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->unmap(q);
- CAST_CL(_output)->unmap(q);
- }
-}
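
A worked instance of the DepthToSpace index arithmetic above (plain C++; sizes chosen for illustration):

    #include <cassert>

    int main()
    {
      // block_size = 2: the output pixel at (out_h, out_w, out_d) reads input
      // channel out_d + ((out_h % 2) * 2 + out_w % 2) * output_depth.
      const int block_size = 2;
      const int output_depth = 3;

      const int out_h = 1, out_w = 1, out_d = 2;
      const int in_h = out_h / block_size; // 0
      const int in_w = out_w / block_size; // 0
      const int in_d =
          out_d + ((out_h % block_size) * block_size + out_w % block_size) * output_depth; // 11

      assert(in_h == 0 && in_w == 0 && in_d == 11);
      return 0;
    }
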
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.h
deleted file mode 100644
index 1032aaa47..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleDepthToSpace.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_DEPTH_TO_SPACE_H__
-#define __SIMPLE_DEPTH_TO_SPACE_H__
-
-#include "internal/arm_compute.h"
-#include "internal/arm_compute/Cast.h"
-
-class SimpleDepthToSpace : public ::arm_compute::IFunction
-{
-public:
- SimpleDepthToSpace(void) : _input(nullptr), _output(nullptr), _block_size(0), _axises{}
- {
- // DO NOTHING
- }
-
-public:
- /** Initialise input and output
- *
- * @param[in] input First tensor input.
- * @param[out] output Output tensor.
- * @param[in] block_size Block size.
- * @param[in] axises Axises of rank 4
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output, int32_t block_size,
- const ::arm_compute::Coordinates &axises = getARMComputeAxises(4));
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
- int32_t _block_size;
- ::arm_compute::Coordinates _axises;
-};
-
-#endif /*__SIMPLE_DEPTH_TO_SPACE_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.cc
deleted file mode 100644
index ae740bb10..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "internal/layers/SimpleEmbeddingLookup.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleEmbeddingLookup::configure(::arm_compute::ITensor *lookups,
- ::arm_compute::ITensor *values,
- ::arm_compute::ITensor *output)
-{
- assert(values->info()->num_dimensions() == output->info()->num_dimensions());
- assert(values->info()->num_dimensions() > 1 && values->info()->num_dimensions() <= 4);
- _lookups = lookups;
- _values = values;
- _output = output;
-}
-
-void SimpleEmbeddingLookup::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_lookups)->map(q);
- CAST_CL(_values)->map(q);
- CAST_CL(_output)->map(q);
- }
-
- // The element type of lookups is always 32-bit integer.
- const int32_t *lookups_buf = reinterpret_cast<int32_t *>(_lookups->buffer());
-
- const auto lookups_info = _lookups->info();
- const auto values_info = _values->info();
- const auto output_info = _output->info();
-
- // NOTE The first dimension's position is always at the end of the dimension list.
- const auto first_dim_pos = values_info->num_dimensions() - 1;
-
- const size_t first_dim = values_info->dimension(first_dim_pos);
- for (size_t i = 0; i < lookups_info->dimension(0); ++i)
- {
- if (lookups_buf[i] < 0 || lookups_buf[i] >= first_dim)
- throw std::runtime_error("Embedding Lookup: index out of bounds.");
- }
-
- // If the strides of values and output differ, the two tensors carry different amounts of
- // padding, so the data cannot be copied in a single pass.
- auto can_copy_at_once = [&]() -> bool {
- const auto &values_strides = values_info->strides_in_bytes();
- const auto &output_strides = output_info->strides_in_bytes();
-
- for (size_t i = 0; i < first_dim_pos; ++i)
- {
- if (values_strides[i] != output_strides[i])
- return false;
- }
-
- return true;
- };
-
- using ::arm_compute::Window;
- using ::arm_compute::Iterator;
-
- size_t copy_bytes;
- Window window;
- if (can_copy_at_once())
- {
- copy_bytes = values_info->total_size() / first_dim;
- window.use_tensor_dimensions(output_info->tensor_shape(), first_dim_pos);
- }
- else
- {
- copy_bytes = values_info->dimension(0) * values_info->element_size();
- window.use_tensor_dimensions(output_info->tensor_shape(), Window::DimY);
- }
-
- Iterator it(_output, window);
- execute_window_loop(window,
- [&](const ::arm_compute::Coordinates &id) {
- ::arm_compute::Coordinates values_id = id;
- const int idx = id[first_dim_pos];
- values_id.set(first_dim_pos, lookups_buf[idx]);
- memcpy(it.ptr(), _values->ptr_to_element(values_id), copy_bytes);
- },
- it);
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_lookups)->unmap(q);
- CAST_CL(_values)->unmap(q);
- CAST_CL(_output)->unmap(q);
- }
-}
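
Stripped of tensor mapping and stride handling, the lookup above is a bounds-checked row gather. A standalone sketch (plain C++; embeddingLookup() is a hypothetical helper over flat float buffers):

    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <stdexcept>
    #include <vector>

    // Gather rows of a (rows x row_len) table by index, with the same bounds
    // check the deleted layer performed before copying.
    std::vector<float> embeddingLookup(const std::vector<int32_t> &lookups,
                                       const std::vector<float> &values,
                                       size_t rows, size_t row_len)
    {
      std::vector<float> out(lookups.size() * row_len);
      for (size_t i = 0; i < lookups.size(); ++i)
      {
        const int32_t idx = lookups[i];
        if (idx < 0 || static_cast<size_t>(idx) >= rows)
          throw std::runtime_error("Embedding Lookup: index out of bounds.");
        std::memcpy(&out[i * row_len], &values[idx * row_len], row_len * sizeof(float));
      }
      return out;
    }

    int main()
    {
      const std::vector<float> table{0.f, 0.f, 1.f, 1.f, 2.f, 2.f}; // 3 rows x 2
      const auto out = embeddingLookup({2, 0}, table, 3, 2);
      assert(out[0] == 2.f && out[2] == 0.f);
      return 0;
    }
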
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.h
deleted file mode 100644
index fd499437f..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleEmbeddingLookup.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __SIMPLE_EMBEDDING_LOOKUP_H__
-#define __SIMPLE_EMBEDDING_LOOKUP_H__
-
-#include "internal/arm_compute.h"
-
-/**
- * @file        SimpleEmbeddingLookup.h
- * @brief       This file contains SimpleEmbeddingLookup class
- * @ingroup     COM_AI_RUNTIME
- */
-
-/**
- * @brief Class to run SimpleEmbeddingLookup Layer
- */
-class SimpleEmbeddingLookup : public ::arm_compute::IFunction
-{
-public:
- SimpleEmbeddingLookup(void) : _lookups(nullptr), _values(nullptr), _output(nullptr)
- {
- // DO NOTHING
- }
-
-public:
- /**
- * @brief Configure the layer
- * @param[in] lookups 1D tensor which contains lookup values
- * @param[in] values The source tensor
- * @param[in] output The destination tensor
- * @return N/A
- */
- void configure(::arm_compute::ITensor *lookups, ::arm_compute::ITensor *values,
- ::arm_compute::ITensor *output);
-
- /**
- * @brief Run the operation. Must be called after configure().
- * @return N/A
- */
- void run() override;
-
-private:
- ::arm_compute::ITensor *_lookups;
- ::arm_compute::ITensor *_values;
- ::arm_compute::ITensor *_output;
-};
-
-#endif /*__SIMPLE_EMBEDDING_LOOKUP_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.cc
deleted file mode 100644
index 7f8ae2505..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.cc
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleHashtableLookupLayer.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleHashtableLookupLayer::configure(::arm_compute::ITensor *lookups,
- ::arm_compute::ITensor *keys,
- ::arm_compute::ITensor *values,
- ::arm_compute::ITensor *output,
- ::arm_compute::ITensor *hits)
-{
- _lookups = lookups;
- _keys = keys;
- _values = values;
- _output = output;
- _hits = hits;
- _lookup_indices.resize(lookups->info()->dimension(0), -1);
-}
-
-void SimpleHashtableLookupLayer::run()
-{
- auto &queue = ::arm_compute::CLScheduler::get().queue();
- if (::internal::arm_compute::isGpuMode())
- {
- CAST_CL(_lookups)->map(queue);
- CAST_CL(_keys)->map(queue);
- CAST_CL(_values)->map(queue);
- CAST_CL(_output)->map(queue);
- CAST_CL(_hits)->map(queue);
- }
-
- const int32_t *lookups_buf = reinterpret_cast<int32_t *>(_lookups->buffer());
- const int32_t *keys_buf = reinterpret_cast<int32_t *>(_keys->buffer());
- uint8_t *hits_buf = reinterpret_cast<uint8_t *>(_hits->buffer());
-
- const auto lookups_info = _lookups->info();
- const auto values_info = _values->info();
- const auto keys_info = _keys->info();
- const auto output_info = _output->info();
-
- // NOTE The first dimension's position must always be at the end of the dimension list.
- const auto first_dim_pos = values_info->num_dimensions() - 1;
- const size_t first_dim = values_info->dimension(first_dim_pos);
-
- std::map<int32_t, size_t> key_map;
- const int keys_num = keys_info->dimension(0);
- for (size_t key_index = 0; key_index < keys_num; key_index++)
- {
- key_map[keys_buf[key_index]] = key_index;
- }
-
- const int lookups_num = lookups_info->dimension(0);
- for (size_t i = 0; i < lookups_num; ++i)
- {
- const auto lookup_value = lookups_buf[i];
- const auto it = key_map.find(lookup_value);
- if (it != key_map.end())
- {
- if (it->second >= first_dim)
- throw std::runtime_error("HashTable Lookup: index out of bounds.");
- _lookup_indices[i] = it->second;
- }
- }
-
- // If the strides of values and output differ, the two tensors carry different amounts of
- // padding, so the data cannot be copied in a single pass.
- auto can_copy_at_once = [&]() -> bool {
- const auto &values_strides = values_info->strides_in_bytes();
- const auto &output_strides = output_info->strides_in_bytes();
-
- for (size_t i = 0; i < first_dim_pos; ++i)
- {
- if (values_strides[i] != output_strides[i])
- return false;
- }
-
- return true;
- };
-
- using ::arm_compute::Window;
- using ::arm_compute::Iterator;
- using ::arm_compute::Coordinates;
-
- size_t copy_bytes;
- Window window;
- if (can_copy_at_once())
- {
- copy_bytes = values_info->total_size() / first_dim;
- window.use_tensor_dimensions(output_info->tensor_shape(), first_dim_pos);
- }
- else
- {
- copy_bytes = values_info->dimension(0) * values_info->element_size();
- window.use_tensor_dimensions(output_info->tensor_shape(), Window::DimY);
- }
-
- Iterator it(_output, window);
- execute_window_loop(window,
- [&](const Coordinates &id) {
- Coordinates values_id = id;
- const int idx = id[first_dim_pos];
- const int lookup_index = _lookup_indices[idx];
- if (lookup_index >= 0)
- {
- values_id.set(first_dim_pos, lookup_index);
- memcpy(it.ptr(), _values->ptr_to_element(values_id), copy_bytes);
- hits_buf[idx] = 1; // hits is indexed by lookup position, not by the resolved row
- }
- else
- {
- memset(it.ptr(), 0, copy_bytes);
- hits_buf[idx] = 0; // lookup_index is -1 here; indexing with it would be out of bounds
- }
- },
- it);
-
- if (::internal::arm_compute::isGpuMode())
- {
- CAST_CL(_lookups)->unmap(queue);
- CAST_CL(_keys)->unmap(queue);
- CAST_CL(_values)->unmap(queue);
- CAST_CL(_output)->unmap(queue);
- CAST_CL(_hits)->unmap(queue);
- }
-}
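
The core of the deleted hashtable lookup is a key-to-row map plus a per-lookup hit flag. A standalone sketch (plain C++; names are illustrative) that also shows why hits must be indexed by lookup position rather than by the resolved row:

    #include <cassert>
    #include <cstdint>
    #include <map>
    #include <vector>

    int main()
    {
      // Build the key -> row-index map once, then resolve each lookup; a miss
      // yields hit = 0 and (in the deleted layer) a zero-filled output row.
      const std::vector<int32_t> keys{10, 20, 30};
      const std::vector<int32_t> lookups{20, 99};

      std::map<int32_t, size_t> key_map;
      for (size_t i = 0; i < keys.size(); ++i)
        key_map[keys[i]] = i;

      std::vector<int32_t> indices(lookups.size(), -1);
      std::vector<uint8_t> hits(lookups.size(), 0);
      for (size_t i = 0; i < lookups.size(); ++i)
      {
        const auto it = key_map.find(lookups[i]);
        if (it != key_map.end())
        {
          indices[i] = static_cast<int32_t>(it->second);
          hits[i] = 1;
        }
      }

      assert(indices[0] == 1 && hits[0] == 1);  // key 20 -> row 1
      assert(indices[1] == -1 && hits[1] == 0); // key 99 is missing
      return 0;
    }
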
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.h
deleted file mode 100644
index ba9d2ec0d..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleHashtableLookupLayer.h
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_HASHTABLE_LOOKUP_H__
-#define __SIMPLE_HASHTABLE_LOOKUP_H__
-
-#include "internal/arm_compute.h"
-
-class SimpleHashtableLookupLayer : public ::arm_compute::IFunction
-{
-public:
- SimpleHashtableLookupLayer(void)
- : _lookups(nullptr), _keys(nullptr), _values(nullptr), _output(nullptr), _hits(nullptr)
- {
- // DO NOTHING
- }
-
- void configure(::arm_compute::ITensor *lookups, ::arm_compute::ITensor *keys,
- ::arm_compute::ITensor *values, ::arm_compute::ITensor *output,
- ::arm_compute::ITensor *hits);
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_lookups;
- ::arm_compute::ITensor *_keys;
- ::arm_compute::ITensor *_values;
- ::arm_compute::ITensor *_output;
- ::arm_compute::ITensor *_hits;
- std::vector<int32_t> _lookup_indices;
-};
-
-#endif /*__SIMPLE_HASHTABLE_LOOKUP_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.cc
deleted file mode 100644
index d3943ad40..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.cc
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleNeg.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleNeg::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output)
-{
- _input = input;
- _output = output;
-}
-
-void SimpleNeg::run()
-{
- auto &queue = ::arm_compute::CLScheduler::get().queue();
- if (::internal::arm_compute::isGpuMode())
- {
- CAST_CL(_input)->map(queue);
- CAST_CL(_output)->map(queue);
- }
-
- arm_compute::Window window;
- window.use_tensor_dimensions(_output->info()->tensor_shape());
-
- execute_window_loop(window, [this](const arm_compute::Coordinates &id) {
- // NOTE The input and output tensors must have the same type.
- assert(_input->info()->data_type() == _output->info()->data_type());
-
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::F32:
- {
- const auto input_value = *reinterpret_cast<float *>(_input->ptr_to_element(id));
- *reinterpret_cast<float *>(_output->ptr_to_element(id)) = -input_value;
- break;
- }
- case ::arm_compute::DataType::S32:
- {
- const auto input_value = *reinterpret_cast<int32_t *>(_input->ptr_to_element(id));
- *reinterpret_cast<int32_t *>(_output->ptr_to_element(id)) = -input_value;
- break;
- }
- case ::arm_compute::DataType::U32:
- {
- const auto input_value = *reinterpret_cast<uint32_t *>(_input->ptr_to_element(id));
- *reinterpret_cast<uint32_t *>(_output->ptr_to_element(id)) = -input_value;
- break;
- }
- default:
- throw std::runtime_error("Not supported, yet");
- break;
- }
- });
-
- if (::internal::arm_compute::isGpuMode())
- {
- CAST_CL(_input)->unmap(queue);
- CAST_CL(_output)->unmap(queue);
- }
-}
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.h
deleted file mode 100644
index 4ca88e7f8..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleNeg.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_NEG_H__
-#define __SIMPLE_NEG_H__
-
-#include "internal/arm_compute.h"
-
-class SimpleNeg : public ::arm_compute::IFunction
-{
-public:
- SimpleNeg(void) : _input(nullptr), _output(nullptr)
- {
- // DO NOTHING
- }
-
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output);
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
-};
-
-#endif /*__SIMPLE_NEG_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.cc b/runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.cc
deleted file mode 100644
index 2a0a25f0c..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.cc
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "internal/arm_compute.h"
-#include "SimplePackLayer.h"
-
-void SimplePackLayer::configure(const std::vector<::arm_compute::ICLTensor *> &input_vector,
- ::arm_compute::ICLTensor *output, int32_t axis)
-{
- uint32_t nr_inputs = input_vector.size();
- uint32_t output_rank = output->info()->num_dimensions();
- const ::arm_compute::PermutationVector pv{1, 2, 0};
- _cl_permuted_vector.resize(nr_inputs);
- _cl_permute_vector.resize(nr_inputs);
-
- _output = output;
- // A negative axis implies axis from the end.
- // For example, axis = -1 implies the first axis from the end, i.e. axis = Rank - 1.
- // Similarly, axis = -2 implies the second axis from the end, i.e. axis = Rank - 2.
- if (axis < 0)
- {
- axis += output_rank;
- }
- _axis = ToARMComputeAxis(output_rank, axis).value();
- _cl_reshape_vector.resize(nr_inputs);
-
- ::arm_compute::TensorShape subTensor_shape{};
- for (int i = 0; i < output_rank; i++)
- {
- if (i != _axis)
- {
- subTensor_shape.set(i, _output->info()->tensor_shape()[i]);
- }
- else
- {
- subTensor_shape.set(i, 1);
- }
- }
-
- auto subTensor_offset = ::arm_compute::Coordinates{};
- subTensor_offset.set_num_dimensions(output_rank);
-
- for (int i = 0; i < input_vector.size(); i++)
- {
- _input_vector.push_back(input_vector[i]);
- subTensor_offset[_axis] = i;
- auto temp_tensor = std::make_shared<::arm_compute::CLSubTensor>(
- CAST_CL(_output), subTensor_shape, subTensor_offset, true);
- _sub_tensor_vector.push_back(temp_tensor);
- // Configure a reshape of each input tensor into its offset sub-tensor; dimension
- // expansion is handled automatically.
- _cl_permute_vector[i].configure(CAST_CL(_input_vector[i]), &_cl_permuted_vector[i], pv);
- _cl_reshape_vector[i].configure(&_cl_permuted_vector[i], _sub_tensor_vector[i].get());
- _cl_permuted_vector[i].allocator()->allocate();
- }
-}
-
-void SimplePackLayer::run(void)
-{
- for (int i = 0; i < _input_vector.size(); i++)
- {
- _cl_permute_vector[i].run();
- _cl_reshape_vector[i].run();
- }
-}
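
A standalone sketch of the negative-axis normalization performed in configure() above (plain C++; normalizeAxis() is a hypothetical helper):

    #include <cassert>

    // Normalize a possibly negative pack axis against the output rank,
    // mirroring the adjustment in the deleted configure().
    int normalizeAxis(int axis, int rank)
    {
      if (axis < 0)
        axis += rank;
      assert(axis >= 0 && axis < rank);
      return axis;
    }

    int main()
    {
      assert(normalizeAxis(-1, 4) == 3); // first axis from the end
      assert(normalizeAxis(-2, 4) == 2); // second axis from the end
      assert(normalizeAxis(1, 4) == 1);  // non-negative axes pass through
      return 0;
    }
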
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.h b/runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.h
deleted file mode 100644
index 2c2fc37f2..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimplePackLayer.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __SIMPLE_PACK_LAYER_H__
-#define __SIMPLE_PACK_LAYER_H__
-
-#include <arm_compute/runtime/CL/CLTensor.h>
-#include <arm_compute/runtime/CL/CLSubTensor.h>
-#include <arm_compute/runtime/CL/functions/CLReshapeLayer.h>
-#include <arm_compute/runtime/CL/functions/CLPermute.h>
-
-class SimplePackLayer : public ::arm_compute::IFunction
-{
-public:
- SimplePackLayer(void)
- : _cl_permuted_vector{}, _input_vector{}, _sub_tensor_vector{}, _cl_reshape_vector{},
- _cl_permute_vector{}, _output(nullptr), _axis(0)
- {
- // DO NOTHING
- }
-
-public:
- void configure(const std::vector<::arm_compute::ICLTensor *> &input_vector,
- ::arm_compute::ICLTensor *output, int32_t axis);
-
-public:
- void run(void) override;
-
-private:
- std::vector<::arm_compute::CLTensor> _cl_permuted_vector;
- std::vector<::arm_compute::ICLTensor *> _input_vector;
- std::vector<std::shared_ptr<::arm_compute::CLSubTensor>> _sub_tensor_vector;
- std::vector<::arm_compute::CLReshapeLayer> _cl_reshape_vector;
- std::vector<::arm_compute::CLPermute> _cl_permute_vector;
- ::arm_compute::ICLTensor *_output;
- int _axis;
-};
-
-#endif // __SIMPLE_PACK_LAYER_H__
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.cc b/runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.cc
deleted file mode 100644
index 64236603f..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.cc
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimplePadLayer.h"
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-namespace
-{
-bool validate_arg(const ::arm_compute::ITensor *input, const ::arm_compute::ITensor *output,
- const ::arm_compute::ITensor *padding_size,
- const ::arm_compute::Coordinates &axises)
-{
- const int input_batch = input->info()->tensor_shape()[axises[0]];
- const int input_height = input->info()->tensor_shape()[axises[1]];
- const int input_width = input->info()->tensor_shape()[axises[2]];
- const int input_depth = input->info()->tensor_shape()[axises[3]];
-
- const int output_batch = output->info()->tensor_shape()[axises[0]];
- const int output_height = output->info()->tensor_shape()[axises[1]];
- const int output_width = output->info()->tensor_shape()[axises[2]];
- const int output_depth = output->info()->tensor_shape()[axises[3]];
-
- auto pad_batch_up = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 0}));
- auto pad_batch_down = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({1, 0}));
- auto pad_height_top = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 1}));
- auto pad_height_bottom = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({1, 1}));
- auto pad_width_left = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 2}));
- auto pad_width_right = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({1, 2}));
- auto pad_depth_front = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 3}));
- auto pad_depth_back = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({1, 3}));
-
- const int padded_batch = input_batch + pad_batch_up + pad_batch_down;
- const int padded_height = input_height + pad_height_top + pad_height_bottom;
- const int padded_width = input_width + pad_width_left + pad_width_right;
- const int padded_depth = input_depth + pad_depth_front + pad_depth_back;
-
- return (padded_batch == output_batch) && (padded_height == output_height) &&
- (padded_width == output_width) && (padded_depth == output_depth);
-}
-} // namespace
-
-void SimplePadLayer::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- ::arm_compute::ITensor *padding_size,
- const ::arm_compute::Coordinates &axises)
-{
-
- const auto rank = axises.num_dimensions();
- assert(rank == 4);
- assert(input != nullptr && output != nullptr && padding_size != nullptr);
-
- for (int i = 0; i < rank; ++i)
- {
- assert(axises[i] >= 0);
- assert(axises[i] < rank);
- }
-
- _input = input;
- _output = output;
- _padding_size = padding_size;
- _axises = axises;
-}
-
-template <typename T>
-inline void ApplyPadding(const ::arm_compute::ITensor *input_data,
- const ::arm_compute::TensorShape &input_shape,
- const ::arm_compute::ITensor *padding_size,
- ::arm_compute::ITensor *output_data,
- const ::arm_compute::TensorShape &output_shape,
- const ::arm_compute::Coordinates &axises, T zero_value)
-{
-
- assert(validate_arg(input_data, output_data, padding_size, axises) &&
- "Padded Input shape does not match to output shape");
-
- const int input_batch = input_shape[axises[0]];
- const int input_height = input_shape[axises[1]];
- const int input_width = input_shape[axises[2]];
- const int input_depth = input_shape[axises[3]];
-
- const int output_batch = output_shape[axises[0]];
- const int output_height = output_shape[axises[1]];
- const int output_width = output_shape[axises[2]];
- const int output_depth = output_shape[axises[3]];
-
- // Only the leading padding sizes (batch up, height top, width left, depth front) are needed here.
- auto pad_batch_up = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 0}));
- auto pad_height_top = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 1}));
- auto pad_width_left = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 2}));
- auto pad_depth_front = *reinterpret_cast<const int32_t *>(padding_size->ptr_to_element({0, 3}));
-
- for (int out_b = 0; out_b < output_batch; ++out_b)
- {
- for (int out_h = 0; out_h < output_height; ++out_h)
- {
- for (int out_w = 0; out_w < output_width; ++out_w)
- {
- for (int out_d = 0; out_d < output_depth; ++out_d)
- {
- auto output_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{out_b, out_h, out_w, out_d}, axises);
-
- if (out_b < pad_batch_up || out_b >= (input_batch + pad_batch_up) ||
- out_h < pad_height_top || out_h >= (input_height + pad_height_top) ||
- out_w < pad_width_left || out_w >= (input_width + pad_width_left) ||
- out_d < pad_depth_front || out_d >= (input_depth + pad_depth_front))
- {
- *reinterpret_cast<T *>(output_data->ptr_to_element(output_id)) = zero_value;
- }
- else
- {
- auto input_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{out_b - pad_batch_up, out_h - pad_height_top,
- out_w - pad_width_left, out_d - pad_depth_front},
- axises);
- *reinterpret_cast<T *>(output_data->ptr_to_element(output_id)) =
- *reinterpret_cast<T *>(input_data->ptr_to_element(input_id));
- }
- }
- }
- }
- }
-}
-void SimplePadLayer::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->map(q);
- CAST_CL(_output)->map(q);
- CAST_CL(_padding_size)->map(q);
- }
-
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::U8:
- case ::arm_compute::DataType::QASYMM8:
- ApplyPadding<uint8_t>(_input, _input->info()->tensor_shape(), _padding_size, _output,
- _output->info()->tensor_shape(), _axises,
- _input->info()->quantization_info().offset);
- break;
- case ::arm_compute::DataType::F32:
- ApplyPadding<float>(_input, _input->info()->tensor_shape(), _padding_size, _output,
- _output->info()->tensor_shape(), _axises, 0.0f);
- break;
- default:
- ARM_COMPUTE_ERROR("DataType not supported");
- break;
- }
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->unmap(q);
- CAST_CL(_output)->unmap(q);
- CAST_CL(_padding_size)->unmap(q);
- }
-}
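
The shape check in validate_arg() and the border test in the copy loop reduce to simple arithmetic. A standalone sketch with made-up sizes (plain C++):

    #include <cassert>

    int main()
    {
      // The output extent along each axis must equal the input extent plus the
      // leading and trailing padding -- the condition validate_arg() enforced.
      const int input_height = 4, pad_top = 1, pad_bottom = 2;
      const int output_height = 7;
      assert(output_height == input_height + pad_top + pad_bottom);

      // An output row either lies in the padded border (written with the
      // zero/offset value) or maps back to input row out_h - pad_top.
      const int out_h = 0;
      const bool in_border = out_h < pad_top || out_h >= input_height + pad_top;
      assert(in_border); // row 0 is border, so it receives the fill value
      return 0;
    }
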
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.h b/runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.h
deleted file mode 100644
index 8cb6659ce..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimplePadLayer.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_PAD_LAYER_H__
-#define __SIMPLE_PAD_LAYER_H__
-
-#include "internal/arm_compute.h"
-#include "internal/arm_compute/Cast.h"
-
-class SimplePadLayer : public ::arm_compute::IFunction
-{
-public:
- SimplePadLayer(void) : _input(nullptr), _output(nullptr), _padding_size(nullptr), _axises{}
- {
- // DO NOTHING
- }
-
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- ::arm_compute::ITensor *padding_size,
- const ::arm_compute::Coordinates &axises = getARMComputeAxises(4));
-
- void run(void) override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
- ::arm_compute::ITensor *_padding_size;
- ::arm_compute::Coordinates _axises;
-};
-
-#endif // __SIMPLE_PAD_LAYER_H__
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.cc
deleted file mode 100644
index b5b3a0950..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.cc
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleSQRT.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleSQRT::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output)
-{
- _input = input;
- _output = output;
-}
-
-void SimpleSQRT::run()
-{
- auto &queue = ::arm_compute::CLScheduler::get().queue();
- if (::internal::arm_compute::isGpuMode())
- {
- CAST_CL(_input)->map(queue);
- CAST_CL(_output)->map(queue);
- }
-
- arm_compute::Window window;
- window.use_tensor_dimensions(_output->info()->tensor_shape());
-
- execute_window_loop(window, [this](const arm_compute::Coordinates &id) {
- // NOTE The input and output tensors must have the same data type.
- assert(_input->info()->data_type() == _output->info()->data_type());
-
- const auto input_value = *reinterpret_cast<float *>(_input->ptr_to_element(id));
- *reinterpret_cast<float *>(_output->ptr_to_element(id)) = sqrt(input_value);
- });
-
- if (::internal::arm_compute::isGpuMode())
- {
- CAST_CL(_input)->unmap(queue);
- CAST_CL(_output)->unmap(queue);
- }
-}
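For reference, the execute_window_loop above amounts to a plain elementwise map. A minimal sketch, with flat arrays standing in for the mapped CL tensors (the function name is an assumption):

// Hedged sketch of the elementwise pattern run() uses: visit every
// element of the output shape and apply sqrt.
#include <cmath>
#include <cstdio>
#include <vector>

void ElementwiseSqrt(const std::vector<float> &in, std::vector<float> &out)
{
  for (size_t i = 0; i < in.size(); ++i)
    out[i] = std::sqrt(in[i]); // same type in and out, as the assert demands
}

int main()
{
  std::vector<float> in{1.f, 4.f, 9.f}, out(3);
  ElementwiseSqrt(in, out);
  for (float v : out) std::printf("%g ", v); // 1 2 3
  std::printf("\n");
}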
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.h
deleted file mode 100644
index b05a9e32e..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleSQRT.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_SQRT_H__
-#define __SIMPLE_SQRT_H__
-
-#include "internal/arm_compute.h"
-
-class SimpleSQRT : public ::arm_compute::IFunction
-{
-public:
- SimpleSQRT(void) : _input(nullptr), _output(nullptr)
- {
- // DO NOTHING
- }
-
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output);
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
-};
-
-#endif /*__SIMPLE_SQRT_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.cc
deleted file mode 100644
index f53675b99..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.cc
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleSpaceToBatchND.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleSpaceToBatchND::configure(::arm_compute::ITensor *input,
- ::arm_compute::ITensor *block_size,
- ::arm_compute::ITensor *padding_size,
- ::arm_compute::ITensor *output)
-{
- const auto rank = input->info()->num_dimensions();
- assert(rank == 4);
-
- _input = input;
- _block_size = block_size;
- _padding_size = padding_size;
- _output = output;
-}
-
-template <typename T>
-inline void
-SpaceToBatchND(const ::arm_compute::ITensor *input, const ::arm_compute::TensorShape &input_shape,
- const ::arm_compute::ITensor *block_size, const ::arm_compute::ITensor *padding_size,
- const ::arm_compute::ITensor *output, const ::arm_compute::TensorShape &output_shape,
- T zero_value)
-{
- const int input_batch = input_shape[3];
- const int input_height = input_shape[1];
- const int input_width = input_shape[0];
-
- const int depth = output_shape[2];
-
- const int padding_height_left = *reinterpret_cast<int *>(padding_size->ptr_to_element({0, 1}));
- const int padding_height_right = *reinterpret_cast<int *>(padding_size->ptr_to_element({1, 1}));
- const int padding_width_left = *reinterpret_cast<int *>(padding_size->ptr_to_element({0, 0}));
- const int padding_width_right = *reinterpret_cast<int *>(padding_size->ptr_to_element({1, 0}));
- const int padded_height = input_height + padding_height_left + padding_height_right;
- const int padded_width = input_width + padding_width_left + padding_width_right;
-
- const int block_size_height = *reinterpret_cast<int *>(block_size->ptr_to_element({1}));
- const int block_size_width = *reinterpret_cast<int *>(block_size->ptr_to_element({0}));
-
- assert(padding_height_left >= 0);
- assert(padding_height_right >= 0);
- assert(padding_width_left >= 0);
- assert(padding_width_right >= 0);
- assert(block_size_height >= 1);
- assert(block_size_width >= 1);
- assert(padded_height % block_size_height == 0);
- assert(padded_width % block_size_width == 0);
- assert(output->info()->dimension(3) ==
- input->info()->dimension(3) * (block_size_height * block_size_width));
-
- for (int in_b = 0; in_b < input_batch; ++in_b)
- {
- for (int in_d = 0; in_d < depth; ++in_d)
- {
- for (int in_h = 0; in_h < padded_height; ++in_h)
- {
- for (int in_w = 0; in_w < padded_width; ++in_w)
- {
- const int out_d = in_d;
- const int out_h = in_h / block_size_height;
- const int out_w = in_w / block_size_width;
- const int out_b =
- in_b +
- ((in_h % block_size_height) * block_size_width + in_w % block_size_width) *
- input_batch;
-
- const ::arm_compute::Coordinates output_id{out_w, out_h, out_d, out_b};
-
- if (in_h < padding_height_left || in_h >= (input_height + padding_height_left) ||
- in_w < padding_width_left || in_w >= (input_width + padding_width_left))
- {
- *reinterpret_cast<T *>(output->ptr_to_element(output_id)) = zero_value;
- }
- else
- {
- const ::arm_compute::Coordinates input_id{in_w - padding_width_left,
- in_h - padding_height_left, in_d, in_b};
- *reinterpret_cast<T *>(output->ptr_to_element(output_id)) =
- *reinterpret_cast<T *>(input->ptr_to_element(input_id));
- }
- }
- }
- }
- }
-}
-void SimpleSpaceToBatchND::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->map(q);
- CAST_CL(_block_size)->map(q);
- CAST_CL(_padding_size)->map(q);
- CAST_CL(_output)->map(q);
- }
-
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::U8:
- case ::arm_compute::DataType::QASYMM8:
- SpaceToBatchND<uint8_t>(_input, _input->info()->tensor_shape(), _block_size, _padding_size,
- _output, _output->info()->tensor_shape(),
- _input->info()->quantization_info().offset);
- break;
- case ::arm_compute::DataType::F32:
- SpaceToBatchND<float>(_input, _input->info()->tensor_shape(), _block_size, _padding_size,
- _output, _output->info()->tensor_shape(), 0.0f);
- break;
- default:
- ARM_COMPUTE_ERROR("DataType not supported");
- break;
- }
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->unmap(q);
- CAST_CL(_block_size)->unmap(q);
- CAST_CL(_padding_size)->unmap(q);
- CAST_CL(_output)->unmap(q);
- }
-}
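The heart of the SpaceToBatchND kernel above is the batch-index formula: the offset of each element inside its block_height x block_width tile selects which of the block_height*block_width new batches it lands in. A hedged sketch of just that coordinate mapping (the struct and function names are illustrative):

// Every (block_h x block_w) tile of the padded spatial plane is spread
// across block_h*block_w output batches.
#include <cstdio>

struct Coord4 { int b, h, w, d; };

Coord4 SpaceToBatchCoord(int in_b, int in_h, int in_w, int in_d,
                         int block_h, int block_w, int input_batch)
{
  Coord4 out;
  out.d = in_d;
  out.h = in_h / block_h;
  out.w = in_w / block_w;
  // The offset of the element inside its block selects the output batch.
  out.b = in_b + ((in_h % block_h) * block_w + in_w % block_w) * input_batch;
  return out;
}

int main()
{
  // 2x2 blocks, single input batch: padded position (h=3, w=2) lands in
  // batch (1*2+0)*1 = 2, at spatial position (1, 1).
  Coord4 c = SpaceToBatchCoord(0, 3, 2, 0, 2, 2, 1);
  std::printf("b=%d h=%d w=%d d=%d\n", c.b, c.h, c.w, c.d); // b=2 h=1 w=1 d=0
}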
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.h
deleted file mode 100644
index 4af961d34..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToBatchND.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __SIMPLE_SPACE_TO_BATCHND_H__
-#define __SIMPLE_SPACE_TO_BATCHND_H__
-
-#include "internal/arm_compute.h"
-
-class SimpleSpaceToBatchND : public ::arm_compute::IFunction
-{
-public:
- SimpleSpaceToBatchND(void)
- : _input(nullptr), _block_size(nullptr), _padding_size(nullptr), _output(nullptr)
- {
- // DO NOTHING
- }
-
- /** Initialise input and output
- *
- * @param[in] input First tensor input.
- * @param[in] block_size Block size.
- * @param[in] padding_size Padding size.
- * @param[out] output Output tensor.
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *block_size,
- ::arm_compute::ITensor *padding_size, ::arm_compute::ITensor *output);
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_block_size;
- ::arm_compute::ITensor *_padding_size;
- ::arm_compute::ITensor *_output;
-};
-
-#endif /*__SIMPLE_SPACE_TO_BATCHND_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.cc
deleted file mode 100644
index 3519da1f3..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.cc
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleSpaceToDepth.h"
-
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleSpaceToDepth::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output,
- int32_t block_size, const ::arm_compute::Coordinates &axises)
-{
- const auto rank = axises.num_dimensions();
- assert(rank == 4);
- for (int i = 0; i < rank; ++i)
- {
- assert(axises[i] >= 0);
- assert(axises[i] < rank);
- }
-
- _input = input;
- _output = output;
- _block_size = block_size;
- _axises = axises;
-}
-
-template <typename T>
-inline void SpaceToDepth(const ::arm_compute::ITensor *input,
- const ::arm_compute::TensorShape &input_shape, int32_t block_size,
- ::arm_compute::ITensor *output,
- const ::arm_compute::TensorShape &output_shape,
- const ::arm_compute::Coordinates &axises)
-{
- const int input_batch = input_shape[axises[0]];
- const int input_height = input_shape[axises[1]];
- const int input_width = input_shape[axises[2]];
- const int input_depth = input_shape[axises[3]];
-
- for (int in_b = 0; in_b < input_batch; ++in_b)
- {
- for (int in_h = 0; in_h < input_height; ++in_h)
- {
- for (int in_w = 0; in_w < input_width; ++in_w)
- {
- for (int in_d = 0; in_d < input_depth; ++in_d)
- {
- const int out_b = in_b;
- const int out_h = in_h / block_size;
- const int out_w = in_w / block_size;
- const int out_d =
- in_d + ((in_h % block_size) * block_size + in_w % block_size) * input_depth;
-
- auto input_id =
- asARMComputeCoordinates(::arm_compute::Coordinates{in_b, in_h, in_w, in_d}, axises);
- auto output_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{out_b, out_h, out_w, out_d}, axises);
-
- *reinterpret_cast<T *>(output->ptr_to_element(output_id)) =
- *reinterpret_cast<T *>(input->ptr_to_element(input_id));
- }
- }
- }
- }
-}
-
-void SimpleSpaceToDepth::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->map(q);
- CAST_CL(_output)->map(q);
- }
-
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::U8:
- case ::arm_compute::DataType::QASYMM8:
- SpaceToDepth<uint8_t>(_input, _input->info()->tensor_shape(), _block_size, _output,
- _output->info()->tensor_shape(), _axises);
- break;
- case ::arm_compute::DataType::F32:
- SpaceToDepth<float>(_input, _input->info()->tensor_shape(), _block_size, _output,
- _output->info()->tensor_shape(), _axises);
- break;
- default:
- ARM_COMPUTE_ERROR("DataType not supported");
- break;
- }
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->unmap(q);
- CAST_CL(_output)->unmap(q);
- }
-}
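SpaceToDepth above uses the same trick along the channel axis: the in-tile offset selects one of block_size^2 channel groups. A minimal sketch of the mapping, with illustrative names:

// Each block_size x block_size spatial tile is folded into channels.
#include <cstdio>

struct Coord4 { int b, h, w, d; };

Coord4 SpaceToDepthCoord(int in_b, int in_h, int in_w, int in_d,
                         int block_size, int input_depth)
{
  Coord4 out;
  out.b = in_b;
  out.h = in_h / block_size;
  out.w = in_w / block_size;
  // Position inside the tile picks one of block_size^2 channel groups.
  out.d = in_d + ((in_h % block_size) * block_size + in_w % block_size) * input_depth;
  return out;
}

int main()
{
  // block_size=2, depth=3: input (h=1, w=0, d=2) maps to (h=0, w=0, d=8).
  Coord4 c = SpaceToDepthCoord(0, 1, 0, 2, 2, 3);
  std::printf("b=%d h=%d w=%d d=%d\n", c.b, c.h, c.w, c.d);
}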
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.h
deleted file mode 100644
index 9e87c364c..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleSpaceToDepth.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file        SimpleSpaceToDepth.h
- * @brief       This file contains SimpleSpaceToDepth class
- * @ingroup     COM_AI_RUNTIME
- */
-
-#ifndef __SIMPLE_SPACE_TO_DEPTH_H__
-#define __SIMPLE_SPACE_TO_DEPTH_H__
-
-#include "internal/arm_compute.h"
-#include "internal/arm_compute/Cast.h"
-
-/**
- * @brief Class to run SimpleSpaceToDepth Layer
- */
-class SimpleSpaceToDepth : public ::arm_compute::IFunction
-{
-public:
- SimpleSpaceToDepth(void) : _input(nullptr), _output(nullptr), _block_size(0), _axises{}
- {
- // DO NOTHING
- }
-
- /**
- * @brief Configure the layer
- * @param[in] input First tensor input.
- * @param[in] output Output tensor.
- * @param[in] block_size Block size.
- * @param[in] axises Axises of rank 4
- * @return N/A
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *output, int32_t block_size,
- const ::arm_compute::Coordinates &axises = getARMComputeAxises(4));
-
- /**
- * @brief Run the operation. Must be called after configure().
- * @return N/A
- */
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_output;
- int32_t _block_size;
- ::arm_compute::Coordinates _axises;
-};
-
-#endif /*__SIMPLE_SPACE_TO_DEPTH_H__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.cc
deleted file mode 100644
index abc291289..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.cc
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "internal/layers/SimpleTransposeConv.h"
-#include <arm_compute/runtime/CL/CLScheduler.h>
-
-void SimpleTransposeConv::configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *weights,
- ::arm_compute::ITensor *output,
- ::arm_compute::PadStrideInfo &tconv_info,
- ::arm_compute::Coordinates axises)
-{
- auto rank = axises.num_dimensions();
-
- assert(rank == 4);
-
- _input = input;
- _weights = weights;
- _output = output;
- _stride_width = tconv_info.stride().first;
- _stride_height = tconv_info.stride().second;
- _pad_width = tconv_info.pad_left();
- _pad_height = tconv_info.pad_top();
- _axises = axises;
-}
-
-template <typename T>
-inline void ApplyTransposeConv(
- const ::arm_compute::TensorShape &input_shape, const ::arm_compute::ITensor *input_data,
- const ::arm_compute::TensorShape &filter_shape, const ::arm_compute::ITensor *filter_data,
- const ::arm_compute::TensorShape &output_shape, const ::arm_compute::ITensor *output_data,
- const int32_t stride_width, const int32_t stride_height, const int32_t pad_width,
- const int32_t pad_height, const ::arm_compute::Coordinates axises)
-{
- const int batches = input_shape[axises[0]];
- const int input_height = input_shape[axises[1]];
- const int input_width = input_shape[axises[2]];
- const int input_depth = input_shape[axises[3]];
-
- const int filter_height = filter_shape[axises[1]];
- const int filter_width = filter_shape[axises[2]];
-
- const int output_height = output_shape[axises[1]];
- const int output_width = output_shape[axises[2]];
- const int output_depth = output_shape[axises[3]];
-
- assert(batches == output_shape[axises[0]]);
- assert(input_depth == filter_shape[axises[3]]);
- assert(filter_shape[axises[0]] == output_depth);
-
- // Although transpose convolution simplifies to convolution with transposed
- // weights for strides of 1, non-unitary striding complicates matters. To
- // keep this reference implementation as clear as possible, we use a
- // "scatter" access pattern, where we loop through all the input elements,
- // computing their influence on the output, rather than looping through the
- // output elements in the typical "gather" access pattern of a conv. We
- // therefore must initialize the output array to zero.
-
- // Loop through input elements one at a time.
- for (int batch = 0; batch < batches; ++batch)
- {
- for (int in_y = 0; in_y < input_height; ++in_y)
- {
- for (int in_x = 0; in_x < input_width; ++in_x)
- {
- for (int in_channel = 0; in_channel < input_depth; ++in_channel)
- {
- // Loop through the output elements it will influence
- const int out_x_origin = (in_x * stride_width) - pad_width;
- const int out_y_origin = (in_y * stride_height) - pad_height;
- for (int filter_y = 0; filter_y < filter_height; ++filter_y)
- {
- for (int filter_x = 0; filter_x < filter_width; ++filter_x)
- {
- for (int out_channel = 0; out_channel < output_depth; ++out_channel)
- {
- // Compute output element location
- const int out_x = out_x_origin + filter_x;
- const int out_y = out_y_origin + filter_y;
- // We cannot accumulate out of bounds
- if ((out_x >= 0) && (out_x < output_width) && (out_y >= 0) &&
- (out_y < output_height))
- {
- auto input_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{batch, in_y, in_x, in_channel}, axises);
- auto filter_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{in_channel, filter_y, filter_x, out_channel},
- axises);
- auto output_id = asARMComputeCoordinates(
- ::arm_compute::Coordinates{batch, out_y, out_x, out_channel}, axises);
- T input_value = *reinterpret_cast<T *>(input_data->ptr_to_element(input_id));
- T filter_value = *reinterpret_cast<T *>(filter_data->ptr_to_element(filter_id));
- *reinterpret_cast<T *>(output_data->ptr_to_element(output_id)) +=
- input_value * filter_value;
- }
- }
- }
- }
- }
- }
- }
- }
-}
-
-void SimpleTransposeConv::run()
-{
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->map(q);
- CAST_CL(_weights)->map(q);
- CAST_CL(_output)->map(q);
- }
-
- switch (_input->info()->data_type())
- {
- case ::arm_compute::DataType::S32:
- ApplyTransposeConv<int32_t>(_input->info()->tensor_shape(), _input,
- _weights->info()->tensor_shape(), _weights,
- _output->info()->tensor_shape(), _output, _stride_width,
- _stride_height, _pad_width, _pad_height, _axises);
- break;
- case ::arm_compute::DataType::F32:
- ApplyTransposeConv<float>(_input->info()->tensor_shape(), _input,
- _weights->info()->tensor_shape(), _weights,
- _output->info()->tensor_shape(), _output, _stride_width,
- _stride_height, _pad_width, _pad_height, _axises);
- break;
- default:
- ARM_COMPUTE_ERROR("DataType not supported");
- break;
- }
-
- if (::internal::arm_compute::isGpuMode())
- {
- auto &q = ::arm_compute::CLScheduler::get().queue();
-
- CAST_CL(_input)->unmap(q);
- CAST_CL(_weights)->unmap(q);
- CAST_CL(_output)->unmap(q);
- }
-}
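The comment in the deleted kernel explains the "scatter" access pattern; the 1-D sketch below shows it in isolation: iterate over inputs, compute each one's output origin from the stride and padding, and accumulate into a zero-initialized output. Names and sizes here are illustrative assumptions, not the deleted kernel itself.

// Hedged 1-D sketch of the scatter-style transposed convolution.
#include <cstdio>
#include <vector>

std::vector<float> TransposeConv1D(const std::vector<float> &input,
                                   const std::vector<float> &filter,
                                   int stride, int pad, int output_size)
{
  std::vector<float> output(output_size, 0.0f); // scatter requires zero init
  for (int in_x = 0; in_x < static_cast<int>(input.size()); ++in_x)
  {
    const int out_x_origin = in_x * stride - pad;
    for (int f = 0; f < static_cast<int>(filter.size()); ++f)
    {
      const int out_x = out_x_origin + f;
      if (out_x >= 0 && out_x < output_size) // cannot accumulate out of bounds
        output[out_x] += input[in_x] * filter[f];
    }
  }
  return output;
}

int main()
{
  // Stride 2 upsamples: input {1, 2}, filter {1, 1, 1} -> {1, 1, 3, 2, 2}.
  auto out = TransposeConv1D({1, 2}, {1, 1, 1}, /*stride=*/2, /*pad=*/0, /*output_size=*/5);
  for (float v : out) std::printf("%g ", v);
  std::printf("\n");
}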
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.h
deleted file mode 100644
index c5519828b..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleTransposeConv.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __TRANSPOSE_CONV_EX__
-#define __TRANSPOSE_CONV_EX__
-
-#include "internal/arm_compute.h"
-#include "internal/arm_compute/Cast.h"
-
-class SimpleTransposeConv : public ::arm_compute::IFunction
-{
-public:
- SimpleTransposeConv()
- : _input(nullptr), _weights(nullptr), _output(nullptr), _stride_width(0), _stride_height(0),
- _pad_width(0), _pad_height(0)
- {
- // DO NOTHING
- }
-
- /** Initialise input and output
- *
- * @param[in] input First tensor input.
- * @param[in] weights Weights
- * @param[out] output Output tensor.
- * @param[in] tc_info Contains padding and policies to be used in the deconvolution,
- * this is described in @ref PadStrideInfo.
- * @param[in] axises Axises of rank 4
- */
- void configure(::arm_compute::ITensor *input, ::arm_compute::ITensor *weights,
- ::arm_compute::ITensor *output, ::arm_compute::PadStrideInfo &tconv_info,
- ::arm_compute::Coordinates axises = getARMComputeAxises(4));
-
- void run() override;
-
-private:
- ::arm_compute::ITensor *_input;
- ::arm_compute::ITensor *_weights;
- ::arm_compute::ITensor *_output;
- int32_t _stride_width;
- int32_t _stride_height;
- int32_t _pad_width;
- int32_t _pad_height;
- ::arm_compute::Coordinates _axises;
-};
-
-#endif /*__TRANSPOSE_CONV_EX__ */
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.cc b/runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.cc
deleted file mode 100644
index 910595a44..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.cc
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include "internal/arm_compute.h"
-#include "SimpleUnpackLayer.h"
-
-void SimpleUnpackLayer::configure(::arm_compute::ICLTensor *input,
- const std::vector<::arm_compute::ICLTensor *> &output_vector,
- int32_t axis)
-{
- uint32_t nr_outputs = output_vector.size();
- _cl_permuted_vector.resize(nr_outputs);
- _cl_permute_vector.resize(nr_outputs);
- uint32_t input_rank = input->info()->num_dimensions();
- const ::arm_compute::PermutationVector pv{2, 0, 1};
- _input = input;
- // Negative axis is supported; -1 means axis R-1, where R is the input rank
- if (axis < 0)
- {
- axis += input_rank;
- }
- _axis = ToARMComputeAxis(input_rank, axis).value();
- _cl_reshape_vector.resize(nr_outputs);
-
- ::arm_compute::TensorShape subTensor_shape{};
- for (int i = 0; i < input_rank; i++)
- {
- if (i != _axis)
- {
- subTensor_shape.set(i, _input->info()->tensor_shape()[i]);
- }
- else
- {
- subTensor_shape.set(i, 1);
- }
- }
-
- auto subTensor_offset = ::arm_compute::Coordinates{};
- subTensor_offset.set_num_dimensions(input_rank);
-
- for (int i = 0; i < output_vector.size(); i++)
- {
- _output_vector.push_back(output_vector[i]);
- subTensor_offset[_axis] = i;
- auto temp_tensor = std::make_shared<::arm_compute::CLSubTensor>(
- CAST_CL(_input), subTensor_shape, subTensor_offset, true);
- _sub_tensor_vector.push_back(temp_tensor);
- // Copies into the subtensor
- _cl_permute_vector[i].configure(_sub_tensor_vector[i].get(), &_cl_permuted_vector[i], pv);
- _cl_reshape_vector[i].configure(&_cl_permuted_vector[i], CAST_CL(_output_vector[i]));
- _cl_permuted_vector[i].allocator()->allocate();
- }
-}
-
-void SimpleUnpackLayer::run(void)
-{
- for (int i = 0; i < _output_vector.size(); i++)
- {
- _cl_permute_vector[i].run();
- _cl_reshape_vector[i].run();
- }
-}
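Behind the CLSubTensor/CLPermute/CLReshapeLayer plumbing, Unpack slices the input along one (possibly negative) axis into rank-reduced outputs. A hedged rank-2 sketch of that behavior, including the negative-axis normalization from configure() above; the Unpack2D name and flat layout are assumptions for illustration:

// Slice a rank-2 tensor along the given axis into rank-1 outputs.
#include <cstdio>
#include <vector>

std::vector<std::vector<float>> Unpack2D(const std::vector<float> &input,
                                         int rows, int cols, int axis)
{
  const int rank = 2;
  if (axis < 0) axis += rank; // -1 means the last axis
  std::vector<std::vector<float>> outputs;
  if (axis == 0) // one output per row
    for (int r = 0; r < rows; ++r)
      outputs.emplace_back(input.begin() + r * cols, input.begin() + (r + 1) * cols);
  else // axis == 1: one output per column
    for (int c = 0; c < cols; ++c)
    {
      std::vector<float> col;
      for (int r = 0; r < rows; ++r) col.push_back(input[r * cols + c]);
      outputs.push_back(col);
    }
  return outputs;
}

int main()
{
  auto outs = Unpack2D({1, 2, 3, 4, 5, 6}, 2, 3, /*axis=*/-2); // -2 -> 0
  for (auto &o : outs) { for (float v : o) std::printf("%g ", v); std::printf("\n"); }
}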
diff --git a/runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.h b/runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.h
deleted file mode 100644
index 52fc7513d..000000000
--- a/runtimes/pure_arm_compute/src/internal/layers/SimpleUnpackLayer.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef __UNPACK_LAYER_H__
-#define __UNPACK_LAYER_H__
-
-#include <arm_compute/runtime/CL/CLTensor.h>
-#include <arm_compute/runtime/CL/CLSubTensor.h>
-#include <arm_compute/runtime/CL/functions/CLReshapeLayer.h>
-#include <arm_compute/runtime/CL/functions/CLPermute.h>
-
-class SimpleUnpackLayer : public ::arm_compute::IFunction
-{
-public:
- SimpleUnpackLayer(void)
- : _cl_permuted_vector{}, _output_vector{}, _sub_tensor_vector{}, _cl_reshape_vector{},
- _cl_permute_vector{}, _input(nullptr), _axis(0)
- {
- // DO NOTHING
- }
-
-public:
- void configure(::arm_compute::ICLTensor *input,
- const std::vector<::arm_compute::ICLTensor *> &output_vector, int32_t axis);
-
-public:
- void run(void) override;
-
-private:
- std::vector<::arm_compute::CLTensor> _cl_permuted_vector;
- std::vector<::arm_compute::ICLTensor *> _output_vector;
- std::vector<std::shared_ptr<::arm_compute::CLSubTensor>> _sub_tensor_vector;
- std::vector<::arm_compute::CLReshapeLayer> _cl_reshape_vector;
- std::vector<::arm_compute::CLPermute> _cl_permute_vector;
- ::arm_compute::ICLTensor *_input;
- int32_t _axis;
-};
-
-#endif // __UNPACK_LAYER_H__
diff --git a/runtimes/pure_arm_compute/src/internal/nnapi/tensor/Reader.h b/runtimes/pure_arm_compute/src/internal/nnapi/tensor/Reader.h
index cc51db594..fc6d490da 100644
--- a/runtimes/pure_arm_compute/src/internal/nnapi/tensor/Reader.h
+++ b/runtimes/pure_arm_compute/src/internal/nnapi/tensor/Reader.h
@@ -49,7 +49,7 @@ public:
Reader(const ::nnfw::misc::tensor::Shape &shape, const T *ptr, size_t len)
: _shape{shape}, _ptr{ptr}
{
- assert(shape.element_nums() * sizeof(T) == len);
+ assert(shape.num_elements() * sizeof(T) == len);
initialize();
}
diff --git a/runtimes/pure_arm_compute/src/internal/nnapi/tensor/View.h b/runtimes/pure_arm_compute/src/internal/nnapi/tensor/View.h
index f8f297f97..4766851b9 100644
--- a/runtimes/pure_arm_compute/src/internal/nnapi/tensor/View.h
+++ b/runtimes/pure_arm_compute/src/internal/nnapi/tensor/View.h
@@ -47,7 +47,7 @@ public:
// NOTE The parameter len denotes the number of bytes.
View(const ::nnfw::misc::tensor::Shape &shape, T *ptr, size_t len) : _shape{shape}, _ptr{ptr}
{
- assert(shape.element_nums() * sizeof(T) == len);
+ assert(shape.num_elements() * sizeof(T) == len);
}
public:
diff --git a/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.cc b/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.cc
index f91f834d6..f4d1ca3c5 100644
--- a/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.cc
+++ b/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.cc
@@ -74,7 +74,7 @@ Param::Param(uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
// 6 -> Padding_bottom index
// 7 -> Stride (width) Index
 // 8 -> Stride (height) Index
- // 9 -> Depthwise Multipler
+ // 9 -> Depthwise Multiplier
// 10 -> Activation Index
ifm_index = inputs[0];
ker_index = inputs[1];
@@ -85,7 +85,7 @@ Param::Param(uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
padding_bottom_index = inputs[6];
hstride_index = inputs[7];
vstride_index = inputs[8];
- multipler_index = inputs[9];
+ multiplier_index = inputs[9];
activation_index = inputs[10];
}
@@ -109,7 +109,7 @@ Param::Param(uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
// 3 -> Padding Code (ANEURALNETWORKS_PADDING_SAME or ANEURALNETWORKS_PADDING_VALID) Index
// 4 -> Stride (width) Index
 // 5 -> Stride (height) Index
- // 6 -> Depthwise Multipler
+ // 6 -> Depthwise Multiplier
// 7 -> Activation Index
ifm_index = inputs[0];
ker_index = inputs[1];
@@ -117,7 +117,7 @@ Param::Param(uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
padding_index = inputs[3];
hstride_index = inputs[4];
vstride_index = inputs[5];
- multipler_index = inputs[6];
+ multiplier_index = inputs[6];
activation_index = inputs[7];
}
diff --git a/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.h b/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.h
index c63e30aae..01a9e48be 100644
--- a/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.h
+++ b/runtimes/pure_arm_compute/src/internal/op/DepthwiseConv2D.h
@@ -57,7 +57,7 @@ struct Param
int32_t padding_top_index; /**< Index of padding top */
int32_t padding_bottom_index; /**< Index of padding bottom */
- int32_t multipler_index; /**< Index of multipler */
+ int32_t multiplier_index; /**< Index of multiplier */
int32_t activation_index; /**< Index of activation */
/**
* @brief Construct as default
@@ -133,7 +133,7 @@ struct Param
int32_t vstride_index; /**< Index of vertical stride */
int32_t padding_index; /**< Index of padding */
- int32_t multipler_index; /**< Index of multipler */
+ int32_t multiplier_index; /**< Index of multiplier */
int32_t activation_index; /**< Index of activation */
/**
* @brief Construct as default
diff --git a/runtimes/pure_arm_compute/src/internal/op/Gather.cc b/runtimes/pure_arm_compute/src/internal/op/Gather.cc
index 6c0dbaf75..bc517d28c 100644
--- a/runtimes/pure_arm_compute/src/internal/op/Gather.cc
+++ b/runtimes/pure_arm_compute/src/internal/op/Gather.cc
@@ -53,11 +53,11 @@ Param::Param(uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
// Each input should be interpreted as follows:
//
- // 0 -> LHS Tensor Index
- // 1 -> RHS Tensor Index
+ // 0 -> input Tensor Index
+ // 1 -> indices Tensor Index
// 2 -> axis Index
- lhs_index = inputs[0];
- rhs_index = inputs[1];
+ ifm_index = inputs[0];
+ indices_index = inputs[1];
axis_index = inputs[2];
}
diff --git a/runtimes/pure_arm_compute/src/internal/op/Gather.h b/runtimes/pure_arm_compute/src/internal/op/Gather.h
index 4470236eb..d40794f99 100644
--- a/runtimes/pure_arm_compute/src/internal/op/Gather.h
+++ b/runtimes/pure_arm_compute/src/internal/op/Gather.h
@@ -43,9 +43,9 @@ struct Param
{
int32_t ofm_index; //!< index for output feature map
- int32_t lhs_index; //!< index for lhs tensor
- int32_t rhs_index; //!< index for rhs tensor
- int32_t axis_index; //!< index for axis
+ int32_t ifm_index; //!< index for ifm tensor
+ int32_t indices_index; //!< index for indices tensor
+ int32_t axis_index; //!< index for axis
/**
* @brief Default Constructor
diff --git a/runtimes/pure_arm_compute/src/internal/op/Split.cc b/runtimes/pure_arm_compute/src/internal/op/Split.cc
index cbd863fce..6457a106a 100644
--- a/runtimes/pure_arm_compute/src/internal/op/Split.cc
+++ b/runtimes/pure_arm_compute/src/internal/op/Split.cc
@@ -47,13 +47,16 @@ namespace Split
Param::Param(uint32_t inputCount, const uint32_t *inputs, uint32_t outputCount,
const uint32_t *outputs)
{
- assert(inputCount == 2);
+ assert(inputCount == 3);
// Each input should be interpreted as follows:
- // 0 -> A 0-D int32 tensor, indicating the dimension along which to split.
- // 1 -> An n-D tensor, specifying the tensor to be split.
- axis_index = inputs[0];
- ifm_index = inputs[1];
+ // 0 -> An n-D tensor, specifying the tensor to be split.
+ // 1 -> A 0-D int32 tensor, indicating the dimension along which to split.
+ // 2 -> A 0-D int32 tensor, indicating the number of outputs
+ //      (It can be ignored on pacl because pacl does not support dynamic tensor
+ //      shapes; it can be used for verification only.)
+ ifm_index = inputs[0];
+ axis_index = inputs[1];
// Each output should be interpreted as follow:
// [0, outputCount) -> An n-D tensor.
diff --git a/runtimes/pure_arm_compute/src/internal/op/Split.h b/runtimes/pure_arm_compute/src/internal/op/Split.h
index b2c6c2fd1..cb5f3eb2d 100644
--- a/runtimes/pure_arm_compute/src/internal/op/Split.h
+++ b/runtimes/pure_arm_compute/src/internal/op/Split.h
@@ -42,8 +42,8 @@ namespace Split
*/
struct Param
{
- int32_t axis_index; //!< index for axis
int32_t ifm_index; //!< index for input feature map
+ int32_t axis_index; //!< index for axis
std::vector<int32_t> ofm_indexes; //!< index for output feature map
diff --git a/runtimes/pure_arm_compute/src/logging.h b/runtimes/pure_arm_compute/src/logging.h
index 914b63057..447da03e2 100644
--- a/runtimes/pure_arm_compute/src/logging.h
+++ b/runtimes/pure_arm_compute/src/logging.h
@@ -41,7 +41,7 @@ public:
{
auto env = std::getenv("PURE_ARM_COMPUTE_LOG_ENABLE");
- if (env && std::atoi(env) > 0)
+ if (env && std::strtol(env, NULL, 0) > 0)
{
_enabled = true;
}
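The logging.h hunk replaces std::atoi with std::strtol using base 0, presumably because base 0 auto-detects decimal, hexadecimal ("0x") and octal ("0") prefixes, and strtol has defined behavior on overflow where atoi does not. A small illustration of the difference (the values in the comment are what the two parsers produce):

// Why std::strtol with base 0 is preferred over std::atoi here.
#include <cstdio>
#include <cstdlib>

int main()
{
  const char *values[] = {"1", "0x2", "010"};
  for (const char *v : values)
    std::printf("%s -> atoi=%d strtol=%ld\n", v, std::atoi(v), std::strtol(v, nullptr, 0));
  // "0x2" -> atoi=0, strtol=2 (hex); "010" -> atoi=10, strtol=8 (octal)
}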
diff --git a/runtimes/pure_arm_compute/src/model.cc b/runtimes/pure_arm_compute/src/model.cc
index 2c4120d7a..ddca589db 100644
--- a/runtimes/pure_arm_compute/src/model.cc
+++ b/runtimes/pure_arm_compute/src/model.cc
@@ -763,7 +763,7 @@ int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
break;
}
- case ANEURALNETWORKS_TENSORFLOW_MAX_EX:
+ case ANEURALNETWORKS_REDUCE_MAX_EX:
{
using internal::tflite::op::ReduceMax::Param;
using internal::tflite::op::ReduceMax::Node;
@@ -822,6 +822,18 @@ int ANeuralNetworksModel_addOperationEx(ANeuralNetworksModel *model,
break;
}
+ case ANEURALNETWORKS_LOGICAL_NOT_EX:
+ {
+ using internal::tflite::op::LogicalNot::Param;
+ using internal::tflite::op::LogicalNot::Node;
+
+ // Add 'operations'
+ auto &operations = model->deref().operations();
+
+ operations.emplace_back<Node>(Param{inputCount, inputs, outputCount, outputs});
+
+ break;
+ }
case ANEURALNETWORKS_RSQRT_EX:
{
using internal::tflite::op::RSQRT::Param;
diff --git a/scripts/command/build b/scripts/command/build
deleted file mode 100644
index 1e8b9b731..000000000
--- a/scripts/command/build
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-# NOTE 'run' sets NNFW_SCRIPT_PATH and invokes this script
-source "${NNFW_SCRIPT_PATH}/config/build.configuration"
-
-if [[ ! -d "${BUILD_ALIAS}" ]]; then
- echo "'${BUILD_ALIAS}' does not exist. Please run 'configure' first"
- exit 255
-fi
-
-# Set parallel build
-# TODO Use argument instead of environment variable
-HOST_OS=${HOST_OS:-linux}
-NPROCS=${NPROCS:-1}
-PARALLEL_BUILD=${PARALLEL_BUILD:-1}
-
-if [ "${PARALLEL_BUILD}" == "1" ]; then
- # Get number of processors (linux only for now)
- if [ "${HOST_OS}" == "linux" ]; then
- NPROCS="$(grep -c ^processor /proc/cpuinfo)"
- fi
-fi
-
-cd ${BUILD_ALIAS}
-make -j ${NPROCS} "$@"
diff --git a/scripts/command/build-acl b/scripts/command/build-acl
deleted file mode 100644
index a9957555b..000000000
--- a/scripts/command/build-acl
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/bin/bash
-
-# NOTE 'run' sets NNFW_SCRIPT_PATH and invokes this script
-source "${NNFW_SCRIPT_PATH}/config/build.configuration"
-
-# Set parallel build
-# TODO: use argument instead of environment variable and merge with build command
-TARGET_ARCH=${TARGET_ARCH:-armv7l}
-BUILD_TYPE=${BUILD_TYPE:-Debug}
-NPROCS=${NPROCS:-1}
-PARALLEL_BUILD=${PARALLEL_BUILD:-1}
-HOST_OS=${HOST_OS:-linux}
-TARGET_OS=${TARGET_OS:-linux}
-
-# make TARGET and TYPE to lowercase
-TARGET_ARCH_LC="$(echo ${TARGET_ARCH} | tr A-Z a-z)"
-BUILD_TYPE_LC="$(echo ${BUILD_TYPE} | tr A-Z a-z)"
-# we need base name 'arm` for all arm arch
-TARGET_ARCH_BASE="${TARGET_ARCH_LC}"
-
-if [ "${TARGET_ARCH_BASE}" == *"arm64"* ]; then
- # For now Android is the only option for arm64
- TARGET_ARCH_BASE=arm64
- TARGET_OS=android
-fi
-
-if [ "${PARALLEL_BUILD}" == "1" ]; then
- # Get number of processors (linux only for now)
- if [ "${HOST_OS}" == "linux" ]; then
- NPROCS="$(grep -c ^processor /proc/cpuinfo)"
- fi
-fi
-
-ACL_FOLDER="externals/acl"
-ACL_COMMAND="scons -j${NPROCS} neon=1 opencl=1 examples=0 embed_kernels=1 os=${TARGET_OS}"
-if [ "${TARGET_ARCH_LC}" == "armv7l" ]; then
- ACL_COMMAND+=" arch=armv7a"
- ACL_BUILD_OUT="armv7a-${TARGET_OS}"
-elif [ "${TARGET_ARCH_LC}" == "aarch64" ]; then
- ACL_COMMAND+=" arch=arm64-v8a"
- ACL_BUILD_OUT="arm64-v8a-${TARGET_OS}"
-elif [ "${TARGET_ARCH_BASE}" == "arm64" ]; then
- ACL_COMMAND+=" arch=arm64-v8a"
- ACL_BUILD_OUT="arm64-v8a-${TARGET_OS}"
-fi
-
-if [ "${TARGET_OS}" == "android" ]; then
- ACL_COMMAND+=" Werror=0"
- ANDROID_GNUSTL_PATH="${ROOTFS_ARM64}/bin:${ROOTFS_ARM64}/aarch64-linux-android/bin:$PATH"
-else
- ACL_COMMAND+=" Werror=1"
-fi
-
-if [ "${BUILD_TYPE_LC}" == "debug" ]; then
- ACL_COMMAND+=" debug=1 asserts=1"
-fi
-
-ACL_FOLDER_NAME="${ACL_BUILD_OUT}.${BUILD_TYPE_LC}"
-ACL_COMMAND+=" build_dir=${ACL_FOLDER_NAME}"
-ACL_FOLDER_BUILD="${ACL_FOLDER}/build/${ACL_FOLDER_NAME}"
-
-pushd ${ACL_FOLDER}
-if [ "${TARGET_OS}" == "android" ]; then
- CXX=clang++ CC=clang PATH=${ANDROID_GNUSTL_PATH} ${ACL_COMMAND}
-else
- ${ACL_COMMAND}
-fi
-popd
-
-WORK_FOLDER=${TARGET_ARCH_LC}-${TARGET_OS}.${BUILD_TYPE_LC}
-WORK_PATH=${NNFW_PROJECT_PATH}/${WORKSPACE_RPATH}/${WORK_FOLDER}
-INSTALL_PATH=${NNFW_INSTALL_PATH:-${WORK_PATH}/out}
-
-mkdir -vp ${INSTALL_PATH}/lib
-cp -v ${ACL_FOLDER_BUILD}/libarm_compute_core.so ${INSTALL_PATH}/lib/.
-cp -v ${ACL_FOLDER_BUILD}/libarm_compute_graph.so ${INSTALL_PATH}/lib/.
-cp -v ${ACL_FOLDER_BUILD}/libarm_compute.so ${INSTALL_PATH}/lib/.
diff --git a/scripts/command/build-docker b/scripts/command/build-docker
deleted file mode 100644
index 2da70be78..000000000
--- a/scripts/command/build-docker
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/bin/bash
-
-function Usage()
-{
- echo "Usage: $0 $(basename ${BASH_SOURCE[0]}) [OPTIONS]"
- echo ""
- echo "Options:"
- echo " --tizen Build docker image for tizen build"
- echo "Options can use as docker build option:"
- docker build --help
-}
-
-DOCKER_FILE_RPATH="docker/Dockerfile"
-DOCKER_BUILD_ARGS=()
-
-# Handle argument for this script
-# Set default docker image name, tag
-for i in "$@"
-do
- case $i in
- -h|--help|help)
- Usage
- exit 1
- ;;
- --tizen)
- DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker_tizen}
- DOCKER_FILE_RPATH="docker/Dockerfile_tizen"
- ;;
- esac
-done
-source "$NNFW_SCRIPT_PATH/config/image_name.configuration"
-DOCKER_BUILD_ARGS+=("-t" "${DOCKER_IMAGE_NAME}")
-
-# Argument for docker build commands
-for i in "$@"
-do
- case $i in
- -h|--help|help|--tizen)
- # Already handled argument
- ;;
- *)
- DOCKER_BUILD_ARGS+=($1)
- ;;
- esac
- shift
-done
-
-docker build --build-arg http_proxy="$http_proxy" \
- --build-arg https_proxy="$https_proxy" \
- ${DOCKER_BUILD_ARGS[@]} \
- - < $NNFW_SCRIPT_PATH/$DOCKER_FILE_RPATH
diff --git a/scripts/command/configure b/scripts/command/configure
deleted file mode 100644
index 3b654d677..000000000
--- a/scripts/command/configure
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/bash
-
-source "${NNFW_SCRIPT_PATH}/config/build.configuration"
-
-# Set target platform using environment variable
-# TODO: use argument instead of environment variable
-TARGET_ARCH=${TARGET_ARCH:-$(uname -p)}
-BUILD_TYPE=${BUILD_TYPE:-Debug}
-CROSS_BUILD=${CROSS_BUILD:-0}
-HOST_OS=${HOST_OS:-linux}
-TARGET_OS=${TARGET_OS:-linux}
-## TODO: fix obs build break
-OBS_BUILD=${OBS_BUILD:-OFF}
-COVERAGE_BUILD=${COVERAGE_BUILD:-0}
-BENCHMARK_ACL_BUILD=${BENCHMARK_ACL_BUILD:-0}
-OPTIONS=${OPTIONS:-}
-
-# make TARGET and TYPE to lowercase
-TARGET_ARCH_LC="$(echo ${TARGET_ARCH} | tr A-Z a-z)"
-BUILD_TYPE_LC="$(echo ${BUILD_TYPE} | tr A-Z a-z)"
-
-if [ "${TARGET_ARCH_LC}" == *"arm64"* ]; then
- # arm64 as target-arch comes from Android
- if [ ! -z ${ROOTFS_DIR} ]; then
- ROOTFS_ARM64="${ROOTFS_DIR}"
- export ROOTFS_ARM64
- fi
- # For now Android is the only option for arm64
- TARGET_OS=android
-elif [ "${TARGET_ARCH_LC}" == *"arm"* ]; then
- if [ ! -z ${ROOTFS_DIR} ] ; then
- ROOTFS_ARM="${ROOTFS_DIR}"
- export ROOTFS_ARM
- fi
-elif [ "${TARGET_ARCH_LC}" == *"aarch64"* ]; then
- # aarch64 as target-arch comes from all except for Android
- if [ ! -z ${ROOTFS_DIR} ] ; then
- ROOTFS_ARM64="${ROOTFS_DIR}"
- export ROOTFS_ARM64
- fi
-fi
-
-# Todo: we may set CROSS_BUILD=1 when ROOTFS_DIR is given
-# the toolchain file, only for cross build
-if [ "${CROSS_BUILD}" == "1" ]; then
- TOOLCHAIN_FILE="cmake/config/config_${TARGET_ARCH_LC}-${TARGET_OS}.cmake"
- OPTION_TOOLCHAIN="-DCMAKE_TOOLCHAIN_FILE=${TOOLCHAIN_FILE}"
-else
- OPTION_TOOLCHAIN=
-fi
-
-if [ "${COVERAGE_BUILD}" == "1" ]; then
- OPTIONS+=" -DCOVERAGE_BUILD=1"
-else
- OPTIONS+=" -DCOVERAGE_BUILD=0"
-fi
-
-if [ "${BENCHMARK_ACL_BUILD}" == "1" ]; then
- OPTIONS+=" -DBUILD_BENCHMARK_ACL=1"
-fi
-
-WORK_FOLDER=${TARGET_ARCH_LC}-${TARGET_OS}.${BUILD_TYPE_LC}
-WORK_PATH=${NNFW_PROJECT_PATH}/${WORKSPACE_RPATH}/${WORK_FOLDER}
-BUILD_PATH=${WORK_PATH}/obj
-INSTALL_PATH=${NNFW_INSTALL_PATH:-${WORK_PATH}/out}
-
-mkdir -vp ${BUILD_PATH}
-mkdir -vp ${INSTALL_PATH}
-rm -rf ${BUILD_ALIAS}
-rm -rf ${INSTALL_ALIAS}
-ln -s ${BUILD_PATH} ${BUILD_ALIAS}
-ln -s ${INSTALL_PATH} ${INSTALL_ALIAS}
-
-cd "${BUILD_PATH}"
-
-cmake \
- -B${BUILD_PATH} -H${NNFW_PROJECT_PATH} \
- -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} \
- -DCMAKE_BUILD_TYPE=${BUILD_TYPE_LC} -DTARGET_ARCH=${TARGET_ARCH_LC} \
- -DHOST_OS=${HOST_OS} \
- -DTARGET_OS=${TARGET_OS} \
- -DOBS_BUILD=${OBS_BUILD} \
- ${OPTION_TOOLCHAIN} \
- ${OPTIONS} \
- "${NNFW_PROJECT_PATH}" "$@"
diff --git a/scripts/command/docker-run b/scripts/command/docker-run
deleted file mode 100644
index ff0881967..000000000
--- a/scripts/command/docker-run
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-source "${NNFW_SCRIPT_PATH}/config/docker.configuration"
-
-docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME "$@"
-EXITCODE=$?
-
-docker_cleanup
-
-exit $EXITCODE
diff --git a/scripts/command/docker-run-user b/scripts/command/docker-run-user
deleted file mode 100644
index d5b74db39..000000000
--- a/scripts/command/docker-run-user
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-source "${NNFW_SCRIPT_PATH}/config/docker.configuration"
-
-DOCKER_RUN_OPTS+=" -u $(id -u):$(id -g)"
-
-docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME "$@"
-EXITCODE=$?
-
-exit $EXITCODE
diff --git a/scripts/command/docker-shell b/scripts/command/docker-shell
deleted file mode 100644
index 1f942cedb..000000000
--- a/scripts/command/docker-shell
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-source "${NNFW_SCRIPT_PATH}/config/docker.configuration"
-
-DOCKER_RUN_OPTS+=" -it"
-docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME "/bin/bash"
-EXITCODE=$?
-
-docker_cleanup
-
-exit $EXITCODE
diff --git a/scripts/command/docker_build_cross_arm_ubuntu.sh b/scripts/command/docker_build_cross_arm_ubuntu.sh
deleted file mode 100755
index 29f27260e..000000000
--- a/scripts/command/docker_build_cross_arm_ubuntu.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
-DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
-DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
-
-# prepare rootfs
-if [[ ! -d $ROOTFS_DIR ]]; then
- echo "cannot find rootfs"
- exit 1
-fi
-
-DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
-
-export DOCKER_ENV_VARS
-export DOCKER_VOLUMES
-
-pushd $NNFW_PROJECT_PATH
-
-if [ -n "$DOCKER_INTERACTIVE" ]; then
- source run docker-shell
-else
- CMD="export BENCHMARK_ACL_BUILD=1 && make acl && make && make install && make build_test_suite"
- source run docker-run-user bash -c "${CMD}"
-fi
-EXITCODE=$?
-
-popd
-
-exit $EXITCODE
diff --git a/scripts/command/docker_build_tizen_cross.sh b/scripts/command/docker_build_tizen_cross.sh
deleted file mode 100755
index 6d750b1a9..000000000
--- a/scripts/command/docker_build_tizen_cross.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-
-DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
-DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
-DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
-DOCKER_ENV_VARS+=" -e TARGET_OS=tizen"
-
-# prepare rootfs
-if [[ ! -d $ROOTFS_DIR ]]; then
- echo "cannot find rootfs"
- exit 1
-fi
-
-DOCKER_VOLUMES+=" -v $ROOTFS_DIR/:/opt/rootfs"
-
-export NNFW_PROJECT_PATH
-export NNFW_SCRIPT_PATH
-
-pushd $NNFW_PROJECT_PATH
-
-CMD="make && make install && make build_test_suite"
-source run docker-run-user bash -c "${CMD}"
-EXITCODE=$?
-
-popd
-
-exit $EXITCODE
diff --git a/scripts/command/docker_coverage_report.sh b/scripts/command/docker_coverage_report.sh
deleted file mode 100755
index e4d3a92aa..000000000
--- a/scripts/command/docker_coverage_report.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-DOCKER_ENV_VARS+=" -e SRC_PREFIX=${NNFW_PROJECT_PATH}"
-
-pushd ${NNFW_PROJECT_PATH} > /dev/null
-
-source run docker-run-user bash -c "./run gen_coverage_report.sh"
-
-popd > /dev/null
diff --git a/scripts/command/docker_cross_test_coverage_build.sh b/scripts/command/docker_cross_test_coverage_build.sh
deleted file mode 100755
index 8d8e10835..000000000
--- a/scripts/command/docker_cross_test_coverage_build.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
-DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
-DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
-DOCKER_ENV_VARS+=" -e COVERAGE_BUILD=1"
-
-# prepare rootfs
-if [[ ! -d $ROOTFS_DIR ]]; then
- echo "cannot find rootfs"
- exit 1
-fi
-
-DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
-
-pushd $NNFW_PROJECT_PATH > /dev/null
-
-CMD="make acl && make && make install && make build_coverage_suite"
-source run docker-run-user bash -c "${CMD}"
-
-popd > /dev/null
diff --git a/scripts/command/docker_env_neurun b/scripts/command/docker_env_neurun
deleted file mode 100644
index 53b3bbc83..000000000
--- a/scripts/command/docker_env_neurun
+++ /dev/null
@@ -1 +0,0 @@
-OPTIONS=-DBUILD_NEURUN=ON
diff --git a/scripts/command/docker_env_pureacl_tflite_benchmark_model b/scripts/command/docker_env_pureacl_tflite_benchmark_model
deleted file mode 100644
index b1c40e16a..000000000
--- a/scripts/command/docker_env_pureacl_tflite_benchmark_model
+++ /dev/null
@@ -1,2 +0,0 @@
-OPTIONS=-DBUILD_PURE_ARM_COMPUTE=ON -DBUILD_TFLITE_BENCHMARK_MODEL=ON
-BUILD_TYPE=Release
diff --git a/scripts/command/docker_gbs_build.sh b/scripts/command/docker_gbs_build.sh
deleted file mode 100755
index 047d9eff8..000000000
--- a/scripts/command/docker_gbs_build.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker_tizen}
-DOCKER_RPM_HOME=$NNFW_PROJECT_PATH/rpm
-
-if [ "${GBS_RPM_DIR}" == "" ];
-then
- GBS_RPM_DIR=$NNFW_PROJECT_PATH/Product/out/rpm
- mkdir -p ${GBS_RPM_DIR}
-fi
-
-DOCKER_VOLUMES+=" -v ${GBS_RPM_DIR}:${DOCKER_RPM_HOME}"
-
-export NNFW_PROJECT_PATH
-export NNFW_SCRIPT_PATH
-
-pushd $NNFW_PROJECT_PATH
-
-CMD="gbs -c ${NNFW_PROJECT_PATH}/scripts/config/gbs.conf build -A armv7l --profile=profile.tizen --clean --include-all --define '${GBS_DEFINE}' &&
- cp -rf /home/GBS-ROOT/local/repos/tizen/armv7l/RPMS/*.rpm ${DOCKER_RPM_HOME}/."
-source run docker-run bash -c "${CMD}"
-EXITCODE=$?
-
-popd
-
-exit $EXITCODE
diff --git a/scripts/command/docker_run_test.sh b/scripts/command/docker_run_test.sh
deleted file mode 100755
index dadd19f8a..000000000
--- a/scripts/command/docker_run_test.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-export DOCKER_ENV_VARS
-export DOCKER_VOLUMES
-
-pushd $NNFW_PROJECT_PATH
-
-if [ -n "$DOCKER_INTERACTIVE" ]; then
- source run docker-shell
-else
- CMD="make install"
- source run docker-run-user bash -c "${CMD}"
-fi
-EXITCODE=$?
-
-popd
-
-exit $EXITCODE
diff --git a/scripts/command/format-checker.sh b/scripts/command/format-checker.sh
deleted file mode 100755
index 2e947ec70..000000000
--- a/scripts/command/format-checker.sh
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/bin/bash
-
-INVALID_EXIT=0
-
-pushd () {
- command pushd "$@" > /dev/null
-}
-
-popd () {
- command popd "$@" > /dev/null
-}
-
-command_exists() {
- command -v $1 > /dev/null 2>&1
-}
-
-check_cpp_tool() {
- if ! command_exists clang-format-3.9; then
- echo "Error: clang-format-3.9 is not available."
- echo " Please install clang-format-3.9."
- exit 1
- fi
-}
-
-check_python_tool() {
- if ! command_exists yapf; then
- echo "Error: yapf is not available."
- echo " Please install yapf."
- exit 1
- fi
-}
-
-check_newline() {
- # Check all files (CMakeLists.txt, *.cl, ... not only for C++, Python)
- FILES_TO_CHECK=$(git ls-files)
- if [[ ${#FILES_TO_CHECK} -ne 0 ]]; then
- CRCHECK=$(file $FILES_TO_CHECK | grep 'with CR')
- fi
- FILES_TO_FIX=($(echo "$CRCHECK" | grep "with CRLF line" | cut -d':' -f1))
- for f in ${FILES_TO_FIX[@]}; do
- tr -d '\r' < $f > $f.fixed && cat $f.fixed > $f && rm $f.fixed
- done
- FILES_TO_FIX=($(echo "$CRCHECK" | grep "with CR line" | cut -d':' -f1))
- for f in ${FILES_TO_FIX[@]}; do
- tr '\r' '\n' < $f > $f.fixed && cat $f.fixed > $f && rm $f.fixed
- done
-}
-
-check_permission() {
- # Check all files except script
- FILES_TO_CHECK=()
- for NON_SCRIPT_FILE in $(git ls-files -- . ':!:run' ':!:scripts/git-hooks/*' ':!:*.sh' ':!:*.py'); do
- FILES_TO_CHECK+=("${NON_SCRIPT_FILE}")
- done
-
- if [[ ${#FILES_TO_CHECK} -eq 0 ]]; then
- return
- fi
- for FILE_TO_CHECK in ${FILES_TO_CHECK[@]}; do
- RESULT=$(stat -c '%A' ${FILE_TO_CHECK} | grep 'x')
- if [ "${RESULT}" != "" ]; then
- chmod a-x ${FILE_TO_CHECK}
- fi
- done
-}
-
-check_cpp_files() {
- DIRECTORIES_TO_BE_TESTED=$1
- DIRECTORIES_NOT_TO_BE_TESTED=$2
-
- # Check c++ files
- for TEST_DIR in ${DIRECTORIES_TO_BE_TESTED[@]}; do
- pushd $TEST_DIR
- CPP_FILES_TO_CHECK=$(git ls-files '*.h' '*.cpp' '*.cc' ':!:NeuralNetworks.h')
- ARR=($CPP_FILES_TO_CHECK)
- for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
- if [[ $s = $TEST_DIR* ]]; then
- skip=${s#$TEST_DIR/}/
- ARR=(${ARR[*]//$skip*})
- fi
- done
- CPP_FILES_TO_CHECK=${ARR[*]}
- if [[ ${#CPP_FILES_TO_CHECK} -ne 0 ]]; then
- clang-format-3.9 -i $CPP_FILES_TO_CHECK
- EXIT_CODE=$?
- if [[ $EXIT_CODE -ne 0 ]]; then
- INVALID_EXIT=$EXIT_CODE
- fi
- fi
- popd
- done
-}
-
-check_python_files() {
- DIRECTORIES_TO_BE_TESTED=$1
- DIRECTORIES_NOT_TO_BE_TESTED=$2
-
- # Check python files
- for TEST_DIR in ${DIRECTORIES_TO_BE_TESTED[@]}; do
- pushd $TEST_DIR
- PYTHON_FILES_TO_CHECK=$(git ls-files '*.py')
- ARR=($PYTHON_FILES_TO_CHECK)
- for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
- if [[ $s = $TEST_DIR* ]]; then
- skip=${s#$TEST_DIR/}/
- ARR=(${ARR[*]//$skip*})
- fi
- done
- PYTHON_FILES_TO_CHECK=${ARR[*]}
- if [[ ${#PYTHON_FILES_TO_CHECK} -ne 0 ]]; then
- yapf -i --style='{based_on_style: pep8, column_limit: 90}' $PYTHON_FILES_TO_CHECK
- EXIT_CODE=$?
- if [[ $EXIT_CODE -ne 0 ]]; then
- INVALID_EXIT=$EXIT_CODE
- fi
- fi
- popd
- done
-}
-
-echo "Make sure commit all changes before running this checker."
-
-__Check_CPP=${CHECK_CPP:-"1"}
-__Check_PYTHON=${CHECK_PYTHON:-"1"}
-
-DIRECTORIES_TO_BE_TESTED=()
-DIRECTORIES_NOT_TO_BE_TESTED=()
-
-for DIR_TO_BE_TESTED in $(find -name '.FORMATCHECKED' -exec dirname {} \;); do
- DIRECTORIES_TO_BE_TESTED+=("$DIR_TO_BE_TESTED")
-done
-
-for DIR_NOT_TO_BE_TESTED in $(find -name '.FORMATDENY' -exec dirname {} \;); do
- DIRECTORIES_NOT_TO_BE_TESTED+=("$DIR_NOT_TO_BE_TESTED")
-done
-
-if [[ ${#DIRECTORIES_TO_BE_TESTED[@]} -eq 0 ]]; then
- echo "No directories to be checked"
- exit 0
-fi
-
-check_newline
-check_permission
-
-if [[ $__Check_CPP -ne 0 ]]; then
- check_cpp_tool
- check_cpp_files $DIRECTORIES_TO_BE_TESTED $DIRECTORIES_NOT_TO_BE_TESTED
-fi
-
-if [[ $__Check_PYTHON -ne 0 ]]; then
- check_python_tool
- check_python_files $DIRECTORIES_TO_BE_TESTED $DIRECTORIES_NOT_TO_BE_TESTED
-fi
-
-git diff --ignore-submodules > format.patch
-PATCHFILE_SIZE=$(stat -c%s format.patch)
-
-if [[ -z "${CRCHECK}" ]] && [[ $PATCHFILE_SIZE -eq 0 ]] && [[ $INVALID_EXIT -eq 0 ]]; then
- echo "[PASSED] Format checker succeed."
- exit 0
-fi
-
-# Something went wrong
-
-if [[ ! -z "${CRCHECK}" ]]; then
- echo "[FAILED] Please use LF for newline for following files."
- echo "$CRCHECK"
-fi
-
-if [[ $PATCHFILE_SIZE -ne 0 ]]; then
- echo "[FAILED] Format checker failed and update code to follow convention."
- echo " You can find changes in format.patch"
-fi
-
-if [[ $INVALID_EXIT -ne 0 ]]; then
- echo "[[FAILED] Invalid format checker exit."
-fi
-exit 1
diff --git a/scripts/command/gen_coverage_report.sh b/scripts/command/gen_coverage_report.sh
deleted file mode 100755
index fa72717cf..000000000
--- a/scripts/command/gen_coverage_report.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-
-# This file is based on https://github.sec.samsung.net/STAR/nncc/pull/80
-
-SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-HOST_HOME=$SCRIPT_ROOT/../..
-
-LCOV_PATH=$(command -v lcov)
-GENHTML_PATH=$(command -v genhtml)
-
-
-SRC_PREFIX=${SRC_PREFIX:-${DOCKER_HOME}}
-
-if [[ -z "${LCOV_PATH}" ]]; then
- echo "ERROR: 'lcov' is not found"
- exit 255
-fi
-
-if [[ -z "${GENHTML_PATH}" ]]; then
- echo "ERROR: 'genhtml' is not found"
- exit 255
-fi
-
-OUTPUT_PATH="$1"
-
-if [[ -z "${OUTPUT_PATH}" ]]; then
- OUTPUT_PATH="$HOST_HOME/coverage"
-fi
-
-if [[ -e "${OUTPUT_PATH}" ]]; then
- echo "ERROR: '${OUTPUT_PATH}' already exists"
- exit 255
-fi
-
-mkdir -p "${OUTPUT_PATH}"
-
-RAW_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.raw.info"
-LIBS_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.libs.info"
-INCLUDE_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.include.info"
-RUNTIMES_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.runtimes.info"
-TOOLS_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.tools.info"
-FINAL_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.info"
-HTML_PATH="${OUTPUT_PATH}/html"
-COVERTURA_PATH="${OUTPUT_PATH}/nnfw_coverage.xml"
-
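-# Capture raw coverage, extract the per-component subsets, merge them, and render HTML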
-"${LCOV_PATH}" -c -d "${HOST_HOME}" -o "${RAW_COVERAGE_INFO_PATH}"
-"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${LIBS_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/libs/*"
-"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${INCLUDE_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/include/*"
-"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${RUNTIMES_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/runtimes/*"
-"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${TOOLS_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/tests/tools/*"
-"${LCOV_PATH}" -a "${LIBS_COVERAGE_INFO_PATH}" -a "${INCLUDE_COVERAGE_INFO_PATH}" \
- -a "${RUNTIMES_COVERAGE_INFO_PATH}" -a "${TOOLS_COVERAGE_INFO_PATH}" \
- -o "${FINAL_COVERAGE_INFO_PATH}"
-"${GENHTML_PATH}" "${FINAL_COVERAGE_INFO_PATH}" --output-directory "${HTML_PATH}" ${GENHTML_FLAG:-}
-
-tar -zcf "${OUTPUT_PATH}"/coverage_report.tar.gz "${HTML_PATH}"
-python ${HOST_HOME}/externals/lcov-to-cobertura-xml/lcov_cobertura.py "${FINAL_COVERAGE_INFO_PATH}" -o "${COVERTURA_PATH}"
diff --git a/scripts/command/install b/scripts/command/install
deleted file mode 100644
index a40268a50..000000000
--- a/scripts/command/install
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-source "${NNFW_SCRIPT_PATH}/config/build.configuration"
-
-if [[ ! -d "${BUILD_ALIAS}" ]]; then
- echo "'${BUILD_ALIAS}' does not exist. Please run 'configure' first"
- exit 255
-fi
-
-if [[ ! -d "${INSTALL_ALIAS}" ]]; then
- echo "'${INSTALL_ALIAS}' does not exist. Please run 'configure' first"
- exit 255
-fi
-
-cd ${BUILD_ALIAS}
-make install
diff --git a/scripts/command/nnfw_docker b/scripts/command/nnfw_docker
deleted file mode 100644
index 211124105..000000000
--- a/scripts/command/nnfw_docker
+++ /dev/null
@@ -1,14 +0,0 @@
-DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker}
-
-echo "Using docker image $DOCKER_IMAGE_NAME"
-if [[ "$(docker images -q $DOCKER_IMAGE_NAME 2> /dev/null)" == "" ]]; then
- echo "Need docker image: $DOCKER_IMAGE_NAME"
- exit 1
-fi
-
-DOCKER_HOME=/home/nnfw
-
-GIT_SSL_NO_VERIFY=1
-DOCKER_ENV_VARS+=" -e http_proxy"
-DOCKER_ENV_VARS+=" -e no_proxy"
-DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
diff --git a/scripts/command/nnfw_docker_tizen b/scripts/command/nnfw_docker_tizen
deleted file mode 100644
index f2fae9c97..000000000
--- a/scripts/command/nnfw_docker_tizen
+++ /dev/null
@@ -1,14 +0,0 @@
-DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker_tizen}
-
-echo "Using docker image $DOCKER_IMAGE_NAME"
-if [[ "$(docker images -q $DOCKER_IMAGE_NAME 2> /dev/null)" == "" ]]; then
- echo "Need docker image: $DOCKER_IMAGE_NAME"
- exit 1
-fi
-
-DOCKER_HOME=/home/nnfw
-
-GIT_SSL_NO_VERIFY=1
-DOCKER_ENV_VARS+=" -e http_proxy"
-DOCKER_ENV_VARS+=" -e no_proxy"
-DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
diff --git a/scripts/command/tizen_xu4_test.sh b/scripts/command/tizen_xu4_test.sh
deleted file mode 100755
index 38894df0f..000000000
--- a/scripts/command/tizen_xu4_test.sh
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/bin/bash
-
-SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-HOST_HOME=$SCRIPT_ROOT/../..
-if [ -z "$TEST_ROOT" ]; then
- TEST_ROOT=/opt/usr/nnfw-test
-fi
-
-function Usage()
-{
- echo "Usage: ./tizen_xu4_test.sh --rpm-dir=path/to/rpm-dir --unittest --verification"
- echo "Usage: ./tizen_xu4_test.sh --test-suite-path=path/to/test-suite.tar.gz --unittest --verification"
- echo "--rpm-dir : directory containing nnfw.rpm and nnfw-test.rpm"
- echo "--test-suite-path : filepath to test-suite.tar.gz"
- echo "--unittest : run unittest"
- echo "--verification : run verification"
- echo "--framework : run framework"
- echo "--gcov-dir : directory to save gcov files"
-}
-
-
-function prepare_rpm_test()
-{
- echo "======= Test with rpm packages(gbs build) ======="
- # clean up
- $SDB_CMD shell rm -rf $TEST_ROOT
- $SDB_CMD shell mkdir -p $TEST_ROOT
- # install nnfw nnfw-test rpms
- for file in $RPM_DIR/*
- do
- $SDB_CMD push $file $TEST_ROOT
- $SDB_CMD shell rpm -Uvh $TEST_ROOT/$(basename $file) --force --nodeps
- done
-
- # download tflite model files
- pushd $HOST_HOME
- tests/framework/run_test.sh --download=on
- tar -zcf cache.tar.gz tests/framework/cache
- $SDB_CMD push cache.tar.gz $TEST_ROOT/.
- rm -rf cache.tar.gz
- $SDB_CMD shell tar -zxf $TEST_ROOT/cache.tar.gz -C $TEST_ROOT
-}
-
-function prepare_suite_test()
-{
- echo "======= Test with test-suite(cross build) ======="
- # clean up
- $SDB_CMD shell rm -rf $TEST_ROOT
- $SDB_CMD shell mkdir -p $TEST_ROOT
-
- # install test-suite
- $SDB_CMD push $TEST_SUITE_PATH $TEST_ROOT/$(basename $TEST_SUITE_PATH)
- $SDB_CMD shell tar -zxf $TEST_ROOT/$(basename $TEST_SUITE_PATH) -C $TEST_ROOT
-
- # download tflite model files
- pushd $HOST_HOME
- tests/framework/run_test.sh --download=on
- tar -zcf cache.tar.gz tests/framework/cache
- $SDB_CMD push cache.tar.gz $TEST_ROOT/.
- rm -rf cache.tar.gz
- $SDB_CMD shell tar -zxf $TEST_ROOT/cache.tar.gz -C $TEST_ROOT
-}
-
-
-# Parse command argv
-for i in "$@"
-do
- case $i in
- -h|--help|help)
- Usage
- exit 1
- ;;
- --rpm-dir=*)
- RPM_DIR=${i#*=}
- ;;
- --test-suite-path=*)
- TEST_SUITE_PATH=${i#*=}
- ;;
- --unittest)
- UNITTEST=on
- ;;
- --verification)
- VERIFICATION=on
- ;;
- --framework)
- FRAMEWORK=on
- ;;
- --gcov-dir=*)
- GCOV_DIR=${i#*=}
- ;;
- esac
- shift
-done
-
-
-N=`sdb devices 2>/dev/null | wc -l`
-
-# exit if no device found
-if [[ $N -le 1 ]]; then
- echo "No device found."
- exit 1;
-fi
-
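-# The first line of 'sdb devices' output is a header, so the device count is N-1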
-NUM_DEV=$(($N-1))
-echo "device list"
-DEVICE_LIST=`sdb devices 2>/dev/null`
-echo "$DEVICE_LIST" | tail -n"$NUM_DEV"
-
-if [ -z "$SERIAL" ]; then
- SERIAL=`echo "$DEVICE_LIST" | tail -n1 | awk '{print $1}'`
-fi
-SDB_CMD="sdb -s $SERIAL "
-
-# root on, remount as rw
-$SDB_CMD root on
-$SDB_CMD shell mount -o rw,remount /
-
-SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-ROOT=$SCRIPT_ROOT/../
-
-if [ -z "$RPM_DIR" ] && [ -z "$TEST_SUITE_PATH" ]; then
- echo "Please provide --rpm-dir or --test-suite-path"
- exit 255
-fi
-
-if [ ! -z "$RPM_DIR" ]; then
- prepare_rpm_test
-else
- prepare_suite_test
-fi
-
-# run unittest
-if [ "$UNITTEST" == "on" ]; then
- $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --unittest --artifactpath=$TEST_ROOT
-fi
-
-# run framework test
-if [ "$FRAMEWORK" == "on" ]; then
- $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --frameworktest --artifactpath=$TEST_ROOT
-fi
-
-# run verification
-if [ "$VERIFICATION" == "on" ]; then
- $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --verification --artifactpath=$TEST_ROOT
-fi
-
-# pull gcov files
-if [ -n "$GCOV_DIR" ]; then
- $SDB_CMD shell 'rm -rf /home/gcov && mkdir -p /home/gcov'
- $SDB_CMD shell 'find / -type f \( -iname "*.gcda" -or -iname "*.gcno" \) -exec cp {} /home/gcov/. \;'
- $SDB_CMD shell 'cd /home/ && tar -zcvf gcov.tar.gz ./gcov '
- cd $GCOV_DIR
- sdb pull /home/gcov.tar.gz
- tar -zxvf gcov.tar.gz
-fi
diff --git a/scripts/config/docker.configuration b/scripts/config/docker.configuration
deleted file mode 100644
index 806ffb32e..000000000
--- a/scripts/config/docker.configuration
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-source "$NNFW_SCRIPT_PATH/config/image_name.configuration"
-
-echo "Using docker image $DOCKER_IMAGE_NAME"
-
-if [ -z "`docker images | grep $DOCKER_IMAGE_NAME`" ]; then
- echo "Need docker image!"
- exit 1
-fi
-
-HOST_PATH="$NNFW_PROJECT_PATH"
-DOCKER_PATH="$NNFW_PROJECT_PATH"
-
-export GIT_SSL_NO_VERIFY=1
-
-DOCKER_VOLUMES+=" -v $HOST_PATH:$DOCKER_PATH"
-
-if [[ ! -z $ENV_FILE ]]; then
- DOCKER_ENV_VARS+=" --env-file ${ENV_FILE} "
-fi
-
-DOCKER_ENV_VARS+=" -e http_proxy"
-DOCKER_ENV_VARS+=" -e no_proxy"
-DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
-DOCKER_ENV_VARS+=" -e EXTERNAL_DOWNLOAD_SERVER"
-
-DOCKER_RUN_OPTS="${DOCKER_OPTS}"
-DOCKER_RUN_OPTS+=" --rm"
-DOCKER_RUN_OPTS+=" -w $DOCKER_PATH"
-
-function docker_cleanup()
-{
- # Files newly created during docker run can end up with different ownership.
- # This may cause problems; for example, some jenkins slaves or developers
- # can't remove built files due to lack of permission.
- # To address this issue, change the owner of all files in the
- # nnfw tree to the owner of the tree.
- NNFW_OWNER_UID=$(stat -c "%u" $HOST_PATH)
- NNFW_OWNER_GID=$(stat -c "%g" $HOST_PATH)
-
- CMD="chown -R $NNFW_OWNER_UID:$NNFW_OWNER_GID $DOCKER_PATH"
- docker run $DOCKER_RUN_OPTS $DOCKER_ENV_VARS $DOCKER_VOLUMES $DOCKER_IMAGE_NAME $CMD
-}
diff --git a/scripts/config/gbs.conf b/scripts/config/gbs.conf
deleted file mode 100644
index af555c399..000000000
--- a/scripts/config/gbs.conf
+++ /dev/null
@@ -1,21 +0,0 @@
-[general]
-#Current profile name which should match a profile section name
-profile = profile.tizen
-
-[profile.tizen]
-user=obs_viewer
-passwdx = QlpoOTFBWSZTWWV18UwAAAKDgAAAkiCZgCAAMQZMQQDJ6jQwAvxdyRThQkGV18Uw
-obs = obs.tizen
-repos = repo.tizen_base,repo.tizen_mobile
-buildroot = /home/GBS-ROOT/
-
-[obs.tizen]
-url = http://api.tizen.org
-
-[repo.tizen_mobile]
-url = http://download.tizen.org/snapshots/tizen/unified/latest/repos/standard/packages/
-
-[repo.tizen_base]
-url = http://download.tizen.org/snapshots/tizen/base/latest/repos/standard/packages/
-
-
diff --git a/scripts/config/image_name.configuration b/scripts/config/image_name.configuration
deleted file mode 100644
index 507f154c7..000000000
--- a/scripts/config/image_name.configuration
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw_docker}
diff --git a/scripts/docker/Dockerfile b/scripts/docker/Dockerfile
deleted file mode 100644
index 37e4e1ebf..000000000
--- a/scripts/docker/Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-FROM ubuntu:16.04
-
-ENV http_proxy $http_proxy
-ENV https_proxy $https_proxy
-
-RUN apt-get update && apt-get --yes --force-yes install build-essential
-RUN apt-get update && apt-get --yes --force-yes install scons cmake
-RUN apt-get update && apt-get --yes --force-yes install libboost-all-dev
-RUN apt-get update && apt-get --yes --force-yes install git
-RUN apt-get update && apt-get --yes --force-yes install gcc-5-arm-linux-gnueabi g++-5-arm-linux-gnueabi
-RUN apt-get update && apt-get --yes --force-yes install lcov
-RUN apt-get update && apt-get --yes --force-yes install clang-format-3.9 python-pip
-RUN pip install yapf==0.22.0
-RUN apt-get update && apt-get --yes --force-yes install doxygen graphviz
-RUN apt-get update && apt-get --yes --force-yes install wget
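-# Fetch the Linaro GCC 6.3 cross toolchain for armhf and put it on PATH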
-RUN wget https://releases.linaro.org/components/toolchain/binaries/6.3-2017.02/arm-linux-gnueabihf/gcc-linaro-6.3.1-2017.02-x86_64_arm-linux-gnueabihf.tar.xz -O gcc.tar.xz -nv
-RUN tar -xf gcc.tar.xz -C /opt/ && rm -rf gcc.tar.xz
-ENV PATH "/opt/gcc-linaro-6.3.1-2017.02-x86_64_arm-linux-gnueabihf/bin:$PATH"
-RUN apt-get update && apt-get --yes --force-yes install python3
diff --git a/scripts/docker/Dockerfile_tizen b/scripts/docker/Dockerfile_tizen
deleted file mode 100644
index d56cf78f1..000000000
--- a/scripts/docker/Dockerfile_tizen
+++ /dev/null
@@ -1,13 +0,0 @@
-FROM ubuntu:16.04
-
-ENV http_proxy $http_proxy
-ENV https_proxy $https_proxy
-
-RUN echo 'deb [trusted=yes] http://download.tizen.org/tools/latest-release/Ubuntu_16.04/ /' | cat >> /etc/apt/sources.list
-
-RUN apt-get update && apt-get --yes --force-yes install gbs
-
-RUN apt-get --yes --force-yes install wget unzip
-RUN wget http://download.tizen.org/sdk/tizenstudio/official/binary/sdb_3.1.4_ubuntu-64.zip -O sdb.zip
-RUN unzip -d tmp sdb.zip && rm sdb.zip
-RUN cp tmp/data/tools/sdb /usr/bin/. && rm -rf tmp
\ No newline at end of file
diff --git a/scripts/git-hooks/install_hooks.sh b/scripts/git-hooks/install_hooks.sh
deleted file mode 100755
index 9ab7342fc..000000000
--- a/scripts/git-hooks/install_hooks.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-REPO_PATH=$(git rev-parse --show-toplevel)
-REPO_HOOKS_PATH=scripts/git-hooks
-GIT_HOOKS_PATH=$REPO_PATH/.git/hooks
-REPO_PATH_REL=../.. # Relative path from REPO_HOOKS_PATH
-
-# Create symbolic links to hooks dir
-
-# NOTE `ln -s` does not overwrite if the file exists.
-ln -s $REPO_PATH_REL/$REPO_HOOKS_PATH/pre-push $GIT_HOOKS_PATH/pre-push
diff --git a/scripts/git-hooks/pre-push b/scripts/git-hooks/pre-push
deleted file mode 100755
index adad9bd38..000000000
--- a/scripts/git-hooks/pre-push
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/sh
-
-# An example hook script to verify what is about to be pushed. Called by "git
-# push" after it has checked the remote status, but before anything has been
-# pushed. If this script exits with a non-zero status nothing will be pushed.
-#
-# This hook is called with the following parameters:
-#
-# $1 -- Name of the remote to which the push is being done
-# $2 -- URL to which the push is being done
-#
-# If pushing without using a named remote those arguments will be equal.
-#
-# Information about the commits which are being pushed is supplied as lines to
-# the standard input in the form:
-#
-# <local ref> <local sha1> <remote ref> <remote sha1>
-#
-# This sample shows how to prevent push of commits where the log message starts
-# with "WIP" (work in progress).
-
-remote="$1"
-url="$2"
-
-# RUN FORMAT CHECKER
-
-REPO_PATH=$(git rev-parse --show-toplevel)
-cd $REPO_PATH
-
-./run format-checker.sh
-
-exit $?
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 23d09d13e..96b610a45 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -1,2 +1,3 @@
add_subdirectory(nnapi)
add_subdirectory(tools)
+add_subdirectory(custom_op)
diff --git a/tests/custom_op/CMakeLists.txt b/tests/custom_op/CMakeLists.txt
new file mode 100644
index 000000000..ffdfc877c
--- /dev/null
+++ b/tests/custom_op/CMakeLists.txt
@@ -0,0 +1,6 @@
+if(NOT BUILD_NEURUN)
+ return()
+endif(NOT BUILD_NEURUN)
+
+add_subdirectory(apps)
+add_subdirectory(kernels)
diff --git a/tests/custom_op/apps/CMakeLists.txt b/tests/custom_op/apps/CMakeLists.txt
new file mode 100644
index 000000000..18eef3f0e
--- /dev/null
+++ b/tests/custom_op/apps/CMakeLists.txt
@@ -0,0 +1,20 @@
+if(NOT BUILD_NEURUN)
+ return()
+endif(NOT BUILD_NEURUN)
+
+# Takes target name, source list and kernel list
+function(add_nnfw_custom_op_app NAME)
+ cmake_parse_arguments(
+ PARSED_ARGS # prefix of output variables
+ "" # list of names of the boolean arguments (only defined ones will be true)
+ "" # list of names of mono-valued arguments
+ "SOURCES;KERNELS" # list of names of multi-valued arguments (output variables are lists)
+ ${ARGN} # arguments of the function to parse, here we take the all original ones
+ )
+ add_executable(${NAME} ${PARSED_ARGS_SOURCES})
+ target_link_libraries(${NAME} PRIVATE ${PARSED_ARGS_KERNELS})
+ target_link_libraries(${NAME} PRIVATE nnfw-dev)
+ target_link_libraries(${NAME} PRIVATE dl ${LIB_PTHREAD})
+endfunction()
+
+add_subdirectories()
diff --git a/tests/custom_op/apps/FillFrom/CMakeLists.txt b/tests/custom_op/apps/FillFrom/CMakeLists.txt
new file mode 100644
index 000000000..c0914e389
--- /dev/null
+++ b/tests/custom_op/apps/FillFrom/CMakeLists.txt
@@ -0,0 +1,4 @@
+add_nnfw_custom_op_app(FillFrom_runner
+ SOURCES FillFrom_runner.cc
+ KERNELS FillFrom
+ )
diff --git a/tests/custom_op/apps/FillFrom/FillFrom_runner.cc b/tests/custom_op/apps/FillFrom/FillFrom_runner.cc
new file mode 100644
index 000000000..585ffb9cb
--- /dev/null
+++ b/tests/custom_op/apps/FillFrom/FillFrom_runner.cc
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "nnfw.h"
+#include "nnfw_dev.h"
+
+#include <cassert>
+#include <iostream>
+#include <vector>
+#include <chrono>
+#include <cmath>
+
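+// Exit the process immediately when an nnfw API call does not return NNFW_STATUS_NO_ERROR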
+#define NNPR_ENSURE_STATUS(a) \
+ do \
+ { \
+ if ((a) != NNFW_STATUS_NO_ERROR) \
+ { \
+ exit(-1); \
+ } \
+ } while (0)
+
+extern "C" void FillFromEval(nnfw_custom_kernel_params *params, char *userdata,
+ size_t userdata_size);
+
+const nnfw_custom_eval custom_func_ptr_list[] = {FillFromEval};
+const char *custom_func_name_list[] = {"FillFrom"};
+int custom_func_list_size = 1;
+
+void register_custom_operations(nnfw_session *session)
+{
+ for (int i = 0; i < custom_func_list_size; ++i)
+ {
+ auto name = custom_func_name_list[i];
+ custom_kernel_registration_info info;
+ info.eval_function = custom_func_ptr_list[i];
+ NNPR_ENSURE_STATUS(nnfw_register_custom_op_info(session, name, &info));
+ }
+}
+
+uint64_t NowMicros()
+{
+ auto time_point = std::chrono::high_resolution_clock::now();
+ auto since_epoch = time_point.time_since_epoch();
+ // default resolution of high_resolution_clock is 1e-9 s (nanoseconds)
+ return std::chrono::duration_cast<std::chrono::microseconds>(since_epoch).count();
+}
+
+uint64_t num_elems(const nnfw_tensorinfo *ti)
+{
+ uint64_t n = 1;
+ for (uint32_t i = 0; i < ti->rank; ++i)
+ {
+ assert(ti->dims[i] >= 0);
+ n *= ti->dims[i];
+ }
+ return n;
+}
+
+// TODO replace with data import
+// Valid only for model FillFrom.tflite
+// FillFrom(idx=3, val=1.1)
+static const float in_data[10] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
+static const float ref_data[10] = {0, 0, 0, 4, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1};
+
+std::vector<float> genData(uint64_t size)
+{
+ assert(size == sizeof(in_data) / sizeof(in_data[0]));
+ std::cout << "Warning: runner uses hardcoded data form in_data" << std::endl;
+ std::vector<float> vec(size);
+ for (uint64_t i = 0; i < size; i++)
+ vec[i] = in_data[i];
+ return vec;
+}
+
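+// NOTE Assumes non-empty ranges; the first element pair is compared twice, which is harmless.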
+template <typename InIter1, typename InIter2>
+static auto findMaxDifference(InIter1 first1, InIter1 last1, InIter2 first2)
+ -> decltype(*first1 - *first2)
+{
+ auto max_difference = std::abs(*first1 - *first2);
+ for (; first1 != last1; ++first1, ++first2)
+ {
+ auto diff = std::abs(*first1 - *first2);
+ if (diff > max_difference)
+ {
+ max_difference = diff;
+ }
+ }
+ return max_difference;
+}
+
+int main(const int argc, char **argv)
+{
+ if (argc < 2)
+ {
+ std::cerr << "[ERROR] No model specified\n";
+ return 1;
+ }
+
+ char *model_path = argv[1];
+
+ nnfw_session *session = nullptr;
+ NNPR_ENSURE_STATUS(nnfw_create_session(&session));
+
+ register_custom_operations(session);
+
+ NNPR_ENSURE_STATUS(nnfw_load_model_from_file(session, model_path));
+
+ uint32_t num_inputs;
+ NNPR_ENSURE_STATUS(nnfw_input_size(session, &num_inputs));
+
+ // verify input and output
+
+ if (num_inputs == 0)
+ {
+ std::cerr << "[ ERROR ] "
+ << "No inputs in model => execution is not possible" << std::endl;
+ exit(1);
+ }
+
+ auto verifyInputTypes = [session]() {
+ uint32_t sz;
+ NNPR_ENSURE_STATUS(nnfw_input_size(session, &sz));
+ for (uint32_t i = 0; i < sz; ++i)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_input_tensorinfo(session, i, &ti));
+ if (ti.dtype != NNFW_TYPE_TENSOR_FLOAT32)
+ {
+ std::cerr << "Only float 32bit is supported." << std::endl;
+ exit(-1);
+ }
+ }
+ };
+
+ auto verifyOutputTypes = [session]() {
+ uint32_t sz;
+ NNPR_ENSURE_STATUS(nnfw_output_size(session, &sz));
+
+ for (uint32_t i = 0; i < sz; ++i)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_output_tensorinfo(session, i, &ti));
+ if (ti.dtype != NNFW_TYPE_TENSOR_FLOAT32)
+ {
+ std::cerr << "Only float 32bit is supported." << std::endl;
+ exit(-1);
+ }
+ }
+ };
+
+ verifyInputTypes();
+ verifyOutputTypes();
+
+ // prepare execution
+
+ uint64_t prepare_ms = NowMicros();
+ NNPR_ENSURE_STATUS(nnfw_prepare(session));
+ prepare_ms = NowMicros() - prepare_ms;
+
+ // prepare input
+ std::vector<std::vector<float>> inputs(num_inputs);
+
+ auto generateInputs = [session, num_inputs, &inputs]() {
+ // generate random data
+ const int seed = 1;
+ for (uint32_t i = 0; i < num_inputs; ++i)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_input_tensorinfo(session, i, &ti));
+ auto input_num_elements = num_elems(&ti);
+ inputs[i] = genData(input_num_elements);
+ NNPR_ENSURE_STATUS(nnfw_set_input(session, i, NNFW_TYPE_TENSOR_FLOAT32, inputs[i].data(),
+ sizeof(float) * input_num_elements));
+ }
+ };
+
+ generateInputs();
+
+ // prepare output
+ uint32_t num_outputs = 0;
+ NNPR_ENSURE_STATUS(nnfw_output_size(session, &num_outputs));
+ std::vector<std::vector<float>> outputs(num_outputs);
+
+ for (uint32_t i = 0; i < num_outputs; i++)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_output_tensorinfo(session, i, &ti));
+ auto output_num_elements = num_elems(&ti);
+ outputs[i].resize(output_num_elements);
+ NNPR_ENSURE_STATUS(nnfw_set_output(session, i, NNFW_TYPE_TENSOR_FLOAT32, outputs[i].data(),
+ sizeof(float) * output_num_elements));
+ }
+
+ uint64_t run_ms = NowMicros();
+ NNPR_ENSURE_STATUS(nnfw_run(session));
+ run_ms = NowMicros() - run_ms;
+
+ const float tolerance = 0.01f;
+ auto max_difference =
+ findMaxDifference(outputs[0].begin(), outputs[0].end(), std::begin(ref_data));
+
+ if (max_difference > tolerance)
+ {
+ std::cout << "Max difference is more than tolerance" << std::endl;
+ std::cout << "Max difference is " << max_difference << std::endl;
+ }
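+ // NOTE A mismatch is only reported; the runner still returns 0 below.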
+
+ std::cout << "nnfw_prepare takes " << prepare_ms / 1e3 << " sec" << std::endl;
+ std::cout << "nnfw_run takes " << run_ms / 1e3 << " sec" << std::endl;
+
+ NNPR_ENSURE_STATUS(nnfw_close_session(session));
+
+ return 0;
+}
diff --git a/tests/custom_op/kernels/CMakeLists.txt b/tests/custom_op/kernels/CMakeLists.txt
new file mode 100644
index 000000000..9b960a3b3
--- /dev/null
+++ b/tests/custom_op/kernels/CMakeLists.txt
@@ -0,0 +1,9 @@
+nnfw_find_package(FlatBuffers REQUIRED)
+
+function(add_nnfw_kernel NAME)
+ add_library(${NAME} STATIC ${ARGN})
+ target_link_libraries(${NAME} PRIVATE nnfw-dev)
+ target_link_libraries(${NAME} PRIVATE flatbuffers)
+endfunction()
+
+add_subdirectories()
diff --git a/tests/custom_op/kernels/FillFrom/CMakeLists.txt b/tests/custom_op/kernels/FillFrom/CMakeLists.txt
new file mode 100644
index 000000000..acef865a4
--- /dev/null
+++ b/tests/custom_op/kernels/FillFrom/CMakeLists.txt
@@ -0,0 +1 @@
+add_nnfw_kernel(FillFrom FillFromKernel.cc)
diff --git a/tests/custom_op/kernels/FillFrom/FillFromKernel.cc b/tests/custom_op/kernels/FillFrom/FillFromKernel.cc
new file mode 100644
index 000000000..3f8da5a92
--- /dev/null
+++ b/tests/custom_op/kernels/FillFrom/FillFromKernel.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "nnfw_dev.h"
+
+#include "flatbuffers/flexbuffers.h"
+
+extern "C" void FillFromEval(nnfw_custom_kernel_params *params, char *userdata,
+ size_t userdata_size)
+{
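+ // userdata carries the operator's custom_options as a FlexBuffers map, e.g. {"idx": 3, "val": 1.1}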
+ auto userdata_root = flexbuffers::GetRoot(reinterpret_cast<uint8_t *>(userdata), userdata_size);
+
+ auto attr_map = userdata_root.AsMap();
+
+ auto idx = attr_map["idx"].AsInt32();
+ auto val = attr_map["val"].AsFloat();
+
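+ // Total number of elements in the flattened input tensor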
+ int32_t flat_size = 1;
+ for (int32_t i = 0; i < params->inputs[0].type.rank; ++i)
+ {
+ flat_size *= params->inputs[0].type.dims[i];
+ }
+
+ assert(idx < flat_size);
+
+ auto output_flat = static_cast<float *>(params->outputs[0].allocation);
+ auto input_flat = static_cast<float *>(params->inputs[0].allocation);
+
+ for (int32_t i = 0; i < idx; ++i)
+ {
+ output_flat[i] = 0;
+ }
+
+ for (int32_t i = idx; i < flat_size; ++i)
+ {
+ output_flat[i] = val;
+ }
+
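+ // Preserve the original input value at position idx (cf. ref_data in the runner)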
+ output_flat[idx] = input_flat[idx];
+}
diff --git a/tests/custom_op/nnpkgs/FillFrom/FillFrom.json b/tests/custom_op/nnpkgs/FillFrom/FillFrom.json
new file mode 100644
index 000000000..5dac87d6a
--- /dev/null
+++ b/tests/custom_op/nnpkgs/FillFrom/FillFrom.json
@@ -0,0 +1,115 @@
+{
+ version: 3,
+ operator_codes: [
+ {
+ builtin_code: "CUSTOM",
+ custom_code: "FillFrom",
+ version: 1
+ }
+ ],
+ subgraphs: [
+ {
+ tensors: [
+ {
+ shape: [
+ 10
+ ],
+ type: "FLOAT32",
+ buffer: 1,
+ name: "t_0",
+ is_variable: false
+ },
+ {
+ shape: [
+ 10
+ ],
+ type: "FLOAT32",
+ buffer: 2,
+ name: "t_1",
+ is_variable: false
+ },
+ {
+ shape: [
+ 10
+ ],
+ type: "FLOAT32",
+ buffer: 3,
+ name: "t_2",
+ is_variable: false
+ }
+ ],
+ inputs: [
+ 0
+ ],
+ outputs: [
+ 1
+ ],
+ operators: [
+ {
+ opcode_index: 0,
+ inputs: [
+ 0
+ ],
+ outputs: [
+ 1
+ ],
+ builtin_options_type: 0,
+ custom_options: [
+ 105,
+ 100,
+ 120,
+ 0,
+ 118,
+ 97,
+ 108,
+ 0,
+ 2,
+ 9,
+ 6,
+ 0,
+ 3,
+ 0,
+ 0,
+ 0,
+ 1,
+ 0,
+ 0,
+ 0,
+ 2,
+ 0,
+ 0,
+ 0,
+ 3,
+ 0,
+ 0,
+ 0,
+ 205,
+ 204,
+ 140,
+ 63,
+ 6,
+ 14,
+ 10,
+ 38,
+ 1
+ ],
+ custom_options_format: "FLEXBUFFERS"
+ }
+ ]
+ }
+ ],
+ description: "nnpackage",
+ buffers: [
+ {
+ },
+ {
+ },
+ {
+ },
+ {
+ }
+ ],
+ metadata_buffer: [
+
+ ]
+}
diff --git a/tests/custom_op/nnpkgs/FillFrom/FillFrom.tflite b/tests/custom_op/nnpkgs/FillFrom/FillFrom.tflite
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/custom_op/nnpkgs/FillFrom/FillFrom.tflite
diff --git a/tests/custom_op/nnpkgs/FillFrom/metadata/MANIFEST b/tests/custom_op/nnpkgs/FillFrom/metadata/MANIFEST
new file mode 100644
index 000000000..4e97cf146
--- /dev/null
+++ b/tests/custom_op/nnpkgs/FillFrom/metadata/MANIFEST
@@ -0,0 +1,7 @@
+{
+ "major-version" : "1",
+ "minor-version" : "0",
+ "patch-version" : "0",
+ "models" : [ "FillFrom.tflite" ],
+ "model-types" : [ "tflite" ]
+}
diff --git a/tests/framework/run_test.sh b/tests/framework/run_test.sh
index a9feaab55..930971534 100755
--- a/tests/framework/run_test.sh
+++ b/tests/framework/run_test.sh
@@ -36,24 +36,26 @@ function Usage()
echo ""
}
-function verify_downloaded_file()
+function need_download()
{
LOCAL_PATH=$1
REMOTE_URL=$2
-
+ if [ ! -e $LOCAL_PATH ]; then
+ return 0;
+ fi
# Ignore checking md5 in cache
if [ ! -z $IGNORE_MD5 ] && [ "$IGNORE_MD5" == "1" ]; then
- return 0
+ return 1
fi
LOCAL_HASH=$(md5sum $LOCAL_PATH | awk '{ print $1 }')
- REMOTE_HASH=$(curl -I -ss $REMOTE_URL | grep '^Content-MD5' | tr -d '\r\n' | awk '{ print $2 }' | base64 -d | xxd -p)
+ REMOTE_HASH=$(curl -ss $REMOTE_URL | md5sum | awk '{ print $1 }')
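+ # NOTE This downloads the whole remote file to hash it, since the server may not send a Content-MD5 header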
# TODO Emit an error when Content-MD5 field was not found. (Server configuration issue)
-
if [ "$LOCAL_HASH" != "$REMOTE_HASH" ]; then
echo "Downloaded file is outdated or incomplete."
- return 1
+ return 0
fi
+ return 1
}
DRIVER_BIN=""
@@ -159,7 +161,7 @@ run_tests()
fi
# Download unless we have it in cache (Also check md5sum)
- if [ ! -e $MODELFILE ] || ! verify_downloaded_file "$MODELFILE" "$MODELFILE_URL"; then
+ if need_download "$MODELFILE" "$MODELFILE_URL"; then
echo ""
echo "Download test file for $TEST_NAME"
echo "======================"
@@ -231,7 +233,7 @@ download_tests()
fi
# Download unless we have it in cache (Also check md5sum)
- if [ ! -e $MODELFILE ] || ! verify_downloaded_file "$MODELFILE" "$MODELFILE_URL"; then
+ if need_download "$MODELFILE" "$MODELFILE_URL"; then
echo ""
echo "Download test file for $TEST_NAME"
echo "======================"
diff --git a/tests/framework/tests/MODELS/mobilenet/config.sh b/tests/framework/tests/MODELS/mobilenet/config.sh
index b17a3bd54..b23d687cd 100644
--- a/tests/framework/tests/MODELS/mobilenet/config.sh
+++ b/tests/framework/tests/MODELS/mobilenet/config.sh
@@ -1,3 +1,2 @@
MODELFILE_SERVER_PATH="https://storage.googleapis.com/download.tensorflow.org/models/tflite"
MODELFILE_NAME="mobilenet_v1_0.25_128_float_2017_11_08.zip"
-STATUS="enabled"
diff --git a/tests/framework/tests/concat/2D/config.sh b/tests/framework/tests/concat/2D/config.sh
index bb71cad83..fd22e708c 100644
--- a/tests/framework/tests/concat/2D/config.sh
+++ b/tests/framework/tests/concat/2D/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="concat_test_2d.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/custom/abs/config.sh b/tests/framework/tests/custom/abs/config.sh
new file mode 100644
index 000000000..3030bcb72
--- /dev/null
+++ b/tests/framework/tests/custom/abs/config.sh
@@ -0,0 +1 @@
+MODELFILE_NAME="custom_abs_test.tflite"
diff --git a/tests/framework/tests/custom/squared_difference/config.sh b/tests/framework/tests/custom/squared_difference/config.sh
new file mode 100644
index 000000000..745a84447
--- /dev/null
+++ b/tests/framework/tests/custom/squared_difference/config.sh
@@ -0,0 +1 @@
+MODELFILE_NAME="custom_squared_diff_test.tflite"
diff --git a/tests/framework/tests/custom/tensorflowmax/config.sh b/tests/framework/tests/custom/tensorflowmax/config.sh
index 058799935..122c459db 100644
--- a/tests/framework/tests/custom/tensorflowmax/config.sh
+++ b/tests/framework/tests/custom/tensorflowmax/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="custom_max_test.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/custom/tensorflowsum/config.sh b/tests/framework/tests/custom/tensorflowsum/config.sh
new file mode 100644
index 000000000..0a6dfe348
--- /dev/null
+++ b/tests/framework/tests/custom/tensorflowsum/config.sh
@@ -0,0 +1 @@
+MODELFILE_NAME="custom_sum_test.tflite"
diff --git a/tests/framework/tests/exp/config.sh b/tests/framework/tests/exp/config.sh
index 47f878783..944f0bbce 100644
--- a/tests/framework/tests/exp/config.sh
+++ b/tests/framework/tests/exp/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="exp_4d.tflite"
-STATUS="enabled"
diff --git a/tests/framework/tests/hashtable_lookup/config.sh b/tests/framework/tests/hashtable_lookup/config.sh
index 1176a36f1..3222ee4d2 100644
--- a/tests/framework/tests/hashtable_lookup/config.sh
+++ b/tests/framework/tests/hashtable_lookup/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="hashtable_lookup_test1.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/logistic/config.sh b/tests/framework/tests/logistic/config.sh
index 456773aa9..632c3629e 100644
--- a/tests/framework/tests/logistic/config.sh
+++ b/tests/framework/tests/logistic/config.sh
@@ -1 +1,2 @@
MODELFILE_NAME="sigmoid_test.tflite"
+STATUS="disabled"
diff --git a/tests/framework/tests/neg/config.sh b/tests/framework/tests/neg/config.sh
index 8cde1a258..000f7c811 100644
--- a/tests/framework/tests/neg/config.sh
+++ b/tests/framework/tests/neg/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="neg_4d.tflite"
-STATUS="enabled"
diff --git a/tests/framework/tests/pad/pad1/config.sh b/tests/framework/tests/pad/pad1/config.sh
index 1c154c1dd..088cd8962 100644
--- a/tests/framework/tests/pad/pad1/config.sh
+++ b/tests/framework/tests/pad/pad1/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="pad_test.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/pad/pad2/config.sh b/tests/framework/tests/pad/pad2/config.sh
index 6a0b2d30f..1683f5350 100644
--- a/tests/framework/tests/pad/pad2/config.sh
+++ b/tests/framework/tests/pad/pad2/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="pad_test2.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/reduce_max/config.sh b/tests/framework/tests/reduce_max/config.sh
new file mode 100644
index 000000000..d636b8bd3
--- /dev/null
+++ b/tests/framework/tests/reduce_max/config.sh
@@ -0,0 +1 @@
+MODELFILE_NAME="reduce_max_1d.tflite"
diff --git a/tests/framework/tests/reduce_mean/test1/config.sh b/tests/framework/tests/reduce_mean/test1/config.sh
index 7884000aa..2f370ea4e 100644
--- a/tests/framework/tests/reduce_mean/test1/config.sh
+++ b/tests/framework/tests/reduce_mean/test1/config.sh
@@ -1,3 +1 @@
-# REDUCE_MEAN is supported after tensorflow 1.10
MODELFILE_NAME="reduce_mean_test.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/reduce_mean/test2/config.sh b/tests/framework/tests/reduce_mean/test2/config.sh
index d9ca89a7a..6c54779a9 100644
--- a/tests/framework/tests/reduce_mean/test2/config.sh
+++ b/tests/framework/tests/reduce_mean/test2/config.sh
@@ -1,3 +1 @@
-# REDUCE_MEAN is supported after tensorflow 1.10
MODELFILE_NAME="reduce_mean_test_2.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/rsqrt/config.sh b/tests/framework/tests/rsqrt/config.sh
index c97f8af6e..87aa85277 100644
--- a/tests/framework/tests/rsqrt/config.sh
+++ b/tests/framework/tests/rsqrt/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="rsqrt_4d.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/sub/broadcast/config.sh b/tests/framework/tests/sub/broadcast/config.sh
index f61285915..2b1add0e5 100644
--- a/tests/framework/tests/sub/broadcast/config.sh
+++ b/tests/framework/tests/sub/broadcast/config.sh
@@ -1,2 +1 @@
MODELFILE_NAME="sub_test_broadcast.tflite"
-STATUS="disabled"
diff --git a/tests/framework/tests/tranpose/config.sh b/tests/framework/tests/transpose/config.sh
index 9adb85e70..9adb85e70 100644
--- a/tests/framework/tests/tranpose/config.sh
+++ b/tests/framework/tests/transpose/config.sh
diff --git a/tests/framework/tests/transpose_conv/same/config.sh b/tests/framework/tests/transpose_conv/same/config.sh
new file mode 100644
index 000000000..2cca86e03
--- /dev/null
+++ b/tests/framework/tests/transpose_conv/same/config.sh
@@ -0,0 +1 @@
+MODELFILE_NAME="transpose_conv_test.tflite"
diff --git a/tests/framework/tests/transpose_conv/valid/config.sh b/tests/framework/tests/transpose_conv/valid/config.sh
new file mode 100644
index 000000000..d162331a3
--- /dev/null
+++ b/tests/framework/tests/transpose_conv/valid/config.sh
@@ -0,0 +1 @@
+MODELFILE_NAME="transpose_conv_valid_test.tflite"
diff --git a/tests/nnapi/CMakeLists.txt b/tests/nnapi/CMakeLists.txt
index d3c9cab4c..dc8ba865f 100644
--- a/tests/nnapi/CMakeLists.txt
+++ b/tests/nnapi/CMakeLists.txt
@@ -2,6 +2,20 @@ if (NOT BUILD_RUNTIME_NNAPI_TEST)
return()
endif(NOT BUILD_RUNTIME_NNAPI_TEST)
+if (GENERATE_RUNTIME_NNAPI_TESTS)
+ set(GENERATOR_SCRIPT "${CMAKE_CURRENT_SOURCE_DIR}/specs/generate_test.sh")
+ file(GLOB_RECURSE TEST_SPECS "${CMAKE_CURRENT_SOURCE_DIR}/specs/*.mod.py")
+ set(GENERATED_CPPS "${CMAKE_CURRENT_SOURCE_DIR}/src/generated/all_generated_V1_1_cts_tests.cpp"
+ "${CMAKE_CURRENT_SOURCE_DIR}/src/generated/all_generated_V1_0_cts_tests.cpp"
+ "${CMAKE_CURRENT_SOURCE_DIR}/src/generated/all_generated_Ex_cts_tests.cpp")
+ add_custom_command(OUTPUT ${GENERATED_CPPS}
+ COMMAND ${GENERATOR_SCRIPT}
+ DEPENDS ${GENERATOR_SCRIPT} ${TEST_SPECS}
+ COMMENT "Generating runtime nnapi tests")
+ add_custom_target(generate-runtime-nnapi-tests
+ DEPENDS ${GENERATED_CPPS})
+endif(GENERATE_RUNTIME_NNAPI_TESTS)
+
# Executable `runtime_run` (Dummy runner executable using NN API)
set(RUNTIME_NNAPI_TEST nnapi_gtest)
set(RUNTIME_NNAPI_TEST_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/TestMain.cpp
@@ -12,45 +26,38 @@ set(GENERATED_TEST_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/TestGenerated_V1_0.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/TestGenerated_V1_1.cpp
${CMAKE_CURRENT_SOURCE_DIR}/src/TestGenerated_Ex.cpp)
-# g++-6.3 can build TestGenerated_XXX.cpp.
-if (CMAKE_COMPILER_IS_GNUCC AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 6.3)
- set(RUNTIME_NNAPI_TEST_SRC ${RUNTIME_NNAPI_TEST_SRC} ${GENERATED_TEST_SRC})
-endif()
-
-# Both gbs and cross for tizen support building generated test
-if (OBS_BUILD)
- set(RUNTIME_NNAPI_TEST_SRC ${RUNTIME_NNAPI_TEST_SRC} ${GENERATED_TEST_SRC})
-endif()
+set(RUNTIME_NNAPI_TEST_SRC ${RUNTIME_NNAPI_TEST_SRC} ${GENERATED_TEST_SRC})
add_executable(${RUNTIME_NNAPI_TEST} ${RUNTIME_NNAPI_TEST_SRC})
+if (GENERATE_RUNTIME_NNAPI_TESTS)
+ add_dependencies(${RUNTIME_NNAPI_TEST} generate-runtime-nnapi-tests)
+endif(GENERATE_RUNTIME_NNAPI_TESTS)
+
nnfw_find_package(GTest)
-set(RUNTIME_NNAPI_TEST_SRC_INC ${NNFW_INCLUDE_DIR}
- ${CMAKE_CURRENT_SOURCE_DIR}/include
+set(RUNTIME_NNAPI_TEST_SRC_INC ${CMAKE_CURRENT_SOURCE_DIR}/include
${CMAKE_CURRENT_SOURCE_DIR}/src)
target_include_directories(${RUNTIME_NNAPI_TEST} PRIVATE ${RUNTIME_NNAPI_TEST_SRC_INC})
if (BUILD_NEURUN)
- set(SKIPLIST_FILE_NAME ${RUNTIME_NNAPI_TEST}.skip.${TARGET_PLATFORM}.neurun)
-elseif (BUILD_PURE_ARM_COMPUTE)
set(SKIPLIST_FILE_NAME ${RUNTIME_NNAPI_TEST}.skip.${TARGET_PLATFORM})
+elseif (BUILD_PURE_ARM_COMPUTE)
+ set(SKIPLIST_FILE_NAME ${RUNTIME_NNAPI_TEST}.skip.${TARGET_PLATFORM}.pacl)
endif()
+target_link_libraries(${RUNTIME_NNAPI_TEST} nnfw_lib_nnapi)
target_link_libraries(${RUNTIME_NNAPI_TEST} gtest)
target_link_libraries(${RUNTIME_NNAPI_TEST} ${LIB_PTHREAD} dl)
-install(TARGETS nnapi_gtest DESTINATION unittest)
+install(TARGETS ${RUNTIME_NNAPI_TEST} DESTINATION unittest)
install(FILES ${SKIPLIST_FILE_NAME}
DESTINATION unittest
RENAME ${RUNTIME_NNAPI_TEST}.skip
OPTIONAL)
-# If build both runtime (pure_arm_compute and neurun) at once,
-# install both skiplist file as backup
-if (BUILD_NEURUN AND BUILD_PURE_ARM_COMPUTE)
- set(SKIPLIST_ALL_RUNTIME
- ${RUNTIME_NNAPI_TEST}.skip.${TARGET_PLATFORM}
- ${RUNTIME_NNAPI_TEST}.skip.${TARGET_PLATFORM}.neurun)
- install(FILES ${SKIPLIST_ALL_RUNTIME} DESTINATION unittest OPTIONAL)
-endif()
+# Install skiplist file for target as backup
+FILE(GLOB SKIPLIST_TARGET ${CMAKE_CURRENT_SOURCE_DIR}/${RUNTIME_NNAPI_TEST}.skip.${TARGET_PLATFORM}*)
+FILE(GLOB SKIPLIST_NOARCH ${CMAKE_CURRENT_SOURCE_DIR}/${RUNTIME_NNAPI_TEST}.skip.noarch.*)
+list(APPEND SKIPLIST_ALL_RUNTIME ${SKIPLIST_TARGET} ${SKIPLIST_NOARCH})
+install(FILES ${SKIPLIST_ALL_RUNTIME} DESTINATION unittest OPTIONAL)
diff --git a/tests/nnapi/nnapi_gtest.skip.armv7l-linux b/tests/nnapi/nnapi_gtest.skip.armv7l-linux
index 1c85418a9..124634a3f 100644
--- a/tests/nnapi/nnapi_gtest.skip.armv7l-linux
+++ b/tests/nnapi/nnapi_gtest.skip.armv7l-linux
@@ -1,44 +1,15 @@
-GeneratedTests.add_broadcast_quant8
-GeneratedTests.add_quant8
-GeneratedTests.argmax_ex_quant8*
-GeneratedTests.logical_not_ex*
-GeneratedTests.logistic_quant8_1
-GeneratedTests.logistic_quant8_2
-GeneratedTests.lsh_projection
-GeneratedTests.lsh_projection_2
-GeneratedTests.lsh_projection_weights_as_inputs
-GeneratedTests.lstm
-GeneratedTests.lstm2
-GeneratedTests.lstm2_state
-GeneratedTests.lstm2_state2
-GeneratedTests.lstm3
-GeneratedTests.lstm3_state
-GeneratedTests.lstm3_state2
-GeneratedTests.lstm3_state3
-GeneratedTests.lstm_state
-GeneratedTests.lstm_state2
-GeneratedTests.mul_broadcast_quant8
-GeneratedTests.mul_quant8
+#
+# Following tests will be skipped on armv7l-linux
+#
+# Unsupported operations
+GeneratedTests.lsh_projection*
+GeneratedTests.mobilenet*
+GeneratedTests.svdf*
+GeneratedTests.batch_to_space*
+GeneratedTests.space_to_batch*
+# Unexpected result
GeneratedTests.pack*
-GeneratedTests.svdf
-GeneratedTests.svdf2
-GeneratedTests.svdf_state
-ValidationTestCompilation.CreateExecution
-ValidationTestCompilation.Finish
-ValidationTestCompilation.SetPreference
-ValidationTestExecution.EventWait
-ValidationTestExecution.SetInput
-ValidationTestExecution.SetInputFromMemory
-ValidationTestExecution.SetOutput
-ValidationTestExecution.SetOutputFromMemory
-ValidationTestExecution.StartCompute
-ValidationTestIdentify.DuplicateInputs
-ValidationTestIdentify.DuplicateOutputs
-ValidationTestIdentify.InputIsOutput
-ValidationTestIdentify.OutputIsInput
-ValidationTestModel.AddOperand
-ValidationTestModel.CreateCompilation
-ValidationTestModel.Finish
-ValidationTestModel.IdentifyInputsAndOutputs
-ValidationTestModel.SetOperandValue
-ValidationTestModel.SetOperandValueFromMemory
+# Unsupported broadcast
+GeneratedTests.logical_or_ex_broadcast_4D_2D
+# Unsupported optional input that has shape
+GeneratedTests.lstm2*
diff --git a/tests/nnapi/nnapi_gtest.skip.armv7l-linux.acl_neon b/tests/nnapi/nnapi_gtest.skip.armv7l-linux.acl_neon
new file mode 100644
index 000000000..964696245
--- /dev/null
+++ b/tests/nnapi/nnapi_gtest.skip.armv7l-linux.acl_neon
@@ -0,0 +1,30 @@
+#
+# Following tests will be skipped on armv7l-linux acl_neon
+#
+# Unsupported operations
+TrivialTest.BroadcastMulTwo
+GeneratedTests.depth_to_space*
+GeneratedTests.dequantize
+GeneratedTests.embedding_lookup*
+GeneratedTests.hashtable_lookup*
+GeneratedTests.lsh_projection*
+GeneratedTests.mobilenet*
+GeneratedTests.reduce_min*
+GeneratedTests.space_to_depth*
+GeneratedTests.svdf*
+GeneratedTests.batch_to_space*
+GeneratedTests.space_to_batch*
+GeneratedTests.cast_ex*
+GeneratedTests.gather_ex*
+GeneratedTests.reduce_max_ex*
+GeneratedTests.topk_v2*
+# Unexpected result
+GeneratedTests.pack*
+# Float error
+GeneratedTests.exp_ex_1D_float
+GeneratedTests.exp_ex_2D_float
+# Unsupported optional input that has shape
+GeneratedTests.lstm2*
+# Unsupported data type
+GeneratedTests.argmax_ex_int32
+GeneratedTests.argmax_ex_neg_axis_int32
diff --git a/tests/nnapi/nnapi_gtest.skip.armv7l-linux.cpu b/tests/nnapi/nnapi_gtest.skip.armv7l-linux.cpu
new file mode 100644
index 000000000..39b7271ec
--- /dev/null
+++ b/tests/nnapi/nnapi_gtest.skip.armv7l-linux.cpu
@@ -0,0 +1,77 @@
+#
+# Following tests will be skipped on armv7l-linux cpu
+#
+# Unsupported operations
+TrivialTest.AddTwo
+TrivialTest.AddThree
+TrivialTest.BroadcastAddTwo
+TrivialTest.BroadcastMulTwo
+ValidationTestCompilation.SetPreference
+ValidationTestCompilation.CreateExecution
+ValidationTestCompilation.Finish
+ValidationTestExecution.SetInput
+ValidationTestExecution.SetOutput
+ValidationTestExecution.SetInputFromMemory
+ValidationTestExecution.SetOutputFromMemory
+ValidationTestExecution.StartCompute
+ValidationTestExecution.EventWait
+GeneratedTests.add_broadcast*
+GeneratedTests.add_quant*
+GeneratedTests.argmax*
+GeneratedTests.depth_to_space*
+GeneratedTests.dequantize
+GeneratedTests.embedding_lookup
+GeneratedTests.embedding_lookup_2d_nnfw
+GeneratedTests.embedding_lookup_4d_nnfw
+GeneratedTests.equal_ex*
+GeneratedTests.exp_ex*
+GeneratedTests.floor_
+GeneratedTests.greater_equal_ex*
+GeneratedTests.hashtable_lookup*
+GeneratedTests.l2_normalization*
+GeneratedTests.l2_pool*
+GeneratedTests.local_response_norm*
+GeneratedTests.less_ex*
+GeneratedTests.logical_and_ex*
+GeneratedTests.logical_or_ex*
+GeneratedTests.logistic*
+GeneratedTests.lsh_projection*
+GeneratedTests.lstm*
+GeneratedTests.mobilenet*
+GeneratedTests.mul*
+GeneratedTests.neg*
+GeneratedTests.notequal*
+GeneratedTests.prelu_ex*
+GeneratedTests.reduce_min*
+GeneratedTests.relu1*
+GeneratedTests.relu6*
+GeneratedTests.relu*
+GeneratedTests.resize_bilinear*
+GeneratedTests.rnn*
+GeneratedTests.rsqrt*
+GeneratedTests.mean*
+GeneratedTests.pad*
+GeneratedTests.space_to_depth*
+GeneratedTests.sqrt_ex*
+GeneratedTests.squared_difference_ex*
+GeneratedTests.svdf*
+GeneratedTests.tanh_
+GeneratedTests.batch_to_space*
+GeneratedTests.div_*
+GeneratedTests.space_to_batch*
+GeneratedTests.squeeze*
+GeneratedTests.strided_slice*
+GeneratedTests.sub*
+GeneratedTests.transpose*
+GeneratedTests.cast_ex*
+GeneratedTests.gather_ex*
+GeneratedTests.strided_slice_ex*
+GeneratedTests.reduce_max_ex*
+GeneratedTests.reduce_sum_ex*
+GeneratedTests.topk_v2*
+# Unexpected result
+GeneratedTests.split*
+GeneratedTests.transpose_conv*
+GeneratedTests.pack*
+GeneratedTests.unpack*
+GeneratedTests.logical_not_ex*
diff --git a/tests/nnapi/nnapi_gtest.skip.armv7l-linux.neurun b/tests/nnapi/nnapi_gtest.skip.armv7l-linux.neurun
deleted file mode 100644
index 59957ba60..000000000
--- a/tests/nnapi/nnapi_gtest.skip.armv7l-linux.neurun
+++ /dev/null
@@ -1,117 +0,0 @@
-#
-# Following tests will be skipped on armv7l-linux
-#
-# Unsupported operations
-TrivialTest.AddTwo
-TrivialTest.AddThree
-TrivialTest.BroadcastAddTwo
-TrivialTest.BroadcastMulTwo
-ValidationTestCompilation.SetPreference
-ValidationTestCompilation.CreateExecution
-ValidationTestCompilation.Finish
-ValidationTestIdentify.InputIsOutput
-ValidationTestIdentify.OutputIsInput
-ValidationTestIdentify.DuplicateInputs
-ValidationTestIdentify.DuplicateOutputs
-ValidationTestIdentify.Ok
-ValidationTestExecution.SetInput
-ValidationTestExecution.SetOutput
-ValidationTestExecution.SetInputFromMemory
-ValidationTestExecution.SetOutputFromMemory
-ValidationTestExecution.StartCompute
-ValidationTestExecution.EventWait
-GeneratedTests.add*
-GeneratedTests.argmax*
-GeneratedTests.avg_pool_float_1
-GeneratedTests.avg_pool_float_2
-GeneratedTests.avg_pool_float_3
-GeneratedTests.avg_pool_float_4
-GeneratedTests.avg_pool_quant8_1
-GeneratedTests.avg_pool_quant8_2
-GeneratedTests.avg_pool_quant8_3
-GeneratedTests.avg_pool_quant8_4
-GeneratedTests.conv_float_channels
-GeneratedTests.conv_float_channels_weights_as_inputs
-GeneratedTests.conv_float_large
-GeneratedTests.conv_float_large_weights_as_inputs
-GeneratedTests.conv_float
-GeneratedTests.conv_float_weights_as_inputs
-GeneratedTests.conv_quant8_channels
-GeneratedTests.conv_quant8_channels_weights_as_inputs
-GeneratedTests.conv_quant8_large
-GeneratedTests.conv_quant8_large_weights_as_inputs
-GeneratedTests.conv_quant8
-GeneratedTests.conv_quant8_overflow
-GeneratedTests.conv_quant8_overflow_weights_as_inputs
-GeneratedTests.conv_quant8_weights_as_inputs
-GeneratedTests.depth_to_space*
-GeneratedTests.depthwise_conv2d*
-GeneratedTests.depthwise_conv
-GeneratedTests.dequantize
-GeneratedTests.embedding_lookup
-GeneratedTests.embedding_lookup_2d_nnfw
-GeneratedTests.embedding_lookup_4d_nnfw
-GeneratedTests.equal_ex*
-GeneratedTests.exp_ex*
-GeneratedTests.floor_
-GeneratedTests.hashtable_lookup*
-GeneratedTests.l2_normalization*
-GeneratedTests.l2_pool*
-GeneratedTests.local_response_norm*
-GeneratedTests.logical_and_ex*
-GeneratedTests.logical_or_ex*
-GeneratedTests.logistic*
-GeneratedTests.lsh_projection*
-GeneratedTests.lstm*
-GeneratedTests.max_pool_float_1
-GeneratedTests.max_pool_float_2
-GeneratedTests.max_pool_float_3
-GeneratedTests.max_pool_quant8_1
-GeneratedTests.max_pool_quant8_2
-GeneratedTests.max_pool_quant8_3
-GeneratedTests.mobilenet*
-GeneratedTests.mul*
-GeneratedTests.neg*
-GeneratedTests.notequal*
-GeneratedTests.prelu_ex*
-GeneratedTests.reduce_min*
-GeneratedTests.relu1*
-GeneratedTests.relu6*
-GeneratedTests.relu*
-GeneratedTests.resize_bilinear*
-GeneratedTests.rnn*
-GeneratedTests.rsqrt*
-GeneratedTests.mean*
-GeneratedTests.pad*
-GeneratedTests.space_to_depth*
-GeneratedTests.sqrt_ex*
-GeneratedTests.squared_difference_ex*
-GeneratedTests.svdf*
-GeneratedTests.tanh_
-GeneratedTests.batch_to_space*
-GeneratedTests.div_*
-GeneratedTests.space_to_batch*
-GeneratedTests.squeeze*
-GeneratedTests.strided_slice*
-GeneratedTests.sub*
-GeneratedTests.transpose*
-GeneratedTests.cast_ex*
-GeneratedTests.gather_ex*
-GeneratedTests.strided_slice_ex*
-GeneratedTests.tensorflowmax_ex*
-GeneratedTests.reduce_sum_ex*
-GeneratedTests.topk_v2*
-# Unhandled exception
-GeneratedTests.concat*
-GeneratedTests.fully_connected*
-GeneratedTests.reshape*
-# Unexpected result
-GeneratedTests.avg_pool_quant8_5
-GeneratedTests.conv_quant8_2
-GeneratedTests.max_pool_quant8_4
-GeneratedTests.softmax*
-GeneratedTests.split*
-GeneratedTests.transpose_conv*
-GeneratedTests.pack*
-GeneratedTests.unpack*
-GeneratedTests.logical_not_ex*
diff --git a/tests/nnapi/nnapi_gtest.skip.armv7l-linux.pacl b/tests/nnapi/nnapi_gtest.skip.armv7l-linux.pacl
new file mode 100644
index 000000000..c6c4f2694
--- /dev/null
+++ b/tests/nnapi/nnapi_gtest.skip.armv7l-linux.pacl
@@ -0,0 +1,38 @@
+GeneratedTests.greater_equal_ex*
+GeneratedTests.less_ex*
+GeneratedTests.lsh_projection
+GeneratedTests.lsh_projection_2
+GeneratedTests.lsh_projection_weights_as_inputs
+GeneratedTests.lstm
+GeneratedTests.lstm2
+GeneratedTests.lstm2_state
+GeneratedTests.lstm2_state2
+GeneratedTests.lstm3
+GeneratedTests.lstm3_state
+GeneratedTests.lstm3_state2
+GeneratedTests.lstm3_state3
+GeneratedTests.lstm_state
+GeneratedTests.lstm_state2
+GeneratedTests.prelu_ex_quant8_1
+GeneratedTests.prelu_ex_broadcast_quant8_1
+GeneratedTests.svdf*
+GeneratedTests.transpose_conv_ex_float_4
+ValidationTestCompilation.CreateExecution
+ValidationTestCompilation.Finish
+ValidationTestCompilation.SetPreference
+ValidationTestExecution.EventWait
+ValidationTestExecution.SetInput
+ValidationTestExecution.SetInputFromMemory
+ValidationTestExecution.SetOutput
+ValidationTestExecution.SetOutputFromMemory
+ValidationTestExecution.StartCompute
+ValidationTestIdentify.DuplicateInputs
+ValidationTestIdentify.DuplicateOutputs
+ValidationTestIdentify.InputIsOutput
+ValidationTestIdentify.OutputIsInput
+ValidationTestModel.AddOperand
+ValidationTestModel.CreateCompilation
+ValidationTestModel.Finish
+ValidationTestModel.IdentifyInputsAndOutputs
+ValidationTestModel.SetOperandValue
+ValidationTestModel.SetOperandValueFromMemory
diff --git a/tests/nnapi/nnapi_gtest.skip.armv7l-tizen b/tests/nnapi/nnapi_gtest.skip.armv7l-tizen
index ebb51de94..124634a3f 100644
--- a/tests/nnapi/nnapi_gtest.skip.armv7l-tizen
+++ b/tests/nnapi/nnapi_gtest.skip.armv7l-tizen
@@ -1,47 +1,15 @@
-GeneratedTests.lsh_projection
-GeneratedTests.lsh_projection_2
-GeneratedTests.lsh_projection_weights_as_inputs
-GeneratedTests.lstm
-GeneratedTests.lstm2
-GeneratedTests.lstm2_state
-GeneratedTests.lstm2_state2
-GeneratedTests.lstm3
-GeneratedTests.lstm3_state
-GeneratedTests.lstm3_state2
-GeneratedTests.lstm3_state3
-GeneratedTests.lstm_state
-GeneratedTests.lstm_state2
-GeneratedTests.notequal*
-GeneratedTests.prelu_ex*
-GeneratedTests.svdf
-GeneratedTests.svdf2
-GeneratedTests.svdf_state
-GeneratedTests.logical_not_ex*
-ValidationTestCompilation.CreateExecution
-ValidationTestCompilation.SetPreference
-ValidationTestIdentify.DuplicateInputs
-ValidationTestIdentify.DuplicateOutputs
-ValidationTestIdentify.InputIsOutput
-ValidationTestIdentify.OutputIsInput
-ValidationTestModel.AddOperand
-ValidationTestModel.CreateCompilation
-ValidationTestModel.Finish
-ValidationTestModel.IdentifyInputsAndOutputs
-ValidationTestModel.SetOperandValue
-ValidationTestModel.SetOperandValueFromMemory
-ValidationTestCompilation.Finish
-ValidationTestExecution.SetInput
-ValidationTestExecution.SetOutput
-ValidationTestExecution.SetInputFromMemory
-ValidationTestExecution.SetOutputFromMemory
-ValidationTestExecution.StartCompute
-ValidationTestExecution.EventWait
-GeneratedTests.add_broadcast_quant8
-GeneratedTests.add_quant8
-GeneratedTests.logical_and_ex*
-GeneratedTests.logical_or_ex*
-GeneratedTests.logistic_quant8_1
-GeneratedTests.logistic_quant8_2
-GeneratedTests.mul_broadcast_quant8
-GeneratedTests.mul_quant8
+#
+# Following tests will be skipped on armv7l-tizen
+#
+# Unsupported operations
+GeneratedTests.lsh_projection*
+GeneratedTests.mobilenet*
+GeneratedTests.svdf*
+GeneratedTests.batch_to_space*
+GeneratedTests.space_to_batch*
+# Unexpected result
GeneratedTests.pack*
+# Unsupported broadcast
+GeneratedTests.logical_or_ex_broadcast_4D_2D
+# Unsupported optional input that has shape
+GeneratedTests.lstm2*
diff --git a/tests/nnapi/nnapi_gtest.skip.noarch.interp b/tests/nnapi/nnapi_gtest.skip.noarch.interp
new file mode 100644
index 000000000..6f97b8235
--- /dev/null
+++ b/tests/nnapi/nnapi_gtest.skip.noarch.interp
@@ -0,0 +1,72 @@
+#
+# Following tests will be skipped on the interpreter (noarch)
+#
+# Unsupported operations
+TrivialTest.Broadcast*
+GeneratedTests.avg_pool_quant*
+GeneratedTests.concat_quant*
+GeneratedTests.conv_quant*
+GeneratedTests.depthwise_conv2d_quant*
+GeneratedTests.fully_connected_quant*
+GeneratedTests.max_pool_quant*
+GeneratedTests.softmax_quant*
+GeneratedTests.add_broadcast*
+GeneratedTests.add_quant*
+GeneratedTests.argmax*
+GeneratedTests.depth_to_space*
+GeneratedTests.dequantize
+GeneratedTests.embedding_lookup
+GeneratedTests.embedding_lookup_2d_nnfw
+GeneratedTests.embedding_lookup_4d_nnfw
+GeneratedTests.equal_ex*
+GeneratedTests.exp_ex*
+GeneratedTests.floor_
+GeneratedTests.greater_equal_ex*
+GeneratedTests.hashtable_lookup*
+GeneratedTests.l2_normalization*
+GeneratedTests.l2_pool*
+GeneratedTests.local_response_norm*
+GeneratedTests.less_ex*
+GeneratedTests.logical_and_ex*
+GeneratedTests.logical_or_ex*
+GeneratedTests.logistic*
+GeneratedTests.lsh_projection*
+GeneratedTests.lstm*
+GeneratedTests.mobilenet*
+GeneratedTests.mul*
+GeneratedTests.neg*
+GeneratedTests.notequal*
+GeneratedTests.prelu_ex*
+GeneratedTests.reduce_min*
+GeneratedTests.relu1*
+GeneratedTests.relu6*
+GeneratedTests.relu*
+GeneratedTests.resize_bilinear*
+GeneratedTests.rnn*
+GeneratedTests.rsqrt*
+GeneratedTests.mean*
+GeneratedTests.pad*
+GeneratedTests.space_to_depth*
+GeneratedTests.sqrt_ex*
+GeneratedTests.squared_difference_ex*
+GeneratedTests.svdf*
+GeneratedTests.tanh_
+GeneratedTests.batch_to_space*
+GeneratedTests.div_*
+GeneratedTests.space_to_batch*
+GeneratedTests.squeeze*
+GeneratedTests.strided_slice*
+GeneratedTests.sub*
+GeneratedTests.transpose*
+GeneratedTests.cast_ex*
+GeneratedTests.gather_ex*
+GeneratedTests.strided_slice_ex*
+GeneratedTests.reduce_max_ex*
+GeneratedTests.reduce_sum_ex*
+GeneratedTests.topk_v2*
+# Unexpected result
+GeneratedTests.split*
+GeneratedTests.transpose_conv*
+GeneratedTests.pack*
+GeneratedTests.unpack*
+GeneratedTests.logical_not_ex*
diff --git a/tests/nnapi/nnapi_gtest.skip.x86_64-linux b/tests/nnapi/nnapi_gtest.skip.x86_64-linux
index e87c4acf6..901a10391 100644
--- a/tests/nnapi/nnapi_gtest.skip.x86_64-linux
+++ b/tests/nnapi/nnapi_gtest.skip.x86_64-linux
@@ -1,88 +1,78 @@
#
# Following tests will be skipped on x86_64-linux
#
-# =====================
-# Fails in CPU fallback
-# =====================
-GeneratedTests.depth_to_space_float_1
-GeneratedTests.depth_to_space_float_2
-GeneratedTests.depth_to_space_float_3
-GeneratedTests.depth_to_space_quant8_1
-GeneratedTests.depth_to_space_quant8_2
+# Unsupported operations
+TrivialTest.AddTwo
+TrivialTest.AddThree
+TrivialTest.BroadcastAddTwo
+TrivialTest.BroadcastMulTwo
+ValidationTestCompilation.SetPreference
+ValidationTestCompilation.CreateExecution
+ValidationTestCompilation.Finish
+ValidationTestExecution.SetInput
+ValidationTestExecution.SetOutput
+ValidationTestExecution.SetInputFromMemory
+ValidationTestExecution.SetOutputFromMemory
+ValidationTestExecution.StartCompute
+ValidationTestExecution.EventWait
+GeneratedTests.add_broadcast*
+GeneratedTests.add_quant*
+GeneratedTests.argmax*
+GeneratedTests.depth_to_space*
+GeneratedTests.depthwise_conv2d_quant8*
GeneratedTests.dequantize
GeneratedTests.embedding_lookup
-GeneratedTests.exp_ex_1D_float
+GeneratedTests.embedding_lookup_2d_nnfw
+GeneratedTests.embedding_lookup_4d_nnfw
GeneratedTests.equal_ex*
-GeneratedTests.exp_ex_2D_float
+GeneratedTests.exp_ex*
GeneratedTests.floor_
-GeneratedTests.hashtable_lookup_float
-GeneratedTests.hashtable_lookup_quant8
-GeneratedTests.l2_normalization_2
-GeneratedTests.l2_normalization_large
-GeneratedTests.l2_normalization
-GeneratedTests.local_response_norm_float_1
-GeneratedTests.local_response_norm_float_2
-GeneratedTests.local_response_norm_float_3
-GeneratedTests.local_response_norm_float_4
+GeneratedTests.greater_equal_ex*
+GeneratedTests.hashtable_lookup*
+GeneratedTests.l2_normalization*
+GeneratedTests.l2_pool*
+GeneratedTests.less*
+GeneratedTests.local_response_norm*
GeneratedTests.logical_and_ex*
-GeneratedTests.lsh_projection_2
-GeneratedTests.lsh_projection
-GeneratedTests.lsh_projection_weights_as_inputs
-GeneratedTests.lstm2
-GeneratedTests.lstm2_state2
-GeneratedTests.lstm2_state
-GeneratedTests.lstm3
-GeneratedTests.lstm3_state2
-GeneratedTests.lstm3_state3
-GeneratedTests.lstm3_state
-GeneratedTests.lstm
-GeneratedTests.lstm_state2
-GeneratedTests.lstm_state
+GeneratedTests.logical_or_ex*
+GeneratedTests.logistic*
+GeneratedTests.lsh_projection*
+GeneratedTests.lstm*
+GeneratedTests.mobilenet*
+GeneratedTests.mul*
GeneratedTests.neg*
GeneratedTests.notequal*
GeneratedTests.prelu_ex*
-GeneratedTests.reduce_min_ex*
-GeneratedTests.relu1_float_1
-GeneratedTests.relu1_float_2
-GeneratedTests.relu1_quant8_1
-GeneratedTests.relu1_quant8_2
-GeneratedTests.rnn
-GeneratedTests.rnn_state
-GeneratedTests.rsqrt_ex_float_1
-GeneratedTests.space_to_depth_float_1
-GeneratedTests.space_to_depth_float_2
-GeneratedTests.space_to_depth_float_3
-GeneratedTests.space_to_depth_quant8_1
-GeneratedTests.space_to_depth_quant8_2
+GeneratedTests.reduce_min*
+GeneratedTests.relu1*
+GeneratedTests.relu6*
+GeneratedTests.relu*
+GeneratedTests.resize_bilinear*
+GeneratedTests.rnn*
+GeneratedTests.rsqrt*
+GeneratedTests.mean*
+GeneratedTests.pad*
+GeneratedTests.space_to_depth*
GeneratedTests.sqrt_ex*
GeneratedTests.squared_difference_ex*
-GeneratedTests.svdf2
-GeneratedTests.svdf
-GeneratedTests.svdf_state
-GeneratedTests.strided_slice_float_10
-GeneratedTests.strided_slice_float_1
-GeneratedTests.strided_slice_float_2
-GeneratedTests.strided_slice_float_3
-GeneratedTests.strided_slice_float_4
-GeneratedTests.strided_slice_float_5
-GeneratedTests.strided_slice_float_6
-GeneratedTests.strided_slice_float_7
-GeneratedTests.strided_slice_float_8
-GeneratedTests.strided_slice_float_9
-GeneratedTests.strided_slice
-GeneratedTests.tensorflowmax_ex*
+GeneratedTests.svdf*
+GeneratedTests.tanh_
+GeneratedTests.batch_to_space*
+GeneratedTests.div_*
+GeneratedTests.space_to_batch*
+GeneratedTests.squeeze*
+GeneratedTests.strided_slice*
+GeneratedTests.sub*
+GeneratedTests.transpose*
+GeneratedTests.cast_ex*
+GeneratedTests.gather_ex*
+GeneratedTests.strided_slice_ex*
+GeneratedTests.reduce_max_ex*
GeneratedTests.reduce_sum_ex*
-GeneratedTests.sub_broadcast_float
-GeneratedTests.logical_not_ex*
-TrivialTest.*
-ValidationTest.*
-ValidationTestModel.*
-ValidationTestCompilation.*
-ValidationTestIdentify.*
-ValidationTestExecution.*
-GeneratedTests.gather_ex_1D_quant8
-GeneratedTests.gather_ex_2D_quant8
+GeneratedTests.topk_v2*
+# Unexpected result
GeneratedTests.split*
GeneratedTests.transpose_conv*
GeneratedTests.pack*
GeneratedTests.unpack*
+GeneratedTests.logical_not_ex*
diff --git a/tests/nnapi/nnapi_test_generator/android-p/README.md b/tests/nnapi/nnapi_test_generator/android-p/README.md
new file mode 100644
index 000000000..469e467ea
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-p/README.md
@@ -0,0 +1,11 @@
+# nnapi test generator
+
+_nnapi test generator_ aims at generating NN public C API tests.
+
+To generate tests, run the following command.
+
+```
+$ tests/nnapi/specs/generate_test.sh
+```
+
+Original code is at https://android.googlesource.com/platform/frameworks/ml/+/efd22b6.
diff --git a/externals/nnapi_test_generator/include/TestHarness.h b/tests/nnapi/nnapi_test_generator/android-p/include/TestHarness.h
index 1fcb0d661..1fcb0d661 100644
--- a/externals/nnapi_test_generator/include/TestHarness.h
+++ b/tests/nnapi/nnapi_test_generator/android-p/include/TestHarness.h
diff --git a/externals/nnapi_test_generator/slicing.py b/tests/nnapi/nnapi_test_generator/android-p/slicing.py
index f08e9d1a1..f08e9d1a1 100755..100644
--- a/externals/nnapi_test_generator/slicing.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/slicing.py
diff --git a/externals/nnapi_test_generator/test_generator.py b/tests/nnapi/nnapi_test_generator/android-p/test_generator.py
index 922ef7754..922ef7754 100755..100644
--- a/externals/nnapi_test_generator/test_generator.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/test_generator.py
diff --git a/externals/nnapi_test_generator/tests/P_conv/conv_1_h3_w2_SAME.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/conv_1_h3_w2_SAME.mod.py
index 8e93749e2..8e93749e2 100644
--- a/externals/nnapi_test_generator/tests/P_conv/conv_1_h3_w2_SAME.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/conv_1_h3_w2_SAME.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_conv/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_conv/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_conv/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/stdout.txt.expect
index 47d92b6b8..47d92b6b8 100644
--- a/externals/nnapi_test_generator/tests/P_conv/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_conv/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_depthwise_conv/depthwise_conv.bin.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/depthwise_conv.bin.mod.py
index 8738ee01b..8738ee01b 100644
--- a/externals/nnapi_test_generator/tests/P_depthwise_conv/depthwise_conv.bin.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/depthwise_conv.bin.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_depthwise_conv/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_depthwise_conv/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_depthwise_conv/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/stdout.txt.expect
index 9a22cc3e3..9a22cc3e3 100644
--- a/externals/nnapi_test_generator/tests/P_depthwise_conv/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_depthwise_conv/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_explicit/explicit_add.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/explicit_add.mod.py
index b1c8f99a4..b1c8f99a4 100644
--- a/externals/nnapi_test_generator/tests/P_explicit/explicit_add.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/explicit_add.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_explicit/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_explicit/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_explicit/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/stdout.txt.expect
index 1221b7bda..1221b7bda 100644
--- a/externals/nnapi_test_generator/tests/P_explicit/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_explicit/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_float/addfloat.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_float/addfloat.mod.py
index f0e4f0430..f0e4f0430 100644
--- a/externals/nnapi_test_generator/tests/P_float/addfloat.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_float/addfloat.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_float/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_float/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_float/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_float/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_float/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_float/stdout.txt.expect
index eb8cc146b..eb8cc146b 100644
--- a/externals/nnapi_test_generator/tests/P_float/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_float/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_full/addfloat.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_full/addfloat.mod.py
index dbe7701a1..dbe7701a1 100644
--- a/externals/nnapi_test_generator/tests/P_full/addfloat.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_full/addfloat.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_full/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_full/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_full/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_full/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_full/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_full/stdout.txt.expect
index e3d2af3fa..e3d2af3fa 100644
--- a/externals/nnapi_test_generator/tests/P_full/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_full/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_lstm/lstm.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/lstm.mod.py
index cb1bf6010..cb1bf6010 100644
--- a/externals/nnapi_test_generator/tests/P_lstm/lstm.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/lstm.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_lstm/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_lstm/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_lstm/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/stdout.txt.expect
index 2ba320d77..2ba320d77 100644
--- a/externals/nnapi_test_generator/tests/P_lstm/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_lstm/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_quantized_avgpool/averpoolfloat.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/averpoolfloat.mod.py
index 17d6e0a4f..17d6e0a4f 100644
--- a/externals/nnapi_test_generator/tests/P_quantized_avgpool/averpoolfloat.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/averpoolfloat.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_quantized_avgpool/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_quantized_avgpool/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_quantized_avgpool/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/stdout.txt.expect
index b4632d34d..b4632d34d 100644
--- a/externals/nnapi_test_generator/tests/P_quantized_avgpool/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_avgpool/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_quantized_conv/quantized.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/quantized.mod.py
index 7ef623513..7ef623513 100644
--- a/externals/nnapi_test_generator/tests/P_quantized_conv/quantized.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/quantized.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_quantized_conv/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_quantized_conv/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_quantized_conv/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/stdout.txt.expect
index 6b28bdd54..6b28bdd54 100644
--- a/externals/nnapi_test_generator/tests/P_quantized_conv/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_quantized_conv/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_vts_full/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/stderr.txt.expect
index 3decb4c1c..3decb4c1c 100644
--- a/externals/nnapi_test_generator/tests/P_vts_full/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_vts_full/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/stdout.txt.expect
index 14cd4f99d..14cd4f99d 100644
--- a/externals/nnapi_test_generator/tests/P_vts_full/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_vts_full/vts_full.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/vts_full.mod.py
index 4ad3b2e4b..4ad3b2e4b 100644
--- a/externals/nnapi_test_generator/tests/P_vts_full/vts_full.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_full/vts_full.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_vts_operands/addfloat.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/addfloat.mod.py
index 976cb35ec..976cb35ec 100644
--- a/externals/nnapi_test_generator/tests/P_vts_operands/addfloat.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/addfloat.mod.py
diff --git a/externals/nnapi_test_generator/tests/P_vts_operands/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/stderr.txt.expect
index 3decb4c1c..3decb4c1c 100644
--- a/externals/nnapi_test_generator/tests/P_vts_operands/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_vts_operands/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/stdout.txt.expect
index 2e74d1fc0..2e74d1fc0 100644
--- a/externals/nnapi_test_generator/tests/P_vts_operands/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_vts_operands/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_weird/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/stderr.txt.expect
index c5a6e36b9..c5a6e36b9 100644
--- a/externals/nnapi_test_generator/tests/P_weird/stderr.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/stderr.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_weird/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/stdout.txt.expect
index fa67d68ac..fa67d68ac 100644
--- a/externals/nnapi_test_generator/tests/P_weird/stdout.txt.expect
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/stdout.txt.expect
diff --git a/externals/nnapi_test_generator/tests/P_weird/weird_add.mod.py b/tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/weird_add.mod.py
index a230267a4..a230267a4 100644
--- a/externals/nnapi_test_generator/tests/P_weird/weird_add.mod.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/P_weird/weird_add.mod.py
diff --git a/externals/nnapi_test_generator/tests/test.py b/tests/nnapi/nnapi_test_generator/android-p/tests/test.py
index c987cf680..c987cf680 100755..100644
--- a/externals/nnapi_test_generator/tests/test.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/tests/test.py
diff --git a/externals/nnapi_test_generator/vts_generator.py b/tests/nnapi/nnapi_test_generator/android-p/vts_generator.py
index ab34e2bda..ab34e2bda 100755..100644
--- a/externals/nnapi_test_generator/vts_generator.py
+++ b/tests/nnapi/nnapi_test_generator/android-p/vts_generator.py
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/README.md b/tests/nnapi/nnapi_test_generator/android-q-beta/README.md
new file mode 100644
index 000000000..62a6978cd
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/README.md
@@ -0,0 +1,408 @@
+# NN-API Test Generator
+
+Original code is at https://android.googlesource.com/platform/frameworks/ml/+/ee61649/nn/tools/test_generator/
+
+### Fix for neurun
+
+- Update paths in this README.md file for the neurun NNAPI frontend test
+ - `nn/runtime/test/specs/` => `tests/nnapi/specs/`
+ - $ANDROID_BUILD_TOP/frameworks/ml/nn/runtime/test/specs => $NNAS_PROJECT_PATH/tests/nnapi/specs
+ - Rebuild with mm afterwards => Rebuild afterwards (mm is not supported)
+
+---
+
+# Using the NN-API Test Generator
+
+## Prerequisites
+
+- Python3
+- Numpy
+
+## Writing a Test Specification
+
+You should create new test specs in `tests/nnapi/specs/<version>/` and name them with the `.mod.py` suffix, so that other tools can automatically update the unit tests.
+
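+For instance, a minimal spec might look like the following sketch (the file name and values here are hypothetical; the constructs used are explained in the sections below):
+
+```Python
+# tests/nnapi/specs/V1_0/add_simple.mod.py (hypothetical)
+i1 = Input("op1", "TENSOR_FLOAT32", "{2}")
+i2 = Input("op2", "TENSOR_FLOAT32", "{2}")
+act = Int32Scalar("act", 0)  # fused activation: NONE
+o1 = Output("op3", "TENSOR_FLOAT32", "{2}")
+
+Model().Operation("ADD", i1, i2, act).To(o1)
+Example({i1: [1.0, 2.0], i2: [3.0, 4.0], o1: [4.0, 6.0]})
+```
+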
+### Specifying Operands
+
+#### Syntax
+
+```
+OperandType(name, (type, shape, <optional scale, zero point>), <optional initializer>)
+```
+
+For example,
+
+```Python
+# p1 is a 2-by-2 fp matrix parameter, with value [1, 2; 3, 4]
+p1 = Parameter("param", ("TENSOR_FLOAT32", [2, 2]), [1, 2, 3, 4])
+
+# i1 is a quantized input of shape (2, 256, 256, 3), with scale = 0.5, zero point = 128
+i1 = Input("input", ("TENSOR_QUANT8_ASYMM", [2, 256, 256, 3], 0.5, 128))
+
+# p2 is an Int32 scalar with value 1
+p2 = Int32Scalar("act", 1)
+```
+
+#### OperandType
+
+The test generator currently supports the following operand types.
+
+- Input
+- Output
+  * IgnoredOutput, whose results are not compared in the test
+- Parameter
+  * Int32Scalar, shorthand for a parameter with type INT32
+  * Float32Scalar, shorthand for a parameter with type FLOAT32
+  * Int32Vector, shorthand for a 1-D TENSOR_INT32 parameter
+  * Float32Vector, shorthand for a 1-D TENSOR_FLOAT32 parameter
+- Internal, for models with multiple operations (see the sketch below)
+
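+A minimal sketch of wiring two operations together through an Internal operand (hypothetical names and shapes; the Model/Operation API is described in the next section):
+
+```Python
+i1 = Input("op1", "TENSOR_FLOAT32", "{2}")
+t1 = Internal("tmp", "TENSOR_FLOAT32", "{2}")  # intermediate operand, neither model input nor output
+o1 = Output("op2", "TENSOR_FLOAT32", "{2}")
+
+model = Model()
+model.Operation("RELU", i1).To(t1)
+model.Operation("LOGISTIC", t1).To(o1)
+```
+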
+### Specifying Models
+
+#### Instantiate a model
+
+```Python
+# Instantiate a model
+model = Model()
+
+# Instantiate a model with a name
+model2 = Model("model_name")
+```
+
+#### Add an operation
+
+```
+model.Operation(optype, i1, i2, ...).To(o1, o2, ...)
+```
+
+For example,
+
+```Python
+model.Operation("ADD", i1, i2, act).To(o1)
+```
+
+#### Use implicit operands
+
+Simple scalar and 1-D vector parameters can be passed directly to the Operation constructor, and the test generator will deduce the operand type from the value provided.
+
+```Python
+model.Operation("MEAN", i1, [1], 0) # axis = [1], keep_dims = 0
+```
+
+Note that, for fp values, the initializers should all be Python fp numbers, e.g. use `1.0` or `1.` instead of `1` for implicit fp operands, as in the sketch below.
+
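+For instance (a sketch; `i1` and `o1` are assumed to be declared fp operands):
+
+```Python
+# radius (5) is deduced as an INT32 scalar; bias/alpha/beta are written as
+# fp literals so they are deduced as FLOAT32
+model.Operation("LOCAL_RESPONSE_NORMALIZATION", i1, 5, 1.0, 1.0, 0.5).To(o1)
+```
+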
+### Specifying Inputs and Expected Outputs
+
+The combination of inputs and expected outputs is called an example for a given model. An example is defined like this:
+
+```Python
+# Example 1, separate dictionary for inputs and outputs
+input1 = {
+ i1: [1, 2],
+ i2: [3, 4]
+}
+output1 = {o1: [4, 6]}
+
+# Example 2, combined dictionary
+example2_values = {
+ i1: [5, 6],
+ i2: [7, 8],
+ o1: [12, 14]
+}
+
+# Instantiate an example
+Example((input1, output1), example2_values)
+```
+
+By default, examples are attached to the most recently instantiated model. You can explicitly specify the target model, and optionally the example name, as follows:
+
+```Python
+Example((input1, output1), example2_values, model=model, name="example_name")
+```
+
+### Specifying Variations
+
+You can add variations to the example so that the test generator can automatically create multiple tests. The following variation types are currently supported:
+
+- DefaultVariation, i.e. no variation
+- DataTypeConverter
+- DataLayoutConverter
+- AxisConverter
+- RelaxedModeConverter
+- ParameterAsInputConverter
+- ActivationConverter
+
+#### DataTypeConverter
+
+Convert input/parameter/output to the specified type, e.g. float32 -> quant8. The target data type for each operand to transform has to be explicitly specified. It is the spec writer's responsibility to ensure that such a conversion is valid.
+
+```Python
+converter = DataTypeConverter(name="variation_name").Identify({
+ op1: (target_type, target_scale, target_zero_point),
+ op2: (target_type, target_scale, target_zero_point),
+ ...
+})
+```
+
+#### DataLayoutConverter
+
+Convert input/parameter/output between NHWC and NCHW. The caller needs to provide a list of target operands to transform, as well as the data layout parameter to set.
+
+```Python
+converter = DataLayoutConverter(target_data_layout, name="variation_name").Identify(
+ [op1, op2, ..., layout_parameter]
+)
+```
+
+#### AxisConverter
+
+Transpose a certain axis in input/output to a target position, and optionally remove some axes. The caller needs to provide a list of target operands to transform, as well as the axis parameter to set.
+
+```Python
+converter = AxisConverter(originalAxis, targetAxis, dimension, drop=[], name="variation_name").Identify(
+ [op1, op2, ..., axis_parameter]
+)
+```
+
+This model variation is for ops that apply a calculation along a certain axis, such as L2_NORMALIZATION, SOFTMAX, and CHANNEL_SHUFFLE. For example, consider L2_NORMALIZATION with an input of shape [2, 3, 4, 5] along the last axis, i.e. axis = -1. The output shape would be the same as the input. We can create a new model that does the calculation along axis 0 by transposing the input and output shapes to [5, 2, 3, 4] and setting the axis parameter to 0. Such a converter can be defined as
+
+```Python
+toAxis0 = AxisConverter(-1, 0, 4).Identify([input, output, axis])
+```
+
+The target axis can also be negative, to test negative indexing:
+
+```Python
+toAxis0 = AxisConverter(-1, -4, 4).Identify([input, output, axis])
+```
+
+Considering the same L2_NORMALIZATION example, we can also create a new model with input/output of 2D shape [4, 5] by removing the first two dimensions. This is essentially doing `new_input = input[0,0,:,:]` in numpy. Such a converter can be defined as
+
+```Python
+toDim2 = AxisConverter(-1, -1, 4, drop=[0, 1]).Identify([input, output, axis])
+```
+
+If transposition and removal are specified at the same time, the converter will transpose first and then remove the axes. For example, the following converter will result in shape [5, 4] and axis 0.
+
+```Python
+toDim2Axis0 = AxisConverter(-1, 2, 4, drop=[0, 1]).Identify([input, output, axis])
+```
+
+#### RelaxedModeConverter
+
+Convert the model to enable/disable relaxed computation.
+
+```Python
+converter = RelaxedModeConverter(is_relaxed, name="variation_name")
+```
+
+#### ParameterAsInputConverter
+
+Convert a certain parameter to a model input, e.g. the weight in CONV_2D. The caller needs to provide a list of target operands to convert.
+
+```Python
+converter = ParameterAsInputConverter(name="variation_name").Identify(
+ [op1, op2, ...]
+)
+```
+
+#### ActivationConverter
+
+Convert the output with a certain activation; the original activation is assumed to be NONE. The caller needs to provide a list of target operands to transform, as well as the activation parameter to set.
+
+```Python
+converter = ActivationConverter(name="variation_name").Identify(
+ [op1, op2, ..., act_parameter]
+)
+```
+
+#### Add variation to example
+
+Each example can have multiple groups of variations, and if so, the test generator takes the cartesian product of the groups. For example, suppose we declare a model with two groups, one with two variations and one with three: `[[default, nchw], [default, relaxed, quant8]]`. This will result in 6 examples: `[default, default], [default, relaxed], [default, quant8], [nchw, default], [nchw, relaxed], [nchw, quant8]`.
+
+Use `AddVariations` to add a group of variations to the example
+
+```Python
+# Add two groups of variations [default, nchw] and [default, relaxed, quant8]
+example.AddVariations(nchw).AddVariations(relaxed, quant8)
+```
+
+By default, when you add a group of variations, an unnamed default variation is automatically included in the group. You can name the default variation like this:
+
+```Python
+example.AddVariations(nchw, defaultName="nhwc").AddVariations(relaxed, quant8)
+```
+
+Also, you can choose not to include the default variation:
+
+```Python
+# Add two groups of variations [nchw] and [default, relaxed, quant8]
+example.AddVariations(nchw, includeDefault=False).AddVariations(relaxed, quant8)
+```
+
+The example above will result in 3 examples: `[nchw, default], [nchw, relaxed], [nchw, quant8]`.
+
+#### Some helper functions
+
+The test generator provides several helper functions or shorthands to add commonly used groups of variations.
+
+```Python
+# Each following group of statements are equivalent
+
+# DataTypeConverter
+example.AddVariations(DataTypeConverter().Identify({op1: "TENSOR_FLOAT16", ...}))
+example.AddVariations("float16") # will apply to every TENSOR_FLOAT32 operands
+
+example.AddVariations(DataTypeConverter().Identify({op1: "TENSOR_INT32", ...}))
+example.AddVariations("int32") # will apply to every TENSOR_FLOAT32 operands
+
+# DataLayoutConverter
+example.AddVariations(DataLayoutConverter("nchw").Identify(op_list))
+example.AddVariations(("nchw", op_list))
+example.AddNchw(*op_list)
+
+# AxisConverter
+# original axis and dim are deduced from the op_list
+example.AddVariations(*[AxisConverter(origin, t, dim).Identify(op_list) for t in targets])
+example.AddAxis(targets, *op_list)
+
+example.AddVariations(*[
+ AxisConverter(origin, t, dim).Identify(op_list) for t in range(dim)
+ ], includeDefault=False)
+example.AddAllPositiveAxis(*op_list)
+
+example.AddVariations(*[
+ AxisConverter(origin, t, dim).Identify(op_list) for t in range(-dim, dim)
+ ], includeDefault=False)
+example.AddAllAxis(*op_list)
+
+drop = list(range(dim))
+drop.pop(origin)
+example.AddVariations(*[
+ AxisConverter(origin, origin, dim, drop[0:(dim-i)]).Identify(op_list) for i in dims])
+example.AddDims(dims, *op_list)
+
+example.AddVariations(*[
+ AxisConverter(origin, origin, dim, drop[0:i]).Identify(op_list) for i in range(dim)])
+example.AddAllDims(dims, *op_list)
+
+example.AddVariations(*[
+ AxisConverter(origin, j, dim, range(i)).Identify(op_list) \
+ for i in range(dim) for j in range(i, dim)
+ ], includeDefault=False)
+example.AddAllDimsAndPositiveAxis(dims, *op_list)
+
+example.AddVariations(*[
+ AxisConverter(origin, k, dim, range(i)).Identify(op_list) \
+ for i in range(dim) for j in range(i, dim) for k in [j, j - dim]
+ ], includeDefault=False)
+example.AddAllDimsAndAxis(dims, *op_list)
+
+# ParameterAsInputConverter
+example.AddVariations(ParameterAsInputConverter().Identify(op_list))
+example.AddVariations(("as_input", op_list))
+example.AddInput(*op_list)
+
+# RelaxedModeConverter
+example.AddVariations(RelaxedModeConverter(True))
+example.AddVariations("relaxed")
+example.AddRelaxed()
+
+# ActivationConverter
+example.AddVariations(ActivationConverter("relu").Identify(op_list))
+example.AddVariations(("relu", op_list))
+example.AddRelu(*op_list)
+
+example.AddVariations(
+ ActivationConverter("relu").Identify(op_list),
+ ActivationConverter("relu1").Identify(op_list),
+ ActivationConverter("relu6").Identify(op_list))
+example.AddVariations(
+ ("relu", op_list),
+ ("relu1", op_list),
+ ("relu6", op_list))
+example.AddAllActivations(*op_list)
+```
+
+### Specifying the Model Version
+
+If not explicitly specified, the minimal required HAL version is inferred from the path, e.g. the models defined in `tests/nnapi/specs/V1_0/add.mod.py` will all have version `V1_0`. However, there are exceptions: a certain operation may be under-tested in a previous version, with more tests added in a later version. For such cases, two methods are provided to set the version manually.
+
+#### Set the version when creating the model
+
+Use `IntroducedIn` to set the version of a model. All variations of the model will have the same version.
+
+```Python
+model_V1_0 = Model().IntroducedIn("V1_0")
+...
+# All variations of model_V1_0 will have the same version V1_0.
+Example(example, model=model_V1_0).AddVariations(var0, var1, ...)
+```
+
+#### Set the version overrides
+
+Use `Example.SetVersion` to override the model version for specific tests. The target tests are specified by name. This method can also override the version specified by `IntroducedIn`.
+
+```Python
+Example.SetVersion(<version>, testName0, testName1, ...)
+```
+
+This is useful when only a subset of variations has a different version.
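+
+For example (hypothetical test names):
+
+```Python
+# Only the float16 variations of this spec require V1_2
+Example.SetVersion("V1_2", "add_float16", "add_relaxed_float16")
+```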
+
+### Creating negative tests
+
+A negative test, also known as a validation test, supplies an invalid model or request and expects the target framework or driver to fail gracefully. You can use `ExpectFailure` to tag an example as invalid.
+
+```Python
+Example.ExpectFailure()
+```
+
+Tests tagged this way are emitted into the `GeneratedValidationTests` test case by `cts_generator.py` (added later in this patch).
+
+### A Complete Example
+
+```Python
+# Declare input, output, and parameters
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 3, 4, 1}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{1, 3, 3, 1}", [1, 4, 7, 2, 5, 8, 3, 6, 9])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [-200])
+act = Int32Scalar("act", 0)
+layout = BoolScalar("layout", False)  # NHWC data layout; referenced by the CONV_2D call below
+o1 = Output("op4", "TENSOR_FLOAT32", "{1, 3, 4, 1}")
+
+# Instantiate a model and add CONV_2D operation
+# Use implicit parameter for implicit padding and strides
+Model().Operation("CONV_2D", i1, f1, b1, 1, 1, 1, act, layout).To(o1)
+
+# Additional data type
+quant8 = DataTypeConverter().Identify({
+ i1: ("TENSOR_QUANT8_ASYMM", 0.5, 127),
+ f1: ("TENSOR_QUANT8_ASYMM", 0.5, 127),
+ b1: ("TENSOR_INT32", 0.25, 0),
+ o1: ("TENSOR_QUANT8_ASYMM", 1.0, 50)
+})
+
+# Instantiate an example
+example = Example({
+ i1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
+ o1: [0, 0, 0, 0, 35, 112, 157, 0, 0, 34, 61, 0]
+})
+
+# Only use NCHW data layout
+example.AddNchw(i1, f1, o1, layout, includeDefault=False)
+
+# Add two more groups of variations
+example.AddInput(f1, b1).AddVariations("relaxed", quant8).AddAllActivations(o1, act)
+```
+
+The spec above will result in 24 tests.
+
+## Generate Tests
+
+Once you have your model ready, run
+
+```
+$NNAS_PROJECT_PATH/tests/nnapi/specs/generate_test.sh
+$NNAS_PROJECT_PATH/tests/nnapi/specs/generate_vts_test.sh
+```
+
+It will read and generate all CTS/VTS unit tests based on the spec files in `tests/nnapi/specs/V1_*/*` if needed. The CTS test generator identifies which spec files have been modified since the last generation and regenerates only those, to reduce compilation time. To force a regeneration, use the `-f` flag. The VTS test generator regenerates tests targeting the latest HAL version by default; pass the `all` positional argument to override this.
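+
+For example, to force all CTS tests to be regenerated (assuming the script forwards its arguments to the underlying generator):
+
+```
+$ $NNAS_PROJECT_PATH/tests/nnapi/specs/generate_test.sh -f
+```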
+
+Rebuild afterwards.
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/cts_generator.py b/tests/nnapi/nnapi_test_generator/android-q-beta/cts_generator.py
new file mode 100644
index 000000000..bb65762fc
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/cts_generator.py
@@ -0,0 +1,314 @@
+#!/usr/bin/python3
+
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""CTS testcase generator
+
+Implements the CTS test backend. Invoked by ml/nn/runtime/test/specs/generate_tests.sh;
+see that script for details on how this script is used.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import argparse
+import math
+import os
+import re
+import sys
+import traceback
+
+# Stuff from test generator
+import test_generator as tg
+from test_generator import ActivationConverter
+from test_generator import BoolScalar
+from test_generator import Configuration
+from test_generator import DataTypeConverter
+from test_generator import DataLayoutConverter
+from test_generator import Example
+from test_generator import Float16Scalar
+from test_generator import Float32Scalar
+from test_generator import Float32Vector
+from test_generator import GetJointStr
+from test_generator import IgnoredOutput
+from test_generator import Input
+from test_generator import Int32Scalar
+from test_generator import Int32Vector
+from test_generator import Internal
+from test_generator import Model
+from test_generator import Operand
+from test_generator import Output
+from test_generator import Parameter
+from test_generator import ParameterAsInputConverter
+from test_generator import RelaxedModeConverter
+from test_generator import SmartOpen
+from test_generator import SymmPerChannelQuantParams
+
+def IndentedPrint(s, indent=2, *args, **kwargs):
+ print('\n'.join([" " * indent + i for i in s.split('\n')]), *args, **kwargs)
+
+# Take a model from command line
+def ParseCmdLine():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("spec", help="the spec file/directory")
+ parser.add_argument(
+ "-m", "--model", help="the output model file/directory", default="-")
+ parser.add_argument(
+ "-e", "--example", help="the output example file/directory", default="-")
+ parser.add_argument(
+ "-t", "--test", help="the output test file/directory", default="-")
+ parser.add_argument(
+ "-f", "--force", help="force to regenerate all spec files", action="store_true")
+ args = parser.parse_args()
+ tg.FileNames.InitializeFileLists(args.spec, args.model, args.example, args.test)
+ Configuration.force_regenerate = args.force
+
+def NeedRegenerate():
+ if not all(os.path.exists(f) for f in \
+ [tg.FileNames.modelFile, tg.FileNames.exampleFile, tg.FileNames.testFile]):
+ return True
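+  # Pad the spec mtime by 10 seconds, so the generated files must be clearly newer
+  # than the spec to be considered up to date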
+ specTime = os.path.getmtime(tg.FileNames.specFile) + 10
+ modelTime = os.path.getmtime(tg.FileNames.modelFile)
+ exampleTime = os.path.getmtime(tg.FileNames.exampleFile)
+ testTime = os.path.getmtime(tg.FileNames.testFile)
+ if all(t > specTime for t in [modelTime, exampleTime, testTime]):
+ return False
+ return True
+
+# Write headers for generated files, which are boilerplate code related only to filenames
+def InitializeFiles(model_fd, example_fd, test_fd):
+ specFileBase = os.path.basename(tg.FileNames.specFile)
+ fileHeader = """\
+// Generated from {spec_file}
+// DO NOT EDIT
+// clang-format off
+#include "{header}"
+"""
+ print(fileHeader.format(spec_file=specFileBase, header="TestGenerated.h"), file=test_fd)
+ print(fileHeader.format(spec_file=specFileBase, header="TestGenerated.h"), file=model_fd)
+ print(fileHeader.format(spec_file=specFileBase, header="TestHarness.h"), file=example_fd)
+
+# Dump is_ignored function for IgnoredOutput
+def DumpCtsIsIgnored(model, model_fd):
+ isIgnoredTemplate = """\
+bool {is_ignored_name}(int i) {{
+ static std::set<int> ignore = {{{ignored_index}}};
+ return ignore.find(i) != ignore.end();
+}}
+"""
+ print(isIgnoredTemplate.format(
+ ignored_index=tg.GetJointStr(model.GetIgnoredOutputs(), method=lambda x: str(x.index)),
+ is_ignored_name=str(model.isIgnoredFunctionName)), file=model_fd)
+
+# Dump Model file for Cts tests
+def DumpCtsModel(model, model_fd):
+ assert model.compiled
+ if model.dumped:
+ return
+ namespace = "generated_tests::{spec_name}".format(spec_name=tg.FileNames.specName)
+ print("namespace {namespace} {{\n".format(namespace=namespace), file=model_fd)
+ print("void %s(Model *model) {"%(model.createFunctionName), file=model_fd)
+
+ # Phase 0: types
+ for t in model.GetTypes():
+ if t.scale == 0.0 and t.zeroPoint == 0 and t.extraParams is None:
+ typeDef = "OperandType %s(Type::%s, %s);"%(t, t.type, t.GetDimensionsString())
+ else:
+ if t.extraParams is None or t.extraParams.hide:
+ typeDef = "OperandType %s(Type::%s, %s, %s, %d);"%(
+ t, t.type, t.GetDimensionsString(), tg.PrettyPrintAsFloat(t.scale), t.zeroPoint)
+ else:
+ assert t.type == "TENSOR_QUANT8_SYMM_PER_CHANNEL", "Unexpected model configuration. " \
+ "Extra params are currently expected for " \
+ "TENSOR_QUANT8_SYMM_PER_CHANNEL operand type. "
+ assert t.scale == 0.0 and t.zeroPoint == 0, "Scale and zero point are always zero for " \
+ "TENSOR_QUANT8_SYMM_PER_CHANNEL operands"
+ typeDef = "OperandType %s(Type::%s, %s, %s);"%(
+ t, t.type, t.GetDimensionsString(), t.extraParams.GetConstructor())
+
+ IndentedPrint(typeDef, file=model_fd)
+
+ # Phase 1: add operands
+ print(" // Phase 1, operands", file=model_fd)
+ for op in model.operands:
+ IndentedPrint("auto %s = model->addOperand(&%s);"%(op, op.type), file=model_fd)
+
+ # Phase 2: operations
+ print(" // Phase 2, operations", file=model_fd)
+ for p in model.GetParameters():
+ paramDef = "static %s %s[] = %s;\nmodel->setOperandValue(%s, %s, sizeof(%s) * %d);"%(
+ p.type.GetCppTypeString(), p.initializer, p.GetListInitialization(), p,
+ p.initializer, p.type.GetCppTypeString(), p.type.GetNumberOfElements())
+ IndentedPrint(paramDef, file=model_fd)
+ for op in model.operations:
+ IndentedPrint("model->addOperation(ANEURALNETWORKS_%s, {%s}, {%s});"%(
+ op.optype, tg.GetJointStr(op.ins), tg.GetJointStr(op.outs)), file=model_fd)
+
+ # Phase 3: add inputs and outputs
+ print (" // Phase 3, inputs and outputs", file=model_fd)
+ IndentedPrint("model->identifyInputsAndOutputs(\n {%s},\n {%s});"%(
+ tg.GetJointStr(model.GetInputs()), tg.GetJointStr(model.GetOutputs())), file=model_fd)
+
+ # Phase 4: set relaxed execution if needed
+ if (model.isRelaxed):
+ print (" // Phase 4: set relaxed execution", file=model_fd)
+ print (" model->relaxComputationFloat32toFloat16(true);", file=model_fd)
+
+ print (" assert(model->isValid());", file=model_fd)
+ print ("}\n", file=model_fd)
+ DumpCtsIsIgnored(model, model_fd)
+ print("}} // namespace {namespace}".format(namespace=namespace), file=model_fd)
+ model.dumped = True
+
+def DumpMixedType(operands, feedDict):
+ supportedTensors = [
+ "DIMENSIONS",
+ "TENSOR_FLOAT32",
+ "TENSOR_INT32",
+ "TENSOR_QUANT8_ASYMM",
+ "TENSOR_OEM_BYTE",
+ "TENSOR_QUANT16_SYMM",
+ "TENSOR_FLOAT16",
+ "TENSOR_BOOL8",
+ "TENSOR_QUANT8_SYMM_PER_CHANNEL",
+ "TENSOR_QUANT16_ASYMM",
+ "TENSOR_QUANT8_SYMM",
+ ]
+ typedMap = {t: [] for t in supportedTensors}
+ FeedAndGet = lambda op, d: op.Feed(d).GetListInitialization()
+ # group the operands by type
+ for operand in operands:
+ try:
+ typedMap[operand.type.type].append(FeedAndGet(operand, feedDict))
+ typedMap["DIMENSIONS"].append("{%d, {%s}}"%(
+ operand.index, GetJointStr(operand.dimensions)))
+ except KeyError as e:
+ traceback.print_exc()
+ sys.exit("Cannot dump tensor of type {}".format(operand.type.type))
+ mixedTypeTemplate = """\
+{{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> Dimensions map
+ .operandDimensions = {{{dimensions_map}}},
+ // int -> FLOAT32 map
+ .float32Operands = {{{float32_map}}},
+ // int -> INT32 map
+ .int32Operands = {{{int32_map}}},
+ // int -> QUANT8_ASYMM map
+ .quant8AsymmOperands = {{{uint8_map}}},
+ // int -> QUANT16_SYMM map
+ .quant16SymmOperands = {{{int16_map}}},
+ // int -> FLOAT16 map
+ .float16Operands = {{{float16_map}}},
+ // int -> BOOL8 map
+ .bool8Operands = {{{bool8_map}}},
+ // int -> QUANT8_SYMM_PER_CHANNEL map
+ .quant8ChannelOperands = {{{int8_map}}},
+ // int -> QUANT16_ASYMM map
+ .quant16AsymmOperands = {{{uint16_map}}},
+ // int -> QUANT8_SYMM map
+ .quant8SymmOperands = {{{quant8_symm_map}}},
+}}"""
+ return mixedTypeTemplate.format(
+ dimensions_map=tg.GetJointStr(typedMap.get("DIMENSIONS", [])),
+ float32_map=tg.GetJointStr(typedMap.get("TENSOR_FLOAT32", [])),
+ int32_map=tg.GetJointStr(typedMap.get("TENSOR_INT32", [])),
+ uint8_map=tg.GetJointStr(typedMap.get("TENSOR_QUANT8_ASYMM", []) +
+ typedMap.get("TENSOR_OEM_BYTE", [])),
+ int16_map=tg.GetJointStr(typedMap.get("TENSOR_QUANT16_SYMM", [])),
+ float16_map=tg.GetJointStr(typedMap.get("TENSOR_FLOAT16", [])),
+ int8_map=tg.GetJointStr(typedMap.get("TENSOR_QUANT8_SYMM_PER_CHANNEL", [])),
+ bool8_map=tg.GetJointStr(typedMap.get("TENSOR_BOOL8", [])),
+ uint16_map=tg.GetJointStr(typedMap.get("TENSOR_QUANT16_ASYMM", [])),
+ quant8_symm_map=tg.GetJointStr(typedMap.get("TENSOR_QUANT8_SYMM", []))
+ )
+
+# Dump Example file for Cts tests
+def DumpCtsExample(example, example_fd):
+ namespace = "generated_tests::{spec_name}".format(spec_name=tg.FileNames.specName)
+ print("namespace {namespace} {{\n".format(namespace=namespace), file=example_fd)
+ print("std::vector<::test_helper::MixedTypedExample>& get_%s() {" % (example.examplesName), file=example_fd)
+ print("static std::vector<::test_helper::MixedTypedExample> %s = {" % (example.examplesName), file=example_fd)
+ for inputFeedDict, outputFeedDict in example.feedDicts:
+ print ('// Begin of an example', file = example_fd)
+ print ('{\n.operands = {', file = example_fd)
+ inputs = DumpMixedType(example.model.GetInputs(), inputFeedDict)
+ outputs = DumpMixedType(example.model.GetOutputs(), outputFeedDict)
+ print ('//Input(s)\n%s,' % inputs , file = example_fd)
+ print ('//Output(s)\n%s' % outputs, file = example_fd)
+ print ('},', file = example_fd)
+ if example.expectedMultinomialDistributionTolerance is not None:
+ print ('.expectedMultinomialDistributionTolerance = %f' %
+ example.expectedMultinomialDistributionTolerance, file = example_fd)
+ print ('}, // End of an example', file = example_fd)
+ print("};", file=example_fd)
+ print("return %s;" % (example.examplesName), file=example_fd)
+ print("};", file=example_fd)
+ print("\n}} // namespace {namespace}".format(namespace=namespace), file=example_fd)
+
+# Dump Test file for Cts tests
+def DumpCtsTest(example, test_fd):
+ namespace = "generated_tests::{spec_name}".format(spec_name=tg.FileNames.specName)
+ testTemplate = """\
+namespace {namespace} {{
+
+void {create_model_name}(Model *model);
+bool {is_ignored_name}(int);
+std::vector<::test_helper::MixedTypedExample>& get_{examples_name}();
+
+TEST_F({test_case_name}, {test_name}) {{
+ execute({create_model_name},
+ {is_ignored_name},
+ get_{examples_name}());
+}}
+
+}} // namespace {namespace}
+"""
+ if example.model.version is not None and not example.expectFailure:
+ testTemplate += """\
+TEST_AVAILABLE_SINCE({version}, {test_name}, {namespace}::{create_model_name})\n"""
+
+ if example.expectFailure:
+ testCaseName = "GeneratedValidationTests"
+ elif example.model.hasDynamicOutputShape:
+ testCaseName = "DynamicOutputShapeTest"
+ else:
+ testCaseName = "GeneratedTests"
+
+ print(testTemplate.format(
+ test_case_name=testCaseName,
+ test_name=str(example.testName),
+ namespace=namespace,
+ create_model_name=str(example.model.createFunctionName),
+ is_ignored_name=str(example.model.isIgnoredFunctionName),
+ examples_name=str(example.examplesName),
+ version=example.model.version), file=test_fd)
+
+if __name__ == '__main__':
+ ParseCmdLine()
+ while tg.FileNames.NextFile():
+ if Configuration.force_regenerate or NeedRegenerate():
+ print("Generating CTS tests from spec %s" % tg.FileNames.specFile, file=sys.stderr)
+ exec(open(tg.FileNames.specFile, "r").read())
+ with SmartOpen(tg.FileNames.modelFile) as model_fd, \
+ SmartOpen(tg.FileNames.exampleFile) as example_fd, \
+ SmartOpen(tg.FileNames.testFile) as test_fd:
+ InitializeFiles(model_fd, example_fd, test_fd)
+ Example.DumpAllExamples(
+ DumpModel=DumpCtsModel, model_fd=model_fd,
+ DumpExample=DumpCtsExample, example_fd=example_fd,
+ DumpTest=DumpCtsTest, test_fd=test_fd)
+ else:
+ print("Skip file: %s" % tg.FileNames.specFile, file=sys.stderr)
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/include/TestHarness.h b/tests/nnapi/nnapi_test_generator/android-q-beta/include/TestHarness.h
new file mode 100644
index 000000000..3b4b26b16
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/include/TestHarness.h
@@ -0,0 +1,426 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Header-only library for various helpers of test harness
+ * See frameworks/ml/nn/runtime/test/TestGenerated.cpp for how this is used.
+ */
+#ifndef ANDROID_FRAMEWORKS_ML_NN_TOOLS_TEST_GENERATOR_TEST_HARNESS_H
+#define ANDROID_FRAMEWORKS_ML_NN_TOOLS_TEST_GENERATOR_TEST_HARNESS_H
+
+#include <gmock/gmock-matchers.h>
+#include <gtest/gtest.h>
+
+#include <cmath>
+#include <functional>
+#include <map>
+#include <tuple>
+#include <vector>
+
+namespace test_helper {
+
+constexpr const size_t gMaximumNumberOfErrorMessages = 10;
+
+// TODO: Figure out the build dependency to make including "CpuOperationUtils.h" work.
+inline void convertFloat16ToFloat32(const _Float16* input, std::vector<float>* output) {
+ for (size_t i = 0; i < output->size(); ++i) {
+ (*output)[i] = static_cast<float>(input[i]);
+ }
+}
+
+// This class is a workaround for two issues our code relies on:
+// 1. sizeof(bool) is implementation defined.
+// 2. vector<bool> does not allow direct pointer access via the data() method.
+class bool8 {
+ public:
+ bool8() : mValue() {}
+ /* implicit */ bool8(bool value) : mValue(value) {}
+ inline operator bool() const { return mValue != 0; }
+
+ private:
+ uint8_t mValue;
+};
+
+static_assert(sizeof(bool8) == 1, "size of bool8 must be 8 bits");
+
+typedef std::map<int, std::vector<uint32_t>> OperandDimensions;
+typedef std::map<int, std::vector<float>> Float32Operands;
+typedef std::map<int, std::vector<int32_t>> Int32Operands;
+typedef std::map<int, std::vector<uint8_t>> Quant8AsymmOperands;
+typedef std::map<int, std::vector<int16_t>> Quant16SymmOperands;
+typedef std::map<int, std::vector<_Float16>> Float16Operands;
+typedef std::map<int, std::vector<bool8>> Bool8Operands;
+typedef std::map<int, std::vector<int8_t>> Quant8ChannelOperands;
+typedef std::map<int, std::vector<uint16_t>> Quant16AsymmOperands;
+typedef std::map<int, std::vector<int8_t>> Quant8SymmOperands;
+struct MixedTyped {
+ static constexpr size_t kNumTypes = 9;
+ OperandDimensions operandDimensions;
+ Float32Operands float32Operands;
+ Int32Operands int32Operands;
+ Quant8AsymmOperands quant8AsymmOperands;
+ Quant16SymmOperands quant16SymmOperands;
+ Float16Operands float16Operands;
+ Bool8Operands bool8Operands;
+ Quant8ChannelOperands quant8ChannelOperands;
+ Quant16AsymmOperands quant16AsymmOperands;
+ Quant8SymmOperands quant8SymmOperands;
+};
+typedef std::pair<MixedTyped, MixedTyped> MixedTypedExampleType;
+
+// Mixed-typed examples
+typedef struct {
+ MixedTypedExampleType operands;
+ // Specifies the RANDOM_MULTINOMIAL distribution tolerance.
+ // If set to greater than zero, the input is compared as log-probabilities
+ // to the output and must be within this tolerance to pass.
+ float expectedMultinomialDistributionTolerance = 0.0;
+} MixedTypedExample;
+
+// Go through all index-value pairs of a given input type
+template <typename T>
+inline void for_each(const std::map<int, std::vector<T>>& idx_and_data,
+ std::function<void(int, const std::vector<T>&)> execute) {
+ for (auto& i : idx_and_data) {
+ execute(i.first, i.second);
+ }
+}
+
+// non-const variant of for_each
+template <typename T>
+inline void for_each(std::map<int, std::vector<T>>& idx_and_data,
+ std::function<void(int, std::vector<T>&)> execute) {
+ for (auto& i : idx_and_data) {
+ execute(i.first, i.second);
+ }
+}
+
+// Go through all index-value pairs of a given input type
+template <typename T>
+inline void for_each(const std::map<int, std::vector<T>>& golden,
+ std::map<int, std::vector<T>>& test,
+ std::function<void(int, const std::vector<T>&, std::vector<T>&)> execute) {
+ for_each<T>(golden, [&test, &execute](int index, const std::vector<T>& g) {
+ auto& t = test[index];
+ execute(index, g, t);
+ });
+}
+
+// Go through all index-value pairs of a given input type
+template <typename T>
+inline void for_each(
+ const std::map<int, std::vector<T>>& golden, const std::map<int, std::vector<T>>& test,
+ std::function<void(int, const std::vector<T>&, const std::vector<T>&)> execute) {
+ for_each<T>(golden, [&test, &execute](int index, const std::vector<T>& g) {
+ auto t = test.find(index);
+ ASSERT_NE(t, test.end());
+ execute(index, g, t->second);
+ });
+}
+
+// internal helper for for_all
+template <typename T>
+inline void for_all_internal(std::map<int, std::vector<T>>& idx_and_data,
+ std::function<void(int, void*, size_t)> execute_this) {
+ for_each<T>(idx_and_data, [&execute_this](int idx, std::vector<T>& m) {
+ execute_this(idx, static_cast<void*>(m.data()), m.size() * sizeof(T));
+ });
+}
+
+// Go through all index-value pairs of all input types
+// expects a functor that takes (int index, void *raw data, size_t sz)
+inline void for_all(MixedTyped& idx_and_data,
+ std::function<void(int, void*, size_t)> execute_this) {
+ for_all_internal(idx_and_data.float32Operands, execute_this);
+ for_all_internal(idx_and_data.int32Operands, execute_this);
+ for_all_internal(idx_and_data.quant8AsymmOperands, execute_this);
+ for_all_internal(idx_and_data.quant16SymmOperands, execute_this);
+ for_all_internal(idx_and_data.float16Operands, execute_this);
+ for_all_internal(idx_and_data.bool8Operands, execute_this);
+ for_all_internal(idx_and_data.quant8ChannelOperands, execute_this);
+ for_all_internal(idx_and_data.quant16AsymmOperands, execute_this);
+ for_all_internal(idx_and_data.quant8SymmOperands, execute_this);
+ static_assert(9 == MixedTyped::kNumTypes,
+ "Number of types in MixedTyped changed, but for_all function wasn't updated");
+}
+
+// Const variant of internal helper for for_all
+template <typename T>
+inline void for_all_internal(const std::map<int, std::vector<T>>& idx_and_data,
+ std::function<void(int, const void*, size_t)> execute_this) {
+ for_each<T>(idx_and_data, [&execute_this](int idx, const std::vector<T>& m) {
+ execute_this(idx, static_cast<const void*>(m.data()), m.size() * sizeof(T));
+ });
+}
+
+// Go through all index-value pairs (const variant)
+// expects a functor that takes (int index, const void *raw data, size_t sz)
+inline void for_all(const MixedTyped& idx_and_data,
+ std::function<void(int, const void*, size_t)> execute_this) {
+ for_all_internal(idx_and_data.float32Operands, execute_this);
+ for_all_internal(idx_and_data.int32Operands, execute_this);
+ for_all_internal(idx_and_data.quant8AsymmOperands, execute_this);
+ for_all_internal(idx_and_data.quant16SymmOperands, execute_this);
+ for_all_internal(idx_and_data.float16Operands, execute_this);
+ for_all_internal(idx_and_data.bool8Operands, execute_this);
+ for_all_internal(idx_and_data.quant8ChannelOperands, execute_this);
+ for_all_internal(idx_and_data.quant16AsymmOperands, execute_this);
+ for_all_internal(idx_and_data.quant8SymmOperands, execute_this);
+ static_assert(
+ 9 == MixedTyped::kNumTypes,
+ "Number of types in MixedTyped changed, but const for_all function wasn't updated");
+}
+
+// Helper template - resize test output per golden
+template <typename T>
+inline void resize_accordingly_(const std::map<int, std::vector<T>>& golden,
+ std::map<int, std::vector<T>>& test) {
+ for_each<T>(golden, test,
+ [](int, const std::vector<T>& g, std::vector<T>& t) { t.resize(g.size()); });
+}
+
+template <>
+inline void resize_accordingly_<uint32_t>(const OperandDimensions& golden,
+ OperandDimensions& test) {
+ for_each<uint32_t>(
+ golden, test,
+ [](int, const std::vector<uint32_t>& g, std::vector<uint32_t>& t) { t = g; });
+}
+
+inline void resize_accordingly(const MixedTyped& golden, MixedTyped& test) {
+ resize_accordingly_(golden.operandDimensions, test.operandDimensions);
+ resize_accordingly_(golden.float32Operands, test.float32Operands);
+ resize_accordingly_(golden.int32Operands, test.int32Operands);
+ resize_accordingly_(golden.quant8AsymmOperands, test.quant8AsymmOperands);
+ resize_accordingly_(golden.quant16SymmOperands, test.quant16SymmOperands);
+ resize_accordingly_(golden.float16Operands, test.float16Operands);
+ resize_accordingly_(golden.bool8Operands, test.bool8Operands);
+ resize_accordingly_(golden.quant8ChannelOperands, test.quant8ChannelOperands);
+ resize_accordingly_(golden.quant16AsymmOperands, test.quant16AsymmOperands);
+ resize_accordingly_(golden.quant8SymmOperands, test.quant8SymmOperands);
+ static_assert(9 == MixedTyped::kNumTypes,
+ "Number of types in MixedTyped changed, but resize_accordingly function wasn't "
+ "updated");
+}
+
+template <typename T>
+void filter_internal(const std::map<int, std::vector<T>>& golden,
+ std::map<int, std::vector<T>>* filtered, std::function<bool(int)> is_ignored) {
+ for_each<T>(golden, [filtered, &is_ignored](int index, const std::vector<T>& m) {
+ auto& g = *filtered;
+ if (!is_ignored(index)) g[index] = m;
+ });
+}
+
+inline MixedTyped filter(const MixedTyped& golden, std::function<bool(int)> is_ignored) {
+ MixedTyped filtered;
+ filter_internal(golden.operandDimensions, &filtered.operandDimensions, is_ignored);
+ filter_internal(golden.float32Operands, &filtered.float32Operands, is_ignored);
+ filter_internal(golden.int32Operands, &filtered.int32Operands, is_ignored);
+ filter_internal(golden.quant8AsymmOperands, &filtered.quant8AsymmOperands, is_ignored);
+ filter_internal(golden.quant16SymmOperands, &filtered.quant16SymmOperands, is_ignored);
+ filter_internal(golden.float16Operands, &filtered.float16Operands, is_ignored);
+ filter_internal(golden.bool8Operands, &filtered.bool8Operands, is_ignored);
+ filter_internal(golden.quant8ChannelOperands, &filtered.quant8ChannelOperands, is_ignored);
+ filter_internal(golden.quant16AsymmOperands, &filtered.quant16AsymmOperands, is_ignored);
+ filter_internal(golden.quant8SymmOperands, &filtered.quant8SymmOperands, is_ignored);
+ static_assert(9 == MixedTyped::kNumTypes,
+ "Number of types in MixedTyped changed, but compare function wasn't updated");
+ return filtered;
+}
+
+// Compare results
+template <typename T>
+void compare_(const std::map<int, std::vector<T>>& golden,
+ const std::map<int, std::vector<T>>& test, std::function<void(T, T)> cmp) {
+ for_each<T>(golden, test, [&cmp](int index, const std::vector<T>& g, const std::vector<T>& t) {
+ for (unsigned int i = 0; i < g.size(); i++) {
+ SCOPED_TRACE(testing::Message()
+ << "When comparing output " << index << " element " << i);
+ cmp(g[i], t[i]);
+ }
+ });
+}
+
+// TODO: Allow passing accuracy criteria from spec.
+// Currently we only need relaxed accuracy criteria on mobilenet tests, so we return the quant8
+// tolerance simply based on the current test name.
+inline int getQuant8AllowedError() {
+ const ::testing::TestInfo* const testInfo =
+ ::testing::UnitTest::GetInstance()->current_test_info();
+ const std::string testCaseName = testInfo->test_case_name();
+ const std::string testName = testInfo->name();
+ // We relax the quant8 precision for all tests with mobilenet:
+ // - CTS/VTS GeneratedTest and DynamicOutputShapeTest with mobilenet
+ // - VTS CompilationCachingTest and CompilationCachingSecurityTest except for TOCTOU tests
+ if (testName.find("mobilenet") != std::string::npos ||
+ (testCaseName.find("CompilationCaching") != std::string::npos &&
+ testName.find("TOCTOU") == std::string::npos)) {
+ return 2;
+ } else {
+ return 1;
+ }
+}
+
+inline void compare(const MixedTyped& golden, const MixedTyped& test, float fpAtol = 1e-5f,
+ float fpRtol = 1e-5f) {
+ int quant8AllowedError = getQuant8AllowedError();
+ for_each<uint32_t>(
+ golden.operandDimensions, test.operandDimensions,
+ [](int index, const std::vector<uint32_t>& g, const std::vector<uint32_t>& t) {
+ SCOPED_TRACE(testing::Message()
+ << "When comparing dimensions for output " << index);
+ EXPECT_EQ(g, t);
+ });
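+    // Report at most gMaximumNumberOfErrorMessages failures through gtest,
+    // but keep counting every mismatch.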
+ size_t totalNumberOfErrors = 0;
+ compare_<float>(golden.float32Operands, test.float32Operands,
+ [&totalNumberOfErrors, fpAtol, fpRtol](float expected, float actual) {
+ // Compute the range based on both absolute tolerance and relative tolerance
+ float fpRange = fpAtol + fpRtol * std::abs(expected);
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_NEAR(expected, actual, fpRange);
+ }
+ if (std::abs(expected - actual) > fpRange) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<int32_t>(golden.int32Operands, test.int32Operands,
+ [&totalNumberOfErrors](int32_t expected, int32_t actual) {
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_EQ(expected, actual);
+ }
+ if (expected != actual) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<uint8_t>(golden.quant8AsymmOperands, test.quant8AsymmOperands,
+ [&totalNumberOfErrors, quant8AllowedError](uint8_t expected, uint8_t actual) {
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_NEAR(expected, actual, quant8AllowedError);
+ }
+ if (std::abs(expected - actual) > quant8AllowedError) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<int16_t>(golden.quant16SymmOperands, test.quant16SymmOperands,
+ [&totalNumberOfErrors](int16_t expected, int16_t actual) {
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_NEAR(expected, actual, 1);
+ }
+ if (std::abs(expected - actual) > 1) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<_Float16>(golden.float16Operands, test.float16Operands,
+ [&totalNumberOfErrors, fpAtol, fpRtol](_Float16 expected, _Float16 actual) {
+ // Compute the range based on both absolute tolerance and relative
+ // tolerance
+ float fpRange = fpAtol + fpRtol * std::abs(static_cast<float>(expected));
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_NEAR(expected, actual, fpRange);
+ }
+ if (std::abs(static_cast<float>(expected - actual)) > fpRange) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<bool8>(golden.bool8Operands, test.bool8Operands,
+ [&totalNumberOfErrors](bool expected, bool actual) {
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_EQ(expected, actual);
+ }
+ if (expected != actual) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<int8_t>(golden.quant8ChannelOperands, test.quant8ChannelOperands,
+ [&totalNumberOfErrors, &quant8AllowedError](int8_t expected, int8_t actual) {
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_NEAR(expected, actual, quant8AllowedError);
+ }
+ if (std::abs(static_cast<int>(expected) - static_cast<int>(actual)) >
+ quant8AllowedError) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<uint16_t>(golden.quant16AsymmOperands, test.quant16AsymmOperands,
+ [&totalNumberOfErrors](uint16_t expected, uint16_t actual) {
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_NEAR(expected, actual, 1);
+ }
+ if (std::abs(expected - actual) > 1) {
+ totalNumberOfErrors++;
+ }
+ });
+ compare_<int8_t>(golden.quant8SymmOperands, test.quant8SymmOperands,
+ [&totalNumberOfErrors, quant8AllowedError](int8_t expected, int8_t actual) {
+ if (totalNumberOfErrors < gMaximumNumberOfErrorMessages) {
+ EXPECT_NEAR(expected, actual, quant8AllowedError);
+ }
+ if (std::abs(static_cast<int>(expected) - static_cast<int>(actual)) >
+ quant8AllowedError) {
+ totalNumberOfErrors++;
+ }
+ });
+
+ static_assert(9 == MixedTyped::kNumTypes,
+ "Number of types in MixedTyped changed, but compare function wasn't updated");
+ EXPECT_EQ(size_t{0}, totalNumberOfErrors);
+}
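+
+// Worked example (illustrative): with the default tolerances
+// fpAtol = fpRtol = 1e-5f, a golden float32 value of 100.0f yields
+//   fpRange = 1e-5f + 1e-5f * 100.0f ~= 1.00001e-3f
+// so an actual value of 100.0005f passes while 100.01f is counted as an error.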
+
+// Calculates the expected probability from the unnormalized log-probability of
+// each class in the input and compares it to the actual occurrence of that class
+// in the output.
+inline void expectMultinomialDistributionWithinTolerance(const MixedTyped& test,
+ const MixedTypedExample& example) {
+ // TODO: These should be parameters but aren't currently preserved in the example.
+ const int kBatchSize = 1;
+ const int kNumClasses = 1024;
+ const int kNumSamples = 128;
+
+ std::vector<int32_t> output = test.int32Operands.at(0);
+ std::vector<int> class_counts;
+ class_counts.resize(kNumClasses);
+ for (int index : output) {
+ class_counts[index]++;
+ }
+ std::vector<float> input;
+ Float32Operands float32Operands = example.operands.first.float32Operands;
+ if (!float32Operands.empty()) {
+ input = example.operands.first.float32Operands.at(0);
+ } else {
+ std::vector<_Float16> inputFloat16 = example.operands.first.float16Operands.at(0);
+ input.resize(inputFloat16.size());
+ convertFloat16ToFloat32(inputFloat16.data(), &input);
+ }
+ for (int b = 0; b < kBatchSize; ++b) {
+ float probability_sum = 0;
+ const int batch_index = kNumClasses * b; // index of the first logit of batch b
+ for (int i = 0; i < kNumClasses; ++i) {
+ probability_sum += expf(input[batch_index + i]);
+ }
+ for (int i = 0; i < kNumClasses; ++i) {
+ float probability =
+ static_cast<float>(class_counts[i]) / static_cast<float>(kNumSamples);
+ float probability_expected = expf(input[batch_index + i]) / probability_sum;
+ EXPECT_THAT(probability,
+ ::testing::FloatNear(probability_expected,
+ example.expectedMultinomialDistributionTolerance));
+ }
+ }
+}
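+
+// Worked example (illustrative): for unnormalized log-probabilities
+// {0.0f, ln(3.0f)} over two classes, the expected probabilities are
+// {0.25f, 0.75f}; with kNumSamples = 128 the check expects roughly 32 and 96
+// occurrences of each class, within the example's stated tolerance.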
+
+} // namespace test_helper
+
+#endif // ANDROID_FRAMEWORKS_ML_NN_TOOLS_TEST_GENERATOR_TEST_HARNESS_H
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/spec_visualizer.py b/tests/nnapi/nnapi_test_generator/android-q-beta/spec_visualizer.py
new file mode 100644
index 000000000..73a9628b7
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/spec_visualizer.py
@@ -0,0 +1,266 @@
+#!/usr/bin/python3
+
+# Copyright 2019, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Spec Visualizer
+
+Visualize python spec file for test generator.
+Invoked by ml/nn/runtime/test/specs/visualize_spec.sh;
+See that script for details on how this script is used.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import argparse
+import json
+import os
+import sys
+from string import Template
+
+# Stuff from test generator
+import test_generator as tg
+from test_generator import ActivationConverter
+from test_generator import BoolScalar
+from test_generator import Configuration
+from test_generator import DataTypeConverter
+from test_generator import DataLayoutConverter
+from test_generator import Example
+from test_generator import Float16Scalar
+from test_generator import Float32Scalar
+from test_generator import Float32Vector
+from test_generator import GetJointStr
+from test_generator import IgnoredOutput
+from test_generator import Input
+from test_generator import Int32Scalar
+from test_generator import Int32Vector
+from test_generator import Internal
+from test_generator import Model
+from test_generator import Operand
+from test_generator import Output
+from test_generator import Parameter
+from test_generator import ParameterAsInputConverter
+from test_generator import RelaxedModeConverter
+from test_generator import SymmPerChannelQuantParams
+
+
+TEMPLATE_FILE = os.path.join(os.path.dirname(os.path.realpath(__file__)), "spec_viz_template.html")
+global_graphs = dict()
+
+
+def FormatArray(data, is_scalar=False):
+ if is_scalar:
+ assert len(data) == 1
+ return str(data[0])
+ else:
+ return "[%s]" % (", ".join(str(i) for i in data))
+
+
+def FormatDict(data):
+ return "<br/>".join("<b>%s:</b> %s"%(k.capitalize(), v) for k, v in data.items())
+
+
+def GetOperandInfo(op):
+ op_info = {"lifetime": op.lifetime, "type": op.type.type}
+
+ if not op.type.IsScalar():
+ op_info["dimensions"] = FormatArray(op.type.dimensions)
+
+ if op.type.scale != 0:
+ op_info["scale"] = op.type.scale
+ op_info["zero point"] = op.type.zeroPoint
+ if op.type.type == "TENSOR_QUANT8_SYMM_PER_CHANNEL":
+ op_info["scale"] = FormatArray(op.type.extraParams.scales)
+ op_info["channel dim"] = op.type.extraParams.channelDim
+
+ return op_info
+
+
+def FormatOperand(op):
+ # All keys and values in op_info will appear in the tooltip. We only display the operand data
+ # if it holds at most 10 elements. This should be convenient enough for most parameters.
+ op_info = GetOperandInfo(op)
+ if isinstance(op, Parameter) and len(op.value) <= 10:
+ op_info["data"] = FormatArray(op.value, op.type.IsScalar())
+
+ template = "<span class='tooltip'><span class='tooltipcontent'>{tooltip_content}</span><a href=\"{inpage_link}\">{op_name}</a></span>"
+ return template.format(
+ op_name=str(op),
+ tooltip_content=FormatDict(op_info),
+ inpage_link="#details-operands-%d" % (op.model_index),
+ )
+
+
+def GetSubgraph(example):
+ """Produces the nodes and edges information for d3 visualization."""
+
+ node_index_map = {}
+ topological_order = []
+
+ def AddToTopologicalOrder(op):
+ if op not in node_index_map:
+ node_index_map[op] = len(topological_order)
+ topological_order.append(op)
+
+ # Get the topological order, both operands and operations are treated the same.
+ # Given that the example.model.operations is already topologically sorted, here we simply
+ # iterate through and insert inputs and outputs.
+ for op in example.model.operations:
+ for i in op.ins:
+ AddToTopologicalOrder(i)
+ AddToTopologicalOrder(op)
+ for o in op.outs:
+ AddToTopologicalOrder(o)
+
+ # Assign layers to the nodes.
+ layers = {}
+ for node in topological_order:
+ layers[node] = max([layers[i] for i in node.ins], default=-1) + 1
+ for node in reversed(topological_order):
+ layers[node] = min([layers[o] for o in node.outs], default=layers[node]+1) - 1
+ num_layers = max(layers.values()) + 1
+
+ # Assign coordinates to the nodes. Nodes are equally spaced.
+ CoordX = lambda index: (index + 0.5) * 200 # 200px spacing horizontally
+ CoordY = lambda index: (index + 0.5) * 100 # 100px spacing vertically
+ coords = {}
+ layer_cnt = [0] * num_layers
+ for node in topological_order:
+ coords[node] = (CoordX(layer_cnt[layers[node]]), CoordY(layers[node]))
+ layer_cnt[layers[node]] += 1
+
+ # Create edges and nodes dictionaries for d3 visualization.
+ OpName = lambda idx: "operation%d" % idx
+ edges = []
+ nodes = []
+ for ind, op in enumerate(example.model.operations):
+ for tensor in op.ins:
+ edges.append({
+ "source": str(tensor),
+ "target": OpName(ind)
+ })
+ for tensor in op.outs:
+ edges.append({
+ "target": str(tensor),
+ "source": OpName(ind)
+ })
+ nodes.append({
+ "index": ind,
+ "id": OpName(ind),
+ "name": op.optype,
+ "group": 2,
+ "x": coords[op][0],
+ "y": coords[op][1],
+ })
+
+ for ind, op in enumerate(example.model.operands):
+ nodes.append({
+ "index": ind,
+ "id": str(op),
+ "name": str(op),
+ "group": 1,
+ "x": coords[op][0],
+ "y": coords[op][1],
+ })
+
+ return {"nodes": nodes, "edges": edges}
+
+
+# The following Get**Info methods will each return a list of dictionaries,
+# whose content will appear in the tables and sidebar views.
+def GetConfigurationsInfo(example):
+ return [{
+ "relaxed": str(example.model.isRelaxed),
+ "use shared memory": str(tg.Configuration.useSHM()),
+ "expect failure": str(example.expectFailure),
+ }]
+
+
+def GetOperandsInfo(example):
+ ret = []
+ for index, op in enumerate(example.model.operands):
+ ret.append({
+ "index": index,
+ "name": str(op),
+ "group": "operand"
+ })
+ ret[-1].update(GetOperandInfo(op))
+ if isinstance(op, Parameter):
+ ret[-1]["data"] = FormatArray(op.value, op.type.IsScalar())
+ elif isinstance(op, Input):
+ ret[-1]["data"] = FormatArray(example.feedDicts[0][0][op], op.type.IsScalar())
+ elif isinstance(op, Output) and not isinstance(op, IgnoredOutput):
+ ret[-1]["data"] = FormatArray(example.feedDicts[0][1][op], op.type.IsScalar())
+ return ret
+
+
+def GetOperationsInfo(example):
+ return [{
+ "index": index,
+ "name": op.optype,
+ "group": "operation",
+ "opcode": op.optype,
+ "inputs": ", ".join(FormatOperand(i) for i in op.ins),
+ "outputs": ", ".join(FormatOperand(o) for o in op.outs),
+ } for index,op in enumerate(example.model.operations)]
+
+
+# TODO: Remove the unused fd from the parameter.
+def ProcessExample(example, fd):
+ """Process an example and save the information into the global dictionary global_graphs."""
+
+ global global_graphs
+ print(" Processing variation %s" % example.testName)
+ global_graphs[str(example.testName)] = {
+ "subgraph": GetSubgraph(example),
+ "details": {
+ "configurations": GetConfigurationsInfo(example),
+ "operands": GetOperandsInfo(example),
+ "operations": GetOperationsInfo(example)
+ }
+ }
+
+
+def DumpHtml(spec_file, out_file):
+ """Dump the final HTML file by replacing entries from a template file."""
+
+ with open(TEMPLATE_FILE, "r") as template_fd:
+ html_template = template_fd.read()
+
+ with open(out_file, "w") as out_fd:
+ out_fd.write(Template(html_template).substitute(
+ spec_name=os.path.basename(spec_file),
+ graph_dump=json.dumps(global_graphs),
+ ))
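+
+# Illustrative sketch of the string.Template mechanics used above (the file
+# name is hypothetical):
+#   Template("<title>$spec_name</title>").substitute(spec_name="conv2d.mod.py")
+# returns "<title>conv2d.mod.py</title>"; $graph_dump in the template is
+# replaced the same way with the JSON serialization of global_graphs.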
+
+
+def ParseCmdLine():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("spec", help="the spec file")
+ parser.add_argument("-o", "--out", help="the output html path", default="out.html")
+ args = parser.parse_args()
+ tg.FileNames.InitializeFileLists(args.spec, "-", "-", "-")
+ tg.FileNames.NextFile()
+ return os.path.abspath(args.spec), os.path.abspath(args.out)
+
+
+if __name__ == '__main__':
+ spec_file, out_file = ParseCmdLine()
+ print("Visualizing from spec: %s" % spec_file)
+ exec(open(spec_file, "r").read())
+ Example.DumpAllExamples(DumpExample=ProcessExample, example_fd=0)
+ DumpHtml(spec_file, out_file)
+ print("Output HTML file: %s" % out_file)
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/spec_viz_template.html b/tests/nnapi/nnapi_test_generator/android-q-beta/spec_viz_template.html
new file mode 100644
index 000000000..befe469dc
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/spec_viz_template.html
@@ -0,0 +1,438 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>$spec_name</title>
+ <style>
+ body {
+ font-family: "Roboto", sans-serif;
+ margin: 0;
+ height: 100%;
+ background-color: rgb(61, 65, 77);
+ }
+
+ #main {
+ width: 62%;
+ transition: 0.5s;
+ }
+
+ #main h1 {
+ padding: 20px;
+ color: #eee;
+ font-size: 24px;
+ }
+
+ .subgraph h3 {
+ text-transform: capitalize;
+ }
+
+ .subgraph {
+ padding: 20px;
+ margin: 20px;
+ border-radius: 10px;
+ background-color: #fff;
+ }
+
+ .subgraph table {
+ border-collapse: collapse;
+ border-spacing: 0;
+ }
+
+ .subgraph thead {
+ background-color: rgb(61, 65, 77);
+ color: white;
+ text-transform: capitalize;
+ }
+
+ .subgraph tbody tr:nth-child(odd) {
+ background-color: #f2f2f2;
+ }
+
+ .subgraph tbody tr:hover {
+ background-color: #d8d8d8;
+ }
+
+ .subgraph td {
+ border: 1px solid #ddd;
+ padding: 8px;
+ }
+
+ .subgraph select {
+ font-weight: bold;
+ text-transform: uppercase;
+ font-size: 18px;
+ color: black;
+ }
+
+ .subgraph svg {
+ background: white;
+ border: 1px solid #ccc;
+ }
+
+ .subgraph .edges line {
+ stroke: #333;
+ }
+
+ .subgraph .nodes text {
+ color: black;
+ pointer-events: none;
+ font-family: sans-serif;
+ font-size: 11px;
+ }
+
+ #sidebar {
+ height: 100%;
+ width: 38%;
+ position: fixed;
+ z-index: 1;
+ top: 0;
+ right: 0;
+ background-color: #eee;
+ overflow-x: hidden;
+ transition: 0.5s;
+ border-left: 1px solid #ccc;
+ }
+
+ #sidebar #sidebar-main {
+ padding: 50px;
+ }
+
+ #sidebar h1 {
+ margin-top: 6px;
+ margin-bottom: 24px;
+ font-weight: bold;
+ font-size: 18px;
+ text-transform: uppercase;
+ }
+
+ #sidebar .subtitle {
+ margin-bottom: 6px;
+ border-bottom: 1px solid #ccc;
+ padding-bottom: 4px;
+ font-weight: bold;
+ font-size: 12px;
+ text-transform: uppercase;
+ color: #555;
+ }
+
+ #sidebar .property {
+ display: block;
+ margin-bottom: 16px;
+ }
+
+ #sidebar .property_title {
+ float: left;
+ width: 80px;
+ margin-top: 0;
+ padding-top: 10px;
+ font-weight: bold;
+ font-size: 12px;
+ text-transform: uppercase;
+ color: #555;
+ }
+
+ #sidebar .property_text {
+ margin-top: 8px;
+ margin-left: 100px;
+ border: 1px solid #ccc;
+ border-radius: 2px;
+ padding: 8px;
+ font-size: 14px;
+ background-color: #fff;
+ }
+
+ #sidebar .closebtn {
+ position: absolute;
+ top: 0;
+ right: 25px;
+ font-size: 36px;
+ margin-left: 50px;
+ text-decoration: none;
+ color: #555;
+ }
+
+ .tooltip {
+ color: blue;
+ }
+
+ .tooltip .tooltipcontent {
+ visibility: hidden;
+ color: black;
+ background-color: #eee;
+ margin-top: 18px;
+ padding: 5px;
+ border: 1px solid #ccc;
+ border-radius: 4px;
+ position: absolute;
+ z-index: 1;
+ }
+
+ .tooltip:hover .tooltipcontent {
+ visibility: visible;
+ }
+ </style>
+ <link href="https://fonts.googleapis.com/css?family=Roboto&display=swap" rel="stylesheet" />
+ <script src="https://d3js.org/d3.v4.min.js"></script>
+ <script>
+ graphs = $graph_dump;
+ </script>
+</head>
+
+<body>
+ <div id="main">
+ <h1>$spec_name</h1>
+ <div class="subgraph" id="main-subgraph">
+ <label for="main-selector">Choose a subgraph: </label>
+ <select id="main-selector" onchange="renderSubgraph(this.value)"></select>
+ <div id="main-tables"></div>
+ <h3>Visual Graph</h3>
+ <svg id="subgraph-svg" width="100%" height="720"></svg>
+ </div>
+ </div>
+
+ <div id="sidebar">
+ <div id="sidebar-main">
+ </div>
+ </div>
+
+ <script>
+ // Render the sidebar view of a given node object.
+ // The node must have "name" and "group" fields available.
+ function renderSidebar(node) {
+ var sidebar = document.getElementById("sidebar-main");
+ sidebar.innerHTML = "";
+ if (node == null) return;
+
+ // Sidebar subtitle -- text taken from node.group.
+ var subtitle = document.createElement("p");
+ subtitle.classList.add("subtitle");
+ subtitle.innerHTML = node.group;
+ sidebar.appendChild(subtitle);
+
+ // Sidebar title -- text taken from node.name.
+ var title = document.createElement("h1");
+ title.innerHTML = node.name;
+ sidebar.appendChild(title);
+
+ // List all the other fields in sidebar.
+ var ignoredFields = ["name", "group"];
+ for (var property in node) {
+ if (ignoredFields.includes(property)) continue;
+
+ var propertyTitle = document.createElement("h2");
+ propertyTitle.classList.add("property_title");
+ propertyTitle.innerHTML = property;
+
+ var propertyText = document.createElement("p");
+ propertyText.classList.add("property_text");
+ propertyText.innerHTML = node[property];
+
+ var propertyDiv = document.createElement("div");
+ propertyDiv.classList.add("property");
+ propertyDiv.appendChild(propertyTitle);
+ propertyDiv.appendChild(propertyText);
+ sidebar.appendChild(propertyDiv);
+ }
+ }
+
+ // Render the SVG DAG visualization, from TFLite graph visualizer.
+ // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/tools/visualize.py
+ //
+ // The node coordinates are pre-calculated by the Python visualizer.
+ function renderSvg(subgraph) {
+ var data = graphs[subgraph]["subgraph"];
+ var svg = d3.select("#subgraph-svg");
+ svg.selectAll("*").remove();
+ var width = svg.attr("width");
+ var height = svg.attr("height");
+ // Make the graph scrollable.
+ svg = svg.call(d3.zoom().on("zoom", function () {
+ svg.attr("transform", d3.event.transform);
+ })).append("g");
+ var color = d3.scaleOrdinal(d3.schemeDark2);
+ var simulation = d3.forceSimulation()
+ .force("link", d3.forceLink().id(function (d) { return d.id; }))
+ .force("charge", d3.forceManyBody())
+ .force("center", d3.forceCenter(0.5 * width, 0.5 * height));
+ var edge = svg.append("g").attr("class", "edges").selectAll("line")
+ .data(data.edges).enter().append("path").attr("stroke", "black").attr("fill", "none")
+ // Make the node group
+ var node = svg.selectAll(".nodes")
+ .data(data.nodes)
+ .enter().append("g")
+ .attr("x", function (d) { return d.x })
+ .attr("y", function (d) { return d.y })
+ .attr("transform", function (d) {
+ return "translate( " + d.x + ", " + d.y + ")"
+ })
+ .attr("class", "nodes")
+ .call(d3.drag()
+ .on("start", function (d) {
+ if (!d3.event.active) simulation.alphaTarget(1.0).restart();
+ d.fx = d.x; d.fy = d.y;
+ })
+ .on("drag", function (d) {
+ d.fx = d3.event.x; d.fy = d3.event.y;
+ })
+ .on("end", function (d) {
+ if (!d3.event.active) simulation.alphaTarget(0);
+ d.fx = d.fy = null;
+ }));
+ // Within the group, draw a box for the node position and text
+ // on the side.
+ var node_width = 150;
+ var node_height = 30;
+ node.append("rect")
+ .attr("r", "5px")
+ .attr("width", function (d) { return d.group == 1 ? node_width : node_width + 50; })
+ .attr("height", node_height)
+ .attr("rx", function (d) { return d.group == 1 ? 1 : 10; })
+ .attr("stroke", "#000000")
+ .attr("fill", function (d) { return d.group == 1 ? "#dddddd" : "#000000"; })
+ .attr("onclick", function (d) {
+ return "renderSidebar(graphs." + subgraph + ".details." +
+ (d.group == 1 ? "operands" : "operations") + "[" +
+ d.index.toString() + "])";
+ });
+ node.append("text")
+ .text(function (d) { return d.name; })
+ .attr("x", 5)
+ .attr("y", 20)
+ .attr("fill", function (d) { return d.group == 1 ? "#000000" : "#eeeeee"; })
+ // Setup force parameters and update position callback
+ var node = svg.selectAll(".nodes")
+ .data(data.nodes);
+ // Bind the links
+ var name_to_g = {}
+ node.each(function (data, index, nodes) {
+ name_to_g[data.id] = this;
+ });
+ function proc(w, t) {
+ return parseInt(w.getAttribute(t));
+ }
+ edge.attr("d", function (d) {
+ function lerp(t, a, b) {
+ return (1.0 - t) * a + t * b;
+ }
+ var x1 = proc(name_to_g[d.source], "x") + node_width / 2;
+ var y1 = proc(name_to_g[d.source], "y") + node_height;
+ var x2 = proc(name_to_g[d.target], "x") + node_width / 2;
+ var y2 = proc(name_to_g[d.target], "y");
+ var s = "M " + x1 + " " + y1
+ + " C " + x1 + " " + lerp(.5, y1, y2)
+ + " " + x2 + " " + lerp(.5, y1, y2)
+ + " " + x2 + " " + y2
+ return s;
+ });
+ }
+
+ // Open a new window and present the full text data.
+ function showFullData(data) {
+ window.open().document.write(data);
+ }
+
+ // Renders a single table.
+ function renderTable(title, data, headers) {
+ var parent = document.getElementById("main-tables");
+
+ // Create heading.
+ var heading = document.createElement("h3");
+ heading.innerHTML = title;
+ parent.appendChild(heading);
+
+ // Filter out headers that do not appear in any data element.
+ headers = headers.filter(function (key) {
+ return data.some(function (elem) { return key in elem; });
+ });
+
+ // Render the table headers.
+ var table = document.createElement("table");
+ let header = table.createTHead().insertRow();
+ for (let key of headers) { header.insertCell().innerHTML = key; }
+
+ // Render the table body.
+ // Since the "data" field could be very large, we omit the full content and
+ // append a "View Full" button to the end.
+ var omittableFields = ["data"];
+ let body = table.createTBody();
+ for (const [index, elem] of data.entries()) {
+ let row = body.insertRow();
+ row.id = "details-" + title.toLowerCase() + "-" + index.toString();
+
+ for (let key of headers) {
+ var cell = row.insertCell();
+ var data = key in elem ? elem[key] : "-";
+ if (omittableFields.includes(key) && data.length > 100) {
+ // If the data exceeds the length limit, only print the first 80 and
+ // the last 20 characters.
+ data = data.substring(0, 80) + " ... " +
+ data.substring(data.length - 20, data.length) + " ";
+ cell.innerHTML = data;
+
+ // Append a "View Full" button to the end.
+ var href = document.createElement("a");
+ href.innerHTML = "View Full";
+ href.href = "javascript:void(0)";
+ href.onclick = function () { showFullData(elem[key]); };
+ cell.appendChild(href);
+ } else {
+ cell.innerHTML = data;
+ }
+ }
+ }
+ parent.appendChild(table);
+ }
+
+ function renderTables(subgraph) {
+ document.getElementById("main-tables").innerHTML = "";
+ renderTable("Configurations", graphs[subgraph].details.configurations, [
+ "relaxed",
+ "use shared memory",
+ "expect failure"
+ ]);
+ renderTable("Operands", graphs[subgraph].details.operands, [
+ "index",
+ "name",
+ "type",
+ "dimensions",
+ "scale",
+ "zero point",
+ "channel dim",
+ "lifetime",
+ "data"
+ ]);
+ renderTable("Operations", graphs[subgraph].details.operations, [
+ "index",
+ "opcode",
+ "inputs",
+ "outputs"
+ ]);
+ }
+
+ // Re-render all the information related to a subgraph.
+ // Invoked every time the main-selector changes.
+ function renderSubgraph(subgraph) {
+ renderTables(subgraph);
+ renderSvg(subgraph);
+ renderSidebar(null); // Clear sidebar.
+ }
+
+ // Renders the main-selector and the first subgraph choice in the main-selector.
+ // Invoked only once, when the page first loads.
+ function renderMain() {
+ var selector = document.getElementById("main-selector");
+ var first = true;
+ for (var subgraph in graphs) {
+ var option = document.createElement("option");
+ option.value = subgraph;
+ option.text = subgraph;
+ selector.appendChild(option);
+ if (first) {
+ first = false;
+ renderSubgraph(subgraph);
+ }
+ }
+ }
+ renderMain();
+ </script>
+</body>
+</html>
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/test_generator.py b/tests/nnapi/nnapi_test_generator/android-q-beta/test_generator.py
new file mode 100644
index 000000000..f49385c78
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/test_generator.py
@@ -0,0 +1,1236 @@
+#!/usr/bin/python3
+
+# Copyright 2017, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""NN model compiler
+
+Contains class definitions and utility functions for compiling models and
+examples into NDK-based CTS and VTS unit tests.
+
+Used by cts_generator.py, vts_generator.py, and spec_visualizer.py
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import argparse
+import copy
+from functools import reduce
+import itertools
+import math
+import os
+import re
+import struct
+import sys
+import contextlib
+import pprint
+import numpy as np
+
+def GetJointStr(l, sep=", ", method=str):
+ return sep.join([method(i) for i in l])
+
+# Print in C float literal format
+def PrettyPrintAsFloat(x):
+ s = str(float(x))
+ if s.find(".") >= 0 or s.find("e") >= 0:
+ return s + "f"
+ else:
+ return s + ".0f"
+
+# Transform from original type to float32
+def Dequantize(v, ty):
+ v -= ty.zeroPoint
+ if ty.scale != 0:
+ v *= ty.scale
+ if isinstance(ty.extraParams, SymmPerChannelQuantParams):
+ v *= ty.extraParams.GetScalesBroadcastArray(ty.dimensions)
+ return v
+
+# Transform float32 to target data type
+def Quantize(v, ty):
+ if ty.scale != 0:
+ v /= ty.scale
+ if isinstance(ty.extraParams, SymmPerChannelQuantParams):
+ v = v / ty.extraParams.GetScalesBroadcastArray(ty.dimensions)
+ v += ty.zeroPoint
+ if not ty.IsFloat():
+ v = np.round(v)
+ v = int(v) if np.isscalar(v) else v.astype(int)
+ if ty.type == "TENSOR_QUANT8_ASYMM":
+ v = np.minimum(np.maximum(v, 0), 255)
+ elif ty.type == "TENSOR_QUANT16_ASYMM":
+ v = np.minimum(np.maximum(v, 0), 65535)
+ elif ty.type == "TENSOR_QUANT8_SYMM_PER_CHANNEL":
+ v = np.minimum(np.maximum(v, -127), 127)
+ elif ty.type == "UINT32":
+ v = np.maximum(v, 0)
+ return v
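+
+# Worked example (illustrative): for a TENSOR_QUANT8_ASYMM type with scale=0.5
+# and zeroPoint=128, Quantize maps 1.0 -> round(1.0 / 0.5 + 128) = 130 and
+# clamps the result into [0, 255]; Dequantize maps 130 back to
+# (130 - 128) * 0.5 = 1.0.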
+
+@contextlib.contextmanager
+def SmartOpen(filename=None, mode="w"):
+ if filename and filename != '-':
+ fh = open(filename, mode)
+ else:
+ fh = sys.stdout
+
+ try:
+ yield fh
+ finally:
+ if fh is not sys.stdout:
+ fh.close()
+
+# Tracking objects inside a model with a unique name
+class NamedObject:
+ existingNames = set()
+
+ def __init__(self, *args, sep="_", showZero=False, startsFrom=0, skipRenaming=False):
+ name = GetJointStr([i for i in args if i is not None and i != ""], sep=sep)
+ if skipRenaming:
+ self.name = name
+ return
+ # make the name unique by renaming with a suffix number
+ uniqueName = name if showZero is False else name + sep + str(startsFrom)
+ while uniqueName in self.__class__.existingNames:
+ startsFrom += 1
+ uniqueName = name + sep + str(startsFrom)
+ self.__class__.existingNames.add(uniqueName)
+ self.name = uniqueName
+
+ def __str__(self):
+ return self.name
+ __repr__ = __str__
+
+ # Since names are unique, objects with the same name are considered equal
+ def __eq__(self, other):
+ return isinstance(other, NamedObject) and self.name == other.name
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return hash(self.name)
+
+ def __lt__(self, other):
+ return self.name < other.name
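+
+# Example (illustrative): NamedObject("op") is named "op"; a second
+# NamedObject("op") is renamed to "op_1" and a third to "op_2". With
+# showZero=True the first instance is already suffixed, which is how Type
+# below produces "type0", "type1", ... (using sep="").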
+
+# Types and operands should all have unique names since they share the same namespace
+class NamedVariable(NamedObject):
+ existingNames = set()
+ def __init__(self, *args, sep="_", showZero=False, startsFrom=0, skipRenaming=False):
+ NamedObject.__init__(self, *args, sep=sep, showZero=showZero,
+ startsFrom=startsFrom, skipRenaming=skipRenaming)
+
+# Global variables in the spec namespace such as CreateModel, is_ignored, and examples
+class GlobalVariable(NamedVariable):
+ def __init__(self, *args, skipRenaming=False):
+ NamedObject.__init__(self, *args, startsFrom=1, skipRenaming=skipRenaming)
+
+# Each test should have a unique name, but will not conflict with variables
+class NamedTest(NamedObject):
+ existingNames = set()
+ def __init__(self, *args, startsFrom=0, skipRenaming=False):
+ NamedObject.__init__(self, *args, startsFrom=1, skipRenaming=skipRenaming)
+
+class Type(NamedVariable):
+ typesMap = dict()
+ typeLookup = {
+ "INT32": "int32_t",
+ "UINT32": "uint32_t",
+ "FLOAT32": "float",
+ "FLOAT16": "_Float16",
+ "TENSOR_INT32": "int32_t",
+ "TENSOR_FLOAT16": "_Float16",
+ "TENSOR_FLOAT32": "float",
+ "TENSOR_QUANT8_ASYMM": "uint8_t",
+ "TENSOR_QUANT8_SYMM": "int8_t",
+ "BOOL": "bool8",
+ "TENSOR_QUANT16_ASYMM": "uint16_t",
+ "TENSOR_QUANT16_SYMM": "int16_t",
+ "TENSOR_BOOL8": "bool8",
+ "TENSOR_QUANT8_SYMM_PER_CHANNEL": "int8_t",
+# "OEM_SCALAR": this is service-defined.
+ "TENSOR_OEM_BYTE": "uint8_t",
+ }
+
+ # types are named as "type0", "type1", ...
+ def __init__(self, vt, dimensions, scale, zeroPoint, name="type", skipRenaming=False,
+ extraParams=None):
+ NamedVariable.__init__(self, name, sep="", showZero=True, skipRenaming=skipRenaming)
+ self.type = vt
+ self.dimensions = dimensions
+ self.scale = float(scale)
+ self.zeroPoint = int(zeroPoint)
+ self.extraParams = extraParams
+
+ # Factory for Type objects; only creates a new Type if the requested type
+ # does not match any existing type
+ @staticmethod
+ def GetType(vt, dimensions, scale=0, zeroPoint=0, extraParams=None):
+ key = ",".join([vt, str(dimensions), str(scale), str(zeroPoint), str(extraParams)])
+ if key not in Type.typesMap:
+ Type.typesMap[key] = Type(vt, dimensions, scale, zeroPoint, extraParams=extraParams)
+ return Type.typesMap[key]
+
+ @staticmethod
+ def GetAllTypes():
+ # sort to ensure a stable order when dumping the code
+ return sorted(Type.typesMap.values())
+
+ # For backward-compatibility
+ @staticmethod
+ def GetTypeFromString(vt, shape, extraParams=None):
+ dimensions, scale, zeroPoint = Type.GetParsedShape(shape)
+ scale = float(scale)
+ zeroPoint = int(zeroPoint)
+ return Type.GetType(vt, dimensions, scale, zeroPoint, extraParams)
+
+ # For backward-compatibility
+ @staticmethod
+ def GetParsedShape(shape):
+ # Parse shape
+ if (shape != "" and shape != "{}"):
+ left, sep, right = shape.partition('{')
+ real_shape, sep, right = right.partition('}')
+ shape = [int(x) for x in real_shape.split(",")]
+ # left now looks like "0.0f, 127.5f, "
+ scale, sep, zero_point = right.rpartition(',')
+ if scale == "":
+ if zero_point == "":
+ return shape, "0", "0"
+ return shape, zero_point, "0"
+ left, sep, scale = scale.partition(',')
+ return shape, scale.replace("f", ""), zero_point
+ else:
+ return [], "0", "0"
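+
+ # Example (illustrative): GetParsedShape("{2, 3}, 0.5f, 127") returns
+ # ([2, 3], " 0.5", " 127") (whitespace is stripped by the float()/int() calls
+ # in GetTypeFromString), and GetParsedShape("{2, 3}") returns ([2, 3], "0", "0").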
+
+ def GetNumberOfElements(self):
+ return reduce(lambda x,y: x*y, self.dimensions, 1)
+
+ def GetCppTypeString(self):
+ return Type.typeLookup[self.type]
+
+ def IsFloat(self):
+ return self.GetCppTypeString() in ["float", "_Float16"]
+
+ def IsBool(self):
+ return self.GetCppTypeString() == "bool8"
+
+ def IsScalar(self):
+ return not self.type.startswith("TENSOR_")
+
+ def GetElementByteSize(self):
+ cppTypeString = self.GetCppTypeString()
+ if cppTypeString in ["uint8_t", "int8_t", "bool8"]:
+ return 1
+ elif cppTypeString in ["int16_t", "uint16_t", "_Float16"]:
+ return 2
+ else:
+ return 4
+
+ def GetByteSize(self):
+ return self.GetElementByteSize() * self.GetNumberOfElements()
+
+ def GetDimensionsString(self):
+ return "{" + GetJointStr(self.dimensions) + "}"
+
+ def GetSignatureTuple(self):
+ return (self.type, self.dimensions, self.scale, self.zeroPoint)
+
+ def ToUnspecifiedDim(self):
+ return Type.GetType(self.type, [0] * len(self.dimensions), self.scale, self.zeroPoint)
+
+# To track implicitly convertible parameter types
+class ImplicitParameter():
+ @staticmethod
+ def ImplicitConvertion(value):
+ if isinstance(value, Operand):
+ return value
+ for implicitType in ImplicitParameter.__subclasses__():
+ if implicitType.IsCompatible(value):
+ return implicitType("param", value)
+ assert False, "%s not supported for implicit parameter"%value
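+
+# Example (illustrative): passing a raw Python value such as 2, 2.0, True, or
+# [1, 2] as an operation input is converted here into Int32Scalar,
+# Float32Scalar, BoolScalar, or Int32Vector respectively (see the shortcut
+# classes below), each named "param".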
+
+
+# ExtraParams with per-channel quantization.
+class SymmPerChannelQuantParams():
+ def __init__(self, channelDim, scales, hide = False):
+ self.channelDim = channelDim
+ self.scales = scales
+ self.hide = hide
+
+ def GetScalesBroadcastArray(self, dimensions):
+ bshape = [1] * len(dimensions)
+ bshape[self.channelDim] = len(self.scales)
+ return np.array(self.scales).reshape(bshape)
+
+ def GetConstructor(self):
+ return "SymmPerChannelQuantParams({%s},%d)" % (
+ ", ".join(str(x) + "f" for x in self.scales), self.channelDim)
+
+ def GetVtsSetter(self):
+ return "channelQuant"
+
+ def GetVtsConstructor(self):
+ return "SymmPerChannelQuantParams{.scales={%s}, .channelDim=%d}" % (
+ ", ".join(str(x) + "f" for x in self.scales), self.channelDim)
+
+
+# An operand that can be fed into operations. Also, an operand is always
+# declared before operations.
+class Operand(NamedVariable):
+
+ def __init__(self, name, opType, value, backward=None, skipRenaming=False, extraParams=None):
+ NamedVariable.__init__(self, name, sep="", skipRenaming=skipRenaming)
+ if type(opType) is str:
+ self.type = Type.GetTypeFromString(opType, value, extraParams)
+ value = backward
+ else:
+ self.type = Type.GetType(*opType, extraParams=extraParams)
+ self.SetValue(value)
+ self.dimensions = self.type.dimensions
+ self.lifetime = "TEMPORARY_VARIABLE"
+ self.model_index = None
+ self.ins = []
+ self.outs = []
+
+ def SetValue(self, value):
+ self.value = value if type(value) is list or type(value) is tuple else [value]
+ return self
+
+ def SetValueFromNumpy(self, value):
+ self.value = value.flatten().tolist()
+ return self
+
+ def GetValueAsNumpy(self):
+ return np.array(self.value).reshape(self.type.dimensions)
+
+ # Print value as cpp-style list initialization
+ def GetListInitialization(self):
+ assert self.value is not None, \
+ "Trying to print operand %s with None value"%(str(self))
+ if self.type.IsFloat():
+ return "{%s}"%(GetJointStr(self.value, method=PrettyPrintAsFloat))
+ elif self.type.IsBool():
+ return "{%s}"%(GetJointStr(self.value, method=lambda v: "true" if v else "false"))
+ else:
+ return "{%s}"%(GetJointStr(self.value, method=lambda x: str(int(x))))
+
+ def ToUnspecifiedDim(self):
+ self.dimensions = self.type.dimensions
+ self.type = self.type.ToUnspecifiedDim()
+
+# Base class of user-defined input/output operand
+class InOut(Operand):
+
+ def __init__(self, name, opType, backward=None, skipRenaming=False, extraParams=None):
+ Operand.__init__(self, name, opType, backward, None, skipRenaming=skipRenaming, extraParams=extraParams)
+ self.lifetime = "MODEL_INPUT"
+ self.index = 0
+
+ def Feed(self, value):
+ self.SetValue(value[self] if type(value) is dict else value)
+ return self
+
+ def GetListInitialization(self):
+ return "{%d, %s}"%(self.index, super().GetListInitialization())
+
+# A user-declared input operand
+class Input(InOut):
+ def __init__(self, name, opType, backward=None, skipRenaming=False, extraParams=None):
+ InOut.__init__(self, name, opType, backward, skipRenaming=skipRenaming, extraParams=extraParams)
+ self.lifetime = "MODEL_INPUT"
+
+# A user-declared output operand
+class Output(InOut):
+ def __init__(self, name, opType, backward=None, skipRenaming=False):
+ InOut.__init__(self, name, opType, backward, skipRenaming=skipRenaming)
+ self.lifetime = "MODEL_OUTPUT"
+
+# An output whose results we don't want to compare
+class IgnoredOutput(Output):
+ def __init__(self, name, opType, backward=None, skipRenaming=False):
+ Output.__init__(self, name, opType, backward, skipRenaming=skipRenaming)
+ self.lifetime = "MODEL_OUTPUT"
+ def Feed(self, value):
+ numElements = reduce(lambda x,y: x*y, self.dimensions, 1)
+ self.value = [0 for x in range(numElements)]
+ return self
+
+# An explicitly declared parameter
+class Parameter(Operand):
+ def __init__(self, name, opType, value, backward=None, skipRenaming=False, extraParams=None):
+ Operand.__init__(self, name, opType, value, backward, skipRenaming=skipRenaming,
+ extraParams=extraParams)
+ self.initializer = NamedVariable(str(self) + "_init")
+ self.lifetime = "CONSTANT_REFERENCE" if Configuration.useSHM() else "CONSTANT_COPY"
+
+# A shortcut for parameters of INT32
+class Int32Scalar(Parameter, ImplicitParameter):
+ def __init__(self, name, value):
+ Parameter.__init__(self, name, ("INT32", []), int(value))
+ @staticmethod
+ def IsCompatible(value):
+ return type(value) is int
+
+# A shortcut for parameters of FLOAT16
+class Float16Scalar(Parameter, ImplicitParameter):
+ def __init__(self, name, value):
+ Parameter.__init__(self, name, ("FLOAT16", []), float(value))
+ @staticmethod
+ def IsCompatible(value):
+ return False
+
+# A shortcut for parameters of FLOAT32
+class Float32Scalar(Parameter, ImplicitParameter):
+ def __init__(self, name, value):
+ Parameter.__init__(self, name, ("FLOAT32", []), float(value))
+ @staticmethod
+ def IsCompatible(value):
+ return type(value) is float
+
+# A shortcut for parameters of BOOL
+class BoolScalar(Parameter, ImplicitParameter):
+ def __init__(self, name, value):
+ Parameter.__init__(self, name, ("BOOL", []), bool(value))
+ @staticmethod
+ def IsCompatible(value):
+ return type(value) is bool
+
+# A shortcut for parameters of 1-D TENSOR_INT32
+class Int32Vector(Parameter, ImplicitParameter):
+ def __init__(self, name, value):
+ Parameter.__init__(self, name, ("TENSOR_INT32", [len(value)]), [int(v) for v in value])
+ @staticmethod
+ def IsCompatible(value):
+ if type(value) is not list and type(value) is not tuple:
+ return False
+ return all(type(i) is int for i in value)
+
+# A shortcut for parameters of 1-D TENSOR_FLOAT32
+class Float32Vector(Parameter, ImplicitParameter):
+ def __init__(self, name, value):
+ Parameter.__init__(self, name, ("TENSOR_FLOAT32", [len(value)]), [float(v) for v in value])
+ @staticmethod
+ def IsCompatible(value):
+ if type(value) is not list and type(value) is not tuple:
+ return False
+ return all(type(i) is float for i in value)
+
+# An explicitly declared intermediate result
+class Internal(Operand):
+ def __init__(self, name, opType, backward=None, skipRenaming=False):
+ Operand.__init__(self, name, opType, backward, None, skipRenaming=skipRenaming)
+ self.lifetime = "TEMPORARY_VARIABLE"
+
+# An operation in a model; it does not need a name
+class Operation:
+
+ def __init__(self, optype, ins, outs):
+ self.optype = optype
+ self.SetInputs(ins)
+ self.SetOutputs(outs)
+
+ # for ease of debugging
+ def __str__(self):
+ insString = GetJointStr(self.ins)
+ outsString = GetJointStr(self.outs)
+ return "Operation %s: [%s] -> [%s]"%(self.optype, insString, outsString)
+ __repr__ = __str__
+
+ def SetInputs(self, ins):
+ self.ins = [ImplicitParameter.ImplicitConvertion(i) for i in ins]
+ return self
+
+ def SetOutputs(self, outs):
+ self.outs = list(outs)
+ return self
+
+# Main interface
+class Model:
+ models = list()
+
+ def __init__(self, name=None):
+ self.name = name
+ self.operations = []
+ self.operands = []
+ self.isRelaxed = False
+ self.compiled = False
+ self.dumped = False
+ self.hasDynamicOutputShape = False
+ self.version = FileNames.version
+ Model.models.append(self)
+
+ def WithSuffix(self, *args):
+ self.createFunctionName = GlobalVariable("CreateModel", self.name, *args)
+ self.createTestFunctionName = GlobalVariable("createTestModel", self.name, *args)
+ self.isIgnoredFunctionName = GlobalVariable("is_ignored", self.name, *args)
+ return self
+
+ def AddOperation(self, operation):
+ self.operations.append(operation)
+ for i in operation.ins:
+ if i not in self.operands:
+ self.operands.append(i)
+ for o in operation.outs:
+ if o not in self.operands:
+ self.operands.append(o)
+ return self
+
+ def Operation(self, op_name, *args):
+ return self.AddOperation(Operation(op_name, args, []))
+
+ def To(self, *args):
+ assert len(self.operations) > 0
+ if type(args[0]) is tuple or type(args[0]) is list:
+ outs = args[0]
+ else:
+ outs = args
+ self.operations[-1].SetOutputs(outs)
+ for o in outs:
+ if o not in self.operands:
+ self.operands.append(o)
+ return self
+
+ def RelaxedExecution(self, isRelaxed):
+ self.isRelaxed = isRelaxed
+ return self
+
+ def TestDynamicOutputShape(self, hasDynamicOutputShape):
+ self.hasDynamicOutputShape = hasDynamicOutputShape
+ return self
+
+ # Sets the version of the model in compliance tests. Set to None to disable the test.
+ def IntroducedIn(self, ver):
+ self.version = ver
+ return self
+
+ def GetTypes(self):
+ return sorted(list(set(op.type for op in self.operands)))
+
+ def GetInputs(self):
+ return [i for i in self.operands if isinstance(i, Input)]
+
+ def GetOutputs(self):
+ return [o for o in self.operands if isinstance(o, Output)]
+
+ def GetInputsIndex(self):
+ return [i for i,op in enumerate(self.operands) if isinstance(op, Input)]
+
+ def GetOutputsIndex(self):
+ return [o for o,op in enumerate(self.operands) if isinstance(op, Output)]
+
+ def GetIndexOfOperands(self, operands):
+ return [self.operands.index(i) for i in operands]
+
+ def GetIgnoredOutputs(self):
+ return [o for o in self.operands if isinstance(o, IgnoredOutput)]
+
+ def GetParameters(self):
+ return [p for p in self.operands if isinstance(p, Parameter)]
+
+ def GetEquivalentOperands(self, targets):
+ return [self.operands[self.operands.index(t)] for t in targets]
+
+ def UpdateEquivalentOperands(self, targets):
+ for t in targets:
+ self.operands[self.operands.index(t)] = t
+ return self
+
+ def SetOperandIndex(self):
+ for ind, i in enumerate(self.GetInputs()):
+ i.index = ind
+ for ind, o in enumerate(self.GetOutputs()):
+ o.index = ind
+ for ind, op in enumerate(self.operands):
+ op.model_index = ind
+ return self
+
+ def SetOperandInsAndOuts(self):
+ for op in self.operands:
+ op.ins = list()
+ op.outs = list()
+ for op in self.operations:
+ op.ins = self.GetEquivalentOperands(op.ins)
+ op.outs = self.GetEquivalentOperands(op.outs)
+ for i in op.ins:
+ i.outs.append(op)
+ for o in op.outs:
+ o.ins.append(op)
+ return self
+
+ def TopologicalSortHelper(self, op, deps, visited):
+ if op in visited:
+ assert op not in deps, "Cycle detected in the graph"
+ else:
+ visited.add(op)
+ for i in deps[op]:
+ self.TopologicalSortHelper(i, deps, visited)
+ self.operations.append(op)
+ deps.pop(op)
+
+ # Topologically sort the operations and detect whether there is a cycle in the graph
+ def TopologicalSort(self):
+ deps = {op: list() for op in self.operations}
+ [deps[o].append(i) for op in self.operands for o in op.outs for i in op.ins]
+ operations = self.operations.copy()
+ self.operations = []
+ visited = set()
+ for op in operations:
+ self.TopologicalSortHelper(op, deps, visited)
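+
+ # Example (illustrative): if op2 consumes an operand produced by op1, then
+ # deps[op2] contains op1 and TopologicalSort emits op1 before op2 regardless
+ # of the order in which they were added; a dependency cycle trips the
+ # "Cycle detected in the graph" assertion.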
+
+ def SetOutputUnspecified(self):
+ for op in self.operands:
+ op.dimensions = op.type.dimensions
+ if self.hasDynamicOutputShape:
+ for op in self.GetOutputs():
+ op.ToUnspecifiedDim()
+ return self
+
+ def Compile(self):
+ if self.compiled:
+ return self
+ self.SetOperandIndex()
+ self.SetOperandInsAndOuts()
+ self.TopologicalSort()
+ self.SetOutputUnspecified()
+ # Do not check compliance for relaxed mode and dynamic output shape tests.
+ if self.isRelaxed or self.hasDynamicOutputShape:
+ self.IntroducedIn(None)
+ self.compiled = True
+ return self
+
+# To track implicitly convertible variation types
+class ImplicitVariation:
+ @staticmethod
+ def ImplicitConvertion(value):
+ if isinstance(value, ModelVariation):
+ return value
+ for implicitType in ImplicitVariation.__subclasses__():
+ value = value if type(value) is tuple or type(value) is list else [value]
+ if implicitType.IsCompatible(value[0]):
+ var = implicitType(value[0])
+ if len(value) > 1:
+ var.Identify(*value[1:])
+ return var
+ assert False, "%s not supported for implicit variation"%value[0]
+
+# The base class for model variations
+class ModelVariation:
+
+ def __init__(self, name=None):
+ self.targetOperands = {}
+ self.name = name
+
+ def ApplyToHelper(self, model, args, feedDicts, transform):
+ opVarList = []
+ for op in model.GetEquivalentOperands(sorted(args.keys())):
+ opVar = op
+ feedDictsVar = []
+ if isinstance(op, Input) or isinstance(op, Output):
+ for feedDict in feedDicts:
+ op_tmp = copy.deepcopy(op)
+ if op_tmp in feedDict[0]:
+ opVar = transform(op_tmp.Feed(feedDict[0]), args[op_tmp])
+ elif op_tmp in feedDict[1]:
+ opVar = transform(op_tmp.Feed(feedDict[1]), args[op_tmp])
+ else:
+ assert False
+ feedDictsVar.append(opVar.value)
+ assert type(op) == type(opVar), "Can not handle %s -> %s"%(type(op), type(opVar))
+ else:
+ opVar = transform(op, args[op])
+ # handle Parameter -> Input
+ if isinstance(opVar, Input) or isinstance(opVar, Output):
+ feedDictsVar = [opVar.value] * len(feedDicts)
+ if isinstance(opVar, Input) or isinstance(opVar, Output):
+ for feedDict, feedDictVar in zip(feedDicts, feedDictsVar):
+ if opVar in feedDict[1]:
+ feedDict[1][opVar] = feedDictVar
+ else:
+ feedDict[0][opVar] = feedDictVar
+ opVarList.append(opVar)
+ return opVarList
+
+ # Make a deepcopy of the model and feedDicts, and apply the change
+ def ApplyTo(self, modelOrigin, feedDictsOrigin):
+ model, feedDicts = copy.deepcopy((modelOrigin, feedDictsOrigin))
+ model.compiled = False
+ model.dumped = False
+
+ if not self.targetOperands:
+ self.AutoIdentify(model)
+
+ # get transformed operands and update feedDicts
+ operandsVar = self.ApplyToHelper(
+ model, self.targetOperands, feedDicts, self.TransformOperand)
+
+ model = self.TransformModel(model)
+ model.UpdateEquivalentOperands(operandsVar)
+ return model, feedDicts
+
+ def IdentifyOperands(self, args=None):
+ if args is None:
+ return self
+ self.targetOperands = args if type(args) is dict else {i: None for i in args}
+ return self
+
+ def Identify(self, operandArgs=None, paramArgs=None):
+ self.IdentifyOperands(operandArgs)
+ return self
+
+ # Set variation to its default name
+ def SetToDefaultName(self):
+ self.name = ""
+ return self
+
+ # Automatically select the target operand list
+ def AutoIdentify(self, model):
+ return self
+
+ # Transform operands that are marked by IdentifyOperands()
+ def TransformOperand(self, op, arg=None):
+ return op
+
+ # Transform the model
+ def TransformModel(self, model):
+ return model
+
+# Default variation that does nothing
+class DefaultVariation(ModelVariation):
+
+ def __init__(self, name=None):
+ ModelVariation.__init__(self, name=name)
+
+# Convert operand data type
+class DataTypeConverter(ModelVariation, ImplicitVariation):
+
+ def __init__(self, targetType=None, name=None):
+ ModelVariation.__init__(self, name=name)
+ if targetType is not None:
+ assert DataTypeConverter.IsCompatible(targetType)
+ self.targetType = targetType
+
+ @staticmethod
+ def IsCompatible(value):
+ return value.lower() in ["float16", "int32"]
+
+ def SetToDefaultName(self):
+ if self.targetType is not None:
+ self.name = self.targetType.lower()
+ return self
+ # get all target types
+ targetTypes = list(zip(*self.targetOperands.values()))[0]
+ if "TENSOR_QUANT8_SYMM_PER_CHANNEL" in targetTypes:
+ self.name = "channelQuant8"
+ elif "TENSOR_QUANT8_ASYMM" in targetTypes:
+ self.name = "quant8"
+ elif "TENSOR_INT32" in targetTypes:
+ self.name = "int32"
+ elif "TENSOR_FLOAT16" in targetTypes:
+ self.name = "float16"
+ else:
+ self.name = "float32"
+ return self
+
+ def AutoIdentify(self, model):
+ if self.targetType is not None:
+ # By default, select all the float32 tensors/scalars
+ targets = {op: ["TENSOR_" + self.targetType.upper()] \
+ for op in model.operands if op.type.type == "TENSOR_FLOAT32"}
+ targets.update({op: [self.targetType.upper()] \
+ for op in model.operands if op.type.type == "FLOAT32"})
+ self.Identify(targets)
+ return self
+
+ def TransformOperand(self, op, arg=None):
+ if len(arg) == 1:
+ typeTuple = (arg[0], op.type.dimensions)
+ else:
+ typeTuple = (arg[0], op.type.dimensions, *arg[1:])
+ # To handle Internal operands
+ if op.value is None or op.type.GetNumberOfElements() == 0:
+ op.type = Type.GetType(*typeTuple)
+ else:
+ v = Dequantize(op.GetValueAsNumpy().astype(np.float32), op.type)
+ op.type = Type.GetType(*typeTuple)
+ v = Quantize(v, op.type)
+ op.SetValueFromNumpy(v)
+ return op
+
+# Convert model to turn on/off relaxed computation
+class RelaxedModeConverter(ModelVariation, ImplicitVariation):
+
+ def __init__(self, isRelaxed=True, name=None):
+ ModelVariation.__init__(self, name=name)
+ if isinstance(isRelaxed, bool):
+ self.isRelaxed = isRelaxed
+ else:
+ assert RelaxedModeConverter.IsCompatible(isRelaxed.lower())
+ self.isRelaxed = True
+
+ @staticmethod
+ def IsCompatible(value):
+ return value.lower() in ["relaxed"]
+
+ def SetToDefaultName(self):
+ self.name = "relaxed" if self.isRelaxed else "float"
+ return self
+
+ def TransformModel(self, model):
+ model.RelaxedExecution(self.isRelaxed)
+ return model
+
+# Convert data layout between "NHWC" and "NCHW"
+class DataLayoutConverter(ModelVariation, ImplicitVariation):
+
+ def __init__(self, targetLayout="nchw", name=None):
+ ModelVariation.__init__(self, name=name)
+ self.targetLayout = targetLayout.lower()
+ assert DataLayoutConverter.IsCompatible(self.targetLayout)
+ self.perm = (0, 3, 1, 2) if self.targetLayout == "nchw" else (0, 2, 3, 1)
+ self.param = True if self.targetLayout == "nchw" else False
+
+ @staticmethod
+ def IsCompatible(value):
+ return value.lower() in ["nhwc", "nchw"]
+
+ def SetToDefaultName(self):
+ self.name = self.targetLayout
+ return self
+
+ def TransformOperand(self, op, arg=None):
+ if len(op.type.dimensions) == 4:
+ # To handle Internal operands
+ if op.value is not None and op.type.GetNumberOfElements() != 0:
+ op.SetValueFromNumpy(op.GetValueAsNumpy().transpose(self.perm))
+ newDim = [op.type.dimensions[i] for i in self.perm]
+ op.type = Type.GetType(op.type.type, newDim, op.type.scale, op.type.zeroPoint)
+ elif len(op.type.dimensions) == 1 and len(op.value) == 4:
+ op.SetValueFromNumpy(op.GetValueAsNumpy()[list(self.perm)])
+ elif op.type.type == "BOOL":
+ op.SetValue(self.param)
+ else:
+ assert False, "%s not supported by DataLayoutConverter"%op
+ return op
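+
+# Example (illustrative): converting to "nchw" permutes a 4-D operand of
+# dimensions [1, 224, 224, 3] (NHWC) into [1, 3, 224, 224] via
+# perm=(0, 3, 1, 2), reorders a length-4 1-D operand (e.g. a padding spec) the
+# same way, and sets a BOOL layout flag operand to true.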
+
+# Convert data by transposing and removing axes
+class AxisConverter(ModelVariation):
+
+ def __init__(self, origin, target, dim, drop=[], name=None):
+ ModelVariation.__init__(self, name=name)
+ self.origin = origin
+ self.target = target
+ assert all(i >= -dim and i < dim for i in [self.origin, self.target])
+ self.dim = dim
+ self.perm = list(range(dim))
+ self.perm.insert(target if target >= 0 else target + dim, self.perm.pop(origin))
+ self.drop = [drop] if type(drop) is int else list(drop)
+ assert all(i >= -dim and i < dim for i in self.drop)
+ self.drop = [i if i >= 0 else i + dim for i in self.drop]
+ assert target not in self.drop and target + dim not in self.drop
+
+ def SetToDefaultName(self):
+ axis = self.target if self.target >= 0 else self.target + self.dim
+ axis -= sum(i < axis for i in self.drop)
+ neg = "" if self.target >= 0 else "_neg"
+ self.name = "dim%d_axis%d%s"%(self.dim - len(self.drop), axis, neg)
+ return self
+
+ def TransposeAxis(self, op):
+ if op.type.type == "INT32":
+ op.SetValue(self.target)
+ elif len(op.type.dimensions) == self.dim:
+ # To handle Internal operands
+ if op.value is not None:
+ op.SetValueFromNumpy(op.GetValueAsNumpy().transpose(self.perm))
+ newDim = [op.type.dimensions[i] for i in self.perm]
+ op.type = Type.GetType(op.type.type, newDim, op.type.scale, op.type.zeroPoint)
+ else:
+ assert False, "%s not supported by AxisConverter"%op
+ return op
+
+ def RemoveAxis(self, op):
+ if op.type.type == "INT32":
+ if op.value[0] >= 0:
+ op.SetValue(op.value[0] - sum(i < op.value[0] for i in self.drop))
+ else:
+ op.SetValue(op.value[0] + sum(i > (op.value[0] + self.dim) for i in self.drop))
+ elif len(op.type.dimensions) == self.dim:
+ if op.value is not None:
+ val = op.GetValueAsNumpy()
+ for i in sorted(self.drop, reverse=True):
+ val = np.take(val, 0, axis=i)
+ op.SetValueFromNumpy(val)
+ newDim = [op.type.dimensions[i] for i in range(self.dim) if i not in self.drop]
+ op.type = Type.GetType(op.type.type, newDim, op.type.scale, op.type.zeroPoint)
+ else:
+ assert False, "%s not supported by AxisConverter"%op
+ return op
+
+ def TransformOperand(self, op, arg=None):
+ op = self.TransposeAxis(op)
+ op = self.RemoveAxis(op)
+ return op
+
+# Convert a Parameter to Input
+class ParameterAsInputConverter(ModelVariation, ImplicitVariation):
+
+ def __init__(self, arg="as_input", prefix="weight", name=None):
+ ModelVariation.__init__(self, name=name)
+ assert ParameterAsInputConverter.IsCompatible(arg.lower())
+ self.prefix = prefix
+
+ @staticmethod
+ def IsCompatible(value):
+ return value.lower() in ["as_input"]
+
+ def SetToDefaultName(self):
+ self.name = self.prefix + "_as_input"
+ return self
+
+ def TransformOperand(self, op, arg=None):
+ assert isinstance(op, Parameter), "%s cannot be converted to Input."%type(op)
+ newop = Input(op.name, op.type.GetSignatureTuple(), skipRenaming=True, extraParams=op.type.extraParams)
+ newop.SetValue(op.value)
+ return newop
+
+# Convert Output based on activation
+class ActivationConverter(ModelVariation, ImplicitVariation):
+ # (Enum, low, high)
+ actMap = {
+ "none": (0, None, None),
+ "relu": (1, 0.0, None),
+ "relu1": (2, -1.0, 1.0),
+ "relu6": (3, 0.0, 6.0),
+ }
+ def __init__(self, act="relu", name=None):
+ ModelVariation.__init__(self, name=name)
+ self.act = act.lower()
+ assert ActivationConverter.IsCompatible(self.act)
+ self.enum = ActivationConverter.actMap[self.act][0]
+ self.low = ActivationConverter.actMap[self.act][1]
+ self.high = ActivationConverter.actMap[self.act][2]
+
+ @staticmethod
+ def IsCompatible(value):
+ return value.lower() in ActivationConverter.actMap.keys()
+
+ def SetToDefaultName(self):
+ self.name = self.act
+ return self
+
+ def TransformOperand(self, op, arg=None):
+ if op.type.type == "INT32": # activation enum
+ return op.SetValue(self.enum)
+ else:
+ assert isinstance(op, Output)
+ v = op.GetValueAsNumpy()
+ if self.low is not None:
+ low = Quantize(self.low, op.type)
+ v = np.maximum(v, low)
+ if self.high is not None:
+ high = Quantize(self.high, op.type)
+ v = np.minimum(v, high)
+ return op.SetValueFromNumpy(v)
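+
+# Example (illustrative): with act="relu6" the activation enum operand is set
+# to 3 and the golden output values are clamped into [0.0, 6.0] (bounds are
+# quantized first for quantized output types), so -1.0 becomes 0.0 and 7.5
+# becomes 6.0.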
+
+class DynamicOutputShapeConverter(ModelVariation):
+ def __init__(self, name=None):
+ ModelVariation.__init__(self, name=name)
+
+ def SetToDefaultName(self):
+ self.name = "dynamic_output_shape"
+ return self
+
+ def TransformModel(self, model):
+ model.TestDynamicOutputShape(True)
+ return model
+
+# An example is always attached to a model, and could have multiple variations
+class Example:
+ examples = []
+ versionOverrides = {}
+
+ def __init__(self, *args, model=None, name=None):
+ self.model = Model.models[-1] if model is None else model
+ self.name = name
+ self.expectedMultinomialDistributionTolerance = None
+ self.expectFailure = False
+ self.feedDicts = []
+ for feedDict in args:
+ if type(feedDict) is tuple or type(feedDict) is list:
+ self.feedDicts.append(feedDict)
+ elif type(feedDict) is dict:
+ self.feedDicts.append((
+ {i: feedDict[i] for i in self.model.GetInputs()},
+ {o: feedDict[o] for o in self.model.GetOutputs()}
+ ))
+ else:
+ assert False
+ if Configuration.test_dynamic_output_shape:
+ self.variations = [[DefaultVariation(), DynamicOutputShapeConverter()]]
+ else:
+ self.variations = []
+ Example.examples.append(self)
+
+ @staticmethod
+ def SetVersion(ver, *args):
+ for name in args:
+ Example.versionOverrides[name] = ver
+
+ # Main entry point of the test generator
+ @staticmethod
+ def DumpAllExamples(DumpModel=None, model_fd=None,
+ DumpExample=None, example_fd=None,
+ DumpTest=None, test_fd=None):
+ Example.CombineAllExamples()
+ for example in Example.examples:
+ example.Dump(DumpModel, model_fd, DumpExample, example_fd, DumpTest, test_fd)
+
+ # Combine examples with the same model, same name, and same set of variations
+ @staticmethod
+ def CombineAllExamples():
+ modelMap = {}
+ newExamples = []
+ for example in Example.examples:
+ key = (example.model, example.name, tuple(tuple(e) for e in example.variations))
+ if key in modelMap:
+ modelMap[key].Combine(example)
+ else:
+ modelMap[key] = example
+ newExamples.append(example)
+ Example.examples = newExamples
+
+ def AddVariations(self, *args, includeDefault=True, defaultName=None):
+ self.variations.append([DefaultVariation(defaultName)] if includeDefault else [])
+ self.variations[-1].extend(ImplicitVariation.ImplicitConvertion(i) for i in args)
+ return self
+
+ def AddNchw(self, *args, includeDefault=True, defaultName="nhwc"):
+ var = DataLayoutConverter("nchw").Identify(args)
+ self.AddVariations(var, includeDefault=includeDefault, defaultName=defaultName)
+ return self
+
+ def AddRelaxed(self, isRelaxed=True, includeDefault=True, defaultName=None):
+ var = RelaxedModeConverter(isRelaxed)
+ self.AddVariations(var, includeDefault=includeDefault, defaultName=defaultName)
+ return self
+
+ def AddInput(self, *args, includeDefault=True, defaultName=None):
+ var = ParameterAsInputConverter().Identify(args)
+ self.AddVariations(var, includeDefault=includeDefault, defaultName=defaultName)
+ return self
+
+ def AddRelu(self, *args, includeDefault=True, defaultName=None):
+ var = ActivationConverter("relu").Identify(args)
+ self.AddVariations(var, includeDefault=includeDefault, defaultName=defaultName)
+ return self
+
+ def AddAllActivations(self, *args):
+ var = [ActivationConverter(i).Identify(args)
+ for i in sorted(ActivationConverter.actMap.keys())]
+ self.AddVariations(*var, includeDefault=False)
+ return self
+
+ def GuessOriginalAxisAndDim(self, *args):
+ origin = None
+ dim = None
+ for arg in args:
+ if arg.type.type == "INT32":
+ origin = arg.value[0]
+ else:
+ if dim is None:
+ dim = len(arg.type.dimensions)
+ else:
+ assert dim == len(arg.type.dimensions)
+ assert dim is not None
+ origin = dim - 1 if origin is None else origin
+ origin = origin + dim if origin < 0 else origin
+ return origin, dim
+
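+ # Worked example for GuessOriginalAxisAndDim (illustrative): given one 4-D
+ # tensor argument and an INT32 axis operand holding -1, it returns
+ # origin = 3 and dim = 4, since a negative axis is normalized as
+ # origin + dim = -1 + 4 = 3.
+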
+ def AddAxis(self, axis, *args, includeDefault=True, defaultName=None):
+ origin, dim = self.GuessOriginalAxisAndDim(*args)
+ axis = [axis] if type(axis) is int else list(axis)
+ var = [AxisConverter(origin, a, dim).Identify(args) for a in axis]
+ self.AddVariations(*var, includeDefault=includeDefault, defaultName=defaultName)
+ return self
+
+ def AddAllPositiveAxis(self, *args):
+ origin, dim = self.GuessOriginalAxisAndDim(*args)
+ var = [AxisConverter(origin, a, dim).Identify(args) for a in range(dim)]
+ self.AddVariations(*var, includeDefault=False)
+ return self
+
+ def AddAllAxis(self, *args):
+ origin, dim = self.GuessOriginalAxisAndDim(*args)
+ var = [AxisConverter(origin, a, dim).Identify(args) for a in range(-dim, dim)]
+ self.AddVariations(*var, includeDefault=False)
+ return self
+
+ def AddDims(self, dims, *args, includeDefault=True, defaultName=None):
+ origin, dim = self.GuessOriginalAxisAndDim(*args)
+ dims = [dims] if type(dims) is int else list(dims)
+ drop = list(range(dim))
+ drop.pop(origin)
+ var = [AxisConverter(origin, origin, dim, drop[0:(dim-i)]).Identify(args) for i in dims]
+ self.AddVariations(*var, includeDefault=includeDefault, defaultName=defaultName)
+ return self
+
+ def AddAllDims(self, *args):
+ origin, dim = self.GuessOriginalAxisAndDim(*args)
+ drop = list(range(dim))
+ drop.pop(origin)
+ var = [AxisConverter(origin, origin, dim, drop[0:i]).Identify(args) for i in range(dim)]
+ self.AddVariations(*var, includeDefault=False)
+ return self
+
+ def AddAllDimsAndPositiveAxis(self, *args):
+ origin, dim = self.GuessOriginalAxisAndDim(*args)
+ var = [AxisConverter(origin, j, dim, range(i)).Identify(args) \
+ for i in range(dim) for j in range(i, dim)]
+ self.AddVariations(*var, includeDefault=False)
+ return self
+
+ def AddAllDimsAndAxis(self, *args):
+ origin, dim = self.GuessOriginalAxisAndDim(*args)
+ var = [AxisConverter(origin, k, dim, range(i)).Identify(args) \
+ for i in range(dim) for j in range(i, dim) for k in [j, j - dim]]
+ self.AddVariations(*var, includeDefault=False)
+ return self
+
+ def Combine(self, other):
+ assert self.model is other.model, "Only examples targeting the same model can be combined"
+ assert tuple(self.variations) == tuple(other.variations), \
+ "Only examples with the same set of variations can be combined"
+ assert self.name == other.name, "Only examples with the same name can be combined"
+ self.feedDicts.extend(other.feedDicts)
+ return self
+
+ def Dump(self, DumpModel, model_fd, DumpExample, example_fd, DumpTest, test_fd):
+ [v.SetToDefaultName() for vs in self.variations for v in vs if v.name is None]
+ for variationList in itertools.product(*self.variations):
+ # Apply variations
+ modelOrigin, feedDictsOrigin = self.model, self.feedDicts
+ self.model, self.feedDicts = copy.deepcopy((self.model, self.feedDicts))
+ for variation in variationList:
+ self.model, self.feedDicts = variation.ApplyTo(self.model, self.feedDicts)
+ # Concat names for test and examples
+ varNames = [v.name for v in variationList]
+ self.testName = NamedTest(FileNames.specName, self.model.name, self.name, *varNames)
+ self.examplesName = GlobalVariable("examples", self.model.name, self.name, *varNames)
+ if str(self.testName) in Example.versionOverrides:
+ self.model.IntroducedIn(Example.versionOverrides[str(self.testName)])
+ self.model.WithSuffix(*varNames).Compile()
+ # Dump files
+ if DumpModel is not None and model_fd is not None:
+ DumpModel(self.model, model_fd)
+ if DumpExample is not None and example_fd is not None:
+ DumpExample(self, example_fd)
+ if DumpTest is not None and test_fd is not None:
+ DumpTest(self, test_fd)
+ # Restore model and feedDicts before variation
+ self.model = modelOrigin
+ self.feedDicts = feedDictsOrigin
+ return self
+
+ # Specifies the RANDOM_MULTINOMIAL distribution tolerance.
+ # If set to a value greater than zero, the input is compared as
+ # log-probabilities to the output and must be within this tolerance to pass.
+ def WithMultinomialDistributionTolerance(self, expectedTolerance):
+ assert self.expectFailure is False
+ self.expectedMultinomialDistributionTolerance = expectedTolerance
+ return self
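+ # Usage sketch (hypothetical spec snippet):
+ #   Example((input0, output0)).WithMultinomialDistributionTolerance(0.025)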
+
+ # Specifies that this example is expected to fail during compilation or execution.
+ def ExpectFailure(self):
+ assert self.expectedMultinomialDistributionTolerance is None
+ self.expectFailure = True
+ return self
+
+class FileNames:
+ specFiles = []
+ specNames = []
+ modelFiles = []
+ exampleFiles = []
+ testFiles = []
+ specFile = ""
+ specName = ""
+ modelFile = ""
+ exampleFile = ""
+ testFile = ""
+ version = ""
+ fileIndex = 0
+
+ @staticmethod
+ def InitializeFileLists(spec, model, example, test):
+ # get all spec files and target files
+ if os.path.isfile(spec):
+ FileNames.specFiles = [os.path.abspath(spec)]
+ elif os.path.isdir(spec):
+ FileNames.specFiles = sorted([os.path.abspath(os.path.join(spec, f))
+ for f in os.listdir(spec) if f.endswith(".mod.py")])
+ else:
+ assert False, "%s is neither a file or a directory"%spec
+ FileNames.specNames = [re.sub(r"\..*", "", os.path.basename(f))
+ for f in FileNames.specFiles]
+ FileNames.modelFiles = FileNames.ParseTargetFiles(model, ".model.cpp")
+ FileNames.exampleFiles = FileNames.ParseTargetFiles(example, ".example.cpp")
+ FileNames.testFiles = FileNames.ParseTargetFiles(test, ".mod.py.cpp")
+
+ @staticmethod
+ def ParseTargetFiles(arg, ext):
+ numFiles = len(FileNames.specFiles)
+ absPath = os.path.abspath(arg)
+ if os.path.isdir(arg):
+ target = [os.path.join(absPath, f + ext) for f in FileNames.specNames]
+ elif arg == "-":
+ target = ["-"] * numFiles
+ else:
+ target = [absPath] * numFiles
+ return target
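+ # For example (illustrative): with a spec named "conv_float" discovered
+ # above, ParseTargetFiles("out/models", ".model.cpp") yields
+ # ["<abs>/out/models/conv_float.model.cpp"], while an arg of "-" keeps "-"
+ # as every target (conventionally meaning standard output).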
+
+ @staticmethod
+ def NextFile():
+ if FileNames.fileIndex >= len(FileNames.specFiles):
+ return False
+ FileNames.specFile = FileNames.specFiles[FileNames.fileIndex]
+ FileNames.specName = FileNames.specNames[FileNames.fileIndex]
+ FileNames.modelFile = FileNames.modelFiles[FileNames.fileIndex]
+ FileNames.exampleFile = FileNames.exampleFiles[FileNames.fileIndex]
+ FileNames.testFile = FileNames.testFiles[FileNames.fileIndex]
+ FileNames.fileIndex += 1
+ NamedObject.existingNames = set()
+ NamedVariable.existingNames = set()
+ NamedTest.existingNames = set()
+ Type.typesMap = dict()
+ Model.models = list()
+ Example.examples = list()
+ Configuration.use_shm_for_weights = False
+
+ # Extract version from absolute file path.
+ versionMatch = re.findall(r"/V\d_\d/", FileNames.specFile)
+ if len(versionMatch) == 1:
+ FileNames.version = versionMatch[0].strip('/')
+ else:
+ FileNames.version = None
+ return True
+
+class Configuration:
+ use_shm_for_weights = False
+ force_regenerate = False
+ test_dynamic_output_shape = True
+
+ @staticmethod
+ def useSHM():
+ return Configuration.use_shm_for_weights
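+
+# End-to-end sketch (hypothetical driver; the argparse wiring is assumed, not
+# shown in this module): each .mod.py spec is exec'd against the classes
+# above, then DumpAllExamples emits the model/example/test sources.
+#
+#   FileNames.InitializeFileLists(args.spec, args.model, args.example, args.test)
+#   while FileNames.NextFile():
+#       exec(open(FileNames.specFile).read())
+#       Example.DumpAllExamples(DumpModel=DumpCtsModel, model_fd=model_fd,
+#                               DumpExample=DumpCtsExample, example_fd=example_fd,
+#                               DumpTest=DumpCtsTest, test_fd=test_fd)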
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/lstm_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/lstm_float.mod.py
new file mode 100644
index 000000000..60eec8280
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/lstm_float.mod.py
@@ -0,0 +1,145 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+
+n_batch = 1
+n_input = 2
+# n_cell and n_output have the same size when there is no projection.
+n_cell = 4
+n_output = 4
+
+input = Input("input", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_input))
+
+input_to_input_weights = Input("input_to_input_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+input_to_forget_weights = Input("input_to_forget_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+input_to_cell_weights = Input("input_to_cell_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+input_to_output_weights = Input("input_to_output_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+
+recurrent_to_input_weights = Input("recurrent_to_input_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))
+recurrent_to_forget_weights = Input("recurrent_to_forget_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))
+recurrent_to_cell_weights = Input("recurrent_to_cell_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))
+recurrent_to_output_weights = Input("recurrent_to_output_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))
+
+cell_to_input_weights = Input("cell_to_input_weights", "TENSOR_FLOAT32", "{0}")
+cell_to_forget_weights = Input("cell_to_forget_weights", "TENSOR_FLOAT32", "{0}")
+cell_to_output_weights = Input("cell_to_output_weights", "TENSOR_FLOAT32", "{0}")
+
+input_gate_bias = Input("input_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+forget_gate_bias = Input("forget_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+cell_gate_bias = Input("cell_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+output_gate_bias = Input("output_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+
+projection_weights = Input("projection_weights", "TENSOR_FLOAT32", "{0,0}")
+projection_bias = Input("projection_bias", "TENSOR_FLOAT32", "{0}")
+
+output_state_in = Input("output_state_in", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
+cell_state_in = Input("cell_state_in", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
+
+activation_param = Int32Scalar("activation_param", 4) # Tanh
+cell_clip_param = Float32Scalar("cell_clip_param", 0.)
+proj_clip_param = Float32Scalar("proj_clip_param", 0.)
+
+scratch_buffer = IgnoredOutput("scratch_buffer", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, (n_cell * 4)))
+output_state_out = Output("output_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
+cell_state_out = Output("cell_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
+output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
+
+model = model.Operation("LSTM",
+ input,
+
+ input_to_input_weights,
+ input_to_forget_weights,
+ input_to_cell_weights,
+ input_to_output_weights,
+
+ recurrent_to_input_weights,
+ recurrent_to_forget_weights,
+ recurrent_to_cell_weights,
+ recurrent_to_output_weights,
+
+ cell_to_input_weights,
+ cell_to_forget_weights,
+ cell_to_output_weights,
+
+ input_gate_bias,
+ forget_gate_bias,
+ cell_gate_bias,
+ output_gate_bias,
+
+ projection_weights,
+ projection_bias,
+
+ output_state_in,
+ cell_state_in,
+
+ activation_param,
+ cell_clip_param,
+ proj_clip_param
+).To([scratch_buffer, output_state_out, cell_state_out, output])
+model = model.RelaxedExecution(True)
+
+# Example 1. Input in operand 0.
+input0 = {input_to_input_weights: [-0.45018822, -0.02338299, -0.0870589, -0.34550029, 0.04266912, -0.15680569, -0.34856534, 0.43890524],
+ input_to_forget_weights: [0.09701663, 0.20334584, -0.50592935, -0.31343272, -0.40032279, 0.44781327, 0.01387155, -0.35593212],
+ input_to_cell_weights: [-0.50013041, 0.1370284, 0.11810488, 0.2013163, -0.20583314, 0.44344562, 0.22077113, -0.29909778],
+ input_to_output_weights: [-0.25065863, -0.28290087, 0.04613829, 0.40525138, 0.44272184, 0.03897077, -0.1556896, 0.19487578],
+
+ input_gate_bias: [0.,0.,0.,0.],
+ forget_gate_bias: [1.,1.,1.,1.],
+ cell_gate_bias: [0.,0.,0.,0.],
+ output_gate_bias: [0.,0.,0.,0.],
+
+ recurrent_to_input_weights: [
+ -0.0063535, -0.2042388, 0.31454784, -0.35746509, 0.28902304, 0.08183324,
+ -0.16555229, 0.02286911, -0.13566875, 0.03034258, 0.48091322,
+ -0.12528998, 0.24077177, -0.51332325, -0.33502164, 0.10629296],
+
+ recurrent_to_cell_weights: [
+ -0.3407414, 0.24443203, -0.2078532, 0.26320225, 0.05695659, -0.00123841,
+ -0.4744786, -0.35869038, -0.06418842, -0.13502428, -0.501764, 0.22830659,
+ -0.46367589, 0.26016325, -0.03894562, -0.16368064],
+
+ recurrent_to_forget_weights: [
+ -0.48684245, -0.06655136, 0.42224967, 0.2112639, 0.27654213, 0.20864892,
+ -0.07646349, 0.45877004, 0.00141793, -0.14609534, 0.36447752, 0.09196436,
+ 0.28053468, 0.01560611, -0.20127171, -0.01140004],
+
+ recurrent_to_output_weights: [
+ 0.43385774, -0.17194885, 0.2718237, 0.09215671, 0.24107647, -0.39835793,
+ 0.18212086, 0.01301402, 0.48572797, -0.50656658, 0.20047462, -0.20607421,
+ -0.51818722, -0.15390486, 0.0468148, 0.39922136],
+
+ cell_to_input_weights: [],
+ cell_to_forget_weights: [],
+ cell_to_output_weights: [],
+
+ projection_weights: [],
+ projection_bias: [],
+}
+
+test_input = [2., 3.]
+output_state = [0, 0, 0, 0]
+cell_state = [0, 0, 0, 0]
+golden_output = [-0.02973187, 0.1229473, 0.20885126, -0.15358765,]
+output0 = {
+ scratch_buffer: [ 0 for x in range(n_batch * n_cell * 4) ],
+ cell_state_out: [ -0.145439, 0.157475, 0.293663, -0.277353 ],
+ output_state_out: [ -0.0297319, 0.122947, 0.208851, -0.153588 ],
+ output: golden_output
+}
+input0[input] = test_input
+input0[output_state_in] = output_state
+input0[cell_state_in] = cell_state
+Example((input0, output0))
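+
+# Note: RelaxedExecution(True) above is what emits the
+# model->relaxComputationFloat32toFloat16(true) call in the generated
+# model constructor below.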
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stdout.txt.expect
new file mode 100644
index 000000000..5fdca97d1
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_float/stdout.txt.expect
@@ -0,0 +1,107 @@
+// clang-format off
+// Generated file (from: lstm_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: lstm_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: lstm_float.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace lstm_float {
+// Generated lstm_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace lstm_float
+
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {4, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {4, 4});
+ OperandType type3(Type::TENSOR_FLOAT32, {0});
+ OperandType type4(Type::TENSOR_FLOAT32, {4});
+ OperandType type5(Type::TENSOR_FLOAT32, {0, 0});
+ OperandType type6(Type::TENSOR_FLOAT32, {1, 4});
+ OperandType type7(Type::INT32, {});
+ OperandType type8(Type::FLOAT32, {});
+ OperandType type9(Type::TENSOR_FLOAT32, {1, 16});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto input_to_input_weights = model->addOperand(&type1);
+ auto input_to_forget_weights = model->addOperand(&type1);
+ auto input_to_cell_weights = model->addOperand(&type1);
+ auto input_to_output_weights = model->addOperand(&type1);
+ auto recurrent_to_input_weights = model->addOperand(&type2);
+ auto recurrent_to_forget_weights = model->addOperand(&type2);
+ auto recurrent_to_cell_weights = model->addOperand(&type2);
+ auto recurrent_to_output_weights = model->addOperand(&type2);
+ auto cell_to_input_weights = model->addOperand(&type3);
+ auto cell_to_forget_weights = model->addOperand(&type3);
+ auto cell_to_output_weights = model->addOperand(&type3);
+ auto input_gate_bias = model->addOperand(&type4);
+ auto forget_gate_bias = model->addOperand(&type4);
+ auto cell_gate_bias = model->addOperand(&type4);
+ auto output_gate_bias = model->addOperand(&type4);
+ auto projection_weights = model->addOperand(&type5);
+ auto projection_bias = model->addOperand(&type3);
+ auto output_state_in = model->addOperand(&type6);
+ auto cell_state_in = model->addOperand(&type6);
+ auto activation_param = model->addOperand(&type7);
+ auto cell_clip_param = model->addOperand(&type8);
+ auto proj_clip_param = model->addOperand(&type8);
+ auto scratch_buffer = model->addOperand(&type9);
+ auto output_state_out = model->addOperand(&type6);
+ auto cell_state_out = model->addOperand(&type6);
+ auto output = model->addOperand(&type6);
+ // Phase 2, operations
+ static int32_t activation_param_init[] = {4};
+ model->setOperandValue(activation_param, activation_param_init, sizeof(int32_t) * 1);
+ static float cell_clip_param_init[] = {0.0f};
+ model->setOperandValue(cell_clip_param, cell_clip_param_init, sizeof(float) * 1);
+ static float proj_clip_param_init[] = {0.0f};
+ model->setOperandValue(proj_clip_param, proj_clip_param_init, sizeof(float) * 1);
+ model->addOperation(ANEURALNETWORKS_LSTM, {input, input_to_input_weights, input_to_forget_weights, input_to_cell_weights, input_to_output_weights, recurrent_to_input_weights, recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights, cell_to_input_weights, cell_to_forget_weights, cell_to_output_weights, input_gate_bias, forget_gate_bias, cell_gate_bias, output_gate_bias, projection_weights, projection_bias, output_state_in, cell_state_in, activation_param, cell_clip_param, proj_clip_param}, {scratch_buffer, output_state_out, cell_state_out, output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input, input_to_input_weights, input_to_forget_weights, input_to_cell_weights, input_to_output_weights, recurrent_to_input_weights, recurrent_to_forget_weights, recurrent_to_cell_weights, recurrent_to_output_weights, cell_to_input_weights, cell_to_forget_weights, cell_to_output_weights, input_gate_bias, forget_gate_bias, cell_gate_bias, output_gate_bias, projection_weights, projection_bias, output_state_in, cell_state_in},
+ {scratch_buffer, output_state_out, cell_state_out, output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {0};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2.0f, 3.0f}}, {1, {-0.45018822f, -0.02338299f, -0.0870589f, -0.34550029f, 0.04266912f, -0.15680569f, -0.34856534f, 0.43890524f}}, {2, {0.09701663f, 0.20334584f, -0.50592935f, -0.31343272f, -0.40032279f, 0.44781327f, 0.01387155f, -0.35593212f}}, {3, {-0.50013041f, 0.1370284f, 0.11810488f, 0.2013163f, -0.20583314f, 0.44344562f, 0.22077113f, -0.29909778f}}, {4, {-0.25065863f, -0.28290087f, 0.04613829f, 0.40525138f, 0.44272184f, 0.03897077f, -0.1556896f, 0.19487578f}}, {5, {-0.0063535f, -0.2042388f, 0.31454784f, -0.35746509f, 0.28902304f, 0.08183324f, -0.16555229f, 0.02286911f, -0.13566875f, 0.03034258f, 0.48091322f, -0.12528998f, 0.24077177f, -0.51332325f, -0.33502164f, 0.10629296f}}, {6, {-0.48684245f, -0.06655136f, 0.42224967f, 0.2112639f, 0.27654213f, 0.20864892f, -0.07646349f, 0.45877004f, 0.00141793f, -0.14609534f, 0.36447752f, 0.09196436f, 0.28053468f, 0.01560611f, -0.20127171f, -0.01140004f}}, {7, {-0.3407414f, 0.24443203f, -0.2078532f, 0.26320225f, 0.05695659f, -0.00123841f, -0.4744786f, -0.35869038f, -0.06418842f, -0.13502428f, -0.501764f, 0.22830659f, -0.46367589f, 0.26016325f, -0.03894562f, -0.16368064f}}, {8, {0.43385774f, -0.17194885f, 0.2718237f, 0.09215671f, 0.24107647f, -0.39835793f, 0.18212086f, 0.01301402f, 0.48572797f, -0.50656658f, 0.20047462f, -0.20607421f, -0.51818722f, -0.15390486f, 0.0468148f, 0.39922136f}}, {9, {}}, {10, {}}, {11, {}}, {12, {0.0f, 0.0f, 0.0f, 0.0f}}, {13, {1.0f, 1.0f, 1.0f, 1.0f}}, {14, {0.0f, 0.0f, 0.0f, 0.0f}}, {15, {0.0f, 0.0f, 0.0f, 0.0f}}, {16, {}}, {17, {}}, {18, {0.0f, 0.0f, 0.0f, 0.0f}}, {19, {0.0f, 0.0f, 0.0f, 0.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {1, {-0.0297319f, 0.122947f, 0.208851f, -0.153588f}}, {2, {-0.145439f, 0.157475f, 0.293663f, -0.277353f}}, {3, {-0.02973187f, 0.1229473f, 0.20885126f, -0.15358765f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, lstm_float) {
+ execute(lstm_float::CreateModel,
+ lstm_float::is_ignored,
+ lstm_float::examples);
+}
+
+#include "../generated/tests/lstm_float.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py
new file mode 100644
index 000000000..051780877
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py
@@ -0,0 +1,43 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 2}, 0.5f, 0")
+f1 = Input("op2", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 2}, 0.5f, 0")
+b1 = Input("op3", "TENSOR_INT32", "{2}, 0.25f, 0")
+pad0 = Int32Scalar("pad0", 0)
+act = Int32Scalar("act", 0)
+stride = Int32Scalar("stride", 1)
+cm = Int32Scalar("channelMultiplier", 1)
+output = Output("op4", "TENSOR_QUANT8_ASYMM", "{1,1,1,2}, 1.f, 0")
+
+model = model.Operation("DEPTHWISE_CONV_2D",
+ i1, f1, b1,
+ pad0, pad0, pad0, pad0,
+ stride, stride,
+ cm, act).To(output)
+
+# Example 1. Input in operand 0.
+input0 = {i1: # input 0
+ [4, 16, 4, 32, 4, 64, 4, 128],
+ f1:
+ [2, 4, 2, 0, 2, 2, 2, 0],
+ b1:
+ [0, 0]}
+# (i1 (depthconv) f1)
+output0 = {output: # output 0
+ [8, 48]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stdout.txt.expect
new file mode 100644
index 000000000..9c54412de
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_backward_compatibility_quant8/stdout.txt.expect
@@ -0,0 +1,82 @@
+// clang-format off
+// Generated file (from: depthwise_conv2d_quant8.mod.py). Do not edit
+// clang-format off
+// Generated file (from: depthwise_conv2d_quant8.mod.py). Do not edit
+// clang-format off
+// Generated file (from: depthwise_conv2d_quant8.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace depthwise_conv2d_quant8 {
+// Generated depthwise_conv2d_quant8 test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace depthwise_conv2d_quant8
+
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 0);
+ OperandType type1(Type::TENSOR_INT32, {2}, 0.25f, 0);
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_QUANT8_ASYMM, {1, 1, 1, 2}, 1.0f, 0);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type0);
+ auto op3 = model->addOperand(&type1);
+ auto pad0 = model->addOperand(&type2);
+ auto stride = model->addOperand(&type2);
+ auto channelMultiplier = model->addOperand(&type2);
+ auto act = model->addOperand(&type2);
+ auto op4 = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t channelMultiplier_init[] = {1};
+ model->setOperandValue(channelMultiplier, channelMultiplier_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op1, op2, op3, pad0, pad0, pad0, pad0, stride, stride, channelMultiplier, act}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2, op3},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {{2, {0, 0}}},
+ // int -> QUANT8_ASYMM map
+ {{0, {4, 16, 4, 32, 4, 64, 4, 128}}, {1, {2, 4, 2, 0, 2, 2, 2, 0}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {8, 48}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, depthwise_conv2d_quant8) {
+ execute(depthwise_conv2d_quant8::CreateModel,
+ depthwise_conv2d_quant8::is_ignored,
+ depthwise_conv2d_quant8::examples);
+}
+
+#include "../generated/tests/depthwise_conv2d_quant8.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/conv_float.mod.py
new file mode 100644
index 000000000..f6b3e89ca
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/conv_float.mod.py
@@ -0,0 +1,35 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 3, 3, 1}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{1, 2, 2, 1}", [.25, .25, .25, .25])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [0])
+pad0 = Int32Scalar("pad0", 0)
+act = Int32Scalar("act", 0)
+stride = Int32Scalar("stride", 1)
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad0, pad0, pad0, pad0, stride, stride, act).To(output)
+model = model.RelaxedExecution(True)
+
+# Example 1. Input in operand 0.
+input0 = {i1: # input 0
+ [1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0]}
+
+output0 = {output: # output 0
+ [.875, .875, .875, .875]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stdout.txt.expect
new file mode 100644
index 000000000..cb6c58a3c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_float/stdout.txt.expect
@@ -0,0 +1,85 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 3, 3, 1});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto pad0 = model->addOperand(&type3);
+ auto stride = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type1);
+ // Phase 2, operations
+ static float op2_init[] = {0.25f, 0.25f, 0.25f, 0.25f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 4);
+ static float op3_init[] = {0.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, pad0, pad0, pad0, pad0, stride, stride, act}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 0.5f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.875f, 0.875f, 0.875f, 0.875f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float) {
+ execute(conv_float::CreateModel,
+ conv_float::is_ignored,
+ conv_float::examples);
+}
+
+#include "../generated/tests/conv_float.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/mean_implicit.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/mean_implicit.mod.py
new file mode 100644
index 000000000..be7b22bdb
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/mean_implicit.mod.py
@@ -0,0 +1,41 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+i0 = Input("i0", ("TENSOR_FLOAT32", [2, 2])) # input 0
+
+o1 = Output("o1", ("TENSOR_FLOAT32", [1, 2])) # output for model1
+o2 = Output("o2", ("TENSOR_FLOAT32", [2, 1])) # output for model2
+o3 = Output("o3", ("TENSOR_FLOAT32", [1])) # output for model3
+
+model1 = Model().Operation("MEAN", i0, [0], 1).To(o1) # along axis 0, keep_dim=True
+model2 = Model().Operation("MEAN", i0, [1], 1).To(o2) # along axis 1, keep_dim=True
+model3 = Model().Operation("MEAN", i0, [0, 1], 0).To(o3) # along both axes, keep_dim=False
+
+inputs1 = {i0: [1, 2, 3, 4]}
+outputs11 = {o1: [4, 6]}
+outputs12 = {o2: [3, 7]}
+outputs13 = {o3: [10]}
+
+inputs2 = {i0: [-1, -2, -3, -4]}
+outputs21 = {o1: [-4, -6]}
+outputs22 = {o2: [-3, -7]}
+outputs23 = {o3: [-10]}
+
+Example((inputs1, outputs11), model=model1)
+Example((inputs1, outputs12), model=model2)
+Example((inputs1, outputs13), model=model3)
+
+Example((inputs2, outputs21), model=model1)
+Example((inputs2, outputs22), model=model2)
+Example((inputs2, outputs23), model=model3)
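+
+# Note: each pair of Example calls above targets a different model, so the
+# generator groups them per model and emits CreateModel, CreateModel_2 and
+# CreateModel_3 below, each paired with a two-entry examples vector.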
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stdout.txt.expect
new file mode 100644
index 000000000..feebbcc81
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_parameter/stdout.txt.expect
@@ -0,0 +1,262 @@
+// clang-format off
+// Generated file (from: mean_implicit.mod.py). Do not edit
+// clang-format off
+// Generated file (from: mean_implicit.mod.py). Do not edit
+// clang-format off
+// Generated file (from: mean_implicit.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace mean_implicit {
+// Generated mean_implicit test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace mean_implicit
+
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {2, 1});
+ OperandType type3(Type::TENSOR_FLOAT32, {1});
+ OperandType type4(Type::TENSOR_INT32, {1});
+ OperandType type5(Type::INT32, {});
+ OperandType type6(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto i0 = model->addOperand(&type0);
+ auto param = model->addOperand(&type4);
+ auto param1 = model->addOperand(&type5);
+ auto o1 = model->addOperand(&type1);
+ // Phase 2, operations
+ static int32_t param_init[] = {0};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_MEAN, {i0, param, param1}, {o1});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {i0},
+ {o1});
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {4.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-1.0f, -2.0f, -3.0f, -4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-4.0f, -6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, mean_implicit) {
+ execute(mean_implicit::CreateModel,
+ mean_implicit::is_ignored,
+ mean_implicit::examples);
+}
+
+void CreateModel_2(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {2, 1});
+ OperandType type3(Type::TENSOR_FLOAT32, {1});
+ OperandType type4(Type::TENSOR_INT32, {1});
+ OperandType type5(Type::INT32, {});
+ OperandType type6(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto i0 = model->addOperand(&type0);
+ auto param2 = model->addOperand(&type4);
+ auto param3 = model->addOperand(&type5);
+ auto o2 = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t param3_init[] = {1};
+ model->setOperandValue(param3, param3_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_MEAN, {i0, param2, param3}, {o2});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {i0},
+ {o2});
+ assert(model->isValid());
+}
+
+bool is_ignored_2(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_2 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {3.0f, 7.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-1.0f, -2.0f, -3.0f, -4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-3.0f, -7.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, mean_implicit_2) {
+ execute(mean_implicit::CreateModel_2,
+ mean_implicit::is_ignored_2,
+ mean_implicit::examples_2);
+}
+
+void CreateModel_3(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {2, 1});
+ OperandType type3(Type::TENSOR_FLOAT32, {1});
+ OperandType type4(Type::TENSOR_INT32, {1});
+ OperandType type5(Type::INT32, {});
+ OperandType type6(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto i0 = model->addOperand(&type0);
+ auto param4 = model->addOperand(&type6);
+ auto param5 = model->addOperand(&type5);
+ auto o3 = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t param4_init[] = {0, 1};
+ model->setOperandValue(param4, param4_init, sizeof(int32_t) * 2);
+ static int32_t param5_init[] = {0};
+ model->setOperandValue(param5, param5_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_MEAN, {i0, param4, param5}, {o3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {i0},
+ {o3});
+ assert(model->isValid());
+}
+
+bool is_ignored_3(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_3 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {10.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-1.0f, -2.0f, -3.0f, -4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-10.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, mean_implicit_3) {
+ execute(mean_implicit::CreateModel_3,
+ mean_implicit::is_ignored_3,
+ mean_implicit::examples_3);
+}
+
+#include "../generated/tests/mean_implicit.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/conv_float.mod.py
new file mode 100644
index 000000000..826f390c0
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/conv_float.mod.py
@@ -0,0 +1,52 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{2, 2, 2, 2}", [1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [-200])
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+act = Int32Scalar("act", 0) # None activation
+layout = Int32Scalar("layout", 0) # NHWC
+
+model = model.Operation("CONV_2D", i1, f1, b1, 1, 1, 1, act, layout).To(output)
+
+# Example 1. Input in operand 0.
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8]}
+
+output0 = {output: # output 0
+ [204, 120, 94, 104, 70, 164, 23, 112]}
+
+quant8 = DataTypeConverter().Identify({
+ i1: ("TENSOR_QUANT8_ASYMM", 0.5, 128),
+ f1: ("TENSOR_QUANT8_ASYMM", 0.25, 128),
+ b1: ("TENSOR_INT32", 0.125, 0),
+ output: ("TENSOR_QUANT8_ASYMM", 2, 100)
+})
+
+# Instantiate an example
+Example(
+ (input0, output0)
+).AddVariations(
+ ("NCHW", [i1, f1, output], [layout])
+).AddVariations(
+ ("relu", [output], [act]),
+ ("relu6", [output], [act]),
+ includeDefault=False
+).AddVariations(
+ ("as_input", [f1])
+).AddVariations(
+ "relaxed", quant8
+)
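+
+# Note: the chained AddVariations calls above multiply out into a cartesian
+# product of variants, which is why the generated suite below contains tests
+# such as conv_float_relu, conv_float_relu_relaxed, conv_float_relu_quant8
+# and conv_float_relu_weight_as_input_relaxed.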
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stdout.txt.expect
new file mode 100644
index 000000000..7d3cba69a
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_implicit_variation/stdout.txt.expect
@@ -0,0 +1,1848 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+void CreateModel_relu(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu) {
+ execute(conv_float::CreateModel_relu,
+ conv_float::is_ignored_relu,
+ conv_float::examples_relu);
+}
+
+void CreateModel_relu_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_relaxed) {
+ execute(conv_float::CreateModel_relu_relaxed,
+ conv_float::is_ignored_relu_relaxed,
+ conv_float::examples_relu_relaxed);
+}
+
+void CreateModel_relu_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_quant8) {
+ execute(conv_float::CreateModel_relu_quant8,
+ conv_float::is_ignored_relu_quant8,
+ conv_float::examples_relu_quant8);
+}
+
+void CreateModel_relu_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_weight_as_input) {
+ execute(conv_float::CreateModel_relu_weight_as_input,
+ conv_float::is_ignored_relu_weight_as_input,
+ conv_float::examples_relu_weight_as_input);
+}
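+
+// Editor's note: in the *_weight_as_input variants op2 never receives a
+// setOperandValue() call; it is listed in identifyInputsAndOutputs() instead,
+// so the filter becomes runtime input #1. That is why the example above
+// carries a second FLOAT32 entry, {1, {...}}, next to the activation input at
+// index 0; conceptually the harness binds it with something like
+// execution.setInput(1, filterData, sizeof(filterData)) (names illustrative).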
+
+void CreateModel_relu_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_relu_weight_as_input_relaxed,
+ conv_float::examples_relu_weight_as_input_relaxed);
+}
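+
+// Editor's note: relaxComputationFloat32toFloat16(true) permits a driver to
+// evaluate TENSOR_FLOAT32 arithmetic with IEEE-754 fp16 range and precision,
+// so the harness widens its comparison tolerance for *_relaxed tests. An
+// illustrative acceptance bound (assumed, not the harness's exact constant):
+//   |actual - expected| <= 5 * 2^-10 * |expected|
+// where 2^-10 is the fp16 unit of least precision at 1.0.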
+
+void CreateModel_relu_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_relu_weight_as_input_quant8,
+ conv_float::is_ignored_relu_weight_as_input_quant8,
+ conv_float::examples_relu_weight_as_input_quant8);
+}
+
+void CreateModel_relu6(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6) {
+ execute(conv_float::CreateModel_relu6,
+ conv_float::is_ignored_relu6,
+ conv_float::examples_relu6);
+}
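+
+// Editor's note: the fused-activation operand takes ANEURALNETWORKS_FUSED_NONE
+// (0), _RELU (1), _RELU1 (2) or _RELU6 (3). act_init[] = {3} applies
+// f(x) = min(max(x, 0), 6) to each accumulator; since the raw convolution
+// results (204, 120, 94, ...) all exceed 6, every expected output above
+// clamps to 6.0f.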
+
+void CreateModel_relu6_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_relaxed) {
+ execute(conv_float::CreateModel_relu6_relaxed,
+ conv_float::is_ignored_relu6_relaxed,
+ conv_float::examples_relu6_relaxed);
+}
+
+void CreateModel_relu6_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_quant8) {
+ execute(conv_float::CreateModel_relu6_quant8,
+ conv_float::is_ignored_relu6_quant8,
+ conv_float::examples_relu6_quant8);
+}
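+
+// Editor's note: the all-103 expectation is the output quantization applied
+// to the clamped float result: q = real / scale + zeroPoint
+// = 6.0 / 2.0 + 100 = 103 for every element.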
+
+void CreateModel_relu6_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_weight_as_input) {
+ execute(conv_float::CreateModel_relu6_weight_as_input,
+ conv_float::is_ignored_relu6_weight_as_input,
+ conv_float::examples_relu6_weight_as_input);
+}
+
+void CreateModel_relu6_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_relu6_weight_as_input_relaxed,
+ conv_float::examples_relu6_weight_as_input_relaxed);
+}
+
+void CreateModel_relu6_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_relu6_weight_as_input_quant8,
+ conv_float::examples_relu6_weight_as_input_quant8);
+}
+
+void CreateModel_nchw_relu(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu) {
+ execute(conv_float::CreateModel_nchw_relu,
+ conv_float::is_ignored_nchw_relu,
+ conv_float::examples_nchw_relu);
+}
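+
+// Editor's note: layout_init[] = {1} switches the operation to NCHW, so the
+// nchw_* variants feed the same tensors channel-first. For dimensions
+// N, C, H, W the linear indices relate as
+//   nhwcIndex = ((n*H + h)*W + w)*C + c
+//   nchwIndex = ((n*C + c)*H + h)*W + w
+// and the expected outputs above are exactly the conv_float_relu NHWC results
+// reordered by this permutation.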
+
+void CreateModel_nchw_relu_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu_relaxed,
+ conv_float::is_ignored_nchw_relu_relaxed,
+ conv_float::examples_nchw_relu_relaxed);
+}
+
+void CreateModel_nchw_relu_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_quant8) {
+ execute(conv_float::CreateModel_nchw_relu_quant8,
+ conv_float::is_ignored_nchw_relu_quant8,
+ conv_float::examples_nchw_relu_quant8);
+}
+
+void CreateModel_nchw_relu_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_weight_as_input) {
+ execute(conv_float::CreateModel_nchw_relu_weight_as_input,
+ conv_float::is_ignored_nchw_relu_weight_as_input,
+ conv_float::examples_nchw_relu_weight_as_input);
+}
+
+void CreateModel_nchw_relu_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu_weight_as_input_relaxed);
+}
+
+void CreateModel_nchw_relu_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_nchw_relu_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu_weight_as_input_quant8,
+ conv_float::examples_nchw_relu_weight_as_input_quant8);
+}
+
+void CreateModel_nchw_relu6(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6) {
+ execute(conv_float::CreateModel_nchw_relu6,
+ conv_float::is_ignored_nchw_relu6,
+ conv_float::examples_nchw_relu6);
+}
+
+void CreateModel_nchw_relu6_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu6_relaxed,
+ conv_float::is_ignored_nchw_relu6_relaxed,
+ conv_float::examples_nchw_relu6_relaxed);
+}
+
+void CreateModel_nchw_relu6_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_quant8) {
+ execute(conv_float::CreateModel_nchw_relu6_quant8,
+ conv_float::is_ignored_nchw_relu6_quant8,
+ conv_float::examples_nchw_relu6_quant8);
+}
+
+void CreateModel_nchw_relu6_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_weight_as_input) {
+ execute(conv_float::CreateModel_nchw_relu6_weight_as_input,
+ conv_float::is_ignored_nchw_relu6_weight_as_input,
+ conv_float::examples_nchw_relu6_weight_as_input);
+}
+
+void CreateModel_nchw_relu6_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu6_weight_as_input_relaxed);
+}
+
+void CreateModel_nchw_relu6_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_nchw_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_quant8,
+ conv_float::examples_nchw_relu6_weight_as_input_quant8);
+}
+
+#include "../generated/tests/conv_float.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/add_internal.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/add_internal.mod.py
new file mode 100644
index 000000000..28c4afc52
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/add_internal.mod.py
@@ -0,0 +1,78 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+
+i0 = Input("i0", ("TENSOR_FLOAT32", [2])) # input 0
+i1 = Input("i1", ("TENSOR_FLOAT32", [2])) # input 0
+i2 = Input("i2", ("TENSOR_FLOAT32", [2])) # input 0
+i3 = Input("i3", ("TENSOR_FLOAT32", [2])) # input 0
+i4 = Input("i4", ("TENSOR_FLOAT32", [2])) # input 0
+i5 = Input("i5", ("TENSOR_FLOAT32", [2])) # input 0
+i6 = Input("i6", ("TENSOR_FLOAT32", [2])) # input 0
+i7 = Input("i7", ("TENSOR_FLOAT32", [2])) # input 0
+i8 = Input("i8", ("TENSOR_FLOAT32", [2])) # input 0
+
+t0 = Internal("t0", ("TENSOR_FLOAT32", [2]))
+t1 = Internal("t1", ("TENSOR_FLOAT32", [2]))
+t2 = Internal("t2", ("TENSOR_FLOAT32", [2]))
+t3 = Internal("t3", ("TENSOR_FLOAT32", [2]))
+t4 = Internal("t4", ("TENSOR_FLOAT32", [2]))
+t5 = Internal("t5", ("TENSOR_FLOAT32", [2]))
+t6 = Internal("t6", ("TENSOR_FLOAT32", [2]))
+
+o0 = Output("o0", ("TENSOR_FLOAT32", [2]))
+o1 = Output("o1", ("TENSOR_FLOAT32", [2]))
+o2 = Output("o2", ("TENSOR_FLOAT32", [2]))
+
+p0 = Parameter("p0", ("TENSOR_FLOAT32", [2]), [0.0, 1.0])
+act = Int32Scalar("act", 0)
+
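+# NOTE: the ADD operations below are intentionally listed out of graph order;
+# the test generator has to topologically sort them through the internal
+# operands t0-t6 (compare the reordered ADD sequence in the generated model
+# constructor in stdout.txt.expect).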
+model.Operation("ADD", o0, o1, act).To(o2)
+model.Operation("ADD", p0, t5, act).To(t6)
+model.Operation("ADD", i2, t0, act).To(t1)
+model.Operation("ADD", i6, p0, act).To(t5)
+model.Operation("ADD", i0, i1, act).To(t0)
+model.Operation("ADD", t1, t3, act).To(t4)
+model.Operation("ADD", t2, i5, act).To(t3)
+model.Operation("ADD", t4, t6, act).To(o0)
+model.Operation("ADD", i3, i4, act).To(t2)
+model.Operation("ADD", i7, i8, act).To(o1)
+
+inputs = {
+ i0: [0, 0],
+ i1: [0, 0],
+ i2: [0, 0],
+ i3: [0, 0],
+ i4: [0, 0],
+ i5: [0, 0],
+ i6: [0, 0],
+ i7: [0, 0],
+ i8: [0, 0]
+}
+
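+# With every input zero, the only non-zero contribution is p0 = [0.0, 1.0]:
+# t5 = i6 + p0 = [0, 1], t6 = p0 + t5 = [0, 2], so o0 = t4 + t6 = [0, 2],
+# o1 = i7 + i8 = [0, 0], and o2 = o0 + o1 = [0, 2].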
+outputs = {
+ o0: [0, 2],
+ o1: [0, 0],
+ o2: [0, 2]
+}
+
+Example((inputs, outputs))
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stdout.txt.expect
new file mode 100644
index 000000000..2e271c712
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_internal/stdout.txt.expect
@@ -0,0 +1,98 @@
+// clang-format off
+// Generated file (from: add_internal.mod.py). Do not edit
+// clang-format off
+// Generated file (from: add_internal.mod.py). Do not edit
+// clang-format off
+// Generated file (from: add_internal.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace add_internal {
+// Generated add_internal test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace add_internal
+
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {2});
+ OperandType type1(Type::INT32, {});
+ // Phase 1, operands
+ auto o0 = model->addOperand(&type0);
+ auto o1 = model->addOperand(&type0);
+ auto act = model->addOperand(&type1);
+ auto o2 = model->addOperand(&type0);
+ auto p0 = model->addOperand(&type0);
+ auto t5 = model->addOperand(&type0);
+ auto t6 = model->addOperand(&type0);
+ auto i2 = model->addOperand(&type0);
+ auto t0 = model->addOperand(&type0);
+ auto t1 = model->addOperand(&type0);
+ auto i6 = model->addOperand(&type0);
+ auto i0 = model->addOperand(&type0);
+ auto i1 = model->addOperand(&type0);
+ auto t3 = model->addOperand(&type0);
+ auto t4 = model->addOperand(&type0);
+ auto t2 = model->addOperand(&type0);
+ auto i5 = model->addOperand(&type0);
+ auto i3 = model->addOperand(&type0);
+ auto i4 = model->addOperand(&type0);
+ auto i7 = model->addOperand(&type0);
+ auto i8 = model->addOperand(&type0);
+ // Phase 2, operations
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static float p0_init[] = {0.0f, 1.0f};
+ model->setOperandValue(p0, p0_init, sizeof(float) * 2);
+ model->addOperation(ANEURALNETWORKS_ADD, {i6, p0, act}, {t5});
+ model->addOperation(ANEURALNETWORKS_ADD, {p0, t5, act}, {t6});
+ model->addOperation(ANEURALNETWORKS_ADD, {i0, i1, act}, {t0});
+ model->addOperation(ANEURALNETWORKS_ADD, {i2, t0, act}, {t1});
+ model->addOperation(ANEURALNETWORKS_ADD, {i3, i4, act}, {t2});
+ model->addOperation(ANEURALNETWORKS_ADD, {t2, i5, act}, {t3});
+ model->addOperation(ANEURALNETWORKS_ADD, {t1, t3, act}, {t4});
+ model->addOperation(ANEURALNETWORKS_ADD, {t4, t6, act}, {o0});
+ model->addOperation(ANEURALNETWORKS_ADD, {i7, i8, act}, {o1});
+ model->addOperation(ANEURALNETWORKS_ADD, {o0, o1, act}, {o2});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {i2, i6, i0, i1, i5, i3, i4, i7, i8},
+ {o0, o1, o2});
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 0.0f}}, {1, {0.0f, 0.0f}}, {2, {0.0f, 0.0f}}, {3, {0.0f, 0.0f}}, {4, {0.0f, 0.0f}}, {5, {0.0f, 0.0f}}, {6, {0.0f, 0.0f}}, {7, {0.0f, 0.0f}}, {8, {0.0f, 0.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 2.0f}}, {1, {0.0f, 0.0f}}, {2, {0.0f, 2.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, add_internal) {
+ execute(add_internal::CreateModel,
+ add_internal::is_ignored,
+ add_internal::examples);
+}
+
+#include "../generated/tests/add_internal.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/conv_float.mod.py
new file mode 100644
index 000000000..61f7c92ee
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/conv_float.mod.py
@@ -0,0 +1,70 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model("model_name")
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{2, 2, 2, 2}", [1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [-200])
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+act = Int32Scalar("act", 0) # None activation
+layout = Int32Scalar("layout", 0) # NHWC
+pad = Int32Scalar("param", 1)
+stride0 = Int32Scalar("param1", 1)
+stride1 = Int32Scalar("param2", 1)
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad, stride0, stride1, act, layout).To(output)
+
+# Example 1. Input in operand 0.
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8]}
+
+output0 = {output: # output 0
+ [204, 120, 94, 104, 70, 164, 23, 112]}
+
+quant8 = DataTypeConverter(name="quantized").Identify({
+ i1: ("TENSOR_QUANT8_ASYMM", 0.5, 128),
+ f1: ("TENSOR_QUANT8_ASYMM", 0.25, 128),
+ b1: ("TENSOR_INT32", 0.125, 0),
+ output: ("TENSOR_QUANT8_ASYMM", 2, 100)
+})
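+# The quantized variant maps each value as q = round(value / scale) + zeroPoint,
+# e.g. bias -200 / 0.125 = -1600 and output 204 / 2 + 100 = 202, which is what
+# the generated quant8 examples contain.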
+nchw = DataLayoutConverter("NCHW", name="nchw_layout").Identify([i1, f1, output], [layout])
+relu = ActivationConverter("relu", name="act").Identify([output], [act])
+relu6 = ActivationConverter("relu6").Identify([output], [act])
+weight_as_input = ParameterAsInputConverter(name="w_as_input").Identify([f1])
+relax = RelaxedModeConverter(True, name="float_relaxed")
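+# relu6 clamps the output to [0, 6]; every float output above exceeds 6, so the
+# relu6 variations expect all 6.0f (103 after quantization: 6 / 2 + 100).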
+
+# Instantiate an example.
+# This will produce the cartesian product of
+# [nhwc, nchw_layout] * [act, relu6] * [w_as_param, w_as_input] * [float, float_relaxed, quantized],
+# i.e. 24 variations (the resulting test names are noted after the Example call below).
+Example(
+ (input0, output0), name="example_name"
+).AddVariations(
+ nchw, defaultName="nhwc"
+).AddVariations(
+ relu, relu6, includeDefault=False
+).AddVariations(
+ weight_as_input, defaultName="w_as_param"
+).AddVariations(
+ relax, quant8, defaultName="float"
+)
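+# The generator concatenates the model, example and variation names into the
+# emitted symbols, e.g. CreateModel_model_name_nhwc_act_w_as_param_float and the
+# test conv_float_model_name_example_name_nhwc_act_w_as_param_float (see
+# stdout.txt.expect).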
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stdout.txt.expect
new file mode 100644
index 000000000..8cc78fe5a
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_naming/stdout.txt.expect
@@ -0,0 +1,1848 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+void CreateModel_model_name_nhwc_act_w_as_param_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_act_w_as_param_float) {
+ execute(conv_float::CreateModel_model_name_nhwc_act_w_as_param_float,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_param_float,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_param_float);
+}
+
+void CreateModel_model_name_nhwc_act_w_as_param_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_act_w_as_param_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nhwc_act_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_param_float_relaxed);
+}
+
+void CreateModel_model_name_nhwc_act_w_as_param_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_act_w_as_param_quantized) {
+ execute(conv_float::CreateModel_model_name_nhwc_act_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_param_quantized);
+}
+
+void CreateModel_model_name_nhwc_act_w_as_input_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_act_w_as_input_float) {
+ execute(conv_float::CreateModel_model_name_nhwc_act_w_as_input_float,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_input_float,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_input_float);
+}
+
+void CreateModel_model_name_nhwc_act_w_as_input_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_act_w_as_input_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nhwc_act_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_input_float_relaxed);
+}
+
+void CreateModel_model_name_nhwc_act_w_as_input_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_act_w_as_input_quantized) {
+ execute(conv_float::CreateModel_model_name_nhwc_act_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_input_quantized);
+}
+
+void CreateModel_model_name_nhwc_relu6_w_as_param_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_relu6_w_as_param_float) {
+ execute(conv_float::CreateModel_model_name_nhwc_relu6_w_as_param_float,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_param_float,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_param_float);
+}
+
+void CreateModel_model_name_nhwc_relu6_w_as_param_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_relu6_w_as_param_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nhwc_relu6_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_param_float_relaxed);
+}
+
+void CreateModel_model_name_nhwc_relu6_w_as_param_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_relu6_w_as_param_quantized) {
+ execute(conv_float::CreateModel_model_name_nhwc_relu6_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_param_quantized);
+}
+
+void CreateModel_model_name_nhwc_relu6_w_as_input_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_relu6_w_as_input_float) {
+ execute(conv_float::CreateModel_model_name_nhwc_relu6_w_as_input_float,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_input_float,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_input_float);
+}
+
+void CreateModel_model_name_nhwc_relu6_w_as_input_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_relu6_w_as_input_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nhwc_relu6_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_input_float_relaxed);
+}
+
+void CreateModel_model_name_nhwc_relu6_w_as_input_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nhwc_relu6_w_as_input_quantized) {
+ execute(conv_float::CreateModel_model_name_nhwc_relu6_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_input_quantized);
+}
+
+void CreateModel_model_name_nchw_layout_act_w_as_param_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_act_w_as_param_float) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_act_w_as_param_float,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_param_float,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_param_float);
+}
+
+void CreateModel_model_name_nchw_layout_act_w_as_param_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_act_w_as_param_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_act_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_param_float_relaxed);
+}
+
+void CreateModel_model_name_nchw_layout_act_w_as_param_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_act_w_as_param_quantized) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_act_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_param_quantized);
+}
+
+void CreateModel_model_name_nchw_layout_act_w_as_input_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_act_w_as_input_float) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_act_w_as_input_float,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_input_float,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_input_float);
+}
+
+void CreateModel_model_name_nchw_layout_act_w_as_input_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_act_w_as_input_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_act_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_input_float_relaxed);
+}
+
+void CreateModel_model_name_nchw_layout_act_w_as_input_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_act_w_as_input_quantized) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_act_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_input_quantized);
+}
+
+void CreateModel_model_name_nchw_layout_relu6_w_as_param_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_relu6_w_as_param_float) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_relu6_w_as_param_float,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_param_float,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_param_float);
+}
+
+void CreateModel_model_name_nchw_layout_relu6_w_as_param_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_relu6_w_as_param_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_relu6_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_param_float_relaxed);
+}
+
+void CreateModel_model_name_nchw_layout_relu6_w_as_param_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_relu6_w_as_param_quantized) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_relu6_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_param_quantized);
+}
+
+void CreateModel_model_name_nchw_layout_relu6_w_as_input_float(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_relu6_w_as_input_float) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_relu6_w_as_input_float,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_input_float,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_input_float);
+}
+
+void CreateModel_model_name_nchw_layout_relu6_w_as_input_float_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_relu6_w_as_input_float_relaxed) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_relu6_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_input_float_relaxed);
+}
+
+void CreateModel_model_name_nchw_layout_relu6_w_as_input_quantized(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_model_name_example_name_nchw_layout_relu6_w_as_input_quantized) {
+ execute(conv_float::CreateModel_model_name_nchw_layout_relu6_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_input_quantized);
+}
+
+#include "../generated/tests/conv_float.mod.py.cpp"
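
A quick consistency check on the relu6 variants above: every pre-activation value in these examples (cf. the {204, 94, 70, 23, 120, 104, 164, 112} outputs of the relu variants earlier in this file) exceeds 6, so the fused relu6 clamps all eight outputs to 6.0, and the quantized variant maps that real value through the output quantization (scale 2.0, zero point 100) to 103. A minimal Python sketch, illustrative only and not part of the diff:

    # relu6 clamps each accumulator to [0, 6]; all raw values here exceed 6.
    raw = [204.0, 94.0, 70.0, 23.0, 120.0, 104.0, 164.0, 112.0]
    clamped = [min(max(v, 0.0), 6.0) for v in raw]
    print(clamped)  # eight 6.0 values, matching the float relu6 examples
    # Output operand is TENSOR_QUANT8_ASYMM with scale 2.0, zero point 100.
    print([int(round(v / 2.0)) + 100 for v in clamped])  # eight 103s
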
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/conv_quant8.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/conv_quant8.mod.py
new file mode 100644
index 000000000..f6b3e89ca
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/conv_quant8.mod.py
@@ -0,0 +1,35 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 3, 3, 1}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{1, 2, 2, 1}", [.25, .25, .25, .25])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [0])
+pad0 = Int32Scalar("pad0", 0)
+act = Int32Scalar("act", 0)
+stride = Int32Scalar("stride", 1)
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad0, pad0, pad0, pad0, stride, stride, act).To(output)
+model = model.RelaxedExecution(True)
+
+# Example 1. Input in operand 0.
+input0 = {i1: # input 0
+ [1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0]}
+
+output0 = {output: # output 0
+ [.875, .875, .875, .875]}
+
+# Instantiate an example
+Example((input0, output0))
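
The expected output follows directly from the spec above: with pad 0 and stride 1 (a VALID convolution), each 2x2 window of the 3x3 input contains the 0.5 center plus three 1.0 entries, so every output is (3 * 1.0 + 0.5) * 0.25 + 0 = 0.875. A short NumPy sketch, assuming a single channel (illustrative, not part of the diff):

    import numpy as np

    x = np.array([1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0]).reshape(3, 3)
    w = np.full((2, 2), 0.25)   # op2: all weights 0.25; bias op3 is 0
    out = np.empty((2, 2))
    for r in range(2):
        for c in range(2):
            # Stride-1 convolution with no padding.
            out[r, c] = np.sum(x[r:r + 2, c:c + 2] * w)
    print(out.ravel())          # [0.875 0.875 0.875 0.875]
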
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stdout.txt.expect
new file mode 100644
index 000000000..e0195afb1
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_quant8/stdout.txt.expect
@@ -0,0 +1,85 @@
+// clang-format off
+// Generated file (from: conv_quant8.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_quant8.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_quant8.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace conv_quant8 {
+// Generated conv_quant8 test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_quant8
+
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 3, 3, 1});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto pad0 = model->addOperand(&type3);
+ auto stride = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type1);
+ // Phase 2, operations
+ static float op2_init[] = {0.25f, 0.25f, 0.25f, 0.25f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 4);
+ static float op3_init[] = {0.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, pad0, pad0, pad0, pad0, stride, stride, act}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 0.5f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.875f, 0.875f, 0.875f, 0.875f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_quant8) {
+ execute(conv_quant8::CreateModel,
+ conv_quant8::is_ignored,
+ conv_quant8::examples);
+}
+
+#include "../generated/tests/conv_quant8.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/conv_float.mod.py
new file mode 100644
index 000000000..c5ec8bf4e
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/conv_float.mod.py
@@ -0,0 +1,44 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{2, 2, 2, 2}", [1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [-200])
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+act = Int32Scalar("act", 0) # None activation
+layout = Int32Scalar("layout", 0) # NHWC
+pad = Int32Scalar("param", 1)
+stride0 = Int32Scalar("param1", 1)
+stride1 = Int32Scalar("param2", 1)
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad, stride0, stride1, act, layout).To(output)
+
+# Example 1. Input in operand 0.
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8]}
+
+output0 = {output: # output 0
+ [204, 120, 94, 104, 70, 164, 23, 112]}
+
+quant8 = DataTypeConverter().Identify({
+ i1: ("TENSOR_QUANT8_ASYMM", 0.5, 128),
+ f1: ("TENSOR_QUANT8_ASYMM", 0.25, 128),
+ b1: ("TENSOR_INT32", 0.125, 0),
+ output: ("TENSOR_QUANT8_ASYMM", 2, 100)
+})
+
+# Instantiate an example
+Example((input0, output0)).AddNchw(i1, f1, output, layout).AddAllActivations(
+ output, act).AddInput(f1).AddVariations(RelaxedModeConverter(True), quant8)
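
The DataTypeConverter above fixes the affine mapping q = round(x / scale) + zero_point for each operand, which is exactly how the quantized literals in the generated tests below are derived (e.g. input 1.0 -> 130 at scale 0.5 / zero point 128, bias -200 -> -1600 at scale 0.125). A minimal sketch, assuming NumPy (illustrative, not part of the diff):

    import numpy as np

    def quantize(x, scale, zero_point):
        # Affine quantization used by TENSOR_QUANT8_ASYMM operands.
        q = np.round(np.asarray(x, dtype=np.float64) / scale) + zero_point
        return np.clip(q, 0, 255).astype(np.uint8)

    print(quantize([1, 2, 3, 4, 5, 6, 7, 8], 0.5, 128))   # 130 ... 144 (input)
    print(quantize([1, 2, 3, 4, 5, 6, 7, 8,
                    8, 7, 6, 5, 4, 3, 2, 1], 0.25, 128))  # 132 ... 132 (weights)
    print(int(round(-200 / 0.125)))                       # -1600 (TENSOR_INT32 bias)
    print(quantize([204, 120, 94, 104, 70, 164, 23, 112], 2.0, 100))  # 202 ... 156
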
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stderr.txt.expect
new file mode 100644
index 000000000..7ac2240b2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stderr.txt.expect
@@ -0,0 +1,3 @@
+Output CTS model: -
+Output example:-
+Output CTS test: -
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stdout.txt.expect
new file mode 100644
index 000000000..87e74f722
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_variation/stdout.txt.expect
@@ -0,0 +1,3688 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+void CreateModel_none(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_none(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_none) {
+ execute(conv_float::CreateModel_none,
+ conv_float::is_ignored_none,
+ conv_float::examples_none);
+}
+
+void CreateModel_none_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_none_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_none_relaxed) {
+ execute(conv_float::CreateModel_none_relaxed,
+ conv_float::is_ignored_none_relaxed,
+ conv_float::examples_none_relaxed);
+}
+
+void CreateModel_none_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_none_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_none_quant8) {
+ execute(conv_float::CreateModel_none_quant8,
+ conv_float::is_ignored_none_quant8,
+ conv_float::examples_none_quant8);
+}
+
+void CreateModel_none_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_none_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_none_weight_as_input) {
+ execute(conv_float::CreateModel_none_weight_as_input,
+ conv_float::is_ignored_none_weight_as_input,
+ conv_float::examples_none_weight_as_input);
+}
+
+void CreateModel_none_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_none_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_none_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_none_weight_as_input_relaxed,
+ conv_float::is_ignored_none_weight_as_input_relaxed,
+ conv_float::examples_none_weight_as_input_relaxed);
+}
+
+void CreateModel_none_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_none_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_none_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_none_weight_as_input_quant8,
+ conv_float::is_ignored_none_weight_as_input_quant8,
+ conv_float::examples_none_weight_as_input_quant8);
+}
+
+void CreateModel_relu(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu) {
+ execute(conv_float::CreateModel_relu,
+ conv_float::is_ignored_relu,
+ conv_float::examples_relu);
+}
+
+void CreateModel_relu_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_relaxed) {
+ execute(conv_float::CreateModel_relu_relaxed,
+ conv_float::is_ignored_relu_relaxed,
+ conv_float::examples_relu_relaxed);
+}
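+
+// relaxComputationFloat32toFloat16(true) allows a driver to evaluate
+// TENSOR_FLOAT32 operands with the range and precision of IEEE float16.
+// The expected values stay the same; the relaxed tests are presumably
+// checked against a correspondingly looser tolerance by the harness.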
+
+void CreateModel_relu_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_quant8) {
+ execute(conv_float::CreateModel_relu_quant8,
+ conv_float::is_ignored_relu_quant8,
+ conv_float::examples_relu_quant8);
+}
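+
+// The quantized bias follows the NNAPI CONV_2D constraint that the bias
+// scale equals input_scale * filter_scale (0.5f * 0.25f = 0.125f) with
+// zeroPoint 0, so the int32 bias -1600 dequantizes to -1600 * 0.125f =
+// -200.0f, the same bias as the float model. A minimal sketch of that
+// mapping (illustrative helper, not part of the generated test):
+//
+//   inline float dequantize(int32_t q, float scale, int32_t zeroPoint) {
+//     return scale * static_cast<float>(q - zeroPoint);
+//   }
+//   // dequantize(-1600, 0.125f, 0) == -200.0f   (bias)
+//   // dequantize(132, 0.25f, 128)  == 1.0f      (first filter element)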
+
+void CreateModel_relu_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_weight_as_input) {
+ execute(conv_float::CreateModel_relu_weight_as_input,
+ conv_float::is_ignored_relu_weight_as_input,
+ conv_float::examples_relu_weight_as_input);
+}
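+
+// In the *_weight_as_input variants op2 carries no constant value: the
+// filter is listed as a model input in identifyInputsAndOutputs and its
+// data arrives at execution time as input index 1 of the example,
+// exercising the non-constant-weights code path.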
+
+void CreateModel_relu_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_relu_weight_as_input_relaxed,
+ conv_float::examples_relu_weight_as_input_relaxed);
+}
+
+void CreateModel_relu_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_relu_weight_as_input_quant8,
+ conv_float::is_ignored_relu_weight_as_input_quant8,
+ conv_float::examples_relu_weight_as_input_quant8);
+}
+
+void CreateModel_relu1(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu1(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu1) {
+ execute(conv_float::CreateModel_relu1,
+ conv_float::is_ignored_relu1,
+ conv_float::examples_relu1);
+}
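+
+// act value 2 selects ANEURALNETWORKS_FUSED_RELU1, which clamps the output
+// to [-1.0, 1.0]. Every unclamped value (23.0f through 204.0f) exceeds
+// 1.0f, so all eight expected outputs saturate to 1.0f.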
+
+void CreateModel_relu1_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu1_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu1_relaxed) {
+ execute(conv_float::CreateModel_relu1_relaxed,
+ conv_float::is_ignored_relu1_relaxed,
+ conv_float::examples_relu1_relaxed);
+}
+
+void CreateModel_relu1_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu1_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu1_quant8) {
+ execute(conv_float::CreateModel_relu1_quant8,
+ conv_float::is_ignored_relu1_quant8,
+ conv_float::examples_relu1_quant8);
+}
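+
+// With output scale 2.0 and zeroPoint 100, the saturated 1.0f maps to
+// 100 + 1.0f / 2.0f = 100.5; the generated expectation of 100 is consistent
+// with round-half-to-even quantization in the test generator.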
+
+void CreateModel_relu1_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu1_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu1_weight_as_input) {
+ execute(conv_float::CreateModel_relu1_weight_as_input,
+ conv_float::is_ignored_relu1_weight_as_input,
+ conv_float::examples_relu1_weight_as_input);
+}
+
+void CreateModel_relu1_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu1_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu1_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_relu1_weight_as_input_relaxed,
+ conv_float::is_ignored_relu1_weight_as_input_relaxed,
+ conv_float::examples_relu1_weight_as_input_relaxed);
+}
+
+void CreateModel_relu1_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu1_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu1_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_relu1_weight_as_input_quant8,
+ conv_float::is_ignored_relu1_weight_as_input_quant8,
+ conv_float::examples_relu1_weight_as_input_quant8);
+}
+
+void CreateModel_relu6(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6) {
+ execute(conv_float::CreateModel_relu6,
+ conv_float::is_ignored_relu6,
+ conv_float::examples_relu6);
+}
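+
+// act value 3 selects ANEURALNETWORKS_FUSED_RELU6, which clamps the output
+// to [0.0, 6.0]. Every unclamped value exceeds 6.0f, so all eight expected
+// outputs saturate to 6.0f.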
+
+void CreateModel_relu6_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_relaxed) {
+ execute(conv_float::CreateModel_relu6_relaxed,
+ conv_float::is_ignored_relu6_relaxed,
+ conv_float::examples_relu6_relaxed);
+}
+
+void CreateModel_relu6_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_quant8) {
+ execute(conv_float::CreateModel_relu6_quant8,
+ conv_float::is_ignored_relu6_quant8,
+ conv_float::examples_relu6_quant8);
+}
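+
+// Here the quantization is exact: the saturated 6.0f maps to
+// 100 + 6.0f / 2.0f = 103, the value expected in the QUANT8_ASYMM output.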
+
+void CreateModel_relu6_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_weight_as_input) {
+ execute(conv_float::CreateModel_relu6_weight_as_input,
+ conv_float::is_ignored_relu6_weight_as_input,
+ conv_float::examples_relu6_weight_as_input);
+}
+
+void CreateModel_relu6_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_relu6_weight_as_input_relaxed,
+ conv_float::examples_relu6_weight_as_input_relaxed);
+}
+
+void CreateModel_relu6_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {0};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_relu6_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_relu6_weight_as_input_quant8,
+ conv_float::examples_relu6_weight_as_input_quant8);
+}
+
+void CreateModel_nchw_none(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_none(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_none) {
+ execute(conv_float::CreateModel_nchw_none,
+ conv_float::is_ignored_nchw_none,
+ conv_float::examples_nchw_none);
+}
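+
+// The *_nchw_* variants set the layout operand to 1 (NCHW) instead of 0
+// (NHWC, used above). Filter constants and example tensors are the same
+// data permuted accordingly; e.g. the NHWC input {1,2,3,4,5,6,7,8} of
+// shape {1,2,2,2} becomes {1,3,5,7,2,4,6,8} in NCHW.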
+
+void CreateModel_nchw_none_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_none_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_none_relaxed) {
+ execute(conv_float::CreateModel_nchw_none_relaxed,
+ conv_float::is_ignored_nchw_none_relaxed,
+ conv_float::examples_nchw_none_relaxed);
+}
+
+void CreateModel_nchw_none_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_none_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_none_quant8) {
+ execute(conv_float::CreateModel_nchw_none_quant8,
+ conv_float::is_ignored_nchw_none_quant8,
+ conv_float::examples_nchw_none_quant8);
+}
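+
+// The *_quant8 variants re-encode the float model under the asymmetric
+// quantization rule real = scale * (q - zeroPoint):
+//   input  (scale 0.5,  zeroPoint 128): {130, 134, ...} -> {1.0, 3.0, ...}
+//   filter (scale 0.25, zeroPoint 128): {132, 140, ...} -> {1.0, 3.0, ...}
+//   bias   (INT32, scale 0.125 = 0.5 * 0.25, zeroPoint 0): -1600 -> -200.0
+//   output (scale 2.0,  zeroPoint 100): {202, 147, ...} -> {204.0, 94.0, ...}
+// so the quantized example computes the same convolution, up to rounding to
+// the nearest representable step (23.0 has no exact encoding and lands on
+// 112, i.e. 24.0).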
+
+void CreateModel_nchw_none_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_none_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_none_weight_as_input) {
+ execute(conv_float::CreateModel_nchw_none_weight_as_input,
+ conv_float::is_ignored_nchw_none_weight_as_input,
+ conv_float::examples_nchw_none_weight_as_input);
+}
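+
+// The *_weight_as_input variants skip setOperandValue for the filter and
+// pass it to identifyInputsAndOutputs as a second model input, so each
+// example supplies the same filter data at execution time under input
+// index 1. This exercises the non-constant-filter path of the backend.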
+
+void CreateModel_nchw_none_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_none_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_none_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_nchw_none_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_none_weight_as_input_relaxed,
+ conv_float::examples_nchw_none_weight_as_input_relaxed);
+}
+
+void CreateModel_nchw_none_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_none_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_none_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_nchw_none_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_none_weight_as_input_quant8,
+ conv_float::examples_nchw_none_weight_as_input_quant8);
+}
+
+void CreateModel_nchw_relu(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu) {
+ execute(conv_float::CreateModel_nchw_relu,
+ conv_float::is_ignored_nchw_relu,
+ conv_float::examples_nchw_relu);
+}
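+
+// The act operand holds an NNAPI FuseCode: 0 = NONE, 1 = RELU, 2 = RELU1
+// (clamp to [-1, 1]), 3 = RELU6 (clamp to [0, 6]). With the -200 bias every
+// pre-activation result in this example is already positive, so the RELU
+// expectations match the NONE expectations exactly; only RELU1 and RELU6
+// below actually alter the data by saturating it.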
+
+void CreateModel_nchw_relu_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu_relaxed,
+ conv_float::is_ignored_nchw_relu_relaxed,
+ conv_float::examples_nchw_relu_relaxed);
+}
+
+void CreateModel_nchw_relu_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_quant8) {
+ execute(conv_float::CreateModel_nchw_relu_quant8,
+ conv_float::is_ignored_nchw_relu_quant8,
+ conv_float::examples_nchw_relu_quant8);
+}
+
+void CreateModel_nchw_relu_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_weight_as_input) {
+ execute(conv_float::CreateModel_nchw_relu_weight_as_input,
+ conv_float::is_ignored_nchw_relu_weight_as_input,
+ conv_float::examples_nchw_relu_weight_as_input);
+}
+
+void CreateModel_nchw_relu_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu_weight_as_input_relaxed);
+}
+
+void CreateModel_nchw_relu_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {1};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_nchw_relu_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu_weight_as_input_quant8,
+ conv_float::examples_nchw_relu_weight_as_input_quant8);
+}
+
+void CreateModel_nchw_relu1(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu1(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu1) {
+ execute(conv_float::CreateModel_nchw_relu1,
+ conv_float::is_ignored_nchw_relu1,
+ conv_float::examples_nchw_relu1);
+}
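+
+// Under RELU1 (act = 2) the output is clamped to [-1, 1], so all eight
+// positive convolution results saturate to 1.0f. In the quant8 variant the
+// expectation of 100 appears to come from 1.0 / 2.0 + 100 = 100.5 rounded
+// half-to-even by the test generator.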
+
+void CreateModel_nchw_relu1_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu1_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu1_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu1_relaxed,
+ conv_float::is_ignored_nchw_relu1_relaxed,
+ conv_float::examples_nchw_relu1_relaxed);
+}
+
+void CreateModel_nchw_relu1_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu1_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu1_quant8) {
+ execute(conv_float::CreateModel_nchw_relu1_quant8,
+ conv_float::is_ignored_nchw_relu1_quant8,
+ conv_float::examples_nchw_relu1_quant8);
+}
+
+void CreateModel_nchw_relu1_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu1_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu1_weight_as_input) {
+ execute(conv_float::CreateModel_nchw_relu1_weight_as_input,
+ conv_float::is_ignored_nchw_relu1_weight_as_input,
+ conv_float::examples_nchw_relu1_weight_as_input);
+}
+
+void CreateModel_nchw_relu1_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu1_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu1_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu1_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu1_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu1_weight_as_input_relaxed);
+}
+
+void CreateModel_nchw_relu1_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {2};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu1_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu1_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_nchw_relu1_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu1_weight_as_input_quant8,
+ conv_float::examples_nchw_relu1_weight_as_input_quant8);
+}
+
+void CreateModel_nchw_relu6(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6) {
+ execute(conv_float::CreateModel_nchw_relu6,
+ conv_float::is_ignored_nchw_relu6,
+ conv_float::examples_nchw_relu6);
+}
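+
+// Under RELU6 (act = 3) the output is clamped to [0, 6], so every result
+// saturates to 6.0f; the quant8 expectation follows as
+// 6.0 / 2.0 + 100 = 103.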
+
+void CreateModel_nchw_relu6_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu6_relaxed,
+ conv_float::is_ignored_nchw_relu6_relaxed,
+ conv_float::examples_nchw_relu6_relaxed);
+}
+
+void CreateModel_nchw_relu6_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static uint8_t op2_init[] = {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132};
+ model->setOperandValue(op2, op2_init, sizeof(uint8_t) * 16);
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_quant8) {
+ execute(conv_float::CreateModel_nchw_relu6_quant8,
+ conv_float::is_ignored_nchw_relu6_quant8,
+ conv_float::examples_nchw_relu6_quant8);
+}
+
+void CreateModel_nchw_relu6_weight_as_input(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_weight_as_input) {
+ execute(conv_float::CreateModel_nchw_relu6_weight_as_input,
+ conv_float::is_ignored_nchw_relu6_weight_as_input,
+ conv_float::examples_nchw_relu6_weight_as_input);
+}
+
+void CreateModel_nchw_relu6_weight_as_input_relaxed(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_weight_as_input_relaxed) {
+ execute(conv_float::CreateModel_nchw_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu6_weight_as_input_relaxed);
+}
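
Note on the `_relaxed` variants: `relaxComputationFloat32toFloat16(true)` lets the driver evaluate TENSOR_FLOAT32 arithmetic at float16 precision, so the harness accepts a correspondingly wider tolerance. A minimal sketch (assuming numpy is available; not part of the test harness) of the rounding this permits:

    import numpy as np

    x = np.float32(0.1)                      # not exactly representable in binary
    print(np.float16(x))                     # ~0.09998 at half precision
    print(float(np.float16(x)) - float(x))   # the error relaxed mode tolerates
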
+
+void CreateModel_nchw_relu6_weight_as_input_quant8(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 0.5f, 128);
+ OperandType type5(Type::TENSOR_QUANT8_ASYMM, {2, 2, 2, 2}, 0.25f, 128);
+ OperandType type6(Type::TENSOR_INT32, {1}, 0.125f, 0);
+ OperandType type7(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 2}, 2.0f, 100);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type4);
+ auto op2 = model->addOperand(&type5);
+ auto op3 = model->addOperand(&type6);
+ auto param = model->addOperand(&type3);
+ auto param1 = model->addOperand(&type3);
+ auto param2 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto layout = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type7);
+ // Phase 2, operations
+ static int32_t op3_init[] = {-1600};
+ model->setOperandValue(op3, op3_init, sizeof(int32_t) * 1);
+ static int32_t param_init[] = {1};
+ model->setOperandValue(param, param_init, sizeof(int32_t) * 1);
+ static int32_t param1_init[] = {1};
+ model->setOperandValue(param1, param1_init, sizeof(int32_t) * 1);
+ static int32_t param2_init[] = {1};
+ model->setOperandValue(param2, param2_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {3};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t layout_init[] = {1};
+ model->setOperandValue(layout, layout_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, param, param1, param2, act, layout}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op4});
+ assert(model->isValid());
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(GeneratedTests, conv_float_nchw_relu6_weight_as_input_quant8) {
+ execute(conv_float::CreateModel_nchw_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_quant8,
+ conv_float::examples_nchw_relu6_weight_as_input_quant8);
+}
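
The `_quant8` example data above follows directly from the declared `OperandType` parameters: quantized = round(real / scale) + zeroPoint, and the bias scale is the product of the input and filter scales (0.5 * 0.25 = 0.125). A minimal sketch reproducing the values in `examples_nchw_relu6_weight_as_input_quant8` (`quantize()` is an illustrative helper, not part of the harness):

    def quantize(real, scale, zero_point):
        return int(round(real / scale)) + zero_point

    # Inputs: scale 0.5, zeroPoint 128 -> 1.0 maps to 130, 8.0 to 144
    print([quantize(v, 0.5, 128) for v in [1.0, 3.0, 5.0, 7.0, 2.0, 4.0, 6.0, 8.0]])
    # Bias: scale = 0.5 * 0.25 = 0.125, zeroPoint 0 -> -200.0 maps to -1600 (op3_init above)
    print(quantize(-200.0, 0.125, 0))
    # Output: RELU6 clamps every float result to 6.0; scale 2.0, zeroPoint 100 -> 103
    print(quantize(6.0, 2.0, 100))
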
+
+#include "../generated/tests/conv_float.mod.py.cpp"
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/lstm_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/lstm_float.mod.py
new file mode 100644
index 000000000..60eec8280
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/lstm_float.mod.py
@@ -0,0 +1,145 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+
+n_batch = 1
+n_input = 2
+# n_cell and n_output have the same size when there is no projection.
+n_cell = 4
+n_output = 4
+
+input = Input("input", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_input))
+
+input_to_input_weights = Input("input_to_input_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+input_to_forget_weights = Input("input_to_forget_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+input_to_cell_weights = Input("input_to_cell_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+input_to_output_weights = Input("input_to_output_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_input))
+
+recurrent_to_input_weights = Input("recurrent_to_input_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))

+recurrent_to_forget_weights = Input("recurrent_to_forget_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))
+recurrent_to_cell_weights = Input("recurrent_to_cell_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))
+recurrent_to_output_weights = Input("recurrent_to_output_weights", "TENSOR_FLOAT32", "{%d, %d}" % (n_cell, n_output))
+
+cell_to_input_weights = Input("cell_to_input_weights", "TENSOR_FLOAT32", "{0}")
+cell_to_forget_weights = Input("cell_to_forget_weights", "TENSOR_FLOAT32", "{0}")
+cell_to_output_weights = Input("cell_to_output_weights", "TENSOR_FLOAT32", "{0}")
+
+input_gate_bias = Input("input_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+forget_gate_bias = Input("forget_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+cell_gate_bias = Input("cell_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+output_gate_bias = Input("output_gate_bias", "TENSOR_FLOAT32", "{%d}"%(n_cell))
+
+projection_weights = Input("projection_weights", "TENSOR_FLOAT32", "{0,0}")
+projection_bias = Input("projection_bias", "TENSOR_FLOAT32", "{0}")
+
+output_state_in = Input("output_state_in", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
+cell_state_in = Input("cell_state_in", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
+
+activation_param = Int32Scalar("activation_param", 4) # Tanh
+cell_clip_param = Float32Scalar("cell_clip_param", 0.)
+proj_clip_param = Float32Scalar("proj_clip_param", 0.)
+
+scratch_buffer = IgnoredOutput("scratch_buffer", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, (n_cell * 4)))
+output_state_out = Output("output_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
+cell_state_out = Output("cell_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
+output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
+
+model = model.Operation("LSTM",
+ input,
+
+ input_to_input_weights,
+ input_to_forget_weights,
+ input_to_cell_weights,
+ input_to_output_weights,
+
+ recurrent_to_input_weights,
+ recurrent_to_forget_weights,
+ recurrent_to_cell_weights,
+ recurrent_to_output_weights,
+
+ cell_to_input_weights,
+ cell_to_forget_weights,
+ cell_to_output_weights,
+
+ input_gate_bias,
+ forget_gate_bias,
+ cell_gate_bias,
+ output_gate_bias,
+
+ projection_weights,
+ projection_bias,
+
+ output_state_in,
+ cell_state_in,
+
+ activation_param,
+ cell_clip_param,
+ proj_clip_param
+).To([scratch_buffer, output_state_out, cell_state_out, output])
+model = model.RelaxedExecution(True)
+
+# Example 1. Input in operand 0,
+input0 = {input_to_input_weights: [-0.45018822, -0.02338299, -0.0870589, -0.34550029, 0.04266912, -0.15680569, -0.34856534, 0.43890524],
+ input_to_forget_weights: [0.09701663, 0.20334584, -0.50592935, -0.31343272, -0.40032279, 0.44781327, 0.01387155, -0.35593212],
+ input_to_cell_weights: [-0.50013041, 0.1370284, 0.11810488, 0.2013163, -0.20583314, 0.44344562, 0.22077113, -0.29909778],
+ input_to_output_weights: [-0.25065863, -0.28290087, 0.04613829, 0.40525138, 0.44272184, 0.03897077, -0.1556896, 0.19487578],
+
+ input_gate_bias: [0.,0.,0.,0.],
+ forget_gate_bias: [1.,1.,1.,1.],
+ cell_gate_bias: [0.,0.,0.,0.],
+ output_gate_bias: [0.,0.,0.,0.],
+
+ recurrent_to_input_weights: [
+ -0.0063535, -0.2042388, 0.31454784, -0.35746509, 0.28902304, 0.08183324,
+ -0.16555229, 0.02286911, -0.13566875, 0.03034258, 0.48091322,
+ -0.12528998, 0.24077177, -0.51332325, -0.33502164, 0.10629296],
+
+ recurrent_to_cell_weights: [
+ -0.3407414, 0.24443203, -0.2078532, 0.26320225, 0.05695659, -0.00123841,
+ -0.4744786, -0.35869038, -0.06418842, -0.13502428, -0.501764, 0.22830659,
+ -0.46367589, 0.26016325, -0.03894562, -0.16368064],
+
+ recurrent_to_forget_weights: [
+ -0.48684245, -0.06655136, 0.42224967, 0.2112639, 0.27654213, 0.20864892,
+ -0.07646349, 0.45877004, 0.00141793, -0.14609534, 0.36447752, 0.09196436,
+ 0.28053468, 0.01560611, -0.20127171, -0.01140004],
+
+ recurrent_to_output_weights: [
+ 0.43385774, -0.17194885, 0.2718237, 0.09215671, 0.24107647, -0.39835793,
+ 0.18212086, 0.01301402, 0.48572797, -0.50656658, 0.20047462, -0.20607421,
+ -0.51818722, -0.15390486, 0.0468148, 0.39922136],
+
+ cell_to_input_weights: [],
+ cell_to_forget_weights: [],
+ cell_to_output_weights: [],
+
+ projection_weights: [],
+ projection_bias: [],
+}
+
+test_input = [2., 3.]
+output_state = [0, 0, 0, 0]
+cell_state = [0, 0, 0, 0]
+golden_output = [-0.02973187, 0.1229473, 0.20885126, -0.15358765,]
+output0 = {
+ scratch_buffer: [ 0 for x in range(n_batch * n_cell * 4) ],
+ cell_state_out: [ -0.145439, 0.157475, 0.293663, -0.277353 ],
+ output_state_out: [ -0.0297319, 0.122947, 0.208851, -0.153588 ],
+ output: golden_output
+}
+input0[input] = test_input
+input0[output_state_in] = output_state
+input0[cell_state_in] = cell_state
+Example((input0, output0))
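
For reference, the 23-operand layout above is the standard LSTM cell with CIFG disabled, no peephole connections (the `{0}` cell-to-gate weights), and no projection (the `{0,0}` projection weights). A compact numpy sketch of one step under those assumptions, with Tanh output activation as set by `activation_param` (the names `Wi`, `Ri`, etc. are illustrative, not harness API), which should reproduce the golden outputs above to within float tolerance:

    import numpy as np

    def lstm_step(x, h, c, Wi, Wf, Wc, Wo, Ri, Rf, Rc, Ro, bi, bf, bc, bo):
        sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
        i = sigmoid(Wi @ x + Ri @ h + bi)   # input gate
        f = sigmoid(Wf @ x + Rf @ h + bf)   # forget gate (bias is 1.0 in this test)
        g = np.tanh(Wc @ x + Rc @ h + bc)   # cell candidate
        c_new = f * c + i * g               # -> cell_state_out
        o = sigmoid(Wo @ x + Ro @ h + bo)   # output gate
        h_new = o * np.tanh(c_new)          # -> output_state_out / output
        return h_new, c_new
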
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stdout.txt.expect
new file mode 100644
index 000000000..4ac72618c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_float/stdout.txt.expect
@@ -0,0 +1,322 @@
+// clang-format off
+// Generated file (from: lstm_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: lstm_float.mod.py). Do not edit
+// Generated from: lstm_float.mod.py.
+namespace lstm_float {
+// Generated lstm_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace lstm_float
+
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {0},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {0},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {0},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {0, 0},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {0},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::FLOAT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::FLOAT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 16},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::LSTM,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22},
+ .outputs = {23, 24, 25, 26},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19};
+ const std::vector<uint32_t> outputIndexes = {23, 24, 25, 26};
+ std::vector<uint8_t> operandValues = {
+ 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {0};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2.0f, 3.0f}}, {1, {-0.45018822f, -0.02338299f, -0.0870589f, -0.34550029f, 0.04266912f, -0.15680569f, -0.34856534f, 0.43890524f}}, {2, {0.09701663f, 0.20334584f, -0.50592935f, -0.31343272f, -0.40032279f, 0.44781327f, 0.01387155f, -0.35593212f}}, {3, {-0.50013041f, 0.1370284f, 0.11810488f, 0.2013163f, -0.20583314f, 0.44344562f, 0.22077113f, -0.29909778f}}, {4, {-0.25065863f, -0.28290087f, 0.04613829f, 0.40525138f, 0.44272184f, 0.03897077f, -0.1556896f, 0.19487578f}}, {5, {-0.0063535f, -0.2042388f, 0.31454784f, -0.35746509f, 0.28902304f, 0.08183324f, -0.16555229f, 0.02286911f, -0.13566875f, 0.03034258f, 0.48091322f, -0.12528998f, 0.24077177f, -0.51332325f, -0.33502164f, 0.10629296f}}, {6, {-0.48684245f, -0.06655136f, 0.42224967f, 0.2112639f, 0.27654213f, 0.20864892f, -0.07646349f, 0.45877004f, 0.00141793f, -0.14609534f, 0.36447752f, 0.09196436f, 0.28053468f, 0.01560611f, -0.20127171f, -0.01140004f}}, {7, {-0.3407414f, 0.24443203f, -0.2078532f, 0.26320225f, 0.05695659f, -0.00123841f, -0.4744786f, -0.35869038f, -0.06418842f, -0.13502428f, -0.501764f, 0.22830659f, -0.46367589f, 0.26016325f, -0.03894562f, -0.16368064f}}, {8, {0.43385774f, -0.17194885f, 0.2718237f, 0.09215671f, 0.24107647f, -0.39835793f, 0.18212086f, 0.01301402f, 0.48572797f, -0.50656658f, 0.20047462f, -0.20607421f, -0.51818722f, -0.15390486f, 0.0468148f, 0.39922136f}}, {9, {}}, {10, {}}, {11, {}}, {12, {0.0f, 0.0f, 0.0f, 0.0f}}, {13, {1.0f, 1.0f, 1.0f, 1.0f}}, {14, {0.0f, 0.0f, 0.0f, 0.0f}}, {15, {0.0f, 0.0f, 0.0f, 0.0f}}, {16, {}}, {17, {}}, {18, {0.0f, 0.0f, 0.0f, 0.0f}}, {19, {0.0f, 0.0f, 0.0f, 0.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}, {1, {-0.0297319f, 0.122947f, 0.208851f, -0.153588f}}, {2, {-0.145439f, 0.157475f, 0.293663f, -0.277353f}}, {3, {-0.02973187f, 0.1229473f, 0.20885126f, -0.15358765f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, lstm_float) {
+ generated_tests::Execute(device,
+ lstm_float::createTestModel,
+ lstm_float::is_ignored,
+ lstm_float::examples);
+}
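
The CONSTANT_COPY pool above is easy to check by hand: each `.location` offset indexes into `operandValues`, which holds the three scalar parameters back to back. A quick decode, assuming the little-endian byte order shown:

    import struct

    operand_values = bytes([4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
    activation, = struct.unpack_from("<i", operand_values, 0)  # offset 0: 4 == Tanh
    cell_clip,  = struct.unpack_from("<f", operand_values, 4)  # offset 4: 0.0
    proj_clip,  = struct.unpack_from("<f", operand_values, 8)  # offset 8: 0.0
    print(activation, cell_clip, proj_clip)                    # 4 0.0 0.0
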
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py
new file mode 100644
index 000000000..051780877
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/depthwise_conv2d_quant8.mod.py
@@ -0,0 +1,43 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 2}, 0.5f, 0")
+f1 = Input("op2", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 2}, 0.5f, 0")
+b1 = Input("op3", "TENSOR_INT32", "{2}, 0.25f, 0")
+pad0 = Int32Scalar("pad0", 0)
+act = Int32Scalar("act", 0)
+stride = Int32Scalar("stride", 1)
+cm = Int32Scalar("channelMultiplier", 1)
+output = Output("op4", "TENSOR_QUANT8_ASYMM", "{1, 1, 1, 2}, 1.f, 0")
+
+model = model.Operation("DEPTHWISE_CONV_2D",
+ i1, f1, b1,
+ pad0, pad0, pad0, pad0,
+ stride, stride,
+ cm, act).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [4, 16, 4, 32, 4, 64, 4, 128],
+ f1:
+ [2, 4, 2, 0, 2, 2, 2, 0],
+ b1:
+ [0, 0]}
+# (i1 (depthconv) f1)
+output0 = {output: # output 0
+ [8, 48]}
+
+# Instantiate an example
+Example((input0, output0))
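
The expected output [8, 48] can be verified by dequantizing, convolving each channel with its 2x2 filter, and requantizing. With input and filter scales of 0.5 and zero points of 0, a minimal sketch:

    # NHWC input {1, 2, 2, 2}: channel 0 holds [4, 4, 4, 4], channel 1 [16, 32, 64, 128]
    inp  = [4, 16, 4, 32, 4, 64, 4, 128]
    filt = [2, 4, 2, 0, 2, 2, 2, 0]

    for ch in range(2):
        # Dequantize (scale 0.5, zeroPoint 0), multiply-accumulate over the 2x2 window
        acc = sum((inp[i] * 0.5) * (filt[i] * 0.5) for i in range(ch, 8, 2))
        print(int(acc / 1.0))  # requantize with output scale 1.0, zeroPoint 0 -> 8, then 48
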
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stdout.txt.expect
new file mode 100644
index 000000000..760430798
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_backward_compatibility_quant8/stdout.txt.expect
@@ -0,0 +1,150 @@
+// clang-format off
+// Generated file (from: depthwise_conv2d_quant8.mod.py). Do not edit
+// clang-format off
+// Generated file (from: depthwise_conv2d_quant8.mod.py). Do not edit
+// Generated from: depthwise_conv2d_quant8.mod.py.
+namespace depthwise_conv2d_quant8 {
+// Generated depthwise_conv2d_quant8 test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace depthwise_conv2d_quant8
+
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 1, 1, 2},
+ .numberOfConsumers = 0,
+ .scale = 1.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DEPTHWISE_CONV_2D,
+ .inputs = {0, 1, 2, 3, 3, 3, 3, 4, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1, 2};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {{2, {0, 0}}},
+ // int -> QUANT8_ASYMM map
+ {{0, {4, 16, 4, 32, 4, 64, 4, 128}}, {1, {2, 4, 2, 0, 2, 2, 2, 0}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {8, 48}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_quant8) {
+ generated_tests::Execute(device,
+ depthwise_conv2d_quant8::createTestModel,
+ depthwise_conv2d_quant8::is_ignored,
+ depthwise_conv2d_quant8::examples);
+}
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/conv_float.mod.py
new file mode 100644
index 000000000..f6b3e89ca
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/conv_float.mod.py
@@ -0,0 +1,35 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 3, 3, 1}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{1, 2, 2, 1}", [.25, .25, .25, .25])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [0])
+pad0 = Int32Scalar("pad0", 0)
+act = Int32Scalar("act", 0)
+stride = Int32Scalar("stride", 1)
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad0, pad0, pad0, pad0, stride, stride, act).To(output)
+model = model.RelaxedExecution(True)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0]}
+
+output0 = {output: # output 0
+ [.875, .875, .875, .875]}
+
+# Instantiate an example
+Example((input0, output0))
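
Each 0.25-weight 2x2 filter application is just a four-point average, and every valid window over the 3x3 input contains the 0.5 center, hence the uniform 0.875 output. A quick check:

    img = [[1.0, 1.0, 1.0],
           [1.0, 0.5, 1.0],
           [1.0, 1.0, 1.0]]
    out = [[0.25 * (img[r][c] + img[r][c+1] + img[r+1][c] + img[r+1][c+1])
            for c in range(2)] for r in range(2)]
    print(out)  # [[0.875, 0.875], [0.875, 0.875]]
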
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stdout.txt.expect
new file mode 100644
index 000000000..ad54e14e2
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_float/stdout.txt.expect
@@ -0,0 +1,142 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// Generated from: conv_float.mod.py.
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 3, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 3, 3, 3, 4, 4, 5},
+ .outputs = {6},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {6};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 62, 0, 0, 128, 62, 0, 0, 128, 62, 0, 0, 128, 62, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 0.5f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.875f, 0.875f, 0.875f, 0.875f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel,
+ conv_float::is_ignored,
+ conv_float::examples);
+}
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/mean_implicit.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/mean_implicit.mod.py
new file mode 100644
index 000000000..be7b22bdb
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/mean_implicit.mod.py
@@ -0,0 +1,41 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+i0 = Input("i0", ("TENSOR_FLOAT32", [2, 2])) # input 0
+
+o1 = Output("o1", ("TENSOR_FLOAT32", [1, 2])) # output for model1
+o2 = Output("o2", ("TENSOR_FLOAT32", [2, 1])) # output for model2
+o3 = Output("o3", ("TENSOR_FLOAT32", [1])) # output for model3
+
+model1 = Model().Operation("MEAN", i0, [0], 1).To(o1) # along axis 0, keep_dim=True
+model2 = Model().Operation("MEAN", i0, [1], 1).To(o2) # along axis 1, keep_dim=True
+model3 = Model().Operation("MEAN", i0, [0, 1], 0).To(o3) # along both axes, keep_dim=False
+
+inputs1 = {i0: [1, 2, 3, 4]}
+outputs11 = {o1: [4, 6]}
+outputs12 = {o2: [3, 7]}
+outputs13 = {o3: [10]}
+
+inputs2 = {i0: [-1, -2, -3, -4]}
+outputs21 = {o1: [-4, -6]}
+outputs22 = {o2: [-3, -7]}
+outputs23 = {o3: [-10]}
+
+Example((inputs1, outputs11), model=model1)
+Example((inputs1, outputs12), model=model2)
+Example((inputs1, outputs13), model=model3)
+
+Example((inputs2, outputs21), model=model1)
+Example((inputs2, outputs22), model=model2)
+Example((inputs2, outputs23), model=model3)
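
The bare `[0]` and `1` arguments in each `Operation("MEAN", ...)` call are the implicit parameters this directory exercises: the generator promotes them to a constant TENSOR_INT32 axis operand and an INT32 keep_dims operand. That promotion is visible in the `operandValues` arrays of the expected output below; a sketch decoding them (assuming the little-endian layout shown there):

    import struct

    # model1: MEAN(i0, [0], 1) -> axis tensor [0] at offset 0, keep_dims 1 at offset 4
    print(struct.unpack("<2i", bytes([0, 0, 0, 0, 1, 0, 0, 0])))               # (0, 1)
    # model3: MEAN(i0, [0, 1], 0) -> axes [0, 1], then keep_dims 0
    print(struct.unpack("<3i", bytes([0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0])))   # (0, 1, 0)
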
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stdout.txt.expect
new file mode 100644
index 000000000..f996d8470
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_parameter/stdout.txt.expect
@@ -0,0 +1,381 @@
+// clang-format off
+// Generated file (from: mean_implicit.mod.py). Do not edit
+// clang-format off
+// Generated file (from: mean_implicit.mod.py). Do not edit
+// Generated from: mean_implicit.mod.py.
+namespace mean_implicit {
+// Generated mean_implicit test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace mean_implicit
+
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 3,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::MEAN,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {4.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-1.0f, -2.0f, -3.0f, -4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-4.0f, -6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, mean_implicit) {
+ generated_tests::Execute(device,
+ mean_implicit::createTestModel,
+ mean_implicit::is_ignored,
+ mean_implicit::examples);
+}
+
+// Create the model
+Model createTestModel_2() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 3,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::MEAN,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_2(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_2 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {3.0f, 7.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-1.0f, -2.0f, -3.0f, -4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-3.0f, -7.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, mean_implicit_2) {
+ generated_tests::Execute(device,
+ mean_implicit::createTestModel_2,
+ mean_implicit::is_ignored_2,
+ mean_implicit::examples_2);
+}
+
+// Create the model
+Model createTestModel_3() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 3,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::MEAN,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_3(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_3 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {10.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-1.0f, -2.0f, -3.0f, -4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-10.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, mean_implicit_3) {
+ generated_tests::Execute(device,
+ mean_implicit::createTestModel_3,
+ mean_implicit::is_ignored_3,
+ mean_implicit::examples_3);
+}
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/conv_float.mod.py
new file mode 100644
index 000000000..826f390c0
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/conv_float.mod.py
@@ -0,0 +1,52 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{2, 2, 2, 2}", [1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [-200])
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+act = Int32Scalar("act", 0) # None activation
+layout = Int32Scalar("layout", 0) # NHWC
+
+model = model.Operation("CONV_2D", i1, f1, b1, 1, 1, 1, act, layout).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8]}
+
+output0 = {output: # output 0
+ [204, 120, 94, 104, 70, 164, 23, 112]}
+
+quant8 = DataTypeConverter().Identify({
+ i1: ("TENSOR_QUANT8_ASYMM", 0.5, 128),
+ f1: ("TENSOR_QUANT8_ASYMM", 0.25, 128),
+ b1: ("TENSOR_INT32", 0.125, 0),
+ output: ("TENSOR_QUANT8_ASYMM", 2, 100)
+})
+
+# Instantiate an example
+Example(
+ (input0, output0)
+).AddVariations(
+ ("NCHW", [i1, f1, output], [layout])
+).AddVariations(
+ ("relu", [output], [act]),
+ ("relu6", [output], [act]),
+ includeDefault=False
+).AddVariations(
+ ("as_input", [f1])
+).AddVariations(
+ "relaxed", quant8
+)
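
The chained `AddVariations` calls compose multiplicatively: {default, NCHW} x {relu, relu6} (no default here, via includeDefault=False) x {default, as_input} x {default, relaxed, quant8} yields 24 generated variants, which is why the expected output below runs to several thousand lines. A sketch enumerating the name suffixes, assuming they concatenate the variation names given in the file:

    from itertools import product

    groups = [["", "nchw"], ["relu", "relu6"], ["", "as_input"], ["", "relaxed", "quant8"]]
    names = ["_".join(p for p in combo if p) for combo in product(*groups)]
    print(len(names))   # 24 variants
    print(names[:3])    # ['relu', 'relu_relaxed', 'relu_quant8'], matching the models below
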
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stdout.txt.expect
new file mode 100644
index 000000000..7e100da9b
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_implicit_variation/stdout.txt.expect
@@ -0,0 +1,3548 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// Generated from: conv_float.mod.py.
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+// Create the model
+Model createTestModel_relu() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu,
+ conv_float::is_ignored_relu,
+ conv_float::examples_relu);
+}
+
+// Create the model
+Model createTestModel_relu_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_relaxed,
+ conv_float::is_ignored_relu_relaxed,
+ conv_float::examples_relu_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
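+ // Quantized tensors store one byte per element with
+ // real = (quantized - zeroPoint) * scale: the first filter byte 132 at
+ // scale 0.25 / zeroPoint 128 represents (132 - 128) * 0.25 = 1.0, matching
+ // the float filter. The TENSOR_INT32 bias uses scale
+ // inputScale * filterScale = 0.125; its bytes {192, 249, 255, 255} encode
+ // int32 -1600, i.e. -1600 * 0.125 = -200.0.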
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
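+// The quantized example mirrors the float one: input bytes {130, 132, ...}
+// dequantize at scale 0.5 / zeroPoint 128 to {1.0, 2.0, ...}, and the output
+// bytes map back through the output's scale 2.0 / zeroPoint 100.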
+std::vector<MixedTypedExample> examples_relu_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_quant8,
+ conv_float::is_ignored_relu_quant8,
+ conv_float::examples_relu_quant8);
+}
+
+// Create the model
+Model createTestModel_relu_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
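+// With the filter promoted to a model input (_weight_as_input), the example
+// must supply two FLOAT32 tensors: entry 0 is the activation input and
+// entry 1 the weights that the base variant carried as CONSTANT_COPY.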
+std::vector<MixedTypedExample> examples_relu_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_weight_as_input,
+ conv_float::is_ignored_relu_weight_as_input,
+ conv_float::examples_relu_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_relu_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_relu_weight_as_input_relaxed,
+ conv_float::examples_relu_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_weight_as_input_quant8,
+ conv_float::is_ignored_relu_weight_as_input_quant8,
+ conv_float::examples_relu_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_relu6() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
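+ // Same byte layout as the base RELU model; only the fused-activation scalar
+ // at offset 80 changes from 1 (RELU) to 3 (RELU6), clamping every output
+ // element to the range [0, 6].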
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6,
+ conv_float::is_ignored_relu6,
+ conv_float::examples_relu6);
+}
+
+// Create the model
+Model createTestModel_relu6_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_relaxed,
+ conv_float::is_ignored_relu6_relaxed,
+ conv_float::examples_relu6_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu6_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
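+// At the output's quantization (scale 2.0, zeroPoint 100) the RELU6 clamp
+// value 6.0 quantizes to 6 / 2.0 + 100 = 103, so every expected element
+// saturates to 103.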
+std::vector<MixedTypedExample> examples_relu6_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_quant8,
+ conv_float::is_ignored_relu6_quant8,
+ conv_float::examples_relu6_quant8);
+}
+
+// Create the model
+Model createTestModel_relu6_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_weight_as_input,
+ conv_float::is_ignored_relu6_weight_as_input,
+ conv_float::examples_relu6_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_relu6_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_relu6_weight_as_input_relaxed,
+ conv_float::examples_relu6_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu6_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_relu6_weight_as_input_quant8,
+ conv_float::examples_relu6_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
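+ // The _nchw variants appear to set the final layout scalar (offset 84) to 1,
+ // so data is channel-first: the filter bytes decode to
+ // {1, 3, 5, 7, 2, 4, 6, 8, 8, 6, 4, 2, 7, 5, 3, 1}, i.e. each 2x2x2 slice of
+ // the NHWC filter transposed to CHW, and the example I/O below is likewise
+ // the NHWC data of the base test reordered.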
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu,
+ conv_float::is_ignored_nchw_relu,
+ conv_float::examples_nchw_relu);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_relaxed,
+ conv_float::is_ignored_nchw_relu_relaxed,
+ conv_float::examples_nchw_relu_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_quant8,
+ conv_float::is_ignored_nchw_relu_quant8,
+ conv_float::examples_nchw_relu_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_weight_as_input,
+ conv_float::is_ignored_nchw_relu_weight_as_input,
+ conv_float::examples_nchw_relu_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu_weight_as_input_quant8,
+ conv_float::examples_nchw_relu_weight_as_input_quant8);
+}
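+// Quantization check for the example above: real == scale * (q - zeroPoint).
+// The real input 1.0f maps to q = 1.0/0.5 + 128 = 130 (input scale 0.5,
+// zeroPoint 128) and the real output 204.0f to q = 204/2 + 100 = 202 (output
+// scale 2.0, zeroPoint 100), matching the first entries of the QUANT8_ASYMM
+// maps above.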
+
+// Create the model
+Model createTestModel_nchw_relu6() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
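+// This model differs from the plain relu variant in a single byte group: the
+// fused-activation scalar at offset 80 is {3, 0, 0, 0}, i.e. int32 3, which
+// selects RELU6 (clamp to [0, 6]) instead of 1 for RELU -- hence the all-6.0f
+// expected outputs below.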
+
+bool is_ignored_nchw_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6,
+ conv_float::is_ignored_nchw_relu6,
+ conv_float::examples_nchw_relu6);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_relaxed,
+ conv_float::is_ignored_nchw_relu6_relaxed,
+ conv_float::examples_nchw_relu6_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_quant8,
+ conv_float::is_ignored_nchw_relu6_quant8,
+ conv_float::examples_nchw_relu6_quant8);
+}
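+// Quantized RELU6 check: the real outputs clamp at 6.0f, and with output scale
+// 2.0 and zeroPoint 100 that quantizes to q = 6.0/2.0 + 100 = 103 -- exactly
+// the constant vector in the QUANT8_ASYMM output map above.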
+
+// Create the model
+Model createTestModel_nchw_relu6_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_weight_as_input,
+ conv_float::is_ignored_nchw_relu6_weight_as_input,
+ conv_float::examples_nchw_relu6_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu6_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_quant8,
+ conv_float::examples_nchw_relu6_weight_as_input_quant8);
+}
+
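Every generated model above funnels its scalar constants through a single
operandValues byte vector, addressed by the CONSTANT_COPY operand locations.
The following standalone sketch -- not part of the generated sources, and
assuming a little-endian host -- decodes the 24-byte block used by the
*_relu6_weight_as_input models, with the field layout inferred from the
operand locations shown above:

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

int main() {
  // operandValues as emitted for createTestModel_nchw_relu6_weight_as_input
  const std::vector<std::uint8_t> operandValues = {
      0, 0, 72, 195,  // float32 bias at offset 0: 0xC3480000 == -200.0f
      1, 0, 0, 0,     // implicit padding = 1 (SAME)
      1, 0, 0, 0,     // stride_w = 1
      1, 0, 0, 0,     // stride_h = 1
      3, 0, 0, 0,     // fused activation = 3 (RELU6)
      1, 0, 0, 0};    // layout = 1 (NCHW)

  float bias;
  std::memcpy(&bias, operandValues.data(), sizeof(bias));
  std::int32_t params[5];
  std::memcpy(params, operandValues.data() + 4, sizeof(params));

  std::cout << "bias = " << bias << "\n";  // prints: bias = -200
  for (std::int32_t p : params) std::cout << p << ' ';
  std::cout << "\n";                       // prints: 1 1 1 3 1
  return 0;
}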
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/add_internal.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/add_internal.mod.py
new file mode 100644
index 000000000..28c4afc52
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/add_internal.mod.py
@@ -0,0 +1,71 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+
+i0 = Input("i0", ("TENSOR_FLOAT32", [2])) # input 0
+i1 = Input("i1", ("TENSOR_FLOAT32", [2])) # input 1
+i2 = Input("i2", ("TENSOR_FLOAT32", [2])) # input 2
+i3 = Input("i3", ("TENSOR_FLOAT32", [2])) # input 3
+i4 = Input("i4", ("TENSOR_FLOAT32", [2])) # input 4
+i5 = Input("i5", ("TENSOR_FLOAT32", [2])) # input 5
+i6 = Input("i6", ("TENSOR_FLOAT32", [2])) # input 6
+i7 = Input("i7", ("TENSOR_FLOAT32", [2])) # input 7
+i8 = Input("i8", ("TENSOR_FLOAT32", [2])) # input 8
+
+t0 = Internal("t0", ("TENSOR_FLOAT32", [2]))
+t1 = Internal("t1", ("TENSOR_FLOAT32", [2]))
+t2 = Internal("t2", ("TENSOR_FLOAT32", [2]))
+t3 = Internal("t3", ("TENSOR_FLOAT32", [2]))
+t4 = Internal("t4", ("TENSOR_FLOAT32", [2]))
+t5 = Internal("t5", ("TENSOR_FLOAT32", [2]))
+t6 = Internal("t6", ("TENSOR_FLOAT32", [2]))
+
+o0 = Output("o0", ("TENSOR_FLOAT32", [2]))
+o1 = Output("o1", ("TENSOR_FLOAT32", [2]))
+o2 = Output("o2", ("TENSOR_FLOAT32", [2]))
+
+p0 = Parameter("p0", ("TENSOR_FLOAT32", [2]), [0.0, 1.0])
+act = Int32Scalar("act", 0)
+
+model.Operation("ADD", o0, o1, act).To(o2)
+model.Operation("ADD", p0, t5, act).To(t6)
+model.Operation("ADD", i2, t0, act).To(t1)
+model.Operation("ADD", i6, p0, act).To(t5)
+model.Operation("ADD", i0, i1, act).To(t0)
+model.Operation("ADD", t1, t3, act).To(t4)
+model.Operation("ADD", t2, i5, act).To(t3)
+model.Operation("ADD", t4, t6, act).To(o0)
+model.Operation("ADD", i3, i4, act).To(t2)
+model.Operation("ADD", i7, i8, act).To(o1)
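+# Note that the ADD operations above are deliberately listed out of dependency
+# order (the op producing o2 comes first, while t0 is only produced halfway
+# down), presumably to exercise the generator's handling of Internal operands;
+# the generated VTS model below emits them in topologically sorted order.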
+
+inputs = {
+ i0: [0, 0],
+ i1: [0, 0],
+ i2: [0, 0],
+ i3: [0, 0],
+ i4: [0, 0],
+ i5: [0, 0],
+ i6: [0, 0],
+ i7: [0, 0],
+ i8: [0, 0]
+}
+
+outputs = {
+ o0: [0, 2],
+ o1: [0, 0],
+ o2: [0, 2]
+}
+
+Example((inputs, outputs))
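+# Worked trace with the all-zero inputs, p0 = [0.0, 1.0] and act = 0 (NONE):
+#   t0 = i0+i1 = [0,0];  t1 = i2+t0 = [0,0];  t2 = i3+i4 = [0,0]
+#   t3 = t2+i5 = [0,0];  t4 = t1+t3 = [0,0]
+#   t5 = i6+p0 = [0,1];  t6 = p0+t5 = [0,2]
+#   o0 = t4+t6 = [0,2];  o1 = i7+i8 = [0,0];  o2 = o0+o1 = [0,2]
+# which matches the expected outputs declared above.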
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stdout.txt.expect
new file mode 100644
index 000000000..c656504a1
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_internal/stdout.txt.expect
@@ -0,0 +1,312 @@
+// clang-format off
+// Generated file (from: add_internal.mod.py). Do not edit
+// clang-format off
+// Generated file (from: add_internal.mod.py). Do not edit
+// Generated from: add_internal.mod.py.
+namespace add_internal {
+// Generated add_internal test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace add_internal
+
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 10,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::TEMPORARY_VARIABLE,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::TEMPORARY_VARIABLE,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::TEMPORARY_VARIABLE,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::TEMPORARY_VARIABLE,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::TEMPORARY_VARIABLE,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::TEMPORARY_VARIABLE,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::TEMPORARY_VARIABLE,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::ADD,
+ .inputs = {10, 4, 2},
+ .outputs = {5},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {4, 5, 2},
+ .outputs = {6},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {11, 12, 2},
+ .outputs = {8},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {7, 8, 2},
+ .outputs = {9},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {17, 18, 2},
+ .outputs = {15},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {15, 16, 2},
+ .outputs = {13},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {9, 13, 2},
+ .outputs = {14},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {14, 6, 2},
+ .outputs = {0},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {19, 20, 2},
+ .outputs = {1},
+ },
+ {
+ .type = OperationType::ADD,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {7, 10, 11, 12, 16, 17, 18, 19, 20};
+ const std::vector<uint32_t> outputIndexes = {0, 1, 3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 63
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
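+// Two details worth noting: operand 2 (the activation scalar) reports
+// numberOfConsumers = 10 because the single Int32Scalar feeds all ten ADD
+// operations, and operandValues decodes as int32 0 (act = NONE) followed by
+// the two little-endian floats of parameter p0, 0.0f and 1.0f
+// ({0, 0, 128, 63} == 0x3F800000 == 1.0f).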
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 0.0f}}, {1, {0.0f, 0.0f}}, {2, {0.0f, 0.0f}}, {3, {0.0f, 0.0f}}, {4, {0.0f, 0.0f}}, {5, {0.0f, 0.0f}}, {6, {0.0f, 0.0f}}, {7, {0.0f, 0.0f}}, {8, {0.0f, 0.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 2.0f}}, {1, {0.0f, 0.0f}}, {2, {0.0f, 2.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, add_internal) {
+ generated_tests::Execute(device,
+ add_internal::createTestModel,
+ add_internal::is_ignored,
+ add_internal::examples);
+}
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/conv_float.mod.py
new file mode 100644
index 000000000..61f7c92ee
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/conv_float.mod.py
@@ -0,0 +1,61 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model("model_name")
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{2, 2, 2, 2}", [1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [-200])
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+act = Int32Scalar("act", 0) # None activation
+layout = Int32Scalar("layout", 0) # NHWC
+pad = Int32Scalar("param", 1)
+stride0 = Int32Scalar("param1", 1)
+stride1 = Int32Scalar("param2", 1)
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad, stride0, stride1, act, layout).To(output)
+
+# Example 1. Input in operand 0.
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8]}
+
+output0 = {output: # output 0
+ [204, 120, 94, 104, 70, 164, 23, 112]}
+
+quant8 = DataTypeConverter(name="quantized").Identify({
+ i1: ("TENSOR_QUANT8_ASYMM", 0.5, 128),
+ f1: ("TENSOR_QUANT8_ASYMM", 0.25, 128),
+ b1: ("TENSOR_INT32", 0.125, 0),
+ output: ("TENSOR_QUANT8_ASYMM", 2, 100)
+})
+nchw = DataLayoutConverter("NCHW", name="nchw_layout").Identify([i1, f1, output], [layout])
+relu = ActivationConverter("relu", name="act").Identify([output], [act])
+relu6 = ActivationConverter("relu6").Identify([output], [act])
+weight_as_input = ParameterAsInputConverter(name="w_as_input").Identify([f1])
+relax = RelaxedModeConverter(True, name="float_relaxed")
+
+# Instantiate an example.
+# This will produce the Cartesian product of
+# [nhwc, nchw_layout] * [act, relu6] * [w_as_param, w_as_input] * [float, float_relaxed, quantized],
+# i.e. 24 variations in total.
+Example(
+ (input0, output0), name="example_name"
+).AddVariations(
+ nchw, defaultName="nhwc"
+).AddVariations(
+ relu, relu6, includeDefault=False
+).AddVariations(
+ weight_as_input, defaultName="w_as_param"
+).AddVariations(
+ relax, quant8, defaultName="float"
+)
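+# The generated symbols below appear to assemble these names as
+# <model>_<layout>_<activation>_<weight mode>_<datatype>, e.g.
+# createTestModel_model_name_nhwc_act_w_as_param_float, with the example name
+# spliced in as well for the examples_* vectors and TEST_F identifiers
+# (examples_model_name_example_name_nhwc_act_w_as_param_float).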
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stdout.txt.expect
new file mode 100644
index 000000000..892700036
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_naming/stdout.txt.expect
@@ -0,0 +1,3548 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// Generated from: conv_float.mod.py.
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+// Create the model
+Model createTestModel_model_name_nhwc_act_w_as_param_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_act_w_as_param_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_act_w_as_param_float,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_param_float,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_param_float);
+}
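+// Comparing this NHWC variant with the NCHW ones earlier: the data is the same
+// tensor in two layouts. The NHWC input {1,2,3,4,5,6,7,8} becomes
+// {1,3,5,7,2,4,6,8} in NCHW (channel plane 0 first, then plane 1), and the
+// expected outputs are permuted the same way ({204,120,94,104,70,164,23,112}
+// here vs {204,94,70,23,120,104,164,112} in the NCHW tests).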
+
+// Create the model
+Model createTestModel_model_name_nhwc_act_w_as_param_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_act_w_as_param_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_act_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_param_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_act_w_as_param_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
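+// The expected outputs are the float results requantized with the output
+// parameters (scale 2.0, zeroPoint 100): e.g. 204.0 / 2.0 + 100 = 202, and
+// 23.0 / 2.0 + 100 = 111.5, which rounds to 112.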
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_act_w_as_param_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_act_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_param_quantized);
+}
+
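+// In the _w_as_input variants that follow, the filter operand keeps
+// MODEL_INPUT lifetime and inputIndexes becomes {0, 1}: the weights are
+// supplied per execution instead of being baked into operandValues, leaving
+// only the bias and the scalar parameters as CONSTANT_COPY data.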
+// Create the model
+Model createTestModel_model_name_nhwc_act_w_as_input_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_act_w_as_input_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_act_w_as_input_float,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_input_float,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_input_float);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_act_w_as_input_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
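+ // The _relaxed variants additionally set this flag, which permits the
+ // driver to evaluate TENSOR_FLOAT32 operations with the range and
+ // precision of IEEE 754 binary16.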
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_act_w_as_input_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_act_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_input_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_act_w_as_input_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_act_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_act_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_act_w_as_input_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_act_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nhwc_act_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nhwc_act_w_as_input_quantized);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_relu6_w_as_param_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
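+ // This matches the _act variant except for the fused-activation scalar:
+ // the fourth INT32 in the tail below decodes to 3 (FuseCode RELU6) instead
+ // of 1 (RELU), so every expected output is clamped to 6.0.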
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_relu6_w_as_param_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_relu6_w_as_param_float,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_param_float,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_param_float);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_relu6_w_as_param_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_relu6_w_as_param_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_relu6_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_param_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_relu6_w_as_param_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
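+// Under RELU6 the real outputs saturate at 6.0, which requantizes to
+// 6.0 / 2.0 + 100 = 103 for every element.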
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_relu6_w_as_param_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_relu6_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_param_quantized);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_relu6_w_as_input_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_relu6_w_as_input_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_relu6_w_as_input_float,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_input_float,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_input_float);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_relu6_w_as_input_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_relu6_w_as_input_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_relu6_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_input_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nhwc_relu6_w_as_input_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nhwc_relu6_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nhwc_relu6_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nhwc_relu6_w_as_input_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nhwc_relu6_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nhwc_relu6_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nhwc_relu6_w_as_input_quantized);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_act_w_as_param_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
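+ // The _nchw_layout variants flip the trailing layout scalar to 1 and store
+ // the constant weights channels-first; likewise the example data: the NHWC
+ // input {1.0, ..., 8.0} appears here as {1, 3, 5, 7, 2, 4, 6, 8}.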
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_act_w_as_param_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_act_w_as_param_float,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_param_float,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_param_float);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_act_w_as_param_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_act_w_as_param_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_act_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_param_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_act_w_as_param_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_act_w_as_param_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_act_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_param_quantized);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_act_w_as_input_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_act_w_as_input_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_act_w_as_input_float,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_input_float,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_input_float);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_act_w_as_input_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_act_w_as_input_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_act_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_input_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_act_w_as_input_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_act_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_act_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_act_w_as_input_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_act_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_act_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_act_w_as_input_quantized);
+}
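
The quantized variants above reuse the float example data through NNAPI's affine quantization, real = scale * (q - zeroPoint). A minimal Python sketch (illustrative, not part of the generated file) that re-derives the QUANT8_ASYMM example data from the float example, using the scale/zeroPoint values declared in the operands:

    def quantize(values, scale, zero_point):
        # q = round(real / scale) + zeroPoint
        return [int(round(v / scale)) + zero_point for v in values]

    inp = [1.0, 3.0, 5.0, 7.0, 2.0, 4.0, 6.0, 8.0]               # NCHW input
    out = [204.0, 94.0, 70.0, 23.0, 120.0, 104.0, 164.0, 112.0]  # NCHW output

    print(quantize(inp, 0.5, 128))   # [130, 134, 138, 142, 132, 136, 140, 144]
    print(quantize(out, 2.0, 100))   # [202, 147, 135, 112, 160, 152, 182, 156]
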
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_relu6_w_as_param_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_param_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_param_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_relu6_w_as_param_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_relu6_w_as_param_float,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_param_float,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_param_float);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_relu6_w_as_param_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_param_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_param_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_relu6_w_as_param_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_relu6_w_as_param_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_param_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_param_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_relu6_w_as_param_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_param_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_param_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_relu6_w_as_param_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_relu6_w_as_param_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_param_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_param_quantized);
+}
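
The relu6 variants all saturate: every raw convolution result in this example exceeds 6, so the fused activation clamps all eight outputs to 6.0, which the quantized model then maps to a single code. A quick check (a sketch, assuming the scale = 2.0 and zeroPoint = 100 declared above):

    raw = [204.0, 94.0, 70.0, 23.0, 120.0, 104.0, 164.0, 112.0]
    clamped = [min(max(v, 0.0), 6.0) for v in raw]    # relu6 -> eight 6.0s
    q = [int(round(v / 2.0)) + 100 for v in clamped]  # -> eight 103s
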
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_relu6_w_as_input_float() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_input_float(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_input_float = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_relu6_w_as_input_float) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_relu6_w_as_input_float,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_input_float,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_input_float);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_relu6_w_as_input_float_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_input_float_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_input_float_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_relu6_w_as_input_float_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_relu6_w_as_input_float_relaxed,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_input_float_relaxed,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_input_float_relaxed);
+}
+
+// Create the model
+Model createTestModel_model_name_nchw_layout_relu6_w_as_input_quantized() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_model_name_nchw_layout_relu6_w_as_input_quantized(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_model_name_example_name_nchw_layout_relu6_w_as_input_quantized = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_model_name_example_name_nchw_layout_relu6_w_as_input_quantized) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_model_name_nchw_layout_relu6_w_as_input_quantized,
+ conv_float::is_ignored_model_name_nchw_layout_relu6_w_as_input_quantized,
+ conv_float::examples_model_name_example_name_nchw_layout_relu6_w_as_input_quantized);
+}
+
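
Throughout this generated file, operandValues is the little-endian CONSTANT_COPY byte pool that each operand's .location offset/length indexes into. A short sketch (illustrative, not part of the patch) decoding the constants that recur above:

    import struct

    struct.unpack('<f', bytes([0, 0, 72, 195]))       # (-200.0,) float bias
    struct.unpack('<i', bytes([192, 249, 255, 255]))  # (-1600,)  quantized bias
    # -1600 * 0.125 (the bias operand's scale) == -200.0
    struct.unpack('<i', bytes([3, 0, 0, 0]))          # (3,) fused activation: RELU6
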
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/conv_quant8.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/conv_quant8.mod.py
new file mode 100644
index 000000000..f6b3e89ca
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/conv_quant8.mod.py
@@ -0,0 +1,35 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 3, 3, 1}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{1, 2, 2, 1}", [.25, .25, .25, .25])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [0])
+pad0 = Int32Scalar("pad0", 0)
+act = Int32Scalar("act", 0)
+stride = Int32Scalar("stride", 1)
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad0, pad0, pad0, pad0, stride, stride, act).To(output)
+model = model.RelaxedExecution(True)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0]}
+
+output0 = {output: # output 0
+ [.875, .875, .875, .875]}
+
+# Instantiate an example
+Example((input0, output0))
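
The expected output is easy to derive by hand: with a 2x2 filter of 0.25s, zero bias, zero padding, and stride 1, each output element is 0.25 times a 2x2 window sum, and every window of this 3x3 input contains the 0.5 exactly once. A sketch of that arithmetic:

    inp = [[1.0, 1.0, 1.0],
           [1.0, 0.5, 1.0],
           [1.0, 1.0, 1.0]]
    out = [0.25 * (inp[r][c] + inp[r][c + 1] + inp[r + 1][c] + inp[r + 1][c + 1])
           for r in range(2) for c in range(2)]
    print(out)  # [0.875, 0.875, 0.875, 0.875] -- matches output0
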
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stdout.txt.expect
new file mode 100644
index 000000000..0df371ba0
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_quant8/stdout.txt.expect
@@ -0,0 +1,142 @@
+// clang-format off
+// Generated file (from: conv_quant8.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_quant8.mod.py). Do not edit
+// Generated from: conv_quant8.mod.py.
+namespace conv_quant8 {
+// Generated conv_quant8 test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_quant8
+
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 3, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 3, 3, 3, 4, 4, 5},
+ .outputs = {6},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {6};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 62, 0, 0, 128, 62, 0, 0, 128, 62, 0, 0, 128, 62, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 0.5f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.875f, 0.875f, 0.875f, 0.875f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_quant8) {
+ generated_tests::Execute(device,
+ conv_quant8::createTestModel,
+ conv_quant8::is_ignored,
+ conv_quant8::examples);
+}
+
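
Note how the generator deduplicates scalar constants here: pad0 is passed four times and stride twice in the CONV_2D inputs {0, 1, 2, 3, 3, 3, 3, 4, 4, 5}, which is why those operands report numberOfConsumers of 4 and 2. The 32-byte operandValues pool decodes the same way as before; for instance (a sketch, not part of the patch):

    import struct
    struct.unpack('<4f', bytes([0, 0, 128, 62] * 4))  # (0.25, 0.25, 0.25, 0.25) -- the filter
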
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/conv_float.mod.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/conv_float.mod.py
new file mode 100644
index 000000000..c95696203
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/conv_float.mod.py
@@ -0,0 +1,46 @@
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+f1 = Parameter("op2", "TENSOR_FLOAT32", "{2, 2, 2, 2}", [1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1])
+b1 = Parameter("op3", "TENSOR_FLOAT32", "{1}", [-200])
+output = Output("op4", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
+act = Int32Scalar("act", 0) # None activation
+layout = Int32Scalar("layout", 0) # NHWC
+pad = Int32Scalar("param", 1)
+stride0 = Int32Scalar("param1", 1)
+stride1 = Int32Scalar("param2", 1)
+
+model = model.Operation("CONV_2D", i1, f1, b1, pad, stride0, stride1, act, layout).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8]}
+
+output0 = {output: # output 0
+ [204, 120, 94, 104, 70, 164, 23, 112]}
+
+quant8 = DataTypeConverter().Identify({
+ i1: ("TENSOR_QUANT8_ASYMM", 0.5, 128),
+ f1: ("TENSOR_QUANT8_ASYMM", 0.25, 128),
+ b1: ("TENSOR_INT32", 0.125, 0),
+ output: ("TENSOR_QUANT8_ASYMM", 2, 100)
+})
+relu = ActivationConverter("relu").Identify([output], [act])
+relu6 = ActivationConverter("relu6").Identify([output], [act])
+
+# Instantiate an example
+Example((input0, output0)).AddNchw(i1, f1, output, layout).AddAllActivations(
+ output, act).AddInput(f1).AddVariations(RelaxedModeConverter(True), quant8)
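
AddNchw is what produces the *_nchw_layout_* variants seen earlier: it flips the layout scalar and transposes the example data from NHWC to NCHW. A minimal sketch of that transposition (not the generator's actual code) applied to this file's reference data:

    def nhwc_to_nchw(data, n, h, w, c):
        return [data[((b * h + y) * w + x) * c + ch]
                for b in range(n) for ch in range(c)
                for y in range(h) for x in range(w)]

    nhwc_to_nchw([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], 1, 2, 2, 2)
    # -> [1.0, 3.0, 5.0, 7.0, 2.0, 4.0, 6.0, 8.0]
    nhwc_to_nchw([204.0, 120.0, 94.0, 104.0, 70.0, 164.0, 23.0, 112.0], 1, 2, 2, 2)
    # -> [204.0, 94.0, 70.0, 23.0, 120.0, 104.0, 164.0, 112.0]
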
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stderr.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stderr.txt.expect
new file mode 100644
index 000000000..3decb4c1c
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stderr.txt.expect
@@ -0,0 +1,2 @@
+Output VTS model: -
+Output example:-
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stdout.txt.expect b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stdout.txt.expect
new file mode 100644
index 000000000..6ae6401bf
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/P_vts_variation/stdout.txt.expect
@@ -0,0 +1,7084 @@
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// clang-format off
+// Generated file (from: conv_float.mod.py). Do not edit
+// Generated from: conv_float.mod.py.
+namespace conv_float {
+// Generated conv_float test
+#include "-"
+// Generated model constructor
+#include "-"
+} // namespace conv_float
+
+// Create the model
+Model createTestModel_none() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_none(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_none) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_none,
+ conv_float::is_ignored_none,
+ conv_float::examples_none);
+}
+
+// Create the model
+Model createTestModel_none_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_none_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_none_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_none_relaxed,
+ conv_float::is_ignored_none_relaxed,
+ conv_float::examples_none_relaxed);
+}
+
+// Create the model
+Model createTestModel_none_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_none_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_none_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_none_quant8,
+ conv_float::is_ignored_none_quant8,
+ conv_float::examples_none_quant8);
+}
+
+// Create the model
+Model createTestModel_none_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_none_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_none_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_none_weight_as_input,
+ conv_float::is_ignored_none_weight_as_input,
+ conv_float::examples_none_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_none_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_none_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_none_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_none_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_none_weight_as_input_relaxed,
+ conv_float::is_ignored_none_weight_as_input_relaxed,
+ conv_float::examples_none_weight_as_input_relaxed);
+}
+
+// Create the model
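+// Quantized variant: real_value = scale * (quantized_value - zeroPoint). The
+// input uses scale 0.5 / zeroPoint 128, the filter 0.25 / 128, and the INT32
+// bias scale is their product (0.5 * 0.25 = 0.125), as NNAPI requires for
+// quantized CONV_2D.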
+Model createTestModel_none_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_none_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
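+// The example below is the float example quantized with the scales above:
+// input 130 -> (130 - 128) * 0.5 = 1.0, and expected output 202 -> (202 - 100)
+// * 2.0 = 204.0, matching the float results to within one quantization step.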
+std::vector<MixedTypedExample> examples_none_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_none_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_none_weight_as_input_quant8,
+ conv_float::is_ignored_none_weight_as_input_quant8,
+ conv_float::examples_none_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_relu() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
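+  // With the filter held as CONSTANT_COPY, operandValues begins with the 16
+  // filter floats (1.0f up to 8.0f, then mirrored back down), followed by the
+  // -200.0f bias at offset 64 and the INT32 scalars; the fourth scalar, 1,
+  // matches this variant's fused RELU activation.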
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu,
+ conv_float::is_ignored_relu,
+ conv_float::examples_relu);
+}
+
+// Create the model
+Model createTestModel_relu_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_relaxed,
+ conv_float::is_ignored_relu_relaxed,
+ conv_float::examples_relu_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_quant8,
+ conv_float::is_ignored_relu_quant8,
+ conv_float::examples_relu_quant8);
+}
+
+// Create the model
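+// weight_as_input variant: the filter's lifetime changes from CONSTANT_COPY to
+// MODEL_INPUT, so the weights arrive at execution time as example input 1 and
+// operandValues shrinks to just the bias plus the INT32 scalars.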
+Model createTestModel_relu_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_weight_as_input,
+ conv_float::is_ignored_relu_weight_as_input,
+ conv_float::examples_relu_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_relu_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 120.0f, 94.0f, 104.0f, 70.0f, 164.0f, 23.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_relu_weight_as_input_relaxed,
+ conv_float::examples_relu_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 160, 147, 152, 135, 182, 112, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu_weight_as_input_quant8,
+ conv_float::is_ignored_relu_weight_as_input_quant8,
+ conv_float::examples_relu_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_relu1() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu1(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
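+// The fused RELU1 clamps the raw convolution results (204.0f, 120.0f, ...) to
+// [-1, 1], so every expected output below saturates at 1.0f.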
+std::vector<MixedTypedExample> examples_relu1 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu1) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu1,
+ conv_float::is_ignored_relu1,
+ conv_float::examples_relu1);
+}
+
+// Create the model
+Model createTestModel_relu1_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu1_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu1_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu1_relaxed,
+ conv_float::is_ignored_relu1_relaxed,
+ conv_float::examples_relu1_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu1_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu1_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
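+// The clamped 1.0f float output quantizes to 1.0f / 2.0f + 100 = 100.5, which
+// the generator rounds to the all-100 expectations below.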
+std::vector<MixedTypedExample> examples_relu1_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu1_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu1_quant8,
+ conv_float::is_ignored_relu1_quant8,
+ conv_float::examples_relu1_quant8);
+}
+
+// Create the model
+Model createTestModel_relu1_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu1_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu1_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu1_weight_as_input,
+ conv_float::is_ignored_relu1_weight_as_input,
+ conv_float::examples_relu1_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_relu1_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu1_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu1_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu1_weight_as_input_relaxed,
+ conv_float::is_ignored_relu1_weight_as_input_relaxed,
+ conv_float::examples_relu1_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu1_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu1_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu1_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu1_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu1_weight_as_input_quant8,
+ conv_float::is_ignored_relu1_weight_as_input_quant8,
+ conv_float::examples_relu1_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_relu6() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
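+// The fused RELU6 clamps the raw convolution results to [0, 6], so every
+// expected output below saturates at 6.0f.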
+std::vector<MixedTypedExample> examples_relu6 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6,
+ conv_float::is_ignored_relu6,
+ conv_float::examples_relu6);
+}
+
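+// The _relaxed variants below are byte-for-byte identical models except that
+// relaxComputationFloat32toFloat16 = true, which allows the driver to evaluate
+// TENSOR_FLOAT32 operations at float16 precision.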
+// Create the model
+Model createTestModel_relu6_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 224, 64, 0, 0, 192, 64, 0, 0, 160, 64, 0, 0, 128, 64, 0, 0, 64, 64, 0, 0, 0, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_relaxed,
+ conv_float::is_ignored_relu6_relaxed,
+ conv_float::examples_relu6_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu6_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
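+ // Quantized blob: 16 filter bytes (scale 0.25, zeroPoint 128, so 132 == 1.0),
+ // then the bias as int32 -1600 (scale 0.125, i.e. -200.0), then the same five
+ // scalars as the float model: padding = 1, strides = 1, activation = 3, layout = 0.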
+ std::vector<uint8_t> operandValues = {
+ 132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
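+// Quantized form of the float example: real inputs 1..8 encode as v / 0.5 + 128
+// (130..144), and the expected 6.0 outputs encode as 6.0 / 2.0 + 100 = 103.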
+std::vector<MixedTypedExample> examples_relu6_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_quant8,
+ conv_float::is_ignored_relu6_quant8,
+ conv_float::examples_relu6_quant8);
+}
+
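+// The _weight_as_input variants move the filter out of CONSTANT_COPY and make
+// it a second model input (inputIndexes = {0, 1}), so the examples supply the
+// filter data at execution time.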
+// Create the model
+Model createTestModel_relu6_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
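+ // Only the bias (-200.0f) and the five CONV_2D scalars remain in the blob;
+ // the filter now arrives through input 1.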
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_weight_as_input,
+ conv_float::is_ignored_relu6_weight_as_input,
+ conv_float::examples_relu6_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_relu6_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f}}, {1, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 8.0f, 7.0f, 6.0f, 5.0f, 4.0f, 3.0f, 2.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_relu6_weight_as_input_relaxed,
+ conv_float::examples_relu6_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_relu6_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_relu6_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 132, 134, 136, 138, 140, 142, 144}}, {1, {132, 136, 140, 144, 148, 152, 156, 160, 160, 156, 152, 148, 144, 140, 136, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_relu6_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_relu6_weight_as_input_quant8,
+ conv_float::examples_relu6_weight_as_input_quant8);
+}
+
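+// The _nchw variants pass layout = 1 (NCHW) as the last CONV_2D input and
+// reorder both the constant filter and the example tensors channel-first.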
+// Create the model
+Model createTestModel_nchw_none() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
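+ // Same filter and bias as the NHWC models, with the 16 filter floats stored in
+ // NCHW order {1,3,5,7,2,4,6,8, 8,6,4,2,7,5,3,1}; the trailing scalars are
+ // padding = 1, strides = 1, activation = 0 (NONE), layout = 1 (NCHW).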
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_none(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
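+// The example data is the NHWC input {1,2,...,8} transposed to NCHW:
+// channel 0 holds {1,3,5,7} and channel 1 holds {2,4,6,8}.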
+std::vector<MixedTypedExample> examples_nchw_none = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_none) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_none,
+ conv_float::is_ignored_nchw_none,
+ conv_float::examples_nchw_none);
+}
+
+// Create the model
+Model createTestModel_nchw_none_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_none_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_none_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_none_relaxed,
+ conv_float::is_ignored_nchw_none_relaxed,
+ conv_float::examples_nchw_none_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_none_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_none_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_none_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_none_quant8,
+ conv_float::is_ignored_nchw_none_quant8,
+ conv_float::examples_nchw_none_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_none_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_none_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_weight_as_input = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_none_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_none_weight_as_input,
+ conv_float::is_ignored_nchw_none_weight_as_input,
+ conv_float::examples_nchw_none_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_nchw_none_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_none_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_weight_as_input_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_none_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_none_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_none_weight_as_input_relaxed,
+ conv_float::examples_nchw_none_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_none_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_none_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_none_weight_as_input_quant8 = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_none_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_none_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_none_weight_as_input_quant8,
+ conv_float::examples_nchw_none_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu,
+ conv_float::is_ignored_nchw_relu,
+ conv_float::examples_nchw_relu);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_relaxed = {
+// Beginning of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_relaxed,
+ conv_float::is_ignored_nchw_relu_relaxed,
+ conv_float::examples_nchw_relu_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_quant8,
+ conv_float::is_ignored_nchw_relu_quant8,
+ conv_float::examples_nchw_relu_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
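+ // operandValues: bytes {0, 0, 72, 195} decode to the little-endian float
+ // -200.0f (bias), followed by the five int32 scalar parameters.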
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_weight_as_input,
+ conv_float::is_ignored_nchw_relu_weight_as_input,
+ conv_float::examples_nchw_relu_weight_as_input);
+}
+
+// Create the model
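+// Identical to the weight-as-input model above, plus
+// relaxComputationFloat32toFloat16 = true, which allows the driver to compute
+// in FP16 rather than FP32.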
+Model createTestModel_nchw_relu_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {204.0f, 94.0f, 70.0f, 23.0f, 120.0f, 104.0f, 164.0f, 112.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
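+ // operandValues: 0xFFFFF9C0 is the int32 bias -1600, i.e. -200.0f quantized
+ // at bias scale 0.125, followed by the five int32 scalar parameters.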
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {202, 147, 135, 112, 160, 152, 182, 156}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu_weight_as_input_quant8,
+ conv_float::examples_nchw_relu_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu1() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
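+ // operandValues: 16 little-endian floats (the 2x2x2x2 filter), the -200.0f
+ // bias, then five int32 scalars; the fourth scalar is 2 (FuseCode RELU1).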
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu1(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
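+// RELU1 clamps the convolution result to [-1.0, 1.0], so every expected
+// output element here is 1.0f.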
+std::vector<MixedTypedExample> examples_nchw_relu1 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu1) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu1,
+ conv_float::is_ignored_nchw_relu1,
+ conv_float::examples_nchw_relu1);
+}
+
+// Create the model
+Model createTestModel_nchw_relu1_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu1_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu1_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu1_relaxed,
+ conv_float::is_ignored_nchw_relu1_relaxed,
+ conv_float::examples_nchw_relu1_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu1_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu1_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu1_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu1_quant8,
+ conv_float::is_ignored_nchw_relu1_quant8,
+ conv_float::examples_nchw_relu1_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu1_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu1_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu1_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu1_weight_as_input,
+ conv_float::is_ignored_nchw_relu1_weight_as_input,
+ conv_float::examples_nchw_relu1_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_nchw_relu1_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu1_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu1_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu1_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu1_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu1_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu1_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu1_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu1_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {100, 100, 100, 100, 100, 100, 100, 100}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu1_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu1_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu1_weight_as_input_quant8,
+ conv_float::examples_nchw_relu1_weight_as_input_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
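+ // Same operandValues layout as the relu1 model, except the activation scalar
+ // is 3 (FuseCode RELU6).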
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu6(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
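+// RELU6 clamps the convolution result to [0.0, 6.0], so every expected output
+// element here is 6.0f.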
+std::vector<MixedTypedExample> examples_nchw_relu6 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6,
+ conv_float::is_ignored_nchw_relu6,
+ conv_float::examples_nchw_relu6);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 68, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 72, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 76, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 64, 64, 0, 0, 160, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 128, 64, 0, 0, 192, 64, 0, 0, 0, 65, 0, 0, 0, 65, 0, 0, 192, 64, 0, 0, 128, 64, 0, 0, 0, 64, 0, 0, 224, 64, 0, 0, 160, 64, 0, 0, 64, 64, 0, 0, 128, 63, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu6_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_relaxed,
+ conv_float::is_ignored_nchw_relu6_relaxed,
+ conv_float::examples_nchw_relu6_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132, 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu6_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
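+// Quantized expected output: the real value 6.0 at scale 2.0 with zeroPoint
+// 100 maps to 100 + 6.0 / 2.0 = 103.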
+std::vector<MixedTypedExample> examples_nchw_relu6_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_quant8,
+ conv_float::is_ignored_nchw_relu6_quant8,
+ conv_float::examples_nchw_relu6_quant8);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_weight_as_input() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu6_weight_as_input(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_weight_as_input) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_weight_as_input,
+ conv_float::is_ignored_nchw_relu6_weight_as_input,
+ conv_float::examples_nchw_relu6_weight_as_input);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_weight_as_input_relaxed() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_relaxed(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_relaxed = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f}}, {1, {1.0f, 3.0f, 5.0f, 7.0f, 2.0f, 4.0f, 6.0f, 8.0f, 8.0f, 6.0f, 4.0f, 2.0f, 7.0f, 5.0f, 3.0f, 1.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
+};
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_weight_as_input_relaxed) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_weight_as_input_relaxed,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_relaxed,
+ conv_float::examples_nchw_relu6_weight_as_input_relaxed);
+}
+
+// Create the model
+Model createTestModel_nchw_relu6_weight_as_input_quant8() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.5f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {2, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.25f,
+ .zeroPoint = 128,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.125f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 2.0f,
+ .zeroPoint = 100,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 4, 5, 6, 7},
+ .outputs = {8},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {8};
+ std::vector<uint8_t> operandValues = {
+ 192, 249, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ };
+}
+
+bool is_ignored_nchw_relu6_weight_as_input_quant8(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
+
+std::vector<MixedTypedExample> examples_nchw_relu6_weight_as_input_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {130, 134, 138, 142, 132, 136, 140, 144}}, {1, {132, 140, 148, 156, 136, 144, 152, 160, 160, 152, 144, 136, 156, 148, 140, 132}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {103, 103, 103, 103, 103, 103, 103, 103}}}
+}
+}, // End of an example
+};
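+
+// Note: with input scale 0.5 and zeroPoint 128, the quantized inputs above
+// dequantize to the same values as the float variant (e.g. 130 -> (130 - 128) * 0.5
+// = 1.0), and the expected outputs dequantize to (103 - 100) * 2.0 = 6.0f.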
+
+TEST_F(NeuralnetworksHidlTest, conv_float_nchw_relu6_weight_as_input_quant8) {
+ generated_tests::Execute(device,
+ conv_float::createTestModel_nchw_relu6_weight_as_input_quant8,
+ conv_float::is_ignored_nchw_relu6_weight_as_input_quant8,
+ conv_float::examples_nchw_relu6_weight_as_input_quant8);
+}
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/tests/test.py b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/test.py
new file mode 100644
index 000000000..248fae3a4
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/tests/test.py
@@ -0,0 +1,328 @@
+#!/usr/bin/python3
+
+# Copyright 2017, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""NN Model Test Compiler Test.
+
+Runs subdirectories of tests for the test generator/compiler.
+"""
+
+import filecmp
+import glob
+import os
+import re
+import shutil
+import subprocess
+import sys
+import unittest
+
+
+__author__ = 'Android'
+
+
+DOTTED_LINE = '................'
+
+class OrigFile:
+ OrigDir = None
+
+class TestGeneratorTests(unittest.TestCase):
+ """Class to contain all the unittest test cases.
+
+ Tests will be dynamically added to this class as methods.
+ No static tests, so this class is initially empty.
+
+ """
+ pass
+
+
+def GenerateTests(dir_name):
+ """Creates a test method that can be added as method to GenerateTests."""
+ cwd = os.getcwd()
+ def Test(self):
+ os.chdir(cwd)
+ ExecTest(dir_name, self)
+ return Test
+
+
+def AddUnitTests(test_dirs):
+ """Adds a test to Tests for each directory in test_dirs."""
+
+ for t in test_dirs:
+ # Must start with 'test_' according to unittest
+ test_name = 'test_%s' % t
+ test = GenerateTests(t)
+ # Add test as method to TestGeneratorTests with test_name as method name
+ setattr(TestGeneratorTests, test_name, test)
+
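+# For example, a test directory named 'P_conv' would end up registered roughly
+# as
+#   setattr(TestGeneratorTests, 'test_P_conv', GenerateTests('P_conv'))
+# (the directory name here is illustrative).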
+
+class Options(object):
+ verbose = 0
+ cleanup = 1
+ update_cts = 0
+ zero_return = 0
+
+
+def CompareFiles(actual, expect):
+ """Compares actual and expect for equality."""
+ if not os.path.isfile(actual):
+ if Options.verbose:
+      print('Could not find %s' % actual)
+ return False
+ if not os.path.isfile(expect):
+ if Options.verbose:
+      print('Could not find %s' % expect)
+ return False
+
+  return filecmp.cmp(actual, expect, shallow=False)
+
+
+def CopyIfDifferent(src, dst):
+ """Updates dst if it is different from src."""
+ if not CompareFiles(src, dst):
+ if Options.verbose:
+      print('Copying from %s to %s' % (src, dst))
+ shutil.copyfile(src, dst)
+
+
+def GetCommandLineArgs(filename):
+ """Extracts command line arguments from first comment line in a file."""
+  with open(filename, 'r') as f:
+    line = f.readline()
+  if line.startswith('//'):
+    return line[2:].strip()
+  else:
+    return ''
+
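+# For instance, a spec file whose first line is
+#   // -m my_model.model.cpp
+# contributes '-m my_model.model.cpp' to the generator invocation (the file
+# name here is illustrative).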
+
+def ReadFileToStr(filename):
+ """Returns contents of file as a str."""
+ with open(filename, 'r') as f:
+ return f.read()
+
+
+def ReportIfDifferFromExpected(tests, name, file1, file2):
+ """Fails tests if file1 and file2 differ."""
+ if not CompareFiles(file1, file2):
+ if Options.verbose:
+ err_message = ('%s is different:\n'
+ 'expected:\n%s\n%s%s\n\n'
+ 'actual:\n%s\n%s%s\n') % (
+ name,
+ DOTTED_LINE, ReadFileToStr(file1), DOTTED_LINE,
+ DOTTED_LINE, ReadFileToStr(file2), DOTTED_LINE)
+ else:
+ err_message = '%s is different' % name
+ tests.fail(err_message)
+
+
+def GetRSFiles():
+ """Returns a list of files in cwd with extension '.rs' or '.fs'."""
+ rs_files = glob.glob('*.mod.py')
+ rs_files.sort()
+ return rs_files
+
+
+def GetOutDir():
+ return os.path.abspath(os.path.join(OrigFile.OrigDir, "../"))
+
+
+# Cache slot for GetOutDir results. The path is derived directly from
+# OrigFile.OrigDir, so no subprocess call is needed and the cache stays unused.
+GetOutDir.cache = None
+
+
+def CreateCmd(run_vts):
+ """Creates the test command to run for the current test."""
+  cmd_string = '%s/%s_generator.py' % (GetOutDir(), "vts" if run_vts else "cts")
+ base_args = cmd_string.split()
+ rs_files = GetRSFiles()
+
+  # Extra command line arguments can be placed as // comments at the start of
+  # any .mod.py file. We automatically bundle up all of these extra args and
+  # pass them to the generator.
+ extra_args_str = ''
+ for rs_file in rs_files:
+ extra_args_str += GetCommandLineArgs(rs_file)
+ extra_args = extra_args_str.split()
+
+ args = base_args + extra_args + rs_files
+ return args
+
+def Cleanup():
+ """Cleans up the cwd of any tmp files created in current test."""
+ try:
+ os.remove('stdout.txt')
+ os.remove('stderr.txt')
+ shutil.rmtree('tmp/')
+ except OSError:
+ pass
+
+
+def CheckTestResult(dir_name, subprocess_ret, tests, args):
+ """Checks the result of the subprocess command to see if it passed/failed.
+
+ If dir_name starts with 'F_', then subprocess is expected to fail.
+ If it instead succeeded, then this test is failed.
+ Vice versa with a dir_name starting with 'P_'.
+
+ Args:
+ dir_name: name of current directory/test name
+ subprocess_ret: return code of subprocess
+ tests: unittest, call tests.fail(reason) when failure
+ args: the arguments for the command that was run
+ """
+ if dir_name[0:2] == 'F_':
+ if subprocess_ret == 0:
+ if Options.verbose:
+ err_message = ('Command (%s) passed on invalid input\n'
+ 'stdout:\n%s\n%s%s\n') % (
+ ' '.join(args),
+ DOTTED_LINE, ReadFileToStr('stdout.txt'), DOTTED_LINE
+ )
+ else:
+ err_message = 'Command passed on invalid input'
+ tests.fail(err_message)
+ elif dir_name[0:2] == 'P_':
+ if subprocess_ret != 0:
+ if Options.verbose:
+ err_message = ('Command (%s) failed on valid input\n'
+ 'stderr:\n%s\n%s%s\n') % (
+ ' '.join(args),
+ DOTTED_LINE, ReadFileToStr('stderr.txt'), DOTTED_LINE
+ )
+ else:
+ err_message = 'Command failed on valid input'
+ tests.fail(err_message)
+ else:
+ tests.fail('Invalid test name: ' + dir_name +
+ ', should start with F_ or P_')
+
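+# For example, a directory named 'F_missing_output' is expected to make the
+# generator exit with a non-zero code, while one named 'P_conv' is expected to
+# exit with 0 (both names are illustrative).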
+
+
+def ExecTest(dir_name, tests):
+ """Executes an test generator test from dir_name."""
+
+ os.chdir(dir_name)
+ stdout_file = open('stdout.txt', 'w+')
+ stderr_file = open('stderr.txt', 'w+')
+ run_vts = (dir_name[2:5] == 'vts')
+ args = CreateCmd(run_vts)
+
+ if Options.verbose > 1:
+    print('Executing:', ' '.join(args))
+
+ # Execute the command and check the resulting shell return value.
+ # All tests that are expected to FAIL have directory names that
+ # start with 'F_'. Other tests that are expected to PASS have
+ # directory names that start with 'P_'.
+ ret = 0
+ try:
+ ret = subprocess.call(args, stdout=stdout_file, stderr=stderr_file)
+ except OSError:
+ tests.fail('subprocess.call failed: ' + ' '.join(args))
+
+ stdout_file.close()
+ stderr_file.close()
+
+ CheckTestResult(dir_name, ret, tests, args)
+
+ ReportIfDifferFromExpected(tests, 'stdout', 'stdout.txt.expect', 'stdout.txt')
+ ReportIfDifferFromExpected(tests, 'stderr', 'stderr.txt.expect', 'stderr.txt')
+
+ if Options.cleanup:
+ Cleanup()
+
+
+def Usage():
+  """Prints usage information."""
+  print(('Usage: %s [OPTION]... [TESTNAME]...\n'
+         'Test harness for the NN model test generator\n'
+         'Runs TESTNAMEs (all tests by default)\n'
+         'Available Options:\n'
+         '  -h, --help          Help message\n'
+         '  -n, --no-cleanup    Don\'t clean up after running tests\n'
+         '  -v, --verbose       Verbose output. Enter multiple -v to get more verbose.\n'
+         '  -z, --zero-return   Return 0 as exit code even if tests fail. Required for TreeHugger.\n'
+         ) % sys.argv[0])
+  return
+
+
+def main():
+ """Runs the unittest suite.
+
+ Parses command line arguments, adds test directories as tests.
+
+ Returns:
+ 0 if '-z' flag is set.
+ Else unittest.main() returns with its own error code.
+ """
+
+ OrigFile.OrigDir = os.path.dirname(os.path.abspath(__file__))
+ # Chdir to the directory this file is in since tests are in this directory
+ os.chdir(OrigFile.OrigDir)
+ files = []
+ for arg in sys.argv[1:]:
+ if arg in ('-h', '--help'):
+ Usage()
+ return 0
+ elif arg in ('-n', '--no-cleanup'):
+ Options.cleanup = 0
+ elif arg in ('-u', '--update-cts'):
+ Options.update_cts = 1
+ elif arg in ('-v', '--verbose'):
+ Options.verbose += 1
+ elif arg in ('-z', '--zero-return'):
+ Options.zero_return = 1
+ else:
+ # Test list to run
+ if os.path.isdir(arg):
+ files.append(arg)
+ else:
+        print('Invalid test or option: %s' % arg, file=sys.stderr)
+ return 1
+
+ if not files:
+ file_names = os.listdir('.')
+ # Test names must start with 'F_' or 'P_'
+ # 'F_' tests are expected to fail
+ # 'P_' tests are expected to pass
+ for f in file_names:
+ if os.path.isdir(f) and (f[0:2] == 'F_' or f[0:2] == 'P_'):
+ files.append(f)
+ files.sort()
+
+ AddUnitTests(files)
+
+ # verbosity=2 is necessary for PythonUnitTestRunner to parse the results
+ # Otherwise verbosity does not matter
+ # If Options.zero_return is set, do not let unittest.main() exit
+ # This is necessary in TreeHugger to distinguish between failing tests and
+ # failing to execute the python script
+ # If Options.zero_return is not set, let unittest.main() exit
+ # In this case it will return a non-zero code if any tests fail
+ unittest_exit = Options.zero_return == 0
+ unittest.main(verbosity=2,
+ argv=[sys.argv[0]] + ['TestGeneratorTests'],
+ exit=unittest_exit)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
+
diff --git a/tests/nnapi/nnapi_test_generator/android-q-beta/vts_generator.py b/tests/nnapi/nnapi_test_generator/android-q-beta/vts_generator.py
new file mode 100644
index 000000000..2aa8731a0
--- /dev/null
+++ b/tests/nnapi/nnapi_test_generator/android-q-beta/vts_generator.py
@@ -0,0 +1,361 @@
+#!/usr/bin/python3
+
+# Copyright 2017, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""VTS testcase generator
+
+Implements VTS test backend. Shares most logic with the CTS test
+generator. Invoked by ml/nn/runtime/test/specs/generate_vts_tests.sh;
+See that script for details on how this script is used.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import argparse
+from functools import reduce
+import math
+import numpy as np
+import os
+import re
+import struct
+import contextlib
+import pprint
+
+# Stuff from test generator
+import test_generator as tg
+from test_generator import ActivationConverter
+from test_generator import BoolScalar
+from test_generator import Configuration
+from test_generator import DataTypeConverter
+from test_generator import DataLayoutConverter
+from test_generator import Example
+from test_generator import Float16Scalar
+from test_generator import Float32Scalar
+from test_generator import Float32Vector
+from test_generator import IgnoredOutput
+from test_generator import Input
+from test_generator import Int32Scalar
+from test_generator import Int32Vector
+from test_generator import Internal
+from test_generator import Model
+from test_generator import Operand
+from test_generator import Output
+from test_generator import Parameter
+from test_generator import ParameterAsInputConverter
+from test_generator import RelaxedModeConverter
+from test_generator import SmartOpen
+from test_generator import SymmPerChannelQuantParams
+
+# Dumping methods that are shared with the CTS generator
+from cts_generator import DumpCtsIsIgnored
+
+
+# TODO: Make this part of tg.Configuration?
+target_hal_version = None
+
+
+# Take a model from command line
+def ParseCmdLine():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("spec", help="the spec file")
+ parser.add_argument(
+ "-m", "--model", help="the output model file", default="-")
+ parser.add_argument(
+ "-t", "--test", help="the output test file", default="-")
+ parser.add_argument(
+ "--target_hal_version",
+ help="the HAL version of the output",
+ required=True,
+ choices=["V1_0", "V1_1", "V1_2"])
+ args = parser.parse_args()
+ example = "-" # VTS generator does not generate examples. See cts_generator.py.
+ tg.FileNames.InitializeFileLists(
+ args.spec, args.model, example, args.test)
+ global target_hal_version
+ target_hal_version = args.target_hal_version
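+
+# Example invocation (file names illustrative; see generate_vts_tests.sh for
+# actual usage):
+#   vts_generator.py conv_float.mod.py -m GeneratedModel.cpp \
+#     -t GeneratedTests.cpp --target_hal_version V1_2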
+
+# Generate operands in VTS format
+def generate_vts_operands(model):
+ # Dump operand definitions
+ op_def = """\
+ {{
+ .type = OperandType::{operand_type},
+ .dimensions = {shape},
+ .numberOfConsumers = {no_consumers},
+ .scale = {scale},
+ .zeroPoint = {zero_point},
+ .lifetime = OperandLifeTime::{lifetime},
+ .location = {{.poolIndex = 0, .offset = {offset}, .length = {length}}},{extraParams}
+ }}"""
+ offset = 0
+ op_definitions = []
+ extra_params_definitions = []
+ for index, o in enumerate(model.operands):
+ length = o.type.GetByteSize() if isinstance(o, Parameter) else 0
+ add_extra_params = o.type.extraParams is not None and not o.type.extraParams.hide
+ op = {
+ "operand_type": o.type.type,
+ "shape": o.type.GetDimensionsString(),
+ "no_consumers": len(o.outs),
+ "scale": tg.PrettyPrintAsFloat(o.type.scale),
+ "zero_point": str(int(o.type.zeroPoint)),
+ "lifetime": o.lifetime,
+ "offset": offset if isinstance(o, Parameter) else 0,
+ "length": length,
+ "extraParams": "" if not add_extra_params else "\n .extraParams = std::move(extraParams%d)," % (index,),
+ }
+ offset += length
+ op_definitions.append(op_def.format(**op))
+
+ extra_params_def = """\
+ Operand::ExtraParams extraParams{index};
+ extraParams{index}.{setMethodName}({param});
+"""
+
+ if add_extra_params:
+ ep = o.type.extraParams
+ op = {
+ "index": index,
+ "setMethodName": ep.GetVtsSetter(),
+ "param": ep.GetVtsConstructor(),
+ }
+ extra_params_definitions.append(extra_params_def.format(**op))
+
+ op_vec = """{0}\
+ const std::vector<Operand> operands = {{
+{1}
+ }};""".format(",\n".join(extra_params_definitions), ",\n".join(op_definitions))
+ return op_vec
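+
+# Note: Parameter operands are laid out back to back in operandValues, so the
+# offsets above accumulate; e.g. two consecutive INT32 parameters occupy
+# offsets 0 and 4, with length 4 each.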
+
+# Generate VTS operand values
+def generate_vts_operand_values(operands):
+ weights = [o for o in operands if isinstance(o, Parameter)]
+ binit = []
+ for w in weights:
+ ty = w.type.type
+ if ty == "TENSOR_QUANT8_ASYMM":
+ binit += w.value
+ elif ty == "TENSOR_QUANT8_SYMM_PER_CHANNEL" or ty == "TENSOR_QUANT8_SYMM":
+ binit += [struct.pack("b", value)[0] for value in w.value]
+ elif ty == "BOOL" or ty == "TENSOR_BOOL8":
+ binit += [1 if x else 0 for x in w.value]
+ elif ty == "TENSOR_FLOAT16" or ty == "FLOAT16":
+ for f in w.value:
+ # The pack format for float16 is not available until Python 3.6.
+ binit += [int(x) for x in np.float16(f).tostring()]
+ elif ty in {"TENSOR_FLOAT32", "FLOAT32", "TENSOR_INT32", "INT32", "TENSOR_QUANT16_ASYMM"}:
+ if ty in ["TENSOR_FLOAT32", "FLOAT32"]:
+ fmt = "f"
+ elif ty in ["TENSOR_INT32", "INT32"]:
+ fmt = "i"
+ elif ty == "TENSOR_QUANT16_ASYMM":
+ fmt = "H"
+ for f in w.value:
+ binit += [int(x) for x in struct.pack(fmt, f)]
+ else:
+      assert False, "Unsupported VTS operand type"
+
+  init_defs = ", ".join([str(x) for x in binit])
+  if init_defs != "":
+    init_defs = "\n %s\n " % init_defs
+  byte_vec_fmt = """{%s}""" % init_defs
+ return byte_vec_fmt
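+
+# For instance, a TENSOR_FLOAT32 parameter holding [1.0] contributes
+# struct.pack("f", 1.0) == b'\x00\x00\x80?', i.e. the bytes [0, 0, 128, 63].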
+
+# Generate VTS operations
+def generate_vts_operation(op, model):
+ op_fmt = """\
+ {{
+ .type = OperationType::{op_code},
+ .inputs = {{{ins}}},
+ .outputs = {{{outs}}},
+ }}"""
+ op_content = {
+ 'op_code': op.optype,
+ 'ins': tg.GetJointStr(model.GetIndexOfOperands(op.ins)),
+ 'outs': tg.GetJointStr(model.GetIndexOfOperands(op.outs))
+ }
+ return op_fmt.format(**op_content)
+
+def generate_vts_operations(model):
+ vts_ops = [generate_vts_operation(op, model) for op in model.operations]
+ return ",\n".join(vts_ops)
+
+def generate_vts_model(model, model_file):
+ operand_values_fmt = ""
+ if Configuration.useSHM():
+ # Boilerplate code for passing weights in shared memory
+ operand_values_fmt = """\
+ std::vector<uint8_t> operandValues = {{}};
+ const uint8_t data[] = {operand_values};
+
+ // Allocate segment of android shared memory, wrapped in hidl_memory.
+ // This object will be automatically freed when sharedMemory is destroyed.
+ hidl_memory sharedMemory = ::android::nn::allocateSharedMemory(sizeof(data));
+
+ // Mmap ashmem into usable address and hold it within the mappedMemory object.
+ // MappedMemory will automatically munmap the memory when it is destroyed.
+ sp<::android::hidl::memory::V1_0::IMemory> mappedMemory = mapMemory(sharedMemory);
+
+ if (mappedMemory != nullptr) {{
+ // Retrieve the mmapped pointer.
+ uint8_t* mappedPointer =
+ static_cast<uint8_t*>(static_cast<void*>(mappedMemory->getPointer()));
+
+ if (mappedPointer != nullptr) {{
+ // Acquire the write lock for the shared memory segment, upload the data,
+ // and release the lock.
+ mappedMemory->update();
+ std::copy(data, data + sizeof(data), mappedPointer);
+ mappedMemory->commit();
+ }}
+ }}
+
+ const std::vector<hidl_memory> pools = {{sharedMemory}};
+"""
+ else:
+ # Passing weights via operandValues
+ operand_values_fmt = """\
+ std::vector<uint8_t> operandValues = {operand_values};
+ const std::vector<hidl_memory> pools = {{}};
+"""
+
+ operand_values_val = {
+ 'operand_values': generate_vts_operand_values(model.operands)
+ }
+ operand_values = operand_values_fmt.format(**operand_values_val)
+ model_fmt = """\
+// Create the model
+Model {create_test_model_name}() {{
+{operand_decls}
+
+ const std::vector<Operation> operations = {{
+{operations}
+ }};
+
+ const std::vector<uint32_t> inputIndexes = {{{input_indices}}};
+ const std::vector<uint32_t> outputIndexes = {{{output_indices}}};
+{operand_values}
+ return {{
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,{relaxed_field}
+ }};
+}}
+"""
+ model_dict = {
+ "hal_version": target_hal_version,
+ "create_test_model_name": str(model.createTestFunctionName),
+ "operations": generate_vts_operations(model),
+ "operand_decls": generate_vts_operands(model),
+ "operand_values": operand_values,
+ "output_indices": tg.GetJointStr(model.GetOutputsIndex()),
+ "input_indices": tg.GetJointStr(model.GetInputsIndex()),
+ "relaxed_field":
+ "\n .relaxComputationFloat32toFloat16 = true," if (model.isRelaxed) else ""
+ }
+  print(model_fmt.format(**model_dict), file=model_file)
+
+def generate_vts(model, model_file):
+ assert model.compiled
+ # Do not generate DynamicOutputShapeTest for pre-1.2 VTS.
+ if model.hasDynamicOutputShape and target_hal_version < "V1_2":
+ return
+ namespace = "android::hardware::neuralnetworks::{hal_version}::generated_tests::{spec_name}".format(spec_name=tg.FileNames.specName, hal_version=target_hal_version)
+ print("namespace {namespace} {{\n".format(namespace=namespace), file=model_file)
+ generate_vts_model(model, model_file)
+ DumpCtsIsIgnored(model, model_file)
+ print("}} // namespace {namespace}".format(namespace=namespace), file=model_file)
+
+def generate_vts_test(example, test_file):
+ # Do not generate DynamicOutputShapeTest for pre-1.2 VTS.
+ if example.model.hasDynamicOutputShape and target_hal_version < "V1_2":
+ return
+
+ generated_vts_namespace = "android::hardware::neuralnetworks::{hal_version}::generated_tests::{spec_name}".format(spec_name=tg.FileNames.specName, hal_version=target_hal_version)
+ generated_cts_namespace = "generated_tests::{spec_name}".format(spec_name=tg.FileNames.specName)
+ testTemplate = """\
+namespace {generated_cts_namespace} {{
+
+std::vector<::test_helper::MixedTypedExample>& get_{examples_name}();
+
+}} // namespace {generated_cts_namespace}
+
+namespace {generated_vts_namespace} {{
+
+Model {create_model_name}();
+bool {is_ignored_name}(int);
+"""
+
+ if not example.expectFailure:
+ testTemplate += """
+TEST_F({test_case_name}, {test_name}) {{
+ Execute(device,
+ {create_model_name},
+ {is_ignored_name},
+ ::{generated_cts_namespace}::get_{examples_name}(){test_dynamic_output_shape});
+}}
+"""
+
+ testTemplate += """
+TEST_F(ValidationTest, {test_name}) {{
+ const Model model = {create_model_name}();
+ const std::vector<Request> requests = createRequests(::{generated_cts_namespace}::get_{examples_name}());
+  {validation_method}(model, requests);
+}}
+
+}} // namespace {generated_vts_namespace}
+"""
+
+ print(testTemplate.format(
+ test_case_name="DynamicOutputShapeTest" if example.model.hasDynamicOutputShape \
+ else "NeuralnetworksHidlTest",
+ test_name=str(example.testName),
+ generated_vts_namespace=generated_vts_namespace,
+ generated_cts_namespace=generated_cts_namespace,
+ hal_version=target_hal_version,
+ create_model_name=str(example.model.createTestFunctionName),
+ is_ignored_name=str(example.model.isIgnoredFunctionName),
+ examples_name=str(example.examplesName),
+ test_dynamic_output_shape=", true" if example.model.hasDynamicOutputShape else "",
+ validation_method="validateFailure" if example.expectFailure else "validateEverything",
+    ), file=test_file)
+
+def InitializeFiles(model_fd, test_fd):
+ specFileBase = os.path.basename(tg.FileNames.specFile)
+ fileHeader = """\
+// Generated from {spec_file}
+// DO NOT EDIT
+// clang-format off
+#include "GeneratedTests.h"
+""".format(spec_file=specFileBase)
+ print(fileHeader, file=model_fd)
+ print(fileHeader, file=test_fd)
+
+if __name__ == "__main__":
+ ParseCmdLine()
+ while tg.FileNames.NextFile():
+ print("Generating VTS tests from %s" % tg.FileNames.specFile)
+    exec(open(tg.FileNames.specFile, "r").read())
+ with SmartOpen(tg.FileNames.modelFile) as model_fd, \
+ SmartOpen(tg.FileNames.testFile) as test_fd:
+ InitializeFiles(model_fd, test_fd)
+ Example.DumpAllExamples(
+ DumpModel=generate_vts, model_fd=model_fd,
+ DumpTest=generate_vts_test, test_fd=test_fd)
diff --git a/tests/nnapi/specs/Ex/argmax_ex_float_1.mod.py b/tests/nnapi/specs/Ex/argmax_ex_float_1.mod.py
index e2255e004..dc29fb358 100644
--- a/tests/nnapi/specs/Ex/argmax_ex_float_1.mod.py
+++ b/tests/nnapi/specs/Ex/argmax_ex_float_1.mod.py
@@ -1,7 +1,7 @@
model = Model()
i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [1])
-output = Output("output", "TENSOR_INT32", "{1, 1, 2, 1}")
+output = Output("output", "TENSOR_INT32", "{1, 2, 1}")
model = model.Operation("ARGMAX_EX", i1, axis).To(output)
diff --git a/tests/nnapi/specs/Ex/argmax_ex_float_2.mod.py b/tests/nnapi/specs/Ex/argmax_ex_float_2.mod.py
index 6f06bfd0b..8b6ed565f 100644
--- a/tests/nnapi/specs/Ex/argmax_ex_float_2.mod.py
+++ b/tests/nnapi/specs/Ex/argmax_ex_float_2.mod.py
@@ -1,7 +1,7 @@
model = Model()
i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [3])
-output = Output("output", "TENSOR_INT32", "{1, 2, 2, 1}")
+output = Output("output", "TENSOR_INT32", "{1, 2, 2}")
model = model.Operation("ARGMAX_EX", i1, axis).To(output)
diff --git a/tests/nnapi/specs/Ex/argmax_ex_int32.mod.py b/tests/nnapi/specs/Ex/argmax_ex_int32.mod.py
index f7f98afe2..b5d01dd85 100644
--- a/tests/nnapi/specs/Ex/argmax_ex_int32.mod.py
+++ b/tests/nnapi/specs/Ex/argmax_ex_int32.mod.py
@@ -1,7 +1,7 @@
model = Model()
i1 = Input("input", "TENSOR_INT32", "{1, 2, 2, 1}")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [1])
-output = Output("output", "TENSOR_INT32", "{1, 1, 2, 1}")
+output = Output("output", "TENSOR_INT32", "{1, 2, 1}")
model = model.Operation("ARGMAX_EX", i1, axis).To(output)
diff --git a/tests/nnapi/specs/Ex/argmax_ex_neg_axis_float.mod.py b/tests/nnapi/specs/Ex/argmax_ex_neg_axis_float.mod.py
index b29cf1eb6..977cac061 100644
--- a/tests/nnapi/specs/Ex/argmax_ex_neg_axis_float.mod.py
+++ b/tests/nnapi/specs/Ex/argmax_ex_neg_axis_float.mod.py
@@ -1,7 +1,7 @@
model = Model()
i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 4, 1}")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [-3])
-output = Output("output", "TENSOR_INT32", "{1, 1, 4, 1}")
+output = Output("output", "TENSOR_INT32", "{1, 4, 1}")
model = model.Operation("ARGMAX_EX", i1, axis).To(output)
diff --git a/tests/nnapi/specs/Ex/argmax_ex_neg_axis_int32.mod.py b/tests/nnapi/specs/Ex/argmax_ex_neg_axis_int32.mod.py
index ddc27b35a..9f448e08d 100644
--- a/tests/nnapi/specs/Ex/argmax_ex_neg_axis_int32.mod.py
+++ b/tests/nnapi/specs/Ex/argmax_ex_neg_axis_int32.mod.py
@@ -1,7 +1,7 @@
model = Model()
i1 = Input("input", "TENSOR_INT32", "{1, 2, 4, 1}")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [-3])
-output = Output("output", "TENSOR_INT32", "{1, 1, 4, 1}")
+output = Output("output", "TENSOR_INT32", "{1, 4, 1}")
model = model.Operation("ARGMAX_EX", i1, axis).To(output)
diff --git a/tests/nnapi/specs/Ex/argmax_ex_quant8.mod.py b/tests/nnapi/specs/Ex/argmax_ex_quant8.mod.py
index fd3c5230b..c3131ef91 100644
--- a/tests/nnapi/specs/Ex/argmax_ex_quant8.mod.py
+++ b/tests/nnapi/specs/Ex/argmax_ex_quant8.mod.py
@@ -1,7 +1,7 @@
model = Model()
i1 = Input("input", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 1}, 0.5f, 2")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [1])
-output = Output("output", "TENSOR_INT32", "{1, 1, 2, 1}")
+output = Output("output", "TENSOR_INT32", "{1, 2, 1}")
model = model.Operation("ARGMAX_EX", i1, axis).To(output)
diff --git a/tests/nnapi/specs/Ex/argmax_ex_quant8_neg_axis.mod.py b/tests/nnapi/specs/Ex/argmax_ex_quant8_neg_axis.mod.py
index c7ddc0e5d..9146d8f14 100644
--- a/tests/nnapi/specs/Ex/argmax_ex_quant8_neg_axis.mod.py
+++ b/tests/nnapi/specs/Ex/argmax_ex_quant8_neg_axis.mod.py
@@ -1,7 +1,7 @@
model = Model()
i1 = Input("input", "TENSOR_QUANT8_ASYMM", "{1, 2, 4, 1}, 0.5f, 5")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [-3])
-output = Output("output", "TENSOR_INT32", "{1, 1, 4, 1}")
+output = Output("output", "TENSOR_INT32", "{1, 4, 1}")
model = model.Operation("ARGMAX_EX", i1, axis).To(output)
diff --git a/tests/nnapi/specs/Ex/gather_ex_2D_2D_float_1.mod.py b/tests/nnapi/specs/Ex/gather_ex_2D_2D_float_1.mod.py
new file mode 100644
index 000000000..9edc80deb
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_2D_2D_float_1.mod.py
@@ -0,0 +1,22 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{3,4}") # a vector of 12 float32s
+i2 = Input("op2", "TENSOR_INT32", "{1,2}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 0)
+i3 = Output("op3", "TENSOR_FLOAT32", "{1,2,4}") # a vector of 8 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789],
+ i2: # input 1
+ [1, 0]}
+
+output0 = {i3: # output 0
+ [7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/gather_ex_2D_2D_float_2.mod.py b/tests/nnapi/specs/Ex/gather_ex_2D_2D_float_2.mod.py
new file mode 100644
index 000000000..85fd43466
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_2D_2D_float_2.mod.py
@@ -0,0 +1,23 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{3,4}") # a vector of 12 float32s
+i2 = Input("op2", "TENSOR_INT32", "{1,2}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 1)
+i3 = Output("op3", "TENSOR_FLOAT32", "{3,1,2}") # a vector of 6 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789],
+ i2: # input 1
+ [1, 0]}
+
+output0 = {i3: # output 0
+ [4.123456789123456789, 3.123456789123456789,
+ 8.123456789123456789, 7.123456789123456789,
+ 18.123456789123456789, 2.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/gather_ex_2D_3D_float_1.mod.py b/tests/nnapi/specs/Ex/gather_ex_2D_3D_float_1.mod.py
new file mode 100644
index 000000000..652016af5
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_2D_3D_float_1.mod.py
@@ -0,0 +1,22 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{3,4}") # a vector of 12 float32s
+i2 = Input("op2", "TENSOR_INT32", "{1,1,2}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 0)
+i3 = Output("op3", "TENSOR_FLOAT32", "{1,1,2,4}") # a vector of 8 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789],
+ i2: # input 1
+ [1, 0]}
+
+output0 = {i3: # output 0
+ [7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/gather_ex_2D_3D_float_2.mod.py b/tests/nnapi/specs/Ex/gather_ex_2D_3D_float_2.mod.py
new file mode 100644
index 000000000..850d330b9
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_2D_3D_float_2.mod.py
@@ -0,0 +1,23 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{3,4}") # a vector of 12 float32s
+i2 = Input("op2", "TENSOR_INT32", "{1,2,1}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 1)
+i3 = Output("op3", "TENSOR_FLOAT32", "{3,1,2,1}") # a vector of 6 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789],
+ i2: # input 1
+ [1, 0]}
+
+output0 = {i3: # output 0
+ [4.123456789123456789, 3.123456789123456789,
+ 8.123456789123456789, 7.123456789123456789,
+ 18.123456789123456789, 2.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_1.mod.py b/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_1.mod.py
new file mode 100644
index 000000000..09db60294
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_1.mod.py
@@ -0,0 +1,29 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{2,3,4}") # a vector of 24 float32s
+i2 = Input("op2", "TENSOR_INT32", "{1,2}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 0)
+i3 = Output("op3", "TENSOR_FLOAT32", "{1,2,3,4}") # a vector of 24 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 13.123456789123456789, 14.123456789123456789, 15.123456789123456789, 16.123456789123456789,
+ 17.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 12.123456789123456789, 28.123456789123456789, 29.123456789123456789, 21.123456789123456789],
+ i2: # input 1
+ [1, 0]}
+
+output0 = {i3: # output 0
+ [13.123456789123456789, 14.123456789123456789, 15.123456789123456789, 16.123456789123456789,
+ 17.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 12.123456789123456789, 28.123456789123456789, 29.123456789123456789, 21.123456789123456789,
+ 3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_2.mod.py b/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_2.mod.py
new file mode 100644
index 000000000..bfdc83c58
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_2.mod.py
@@ -0,0 +1,29 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{2,3,4}") # a vector of 24 float32s
+i2 = Input("op2", "TENSOR_INT32", "{2,1}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 0)
+i3 = Output("op3", "TENSOR_FLOAT32", "{2,1,3,4}") # a vector of 24 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 13.123456789123456789, 14.123456789123456789, 15.123456789123456789, 16.123456789123456789,
+ 17.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 12.123456789123456789, 28.123456789123456789, 29.123456789123456789, 21.123456789123456789],
+ i2: # input 1
+ [0, 0]}
+
+output0 = {i3: # output 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_3.mod.py b/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_3.mod.py
new file mode 100644
index 000000000..cfcd1b479
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_3D_2D_float_3.mod.py
@@ -0,0 +1,26 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{2,3,4}") # a vector of 24 float32s
+i2 = Input("op2", "TENSOR_INT32", "{2,1}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 2)
+i3 = Output("op3", "TENSOR_FLOAT32", "{2,3,2,1}") # a vector of 12 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3.123456789123456789, 4.123456789123456789, 5.123456789123456789, 6.123456789123456789,
+ 7.123456789123456789, 8.123456789123456789, 9.123456789123456789, 1.123456789123456789,
+ 2.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 13.123456789123456789, 14.123456789123456789, 15.123456789123456789, 16.123456789123456789,
+ 17.123456789123456789, 18.123456789123456789, 19.123456789123456789, 11.123456789123456789,
+ 12.123456789123456789, 28.123456789123456789, 29.123456789123456789, 21.123456789123456789],
+ i2: # input 1
+ [3, 1]}
+
+output0 = {i3: # output 0
+ [6.123456789123456789, 4.123456789123456789, 1.123456789123456789, 8.123456789123456789,
+ 11.123456789123456789, 18.123456789123456789, 16.123456789123456789, 14.123456789123456789,
+ 11.123456789123456789, 18.123456789123456789, 21.123456789123456789, 28.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/gather_ex_4D_float.mod.py b/tests/nnapi/specs/Ex/gather_ex_4D_float.mod.py
new file mode 100644
index 000000000..38fa8d015
--- /dev/null
+++ b/tests/nnapi/specs/Ex/gather_ex_4D_float.mod.py
@@ -0,0 +1,41 @@
+# This test case exercises the gather operator with a 4-D input.
+# The input shape is [1,2,3,4] and the test gathers along axis 0 using the
+# indices [0,0]. Because index 0 is selected twice, the output shape becomes
+# [2,2,3,4]: the single slice of the input along axis 0 is duplicated.
+
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1,2,3,4}") # a vector of 24 float32s
+i2 = Input("op2", "TENSOR_INT32", "{2}") # another vector of 2 int32s
+axis = Int32Scalar("axis", 0)
+i3 = Output("op3", "TENSOR_FLOAT32", "{2,2,3,4}") # a vector of 48 float32s
+model = model.Operation("GATHER_EX", i1, i2, axis).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1.123456789123456789, 2.123456789123456789, 3.123456789123456789, 4.123456789123456789,
+ 5.123456789123456789, 6.123456789123456789, 7.123456789123456789, 8.123456789123456789,
+ 9.123456789123456789, 10.123456789123456789, 11.123456789123456789, 12.123456789123456789,
+ 13.123456789123456789, 14.123456789123456789, 15.123456789123456789, 16.123456789123456789,
+ 17.123456789123456789, 18.123456789123456789, 19.123456789123456789, 20.123456789123456789,
+ 21.123456789123456789, 22.123456789123456789, 23.123456789123456789, 24.123456789123456789],
+ i2: # input 1
+ [0, 0]}
+
+output0 = {i3: # output 0
+ [1.123456789123456789, 2.123456789123456789, 3.123456789123456789, 4.123456789123456789,
+ 5.123456789123456789, 6.123456789123456789, 7.123456789123456789, 8.123456789123456789,
+ 9.123456789123456789, 10.123456789123456789, 11.123456789123456789, 12.123456789123456789,
+ 13.123456789123456789, 14.123456789123456789, 15.123456789123456789, 16.123456789123456789,
+ 17.123456789123456789, 18.123456789123456789, 19.123456789123456789, 20.123456789123456789,
+ 21.123456789123456789, 22.123456789123456789, 23.123456789123456789, 24.123456789123456789,
+ 1.123456789123456789, 2.123456789123456789, 3.123456789123456789, 4.123456789123456789,
+ 5.123456789123456789, 6.123456789123456789, 7.123456789123456789, 8.123456789123456789,
+ 9.123456789123456789, 10.123456789123456789, 11.123456789123456789, 12.123456789123456789,
+ 13.123456789123456789, 14.123456789123456789, 15.123456789123456789, 16.123456789123456789,
+ 17.123456789123456789, 18.123456789123456789, 19.123456789123456789, 20.123456789123456789,
+ 21.123456789123456789, 22.123456789123456789, 23.123456789123456789, 24.123456789123456789]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/greater_equal_ex.mod.py b/tests/nnapi/specs/Ex/greater_equal_ex.mod.py
new file mode 100644
index 000000000..7c62d568b
--- /dev/null
+++ b/tests/nnapi/specs/Ex/greater_equal_ex.mod.py
@@ -0,0 +1,35 @@
+#
+# Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{2, 1}")
+i2 = Input("op2", "TENSOR_FLOAT32", "{2}")
+i3 = Output("op3", "TENSOR_QUANT8_ASYMM", "{2, 2}, 1.0, 0")
+model = model.Operation("GREATER_EQUAL_EX", i1, i2).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [5, 10],
+ i2: # input 1
+ [10, 5]}
+
+output0 = {i3: # output 0
+ [0, 255, 255, 255]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/less_ex.mod.py b/tests/nnapi/specs/Ex/less_ex.mod.py
new file mode 100644
index 000000000..3ae15b62f
--- /dev/null
+++ b/tests/nnapi/specs/Ex/less_ex.mod.py
@@ -0,0 +1,35 @@
+#
+# Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{2, 1}")
+i2 = Input("op2", "TENSOR_FLOAT32", "{2}")
+i3 = Output("op3", "TENSOR_QUANT8_ASYMM", "{2, 2}, 1.0, 0")
+model = model.Operation("LESS_EX", i1, i2).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [5, 10],
+ i2: # input 1
+ [10, 5]}
+
+output0 = {i3: # output 0
+ [255, 0, 0, 0]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_2D_float_1.mod.py b/tests/nnapi/specs/Ex/pack_ex_2D_float_1.mod.py
new file mode 100644
index 000000000..06f6e6a7b
--- /dev/null
+++ b/tests/nnapi/specs/Ex/pack_ex_2D_float_1.mod.py
@@ -0,0 +1,54 @@
+# Sample Stack or Pack model
+model = Model()
+i1 = Input("input1", "TENSOR_FLOAT32", "{6, 4}")
+i2 = Input("input2", "TENSOR_FLOAT32", "{6, 4}")
+i3 = Input("input3", "TENSOR_FLOAT32", "{6, 4}")
+num = Int32Scalar("num_tensors", 3)
+axis = Int32Scalar("axis", 0)
+out = Output("output", "TENSOR_FLOAT32", "{3, 6, 4}")
+model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
+
+input0 = {i1: # input 0
+ [0.3, 1.0, 2.0, 3.0,
+ 4.0, 5.5, 6.3, 7.2,
+ 8.22, 9.8, 10.3, 11.0,
+ 12.22, 13.2, 14.44, 15.32,
+ 16.55, 17.33, 18.1, 19.0,
+ 20.32, 21.9, 22.1, 23.22],
+ i2: # input 1
+ [24.22, 25.1, 26.0, 27.12,
+ 28.32, 29.11, 30.0, 31.98,
+ 32.99, 33.11, 34.1, 35.123,
+ 36.21, 37.22, 38.23, 39.76,
+ 40.1, 41.43, 42.34, 43.1,
+ 44.123, 45.43, 46.1, 47.1],
+ i3: # input 2
+ [48.0, 49.76, 50.0, 51.1,
+ 52.22, 53.12, 54.1, 55.5,
+ 56.5, 57.4, 58.1, 59.23,
+ 60.2, 61.12, 62.11, 63.34,
+ 64.11, 65.1, 66.43, 67.1,
+ 68.1, 69.34, 70.11, 71.45]}
+
+output0 = {out: # output 0
+ [0.3, 1.0, 2.0, 3.0,
+ 4.0, 5.5, 6.3, 7.2,
+ 8.22, 9.8, 10.3, 11.0,
+ 12.22, 13.2, 14.44, 15.32,
+ 16.55, 17.33, 18.1, 19.0,
+ 20.32, 21.9, 22.1, 23.22,
+ 24.22, 25.1, 26.0, 27.12,
+ 28.32, 29.11, 30.0, 31.98,
+ 32.99, 33.11, 34.1, 35.123,
+ 36.21, 37.22, 38.23, 39.76,
+ 40.1, 41.43, 42.34, 43.1,
+ 44.123, 45.43, 46.1, 47.1,
+ 48.0, 49.76, 50.0, 51.1,
+ 52.22, 53.12, 54.1, 55.5,
+ 56.5, 57.4, 58.1, 59.23,
+ 60.2, 61.12, 62.11, 63.34,
+ 64.11, 65.1, 66.43, 67.1,
+ 68.1, 69.34, 70.11, 71.45]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_2D_float_2.mod.py b/tests/nnapi/specs/Ex/pack_ex_2D_float_2.mod.py
new file mode 100644
index 000000000..acc8c891f
--- /dev/null
+++ b/tests/nnapi/specs/Ex/pack_ex_2D_float_2.mod.py
@@ -0,0 +1,54 @@
+# Sample Stack or Pack model
+model = Model()
+i1 = Input("input1", "TENSOR_FLOAT32", "{6, 4}")
+i2 = Input("input2", "TENSOR_FLOAT32", "{6, 4}")
+i3 = Input("input3", "TENSOR_FLOAT32", "{6, 4}")
+num = Int32Scalar("num_tensors", 3)
+axis = Int32Scalar("axis", 1)
+out = Output("output", "TENSOR_FLOAT32", "{6, 3, 4}")
+model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
+
+input0 = {i1: # input 0
+ [0.3, 1.0, 2.0, 3.0,
+ 4.0, 5.5, 6.3, 7.2,
+ 8.22, 9.8, 10.3, 11.0,
+ 12.22, 13.2, 14.44, 15.32,
+ 16.55, 17.33, 18.1, 19.0,
+ 20.32, 21.9, 22.1, 23.22],
+ i2: # input 1
+ [24.22, 25.1, 26.0, 27.12,
+ 28.32, 29.11, 30.0, 31.98,
+ 32.99, 33.11, 34.1, 35.123,
+ 36.21, 37.22, 38.23, 39.76,
+ 40.1, 41.43, 42.34, 43.1,
+ 44.123, 45.43, 46.1, 47.1],
+ i3: # input 2
+ [48.0, 49.76, 50.0, 51.1,
+ 52.22, 53.12, 54.1, 55.5,
+ 56.5, 57.4, 58.1, 59.23,
+ 60.2, 61.12, 62.11, 63.34,
+ 64.11, 65.1, 66.43, 67.1,
+ 68.1, 69.34, 70.11, 71.45]}
+
+output0 = {out: # output 0
+ [0.3, 1.0, 2.0, 3.0,
+ 24.22, 25.1, 26.0, 27.12,
+ 48.0, 49.76, 50.0, 51.1,
+ 4.0, 5.5, 6.3, 7.2,
+ 28.32, 29.11, 30.0, 31.98,
+ 52.22, 53.12, 54.1, 55.5,
+ 8.22, 9.8, 10.3, 11.0,
+ 32.99, 33.11, 34.1, 35.123,
+ 56.5, 57.4, 58.1, 59.23,
+ 12.22, 13.2, 14.44, 15.32,
+ 36.21, 37.22, 38.23, 39.76,
+ 60.2, 61.12, 62.11, 63.34,
+ 16.55, 17.33, 18.1, 19.0,
+ 40.1, 41.43, 42.34, 43.1,
+ 64.11, 65.1, 66.43, 67.1,
+ 20.32, 21.9, 22.1, 23.22,
+ 44.123, 45.43, 46.1, 47.1,
+ 68.1, 69.34, 70.11, 71.45]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_2D_int_1.mod.py b/tests/nnapi/specs/Ex/pack_ex_2D_int_1.mod.py
new file mode 100644
index 000000000..0ffd6cf9d
--- /dev/null
+++ b/tests/nnapi/specs/Ex/pack_ex_2D_int_1.mod.py
@@ -0,0 +1,25 @@
+# Sample Stack or Pack model
+model = Model()
+i1 = Input("input1", "TENSOR_INT32", "{6, 4}")
+i2 = Input("input2", "TENSOR_INT32", "{6, 4}")
+i3 = Input("input3", "TENSOR_INT32", "{6, 4}")
+num = Int32Scalar("num_tensors", 3)
+axis = Int32Scalar("axis", 0)
+out = Output("output", "TENSOR_INT32", "{3, 6, 4}")
+model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
+
+input0 = {i1: # input 0
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],
+ i2: # input 1
+ [24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47],
+ i3: # input 2
+ [48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71]}
+
+output0 = {out: # output 0
+ [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
+ 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_2D_int_2.mod.py b/tests/nnapi/specs/Ex/pack_ex_2D_int_2.mod.py
new file mode 100644
index 000000000..c9bdc419b
--- /dev/null
+++ b/tests/nnapi/specs/Ex/pack_ex_2D_int_2.mod.py
@@ -0,0 +1,25 @@
+# Sample Stack or Pack model
+model = Model()
+i1 = Input("input1", "TENSOR_INT32", "{6, 4}")
+i2 = Input("input2", "TENSOR_INT32", "{6, 4}")
+i3 = Input("input3", "TENSOR_INT32", "{6, 4}")
+num = Int32Scalar("num_tensors", 3)
+axis = Int32Scalar("axis", 1)
+out = Output("output", "TENSOR_INT32", "{6, 3, 4}")
+model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
+
+input0 = {i1: # input 0
+ [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],
+ i2: # input 1
+ [24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47],
+ i3: # input 2
+ [48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71]}
+
+output0 = {out: # output 0
+ [ 0, 1, 2, 3, 24, 25, 26, 27, 48, 49, 50, 51, 4, 5, 6, 7, 28, 29,
+ 30, 31, 52, 53, 54, 55, 8, 9, 10, 11, 32, 33, 34, 35, 56, 57, 58, 59,
+ 12, 13, 14, 15, 36, 37, 38, 39, 60, 61, 62, 63, 16, 17, 18, 19, 40, 41,
+ 42, 43, 64, 65, 66, 67, 20, 21, 22, 23, 44, 45, 46, 47, 68, 69, 70, 71]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_3D_float_1.mod.py b/tests/nnapi/specs/Ex/pack_ex_3D_float_1.mod.py
deleted file mode 100644
index 3f3ea683f..000000000
--- a/tests/nnapi/specs/Ex/pack_ex_3D_float_1.mod.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Sample Stack or Pack model
-model = Model()
-i1 = Input("input1", "TENSOR_FLOAT32", "{2, 3, 4}")
-i2 = Input("input2", "TENSOR_FLOAT32", "{2, 3, 4}")
-i3 = Input("input3", "TENSOR_FLOAT32", "{2, 3, 4}")
-num = Int32Scalar("num_tensors", 3)
-axis = Int32Scalar("axis", 0)
-out = Output("output", "TENSOR_FLOAT32", "{3, 2, 3, 4}")
-model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
-
-input0 = {i1: # input 0
- [0.3, 1.0, 2.0, 3.0,
- 4.0, 5.5, 6.3, 7.2,
- 8.22, 9.8, 10.3, 11.0,
- 12.22, 13.2, 14.44, 15.32,
- 16.55, 17.33, 18.1, 19.0,
- 20.32, 21.9, 22.1, 23.22],
- i2: # input 1
- [24.22, 25.1, 26.0, 27.12,
- 28.32, 29.11, 30.0, 31.98,
- 32.99, 33.11, 34.1, 35.123,
- 36.21, 37.22, 38.23, 39.76,
- 40.1, 41.43, 42.34, 43.1,
- 44.123, 45.43, 46.1, 47.1],
- i3: # input 2
- [48.0, 49.76, 50.0, 51.1,
- 52.22, 53.12, 54.1, 55.5,
- 56.5, 57.4, 58.1, 59.23,
- 60.2, 61.12, 62.11, 63.34,
- 64.11, 65.1, 66.43, 67.1,
- 68.1, 69.34, 70.11, 71.45]}
-
-output0 = {out: # output 0
- [0.3, 1.0, 2.0, 3.0,
- 4.0, 5.5, 6.3, 7.2,
- 8.22, 9.8, 10.3, 11.0,
- 12.22, 13.2, 14.44, 15.32,
- 16.55, 17.33, 18.1, 19.0,
- 20.32, 21.9, 22.1, 23.22,
- 24.22, 25.1, 26.0, 27.12,
- 28.32, 29.11, 30.0, 31.98,
- 32.99, 33.11, 34.1, 35.123,
- 36.21, 37.22, 38.23, 39.76,
- 40.1, 41.43, 42.34, 43.1,
- 44.123, 45.43, 46.1, 47.1,
- 48.0, 49.76, 50.0, 51.1,
- 52.22, 53.12, 54.1, 55.5,
- 56.5, 57.4, 58.1, 59.23,
- 60.2, 61.12, 62.11, 63.34,
- 64.11, 65.1, 66.43, 67.1,
- 68.1, 69.34, 70.11, 71.45]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_3D_float_2.mod.py b/tests/nnapi/specs/Ex/pack_ex_3D_float_2.mod.py
deleted file mode 100644
index d0caa02cc..000000000
--- a/tests/nnapi/specs/Ex/pack_ex_3D_float_2.mod.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Sample Stack or Pack model
-model = Model()
-i1 = Input("input1", "TENSOR_FLOAT32", "{2, 3, 4}")
-i2 = Input("input2", "TENSOR_FLOAT32", "{2, 3, 4}")
-i3 = Input("input3", "TENSOR_FLOAT32", "{2, 3, 4}")
-num = Int32Scalar("num_tensors", 3)
-axis = Int32Scalar("axis", 2)
-out = Output("output", "TENSOR_FLOAT32", "{2, 3, 3, 4}")
-model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
-
-input0 = {i1: # input 0
- [0.3, 1.0, 2.0, 3.0,
- 4.0, 5.5, 6.3, 7.2,
- 8.22, 9.8, 10.3, 11.0,
- 12.22, 13.2, 14.44, 15.32,
- 16.55, 17.33, 18.1, 19.0,
- 20.32, 21.9, 22.1, 23.22],
- i2: # input 1
- [24.22, 25.1, 26.0, 27.12,
- 28.32, 29.11, 30.0, 31.98,
- 32.99, 33.11, 34.1, 35.123,
- 36.21, 37.22, 38.23, 39.76,
- 40.1, 41.43, 42.34, 43.1,
- 44.123, 45.43, 46.1, 47.1],
- i3: # input 2
- [48.0, 49.76, 50.0, 51.1,
- 52.22, 53.12, 54.1, 55.5,
- 56.5, 57.4, 58.1, 59.23,
- 60.2, 61.12, 62.11, 63.34,
- 64.11, 65.1, 66.43, 67.1,
- 68.1, 69.34, 70.11, 71.45]}
-
-output0 = {out: # output 0
- [0.3, 1.0, 2.0, 3.0,
- 24.22, 25.1, 26.0, 27.12,
- 48.0, 49.76, 50.0, 51.1,
- 4.0, 5.5, 6.3, 7.2,
- 28.32, 29.11, 30.0, 31.98,
- 52.22, 53.12, 54.1, 55.5,
- 8.22, 9.8, 10.3, 11.0,
- 32.99, 33.11, 34.1, 35.123,
- 56.5, 57.4, 58.1, 59.23,
- 12.22, 13.2, 14.44, 15.32,
- 36.21, 37.22, 38.23, 39.76,
- 60.2, 61.12, 62.11, 63.34,
- 16.55, 17.33, 18.1, 19.0,
- 40.1, 41.43, 42.34, 43.1,
- 64.11, 65.1, 66.43, 67.1,
- 20.32, 21.9, 22.1, 23.22,
- 44.123, 45.43, 46.1, 47.1,
- 68.1, 69.34, 70.11, 71.45]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_3D_int_1.mod.py b/tests/nnapi/specs/Ex/pack_ex_3D_int_1.mod.py
deleted file mode 100644
index 23b365957..000000000
--- a/tests/nnapi/specs/Ex/pack_ex_3D_int_1.mod.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Sample Stack or Pack model
-model = Model()
-i1 = Input("input1", "TENSOR_INT32", "{2, 3, 4}")
-i2 = Input("input2", "TENSOR_INT32", "{2, 3, 4}")
-i3 = Input("input3", "TENSOR_INT32", "{2, 3, 4}")
-num = Int32Scalar("num_tensors", 3)
-axis = Int32Scalar("axis", 0)
-out = Output("output", "TENSOR_INT32", "{3, 2, 3, 4}")
-model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
-
-input0 = {i1: # input 0
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],
- i2: # input 1
- [24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47],
- i3: # input 2
- [48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71]}
-
-output0 = {out: # output 0
- [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/pack_ex_3D_int_2.mod.py b/tests/nnapi/specs/Ex/pack_ex_3D_int_2.mod.py
deleted file mode 100644
index 747fcc628..000000000
--- a/tests/nnapi/specs/Ex/pack_ex_3D_int_2.mod.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Sample Stack or Pack model
-model = Model()
-i1 = Input("input1", "TENSOR_INT32", "{2, 3, 4}")
-i2 = Input("input2", "TENSOR_INT32", "{2, 3, 4}")
-i3 = Input("input3", "TENSOR_INT32", "{2, 3, 4}")
-num = Int32Scalar("num_tensors", 3)
-axis = Int32Scalar("axis", 2)
-out = Output("output", "TENSOR_INT32", "{2, 3, 3, 4}")
-model = model.Operation("PACK_EX", i1, i2, i3, num, axis).To(out)
-
-input0 = {i1: # input 0
- [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],
- i2: # input 1
- [24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47],
- i3: # input 2
- [48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71]}
-
-output0 = {out: # output 0
- [ 0, 1, 2, 3, 24, 25, 26, 27, 48, 49, 50, 51, 4, 5, 6, 7, 28, 29,
- 30, 31, 52, 53, 54, 55, 8, 9, 10, 11, 32, 33, 34, 35, 56, 57, 58, 59,
- 12, 13, 14, 15, 36, 37, 38, 39, 60, 61, 62, 63, 16, 17, 18, 19, 40, 41,
- 42, 43, 64, 65, 66, 67, 20, 21, 22, 23, 44, 45, 46, 47, 68, 69, 70, 71]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/prelu_ex_broadcast_float_1.mod.py b/tests/nnapi/specs/Ex/prelu_ex_broadcast_float_1.mod.py
new file mode 100644
index 000000000..23e363f5c
--- /dev/null
+++ b/tests/nnapi/specs/Ex/prelu_ex_broadcast_float_1.mod.py
@@ -0,0 +1,23 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 3}") # a vector of input
+i2 = Input("op2", "TENSOR_FLOAT32", "{1, 1, 1, 3}") # a vector of alpha
+i3 = Output("op3", "TENSOR_FLOAT32", "{1, 2, 2, 3}") # a vector of output
+model = model.Operation("PRELU_EX", i1, i2).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [0.0, 0.0, 0.0,
+ 1.0, 1.0, 1.0,
+ -1.0, -1.0, -1.0,
+ -2.0, -2.0, -2.0],
+ i2: # input 1
+ [0.0, 1.0, 2.0]}
+
+output0 = {i3: # output 0
+ [0.0, 0.0, 0.0,
+ 1.0, 1.0, 1.0,
+ 0.0, -1.0, -2.0,
+ 0.0, -2.0, -4.0]}
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/prelu_ex_broadcast_quant8_1.mod.py b/tests/nnapi/specs/Ex/prelu_ex_broadcast_quant8_1.mod.py
new file mode 100644
index 000000000..5fc6884bc
--- /dev/null
+++ b/tests/nnapi/specs/Ex/prelu_ex_broadcast_quant8_1.mod.py
@@ -0,0 +1,24 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 3}, 1.0f, 2") # a vector of input
+i2 = Input("op2", "TENSOR_QUANT8_ASYMM", "{1, 1, 3}, 1.0f, 1") # a vector of alpha
+i3 = Output("op3", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 3}, 0.5f, 3") # a vector of output
+model = model.Operation("PRELU_EX", i1, i2).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1, 1, 1,
+ 2, 2, 2,
+ 3, 3, 3,
+ 1, 2, 3],
+ i2: # input 1
+ [0, 1, 2]}
+
+output0 = {i3: # output 0
+ [5, 3, 1,
+ 3, 3, 3,
+ 5, 5, 5,
+ 5, 3, 5]}
+# Instantiate an example
+Example((input0, output0))
+
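The quantized variant above can be checked by dequantizing both operands, applying PReLU, and requantizing with the output scale and zero point. A minimal NumPy sketch (an illustration, not part of the patch):

import numpy as np

# real = scale * (q - zero_point) for each operand
x = 1.0 * (np.array([1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 2, 3]).reshape(1, 2, 2, 3) - 2)
alpha = 1.0 * (np.array([0, 1, 2]).reshape(1, 1, 3) - 1)

real = np.where(x >= 0, x, x * alpha)
q = np.round(real / 0.5 + 3).astype(np.uint8)   # output scale 0.5, zero point 3
print(q.flatten().tolist())                     # [5, 3, 1, 3, 3, 3, 5, 5, 5, 5, 3, 5]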
diff --git a/tests/nnapi/specs/Ex/prelu_ex_float_2.mod.py b/tests/nnapi/specs/Ex/prelu_ex_float_2.mod.py
deleted file mode 100644
index d2ebd2a0b..000000000
--- a/tests/nnapi/specs/Ex/prelu_ex_float_2.mod.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# model
-model = Model()
-i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 1}") # a vector of input
-i2 = Input("op2", "TENSOR_FLOAT32", "{1, 2, 2, 1}") # a vector of alpha
-i3 = Output("op3", "TENSOR_FLOAT32", "{1, 2, 2, 1}") # a vector of output
-model = model.Operation("PRELU_EX", i1, i2).To(i3)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- [3.0, -2.0,
- -1.0, -2.0
- ],
- i2: # input 1
- [0.0, 1.0,
- 1.0, 2.0]}
-
-output0 = {i3: # output 0
- [3.0, -2.0,
- -1.0, -4.0
- ]}
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/prelu_ex_quant8_1.mod.py b/tests/nnapi/specs/Ex/prelu_ex_quant8_1.mod.py
new file mode 100644
index 000000000..9548f8255
--- /dev/null
+++ b/tests/nnapi/specs/Ex/prelu_ex_quant8_1.mod.py
@@ -0,0 +1,23 @@
+# model
+model = Model()
+i1 = Input("op1", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 1}, 0.5f, 5") # a vector of input
+i2 = Input("op2", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 1}, 0.5f, 1") # a vector of alpha
+i3 = Output("op3", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 1}, 0.1f, 3") # a vector of output
+model = model.Operation("PRELU_EX", i1, i2).To(i3)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [3, 1,
+ 7, 11
+ ],
+ i2: # input 1
+ [0, 1,
+ 2, 2]}
+
+output0 = {i3: # output 0
+ [8, 3,
+ 13, 33
+ ]}
+# Instantiate an example
+Example((input0, output0))
+
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_2D_float.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_2D_float.mod.py
new file mode 100644
index 000000000..5df76cbdb
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_2D_float.mod.py
@@ -0,0 +1,18 @@
+# model
+model = Model()
+i1 = Input("input", "TENSOR_FLOAT32", "{3, 4}")
+axis = Int32Scalar("axis", 1)
+out1 = Output("output", "TENSOR_FLOAT32", "{3}")
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(out1)
+
+# Example 1. Input in operand 0, 1
+input0 = {i1: # input 0
+ [3.2, 11.47, 3.8, 5.76,
+ 28.2, 0.999, -1.3, -13.5,
+ -3.4, -22.1, -2.2, -49.7]}
+
+output0 = {out1: # output 0
+ [11.47, 28.2, -2.2]}
+
+# Instantiate an example
+Example((input0, output0))
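REDUCE_MAX_EX with a scalar axis reduces that one dimension, matching NumPy's max. A minimal sketch for the spec above (an illustration, not part of the patch):

import numpy as np

x = np.array([3.2, 11.47, 3.8, 5.76,
              28.2, 0.999, -1.3, -13.5,
              -3.4, -22.1, -2.2, -49.7]).reshape(3, 4)
print(x.max(axis=1).tolist())   # [11.47, 28.2, -2.2]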
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_2D_int32.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_2D_int32.mod.py
new file mode 100644
index 000000000..94e952646
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_2D_int32.mod.py
@@ -0,0 +1,18 @@
+# model
+model = Model()
+i1 = Input("input", "TENSOR_INT32", "{3, 4}")
+axis = Int32Scalar("axis", 1)
+out1 = Output("output", "TENSOR_INT32", "{3}")
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(out1)
+
+# Example 1. Input in operand 0, 1
+input0 = {i1: # input 0
+ [3, 11, 3, 5,
+ 28, 0, -1, -13,
+ -4, -22, -2, -49]}
+
+output0 = {out1: # output 0
+ [11, 28, -2]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_C.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_C.mod.py
new file mode 100644
index 000000000..4143b4b4c
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_C.mod.py
@@ -0,0 +1,34 @@
+batch = 2
+rows = 3
+cols = 4
+depth = 5
+
+input_table = [x for x in range(batch * rows * cols * depth)]
+for i in range(batch):
+  for j in range(rows):
+    for k in range(cols):
+      for l in range(depth):
+        input_table[i * rows * cols * depth + j * cols * depth + k * depth + l] = i * rows * cols * depth + j * cols * depth + k * depth + l
+
+output_table = [x for x in range(batch * rows * cols)]
+for i in range(batch):
+  for j in range(rows):
+    for k in range(cols):
+      output_table[i * rows * cols + j * cols + k] = i * rows * cols * depth + j * cols * depth + k * depth + depth - 1
+
+model = Model()
+i1 = Input("input", "TENSOR_FLOAT32", "{%d, %d, %d, %d}" % (batch, rows, cols, depth))
+axis = Parameter("axis", "TENSOR_INT32", "{2}", [3, -1])
+output = Output("output", "TENSOR_FLOAT32", "{%d, %d, %d}" % (batch, rows, cols))
+
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ input_table}
+
+output0 = {output: # output 0
+ output_table}
+
+# Instantiate an example
+Example((input0, output0))
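The generator above leans on the input being monotonically increasing, so the maximum over the reduced channel axis is always the last element along that axis (hence the "+ depth - 1" in the closed form). A minimal NumPy check of that invariant (an illustration, not part of the patch):

import numpy as np

batch, rows, cols, depth = 2, 3, 4, 5
x = np.arange(batch * rows * cols * depth, dtype=np.float32).reshape(batch, rows, cols, depth)
# Reducing axis 3 (equivalently -1) keeps the last channel of each position.
assert np.array_equal(x.max(axis=-1), x[..., -1])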
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_HW.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_HW.mod.py
new file mode 100644
index 000000000..e220df7a8
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_4D_float_reducing_HW.mod.py
@@ -0,0 +1,34 @@
+batch = 2
+rows = 3
+cols = 4
+depth = 5
+
+input_table = [x for x in range(batch * rows * cols * depth)]
+for i in range(batch):
+  for j in range(rows):
+    for k in range(cols):
+      for l in range(depth):
+        input_table[i * rows * cols * depth + j * cols * depth + k * depth + l] = i * rows * cols * depth + j * cols * depth + k * depth + l
+
+# Since the axes to be reduced are {rows, cols} and the input values increase monotonically, the output values are i * rows * cols * depth + (rows - 1) * cols * depth + (cols - 1) * depth + l.
+output_table = [x for x in range(batch * depth)]
+for i in range(batch):
+  for l in range(depth):
+    output_table[i * depth + l] = i * rows * cols * depth + (rows - 1) * cols * depth + (cols - 1) * depth + l
+
+model = Model()
+i1 = Input("input", "TENSOR_FLOAT32", "{%d, %d, %d, %d}" % (batch, rows, cols, depth))
+axis = Parameter("axis", "TENSOR_INT32", "{4}", [1, 2, -3, -2])
+output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (batch, depth))
+
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ input_table}
+
+output0 = {output: # output 0
+ output_table}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_float.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_float.mod.py
new file mode 100644
index 000000000..c6387ae3d
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_float.mod.py
@@ -0,0 +1,18 @@
+model = Model()
+i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
+axis = Parameter("axis", "TENSOR_INT32", "{1}", [2])
+output = Output("output", "TENSOR_FLOAT32", "{1, 2, 1}")
+
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1.0, 2.0,
+ 3.0, 4.0]}
+
+output0 = {output: # output 0
+ [2.0,
+ 4.0]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_float_1.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_float_1.mod.py
new file mode 100644
index 000000000..7871c6123
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_float_1.mod.py
@@ -0,0 +1,17 @@
+model = Model()
+i1 = Input("input", "TENSOR_FLOAT32", "{4, 3, 2}")
+axis = Parameter("axis", "TENSOR_INT32", "{4}", [1, 0, -3, -3])
+output = Output("output", "TENSOR_FLOAT32", "{2}")
+
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0,
+ 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0]}
+
+output0 = {output: # output 0
+ [23.0, 24.0]}
+
+# Instantiate an example
+Example((input0, output0))
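The axis tensor here mixes positive, negative, and duplicate indices; [1, 0, -3, -3] all normalize to the axis set {0, 1}, leaving only the last dimension. A minimal NumPy sketch (an illustration, not part of the patch):

import numpy as np

x = np.arange(1, 25, dtype=np.float32).reshape(4, 3, 2)
axes = {a % x.ndim for a in [1, 0, -3, -3]}       # -> {0, 1}
print(x.max(axis=tuple(sorted(axes))).tolist())   # [23.0, 24.0]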
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_float_2.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_float_2.mod.py
new file mode 100644
index 000000000..fb523b294
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_float_2.mod.py
@@ -0,0 +1,17 @@
+model = Model()
+i1 = Input("input", "TENSOR_FLOAT32", "{4, 3, 2}")
+axis = Parameter("axis", "TENSOR_INT32", "{2}", [0, 2])
+output = Output("output", "TENSOR_FLOAT32", "{1, 3, 1}")
+
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0,
+ 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0]}
+
+output0 = {output: # output 0
+ [20, 22, 24]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_quant8_1.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_quant8_1.mod.py
new file mode 100644
index 000000000..9cef1def4
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_quant8_1.mod.py
@@ -0,0 +1,18 @@
+model = Model()
+i1 = Input("input", "TENSOR_QUANT8_ASYMM", "{4, 3, 2}, 0.8, 5")
+axis = Parameter("axis", "TENSOR_INT32", "{4}", [1, 0, -3, -3])
+output = Output("output", "TENSOR_QUANT8_ASYMM", "{2}, 0.8, 5")
+
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24]}
+
+output0 = {output: # output 0
+ [23, 24]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/reduce_max_ex_quant8_2.mod.py b/tests/nnapi/specs/Ex/reduce_max_ex_quant8_2.mod.py
new file mode 100644
index 000000000..d59c11b0e
--- /dev/null
+++ b/tests/nnapi/specs/Ex/reduce_max_ex_quant8_2.mod.py
@@ -0,0 +1,18 @@
+model = Model()
+i1 = Input("input", "TENSOR_QUANT8_ASYMM", "{4, 3, 2}, 0.8, 5")
+axis = Parameter("axis", "TENSOR_INT32", "{2}", [0, 2])
+output = Output("output", "TENSOR_QUANT8_ASYMM", "{1, 3, 1}, 0.8, 5")
+
+model = model.Operation("REDUCE_MAX_EX", i1, axis).To(output)
+
+# Example 1. Input in operand 0,
+input0 = {i1: # input 0
+ [1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24]}
+
+output0 = {output: # output 0
+ [20, 22, 24]}
+
+# Instantiate an example
+Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/split_ex_1D_float.mod.py b/tests/nnapi/specs/Ex/split_ex_1D_float.mod.py
index 97b61462e..137ece828 100644
--- a/tests/nnapi/specs/Ex/split_ex_1D_float.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_1D_float.mod.py
@@ -2,6 +2,7 @@
model = Model()
i1 = Input("op1", "TENSOR_FLOAT32", "{8}")
axis = Int32Scalar("axis", 0)
+num_out = Int32Scalar("num_out", 8)
i2 = Output("op2", "TENSOR_FLOAT32", "{1}")
i3 = Output("op3", "TENSOR_FLOAT32", "{1}")
i4 = Output("op4", "TENSOR_FLOAT32", "{1}")
@@ -11,7 +12,7 @@ i7 = Output("op7", "TENSOR_FLOAT32", "{1}")
i8 = Output("op8", "TENSOR_FLOAT32", "{1}")
i9 = Output("op9", "TENSOR_FLOAT32", "{1}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3, i4, i5, i6, i7, i8, i9])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3, i4, i5, i6, i7, i8, i9])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
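The SPLIT_EX hunk above and the ones that follow all make the same signature change: the input tensor now comes first, followed by the split axis and an explicit num_out operand giving the number of output tensors. Semantically the op still splits the input into num_out equal parts along the axis, like NumPy's split. A minimal sketch (an illustration, not part of the patch):

import numpy as np

x = np.arange(8, dtype=np.float32)   # TENSOR_FLOAT32 {8}
parts = np.split(x, 8, axis=0)       # num_out = 8 -> eight {1} outputs
print([p.tolist() for p in parts])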
diff --git a/tests/nnapi/specs/Ex/split_ex_1D_int32.mod.py b/tests/nnapi/specs/Ex/split_ex_1D_int32.mod.py
index 378b37707..9bfc215f6 100644
--- a/tests/nnapi/specs/Ex/split_ex_1D_int32.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_1D_int32.mod.py
@@ -2,6 +2,7 @@
model = Model()
i1 = Input("op1", "TENSOR_INT32", "{8}")
axis = Int32Scalar("axis", 0)
+num_out = Int32Scalar("num_out", 8)
i2 = Output("op2", "TENSOR_INT32", "{1}")
i3 = Output("op3", "TENSOR_INT32", "{1}")
i4 = Output("op4", "TENSOR_INT32", "{1}")
@@ -11,7 +12,7 @@ i7 = Output("op7", "TENSOR_INT32", "{1}")
i8 = Output("op8", "TENSOR_INT32", "{1}")
i9 = Output("op9", "TENSOR_INT32", "{1}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3, i4, i5, i6, i7, i8, i9])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3, i4, i5, i6, i7, i8, i9])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_float_1.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_float_1.mod.py
index 3059fae7c..2353a8a0f 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_float_1.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_float_1.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_FLOAT32", "{2,2,2,2}")
axis = Int32Scalar("axis", 0)
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_FLOAT32", "{1,2,2,2}")
i3 = Output("op3", "TENSOR_FLOAT32", "{1,2,2,2}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_float_2.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_float_2.mod.py
index faa59bc3c..30ecf2416 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_float_2.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_float_2.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_FLOAT32", "{2,2,2,2}")
axis = Int32Scalar("axis", 3)
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_FLOAT32", "{2,2,2,1}")
i3 = Output("op3", "TENSOR_FLOAT32", "{2,2,2,1}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_float_3.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_float_3.mod.py
index 2091e4b4a..1f7a07880 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_float_3.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_float_3.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_FLOAT32", "{2,2,2,2}")
axis = Int32Scalar("axis", -4) # Negative axis
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_FLOAT32", "{1,2,2,2}")
i3 = Output("op3", "TENSOR_FLOAT32", "{1,2,2,2}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_int32_1.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_int32_1.mod.py
index 3e9438f50..b1c6692ee 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_int32_1.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_int32_1.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_INT32", "{2,2,2,2}")
axis = Int32Scalar("axis", 0)
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_INT32", "{1,2,2,2}")
i3 = Output("op3", "TENSOR_INT32", "{1,2,2,2}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_int32_2.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_int32_2.mod.py
index 83253cb14..88a7cca48 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_int32_2.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_int32_2.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_INT32", "{2,2,2,2}")
axis = Int32Scalar("axis", 1)
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_INT32", "{2,1,2,2}")
i3 = Output("op3", "TENSOR_INT32", "{2,1,2,2}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_int32_3.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_int32_3.mod.py
index b20b2053b..6f5cc621b 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_int32_3.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_int32_3.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_INT32", "{2,2,2,2}")
axis = Int32Scalar("axis", 2)
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_INT32", "{2,2,1,2}")
i3 = Output("op3", "TENSOR_INT32", "{2,2,1,2}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_int32_4.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_int32_4.mod.py
index 3c255cc48..fb6710171 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_int32_4.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_int32_4.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_INT32", "{2,2,2,2}")
axis = Int32Scalar("axis", 3)
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_INT32", "{2,2,2,1}")
i3 = Output("op3", "TENSOR_INT32", "{2,2,2,1}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_int32_5.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_int32_5.mod.py
index 30e5e245d..338794855 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_int32_5.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_int32_5.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_INT32", "{2,2,2,2}")
axis = Int32Scalar("axis", -4) # Negative axis
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_INT32", "{1,2,2,2}")
i3 = Output("op3", "TENSOR_INT32", "{1,2,2,2}")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/split_ex_4D_quant8.mod.py b/tests/nnapi/specs/Ex/split_ex_4D_quant8.mod.py
index 46cb3ab78..a3dbffaa9 100644
--- a/tests/nnapi/specs/Ex/split_ex_4D_quant8.mod.py
+++ b/tests/nnapi/specs/Ex/split_ex_4D_quant8.mod.py
@@ -2,9 +2,10 @@
model = Model()
i1 = Input("op1", "TENSOR_QUANT8_ASYMM", "{2,2,2,2}, 0.5f, 1")
axis = Int32Scalar("axis", 0)
+num_out = Int32Scalar("num_out", 2)
i2 = Output("op2", "TENSOR_QUANT8_ASYMM", "{1,2,2,2}, 0.5f, 1")
i3 = Output("op3", "TENSOR_QUANT8_ASYMM", "{1,2,2,2}, 0.5f, 1")
-model = model.Operation("SPLIT_EX", axis, i1).To([i2, i3])
+model = model.Operation("SPLIT_EX", i1, axis, num_out).To([i2, i3])
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_2D_float.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_2D_float.mod.py
deleted file mode 100644
index 6cf8d83e7..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_2D_float.mod.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# model
-model = Model()
-i1 = Input("input", "TENSOR_FLOAT32", "{3, 4}")
-axis = Int32Scalar("axis", 1)
-out1 = Output("output", "TENSOR_FLOAT32", "{3}")
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(out1)
-
-# Example 1. Input in operand 0, 1
-input0 = {i1: # input 0
- [3.2, 11.47, 3.8, 5.76,
- 28.2, 0.999, -1.3, -13.5,
- -3.4, -22.1, -2.2, -49.7]}
-
-output0 = {out1: # output 0
- [11.47, 28.2, -2.2]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_2D_int32.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_2D_int32.mod.py
deleted file mode 100644
index 940dab3c3..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_2D_int32.mod.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# model
-model = Model()
-i1 = Input("input", "TENSOR_INT32", "{3, 4}")
-axis = Int32Scalar("axis", 1)
-out1 = Output("output", "TENSOR_INT32", "{3}")
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(out1)
-
-# Example 1. Input in operand 0, 1
-input0 = {i1: # input 0
- [3, 11, 3, 5,
- 28, 0, -1, -13,
- -4, -22, -2, -49]}
-
-output0 = {out1: # output 0
- [11, 28, -2]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_C.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_C.mod.py
deleted file mode 100644
index 82a5dbb93..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_C.mod.py
+++ /dev/null
@@ -1,34 +0,0 @@
-batch = 2
-rows = 3
-cols = 4
-depth = 5
-
-input_table = [x for x in range(batch * rows * cols * depth)]
-for i in range(batch):
- for j in range(rows):
- for k in range(cols):
- for l in range(depth):
- input_table[i * rows * cols * depth + j * cols * depth + k * depth + l] = i * rows * cols * depth + j * cols * depth + k * depth + l;
-
-output_table = [x for x in range(batch * rows * cols)]
-for i in range(batch):
- for j in range(rows):
- for k in range(cols):
- output_table[i * rows * cols + j * cols + k] = i * rows * cols * depth + j * cols * depth + k * depth + depth - 1;
-
-model = Model()
-i1 = Input("input", "TENSOR_FLOAT32", "{%d, %d, %d, %d}" % (batch, rows, cols, depth))
-axis = Parameter("axis", "TENSOR_INT32", "{2}", [3, -1])
-output = Output("output", "TENSOR_FLOAT32", "{%d, %d, %d}" % (batch, rows, cols))
-
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(output)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- input_table}
-
-output0 = {output: # output 0
- output_table}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_HW.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_HW.mod.py
deleted file mode 100644
index a12762055..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_4D_float_reducing_HW.mod.py
+++ /dev/null
@@ -1,34 +0,0 @@
-batch = 2
-rows = 3
-cols = 4
-depth = 5
-
-input_table = [x for x in range(batch * rows * cols * depth)]
-for i in range(batch):
- for j in range(rows):
- for k in range(cols):
- for l in range(depth):
- input_table[i * rows * cols * depth + j * cols * depth + k * depth + l] = i * rows * cols * depth + j * cols * depth + k * depth + l;
-
-# Since the axises to be reduced are {rows, cols} and the value of the input always increases in here, the output's values are i * rows * cols * depth + (rows - 1) * cols * depth + (cols - 1) * depth + l.
-output_table = [x for x in range(batch * depth)]
-for i in range(batch):
- for l in range(depth):
- output_table[i * depth + l] = i * rows * cols * depth + (rows - 1) * cols * depth + (cols - 1) * depth + l;
-
-model = Model()
-i1 = Input("input", "TENSOR_FLOAT32", "{%d, %d, %d, %d}" % (batch, rows, cols, depth))
-axis = Parameter("axis", "TENSOR_INT32", "{4}", [1, 2, -3, -2])
-output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (batch, depth))
-
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(output)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- input_table}
-
-output0 = {output: # output 0
- output_table}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_float.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_float.mod.py
deleted file mode 100644
index 6de220721..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_float.mod.py
+++ /dev/null
@@ -1,18 +0,0 @@
-model = Model()
-i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
-axis = Parameter("axis", "TENSOR_INT32", "{1}", [2])
-output = Output("output", "TENSOR_FLOAT32", "{1, 2, 1}")
-
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(output)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- [1.0, 2.0,
- 3.0, 4.0]}
-
-output0 = {output: # output 0
- [2.0,
- 4.0]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_float_1.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_float_1.mod.py
deleted file mode 100644
index 42dc9d94b..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_float_1.mod.py
+++ /dev/null
@@ -1,17 +0,0 @@
-model = Model()
-i1 = Input("input", "TENSOR_FLOAT32", "{4, 3, 2}")
-axis = Parameter("axis", "TENSOR_INT32", "{4}", [1, 0, -3, -3])
-output = Output("output", "TENSOR_FLOAT32", "{2}")
-
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(output)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0,
- 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0]}
-
-output0 = {output: # output 0
- [23.0, 24.0]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_float_2.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_float_2.mod.py
deleted file mode 100644
index 98c0d9d03..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_float_2.mod.py
+++ /dev/null
@@ -1,17 +0,0 @@
-model = Model()
-i1 = Input("input", "TENSOR_FLOAT32", "{4, 3, 2}")
-axis = Parameter("axis", "TENSOR_INT32", "{2}", [0, 2])
-output = Output("output", "TENSOR_FLOAT32", "{1, 3, 1}")
-
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(output)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0,
- 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0]}
-
-output0 = {output: # output 0
- [20, 22, 24]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_1.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_1.mod.py
deleted file mode 100644
index 8e85633b3..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_1.mod.py
+++ /dev/null
@@ -1,18 +0,0 @@
-model = Model()
-i1 = Input("input", "TENSOR_QUANT8_ASYMM", "{4, 3, 2}, 0.8, 5")
-axis = Parameter("axis", "TENSOR_INT32", "{4}", [1, 0, -3, -3])
-output = Output("output", "TENSOR_QUANT8_ASYMM", "{2}, 0.8, 5")
-
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(output)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- [1, 2, 3, 4, 5, 6, 7, 8,
- 9, 10, 11, 12, 13, 14, 15, 16,
- 17, 18, 19, 20, 21, 22, 23, 24]}
-
-output0 = {output: # output 0
- [23, 24]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_2.mod.py b/tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_2.mod.py
deleted file mode 100644
index 75138b14a..000000000
--- a/tests/nnapi/specs/Ex/tensorflowmax_ex_quant8_2.mod.py
+++ /dev/null
@@ -1,18 +0,0 @@
-model = Model()
-i1 = Input("input", "TENSOR_QUANT8_ASYMM", "{4, 3, 2}, 0.8, 5")
-axis = Parameter("axis", "TENSOR_INT32", "{2}", [0, 2])
-output = Output("output", "TENSOR_QUANT8_ASYMM", "{1, 3, 1}, 0.8, 5")
-
-model = model.Operation("TENSORFLOW_MAX_EX", i1, axis).To(output)
-
-# Example 1. Input in operand 0,
-input0 = {i1: # input 0
- [1, 2, 3, 4, 5, 6, 7, 8,
- 9, 10, 11, 12, 13, 14, 15, 16,
- 17, 18, 19, 20, 21, 22, 23, 24]}
-
-output0 = {output: # output 0
- [20, 22, 24]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/transpose_conv_ex_float_4.mod.py b/tests/nnapi/specs/Ex/transpose_conv_ex_float_4.mod.py
new file mode 100644
index 000000000..71383b4c1
--- /dev/null
+++ b/tests/nnapi/specs/Ex/transpose_conv_ex_float_4.mod.py
@@ -0,0 +1,56 @@
+# model
+model = Model()
+i0 = Input("op_shape", "TENSOR_INT32", "{4}")
+weights = Input("ker", "TENSOR_FLOAT32", "{1, 3, 3, 1}")
+i1 = Input("in", "TENSOR_FLOAT32", "{1, 4, 4, 1}" )
+pad = Int32Scalar("pad_same", 1)
+s_x = Int32Scalar("stride_x", 2)
+s_y = Int32Scalar("stride_y", 2)
+i2 = Output("op", "TENSOR_FLOAT32", "{1, 8, 8, 1}")
+model = model.Operation("TRANSPOSE_CONV_EX", i0, weights, i1, pad, s_x, s_y).To(i2)
+
+batch = 1
+in_chans = 1
+out_chans = 1
+in_rows = 4
+in_cols = 4
+out_rows = 8
+out_cols = 8
+ker_rows = 3
+ker_cols = 3
+stride = 2
+# Effective SAME padding: left 0, right 1, top 0, bottom 1 (so no left/top offset is applied in the loop below)
+input_table = [x for x in range(batch * in_rows * in_cols * in_chans)]
+kernel_table = [x for x in range(out_chans * ker_rows * ker_cols * in_chans)]
+out_table = [0 for x in range(batch * out_rows * out_cols * out_chans)]
+
+for i in range(batch):
+  for j in range(in_rows):
+    for k in range(in_cols):
+      for l in range(in_chans):
+        out_row_origin = j * stride
+        out_col_origin = k * stride
+        input_value = input_table[((i * in_rows + j) * in_cols + k) * in_chans + l]
+
+        for m in range(ker_rows):
+          for n in range(ker_cols):
+            for o in range(out_chans):
+              out_row = out_row_origin + m
+              out_col = out_col_origin + n
+              if (out_row < out_rows) and (out_col < out_cols) and (out_row >= 0) and (out_col >= 0):
+                kernel_value = kernel_table[((o * ker_rows + m) * ker_cols + n) * in_chans + l]
+                out_table[((i * out_rows + out_row) * out_cols + out_col) * out_chans + o] += (input_value * kernel_value)
+
+# Example 1. Input in operand 0,
+input0 = {i0: # output shape
+ [1, 8, 8, 1],
+ i1: # input 0
+ input_table,
+ weights: # input 1
+ kernel_table}
+
+output0 = {i2: # output 0
+ out_table}
+
+# Instantiate an example
+Example((input0, output0))
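The scatter-add loop in the new spec above is the standard "gradient of convolution" form of a transposed convolution: each input pixel is multiplied by the kernel and accumulated into a stride-spaced window of the output. For this single-channel case it agrees with zero-stuffing the input by the stride and taking a full 2-D convolution cropped at the top-left; a minimal cross-check (assuming SciPy is available; an illustration, not part of the patch):

import numpy as np
from scipy.signal import convolve2d

inp = np.arange(16, dtype=np.float32).reshape(4, 4)
ker = np.arange(9, dtype=np.float32).reshape(3, 3)

up = np.zeros((8, 8), dtype=np.float32)
up[::2, ::2] = inp                               # stride-2 zero stuffing
out = convolve2d(up, ker, mode="full")[:8, :8]   # crop: left/top padding is 0
# out.flatten() matches the out_table computed by the generator above.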
diff --git a/tests/nnapi/specs/Ex/transpose_conv_ex_int_1.mod.py b/tests/nnapi/specs/Ex/transpose_conv_ex_int_1.mod.py
deleted file mode 100644
index fb8eb72be..000000000
--- a/tests/nnapi/specs/Ex/transpose_conv_ex_int_1.mod.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# model
-model = Model()
-i0 = Input("op_shape", "TENSOR_INT32", "{4}")
-weights = Input("ker", "TENSOR_INT32", "{1, 3, 3, 1}")
-i1 = Input("in", "TENSOR_INT32", "{1, 4, 4, 1}" )
-pad = Int32Scalar("pad_same", 1)
-s_x = Int32Scalar("stride_x", 1)
-s_y = Int32Scalar("stride_y", 1)
-i2 = Output("op", "TENSOR_INT32", "{1, 4, 4, 1}")
-model = model.Operation("TRANSPOSE_CONV_EX", i0, weights, i1, pad, s_x, s_y).To(i2)
-
-# Example 1. Input in operand 0,
-input0 = {i0: # output shape
- [1, 4, 4, 1],
- i1: # input 0
- [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
- weights: # input 1
- [1, 2, 3, 4, 5, 6, 7, 8, 9]}
-
-output0 = {i2: # output 0
- [29, 62, 83, 75, 99, 192, 237, 198, 207, 372, 417, 330, 263, 446, 485, 365]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/transpose_conv_ex_int_2.mod.py b/tests/nnapi/specs/Ex/transpose_conv_ex_int_2.mod.py
deleted file mode 100644
index 9bf4b113d..000000000
--- a/tests/nnapi/specs/Ex/transpose_conv_ex_int_2.mod.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# model
-model = Model()
-i0 = Input("op_shape", "TENSOR_INT32", "{4}")
-weights = Input("ker", "TENSOR_INT32", "{1, 3, 3, 1}")
-i1 = Input("in", "TENSOR_INT32", "{1, 2, 2, 1}" )
-pad = Int32Scalar("pad_valid", 2)
-s_x = Int32Scalar("stride_x", 2)
-s_y = Int32Scalar("stride_y", 2)
-i2 = Output("op", "TENSOR_INT32", "{1, 5, 5, 1}")
-model = model.Operation("TRANSPOSE_CONV_EX", i0, weights, i1, pad, s_x, s_y).To(i2)
-
-# Example 1. Input in operand 0,
-input0 = {i0: # output shape
- [1, 5, 5, 1],
- i1: # input 0
- [1, 2, 3, 4],
- weights: # input 1
- [1, 2, 3, 4, 5, 6, 7, 8, 9]}
-
-output0 = {i2: # output 0
- [1, 2, 5, 4, 6, 4, 5, 14, 10, 12, 10, 14, 36,
- 24, 30, 12, 15, 34, 20, 24, 21, 24, 55, 32, 36]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/transpose_conv_ex_int_3.mod.py b/tests/nnapi/specs/Ex/transpose_conv_ex_int_3.mod.py
deleted file mode 100644
index e44480968..000000000
--- a/tests/nnapi/specs/Ex/transpose_conv_ex_int_3.mod.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# model
-model = Model()
-i0 = Input("op_shape", "TENSOR_INT32", "{4}")
-weights = Input("ker", "TENSOR_INT32", "{2, 3, 3, 1}")
-i1 = Input("in", "TENSOR_INT32", "{1, 2, 2, 1}" )
-pad = Int32Scalar("pad_valid", 2)
-s_x = Int32Scalar("stride_x", 2)
-s_y = Int32Scalar("stride_y", 2)
-i2 = Output("op", "TENSOR_INT32", "{1, 5, 5, 2}")
-model = model.Operation("TRANSPOSE_CONV_EX", i0, weights, i1, pad, s_x, s_y).To(i2)
-
-# Example 1. Input in operand 0,
-input0 = {i0: # output shape
- [1, 5, 5, 2],
- i1: # input 0
- [1, 2, 3, 4],
- weights: # input 1
- [1, 3, 5, 7, 9, 11, 13, 15, 17, 2, 4, 6, 8, 10, 12, 14, 16, 18]}
-
-output0 = {i2: # output 0
- [1, 2, 3, 4, 7, 10, 6, 8, 10, 12, 7, 8, 9,
- 10, 25, 28, 18, 20, 22, 24, 16, 20, 24, 28, 62, 72,
- 42, 48, 54, 60, 21, 24, 27, 30, 61, 68, 36, 40, 44,
- 48, 39, 42, 45, 48, 103, 110, 60, 64, 68, 72]}
-
-# Instantiate an example
-Example((input0, output0))
diff --git a/tests/nnapi/specs/Ex/unpack_ex_3D_float_1.mod.py b/tests/nnapi/specs/Ex/unpack_ex_3D_float_1.mod.py
index e5cb38e4b..7e8ef6086 100644
--- a/tests/nnapi/specs/Ex/unpack_ex_3D_float_1.mod.py
+++ b/tests/nnapi/specs/Ex/unpack_ex_3D_float_1.mod.py
@@ -1,11 +1,11 @@
# Sample UnPack model, axis = 0
model = Model()
-input = Input("input", "TENSOR_FLOAT32", "{3, 2, 3, 4}")
+input = Input("input", "TENSOR_FLOAT32", "{3, 6, 4}")
axis = Int32Scalar("axis", 0)
num_splits = Int32Scalar("num_splits", 3)
-out1 = Output("output1", "TENSOR_FLOAT32", "{2, 3, 4}")
-out2 = Output("output2", "TENSOR_FLOAT32", "{2, 3, 4}")
-out3 = Output("output3", "TENSOR_FLOAT32", "{2, 3, 4}")
+out1 = Output("output1", "TENSOR_FLOAT32", "{6, 4}")
+out2 = Output("output2", "TENSOR_FLOAT32", "{6, 4}")
+out3 = Output("output3", "TENSOR_FLOAT32", "{6, 4}")
model = model.Operation("UNPACK_EX", input, num_splits, axis).To([out1, out2, out3])
input0 = {input: # input 0
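UNPACK_EX with num_splits = 3 along axis 0 is the inverse of PACK_EX: it slices the input into three tensors and drops the packed axis, like splitting and squeezing in NumPy. A minimal sketch for the reshaped {3, 6, 4} input above (an illustration, not part of the patch):

import numpy as np

x = np.arange(3 * 6 * 4, dtype=np.float32).reshape(3, 6, 4)
outs = [np.squeeze(p, axis=0) for p in np.split(x, 3, axis=0)]
print([o.shape for o in outs])   # [(6, 4), (6, 4), (6, 4)]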
diff --git a/tests/nnapi/specs/Ex/unpack_ex_3D_float_2.mod.py b/tests/nnapi/specs/Ex/unpack_ex_3D_float_2.mod.py
index 1d1045ae9..ed7800ad4 100644
--- a/tests/nnapi/specs/Ex/unpack_ex_3D_float_2.mod.py
+++ b/tests/nnapi/specs/Ex/unpack_ex_3D_float_2.mod.py
@@ -1,11 +1,11 @@
-# Sample UnPack model, axis = 2
+# Sample UnPack model, axis = 1
model = Model()
-input = Input("input", "TENSOR_FLOAT32", "{3, 2, 3, 4}")
-axis = Int32Scalar("axis", 2)
+input = Input("input", "TENSOR_FLOAT32", "{6, 3, 4}")
+axis = Int32Scalar("axis", 1)
num_splits = Int32Scalar("num_splits", 3)
-out1 = Output("output1", "TENSOR_FLOAT32", "{3, 2, 4}")
-out2 = Output("output2", "TENSOR_FLOAT32", "{3, 2, 4}")
-out3 = Output("output3", "TENSOR_FLOAT32", "{3, 2, 4}")
+out1 = Output("output1", "TENSOR_FLOAT32", "{6, 4}")
+out2 = Output("output2", "TENSOR_FLOAT32", "{6, 4}")
+out3 = Output("output3", "TENSOR_FLOAT32", "{6, 4}")
model = model.Operation("UNPACK_EX", input, num_splits, axis).To([out1, out2, out3])
input0 = {input: # input 0
diff --git a/tests/nnapi/specs/Ex/unpack_ex_3D_int_1.mod.py b/tests/nnapi/specs/Ex/unpack_ex_3D_int_1.mod.py
index 2a668cf7e..34e153bfc 100644
--- a/tests/nnapi/specs/Ex/unpack_ex_3D_int_1.mod.py
+++ b/tests/nnapi/specs/Ex/unpack_ex_3D_int_1.mod.py
@@ -1,11 +1,11 @@
# Sample UnPack model, axis = 0
model = Model()
-input = Input("input", "TENSOR_INT32", "{3, 2, 3, 4}")
+input = Input("input", "TENSOR_INT32", "{3, 6, 4}")
axis = Int32Scalar("axis", 0)
num_splits = Int32Scalar("num_splits", 3)
-out1 = Output("output1", "TENSOR_INT32", "{2, 3, 4}")
-out2 = Output("output2", "TENSOR_INT32", "{2, 3, 4}")
-out3 = Output("output3", "TENSOR_INT32", "{2, 3, 4}")
+out1 = Output("output1", "TENSOR_INT32", "{6, 4}")
+out2 = Output("output2", "TENSOR_INT32", "{6, 4}")
+out3 = Output("output3", "TENSOR_INT32", "{6, 4}")
model = model.Operation("UNPACK_EX", input, num_splits, axis).To([out1, out2, out3])
input0 = {input: # input 0
diff --git a/tests/nnapi/specs/Ex/unpack_ex_3D_int_2.mod.py b/tests/nnapi/specs/Ex/unpack_ex_3D_int_2.mod.py
index 115954972..db51351c5 100644
--- a/tests/nnapi/specs/Ex/unpack_ex_3D_int_2.mod.py
+++ b/tests/nnapi/specs/Ex/unpack_ex_3D_int_2.mod.py
@@ -1,11 +1,11 @@
-# Sample UnPack model, axis = 2
+# Sample UnPack model, axis = 1
model = Model()
-input = Input("input", "TENSOR_INT32", "{3, 2, 3, 4}")
-axis = Int32Scalar("axis", 2)
+input = Input("input", "TENSOR_INT32", "{6, 3, 4}")
+axis = Int32Scalar("axis", 1)
num_splits = Int32Scalar("num_splits", 3)
-out1 = Output("output1", "TENSOR_INT32", "{3, 2, 4}")
-out2 = Output("output2", "TENSOR_INT32", "{3, 2, 4}")
-out3 = Output("output3", "TENSOR_INT32", "{3, 2, 4}")
+out1 = Output("output1", "TENSOR_INT32", "{6, 4}")
+out2 = Output("output2", "TENSOR_INT32", "{6, 4}")
+out3 = Output("output3", "TENSOR_INT32", "{6, 4}")
model = model.Operation("UNPACK_EX", input, num_splits, axis).To([out1, out2, out3])
input0 = {input: # input 0
diff --git a/tests/nnapi/specs/V1_0/conv_1_h3_w2_SAME.mod.py b/tests/nnapi/specs/V1_0/conv_1_h3_w2_SAME.mod.py
index 34f2c49f9..45a356927 100644
--- a/tests/nnapi/specs/V1_0/conv_1_h3_w2_SAME.mod.py
+++ b/tests/nnapi/specs/V1_0/conv_1_h3_w2_SAME.mod.py
@@ -7,7 +7,7 @@ i2 = Input("op2", "TENSOR_FLOAT32", "{1, 8, 8, 3}") # input 0
i3 = Output("op3", "TENSOR_FLOAT32", "{1, 8, 8, 1}") # output 0
i0 = Parameter("op0", "TENSOR_FLOAT32", "{1, 3, 2, 3}", [-0.966213, -0.467474, -0.82203, -0.579455, 0.0278809, -0.79946, -0.684259, 0.563238, 0.37289, 0.738216, 0.386045, -0.917775, 0.184325, -0.270568, 0.82236, 0.0973683, -0.941308, -0.144706]) # parameters
i1 = Parameter("op1", "TENSOR_FLOAT32", "{1}", [0]) # parameters
-model = model.Conv(i2, i0, i1, i4, i5, i6, i7).To(i3)
+model = model.Operation("CONV_2D", i2, i0, i1, i4, i5, i6, i7).To(i3)
input0 = {i2: [-0.869931, 0.644628, -0.918393, 0.153672, 0.868562, -0.358177, -0.134931, -0.247565, 0.22174, -0.259157, -0.284296, -0.538065, 0.765559, 0.41986, -0.556241, 0.658494, 0.214355, -0.850169, -0.252893, -0.478935, 0.530526, -0.0700663, -0.988729, -0.303061, 0.150845, 0.829915, 0.476349, 0.406537, -0.355343, 0.757145, -0.356362, 0.800482, -0.713861, 0.210483, -0.634303, 0.718236, -0.752038, 0.457547, -0.550769, -0.551178, 0.446766, -0.227462, 0.216348, -0.852806, -0.351486, 0.55906, -0.668493, -0.303493, -0.363763, -0.162837, 0.0701012, 0.756097, -0.142269, 0.329724, -0.656317, -0.998086, -0.652949, -0.40316, -0.893682, 0.432744, 0.612362, -0.869588, -0.71327, -0.398092, -0.0423559, 0.436576, -0.925272, 0.176549, 0.822904, 0.096833, -0.296802, -0.427195, 0.031654, -0.254479, 0.244905, 0.0948254, 0.643769, -0.90391, 0.352665, -0.901179, 0.266159, -0.968068, -0.615401, -0.388975, 0.939052, -0.116289, 0.107523, -0.0582711, 0.435172, 0.334675, 0.459711, 0.717436, 0.496627, -0.680175, -0.415066, 0.339848, 0.506004, -0.337808, -0.107218, -0.172496, 0.870638, 0.931872, -0.953884, 0.903042, 0.760078, 0.209727, -0.285384, -0.45514, 0.113194, 0.0756611, 0.0924435, -0.472863, 0.960609, -0.160385, -0.839445, 0.457097, 0.163348, 0.344867, -0.131619, 0.688715, -0.540827, 0.571259, -0.95587, 0.506164, -0.155839, 0.0789621, 0.756772, -0.662069, 0.242908, 0.460821, 0.177872, -0.289839, -0.640603, 0.702598, -0.506406, -0.568262, -0.0713716, 0.413792, 0.159673, -0.305208, 0.133816, -0.160254, 0.787323, -0.753244, 0.600721, 0.263186, -0.162387, 0.477962, -0.702951, -0.731036, -0.939481, -0.524519, 0.934072, -0.511637, -0.503499, 0.106236, -0.323684, 0.534444, -0.843745, 0.364171, 0.0370358, -0.168801, -0.404559, -0.814178, 0.91745, -0.334276, 0.66925, -0.801201, 0.156511, -0.427949, 0.379153, 0.818597, -0.649902, 0.427087, -0.586015, -0.559789, -0.833923, 0.0892409, -0.621251, 0.213826, 0.465509, 0.4704, 0.380261, 0.413067, 0.180822, 0.172866, 0.59614, 0.825575, 0.662916, -0.704381, -0.297631, 0.697778]}
diff --git a/tests/nnapi/specs/V1_0/conv_1_h3_w2_VALID.mod.py b/tests/nnapi/specs/V1_0/conv_1_h3_w2_VALID.mod.py
index 3a4a2a1aa..0e0974371 100644
--- a/tests/nnapi/specs/V1_0/conv_1_h3_w2_VALID.mod.py
+++ b/tests/nnapi/specs/V1_0/conv_1_h3_w2_VALID.mod.py
@@ -7,7 +7,7 @@ i2 = Input("op2", "TENSOR_FLOAT32", "{1, 8, 8, 3}") # input 0
i3 = Output("op3", "TENSOR_FLOAT32", "{1, 6, 7, 1}") # output 0
i0 = Parameter("op0", "TENSOR_FLOAT32", "{1, 3, 2, 3}", [-0.966213, -0.467474, -0.82203, -0.579455, 0.0278809, -0.79946, -0.684259, 0.563238, 0.37289, 0.738216, 0.386045, -0.917775, 0.184325, -0.270568, 0.82236, 0.0973683, -0.941308, -0.144706]) # parameters
i1 = Parameter("op1", "TENSOR_FLOAT32", "{1}", [0]) # parameters
-model = model.Conv(i2, i0, i1, i4, i5, i6, i7).To(i3)
+model = model.Operation("CONV_2D", i2, i0, i1, i4, i5, i6, i7).To(i3)
input0 = {i2: [-0.869931, 0.644628, -0.918393, 0.153672, 0.868562, -0.358177, -0.134931, -0.247565, 0.22174, -0.259157, -0.284296, -0.538065, 0.765559, 0.41986, -0.556241, 0.658494, 0.214355, -0.850169, -0.252893, -0.478935, 0.530526, -0.0700663, -0.988729, -0.303061, 0.150845, 0.829915, 0.476349, 0.406537, -0.355343, 0.757145, -0.356362, 0.800482, -0.713861, 0.210483, -0.634303, 0.718236, -0.752038, 0.457547, -0.550769, -0.551178, 0.446766, -0.227462, 0.216348, -0.852806, -0.351486, 0.55906, -0.668493, -0.303493, -0.363763, -0.162837, 0.0701012, 0.756097, -0.142269, 0.329724, -0.656317, -0.998086, -0.652949, -0.40316, -0.893682, 0.432744, 0.612362, -0.869588, -0.71327, -0.398092, -0.0423559, 0.436576, -0.925272, 0.176549, 0.822904, 0.096833, -0.296802, -0.427195, 0.031654, -0.254479, 0.244905, 0.0948254, 0.643769, -0.90391, 0.352665, -0.901179, 0.266159, -0.968068, -0.615401, -0.388975, 0.939052, -0.116289, 0.107523, -0.0582711, 0.435172, 0.334675, 0.459711, 0.717436, 0.496627, -0.680175, -0.415066, 0.339848, 0.506004, -0.337808, -0.107218, -0.172496, 0.870638, 0.931872, -0.953884, 0.903042, 0.760078, 0.209727, -0.285384, -0.45514, 0.113194, 0.0756611, 0.0924435, -0.472863, 0.960609, -0.160385, -0.839445, 0.457097, 0.163348, 0.344867, -0.131619, 0.688715, -0.540827, 0.571259, -0.95587, 0.506164, -0.155839, 0.0789621, 0.756772, -0.662069, 0.242908, 0.460821, 0.177872, -0.289839, -0.640603, 0.702598, -0.506406, -0.568262, -0.0713716, 0.413792, 0.159673, -0.305208, 0.133816, -0.160254, 0.787323, -0.753244, 0.600721, 0.263186, -0.162387, 0.477962, -0.702951, -0.731036, -0.939481, -0.524519, 0.934072, -0.511637, -0.503499, 0.106236, -0.323684, 0.534444, -0.843745, 0.364171, 0.0370358, -0.168801, -0.404559, -0.814178, 0.91745, -0.334276, 0.66925, -0.801201, 0.156511, -0.427949, 0.379153, 0.818597, -0.649902, 0.427087, -0.586015, -0.559789, -0.833923, 0.0892409, -0.621251, 0.213826, 0.465509, 0.4704, 0.380261, 0.413067, 0.180822, 0.172866, 0.59614, 0.825575, 0.662916, -0.704381, -0.297631, 0.697778]}
diff --git a/tests/nnapi/specs/V1_0/conv_3_h3_w2_SAME.mod.py b/tests/nnapi/specs/V1_0/conv_3_h3_w2_SAME.mod.py
index d7df3a34f..3ba4bad52 100644
--- a/tests/nnapi/specs/V1_0/conv_3_h3_w2_SAME.mod.py
+++ b/tests/nnapi/specs/V1_0/conv_3_h3_w2_SAME.mod.py
@@ -7,7 +7,7 @@ i2 = Input("op2", "TENSOR_FLOAT32", "{1, 8, 8, 3}") # input 0
i3 = Output("op3", "TENSOR_FLOAT32", "{1, 8, 8, 3}") # output 0
i0 = Parameter("op0", "TENSOR_FLOAT32", "{3, 3, 2, 3}", [-0.966213, -0.579455, -0.684259, 0.738216, 0.184325, 0.0973683, -0.176863, -0.23936, -0.000233404, 0.055546, -0.232658, -0.316404, -0.012904, 0.320705, -0.326657, -0.919674, 0.868081, -0.824608, -0.467474, 0.0278809, 0.563238, 0.386045, -0.270568, -0.941308, -0.779227, -0.261492, -0.774804, -0.79665, 0.22473, -0.414312, 0.685897, -0.327792, 0.77395, -0.714578, -0.972365, 0.0696099, -0.82203, -0.79946, 0.37289, -0.917775, 0.82236, -0.144706, -0.167188, 0.268062, 0.702641, -0.412223, 0.755759, 0.721547, -0.43637, -0.274905, -0.269165, 0.16102, 0.819857, -0.312008]) # parameters
i1 = Parameter("op1", "TENSOR_FLOAT32", "{3}", [0, 0, 0]) # parameters
-model = model.Conv(i2, i0, i1, i4, i5, i6, i7).To(i3)
+model = model.Operation("CONV_2D", i2, i0, i1, i4, i5, i6, i7).To(i3)
input0 = {i2: [-0.869931, 0.644628, -0.918393, 0.153672, 0.868562, -0.358177, -0.134931, -0.247565, 0.22174, -0.259157, -0.284296, -0.538065, 0.765559, 0.41986, -0.556241, 0.658494, 0.214355, -0.850169, -0.252893, -0.478935, 0.530526, -0.0700663, -0.988729, -0.303061, 0.150845, 0.829915, 0.476349, 0.406537, -0.355343, 0.757145, -0.356362, 0.800482, -0.713861, 0.210483, -0.634303, 0.718236, -0.752038, 0.457547, -0.550769, -0.551178, 0.446766, -0.227462, 0.216348, -0.852806, -0.351486, 0.55906, -0.668493, -0.303493, -0.363763, -0.162837, 0.0701012, 0.756097, -0.142269, 0.329724, -0.656317, -0.998086, -0.652949, -0.40316, -0.893682, 0.432744, 0.612362, -0.869588, -0.71327, -0.398092, -0.0423559, 0.436576, -0.925272, 0.176549, 0.822904, 0.096833, -0.296802, -0.427195, 0.031654, -0.254479, 0.244905, 0.0948254, 0.643769, -0.90391, 0.352665, -0.901179, 0.266159, -0.968068, -0.615401, -0.388975, 0.939052, -0.116289, 0.107523, -0.0582711, 0.435172, 0.334675, 0.459711, 0.717436, 0.496627, -0.680175, -0.415066, 0.339848, 0.506004, -0.337808, -0.107218, -0.172496, 0.870638, 0.931872, -0.953884, 0.903042, 0.760078, 0.209727, -0.285384, -0.45514, 0.113194, 0.0756611, 0.0924435, -0.472863, 0.960609, -0.160385, -0.839445, 0.457097, 0.163348, 0.344867, -0.131619, 0.688715, -0.540827, 0.571259, -0.95587, 0.506164, -0.155839, 0.0789621, 0.756772, -0.662069, 0.242908, 0.460821, 0.177872, -0.289839, -0.640603, 0.702598, -0.506406, -0.568262, -0.0713716, 0.413792, 0.159673, -0.305208, 0.133816, -0.160254, 0.787323, -0.753244, 0.600721, 0.263186, -0.162387, 0.477962, -0.702951, -0.731036, -0.939481, -0.524519, 0.934072, -0.511637, -0.503499, 0.106236, -0.323684, 0.534444, -0.843745, 0.364171, 0.0370358, -0.168801, -0.404559, -0.814178, 0.91745, -0.334276, 0.66925, -0.801201, 0.156511, -0.427949, 0.379153, 0.818597, -0.649902, 0.427087, -0.586015, -0.559789, -0.833923, 0.0892409, -0.621251, 0.213826, 0.465509, 0.4704, 0.380261, 0.413067, 0.180822, 0.172866, 0.59614, 0.825575, 0.662916, -0.704381, -0.297631, 0.697778]}
diff --git a/tests/nnapi/specs/V1_0/conv_3_h3_w2_VALID.mod.py b/tests/nnapi/specs/V1_0/conv_3_h3_w2_VALID.mod.py
index a7ab91f33..545e9fdc6 100644
--- a/tests/nnapi/specs/V1_0/conv_3_h3_w2_VALID.mod.py
+++ b/tests/nnapi/specs/V1_0/conv_3_h3_w2_VALID.mod.py
@@ -7,7 +7,7 @@ i2 = Input("op2", "TENSOR_FLOAT32", "{1, 8, 8, 3}") # input 0
i3 = Output("op3", "TENSOR_FLOAT32", "{1, 6, 7, 3}") # output 0
i0 = Parameter("op0", "TENSOR_FLOAT32", "{3, 3, 2, 3}", [-0.966213, -0.579455, -0.684259, 0.738216, 0.184325, 0.0973683, -0.176863, -0.23936, -0.000233404, 0.055546, -0.232658, -0.316404, -0.012904, 0.320705, -0.326657, -0.919674, 0.868081, -0.824608, -0.467474, 0.0278809, 0.563238, 0.386045, -0.270568, -0.941308, -0.779227, -0.261492, -0.774804, -0.79665, 0.22473, -0.414312, 0.685897, -0.327792, 0.77395, -0.714578, -0.972365, 0.0696099, -0.82203, -0.79946, 0.37289, -0.917775, 0.82236, -0.144706, -0.167188, 0.268062, 0.702641, -0.412223, 0.755759, 0.721547, -0.43637, -0.274905, -0.269165, 0.16102, 0.819857, -0.312008]) # parameters
i1 = Parameter("op1", "TENSOR_FLOAT32", "{3}", [0, 0, 0]) # parameters
-model = model.Conv(i2, i0, i1, i4, i5, i6, i7).To(i3)
+model = model.Operation("CONV_2D", i2, i0, i1, i4, i5, i6, i7).To(i3)
input0 = {i2: [-0.869931, 0.644628, -0.918393, 0.153672, 0.868562, -0.358177, -0.134931, -0.247565, 0.22174, -0.259157, -0.284296, -0.538065, 0.765559, 0.41986, -0.556241, 0.658494, 0.214355, -0.850169, -0.252893, -0.478935, 0.530526, -0.0700663, -0.988729, -0.303061, 0.150845, 0.829915, 0.476349, 0.406537, -0.355343, 0.757145, -0.356362, 0.800482, -0.713861, 0.210483, -0.634303, 0.718236, -0.752038, 0.457547, -0.550769, -0.551178, 0.446766, -0.227462, 0.216348, -0.852806, -0.351486, 0.55906, -0.668493, -0.303493, -0.363763, -0.162837, 0.0701012, 0.756097, -0.142269, 0.329724, -0.656317, -0.998086, -0.652949, -0.40316, -0.893682, 0.432744, 0.612362, -0.869588, -0.71327, -0.398092, -0.0423559, 0.436576, -0.925272, 0.176549, 0.822904, 0.096833, -0.296802, -0.427195, 0.031654, -0.254479, 0.244905, 0.0948254, 0.643769, -0.90391, 0.352665, -0.901179, 0.266159, -0.968068, -0.615401, -0.388975, 0.939052, -0.116289, 0.107523, -0.0582711, 0.435172, 0.334675, 0.459711, 0.717436, 0.496627, -0.680175, -0.415066, 0.339848, 0.506004, -0.337808, -0.107218, -0.172496, 0.870638, 0.931872, -0.953884, 0.903042, 0.760078, 0.209727, -0.285384, -0.45514, 0.113194, 0.0756611, 0.0924435, -0.472863, 0.960609, -0.160385, -0.839445, 0.457097, 0.163348, 0.344867, -0.131619, 0.688715, -0.540827, 0.571259, -0.95587, 0.506164, -0.155839, 0.0789621, 0.756772, -0.662069, 0.242908, 0.460821, 0.177872, -0.289839, -0.640603, 0.702598, -0.506406, -0.568262, -0.0713716, 0.413792, 0.159673, -0.305208, 0.133816, -0.160254, 0.787323, -0.753244, 0.600721, 0.263186, -0.162387, 0.477962, -0.702951, -0.731036, -0.939481, -0.524519, 0.934072, -0.511637, -0.503499, 0.106236, -0.323684, 0.534444, -0.843745, 0.364171, 0.0370358, -0.168801, -0.404559, -0.814178, 0.91745, -0.334276, 0.66925, -0.801201, 0.156511, -0.427949, 0.379153, 0.818597, -0.649902, 0.427087, -0.586015, -0.559789, -0.833923, 0.0892409, -0.621251, 0.213826, 0.465509, 0.4704, 0.380261, 0.413067, 0.180822, 0.172866, 0.59614, 0.825575, 0.662916, -0.704381, -0.297631, 0.697778]}
diff --git a/tests/nnapi/specs/V1_0/depthwise_conv.mod.py b/tests/nnapi/specs/V1_0/depthwise_conv.mod.py
index d88206fdd..5e7886d26 100644
--- a/tests/nnapi/specs/V1_0/depthwise_conv.mod.py
+++ b/tests/nnapi/specs/V1_0/depthwise_conv.mod.py
@@ -8,7 +8,7 @@ i2 = Input("op2", "TENSOR_FLOAT32", "{1, 8, 8, 3}") # input 0
i3 = Output("op3", "TENSOR_FLOAT32", "{1, 8, 8, 3}") # output 0
i0 = Parameter("op0", "TENSOR_FLOAT32", "{1, 1, 1, 3}", [-0.966213, -0.467474, -0.82203]) # parameters
i1 = Parameter("op1", "TENSOR_FLOAT32", "{3}", [0, 0, 0]) # parameters
-model = model.DepthWiseConv(i2, i0, i1, i4, i5, i6, i7, i8).To(i3)
+model = model.Operation("DEPTHWISE_CONV_2D", i2, i0, i1, i4, i5, i6, i7, i8).To(i3)
input0 = {i2: [-0.869931, 0.644628, -0.918393, 0.153672, 0.868562, -0.358177, -0.134931, -0.247565, 0.22174, -0.259157, -0.284296, -0.538065, 0.765559, 0.41986, -0.556241, 0.658494, 0.214355, -0.850169, -0.252893, -0.478935, 0.530526, -0.0700663, -0.988729, -0.303061, 0.150845, 0.829915, 0.476349, 0.406537, -0.355343, 0.757145, -0.356362, 0.800482, -0.713861, 0.210483, -0.634303, 0.718236, -0.752038, 0.457547, -0.550769, -0.551178, 0.446766, -0.227462, 0.216348, -0.852806, -0.351486, 0.55906, -0.668493, -0.303493, -0.363763, -0.162837, 0.0701012, 0.756097, -0.142269, 0.329724, -0.656317, -0.998086, -0.652949, -0.40316, -0.893682, 0.432744, 0.612362, -0.869588, -0.71327, -0.398092, -0.0423559, 0.436576, -0.925272, 0.176549, 0.822904, 0.096833, -0.296802, -0.427195, 0.031654, -0.254479, 0.244905, 0.0948254, 0.643769, -0.90391, 0.352665, -0.901179, 0.266159, -0.968068, -0.615401, -0.388975, 0.939052, -0.116289, 0.107523, -0.0582711, 0.435172, 0.334675, 0.459711, 0.717436, 0.496627, -0.680175, -0.415066, 0.339848, 0.506004, -0.337808, -0.107218, -0.172496, 0.870638, 0.931872, -0.953884, 0.903042, 0.760078, 0.209727, -0.285384, -0.45514, 0.113194, 0.0756611, 0.0924435, -0.472863, 0.960609, -0.160385, -0.839445, 0.457097, 0.163348, 0.344867, -0.131619, 0.688715, -0.540827, 0.571259, -0.95587, 0.506164, -0.155839, 0.0789621, 0.756772, -0.662069, 0.242908, 0.460821, 0.177872, -0.289839, -0.640603, 0.702598, -0.506406, -0.568262, -0.0713716, 0.413792, 0.159673, -0.305208, 0.133816, -0.160254, 0.787323, -0.753244, 0.600721, 0.263186, -0.162387, 0.477962, -0.702951, -0.731036, -0.939481, -0.524519, 0.934072, -0.511637, -0.503499, 0.106236, -0.323684, 0.534444, -0.843745, 0.364171, 0.0370358, -0.168801, -0.404559, -0.814178, 0.91745, -0.334276, 0.66925, -0.801201, 0.156511, -0.427949, 0.379153, 0.818597, -0.649902, 0.427087, -0.586015, -0.559789, -0.833923, 0.0892409, -0.621251, 0.213826, 0.465509, 0.4704, 0.380261, 0.413067, 0.180822, 0.172866, 0.59614, 0.825575, 0.662916, -0.704381, -0.297631, 0.697778]}
diff --git a/tests/nnapi/specs/V1_0/lstm3.mod.py b/tests/nnapi/specs/V1_0/lstm3.mod.py
index a9d2bf884..3abbaa001 100644
--- a/tests/nnapi/specs/V1_0/lstm3.mod.py
+++ b/tests/nnapi/specs/V1_0/lstm3.mod.py
@@ -60,7 +60,6 @@ output_state_out = Output("output_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_
cell_state_out = Output("cell_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
-# TODO: need support for more than one output
model = model.Operation("LSTM",
input,
diff --git a/tests/nnapi/specs/V1_0/lstm3_state.mod.py b/tests/nnapi/specs/V1_0/lstm3_state.mod.py
index 1fd91a91e..18a2d11f3 100644
--- a/tests/nnapi/specs/V1_0/lstm3_state.mod.py
+++ b/tests/nnapi/specs/V1_0/lstm3_state.mod.py
@@ -60,7 +60,6 @@ output_state_out = Output("output_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_
cell_state_out = Output("cell_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
-# TODO: need support for more than one output
model = model.Operation("LSTM",
input,
diff --git a/tests/nnapi/specs/V1_0/lstm3_state2.mod.py b/tests/nnapi/specs/V1_0/lstm3_state2.mod.py
index bc6ae7e86..38be40645 100644
--- a/tests/nnapi/specs/V1_0/lstm3_state2.mod.py
+++ b/tests/nnapi/specs/V1_0/lstm3_state2.mod.py
@@ -60,7 +60,6 @@ output_state_out = Output("output_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_
cell_state_out = Output("cell_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
-# TODO: need support for more than one output
model = model.Operation("LSTM",
input,
diff --git a/tests/nnapi/specs/V1_0/lstm3_state3.mod.py b/tests/nnapi/specs/V1_0/lstm3_state3.mod.py
index 40262c251..a0cd9df58 100644
--- a/tests/nnapi/specs/V1_0/lstm3_state3.mod.py
+++ b/tests/nnapi/specs/V1_0/lstm3_state3.mod.py
@@ -60,7 +60,6 @@ output_state_out = IgnoredOutput("output_state_out", "TENSOR_FLOAT32", "{%d, %d}
cell_state_out = IgnoredOutput("cell_state_out", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_cell))
output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (n_batch, n_output))
-# TODO: need support for more than one output
model = model.Operation("LSTM",
input,
diff --git a/tests/nnapi/specs/V1_0/mobilenet_224_gender_basic_fixed.mod.py b/tests/nnapi/specs/V1_0/mobilenet_224_gender_basic_fixed.mod.py
index 390ccc35e..c08bb310b 100644
--- a/tests/nnapi/specs/V1_0/mobilenet_224_gender_basic_fixed.mod.py
+++ b/tests/nnapi/specs/V1_0/mobilenet_224_gender_basic_fixed.mod.py
@@ -1,3 +1,5 @@
+# Commented out: this test is too large; we can test mobilenet using frameworktest
+"""
# Passing weights via shared memory
Configuration.use_shm_for_weights = True
model = Model()
@@ -219,36 +221,36 @@ i81 = Internal("op81", "TENSOR_FLOAT32", "{1, 1, 1, 256}") # intermediate result
i82 = Internal("op82", "TENSOR_FLOAT32", "{1, 1, 1, 11}") # intermediate result
i83 = Parameter("op83", "TENSOR_FLOAT32", "{11}", [-0.0293549, -0.0835053, -0.228576, -0.0980885, -0.0370638, -0.195879, -0.0452258, 0.0371641, -0.0585841, -0.0984154, -0.141293]) # parameters
i84 = Parameter("op84", "TENSOR_FLOAT32", "{11, 1, 1, 256}", [0.0984852, 0.672424, 0.392549, -0.262182, 0.303914, -0.0118188, 0.027943, 0.0164078, 0.200583, -0.808626, 0.234772, -0.0253635, -0.198519, -0.176824, -0.580674, 0.0681573, -0.0134279, 0.172173, -0.284882, -0.0895141, 0.0142356, -0.0479431, 0.0736678, 0.00298977, 0.152355, -0.0370715, 0.463201, 0.0146613, 0.0971624, -0.0791196, 0.556621, -0.00950762, 0.0160531, 0.091037, 0.376353, -0.0996081, -0.0418334, -0.427482, -0.202679, -0.197079, 0.021873, -0.105617, 0.36447, 0.389277, 0.0429815, 0.0480496, -0.170086, -0.191548, -0.237921, 0.155838, -0.100796, 0.0539355, 0.103154, 0.0441985, -0.20672, 0.358565, -0.105794, -0.635905, 0.193301, 0.112419, -0.184668, 0.157954, -0.301095, -0.153072, -0.0535615, -0.0661999, -0.197056, -0.0835003, -0.074932, -0.111766, -0.356266, 0.649165, -0.0527003, -0.0597135, 0.109839, -0.270809, 0.0333183, -0.211454, 0.0594729, -0.166949, 0.21057, 0.224925, -0.222835, -0.0178217, 0.127268, 0.229248, 0.262987, 0.0318244, 0.293201, -0.361254, -0.0624992, -0.0696259, 0.0456531, -0.0287401, 0.0863351, -0.106142, 0.81137, 0.305728, 0.398482, -0.0190696, -0.133965, -0.223757, -0.153242, -0.261303, 0.111363, -0.113733, 0.0028724, -0.0878969, 0.0498853, -0.000613516, -0.0819123, -0.0154599, -0.0938842, -0.108295, 0.340323, -0.139579, -0.117066, 0.145283, -0.106254, 0.201248, -0.152479, 0.162457, -0.0751263, 0.00127508, -0.0218281, 0.126278, -0.100075, 0.426783, -0.108719, 0.207569, -0.327427, 0.277309, 0.0404061, -0.334901, 0.154047, -0.287619, 0.0161922, -0.00054208, -0.233675, 0.564603, 0.201628, 0.0510375, -0.16502, -0.0155493, -0.125359, -0.0996153, 0.0133961, -0.492208, 0.109118, -0.136327, 0.0252329, 0.0556799, -0.196804, -0.0612012, -0.0392273, 0.133385, 0.253763, -0.208136, -0.00507434, -0.0584744, 0.0855089, -0.00321895, -0.209376, 0.0618401, 0.0129248, -0.130721, -0.168413, 0.122652, 0.0927544, -0.180775, -0.0463842, -0.626248, -0.00596579, 0.0822374, -0.254325, -0.361624, 0.778701, -0.0705549, 0.40832, 0.0932269, 0.10348, 0.258843, -0.117135, 0.131713, -0.457018, -0.364692, 0.0741725, 0.168267, 0.0904773, -0.333243, 0.18358, -0.0407786, -0.0115824, 0.304328, 0.177285, 0.206312, -0.503914, 0.310439, 0.533919, 0.0925376, 0.449889, -0.45417, 0.89017, -0.00580558, 0.317744, 0.0176692, -0.0267303, -0.0657997, -0.333455, -0.0895455, -0.0203959, -0.329956, 0.0542947, -0.03533, 0.0496151, 0.145015, 0.135449, -0.239986, -0.442413, -0.0922021, 0.396803, 0.0695849, -0.00921835, 0.405834, 0.477558, 0.08952, 0.101425, -0.0264703, -0.124621, 0.070554, -0.101953, 0.224768, 0.021384, 0.293433, -0.297231, 0.0841252, 0.0290684, -0.211267, -0.116215, 0.433678, -0.626231, -0.139838, 0.0290375, -0.24486, 0.282119, -0.486426, -0.402424, -0.561959, -0.450933, 0.0501238, -0.194682, -0.231145, -0.210372, -0.0802564, -0.170723, -0.248902, -0.0122576, 0.0776341, 0.197615, 0.094212, 0.0318287, -0.237544, 0.135516, -0.537321, -0.0906906, 0.172587, 0.179816, 0.0792088, 0.354531, 0.0801259, 0.0145845, -0.14874, 0.0367363, -0.0733148, -0.125755, -0.252037, -0.101672, -0.14809, -0.188341, -0.264003, -0.201581, -0.0605842, 0.0142779, -0.322517, -0.130978, 0.301363, -0.276394, 0.0248554, -0.168732, 0.158651, 0.150037, -0.0472578, 0.241238, -0.109832, -0.500172, -0.0574687, 0.143137, 0.177313, 0.0489008, 0.24142, -0.0742049, -0.103464, -0.0383113, -0.0148622, -0.101849, 0.0425005, 0.0543708, 0.0710147, 0.169901, 0.304119, 0.180413, -0.330647, -0.250029, 0.0651902, 0.173465, -0.475872, 0.393697, 0.147345, -0.00802343, -0.0545821, -0.119, 
-0.0282713, 0.0414947, 0.0618215, -0.132909, 0.480818, -0.124287, -0.0484199, -0.344362, 0.071471, 0.267047, -0.279627, -0.289336, 0.0609794, 0.339502, -0.0956702, -0.361749, -0.0153208, -0.102628, 0.0936787, -0.130392, 0.348396, 0.200636, -0.249164, -0.177583, -0.0716032, 0.118703, 0.123365, -0.0366422, 0.231096, 0.0022177, 0.128202, 0.222367, -0.176409, -0.153065, -0.0287899, -0.355792, -0.543125, 0.177245, 0.116598, 0.0451388, -0.0286715, -0.174033, 0.476808, 0.298325, -0.0593149, -0.0491401, 0.0263619, 0.0565123, 0.0500395, -0.40961, -0.0481743, -0.0744737, -0.050528, -0.428685, -0.0457881, -0.105794, 0.0951161, -0.299268, -0.229566, -0.206985, -0.0780657, -0.0322681, 0.266195, -0.0781984, -0.598814, -0.280207, 0.0516518, -0.0447187, 0.0980521, 0.0216666, 0.038809, 0.147272, -0.357397, 0.0504251, 0.126596, -0.0935991, -0.142778, 0.0864683, -0.116768, -0.164657, -0.380078, 0.00184015, -0.0684899, -0.134349, 0.184285, -0.281853, -0.185581, 0.347765, 0.301739, -0.17311, -0.0586592, -0.253355, 0.135704, -0.025141, -0.398732, 0.176819, 0.164295, -0.0964961, 0.235867, -0.162969, -0.365092, 0.0342, 0.305977, 0.192868, -0.150942, 0.132645, 0.220341, -0.158242, -0.168888, 0.103491, -0.1672, 0.0127892, -0.0176947, 0.230234, -0.129157, -0.319789, -0.188887, 0.469657, 0.0599872, 0.173128, 0.207658, -0.257826, 0.422512, 0.0304435, -0.0700446, 0.00292699, -0.254277, -0.0987592, 0.0906241, -0.234816, 0.030083, -0.00973596, 0.120037, -0.317601, -0.12708, 0.102184, 0.0740557, 0.191923, 0.215419, 0.090792, -0.416807, -0.211088, -0.0667573, -0.042666, 0.00698668, -0.187608, 0.11397, 0.0282127, -0.0646227, -0.0786383, 0.338181, -0.158486, -0.0404435, -0.148313, 0.129857, 0.036822, 0.214085, 0.0271965, 0.0712011, -0.0142654, 0.21793, -0.101845, -0.0134659, -0.386899, -0.253225, -0.201138, -0.168, -0.111886, 0.149919, -0.252716, -0.312013, -0.494531, 0.20132, 0.1455, -0.0390248, -0.2497, 0.0187322, 0.212352, 0.176346, -0.0186768, -0.0587664, 0.140535, 0.130711, -0.048937, -0.0333832, 0.146999, -0.0536035, -0.210655, 0.277771, 0.136683, -0.458041, 0.106529, -0.152398, -0.0336699, 0.151721, -0.0533765, -0.168961, 0.175815, -0.24888, 0.0907924, -0.0133408, 0.175644, -0.0246879, -0.00687254, 0.185182, -0.256385, -0.163355, -0.256984, -0.315761, -0.181824, -0.0306672, 0.152588, -0.0713595, -0.0721906, -0.332328, -0.322698, -0.00929737, 0.0818944, 0.0742352, -0.166805, 0.0944738, -0.167636, 0.0871255, 0.0792785, 0.0354259, 0.293364, 0.215322, 0.272799, -0.0492312, -0.269483, -0.220346, -0.0881883, -0.105395, 0.170322, 0.0396378, 0.0702321, 0.0164758, -0.0229642, -0.120222, -0.00534489, 0.138123, -0.141178, 0.00600586, 0.0114309, 0.160046, -0.0782422, -0.221657, -0.222359, -0.0160572, -0.0427344, -0.0939852, 0.19013, 0.128755, 0.0826387, 0.0959137, -0.121338, 0.116419, -0.0815084, -0.148231, -0.102396, -0.302046, -0.0136386, 0.146457, -0.273797, -0.0766018, 0.103427, -0.0941844, -0.236219, -0.106905, 0.188707, -0.119065, -0.109619, -0.376718, -0.250552, -0.119213, -0.0698239, 0.0548951, -0.0984231, -0.274015, 0.0116218, -0.0560431, -0.0176495, 0.106143, 0.191658, -0.291245, 0.198666, -0.1415, 0.121305, 0.00787936, -0.161106, -0.0559996, -0.025235, -0.227444, 0.124586, 0.153714, 0.0339968, -0.0791643, -0.204395, -0.139891, -0.136988, -0.182275, 0.059441, -0.135392, -0.0206536, -0.177236, -0.0461415, 0.0707632, 0.279827, -0.00538458, -0.0227107, -0.0780397, 0.0654234, -0.00893195, -0.111956, -0.298613, -0.35016, 0.0515563, -0.257037, 0.139683, -0.0568245, -0.18912, 0.054686, 0.230304, 0.0682762, -0.104554, 
-0.267018, -0.00695182, -0.42745, -0.118246, 0.240312, -0.0283745, -0.0410208, -0.204045, 0.0536799, 0.158019, -0.217282, -0.255996, -0.130733, -0.0754242, -0.205957, -0.042236, -0.237091, -0.0547223, 0.318243, 0.114416, -0.135642, -0.0316242, -0.347453, 0.101281, 0.012845, -0.212307, 0.135502, -0.217902, -0.0520036, -0.169676, 0.0155753, -0.378887, -0.120698, 0.278682, -0.208085, 0.0188473, -0.167479, 0.3823, -0.262327, 0.0653896, 0.0837105, -0.175588, -0.172008, 0.279217, 0.109674, -0.0610411, -0.261709, -0.12329, -0.214598, 0.0449085, 0.0995378, 0.123743, -0.20637, 0.0336271, 0.179009, -0.103686, -0.0319235, 0.0991055, -0.15149, 0.11167, -0.0458526, -0.216373, 0.0944096, 0.257391, -0.138348, -0.0792016, 0.236858, -0.177544, 0.00179313, -0.0475954, -0.325425, -0.443611, 0.269018, 0.0823181, -0.189893, -0.00310759, 0.38809, -0.0297613, -0.0772569, 0.117555, -0.0146545, 0.24652, -0.124915, -0.0226053, -0.00351846, 0.123489, 0.374272, 0.00411916, -0.0530559, -0.459548, -0.068397, 0.351112, 0.20717, -0.169705, -0.191568, -0.0149611, -0.200327, -0.0366789, -0.000831896, 0.0329813, 0.0928899, -0.217083, -0.1015, -0.108356, -0.155276, -0.224902, -0.161009, -0.195741, -0.196345, 0.0696936, -0.0903938, 0.0346839, 0.0342342, 0.108802, 0.0224264, -0.116966, -0.0868056, 0.41173, -0.139741, 0.0816925, 0.0206459, -0.0857387, -0.0889723, 0.0252684, 0.122225, 0.281325, -0.0975601, -0.0890313, -0.202703, -0.232747, -0.16356, -0.109103, -0.000627448, -0.281988, 0.133017, 0.199669, -0.305566, -0.298914, -0.120265, -0.0757179, -0.298619, 0.183222, -0.142981, 0.0896671, 0.175904, 0.0175519, -0.16538, -0.0520677, -0.0670482, -0.00336189, -0.223379, -0.0609024, -0.27571, -0.0763393, 0.295597, 0.00951529, 0.127656, 0.323394, 0.321615, 0.184786, 0.120165, 0.0270615, 0.232585, -0.378135, 0.00705762, -0.152686, -0.25289, 0.0996134, 0.0515323, 0.0147273, -0.746546, -0.161453, 0.0907721, 0.015299, -0.0842891, -0.0432424, -0.523789, -0.271467, 0.0367782, -0.24899, 0.207861, 0.0755162, 0.173391, 0.222453, -0.113516, -0.24137, 0.100824, -0.0606065, 0.00548546, 0.0558509, -0.0575758, 0.245029, 0.178345, 0.143839, -0.244105, -0.172561, -0.338056, -0.127348, 0.31021, -0.115489, -0.0672434, -0.0625748, -0.180578, -0.227379, 0.11236, 0.10313, 0.166569, 0.158167, -0.0638876, 0.161796, 0.0371649, -0.328319, -0.336786, -0.211983, 0.0293737, -0.115773, 0.00937545, -0.246018, 0.35231, 0.195708, 0.0478146, -0.103948, -0.106301, 0.211148, 0.379093, 0.416716, -0.174341, -0.0187881, -0.510292, 0.0914475, 0.0227487, -0.100022, -0.141782, -0.0911218, 0.0475971, -0.244332, -0.0995312, -0.209683, 0.0118146, -0.333827, 0.0784702, 0.152256, -0.0219116, 0.138452, -0.0222356, -0.0565779, 0.158486, -0.24482, -0.00680468, 0.197839, 0.0154492, -0.00997484, -0.221046, -0.0717462, -0.174674, -0.121365, -0.225961, 0.0249583, -0.012674, -0.0461503, 0.326105, 0.159991, 0.0172039, -0.33672, -0.0282964, 0.340149, -0.102354, -0.32463, 0.0968813, 0.142316, -0.0457009, -0.449412, 0.010723, 0.234789, -0.0556804, 0.13699, 0.346469, 0.0485624, 0.158279, -0.064993, -0.103656, -0.058024, -0.160934, -0.154483, -0.208516, 0.171658, -0.105681, -0.0694062, -0.430509, 0.0281458, -0.145734, 0.00672611, -0.263346, 0.398998, -0.107815, 0.0612669, 0.229766, -0.0120696, 0.221093, -0.172262, 0.0251312, -0.0730561, -0.316371, 0.188185, -0.046221, -0.199885, 0.119867, 0.218638, -0.329465, -0.324384, -0.141421, 0.0441414, 0.0694141, 0.255176, 0.0668514, -0.0346173, -0.00232405, 0.194615, 0.281005, -0.0199741, 0.035436, 0.130112, -0.0913306, 0.329646, -0.0752686, 0.109595, 
0.0791733, -0.0692778, 0.305223, -0.203791, 0.124741, 0.235692, 0.0366247, 0.0102351, 0.0518547, -0.0949171, 0.149521, -0.0588182, -0.0129089, -0.232551, -0.0145967, -0.0175136, -0.0871548, 0.0947253, 0.0243044, -0.0628619, -0.0492656, -0.299999, -0.217482, -0.140209, -0.0874081, 0.0812857, 0.0233994, -0.389155, 0.200308, -0.131029, 0.299059, -0.110117, -0.289113, -0.0365339, -0.233167, -0.108743, -0.261932, -0.159673, -0.106053, 0.199852, -0.106121, 0.0759607, 0.472064, -0.163932, -0.31763, 0.0104898, -0.0210451, -0.0787518, 0.155917, 0.102614, -0.0425018, 0.104758, 0.0857415, -0.155914, 0.239264, -0.144245, 0.0138479, -0.196582, -0.225119, 0.119061, 0.0667646, 0.0661826, -0.190163, 0.146226, 0.0857013, -0.39394, 0.00735058, 0.17735, 0.244409, 0.06301, 0.169556, -0.178062, 0.12862, 0.416925, 0.0967157, -0.00742805, -0.000430865, 0.151077, -0.135911, -0.259045, -0.367174, -0.13922, 0.23333, -0.219153, -0.101108, -0.108457, -0.0457349, -0.0666834, 0.222968, 0.0223704, 0.0866147, 0.0902093, 0.141006, 0.230202, 0.0586954, 0.26749, 0.0443342, 0.424975, -0.159726, -0.16713, -0.10332, 0.126135, 0.125221, 0.220837, -0.121812, -0.20649, 0.161173, -0.0608088, 0.751833, 0.177478, -0.107548, 0.0103489, -0.212986, 0.177713, -0.353158, -0.0872167, 0.126602, 0.0343864, 0.0116791, 0.0520713, 0.00361525, 0.194245, -0.114742, 0.020037, -0.114726, 0.126897, 0.039019, 0.445555, -0.0193091, 0.0637067, -0.128501, -0.0345904, 0.0988956, 0.178154, -0.0259671, -0.0257689, -0.091025, 0.0684302, 0.131971, 0.0459931, 0.278118, -0.0376653, -0.156248, -0.0789752, -0.160455, 0.353474, 0.0503084, -0.194132, 0.124681, -0.0915903, 0.117273, 0.0232574, -0.0337332, 0.0175596, -0.203004, 0.132872, -0.200533, 0.111507, 0.452312, 0.0770053, 0.201455, -0.267448, 0.0539831, -0.187271, -0.0896206, -0.0906231, 0.174122, 0.00151794, -0.44301, -0.038296, -0.179995, -0.0717158, -0.136493, -0.163935, -0.0208884, 0.361374, 0.219308, -0.0691815, 0.20319, -0.0567725, 0.272091, 0.228685, 0.0701021, -0.122392, -0.280011, 0.0584825, -0.054271, 0.00700558, 0.0727541, 0.0566045, -0.197892, 0.024467, -0.192888, -0.0819263, -0.0201281, 0.248612, 0.0373216, 0.0864792, 0.283391, 0.189835, 0.0781828, -0.0364776, -0.00516293, -0.136433, -0.0563264, 0.184467, -0.103843, 0.143026, 0.153189, -0.0523581, 0.213201, 0.144222, -0.368817, 0.150695, 0.0357488, 0.44351, -0.167891, 0.289154, -0.227813, -0.321075, 0.0209248, 0.00428332, 0.0969976, -0.108528, 0.0284129, 0.0762366, 0.107821, 0.119178, 0.213134, -0.061735, -0.172152, 0.161251, -0.0093495, 0.32946, 0.219039, -0.287137, -0.0450728, -0.0452836, -0.212494, -0.107495, -0.188338, 0.0459348, -0.0377559, -0.0839975, -0.00428969, -0.0232576, 0.0289588, 0.164926, -0.0425852, -0.0543849, 0.11673, 0.158114, 0.159165, 0.0941762, -0.0546047, 0.237165, -0.0486095, -0.146102, -0.196763, -0.300198, 0.0103576, -0.309314, -0.122579, -0.147076, -0.252579, -0.00101733, -0.288208, -0.22112, 0.311517, -0.112453, 0.129476, -0.324617, -0.122931, -0.123137, 0.000923043, -0.117103, 0.0235433, -0.271816, 0.141558, -0.057682, -0.120304, -0.106198, 0.0265892, 0.254805, 0.173984, -0.266907, 0.0103511, -0.0901396, -0.164973, -0.226945, 0.0137655, 0.0133529, -0.151525, 0.256784, 0.132003, 0.24828, -0.0647662, 0.143638, 0.0600663, -0.18841, -0.0538587, 0.293896, -0.103811, -0.389949, 0.073149, 0.102529, 0.00501293, 0.315232, 0.231291, -0.176493, -0.140862, -0.133106, 0.0161411, -0.210105, -0.125995, -0.0174128, 0.00283163, -0.16739, -0.00931349, -0.26984, -0.315777, -0.248987, -0.144968, 0.166966, 0.169746, -0.220713, -0.0312972, 
0.156324, -0.0407818, -0.139328, -0.440265, -0.0850991, 0.188168, 0.106694, 0.154731, 0.159212, -0.200953, -0.037807, 0.36218, -0.123355, 0.396598, -0.036044, -0.071492, 0.189546, -0.115796, -0.0827317, -0.0544022, -0.222727, 0.0347514, -0.0295377, 0.101372, -0.0471416, 0.218466, -0.0403298, -0.0743297, -0.0607741, -0.0177818, -0.0976377, 0.182365, -0.26278, 0.0619466, 0.335466, -0.039433, -0.214658, -0.00413142, 0.118605, -0.0871774, -0.013047, -0.0139049, -0.0566686, -0.0765434, -0.0230406, -0.10839, -0.164259, -0.110342, -0.0567072, 0.0359454, 0.161352, -0.271192, 0.0673184, -0.0400687, -0.0291176, -0.0505437, -0.167017, -0.244246, 0.0127467, -0.188325, -0.171548, 0.0819252, -0.184143, -0.0280647, -0.175439, -0.0298673, 0.0928547, -0.114129, 0.160686, 0.124866, -0.0799349, -0.0461555, -0.0569828, -0.07544, -0.254674, 0.200119, 0.395232, -0.104755, -0.0705698, -0.168159, -0.363371, -0.28949, -0.157786, 0.0803677, 0.253256, 0.183266, -0.098531, -0.217913, -0.277753, -0.0412087, 0.0929791, 0.0416587, -0.393095, -0.194569, 0.115027, 0.00374004, -0.230992, 0.178052, 0.11554, -0.112156, -0.136296, 0.147941, 0.160641, -0.0988691, -0.156255, -0.183889, -0.198891, 0.0487718, -0.10064, 0.0618672, 0.129453, 0.245253, -0.0609817, -0.0423283, 0.209125, -0.00764558, -0.207093, 0.090427, 0.344761, -0.210035, 0.0190305, 0.177226, -0.478754, 0.102217, -0.0815951, 0.184152, -0.0708748, -0.288034, 0.212553, -0.00799922, 0.0402337, -0.0634731, -0.0157662, 0.0380505, 0.297157, -0.102219, 0.270945, -0.0364033, -0.223053, -0.313967, -0.256362, 0.00947424, 0.1584, 0.0508195, 0.127063, 0.161099, -0.176547, -0.06178, 0.28597, 0.0661753, 0.115497, -0.266217, 0.207641, 0.288968, -0.147556, 0.00127605, 0.25902, 0.0888035, -0.172818, 0.0106958, -0.259761, -0.0210704, 0.11259, 0.118585, -0.131654, 0.0889418, -0.141959, 0.0686276, 0.119914, -0.315549, -0.106624, 0.356014, 0.0856996, -0.121974, -0.0188067, -0.150179, -0.0971979, -0.15594, 0.15098, -0.111329, -0.258716, -0.390928, 0.105128, -0.170122, -0.114675, -0.119159, 0.0893049, 0.0829629, -0.174787, -0.020651, 0.059119, -0.120192, -0.192243, 0.22854, 0.0524963, -0.17855, 0.129937, 0.0181097, 0.151171, -0.104886, -0.195503, 0.166139, -0.132779, -0.0952646, -0.238117, -0.120478, 0.250843, 0.0198936, -0.16349, 0.00793157, -0.139775, 0.0621653, 0.102649, 0.0159358, -0.173693, 0.000424589, 0.0499097, -0.213681, 0.000829991, 0.0470139, -0.104087, -0.104971, 0.154429, -0.0514045, 0.021679, 0.0637851, 0.0263575, -0.0773971, 0.0792207, 0.0289109, -0.190421, -0.114429, -0.0980095, 0.0697401, -0.128251, 0.0884518, 0.215688, -0.503879, -0.0634976, -0.0256412, 0.26015, -0.082886, 0.0134682, -0.1982, 0.203755, 0.237095, -0.178199, -0.110421, -0.123333, 0.0505219, 0.0872408, 0.134674, -0.151414, -0.20904, 0.0162698, -0.0281258, -0.0696107, 0.0384256, -0.316446, -0.0999238, -0.0215575, -0.16317, -0.422117, -0.401993, 0.0318225, 0.179985, 0.0327708, 0.237595, 0.00156168, 0.190076, 0.0242173, -0.149916, -0.0292071, -0.0634601, -0.353369, 0.191598, 0.268846, 0.0919142, -0.0838139, 0.041469, 0.195228, -0.304184, -0.0524774, 0.0257366, -0.0669865, 0.0712212, -0.165418, -0.0485386, 0.135066, 0.178966, -0.315931, -0.160149, 0.198644, 0.117106, -0.130927, -0.254406, -0.151422, 0.0451171, 0.0421164, -0.120035, 0.0517401, 0.0150269, 0.0749926, 0.268662, -0.213943, -0.0568393, 0.122747, 0.154528, -0.0203424, -0.0819281, -0.201227, 0.155029, -0.285458, -0.081893, 0.141846, 0.12811, 0.17107, -0.262672, -0.112772, -0.186101, -0.257387, -0.169401, -0.263488, 0.370405, -0.462936, -0.188147, 
-0.332351, 0.0125391, 0.215156, -0.513405, -0.289543, -0.443262, -0.0851796, -0.157583, -0.22628, 0.0640168, 0.0691075, 0.169624, -0.0885214, 0.0678881, -0.178388, 0.11724, -0.0459048, 0.0283356, 0.135743, 0.21108, 0.197132, -0.298021, -0.127577, -0.0454851, -0.295987, -0.113867, 0.0862119, -0.0201072, -0.290276, 0.0147507, -0.247042, 0.420167, -0.376847, 0.203432, -0.158043, 0.0810597, -0.566199, 0.218187, -0.318247, -0.400209, -0.219316, -0.0448023, -0.357235, -0.26102, -0.303588, 0.00072887, -0.205802, -0.175228, -0.0968084, -0.0754828, 0.047413, 0.131296, -0.112247, 0.183774, 0.0840453, -0.0239575, 0.0597386, 0.0678879, 0.208753, -0.381256, 0.0543436, 0.0230677, -0.275275, 0.197361, 0.318349, 0.230976, -0.0475114, 0.0923948, 0.270554, 0.0193927, -0.0845898, -0.074267, -0.185875, 0.329959, -0.00671641, -0.19907, -0.208328, 0.089362, 0.0418336, -0.054819, 0.138547, 0.318673, 0.300046, -0.149823, -0.146389, -0.178329, 0.260826, -0.0446269, 0.22329, 0.0233915, -0.408598, -0.210239, -0.0839846, -0.210073, -0.203917, 0.333065, 0.0654963, -0.110438, 0.0976637, -0.171706, -0.0396424, 0.196927, 0.107167, -0.526091, -0.272819, -0.0621517, -0.360691, -0.0803204, -0.0894648, -0.215345, 0.0738301, -0.165395, -0.505362, -0.510371, 0.495546, 0.281085, -0.349988, -0.102217, 0.29955, 0.101695, 0.216987, 0.220804, -0.264158, 0.208857, 0.490646, -0.235616, 0.0697848, -0.0828848, -0.0676367, -0.137579, 0.0101326, -0.0646971, -0.245946, -0.0958766, -0.274682, -0.467907, 0.0970127, -0.254426, 0.03253, 0.0122821, -0.0339391, -0.364834, 0.164962, -0.180429, -0.378582, -0.00960021, -0.228418, -0.0264938, 0.0259812, -0.295185, -0.357585, -0.380096, 0.0525056, -0.233331, 0.13387, 0.105961, 0.243387, 0.258494, 0.0371437, 0.0632561, 0.110992, -0.208983, -0.185678, 0.292418, 0.0286353, -0.00408131, 0.102217, -0.136994, 0.0622825, 0.395963, -0.348133, -0.223302, 0.273627, -0.193556, 0.338264, -0.159462, -0.491361, 0.161778, 0.156135, 0.0641617, 0.0999903, -0.529532, -0.285966, -0.135576, 0.236579, -0.130519, -0.0764042, 0.493032, -0.0883978, 0.150384, 0.106229, 0.02975, 0.318695, 0.265394, 0.130223, -0.0455514, -0.115114, 0.107133, -0.250837, -0.0966183, -0.123644, 0.342727, -0.0986773, -0.0127951, -0.434297, -0.0685123, 0.0869741, -0.269507, 0.396272, 0.305987, 0.145169, -0.250147, 0.0425825, -0.27173, -0.0943471, -0.401917, -0.0518213, 0.220465, -0.00776957, -0.308669, 0.151246, 0.040435, -0.246938, 0.161326, -0.657021, -0.029663, -0.156154, -0.0231731, -0.0567502, -0.149723, -0.157589, -0.0150168, 0.143093, 0.0119803, -0.282194, 0.00609295, 0.133509, -0.238658, 0.469585, -0.15437, 0.123749, -0.438739, -0.235357, 0.196981, -0.178078, 0.179464, -0.360465, 0.146581, -0.0722637, -0.359168, -0.0213761, -0.0719016, 0.228349, 0.00872679, -0.0720084, 0.0129347, -0.0606057, 0.209901, 0.261428, 0.318637, 0.0668506, 0.262152, -0.188527, 0.017398, 0.238802, -0.119243, -0.335925, -0.0708997, 0.0131007, -0.183616, 0.139393, 0.229401, -0.0356139, 0.117969, -0.0359544, -0.0976415, -0.261919, -0.132652, 0.0511542, 0.0250922, -0.202336, 0.156581, -0.21006, -0.164616, 0.49608, -0.143283, 0.0167009, 0.0382558, -0.192059, -0.0298086, 0.16408, 0.0327906, -0.0112998, 0.107964, -0.805638, 0.341425, 0.104876, -0.379418, -0.16812, 0.0873235, -0.591176, 0.347932, -0.092094, -0.0951583, -0.079231, -0.102, 0.430467, -0.0629909, 0.103386, -0.394243, 0.0921294, -0.303268, -0.0878409, 0.0222568, 0.177541, 0.05269, -0.245371, -0.394972, 0.169095, -0.0322228, 0.0854907, -0.277685, 0.169834, -0.157112, -0.125601, -0.123642, 0.287326, -0.11461, 
-0.0400871, 0.0935002, -0.239499, -0.00406349, 0.116467, 0.195647, 0.0169376, 0.108949, -0.256211, 0.199251, -0.22503, 0.183724, -0.0459538, -0.0573185, -0.135267, -0.17563, -0.105615, -0.216777, 0.136895, -0.131041, 0.143448, 0.116321, 0.341659, 0.04663, -0.138582, 0.113484, 0.000281706, 0.183075, -0.205364, 0.217528, -0.0325774, -0.0481017, -0.00686094, -0.13989, 0.0995296, -0.476637, 0.120914, 0.178213, 0.11095, -0.154424, 0.169363, 0.288232, 0.105104, 0.440652, 0.0404736, -0.163574, -0.0724218, -0.174028, 0.137715, 0.255176, -0.133188, -0.10359, -0.150963, -0.0850369, 0.162774, -0.00694466, -0.523244, -0.400547, -0.11478, 0.0923003, 0.00922158, 0.165169, 0.114364, 0.396211, 0.0621255, 0.413189, 0.0759307, -0.148507, 0.243803, 0.066523, -0.0649491, 0.0867938, 0.134912, -0.44741, 0.133082, 0.0237098, -0.327549, -0.0172026, -0.104394, -0.204443, 0.0804548, -0.25669, -0.280141, 0.184742, -0.182915, -0.301567, -0.132653, -0.362342, -0.0867399, -0.248574, 0.018783, -0.0144377, -0.193732, -0.0568637, 0.0212203, 0.145462, -0.04467, 0.188485, -0.0192423, -0.162427, -0.431459, -0.316196, -0.0197834, 0.142554, 0.161446, -0.204556, 0.10123, 0.136505, -0.0421437, 0.0382004, -0.0105015, 0.26352, 0.128504, 0.220373, -0.0459283, -0.0794771, 0.126873, 0.102329, 0.160555, -0.344226, 0.11844, -0.152884, -0.369259, -0.732194, -0.285659, 0.27297, 0.0434638, -0.115029, -0.178296, -0.010171, -0.108856, 0.243398, -0.120003, 0.0617609, -0.0377697, 0.0882623, 0.317397, -0.142634, 0.0613519, 0.0625693, 0.29804, -0.276065, -0.283755, -0.0586926, 0.0609932, 0.172328, 0.380084, 0.0817355, -0.0889897, 0.16975, -0.0727911, 0.558122, 0.129139, 0.0967012, -0.00808779, -0.281368, 0.229454, -0.0657459, 0.110639, 0.0990761, -0.0734602, -0.124961, 0.120193, 0.0117927, -0.00164934, -0.068704, 0.0934271, -0.150389, 0.267866, 0.111924, 0.22073, -0.0826743, 0.0181881, 0.164808, 0.08553, 0.0064627, -0.100066, -0.196847, -0.260685, -0.161078, -0.0889612, 0.267343, -0.183189, 0.099878, 0.206179, -0.134037, -0.0753274, 0.073361, 0.123856, -0.11014, -0.23651, -0.079332, -0.179564, -0.0953625, 0.0816014, -0.0153009, 0.0216921, -0.214616, 0.0721763, -0.337629, 0.113998, 0.30383, 0.213949, 0.0748996, -0.154083, 0.082343, 0.0915755, -0.165324, -0.161256, -0.0732527, -0.0771391, -0.179746, 0.148814, -0.229269, -0.00684043, -0.0877735, -0.232043, 0.0358457, 0.0860737, -0.016937, 0.0052483, 0.203986, -0.0327027, 0.0828824, 0.0515511, -0.0446207, 0.0495584, 0.06504, -0.0502581, -0.0989093, -0.242931, -0.161322, 0.0412978, 0.0882053, -0.0868244, 0.0333411, 0.0033292, 0.0956053, 0.224343, -0.0605414, 0.200487, 0.139677, 0.0741737, 0.131144, -0.0156217, 0.119855, -0.0672591, 0.0646749, 0.0212678, -0.0612522, 0.127438, 0.165742, 0.149455, 0.120228, 0.245928, -0.536011, -0.0221017, 0.0210271, 0.196356, 0.0401149, -0.00733165, -0.270396, -0.00968083, -0.0709557, -0.120717, 0.140489, 0.0935343, -0.172696, 0.301435, -0.0935873, -0.0353977, 0.0539549, -0.0338224, -0.239903, -0.0209894, -0.17114, 0.267786, 0.20251, -0.0980189, -0.04852, -0.207071, -0.253257, -0.0564701, -0.0518127, -0.0537929, -0.390881, 0.0470064, 0.0306878, 0.104422, 0.150282, 0.0117885, -0.093087, -0.0377776, -0.0618607, -0.0869537, 0.137726, 0.0903727, 0.0346921, 0.0111, -0.241767, -0.201946, 0.09471, -0.156048, -0.0978701, -0.239229, -0.0308635, -0.122071, -0.433478, -0.0514787, -0.182472, -0.181954, 0.0416541, -0.0883368, 0.157402, -0.462445, -0.103609, -0.160994, -0.0133393, -0.096508, 0.100438, 0.00418135, -0.0122206, 0.172408, 0.0437795, -0.172367, -0.0189107, -0.0304423, 
0.0780768, -0.116228, -0.0305065, -0.0440305, 0.00286725, -0.157059, 0.132452, -0.101883, -0.138483, 0.00723927, 0.0342281, 0.206677, -0.0770022, 0.0227105, -0.111016, -0.170921, 0.055846, 0.246527, -0.142554, -0.380108, -0.0346903, 0.138706, -0.176424, 0.112018, 0.0435032, -0.127998, -0.169885, -0.0509104, -0.0870096, -0.535699, -0.0638343, -0.0311837, 0.078099, -0.0342351, 0.0749799, 0.3883, -0.154977, 0.224178, 0.0550229, 0.107375, 0.33049, 0.0969202, 0.0756623, -0.233299, -0.104361, 0.442374, 0.0844492, 0.0705411, -0.140545, -0.0663961, -0.0728755, -0.0621244, -0.0819853, -0.112193, -0.176114, -0.0938139, -0.214228, 0.0190762, -0.213562, -0.190233, 0.133314, -0.148665, 0.0915799, 0.187216, -0.284974, 0.00733069, 0.0156916, 0.015107, 0.0318654, 0.346104, -0.124227, 0.137341, 0.0592528, -0.387351, -0.221991, 0.360592, -0.0931174, -0.0492834, 0.199867, -0.0852204, 0.150399, 0.0413833, 0.235906, -0.0706518, -0.166653, -0.0586646, -0.109711, -0.0823073, 0.257342, -0.224644, -0.430506, -0.105588, 0.0250296, -0.042311, -0.0996558, -0.115579, -0.286667, -0.154598, -0.137322, 0.176363, 0.088216, 0.161978, 0.255623, -0.0123169, -0.00387241, -0.318043, -0.21894, -0.412465, -0.415855, 0.255024, 0.361044, 0.0300423, -0.119439, 0.0657428, -0.238206, 0.340391, 0.201176, 0.102395, 0.216324, -0.121531, 0.265799, 0.0327802, 0.194072, -0.0792337, 0.456093, 0.0971469, -0.0170099, -0.0294468, -0.318039, -0.242527, -0.1083, 0.295943, -0.0284033, -0.156199, -0.20311, -0.075091, 0.528829, -0.165604, 0.0532403, 0.0505752, -0.413034, 0.175453, -0.0970195, -0.029351, 0.103333, 0.271092, 0.0511197, -0.182135, 0.112932, -0.32439, 0.294457, -0.0818895, 0.0914322, 0.185025, 0.0543957, -0.0167575, 0.504046, -0.0647153, -0.166975, 0.0248059, 0.0379442, 0.0980366, -0.178135, 0.143822, 0.45732, -0.0912428, -0.179338, 0.349726, -0.0596313, -0.299861, 0.112567, 0.0666395, 0.345303, 0.164124, -0.00265316, -0.0732412, 0.348079, -0.249414, 0.0465329, 0.0693596, 0.0799214, 0.000123214, 0.180679, 0.0912923, -0.300121, -0.288428, 0.150135, 0.112936, 0.104813, -0.0555879, -0.00205972, -0.0251151, -0.0788264, -0.016778, -0.110796, -0.083048, -0.212734, 0.288568, -0.114228, -0.113358, 0.110789, 0.118645, 0.133466, -0.0298552, -0.241374, 0.157257, 0.0861554, -0.0909277, 0.00156177, 0.106539, -0.209104, -0.106974, 0.0203283, -0.18111, -0.311602, -0.00371812, 0.0711113, -0.206721, 0.286076, 0.139713, 0.116621, 0.182792, 0.0246107, -0.17972, 0.041917, 0.0724635, 0.266344, 0.0989191, 0.0723898, 0.0257298, 0.104898, 0.0681826, -0.0704781, 0.00212139, -0.363547, 0.0274255, -0.106295, -0.363965, 0.127051, -0.0575343, -0.200952, -0.0666189, -0.139465, -0.0171747, 0.253794, -0.258602, -0.166356, -0.107649, 0.267331, 0.104521, -0.020921, -0.0780469, 0.125002, 0.0202556, -0.0899181, -0.126559, -0.297855, 0.121539, -0.0671643, -0.0444782, 0.334408, 0.0882725, -0.0879492, -0.00277655, -0.0616985, 0.0564236, -0.11618, -0.22836, 0.112953, 0.176082, 0.09988, -0.00635589, -0.114234, 0.241135, 0.0966775, -0.0961065, 0.137214, -0.0832349, -0.54299, -0.2335, -0.033801, -0.11505, -0.366386, -0.238099, -0.0951656, 0.263106, 0.129292, -0.14762, 0.0700404, 0.0195349, -0.286227, -0.273371, 0.0587288, -0.257152, -0.136248, -0.13336, -0.248086, 0.273973, -0.302625, -0.085841, -0.0839808, -0.130464, 0.252972, -0.0415149, -0.0695038, -0.091557, -0.262375, -0.0645785, 0.188566, -0.202261, -0.112712, 0.00631479, 0.0132917, -0.0130675, -0.302285, 0.0556928, -0.0211812, -0.0555546, 0.0291112, 0.168815, 0.143654, -0.00564186, -0.0614248, -0.0939664, 0.0959667, 
-0.209823, -0.103889, -0.206011, -0.0394793, 0.0545815, -0.0348762, -0.132075, -0.0489917, -0.177563, -0.164591, -0.0174372, -0.276844, -0.132214, -0.236278, -0.0614254, -0.230962, -0.409367, -0.08959, 0.182197, -0.341314, -0.0645579, -0.0161434, -0.166644, -0.0784324, -0.387537, 0.236617, -0.115318, -0.11315, -0.109817, -0.0949309, -0.253715, -0.254404, -0.0876592, -0.243118, -0.219172, 0.0341202, 0.0203343, 0.0435131, -0.0266338, 0.140304, -0.20669, -0.130739, 0.0213059, 0.182793, -0.0711616, -0.165651, -0.212373, -0.0972764, -0.284464, -0.0834676, -0.129573, -0.133945, 0.0684521, -0.133913, 0.165726, -0.176839, -0.0940447, -0.145421, -0.0471074, 0.00950449, 0.0308656, -0.00761046, -0.19397, -0.161623, 0.10975, -0.0398157, 0.00168868, 0.0626417, -0.118388, -0.134741, -0.243707, 0.146451, -0.165854, 0.0585878, 0.0269307, 0.163195, -0.197056, 0.0438799, -0.152668, -0.178631, -0.167278, 0.0258257, -0.22958, -0.101918, 0.0360034, -0.165612, -0.112482, -0.419959, -0.369384, 0.0468117, 0.202511, 0.161559, 0.0360435, -0.211843, 0.0480519, -0.252478, -0.0951382, 0.100791, -0.379245, -0.129869, -0.036501, 0.0685223, 0.0247177, -0.0751386, -0.12451, 0.244585, -0.0103249, -0.346383, -0.300614, 0.230366, -0.187795, -0.0326416, 0.0735751, -0.0136039, -0.0219528, 0.0629145, -0.0308739, -0.101514, -0.169444, 0.058706, -0.133274, -0.200294, -0.372511, -0.214898, -0.184366, 0.253648, -0.0362453, 0.0618937, 0.0838244, -0.0386255, 0.129191, -0.147435, -0.180809, -0.0797491, -0.286544, -0.273005, 0.116222, -0.255255, -0.0504643, -0.0567216, -0.0204081, 0.206331, -0.225266, -0.211665, -0.259216, -0.0676753, -0.176153, 0.285802, -0.00560349, -0.0253936, -0.182537, -0.344487, -0.341246, -0.171879, 0.24462, 0.015354, -0.0255803, -0.0855239, -0.151488, -0.0329621, 0.311794, 0.0889872, -0.142655, -0.00124048, 0.0175189, 0.0459686, 0.279491, -0.237445, 0.0570048, -0.00665275, -0.0558817, 0.0731352, 0.0291331, 0.0918153, 0.0276626, -0.135103, -0.303909, 0.0283329, -0.203482, -0.0849922, -0.284485, -0.214908, 0.0836636, -0.219738, 0.136157, 0.0332432, -0.143305, 0.0283252, -0.178703, -0.0742534, -0.153174, 0.02235, -0.0753622, -0.210102, -0.0915751, -0.0189732, -0.239039, -0.135349, -0.104589, -0.0658414, -0.183206, -0.123006, 0.0835748, -0.0703047, -0.207461, -0.274129, -0.225327, -0.113485, 0.13316, 0.0295303, -0.0958281]) # parameters
-model = model.Conv(i86, i2, i1, i87, i88, i89, i90).To(i0)
-model = model.DepthWiseConv(i0, i29, i28, i91, i92, i93, i94, i95).To(i27)
-model = model.Conv(i27, i32, i31, i96, i97, i98, i99).To(i30)
-model = model.DepthWiseConv(i30, i35, i34, i100, i101, i102, i103, i104).To(i33)
-model = model.Conv(i33, i38, i37, i105, i106, i107, i108).To(i36)
-model = model.DepthWiseConv(i36, i41, i40, i109, i110, i111, i112, i113).To(i39)
-model = model.Conv(i39, i44, i43, i114, i115, i116, i117).To(i42)
-model = model.DepthWiseConv(i42, i47, i46, i118, i119, i120, i121, i122).To(i45)
-model = model.Conv(i45, i50, i49, i123, i124, i125, i126).To(i48)
-model = model.DepthWiseConv(i48, i53, i52, i127, i128, i129, i130, i131).To(i51)
-model = model.Conv(i51, i56, i55, i132, i133, i134, i135).To(i54)
-model = model.DepthWiseConv(i54, i59, i58, i136, i137, i138, i139, i140).To(i57)
-model = model.Conv(i57, i62, i61, i141, i142, i143, i144).To(i60)
-model = model.DepthWiseConv(i60, i65, i64, i145, i146, i147, i148, i149).To(i63)
-model = model.Conv(i63, i68, i67, i150, i151, i152, i153).To(i66)
-model = model.DepthWiseConv(i66, i71, i70, i154, i155, i156, i157, i158).To(i69)
-model = model.Conv(i69, i74, i73, i159, i160, i161, i162).To(i72)
-model = model.DepthWiseConv(i72, i77, i76, i163, i164, i165, i166, i167).To(i75)
-model = model.Conv(i75, i80, i79, i168, i169, i170, i171).To(i78)
-model = model.DepthWiseConv(i78, i5, i4, i172, i173, i174, i175, i176).To(i3)
-model = model.Conv(i3, i8, i7, i177, i178, i179, i180).To(i6)
-model = model.DepthWiseConv(i6, i11, i10, i181, i182, i183, i184, i185).To(i9)
-model = model.Conv(i9, i14, i13, i186, i187, i188, i189).To(i12)
-model = model.DepthWiseConv(i12, i17, i16, i190, i191, i192, i193, i194).To(i15)
-model = model.Conv(i15, i20, i19, i195, i196, i197, i198).To(i18)
-model = model.DepthWiseConv(i18, i23, i22, i199, i200, i201, i202, i203).To(i21)
-model = model.Conv(i21, i26, i25, i204, i205, i206, i207).To(i24)
-model = model.AveragePool(i24, i208, i209, i210, i211, i212, i213).To(i81)
-model = model.Conv(i81, i84, i83, i214, i215, i216, i217).To(i82)
-model = model.Logistic(i82).To(i85)
+model = model.Operation("CONV_2D", i86, i2, i1, i87, i88, i89, i90).To(i0)
+model = model.Operation("DEPTHWISE_CONV_2D", i0, i29, i28, i91, i92, i93, i94, i95).To(i27)
+model = model.Operation("CONV_2D", i27, i32, i31, i96, i97, i98, i99).To(i30)
+model = model.Operation("DEPTHWISE_CONV_2D", i30, i35, i34, i100, i101, i102, i103, i104).To(i33)
+model = model.Operation("CONV_2D", i33, i38, i37, i105, i106, i107, i108).To(i36)
+model = model.Operation("DEPTHWISE_CONV_2D", i36, i41, i40, i109, i110, i111, i112, i113).To(i39)
+model = model.Operation("CONV_2D", i39, i44, i43, i114, i115, i116, i117).To(i42)
+model = model.Operation("DEPTHWISE_CONV_2D", i42, i47, i46, i118, i119, i120, i121, i122).To(i45)
+model = model.Operation("CONV_2D", i45, i50, i49, i123, i124, i125, i126).To(i48)
+model = model.Operation("DEPTHWISE_CONV_2D", i48, i53, i52, i127, i128, i129, i130, i131).To(i51)
+model = model.Operation("CONV_2D", i51, i56, i55, i132, i133, i134, i135).To(i54)
+model = model.Operation("DEPTHWISE_CONV_2D", i54, i59, i58, i136, i137, i138, i139, i140).To(i57)
+model = model.Operation("CONV_2D", i57, i62, i61, i141, i142, i143, i144).To(i60)
+model = model.Operation("DEPTHWISE_CONV_2D", i60, i65, i64, i145, i146, i147, i148, i149).To(i63)
+model = model.Operation("CONV_2D", i63, i68, i67, i150, i151, i152, i153).To(i66)
+model = model.Operation("DEPTHWISE_CONV_2D", i66, i71, i70, i154, i155, i156, i157, i158).To(i69)
+model = model.Operation("CONV_2D", i69, i74, i73, i159, i160, i161, i162).To(i72)
+model = model.Operation("DEPTHWISE_CONV_2D", i72, i77, i76, i163, i164, i165, i166, i167).To(i75)
+model = model.Operation("CONV_2D", i75, i80, i79, i168, i169, i170, i171).To(i78)
+model = model.Operation("DEPTHWISE_CONV_2D", i78, i5, i4, i172, i173, i174, i175, i176).To(i3)
+model = model.Operation("CONV_2D", i3, i8, i7, i177, i178, i179, i180).To(i6)
+model = model.Operation("DEPTHWISE_CONV_2D", i6, i11, i10, i181, i182, i183, i184, i185).To(i9)
+model = model.Operation("CONV_2D", i9, i14, i13, i186, i187, i188, i189).To(i12)
+model = model.Operation("DEPTHWISE_CONV_2D", i12, i17, i16, i190, i191, i192, i193, i194).To(i15)
+model = model.Operation("CONV_2D", i15, i20, i19, i195, i196, i197, i198).To(i18)
+model = model.Operation("DEPTHWISE_CONV_2D", i18, i23, i22, i199, i200, i201, i202, i203).To(i21)
+model = model.Operation("CONV_2D", i21, i26, i25, i204, i205, i206, i207).To(i24)
+model = model.Operation("AVERAGE_POOL_2D", i24, i208, i209, i210, i211, i212, i213).To(i81)
+model = model.Operation("CONV_2D", i81, i84, i83, i214, i215, i216, i217).To(i82)
+model = model.Operation("LOGISTIC", i82).To(i85)
# Example 1
input0 = { i86: [0.791841, 0.131271, 0.840444, 0.524133, 0.301398, 0.711596, 8.78908e-05, 0.358888, 0.156309, 0.273055, 0.143706, 0.663516, 0.817875, 0.0178006, 0.0663899, 0.38532, 0.108224, 0.120309, 0.252355, 0.468634, 0.535587, 0.372087, 0.0191815, 0.0750463, 0.896686, 0.564102, 0.781927, 0.138955, 0.876559, 0.672544, 0.47254, 0.685613, 0.864712, 0.678431, 0.681087, 0.954624, 0.707985, 0.0945955, 0.663676, 0.0174337, 0.514265, 0.0993437, 0.837646, 0.16886, 0.0206787, 0.460454, 0.298217, 0.0939906, 0.792089, 0.145739, 0.124991, 0.152928, 0.18507, 0.0910606, 0.886938, 0.703805, 0.738545, 0.0914867, 0.785625, 0.255978, 0.25971, 0.564347, 0.496365, 0.40138, 0.181351, 0.229051, 0.528806, 0.815946, 0.723594, 0.0782805, 0.417488, 0.357831, 0.699987, 0.110877, 0.706929, 0.361355, 0.44735, 0.51417, 0.234213, 0.49157, 0.754239, 0.35671, 0.90015, 0.368686, 0.162982, 0.467298, 0.712059, 0.73199, 0.394652, 0.216439, 0.0325005, 0.675254, 0.0965207, 0.919576, 0.46926, 0.923022, 0.0721544, 0.0620356, 0.570436, 0.418233, 0.734994, 0.626141, 0.189962, 0.680934, 0.576122, 0.377738, 0.927142, 0.31627, 0.0390065, 0.305891, 0.777981, 0.576827, 0.999185, 0.849222, 0.221597, 0.0529092, 0.741046, 0.810611, 0.641734, 0.2737, 0.0306115, 0.382373, 0.117532, 0.690233, 0.20231, 0.599944, 0.464123, 0.79606, 0.171281, 0.0924674, 0.502584, 0.689291, 0.549744, 0.951152, 0.579524, 0.796293, 0.971173, 0.903601, 0.376714, 0.915486, 0.521651, 0.888365, 0.355228, 0.648741, 0.900593, 0.48239, 0.317946, 0.524421, 0.99061, 0.842221, 0.102702, 0.679481, 0.588507, 0.7512, 0.177413, 0.762043, 0.63814, 0.509979, 0.245258, 0.97713, 0.206899, 0.0342631, 0.582589, 0.677466, 0.548678, 0.969459, 0.763207, 0.659911, 0.720006, 0.492729, 0.721137, 0.198181, 0.0767499, 0.279166, 0.563685, 0.578014, 0.284114, 0.0313309, 0.545107, 0.675547, 0.902745, 0.799514, 0.14417, 0.197687, 0.187282, 0.0420023, 0.238308, 0.154671, 0.243883, 0.777129, 0.291907, 0.959133, 0.166203, 0.355963, 0.831646, 0.308086, 0.148097, 0.342163, 0.366069, 0.669976, 0.482005, 0.50455, 0.114772, 0.909615, 0.592529, 0.819863, 0.7448, 0.733599, 0.0818953, 0.333677, 0.436425, 0.640028, 0.128689, 0.802751, 0.053884, 0.0968059, 0.0141547, 0.189447, 0.458282, 0.301719, 0.332669, 0.697625, 0.607359, 0.96581, 0.0482129, 0.658574, 0.283779, 0.428031, 0.120848, 0.387377, 0.169832, 0.0381746, 0.549549, 0.721105, 0.281996, 0.282577, 0.183188, 0.237784, 0.590087, 0.289398, 0.540784, 0.301099, 0.396178, 0.855404, 0.460563, 0.125632, 0.150601, 0.271058, 0.696146, 0.947483, 0.735739, 0.425141, 0.977515, 0.3452, 0.143326, 0.274614, 0.925826, 0.81308, 0.535078, 0.998599, 0.427464, 0.260845, 0.137486, 0.837797, 0.0631066, 0.0876196, 0.588056, 0.40014, 0.815027, 0.494389, 0.355774, 0.450119, 0.717888, 0.423054, 0.20979, 0.842124, 0.399906, 0.410846, 0.559933, 0.451017, 0.558568, 0.944169, 0.881834, 0.00384158, 0.103625, 0.309752, 0.40491, 0.98423, 0.303001, 0.486862, 0.394447, 0.108623, 0.258184, 0.172573, 0.0685831, 0.358462, 0.272746, 0.875298, 0.690985, 0.800497, 0.512517, 0.692277, 0.0706645, 0.259856, 0.37559, 0.213658, 0.53025, 0.405643, 0.656994, 0.228048, 0.247219, 0.300241, 0.700641, 0.0177141, 0.466215, 0.200764, 0.857363, 0.483106, 0.522657, 0.144218, 0.267424, 0.291684, 0.67167, 0.0357309, 0.265176, 0.520329, 0.556993, 0.984931, 0.183227, 0.335942, 0.992855, 0.175479, 0.425192, 0.826319, 0.433081, 0.727791, 0.245524, 0.0597154, 0.156634, 0.64591, 0.958063, 0.537418, 0.295089, 0.0732913, 0.742363, 0.642074, 0.826581, 0.682193, 0.852718, 0.999771, 0.315818, 0.571465, 0.491486, 
0.235984, 0.0818711, 0.636604, 0.212895, 0.660947, 0.765877, 0.0767624, 0.73344, 0.679814, 0.36851, 0.915551, 0.0944369, 0.982257, 0.872403, 0.796865, 0.114389, 0.967463, 0.957193, 0.575821, 0.713856, 0.91271, 0.124751, 0.106865, 0.376894, 0.421127, 0.793781, 0.914849, 0.691005, 0.944351, 0.332563, 0.170314, 0.447437, 0.760359, 0.683639, 0.393852, 0.668889, 0.222266, 0.667067, 0.217586, 0.189967, 0.136813, 0.329066, 0.51118, 0.15714, 0.403605, 0.674683, 0.644709, 0.908952, 0.496509, 0.0120233, 0.245706, 0.0329395, 0.795719, 0.986633, 0.865211, 0.0771508, 0.487609, 0.241025, 0.669604, 0.477396, 0.428557, 0.0621134, 0.68842, 0.784684, 0.556526, 0.0546337, 0.498104, 0.944155, 0.975526, 0.0519842, 0.828273, 0.220436, 0.0973805, 0.639122, 0.750508, 0.236016, 0.866084, 0.276684, 0.992722, 0.183576, 0.535933, 0.792447, 0.142149, 0.988627, 0.818945, 0.724294, 0.276561, 0.745799, 0.390849, 0.752617, 0.732283, 0.401179, 0.683818, 0.660091, 0.844869, 0.287746, 0.187653, 0.968993, 0.541457, 0.543372, 0.775215, 0.92933, 0.475041, 0.306358, 0.783032, 0.593079, 0.119069, 0.869258, 0.970535, 0.351855, 0.558143, 0.969403, 0.412738, 0.264689, 0.482702, 0.789336, 0.437969, 0.159124, 0.426539, 0.198885, 0.0177727, 0.691272, 0.0595643, 0.359525, 0.6306, 0.149484, 0.994781, 0.845899, 0.811227, 0.82501, 0.27116, 0.7401, 0.190015, 0.458648, 0.841917, 0.328061, 0.231616, 0.510571, 0.0631396, 0.292842, 0.333388, 0.216002, 0.933854, 0.765494, 0.300234, 0.158843, 0.608681, 0.49692, 0.622592, 0.869762, 0.515378, 0.243813, 0.351183, 0.529368, 0.311112, 0.238909, 0.433752, 0.998878, 0.62677, 0.838139, 0.650211, 0.636101, 0.667345, 0.948906, 0.309283, 0.202239, 0.449868, 0.432025, 0.187258, 0.99759, 0.487673, 0.824721, 0.51251, 0.854737, 0.412896, 0.810302, 0.105718, 0.319232, 0.99164, 0.123529, 0.316028, 0.570233, 0.662391, 0.669733, 0.0749745, 0.66906, 0.00903542, 0.510595, 0.967114, 0.783757, 0.869923, 0.655048, 0.260669, 0.68839, 0.87354, 0.913864, 0.113415, 0.625024, 0.474351, 0.776453, 0.21527, 0.113414, 0.749888, 0.626217, 0.388871, 0.0309753, 0.475298, 0.422064, 0.460825, 0.32816, 0.955097, 0.627315, 0.792411, 0.00766529, 0.566721, 0.407374, 0.634685, 0.627003, 0.375373, 0.31221, 0.861205, 0.608589, 0.880119, 0.52554, 0.605035, 0.118977, 0.613206, 0.383087, 0.716612, 0.622154, 0.851895, 0.0783142, 0.475583, 0.21843, 0.584996, 0.938854, 0.62971, 0.813131, 0.892749, 0.798681, 0.795511, 0.0655248, 0.285758, 0.246531, 0.78642, 0.342706, 0.583047, 0.0344585, 0.144597, 0.251586, 0.915092, 0.863438, 0.647058, 0.0484427, 0.898403, 0.423501, 0.992785, 0.0703023, 0.799657, 0.665863, 0.0711288, 0.680655, 0.65026, 0.1549, 0.593819, 0.767113, 0.150623, 0.918489, 0.176067, 0.85076, 0.979277, 0.572358, 0.13148, 0.643712, 0.831597, 0.0992917, 0.272232, 0.694243, 0.335103, 0.241076, 0.246106, 0.942713, 0.432468, 0.789363, 0.458507, 0.339482, 0.0900444, 0.74853, 0.14387, 0.38917, 0.957029, 0.989021, 0.611135, 0.697256, 0.0395212, 0.549286, 0.985937, 0.312926, 0.527949, 0.354932, 0.589522, 0.936589, 0.198696, 0.480116, 0.992084, 0.407626, 0.139969, 0.0251239, 0.499093, 0.985195, 0.595041, 0.680062, 0.801376, 0.61929, 0.480137, 0.331, 0.345052, 0.859727, 0.588247, 0.295778, 0.0999083, 0.376957, 0.470159, 0.216982, 0.530417, 0.551786, 0.491341, 0.598367, 0.749522, 0.181984, 0.472262, 0.934373, 0.378052, 0.376463, 0.439012, 0.64088, 0.202156, 0.905179, 0.79377, 0.695248, 0.815266, 0.0739469, 0.620277, 0.889489, 0.361428, 0.37442, 0.936218, 0.255595, 0.112557, 0.398399, 0.11098, 0.222453, 0.265583, 0.0120594, 0.814637, 0.373474, 
0.369039, 0.877383, 0.91715, 0.759644, 0.170179, 0.658204, 0.0327627, 0.837132, 0.225002, 0.0310278, 0.275552, 0.929744, 0.718458, 0.343128, 0.762734, 0.354592, 0.220131, 0.885119, 0.477841, 0.080636, 0.342969, 0.402588, 0.0586417, 0.866942, 0.657128, 0.135711, 0.23269, 0.885087, 0.301901, 0.0223638, 0.243521, 0.897715, 0.831138, 0.422695, 0.373375, 0.604348, 0.216366, 0.117223, 0.52669, 0.957595, 0.397123, 0.527852, 0.162748, 0.331502, 0.0476454, 0.493669, 0.45054, 0.0189271, 0.832804, 0.245005, 0.0257984, 0.806287, 0.573803, 0.554897, 0.242035, 0.551262, 0.807645, 0.697002, 0.236734, 0.0732667, 0.825466, 0.273506, 0.918823, 0.728595, 0.514056, 0.913061, 0.990209, 0.569672, 0.73299, 0.254236, 0.226596, 0.438448, 0.718309, 0.0261628, 0.522876, 0.13089, 0.518293, 0.489113, 0.856953, 0.859139, 0.0102561, 0.241273, 0.265863, 0.46517, 0.692028, 0.0965296, 0.357579, 0.0809109, 0.973039, 0.241768, 0.178742, 0.16546, 0.32408, 0.51216, 0.353633, 0.914092, 0.996733, 0.527617, 0.38878, 0.968574, 0.189173, 0.297074, 0.0769255, 0.329634, 0.999992, 0.376291, 0.353023, 0.968517, 0.936789, 0.839482, 0.141374, 0.287281, 0.899092, 0.276857, 0.701517, 0.705605, 0.694727, 0.709399, 0.150969, 0.838456, 0.872205, 0.251672, 0.0823043, 0.433517, 0.114241, 0.686941, 0.604298, 0.96366, 0.998541, 0.542768, 0.763697, 0.487684, 0.614873, 0.118265, 0.82044, 0.928577, 0.130926, 0.788807, 0.116383, 0.961747, 0.119231, 0.584441, 0.492563, 0.310756, 0.386993, 0.706782, 0.856075, 0.351463, 0.88558, 0.953703, 0.625848, 0.152387, 0.725336, 0.209592, 0.510357, 0.994541, 0.563889, 0.112611, 0.188244, 0.200779, 0.603122, 0.498709, 0.401355, 0.256663, 0.72548, 0.286149, 0.585824, 0.729582, 0.109245, 0.750599, 0.526262, 0.2682, 0.0439657, 0.940121, 0.51635, 0.563138, 0.648896, 0.626415, 0.326178, 0.514948, 0.792292, 0.132806, 0.0344517, 0.68247, 0.688457, 0.0830311, 0.822472, 0.421894, 0.363927, 0.258686, 0.469445, 0.484979, 0.261765, 0.0107293, 0.823743, 0.898295, 0.685985, 0.12627, 0.0935537, 0.710923, 0.221798, 0.466435, 0.204661, 0.419436, 0.888813, 0.812406, 0.0987169, 0.56719, 0.0848368, 0.415454, 0.0568779, 0.0836185, 0.591172, 0.222817, 0.084403, 0.443582, 0.511802, 0.291254, 0.57206, 0.538638, 0.0480104, 0.394755, 0.146417, 0.486177, 0.275562, 0.991969, 0.152291, 0.125037, 0.734148, 0.273974, 0.907751, 0.0557118, 0.625865, 0.540812, 0.404363, 0.683585, 0.817448, 0.880429, 0.0368383, 0.117144, 0.452914, 0.601129, 0.816468, 0.177218, 0.341007, 0.437119, 0.512966, 0.734273, 0.670025, 0.774994, 0.516255, 0.403913, 0.194468, 0.730509, 0.915396, 0.733057, 0.226151, 0.773553, 0.560888, 0.322723, 0.880917, 0.354637, 0.764857, 0.101728, 0.98936, 0.770464, 0.363922, 0.268453, 0.122471, 0.468997, 0.227406, 0.908916, 0.838655, 0.675934, 0.297773, 0.820353, 0.968448, 0.22299, 0.45603, 0.883587, 0.961793, 0.981808, 0.0918925, 0.289544, 0.0126749, 0.952685, 0.433636, 0.888319, 0.0345352, 0.0537092, 0.630984, 0.290415, 0.442073, 0.957977, 0.528899, 0.258949, 0.517977, 0.555961, 0.962038, 0.103875, 0.120518, 0.524644, 0.901207, 0.976801, 0.20839, 0.791273, 0.928386, 0.269995, 0.851301, 0.155978, 0.95959, 0.471912, 0.558651, 0.0792384, 0.305654, 0.0687847, 0.407363, 0.854473, 0.726835, 0.434604, 0.0319832, 0.258654, 0.228612, 0.88968, 0.233836, 0.187192, 0.0812797, 0.685767, 0.227064, 0.625626, 0.0418912, 0.091916, 0.893489, 0.906418, 0.343487, 0.259549, 0.500555, 0.49464, 0.358855, 0.453654, 0.625872, 0.260065, 0.526648, 0.270802, 0.629696, 0.652685, 0.886566, 0.0175723, 0.192974, 0.772648, 0.322638, 0.441971, 0.326457, 0.314981, 
0.789898, 0.609642, 0.417324, 0.901833, 0.625863, 0.520122, 0.714596, 0.0476563, 0.130953, 0.230048, 0.938159, 0.560143, 0.0489532, 0.172541, 0.941601, 0.0195008, 0.262638, 0.882763, 0.515662, 0.696145, 0.0346089, 0.472647, 0.673146, 0.664696, 0.171615, 0.157088, 0.320476, 0.644456, 0.655435, 0.438065, 0.828032, 0.75158, 0.671098, 0.651288, 0.194411, 0.99512, 0.324144, 0.327449, 0.138816, 0.826698, 0.702613, 0.785437, 0.292426, 0.32113, 0.934188, 0.138683, 0.10368, 0.664516, 0.364206, 0.992479, 0.365432, 0.864658, 0.0582836, 0.220552, 0.603853, 0.0222399, 0.801677, 0.136697, 0.918184, 0.534017, 0.605448, 0.172941, 0.0832636, 0.820645, 0.321777, 0.828627, 0.184141, 0.824755, 0.859183, 0.0145646, 0.180491, 0.381069, 0.685228, 0.631852, 0.0338391, 0.361849, 0.424745, 0.051589, 0.293966, 0.844586, 0.0737949, 0.255997, 0.134207, 0.724875, 0.606424, 0.18282, 0.94089, 0.513781, 0.583085, 0.73825, 0.29708, 0.113687, 0.242324, 0.595915, 0.744452, 0.300691, 0.122135, 0.186776, 0.424907, 0.0548492, 0.926156, 0.784282, 0.487589, 0.708848, 0.421664, 0.91074, 0.414106, 0.257951, 0.598353, 0.511842, 0.822584, 0.659914, 0.734142, 0.129923, 0.893404, 0.499535, 0.376755, 0.351202, 0.181314, 0.467408, 0.125804, 0.750217, 0.325757, 0.0533974, 0.0611866, 0.927609, 0.146805, 0.00837644, 0.761909, 0.250079, 0.28004, 0.00418437, 0.875385, 0.502056, 0.121803, 0.876352, 0.964255, 0.661306, 0.945703, 0.697616, 0.375116, 0.379839, 0.991601, 0.204653, 0.23887, 0.944915, 0.363196, 0.538404, 0.260688, 0.0577462, 0.7928, 0.0965343, 0.0313786, 0.406476, 0.0356123, 0.920733, 0.384257, 0.396769, 0.565033, 0.816765, 0.0231784, 0.297517, 0.691497, 0.125348, 0.90672, 0.870792, 0.839266, 0.619161, 0.415235, 0.0855324, 0.317575, 0.0321516, 0.0459598, 0.779923, 0.208385, 0.319421, 0.719141, 0.235942, 0.931433, 0.738555, 0.498458, 0.716415, 0.422233, 0.889511, 0.689843, 0.00346445, 0.501386, 0.836507, 0.267413, 0.20589, 0.676368, 0.162917, 0.461109, 0.200088, 0.640686, 0.870575, 0.684988, 0.629306, 0.314908, 0.949392, 0.59184, 0.96408, 0.443349, 0.786204, 0.475125, 0.46335, 0.120098, 0.521526, 0.381189, 0.0282903, 0.338781, 0.0147252, 0.105385, 0.523878, 0.397474, 0.922144, 0.360621, 0.967328, 0.0795957, 0.614568, 0.941387, 0.256149, 0.460269, 0.275171, 0.156944, 0.160385, 0.190592, 0.972277, 0.995504, 0.0148588, 0.0328694, 0.288649, 0.624778, 0.815458, 0.0562352, 0.17142, 0.892587, 0.813621, 0.365365, 0.287373, 0.633701, 0.612011, 0.188615, 0.266795, 0.247953, 0.929241, 0.724582, 0.168495, 0.184623, 0.676937, 0.545181, 0.779166, 0.401639, 0.791977, 0.89885, 0.0700023, 0.323039, 0.916252, 0.666769, 0.430525, 0.521759, 0.251172, 0.18914, 0.166139, 0.550677, 0.489721, 0.273977, 0.209432, 0.941569, 0.688973, 0.534458, 0.705287, 0.445498, 0.432936, 0.674555, 0.152962, 0.350929, 0.623852, 0.104778, 0.921401, 0.0822757, 0.773417, 0.577079, 0.52454, 0.340603, 0.180518, 0.743391, 0.309313, 0.647283, 0.508779, 0.676723, 0.871975, 0.141776, 0.662358, 0.365894, 0.614934, 0.692909, 0.271235, 0.493481, 0.0191645, 0.433778, 0.82123, 0.440987, 0.17822, 0.158052, 0.351538, 0.904367, 0.932931, 0.771275, 0.683578, 0.147603, 0.612052, 0.238339, 0.370294, 0.377275, 0.618327, 0.23039, 0.025349, 0.881252, 0.656383, 0.848147, 0.158194, 0.925577, 0.715534, 0.566595, 0.46578, 0.953175, 0.910838, 0.422878, 0.810046, 0.0736497, 0.767067, 0.639362, 0.0337872, 0.0552913, 0.196417, 0.793722, 0.638354, 0.162484, 0.250245, 0.755271, 0.563528, 0.0426519, 0.331396, 0.686429, 0.573515, 0.580952, 0.0780432, 0.399008, 0.837097, 0.427702, 0.879839, 0.717721, 0.798127, 
0.310848, 0.0969406, 0.112661, 0.0588716, 0.617045, 0.810882, 0.72268, 0.536256, 0.577522, 0.82076, 0.431478, 0.582098, 0.0867875, 0.0819969, 0.278815, 0.442372, 0.36417, 0.504359, 0.790864, 0.150706, 0.789597, 0.11971, 0.208723, 0.0420081, 0.439192, 0.32817, 0.510673, 0.453106, 0.131887, 0.963443, 0.46543, 0.417993, 0.515867, 0.501282, 0.358465, 0.0408286, 0.212398, 0.636908, 0.939945, 0.64096, 0.729772, 0.782885, 0.0180581, 0.292225, 0.0476142, 0.014517, 0.933166, 0.677903, 0.471022, 0.0626455, 0.34605, 0.729552, 0.0758758, 0.030948, 0.913585, 0.0579351, 0.604563, 0.661979, 0.380977, 0.931586, 0.10853, 0.219475, 0.534411, 0.323453, 0.191781, 0.529629, 0.24442, 0.0668745, 0.991815, 0.811953, 0.742443, 0.851713, 0.470675, 0.439336, 0.591621, 0.0528885, 0.293893, 0.247487, 0.153686, 0.712198, 0.308949, 0.888146, 0.483333, 0.958923, 0.773188, 0.111127, 0.931216, 0.00803783, 0.523384, 0.589483, 0.111122, 0.874195, 0.471779, 0.866746, 0.393314, 0.917975, 0.47388, 0.45923, 0.184881, 0.000267302, 0.118287, 0.294422, 0.478528, 0.798024, 0.819273, 0.134578, 0.833257, 0.754163, 0.0372118, 0.476365, 0.883108, 0.295347, 0.506178, 0.159185, 0.486809, 0.997655, 0.542137, 0.722696, 0.87696, 0.0533655, 0.576914, 0.635649, 0.843527, 0.996396, 0.633346, 0.0469884, 0.983361, 0.274715, 0.733967, 0.587163, 0.335545, 0.63203, 0.322497, 0.342558, 0.613547, 0.852798, 0.97598, 0.289202, 0.112494, 0.604807, 0.171367, 0.645325, 0.120826, 0.983477, 0.346941, 0.695627, 0.0933139, 0.0904035, 0.945534, 0.699159, 0.918074, 0.127972, 0.146117, 0.153176, 0.34932, 0.274975, 0.128079, 0.981156, 0.64941, 0.329935, 0.388786, 0.992289, 0.97107, 0.636135, 0.825807, 0.0221501, 0.778773, 0.886846, 0.607811, 0.814742, 0.371163, 0.143338, 0.833689, 0.548518, 0.0863777, 0.455333, 0.654521, 0.690034, 0.528876, 0.330968, 0.1821, 0.199122, 0.574718, 0.137327, 0.451855, 0.991012, 0.703202, 0.740871, 0.78313, 0.363101, 0.789682, 0.458018, 0.129508, 0.224462, 0.792609, 0.289557, 0.869839, 0.877946, 0.871263, 0.143763, 0.374451, 0.801211, 0.459942, 0.262103, 0.115882, 0.703586, 0.14139, 0.856733, 0.379291, 0.409448, 0.847103, 0.298199, 0.274674, 0.993469, 0.91734, 0.171558, 0.548496, 0.825017, 0.230367, 0.933356, 0.111477, 0.244218, 0.0821157, 0.655875, 0.509365, 0.202234, 0.879215, 0.517252, 0.642754, 0.680939, 0.590988, 0.466308, 0.0191027, 0.486767, 0.929551, 0.412309, 0.593949, 0.818184, 0.615371, 0.424783, 0.268686, 0.882975, 0.856814, 0.0412179, 0.62009, 0.509358, 0.773243, 0.669224, 0.101498, 0.155394, 0.236388, 0.279887, 0.139492, 0.908711, 0.0246424, 0.416223, 0.851026, 0.859623, 0.905136, 0.810206, 0.504903, 0.155452, 0.982737, 0.06796, 0.654384, 0.0777361, 0.726558, 0.162373, 0.707787, 0.0106446, 0.863501, 0.16515, 0.454644, 0.352805, 0.110448, 0.0987702, 0.38532, 0.831841, 0.7131, 0.884184, 0.49833, 0.403592, 0.136039, 0.402964, 0.75637, 0.852311, 0.226679, 0.677947, 0.539316, 0.90116, 0.150665, 0.326361, 0.69401, 0.642847, 0.637767, 0.0383073, 0.21091, 0.738813, 0.704299, 0.622894, 0.316864, 0.673828, 0.00453082, 0.998097, 0.477128, 0.561196, 0.143336, 0.852607, 0.0538591, 0.705491, 0.680765, 0.922218, 0.335589, 0.212406, 0.788774, 0.50572, 0.315882, 0.997041, 0.28049, 0.316806, 0.0301507, 0.0161073, 0.833198, 0.976399, 0.299819, 0.705579, 0.699842, 0.0229762, 0.547337, 0.576429, 0.586619, 0.580927, 0.704043, 0.712396, 0.704521, 0.461941, 0.271101, 0.300411, 0.543945, 0.83888, 0.927517, 0.696113, 0.806099, 0.61674, 0.565187, 0.262063, 0.584379, 0.651243, 0.769777, 0.964328, 0.824391, 0.418492, 0.44945, 0.828669, 0.383372, 
0.643618, 0.112057, 0.90306, 0.952415, 0.135493, 0.445303, 0.10469, 0.876504, 0.780554, 0.304308, 0.92487, 0.112365, 0.761206, 0.0440797, 0.0245029, 0.39543, 0.200187, 0.24704, 0.990857, 0.880704, 0.231992, 0.475104, 0.572161, 0.657611, 0.671187, 0.00252225, 0.773633, 0.652176, 0.85746, 0.0924269, 0.33908, 0.0523477, 0.105062, 0.946441, 0.0981879, 0.720323, 0.823436, 0.372996, 0.567674, 0.0662654, 0.0282141, 0.265578, 0.973171, 0.666187, 0.161559, 0.18833, 0.692873, 0.0415668, 0.180542, 0.100651, 0.9122, 0.848933, 0.0986598, 0.413384, 0.600706, 0.921116, 0.417723, 0.915706, 0.4991, 0.235294, 0.642321, 0.294678, 0.459773, 0.821892, 0.521086, 0.820835, 0.420583, 0.0694356, 0.142045, 0.519777, 0.996035, 0.230064, 0.901462, 0.0570203, 0.936726, 0.0804466, 0.324219, 0.231704, 0.653857, 0.77686, 0.877787, 0.585532, 0.907335, 0.478509, 0.898244, 0.509399, 0.93998, 0.333457, 0.911111, 0.240376, 0.433741, 0.343811, 0.640166, 0.803994, 0.338771, 0.25318, 0.247124, 0.667644, 0.85778, 0.380125, 0.279352, 0.385812, 0.485787, 0.073276, 0.632746, 0.602181, 0.839795, 0.383827, 0.999008, 0.161368, 0.696009, 0.0803506, 0.0669083, 0.627762, 0.0293949, 0.575738, 0.783778, 0.894671, 0.0255047, 0.0312134, 0.995024, 0.966357, 0.060387, 0.11614, 0.910705, 0.97223, 0.288007, 0.859113, 0.538287, 0.901633, 0.114395, 0.80549, 0.119997, 0.913041, 0.96864, 0.847122, 0.376428, 0.304454, 0.3547, 0.763945, 0.994089, 0.131418, 0.210581, 0.741318, 0.327375, 0.965342, 0.417546, 0.37163, 0.279321, 0.0719678, 0.856568, 0.653297, 0.823873, 0.788634, 0.561447, 0.0368962, 0.164636, 0.45971, 0.243616, 0.552693, 0.91708, 0.407654, 0.684348, 0.788239, 0.739047, 0.641911, 0.293135, 0.614863, 0.454767, 0.555778, 0.294433, 0.29805, 0.788502, 0.0334243, 0.676237, 0.65361, 0.214046, 0.0177261, 0.189304, 0.498024, 0.118308, 0.963888, 0.407714, 0.629715, 0.396166, 0.385083, 0.681577, 0.950198, 0.644092, 0.728264, 0.376151, 0.982424, 0.917, 0.97048, 0.0279279, 0.144728, 0.372595, 0.796806, 0.345967, 0.513465, 0.617405, 0.637598, 0.399704, 0.708465, 0.681057, 0.753367, 0.882881, 0.0401207, 0.250032, 0.573113, 0.17008, 0.595031, 0.330492, 0.651312, 0.62463, 0.353036, 0.020142, 0.284584, 0.977296, 0.247234, 0.417433, 0.685337, 0.76027, 0.0594405, 0.160709, 0.559443, 0.00535314, 0.720624, 0.293242, 0.740578, 0.37522, 0.989147, 0.373445, 0.843482, 0.805096, 0.23085, 0.48666, 0.163527, 0.314849, 0.559939, 0.903911, 0.817242, 0.503036, 0.853727, 0.909022, 0.747273, 0.590485, 0.630844, 0.630728, 0.629548, 0.43543, 0.316281, 0.556645, 0.444374, 0.565821, 0.729194, 0.195997, 0.0265693, 0.986677, 0.203989, 0.176029, 0.0199535, 0.299316, 0.464491, 0.728061, 0.229464, 0.323904, 0.901236, 0.753814, 0.512545, 0.889499, 0.945957, 0.685098, 0.545315, 0.0216512, 0.540925, 0.774731, 0.883067, 0.629874, 0.0150073, 0.45581, 0.497568, 0.656032, 0.38256, 0.119854, 0.0818771, 0.0944882, 0.379706, 0.783539, 0.75417, 0.350797, 0.706184, 0.709958, 0.822739, 0.493493, 0.857186, 0.792274, 0.16993, 0.498329, 0.603569, 0.448396, 0.80538, 0.703538, 0.146, 0.491452, 0.983054, 0.010177, 0.471146, 0.929877, 0.705029, 0.140753, 0.420074, 0.502608, 0.278529, 0.0754144, 0.0367962, 0.640371, 0.951626, 0.76067, 0.091209, 0.850092, 0.792538, 0.621864, 0.158214, 0.675515, 0.3349, 0.142078, 0.358923, 0.880086, 0.795988, 0.250101, 0.156054, 0.307004, 0.455991, 0.682152, 0.665944, 0.239613, 0.246061, 0.606435, 0.691024, 0.825271, 0.854492, 0.00488661, 0.334328, 0.929062, 0.758144, 0.572786, 0.0638767, 0.185312, 0.561627, 0.3009, 0.312006, 0.291547, 0.111193, 0.81938, 0.789581, 
[snip: 21 rows of comma-separated pseudo-random float values, several hundred values per row, all in the range [0, 1) — machine-generated numeric fixture data carried verbatim in the diff (apparently input data for the kernel tests); no hand-written content in this span.]
0.303993, 0.882058, 0.000734467, 0.366207, 0.544315, 0.480295, 0.147126, 0.789424, 0.527295, 0.588564, 0.574151, 0.526338, 0.209439, 0.487335, 0.801044, 0.985004, 0.376859, 0.167142, 0.688227, 0.235405, 0.813848, 0.701136, 0.645054, 0.661662, 0.437714, 0.486964, 0.740157, 0.529264, 0.937413, 0.510578, 0.022128, 0.036041, 0.0699558, 0.117385, 0.316326, 0.885856, 0.0403546, 0.231338, 0.635229, 0.241592, 0.250467, 0.0945615, 0.800571, 0.0840602, 0.315926, 0.128503, 0.813934, 0.0258813, 0.17596, 0.332589, 0.669219, 0.705743, 0.671, 0.261769, 0.381551, 0.239807, 0.88429, 0.219624, 0.083958, 0.00626468, 0.818293, 0.606267, 0.40237, 0.417485, 0.591737, 0.527343, 0.479031, 0.801822, 0.658967, 0.716588, 0.744282, 0.155606, 0.469337, 0.766239, 0.290825, 0.558359, 0.71311, 0.285717, 0.749339, 0.627337, 0.191896, 0.271965, 0.566574, 0.400855, 0.0837091, 0.21823, 0.496795, 0.334895, 0.135946, 0.572812, 0.836966, 0.893186, 0.962159, 0.942248, 0.836315, 0.771091, 0.598405, 0.392909, 0.390642, 0.367415, 0.108506, 0.509422, 0.54232, 0.237082, 0.978606, 0.872314, 0.633089, 0.238001, 0.460591, 0.201899, 0.184534, 0.325397, 0.934418, 0.963759, 0.786507, 0.0375785, 0.647151, 0.829225, 0.742572, 0.135389, 0.542884, 0.855696, 0.97466, 0.215872, 0.0772942, 0.130509, 0.229885, 0.310712, 0.964669, 0.532942, 0.721281, 0.335187, 0.626803, 0.887096, 0.2246, 0.137863, 0.363398, 0.201565, 0.344398, 0.664164, 0.579189, 0.576948, 0.837698, 0.878516, 0.376328, 0.55172, 0.634133, 0.400293, 0.0209109, 0.20232, 0.102912, 0.366968, 0.563481, 0.282843, 0.398847, 0.843752, 0.294638, 0.154387, 0.671821, 0.058539, 0.404984, 0.885257, 0.518336, 0.8205, 0.537827, 0.95051, 0.381612, 0.404012, 0.964825, 0.380396, 0.274525, 0.222361, 0.494103, 0.532174, 0.367596, 0.0728736, 0.269724, 0.292736, 0.30423, 0.361644, 0.322779, 0.46089, 0.346687, 0.722558, 0.576871, 0.355958, 0.995846, 0.0720617, 0.859159, 0.0573213, 0.00642774, 0.815126, 0.432851, 0.528463, 0.632068, 0.386987, 0.979928, 0.17268, 0.356581, 0.361901, 0.76519, 0.767691, 0.237448, 0.92085, 0.970494, 0.0405644, 0.488781, 0.129028, 0.805264, 0.461414, 0.774707, 0.772119, 0.883158, 0.193649, 0.725189, 0.946668, 0.317352, 0.143978, 0.596241, 0.266404, 0.61899, 0.130789, 0.117894, 0.140445, 0.828193, 0.350748, 0.168147, 0.73843, 0.914863, 0.210849, 0.300128, 0.80821, 0.652808, 0.194055, 0.724624, 0.465249, 0.659502, 0.933436, 0.319047, 0.944818, 0.998419, 0.23791, 0.700198, 0.384879, 0.793092, 0.697484, 0.236586, 0.190138, 0.189159, 0.181015, 0.37148, 0.333317, 0.270334, 0.564374, 0.618572, 0.781573, 0.141661, 0.418931, 0.31021, 0.656223, 0.100783, 0.411129, 0.720769, 0.621313, 0.679977, 0.881772, 0.589841, 0.408094, 0.365121, 0.628722, 0.603232, 0.818907, 0.14653, 0.921324, 0.72214, 0.113515, 0.112672, 0.41947, 0.111943, 0.748427, 0.135856, 0.438782, 0.858249, 0.871748, 0.108394, 0.255696, 0.485165, 0.764928, 0.642615, 0.527349, 0.656961, 0.827604, 0.623513, 0.0672744, 0.759552, 0.0518418, 0.383462, 0.752072, 0.0236901, 0.881091, 0.237981, 0.00071645, 0.0996164, 0.822663, 0.501758, 0.741016, 0.632086, 0.438797, 0.871133, 0.317806, 0.047918, 0.715757, 0.382381, 0.935342, 0.339043, 0.523275, 0.0327271, 0.320565, 0.0870344, 0.823622, 0.319834, 0.153559, 0.594371, 0.267636, 0.698517, 0.138399, 0.39011, 0.277741, 0.860284, 0.499121, 0.835385, 0.68415, 0.366272, 0.382257, 0.463301, 0.74469, 0.341145, 0.816966, 0.128728, 0.67425, 0.44642, 0.377272, 0.383248, 0.0781609, 0.809464, 0.911476, 0.323137, 0.76409, 0.963659, 0.794773, 0.191325, 0.935969, 0.357465, 0.548294, 0.58069, 0.415215, 
0.72179, 0.76476, 0.926536, 0.402793, 0.618887, 0.446004, 0.0953802, 0.859194, 0.177043, 0.10478, 0.202788, 0.749946, 0.931449, 0.563113, 0.23868, 0.649197, 0.86184, 0.327508, 0.267626, 0.735183, 0.750009, 0.252342, 0.0876918, 0.234842, 0.153437, 0.618, 0.180229, 0.975278, 0.923142, 0.369849, 0.707614, 0.824766, 0.554974, 0.323158, 0.414107, 0.584251, 0.225541, 0.410231, 0.0637928, 0.798973, 0.107233, 0.870988, 0.438926, 0.307955, 0.595037, 0.728912, 0.352082, 0.397782, 0.318208, 0.0216747, 0.306465, 0.414503, 0.367382, 0.750917, 0.164633, 0.62087, 0.702756, 0.805683, 0.281092, 0.871067, 0.274652, 0.582594, 0.249105, 0.877142, 0.129128, 0.499272, 0.343304, 0.878821, 0.400978, 0.154943, 0.0106663, 0.156787, 0.0905567, 0.141933, 0.86586, 0.131593, 0.760632, 0.273636, 0.213491, 0.926497, 0.981464, 0.442809, 0.314826, 0.0164317, 0.410826, 0.482253, 0.0714787, 0.495202, 0.505363, 0.597986, 0.173456, 0.592068, 0.895854, 0.180029, 0.0414354, 0.424477, 0.695132, 0.557805, 0.760983, 0.890888, 0.566274, 0.349029, 0.662938, 0.427012, 0.284279, 0.579002, 0.0998554, 0.185824, 0.919968, 0.350051, 0.335549, 0.799062, 0.411838, 0.269206, 0.369434, 0.654228, 0.287728, 0.698396, 0.743933, 0.792599, 0.537847, 0.521517, 0.347953, 0.0222605, 0.871713, 0.130449, 0.0415191, 0.409467, 0.681558, 0.149537, 0.463528, 0.642759, 0.330412, 0.609467, 0.645689, 0.260059, 0.423172, 0.200007, 0.841315, 0.899878, 0.48096, 0.763262, 0.262757, 0.61073, 0.0382123, 0.974216, 0.571543, 0.911999, 0.0921221, 0.561062, 0.224255, 0.622645, 0.868477, 0.722134, 0.418045, 0.584071, 0.867199, 0.81787, 0.358168, 0.622796, 0.723772, 0.410458, 0.0113081, 0.257779, 0.377846, 0.413835, 0.573993, 0.565759, 0.90793, 0.880657, 0.392036, 0.244096, 0.039189, 0.655473, 0.740594, 0.419871, 0.703066, 0.668243, 0.0899058, 0.00525947, 0.787991, 0.344101, 0.0696129, 0.526693, 0.632484, 0.375728, 0.541594, 0.302108, 0.943538, 0.839067, 0.910706, 0.255279, 0.357752, 0.0497454, 0.48508, 0.863901, 0.951625, 0.109146, 0.992858, 0.870187, 0.385033, 0.677675, 0.931436, 0.941014, 0.00521233, 0.105928, 0.379234, 0.939234, 0.53624, 0.384615, 0.00650715, 0.808601, 0.662308, 0.735212, 0.0695151, 0.479795, 0.929365, 0.701006, 0.834088, 0.578986, 0.744037, 0.0661025, 0.973867, 0.261607, 0.985263, 0.253784, 0.544204, 0.395415, 0.811786, 0.944674, 0.753067, 0.189604, 0.972342, 0.245785, 0.147862, 0.151078, 0.57075, 0.635751, 0.0225594, 0.345569, 0.71366, 0.725875, 0.899222, 0.403696, 0.782293, 0.0489821, 0.887563, 0.250663, 0.0903273, 0.819318, 0.253849, 0.0132522, 0.82073, 0.139879, 0.655119, 0.255256, 0.469931, 0.593655, 0.0965594, 0.732102, 0.728242, 0.0409236, 0.251098, 0.401525, 0.0154955, 0.510258, 0.110728, 0.152886, 0.542219, 0.0361622, 0.682425, 0.511941, 0.298943, 0.813534, 0.388954, 0.997212, 0.693391, 0.854456, 0.967423, 0.0728081, 0.156642, 0.81487, 0.0938402, 0.870291, 0.00359174, 0.202676, 0.257248, 0.234333, 0.943676, 0.106374, 0.133571, 0.617775, 0.246666, 0.0606647, 0.482131, 0.969785, 0.507517, 0.668117, 0.275743, 0.902052, 0.531065, 0.79073, 0.494458, 0.887946, 0.186152, 0.428784, 0.891797, 0.382117, 0.129718, 0.281837, 0.629808, 0.759124, 0.747099, 0.197973, 0.334452, 0.628123, 0.442394, 0.991491, 0.722564, 0.62702, 0.125107, 0.868864, 0.123806, 0.84943, 0.109148, 0.583516, 0.267467, 0.0877627, 0.356361, 0.183282, 0.460304, 0.472661, 0.808155, 0.075511, 0.805119, 0.183075, 0.761517, 0.0534024, 0.991497, 0.317623, 0.714449, 0.764721, 0.218848, 0.324785, 0.154461, 0.222869, 0.0563985, 0.297541, 0.266297, 0.542826, 0.215959, 0.640029, 0.988854, 
0.128625, 0.314472, 0.439426, 0.000551791, 0.912863, 0.531367, 0.679102, 0.601362, 0.105811, 0.314219, 0.618917, 0.764213, 0.80435, 0.297977, 0.671961, 0.471981, 0.241108, 0.44709, 0.0413762, 0.34009, 0.78462, 0.57256, 0.956279, 0.143586, 0.51728, 0.199193, 0.258778, 0.759231, 0.491842, 0.74638, 0.447776, 0.67335, 0.207011, 0.381928, 0.864476, 0.533022, 0.330994, 0.124275, 0.609111, 0.597315, 0.620957, 0.607921, 0.366002, 0.726719, 0.267236, 0.29924, 0.772097, 0.515079, 0.666223, 0.87179, 0.442252, 0.151785, 0.90383, 0.23036, 0.754115, 0.116684, 0.814789, 0.68958, 0.90636, 0.172124, 0.7386, 0.155248, 0.0162329, 0.624234, 0.258763, 0.948516, 0.381014, 0.810977, 0.359154, 0.235406, 0.356012, 0.0572227, 0.804645, 0.537197, 0.0874575, 0.896756, 0.136418, 0.636846, 0.634131, 0.314875, 0.548805, 0.846664, 0.01975, 0.0153124, 0.100952, 0.505608, 0.441595, 0.195817, 0.0710239, 0.418799, 0.25282, 0.502326, 0.206681, 0.22771, 0.674104, 0.517355, 0.979707, 0.987148, 0.0530045, 0.20894, 0.768026, 0.00615492, 0.125182, 0.131701, 0.33811, 0.035456, 0.409955, 0.0982514, 0.512931, 0.639528, 0.757367, 0.180887, 0.0871219, 0.693054, 0.033327, 0.432908, 0.302984, 0.306617, 0.90297, 0.231033, 0.932071, 0.621631, 0.929752, 0.0404275, 0.227181, 0.861182, 0.480741, 0.422462, 0.615124, 0.915947, 0.688272, 0.607257, 0.1993, 0.186042, 0.61691, 0.891939, 0.496047, 0.781836, 0.199635, 0.182435, 0.612945, 0.894777, 0.879591, 0.660906, 0.685511, 0.788352, 0.958656, 0.150172, 0.0447784, 0.273053, 0.216175, 0.674368, 0.93646, 0.140018, 0.47613, 0.698821, 0.453178, 0.888401, 0.933526, 0.615275, 0.000670214, 0.959936, 0.496414, 0.213354, 0.210349, 0.839183, 0.51017, 0.556663, 0.925326, 0.329077, 0.184683, 0.193059, 0.656384, 0.325528, 0.632767, 0.126699, 0.925868, 0.509177, 0.17664, 0.356426, 0.678694, 0.236191, 0.33291, 0.977011, 0.476779, 0.718923, 0.919251, 0.929422, 0.29569, 0.517599, 0.615562, 0.240366, 0.76536, 0.261594, 0.063962, 0.0635975, 0.7285, 0.0742968, 0.332238, 0.812666, 0.695448, 0.45352, 0.573693, 0.913696, 0.859744, 0.56925, 0.212218, 0.455896, 0.61064, 0.844059, 0.128933, 0.716734, 0.348449, 0.456306, 0.910307, 0.820405, 0.452022, 0.510913, 0.258321, 0.742017, 0.731542, 0.0235238, 0.040882, 0.476367, 0.656551, 0.624292, 0.204659, 0.111937, 0.615081, 0.82015, 0.275911, 0.498798, 0.461723, 0.657982, 0.853992, 0.558108, 0.906463, 0.924759, 0.619716, 0.95225, 0.589059, 0.0982191, 0.751691, 0.211192, 0.671573, 0.260589, 0.588264, 0.0346792, 0.0482873, 0.510086, 0.349504, 0.660904, 0.826408, 0.0655397, 0.580624, 0.3739, 0.163301, 0.545839, 0.625426, 0.200927, 0.273938, 0.5373, 0.403679, 0.456807, 0.239441, 0.996926, 0.102652, 0.68999, 0.475656, 0.741837, 0.401398, 0.812666, 0.546468, 0.189733, 0.342652, 0.499778, 0.717045, 0.305709, 0.550696, 0.53988, 0.100314, 0.643375, 0.474279, 0.656759, 0.466198, 0.662595, 0.633828, 0.758396, 0.761698, 0.541539, 0.489411, 0.618273, 0.434244, 0.742823, 0.606137, 0.7101, 0.954731, 0.948781, 0.7361, 0.587935, 0.486731, 0.647607, 0.75671, 0.702835, 0.711711, 0.586863, 0.848073, 0.300948, 0.148951, 0.816649, 0.988274, 0.0570969, 0.1096, 0.337949, 0.976047, 0.719306, 0.483844, 0.0940332, 0.196285, 0.747962, 0.379384, 0.106179, 0.799456, 0.174423, 0.406282, 0.246921, 0.256564, 0.828962, 0.698113, 0.0399356, 0.765064, 0.0191448, 0.386214, 0.658786, 0.652422, 0.337354, 0.288205, 0.891075, 0.158086, 0.906013, 0.018156, 0.714958, 0.89861, 0.234953, 0.527373, 0.582787, 0.568185, 0.959503, 0.636878, 0.527452, 0.121195, 0.107195, 0.441484, 0.418231, 0.753505, 0.727609, 0.0541921, 
0.485776, 0.503656, 0.433017, 0.29717, 0.392388, 0.301751, 0.315352, 0.874547, 0.575243, 0.246232, 0.825157, 0.464173, 0.680217, 0.390777, 0.822336, 0.212753, 0.787785, 0.510527, 0.467947, 0.241204, 0.521058, 0.0909303, 0.200768, 0.394032, 0.0165349, 0.805721, 0.72412, 0.770236, 0.0582422, 0.971895, 0.792536, 0.542421, 0.78991, 0.590295, 0.747239, 0.99464, 0.200602, 0.241855, 0.598453, 0.493848, 0.27987, 0.30733, 0.127177, 0.254606, 0.494435, 0.0841555, 0.576549, 0.145725, 0.0546114, 0.0362378, 0.957958, 0.288541, 0.0565653, 0.145986, 0.36743, 0.192862, 0.290345, 0.537247, 0.746676, 0.681115, 0.996155, 0.234617, 0.369617, 0.121955, 0.580465, 0.307617, 0.136119, 0.104037, 0.865525, 0.124501, 0.532013, 0.64481, 0.739872, 0.632477, 0.814801, 0.93168, 0.38521, 0.908957, 0.807734, 0.321802, 0.896562, 0.236808, 0.6599, 0.498504, 0.347144, 0.180491, 0.101337, 0.191679, 0.855543, 0.612942, 0.407704, 0.425052, 0.272515, 0.550212, 0.567009, 0.108733, 0.146584, 0.208679, 0.234469, 0.465162, 0.624395, 0.774398, 0.218611, 0.409766, 0.0898917, 0.161216, 0.148789, 0.183195, 0.0843108, 0.869797, 0.224993, 0.499894, 0.862288, 0.241025, 0.752014, 0.36165, 0.654792, 0.923862, 0.0966728, 0.789465, 0.159298, 0.740185, 0.77383, 0.991635, 0.713953, 0.367305, 0.040462, 0.623321, 0.927709, 0.756282, 0.606602, 0.113327, 0.413703, 0.0914552, 0.420561, 0.748681, 0.306706, 0.72615, 0.695731, 0.548968, 0.573537, 0.979815, 0.626213, 0.305221, 0.158659, 0.633565, 0.170755, 0.485148, 0.268398, 0.507022, 0.906592, 0.809645, 0.627242, 0.999936, 0.0216729, 0.980077, 0.52859, 0.0123064, 0.00232397, 0.972017, 0.382635, 0.121302, 0.493779, 0.567817, 0.911133, 0.0871138, 0.599627, 0.115265, 0.450287, 0.780666, 0.865141, 0.307955, 0.735813, 0.567671, 0.591114, 0.941282, 0.83531, 0.67837, 0.130881, 0.0951256, 0.502334, 0.261934, 0.857612, 0.0353245, 0.708687, 0.316288, 0.0805712, 0.457426, 0.680125, 0.0843126, 0.0490645, 0.231759, 0.578554, 0.330789, 0.8628, 0.824783, 0.166781, 0.862871, 0.81565, 0.505021, 0.885875, 0.460335, 0.865464, 0.447298, 0.876686, 0.0830998, 0.15168, 0.627932, 0.182174, 0.753408, 0.836232, 0.672615, 0.656261, 0.302819, 0.740058, 0.689478, 0.279968, 0.635182, 0.117409, 0.00626437, 0.456218, 0.807713, 0.251191, 0.820771, 0.77408, 0.174188, 0.711287, 0.754445, 0.273267, 0.767036, 0.730595, 0.522546, 0.88703, 0.361198, 0.0353319, 0.877134, 0.988986, 0.204904, 0.992377, 0.939382, 0.341524, 0.396681, 0.272108, 0.239635, 0.662509, 0.22789, 0.0619506, 0.616147, 0.938789, 0.775433, 0.990873, 0.339021, 0.779552, 0.117211, 0.712956, 0.717854, 0.131823, 0.595103, 0.0292718, 0.292804, 0.193673, 0.933603, 0.593664, 0.466499, 0.128053, 0.663693, 0.42322, 0.382425, 0.570339, 0.776564, 0.943218, 0.093447, 0.0682411, 0.69909, 0.443366, 0.317931, 0.84869, 0.183775, 0.312399, 0.945411, 0.463951, 0.432878, 0.145746, 0.188364, 0.188764, 0.457817, 0.572288, 0.537736, 0.738041, 0.459331, 0.64282, 0.0624071, 0.474108, 0.182366, 0.922296, 0.839323, 0.74715, 0.778098, 0.573252, 0.0640188, 0.944897, 0.64589, 0.999235, 0.590727, 0.337527, 0.59781, 0.0359797, 0.955797, 0.77844, 0.662479, 0.513896, 0.0163096, 0.747998, 0.0360402, 0.705696, 0.973208, 0.845952, 0.709785, 0.67494, 0.805323, 0.0969568, 0.325202, 0.0714327, 0.791022, 0.757233, 0.0614314, 0.740612, 0.605001, 0.413097, 0.954425, 0.600159, 0.873595, 0.0655813, 0.384691, 0.251813, 0.745998, 0.533274, 0.443477, 0.854914, 0.147696, 0.70125, 0.872174, 0.765999, 0.894347, 0.879889, 0.660996, 0.969646, 0.304701, 0.445453, 0.659715, 0.381944, 0.520705, 0.714079, 0.836213, 0.832662, 
0.754242, 0.30226, 0.829836, 0.370085, 0.459174, 0.0184747, 0.338752, 0.174265, 0.0608699, 0.538762, 0.819461, 0.129266, 0.145965, 0.487517, 0.780537, 0.412783, 0.679595, 0.43848, 0.378472, 0.0195386, 0.773959, 0.261836, 0.370295, 0.33285, 0.803101, 0.536888, 0.787662, 0.209807, 0.592566, 0.648303, 0.390306, 0.103752, 0.986843, 0.929569, 0.71857, 0.957646, 0.910195, 0.0648032, 0.319069, 0.680391, 0.826063, 0.0757938, 0.561298, 0.857515, 0.794047, 0.899224, 0.712926, 0.401044, 0.117418, 0.600851, 0.190359, 0.249319, 0.20464, 0.662339, 0.52592, 0.0274111, 0.836938, 0.239578, 0.651635, 0.0611872, 0.0945418, 0.528391, 0.283961, 0.364771, 0.848008, 0.485995, 0.321334, 0.515475, 0.928903, 0.570365, 0.600579, 0.434511, 0.489399, 0.52756, 0.210483, 0.967534, 0.578201, 0.859337, 0.455444, 0.601162, 0.968564, 0.464987, 0.294394, 0.950558, 0.859097, 0.353452, 0.508704, 0.0572695, 0.878435, 0.497444, 0.712181, 0.603459, 0.294603, 0.0183161, 0.742282, 0.134371, 0.838665, 0.244169, 0.215071, 0.219618, 0.868365, 0.223855, 0.10396, 0.640758, 0.360926, 0.600482, 0.428329, 0.6458, 0.884642, 0.821694, 0.564888, 0.354406, 0.50628, 0.56707, 0.48061, 0.256565, 0.162061, 0.264642, 0.176, 0.797713, 0.695703, 0.0347149, 0.358338, 0.738206, 0.996526, 0.866617, 0.702375, 0.769468, 0.778317, 0.958645, 0.705213, 0.24094, 0.613293, 0.064266, 0.355869, 0.28731, 0.755503, 0.972533, 0.236803, 0.631452, 0.337084, 0.569386, 0.597535, 0.86532, 0.107403, 0.729218, 0.929612, 0.316315, 0.374365, 0.419137, 0.0346695, 0.22599, 0.765266, 0.762818, 0.421948, 0.776579, 0.048408, 0.82698, 0.0443337, 0.110848, 0.945854, 0.622762, 0.824669, 0.452839, 0.13288, 0.0346938, 0.00553679, 0.465445, 0.348694, 0.457617, 0.214636, 0.684004, 0.65387, 0.890835, 0.103985, 0.783019, 0.618773, 0.350593, 0.0775283, 0.444621, 0.218794, 0.241285, 0.958123, 0.489579, 0.432822, 0.568368, 0.913775, 0.943068, 0.627533, 0.603186, 0.540667, 0.744381, 0.875275, 0.844755, 0.0174166, 0.464942, 0.317896, 0.0332355, 0.458499, 0.205926, 0.903786, 0.750243, 0.567707, 0.98369, 0.371693, 0.669664, 0.531643, 0.180359, 0.361585, 0.00335906, 0.318602, 0.195698, 0.369699, 0.990901, 0.677709, 0.407581, 0.493613, 0.344604, 0.676065, 0.906602, 0.638278, 0.994109, 0.740991, 0.131499, 0.929675, 0.221182, 0.13649, 0.332655, 0.340143, 0.721501, 0.769877, 0.236854, 0.480517, 0.726663, 0.351297, 0.926842, 0.083694, 0.494518, 0.155554, 0.450391, 0.982103, 0.996747, 0.174879, 0.256117, 0.139142, 0.214806, 0.158685, 0.586095, 0.338354, 0.614175, 0.841028, 0.292725, 0.466278, 0.0118071, 0.42022, 0.359505, 0.181877, 0.96953, 0.00445722, 0.348085, 0.82084, 0.700947, 0.375728, 0.960564, 0.111186, 0.540983, 0.0949823, 0.171617, 0.212728, 0.16643, 0.196016, 0.587307, 0.275467, 0.522472, 0.771585, 0.949014, 0.143102, 0.695324, 0.161092, 0.927273, 0.191568, 0.942837, 0.993984, 0.625371, 0.679651, 0.228692, 0.0646917, 0.510172, 0.240227, 0.32828, 0.693107, 0.433269, 0.605574, 0.904861, 0.0463241, 0.834283, 0.688484, 0.656301, 0.58022, 0.4408, 0.103781, 0.630454, 0.322349, 0.896852, 0.449489, 0.0570403, 0.617251, 0.883792, 0.179986, 0.707624, 0.77835, 0.243734, 0.561266, 0.586223, 0.438363, 0.953877, 0.743421, 0.733121, 0.216126, 0.90829, 0.516516, 0.692057, 0.951343, 0.0675496, 0.316683, 0.244522, 0.792675, 0.376051, 0.0143059, 0.643436, 0.298803, 0.0576619, 0.380479, 0.746218, 0.287584, 0.187898, 0.920636, 0.194111, 0.34229, 0.790386, 0.14599, 0.556051, 0.705318, 0.656441, 0.496029, 0.542332, 0.1645, 0.492319, 0.954601, 0.177927, 0.276164, 0.22762, 0.32493, 0.91123, 0.896803, 0.654353, 
0.157141, 0.224378, 0.91494, 0.162872, 0.678897, 0.993529, 0.637636, 0.694994, 0.0509588, 0.836324, 0.695569, 0.44761, 0.955311, 0.582084, 0.958026, 0.712391, 0.00320049, 0.538606, 0.332169, 0.857005, 0.414114, 0.96268, 0.274303, 0.415184, 0.523528, 0.721638, 0.599046, 0.242755, 0.615222, 0.843255, 0.470847, 0.920254, 0.316538, 0.274159, 0.8287, 0.274861, 0.824354, 0.437267, 0.277049, 0.332389, 0.190334, 0.331419, 0.109626, 0.502458, 0.182766, 0.297817, 0.732984, 0.640832, 0.555837, 0.914909, 0.468271, 0.467219, 0.40506, 0.520978, 0.656365, 0.11869, 0.456818, 0.559413, 0.144938, 0.74336, 0.584908, 0.537623, 0.674104, 0.943294, 0.659128, 0.237188, 0.146685, 0.53129, 0.0179168, 0.356908, 0.732138, 0.702265, 0.349355, 0.763519, 0.617723, 0.523395, 0.965883, 0.487675, 0.427342, 0.373506, 0.453232, 0.0394451, 0.369668, 0.00914111, 0.881545, 0.0724042, 0.927582, 0.324555, 0.0941736, 0.47234, 0.756342, 0.752158, 0.449889, 0.117064, 0.114565, 0.0045895, 0.327032, 0.387492, 0.425723, 0.862187, 0.222687, 0.766443, 0.367362, 0.690474, 0.178881, 0.135989, 0.114383, 0.825347, 0.341722, 0.782338, 0.308572, 0.293956, 0.387742, 0.0142812, 0.401329, 0.144238, 0.0495865, 0.356923, 0.156867, 0.170243, 0.718662, 0.912029, 0.834639, 0.215414, 0.0839454, 0.148086, 0.217804, 0.811317, 0.920402, 0.0586855, 0.0329758, 0.0263719, 0.658111, 0.574262, 0.692549, 0.68292, 0.913411, 0.240955, 0.186842, 0.284684, 0.117448, 0.637308, 0.957041, 0.16833, 0.719782, 0.102436, 0.379235, 0.122089, 0.602184, 0.485571, 0.84005, 0.105527, 0.469871, 0.248729, 0.00676744, 0.366441, 0.403439, 0.33634, 0.757695, 0.0786852, 0.836826, 0.78299, 0.0722696, 0.78057, 0.657075, 0.193909, 0.550114, 0.652188, 0.0142821, 0.229322, 0.934737, 0.149157, 0.88318, 0.774805, 0.964661, 0.643678, 0.360938, 0.778543, 0.157738, 0.103629, 0.931544, 0.380012, 0.121943, 0.238004, 0.760559, 0.725979, 0.53513, 0.766723, 0.0401009, 0.620393, 0.630249, 0.405008, 0.959788, 0.80575, 0.236967, 0.625684, 0.388467, 0.953654, 0.994125, 0.0230346, 0.507076, 0.210318, 0.0385301, 0.552732, 0.0265099, 0.222398, 0.423747, 0.272351, 0.834126, 0.99586, 0.0463798, 0.829511, 0.757224, 0.147362, 0.663439, 0.268156, 0.849856, 0.918821, 0.411856, 0.297368, 0.080905, 0.835083, 0.780202, 0.0422531, 0.447372, 0.737908, 0.102246, 0.729214, 0.224439, 0.428741, 0.222256, 0.306354, 0.629916, 0.00518644, 0.477361, 0.0474277, 0.467655, 0.686702, 0.817537, 0.504495, 0.518373, 0.146359, 0.252321, 0.203889, 0.910166, 0.531066, 0.313883, 0.2381, 0.545953, 0.216966, 0.418459, 0.912806, 0.176371, 0.438982, 0.240744, 0.0794779, 0.994595, 0.278453, 0.400769, 0.0782807, 0.889999, 0.418176, 0.347143, 0.273475, 0.205514, 0.759991, 0.903665, 0.409366, 0.0882369, 0.376072, 0.721938, 0.937471, 0.120882, 0.397291, 0.869296, 0.266098, 0.295355, 0.174629, 0.37502, 0.155379, 0.404924, 0.1164, 0.00462026, 0.763402, 0.496744, 0.298112, 0.951157, 0.713349, 0.0997802, 0.194445, 0.677812, 0.570464, 0.540525, 0.611322, 0.79392, 0.663679, 0.402192, 0.989105, 0.589964, 0.738755, 0.986626, 0.11957, 0.0732776, 0.240861, 0.186489, 0.512955, 0.432652, 0.0161617, 0.38682, 0.925296, 0.342052, 0.371361, 0.470874, 0.949556, 0.480838, 0.0379082, 0.9407, 0.0782391, 0.979132, 0.204998, 0.224844, 0.229374, 0.745913, 0.928883, 0.622481, 0.519469, 0.799578, 0.792531, 0.554854, 0.291082, 0.294391, 0.971212, 0.288005, 0.747603, 0.809856, 0.791771, 0.630849, 0.303592, 0.353968, 0.249254, 0.68871, 0.396236, 0.605167, 0.796824, 0.138875, 0.455243, 0.385981, 0.964006, 0.0848079, 0.792819, 0.869265, 0.803885, 0.935309, 0.417941, 
0.203644, 0.956039, 0.64233, 0.35857, 0.39705, 0.811816, 0.728825, 0.685236, 0.0717718, 0.340452, 0.201315, 0.231661, 0.367571, 0.825858, 0.965742, 0.535537, 0.557745, 0.795049, 0.688065, 0.715029, 0.11602, 0.346939, 0.0779004, 0.610731, 0.0363135, 0.191785, 0.149359, 0.179063, 0.619259, 0.894837, 0.606516, 0.835649, 0.744404, 0.85976, 0.00801029, 0.314161, 0.530013, 0.78919, 0.798813, 0.198568, 0.13958, 0.455709, 0.618315, 0.192537, 0.0597794, 0.656235, 0.383071, 0.160273, 0.991552, 0.100751, 0.0272762, 0.953681, 0.965513, 0.169782, 0.0975637, 0.888773, 0.468986, 0.98299, 0.642847, 0.982967, 0.355461, 0.784416, 0.0804038, 0.104699, 0.0973213, 0.964221, 0.299878, 0.382839, 0.457528, 0.163674, 0.563012, 0.07733, 0.848005, 0.13214, 0.608961, 0.756853, 0.134063, 0.368275, 0.909723, 0.946898, 0.00995811, 0.850901, 0.0314379, 0.195158, 0.913039, 0.279981, 0.292827, 0.602928, 0.103975, 0.0230125, 0.417945, 0.518034, 0.787355, 0.912443, 0.628286, 0.711559, 0.47867, 0.145804, 0.528277, 0.425184, 0.487301, 0.22956, 0.212752, 0.781067, 0.256432, 0.554427, 0.289068, 0.796314, 0.411552, 0.322083, 0.409769, 0.910203, 0.400269, 0.344701, 0.35303, 0.44672, 0.792342, 0.303823, 0.494524, 0.229047, 0.818262, 0.96123, 0.206095, 0.984454, 0.278431, 0.345315, 0.504815, 0.0530352, 0.362624, 0.593876, 0.104974, 0.836431, 0.528722, 0.16591, 0.263385, 0.0849035, 0.980139, 0.618537, 0.822645, 0.247457, 0.209401, 0.262689, 0.450511, 0.650026, 0.803747, 0.725778, 0.832191, 0.194117, 0.576881, 0.986633, 0.933653, 0.387109, 0.566776, 0.297886, 0.608794, 0.0209304, 0.257444, 0.555433, 0.89687, 0.572963, 0.8439, 0.287748, 0.470621, 0.0169381, 0.262726, 0.681365, 0.0883942, 0.757639, 0.464354, 0.341893, 0.949142, 0.581639, 0.423602, 0.385882, 0.208789, 0.386493, 0.314197, 0.575431, 0.120347, 0.70004, 0.662324, 0.01264, 0.114641, 0.941158, 0.897689, 0.503811, 0.574991, 0.914898, 0.913198, 0.461668, 0.101269, 0.14161, 0.394222, 0.10442, 0.395801, 0.230198, 0.188025, 0.492823, 0.109863, 0.0250429, 0.969826, 0.133829, 0.234891, 0.0746836, 0.5961, 0.286069, 0.800607, 0.165306, 0.0433477, 0.322804, 0.222233, 0.295832, 0.257818, 0.0825347, 0.216227, 0.752737, 0.0751812, 0.645368, 0.632464, 0.626096, 0.621743, 0.232217, 0.628269, 0.318728, 0.325243, 0.057867, 0.698394, 0.189754, 0.334127, 0.424018, 0.268702, 0.30531, 0.530198, 0.823827, 0.643645, 0.462101, 0.847939, 0.563049, 0.261867, 0.571493, 0.262511, 0.791703, 0.957797, 0.328557, 0.646388, 0.157902, 0.372722, 0.101383, 0.734011, 0.477462, 0.321272, 0.0474629, 0.0760908, 0.114701, 0.746874, 0.373927, 0.337629, 0.977809, 0.672818, 0.395289, 0.681723, 0.0131544, 0.493766, 0.346927, 0.689504, 0.434476, 0.413359, 0.99947, 0.964719, 0.475063, 0.727556, 0.734707, 0.223579, 0.426226, 0.756575, 0.468032, 0.248982, 0.431785, 0.668905, 0.583883, 0.354555, 0.207369, 0.549294, 0.94, 0.459033, 0.502353, 0.958875, 0.562426, 0.0652912, 0.102932, 0.212556, 0.413588, 0.429526, 0.851667, 0.410742, 0.228292, 0.963911, 0.496349, 0.430071, 0.439852, 0.767093, 0.546101, 0.255007, 0.201509, 0.563849, 0.838783, 0.365303, 0.0655348, 0.800652, 0.978375, 0.810624, 0.211064, 0.62007, 0.428826, 0.573271, 0.750786, 0.0810026, 0.416919, 0.462141, 0.000943617, 0.977593, 0.861739, 0.884804, 0.350952, 0.240372, 0.379984, 0.452709, 0.853189, 0.87503, 0.677902, 0.572678, 0.10547, 0.296211, 0.862692, 0.092125, 0.81823, 0.257098, 0.42691, 0.366794, 0.0364343, 0.482036, 0.596226, 0.33398, 0.859671, 0.28293, 0.308289, 0.577807, 0.569985, 0.162831, 0.94961, 0.723335, 0.79226, 0.87131, 0.234758, 0.55269, 0.875409, 
0.320202, 0.119095, 0.91847, 0.191865, 0.659438, 0.760577, 0.902728, 0.906769, 0.119967, 0.487316, 0.975686, 0.0340326, 0.65472, 0.309501, 0.381418, 0.530099, 0.753478, 0.93878, 0.678147, 0.688696, 0.00245004, 0.490236, 0.685161, 0.421622, 0.958775, 0.154161, 0.0930764, 0.234936, 0.198518, 0.423502, 0.870014, 0.952276, 0.955144, 0.842629, 0.836175, 0.832134, 0.538878, 0.22937, 0.885935, 0.241952, 0.319063, 0.70922, 0.269259, 0.0778196, 0.34701, 0.00989241, 0.0159236, 0.635993, 0.402755, 0.942878, 0.411093, 0.98434, 0.235589, 0.994109, 0.0852568, 0.134627, 0.209175, 0.509332, 0.236745, 0.731749, 0.354398, 0.781152, 0.751758, 0.177992, 0.710649, 0.435356, 0.605941, 0.729911, 0.695595, 0.210447, 0.0477255, 0.26159, 0.605423, 0.324117, 0.343792, 0.912852, 0.369955, 0.79601, 0.975273, 0.507518, 0.448687, 0.381703, 0.629233, 0.813722, 0.259551, 0.226481, 0.835694, 0.89278, 0.184935, 0.211618, 0.769111, 0.662097, 0.571907, 0.590063, 0.623795, 0.468348, 0.414587, 0.822965, 0.713424, 0.780369, 0.803439, 0.568014, 0.563408, 0.851325, 0.281048, 0.876279, 0.868437, 0.356462, 0.406371, 0.892966, 0.382094, 0.55697, 0.553828, 0.906916, 0.158109, 0.719806, 0.881736, 0.548851, 0.618368, 0.148101, 0.0724919, 0.537699, 0.208591, 0.239957, 0.963071, 0.0796868, 0.252854, 0.876368, 0.820898, 0.72598, 0.852893, 0.468395, 0.0790652, 0.694691, 0.418392, 0.99056, 0.297655, 0.996152, 0.0701952, 0.386079, 0.794964, 0.315134, 0.151314, 0.0534037, 0.66005, 0.544951, 0.926899, 0.49286, 0.351729, 0.615444, 0.744605, 0.140895, 0.969391, 0.675596, 0.490887, 0.421324, 0.473488, 0.0438011, 0.373039, 0.996306, 0.454424, 0.71002, 0.107729, 0.138564, 0.585049, 0.0476726, 0.419484, 0.255132, 0.378118, 0.838807, 0.7523, 0.954979, 0.903597, 0.611968, 0.0974334, 0.112477, 0.935068, 0.391622, 0.939205, 0.643313, 0.0548231, 0.915073, 0.43213, 0.102186, 0.559038, 0.993986, 0.420751, 0.340141, 0.715827, 0.699336, 0.353872, 0.792753, 0.965768, 0.702062, 0.322321, 0.135546, 0.528437, 0.900718, 0.735977, 0.817645, 0.27058, 0.518426, 0.488729, 0.896793, 0.0816079, 0.643914, 0.74724, 0.411665, 0.914873, 0.540361, 0.688787, 0.438461, 0.463882, 0.911583, 0.82772, 0.301927, 0.590404, 0.105204, 0.36647, 0.367514, 0.224721, 0.925065, 0.446643, 0.259639, 0.904391, 0.0983455, 0.130981, 0.875149, 0.636404, 0.376162, 0.127885, 0.755759, 0.604847, 0.507388, 0.604483, 0.97776, 0.27063, 0.757754, 0.40943, 0.475205, 0.165916, 0.243159, 0.35404, 0.65918, 0.941012, 0.17674, 0.538121, 0.474917, 0.60938, 0.440644, 0.168155, 0.721901, 0.0850913, 0.206669, 0.591958, 0.520876, 0.218974, 0.874243, 0.971393, 0.00567012, 0.357306, 0.0156748, 0.40943, 0.158562, 0.69115, 0.0883478, 0.312241, 0.0634005, 0.757253, 0.203473, 0.370921, 0.644864, 0.989725, 0.423615, 0.429686, 0.96982, 0.855455, 0.416648, 0.199682, 0.359497, 0.352384, 0.69602, 0.34028, 0.80014, 0.750196, 0.460324, 0.382704, 0.359189, 0.512177, 0.186704, 0.542782, 0.731731, 0.91864, 0.606535, 0.0746311, 0.763923, 0.388399, 0.519247, 0.955015, 0.0450288, 0.191685, 0.102118, 0.0870352, 0.940065, 0.458274, 0.850774, 0.739829, 0.798335, 0.446416, 0.0188362, 0.370395, 0.0983858, 0.102169, 0.514217, 0.290697, 0.272838, 0.0472025, 0.547151, 0.765991, 0.418189, 0.533692, 0.25021, 0.979033, 0.549196, 0.189791, 0.81571, 0.311049, 0.163646, 0.231815, 0.722312, 0.283984, 0.247224, 0.109344, 0.0676099, 0.945489, 0.192456, 0.845967, 0.332795, 0.61661, 0.319271, 0.900084, 0.0380171, 0.0324031, 0.627179, 0.587327, 0.0152705, 0.582405, 0.684816, 0.483042, 0.336544, 0.472563, 0.0322619, 0.724403, 0.646268, 0.0558811, 
0.541398, 0.895589, 0.293008, 0.903387, 0.493332, 0.697902, 0.477073, 0.333856, 0.143396, 0.0932805, 0.275849, 0.655504, 0.624897, 0.717618, 0.605292, 0.424038, 0.306989, 0.324962, 0.45878, 0.976576, 0.991506, 0.0725176, 0.837343, 0.588093, 0.519565, 0.817018, 0.788645, 0.420119, 0.215411, 0.141267, 0.0376012, 0.460681, 0.361827, 0.995282, 0.905563, 0.0909029, 0.808559, 0.595975, 0.977723, 0.622259, 0.0849786, 0.642514, 0.853086, 0.812154, 0.696658, 0.671264, 0.692986, 0.269517, 0.847163, 0.496374, 0.48532, 0.983314, 0.709504, 0.944325, 0.717459, 0.150419, 0.320894, 0.164588, 0.119457, 0.663862, 0.528351, 0.160297, 0.543173, 0.289749, 0.706371, 0.759099, 0.456273, 0.583254, 0.882913, 0.575666, 0.655283, 0.234053, 0.263827, 0.807735, 0.869351, 0.633226, 0.744627, 0.391122, 0.176336, 0.667208, 0.538645, 0.972913, 0.675642, 0.0848427, 0.0397247, 0.66174, 0.240831, 0.148035, 0.388729, 0.742836, 0.902954, 0.285685, 0.45282, 0.125667, 0.253222, 0.886802, 0.258285, 0.754176, 0.354836, 0.877242, 0.561047, 0.20338, 0.263599, 0.601812, 0.833404, 0.166952, 0.827574, 0.0578827, 0.906011, 0.816079, 0.928142, 0.122575, 0.830498, 0.330445, 0.771716, 0.635982, 0.829167, 0.886338, 0.0943431, 0.803303, 0.541538, 0.511255, 0.863668, 0.404193, 0.536002, 0.487578, 0.712253, 0.692238, 0.44266, 0.863275, 0.598403, 0.926607, 0.529861, 0.487415, 0.126769, 0.999496, 0.798262, 0.374716, 0.867532, 0.957177, 0.936064, 0.00403304, 0.974509, 0.40569, 0.789351, 0.0955056, 0.0637894, 0.664924, 0.896185, 0.44127, 0.481997, 0.383161, 0.200857, 0.816596, 0.336022, 0.268181, 0.796397, 0.513889, 0.791563, 0.872379, 0.290329, 0.695973, 0.375641, 0.477351, 0.648416, 0.483137, 0.0231412, 0.880657, 0.634508, 0.234432, 0.25497, 0.108345, 0.661131, 0.734542, 0.496154, 0.933504, 0.780499, 0.418836, 0.222636, 0.844764, 0.597745, 0.462602, 0.162704, 0.714849, 0.298815, 0.284211, 0.598849, 0.122437, 0.900357, 0.880802, 0.623805, 0.637685, 0.105521, 0.697668, 0.29021, 0.613283, 0.344496, 0.557836, 0.262358, 0.723321, 0.857867, 0.219355, 0.173519, 0.254927, 0.452068, 0.733882, 0.128371, 0.53921, 0.842773, 0.200744, 0.808661, 0.356352, 0.380975, 0.110313, 0.343576, 0.917519, 0.269396, 0.145521, 0.749168, 0.783015, 0.360068, 0.652331, 0.216453, 0.229264, 0.0487651, 0.508933, 0.0552494, 0.486092, 0.179105, 0.629595, 0.690417, 0.985936, 0.589246, 0.0332772, 0.521238, 0.601636, 0.512677, 0.152481, 0.448167, 0.799148, 0.986308, 0.354621, 0.712387, 0.0206986, 0.289294, 0.738303, 0.100764, 0.799571, 0.539667, 0.932385, 0.21403, 0.444996, 0.622763, 0.137635, 0.830477, 0.301631, 0.694403, 0.662686, 0.485794, 0.937107, 0.25165, 0.18586, 0.656206, 0.721032, 0.335955, 0.586088, 0.465777, 0.668967, 0.0257542, 0.413625, 0.945949, 0.194406, 0.942379, 0.716879, 0.308961, 0.917595, 0.436479, 0.0311892, 0.120783, 0.0177039, 0.821967, 0.308052, 0.961341, 0.324881, 0.919124, 0.430241, 0.525789, 0.88756, 0.88067, 0.503975, 0.0487123, 0.0710132, 0.956585, 0.712219, 0.313906, 0.720764, 0.00444719, 0.114896, 0.57459, 0.980179, 0.128499, 0.470857, 0.955974, 0.56468, 0.923821, 0.19156, 0.616556, 0.198483, 0.546788, 0.78281, 0.670577, 0.760508, 0.109579, 0.460778, 0.835972, 0.0775543, 0.729754, 0.947057, 0.436774, 0.568689, 0.670996, 0.322469, 0.813586, 0.440571, 0.116003, 0.414831, 0.67704, 0.0157284, 0.585772, 0.361394, 0.457603, 0.0797914, 0.513506, 0.764773, 0.0246359, 0.974115, 0.694106, 0.0648179, 0.433138, 0.263551, 0.185633, 0.754115, 0.354349, 0.227196, 0.931046, 0.0731813, 0.300091, 0.736974, 0.781009, 0.0421925, 0.552624, 0.0631947, 0.581713, 0.933546, 
0.0920806, 0.351353, 0.355516, 0.628235, 0.275462, 0.913275, 0.158415, 0.586904, 0.316023, 0.122767, 0.225402, 0.562004, 0.489399, 0.601167, 0.68016, 0.920806, 0.3201, 0.873, 0.170307, 0.559888, 0.348109, 0.629961, 0.767266, 0.421276, 0.929529, 0.708598, 0.22938, 0.0440367, 0.848638, 0.0428049, 0.252743, 0.0193065, 0.503607, 0.325506, 0.287519, 0.374286, 0.251823, 0.438668, 0.023447, 0.0734565, 0.378211, 0.248276, 0.9017, 0.523796, 0.0595612, 0.0253173, 0.441401, 0.437461, 0.887637, 0.710748, 0.475479, 0.227227, 0.658916, 0.659166, 0.964871, 0.770273, 0.899362, 0.191513, 0.547282, 0.998571, 0.943687, 0.738217, 0.526649, 0.067912, 0.384769, 0.0241023, 0.903523, 0.886881, 0.356369, 0.262555, 0.87154, 0.745538, 0.855665, 0.607144, 0.588464, 0.399931, 0.663382, 0.582526, 0.232184, 0.126784, 0.54676, 0.234503, 0.262531, 0.668196, 0.254086, 0.121672, 0.718222, 0.187247, 0.361491, 0.492809, 0.938355, 0.535587, 0.00922292, 0.136878, 0.670968, 0.710436, 0.62229, 0.708223, 0.259453, 0.482349, 0.290708, 0.151858, 0.349127, 0.259001, 0.271877, 0.902338, 0.789938, 0.437855, 0.125478, 0.358252, 0.786521, 0.420754, 0.658861, 0.0163478, 0.105161, 0.203591, 0.774375, 0.473021, 0.599754, 0.334038, 0.0959251, 0.470531, 0.119243, 0.113657, 0.0858567, 0.0181975, 0.0339619, 0.642201, 0.895081, 0.860213, 0.672983, 0.384996, 0.423965, 0.780832, 0.326618, 0.0349688, 0.525361, 0.241029, 0.178718, 0.640927, 0.858357, 0.890641, 0.11797, 0.592586, 0.800438, 0.260998, 0.750592, 0.715358, 0.0179428, 0.0456378, 0.581487, 0.287291, 0.768357, 0.502813, 0.488287, 0.69343, 0.0259953, 0.409564, 0.454791, 0.146126, 0.519897, 0.499388, 0.287014, 0.207348, 0.771959, 0.645787, 0.297462, 0.28361, 0.105186, 0.482753, 0.809022, 0.854729, 0.133819, 0.336536, 0.219343, 0.459939, 0.317168, 0.157819, 0.205831, 0.267773, 0.552205, 0.0871527, 0.0539443, 0.0259766, 0.775192, 0.689066, 0.577877, 0.317817, 0.761853, 0.535322, 0.461685, 0.651818, 0.89517, 0.144383, 0.643697, 0.951557, 0.0604552, 0.634357, 0.583163, 0.921442, 0.150374, 0.591284, 0.897311, 0.815218, 0.395696, 0.320972, 0.752767, 0.980248, 0.474563, 0.134607, 0.77258, 0.843649, 0.674173, 0.524785, 0.161748, 0.535902, 0.660756, 0.433184, 0.31268, 0.777464, 0.109717, 0.805761, 0.832127, 0.559389, 0.507908, 0.697832, 0.459661, 0.448527, 0.964555, 0.512994, 0.524819, 0.196892, 0.648324, 0.982221, 0.282745, 0.00992841, 0.758498, 0.490073, 0.842627, 0.258158, 0.272767, 0.400226, 0.610492, 0.544288, 0.273564, 0.988447, 0.817051, 0.605815, 0.511681, 0.416597, 0.11132, 0.329803, 0.411382, 0.144655, 0.705241, 0.265537, 0.553677, 0.119043, 0.5451, 0.559877, 0.743401, 0.222226, 0.25808, 0.827941, 0.834873, 0.829685, 0.153731, 0.238038, 0.306866, 0.0944361, 0.339559, 0.809345, 0.402423, 0.374995, 0.365251, 0.527857, 0.819625, 0.392417, 0.329393, 0.170145, 0.858625, 0.737087, 0.205577, 0.42929, 0.196763, 0.997353, 0.226181, 0.389221, 0.802482, 0.705276, 0.934375, 0.368473, 0.567592, 0.550568, 0.572299, 0.101953, 0.174655, 0.555237, 0.574632, 0.657468, 0.738106, 0.223913, 0.333262, 0.704456, 0.223142, 0.768767, 0.437104, 0.0278027, 0.0498141, 0.0119726, 0.439666, 0.994019, 0.80227, 0.566117, 0.285581, 0.806632, 0.796964, 0.363939, 0.070755, 0.279007, 0.168604, 0.673322, 0.678708, 0.0699169, 0.597309, 0.855873, 0.696652, 0.770829, 0.794169, 0.176425, 0.348368, 0.0412353, 0.578438, 0.474733, 0.777684, 0.16383, 0.224556, 0.468841, 0.369487, 0.61892, 0.488486, 0.854106, 0.653314, 0.104232, 0.443175, 0.822462, 0.915707, 0.507484, 0.70635, 0.693621, 0.306757, 0.779918, 0.635788, 0.234195, 0.514534, 
0.803395, 0.0552052, 0.822434, 0.804407, 0.0622927, 0.10914, 0.41838, 0.735576, 0.34439, 0.729037, 0.78918, 0.488477, 0.519064, 0.286652, 0.512734, 0.833559, 0.00576855, 0.453959, 0.64032, 0.0760396, 0.926822, 0.224319, 0.265194, 0.802482, 0.322861, 0.429746, 0.569138, 0.648447, 0.74595, 0.861876, 0.702314, 0.374532, 0.350631, 0.146556, 0.0341286, 0.094743, 0.707971, 0.762888, 0.516107, 0.778746, 0.478407, 0.677862, 0.779092, 0.0937173, 0.0757762, 0.692174, 0.569725, 0.987113, 0.510313, 0.471568, 0.713331, 0.92913, 0.452878, 0.888942, 0.530988, 0.907967, 0.654409, 0.21239, 0.97151, 0.594865, 0.679568, 0.467381, 0.0556365, 0.129969, 0.800023, 0.43437, 0.203467, 0.478574, 0.756405, 0.949415, 0.378931, 0.179897, 0.0495461, 0.867269, 0.260723, 0.748442, 0.61383, 0.923404, 0.752083, 0.417138, 0.995403, 0.233821, 0.886093, 0.230389, 0.322411, 0.0361808, 0.339915, 0.657302, 0.906619, 0.199931, 0.770688, 0.644829, 0.985518, 0.525221, 0.493475, 0.0286236, 0.439242, 0.209735, 0.101835, 0.637201, 0.831662, 0.756119, 0.174718, 0.914269, 0.957915, 0.0208806, 0.169436, 0.754754, 0.273484, 0.522076, 0.455332, 0.91563, 0.313839, 0.660479, 0.510527, 0.833741, 0.46268, 0.809294, 0.741722, 0.168658, 0.978001, 0.149184, 0.735709, 0.522709, 0.479326, 0.613118, 0.830525, 0.0227621, 0.505576, 0.377747, 0.1913, 0.858142, 0.00790081, 0.851575, 0.0387322, 0.32718, 0.641125, 0.636967, 0.735984, 0.0245758, 0.728612, 0.522017, 0.313846, 0.866817, 0.221846, 0.5459, 0.42157, 0.126635, 0.178881, 0.303887, 0.92745, 0.955136, 0.590524, 0.35817, 0.804892, 0.279127, 0.640639, 0.969997, 0.3401, 0.379279, 0.573095, 0.784084, 0.724919, 0.526721, 0.233893, 0.300491, 0.473953, 0.560633, 0.915887, 0.188727, 0.96499, 0.283966, 0.310681, 0.327539, 0.096427, 0.428594, 0.782244, 0.285651, 0.924983, 0.898228, 0.966544, 0.847905, 0.209188, 0.0634488, 0.782668, 0.49904, 0.169958, 0.898914, 0.908876, 0.700203, 0.907172, 0.124395, 0.0290647, 0.256473, 0.0487889, 0.832138, 0.870097, 0.896979, 0.0979281, 0.419425, 0.246167, 0.92867, 0.793419, 0.486866, 0.0357821, 0.339683, 0.249494, 0.80475, 0.244975, 0.740229, 0.187267, 0.463109, 0.205894, 0.388738, 0.978894, 0.471492, 0.456544, 0.715454, 0.511397, 0.0495463, 0.205433, 0.60397, 0.988917, 0.733172, 0.887978, 0.886802, 0.932716, 0.127125, 0.595316, 0.304644, 0.576185, 0.642502, 0.850191, 0.668777, 0.989563, 0.425959, 0.355702, 0.233301, 0.862421, 0.117375, 0.100915, 0.0256044, 0.761887, 0.79053, 0.789986, 0.761832, 0.749213, 0.0912176, 0.088784, 0.951521, 0.881513, 0.842809, 0.971145, 0.0721858, 0.355696, 0.383446, 0.147876, 0.176785, 0.862082, 0.313441, 0.140238, 0.723286, 0.926468, 0.552427, 0.827319, 0.286137, 0.988371, 0.0219655, 0.337728, 0.00817745, 0.153742, 0.504816, 0.945675, 0.984808, 0.59934, 0.728762, 0.136799, 0.41137, 0.322069, 0.595073, 0.357691, 0.454214, 0.541163, 0.539565, 0.978184, 0.165992, 0.654794, 0.323778, 0.968373, 0.658925, 0.749794, 0.739035, 0.384683, 0.536995, 0.333679, 0.865163, 0.74353, 0.3922, 0.671401, 0.352177, 0.150295, 0.970785, 0.29962, 0.660788, 0.197748, 0.977711, 0.758596, 0.946852, 0.892975, 0.446841, 0.940762, 0.667322, 0.800427, 0.312651, 0.186342, 0.819286, 0.493216, 0.545115, 0.778504, 0.797159, 0.376478, 0.98896, 0.479383, 0.281122, 0.852965, 0.0572699, 0.721198, 0.301352, 0.130462, 0.6671, 0.800354, 0.891946, 0.523626, 0.275167, 0.870484, 0.82264, 0.57437, 0.829882, 0.321675, 0.772209, 0.654474, 0.398218, 0.109524, 0.403978, 0.413849, 0.579426, 0.591973, 0.762529, 0.856503, 0.674411, 0.398713, 0.797512, 0.099607, 0.433157, 0.136374, 0.764637, 
0.908399, 0.672609, 0.439977, 0.100954, 0.93422, 0.0240447, 0.069266, 0.225199, 0.187255, 0.728396, 0.875993, 0.104968, 0.969209, 0.609646, 0.00564365, 0.502753, 0.431294, 0.66885, 0.695816, 0.0626746, 0.647059, 0.6223, 0.986112, 0.0272147, 0.626888, 0.678837, 0.558326, 0.947133, 0.0913324, 0.260489, 0.602543, 0.17503, 0.0521068, 0.290135, 0.521271, 0.884639, 0.49815, 0.593014, 0.0312677, 0.0158837, 0.382965, 0.680572, 0.258062, 0.883899, 0.814458, 0.120813, 0.866968, 0.972773, 0.620354, 0.511413, 0.180057, 0.146324, 0.641695, 0.986334, 0.350223, 0.0729314, 0.0992284, 0.283357, 0.919984, 0.240578, 0.622459, 0.268718, 0.148164, 0.193705, 0.708666, 0.656999, 0.28972, 0.209773, 0.528043, 0.468637, 0.0358156, 0.299129, 0.259809, 0.533727, 0.469928, 0.141334, 0.306205, 0.605487, 0.565809, 0.868145, 0.297265, 0.701073, 0.533545, 0.846218, 0.541661, 0.944032, 0.691853, 0.603265, 0.0399459, 0.692711, 0.32824, 0.402316, 0.424132, 0.266506, 0.868972, 0.109447, 0.249775, 0.784166, 0.0113776, 0.411315, 0.480772, 0.0472917, 0.556464, 0.0399193, 0.685194, 0.946098, 0.4144, 0.29149, 0.478439, 0.538894, 0.0534367, 0.381781, 0.371658, 0.277564, 0.215527, 0.797192, 0.218983, 0.25126, 0.0844661, 0.860819, 0.56643, 0.0382517, 0.233701, 0.302991, 0.691647, 0.150235, 0.247625, 0.228741, 0.635467, 0.152624, 0.963169, 0.922654, 0.766378, 0.333444, 0.362525, 0.675758, 0.42109, 0.279389, 0.393732, 0.445885, 0.125507, 0.275854, 0.897779, 0.224451, 0.604121, 0.622897, 0.136374, 0.606896, 0.827597, 0.429127, 0.897548, 0.941983, 0.989255, 0.509353, 0.505851, 0.0407215, 0.724423, 0.658362, 0.46157, 0.8577, 0.469323, 0.771896, 0.200368, 0.861432, 0.73596, 0.299543, 0.0188045, 0.453314, 0.685942, 0.561086, 0.265975, 0.263175, 0.72702, 0.423578, 0.245681, 0.506245, 0.564327, 0.914246, 0.470419, 0.974931, 0.462892, 0.399281, 0.185056, 0.413307, 0.8192, 0.888201, 0.868315, 0.573061, 0.00615268, 0.668579, 0.838993, 0.619337, 0.149363, 0.855573, 0.412548, 0.0649104, 0.264731, 0.25247, 0.92999, 0.177883, 0.192354, 0.187284, 0.615144, 0.25021, 0.617814, 0.849546, 0.589285, 0.214996, 0.567073, 0.00423686, 0.967902, 0.761565, 0.291421, 0.684, 0.428611, 0.324624, 0.522639, 0.406633, 0.716488, 0.944101, 0.157778, 0.306648, 0.341889, 0.848255, 0.301799, 0.67418, 0.94508, 0.146865, 0.041367, 0.24051, 0.910075, 0.887601, 0.137959, 0.872836, 0.334127, 0.158881, 0.983702, 0.239623, 0.679831, 0.767194, 0.294324, 0.211114, 0.904165, 0.052296, 0.226415, 0.404681, 0.287089, 0.133754, 0.451804, 0.236459, 0.716149, 0.74667, 0.154989, 0.578519, 0.628974, 0.288437, 0.263638, 0.25087, 0.967908, 0.96635, 0.596557, 0.902517, 0.846765, 0.482424, 0.295941, 0.536514, 0.827909, 0.220118, 0.0502084, 0.774865, 0.967328, 0.462489, 0.58572, 0.0887014, 0.321492, 0.51892, 0.470366, 0.0330294, 0.518588, 0.21838, 0.291623, 0.470716, 0.695058, 0.714625, 0.190646, 0.0231871, 0.281582, 0.468095, 0.205045, 0.0288922, 0.934656, 0.935327, 0.107926, 0.582314, 0.119016, 0.540747, 0.992215, 0.667739, 0.940446, 0.450382, 0.803589, 0.405663, 0.695985, 0.559924, 0.863922, 0.996253, 0.628522, 0.774343, 0.0609057, 0.742031, 0.573559, 0.891099, 0.55779, 0.179774, 0.153914, 0.686911, 0.427056, 0.259254, 0.210548, 0.614287, 0.487579, 0.659451, 0.488579, 0.376451, 0.605206, 0.900657, 0.790692, 0.761121, 0.147397, 0.795318, 0.255353, 0.934713, 0.154522, 0.0295818, 0.603662, 0.804485, 0.757634, 0.410537, 0.17362, 0.0996387, 0.440978, 0.275976, 0.189733, 0.891104, 0.850894, 0.723685, 0.092275, 0.269365, 0.104267, 0.698008, 0.556828, 0.711428, 0.261121, 0.960865, 0.417842, 
0.633708, 0.963086, 0.376368, 0.229058, 0.203199, 0.0503907, 0.854779, 0.305427, 0.82535, 0.858066, 0.932523, 0.711383, 0.53411, 0.0314635, 0.432959, 0.842057, 0.432475, 0.453294, 0.79789, 0.975555, 0.0504897, 0.244885, 0.357642, 0.134659, 0.483704, 0.685202, 0.331816, 0.257082, 0.870629, 0.389445, 0.881769, 0.273335, 0.230795, 0.445922, 0.39336, 0.78925, 0.848591, 0.62275, 0.961237, 0.393189, 0.296289, 0.513323, 0.10086, 0.723271, 0.770201, 0.962873, 0.113967, 0.747678, 0.122865, 0.0753474, 0.0613695, 0.385465, 0.5711, 0.515398, 0.363743, 0.402684, 0.555656, 0.587572, 0.0699067, 0.70225, 0.863724, 0.942348, 0.437034, 0.357689, 0.459404, 0.155154, 0.773141, 0.465093, 0.393312, 0.51403, 0.675212, 0.2879, 0.162334, 0.914525, 0.826562, 0.811875, 0.498997, 0.986285, 0.130915, 0.666472, 0.341303, 0.189673, 0.74172, 0.220727, 0.445003, 0.695051, 0.296795, 0.368061, 0.443056, 0.720546, 0.759742, 0.121954, 0.727911, 0.912206, 0.957863, 0.645014, 0.341238, 0.860865, 0.232864, 0.767099, 0.61917, 0.187952, 0.512629, 0.843988, 0.915118, 0.540496, 0.295861, 0.0505123, 0.686434, 0.0882185, 0.262171, 0.420626, 0.350696, 0.746233, 0.928398, 0.761852, 0.97396, 0.514489, 0.472265, 0.845776, 0.601219, 0.805928, 0.474424, 0.803486, 0.240769, 0.911924, 0.267951, 0.777723, 0.361283, 0.771012, 0.148457, 0.46787, 0.418144, 0.629043, 0.737655, 0.282112, 0.00456141, 0.778242, 0.0999024, 0.7935, 0.0889299, 0.537175, 0.388826, 0.810697, 0.347182, 0.10621, 0.819707, 0.340037, 0.152333, 0.283279, 0.979276, 0.0344373, 0.147429, 0.619208, 0.65292, 0.88814, 0.934013, 0.582763, 0.215668, 0.450112, 0.308237, 0.118658, 0.997809, 0.680986, 0.463954, 0.554396, 0.75153, 0.595619, 0.105937, 0.774186, 0.375027, 0.0250026, 0.986005, 0.392617, 0.308179, 0.043807, 0.554291, 0.540267, 0.0184381, 0.779082, 0.376191, 0.165338, 0.0928612, 0.785754, 0.373029, 0.613713, 0.289496, 0.339712, 0.303308, 0.215463, 0.305553, 0.202537, 0.949914, 0.448179, 0.138853, 0.319808, 0.659447, 0.547816, 0.498334, 0.971879, 0.217961, 0.706793, 0.642109, 0.475035, 0.360916, 0.079352, 0.885504, 0.0837289, 0.193923, 0.382282, 0.692797, 0.482966, 0.613371, 0.188388, 0.515092, 0.482802, 0.106635, 0.0881122, 0.724642, 0.767621, 0.360897, 0.133009, 0.949843, 0.812019, 0.26449, 0.534689, 0.430324, 0.407389, 0.00824263, 0.0165155, 0.618694, 0.122889, 0.448866, 0.336765, 0.178415, 0.295317, 0.753804, 0.727945, 0.558352, 0.625571, 0.235639, 0.865961, 0.532891, 0.883289, 0.833618, 0.655229, 0.444043, 0.457365, 0.0660586, 0.47803, 0.502257, 0.72243, 0.790979, 0.935351, 0.914287, 0.718386, 0.794411, 0.795054, 0.599672, 0.819843, 0.828356, 0.455682, 0.293304, 0.104403, 0.453432, 0.0959749, 0.532267, 0.178598, 0.212675, 0.356589, 0.105227, 0.00837097, 0.495895, 0.669521, 0.200667, 0.18022, 0.428803, 0.809122, 0.400827, 0.85987, 0.418807, 0.94294, 0.0407713, 0.256012, 0.319734, 0.461881, 0.451634, 0.606438, 0.238249, 0.539151, 0.102726, 0.869729, 0.874945, 0.105987, 0.422048, 0.197332, 0.895269, 0.931491, 0.059015, 0.909954, 0.755381, 0.144851, 0.975514, 0.168514, 0.505131, 0.516318, 0.172756, 0.935684, 0.426949, 0.823466, 0.910205, 0.924367, 0.793256, 0.772964, 0.890707, 0.0971714, 0.428978, 0.516884, 0.714687, 0.545899, 0.483664, 0.360612, 0.652205, 0.236498, 0.801999, 0.123016, 0.528238, 0.964533, 0.771916, 0.936722, 0.596547, 0.818391, 0.307263, 0.660682, 0.0800409, 0.371235, 0.735035, 0.850496, 0.468993, 0.551387, 0.274825, 0.368857, 0.816828, 0.0681967, 0.946167, 0.718806, 0.0888603, 0.591699, 0.641283, 0.594397, 0.398665, 0.315722, 0.135859, 0.978853, 0.309079, 
0.140261, 0.313691, 0.673345, 0.440642, 0.821303, 0.552961, 0.979481, 0.504034, 0.58892, 0.585527, 0.853985, 0.336332, 0.0911784, 0.466332, 0.905361, 0.819639, 0.893948, 0.869733, 0.298763, 0.91263, 0.622764, 0.540313, 0.398469, 0.962979, 0.383353, 0.160262, 0.703152, 0.779832, 0.795827, 0.810667, 0.439871, 0.641499, 0.785883, 0.318648, 0.853352, 0.766444, 0.706264, 0.335101, 0.624577, 0.37212, 0.201078, 0.37352, 0.851304, 0.491342, 0.825413, 0.133976, 0.846265, 0.915505, 0.907472, 0.755025, 0.261162, 0.420295, 0.994158, 0.693767, 0.0778706, 0.811818, 0.495303, 0.395107, 0.169426, 0.327305, 0.250433, 0.682925, 0.160715, 0.448879, 0.992313, 0.0539564, 0.550548, 0.196541, 0.706638, 0.815899, 0.75727, 0.422836, 0.0438398, 0.97144, 0.178127, 0.412192, 0.462741, 0.806464, 0.332466, 0.177072, 0.46366, 0.653041, 0.760624, 0.102086, 0.696651, 0.0980398, 0.434837, 0.153154, 0.516072, 0.872444, 0.20529, 0.110408, 0.836553, 0.785342, 0.0681476, 0.402417, 0.182108, 0.806673, 0.33937, 0.0178925, 0.00914763, 0.141008, 0.396542, 0.389862, 0.985174, 0.279027, 0.700033, 0.167493, 0.551906, 0.302566, 0.0497502, 0.0198132, 0.902196, 0.760487, 0.195984, 0.363775, 0.876091, 0.918512, 0.305526, 0.0707057, 0.3784, 0.146303, 0.376803, 0.368367, 0.128923, 0.140323, 0.05595, 0.726568, 0.327314, 0.828088, 0.535435, 0.705327, 0.592081, 0.634712, 0.146107, 0.279934, 0.480118, 0.486797, 0.783111, 0.251334, 0.452389, 0.531007, 0.132591, 0.300987, 0.515879, 0.777004, 0.493762, 0.357516, 0.0579536, 0.908164, 0.0571019, 0.528834, 0.0666216, 0.138407, 0.124962, 0.964148, 0.360372, 0.113073, 0.0409483, 0.34229, 0.615291, 0.694401, 0.23139, 0.608677, 0.0634488, 0.85828, 0.994858, 0.273237, 0.67353, 0.921285, 0.467996, 0.372084, 0.179355, 0.371782, 0.47465, 0.82362, 0.96306, 0.430145, 0.686474, 0.316809, 0.0679466, 0.850082, 0.689786, 0.982811, 0.276689, 0.138777, 0.627145, 0.0227534, 0.611696, 0.123369, 0.572623, 0.345923, 0.84582, 0.0345596, 0.518239, 0.347555, 0.453245, 0.710756, 0.064706, 0.544765, 0.46104, 0.24783, 0.799783, 0.48744, 0.0250994, 0.543547, 0.277255, 0.478538, 0.946262, 0.974664, 0.205728, 0.777003, 0.201277, 0.33814, 0.243907, 0.553557, 0.233149, 0.899776, 0.995486, 0.108742, 0.0942018, 0.0567805, 0.347665, 0.687884, 0.830904, 0.27803, 0.760594, 0.44421, 0.924831, 0.64463, 0.905892, 0.495716, 0.511623, 0.605581, 0.677949, 0.272243, 0.765111, 0.427617, 0.92866, 0.485402, 0.978395, 0.969747, 0.515605, 0.371774, 0.807003, 0.977104, 0.048981, 0.909226, 0.428103, 0.0229549, 0.118309, 0.599141, 0.857947, 0.974639, 0.897659, 0.0615562, 0.289933, 0.0276253, 0.473482, 0.711814, 0.194624, 4.80611e-05, 0.736741, 0.605372, 0.391581, 0.614274, 0.484809, 0.634995, 0.535787, 0.404942, 0.988702, 0.240248, 0.743364, 0.0838462, 0.187371, 0.174739, 0.477928, 0.382234, 0.148336, 0.354439, 0.54166, 0.507996, 0.804864, 0.21883, 0.121817, 0.0232491, 0.11177, 0.686514, 0.70907, 0.546844, 0.79575, 0.607758, 0.204538, 0.634101, 0.999171, 0.182992, 0.400803, 0.617142, 0.887625, 0.947406, 0.140956, 0.497278, 0.83746, 0.317648, 0.999817, 0.9142, 0.804219, 0.223033, 0.105793, 0.912205, 0.777269, 0.530467, 0.420292, 0.393993, 0.909749, 0.376047, 0.881531, 0.296851, 0.679283, 0.421624, 0.531365, 0.234937, 0.78704, 0.251236, 0.710318, 0.487208, 0.765078, 0.447392, 0.0101642, 0.548467, 0.574968, 0.452874, 0.860894, 0.0284263, 0.800348, 0.438501, 0.819314, 0.654027, 0.994083, 0.650283, 0.862997, 0.603083, 0.999607, 0.41725, 0.199425, 0.321242, 0.122539, 0.45267, 0.978689, 0.251487, 0.865512, 0.107143, 0.3163, 0.781222, 0.218965, 0.678676, 
0.361759, 0.383299, 0.511072, 0.474337, 0.126522, 0.0144572, 0.743275, 0.339008, 0.588972, 0.619087, 0.143189, 0.0114255, 0.322651, 0.563701, 0.189263, 0.418681, 0.814716, 0.676409, 0.487035, 0.453306, 0.795916, 0.696411, 0.422332, 0.0126313, 0.508399, 0.110933, 0.546677, 0.539665, 0.00415484, 0.810452, 0.173893, 0.552889, 0.89214, 0.468457, 0.205398, 0.728536, 0.509075, 0.391114, 0.888057, 0.812956, 0.196968, 0.669791, 0.791387, 0.771562, 0.202315, 0.651431, 0.420198, 0.628602, 0.50791, 0.640779, 0.338275, 0.405885, 0.0964908, 0.79902, 0.922061, 0.493261, 0.216489, 0.543238, 0.333484, 0.755377, 0.576159, 0.969472, 0.522098, 0.457775, 0.598693, 0.330805, 0.863314, 0.831992, 0.748437, 0.616371, 0.264622, 0.259064, 0.865064, 0.764203, 0.252056, 0.119722, 0.114517, 0.342252, 0.880784, 0.281413, 0.205044, 0.840007, 0.255194, 0.834143, 0.871411, 0.16023, 0.320923, 0.0834858, 0.478612, 0.272666, 0.00756577, 0.879042, 0.071868, 0.746385, 0.41312, 0.242321, 0.0599442, 0.755287, 0.467244, 0.543174, 0.635324, 0.427335, 0.278091, 0.334103, 0.362711, 0.192493, 0.760886, 0.974795, 0.757204, 0.805059, 0.354442, 0.873249, 0.268084, 0.228769, 0.533957, 0.207452, 0.318189, 0.958777, 0.948857, 0.686822, 0.989912, 0.849037, 0.548661, 0.366009, 0.00317662, 0.804437, 0.158709, 0.42255, 0.367908, 0.402966, 0.224004, 0.251398, 0.622359, 0.752307, 0.767072, 0.851199, 0.706457, 0.189544, 0.297875, 0.403974, 0.663527, 0.366106, 0.662648, 0.429398, 0.499944, 0.453279, 0.162184, 0.0747734, 0.298532, 0.157023, 0.139665, 0.495046, 0.926777, 0.922881, 0.956901, 0.945996, 0.38083, 0.905667, 0.744382, 0.894385, 0.270544, 0.944221, 0.199435, 0.0738544, 0.350719, 0.500552, 0.938809, 0.427182, 0.545711, 0.63355, 0.520618, 0.347827, 0.751288, 0.861464, 0.959739, 0.575297, 0.422754, 0.205007, 0.500525, 0.711207, 0.490991, 0.306655, 0.142307, 0.313213, 0.877154, 0.0158508, 0.143379, 0.684596, 0.310244, 0.237449, 0.378082, 0.0469691, 0.436923, 0.881211, 0.119355, 0.59732, 0.758995, 0.0476668, 0.895984, 0.339286, 0.231267, 0.829895, 0.35967, 0.306362, 0.788418, 0.699965, 0.378785, 0.197394, 0.0500028, 0.67789, 0.497717, 0.443508, 0.457965, 0.103399, 0.628687, 0.35011, 0.704075, 0.876073, 0.911205, 0.576663, 0.947544, 0.365168, 0.797632, 0.785952, 0.893983, 0.582941, 0.827894, 0.448882, 0.73855, 0.607924, 0.950706, 0.511431, 0.200783, 0.548876, 0.244863, 0.932279, 0.810936, 0.0103678, 0.612129, 0.169734, 0.209493, 0.0879184, 0.442374, 0.401189, 0.105173, 0.345967, 0.182109, 0.36976, 0.09317, 0.970026, 0.643652, 0.962568, 0.848314, 0.393592, 0.902751, 0.854983, 0.078482, 0.825743, 0.122724, 0.758879, 0.329561, 0.0480038, 0.0714241, 0.412128, 0.242228, 0.721571, 0.472363, 0.661625, 0.606352, 0.593979, 0.809245, 0.32716, 0.924303, 0.870162, 0.323733, 0.0693531, 0.55446, 0.186764, 0.840536, 0.711154, 0.0941028, 0.293861, 0.131322, 0.167072, 0.992632, 0.0937776, 0.300854, 0.609393, 0.742901, 0.445719, 0.49257, 0.615199, 0.880634, 0.880552, 0.899752, 0.0147246, 0.226301, 0.182127, 0.94602, 0.296728, 0.955755, 0.536567, 0.140081, 0.430715, 0.022259, 0.587231, 0.589689, 0.755506, 0.392815, 0.354239, 0.539713, 0.272697, 0.917373, 0.746497, 0.684056, 0.523074, 0.557538, 0.995858, 0.779072, 0.139476, 0.106625, 0.357108, 0.611952, 0.581954, 0.726367, 0.758944, 0.0931899, 0.571819, 0.851852, 0.496957, 0.604608, 0.963661, 0.688038, 0.284632, 0.511381, 0.49557, 0.794898, 0.185432, 0.275114, 0.798329, 0.292664, 0.924401, 0.142738, 0.743984, 0.246694, 0.837575, 0.0657893, 0.856324, 0.877732, 0.433399, 0.897591, 0.059876, 0.828088, 0.364716, 
0.138416, 0.991326, 0.377147, 0.376242, 0.751253, 0.325726, 0.531879, 0.154677, 0.0220212, 0.194491, 0.389655, 0.570077, 0.578739, 0.289017, 0.336876, 0.40144, 0.250632, 0.181667, 0.400037, 0.250547, 0.0483655, 0.336806, 0.970015, 0.297673, 0.668749, 0.500495, 0.539103, 0.996542, 0.34859, 0.0512221, 0.945941, 0.88657, 0.656174, 0.113437, 0.753097, 0.323058, 0.47051, 0.220096, 0.721297, 0.270871, 0.489782, 0.402427, 0.165052, 0.206266, 0.667431, 0.852144, 0.47244, 0.0671329, 0.458507, 0.781481, 0.743982, 0.214483, 0.702029, 0.177756, 0.704948, 0.551822, 0.194999, 0.885391, 0.770998, 0.696298, 0.870784, 0.756745, 0.744028, 0.968355, 0.229207, 0.923294, 0.338094, 0.00576244, 0.164907, 0.250586, 0.945024, 0.424857, 0.227656, 0.188152, 0.480328, 0.490762, 0.798034, 0.473941, 0.505145, 0.204663, 0.914953, 0.530389, 0.584498, 0.669886, 0.745894, 0.562383, 0.280916, 0.88575, 0.250918, 0.0274456, 0.696608, 0.911081, 0.158052, 0.996168, 0.789893, 0.508773, 0.476583, 0.773221, 0.0636369, 0.981401, 0.0452942, 0.0399631, 0.0125742, 0.0699655, 0.304051, 0.457593, 0.882599, 0.340307, 0.838223, 0.0610541, 0.653261, 0.490571, 0.985308, 0.800377, 0.250854, 0.19761, 0.0550185, 0.371155, 0.2663, 0.749814, 0.390379, 0.540894, 0.866225, 0.599341, 0.623863, 0.147507, 0.340541, 0.562505, 0.524171, 0.803077, 0.212648, 0.103451, 0.939081, 0.821543, 0.613976, 0.870403, 0.297362, 0.978688, 0.430094, 0.659263, 0.0556336, 0.957836, 0.355746, 0.310996, 0.350179, 0.930027, 0.411764, 0.249732, 0.864553, 0.642199, 0.364663, 0.672106, 0.827456, 0.313871, 0.167488, 0.931276, 0.966087, 0.374795, 0.433548, 0.52337, 0.978344, 0.190493, 0.0730634, 0.306037, 0.71492, 0.531433, 0.502825, 0.071328, 0.291312, 0.855648, 0.25017, 0.278271, 0.499139, 0.497774, 0.44888, 0.577385, 0.876234, 0.759508, 0.1953, 0.862256, 0.877218, 0.104284, 0.376674, 0.410412, 0.234124, 0.0318643, 0.864037, 0.927956, 0.703499, 0.956903, 0.552247, 0.658513, 0.573978, 0.270947, 0.742917, 0.660652, 0.816154, 4.32132e-05, 0.677011, 0.988374, 0.192157, 0.198066, 0.401928, 0.0106299, 0.942823, 0.929371, 0.369708, 0.314788, 0.0972919, 0.610464, 0.712952, 0.29258, 0.541078, 0.753934, 0.886256, 0.120093, 0.914417, 0.0949847, 0.466657, 0.246748, 0.769934, 0.129665, 0.740422, 0.0670465, 0.829317, 0.449931, 0.990937, 0.904759, 0.198826, 0.51592, 0.239497, 0.519742, 0.628357, 0.6759, 0.398503, 0.30371, 0.552565, 0.0333937, 0.733752, 0.625645, 0.464227, 0.488482, 0.757457, 0.637261, 0.310318, 0.964036, 0.909093, 0.115461, 0.148526, 0.555448, 0.922466, 0.343884, 0.419185, 0.356125, 0.213423, 0.0435329, 0.898867, 0.0891735, 0.641862, 0.213841, 0.208514, 0.831035, 0.0517722, 0.26471, 0.878577, 0.938453, 0.224824, 0.100465, 0.667, 0.870299, 0.766457, 0.931991, 0.362135, 0.424383, 0.782503, 0.487522, 0.910816, 0.323453, 0.164971, 0.922241, 0.785173, 0.912289, 0.634274, 0.810572, 0.425389, 0.690095, 0.417522, 0.593327, 0.773463, 0.2411, 0.946195, 0.384804, 0.249281, 0.104386, 0.888578, 0.68157, 0.0452774, 0.499313, 0.458826, 0.0660197, 0.178962, 0.800402, 0.89069, 0.302542, 0.498244, 0.569138, 0.967543, 0.116232, 0.747443, 0.11968, 0.28874, 0.730311, 0.704777, 0.974278, 0.651736, 0.201235, 0.88472, 0.462319, 0.338158, 0.618101, 0.0240971, 0.517106, 0.523773, 0.00687858, 0.449423, 0.674406, 0.0622918, 0.211566, 0.0119114, 0.890941, 0.856063, 0.704314, 0.605034, 0.576547, 0.00426751, 0.99426, 0.714742, 0.746024, 0.317009, 0.280869, 0.841606, 0.689827, 0.424418, 0.60691, 0.0523809, 0.318917, 0.463461, 0.56929, 0.396706, 0.642232, 0.437812, 0.0497973, 0.622397, 0.0107665, 
0.157465, 0.956789, 0.345101, 0.509413, 0.178574, 0.567599, 0.467401, 0.0663775, 0.751779, 0.467074, 0.203206, 0.565589, 0.695089, 0.930904, 0.355701, 0.494375, 0.0548747, 0.995289, 0.738122, 0.966138, 0.74783, 0.331163, 0.547964, 0.750492, 0.0873614, 0.803391, 0.875753, 0.983908, 0.615168, 0.881782, 0.66364, 0.821011, 0.0437424, 0.158211, 0.81537, 0.254053, 0.0841519, 0.600394, 0.54709, 0.304293, 0.161425, 0.680684, 0.134827, 0.822791, 0.171279, 0.263711, 0.904173, 0.730721, 0.837627, 0.630713, 0.424171, 0.1436, 0.490414, 0.588829, 0.277178, 0.501833, 0.132825, 0.99945, 0.82426, 0.346863, 0.802016, 0.563137, 0.405649, 0.300828, 0.30626, 0.959529, 0.317436, 0.757366, 0.168739, 0.814747, 0.795999, 0.987332, 0.851515, 0.175644, 0.45275, 0.319911, 0.511743, 0.769124, 0.682705, 0.611449, 0.149342, 0.473472, 0.829222, 0.340482, 0.910498, 0.410818, 0.149561, 0.313595, 0.16491, 0.175943, 0.592428, 0.775068, 0.296364, 0.129827, 0.720047, 0.0424121, 0.0195966, 0.550885, 0.628628, 0.860307, 0.457882, 0.604149, 0.292863, 0.221377, 0.725782, 0.467317, 0.245825, 0.959323, 0.579488, 0.053509, 0.623069, 0.84234, 0.0756839, 0.490533, 0.535881, 0.78284, 0.003483, 0.294261, 0.577209, 0.85636, 0.848122, 0.215597, 0.401663, 0.499195, 0.383997, 0.0772853, 0.393267, 0.967649, 0.723874, 0.0134035, 0.417533, 0.616528, 0.275959, 0.542618, 0.0333001, 0.051544, 0.683903, 0.405466, 0.635196, 0.897953, 0.235144, 0.395946, 0.140848, 0.461057, 0.741495, 0.261481, 0.996666, 0.456418, 0.938732, 0.434404, 0.785783, 0.31698, 0.501701, 0.24481, 0.538954, 0.238666, 0.600782, 0.446896, 0.94859, 0.0814037, 0.211605, 0.892435, 0.65906, 0.074547, 0.991276, 0.711306, 0.0208342, 0.147429, 0.152632, 0.224157, 0.277968, 0.414186, 0.00208745, 0.775913, 0.571588, 0.758412, 0.828677, 0.0829325, 0.933862, 0.106283, 0.519618, 0.0881866, 0.224594, 0.0189084, 0.0659271, 0.974928, 0.82854, 0.134003, 0.738624, 0.543466, 0.0863006, 0.882607, 0.320078, 0.666928, 0.693226, 0.70438, 0.85448, 0.775868, 0.334582, 0.635723, 0.219116, 0.909936, 0.736899, 0.409991, 0.157747, 0.616639, 0.561564, 0.12609, 0.651409, 0.917947, 0.129513, 0.596813, 0.704076, 0.728928, 0.611481, 0.291842, 0.273992, 0.454171, 0.62646, 0.0388674, 0.625337, 0.817961, 0.500557, 0.812369, 0.344025, 0.509396, 0.309911, 0.128096, 0.278844, 0.596262, 0.530951, 0.952717, 0.289722, 0.787079, 0.746903, 0.685077, 0.412685, 0.055122, 0.39083, 0.929635, 0.933345, 0.0566744, 0.137104, 0.531075, 0.420183, 0.915949, 0.233181, 0.877726, 0.151468, 0.158288, 0.949597, 0.861046, 0.411398, 0.0607589, 0.860483, 0.747925, 0.417887, 0.542449, 0.541183, 0.0290597, 0.490308, 0.994314, 0.0288154, 0.448569, 0.29556, 0.254556, 0.214834, 0.994449, 0.455901, 0.424348, 0.0455689, 0.943316, 0.921489, 0.531744, 0.78504, 0.348116, 0.367114, 0.963063, 0.283874, 0.332566, 0.227099, 0.464252, 0.593859, 0.686899, 0.839795, 0.367608, 0.381914, 0.674551, 0.217788, 0.124624, 0.25339, 0.0966057, 0.753043, 0.918051, 0.699762, 0.646178, 0.37249, 0.285682, 0.518535, 0.551352, 0.067261, 0.299475, 0.302092, 0.0265967, 0.160134, 0.956269, 0.771048, 0.415498, 0.223088, 0.470771, 0.158735, 0.0508719, 0.796851, 0.71981, 0.334176, 0.402225, 0.734121, 0.411236, 0.28083, 0.604116, 0.284539, 0.109407, 0.392471, 0.223762, 0.757649, 0.751274, 0.726113, 0.788269, 0.0118473, 0.20756, 0.135646, 0.814533, 0.366708, 0.180041, 0.895747, 0.213798, 0.930997, 0.169217, 0.916498, 0.297585, 0.507309, 0.566975, 0.245468, 0.614427, 0.603645, 0.555494, 0.222661, 0.0870348, 0.76626, 0.615529, 0.986071, 0.61517, 0.504321, 0.0445614, 0.373633, 
0.664185, 0.8271, 0.278683, 0.686827, 0.0810148, 0.0660232, 0.286511, 0.512195, 0.508847, 0.985641, 0.619457, 0.600938, 0.0439228, 0.161026, 0.163524, 0.830615, 0.640138, 0.122284, 0.79851, 0.586152, 0.804195, 0.624873, 0.599216, 0.0661066, 0.976798, 0.300219, 0.753325, 0.653453, 0.382173, 0.0178898, 0.318674, 0.492323, 0.765235, 0.727468, 0.564317, 0.225747, 0.0612711, 0.345757, 0.834737, 0.827275, 0.316303, 0.744176, 0.183844, 0.409722, 0.8062, 0.561327, 0.564014, 0.11746, 0.914365, 0.271437, 0.509299, 0.945122, 0.253176, 0.617672, 0.0162595, 0.310009, 0.492477, 0.304483, 0.766467, 0.748623, 0.11091, 0.903961, 0.985104, 0.736744, 0.0377434, 0.581304, 0.968533, 0.027765, 0.918994, 0.669358, 0.955353, 0.461749, 0.579524, 0.351479, 0.484179, 0.758077, 0.280257, 0.309951, 0.159065, 0.222593, 0.563052, 0.0881087, 0.597631, 0.396301, 0.590142, 0.140283, 0.14033, 0.963856, 0.430781, 0.598894, 0.453247, 0.043933, 0.5655, 0.296147, 0.920062, 0.790409, 0.898751, 0.365284, 0.899785, 0.49559, 0.357233, 0.456651, 0.913636, 0.0415706, 0.27083, 0.361005, 0.512235, 0.910817, 0.640769, 0.41952, 0.225875, 0.0826572, 0.872911, 0.779527, 0.425178, 0.230952, 0.792504, 0.538477, 0.518485, 0.759812, 0.775982, 0.267845, 0.151747, 0.0620934, 0.380695, 0.767428, 0.180766, 0.768072, 0.178788, 0.306042, 0.265824, 0.625785, 0.191432, 0.684929, 0.892178, 0.880488, 0.0335891, 0.624288, 0.321644, 0.997681, 0.676068, 0.576232, 0.0639671, 0.919977, 0.228481, 0.9968, 0.87273, 0.93279, 0.680226, 0.276056, 0.59775, 0.960059, 0.835946, 0.130988, 0.676691, 0.374077, 0.871108, 0.660882, 0.819851, 0.412905, 0.715654, 0.288157, 0.421731, 0.344712, 0.340205, 0.726632, 0.346875, 0.494607, 0.154577, 0.419996, 0.866988, 0.923872, 0.224584, 0.188632, 0.688256, 0.62134, 0.566141, 0.508034, 0.446173, 0.276762, 0.186727, 0.84382, 0.520522, 0.146267, 0.326724, 0.276408, 0.0433107, 0.775426, 0.0716586, 0.598046, 0.201579, 0.0470062, 0.110896, 0.742639, 0.733008, 0.964045, 0.234818, 0.00908989, 0.14847, 0.764839, 0.372257, 0.0732454, 0.736048, 0.530724, 0.611581, 0.704895, 0.978628, 0.104067, 0.0587821, 0.466607, 0.513859, 0.073367, 0.456279, 0.950761, 0.779717, 0.743702, 0.85057, 0.837475, 0.21548, 0.0758656, 0.572578, 0.0399919, 0.13249, 0.558964, 0.183807, 0.175215, 0.887784, 0.70033, 0.280512, 0.211932, 0.383173, 0.400913, 0.282131, 0.395625, 0.0756162, 0.0690113, 0.508337, 0.617142, 0.137114, 0.0948981, 0.495932, 0.789715, 0.685948, 0.883749, 0.648337, 0.769573, 0.521979, 0.0355754, 0.373375, 0.257872, 0.209324, 0.62097, 0.615032, 0.570179, 0.308089, 0.263833, 0.266339, 0.449532, 0.333846, 0.334076, 0.29857, 0.560688, 0.0930012, 0.407661, 0.733649, 0.435651, 0.0500815, 0.708807, 0.618053, 0.0498194, 0.576019, 0.905721, 0.751875, 0.283647, 0.132191, 0.572939, 0.792567, 0.598213, 0.861111, 0.231506, 0.249733, 0.918481, 0.896616, 0.595286, 0.445973, 0.125517, 0.341052, 0.633957, 0.170552, 0.961218, 0.633031, 0.957408, 0.513017, 0.849744, 0.692573, 0.341432, 0.603468, 0.120112, 0.924676, 0.963316, 0.0206015, 0.778159, 0.33578, 0.700126, 0.0166289, 0.653527, 0.508319, 0.244928, 0.144974, 0.805342, 0.142692, 0.276244, 0.90889, 0.243504, 0.815123, 0.643733, 0.249388, 0.87785, 0.397713, 0.726635, 0.901361, 0.373727, 0.308042, 0.332409, 0.457064, 0.621418, 0.501509, 0.708201, 0.624883, 0.843138, 0.139259, 0.0915826, 0.692634, 0.969916, 0.310174, 0.840849, 0.357211, 0.39896, 0.916974, 0.109918, 0.993132, 0.719899, 0.416538, 0.691254, 0.532508, 0.120668, 0.23541, 0.320957, 0.918355, 0.750493, 0.150184, 0.204495, 0.851455, 0.612642, 0.640601, 
0.807641, 0.0322647, 0.611851, 0.693634, 0.291402, 0.814455, 0.273817, 0.485362, 0.354739, 0.0473627, 0.125383, 0.504974, 0.678705, 0.411693, 0.841946, 0.0586182, 0.769021, 0.148958, 0.540816, 0.0035579, 0.167399, 0.992512, 0.424822, 0.967926, 0.891685, 0.417781, 0.700522, 0.898224, 0.0247773, 0.755678, 0.719676, 0.841502, 0.404121, 0.791645, 0.415749, 0.386102, 0.2485, 0.849075, 0.217277, 0.283421, 0.393216, 0.928606, 0.899547, 0.408452, 0.345138, 0.293429, 0.701644, 0.971665, 0.68058, 0.69751, 0.0275219, 0.866652, 0.763843, 0.27899, 0.950548, 0.645158, 0.569168, 0.611048, 0.179961, 0.931273, 0.673296, 0.535193, 0.980806, 0.462838, 0.365812, 0.433387, 0.34521, 0.353489, 0.865886, 0.390057, 0.826091, 0.402746, 0.490749, 0.0730381, 0.88921, 0.548313, 0.568108, 0.446319, 0.976011, 0.0100278, 0.150469, 0.129043, 0.789769, 0.547251, 0.228088, 0.210507, 0.182635, 0.865737, 0.895096, 0.363658, 0.981409, 0.847858, 0.914729, 0.824938, 0.432977, 0.651361, 0.77585, 0.774417, 0.802889, 0.836025, 0.561158, 0.600279, 0.925, 0.690457, 0.781047, 0.805659, 0.900502, 0.0238837, 0.101743, 0.171844, 0.209899, 0.197662, 0.140042, 0.955598, 0.880282, 0.562671, 0.436887, 0.964637, 0.647575, 0.276856, 0.256236, 0.495836, 0.95714, 0.549969, 0.0156375, 0.0639922, 0.582348, 0.446425, 0.361302, 0.148736, 0.760943, 0.796099, 0.855681, 0.212617, 0.467775, 0.602453, 0.68756, 0.912814, 0.367024, 0.557372, 0.819638, 0.630635, 0.328515, 0.797586, 0.867159, 0.433949, 0.226666, 0.774877, 0.41631, 0.811486, 0.655874, 0.597928, 0.866057, 0.451696, 0.956782, 0.169419, 0.511258, 0.653091, 0.268976, 0.900806, 0.993532, 0.804963, 0.832374, 0.879101, 0.950586, 0.672735, 0.515634, 0.274928, 0.117228, 0.573043, 0.845599, 0.393487, 0.576845, 0.475925, 0.236152, 0.88211, 0.520927, 0.612104, 0.840932, 0.54499, 0.51173, 0.917307, 0.321128, 0.181292, 0.310309, 0.945798, 0.200201, 0.0525791, 0.751168, 0.409206, 0.83109, 0.0427054, 0.442786, 0.482222, 0.298017, 0.486019, 0.2301, 0.158034, 0.476069, 0.100636, 0.443695, 0.998788, 0.440997, 0.389098, 0.0823183, 0.473866, 0.932606, 0.219362, 0.611553, 0.928534, 0.707063, 0.0527836, 0.713546, 0.899773, 0.299491, 0.144848, 0.506873, 0.0747662, 0.482198, 0.957113, 0.0717272, 0.534383, 0.860147, 0.677232, 0.221148, 0.416428, 0.301721, 0.18942, 0.0665931, 0.412089, 0.713876, 0.549685, 0.740688, 0.160386, 0.461875, 0.988385, 0.549881, 0.336303, 0.546259, 0.891491, 0.534317, 0.138813, 0.926054, 0.271542, 0.27805, 0.534913, 0.00615897, 0.692222, 0.127229, 0.730899, 0.918962, 0.548539, 0.830681, 0.886659, 0.0198554, 0.214369, 0.944238, 0.469166, 0.9595, 0.714853, 0.754693, 0.632177, 0.136082, 0.214233, 0.148486, 0.142554, 0.71482, 0.610274, 0.939756, 0.921736, 0.0163028, 0.675795, 0.964418, 0.465224, 0.932563, 0.78443, 0.215089, 0.903087, 0.572354, 0.684852, 0.411951, 0.251114, 0.446106, 0.875319, 0.105901, 0.79616, 0.239362, 0.932456, 0.925477, 0.280446, 0.964613, 0.523841, 0.823652, 0.272638, 0.0929042, 0.253928, 0.73175, 0.440296, 0.471819, 0.750174, 0.205456, 0.403266, 0.433924, 0.0774383, 0.948408, 0.337614, 0.50142, 0.682877, 0.854771, 0.703206, 0.42341, 0.676134, 0.214581, 0.85109, 0.0440497, 0.943383, 0.95148, 0.722163, 0.236531, 0.378478, 0.583662, 0.947408, 0.870719, 0.615271, 0.222844, 0.298359, 0.539429, 0.674788, 0.0780377, 0.940042, 0.0124713, 0.489701, 0.2671, 0.65149, 0.736091, 0.10314, 0.208335, 0.344688, 0.854764, 0.960612, 0.172681, 0.403244, 0.606578, 0.558464, 0.750431, 0.251524, 0.869247, 0.884348, 0.739821, 0.886854, 0.917043, 0.240966, 0.611184, 0.67041, 0.0332811, 0.999595, 
0.900896, 0.125225, 0.61326, 0.621172, 0.765687, 0.556, 0.906384, 0.79286, 0.0830869, 0.561454, 0.415814, 0.496362, 0.704361, 0.744402, 0.453154, 0.4897, 0.691936, 0.513089, 0.626925, 0.309659, 0.668496, 0.706798, 0.123579, 0.0452503, 0.701868, 0.68447, 0.322042, 0.755855, 0.353057, 0.954946, 0.297459, 0.554577, 0.0398144, 0.0716342, 0.911758, 0.0967312, 0.279211, 0.640686, 0.290193, 0.294131, 0.889354, 0.107775, 0.0859083, 0.955491, 0.750323, 0.301426, 0.0679114, 0.542268, 0.0971271, 0.175738, 0.939917, 0.740314, 0.757107, 0.601177, 0.0935744, 0.344923, 0.968417, 0.431867, 0.498361, 0.295489, 0.103358, 0.134519, 0.291735, 0.647672, 0.336172, 0.756561, 0.945544, 0.0282516, 0.341329, 0.743685, 0.298849, 0.205219, 0.242202, 0.87606, 0.827173, 0.984938, 0.854145, 0.190253, 0.609475, 0.311419, 0.988331, 0.666682, 0.9607, 0.643002, 0.0448832, 0.738248, 0.348618, 0.0763026, 0.888354, 0.517763, 0.109996, 0.0868614, 0.82533, 0.396627, 0.137868, 0.826429, 0.867586, 0.555135, 0.892493, 0.585946, 0.305518, 0.315007, 0.701989, 0.802518, 0.683298, 0.00264837, 0.679673, 0.709136, 0.142642, 0.533601, 0.346676, 0.942817, 0.45962, 0.524578, 0.0179627, 0.378634, 0.820991, 0.775419, 0.102314, 0.0266546, 0.143167, 0.265264, 0.218971, 0.146096, 0.635497, 0.717874, 0.110783, 0.560967, 0.300097, 0.204259, 0.897322, 0.668512, 0.235515, 0.173443, 0.734684, 0.780446, 0.572501, 0.406989, 0.315402, 0.796833, 0.0231761, 0.0218985, 0.298767, 0.475227, 0.754351, 0.967537, 0.664917, 0.980077, 0.21669, 0.416873, 0.21657, 0.476457, 0.814323, 0.984516, 0.871082, 0.304772, 0.324579, 0.0843155, 0.896098, 0.372684, 0.39024, 0.424434, 0.511256, 0.91814, 0.984691, 0.708243, 0.673182, 0.366004, 0.123732, 0.701677, 0.408152, 0.342148, 0.166041, 0.692533, 0.708754, 0.0508529, 0.0662668, 0.888426, 0.0280568, 0.201639, 0.71051, 0.984445, 0.204678, 0.555812, 0.956799, 0.734969, 0.19871, 0.823173, 0.262225, 0.394371, 0.631518, 0.47478, 0.0770741, 0.551515, 0.370017, 0.0741412, 0.0500548, 0.263441, 0.327276, 0.939112, 0.411624, 0.544791, 0.66275, 0.283403, 0.88434, 0.0294057, 0.316909, 0.0725998, 0.00835549, 0.809334, 0.849179, 0.375754, 0.957422, 0.85466, 0.217407, 0.542908, 0.175748, 0.504585, 0.976336, 0.491499, 0.778212, 0.366838, 0.178977, 0.0574047, 0.11183, 0.0680625, 0.415275, 0.142, 0.527739, 0.773302, 0.405801, 0.824558, 0.9404, 0.78715, 0.155868, 0.358388, 0.66603, 0.998367, 0.796693, 0.39847, 0.0166702, 0.760833, 0.947697, 0.773071, 0.430953, 0.199777, 0.440754, 0.789584, 0.402203, 0.290959, 0.136039, 0.407225, 0.886502, 0.224862, 0.534533, 0.504989, 0.57205, 0.289784, 0.0567022, 0.513797, 0.688991, 0.489846, 0.0791771, 0.355252, 0.0180781, 0.882109, 0.130447, 0.349479, 0.967927, 0.326071, 0.675689, 0.849642, 0.343251, 0.0138873, 0.172055, 0.573791, 0.713112, 0.642188, 0.231037, 0.482308, 0.429474, 0.817152, 0.604725, 0.576264, 0.590874, 0.530946, 0.060237, 0.834396, 0.25964, 0.513882, 0.330849, 0.367801, 0.4338, 0.96881, 0.392219, 0.891667, 0.0558481, 0.881091, 0.652911, 0.907213, 0.525012, 0.799427, 0.10591, 0.823859, 0.25909, 0.148185, 0.30551, 0.0979791, 0.823973, 0.493586, 0.510876, 0.405858, 0.44503, 0.0469695, 0.392873, 0.983446, 0.824499, 0.560729, 0.874036, 0.0364178, 0.0264234, 0.972361, 0.180638, 0.761379, 0.673746, 0.421156, 0.448217, 0.587261, 0.189683, 0.875711, 0.442644, 0.496144, 0.0587351, 0.819399, 0.298208, 0.283541, 0.997576, 0.355854, 0.83655, 0.728251, 0.55746, 0.840863, 0.746343, 0.640369, 0.902633, 0.3231, 0.536743, 0.60424, 0.858438, 0.402129, 0.119052, 0.197639, 0.545146, 0.0509377, 0.785403, 
0.359014, 0.435355, 0.445491, 0.15645, 0.419904, 0.558313, 0.733979, 0.0500345, 0.259926, 0.829724, 0.704491, 0.118991, 0.0171485, 0.953601, 0.403183, 0.804015, 0.204906, 0.226606, 0.958313, 0.899884, 0.123221, 0.582415, 0.167405, 0.551889, 0.577661, 0.351546, 0.666651, 0.750385, 0.42224, 0.1394, 0.957366, 0.190762, 0.397029, 0.236701, 0.807296, 0.977773, 0.908144, 0.63838, 0.0202056, 0.102166, 0.0770063, 0.0896048, 0.592962, 0.331862, 0.290044, 0.753807, 0.844978, 0.220282, 0.524613, 0.674022, 0.804005, 0.564743, 0.0162725, 0.433844, 0.844632, 0.778595, 0.0121974, 0.939128, 0.352941, 0.965113, 0.785378, 0.281219, 0.0634396, 0.00429965, 0.401227, 0.198532, 0.40841, 0.845486, 0.311989, 0.00704158, 0.821301, 0.959929, 0.0583634, 0.574366, 0.432346, 0.0660277, 0.0407462, 0.716607, 0.279973, 0.968249, 0.118468, 0.0713564, 0.617792, 0.0364391, 0.789564, 0.931509, 0.646733, 0.894797, 0.0255862, 0.521541, 0.47941, 0.911951, 0.108453, 0.29824, 0.934456, 0.122792, 0.906146, 0.0347667, 0.885765, 0.236362, 0.66265, 0.325736, 0.250516, 0.00275351, 0.215824, 0.0109774, 0.500205, 0.619165, 0.268715, 0.00368781, 0.570198, 0.962896, 0.2447, 0.693203, 0.59192, 0.513152, 0.0424015, 0.44479, 0.974031, 0.487295, 0.577131, 0.385076, 0.72506, 0.595617, 0.595126, 0.671998, 0.797152, 0.735002, 0.561665, 0.751351, 0.437535, 0.316728, 0.927018, 0.724848, 0.670374, 0.828052, 0.200936, 0.600996, 0.979797, 0.234869, 0.0929985, 0.179676, 0.206935, 0.577415, 0.591904, 0.676284, 0.0182387, 0.133701, 0.390187, 0.492462, 0.569746, 0.556183, 0.300664, 0.128553, 0.386784, 0.929474, 0.90866, 0.0408087, 0.447141, 0.0394187, 0.210142, 0.800794, 0.249493, 0.1711, 0.775389, 0.321793, 0.497328, 0.920615, 0.72654, 0.987001, 0.865239, 0.168, 0.41209, 0.560018, 0.425795, 0.050327, 0.181931, 0.78959, 0.00529848, 0.302152, 0.214343, 0.445677, 0.448867, 0.325477, 0.395507, 0.307492, 0.0688672, 0.299394, 0.750222, 0.459083, 0.357636, 0.81627, 0.549338, 0.173428, 0.755245, 0.461379, 0.880511, 0.32672, 0.659535, 0.725552, 0.343132, 0.459094, 0.588021, 0.924431, 0.279072, 0.365516, 0.51584, 0.925071, 0.782208, 0.402797, 0.0848286, 0.894447, 0.25516, 0.71576, 0.209828, 0.625123, 0.719563, 0.156484, 0.788108, 0.0235596, 0.550313, 0.107401, 0.531978, 0.700822, 0.138419, 0.93222, 0.0250308, 0.761861, 0.600686, 0.927817, 0.590488, 0.544644, 0.689929, 0.646576, 0.45495, 0.250062, 0.285164, 0.0444433, 0.756472, 0.649678, 0.227151, 0.400516, 0.0890544, 0.599938, 0.0134461, 0.944401, 0.161036, 0.285342, 0.942577, 0.418774, 0.531273, 0.615817, 0.777693, 0.356737, 0.0277513, 0.311302, 0.322879, 0.733471, 0.687102, 0.519443, 0.324993, 0.996888, 0.600846, 0.88903, 0.447834, 0.484094, 0.857112, 0.308958, 0.114202, 0.618884, 0.887633, 0.181305, 0.521852, 0.839859, 0.360276, 0.357392, 0.978203, 0.00644566, 0.353331, 0.590259, 0.250829, 0.0271141, 0.972923, 0.46393, 0.828972, 0.455742, 0.0231219, 0.577045, 0.650473, 0.818425, 0.695791, 0.275232, 0.0350587, 0.773464, 0.0403544, 0.739522, 0.141117, 0.637566, 0.828575, 0.730955, 0.125611, 0.594952, 0.911731, 0.985904, 0.00857818, 0.427475, 0.931194, 0.966889, 0.369253, 0.459884, 0.266648, 0.312622, 0.456116, 0.977555, 0.289786, 0.116616, 0.345017, 0.330979, 0.392478, 0.756026, 0.324448, 0.247345, 0.377854, 0.844884, 0.220027, 0.29662, 0.661884, 0.330342, 0.545344, 0.536275, 0.308303, 0.598373, 0.768796, 0.154837, 0.463053, 0.538544, 0.868723, 0.881907, 0.882959, 0.841358, 0.0700229, 0.209758, 0.389461, 0.0936458, 0.712309, 0.267845, 0.255603, 0.558159, 0.657676, 0.413247, 0.523729, 0.139476, 0.31866, 0.108104, 
0.748128, 0.373597, 0.202816, 0.380043, 0.19868, 0.232872, 0.894402, 0.0514955, 0.532503, 0.100772, 0.037074, 0.10981, 0.86109, 0.654005, 0.154299, 0.898359, 0.408183, 0.679988, 0.90754, 0.375236, 0.882333, 0.270426, 0.513036, 0.581996, 0.932715, 0.837767, 0.173011, 0.0419643, 0.574801, 0.0963259, 0.788492, 0.733621, 0.119557, 0.9871, 0.957762, 0.465913, 0.609707, 0.942469, 0.620195, 0.453049, 0.453488, 0.956275, 0.686171, 0.882992, 0.931574, 0.872779, 0.890029, 0.183952, 0.943933, 0.107183, 0.181155, 0.201269, 0.702818, 0.606594, 0.773789, 0.498294, 0.852091, 0.920335, 0.987666, 0.64869, 0.889185, 0.623223, 0.947191, 0.477914, 0.49591, 0.308485, 0.857547, 0.932055, 0.910387, 0.473953, 0.688625, 0.401032, 0.132168, 0.303434, 0.634216, 0.992237, 0.0731308, 0.041773, 0.511691, 0.25241, 0.554861, 0.455313, 0.678287, 0.530853, 0.225368, 0.43792, 0.532123, 0.485212, 0.637144, 0.960978, 0.95891, 0.0782347, 0.289332, 0.886921, 0.764571, 0.361793, 0.0746127, 0.993632, 0.694509, 0.623157, 0.130792, 0.893395, 0.0873223, 0.252372, 0.0040305, 0.724391, 0.942513, 0.678298, 0.78984, 0.726096, 0.251289, 0.365509, 0.697511, 0.182567, 0.384673, 0.0197732, 0.889548, 0.889591, 0.781046, 0.442412, 0.777671, 0.898534, 0.54291, 0.636089, 0.419532, 0.614074, 0.246672, 0.104112, 0.410356, 0.0850863, 0.732314, 0.576642, 0.983862, 0.245529, 0.905021, 0.423117, 0.642733, 0.0384267, 0.905644, 0.189703, 0.846216, 0.638472, 0.212359, 0.13522, 0.458133, 0.592051, 0.978257, 0.951733, 0.545079, 0.261652, 0.699546, 0.433683, 0.0664002, 0.436459, 0.705024, 0.609772, 0.324989, 0.266375, 0.318822, 0.168132, 0.120973, 0.0613638, 0.960365, 0.0411135, 0.79781, 0.37686, 0.161793, 0.267026, 0.0747603, 0.143638, 0.873175, 0.29606, 0.144062, 0.527469, 0.888221, 0.926065, 0.973356, 0.418015, 0.266929, 0.212153, 0.365911, 0.507319, 0.734776, 0.196497, 0.0282453, 0.599506, 0.672937, 0.521375, 0.131725, 0.949371, 0.1465, 0.146601, 0.199998, 0.234061, 0.179435, 0.073721, 0.658238, 0.692011, 0.294476, 0.636462, 0.811079, 0.447009, 0.346376, 0.540229, 0.142998, 0.809889, 0.149238, 0.623237, 0.440844, 0.446792, 0.050568, 0.954065, 0.122756, 0.970355, 0.434692, 0.602249, 0.879318, 0.781173, 0.921066, 0.854981, 0.87454, 0.358674, 0.27127, 0.584635, 0.267057, 0.0961367, 0.160083, 0.0938329, 0.585566, 0.703595, 0.767254, 0.714843, 0.172718, 0.392079, 0.000955963, 0.977616, 0.575433, 0.423307, 0.678327, 0.976696, 0.536711, 0.41057, 0.58762, 0.196584, 0.321225, 0.76111, 0.208201, 0.522568, 0.512635, 0.516359, 0.686973, 0.974234, 0.0214595, 0.346165, 0.445053, 0.551545, 0.831634, 0.661682, 0.129605, 0.67415, 0.779018, 0.420759, 0.199274, 0.570723, 0.596118, 0.338123, 0.821161, 0.63861, 0.855365, 0.197256, 0.887666, 0.464242, 0.362706, 0.609213, 0.830651, 0.676957, 0.941982, 0.649738, 0.0908446, 0.153289, 0.458819, 0.225334, 0.727708, 0.796149, 0.961531, 0.944226, 0.722972, 0.537162, 0.794307, 0.820672, 0.303677, 0.0092616, 0.150626, 0.278319, 0.401405, 0.119615, 0.775608, 0.257171, 0.144378, 0.630399, 0.201702, 0.994009, 0.545814, 0.0633206, 0.0854413, 0.0138782, 0.9435, 0.293533, 0.084039, 0.286295, 0.709022, 0.309837, 0.914594, 0.81354, 0.318379, 0.0720562, 0.796569, 0.581334, 0.448156, 0.590333, 0.638818, 0.90309, 0.907353, 0.267875, 0.739135, 0.0269618, 0.415492, 0.210443, 0.11701, 0.2802, 0.476753, 0.798298, 0.459586, 0.980068, 0.623407, 0.102737, 0.815412, 0.721706, 0.0192618, 0.622737, 0.410737, 0.966186, 0.451259, 0.178196, 0.590272, 0.632138, 0.0150193, 0.754371, 0.282721, 0.111791, 0.964293, 0.757311, 0.863531, 0.815876, 0.741579, 
0.132043, 0.311717, 0.375529, 0.423573, 0.879152, 0.687964, 0.623009, 0.928642, 0.211231, 0.599021, 0.315594, 0.48983, 0.924192, 0.15759, 0.835065, 0.270752, 0.519572, 0.983971, 0.933466, 0.39281, 0.164771, 0.69996, 0.695087, 0.939934, 0.360593, 0.884087, 0.0705368, 0.195783, 0.840753, 0.640125, 0.608968, 0.660921, 0.391866, 0.560112, 0.279624, 0.130927, 0.379738, 0.29212, 0.0839564, 0.959167, 0.481734, 0.761034, 0.218957, 0.657724, 0.89205, 0.632027, 0.00426973, 0.901251, 0.611128, 0.93658, 0.192968, 0.800139, 0.734223, 0.257371, 0.583209, 0.205984, 0.306612, 0.732238, 0.767415, 0.306473, 0.301493, 0.655717, 0.806719, 0.185761, 0.108008, 0.369758, 0.5799, 0.39172, 0.586472, 0.912723, 0.996995, 0.328359, 0.728283, 0.913385, 0.458639, 0.487452, 0.315121, 0.487541, 0.689887, 0.334267, 0.261757, 0.554891, 0.74423, 0.81897, 0.199157, 0.92522, 0.795319, 0.29064, 0.734204, 0.428365, 0.64949, 0.83396, 0.468518, 0.322345, 0.887436, 0.679391, 0.793715, 0.627969, 0.549529, 0.216202, 0.935997, 0.303923, 0.0265268, 0.256032, 0.00753601, 0.740424, 0.765904, 0.448312, 0.0849806, 0.219901, 0.641763, 0.732962, 0.562491, 0.698137, 0.390629, 0.441117, 0.914715, 0.371376, 0.0270291, 0.565576, 0.872735, 0.283587, 0.300075, 0.428127, 0.765926, 0.601275, 0.116522, 0.184715, 0.0921416, 0.225001, 0.328041, 0.831487, 0.240143, 0.298623, 0.0183053, 0.423449, 0.173679, 0.532816, 0.174162, 0.724482, 0.338944, 0.634093, 0.825546, 0.598415, 0.712967, 0.281606, 0.626934, 0.972574, 0.749146, 0.521465, 0.817477, 0.820694, 0.674425, 0.822765, 0.76617, 0.0219426, 0.481244, 0.799323, 0.54821, 0.99158, 0.467712, 0.608509, 0.413756, 0.164914, 0.627942, 0.479727, 0.602386, 0.330174, 0.690089, 0.129653, 0.158252, 0.577024, 0.44558, 0.549267, 0.161021, 0.468495, 0.760144, 0.872291, 0.850721, 0.74684, 0.315013, 0.0820171, 0.195057, 0.229297, 0.0356003, 0.625246, 0.0778245, 0.998756, 0.947639, 0.607523, 0.434552, 0.989694, 0.815607, 0.941278, 0.861908, 0.999584, 0.989924, 0.492682, 0.622809, 0.0118228, 0.128034, 0.508684, 0.676673, 0.596012, 0.081458, 0.436571, 0.633841, 0.906294, 0.11493, 0.836429, 0.942813, 0.660931, 0.61251, 0.200747, 0.634191, 0.0971297, 0.9871, 0.525109, 0.957392, 0.849594, 0.838424, 0.43607, 0.786804, 0.250731, 0.565066, 0.0139926, 0.731502, 0.066213, 0.352713, 0.866957, 0.809287, 0.187923, 0.852455, 0.028073, 0.894598, 0.592446, 0.454888, 0.471943, 0.734707, 0.565559, 0.720943, 0.100021, 0.156881, 0.298176, 0.719757, 0.190602, 0.499897, 0.455767, 0.413476, 0.561167, 0.217268, 0.615113, 0.652399, 0.167765, 0.2394, 0.908603, 0.44359, 0.762971, 0.262683, 0.979575, 0.0046518, 0.78731, 0.560064, 0.409568, 0.470744, 0.221643, 0.172649, 0.916843, 0.840882, 0.494234, 0.796157, 0.82787, 0.0642344, 0.149631, 0.121883, 0.471611, 0.725665, 0.517188, 0.92359, 0.0724756, 0.167474, 0.974842, 0.429479, 0.582507, 0.753831, 0.941557, 0.652318, 0.999982, 0.880069, 0.170462, 0.642863, 0.297986, 0.754937, 0.715194, 0.382265, 0.0684199, 0.152803, 0.346498, 0.0980933, 0.539281, 0.313148, 0.329868, 0.659931, 0.460339, 0.756118, 0.948438, 0.47178, 0.608557, 0.943064, 0.972609, 0.731033, 0.0835427, 0.116397, 0.960032, 0.878928, 0.158863, 0.96914, 0.215856, 0.490591, 0.932403, 0.00532221, 0.56509, 0.184162, 0.071154, 0.187463, 0.67118, 0.224376, 0.812902, 0.587138, 0.414517, 0.906184, 0.212648, 0.83957, 0.0392536, 0.541782, 0.295983, 0.30007, 0.22822, 0.262378, 0.834384, 0.315133, 0.324306, 0.0557477, 0.82832, 0.928104, 0.367016, 0.649099, 0.0396088, 0.733263, 0.0114925, 0.189227, 0.435077, 0.57049, 0.209751, 0.99275, 0.0132314, 
0.748614, 0.754128, 0.308886, 0.940483, 0.563096, 0.417193, 0.689936, 0.944569, 0.758204, 0.61821, 0.896863, 0.273171, 0.889657, 0.599449, 0.336948, 0.994484, 0.516204, 0.113564, 0.725726, 0.306077, 0.834276, 0.0798385, 0.390562, 0.851716, 0.38912, 0.49531, 0.305076, 0.258891, 0.553501, 0.114869, 0.0733474, 0.640746, 0.959401, 0.361267, 0.844638, 0.970882, 0.334052, 0.950295, 0.953569, 0.285906, 0.698312, 0.860996, 0.686962, 0.470302, 0.0658563, 0.74922, 0.986479, 0.719605, 0.58211, 0.0382286, 0.516559, 0.80634, 0.407344, 0.282383, 0.0815423, 0.020983, 0.956287, 0.95763, 0.13336, 0.944486, 0.0188899, 0.811448, 0.57224, 0.0696519, 0.588823, 0.595569, 0.857287, 0.626235, 0.0776045, 0.826725, 0.111139, 0.892575, 0.544137, 0.124404, 0.802925, 0.211684, 0.519513, 0.46979, 0.302065, 0.932356, 0.611625, 0.937576, 0.817479, 0.119266, 0.410209, 0.605702, 0.782327, 0.158269, 0.903792, 0.241192, 0.771094, 0.724703, 0.643881, 0.433215, 0.638512, 0.182143, 0.709728, 0.841723, 0.0507608, 0.327897, 0.344321, 0.722545, 0.447383, 0.285312, 0.0242404, 0.785832, 0.477245, 0.733531, 0.793145, 0.388553, 0.453201, 0.965354, 0.387875, 0.880384, 0.0192083, 0.372754, 0.802752, 0.500992, 0.20782, 0.025106, 0.937312, 0.161403, 0.325695, 0.554755, 0.921233, 0.969702, 0.177798, 0.635858, 0.228456, 0.517535, 0.764385, 0.0310674, 0.663643, 0.51201, 0.878013, 0.857616, 0.160906, 0.546719, 0.237824, 0.400096, 0.687779, 0.601709, 0.00459333, 0.649596, 0.29988, 0.0568715, 0.58124, 0.180596, 0.60101, 0.00389178, 0.328437, 0.333262, 0.284833, 0.716697, 0.651259, 0.730291, 0.290504, 0.574079, 0.447069, 0.201367, 0.407274, 0.237481, 0.850878, 0.274307, 0.303421, 0.518478, 0.917222, 0.0848286, 0.556413, 0.924497, 0.107259, 0.230666, 0.828556, 0.575067, 0.336448, 0.0485684, 0.0609157, 0.357136, 0.094745, 0.857984, 0.623518, 0.256357, 0.790221, 0.0788018, 0.380383, 0.91978, 0.18314, 0.287352, 0.907766, 0.390134, 0.512253, 0.569944, 0.661262, 0.365042, 0.72941, 0.746994, 0.526814, 0.56478, 0.578807, 0.427708, 0.893288, 0.447646, 0.31169, 0.741519, 0.172659, 0.315436, 0.407067, 0.065012, 0.604022, 0.241147, 0.28989, 0.25807, 0.834869, 0.56664, 0.0757672, 0.0614231, 0.922294, 0.699248, 0.478451, 0.210576, 0.868179, 0.0737086, 0.696861, 0.0934867, 0.103978, 0.213312, 0.12607, 0.647746, 0.0123185, 0.550134, 0.370287, 0.242576, 0.691588, 0.760269, 0.229541, 0.0712436, 0.670378, 0.148687, 0.352557, 0.792037, 0.723984, 0.78095, 0.0760833, 0.214986, 0.794623, 0.656526, 0.2657, 0.162698, 0.249672, 0.737294, 0.144896, 0.960271, 0.952706, 0.247689, 0.781195, 0.637473, 0.651339, 0.480488, 0.624827, 0.168967, 0.882051, 0.432428, 0.821382, 0.126006, 0.888336, 0.537809, 0.975249, 0.47041, 0.186036, 0.385578, 0.294402, 0.278556, 0.303968, 0.101222, 0.984892, 0.962658, 0.285753, 0.0867446, 0.267672, 0.944927, 0.619094, 0.109094, 0.586843, 0.441005, 0.572039, 0.686227, 0.825132, 0.807977, 0.464764, 0.270553, 0.566108, 0.20144, 0.82994, 0.277983, 0.127498, 0.980175, 0.490948, 0.588068, 0.842485, 0.73467, 0.300052, 0.794655, 0.709573, 0.633959, 0.617273, 0.277497, 0.231344, 0.576921, 0.330769, 0.387124, 0.559898, 0.489555, 0.262435, 0.897746, 0.942984, 0.696111, 0.330836, 0.258096, 0.18709, 0.250549, 0.695652, 0.00178437, 0.771559, 0.935707, 0.0570436, 0.222083, 0.0285951, 0.793577, 0.778705, 0.829804, 0.552224, 0.878264, 0.590307, 0.416663, 0.968305, 0.980497, 0.374351, 0.416178, 0.96846, 0.408325, 0.380784, 0.157902, 0.207659, 0.420486, 0.246321, 0.0205687, 0.728861, 0.415458, 0.545777, 0.763633, 0.120258, 0.486911, 0.969393, 0.286764, 0.336028, 
0.286827, 0.585956, 0.450174, 0.0558024, 0.680114, 0.0793479, 0.48357, 0.602861, 0.775916, 0.0411381, 0.338329, 0.8924, 0.824861, 0.110559, 0.306005, 0.717528, 0.180393, 0.653016, 0.500991, 0.480166, 0.813107, 0.217375, 0.465885, 0.383509, 0.703033, 0.859691, 0.0195925, 0.834752, 0.734476, 0.955846, 0.580859, 0.953796, 0.654574, 0.44423, 0.0112598, 0.282579, 0.49631, 0.545955, 0.175376, 0.19637, 0.556494, 0.0788281, 0.634461, 0.415645, 0.240133, 0.131654, 0.937616, 0.802098, 0.447082, 0.781492, 0.655502, 0.0768588, 0.0264196, 0.440422, 0.340152, 0.848416, 0.18037, 0.996482, 0.529455, 0.77265, 0.795671, 0.27664, 0.596482, 0.156991, 0.0264286, 0.0321646, 0.313622, 0.66091, 0.102391, 0.247598, 0.633924, 0.366711, 0.67399, 0.770374, 0.305931, 0.694115, 0.218569, 0.688451, 0.395182, 0.254572, 0.667813, 0.0587792, 0.467662, 0.161722, 0.724485, 0.621927, 0.516418, 0.949979, 0.306452, 0.63481, 0.314079, 0.218101, 0.30225, 0.537741, 0.513449, 0.70897, 0.993491, 0.5555, 0.555535, 0.859809, 0.548939, 0.318943, 0.992481, 0.992319, 0.806028, 0.317697, 0.479037, 0.450285, 0.0430833, 0.68005, 0.241278, 0.785787, 0.848216, 0.442909, 0.731278, 0.507967, 0.644294, 0.384743, 0.490276, 0.514169, 0.028633, 0.382395, 0.158087, 0.888714, 0.0219941, 0.843342, 0.486631, 0.75527, 0.171793, 0.807343, 0.512682, 0.137995, 0.553477, 0.754692, 0.414911, 0.297277, 0.748853, 0.644971, 0.258304, 0.503775, 0.50907, 0.108981, 0.0239485, 0.974731, 0.0142304, 0.921031, 0.384431, 0.91562, 0.509847, 0.469516, 0.73212, 0.684066, 0.653302, 0.228694, 0.863653, 0.041899, 0.825711, 0.315551, 0.305999, 0.875391, 0.524647, 0.737578, 0.843897, 0.20596, 0.569398, 0.891875, 0.514638, 0.177141, 0.337022, 0.238634, 0.450862, 0.0958515, 0.0170952, 0.291028, 0.264376, 0.219396, 0.280907, 0.238399, 0.485541, 0.186888, 0.685824, 0.845492, 0.158039, 0.624817, 0.617941, 0.0335596, 0.929544, 0.283022, 0.913518, 0.824676, 0.748839, 0.210475, 0.80756, 0.550077, 0.540211, 0.593669, 0.817956, 0.590681, 0.580638, 0.863733, 0.369527, 0.522274, 0.970446, 0.27185, 0.244164, 0.848418, 0.317831, 0.796962, 0.39336, 0.61686, 0.443003, 0.886598, 0.296609, 0.79706, 0.505852, 0.135758, 0.729181, 0.568836, 0.228146, 0.628907, 0.557686, 0.210052, 0.666452, 0.369343, 0.637187, 0.913858, 0.0676483, 0.386314, 0.74614, 0.460027, 0.769665, 0.607163, 0.661136, 0.408317, 0.082415, 0.330202, 0.973103, 0.906002, 0.592822, 0.49552, 0.464728, 0.927377, 0.79601, 0.297257, 0.0802152, 0.875166, 0.965711, 0.828689, 0.935954, 0.222853, 0.960175, 0.885684, 0.0260691, 0.651986, 0.927343, 0.0823473, 0.951757, 0.760611, 0.54416, 0.835401, 0.210591, 0.0724783, 0.391011, 0.0143496, 0.293461, 0.194537, 0.91084, 0.504482, 0.841295, 0.905512, 0.225208, 0.449512, 0.622702, 0.560123, 0.890952, 0.169299, 0.381673, 0.84665, 0.948667, 0.987612, 0.729889, 0.908907, 0.365192, 0.338496, 0.735753, 0.527905, 0.468758, 0.0385625, 0.996785, 0.108748, 0.481158, 0.00839623, 0.466563, 0.865383, 0.409138, 0.460578, 0.360472, 0.0407806, 0.364375, 0.977482, 0.811772, 0.0990622, 0.933134, 0.163347, 0.939935, 0.630137, 0.256183, 0.983861, 0.724552, 0.303294, 0.914815, 0.482606, 0.912499, 0.660085, 0.366056, 0.746248, 0.219051, 0.104823, 0.102237, 0.629716, 0.958589, 0.513937, 0.156952, 0.837467, 0.279681, 0.417555, 0.618307, 0.933961, 0.710576, 0.64998, 0.591747, 0.18498, 0.645953, 0.30677, 0.125125, 0.041388, 0.801042, 0.424426, 0.351224, 0.513681, 0.175725, 0.36503, 0.312664, 0.106289, 0.530154, 0.864183, 0.0462141, 0.146542, 0.747002, 0.0271387, 0.847553, 0.374374, 0.141571, 0.804145, 0.781911, 0.908875, 
0.640773, 0.942928, 0.932247, 0.0819903, 0.702697, 0.796138, 0.0998107, 0.477658, 0.71308, 0.230403, 0.39062, 0.412141, 0.560688, 0.0396883, 0.300266, 0.552077, 0.401183, 0.697795, 0.345155, 0.973869, 0.99401, 0.724117, 0.37781, 0.475506, 0.259888, 0.816185, 0.340527, 0.555918, 0.0614762, 0.327173, 0.33215, 0.879987, 0.638624, 0.462407, 0.911328, 0.848515, 0.101813, 0.337031, 0.589615, 0.444778, 0.939433, 0.452765, 0.164072, 0.313226, 0.121912, 0.964394, 0.545237, 0.119571, 0.320902, 0.32713, 0.297954, 0.92257, 0.581893, 0.605318, 0.454684, 0.981156, 0.680498, 0.646043, 0.848569, 0.727255, 0.26652, 0.192383, 0.924268, 0.114181, 0.210008, 0.0881303, 0.493128, 0.761511, 0.544935, 0.0904996, 0.304229, 0.668192, 0.129051, 0.620008, 0.710355, 0.804815, 0.858795, 0.328434, 0.638237, 0.922537, 0.37459, 0.580962, 0.856638, 0.108657, 0.759089, 0.48636, 0.233826, 0.646782, 0.608141, 0.078016, 0.112631, 0.870167, 0.79137, 0.224489, 0.0101558, 0.00762725, 0.0954137, 0.55212, 0.795537, 0.904098, 0.591631, 0.480995, 0.856099, 0.523218, 0.832067, 0.771458, 0.606072, 0.104547, 0.771592, 0.908995, 0.693579, 0.684904, 0.695537, 0.821532, 0.307386, 0.71525, 0.813333, 0.647794, 0.442722, 0.0453486, 0.348228, 0.40358, 0.278184, 0.64213, 0.0118423, 0.706857, 0.523328, 0.969652, 0.60106, 0.640635, 0.701873, 0.498151, 0.472944, 0.12873, 0.0483712, 0.644485, 0.717798, 0.396157, 0.246251, 0.0118826, 0.116676, 0.0344178, 0.484916, 0.170281, 0.0870461, 0.372743, 0.0366434, 0.618328, 0.228173, 0.487171, 0.753924, 0.711603, 0.2192, 0.00744546, 0.555475, 0.488775, 0.0123579, 0.0985529, 0.837939, 0.240533, 0.926339, 0.0881075, 0.593701, 0.751206, 0.0428366, 0.353683, 0.00202499, 0.95949, 0.0779273, 0.337559, 0.107304, 0.52039, 0.634236, 0.573349, 0.210973, 0.13295, 0.636381, 0.312231, 0.745826, 0.595705, 0.494064, 0.596859, 0.254767, 0.934892, 0.574779, 0.42702, 0.885871, 0.530271, 0.351459, 0.917716, 0.599362, 0.879352, 0.742866, 0.751279, 0.255945, 0.953363, 0.364924, 0.613204, 0.395512, 0.42505, 0.775299, 0.739922, 0.610164, 0.519186, 0.54946, 0.759337, 0.065786, 0.851736, 0.513869, 0.538094, 0.298297, 0.936134, 0.819454, 0.589035, 0.146889, 0.789904, 0.265699, 0.922212, 0.824522, 0.280013, 0.723715, 0.22222, 0.215838, 0.170854, 0.84406, 0.720984, 0.517679, 0.985518, 0.828351, 0.680872, 0.527865, 0.360798, 0.0171757, 0.335389, 0.127265, 0.0372285, 0.743543, 0.120925, 0.992412, 0.893428, 0.224306, 0.427493, 0.400916, 0.453714, 0.844501, 0.432694, 0.907014, 0.387987, 0.197357, 0.353356, 0.163248, 0.581933, 0.0802244, 0.101659, 0.948038, 0.298489, 0.487947, 0.805005, 0.292222, 0.104251, 0.262406, 0.756303, 0.0289556, 0.539178, 0.612408, 0.260213, 0.87181, 0.125249, 0.877138, 0.56453, 0.260431, 0.115406, 0.844685, 0.795905, 0.0845446, 0.403418, 0.203438, 0.501762, 0.04233, 0.883746, 0.888299, 0.0241302, 0.0477649, 0.678425, 0.973873, 0.875213, 0.255399, 0.554313, 0.509247, 0.810753, 0.662169, 0.0431242, 0.620963, 0.532629, 0.414566, 0.202682, 0.431377, 0.359614, 0.741483, 0.287146, 0.930683, 0.427412, 0.990319, 0.954914, 0.109501, 0.945121, 0.0656726, 0.528272, 0.865429, 0.813012, 0.804701, 0.178151, 0.987579, 0.367919, 0.455077, 0.816022, 0.662191, 0.843173, 0.677972, 0.838458, 0.669437, 0.130338, 0.0307111, 0.0626129, 0.65216, 0.89493, 0.682701, 0.687862, 0.930354, 0.989369, 0.127442, 0.7715, 0.142004, 0.203572, 0.797808, 0.69064, 0.515438, 0.824036, 0.480253, 0.429861, 0.193539, 0.0359497, 0.991032, 0.158966, 0.765634, 0.489247, 0.638873, 0.851344, 0.144914, 0.604943, 0.295531, 0.736863, 0.637076, 0.888163, 0.955339, 
0.089028, 0.509549, 0.18744, 0.0881126, 0.240522, 0.971756, 0.0332888, 0.551699, 0.385016, 0.0123994, 0.288318, 0.0737334, 0.773612, 0.149373, 0.245974, 0.051311, 0.628455, 0.176365, 0.939546, 0.16157, 0.16799, 0.755808, 0.553828, 0.774415, 0.693948, 0.484905, 0.386581, 0.782892, 0.300995, 0.458443, 0.791017, 0.400073, 0.827436, 0.526291, 0.0786245, 0.169529, 0.645122, 0.897908, 0.615324, 0.0569726, 0.940118, 0.689557, 0.908606, 0.351026, 0.336397, 0.114568, 0.437256, 0.721008, 0.905267, 0.193115, 0.767424, 0.446772, 0.394282, 0.302813, 0.948594, 0.401708, 0.00299313, 0.768493, 0.755339, 0.614702, 0.329795, 0.336652, 0.37557, 0.681038, 0.22798, 0.624969, 0.297243, 0.592397, 0.0193065, 0.647642, 0.299208, 0.873181, 0.465653, 0.9561, 0.845995, 0.592208, 0.610495, 0.693635, 0.83781, 0.871429, 0.482782, 0.019577, 0.566867, 0.16798, 0.265707, 0.970517, 0.793594, 0.0707822, 0.517747, 0.0848429, 0.657993, 0.353269, 0.528811, 0.0576434, 0.789224, 0.142582, 0.136927, 0.275891, 0.613849, 0.631172, 0.43431, 0.708835, 0.24269, 0.652269, 0.838496, 0.810745, 0.443645, 0.990308, 0.716902, 0.915889, 0.53024, 0.0882673, 0.531856, 0.513347, 0.47327, 0.321808, 0.230628, 0.696743, 0.103835, 0.105626, 0.931232, 0.954198, 0.668827, 0.9736, 0.484766, 0.650168, 0.172564, 0.694854, 0.311507, 0.256866, 0.960496, 0.805261, 0.960536, 0.125208, 0.941721, 0.0252524, 0.811851, 0.0497205, 0.665747, 0.940021, 0.834874, 0.484493, 0.0483284, 0.930201, 0.92104, 0.920169, 0.489162, 0.107738, 0.590964, 0.857815, 0.108885, 0.703273, 0.98203, 0.526217, 0.391751, 0.424956, 0.959739, 0.427961, 0.55528, 0.678307, 0.89085, 0.564021, 0.0254301, 0.267811, 0.966403, 0.255753, 0.434848, 0.948481, 0.0797267, 0.107002, 0.613125, 0.308759, 0.951783, 0.482604, 0.212008, 0.920039, 0.437873, 0.914546, 0.495611, 0.158746, 0.679416, 0.251326, 0.132333, 0.0188234, 0.0826892, 0.0968132, 0.00930595, 0.216777, 0.107398, 0.439804, 0.945324, 0.200513, 0.127535, 0.774592, 0.456462, 0.710033, 0.528864, 0.691696, 0.234751, 0.483404, 0.848106, 0.973856, 0.879214, 0.277933, 0.723969, 0.533849, 0.724845, 0.0916249, 0.457768, 0.194096, 0.25009, 0.193369, 0.0138378, 0.0989235, 0.875745, 0.223662, 0.834419, 0.678243, 0.426739, 0.971297, 0.431455, 0.217424, 0.190131, 0.605275, 0.464854, 0.106896, 0.832803, 0.302449, 0.961006, 0.855226, 0.521602, 0.65972, 0.180617, 0.741415, 0.96466, 0.71623, 0.187566, 0.0841722, 0.0878926, 0.190294, 0.727046, 0.172442, 0.412167, 0.545072, 0.466853, 0.660928, 0.486961, 0.778572, 0.147888, 0.954745, 0.200231, 0.744128, 0.294402, 0.859378, 0.906787, 0.519812, 0.551985, 0.79456, 0.418839, 0.414822, 0.125102, 0.490049, 0.959723, 0.210006, 0.589855, 0.365312, 0.316106, 0.0577263, 0.237431, 0.0110919, 0.330336, 0.058622, 0.808817, 0.896923, 0.356248, 0.0971567, 0.334918, 0.842778, 0.112614, 0.669964, 0.666608, 0.682843, 0.737819, 0.141793, 0.276603, 0.0331375, 0.783688, 0.434657, 0.45442, 0.599295, 0.387187, 0.846923, 0.179479, 0.121494, 0.98973, 0.478073, 0.827297, 0.379425, 0.232607, 0.118598, 0.169086, 0.902703, 0.095138, 0.0735214, 0.683167, 0.9624, 0.130515, 0.488498, 0.62583, 0.00683649, 0.254995, 0.929025, 0.599082, 0.661573, 0.928941, 0.831784, 0.179927, 0.053024, 0.412189, 0.947525, 0.661833, 0.902597, 0.31522, 0.132675, 0.154595, 0.274591, 0.499663, 0.993421, 0.923582, 0.443526, 0.925272, 0.869103, 0.353508, 0.389103, 0.0583933, 0.426538, 0.700325, 0.290663, 0.778628, 0.587522, 0.252839, 0.230107, 0.730163, 0.829537, 0.207341, 0.445899, 0.875232, 0.318014, 0.74751, 0.975023, 0.014299, 0.17128, 0.457809, 0.14812, 0.0197848, 
0.118775, 0.230753, 0.997848, 0.45795, 0.449937, 0.0894907, 0.547274, 0.948523, 0.630865, 0.624967, 0.498096, 0.501158, 0.211525, 0.286669, 0.0116561, 0.161249, 0.399316, 0.124989, 0.646028, 0.770584, 0.793763, 0.762284, 0.445668, 0.857498, 0.625118, 0.666783, 0.113078, 0.0596466, 0.185806, 0.591636, 0.635886, 0.450358, 0.587621, 0.5527, 0.747684, 0.248808, 0.667088, 0.383176, 0.358038, 0.790323, 0.9834, 0.668535, 0.869853, 0.933976, 0.787124, 0.811976, 0.515406, 0.937923, 0.879118, 0.755636, 0.8476, 0.0172126, 0.403315, 0.655652, 0.920229, 0.710497, 0.614395, 0.00722982, 0.430953, 0.345282, 0.103916, 0.355235, 0.904136, 0.79404, 0.0106131, 0.494148, 0.991351, 0.0350799, 0.630222, 0.000749717, 0.979426, 0.422986, 0.289786, 0.330692, 0.0552139, 0.0369849, 0.777986, 0.946767, 0.72669, 0.183823, 0.823817, 0.56029, 0.849984, 0.668362, 0.736155, 0.222733, 0.178803, 0.0433033, 0.0545745, 0.355848, 0.829792, 0.0321856, 0.900058, 0.565675, 0.264814, 0.565534, 0.967712, 0.903356, 0.598438, 0.281995, 0.721174, 0.912927, 0.309744, 0.555706, 0.939278, 0.397977, 0.882638, 0.406464, 0.101314, 0.310882, 0.69791, 0.61566, 0.770006, 0.534857, 0.264891, 0.275198, 0.285514, 0.387835, 0.915057, 0.307186, 0.99959, 0.929944, 0.518083, 0.406515, 0.511193, 0.111044, 0.490271, 0.984333, 0.322023, 0.525149, 0.574954, 0.377631, 0.980368, 0.482464, 0.376603, 0.918488, 0.47039, 0.198118, 0.966154, 0.975313, 0.0497318, 0.574031, 0.101026, 0.593685, 0.285728, 0.555814, 0.0161684, 0.465769, 0.392347, 0.268933, 0.219111, 0.0614982, 0.681534, 0.453335, 0.219628, 0.00521107, 0.531894, 0.0103605, 0.0160745, 0.706182, 0.105306, 0.639396, 0.913112, 0.169314, 0.46118, 0.530876, 0.530852, 0.144749, 0.061771, 0.345383, 0.793049, 0.930435, 0.0387522, 0.870827, 0.99605, 0.86432, 0.186146, 0.516547, 0.259903, 0.0542989, 0.354779, 0.633639, 0.369478, 0.883022, 0.831576, 0.503522, 0.259566, 0.10825, 0.720064, 0.0503783, 0.355704, 0.654066, 0.135843, 0.904345, 0.732615, 0.418578, 0.707522, 0.523412, 0.427052, 0.266195, 0.0485478, 0.151569, 0.234289, 0.86771, 0.691468, 0.976096, 0.322316, 0.474722, 0.930385, 0.247036, 0.0111069, 0.695143, 0.501857, 0.3928, 0.494987, 0.725543, 0.603904, 0.349952, 0.370164, 0.214488, 0.871452, 0.520086, 0.801125, 0.222537, 0.75987, 0.839324, 0.776784, 0.725548, 0.745395, 0.0295877, 0.150169, 0.111521, 0.291749, 0.86594, 0.667406, 0.624777, 0.519074, 0.0645655, 0.573982, 0.343976, 0.238698, 0.329343, 0.499002, 0.691519, 0.679615, 0.0686207, 0.444707, 0.650487, 0.862795, 0.501802, 0.165066, 0.791704, 0.15477, 0.794657, 0.705931, 0.0741224, 0.37554, 0.58899, 0.510674, 0.675896, 0.242068, 0.26872, 0.906989, 0.868277, 0.418305, 0.0849626, 0.843582, 0.185997, 0.0494805, 0.602731, 0.7365, 0.923882, 0.750356, 0.40581, 0.112037, 0.651375, 0.612261, 0.717376, 0.0812118, 0.275665, 0.476249, 0.473285, 0.739432, 0.143818, 0.183861, 0.532398, 0.41748, 0.921911, 0.788728, 0.779641, 0.807648, 0.246671, 0.0510445, 0.0254668, 0.156033, 0.932309, 0.763114, 0.254263, 0.457501, 0.461505, 0.440129, 0.344309, 0.995591, 0.760076, 0.730269, 0.986784, 0.73832, 0.87352, 0.537939, 0.0494327, 0.0392552, 0.0630789, 0.454545, 0.722735, 0.484011, 0.901769, 0.00145949, 0.545437, 0.466967, 0.412684, 0.327767, 0.433916, 0.368346, 0.782479, 0.392597, 0.742118, 0.611192, 0.622068, 0.186453, 0.279677, 0.221972, 0.379552, 0.888944, 0.304705, 0.497728, 0.318121, 0.92822, 0.693819, 0.434451, 0.0217611, 0.800265, 0.892447, 0.522391, 0.152433, 0.180016, 0.474903, 0.0238855, 0.588777, 0.583758, 0.317013, 0.499803, 0.340935, 0.545624, 0.828265, 
0.901466, 0.717656, 0.929255, 0.08775, 0.234531, 0.44157, 0.739418, 0.686425, 0.589344, 0.1422, 0.474432, 0.632737, 0.715697, 0.0770064, 0.784035, 0.141011, 0.933972, 0.856698, 0.839336, 0.98125, 0.513388, 0.668878, 0.133803, 0.890941, 0.628038, 0.603997, 0.704268, 0.036449, 0.310906, 0.352725, 0.428873, 0.43815, 0.562343, 0.564143, 0.635723, 0.336465, 0.800038, 0.475138, 0.506607, 0.398739, 0.276327, 0.664221, 0.029289, 0.717647, 0.580891, 0.396879, 0.477323, 0.285387, 0.630561, 0.922368, 0.459898, 0.792469, 0.552005, 0.298405, 0.0882608, 0.741296, 0.351056, 0.632286, 0.0179663, 0.427562, 0.74226, 0.00450135, 0.400636, 0.25329, 0.663112, 0.20838, 0.935161, 0.228668, 0.989608, 0.182795, 0.763212, 0.931301, 0.0463401, 0.845698, 0.426351, 0.223439, 0.397415, 0.902058, 0.408063, 0.819568, 0.695039, 0.640855, 0.482843, 0.107455, 0.677767, 0.196258, 0.231481, 0.812896, 0.206368, 0.126421, 0.524307, 0.587915, 0.0986925, 0.59961, 0.606076, 0.880473, 0.0577288, 0.13447, 0.947586, 0.0571611, 0.213013, 0.489456, 0.435708, 0.969083, 0.942995, 0.681764, 0.896178, 0.300682, 0.832375, 0.5195, 0.333091, 0.0556431, 0.469664, 0.7462, 0.0570697, 0.204859, 0.0263461, 0.494899, 0.244498, 0.171041, 0.693177, 0.122295, 0.354362, 0.715442, 0.73101, 0.115082, 0.102726, 0.0862697, 0.436487, 0.562216, 0.453726, 0.569051, 0.646143, 0.106447, 0.940343, 0.491902, 0.180173, 0.471478, 0.246272, 0.75232, 0.6348, 0.0633012, 0.493141, 0.709767, 0.224286, 0.647168, 0.682731, 0.212962, 0.893845, 0.688404, 0.476236, 0.260774, 0.200194, 0.318474, 0.197934, 0.573814, 0.92719, 0.714944, 0.677063, 0.708418, 0.48033, 0.504772, 0.682564, 0.92862, 0.69065, 0.101651, 0.0757732, 0.00122868, 0.871525, 0.630305, 0.0794149, 0.633552, 0.015418, 0.737001, 0.0723624, 0.839766, 0.657991, 0.0813443, 0.407034, 0.0593015, 0.0496819, 0.460121, 0.404443, 0.355606, 0.424626, 0.959821, 0.0930389, 0.41632, 0.885564, 0.974195, 0.072074, 0.97165, 0.0287434, 0.580037, 0.951944, 0.886314, 0.557303, 0.137478, 0.91437, 0.293779, 0.313597, 0.762046, 0.34298, 0.947834, 0.05537, 0.348866, 0.0220232, 0.0761242, 0.483674, 0.543624, 0.660201, 0.0495366, 0.302063, 0.685885, 0.560935, 0.816931, 0.215362, 0.735803, 0.484398, 0.880909, 0.298182, 0.128468, 0.354139, 0.0527205, 0.9533, 0.465524, 0.395123, 0.707392, 0.863054, 0.769466, 0.0349387, 0.734949, 0.349819, 0.0670821, 0.553813, 0.415804, 0.118037, 0.451046, 0.464906, 0.374042, 0.489618, 0.313256, 0.502164, 0.748751, 0.0353211, 0.603173, 0.780302, 0.784802, 0.103803, 0.664729, 0.514665, 0.733879, 0.485716, 0.247943, 0.220476, 0.68938, 0.622017, 0.532858, 0.552822, 0.483941, 0.0735217, 0.395001, 0.0156244, 0.874974, 0.455845, 0.40001, 0.525399, 0.472578, 0.829458, 0.339706, 0.0085181, 0.569525, 0.941497, 0.443829, 0.801316, 0.184509, 0.487628, 0.891121, 0.00554423, 0.601123, 0.993802, 0.328686, 0.417133, 0.75797, 0.41597, 0.682396, 0.654455, 0.348509, 0.0974186, 0.508677, 0.434163, 0.848895, 0.700171, 0.430938, 0.495374, 0.870377, 0.136626, 0.0252389, 0.385227, 0.0386377, 0.918474, 0.975657, 0.0885951, 0.342444, 0.635184, 0.894417, 0.788663, 0.0108794, 0.899092, 0.42093, 0.854927, 0.216658, 0.66055, 0.928396, 0.0311457, 0.95406, 0.78459, 0.52698, 0.954647, 0.0553724, 0.112884, 0.199379, 0.575085, 0.952266, 0.252752, 0.531791, 0.127426, 0.829004, 0.773397, 0.524273, 0.709102, 0.536364, 0.647877, 0.833191, 0.17091, 0.273609, 0.623056, 0.573185, 0.21786, 0.378892, 0.939695, 0.037874, 0.830446, 0.571572, 0.65958, 0.607392, 0.632481, 0.121963, 0.517775, 0.620987, 0.345017, 0.769316, 0.257625, 0.0339667, 0.580798, 
0.0237706, 0.127731, 0.555469, 0.0487686, 0.207704, 0.339207, 0.895657, 0.700992, 0.151549, 0.134486, 0.928575, 0.0443563, 0.709693, 0.981987, 0.450482, 0.488103, 0.913204, 0.369439, 0.484755, 0.612307, 0.309857, 0.473988, 0.835232, 0.100386, 0.551637, 0.577682, 0.397991, 0.841518, 0.532709, 0.141247, 0.409456, 0.786516, 0.826065, 0.64496, 0.536202, 0.344456, 0.277559, 0.830111, 0.390734, 0.450151, 0.0218602, 0.990687, 0.189214, 0.336835, 0.578587, 0.739667, 0.46232, 0.986131, 0.312953, 0.137354, 0.831438, 0.359346, 0.147321, 0.372741, 0.620005, 0.81637, 0.223003, 0.947173, 0.27233, 0.91975, 0.518553, 0.38585, 0.563678, 0.846023, 0.529032, 0.738313, 0.815021, 0.484458, 0.576681, 0.0926511, 0.0255715, 0.0670623, 0.780394, 0.0825894, 0.631561, 0.338682, 0.124318, 0.871413, 0.321059, 0.165747, 0.97701, 0.0734802, 0.189865, 0.104041, 0.893658, 0.457512, 0.436778, 0.88349, 0.821133, 0.372976, 0.19636, 0.650987, 0.508544, 0.146217, 0.314452, 0.302964, 0.869022, 0.69999, 0.384766, 0.656917, 0.0622906, 0.783322, 0.549016, 0.48294, 0.853204, 0.984512, 0.0309319, 0.845943, 0.438341, 0.737687, 0.282315, 0.431338, 0.601569, 0.889015, 0.289548, 0.43581, 0.100949, 0.000932943, 0.309186, 0.429961, 0.474935, 0.9183, 0.258138, 0.84934, 0.880087, 0.552398, 0.141286, 0.283584, 0.931196, 0.37491, 0.200074, 0.0740094, 0.848848, 0.704655, 0.414608, 0.152468, 0.907735, 0.772761, 0.0360872, 0.180056, 0.197521, 0.409589, 0.319259, 0.66497, 0.116785, 0.717584, 0.535645, 0.159959, 0.692151, 0.376607, 0.314093, 0.334771, 0.957417, 0.964038, 0.922174, 0.747482, 0.255195, 0.973346, 0.354712, 0.815583, 0.254272, 0.252127, 0.592754, 0.312424, 0.204188, 0.206599, 0.191229, 0.771721, 0.16871, 0.514607, 0.944417, 0.11727, 0.996607, 0.910027, 0.855123, 0.422208, 0.0914395, 0.753779, 0.39944, 0.822808, 0.864212, 0.541907, 0.0831886, 0.42194, 0.32566, 0.0259759, 0.843884, 0.328822, 0.290296, 0.842189, 0.00545835, 0.377651, 0.754198, 0.432982, 0.211467, 0.507297, 0.322964, 0.944953, 0.67269, 0.908696, 0.823821, 0.754947, 0.954246, 0.117408, 0.817159, 0.0698135, 0.895377, 0.818969, 0.470528, 0.571387, 0.947834, 0.595896, 0.487859, 0.0287326, 0.720533, 0.877903, 0.702942, 0.841893, 0.79942, 0.11582, 0.182782, 0.762298, 0.539455, 0.647334, 0.553466, 0.614602, 0.29902, 0.200425, 0.774054, 0.680762, 0.0497778, 0.227848, 0.589457, 0.764415, 0.738369, 0.823087, 0.0782207, 0.0477335, 0.664587, 0.962675, 0.666643, 0.392024, 0.985393, 0.43173, 0.951072, 0.317355, 0.636815, 0.982784, 0.199483, 0.100302, 0.0983773, 0.743348, 0.447584, 0.326214, 0.188603, 0.234952, 0.0836973, 0.193172, 0.339608, 0.0171846, 0.140456, 0.408248, 0.731662, 0.550608, 0.761596, 0.620372, 0.762473, 0.342705, 0.0431834, 0.879889, 0.949776, 0.0174416, 0.0430366, 0.689595, 0.862824, 0.897548, 0.571938, 0.221444, 0.0842679, 0.929114, 0.348507, 0.0102636, 0.221872, 0.998716, 0.354849, 0.0494321, 0.615513, 0.606909, 0.701758, 0.0548691, 0.917509, 0.813837, 0.891332, 0.454314, 0.512912, 0.115186, 0.321759, 0.0608864, 0.587886, 0.0310354, 0.131396, 0.810433, 0.325341, 0.795835, 0.559347, 0.532573, 0.649707, 0.667578, 0.474935, 0.492523, 0.474219, 0.719782, 0.719336, 0.166523, 0.0935351, 0.0290707, 0.627281, 0.120119, 0.337353, 0.833613, 0.529305, 0.160354, 0.192708, 0.517011, 0.393018, 0.266867, 0.67993, 0.2966, 0.0675627, 0.049884, 0.802648, 0.721408, 0.300104, 0.224207, 0.355234, 0.369416, 0.666041, 0.778924, 0.678344, 0.244476, 0.51117, 0.769703, 0.194689, 0.750743, 0.496835, 0.947952, 0.792589, 0.571899, 0.934115, 0.239655, 0.992504, 0.0846078, 0.618461, 0.649232, 
0.295413, 0.94919, 0.274345, 0.509407, 0.271909, 0.000930362, 0.158036, 0.900723, 0.212063, 0.96839, 0.374585, 0.469782, 0.531897, 0.690381, 0.468584, 0.0359002, 0.261997, 0.899164, 0.13039, 0.6485, 0.378338, 0.221372, 0.575356, 0.600011, 0.333292, 0.472703, 0.744745, 0.292519, 0.902518, 0.736262, 0.39382, 0.193482, 0.218638, 0.37395, 0.455858, 0.112487, 0.750094, 0.714776, 0.528615, 0.715634, 0.307591, 0.507527, 0.270608, 0.783474, 0.503542, 0.188904, 0.256813, 0.367277, 0.0523162, 0.94068, 0.921846, 0.777667, 0.411176, 0.0133664, 0.206929, 0.893635, 0.624345, 0.776478, 0.649293, 0.505523, 0.296455, 0.183283, 0.13781, 0.639469, 0.903623, 0.731678, 0.320149, 0.627007, 0.936021, 0.864199, 0.517562, 0.286761, 0.17914, 0.0648794, 0.0978378, 0.500528, 0.712065, 0.171874, 0.561778, 0.630316, 0.337516, 0.998464, 0.808983, 0.350591, 0.628368, 0.912493, 0.871607, 0.849695, 0.422593, 0.658803, 0.83347, 0.447237, 0.798792, 0.294156, 0.105724, 0.0741703, 0.944466, 0.746695, 0.57705, 0.454319, 0.537558, 0.978656, 0.275082, 0.865394, 0.42905, 0.321618, 0.619303, 0.719888, 0.20614, 0.590662, 0.335587, 0.571819, 0.14077, 0.512955, 0.688468, 0.753296, 0.430348, 0.228874, 0.880236, 0.563226, 0.400975, 0.135245, 0.608373, 0.542516, 0.876987, 0.470293, 0.307949, 0.598335, 0.24133, 0.726464, 0.953135, 0.100928, 0.434708, 0.186309, 0.0502947, 0.38873, 0.837416, 0.947704, 0.691398, 0.0461486, 0.995426, 0.659582, 0.455137, 0.719528, 0.433284, 0.447412, 0.815005, 0.801782, 0.20814, 0.363933, 0.807072, 0.366598, 0.881259, 0.36765, 0.170048, 0.700181, 0.190792, 0.422015, 0.371692, 0.804004, 0.877961, 0.700853, 0.824119, 0.972574, 0.759579, 0.421504, 0.515063, 0.218148, 0.0687566, 0.304502, 0.897699, 0.928322, 0.797918, 0.863318, 0.360217, 0.762498, 0.992061, 0.472135, 0.166534, 0.581114, 0.66621, 0.195785, 0.0437133, 0.24186, 0.567203, 0.984934, 0.258978, 0.957256, 0.844922, 0.713605, 0.0541155, 0.887392, 0.0579775, 0.341528, 0.00656558, 0.412303, 0.0148872, 0.462868, 0.337523, 0.939532, 0.507882, 0.187285, 0.139041, 0.729862, 0.350465, 0.267288, 0.831128, 0.341595, 0.234875, 0.526828, 0.0478633, 0.297971, 0.444694, 0.0888507, 0.788782, 0.0197432, 0.879393, 0.0294991, 0.841766, 0.489023, 0.503464, 0.47285, 0.545518, 0.934726, 0.0409531, 0.421955, 0.501159, 0.273089, 0.581311, 0.74753, 0.302673, 0.294014, 0.301206, 0.22304, 0.735983, 0.567154, 0.774057, 0.0655665, 0.766488, 0.798884, 0.90914, 0.403654, 0.440633, 0.793028, 0.992759, 0.624937, 0.564271, 0.578377, 0.53693, 0.28699, 0.710963, 0.0154664, 0.361751, 0.447944, 0.376651, 0.976943, 0.197851, 0.547489, 0.227106, 0.456446, 0.742688, 0.989491, 0.368837, 0.0729938, 0.145014, 0.451494, 0.862849, 0.483792, 0.145617, 0.259996, 0.290529, 0.0574864, 0.315212, 0.256489, 0.250316, 0.41138, 0.882464, 0.746337, 0.812296, 0.463426, 0.31247, 0.913875, 0.204899, 0.197659, 0.624119, 0.145738, 0.828249, 0.173153, 0.821114, 0.224993, 0.880486, 0.393319, 0.5228, 0.324117, 0.970565, 0.822643, 0.711942, 0.265506, 0.962097, 0.0305713, 0.93128, 0.715791, 0.305274, 0.980283, 0.385826, 0.250016, 0.794107, 0.214281, 0.7842, 0.159098, 0.168762, 0.557931, 0.259428, 0.105516, 0.0894675, 0.069389, 0.453177, 0.831908, 0.126979, 0.827831, 0.302858, 0.123757, 0.87119, 0.925624, 0.530038, 0.571532, 0.39731, 0.345498, 0.689683, 0.182037, 0.91741, 0.545841, 0.312773, 0.309144, 0.0123853, 0.980819, 0.350091, 0.0961307, 0.0646169, 0.0719253, 0.947185, 0.887502, 0.506696, 0.372855, 0.245585, 0.219438, 0.444893, 0.271645, 0.654493, 0.915768, 0.386677, 0.311144, 0.735514, 0.275864, 0.900572, 
0.574876, 0.377688, 0.92217, 0.704141, 0.501957, 0.144012, 0.490947, 0.283826, 0.778355, 0.328462, 0.478082, 0.486394, 0.932712, 0.536281, 0.644762, 0.157485, 0.700024, 0.816247, 0.883428, 0.477485, 0.0263772, 0.416996, 0.145468, 0.0296548, 0.289283, 0.283224, 0.128746, 0.966503, 0.526181, 0.764314, 0.606014, 0.56261, 0.316953, 0.402768, 0.459073, 0.973306, 0.294563, 0.837289, 0.466905, 0.0921972, 0.083203, 0.323473, 0.190788, 0.892925, 0.179937, 0.813307, 0.922673, 0.503515, 0.0126516, 0.413086, 0.278325, 0.47703, 0.939909, 0.492061, 0.395506, 0.790437, 0.989499, 0.801666, 0.741973, 0.256637, 0.16019, 0.368278, 0.955072, 0.263978, 0.588447, 0.787951, 0.0155537, 0.415966, 0.672965, 0.96584, 0.870968, 0.857937, 0.137439, 0.35347, 0.378724, 0.964373, 0.768734, 0.440575, 0.0405468, 0.767168, 0.551087, 0.638026, 0.632401, 0.130231, 0.722439, 0.201121, 0.990679, 0.241409, 0.405717, 0.0887516, 0.433311, 0.325229, 0.338765, 0.313306, 0.819185, 0.281072, 0.569576, 0.539048, 0.475869, 0.309965, 0.294609, 0.986804, 0.356367, 0.531965, 0.323966, 0.982108, 0.922606, 0.78987, 0.184678, 0.919313, 0.66172, 0.78001, 0.538111, 0.504061, 0.551569, 0.197561, 0.172693, 0.0875003, 0.504177, 0.536836, 0.494697, 0.273618, 0.909722, 0.328595, 0.520832, 0.490617, 0.12315, 0.68816, 0.315788, 0.722511, 0.162925, 0.490397, 0.457867, 0.0508113, 0.988206, 0.288068, 0.0844054, 0.229343, 0.288736, 0.274956, 0.162419, 0.567865, 0.0658495, 0.922953, 0.888595, 0.891891, 0.422081, 0.894278, 0.636198, 0.669447, 0.507817, 0.695411, 0.419091, 0.50959, 0.858347, 0.365119, 0.351961, 0.927468, 0.898844, 0.702686, 0.613672, 0.388132, 0.662109, 0.776763, 0.608112, 0.255512, 0.449684, 0.901849, 0.734983, 0.184068, 0.606383, 0.807874, 0.114904, 0.369849, 0.631545, 0.0902525, 0.35459, 0.629446, 0.755188, 0.00710723, 0.283259, 0.213081, 0.841356, 0.574098, 0.479044, 0.827427, 0.127553, 0.418677, 0.584996, 0.181074, 0.173117, 0.534258, 0.452522, 0.323254, 0.199639, 0.19934, 0.477959, 0.516973, 0.300863, 0.33596, 0.614113, 0.0463051, 0.806675, 0.716888, 0.560459, 0.78294, 0.588177, 0.108478, 0.871551, 0.122526, 0.879612, 0.323951, 0.964344, 0.272828, 0.599901, 0.780299, 0.878432, 0.827981, 0.862602, 0.238203, 0.995725, 0.807474, 0.859691, 0.745089, 0.427182, 0.584508, 0.815208, 0.0209309, 0.977772, 0.11708, 0.51597, 0.198436, 0.600696, 0.178902, 0.528177, 0.113928, 0.959053, 0.377403, 0.206195, 0.678897, 0.298803, 0.602845, 0.779048, 0.61736, 0.0538978, 0.922456, 0.251961, 0.344247, 0.0469277, 0.0997539, 0.134385, 0.433126, 0.917378, 0.501004, 0.432078, 0.609415, 0.399673, 0.840714, 0.648393, 0.975679, 0.488398, 0.707056, 0.917814, 0.241519, 0.151391, 0.217867, 0.455515, 0.736847, 0.276058, 0.429626, 0.458444, 0.809978, 0.3343, 0.212938, 0.391852, 0.982906, 0.188652, 0.94218, 0.573306, 0.858379, 0.646604, 0.996944, 0.8909, 0.284794, 0.27596, 0.900314, 0.264372, 0.403445, 0.460954, 0.367555, 0.513943, 0.756927, 0.901388, 0.744883, 0.419572, 0.0291924, 0.388149, 0.311144, 0.758906, 0.494793, 0.800513, 0.823058, 0.21676, 0.315118, 0.132913, 0.615567, 0.366563, 0.253824, 0.908813, 0.37287, 0.562833, 0.603878, 0.182627, 0.181748, 0.626329, 0.72973, 0.755745, 0.934465, 0.771756, 0.74092, 0.764703, 0.847717, 0.380055, 0.956026, 0.554559, 0.153008, 0.817651, 0.351589, 0.858646, 0.356547, 0.591945, 0.481525, 0.566407, 0.075726, 0.804183, 0.738637, 0.0518405, 0.615393, 0.18159, 0.705874, 0.270865, 0.214856, 0.715119, 0.421676, 0.312135, 0.0893758, 0.830401, 0.0118446, 0.790957, 0.000994886, 0.772949, 0.830426, 0.872599, 0.262893, 0.611248, 
0.327013, 0.209951, 0.0868283, 0.657425, 0.586466, 0.42304, 0.311497, 0.00599936, 0.0047943, 0.314901, 0.459674, 0.832237, 0.593066, 0.533902, 0.986659, 0.532806, 0.334881, 0.331746, 0.682138, 0.403476, 0.157666, 0.0933196, 0.687572, 0.864067, 0.623627, 0.436928, 0.634422, 0.315886, 0.776535, 0.28992, 0.486322, 0.633281, 0.680106, 0.413941, 0.74878, 0.0557304, 0.746276, 0.253372, 0.677609, 0.03761, 0.841179, 0.462366, 0.140944, 0.666033, 0.444185, 0.896211, 0.745836, 0.837855, 0.781673, 0.992182, 0.759924, 0.149578, 0.801817, 0.23307, 0.983902, 0.279981, 0.436416, 0.408519, 0.977166, 0.486979, 0.616462, 0.717189, 0.78756, 0.979531, 0.211748, 0.281095, 0.808292, 0.452116, 0.505222, 0.211008, 0.0880939, 0.304145, 0.157218, 0.823591, 0.881477, 0.63282, 0.43668, 0.0421872, 0.92511, 0.208938, 0.595937, 0.319492, 0.386474, 0.912154, 0.86807, 0.973331, 0.576398, 0.423191, 0.191564, 0.271486, 0.166704, 0.59387, 0.846881, 0.603252, 0.726757, 0.283076, 0.252814, 0.0950604, 0.533592, 0.609895, 0.911565, 0.402955, 0.519665, 0.719536, 0.779983, 0.58922, 0.50628, 0.0937121, 0.535797, 0.385731, 0.117425, 0.265472, 0.237342, 0.166879, 0.377402, 0.387306, 0.370544, 0.680795, 0.182085, 0.472747, 0.681323, 0.799128, 0.401971, 0.824955, 0.541769, 0.0634075, 0.357216, 0.564308, 0.106465, 0.112271, 0.227668, 0.363854, 0.323736, 0.175635, 0.855609, 0.122859, 0.634043, 0.00744051, 0.443882, 0.918507, 0.910142, 0.482702, 0.0487369, 0.22982, 0.119722, 0.630875, 0.775613, 0.127926, 0.825253, 0.20214, 0.366558, 0.231207, 0.489712, 0.782439, 0.783108, 0.635982, 0.684556, 0.444892, 0.740059, 0.74257, 0.556034, 0.020627, 0.169975, 0.318743, 0.987251, 0.848496, 0.961086, 0.313144, 0.469643, 0.280877, 0.444973, 0.565544, 0.674149, 0.966, 0.840922, 0.624866, 0.898954, 0.360874, 0.333776, 0.416233, 0.964037, 0.995817, 0.8895, 0.101838, 0.213644, 0.672951, 0.367858, 0.473691, 0.849456, 0.0204076, 0.9011, 0.0515015, 0.311752, 0.42627, 0.612015, 0.03455, 0.36429, 0.809447, 0.0841483, 0.902302, 0.1439, 0.358429, 0.79011, 0.884318, 0.176961, 0.267724, 0.227391, 0.41377, 0.630351, 0.503522, 0.886437, 0.611218, 0.315786, 0.656199, 0.718377, 0.569848, 0.202811, 0.937216, 0.353074, 0.51868, 0.190367, 0.697473, 0.0124044, 0.554073, 0.251852, 0.839427, 0.0618167, 0.656978, 0.105702, 0.128592, 0.660755, 0.49681, 0.438251, 0.22643, 0.773987, 0.892038, 0.268779, 0.827232, 0.80203, 0.558867, 0.801823, 0.779758, 0.24148, 0.979899, 0.044897, 0.387513, 0.148037, 0.971127, 0.25907, 0.701731, 0.702838, 0.841079, 0.471461, 0.00816618, 0.763383, 0.845966, 0.390235, 0.479232, 0.272554, 0.428787, 0.630907, 0.144826, 0.795675, 0.885258, 0.877329, 0.260514, 0.88605, 0.251618, 0.0982398, 0.240143, 0.336942, 0.497409, 0.328716, 0.559073, 0.710497, 0.407214, 0.140225, 0.568899, 0.305758, 0.985192, 0.353355, 0.451623, 0.806361, 0.552935, 0.137101, 0.39632, 0.807873, 0.730659, 0.83679, 0.474723, 0.902925, 0.181643, 0.519008, 0.495691, 0.0574197, 0.399138, 0.376197, 0.728309, 0.563635, 0.429087, 0.900816, 0.738019, 0.664776, 0.472119, 0.866737, 0.672326, 0.820026, 0.55473, 0.543396, 0.430427, 0.0390094, 0.14597, 0.763778, 0.925085, 0.182753, 0.927547, 0.38596, 0.0845172, 0.686091, 0.470664, 0.633901, 0.379529, 0.808742, 0.677396, 0.660347, 0.824392, 0.138273, 0.913349, 0.282948, 0.182555, 0.195728, 0.0569882, 0.0180547, 0.285175, 0.141473, 0.891612, 0.916347, 0.170171, 0.0141192, 0.0746619, 0.848639, 0.88324, 0.224257, 0.398841, 0.446264, 0.367285, 0.394011, 0.593844, 0.421785, 0.0978993, 0.748422, 0.546064, 0.0749291, 0.679814, 0.773036, 0.190595, 
0.9074, 0.132129, 0.589965, 0.310527, 0.312563, 0.496822, 0.0182632, 0.421471, 0.246528, 0.111739, 0.146622, 0.0458184, 0.499015, 0.370329, 0.38683, 0.239333, 0.81416, 0.292132, 0.932061, 0.155805, 0.0530965, 0.412611, 0.177749, 0.0743324, 0.679697, 0.990631, 0.772683, 0.658341, 0.633155, 0.595962, 0.101049, 0.0893765, 0.652252, 0.828871, 0.292416, 0.812544, 0.338977, 0.574637, 0.662387, 0.150282, 0.445916, 0.982466, 0.403631, 0.237927, 0.605144, 0.178157, 0.139473, 0.409146, 0.032356, 0.623801, 0.892386, 0.686936, 0.208553, 0.0746324, 0.429076, 0.804958, 0.551032, 0.0414786, 0.268112, 0.282896, 0.191952, 0.631162, 0.338374, 0.294875, 0.113726, 0.861872, 0.441595, 0.214191, 0.66156, 0.0493908, 0.373903, 0.351066, 0.754358, 0.203796, 0.232518, 0.936212, 0.894607, 0.733782, 0.499273, 0.352905, 0.785701, 0.269482, 0.964939, 0.0445552, 0.191663, 0.380432, 0.263502, 0.460111, 0.250836, 0.164842, 0.517637, 0.740929, 0.566001, 0.228714, 0.678602, 0.0194022, 0.737847, 0.959707, 0.463411, 0.945044, 0.68689, 0.833525, 0.149539, 0.47417, 0.681319, 0.351924, 0.58604, 0.693724, 0.549001, 0.267608, 0.17901, 0.308286, 0.317175, 0.303205, 0.276014, 0.180304, 0.111004, 0.313073, 0.119443, 0.966616, 0.503921, 0.516759, 0.928561, 0.0495854, 0.797306, 0.00506445, 0.953303, 0.680102, 0.609641, 0.4532, 0.604242, 0.592065, 0.555589, 0.885786, 0.379935, 0.840548, 0.747545, 0.713203, 0.0289334, 0.825641, 0.00517996, 0.31366, 0.102872, 0.00632973, 0.0294658, 0.937041, 0.744933, 0.429079, 0.2378, 0.532026, 0.395294, 0.887661, 0.17933, 0.440347, 0.296402, 0.00236916, 0.209272, 0.5654, 0.834848, 0.460221, 0.0318093, 0.123416, 0.671155, 0.508426, 0.638721, 0.279426, 0.977275, 7.68745e-05, 0.955426, 0.765669, 0.835244, 0.374852, 0.3037, 0.182404, 0.563928, 0.206628, 0.845717, 0.0740084, 0.534655, 0.948065, 0.860815, 0.727566, 0.592863, 0.611181, 0.51679, 0.878952, 0.225228, 0.506738, 0.912728, 0.750646, 0.352038, 0.663531, 0.717603, 0.263244, 0.312346, 0.900763, 0.3021, 0.710917, 0.801194, 0.275892, 0.135634, 0.013046, 0.143271, 0.706457, 0.4556, 0.880494, 0.323518, 0.914485, 0.948586, 0.837672, 0.268554, 0.492549, 0.835405, 0.424024, 0.251007, 0.670121, 0.894743, 0.936056, 0.878409, 0.516729, 0.980338, 0.858777, 0.515476, 0.806565, 0.459549, 0.673979, 0.0373015, 0.225456, 0.448623, 0.742773, 0.30995, 0.730075, 0.645957, 0.416948, 0.252095, 0.0891325, 0.269174, 0.277805, 0.0260466, 0.438988, 0.912189, 0.273474, 0.296135, 0.0327403, 0.410277, 0.885817, 0.672258, 0.957767, 0.0637427, 0.705864, 0.750885, 0.532208, 0.500071, 0.989295, 0.198371, 0.155448, 0.893755, 0.55062, 0.787393, 0.673014, 0.700156, 0.785568, 0.849068, 0.471647, 0.929353, 0.830086, 0.164187, 0.95872, 0.707694, 0.254694, 0.516321, 0.198168, 0.652784, 0.554721, 0.600844, 0.00513052, 0.777388, 0.267702, 0.554596, 0.966946, 0.982968, 0.91765, 0.572923, 0.505429, 0.544413, 0.920607, 0.277406, 0.358607, 0.644469, 0.802135, 0.417853, 0.0748436, 0.0656076, 0.667222, 0.00809094, 0.511886, 0.062951, 0.140421, 0.786651, 0.275054, 0.718954, 0.307897, 0.899251, 0.940873, 0.635788, 0.966653, 0.308592, 0.0912164, 0.996029, 0.440294, 0.025451, 0.57036, 0.0815001, 0.785151, 0.673662, 0.555222, 0.72086, 0.635426, 0.916495, 0.53304, 0.37754, 0.745192, 0.892548, 0.440584, 0.0808152, 0.658269, 0.417595, 0.821983, 0.454843, 0.510409, 0.443047, 0.455659, 0.0762574, 0.68442, 0.764375, 0.118305, 0.117737, 0.109274, 0.830061, 0.622865, 0.621019, 0.363026, 0.437709, 0.0458459, 0.748531, 0.896462, 0.0851837, 0.422135, 0.109305, 0.805803, 0.888655, 0.294698, 0.511156, 0.580689, 
0.254876, 0.877237, 0.965993, 0.958119, 0.0194436, 0.340496, 0.74835, 0.988802, 0.571348, 0.766744, 0.511712, 0.408168, 0.162194, 0.877615, 0.343821, 0.702854, 0.795688, 0.0172876, 0.252809, 0.818424, 0.649099, 0.112949, 0.435633, 0.606987, 0.929316, 0.253601, 0.429676, 0.0881306, 0.712534, 0.337336, 0.693027, 0.22348, 0.347465, 0.360248, 0.0609941, 0.807739, 0.891838, 0.871452, 0.965494, 0.414782, 0.107206, 0.514451, 0.571409, 0.849897, 0.949761, 0.438625, 0.972175, 0.519867, 0.176188, 0.42903, 0.706912, 0.799488, 0.216814, 0.192208, 0.549729, 0.959527, 0.0193243, 0.142677, 0.561262, 0.472697, 0.45902, 0.461403, 0.247776, 0.421327, 0.882689, 0.0900163, 0.484854, 0.752666, 0.878102, 0.485377, 0.102796, 0.642061, 0.561637, 0.738313, 0.502398, 0.318698, 0.652747, 0.847825, 0.709615, 0.967789, 0.437439, 0.940151, 0.788328, 0.0527278, 0.207602, 0.332577, 0.143298, 0.963026, 0.905102, 0.325186, 0.321057, 0.996634, 0.787633, 0.2473, 0.612468, 0.680492, 0.77213, 0.651793, 0.112792, 0.600331, 0.0214044, 0.392964, 0.346418, 0.211294, 0.420163, 0.517522, 0.814278, 0.454198, 0.563263, 0.266748, 0.136205, 0.502684, 0.8188, 0.400989, 0.118937, 0.0411338, 0.858195, 0.834085, 0.902483, 0.263586, 0.238561, 0.107777, 0.535858, 0.239505, 0.277898, 0.735298, 0.950311, 0.216747, 0.0549665, 0.435693, 0.56972, 0.725088, 0.476098, 0.590023, 0.791093, 0.448307, 0.144103, 0.473148, 0.0971389, 0.527271, 0.84826, 0.451409, 0.624358, 0.790694, 0.58076, 0.947789, 0.847968, 0.268656, 0.0502452, 0.427562, 0.2381, 0.37817, 0.340982, 0.990372, 0.707389, 0.881687, 0.614775, 0.459538, 0.689486, 0.257183, 0.508028, 0.951647, 0.239295, 0.510228, 0.0355115, 0.674017, 0.650262, 0.629765, 0.172136, 0.664333, 0.319067, 0.626285, 0.760027, 0.918703, 0.394823, 0.0561703, 0.685326, 0.387615, 0.3077, 0.456536, 0.716168, 0.554496, 0.922222, 0.465481, 0.422972, 0.284398, 0.316601, 0.117767, 0.664932, 0.818385, 0.0772614, 0.797809, 0.565864, 0.806523, 0.502041, 0.986052, 0.40194, 0.853426, 0.923334, 0.229677, 0.409202, 0.103052, 0.803749, 0.741309, 0.584289, 0.17029, 0.813915, 0.486107, 0.335076, 0.0723319, 0.0648056, 0.0169297, 0.276338, 0.803466, 0.941484, 0.675518, 0.25859, 0.674748, 0.145562, 0.933589, 0.74698, 0.162899, 0.987096, 0.901738, 0.234055, 0.957422, 0.636644, 0.835198, 0.548654, 0.171008, 0.328339, 0.176741, 0.108074, 0.0648382, 0.973493, 0.0507243, 0.69277, 0.860223, 0.851903, 0.526105, 0.186676, 0.485972, 0.0915541, 0.123117, 0.263321, 0.114275, 0.471546, 0.910374, 0.840445, 0.435861, 0.472122, 0.510161, 0.0544588, 0.0254585, 0.84312, 0.822587, 0.761342, 0.096239, 0.519051, 0.109043, 0.331702, 0.0499664, 0.298945, 0.481621, 0.134952, 0.952646, 0.801653, 0.890752, 0.31178, 0.91065, 0.860964, 0.260392, 0.790841, 0.939215, 0.541865, 0.288362, 0.759087, 0.278885, 0.608133, 0.50618, 0.642822, 0.170413, 0.191113, 0.536587, 0.548019, 0.7547, 0.853169, 0.374626, 0.250609, 0.478192, 0.992317, 0.921845, 0.113108, 0.90873, 0.483042, 0.296314, 0.5585, 0.101148, 0.798438, 0.76298, 0.671514, 0.836567, 0.745829, 0.979008, 0.176332, 0.504983, 0.0675006, 0.37174, 0.730015, 0.949321, 0.963235, 0.114482, 0.798551, 0.8641, 0.155542, 0.43491, 0.529986, 0.0602692, 0.965662, 0.973488, 0.12901, 0.497966, 0.974907, 0.955867, 0.212779, 0.992117, 0.852558, 0.0952857, 0.867269, 0.690625, 0.686375, 0.74081, 0.668102, 0.536224, 0.0999359, 0.285415, 0.0213675, 0.186844, 0.539411, 0.46288, 0.785401, 0.481897, 0.307431, 0.325025, 0.326074, 0.806278, 0.430591, 0.723728, 0.173602, 0.962883, 0.816989, 0.903304, 0.469133, 0.604026, 0.0321729, 0.840409, 
0.24737, 0.736421, 0.48015, 0.192789, 0.951041, 0.593976, 0.151651, 0.443673, 0.34385, 0.577428, 0.140803, 0.566435, 0.11966, 0.72859, 0.368858, 0.0577975, 0.34032, 0.609083, 0.256059, 0.348514, 0.0508468, 0.00316749, 0.555017, 0.655676, 0.737948, 0.567344, 0.362025, 0.146449, 0.00129713, 0.844723, 0.916729, 0.00308606, 0.0870135, 0.877402, 0.951417, 0.221822, 0.772431, 0.643395, 0.67301, 0.116911, 0.46719, 0.0669138, 0.130404, 0.619855, 0.801693, 0.774829, 0.989154, 0.856526, 0.302431, 0.648183, 0.215863, 0.804791, 0.201173, 0.336564, 0.97062, 0.288187, 0.507616, 0.646082, 0.733398, 0.224121, 0.504736, 0.00772472, 0.78743, 0.771164, 0.559108, 0.795581, 0.367603, 0.468444, 0.585013, 0.586772, 0.653175, 0.0160145, 0.522016, 0.58587, 0.209264, 0.458205, 0.616487, 0.117648, 0.115699, 0.583878, 0.794704, 0.221679, 0.911828, 0.968042, 0.718035, 0.594693, 0.188187, 0.937082, 0.670652, 0.205657, 0.0996074, 0.0260505, 0.26999, 0.74838, 0.934845, 0.677448, 0.560191, 0.176769, 0.066693, 0.506686, 0.430665, 0.821599, 0.92822, 0.44078, 0.443573, 0.292611, 0.189979, 0.925306, 0.973656, 0.949399, 0.719787, 0.570721, 0.182352, 0.844719, 0.435064, 0.588637, 0.999896, 0.549393, 0.767109, 0.014801, 0.486565, 0.840187, 0.808312, 0.18391, 0.234522, 0.584381, 0.37433, 0.29087, 0.0704586, 0.304292, 0.319893, 0.0587912, 0.845847, 0.00314954, 0.837245, 0.462001, 0.589969, 0.482115, 0.647743, 0.787038, 0.723061, 0.326683, 0.85121, 0.355814, 0.705157, 0.989882, 0.929909, 0.542999, 0.632499, 0.842266, 0.176936, 0.29883, 0.861086, 0.189718, 0.432002, 0.175787, 0.327401, 0.838435, 0.401861, 0.0125604, 0.203791, 0.444693, 0.187772, 0.152064, 0.815757, 0.684142, 0.813747, 0.823545, 0.134347, 0.291306, 0.637104, 0.536827, 0.947242, 0.627055, 0.850737, 0.447708, 0.827583, 0.200962, 0.420909, 0.828729, 0.430872, 0.907566, 0.491373, 0.260588, 0.101679, 0.0487735, 0.818573, 0.808401, 0.597819, 0.937844, 0.554982, 0.0817186, 0.637007, 0.208433, 0.684939, 0.678865, 0.938025, 0.0468879, 0.106424, 0.279055, 0.325352, 0.835876, 0.732463, 0.956187, 0.776323, 0.52492, 0.809183, 0.256638, 0.222042, 0.612182, 0.246364, 0.751067, 0.697015, 0.557753, 0.80429, 0.892181, 0.0235493, 0.392911, 0.251057, 0.936298, 0.842089, 0.561525, 0.810932, 0.104489, 0.108559, 0.939076, 0.522494, 0.901654, 0.253191, 0.0305009, 0.000174918, 0.548136, 0.269365, 0.374307, 0.779397, 0.889697, 0.265327, 0.530964, 0.40879, 0.0999762, 0.164271, 0.867453, 0.961738, 0.292759, 0.14762, 0.237227, 0.722776, 0.680346, 0.313074, 0.823815, 0.385996, 0.0297546, 0.425466, 0.579438, 0.0663015, 0.159994, 0.438489, 0.358821, 0.21364, 0.481726, 0.483257, 0.980104, 0.953453, 0.829254, 0.82761, 0.700659, 0.625401, 0.37035, 0.904741, 0.102187, 0.223586, 0.990634, 0.419582, 0.621645, 0.27542, 0.709467, 0.517586, 0.384775, 0.061399, 0.406794, 0.0198032, 0.170385, 0.152503, 0.249718, 0.611619, 0.353437, 0.585372, 0.778843, 0.748258, 0.00522621, 0.0147192, 0.0705208, 0.442315, 0.166621, 0.0496034, 0.00780444, 0.0572878, 0.759933, 0.574716, 0.93493, 0.971876, 0.62744, 0.626682, 0.0144254, 0.224486, 0.292508, 0.16733, 0.461605, 0.539995, 0.769798, 0.580077, 0.379058, 0.0096561, 0.717079, 0.43319, 0.478359, 0.383725, 0.973667, 0.551515, 0.159994, 0.331342, 0.89847, 0.104814, 0.186139, 0.791425, 0.513075, 0.207098, 0.705184, 0.959963, 0.109553, 0.0240362, 0.525282, 0.11477, 0.90424, 0.163693, 0.303969, 0.108482, 0.122791, 0.012939, 0.342588, 0.0272011, 0.600804, 0.921436, 0.512615, 0.0500179, 0.600177, 0.618209, 0.508877, 0.460674, 0.147336, 0.926143, 0.849058, 0.106621, 0.442745, 
0.854984, 0.649785, 0.680815, 0.215518, 0.54041, 0.0592017, 0.213795, 0.275691, 0.692786, 0.812412, 0.730742, 0.0430776, 0.515699, 0.0471932, 0.268972, 0.191225, 0.162789, 0.859575, 0.457822, 0.82466, 0.0802463, 0.21362, 0.347316, 0.924177, 0.535158, 0.279048, 0.0177046, 0.212148, 0.809314, 0.92635, 0.0842272, 0.0464712, 0.156035, 0.113283, 0.633604, 0.586258, 0.138507, 0.106527, 0.485321, 0.696954, 0.563075, 0.0615132, 0.500333, 0.869193, 0.760761, 0.161124, 0.925194, 0.977073, 0.178616, 0.966401, 0.328208, 0.270984, 0.367092, 0.498169, 0.338269, 0.750939, 0.543984, 0.9023, 0.543534, 0.016312, 0.502658, 0.86578, 0.298257, 0.369636, 0.664773, 0.266585, 0.36256, 0.616225, 0.180534, 0.2579, 0.762294, 0.458767, 0.493684, 0.0541348, 0.688507, 0.401433, 0.704303, 0.893807, 0.474275, 0.750621, 0.337068, 0.789275, 0.0368413, 0.90707, 0.63747, 0.311911, 0.433968, 0.797275, 0.946471, 0.392497, 0.57048, 0.324949, 0.707479, 0.473632, 0.252424, 0.882691, 0.95303, 0.145117, 0.359622, 0.323966, 0.850115, 0.657502, 0.490679, 0.117636, 0.622882, 0.952931, 0.752963, 0.748988, 0.0824225, 0.220845, 0.518163, 0.384343, 0.325047, 0.0654623, 0.224933, 0.100739, 0.712389, 0.433919, 0.946161, 0.91136, 0.54095, 0.276587, 0.712972, 0.287296, 0.636123, 0.482552, 0.969401, 0.277553, 0.88716, 0.154472, 0.186099, 0.187503, 0.888573, 0.886183, 0.852393, 0.0354023, 0.218044, 0.439132, 0.0759184, 0.970972, 0.903231, 0.0863043, 0.841808, 0.877451, 0.212733, 0.780609, 0.260082, 0.148804, 0.211385, 0.736429, 0.447931, 0.73038, 0.613024, 0.925157, 0.411162, 0.519772, 0.771778, 0.0303801, 0.336019, 0.706645, 0.320265, 0.447593, 0.616613, 0.349174, 0.309556, 0.409711, 0.700563, 0.663469, 0.665177, 0.975724, 0.262737, 0.0576114, 0.456657, 0.202072, 0.548093, 0.70414, 0.00151419, 0.591219, 0.723603, 0.580681, 0.203495, 0.00301552, 0.363667, 0.701647, 0.0898662, 0.0175085, 0.57922, 0.481037, 0.511311, 0.517898, 0.638491, 0.19532, 0.116193, 0.45704, 0.94091, 0.892934, 0.458747, 0.0299241, 0.432842, 0.0990674, 0.388252, 0.218489, 0.303574, 0.489316, 0.909617, 0.38945, 0.348984, 0.625489, 0.0481935, 0.475787, 0.239942, 0.51331, 0.0755039, 0.459515, 0.973249, 0.74645, 0.206274, 0.17815, 0.417064, 0.636705, 0.60885, 0.17796, 0.203345, 0.682714, 0.642812, 0.184734, 0.405048, 0.907048, 0.291277, 0.0844157, 0.593001, 0.153298, 0.151499, 0.993113, 0.21695, 0.0255066, 0.360015, 0.647722, 0.707693, 0.923729, 0.152495, 0.132257, 0.818965, 0.0722895, 0.943851, 0.385379, 0.921495, 0.00638984, 0.547315, 0.841284, 0.407772, 0.86433, 0.810574, 0.0753104, 0.47821, 0.595804, 0.792667, 0.724536, 0.269371, 0.194357, 0.224023, 0.763775, 0.234777, 0.725786, 0.855229, 0.759268, 0.510267, 0.588473, 0.0172854, 0.4479, 0.664336, 0.50244, 0.298565, 0.412266, 0.70789, 0.258666, 0.669177, 0.97114, 0.794049, 0.652256, 0.552754, 0.640575, 0.19356, 0.0469088, 0.774715, 0.704023, 0.810834, 0.00158777, 0.0347898, 0.526573, 0.453529, 0.335324, 0.823378, 0.718534, 0.0208861, 0.435365, 0.705493, 0.174218, 0.983121, 0.564026, 0.750141, 0.545693, 0.592673, 0.0115324, 0.652051, 0.43416, 0.780084, 0.715096, 0.445158, 0.13432, 0.722025, 0.594671, 0.425649, 0.401439, 0.85149, 0.664776, 0.59369, 0.981758, 0.332022, 0.317091, 0.821814, 0.932877, 0.934732, 0.645572, 0.48562, 0.666455, 0.487356, 0.582273, 0.793477, 0.705752, 0.0679657, 0.539385, 0.92631, 0.140349, 0.419209, 0.613488, 0.186375, 0.497371, 0.654363, 0.605766, 0.353864, 0.521137, 0.365737, 0.922172, 0.260254, 0.904775, 0.158833, 0.871395, 0.0284482, 0.138727, 0.981333, 0.0385456, 0.825761, 0.899612, 0.695491, 
0.599947, 0.107973, 0.904803, 0.784907, 0.348681, 0.0967727, 0.290476, 0.47869, 0.724451, 0.696268, 0.390858, 0.804403, 0.915155, 0.500381, 0.326014, 0.879121, 0.747531, 0.29866, 0.836154, 0.466789, 0.0554815, 0.521147, 0.0193921, 0.277637, 0.412436, 0.165975, 0.0933043, 0.396028, 0.364232, 0.894119, 0.662706, 0.844554, 0.627276, 0.510571, 0.775876, 0.559223, 0.140766, 0.132849, 0.630919, 0.993621, 0.312176, 0.762026, 0.269199, 0.421158, 0.995547, 0.1647, 0.281031, 0.242504, 0.124864, 0.0631884, 0.783051, 0.832809, 0.370051, 0.837016, 0.322727, 0.969841, 0.635818, 0.373681, 0.0906802, 0.285469, 0.452135, 0.74719, 0.1876, 0.530949, 0.301913, 0.181019, 0.758152, 0.435024, 0.439012, 0.0774962, 0.617846, 0.648043, 0.315735, 0.376518, 0.314902, 0.409523, 0.485949, 0.402967, 0.605442, 0.0406506, 0.784141, 0.846694, 0.41006, 0.013384, 0.366095, 0.78164, 0.0102839, 0.6342, 0.913823, 0.349234, 0.112328, 0.124363, 0.152649, 0.711797, 0.386605, 0.471072, 0.818652, 0.504349, 0.648708, 0.188056, 0.826415, 0.101712, 0.0379787, 0.661428, 0.260474, 0.259856, 0.530603, 0.696072, 0.889401, 0.908128, 0.651632, 0.509349, 0.590079, 0.522361, 0.235219, 0.952806, 0.273418, 0.305395, 0.688828, 0.666369, 0.413037, 0.686757, 0.505578, 0.356498, 0.431466, 0.609216, 0.642127, 0.988884, 0.674534, 0.793127, 0.875922, 0.804245, 0.555926, 0.180767, 0.591838, 0.536362, 0.0700143, 0.690774, 0.141717, 0.425298, 0.901915, 0.0963091, 0.310052, 0.294121, 0.601604, 0.097364, 0.708817, 0.0427036, 0.50961, 0.349314, 0.164607, 0.971869, 0.986749, 0.626938, 0.269309, 0.182684, 0.888293, 0.968728, 0.629877, 0.626314, 0.882564, 0.266396, 0.894487, 0.346194, 0.636177, 0.378728, 0.409531, 0.829683, 0.613478, 0.175034, 0.556522, 0.970561, 0.716135, 0.0405869, 0.705143, 0.649151, 0.0682475, 0.143428, 0.888939, 0.163392, 0.0536951, 0.966391, 0.157317, 0.601915, 0.818234, 0.274833, 0.644002, 0.17447, 0.132195, 0.603534, 0.102146, 0.0229057, 0.449928, 0.0959411, 0.404557, 0.313371, 0.0421685, 0.680261, 0.0663152, 0.655345, 0.748725, 0.124891, 0.822067, 0.253951, 0.124592, 0.698393, 0.164684, 0.616958, 0.824313, 0.964058, 0.238139, 0.402992, 0.382403, 0.457389, 0.0418984, 0.259257, 0.20708, 0.378181, 0.595022, 0.0855787, 0.672578, 0.582013, 0.0439431, 0.0288226, 0.100938, 0.615393, 0.731328, 0.656283, 0.330191, 0.182827, 0.390147, 0.714379, 0.176488, 0.990829, 0.809143, 0.568215, 0.303771, 0.91016, 0.44329, 0.593669, 0.722076, 0.560905, 0.560642, 0.513886, 0.855322, 0.607786, 0.781349, 0.548843, 0.546036, 0.124078, 0.237524, 0.867486, 0.357688, 0.338076, 0.786907, 0.20566, 0.353073, 0.898358, 0.177624, 0.284894, 0.527775, 0.264077, 0.748425, 0.554378, 0.590301, 0.143675, 0.410008, 0.416264, 0.763627, 0.815686, 0.639639, 0.361523, 0.567836, 0.550479, 0.606591, 0.0178056, 0.166134, 0.469154, 0.890778, 0.0870741, 0.641713, 0.272892, 0.142382, 0.157895, 0.875142, 0.817378, 0.115733, 0.650699, 0.954393, 0.190758, 0.719208, 0.797697, 0.581234, 0.217666, 0.76857, 0.870738, 0.635089, 0.532965, 0.237063, 0.396399, 0.317764, 0.844411, 0.594896, 0.985531, 0.965591, 0.148934, 0.533466, 0.326614, 0.0676991, 0.0409045, 0.259027, 0.619908, 0.460028, 0.0391452, 0.35105, 0.879854, 0.920177, 0.949852, 0.0869352, 0.110073, 0.313809, 0.47796, 0.539343, 0.510045, 0.512023, 0.0600525, 0.617031, 0.604771, 0.533327, 0.105267, 0.632391, 0.308386, 0.490456, 0.702948, 0.616332, 0.634406, 0.130287, 0.819221, 0.0894389, 0.171534, 0.980235, 0.455154, 0.551306, 0.661436, 0.899947, 0.78834, 0.168625, 0.0589216, 0.097285, 0.700832, 0.821703, 0.017523, 0.210227, 0.328355, 
0.861135, 0.209952, 0.518591, 0.391321, 0.846423, 0.702223, 0.491477, 0.210212, 0.267899, 0.437041, 0.288857, 0.953474, 0.964247, 0.350472, 0.334289, 0.386696, 0.278018, 0.201989, 0.899821, 0.98322, 0.143034, 0.420224, 0.37725, 0.144546, 0.968261, 0.587383, 0.640063, 0.0493429, 0.694887, 0.972637, 0.832521, 0.234671, 0.810305, 0.190715, 0.257148, 0.678743, 0.067473, 0.194639, 0.311488, 0.24708, 0.162158, 0.728028, 0.551571, 0.95262, 0.292592, 0.342366, 0.383834, 0.317163, 0.88439, 0.196063, 0.645091, 0.728461, 0.328206, 0.423494, 0.765601, 0.382084, 0.256062, 0.598997, 0.839553, 0.0702839, 0.81107, 0.665389, 0.41877, 0.550256, 0.619956, 0.129191, 0.256608, 0.768557, 0.79812, 0.527826, 0.425846, 0.973146, 0.913008, 0.0309738, 0.129424, 0.385945, 0.48925, 0.36247, 0.651493, 0.239735, 0.113484, 0.885722, 0.827916, 0.692666, 0.0900508, 0.0624783, 0.297762, 0.910664, 0.641529, 0.812213, 0.669625, 0.695333, 0.864861, 0.696586, 0.240099, 0.873849, 0.267616, 0.0150577, 0.496142, 0.0181793, 0.315326, 0.117324, 0.403701, 0.624366, 0.856175, 0.000704271, 0.0208226, 0.518633, 0.494967, 0.0915335, 0.797735, 0.293147, 0.144645, 0.321851, 0.237865, 0.729335, 0.83753, 0.444805, 0.39933, 0.497946, 0.36823, 0.249807, 0.485935, 0.128588, 0.667983, 0.335525, 0.469585, 0.337843, 0.718907, 0.24901, 0.70854, 0.992345, 0.578137, 0.579341, 0.776219, 0.485066, 0.19798, 0.373463, 0.931199, 0.912336, 0.746203, 0.750536, 0.951535, 0.691765, 0.776318, 0.357287, 0.72839, 0.0249142, 0.628353, 0.239833, 0.109642, 0.772355, 0.648747, 0.489777, 0.56614, 0.774993, 0.547418, 0.991198, 0.271008, 0.496569, 0.621571, 0.299536, 0.00810245, 0.0864569, 0.533341, 0.783043, 0.809771, 0.597423, 0.412529, 0.79456, 0.52394, 0.82497, 0.3518, 0.941565, 0.157979, 0.479138, 0.496974, 0.938128, 0.209493, 0.950286, 0.836756, 0.558122, 0.521529, 0.232116, 0.33917, 0.596949, 0.93423, 0.0692899, 0.369535, 0.749326, 0.556352, 0.982935, 0.143458, 0.893278, 0.304947, 0.400386, 0.491137, 0.711417, 0.764818, 0.235418, 0.487678, 0.253228, 0.455756, 0.551269, 0.579563, 0.235527, 0.895806, 0.474983, 0.353294, 0.984331, 0.784567, 0.422001, 0.0192113, 0.528427, 0.574712, 0.140488, 0.506967, 0.86844, 0.138171, 0.418844, 0.551601, 0.700853, 0.41318, 0.921164, 0.788882, 0.329395, 0.992396, 0.0819402, 0.0071935, 0.992821, 0.985718, 0.314136, 0.200217, 0.314595, 0.599339, 0.0609108, 0.744403, 0.0657664, 0.808466, 0.432487, 0.245351, 0.456413, 0.955912, 0.0368268, 0.173992, 0.613383, 0.389589, 0.0655167, 0.492878, 0.697627, 0.128874, 0.11259, 0.422782, 0.252359, 0.0858661, 0.338557, 0.0700371, 0.342711, 0.716044, 0.174474, 0.800268, 0.620241, 0.678921, 0.315725, 0.563589, 0.679173, 0.312168, 0.190003, 0.263356, 0.443382, 0.160867, 0.293886, 0.369934, 0.621355, 0.178948, 0.799668, 0.168929, 0.886597, 0.157751, 0.827426, 0.998084, 0.709695, 0.994338, 0.448279, 0.640411, 0.928919, 0.459238, 0.067816, 0.605502, 0.244398, 0.608718, 0.214511, 0.12063, 0.945361, 0.509654, 0.0255167, 0.417937, 0.818315, 0.987609, 0.413759, 0.538989, 0.0248027, 0.454687, 0.0517092, 0.90271, 0.425484, 0.862402, 0.625027, 0.876214, 0.675517, 0.192748, 0.231229, 0.819236, 0.823086, 0.667963, 0.281389, 0.00746112, 0.968602, 0.317103, 0.477637, 0.0761143, 0.376027, 0.0466629, 0.191921, 0.73032, 0.252946, 0.267239, 0.0905882, 0.849323, 0.946896, 0.181835, 0.270043, 0.056533, 0.528718, 0.34072, 0.195902, 0.706542, 0.991418, 0.119169, 0.891355, 0.864471, 0.699044, 0.651331, 0.285457, 0.670664, 0.466954, 0.801451, 0.64839, 0.485259, 0.881206, 0.0398237, 0.933549, 0.257922, 0.587636, 0.112448, 
0.0148977, 0.937841, 0.765008, 0.0828351, 0.816488, 0.960685, 0.61599, 0.859782, 0.290726, 0.513923, 0.063747, 0.370326, 0.159592, 0.468146, 0.651206, 0.310093, 0.298917, 0.359615, 0.972081, 0.705177, 0.171894, 0.737643, 0.264526, 0.228415, 0.206767, 0.215091, 0.888902, 0.566527, 0.838089, 0.28529, 0.609758, 0.708566, 0.81208, 0.433847, 0.126582, 0.937056, 0.757773, 0.690594, 0.392319, 0.550614, 0.392574, 0.670266, 0.49032, 0.205898, 0.831668, 0.190098, 0.291416, 0.317894, 0.0457926, 0.872519, 0.000115332, 0.729135, 0.741646, 0.453272, 0.604688, 0.0199639, 0.707024, 0.0497294, 0.686269, 0.671024, 0.220367, 0.825915, 0.326601, 0.392532, 0.357285, 0.518455, 0.859585, 0.73902, 0.143127, 0.2866, 0.345896, 0.0462287, 0.941818, 0.501028, 0.344384, 0.0554794, 0.324246, 0.848657, 0.356346, 0.285384, 0.952473, 0.814546, 0.330119, 0.788547, 0.741966, 0.377757, 0.155739, 0.551952, 0.689385, 0.323114, 0.512186, 0.440853, 0.684254, 0.655622, 0.160188, 0.87879, 0.94967, 0.611931, 0.577409, 0.254201, 0.266333, 0.61696, 0.718902, 0.726468, 0.998383, 0.0919312, 0.0664134, 0.828854, 0.472295, 0.842708, 0.64022, 0.305899, 0.0237165, 0.876209, 0.133535, 0.17101, 0.658875, 0.13261, 0.981066, 0.986838, 0.891752, 0.296951, 0.434631, 0.967742, 0.802787, 0.366509, 0.719581, 0.503294, 0.748413, 0.161317, 0.66709, 0.444871, 0.36463, 0.0446563, 0.874504, 0.127738, 0.347592, 0.398526, 0.0199116, 0.914809, 0.167554, 0.189426, 0.855066, 0.694113, 0.799023, 0.0902301, 0.676734, 0.0229465, 0.806434, 0.343859, 0.559115, 0.0432249, 0.659427, 0.354253, 0.616094, 0.411097, 0.806377, 0.77531, 0.741445, 0.000473142, 0.639601, 0.536549, 0.806591, 0.548107, 0.732347, 0.427543, 0.867243, 0.186006, 0.588977, 0.52012, 0.409918, 0.460944, 0.368319, 0.361351, 0.292433, 0.907614, 0.874799, 0.2355, 0.123767, 0.981005, 0.421588, 0.147668, 0.523858, 0.311282, 0.680003, 0.0692566, 0.999342, 0.261984, 0.655352, 0.133604, 0.5844, 0.426139, 0.854832, 0.438496, 0.675043, 0.420723, 0.445101, 0.86545, 0.208841, 0.121776, 0.906885, 0.66269, 0.123358, 0.921135, 0.54658, 0.461271, 0.0967301, 0.402302, 0.71524, 0.182779, 0.50309, 0.730806, 0.250203, 0.584698, 0.628875, 0.485839, 0.321928, 0.367213, 0.983743, 0.493145, 0.173286, 0.760994, 0.306477, 0.0946855, 0.11765, 0.526887, 0.35585, 0.801342, 0.417659, 0.481917, 0.816113, 0.0390786, 0.352499, 0.455266, 0.885476, 0.01811, 0.421414, 0.806737, 0.648411, 0.757006, 0.187066, 0.535292, 0.726305, 0.505754, 0.339057, 0.672727, 0.548889, 0.557402, 0.765191, 0.98779, 0.0785735, 0.687429, 0.437189, 0.437966, 0.0490676, 0.251836, 0.87836, 0.661856, 0.413129, 0.909673, 0.969351, 0.445319, 0.529583, 0.845193, 0.339396, 0.195819, 0.671612, 0.907388, 0.242167, 0.329103, 0.388934, 0.755661, 0.592306, 0.954056, 0.568219, 0.929298, 0.00538758, 0.386031, 0.431302, 0.260224, 0.779087, 0.657806, 0.0189166, 0.430028, 0.260458, 0.977439, 0.388769, 0.340334, 0.106024, 0.881939, 0.838445, 0.413673, 0.335674, 0.494271, 0.334412, 0.593359, 0.49516, 0.976, 0.153431, 0.413018, 0.164818, 0.252509, 0.742455, 0.713348, 0.0204139, 0.538596, 0.707846, 0.0221754, 0.522451, 0.0391039, 0.455664, 0.504789, 0.738907, 0.58575, 0.815157, 0.48306, 0.811862, 0.86745, 0.311792, 0.685545, 0.309708, 0.788836, 0.837826, 0.399528, 0.450462, 0.596774, 0.214621, 0.526566, 0.0688926, 0.448344, 0.120725, 0.315122, 0.787048, 0.963238, 0.340877, 0.199999, 0.079017, 0.864607, 0.764251, 0.718319, 0.111497, 0.279891, 0.115554, 0.262699, 0.890724, 0.353046, 0.810957, 0.866735, 0.00286452, 0.00347619, 0.270853, 0.614247, 0.561387, 0.0215133, 0.891057, 
0.315399, 0.25655, 0.241741, 0.15039, 0.374129, 0.0691939, 0.799363, 0.744584, 0.61992, 0.83166, 0.591765, 0.336728, 0.581412, 0.928769, 0.559652, 0.882395, 0.652254, 0.26545, 0.0670024, 0.98697, 0.391175, 0.224069, 0.323982, 0.279011, 0.820496, 0.557159, 0.327779, 0.352074, 0.206076, 0.717108, 0.0965557, 0.138739, 0.227808, 0.563218, 0.48847, 0.945773, 0.234884, 0.858546, 0.159396, 0.385241, 0.623445, 0.596752, 0.443205, 0.887035, 0.640344, 0.0998492, 0.163499, 0.6304, 0.480013, 0.0448055, 0.312842, 0.841812, 0.0231258, 0.873638, 0.694296, 0.592714, 0.87127, 0.434592, 0.612617, 0.236753, 0.24755, 0.0956149, 0.579344, 0.68045, 0.817486, 0.547015, 0.0270583, 0.692874, 0.805408, 0.90689, 0.506499, 0.757605, 0.440627, 0.0319319, 0.878953, 0.327406, 0.0859084, 0.540661, 0.658303, 0.247164, 0.988378, 0.953815, 0.835087, 0.794337, 0.183549, 0.143762, 0.188967, 0.797925, 0.155226, 0.678526, 0.223761, 0.142341, 0.971828, 0.912707, 0.354852, 0.23376, 0.859434, 0.864688, 0.643642, 0.279002, 0.872146, 0.717845, 0.689394, 0.584934, 0.925325, 0.64485, 0.0868768, 0.440149, 0.0802374, 0.169745, 0.0772872, 0.148129, 0.838965, 0.500671, 0.233542, 0.289451, 0.157224, 0.24364, 0.412919, 0.0722526, 0.248199, 0.308479, 0.3912, 0.500142, 0.222, 0.0948341, 0.917636, 0.50172, 0.465381, 0.440189, 0.286679, 0.842397, 0.90052, 0.290512, 0.803082, 0.303241, 0.919119, 0.725945, 0.0349066, 0.211141, 0.395546, 0.763755, 0.733756, 0.871343, 0.243095, 0.366657, 0.0390919, 0.792521, 0.60397, 0.0279148, 0.943343, 0.253192, 0.238217, 0.342333, 0.923191, 0.98298, 0.872482, 0.833295, 0.0152523, 0.203533, 0.0602389, 0.705377, 0.515586, 0.44581, 0.648555, 0.401494, 0.496757, 0.352891, 0.657929, 0.882993, 0.490186, 0.240471, 0.249185, 0.932046, 0.323939, 0.947336, 0.550444, 0.28375, 0.75001, 0.942039, 0.701898, 0.0971857, 0.93847, 0.0757117, 0.681359, 0.378749, 0.63677, 0.242046, 0.691409, 0.543192, 0.541103, 0.0552468, 0.519849, 0.391595, 0.528859, 0.231188, 0.563127, 0.708664, 0.793334, 0.613483, 0.54913, 0.813614, 0.042799, 0.30788, 0.55394, 0.444663, 0.346912, 0.49824, 0.426941, 0.645794, 0.526599, 0.346327, 0.3573, 0.0900302, 0.883809, 0.404628, 0.868461, 0.923339, 0.749753, 0.537338, 0.460161, 0.660874, 0.0147659, 0.393392, 0.558639, 0.293429, 0.719359, 0.114173, 0.653513, 0.0139946, 0.954595, 0.286828, 0.0624148, 0.385569, 0.547447, 0.543222, 0.176947, 0.960972, 0.278089, 0.388656, 0.841098, 0.446601, 0.470455, 0.920701, 0.846298, 0.493898, 0.95992, 0.57772, 0.260233, 0.684009, 0.173487, 0.323381, 0.0104636, 0.288452, 0.551542, 0.651847, 0.78528, 0.0337595, 0.508202, 0.948789, 0.766963, 0.294237, 0.303095, 0.932342, 0.396169, 0.743192, 0.0843343, 0.635914, 0.158435, 0.330101, 0.74867, 0.848881, 0.136496, 0.289118, 0.6916, 0.272927, 0.290798, 0.904767, 0.31142, 0.166282, 0.36705, 0.549303, 0.469151, 0.0969193, 0.128408, 0.83649, 0.929419, 0.158906, 0.628187, 0.406191, 0.382885, 0.579948, 0.039028, 0.169621, 0.989329, 0.0815238, 0.923423, 0.60885, 0.106084, 0.394455, 0.628407, 0.943118, 0.295768, 0.94597, 0.502776, 0.282522, 0.212961, 0.592191, 0.217716, 0.84636, 0.813798, 0.330917, 0.40851, 0.811735, 0.507795, 0.192148, 0.476361, 0.696752, 0.976219, 0.0985664, 0.130977, 0.581818, 0.972998, 0.240433, 0.321268, 0.369408, 0.209879, 0.144105, 0.31964, 0.645313, 0.437016, 0.0419897, 0.375873, 0.0804421, 0.0512266, 0.0509438, 0.697631, 0.948479, 0.532177, 0.672666, 0.733022, 0.572237, 0.14163, 0.357718, 0.604449, 0.876126, 0.409308, 0.555404, 0.428393, 0.610674, 0.0540597, 0.103888, 0.733858, 0.499431, 0.325702, 0.33307, 
0.412439, 0.536351, 0.10531, 0.465118, 0.83234, 0.0853166, 0.787354, 0.151916, 0.813868, 0.375883, 0.668342, 0.725351, 0.610602, 0.568374, 0.750339, 0.112482, 0.847242, 0.0451396, 0.75429, 0.959095, 0.112375, 0.235576, 0.10334, 0.259363, 0.108213, 0.284684, 0.264342, 0.543585, 0.57483, 0.617723, 0.626695, 0.932913, 0.742072, 0.2597, 0.877427, 0.377372, 0.801181, 0.846736, 0.326486, 0.903304, 0.756728, 0.68676, 0.916784, 0.615015, 0.63057, 0.196501, 0.463048, 0.636259, 0.369163, 0.754758, 0.112082, 0.112358, 0.975391, 0.87108, 0.442962, 0.051052, 0.955866, 0.965001, 0.0991575, 0.387232, 0.763433, 0.84831, 0.422091, 0.878841, 0.334149, 0.612291, 0.34583, 0.543017, 0.980456, 0.925603, 0.114561, 0.936859, 0.343194, 0.0879617, 0.895599, 0.196207, 0.95123, 0.0743102, 0.471337, 0.164966, 0.738517, 0.475985, 0.493499, 0.0356601, 0.0216095, 0.553481, 0.301842, 0.181291, 0.223093, 0.545786, 0.771299, 0.778807, 0.157046, 0.139492, 0.111831, 0.740367, 0.398365, 0.170494, 0.581992, 0.860194, 0.213607, 0.0784152, 0.415623, 0.100157, 0.352112, 0.675707, 0.531062, 0.183464, 0.661215, 0.300351, 0.440552, 0.585824, 0.716819, 0.631349, 0.327658, 0.405028, 0.859839, 0.104944, 0.160611, 0.0472555, 0.734982, 0.827174, 0.0545748, 0.893999, 0.808877, 0.679652, 0.465044, 0.750202, 0.436215, 0.822035, 0.317308, 0.262415, 0.918618, 0.863142, 0.574615, 0.660461, 0.43724, 0.909306, 0.972506, 0.723827, 0.248622, 0.163049, 0.386939, 0.106643, 0.994871, 0.855658, 0.455771, 0.164918, 0.30611, 0.908631, 0.688893, 0.7613, 0.261632, 0.00253999, 0.271221, 0.432736, 0.635471, 0.181422, 0.380618, 0.564341, 0.434719, 0.966238, 0.708924, 0.405715, 0.703709, 0.548696, 0.12822, 0.144822, 0.428797, 0.184346, 0.338066, 0.955262, 0.300652, 0.298374, 0.316968, 0.108116, 0.823357, 0.151366, 0.50352, 0.434533, 0.0268823, 0.233961, 0.430651, 0.169694, 0.562766, 0.420687, 0.74611, 0.366372, 0.836339, 0.530026, 0.186737, 0.938637, 0.875394, 0.425433, 0.772448, 0.667455, 0.764654, 0.0874587, 0.743262, 0.355979, 0.436202, 0.148673, 0.201261, 0.276859, 0.553841, 0.95306, 0.090599, 0.620788, 0.109106, 0.844815, 0.115852, 0.98086, 0.996081, 0.44014, 0.341718, 0.280627, 0.642585, 0.776772, 0.0884373, 0.524358, 0.442389, 0.571881, 0.502865, 0.14464, 0.701467, 0.68293, 0.463895, 0.0902899, 0.816324, 0.382356, 0.473168, 0.197288, 0.390467, 0.287006, 0.895552, 0.0384493, 0.353009, 0.908499, 0.194202, 0.671324, 0.816639, 0.0423796, 0.546441, 0.0342219, 0.289165, 0.797636, 0.800629, 0.0945803, 0.40729, 0.8805, 0.728001, 0.0598097, 0.0407465, 0.461957, 0.968095, 0.81558, 0.288942, 0.303981, 0.0932752, 0.225836, 0.424889, 0.477242, 0.424551, 0.214986, 0.14967, 0.839184, 0.698299, 0.245224, 0.534186, 0.609572, 0.98554, 0.264859, 0.921869, 0.515783, 0.609405, 0.666567, 0.642494, 0.454781, 0.0514512, 0.845558, 0.462281, 0.77345, 0.517576, 0.394532, 0.298696, 0.0637802, 0.939939, 0.897187, 0.64845, 0.654141, 0.5422, 0.378496, 0.112206, 0.0735552, 0.791746, 0.195783, 0.189425, 0.272562, 0.16373, 0.502485, 0.938173, 0.234381, 0.0216515, 0.098865, 0.784636, 0.327652, 0.758766, 0.239961, 0.673782, 0.215241, 0.0551604, 0.638107, 0.199927, 0.603767, 0.68115, 0.830862, 0.786071, 0.999128, 0.707448, 0.615595, 0.579995, 0.16302, 0.849775, 0.358486, 0.809041, 0.58912, 0.815306, 0.816334, 0.837699, 0.492808, 0.371968, 0.947081, 0.299125, 0.946594, 0.00616334, 0.495702, 0.605294, 0.240714, 0.070094, 0.621159, 0.906691, 0.116446, 0.538016, 0.558571, 0.578832, 0.694791, 0.053463, 0.386424, 0.997221, 0.625712, 0.979621, 0.413771, 0.369701, 0.906975, 0.895063, 0.542317, 
0.77892, 0.195462, 0.531385, 0.29435, 0.223329, 0.989861, 0.640537, 0.435093, 0.579481, 0.403141, 0.00389478, 0.132103, 0.644417, 0.621178, 0.954442, 0.0298517, 0.698377, 0.0861056, 0.511701, 0.804046, 0.925313, 0.895933, 0.852232, 0.326931, 0.249424, 0.0391691, 0.764963, 0.307361, 0.340186, 0.860699, 0.00334707, 0.452865, 0.180791, 0.283536, 0.625822, 0.789606, 0.478775, 0.934851, 0.462845, 0.570526, 0.244256, 0.997985, 0.28044, 0.544024, 0.18943, 0.818593, 0.185194, 0.0231956, 0.866088, 0.926126, 0.397896, 0.332372, 0.358482, 0.0417129, 0.984173, 0.695827, 0.332034, 0.122609, 0.203093, 0.314443, 0.437393, 0.296373, 0.509336, 0.244107, 0.234058, 0.690037, 0.284188, 0.221039, 0.361181, 0.91032, 0.547321, 0.975328, 0.068128, 0.0801906, 0.528644, 0.524512, 0.177174, 0.894682, 0.955069, 0.552512, 0.672156, 0.198971, 0.669977, 0.418543, 0.876825, 0.414695, 0.238743, 0.275038, 0.163796, 0.509435, 0.713545, 0.173844, 0.524071, 0.0835044, 0.824222, 0.753913, 0.0644402, 0.900321, 0.815974, 0.836045, 0.847429, 0.471409, 0.326379, 0.629818, 0.444759, 0.350045, 0.912832, 0.884349, 0.549902, 0.242738, 0.850416, 0.0617729, 0.140373, 0.440812, 0.72903, 0.893515, 0.207915, 0.604921, 0.480557, 0.755408, 0.0180358, 0.154815, 0.557772, 0.487432, 0.845579, 0.00213903, 0.645807, 0.497907, 0.563857, 0.862445, 0.180516, 0.931488, 0.737159, 0.648239, 0.0344065, 0.538289, 0.956338, 0.850626, 0.98302, 0.586991, 0.617682, 0.914337, 0.960085, 0.893208, 0.00806309, 0.431762, 0.757136, 0.924223, 0.158815, 0.251725, 0.569288, 0.366741, 0.00675499, 0.292936, 0.92097, 0.94205, 0.0956674, 0.161022, 0.701299, 0.736288, 0.0344687, 0.241482, 0.102898, 0.277884, 0.780629, 0.744666, 0.332055, 0.388952, 0.764887, 0.264898, 0.479994, 0.123352, 0.0341801, 0.781711, 0.88336, 0.368756, 0.196185, 0.701533, 0.425115, 0.946277, 0.280047, 0.557773, 0.0789337, 0.958856, 0.986791, 0.202065, 0.169607, 0.395141, 0.0409483, 0.64695, 0.492192, 0.362814, 0.81448, 0.16285, 0.296021, 0.0499376, 0.190696, 0.63939, 0.0335765, 0.599953, 0.902445, 0.042125, 0.284877, 0.961642, 0.403516, 0.317403, 0.466792, 0.10303, 0.79637, 0.0808633, 0.939454, 0.371804, 0.197288, 0.996496, 0.0102507, 0.029832, 0.538724, 0.26238, 0.14706, 0.368093, 0.268087, 0.949004, 0.266385, 0.471935, 0.391111, 0.0526285, 0.109101, 0.197184, 0.84688, 0.909992, 0.599527, 0.384441, 0.770833, 0.693056, 0.888264, 0.435594, 0.258062, 0.459511, 0.496008, 0.236393, 0.172671, 0.467522, 0.0280065, 0.26842, 0.311446, 0.779192, 0.989998, 0.0225157, 0.736742, 0.40066, 0.422172, 0.789103, 0.494358, 0.880555, 0.196643, 0.615858, 0.294632, 0.214528, 0.876536, 0.103891, 0.0429321, 0.309648, 0.87223, 0.391974, 0.69175, 0.825653, 0.274149, 0.56648, 0.893573, 0.255601, 0.217857, 0.0863715, 0.33166, 0.545067, 0.720115, 0.308524, 0.763465, 0.898536, 0.434902, 0.580537, 0.182824, 0.221024, 0.0161732, 0.746002, 0.262537, 0.0377952, 0.417375, 0.529417, 0.225243, 0.78082, 0.504494, 0.628211, 0.882423, 0.577104, 0.207672, 0.6147, 0.693291, 0.0332987, 0.91873, 0.561389, 0.835958, 0.780318, 0.287622, 0.813036, 0.259987, 0.89446, 0.403804, 0.465006, 0.89041, 0.254563, 0.0405819, 0.308872, 0.332119, 0.398827, 0.314813, 0.00374371, 0.0162247, 0.943943, 0.593976, 0.503378, 0.571029, 0.518946, 0.99204, 0.301182, 0.746012, 0.205308, 0.0311136, 0.0600072, 0.628276, 0.0692109, 0.328635, 0.873159, 0.692923, 0.716217, 0.522212, 0.869064, 0.581255, 0.435035, 0.376923, 0.705279, 0.181347, 0.433562, 0.741623, 0.869501, 0.0269738, 0.366206, 0.703613, 0.198466, 0.317634, 0.301124, 0.0647373, 0.480298, 0.282097, 0.667181, 
0.134705, 0.727296, 0.357044, 0.75068, 0.695344, 0.919044, 0.232232, 0.905678, 0.929539, 0.221667, 0.859635, 0.314313, 0.0181896, 0.207636, 0.0894953, 0.311575, 0.720474, 0.530752, 0.0443453, 0.438311, 0.134759, 0.954803, 0.0332241, 0.236766, 0.871747, 0.581201, 0.268526, 0.462712, 0.298389, 0.865605, 0.185796, 0.947704, 0.143154, 0.0242203, 0.92274, 0.70675, 0.0165168, 0.442303, 0.833985, 0.260064, 0.975228, 0.952075, 0.377776, 0.882164, 0.0293832, 0.640253, 0.439204, 0.280474, 0.145732, 0.581118, 0.451207, 0.255284, 0.781568, 0.324417, 0.52736, 0.416398, 0.415699, 0.111875, 0.249532, 0.440094, 0.36148, 0.344022, 0.568508, 0.380597, 0.774403, 0.522126, 0.24831, 0.656351, 0.457071, 0.559514, 0.464628, 0.212683, 0.185925, 0.191342, 0.793873, 0.694486, 0.726579, 0.449636, 0.34599, 0.914453, 0.934441, 0.297545, 0.504448, 0.947946, 0.601821, 0.561874, 0.960582, 0.354686, 0.122419, 0.109913, 0.504334, 0.343276, 0.412616, 0.348994, 0.0366128, 0.479268, 0.864824, 0.517947, 0.0820124, 0.305641, 0.301882, 0.0627611, 0.415498, 0.0371123, 0.0915617, 0.31759, 0.326098, 0.887276, 0.317074, 0.949069, 0.790414, 0.508064, 0.856512, 0.0717677, 0.155067, 0.458459, 0.464344, 0.858695, 0.237248, 0.424609, 0.0409658, 0.925551, 0.932285, 0.742581, 0.974907, 0.341533, 0.980469, 0.209856, 0.555253, 0.834628, 0.939695, 0.492141, 0.533083, 0.0770837, 0.224348, 0.85108, 0.395584, 0.734646, 0.535954, 0.675955, 0.460483, 0.779008, 0.179741, 0.992827, 0.222586, 0.236466, 0.292186, 0.141888, 0.651591, 0.264082, 0.535197, 0.725217, 0.472508, 0.315369, 0.0442671, 0.826404, 0.664442, 0.210318, 0.781154, 0.768779, 0.130535, 0.789821, 0.0872829, 0.000345506, 0.989456, 0.697414, 0.243103, 0.934252, 0.392799, 0.869709, 0.983473, 0.957166, 0.298384, 0.950966, 0.945291, 0.791484, 0.611383, 0.00773862, 0.546973, 0.743926, 0.309783, 0.593539, 0.000150709, 0.943393, 0.814505, 0.292044, 0.556328, 0.150154, 0.952166, 0.814909, 0.502722, 0.398686, 0.328962, 0.766985, 0.920527, 0.427594, 0.659691, 0.0124402, 0.469223, 0.830091, 0.673413, 0.413477, 0.957588, 0.749862, 0.318045, 0.387874, 0.489802, 0.790802, 0.496226, 0.977664, 0.313017, 0.512363, 0.0335574, 0.882417, 0.986866, 0.25166, 0.773055, 0.44442, 0.156512, 0.0503038, 0.258552, 0.717986, 0.452502, 0.495818, 0.883668, 0.882702, 0.373779, 0.356799, 0.484667, 0.812132, 0.187131, 0.157797, 0.212676, 0.26866, 0.40355, 0.824234, 0.740606, 0.306995, 0.317752, 0.423819, 0.517614, 0.196941, 0.571482, 0.925737, 0.123843, 0.129535, 0.400391, 0.491381, 0.650341, 0.72332, 0.196269, 0.630798, 0.495517, 0.622336, 0.988314, 0.0691449, 0.450294, 0.0676124, 0.168269, 0.207228, 0.489192, 0.6602, 0.687884, 0.0119344, 0.70527, 0.983733, 0.982384, 0.205083, 0.861384, 0.787663, 0.742554, 0.206209, 0.212616, 0.268012, 0.390901, 0.65883, 0.68938, 0.22444, 0.0192429, 0.388283, 0.429813, 0.556358, 0.683561, 0.386254, 0.782344, 0.588964, 0.55072, 0.748183, 0.204753, 0.096801, 0.747743, 0.441655, 0.365034, 0.845842, 0.495637, 0.728004, 0.500585, 0.998266, 0.683184, 0.529804, 0.279395, 0.0847966, 0.911018, 0.463934, 0.923531, 0.885698, 0.104163, 0.990858, 0.436038, 0.51552, 0.408262, 0.892803, 0.0476812, 0.0586898, 0.864969, 0.573018, 0.609611, 0.467167, 0.670962, 0.651588, 0.45333, 0.957268, 0.633035, 0.186001, 0.494991, 0.280636, 0.772204, 0.743438, 0.347833, 0.873078, 0.753847, 0.685593, 0.744352, 0.318947, 0.000933816, 0.106924, 0.536107, 0.639829, 0.206119, 0.290089, 0.514712, 0.274866, 0.871336, 0.802907, 0.922184, 0.118177, 0.310597, 0.953389, 0.0724043, 0.199281, 0.386491, 0.467763, 0.430769, 
0.315458, 0.0265129, 0.467898, 0.250111, 0.804908, 0.191977, 0.645413, 0.472067, 0.135614, 0.461252, 0.734281, 0.59659, 0.825864, 0.532474, 0.998466, 0.969667, 0.331398, 0.448755, 0.00692174, 0.153388, 0.393273, 0.313802, 0.372704, 0.190606, 0.304895, 0.671254, 0.160957, 0.800807, 0.278713, 0.436136, 0.106469, 0.0285365, 0.897001, 0.580738, 0.370273, 0.419885, 0.868881, 0.631461, 0.549333, 0.883746, 0.397048, 0.169611, 0.149362, 0.262929, 0.866222, 0.738744, 0.553673, 0.940457, 0.507536, 0.449506, 0.715586, 0.734003, 0.949907, 0.975064, 0.962441, 0.675457, 0.78993, 0.359971, 0.308689, 0.734226, 0.754878, 0.441995, 0.58604, 0.888753, 0.541922, 0.804717, 0.939941, 0.511089, 0.059096, 0.309463, 0.186918, 0.349145, 0.377007, 0.192307, 0.250198, 0.843252, 0.868986, 0.39968, 0.238747, 0.845026, 0.511491, 0.279175, 0.386605, 0.606426, 0.254615, 0.901591, 0.0902917, 0.0290092, 0.847245, 0.113106, 0.0610486, 0.920226, 0.865475, 0.392629, 0.841488, 0.207402, 0.2355, 0.686044, 0.167998, 0.979337, 0.153475, 0.757106, 0.849751, 0.63244, 0.0188624, 0.560794, 0.992078, 0.0909231, 0.0885458, 0.849227, 0.996531, 0.536453, 0.446947, 0.88522, 0.364139, 0.460961, 0.474815, 0.398555, 0.864047, 0.648844, 0.986066, 0.317278, 0.45119, 0.457284, 0.862431, 0.200464, 0.586896, 0.507887, 0.478319, 0.253593, 0.142232, 0.22894, 0.790407, 0.717963, 0.45883, 0.525727, 0.610991, 0.0606806, 0.536986, 0.593655, 0.850723, 0.305421, 0.100182, 0.527007, 0.765736, 0.858126, 0.595021, 0.967857, 0.761493, 0.696527, 0.449307, 0.932336, 0.299135, 0.358472, 0.228388, 0.242661, 0.339309, 0.805666, 0.721293, 0.560974, 0.057952, 0.432618, 0.00532646, 0.247144, 0.933411, 0.288424, 0.65877, 0.111568, 0.52991, 0.246124, 0.816316, 0.989767, 0.425535, 0.357157, 0.804862, 0.0871632, 0.764356, 0.150044, 0.37453, 0.117095, 0.907181, 0.746269, 0.503772, 0.876714, 0.956733, 0.850514, 0.882735, 0.209248, 0.534389, 0.836352, 0.872055, 0.264027, 0.0105678, 0.784881, 0.523245, 0.530977, 0.249264, 0.296541, 0.313034, 0.0419802, 0.306476, 0.748028, 0.0510997, 0.394418, 0.0434473, 0.41069, 0.4958, 0.82988, 0.993508, 0.0226086, 0.536659, 0.601497, 0.278772, 0.317662, 0.683175, 0.428438, 0.955904, 0.626727, 0.707521, 0.901637, 0.531014, 0.868496, 0.566689, 0.0401177, 0.135806, 0.468599, 0.733522, 0.673194, 0.0231164, 0.690726, 0.727451, 0.481481, 0.190158, 0.961606, 0.270164, 0.115293, 0.0736417, 0.187547, 0.00130081, 0.20917, 0.529096, 0.872191, 0.168243, 0.0367341, 0.218131, 0.368563, 0.263263, 0.960075, 0.755443, 0.925066, 0.262938, 0.964269, 0.354926, 0.382723, 0.934509, 0.769771, 0.293814, 0.228827, 0.0643684, 0.454316, 0.968496, 0.502272, 0.89256, 0.451724, 0.410523, 0.113414, 0.435572, 0.491274, 0.150822, 0.0316294, 0.541724, 0.785892, 0.359348, 0.453757, 0.766495, 0.749102, 0.303961, 0.367119, 0.990696, 0.454912, 0.933072, 0.0308887, 0.486821, 0.424341, 0.731196, 0.432225, 0.992916, 0.214123, 0.644641, 0.372599, 0.951961, 0.0972326, 0.154543, 0.866991, 0.596344, 0.758693, 0.362703, 0.185587, 0.756834, 0.0522437, 0.406655, 0.286283, 0.624453, 0.65098, 0.126626, 0.50958, 0.676144, 0.241151, 0.704983, 0.00092231, 0.592649, 0.45497, 0.478202, 0.944385, 0.618972, 0.660407, 0.882134, 0.598851, 0.0721889, 0.428745, 0.0795186, 0.148123, 0.886525, 0.162734, 0.824951, 0.683045, 0.503343, 0.589938, 0.0723046, 0.115081, 0.0201089, 0.322682, 0.129508, 0.618312, 0.030028, 0.806149, 0.694088, 0.109434, 0.154758, 2.56987e-05, 0.204626, 0.616414, 0.226096, 0.386395, 0.779831, 0.901758, 0.163856, 0.648934, 0.338192, 0.202123, 0.5336, 0.730557, 0.558773, 0.824843, 
0.727957, 0.620088, 0.878383, 0.0985933, 0.708119, 0.643123, 0.91529, 0.79023, 0.73863, 0.954561, 0.295152, 0.589698, 0.317382, 0.60369, 0.195049, 0.898866, 0.606234, 0.194511, 0.447969, 0.322482, 0.586575, 0.249628, 0.873322, 0.594945, 0.9835, 0.64875, 0.912699, 0.456709, 0.348595, 0.595872, 0.185971, 0.086924, 0.0262257, 0.695916, 0.941024, 0.461552, 0.323183, 0.707817, 0.253687, 0.608315, 0.477998, 0.897206, 0.653489, 0.749276, 0.393501, 0.354142, 0.854916, 0.460109, 0.41504, 0.584033, 0.952008, 0.361548, 0.784401, 0.377392, 0.20778, 0.587108, 0.689111, 0.0389034, 0.425034, 0.385394, 0.248345, 0.380086, 0.575475, 0.638013, 0.0801842, 0.0997491, 0.364944, 0.599061, 0.0702159, 0.0402721, 0.0991669, 0.493616, 0.412747, 0.800816, 0.758369, 0.665841, 0.902528, 0.0418127, 0.1759, 0.483507, 0.387907, 0.8783, 0.41027, 0.462613, 0.32364, 0.18377, 0.250224, 0.278371, 0.9717, 0.500004, 0.251009, 0.769054, 0.349166, 0.512457, 0.933079, 0.463936, 0.366296, 0.000990404, 0.654282, 0.879606, 0.564059, 0.270697, 0.806186, 0.341294, 0.777393, 0.590548, 0.978357, 0.653695, 0.707876, 0.715257, 0.499117, 0.769642, 0.018444, 0.425349, 0.191002, 0.653968, 0.892596, 0.0685558, 0.043615, 0.69562, 0.303121, 0.0711234, 0.614355, 0.97748, 0.155872, 0.843808, 0.283898, 0.023926, 0.798128, 0.291579, 0.512036, 0.640579, 0.0650972, 0.865339, 0.989509, 0.820208, 0.122347, 0.905576, 0.710012, 0.472021, 0.0729493, 0.8676, 0.351875, 0.597197, 0.923503, 0.344575, 0.00368959, 0.101002, 0.865642, 0.417901, 0.336183, 0.403366, 0.609777, 0.229407, 0.338873, 0.92018, 0.341668, 0.177561, 0.711885, 0.721438, 0.151257, 0.897723, 0.734261, 0.828299, 0.654796, 0.610168, 0.322614, 0.057609, 0.669868, 0.0351103, 0.497851, 0.899341, 0.659735, 0.833629, 0.247816, 0.647905, 0.818258, 0.997715, 0.784559, 0.648556, 0.283222, 0.885833, 0.00499132, 0.187462, 0.926025, 0.48638, 0.610984, 0.58284, 0.536159, 0.789185, 0.899492, 0.38008, 0.627457, 0.375201, 0.888226, 0.723031, 0.555612, 0.649365, 0.83337, 0.0822123, 0.740998, 0.257561, 0.0729339, 0.515953, 0.64114, 0.827667, 0.521557, 0.628335, 0.467915, 0.0776531, 0.362262, 0.379132, 0.280284, 0.810736, 0.361727, 0.49232, 0.689625, 0.304187, 0.627473, 0.633657, 0.794066, 0.63248, 0.210397, 0.102277, 0.928669, 0.483447, 0.40887, 0.913735, 0.483358, 0.503764, 0.254536, 0.596166, 0.143644, 0.904753, 0.0256079, 0.756253, 0.169432, 0.0729975, 0.109243, 0.0309298, 0.0890768, 0.869929, 0.756607, 0.814741, 0.150594, 0.470673, 0.498989, 0.398265, 0.94255, 0.868611, 0.461242, 0.43474, 0.621181, 0.447873, 0.952126, 0.903815, 0.324511, 0.478995, 0.682979, 0.621209, 0.292499, 0.358295, 0.129948, 0.804466, 0.0499845, 0.0481898, 0.782715, 0.542998, 0.765441, 0.113682, 0.134923, 0.899975, 0.155663, 0.208583, 0.484905, 0.821416, 0.268545, 0.481221, 0.584605, 0.861309, 0.920582, 0.871126, 0.269647, 0.439819, 0.126401, 0.482138, 0.625803, 0.0798102, 0.0392741, 0.517811, 0.426276, 0.713428, 0.287859, 0.831379, 0.464117, 0.3917, 0.574551, 0.0105812, 0.481456, 0.513039, 0.563318, 0.552785, 0.342518, 0.384482, 0.696738, 0.945141, 0.0719859, 0.302028, 0.604319, 0.954776, 0.36243, 0.290512, 0.504883, 0.379147, 0.125761, 0.161326, 0.591145, 0.566471, 0.217208, 0.553199, 0.667357, 0.376792, 0.607599, 0.0254957, 0.121186, 0.763961, 0.0768277, 0.35534, 0.838269, 0.985063, 0.357764, 0.181454, 0.141499, 0.897183, 0.286145, 0.249903, 0.334809, 0.966785, 0.149642, 0.289421, 0.916148, 0.35175, 0.317636, 0.806016, 0.190981, 0.813306, 0.395627, 0.95738, 0.0939752, 0.361082, 0.899597, 0.486998, 0.760254, 0.15083, 0.837766, 
0.444012, 0.738797, 0.678772, 0.129165, 0.3699, 0.75069, 0.466849, 0.974521, 0.732234, 0.0783179, 0.724613, 0.218733, 0.831013, 0.583469, 0.0475636, 0.948548, 0.522795, 0.485984, 0.409257, 0.989019, 0.260101, 0.344105, 0.58524, 0.837142, 0.623739, 0.6157, 0.85168, 0.627927, 0.0602272, 0.505626, 0.955254, 0.570295, 0.558829, 0.600042, 0.999514, 0.833246, 0.24374, 0.0154709, 0.848761, 0.9958, 0.72799, 0.813976, 0.325564, 0.350278, 0.466454, 0.302844, 0.890359, 0.313658, 0.823481, 0.296992, 0.956713, 0.0910392, 0.650355, 0.557088, 0.095005, 0.352954, 0.156921, 0.309125, 0.71187, 0.141045, 0.256612, 0.310431, 0.679947, 0.431878, 0.373703, 0.0504158, 0.0102907, 0.974581, 0.277101, 0.864156, 0.628683, 0.67264, 0.842547, 0.160469, 0.983212, 0.103462, 0.479975, 0.32402, 0.739311, 0.379698, 0.634078, 0.997384, 0.824819, 0.541734, 0.348484, 0.725955, 0.731188, 0.642025, 0.440999, 0.0768537, 0.129418, 0.478026, 0.132223, 0.305538, 0.201746, 0.156575, 0.633534, 0.752542, 0.904999, 0.379745, 0.880192, 0.387398, 0.712953, 0.956166, 0.913473, 0.599241, 0.685988, 0.676363, 0.49636, 0.876042, 0.226236, 0.643089, 0.833073, 0.425287, 0.218588, 0.453172, 0.301031, 0.129325, 0.672147, 0.0501358, 0.767502, 0.791748, 0.0583502, 0.279158, 0.931757, 0.785566, 0.347409, 0.641288, 0.524969, 0.599637, 0.84177, 0.347914, 0.0694182, 0.564342, 0.968, 0.824233, 0.578435, 0.858182, 0.00754457, 0.187994, 0.688498, 0.556711, 0.882394, 0.801016, 0.986133, 0.656237, 0.265303, 0.681323, 0.0731858, 0.602838, 0.385357, 0.649846, 0.139613, 0.0904347, 0.404407, 0.207975, 0.973413, 0.701989, 0.824314, 0.0984482, 0.679328, 0.626865, 0.269018, 0.957835, 0.842923, 0.331784, 0.412132, 0.520471, 0.497815, 0.822525, 0.871805, 0.0733129, 0.425953, 0.796373, 0.966112, 0.0627229, 0.959532, 0.41307, 0.965177, 0.823462, 0.12273, 0.885688, 0.33166, 0.880389, 0.706374, 0.341835, 0.689376, 0.685831, 0.771321, 0.73638, 0.141495, 0.302706, 0.354543, 0.131411, 0.635745, 0.214079, 0.527011, 0.33498, 0.218695, 0.721317, 0.4567, 0.909037, 0.300844, 0.525485, 0.767189, 0.767813, 0.240571, 0.676688, 0.95017, 0.993843, 0.74748, 0.763925, 0.975675, 0.156042, 0.297274, 0.706146, 0.720106, 0.639694, 0.232139, 0.608569, 0.513802, 0.172265, 0.462801, 0.472826, 0.424445, 0.837643, 0.488186, 0.23021, 0.803263, 0.906617, 0.402349, 0.966051, 0.0956306, 0.220729, 0.557124, 0.220361, 0.393405, 0.773573, 0.246708, 0.505566, 0.283299, 0.772797, 0.374605, 0.734612, 0.5373, 0.281068, 0.757695, 0.218682, 0.981331, 0.599125, 0.542024, 0.428893, 0.420057, 0.118232, 0.207046, 0.65774, 0.964914, 0.858489, 0.734383, 0.369598, 0.347372, 0.784544, 0.773473, 0.869541, 0.893491, 0.362739, 0.663225, 0.860732, 0.67147, 0.952458, 0.466423, 0.387925, 0.778971, 0.114828, 0.998257, 0.0589432, 0.616061, 0.126458, 0.849906, 0.546542, 0.297723, 0.666959, 0.419942, 0.772989, 0.247082, 0.0619857, 0.988765, 0.970786, 0.262429, 0.856642, 0.0921937, 0.296052, 0.673798, 0.656831, 0.956036, 0.802563, 0.113222, 0.399848, 0.292586, 0.865429, 0.609193, 0.141084, 0.177469, 0.437117, 0.905161, 0.89846, 0.389897, 0.052486, 0.697587, 0.119383, 0.395026, 0.0976641, 0.496683, 0.7955, 0.210444, 0.279085, 0.195578, 0.520628, 0.779125, 0.394464, 0.453638, 0.574049, 0.112393, 0.778453, 0.442345, 0.888885, 0.226023, 0.360812, 0.781228, 0.788189, 0.282583, 0.685241, 0.835446, 0.524939, 0.361392, 0.540987, 0.395676, 0.566165, 0.939502, 0.955474, 0.18314, 0.3015, 0.270143, 0.659835, 0.836446, 0.292781, 0.814734, 0.532113, 0.111528, 0.459271, 0.196421, 0.787388, 0.103804, 0.870897, 0.359717, 0.585971, 0.163043, 
0.451947, 0.926129, 0.789389, 0.166518, 0.00893984, 0.415228, 0.240198, 0.116047, 0.470002, 0.218984, 0.727727, 0.398721, 0.521073, 0.158229, 0.144604, 0.761999, 0.38401, 0.432494, 0.653375, 0.0730347, 0.616424, 0.280337, 0.862444, 0.440583, 0.559214, 0.147057, 0.704923, 0.93255, 0.263881, 0.534977, 0.920313, 0.064911, 0.106614, 0.624643, 0.555238, 0.784854, 0.727642, 0.203985, 0.826267, 0.800398, 0.904129, 0.518111, 0.605353, 0.996529, 0.118805, 0.730681, 0.419901, 0.562658, 0.871913, 0.293172, 0.764371, 0.633763, 0.569482, 0.157068, 0.353686, 0.682644, 0.593726, 0.215397, 0.144348, 0.466125, 0.649096, 0.545312, 0.0118634, 0.483801, 0.272915, 0.337477, 0.60647, 0.443478, 0.747968, 0.219216, 0.247628, 0.632165, 0.211532, 0.316309, 0.955708, 0.795433, 0.594182, 0.365671, 0.33438, 0.286422, 0.431039, 0.809342, 0.593367, 0.959696, 0.955307, 0.31016, 0.182294, 0.299192, 0.120918, 0.929864, 0.689391, 0.23631, 0.58615, 0.886487, 0.351416, 0.457955, 0.190829, 0.156397, 0.726705, 0.297811, 0.150887, 0.057679, 0.360501, 0.403437, 0.25916, 0.10528, 0.758308, 0.72983, 0.904402, 0.813209, 0.785291, 0.704828, 0.300872, 0.76364, 0.337292, 0.656557, 0.45731, 0.485393, 0.338694, 0.405888, 0.783928, 0.603029, 0.0763875, 0.498216, 0.0564534, 0.744232, 0.555828, 0.584224, 0.368689, 0.592171, 0.385448, 0.0590442, 0.567869, 0.0232154, 0.374745, 0.513172, 0.594496, 0.961366, 0.0157991, 0.13877, 0.711031, 0.995987, 0.231812, 0.695821, 0.702872, 0.556626, 0.705109, 0.329319, 0.493629, 0.576941, 0.449903, 0.822009, 0.168521, 0.619943, 0.017456, 0.710251, 0.310071, 0.0651204, 0.058773, 0.365577, 0.152726, 0.917356, 0.870767, 0.660269, 0.609751, 0.0838286, 0.804317, 0.816216, 0.485209, 0.581226, 0.524509, 0.942802, 0.329456, 0.593995, 0.322137, 0.413573, 0.54018, 0.613874, 0.845805, 0.539607, 0.0206375, 0.528865, 0.0821751, 0.34753, 0.910612, 0.466336, 0.838856, 0.861694, 0.0698929, 0.672051, 0.594492, 0.728451, 0.397881, 0.00461097, 0.702363, 0.188952, 0.939421, 0.946213, 0.960378, 0.389982, 0.520646, 0.24928, 0.29729, 0.337122, 0.690893, 0.427248, 0.188455, 0.795957, 0.474583, 0.981236, 0.151678, 0.41763, 0.165036, 0.371617, 0.235073, 0.30582, 0.133022, 0.735801, 0.451906, 0.839734, 0.668141, 0.896027, 0.981395, 0.884464, 0.0843179, 0.671041, 0.882248, 0.653632, 0.898526, 0.365433, 0.240294, 0.775207, 0.966018, 0.557077, 0.743962, 0.677631, 0.808168, 0.574465, 0.727681, 0.380411, 0.550759, 0.321366, 0.276032, 0.788655, 0.00279036, 0.993586, 0.980278, 0.765003, 0.725823, 0.862131, 0.0946915, 0.0364604, 0.340569, 0.735472, 0.571962, 0.214603, 0.595016, 0.857064, 0.874329, 0.417231, 0.582463, 0.667877, 0.197306, 0.123092, 0.884843, 0.403445, 0.440139, 0.747906, 0.93275, 0.814713, 0.80654, 0.391511, 0.360735, 0.555696, 0.765015, 0.66295, 0.575404, 0.648329, 0.500322, 0.783604, 0.21304, 0.960552, 0.980978, 0.539858, 0.13802, 0.28033, 0.39874, 0.227902, 0.209034, 0.770591, 0.301531, 0.368787, 0.801642, 0.279324, 0.907473, 0.501665, 0.307345, 0.968106, 0.933816, 0.459697, 0.770253, 0.581576, 0.0688806, 0.259007, 0.327849, 0.178024, 0.222708, 0.912022, 0.615995, 0.102392, 0.594242, 0.749859, 0.538833, 0.968877, 0.659977, 0.461957, 0.372547, 0.0160606, 0.228645, 0.782572, 0.405235, 0.387843, 0.914316, 0.751925, 0.236772, 0.729338, 0.561382, 0.553663, 0.725962, 0.519571, 0.440092, 0.168168, 0.206103, 0.377336, 0.334977, 0.883353, 0.216671, 0.877449, 0.838604, 0.273019, 0.597619, 0.366431, 0.915683, 0.655653, 0.252677, 0.886238, 0.542183, 0.502111, 0.608109, 0.326259, 0.0906759, 0.679254, 0.0147414, 0.777385, 0.00221018, 
0.241277, 0.775484, 0.347707, 0.854678, 0.636974, 0.499864, 0.161999, 0.327853, 0.635921, 0.299185, 0.874256, 0.830407, 0.735375, 0.632697, 0.906426, 0.199595, 0.273056, 0.236413, 0.32598, 0.645958, 0.801011, 0.457271, 0.985472, 0.246888, 0.421704, 0.871138, 0.884915, 0.404577, 0.391836, 0.877224, 0.847255, 0.872369, 0.969852, 0.892759, 0.400235, 0.347586, 0.161509, 0.0347511, 0.13587, 0.455344, 0.995971, 0.712554, 0.553124, 0.106507, 0.0318053, 0.906824, 0.555715, 0.296352, 0.0706357, 0.446166, 0.952881, 0.586039, 0.745597, 0.888554, 0.602956, 0.515031, 0.28764, 0.0158423, 0.316142, 0.842837, 0.154322, 0.0279544, 0.410253, 0.306332, 0.824045, 0.919182, 0.935183, 0.0303386, 0.397972, 0.50727, 0.832767, 0.60966, 0.26604, 0.708126, 0.610427, 0.751626, 0.777763, 0.804541, 0.249522, 0.27554, 0.88482, 0.307588, 0.345245, 0.730903, 0.978285, 0.50753, 0.9094, 0.482313, 0.296566, 0.609479, 0.57509, 0.29013, 0.691454, 0.886938, 0.0939114, 0.841611, 0.489405, 0.910133, 0.919384, 0.145776, 0.53278, 0.960463, 0.703907, 0.735499, 0.180398, 0.744317, 0.739894, 0.974108, 0.253742, 0.10396, 0.185484, 0.309632, 0.30132, 0.453218, 0.950699, 0.171613, 0.2543, 0.431187, 0.0848507, 0.436987, 0.698256, 0.874196, 0.86954, 0.187222, 0.394914, 0.605215, 0.419544, 0.152479, 0.199695, 0.0773146, 0.682431, 0.502337, 0.569457, 0.709919, 0.720315, 0.444999, 0.449921, 0.914747, 0.725205, 0.0537083, 0.206684, 0.419841, 0.149202, 0.269791, 0.181819, 0.577072, 0.591886, 0.364795, 0.953729, 0.521686, 0.581989, 0.999322, 0.636341, 0.829721, 0.874806, 0.705226, 0.795947, 0.215506, 0.0102638, 0.0343296, 0.800241, 0.192067, 0.484436, 0.629007, 0.780006, 0.00131953, 0.485003, 0.942173, 0.120313, 0.341539, 0.214798, 0.188026, 0.692592, 0.713431, 0.691049, 0.630346, 0.4873, 0.571554, 0.787812, 0.331332, 0.0872181, 0.130659, 0.394247, 0.09801, 0.695021, 0.989159, 0.837546, 0.200426, 0.240456, 0.922907, 0.270908, 0.65915, 0.158174, 0.00334375, 0.746574, 0.851048, 0.587035, 0.0633085, 0.913169, 0.777554, 0.236667, 0.234207, 0.17911, 0.186361, 0.903656, 0.333015, 0.875141, 0.645586, 0.318849, 0.664855, 0.878928, 0.762863, 0.977254, 0.0172621, 0.201766, 0.862212, 0.574036, 0.244244, 0.731959, 0.40921, 0.494382, 0.193301, 0.570127, 0.151417, 0.74944, 0.126903, 0.185286, 0.453312, 0.936876, 0.327583, 0.134956, 0.50641, 0.459594, 0.424571, 0.990207, 0.064337, 0.106534, 0.526547, 0.8594, 0.332429, 0.0639371, 0.772424, 0.826797, 0.618949, 0.772937, 0.53056, 0.00973798, 0.39374, 0.884778, 0.245443, 0.06919, 0.594798, 0.184776, 0.364028, 0.873345, 0.580107, 0.158233, 0.0446554, 0.69395, 0.971424, 0.574679, 0.0773261, 0.944376, 0.12155, 0.287307, 0.176213, 0.518498, 0.232103, 0.792641, 0.297863, 0.276815, 0.561422, 0.994374, 0.191645, 0.804779, 0.981542, 0.636961, 0.643547, 0.744312, 0.604157, 0.847172, 0.579992, 0.496372, 0.858127, 0.369366, 0.205903, 0.730069, 0.479403, 0.868542, 0.136488, 0.90674, 0.529816, 0.781855, 0.0649976, 0.668096, 0.494856, 0.0638234, 0.360958, 0.552289, 0.238966, 0.49305, 0.190249, 0.761841, 0.407433, 0.80929, 0.555612, 0.483822, 0.471128, 0.262473, 0.321644, 0.437509, 0.671387, 0.307411, 0.121059, 0.720705, 0.932223, 0.898239, 0.599308, 0.125401, 0.368328, 0.353016, 0.328337, 0.937212, 0.514395, 0.17328, 0.359606, 0.711442, 0.232667, 0.26698, 0.491444, 0.74731, 0.930641, 0.797538, 0.863282, 0.2168, 0.502098, 0.721903, 0.553448, 0.466544, 0.303485, 0.294104, 0.183489, 0.65499, 0.802825, 0.935142, 0.984117, 0.383918, 0.188537, 0.331175, 0.550642, 0.471258, 0.762299, 0.919209, 0.745168, 0.914067, 0.0416888, 0.0975676, 
0.0931216, 0.607324, 0.178878, 0.12279, 0.893096, 0.690991, 0.900308, 0.800631, 0.926555, 0.711897, 0.288533, 0.332376, 0.00905249, 0.84123, 0.960575, 0.0319604, 0.369096, 0.767044, 0.715626, 0.525262, 0.252224, 0.565997, 0.343859, 0.158063, 0.138894, 0.689129, 0.342809, 0.183794, 0.82614, 0.991731, 0.755848, 0.0108939, 0.714842, 0.800775, 0.560252, 0.692211, 0.694404, 0.311101, 0.135252, 0.866966, 0.38565, 0.347737, 0.106284, 0.93013, 0.586545, 0.264152, 0.446422, 0.705464, 0.185858, 0.488937, 0.251186, 0.840572, 0.14205, 0.364426, 0.469244, 0.570762, 0.552092, 0.426965, 0.841185, 0.132047, 0.375376, 0.887949, 0.541828, 0.647123, 0.560808, 0.495915, 0.0962973, 0.0385913, 0.553713, 0.874003, 0.474506, 0.000777658, 0.444272, 0.884977, 0.688203, 0.212003, 0.0802666, 0.235195, 0.397631, 0.0417011, 0.214066, 0.579276, 0.54287, 0.344086, 0.86551, 0.583989, 0.312997, 0.515879, 0.104037, 0.110049, 0.222868, 0.293697, 0.625381, 0.010964, 0.777401, 0.701773, 0.797638, 0.0793351, 0.820053, 0.407289, 0.226114, 0.90848, 0.0343324, 0.45694, 0.035028, 0.641701, 0.947505, 0.699581, 0.849871, 0.547629, 0.755379, 0.0390888, 0.311657, 0.812492, 0.020861, 0.0942137, 0.680174, 0.176826, 0.322858, 0.687073, 0.854345, 0.381066, 0.616549, 0.749572, 0.767614, 0.528256, 0.511663, 0.221588, 0.724295, 0.511734, 0.768513, 0.51312, 0.122633, 0.522321, 0.805992, 0.776932, 0.676213, 0.208613, 0.353855, 0.76291, 0.448692, 0.498069, 0.830893, 0.913691, 0.0222994, 0.0578672, 0.750894, 0.926357, 0.987307, 0.976556, 0.235189, 0.516564, 0.0200475, 0.906917, 0.350626, 0.213812, 0.283041, 0.589167, 0.68681, 0.561795, 0.992766, 0.942088, 0.310273, 0.490602, 0.949859, 0.275675, 0.522123, 0.719316, 0.370759, 0.365939, 0.598772, 0.626904, 0.0987906, 0.526551, 0.478697, 0.559602, 0.0273137, 0.148764, 0.271684, 0.965364, 0.363064, 0.987974, 0.650176, 0.617937, 0.00130275, 0.402278, 0.837153, 0.736941, 0.205603, 0.449054, 0.84464, 0.451436, 0.131468, 0.891429, 0.177098, 0.36259, 0.88528, 0.223152, 0.216514, 0.213914, 0.136098, 0.75117, 0.138026, 0.0987427, 0.792407, 0.0353701, 0.0906482, 0.639022, 0.622591, 0.895374, 0.26032, 0.345129, 0.0747699, 0.738453, 0.333885, 0.919971, 0.352252, 0.578604, 0.937149, 0.144683, 0.253752, 0.256942, 0.457853, 0.471414, 0.507352, 0.892234, 0.909228, 0.00329278, 0.0180677, 0.796181, 0.20146, 0.142061, 0.987358, 0.76974, 0.167349, 0.143547, 0.508355, 0.822919, 0.645006, 0.571511, 0.298048, 0.460747, 0.394257, 0.501418, 0.615513, 0.6513, 0.983713, 0.326694, 0.755208, 0.0988431, 0.0870775, 0.0398203, 0.249378, 0.542663, 0.732222, 0.704023, 0.435363, 0.7424, 0.901974, 0.688849, 0.0487533, 0.445071, 0.622934, 0.3675, 0.547532, 0.197576, 0.311719, 0.00565676, 0.819191, 0.392192, 0.819148, 0.805763, 0.294271, 0.6616, 0.427193, 0.136726, 0.601793, 0.094568, 0.911729, 0.16542, 0.0812021, 0.805632, 0.26551, 0.0902784, 0.308651, 0.32682, 0.215467, 0.96553, 0.789361, 0.449022, 0.445233, 0.0265775, 0.418994, 0.151084, 0.135968, 0.92182, 0.134002, 0.434998, 0.487732, 0.642278, 0.258272, 0.539513, 0.0268435, 0.741964, 0.540567, 0.621808, 0.408971, 0.768853, 0.570809, 0.969586, 0.282704, 0.925872, 0.325744, 0.362651, 0.833404, 0.232724, 0.403817, 0.567664, 0.952683, 0.0419014, 0.353897, 0.191945, 0.998043, 0.159514, 0.231205, 0.275391, 0.42408, 0.178731, 0.498255, 0.362033, 0.841557, 0.636187, 0.244842, 0.813572, 0.295401, 0.197305, 0.965915, 0.608717, 0.820573, 0.0090073, 0.478346, 0.0721363, 0.736803, 0.923146, 0.641174, 0.985651, 0.0365937, 0.515331, 0.473106, 0.390553, 0.252486, 0.731916, 0.374309, 0.223908, 
0.637786, 0.925047, 0.526012, 0.98548, 0.924257, 0.312522, 0.133142, 0.0482379, 0.473875, 0.128306, 0.393488, 0.679431, 0.383353, 0.603976, 0.00586202, 0.272987, 0.797249, 0.898166, 0.445249, 0.0574907, 0.134751, 0.616874, 0.0232398, 0.175256, 0.321776, 0.1898, 0.449263, 0.33701, 0.10175, 0.206535, 0.10799, 0.169666, 0.462349, 0.454415, 0.392774, 0.900612, 0.192834, 0.0137032, 0.0178092, 0.555685, 0.224059, 0.35258, 0.887389, 0.485591, 0.617759, 0.623354, 0.340261, 0.878144, 0.891336, 0.701189, 0.880464, 0.381086, 0.00327111, 0.108989, 0.707695, 0.90182, 0.388077, 0.789142, 0.911373, 0.666312, 0.571874, 0.865994, 0.0116219, 0.114747, 0.870926, 0.859643, 0.00323219, 0.0135644, 0.823899, 0.051973, 0.264082, 0.998878, 0.0403321, 0.846126, 0.064427, 0.163861, 0.593141, 0.748308, 0.904944, 0.282592, 0.0527456, 0.886154, 0.626503, 0.528129, 0.230604, 0.824467, 0.140143, 0.0196574, 0.23524, 0.69417, 0.435249, 0.630171, 0.846594, 0.654387, 0.325503, 0.979048, 0.728364, 0.192634, 0.834208, 0.0565809, 0.804196, 0.0184922, 0.0137656, 0.782126, 0.147527, 0.0799028, 0.64633, 0.237645, 0.670488, 0.422177, 0.265002, 0.65076, 0.199112, 0.0910186, 0.816517, 0.518889, 0.6201, 0.412025, 0.492492, 0.491252, 0.985639, 0.7657, 0.150037, 0.147065, 0.0873867, 0.872067, 0.21046, 0.911581, 0.0925562, 0.519009, 0.969517, 0.0459625, 0.664883, 0.762107, 0.833484, 0.668001, 0.406106, 0.0284692, 0.356254, 0.170324, 0.927495, 0.317833, 0.00615591, 0.56578, 0.692096, 0.248763, 0.750471, 0.405881, 0.919707, 0.977053, 0.821874, 0.0312805, 0.471579, 0.268082, 0.880038, 0.870717, 0.963674, 0.206997, 0.415366, 0.549987, 0.846256, 0.34677, 0.20934, 0.542081, 0.112321, 0.675948, 0.518671, 0.656701, 0.621171, 0.548739, 0.837395, 0.936235, 0.273487, 0.421024, 0.145012, 0.0126815, 0.670202, 0.970681, 0.805192, 0.501387, 0.9467, 0.290478, 0.808476, 0.931461, 0.871901, 0.611392, 0.515823, 0.583067, 0.693718, 0.584383, 0.819427, 0.946328, 0.917942, 0.47207, 0.531398, 0.0807021, 0.767138, 0.66715, 0.642545, 0.946657, 0.216715, 0.510254, 0.654515, 0.497081, 0.48995, 0.538192, 0.201574, 0.752296, 0.391295, 0.305656, 0.20921, 0.330178, 0.984512, 0.931774, 0.769527, 0.906174, 0.284548, 0.421696, 0.819102, 0.255651, 0.544797, 0.810979, 0.912587, 0.673852, 0.750697, 0.233697, 0.35362, 0.956389, 0.959141, 0.0190663, 0.289421, 0.317616, 0.0699108, 0.235855, 0.784889, 0.136121, 0.486156, 0.112423, 0.363548, 0.149246, 0.175064, 0.439234, 0.801776, 0.286258, 0.728427, 0.17595, 0.307135, 0.935157, 0.749456, 0.193305, 0.368802, 0.720708, 0.338627, 0.204435, 0.865355, 0.0702231, 0.538829, 0.389271, 0.821794, 0.508844, 0.855184, 0.206595, 0.402728, 0.306347, 0.129643, 0.750954, 0.0773251, 0.333561, 0.37314, 0.617978, 0.144395, 0.382954, 0.704227, 0.357421, 0.129209, 0.83736, 0.975836, 0.272759, 0.780056, 0.571226, 0.540537, 0.169283, 0.071173, 0.448437, 0.343956, 0.106477, 0.787509, 0.88849, 0.0674257, 0.572059, 0.974853, 0.937704, 0.263151, 0.61667, 0.344633, 0.351047, 0.0186771, 0.344398, 0.19507, 0.559431, 0.583208, 0.827275, 0.0812181, 0.383126, 0.5464, 0.570708, 0.276859, 0.901026, 0.743717, 0.721394, 0.25876, 0.892386, 0.772171, 0.0512278, 0.51966, 0.847713, 0.897012, 0.826788, 0.00636873, 0.211952, 0.447471, 0.951253, 0.132729, 0.739933, 0.692215, 0.152446, 0.704046, 0.103456, 0.513915, 0.728064, 0.0417322, 0.189512, 0.181578, 0.37114, 0.862312, 0.405194, 0.00510079, 0.178813, 0.00954289, 0.94835, 0.298806, 0.608421, 0.667955, 0.997134, 0.663122, 0.763772, 0.185894, 0.262087, 0.974947, 0.640075, 0.898494, 0.864845, 0.960168, 0.134398, 
0.971132, 0.613015, 0.486827, 0.939965, 0.3703, 0.105103, 0.0663184, 0.372946, 0.479501, 0.105647, 0.727137, 0.399131, 0.205576, 0.384866, 0.0618376, 0.817596, 0.0388573, 0.594512, 0.173804, 0.855658, 0.101565, 0.366431, 0.0515843, 0.371371, 0.722805, 0.404299, 0.590029, 0.0486788, 0.0769522, 0.192199, 0.89153, 0.0824779, 0.184488, 0.317731, 0.298764, 0.524211, 0.740081, 0.0751576, 0.677991, 0.645256, 0.821468, 0.959768, 0.664128, 0.776426, 0.760039, 0.514726, 0.0305994, 0.270476, 0.041064, 0.541011, 0.874955, 0.961098, 0.498787, 0.714625, 0.845608, 0.679769, 0.42589, 0.0675676, 0.150137, 0.868572, 0.918925, 0.62686, 0.765263, 0.159211, 0.0809829, 0.54262, 0.771211, 0.447672, 0.234558, 0.277458, 0.0216618, 0.629838, 0.701434, 0.417866, 0.896705, 0.361485, 0.0588967, 0.807243, 0.808105, 0.143687, 0.545155, 0.792441, 0.730585, 0.376137, 0.317132, 0.673849, 0.966338, 0.841097, 0.952571, 0.580044, 0.226831, 0.946181, 0.702304, 0.334078, 0.0722082, 0.112592, 0.116075, 0.573995, 0.972941, 0.384045, 0.591413, 0.578233, 0.0721871, 0.746136, 0.820278, 0.423675, 0.961072, 0.561365, 0.856365, 0.651747, 0.994307, 0.0627695, 0.612759, 0.657777, 0.147026, 0.383492, 0.604057, 0.542221, 0.57693, 0.485087, 0.96997, 0.584556, 0.0710188, 0.53617, 0.857877, 0.300881, 0.788797, 0.882248, 0.870584, 0.647004, 0.0800327, 0.292343, 0.696522, 0.226362, 0.736003, 0.0775605, 0.0155354, 0.399078, 0.46536, 0.892267, 0.321188, 0.594121, 0.0718052, 0.32176, 0.846072, 0.33626, 0.164579, 0.049256, 0.718731, 0.517533, 0.896032, 0.582994, 0.773891, 0.515742, 0.735114, 0.192266, 0.870132, 0.211024, 0.458395, 0.0646748, 0.816954, 0.9889, 0.747679, 0.428774, 0.0195346, 0.139605, 0.203366, 0.103588, 0.85924, 0.0981775, 0.221539, 0.0128668, 0.273743, 0.222276, 0.789723, 0.948841, 0.76955, 0.096254, 0.184291, 0.86163, 0.372266, 0.0883018, 0.940716, 0.346417, 0.376696, 0.921643, 0.206702, 0.828796, 0.209551, 0.811755, 0.789655, 0.463534, 0.785495, 0.582792, 0.08772, 0.960717, 0.41261, 0.541069, 0.560536, 0.160356, 0.216015, 0.722684, 0.650591, 0.643378, 0.821075, 0.245157, 0.206439, 0.391602, 0.97929, 0.826906, 0.202789, 0.0945328, 0.348373, 0.623106, 0.886295, 0.101004, 0.438883, 0.512111, 0.640436, 0.316134, 0.0976838, 0.775334, 0.860079, 0.430115, 0.209609, 0.454237, 0.531859, 0.00843881, 0.424001, 0.349336, 0.779242, 0.208718, 0.311589, 0.149421, 0.414515, 0.877802, 0.928613, 0.180454, 0.161653, 0.393545, 0.606784, 0.744265, 0.835414, 0.326639, 0.533545, 0.0654361, 0.738186, 0.917834, 0.151804, 0.515367, 0.58829, 0.00394297, 0.962013, 0.137931, 0.383815, 0.915434, 0.014898, 0.830232, 0.965526, 0.309276, 0.65653, 0.650565, 0.095314, 0.527773, 0.181954, 0.122552, 0.35269, 0.472954, 0.0123366, 0.432987, 0.284771, 0.870714, 0.204394, 0.715494, 0.421107, 0.76778, 0.353974, 0.645342, 0.285393, 0.87628, 0.0517254, 0.470937, 0.417002, 0.650161, 0.949794, 0.30027, 0.666194, 0.682279, 0.526796, 0.508713, 0.91281, 0.0628552, 0.113695, 0.912997, 0.769714, 0.857324, 0.313152, 0.664986, 0.21166, 0.247659, 0.237378, 0.727705, 0.200328, 0.305774, 0.203644, 0.822602, 0.37086, 0.232025, 0.695365, 0.270604, 0.319389, 0.193718, 0.682487, 0.359624, 0.750914, 0.825571, 0.927503, 0.764489, 0.84035, 0.113934, 0.0500237, 0.926019, 0.56567, 0.616827, 0.23388, 0.473759, 0.70013, 0.974772, 0.169055, 0.10647, 0.826032, 0.906048, 0.0714178, 0.987559, 0.274953, 0.128556, 0.700137, 0.0632663, 0.92487, 0.738529, 0.406588, 0.284953, 0.311487, 0.756152, 0.861598, 0.61967, 0.288971, 0.309857, 0.579817, 0.457184, 0.348044, 0.388976, 0.736399, 0.864035, 0.890982, 
0.274802, 0.22692, 0.422194, 0.826935, 0.314869, 0.0339264, 0.0421313, 0.177218, 0.0207811, 0.132176, 0.286861, 0.823502, 0.0474397, 0.807458, 0.868916, 0.362884, 0.993984, 0.419491, 0.660039, 0.99099, 0.599047, 0.877648, 0.344249, 0.113648, 0.210306, 0.491341, 0.956818, 0.437145, 0.658771, 0.750698, 0.324197, 0.718631, 0.584393, 0.055116, 0.0249433, 0.521817, 0.777152, 0.757824, 0.150538, 0.0276715, 0.0475303, 0.996481, 0.828652, 0.939678, 0.840537, 0.279091, 0.328291, 0.200324, 0.352818, 0.581722, 0.419725, 0.403519, 0.49856, 0.768156, 0.67908, 0.520272, 0.778047, 0.933585, 0.548489, 0.357608, 0.251649, 0.611211, 0.728919, 0.525709, 0.135838, 0.603606, 0.625682, 0.0136436, 0.156557, 0.243296, 0.0818856, 0.856759, 0.173989, 0.231058, 0.5472, 0.126285, 0.720134, 0.758737, 0.679699, 0.915199, 0.826776, 0.219521, 0.584993, 0.241556, 0.783745, 0.202263, 0.793945, 0.271308, 0.577369, 0.548968, 0.335643, 0.444673, 0.473328, 0.651202, 0.631931, 0.587115, 0.735795, 0.601255, 0.961266, 0.339463, 0.232631, 0.922583, 0.292956, 0.35933, 0.853628, 0.306667, 0.00139812, 0.854761, 0.25882, 0.713127, 0.947542, 0.293397, 0.404635, 0.117932, 0.122836, 0.583413, 0.809211, 0.195561, 0.967682, 0.705693, 0.633366, 0.383304, 0.955578, 0.0568998, 0.879834, 0.654029, 0.531025, 0.54895, 0.37417, 0.536177, 0.466993, 0.480903, 0.663043, 0.389677, 0.169098, 0.643039, 0.532917, 0.456419, 0.893383, 0.713274, 0.802183, 0.648224, 0.00211824, 0.791559, 0.338353, 0.345956, 0.285798, 0.474201, 0.665453, 0.540921, 0.0428913, 0.862996, 0.750627, 0.275932, 0.709712, 0.524933, 0.757441, 0.536971, 0.629938, 0.223009, 0.839669, 0.961988, 0.0560968, 0.011461, 0.11266, 0.141584, 0.918288, 0.622703, 0.802097, 0.843312, 0.590929, 0.403981, 0.648432, 0.517367, 0.930954, 0.720151, 0.242404, 0.337773, 0.93929, 0.243685, 0.400477, 0.928123, 0.635857, 0.815884, 0.320734, 0.389107, 0.0263945, 0.240817, 0.00669968, 0.297484, 0.458402, 0.700029, 0.727746, 0.557621, 0.171227, 0.869515, 0.868918, 0.621846, 0.921996, 0.997787, 0.425811, 0.253914, 0.550426, 0.9278, 0.31154, 0.313836, 0.566579, 0.942595, 0.39588, 0.800495, 0.684254, 0.547619, 0.113869, 0.69799, 0.854356, 0.78815, 0.939179, 0.640633, 0.395235, 0.832261, 0.0404723, 0.221356, 0.183182, 0.654481, 0.149301, 0.757693, 0.603265, 0.0838338, 0.29521, 0.314195, 0.533095, 0.10218, 0.0232758, 0.855926, 0.579563, 0.914892, 0.464866, 0.844169, 0.152526, 0.672769, 0.502728, 0.974392, 0.570655, 0.655723, 0.59724, 0.614872, 0.199059, 0.605251, 0.186892, 0.0984888, 0.494894, 0.695916, 0.0597618, 0.759769, 0.230111, 0.563326, 0.0667818, 0.620895, 0.646685, 0.940375, 0.84536, 0.114677, 0.608223, 0.142101, 0.420423, 0.252474, 0.0604144, 0.875979, 0.905167, 0.0265054, 0.466251, 0.75565, 0.0740227, 0.877662, 0.650452, 0.92081, 0.3777, 0.626005, 0.907778, 0.516238, 0.611419, 0.226706, 0.371777, 0.376579, 0.966798, 0.682201, 0.506406, 0.0761719, 0.930662, 0.678035, 0.7321, 0.353314, 0.937612, 0.957416, 0.239887, 0.981352, 0.744812, 0.154989, 0.950178, 0.958441, 0.408084, 0.295179, 0.464794, 0.949799, 0.469583, 0.362328, 0.723564, 0.902623, 0.300571, 0.613874, 0.503863, 0.338771, 0.778864, 0.781723, 0.780094, 0.870503, 0.565277, 0.146562, 0.220441, 0.0403779, 0.570337, 0.525284, 0.813142, 0.604706, 0.774011, 0.463355, 0.404553, 0.707355, 0.628562, 0.204487, 0.164525, 0.0826636, 0.692862, 0.719929, 0.365481, 0.786214, 0.694265, 0.0199902, 0.586621, 0.267635, 0.972709, 0.757734, 0.602918, 0.143402, 0.414592, 0.125973, 0.287903, 0.958884, 0.960068, 0.0520962, 0.0517856, 0.8543, 0.704178, 0.0884781, 0.271183, 
0.423453, 0.903875, 0.881974, 0.957884, 0.443705, 0.922107, 0.265558, 0.17788, 0.981575, 0.161273, 0.678772, 0.385417, 0.252879, 0.0682349, 0.249464, 0.873124, 0.849096, 0.947327, 0.678964, 0.777931, 0.816525, 0.0929126, 0.108118, 0.449243, 0.899974, 0.0665108, 0.5858, 0.531323, 0.927827, 0.0103511, 0.0709703, 0.98223, 0.142706, 0.842133, 0.911505, 0.504504, 0.110272, 0.692185, 0.269672, 0.0712576, 0.187133, 0.619414, 0.961262, 0.854949, 0.493323, 0.351446, 0.128194, 0.0557344, 0.653307, 0.130216, 0.988166, 0.253008, 0.152657, 0.88042, 0.120014, 0.798364, 0.0106696, 0.67923, 0.251691, 0.701816, 0.527149, 0.52756, 0.0415346, 0.383856, 0.00619447, 0.389934, 0.875241, 0.501905, 0.906452, 0.723437, 0.664307, 0.360348, 0.312127, 0.366683, 0.720471, 0.83222, 0.0103533, 0.519401, 0.257593, 0.302394, 0.207028, 0.568188, 0.316302, 0.0292051, 0.34822, 0.15207, 0.83051, 0.379777, 0.2471, 0.114589, 0.331163, 0.185262, 0.573429, 0.0430788, 0.363802, 0.840727, 0.353224, 0.609202, 0.64672, 0.706685, 0.504047, 0.820693, 0.801658, 0.0572598, 0.637391, 0.191445, 0.267491, 0.854911, 0.0953261, 0.852589, 0.716014, 0.822446, 0.512768, 0.352639, 0.0350068, 0.774418, 0.119878, 0.841299, 0.206156, 0.497147, 0.655733, 0.0179589, 0.837417, 0.269018, 0.104988, 0.695914, 0.633118, 0.245564, 0.636497, 0.201487, 0.0914673, 0.493446, 0.263571, 0.729389, 0.722021, 0.0341174, 0.308849, 0.518817, 0.00421491, 0.149905, 0.197745, 0.0727395, 0.680185, 0.734017, 0.626194, 0.853396, 0.983469, 0.888025, 0.643226, 0.688728, 0.0733777, 0.751326, 0.0737934, 0.815835, 0.434945, 0.0172566, 0.764255, 0.677955, 0.673818, 0.831331, 0.535208, 0.500875, 0.215157, 0.985641, 0.443511, 0.416833, 0.484668, 0.0193438, 0.573345, 0.225457, 0.927377, 0.0517498, 0.643728, 0.660206, 0.39357, 0.643083, 0.775607, 0.542725, 0.079272, 0.0208061, 0.390891, 0.671967, 0.331133, 0.25388, 0.896223, 0.645433, 0.799778, 0.797972, 0.488416, 0.156987, 0.845106, 0.285015, 0.770429, 0.372185, 0.728626, 0.736244, 0.118279, 0.550658, 0.30722, 0.561361, 0.886628, 0.923549, 0.304531, 0.218098, 0.394841, 0.0267663, 0.242774, 0.258786, 0.244238, 0.117854, 0.0932354, 0.2854, 0.464496, 0.00752329, 0.429269, 0.273578, 0.712826, 0.616639, 0.511301, 0.347792, 0.356546, 0.152697, 0.609434, 0.953238, 0.723729, 0.632691, 0.907658, 0.0527546, 0.707755, 0.419546, 0.535719, 0.872594, 0.0125693, 0.0043075, 0.130423, 0.895473, 0.788803, 0.490615, 0.147407, 0.132436, 0.680474, 0.0769968, 0.0233969, 0.22011, 0.0224425, 0.411541, 0.0301846, 0.165329, 0.755186, 0.301133, 0.937698, 0.450628, 0.49429, 0.221952, 0.659716, 0.635105, 0.322943, 0.108761, 0.63984, 0.0847122, 0.355704, 0.915998, 0.910214, 0.684879, 0.759497, 0.364458, 0.50224, 0.701672, 0.494477, 0.516754, 0.505819, 0.677254, 0.44494, 0.093918, 0.0450103, 0.470969, 0.61147, 0.142336, 0.20379, 0.252378, 0.701415, 0.41394, 0.389155, 0.575148, 0.0904997, 0.160174, 0.442228, 0.335517, 0.606848, 0.491457, 0.741488, 0.904751, 0.816057, 0.946159, 0.945805, 0.967881, 0.8091, 0.835975, 0.380987, 0.868559, 0.345182, 0.899446, 0.0734777, 0.332584, 0.212357, 0.546893, 0.294194, 0.0126306, 0.21435, 0.0615077, 0.518999, 0.758815, 0.564635, 0.499624, 0.644529, 0.220087, 0.171863, 0.147525, 0.540522, 0.00824396, 0.227479, 0.977094, 0.839567, 0.216683, 0.354336, 0.185727, 0.868563, 0.750004, 0.724345, 0.470594, 0.521224, 0.190473, 0.683208, 0.528662, 0.461864, 0.56832, 0.952714, 0.712739, 0.546274, 0.0122462, 0.275508, 0.918051, 0.526557, 0.681236, 0.398984, 0.63048, 0.354933, 0.403561, 0.101648, 0.0562702, 0.241833, 0.592405, 0.421249, 
0.657693, 0.0846664, 0.234533, 0.0396919, 0.474402, 0.489025, 0.543942, 0.0897639, 0.580506, 0.289577, 0.0660818, 0.0762375, 0.628673, 0.506282, 0.0681379, 0.0899876, 0.796152, 0.818178, 0.16779, 0.790892, 0.043165, 0.414121, 0.99349, 0.684195, 0.311252, 0.302221, 0.675541, 0.270056, 0.959073, 0.326282, 0.664505, 0.559604, 0.0952867, 0.551741, 0.0928987, 0.561517, 0.407526, 0.937794, 0.303156, 0.4952, 0.705503, 0.537706, 0.635268, 0.5738, 0.94522, 0.322591, 0.3247, 0.529497, 0.757241, 0.321749, 0.142469, 0.882844, 0.573009, 0.116535, 0.523637, 0.565337, 0.513183, 0.103523, 0.879514, 0.144516, 0.619534, 0.919028, 0.341362, 0.888068, 0.772503, 0.723993, 0.411224, 0.541629, 0.256347, 0.0795694, 0.746589, 0.519741, 0.377331, 0.616379, 0.839363, 0.337203, 0.0567739, 0.9393, 0.0770384, 0.632138, 0.186337, 0.880371, 0.85753, 0.247797, 0.289247, 0.961378, 0.129725, 0.983914, 0.050808, 0.437132, 0.226872, 0.815869, 0.595999, 0.746545, 0.549781, 0.987117, 0.0934909, 0.126796, 0.64447, 0.718189, 0.048991, 0.611927, 0.686634, 0.100763, 0.742385, 0.0734721, 0.97888, 0.024929, 0.815125, 0.45628, 0.107423, 0.941999, 0.522722, 0.273253, 0.0154277, 0.730733, 0.578584, 0.364589, 0.857841, 0.901476, 0.0449603, 0.33454, 0.828717, 0.153718, 0.539485, 0.309033, 0.384265, 0.924793, 0.545885, 0.702045, 0.359185, 0.383262, 0.539557, 0.691854, 0.471785, 0.853779, 0.857287, 0.0927615, 0.60401, 0.0880085, 0.621144, 0.506072, 0.440031, 0.737594, 0.917021, 0.799029, 0.666887, 0.186601, 0.714612, 0.366976, 0.550683, 0.733676, 0.0375142, 0.967885, 0.252707, 0.707691, 0.115038, 0.225648, 0.467573, 0.958789, 0.881286, 0.17738, 0.290469, 0.295604, 0.413997, 0.637772, 0.727194, 0.330566, 0.627053, 0.583289, 0.740741, 0.773945, 0.342496, 0.533862, 0.870174, 0.989627, 0.873875, 0.561373, 0.441546, 0.708051, 0.268613, 0.26053, 0.509849, 0.507627, 0.98207, 0.684085, 0.181028, 0.906017, 0.00216948, 0.336878, 0.597817, 0.51993, 0.249721, 0.0256173, 0.208029, 0.143804, 0.611499, 0.32617, 0.7734, 0.0544934, 0.254328, 0.180442, 0.370125, 0.870335, 0.224201, 0.451518, 0.387688, 0.957677, 0.622975, 0.8907, 0.303326, 0.0985096, 0.895284, 0.489867, 0.806726, 0.884398, 0.277065, 0.0636961, 0.398198, 0.0958202, 0.312547, 0.964302, 0.757456, 0.892549, 0.256111, 0.0147848, 0.241539, 0.859771, 0.925692, 0.824503, 0.601704, 0.447839, 0.512976, 0.19286, 0.147443, 0.0169463, 0.979687, 0.567411, 0.827278, 0.0896635, 0.716312, 0.772048, 0.362417, 0.405062, 0.916391, 0.962836, 0.118004, 0.388326, 0.783519, 0.0244739, 0.781859, 0.433875, 0.986643, 0.282539, 0.00296915, 0.141867, 0.126083, 0.0856213, 0.620482, 0.241517, 0.441718, 0.133953, 0.602742, 0.119222, 0.0663478, 0.812402, 0.208449, 0.233881, 0.865819, 0.366654, 0.154105, 0.415295, 0.721995, 0.577181, 0.390623, 0.0315853, 0.738421, 0.980721, 0.573123, 0.0620009, 0.524203, 0.321561, 0.955075, 0.256743, 0.00204561, 0.953667, 0.472738, 0.0420401, 0.392787, 0.259279, 0.730203, 0.997399, 0.254064, 0.103841, 0.00265456, 0.874534, 0.593939, 0.983618, 0.5476, 0.0546917, 0.0322818, 0.250248, 0.304583, 0.311665, 0.696452, 0.329408, 0.994201, 0.409265, 0.00786796, 0.781897, 0.212029, 0.24925, 0.644273, 0.0803339, 0.528839, 0.964255, 0.920708, 0.535481, 0.900399, 0.357512, 0.975778, 0.838073, 0.497157, 0.273335, 0.966203, 0.791596, 0.688788, 0.0249902, 0.94414, 0.310898, 0.423375, 0.388014, 0.927173, 0.865875, 0.851401, 0.387537, 0.206307, 0.256625, 0.912294, 0.481787, 0.601942, 0.0410598, 0.903484, 0.68312, 0.748168, 0.920237, 0.793257, 0.152311, 0.594561, 0.656997, 0.69248, 0.69695, 0.972571, 0.543518, 
0.879487, 0.923869, 0.091676, 0.134122, 0.912097, 0.707876, 0.0427013, 0.087932, 0.260906, 0.43408, 0.526185, 0.33047, 0.798255, 0.647453, 0.368317, 0.78966, 0.767112, 0.464327, 0.348322, 0.528336, 0.867542, 0.162588, 0.335524, 0.154495, 0.959609, 0.817976, 0.108975, 0.715679, 0.976916, 0.102865, 0.156351, 0.361452, 0.237886, 0.421522, 0.620469, 0.272403, 0.0409431, 0.269093, 0.810988, 0.643812, 0.222394, 0.651189, 0.900043, 0.938994, 0.488929, 0.0489833, 0.999099, 0.721957, 0.227903, 0.0947337, 0.0465675, 0.274118, 0.436387, 0.495401, 0.27126, 0.348366, 0.869134, 0.569686, 0.869595, 0.962246, 0.287962, 0.837559, 0.119458, 0.915843, 0.669699, 0.172183, 0.935518, 0.782555, 0.645365, 0.881284, 0.822506, 0.5936, 0.75008, 0.73816, 0.961731, 0.102539, 0.18431, 0.747774, 0.123972, 0.0437521, 0.203027, 0.585387, 0.415051, 0.121672, 0.647093, 0.880603, 0.105394, 0.882614, 0.718948, 0.541158, 0.974741, 0.045262, 0.127256, 0.373407, 0.10848, 0.885007, 0.309382, 0.359374, 0.0171696, 0.455111, 0.400381, 0.13168, 0.894455, 0.614822, 0.2029, 0.63639, 0.587735, 0.800371, 0.705013, 0.357067, 0.598852, 0.418176, 0.920631, 0.133357, 0.641344, 0.123774, 0.549626, 0.746176, 0.41312, 0.544176, 0.139924, 0.186859, 0.815729, 0.141528, 0.799317, 0.723028, 0.00646469, 0.306431, 0.193953, 0.880584, 0.441676, 0.503649, 0.100283, 0.36592, 0.00703817, 0.821632, 0.866334, 0.23857, 0.177503, 0.200214, 0.498202, 0.218265, 0.38326, 0.322585, 0.827902, 0.48848, 0.925217, 0.756711, 0.865992, 0.0998433, 0.546017, 0.329258, 0.206277, 0.362251, 0.4988, 0.699784, 0.876167, 0.407713, 0.966473, 0.699797, 0.0939646, 0.0193712, 0.890308, 0.509845, 0.104309, 0.0336535, 0.596791, 0.0518554, 0.539505, 0.663415, 0.199421, 0.660138, 0.12727, 0.0455535, 0.814605, 0.762778, 0.0691568, 0.0436918, 0.578214, 0.6505, 0.691882, 0.681034, 0.150299, 0.769346, 0.930997, 0.86266, 0.20747, 0.363362, 0.084075, 0.139273, 0.659603, 0.175426, 0.0886449, 0.0544458, 0.87465, 0.748701, 0.222579, 0.525007, 0.116009, 0.0784596, 0.97162, 0.958678, 0.220946, 0.810572, 0.940776, 0.759188, 0.528482, 0.832487, 0.830387, 0.979419, 0.798622, 0.125728, 0.595178, 0.0490897, 0.767816, 0.620371, 0.663792, 0.966933, 0.420774, 0.788886, 0.0220837, 0.30565, 0.14275, 0.245404, 0.90319, 0.507932, 0.690273, 0.908939, 0.115463, 0.686819, 0.417396, 0.150329, 0.0737752, 0.577233, 0.322931, 0.12717, 0.613882, 0.58847, 0.479238, 0.403759, 0.0771313, 0.106414, 0.673572, 0.350583, 0.344535, 0.68153, 0.460022, 0.15081, 0.729054, 0.58455, 0.31535, 0.885813, 0.420472, 0.0255402, 0.515425, 0.935814, 0.124837, 0.686423, 0.840649, 0.78662, 0.349033, 0.559209, 0.954029, 0.393045, 0.0646583, 0.370909, 0.4484, 0.431719, 0.706785, 0.388648, 0.637737, 0.0589366, 0.668077, 0.538074, 0.12675, 0.304515, 0.890387, 0.801784, 0.471512, 0.794736, 0.248499, 0.126123, 0.684644, 0.686978, 0.846712, 0.316026, 0.517567, 0.0262958, 0.584169, 0.509851, 0.288584, 0.679869, 0.0597293, 0.366682, 0.399531, 0.461923, 0.144886, 0.836279, 0.831469, 0.427781, 0.492023, 0.545547, 0.587171, 0.656915, 0.307317, 0.987408, 0.817642, 0.0436993, 0.32873, 0.934365, 0.52028, 0.896459, 0.741594, 0.471362, 0.847753, 0.328883, 0.772851, 0.307808, 0.296269, 0.297292, 0.159325, 0.603638, 0.917443, 0.312732, 0.524107, 0.840835, 0.0487732, 0.642619, 0.827288, 0.713174, 0.534953, 0.805813, 0.524788, 0.700008, 0.428503, 0.966822, 0.0931093, 0.522227, 0.97329, 0.317087, 0.108293, 0.293492, 0.920515, 0.391827, 0.817358, 0.730106, 0.41202, 0.825189, 0.620132, 0.644621, 0.266817, 0.399723, 0.791575, 0.110672, 0.72385, 0.275793, 
0.794949, 0.892833, 0.200648, 0.807969, 0.645763, 0.751513, 0.11756, 0.42533, 0.92628, 0.114567, 0.607613, 0.668239, 0.100192, 0.114422, 0.352144, 0.507415, 0.882684, 0.873127, 0.506768, 0.344886, 0.833814, 0.0242183, 0.930751, 0.885974, 0.732036, 0.961444, 0.550254, 0.322358, 0.563877, 0.809922, 0.276575, 0.0562522, 0.143204, 0.289824, 0.200382, 0.972508, 0.774703, 0.843532, 0.246285, 0.159254, 0.157435, 0.575731, 0.803842, 0.149768, 0.365218, 0.263792, 0.0829507, 0.765149, 0.934117, 0.569531, 0.962897, 0.926989, 0.44059, 0.316736, 0.794048, 0.112996, 0.29266, 0.419849, 0.307225, 0.783262, 0.538579, 0.242878, 0.991289, 0.969825, 0.548047, 0.290566, 0.292351, 0.868032, 0.460246, 0.698321, 0.800126, 0.583279, 0.717189, 0.703257, 0.0884206, 0.175846, 0.27626, 0.859412, 0.358694, 0.639239, 0.140631, 0.704478, 0.417812, 0.0676671, 0.521065, 0.884021, 0.168365, 0.624165, 0.284287, 0.964878, 0.94158, 0.662307, 0.432806, 0.97545, 0.459688, 0.733035, 0.581206, 0.915372, 0.208417, 0.555513, 0.597207, 0.400138, 0.755858, 0.552204, 0.201865, 0.480121, 0.913508, 0.835636, 0.291581, 0.178903, 0.220853, 0.214738, 0.519559, 0.278173, 0.390957, 0.0395054, 0.00595424, 0.759536, 0.543856, 0.990689, 0.0938712, 0.0310068, 0.508122, 0.620263, 0.348884, 0.945497, 0.299965, 0.635908, 0.132086, 0.657791, 0.238095, 0.621725, 0.912678, 0.808336, 0.66988, 0.71485, 0.840787, 0.664992, 0.658862, 0.277171, 0.647297, 0.0421899, 0.954794, 0.0751451, 0.773294, 0.145824, 0.910845, 0.960064, 0.240367, 0.304227, 0.107258, 0.632377, 0.385348, 0.95717, 0.534541, 0.975734, 0.288228, 0.140664, 0.701792, 0.747395, 0.982609, 0.120717, 0.568635, 0.0653954, 0.841714, 0.91516, 0.479761, 0.534312, 0.681924, 0.935348, 0.201925, 0.113211, 0.117036, 0.80399, 0.43298, 0.69961, 0.340217, 0.35513, 0.614448, 0.0237451, 0.642954, 0.585243, 0.312187, 0.131821, 0.403962, 0.751647, 0.0439979, 0.754596, 0.44068, 0.204481, 0.293113, 0.543158, 0.523721, 0.0979635, 0.214104, 0.315379, 0.178005, 0.124782, 0.304416, 0.0750819, 0.352707, 0.548382, 0.600607, 0.0934735, 0.104828, 0.983191, 0.0757027, 0.534711, 0.0676276, 0.170516, 0.469788, 0.0840215, 0.885204, 0.674039, 0.187585, 0.196624, 0.531166, 0.0696874, 0.539674, 0.49392, 0.408729, 0.061994, 0.302405, 0.252941, 0.799968, 0.827175, 0.611391, 0.362369, 0.657713, 0.707267, 0.894151, 0.536904, 0.13435, 0.78815, 0.529078, 0.198909, 0.913081, 0.469675, 0.072014, 0.692494, 0.909327, 0.126075, 0.989177, 0.210604, 0.966014, 0.584509, 0.758058, 0.310283, 0.0453492, 0.424746, 0.37663, 0.96411, 0.200581, 0.586958, 0.091529, 0.265724, 0.681694, 0.324513, 0.669907, 0.58941, 0.440361, 0.528831, 0.105561, 0.973964, 0.654456, 0.970698, 0.284738, 0.549127, 0.428925, 0.348274, 0.622524, 0.270289, 0.500476, 0.847353, 0.507849, 0.157441, 0.230561, 0.944067, 0.985883, 0.69581, 0.141776, 0.377412, 0.401211, 0.444734, 0.980702, 0.675309, 0.875477, 0.794507, 0.349532, 0.170162, 0.624894, 0.417962, 0.415546, 0.36032, 0.757239, 0.257441, 0.913047, 0.659254, 0.145894, 0.666919, 0.0175704, 0.549286, 0.614001, 0.962877, 0.539759, 0.391621, 0.236881, 0.122099, 0.814604, 0.0891521, 0.482365, 0.554869, 0.138315, 0.963149, 0.429151, 0.203627, 0.236203, 0.170238, 0.998642, 0.801235, 0.388047, 0.768687, 0.27182, 0.673135, 0.399079, 0.85069, 0.684319, 0.986399, 0.631844, 0.927408, 0.737798, 0.995274, 0.66533, 0.245754, 0.472095, 0.290813, 0.810834, 0.313349, 0.447148, 0.571328, 0.627194, 0.57629, 0.713469, 0.422445, 0.949226, 0.902399, 0.0371075, 0.470613, 0.907833, 0.452794, 0.853629, 0.262033, 0.0207634, 0.816207, 0.446048, 
0.893255, 0.265039, 0.0509093, 0.863866, 0.301752, 0.638668, 0.490167, 0.889994, 0.84687, 0.158144, 0.65982, 0.26235, 0.95718, 0.662398, 0.107077, 0.85311, 0.932123, 0.581673, 0.61381, 0.80501, 0.185818, 0.877686, 0.304985, 0.813848, 0.296963, 0.105196, 0.0412069, 0.590802, 0.476628, 0.203454, 0.987047, 0.856256, 0.237643, 0.989676, 0.216779, 0.923763, 0.323657, 0.198351, 0.241511, 0.280164, 0.591196, 0.859441, 0.679932, 0.743648, 0.947348, 0.351457, 0.103951, 0.14544, 0.331489, 0.720028, 0.460479, 0.973947, 0.432496, 0.117495, 0.542874, 0.85118, 0.521614, 0.832358, 0.589992, 0.262417, 0.865436, 0.28816, 0.657736, 0.751303, 0.883796, 0.419268, 0.517379, 0.00849907, 0.275356, 0.560118, 0.5704, 0.693962, 0.32664, 0.653743, 0.629599, 0.500192, 0.0437695, 0.408629, 0.652927, 0.727753, 0.726597, 0.673221, 0.737262, 0.359457, 0.939694, 0.59496, 0.776528, 0.336817, 0.514643, 0.410871, 0.461674, 0.661165, 0.88696, 0.0887153, 0.899074, 0.52993, 0.763246, 0.953701, 0.611576, 0.509133, 0.270107, 0.364873, 0.929217, 0.342699, 0.623959, 0.886126, 0.406349, 0.67395, 0.999035, 0.504545, 0.925682, 0.806084, 0.681021, 0.549536, 0.265445, 0.414815, 0.627838, 0.0808093, 0.723738, 0.0815012, 0.771236, 0.73842, 0.809941, 0.41711, 0.653149, 0.0462295, 0.172731, 0.062684, 0.499982, 0.384897, 0.323556, 0.644623, 0.195763, 0.832568, 0.871539, 0.0842767, 0.902823, 0.12852, 0.266654, 0.132603, 0.825525, 0.183502, 0.927431, 0.291925, 0.469783, 0.845891, 0.213524, 0.469971, 0.0519577, 0.273665, 0.0283654, 0.129275, 0.0133337, 0.455978, 0.754961, 0.790605, 0.839277, 0.00355995, 0.467845, 0.385371, 0.939362, 0.536838, 0.912425, 0.431739, 0.0786191, 0.3032, 0.71803, 0.0454604, 0.654944, 0.422661, 0.89835, 0.104968, 0.352199, 0.853833, 0.49648, 0.925281, 0.823627, 0.585987, 0.898465, 0.326791, 0.592133, 0.463077, 0.690367, 0.0534051, 0.696711, 0.879301, 0.226565, 0.843417, 0.774814, 0.848457, 0.440648, 0.724936, 0.545384, 0.710813, 0.574709, 0.326078, 0.513247, 0.399664, 0.9986, 0.17735, 0.073329, 0.537802, 0.280107, 0.92919, 0.141223, 0.263987, 0.044565, 0.565706, 0.730661, 0.172426, 0.298544, 0.73557, 0.499133, 0.655023, 0.884787, 0.927725, 0.540348, 0.732994, 0.416806, 0.683407, 0.672019, 0.226764, 0.559166, 0.880894, 0.211082, 0.351384, 0.803358, 0.0817335, 0.0290782, 0.431313, 0.433303, 0.294876, 0.248437, 0.494062, 0.316968, 0.239748, 0.418382, 0.758703, 0.819905, 0.33438, 0.24151, 0.820807, 0.376312, 0.0603143, 0.435946, 0.840847, 0.752812, 0.841729, 0.837204, 0.523985, 0.837111, 0.235468, 0.526222, 0.414712, 0.560508, 0.329657, 0.208302, 0.341473, 0.326454, 0.536594, 0.518118, 0.12619, 0.803484, 0.714879, 0.49794, 0.868055, 0.289, 0.0652668, 0.955847, 0.881797, 0.372157, 0.526866, 0.744218, 0.0102619, 0.667189, 0.296191, 0.55762, 0.462473, 0.231383, 0.00379398, 0.675219, 0.801112, 0.142568, 0.845674, 0.271624, 0.602504, 0.0880555, 0.332806, 0.906774, 0.0917857, 0.706064, 0.569323, 0.65879, 0.964485, 0.215885, 0.433708, 0.385675, 0.20189, 0.138252, 0.997569, 0.384503, 0.642629, 0.624547, 0.565289, 0.378957, 0.47499, 0.224468, 0.339102, 0.673985, 0.426598, 0.0212697, 0.765904, 0.162709, 0.184998, 0.765993, 0.322265, 0.501502, 0.736527, 0.963061, 0.938897, 0.934525, 0.91499, 0.813492, 0.591905, 0.00209281, 0.807754, 0.875947, 0.403504, 0.292973, 0.991515, 0.702833, 0.667988, 0.823181, 0.264541, 0.653264, 0.84179, 0.516506, 0.221366, 0.856864, 0.771658, 0.849874, 0.965903, 0.720022, 0.546299, 0.162553, 0.842109, 0.00376673, 0.965212, 0.708506, 0.735937, 0.451234, 0.990287, 0.190653, 0.173985, 0.678833, 0.142683, 
0.752652, 0.902907, 0.752725, 0.456146, 0.875956, 0.274689, 0.724019, 0.286962, 0.755524, 0.243662, 0.678634, 0.545567, 0.366002, 0.424252, 0.753029, 0.161247, 0.308859, 0.768887, 0.503259, 0.251248, 0.274069, 0.123186, 0.257923, 0.553813, 0.328447, 0.432531, 0.409458, 0.781136, 0.354439, 0.929638, 0.641831, 0.736621, 0.414275, 0.850833, 0.787645, 0.166013, 0.86714, 0.815449, 0.576641, 0.108081, 0.411362, 0.425229, 0.44125, 0.782235, 0.524877, 0.695368, 0.166618, 0.928522, 0.784976, 0.672973, 0.872188, 0.940872, 0.580408, 0.638464, 0.462909, 0.459911, 0.32465, 0.809455, 0.560667, 0.380102, 0.861866, 0.109721, 0.0308464, 0.623274, 0.45616, 0.897797, 0.484541, 0.949567, 0.0837994, 0.23621, 0.212597, 0.439361, 0.728416, 0.27532, 0.26466, 0.808841, 0.421971, 0.696874, 0.828717, 0.504303, 0.662372, 0.327151, 0.184963, 0.666238, 0.781659, 0.282019, 0.487977, 0.275363, 0.092091, 0.535932, 0.49683, 0.467149, 0.502567, 0.840322, 0.286123, 0.930709, 0.983031, 0.557765, 0.265848, 0.207847, 0.509513, 0.635265, 0.566415, 0.286493, 0.311504, 0.0527314, 0.202769, 0.759897, 0.839499, 0.0422418, 0.808762, 0.860245, 0.867181, 0.199014, 0.878043, 0.0259532, 0.81451, 0.449817, 0.46622, 0.673454, 0.0773194, 0.801992, 0.0215891, 0.299486, 0.202662, 0.844368, 0.539223, 0.794393, 0.498429, 0.1719, 0.516754, 0.624653, 0.941274, 0.449317, 0.829167, 0.57099, 0.118988, 0.369464, 0.392911, 0.488977, 0.603277, 0.146521, 0.824633, 0.526214, 0.538801, 0.150353, 0.724502, 0.131537, 0.988849, 0.333432, 0.718172, 0.74426, 0.129661, 0.820868, 0.909861, 0.136001, 0.270309, 0.704288, 0.893886, 0.326444, 0.364767, 0.422896, 0.684922, 0.37052, 0.248242, 0.0453765, 0.0848157, 0.629291, 0.786327, 0.999893, 0.289456, 0.847154, 0.2287, 0.0663366, 0.127314, 0.0115147, 0.814405, 0.886679, 0.745017, 0.361332, 0.960407, 0.956814, 0.00031382, 0.0818927, 0.0792855, 0.305876, 0.129241, 0.370793, 0.872396, 0.160768, 0.369604, 0.968642, 0.983399, 0.219455, 0.368195, 0.479561, 0.736244, 0.853021, 0.263501, 0.331484, 0.421875, 0.820267, 0.824629, 0.180069, 0.979761, 0.685614, 0.151672, 0.368202, 0.935376, 0.723351, 0.947521, 0.812962, 0.0393219, 0.448852, 0.916772, 0.376377, 0.656037, 0.596934, 0.407713, 0.581081, 0.499572, 0.15246, 0.415338, 0.959572, 0.289051, 0.532543, 0.399378, 0.707961, 0.353649, 0.999977, 0.411315, 0.507638, 0.695433, 0.686485, 0.946923, 0.554696, 0.828731, 0.103839, 0.402531, 0.314835, 0.31834, 0.811244, 0.816987, 0.318343, 0.379974, 0.017095, 0.490605, 0.774623, 0.0256612, 0.771085, 0.337502, 0.798936, 0.485491, 0.384387, 0.934485, 0.128224, 0.881266, 0.55429, 0.0821492, 0.839872, 0.35792, 0.17427, 0.457722, 0.132874, 0.848926, 0.473386, 0.35084, 0.884521, 0.633309, 0.328557, 0.491455, 0.470415, 0.413116, 0.0545538, 0.0749842, 0.28592, 0.146577, 0.912355, 0.627379, 0.635914, 0.978279, 0.605406, 0.937944, 0.123009, 0.484798, 0.56439, 0.00739178, 0.711569, 0.079539, 0.985346, 0.678384, 0.86812, 0.433614, 0.80086, 0.320245, 0.592016, 0.37018, 0.835839, 0.925468, 0.536741, 0.229944, 0.53053, 0.871769, 0.73258, 0.973737, 0.703955, 0.412242, 0.67909, 0.893231, 0.482548, 0.033109, 0.127787, 0.85196, 0.763455, 0.725058, 0.637974, 0.95114, 0.736879, 0.540986, 0.537117, 0.662274, 0.465006, 0.174403, 0.825465, 0.537735, 0.26325, 0.960223, 0.393766, 0.0259233, 0.49806, 0.2551, 0.318262, 0.816348, 0.82046, 0.592883, 0.0831828, 0.545193, 0.645472, 0.727568, 0.391205, 0.865609, 0.22572, 0.593496, 0.711482, 0.181181, 0.871405, 0.461435, 0.629724, 0.146651, 0.187936, 0.653171, 0.522262, 0.564234, 0.0488697, 0.631423, 0.0656051, 
0.277527, 0.395756, 0.835641, 0.125169, 0.031573, 0.827047, 0.76422, 0.70184, 0.536186, 0.719809, 0.392548, 0.248494, 0.0113598, 0.145692, 0.0715961, 0.00824071, 0.0893908, 0.908226, 0.688277, 0.597482, 0.42348, 0.933487, 0.448177, 0.940487, 0.287481, 0.471769, 0.837522, 0.754441, 0.774111, 0.726068, 0.186716, 0.424487, 0.799763, 0.784653, 0.0287716, 0.517704, 0.19538, 0.609822, 0.793181, 0.961625, 0.901127, 0.976353, 0.0431655, 0.544816, 0.293044, 0.845437, 0.614945, 0.377541, 0.571479, 0.0378839, 0.345483, 0.818315, 0.83208, 0.505378, 0.670653, 0.0233108, 0.649274, 0.980283, 0.0817684, 0.6392, 0.355697, 0.996377, 0.877605, 0.28135, 0.72108, 0.957454, 0.653142, 0.660603, 0.888257, 0.904027, 0.268201, 0.135722, 0.681844, 0.085175, 0.46722, 0.576889, 0.508221, 0.397492, 0.031058, 0.118869, 0.0649997, 0.497825, 0.4128, 0.475993, 0.787082, 0.697152, 0.0876402, 0.00368021, 0.879489, 0.362483, 0.893105, 0.504865, 0.667119, 0.263053, 0.677176, 0.669501, 0.219379, 0.786473, 0.233583, 0.778745, 0.565559, 0.210947, 0.416327, 0.292413, 0.241213, 0.666406, 0.0222339, 0.0350166, 0.648889, 0.733443, 0.88821, 0.055882, 0.0277956, 0.504415, 0.737115, 0.641023, 0.0619757, 0.0734278, 0.71448, 0.383871, 0.182692, 0.995474, 0.376517, 0.912033, 0.481474, 0.870682, 0.0162815, 0.317314, 0.813583, 0.968563, 0.436631, 0.278392, 0.225784, 0.723891, 0.412729, 0.771891, 0.299832, 0.50019, 0.522494, 0.625582, 0.247386, 0.0624364, 0.198028, 0.0965692, 0.615351, 0.921409, 0.234406, 0.977569, 0.362076, 0.0474897, 0.000719785, 0.32906, 0.480448, 0.0747429, 0.84337, 0.969968, 0.0750479, 0.327639, 0.749891, 0.813955, 0.544825, 0.901279, 0.630745, 0.593813, 0.321331, 0.552946, 0.604974, 0.486309, 0.808165, 0.79014, 0.411673, 0.374508, 0.526008, 0.438269, 0.591808, 0.899624, 0.589784, 0.422551, 0.726518, 0.385523, 0.0812598, 0.823857, 0.969916, 0.119352, 0.720789, 0.465167, 0.414751, 0.948992, 0.178657, 0.614829, 0.0262519, 0.835114, 0.9793, 0.447871, 0.297571, 0.602232, 0.13819, 0.0357472, 0.559548, 0.164173, 0.363394, 0.330852, 0.832982, 0.803092, 0.03183, 0.203531, 0.369255, 0.0189111, 0.742462, 0.849378, 0.0451305, 0.869346, 0.966823, 0.659599, 0.408523, 0.634494, 0.267026, 0.120147, 0.0942491, 0.803437, 0.820474, 0.0740196, 0.871302, 0.415977, 0.494594, 0.0293509, 0.33357, 0.704333, 0.599486, 0.444793, 0.907338, 0.517565, 0.266307, 0.642615, 0.630478, 0.382786, 0.207283, 0.138784, 0.361832, 0.00835775, 0.0247863, 0.346629, 0.801693, 0.638861, 0.690865, 0.572333, 0.472501, 0.0470447, 0.43521, 0.0716343, 0.0270219, 0.526611, 0.647479, 0.843117, 0.39167, 0.485885, 0.291377, 0.939419, 0.0395494, 0.312875, 0.305732, 0.641768, 0.381201, 0.278588, 0.0678477, 0.128049, 0.40274, 0.391973, 0.777331, 0.354055, 0.76454, 0.363069, 0.564766, 0.33317, 0.46361, 0.185451, 0.0263633, 0.936658, 0.352718, 0.457105, 0.854162, 0.862375, 0.988636, 0.732053, 0.661575, 0.910471, 0.378453, 0.778195, 0.693411, 0.385346, 0.474262, 0.843387, 0.0689777, 0.00299793, 0.8032, 0.795708, 0.547995, 0.151327, 0.46796, 0.809076, 0.539852, 0.230707, 0.0629656, 0.373144, 0.0845302, 0.0159617, 0.454861, 0.182251, 0.674457, 0.732855, 0.316629, 0.0511217, 0.392652, 0.0173745, 0.574732, 0.60335, 0.399655, 0.766155, 0.381027, 0.61383, 0.232291, 0.531796, 0.729258, 0.785192, 0.331604, 0.86379, 0.128494, 0.969683, 0.999763, 0.0998704, 0.573769, 0.187448, 0.674414, 0.194858, 0.953877, 0.201425, 0.543769, 0.308643, 0.792264, 0.398644, 0.708149, 0.289014, 0.819657, 0.253265, 0.837789, 0.304976, 0.575838, 0.435386, 0.579792, 0.739268, 0.155193, 0.228266, 0.687053, 
0.52085, 0.0392726, 0.125017, 0.00318429, 0.687647, 0.47317, 0.341067, 0.763186, 0.42641, 0.926529, 0.331726, 0.531519, 0.752008, 0.104633, 0.666371, 0.933613, 0.833319, 0.925057, 0.601297, 0.432739, 0.773991, 0.254665, 0.271045, 0.488001, 0.98369, 0.765446, 0.00696623, 0.242701, 0.701162, 0.587165, 0.651144, 0.372115, 0.308176, 0.548135, 0.727489, 0.330426, 0.895366, 0.206513, 0.0489515, 0.764614, 0.495505, 0.110225, 0.722651, 0.438193, 0.801399, 0.548682, 0.434788, 0.175742, 0.785937, 0.75756, 0.330464, 0.865882, 0.302393, 0.181894, 0.779485, 0.430365, 0.782417, 0.314907, 0.481456, 0.857318, 0.0113857, 0.173096, 0.254854, 0.361403, 0.551114, 0.912298, 0.291188, 0.712132, 0.524902, 0.589849, 0.735236, 0.0515289, 0.161229, 0.245797, 0.944604, 0.631969, 0.265742, 0.197306, 0.0768234, 0.0194839, 0.0701168, 0.694827, 0.393103, 0.110566, 0.602244, 0.228535, 0.172539, 0.103389, 0.0175603, 0.543229, 0.204025, 0.114078, 0.516247, 0.972776, 0.206016, 0.495504, 0.902649, 0.521167, 0.273193, 0.779409, 0.899815, 0.200795, 0.884517, 0.2051, 0.942331, 0.928528, 0.42342, 0.153295, 0.813989, 0.944438, 0.830663, 0.28702, 0.608891, 0.400393, 0.0942167, 0.552048, 0.0156807, 0.531824, 0.87719, 0.202555, 0.940243, 0.242386, 0.851822, 0.878135, 0.16821, 0.131348, 0.230732, 0.597644, 0.745814, 0.181948, 0.532724, 0.906058, 0.386221, 0.273775, 0.837916, 0.338809, 0.295876, 0.432598, 0.0469732, 0.0388639, 0.225856, 0.889241, 0.875772, 0.490003, 0.573914, 0.903424, 0.0782142, 0.815816, 0.354702, 0.462083, 0.589756, 0.262515, 0.233802, 0.189547, 0.996142, 0.780253, 0.103435, 0.402775, 0.437038, 0.483267, 0.172879, 0.917637, 0.14663, 0.354259, 0.121247, 0.730811, 0.411633, 0.772367, 0.28071, 0.385159, 0.136306, 0.562222, 0.422636, 0.0900755, 0.0490742, 0.441638, 0.972136, 0.28366, 0.722472, 0.159515, 0.533905, 0.592363, 0.347499, 0.728473, 0.683163, 0.776232, 0.191833, 0.698048, 0.515704, 0.797297, 0.303258, 0.531089, 0.686102, 0.341439, 0.903674, 0.541508, 0.431689, 0.0165836, 0.331906, 0.646335, 0.534429, 0.405256, 0.0990474, 0.0653716, 0.93658, 0.54225, 0.889823, 0.274116, 0.786693, 0.460302, 0.492471, 0.16118, 0.0725846, 0.423202, 0.289644, 0.589097, 0.335348, 0.0187064, 0.898956, 0.644845, 0.161203, 0.322998, 0.709912, 0.613727, 0.883755, 0.257027, 0.417126, 0.765556, 0.185499, 0.899844, 0.432989, 0.941765, 0.290679, 0.552303, 0.5248, 0.867102, 0.843107, 0.194849, 0.178186, 0.890691, 0.762402, 0.98445, 0.896385, 0.138908, 0.541767, 0.465325, 0.148249, 0.791812, 0.643343, 0.266132, 0.514905, 0.385602, 0.551542, 0.767467, 0.191721, 0.194929, 0.763939, 0.480432, 0.262448, 0.656023, 0.862894, 0.0523637, 0.222938, 0.233722, 0.410409, 0.276134, 0.267563, 0.596115, 0.23797, 0.466054, 0.744026, 0.0609154, 0.67997, 0.942111, 0.491477, 0.397575, 0.115714, 0.975031, 0.339329, 0.783043, 0.620843, 0.67738, 0.0263275, 0.802089, 0.512084, 0.688234, 0.00734883, 0.729239, 0.951641, 0.572257, 0.274996, 0.963747, 0.653032, 0.40655, 0.374964, 0.317177, 0.0885474, 0.278359, 0.134799, 0.562288, 0.753961, 0.165827, 0.0501638, 0.0879694, 0.877446, 0.91488, 0.342373, 0.672112, 0.513737, 0.461238, 0.937427, 0.588051, 0.261043, 0.59019, 0.491552, 0.247191, 0.268557, 0.940984, 0.133495, 0.225512, 0.061612, 0.725114, 0.427663, 0.00527795, 0.692709, 0.13052, 0.401754, 0.650497, 0.93261, 0.834951, 0.819397, 0.0708086, 0.469795, 0.4609, 0.82386, 0.878738, 0.506777, 0.545598, 0.820291, 0.871072, 0.40302, 0.551472, 0.260097, 0.971456, 0.904821, 0.65532, 0.606182, 0.00195895, 0.923481, 0.0240523, 0.999963, 0.67457, 0.384125, 0.935217, 0.314049, 
0.596162, 0.904788, 0.83433, 0.83861, 0.00678903, 0.0554508, 0.840524, 0.182566, 0.692484, 0.18624, 0.0356324, 0.219805, 0.082061, 0.252837, 0.896384, 0.913405, 0.0780995, 0.40322, 0.460075, 0.844018, 0.872995, 0.689628, 0.905133, 0.527041, 0.697146, 0.725787, 0.261525, 0.617749, 0.885002, 0.743966, 0.677214, 0.949382, 0.654274, 0.793361, 0.462747, 0.658381, 0.635382, 0.499737, 0.467287, 0.359796, 0.959459, 0.728091, 0.23242, 0.0901241, 0.122147, 0.59385, 0.725853, 0.472269, 0.152191, 0.283303, 0.957536, 0.643152, 0.739371, 0.887909, 0.90516, 0.380709, 0.682323, 0.059883, 0.168969, 0.296506, 0.710234, 0.0843966, 0.904016, 0.166514, 0.597223, 0.193536, 0.469915, 0.286458, 0.731118, 0.625388, 0.0429173, 0.539192, 0.0481399, 0.211433, 0.25051, 0.425512, 0.0181936, 0.121157, 0.9789, 0.520572, 0.400318, 0.58233, 0.761052, 0.604665, 0.944583, 0.39336, 0.227694, 0.96296, 0.876985, 0.0501449, 0.288584, 0.451995, 0.534457, 0.655872, 0.661063, 0.754428, 0.966692, 0.0150086, 0.104045, 0.095014, 0.868285, 0.0476501, 0.820102, 0.0679207, 0.801386, 0.845822, 0.439861, 0.857085, 0.102827, 0.87434, 0.562673, 0.120314, 0.144534, 0.871773, 0.094574, 0.246483, 0.235665, 0.833357, 0.85404, 0.848544, 0.0604521, 0.0186945, 0.795606, 0.682973, 0.649498, 0.657285, 0.0696037, 0.793504, 0.797787, 0.511502, 0.52392, 0.700018, 0.565786, 0.616767, 0.282619, 0.339069, 0.64898, 0.865851, 0.00999974, 0.0853183, 0.176463, 0.358187, 0.906751, 0.65451, 0.232918, 0.0523113, 0.308852, 0.506029, 0.0155211, 0.464285, 0.098526, 0.807842, 0.745457, 0.877439, 0.972399, 0.309026, 0.741423, 0.0863205, 0.127728, 0.578785, 0.272265, 0.688036, 0.289875, 0.478403, 0.149041, 0.612673, 0.169721, 0.0749017, 0.950864, 0.697547, 0.223317, 0.53656, 0.244241, 0.0230917, 0.469217, 0.321902, 0.918212, 0.14048, 0.497955, 0.137043, 0.446622, 0.0820836, 0.883689, 0.850482, 0.976799, 0.255676, 0.570913, 0.32842, 0.825705, 0.876435, 0.967062, 0.941922, 0.533506, 0.666526, 0.483001, 0.91087, 0.997286, 0.450467, 0.342494, 0.859067, 0.531275, 0.463573, 0.952184, 0.53284, 0.558022, 0.725426, 0.474411, 0.655587, 0.420087, 0.468508, 0.308208, 0.604864, 0.817855, 0.728975, 0.122265, 0.306047, 0.184579, 0.16592, 0.504201, 0.536009, 0.491814, 0.398283, 0.879826, 0.287263, 0.462895, 0.516067, 0.541015, 0.725253, 0.196295, 0.606033, 0.843471, 0.453701, 0.205685, 0.634091, 0.861297, 0.465242, 0.602618, 0.720813, 0.109542, 0.518306, 0.496782, 0.799802, 0.420652, 0.224371, 0.266482, 0.654803, 0.047895, 0.432791, 0.0991827, 0.772198, 0.32209, 0.330437, 0.185746, 0.140597, 0.306, 0.334362, 0.00169165, 0.816549, 0.0763457, 0.856974, 0.0709023, 0.476311, 0.587085, 0.829337, 0.505602, 0.119236, 0.796571, 0.684383, 0.815761, 0.696858, 0.709625, 0.174017, 0.203209, 0.42155, 0.532394, 0.0445279, 0.690665, 0.610445, 0.565935, 0.833406, 0.374485, 0.0464426, 0.573286, 0.104844, 0.0645017, 0.112781, 0.573605, 0.488074, 0.522915, 0.731857, 0.967717, 0.997032, 0.572101, 0.942378, 0.345968, 0.301739, 0.176546, 0.0560019, 0.0350762, 0.563506, 0.548198, 0.872069, 0.377967, 0.348896, 0.266325, 0.284741, 0.633524, 0.889082, 0.993557, 0.846756, 0.725345, 0.572848, 0.871653, 0.368854, 0.503869, 0.0727189, 0.0918519, 0.920518, 0.946622, 0.467996, 0.759991, 0.601632, 0.318524, 0.741685, 0.213693, 0.157869, 0.952439, 0.645408, 0.938636, 0.072953, 0.814662, 0.367685, 0.746321, 0.0308021, 0.555824, 0.540047, 0.98481, 0.451255, 0.231776, 0.571407, 0.114218, 0.740337, 0.644461, 0.705899, 0.000324034, 0.0669688, 0.704823, 0.816899, 0.258044, 0.846036, 0.831444, 0.329306, 0.0978028, 0.797346, 
0.0860686, 0.500717, 0.404287, 0.665233, 0.212074, 0.0193281, 0.351646, 0.709888, 0.428712, 0.463282, 0.596454, 0.0112623, 0.369824, 0.993872, 0.660341, 0.867077, 0.238456, 0.718436, 0.707635, 0.478597, 0.493712, 0.946965, 0.679729, 0.747171, 0.192647, 0.220157, 0.616217, 0.274945, 0.410491, 0.670034, 0.529077, 0.422064, 0.449074, 0.50408, 0.797496, 0.365237, 0.829194, 0.941683, 0.684282, 0.0480276, 0.530699, 0.474956, 0.0512108, 0.272672, 0.0933672, 0.883944, 0.0853204, 0.864014, 0.989528, 0.346204, 0.668628, 0.0958885, 0.372699, 0.728539, 0.673392, 0.165501, 0.563271, 0.183946, 0.0750424, 0.755246, 0.937016, 0.644245, 0.746287, 0.831148, 0.0521238, 0.265193, 0.0866676, 0.350039, 0.339534, 0.180864, 0.196189, 0.381186, 0.790401, 0.997931, 0.998157, 0.0498226, 0.357231, 0.706713, 0.0320284, 0.745339, 0.549743, 0.640489, 0.798846, 0.222073, 0.708906, 0.17552, 0.817831, 0.576041, 0.0395129, 0.591903, 0.0668133, 0.266916, 0.582943, 0.480411, 0.102654, 0.0456304, 0.410205, 0.0901037, 0.104304, 0.577822, 0.724465, 0.928792, 0.983905, 0.0215146, 0.78416, 0.500699, 0.800233, 0.389331, 0.19249, 0.99276, 0.666015, 0.532314, 0.321296, 0.905419, 0.93963, 0.45092, 0.714422, 0.415709, 0.384093, 0.0577467, 0.158675, 0.378667, 0.800723, 0.363547, 0.997812, 0.561783, 0.148821, 0.770375, 0.830476, 0.667792, 0.305506, 0.846649, 0.687359, 0.382275, 0.374334, 0.383001, 0.014579, 0.333735, 0.396562, 0.977267, 0.806299, 0.698036, 0.682723, 0.727546, 0.274506, 0.22807, 0.0282678, 0.0096508, 0.245845, 0.669269, 0.210805, 0.657272, 0.257349, 0.882168, 0.56864, 0.0725941, 0.443277, 0.948587, 0.161456, 0.654467, 0.717582, 0.107923, 0.421968, 0.699075, 0.229357, 0.826711, 0.355575, 0.781551, 0.68971, 0.0588251, 0.403448, 0.732937, 0.0690303, 0.573226, 0.468383, 0.347646, 0.685601, 0.912646, 0.374278, 0.476829, 0.262196, 0.503916, 0.203185, 0.933735, 0.836232, 0.378327, 0.622366, 0.66969, 0.17668, 0.653573, 0.880322, 0.908573, 0.683539, 0.280947, 0.212366, 0.513669, 0.379096, 0.482158, 0.558209, 0.866087, 0.994374, 0.082474, 0.553292, 0.125581, 0.293149, 0.416828, 0.753735, 0.318881, 0.288113, 0.436025, 0.177524, 0.0651407, 0.162611, 0.302722, 0.808142, 0.387352, 0.928794, 0.484503, 0.970821, 0.993738, 0.985168, 0.882121, 0.333048, 0.7672, 0.554213, 0.975204, 0.00498456, 0.0589748, 0.567172, 0.797277, 0.22337, 0.976087, 0.569992, 0.968481, 0.931772, 0.534102, 0.487766, 0.158958, 0.382199, 0.640329, 0.88933, 0.258885, 0.716616, 0.388569, 0.622888, 0.518576, 0.312578, 0.877588, 0.77198, 0.253978, 0.450153, 0.0842559, 0.766154, 0.693676, 0.743777, 0.229778, 0.148903, 0.550572, 0.500557, 0.624845, 0.669729, 0.619281, 0.301052, 0.931496, 0.926573, 0.355867, 0.821787, 0.562358, 0.0748949, 0.927636, 0.840228, 0.0752844, 0.282079, 0.359595, 0.837961, 0.496845, 0.116626, 0.721654, 0.760899, 0.98996, 0.185363, 0.227649, 0.369601, 0.314955, 0.101791, 0.921601, 0.316116, 0.910024, 0.173297, 0.137118, 0.454023, 0.905088, 0.602478, 0.57654, 0.341757, 0.571889, 0.438844, 0.618136, 0.466023, 0.15874, 0.209441, 0.789919, 0.844397, 0.619178, 0.768327, 0.666072, 0.93239, 0.276612, 0.784224, 0.0826931, 0.0291408, 0.743507, 0.349243, 0.901697, 0.433116, 0.788764, 0.972337, 0.601347, 0.370131, 0.777478, 0.648265, 0.639736, 0.554009, 0.86256, 0.540843, 0.147768, 0.705168, 0.0272573, 0.66463, 0.939175, 0.940183, 0.644806, 0.941683, 0.98579, 0.4893, 0.424486, 0.0741436, 0.865929, 0.848053, 0.377642, 0.372867, 0.832198, 0.085784, 0.449933, 0.10641, 0.363953, 0.713693, 0.483051, 0.914744, 0.237893, 0.929199, 0.704769, 0.517149, 0.632056, 
0.356342, 0.749495, 0.736436, 0.64935, 0.943998, 0.47263, 0.335078, 0.306414, 0.67485, 0.211166, 0.970188, 0.5774, 0.361373, 0.703422, 0.0155961, 0.132124, 0.744184, 0.843494, 0.427285, 0.049488, 0.556826, 0.487275, 0.99587, 0.125392, 0.0107001, 0.836028, 0.313925, 0.132863, 0.916593, 0.181805, 0.354308, 0.562205, 0.361136, 0.487999, 0.488793, 0.8746, 0.604234, 0.325402, 0.838701, 0.4072, 0.692251, 0.0839244, 0.151218, 0.666377, 0.333471, 0.489228, 0.288571, 0.0924642, 0.267739, 0.127873, 0.076535, 0.583941, 0.501515, 0.148763, 0.454595, 0.308205, 0.563837, 0.123453, 0.152495, 0.932308, 0.627603, 0.877615, 0.794499, 0.60423, 0.640577, 0.404485, 0.439997, 0.181546, 0.492963, 0.807206, 0.29167, 0.846094, 0.458601, 0.813685, 0.46568, 0.626272, 0.848856, 0.633707, 0.498661, 0.675078, 0.620549, 0.456073, 0.729302, 0.560664, 0.366745, 0.0020874, 0.970074, 0.125042, 0.264685, 0.132465, 0.112823, 0.988554, 0.722156, 0.0396353, 0.865664, 0.484429, 0.548686, 0.59103, 0.582476, 0.734894, 0.889404, 0.285218, 0.956761, 0.541518, 0.0898954, 0.227648, 0.442708, 0.3522, 0.893348, 0.120664, 0.882216, 0.378029, 0.401639, 0.450792, 0.268672, 0.52879, 0.435971, 0.211729, 0.526568, 0.99103, 0.904192, 0.11201, 0.513164, 0.656673, 0.0417688, 0.612218, 0.442739, 0.11838, 0.748262, 0.0553697, 0.870026, 0.552763, 0.751894, 0.0466498, 0.376235, 0.19962, 0.138471, 0.583898, 0.708559, 0.143441, 0.550168, 0.972127, 0.659844, 0.84563, 0.764332, 0.851975, 0.7284, 0.0798467, 0.136125, 0.246906, 0.286265, 0.667871, 0.393268, 0.484624, 0.482572, 0.458289, 0.479928, 0.798781, 0.59654, 0.597004, 0.966279, 0.548111, 0.901875, 0.0518197, 0.828546, 0.428622, 0.709387, 0.432428, 0.379198, 0.162804, 0.0395384, 0.51822, 0.121117, 0.306746, 0.0220598, 0.556756, 0.907205, 0.274725, 0.414212, 0.646132, 0.879867, 0.208859, 0.622705, 0.584889, 0.579855, 0.84896, 0.865105, 0.622523, 0.128977, 0.344812, 0.826247, 0.190077, 0.687314, 0.112438, 0.633044, 0.0851668, 0.312351, 0.0345673, 0.337747, 0.863044, 0.768314, 0.127261, 0.817728, 0.498857, 0.850444, 0.0438929, 0.344282, 0.59962, 0.48862, 0.843066, 0.0400887, 0.344755, 0.755413, 0.71701, 0.843423, 0.859438, 0.384985, 0.221923, 0.795133, 0.868294, 0.15968, 0.15446, 0.161986, 0.520422, 0.82244, 0.141847, 0.0142685, 0.231809, 0.166331, 0.296579, 0.519988, 0.898191, 0.986998, 0.581005, 0.764441, 0.246703, 0.962021, 0.305047, 0.385845, 0.558912, 0.734187, 0.486664, 0.727426, 0.645535, 0.77368, 0.620122, 0.549884, 0.151009, 0.719495, 0.198852, 0.663813, 0.0212, 0.590674, 0.339498, 0.910916, 0.67548, 0.704248, 0.10258, 0.378093, 0.504135, 0.731606, 0.37598, 0.524782, 0.305907, 0.840336, 0.0435258, 0.517255, 0.820124, 0.455485, 0.382279, 0.258978, 0.0118907, 0.574468, 0.000370391, 0.535995, 0.0437597, 0.494886, 0.529831, 0.371977, 0.878498, 0.592735, 0.579801, 0.213448, 0.136356, 0.502449, 0.421104, 0.0246227, 0.138632, 0.166079, 0.903021, 0.208556, 0.0275364, 0.59723, 0.572199, 0.612322, 0.0801436, 0.755741, 0.635786, 0.748222, 0.844535, 0.854072, 0.545856, 0.981599, 0.259136, 0.815326, 0.682794, 0.225271, 0.278167, 0.293423, 0.032971, 0.898458, 0.207444, 0.591989, 0.0866576, 0.137149, 0.56238, 0.566976, 0.805517, 0.0404672, 0.472306, 0.0942821, 0.972937, 0.467296, 0.270338, 0.710223, 0.244662, 0.664424, 0.592779, 0.305683, 0.242862, 0.706691, 0.916735, 0.306694, 0.768377, 0.868677, 0.908373, 0.768993, 0.927949, 0.468041, 0.460656, 0.400875, 0.434938, 0.290367, 0.232434, 0.357157, 0.0128459, 0.437152, 0.466177, 0.657715, 0.727624, 0.972282, 0.733378, 0.856529, 0.633336, 0.816966, 0.717271, 
0.707479, 0.157193, 0.104709, 0.663118, 0.354479, 0.298013, 0.978317, 0.359396, 0.0353948, 0.935718, 0.491857, 0.268235, 0.777505, 0.228963, 0.541129, 0.393863, 0.55355, 0.269054, 0.14284, 0.766364, 0.968131, 0.948895, 0.106029, 0.4013, 0.321956, 0.410192, 0.578325, 0.692227, 0.727088, 0.493178, 0.583339, 0.79957, 0.483175, 0.369821, 0.685034, 0.481991, 0.172286, 0.589553, 0.816481, 0.517605, 0.878193, 0.710538, 0.899827, 0.268533, 0.584388, 0.76881, 0.526527, 0.896592, 0.274417, 0.46026, 0.795278, 0.807539, 0.968379, 0.234594, 0.0148252, 0.29242, 0.31012, 0.966512, 0.511125, 0.892265, 0.545913, 0.48244, 0.647221, 0.645802, 0.0762699, 0.41632, 0.274838, 0.434929, 0.950228, 0.544201, 0.954981, 0.0945208, 0.970956, 0.122549, 0.870073, 0.344364, 0.867787, 0.418752, 0.762004, 0.580309, 0.0475792, 0.867132, 0.830557, 0.409413, 0.670011, 0.593376, 0.289237, 0.269791, 0.178829, 0.505401, 0.565177, 0.753664, 0.318758, 0.258873, 0.825573, 0.0131477, 0.0329788, 0.191514, 0.406331, 0.55217, 0.911449, 0.42773, 0.912159, 0.743448, 0.329714, 0.174694, 0.599153, 0.725145, 0.489082, 0.122147, 0.922033, 0.0334803, 0.875512, 0.735841, 0.396567, 0.666394, 0.5934, 0.0900951, 0.546536, 0.132565, 0.030356, 0.108104, 0.318712, 0.905341, 0.561999, 0.00227073, 0.611036, 0.446425, 0.642674, 0.791669, 0.745705, 0.627445, 0.711358, 0.300592, 0.191491, 0.0734605, 0.926486, 0.971534, 0.216448, 0.358948, 0.797552, 0.688223, 0.902068, 0.327787, 0.538522, 0.0104212, 0.861041, 0.230189, 0.724042, 0.7413, 0.238936, 0.979917, 0.842628, 0.00761529, 0.276444, 0.728593, 0.721653, 0.466402, 0.493275, 0.850108, 0.957943, 0.0201826, 0.285425, 0.46538, 0.889115, 0.934114, 0.323059, 0.0640352, 0.412287, 0.069839, 0.815932, 0.535348, 0.618055, 0.282584, 0.645283, 0.256193, 0.831606, 0.61696, 0.478114, 0.330771, 0.0245916, 0.148101, 0.221962, 0.0699719, 0.418363, 0.136999, 0.272119, 0.332734, 0.323022, 0.651946, 0.230852, 0.57896, 0.445126, 0.241903, 0.782573, 0.212796, 0.912807, 0.713923, 0.184787, 0.17394, 0.45225, 0.0527854, 0.956684, 0.913021, 0.918269, 0.329428, 0.178272, 0.996181, 0.147719, 0.620964, 0.977922, 0.221524, 0.909216, 0.762584, 0.15674, 0.877335, 0.853397, 0.0209881, 0.00119366, 0.00917235, 0.837708, 0.98003, 0.749812, 0.238334, 0.290615, 0.765851, 0.827644, 0.0230157, 0.488133, 0.716798, 0.630641, 0.375724, 0.932843, 0.582879, 0.526096, 0.63768, 0.960393, 0.0611636, 0.287294, 0.339218, 0.255489, 0.160588, 0.361207, 0.864941, 0.117415, 0.914865, 0.925241, 0.934864, 0.363426, 0.06626, 0.883645, 0.548197, 0.988083, 0.687007, 0.922413, 0.416053, 0.584185, 0.454536, 0.966501, 0.468235, 0.473619, 0.564859, 0.781558, 0.292176, 0.913656, 0.234248, 0.390464, 0.223829, 0.936763, 0.222495, 0.00311183, 0.23431, 0.67509, 0.881006, 0.5757, 0.501437, 0.403494, 0.746247, 0.827228, 0.798622, 0.0268835, 0.620371, 0.315765, 0.256495, 0.232294, 0.524494, 0.701232, 0.966621, 0.399158, 0.877522, 0.50706, 0.848481, 0.571029, 0.844272, 0.269454, 0.264118, 0.566283, 0.584814, 0.798329, 0.806105, 0.693213, 0.652205, 0.523262, 0.666253, 0.102989, 0.989356, 0.618731, 0.535509, 0.736631, 0.433495, 0.15008, 0.0706011, 0.873053, 0.0523965, 0.918687, 0.393388, 0.172067, 0.930569, 0.0978199, 0.447101, 0.523367, 0.609299, 0.0901271, 0.890782, 0.307543, 0.945913, 0.997703, 0.108629, 0.348304, 0.348333, 0.00204869, 0.122618, 0.0938569, 0.794874, 0.305123, 0.364302, 0.728145, 0.274895, 0.00512434, 0.417486, 0.870662, 0.616409, 0.0514523, 0.394643, 0.0589114, 0.952772, 0.82066, 0.58701, 0.630937, 0.0806043, 0.153536, 0.753016, 0.673523, 0.116033, 
0.795464, 0.774427, 0.161471, 0.514159, 0.352222, 0.175217, 0.947267, 0.238156, 0.696229, 0.673384, 0.800644, 0.805077, 0.833149, 0.910582, 0.903417, 0.719841, 0.535921, 0.321334, 0.731686, 0.165966, 0.29207, 0.0382712, 0.724159, 0.444494, 0.228684, 0.850965, 0.239151, 0.0766897, 0.939425, 0.355534, 0.732126, 0.647372, 0.0665641, 0.237258, 0.567454, 0.990579, 0.00466232, 0.404217, 0.99872, 0.846181, 0.708998, 0.264811, 0.137258, 0.390717, 0.646865, 0.578728, 0.940499, 0.398576, 0.791966, 0.847309, 0.910536, 0.305601, 0.223202, 0.671371, 0.575081, 0.418981, 0.718384, 0.785501, 0.714349, 0.993204, 0.220676, 0.0463192, 0.436105, 0.969991, 0.788946, 0.839453, 0.205625, 0.765504, 0.549265, 0.952862, 0.315553, 0.708387, 0.278794, 0.53273, 0.396267, 0.639137, 0.417389, 0.536962, 0.778538, 0.0704751, 0.209171, 0.421222, 0.329966, 0.958506, 0.362686, 0.781173, 0.36151, 0.992726, 0.739856, 0.404086, 0.955045, 0.335428, 0.317527, 0.033478, 0.266236, 0.329765, 0.898863, 0.892905, 0.548673, 0.974924, 0.222934, 0.300501, 0.803623, 0.657457, 0.0926151, 0.893657, 0.863848, 0.317299, 0.368237, 0.404105, 0.856653, 0.459946, 0.417223, 0.164491, 0.517928, 0.249446, 0.855886, 0.395833, 0.111271, 0.972095, 0.867095, 0.791977, 0.850217, 0.281371, 0.311965, 0.16241, 0.137629, 0.636136, 0.0583402, 0.614807, 0.164026, 0.869045, 0.278282, 0.215324, 0.506442, 0.530095, 0.164135, 0.813471, 0.420058, 0.375853, 0.688981, 0.976384, 0.179628, 0.620888, 0.952071, 0.17401, 0.682865, 0.607785, 0.472553, 0.0930836, 0.321095, 0.275772, 0.484247, 0.606148, 0.246217, 0.6663, 0.0347261, 0.38875, 0.963596, 0.570533, 0.0792511, 0.639529, 0.918089, 0.34228, 0.0651234, 0.847136, 0.048013, 0.0773289, 0.118597, 0.557397, 0.92688, 0.502407, 0.205696, 0.871466, 0.145665, 0.948064, 0.527651, 0.309527, 0.525342, 0.848226, 0.48169, 0.287602, 0.911198, 0.816496, 0.954854, 0.958746, 0.830337, 0.0193145, 0.234588, 0.708238, 0.96585, 0.281358, 0.748971, 0.846833, 0.150531, 0.149437, 0.59636, 0.132067, 0.684251, 0.00824953, 0.470071, 0.646633, 0.834605, 0.702461, 0.699978, 0.0397383, 0.0385084, 0.408672, 0.7886, 0.704932, 0.905495, 0.949505, 0.0158733, 0.0311443, 0.904186, 0.725075, 0.96919, 0.0891038, 0.721771, 0.812275, 0.0929913, 0.752498, 0.702842, 0.617272, 0.00415427, 0.195183, 0.0923236, 0.584529, 0.100009, 0.965539, 0.853122, 0.186088, 0.354098, 0.545862, 0.593046, 0.0472148, 0.431368, 0.951433, 0.350727, 0.128205, 0.011742, 0.15187, 0.379414, 0.370228, 0.704354, 0.446088, 0.390218, 0.0558316, 0.505985, 0.522278, 0.330331, 0.879995, 0.983962, 0.702965, 0.839292, 0.644988, 0.497466, 0.137148, 0.36727, 0.241909, 0.0717726, 0.163127, 0.356664, 0.945429, 0.525523, 0.407935, 0.895344, 0.271098, 0.564412, 0.279167, 0.332507, 0.0262576, 0.356067, 0.519654, 0.562258, 0.0140433, 0.775724, 0.14861, 0.501813, 0.644077, 0.507464, 0.039158, 0.227907, 0.564984, 0.0942694, 0.99136, 0.421523, 0.199493, 0.414941, 0.758915, 0.471849, 0.540802, 0.540249, 0.044356, 0.458094, 0.419721, 0.414693, 0.491922, 0.598583, 0.436652, 0.0778936, 0.871026, 0.18228, 0.703523, 0.95991, 0.736018, 0.884932, 0.372552, 0.633964, 0.505102, 0.701651, 0.120149, 0.705858, 0.846772, 0.356507, 0.453378, 0.488071, 0.455906, 0.818895, 0.768284, 0.749975, 0.424067, 0.763081, 0.4972, 0.525275, 0.436984, 0.720872, 0.841366, 0.603379, 0.203636, 0.247814, 0.0345843, 0.233446, 0.437879, 0.494484, 0.442268, 0.172725, 0.446337, 0.0668237, 0.622904, 0.650961, 0.235228, 0.578706, 0.00703805, 0.751831, 0.622382, 0.605212, 0.168761, 0.282641, 0.946553, 0.629277, 0.576287, 0.863389, 0.618427, 
0.366435, 0.879814, 0.281041, 0.514958, 0.472468, 0.972822, 0.683152, 0.381074, 0.363323, 0.4224, 0.196619, 0.789101, 0.578645, 0.725778, 0.408127, 0.853077, 0.702061, 0.376187, 0.0197272, 0.72053, 0.0561716, 0.869047, 0.167927, 0.840342, 0.280166, 0.719879, 0.0903317, 0.970616, 0.716263, 0.962585, 0.967102, 0.478998, 0.400097, 0.0836012, 0.396025, 0.76018, 0.588775, 0.619031, 0.946067, 0.725609, 0.689145, 0.590953, 0.00765291, 0.828622, 0.00518687, 0.580943, 0.485026, 0.625951, 0.478734, 0.160152, 0.694386, 0.651113, 0.88445, 0.136163, 0.515494, 0.0715804, 0.999748, 0.0278632, 0.997742, 0.137615, 0.912324, 0.548444, 0.107588, 0.261158, 0.177504, 0.656161, 0.494523, 0.654332, 0.553197, 0.0268269, 0.429406, 0.4434, 0.530819, 0.951542, 0.86911, 0.75036, 0.423428, 0.35158, 0.0924659, 0.333847, 0.841109, 0.198169, 0.881253, 0.760105, 0.780111, 0.55177, 0.517346, 0.868177, 0.931512, 0.0381506, 0.922693, 0.988944, 0.40053, 0.821572, 0.782145, 0.443022, 0.820517, 0.00390795, 0.840286, 0.676976, 0.123703, 0.313046, 0.120498, 0.968606, 0.112176, 0.18899, 0.713064, 0.486598, 0.278596, 0.640871, 0.890861, 0.951679, 0.499786, 0.673355, 0.444727, 0.442644, 0.0730863, 0.812623, 0.493647, 0.603035, 0.376251, 0.116347, 0.130119, 0.398594, 0.123375, 0.475441, 0.179478, 0.152404, 0.984536, 0.733495, 0.460671, 0.821167, 0.326116, 0.373722, 0.00906352, 0.470281, 0.654762, 0.14537, 0.475416, 0.670019, 0.485323, 0.768884, 0.143904, 0.119491, 0.928004, 0.545244, 0.73309, 0.343575, 0.269428, 0.834233, 0.100279, 0.629533, 0.223478, 0.450849, 0.577506, 0.188136, 0.128219, 0.705481, 0.606171, 0.832358, 0.931524, 0.361377, 0.0558561, 0.361013, 0.732288, 0.730804, 0.219975, 0.413246, 0.621713, 0.940006, 0.570548, 0.0191787, 0.347977, 0.788357, 0.725187, 0.429053, 0.659885, 0.562865, 0.268399, 0.63032, 0.888551, 0.258158, 0.975291, 0.698359, 0.156681, 0.1022, 0.989513, 0.472163, 0.911954, 0.607903, 0.87819, 0.0606918, 0.507271, 0.921854, 0.623615, 0.915042, 0.24713, 0.543187, 0.110192, 0.451849, 0.163348, 0.462212, 0.841569, 0.0780443, 0.116758, 0.467247, 0.673064, 0.379408, 0.292629, 0.436334, 0.442518, 0.819146, 0.631085, 0.616979, 0.0483111, 0.4686, 0.37471, 0.830827, 0.619665, 0.795623, 0.102122, 0.489741, 0.0455496, 0.683585, 0.494164, 0.745055, 0.398608, 0.295058, 0.371217, 0.872504, 0.480796, 0.0173388, 0.219129, 0.980498, 0.0183539, 0.913636, 0.790602, 0.370361, 0.681016, 0.981706, 0.305975, 0.545098, 0.373718, 0.213687, 0.217866, 0.395677, 0.948431, 0.00718711, 0.457265, 0.320963, 0.522622, 0.519726, 0.717045, 0.415632, 0.701428, 0.152629, 0.895539, 0.947062, 0.656078, 0.127093, 0.542125, 0.635967, 0.835364, 0.0349438, 0.792948, 0.960612, 0.379756, 0.95115, 0.873927, 0.655596, 0.148976, 0.941628, 0.380821, 0.00826947, 0.409044, 0.657992, 0.0977822, 0.829968, 0.00873706, 0.848394, 0.265941, 0.382801, 0.977807, 0.101359, 0.965118, 0.448092, 0.655582, 0.952793, 0.868007, 0.221387, 0.0432116, 0.85909, 0.260964, 0.497213, 0.359838, 0.146938, 0.499124, 0.445027, 0.956239, 0.588245, 0.147695, 0.84798, 0.693045, 0.268923, 0.173119, 0.83999, 0.617523, 0.0598578, 0.34214, 0.13044, 0.152647, 0.56152, 0.413558, 0.617507, 0.812179, 0.822745, 0.2821, 0.143157, 0.454069, 0.765582, 0.366272, 0.78521, 0.903761, 0.719538, 0.262567, 0.483337, 0.0533463, 0.973436, 0.192161, 0.224715, 0.988409, 0.873201, 0.936524, 0.487589, 0.786111, 0.442394, 0.699116, 0.731332, 0.127783, 0.380516, 0.983647, 0.0366471, 0.812196, 0.815976, 0.314281, 0.0170025, 0.849733, 0.271804, 0.125294, 0.247795, 0.591157, 0.640856, 0.659722, 0.39571, 0.494928, 
0.231015, 0.354281, 0.368856, 0.348567, 0.697968, 0.457834, 0.153735, 0.741447, 0.161638, 0.781368, 0.832466, 0.670395, 0.818395, 0.805167, 0.320835, 0.18359, 0.0279467, 0.511568, 0.585307, 0.460593, 0.881784, 0.0247689, 0.69081, 0.160754, 0.10876, 0.141596, 0.298767, 0.684746, 0.386982, 0.603978, 0.0342262, 0.192914, 0.535633, 0.689069, 0.574535, 0.382116, 0.541315, 0.60476, 0.0825558, 0.887844, 0.527358, 0.292189, 0.360623, 0.459004, 0.100286, 0.923335, 0.259623, 0.0195067, 0.076642, 0.794031, 0.438567, 0.738351, 0.0244942, 0.133518, 0.575819, 0.148434, 0.0901353, 0.685618, 0.644974, 0.427246, 0.391579, 0.824913, 0.828225, 0.260032, 0.938803, 0.77495, 0.230349, 0.478557, 0.411945, 0.970463, 0.458079, 0.220708, 0.471714, 0.531153, 0.424397, 0.737736, 0.885419, 0.194601, 0.11476, 0.68696, 0.0892846, 0.839981, 0.583536, 0.436056, 0.382743, 0.311323, 0.319753, 0.58397, 0.0815436, 0.380069, 0.18136, 0.267518, 0.830066, 0.0584171, 0.139266, 0.273065, 0.171726, 0.00458768, 0.222857, 0.925623, 0.900361, 0.51574, 0.596751, 0.813189, 0.197677, 0.0567514, 0.710202, 0.824086, 0.439462, 0.0231343, 0.218383, 0.791127, 0.0592606, 0.819213, 0.293942, 0.732215, 0.524439, 0.0745607, 0.566805, 0.327065, 0.297679, 0.863037, 0.192395, 0.550365, 0.505564, 0.721567, 0.235523, 0.190249, 0.704856, 0.610154, 0.63136, 0.50305, 0.0138194, 0.850425, 0.267358, 0.88102, 0.618658, 0.333025, 0.714303, 0.85966, 0.278003, 0.948647, 0.274045, 0.548061, 0.432696, 0.238113, 0.434919, 0.838559, 0.332847, 0.466082, 0.0999418, 0.714829, 0.23849, 0.189547, 0.300622, 0.378781, 0.412515, 0.085281, 0.376209, 0.130332, 0.394384, 0.513449, 0.14577, 0.878797, 0.344062, 0.0608711, 0.334307, 0.479975, 0.96941, 0.883748, 0.81579, 0.708103, 0.118562, 0.951975, 0.569436, 0.13403, 0.436409, 0.677935, 0.273193, 0.335325, 0.187648, 0.899499, 0.872155, 0.151509, 0.145164, 0.5847, 0.22331, 0.987865, 0.50343, 0.44699, 0.254158, 0.417958, 0.0958172, 0.812483, 0.61296, 0.545265, 0.717791, 0.344686, 0.970344, 0.26587, 0.27406, 0.696818, 0.659104, 0.0912778, 0.467363, 0.89569, 0.327195, 0.990539, 0.691432, 0.693244, 0.301676, 0.328546, 0.142788, 0.534478, 0.310121, 0.606257, 0.388767, 0.327255, 0.148937, 0.0259379, 0.411983, 0.26551, 0.321967, 0.306039, 0.228785, 0.706771, 0.257146, 0.530639, 0.100971, 0.490982, 0.171569, 0.690203, 0.0361563, 0.767452, 0.0241055, 0.819421, 0.184101, 0.535402, 0.620448, 0.37025, 0.911208, 0.0526985, 0.0206299, 0.58876, 0.78452, 0.492915, 0.906777, 0.186986, 0.87997, 0.917257, 0.688995, 0.343211, 0.734597, 0.22362, 0.368586, 0.0182799, 0.719629, 0.115835, 0.495449, 0.809409, 0.286502, 0.0485891, 0.184236, 0.439232, 0.667535, 0.00833676, 0.059481, 0.967009, 0.0198153, 0.264556, 0.00745546, 0.799353, 0.466681, 0.0255029, 0.678498, 0.910946, 0.324607, 0.464767, 0.552058, 0.18444, 0.0361328, 0.0227582, 0.0674545, 0.368733, 0.400464, 0.906985, 0.271797, 0.559386, 0.533035, 0.294351, 0.318373, 0.653618, 0.57181, 0.198214, 0.603756, 0.390974, 0.524731, 0.520782, 0.935463, 0.322266, 0.292861, 0.682226, 0.797106, 0.657701, 0.620764, 0.935634, 0.207937, 0.150121, 0.77422, 0.498341, 0.854207, 0.73603, 0.618928, 0.857614, 0.615123, 0.436601, 0.999654, 0.0346577, 0.382397, 0.23578, 0.159694, 0.802898, 0.849086, 0.173601, 0.566291, 0.503663, 0.000907791, 0.432908, 0.53626, 0.635057, 0.628095, 0.540216, 0.441974, 0.143542, 0.504998, 0.377366, 0.0626754, 0.982903, 0.132925, 0.0336424, 0.312857, 0.968753, 0.114558, 0.608768, 0.809536, 0.935645, 0.43272, 0.98957, 0.988015, 0.274683, 0.436491, 0.837444, 0.63347, 0.823653, 0.736343, 
0.373134, 0.330846, 0.840348, 0.774259, 0.830891, 0.0710832, 0.795044, 0.0787441, 0.11598, 0.881855, 0.046912, 0.575393, 0.296416, 0.690396, 0.234348, 0.75564, 0.422779, 0.00751931, 0.495845, 0.840722, 0.742031, 0.378829, 0.0470899, 0.453174, 0.0802341, 0.469482, 0.574094, 0.592449, 0.765529, 0.053402, 0.0650745, 0.607268, 0.181792, 0.0122882, 0.412182, 0.403988, 0.50623, 0.206251, 0.594131, 0.00580781, 0.354479, 0.425606, 0.641162, 0.139522, 0.965019, 0.464224, 0.661, 0.400405, 0.244725, 0.210046, 0.760081, 0.366116, 0.941694, 0.457962, 0.452597, 0.00660253, 0.491631, 0.953025, 0.870781, 0.658575, 0.877662, 0.565561, 0.290575, 0.575348, 0.733281, 0.628795, 0.978421, 0.560043, 0.490786, 0.884137, 0.2642, 0.32807, 0.352803, 0.246648, 0.354934, 0.747577, 0.538506, 0.736983, 0.365859, 0.902217, 0.925401, 0.12419, 0.234035, 0.85746, 0.627068, 0.297742, 0.571606, 0.113314, 0.211255, 0.796477, 0.695486, 0.198958, 0.00534874, 0.838085, 0.717911, 0.843605, 0.90015, 0.196225, 0.847855, 0.970182, 0.0179115, 0.920947, 0.0310982, 0.0923537, 0.37261, 0.248712, 0.331397, 0.765657, 0.107262, 0.991801, 0.267758, 0.602164, 0.994922, 0.96184, 0.908249, 0.0253487, 0.468285, 0.225145, 0.507848, 0.231158, 0.826418, 0.487689, 0.431842, 0.216524, 0.873633, 0.6352, 0.332248, 0.176249, 0.771305, 0.764552, 0.585991, 0.137447, 0.640685, 0.550652, 0.652009, 0.0314484, 0.789505, 0.512971, 0.0248802, 0.933123, 0.063926, 0.875311, 0.905833, 0.163972, 0.451923, 0.0538253, 0.454843, 0.306446, 0.676538, 0.456339, 0.855465, 0.539155, 0.598855, 0.0534005, 0.597041, 0.535691, 0.206426, 0.877376, 0.476916, 0.980647, 0.79618, 0.0319222, 0.758395, 0.732991, 0.823359, 0.246999, 0.00202494, 0.960105, 0.695987, 0.708624, 0.90967, 0.823499, 0.580391, 0.657391, 0.72101, 0.0825454, 0.537206, 0.966665, 0.615661, 0.675438, 0.573968, 0.258932, 0.0796575, 0.214016, 0.720248, 0.205112, 0.932885, 0.350038, 0.78394, 0.674189, 0.712033, 0.0894086, 0.166538, 0.68876, 0.855589, 0.284384, 0.0357575, 0.225892, 0.801603, 0.957358, 0.308581, 0.877395, 0.895488, 0.394516, 0.689515, 0.333916, 0.416193, 0.953052, 0.650364, 0.394527, 0.755367, 0.396756, 0.704499, 0.0453959, 0.94101, 0.368692, 0.267167, 0.888319, 0.288094, 0.957726, 0.882679, 0.668571, 0.440301, 0.128254, 0.98723, 0.859162, 0.373156, 0.894042, 0.565849, 0.437508, 0.728492, 0.0955409, 0.250962, 0.926881, 0.48577, 0.353221, 0.98645, 0.506324, 0.0770055, 0.341273, 0.523949, 0.0977201, 0.782153, 0.97177, 0.0705393, 0.862642, 0.353276, 0.729284, 0.180151, 0.429601, 0.285683, 0.921411, 0.975469, 0.139252, 0.53892, 0.0100419, 0.785333, 0.395086, 0.282003, 0.110533, 0.502591, 0.680647, 0.899728, 0.722176, 0.608043, 0.119715, 0.361689, 0.201586, 0.752717, 0.312592, 0.689275, 0.848409, 0.248991, 0.921938, 0.554438, 0.901945, 0.827006, 0.426345, 0.576863, 0.964859, 0.806244, 0.0234316, 0.665693, 0.551361, 0.252924, 0.213853, 0.15006, 0.741355, 0.172567, 0.828515, 0.605658, 0.869355, 0.271804, 0.0344066, 0.290958, 0.976558, 0.694829, 0.14866, 0.889921, 0.543477, 0.646464, 0.439362, 0.894357, 0.867245, 0.425842, 0.82009, 0.273842, 0.43805, 0.559275, 0.202145, 0.316164, 0.214817, 0.605823, 0.101551, 0.994712, 0.241246, 0.124724, 0.778991, 0.229642, 0.432384, 0.0791583, 0.460771, 0.280549, 0.897206, 0.986457, 0.649935, 0.0944081, 0.55202, 0.368493, 0.20841, 0.320486, 0.799556, 0.136362, 0.652654, 0.913601, 0.805755, 0.731705, 0.781295, 0.731737, 0.452055, 0.117141, 0.455526, 0.802994, 0.899669, 0.589887, 0.564842, 0.281019, 0.844517, 0.0374629, 0.729554, 0.200922, 0.100941, 0.939185, 0.277081, 
0.567922, 0.453395, 0.241402, 0.659322, 0.5809, 0.449243, 0.532492, 0.289835, 0.0836161, 0.157201, 0.236284, 0.0372842, 0.776088, 0.0816245, 0.260832, 0.770074, 0.112621, 0.226661, 0.845001, 0.781261, 0.680766, 0.109199, 0.453802, 0.773556, 0.270642, 0.838856, 0.495279, 0.0995587, 0.96076, 0.570524, 0.139087, 0.35498, 0.283695, 0.398509, 0.638504, 0.305007, 0.727488, 0.982274, 0.723471, 0.757266, 0.717239, 0.842543, 0.144779, 0.52728, 0.376157, 0.375466, 0.383735, 0.0684799, 0.75257, 0.241517, 0.189292, 0.510436, 0.882453, 0.395124, 0.629403, 0.71659, 0.164524, 0.856016, 0.29599, 0.00388109, 0.0455246, 0.553535, 0.31949, 0.606457, 0.314088, 0.168787, 0.866016, 0.17326, 0.985027, 0.840441, 0.162676, 0.308419, 0.17278, 0.630288, 0.286061, 0.390912, 0.565466, 0.482973, 0.590098, 0.363479, 0.858017, 0.324386, 0.0734636, 0.171853, 0.285879, 0.677045, 0.746952, 0.362523, 0.277648, 0.640389, 0.542529, 0.884883, 0.873881, 0.57068, 0.12913, 0.0727719, 0.490731, 0.193608, 0.537301, 0.174337, 0.167731, 0.251585, 0.7421, 0.695198, 0.0714191, 0.753958, 0.175875, 0.333626, 0.410325, 0.0853843, 0.0373917, 0.165545, 0.585172, 0.653036, 0.197596, 0.708798, 0.891825, 0.0530165, 0.317699, 0.0242943, 0.828267, 0.298553, 0.606522, 0.40722, 0.3028, 0.428068, 0.413235, 0.290394, 0.928764, 0.138489, 0.621233, 0.473565, 0.502569, 0.915934, 0.852129, 0.958256, 0.833769, 0.330935, 0.285965, 0.39583, 0.34437, 0.79406, 0.695471, 0.489019, 0.647326, 0.887059, 0.196765, 0.800299, 0.577649, 0.162322, 0.197486, 0.532213, 0.17609, 0.339781, 0.803967, 0.969291, 0.562249, 0.311154, 0.108128, 0.0232262, 0.254611, 0.541964, 0.302667, 0.0340515, 0.856238, 0.0921222, 0.0162616, 0.0129496, 0.643613, 0.668873, 0.949685, 0.243754, 0.160607, 0.66376, 0.100144, 0.406273, 0.901384, 0.821828, 0.449341, 0.323655, 0.659461, 0.335132, 0.713815, 0.876345, 0.860541, 0.8655, 0.118372, 0.915771, 0.264669, 0.186303, 0.987255, 0.0511815, 0.242539, 0.481547, 0.238834, 0.90557, 0.610321, 0.0153901, 0.0686374, 0.990621, 0.155924, 0.237609, 0.867834, 0.402889, 0.234611, 0.502406, 0.454898, 0.033149, 0.963711, 0.29053, 0.00613623, 0.713888, 0.83206, 0.544744, 0.912221, 0.418114, 0.104555, 0.852651, 0.239303, 0.533597, 0.0633394, 0.369369, 0.0363332, 0.738775, 0.129987, 0.989882, 0.0903476, 0.67066, 0.984484, 0.731707, 0.204108, 0.262011, 0.790277, 0.407431, 0.695728, 0.515828, 0.80544, 0.396803, 0.738432, 0.300266, 0.41094, 0.0174427, 0.792764, 0.962807, 0.19489, 0.392264, 0.0250602, 0.549187, 0.423237, 0.990243, 0.0709377, 0.279543, 0.441153, 0.829393, 0.79389, 0.342466, 0.556978, 0.723297, 0.61761, 0.474607, 0.703597, 0.314514, 0.436039, 0.0284574, 0.919066, 0.361321, 0.144575, 0.0145018, 0.0113933, 0.0174701, 0.987526, 0.630435, 0.0479874, 0.858087, 0.84096, 0.0279664, 0.991547, 0.971255, 0.693827, 0.213036, 0.983827, 0.0670754, 0.502464, 0.67391, 0.860976, 0.0579069, 0.652105, 0.516673, 0.177516, 0.788812, 0.691486, 0.243459, 0.318354, 0.642212, 0.233111, 0.16263, 0.780506, 0.277115, 0.476356, 0.927041, 0.945656, 0.757738, 0.343521, 0.0523147, 0.987188, 0.49376, 0.980823, 0.160174, 0.376507, 0.530395, 0.364825, 0.615636, 0.646437, 0.843024, 0.17215, 0.358871, 0.477917, 0.647662, 0.298575, 0.85336, 0.225717, 0.324678, 0.0683453, 0.0224925, 0.677301, 0.503955, 0.529906, 0.91943, 0.167513, 0.547852, 0.696627, 0.922141, 0.647636, 0.107806, 0.412627, 0.714917, 0.27587, 0.337171, 0.704684, 0.575313, 0.124983, 0.370637, 0.0729516, 0.532589, 0.976727, 0.819767, 0.749335, 0.26185, 0.0511744, 0.887902, 0.655393, 0.923195, 0.487177, 0.289932, 0.95037, 
0.331985, 0.272447, 0.912072, 0.0122396, 0.0228211, 0.0912927, 0.543144, 0.130772, 0.67181, 0.279073, 0.28372, 0.215929, 0.517065, 0.12952, 0.393084, 0.262185, 0.249635, 0.145934, 0.807771, 0.289482, 0.200324, 0.681676, 0.971608, 0.65552, 0.748534, 0.732571, 0.670498, 0.0325363, 0.580648, 0.656321, 0.387915, 0.124873, 0.75155, 0.968077, 0.857231, 0.749537, 0.769885, 0.735312, 0.705178, 0.716916, 0.315119, 0.225334, 0.781481, 0.628665, 0.261255, 0.279811, 0.238185, 0.604197, 0.543634, 0.277021, 0.0191785, 0.718772, 0.196327, 0.372165, 0.355685, 0.62155, 0.834478, 0.651207, 0.284066, 0.210423, 0.258764, 0.91941, 0.888097, 0.59582, 0.58614, 0.491729, 0.624123, 0.103061, 0.704686, 0.45165, 0.816415, 0.334252, 0.641878, 0.237831, 0.406237, 0.727529, 0.870321, 0.337048, 0.880207, 0.285846, 0.645347, 0.797818, 0.259991, 0.939288, 0.578513, 0.781581, 0.375879, 0.879636, 0.0595484, 0.0972308, 0.85256, 0.428763, 0.741762, 0.01782, 0.870616, 0.815716, 0.442021, 0.48371, 0.892933, 0.0430317, 0.578327, 0.728407, 0.32479, 0.479309, 0.411943, 0.0872487, 0.835479, 0.483019, 0.895263, 0.783879, 0.51492, 0.795512, 0.724458, 0.917798, 0.282119, 0.729926, 0.40114, 0.985559, 0.332845, 0.746442, 0.433681, 0.0434488, 0.0174582, 0.895252, 0.0738798, 0.79687, 0.868855, 0.351842, 0.397739, 0.325166, 0.0802933, 0.535825, 0.531884, 0.844924, 0.130376, 0.803715, 0.126476, 0.192147, 0.494812, 0.237002, 0.857296, 0.309453, 0.0581357, 0.0837743, 0.639852, 0.354632, 0.459995, 0.246271, 0.262474, 0.735014, 0.143103, 0.267747, 0.0753686, 0.739178, 0.056982, 0.000799715, 0.892406, 0.0882378, 0.307848, 0.803027, 0.379611, 0.85492, 0.759238, 0.163213, 0.702486, 0.882134, 0.186, 0.313584, 0.00636078, 0.0902764, 0.279897, 0.002731, 0.0761079, 0.384237, 0.510629, 0.265343, 0.752802, 0.315032, 0.826784, 0.882114, 0.684061, 0.19909, 0.436137, 0.092918, 0.797831, 0.473867, 0.427897, 0.651829, 0.47013, 0.678715, 0.702407, 0.751334, 0.898704, 0.503614, 0.391591, 0.818978, 0.809887, 0.173134, 0.11457, 0.499702, 0.119411, 0.432502, 0.447374, 0.110724, 0.887832, 0.373947, 0.923572, 0.58658, 0.105036, 0.395687, 0.779106, 0.0784555, 0.651793, 0.605048, 0.538188, 0.660321, 0.145508, 0.648324, 0.802861, 0.702262, 0.881059, 0.0741259, 0.590445, 0.383188, 0.706176, 0.325079, 0.0229505, 0.592123, 0.184454, 0.0730534, 0.185963, 0.558702, 0.375218, 0.213493, 0.738839, 0.729522, 0.775799, 0.368516, 0.408319, 0.816075, 0.444964, 0.700222, 0.969873, 0.507899, 0.395838, 0.572048, 0.691337, 0.0971932, 0.75407, 0.397584, 0.545701, 0.517708, 0.0109691, 0.24587, 0.163002, 0.247222, 0.602707, 0.274294, 0.220517, 0.687453, 0.625314, 0.354336, 0.0765008, 0.63092, 0.0958511, 0.992166, 0.890581, 0.145185, 0.692677, 0.461927, 0.833639, 0.289073, 0.419842, 0.583822, 0.615317, 0.443634, 0.756505, 0.576425, 0.453629, 0.555153, 0.910312, 0.3237, 0.533967, 0.513721, 0.0136397, 0.684138, 0.297949, 0.787737, 0.763512, 0.764406, 0.236147, 0.821422, 0.0213322, 0.666233, 0.849554, 0.11414, 0.192536, 0.285084, 0.653564, 0.434286, 0.786304, 0.528907, 0.761106, 0.0746288, 0.239765, 0.514716, 0.902441, 0.607569, 0.112535, 0.584006, 0.745105, 0.4699, 0.368743, 0.080726, 0.255656, 0.869604, 0.845947, 0.415419, 0.304072, 0.911197, 0.884525, 0.510308, 0.0487977, 0.0136579, 0.289214, 0.584306, 0.22658, 0.586476, 0.46972, 0.498366, 0.0254632, 0.302498, 0.0300933, 0.951936, 0.588805, 0.687054, 0.612769, 0.238319, 0.920395, 0.820834, 0.359552, 0.772723, 0.179063, 0.129578, 0.686681, 0.433922, 0.272652, 0.870841, 0.185005, 0.169098, 0.809957, 0.737317, 0.978393, 0.75442, 0.326429, 
0.574537, 0.731576, 0.427291, 0.0750111, 0.457059, 0.365844, 0.982787, 0.713434, 0.626172, 0.943071, 0.0118104, 0.65924, 0.858937, 0.674435, 0.824603, 0.196461, 0.47366, 0.91418, 0.262746, 0.0576108, 0.27483, 0.0246422, 0.0231248, 0.963983, 0.69429, 0.817413, 0.260235, 0.198479, 0.673689, 0.437309, 0.387193, 0.425213, 0.181452, 0.290977, 0.316833, 0.998587, 0.931413, 0.601316, 0.278275, 0.456332, 0.895515, 0.50595, 0.147378, 0.719282, 0.00545705, 0.799354, 0.371468, 0.175213, 0.00978514, 0.648104, 0.140326, 0.558741, 0.850327, 0.530164, 0.424834, 0.132659, 0.627704, 0.105294, 0.319888, 0.500085, 0.170711, 0.538634, 0.372466, 0.946422, 0.705489, 0.870202, 0.789601, 0.169897, 0.182741, 0.391257, 0.79137, 0.926907, 0.13628, 0.704615, 0.283963, 0.434937, 0.629526, 0.0748007, 0.295045, 0.28707, 0.34446, 0.0169352, 0.554462, 0.528898, 0.34901, 0.736192, 0.718325, 0.718091, 0.726318, 0.436983, 0.573098, 0.725244, 0.921197, 0.518262, 0.827842, 0.0278995, 0.510658, 0.957951, 0.922278, 0.0271964, 0.11938, 0.248752, 0.0871802, 0.164434, 0.76044, 0.47576, 0.309438, 0.596115, 0.636404, 0.862279, 0.359065, 0.961984, 0.153274, 0.856665, 0.428014, 0.0413247, 0.246481, 0.725885, 0.866403, 0.91884, 0.000633561, 0.287143, 0.561318, 0.00102842, 0.603613, 0.787054, 0.938665, 0.270061, 0.452229, 0.159402, 0.112165, 0.145411, 0.250973, 0.0143143, 0.919613, 0.010381, 0.365027, 0.865447, 0.723604, 0.165974, 0.845208, 0.666214, 0.339759, 0.945371, 0.351369, 0.374229, 0.796259, 0.630932, 0.0399321, 0.868337, 0.771841, 0.773753, 0.360378, 0.795727, 0.907178, 0.549629, 0.202363, 0.93144, 0.957635, 0.829509, 0.596325, 0.0036505, 0.791142, 0.673159, 0.950541, 0.0915305, 0.315443, 0.990222, 0.885324, 0.460653, 0.857475, 0.696758, 0.502075, 0.229629, 0.137468, 0.563552, 0.844787, 0.639077, 0.121934, 0.0153792, 0.118298, 0.768082, 0.181479, 0.966914, 0.518583, 0.607825, 0.868244, 0.0226285, 0.125735, 0.305763, 0.57073, 0.0768162, 0.0807106, 0.969461, 0.218329, 0.938805, 0.299221, 0.371175, 0.51509, 0.524831, 0.231765, 0.583423, 0.782201, 0.452083, 0.568586, 0.859447, 0.549287, 0.540983, 0.328377, 0.397845, 0.650473, 0.940715, 0.297151, 0.384308, 0.00248369, 0.830295, 0.329032, 0.438871, 0.324094, 0.936792, 0.967745, 0.980783, 0.531688, 0.132704, 0.222562, 0.0313415, 0.918055, 0.17073, 0.95063, 0.0851412, 0.680607, 0.410405, 0.340138, 0.805791, 0.781496, 0.812348, 0.804547, 0.427944, 0.351167, 0.22775, 0.423274, 0.145577, 0.794884, 0.958136, 0.0637143, 0.698572, 0.539314, 0.551179, 0.609488, 0.925665, 0.614639, 0.637752, 0.663022, 0.241247, 0.0815666, 0.42624, 0.402996, 0.501672, 0.75054, 0.970453, 0.55942, 0.961818, 0.127586, 0.195195, 0.432428, 0.642632, 0.280214, 0.381199, 0.222965, 0.04361, 0.231427, 0.750528, 0.592084, 0.178209, 0.427117, 0.432842, 0.360393, 0.564562, 0.837777, 0.739208, 0.66282, 0.57374, 0.349421, 0.300318, 0.952045, 0.214642, 0.608816, 0.276419, 0.981838, 0.924332, 0.470357, 0.784598, 0.234197, 0.386811, 0.337882, 0.22286, 0.362129, 0.044151, 0.439697, 0.376008, 0.492033, 0.110027, 0.404034, 0.92845, 0.883454, 9.20955e-06, 0.427804, 0.625631, 0.945112, 0.443768, 0.617487, 0.552308, 0.874938, 0.240181, 0.156624, 0.763623, 0.0994235, 0.313841, 0.458318, 0.617322, 0.818676, 0.760326, 0.206138, 0.600317, 0.676339, 0.913658, 0.522778, 0.13781, 0.529129, 0.676201, 0.0997124, 0.825882, 0.342245, 0.701089, 0.419201, 0.216893, 0.596375, 0.970032, 0.35936, 0.397763, 0.351533, 0.425464, 0.490964, 0.914249, 0.894091, 0.118882, 0.0726074, 0.508444, 0.662858, 0.225404, 0.373918, 0.538371, 0.352321, 0.520999, 
0.620749, 0.216278, 0.185088, 0.13693, 0.904191, 0.128102, 0.444181, 0.673541, 0.824811, 0.0298134, 0.525013, 0.809707, 0.161747, 0.709675, 0.803454, 0.23123, 0.691511, 0.437803, 0.446471, 0.470057, 0.127524, 0.376906, 0.0964944, 0.215639, 0.743227, 0.840237, 0.726717, 0.492798, 0.398671, 0.816375, 0.358903, 0.856632, 0.337391, 0.300591, 0.945666, 0.613881, 0.420898, 0.815055, 0.48278, 0.542244, 0.329735, 0.485236, 0.462731, 0.052929, 0.959346, 0.439004, 0.214346, 0.978555, 0.0373361, 0.0145994, 0.971468, 0.82311, 0.599798, 0.627661, 0.945441, 0.900095, 0.21355, 0.582631, 0.168009, 0.887485, 0.483977, 0.731445, 0.868779, 0.973907, 0.617269, 0.142719, 0.493231, 0.211385, 0.959368, 0.985605, 0.1044, 0.658787, 0.268061, 0.627579, 0.726954, 0.754517, 0.854008, 0.238922, 0.765741, 0.665729, 0.354013, 0.152634, 0.373761, 0.547785, 0.225744, 0.278346, 0.182884, 0.574961, 0.491564, 0.806318, 0.670477, 0.971696, 0.637222, 0.682487, 0.0696124, 0.611466, 0.711547, 0.378591, 0.959788, 0.224027, 0.357015, 0.409039, 0.977697, 0.0639567, 0.99189, 0.344892, 0.518262, 0.133529, 0.89597, 0.708624, 0.939704, 0.362208, 0.743186, 0.407725, 0.753125, 0.33541, 0.232519, 0.586176, 0.494532, 0.154323, 0.28919, 0.218706, 0.971498, 0.769848, 0.199773, 0.199842, 0.832263, 0.298279, 0.161605, 0.830388, 0.45103, 0.449516, 0.216712, 0.406236, 0.390806, 0.527003, 0.720394, 0.965764, 0.47614, 0.491749, 0.605551, 0.920255, 0.561416, 0.850835, 0.716615, 0.577531, 0.234504, 0.196767, 0.309561, 0.0540015, 0.236115, 0.0647383, 0.94031, 0.782261, 0.193652, 0.897891, 0.822425, 0.776651, 0.961456, 0.6151, 0.325957, 0.156517, 0.385579, 0.144628, 0.100468, 0.386446, 0.665891, 0.917745, 0.440557, 0.966559, 0.906212, 0.0991053, 0.442167, 0.514038, 0.136895, 0.389679, 0.878286, 0.987036, 0.647489, 0.772652, 0.0596531, 0.137768, 0.867434, 0.0248745, 0.507673, 0.993101, 0.00956828, 0.296852, 0.897351, 0.14875, 0.323651, 0.0732329, 0.673215, 0.0435495, 0.893435, 0.282982, 0.918068, 0.409819, 0.743783, 0.104833, 0.939149, 0.609604, 0.684542, 0.796889, 0.887128, 0.0552839, 0.549993, 0.940735, 0.977522, 0.0892621, 0.276351, 0.653479, 0.117041, 0.393059, 0.0378214, 0.991785, 0.784296, 0.405971, 0.676139, 0.812947, 0.631336, 0.754924, 0.67995, 0.607849, 0.0195246, 0.758245, 0.0117788, 0.859992, 0.196167, 0.465584, 0.188559, 0.782512, 0.840836, 0.587122, 0.466824, 0.620506, 0.60335, 0.980952, 0.266406, 0.137192, 0.487811, 0.452897, 0.450012, 0.491876, 0.0906254, 0.796341, 0.589543, 0.563734, 0.0342728, 0.551774, 0.782854, 0.122009, 0.234738, 0.644906, 0.452563, 0.132356, 0.743137, 0.207254, 0.837961, 0.267638, 0.266158, 0.895388, 0.883835, 0.679257, 0.239133, 0.626719, 0.627379, 0.249918, 0.219559, 0.913258, 0.214489, 0.563058, 0.309849, 0.588892, 0.849314, 0.892735, 0.0153528, 0.825552, 0.594559, 0.887532, 0.447185, 0.862193, 0.982494, 0.420673, 0.425409, 0.245566, 0.423263, 0.973329, 0.536092, 0.0281592, 0.632002, 0.0483676, 0.695143, 0.703635, 0.940282, 0.594419, 0.135663, 0.835342, 0.0167258, 0.62401, 0.364911, 0.182246, 0.990571, 0.686539, 0.686448, 0.617893, 0.800968, 0.0837678, 0.0733075, 0.973099, 0.967344, 0.567045, 0.211758, 0.535945, 0.189859, 0.242974, 0.213595, 0.737839, 0.8066, 0.548361, 0.851537, 0.382127, 0.184277, 0.08541, 0.739887, 0.27149, 0.936032, 0.337244, 0.106314, 0.399024, 0.0629548, 0.433659, 0.999772, 0.537112, 0.345139, 0.949056, 0.238801, 0.891697, 0.505647, 0.933695, 0.265259, 0.542074, 0.815094, 0.642507, 0.998236, 0.637564, 0.550156, 0.606018, 0.873737, 0.317555, 0.730636, 0.844028, 0.0390197, 0.842812, 
0.480561, 0.691286, 0.00295733, 0.919325, 0.473461, 0.784858, 0.814156, 0.0912686, 0.403945, 0.28338, 0.585214, 0.645965, 0.317993, 0.939089, 0.783608, 0.835148, 0.638605, 0.629331, 0.171287, 0.250434, 0.635279, 0.213497, 0.694413, 0.0707755, 0.983926, 0.973839, 0.349415, 0.953162, 0.200589, 0.154119, 0.771383, 0.836393, 0.706541, 0.674026, 0.1878, 0.385638, 0.912494, 0.504632, 0.0466343, 0.450686, 0.845616, 0.710788, 0.0947787, 0.479396, 0.412178, 0.37111, 0.71844, 0.564508, 0.382976, 0.258863, 0.647761, 0.734297, 0.393745, 0.315436, 0.601107, 0.817479, 0.942905, 0.0972653, 0.788756, 0.843212, 0.592457, 0.703508, 0.550176, 0.370677, 0.352558, 0.201701, 0.608106, 0.480642, 0.815508, 0.921177, 0.7606, 0.208658, 0.197253, 0.79362, 0.0647093, 0.846116, 0.12328, 0.976125, 0.849035, 0.175173, 0.103671, 0.621033, 0.541288, 0.804469, 0.565353, 0.788961, 0.70966, 0.11589, 0.172359, 0.956232, 0.00444929, 0.952664, 0.920222, 0.156244, 0.0964654, 0.58569, 0.313021, 0.0865403, 0.973574, 0.485838, 0.294507, 0.709875, 0.0764558, 0.857014, 0.672743, 0.728314, 0.185804, 0.478334, 0.284808, 0.212212, 0.75047, 0.07419, 0.878726, 0.303373, 0.157442, 0.832152, 0.358087, 0.0877035, 0.220242, 0.00752386, 0.943168, 0.0944187, 0.335775, 0.875908, 0.00341338, 0.902309, 0.637416, 0.218752, 0.331346, 0.540993, 0.805102, 0.267758, 0.0702342, 0.573947, 0.986206, 0.637667, 0.313183, 0.406677, 0.69629, 0.820912, 0.632487, 0.449504, 0.601832, 0.188476, 0.869593, 0.666638, 0.0179505, 0.687915, 0.0492094, 0.769238, 0.204836, 0.588908, 0.437742, 0.59448, 0.00915766, 0.720968, 0.828406, 0.829383, 0.0352128, 0.179024, 0.358655, 0.926073, 0.397781, 0.103781, 0.113197, 0.829407, 0.919549, 0.163854, 0.507403, 0.657788, 0.151868, 0.636554, 0.116516, 0.810623, 0.361253, 0.132407, 0.909135, 0.531187, 0.291935, 0.788969, 0.839437, 0.950706, 0.20696, 0.234068, 0.429527, 0.929159, 0.920396, 0.37527, 0.256818, 0.714332, 0.779368, 0.437543, 0.786339, 0.190171, 0.431068, 0.0201946, 0.694304, 0.250447, 0.456195, 0.390567, 0.436, 0.222168, 0.132072, 0.160953, 0.40152, 0.869772, 0.107552, 0.450487, 0.499401, 0.598804, 0.742389, 0.0348395, 0.863944, 0.605634, 0.377078, 0.814854, 0.60815, 0.157984, 0.320349, 0.582299, 0.861435, 0.485736, 0.0720602, 0.636357, 0.165244, 0.0488178, 0.559464, 0.385086, 0.237701, 0.931245, 0.307075, 0.993439, 0.578477, 0.419532, 0.302884, 0.66098, 0.00921507, 0.828726, 0.436555, 0.276156, 0.389469, 0.866946, 0.04639, 0.561253, 0.221024, 0.989742, 0.105593, 0.29141, 0.481599, 0.0652974, 0.23962, 0.700024, 0.162525, 0.616694, 0.818685, 0.928904, 0.575266, 0.994048, 0.264861, 0.399138, 0.378382, 0.784798, 0.958837, 0.966213, 0.000872461, 0.0700252, 0.261724, 0.382432, 0.726177, 0.856526, 0.55035, 0.148673, 0.010882, 0.85469, 0.490462, 0.86233, 0.919517, 0.379194, 0.484635, 0.747023, 0.29862, 0.62039, 0.391917, 0.882988, 0.129953, 0.848137, 0.570856, 0.396335, 0.742932, 0.665201, 0.708586, 0.689406, 0.785374, 0.749921, 0.141877, 0.894856, 0.906826, 0.416775, 0.391618, 0.530175, 0.385679, 0.0676853, 0.104955, 0.598074, 0.657765, 0.712316, 0.810347, 0.445948, 0.758438, 0.690593, 0.189248, 0.148497, 0.238736, 0.91429, 0.811374, 0.699704, 0.0222553, 0.462092, 0.0176483, 0.315804, 0.646342, 0.152988, 0.935727, 0.185243, 0.317748, 0.657791, 0.127194, 0.771139, 0.310894, 0.442085, 0.976524, 0.852047, 0.940219, 0.227883, 0.0737196, 0.321444, 0.715404, 0.862237, 0.623349, 0.390471, 0.716934, 0.183341, 0.684268, 0.828615, 0.44467, 0.854221, 0.303279, 0.00474047, 0.746008, 0.990403, 0.0245057, 0.892543, 0.333438, 0.645945, 
0.894897, 0.537852, 0.0365033, 0.51783, 0.190039, 0.556986, 0.25483, 0.354558, 0.00457464, 0.570015, 0.30223, 0.282845, 0.252269, 0.222274, 0.630466, 0.730861, 0.470052, 0.0677504, 0.346821, 0.173333, 0.42684, 0.204224, 0.498089, 0.373997, 0.535713, 0.338757, 0.577109, 0.265868, 0.634154, 0.785207, 0.166153, 0.955728, 0.25402, 0.116213, 0.0915607, 0.865838, 0.645909, 0.593727, 0.128846, 0.0245891, 0.968786, 0.760299, 0.147346, 0.815896, 0.960407, 0.14677, 0.22147, 0.257619, 0.670644, 0.399816, 0.463644, 0.369857, 0.855528, 0.647237, 0.257168, 0.806647, 0.207128, 0.155278, 0.932385, 0.842062, 0.926278, 0.710952, 0.516185, 0.902378, 0.299687, 0.540293, 0.644212, 0.451105, 0.950815, 0.0739414, 0.197949, 0.351032, 0.234098, 0.191684, 0.522057, 0.511787, 0.250952, 0.233442, 0.623991, 0.727221, 0.626169, 0.825453, 0.685542, 0.327878, 0.194404, 0.650026, 0.199135, 0.813948, 0.726281, 0.476963, 0.449556, 0.688463, 0.948903, 0.128969, 0.997454, 0.881724, 0.492254, 0.721315, 0.0634852, 0.978964, 0.313024, 0.94243, 0.175312, 0.635139, 0.272767, 0.330437, 0.567331, 0.923177, 0.606822, 0.114219, 0.227044, 0.627984, 0.146989, 0.406714, 0.550754, 0.311881, 0.155117, 0.515884, 0.54031, 0.422817, 0.619403, 0.966624, 0.117989, 0.140872, 0.924545, 0.82679, 0.48987, 0.302746, 0.931573, 0.769272, 0.596746, 0.858332, 0.99427, 0.60916, 0.92109, 0.902344, 0.322089, 0.891114, 0.861235, 0.0447001, 0.140763, 0.810269, 0.760182, 0.835655, 0.0852238, 0.0204504, 0.180674, 0.232077, 0.702038, 0.092814, 0.641924, 0.642582, 0.926926, 0.433865, 0.37269, 0.771032, 0.582382, 0.257629, 0.19077, 0.892117, 0.180281, 0.101532, 0.166997, 0.0283434, 0.468934, 0.603733, 0.109284, 0.423224, 0.392599, 0.400548, 0.931693, 0.19806, 0.427202, 0.743382, 0.622772, 0.890371, 0.169439, 0.60091, 0.436395, 0.557217, 0.870708, 0.395755, 0.994234, 0.936305, 0.80959, 0.408934, 0.636522, 0.365479, 0.266042, 0.791631, 0.824197, 0.216946, 0.229634, 0.680746, 0.781349, 0.168808, 0.841236, 0.153844, 0.980929, 0.0426859, 0.464202, 0.167075, 0.523266, 0.934005, 0.936896, 0.826264, 0.044715, 0.438219, 0.34769, 0.626727, 0.439684, 0.831946, 0.484933, 0.404827, 0.260654, 0.81202, 0.174004, 0.787851, 0.455138, 0.655473, 0.109901, 0.890659, 0.947074, 0.361157, 0.16539, 0.58113, 0.564428, 0.895702, 0.547181, 0.441325, 0.694036, 0.0577419, 0.159258, 0.737418, 0.651805, 0.128015, 0.922053, 0.445111, 0.0784964, 0.452877, 0.447184, 0.813371, 0.522148, 0.115545, 0.756981, 0.454446, 0.063932, 0.498896, 0.609644, 0.322579, 0.498747, 0.657123, 0.658618, 0.653773, 0.233941, 0.679365, 0.068694, 0.393955, 0.281882, 0.343787, 0.971124, 0.425295, 0.88358, 0.678806, 0.571204, 0.252578, 0.42372, 0.270893, 0.24403, 0.481379, 0.0432195, 0.256206, 0.658633, 0.573123, 0.765552, 0.506332, 0.301392, 0.162464, 0.533705, 0.823675, 0.111261, 0.557734, 0.784037, 0.0099777, 0.015297, 0.290589, 0.0453075, 0.78254, 0.279832, 0.389579, 0.557797, 0.93069, 0.098577, 0.161702, 0.0175863, 0.160465, 0.249734, 0.452553, 0.754312, 0.594882, 0.596358, 0.491436, 0.396948, 0.760428, 0.288846, 0.243815, 0.338206, 0.793308, 0.965915, 0.0172737, 0.445257, 0.322325, 0.772404, 0.654016, 0.80926, 0.776535, 0.304082, 0.0630144, 0.583282, 0.846366, 0.499674, 0.299911, 0.513443, 0.545478, 0.0164018, 0.725634, 0.253983, 0.561844, 0.980391, 0.735945, 0.816901, 0.974125, 0.384052, 0.813821, 0.469815, 0.948957, 0.970426, 0.470512, 0.23746, 0.229858, 0.52859, 0.826144, 0.994975, 0.0484827, 0.9569, 0.414816, 0.186097, 0.344149, 0.92083, 0.90155, 0.446289, 0.220355, 0.233272, 0.604058, 0.0654023, 0.179335, 
0.0837292, 0.770125, 0.205524, 0.475131, 0.631108, 0.358362, 0.746616, 0.660189, 0.117527, 0.111248, 0.131471, 0.613953, 0.813998, 0.658308, 0.0130244, 0.277711, 0.276137, 0.76768, 0.29524, 0.0903824, 0.940259, 0.887613, 0.058253, 0.814281, 0.489991, 0.329663, 0.0230103, 0.912906, 0.668551, 0.661821, 0.877177, 0.834014, 0.0399352, 0.00101552, 0.871034, 0.14299, 0.665033, 0.894723, 0.793121, 0.291678, 0.446687, 0.970797, 0.246298, 0.542464, 0.479604, 0.851147, 0.740605, 0.312288, 0.730644, 0.754502, 0.558063, 0.992145, 0.468094, 0.222104, 0.139942, 0.689898, 0.192548, 0.688286, 0.0736191, 0.676422, 0.277903, 0.0473723, 0.328127, 0.843367, 0.22378, 0.430559, 0.30508, 0.440893, 0.782238, 0.00392183, 0.859052, 0.337715, 0.345673, 0.585426, 0.765532, 0.00776284, 0.824311, 0.455099, 0.544381, 0.462037, 0.784649, 0.859314, 0.543724, 0.127025, 0.618281, 0.669525, 0.779777, 0.296608, 0.159788, 0.113799, 0.926704, 0.793592, 0.326342, 0.970287, 0.479055, 0.556199, 0.9544, 0.248976, 0.227611, 0.731636, 0.147031, 0.883422, 0.785642, 0.849175, 0.584176, 0.95449, 0.656298, 0.920753, 0.0539454, 0.806535, 0.939187, 0.359771, 0.72007, 0.881336, 0.481058, 0.88465, 0.126115, 0.633827, 0.566964, 0.973076, 0.29133, 0.460039, 0.0727633, 0.227582, 0.215589, 0.432656, 0.675801, 0.937676, 0.0110957, 0.215127, 0.651935, 0.119513, 0.797525, 0.535864, 0.173114, 0.65133, 0.332392, 0.192153, 0.943845, 0.859665, 0.996203, 0.658336, 0.939141, 0.281987, 0.691982, 0.382253, 0.484265, 0.111423, 0.0783975, 0.209906, 0.338327, 0.696582, 0.458292, 0.921108, 0.230857, 0.469616, 0.788192, 0.605087, 0.330865, 0.202253, 0.885138, 0.398612, 0.174863, 0.344729, 0.245864, 0.0829636, 0.587539, 0.94773, 0.400121, 0.638613, 0.474506, 0.859106, 0.432006, 0.463063, 0.262733, 0.428857, 0.249494, 0.872689, 0.151902, 0.786426, 0.285359, 0.688028, 0.36568, 0.0731272, 0.586232, 0.776614, 0.0946996, 0.486384, 0.80218, 0.293806, 0.881017, 0.208664, 0.693056, 0.420064, 0.787199, 0.747993, 0.165428, 0.739872, 0.00293114, 0.267649, 0.330248, 0.202902, 0.778428, 0.535537, 0.0925189, 0.696239, 0.200978, 0.518677, 0.652183, 0.0965469, 0.0867163, 0.219582, 0.662279, 0.00280588, 0.272441, 0.00465201, 0.676683, 0.825306, 0.141091, 0.95705, 0.804377, 0.512947, 0.32589, 0.201093, 0.868994, 0.229481, 0.545692, 0.022053, 0.149268, 0.56327, 0.200395, 0.846691, 0.645707, 0.098429, 0.332268, 0.77089, 0.544161, 0.534925, 0.0404122, 0.15222, 0.00219823, 0.857041, 0.598994, 0.706285, 0.00155841, 0.660715, 0.187972, 0.639581, 0.452117, 0.491033, 0.0578531, 0.772023, 0.947644, 0.0997224, 0.00814886, 0.798098, 0.425449, 0.190257, 0.705707, 0.574332, 0.959989, 0.647748, 0.585037, 0.936321, 0.0581099, 0.237389, 0.151609, 0.996003, 0.792569, 0.0978881, 0.951228, 0.301671, 0.779645, 0.0288584, 0.037397, 0.945627, 0.304568, 0.426189, 0.4257, 0.279629, 0.819273, 0.666387, 0.630945, 0.220709, 0.404553, 0.0883133, 0.463987, 0.212745, 0.214379, 0.61238, 0.977878, 0.210322, 0.923227, 0.992948, 0.123616, 0.53986, 0.292867, 0.477867, 0.369048, 0.328313, 0.49386, 0.656201, 0.824022, 0.0282923, 0.286493, 0.711301, 0.883975, 0.455991, 0.673695, 0.818143, 0.613079, 0.0335363, 0.0680771, 0.668399, 0.503759, 0.264248, 0.903885, 0.843643, 0.0533691, 0.560798, 0.631014, 0.857126, 0.633154, 0.516262, 0.265955, 0.143606, 0.935716, 0.0741677, 0.648374, 0.720572, 0.337443, 0.384864, 0.966169, 0.501034, 0.352625, 0.597608, 0.173608, 0.29504, 0.778325, 0.966678, 0.284452, 0.169445, 0.538185, 0.919894, 0.956887, 0.964023, 0.763123, 0.709229, 0.866486, 0.448187, 0.628869, 0.0399799, 0.450391, 
0.924285, 0.438688, 0.022503, 0.770717, 0.570704, 0.0638541, 0.677098, 0.944873, 0.116451, 0.917285, 0.2318, 0.336039, 0.0499738, 0.696717, 0.948234, 0.605339, 0.45729, 0.843917, 0.287549, 0.663221, 0.167949, 0.094581, 0.90674, 0.185287, 0.776953, 0.268195, 0.014379, 0.501671, 0.753098, 0.650432, 0.534088, 0.889096, 0.949667, 0.0219556, 0.214308, 0.224668, 0.154131, 0.368965, 0.122133, 0.65116, 0.197679, 0.783807, 0.270285, 0.694187, 0.95038, 0.110979, 0.242191, 0.213992, 0.13952, 0.931086, 0.226877, 0.305606, 0.979378, 0.953768, 0.241063, 0.124948, 0.105372, 0.218845, 0.637486, 0.362314, 0.857944, 0.582592, 0.502794, 0.114551, 0.237904, 0.924022, 0.864367, 0.231353, 0.44384, 0.323362, 0.76038, 0.423856, 0.452531, 0.0115742, 0.46815, 0.48013, 0.0473933, 0.438219, 0.129854, 0.706835, 0.436701, 0.400324, 0.0583846, 0.940718, 0.975561, 0.507812, 0.190554, 0.893113, 0.636696, 0.118974, 0.53061, 0.607902, 0.914011, 0.143261, 0.0857454, 0.770538, 0.0139434, 0.407664, 0.92041, 0.157959, 0.584356, 0.969034, 0.339759, 0.642837, 0.681889, 0.223098, 0.661695, 0.0705865, 0.259975, 0.00655343, 0.277794, 0.037764, 0.333258, 0.0823079, 0.552069, 0.188632, 0.85048, 0.248655, 0.508736, 0.989409, 0.676781, 0.992566, 0.856983, 0.257727, 0.182448, 0.0497544, 0.310994, 0.743109, 0.874533, 0.943384, 0.632456, 0.359589, 0.795129, 0.983523, 0.841, 0.482241, 0.61151, 0.556987, 0.246489, 0.258235, 0.87155, 0.433148, 0.694615, 0.658671, 0.984038, 0.61421, 0.33806, 0.815241, 0.539621, 0.977415, 0.82646, 0.430315, 0.633469, 0.350096, 0.630504, 0.334125, 0.919803, 0.307272, 0.337716, 0.644142, 0.444452, 0.250146, 0.497466, 0.170974, 0.993792, 0.113174, 0.905625, 0.962882, 0.473052, 0.927356, 0.874466, 0.918066, 0.0378646, 0.00216351, 0.516137, 0.92798, 0.366644, 0.979269, 0.951728, 0.939299, 0.756642, 0.696261, 0.129648, 0.937637, 0.500117, 0.688063, 0.582785, 0.161508, 0.424508, 0.866168, 0.777887, 0.613746, 0.684095, 0.547183, 0.757883, 0.926348, 0.346787, 0.786552, 0.717098, 0.845607, 0.198555, 0.745752, 0.497787, 0.19327, 0.453797, 0.438268, 0.0426972, 0.622592, 0.496309, 0.0136058, 0.229186, 0.689517, 0.333235, 0.160382, 0.793794, 0.750165, 0.19645, 0.718008, 0.125284, 0.21572, 0.589293, 0.973847, 0.0785087, 0.0198053, 0.454311, 0.368018, 0.379007, 0.0581889, 0.344199, 0.814206, 0.481824, 0.155735, 0.692866, 0.191024, 0.553743, 0.826572, 0.897576, 0.992562, 0.56565, 0.254903, 0.670156, 0.252855, 0.218925, 0.178696, 0.140874, 0.525638, 0.116564, 0.806125, 0.958895, 0.308583, 0.204709, 0.0891942, 0.561715, 0.334925, 0.45576, 0.301204, 0.830934, 0.387709, 0.591454, 0.847475, 0.19337, 0.671108, 0.739031, 0.176061, 0.301973, 0.0669643, 0.73126, 0.028597, 0.362926, 0.762928, 0.633278, 0.746555, 0.282621, 0.677689, 0.0583414, 0.444705, 0.293137, 0.854232, 0.795338, 0.665034, 0.195256, 0.234747, 0.150547, 0.58638, 0.449964, 0.901731, 0.12916, 0.167021, 0.5046, 0.749535, 0.706862, 0.221479, 0.292998, 0.151705, 0.433615, 0.087336, 0.954833, 0.473797, 0.839106, 0.572769, 0.641167, 0.78574, 0.736492, 0.448075, 0.608816, 0.743468, 0.0280638, 0.27064, 0.404634, 0.506965, 0.435358, 0.477639, 0.00992643, 0.977332, 0.14674, 0.434715, 0.47873, 0.143352, 0.543109, 0.189631, 0.707195, 0.186987, 0.426414, 0.374762, 0.961331, 0.0627827, 0.507393, 0.538008, 0.715081, 0.0486075, 0.046958, 0.1885, 0.498497, 0.590686, 0.375021, 0.0493965, 0.87902, 0.0749071, 0.559669, 0.743872, 0.23736, 0.435335, 0.161112, 0.295725, 0.070784, 0.601639, 0.373585, 0.490168, 0.60244, 0.685891, 0.788243, 0.761859, 0.43799, 0.897879, 0.557984, 0.154428, 
0.141681, 0.836575, 0.27661, 0.611036, 0.341046, 0.238751, 0.0575928, 0.96499, 0.784873, 0.391325, 0.669215, 0.813261, 0.136233, 0.191541, 0.0882364, 0.264766, 0.645055, 0.344129, 0.0731303, 0.555086, 0.499143, 0.779818, 0.00924445, 0.34994, 0.309598, 0.216979, 0.247651, 0.835235, 0.615547, 0.527414, 0.797394, 0.677455, 0.675774, 0.00242763, 0.443769, 0.542442, 0.480859, 0.296827, 0.818849, 0.480649, 0.967986, 0.848345, 0.867917, 0.0943004, 0.913108, 0.619542, 0.783462, 0.512367, 0.313758, 0.0485785, 0.90419, 0.190018, 0.626179, 0.535686, 0.854101, 0.583291, 0.452416, 0.379689, 0.918079, 0.66777, 0.154128, 0.367591, 0.666413, 0.335948, 0.836923, 0.876434, 0.921662, 0.256027, 0.503321, 0.574414, 0.598992, 0.618266, 0.302425, 0.0550202, 0.391808, 0.352064, 0.566041, 0.128833, 0.563173, 0.743693, 0.593108, 0.399454, 0.674834, 0.205011, 0.875525, 0.772046, 0.765087, 0.80139, 0.530288, 0.146983, 0.791488, 0.348383, 0.530773, 0.85473, 0.686642, 0.0243471, 0.0860245, 0.318361, 0.332882, 0.986241, 0.73623, 0.694229, 0.349744, 0.274173, 0.244774, 0.974007, 0.909, 0.646432, 0.858665, 0.908377, 0.346717, 0.721945, 0.473378, 0.240438, 0.690158, 0.790779, 0.121759, 0.224026, 0.437467, 0.421195, 0.750327, 0.598979, 0.19742, 0.866705, 0.0759297, 0.769428, 0.591641, 0.617373, 0.397108, 0.236631, 0.819696, 0.463099, 0.0384623, 0.56491, 0.799657, 0.409825, 0.0211287, 0.399213, 0.860971, 0.21247, 0.529959, 0.995821, 0.319635, 0.318951, 0.249199, 0.912273, 0.880425, 0.453482, 0.744753, 0.947907, 0.850558, 0.873718, 0.694035, 0.182624, 0.979205, 0.220903, 0.831905, 0.765287, 0.627847, 0.60698, 0.00014169, 0.266254, 0.468853, 0.546685, 0.711686, 0.570707, 0.815501, 0.816111, 0.274253, 0.230299, 0.410759, 0.912284, 0.340372, 0.655273, 0.984131, 0.928951, 0.301123, 0.811709, 0.607374, 0.629956, 0.958786, 0.42334, 0.966065, 0.23172, 0.0514258, 0.729242, 0.33187, 0.0577545, 0.940886, 0.81556, 0.706079, 0.521733, 0.762507, 0.126839, 0.39848, 0.348391, 0.759298, 0.797595, 0.864869, 0.678706, 0.552319, 0.0781707, 0.836225, 0.431585, 0.299173, 0.446646, 0.273999, 0.889, 0.613147, 0.90554, 0.316265, 0.805125, 0.351913, 0.246921, 0.992693, 0.285047, 0.418111, 0.614795, 0.148183, 0.915623, 0.0824796, 0.97153, 0.342076, 0.226939, 0.409704, 0.144162, 0.645093, 0.104499, 0.523058, 0.30061, 0.417962, 0.850426, 0.576325, 0.159152, 0.841744, 0.281478, 0.701127, 0.602403, 0.137689, 0.0968616, 0.138916, 0.925421, 0.142064, 0.568757, 0.0363991, 0.220104, 0.823729, 0.377243, 0.382477, 0.791612, 0.132756, 0.633164, 0.844557, 0.484731, 0.432837, 0.0358545, 0.434492, 0.696509, 0.27217, 0.872712, 0.689914, 0.526976, 0.00845231, 0.717582, 0.646745, 0.964498, 0.260307, 0.460807, 0.799241, 0.495831, 0.534291, 0.337725, 0.0201459, 0.0588205, 0.099626, 0.0445335, 0.631459, 0.421706, 0.737725, 0.15237, 0.522567, 0.354908, 0.462786, 0.960176, 0.749139, 0.0917709, 0.368801, 0.835182, 0.30741, 0.305869, 0.0499054, 0.344192, 0.0774335, 0.345958, 0.374308, 0.469739, 0.181264, 0.129416, 0.110752, 0.121005, 0.478941, 0.965105, 0.107185, 0.804921, 0.918202, 0.948278, 0.625083, 0.69052, 0.714591, 0.525246, 0.51034, 0.47609, 0.776902, 0.851069, 0.18501, 0.703499, 0.337351, 0.596395, 0.0415835, 0.792511, 0.571984, 0.821413, 0.445606, 0.458539, 0.951438, 0.127543, 0.397203, 0.91401, 0.542644, 0.08584, 0.773343, 0.246973, 0.804756, 0.523513, 0.134362, 0.444727, 0.336684, 0.7137, 0.0018896, 0.716948, 0.874143, 0.750324, 0.117239, 0.263621, 0.339174, 0.865795, 0.0901357, 0.157137, 0.220915, 0.000640357, 0.847563, 0.899636, 0.0691022, 0.105871, 0.760859, 
0.976666, 0.909564, 0.193656, 0.737687, 0.811764, 0.0487426, 0.812717, 0.864286, 0.643114, 0.388686, 0.242115, 0.35791, 0.0845, 0.917465, 0.855177, 0.966218, 0.647099, 0.847149, 0.6222, 0.639152, 0.0962149, 0.599481, 0.37997, 0.512591, 0.24068, 0.207296, 0.923443, 0.00410921, 0.114458, 0.284364, 0.653197, 0.427076, 0.718724, 0.481602, 0.175418, 0.581547, 0.695499, 0.0131583, 0.381376, 0.989818, 0.921505, 0.627384, 0.151569, 0.232453, 0.356891, 0.47682, 0.962142, 0.392632, 0.789847, 0.945584, 0.846705, 0.610754, 0.408712, 0.0529858, 0.168715, 0.0349119, 0.782198, 0.175419, 0.433237, 0.746159, 0.607084, 0.198991, 0.249497, 0.337107, 0.944549, 0.921506, 0.991261, 0.559888, 0.316679, 0.301507, 0.239754, 0.0968068, 0.614661, 0.521572, 0.519729, 0.908341, 0.03586, 0.618171, 0.836746, 0.873387, 0.608938, 0.409483, 0.555192, 0.249651, 0.650548, 0.223973, 0.78059, 0.411687, 0.83924, 0.269758, 0.00621065, 0.990223, 0.671403, 0.706867, 0.798772, 0.487375, 0.788655, 0.606103, 0.910993, 0.805561, 0.342278, 0.991981, 0.992517, 0.87053, 0.0522469, 0.570367, 0.235565, 0.168307, 0.957502, 0.341051, 0.984299, 0.944804, 0.701308, 0.208552, 0.46789, 0.0798323, 0.247649, 0.965581, 0.991729, 0.469432, 0.317762, 0.764117, 0.0861203, 0.738733, 0.739078, 0.418193, 0.252833, 0.0136712, 0.194591, 0.0823039, 0.351238, 0.634819, 0.289065, 0.522258, 0.100051, 0.628743, 0.802032, 0.0512145, 0.539622, 0.79265, 0.378523, 0.339629, 0.320432, 0.575917, 0.368186, 0.602909, 0.12064, 0.217229, 0.747264, 0.765453, 0.479258, 0.168903, 0.687945, 0.424234, 0.0182483, 0.535874, 0.440657, 0.759257, 0.525349, 0.119938, 0.261061, 0.0862075, 0.445322, 0.36843, 0.532416, 0.972018, 0.350263, 0.274387, 0.13524, 0.767913, 0.682112, 0.186862, 0.675467, 0.64265, 0.719899, 0.822824, 0.978052, 0.376683, 0.0465045, 0.344499, 0.156657, 0.480702, 0.0107018, 0.0210955, 0.754463, 0.579424, 0.811882, 0.267681, 0.656261, 0.55529, 0.114898, 0.381888, 0.412306, 0.166698, 0.540337, 0.422237, 0.225848, 0.319184, 0.359518, 0.0697977, 0.490755, 0.351974, 0.408113, 0.413704, 0.0233192, 0.502726, 0.0808228, 0.253266, 0.393719, 0.233204, 0.541699, 0.510262, 0.653899, 0.727227, 0.299199, 0.606695, 0.745681, 0.233805, 0.650707, 0.938541, 0.626558, 0.163539, 0.181739, 0.585307, 0.72919, 0.207946, 0.393336, 0.0598465, 0.142842, 0.503608, 0.0107723, 0.769077, 0.73303, 0.557175, 0.174763, 0.446938, 0.254235, 0.276446, 0.340177, 0.467342, 0.246174, 0.319751, 0.0668283, 0.695673, 0.680345, 0.0935564, 0.267785, 0.761973, 0.778107, 0.206745, 0.501926, 0.851717, 0.0171054, 0.58509, 0.44091, 0.168636, 0.922044, 0.398684, 0.85791, 0.329332, 0.0421944, 0.0811428, 0.178988, 0.0217178, 0.867476, 0.0242361, 0.054365, 0.0350767, 0.858391, 0.513896, 0.58669, 0.0422648, 0.660024, 0.892557, 0.618048, 0.167999, 0.374249, 0.61947, 0.234545, 0.301993, 0.664666, 0.307192, 0.830035, 0.16981, 0.647669, 0.210328, 0.55722, 0.725259, 0.227851, 0.746418, 0.143219, 0.578732, 0.912123, 0.627189, 0.951027, 0.0741806, 0.785681, 0.474788, 0.501871, 0.586705, 0.356456, 0.994087, 0.702202, 0.110053, 0.939102, 0.978551, 0.547684, 0.537043, 0.463263, 0.79543, 0.544923, 0.488685, 0.446546, 0.0184627, 0.838136, 0.506632, 0.424576, 0.66903, 0.99525, 0.446845, 0.433408, 0.068933, 0.888118, 0.885535, 0.823443, 0.911199, 0.15379, 0.173262, 0.194629, 0.991104, 0.419665, 0.141868, 0.976054, 0.0904315, 0.481936, 0.933807, 0.812616, 0.31648, 0.707818, 0.211447, 0.391328, 0.393768, 0.208413, 0.0949373, 0.340323, 0.930488, 0.377775, 0.320927, 0.600667, 0.183198, 0.604513, 0.876236, 0.293807, 0.6533, 0.8053, 
0.0456367, 0.79004, 0.519155, 0.940581, 0.539176, 0.360457, 0.845534, 0.230465, 0.0660766, 0.641243, 0.360056, 0.281113, 0.347411, 0.44536, 0.221333, 0.344579, 0.931765, 0.332007, 0.567415, 0.75061, 0.77218, 0.259708, 0.234034, 0.197387, 0.290365, 0.330381, 0.473265, 0.869024, 0.330283, 0.585211, 0.784976, 0.0936646, 0.622822, 0.515882, 0.154393, 0.931172, 0.351259, 0.0384373, 0.348363, 0.183751, 0.45849, 0.368929, 0.00469111, 0.870597, 0.684903, 0.368441, 0.0478761, 0.488639, 0.0835063, 0.952199, 0.643149, 0.892231, 0.216109, 0.751933, 0.18647, 0.390735, 0.198425, 0.941996, 0.757303, 0.723334, 0.706664, 0.0554332, 0.490989, 0.950898, 0.0629882, 0.261767, 0.112804, 0.113924, 0.238037, 0.270354, 0.751243, 0.380492, 0.740241, 0.207205, 0.0989996, 0.84937, 0.488469, 0.708236, 0.238914, 0.598806, 0.724916, 0.398191, 0.144674, 0.906878, 0.157843, 0.297494, 0.412601, 0.794125, 0.477363, 0.379034, 0.00671669, 0.646993, 0.271325, 0.465328, 0.746437, 0.149025, 0.679058, 0.281474, 0.520522, 0.415491, 0.0136646, 0.524842, 0.874952, 0.466775, 0.245805, 0.893021, 0.468606, 0.571214, 0.336392, 0.492457, 0.527156, 0.41838, 0.89118, 0.143941, 0.192362, 0.548482, 0.267326, 0.550028, 0.0937403, 0.460918, 0.928665, 0.391746, 0.334116, 0.845814, 0.45314, 0.563016, 0.343298, 0.508847, 0.54797, 0.720585, 0.995596, 0.469857, 0.797195, 0.0758571, 0.193271, 0.399435, 0.286663, 0.0153666, 0.725576, 0.505342, 0.702736, 0.574938, 0.360447, 0.293658, 0.00267113, 0.90265, 0.0410623, 0.12695, 0.825533, 0.666178, 0.0493994, 0.315431, 0.275716, 0.672728, 0.336946, 0.153213, 0.333597, 0.156937, 0.352507, 0.781166, 0.441395, 0.711237, 0.319494, 0.253406, 0.980385, 0.27169, 0.610252, 0.0315895, 0.173092, 0.294815, 0.24346, 0.501749, 0.671395, 0.174916, 0.269241, 0.271962, 0.490413, 0.91111, 0.347766, 0.612575, 0.0220016, 0.807224, 0.485833, 0.930981, 0.982028, 0.41133, 0.8265, 0.338231, 0.858166, 0.866513, 0.184553, 0.0287946, 0.924813, 0.584187, 0.929221, 0.202744, 0.109128, 0.469215, 0.71036, 0.938545, 0.0344073, 0.425781, 0.128227, 0.876826, 0.251219, 0.615095, 0.308785, 0.985871, 0.202102, 0.762202, 0.989555, 0.110346, 0.78657, 0.322666, 0.425638, 0.4092, 0.929239, 0.409257, 0.916413, 0.0362295, 0.345098, 0.804149, 0.291017, 0.865797, 0.550924, 0.558234, 0.631668, 0.661393, 0.706274, 0.121489, 0.155439, 0.0155989, 0.343656, 0.900521, 0.176187, 0.408318, 0.968061, 0.936298, 0.00851265, 0.877008, 0.228096, 0.959744, 0.514649, 0.133714, 0.0895707, 0.240558, 0.727993, 0.0857289, 0.298125, 0.209947, 0.160823, 0.29274, 0.0399278, 0.957869, 0.603791, 0.221571, 0.799041, 0.736731, 0.484513, 0.196295, 0.808976, 0.320616, 0.308682, 0.0439747, 0.842259, 0.0977955, 0.0833671, 0.503262, 0.851656, 0.479866, 0.950625, 0.411628, 0.219651, 0.522312, 0.976037, 0.383527, 0.691701, 0.9642, 0.161143, 0.0314316, 0.724831, 0.985053, 0.997432, 0.15179, 0.829539, 0.730971, 0.143914, 0.179806, 0.987131, 0.210939, 0.206804, 0.309737, 0.848782, 0.93808, 0.322453, 0.925034, 0.481564, 0.320244, 0.0992068, 0.43445, 0.0556858, 0.734951, 0.0993144, 0.714255, 0.0302214, 0.932355, 0.251016, 0.933741, 0.254321, 0.144415, 0.625023, 0.92362, 0.586254, 0.355336, 0.94009, 0.695213, 0.931397, 0.618968, 0.739586, 0.103737, 0.368076, 0.220176, 0.883225, 0.222019, 0.0677259, 0.303266, 0.968175, 0.552004, 0.115311, 0.0455872, 0.684416, 0.900796, 0.792241, 0.617604, 0.356265, 0.919644, 0.61758, 0.0280784, 0.575976, 0.560994, 0.620322, 0.665619, 0.91024, 0.737454, 0.754261, 0.0779842, 0.520832, 0.974872, 0.19936, 0.766045, 0.212369, 0.449112, 0.532945, 0.52625, 
0.638304, 0.420285, 0.865498, 0.100521, 0.108552, 0.0484256, 0.638022, 0.556459, 0.830696, 0.665436, 0.357766, 0.462635, 0.858165, 0.0240363, 0.805024, 0.632522, 0.616207, 0.809035, 0.831974, 0.919004, 0.168812, 0.133027, 0.211103, 0.0454932, 0.179853, 0.964951, 0.260681, 0.856561, 0.50412, 0.200919, 0.713513, 0.963631, 0.148498, 0.455128, 0.470364, 0.374871, 0.834743, 0.0342008, 0.627801, 0.676421, 0.229822, 0.947038, 0.197101, 0.813979, 0.558724, 0.677722, 0.61336, 0.842315, 0.505677, 0.203234, 0.944472, 0.273246, 0.472616, 0.731367, 0.385469, 0.038378, 0.0210421, 0.8101, 0.771924, 0.724295, 0.69712, 0.231483, 0.766871, 0.22118, 0.952272, 0.793797, 0.344759, 0.848569, 0.619039, 0.381783, 0.515454, 0.212298, 0.864574, 0.102096, 0.759013, 0.0348328, 0.530797, 0.854771, 0.404852, 0.750551, 0.406793, 0.379375, 0.555641, 0.157567, 0.193177, 0.845328, 0.157988, 0.592267, 0.832538, 0.464178, 0.227504, 0.0803521, 0.49972, 0.993602, 0.782846, 0.729766, 0.306847, 0.769817, 0.641094, 0.547556, 0.166469, 0.997491, 0.330004, 0.697845, 0.50871, 0.937293, 0.284859, 0.164627, 0.446595, 0.249429, 0.12309, 0.334094, 0.437629, 0.515888, 0.098429, 0.52362, 0.880512, 0.736821, 0.599419, 0.551261, 0.534756, 0.863241, 0.640294, 0.376647, 0.702365, 0.311803, 0.762398, 0.439358, 0.601853, 0.165669, 0.9539, 0.896226, 0.461351, 0.383213, 0.455631, 0.0290472, 0.116985, 0.48155, 0.554937, 0.942049, 0.712137, 0.710326, 0.804921, 0.181305, 0.606548, 0.389173, 0.368416, 0.542916, 0.666213, 0.0234955, 0.190332, 0.907848, 0.595852, 0.57392, 0.633509, 0.598628, 0.720329, 0.0172913, 0.814983, 0.656013, 0.762097, 0.571442, 0.557574, 0.366098, 0.307833, 0.378715, 0.365035, 0.227814, 0.471622, 0.0541737, 0.609736, 0.642234, 0.866126, 0.324962, 0.226084, 0.89159, 0.480385, 0.508564, 0.93119, 0.0433976, 0.713315, 0.914745, 0.798368, 0.780862, 0.969165, 0.175906, 0.19473, 0.61445, 0.934252, 0.0185282, 0.24897, 0.277972, 0.723536, 0.566537, 0.0233552, 0.923078, 0.581616, 0.316423, 0.24501, 0.698762, 0.69784, 0.424567, 0.189484, 0.387559, 0.932004, 0.966823, 0.0538948, 0.412291, 0.725883, 0.866159, 0.215323, 0.655808, 0.156526, 0.277802, 0.434198, 0.208185, 0.627663, 0.837767, 0.698486, 0.868006, 0.557899, 0.532694, 0.890736, 0.338171, 0.95361, 0.731255, 0.51718, 0.097908, 0.604915, 0.387281, 0.873079, 0.709184, 0.705164, 0.0409565, 0.441308, 0.321482, 0.494341, 0.670925, 0.426667, 0.408377, 0.395348, 0.584804, 0.27815, 0.869304, 0.4718, 0.215857, 0.620928, 0.212542, 0.478639, 0.218803, 0.291689, 0.128241, 0.993284, 0.92956, 0.675266, 0.836845, 0.646884, 0.457059, 0.154723, 0.381244, 0.725699, 0.0512737, 0.283163, 0.256986, 0.120155, 0.105429, 0.841173, 0.817785, 0.496723, 0.963866, 0.549727, 0.919526, 0.215732, 0.394426, 0.517125, 0.531066, 0.861426, 0.557868, 0.0129223, 0.140279, 0.103462, 0.674733, 0.8508, 0.143007, 0.594329, 0.310337, 0.469411, 0.662791, 0.199112, 0.982426, 0.98463, 0.962226, 0.937973, 0.16067, 0.245988, 0.104573, 0.182241, 0.68625, 0.069606, 0.465912, 0.231955, 0.466497, 0.855007, 0.360995, 0.824617, 0.770135, 0.913424, 0.13453, 0.435888, 0.550898, 0.534564, 0.0788811, 0.972792, 0.855578, 0.582295, 0.0749266, 0.0526137, 0.935008, 0.687653, 0.00945538, 0.706855, 0.667849, 0.517686, 0.418906, 0.789432, 0.8065, 0.629954, 0.985267, 0.688833, 0.00845489, 0.0939969, 0.272987, 0.510031, 0.562054, 0.933702, 0.415083, 0.782134, 0.897633, 0.633468, 0.994013, 0.580576, 0.00873444, 0.832382, 0.986717, 0.743726, 0.582978, 0.73837, 0.0521541, 0.727402, 0.476097, 0.480672, 0.953124, 0.866351, 0.602734, 0.605534, 0.704575, 
[... generated test-fixture data elided: 21 lines of comma-separated pseudo-random float values, uniformly distributed in [0, 1) ...]
0.90256, 0.880377, 0.237701, 0.833076, 0.464439, 0.961118, 0.315651, 0.248011, 0.197734, 0.743461, 0.894968, 0.888755, 0.191371, 0.0606955, 0.779843, 0.67044, 0.000701003, 0.847468, 0.110179, 0.345346, 0.596703, 0.366331, 0.491411, 0.43866, 0.88716, 0.942445, 0.341056, 0.374603, 0.189615, 0.734346, 0.477594, 0.306855, 0.314374, 0.905783, 0.545652, 0.525045, 0.897234, 0.137442, 0.0158416, 0.815698, 0.516642, 0.744205, 0.750442, 0.409368, 0.748892, 0.0140665, 0.0825914, 0.450686, 0.518803, 0.0387293, 0.995793, 0.677971, 0.184414, 0.490881, 0.564195, 0.130463, 0.274728, 0.0517014, 0.795809, 0.496882, 0.0917907, 0.961887, 0.568578, 0.0785582, 0.10867, 0.660636, 0.919073, 0.0513685, 0.197261, 0.995502, 0.211636, 0.42973, 0.178131, 0.943105, 0.241031, 0.740778, 0.736333, 0.0758931, 0.471215, 0.691511, 0.617629, 0.982214, 0.388555, 0.372131, 0.402224, 0.169748, 0.996091, 0.854671, 0.0438401, 0.344562, 0.42399, 0.170027, 0.853375, 0.0901204, 0.0216403, 0.237151, 0.119388, 0.597195, 0.312474, 0.670449, 0.92445, 0.661728, 0.530687, 0.865478, 0.848771, 0.933046, 0.610892, 0.649236, 0.254884, 0.0716313, 0.312273, 0.0567216, 0.616856, 0.248198, 0.549667, 0.225755, 0.725631, 0.397707, 0.622773, 0.362331, 0.68781, 0.0602003, 0.826523, 0.724918, 0.734515, 0.391861, 0.733534, 0.178399, 0.564966, 0.591202, 0.0472756, 0.569476, 0.301053, 0.445811, 0.835684, 0.193328, 0.56886, 0.983536, 0.995492, 0.931116, 0.591528, 0.292752, 0.514855, 0.600805, 0.467793, 0.241835, 0.764401, 0.563282, 0.863718, 0.144303, 0.231504, 0.00520572, 0.278638, 0.19954, 0.379569, 0.0475114, 0.104026, 0.763559, 0.846914, 0.85017, 0.14415, 0.750602, 0.272317, 0.254685, 0.167263, 0.131831, 0.663505, 0.36003, 0.334448, 0.362862, 0.60826, 0.057726, 0.0704663, 0.941601, 0.341211, 0.54861, 0.232092, 0.661149, 0.605109, 0.339767, 0.535967, 0.961068, 0.175784, 0.992689, 0.900477, 0.284569, 0.119902, 0.354833, 0.176419, 0.084993, 0.230209, 0.934694, 0.0071083, 0.257565, 0.961404, 0.451827, 0.673569, 0.190417, 0.149317, 0.592344, 0.45611, 0.609225, 0.260743, 0.753217, 0.776916, 0.722043, 0.73019, 0.281829, 0.82717, 0.575628, 0.213365, 0.00978711, 0.378466, 0.349125, 0.244571, 0.471582, 0.803671, 0.997812, 0.0521185, 0.612033, 0.449384, 0.512475, 0.722084, 0.0717956, 0.458755, 0.760821, 0.844698, 0.374668, 0.400767, 0.709865, 0.113621, 0.714132, 0.970394, 0.707974, 0.883892, 0.290158, 0.0600632, 0.639521, 0.0197071, 0.0703243, 0.540964, 0.490692, 0.540472, 0.4527, 0.43367, 0.470396, 0.639226, 0.454894, 0.161796, 0.283119, 0.389119, 0.99098, 0.354374, 0.306925, 0.571848, 0.987829, 0.654278, 0.942796, 0.626379, 0.425776, 0.10538, 0.235674, 0.302389, 0.38002, 0.788549, 0.135715, 0.919757, 0.288126, 0.499033, 0.454735, 0.314993, 0.564696, 0.755851, 0.311844, 0.88601, 0.624923, 0.597122, 0.6463, 0.599467, 0.618822, 0.0339805, 0.535648, 0.505867, 0.0283788, 0.927671, 0.32945, 0.349241, 0.118857, 0.802099, 0.479279, 0.343102, 0.0556478, 0.0645085, 0.138695, 0.501754, 0.601575, 0.465747, 0.645858, 0.0488047, 0.567975, 0.634429, 0.095771, 0.263161, 0.125117, 0.896023, 0.416766, 0.0760043, 0.904781, 0.503658, 0.596885, 0.917895, 0.446839, 0.845408, 0.347075, 0.726262, 0.0325798, 0.596209, 0.67089, 0.420953, 0.364253, 0.566215, 0.0240224, 0.299557, 0.755112, 0.518478, 0.172691, 0.440801, 0.993724, 0.403926, 0.686368, 0.0119942, 0.0752334, 0.569332, 0.586567, 0.84281, 0.690945, 0.964742, 0.601104, 0.734396, 0.548406, 0.764456, 0.805043, 0.51092, 0.760391, 0.173881, 0.0235308, 0.356031, 0.53105, 0.086914, 0.664032, 0.571966, 0.92573, 0.59094, 0.927327, 
0.692028, 0.842902, 0.67167, 0.276918, 0.467458, 0.752591, 0.9499, 0.467204, 0.447911, 0.474744, 0.388578, 0.557763, 0.883731, 0.794943, 0.429311, 0.63033, 0.709716, 0.0540243, 0.458512, 0.54981, 0.388782, 0.634203, 0.68688, 0.989504, 0.904139, 0.661675, 0.749267, 0.372069, 0.0318962, 0.4609, 0.517865, 0.886023, 0.787396, 0.0912821, 0.229197, 0.2815, 0.770678, 0.3427, 0.390274, 0.810432, 0.265945, 0.837044, 0.366723, 0.549445, 0.909393, 0.654305, 0.68561, 0.582384, 0.0771761, 0.68467, 0.246603, 0.516234, 0.679899, 0.0538224, 0.35085, 0.541939, 0.257065, 0.523613, 0.750268, 0.281682, 0.761999, 0.187258, 0.935161, 0.726177, 0.983635, 0.213881, 0.837402, 0.0311317, 0.713501, 0.0597453, 0.069166, 0.616787, 0.237867, 0.898007, 0.291914, 0.639359, 0.344517, 0.662031, 0.990589, 0.746652, 0.299035, 0.282434, 0.592909, 0.269854, 0.598541, 0.60132, 0.93501, 0.279348, 0.952177, 0.3203, 0.237311, 0.449025, 0.112849, 0.347608, 0.603105, 0.0826972, 0.0645104, 0.654211, 0.787298, 0.183199, 0.136849, 0.421642, 0.457025, 0.177792, 0.292448, 0.622044, 0.450702, 0.622207, 0.74748, 0.111626, 0.540305, 0.357124, 0.112576, 0.0920041, 0.901445, 0.591537, 0.120162, 0.277792, 0.85936, 0.644468, 0.964015, 0.121913, 0.46483, 0.90891, 0.317464, 0.450741, 0.135112, 0.64213, 0.491018, 0.896948, 0.0406019, 0.380772, 0.500875, 0.40253, 0.150461, 0.526617, 0.431422, 0.953507, 0.872597, 0.60421, 0.581048, 0.649227, 0.351248, 0.0962856, 0.527706, 0.62949, 0.297496, 0.424442, 0.211807, 0.191928, 0.978057, 0.659203, 0.424782, 0.321244, 0.309401, 0.155644, 0.018394, 0.981963, 0.790722, 0.315559, 0.132894, 0.540455, 0.673194, 0.166135, 0.963324, 0.991482, 0.606148, 0.641684, 0.321732, 0.357962, 0.419031, 0.524566, 0.121163, 0.167526, 0.419093, 0.773295, 0.591982, 0.177544, 0.221321, 0.79453, 0.187352, 0.945812, 0.230775, 0.759209, 0.223633, 0.782123, 0.762374, 0.554152, 0.758007, 0.646692, 0.164805, 0.439458, 0.668481, 0.394392, 0.310737, 0.520953, 0.23468, 0.329919, 0.42598, 0.826062, 0.677959, 0.270117, 0.964114, 0.835354, 0.184171, 0.94592, 0.538805, 0.21293, 0.674282, 0.613245, 0.627513, 0.657674, 0.272839, 0.0842785, 0.950948, 0.373451, 0.439554, 0.159792, 0.0401004, 0.0151278, 0.903084, 0.297656, 0.379113, 0.211713, 0.0208064, 0.14493, 0.347375, 0.385924, 0.00268695, 0.12542, 0.739048, 0.676817, 0.736418, 0.227088, 0.412102, 0.198277, 0.410965, 0.91409, 0.717919, 0.215226, 0.667909, 0.442317, 0.10207, 0.381269, 0.427142, 0.333933, 0.245708, 0.329177, 0.461323, 0.664618, 0.0679301, 0.544018, 0.674657, 0.178317, 0.459781, 0.89711, 0.276327, 0.784713, 0.27331, 0.487891, 0.180568, 0.96831, 0.323182, 0.0866663, 0.512143, 0.919757, 0.393651, 0.789092, 0.29094, 0.188134, 0.560856, 0.955586, 0.0718635, 0.0269404, 0.203678, 0.983172, 0.51758, 0.968003, 0.331021, 0.830203, 0.0181279, 0.0141556, 0.697402, 0.0525922, 0.514023, 0.508186, 0.256969, 0.933136, 0.200218, 0.0110242, 0.860594, 0.0303397, 0.157889, 0.392822, 0.374512, 0.400553, 0.270855, 0.568008, 0.558921, 0.956101, 0.744385, 0.719175, 0.916029, 0.884867, 0.247502, 0.073517, 0.780392, 0.279199, 0.827558, 0.339382, 0.584516, 0.79727, 0.0643695, 0.0317724, 0.435497, 0.180505, 0.76243, 0.195217, 0.82011, 0.101829, 0.0997807, 0.953348, 0.15367, 0.104359, 0.487753, 0.343762, 0.654727, 0.132477, 0.999045, 0.406191, 0.598411, 0.860789, 0.306506, 0.138799, 0.339544, 0.527326, 0.564002, 0.97529, 0.0840793, 0.471393, 0.818627, 0.0416602, 0.460369, 0.651059, 0.369103, 0.943312, 0.53025, 0.120124, 0.658747, 0.284755, 0.0118443, 0.647365, 0.619388, 0.0555024, 0.031167, 0.836224, 
0.107065, 0.655598, 0.0652551, 0.949251, 0.0430759, 0.348073, 0.468649, 0.282215, 0.823767, 0.621657, 0.301013, 0.447086, 0.569542, 0.477536, 0.276875, 0.473196, 0.646234, 0.140407, 0.661602, 0.48413, 0.640481, 0.1237, 0.807747, 0.34507, 0.155209, 0.724469, 0.129897, 0.261054, 0.797226, 0.141759, 0.660291, 0.178049, 0.635802, 0.642498, 0.335264, 0.0986802, 0.560285, 0.258029, 0.408848, 0.963392, 0.127648, 0.522938, 0.343361, 0.0591521, 0.538662, 0.723705, 0.143253, 0.406725, 0.762505, 0.858145, 0.652872, 0.0751902, 0.474162, 0.456277, 0.420937, 0.770795, 0.574915, 0.536308, 0.594785, 0.40898, 0.288524, 0.871027, 0.942149, 0.359192, 0.677768, 0.464186, 0.863689, 0.403723, 0.152206, 0.0318572, 0.206376, 0.383112, 0.939003, 0.279417, 0.864818, 0.921036, 0.530947, 0.255451, 0.0888463, 0.990938, 0.118905, 0.692524, 0.256951, 0.289297, 0.747234, 0.592557, 0.135735, 0.703777, 0.207975, 0.274076, 0.793419, 0.416538, 0.679037, 0.569987, 0.425747, 0.448826, 0.807602, 0.338074, 0.925081, 0.120731, 0.206021, 0.102171, 0.131164, 0.779013, 0.883488, 0.326324, 0.21583, 0.147292, 0.430501, 0.599133, 0.772507, 0.327405, 0.226773, 0.384921, 0.881793, 0.238704, 0.345292, 0.255344, 0.91225, 0.181126, 0.550924, 0.132555, 0.510167, 0.95588, 0.940686, 0.436938, 0.789056, 0.0285278, 0.966325, 0.352782, 0.213606, 0.282055, 0.961657, 0.0717836, 0.150889, 0.0156478, 0.382351, 0.732958, 0.973349, 0.805664, 0.566904, 0.594289, 0.286691, 0.700367, 0.182755, 0.647849, 0.162256, 0.478529, 0.963923, 0.711276, 0.863389, 0.882019, 0.820617, 0.0506087, 0.794141, 0.714298, 0.573767, 0.567374, 0.30751, 0.807773, 0.192612, 0.906685, 0.711471, 0.35731, 0.619318, 0.732762, 0.400525, 0.828465, 0.862046, 0.53438, 0.819672, 0.937958, 0.360399, 0.997121, 0.717269, 0.685371, 0.47348, 0.806581, 0.727519, 0.315085, 0.601397, 0.269334, 0.568207, 0.027692, 0.703402, 0.170218, 0.500086, 0.607612, 0.573989, 0.0406072, 0.183721, 0.861845, 0.70674, 0.117189, 0.176553, 0.317406, 0.904584, 0.333126, 0.164825, 0.313377, 0.37939, 0.118558, 0.436089, 0.251973, 0.253981, 0.108738, 0.219032, 0.716901, 0.172093, 0.616285, 0.264775, 0.23992, 0.896254, 0.295272, 0.791905, 0.656538, 0.0384521, 0.196358, 0.736726, 0.415566, 0.240984, 0.70861, 0.610987, 0.733977, 0.124111, 0.0136321, 0.919147, 0.0704665, 0.843662, 0.964286, 0.506136, 0.692433, 0.935884, 0.990476, 0.22058, 0.875116, 0.897394, 0.111069, 0.534619, 0.581804, 0.846851, 0.611967, 0.499796, 0.429545, 0.798909, 0.315004, 0.942182, 0.428862, 0.614155, 0.966451, 0.974008, 0.441044, 0.859631, 0.444931, 0.918694, 0.247554, 0.201709, 0.925149, 0.778544, 0.42768, 0.613957, 0.688951, 0.873456, 0.504223, 0.845928, 0.986889, 0.539666, 0.656433, 0.749198, 0.584904, 0.40616, 0.652864, 0.129738, 0.997156, 0.548049, 0.866048, 0.0787458, 0.0794804, 0.728205, 0.884015, 0.212964, 0.0511247, 0.207538, 0.336783, 0.421898, 0.793931, 0.6185, 0.676942, 0.727137, 0.832176, 0.960208, 0.228816, 0.578622, 0.0726547, 0.863936, 0.728756, 0.895077, 0.713311, 0.109769, 0.704642, 0.95281, 0.839504, 0.945565, 0.190666, 0.933363, 0.632555, 0.550899, 0.802728, 0.057105, 0.894612, 0.38568, 0.135467, 0.0176021, 0.327382, 0.358374, 0.447524, 0.228588, 0.811691, 0.661628, 0.546452, 0.350899, 0.00507256, 0.471622, 0.907124, 0.628432, 0.96551, 0.190372, 0.671076, 0.714566, 0.4601, 0.701459, 0.927465, 0.628752, 0.142717, 0.412611, 0.730583, 0.343111, 0.34992, 0.440681, 0.642637, 0.222177, 0.431251, 0.50514, 0.125927, 0.754476, 0.00296051, 0.568277, 0.900355, 0.326807, 0.138109, 0.836854, 0.667035, 0.540292, 0.112458, 0.713526, 
0.800975, 0.337264, 0.29358, 0.593161, 0.265627, 0.0889931, 0.857896, 0.330727, 0.841462, 0.908689, 0.60503, 0.477598, 0.298034, 0.911471, 0.608683, 0.398816, 0.983949, 0.240758, 0.606816, 0.602618, 0.160728, 0.352549, 0.718983, 0.462568, 0.805229, 0.252996, 0.73863, 0.000422414, 0.812615, 0.724826, 0.443087, 0.290496, 0.393446, 0.992466, 0.0837757, 0.00583894, 0.930767, 0.536693, 0.811747, 0.0511222, 0.708225, 0.901528, 0.2509, 0.444992, 0.397405, 0.542782, 0.122836, 0.861212, 0.249062, 0.174464, 0.050134, 0.378451, 0.571994, 0.881506, 0.0499796, 0.787135, 0.22121, 0.645498, 0.436943, 0.787478, 0.460505, 0.68293, 0.991135, 0.88287, 0.722825, 0.796469, 0.5201, 0.956356, 0.67584, 0.693191, 0.730392, 0.121242, 0.709526, 0.544418, 0.262852, 0.487887, 0.514067, 0.0911956, 0.0860523, 0.474676, 0.585426, 0.310218, 0.886219, 0.290739, 0.0282541, 0.656092, 0.928931, 0.941969, 0.10195, 0.818213, 0.781855, 0.433706, 0.311066, 0.339732, 0.496567, 0.65566, 0.485256, 0.497272, 0.364859, 0.126097, 0.0740863, 0.405792, 0.0969492, 0.4864, 0.210317, 0.711933, 0.0635027, 0.0361123, 0.493013, 0.946083, 0.0066597, 0.546463, 0.0163349, 0.0228225, 0.977677, 0.0772557, 0.306093, 0.310278, 0.665144, 0.383851, 0.330056, 0.883021, 0.689747, 0.546534, 0.0734483, 0.411925, 0.797349, 0.228309, 0.233533, 0.796033, 0.0030193, 0.513364, 0.368626, 0.930374, 0.725822, 0.0219896, 0.299541, 0.140862, 0.315275, 0.783451, 0.944752, 0.606796, 0.873046, 0.326616, 0.371542, 0.855865, 0.425102, 0.861019, 0.352546, 0.525911, 0.751078, 0.668595, 0.953006, 0.759148, 0.366194, 0.28042, 0.972413, 0.168339, 0.757616, 0.166174, 0.777926, 0.093088, 0.329875, 0.255593, 0.359008, 0.111147, 0.542383, 0.859861, 0.772084, 0.377405, 0.117666, 0.391373, 0.132639, 0.854042, 0.286216, 0.35719, 0.798284, 0.360051, 0.795723, 0.465997, 0.561341, 0.460214, 0.769956, 0.749655, 0.152443, 0.183006, 0.242389, 0.0737898, 0.68665, 0.192032, 0.263023, 0.616373, 0.857243, 0.640726, 0.0621201, 0.162099, 0.492626, 0.53992, 0.299509, 0.154796, 0.779567, 0.290502, 0.00579967, 0.124107, 0.472051, 0.000434841, 0.235547, 0.712911, 0.488378, 0.26475, 0.814806, 0.0754994, 0.406514, 0.683893, 0.548816, 0.387705, 0.331393, 0.411969, 0.0160922, 0.0911969, 0.815596, 0.432159, 0.661646, 0.228131, 0.16644, 0.540622, 0.806374, 0.91399, 0.0151226, 0.327789, 0.486473, 0.0520317, 0.489694, 0.369283, 0.577062, 0.590977, 0.745221, 0.882706, 0.723975, 0.866051, 0.766082, 0.480659, 0.737826, 0.189252, 0.618888, 0.359069, 0.790591, 0.406381, 0.910825, 0.252672, 0.731272, 0.13744, 0.412974, 0.733391, 0.347641, 0.871649, 0.626397, 0.251238, 0.814955, 0.884079, 0.954014, 0.38571, 0.0546944, 0.098018, 0.349328, 0.865735, 0.68303, 0.25361, 0.50894, 0.152982, 0.42769, 0.476476, 0.411445, 0.739538, 0.20844, 0.908736, 0.946669, 0.957216, 0.830711, 0.285214, 0.430605, 0.123387, 0.366716, 0.0966705, 0.171379, 0.140106, 0.00523692, 0.483994, 0.836323, 0.000203898, 0.847099, 0.199114, 0.0786781, 0.498113, 0.472991, 0.430339, 0.717644, 0.771445, 0.933048, 0.650317, 0.592575, 0.69774, 0.841689, 0.470272, 0.255145, 0.642192, 0.683545, 0.129527, 0.7225, 0.143363, 0.700158, 0.573115, 0.496788, 0.672769, 0.528652, 0.516476, 0.797372, 0.205896, 0.624819, 0.940306, 0.799173, 0.89588, 0.981444, 0.294351, 0.285886, 0.871644, 0.728298, 0.525132, 0.696356, 0.328752, 0.0683865, 0.107999, 0.448041, 0.0561753, 0.256848, 0.845314, 0.000561138, 0.024717, 0.214498, 0.0110802, 0.842329, 0.191831, 0.55362, 0.577487, 0.47658, 0.926408, 0.825722, 0.734621, 0.517285, 0.18766, 0.531805, 0.932464, 0.867263, 0.870883, 
0.0587472, 0.339046, 0.788978, 0.519536, 0.980831, 0.917215, 0.314639, 0.345936, 0.737894, 0.163836, 0.911963, 0.306258, 0.853049, 0.819763, 0.834382, 0.118071, 0.0517382, 0.741186, 0.218079, 0.718497, 0.216362, 0.358845, 0.0262473, 0.573432, 0.877798, 0.434158, 0.689208, 0.28387, 0.631236, 0.0276151, 0.569504, 0.451447, 0.569556, 0.142046, 0.125314, 0.685423, 0.874892, 0.661033, 0.0480938, 0.678272, 0.225373, 0.420258, 0.940519, 0.991653, 0.22264, 0.0383297, 0.087134, 0.72526, 0.221824, 0.0761745, 0.109705, 0.455931, 0.181691, 0.958014, 0.829454, 0.840142, 0.822616, 0.41962, 0.278158, 0.911439, 0.83333, 0.528914, 0.972686, 0.057116, 0.885617, 0.426003, 0.0722767, 0.205997, 0.460486, 0.750409, 0.467783, 0.622696, 0.218373, 0.443269, 0.325559, 0.152971, 0.971439, 0.109743, 0.14042, 0.370876, 0.936533, 0.572726, 0.981721, 0.710409, 0.666631, 0.783337, 0.279378, 0.82056, 0.5105, 0.383959, 0.929276, 0.158211, 0.0924422, 0.448461, 0.995812, 0.199213, 0.340225, 0.458053, 0.434526, 0.143288, 0.0727889, 0.601088, 0.174134, 0.742817, 0.25083, 0.148017, 0.816632, 0.283205, 0.684135, 0.708765, 0.656708, 0.231398, 0.101216, 0.948646, 0.974955, 0.13018, 0.249274, 0.251885, 0.4138, 0.0993538, 0.870605, 0.607726, 0.53469, 0.425744, 0.534512, 0.422424, 0.707954, 0.806989, 0.833396, 0.612742, 0.129842, 0.818643, 0.784665, 0.328811, 0.254497, 0.425704, 0.198761, 0.00636503, 0.331532, 0.0979688, 0.217644, 0.631499, 0.153504, 0.47421, 0.000996381, 0.643052, 0.421039, 0.886781, 0.469619, 0.599094, 0.519344, 0.829217, 0.295929, 0.459046, 0.929458, 0.482433, 0.454514, 0.781803, 0.701602, 0.400688, 0.289321, 0.834496, 0.563524, 0.858043, 0.797295, 0.185198, 0.002268, 0.792045, 0.137388, 0.201848, 0.206613, 0.576547, 0.281215, 0.969594, 0.729729, 0.811817, 0.368265, 0.305238, 0.140258, 0.817175, 0.580202, 0.146454, 0.0976395, 0.0116953, 0.692592, 0.0456688, 0.787763, 0.911869, 0.190774, 0.0421205, 0.742239, 0.215383, 0.634856, 0.217612, 0.976046, 0.629438, 0.575507, 0.991286, 0.739136, 0.515219, 0.152072, 0.06971, 0.671442, 0.550124, 0.315606, 0.669713, 0.579424, 0.0432335, 0.220854, 0.386082, 0.900995, 0.261122, 0.430265, 0.161644, 0.129911, 0.724532, 0.232359, 0.169264, 0.17024, 0.300017, 0.0453636, 0.588913, 0.901443, 0.697316, 0.847893, 0.0753857, 0.427729, 0.968554, 0.813486, 0.86194, 0.752806, 0.900089, 0.857901, 0.486216, 0.906749, 0.107639, 0.783365, 0.542632, 0.811905, 0.359218, 0.896544, 0.966524, 0.161207, 0.205371, 0.821549, 0.521102, 0.830914, 0.40505, 0.522809, 0.126623, 0.604354, 0.045568, 0.210009, 0.580072, 0.0603523, 0.0196878, 0.248134, 0.901776, 0.80374, 0.344628, 0.926973, 0.0793407, 0.476877, 0.909748, 0.574286, 0.192875, 0.89499, 0.549265, 0.122798, 0.202247, 0.754604, 0.627077, 0.489349, 0.471488, 0.362998, 0.510526, 0.285607, 0.292884, 0.857883, 0.818358, 0.998242, 0.986864, 0.393744, 0.701509, 0.308962, 0.787377, 0.310688, 0.551803, 0.439958, 0.427111, 0.430914, 0.125983, 0.955682, 0.506946, 0.475104, 0.42386, 0.743888, 0.254028, 0.393156, 0.79219, 0.0677401, 0.586007, 0.594929, 0.0176444, 0.0772566, 0.320507, 0.992986, 0.207901, 0.528106, 0.136103, 0.191147, 0.708463, 0.328756, 0.935148, 0.0558156, 0.10463, 0.698945, 0.1983, 0.0516235, 0.976899, 0.190763, 0.506738, 0.903816, 0.470891, 0.579311, 0.459764, 0.870779, 0.467084, 0.202093, 0.382113, 0.758982, 0.543233, 0.646937, 0.275654, 0.4393, 0.333932, 0.0695708, 0.156887, 0.721319, 0.984171, 0.252371, 0.983907, 0.775737, 0.0589212, 0.445042, 0.461394, 0.466213, 0.373344, 0.930697, 0.406812, 0.662108, 0.122191, 0.247269, 0.258097, 
0.0407154, 0.855269, 0.235527, 0.876875, 0.805798, 0.0807154, 0.960267, 0.689527, 0.920841, 0.452028, 0.313812, 0.507496, 0.101225, 0.765568, 0.952103, 0.210943, 0.792417, 0.812604, 0.530017, 0.117172, 0.413165, 0.65056, 0.774613, 0.0447885, 0.16903, 0.413123, 0.5215, 0.463947, 0.454876, 0.409058, 0.839745, 0.048039, 0.922402, 0.865291, 0.903217, 0.825776, 0.258469, 0.306235, 0.280674, 0.499215, 0.0321913, 0.43813, 0.648241, 0.863866, 0.961258, 0.872787, 0.965832, 0.280185, 0.867821, 0.333726, 0.546419, 0.560417, 0.93491, 0.710542, 0.261666, 0.798367, 0.168999, 0.920242, 0.726819, 0.377246, 0.395024, 0.274166, 0.264527, 0.20049, 0.359811, 0.804623, 0.00360393, 0.73255, 0.771133, 0.170565, 0.68284, 0.331344, 0.706333, 0.462146, 0.469002, 0.305191, 0.539346, 0.233156, 0.404299, 0.942348, 0.245893, 0.373266, 0.47572, 0.421276, 0.307568, 0.854683, 0.774993, 0.945097, 0.829606, 0.405773, 0.87252, 0.176255, 0.87888, 0.279033, 0.506288, 0.510867, 0.853068, 0.0954172, 0.0451458, 0.0464311, 0.0171631, 0.0558304, 0.65644, 0.343285, 0.537229, 0.456696, 0.749976, 0.0701074, 0.0709438, 0.790112, 0.47651, 0.491217, 0.568951, 0.87156, 0.176341, 0.40739, 0.495322, 0.275302, 0.770716, 0.278422, 0.0661859, 0.905895, 0.362887, 0.899367, 0.921387, 0.0564634, 0.573581, 0.156361, 0.0436128, 0.108496, 0.831278, 0.815231, 0.0652065, 0.40572, 0.295175, 0.548291, 0.517641, 0.536337, 0.686713, 0.413358, 0.130384, 0.0510004, 0.582349, 0.747402, 0.307916, 0.353884, 0.462563, 0.542269, 0.207305, 0.173616, 0.291938, 0.634112, 0.907511, 0.0626015, 0.879159, 0.429013, 0.504899, 0.934654, 0.116295, 0.136615, 0.31684, 0.58602, 0.458877, 0.368319, 0.682573, 0.372163, 0.178365, 0.78596, 0.271155, 0.742319, 0.621476, 0.111365, 0.34581, 0.831871, 0.500833, 0.507965, 0.572682, 0.0626494, 0.0098932, 0.233187, 0.884227, 0.110608, 0.665661, 0.691152, 0.136284, 0.178324, 0.858383, 0.428119, 0.864211, 0.707418, 0.657407, 0.381127, 0.189932, 0.312904, 0.0789875, 0.900363, 0.283227, 0.904997, 0.116342, 0.708026, 0.49802, 0.670096, 0.299116, 0.884948, 0.470176, 0.774387, 0.642584, 0.305323, 0.804183, 0.888465, 0.818885, 0.410525, 0.425071, 0.726494, 0.430591, 0.709948, 0.412813, 0.323481, 0.208116, 0.777231, 0.116319, 0.332237, 0.0846122, 0.770269, 0.352339, 0.601469, 0.556197, 0.733306, 0.0663171, 0.862323, 0.544184, 0.0272064, 0.165657, 0.788109, 0.673075, 0.315027, 0.159948, 0.506468, 0.652056, 0.000920887, 0.215675, 0.464952, 0.743509, 0.351673, 0.602006, 0.367186, 0.177594, 0.941535, 0.306144, 0.378103, 0.212491, 0.997629, 0.80487, 0.127841, 0.619001, 0.833757, 0.697103, 0.0588221, 0.7419, 0.792471, 0.832259, 0.036171, 0.468769, 0.389836, 0.255074, 0.196213, 0.736734, 0.478865, 0.723545, 0.5987, 0.291994, 0.49389, 0.121163, 0.592705, 0.65, 0.34932, 0.369126, 0.0713003, 0.122628, 0.844854, 0.635947, 0.374196, 0.358014, 0.481653, 0.700881, 0.0205248, 0.13835, 0.807291, 0.735876, 0.759992, 0.472683, 0.302549, 0.787, 0.225814, 0.185531, 0.231139, 0.935187, 0.403908, 0.64596, 0.34262, 0.357467, 0.10672, 0.305625, 0.52338, 0.827492, 0.955765, 0.790972, 0.177031, 0.927101, 0.810977, 0.499514, 0.322676, 0.57625, 0.372786, 0.382195, 0.988088, 0.618742, 0.425023, 0.673301, 0.241715, 0.174997, 0.5439, 0.170907, 0.349194, 0.820161, 0.233589, 0.318476, 0.197934, 0.699364, 0.637542, 0.465011, 0.0187581, 0.190163, 0.756208, 0.472132, 0.4868, 0.723026, 0.379682, 0.365346, 0.894813, 0.751082, 0.480833, 0.928701, 0.128808, 0.830305, 0.100163, 0.90188, 0.911492, 0.466998, 0.433854, 0.015408, 0.167343, 0.642858, 0.455742, 0.610448, 0.326629, 0.421488, 
0.458191, 0.637302, 0.533665, 0.812558, 0.969173, 0.326991, 0.126867, 0.848999, 0.716219, 0.981126, 0.327416, 0.284432, 0.792503, 0.694015, 0.248582, 0.770959, 0.751586, 0.38134, 0.965801, 0.505019, 0.14646, 0.374775, 0.321738, 0.458752, 0.377833, 0.519381, 0.679915, 0.458723, 0.0370693, 0.849639, 0.396516, 0.83156, 0.444756, 0.797976, 0.995591, 0.538567, 0.689927, 0.45096, 0.868299, 0.811123, 0.293234, 0.721305, 0.177166, 0.894952, 0.62641, 0.687413, 0.694175, 0.304217, 0.0136684, 0.890674, 0.899722, 0.637766, 0.528021, 0.801501, 0.792564, 0.538398, 0.877006, 0.264564, 0.407734, 0.645108, 0.18197, 0.152754, 0.815848, 0.577899, 0.703031, 0.242478, 0.894567, 0.96087, 0.408947, 0.315054, 0.302289, 0.478926, 0.669814, 0.959103, 0.344021, 0.658806, 0.448365, 0.838467, 0.779806, 0.865458, 0.41381, 0.893845, 0.457803, 0.889834, 0.946956, 0.933004, 0.83567, 0.136374, 0.049155, 0.3097, 0.251154, 0.305113, 0.799089, 0.300594, 0.582202, 0.31084, 0.385809, 0.144171, 0.434686, 0.754535, 0.53353, 0.522426, 0.0434329, 0.91338, 0.660406, 0.150252, 0.90375, 0.0102227, 0.729369, 0.461165, 0.506007, 0.308079, 0.47514, 0.12175, 0.886592, 0.0350306, 0.606862, 0.313458, 0.750547, 0.0801592, 0.457862, 0.533468, 0.885263, 0.198575, 0.309171, 0.305542, 0.199726, 0.793719, 0.804498, 0.0358066, 0.978661, 0.827523, 0.35161, 0.762326, 0.535313, 0.938559, 0.4503, 0.112569, 0.66282, 0.370995, 0.569321, 0.119367, 0.501329, 0.846056, 0.539359, 0.839922, 0.442332, 0.884, 0.0591876, 0.539168, 0.984781, 0.871205, 0.0854703, 0.717857, 0.232444, 0.457933, 0.649101, 0.148704, 0.670437, 0.443175, 0.302435, 0.0377366, 0.454601, 0.336314, 0.81688, 0.261152, 0.498628, 0.759005, 0.567544, 0.632342, 0.690219, 0.845594, 0.531945, 0.691674, 0.595055, 0.725202, 0.537517, 0.711232, 0.993834, 0.441047, 0.990451, 0.799802, 0.961666, 0.457168, 0.709478, 0.980607, 0.844829, 0.405211, 0.223999, 0.852146, 0.773323, 0.842033, 0.317416, 0.397522, 0.824772, 0.453149, 0.0522264, 0.614552, 0.81791, 0.225596, 0.99387, 0.0682345, 0.614353, 0.742055, 0.618717, 0.663419, 0.321116, 0.901984, 0.11989, 0.235557, 0.356499, 0.577774, 0.555067, 0.0863137, 0.472922, 0.884758, 0.824288, 0.498867, 0.994645, 0.868219, 0.6208, 0.947028, 0.212622, 0.59932, 0.79759, 0.668395, 0.427304, 0.690056, 0.439286, 0.0700809, 0.27491, 0.0755251, 0.442506, 0.657334, 0.264498, 0.671154, 0.166596, 0.269773, 0.107788, 0.349553, 0.695793, 0.703822, 0.215588, 0.204637, 0.0798103, 0.763996, 0.0612667, 0.571063, 0.0526654, 0.515163, 0.953367, 0.775436, 0.312229, 0.296158, 0.900139, 0.483289, 0.254503, 0.935625, 0.478267, 0.316831, 0.115587, 0.442304, 0.0596865, 0.19508, 0.774136, 0.988787, 0.60814, 0.00981545, 0.398596, 0.768476, 0.906271, 0.46773, 0.256136, 0.618474, 0.179652, 0.888705, 0.586942, 0.321217, 0.651605, 0.862069, 0.114186, 0.17861, 0.0417655, 0.176729, 0.42583, 0.82061, 0.860383, 0.193185, 0.404549, 0.379155, 0.485282, 0.016342, 0.693068, 0.125953, 0.658611, 0.270833, 0.285506, 0.32126, 0.739323, 0.437358, 0.150252, 0.733181, 0.690758, 0.520076, 0.700211, 0.579535, 0.0495254, 0.283641, 0.536096, 0.418035, 0.63815, 0.294261, 0.48992, 0.734675, 0.552556, 0.620045, 0.589756, 0.671385, 0.886652, 0.0971896, 0.66922, 0.695882, 0.347832, 0.371098, 0.856624, 0.691958, 0.407796, 0.932549, 0.342606, 0.288123, 0.489522, 0.231401, 0.387611, 0.0551225, 0.375943, 0.224691, 0.289555, 0.455238, 0.0430309, 0.584177, 0.146183, 0.680249, 0.347606, 0.763452, 0.530888, 0.387532, 0.282439, 0.255751, 0.36775, 0.925498, 0.971201, 0.926978, 0.627056, 0.665154, 0.454747, 0.341733, 0.32008, 
0.168399, 0.423126, 0.299849, 0.820325, 0.723267, 0.0637205, 0.918364, 0.158676, 0.589659, 0.316966, 0.592171, 0.207242, 0.661177, 0.752052, 0.804279, 0.292697, 0.605208, 0.250544, 0.125328, 0.645588, 0.868387, 0.857539, 0.686003, 0.45299, 0.536528, 0.924602, 0.984829, 0.255735, 0.313742, 0.226247, 0.185502, 0.827408, 0.497741, 0.848919, 0.316295, 0.843783, 0.710022, 0.638775, 0.371836, 0.164846, 0.250868, 0.832528, 0.335575, 0.968559, 0.149633, 0.203899, 0.788485, 0.0398888, 0.67515, 0.0586368, 0.772751, 0.109719, 0.762025, 0.710197, 0.316576, 0.594539, 0.80667, 0.0368775, 0.0513186, 0.970405, 0.561416, 0.249725, 0.585635, 0.532354, 0.125889, 0.156559, 0.572113, 0.358728, 0.600729, 0.361381, 0.970288, 0.851343, 0.490879, 0.458854, 0.0291644, 0.549936, 0.301511, 0.746525, 0.502274, 0.835356, 0.422023, 0.516765, 0.96343, 0.574654, 0.40334, 0.265409, 0.453785, 0.400419, 0.17958, 0.356209, 0.48938, 0.565072, 0.307234, 0.285915, 0.647999, 0.719095, 0.910425, 0.823406, 0.916754, 0.961534, 0.779739, 0.136354, 0.32201, 0.332292, 0.109666, 0.00891325, 0.877549, 0.618466, 0.063447, 0.668056, 0.273041, 0.327224, 0.878953, 0.474336, 0.503986, 0.803222, 0.786647, 0.244807, 0.724198, 0.433548, 0.0616928, 0.470861, 0.39902, 0.13408, 0.679739, 0.1709, 0.880907, 0.982239, 0.688888, 0.293035, 0.570804, 0.382169, 0.063379, 0.0784823, 0.875044, 0.769845, 0.225419, 0.296662, 0.875628, 0.344728, 0.20352, 0.923303, 0.331475, 0.346848, 0.833991, 0.685642, 0.227788, 0.628054, 0.262699, 0.732654, 0.15477, 0.327979, 0.651856, 0.47106, 0.636137, 0.572486, 0.183852, 0.104058, 0.987281, 0.336027, 0.72372, 0.8582, 0.58961, 0.743704, 0.578231, 0.542347, 0.423738, 0.381478, 0.084426, 0.662044, 0.652717, 0.443199, 0.392809, 0.30298, 0.790753, 0.328842, 0.75938, 0.0488912, 0.899114, 0.559141, 0.0631922, 0.0118264, 0.723217, 0.275262, 0.136921, 0.864023, 0.662238, 0.306619, 0.0893467, 0.888845, 0.185989, 0.526927, 0.0147209, 0.160433, 0.775214, 0.863504, 0.0621255, 0.134734, 0.553247, 0.241655, 0.77989, 0.909935, 0.721599, 0.563924, 0.204608, 0.393781, 0.12511, 0.447523, 0.324561, 0.967459, 0.0631283, 0.35271, 0.833536, 0.852548, 0.517544, 0.40873, 0.646201, 0.595205, 0.971308, 0.166952, 0.985942, 0.591575, 0.244875, 0.693938, 0.113778, 0.759724, 0.813694, 0.393444, 0.343845, 0.103497, 0.121951, 0.233799, 0.762405, 0.500507, 0.919505, 0.456497, 0.840675, 0.604321, 0.543137, 0.0211698, 0.0758669, 0.12913, 0.549984, 0.263862, 0.0346989, 0.335107, 0.835223, 0.566316, 0.0244453, 0.578691, 0.664038, 0.650779, 0.164656, 0.627452, 0.722843, 0.800206, 0.116107, 0.0203035, 0.374407, 0.965705, 0.0110325, 0.998024, 0.783014, 0.0832406, 0.335642, 0.774395, 0.371153, 0.941007, 0.63636, 0.0240382, 0.959488, 0.923166, 0.439688, 0.864186, 0.699498, 0.344454, 0.179774, 0.936647, 0.731408, 0.816577, 0.687972, 0.617568, 0.928552, 0.971177, 0.134631, 0.168535, 0.699836, 0.321496, 0.319605, 0.618294, 0.514369, 0.945269, 0.88228, 0.407133, 0.750522, 0.205377, 0.202692, 0.875734, 0.951163, 0.591304, 0.174682, 0.34534, 0.552553, 0.280804, 0.363245, 0.114267, 0.906492, 0.882822, 0.24086, 0.426481, 0.235711, 0.312802, 0.262856, 0.936412, 0.24493, 0.597656, 0.659288, 0.357394, 0.685456, 0.791699, 0.887432, 0.346623, 0.111041, 0.207639, 0.739972, 0.419978, 0.845298, 0.0778717, 0.724928, 0.706066, 0.860665, 0.160268, 0.244845, 0.463204, 0.498739, 0.588374, 0.368338, 0.0892011, 0.107076, 0.478193, 0.631327, 0.811656, 0.464272, 0.421894, 0.137954, 0.410764, 0.435121, 0.24257, 0.527901, 0.404133, 0.0545035, 0.742448, 0.442365, 0.863438, 0.789441, 
0.394402, 0.00613398, 0.867081, 0.00384251, 0.511533, 0.57806, 0.888777, 0.0259426, 0.793212, 0.609102, 0.301156, 0.0877352, 0.828016, 0.561448, 0.0595132, 0.471967, 0.893999, 0.668739, 0.847616, 0.743314, 0.743303, 0.0722257, 0.140424, 0.275918, 0.625615, 0.764147, 0.139934, 0.725906, 0.0909708, 0.698548, 0.546715, 0.123564, 0.610412, 0.474304, 0.925668, 0.0509514, 0.0574054, 0.899886, 0.65906, 0.999451, 0.656265, 0.036704, 0.878708, 0.887585, 0.305513, 0.367564, 0.978791, 0.31831, 0.669875, 0.367108, 0.772971, 0.0896797, 0.833546, 0.793242, 0.490412, 0.889564, 0.0859814, 0.381787, 0.00539598, 0.700407, 0.131112, 0.632591, 0.732806, 0.816641, 0.178393, 0.077443, 0.973116, 0.734785, 0.324847, 0.528034, 0.349091, 0.574272, 0.89613, 0.621639, 0.0487464, 0.677461, 0.830756, 0.657379, 0.526372, 0.365305, 0.369929, 0.868658, 0.136915, 0.507724, 0.921089, 0.313955, 0.350456, 0.927952, 0.00628777, 0.550445, 0.164051, 0.730685, 0.0247993, 0.1408, 0.265129, 0.905673, 0.779407, 0.092088, 0.22462, 0.578342, 0.805298, 0.677176, 0.9582, 0.79486, 0.627776, 0.662102, 0.439483, 0.268846, 0.189297, 0.394185, 0.429062, 0.614969, 0.167243, 0.788504, 0.0661483, 0.721089, 0.774584, 0.316421, 0.681918, 0.245269, 0.29937, 0.840421, 0.722573, 0.0120884, 0.265541, 0.146931, 0.953688, 0.562294, 0.991365, 0.840994, 0.720479, 0.179228, 0.223416, 0.458426, 0.99785, 0.337146, 0.816261, 0.481227, 0.553636, 0.297855, 0.538486, 0.608309, 0.721136, 0.833913, 0.24333, 0.910893, 0.727897, 0.109237, 0.593581, 0.874631, 0.0709713, 0.796996, 0.925099, 0.586683, 0.96676, 0.351985, 0.751722, 0.356668, 0.669211, 0.489198, 0.293081, 0.311791, 0.924245, 0.841715, 0.466269, 0.889917, 0.45499, 0.886618, 0.529714, 0.492143, 0.895005, 0.320168, 0.372601, 0.914721, 0.0800021, 0.848954, 0.00908589, 0.918013, 0.281478, 0.544026, 0.664106, 0.0375566, 0.312872, 0.0342114, 0.942924, 0.138984, 0.332442, 0.871673, 0.525317, 0.418497, 0.790423, 0.723684, 0.873188, 0.318098, 0.467299, 0.157948, 0.0454114, 0.32999, 0.787019, 0.884097, 0.285882, 0.358632, 0.144098, 0.719018, 0.719071, 0.697976, 0.186055, 0.501236, 0.884748, 0.632005, 0.430949, 0.387263, 0.805957, 0.193986, 0.347255, 0.0570445, 0.006797, 0.500401, 0.294487, 0.0680388, 0.805326, 0.181984, 0.0501751, 0.0766015, 0.526423, 0.188243, 0.414886, 0.592165, 0.753081, 0.730982, 0.165123, 0.107674, 0.423133, 0.228369, 0.224986, 0.577592, 0.635908, 0.943174, 0.335085, 0.836889, 0.342952, 0.167118, 0.624525, 0.856313, 0.921166, 0.801291, 0.758801, 0.637488, 0.680047, 0.303448, 0.869554, 0.290099, 0.474593, 0.672515, 0.0637003, 0.186288, 0.625233, 0.399161, 0.722362, 0.423572, 0.942582, 0.0886553, 0.212074, 0.0391753, 0.861589, 0.400305, 0.10486, 0.384078, 0.202166, 0.672622, 0.91722, 0.603885, 0.672414, 0.846603, 0.333102, 0.486922, 0.907598, 0.867504, 0.56478, 0.767009, 0.848718, 0.124062, 0.551917, 0.941137, 0.346769, 0.657047, 0.289148, 0.388219, 0.626385, 0.697405, 0.0377644, 0.908079, 0.668569, 0.119673, 0.215121, 0.376058, 0.0374965, 0.386697, 0.563823, 0.885494, 0.243946, 0.355869, 0.257652, 0.046335, 0.916384, 0.680229, 0.464651, 0.709885, 0.484906, 0.91259, 0.498101, 0.223619, 0.153989, 0.111489, 0.505192, 0.601299, 0.564115, 0.898977, 0.600714, 0.444093, 0.538388, 0.330582, 0.12034, 0.0536305, 0.0222202, 0.742813, 0.546253, 0.791928, 0.069975, 0.196255, 0.765296, 0.205934, 0.893889, 0.844343, 0.777305, 0.937906, 0.392683, 0.414534, 0.0414895, 0.387903, 0.954064, 0.802791, 0.537087, 0.532209, 0.887904, 0.594908, 0.586865, 0.0547664, 0.40019, 0.481301, 0.709963, 0.0121169, 0.828262, 
0.965955, 0.949611, 0.65088, 0.448005, 0.993904, 0.164882, 0.354858, 0.367886, 0.426567, 0.687085, 0.605885, 0.508063, 0.58993, 0.248294, 0.74219, 0.969719, 0.202111, 0.89544, 0.583348, 0.823776, 0.439039, 0.421996, 0.720746, 0.897236, 0.374662, 0.872577, 0.105817, 0.720689, 0.824346, 0.158175, 0.395905, 0.100654, 0.707863, 0.423092, 0.157991, 0.156658, 0.702542, 0.296186, 0.458731, 0.150583, 0.885817, 0.199738, 0.898556, 0.716553, 0.420452, 0.31157, 0.149567, 0.683278, 0.46182, 0.718851, 0.244332, 0.33307, 0.757627, 0.553181, 0.311749, 0.123247, 0.919328, 0.847211, 0.0841077, 0.898785, 0.0570432, 0.779025, 0.330567, 0.559177, 0.25126, 0.82179, 0.0735284, 0.657439, 0.0731305, 0.0511692, 0.280545, 0.0772603, 0.676196, 0.293328, 0.115305, 0.891287, 0.679459, 0.174581, 0.695196, 0.123975, 0.785274, 0.242571, 0.479348, 0.765869, 0.336896, 0.782724, 0.708965, 0.775772, 0.772306, 0.736247, 0.8866, 0.873607, 0.905697, 0.765402, 0.161844, 0.960967, 0.972999, 0.422707, 0.8069, 0.948782, 0.565063, 0.693451, 0.602445, 0.814243, 0.916487, 0.268815, 0.95423, 0.546828, 0.277289, 0.946279, 0.646002, 0.093949, 0.782924, 0.274266, 0.993749, 0.910229, 0.0989096, 0.35894, 0.274044, 0.384003, 0.338796, 0.0555018, 0.726783, 0.908397, 0.405586, 0.0936273, 0.957714, 0.888016, 0.874309, 0.793413, 0.871222, 0.518545, 0.972874, 0.0078246, 0.985028, 0.984621, 0.590917, 0.386753, 0.585227, 0.381218, 0.625137, 0.100634, 0.919112, 0.293435, 0.495189, 0.510776, 0.298816, 0.095965, 0.103011, 0.447301, 0.125291, 0.928486, 0.882791, 0.783282, 0.00272597, 0.68653, 0.108257, 0.946475, 0.496814, 0.328259, 0.104734, 0.619026, 0.34315, 0.0453975, 0.755696, 0.962885, 0.239685, 0.187631, 0.964565, 0.765593, 0.672336, 0.090252, 0.210921, 0.680636, 0.218681, 0.0107781, 0.522013, 0.325851, 0.1911, 0.420952, 0.029574, 0.351209, 0.129009, 0.563426, 0.227105, 0.07612, 0.487968, 0.83752, 0.735883, 0.666915, 0.445869, 0.791671, 0.281661, 0.928018, 0.669721, 0.682338, 0.79125, 0.356164, 0.462099, 0.855418, 0.214802, 0.902109, 0.953848, 0.410862, 0.36956, 0.421996, 0.934848, 0.106823, 0.308401, 0.14928, 0.72386, 0.0378415, 0.663265, 0.00682835, 0.633184, 0.534593, 0.0156501, 0.151601, 0.211653, 0.468096, 0.905673, 0.805541, 0.749493, 0.208294, 0.97087, 0.943601, 0.969517, 0.384328, 0.922651, 0.940334, 0.204516, 0.715793, 0.26435, 0.89528, 0.855626, 0.9497, 0.379236, 0.668378, 0.852895, 0.187018, 0.975123, 0.506262, 0.0583515, 0.0810914, 0.47934, 0.00242032, 0.768933, 0.557646, 0.43155, 0.112644, 0.78149, 0.854212, 0.843992, 0.849422, 0.611063, 0.182226, 0.330712, 0.180957, 0.819837, 0.802097, 0.422339, 0.297575, 0.172512, 0.0700954, 0.972202, 0.101391, 0.507276, 0.0169995, 0.0254523, 0.954711, 0.10593, 0.883098, 0.346894, 0.751155, 0.244221, 0.416072, 0.824888, 0.904692, 0.125086, 0.185577, 0.19544, 0.723256, 0.24917, 0.48036, 0.973321, 0.0213415, 0.441908, 0.433604, 0.74882, 0.148368, 0.0724756, 0.856612, 0.523868, 0.9712, 0.558063, 0.982476, 0.0572319, 0.653012, 0.346464, 0.933288, 0.876528, 0.421949, 0.0778321, 0.197076, 0.18523, 0.155189, 0.824885, 0.153041, 0.352194, 0.714309, 0.0207662, 0.0519233, 0.549587, 0.521096, 0.529766, 0.927778, 0.210935, 0.803238, 0.451755, 0.799907, 0.21564, 0.95605, 0.9331, 0.845079, 0.029185, 0.655086, 0.359136, 0.59256, 0.279384, 0.297993, 0.464518, 0.449132, 0.829833, 0.485665, 0.661307, 0.203452, 0.053043, 0.897858, 0.606581, 0.831515, 0.334763, 0.437941, 0.406986, 0.975581, 0.456347, 0.579385, 0.0273983, 0.470545, 0.446877, 0.611386, 0.449618, 0.0843964, 0.0108698, 0.306468, 0.059336, 0.550231, 
0.336181, 0.531002, 0.152256, 0.824983, 0.345761, 0.135409, 0.885335, 0.665116, 0.669245, 0.538414, 0.0691357, 0.536132, 0.29768, 0.414128, 0.745982, 0.629456, 0.325496, 0.853354, 0.973727, 0.176844, 0.113371, 0.286286, 0.468106, 0.0649777, 0.526946, 0.785171, 0.766632, 0.0625148, 0.438023, 0.693556, 0.846619, 0.706524, 0.334763, 0.0467728, 0.297823, 0.985897, 0.150503, 0.915487, 0.951665, 0.972201, 0.700895, 0.216043, 0.662013, 0.797464, 0.473218, 0.994398, 0.906182, 0.459944, 0.966297, 0.233741, 0.136611, 0.961101, 0.842715, 0.818375, 0.697074, 0.386101, 0.360208, 0.261699, 0.95886, 0.309039, 0.0493484, 0.153351, 0.752365, 0.792846, 0.940465, 0.37238, 0.314019, 0.0514882, 0.821461, 0.844165, 0.493629, 0.988331, 0.368021, 0.38424, 0.92453, 0.229737, 0.958241, 0.702214, 0.405154, 0.560592, 0.597024, 0.786127, 0.0597326, 0.0428245, 0.297238, 0.49482, 0.399929, 0.724312, 0.107373, 0.36756, 0.325617, 0.981816, 0.485813, 0.633366, 0.711929, 0.873733, 0.638776, 0.109222, 0.430409, 0.774384, 0.974168, 0.104081, 0.590271, 0.129647, 0.133071, 0.341982, 0.298487, 0.509727, 0.256758, 0.0396291, 0.661724, 0.503199, 0.105958, 0.187221, 0.312695, 0.993555, 0.127059, 0.0448764, 0.855391, 0.702581, 0.0203665, 0.827487, 0.289003, 0.499851, 0.383601, 0.677723, 0.809673, 0.382683, 0.904806, 0.991494, 0.951881, 0.520166, 0.901293, 0.747227, 0.423873, 0.481918, 0.427671, 0.326114, 0.341688, 0.685606, 0.518872, 0.530867, 0.835228, 0.810857, 0.620203, 0.507715, 0.632993, 0.82505, 0.578728, 0.700222, 0.83451, 0.492908, 0.638573, 0.573057, 0.00891188, 0.713448, 0.925986, 0.0393316, 0.0335817, 0.790412, 0.0323502, 0.307637, 0.352329, 0.922402, 0.516065, 0.861165, 0.806933, 0.678143, 0.772472, 0.367841, 0.902365, 0.71558, 0.458232, 0.967204, 0.179482, 0.0361507, 0.94922, 0.162945, 0.628609, 0.877003, 0.470757, 0.410595, 0.0216603, 0.460343, 0.858546, 0.363512, 0.607368, 0.515995, 0.530213, 0.0367822, 0.532001, 0.571669, 0.397947, 0.487743, 0.213033, 0.749217, 0.8274, 0.737503, 0.657423, 0.0312227, 0.781163, 0.886139, 0.331717, 0.55673, 0.986234, 0.253588, 0.810303, 0.520106, 0.419689, 0.0699049, 0.153596, 0.794232, 0.0483519, 0.83783, 0.597201, 0.516072, 0.844409, 0.385921, 0.611924, 0.955325, 0.519208, 0.373878, 0.629046, 0.690831, 0.697245, 0.119742, 0.056846, 0.456071, 0.799117, 0.527102, 0.150697, 0.842322, 0.214973, 0.744237, 0.456457, 0.482834, 0.745881, 0.411206, 0.617174, 0.420588, 0.286817, 0.0496779, 0.730341, 0.34842, 0.250543, 0.740527, 0.303643, 0.281129, 0.1872, 0.4808, 0.716071, 0.983636, 0.449663, 0.698014, 0.73312, 0.622484, 0.85485, 0.409041, 0.720464, 0.675804, 0.559418, 0.920296, 0.754098, 0.638082, 0.234053, 0.018729, 0.362872, 0.652148, 0.620817, 0.369305, 0.38631, 0.930427, 0.641007, 0.109921, 0.623183, 0.352949, 0.874117, 0.448717, 0.455285, 0.345244, 0.451122, 0.948271, 0.556486, 0.602188, 0.776034, 0.482165, 0.7329, 0.197306, 0.0657418, 0.0654793, 0.5727, 0.807578, 0.999897, 0.161097, 0.509372, 0.61751, 0.0745205, 0.678902, 0.179759, 0.159726, 0.587556, 0.3301, 0.491297, 0.921946, 0.985123, 0.86514, 0.881982, 0.566821, 0.621254, 0.103478, 0.788699, 0.904587, 0.877758, 0.643877, 0.153881, 0.524796, 0.375401, 0.951869, 0.948358, 0.833498, 0.593298, 0.0201101, 0.126275, 0.765023, 0.904025, 0.648421, 0.707049, 0.0150131, 0.7442, 0.5978, 0.925498, 0.310578, 0.151377, 0.90317, 0.540783, 0.159372, 0.602278, 0.358967, 0.514979, 0.557601, 0.848365, 0.61464, 0.369268, 0.684617, 0.872303, 0.136651, 0.611522, 0.869738, 0.512325, 0.217256, 0.178135, 0.223219, 0.0219734, 0.756035, 0.245299, 0.210532, 
0.133664, 0.341583, 0.316551, 0.723035, 0.551892, 0.734721, 0.0922746, 0.946082, 0.967512, 0.578441, 0.132522, 0.717207, 0.624223, 0.760253, 0.184065, 0.990692, 0.566182, 0.14733, 0.861481, 0.801199, 0.263972, 0.374555, 0.258605, 0.67979, 0.201417, 0.715122, 0.570144, 0.566226, 0.158468, 0.04718, 0.0937663, 0.266541, 0.601237, 0.69566, 0.144284, 0.9475, 0.474596, 0.0888045, 0.416191, 0.702359, 0.867205, 0.424696, 0.591265, 0.908696, 0.257274, 0.648302, 0.198409, 0.695839, 0.931819, 0.660077, 0.215693, 0.45629, 0.894729, 0.983438, 0.183173, 0.56049, 0.539245, 0.996379, 0.370151, 0.250879, 0.402447, 0.432257, 0.0674984, 0.31702, 0.731698, 0.668893, 0.808452, 0.109053, 0.508808, 0.342527, 0.513582, 0.123082, 0.785695, 0.732745, 0.268861, 0.250659, 0.545527, 0.857127, 0.41173, 0.804247, 0.111304, 0.413358, 0.296181, 0.672227, 0.115622, 0.225399, 0.864891, 0.962351, 0.814655, 0.521629, 0.396566, 0.552732, 0.19779, 0.156093, 0.384171, 0.253139, 0.722354, 0.599587, 0.411506, 0.288726, 0.129142, 0.326284, 0.528871, 0.231405, 0.931537, 0.910982, 0.74273, 0.179939, 0.393404, 0.204973, 0.700174, 0.602977, 0.562797, 0.792018, 0.0424811, 0.235302, 0.992325, 0.176502, 0.554295, 0.0324226, 0.284686, 0.00700358, 0.325967, 0.340064, 0.383622, 0.296746, 0.387355, 0.990186, 0.108578, 0.221806, 0.64229, 0.818318, 0.312632, 0.867823, 0.883808, 0.599856, 0.509203, 0.0152882, 0.136774, 0.848128, 0.213386, 0.487014, 0.95346, 0.789232, 0.611782, 0.426443, 0.405727, 0.86852, 0.305588, 0.195924, 0.695828, 0.935965, 0.641291, 0.688478, 0.813829, 0.615355, 0.735923, 0.42909, 0.541464, 0.346598, 0.25247, 0.673428, 0.981367, 0.997846, 0.297658, 0.0142858, 0.80571, 0.267435, 0.377597, 0.81435, 0.492265, 0.628247, 0.187726, 0.11812, 0.716165, 0.694685, 0.346032, 0.913525, 0.253758, 0.515381, 0.214674, 0.956031, 0.957614, 0.0872555, 0.199956, 0.430683, 0.157594, 0.562096, 0.392935, 0.511123, 0.920476, 0.868776, 0.576936, 0.0278721, 0.219658, 0.774331, 0.625886, 0.0082881, 0.997553, 0.329826, 0.832248, 0.996308, 0.388564, 0.729274, 0.323474, 0.425097, 0.301228, 0.299065, 0.983669, 0.400248, 0.503789, 0.813026, 0.136986, 0.151817, 0.729839, 0.764368, 0.0280042, 0.264067, 0.900445, 0.231267, 0.502126, 0.565454, 0.843213, 0.579648, 0.789364, 0.669677, 0.04461, 0.684907, 0.425413, 0.288632, 0.00395595, 0.542435, 0.216071, 0.18917, 0.970851, 0.419426, 0.237613, 0.730309, 0.199577, 0.732611, 0.744815, 0.58676, 0.671423, 0.494676, 0.225424, 0.70758, 0.996235, 0.964135, 0.802666, 0.312883, 0.135183, 0.846743, 0.260284, 0.716979, 0.567487, 0.0730147, 0.542468, 0.659719, 0.155479, 0.770085, 0.992266, 0.353544, 0.774101, 0.965537, 0.998775, 0.34354, 0.0773092, 0.453505, 0.669693, 0.353933, 0.753243, 0.944286, 0.652485, 0.737946, 0.392414, 0.325862, 0.728856, 0.83094, 0.749988, 0.801557, 0.613172, 0.808371, 0.986402, 0.715234, 0.240682, 0.392313, 0.891651, 0.259872, 0.0986346, 0.86297, 0.106848, 0.776397, 0.553577, 0.215339, 0.0441361, 0.265432, 0.0720464, 0.284713, 0.0283263, 0.53634, 0.493958, 0.476731, 0.0943269, 0.773665, 0.991534, 0.50939, 0.335962, 0.06123, 0.417067, 0.800443, 0.519171, 0.427361, 0.217762, 0.0510568, 0.36815, 0.563254, 0.854689, 0.335899, 0.171498, 0.548312, 0.195189, 0.0535248, 0.865462, 0.87322, 0.866146, 0.85746, 0.102272, 0.150039, 0.0667764, 0.445532, 0.170749, 0.406866, 0.75219, 0.969756, 0.816015, 0.35656, 0.071648, 0.612682, 0.153806, 0.150379, 0.58119, 0.54882, 0.376379, 0.555642, 0.97008, 0.252967, 0.10617, 0.38813, 0.256052, 0.606034, 0.330212, 0.385566, 0.508587, 0.750938, 0.031118, 0.423059, 
0.244267, 0.20265, 0.787553, 0.366797, 0.711147, 0.505731, 0.757017, 0.45655, 0.651996, 0.284821, 0.0598917, 0.99037, 0.931341, 0.112815, 0.0909045, 0.187748, 0.791663, 0.827594, 0.804124, 0.925645, 0.887251, 0.393529, 0.191947, 0.408688, 0.804312, 0.66514, 0.115176, 0.285518, 0.204117, 0.157981, 0.463733, 0.479301, 0.856981, 0.937528, 0.411315, 0.863007, 0.699701, 0.229089, 0.950686, 0.867727, 0.971115, 0.885927, 0.844345, 0.076912, 0.869651, 0.76921, 0.0378565, 0.622212, 0.493562, 0.608605, 0.831141, 0.971259, 0.905759, 0.874348, 0.69186, 0.148418, 0.337865, 0.22719, 0.412224, 0.186524, 0.814085, 0.692819, 0.586107, 0.345021, 0.872031, 0.185449, 0.00439599, 0.335658, 0.0405403, 0.741025, 0.725927, 0.154015, 0.137111, 0.0232265, 0.930676, 0.654835, 0.908547, 0.094071, 0.990514, 0.437413, 0.743797, 0.00788466, 0.446104, 0.246498, 0.666793, 0.819619, 0.102986, 0.852622, 0.788565, 0.62364, 0.791175, 0.42685, 0.0765536, 0.100236, 0.446536, 0.33308, 0.905593, 0.584922, 0.0234402, 0.895991, 0.595334, 0.681095, 0.407522, 0.546711, 0.57838, 0.888381, 0.288598, 0.899159, 0.270344, 0.813493, 0.655682, 0.578521, 0.467381, 0.691264, 0.716711, 0.504354, 0.141709, 0.422382, 0.40133, 0.346244, 0.171395, 0.605301, 0.869774, 0.495527, 0.705591, 0.561917, 0.776828, 0.0685969, 0.73957, 0.647541, 0.608498, 0.924928, 0.947169, 0.456873, 0.00828233, 0.0954775, 0.700587, 0.520686, 0.257912, 0.92712, 0.326307, 0.821409, 0.981017, 0.0119735, 0.870095, 0.265674, 0.122687, 0.125838, 0.255999, 0.593459, 0.239397, 0.0350162, 0.743425, 0.985695, 0.291538, 0.507485, 0.272803, 0.541422, 0.653463, 0.402406, 0.627482, 0.12628, 0.749723, 0.115818, 0.35766, 0.501103, 0.606518, 0.907292, 0.489708, 0.141918, 0.211133, 0.233792, 0.138817, 0.0188088, 0.425801, 0.396851, 0.817976, 0.917922, 0.89365, 0.465329, 0.69366, 0.987449, 0.899463, 0.785092, 0.204321, 0.461154, 0.410804, 0.388639, 0.0825985, 0.135478, 0.20132, 0.422745, 0.390548, 0.489493, 0.253859, 0.482277, 0.666313, 0.839463, 0.405183, 0.55627, 0.100002, 0.895627, 0.721706, 0.628305, 0.0710177, 0.160533, 0.22906, 0.730038, 0.616375, 0.352401, 0.201681, 0.197307, 0.81351, 0.982901, 0.576121, 0.835851, 0.813159, 0.405018, 0.981582, 0.757436, 0.311719, 0.48819, 0.0826216, 0.990143, 0.699225, 0.189513, 0.504728, 0.686396, 0.527798, 0.183832, 0.843663, 0.286397, 0.694677, 0.535705, 0.0570376, 0.615431, 0.0534782, 0.415158, 0.909671, 0.215951, 0.160384, 0.56208, 0.326519, 0.0202129, 0.986293, 0.598507, 0.533435, 0.731775, 0.938914, 0.501171, 0.451895, 0.665232, 0.387985, 0.505002, 0.250808, 0.701774, 0.383783, 0.798534, 0.218294, 0.402173, 0.516096, 0.997964, 0.222554, 0.853627, 0.263625, 0.451092, 0.520025, 0.756755, 0.668227, 0.420038, 0.967938, 0.175036, 0.459307, 0.967397, 0.509015, 0.159399, 0.436078, 0.532903, 0.0923253, 0.749582, 0.427083, 0.128942, 0.766158, 0.287336, 0.570364, 0.216147, 0.477547, 0.695998, 0.547537, 0.942974, 0.0292585, 0.589732, 0.513919, 0.615032, 0.0166233, 0.381025, 0.585116, 0.438534, 0.240048, 0.656944, 0.828787, 0.260304, 0.0184595, 0.676453, 0.421382, 0.746309, 0.737324, 0.307824, 0.359532, 0.438462, 0.0557004, 0.384388, 0.351528, 0.258295, 0.677667, 0.809185, 0.0118964, 0.679571, 0.186422, 0.132335, 0.553132, 0.856355, 0.337596, 0.174414, 0.00509927, 0.44934, 0.327064, 0.790492, 0.398911, 0.574262, 0.153893, 0.514169, 0.691686, 0.858967, 0.573273, 0.668025, 0.979862, 0.893851, 0.657798, 0.0585913, 0.589984, 0.0942525, 0.372763, 0.285639, 0.14401, 0.263325, 0.463679, 0.527697, 0.446536, 0.360917, 0.384701, 0.0506526, 0.721321, 0.597825, 
0.0214683, 0.11416, 0.700745, 0.79477, 0.0921258, 0.785929, 0.0259505, 0.194054, 0.305135, 0.432538, 0.599081, 0.281118, 0.5734, 0.724326, 0.400447, 0.22191, 0.438663, 0.133757, 0.0668173, 0.478327, 0.995522, 0.545614, 0.293205, 0.663079, 0.86955, 0.150913, 0.363281, 0.193324, 0.450551, 0.220092, 0.426935, 0.183398, 0.926087, 0.00888572, 0.00780228, 0.814242, 0.790096, 0.137877, 0.211914, 0.667349, 0.494069, 0.655491, 0.362551, 0.054272, 0.881904, 0.0506702, 0.676432, 0.181077, 0.573144, 0.836107, 0.773527, 0.984899, 0.23801, 0.821837, 0.941972, 0.867295, 0.123919, 0.333527, 0.285781, 0.263757, 0.904559, 0.544361, 0.509233, 0.605513, 0.663093, 0.79277, 0.62842, 0.304771, 0.783085, 0.186073, 0.43221, 0.529232, 0.0314923, 0.994417, 0.576632, 0.0437377, 0.733374, 0.932039, 0.125679, 0.451923, 0.79018, 0.477478, 0.483779, 0.532409, 0.27873, 0.875928, 0.571014, 0.583013, 0.891833, 0.620377, 0.271269, 0.330521, 0.979837, 0.700779, 0.249666, 0.951454, 0.240501, 0.749557, 0.126723, 0.347458, 0.563714, 0.825771, 0.0300354, 0.183928, 0.428681, 0.953028, 0.631215, 0.965704, 0.273817, 0.265083, 0.430852, 0.417258, 0.613196, 0.423968, 0.0680174, 0.638343, 0.363991, 0.169832, 0.0779664, 0.495797, 0.0923707, 0.0147055, 0.647875, 0.998512, 0.880195, 0.665924, 0.222713, 0.552008, 0.906499, 0.952438, 0.316669, 0.497773, 0.606909, 0.0373035, 0.973113, 0.749352, 0.672115, 0.0371679, 0.0221643, 0.153936, 0.324902, 0.116732, 0.199502, 0.453173, 0.140486, 0.857874, 0.500424, 0.561531, 0.646703, 0.0439429, 0.947895, 0.161695, 0.637395, 0.682654, 0.0346284, 0.040631, 0.429956, 0.46522, 0.801661, 0.399987, 0.0208613, 0.348847, 0.475412, 0.796777, 0.540579, 0.357489, 0.0746613, 0.213067, 0.761432, 0.0143938, 0.646772, 0.919113, 0.255361, 0.979193, 0.868147, 0.154713, 0.778923, 0.286271, 0.428898, 0.857063, 0.00948257, 0.772213, 0.907271, 0.843089, 0.230618, 0.9172, 0.943433, 0.368784, 0.0593151, 0.197673, 0.935532, 0.0678369, 0.940027, 0.08405, 0.547681, 0.507156, 0.884479, 0.719812, 0.722796, 0.819992, 0.0880767, 0.143816, 0.470205, 0.363248, 0.260774, 0.471419, 0.315017, 0.085655, 0.850258, 0.952386, 0.116359, 0.905662, 0.376133, 0.247312, 0.375566, 0.0750932, 0.750727, 0.600567, 0.38088, 0.260095, 0.122676, 0.416322, 0.344845, 0.271798, 0.864271, 0.863079, 0.57887, 0.0920866, 0.763279, 0.378082, 0.575777, 0.487278, 0.377556, 0.415196, 0.816426, 0.978372, 0.774955, 0.306551, 0.936601, 0.242495, 0.380497, 0.0283852, 0.429017, 0.187186, 0.940919, 0.868282, 0.132747, 0.427618, 0.0868148, 0.449171, 0.157487, 0.855666, 0.743791, 0.565989, 0.0300736, 0.153334, 0.286405, 0.976528, 0.542745, 0.973117, 0.327521, 0.500886, 0.362997, 0.905621, 0.530701, 0.670396, 0.264269, 0.0230654, 0.332958, 0.34225, 0.114581, 0.160618, 0.626045, 0.897061, 0.0454061, 0.463853, 0.994645, 0.455889, 0.569284, 0.538186, 0.293597, 0.854565, 0.729241, 0.868935, 0.32124, 0.680401, 0.30655, 0.976241, 0.947252, 0.43007, 0.93592, 0.813614, 0.129284, 0.299743, 0.443032, 0.111744, 0.814934, 0.126161, 0.234231, 0.42606, 0.182855, 0.49952, 0.947077, 0.519476, 0.127596, 0.58245, 0.685402, 0.323423, 0.845971, 0.170554, 0.106695, 0.247107, 0.84115, 0.538526, 0.990646, 0.750535, 0.515641, 0.666909, 0.805849, 0.375607, 0.108081, 0.706109, 0.45729, 0.852729, 0.398563, 0.633165, 0.445097, 0.88811, 0.115462, 0.391862, 0.77775, 0.662464, 0.310253, 0.65022, 0.383526, 0.866334, 0.477081, 0.750928, 0.904093, 0.955062, 0.855731, 0.381278, 0.986175, 0.119963, 0.0139481, 0.867997, 0.980113, 0.0376974, 0.67347, 0.735159, 0.289926, 0.783574, 0.384761, 0.232743, 
[test fixture data elided: rows of comma-separated floating-point values, apparently uniform pseudo-random draws in [0, 1), used as randomized numeric test input; the raw values are omitted here for readability]
0.211645, 0.0912735, 0.885725, 0.408843, 0.816578, 0.947253, 0.660385, 0.905483, 0.438485, 0.592493, 0.964808, 0.390944, 0.434461, 0.624856, 0.494029, 0.540508, 0.51451, 0.897778, 0.271091, 0.917262, 0.832085, 0.908695, 0.363763, 0.280258, 0.0206989, 0.938979, 0.264247, 0.348786, 0.136345, 0.504444, 0.774869, 0.446413, 0.991975, 0.810845, 0.0198101, 0.938085, 0.121592, 0.246139, 0.872632, 0.00349399, 0.447693, 0.619095, 0.773532, 0.911036, 0.313008, 0.423558, 0.266044, 0.0985083, 0.357055, 0.249961, 0.00837236, 0.668355, 0.946894, 0.287958, 0.49005, 0.613826, 0.459912, 0.761238, 0.504224, 0.905499, 0.786928, 0.609886, 0.457182, 0.319103, 0.00407148, 0.0451923, 0.938474, 0.69964, 0.761635, 0.648927, 0.743729, 0.341382, 0.0405512, 0.434753, 0.430363, 0.737545, 0.753841, 0.309877, 0.682698, 0.52824, 0.282921, 0.0398223, 0.108494, 0.275523, 0.193035, 0.0649202, 0.32645, 0.571398, 0.111497, 0.743052, 0.880672, 0.623785, 0.780881, 0.928861, 0.939333, 0.535115, 0.481065, 0.557964, 0.791385, 0.131174, 0.427806, 0.380435, 0.128044, 0.667008, 0.743779, 0.0241728, 0.433819, 0.787634, 0.808397, 0.162604, 0.601796, 0.322157, 0.166734, 0.544991, 0.012839, 0.352769, 0.575641, 0.785877, 0.464232, 0.436805, 0.816211, 0.677876, 0.200392, 0.967292, 0.822277, 0.510058, 0.959157, 0.532251, 0.173079, 0.0579285, 0.426559, 0.493084, 0.85396, 0.68958, 0.332149, 0.727335, 0.301874, 0.53545, 0.648203, 0.113179, 0.500634, 0.736473, 0.228937, 0.615063, 0.89921, 0.275833, 0.685858, 0.63812, 0.426503, 0.327029, 0.0407834, 0.571616, 0.798809, 0.61719, 0.627599, 0.667153, 0.511011, 0.301111, 0.214023, 0.745447, 0.897346, 0.869188, 0.404666, 0.25638, 0.738289, 0.95814, 0.39046, 0.690222, 0.486324, 0.681128, 0.454494, 0.131966, 0.299781, 0.435809, 0.64067, 0.127576, 0.0933634, 0.516857, 0.534826, 0.062432, 0.254201, 0.370317, 0.276649, 0.212643, 0.0365374, 0.215861, 0.98183, 0.558461, 0.882114, 0.760298, 0.0230026, 0.212627, 0.452885, 0.863122, 0.501076, 0.261205, 0.526928, 0.0882687, 0.382562, 0.799093, 0.217433, 0.872696, 0.352978, 0.162366, 0.200554, 0.0853807, 0.617135, 0.803623, 0.053892, 0.768681, 0.72646, 0.186899, 0.233063, 0.44126, 0.351109, 0.51456, 0.471347, 0.14898, 0.237901, 0.390519, 0.401825, 0.0780749, 0.927048, 0.277374, 0.403331, 0.817587, 0.272252, 0.55869, 0.0700548, 0.837055, 0.658314, 0.258131, 0.386913, 0.161075, 0.198577, 0.105789, 0.364399, 0.561513, 0.907905, 0.23445, 0.876724, 0.287997, 0.337503, 0.580209, 0.700581, 0.893672, 0.303112, 0.400399, 0.900429, 0.960338, 0.205897, 0.13644, 0.606848, 0.278969, 0.444764, 0.277917, 0.727935, 0.261535, 0.76446, 0.263156, 0.361122, 0.377576, 0.66583, 0.203896, 0.242684, 0.131856, 0.00691427, 0.384593, 0.0738922, 0.796443, 0.386714, 0.646815, 0.0465242, 0.37696, 0.839695, 0.516433, 0.51168, 0.735366, 0.313122, 0.926391, 0.38615, 0.328949, 0.908023, 0.022474, 0.944344, 0.0855576, 0.181333, 0.705536, 0.551495, 0.789005, 0.991004, 0.867341, 0.156493, 0.204584, 0.325171, 0.358104, 0.966632, 0.916216, 0.350342, 0.26379, 0.185092, 0.676694, 0.788359, 0.260773, 0.629055, 0.77337, 0.381681, 0.890435, 0.818667, 0.998066, 0.220561, 0.702782, 0.321089, 0.120623, 0.391574, 0.0824004, 0.335235, 0.0896565, 0.319899, 0.973759, 0.109473, 0.682094, 0.108171, 0.465936, 0.67167, 0.637918, 0.541815, 0.766935, 0.0189718, 0.879942, 0.270628, 0.932306, 0.983927, 0.0468443, 0.628777, 0.0181506, 0.380767, 0.826538, 0.733648, 0.45519, 0.821057, 0.513454, 0.348211, 0.693584, 0.148583, 0.977117, 0.430738, 0.61182, 0.418308, 0.673495, 0.3394, 0.427247, 0.810305, 0.184395, 0.153866, 
0.265563, 0.478276, 0.120549, 0.392185, 0.297863, 0.344672, 0.334586, 0.0398942, 0.285567, 0.654178, 0.00459614, 0.57217, 0.942972, 0.775396, 0.709488, 0.86794, 0.515648, 0.594831, 0.381757, 0.757027, 0.727035, 0.786055, 0.203072, 0.276466, 0.158124, 0.808501, 0.102053, 0.997998, 0.928943, 0.339197, 0.422558, 0.403526, 0.811289, 0.682467, 0.519364, 0.985631, 0.695145, 0.87326, 0.520205, 0.0111355, 0.809212, 0.0752159, 0.687829, 0.760943, 0.459425, 0.892339, 0.879954, 0.778171, 0.923641, 0.146616, 0.560208, 0.94499, 0.682062, 0.835312, 0.575422, 0.554996, 0.397026, 0.97177, 0.0242832, 0.0925651, 0.987186, 0.936212, 0.206866, 0.80444, 0.38113, 0.501063, 0.843346, 0.864754, 0.684651, 0.226487, 0.832008, 0.500568, 0.268452, 0.35985, 0.482842, 0.351491, 0.117092, 0.616672, 0.950603, 0.442528, 0.108993, 0.147171, 0.0811919, 0.620192, 0.164973, 0.570533, 0.344352, 0.227946, 0.706041, 0.319002, 0.836146, 0.431537, 0.373224, 0.830264, 0.859218, 0.135633, 0.204645, 0.497329, 0.476283, 0.898832, 0.318157, 0.0518413, 0.787788, 0.749823, 0.990882, 0.310739, 0.373329, 0.00699056, 0.763808, 0.189834, 0.993723, 0.323071, 0.556897, 0.955455, 0.871199, 0.783104, 0.84266, 0.657628, 0.118837, 0.396545, 0.836526, 0.421213, 0.796618, 0.92848, 0.196703, 0.453052, 0.21279, 0.0781732, 0.290214, 0.85674, 0.450393, 0.269058, 0.231177, 0.382764, 0.150857, 0.959229, 0.350774, 0.219011, 0.505458, 0.0262893, 0.753044, 0.193166, 0.164141, 0.228447, 0.950066, 0.1562, 0.465342, 0.0933231, 0.51868, 0.467452, 0.320578, 0.333187, 0.750958, 0.609314, 0.557419, 0.632221, 0.911576, 0.051691, 0.838721, 0.592503, 0.556489, 0.28693, 0.79994, 0.186534, 0.137593, 0.565969, 0.208817, 0.616289, 0.111668, 0.339988, 0.298029, 0.709292, 0.373836, 0.480828, 0.379162, 0.882696, 0.413005, 0.128272, 0.557877, 0.0916656, 0.323702, 0.727149, 0.385419, 0.945602, 0.924983, 0.5581, 0.605596, 0.00841205, 0.790584, 0.679761, 0.0157527, 0.882751, 0.856038, 0.822924, 0.199808, 0.208543, 0.319852, 0.804386, 0.835573, 0.428316, 0.0441746, 0.401189, 0.973443, 0.693568, 0.77892, 0.426246, 0.578862, 0.884189, 0.881808, 0.99988, 0.41897, 0.635222, 0.243414, 0.687665, 0.662656, 0.921206, 0.931552, 0.487752, 0.954991, 0.770023, 0.413163, 0.166651, 0.146153, 0.552683, 0.359568, 0.430048, 0.495043, 0.355809, 0.985572, 0.696166, 0.513866, 0.838096, 0.926908, 0.421392, 0.394051, 0.298775, 0.957049, 0.44277, 0.295917, 0.321999, 0.200425, 0.376574, 0.0706394, 0.976208, 0.712279, 0.127169, 0.991754, 0.270863, 0.363507, 0.373852, 0.349077, 0.713998, 0.160536, 0.535705, 0.457604, 0.505169, 0.824704, 0.239087, 0.257304, 0.202766, 0.236109, 0.332589, 0.0238311, 0.260527, 0.00371746, 0.412384, 0.482204, 0.711049, 0.4693, 0.184225, 0.831808, 0.268629, 0.687246, 0.364263, 0.320361, 0.605563, 0.431841, 0.571692, 0.918598, 0.0189125, 0.722953, 0.197843, 0.644876, 0.860719, 0.254702, 0.158021, 0.393331, 0.535694, 0.272737, 0.770715, 0.362387, 0.197682, 0.387888, 0.71038, 0.663247, 0.556153, 0.138118, 0.327036, 0.891755, 0.444031, 0.171477, 0.106749, 0.920795, 0.886496, 0.0492393, 0.945329, 0.577699, 0.973865, 0.329843, 0.520608, 0.396895, 0.319288, 0.903879, 0.962445, 0.653325, 0.0848507, 0.393099, 0.458913, 0.981772, 0.942494, 0.0410877, 0.285956, 0.595612, 0.942453, 0.446092, 0.83528, 0.898894, 0.062709, 0.0657916, 0.484063, 0.8508, 0.227725, 0.752846, 0.92173, 0.480526, 0.0964081, 0.948487, 0.193416, 0.682024, 0.501548, 0.862019, 0.614713, 0.950482, 0.70864, 0.359331, 0.0469288, 0.657403, 0.203831, 0.687638, 0.222811, 0.483744, 0.529177, 0.621497, 0.485951, 0.534666, 
0.446161, 0.250163, 0.889559, 0.166582, 0.920422, 0.769784, 0.547718, 0.465831, 0.10656, 0.235391, 0.0369706, 0.431307, 0.772417, 0.529851, 0.114619, 0.484428, 0.241478, 0.419788, 0.499923, 0.554834, 0.228483, 0.18014, 0.0919056, 0.280975, 0.164224, 0.0682185, 0.887408, 0.997653, 0.945901, 0.949911, 0.986903, 0.713256, 0.930299, 0.875153, 0.134418, 0.0864959, 0.197977, 0.762442, 0.89542, 0.951293, 0.908159, 0.960262, 0.356559, 0.542306, 0.308836, 0.31353, 0.83746, 0.914564, 0.932738, 0.716796, 0.95133, 0.977492, 0.835335, 0.811688, 0.867739, 0.658936, 0.534261, 0.465219, 0.946242, 0.0916335, 0.15547, 0.0837199, 0.328251, 0.353232, 0.52683, 0.0530961, 0.844124, 0.49339, 0.574155, 0.259173, 0.807415, 0.742004, 0.367564, 0.532553, 0.355819, 0.160348, 0.812619, 0.682341, 0.460134, 0.472149, 0.877119, 0.691216, 0.716469, 0.732743, 0.101109, 0.163486, 0.426536, 0.00438677, 0.341102, 0.127627, 0.974165, 0.451785, 0.582795, 0.960278, 0.215978, 0.30271, 0.728424, 0.39953, 0.0907827, 0.140596, 0.666724, 0.509406, 0.639216, 0.583129, 0.639649, 0.0487489, 0.962468, 0.586461, 0.255437, 0.357483, 0.248231, 0.824475, 0.35872, 0.298363, 0.756249, 0.968111, 0.654413, 0.55423, 0.273308, 0.790274, 0.789088, 0.582524, 0.988269, 0.613496, 0.497697, 0.664909, 0.158875, 0.686242, 0.792637, 0.5494, 0.390752, 0.0232917, 0.816841, 0.393919, 0.803319, 0.025962, 0.236398, 0.771318, 0.0280134, 0.955289, 0.870184, 0.803725, 0.90259, 0.579897, 0.72182, 0.831769, 0.756231, 0.0269657, 0.0572628, 0.795962, 0.63808, 0.792817, 0.465746, 0.29455, 0.256751, 0.349447, 0.205934, 0.214258, 0.587056, 0.0334117, 0.65442, 0.568748, 0.40156, 0.797245, 0.00572588, 0.777739, 0.526307, 0.909452, 0.799369, 0.861574, 0.86602, 0.345673, 0.709272, 0.198909, 0.454914, 0.951196, 0.302842, 0.956016, 0.202018, 0.627276, 0.42665, 0.357785, 0.389542, 0.391284, 0.852638, 0.275424, 0.524347, 0.232507, 0.0251263, 0.857599, 0.184222, 0.165429, 0.778964, 0.595748, 0.84945, 0.0479109, 0.573057, 0.973021, 0.0168931, 0.263429, 0.247403, 0.214808, 0.758783, 0.608823, 0.450129, 0.46428, 0.296656, 0.54451, 0.202912, 0.955391, 0.410756, 0.478998, 0.161717, 0.74582, 0.984672, 0.631941, 0.845032, 0.865216, 0.734043, 0.913582, 0.74956, 0.913796, 0.852189, 0.0526545, 0.620136, 0.335249, 0.891606, 0.04877, 0.11121, 0.237891, 0.610885, 0.951454, 0.425678, 0.855927, 0.70552, 0.906134, 0.962957, 0.760637, 0.48084, 0.472374, 0.296373, 0.679627, 0.0545963, 0.308886, 0.0136083, 0.0843587, 0.750473, 0.0169516, 0.408982, 0.487306, 0.44847, 0.497858, 0.166848, 0.536904, 0.0238632, 0.532125, 0.608967, 0.880586, 0.916938, 0.466882, 0.183992, 0.99038, 0.969359, 0.704156, 0.0707591, 0.311747, 0.643952, 0.961373, 0.117798, 0.0127019, 0.568872, 0.611655, 0.132922, 0.122016, 0.328011, 0.667601, 0.41521, 0.84017, 0.951388, 0.438889, 0.998321, 0.246801, 0.857503, 0.549359, 0.448145, 0.910791, 0.663354, 0.466896, 0.969398, 0.185962, 0.606303, 0.351741, 0.554721, 0.211537, 0.287294, 0.450199, 0.0845679, 0.895073, 0.950543, 0.112979, 0.366648, 0.353683, 0.839115, 0.78919, 0.730804, 0.0128286, 0.782073, 0.679974, 0.938358, 0.917862, 0.893455, 0.40637, 0.708488, 0.0402671, 0.466489, 0.830445, 0.207765, 0.633953, 0.885332, 0.186791, 0.322338, 0.471009, 0.195981, 0.806484, 0.0898016, 0.342517, 0.402745, 0.897323, 0.50386, 0.886677, 0.775764, 0.340886, 0.0178599, 0.071966, 0.355321, 0.526534, 0.571361, 0.891612, 0.535703, 0.680975, 0.375466, 0.473323, 0.346191, 0.150443, 0.0793787, 0.736522, 0.156608, 0.723893, 0.0418027, 0.244151, 0.281301, 0.242679, 0.962778, 0.659296, 0.0406442, 
0.745364, 0.0491158, 0.207134, 0.29123, 0.390656, 0.851116, 0.775254, 0.676169, 0.775072, 0.201753, 0.911746, 0.916252, 0.490718, 0.47074, 0.208377, 0.437142, 0.190671, 0.317374, 0.716163, 0.155438, 0.211228, 0.0992648, 0.734662, 0.622772, 0.682174, 0.364472, 0.426553, 0.54833, 0.561615, 0.346359, 0.0637497, 0.134678, 0.516024, 0.838203, 0.0333728, 0.999817, 0.405307, 0.882846, 0.523026, 0.247645, 0.700042, 0.597067, 0.846675, 0.41602, 0.968026, 0.0935502, 0.0795948, 0.222172, 0.311875, 0.522947, 0.532991, 0.691376, 0.456124, 0.307415, 0.151083, 0.518461, 0.975926, 0.788788, 0.992122, 0.442011, 0.221183, 0.126321, 0.491181, 0.0782218, 0.00835354, 0.0153873, 0.849414, 0.781973, 0.72006, 0.0754405, 0.270888, 0.23216, 0.678088, 0.781213, 0.0681483, 0.973903, 0.567992, 0.305081, 0.25545, 0.31452, 0.601257, 0.0730959, 0.324151, 0.679678, 0.466336, 0.0866749, 0.148334, 0.844256, 0.597887, 0.420549, 0.485273, 0.84316, 0.266977, 0.782732, 0.518613, 0.175381, 0.831228, 0.0826966, 0.95727, 0.272967, 0.904909, 0.714836, 0.852345, 0.0263536, 0.55001, 0.376085, 0.527171, 0.848943, 0.596237, 0.213725, 0.360239, 0.989413, 0.793701, 0.144587, 0.422074, 0.252385, 0.035152, 0.121605, 0.66383, 0.218948, 0.928759, 0.123024, 0.18125, 0.927481, 0.736012, 0.825113, 0.464479, 0.947227, 0.524561, 0.591668, 0.562223, 0.0891991, 0.870499, 0.248908, 0.251128, 0.185599, 0.0911438, 0.0606068, 0.418797, 0.251509, 0.0401454, 0.708806, 0.362404, 0.10882, 0.205306, 0.338228, 0.567909, 0.956244, 0.404162, 0.100574, 0.675315, 0.321541, 0.167381, 0.054955, 0.45024, 0.308185, 0.180924, 0.018628, 0.206415, 0.486079, 0.789622, 0.683993, 0.186931, 0.989305, 0.795853, 0.311741, 0.433837, 0.36134, 0.469609, 0.650928, 0.000152883, 0.296594, 0.552277, 0.822009, 0.412609, 0.805788, 0.340937, 0.118595, 0.714683, 0.0813911, 0.669685, 0.89432, 0.735282, 0.333414, 0.800931, 0.866457, 0.11765, 0.0291047, 0.68426, 0.97027, 0.898661, 0.509765, 0.452202, 0.609894, 0.874029, 0.651235, 0.967725, 0.371908, 0.719054, 0.459508, 0.361349, 0.248698, 0.988409, 0.832164, 0.267637, 0.852097, 0.27578, 0.145816, 0.0521819, 0.497729, 0.438889, 0.946071, 0.244535, 0.044904, 0.395039, 0.220939, 0.225013, 0.907887, 0.439219, 0.108259, 0.573008, 0.327463, 0.513467, 0.384904, 0.394456, 0.627174, 0.525882, 0.105474, 0.546369, 0.324586, 0.128633, 0.279543, 0.308454, 0.399267, 0.501467, 0.555036, 0.482513, 0.648467, 0.44224, 0.0430528, 0.0470284, 0.303334, 0.11172, 0.344984, 0.728024, 0.968193, 0.998746, 0.714951, 0.22583, 0.304332, 0.311972, 0.203844, 0.932815, 0.729309, 0.910439, 0.59709, 0.248474, 0.929357, 0.966751, 0.407996, 0.79199, 0.314115, 0.334815, 0.463686, 0.271123, 0.792893, 0.0220363, 0.0470036, 0.364282, 0.7702, 0.761336, 0.365589, 0.232697, 0.426116, 0.767974, 0.942658, 0.533315, 0.187012, 0.272715, 0.512104, 0.0753536, 0.920745, 0.749625, 0.397315, 0.440973, 0.157731, 0.142438, 0.366963, 0.98238, 0.778702, 0.666278, 0.119054, 0.702818, 0.131588, 0.316012, 0.10308, 0.343372, 0.015803, 0.746052, 0.537793, 0.179641, 0.347586, 0.038689, 0.0419869, 0.121337, 0.274554, 0.660173, 0.391787, 0.197452, 0.448393, 0.83109, 0.000621278, 0.0744846, 0.520339, 0.0639139, 0.052574, 0.945808, 0.286307, 0.210483, 0.543927, 0.0473871, 0.292822, 0.540231, 0.961765, 0.0334788, 0.73264, 0.0930378, 0.116691, 0.416015, 0.275887, 0.591091, 0.201551, 0.812337, 0.122688, 0.81739, 0.214905, 0.327088, 0.826873, 0.231944, 0.0714683, 0.100988, 0.796134, 0.625878, 0.148388, 0.651136, 0.2714, 0.777948, 0.526884, 0.23928, 0.177201, 0.556602, 0.443363, 0.711407, 0.412621, 0.390755, 
0.652081, 0.553327, 0.348046, 0.690167, 0.139136, 0.638676, 0.719849, 0.969193, 0.226688, 0.0559289, 0.179877, 0.349542, 0.154048, 0.859177, 0.348149, 0.129761, 0.0752452, 0.75169, 0.209575, 0.0382844, 0.33134, 0.217399, 0.76794, 0.247522, 0.137297, 0.742603, 0.930618, 0.252263, 0.277657, 0.190196, 0.960708, 0.955557, 0.954958, 0.126494, 0.885494, 0.153457, 0.508318, 0.577839, 0.0704635, 0.131583, 0.432711, 0.587058, 0.16253, 0.691451, 0.512166, 0.644221, 0.0817846, 0.174534, 0.0211406, 0.982946, 0.897297, 0.143828, 0.250435, 0.315898, 0.485206, 0.346533, 0.906953, 0.658875, 0.901944, 0.353259, 0.649786, 0.98355, 0.96192, 0.0486344, 0.0733306, 0.530502, 0.51493, 0.669462, 0.712869, 0.443836, 0.375251, 0.616511, 0.17634, 0.679968, 0.23312, 0.86475, 0.0923384, 0.445303, 0.0426846, 0.466083, 0.115238, 0.634103, 0.145757, 0.964278, 0.368903, 0.282854, 0.857168, 0.262771, 0.544067, 0.975218, 0.114069, 0.564343, 0.00196608, 0.255016, 0.121178, 0.555693, 0.384026, 0.541916, 0.862135, 0.861063, 0.061127, 0.401126, 0.0718832, 0.592997, 0.221712, 0.665091, 0.392748, 0.0284544, 0.00480383, 0.331763, 0.258329, 0.907294, 0.882583, 0.747399, 0.149289, 0.684526, 0.0928661, 0.47255, 0.509291, 0.159447, 0.0495326, 0.176567, 0.875763, 0.282266, 0.461443, 0.304288, 0.954562, 0.0246622, 0.910701, 0.259072, 0.235545, 0.589303, 0.0198529, 0.633738, 0.603969, 0.125811, 0.946871, 0.849569, 0.219594, 0.871518, 0.0852934, 0.487085, 0.587286, 0.0324913, 0.694259, 0.833348, 0.088147, 0.335614, 0.856393, 0.99062, 0.0129477, 0.29039, 0.652248, 0.494523, 0.620483, 0.337184, 0.307545, 0.909197, 0.948583, 0.735265, 0.121434, 0.20092, 0.794952, 0.811182, 0.638097, 0.0519457, 0.754894, 0.681289, 0.931989, 0.187526, 0.854531, 0.400485, 0.0750136, 0.417656, 0.00785688, 0.353627, 0.874442, 0.914037, 0.549368, 0.961301, 0.693744, 0.560381, 0.186382, 0.892252, 0.132605, 0.851413, 0.528603, 0.855361, 0.900714, 0.381003, 0.367088, 0.446593, 0.0565229, 0.730683, 0.798101, 0.00584498, 0.337595, 0.548247, 0.292728, 0.704436, 0.235424, 0.980487, 0.266191, 0.721307, 0.333607, 0.517506, 0.0155455, 0.570335, 0.935535, 0.697771, 0.529979, 0.389916, 0.0594917, 0.48949, 0.859058, 0.573714, 0.0524481, 0.800078, 0.276035, 0.461155, 0.609801, 0.56982, 0.532928, 0.647428, 0.385473, 0.529303, 0.90532, 0.0749909, 0.311591, 0.406808, 0.250333, 0.428554, 0.0954362, 0.299415, 0.23062, 0.0758303, 0.330095, 0.864771, 0.802615, 0.0790398, 0.795515, 0.924973, 0.997844, 0.231854, 0.400956, 0.624391, 0.33233, 0.265495, 0.70581, 0.278682, 0.0577491, 0.61073, 0.849807, 0.122588, 0.740201, 0.465677, 0.232248, 0.362477, 0.916382, 0.206151, 0.342154, 0.969134, 0.29218, 0.048263, 0.849728, 0.855132, 0.311117, 0.299375, 0.513553, 0.993093, 0.981964, 0.313915, 0.361531, 0.297721, 0.265988, 0.287707, 0.648229, 0.185482, 0.824324, 0.237209, 0.695025, 0.0765719, 0.751906, 0.774135, 0.146658, 0.193966, 0.0945798, 0.814675, 0.676216, 0.522658, 0.172905, 0.891806, 0.612833, 0.860443, 0.0138056, 0.41671, 0.111926, 0.897906, 0.616594, 0.0187485, 0.377173, 0.2049, 0.313285, 0.34492, 0.761399, 0.48825, 0.977209, 0.376702, 0.232358, 0.710273, 0.614304, 0.752861, 0.473428, 0.619673, 0.996026, 0.857368, 0.280089, 0.589473, 0.347378, 0.761665, 0.234101, 0.891007, 0.963599, 0.537103, 0.6287, 0.928706, 0.159532, 0.422172, 0.472491, 0.503188, 0.407906, 0.965226, 0.0893149, 0.0742298, 0.511836, 0.0566646, 0.989978, 0.810493, 0.607607, 0.350978, 0.532754, 0.590771, 0.652036, 0.519291, 0.028445, 0.295716, 0.554999, 0.532723, 0.648494, 0.958449, 0.989351, 0.986528, 0.85819, 0.129323, 
0.645523, 0.701979, 0.961875, 0.77837, 0.37565, 0.6681, 0.938844, 0.150579, 0.733476, 0.83588, 0.414966, 0.279319, 0.271954, 0.697562, 0.328876, 0.473416, 0.507454, 0.492868, 0.942246, 0.0539982, 0.889359, 0.562326, 0.0874451, 0.634312, 0.0570977, 0.828171, 0.640236, 0.369778, 0.225997, 0.478475, 0.604702, 0.7699, 0.564844, 0.170982, 0.772329, 0.104521, 0.646618, 0.910601, 0.471964, 0.640794, 0.772525, 0.377406, 0.427506, 0.246801, 0.767264, 0.435218, 0.909188, 0.578062, 0.728639, 0.281208, 0.986284, 0.389003, 0.883398, 0.592691, 0.212958, 0.299002, 0.0623587, 0.732634, 0.909699, 0.157832, 0.94244, 0.344185, 0.515051, 0.526997, 0.184985, 0.230327, 0.848123, 0.375193, 0.663042, 0.216331, 0.0996718, 0.845606, 0.845052, 0.245986, 0.505333, 0.18181, 0.659141, 0.0854387, 0.0770834, 0.854145, 0.0621484, 0.650899, 0.0464725, 0.862467, 0.562941, 0.695814, 0.107036, 0.525648, 0.489519, 0.667978, 0.541843, 0.965847, 0.813297, 0.973964, 0.832747, 0.633505, 0.29484, 0.925737, 0.671868, 0.782548, 0.0406698, 0.0388169, 0.530286, 0.945494, 0.385998, 0.426855, 0.708667, 0.446111, 0.675056, 0.754651, 0.49899, 0.000123706, 0.82139, 0.634399, 0.5122, 0.0436464, 0.59634, 0.210051, 0.892268, 0.939437, 0.768522, 0.829532, 0.0121239, 0.793321, 0.677915, 0.402389, 0.381086, 0.23661, 0.700266, 0.272838, 0.799273, 0.142421, 0.421968, 0.454904, 0.125529, 0.328087, 0.53168, 0.571942, 0.745459, 0.517662, 0.185116, 0.400209, 0.306928, 0.784875, 0.206251, 0.800349, 0.998392, 0.319547, 0.652783, 0.307645, 0.46269, 0.29107, 0.272444, 0.460975, 0.0228258, 0.727633, 0.101851, 0.839993, 0.155526, 0.663295, 0.00431488, 0.963233, 0.272761, 0.94667, 0.220149, 0.654932, 0.260382, 0.372564, 0.208659, 0.808699, 0.366168, 0.978037, 0.637084, 0.397285, 0.341259, 0.193716, 0.518331, 0.711828, 0.16307, 0.859098, 0.154016, 0.712408, 0.780152, 0.321595, 0.964141, 0.146146, 0.026113, 0.442154, 0.883813, 0.0235895, 0.0497293, 0.514756, 0.763943, 0.510599, 0.748551, 0.987939, 0.556698, 0.756057, 0.94576, 0.325178, 0.953888, 0.683355, 0.365568, 0.904735, 0.799573, 0.0678182, 0.415067, 0.776265, 0.608494, 0.392023, 0.633459, 0.985935, 0.0337849, 0.565807, 0.546233, 0.428603, 0.203867, 0.674154, 0.510406, 0.863566, 0.82298, 0.82407, 0.626021, 0.998042, 0.543192, 0.728762, 0.33349, 0.908589, 0.633196, 0.87327, 0.841696, 0.0274701, 0.6926, 0.585719, 0.0407735, 0.752691, 0.732556, 0.36135, 0.39899, 0.473126, 0.957789, 0.491305, 0.716153, 0.268729, 0.0228071, 0.940054, 0.500136, 0.522634, 0.792832, 0.998733, 0.0497923, 0.354264, 0.900295, 0.08666, 0.0213953, 0.116608, 0.207397, 0.855374, 0.616082, 0.49466, 0.845885, 0.563914, 0.638902, 0.330478, 0.703766, 0.974715, 0.0469293, 0.704443, 0.965969, 0.566873, 0.364951, 0.329615, 0.326412, 0.708717, 0.623441, 0.805946, 0.111684, 0.289626, 0.360422, 0.858949, 0.833835, 0.145493, 0.851671, 0.576223, 0.0612911, 0.311278, 0.544059, 0.655213, 0.98384, 0.17825, 0.58869, 0.902854, 0.623592, 0.996593, 0.255208, 0.938732, 0.187494, 0.930822, 0.731068, 0.313847, 0.997787, 0.484787, 0.0690296, 0.972855, 0.761277, 0.837678, 0.21026, 0.0938839, 0.605135, 0.40101, 0.698054, 0.870501, 0.566661, 0.377853, 0.734238, 0.528549, 0.536111, 0.828237, 0.353234, 0.0212073, 0.995206, 0.997322, 0.801091, 0.253836, 0.422774, 0.439376, 0.30424, 0.725407, 0.661256, 0.000934579, 0.536666, 0.372358, 0.554708, 0.566091, 0.478092, 0.149034, 0.866995, 0.257266, 0.356091, 0.514894, 0.866604, 0.0946213, 0.674661, 0.360633, 0.0235747, 0.44877, 0.196386, 0.0110327, 0.564695, 0.256307, 0.465752, 0.940587, 0.157807, 0.752528, 0.236261, 
0.334061, 0.824516, 0.750611, 0.171739, 0.955024, 0.155174, 0.811317, 0.151852, 0.370083, 0.137807, 0.62584, 0.676249, 0.126773, 0.269, 0.0133221, 0.0409992, 0.909648, 0.92545, 0.692274, 0.808631, 5.22226e-05, 0.445186, 0.907233, 0.646883, 0.380297, 0.145854, 0.0366033, 0.543275, 0.589321, 0.770368, 0.76493, 0.907667, 0.013469, 0.45581, 0.407324, 0.400907, 0.64677, 0.800205, 0.974743, 0.199658, 0.476778, 0.155743, 0.511614, 0.29068, 0.129454, 0.403126, 0.133288, 0.828854, 0.797533, 0.0408515, 0.929734, 0.89193, 0.489751, 0.242435, 0.0353366, 0.884019, 0.413616, 0.625557, 0.165755, 0.965712, 0.211118, 0.0516026, 0.728871, 0.343795, 0.936064, 0.882308, 0.508928, 0.0317102, 0.522808, 0.989725, 0.468226, 0.900931, 0.518687, 0.0558699, 0.827276, 0.939467, 0.824394, 0.396372, 0.463108, 0.991826, 0.867848, 0.163382, 0.341291, 0.0408896, 0.891399, 0.946901, 0.648964, 0.331905, 0.106113, 0.294778, 0.5142, 0.821597, 0.980835, 0.251497, 0.404104, 0.796816, 0.907509, 0.224666, 0.740501, 0.746964, 0.154066, 0.24623, 0.965865, 0.0257201, 0.349377, 0.19348, 0.774477, 0.673669, 0.00334104, 0.197086, 0.693573, 0.451286, 0.630362, 0.479849, 0.549777, 0.499125, 0.77955, 0.420615, 0.904979, 0.818498, 0.315175, 0.676506, 0.559792, 0.710228, 0.535712, 0.206535, 0.671435, 0.94613, 0.018489, 0.536387, 0.171385, 0.531761, 0.107144, 0.440946, 0.467984, 0.114912, 0.644664, 0.947846, 0.189076, 0.79553, 0.489926, 0.84867, 0.44369, 0.748664, 0.491966, 0.424008, 0.900913, 0.345689, 0.170591, 0.778731, 0.289108, 0.563852, 0.272435, 0.398348, 0.917094, 0.672414, 0.0790704, 0.965411, 0.752036, 0.858433, 0.598492, 0.416661, 0.838474, 0.0743478, 0.332426, 0.265022, 0.817673, 0.0111449, 0.97324, 0.585007, 0.965854, 0.703864, 0.430449, 0.126915, 0.263827, 0.713899, 0.788896, 0.459829, 0.74091, 0.25231, 0.656755, 0.878163, 0.0135091, 0.971193, 0.0679634, 0.695175, 0.635316, 0.81517, 0.816021, 0.859318, 0.185514, 0.536759, 0.0854268, 0.0198438, 0.396785, 0.165767, 0.021142, 0.844817, 0.278006, 0.248765, 0.215202, 0.650363, 0.421792, 0.0231004, 0.381938, 0.322261, 0.95927, 0.587418, 0.890705, 0.0918716, 0.756206, 0.307385, 0.363735, 0.449385, 0.214686, 0.498561, 0.553097, 0.771608, 0.0452504, 0.789986, 0.784346, 0.671845, 0.347079, 0.892937, 0.771831, 0.244026, 0.422099, 0.647207, 0.55745, 0.341059, 0.986261, 0.854703, 0.632232, 0.168835, 0.122339, 0.748749, 0.417043, 0.334404, 0.760741, 0.0747617, 0.0674361, 0.909866, 0.548944, 0.228904, 0.307954, 0.432871, 0.758219, 0.912944, 0.874557, 0.204402, 0.608711, 0.892022, 0.584904, 0.693536, 0.143239, 0.147304, 0.65105, 0.61727, 0.310902, 0.268455, 0.232825, 0.242917, 0.284695, 0.425098, 0.823516, 0.103411, 0.37422, 0.627118, 0.165332, 0.0996086, 0.229779, 0.704111, 0.247478, 0.442131, 0.230572, 0.254941, 0.668633, 0.00632709, 0.902998, 0.14493, 0.185539, 0.988112, 0.743908, 0.576387, 0.270589, 0.810955, 0.519569, 0.587609, 0.154675, 0.930571, 0.336278, 0.791468, 0.680265, 0.156684, 0.9047, 0.836809, 0.0627824, 0.779515, 0.951883, 0.0402845, 0.667585, 0.0596659, 0.00554327, 0.174659, 0.559528, 0.71804, 0.827254, 0.752481, 0.254988, 0.257062, 0.243701, 0.52447, 0.419191, 0.246754, 0.560454, 0.954979, 0.106195, 0.312706, 0.821995, 0.691169, 0.733579, 0.614484, 0.395418, 0.398191, 0.796054, 0.729584, 0.871194, 0.798215, 0.714881, 0.62515, 0.868716, 0.233857, 0.604042, 0.989776, 0.68056, 0.588099, 0.468001, 0.410207, 0.703179, 0.0518371, 0.371548, 0.244777, 0.676483, 0.448314, 0.817105, 0.261293, 0.0560965, 0.305512, 0.410769, 0.53507, 0.810238, 0.487978, 0.717539, 0.229925, 0.755798, 
0.849829, 0.935615, 0.479142, 0.956455, 0.178576, 0.306604, 0.590131, 0.461184, 0.726295, 0.339509, 0.931755, 0.51469, 0.562869, 0.233045, 0.669005, 0.458389, 0.304234, 0.998437, 0.802958, 0.00287254, 0.549664, 0.405988, 0.669151, 0.931069, 0.443184, 0.786795, 0.563031, 0.904177, 0.564392, 0.0695452, 0.537668, 0.898558, 0.161831, 0.94417, 0.0660374, 0.272476, 0.884633, 0.448868, 0.216023, 0.879965, 0.536118, 0.0129657, 0.918419, 0.731987, 0.238858, 0.919786, 0.633086, 0.3979, 0.991612, 0.0248761, 0.466187, 0.518563, 0.00135427, 0.783371, 0.81312, 0.845813, 0.440608, 0.268748, 0.177056, 0.534299, 0.744045, 0.5338, 0.0960609, 0.770534, 0.195621, 0.664168, 0.810979, 0.291199, 0.379753, 0.270422, 0.386728, 0.634924, 0.124886, 0.0528745, 0.0441749, 0.62043, 0.362443, 0.594055, 0.239577, 0.378929, 0.00362681, 0.899329, 0.741276, 0.310037, 0.591128, 0.59827, 0.701737, 0.754295, 0.901834, 0.366462, 0.424326, 0.578993, 0.973271, 0.979568, 0.329886, 0.417171, 0.600718, 0.829349, 0.140139, 0.591929, 0.360373, 0.710421, 0.20802, 0.45779, 0.946281, 0.804943, 0.347426, 0.0933944, 0.0850293, 0.678406, 0.500757, 0.801478, 0.856268, 0.772649, 0.625221, 0.811458, 0.674832, 0.201819, 0.348209, 0.829938, 0.351239, 0.767899, 0.236525, 0.879076, 0.307544, 0.588421, 0.994447, 0.484925, 0.322582, 0.136779, 0.0596135, 0.985532, 0.823767, 0.348197, 0.839272, 0.446138, 0.0600339, 0.188546, 0.784976, 0.924112, 0.0285873, 0.2615, 0.420199, 0.484778, 0.314624, 0.650726, 0.741877, 0.348454, 0.493312, 0.868449, 0.16494, 0.101972, 0.150256, 0.642995, 0.609494, 0.211414, 0.170353, 0.155403, 0.973829, 0.218438, 0.771411, 0.533138, 0.256995, 0.508021, 0.386629, 0.875839, 0.471398, 0.259714, 0.046088, 0.661181, 0.109682, 0.953305, 0.426744, 0.318613, 0.131064, 0.528107, 0.901909, 0.704159, 0.390971, 0.117592, 0.340228, 0.0271295, 0.0343565, 0.65433, 0.630972, 0.643468, 0.956491, 0.290683, 0.210351, 0.0684726, 0.879056, 0.538815, 0.73113, 0.321552, 0.789439, 0.0220458, 0.681739, 0.658808, 0.0207763, 0.399012, 0.442289, 0.0081904, 0.965355, 0.968383, 0.505478, 0.734544, 0.456459, 0.946505, 0.5838, 0.0841368, 0.871819, 0.966062, 0.529957, 0.0685184, 0.505448, 0.00104559, 0.519425, 0.0994998, 0.94876, 0.187865, 0.0478558, 0.203239, 0.121373, 0.380502, 0.476708, 0.253908, 0.760487, 0.524501, 0.00378336, 0.424938, 0.396477, 0.988718, 0.675748, 0.897479, 0.393099, 0.192654, 0.24022, 0.818951, 0.452006, 0.470301, 0.511348, 0.152986, 0.816335, 0.827905, 0.900473, 0.526164, 0.522903, 0.597298, 0.855704, 0.0843944, 0.271079, 0.261082, 0.632587, 0.215406, 0.735931, 0.963066, 0.369052, 0.331391, 0.856121, 0.926687, 0.872205, 0.193515, 0.658951, 0.213902, 0.419573, 0.842185, 0.972646, 0.928725, 0.825522, 0.374839, 0.380365, 0.477968, 0.43664, 0.792471, 0.373677, 0.928721, 0.891099, 0.00457181, 0.0151176, 0.155974, 0.231442, 0.888252, 0.593933, 0.567595, 0.742686, 0.450106, 0.46405, 0.972927, 0.700518, 0.746699, 0.672083, 0.7745, 0.258898, 0.492833, 0.345259, 0.117912, 0.813262, 0.287436, 0.766698, 0.397228, 0.71379, 0.912584, 0.623629, 0.998045, 0.335258, 0.305145, 0.922401, 0.233494, 0.402921, 0.793861, 0.317098, 0.854625, 0.932591, 0.448019, 0.188165, 0.358034, 0.628645, 0.592274, 0.728908, 0.664309, 0.236272, 0.782148, 0.406931, 0.464944, 0.687448, 0.641086, 0.653573, 0.698409, 0.722093, 0.0807905, 0.14551, 0.774841, 0.112036, 0.611869, 0.378101, 0.968302, 0.935715, 0.591888, 0.379607, 0.0465486, 0.756377, 0.708986, 0.162911, 0.185895, 0.292757, 0.222649, 0.0541529, 0.66727, 0.148261, 0.798123, 0.53794, 0.791816, 0.339215, 0.750851, 
0.928108, 0.202508, 0.174139, 0.879858, 0.785746, 0.37254, 0.0724217, 0.890984, 0.506575, 0.179996, 0.493687, 0.274805, 0.829046, 0.685959, 0.222078, 0.477704, 0.457378, 0.123367, 0.307678, 0.958242, 0.328171, 0.704138, 0.505905, 0.442728, 0.478052, 0.188494, 0.819543, 0.695942, 0.418202, 0.905725, 0.539901, 0.822009, 0.590204, 0.986972, 0.658684, 0.653203, 0.759507, 0.153342, 0.230653, 0.975424, 0.492179, 0.773185, 0.984421, 0.591061, 0.304929, 0.285553, 0.248724, 0.868777, 0.165803, 0.571886, 0.0382151, 0.737778, 0.496819, 0.593074, 0.543781, 0.870749, 0.473773, 0.385994, 0.181543, 0.983263, 0.749487, 0.768961, 0.869838, 0.424798, 0.212381, 0.91479, 0.908922, 0.88557, 0.0849491, 0.332601, 0.994214, 0.0708667, 0.68541, 0.683132, 0.47715, 0.463184, 0.377995, 0.739698, 0.854983, 0.416695, 0.0590477, 0.0277457, 0.972863, 0.56666, 0.982644, 0.0616205, 0.317295, 0.610883, 0.49001, 0.969976, 0.473553, 0.144414, 0.163364, 0.263974, 0.0800355, 0.799327, 0.215528, 0.555437, 0.355901, 0.095414, 0.526159, 0.922489, 0.884723, 0.552319, 0.221846, 0.342006, 0.833543, 0.0961636, 0.854414, 0.225768, 0.345182, 0.467698, 0.2331, 0.752314, 0.782751, 0.838056, 0.897655, 0.394737, 0.709481, 0.431709, 0.640688, 0.374215, 0.415891, 0.177086, 0.849688, 0.495799, 0.413364, 0.869047, 0.302828, 0.384615, 0.777178, 0.999214, 0.972958, 0.295983, 0.653377, 0.977729, 0.91197, 0.676175, 0.567707, 0.747602, 0.942602, 0.00302163, 0.354911, 0.0682867, 0.876355, 0.603126, 0.307834, 0.0845982, 0.140071, 0.575807, 0.137581, 0.130204, 0.730814, 0.488753, 0.1717, 0.535585, 0.18301, 0.974918, 0.355627, 0.240771, 0.32037, 0.36747, 0.969411, 0.097091, 0.634293, 0.281204, 0.857913, 0.684348, 0.657576, 0.267203, 0.0507833, 0.543298, 0.663755, 0.915279, 0.223358, 0.122932, 0.530327, 0.307423, 0.577205, 0.0229419, 0.538232, 0.85606, 0.55781, 0.890343, 0.316649, 0.0318871, 0.000642694, 0.935739, 0.358688, 0.536053, 0.793839, 0.917359, 0.790575, 0.199029, 0.404584, 0.839985, 0.453373, 0.312628, 0.838703, 0.485974, 0.436318, 0.958224, 0.235372, 0.765102, 0.909398, 0.414794, 0.297755, 0.969311, 0.880834, 0.257721, 0.455292, 0.0254474, 0.338577, 0.11561, 0.970854, 0.768593, 0.930545, 0.109438, 0.217873, 0.716096, 0.447394, 0.848354, 0.899927, 0.582273, 0.354063, 0.410766, 0.543359, 0.718875, 0.82564, 0.917152, 0.503246, 0.995162, 0.718001, 0.945581, 0.435387, 0.203257, 0.650221, 0.801618, 0.219279, 0.0379758, 0.61318, 0.404316, 0.496502, 0.203822, 0.799091, 0.696386, 0.540442, 0.494644, 0.612522, 0.820322, 0.642791, 0.74066, 0.762133, 0.71405, 0.424672, 0.0449132, 0.179444, 0.0435775, 0.471613, 0.209735, 0.921058, 0.115136, 0.403898, 0.610307, 0.704204, 0.0167934, 0.460815, 0.262622, 0.856886, 0.587343, 0.801911, 0.183914, 0.185664, 0.0466625, 0.269531, 0.262823, 0.437082, 0.220476, 0.614148, 0.594051, 0.854244, 0.745051, 0.28351, 0.034429, 0.308382, 0.505497, 0.0232656, 0.493913, 0.34385, 0.0164371, 0.769361, 0.0158044, 0.474548, 0.0477609, 0.762977, 0.493348, 0.108953, 0.206988, 0.397475, 0.486607, 0.257376, 0.360176, 0.283457, 0.293826, 0.280395, 0.185887, 0.528896, 0.571727, 0.861589, 0.962315, 0.290201, 0.405897, 0.509888, 0.81183, 0.0305172, 0.914301, 0.381285, 0.522811, 0.342282, 0.337673, 0.378216, 0.153737, 0.853185, 0.566533, 0.731384, 0.0867227, 0.309118, 0.241587, 0.513134, 0.95369, 0.465721, 0.260558, 0.835028, 0.113438, 0.342119, 0.425102, 0.293586, 0.744854, 0.682647, 0.115621, 0.589579, 0.201552, 0.673105, 0.27366, 0.874928, 0.380599, 0.657444, 0.156169, 0.427213, 0.640715, 0.668032, 0.522963, 0.11628, 0.992925, 0.324943, 
0.897585, 0.547061, 0.43195, 0.524784, 0.293176, 0.0135561, 0.256651, 0.783486, 0.825907, 0.274228, 0.7481, 0.547017, 0.42749, 0.999425, 0.0670568, 0.00511866, 0.731329, 0.972605, 0.708638, 0.228876, 0.0474373, 0.885787, 0.217058, 0.402035, 0.433456, 0.169397, 0.462544, 0.275932, 0.608642, 0.566552, 0.528844, 0.99498, 0.64558, 0.896656, 0.925517, 0.570801, 0.760168, 0.346442, 0.887954, 0.734144, 0.110051, 0.251261, 0.458466, 0.472215, 0.857166, 0.173167, 0.293479, 0.923124, 0.158581, 0.613774, 0.129953, 0.171725, 0.0886381, 0.460035, 0.0062677, 0.824812, 0.750263, 0.353371, 0.39708, 0.461503, 0.981582, 0.99504, 0.374635, 0.707599, 0.756032, 0.340171, 0.854328, 0.592721, 0.925933, 0.249643, 0.983378, 0.619575, 0.595765, 0.556694, 0.916095, 0.0959277, 0.0916626, 0.219815, 0.473381, 0.401479, 0.360776, 0.203101, 0.279391, 0.68334, 0.285407, 0.289709, 0.786986, 0.00396632, 0.953997, 0.264644, 0.719435, 0.491971, 0.282312, 0.521394, 0.39459, 0.787033, 0.360966, 0.61271, 0.0769574, 0.640273, 0.725047, 0.066325, 0.83893, 0.913585, 0.686559, 0.634176, 0.494692, 0.901601, 0.974235, 0.95669, 0.294684, 0.102111, 0.573252, 0.368238, 0.431652, 0.709033, 0.53859, 0.74427, 0.0197083, 0.654015, 0.740016, 0.290272, 0.891679, 0.472455, 0.345248, 0.128804, 0.152924, 0.618935, 0.736109, 0.158865, 0.415915, 0.364294, 0.414978, 0.388488, 0.0551452, 0.16907, 0.256867, 0.499739, 0.751166, 0.144495, 0.898076, 0.155751, 0.776259, 0.467601, 0.46448, 0.623566, 0.721487, 0.767064, 0.743511, 0.205517, 0.835401, 0.580185, 0.884513, 0.434778, 0.0642024, 0.342705, 0.248814, 0.970129, 0.0709074, 0.322985, 0.539647, 0.912792, 0.339331, 0.368452, 0.179004, 0.407487, 0.716975, 0.851857, 0.51384, 0.490104, 0.0778379, 0.108685, 0.161458, 0.11773, 0.448846, 0.828538, 0.584346, 0.153791, 0.493178, 0.310083, 0.0396996, 0.145746, 0.743706, 0.0227019, 0.394465, 0.0852307, 0.842261, 0.74815, 0.222245, 0.438022, 0.854282, 0.633587, 0.316247, 0.464999, 0.948878, 0.667656, 0.820542, 0.34369, 0.215555, 0.946595, 0.721601, 0.22043, 0.261352, 0.612186, 0.870671, 0.714755, 0.36966, 0.175521, 0.237019, 0.000851959, 0.906143, 0.833103, 0.638396, 0.601893, 0.926978, 0.390023, 0.255927, 0.451647, 0.834361, 0.652625, 0.401701, 0.99337, 0.667848, 0.659488, 0.124219, 0.486276, 0.548035, 0.400169, 0.151954, 0.17069, 0.416361, 0.0465857, 0.759331, 0.628014, 0.108069, 0.755979, 0.0579521, 0.101546, 0.0659961, 0.505494, 0.954371, 0.361013, 0.536745, 0.868043, 0.59628, 0.372852, 0.461984, 0.851089, 0.307691, 0.240107, 0.196247, 0.985667, 0.758297, 0.543791, 0.257839, 0.915945, 0.732074, 0.0586091, 0.629994, 0.445587, 0.434561, 0.360788, 0.89158, 0.871263, 0.496539, 0.965595, 0.532112, 0.17287, 0.774456, 0.702859, 0.980716, 0.72158, 0.750644, 0.520465, 0.689206, 0.00144652, 0.146425, 0.567374, 0.710851, 0.884231, 0.861781, 0.765327, 0.206968, 0.771618, 0.444785, 0.239685, 0.0222081, 0.525997, 0.318925, 0.917438, 0.963911, 0.294071, 0.523117, 0.128301, 0.328972, 0.139177, 0.363254, 0.24417, 0.0443521, 0.973206, 0.848795, 0.606445, 0.404438, 0.618627, 0.660174, 0.414985, 0.67641, 0.577587, 0.190155, 0.270518, 0.393263, 0.294353, 0.88946, 0.221099, 0.85264, 0.101308, 0.659589, 0.439255, 0.781684, 0.572417, 0.212141, 0.716473, 0.582686, 0.403213, 0.794935, 0.309954, 0.067165, 0.796011, 0.516649, 0.0718782, 0.0992761, 0.507827, 0.0684038, 0.666391, 0.884733, 0.183418, 0.490677, 0.661127, 0.617068, 0.85481, 0.398186, 0.476335, 0.388993, 0.167819, 0.509307, 0.0938016, 0.0734592, 0.248536, 0.989461, 0.273933, 0.509542, 0.433594, 0.963002, 0.0171066, 0.504101, 
0.524548, 0.233202, 0.782803, 0.820336, 0.767046, 0.259418, 0.651923, 0.956426, 0.216327, 0.47554, 0.273976, 0.616272, 0.955602, 0.735892, 0.995032, 0.000578952, 0.979426, 0.939597, 0.546853, 0.761098, 0.259445, 0.920414, 0.749957, 0.195534, 0.0838292, 0.325312, 0.925283, 0.697592, 0.995375, 0.0368342, 0.398746, 0.554275, 0.395625, 0.387227, 0.103134, 0.982489, 0.851634, 0.697924, 0.0178833, 0.507153, 0.0122278, 0.23025, 0.648683, 0.400998, 0.195654, 0.675248, 0.252502, 0.991594, 0.784603, 0.544893, 0.615543, 0.152306, 0.322445, 0.793154, 0.915662, 0.983772, 0.763142, 0.57135, 0.82519, 0.202078, 0.522719, 0.885207, 0.275491, 0.595655, 0.818549, 0.0524568, 0.250328, 0.880077, 0.322784, 0.572431, 0.215638, 0.878305, 0.383094, 0.74717, 0.145541, 0.0785346, 0.999607, 0.882858, 0.356455, 0.678831, 0.541308, 0.811356, 0.105513, 0.366959, 0.350074, 0.390021, 0.0786344, 0.599412, 0.398567, 0.973736, 0.703613, 0.435228, 0.686653, 0.538779, 0.7215, 0.482741, 0.615045, 0.559884, 0.257104, 0.69479, 0.893027, 0.473533, 0.328841, 0.489412, 0.671038, 0.0651754, 0.490045, 0.0542036, 0.887402, 0.874585, 0.00341829, 0.89898, 0.618432, 0.950719, 0.174197, 0.883646, 0.0387309, 0.928414, 0.786324, 0.0760994, 0.341326, 0.301425, 0.977251, 0.8747, 0.446584, 0.386461, 0.396124, 0.877713, 0.148695, 0.526064, 0.152571, 0.599459, 0.326556, 0.144899, 0.0794079, 0.691528, 0.787482, 0.985864, 0.230712, 0.512173, 0.670602, 0.42419, 0.392844, 0.62434, 0.0253744, 0.245074, 0.940214, 0.962911, 0.771694, 0.975384, 0.434182, 0.188041, 0.884865, 0.126492, 0.439198, 0.27441, 0.975909, 0.20862, 0.908787, 0.0162853, 0.820684, 0.351949, 0.516667, 0.0583989, 0.916234, 0.610965, 0.917754, 0.75513, 0.606843, 0.719708, 0.337624, 0.0756874, 0.729834, 0.231996, 0.992609, 0.682814, 0.531456, 0.909809, 0.0926208, 0.494448, 0.359783, 0.607765, 0.946855, 0.270242, 0.283128, 0.324045, 0.481104, 0.576593, 0.522148, 0.942568, 0.38905, 0.331622, 0.00690282, 0.0136987, 0.986947, 0.455013, 0.191329, 0.244427, 0.216666, 0.983571, 0.742834, 0.76621, 0.739938, 0.158041, 0.453855, 0.814879, 0.172858, 0.726555, 0.383684, 0.560341, 0.714618, 0.277704, 0.830432, 0.166897, 0.825986, 0.860308, 0.360711, 0.419045, 0.637577, 0.495151, 0.20889, 0.586641, 0.0216315, 0.428847, 0.580844, 0.935929, 0.270237, 0.919914, 0.080893, 0.676047, 0.275461, 0.727691, 0.417885, 0.84971, 0.483685, 0.500607, 0.0224612, 0.922548, 0.336375, 0.994413, 0.0786807, 0.989071, 0.164141, 0.816234, 0.752992, 0.844229, 0.333462, 0.871827, 0.845022, 0.408043, 0.174995, 0.402443, 0.247912, 0.760893, 0.958729, 0.390989, 0.323057, 0.599271, 0.919247, 0.944151, 0.875851, 0.663034, 0.85682, 0.528876, 0.722132, 0.298359, 0.443992, 0.549704, 0.491908, 0.121226, 0.589618, 0.756258, 0.423677, 0.274582, 0.14371, 0.339394, 0.575378, 0.203654, 0.0561348, 0.0263823, 0.730298, 0.605367, 0.852316, 0.737382, 0.276795, 0.0408388, 0.366526, 0.949913, 0.240951, 0.902703, 0.685708, 0.138125, 0.414444, 0.457696, 0.752413, 0.430569, 0.904826, 0.698174, 0.708283, 0.538915, 0.230378, 0.52034, 0.714914, 0.108449, 0.648489, 0.612991, 0.471486, 0.267029, 0.680143, 0.933346, 0.940395, 0.652139, 0.0067817, 0.88264, 0.505585, 0.420107, 0.460078, 0.0731711, 0.718455, 0.0584943, 0.894402, 0.998216, 0.183412, 0.995003, 0.190485, 0.410254, 0.378028, 0.194847, 0.897731, 0.126729, 0.223848, 0.405301, 0.48036, 0.28306, 0.731551, 0.217869, 0.17749, 0.284244, 0.280107, 0.940153, 0.806977, 0.0378991, 0.420325, 0.667665, 0.07608, 0.826678, 0.728906, 0.499561, 0.58638, 0.156616, 0.416205, 0.956702, 0.883833, 0.292335, 
0.346829, 0.0405572, 0.18781, 0.915004, 0.518918, 0.781497, 0.468654, 0.261209, 0.971299, 0.266337, 0.482536, 0.0581405, 0.311585, 0.995574, 0.535754, 0.421429, 0.457745, 0.114087, 0.0277285, 0.53857, 0.55278, 0.291556, 0.423342, 0.639879, 0.761468, 0.379133, 0.580781, 0.647024, 0.966475, 0.363773, 0.352841, 0.994402, 0.66689, 0.96636, 0.0824195, 0.453254, 0.722302, 0.75103, 0.351102, 0.933818, 0.632064, 0.806556, 0.617393, 0.460752, 0.842769, 0.439656, 0.693975, 0.809105, 0.495239, 0.936034, 0.968222, 0.0669405, 0.651868, 0.553302, 0.856937, 0.586286, 0.398986, 0.843976, 0.381932, 0.204986, 0.454772, 0.635495, 0.788241, 0.579793, 0.784523, 0.711034, 0.120702, 0.865627, 0.850707, 0.236543, 0.488028, 0.499958, 0.193758, 0.950078, 0.463927, 0.553199, 0.682081, 0.103961, 0.787376, 0.170691, 0.94369, 0.412624, 0.821339, 0.490102, 0.1178, 0.318214, 0.279096, 0.273385, 0.234091, 0.0329628, 0.568795, 0.99155, 0.202706, 0.833978, 0.397504, 0.148435, 0.128027, 0.00865821, 0.920576, 0.617613, 0.556491, 0.277433, 0.922053, 0.253828, 0.261701, 0.922062, 0.527259, 0.254352, 0.0882278, 0.269511, 0.829041, 0.981663, 0.543658, 0.734067, 0.0305914, 0.150435, 0.425536, 0.734781, 0.766622, 0.354839, 0.905933, 0.0114792, 0.920432, 0.305249, 0.549855, 0.226855, 0.507703, 0.878084, 0.855841, 0.415489, 0.0967012, 0.0705745, 0.948124, 0.112601, 0.668636, 0.832847, 0.598723, 0.134116, 0.416556, 0.782924, 0.821437, 0.799143, 0.224983, 0.601066, 0.59193, 0.463013, 0.35248, 0.175495, 0.421776, 0.114031, 0.924254, 0.629984, 0.146712, 0.965943, 0.216638, 0.153906, 0.589181, 0.748575, 0.238596, 0.777913, 0.107996, 0.542583, 0.216307, 0.327313, 0.67431, 0.911768, 0.720548, 0.685776, 0.538077, 0.164609, 0.761278, 0.372187, 0.10697, 0.0469801, 0.713064, 0.175164, 0.363707, 0.402513, 0.61414, 0.0445109, 0.352064, 0.196112, 0.332916, 0.277907, 0.926622, 0.501156, 0.193773, 0.654004, 0.877652, 0.136468, 0.0841603, 0.900932, 0.487381, 0.872843, 0.128697, 0.0326086, 0.238987, 0.959209, 0.838067, 0.415121, 0.54681, 0.908723, 0.586238, 0.252606, 0.465217, 0.663561, 0.929449, 0.872382, 0.130099, 0.619685, 0.110434, 0.819971, 0.555038, 0.242786, 0.0414619, 0.691008, 0.167812, 0.425525, 0.204575, 0.802411, 0.193472, 0.00331085, 0.325937, 0.33384, 0.231158, 0.471596, 0.283891, 0.961667, 0.80617, 0.391424, 0.328921, 0.55692, 0.943881, 0.589228, 0.578214, 0.0285151, 0.279367, 0.454733, 0.0858754, 0.791617, 0.265243, 0.244759, 0.0300598, 0.391804, 0.772079, 0.707331, 0.529046, 0.240202, 0.687263, 0.794243, 0.619298, 0.593131, 0.462572, 0.525417, 0.963625, 0.594954, 0.863234, 0.0445225, 0.530425, 0.179533, 0.280874, 0.734682, 0.578481, 0.766435, 0.66733, 0.780478, 0.92408, 0.713012, 0.910428, 0.915277, 0.938228, 0.0791011, 0.172629, 0.510816, 0.571722, 0.700757, 0.51459, 0.146138, 0.572696, 0.543248, 0.196256, 0.210629, 0.307071, 0.113425, 0.0253573, 0.276641, 0.874393, 0.542933, 0.0449171, 0.64137, 0.907429, 0.453603, 0.904712, 0.815911, 0.0278734, 0.659648, 0.523139, 0.674296, 0.392313, 0.0491121, 0.943228, 0.649367, 0.483467, 0.353026, 0.706945, 0.634937, 0.553236, 0.10229, 0.159277, 0.121844, 0.898335, 0.0309602, 0.635292, 0.621516, 0.673105, 0.0247392, 0.544543, 0.957866, 0.730262, 0.789455, 0.456272, 0.752565, 0.538326, 0.241237, 0.0172564, 0.0886359, 0.919896, 0.898584, 0.1415, 0.391898, 0.369437, 0.401157, 0.0449611, 0.380616, 0.86737, 0.5042, 0.0118761, 0.025319, 0.127751, 0.704699, 0.187504, 0.221951, 0.346842, 0.941551, 0.241693, 0.730141, 0.678747, 0.105955, 0.138252, 0.595356, 0.173004, 0.0262196, 0.608228, 0.735464, 
0.65623, 0.658145, 0.903512, 0.0619312, 0.307118, 0.523098, 0.38022, 0.182865, 0.205431, 0.554103, 0.689677, 0.687046, 0.398688, 0.220187, 0.987921, 0.806736, 0.18512, 0.100951, 0.0318992, 0.395293, 0.894413, 0.391095, 0.536072, 0.365466, 0.900981, 0.44976, 0.745292, 0.00269919, 0.161792, 0.804464, 0.402976, 0.514242, 0.894882, 0.467178, 0.443345, 0.557251, 0.662993, 0.169558, 0.177831, 0.340122, 0.230403, 0.7209, 0.900017, 0.348684, 0.760011, 0.586478, 0.540105, 0.959653, 0.258187, 0.557428, 0.81617, 0.470539, 0.350093, 0.924689, 0.396413, 0.605822, 0.809761, 0.393909, 0.847537, 0.760716, 0.965154, 0.274259, 0.423478, 0.578721, 0.626461, 0.482197, 0.47902, 0.112662, 0.300021, 0.952363, 0.871049, 0.164691, 0.587641, 0.986854, 0.785386, 0.685767, 0.361704, 0.383185, 0.718012, 0.887985, 0.816043, 0.151235, 0.807262, 0.923368, 0.170587, 0.0453598, 0.354248, 0.818845, 0.433263, 0.459893, 0.101068, 0.332779, 0.312316, 0.575915, 0.346561, 0.57309, 0.651234, 0.787049, 0.875996, 0.781741, 0.281502, 0.851361, 0.700611, 0.788317, 0.162706, 0.274352, 0.204869, 0.538933, 0.612158, 0.986538, 0.293636, 0.253022, 0.983186, 0.69333, 0.4558, 0.57389, 0.541949, 0.242896, 0.315584, 0.128803, 0.619416, 0.73207, 0.49515, 0.414852, 0.221251, 0.32171, 0.450767, 0.613941, 0.0635788, 0.365669, 0.0673486, 0.657975, 0.809637, 0.831814, 0.767476, 0.113712, 0.67349, 0.00897053, 0.0243029, 0.113721, 0.958285, 0.931447, 0.245056, 0.226525, 0.633526, 0.0713207, 0.450486, 0.320382, 0.247415, 0.346027, 0.95667, 0.680298, 0.646958, 0.160396, 0.103668, 0.334639, 0.0190628, 0.180642, 0.996745, 0.848917, 0.45189, 0.828155, 0.695767, 0.222793, 0.506566, 0.869116, 0.530392, 0.367377, 0.268675, 0.864288, 0.485946, 0.43006, 0.641621, 0.332084, 0.976041, 0.460188, 0.860068, 0.435064, 0.115542, 0.30856, 0.187088, 0.0506429, 0.702233, 0.146119, 0.954782, 0.0949941, 0.740496, 0.531023, 0.161372, 0.388384, 0.990412, 0.515755, 0.55135, 0.599431, 0.615233, 0.112216, 0.882591, 0.73124, 0.606734, 0.0380082, 0.54616, 0.361464, 0.261464, 0.608565, 0.204731, 0.622548, 0.260873, 0.829266, 0.681501, 0.472059, 0.248992, 0.109236, 0.643807, 0.559218, 0.499386, 0.0776536, 0.690507, 0.0131072, 0.00128421, 0.859237, 0.953749, 0.492431, 0.983477, 0.451742, 0.282001, 0.84973, 0.412843, 0.815722, 0.571076, 0.928998, 0.00366933, 0.565809, 0.02758, 0.426682, 0.0234673, 0.0315049, 0.507217, 0.112427, 0.223982, 0.77988, 0.0847727, 0.616534, 0.869932, 0.0127468, 0.00376011, 0.212537, 0.266691, 0.583224, 0.191814, 0.892874, 0.219661, 0.847788, 0.596181, 0.326485, 0.131051, 0.502695, 0.1665, 0.191719, 0.430664, 0.770787, 0.80924, 0.415358, 0.362276, 0.992043, 0.212586, 0.10825, 0.480884, 0.578593, 0.283246, 0.498406, 0.846395, 0.995079, 0.196608, 0.204897, 0.576864, 0.484274, 0.722582, 0.439047, 0.707682, 0.961247, 0.257391, 0.409792, 0.970793, 0.369866, 0.813647, 0.260741, 0.578559, 0.98362, 0.760652, 0.748633, 0.729462, 0.70189, 0.595779, 0.896848, 0.153314, 0.304294, 0.392959, 0.580822, 0.196154, 0.0693971, 0.493143, 0.445022, 0.0757038, 0.744543, 0.586538, 0.107481, 0.633554, 0.323894, 0.382525, 0.864584, 0.348254, 0.482544, 0.904677, 0.0130212, 0.708715, 0.548724, 0.89729, 0.269458, 0.720173, 0.651139, 0.443372, 0.851238, 0.932782, 0.588859, 0.411447, 0.0490516, 0.260104, 0.597765, 0.550021, 0.999469, 0.429038, 0.161861, 0.955556, 0.315843, 0.331969, 0.726946, 0.743858, 0.460919, 0.863003, 0.299829, 0.648156, 0.409192, 0.282788, 0.876251, 0.411816, 0.302483, 0.400292, 0.851434, 0.451902, 0.831296, 0.560187, 0.596311, 0.775953, 0.101386, 0.506111, 
0.0104792, 0.00547416, 0.252678, 0.17857, 0.78436, 0.657489, 0.0389345, 0.466887, 0.405835, 0.604221, 0.0771849, 0.64215, 0.354854, 0.103845, 0.22058, 0.162561, 0.438923, 0.211571, 0.299419, 0.434235, 0.66236, 0.560895, 0.245476, 0.67336, 0.740411, 0.412394, 0.0717185, 0.128178, 0.27888, 0.236083, 0.364672, 0.0376948, 0.448312, 0.641603, 0.871354, 0.137495, 0.588375, 0.916368, 0.734254, 0.0942038, 0.762401, 0.808585, 0.592534, 0.335929, 0.520638, 0.851411, 0.69917, 0.461936, 0.520234, 0.47287, 0.903557, 0.52273, 0.570016, 0.690135, 0.0334597, 0.544777, 0.936128, 0.750147, 0.427645, 0.287573, 0.273406, 0.569816, 0.890769, 0.926121, 0.916869, 0.639991, 0.554234, 0.73795, 0.727385, 0.380851, 0.220515, 0.786247, 0.980673, 0.0897098, 0.494517, 0.447723, 0.524713, 0.480446, 0.930906, 0.886619, 0.745773, 0.42244, 0.839893, 0.680388, 0.137976, 0.224613, 0.556411, 0.237616, 0.90556, 0.205239, 0.311139, 0.3102, 0.499394, 0.679864, 0.647738, 0.314856, 0.348947, 0.916257, 0.938318, 0.347793, 0.642121, 0.0115018, 0.507953, 0.678153, 0.536422, 0.284329, 0.308285, 0.106443, 0.130605, 0.0707415, 0.207012, 0.569307, 0.744861, 0.895839, 0.763779, 0.856683, 0.251718, 0.0933365, 0.832461, 0.544165, 0.255046, 0.792613, 0.792149, 0.882607, 0.614716, 0.144427, 0.130996, 0.402445, 0.732257, 0.198873, 0.559896, 0.457659, 0.780888, 0.0565166, 0.420061, 0.85289, 0.0339575, 0.851365, 0.915661, 0.734664, 0.313297, 0.0537396, 0.192802, 0.0936432, 0.0149107, 0.599219, 0.823639, 0.71537, 0.703168, 0.578299, 0.419909, 0.527863, 0.266757, 0.178719, 0.00179818, 0.391777, 0.480779, 0.21292, 0.33087, 0.82191, 0.16913, 0.393736, 0.815357, 0.158662, 0.759544, 0.0917913, 0.502571, 0.626879, 0.866529, 0.0145252, 0.750456, 0.347476, 0.308043, 0.112856, 0.243447, 0.0808196, 0.207538, 0.532262, 0.957601, 0.414993, 0.0212746, 0.51757, 0.750652, 0.120918, 0.198828, 0.767442, 0.0509944, 0.310467, 0.867684, 0.143594, 0.269652, 0.166788, 0.0482729, 0.959377, 0.0227016, 0.381065, 0.0466958, 0.706408, 0.822551, 0.351691, 0.758189, 0.916421, 0.335477, 0.568934, 0.0950658, 0.0215652, 0.0393662, 0.859198, 0.828, 0.525979, 0.347294, 0.351906, 0.305357, 0.512158, 0.772087, 0.48377, 0.371976, 0.123643, 0.705157, 0.721921, 0.467902, 0.987688, 0.931075, 0.881334, 0.799335, 0.898644, 0.421681, 0.0452764, 0.769417, 0.652862, 0.919228, 0.327836, 0.915252, 0.542893, 0.995346, 0.464939, 0.385581, 0.755661, 0.0462519, 0.438096, 0.716777, 0.0873773, 0.519387, 0.72837, 0.644616, 0.81641, 0.770924, 0.456952, 0.998386, 0.27846, 0.289119, 0.696656, 0.00923793, 0.845503, 0.260857, 0.699512, 0.0275932, 0.315732, 0.0285291, 0.94531, 0.339671, 0.936523, 0.78267, 0.282767, 0.89977, 0.778833, 0.793924, 0.468636, 0.898903, 0.974987, 0.165838, 0.8451, 0.100429, 0.33006, 0.341665, 0.664152, 0.534129, 0.448803, 0.872411, 0.834794, 0.599272, 0.520897, 0.0577161, 0.0906672, 0.527342, 0.618991, 0.362663, 0.958124, 0.685386, 0.545507, 0.353392, 0.359803, 0.354475, 0.960168, 0.405825, 0.557439, 0.273456, 0.0278656, 0.762911, 0.565401, 0.230271, 0.0322394, 0.188624, 0.593584, 0.964253, 0.802663, 0.282211, 0.0111207, 0.142253, 0.513156, 0.942532, 0.393904, 0.126776, 0.385484, 0.0694841, 0.0850033, 0.745054, 0.0341894, 0.31932, 0.528118, 0.358518, 0.765269, 0.414043, 0.208501, 0.932321, 0.266267, 0.977639, 0.394195, 0.550205, 0.507989, 0.138608, 0.0975725, 0.715032, 0.458535, 0.524329, 0.326665, 0.276661, 0.864643, 0.0214222, 0.664928, 0.115379, 0.710931, 0.618722, 0.841434, 0.38875, 0.394257, 0.949326, 0.653783, 0.0534453, 0.345023, 0.282792, 0.195276, 0.423252, 0.622851, 
0.713911, 0.127509, 0.311566, 0.245728, 0.85966, 0.575867, 0.0689418, 0.0952579, 0.586971, 0.243941, 0.0834489, 0.255213, 0.213337, 0.862964, 0.334737, 0.405149, 0.172359, 0.188284, 0.780944, 0.485417, 0.251398, 0.599116, 0.586665, 0.409307, 0.580595, 0.103454, 0.320942, 0.486805, 0.197634, 0.20807, 0.908571, 0.132826, 0.382863, 0.212925, 0.246455, 0.619471, 0.117344, 0.558972, 0.0580035, 0.711549, 0.155809, 0.663877, 0.424317, 0.610468, 0.930087, 0.172949, 0.00517805, 0.582138, 0.195799, 0.861302, 0.598117, 0.451871, 0.448281, 0.158697, 0.000542584, 0.334286, 0.788786, 0.157838, 0.477899, 0.97582, 0.242801, 0.848349, 0.332519, 0.0980363, 0.427908, 0.345233, 0.367508, 0.350688, 0.843337, 0.235817, 0.405097, 0.436821, 0.667814, 0.361552, 0.280638, 0.855458, 0.261004, 0.747798, 0.757861, 0.540364, 0.175219, 0.279659, 0.446355, 0.365813, 0.267402, 0.698932, 0.879136, 0.743106, 0.291694, 0.991212, 0.12792, 0.474301, 0.0353802, 0.311849, 0.707376, 0.0604965, 0.212644, 0.0846226, 0.0892034, 0.161925, 0.436091, 0.436157, 0.482554, 0.370197, 0.91565, 0.799866, 0.649117, 0.465609, 0.434417, 0.258765, 0.185015, 0.480145, 0.174129, 0.928315, 0.25319, 0.401189, 0.802245, 0.780487, 0.494216, 0.336273, 0.767529, 0.768242, 0.152223, 0.0521661, 0.21053, 0.882708, 0.347742, 0.374891, 0.0797832, 0.581046, 0.206673, 0.0560839, 0.595372, 0.136616, 0.357512, 0.0209166, 0.0016705, 0.93445, 0.206748, 0.824873, 0.788357, 0.55925, 0.920285, 0.582929, 0.702822, 0.36813, 0.447606, 0.6106, 0.634253, 0.207762, 0.427202, 0.500577, 0.389809, 0.19959, 0.681723, 0.120753, 0.786173, 0.543604, 0.979328, 0.376943, 0.918816, 0.452252, 0.00499074, 0.635706, 0.35694, 0.867121, 0.791436, 0.880958, 0.680748, 0.831066, 0.293691, 0.887553, 0.187268, 0.715575, 0.844413, 0.637634, 0.73157, 0.859324, 0.407261, 0.737167, 0.422764, 0.923301, 0.649342, 0.170625, 0.366718, 0.942043, 0.238334, 0.470234, 0.884955, 0.060161, 0.181418, 0.646665, 0.341334, 0.108285, 0.698794, 0.235432, 0.880182, 0.787166, 0.0924313, 0.567718, 0.245406, 0.839764, 0.298682, 0.283608, 0.396466, 0.346695, 0.666372, 0.578194, 0.354995, 0.742842, 0.32011, 0.980964, 0.00271253, 0.69714, 0.910403, 0.920273, 0.485676, 0.697684, 0.0695238, 0.687605, 0.203331, 0.435267, 0.257599, 0.860991, 0.882308, 0.795157, 0.698597, 0.322865, 0.903791, 0.684318, 0.664499, 0.669392, 0.780595, 0.557328, 0.293327, 0.265196, 0.289732, 0.326938, 0.92325, 0.551842, 0.426428, 0.305414, 0.755801, 0.824476, 0.442615, 0.0180313, 0.35833, 0.454835, 0.539032, 0.274815, 0.764296, 0.564245, 0.325704, 0.508451, 0.0271415, 0.581645, 0.694036, 0.641851, 0.834904, 0.150452, 0.526213, 0.0506494, 0.513135, 0.12368, 0.71454, 0.438677, 0.465832, 0.786114, 0.838098, 0.751306, 0.188757, 0.146228, 0.743609, 0.245643, 0.97717, 0.412334, 0.144464, 0.529573, 0.455666, 0.400118, 0.253627, 0.581757, 0.542677, 0.790956, 0.586529, 0.0948754, 0.718301, 0.668166, 0.327256, 0.563516, 0.48726, 0.489141, 0.569486, 0.262042, 0.309541, 0.626612, 0.745274, 0.751799, 0.529559, 0.455644, 0.748598, 0.946315, 0.8265, 0.414619, 0.984799, 0.161437, 0.821717, 0.28339, 0.72679, 0.154853, 0.871201, 0.0711385, 0.102505, 0.726382, 0.491184, 0.573875, 0.615597, 0.356484, 0.909637, 0.486158, 0.810938, 0.564761, 0.460728, 0.852867, 0.366667, 0.41533, 0.0148784, 0.972416, 0.534227, 0.798354, 0.795607, 0.976434, 0.355941, 0.910177, 0.159594, 0.0438041, 0.654905, 0.395871, 0.115006, 0.540252, 0.577534, 0.962924, 0.945156, 0.360973, 0.0926194, 0.24857, 0.795894, 0.13686, 0.454092, 0.865174, 0.98353, 0.635807, 0.934014, 0.5153, 0.517563, 
0.926611, 0.454739, 0.0372238, 0.360538, 0.91321, 0.799502, 0.271155, 0.221958, 0.548051, 0.907589, 0.794299, 0.961704, 0.673944, 0.0175033, 0.492384, 0.918665, 0.637901, 0.199075, 0.726617, 0.311334, 0.879681, 0.987701, 0.719458, 0.393797, 0.855523, 0.826618, 0.689592, 0.688999, 0.697588, 0.780641, 0.0636748, 0.140083, 0.958032, 0.892292, 0.577225, 0.519504, 0.0735575, 0.74484, 0.964864, 0.219465, 0.868061, 0.549526, 0.634009, 0.0171299, 0.257061, 0.919109, 0.302247, 0.664563, 0.797629, 0.464015, 0.310099, 0.019177, 0.357502, 0.620309, 0.239394, 0.917188, 0.785023, 0.131056, 0.727617, 0.530709, 0.578615, 0.201885, 0.278196, 0.518636, 0.235054, 0.522682, 0.19339, 0.0846954, 0.4492, 0.323331, 0.707992, 0.523493, 0.940948, 0.723369, 0.390074, 0.774236, 0.433164, 0.0979982, 0.0709218, 0.103836, 0.422072, 0.986132, 0.169095, 0.0657681, 0.259039, 0.396461, 0.854803, 0.0914236, 0.557688, 0.527779, 0.371118, 0.0822192, 0.302785, 0.655894, 0.88463, 0.375363, 0.52466, 0.466245, 0.38354, 0.455769, 0.901777, 0.0759798, 0.876514, 0.4332, 0.566397, 0.765223, 0.640635, 0.102071, 0.799557, 0.939988, 0.195413, 0.0748973, 0.00976407, 0.51755, 0.508755, 0.919853, 0.935985, 0.563762, 0.93531, 0.426331, 0.77547, 0.812972, 0.191429, 0.16026, 0.56597, 0.435486, 0.768526, 0.101867, 0.406524, 0.823415, 0.222716, 0.154984, 0.116542, 0.763154, 0.771641, 0.902853, 0.543089, 0.352641, 0.258023, 0.647828, 0.140055, 0.0542513, 0.0372325, 0.649413, 0.978192, 0.832517, 0.896181, 0.472388, 0.443075, 0.140154, 0.0761419, 0.666329, 0.950345, 0.823171, 0.370979, 0.65291, 0.408132, 0.948844, 0.155691, 0.914282, 0.301515, 0.461811, 0.795944, 0.239535, 0.664828, 0.6233, 0.145132, 0.37435, 0.886511, 0.317883, 0.518877, 0.518151, 0.536818, 0.643334, 0.0808949, 0.068941, 0.138229, 0.729329, 0.853963, 0.746799, 0.711397, 0.570258, 0.83942, 0.905887, 0.400872, 0.345677, 0.914797, 0.332045, 0.292297, 0.940083, 0.563066, 0.617197, 0.569981, 0.25026, 0.0677223, 0.867086, 0.961472, 0.660134, 0.0674513, 0.525426, 0.632111, 0.602913, 0.506025, 0.554512, 0.0680667, 0.918949, 0.358327, 0.578486, 0.348154, 0.390886, 0.755981, 0.715591, 0.33099, 0.385277, 0.89233, 0.200971, 0.252035, 0.200555, 0.0110863, 0.862266, 0.0333783, 0.211591, 0.23431, 0.821902, 0.540553, 0.185168, 0.420759, 0.631121, 0.900911, 0.190397, 0.52558, 0.538736, 0.971211, 0.120859, 0.135918, 0.839331, 0.273747, 0.759608, 0.394084, 0.436614, 0.976545, 0.962145, 0.94815, 0.600011, 0.0810576, 0.582324, 0.912166, 0.096998, 0.536383, 0.297478, 0.200268, 0.205488, 0.200496, 0.031067, 0.834057, 0.667748, 0.874206, 0.0351124, 0.0630484, 0.860482, 0.26558, 0.879304, 0.617219, 0.0774375, 0.196608, 0.934267, 0.75751, 0.296633, 0.0225115, 0.189789, 0.214884, 0.449621, 0.0468889, 0.155538, 0.431228, 0.958695, 0.741599, 0.213075, 0.209571, 0.592277, 0.766705, 0.72382, 0.708333, 0.960869, 0.810145, 0.379104, 0.339159, 0.674899, 0.188226, 0.952889, 0.595141, 0.11762, 0.380935, 0.608181, 0.201387, 0.884799, 0.483553, 0.916809, 0.735928, 0.281482, 0.00455585, 0.462535, 0.380936, 0.0936541, 0.487765, 0.675265, 0.510321, 0.921262, 0.500261, 0.782013, 0.310351, 0.128784, 0.437556, 0.28149, 0.841472, 0.758188, 0.622538, 0.850795, 0.372449, 0.0142433, 0.703519, 0.530403, 0.246252, 0.915198, 0.0578612, 0.816753, 0.985853, 0.286485, 0.690584, 0.419702, 0.403298, 0.22708, 0.446561, 0.954207, 0.252224, 0.00337301, 0.338911, 0.92823, 0.102058, 0.702199, 0.773641, 0.413213, 0.175673, 0.502506, 0.745158, 0.485363, 0.627887, 0.983253, 0.535411, 0.0960549, 0.806528, 0.727513, 0.204301, 0.561324, 0.593839, 
0.204487, 0.0195615, 0.584486, 0.809775, 0.0141238, 0.484117, 0.200835, 0.215795, 0.243407, 0.365022, 0.689046, 0.217585, 0.07781, 0.852946, 0.00244379, 0.191489, 0.516965, 0.064143, 0.848968, 0.220109, 0.402326, 0.4353, 0.318334, 0.859112, 0.197103, 0.120497, 0.577541, 0.935029, 0.347344, 0.678479, 0.0344002, 0.646592, 0.799282, 0.650723, 0.0183968, 0.371303, 0.462043, 0.518063, 0.902786, 0.616112, 0.524261, 0.0796001, 0.709403, 0.265955, 0.0488311, 0.615803, 0.509311, 0.483271, 0.620407, 0.447288, 0.908265, 0.223942, 0.576624, 0.3599, 0.505111, 0.61968, 0.883126, 0.655734, 0.0432178, 0.997728, 0.64409, 0.849678, 0.734431, 0.745452, 0.62498, 0.463477, 0.495921, 0.794164, 0.853491, 0.311384, 0.904331, 0.129272, 0.287849, 0.448194, 0.193666, 0.322777, 0.251713, 0.881793, 0.0970413, 0.233054, 0.347529, 0.463414, 0.611134, 0.878204, 0.157578, 0.188353, 0.456962, 0.747669, 0.869698, 0.564151, 0.0328977, 0.519942, 0.831659, 0.799956, 0.264594, 0.277307, 0.585456, 0.364483, 0.0277267, 0.527506, 0.984514, 0.195863, 0.546274, 0.452644, 0.709574, 0.94972, 0.420405, 0.617003, 0.535575, 0.145515, 0.251569, 0.579092, 0.979789, 0.582783, 0.683675, 0.547612, 0.873507, 0.832615, 0.0464497, 0.221379, 0.207657, 0.85259, 0.373575, 0.249047, 0.960638, 0.54802, 0.610878, 0.409865, 0.476497, 0.413089, 0.313342, 0.480978, 0.205176, 0.0268723, 0.844218, 0.418675, 0.922538, 0.244907, 0.958208, 0.961732, 0.672194, 0.092853, 0.311077, 0.0946453, 0.363624, 0.474164, 0.603574, 0.833505, 0.533449, 0.256537, 0.225558, 0.226048, 0.538559, 0.638723, 0.805419, 0.14832, 0.897226, 0.373224, 0.369662, 0.484443, 0.115926, 0.685181, 0.967777, 0.479863, 0.517747, 0.0585734, 0.506837, 0.275404, 0.866345, 0.502318, 0.0955882, 0.335369, 0.745772, 0.63887, 0.736681, 0.638938, 0.83989, 0.676867, 0.243325, 0.143963, 0.493835, 0.76239, 0.502014, 0.389397, 0.84351, 0.617143, 0.171437, 0.865222, 0.310702, 0.990583, 0.663565, 0.59032, 0.887284, 0.0442938, 0.232086, 0.845838, 0.623216, 0.134591, 0.245323, 0.202399, 0.730686, 0.29686, 0.73742, 0.182036, 0.891799, 0.996495, 0.991935, 0.963694, 0.61934, 0.87183, 0.942809, 0.552179, 0.0926725, 0.169732, 0.722361, 0.495521, 0.254044, 0.0840458, 0.889859, 0.923809, 0.134491, 0.486075, 0.102635, 0.499984, 0.992553, 0.0564187, 0.797292, 0.241622, 0.0486058, 0.801535, 0.31539, 0.0143841, 0.362139, 0.980739, 0.300859, 0.391281, 0.167751, 0.7215, 0.427135, 0.351335, 0.933799, 0.140761, 0.7363, 0.465188, 0.828012, 0.649004, 0.520969, 0.574344, 0.501355, 0.952165, 0.93164, 0.509089, 0.526875, 0.71231, 0.718113, 0.670296, 0.280508, 0.434312, 0.946176, 0.62393, 0.398959, 0.698747, 0.0204612, 0.79547, 0.652899, 0.455246, 0.486616, 0.867367, 0.493178, 0.355426, 0.210206, 0.228179, 0.651399, 0.0161561, 0.140239, 0.155556, 0.574745, 0.999008, 0.86796, 0.860705, 0.246552, 0.503608, 0.445886, 0.0580185, 0.800574, 0.437075, 0.573912, 0.0424352, 0.604857, 0.264873, 0.34638, 0.435088, 0.316541, 0.161882, 0.426731, 0.802425, 0.499536, 0.328287, 0.133744, 0.42007, 0.720955, 0.0136993, 0.647566, 0.973718, 0.901711, 0.175931, 0.878922, 0.93672, 0.133331, 0.79393, 0.0764088, 0.916768, 0.260156, 0.666782, 0.321252, 0.233666, 0.186357, 0.304028, 0.707395, 0.178687, 0.0232966, 0.922896, 0.526882, 0.81221, 0.945092, 0.138419, 0.0952502, 0.625846, 0.213548, 0.422367, 0.602847, 0.231354, 0.538758, 0.797799, 0.654113, 0.287098, 0.276612, 0.659605, 0.532692, 0.514879, 0.525104, 0.242574, 0.442326, 0.822493, 0.107437, 0.280184, 0.37356, 0.309485, 0.511257, 0.761947, 0.557022, 0.520128, 0.694639, 0.110733, 0.137554, 0.338047, 
0.999134, 0.27477, 0.916114, 0.770727, 0.0976081, 0.676984, 0.825778, 0.759363, 0.729444, 0.0242647, 0.0258947, 0.154187, 0.47188, 0.0445769, 0.0121177, 0.843762, 0.291093, 0.309643, 0.485361, 0.198252, 0.572985, 0.033286, 0.739473, 0.545352, 0.211468, 0.858458, 0.405647, 0.258037, 0.912533, 0.61541, 0.567429, 0.93901, 0.704859, 0.0118469, 0.703613, 0.517348, 0.540871, 0.729027, 0.850876, 0.440293, 0.553048, 0.61358, 0.00979307, 0.316454, 0.720052, 0.503127, 0.47962, 0.421531, 0.0151173, 0.438838, 0.197708, 0.780987, 0.812092, 0.382447, 0.424093, 0.385472, 0.550044, 0.656086, 0.268404, 0.851007, 0.310699, 0.937935, 0.870147, 0.80102, 0.961378, 0.696737, 0.38453, 0.800148, 0.16592, 0.884815, 0.413447, 0.617014, 0.524054, 0.739236, 0.666972, 0.710395, 0.687772, 0.859121, 0.388768, 0.718233, 0.388154, 0.476246, 0.944478, 0.970432, 0.691643, 0.464942, 0.0394952, 0.927023, 0.410307, 0.629788, 0.223931, 0.0297518, 0.933033, 0.163113, 0.336661, 0.32108, 0.359461, 0.947965, 0.872167, 0.322335, 0.764382, 0.750062, 0.225013, 0.0805743, 0.534038, 0.0233317, 0.158118, 0.805001, 0.296921, 0.788203, 0.729876, 0.951557, 0.303947, 0.251502, 0.494804, 0.644525, 0.936817, 0.324311, 0.637434, 0.565348, 0.0717788, 0.0867897, 0.737313, 0.402824, 0.0963357, 0.106229, 0.94528, 0.764048, 0.247693, 0.443471, 0.940045, 0.493419, 0.735704, 0.944935, 0.991131, 0.062571, 0.0335439, 0.0746373, 0.623001, 0.809056, 0.503777, 0.446029, 0.933175, 0.965526, 0.063913, 0.673178, 0.59794, 0.0840257, 0.25734, 0.908005, 0.583821, 0.752881, 0.472197, 0.967097, 0.264547, 0.0486276, 0.173953, 0.338983, 0.706356, 0.884443, 0.569925, 0.287352, 0.237714, 0.630705, 0.035348, 0.908494, 0.315352, 0.273475, 0.277392, 0.25668, 0.368349, 0.519099, 0.0555855, 0.633588, 0.882294, 0.863841, 0.298166, 0.799546, 0.846736, 0.511809, 0.599913, 0.473183, 0.481996, 0.877521, 0.0442833, 0.940653, 0.369215, 0.818309, 0.73849, 0.634953, 0.712853, 0.26148, 0.0692014, 0.831899, 0.685415, 0.283157, 0.736262, 0.67264, 0.190786, 0.0676505, 0.707078, 0.581308, 0.67221, 0.11386, 0.457312, 0.392375, 0.439219, 0.769211, 0.561258, 0.866343, 0.397924, 0.523627, 0.0617751, 0.831688, 0.512164, 0.821152, 0.502621, 0.0959835, 0.555967, 0.042713, 0.0924852, 0.508336, 0.473067, 0.451851, 0.992751, 0.641368, 0.299594, 0.320116, 0.454368, 0.436705, 0.601497, 0.135846, 0.484692, 0.284487, 0.540245, 0.190384, 0.54509, 0.143587, 0.629552, 0.324553, 0.595499, 0.844717, 0.201874, 0.814576, 0.197463, 0.986802, 0.148523, 0.567081, 0.136555, 0.780773, 0.204797, 0.118722, 0.881193, 0.432444, 0.280083, 0.0217249, 0.54529, 0.503856, 0.89004, 0.470679, 0.676714, 0.457365, 0.255248, 0.923055, 0.0443761, 0.552984, 0.142928, 0.69265, 0.0879481, 0.295874, 0.567054, 0.0142062, 0.929392, 0.908689, 0.448321, 0.651092, 0.0557385, 0.101893, 0.399297, 0.48606, 0.527425, 0.00356473, 0.616862, 0.211265, 0.984485, 0.567244, 0.317252, 0.751636, 0.987065, 0.152741, 0.0829415, 0.00948974, 0.0764865, 0.859403, 0.667688, 0.408172, 0.960244, 0.804079, 0.938819, 0.21643, 0.122151, 0.416281, 0.707288, 0.297755, 0.370217, 0.0417245, 0.56875, 0.115263, 0.633719, 0.459242, 0.319668, 0.101516, 0.535005, 0.687743, 0.0977507, 0.0834855, 0.251136, 0.233068, 0.596992, 0.833053, 0.584381, 0.519565, 0.192852, 0.592033, 0.382766, 0.0436281, 0.923847, 0.921051, 0.590964, 0.619312, 0.568616, 0.613199, 0.467516, 0.991839, 0.944851, 0.991241, 0.134093, 0.950621, 0.430953, 0.441657, 0.46819, 0.393092, 0.0628691, 0.0521101, 0.452955, 0.189459, 0.779002, 0.9959, 0.26145, 0.673013, 0.794916, 0.103836, 0.465761, 0.85105, 
0.985666, 0.836831, 0.516769, 0.716464, 0.19357, 0.242615, 0.382491, 0.313633, 0.848997, 0.859264, 0.727595, 0.29825, 0.899589, 0.584103, 0.926454, 0.690807, 0.439755, 0.340988, 0.258296, 0.931776, 0.0256324, 0.316941, 0.757241, 0.844794, 0.694142, 0.881089, 0.874026, 0.690994, 0.789913, 0.00531765, 0.940563, 0.723211, 0.12176, 0.131234, 0.731573, 0.495006, 0.0516717, 0.604952, 0.369937, 0.843237, 0.920012, 0.628829, 0.420778, 0.973326, 0.15281, 0.00139745, 0.853154, 0.851795, 0.741038, 0.117861, 0.882016, 0.509814, 0.516959, 0.907246, 0.697692, 0.668194, 0.846559, 0.576253, 0.922104, 0.359445, 0.0611456, 0.206406, 0.105926, 0.154387, 0.585589, 0.437756, 0.3165, 0.587017, 0.0847536, 0.886007, 0.119009, 0.979246, 0.213609, 0.465853, 0.272178, 0.439809, 0.988716, 0.488015, 0.417512, 0.518881, 0.288539, 0.903198, 0.787729, 0.454393, 0.262874, 0.894397, 0.153654, 0.400079, 0.643713, 0.123454, 0.844473, 0.372163, 0.877639, 0.833295, 0.549217, 0.685982, 0.127869, 0.95698, 0.779552, 0.128356, 0.259681, 0.0754659, 0.0835692, 0.537261, 0.817103, 0.614817, 0.0325292, 0.0207384, 0.827517, 0.808246, 0.26346, 0.803749, 0.15587, 0.74223, 0.634925, 0.773878, 0.131699, 0.450653, 0.981115, 0.00958384, 0.287704, 0.443703, 0.216868, 0.792684, 0.907434, 0.378667, 0.165051, 0.748475, 0.136135, 0.659939, 0.958408, 0.744024, 0.725204, 0.632248, 0.299641, 0.922975, 0.919256, 0.230537, 0.691596, 0.249633, 0.0888904, 0.892798, 0.710505, 0.943026, 0.386294, 0.600829, 0.676397, 0.132339, 0.0813851, 0.785415, 0.732045, 0.721045, 0.343798, 0.718697, 0.441188, 0.322981, 0.121907, 0.806755, 0.597458, 0.64584, 0.717871, 0.209593, 0.598041, 0.350003, 0.445812, 0.613345, 0.437672, 0.382453, 0.131631, 0.66655, 0.401186, 0.0837397, 0.446507, 0.726394, 0.681641, 0.586691, 0.248831, 0.449398, 0.930469, 0.0666129, 0.780538, 0.589497, 0.840138, 0.564973, 0.0612079, 0.821556, 0.554189, 0.799224, 0.736642, 0.269208, 0.292914, 0.596551, 0.729427, 0.49402, 0.773192, 0.219683, 0.824812, 0.840631, 0.0501591, 0.134543, 0.583413, 0.791894, 0.986005, 0.862345, 0.915087, 0.369461, 0.588393, 0.512686, 0.678814, 0.458227, 0.806283, 0.701131, 0.571952, 0.702168, 0.202198, 0.519259, 0.172496, 0.599499, 0.147234, 0.02575, 0.97035, 0.898833, 0.874705, 0.79621, 0.980984, 0.656772, 0.751776, 0.251058, 0.763427, 0.224289, 0.295172, 0.00522108, 0.762511, 0.787555, 0.00467026, 0.128477, 0.701758, 0.978284, 0.00768466, 0.865646, 0.841283, 0.524041, 0.375165, 0.674822, 0.0847203, 0.0682938, 0.969406, 0.195572, 0.551269, 0.985086, 0.622014, 0.424878, 0.097507, 0.140416, 0.838827, 0.485533, 0.737476, 0.735698, 0.0454709, 0.311185, 0.0898184, 0.795013, 0.529925, 0.474088, 0.344048, 0.943813, 0.527794, 0.324931, 0.961191, 0.267476, 0.603167, 0.178598, 0.7503, 0.987956, 0.555531, 0.215508, 0.382784, 0.656302, 0.993506, 0.842078, 0.372415, 0.266801, 0.156394, 0.831571, 0.137432, 0.624203, 0.835836, 0.606334, 0.838089, 0.694928, 0.856823, 0.180364, 0.338147, 0.759741, 0.206241, 0.829618, 0.9061, 0.597172, 0.11924, 0.358647, 0.142011, 0.945087, 0.474836, 0.648693, 0.86104, 0.47665, 0.757002, 0.494125, 0.868545, 0.306198, 0.639508, 0.704826, 0.995186, 0.175675, 0.0806422, 0.138095, 0.0881491, 0.428765, 0.451255, 0.918603, 0.262482, 0.2982, 0.537161, 0.838592, 0.235128, 0.0353797, 0.884897, 0.940754, 0.79639, 0.946324, 0.569961, 0.303534, 0.0571423, 0.278763, 0.40931, 0.675236, 0.728678, 0.247318, 0.198361, 0.892861, 0.188737, 0.340076, 0.194367, 0.0645981, 0.873336, 0.00749971, 0.569247, 0.633541, 0.959514, 0.089256, 0.203743, 0.228359, 0.584494, 0.550622, 
0.897252, 0.383225, 0.45936, 0.866613, 0.125279, 0.120562, 0.0766141, 0.816719, 0.0671213, 0.440454, 0.45157, 0.0530044, 0.413946, 0.0535851, 0.0575507, 0.45907, 0.923819, 0.491762, 0.0633612, 0.468905, 0.884023, 0.0176603, 0.736134, 0.205456, 0.646979, 0.390688, 0.112598, 0.275741, 0.0894479, 0.51998, 0.810482, 0.403162, 0.555734, 0.0242944, 0.110794, 0.829616, 0.683351, 0.391986, 0.502472, 0.0781493, 0.15912, 0.478358, 0.570563, 0.0540207, 0.967255, 0.342681, 0.274707, 0.333183, 0.43953, 0.535844, 0.783524, 0.0546468, 0.30066, 0.066254, 0.483775, 0.161847, 0.782927, 0.0172688, 0.0646114, 0.669899, 0.714202, 0.679073, 0.0763925, 0.816612, 0.201151, 0.571907, 0.321925, 0.37268, 0.299367, 0.126555, 0.934309, 0.903748, 0.444695, 0.564585, 0.64596, 0.506742, 0.202397, 0.893715, 0.388493, 0.2197, 0.143169, 0.699541, 0.701281, 0.432727, 0.955981, 0.0870339, 0.489724, 0.886849, 0.473284, 0.824864, 0.941409, 0.70849, 0.950999, 0.456397, 0.084792, 0.59477, 0.326086, 0.244944, 0.984625, 0.424332, 0.903711, 0.721101, 0.0111986, 0.305794, 0.311496, 0.515195, 0.239026, 0.536631, 0.366645, 0.394777, 0.3873, 0.328341, 0.609614, 0.389689, 0.391797, 0.434262, 0.556263, 0.287584, 0.188432, 0.956155, 0.304082, 0.503736, 0.915129, 0.954323, 0.412722, 0.506281, 0.194787, 0.490927, 0.264479, 0.254388, 0.849598, 0.801772, 0.620258, 0.0071051, 0.454038, 0.0346459, 0.57328, 0.549966, 0.162462, 0.229432, 0.563818, 0.431203, 0.451108, 0.239087, 0.0223499, 0.767006, 0.941499, 0.909868, 0.4759, 0.289608, 0.382406, 0.69978, 0.324804, 0.829982, 0.57533, 0.581939, 0.0393368, 0.579975, 0.639501, 0.285916, 0.518992, 0.443243, 0.951329, 0.569127, 0.97862, 0.386882, 0.195255, 0.181794, 0.809233, 0.560124, 0.668163, 0.513488, 0.0876116, 0.943987, 0.436513, 0.743975, 0.383052, 0.277199, 0.598564, 0.138906, 0.31616, 0.560288, 0.463073, 0.731662, 0.869975, 0.123616, 0.634357, 0.354597, 0.120499, 0.383482, 0.846482, 0.967433, 0.108526, 0.584536, 0.555393, 0.73107, 0.749304, 0.688501, 0.400935, 0.772408, 0.104643, 0.756827, 0.94838, 0.756186, 0.362544, 0.627231, 0.848773, 0.432341, 0.65668, 0.735194, 0.850105, 0.062755, 0.146389, 0.317991, 0.0737026, 0.67427, 0.485489, 0.270793, 0.787026, 0.0720512, 0.987549, 0.0569218, 0.255272, 0.68926, 0.608096, 0.633461, 0.648547, 0.769407, 0.830182, 0.322531, 0.144775, 0.779242, 0.845269, 0.0243326, 0.0405723, 0.873524, 0.753952, 0.972986, 0.510371, 0.476096, 0.159424, 0.701166, 0.376896, 0.858272, 0.565385, 0.250291, 0.610069, 0.620679, 0.458904, 0.720255, 0.616195, 0.227057, 0.0542266, 0.848246, 0.70316, 0.535677, 0.900922, 0.920894, 0.146911, 0.3533, 0.561009, 0.284157, 0.174043, 0.656678, 0.196008, 0.199449, 0.0442609, 0.680369, 0.428183, 0.262736, 0.743547, 0.488797, 0.981806, 0.64651, 0.838369, 0.870618, 0.980026, 0.0422801, 0.422279, 0.988454, 0.667217, 0.749536, 0.0917313, 0.902354, 0.0681661, 0.0282509, 0.19348, 0.377296, 0.932179, 0.576296, 0.735157, 0.795614, 0.136861, 0.703529, 0.132033, 0.950966, 0.307774, 0.363944, 0.115775, 0.281715, 0.937972, 0.143547, 0.930937, 0.438592, 0.489861, 0.8832, 0.123867, 0.381358, 0.391431, 0.0780478, 0.509486, 0.425146, 0.303328, 0.133067, 0.198759, 0.692799, 0.0851842, 0.897908, 0.741917, 0.512414, 0.345901, 0.0204231, 0.272198, 0.727947, 0.626249, 0.173805, 0.652727, 0.101894, 0.750636, 0.49131, 0.6225, 0.748916, 0.134644, 0.80854, 0.496879, 0.173762, 0.370642, 0.534129, 0.163196, 0.185132, 0.442204, 0.0942393, 0.167814, 0.214614, 0.435669, 0.551141, 0.146443, 0.590189, 0.0431735, 0.233042, 0.438819, 0.306157, 0.130507, 0.824651, 0.913926, 
0.0630085, 0.513333, 0.316679, 0.0334001, 0.961398, 0.424998, 0.871981, 0.0704637, 0.798644, 0.853883, 0.957443, 0.0902814, 0.531197, 0.622826, 0.969367, 0.566445, 0.329525, 0.656435, 0.937621, 0.160278, 0.5453, 0.724801, 0.0263785, 0.651574, 0.780021, 0.62934, 0.347304, 0.495063, 0.162784, 0.364913, 0.163988, 0.305767, 0.70278, 0.407849, 0.0919207, 0.317498, 0.73957, 0.199775, 0.834567, 0.101468, 0.400429, 0.818875, 0.950692, 0.285147, 0.143908, 0.818654, 0.683222, 0.829862, 0.892098, 0.716362, 0.414078, 0.18835, 0.992772, 0.511298, 0.220548, 0.0635921, 0.297934, 0.807925, 0.607986, 0.0877825, 0.132301, 0.728175, 0.812418, 0.898057, 0.653904, 0.960266, 0.320779, 0.445331, 0.776218, 0.409451, 0.359341, 0.524256, 0.028275, 0.977368, 0.301357, 0.696436, 0.0459401, 0.76983, 0.601018, 0.340728, 0.408712, 0.568339, 0.407496, 0.58856, 0.0757559, 0.35058, 0.00837173, 0.00562885, 0.975881, 0.810547, 0.488337, 0.312578, 0.98549, 0.851896, 0.322023, 0.34399, 0.23832, 0.258811, 0.823943, 0.0716831, 0.988663, 0.0460019, 0.275174, 0.810075, 0.619748, 0.0901874, 0.033642, 0.566351, 0.712041, 0.313472, 0.0923219, 0.986056, 0.673149, 0.878125, 0.140528, 0.166762, 0.994571, 0.894365, 0.774653, 0.417661, 0.666718, 0.898203, 0.227722, 0.598469, 0.928879, 0.379994, 0.984697, 0.231839, 0.310736, 0.341496, 0.459493, 0.302215, 0.641954, 0.898099, 0.0209997, 0.737949, 0.0626889, 0.831237, 0.0313842, 0.180131, 0.601948, 0.797174, 0.0884408, 0.642245, 0.459643, 0.893664, 0.40371, 0.658711, 0.881245, 0.591686, 0.878759, 0.755583, 0.204975, 0.306776, 0.73216, 0.791955, 0.514798, 0.337436, 0.783948, 0.0557404, 0.452276, 0.84683, 0.145528, 0.220488, 0.241047, 0.557556, 0.415007, 0.242669, 0.360846, 0.522166, 0.292326, 0.587668, 0.638592, 0.436138, 0.110713, 0.196891, 0.591566, 0.283637, 0.0102975, 0.387574, 0.479553, 0.741745, 0.678875, 0.937641, 0.609497, 0.32044, 0.00940666, 0.127061, 0.194438, 0.905781, 0.860754, 0.445213, 0.854932, 0.729661, 0.136819, 0.0108726, 0.980453, 0.0270293, 0.139369, 0.793899, 0.642188, 0.86674, 0.279443, 0.79022, 0.867757, 0.0843141, 0.995867, 0.537412, 0.753968, 0.607794, 0.944857, 0.190186, 0.0997002, 0.984877, 0.0701893, 0.576752, 0.0580847, 0.0867438, 0.444684, 0.916767, 0.213647, 0.749927, 0.738357, 0.845744, 0.362546, 0.648168, 0.398376, 0.949009, 0.863117, 0.027939, 0.85657, 0.591879, 0.193953, 0.502273, 0.599404, 0.375159, 0.512255, 0.267081, 0.511835, 0.955413, 0.0697231, 0.724459, 0.352424, 0.947933, 0.664158, 0.698859, 0.803828, 0.504431, 0.751099, 0.590227, 0.280099, 0.584827, 0.0923306, 0.203134, 0.702428, 0.218551, 0.319046, 0.412581, 0.398295, 0.378101, 0.241449, 0.45341, 0.670537, 0.602427, 0.378714, 0.790711, 0.374994, 0.462503, 0.0953595, 0.548471, 0.0991406, 0.949991, 0.822281, 0.531369, 0.324175, 0.730305, 0.736284, 0.138818, 0.0894012, 0.311242, 0.840171, 0.119094, 0.801153, 0.569853, 0.590313, 0.758391, 0.266365, 0.395676, 0.236105, 0.583137, 0.442199, 0.426346, 0.268307, 0.336632, 0.793279, 0.468524, 0.52161, 0.496028, 0.689862, 0.434917, 0.476048, 0.703495, 0.527915, 0.701419, 0.640665, 0.471923, 0.509542, 0.801378, 0.937335, 0.378823, 0.876191, 0.870085, 0.0750793, 0.749023, 0.135718, 0.0954842, 0.990025, 0.114978, 0.232678, 0.927843, 0.107381, 0.0494472, 0.810999, 0.610688, 0.500701, 0.281554, 0.146244, 0.0468897, 0.238109, 0.622833, 0.668599, 0.338654, 0.704283, 0.726535, 0.00568137, 0.0550614, 0.375877, 0.629756, 0.208639, 0.197606, 0.322134, 0.102052, 0.141556, 0.865068, 0.609663, 0.618753, 0.5473, 0.75936, 0.799073, 0.951893, 0.223308, 0.385897, 0.709781, 
0.661421, 0.677739, 0.922779, 0.855047, 0.899517, 0.914109, 0.980888, 0.0344011, 0.188345, 0.701074, 0.94298, 0.218323, 0.332275, 0.0500087, 0.176891, 0.896834, 0.193207, 0.977044, 0.3858, 0.619537, 0.437899, 0.0960618, 0.795966, 0.628384, 0.0776769, 0.795065, 0.404605, 0.613843, 0.837637, 0.175999, 0.512611, 0.600492, 0.0617713, 0.897539, 0.77196, 0.126898, 0.0734735, 0.109476, 0.127198, 0.573614, 0.00498635, 0.651288, 0.950238, 0.503129, 0.553157, 0.156796, 0.197561, 0.435144, 0.334538, 0.96178, 0.917661, 0.834138, 0.287505, 0.727459, 0.157645, 0.447587, 0.0999288, 0.049521, 0.400007, 0.134395, 0.942935, 0.516067, 0.505666, 0.175857, 0.136792, 0.974526, 0.853546, 0.55864, 0.357897, 0.258976, 0.935017, 0.33172, 0.702221, 0.590312, 0.972901, 0.968517, 0.653598, 0.499361, 0.843778, 0.717343, 0.819272, 0.0534352, 0.738794, 0.865196, 0.952694, 0.546635, 0.233636, 0.351183, 0.735601, 0.96854, 0.366732, 0.823323, 0.490432, 0.226938, 0.485313, 0.345484, 0.892524, 0.501069, 0.964373, 0.335591, 0.879288, 0.18686, 0.0321524, 0.75162, 0.93297, 0.630067, 0.90457, 0.4632, 0.480852, 0.0442153, 0.44118, 0.317977, 0.616316, 0.879621, 0.458411, 0.144191, 0.641805, 0.947707, 0.431239, 0.449253, 0.268696, 0.52497, 0.320578, 0.743107, 0.0864674, 0.682139, 0.968487, 0.665753, 0.213749, 0.352546, 0.910324, 0.20406, 0.923442, 0.370019, 0.0305328, 0.0703193, 0.684224, 0.682035, 0.235759, 0.0598054, 0.153728, 0.933166, 0.959948, 0.714164, 0.968741, 0.527351, 0.259318, 0.595798, 0.343745, 0.695488, 0.135958, 0.855851, 0.136124, 0.770493, 0.441507, 0.488841, 0.81911, 0.78977, 0.0110584, 0.109171, 0.266597, 0.821372, 0.660634, 0.318648, 0.566565, 0.788814, 0.45255, 0.871084, 0.0325625, 0.129959, 0.773467, 0.376836, 0.281526, 0.121645, 0.107249, 0.341292, 0.925537, 0.545791, 0.290171, 0.0491037, 0.519666, 0.192151, 0.630722, 0.760955, 0.175683, 0.166529, 0.882514, 0.992895, 0.783557, 0.225989, 0.732245, 0.942898, 0.66103, 0.865137, 0.397328, 0.857736, 0.750886, 0.474988, 0.337379, 0.789872, 0.125787, 0.478338, 0.831392, 0.824986, 0.888459, 0.827697, 0.289947, 0.741771, 0.573338, 0.652634, 0.517717, 0.579062, 0.348018, 0.00771095, 0.339708, 0.440065, 0.484628, 0.95306, 0.700092, 0.141648, 0.102574, 0.636408, 0.193749, 0.013662, 0.0382704, 0.902556, 0.986144, 0.0240769, 0.775517, 0.895917, 0.48779, 0.235743, 0.756005, 0.603748, 0.343958, 0.336382, 0.521828, 0.631383, 0.931337, 0.0306554, 0.719708, 0.90717, 0.607976, 0.588364, 0.378322, 0.767749, 0.728055, 0.207107, 0.587616, 0.634505, 0.701187, 0.126658, 0.762471, 0.430484, 0.093472, 0.493245, 0.399296, 0.0229555, 0.940902, 0.889504, 0.447475, 0.109747, 0.752437, 0.163893, 0.814091, 0.625132, 0.578121, 0.831421, 0.102199, 0.0416217, 0.836125, 0.0918715, 0.959896, 0.0926651, 0.959124, 0.849847, 0.553316, 0.102173, 0.723574, 0.727884, 0.480109, 0.137392, 0.213244, 0.0733534, 0.974065, 0.230907, 0.639206, 0.594454, 0.991072, 0.98572, 0.293588, 0.79368, 0.029905, 0.384468, 0.589754, 0.710729, 0.254933, 0.615357, 0.0573365, 0.463609, 0.989581, 0.400036, 0.540456, 0.37337, 0.235228, 0.849844, 0.385581, 0.742108, 0.283697, 0.927321, 0.612357, 0.324445, 0.662181, 0.954055, 0.741108, 0.219976, 0.69014, 0.845905, 0.787315, 0.993019, 0.40631, 0.421558, 0.393458, 0.402337, 0.22323, 0.570397, 0.204724, 0.69529, 0.629551, 0.384306, 0.477271, 0.951049, 0.629508, 0.00393453, 0.676047, 0.219835, 0.899089, 0.072471, 0.151961, 0.67556, 0.0710069, 0.752718, 0.494007, 0.129299, 0.680351, 0.445954, 0.377099, 0.315048, 0.898073, 0.100339, 0.547383, 0.689143, 0.727338, 0.0878929, 0.805415, 
0.0332215, 0.0135245, 0.968045, 0.470475, 0.426812, 0.151168, 0.327002, 0.0746566, 0.694102, 0.249846, 0.53805, 0.1441, 0.466625, 0.363997, 0.580457, 0.753993, 0.97815, 0.74302, 0.961015, 0.470217, 0.6599, 0.56503, 0.324455, 0.816659, 0.875343, 0.383594, 0.820232, 0.861688, 0.492614, 0.23466, 0.0433097, 0.829025, 0.217438, 0.790776, 0.0391246, 0.81177, 0.311744, 0.52641, 0.63995, 0.877915, 0.373086, 0.439808, 0.18672, 0.589243, 0.588398, 0.481351, 0.678122, 0.0946417, 0.102651, 0.125072, 0.475044, 0.895553, 0.236166, 0.338332, 0.639829, 0.173296, 0.892164, 0.246241, 0.53134, 0.357817, 0.395977, 0.652176, 0.473679, 0.45795, 0.341496, 0.477644, 0.789266, 0.35552, 0.872661, 0.688363, 0.0498584, 0.671786, 0.517444, 0.565655, 0.414789, 0.118537, 0.313813, 0.812885, 0.653228, 0.679541, 0.84281, 0.935871, 0.363317, 0.731685, 0.386262, 0.627775, 0.631725, 0.663584, 0.242816, 0.129252, 0.334088, 0.850846, 0.942745, 0.85427, 0.603297, 0.633843, 0.697019, 0.305404, 0.612945, 0.852006, 0.614222, 0.345229, 0.789779, 0.0566866, 0.0562014, 0.660246, 0.307533, 0.33637, 0.0899305, 0.416154, 0.151361, 0.770565, 0.48958, 0.604568, 0.825134, 0.13394, 0.82566, 0.567464, 0.751875, 0.659559, 0.758812, 0.817692, 0.483871, 0.520196, 0.535844, 0.775365, 0.227697, 0.36938, 0.833216, 0.254575, 0.762152, 0.741347, 0.545219, 0.441746, 0.849555, 0.594417, 0.300895, 0.124644, 0.131122, 0.593037, 0.436388, 0.41736, 0.283297, 0.759466, 0.837342, 0.481078, 0.864737, 0.917869, 0.529607, 0.238303, 0.621135, 0.884823, 0.45449, 0.86557, 0.235833, 0.416272, 0.554211, 0.554456, 0.705827, 0.012815, 0.854081, 0.718499, 0.356533, 0.202595, 0.935999, 0.244963, 0.519483, 0.152643, 0.893804, 0.636804, 0.96507, 0.56994, 0.446239, 0.744207, 0.728589, 0.218044, 0.345814, 0.760004, 0.560206, 0.145292, 0.867799, 0.446157, 0.551633, 0.674266, 0.29103, 0.365323, 0.397435, 0.065432, 0.211588, 0.655559, 0.973423, 0.51257, 0.404396, 0.664652, 0.0237282, 0.57849, 0.763437, 0.683453, 0.446456, 0.0926756, 0.11274, 0.211531, 0.422243, 0.945334, 0.697837, 0.0741882, 0.634644, 0.236934, 0.322322, 0.522515, 0.636193, 0.877149, 0.247213, 0.374506, 0.844406, 0.440529, 0.309886, 0.479818, 0.946139, 0.643806, 0.281207, 0.482617, 0.849676, 0.813769, 0.710806, 0.946587, 0.565293, 0.82707, 0.678064, 0.441115, 0.149597, 0.648184, 0.816263, 0.109848, 0.900922, 0.933684, 0.217823, 0.565672, 0.594669, 0.188874, 0.551345, 0.899023, 0.00244778, 0.678969, 0.828241, 0.401308, 0.811662, 0.315149, 0.909772, 0.857309, 0.82949, 0.921723, 0.914055, 0.30077, 0.138511, 0.260424, 0.593307, 0.135603, 0.22236, 0.83948, 0.474553, 0.620751, 0.406678, 0.718506, 0.07157, 0.279514, 0.0359867, 0.228832, 0.987212, 0.342517, 0.576881, 0.13873, 0.08103, 0.0102425, 0.393001, 0.607157, 0.593528, 0.723289, 0.240303, 0.353078, 0.700454, 0.813112, 0.473639, 0.970229, 0.807953, 0.359786, 0.698703, 0.295981, 0.886268, 0.877439, 0.295527, 0.869798, 0.900195, 0.979531, 0.00316615, 0.37165, 0.303764, 0.763804, 0.0960253, 0.410254, 0.449359, 0.031615, 0.604923, 0.115525, 0.650832, 0.361404, 0.296024, 0.832259, 0.346458, 0.322406, 0.430431, 0.238739, 0.456459, 0.309478, 0.772186, 0.253587, 0.701643, 0.254196, 0.877354, 0.30961, 0.877492, 0.453411, 0.971726, 0.656834, 0.682272, 0.0405702, 0.68186, 0.028651, 0.799801, 0.918522, 0.727565, 0.33074, 0.785966, 0.972694, 0.781375, 0.987683, 0.0937511, 0.984581, 0.822636, 0.853316, 0.233179, 0.804334, 0.545647, 0.941473, 0.429909, 0.157613, 0.042413, 0.0882351, 0.0386067, 0.168782, 0.106085, 0.851197, 0.118448, 0.534217, 0.787002, 0.307399, 0.180329, 
0.942904, 0.976713, 0.796245, 0.673216, 0.665704, 0.0614658, 0.39687, 0.0665469, 0.690911, 0.961947, 0.431956, 0.518961, 0.112723, 0.764305, 0.169623, 0.216358, 0.0508767, 0.0368147, 0.513833, 0.285068, 0.312922, 0.0283073, 0.528693, 0.901117, 0.00231425, 0.455522, 0.134293, 0.20798, 0.0685364, 0.465098, 0.513741, 0.590951, 0.585929, 0.39238, 0.813761, 0.234718, 0.00798184, 0.589921, 0.1209, 0.544092, 0.0728818, 0.986865, 0.963078, 0.434846, 0.0592174, 0.988931, 0.148709, 0.554836, 0.681992, 0.248978, 0.771741, 0.477407, 0.24027, 0.578471, 0.399146, 0.940467, 0.0342766, 0.487298, 0.484237, 0.86446, 0.54988, 0.920788, 0.482808, 0.384468, 0.323015, 0.0640676, 0.590045, 0.847143, 0.262915, 0.15837, 0.346961, 0.903005, 0.0904639, 0.398354, 0.353884, 0.381244, 0.724662, 0.659623, 0.905798, 0.484195, 0.883331, 0.686008, 0.777975, 0.580637, 0.746604, 0.266623, 0.129117, 0.287785, 0.467636, 0.077441, 0.444312, 0.0606906, 0.606737, 0.587066, 0.708817, 0.133486, 0.623995, 0.390257, 0.466193, 0.572793, 0.793069, 0.923973, 0.885275, 0.613505, 0.621508, 0.278417, 0.544427, 0.227023, 0.579488, 0.944205, 0.554251, 0.92258, 0.368187, 0.0617916, 0.485602, 0.847412, 0.262056, 0.605826, 0.79763, 0.386413, 0.240654, 0.126525, 0.152372, 0.790945, 0.890004, 0.49452, 0.310679, 0.569422, 0.557446, 0.0972526, 0.974292, 0.319327, 0.170425, 0.158451, 0.181292, 0.324343, 0.482575, 0.23888, 0.0473619, 0.712567, 0.894911, 0.333934, 0.636447, 0.421969, 0.464149, 0.728045, 0.694584, 0.332859, 0.27852, 0.224442, 0.500255, 0.11779, 0.872372, 0.111546, 0.288005, 0.248654, 0.767496, 0.548099, 0.00729712, 0.530681, 0.672828, 0.675277, 0.00641172, 0.693643, 0.201998, 0.8822, 0.71006, 0.845775, 0.219052, 0.526977, 0.683796, 0.0651949, 0.885758, 0.0993864, 0.7521, 0.486963, 0.565048, 0.0380388, 0.78097, 0.423225, 0.232267, 0.752506, 0.935552, 0.991302, 0.363166, 0.166649, 0.553225, 0.766028, 0.726783, 0.234747, 0.651213, 0.0501403, 0.215555, 0.721124, 0.526623, 0.107751, 0.0986246, 0.350576, 0.632459, 0.806925, 0.986724, 0.503427, 0.89563, 0.135046, 0.131089, 0.13042, 0.680536, 0.194616, 0.0838534, 0.340484, 0.584794, 0.529969, 0.90306, 0.970121, 0.774843, 0.740918, 0.631843, 0.849509, 0.827182, 0.119973, 0.0955378, 0.075458, 0.834584, 0.532189, 0.665657, 0.805814, 0.858219, 0.730203, 0.196198, 0.566032, 0.603753, 0.159033, 0.0184471, 0.162071, 0.318345, 0.343281, 0.497409, 0.538695, 0.250598, 0.774254, 0.0926591, 0.833227, 0.453374, 0.854888, 0.828887, 0.803833, 0.258102, 0.847914, 0.578454, 0.85191, 0.0348968, 0.00709585, 0.430207, 0.526198, 0.938686, 0.730122, 0.661965, 0.496286, 0.926161, 0.190591, 0.775205, 0.462369, 0.0878296, 0.179312, 0.806409, 0.990705, 0.716664, 0.785454, 0.284018, 0.750923, 0.688451, 0.675176, 0.0552854, 0.137542, 0.795408, 0.799335, 0.72947, 0.559885, 0.681329, 0.892869, 0.337649, 0.877184, 0.775649, 0.355719, 0.99419, 0.348827, 0.352518, 0.524771, 0.253467, 0.338579, 0.580983, 0.803195, 0.522006, 0.523621, 0.670412, 0.492276, 0.671907, 0.0820314, 0.0755928, 0.904965, 0.893803, 0.188026, 0.509634, 0.975302, 0.249063, 0.179232, 0.32184, 0.00170983, 0.291098, 0.82429, 0.185485, 0.999333, 0.275786, 0.947017, 0.93035, 0.957691, 0.809511, 0.0708173, 0.214065, 0.982884, 0.606091, 0.332046, 0.773362, 0.253258, 0.396683, 0.542991, 0.532282, 0.0806511, 0.258931, 0.907905, 0.572239, 0.781055, 0.165028, 0.963063, 0.751667, 0.0629895, 0.0533429, 0.313023, 0.679359, 0.254258, 0.653318, 0.309982, 0.908514, 0.508617, 0.387468, 0.964641, 0.296241, 0.766548, 0.103427, 0.549489, 0.520825, 0.326314, 0.236405, 
0.0752578, 0.064323, 0.936622, 0.64045, 0.273236, 0.777044, 0.829876, 0.795377, 0.0883918, 0.606991, 0.18328, 0.202031, 0.601384, 0.834526, 0.668589, 0.679766, 0.515227, 0.0901979, 0.457051, 0.68674, 0.727562, 0.227659, 0.709396, 0.436637, 0.474253, 0.0137946, 0.0644583, 0.254809, 0.0233766, 0.0756732, 0.726718, 0.15911, 0.850898, 0.434646, 0.44796, 0.053404, 0.900504, 0.587858, 0.423076, 0.720507, 0.334164, 0.706948, 0.909105, 0.300613, 0.604851, 0.86867, 0.271388, 0.669847, 0.0167656, 0.861554, 0.077821, 0.733018, 0.348124, 0.599139, 0.771288, 0.062589, 0.740457, 0.00565081, 0.589702, 0.182778, 0.820999, 0.593757, 0.761818, 0.335597, 0.935937, 0.858532, 0.0631401, 0.343626, 0.3535, 0.921326, 0.388196, 0.795474, 0.819477, 0.871345, 0.715264, 0.236699, 0.0888337, 0.460016, 0.717278, 0.241951, 0.572598, 0.138847, 0.761385, 0.689444, 0.336782, 0.402254, 0.253664, 0.957427, 0.609713, 0.950624, 0.304603, 0.610227, 0.525213, 0.0259711, 0.316714, 0.572127, 0.82157, 0.407217, 0.529548, 0.330687, 0.987616, 0.135846, 0.169265, 0.460935, 0.0892792, 0.0736386, 0.487791, 0.278372, 0.44187, 0.683973, 0.77193, 0.787922, 0.945419, 0.372715, 0.0631618, 0.94083, 0.0148512, 0.672022, 0.775876, 0.0020973, 0.322331, 0.185871, 0.0218866, 0.601585, 0.0541359, 0.28853, 0.557182, 0.61673, 0.00376734, 0.175793, 0.210734, 0.157517, 0.710277, 0.190945, 0.143232, 0.806647, 0.154759, 0.262934, 0.630335, 0.231349, 0.7522, 0.00389908, 0.723769, 0.959224, 0.291919, 0.347028, 0.714319, 0.229783, 0.233013, 0.723434, 0.717867, 0.0989735, 0.750173, 0.0774084, 0.864228, 0.655428, 0.848505, 0.514754, 0.45118, 0.269854, 0.650527, 0.967951, 0.220624, 0.597638, 0.717378, 0.329439, 0.131894, 0.464656, 0.566333, 0.482473, 0.969188, 0.259669, 0.230619, 0.901337, 0.443747, 0.662217, 0.332049, 0.0475301, 0.249253, 0.114379, 0.285178, 0.904313, 0.0910859, 0.800636, 0.677768, 0.291465, 0.308626, 0.182143, 0.37033, 0.912108, 0.888104, 0.595926, 0.354441, 0.231607, 0.00429094, 0.752775, 0.155457, 0.613871, 0.229395, 0.996085, 0.564239, 0.948173, 0.331992, 0.611012, 0.0780731, 0.283474, 0.794546, 0.147155, 0.865008, 0.310865, 0.65613, 0.819575, 0.836494, 0.434378, 0.332479, 0.462109, 0.802556, 0.557888, 0.894007, 0.84688, 0.658564, 0.439436, 0.157999, 0.0187777, 0.668829, 0.0981938, 0.406871, 0.0598554, 0.0440665, 0.7231, 0.627785, 0.228452, 0.369441, 0.818003, 0.949541, 0.309793, 0.426666, 0.953626, 0.310622, 0.636521, 0.0078374, 0.52237, 0.546259, 0.14587, 0.127715, 0.77997, 0.19584, 0.0460907, 0.222583, 0.245641, 0.0744197, 0.062041, 0.154346, 0.294956, 0.0346921, 0.961903, 0.596522, 0.920379, 0.637078, 0.452331, 0.037705, 0.300133, 0.392585, 0.444479, 0.809781, 0.91904, 0.34609, 0.337059, 0.772944, 0.509076, 0.813418, 0.743843, 0.832422, 0.513355, 0.737389, 0.909522, 0.400143, 0.989196, 0.706781, 0.936147, 0.967412, 0.0790019, 0.683452, 0.0513959, 0.0220148, 0.445829, 0.379712, 0.436938, 0.262968, 0.71458, 0.521984, 0.29096, 0.910776, 0.182706, 0.990957, 0.762616, 0.838326, 0.975322, 0.0426235, 0.157322, 0.529008, 0.718687, 0.720107, 0.106729, 0.108269, 0.495979, 0.141722, 0.351645, 0.839378, 0.345489, 0.526144, 0.367063, 0.462042, 0.591808, 0.661215, 0.576531, 0.48666, 0.768595, 0.00274219, 0.362288, 0.914434, 0.242182, 0.17169, 0.231551, 0.380276, 0.119062, 0.122866, 0.943268, 0.736052, 0.581239, 0.601212, 0.280017, 0.0845222, 0.405734, 0.386272, 0.835841, 0.481437, 0.902706, 0.729822, 0.483459, 0.516436, 0.320269, 0.0900959, 0.854665, 0.425034, 0.658621, 0.672968, 0.274236, 0.159773, 0.155908, 0.908024, 0.874571, 0.157201, 0.479119, 
0.924767, 0.865331, 0.243567, 0.17988, 0.560174, 0.899771, 0.880168, 0.492128, 0.545013, 0.0557516, 0.213721, 0.433411, 0.926724, 0.555485, 0.66838, 0.314699, 0.973913, 0.419982, 0.162812, 0.718897, 0.824389, 0.0573174, 0.267585, 0.263586, 0.535152, 0.429409, 0.279717, 0.216125, 0.421766, 0.358811, 0.738326, 0.293389, 0.291716, 0.610258, 0.347914, 0.274395, 0.623084, 0.84017, 0.809506, 0.328248, 0.0550458, 0.154956, 0.779291, 0.965005, 0.829546, 0.582364, 0.803129, 0.394827, 0.327245, 0.863805, 0.827684, 0.444272, 0.463576, 0.477969, 0.310528, 0.356725, 0.163275, 0.767826, 0.406951, 0.993501, 0.480717, 0.851361, 0.822044, 0.115931, 0.246821, 0.892049, 0.318809, 0.0882003, 0.156773, 0.678496, 0.495272, 0.571106, 0.703626, 0.813238, 0.636719, 0.674915, 0.60579, 0.370936, 0.685799, 0.868517, 0.537657, 0.942709, 0.46806, 0.956807, 0.047549, 0.332575, 0.603114, 0.314875, 0.378283, 0.657079, 0.439649, 0.949845, 0.190255, 0.557969, 0.0978686, 0.180199, 0.0712606, 0.42219, 0.24346, 0.74311, 0.51032, 0.339512, 0.96454, 0.85125, 0.904598, 0.709244, 0.909396, 0.604158, 0.0167792, 0.824189, 0.131455, 0.506255, 0.424453, 0.887814, 0.523445, 0.949804, 0.642764, 0.39447, 0.549367, 0.430829, 0.708628, 0.0988114, 0.882531, 0.137133, 0.460929, 0.0122454, 0.514485, 0.472725, 0.860747, 0.424559, 0.127027, 0.443558, 0.248595, 0.963645, 0.824439, 0.829111, 0.253433, 0.88862, 0.337656, 0.398615, 0.074632, 0.504647, 0.173831, 0.780968, 0.0923362, 0.136717, 0.512784, 0.317403, 0.0866651, 0.621285, 0.410034, 0.153893, 0.377459, 0.698771, 0.0795732, 0.346077, 0.512959, 0.761268, 0.94475, 0.961858, 0.483854, 0.660332, 0.92906, 0.611741, 0.581595, 0.311685, 0.929314, 0.9404, 0.41046, 0.276307, 0.00848755, 0.969586, 0.222466, 0.824088, 0.226543, 0.0996573, 0.932443, 0.152814, 0.0705176, 0.101769, 0.0654273, 0.14785, 0.00966242, 0.904179, 0.0265705, 0.147682, 0.966167, 0.735643, 0.458483, 0.361496, 0.140284, 0.994168, 0.384072, 0.12949, 0.256833, 0.961848, 0.297969, 0.135262, 0.554605, 0.971555, 0.118056, 0.123181, 0.938647, 0.69431, 0.735165, 0.0754149, 0.777311, 0.580156, 0.284853, 0.0186222, 0.877017, 0.166516, 0.872438, 0.837276, 0.177283, 0.209166, 0.930639, 0.992038, 0.071956, 0.302043, 0.989175, 0.331572, 0.810496, 0.721497, 0.249547, 0.88981, 0.126121, 0.26953, 0.183717, 0.438436, 0.0113474, 0.23086, 0.982592, 0.561045, 0.987936, 0.216965, 0.136639, 0.890613, 0.575762, 0.621453, 0.00423407, 0.281635, 0.630328, 0.762428, 0.586908, 0.336568, 0.157239, 0.0181237, 0.278881, 0.280268, 0.310723, 0.107755, 0.794932, 0.94234, 0.82636, 0.0447968, 0.0484417, 0.764108, 0.486863, 0.123541, 0.408821, 0.548363, 0.565195, 0.993258, 0.340211, 0.101428, 0.639352, 0.0331267, 0.722798, 0.968099, 0.44833, 0.104309, 0.432915, 0.514847, 0.180713, 0.255512, 0.149729, 0.709521, 0.278671, 0.630209, 0.720918, 0.666975, 0.0961393, 0.977855, 0.177188, 0.898692, 0.325751, 0.0991652, 0.758603, 0.940151, 0.62894, 0.235112, 0.312575, 0.710541, 0.773737, 0.58237, 0.874781, 0.639464, 0.290089, 0.569295, 0.164518, 0.657867, 0.786519, 0.491751, 0.466474, 0.966587, 0.881463, 0.84333, 0.0278504, 0.966788, 0.754078, 0.055957, 0.426219, 0.398211, 0.832891, 0.424339, 0.388594, 0.689108, 0.640693, 0.443203, 0.244257, 0.563948, 0.875906, 0.00518997, 0.676692, 0.159401, 0.0246055, 0.464375, 0.65923, 0.219716, 0.503831, 0.717623, 0.15368, 0.504457, 0.719898, 0.765235, 0.800815, 0.0451803, 0.825401, 0.609148, 0.935513, 0.389204, 0.698918, 0.770326, 0.975974, 0.909186, 0.18634, 0.163325, 0.703226, 0.823482, 0.88745, 0.530157, 0.377282, 0.412649, 0.0550173, 
0.56406, 0.965351, 0.588554, 0.479646, 0.433134, 0.36783, 0.83208, 0.0279519, 0.059072, 0.317357, 0.579981, 0.417228, 0.868393, 0.719391, 0.307402, 0.987383, 0.0988718, 0.623523, 0.533682, 0.102385, 0.0310876, 0.142984, 0.248113, 0.947057, 0.394611, 0.366998, 0.564097, 0.405845, 0.721784, 0.0687225, 0.412298, 0.655871, 0.213394, 0.179709, 0.212455, 0.514997, 0.6472, 0.204435, 0.423867, 0.531151, 0.845199, 0.968685, 0.52798, 0.805347, 0.780149, 0.823828, 0.0588349, 0.236761, 0.921897, 0.21372, 0.595434, 0.87177, 0.599367, 0.106801, 0.575967, 0.318829, 0.450477, 0.150316, 0.296109, 0.499768, 0.47738, 0.128069, 0.0715869, 0.72745, 0.528861, 0.495088, 0.265099, 0.539606, 0.0896716, 0.468149, 0.705684, 0.828803, 0.351793, 0.872687, 0.0503727, 0.057916, 0.543063, 0.570048, 0.973978, 0.116472, 0.960034, 0.302765, 0.534159, 0.446823, 0.571178, 0.788278, 0.685542, 0.624223, 0.845715, 0.325013, 0.0851581, 0.143677, 0.514044, 0.67958, 0.106702, 0.0944046, 0.538803, 0.450271, 0.749616, 0.443358, 0.989337, 0.591366, 0.729911, 0.507205, 0.573434, 0.230006, 0.81932, 0.557344, 0.00369812, 0.729565, 0.668036, 0.679606, 0.307034, 0.771778, 0.194125, 0.295945, 0.262545, 0.482182, 0.879381, 0.909916, 0.379052, 0.596569, 0.348802, 0.0713749, 0.659126, 0.8039, 0.904802, 0.998137, 0.188463, 0.304638, 0.180875, 0.851244, 0.89946, 0.364322, 0.482404, 0.769585, 0.684069, 0.357839, 0.927881, 0.849572, 0.0148404, 0.173851, 0.55656, 0.0420279, 0.469893, 0.929734, 0.373059, 0.867094, 0.562266, 0.869882, 0.803044, 0.824802, 0.591238, 0.645355, 0.654374, 0.739676, 0.569853, 0.592839, 0.578108, 0.172859, 0.161045, 0.307407, 0.687171, 0.315771, 0.777463, 0.395209, 0.389749, 0.142843, 0.382948, 0.139211, 0.713281, 0.559466, 0.543633, 0.550603, 0.632542, 0.550871, 0.247174, 0.389212, 0.753951, 0.384542, 0.000148124, 0.72436, 0.56891, 0.57391, 0.512715, 0.898969, 0.594874, 0.557189, 0.908967, 0.617391, 0.392906, 0.653329, 0.777935, 0.58031, 0.625075, 0.242305, 0.1198, 0.122227, 0.111774, 0.722752, 0.879915, 0.782339, 0.934779, 0.0186954, 0.0362658, 0.947503, 0.841379, 0.336723, 0.185926, 0.738201, 0.428007, 0.903615, 0.866105, 0.475728, 0.202284, 0.272111, 0.0571659, 0.210737, 0.476015, 0.469073, 0.0611642, 0.0665944, 0.325353, 0.964703, 0.0953343, 0.0682228, 0.83638, 0.601042, 0.0126703, 0.201357, 0.0904805, 0.297979, 0.474657, 0.638882, 0.981478, 0.99664, 0.430499, 0.252657, 0.736575, 0.490264, 0.767054, 0.211742, 0.864591, 0.260054, 0.809009, 0.913942, 0.30213, 0.753506, 0.457655, 0.370254, 0.214893, 0.736468, 0.476589, 0.346709, 0.295778, 0.79027, 0.842565, 0.728237, 0.848804, 0.24776, 0.108304, 0.887907, 0.523082, 0.888509, 0.693114, 0.514055, 0.00202371, 0.440257, 0.811403, 0.459141, 0.765207, 0.898165, 0.946443, 0.925234, 0.941297, 0.688213, 0.815815, 0.0213245, 0.0624914, 0.248203, 0.331839, 0.861401, 0.550611, 0.0653023, 0.254555, 0.4925, 0.0976798, 0.858426, 0.53645, 0.00381209, 0.568045, 0.534942, 0.721444, 0.276038, 0.11547, 0.260258, 0.710199, 0.640796, 0.620652, 0.63514, 0.284293, 0.713824, 0.415793, 0.649285, 0.0575196, 0.618474, 0.596102, 0.864243, 0.864486, 0.116281, 0.00390904, 0.846489, 0.874261, 0.713002, 0.921523, 0.433395, 0.0862394, 0.536478, 0.793742, 0.755592, 0.185232, 0.791599, 0.848238, 0.789237, 0.697622, 0.103263, 0.456274, 0.368645, 0.835412, 0.321818, 0.974223, 0.583227, 0.305607, 0.010266, 0.149779, 0.627738, 0.696854, 0.347965, 0.351781, 0.284056, 0.317928, 0.872251, 0.543762, 0.892236, 0.973598, 0.738517, 0.174874, 0.251516, 0.754915, 0.822497, 0.407258, 0.763458, 0.587331, 0.177994, 
0.795502, 0.526714, 0.243223, 0.47884, 0.538333, 0.296961, 0.689567, 0.284069, 0.150483, 0.29841, 0.83895, 0.163957, 0.550412, 0.759699, 0.228623, 0.358266, 0.764388, 0.0343603, 0.573761, 0.0995418, 0.256265, 0.447006, 0.584478, 0.868124, 0.996181, 0.460933, 0.969999, 0.117898, 0.830214, 0.832377, 0.0981657, 0.752475, 0.0130955, 0.591671, 0.945099, 0.173983, 0.834611, 0.321708, 0.00127714, 0.45433, 0.956984, 0.554794, 0.0443828, 0.0564657, 0.0607032, 0.206122, 0.0508211, 0.193611, 0.885675, 0.248248, 0.0177595, 0.46287, 0.511605, 0.217043, 0.766855, 0.748022, 0.506122, 0.284082, 0.56234, 0.0373831, 0.736352, 0.374462, 0.503192, 0.920619, 0.679205, 0.298362, 0.952551, 0.0242068, 0.670969, 0.207361, 0.757293, 0.818589, 0.712484, 0.51068, 0.953238, 0.740579, 0.484941, 0.703389, 0.0998717, 0.693497, 0.277543, 0.412835, 0.0898437, 0.17018, 0.380656, 0.67726, 0.829685, 0.160571, 0.982515, 0.0105622, 0.178074, 0.180181, 0.985837, 0.04553, 0.879438, 0.39348, 0.541605, 0.105272, 0.509245, 0.45921, 0.186196, 0.364205, 0.440382, 0.648964, 0.406605, 0.885256, 0.739323, 0.846065, 0.773372, 0.42297, 0.39465, 0.547566, 0.846827, 0.783635, 0.698584, 0.802073, 0.115203, 0.715732, 0.796066, 0.590882, 0.793278, 0.583701, 0.370246, 0.0525421, 0.62654, 0.226048, 0.203063, 0.363741, 0.0796044, 0.302193, 0.104387, 0.719226, 0.363957, 0.156718, 0.416547, 0.0413003, 0.0534442, 0.689326, 0.495003, 0.581874, 0.94833, 0.590377, 0.86838, 0.165258, 0.396636, 0.370142, 0.756148, 0.895842, 0.788295, 0.292392, 0.0942366, 0.207767, 0.521764, 0.786284, 0.656198, 0.900616, 0.036613, 0.101972, 0.588965, 0.468545, 0.398949, 0.192605, 0.442695, 0.866613, 0.490497, 0.938659, 0.163865, 0.658106, 0.728801, 0.232202, 0.794796, 0.51414, 0.101016, 0.237841, 0.109641, 0.315046, 0.845791, 0.275807, 0.588913, 0.520171, 0.707007, 0.601738, 0.617549, 0.688958, 0.0586862, 0.253376, 0.311548, 0.811601, 0.345321, 0.883751, 0.240324, 0.83187, 0.156642, 0.296653, 0.671268, 0.820915, 0.31121, 0.37509, 0.161503, 0.694744, 0.600375, 0.599661, 0.468312, 0.28947, 0.374086, 0.119787, 0.063204, 0.955248, 0.251944, 0.977308, 0.237282, 0.563092, 0.441562, 0.0589088, 0.711063, 0.604121, 0.642621, 0.66315, 0.831686, 0.971107, 0.498149, 0.797319, 0.287449, 0.660532, 0.543949, 0.923563, 0.0116415, 0.438079, 0.760688, 0.367991, 0.674523, 0.954855, 0.0322069, 0.716675, 0.293589, 0.880414, 0.360645, 0.787416, 0.371654, 0.662933, 0.622078, 0.703166, 0.720878, 0.63169, 0.146368, 0.206037, 0.786252, 0.843981, 0.603251, 0.0284235, 0.329744, 0.0306811, 0.331363, 0.965364, 0.20077, 0.499229, 0.303143, 0.0352855, 0.0362593, 0.768574, 0.851408, 0.139309, 0.235714, 0.358162, 0.642589, 0.887479, 0.636, 0.92517, 0.233587, 0.701345, 0.860022, 0.467247, 0.999242, 0.613187, 0.543184, 0.309764, 0.884758, 0.594529, 0.00921185, 0.257275, 0.555912, 0.230975, 0.940304, 0.0342401, 0.886004, 0.987629, 0.290885, 0.0431279, 0.192952, 0.184686, 0.510438, 0.399143, 0.293712, 0.0965965, 0.521944, 0.635594, 0.109729, 0.668824, 0.907203, 0.0184083, 0.123525, 0.353211, 0.0878773, 0.735733, 0.439931, 0.821095, 0.414072, 0.790761, 0.999123, 0.0736174, 0.201622, 0.258622, 0.721344, 0.504993, 0.685997, 0.118691, 0.191002, 0.831992, 0.690067, 0.0679851, 0.80831, 0.659104, 0.731067, 0.352092, 0.293331, 0.957118, 0.345333, 0.348707, 0.692154, 0.47406, 0.670504, 0.893248, 0.658411, 0.791697, 0.131207, 0.146328, 0.454866, 0.259299, 0.019731, 0.633885, 0.485144, 0.867405, 0.102936, 0.282226, 0.154727, 0.209302, 0.642264, 0.91506, 0.64719, 0.417825, 0.242511, 0.971619, 0.500971, 0.564887, 
0.0283391, 0.460881, 0.413526, 0.23318, 0.620868, 0.117714, 0.476234, 0.0324641, 0.347729, 0.844452, 0.111522, 0.495495, 0.171264, 0.240243, 0.309112, 0.063638, 0.346869, 0.958359, 0.884077, 0.817148, 0.343429, 0.176891, 0.222144, 0.472591, 0.534492, 0.381877, 0.826799, 0.694224, 0.836962, 0.253868, 0.222637, 0.938627, 0.344025, 0.531818, 0.886584, 0.136461, 0.883926, 0.6252, 0.450213, 0.675853, 0.833127, 0.531458, 0.219885, 0.516905, 0.538804, 0.270947, 0.8083, 0.342117, 0.164903, 0.717373, 0.259723, 0.34928, 0.0031411, 0.477951, 0.76696, 0.631356, 0.56084, 0.393835, 0.62975, 0.442054, 0.760919, 0.438761, 0.588724, 0.394976, 0.740429, 0.892797, 0.890638, 0.889373, 0.479524, 0.898522, 0.252899, 0.601043, 0.340232, 0.507867, 0.604934, 0.673391, 0.864168, 0.761415, 0.0193604, 0.917449, 0.0233324, 0.176041, 0.79358, 0.759087, 0.00338592, 0.418596, 0.946881, 0.836999, 0.783908, 0.536279, 0.0901404, 0.37536, 0.727554, 0.362474, 0.32167, 0.552061, 0.938832, 0.698234, 0.990923, 0.172385, 0.99277, 0.99088, 0.834197, 0.509995, 0.885742, 0.305868, 0.881945, 0.593989, 0.280978, 0.738778, 0.446081, 0.484508, 0.620926, 0.292129, 0.526718, 0.665401, 0.189873, 0.877087, 0.701434, 0.686591, 0.819066, 0.581826, 0.452287, 0.648421, 0.814646, 0.711528, 0.67888, 0.803137, 0.335464, 0.433189, 0.248543, 0.863502, 0.093987, 0.920554, 0.610589, 0.803123, 0.507438, 0.213006, 0.635474, 0.897421, 0.425198, 0.959406, 0.744501, 0.544635, 0.85241, 0.886215, 0.581579, 0.0143649, 0.787452, 0.528399, 0.927382, 0.666474, 0.88946, 0.171892, 0.908565, 0.72192, 0.623877, 0.319243, 0.442565, 0.560949, 0.629502, 0.731693, 0.893384, 0.529135, 0.465402, 0.643891, 0.432774, 0.237178, 0.729202, 0.254623, 0.793939, 0.0540441, 0.916015, 0.547684, 0.291954, 0.378851, 0.992345, 0.675009, 0.445953, 0.451707, 0.384178, 0.64188, 0.966359, 0.386137, 0.329766, 0.638585, 0.351205, 0.217723, 0.409932, 0.798015, 0.376577, 0.911426, 0.896666, 0.0532622, 0.364394, 0.978061, 0.431195, 0.655887, 0.511757, 0.676384, 0.0226849, 0.429696, 0.27891, 0.1292, 0.442489, 0.101619, 0.760526, 0.811433, 0.0115432, 0.0957934, 0.519178, 0.058131, 0.0607445, 0.570893, 0.486126, 0.334195, 0.16865, 0.194041, 0.15197, 0.39907, 0.39531, 0.66054, 0.368456, 0.744452, 0.746989, 0.158254, 0.426506, 0.60924, 0.647154, 0.222331, 0.364358, 0.37559, 0.91225, 0.516574, 0.0581788, 0.335011, 0.0888585, 0.491322, 0.991695, 0.236961, 0.223844, 0.438208, 0.566438, 0.816695, 0.97718, 0.151424, 0.991711, 0.762377, 0.884529, 0.524874, 0.70195, 0.0683297, 0.629884, 0.721056, 0.136015, 0.181119, 0.776701, 0.628543, 0.843678, 0.925435, 0.0315292, 0.723458, 0.0801435, 0.347634, 0.0693801, 0.271896, 0.419464, 0.389288, 0.809939, 0.119708, 0.961704, 0.0539787, 0.0928968, 0.494872, 0.742237, 0.632632, 0.427045, 0.99255, 0.38223, 0.944426, 0.55129, 0.120628, 0.199883, 0.114481, 0.695233, 0.549377, 0.48581, 0.469033, 0.0897173, 0.972331, 0.207159, 0.605164, 0.251204, 0.0292478, 0.249625, 0.851037, 0.985599, 0.851117, 0.492, 0.128298, 0.587453, 0.492101, 0.542004, 0.596331, 0.130162, 0.132661, 0.114587, 0.446182, 0.62133, 0.345193, 0.970332, 0.304415, 0.711366, 0.593442, 0.238918, 0.49472, 0.219047, 0.882048, 0.655913, 0.0858985, 0.0448975, 0.744293, 0.559759, 0.727168, 0.948462, 0.872775, 0.887675, 0.97566, 0.602118, 0.59441, 0.365167, 0.0178453, 0.677012, 0.000469322, 0.462239, 0.406242, 0.220816, 0.534038, 0.754588, 0.988072, 0.674532, 0.984156, 0.938116, 0.726921, 0.0813114, 0.0698321, 0.53369, 0.52899, 0.373353, 0.715339, 0.648526, 0.203647, 0.679892, 0.138689, 0.717086, 0.130382, 
0.641169, 0.935975, 0.234003, 0.785587, 0.524012, 0.350948, 0.867169, 0.516038, 0.0972155, 0.204833, 0.44361, 0.272956, 0.963037, 0.311152, 0.604657, 0.142892, 0.141633, 0.619187, 0.29317, 0.726191, 0.443992, 0.251751, 0.403269, 0.093485, 0.0333194, 0.732796, 0.45749, 0.688286, 0.440866, 0.408456, 0.973198, 0.229783, 0.31757, 0.330477, 0.461278, 0.373952, 0.51402, 0.759988, 0.542619, 0.329818, 0.51143, 0.70557, 0.257815, 0.989691, 0.832469, 0.635778, 0.73187, 0.878585, 0.435296, 0.303173, 0.834875, 0.581567, 0.99194, 0.406117, 0.0915859, 0.05272, 0.331072, 0.508428, 0.794462, 0.19633, 0.768132, 0.293733, 0.295751, 0.191498, 0.257508, 0.0606061, 0.469303, 0.875147, 0.663145, 0.588876, 0.072709, 0.648664, 0.92418, 0.366776, 0.49849, 0.546391, 0.0735117, 0.0646461, 0.717583, 0.552678, 0.00543609, 0.180325, 0.506415, 0.602656, 0.281583, 0.251179, 0.251321, 0.747652, 0.802051, 0.236779, 0.834184, 0.510039, 0.0827592, 0.390959, 0.662091, 0.397174, 0.175574, 0.260066, 0.166856, 0.195407, 0.185696, 0.895769, 0.525751, 0.877596, 0.0125126, 0.482765, 0.872977, 0.825291, 0.444211, 0.503061, 0.559385, 0.877482, 0.292179, 0.816029, 0.50973, 0.714857, 0.0713315, 0.325027, 0.209972, 0.342602, 0.958246, 0.553407, 0.0536296, 0.775703, 0.716464, 0.574337, 0.88593, 0.56053, 0.185352, 0.988696, 0.0619346, 0.673126, 0.242359, 0.710365, 0.117933, 0.13089, 0.269189, 0.825123, 0.782908, 0.876054, 0.618151, 0.0318198, 0.191444, 0.671135, 0.709634, 0.691659, 0.871847, 0.449904, 0.614982, 0.754703, 0.295812, 0.436184, 0.460485, 0.559034, 0.971071, 0.0835596, 0.325722, 0.683165, 0.486895, 0.625746, 0.606516, 0.970145, 0.806648, 0.219093, 0.2069, 0.625969, 0.874585, 0.8453, 0.535089, 0.788458, 0.149386, 0.526117, 0.79274, 0.16895, 0.502175, 0.233237, 0.738958, 0.282391, 0.973136, 0.32103, 0.765401, 0.513252, 0.637431, 0.614987, 0.634239, 0.841921, 0.988349, 0.518761, 0.400584, 0.37837, 0.669983, 0.825831, 0.865357, 0.0415287, 0.632536, 0.461784, 0.96576, 0.105234, 0.437686, 0.389517, 0.158363, 0.927401, 0.24937, 0.270551, 0.0975506, 0.305709, 0.359394, 0.130371, 0.374196, 0.344173, 0.519781, 0.993596, 0.544894, 0.131868, 0.128031, 0.901344, 0.176863, 0.464752, 0.646638, 0.326548, 0.587591, 0.247731, 0.89514, 0.224064, 0.0250704, 0.442619, 0.38589, 0.752707, 0.462433, 0.663886, 0.275957, 0.986956, 0.482773, 0.262438, 0.522622, 0.138242, 0.438995, 0.862814, 0.143452, 0.0950212, 0.881319, 0.576341, 0.298245, 0.879771, 0.194538, 0.455735, 0.435099, 0.890082, 0.173699, 0.895137, 0.939618, 0.174012, 0.910283, 0.120991, 0.280603, 0.320783, 0.189493, 0.933469, 0.449997, 0.741839, 0.955152, 0.53903, 0.336764, 0.183585, 0.372795, 0.0701182, 0.904878, 0.899548, 0.475179, 0.618857, 0.832811, 0.655082, 0.99559, 0.690304, 0.712506, 0.174392, 0.727426, 0.0838329, 0.937813, 0.0924383, 0.0719185, 0.23245, 0.927018, 0.105399, 0.10073, 0.634962, 0.772058, 0.755873, 0.894292, 0.0164735, 0.232915, 0.264722, 0.69504, 0.201968, 0.851764, 0.985482, 0.676076, 0.711461, 0.212235, 0.542927, 0.192808, 0.790123, 0.735341, 0.0462154, 0.597233, 0.275964, 0.497216, 0.72545, 0.576124, 0.182752, 0.400787, 0.0880176, 0.154409, 0.28781, 0.80831, 0.800509, 0.484427, 0.389254, 0.679824, 0.817012, 0.195804, 0.351946, 0.407288, 0.460556, 0.788487, 0.434113, 0.0804973, 0.650214, 0.851455, 0.0790833, 0.674424, 0.19845, 0.940721, 0.228667, 0.584472, 0.103982, 0.170653, 0.974974, 0.577703, 0.717665, 0.558994, 0.467201, 0.981192, 0.911936, 0.452082, 0.567041, 0.589434, 0.247865, 0.0231813, 0.947266, 0.343523, 0.0990505, 0.721825, 0.770362, 0.9321, 0.716394, 
0.378036, 0.340831, 0.922086, 0.367607, 0.227732, 0.754128, 0.467512, 0.185746, 0.712086, 0.827322, 0.874163, 0.80717, 0.47347, 0.266213, 0.237322, 0.697254, 0.287224, 0.287679, 0.708701, 0.447164, 0.966912, 0.0886931, 0.559857, 0.450786, 0.0322091, 0.518248, 0.218225, 0.0868987, 0.848535, 0.945032, 0.160127, 0.514129, 0.430556, 0.663002, 0.00521174, 0.352251, 0.348137, 0.899454, 0.296413, 0.508459, 0.592558, 0.708089, 0.98805, 0.142662, 0.813717, 0.0965165, 0.340975, 0.57454, 0.055429, 0.679794, 0.748739, 0.26523, 0.811711, 0.0803593, 0.689267, 0.948111, 0.261202, 0.0712096, 0.673757, 0.94536, 0.862339, 0.702204, 0.303758, 0.918833, 0.390345, 0.171957, 0.833633, 0.0352809, 0.0619804, 0.0634282, 0.839622, 0.830667, 0.87572, 0.974781, 0.932585, 0.428813, 0.0104468, 0.972434, 0.820552, 0.950346, 0.117655, 0.523806, 0.422234, 0.302305, 0.090535, 0.795667, 0.779505, 0.47993, 0.624446, 0.516996, 0.705758, 0.0779813, 0.645648, 0.691177, 0.252693, 0.597603, 0.0376277, 0.712566, 0.319926, 0.306324, 0.180171, 0.495661, 0.348016, 0.722844, 0.938575, 0.810016, 0.79415, 0.0286016, 0.341867, 0.586811, 0.908918, 0.617296, 0.194261, 0.489661, 0.58256, 0.272265, 0.464972, 0.261022, 0.992713, 0.084131, 0.861791, 0.939094, 0.666077, 0.557338, 0.76821, 0.918701, 0.945593, 0.222807, 0.631304, 0.491077, 0.631885, 0.960694, 0.555827, 0.0440505, 0.349719, 0.0220607, 0.964515, 0.721428, 0.802072, 0.475958, 0.537504, 0.470392, 0.839894, 0.129662, 0.753762, 0.478501, 0.344349, 0.554631, 0.646709, 0.475004, 0.569789, 0.661667, 0.550246, 0.864076, 0.322903, 0.193854, 0.449472, 0.0489499, 0.776404, 0.171195, 0.0490578, 0.387318, 0.471987, 0.281251, 0.530484, 0.871881, 0.00738331, 0.756389, 0.366249, 0.523742, 0.955334, 0.900337, 0.862764, 0.0251068, 0.807591, 0.329, 0.655859, 0.43806, 0.967088, 0.856614, 0.564868, 0.263827, 0.88379, 0.394052, 0.331613, 0.129589, 0.961031, 0.313168, 0.748675, 0.301716, 0.642899, 0.530788, 0.429123, 0.957201, 0.566146, 0.333744, 0.575132, 0.883897, 0.735218, 0.301634, 0.415317, 0.541749, 0.510866, 0.940398, 0.709419, 0.166982, 0.694368, 0.300638, 0.409976, 0.826152, 0.0791525, 0.430932, 0.556051, 0.16027, 0.659595, 0.299579, 0.982933, 0.218503, 0.622171, 0.22258, 0.710604, 0.0246091, 0.514223, 0.722444, 0.991928, 0.149312, 0.191685, 0.309154, 0.747623, 0.13437, 0.31049, 0.612046, 0.270341, 0.00458145, 0.359804, 0.439719, 0.190176, 0.723132, 0.64264, 0.263273, 0.862144, 0.76636, 0.860399, 0.963442, 0.113827, 0.937807, 0.697795, 0.340797, 0.0702309, 0.466482, 0.21466, 0.103165, 0.600706, 0.765315, 0.816003, 0.00213699, 0.880588, 0.300601, 0.697652, 0.706224, 0.405168, 0.735619, 0.503161, 0.823492, 0.297079, 0.993075, 0.1272, 0.928639, 0.766022, 0.862957, 0.115023, 0.37654, 0.997056, 0.442415, 0.10121, 0.576175, 0.568001, 0.054658, 0.539375, 0.0369961, 0.233537, 0.45195, 0.69698, 0.491197, 0.83229, 0.749307, 0.956535, 0.461022, 0.608285, 0.0708996, 0.858763, 0.83665, 0.0540675, 0.176012, 0.0782618, 0.383573, 0.10049, 0.17039, 0.837669, 0.824347, 0.672561, 0.226258, 0.993359, 0.828738, 0.745087, 0.847143, 0.214562, 0.0966094, 0.292813, 0.172754, 0.690461, 0.306624, 0.493792, 0.082351, 0.463749, 0.709288, 0.77587, 0.769697, 0.258762, 0.552895, 0.691946, 0.428443, 0.803173, 0.319662, 0.70558, 0.684336, 0.772958, 0.192036, 0.91128, 0.784571, 0.00169756, 0.0220194, 0.691098, 0.303216, 0.715526, 0.616449, 0.889499, 0.601632, 0.858683, 0.379488, 0.623479, 0.063779, 0.975611, 0.107904, 0.325446, 0.917735, 0.647174, 0.169204, 0.445308, 0.50126, 0.888059, 0.530384, 0.323393, 0.209886, 0.0370801, 
[... generated test-fixture data: ~21 lines of comma-separated pseudo-random floating-point values in [0, 1), omitted ...]
0.767514, 0.08101, 0.836228, 0.131367, 0.746135, 0.00965489, 0.145888, 0.943013, 0.215062, 0.178234, 0.460253, 0.488205, 0.061198, 0.504047, 0.267959, 0.316017, 0.554742, 0.656633, 0.778005, 0.49826, 0.167624, 0.528963, 0.482193, 0.89901, 0.822549, 0.479874, 0.65603, 0.466686, 0.771283, 0.549574, 0.949751, 0.402474, 0.00182143, 0.0884202, 0.353422, 0.44906, 0.803841, 0.925841, 0.923201, 0.537734, 0.321972, 0.377491, 0.726946, 0.356182, 0.478594, 0.871006, 0.354972, 0.629241, 0.136013, 0.135282, 0.917863, 0.831401, 0.718899, 0.307374, 0.891555, 0.150281, 0.468951, 0.148366, 0.575577, 0.716896, 0.444223, 0.78969, 0.93541, 0.760058, 0.540905, 0.964043, 0.0171063, 0.39869, 0.224293, 0.934343, 0.365121, 0.364891, 0.42817, 0.235662, 0.46574, 0.531042, 0.621616, 0.649751, 0.576603, 0.0734606, 0.620751, 0.127296, 0.00635519, 0.305983, 0.076342, 0.495677, 0.562979, 0.508511, 0.812624, 0.307106, 0.122217, 0.413897, 0.409494, 0.919129, 0.615194, 0.375754, 0.874112, 0.272596, 0.936665, 0.585845, 0.505604, 0.888888, 0.63179, 0.847612, 0.236553, 0.576794, 0.86346, 0.607459, 0.975899, 0.739587, 0.388672, 0.700388, 0.994525, 0.609612, 0.905807, 0.235086, 0.0540026, 0.966042, 0.270291, 0.502012, 0.86695, 0.164866, 0.0548146, 0.238537, 0.747805, 0.32025, 0.0936289, 0.840122, 0.111128, 0.932048, 0.280194, 0.120439, 0.0918159, 0.0826257, 0.553689, 0.244285, 0.125624, 0.281038, 0.654053, 0.884274, 0.0580714, 0.338571, 0.501211, 0.645873, 0.82703, 0.7876, 0.812349, 0.9168, 0.241476, 0.537967, 0.062039, 0.665578, 0.547241, 0.465356, 0.867855, 0.521594, 0.0769235, 0.0369526, 0.705496, 0.71834, 0.919237, 0.1668, 0.685412, 0.70845, 0.547079, 0.682376, 0.158527, 0.183392, 0.969974, 0.635421, 0.656028, 0.793447, 0.0408104, 0.757823, 0.104907, 0.198083, 0.902505, 0.15909, 0.4763, 0.342744, 0.832242, 0.400666, 0.643322, 0.787608, 0.000399353, 0.476601, 0.394971, 0.0931529, 0.621979, 0.736769, 0.1838, 0.919627, 0.465261, 0.247757, 0.670256, 0.144105, 0.421988, 0.571865, 0.936538, 0.416873, 0.4175, 0.475867, 0.803234, 0.98123, 0.249467, 0.799148, 0.77949, 0.999975, 0.900311, 0.291606, 0.00782618, 0.0601554, 0.282416, 0.0800648, 0.882891, 0.907917, 0.661172, 0.459114, 0.488762, 0.537694, 0.657679, 0.727417, 0.349662, 0.198227, 0.941466, 0.271296, 0.977564, 0.119668, 0.604557, 0.464826, 0.117033, 0.843198, 0.336332, 0.857577, 0.163204, 0.64604, 0.724511, 0.3183, 0.830594, 0.302087, 0.152319, 0.0437645, 0.0787377, 0.496651, 0.154927, 0.345418, 0.646263, 0.613318, 0.245296, 0.114565, 0.533264, 0.15273, 0.653966, 0.395528, 0.26212, 0.188835, 0.68635, 0.229349, 0.398208, 0.9319, 0.381751, 0.0209623, 0.943169, 0.866172, 0.155764, 0.898351, 0.823702, 0.0191588, 0.661247, 0.931646, 0.400447, 0.994346, 0.341126, 0.564498, 0.624447, 0.158792, 0.808065, 0.679964, 0.485722, 0.779868, 0.067149, 0.388791, 0.581165, 0.354829, 0.85431, 0.933481, 0.168851, 0.000294656, 0.856926, 0.997469, 0.353068, 0.943656, 0.384281, 0.318365, 0.976918, 0.137135, 0.695559, 0.567548, 0.241029, 0.00823528, 0.93008, 0.738601, 0.620383, 0.166429, 0.903855, 0.551089, 0.83635, 0.309258, 0.667892, 0.0232796, 0.735321, 0.546513, 0.111919, 0.394206, 0.904596, 0.00708398, 0.32197, 0.15307, 0.460674, 0.546714, 0.71124, 0.880599, 0.149789, 0.161291, 0.393216, 0.266991, 0.488378, 0.703033, 0.48592, 0.740042, 0.434032, 0.698467, 0.263984, 0.0416684, 0.977174, 0.383117, 0.221453, 0.899662, 0.807155, 0.462059, 0.888809, 0.623808, 0.0997352, 0.433326, 0.661246, 0.343671, 0.291103, 0.990272, 0.825014, 0.745993, 0.523319, 0.472344, 0.0555014, 0.431283, 0.493758, 0.998934, 
0.2667, 0.970814, 0.872602, 0.446468, 0.871257, 0.0794379, 0.155252, 0.82933, 0.574495, 0.585308, 0.581221, 0.220756, 0.281271, 0.817061, 0.656363, 0.288968, 0.945296, 0.378223, 0.990441, 0.585566, 0.358747, 0.709879, 0.83303, 0.0129373, 0.971915, 0.656242, 0.353935, 0.953862, 0.285984, 0.478817, 0.450383, 0.251035, 0.695874, 0.0156363, 0.991913, 0.0198609, 0.537361, 0.860761, 0.673861, 0.373493, 0.602062, 0.0114897, 0.287357, 0.265764, 0.349716, 0.554349, 0.00616973, 0.970814, 0.837191, 0.643539, 0.112861, 0.184371, 0.0154453, 0.72955, 0.0828188, 0.845026, 0.0601916, 0.880928, 0.283457, 0.175913, 0.936213, 0.0927206, 0.35249, 0.735381, 0.969044, 0.37677, 0.228734, 0.496109, 0.637784, 0.00295316, 0.841655, 0.975194, 0.489921, 0.876785, 0.752131, 0.876938, 0.753426, 0.531065, 0.0916247, 0.178488, 0.728463, 0.142101, 0.910096, 0.674422, 0.681664, 0.740674, 0.0603205, 0.628937, 0.76967, 0.518407, 0.609922, 0.501698, 0.539959, 0.278785, 0.211401, 0.80414, 0.891716, 0.262146, 0.0971195, 0.380371, 0.546597, 0.0837777, 0.930768, 0.229421, 0.875001, 0.445601, 0.872772, 0.958337, 0.78924, 0.443299, 0.823673, 0.00935907, 0.743904, 0.235989, 0.309456, 0.603649, 0.148912, 0.290181, 0.904612, 0.610951, 0.512436, 0.467655, 0.845492, 0.206641, 0.710138, 0.0959027, 0.148078, 0.863241, 0.0265704, 0.478677, 0.871582, 0.406062, 0.374668, 0.378334, 0.623243, 0.472581, 0.173382, 0.179565, 0.945537, 0.172663, 0.854806, 0.440089, 0.592258, 0.221576, 0.309153, 0.811548, 0.661643, 0.215244, 0.787424, 0.315975, 0.845402, 0.44165, 0.394172, 0.296902, 0.156559, 0.778107, 0.20306, 0.349658, 0.771635, 0.862133, 0.0375458, 0.511418, 0.226177, 0.925131, 0.711151, 0.545341, 0.534673, 0.186163, 0.379189, 0.4613, 0.108788, 0.399324, 0.37091, 0.96608, 0.624196, 0.602313, 0.00545327, 0.389537, 0.0303548, 0.362548, 0.593127, 0.80781, 0.634135, 0.03911, 0.131299, 0.379449, 0.0279775, 0.359845, 0.731879, 0.0169809, 0.170011, 0.140762, 0.278804, 0.0750271, 0.106302, 0.59557, 0.909087, 0.0572952, 0.652158, 0.805364, 0.175926, 0.189026, 0.864383, 0.786211, 0.767197, 0.718582, 0.153246, 0.960772, 0.0481537, 0.594105, 0.23968, 0.46001, 0.759285, 0.325942, 0.305964, 0.284812, 0.80936, 0.600545, 0.571067, 0.535677, 0.373421, 0.0752662, 0.757014, 0.841804, 0.886108, 0.249164, 0.0348715, 0.883352, 0.705347, 0.629279, 0.196914, 0.340224, 0.617199, 0.390929, 0.271045, 0.512861, 0.900765, 0.265655, 0.811987, 0.647324, 0.236222, 0.0818048, 0.190853, 0.000218703, 0.50519, 0.00105905, 0.762038, 0.0207905, 0.994609, 0.617184, 0.873554, 0.0965242, 0.713469, 0.702956, 0.767263, 0.228942, 0.777586, 0.0391998, 0.990284, 0.304829, 0.308069, 0.389208, 0.422205, 0.264052, 0.122489, 0.00340829, 0.777628, 0.163683, 0.650151, 0.322333, 0.920432, 0.79389, 0.617836, 0.660512, 0.830666, 0.597568, 0.558382, 0.671335, 0.862849, 0.420052, 0.461348, 0.749726, 0.984223, 0.60897, 0.850785, 0.837481, 0.856733, 0.165479, 0.940752, 0.7099, 0.371881, 0.0919674, 0.141736, 0.227378, 0.200822, 0.639784, 0.247916, 0.572339, 0.743998, 0.605198, 0.0173636, 0.187632, 0.405288, 0.446981, 0.722107, 0.435873, 0.181586, 0.106386, 0.0896067, 0.307722, 0.219436, 0.429529, 0.344989, 0.362277, 0.0593035, 0.763482, 0.445489, 0.330653, 0.364106, 0.385999, 0.918634, 0.132045, 0.109024, 0.0785774, 0.622362, 0.381878, 0.0150794, 0.316541, 0.0111919, 0.536969, 0.598133, 0.477955, 0.700706, 0.789294, 0.401944, 0.645036, 0.384389, 0.723184, 0.240664, 0.357563, 0.16782, 0.191381, 0.15069, 0.478654, 0.231836, 0.923415, 0.549508, 0.944393, 0.499945, 0.95037, 0.0860303, 0.679979, 0.869618, 
0.435324, 0.827285, 0.816131, 0.913721, 0.354395, 0.634003, 0.509328, 0.861692, 0.614431, 0.200596, 0.964208, 0.657615, 0.311227, 0.412868, 0.459556, 0.389972, 0.639212, 0.402353, 0.696483, 0.0435619, 0.285246, 0.262497, 0.467145, 0.236525, 0.913868, 0.123966, 0.873677, 0.780057, 0.0459068, 0.360648, 0.838724, 0.409429, 0.932976, 0.381423, 0.307034, 0.550048, 0.872205, 0.56814, 0.750442, 0.113688, 0.370818, 0.214654, 0.0508989, 0.346999, 0.145708, 0.283303, 0.306971, 0.228532, 0.11092, 0.706493, 0.517462, 0.936693, 0.840298, 0.361782, 0.477729, 0.94521, 0.411169, 0.995889, 0.123281, 0.946468, 0.804008, 0.794051, 0.857532, 0.313439, 0.573503, 0.514412, 0.750911, 0.660566, 0.55739, 0.97204, 0.0753414, 0.954399, 0.159923, 0.692097, 0.203624, 0.927947, 0.690131, 0.251044, 0.697366, 0.168819, 0.673313, 0.274458, 0.147431, 0.332768, 0.985573, 0.505472, 0.397147, 0.392911, 0.632329, 0.868903, 0.880853, 0.670591, 0.628933, 0.543024, 0.52413, 0.820767, 0.311598, 0.248859, 0.742366, 0.141775, 0.25742, 0.0757426, 0.850661, 0.0659472, 0.384148, 0.420724, 0.973471, 0.942875, 0.722862, 0.250896, 0.00551143, 0.0414477, 0.600163, 0.32764, 0.0839769, 0.608694, 0.692732, 0.940255, 0.272977, 0.636382, 0.0139347, 0.00104183, 0.0386415, 0.971171, 0.146773, 0.00540592, 0.131402, 0.0815217, 0.77673, 0.697122, 0.934273, 0.560839, 0.623937, 0.158716, 0.313411, 0.931876, 0.0802085, 0.865537, 0.114872, 0.515546, 0.581057, 0.0749937, 0.383747, 0.873575, 0.611048, 0.517091, 0.417579, 0.724859, 0.959788, 0.0533205, 0.339808, 0.0167258, 0.165728, 0.411227, 0.959074, 0.849294, 0.336029, 0.95474, 0.956261, 0.393124, 0.0194744, 0.000992737, 0.54519, 0.701655, 0.349581, 0.409216, 0.131143, 0.466281, 0.873485, 0.166993, 0.202028, 0.59162, 0.852567, 0.775583, 0.22694, 0.11181, 0.270426, 0.592845, 0.0238296, 0.827968, 0.214972, 0.175604, 0.798684, 0.13578, 0.851981, 0.994326, 0.373355, 0.324792, 0.545002, 0.710066, 0.847072, 0.440287, 0.50767, 0.978756, 0.316187, 0.162747, 0.237358, 0.596382, 0.204115, 0.104693, 0.496969, 0.192991, 0.94269, 0.125823, 0.424083, 0.296575, 0.970382, 0.766013, 0.928947, 0.799516, 0.641478, 0.769511, 0.154549, 0.56211, 0.783829, 0.603426, 0.336835, 0.425253, 0.825161, 0.775855, 0.611405, 0.0330463, 0.33534, 0.363623, 0.656339, 0.341059, 0.294683, 0.072706, 0.467624, 0.857905, 0.586907, 0.127862, 0.0958836, 0.41597, 0.484593, 0.375203, 0.0235657, 0.898615, 0.92354, 0.268047, 0.217567, 0.998058, 0.790891, 0.264616, 0.643863, 0.69957, 0.224748, 0.0755962, 0.35056, 0.131899, 0.542324, 0.368258, 0.645118, 0.163377, 0.714073, 0.487837, 0.758985, 0.57821, 0.822528, 0.519542, 0.841026, 0.572584, 0.0756279, 0.143607, 0.385603, 0.728936, 0.341502, 0.841468, 0.753492, 0.468463, 0.252173, 0.346816, 0.453043, 0.897406, 0.455856, 0.381292, 0.970106, 0.712188, 0.225131, 0.145392, 0.192588, 0.736191, 0.52139, 0.49617, 0.640443, 0.0726618, 0.480087, 0.690714, 0.282055, 0.894357, 0.617277, 0.152901, 0.243754, 0.656954, 0.406464, 0.122146, 0.240153, 0.256626, 0.822585, 0.934248, 0.506646, 0.694571, 0.166801, 0.635915, 0.365792, 0.439549, 0.444856, 0.478435, 0.742428, 0.275742, 0.820728, 0.516879, 0.198363, 0.501988, 0.858141, 0.539035, 0.856738, 0.00251694, 0.48064, 0.0702053, 0.172868, 0.392388, 0.106483, 0.335064, 0.320851, 0.803738, 0.939155, 0.235864, 0.468474, 0.462578, 0.29897, 0.494682, 0.965463, 0.424026, 0.385577, 0.813073, 0.215964, 0.849215, 0.0119733, 0.587684, 0.28297, 0.878928, 0.3545, 0.839369, 0.211762, 0.505819, 0.390852, 0.754333, 0.627557, 0.514172, 0.47467, 0.494604, 0.390526, 0.0615872, 0.687667, 
0.28645, 0.00341233, 0.514379, 0.453768, 0.352923, 0.548501, 0.803326, 0.355522, 0.969599, 0.323054, 0.0563699, 0.649562, 0.847051, 0.414793, 0.296222, 0.0415952, 0.561045, 0.663737, 0.73908, 0.344458, 0.38393, 0.538131, 0.564966, 0.80075, 0.0590656, 0.339149, 0.122435, 0.52556, 0.928764, 0.0608846, 0.747374, 0.716633, 0.957798, 0.818284, 0.277545, 0.465612, 0.569795, 0.598696, 0.251668, 0.0703928, 0.499936, 0.41683, 0.822097, 0.998107, 0.134309, 0.155362, 0.424333, 0.971144, 0.135916, 0.137756, 0.855301, 0.950257, 0.526271, 0.230157, 0.195093, 0.071293, 0.368317, 0.148256, 0.0478854, 0.820473, 0.390425, 0.868324, 0.581325, 0.798427, 0.681233, 0.649855, 0.461053, 0.449711, 0.452545, 0.351996, 0.117592, 0.259018, 0.696514, 0.173728, 0.651144, 0.99512, 0.226474, 0.499346, 0.724982, 0.812642, 0.786676, 0.258136, 0.121056, 0.70422, 0.354921, 0.340765, 0.740536, 0.588562, 0.822997, 0.231297, 0.149709, 0.990806, 0.868476, 0.296367, 0.396294, 0.0670195, 0.950553, 0.638583, 0.266305, 0.842912, 0.703177, 0.422618, 0.911208, 0.939006, 0.957994, 0.109891, 0.877659, 0.609161, 0.177602, 0.267662, 0.111445, 0.98173, 0.947085, 0.437708, 0.947995, 0.458174, 0.339516, 0.151839, 0.987987, 0.477754, 0.53734, 0.865415, 0.287151, 0.826549, 0.877143, 0.263663, 0.178255, 0.291243, 0.598879, 0.959213, 0.723151, 0.225439, 0.376546, 0.615932, 0.251752, 0.400445, 0.633658, 0.958621, 0.702791, 0.954957, 0.318743, 0.459994, 0.257323, 0.754593, 0.471568, 0.511838, 0.742679, 0.658338, 0.630268, 0.727203, 0.160336, 0.196, 0.751544, 0.938162, 0.567098, 0.949888, 0.355567, 0.965457, 0.0857119, 0.486897, 0.278068, 0.548048, 0.224767, 0.0499265, 0.0183716, 0.909204, 0.528779, 0.476961, 0.50481, 0.0480282, 0.388792, 0.362907, 0.469301, 0.541633, 0.880414, 0.184411, 0.441109, 0.657236, 0.728948, 0.264312, 0.640115, 0.0896601, 0.298694, 0.436818, 0.962136, 0.744225, 0.488834, 0.95336, 0.192223, 0.484707, 0.945129, 0.44818, 0.605894, 0.601704, 0.617095, 0.39637, 0.759154, 0.718362, 0.704336, 0.474691, 0.117038, 0.376634, 0.371886, 0.860787, 0.421998, 0.317667, 0.23051, 0.681704, 0.0047523, 0.439035, 0.547683, 0.745078, 0.282497, 0.608041, 0.988396, 0.647564, 0.51635, 0.374832, 0.817182, 0.449299, 0.256153, 0.858, 0.898385, 0.311486, 0.823174, 0.40091, 0.134722, 0.305494, 0.564294, 0.898109, 0.0811435, 0.615287, 0.190389, 0.239776, 0.0787815, 0.833256, 0.714881, 0.994731, 0.241663, 0.772977, 0.111288, 0.493099, 0.792154, 0.00464981, 0.852142, 0.667936, 0.336471, 0.673554, 0.22382, 0.889906, 0.0776155, 0.523798, 0.76821, 0.143542, 0.797831, 0.40701, 0.520776, 0.382292, 0.712893, 0.2759, 0.191533, 0.835125, 0.0941685, 0.162417, 0.635805, 0.383042, 0.971267, 0.0176181, 0.246271, 0.700082, 0.141054, 0.182837, 0.0250806, 0.176226, 0.49263, 0.559441, 0.187575, 0.292103, 0.92846, 0.789409, 0.393551, 0.377547, 0.947155, 0.128638, 0.452715, 0.0434053, 0.863882, 0.642877, 0.950036, 0.617336, 0.722055, 0.193565, 0.799629, 0.437422, 0.351018, 0.773917, 0.995013, 0.18258, 0.934327, 0.198648, 0.119767, 0.584644, 0.590204, 0.7835, 0.232693, 0.577164, 0.732372, 0.446173, 0.306383, 0.00327525, 0.816126, 0.282531, 0.134947, 0.289544, 0.718327, 0.907322, 0.254399, 0.621861, 0.314335, 0.375737, 0.0506785, 0.313753, 0.931691, 0.0180866, 0.83853, 0.81125, 0.971104, 0.145964, 0.396113, 0.524393, 0.970057, 0.325058, 0.406099, 0.540443, 0.298112, 0.958321, 0.729839, 0.248967, 0.8588, 0.487625, 0.455911, 0.845357, 0.00339663, 0.560143, 0.949738, 0.923877, 0.845575, 0.978285, 0.984827, 0.96773, 0.552703, 0.729922, 0.845661, 0.513045, 0.806496, 0.648893, 
0.162614, 0.0328184, 0.75913, 0.156401, 0.449324, 0.67392, 0.0152599, 0.261176, 0.353996, 0.899338, 0.496926, 0.464873, 0.049658, 0.967735, 0.425144, 0.899519, 0.570731, 0.286559, 0.380081, 0.476342, 0.704329, 0.171593, 0.628955, 0.477934, 0.965496, 0.152881, 0.207124, 0.502824, 0.944706, 0.74156, 0.256649, 0.0892003, 0.0853848, 0.728572, 0.450564, 0.953785, 0.0167663, 0.309596, 0.901346, 0.864796, 0.200647, 0.0863206, 0.167901, 0.717276, 0.480477, 0.670538, 0.682435, 0.0977151, 0.0338763, 0.0147863, 0.661508, 0.904189, 0.648901, 0.956698, 0.41554, 0.172787, 0.595283, 0.431342, 0.293568, 0.785942, 0.919609, 0.166602, 0.320775, 0.528073, 0.0990903, 0.265096, 0.438566, 0.547424, 0.576834, 0.928281, 0.901388, 0.376757, 0.0205771, 0.904413, 0.584102, 0.206187, 0.590228, 0.182499, 0.292812, 0.526085, 0.899114, 0.335858, 0.875331, 0.655488, 0.63314, 0.464719, 0.269843, 0.887705, 0.373332, 0.563933, 0.0850486, 0.920482, 0.845155, 0.129019, 0.401493, 0.833988, 0.0971413, 0.440951, 0.806517, 0.117265, 0.485826, 0.24519, 0.842618, 0.771493, 0.619761, 0.476138, 0.61296, 0.378665, 0.255981, 0.747436, 0.0223136, 0.664901, 0.797449, 0.853001, 0.978497, 0.530371, 0.50038, 0.152446, 0.577182, 0.817145, 0.667901, 0.865757, 0.0738946, 0.0579322, 0.564019, 0.903349, 0.105812, 0.365451, 0.234994, 0.516525, 0.926631, 0.662471, 0.505573, 0.690623, 0.684882, 0.123012, 0.411239, 0.565191, 0.854881, 0.0537899, 0.892327, 0.69524, 0.282411, 0.22138, 0.838472, 0.647229, 0.69415, 0.593122, 0.711694, 0.333028, 0.292645, 0.471287, 0.873539, 0.112766, 0.0980646, 0.550198, 0.0688095, 0.287355, 0.250152, 0.424834, 0.542063, 0.82079, 0.295398, 0.332226, 0.873103, 0.542578, 0.796689, 0.461834, 0.426734, 0.13495, 0.0356655, 0.992334, 0.906661, 0.306737, 0.194348, 0.943622, 0.685089, 0.924932, 0.718253, 0.73489, 0.0170687, 0.787715, 0.860696, 0.351521, 0.208491, 0.697224, 0.747865, 0.635574, 0.379693, 0.837117, 0.710662, 0.00432677, 0.229939, 0.867585, 0.358378, 0.330032, 0.733357, 0.467798, 0.782565, 0.365293, 0.2413, 0.57351, 0.770883, 0.392699, 0.383293, 0.308602, 0.960606, 0.271709, 0.512113, 0.914453, 0.66306, 0.151019, 0.637687, 0.163423, 0.534973, 0.483551, 0.921052, 0.410297, 0.0939397, 0.508608, 0.985296, 0.890239, 0.732254, 0.699869, 0.350539, 0.832559, 0.533377, 0.0323833, 0.982445, 0.197072, 0.538365, 0.0025993, 0.163348, 0.802389, 0.719949, 0.449735, 0.532396, 0.574884, 0.2336, 0.782294, 0.738726, 0.75876, 0.829218, 0.0187037, 0.617774, 0.709808, 0.889014, 0.796174, 0.717802, 0.585036, 0.603948, 0.627945, 0.969691, 0.926295, 0.722668, 0.951171, 0.27665, 0.179878, 0.833592, 0.159347, 0.804025, 0.633766, 0.316839, 0.534361, 0.0940644, 0.407393, 0.400498, 0.766562, 0.172892, 0.209141, 0.759968, 0.577934, 0.854032, 0.573314, 0.244756, 0.282187, 0.912426, 0.989598, 0.875222, 0.681764, 0.47806, 0.160706, 0.591462, 0.66708, 0.054528, 0.581186, 0.816524, 0.760334, 0.215596, 0.0671788, 0.602943, 0.468351, 0.777716, 0.412426, 0.69171, 0.267339, 0.625768, 0.617816, 0.0703638, 0.751811, 0.738653, 0.0221117, 0.849286, 0.823695, 0.637441, 0.31406, 0.607907, 0.51686, 0.943545, 0.679253, 0.0307176, 0.862303, 0.0419296, 0.502102, 0.491889, 0.726784, 0.250574, 0.539501, 0.790374, 0.996973, 0.392414, 0.189252, 0.14281, 0.178591, 0.462199, 0.27548, 0.559133, 0.187847, 0.145499, 0.699106, 0.313703, 0.2329, 0.24947, 0.508881, 0.467391, 0.867357, 0.0583741, 0.668682, 0.188203, 0.302144, 0.437842, 0.835275, 0.951587, 0.853957, 0.662607, 0.341119, 0.778616, 0.0206911, 0.172403, 0.574422, 0.120645, 0.307521, 0.65196, 0.188896, 0.387661, 
0.766046, 0.892257, 0.955558, 0.316392, 0.136856, 0.333727, 0.986896, 0.608826, 0.970487, 0.676149, 0.288542, 0.25434, 0.961327, 0.51671, 0.809868, 0.651024, 0.209427, 0.828585, 0.785459, 0.236438, 0.453734, 0.599005, 0.298851, 0.582575, 0.989621, 0.395471, 0.893503, 0.38376, 0.536306, 0.415724, 0.861456, 0.3366, 0.0312893, 0.190346, 0.396119, 0.697469, 0.994934, 0.128262, 0.419153, 0.671446, 0.0398925, 0.876904, 0.62958, 0.944477, 0.619219, 0.201925, 0.80216, 0.86131, 0.105909, 0.623381, 0.549769, 0.211667, 0.968921, 0.782577, 0.680614, 0.00695279, 0.433972, 0.0643976, 0.339294, 0.429445, 0.0375942, 0.824152, 0.0257821, 0.94974, 0.429498, 0.263656, 0.223601, 0.786552, 0.182119, 0.406113, 0.129883, 0.0898836, 0.933753, 0.217537, 0.29338, 0.268674, 0.327087, 0.454318, 0.276973, 0.646845, 0.87259, 0.413006, 0.988415, 0.720624, 0.804014, 0.847937, 0.176427, 0.964545, 0.190957, 0.869556, 0.790345, 0.43425, 0.177486, 0.448294, 0.845207, 0.578581, 0.505744, 0.461785, 0.831601, 0.447018, 0.197002, 0.265265, 0.225108, 0.0833506, 0.395207, 0.553318, 0.548843, 0.932681, 0.864908, 0.460695, 0.959091, 0.926696, 0.779957, 0.536611, 0.636922, 0.40484, 0.200688, 0.217747, 0.0599353, 0.779543, 0.57964, 0.0783717, 0.541801, 0.0562739, 0.564573, 0.826421, 0.264191, 0.320995, 0.26849, 0.954359, 0.869601, 0.0734364, 0.519532, 0.499068, 0.456206, 0.886625, 0.825898, 0.444915, 0.680324, 0.0939236, 0.356988, 0.448587, 0.718458, 0.234452, 0.36613, 0.282905, 0.0892427, 0.560307, 0.0178707, 0.431653, 0.367383, 0.370834, 0.532423, 0.490484, 0.2183, 0.935119, 0.818088, 0.940187, 0.278941, 0.443191, 0.478899, 0.107819, 0.958645, 0.348994, 0.108032, 0.890401, 0.213198, 0.0298222, 0.428392, 0.875241, 0.225698, 0.795573, 0.217211, 0.0181693, 0.102138, 0.670195, 0.833803, 0.249095, 0.0644382, 0.183264, 0.055327, 0.34412, 0.916767, 0.619247, 0.931403, 0.77136, 0.839997, 0.876792, 0.090668, 0.207728, 0.994736, 0.0285567, 0.0944899, 0.50845, 0.156887, 0.0141242, 0.302653, 0.597414, 0.928757, 0.372808, 0.835983, 0.604138, 0.530675, 0.692292, 0.299225, 0.407303, 0.17708, 0.581985, 0.516017, 0.966934, 0.160866, 0.430206, 0.834298, 0.615807, 0.154265, 0.0234658, 0.0651562, 0.828005, 0.252543, 0.85958, 0.489373, 0.0177778, 0.207443, 0.751971, 0.843683, 0.406198, 0.736118, 0.65596, 0.621032, 0.754299, 0.758707, 0.0285996, 0.698346, 0.090256, 0.65823, 0.387805, 0.520675, 0.638833, 0.144534, 0.0137919, 0.173456, 0.00782472, 0.399208, 0.464339, 0.0133367, 0.92029, 0.50911, 0.78332, 0.0126389, 0.944962, 0.699823, 0.863908, 0.628106, 0.371214, 0.440421, 0.934672, 0.0256003, 0.81409, 0.803173, 0.632829, 0.677038, 0.700546, 0.933585, 0.569808, 0.788691, 0.55276, 0.794442, 0.451989, 0.334972, 0.181076, 0.0991012, 0.562334, 0.560958, 0.932214, 0.244108, 0.966396, 0.815003, 0.210763, 0.918568, 0.253558, 0.133928, 0.752249, 0.990286, 0.984486, 0.725318, 0.470781, 0.242256, 0.440868, 0.416847, 0.884916, 0.716285, 0.863908, 0.15809, 0.880761, 0.433683, 0.633511, 0.552547, 0.111377, 0.151011, 0.615799, 0.522434, 0.539279, 0.265123, 0.407127, 0.853945, 0.41649, 0.682944, 0.354664, 0.222762, 0.128292, 0.725612, 0.190819, 0.945783, 0.424301, 0.827427, 0.242931, 0.267628, 0.460562, 0.876041, 0.42958, 0.505738, 0.460771, 0.268692, 0.374312, 0.485857, 0.42256, 0.68238, 0.378584, 0.38224, 0.250217, 0.657907, 0.760952, 0.427835, 0.668097, 0.814126, 0.680257, 0.958357, 0.556202, 0.831808, 0.461898, 0.0379653, 0.030702, 0.583122, 0.528302, 0.899793, 0.289771, 0.912226, 0.0366062, 0.69732, 0.545396, 0.864096, 0.859456, 0.338904, 0.336195, 0.932924, 
0.2239, 0.241701, 0.551533, 0.543849, 0.0833371, 0.552945, 0.130344, 0.887318, 0.0753924, 0.748455, 0.857814, 0.162206, 0.77325, 0.107328, 0.223164, 0.323524, 0.836608, 0.927458, 0.136319, 0.178003, 0.465426, 0.68789, 0.742066, 0.365342, 0.665927, 0.126526, 0.622908, 0.205135, 0.841494, 0.910807, 0.269525, 0.166037, 0.604826, 0.877741, 0.339587, 0.106844, 0.224326, 0.173063, 0.313145, 0.599577, 0.79985, 0.409741, 0.74418, 0.16675, 0.683554, 0.392779, 0.619441, 0.471352, 0.834384, 0.614718, 0.872245, 0.234561, 0.326976, 0.314452, 0.88116, 0.0115019, 0.699577, 0.283755, 0.820075, 0.142817, 0.296488, 0.109529, 0.0916776, 0.483715, 0.0887671, 0.28953, 0.508167, 0.62469, 0.161046, 0.495873, 0.732922, 0.380595, 0.883255, 0.0997583, 0.827542, 0.551911, 0.708546, 0.233081, 0.396506, 0.562756, 0.237265, 0.519324, 0.450772, 0.460418, 0.978431, 0.813139, 0.970238, 0.548929, 0.367882, 0.397659, 0.453543, 0.259446, 0.345573, 0.343475, 0.301507, 0.709123, 0.375367, 0.618997, 0.483399, 0.999581, 0.91137, 0.000650729, 0.728203, 0.199909, 0.541405, 0.366193, 0.591566, 0.74248, 0.847444, 0.1818, 0.368208, 0.318079, 0.587773, 0.971922, 0.135016, 0.277245, 0.755322, 0.782739, 0.901244, 0.168333, 0.963281, 0.303083, 0.279084, 0.69983, 0.135039, 0.750534, 0.315619, 0.114457, 0.76844, 0.431874, 0.494145, 0.0207518, 0.253657, 0.80844, 0.280064, 0.433059, 0.688001, 0.445098, 0.733557, 0.561655, 0.855851, 0.386489, 0.637624, 0.168769, 0.804168, 0.415863, 0.660961, 0.729496, 0.847697, 0.412275, 0.346749, 0.653185, 0.501166, 0.0999276, 0.0776018, 0.803858, 0.137053, 0.396814, 0.264231, 0.0674385, 0.445475, 0.980143, 0.210441, 0.243263, 0.216573, 0.958617, 0.694359, 0.148892, 0.229197, 0.335392, 0.423843, 0.178855, 0.415413, 0.0358149, 0.579731, 0.700792, 0.914333, 0.85334, 0.0785062, 0.0482632, 0.963727, 0.170779, 0.648669, 0.362862, 0.835782, 0.369952, 0.385661, 0.504311, 0.916275, 0.602668, 0.663616, 0.513957, 0.873483, 0.495562, 0.33248, 0.649582, 0.0375281, 0.285771, 0.223602, 0.415968, 0.0469861, 0.311527, 0.589832, 0.696851, 0.507855, 0.223775, 0.0416614, 0.619463, 0.495802, 0.879608, 0.278164, 0.914285, 0.236087, 0.736617, 0.88685, 0.510509, 0.496804, 0.473422, 0.408415, 0.603875, 0.0797587, 0.768391, 0.593124, 0.546585, 0.262834, 0.281042, 0.459584, 0.173093, 0.602033, 0.30103, 0.165304, 0.0213275, 0.980875, 0.120081, 0.342542, 0.0518216, 0.731458, 0.191318, 0.773785, 0.24661, 0.217768, 0.612652, 0.973506, 0.273143, 0.914329, 0.837165, 0.801158, 0.671485, 0.00732313, 0.481713, 0.428169, 0.241406, 0.772142, 0.259145, 0.62506, 0.143326, 0.87671, 0.302519, 0.97961, 0.540028, 0.193667, 0.140499, 0.938559, 0.162144, 0.437091, 0.261038, 0.458394, 0.562715, 0.588731, 0.850573, 0.702459, 0.36892, 0.750344, 0.840476, 0.907573, 0.412686, 0.782853, 0.70378, 0.174329, 0.0954385, 0.16047, 0.869342, 0.0441803, 0.18642, 0.668284, 0.264554, 0.385327, 0.854955, 0.819063, 0.471354, 0.678031, 0.376733, 0.0982298, 0.438167, 0.293879, 0.92393, 0.69696, 0.725893, 0.317032, 0.203411, 0.65363, 0.49608, 0.658154, 0.842702, 0.313104, 0.863964, 0.855449, 0.218583, 0.623755, 0.0426666, 0.952675, 0.652668, 0.979785, 0.261897, 0.632642, 0.386615, 0.773058, 0.815306, 0.133311, 0.922995, 0.751383, 0.271896, 0.853526, 0.299945, 0.872168, 0.43469, 0.76393, 0.104357, 0.252107, 0.178744, 0.0254154, 0.0121536, 0.566338, 0.800853, 0.208862, 0.483347, 0.334656, 0.107808, 0.281412, 0.500319, 0.927732, 0.699393, 0.650721, 0.386036, 0.312013, 0.249287, 0.0132821, 0.896976, 0.337049, 0.693979, 0.807182, 0.948462, 0.208668, 0.720983, 0.225851, 
0.918379, 0.728314, 0.677805, 0.171668, 0.64968, 0.922009, 0.484325, 0.647646, 0.835169, 0.302394, 0.278037, 0.187053, 0.450665, 0.851355, 0.895482, 0.536796, 0.915332, 0.982553, 0.939195, 0.908274, 0.640734, 0.526159, 0.987063, 0.561802, 0.432246, 0.961466, 0.608891, 0.261753, 0.57967, 0.380521, 0.0978926, 0.133828, 0.125992, 0.589354, 0.413223, 0.162819, 0.872708, 0.364546, 0.638731, 0.884653, 0.622718, 0.0430716, 0.0603181, 0.499674, 0.0564261, 0.939255, 0.938424, 0.0362947, 0.880209, 0.979, 0.585052, 0.3654, 0.280154, 0.683502, 0.68845, 0.554585, 0.745996, 0.733725, 0.529199, 0.441838, 0.634349, 0.900637, 0.812677, 0.211902, 0.826391, 0.792779, 0.769211, 0.848819, 0.720352, 0.74029, 0.265414, 0.0206869, 0.902535, 0.817578, 0.533248, 0.492245, 0.802497, 0.937129, 0.629572, 0.322584, 0.738333, 0.757755, 0.575196, 0.379416, 0.672236, 0.712214, 0.876568, 0.821622, 0.451208, 0.457679, 0.630899, 0.965935, 0.181881, 0.32953, 0.248688, 0.605226, 0.696451, 0.503154, 0.911905, 0.705517, 0.855191, 0.928214, 0.41761, 0.616507, 0.5642, 0.780828, 0.966177, 0.648054, 0.733489, 0.797892, 0.935768, 0.774451, 0.337367, 0.0918411, 0.639816, 0.200888, 0.0232862, 0.392057, 0.541138, 0.868435, 0.0935901, 0.373805, 0.0292326, 0.632368, 0.469387, 0.132166, 0.577847, 0.00155223, 0.0252468, 0.585503, 0.743681, 0.228514, 0.401641, 0.118074, 0.0817157, 0.52752, 0.0696408, 0.470273, 0.490671, 0.970841, 0.458247, 0.635328, 0.00276626, 0.316413, 0.419444, 0.292113, 0.823948, 0.0268925, 0.618504, 0.639367, 0.365153, 0.449928, 0.556959, 0.71793, 0.0204331, 0.412344, 0.87819, 0.327939, 0.312845, 0.838894, 0.566002, 0.433753, 0.994723, 0.640206, 0.0466234, 0.710895, 0.737575, 0.787048, 0.747796, 0.0245469, 0.116564, 0.865837, 0.0551101, 0.579056, 0.339799, 0.20624, 0.158054, 0.489429, 0.533097, 0.994276, 0.760484, 0.0930767, 0.119221, 0.373164, 0.115098, 0.390313, 0.525111, 0.258839, 0.888985, 0.108926, 0.878784, 0.879588, 0.5868, 0.596448, 0.281762, 0.496528, 0.404005, 0.914844, 0.89073, 0.563902, 0.553464, 0.182609, 0.784416, 0.677185, 0.0168709, 0.214446, 0.0308244, 0.00672971, 0.233529, 0.28171, 0.522376, 0.352466, 0.919316, 0.828716, 0.930794, 0.779163, 0.794395, 0.690658, 0.854013, 0.915137, 0.364633, 0.341119, 0.0224899, 0.65358, 0.215617, 0.0267426, 0.9482, 0.320538, 0.701167, 0.343636, 0.400683, 0.74544, 0.283001, 0.663068, 0.397258, 0.380092, 0.959899, 0.891996, 0.539061, 0.647997, 0.77335, 0.449796, 0.063318, 0.423011, 0.850814, 0.159287, 0.0083325, 0.406502, 0.950196, 0.365803, 0.0626204, 0.287745, 0.980158, 0.286673, 0.0085185, 0.00846485, 0.80549, 0.0776918, 0.299683, 0.118333, 0.564421, 0.138695, 0.794155, 0.787339, 0.650925, 0.362174, 0.808774, 0.429166, 0.0522014, 0.64772, 0.11187, 0.669222, 0.876807, 0.319318, 0.87249, 0.926642, 0.376485, 0.938455, 0.102091, 0.379409, 0.6545, 0.265923, 0.330364, 0.813973, 0.388237, 0.90402, 0.826352, 0.552258, 0.817111, 0.209886, 0.266258, 0.793415, 0.322234, 0.720448, 0.48194, 0.22206, 0.477152, 0.335076, 0.993003, 0.493297, 0.2343, 0.234164, 0.988364, 0.475453, 0.944354, 0.572656, 0.81982, 0.841225, 0.300953, 0.80228, 0.692862, 0.45582, 0.608203, 0.0223268, 0.544142, 0.48409, 0.883958, 0.859681, 0.945984, 0.776398, 0.833004, 0.686266, 0.353201, 0.936881, 0.3648, 0.728375, 0.252332, 0.667343, 0.620504, 0.667671, 0.274578, 0.798505, 0.918431, 0.127858, 0.450921, 0.10913, 0.86227, 0.495973, 0.553919, 0.0476738, 0.897428, 0.282044, 0.0762915, 0.522753, 0.36765, 0.443482, 0.636019, 0.698052, 0.0145264, 0.304865, 0.0610866, 0.998783, 0.325839, 0.827785, 0.179754, 
0.00522982, 0.542933, 0.035591, 0.394302, 0.966684, 0.482807, 0.0839868, 0.695327, 0.376071, 0.721095, 0.685043, 0.960016, 0.223145, 0.855048, 0.501232, 0.0944823, 0.698887, 0.85304, 0.759519, 0.829787, 0.803766, 0.143614, 0.384703, 0.0724454, 0.497892, 0.162596, 0.49557, 0.0117661, 0.670604, 0.206836, 0.591222, 0.695109, 0.790669, 0.110832, 0.333949, 0.662575, 0.0585832, 0.85838, 0.735202, 0.79964, 0.848775, 0.461166, 0.133284, 0.0752046, 0.532981, 0.922539, 0.972658, 0.129547, 0.247307, 0.833091, 0.961799, 0.825765, 0.477902, 0.974688, 0.025077, 0.44261, 0.190338, 0.889398, 0.106471, 0.15423, 0.565496, 0.258079, 0.405506, 0.637308, 0.769219, 0.204806, 0.96938, 0.894214, 0.873387, 0.207823, 0.48801, 0.236858, 0.763161, 0.544051, 0.613768, 0.428753, 0.554566, 0.656197, 0.297499, 0.478763, 0.0976514, 0.59537, 0.579078, 0.0819789, 0.654088, 0.925821, 0.525639, 0.484559, 0.619819, 0.847242, 0.771479, 0.0087186, 0.455569, 0.214935, 0.911306, 0.632993, 0.832396, 0.928761, 0.674419, 0.761091, 0.812115, 0.895513, 0.87407, 0.877751, 0.207037, 0.215039, 0.916875, 0.326794, 0.262319, 0.0425866, 0.175073, 0.684167, 0.724021, 0.844524, 0.618179, 0.849266, 0.9983, 0.732457, 0.457777, 0.39765, 0.580307, 0.35046, 0.805789, 0.346821, 0.645252, 0.639165, 0.460699, 0.469279, 0.666495, 0.381163, 0.117864, 0.811299, 0.0922506, 0.171651, 0.820672, 0.475703, 0.874998, 0.24256, 0.341205, 0.73704, 0.457847, 0.440887, 0.175341, 0.807219, 0.986352, 0.248882, 0.954343, 0.166271, 0.775489, 0.622371, 0.372043, 0.191437, 0.515043, 0.344614, 0.165677, 0.411226, 0.359628, 0.608428, 0.311358, 0.0926872, 0.278223, 0.406027, 0.277742, 0.303786, 0.359265, 0.759172, 0.728643, 0.815346, 0.444012, 0.525172, 0.266253, 0.181554, 0.113638, 0.371597, 0.526024, 0.939392, 0.582793, 0.699262, 0.193345, 0.440089, 0.0957679, 0.65982, 0.83131, 0.121418, 0.954625, 0.124645, 0.678378, 0.876522, 0.468095, 0.542474, 0.533214, 0.866372, 0.780074, 0.989091, 0.431179, 0.565459, 0.216981, 0.0853234, 0.838334, 0.864885, 0.687704, 0.967232, 0.582386, 0.139376, 0.524288, 0.882608, 0.751836, 0.584906, 0.280211, 0.422397, 0.0946807, 0.647679, 0.455189, 0.0861948, 0.733463, 0.978775, 0.018109, 0.511789, 0.905584, 0.152733, 0.760864, 0.529059, 0.680823, 0.652798, 0.75089, 0.185731, 0.398219, 0.0925079, 0.455324, 0.925171, 0.278169, 0.0289997, 0.00579375, 0.545702, 0.827669, 0.846042, 0.112142, 0.830135, 0.0707057, 0.823006, 0.58776, 0.837205, 0.841997, 0.969016, 0.623585, 0.708216, 0.45952, 0.886268, 0.563773, 0.184274, 0.166326, 0.813176, 0.745364, 0.0702331, 0.4599, 0.429664, 0.364169, 0.283908, 0.839553, 0.149715, 0.064362, 0.666133, 0.542712, 0.0985222, 0.805338, 0.372754, 0.62279, 0.208252, 0.31167, 0.662493, 0.741929, 0.60662, 0.303545, 0.873878, 0.832027, 0.741637, 0.780812, 0.985012, 0.138755, 0.170565, 0.831754, 0.333998, 0.296058, 0.515891, 0.163649, 0.873358, 0.773499, 0.99773, 0.76183, 0.197894, 0.351665, 0.485613, 0.00191917, 0.591469, 0.859371, 0.683942, 0.448589, 0.898636, 0.938772, 0.421771, 0.0206661, 0.591344, 0.559504, 0.347369, 0.650871, 0.232122, 0.646934, 0.366716, 0.217148, 0.0276472, 0.344373, 0.086544, 0.123203, 0.742502, 0.232899, 0.502286, 0.438192, 0.0821565, 0.19761, 0.272053, 0.669708, 0.803645, 0.436057, 0.480031, 0.285902, 0.347965, 0.265545, 0.203791, 0.0780581, 0.372543, 0.468054, 0.38697, 0.46109, 0.227524, 0.639341, 0.407162, 0.292364, 0.264987, 0.77532, 0.335171, 0.466698, 0.0218887, 0.173082, 0.381662, 0.19919, 0.975095, 0.745291, 0.730873, 0.439737, 0.510066, 0.847857, 0.524264, 0.12422, 0.316082, 0.181008, 
0.485218, 0.0875008, 0.376509, 0.120733, 0.0210153, 0.412181, 0.399375, 0.405339, 0.778421, 0.0183537, 0.109468, 0.207663, 0.680579, 0.199971, 0.111591, 0.815966, 0.711327, 0.980347, 0.154883, 0.524286, 0.758173, 0.196185, 0.596499, 0.566757, 0.702571, 0.12603, 0.907757, 0.145578, 0.827116, 0.85263, 0.749245, 0.377216, 0.991406, 0.341737, 0.65783, 0.710799, 0.276245, 0.21397, 0.653395, 0.60759, 0.428054, 0.223166, 0.289404, 0.779153, 0.264097, 0.333161, 0.518189, 0.511508, 0.0301043, 0.700396, 0.296287, 0.0730226, 0.0626335, 0.708054, 0.407603, 0.0478065, 0.687654, 0.970479, 0.931644, 0.188487, 0.749045, 0.259967, 0.348934, 0.915613, 0.783626, 0.642778, 0.30499, 0.982318, 0.233114, 0.439615, 0.500804, 0.422079, 0.834522, 0.936686, 0.674656, 0.750855, 0.241206, 0.495153, 0.706212, 0.22935, 0.655109, 0.274586, 0.916446, 0.26678, 0.69987, 0.525289, 0.235553, 0.724885, 0.870327, 0.771253, 0.545269, 0.241219, 0.354546, 0.324689, 0.496322, 0.820226, 0.794634, 0.151333, 0.299279, 0.730443, 0.39681, 0.375833, 0.506248, 0.314417, 0.106419, 0.309005, 0.0118986, 0.352038, 0.975727, 0.596796, 0.050637, 0.621501, 0.352225, 0.222378, 0.594895, 0.147632, 0.35089, 0.772587, 0.419115, 0.897692, 0.382616, 0.684076, 0.0216641, 0.882061, 0.563223, 0.169828, 0.337242, 0.180684, 0.831768, 0.000880582, 0.035212, 0.380242, 0.321027, 0.20206, 0.30052, 0.0940876, 0.409633, 0.12443, 0.0371241, 0.169491, 0.922009, 0.716664, 0.963839, 0.103883, 0.989453, 0.22687, 0.94148, 0.245776, 0.659306, 0.909624, 0.621806, 0.264493, 0.670412, 0.221511, 0.79922, 0.168689, 0.989254, 0.130889, 0.508618, 0.908252, 0.975679, 0.728257, 0.522825, 0.0801624, 0.375765, 0.0563316, 0.893081, 0.542348, 0.737506, 0.48611, 0.984519, 0.0078226, 0.81894, 0.764225, 0.915142, 0.579383, 0.673858, 0.942977, 0.55327, 0.0198962, 0.727537, 0.757616, 0.20476, 0.795402, 0.265199, 0.720311, 0.29669, 0.670042, 0.838673, 0.0593059, 0.643605, 0.270137, 0.107003, 0.400765, 0.340219, 0.0106881, 0.346084, 0.221771, 0.5477, 0.145729, 0.647225, 0.215053, 0.142598, 0.861337, 0.909449, 0.510385, 0.943163, 0.166127, 0.633206, 0.910521, 0.178354, 0.92209, 0.227964, 0.356388, 0.128171, 0.174726, 0.785485, 0.575592, 0.274269, 0.611314, 0.751373, 0.931777, 0.689917, 0.738703, 0.705754, 0.839984, 0.087151, 0.0827527, 0.135115, 0.29786, 0.156464, 0.360025, 0.275173, 0.816818, 0.175458, 0.728818, 0.279461, 0.146934, 0.812872, 0.569866, 0.0566355, 0.0806752, 0.198211, 0.112924, 0.48319, 0.320613, 0.0158336, 0.120042, 0.950126, 0.0492626, 0.12184, 0.379713, 0.728355, 0.701685, 0.953788, 0.111446, 0.534288, 0.460916, 0.348581, 0.0491475, 0.84858, 0.00138538, 0.492785, 0.779054, 0.994262, 0.433473, 0.602259, 0.0411816, 0.142509, 0.804109, 0.0784028, 0.807498, 0.860025, 0.583898, 0.214806, 0.37402, 0.187868, 0.383618, 0.486166, 0.184232, 0.706437, 0.877481, 0.19742, 0.7042, 0.249755, 0.564731, 0.474408, 0.183711, 0.474954, 0.381775, 0.113742, 0.480437, 0.982564, 0.000426251, 0.206507, 0.357601, 0.0764998, 0.307462, 0.13654, 0.723176, 0.980171, 0.148159, 0.587817, 0.532939, 0.991298, 0.881553, 0.685757, 0.0848166, 0.404382, 0.481173, 0.0891813, 0.608265, 0.373653, 0.165365, 0.0949041, 0.938192, 0.527496, 0.689969, 0.251991, 0.816745, 0.537979, 0.632852, 0.801855, 0.51635, 0.72296, 0.830555, 0.280027, 0.602003, 0.0849229, 0.99122, 0.0768961, 0.496196, 0.308519, 0.100239, 0.194686, 0.255449, 0.492251, 0.400271, 0.517834, 0.414514, 0.302919, 0.19172, 0.842483, 0.536281, 0.700811, 0.0290133, 0.890229, 0.413407, 0.0349808, 0.500724, 0.542962, 0.0994638, 0.669554, 0.647204, 0.423555, 
0.691766, 0.201164, 0.330492, 0.580948, 0.76759, 0.745527, 0.803631, 0.54167, 0.2282, 0.185509, 0.12553, 0.229768, 0.303903, 0.670466, 0.801704, 0.377607, 0.841936, 0.381325, 0.582298, 0.133827, 0.519022, 0.0639882, 0.31382, 0.381568, 0.982314, 0.55829, 0.833623, 0.746109, 0.630393, 0.353753, 0.192484, 0.945326, 0.65395, 0.791256, 0.155652, 0.731879, 0.251792, 0.403846, 0.527225, 0.943291, 0.459221, 0.560742, 0.340407, 0.224403, 0.00743414, 0.762825, 0.612189, 0.0859213, 0.670368, 0.84152, 0.756552, 0.913609, 0.130314, 0.344554, 0.751845, 0.182745, 0.64356, 0.123482, 0.0696099, 0.149629, 0.358217, 0.293502, 0.45525, 0.135931, 0.157483, 0.267527, 0.160255, 0.245916, 0.83299, 0.911332, 0.816536, 0.977977, 0.97271, 0.163029, 0.345785, 0.126773, 0.815453, 0.350847, 0.970734, 0.392701, 0.244312, 0.0199651, 0.874517, 0.800012, 0.476579, 0.0127928, 0.378158, 0.692009, 0.587867, 0.281408, 0.623901, 0.328944, 0.541419, 0.539783, 0.757558, 0.148147, 0.0681028, 0.85519, 0.562344, 0.681205, 0.416375, 0.569483, 0.511124, 0.230555, 0.969944, 0.520844, 0.555162, 0.955413, 0.107302, 0.691061, 0.47665, 0.949219, 0.237087, 0.522283, 0.743928, 0.425981, 0.702944, 0.515884, 0.8432, 0.596011, 0.550869, 0.695034, 0.122294, 0.630313, 0.246141, 0.516009, 0.949877, 0.165293, 0.947501, 0.912248, 0.169362, 0.29496, 0.733054, 0.500297, 0.632604, 0.44588, 0.740171, 0.262932, 0.582571, 0.227411, 0.301401, 0.327583, 0.522596, 0.527579, 0.121228, 0.00864084, 0.330013, 0.165413, 0.0277039, 0.706386, 0.407959, 0.751436, 0.378717, 0.199228, 0.766191, 0.184537, 0.202327, 0.788221, 0.55589, 0.382068, 0.760043, 0.432347, 0.816282, 0.895474, 0.107054, 0.425757, 0.0939529, 0.818596, 0.92217, 0.88338, 0.413655, 0.144898, 0.123842, 0.300132, 0.0172145, 0.323029, 0.770783, 0.338488, 0.659724, 0.397907, 0.841668, 0.0248078, 0.560817, 0.75631, 0.216071, 0.945316, 0.756295, 0.618953, 0.293676, 0.713934, 0.260252, 0.599536, 0.0450698, 0.992442, 0.614849, 0.77318, 0.216432, 0.541778, 0.726992, 0.295268, 0.176633, 0.532288, 0.313879, 0.555873, 0.405705, 0.153774, 0.667703, 0.594061, 0.0580947, 0.604919, 0.588593, 0.653251, 0.173753, 0.838343, 0.441918, 0.387072, 0.744385, 0.419127, 0.506196, 0.645351, 0.454678, 0.925833, 0.143292, 0.660574, 0.722864, 0.63793, 0.0788522, 0.363169, 0.564739, 0.191534, 0.259257, 0.0347999, 0.6413, 0.101712, 0.449541, 0.370545, 0.997122, 0.540823, 0.0884563, 0.208965, 0.613264, 0.985142, 0.40103, 0.919549, 0.224497, 0.616591, 0.593156, 0.801722, 0.223876, 0.206958, 0.501691, 0.551562, 0.74282, 0.181208, 0.771393, 0.393918, 0.922321, 0.638577, 0.376863, 0.0401266, 0.816643, 0.785584, 0.203166, 0.878251, 0.65944, 0.882809, 0.0863928, 0.745687, 0.40847, 0.832032, 0.0809256, 0.094153, 0.361189, 0.812077, 0.234691, 0.42694, 0.200852, 0.975523, 0.354141, 0.421757, 0.0774215, 0.070992, 0.421937, 0.633423, 0.283779, 0.81905, 0.794535, 0.0960576, 0.629297, 0.535167, 0.508523, 0.527075, 0.999334, 0.0173005, 0.311656, 0.923278, 0.0455971, 0.444995, 0.133671, 0.680912, 0.155071, 0.723781, 0.461663, 0.93426, 0.329707, 0.714499, 0.169596, 0.67502, 0.391618, 0.118416, 0.33398, 0.968866, 0.0195507, 0.450389, 0.214882, 0.355019, 0.551473, 0.393801, 0.539777, 0.73026, 0.361477, 0.220424, 0.300348, 0.617481, 0.886439, 0.397227, 0.525943, 0.850235, 0.740317, 0.998272, 0.976569, 0.341928, 0.160851, 0.429201, 0.457287, 0.309672, 0.580991, 0.273954, 0.326242, 0.0643254, 0.573575, 0.287755, 0.686969, 0.984569, 0.554197, 0.7274, 0.216069, 0.621872, 0.643813, 0.876882, 0.468438, 0.0936584, 0.513361, 0.670193, 0.532297, 0.104199, 
0.423867, 0.224115, 0.798091, 0.177146, 0.555254, 0.248109, 0.160211, 0.0221616, 0.132389, 0.999707, 0.965038, 0.554444, 0.565595, 0.237503, 0.00169962, 0.229568, 0.682788, 0.764481, 0.0487813, 0.864486, 0.904476, 0.0525319, 0.898714, 0.0173873, 0.985162, 0.436532, 0.582763, 0.0158723, 0.789094, 0.786233, 0.410659, 0.604324, 0.475477, 0.319492, 0.220243, 0.169876, 0.0914102, 0.233776, 0.732329, 0.939244, 0.553129, 0.601285, 0.405567, 0.959259, 0.236196, 0.516522, 0.139614, 0.981068, 0.229505, 0.571244, 0.406352, 0.746558, 0.521592, 0.187013, 0.701303, 0.783859, 0.704499, 0.567539, 0.448151, 0.841309, 0.811219, 0.580277, 0.538911, 0.893996, 0.232525, 0.888414, 0.736725, 0.417165, 0.211215, 0.310771, 0.70188, 0.507595, 0.510334, 0.913556, 0.0478733, 0.575113, 0.630415, 0.990332, 0.57646, 0.546378, 0.92641, 0.623053, 0.321859, 0.0157159, 0.295079, 0.127641, 0.923763, 0.613171, 0.164406, 0.28307, 0.340805, 0.228384, 0.55602, 0.994059, 0.187031, 0.10987, 0.144074, 0.143496, 0.338501, 0.505964, 0.168797, 0.48837, 0.784524, 0.758003, 0.605508, 0.949441, 0.971073, 0.259546, 0.810502, 0.805251, 0.892052, 0.720263, 0.100148, 0.497694, 0.46166, 0.857795, 0.27426, 0.11979, 0.329579, 0.182483, 0.167435, 0.955931, 0.189914, 0.28703, 0.813201, 0.854609, 0.273005, 0.137551, 0.752829, 0.538577, 0.0628072, 0.776421, 0.537499, 0.0547614, 0.885418, 0.920425, 0.788888, 0.00927441, 0.722344, 0.369018, 0.121754, 0.455063, 0.424021, 0.113252, 0.324646, 0.694949, 0.669383, 0.0572191, 0.570372, 0.659508, 0.152914, 0.142012, 0.920393, 0.38614, 0.467542, 0.35072, 0.281447, 0.944685, 0.215558, 0.454926, 0.536026, 0.44778, 0.920869, 0.638052, 0.534191, 0.0641143, 0.828026, 0.873776, 0.164457, 0.552525, 0.207604, 0.452393, 0.205054, 0.895241, 0.0895964, 0.248367, 0.781899, 0.44924, 0.754355, 0.103789, 0.208486, 0.242908, 0.121276, 0.127973, 0.402872, 0.442435, 0.525921, 0.0637256, 0.0361188, 0.571006, 0.606648, 0.947884, 0.760438, 0.0309971, 0.196711, 0.95827, 0.364995, 0.33025, 0.844207, 0.597559, 0.239721, 0.643562, 0.696375, 0.264842, 0.788867, 0.547416, 0.116078, 0.00181673, 0.0610323, 0.35044, 0.722475, 0.201206, 0.589462, 0.514436, 0.524413, 0.54, 0.150274, 0.987583, 0.190674, 0.20016, 0.523651, 0.494359, 0.791559, 0.283106, 0.677997, 0.872868, 0.967687, 0.512397, 0.392591, 0.0656109, 0.782488, 0.429386, 0.412632, 0.358234, 0.116172, 0.221527, 0.694443, 0.799309, 0.0969967, 0.851847, 0.153804, 0.876318, 0.731137, 0.121943, 0.177912, 0.927979, 0.532124, 0.107388, 0.111061, 0.839484, 0.604183, 0.502345, 0.0353025, 0.556722, 0.736888, 0.170137, 0.966485, 0.196674, 0.34073, 0.840239, 0.314031, 0.254273, 0.888353, 0.667272, 0.653667, 0.950473, 0.95575, 0.589813, 0.242019, 0.386154, 0.720908, 0.0230627, 0.131933, 0.860859, 0.275647, 0.457245, 0.51807, 0.362195, 0.668601, 0.402841, 0.943437, 0.654454, 0.244928, 0.628289, 0.186225, 0.908723, 0.809243, 0.841236, 0.71593, 0.348131, 0.505465, 0.889749, 0.234725, 0.792285, 0.277952, 0.301562, 0.885563, 0.410757, 0.80986, 0.0936623, 0.879623, 0.538852, 0.69582, 0.387751, 0.30954, 0.124704, 0.771897, 0.565482, 0.509211, 0.356417, 0.114909, 0.090619, 0.514345, 0.965393, 0.844088, 0.618682, 0.523679, 0.791183, 0.00318767, 0.634567, 0.370863, 0.869661, 0.835118, 0.851767, 0.418507, 0.0536338, 0.185084, 0.183014, 0.528634, 0.381339, 0.344865, 0.290802, 0.867557, 0.473365, 0.563617, 0.00453586, 0.365663, 0.379836, 0.12498, 0.56141, 0.68537, 0.451273, 0.884056, 0.858573, 0.736995, 0.987575, 0.524262, 0.902263, 0.0411267, 0.293483, 0.626195, 0.411771, 0.310665, 0.290257, 0.410445, 
0.0286189, 0.362421, 0.195714, 0.965872, 0.96694, 0.361025, 0.66546, 0.278483, 0.918264, 0.261512, 0.548885, 0.71121, 0.00362248, 0.182363, 0.474657, 0.48244, 0.0516065, 0.158192, 0.97494, 0.699373, 0.909023, 0.707281, 0.473288, 0.169886, 0.0643692, 0.0449337, 0.659065, 0.937262, 0.381404, 0.825512, 0.65109, 0.125254, 0.732339, 0.601261, 0.697315, 0.303673, 0.522208, 0.0152708, 0.507449, 0.783954, 0.28115, 0.295958, 0.622733, 0.786373, 0.39511, 0.159152, 0.251225, 0.603123, 0.745414, 0.653556, 0.748722, 0.730079, 0.340812, 0.664719, 0.40022, 0.182874, 0.37144, 0.0832018, 0.811496, 0.674253, 0.739171, 0.845764, 0.178027, 0.545391, 0.540179, 0.948017, 0.135733, 0.504476, 0.486853, 0.835047, 0.231292, 0.25473, 0.00212214, 0.212284, 0.472472, 0.112381, 0.418387, 0.0418642, 0.611147, 0.0643578, 0.206525, 0.804208, 0.573227, 0.663163, 0.164013, 0.914674, 0.305367, 0.903968, 0.0232696, 0.563987, 0.560816, 0.414911, 0.400634, 0.131686, 0.0193554, 0.607752, 0.164197, 0.567341, 0.42141, 0.83941, 0.460983, 0.985455, 0.870885, 0.202614, 0.968553, 0.80669, 0.372927, 0.788877, 0.357564, 0.0966117, 0.306816, 0.336928, 0.040926, 0.436574, 0.510561, 0.969237, 0.239551, 0.731059, 0.90222, 0.965847, 0.608129, 0.0812972, 0.353123, 0.015714, 0.0231182, 0.0392268, 0.0738136, 0.58702, 0.171462, 0.370901, 0.678717, 0.468467, 0.410735, 0.673824, 0.158814, 0.533841, 0.916291, 0.029516, 0.9556, 0.664704, 0.102852, 0.948735, 0.754373, 0.643268, 0.0982024, 0.942864, 0.274011, 0.476923, 0.106461, 0.225548, 0.313291, 0.329554, 0.851883, 0.314722, 0.938837, 0.322335, 0.585171, 0.0277564, 0.318626, 0.629204, 0.532407, 0.740747, 0.919904, 0.420374, 0.724614, 0.108623, 0.738651, 0.907322, 0.0670096, 0.311997, 0.852269, 0.702452, 0.127123, 0.901492, 0.693842, 0.839737, 0.339155, 0.147633, 0.355625, 0.145871, 0.115069, 0.957674, 0.560687, 0.064264, 0.506556, 0.554516, 0.73012, 0.409934, 0.802327, 0.998475, 0.647414, 0.127543, 0.265695, 0.578531, 0.1327, 0.355447, 0.558171, 0.00598838, 0.940226, 0.369519, 0.586139, 0.55968, 0.636518, 0.857409, 0.963241, 0.0116475, 0.667325, 0.407958, 0.335186, 0.479968, 0.270378, 0.919637, 0.826009, 0.199082, 0.322783, 0.39992, 0.758611, 0.255333, 0.302068, 0.902525, 0.500197, 0.855985, 0.500728, 0.314063, 0.0381636, 0.557419, 0.152195, 0.803799, 0.84627, 0.10769, 0.345615, 0.269928, 0.609945, 0.279088, 0.200761, 0.766667, 0.731349, 0.335105, 0.254386, 0.53166, 0.194555, 0.521007, 0.162585, 0.449878, 0.100261, 0.622192, 0.509137, 0.121375, 0.315335, 0.485973, 0.434955, 0.754392, 0.924451, 0.458096, 0.0209212, 0.639647, 0.311079, 0.402749, 0.679186, 0.332598, 0.730444, 0.10064, 0.637107, 0.389819, 0.607408, 0.646883, 0.347521, 0.168656, 0.866111, 0.0461562, 0.777592, 0.120227, 0.862783, 0.189477, 0.263367, 0.283009, 0.013176, 0.836167, 0.131011, 0.181339, 0.388115, 0.713889, 0.58026, 0.978099, 0.560269, 0.474457, 0.251467, 0.736789, 0.85937, 0.438229, 0.805346, 0.576954, 0.589921, 0.165357, 0.898489, 0.469481, 0.179003, 0.560535, 0.101505, 0.997484, 0.817597, 0.456714, 0.517765, 0.539166, 0.0549501, 0.13851, 0.144562, 0.84891, 0.426885, 0.102703, 0.0221969, 0.899891, 0.827862, 0.020504, 0.987958, 0.346579, 0.014053, 0.872403, 0.367345, 0.0392524, 0.917112, 0.680638, 0.315322, 0.0444468, 0.704286, 0.47484, 0.262109, 0.713528, 0.0696204, 0.553667, 0.0572242, 0.222615, 0.421279, 0.954459, 0.630813, 0.498017, 0.613005, 0.162239, 0.469291, 0.739683, 0.429791, 0.156983, 0.430067, 0.630572, 0.743539, 0.228513, 0.609373, 0.423079, 0.674534, 0.951026, 0.170747, 0.676115, 0.341737, 0.420619, 0.880342, 
0.561069, 0.486017, 0.533275, 0.16003, 0.99196, 0.802565, 0.635778, 0.75065, 0.540375, 0.208002, 0.0790226, 0.300026, 0.26242, 0.485571, 0.80065, 0.739541, 0.533718, 0.456979, 0.028113, 0.991271, 0.862795, 0.484163, 0.430773, 0.255418, 0.398744, 0.113383, 0.426061, 0.362484, 0.568603, 0.0652797, 0.253389, 0.717145, 0.48661, 0.802034, 0.926832, 0.611447, 0.619769, 0.683132, 0.819905, 0.103712, 0.199142, 0.511242, 0.853235, 0.0824173, 0.110265, 0.447908, 0.710412, 0.934489, 0.272197, 0.0303202, 0.256232, 0.0225588, 0.481865, 0.236125, 0.0195839, 0.993028, 0.580165, 0.666793, 0.269172, 0.176457, 0.796069, 0.165905, 0.634858, 0.254795, 0.859056, 0.103369, 0.949433, 0.792508, 0.378716, 0.944026, 0.227297, 0.8783, 0.323629, 0.621736, 0.914955, 0.823668, 0.299256, 0.123202, 0.617012, 0.446672, 0.953833, 0.613775, 0.0391566, 0.554599, 0.91968, 0.275098, 0.833774, 0.953738, 0.583018, 0.746144, 0.533393, 0.405325, 0.679003, 0.37107, 0.328953, 0.44203, 0.0994147, 0.18665, 0.657475, 0.288277, 0.930274, 0.418843, 0.469237, 0.35161, 0.12955, 0.089204, 0.532626, 0.812124, 0.436089, 0.0448819, 0.512143, 0.059357, 0.0345068, 0.469822, 0.253855, 0.620757, 0.0337107, 0.956324, 0.146751, 0.460174, 0.509127, 0.882615, 0.589425, 0.759984, 0.981331, 0.683049, 0.310719, 0.729234, 0.122272, 0.586035, 0.991812, 0.582853, 0.453523, 0.181746, 0.892268, 0.334669, 0.986757, 0.817444, 0.590081, 0.850383, 0.595176, 0.240762, 0.8305, 0.149676, 0.437148, 0.542704, 0.671214, 0.0366027, 0.663488, 0.531283, 0.121862, 0.640571, 0.165648, 0.461287, 0.273252, 0.844644, 0.825103, 0.148409, 0.251839, 0.446473, 0.0960271, 0.00706158, 0.630802, 0.848176, 0.375203, 0.0320539, 0.130486, 0.149418, 0.183657, 0.455004, 0.8082, 0.269266, 0.957152, 0.557931, 0.0748252, 0.749464, 0.504835, 0.81596, 0.34321, 0.477582, 0.801332, 0.461577, 0.926013, 0.661129, 0.649922, 0.452354, 0.218746, 0.905802, 0.0605009, 0.418734, 0.501866, 0.635877, 0.705736, 0.181324, 0.104941, 0.832578, 0.874072, 0.141237, 0.142201, 0.280765, 0.5323, 0.674359, 0.652151, 0.190034, 0.967754, 0.696816, 0.969444, 0.0220326, 0.687023, 0.37009, 0.285545, 0.181625, 0.831447, 0.835206, 0.575393, 0.0947767, 0.445169, 0.507885, 0.425157, 0.613484, 0.682745, 0.0263245, 0.665957, 0.0166366, 0.65463, 0.789293, 0.0336617, 0.164098, 0.924421, 0.132713, 0.456148, 0.574763, 0.668189, 0.489693, 0.0441914, 0.518647, 0.214696, 0.112737, 0.44819, 0.303738, 0.399455, 0.546151, 0.158835, 0.00180918, 0.262032, 0.384566, 0.89461, 0.164237, 0.818893, 0.49971, 0.120254, 0.23836, 0.029748, 0.995163, 0.226978, 0.0484893, 0.231094, 0.305973, 0.71215, 0.822466, 0.0508578, 0.997569, 0.898337, 0.817448, 0.874781, 0.689182, 0.973867, 0.628597, 0.620877, 0.45498, 0.717818, 0.938396, 0.427875, 0.366748, 0.776689, 0.96113, 0.76588, 0.85267, 0.0487189, 0.683232, 0.042304, 0.781456, 0.0592676, 0.129742, 0.571455, 0.768879, 0.703165, 0.0624367, 0.190833, 0.0241882, 0.805154, 0.335558, 0.910022, 0.332639, 0.323955, 0.228345, 0.887001, 0.980327, 0.870543, 0.990502, 0.265794, 0.694965, 0.00930077, 0.731371, 0.223209, 0.405306, 0.109496, 0.965509, 0.958465, 0.840793, 0.889724, 0.064064, 0.451945, 0.206655, 0.38059, 0.106237, 0.491083, 0.93959, 0.487393, 0.232154, 0.915776, 0.171781, 0.493335, 0.474649, 0.698498, 0.530765, 0.52425, 0.449313, 0.81784, 0.483302, 0.73427, 0.962126, 0.718749, 0.446763, 0.893096, 0.725123, 0.466613, 0.83961, 0.98493, 0.537082, 0.0800491, 0.405409, 0.983864, 0.39792, 0.0114679, 0.150705, 0.100391, 0.290206, 0.898127, 0.587066, 0.0429559, 0.659294, 0.850143, 0.00734813, 0.101774, 
[raw test fixture data: 19 rows of comma-separated pseudo-random floating-point values in [0, 1), omitted]
0.214783, 0.0275671, 0.227575, 0.838316, 0.84354, 0.826011, 0.115646, 0.715474, 0.388171, 0.0533561, 0.0273449, 0.864577, 0.724575, 0.584559, 0.0478402, 0.385348, 0.288855, 0.745594, 0.473685, 0.067597, 0.0966291, 0.309623, 0.746176, 0.884663, 0.851867, 0.717935, 0.323082, 0.260631, 0.638873, 0.909799, 0.378827, 0.250598, 0.669078, 0.358743, 0.944822, 0.516899, 0.650863, 0.914101, 0.919328, 0.104253, 0.778744, 0.614235, 0.959365, 0.471582, 0.75461, 0.131796, 0.518513, 0.796229, 0.22335, 0.358356, 0.834743, 0.151196, 0.228007, 0.731939, 0.690158, 0.619762, 0.908891, 0.00935599, 0.151689, 0.895069, 0.583814, 0.739853, 0.10738, 0.507382, 0.76196, 0.945943, 0.224541, 0.655233, 0.340339, 0.286659, 0.198468, 0.455985, 0.202928, 0.102759, 0.117635, 0.987162, 0.513448, 0.0515212, 0.387943, 0.164047, 0.110183, 0.341008, 0.710716, 0.785942, 0.78434, 0.473991, 0.354403, 0.24259, 0.818226, 0.572671, 0.257936, 0.368975, 0.622038, 0.759281, 0.215874, 0.371487, 0.213655, 0.0225308, 0.514941, 0.90473, 0.470511, 0.549805, 0.95397, 0.862688, 0.975287, 0.302472, 0.00857932, 0.281589, 0.310911, 0.0253471, 0.575671, 0.49977, 0.17398, 0.351441, 0.747175, 0.905322, 0.509491, 0.690312, 0.486756, 0.777155, 0.430102, 0.606543, 0.368131, 0.682621, 0.630395, 0.852992, 0.974505, 0.196192, 0.48224, 0.837796, 0.554905, 0.667964, 0.495827, 0.38629, 0.223386, 0.26684, 0.505895, 0.115652, 0.620934, 0.315365, 0.103573, 0.417029, 0.430385, 0.169628, 0.770794, 0.589168, 0.587536, 0.335873, 0.835063, 0.501202, 0.677467, 0.383526, 0.68742, 0.581883, 0.344688, 0.00554769, 0.716246, 0.418168, 0.390993, 0.368735, 0.904264, 0.485587, 0.645208, 0.819884, 0.713942, 0.684267, 0.0213335, 0.715631, 0.559475, 0.547555, 0.39113, 0.0695155, 0.585026, 0.799126, 0.128797, 0.709349, 0.539211, 0.553569, 0.507888, 0.457815, 0.107576, 0.0641052, 0.283917, 0.973872, 0.0788143, 0.923277, 0.242687, 0.0207217, 0.208356, 0.482252, 0.584101, 0.43033, 0.665051, 0.220115, 0.0187984, 0.00274392, 0.722993, 0.840627, 0.971185, 0.270766, 0.107335, 0.658131, 0.77923, 0.878906, 0.522251, 0.270766, 0.0552041, 0.930282, 0.0679173, 0.694456, 0.878168, 0.495982, 0.110033, 0.723934, 0.122868, 0.15146, 0.610685, 0.161229, 0.788125, 0.596026, 0.396793, 0.823102, 0.0834243, 0.731238, 0.875904, 0.0024461, 0.0510364, 0.892, 0.77279, 0.449158, 0.039458, 0.0988294, 0.752611, 0.521684, 0.0262497, 0.639185, 0.0395647, 0.78477, 0.0504738, 0.517867, 0.950685, 0.0890516, 0.333973, 0.907944, 0.894111, 0.651692, 0.523152, 0.510844, 0.614001, 0.0605636, 0.193996, 0.223573, 0.688479, 0.980567, 0.902811, 0.280912, 0.162531, 0.0540762, 0.414345, 0.817166, 0.300617, 0.0784998, 0.0191517, 0.288846, 0.650191, 0.128051, 0.766181, 0.234693, 0.631353, 0.515858, 0.395164, 0.48482, 0.741345, 0.224187, 0.623646, 0.50843, 0.947179, 0.921144, 0.0411876, 0.0620683, 0.300789, 0.127641, 0.99347, 0.194463, 0.799161, 0.134198, 0.194098, 0.192386, 0.950583, 0.779795, 0.577767, 0.772097, 0.881238, 0.438153, 0.352816, 0.613207, 0.820944, 0.610075, 0.0102571, 0.722896, 0.845245, 0.563335, 0.194757, 0.704358, 0.112646, 0.576978, 0.636203, 0.177122, 0.962064, 0.140906, 0.909349, 0.724073, 0.437111, 0.919171, 0.265218, 0.483642, 0.657541, 0.689893, 0.592511, 0.299259, 0.671936, 0.725063, 0.906748, 0.908616, 0.691344, 0.733322, 0.0358387, 0.0145384, 0.0194284, 0.371836, 0.666234, 0.520111, 0.0156337, 0.932628, 0.295622, 0.0178778, 0.251465, 0.881716, 0.889716, 0.515676, 0.824123, 0.325642, 0.813515, 0.143581, 0.652969, 0.119205, 0.142467, 0.326919, 0.72218, 0.189729, 0.367562, 0.376117, 0.517343, 
0.160571, 0.652061, 0.982528, 0.967784, 0.67242, 0.899708, 0.134544, 0.241151, 0.3378, 0.369193, 0.0187582, 0.71399, 0.624092, 0.727593, 0.16902, 0.377871, 0.210523, 0.827917, 0.852145, 0.808643, 0.104473, 0.586986, 0.394308, 0.507487, 0.541712, 0.894871, 0.112282, 0.311991, 0.755083, 0.860908, 0.143703, 0.694696, 0.410443, 0.701204, 0.754024, 0.247181, 0.464045, 0.439513, 0.787088, 0.263565, 0.136725, 0.184538, 0.903104, 0.2926, 0.500458, 0.281775, 0.896249, 0.288564, 0.363984, 0.487971, 0.901011, 0.716427, 0.729958, 0.441464, 0.0228345, 0.109672, 0.0767572, 0.372363, 0.928338, 0.937909, 0.992874, 0.76931, 0.96157, 0.785619, 0.0455519, 0.352093, 0.114345, 0.0485703, 0.0705642, 0.986669, 0.259092, 0.214464, 0.785229, 0.287832, 0.367194, 0.587579, 0.63899, 0.769462, 0.0196606, 0.311204, 0.180387, 0.0998978, 0.625155, 0.78767, 0.694034, 0.30335, 0.152637, 0.594031, 0.983387, 0.012683, 0.360469, 0.343095, 0.890132, 0.373022, 0.613124, 0.0664975, 0.866854, 0.509899, 0.521016, 0.591964, 0.163959, 0.293403, 0.495289, 0.21112, 0.254242, 0.928399, 0.957892, 0.976767, 0.277183, 0.507005, 0.154241, 0.481073, 0.880563, 0.882597, 0.589448, 0.180488, 0.26247, 0.311174, 0.717911, 0.149071, 0.269492, 0.178879, 0.677134, 0.819556, 0.0559708, 0.426378, 0.108577, 0.00154718, 0.819687, 0.363181, 0.532661, 0.123272, 0.394626, 0.85865, 0.0046781, 0.048084, 0.920112, 0.166069, 0.920547, 0.412907, 0.138344, 0.876265, 0.0688749, 0.0969028, 0.644361, 0.169685, 0.335652, 0.705437, 0.143203, 0.647942, 0.680218, 0.966092, 0.408209, 0.317951, 0.33836, 0.763171, 0.705177, 0.0616689, 0.102783, 0.377501, 0.12131, 0.889664, 0.851413, 0.666999, 0.0267393, 0.890247, 0.437947, 0.07473, 0.314552, 0.491806, 0.724258, 0.937625, 0.994299, 0.157432, 0.077267, 0.956389, 0.923912, 0.119921, 0.759951, 0.472597, 0.868388, 0.111203, 0.0456212, 0.619121, 0.870144, 0.387855, 0.341935, 0.292578, 0.508057, 0.862987, 0.434029, 0.128391, 0.644022, 0.932877, 0.0802235, 0.778851, 0.837766, 0.21515, 0.902426, 0.0803018, 0.0451043, 0.979109, 0.127859, 0.437255, 0.954738, 0.281417, 0.886565, 0.120915, 0.747099, 0.878146, 0.890052, 0.57775, 0.920686, 0.92972, 0.637755, 0.645211, 0.834063, 0.0340221, 0.646686, 0.623421, 0.85634, 0.786195, 0.16432, 0.326467, 0.181429, 0.986865, 0.669251, 0.804039, 0.510411, 0.015761, 0.692227, 0.843232, 0.923616, 0.48712, 0.749108, 0.585923, 0.557237, 0.212273, 0.829083, 0.732269, 0.569164, 0.109997, 0.785893, 0.442765, 0.679718, 0.500834, 0.321716, 0.203084, 0.723897, 0.0615703, 0.787114, 0.72705, 0.475697, 0.96277, 0.178659, 0.32012, 0.503377, 0.435492, 0.572015, 0.128768, 0.516791, 0.10983, 0.755166, 0.620428, 0.991006, 0.80475, 0.38448, 0.771185, 0.721985, 0.786759, 0.696493, 0.607626, 0.679078, 0.430349, 0.113553, 0.556384, 0.397408, 0.527625, 0.338182, 0.445802, 0.273999, 0.884071, 0.823653, 0.94114, 0.0951649, 0.696848, 0.460765, 0.15284, 0.922747, 0.871428, 0.139821, 0.00466255, 0.848562, 0.425235, 0.621252, 0.549582, 0.879424, 0.0986899, 0.805771, 0.521052, 0.486152, 0.313243, 0.0180174, 0.9112, 0.776452, 0.80454, 0.39278, 0.0340876, 0.983415, 0.0694965, 0.987536, 0.932105, 0.484271, 0.913353, 0.369432, 0.0209533, 0.518407, 0.850771, 0.498852, 0.682342, 0.372398, 0.164069, 0.152215, 0.0418867, 0.163156, 0.674762, 0.177445, 0.779985, 0.812932, 0.54534, 0.701747, 0.471655, 0.473376, 0.901218, 0.462879, 0.610169, 0.862822, 0.65, 0.911136, 0.834596, 0.4788, 0.236055, 0.470912, 0.319184, 0.49711, 0.105028, 0.74884, 0.329586, 0.28499, 0.222683, 0.572896, 0.446851, 0.502052, 0.669934, 0.138281, 0.368421, 
0.161535, 0.645895, 0.0415182, 0.158503, 0.910143, 0.120797, 0.871673, 0.507205, 0.639364, 0.646757, 0.801356, 0.745394, 0.538818, 0.315332, 0.893748, 0.170985, 0.454368, 0.179982, 0.556947, 0.92625, 0.876332, 0.796983, 0.597766, 0.118834, 0.288336, 0.00902861, 0.597508, 0.13627, 0.918916, 0.678764, 0.0748315, 0.632011, 0.0165748, 0.0782079, 0.691913, 0.376937, 0.254107, 0.570884, 0.664063, 0.12492, 0.213602, 0.727811, 0.201032, 0.510617, 0.925473, 0.227767, 0.693433, 0.977334, 0.985919, 0.0955417, 0.924307, 0.566221, 0.797246, 0.0834255, 0.863745, 0.0138646, 0.234444, 0.175842, 0.927442, 0.0297915, 0.658783, 0.170299, 0.254717, 0.080356, 0.133733, 0.763135, 0.609988, 0.443638, 0.767418, 0.571825, 0.176435, 0.667801, 0.990997, 0.191781, 0.458619, 0.958853, 0.610158, 0.624669, 0.811651, 0.712555, 0.243122, 0.824353, 0.877148, 0.204267, 0.959502, 0.737188, 0.886727, 0.700872, 0.510737, 0.357619, 0.760414, 0.206601, 0.987029, 0.139643, 0.999218, 0.345107, 0.455409, 0.444326, 0.0753852, 0.745219, 0.734076, 0.908799, 0.0200817, 0.786224, 0.703674, 0.459613, 0.621943, 0.138336, 0.764446, 0.633774, 0.680809, 0.449635, 0.0719145, 0.761931, 0.494351, 0.257352, 0.409355, 0.473932, 0.959385, 0.271198, 0.552431, 0.525504, 0.598449, 0.595013, 0.32543, 0.316847, 0.790654, 0.650512, 0.698785, 0.681656, 0.609515, 0.853596, 0.678602, 0.312949, 0.175424, 0.912109, 0.318198, 0.0746615, 0.548998, 0.772536, 0.0958111, 0.233971, 0.407245, 0.61385, 0.636117, 0.486725, 0.551383, 0.144315, 0.199175, 0.601006, 0.195937, 0.697408, 0.646259, 0.448521, 0.41616, 0.463405, 0.644677, 0.359831, 0.412282, 0.256679, 0.465395, 0.298716, 0.576465, 0.315947, 0.450115, 0.726528, 0.766532, 0.568148, 0.345948, 0.741316, 0.977101, 0.22995, 0.092362, 0.275642, 0.143517, 0.256864, 0.81646, 0.667261, 0.565341, 0.667376, 0.857814, 0.948847, 0.606078, 0.374988, 0.351117, 0.0576456, 0.591925, 0.619963, 0.760534, 0.47216, 0.761783, 0.412779, 0.114894, 0.739858, 0.305439, 0.460004, 0.752199, 0.275571, 0.927707, 0.351944, 0.663135, 0.00334564, 0.582296, 0.0938497, 0.770812, 0.463215, 0.989269, 0.735711, 0.303654, 0.107126, 0.558942, 0.503576, 0.458912, 0.713015, 0.714765, 0.917074, 0.986127, 0.0473922, 0.970784, 0.716859, 0.183494, 0.918147, 0.923901, 0.993174, 0.586096, 0.278638, 0.151621, 0.874674, 0.522265, 0.272625, 0.820591, 0.0666701, 0.107058, 0.648372, 0.760414, 0.884554, 0.059045, 0.171872, 0.294926, 0.0595009, 0.825139, 0.373394, 0.644776, 0.754324, 0.0544411, 0.899969, 0.959131, 0.383885, 0.769346, 0.0776174, 0.318224, 0.377208, 0.376724, 0.369616, 0.59647, 0.578749, 0.409885, 0.531794, 0.36398, 0.70295, 0.189707, 0.897684, 0.221662, 0.299438, 0.110271, 0.676171, 0.558928, 0.532229, 0.771429, 0.405497, 0.771621, 0.715442, 0.178534, 0.984104, 0.111374, 0.0962679, 0.320358, 0.422318, 0.598903, 0.507563, 0.989016, 0.166151, 0.598786, 0.567608, 0.162307, 0.484981, 0.670278, 0.0314683, 0.897491, 0.0720178, 0.755144, 0.424002, 0.189968, 0.279647, 0.649401, 0.690684, 0.430014, 0.168234, 0.471749, 0.908376, 0.318989, 0.378047, 0.470537, 0.676383, 0.214086, 0.0488364, 0.436373, 0.312044, 0.734522, 0.808162, 0.273472, 0.462544, 0.32724, 0.0261346, 0.942444, 0.706858, 0.295395, 0.960221, 0.129805, 0.840379, 0.311513, 0.279213, 0.384314, 0.836912, 0.564845, 0.0953655, 0.979802, 0.35287, 0.923853, 0.376366, 0.303277, 0.0191299, 0.984847, 0.989031, 0.0148695, 0.507571, 0.305033, 0.780096, 0.99445, 0.437325, 0.677658, 0.724669, 0.721161, 0.992662, 0.790512, 0.0212762, 0.773491, 0.139351, 0.682911, 0.838645, 0.716991, 0.965125, 0.12593, 
0.137419, 0.111096, 0.745214, 0.683809, 0.0115195, 0.240242, 0.815814, 0.487353, 0.227699, 0.136303, 0.602072, 0.794579, 0.220322, 0.480858, 0.115225, 0.157831, 0.429576, 0.306031, 0.0233022, 0.749157, 0.229859, 0.275899, 0.973788, 0.50898, 0.329544, 0.237412, 0.576228, 0.176119, 0.0808705, 0.632687, 0.0664463, 0.932916, 0.757007, 0.503162, 0.881075, 0.804913, 0.698248, 0.0966182, 0.985987, 0.0333883, 0.0548697, 0.0313378, 0.370456, 0.373595, 0.0445245, 0.572873, 0.0755044, 0.283707, 0.649732, 0.109311, 0.984903, 0.0381455, 0.0132352, 0.322137, 0.188456, 0.867793, 0.74778, 0.886352, 0.2751, 0.980411, 0.829127, 0.748694, 0.803083, 0.0653596, 0.986497, 0.772165, 0.505447, 0.518452, 0.791482, 0.668375, 0.581173, 0.783544, 0.716063, 0.955502, 0.2883, 0.962456, 0.711843, 0.124468, 0.790633, 0.983245, 0.862075, 0.162591, 0.33502, 0.577174, 0.343908, 0.367677, 0.225406, 0.0430848, 0.334844, 0.462063, 0.864885, 0.717092, 0.169577, 0.605596, 0.735193, 0.5393, 0.287271, 0.277606, 0.650465, 0.220211, 0.935788, 0.630324, 0.979109, 0.0830748, 0.693549, 0.342863, 0.278943, 0.727048, 0.39119, 0.309273, 0.759766, 0.0824832, 0.625185, 0.200219, 0.244177, 0.733927, 0.975549, 0.289963, 0.119918, 0.224893, 0.952579, 0.451229, 0.860666, 0.700345, 0.447346, 0.00181113, 0.952813, 0.959839, 0.36252, 0.530121, 0.461269, 0.54261, 0.883025, 0.871242, 0.460213, 0.362716, 0.976735, 0.36581, 0.290767, 0.919499, 0.638504, 0.422942, 0.300979, 0.261442, 0.244254, 0.939152, 0.646268, 0.79873, 0.22899, 0.337646, 0.450362, 0.704914, 0.306518, 0.354007, 0.687272, 0.655243, 0.244792, 0.799828, 0.0928204, 0.666103, 0.884498, 0.955203, 0.522784, 0.27796, 0.0209405, 0.820827, 0.453013, 0.101963, 0.528367, 0.639644, 0.811813, 0.76978, 0.247719, 0.0354135, 0.905917, 0.601452, 0.459166, 0.191727, 0.438274, 0.670012, 0.299358, 0.622696, 0.627664, 0.999268, 0.343241, 0.703173, 0.794576, 0.895694, 0.313623, 0.598373, 0.496664, 0.687066, 0.754277, 0.346263, 0.69346, 0.598999, 0.654974, 0.0704743, 0.116748, 0.145191, 0.780739, 0.381612, 0.51129, 0.719282, 0.724501, 0.74263, 0.103365, 0.0587005, 0.485191, 0.32691, 0.787762, 0.839321, 0.330616, 0.853304, 0.923892, 0.68512, 0.779334, 0.584438, 0.481314, 0.0105376, 0.303712, 0.316489, 0.427301, 0.560182, 0.737369, 0.572481, 0.495963, 0.167189, 0.23871, 0.854727, 0.627752, 0.389423, 0.852365, 0.380096, 0.322055, 0.8113, 0.454465, 0.992769, 0.98815, 0.272523, 0.602177, 0.122823, 0.432782, 0.101932, 0.79152, 0.679658, 0.0491258, 0.913776, 0.343202, 0.175034, 0.805583, 0.240973, 0.549299, 0.729888, 0.588345, 0.564187, 0.981299, 0.686898, 0.738476, 0.358315, 0.944322, 0.822422, 0.84533, 0.834876, 0.823454, 0.45766, 0.836877, 0.609717, 0.545318, 0.878127, 0.911519, 0.714443, 0.0778165, 0.696779, 0.0303045, 0.368364, 0.584011, 0.612042, 0.536962, 0.588786, 0.577894, 0.024653, 0.744226, 0.944098, 0.539099, 0.857989, 0.514487, 0.433228, 0.446744, 0.0715383, 0.705576, 0.0301129, 0.450418, 0.945505, 0.670988, 0.467019, 0.0876018, 0.660721, 0.754755, 0.0441223, 0.898232, 0.848917, 0.15667, 0.756819, 0.251943, 0.822477, 0.942001, 0.720629, 0.163266, 0.219495, 0.706376, 0.818512, 0.767663, 0.763705, 0.17625, 0.360164, 0.0937215, 0.310482, 0.355263, 0.0528329, 0.934358, 0.223503, 0.276768, 0.0332421, 0.577549, 0.0133042, 0.710759, 0.0246291, 0.0773239, 0.803711, 0.385237, 0.0962977, 0.153847, 0.22997, 0.917154, 0.259281, 0.0829079, 0.162123, 0.669634, 0.199903, 0.763024, 0.00849927, 0.84348, 0.159901, 0.77094, 0.117335, 0.412064, 0.818697, 0.416284, 0.38796, 0.483608, 0.334034, 0.581573, 0.636486, 
0.231355, 0.488069, 0.772965, 0.795275, 0.893995, 0.78465, 0.33249, 0.138793, 0.419504, 0.346615, 0.561949, 0.836677, 0.716381, 0.324875, 0.704881, 0.509466, 0.486337, 0.589495, 0.503, 0.962225, 0.375876, 0.781881, 0.881115, 0.0271652, 0.536851, 0.914244, 0.0898586, 0.681218, 0.0421879, 0.558257, 0.792735, 0.936975, 0.220094, 0.838276, 0.432229, 0.657287, 0.000557041, 0.87876, 0.0462447, 0.0766155, 0.434478, 0.540778, 0.266958, 0.305681, 0.35341, 0.982716, 0.853999, 0.168278, 0.209823, 0.23564, 0.387844, 0.496483, 0.389447, 0.800901, 0.238034, 0.271112, 0.235406, 0.475446, 0.510581, 0.509755, 0.0306164, 0.773892, 0.0682994, 0.593298, 0.70023, 0.305391, 0.527503, 0.986504, 0.468877, 0.363031, 0.590614, 0.670856, 0.117848, 0.270858, 0.297653, 0.798311, 0.0598164, 0.01179, 0.0885904, 0.875096, 0.795287, 0.0327647, 0.887023, 0.49768, 0.66212, 0.742214, 0.363779, 0.44344, 0.768532, 0.412874, 0.632337, 0.242044, 0.0973732, 0.141231, 0.73618, 0.164057, 0.421549, 0.151926, 0.41925, 0.77361, 0.0907304, 0.169231, 0.741289, 0.70494, 0.301844, 0.236005, 0.0108597, 0.988762, 0.977511, 0.617033, 0.827478, 0.908315, 0.509449, 0.971713, 0.480515, 0.0456441, 0.885103, 0.987939, 0.597081, 0.506577, 0.638603, 0.210043, 0.0482548, 0.218189, 0.364151, 0.992051, 0.391723, 0.937119, 0.251871, 0.957992, 0.284341, 0.782485, 0.81023, 0.607596, 0.489848, 0.643257, 0.931433, 0.0258066, 0.126333, 0.42508, 0.0350583, 0.372974, 0.0678014, 0.658473, 0.896184, 0.797337, 0.0153604, 0.773966, 0.25911, 0.409394, 0.545123, 0.27523, 0.226616, 0.00803445, 0.117953, 0.542646, 0.148404, 0.800338, 0.795688, 0.375284, 0.413708, 0.750514, 0.463614, 0.724324, 0.966648, 0.317077, 0.311322, 0.695846, 0.28184, 0.984164, 0.0139568, 0.698512, 0.763251, 0.399808, 0.673187, 0.87423, 0.525077, 0.955267, 0.585872, 0.226821, 0.901486, 0.814495, 0.554943, 0.469786, 0.549271, 0.737929, 0.397005, 0.852182, 0.988831, 0.085674, 0.309818, 0.790014, 0.0789381, 0.768044, 0.898577, 0.679431, 0.972011, 0.920214, 0.18101, 0.275299, 0.531952, 0.652698, 0.100951, 0.0367618, 0.605456, 0.0197282, 0.910065, 0.711466, 0.84257, 0.27857, 0.953812, 0.855451, 0.547478, 0.648171, 0.684109, 0.98443, 0.756279, 0.0136925, 0.792284, 0.640487, 0.913982, 0.401468, 0.58301, 0.200603, 0.550865, 0.908127, 0.360074, 0.717536, 0.214011, 0.306599, 0.729123, 0.366054, 0.167277, 0.250172, 0.808669, 0.754737, 0.164529, 0.501607, 0.894202, 0.216327, 0.0453906, 0.446236, 0.48948, 0.67534, 0.830515, 0.473338, 0.976086, 0.23739, 0.827288, 0.98029, 0.660839, 0.63133, 0.480636, 0.814671, 0.335541, 0.598103, 0.835169, 0.574495, 0.462844, 0.218605, 0.196126, 0.178365, 0.428876, 0.729528, 0.865073, 0.314056, 0.185457, 0.653795, 0.436069, 0.678548, 0.848748, 0.470352, 0.205871, 0.0437198, 0.292583, 0.49679, 0.666188, 0.40126, 0.877809, 0.08233, 0.474202, 0.800116, 0.168554, 0.686721, 0.901693, 0.0413626, 0.474672, 0.463282, 0.864081, 0.570513, 0.910049, 0.464346, 0.898423, 0.645147, 0.115714, 0.189242, 0.869391, 0.546112, 0.729097, 0.568821, 0.336011, 0.968012, 0.0388515, 0.75613, 0.744529, 0.857462, 0.269142, 0.0672414, 0.454014, 0.500959, 0.987362, 0.737475, 0.404587, 0.345277, 0.518067, 0.245681, 0.140775, 0.927901, 0.166827, 0.850512, 0.918311, 0.988633, 0.767776, 0.467392, 0.051576, 0.404199, 0.373537, 0.777711, 0.926308, 0.437101, 0.596457, 0.968791, 0.323633, 0.468815, 0.973583, 0.37288, 0.80413, 0.29805, 0.199406, 0.307082, 0.940399, 0.881414, 0.477804, 0.957487, 0.00851349, 0.051291, 0.068923, 0.0366955, 0.336837, 0.707748, 0.0157435, 0.937564, 0.143265, 0.247996, 0.145816, 
0.969541, 0.0425528, 0.0957087, 0.79162, 0.719365, 0.184057, 0.00303612, 0.164905, 0.419136, 0.185199, 0.593213, 0.151001, 0.146827, 0.338696, 0.344155, 0.227585, 0.00513494, 0.460116, 0.98544, 0.552033, 0.128287, 0.23807, 0.56195, 0.824506, 0.189258, 0.050227, 0.60951, 0.51519, 0.487767, 0.0468177, 0.880155, 0.451121, 0.274183, 0.199084, 0.389995, 0.203297, 0.209891, 0.322086, 0.80289, 0.0594358, 0.56246, 0.507736, 0.937147, 0.723294, 0.322261, 0.579476, 0.315868, 0.781141, 0.920919, 0.755381, 0.785117, 0.640189, 0.893114, 0.159824, 0.391023, 0.605418, 0.844595, 0.131717, 0.453106, 0.0354193, 0.782969, 0.937506, 0.21625, 0.907617, 0.898652, 0.981229, 0.551583, 0.273739, 0.186611, 0.649192, 0.686602, 0.922376, 0.402637, 0.441532, 0.207293, 0.651441, 0.556587, 0.762731, 0.89997, 0.731379, 0.862775, 0.427437, 0.309512, 0.661231, 0.800463, 0.884022, 0.338439, 0.159422, 0.522647, 0.733258, 0.756267, 0.793723, 0.537743, 0.647818, 0.449444, 0.939879, 0.0966314, 0.201966, 0.904184, 0.224987, 0.783014, 0.409826, 0.258782, 0.719051, 0.892906, 0.918176, 0.237974, 0.143519, 0.480685, 0.946013, 0.0711188, 0.229829, 0.929101, 0.572001, 0.842208, 0.632482, 0.680829, 0.0865911, 0.761439, 0.89428, 0.592355, 0.0671137, 0.98381, 0.853176, 0.824508, 0.267632, 0.892326, 0.32836, 0.418551, 0.346941, 0.882806, 0.309085, 0.282074, 0.982606, 0.261556, 0.249706, 0.0223953, 0.327736, 0.167582, 0.556684, 0.484188, 0.0893175, 0.255542, 0.457398, 0.334597, 0.255648, 0.0795601, 0.386407, 0.988893, 0.830851, 0.193508, 0.310472, 0.654364, 0.0670938, 0.112389, 0.880854, 0.353868, 0.690126, 0.255283, 0.390743, 0.332614, 0.0466242, 0.292756, 0.860734, 0.623766, 0.539129, 0.515861, 0.75701, 0.832037, 0.84632, 0.438671, 0.221459, 0.156299, 0.734671, 0.311118, 0.068164, 0.169287, 0.44764, 0.578644, 0.964037, 0.589212, 0.532715, 0.227103, 0.754949, 0.763223, 0.627145, 0.888786, 0.514252, 0.490693, 0.0111548, 0.0732617, 0.151022, 0.549105, 0.516264, 0.862733, 0.375128, 0.870217, 0.831778, 0.910208, 0.23067, 0.906488, 0.954314, 0.870618, 0.133721, 0.952172, 0.946163, 0.80043, 0.597841, 0.811577, 0.60672, 0.867176, 0.353186, 0.275191, 0.23565, 0.882186, 0.551459, 0.589618, 0.879264, 0.948948, 0.0572349, 0.36607, 0.519015, 0.31113, 0.543603, 0.449493, 0.616014, 0.989401, 0.839872, 0.667913, 0.490344, 0.701474, 0.744178, 0.234312, 0.544897, 0.0978574, 0.481315, 0.922963, 0.102181, 0.455316, 0.526248, 0.268792, 0.322846, 0.878355, 0.584459, 0.359044, 0.31167, 0.37407, 0.266124, 0.503871, 0.414904, 0.635525, 0.9518, 0.509755, 0.169408, 0.0472026, 0.667826, 0.992603, 0.251529, 0.114493, 0.679204, 0.480373, 0.18788, 0.870213, 0.992654, 0.534436, 0.941352, 0.0244387, 0.780228, 0.42067, 0.632817, 0.660344, 0.35694, 0.0250982, 0.217571, 0.414648, 0.492479, 0.207949, 0.599503, 0.297134, 0.563982, 0.437704, 0.899191, 0.758265, 0.0920194, 0.571411, 0.124159, 0.519195, 0.404076, 0.907871, 0.131346, 0.226601, 0.978353, 0.897656, 0.366342, 0.550962, 0.487607, 0.293029, 0.184642, 0.0524754, 0.567065, 0.978196, 0.318724, 0.478241, 0.706945, 0.419912, 0.756709, 0.421248, 0.654794, 0.140818, 0.0487424, 0.017824, 0.495759, 0.538872, 0.717103, 0.0712224, 0.824614, 0.639925, 0.578716, 0.12625, 0.0856402, 0.205179, 0.999377, 0.0688811, 0.410998, 0.810521, 0.669731, 0.33862, 0.394065, 0.533385, 0.142005, 0.443029, 0.290606, 0.929948, 0.901129, 0.476085, 0.621577, 0.637656, 0.608621, 0.762401, 0.58521, 0.663044, 0.300784, 0.816918, 0.509167, 0.080701, 0.442533, 0.062607, 0.52675, 0.544716, 0.341901, 0.394151, 0.150043, 0.268943, 0.105657, 0.515799, 
0.975377, 0.283571, 0.67471, 0.380002, 0.841592, 0.311632, 0.0575393, 0.389877, 0.26193, 0.538756, 0.316056, 0.263691, 0.20521, 0.354642, 0.296586, 0.551078, 0.575484, 0.0839746, 0.392119, 0.756555, 0.502667, 0.458518, 0.00460051, 0.266237, 0.880153, 0.537387, 0.342953, 0.0895121, 0.47646, 0.707822, 0.892626, 0.957136, 0.570837, 0.505494, 0.658229, 0.684948, 0.921795, 0.894423, 0.13942, 0.44968, 0.825582, 0.902669, 0.376736, 0.0870701, 0.565238, 0.270674, 0.708481, 0.830713, 0.902815, 0.414868, 0.549203, 0.464674, 0.245451, 0.435281, 0.00176975, 0.754087, 0.735445, 0.0837703, 0.303677, 0.411996, 0.0742324, 0.425401, 0.572173, 0.0837011, 0.579745, 0.929229, 0.762363, 0.865219, 0.769798, 0.116447, 0.661699, 0.16947, 0.39375, 0.84314, 0.0903207, 0.788996, 0.398276, 0.00100532, 0.448497, 0.165718, 0.197389, 0.962928, 0.644996, 0.637676, 0.871885, 0.230402, 0.922665, 0.588452, 0.993352, 0.76889, 0.0436841, 0.972861, 0.802777, 0.467909, 0.86727, 0.184278, 0.789986, 0.457721, 0.325289, 0.794625, 0.443401, 0.308017, 0.715126, 0.517875, 0.693416, 0.736179, 0.316858, 0.142924, 0.721139, 0.521596, 0.236532, 0.507966, 0.608778, 0.96292, 0.558893, 0.651695, 0.137494, 0.755832, 0.771333, 0.878684, 0.405341, 0.00297893, 0.86097, 0.473275, 0.596905, 0.451374, 0.205335, 0.235015, 0.616377, 0.66013, 0.906755, 0.804357, 0.632587, 0.194849, 0.61264, 0.184936, 0.0597735, 0.303249, 0.949588, 0.0657283, 0.777175, 0.684766, 0.0628117, 0.611167, 0.183913, 0.745564, 0.833274, 0.135848, 0.981644, 0.551356, 0.905484, 0.169549, 0.272377, 0.681274, 0.797146, 0.27877, 0.301439, 0.410783, 0.285266, 0.144248, 0.833021, 0.611345, 0.524433, 0.829463, 0.0606167, 0.292792, 0.830006, 0.301647, 0.492409, 0.0382006, 0.997249, 0.50477, 0.683585, 0.996514, 0.560837, 0.767123, 0.00676823, 0.101117, 0.810901, 0.146895, 0.868268, 0.542988, 0.133829, 0.852774, 0.920998, 0.117943, 0.741353, 0.288714, 0.0230389, 0.401274, 0.796168, 0.577941, 0.259857, 0.552797, 0.197413, 0.842157, 0.832985, 0.125974, 0.0554495, 0.317629, 0.240185, 0.216632, 0.403821, 0.554579, 0.955922, 0.173677, 0.286429, 0.621043, 0.724967, 0.666616, 0.0555232, 0.265642, 0.85446, 0.860972, 0.281179, 0.37778, 0.814466, 0.785219, 0.767731, 0.360001, 0.7588, 0.669505, 0.779305, 0.471352, 0.275894, 0.773946, 0.387764, 0.353426, 0.815332, 0.610091, 0.0198395, 0.883935, 0.460346, 0.162273, 0.997625, 0.951337, 0.695928, 0.759933, 0.401278, 0.856218, 0.276184, 0.504109, 0.76299, 0.825496, 0.340869, 0.245508, 0.758234, 0.295632, 0.717019, 0.520062, 0.563225, 0.284643, 0.339057, 0.909208, 0.919052, 0.675407, 0.0378963, 0.67125, 0.618967, 0.969515, 0.845679, 0.863896, 0.915108, 0.675248, 0.663042, 0.751497, 0.296159, 0.731409, 0.0285581, 0.429313, 0.515649, 0.638039, 0.458869, 0.131941, 0.979324, 0.798137, 0.862869, 0.900293, 0.732474, 0.0705629, 0.270529, 0.498765, 0.607826, 0.500047, 0.676414, 0.756136, 0.190196, 0.686842, 0.65908, 0.76664, 0.831188, 0.887319, 0.191147, 0.98713, 0.736585, 0.971298, 0.342997, 0.912222, 0.628565, 0.345245, 0.691291, 0.899544, 0.79744, 0.16101, 0.425983, 0.572252, 0.788322, 0.417363, 0.244534, 0.488441, 0.921632, 0.37453, 0.736699, 0.352986, 0.674132, 0.780165, 0.642586, 0.730052, 0.787112, 0.728426, 0.572247, 0.03463, 0.68244, 0.73817, 0.75052, 0.537633, 0.939482, 0.995564, 0.0787703, 0.00219422, 0.380415, 0.811731, 0.0605845, 0.138851, 0.729498, 0.199837, 0.399193, 0.472924, 0.695298, 0.413797, 0.101758, 0.562025, 0.0636009, 0.394252, 0.318197, 0.481093, 0.735629, 0.883258, 0.511363, 0.0960127, 0.570261, 0.168865, 0.789992, 0.409975, 0.408898, 
0.60817, 0.710565, 0.534163, 0.292034, 0.222824, 0.495537, 0.892074, 0.478677, 0.389163, 0.597385, 0.837123, 0.672154, 0.682096, 0.32456, 0.0964614, 0.0890683, 0.486375, 0.667079, 0.454212, 0.421606, 0.966446, 0.0669027, 0.0634238, 0.395007, 0.402031, 0.548816, 0.16586, 0.904213, 0.440969, 0.216063, 0.629673, 0.932029, 0.81584, 0.053853, 0.797816, 0.169428, 0.132681, 0.594715, 0.812022, 0.549036, 0.842683, 0.420224, 0.404203, 0.0583616, 0.0230985, 0.40045, 0.125443, 0.503097, 0.278942, 0.476684, 0.912539, 0.474602, 0.21935, 0.54441, 0.345139, 0.994644, 0.566553, 0.0520835, 0.808824, 0.603088, 0.415181, 0.546723, 0.0136869, 0.612214, 0.796772, 0.629557, 0.979886, 0.383687, 0.044898, 0.278622, 0.343954, 0.216075, 0.966239, 0.422888, 0.170783, 0.514171, 0.401367, 0.543856, 0.595471, 0.82722, 0.283978, 0.0922692, 0.932564, 0.0598004, 0.594301, 0.116594, 0.203614, 0.351593, 0.0684048, 0.0777549, 0.879696, 0.00430396, 0.909826, 0.0465579, 0.33161, 0.0236601, 0.17313, 0.365709, 0.408294, 0.601167, 0.482956, 0.372955, 0.400157, 0.0255608, 0.715832, 0.928293, 0.590281, 0.930925, 0.314273, 0.357687, 0.451469, 0.32756, 0.120092, 0.410322, 0.95425, 0.845381, 0.0681171, 0.372794, 0.342892, 0.732141, 0.867536, 0.874434, 0.85772, 0.60432, 0.0885038, 0.404264, 0.398074, 0.119433, 0.638316, 0.755464, 0.709284, 0.628798, 0.835598, 0.904575, 0.336293, 0.714058, 0.554574, 0.900748, 0.46594, 0.59166, 0.664044, 0.388797, 0.277636, 0.651409, 0.83225, 0.464688, 0.85777, 0.440545, 0.539436, 0.2756, 0.232104, 0.370369, 0.0403602, 0.465258, 0.109327, 0.0761089, 0.632137, 0.649382, 0.769321, 0.385889, 0.727804, 0.722116, 0.316823, 0.55617, 0.178259, 0.82641, 0.825978, 0.959284, 0.00369669, 0.678182, 0.484151, 0.778799, 0.802664, 0.686154, 0.668922, 0.753168, 0.58172, 0.940841, 0.150399, 0.310802, 0.478986, 0.86857, 0.0529694, 0.628518, 0.816007, 0.638773, 0.0276212, 0.504084, 0.00824906, 0.206548, 0.959743, 0.453159, 0.302913, 0.465664, 0.837696, 0.307528, 0.546657, 0.606967, 0.719644, 0.764328, 0.268436, 0.179318, 0.000796436, 0.0185983, 0.702706, 0.237367, 0.295528, 0.657002, 0.487574, 0.274441, 0.947329, 0.816869, 0.755226, 0.832704, 0.508501, 0.759611, 0.743919, 0.552021, 0.584527, 0.353846, 0.997122, 0.273265, 0.528766, 0.694072, 0.278199, 0.270164, 0.507998, 0.569064, 0.71648, 0.520015, 0.494647, 0.547632, 0.053048, 0.54686, 0.460557, 0.893281, 0.0826122, 0.780413, 0.318118, 0.327702, 0.948741, 0.749454, 0.379775, 0.197342, 0.441201, 0.487044, 0.102453, 0.62077, 0.208023, 0.59954, 0.437963, 0.645729, 0.280912, 0.23066, 0.895854, 0.158095, 0.365078, 0.0328177, 0.331309, 0.852248, 0.713241, 0.805001, 0.68867, 0.126665, 0.66136, 0.0456048, 0.275175, 0.728597, 0.876747, 0.703472, 0.569631, 0.991902, 0.580252, 0.222799, 0.721635, 0.389082, 0.34485, 0.898645, 0.780462, 0.519908, 0.538189, 0.774642, 0.833867, 0.93958, 0.926064, 0.411182, 0.539883, 0.854669, 0.773631, 0.0848835, 0.89232, 0.025475, 0.350189, 0.176215, 0.0607147, 0.541975, 0.822398, 0.984168, 0.796827, 0.526517, 0.147365, 0.461512, 0.880727, 0.0231515, 0.962392, 0.617255, 0.258091, 0.84885, 0.393485, 0.704886, 0.922723, 0.688269, 0.407139, 0.935418, 0.571042, 0.11942, 0.982132, 0.824296, 0.312048, 0.103785, 0.983351, 0.16652, 0.271652, 0.57492, 0.200661, 0.611072, 0.836853, 0.745464, 0.414009, 0.772654, 0.579234, 0.0370925, 0.283828, 0.803075, 0.221144, 0.0312848, 0.598398, 0.965683, 0.0336897, 0.107113, 0.729821, 0.087041, 0.543042, 0.358967, 0.518301, 0.156004, 0.306046, 0.633751, 0.199647, 0.522865, 0.107152, 0.169999, 0.431278, 0.978252, 0.193718, 
0.943982, 0.237834, 0.2771, 0.9646, 0.762601, 0.207008, 0.645238, 0.150583, 0.423062, 0.717546, 0.0563815, 0.962171, 0.638373, 0.705042, 0.534317, 0.58723, 0.554048, 0.238623, 0.295577, 0.885911, 0.0529201, 0.898848, 0.370815, 0.583385, 0.996378, 0.618232, 0.641957, 0.316822, 0.56646, 0.421175, 0.0101171, 0.292266, 0.574045, 0.615887, 0.591589, 0.681195, 0.551657, 0.80939, 0.86992, 0.283634, 0.963932, 0.286, 0.68426, 0.177203, 0.0885207, 0.618486, 0.133223, 0.437307, 0.952512, 0.939701, 0.351882, 0.956029, 0.360258, 0.931521, 0.505386, 0.33155, 0.855847, 0.110569, 0.594976, 0.175584, 0.200721, 0.519908, 0.598791, 0.718774, 0.802696, 0.418697, 0.0505992, 0.82239, 0.82677, 0.737835, 0.997216, 0.87038, 0.469017, 0.213352, 0.0833311, 0.955601, 0.732695, 0.43127, 0.314403, 0.108329, 0.782133, 0.748727, 0.559826, 0.978846, 0.858408, 0.6793, 0.993276, 0.809714, 0.583099, 0.309991, 0.0698749, 0.868124, 0.0655025, 0.148763, 0.786372, 0.173378, 0.143929, 0.129673, 0.870449, 0.423094, 0.996349, 0.932729, 0.351846, 0.944564, 0.186328, 0.458144, 0.646232, 0.840148, 0.0521953, 0.490151, 0.861728, 0.945871, 0.123322, 0.0219116, 0.694025, 0.549486, 0.787336, 0.731366, 0.95245, 0.765569, 0.337117, 0.978759, 0.981853, 0.596123, 0.426706, 0.546525, 0.265562, 0.453131, 0.208441, 0.549061, 0.750872, 0.093735, 0.888499, 0.994944, 0.177307, 0.254411, 0.457744, 0.34775, 0.133779, 0.929043, 0.0944183, 0.138461, 0.218889, 0.63263, 0.880749, 0.211545, 0.567309, 0.913402, 0.282153, 0.43556, 0.630123, 0.965008, 0.692314, 0.772579, 0.0808181, 0.46651, 0.974586, 0.638555, 0.460937, 0.905597, 0.19107, 0.17764, 0.894252, 0.414824, 0.352195, 0.207721, 0.73078, 0.609946, 0.590582, 0.410915, 0.488869, 0.825836, 0.737748, 0.326396, 0.800481, 0.253808, 0.761464, 0.44629, 0.802371, 0.48098, 0.754115, 0.864285, 0.179325, 0.238373, 0.138004, 0.27962, 0.932364, 0.80228, 0.0271451, 0.987216, 0.629104, 0.0498651, 0.995528, 0.600753, 0.175326, 0.474049, 0.0378203, 0.83367, 0.13611, 0.13769, 0.139143, 0.914651, 0.243489, 0.77545, 0.161589, 0.58287, 0.687041, 0.247851, 0.18179, 0.910724, 0.823587, 0.0550599, 0.427548, 0.936595, 0.0205633, 0.0179543, 0.663135, 0.270309, 0.080666, 0.83573, 0.604453, 0.766691, 0.010323, 0.957523, 0.390917, 0.159005, 0.448106, 0.431302, 0.137943, 0.571494, 0.0941859, 0.971891, 0.44257, 0.596926, 0.815116, 0.209047, 0.528437, 0.868364, 0.857529, 0.265967, 0.0264147, 0.427806, 0.479732, 0.936526, 0.134279, 0.932668, 0.842761, 0.950844, 0.928037, 0.902087, 0.631585, 0.242776, 0.0651731, 0.703864, 0.570125, 0.395168, 0.00144219, 0.198232, 0.0439579, 0.103995, 0.209509, 0.977651, 0.522272, 0.0326016, 0.0587909, 0.714106, 0.375312, 0.600653, 0.799133, 0.256873, 0.0646299, 0.226333, 0.481605, 0.197983, 0.877393, 0.819033, 0.117936, 0.773533, 0.309804, 0.322527, 0.485049, 0.29339, 0.578335, 0.661432, 0.190533, 0.111928, 0.307344, 0.336837, 0.231922, 0.0420848, 0.514988, 0.985584, 0.59564, 0.859343, 0.428762, 0.918974, 0.782172, 0.881984, 0.616943, 0.709915, 0.143853, 0.516046, 0.84039, 0.0616408, 0.856248, 0.0715974, 0.178598, 0.0788329, 0.214251, 0.501827, 0.10165, 0.149686, 0.568122, 0.526522, 0.588385, 0.649552, 0.802502, 0.914132, 0.184306, 0.79369, 0.00410956, 0.599341, 0.761062, 0.317949, 0.572229, 0.772129, 0.414773, 0.867983, 0.275066, 0.577239, 0.86939, 0.126182, 0.530422, 0.536293, 0.216343, 0.433691, 0.577769, 0.4999, 0.211479, 0.690396, 0.908978, 0.357924, 0.475413, 0.888614, 0.594871, 0.805686, 0.824012, 0.565476, 0.447597, 0.742646, 0.601787, 0.214971, 0.414767, 0.814431, 0.860898, 0.660798, 
0.923988, 0.873648, 0.475515, 0.271393, 0.445084, 0.401211, 0.824812, 0.526915, 0.506517, 0.838271, 0.552343, 0.410306, 0.976452, 0.160352, 0.145517, 0.329589, 0.508718, 0.433945, 0.62697, 0.666669, 0.301417, 0.600519, 0.140122, 0.0342462, 0.749468, 0.362294, 0.573222, 0.940565, 0.606496, 0.800919, 0.0969009, 0.612881, 0.328299, 0.202667, 0.478386, 0.061613, 0.0408247, 0.113888, 0.264164, 0.181642, 0.830401, 0.919018, 0.707304, 0.660754, 0.173174, 0.725508, 0.258154, 0.0935418, 0.880605, 0.979335, 0.979478, 0.463536, 0.775126, 0.521115, 0.381796, 0.813129, 0.879217, 0.0569293, 0.64193, 0.0681011, 0.421784, 0.1403, 0.94013, 0.10248, 0.649367, 0.941418, 0.670194, 0.888578, 0.765968, 0.320945, 0.685102, 0.852353, 0.389216, 0.834381, 0.786971, 0.4331, 0.0486131, 0.460831, 0.812581, 0.588036, 0.0386062, 0.769703, 0.605088, 0.722163, 0.0314492, 0.832922, 0.903044, 0.231055, 0.728042, 0.110897, 0.431594, 0.309462, 0.11294, 0.0670814, 0.625663, 0.425656, 0.177113, 0.438086, 0.951453, 0.415572, 0.824223, 0.31294, 0.957691, 0.808175, 0.659531, 0.397838, 0.501979, 0.139841, 0.838215, 0.588624, 0.183401, 0.835496, 0.268936, 0.320515, 0.808754, 0.881535, 0.641655, 0.868639, 0.538035, 0.383658, 0.81925, 0.244493, 0.868118, 0.108817, 0.930293, 0.625395, 0.821832, 0.189743, 0.751963, 0.544101, 0.505598, 0.537047, 0.661969, 0.933226, 0.466824, 0.47114, 0.918811, 0.750851, 0.671081, 0.821295, 0.945188, 0.943402, 0.306194, 0.875262, 0.344571, 0.28533, 0.358017, 0.510657, 0.819928, 0.811225, 0.575014, 0.200153, 0.442748, 0.781176, 0.117917, 0.0319228, 0.330275, 0.430613, 0.395767, 0.941447, 0.249399, 0.55878, 0.976363, 0.707941, 0.539029, 0.929557, 0.661428, 0.531866, 0.216032, 0.775277, 0.613841, 0.630223, 0.690056, 0.159605, 0.172662, 0.648255, 0.627551, 0.890362, 0.820532, 0.166492, 0.872343, 0.4956, 0.427278, 0.768214, 0.0719299, 0.987919, 0.360018, 0.288648, 0.409812, 0.386399, 0.00474841, 0.843279, 0.300295, 0.367316, 0.521666, 0.00945934, 0.928267, 0.716858, 0.269648, 0.23232, 0.86501, 0.374387, 0.612416, 0.953231, 0.226648, 0.0856993, 0.160165, 0.0797144, 0.221482, 0.431156, 0.413296, 0.358701, 0.418836, 0.283429, 0.534095, 0.367291, 0.394108, 0.667079, 0.456069, 0.78066, 0.155554, 0.132467, 0.394805, 0.595467, 0.0211626, 0.651095, 0.655613, 0.177805, 0.124999, 0.807011, 0.634552, 0.0271443, 0.288895, 0.906153, 0.448306, 0.0305001, 0.502583, 0.770978, 0.594258, 0.0445926, 0.76084, 0.440544, 0.137063, 0.662651, 0.711555, 0.653516, 0.0763224, 0.637551, 0.0517443, 0.0645197, 0.913255, 0.555985, 0.133238, 0.980545, 0.290299, 0.303275, 0.300487, 0.467988, 0.338358, 0.602253, 0.175903, 0.658607, 0.137025, 0.463191, 0.0167984, 0.533693, 0.458243, 0.284057, 0.0650987, 0.700486, 0.122906, 0.832178, 0.858904, 0.833125, 0.885373, 0.415651, 0.545531, 0.113715, 0.482108, 0.118734, 0.96855, 0.965216, 0.981809, 0.845925, 0.714154, 0.7886, 0.836292, 0.415452, 0.781196, 0.904018, 0.243951, 0.7855, 0.670328, 0.713856, 0.126852, 0.169495, 0.794933, 0.239344, 0.794406, 0.523669, 0.117212, 0.790886, 0.439327, 0.376101, 0.271754, 0.0610157, 0.86614, 0.0221538, 0.89961, 0.632884, 0.923022, 0.231558, 0.315743, 0.7219, 0.549763, 0.337929, 0.312484, 0.962081, 0.36933, 0.948569, 0.801396, 0.410944, 0.425275, 0.532603, 0.0630479, 0.690848, 0.223495, 0.149753, 0.922555, 0.686001, 0.793803, 0.972618, 0.896042, 0.612586, 0.204122, 0.700157, 0.871581, 0.00910096, 0.373863, 0.236761, 0.302365, 0.505682, 0.420966, 0.557633, 0.607625, 0.0651274, 0.627289, 0.331521, 0.185134, 0.518712, 0.308724, 0.811741, 0.251559, 0.568634, 0.914078, 
0.604802, 0.164206, 0.381585, 0.4562, 0.395127, 0.740485, 0.326243, 0.160696, 0.483046, 0.2234, 0.0760913, 0.091152, 0.116994, 0.673546, 0.651748, 0.232191, 0.172033, 0.830639, 0.533481, 0.651716, 0.0933954, 0.258903, 0.199218, 0.319521, 0.952921, 0.538796, 0.607854, 0.493173, 0.313999, 0.097214, 0.137803, 0.0880292, 0.582761, 0.0501668, 0.264903, 0.732472, 0.039704, 0.00950285, 0.688666, 0.885808, 0.562967, 0.663141, 0.0436381, 0.381255, 0.536207, 0.627869, 0.501329, 0.671545, 0.66127, 0.473033, 0.732092, 0.203037, 0.876202, 0.601081, 0.524588, 0.421899, 0.691385, 0.869965, 0.944745, 0.377468, 0.0368267, 0.896866, 0.90816, 0.138054, 0.287083, 0.335218, 0.941943, 0.0479425, 0.470131, 0.00903862, 0.275327, 0.573073, 0.513652, 0.728636, 0.459603, 0.847832, 0.0503086, 0.748406, 0.706157, 0.517583, 0.0141996, 0.414979, 0.08405, 0.402525, 0.233583, 0.0878202, 0.209633, 0.496619, 0.901065, 0.0438425, 0.538073, 0.423226, 0.408765, 0.915521, 0.182872, 0.61773, 0.853257, 0.636302, 0.764843, 0.278297, 0.351968, 0.209274, 0.400615, 0.491891, 0.454867, 0.937252, 0.392006, 0.307152, 0.160138, 0.647409, 0.313708, 0.904029, 0.0805075, 0.123685, 0.541003, 0.62969, 0.966634, 0.789415, 0.89396, 0.326296, 0.837853, 0.478753, 0.794071, 0.420256, 0.971584, 0.259005, 0.530313, 0.0114982, 0.162983, 0.464409, 0.864759, 0.5617, 0.332869, 0.0481572, 0.701619, 0.0315991, 0.640619, 0.160601, 0.0622128, 0.96043, 0.983107, 0.953527, 0.864518, 0.218876, 0.954606, 0.428151, 0.903055, 0.568602, 0.937188, 0.98872, 0.85357, 0.030049, 0.767508, 0.830182, 0.512175, 0.590847, 0.662972, 0.0807161, 0.158365, 0.999249, 0.567872, 0.954536, 0.642529, 0.830874, 0.957597, 0.402465, 0.677928, 0.612069, 0.696666, 0.840936, 0.595151, 0.578211, 0.685434, 0.944613, 0.239791, 0.251588, 0.188414, 0.288159, 0.630142, 0.315011, 0.977375, 0.15699, 0.716794, 0.214411, 0.573798, 0.811806, 0.81383, 0.959703, 0.899141, 0.608386, 0.221686, 0.984997, 0.904847, 0.151414, 0.588994, 0.515622, 0.111433, 0.917686, 0.972467, 0.581991, 0.392258, 0.633952, 0.524463, 0.381312, 0.817605, 0.980241, 0.318238, 0.53001, 0.27213, 0.467488, 0.294794, 0.798982, 0.669384, 0.719301, 0.958382, 0.804702, 0.141455, 0.560358, 0.542309, 0.00725739, 0.446341, 0.249821, 0.949181, 0.941526, 0.993123, 0.0814309, 0.156428, 0.296623, 0.763557, 0.859472, 0.720457, 0.733383, 0.569087, 0.0409618, 0.309238, 0.332018, 0.262818, 0.721738, 0.0768555, 0.746524, 0.850146, 0.714619, 0.162055, 0.917288, 0.785032, 0.554666, 0.320327, 0.439398, 0.957625, 0.944559, 0.0686576, 0.257077, 0.970626, 0.62337, 0.867643, 0.667705, 0.93922, 0.00102708, 0.00206041, 0.595461, 0.432175, 0.367634, 0.649383, 0.402521, 0.3633, 0.261763, 0.587614, 0.504025, 0.190015, 0.627813, 0.397097, 0.788601, 0.725343, 0.554026, 0.83152, 0.495644, 0.277014, 0.388641, 0.762496, 0.374025, 0.86129, 0.0407293, 0.584523, 0.544645, 0.871006, 0.617099, 0.447033, 0.710581, 0.84883, 0.481444, 0.311391, 0.581752, 0.344506, 0.745902, 0.0426611, 0.162753, 0.704493, 0.250898, 0.68663, 0.796968, 0.581858, 0.991296, 0.704103, 0.732113, 0.428668, 0.622033, 0.617142, 0.422555, 0.526939, 0.939926, 0.48753, 0.607909, 0.0481371, 0.708371, 0.026737, 0.235826, 0.0291002, 0.119347, 0.211346, 0.230851, 0.279482, 0.519222, 0.39127, 0.626721, 0.603084, 0.339094, 0.142595, 0.696528, 0.284544, 0.24334, 0.573797, 0.203033, 0.180646, 0.7183, 0.675145, 0.326594, 0.765995, 0.545793, 0.885235, 0.229569, 0.396105, 0.963607, 0.29157, 0.100511, 0.380513, 0.297258, 0.36894, 0.306344, 0.80289, 0.893432, 0.766634, 0.227175, 0.287085, 0.606253, 0.575777, 
0.337349, 0.71287, 0.565895, 0.88872, 0.631308, 0.142192, 0.343971, 0.290115, 0.172617, 0.125252, 0.850873, 0.375555, 0.132906, 0.209471, 0.941131, 0.0870937, 0.830269, 0.406239, 0.110883, 0.558027, 0.643672, 0.204696, 0.281936, 0.193102, 0.981494, 0.293286, 0.762034, 0.119543, 0.500904, 0.0199665, 0.781367, 0.813823, 0.685262, 0.270013, 0.317096, 0.123699, 0.842954, 0.164216, 0.0241652, 0.589907, 0.707067, 0.894229, 0.275417, 0.83801, 0.690485, 0.834596, 0.617023, 0.0834039, 0.603464, 0.623476, 0.436211, 0.980066, 0.0226747, 0.79512, 0.235173, 0.354364, 0.281109, 0.917475, 0.782733, 0.98023, 0.282638, 0.437491, 0.733053, 0.468777, 0.470778, 0.234974, 0.851907, 0.514415, 0.554819, 0.461715, 0.98942, 0.643724, 0.0663051, 0.390555, 0.038522, 0.174123, 0.697024, 0.819552, 0.280796, 0.709868, 0.860632, 0.906449, 0.374823, 0.719379, 0.735071, 0.798275, 0.291949, 0.695889, 0.380973, 0.125914, 0.954415, 0.239392, 0.226089, 0.177524, 0.829654, 0.648409, 0.40236, 0.204048, 0.861321, 0.318195, 0.444191, 0.946016, 0.933354, 0.695845, 0.157571, 0.3451, 0.828508, 0.263626, 0.514817, 0.946433, 0.572556, 0.595014, 0.954246, 0.578397, 0.274419, 0.398533, 0.677832, 0.756134, 0.518582, 0.934838, 0.926696, 0.408439, 0.357739, 0.333876, 0.559884, 0.614297, 0.371403, 0.11742, 0.960378, 0.883136, 0.481486, 0.184133, 0.58541, 0.607515, 0.0308657, 0.917573, 0.0647184, 0.258382, 0.18783, 0.924871, 0.686562, 0.657322, 0.431049, 0.241608, 0.147015, 0.920292, 0.383998, 0.304657, 0.282625, 0.302467, 0.546403, 0.436642, 0.91491, 0.717142, 0.217742, 0.158671, 0.814419, 0.837199, 0.994399, 0.263271, 0.581781, 0.12564, 0.137798, 0.0892224, 0.404939, 0.858036, 0.176193, 0.856965, 0.00815598, 0.449133, 0.129093, 0.0774662, 0.873704, 0.444002, 0.138117, 0.944524, 0.622295, 0.902876, 0.0370288, 0.418928, 0.659432, 0.0333929, 0.267613, 0.976323, 0.211061, 0.170965, 0.80337, 0.140518, 0.0104873, 0.0498906, 0.529262, 0.464673, 0.867122, 0.975732, 0.199705, 0.187325, 0.0741305, 0.551017, 0.470653, 0.0212216, 0.808663, 0.771354, 0.596296, 0.724894, 0.793356, 0.336402, 0.309215, 0.255515, 0.710968, 0.363766, 0.805144, 0.998737, 0.76862, 0.98081, 0.507679, 0.582929, 0.981082, 0.507978, 0.0752734, 0.0377054, 0.938455, 0.525395, 0.140897, 0.355304, 0.662316, 0.599575, 0.802774, 0.553816, 0.591719, 0.410345, 0.745233, 0.722667, 0.509046, 0.908693, 0.850967, 0.0324016, 0.924513, 0.157167, 0.739413, 0.37459, 0.839959, 0.923021, 0.412234, 0.0345796, 0.903386, 0.524018, 0.844048, 0.742732, 0.0567757, 0.501753, 0.00601583, 0.672038, 0.575995, 0.514017, 0.0965867, 0.368068, 0.876781, 0.455834, 0.988867, 0.320823, 0.791238, 0.685618, 0.688263, 0.30172, 0.154157, 0.38967, 0.893093, 0.682824, 0.491578, 0.147365, 0.422887, 0.77626, 0.196574, 0.152135, 0.458944, 0.434116, 0.541536, 0.766819, 0.655184, 0.196817, 0.505151, 0.757189, 0.954487, 0.579187, 0.216767, 0.964215, 0.839978, 0.670988, 0.331055, 0.182331, 0.703817, 0.744852, 0.524631, 0.582248, 0.387029, 0.936402, 0.452148, 0.226594, 0.451548, 0.482084, 0.11335, 0.130897, 0.375126, 0.585692, 0.641658, 0.597116, 0.714599, 0.679359, 0.821281, 0.529628, 0.552795, 0.833701, 0.553135, 0.376353, 0.418704, 0.667053, 0.593472, 0.0308925, 0.825259, 0.48765, 0.134168, 0.44145, 0.959864, 0.0557883, 0.588004, 0.141181, 0.256054, 0.351418, 0.793767, 0.427475, 0.274212, 0.834324, 0.604863, 0.208494, 0.158566, 0.0249299, 0.657403, 0.737942, 0.533073, 0.942514, 0.474294, 0.213768, 0.930265, 0.840173, 0.347749, 0.885124, 0.751381, 0.553558, 0.306672, 0.634265, 0.0902744, 0.747288, 0.23759, 0.0710373, 
0.047687, 0.158565, 0.691001, 0.651163, 0.841605, 0.509969, 0.177072, 0.368876, 0.254477, 0.0600976, 0.247018, 0.307013, 0.470346, 0.856977, 0.486844, 0.0455714, 0.684224, 0.0756216, 0.835829, 0.419441, 0.550001, 0.583555, 0.399605, 0.171784, 0.469342, 0.97022, 0.857533, 0.0112206, 0.467318, 0.48188, 0.492544, 0.86111, 0.177766, 0.268772, 0.506046, 0.763479, 0.333753, 0.484561, 0.88045, 0.738057, 0.766067, 0.776356, 0.00245242, 0.436079, 0.0633805, 0.415047, 0.424136, 0.460834, 0.285765, 0.242759, 0.948502, 0.476294, 0.306081, 0.432637, 0.242089, 0.492189, 0.751988, 0.732986, 0.25638, 0.298272, 0.499268, 0.794713, 0.594566, 0.789394, 0.205432, 0.887851, 0.380283, 0.351148, 0.277007, 0.173036, 0.518288, 0.599575, 0.549851, 0.0718886, 0.878768, 0.976984, 0.904287, 0.404998, 0.246444, 0.486814, 0.33542, 0.384795, 0.312766, 0.551052, 0.832833, 0.964851, 0.112455, 0.34335, 0.827286, 0.468636, 0.968977, 0.199412, 0.753429, 0.669971, 0.299026, 0.945928, 0.580359, 0.747843, 0.953723, 0.318124, 0.460227, 0.40709, 0.327655, 0.5047, 0.683002, 0.0211267, 0.503006, 0.917106, 0.391914, 0.998695, 0.720124, 0.186802, 0.188715, 0.447346, 0.961447, 0.966021, 0.595159, 0.0295533, 0.821429, 0.451973, 0.577396, 0.469745, 0.543003, 0.561706, 0.925863, 0.0838483, 0.288927, 0.948506, 0.481951, 0.214777, 0.692486, 0.23302, 0.645125, 0.698085, 0.3244, 0.00853607, 0.50614, 0.654027, 0.579495, 0.292898, 0.648634, 0.833606, 0.884226, 0.148754, 0.950711, 0.331601, 0.572725, 0.642462, 0.957883, 0.0358616, 0.855363, 0.350406, 0.546699, 0.67605, 0.925901, 0.872315, 0.816289, 0.159165, 0.856594, 0.349519, 0.422961, 0.358601, 0.361202, 0.0431684, 0.891921, 0.224032, 0.232555, 0.683654, 0.943001, 0.159989, 0.32933, 0.649271, 0.963157, 0.478735, 0.500318, 0.100967, 0.596231, 0.323051, 0.287573, 0.208628, 0.149283, 0.0828627, 0.730904, 0.802556, 0.693205, 0.617672, 0.895647, 0.127091, 0.957333, 0.0131387, 0.252134, 0.479621, 0.508372, 0.28919, 0.786126, 0.333306, 0.51313, 0.145698, 0.901917, 0.96923, 0.850668, 0.750444, 0.673972, 0.312499, 0.976766, 0.155382, 0.617215, 0.661184, 0.731335, 0.33314, 0.00343491, 0.196903, 0.672564, 0.339787, 0.433247, 0.308537, 0.491999, 0.50052, 0.0471497, 0.852182, 0.400892, 0.964917, 0.121747, 0.910889, 0.965982, 0.581256, 0.1072, 0.680748, 0.0555249, 0.448541, 0.660725, 0.700136, 0.823149, 0.541174, 0.597859, 0.160044, 0.727947, 0.735551, 0.617442, 0.970376, 0.99503, 0.780881, 0.295009, 0.376218, 0.805222, 0.645669, 0.11074, 0.953806, 0.0242301, 0.750982, 0.514704, 0.5657, 0.368459, 0.54815, 0.329717, 0.414719, 0.666947, 0.675185, 0.0672083, 0.659835, 0.877681, 0.974564, 0.329524, 0.923178, 0.515386, 0.441206, 0.725525, 0.250916, 0.682244, 0.170287, 0.403503, 0.308291, 0.395623, 0.767037, 0.0206959, 0.862882, 0.873123, 0.428151, 0.156571, 0.24359, 0.521552, 0.278707, 0.720586, 0.103661, 0.734499, 0.908199, 0.11442, 0.239555, 0.951843, 0.124057, 0.764645, 0.257158, 0.92787, 0.0870966, 0.403574, 0.562358, 0.996112, 0.910104, 0.297306, 0.45252, 0.292735, 0.510299, 0.830176, 0.12918, 0.259885, 0.524489, 0.672908, 0.093028, 0.242972, 0.166893, 0.273353, 0.599158, 0.356853, 0.732843, 0.103803, 0.895193, 0.357082, 0.0386378, 0.987084, 0.350177, 0.301136, 0.586194, 0.15287, 0.539825, 0.396232, 0.931566, 0.409499, 0.344459, 0.0367832, 0.560898, 0.890848, 0.482983, 0.77089, 0.487851, 0.699821, 0.877822, 0.80039, 0.751955, 0.739457, 0.640459, 0.869741, 0.945419, 0.931186, 0.441144, 0.364651, 0.879771, 0.202563, 0.500254, 0.928054, 0.991443, 0.783682, 0.451575, 0.507351, 0.714173, 0.818625, 0.60254, 
0.317403, 0.3855, 0.336991, 0.00133882, 0.0361199, 0.186633, 0.107588, 0.314715, 0.29347, 0.888815, 0.531259, 0.838933, 0.948802, 0.875055, 0.612094, 0.326938, 0.413269, 0.11842, 0.0209559, 0.583947, 0.432853, 0.811403, 0.689931, 0.266728, 0.27297, 0.698449, 0.734146, 0.7829, 0.753814, 0.194505, 0.0899534, 0.656829, 0.0137207, 0.955975, 0.395685, 0.55713, 0.97066, 0.147817, 0.931832, 0.545807, 0.500586, 0.655721, 0.590013, 0.0331138, 0.345493, 0.828562, 0.856873, 0.0159198, 0.378047, 0.787885, 0.402999, 0.496351, 0.618542, 0.296037, 0.523252, 0.199806, 0.874143, 0.589831, 0.293539, 0.0993665, 0.694305, 0.403531, 0.932365, 0.0359973, 0.650053, 0.475801, 0.606347, 0.297803, 0.233503, 0.793415, 0.744092, 0.829684, 0.165114, 0.319328, 0.214302, 0.321992, 0.133982, 0.589505, 0.391096, 0.164082, 0.755471, 0.910588, 0.246127, 0.566981, 0.400291, 0.455368, 0.360917, 0.680342, 0.221744, 0.903978, 0.743636, 0.228905, 0.7694, 0.99255, 0.23331, 0.143171, 0.174363, 0.828049, 0.0932809, 0.214401, 0.759514, 0.334247, 0.868765, 0.682726, 0.202166, 0.0248352, 0.599662, 0.614277, 0.456066, 0.935424, 0.764551, 0.97608, 0.714571, 0.452008, 0.384825, 0.265579, 0.924438, 0.472544, 0.44634, 0.964771, 0.727533, 0.0392375, 0.803777, 0.506951, 0.0858723, 0.284566, 0.572916, 0.831223, 0.527367, 0.430055, 0.645944, 0.905373, 0.694997, 0.595465, 0.904301, 0.228418, 0.838301, 0.644333, 0.987351, 0.0155199, 0.266114, 0.595526, 0.740514, 0.730127, 0.511462, 0.147132, 0.723015, 0.861887, 0.135874, 0.517908, 0.49663, 0.428724, 0.345783, 0.41971, 0.5259, 0.31237, 0.636449, 0.0762187, 0.553017, 0.785511, 0.23417, 0.520302, 0.0106807, 0.818606, 0.411141, 0.11986, 0.489176, 0.10744, 0.0908673, 0.0190166, 0.28363, 0.531797, 0.915454, 0.401515, 0.869094, 0.142864, 0.857725, 0.286094, 0.155983, 0.544001, 0.097237, 0.804455, 0.451285, 0.638986, 0.229406, 0.911183, 0.127759, 0.100493, 0.923189, 0.439978, 0.218921, 0.902318, 0.872992, 0.0758135, 0.723589, 0.425318, 0.605319, 0.336365, 0.714893, 0.788778, 0.0648718, 0.505498, 0.922362, 0.733696, 0.159567, 0.0580725, 0.979141, 0.222945, 0.68021, 0.720481, 0.417248, 0.540786, 0.918592, 0.774705, 0.0538489, 0.480015, 0.0992691, 0.916992, 0.743738, 0.339782, 0.0836222, 0.0728764, 0.0105342, 0.676257, 0.825676, 0.768047, 0.255021, 0.965637, 0.0560355, 0.332061, 0.685223, 0.642247, 0.604461, 0.179018, 0.829109, 0.573856, 0.693878, 0.927343, 0.622609, 0.172197, 0.0989473, 0.140669, 0.767246, 0.463837, 0.893652, 0.763666, 0.959632, 0.0820774, 0.158949, 0.846587, 0.981103, 0.439481, 0.848754, 0.392567, 0.776011, 0.17427, 0.823469, 0.00488921, 0.637263, 0.398956, 0.418484, 0.280438, 0.262279, 0.703354, 0.464967, 0.00209161, 0.976941, 0.24688, 0.966388, 0.522709, 0.863451, 0.643606, 0.680348, 0.700682, 0.697095, 0.159629, 0.168901, 0.988826, 0.27654, 0.370214, 0.304424, 0.546366, 0.962145, 0.434981, 0.13419, 0.812772, 0.850664, 0.765539, 0.593282, 0.88781, 0.366307, 0.423486, 0.830649, 0.848593, 0.890174, 0.59606, 0.124577, 0.0116874, 0.0796542, 0.15658, 0.417686, 0.0698055, 0.653551, 0.644604, 0.620626, 0.920253, 0.450845, 0.435975, 0.0288537, 0.963141, 0.050949, 0.464791, 0.898443, 0.0748599, 0.0796603, 0.952003, 0.814862, 0.483537, 0.818661, 0.174355, 0.3693, 0.769965, 0.411787, 0.570768, 0.00840454, 0.264794, 0.606351, 0.512022, 0.306872, 0.971752, 0.817324, 0.202587, 0.242578, 0.607889, 0.616934, 0.120978, 0.74438, 0.210367, 0.067249, 0.324255, 0.241723, 0.522241, 0.926163, 0.743968, 0.461381, 0.00381675, 0.142315, 0.562517, 0.302568, 0.312824, 0.875651, 0.424954, 0.302876, 0.69249, 
0.377015, 0.56414, 0.522892, 0.348428, 0.650261, 0.658012, 0.21629, 0.418835, 0.202507, 0.925388, 0.52659, 0.402961, 0.580876, 0.183709, 0.641399, 0.825876, 0.115844, 0.509208, 0.442995, 0.643152, 0.325416, 0.745719, 0.930997, 0.429934, 0.842501, 0.315051, 0.930339, 0.421552, 0.0406437, 0.194395, 0.979417, 0.196005, 0.459418, 0.837538, 0.892313, 0.558789, 0.259116, 0.511931, 0.988011, 0.925103, 0.629429, 0.923004, 0.225344, 0.188839, 0.135982, 0.199065, 0.836222, 0.610291, 0.188366, 0.280982, 0.412775, 0.486768, 0.521274, 0.447508, 0.674142, 0.766363, 0.84946, 0.138023, 0.894015, 0.551769, 0.459831, 0.629775, 0.967962, 0.166656, 0.11692, 0.230943, 0.914559, 0.253356, 0.257224, 0.0365, 0.242121, 0.758501, 0.775766, 0.331163, 0.107237, 0.439971, 0.521181, 0.486016, 0.689438, 0.940671, 0.343961, 0.636371, 0.857439, 0.708101, 0.0647187, 0.463431, 0.0744246, 0.740449, 0.183769, 0.140942, 0.166013, 0.559759, 0.550765, 0.82458, 0.553142, 0.672139, 0.429642, 0.25702, 0.678835, 0.809112, 0.689459, 0.412666, 0.133956, 0.724851, 0.407923, 0.216539, 0.576855, 0.820825, 0.349875, 0.518718, 0.271224, 0.231833, 0.757653, 0.0895289, 0.257958, 0.912803, 0.0139883, 0.344305, 0.100899, 0.695395, 0.961772, 0.314913, 0.532151, 0.0283038, 0.366278, 0.0283777, 0.272278, 0.103625, 0.964489, 0.238313, 0.269668, 0.632904, 0.29736, 0.368477, 0.106029, 0.650432, 0.364907, 0.586893, 0.242471, 0.118621, 0.604534, 0.660508, 0.439595, 0.847147, 0.347881, 0.763572, 0.368871, 0.898759, 0.318257, 0.812952, 0.670345, 0.432463, 0.0923226, 0.217449, 0.525968, 0.133824, 0.302355, 0.644307, 0.0201002, 0.706517, 0.036478, 0.482702, 0.157479, 0.530659, 0.162151, 0.956726, 0.909987, 0.256089, 0.543585, 0.754504, 0.4189, 0.616675, 0.382668, 0.445062, 0.550738, 0.44239, 0.440681, 0.782925, 0.673463, 0.721832, 0.608257, 0.75857, 0.828145, 0.326748, 0.456011, 0.445707, 0.891813, 0.892112, 0.843948, 0.324528, 0.337412, 0.696943, 0.0881224, 0.513478, 0.385583, 0.241667, 0.0386527, 0.801009, 0.740472, 0.579326, 0.0885084, 0.249975, 0.973349, 0.716268, 0.90273, 0.384742, 0.195964, 0.277899, 0.597481, 0.480783, 0.241156, 0.663277, 0.551371, 0.578432, 0.473187, 0.938191, 0.434135, 0.133075, 0.761996, 0.389936, 0.172719, 0.611525, 0.204329, 0.263872, 0.727619, 0.100638, 0.763588, 0.237667, 0.133217, 0.104672, 0.190663, 0.0767215, 0.201889, 0.751326, 0.997555, 0.715038, 0.713715, 0.376388, 0.987886, 0.0617841, 0.661246, 0.652291, 0.0923544, 0.0308808, 0.501141, 0.736117, 0.712257, 0.656811, 0.64766, 0.984208, 0.0851364, 0.806212, 0.498606, 0.802776, 0.734658, 0.475529, 0.16211, 0.660125, 0.800007, 0.391612, 0.597028, 0.512106, 0.245036, 0.118873, 0.615805, 0.986868, 0.054786, 0.0454046, 0.640423, 0.736197, 0.814998, 0.194092, 0.562153, 0.317739, 0.389143, 0.78137, 0.135982, 0.997584, 0.168038, 0.68237, 0.66544, 0.935733, 0.0448366, 0.807947, 0.548399, 0.188544, 0.596718, 0.499102, 0.725755, 0.517412, 0.415171, 0.799607, 0.91989, 0.00617531, 0.325688, 0.884327, 0.390274, 0.365613, 0.345935, 0.964806, 0.977122, 0.896192, 0.00709287, 0.128217, 0.553729, 0.869597, 0.426966, 0.42264, 0.899114, 0.248113, 0.327942, 0.0200032, 0.650956, 0.852009, 0.553268, 0.282942, 0.368654, 0.255745, 0.706839, 0.122648, 0.637827, 0.742952, 0.917863, 0.981202, 0.849317, 0.451939, 0.299898, 0.604877, 0.582485, 0.406244, 0.280508, 0.378976, 0.8962, 0.155116, 0.705341, 0.853393, 0.796647, 0.835826, 0.537941, 0.356798, 0.406637, 0.220824, 0.821627, 0.213582, 0.158235, 0.418182, 0.512412, 0.867661, 0.143291, 0.328164, 0.420325, 0.952297, 0.523191, 0.778869, 0.17798, 
0.148116, 0.896001, 0.838819, 0.334348, 0.680348, 0.233557, 0.358931, 0.777211, 0.455152, 0.353307, 0.427389, 0.694479, 0.454388, 0.503183, 0.459568, 0.14898, 0.751231, 0.339429, 0.976507, 0.397279, 0.365305, 0.730641, 0.314821, 0.299901, 0.0442625, 0.530148, 0.124517, 0.36441, 0.711645, 0.150867, 0.119311, 0.803493, 0.577025, 0.154133, 0.855526, 0.167556, 0.0726803, 0.986192, 0.711038, 0.700279, 0.101081, 0.376973, 0.339022, 0.714614, 0.845482, 0.748709, 0.590311, 0.549802, 0.88379, 0.90665, 0.599165, 0.318602, 0.450632, 0.614344, 0.599932, 0.993133, 0.488163, 0.981342, 0.660076, 0.357435, 0.838049, 0.275362, 0.527635, 0.33595, 0.725304, 0.201363, 0.415586, 0.477069, 0.183765, 0.842161, 0.71911, 0.0501112, 0.140767, 0.0630424, 0.727841, 0.255242, 0.215203, 0.878294, 0.80831, 0.55843, 0.849316, 0.602507, 0.429003, 0.926976, 0.496692, 0.848701, 0.387426, 0.897372, 0.985553, 0.917961, 0.620809, 0.210813, 0.713325, 0.407355, 0.608745, 0.64716, 0.0910119, 0.663932, 0.22989, 0.39938, 0.888425, 0.249655, 0.62257, 0.16863, 0.467721, 0.254101, 0.246962, 0.483187, 0.145163, 0.762424, 0.678772, 0.581166, 0.497457, 0.807209, 0.618063, 0.928199, 0.405722, 0.0639033, 0.304547, 0.721251, 0.0532577, 0.312875, 0.0208994, 0.28299, 0.245854, 0.24187, 0.195145, 0.426554, 0.590713, 0.926482, 0.884256, 0.244717, 0.640514, 0.888139, 0.869076, 0.375902, 0.611445, 0.103326, 0.871283, 0.349523, 0.0435, 0.407227, 0.47896, 0.26037, 0.117671, 0.643218, 0.735592, 0.0501995, 0.0539877, 0.731132, 0.647197, 0.121368, 0.755002, 0.00942637, 0.239114, 0.305674, 0.732567, 0.10165, 0.584198, 0.81425, 0.0772014, 0.202098, 0.143637, 0.916334, 0.370711, 0.913067, 0.401011, 0.419061, 0.202868, 0.402323, 0.715266, 0.0427013, 0.101992, 0.757109, 0.472456, 0.655636, 0.512524, 0.149225, 0.45658, 0.662492, 0.772303, 0.439502, 0.892644, 0.557149, 0.370436, 0.685567, 0.0729638, 0.255043, 0.255935, 0.6723, 0.61798, 0.242036, 0.791978, 0.827763, 0.0960702, 0.891181, 0.0336213, 0.140845, 0.223115, 0.640207, 0.334803, 0.441755, 0.475937, 0.490464, 0.304141, 0.495447, 0.721345, 0.161497, 0.760148, 0.399392, 0.341925, 0.501023, 0.683928, 0.862598, 0.291447, 0.176142, 0.746436, 0.348092, 0.924732, 0.667015, 0.148705, 0.169056, 0.232397, 0.416639, 0.830783, 0.0892667, 0.39345, 0.911377, 0.392667, 0.48043, 0.888646, 0.740502, 0.20495, 0.691639, 0.236369, 0.837036, 0.699674, 0.704523, 0.766803, 0.112831, 0.235875, 0.237561, 0.379964, 0.72585, 0.18868, 0.559117, 0.652368, 0.0557769, 0.059707, 0.280615, 0.970609, 0.254852, 0.425911, 0.64626, 0.532695, 0.988222, 0.236712, 0.721678, 0.936648, 0.281807, 0.977457, 0.567822, 0.537675, 0.266395, 0.964645, 0.424837, 0.396668, 0.876003, 0.177933, 0.847145, 0.220712, 0.145708, 0.970514, 0.839388, 0.339196, 0.203088, 0.0440583, 0.0574227, 0.822016, 0.151891, 0.00814889, 0.143404, 0.500437, 0.884329, 0.727449, 0.84794, 0.507022, 0.0639527, 0.926338, 0.562486, 0.584309, 0.785913, 0.476427, 0.223219, 0.906634, 0.921264, 0.663747, 0.568191, 0.0982033, 0.300347, 0.570192, 0.160566, 0.386881, 0.693185, 0.799988, 0.314383, 0.193663, 0.0463569, 0.86968, 0.940207, 0.610665, 0.134416, 0.0176809, 0.297985, 0.731683, 0.302342, 0.513029, 0.998688, 0.790487, 0.0817636, 0.123725, 0.570267, 0.435789, 0.319516, 0.0293822, 0.805744, 0.805451, 0.279035, 0.220198, 0.106837, 0.509024, 0.641325, 0.366067, 0.552911, 0.467587, 0.832667, 0.386892, 0.0388026, 0.81917, 0.970622, 0.860976, 0.769095, 0.512708, 0.87943, 0.694737, 0.869173, 0.705601, 0.317065, 0.104031, 0.941113, 0.037946, 0.652652, 0.450996, 0.494027, 0.509294, 0.350889, 
0.296385, 0.135312, 0.945387, 0.199201, 0.500907, 0.645031, 0.564879, 0.422849, 0.830481, 0.527812, 0.587382, 0.519882, 0.958029, 0.152792, 0.216143, 0.334917, 0.80129, 0.427097, 0.747404, 0.743926, 0.689253, 0.850445, 0.445943, 0.854219, 0.0215406, 0.153485, 0.327062, 0.123348, 0.023221, 0.410068, 0.276339, 0.544041, 0.760288, 0.56301, 0.365529, 0.236871, 0.352406, 0.0326252, 0.0420352, 0.254531, 0.250577, 0.395593, 0.569709, 0.79069, 0.593037, 0.877957, 0.674513, 0.673686, 0.105089, 0.885284, 0.997107, 0.1188, 0.442964, 0.284258, 0.570847, 0.347612, 0.0810332, 0.306508, 0.0429041, 0.556203, 0.0786733, 0.00551572, 0.683756, 0.129694, 0.615503, 0.877287, 0.815159, 0.22483, 0.156588, 0.0199058, 0.742073, 0.155857, 0.417716, 0.514476, 0.482413, 0.506929, 0.841076, 0.524109, 0.196613, 0.577095, 0.964816, 0.867181, 0.599843, 0.641219, 0.310672, 0.353755, 0.0254282, 0.55567, 0.143545, 0.730299, 0.118997, 0.0797341, 0.430796, 0.346511, 0.781069, 0.890301, 0.174282, 0.850414, 0.416127, 0.969729, 0.382448, 0.662187, 0.424187, 0.181018, 0.701277, 0.831499, 0.569624, 0.133955, 0.775872, 0.714001, 0.0324761, 0.51312, 0.994919, 0.0251474, 0.128787, 0.565303, 0.973401, 0.891416, 0.444574, 0.125922, 0.449134, 0.963217, 0.647622, 0.0650193, 0.885061, 0.00146885, 0.580925, 0.473301, 0.610966, 0.0525989, 0.435777, 0.0144924, 0.0593824, 0.0571613, 0.974562, 0.431726, 0.915335, 0.445885, 0.375113, 0.326323, 0.983566, 0.760216, 0.234949, 0.155379, 0.252256, 0.0425016, 0.183463, 0.0574654, 0.343602, 0.764554, 0.0194104, 0.680357, 0.471967, 0.482903, 0.470674, 0.925088, 0.552035, 0.865305, 0.411616, 0.654372, 0.0357468, 0.122863, 0.940946, 0.817483, 0.499309, 0.970009, 0.498721, 0.0211476, 0.796745, 0.225094, 0.5829, 0.455718, 0.168262, 0.0890872, 0.0821087, 0.190761, 0.961716, 0.169272, 0.984354, 0.627827, 0.832658, 0.915731, 0.0453027, 0.685062, 0.796526, 0.952043, 0.553551, 0.644408, 0.443399, 0.514475, 0.132787, 0.697997, 0.871876, 0.559355, 0.114595, 0.161937, 0.614778, 0.508195, 0.905827, 0.59435, 0.249659, 0.133382, 0.274239, 0.886799, 0.699829, 0.828538, 0.649996, 0.621314, 0.687555, 0.0425832, 0.321515, 0.582666, 0.0872077, 0.933264, 0.100484, 0.728426, 0.592195, 0.825828, 0.951506, 0.557535, 0.67833, 0.601763, 0.263594, 0.124196, 0.492818, 0.942032, 0.831677, 0.982675, 0.163366, 0.903401, 0.589087, 0.822523, 0.946225, 0.397165, 0.17114, 0.366518, 0.378886, 0.45086, 0.221762, 0.42617, 0.22721, 0.0202198, 0.42354, 0.815119, 0.980494, 0.226656, 0.565361, 0.72072, 0.213007, 0.850502, 0.302439, 0.0923598, 0.318314, 0.650614, 0.177618, 0.416822, 0.0751303, 0.624968, 0.836197, 0.337152, 0.381186, 0.544312, 0.650402, 0.270946, 0.432987, 0.0311186, 0.737032, 0.186079, 0.611923, 0.215221, 0.23069, 0.356222, 0.317295, 0.71885, 0.733882, 0.459614, 0.155991, 0.262909, 0.0556322, 0.538095, 0.24667, 0.0529361, 0.653603, 0.00330699, 0.73626, 0.113499, 0.977045, 0.433087, 0.380206, 0.507099, 0.604064, 0.210001, 0.189443, 0.606213, 0.70066, 0.421405, 0.668883, 0.050616, 0.174679, 0.216191, 0.429013, 0.534606, 0.323534, 0.374716, 0.575614, 0.66395, 0.277489, 0.593676, 0.985517, 0.901258, 0.965618, 0.860015, 0.696336, 0.0827566, 0.592141, 0.40765, 0.150809, 0.229297, 0.134465, 0.717002, 0.493493, 0.922087, 0.530128, 0.41164, 0.0119914, 0.827675, 0.473716, 0.312342, 0.783842, 0.0437826, 0.655025, 0.5513, 0.357709, 0.771197, 0.681697, 0.383887, 0.395078, 0.469344, 0.868204, 0.328039, 0.503278, 0.571864, 0.0658645, 0.647429, 0.143174, 0.904441, 0.303786, 0.563792, 0.522408, 0.683271, 0.880676, 0.0166372, 0.276394, 
0.0119003, 0.915804, 0.0259784, 0.523413, 0.718726, 0.486712, 0.930253, 0.371854, 0.0817778, 0.599925, 0.517217, 0.645896, 0.68982, 0.0358662, 0.940957, 0.356864, 0.66361, 0.971261, 0.263755, 0.880846, 0.440412, 0.59978, 0.15946, 0.0938822, 0.227498, 0.897745, 0.55373, 0.00787538, 0.652575, 0.302825, 0.997172, 0.164793, 0.56798, 0.315879, 0.646925, 0.0511829, 0.0249178, 0.0895109, 0.986492, 0.312031, 0.462938, 0.794628, 0.068469, 0.0678267, 0.496929, 0.14935, 0.466244, 0.981542, 0.610265, 0.756993, 0.801997, 0.223039, 0.498384, 0.320208, 0.329293, 0.0501127, 0.989374, 0.990738, 0.961756, 0.491521, 0.565403, 0.15118, 0.107286, 0.342324, 0.0564451, 0.196986, 0.666347, 0.898007, 0.794515, 0.78884, 0.230405, 0.20826, 0.327789, 0.640502, 0.566369, 0.791109, 0.84558, 0.803727, 0.981758, 0.680637, 0.437065, 0.221462, 0.908891, 0.55648, 0.498584, 0.772053, 0.571191, 0.893482, 0.34016, 0.151153, 0.703093, 0.960266, 0.427182, 0.76267, 0.682852, 0.900989, 0.682592, 0.557239, 0.856957, 0.304229, 0.357378, 0.977871, 0.466437, 0.66609, 0.18864, 0.754117, 0.86586, 0.744536, 0.878788, 0.306162, 0.669649, 0.598818, 0.644598, 0.177399, 0.927432, 0.578985, 0.384865, 0.387055, 0.872127, 0.820513, 0.5167, 0.0404335, 0.251028, 0.864299, 0.854716, 0.00788787, 0.329696, 0.277862, 0.059871, 0.964623, 0.301036, 0.856873, 0.303553, 0.246653, 0.829699, 0.245794, 0.638091, 0.196928, 0.450233, 0.265502, 0.638673, 0.820313, 0.692505, 0.26187, 0.210528, 0.83633, 0.00635626, 0.614559, 0.104998, 0.707099, 0.956968, 0.223384, 0.846147, 0.711675, 0.472535, 0.521083, 0.544865, 0.980233, 0.27125, 0.949522, 0.778742, 0.355291, 0.826126, 0.731626, 0.179577, 0.568426, 0.542278, 0.104136, 0.289748, 0.243144, 0.629661, 0.0759973, 0.233128, 0.192321, 0.773358, 0.794866, 0.846724, 0.490382, 0.452209, 0.452119, 0.209811, 0.771172, 0.0137926, 0.581328, 0.29578, 0.261849, 0.458448, 0.164983, 0.146459, 0.929715, 0.645353, 0.917462, 0.907967, 0.966155, 0.824325, 0.0940256, 0.97221, 0.383456, 0.311891, 0.533091, 0.634659, 0.742608, 0.163834, 0.114541, 0.922736, 0.109495, 0.178339, 0.487904, 0.648954, 0.684241, 0.16625, 0.445562, 0.673903, 0.0859252, 0.126856, 0.387942, 0.169866, 0.629438, 0.506087, 0.759372, 0.902355, 0.229161, 0.996228, 0.477016, 0.938824, 0.884247, 0.168796, 0.227945, 0.6192, 0.58942, 0.373129, 0.937472, 0.811925, 0.446544, 0.239153, 0.715383, 0.106531, 0.0316509, 0.130184, 0.587686, 0.586905, 0.473046, 0.799028, 0.569113, 0.313323, 0.529295, 0.814431, 0.524405, 0.638863, 0.420881, 0.935429, 0.712566, 0.764537, 0.229777, 0.694943, 0.584713, 0.208398, 0.333168, 0.176028, 0.466462, 0.56498, 0.413803, 0.148963, 0.559727, 0.0554545, 0.128145, 0.242096, 0.234113, 0.042183, 0.323769, 0.442763, 0.610392, 0.101455, 0.219017, 0.325968, 0.699561, 0.931567, 0.653069, 0.275124, 0.395588, 0.226311, 0.798339, 0.108119, 0.832235, 0.527387, 0.0410892, 0.844508, 0.186494, 0.337552, 0.627381, 0.267372, 0.991461, 0.690213, 0.695331, 0.0153019, 0.159658, 0.649657, 0.187354, 0.924875, 0.260669, 0.474225, 0.50578, 0.613507, 0.440053, 0.903643, 0.028104, 0.701285, 0.392058, 0.830173, 0.0663342, 0.934034, 0.356377, 0.313686, 0.0937587, 0.866284, 0.927016, 0.23924, 0.983261, 0.384249, 0.914655, 0.756388, 0.490309, 0.792666, 0.173106, 0.652794, 0.900278, 0.166199, 0.366032, 0.993017, 0.201876, 0.838224, 0.431544, 0.186205, 0.990122, 0.227314, 0.32672, 0.843638, 0.587606, 0.994955, 0.673793, 0.939636, 0.34401, 0.0929562, 0.483755, 0.00794636, 0.781863, 0.67067, 0.710165, 0.252352, 0.671768, 0.67911, 0.669496, 0.362743, 0.699145, 0.394767, 
0.392629, 0.683332, 0.579736, 0.502725, 0.112535, 0.649659, 0.0463693, 0.726066, 0.798391, 0.16099, 0.535843, 0.493985, 0.212056, 0.291682, 0.934227, 0.661133, 0.814381, 0.192099, 0.572097, 0.34012, 0.226153, 0.817926, 0.123818, 0.683802, 0.640872, 0.603235, 0.849095, 0.750859, 0.42374, 0.222286, 0.82559, 0.777879, 0.34939, 0.4112, 0.826705, 0.337027, 0.799973, 0.000958123, 0.0875518, 0.755571, 0.202961, 0.702712, 0.501555, 0.818176, 0.00330532, 0.818856, 0.71221, 0.000500498, 0.153684, 0.237695, 0.417713, 0.8748, 0.893179, 0.982045, 0.429539, 0.741997, 0.292911, 0.0634303, 0.312663, 0.208602, 0.954402, 0.352523, 0.044081, 0.550157, 0.214102, 0.847674, 0.289649, 0.513429, 0.450314, 0.819657, 0.212665, 0.853312, 0.991389, 0.312814, 0.200088, 0.603983, 0.413039, 0.00539337, 0.0391682, 0.0985879, 0.837159, 0.102358, 0.363972, 0.553923, 0.376628, 0.80532, 0.491601, 0.184874, 0.11025, 0.67703, 0.593797, 0.195557, 0.145756, 0.679163, 0.0324885, 0.265745, 0.33161, 0.873777, 0.634638, 0.908656, 0.107826, 0.502514, 0.279305, 0.163612, 0.807596, 0.939629, 0.699609, 0.484153, 0.700102, 0.117146, 0.215443, 0.483145, 0.13176, 0.134584, 0.913199, 0.599423, 0.272955, 0.192264, 0.00365402, 0.25263, 0.192026, 0.0405651, 0.485668, 0.407445, 0.728536, 0.924742, 0.673144, 0.837039, 0.185345, 0.971447, 0.714345, 0.526862, 0.589391, 0.287087, 0.175721, 0.658951, 0.810848, 0.839415, 0.873788, 0.794288, 0.66059, 0.0965063, 0.167245, 0.24648, 0.0280972, 0.958082, 0.359086, 0.783201, 0.92109, 0.0647022, 0.522174, 0.227242, 0.302585, 0.956363, 0.239163, 0.0169744, 0.706166, 0.552493, 0.251071, 0.281932, 0.74269, 0.0509778, 0.707718, 0.908914, 0.584765, 0.404017, 0.0703691, 0.783477, 0.588686, 0.490385, 0.480844, 0.601782, 0.873125, 0.278685, 0.894893, 0.436574, 0.933266, 0.0404084, 0.799115, 0.410105, 0.0453886, 0.787509, 0.71924, 0.79232, 0.75664, 0.910624, 0.496162, 0.160523, 0.28118, 0.715159, 0.737146, 0.777853, 0.0303673, 0.00722299, 0.635187, 0.0576644, 0.770801, 0.110177, 0.00520291, 0.718079, 0.991158, 0.80526, 0.505923, 0.0243759, 0.314903, 0.0646168, 0.391038, 0.832974, 0.988515, 0.819772, 0.977661, 0.600815, 0.136723, 0.360897, 0.627892, 0.540068, 0.484779, 0.488839, 0.203719, 0.0488199, 0.788927, 0.501189, 0.170689, 0.0660145, 0.355081, 0.428183, 0.935023, 0.728545, 0.820263, 0.747069, 0.0244261, 0.736778, 0.0585081, 0.774807, 0.628749, 0.282975, 0.646311, 0.70732, 0.833819, 0.769264, 0.0292635, 0.385005, 0.835433, 0.584842, 0.269172, 0.453312, 0.811241, 0.146775, 0.514722, 0.626373, 0.984302, 0.0626056, 0.71144, 0.382443, 0.672491, 0.231363, 0.8256, 0.616714, 0.123098, 0.543026, 0.295349, 0.490021, 0.162363, 0.989856, 0.20914, 0.554875, 0.0999906, 0.910785, 0.661969, 0.0957499, 0.88802, 0.604018, 0.538319, 0.957755, 0.509128, 0.391562, 0.0206262, 0.907441, 0.457181, 0.955013, 0.387873, 0.0210598, 0.367796, 0.0859406, 0.22748, 0.03433, 0.541917, 0.0211158, 0.409897, 0.893971, 0.636963, 0.139376, 0.716417, 0.655806, 0.266559, 0.915389, 0.465934, 0.850601, 0.679295, 0.325744, 0.513572, 0.0306958, 0.0691112, 0.0871462, 0.451225, 0.718832, 0.371649, 0.443805, 0.502832, 0.406052, 0.197183, 0.994581, 0.914941, 0.180228, 0.239949, 0.593192, 0.552682, 0.502817, 0.87574, 0.284447, 0.729085, 0.808843, 0.0950477, 0.0729399, 0.667365, 0.985147, 0.178874, 0.431809, 0.549626, 0.491808, 0.685666, 0.851442, 0.526548, 0.930647, 0.998229, 0.536009, 0.0224966, 0.00988495, 0.629436, 0.976495, 0.56915, 0.461985, 0.739192, 0.283142, 0.590759, 0.0472357, 0.915142, 0.955991, 0.394271, 0.88164, 0.469591, 0.402637, 0.275159, 
0.553982, 0.994032, 0.437168, 0.556088, 0.11567, 0.029193, 0.211848, 0.893864, 0.2361, 0.051841, 0.747276, 0.629714, 0.309363, 0.311942, 0.125599, 0.618932, 0.129755, 0.712699, 0.00356724, 0.106508, 0.0318713, 0.293758, 0.66347, 0.0143746, 0.591908, 0.0909588, 0.475048, 0.580249, 0.0754684, 0.346492, 0.562383, 0.676824, 0.444289, 0.182256, 0.89727, 0.586247, 0.216869, 0.243678, 0.592218, 0.845122, 0.101391, 0.160053, 0.719847, 0.237375, 0.377315, 0.537735, 0.524195, 0.130184, 0.588906, 0.0480896, 0.727814, 0.403256, 0.831867, 0.293048, 0.953633, 0.230206, 0.085901, 0.5664, 0.977159, 0.401211, 0.966178, 0.445442, 0.528728, 0.372189, 0.207416, 0.441281, 0.205967, 0.999735, 0.653867, 0.316896, 0.0416405, 0.0594561, 0.469194, 0.453907, 0.273697, 0.648805, 0.613758, 0.758674, 0.617143, 0.00235473, 0.159211, 0.765624, 0.919078, 0.906094, 0.117163, 0.706089, 0.751481, 0.427165, 0.573756, 0.111476, 0.953272, 0.665605, 0.695696, 0.976775, 0.255332, 0.637538, 0.0171614, 0.623989, 0.0639173, 0.555162, 0.589097, 0.572766, 0.431985, 0.948581, 0.659237, 0.81728, 0.335381, 0.419219, 0.536905, 0.660527, 0.695107, 0.703172, 0.204621, 0.55642, 0.922037, 0.913286, 0.774999, 0.946345, 0.713038, 0.33237, 0.665116, 0.210062, 0.483028, 0.700388, 0.881672, 0.89731, 0.838172, 0.0173835, 0.475631, 0.987371, 0.821474, 0.0554212, 0.55502, 0.357483, 0.316623, 0.300882, 0.916118, 0.134941, 0.291192, 0.8647, 0.704981, 0.647032, 0.642376, 0.572168, 0.764463, 0.430775, 0.759481, 0.195913, 0.725846, 0.66451, 0.414738, 0.313332, 0.598017, 0.650904, 0.52075, 0.381366, 0.776094, 0.26672, 0.40296, 0.621293, 0.920891, 0.634812, 0.669575, 0.174638, 0.504139, 0.601727, 0.184741, 0.98621, 0.963394, 0.0736871, 0.497811, 0.568394, 0.990391, 0.990926, 0.255887, 0.681178, 0.963114, 0.672425, 0.293293, 0.90469, 0.658146, 0.883987, 0.938543, 0.357932, 0.350685, 0.634712, 0.803173, 0.933822, 0.561869, 0.078421, 0.03377, 0.699357, 0.268766, 0.569036, 0.100409, 0.751346, 0.863191, 0.812587, 0.939063, 0.935909, 0.356535, 0.402327, 0.448619, 0.0798998, 0.0874934, 0.850393, 0.8261, 0.845429, 0.0608912, 0.460046, 0.915542, 0.794026, 0.2375, 0.176328, 0.233985, 0.0459265, 0.0925049, 0.469532, 0.104287, 0.0382714, 0.584239, 0.275631, 0.931689, 0.66653, 0.445012, 0.506552, 0.674013, 0.442549, 0.579111, 0.727182, 0.857951, 0.433216, 0.667478, 0.888192, 0.253487, 0.907697, 0.063528, 0.627395, 0.0530657, 0.252441, 0.306642, 0.378893, 0.514284, 0.113086, 0.641139, 0.0640446, 0.076686, 0.490099, 0.140404, 0.929355, 0.493549, 0.0752527, 0.559863, 0.625959, 0.347811, 0.0744179, 0.563211, 0.573803, 0.0636248, 0.812252, 0.267392, 0.100076, 0.439025, 0.302034, 0.344197, 0.982206, 0.650226, 0.654336, 0.393512, 0.220125, 0.803531, 0.269609, 0.426412, 0.965719, 0.230667, 0.889539, 0.532781, 0.717546, 0.227704, 0.859822, 0.765987, 0.734943, 0.0360507, 0.54699, 0.735232, 0.865057, 0.859511, 0.376633, 0.882564, 0.579947, 0.113221, 0.83282, 0.541157, 0.847104, 0.993415, 0.659034, 0.622417, 0.17779, 0.413297, 0.301806, 0.358908, 0.725212, 0.865409, 0.693159, 0.532789, 0.648088, 0.83576, 0.574132, 0.83344, 0.608122, 0.0206343, 0.421513, 0.686792, 0.518635, 0.614134, 0.860307, 0.779351, 0.426095, 0.970546, 0.810036, 0.692905, 0.666609, 0.612682, 0.130808, 0.502312, 0.023076, 0.688183, 0.775546, 0.351484, 0.619863, 0.725958, 0.778766, 0.791133, 0.85453, 0.0677412, 0.755906, 0.72252, 0.577958, 0.140604, 0.567497, 0.407786, 0.769519, 0.86806, 0.191907, 0.233901, 0.06372, 0.154035, 0.256789, 0.469125, 0.798345, 0.622035, 0.386632, 0.0779187, 0.351657, 0.652488, 
0.778141, 0.372804, 0.857006, 0.330288, 0.709349, 0.690749, 0.28821, 0.54631, 0.165476, 0.577188, 0.156165, 0.392205, 0.925488, 0.768479, 0.0520163, 0.104604, 0.0372363, 0.531063, 0.266405, 0.72102, 0.566195, 0.266847, 0.576198, 0.423682, 0.478424, 0.248557, 0.368749, 0.965115, 0.844862, 0.81823, 0.504955, 0.804212, 0.567169, 0.00756038, 0.608908, 0.930227, 0.314526, 0.62899, 0.58888, 0.360393, 0.98636, 0.635971, 0.274053, 0.942382, 0.854473, 0.963533, 0.692537, 0.799372, 0.481354, 0.518208, 0.695557, 0.582804, 0.541781, 0.281654, 0.643284, 0.476456, 0.0208474, 0.380957, 0.0174194, 0.807173, 0.505201, 0.188807, 0.73891, 0.603733, 0.817934, 0.154823, 0.418004, 0.089249, 0.940777, 0.862959, 0.226419, 0.678874, 0.309632, 0.753683, 0.825072, 0.401445, 0.277864, 0.992829, 0.135828, 0.118742, 0.875836, 0.909088, 0.769799, 0.548372, 0.111686, 0.887402, 0.411827, 0.37357, 0.37249, 0.535817, 0.542136, 0.306913, 0.00612676, 0.244442, 0.916463, 0.723786, 0.469681, 0.366887, 0.218949, 0.993562, 0.10128, 0.855725, 0.955623, 0.744774, 0.159688, 0.351155, 0.695517, 0.129436, 0.55007, 0.445845, 0.732346, 0.30669, 0.418204, 0.622782, 0.943753, 0.80241, 0.234182, 0.853018, 0.95815, 0.0599405, 0.540154, 0.011261, 0.670714, 0.720759, 0.384776, 0.406652, 0.776178, 0.663601, 0.629138, 0.796056, 0.0846683, 0.560962, 0.104969, 0.286167, 0.379489, 0.135788, 0.980782, 0.217574, 0.361004, 0.601871, 0.364071, 0.662785, 0.283351, 0.625509, 0.907533, 0.230547, 0.105341, 0.187095, 0.659358, 0.542944, 0.914125, 0.593741, 0.00460789, 0.536677, 0.617605, 0.674914, 0.169949, 0.0447656, 0.706795, 0.831657, 0.959785, 0.841278, 0.188848, 0.568619, 0.140764, 0.148216, 0.868931, 0.516512, 0.738313, 0.693395, 0.594614, 0.287306, 0.938827, 0.708445, 0.854706, 0.752348, 0.799616, 0.0257541, 0.383012, 0.869477, 0.0234215, 0.476798, 0.586688, 0.904846, 0.923289, 0.263486, 0.459633, 0.978429, 0.688351, 0.0709777, 0.841215, 0.735626, 0.178871, 0.315524, 0.250976, 0.489735, 0.158802, 0.674123, 0.550604, 0.0189668, 0.564541, 0.797432, 0.18335, 0.905517, 0.45373, 0.581615, 0.87558, 0.972685, 0.267873, 0.393001, 0.119467, 0.0286767, 0.183137, 0.582576, 0.705721, 0.375103, 0.950031, 0.814518, 0.466996, 0.890245, 0.211236, 0.0253737, 0.253186, 0.382147, 0.565145, 0.531588, 0.491809, 0.481327, 0.0730178, 0.0979293, 0.737724, 0.538231, 0.268321, 0.383432, 0.078791, 0.515034, 0.244902, 0.676438, 0.34142, 0.981412, 0.0728127, 0.950487, 0.818435, 0.529703, 0.246925, 0.554012, 0.0694732, 0.834821, 0.824937, 0.296421, 0.747051, 0.690936, 0.306165, 0.948284, 0.0515413, 0.171419, 0.201856, 0.0509489, 0.0628976, 0.614766, 0.0308054, 0.49657, 0.686851, 0.653665, 0.186123, 0.805688, 0.33959, 0.445287, 0.142232, 0.371605, 0.681636, 0.441053, 0.904414, 0.410428, 0.47256, 0.162206, 0.142547, 0.891477, 0.126649, 0.00963837, 0.902017, 0.840981, 0.958291, 0.850666, 0.246335, 0.650503, 0.650334, 0.698674, 0.326043, 0.267565, 0.141239, 0.43157, 0.612756, 0.943698, 0.803956, 0.194803, 0.895302, 0.266704, 0.112431, 0.203755, 0.051201, 0.379072, 0.0170236, 0.596579, 0.328785, 0.768887, 0.972819, 0.974327, 0.188665, 0.008891, 0.860699, 0.422386, 0.531748, 0.027172, 0.213328, 0.325634, 0.994365, 0.199092, 0.678904, 0.623296, 0.497055, 0.0492969, 0.326377, 0.156372, 0.648821, 0.498767, 0.767369, 0.951213, 0.557371, 0.448982, 0.562124, 0.352221, 0.471582, 0.186278, 0.491317, 0.41716, 0.677821, 0.385444, 0.63037, 0.296433, 0.0899407, 0.0487928, 0.578887, 0.815803, 0.689649, 0.261851, 0.252038, 0.37926, 0.667175, 0.732286, 0.00749894, 0.012018, 0.728976, 0.340358, 
0.726787, 0.0126228, 0.0844123, 0.300078, 0.619279, 0.900787, 0.800688, 0.558176, 0.937416, 0.732996, 0.743198, 0.447094, 0.866121, 0.477469, 0.467666, 0.92351, 0.572643, 0.773355, 0.858272, 0.913547, 0.00876636, 0.497344, 0.937264, 0.960613, 0.873036, 0.308764, 0.170865, 0.865535, 0.495567, 0.134872, 0.778007, 0.499843, 0.0313207, 0.456921, 0.323303, 0.504609, 0.352537, 0.198514, 0.620026, 0.0658834, 0.207019, 0.685177, 0.100808, 0.457405, 0.0977736, 0.585276, 0.641454, 0.104167, 0.635023, 0.725897, 0.51809, 0.786746, 0.448747, 0.376423, 0.673121, 0.0684879, 0.923983, 0.0449011, 0.537136, 0.114841, 0.549288, 0.750526, 0.11157, 0.448892, 0.953918, 0.205836, 0.987109, 0.390195, 0.402934, 0.29101, 0.145598, 0.0904446, 0.508479, 0.899575, 0.335628, 0.205328, 0.612887, 0.838188, 0.724113, 0.418279, 0.355856, 0.601943, 0.626041, 0.341544, 0.42137, 0.720236, 0.026311, 0.768529, 0.562989, 0.80661, 0.240361, 0.904136, 0.657071, 0.800744, 0.198787, 0.472487, 0.530819, 0.120024, 0.87302, 0.186508, 0.389849, 0.0884889, 0.468343, 0.270116, 0.0577192, 0.362591, 0.154844, 0.873442, 0.861372, 0.684509, 0.887037, 0.560456, 0.481647, 0.946096, 0.327903, 0.625961, 0.221942, 0.643654, 0.952271, 0.355433, 0.621739, 0.667939, 0.951968, 0.740857, 0.364482, 0.387577, 0.316803, 0.832348, 0.396577, 0.976845, 0.562211, 0.945433, 0.182286, 0.187513, 0.43601, 0.178474, 0.286544, 0.316904, 0.963928, 0.790494, 0.509314, 0.372089, 0.781143, 0.58786, 0.717809, 0.870357, 0.560268, 0.792536, 0.0338459, 0.0956249, 0.769001, 0.169085, 0.906055, 0.867625, 0.726361, 0.926679, 0.751265, 0.720305, 0.0716507, 0.425972, 0.0601926, 0.0424241, 0.0579652, 0.0815104, 0.454462, 0.697332, 0.69622, 0.382199, 0.382578, 0.510138, 0.295342, 0.657292, 0.558957, 0.279807, 0.70899, 0.894805, 0.355121, 0.0950023, 0.917664, 0.112727, 0.305631, 0.495151, 0.807186, 0.984091, 0.298012, 0.364434, 0.230468, 0.312518, 0.491191, 0.351394, 0.551979, 0.394254, 0.0731474, 0.395123, 0.571293, 0.330406, 0.0159654, 0.378904, 0.130546, 0.521402, 0.796156, 0.640325, 0.300186, 0.764442, 0.911224, 0.676175, 0.913732, 0.509067, 0.0551023, 0.937267, 0.174653, 0.435427, 0.65437, 0.135888, 0.989697, 0.787868, 0.821589, 0.488962, 0.0703476, 0.611484, 0.711801, 0.481169, 0.319423, 0.734781, 0.505285, 0.424958, 0.956943, 0.40072, 0.576567, 0.0459604, 0.0758141, 0.892585, 0.495602, 0.505073, 0.601984, 0.0296972, 0.801903, 0.424139, 0.617366, 0.954686, 0.191347, 0.768388, 0.36253, 0.65362, 0.765267, 0.979896, 0.500005, 0.232125, 0.541841, 0.362593, 0.154752, 0.327212, 0.84854, 0.170198, 0.92936, 0.0235973, 0.84099, 0.786756, 0.666204, 0.8138, 0.384289, 0.642254, 0.141085, 0.223595, 0.309771, 0.0179205, 0.607475, 0.355032, 0.209158, 0.242304, 0.0504485, 0.756668, 0.713193, 0.70217, 0.497415, 0.0634329, 0.120869, 0.880211, 0.119807, 0.878749, 0.685392, 0.650712, 0.0388514, 0.324076, 0.790914, 0.895817, 0.354683, 0.058055, 0.555214, 0.786784, 0.978338, 0.95984, 0.201696, 0.480538, 0.697051, 0.403472, 0.941783, 0.347462, 0.897183, 0.937269, 0.446034, 0.203126, 0.0735603, 0.842093, 0.270609, 0.532021, 0.110406, 0.439537, 0.986692, 0.762553, 0.0800624, 0.162521, 0.0787149, 0.688771, 0.882489, 0.687954, 0.418764, 0.289934, 0.727295, 0.0213109, 0.110143, 0.207256, 0.601022, 0.374144, 0.382161, 0.321523, 0.427436, 0.15197, 0.298821, 0.650907, 0.583441, 0.210527, 0.53983, 0.657975, 0.440817, 0.410976, 0.428073, 0.0240063, 0.135289, 0.984294, 0.859798, 0.701082, 0.285167, 0.481218, 0.468787, 0.557557, 0.419577, 0.423646, 0.406614, 0.794404, 0.630884, 0.418622, 0.21816, 0.744289, 
0.0166752, 0.633789, 0.394236, 0.880665, 0.378953, 0.153176, 0.482406, 0.171829, 0.156358, 0.387583, 0.0319509, 0.212256, 0.904435, 0.907839, 0.831724, 0.480049, 0.451513, 0.628216, 0.46601, 0.401016, 0.580009, 0.544344, 0.925404, 0.451591, 0.350011, 0.0560693, 0.551604, 0.153178, 0.670187, 0.672673, 0.26853, 0.508881, 0.583921, 0.353768, 0.433873, 0.694867, 0.625415, 0.92178, 0.348364, 0.15958, 0.512416, 0.078001, 0.602934, 0.494176, 0.968622, 0.69839, 0.196948, 0.912552, 0.770323, 0.556439, 0.951618, 0.102979, 0.12159, 0.927202, 0.800828, 0.866306, 0.657199, 0.555737, 0.149462, 0.680874, 0.103636, 0.169904, 0.0268314, 0.414021, 0.436971, 0.227315, 0.16451, 0.605249, 0.88505, 0.540535, 0.263318, 0.377439, 0.180303, 0.295118, 0.518736, 0.711831, 0.744802, 0.202304, 0.068588, 0.981359, 0.49585, 0.233852, 0.769629, 0.633789, 0.859414, 0.510856, 0.571438, 0.788673, 0.339717, 0.820124, 0.584874, 0.535427, 0.489859, 0.789376, 0.00524708, 0.162477, 0.630305, 0.0310249, 0.958694, 0.403822, 0.580953, 0.652561, 0.861327, 0.206028, 0.995499, 0.471084, 0.850632, 0.529751, 0.167537, 0.212884, 0.8901, 0.472001, 0.922586, 0.654296, 0.637635, 0.26019, 0.824502, 0.720869, 0.167189, 0.385145, 0.521459, 0.824526, 0.941269, 0.783275, 0.940337, 0.833575, 0.485754, 0.497875, 0.522086, 0.396419, 0.506003, 0.223568, 0.378713, 0.524771, 0.627774, 0.379961, 0.0113596, 0.90562, 0.262278, 0.440791, 0.122275, 0.277647, 0.728027, 0.777318, 0.142085, 0.735699, 0.200232, 0.216511, 0.342314, 0.77208, 0.969821, 0.24888, 0.828207, 0.651368, 0.13482, 0.479871, 0.430622, 0.974602, 0.967598, 0.511278, 0.478787, 0.953568, 0.228034, 0.938029, 0.574486, 0.576503, 0.367655, 0.688902, 0.240765, 0.755151, 0.511214, 0.413689, 0.874389, 0.766239, 0.733195, 0.528584, 0.809898, 0.385999, 0.434193, 0.39561, 0.810905, 0.928129, 0.415032, 0.028492, 0.428988, 0.1721, 0.850928, 0.464735, 0.582068, 0.735104, 0.822143, 0.377147, 0.190491, 0.669161, 0.80899, 0.64389, 0.0351567, 0.354281, 0.581205, 0.791919, 0.502238, 0.952955, 0.40031, 0.535811, 0.930929, 0.512594, 0.084743, 0.453881, 0.425436, 0.286444, 0.977658, 0.684313, 0.160129, 0.182474, 0.235804, 0.828497, 0.836865, 0.660182, 0.818085, 0.861015, 0.500214, 0.0116115, 0.208374, 0.0988077, 0.357043, 0.538201, 0.806053, 0.652063, 0.806284, 0.896305, 0.774853, 0.328033, 0.84799, 0.0949448, 0.450385, 0.2535, 0.347378, 0.648877, 0.573409, 0.727559, 0.0924425, 0.701239, 0.138866, 0.106181, 0.4891, 0.616478, 0.572372, 0.742498, 0.844443, 0.0704961, 0.74343, 0.6042, 0.030248, 0.708378, 0.690297, 0.247331, 0.206112, 0.381753, 0.135333, 0.805018, 0.0659857, 0.36753, 0.789684, 0.19796, 0.553318, 0.440364, 0.51726, 0.0164073, 0.957443, 0.573192, 0.801326, 0.60658, 0.105812, 0.523577, 0.557767, 0.667081, 0.0984848, 0.699305, 0.0611913, 0.482328, 0.860499, 0.132132, 0.951346, 0.113375, 0.308959, 0.368957, 0.43369, 0.349244, 0.867766, 0.700065, 0.89433, 0.441894, 0.938852, 0.693054, 0.289863, 0.924269, 0.385195, 0.239487, 0.153881, 0.735591, 0.526503, 0.180008, 0.393228, 0.972246, 0.980854, 0.381101, 0.739409, 0.189131, 0.918227, 0.112858, 0.313625, 0.429922, 0.851084, 0.93912, 0.200137, 0.0985903, 0.595777, 0.591349, 0.207404, 0.459318, 0.647488, 0.419908, 0.820103, 0.105258, 0.795565, 0.225743, 0.816474, 0.621592, 0.275532, 0.0624432, 0.207009, 0.677055, 0.124676, 0.769083, 0.881069, 0.776066, 0.83893, 0.664457, 0.298255, 0.714483, 0.223298, 0.907889, 0.883852, 0.36537, 0.0423008, 0.321646, 0.386671, 0.89863, 0.354835, 0.117708, 0.784395, 0.141213, 0.489182, 0.927361, 0.662235, 0.724094, 0.947106, 
0.461434, 0.379633, 0.675269, 0.895639, 0.443398, 0.872905, 0.7078, 0.602856, 0.615553, 0.828447, 0.983407, 0.867956, 0.960234, 0.854225, 0.95768, 0.846006, 0.993106, 0.652087, 0.80718, 0.387303, 0.568787, 0.960863, 0.527272, 0.977914, 0.558636, 0.517497, 0.289314, 0.400167, 0.955215, 0.967584, 0.890482, 0.761053, 0.644388, 0.0896281, 0.946905, 0.44007, 0.401923, 0.632901, 0.875976, 0.82693, 0.747322, 0.94296, 0.584106, 0.734379, 0.859386, 0.348528, 0.955978, 0.974558, 0.858937, 0.590861, 0.0371461, 0.322847, 0.21362, 0.0409147, 0.159066, 0.63671, 0.979201, 0.587865, 0.901966, 0.559122, 0.230168, 0.803065, 0.364141, 0.255879, 0.538784, 0.923088, 0.368559, 0.161817, 0.699224, 0.936226, 0.886078, 0.404526, 0.994803, 0.359979, 0.690947, 0.387693, 0.212088, 0.0593306, 0.0811423, 0.00384553, 0.130928, 0.949701, 0.983692, 0.959441, 0.232713, 0.422024, 0.0384694, 0.202225, 0.725466, 0.0943787, 0.927419, 0.366823, 0.0977647, 0.949396, 0.067692, 0.694956, 0.665991, 0.481763, 0.387406, 0.331822, 0.457642, 0.795502, 0.604977, 0.510923, 0.425644, 0.623727, 0.529629, 0.547543, 0.331643, 0.818541, 0.441864, 0.271722, 0.653498, 0.149501, 0.639891, 0.057536, 0.276263, 0.387051, 0.109804, 0.994598, 0.546664, 0.931055, 0.516266, 0.647591, 0.59613, 0.828497, 0.86839, 0.953553, 0.991558, 0.486381, 0.681546, 0.547796, 0.34338, 0.85507, 0.272332, 0.184842, 0.375237, 0.658012, 0.268606, 0.299662, 0.54102, 0.0892637, 0.681033, 0.563877, 0.604934, 0.272235, 0.225994, 0.396576, 0.50135, 0.213292, 0.151713, 0.169749, 0.182846, 0.0746022, 0.0525105, 0.512076, 0.350676, 0.386923, 0.0716417, 0.706776, 0.531437, 0.733413, 0.601435, 0.543231, 0.193002, 0.839681, 0.363113, 0.138547, 0.525965, 0.0105383, 0.415747, 0.562991, 0.0844708, 0.723957, 0.0968893, 0.142935, 0.146374, 0.638781, 0.103927, 0.537742, 0.400325, 0.427032, 0.435209, 0.918748, 0.638748, 0.554367, 0.661079, 0.66048, 0.963742, 0.920779, 0.284612, 0.0524512, 0.962995, 0.298949, 0.659297, 0.783819, 0.138867, 0.536478, 0.680169, 0.449397, 0.200154, 0.638287, 0.0127073, 0.689898, 0.86008, 0.488335, 0.116243, 0.794503, 0.910228, 0.591959, 0.426112, 0.141723, 0.203937, 0.843474, 0.0230569, 0.570641, 0.84311, 0.808082, 0.236676, 0.2775, 0.179525, 0.840489, 0.879903, 0.30792, 0.28674, 0.366845, 0.48468, 0.547914, 0.463143, 0.0684909, 0.936271, 0.770543, 0.624828, 0.424174, 0.792742, 0.507997, 0.331753, 0.39199, 0.763454, 0.0202253, 0.542059, 0.19315, 0.71053, 0.0529077, 0.27654, 0.167861, 0.901709, 0.909553, 0.663971, 0.417682, 0.218154, 0.768768, 0.0387871, 0.981962, 0.0566515, 0.668589, 0.396987, 0.11578, 0.0283513, 0.950349, 0.204337, 0.323745, 0.184177, 0.624042, 0.770279, 0.00786911, 0.95886, 0.570679, 0.785266, 0.425791, 0.710022, 0.0491318, 0.550038, 0.797806, 0.411732, 0.0264604, 0.487609, 0.412082, 0.903675, 0.93333, 0.280415, 0.554696, 0.733814, 0.1412, 0.0161727, 0.228676, 0.629394, 0.227567, 0.93656, 0.462834, 0.276516, 0.529687, 0.737354, 0.560569, 0.894543, 0.202988, 0.999051, 0.60471, 0.0443098, 0.112014, 0.660049, 0.284839, 0.505573, 0.644383, 0.332208, 0.29167, 0.000343225, 0.268097, 0.852227, 0.599293, 0.36666, 0.103163, 0.448847, 0.130198, 0.426567, 0.43813, 0.770131, 0.453424, 0.626394, 0.799118, 0.410953, 0.713702, 0.10277, 0.680933, 0.739307, 0.589131, 0.896462, 0.885072, 0.0622041, 0.0877848, 0.45064, 0.316362, 0.240547, 0.601225, 0.0695273, 0.578184, 0.729456, 0.401378, 0.0416692, 0.197792, 0.0641177, 0.785206, 0.571652, 0.879141, 0.880788, 0.539646, 0.555385, 0.382785, 0.718479, 0.0366768, 0.572852, 0.590954, 0.118257, 0.602162, 0.0795446, 
0.835167, 0.75432, 0.898464, 0.658787, 0.450769, 0.940301, 0.90206, 0.563925, 0.134816, 0.0533965, 0.468155, 0.370117, 0.313159, 0.379347, 0.453397, 0.664017, 0.106684, 0.402152, 0.995593, 0.238902, 0.235804, 0.499983, 0.597113, 0.500353, 0.67415, 0.455596, 0.723423, 0.0586574, 0.435994, 0.342112, 0.611722, 0.24995, 0.568201, 0.773938, 0.791357, 0.470812, 0.692584, 0.697293, 0.701657, 0.971693, 0.300333, 0.211367, 0.184905, 0.915232, 0.424476, 0.239299, 0.621922, 0.801317, 0.615525, 0.659105, 0.177266, 0.314794, 0.348309, 0.984988, 0.844802, 0.0415893, 0.0307191, 0.109317, 0.456472, 0.0414806, 0.00614883, 0.996687, 0.280178, 0.217272, 0.375418, 0.657176, 0.952215, 0.514172, 0.473665, 0.559307, 0.665895, 0.762992, 0.371533, 0.404242, 0.574514, 0.349613, 0.259723, 0.682176, 0.325401, 0.913383, 0.752729, 0.956178, 0.0677681, 0.486769, 0.503736, 0.67392, 0.893188, 0.322593, 0.22014, 0.579357, 0.926983, 0.308941, 0.974191, 0.779624, 0.718414, 0.715724, 0.917279, 0.862888, 0.354193, 0.0711811, 0.107897, 0.94193, 0.408568, 0.890099, 0.829855, 0.881492, 0.22875, 0.0127978, 0.011247, 0.519295, 0.583926, 0.6766, 0.234748, 0.7012, 0.511272, 0.465539, 0.540992, 0.858866, 0.235691, 0.948146, 0.611724, 0.63148, 0.80454, 0.569192, 0.549704, 0.677574, 0.988443, 0.712659, 0.425419, 0.514053, 0.792905, 0.715539, 0.798611, 0.567692, 0.241777, 0.852295, 0.913802, 0.694366, 0.470208, 0.152784, 0.757416, 0.362365, 0.850051, 0.00264538, 0.460383, 0.213732, 0.531885, 0.433104, 0.0523126, 0.318962, 0.903564, 0.213849, 0.708347, 0.505023, 0.208189, 0.959419, 0.86761, 0.292633, 0.32004, 0.214158, 0.687243, 0.375985, 0.778947, 0.633588, 0.860767, 0.437353, 0.806654, 0.753497, 0.257359, 0.153327, 0.79372, 0.511514, 0.726074, 0.778678, 0.640024, 0.0935934, 0.612531, 0.845934, 0.856668, 0.159275, 0.0983851, 0.0338585, 0.0816387, 0.838208, 0.872847, 0.111055, 0.357432, 0.84666, 0.940788, 0.620127, 0.560973, 0.868608, 0.717113, 0.692519, 0.314356, 0.857225, 0.900682, 0.0181035, 0.00961911, 0.00612036, 0.825358, 0.725927, 0.62857, 0.168422, 0.898292, 0.747193, 0.0170086, 0.59325, 0.784296, 0.556686, 0.901377, 0.147478, 0.223836, 0.0801547, 0.869065, 0.429919, 0.106709, 0.762799, 0.985216, 0.927093, 0.559394, 0.420103, 0.231195, 0.193386, 0.150465, 0.162097, 0.353569, 0.171717, 0.77536, 0.655196, 0.144188, 0.757612, 0.673119, 0.809927, 0.505693, 0.518122, 0.16988, 0.103265, 0.917911, 0.672638, 0.278119, 0.33887, 0.672864, 0.167051, 0.917979, 0.946587, 0.151668, 0.0545913, 0.385106, 0.389816, 0.700828, 0.839174, 0.0119766, 0.0238333, 0.669581, 0.0673994, 0.565316, 0.561096, 0.577632, 0.169086, 0.804048, 0.761263, 0.981231, 0.979064, 0.152146, 0.503983, 0.957075, 0.232238, 0.693598, 0.312898, 0.782981, 0.984722, 0.75545, 0.460171, 0.0354336, 0.52951, 0.43004, 0.171406, 0.532135, 0.9776, 0.0985025, 0.898747, 0.72394, 0.726622, 0.670149, 0.130097, 0.406665, 0.420195, 0.523834, 0.809445, 0.700112, 0.822798, 0.816938, 0.978374, 0.542424, 0.12232, 0.645704, 0.531084, 0.789407, 0.639392, 0.294385, 0.370609, 0.736607, 0.450306, 0.745892, 0.466696, 0.0479832, 0.931319, 0.471393, 0.378464, 0.685517, 0.385303, 0.0793513, 0.65543, 0.141782, 0.883142, 0.298444, 0.999721, 0.710185, 0.779161, 0.343306, 0.754517, 0.746347, 0.768358, 0.556462, 0.298095, 0.0073683, 0.949943, 0.967652, 0.227505, 0.629643, 0.0353548, 0.959066, 0.612231, 0.501148, 0.828908, 0.245115, 0.884333, 0.712314, 0.95553, 0.326572, 0.984838, 0.966299, 0.127249, 0.885119, 0.410498, 0.203113, 0.927698, 0.161315, 0.768913, 0.999001, 0.71119, 0.832512, 0.950797, 0.730997, 
0.87339, 0.754119, 0.379897, 0.155136, 0.995455, 0.960437, 0.521546, 0.325177, 0.975384, 0.864303, 0.774282, 0.470459, 0.854358, 0.213583, 0.149578, 0.0029431, 0.645778, 0.157656, 0.31494, 0.675235, 0.798073, 0.883995, 0.826306, 0.382837, 0.00961885, 0.948502, 0.715269, 0.507345, 0.316638, 0.123658, 0.0380885, 0.0760055, 0.244934, 0.844188, 0.97335, 0.369245, 0.746758, 0.63331, 0.0253718, 0.214573, 0.820219, 0.191801, 0.0849191, 0.811435, 0.979755, 0.491341, 0.047972, 0.654638, 0.875591, 0.458108, 0.127132, 0.741734, 0.503403, 0.619794, 0.52892, 0.400461, 0.567607, 0.225805, 0.972737, 0.818295, 0.892317, 0.779674, 0.737902, 0.857006, 0.23724, 0.754698, 0.839711, 0.804661, 0.14683, 0.890841, 0.952665, 0.684101, 0.692381, 0.77351, 0.362895, 0.797268, 0.365331, 0.564682, 0.512197, 0.231953, 0.724453, 0.739392, 0.953193, 0.352544, 0.873279, 0.108286, 0.574154, 0.124608, 0.297452, 0.343713, 0.705411, 0.471638, 0.568462, 0.0870916, 0.95709, 0.705516, 0.947715, 0.28019, 0.348241, 0.89545, 0.166359, 0.927557, 0.090227, 0.535189, 0.29296, 0.383784, 0.363511, 0.385727, 0.624506, 0.208662, 0.517785, 0.9226, 0.627992, 0.756734, 0.71849, 0.904747, 0.474553, 0.104712, 0.466847, 0.845483, 0.23704, 0.875972, 0.795674, 0.813228, 0.561381, 0.217489, 0.379153, 0.9217, 0.120657, 0.212865, 0.339643, 0.402998, 0.337485, 0.837689, 0.71588, 0.820437, 0.137161, 0.734836, 0.43384, 0.390606, 0.76863, 0.902485, 0.653448, 0.586886, 0.11454, 0.237244, 0.491758, 0.285477, 0.473126, 0.492431, 0.875863, 0.222312, 0.214462, 0.53967, 0.572137, 0.456285, 0.791134, 0.700224, 0.0943796, 0.485968, 0.820876, 0.71187, 0.87956, 0.322256, 0.289008, 0.642231, 0.562762, 0.590441, 0.844272, 0.533532, 0.431432, 0.84989, 0.525721, 0.621898, 0.954284, 0.822625, 0.185374, 0.818666, 0.213296, 0.461567, 0.181689, 0.473609, 0.431153, 0.573114, 0.0406941, 0.582626, 0.905356, 0.930616, 0.954859, 0.983171, 0.596248, 0.4527, 0.47677, 0.924188, 0.98532, 0.695984, 0.476758, 0.386629, 0.0126092, 0.146338, 0.565628, 0.578453, 0.289324, 0.30088, 0.539916, 0.93363, 0.0704658, 0.109648, 0.214744, 0.595283, 0.33294, 0.950965, 0.929428, 0.813884, 0.618206, 0.864781, 0.18343, 0.14465, 0.963859, 0.957667, 0.492841, 0.798349, 0.586916, 0.0313671, 0.265043, 0.89578, 0.173169, 0.285286, 0.173418, 0.156524, 0.688003, 0.572639, 0.106593, 0.731564, 0.421688, 0.449339, 0.523175, 0.62286, 0.185018, 0.652623, 0.547864, 0.357667, 0.84197, 0.423256, 0.953343, 0.106257, 0.0739082, 0.738384, 0.857672, 0.634112, 0.294086, 0.733901, 0.359825, 0.74773, 0.196471, 0.700408, 0.97943, 0.912977, 0.962321, 0.577774, 0.964198, 0.962612, 0.56931, 0.455372, 0.481912, 0.722396, 0.48471, 0.956901, 0.329356, 0.220315, 0.421001, 0.924976, 0.361622, 0.25113, 0.0609949, 0.557615, 0.960276, 0.974457, 0.556058, 0.258683, 0.30762, 0.875404, 0.879741, 0.557299, 0.840141, 0.677022, 0.429622, 0.629197, 0.566561, 0.734285, 0.660293, 0.375673, 0.296674, 0.00365564, 0.330919, 0.864809, 0.52764, 0.341352, 0.0446571, 0.772668, 0.315053, 0.563374, 0.174156, 0.823374, 0.1807, 0.948436, 0.890809, 0.71882, 0.182657, 0.353268, 0.745919, 0.448371, 0.458774, 0.596618, 0.640457, 0.25174, 0.152258, 0.0895677, 0.950536, 0.346543, 0.407567, 0.999578, 0.808697, 0.146934, 0.0503009, 0.957366, 0.471509, 0.769222, 0.56417, 0.00688867, 0.701474, 0.240237, 0.142249, 0.0375286, 0.426234, 0.363617, 0.0834601, 0.0836809, 0.200853, 0.241534, 0.447741, 0.714228, 0.954587, 0.420323, 0.571062, 0.972781, 0.100924, 0.184836, 0.376265, 0.69656, 0.142651, 0.992868, 0.649462, 0.690062, 0.842583, 0.786338, 0.256428, 0.122638, 
0.224826, 0.611269, 0.948878, 0.0471729, 0.110879, 0.868052, 0.712388, 0.165319, 0.0485212, 0.727872, 0.788025, 0.44366, 0.554184, 0.355091, 0.457588, 0.457669, 0.0702683, 0.645146, 0.656114, 0.471351, 0.947632, 0.839267, 0.79427, 0.525043, 0.623998, 0.0718313, 0.0994423, 0.253284, 0.700895, 0.0620533, 0.834881, 0.183217, 0.204566, 0.483247, 0.68356, 0.654929, 0.325982, 0.0694041, 0.974176, 0.297671, 0.366199, 0.633409, 0.23664, 0.852447, 0.174777, 0.615545, 0.463499, 0.702313, 0.83723, 0.984157, 0.984313, 0.324485, 0.539787, 0.0217852, 0.903172, 0.0657557, 0.936454, 0.286387, 0.864025, 0.893007, 0.760028, 0.0631858, 0.527295, 0.650481, 0.674018, 0.870867, 0.226225, 0.635691, 0.694291, 0.796474, 0.325277, 0.230096, 0.990246, 0.862474, 0.611415, 0.837348, 0.667471, 0.893468, 0.137006, 0.805157, 0.727722, 0.575047, 0.0812447, 0.00332892, 0.0433659, 0.785997, 0.791497, 0.533775, 0.808561, 0.372525, 0.969321, 0.387242, 0.654623, 0.131302, 0.95441, 0.678156, 0.741826, 0.0784754, 0.995653, 0.142764, 0.0116793, 0.355654, 0.588896, 0.826082, 0.99155, 0.741596, 0.414717, 0.705101, 0.974348, 0.912202, 0.665971, 0.231464, 0.0922351, 0.848757, 0.805882, 0.51685, 0.688406, 0.737106, 0.396799, 0.172867, 0.511846, 0.899687, 0.22167, 0.928006, 0.271104, 0.324211, 0.514791, 0.171087, 0.997203, 0.959142, 0.890799, 0.941474, 0.853, 0.724237, 0.258315, 0.720872, 0.614316, 0.971943, 0.58439, 0.856296, 0.0299103, 0.110145, 0.850125, 0.448778, 0.709144, 0.941761, 0.759252, 0.471517, 0.544703, 0.9945, 0.82029, 0.900237, 0.0946534, 0.253158, 0.285683, 0.178263, 0.202297, 0.693995, 0.698544, 0.162467, 0.715778, 0.43853, 0.176624, 0.608896, 0.400533, 0.663253, 0.22253, 0.324155, 0.305958, 0.19465, 0.048463, 0.980281, 0.759648, 0.806053, 0.0957803, 0.330134, 0.645568, 0.678518, 0.105757, 0.278402, 0.4611, 0.116674, 0.648524, 0.354427, 0.593176, 0.826524, 0.378395, 0.74313, 0.0645038, 0.046516, 0.10157, 0.582681, 0.480452, 0.427569, 0.373421, 0.792701, 0.809276, 0.516038, 0.481245, 0.365289, 0.297136, 0.768756, 0.460179, 0.195181, 0.497868, 0.645771, 0.691792, 0.212452, 0.802635, 0.967529, 0.202553, 0.404711, 0.186143, 0.870516, 0.214961, 0.606942, 0.659521, 0.773491, 0.41635, 0.118373, 0.827707, 0.935168, 0.906606, 0.416938, 0.0941418, 0.184616, 0.0854075, 0.742107, 0.0377408, 0.0394036, 0.624772, 0.334759, 0.709317, 0.690739, 0.430583, 0.0661632, 0.795665, 0.143397, 0.0264727, 0.430718, 0.564947, 0.501005, 0.795074, 0.688456, 0.0758276, 0.114967, 0.318109, 0.268243, 0.491039, 0.029096, 0.19893, 0.454914, 0.508715, 0.880154, 0.950762, 0.717665, 0.270697, 0.918787, 0.40797, 0.629369, 0.572733, 0.160172, 0.133892, 0.459015, 0.753191, 0.705489, 0.947349, 0.890487, 0.259758, 0.268497, 0.0972714, 0.0695712, 0.932958, 0.519835, 0.306024, 0.330323, 0.532885, 0.287885, 0.357825, 0.844217, 0.25325, 0.934947, 0.587595, 0.779412, 0.0936196, 0.331646, 0.167257, 0.861833, 0.338368, 0.360992, 0.672436, 0.181959, 0.386948, 0.674912, 0.413251, 0.558263, 0.911673, 0.862047, 0.91121, 0.045779, 0.0691727, 0.814198, 0.332049, 0.121373, 0.967023, 0.963251, 0.307342, 0.134534, 0.7198, 0.192787, 0.297363, 0.0821706, 0.66607, 0.148892, 0.0083189, 0.722758, 0.802759, 0.4492, 0.611604, 0.728216, 0.898171, 0.999513, 0.513938, 0.449756, 0.574561, 0.494023, 0.214097, 0.0930019, 0.279293, 0.600591, 0.14003, 0.851867, 0.133887, 0.947935, 0.449993, 0.04731, 0.459171, 0.311677, 0.620215, 0.606521, 0.97574, 0.0868678, 0.354204, 0.413799, 0.949669, 0.827198, 0.0239478, 0.979115, 0.846768, 0.403074, 0.580696, 0.341495, 0.606055, 0.0822114, 0.468938, 
0.515763, 0.156409, 0.457546, 0.83558, 0.34622, 0.317464, 0.4094, 0.0774554, 0.359015, 0.34625, 0.499322, 0.0888448, 0.909101, 0.0453446, 0.000850094, 0.295581, 0.401917, 0.716063, 0.428869, 0.921903, 0.888364, 0.184599, 0.776387, 0.810191, 0.392834, 0.153734, 0.203344, 0.837668, 0.976633, 0.847462, 0.874881, 0.668662, 0.652816, 0.914987, 0.00451315, 0.981843, 0.328409, 0.258969, 0.538078, 0.559569, 0.683026, 0.276111, 0.308796, 0.890213, 0.327725, 0.61278, 0.00449197, 0.346267, 0.692164, 0.392556, 0.422186, 0.475265, 0.120223, 0.925079, 0.5976, 0.752428, 0.858827, 0.249243, 0.173745, 0.626284, 0.0927299, 0.0349499, 0.875995, 0.0166703, 0.184881, 0.188308, 0.9454, 0.491007, 0.974666, 0.233835, 0.258856, 0.831252, 0.816365, 0.406586, 0.580223, 0.0649543, 0.638888, 0.248592, 0.21366, 0.441546, 0.634155, 0.996909, 0.609874, 0.890964, 0.455623, 0.746403, 0.781809, 0.889497, 0.941262, 0.243813, 0.851309, 0.682001, 0.303302, 0.83347, 0.701662, 0.838946, 0.485931, 0.0549321, 0.320827, 0.444271, 0.0381938, 0.735022, 0.70405, 0.516336, 0.0875818, 0.966047, 0.51303, 0.131951, 0.568519, 0.283458, 0.936966, 0.340116, 0.837635, 0.938004, 0.713295, 0.388906, 0.143626, 0.0348835, 0.214307, 0.730334, 0.350882, 0.0980965, 0.673089, 0.773921, 0.823503, 0.695154, 0.733075, 0.746001, 0.572553, 0.978582, 0.670927, 0.130238, 0.7828, 0.0441233, 0.96649, 0.112834, 0.782979, 0.26588, 0.804274, 0.560883, 0.60293, 0.425478, 0.42237, 0.893522, 0.971839, 0.510324, 0.945489, 0.793186, 0.641643, 0.480637, 0.904342, 0.4877, 0.412663, 0.427252, 0.327909, 0.731806, 0.545454, 0.726311, 0.791445, 0.134922, 0.618404, 0.916366, 0.626868, 0.176367, 0.438304, 0.0124284, 0.38679, 0.0709086, 0.523552, 0.359128, 0.931022, 0.273408, 0.805746, 0.836785, 0.896942, 0.929909, 0.472329, 0.546212, 0.559585, 0.828755, 0.132333, 0.273131, 0.767111, 0.867155, 0.568318, 0.402194, 0.80677, 0.874407, 0.706021, 0.743277, 0.147495, 0.497984, 0.0228355, 0.018538, 0.848837, 0.574097, 0.0909596, 0.193292, 0.0255437, 0.201144, 0.554184, 0.475002, 0.796415, 0.489669, 0.357315, 0.100423, 0.705043, 0.0197173, 0.762717, 0.143447, 0.204091, 0.196282, 0.3405, 0.266557, 0.988108, 0.454827, 0.493077, 0.555779, 0.28388, 0.488911, 0.292256, 0.265432, 0.0628563, 0.100546, 0.939694, 0.432897, 0.786097, 0.551135, 0.569943, 0.58404, 0.546083, 0.92622, 0.651898, 0.320026, 0.219867, 0.649951, 0.506432, 0.844216, 0.249193, 0.523493, 0.529749, 0.0274718, 0.473925, 0.667786, 0.583479, 0.777275, 0.764091, 0.286952, 0.177703, 0.226733, 0.219178, 0.98811, 0.871416, 0.380711, 0.460821, 0.8267, 0.0635667, 0.299989, 0.333724, 0.925175, 0.84771, 0.0957246, 0.818235, 0.657102, 0.742433, 0.918443, 0.89082, 0.816633, 0.379226, 0.0953685, 0.889036, 0.0915369, 0.108673, 0.776449, 0.411121, 0.0971075, 0.428249, 0.952901, 0.448069, 0.289354, 0.848445, 0.816563, 0.857344, 0.127785, 0.0876948, 0.704475, 0.458588, 0.0695453, 0.561227, 0.403416, 0.0340543, 0.695906, 0.721814, 0.545271, 0.692094, 0.449162, 0.0101133, 0.955742, 0.0744578, 0.0230509, 0.80775, 0.37216, 0.306166, 0.140889, 0.147295, 0.0910706, 0.228604, 0.0629362, 0.105301, 0.976443, 0.784602, 0.85427, 0.319584, 0.839332, 0.36463, 0.820518, 0.53021, 0.737608, 0.157278, 0.291652, 0.521901, 0.878088, 0.362409, 0.348868, 0.316342, 0.85833, 0.747563, 0.859804, 0.899792, 0.501878, 0.342417, 0.548447, 0.436572, 0.78287, 0.796021, 0.60547, 0.712261, 0.719629, 0.732916, 0.0691805, 0.913756, 0.423561, 0.0999369, 0.343425, 0.892776, 0.080708, 0.80676, 0.373842, 0.55274, 0.405536, 0.775874, 0.157916, 0.178894, 0.907335, 0.831711, 
0.773824, 0.353116, 0.213652, 0.480229, 0.762299, 0.200408, 0.45256, 0.883378, 0.36445, 0.50118, 0.812127, 0.636674, 0.716257, 0.26549, 0.733552, 0.0164629, 0.852164, 0.730687, 0.142062, 0.538074, 0.470815, 0.373797, 0.656745, 0.939293, 0.825732, 0.193692, 0.271752, 0.878585, 0.897819, 0.0510397, 0.226767, 0.970232, 0.452866, 0.348396, 0.755521, 0.79419, 0.887324, 0.252607, 0.461492, 0.982379, 0.981183, 0.000563909, 0.166662, 0.947894, 0.555107, 0.343203, 0.814985, 0.100717, 0.130978, 0.887828, 0.0660569, 0.0786789, 0.50107, 0.546827, 0.555532, 0.301241, 0.347113, 0.682008, 0.833179, 0.912085, 0.434421, 0.0203689, 0.369309, 0.965034, 0.564557, 0.903803, 0.916688, 0.561587, 0.485956, 0.188589, 0.630288, 0.553069, 0.0818616, 0.549482, 0.737253, 0.972696, 0.619968, 0.91962, 0.648584, 0.285313, 0.139861, 0.978674, 0.855924, 0.770446, 0.320651, 0.0907761, 0.353458, 0.797755, 0.796704, 0.363182, 0.0477224, 0.377253, 0.988718, 0.422171, 0.0410823, 0.0648559, 0.860767, 0.0158402, 0.773974, 0.0878536, 0.866743, 0.433906, 0.954403, 0.717973, 0.516224, 0.188245, 0.646973, 0.289479, 0.608346, 0.480279, 0.00959032, 0.638653, 0.789162, 0.222084, 0.89853, 0.193127, 0.871977, 0.505576, 0.789938, 0.468664, 0.518494, 0.437183, 0.909318, 0.559601, 0.226763, 0.640364, 0.401715, 0.348778, 0.455293, 0.291447, 0.112201, 0.479516, 0.639978, 0.632286, 0.39995, 0.343699, 0.766534, 0.0903324, 0.465614, 0.00699166, 0.33698, 0.995661, 0.598812, 0.0409469, 0.849873, 0.549859, 0.227782, 0.573034, 0.875245, 0.574172, 0.315462, 0.64763, 0.734943, 0.23191, 0.0181727, 0.349757, 0.842983, 0.928829, 0.210515, 0.52014, 0.24415, 0.00709784, 0.298236, 0.70972, 0.775037, 0.131526, 0.254659, 0.109642, 0.902058, 0.0810867, 0.894335, 0.864379, 0.829596, 0.246748, 0.566536, 0.411108, 0.521713, 0.569299, 0.0919209, 0.729979, 0.407958, 0.944501, 0.556948, 0.457221, 0.885695, 0.932005, 0.585057, 0.901422, 0.313922, 0.666369, 0.394859, 0.0751098, 0.477243, 0.614728, 0.631539, 0.372708, 0.407733, 0.942874, 0.579093, 0.909681, 0.860376, 0.093909, 0.530909, 0.896598, 0.389745, 0.372498, 0.229945, 0.686302, 0.0808744, 0.0809309, 0.664149, 0.506449, 0.595882, 0.554841, 0.422232, 0.134856, 0.709975, 0.353235, 0.122968, 0.0801314, 0.499966, 0.0260317, 0.643237, 0.610541, 0.958465, 0.833629, 0.54053, 0.789644, 0.359161, 0.817851, 0.374599, 0.559545, 0.20301, 0.35341, 0.820238, 0.041116, 0.919098, 0.191215, 0.717778, 0.739251, 0.472893, 0.144513, 0.631646, 0.620376, 0.226243, 0.641126, 0.438453, 0.138248, 0.386422, 0.222032, 0.427272, 0.741038, 0.358608, 0.676207, 0.659127, 0.77423, 0.0056963, 0.303172, 0.376431, 0.266839, 0.0571987, 0.67568, 0.418615, 0.945006, 0.214421, 0.78264, 0.504071, 0.7178, 0.341034, 0.630461, 0.447086, 0.0439287, 0.8875, 0.544724, 0.112584, 0.998354, 0.9809, 0.258963, 0.191181, 0.0560489, 0.257646, 0.846637, 0.626145, 0.195457, 0.824605, 0.752355, 0.945746, 0.740393, 0.653025, 0.136753, 0.586021, 0.475397, 0.0714114, 0.147559, 0.8442, 0.380127, 0.738006, 0.609101, 0.188612, 0.632001, 0.603794, 0.537657, 0.177305, 0.852774, 0.966591, 0.665321, 0.503393, 0.210565, 0.715953, 0.387927, 0.0440149, 0.339485, 0.0823881, 0.367538, 0.767779, 0.884345, 0.109396, 0.63223, 0.412455, 0.178465, 0.0507706, 0.485121, 0.294607, 0.370991, 0.944419, 0.136482, 0.845479, 0.121908, 0.191002, 0.131244, 0.00622841, 0.567604, 0.212976, 0.36287, 0.607975, 0.431318, 0.40082, 0.709974, 0.804813, 0.952521, 0.276572, 0.17742, 0.105584, 0.888779, 0.683337, 0.170643, 0.440185, 0.543874, 0.119981, 0.589082, 0.186223, 0.982259, 0.231994, 0.810508, 
0.552301, 0.237806, 0.198615, 0.805346, 0.0238344, 0.0928923, 0.0785491, 0.430659, 0.537014, 0.930875, 0.471287, 0.0484842, 0.969099, 0.793085, 0.884125, 0.952769, 0.647739, 0.559867, 0.924879, 0.0319831, 0.586215, 0.321531, 0.831833, 0.939589, 0.638587, 0.903751, 0.375221, 0.404078, 0.809914, 0.909374, 0.26586, 0.631018, 0.429911, 0.675685, 0.794163, 0.618099, 0.691647, 0.903324, 0.728971, 0.846593, 0.6666, 0.741252, 0.570946, 0.46837, 0.179672, 0.774219, 0.384656, 0.849555, 0.533305, 0.590401, 0.964318, 0.457615, 0.321516, 0.0208775, 0.373421, 0.510021, 0.982442, 0.232421, 0.750315, 0.361089, 0.397923, 0.206362, 0.214752, 0.33211, 0.914989, 0.600919, 0.650647, 0.763654, 0.2461, 0.190174, 0.333671, 0.317425, 0.933695, 0.9784, 0.960239, 0.194299, 0.450556, 0.10371, 0.524373, 0.345593, 0.850267, 0.935772, 0.649672, 0.901208, 0.486565, 0.108914, 0.464129, 0.88974, 0.476738, 0.797901, 0.831085, 0.966915, 0.0264328, 0.932948, 0.251282, 0.85788, 0.404111, 0.0912275, 0.668941, 0.0964484, 0.215694, 0.511941, 0.0665426, 0.731441, 0.599129, 0.605807, 0.207614, 0.572226, 0.832813, 0.456038, 0.407284, 0.561743, 0.3099, 0.647478, 0.836336, 0.176686, 0.0205143, 0.93403, 0.766272, 0.573602, 0.218061, 0.796744, 0.913891, 0.827296, 0.259705, 0.245004, 0.891785, 0.872052, 0.599454, 0.563108, 0.0328248, 0.0883664, 0.525946, 0.947825, 0.921859, 0.622588, 0.222157, 0.185422, 0.693733, 0.000926891, 0.652556, 0.488593, 0.0708218, 0.291109, 0.0743702, 0.139774, 0.851221, 0.610232, 0.411828, 0.513488, 0.982313, 0.874602, 0.122241, 0.617891, 0.716313, 0.183879, 0.21519, 0.479264, 0.840607, 0.0509966, 0.967678, 0.564133, 0.0912643, 0.140889, 0.93038, 0.887509, 0.222076, 0.574473, 0.910053, 0.3291, 0.0677432, 0.161977, 0.637396, 0.472756, 0.650265, 0.265576, 0.628293, 0.360273, 0.0922663, 0.894224, 0.669931, 0.322352, 0.149659, 0.53929, 0.266515, 0.0519883, 0.985195, 0.210595, 0.86933, 0.44075, 0.431001, 0.326156, 0.0404525, 0.596557, 0.638746, 0.738458, 0.403545, 0.986486, 0.582298, 0.868717, 0.501291, 0.929942, 0.445173, 0.781671, 0.923339, 0.0751635, 0.128539, 0.319603, 0.187612, 0.211502, 0.453255, 0.30613, 0.902157, 0.730254, 0.431835, 0.953954, 0.279854, 0.530319, 0.586014, 0.277366, 0.538647, 0.462936, 0.875658, 0.143087, 0.355321, 0.424377, 0.566115, 0.768805, 0.0632722, 0.227152, 0.794137, 0.834637, 0.399366, 0.376948, 0.060539, 0.175033, 0.934245, 0.00284063, 0.900301, 0.560909, 0.446826, 0.397935, 0.152239, 0.389596, 0.758969, 0.72322, 0.551895, 0.617212, 0.942954, 0.616343, 0.321175, 0.613545, 0.397637, 0.734759, 0.319936, 0.642448, 0.410287, 0.933028, 0.799424, 0.599313, 0.879514, 0.679369, 0.407169, 0.397339, 0.512815, 0.329459, 0.304717, 0.0825684, 0.146401, 0.374815, 0.92173, 0.13524, 0.935348, 0.0647594, 0.0038909, 0.200255, 0.751794, 0.201043, 0.236771, 0.18696, 0.480723, 0.808138, 0.062058, 0.170385, 0.0915421, 0.276257, 0.338185, 0.675228, 0.777146, 0.610218, 0.0298378, 0.437848, 0.0347487, 0.812859, 0.421852, 0.557814, 0.990453, 0.426372, 0.858653, 0.288976, 0.659399, 0.844169, 0.225242, 0.455817, 0.172944, 0.354925, 0.27737, 0.606244, 0.966951, 0.650472, 0.396416, 0.211964, 0.187634, 0.0160894, 0.0821767, 0.59884, 0.817367, 0.847383, 0.0300615, 0.0574575, 0.548312, 0.751748, 0.194943, 0.589198, 0.879761, 0.721534, 0.470941, 0.468215, 0.715272, 0.810768, 0.646068, 0.901215, 0.955512, 0.743163, 0.962022, 0.374871, 0.80422, 0.690661, 0.0250751, 0.79563, 0.102893, 0.322101, 0.0690305, 0.940337, 0.641574, 0.650997, 0.491007, 0.817132, 0.0148011, 0.403251, 0.992396, 0.188816, 0.376219, 0.10534, 
0.177642, 0.431083, 0.86558, 0.892058, 0.779712, 0.449729, 0.389934, 0.992887, 0.931071, 0.864745, 0.779918, 0.596726, 0.585226, 0.352476, 0.826316, 0.127054, 0.478924, 0.110383, 0.580022, 0.82421, 0.800809, 0.865504, 0.910334, 0.0467139, 0.919823, 0.380784, 0.0631186, 0.508982, 0.801985, 0.175551, 0.451541, 0.372707, 0.226567, 0.570796, 0.125396, 0.943644, 0.401429, 0.989618, 0.242919, 0.97873, 0.47496, 0.063345, 0.77485, 0.292343, 0.0468066, 0.836154, 0.879952, 0.804999, 0.274545, 0.265815, 0.949376, 0.652124, 0.415027, 0.772576, 0.223914, 0.958858, 0.163547, 0.673092, 0.398409, 0.178012, 0.368437, 0.902936, 0.0266671, 0.825638, 0.0887684, 0.704695, 0.00433382, 0.77782, 0.559835, 0.233247, 0.227651, 0.358069, 0.749393, 0.117542, 0.433086, 0.211494, 0.340984, 0.24799, 0.786604, 0.269659, 0.432376, 0.312364, 0.719123, 0.370674, 0.00847289, 0.807866, 0.863069, 0.100777, 0.536513, 0.423337, 0.231107, 0.586951, 0.567401, 0.427884, 0.73487, 0.208975, 0.353041, 0.247483, 0.456553, 0.736874, 0.94209, 0.264569, 0.210946, 0.639523, 0.079536, 0.678965, 0.573502, 0.43078, 0.444149, 0.296157, 0.664199, 0.729483, 0.317224, 0.862687, 0.906606, 0.281476, 0.507251, 0.204797, 0.908557, 0.86077, 0.207878, 0.670553, 0.439633, 0.866356, 0.645031, 0.163276, 0.77942, 0.875031, 0.377858, 0.308918, 0.811035, 0.691951, 0.738388, 0.508979, 0.601705, 0.0702119, 0.263637, 0.109569, 0.867348, 0.603911, 0.0351418, 0.90264, 0.359667, 0.92715, 0.487009, 0.558423, 0.59962, 0.3381, 0.00260816, 0.2917, 0.347709, 0.674063, 0.773833, 0.0174277, 0.129053, 0.173304, 0.940745, 0.419558, 0.00326372, 0.40297, 0.96468, 0.509424, 0.67384, 0.0882732, 0.110611, 0.522027, 0.864685, 0.306044, 0.87055, 0.0679443, 0.481999, 0.18915, 0.695993, 0.331741, 0.850465, 0.512519, 0.419894, 0.39356, 0.476141, 0.370136, 0.262703, 0.247899, 0.850707, 0.486571, 0.011581, 0.45049, 0.232171, 0.440695, 0.00337811, 0.925948, 0.319457, 0.884806, 0.311409, 0.852698, 0.63994, 0.568866, 0.267045, 0.223009, 0.313733, 0.629241, 0.129372, 0.579141, 0.904992, 0.385199, 0.330084, 0.0683533, 0.248661, 0.0791767, 0.515006, 0.444598, 0.200089, 0.881632, 0.62213, 0.977745, 0.394738, 0.527943, 0.910211, 0.704782, 0.794402, 0.78068, 0.00189873, 0.836006, 0.29421, 0.541889, 0.798596, 0.636402, 0.918368, 0.892159, 0.501628, 0.585471, 0.786059, 0.142041, 0.416124, 0.556017, 0.965452, 0.826374, 0.27872, 0.717857, 0.909668, 0.341899, 0.0586162, 0.320897, 0.10592, 0.756336, 0.277543, 0.782822, 0.636118, 0.0536134, 0.937309, 0.388455, 0.346383, 0.447704, 0.987638, 0.677626, 0.843047, 0.944787, 0.898766, 0.432687, 0.816114, 0.405026, 0.704707, 0.0453077, 0.829558, 0.303178, 0.342896, 0.767674, 0.2017, 0.147938, 0.458819, 0.727087, 0.341563, 0.574069, 0.279976, 0.434043, 0.629108, 0.838056, 0.281801, 0.15954, 0.533016, 0.161662, 0.97801, 0.87128, 0.634115, 0.774679, 0.225399, 0.749904, 0.486693, 0.99924, 0.620515, 0.305748, 0.22576, 0.726796, 0.918976, 0.786063, 0.375058, 0.181839, 0.391005, 0.79399, 0.960566, 0.0754091, 0.855213, 0.42244, 0.310224, 0.203215, 0.491572, 0.67097, 0.285214, 0.0858207, 0.438187, 0.503256, 0.879449, 0.33861, 0.582943, 0.172445, 0.947563, 0.619313, 0.621448, 0.112419, 0.53002, 0.896355, 0.611235, 0.546437, 0.934089, 0.444589, 0.585632, 0.563237, 0.216405, 0.0073357, 0.932245, 0.38411, 0.69195, 0.695525, 0.560935, 0.258023, 0.537613, 0.74352, 0.180963, 0.355721, 0.13062, 0.573029, 0.548766, 0.373238, 0.183068, 0.789831, 0.838845, 0.918679, 0.5173, 0.452515, 0.821234, 0.231247, 0.00733139, 0.798577, 0.602046, 0.468893, 0.101449, 0.362702, 0.968019, 
0.198854, 0.0733694, 0.959848, 0.67011, 0.806687, 0.78752, 0.0224502, 0.541197, 0.42703, 0.536542, 0.593846, 0.281127, 0.746273, 0.913023, 0.827128, 0.852095, 0.159293, 0.296859, 0.950085, 0.210788, 0.885017, 0.273105, 0.425117, 0.815194, 0.428134, 0.914246, 0.128769, 0.469691, 0.717688, 0.0313141, 0.968659, 0.920771, 0.149099, 0.367242, 0.352186, 0.778338, 0.456717, 0.844881, 0.217232, 0.953702, 0.138595, 0.776925, 0.237162, 0.0112592, 0.12796, 0.379588, 0.31727, 0.0842268, 0.549664, 0.0946314, 0.084875, 0.507039, 0.822972, 0.59034, 0.686113, 0.0621147, 0.3072, 0.694008, 0.554752, 0.702632, 0.711288, 0.817532, 0.323501, 0.0892521, 0.902616, 0.537902, 0.0875294, 0.120766, 0.119613, 0.707762, 0.724657, 0.150481, 0.257149, 0.689427, 0.450747, 0.976963, 0.149166, 0.627768, 0.0403627, 0.0942667, 0.773643, 0.126306, 0.569131, 0.196301, 0.872196, 0.389558, 0.668807, 0.495927, 0.086254, 0.571141, 0.766913, 0.626801, 0.587774, 0.725417, 0.00918404, 0.822281, 0.207662, 0.058398, 0.25222, 0.786782, 0.299126, 0.255607, 0.57141, 0.931478, 0.407586, 0.977963, 0.223107, 0.54599, 0.282617, 0.913212, 0.62677, 0.165772, 0.155456, 0.518098, 0.931691, 0.200463, 0.096959, 0.946527, 0.845319, 0.284366, 0.733283, 0.153038, 0.321763, 0.360312, 0.995035, 0.958094, 0.195862, 0.46631, 0.189629, 0.302557, 0.945048, 0.643645, 0.13327, 0.371641, 0.819777, 0.57319, 0.553086, 0.471611, 0.00809229, 0.369703, 0.784892, 0.306595, 0.591466, 0.907076, 0.525374, 0.200636, 0.956976, 0.494712, 0.415214, 0.273887, 0.006213, 0.778764, 0.25197, 0.197293, 0.530253, 0.270816, 0.542919, 0.865151, 0.161401, 0.599639, 0.425171, 0.02092, 0.252805, 0.936007, 0.452521, 0.804941, 0.257674, 0.47651, 0.290657, 0.877264, 0.2645, 0.0344232, 0.176063, 0.719805, 0.462195, 0.149023, 0.525856, 0.465283, 0.753542, 0.12032, 0.304783, 0.918456, 0.378908, 0.518244, 0.439006, 0.77231, 0.568517, 0.964053, 0.0689595, 0.504969, 0.785789, 0.0910465, 0.387359, 0.304203, 0.165416, 0.852903, 0.49645, 0.110255, 0.604735, 0.150103, 0.558969, 0.768177, 0.051041, 0.736039, 0.0896563, 0.417839, 0.893887, 0.604943, 0.330641, 0.221515, 0.0743126, 0.824649, 0.461708, 0.504435, 0.582551, 0.159506, 0.389979, 0.68579, 0.8831, 0.346473, 0.38616, 0.00216954, 0.0552161, 0.457719, 0.867273, 0.554183, 0.172436, 0.649613, 0.692574, 0.979692, 0.350168, 0.124738, 0.771034, 0.296946, 0.911974, 0.81801, 0.368762, 0.722024, 0.767779, 0.327289, 0.401161, 0.855711, 0.156584, 0.568558, 0.728227, 0.400495, 0.78513, 0.818739, 0.456153, 0.097777, 0.905695, 0.781541, 0.452064, 0.366879, 0.351342, 0.583155, 0.603932, 0.405335, 0.998143, 0.917378, 0.487447, 0.254047, 0.188246, 0.64938, 0.182407, 0.473846, 0.17883, 0.0205566, 0.48272, 0.126721, 0.0308788, 0.457392, 0.848312, 0.812729, 0.677913, 0.340886, 0.497855, 0.728041, 0.16448, 0.108282, 0.23469, 0.711535, 0.883408, 0.191181, 0.183052, 0.646282, 0.374632, 0.471404, 0.230154, 0.445262, 0.155405, 0.106815, 0.927205, 0.876595, 0.852655, 0.187916, 0.719432, 0.964222, 0.0317673, 0.211245, 0.390158, 0.199502, 0.764087, 0.489397, 0.262862, 0.375746, 0.766732, 0.227395, 0.827619, 0.924451, 0.964898, 0.890601, 0.194575, 0.00461227, 0.405292, 0.828893, 0.423598, 0.325351, 0.41934, 0.316225, 0.735053, 0.268635, 0.234122, 0.302902, 0.901617, 0.595159, 0.378335, 0.794058, 0.475479, 0.234191, 0.568599, 0.862924, 0.108729, 0.684379, 0.634164, 0.627819, 0.403464, 0.714119, 0.311638, 0.558869, 0.899291, 0.942521, 0.70004, 0.281052, 0.587603, 0.594774, 0.82425, 0.402055, 0.336297, 0.49131, 0.0355295, 0.20571, 0.0773773, 0.0854871, 0.590875, 0.465248, 
0.573695, 0.514803, 0.0374262, 0.634345, 0.889226, 0.815978, 0.695728, 0.586232, 0.258294, 0.237564, 0.669203, 0.586342, 0.290764, 0.0272068, 0.224519, 0.454707, 0.312868, 0.162063, 0.49107, 0.389977, 0.825339, 0.669316, 0.364957, 0.746349, 0.937063, 0.895906, 0.45993, 0.415207, 0.777136, 0.570434, 0.410112, 0.94222, 0.337142, 0.638576, 0.0790274, 0.645746, 0.495093, 0.0112824, 0.457787, 0.0427242, 0.546968, 0.346656, 0.899557, 0.375191, 0.762052, 0.156298, 0.0423364, 0.54216, 0.901158, 0.6859, 0.149723, 0.260315, 0.429589, 0.282768, 0.84107, 0.912924, 0.105573, 0.899135, 0.316069, 0.388782, 0.214574, 0.520231, 0.644219, 0.809056, 0.164912, 0.90789, 0.654766, 0.753898, 0.715918, 0.212526, 0.210772, 0.656654, 0.555379, 0.874011, 0.0593176, 0.806727, 0.297839, 0.8511, 0.117454, 0.955629, 0.757795, 0.0158988, 0.333015, 0.753903, 0.848117, 0.441142, 0.458952, 0.589017, 0.926075, 0.37727, 0.771108, 0.696034, 0.566547, 0.298096, 0.860689, 0.626973, 0.305328, 0.390657, 0.407119, 0.342068, 0.348203, 0.346508, 0.522355, 0.482273, 0.407676, 0.404382, 0.30361, 0.568368, 0.871924, 0.485865, 0.26557, 0.135717, 0.358373, 0.312095, 0.663044, 0.287236, 0.374043, 0.0501449, 0.239964, 0.52668, 0.884362, 0.145246, 0.0841258, 0.660448, 0.821908, 0.400395, 0.232616, 0.56343, 0.290434, 0.608942, 0.331562, 0.0353069, 0.83608, 0.509274, 0.921468, 0.264453, 0.540291, 0.652523, 0.699866, 0.211111, 0.0818981, 0.755813, 0.590837, 0.104546, 0.715796, 0.261986, 0.73191, 0.0513411, 0.642299, 0.577822, 0.413361, 0.415893, 0.312461, 0.299206, 0.695782, 0.722502, 0.566072, 0.455906, 0.204741, 0.381895, 0.414785, 0.961251, 0.0785839, 0.251855, 0.297642, 0.52355, 0.202696, 0.233333, 0.816203, 0.302117, 0.0354591, 0.302033, 0.313475, 0.480165, 0.372702, 0.491462, 0.476555, 0.938915, 0.0948726, 0.289336, 0.479208, 0.65302, 0.820664, 0.138595, 0.898718, 0.0787371, 0.572545, 0.0301605, 0.712573, 0.382798, 0.710102, 0.306245, 0.288292, 0.0149951, 0.477971, 0.663424, 0.995834, 0.30158, 0.296989, 0.598632, 0.784215, 0.765998, 0.999818, 0.653874, 0.826555, 0.179131, 0.78748, 0.502525, 0.153814, 0.638521, 0.885808, 0.730842, 0.612152, 0.237592, 0.265346, 0.605184, 0.409794, 0.958669, 0.887942, 0.842166, 0.67466, 0.217852, 0.303594, 0.217622, 0.719919, 0.294258, 0.502798, 0.968737, 0.711519, 0.146334, 0.901055, 0.342814, 0.515048, 0.156569, 0.480063, 0.762152, 0.146304, 0.91738, 0.512755, 0.74993, 0.614057, 0.498649, 0.832608, 0.138007, 0.0635518, 0.90737, 0.984678, 0.792356, 0.439238, 0.920941, 0.467957, 0.997167, 0.780395, 0.498664, 0.830492, 0.176103, 0.875214, 0.62277, 0.645844, 0.207281, 0.123563, 0.591686, 0.780432, 0.0129608, 0.791339, 0.942195, 0.49052, 0.49108, 0.402874, 0.975778, 0.851602, 0.539155, 0.801932, 0.290795, 0.548088, 0.573889, 0.0428229, 0.297403, 0.366614, 0.0775476, 0.398522, 0.15321, 0.124954, 0.875133, 0.851476, 0.0164045, 0.963777, 0.657369, 0.74879, 0.451537, 0.519164, 0.948713, 0.516193, 0.169243, 0.885918, 0.123151, 0.454701, 0.471602, 0.918203, 0.150973, 0.184077, 0.0324483, 0.750839, 0.993702, 0.547591, 0.900177, 0.613035, 0.911406, 0.486874, 0.839213, 0.532897, 0.205058, 0.120355, 0.956325, 0.200277, 0.995595, 0.301477, 0.27882, 0.81556, 0.682971, 0.159015, 0.281016, 0.215435, 0.665187, 0.821636, 0.709168, 0.731362, 0.355398, 0.828751, 0.485887, 0.622877, 0.832851, 0.00500767, 0.205263, 0.610187, 0.0793023, 0.942119, 0.965432, 0.882908, 0.419753, 0.623326, 0.559083, 0.0768554, 0.478461, 0.635069, 0.652805, 0.563948, 0.961552, 0.478587, 0.149986, 0.781196, 0.27784, 0.322648, 0.496072, 0.445879, 
0.854785, 0.665529, 0.451308, 0.423746, 0.323325, 0.387398, 0.333302, 0.548933, 0.609913, 0.399075, 0.342851, 0.703824, 0.697605, 0.249703, 0.33358, 0.208028, 0.368541, 0.674104, 0.366247, 0.711366, 0.64422, 0.731571, 0.756717, 0.182663, 0.063741, 0.544396, 0.460685, 0.681695, 0.523088, 0.975995, 0.391259, 0.269248, 0.956278, 0.956123, 0.33244, 0.284253, 0.940698, 0.789579, 0.642366, 0.869707, 0.914349, 0.200234, 0.685062, 0.366813, 0.592963, 0.176426, 0.94228, 0.990111, 0.197891, 0.942487, 0.157776, 0.851431, 0.809682, 0.14956, 0.204497, 0.606793, 0.210414, 0.626229, 0.274956, 0.0589762, 0.542507, 0.423364, 0.193907, 0.692688, 0.130227, 0.732838, 0.592308, 0.0463694, 0.244651, 0.196437, 0.431101, 0.245558, 0.445836, 0.423409, 0.340895, 0.77525, 0.0941416, 0.326716, 0.956279, 0.749079, 0.0113159, 0.741488, 0.272533, 0.740601, 0.381669, 0.9757, 0.0633884, 0.0507312, 0.813466, 0.312817, 0.831893, 0.0735946, 0.565494, 0.375287, 0.923022, 0.326449, 0.238649, 0.493041, 0.717581, 0.558414, 0.311803, 0.410783, 0.820905, 0.92364, 0.916962, 0.720033, 0.553118, 0.487445, 0.308895, 0.309555, 0.43254, 0.699058, 0.43688, 0.625924, 0.698767, 0.117668, 0.918938, 0.346946, 0.0425424, 0.944571, 0.677862, 0.99643, 0.799117, 0.194624, 0.656236, 0.18612, 0.964324, 0.490419, 0.948009, 0.755962, 0.26376, 0.431221, 0.369431, 0.203499, 0.471946, 0.102859, 0.387421, 0.321703, 0.815841, 0.209385, 0.337212, 0.880437, 0.562884, 0.702048, 0.000459374, 0.783616, 0.52945, 0.974002, 0.445899, 0.614568, 0.717301, 0.712262, 0.804264, 0.488487, 0.885669, 0.587038, 0.653692, 0.387325, 0.519875, 0.588843, 0.715098, 0.330496, 0.605493, 0.207392, 0.856513, 0.990133, 0.892779, 0.487827, 0.515743, 0.342204, 0.762723, 0.701436, 0.112907, 0.52613, 0.580736, 0.939468, 0.810839, 0.292307, 0.0776587, 0.353387, 0.211921, 0.983335, 0.509046, 0.720184, 0.929707, 0.962867, 0.325599, 0.21685, 0.700793, 0.610453, 0.806198, 0.306915, 0.424551, 0.535444, 0.543655, 0.43802, 0.111197, 0.520964, 0.375032, 0.751728, 0.87544, 0.0540034, 0.540669, 0.318817, 0.98065, 0.620196, 0.969598, 0.523801, 0.13368, 0.895382, 0.967372, 0.0376354, 0.564836, 0.32494, 0.988143, 0.718526, 0.422607, 0.656999, 0.198147, 0.268907, 0.772404, 0.891959, 0.400608, 0.244192, 0.102446, 0.68491, 0.306202, 0.757514, 0.508371, 0.966817, 0.354025, 0.98087, 0.631283, 0.333821, 0.129576, 0.00903872, 0.219943, 0.690989, 0.702476, 0.94906, 0.589556, 0.181258, 0.900491, 0.316564, 0.0748155, 0.315486, 0.832598, 0.931692, 0.750243, 0.324454, 0.299902, 0.458279, 0.960857, 0.42153, 0.208215, 0.997338, 0.947857, 0.022519, 0.757379, 0.707325, 0.113124, 0.352403, 0.502426, 0.987138, 0.272896, 0.203509, 0.26698, 0.697656, 0.653962, 0.308136, 0.41509, 0.790229, 0.3041, 0.421952, 0.051838, 0.422688, 0.233201, 0.762426, 0.0997682, 0.307766, 0.410076, 0.324199, 0.183727, 0.543223, 0.656292, 0.389546, 0.906389, 0.299238, 0.875743, 0.708753, 0.661783, 0.845525, 0.454504, 0.254718, 0.400637, 0.0231452, 0.500626, 0.704305, 0.744324, 0.112412, 0.283358, 0.191706, 0.281803, 0.747076, 0.605198, 0.184819, 0.666309, 0.799229, 0.0776687, 0.0802222, 0.312765, 0.946827, 0.0596306, 0.667841, 0.884433, 0.51361, 0.228784, 0.996955, 0.692894, 0.350533, 0.236139, 0.210733, 0.0519904, 0.565338, 0.496377, 0.834963, 0.151537, 0.675795, 0.765246, 0.197451, 0.830342, 0.705628, 0.633288, 0.0790493, 0.51424, 0.472419, 0.336734, 0.931291, 0.771549, 0.78136, 0.0825076, 0.865056, 0.764332, 0.213556, 0.709222, 0.0986586, 0.472112, 0.908587, 0.422681, 0.772503, 0.0943595, 0.0677044, 0.511535, 0.015377, 0.829827, 
0.0513216, 0.0310989, 0.39529, 0.408324, 0.660395, 0.429288, 0.996907, 0.557591, 0.164672, 0.456345, 0.347624, 0.599917, 0.374569, 0.201176, 0.663327, 0.297464, 0.0500502, 0.737515, 0.729015, 0.14747, 0.946795, 0.659139, 0.271387, 0.556344, 0.816534, 0.581452, 0.731536, 0.354551, 0.791237, 0.561444, 0.388968, 0.355855, 0.490382, 0.437563, 0.119889, 0.0421211, 0.695471, 0.53896, 0.63328, 0.475862, 0.7677, 0.155043, 0.552161, 0.1977, 0.213232, 0.902141, 0.963417, 0.928794, 0.67945, 0.741175, 0.970789, 0.413755, 0.33189, 0.179538, 0.031885, 0.305146, 0.00171725, 0.473425, 0.928786, 0.0217968, 0.720502, 0.282527, 0.147384, 0.505182, 0.932958, 0.827321, 0.611847, 0.295508, 0.813456, 0.786285, 0.942785, 0.0452294, 0.0344326, 0.934324, 0.692599, 0.110871, 0.894507, 0.116253, 0.437586, 0.0585453, 0.0240826, 0.993231, 0.542855, 0.501712, 0.0721433, 0.0994377, 0.998065, 0.708815, 0.89808, 0.533229, 0.403015, 0.612847, 0.047366, 0.148115, 0.562168, 0.394409, 0.608027, 0.0267424, 0.290254, 0.47847, 0.686004, 0.732128, 0.42657, 0.788678, 0.772223, 0.62925, 0.759383, 0.452272, 0.0417733, 0.815828, 0.502242, 0.777239, 0.190182, 0.797986, 0.347557, 0.270428, 0.32897, 0.203132, 0.065833, 0.192312, 0.712893, 0.0878632, 0.450295, 0.348962, 0.324327, 0.848166, 0.902288, 0.103475, 0.0515426, 0.120736, 0.086399, 0.50396, 0.624497, 0.108548, 0.877673, 0.150397, 0.192996, 0.414415, 0.630816, 0.760576, 0.913109, 0.359354, 0.378046, 0.213008, 0.215852, 0.758265, 0.954744, 0.774403, 0.185868, 0.722292, 0.376673, 0.8275, 0.417598, 0.829849, 0.506043, 0.619417, 0.130901, 0.906095, 0.639795, 0.42788, 0.455821, 0.0438123, 0.501677, 0.346877, 0.619034, 0.745127, 0.567403, 0.975417, 0.229503, 0.194782, 0.603565, 0.239533, 0.535431, 0.167969, 0.113333, 0.246095, 0.140613, 0.873381, 0.911245, 0.634745, 0.19174, 0.406336, 0.20855, 0.620873, 0.823239, 0.0645393, 0.340056, 0.765581, 0.39219, 0.982776, 0.305914, 0.919302, 0.538892, 0.311472, 0.987755, 0.778656, 0.510709, 0.230951, 0.294692, 0.43749, 0.517912, 0.583967, 0.331689, 0.358365, 0.908121, 0.951953, 0.532865, 0.487829, 0.888651, 0.738156, 0.00227695, 0.409016, 0.29978, 0.277166, 0.0951966, 0.521998, 0.802438, 0.217107, 0.622685, 0.836014, 0.343891, 0.882098, 0.458475, 0.692639, 0.599496, 0.839918, 0.0620356, 0.800176, 0.0710426, 0.366246, 0.912615, 0.846913, 0.601073, 0.109573, 0.461957, 0.0704047, 0.502957, 0.264197, 0.242234, 0.066548, 0.562283, 0.428924, 0.298302, 0.838083, 0.782405, 0.891001, 0.553726, 0.0370573, 0.504141, 0.152362, 0.333185, 0.595767, 0.0645192, 0.206343, 0.645988, 0.046566, 0.0362076, 0.650496, 0.369786, 0.0155528, 0.186184, 0.136588, 0.197887, 0.316526, 0.205125, 0.932632, 0.331489, 0.962175, 0.220918, 0.814224, 0.401887, 0.631963, 0.935164, 0.230002, 0.374688, 0.510809, 0.553638, 0.170291, 0.500819, 0.636374, 0.681217, 0.701833, 0.998451, 0.726489, 0.430992, 0.502006, 0.642544, 0.744813, 0.993198, 0.205778, 0.26555, 0.332985, 0.190966, 0.43116, 0.134929, 0.74008, 0.218884, 0.0331861, 0.571262, 0.305478, 0.575809, 0.0896732, 0.996754, 0.355575, 0.309409, 0.382043, 0.165226, 0.814685, 0.942625, 0.817264, 0.439602, 0.824015, 0.235209, 0.771575, 0.216991, 0.875284, 0.63444, 0.766869, 0.714013, 0.00575976, 0.21631, 0.54574, 0.800772, 0.262503, 0.0049781, 0.852463, 0.505458, 0.751669, 0.544636, 0.541515, 0.418763, 0.809507, 0.357748, 0.0707932, 0.879101, 0.256054, 0.618641, 0.0207241, 0.987606, 0.603628, 0.263369, 0.0998284, 0.531484, 0.287135, 0.680079, 0.646468, 0.588465, 0.612844, 0.959933, 0.454027, 0.998903, 0.0750708, 0.484747, 0.137809, 
0.30644, 0.476446, 0.764898, 0.729106, 0.865087, 0.404021, 0.566862, 0.916184, 0.0323896, 0.282523, 0.119637, 0.228027, 0.35099, 0.28294, 0.0240002, 0.916164, 0.511294, 0.531937, 0.973902, 0.744645, 0.198595, 0.473359, 0.612051, 0.761291, 0.0676087, 0.0364221, 0.769576, 0.256146, 0.62122, 0.286872, 0.749406, 0.196533, 0.901199, 0.860322, 0.0700879, 0.521278, 0.292573, 0.778591, 0.719234, 0.648337, 0.404644, 0.0845634, 0.138832, 0.515428, 0.159667, 0.283869, 0.176462, 0.334045, 0.857806, 0.219812, 0.208473, 0.832307, 0.54852, 0.540312, 0.279597, 0.743404, 0.805927, 0.115561, 0.639214, 0.13402, 0.144308, 0.283228, 0.744692, 0.884469, 0.843563, 0.912334, 0.0275226, 0.579395, 0.546761, 0.495795, 0.79755, 0.921985, 0.00752239, 0.97616, 0.0725076, 0.434346, 0.303505, 0.023725, 0.227658, 0.277927, 0.748995, 0.0630237, 0.204613, 0.586421, 0.244571, 0.592176, 0.703014, 0.722145, 0.439465, 0.127182, 0.424489, 0.172089, 0.226009, 0.17294, 0.199361, 0.278068, 0.79068, 0.840652, 0.853706, 0.960016, 0.614518, 0.960822, 0.997946, 0.977777, 0.339633, 0.676073, 0.941484, 0.34342, 0.829122, 0.796571, 0.370971, 0.390502, 0.900768, 0.545817, 0.155374, 0.900034, 0.613687, 0.622225, 0.207122, 0.428731, 0.347693, 0.610224, 0.747305, 0.9997, 0.574436, 0.0225464, 0.537181, 0.553919, 0.854886, 0.63086, 0.294037, 0.886799, 0.31187, 0.596727, 0.651751, 0.721402, 0.724773, 0.938274, 0.69281, 0.94695, 0.0887053, 0.260229, 0.0861957, 0.214669, 0.700685, 0.524997, 0.687995, 0.683631, 0.139977, 0.971096, 0.631791, 0.347941, 0.520404, 0.765819, 0.491614, 0.113508, 0.326125, 0.770543, 0.239641, 0.239453, 0.971334, 0.905452, 0.082197, 0.933207, 0.208841, 0.5037, 0.425159, 0.585294, 0.926005, 0.0331262, 0.119668, 0.538155, 0.733354, 0.566248, 0.842198, 0.760044, 0.410168, 0.674897, 0.330128, 0.997969, 0.954744, 0.679404, 0.5157, 0.155845, 0.743917, 0.108803, 0.16691, 0.70057, 0.114365, 0.248203, 0.45853, 0.201686, 0.297007, 0.129577, 0.997773, 0.432945, 0.277097, 0.876064, 0.539257, 0.405073, 0.596889, 0.686738, 0.719723, 0.842702, 0.647837, 0.979422, 0.106926, 0.103007, 0.539072, 0.726352, 0.607404, 0.364316, 0.135479, 0.505432, 0.462888, 0.282019, 0.660833, 0.577395, 0.0872616, 0.895474, 0.0309444, 0.246224, 0.326139, 0.0961142, 0.844835, 0.466664, 0.397518, 0.166221, 0.255632, 0.783326, 0.637884, 0.180354, 0.344443, 0.959245, 0.418229, 0.242485, 0.0763008, 0.300853, 0.183773, 0.792928, 0.9729, 0.358994, 0.500392, 0.336267, 0.745157, 0.915073, 0.931092, 0.0871831, 0.101795, 0.487572, 0.230753, 0.0285775, 0.660948, 0.175747, 0.13355, 0.259681, 0.442442, 0.992228, 0.451146, 0.798398, 0.715722, 0.730391, 0.0104432, 0.0911216, 0.646872, 0.514239, 0.431686, 0.409231, 0.449391, 0.731215, 0.95104, 0.126263, 0.289025, 0.924131, 0.501361, 0.71892, 0.704512, 0.460507, 0.445345, 0.20252, 0.619948, 0.181668, 0.610762, 0.252248, 0.364614, 0.98668, 0.427278, 0.213482, 0.0645361, 0.605884, 0.538641, 0.386122, 0.838646, 0.877546, 0.463743, 0.466673, 0.139024, 0.39403, 0.662316, 0.606556, 0.457391, 0.1327, 0.815707, 0.0351553, 0.129269, 0.992484, 0.134357, 0.964223, 0.670463, 0.914454, 0.278057, 0.373945, 0.810388, 0.794334, 0.769387, 0.645876, 0.762126, 0.954626, 0.0443219, 0.682765, 0.222122, 0.62291, 0.0248144, 0.31431, 0.749545, 0.428863, 0.573547, 0.165765, 0.329579, 0.258269, 0.211796, 0.798198, 0.775917, 0.0498462, 0.0225739, 0.41106, 0.418993, 0.708065, 0.798057, 0.358689, 0.974601, 0.946496, 0.748159, 0.440356, 0.962265, 0.324581, 0.558731, 0.714919, 0.18392, 0.353212, 0.590955, 0.698877, 0.455575, 0.71087, 0.213767, 0.933595, 
0.468522, 0.921713, 0.486236, 0.490256, 0.665164, 0.273319, 0.370377, 0.331048, 0.939203, 0.887592, 0.121029, 0.566328, 0.601967, 0.581702, 0.920826, 0.121328, 0.651494, 0.354383, 0.839414, 0.503693, 0.372105, 0.592742, 0.438147, 0.621782, 0.354364, 0.503378, 0.871103, 0.782457, 0.378147, 0.431539, 0.0724265, 0.985554, 0.295643, 0.417093, 0.750814, 0.610749, 0.0368025, 0.186401, 0.919098, 0.989517, 0.675107, 0.11146, 0.719372, 0.775524, 0.60422, 0.0053235, 0.969482, 0.469206, 0.0535987, 0.835375, 0.279203, 0.338218, 0.569804, 0.497755, 0.636261, 0.0337694, 0.0218546, 0.36575, 0.619811, 0.409943, 0.0115243, 0.809308, 0.359385, 0.937377, 0.523961, 0.658996, 0.237549, 0.495443, 0.478506, 0.932587, 0.141647, 0.39376, 0.34401, 0.457826, 0.951581, 0.706873, 0.27061, 0.950437, 0.884162, 0.146736, 0.0204652, 0.592723, 0.056428, 0.0783631, 0.269389, 0.0591661, 0.0287375, 0.0193022, 0.40157, 0.00981542, 0.967253, 0.564831, 0.256039, 0.486816, 0.110971, 0.5415, 0.295467, 0.0365363, 0.183209, 0.868703, 0.592161, 0.375432, 0.844109, 0.925935, 0.0326413, 0.634369, 0.509423, 0.582345, 0.909867, 0.156127, 0.0352869, 0.870373, 0.469511, 0.282328, 0.570077, 0.968871, 0.955915, 0.834278, 0.890698, 0.901735, 0.519432, 0.0488613, 0.154037, 0.287447, 0.359883, 0.985389, 0.52664, 0.0570452, 0.851597, 0.894827, 0.0408393, 0.49287, 0.987677, 0.886312, 0.499923, 0.285968, 0.314708, 0.485444, 0.741816, 0.290477, 0.733859, 0.236378, 0.256612, 0.155221, 0.782483, 0.73168, 0.87311, 0.879609, 0.884024, 0.677188, 0.955055, 0.692199, 0.352102, 0.691087, 0.988245, 0.322867, 0.0380597, 0.77284, 0.203456, 0.730323, 0.761791, 0.400378, 0.874002, 0.547934, 0.953313, 0.219876, 0.100409, 0.768741, 0.635169, 0.82405, 0.0826316, 0.34113, 0.022735, 0.370105, 0.137198, 0.155422, 0.727368, 0.463257, 0.969237, 0.145959, 0.233517, 0.362324, 0.669382, 0.558766, 0.68986, 0.57231, 0.498823, 0.736345, 0.576362, 0.50987, 0.666443, 0.651084, 0.706354, 0.805967, 0.141184, 0.205241, 0.0468586, 0.717799, 0.30578, 0.692125, 0.916982, 0.513778, 0.818624, 0.0158147, 0.188299, 0.919569, 0.312238, 0.165987, 0.693178, 0.559865, 0.366556, 0.370438, 0.315617, 0.46419, 0.0519459, 0.31128, 0.528658, 0.0779606, 0.474219, 0.547363, 0.999653, 0.485361, 0.222646, 0.0112261, 0.16428, 0.648535, 0.653179, 0.193476, 0.98362, 0.466786, 0.841197, 0.0880118, 0.849756, 0.663088, 0.605092, 0.878049, 0.405664, 0.523132, 0.968847, 0.637803, 0.827179, 0.649504, 0.080405, 0.557131, 0.380966, 0.654977, 0.953605, 0.634304, 0.255329, 0.580189, 0.28379, 0.34013, 0.69615, 0.152203, 0.162791, 0.907203, 0.104816, 0.164774, 0.404359, 0.640923, 0.731013, 0.841513, 0.0663143, 0.424773, 0.379522, 0.524056, 0.182205, 0.0263843, 0.429137, 0.611556, 0.289765, 0.690918, 0.499398, 0.191333, 0.990329, 0.602522, 0.591796, 0.265113, 0.99066, 0.559795, 0.202085, 0.210071, 0.600537, 0.619323, 0.889541, 0.905362, 0.09818, 0.787055, 0.878502, 0.916668, 0.420763, 0.271097, 0.378082, 0.233722, 0.957123, 0.172673, 0.664395, 0.141372, 0.163182, 0.553433, 0.39348, 0.985466, 0.408348, 0.533142, 0.432394, 0.437719, 0.775577, 0.934864, 0.328533, 0.767522, 0.427572, 0.385896, 0.529203, 0.141147, 0.303762, 0.870935, 0.00144801, 0.524013, 0.0421703, 0.47047, 0.100445, 0.585934, 0.354223, 0.0834647, 0.913464, 0.581204, 0.627656, 0.440589, 0.99519, 0.726606, 0.230397, 0.451295, 0.591147, 0.315129, 0.212771, 0.986914, 0.597935, 0.639038, 0.171403, 0.884961, 0.85331, 0.0958421, 0.556571, 0.541104, 0.931672, 0.768871, 0.883653, 0.2884, 0.607495, 0.370771, 0.0263914, 0.221665, 0.579561, 0.0560537, 0.688331, 
0.000530883, 0.763235, 0.113872, 0.670869, 0.382168, 0.951064, 0.230139, 0.0884889, 0.829555, 0.493269, 0.517877, 0.101586, 0.953319, 0.785498, 0.0463504, 0.375515, 0.105873, 0.838429, 0.639821, 0.277366, 0.193883, 0.450743, 0.84173, 0.188137, 0.65757, 0.570521, 0.561549, 0.982546, 0.268574, 0.0361614, 0.239571, 0.861502, 0.919403, 0.649889, 0.876869, 0.514662, 0.110274, 0.237857, 0.44978, 0.852857, 0.290724, 0.73828, 0.762216, 0.693903, 0.131173, 0.0502922, 0.017719, 0.141964, 0.37693, 0.758384, 0.453515, 0.961074, 0.906867, 0.597192, 0.0880735, 0.0932788, 0.0197004, 0.327367, 0.733527, 0.957174, 0.478613, 0.141557, 0.125371, 0.0670994, 0.609486, 0.0344699, 0.955827, 0.312528, 0.790934, 0.719149, 0.774618, 0.124207, 0.989422, 0.347883, 0.431302, 0.524219, 0.122455, 0.732618, 0.568935, 0.695886, 0.934012, 0.152918, 0.265866, 0.0329838, 0.995673, 0.26983, 0.800392, 0.555461, 0.340995, 0.489039, 0.277231, 0.781677, 0.10935, 0.187688, 0.26424, 0.761925, 0.89656, 0.697504, 0.363112, 0.793524, 0.125838, 0.694469, 0.768287, 0.184716, 0.957278, 0.685834, 0.0891029, 0.432854, 0.229721, 0.541634, 0.636061, 0.363804, 0.0615026, 0.852762, 0.234804, 0.124602, 0.463749, 0.583845, 0.21873, 0.358247, 0.554411, 0.519895, 0.952891, 0.492718, 0.675224, 0.732572, 0.598998, 0.40537, 0.429348, 0.797104, 0.212902, 0.257982, 0.309314, 0.895807, 0.207998, 0.0374725, 0.693981, 0.293132, 0.564519, 0.0386697, 0.286437, 0.890079, 0.555628, 0.952875, 0.7022, 0.710103, 0.295732, 0.947625, 0.655212, 0.454, 0.750466, 0.495312, 0.915, 0.177233, 0.792406, 0.325984, 0.698772, 0.995777, 0.462132, 0.530278, 0.451316, 0.266119, 0.429365, 0.576811, 0.599757, 0.38522, 0.0168795, 0.821437, 0.195268, 0.904495, 0.795734, 0.504767, 0.342887, 0.16251, 0.809289, 0.141759, 0.560768, 0.876467, 0.107043, 0.910884, 0.490546, 0.525436, 0.986358, 0.367851, 0.121117, 0.199685, 0.0494851, 0.204553, 0.33997, 0.26258, 0.443446, 0.535725, 0.392264, 0.0102544, 0.821591, 0.00237562, 0.335985, 0.988922, 0.140888, 0.752345, 0.0470223, 0.668686, 0.954664, 0.751732, 0.893064, 0.301303, 0.239239, 0.461059, 0.725767, 0.806086, 0.854317, 0.5304, 0.0725539, 0.784497, 0.547176, 0.13796, 0.468485, 0.560381, 0.604325, 0.979507, 0.416763, 0.477392, 0.978413, 0.0129352, 0.395437, 0.630093, 0.523866, 0.0329022, 0.7092, 0.917279, 0.721076, 0.587595, 0.768258, 0.805407, 0.965405, 0.435104, 0.308071, 0.358265, 0.944223, 0.611082, 0.704231, 0.963482, 0.0616783, 0.855464, 0.561312, 0.600447, 0.305126, 0.84011, 0.9094, 0.553137, 0.0964839, 0.174591, 0.934101, 0.531598, 0.424645, 0.217711, 0.296364, 0.0409469, 0.80714, 0.612858, 0.457655, 0.837201, 0.455085, 0.0633057, 0.088044, 0.656438, 0.668495, 0.219115, 0.405178, 0.92651, 0.195594, 0.196805, 0.486918, 0.746504, 0.528198, 0.881192, 0.148688, 0.0588538, 0.980801, 0.639967, 0.0963921, 0.940596, 0.567153, 0.916391, 0.132241, 0.75758, 0.833991, 0.255245, 0.835937, 0.91713, 0.900663, 0.54841, 0.788422, 0.207004, 0.0122745, 0.696936, 0.276365, 0.0921305, 0.372153, 0.289798, 0.354533, 0.66541, 0.719798, 0.892655, 0.691821, 0.158771, 0.391436, 0.961205, 0.229076, 0.517206, 0.70443, 0.955892, 0.673987, 0.345913, 0.947138, 0.332982, 0.470211, 0.0789133, 0.699266, 0.595584, 0.714101, 0.531917, 0.654689, 0.0830353, 0.929533, 0.77689, 0.395469, 0.738087, 0.614115, 0.383841, 0.0892102, 0.930917, 0.026924, 0.148354, 0.593354, 0.587583, 0.836964, 0.275544, 0.374362, 0.994229, 0.281204, 0.678332, 0.335057, 0.327262, 0.179402, 0.120084, 0.387661, 0.130125, 0.46928, 0.168189, 0.475752, 0.0491798, 0.192861, 0.660905, 0.327744, 
0.220833, 0.33993, 0.730424, 0.0926077, 0.432199, 0.584865, 0.248809, 0.884808, 0.295352, 0.432771, 0.108191, 0.812276, 0.842286, 0.578389, 0.676405, 0.0438432, 0.667044, 0.350506, 0.642416, 0.127809, 0.361473, 0.386797, 0.638381, 0.834927, 0.894648, 0.047081, 0.945455, 0.768864, 0.625907, 0.411053, 0.366987, 0.736365, 0.770482, 0.721603, 0.105652, 0.356595, 0.455968, 0.324094, 0.64266, 0.557477, 0.70733, 0.856852, 0.437905, 0.968905, 0.709853, 0.636114, 0.0656424, 0.698599, 0.0163767, 0.0795992, 0.749982, 0.581594, 0.0975152, 0.9089, 0.45852, 0.408901, 0.651383, 0.0822779, 0.836105, 0.173849, 0.424383, 0.0234746, 0.921112, 0.040811, 0.0759405, 0.485741, 0.52121, 0.355386, 0.926052, 0.181826, 0.30377, 0.941976, 0.95971, 0.830878, 0.292631, 0.856022, 0.581403, 0.347075, 0.710355, 0.194496, 0.789651, 0.0972559, 0.658947, 0.49509, 0.530718, 0.535513, 0.800662, 0.106724, 0.0886827, 0.6503, 0.948656, 0.30601, 0.550014, 0.495767, 0.285114, 0.561632, 0.525229, 0.150295, 0.858642, 0.304058, 0.3444, 0.593324, 0.0674996, 0.974289, 0.751881, 0.867504, 0.496053, 0.0249461, 0.881343, 0.255988, 0.567881, 0.286744, 0.391873, 0.213207, 0.42003, 0.322785, 0.0545056, 0.812156, 0.521479, 0.0131528, 0.904396, 0.294556, 0.591013, 0.898646, 0.705362, 0.736616, 0.438744, 0.976117, 0.542808, 0.683477, 0.785221, 0.789859, 0.482724, 0.846109, 0.944255, 0.0918593, 0.509805, 0.327315, 0.930139, 0.0844249, 0.544293, 0.241036, 0.712018, 0.861089, 0.214794, 0.114647, 0.568504, 0.641023, 0.749465, 0.421019, 0.790446, 0.231891, 0.219589, 0.135377, 0.207071, 0.437452, 0.530619, 0.150124, 0.896802, 0.843331, 0.639401, 0.508767, 0.907281, 0.236526, 0.103391, 0.722656, 0.270883, 0.00526338, 0.753016, 0.881599, 0.282624, 0.967756, 0.211178, 0.19713, 0.186052, 0.450799, 0.883309, 0.715028, 0.491312, 0.270213, 0.784341, 0.876818, 0.462363, 0.645861, 0.30365, 0.726425, 0.0850515, 0.685352, 0.532666, 0.470819, 0.932496, 0.94819, 0.513388, 0.911458, 0.596679, 0.520154, 0.47787, 0.326133, 0.685435, 0.0874477, 0.285027, 0.136082, 0.810023, 0.60069, 0.818495, 0.28603, 0.406306, 0.0927686, 0.326538, 0.975186, 0.318257, 0.0757217, 0.275155, 0.17117, 0.740422, 0.573987, 0.748141, 0.443699, 0.935381, 0.601052, 0.0418846, 0.878326, 0.217061, 0.0981728, 0.58808, 0.948826, 0.547458, 0.0465522, 0.847922, 0.99061, 0.923358, 0.906015, 0.850834, 0.892246, 0.921648, 0.066566, 0.461191, 0.465599, 0.717356, 0.0347364, 0.631967, 0.734621, 0.579385, 0.890705, 0.540239, 0.492844, 0.807834, 0.921383, 0.809126, 0.315515, 0.646601, 0.611443, 0.594705, 0.853509, 0.348828, 0.430188, 0.957612, 0.196061, 0.523718, 0.469192, 0.0806763, 0.874672, 0.0198086, 0.485019, 0.918249, 0.918366, 0.850406, 0.53594, 0.862073, 0.821116, 0.249198, 0.934817, 0.629983, 0.40828, 0.153033, 0.0810286, 0.231719, 0.204541, 0.200261, 0.911439, 0.377936, 0.474523, 0.511993, 0.325127, 0.767908, 0.583156, 0.76049, 0.93169, 0.115439, 0.687285, 0.170706, 0.0405119, 0.579923, 0.865023, 0.220679, 0.917032, 0.382647, 0.702412, 0.41282, 0.274478, 0.866148, 0.183507, 0.57446, 0.375441, 0.45569, 0.208625, 0.14938, 0.0611816, 0.243342, 0.560659, 0.944514, 0.404453, 0.0324093, 0.332591, 0.898494, 0.171232, 0.369558, 0.795901, 0.689747, 0.261825, 0.720138, 0.579142, 0.959401, 0.537123, 0.910328, 0.496331, 0.632759, 0.924125, 0.411323, 0.332008, 0.8967, 0.832014, 0.776114, 0.711136, 0.673534, 0.0745876, 0.471501, 0.195611, 0.612842, 0.0282559, 0.823752, 0.976105, 0.932805, 0.760794, 0.0621428, 0.628563, 0.584414, 0.768426, 0.449936, 0.705981, 0.977829, 0.450984, 0.0627631, 0.464332, 0.0233331, 
0.312305, 0.291767, 0.258894, 0.817472, 0.563718, 0.137744, 0.865692, 0.0516371, 0.755392, 0.0605751, 0.0490613, 0.626908, 0.654016, 0.926472, 0.913711, 0.793858, 0.605462, 0.668132, 0.150249, 0.938324, 0.892364, 0.361715, 0.354309, 0.275277, 0.292327, 0.263327, 0.246885, 0.053911, 0.344328, 0.769497, 0.117803, 0.401169, 0.446425, 0.617244, 0.808714, 0.5676, 0.923967, 0.41764, 0.510277, 0.287133, 0.134195, 0.766656, 0.0675931, 0.0816631, 0.318533, 0.685589, 0.591801, 0.251821, 0.168309, 0.260068, 0.839235, 0.915906, 0.850297, 0.66456, 0.685349, 0.266596, 0.785865, 0.316785, 0.294974, 0.611602, 0.94221, 0.765787, 0.734532, 0.454038, 0.522938, 0.96581, 0.349161, 0.194434, 0.234143, 0.794695, 0.762943, 0.0179404, 0.583041, 0.93006, 0.676737, 0.286451, 0.651952, 0.76715, 0.994021, 0.39291, 0.067219, 0.326723, 0.142595, 0.173758, 0.43416, 0.647532, 0.155302, 0.00369675, 0.258427, 0.969955, 0.268434, 0.714482, 0.420572, 0.0461504, 0.73216, 0.495917, 0.274553, 0.3161, 0.734696, 0.394434, 0.35067, 0.268173, 0.546141, 0.981829, 0.787362, 0.0957461, 0.251503, 0.839941, 0.165754, 0.122106, 0.407269, 0.465419, 0.855948, 0.538086, 0.0458484, 0.594903, 0.363308, 0.357769, 0.301892, 0.825676, 0.162865, 0.962774, 0.546735, 0.468579, 0.0618353, 0.642104, 0.979241, 0.670648, 0.69273, 0.253065, 0.16028, 0.700921, 0.95779, 0.415338, 0.901947, 0.29087, 0.714962, 0.519017, 0.984094, 0.951783, 0.900632, 0.981329, 0.894282, 0.529182, 0.378445, 0.56871, 0.0689003, 0.0399039, 0.529544, 0.877385, 0.77251, 0.377698, 0.597938, 0.460408, 0.784951, 0.740924, 0.00140497, 0.698268, 0.0734738, 0.889424, 0.0381724, 0.249256, 0.192611, 0.68401, 0.903465, 0.926387, 0.541491, 0.383938, 0.661396, 0.545428, 0.463807, 0.533043, 0.289835, 0.737829, 0.0166068, 0.47506, 0.0718027, 0.261786, 0.265741, 0.0532142, 0.353357, 0.306935, 0.984029, 0.214661, 0.675077, 0.325474, 0.0362308, 0.417308, 0.965288, 0.985633, 0.316755, 0.0538064, 0.864297, 0.233185, 0.974441, 0.822031, 0.76036, 0.580201, 0.101086, 0.292477, 0.455895, 0.21985, 0.66523, 0.545336, 0.555505, 0.748404, 0.362257, 0.0125992, 0.975642, 0.924374, 0.300842, 0.373651, 0.237157, 0.352575, 0.933896, 0.150759, 0.286007, 0.954847, 0.833998, 0.0443636, 0.889021, 0.644727, 0.172689, 0.73169, 0.710619, 0.558868, 0.483938, 0.280546, 0.70036, 0.987537, 0.338002, 0.749245, 0.765183, 0.601736, 0.463893, 0.435195, 0.268802, 0.765699, 0.235015, 0.610494, 0.115031, 0.701436, 0.804844, 0.996034, 0.977349, 0.982549, 0.56652, 0.974234, 0.0909494, 0.602784, 0.709772, 0.178061, 0.0478865, 0.372929, 0.905772, 0.833844, 0.773746, 0.589488, 0.592565, 0.713169, 0.154453, 0.519199, 0.516459, 0.814036, 0.828599, 0.237314, 0.346486, 0.619685, 0.677282, 0.832089, 0.160742, 0.58959, 0.367144, 0.877192, 0.401572, 0.068229, 0.0901807, 0.62183, 0.447372, 0.100731, 0.0367002, 0.830608, 0.785898, 0.326059, 0.776924, 0.367243, 0.523875, 0.652359, 0.823244, 0.345527, 0.850052, 0.177941, 0.570479, 0.979535, 0.315231, 0.343327, 0.820243, 0.643288, 0.841785, 0.272791, 0.598479, 0.488962, 0.896656, 0.716403, 0.888622, 0.869405, 0.520231, 0.205996, 0.445072, 0.190542, 0.5427, 0.525401, 0.290195, 0.675408, 0.491442, 0.477095, 0.212288, 0.119685, 0.307718, 0.298429, 0.0103334, 0.40656, 0.203992, 0.737477, 0.140582, 0.0138539, 0.696118, 0.270023, 0.710319, 0.624232, 0.956015, 0.73854, 0.214321, 0.899826, 0.155828, 0.0456123, 0.193526, 0.16787, 0.951217, 0.714801, 0.563744, 0.316542, 0.460755, 0.36157, 0.536868, 0.123611, 0.611604, 0.0805006, 0.593761, 0.463433, 0.578698, 0.981508, 0.285077, 0.568689, 0.122225, 
0.810219, 0.421639, 0.529817, 0.814263, 0.721685, 0.26282, 0.983978, 0.852334, 0.834045, 0.31579, 0.590571, 0.776926, 0.717894, 0.325214, 0.914004, 0.571561, 0.962669, 0.872205, 0.715484, 0.141387, 0.898256, 0.245442, 0.00690491, 0.264613, 0.563598, 0.84102, 0.508624, 0.181626, 0.173448, 0.735778, 0.305023, 0.415642, 0.248116, 0.114118, 0.511834, 0.69202, 0.414072, 0.973577, 0.376243, 0.699143, 0.656045, 0.221311, 0.112149, 0.327161, 0.441988, 0.628914, 0.463092, 0.721788, 0.614185, 0.177694, 0.742627, 0.431882, 0.0890311, 0.507928, 0.100382, 0.296299, 0.636947, 0.291389, 0.364851, 0.668439, 0.879033, 0.344257, 0.609353, 0.415145, 0.215987, 0.753769, 0.769825, 0.427056, 0.880189, 0.77748, 0.318549, 0.566751, 0.681715, 0.703175, 0.394852, 0.189239, 0.221894, 0.52115, 0.0926394, 0.420422, 0.308635, 0.716731, 0.434997, 0.751326, 0.137572, 0.945996, 0.203424, 0.0646801, 0.636693, 0.239542, 0.240282, 0.418196, 0.0572711, 0.0709934, 0.349739, 0.295569, 0.345298, 0.756442, 0.356749, 0.475677, 0.212771, 0.803339, 0.657724, 0.0995816, 0.863861, 0.34778, 0.419936, 0.361357, 0.238835, 0.620249, 0.316184, 0.182584, 0.494993, 0.631117, 0.393286, 0.833995, 0.98722, 0.835306, 0.0729873, 0.149028, 0.322858, 0.219534, 0.443027, 0.894205, 0.999529, 0.914801, 0.63717, 0.478458, 0.799458, 0.992817, 0.843361, 0.868118, 0.0456673, 0.761873, 0.903886, 0.662665, 0.755355, 0.847703, 0.973704, 0.186082, 0.974756, 0.303198, 0.952711, 0.45242, 0.159349, 0.780025, 0.355679, 0.574793, 0.0102476, 0.752373, 0.246213, 0.00751543, 0.243853, 0.828987, 0.927389, 0.000967662, 0.215161, 0.906265, 0.257344, 0.117601, 0.30718, 0.169319, 0.404844, 0.360279, 0.683556, 0.57955, 0.385579, 0.274966, 0.0336187, 0.266425, 0.751518, 0.21888, 0.177437, 0.98156, 0.0279228, 0.235836, 0.994914, 0.937093, 0.0218233, 0.283259, 0.941252, 0.365088, 0.43569, 0.426335, 0.150931, 0.0249422, 0.659588, 0.469616, 0.841131, 0.321451, 0.0960266, 0.111795, 0.0411948, 0.666625, 0.159542, 0.102567, 0.0382657, 0.802677, 0.813833, 0.775476, 0.982349, 0.420094, 0.549191, 0.343159, 0.0776193, 0.956408, 0.254944, 0.567026, 0.0243003, 0.0329019, 0.210384, 0.27898, 0.727169, 0.592041, 0.314954, 0.839651, 0.707783, 0.559892, 0.784291, 0.977474, 0.486061, 0.453918, 0.5327, 0.646156, 0.606962, 0.426937, 0.444103, 0.260286, 0.337848, 0.430102, 0.598767, 0.784523, 0.242006, 0.472945, 0.0433889, 0.06967, 0.236836, 0.548881, 0.883244, 0.451034, 0.86139, 0.620741, 0.406395, 0.442029, 0.854033, 0.0424505, 0.127542, 0.0737809, 0.739017, 0.749057, 0.767756, 0.624435, 0.0685933, 0.0363357, 0.785819, 0.94934, 0.618505, 0.167897, 0.610146, 0.906818, 0.464915, 0.94703, 0.215139, 0.196322, 0.884601, 0.184787, 0.0147347, 0.695557, 0.882105, 0.938693, 0.69243, 0.277382, 0.470088, 0.0651881, 0.0809175, 0.0210149, 0.27701, 0.208657, 0.712748, 0.835842, 0.539177, 0.794017, 0.979136, 0.463491, 0.46603, 0.272159, 0.0587519, 0.343745, 0.0594342, 0.11452, 0.13373, 0.249941, 0.866703, 0.718719, 0.634638, 0.238383, 0.422729, 0.508818, 0.0343279, 0.211444, 0.726286, 0.364323, 0.673365, 0.90412, 0.143546, 0.0870725, 0.518883, 0.238802, 0.994439, 0.724867, 0.0577984, 0.0638713, 0.918263, 0.00591694, 0.709488, 0.199183, 0.582836, 0.0810226, 0.776759, 0.747521, 0.553668, 0.301735, 0.109321, 0.00419767, 0.930506, 0.661101, 0.577049, 0.272885, 0.114674, 0.299321, 0.101196, 0.884958, 0.523925, 0.330126, 0.303359, 0.404469, 0.0285216, 0.742822, 0.36941, 0.711034, 0.605484, 0.201626, 0.582907, 0.789505, 0.729289, 0.953198, 0.961727, 0.298915, 0.0811433, 0.092407, 0.712373, 0.100408, 0.859939, 
0.231836, 0.369344, 0.27811, 0.86206, 0.0686547, 0.871463, 0.00279706, 0.402605, 0.88835, 0.109759, 0.84698, 0.287661, 0.446631, 0.360749, 0.637728, 0.339026, 0.0748655, 0.295142, 0.311845, 0.081733, 0.670569, 0.23095, 0.00812294, 0.00117778, 0.148767, 0.445743, 0.845989, 0.990733, 0.741713, 0.322031, 0.287333, 0.879225, 0.631874, 0.37337, 0.512687, 0.126233, 0.677371, 0.0669676, 0.994743, 0.46719, 0.278884, 0.769474, 0.116861, 0.489663, 0.148669, 0.259306, 0.00307888, 0.319387, 0.329526, 0.903629, 0.00739835, 0.660976, 0.457794, 0.720988, 0.933501, 0.70156, 0.825779, 0.0104466, 0.723503, 0.994294, 0.80497, 0.0837229, 0.0371602, 0.518077, 0.341169, 0.898467, 0.4119, 0.974203, 0.528178, 0.275806, 0.558864, 0.590083, 0.36069, 0.5985, 0.550322, 0.00322284, 0.964272, 0.692101, 0.295236, 0.598529, 0.416191, 0.880071, 0.320906, 0.928028, 0.735064, 0.268047, 0.388567, 0.736775, 0.883189, 0.121597, 0.650398, 0.614589, 0.606027, 0.298794, 0.274833, 0.401772, 0.645119, 0.343854, 0.61125, 0.40705, 0.28375, 0.18865, 0.0334682, 0.715168, 0.883912, 0.902918, 0.349881, 0.950771, 0.915172, 0.604889, 0.191305, 0.207965, 0.674039, 0.472276, 0.792574, 0.0424723, 0.845108, 0.71869, 0.986904, 0.484774, 0.0673107, 0.53096, 0.40897, 0.428653, 0.690819, 0.417753, 0.38642, 0.650099, 0.472372, 0.0180191, 0.322664, 0.73082, 0.632719, 0.460792, 0.794263, 0.0748028, 0.661996, 0.976789, 0.544277, 0.121436, 0.578298, 0.620466, 0.748716, 0.291072, 0.911739, 0.0700601, 0.63404, 0.106162, 0.0312323, 0.891399, 0.572174, 0.443902, 0.462337, 0.945991, 0.39687, 0.375296, 0.325718, 0.795836, 0.19911, 0.585768, 0.390344, 0.381199, 0.364747, 0.0326072, 0.470277, 0.306731, 0.250549, 0.931713, 0.963179, 0.470011, 0.339578, 0.835582, 0.63943, 0.0303026, 0.277513, 0.474861, 0.89772, 0.298515, 0.990435, 0.304965, 0.422295, 0.39287, 0.161606, 0.789675, 0.123178, 0.941126, 0.277487, 0.507002, 0.680308, 0.857293, 0.44714, 0.824616, 0.017484, 0.477551, 0.058476, 0.984209, 0.508838, 0.247145, 0.37202, 0.552131, 0.769, 0.20338, 0.661238, 0.99676, 0.348304, 0.297804, 0.894703, 0.508963, 0.0230383, 0.0835866, 0.0690568, 0.295055, 0.149184, 0.17607, 0.547003, 0.0141352, 0.618915, 0.654163, 0.356946, 0.0135127, 0.814264, 0.486929, 0.753879, 0.356074, 0.969044, 0.0302966, 0.959771, 0.998317, 0.632366, 0.00700747, 0.839543, 0.571449, 0.100513, 0.702013, 0.728682, 0.180028, 0.69867, 0.74286, 0.153003, 0.953248, 0.451781, 0.170989, 0.471099, 0.90267, 0.0117114, 0.721415, 0.745257, 0.677499, 0.849621, 0.117456, 0.486243, 0.297787, 0.333589, 0.538246, 0.659511, 0.983609, 0.0277514, 0.246375, 0.0891166, 0.325694, 0.993423, 0.794188, 0.347804, 0.167151, 0.719761, 0.917062, 0.617931, 0.234647, 0.181319, 0.416131, 0.537011, 0.917685, 0.579161, 0.451282, 0.35259, 0.37269, 0.217509, 0.0594936, 0.100888, 0.890128, 0.471953, 0.36669, 0.032169, 0.480751, 0.374217, 0.627174, 0.163406, 0.584003, 0.595819, 0.231716, 0.99713, 0.658608, 0.877558, 0.982247, 0.93675, 0.402589, 0.323494, 0.736381, 0.969406, 0.424023, 0.19835, 0.723081, 0.46446, 0.315328, 0.482378, 0.040066, 0.0948702, 0.494517, 0.401831, 0.864262, 0.622606, 0.45558, 0.592151, 0.221398, 0.234553, 0.419324, 0.0279188, 0.521746, 0.298299, 0.810423, 0.197255, 0.678615, 0.707791, 0.120415, 0.0367126, 0.401109, 0.708377, 0.316304, 0.652421, 0.0174278, 0.724053, 0.47594, 0.891617, 0.878212, 0.839345, 0.377355, 0.594664, 0.440637, 0.25196, 0.22677, 0.429425, 0.4201, 0.361566, 0.243301, 0.946008, 0.093391, 0.895967, 0.110418, 0.120055, 0.575129, 0.753429, 0.304101, 0.19964, 0.570396, 0.983574, 0.988702, 
0.883311, 0.80188, 0.233527, 0.634443, 0.261782, 0.979174, 0.134869, 0.765734, 0.532329, 0.0429464, 0.408526, 0.924187, 0.350389, 0.73086, 0.462601, 0.714236, 0.992274, 0.439305, 0.806862, 0.290555, 0.29422, 0.973654, 0.0324598, 0.550384, 0.852523, 0.494517, 0.358827, 0.659189, 0.0456365, 0.2858, 0.109651, 0.915952, 0.0814468, 0.0690716, 0.574275, 0.993968, 0.853809, 0.345226, 0.187669, 0.328723, 0.567144, 0.448212, 0.817997, 0.105359, 0.584556, 0.190573, 0.0355504, 0.990292, 0.619409, 0.183897, 0.826743, 0.885364, 0.972115, 0.949095, 0.0520858, 0.381193, 0.762117, 0.64437, 0.105142, 0.679353, 0.717334, 0.912706, 0.0658206, 0.693812, 0.0166813, 0.51631, 0.520799, 0.0187077, 0.338061, 0.958623, 0.747098, 0.70519, 0.417181, 0.427818, 0.415098, 0.714567, 0.985927, 0.0210712, 0.461272, 0.354264, 0.265013, 0.519558, 0.116331, 0.535868, 0.352515, 0.151883, 0.197769, 0.0134653, 0.191709, 0.991597, 0.487075, 0.116496, 0.653735, 0.244293, 0.504451, 0.624997, 0.575076, 0.547174, 0.0165865, 0.968418, 0.531341, 0.535861, 0.680424, 0.532573, 0.950493, 0.602586, 0.155387, 0.350366, 0.960529, 0.868087, 0.914379, 0.738191, 0.265445, 0.766265, 0.911954, 0.424649, 0.354205, 0.666029, 0.912105, 0.886417, 0.231325, 0.0355263, 0.950339, 0.507061, 0.271066, 0.831718, 0.27213, 0.346277, 0.823456, 0.792705, 0.997281, 0.497942, 0.478723, 0.0544625, 0.705731, 0.943926, 0.0637384, 0.24178, 0.220038, 0.73526, 0.349917, 0.683923, 0.259271, 0.309605, 0.179657, 0.982663, 0.979263, 0.915554, 0.486691, 0.554517, 0.669794, 0.253162, 0.093543, 0.669497, 0.609819, 0.98844, 0.138513, 0.309594, 0.876058, 0.66811, 0.638072, 0.212735, 0.340255, 0.690781, 0.583122, 0.159234, 0.589236, 0.693139, 0.349297, 0.587949, 0.887529, 0.728229, 0.136892, 0.262204, 0.662484, 0.28472, 0.622015, 0.207545, 0.510029, 0.60721, 0.038113, 0.31417, 0.615402, 0.448743, 0.154446, 0.872305, 0.143924, 0.251979, 0.880669, 0.598887, 0.571896, 0.515851, 0.952398, 0.124642, 0.168786, 0.441385, 0.598686, 0.723883, 0.579736, 0.527448, 0.523717, 0.838053, 0.149124, 0.673669, 0.14229, 0.386783, 0.889584, 0.598184, 0.513162, 0.594427, 0.721933, 0.643449, 0.695772, 0.899081, 0.664523, 0.139265, 0.945223, 0.219066, 0.405808, 0.263777, 0.412627, 0.0857935, 0.889626, 0.926571, 0.87758, 0.198609, 0.856981, 0.869558, 0.305647, 0.812331, 0.692538, 0.451703, 0.951932, 0.495356, 0.864734, 0.902125, 0.569037, 0.520535, 0.216667, 0.255297, 0.591851, 0.543922, 0.611041, 0.786669, 0.088188, 0.985764, 0.842065, 0.58106, 0.620247, 0.605754, 0.975696, 0.954957, 0.608862, 0.655566, 0.674444, 0.698734, 0.156191, 0.145641, 0.672113, 0.18538, 0.0873615, 0.443558, 0.299587, 0.819442, 0.150055, 0.880069, 0.172834, 0.653473, 0.391496, 0.118508, 0.565083, 0.935051, 0.523522, 0.55255, 0.576251, 0.911273, 0.320996, 0.419382, 0.108884, 0.629189, 0.90214, 0.379324, 0.993774, 0.656224, 0.710223, 0.953051, 0.514451, 0.682708, 0.652504, 0.77341, 0.645461, 0.0391939, 0.471361, 0.760638, 0.854566, 0.697731, 0.0321098, 0.452528, 0.512962, 0.481702, 0.59688, 0.167095, 0.706933, 0.655891, 0.0525858, 0.222633, 0.113384, 0.558338, 0.0295255, 0.653678, 0.599873, 0.880331, 0.827267, 0.664861, 0.150684, 0.283206, 0.753851, 0.836932, 0.439752, 0.213542, 0.679184, 0.288703, 0.290065, 0.45681, 0.0810904, 0.998415, 0.900188, 0.0149383, 0.886106, 0.984188, 0.733709, 0.844325, 0.680569, 0.908838, 0.372753, 0.774053, 0.911128, 0.747994, 0.574793, 0.782937, 0.0510561, 0.506438, 0.464558, 0.588053, 0.439998, 0.2672, 0.513888, 0.269463, 0.702044, 0.317576, 0.216274, 0.031019, 0.558371, 0.708373, 0.0640268, 
0.315883, 0.475111, 0.649129, 0.563481, 0.526673, 0.0235617, 0.860234, 0.244788, 0.736804, 0.512987, 0.539436, 0.0233963, 0.602965, 0.635571, 0.133214, 0.901046, 0.557304, 0.950225, 0.212929, 0.0560788, 0.565201, 0.433351, 0.870745, 0.437179, 0.958209, 0.215859, 0.509427, 0.320626, 0.619849, 0.491132, 0.0804033, 0.668352, 0.47902, 0.139108, 0.232216, 0.128463, 0.13565, 0.950811, 0.346151, 0.447842, 0.878921, 0.132984, 0.506792, 0.777438, 0.0717582, 0.713886, 0.201366, 0.0557625, 0.578446, 0.896453, 0.987899, 0.719708, 0.615456, 0.0890016, 0.738222, 0.118148, 0.0637626, 0.708409, 0.208099, 0.210579, 0.513177, 0.316742, 0.798657, 0.250799, 0.862258, 0.08806, 0.128748, 0.918765, 0.534242, 0.67558, 0.799917, 0.785529, 0.823307, 0.620837, 0.908448, 0.791941, 0.280674, 0.0792215, 0.61188, 0.939193, 0.874396, 0.248804, 0.165018, 0.935119, 0.252039, 0.433913, 0.230363, 0.645287, 0.0865046, 0.630551, 0.329475, 0.585583, 0.316526, 0.041871, 0.402855, 0.432552, 0.59056, 0.727475, 0.204989, 0.17798, 0.300884, 0.476527, 0.694128, 0.581187, 0.343164, 0.545756, 0.447758, 0.359712, 0.376672, 0.343796, 0.914163, 0.477172, 0.193058, 0.0193624, 0.429995, 0.548162, 0.118708, 0.0308346, 0.556546, 0.259306, 0.49907, 0.0918193, 0.600778, 0.301899, 0.684721, 0.347618, 0.0539441, 0.645642, 0.256792, 0.739836, 0.720385, 0.993516, 0.018852, 0.611061, 0.591492, 0.331202, 0.494316, 0.686017, 0.583879, 0.334357, 0.827355, 0.75122, 0.938576, 0.0462259, 0.480235, 0.941855, 0.703965, 0.825785, 0.511577, 0.547982, 0.65266, 0.195514, 0.762747, 0.754568, 0.672253, 0.67758, 0.918976, 0.0389931, 0.329636, 0.041438, 0.475509, 0.947122, 0.0900319, 0.721723, 0.331855, 0.474696, 0.0360539, 0.752195, 0.838539, 0.12994, 0.846914, 0.654886, 0.59994, 0.16267, 0.205384, 0.168954, 0.984104, 0.90327, 0.863875, 0.242552, 0.895462, 0.305136, 0.288444, 0.610684, 0.755873, 0.63405, 0.33295, 0.364817, 0.758494, 0.595479, 0.0847344, 0.179335, 0.980152, 0.815415, 0.109462, 0.483747, 0.429614, 0.216308, 0.795251, 0.632138, 0.0943184, 0.416516, 0.0922638, 0.142128, 0.786848, 0.942409, 0.81263, 0.720261, 0.152464, 0.356029, 0.854222, 0.931994, 0.1807, 0.916695, 0.199493, 0.0979094, 0.544592, 0.933433, 0.0671128, 0.365203, 0.658035, 0.431006, 0.146817, 0.593439, 0.706674, 0.995432, 0.785801, 0.269154, 0.468617, 0.359843, 0.690938, 0.443121, 0.0479989, 0.801794, 0.61063, 0.266135, 0.163648, 0.308716, 0.533489, 0.8515, 0.836461, 0.159344, 0.691289, 0.957633, 0.029623, 0.156255, 0.974458, 0.296656, 0.292426, 0.342406, 0.409721, 0.618199, 0.772889, 0.264086, 0.277807, 0.100902, 0.16407, 0.245145, 0.739411, 0.261475, 0.0857188, 0.742533, 0.88102, 0.770895, 0.354722, 0.187596, 0.562033, 0.71665, 0.869772, 0.154177, 0.810222, 0.0378963, 0.84708, 0.513031, 0.355424, 0.26396, 0.0493565, 0.0843991, 0.226172, 0.773675, 0.175576, 0.687945, 0.552908, 0.947004, 0.0715233, 0.034247, 0.112635, 0.793135, 0.402288, 0.543981, 0.00907678, 0.508435, 0.922146, 0.306007, 0.74684, 0.241617, 0.539046, 0.83593, 0.125505, 0.989815, 0.253745, 0.100077, 0.246528, 0.188176, 0.0689867, 0.0343654, 0.130735, 0.648559, 0.259024, 0.778773, 0.89595, 0.108323, 0.0792606, 0.947738, 0.92438, 0.285765, 0.0786912, 0.314013, 0.196006, 0.444298, 0.0252209, 0.505793, 0.160503, 0.179412, 0.532002, 0.294424, 0.272184, 0.766327, 0.395353, 0.816067, 0.196421, 0.497539, 0.878986, 0.0961405, 0.820007, 0.249975, 0.195096, 0.17911, 0.534018, 0.202452, 0.833013, 0.407809, 0.409207, 0.0293441, 0.159373, 0.97974, 0.797579, 0.585338, 0.205244, 0.939742, 0.188395, 0.856593, 0.987226, 0.741505, 0.188762, 
0.399772, 0.527054, 0.168712, 0.648863, 0.76007, 0.888695, 0.474122, 0.54378, 0.850162, 0.00695785, 0.70291, 0.749108, 0.276164, 0.47393, 0.83693, 0.141871, 0.516607, 0.670846, 0.665674, 0.541732, 0.78388, 0.708681, 0.682724, 0.259671, 0.869733, 0.758025, 0.0200481, 0.794448, 0.681449, 0.925119, 0.734508, 0.950064, 0.574124, 0.019148, 0.249436, 0.619557, 0.500477, 0.0232975, 0.734679, 0.660496, 0.22638, 0.378174, 0.723271, 0.0617378, 0.517085, 0.324872, 0.625275, 0.17625, 0.745463, 0.0504503, 0.470506, 0.781117, 0.213397, 0.817098, 0.25337, 0.896663, 0.160532, 0.658547, 0.46961, 0.914045, 0.471251, 0.93375, 0.343209, 0.302037, 0.307772, 0.604093, 0.745497, 0.562435, 0.0445669, 0.660057, 0.307442, 0.596101, 0.38788, 0.5832, 0.477978, 0.578257, 0.66501, 0.494558, 0.931155, 0.338653, 0.63374, 0.355696, 0.159734, 0.312081, 0.707564, 0.89523, 0.485531, 0.0931612, 0.76101, 0.451376, 0.837182, 0.593376, 0.289006, 0.751113, 0.283227, 0.747966, 0.855681, 0.689777, 0.735593, 0.926367, 0.71427, 0.874113, 0.901599, 0.00239164, 0.0607418, 0.256944, 0.683404, 0.468545, 0.115099, 0.194632, 0.892871, 0.433851, 0.996485, 0.622798, 0.595824, 0.702424, 0.51244, 0.342794, 0.122775, 0.617851, 0.0492106, 0.60586, 0.690809, 0.358781, 0.849572, 0.408017, 0.846986, 0.41019, 0.0670375, 0.106252, 0.758986, 0.510894, 0.73255, 0.0907279, 0.9959, 0.392184, 0.779877, 0.736487, 0.74064, 0.105182, 0.708333, 0.588861, 0.899197, 0.547862, 0.57261, 0.997025, 0.769819, 0.00366789, 0.815132, 0.431208, 0.778708, 0.359116, 0.121065, 0.215232, 0.338811, 0.964539, 0.422779, 0.621301, 0.30488, 0.11085, 0.532405, 0.416646, 0.914285, 0.653672, 0.401255, 0.949251, 0.974299, 0.146397, 0.0662309, 0.476871, 0.0422005, 0.800192, 0.099558, 0.975073, 0.413739, 0.291759, 0.881841, 0.714534, 0.286928, 0.468234, 0.550947, 0.40592, 0.72912, 0.25504, 0.501294, 0.93432, 0.0385952, 0.348364, 0.718752, 0.692493, 0.104062, 0.606665, 0.407537, 0.737115, 0.371631, 0.611629, 0.37884, 0.0858408, 0.934316, 0.908099, 0.565021, 0.0567954, 0.775883, 0.197617, 0.234176, 0.969888, 0.201691, 0.594068, 0.429932, 0.081072, 0.611016, 0.593793, 0.451607, 0.794435, 0.721078, 0.420073, 0.859341, 0.717363, 0.747458, 0.406955, 0.0364475, 0.663231, 0.291466, 0.470375, 0.0047093, 0.532695, 0.982304, 0.0546338, 0.43695, 0.549221, 0.0228999, 0.313636, 0.724177, 0.569925, 0.721449, 0.952554, 0.309281, 0.593856, 0.506905, 0.48428, 0.478447, 0.0746817, 0.252027, 0.946105, 0.553573, 0.615992, 0.478797, 0.613452, 0.678677, 0.551615, 0.798669, 0.355127, 0.186931, 0.139173, 0.242292, 0.441654, 0.330708, 0.962091, 0.267849, 0.915209, 0.788517, 0.692206, 0.847091, 0.384341, 0.228045, 0.815585, 0.975597, 0.491128, 0.959654, 0.319, 0.517584, 0.857105, 0.670844, 0.900283, 0.667352, 0.988426, 0.451664, 0.805026, 0.283802, 0.73142, 0.0960275, 0.071514, 0.0449618, 0.898698, 0.699349, 0.650716, 0.737359, 0.697591, 0.239759, 0.242478, 0.773937, 0.598934, 0.562279, 0.990877, 0.542295, 0.706523, 0.0400013, 0.751255, 0.914907, 0.704425, 0.791048, 0.132692, 0.388448, 0.209881, 0.80192, 0.692122, 0.857476, 0.271173, 0.777848, 0.443368, 0.373062, 0.153535, 0.249409, 0.092942, 0.00638641, 0.777268, 0.7686, 0.478498, 0.887034, 0.378683, 0.894564, 0.367347, 0.0834392, 0.497305, 0.95591, 0.814606, 0.474987, 0.000995387, 0.350213, 0.689531, 0.0988024, 0.968241, 0.860767, 0.394065, 0.599377, 0.227755, 0.146452, 0.582612, 0.516709, 0.520141, 0.634226, 0.108507, 0.498854, 0.424619, 0.780326, 0.80567, 0.629687, 0.127723, 0.0336893, 0.631424, 0.666024, 0.866805, 0.287554, 0.523238, 0.183463, 0.417182, 
0.400929, 0.886631, 0.160494, 0.128828, 0.0407184, 0.447326, 0.17435, 0.0237744, 0.987539, 0.722343, 0.676694, 0.0529974, 0.629661, 0.379117, 0.626704, 0.462754, 0.325253, 0.154611, 0.601948, 0.959109, 0.544204, 0.363885, 0.105403, 0.219433, 0.496932, 0.315283, 0.41122, 0.196829, 0.451421, 0.893969, 0.510993, 0.0063349, 0.951801, 0.129267, 0.807773, 0.847764, 0.76398, 0.233071, 0.468183, 0.0976127, 0.0472951, 0.298605, 0.469274, 0.134112, 0.67559, 0.12472, 0.327388, 0.928126, 0.382105, 0.311168, 0.625371, 0.0629552, 0.438605, 0.779704, 0.793958, 0.309734, 0.179684, 0.258045, 0.602858, 0.189726, 0.485729, 0.675793, 0.269612, 0.223249, 0.244902, 0.318763, 0.0615377, 0.390682, 0.477955, 0.0818311, 0.985299, 0.433035, 0.845484, 0.439238, 0.585415, 0.28516, 0.399402, 0.524232, 0.786048, 0.0700261, 0.8799, 0.899489, 0.903616, 0.271186, 0.0209665, 0.88615, 0.585638, 0.601248, 0.630287, 0.736329, 0.0780644, 0.418635, 0.242044, 0.823358, 0.963733, 0.970264, 0.0206052, 0.36637, 0.32973, 0.0733889, 0.0924538, 0.0537379, 0.089988, 0.293227, 0.167674, 0.170407, 0.369805, 0.162376, 0.0768395, 0.401258, 0.0205944, 0.214655, 0.565842, 0.0722265, 0.527809, 0.891475, 0.742952, 0.914229, 0.383209, 0.56968, 0.741784, 0.125203, 0.168684, 0.0958699, 0.359097, 0.299145, 0.419792, 0.0619757, 0.222475, 0.815539, 0.0333095, 0.272609, 0.851514, 0.283708, 0.525649, 0.148378, 0.561638, 0.817438, 0.145619, 0.262795, 0.692642, 0.231738, 0.444971, 0.428266, 0.842754, 0.0453067, 0.412802, 0.259347, 0.574169, 0.535283, 0.922358, 0.446502, 0.834965, 0.321975, 0.502065, 0.723217, 0.494265, 0.155392, 0.126007, 0.296005, 0.679054, 0.806038, 0.535309, 0.0206131, 0.792164, 0.77221, 0.680747, 0.149021, 0.466388, 0.794513, 0.370934, 0.632748, 0.253375, 0.966416, 0.0559242, 0.257231, 0.235184, 0.00847955, 0.405919, 0.663183, 0.947921, 0.560889, 0.524148, 0.736809, 0.108796, 0.435435, 0.706312, 0.0904993, 0.826288, 0.796573, 0.846489, 0.989677, 0.759413, 0.535015, 0.60565, 0.51432, 0.68337, 0.695341, 0.901442, 0.662793, 0.487029, 0.350308, 0.831463, 0.821471, 0.857085, 0.783135, 0.781994, 0.26521, 0.76974, 0.948408, 0.25824, 0.175769, 0.413641, 0.120984, 0.41467, 0.154133, 0.447632, 0.262196, 0.233872, 0.027248, 0.456559, 0.564264, 0.195967, 0.87957, 0.712771, 0.570181, 0.683408, 0.344589, 0.608551, 0.928497, 0.978937, 0.665815, 0.191938, 0.913725, 0.229831, 0.276609, 0.996722, 0.0614733, 0.756541, 0.150352, 0.81332, 0.364726, 0.698872, 0.0488913, 0.488767, 0.658977, 0.177127, 0.449505, 0.889814, 0.945673, 0.479735, 0.669513, 0.212768, 0.377593, 0.81207, 0.118393, 0.147627, 0.102521, 0.878881, 0.412474, 0.603358, 0.526986, 0.386922, 0.209253, 0.987137, 0.718463, 0.717605, 0.806746, 0.380589, 0.0898503, 0.340875, 0.0599232, 0.816005, 0.284385, 0.268579, 0.15688, 0.660675, 0.249349, 0.735313, 0.979476, 0.696837, 0.984901, 0.307522, 0.708236, 0.0849013, 0.297076, 0.359044, 0.0566235, 0.732831, 0.24032, 0.440776, 0.903334, 0.349843, 0.551364, 0.198349, 0.965711, 0.74446, 0.65457, 0.858117, 0.72294, 0.273375, 0.246046, 0.489922, 0.68976, 0.369694, 0.72915, 0.65725, 0.30668, 0.396834, 0.269671, 0.897012, 0.969904, 0.322165, 0.28877, 0.403919, 0.745858, 0.0508711, 0.275434, 0.968689, 0.508743, 0.895674, 0.415087, 0.888818, 0.935279, 0.393171, 0.691673, 0.634202, 0.406149, 0.437993, 0.444085, 0.382038, 0.36729, 0.37336, 0.41162, 0.0275015, 0.338587, 0.524497, 0.0507328, 0.647831, 0.627665, 0.76997, 0.949813, 0.688768, 0.87357, 0.386311, 0.609754, 0.474682, 0.776409, 0.7274, 0.429997, 0.326422, 0.991154, 0.563406, 0.797949, 0.806496, 
0.472436, 0.861442, 0.241235, 0.115021, 0.841425, 0.067718, 0.855504, 0.869459, 0.818138, 0.47471, 0.305221, 0.985908, 0.741069, 0.0359858, 0.548461, 0.851616, 0.00124529, 0.831263, 0.524583, 0.700077, 0.486439, 0.690473, 0.139002, 0.25776, 0.724213, 0.784629, 0.361253, 0.559542, 0.51449, 0.149615, 0.258914, 0.275199, 0.385751, 0.220086, 0.0419866, 0.608323, 0.733945, 0.795928, 0.12021, 0.151217, 0.0478011, 0.861415, 0.555441, 0.0348796, 0.0723636, 0.827777, 0.332871, 0.887277, 0.877651, 0.0785353, 0.197532, 0.322672, 0.000916138, 0.940004, 0.753226, 0.804484, 0.975536, 0.192233, 0.384034, 0.881944, 0.292808, 0.368571, 0.18698, 0.992044, 0.354451, 0.527305, 0.348339, 0.773856, 0.23059, 0.0443612, 0.558384, 0.000874037, 0.274346, 0.280431, 0.764785, 0.380712, 0.254584, 0.902307, 0.97328, 0.573603, 0.95614, 0.360903, 0.934183, 0.339851, 0.789651, 0.882639, 0.0225178, 0.462162, 0.960021, 0.932408, 0.041856, 0.999388, 0.857339, 0.537505, 0.44124, 0.101818, 0.438442, 0.570012, 0.847642, 0.425616, 0.606355, 0.494681, 0.907646, 0.858777, 0.40926, 0.926747, 0.809049, 0.338416, 0.239173, 0.467034, 0.209364, 0.232802, 0.973714, 0.760907, 0.496914, 0.319526, 0.22954, 0.338775, 0.593522, 0.908391, 0.00225433, 0.613819, 0.263566, 0.90629, 0.0143624, 0.762406, 0.515316, 0.706617, 0.342984, 0.52917, 0.400909, 0.801304, 0.214512, 0.201463, 0.697807, 0.534033, 0.267793, 0.830113, 0.602329, 0.660705, 0.770421, 0.340517, 0.251196, 0.290911, 0.524877, 0.159722, 0.670449, 0.995454, 0.511623, 0.298313, 0.944021, 0.0948249, 0.73058, 0.838334, 0.643656, 0.783853, 0.583298, 0.455548, 0.230939, 0.457534, 0.00278924, 0.6042, 0.360588, 0.922623, 0.644665, 0.103884, 0.96859, 0.548638, 0.0705228, 0.5299, 0.499172, 0.201098, 0.378225, 0.24914, 0.198072, 0.822918, 0.622897, 0.568341, 0.98089, 0.620874, 0.963039, 0.895195, 0.292738, 0.0731504, 0.445395, 0.572362, 0.373684, 0.484523, 0.441638, 0.132963, 0.569192, 0.0756962, 0.572306, 0.884098, 0.556036, 0.0122078, 0.848378, 0.880924, 0.186021, 0.181906, 0.149379, 0.907829, 0.0792597, 0.65573, 0.371839, 0.658314, 0.973877, 0.626929, 0.853348, 0.538424, 0.119907, 0.104601, 0.447656, 0.338296, 0.416, 0.00649671, 0.567067, 0.586761, 0.457382, 0.0565516, 0.0336257, 0.882286, 0.879863, 0.123068, 5.65988e-05, 0.217614, 0.170666, 0.0279886, 0.994332, 0.406638, 0.55916, 0.955373, 0.498295, 0.462271, 0.2479, 0.233727, 0.507336, 0.255721, 0.723603, 0.911872, 0.226847, 0.749499, 0.086894, 0.506925, 0.822442, 0.0512878, 0.111878, 0.21249, 0.0178278, 0.123097, 0.912115, 0.915155, 0.506104, 0.527865, 0.922091, 0.39081, 0.644601, 0.908082, 0.562287, 0.0780806, 0.735341, 0.744356, 0.845752, 0.103222, 0.407326, 0.515565, 0.0541111, 0.890123, 0.00640169, 0.533819, 0.49394, 0.958385, 0.0709909, 0.805157, 0.218626, 0.395703, 0.344168, 0.722025, 0.446, 0.712189, 0.921133, 0.0254779, 0.646931, 0.654103, 0.365839, 0.776464, 0.524957, 0.455485, 0.811014, 0.992469, 0.814505, 0.134269, 0.780882, 0.106967, 0.426112, 0.0496123, 0.881183, 0.765958, 0.506653, 0.943266, 0.278787, 0.40261, 0.66606, 0.139293, 0.0228334, 0.425202, 0.167311, 0.943914, 0.779232, 0.327429, 0.693353, 0.668736, 0.286584, 0.262277, 0.478177, 0.259639, 0.0183194, 0.305498, 0.561589, 0.251985, 0.340317, 0.874261, 0.637282, 0.514902, 0.273802, 0.8099, 0.441, 0.535816, 0.474427, 0.388549, 0.793907, 0.996526, 0.439355, 0.466316, 0.830795, 0.671577, 0.807265, 0.401645, 0.541625, 0.00881933, 0.740642, 0.231504, 0.292738, 0.939252, 0.59696, 0.74875, 0.913195, 0.326506, 0.076236, 0.574883, 0.133972, 0.890266, 0.984688, 0.140902, 0.128486, 
0.117951, 0.431119, 0.426284, 0.415743, 0.922487, 0.745546, 0.840077, 0.49333, 0.663276, 0.773438, 0.171853, 0.975787, 0.791077, 0.633345, 0.773184, 0.744493, 0.539753, 0.902543, 0.414323, 0.608237, 0.222546, 0.348496, 0.099356, 0.278749, 0.251611, 0.520344, 0.112646, 0.200462, 0.744055, 0.168325, 0.635503, 0.671938, 0.229579, 0.555053, 0.623382, 0.468171, 0.850039, 0.701759, 0.640127, 0.129329, 0.570423, 0.912828, 0.884657, 0.00423518, 0.955386, 0.742899, 0.0653215, 0.37956, 0.960933, 0.418469, 0.509939, 0.993672, 0.0861226, 0.650177, 0.192774, 0.42669, 0.598282, 0.879146, 0.531295, 0.529316, 0.306005, 0.21197, 0.311037, 0.00723488, 0.0537805, 0.987387, 0.948944, 0.326299, 0.292817, 0.833299, 0.233572, 0.450509, 0.0697152, 0.268766, 0.962614, 0.0761093, 0.268078, 0.0977896, 0.0284287, 0.356428, 0.864834, 0.310785, 0.362303, 0.164077, 0.756474, 0.42537, 0.0927695, 0.0604679, 0.823764, 0.323885, 0.761985, 0.864902, 0.11623, 0.397535, 0.718254, 0.637596, 0.490933, 0.269322, 0.660851, 0.652692, 0.61003, 0.589594, 0.706991, 0.261704, 0.333875, 0.898269, 0.483735, 0.660443, 0.305574, 0.676744, 0.0208565, 0.863915, 0.0416533, 0.880769, 0.641593, 0.826679, 0.536049, 0.650721, 0.926316, 0.452664, 0.13573, 0.0310741, 0.781587, 0.637631, 0.589902, 0.0853312, 0.0971944, 0.829053, 0.467658, 0.613862, 0.98756, 0.58061, 0.268108, 0.511828, 0.774297, 0.423103, 0.182906, 0.257511, 0.170173, 0.785892, 0.763226, 0.53282, 0.423058, 0.447917, 0.88953, 0.832336, 0.742542, 0.319188, 0.194562, 0.694791, 0.762418, 0.49335, 0.0660767, 0.0274218, 0.151684, 0.84565, 0.221872, 0.31347, 0.967216, 0.737638, 0.804929, 0.637495, 0.213278, 0.908625, 0.576544, 0.771004, 0.987418, 0.0409891, 0.467138, 0.617153, 0.736946, 0.867113, 0.951811, 0.154648, 0.740801, 0.270877, 0.853842, 0.00386076, 0.316807, 0.857982, 0.226176, 0.318465, 0.700062, 0.554091, 0.315492, 0.243599, 0.165242, 0.0287095, 0.471605, 0.269506, 0.0481783, 0.267451, 0.816572, 0.315644, 0.807287, 0.744161, 0.670262, 0.993573, 0.169422, 0.233041, 0.0409642, 0.815066, 0.169667, 0.170602, 0.0370063, 0.401435, 0.660011, 0.665482, 0.803983, 0.199319, 0.252062, 0.352841, 0.479694, 0.227764, 0.451998, 0.790808, 0.0903161, 0.909742, 0.535406, 0.251591, 0.633483, 0.263571, 0.0240886, 0.996613, 0.276152, 0.302253, 0.985316, 0.240864, 0.154651, 0.965778, 0.773741, 0.235246, 0.371968, 0.249547, 0.0566258, 0.471484, 0.877786, 0.972836, 0.639844, 0.0631808, 0.969798, 0.524733, 0.321179, 0.310998, 0.256906, 0.614546, 0.647912, 0.232712, 0.420284, 0.655213, 0.936621, 0.851222, 0.451812, 0.400019, 0.0840677, 0.290769, 0.243043, 0.902345, 0.432773, 0.184514, 0.420329, 0.845841, 0.599513, 0.526012, 0.951629, 0.165279, 0.150241, 0.117574, 0.636763, 0.684123, 0.715864, 0.587858, 0.025171, 0.354233, 0.297314, 0.8812, 0.844317, 0.819331, 0.880256, 0.74756, 0.980348, 0.629663, 0.268299, 0.465652, 0.324411, 0.455475, 0.225096, 0.790657, 0.484869, 0.368002, 0.95576, 0.335719, 0.423297, 0.0927763, 0.886378, 0.814155, 0.477265, 0.561966, 0.335924, 0.670047, 0.630704, 0.64197, 0.656276, 0.231895, 0.337536, 0.269058, 0.369385, 0.12747, 0.399863, 0.852, 0.636525, 0.2598, 0.307778, 0.493378, 0.0167589, 0.484461, 0.967248, 0.677949, 0.0127936, 0.543639, 0.885348, 0.978461, 0.845458, 0.789074, 0.957692, 0.310436, 0.191076, 0.999502, 0.334978, 0.170537, 0.817536, 0.618543, 0.555896, 0.0646441, 0.263872, 0.850947, 0.0523041, 0.536365, 0.408294, 0.562754, 0.271821, 0.765559, 0.794759, 0.10703, 0.00708059, 0.918917, 0.306319, 0.341069, 0.880398, 0.763809, 0.197381, 0.480464, 0.530267, 0.385899, 
0.723375, 0.277873, 0.466349, 0.979985, 0.389348, 0.781419, 0.0531951, 0.469381, 0.459414, 0.799205, 0.0248302, 0.044803, 0.469655, 0.0129992, 0.61234, 0.894183, 0.783739, 0.904596, 0.866173, 0.351728, 0.228115, 0.0738683, 0.975912, 0.654697, 0.0676095, 0.245336, 0.296884, 0.431285, 0.894518, 0.295938, 0.621898, 0.486347, 0.382868, 0.261768, 0.454453, 0.694242, 0.229356, 0.414457, 0.756957, 0.375901, 0.986627, 0.662565, 0.979746, 0.459894, 0.478174, 0.0255572, 0.92557, 0.386836, 0.747861, 0.778869, 0.868357, 0.143423, 0.645793, 0.753509, 0.630005, 0.790593, 0.0570299, 0.433423, 0.130058, 0.11803, 0.0206789, 0.875954, 0.377795, 0.992081, 0.268633, 0.352508, 0.339212, 0.647901, 0.310948, 0.200319, 0.00643757, 0.941193, 0.49454, 0.03072, 0.420265, 0.975198, 0.773332, 0.869828, 0.673919, 0.780288, 0.527735, 0.478272, 0.0950875, 0.774917, 0.782396, 0.107367, 0.0598637, 0.00169379, 0.467261, 0.324674, 0.478194, 0.674764, 0.303398, 0.294699, 0.822782, 0.982428, 0.564287, 0.266489, 0.597584, 0.701892, 0.923458, 0.215085, 0.172613, 0.0817669, 0.543924, 0.847161, 0.845239, 0.321743, 0.868944, 0.232951, 0.0944018, 0.364834, 0.518375, 0.0074743, 0.798166, 0.119923, 0.728442, 0.0592487, 0.0585591, 0.645181, 0.250932, 0.285384, 0.701103, 0.998062, 0.504218, 0.284519, 0.0731458, 0.87343, 0.936796, 0.797734, 0.18008, 0.574698, 0.99614, 0.297293, 0.576629, 0.485442, 0.158327, 0.118332, 0.361367, 0.443002, 0.17552, 0.383444, 0.445807, 0.970911, 0.000348134, 0.00648888, 0.38191, 0.150335, 0.863533, 0.414863, 0.297503, 0.0815298, 0.858219, 0.236893, 0.613212, 0.618372, 0.97567, 0.114038, 0.624537, 0.0851054, 0.62879, 0.865654, 0.737639, 0.559916, 0.0729778, 0.183217, 0.977971, 0.0806749, 0.561561, 0.778839, 0.992282, 0.0630433, 0.481675, 0.205386, 0.187635, 0.0660333, 0.890362, 0.0724167, 0.993708, 0.723064, 0.820016, 0.0462525, 0.0191416, 0.0581466, 0.398842, 0.575788, 0.472682, 0.549188, 0.678496, 0.980795, 0.511205, 0.136541, 0.0410053, 0.800786, 0.155533, 0.0848418, 0.314689, 0.735425, 0.905364, 0.296371, 0.543801, 0.960839, 0.385233, 0.905602, 0.631929, 0.287987, 0.92218, 0.0226908, 0.426345, 0.93523, 0.765834, 0.692233, 0.987488, 0.576867, 0.316875, 0.844857, 0.521633, 0.270659, 0.835086, 0.0609623, 0.910122, 0.536354, 0.0832229, 0.414926, 0.724235, 0.884801, 0.341323, 0.628771, 0.40228, 0.349041, 0.307317, 0.981899, 0.713284, 0.246379, 0.505779, 0.495778, 0.505811, 0.611465, 0.907194, 0.629933, 0.12562, 0.405745, 0.352262, 0.524062, 0.267971, 0.466107, 0.721615, 0.807413, 0.348904, 0.167614, 0.578447, 0.935748, 0.0450163, 0.507162, 0.951889, 0.966317, 0.0877336, 0.431193, 0.876919, 0.950456, 0.900372, 0.158262, 0.33375, 0.0467862, 0.47814, 0.703455, 0.745038, 0.0469431, 0.791596, 0.368633, 0.340442, 0.355314, 0.334377, 0.175813, 0.456324, 0.700898, 0.915227, 0.440953, 0.0391988, 0.766786, 0.178846, 0.68907, 0.189534, 0.562301, 0.410683, 0.272582, 0.677843, 0.282324, 0.309716, 0.834704, 0.50158, 0.823312, 0.206172, 0.902249, 0.472739, 0.647539, 0.715614, 0.475139, 0.638383, 0.341666, 0.627013, 0.315206, 0.0682273, 0.0606975, 0.882019, 0.701052, 0.246667, 0.104804, 0.0455308, 0.285911, 0.220354, 0.56322, 0.393112, 0.578118, 0.744302, 0.689929, 0.518555, 0.870048, 0.890522, 0.889417, 0.325999, 0.494022, 0.233695, 0.952726, 0.0385373, 0.407067, 0.0347279, 0.19312, 0.753918, 0.515723, 0.797076, 0.566544, 0.695808, 0.968721, 0.113709, 0.0948931, 0.409733, 0.479719, 0.280147, 0.616622, 0.769387, 0.238637, 0.470991, 0.108955, 0.382733, 0.148666, 0.860224, 0.63555, 0.573585, 0.259686, 0.89585, 0.684758, 
0.50128, 0.842045, 0.127411, 0.00251116, 0.353822, 0.168303, 0.235039, 0.507299, 0.389384, 0.466725, 0.807075, 0.0934202, 0.32084, 0.0965382, 0.000680077, 0.479738, 0.555253, 0.297796, 0.927839, 0.747806, 0.915874, 0.591448, 0.306583, 0.647932, 0.223656, 0.461806, 0.367503, 0.315465, 0.278264, 0.739667, 0.253012, 0.363668, 0.65443, 0.64289, 0.622744, 0.241148, 0.818162, 0.183644, 0.45706, 0.243083, 0.028495, 0.789955, 0.743086, 0.0981896, 0.41838, 0.209638, 0.962447, 0.262913, 0.313897, 0.27735, 0.0684438, 0.84612, 0.950009, 0.67067, 0.111097, 0.0312597, 0.543855, 0.675642, 0.875872, 0.106393, 0.105223, 0.754994, 0.630024, 0.206925, 0.165979, 0.546183, 0.747854, 0.425707, 0.115175, 0.162343, 0.0306342, 0.929389, 0.338631, 0.358558, 0.62614, 0.440899, 0.711497, 0.214624, 0.551539, 0.119272, 0.954884, 0.626478, 0.3612, 0.584047, 0.769484, 0.359127, 0.160751, 0.734744, 0.677129, 0.839935, 0.316505, 0.292216, 0.776418, 0.598551, 0.487589, 0.0491978, 0.310085, 0.403593, 0.88392, 0.649444, 0.164407, 0.0451848, 0.628468, 0.947493, 0.0589229, 0.937404, 0.0709359, 0.338652, 0.428097, 0.547914, 0.144509, 0.972245, 0.933511, 0.75426, 0.406064, 0.169455, 0.356325, 0.286159, 0.279474, 0.778376, 0.842366, 0.847085, 0.920419, 0.73594, 0.624887, 0.427434, 0.38396, 0.683614, 0.25583, 0.119329, 0.302285, 0.510922, 0.173838, 0.20548, 0.916401, 0.294471, 0.869802, 0.307878, 0.909924, 0.954921, 0.367555, 0.839034, 0.279102, 0.0433055, 0.968763, 0.2089, 0.676989, 0.581373, 0.624941, 0.560735, 0.744382, 0.0730278, 0.453458, 0.0570532, 0.268415, 0.307563, 0.641737, 0.106627, 0.0561247, 0.66462, 0.175538, 0.915098, 0.502369, 0.888687, 0.334271, 0.746737, 0.954022, 0.0798754, 0.00328626, 0.928035, 0.463675, 0.178931, 0.0495312, 0.444595, 0.320406, 0.891997, 0.127164, 0.707062, 0.768988, 0.163014, 0.242369, 0.525906, 0.74129, 0.698391, 0.54179, 0.911614, 0.260523, 0.787305, 0.133587, 0.518382, 0.87535, 0.865569, 0.0253879, 0.0277266, 0.211298, 0.722448, 0.691866, 0.060618, 0.129667, 0.49899, 0.415236, 0.0318108, 0.512726, 0.0938632, 0.0161872, 0.319841, 0.716943, 0.642935, 0.214237, 0.290147, 0.795971, 0.373505, 0.028079, 0.144618, 0.704154, 0.12628, 0.461725, 0.482941, 0.0128279, 0.851411, 0.440899, 0.326729, 0.468245, 0.145899, 0.326815, 0.876144, 0.920086, 0.809058, 0.232627, 0.803387, 0.442804, 0.939601, 0.945181, 0.55998, 0.528004, 0.649126, 0.699537, 0.332909, 0.256248, 0.839875, 0.726439, 0.426984, 0.616964, 0.871088, 0.237828, 0.890082, 0.896479, 0.892103, 0.421173, 0.740644, 0.281891, 0.0524454, 0.447202, 0.510898, 0.0801364, 0.570049, 0.600878, 0.0826094, 0.94344, 0.445363, 0.446237, 0.197255, 0.45926, 0.885508, 0.812071, 0.424999, 0.339396, 0.61542, 0.801851, 0.580263, 0.0226934, 0.313388, 0.578829, 0.374605, 0.784718, 0.578807, 0.211513, 0.928041, 0.965184, 0.284469, 0.112641, 0.832183, 0.838636, 0.00177267, 0.765646, 0.25224, 0.55226, 0.539089, 0.664783, 0.981227, 0.760706, 0.0514444, 0.0934847, 0.0465348, 0.558286, 0.907634, 0.382844, 0.574478, 0.0398199, 0.388391, 0.228121, 0.713064, 0.882326, 0.61628, 0.455327, 0.122208, 0.926622, 0.765222, 0.929361, 0.855264, 0.078441, 0.31893, 0.621053, 0.453362, 0.153213, 0.46565, 0.227835, 0.291579, 0.194077, 0.93233, 0.989632, 0.919703, 0.832392, 0.792795, 0.550827, 0.886083, 0.846107, 0.35329, 0.0121923, 0.451933, 0.663498, 0.997557, 0.468175, 0.916588, 0.309966, 0.645232, 0.087717, 0.39257, 0.630981, 0.519081, 0.582607, 0.91818, 0.61363, 0.186096, 0.532463, 0.731188, 0.909257, 0.805161, 0.0685559, 0.780836, 0.509562, 0.778068, 0.572497, 0.835978, 0.24032, 
0.353555, 0.859011, 0.604704, 0.623506, 0.679616, 0.536171, 0.830041, 0.610155, 0.827619, 0.0802732, 0.192093, 0.254594, 0.0635951, 0.557926, 0.157881, 0.994781, 0.390153, 0.339989, 0.953151, 0.624543, 0.934007, 0.262501, 0.524387, 0.699062, 0.253728, 0.37125, 0.727105, 0.767199, 0.882345, 0.955355, 0.362745, 0.350257, 0.246785, 0.593549, 0.24824, 0.891887, 0.893343, 0.815335, 0.628436, 0.0511543, 0.0537873, 0.783568, 0.567237, 0.463775, 0.0825298, 0.305697, 0.816257, 0.818698, 0.472745, 0.265784, 0.606642, 0.504678, 0.0895341, 0.157104, 0.752828, 0.0362217, 0.551417, 0.848644, 0.263882, 0.960596, 0.773697, 0.124055, 0.0506841, 0.792589, 0.215727, 0.111101, 0.415109, 0.145027, 0.889637, 0.497904, 0.361074, 0.755449, 0.442404, 0.482078, 0.802638, 0.671628, 0.0367703, 0.565128, 0.594261, 0.490843, 0.0935567, 0.701285, 0.388274, 0.116071, 0.481517, 0.955268, 0.933249, 0.952566, 0.160955, 0.40389, 0.893398, 0.875578, 0.768152, 0.999904, 0.537894, 0.414038, 0.260777, 0.887919, 0.452562, 0.83896, 0.999787, 0.312943, 0.604865, 0.391353, 0.24266, 0.631627, 0.347626, 0.480514, 0.475706, 0.39494, 0.0940579, 0.0860045, 0.6199, 0.400146, 0.584296, 0.38691, 0.411376, 0.282076, 0.570497, 0.993794, 0.956034, 0.286617, 0.566959, 0.235066, 0.49901, 0.698238, 0.750195, 0.171546, 0.434758, 0.383917, 0.863634, 0.912861, 0.555333, 0.0149524, 0.486947, 0.0901688, 0.175708, 0.755796, 0.28411, 0.874395, 0.46586, 0.461822, 0.982737, 0.884454, 0.341579, 0.938815, 0.21413, 0.0163947, 0.0823033, 0.0262485, 0.00922418, 0.88355, 0.489256, 0.426046, 0.0142034, 0.686425, 0.108414, 0.960633, 0.172541, 0.288127, 0.65564, 0.544026, 0.844684, 0.782802, 0.685936, 0.899165, 0.38209, 0.262186, 0.958105, 0.827502, 0.174824, 0.406955, 0.857473, 0.373162, 0.0842634, 0.320952, 0.454719, 0.757906, 0.595194, 0.367628, 0.800223, 0.781486, 0.600837, 0.470053, 0.242958, 0.2793, 0.403083, 0.941732, 0.81935, 0.373362, 0.378287, 0.338918, 0.365397, 0.781183, 0.0988392, 0.877452, 0.957223, 0.78754, 0.660712, 0.368955, 0.94748, 0.026127, 0.434759, 0.401086, 0.610533, 0.951442, 0.197315, 0.209682, 0.859322, 0.00838967, 0.439858, 0.750969, 0.145801, 0.836012, 0.466286, 0.147904, 0.326764, 0.607017, 0.177294, 0.672328, 0.652166, 0.592166, 0.833207, 0.183496, 0.733261, 0.397153, 0.618445, 0.000366746, 0.957096, 0.533765, 0.940296, 0.207919, 0.365332, 0.647942, 0.759587, 0.292352, 0.813574, 0.76171, 0.218685, 0.093226, 0.740719, 0.587936, 0.577583, 0.736006, 0.969154, 0.952238, 0.133613, 0.758389, 0.696985, 0.582668, 0.875353, 0.76639, 0.559415, 0.605933, 0.0389316, 0.373498, 0.935082, 0.0809153, 0.711166, 0.600307, 0.295324, 0.74056, 0.58386, 0.664908, 0.0252907, 0.0884693, 0.430309, 0.708101, 0.293711, 0.240815, 0.188063, 0.188553, 0.0168749, 0.741539, 0.869527, 0.130696, 0.81571, 0.41825, 0.428448, 0.460647, 0.649009, 0.486002, 0.233451, 0.93064, 0.971417, 0.601498, 0.455405, 0.801179, 0.434047, 0.672158, 0.519798, 0.815082, 0.441363, 0.303562, 0.250718, 0.208747, 0.387846, 0.175246, 0.418738, 0.801075, 0.92771, 0.0658916, 0.868843, 0.837644, 0.186486, 0.381567, 0.885882, 0.340495, 0.208673, 0.130966, 0.513921, 0.458802, 0.10599, 0.0299906, 0.0404579, 0.865976, 0.757386, 0.334387, 0.379078, 0.163035, 0.241718, 0.110571, 0.360631, 0.736642, 0.777862, 0.67759, 0.937055, 0.234795, 0.150221, 0.913065, 0.592081, 0.0576773, 0.790348, 0.46455, 0.907578, 0.456741, 0.450005, 0.470878, 0.0554506, 0.431759, 2.01181e-05, 0.564749, 0.997117, 0.994438, 0.682621, 0.447522, 0.757602, 0.695597, 0.726673, 0.54065, 0.625737, 0.406653, 0.784196, 0.441998, 
0.259794, 0.532077, 0.693751, 0.404864, 0.15539, 0.274393, 0.0451235, 0.346709, 0.715367, 0.551049, 0.974224, 0.252859, 0.722708, 0.238935, 0.504082, 0.841338, 0.505651, 0.096663, 0.167197, 0.70518, 0.419434, 0.0954384, 0.834063, 0.924045, 0.250123, 0.838178, 0.515398, 0.394269, 0.973769, 0.544868, 0.220531, 0.348264, 0.471231, 0.910399, 0.371692, 0.239035, 0.283137, 0.0198341, 0.633999, 0.017749, 0.0224845, 0.847823, 0.0703067, 0.269342, 0.816295, 0.139635, 0.216818, 0.738104, 0.925015, 0.582532, 0.454695, 0.514741, 0.151348, 0.247303, 0.71083, 0.616979, 0.380606, 0.558457, 0.952899, 0.837502, 0.434478, 0.107341, 0.00448188, 0.639574, 0.276825, 0.629249, 0.450228, 0.626403, 0.29188, 0.969095, 0.836786, 0.445677, 0.29231, 0.407464, 0.610164, 0.169872, 0.966892, 0.984232, 0.383148, 0.397382, 0.54027, 0.641753, 0.0268946, 0.953218, 0.912571, 0.647995, 0.254991, 0.953846, 0.662719, 0.892033, 0.486095, 0.555143, 0.0331859, 0.213047, 0.669907, 0.28687, 0.382696, 0.0998005, 0.509422, 0.183319, 0.347713, 0.241957, 0.64566, 0.331424, 0.134321, 0.73305, 0.12066, 0.0805394, 0.920498, 0.397307, 0.20363, 0.777729, 0.947299, 0.177939, 0.880234, 0.571719, 0.300867, 0.941472, 0.0483151, 0.80732, 0.400853, 0.431369, 0.645346, 0.416123, 0.568809, 0.267143, 0.880516, 0.78577, 0.785678, 0.0851067, 0.00255534, 0.157757, 0.662201, 0.915377, 0.723373, 0.352179, 0.835847, 0.512865, 0.762769, 0.0225603, 0.438353, 0.681039, 0.505969, 0.195328, 0.2453, 0.28594, 0.367198, 0.845016, 0.872612, 0.582698, 0.998599, 0.0558282, 0.723992, 0.984305, 0.598939, 0.677151, 0.595001, 0.211244, 0.397399, 0.220043, 0.403597, 0.164711, 0.49826, 0.229316, 0.549678, 0.640938, 0.824668, 0.0610166, 0.968765, 0.809941, 0.127634, 0.179147, 0.613843, 0.782951, 0.43243, 0.852956, 0.588353, 0.31927, 0.930154, 0.776681, 0.371916, 0.0454076, 0.517232, 0.417526, 0.464681, 0.334236, 0.977484, 0.56186, 0.446042, 0.863181, 0.764683, 0.722893, 0.19873, 0.452124, 0.423359, 0.250521, 0.0408669, 0.714988, 0.454818, 0.722377, 0.442493, 0.743869, 0.750445, 0.00896603, 0.999637, 0.859306, 0.622052, 0.00984311, 0.915339, 0.839071, 0.622021, 0.178664, 0.655102, 0.43843, 0.00882884, 0.874868, 0.0839007, 0.0698086, 0.770121, 0.0338071, 0.106887, 0.0125524, 0.0927738, 0.284345, 0.368431, 0.520371, 0.804542, 0.859059, 0.698052, 0.900442, 0.635178, 0.752663, 0.819541, 0.265296, 0.761921, 0.898906, 0.347021, 0.27233, 0.978199, 0.971507, 0.731056, 0.139336, 0.902561, 0.313997, 0.130853, 0.724506, 0.406152, 0.00781752, 0.217576, 0.477282, 0.696602, 0.631149, 0.200357, 0.552317, 0.485827, 0.687642, 0.400412, 0.0935699, 0.622655, 0.320318, 0.821327, 0.0553166, 0.191568, 0.105451, 0.923379, 0.865514, 0.457129, 0.329753, 0.373028, 0.0105954, 0.344259, 0.895799, 0.513171, 0.803857, 0.845156, 0.147428, 0.123813, 0.662467, 0.552214, 0.858177, 0.740461, 0.649366, 0.420809, 0.244729, 0.956317, 0.726337, 0.546206, 0.468602, 0.761364, 0.0537975, 0.375349, 0.821161, 0.0733319, 0.517111, 0.535727, 0.38521, 0.787484, 0.796368, 0.647965, 0.105453, 0.254369, 0.33338, 0.647677, 0.0612522, 0.741927, 0.349987, 0.179408, 0.6048, 0.153559, 0.354355, 0.363531, 0.37749, 0.469923, 0.381003, 0.372826, 0.366789, 0.672025, 0.523813, 0.116648, 0.612683, 0.397663, 0.134278, 0.0901552, 0.748652, 0.916673, 0.273518, 0.47875, 0.777335, 0.967673, 0.650504, 0.19722, 0.417324, 0.463681, 0.631837, 0.261126, 0.705794, 0.512245, 0.308212, 0.876678, 0.122392, 0.628208, 0.0640057, 0.279337, 0.175907, 0.968879, 0.588489, 0.329921, 0.931216, 0.221119, 0.966651, 0.584124, 0.358656, 0.329617, 0.141585, 
0.727031, 0.123368, 0.961685, 0.80185, 0.790325, 0.319885, 0.830949, 0.875482, 0.774923, 0.382228, 0.901703, 0.530953, 0.866276, 0.410903, 0.132908, 0.5954, 0.163553, 0.220279, 0.35274, 0.965362, 0.27438, 0.613329, 0.890423, 0.601048, 0.764333, 0.90703, 0.608397, 0.832686, 0.274695, 0.634447, 0.359276, 0.904883, 0.926244, 0.00519944, 0.784488, 0.216816, 0.154995, 0.637794, 0.0546875, 0.137245, 0.243194, 0.861401, 0.800531, 0.12871, 0.622076, 0.608614, 0.2866, 0.746462, 0.774047, 0.315987, 0.524368, 0.159866, 0.368889, 0.0969622, 0.163438, 0.725631, 0.807078, 0.925556, 0.434968, 0.533191, 0.918657, 0.964278, 0.828131, 0.371973, 0.654585, 0.493809, 0.284604, 0.136881, 0.891064, 0.570485, 0.200445, 0.646669, 0.868014, 0.08682, 0.768373, 0.781637, 0.466424, 0.257744, 0.459711, 0.311552, 0.127242, 0.865274, 0.384451, 0.335898, 0.273185, 0.481254, 0.963494, 0.93857, 0.886942, 0.614986, 0.460825, 0.255676, 0.894295, 0.374841, 0.907363, 0.00195316, 0.275656, 0.709047, 0.526044, 0.719791, 0.0798555, 0.357383, 0.606549, 0.45923, 0.706147, 0.509634, 0.113529, 0.765745, 0.22959, 0.682917, 0.701335, 0.304154, 0.876052, 0.858176, 0.929161, 0.204912, 0.26973, 0.013806, 0.781078, 0.387176, 0.502847, 0.290296, 0.836649, 0.705063, 0.946561, 0.376247, 0.556752, 0.703012, 0.314943, 0.111907, 0.933476, 0.354623, 0.949927, 0.342079, 0.696365, 0.262628, 0.099443, 0.47574, 0.265505, 0.0510212, 0.0982934, 0.0999286, 0.449118, 0.883339, 0.967085, 0.955657, 0.376495, 0.869, 0.47896, 0.23083, 0.814084, 0.904404, 0.696811, 0.451695, 0.611052, 0.0439806, 0.122199, 0.0251573, 0.243319, 0.254371, 0.636349, 0.119272, 0.0607113, 0.248429, 0.806315, 0.894438, 0.546285, 0.863981, 0.44801, 0.0201322, 0.290848, 0.958003, 0.313714, 0.928208, 0.979005, 0.242459, 0.562001, 0.877748, 0.449589, 0.277236, 0.365184, 0.926922, 0.736471, 0.263271, 0.966279, 0.124347, 0.137125, 0.390781, 0.589505, 0.19321, 0.183549, 0.714863, 0.150356, 0.0459436, 0.152114, 0.824315, 0.677068, 0.113147, 0.218698, 0.282096, 0.401187, 0.0399562, 0.423185, 0.724604, 0.938288, 0.233997, 0.635381, 0.25946, 0.0593494, 0.193472, 0.897208, 0.311766, 0.719876, 0.405388, 0.20024, 0.435234, 0.792184, 0.897751, 0.941537, 0.455808, 0.175604, 0.990314, 0.399315, 0.339168, 0.601757, 0.538986, 0.122057, 0.637897, 0.947413, 0.912787, 0.51636, 0.761283, 0.49637, 0.424128, 0.555598, 0.170605, 0.323017, 0.666083, 0.114823, 0.790652, 0.341084, 0.737507, 0.745447, 0.0290631, 0.269597, 0.170961, 0.563971, 0.122285, 0.814161, 0.732418, 0.0886575, 0.238797, 0.203901, 0.274167, 0.0434857, 0.746683, 0.855452, 0.45143, 0.318749, 0.164572, 0.691934, 0.0904443, 0.582871, 0.741812, 0.303127, 0.295444, 0.0657427, 0.53616, 0.330586, 0.946181, 0.205412, 0.0506996, 0.658036, 0.321776, 0.785606, 0.675734, 0.898579, 0.389835, 0.344278, 0.499876, 0.87039, 0.486042, 0.128081, 0.983264, 0.0132197, 0.997429, 0.0517228, 0.817453, 0.90482, 0.482922, 0.562401, 0.79105, 0.0639654, 0.0137915, 0.8553, 0.990751, 0.06176, 0.793787, 0.488596, 0.320254, 0.172518, 0.70721, 0.241924, 0.0797175, 0.224385, 0.774535, 0.45973, 0.196836, 0.978669, 0.200328, 0.174035, 0.284143, 0.690624, 0.812423, 0.883215, 0.710838, 0.205723, 0.313969, 0.414089, 0.923223, 0.0447278, 0.303542, 0.530226, 0.396591, 0.946516, 0.580364, 0.0480212, 0.950906, 0.321324, 0.0490739, 0.19271, 0.186925, 0.190458, 0.217935, 0.279079, 0.701741, 0.822059, 0.665425, 0.879266, 0.550173, 0.799985, 0.772671, 0.227032, 0.241123, 0.173069, 0.628549, 0.487161, 0.387292, 0.77329, 0.838473, 0.505906, 0.0632759, 0.214405, 0.137743, 0.720014, 0.806922, 
0.796701, 0.52869, 0.423776, 0.140956, 0.4748, 0.412897, 0.347936, 0.0529069, 0.989484, 0.0128336, 0.564307, 0.0052823, 0.824553, 0.596908, 0.521481, 0.121875, 0.336453, 0.318245, 0.497799, 0.0122771, 0.214458, 0.0498897, 0.129829, 0.0144892, 0.647091, 0.77252, 0.117643, 0.782651, 0.426921, 0.860579, 0.915341, 0.259429, 0.470326, 0.0109178, 0.0174838, 0.847082, 0.788038, 0.0903463, 0.897862, 0.279333, 0.100074, 0.314668, 0.440822, 0.165319, 0.470265, 0.233672, 0.584429, 0.969813, 0.906782, 0.873632, 0.122227, 0.731775, 0.409216, 0.757996, 0.244736, 0.201908, 0.291042, 0.0708636, 0.742604, 0.294348, 0.892837, 0.505908, 0.119993, 0.337955, 0.16956, 0.514808, 0.385131, 0.844181, 0.569758, 0.636196, 0.290218, 0.262795, 0.59575, 0.378289, 0.18943, 0.530439, 0.877016, 0.504115, 0.744198, 0.931448, 0.729929, 0.81529, 0.151229, 0.998913, 0.683131, 0.533626, 0.98593, 0.789981, 0.0445395, 0.591837, 0.640817, 0.852281, 0.252853, 0.827442, 0.907725, 0.102757, 0.154082, 0.426429, 0.181066, 0.121804, 0.301885, 0.52507, 0.364617, 0.736206, 0.27141, 0.767552, 0.624946, 0.169726, 0.397797, 0.598205, 0.404912, 0.240592, 0.910604, 0.638112, 0.939611, 0.858809, 0.0343435, 0.783867, 0.477592, 0.609369, 0.493529, 0.080321, 0.0363176, 0.218583, 0.637044, 0.823448, 0.547286, 0.380453, 0.822243, 0.234039, 0.707453, 0.172201, 0.416438, 0.114823, 0.936148, 0.594684, 0.113408, 0.552317, 0.570245, 0.888815, 0.0233248, 0.039044, 0.655267, 0.137629, 0.776311, 0.980918, 0.655818, 0.501625, 0.0939003, 0.251409, 0.829194, 0.23166, 0.779821, 0.726178, 0.480105, 0.407399, 0.600394, 0.942736, 0.743737, 0.0551799, 0.962286, 0.0436438, 0.331146, 0.416607, 0.65115, 0.759709, 0.418019, 0.294796, 0.349766, 0.798081, 0.337856, 0.4491, 0.882495, 0.695294, 0.595625, 0.752904, 0.990689, 0.232347, 0.34245, 0.136882, 0.306638, 0.0838308, 0.415441, 0.827331, 0.857429, 0.275774, 0.417677, 0.954315, 0.833945, 0.929689, 0.759664, 0.00218092, 0.0914797, 0.684926, 0.437745, 0.639114, 0.378457, 0.561354, 0.26847, 0.521602, 0.692652, 0.563274, 0.561891, 0.0305475, 0.89507, 0.399713, 0.0675574, 0.104589, 0.814862, 0.812869, 0.245546, 0.735254, 0.158187, 0.613588, 0.135882, 0.637868, 0.968308, 0.496096, 0.539877, 0.238747, 0.781662, 0.0322914, 0.560642, 0.892621, 0.415119, 0.776161, 0.673384, 0.0767484, 0.676924, 0.302974, 0.383223, 0.543464, 0.962444, 0.0686625, 0.575793, 0.46524, 0.258082, 0.226922, 0.678342, 0.616491, 0.222386, 0.0482167, 0.954957, 0.731126, 0.750062, 0.869887, 0.125806, 0.14666, 0.973101, 0.56538, 0.477578, 0.581036, 0.905772, 0.146275, 0.145673, 0.6169, 0.774936, 0.324981, 0.755871, 0.548047, 0.908285, 0.989573, 0.384762, 0.171821, 0.193951, 0.633262, 0.543976, 0.67202, 0.978979, 0.504997, 0.722625, 0.282309, 0.481227, 0.540674, 0.128014, 0.038791, 0.199907, 0.193449, 0.239077, 0.575561, 0.186136, 0.210012, 0.731633, 0.274126, 0.725303, 0.613663, 0.00822674, 0.963265, 0.139653, 0.689404, 0.54929, 0.870192, 0.347242, 0.374234, 0.826712, 0.801345, 0.941538, 0.113853, 0.968834, 0.543264, 0.24821, 0.918565, 0.76508, 0.723339, 0.3206, 0.377777, 0.31583, 0.843659, 0.0774698, 0.504317, 0.205468, 0.780123, 0.0157715, 0.930318, 0.871669, 0.144924, 0.953773, 0.017101, 0.148008, 0.263657, 0.363006, 0.338895, 0.606832, 0.206961, 0.150894, 0.234902, 0.638176, 0.348061, 0.326417, 0.134896, 0.747952, 0.993909, 0.842481, 0.0685452, 0.762195, 0.511804, 0.300961, 0.513149, 0.360493, 0.889725, 0.324002, 0.310859, 0.982326, 0.0466702, 0.593704, 0.473467, 0.924189, 0.880248, 0.495106, 0.927142, 0.377748, 0.706322, 0.804293, 0.0720817, 0.540124, 
0.0865108, 0.224245, 0.850507, 0.660529, 0.179725, 0.49826, 0.369576, 0.255032, 0.540571, 0.695954, 0.302765, 0.369215, 0.214657, 0.946945, 0.125324, 0.775449, 0.921719, 0.397591, 0.287938, 0.371397, 0.917927, 0.551756, 0.121148, 0.0306337, 0.654094, 0.518487, 0.658671, 0.121926, 0.0492066, 0.148802, 0.490335, 0.134516, 0.382621, 0.23552, 0.358663, 0.466818, 0.635814, 0.207731, 0.376355, 0.54403, 0.873778, 0.71168, 0.26768, 0.0789914, 0.828967, 0.973269, 0.358554, 0.74177, 0.572445, 0.917102, 0.326703, 0.796317, 0.352011, 0.703486, 0.594356, 0.856993, 0.84417, 0.557007, 0.744967, 0.78415, 0.423279, 0.000984909, 0.647776, 0.740817, 0.914564, 0.461189, 0.77823, 0.136048, 0.475483, 0.5123, 0.554478, 0.962615, 0.982408, 0.0706779, 0.11223, 0.583584, 0.273575, 0.225014, 0.144659, 0.361406, 0.491196, 0.478521, 0.081707, 0.0301563, 0.444406, 0.358035, 0.719774, 0.863879, 0.123985, 0.79374, 0.632618, 0.113078, 0.612772, 0.62851, 0.622898, 0.502184, 0.727925, 0.778663, 0.819516, 0.746609, 0.98635, 0.698937, 0.1097, 0.318558, 0.779733, 0.890602, 0.917645, 0.611887, 0.45478, 0.576885, 0.443948, 0.667736, 0.0438628, 0.135404, 0.969112, 0.292939, 0.235428, 0.779689, 0.0654341, 0.113275, 0.0231172, 0.824732, 0.241197, 0.611541, 0.561196, 0.483876, 0.00924277, 0.72585, 0.0944964, 0.256522, 0.407835, 0.355858, 0.144608, 0.412801, 0.300103, 0.707576, 0.340839, 0.00384349, 0.20214, 0.815478, 0.0877837, 0.175005, 0.407143, 0.737904, 0.615384, 0.264751, 0.0491118, 0.105714, 0.310845, 0.0893253, 0.454752, 0.738792, 0.767073, 0.0513467, 0.55891, 0.415492, 0.352081, 0.668057, 0.173346, 0.591706, 0.396506, 0.701327, 0.762703, 0.198313, 0.367465, 0.233534, 0.936811, 0.32238, 0.921656, 0.257443, 0.966172, 0.877198, 0.457192, 0.0243984, 0.526178, 0.554127, 0.884106, 0.894318, 0.356031, 0.671404, 0.696101, 0.320449, 0.568093, 0.617295, 0.164693, 0.350625, 0.383226, 0.0108135, 0.248181, 0.583379, 0.377071, 0.692598, 0.506324, 0.277783, 0.441357, 0.911937, 0.592934, 0.454812, 0.902189, 0.76761, 0.200745, 0.124683, 0.258897, 0.0483087, 0.438118, 0.588599, 0.605622, 0.134144, 0.368316, 0.832876, 0.125596, 0.660043, 0.789469, 0.686678, 0.343531, 0.107777, 0.919302, 0.0344328, 0.976639, 0.652386, 0.645979, 0.644524, 0.165309, 0.806666, 0.164919, 0.897345, 0.761959, 0.4564, 0.63335, 0.10737, 0.831791, 0.629525, 0.742249, 0.730717, 0.477335, 0.351989, 0.213288, 0.888119, 0.957805, 0.525586, 0.557116, 0.768024, 0.00384484, 0.280657, 0.890381, 0.0863566, 0.506043, 0.230096, 0.124454, 0.40966, 0.72538, 0.416017, 0.857093, 0.139743, 0.647419, 0.684714, 0.117144, 0.130084, 0.216183, 0.814623, 0.00856023, 0.346146, 0.105752, 0.757444, 0.379316, 0.235754, 0.180546, 0.315897, 0.266971, 0.30603, 0.464522, 0.537007, 0.0650385, 0.664919, 0.558193, 0.870485, 0.814455, 0.10381, 0.618141, 0.209624, 0.0183247, 0.315218, 0.510714, 0.911416, 0.666801, 0.560948, 0.257764, 0.141677, 0.772748, 0.378741, 0.0294, 0.943343, 0.968386, 0.630751, 0.469737, 0.687592, 0.589797, 0.603339, 0.127696, 0.516254, 0.129981, 0.203356, 0.548175, 0.190491, 0.29231, 0.327737, 0.534063, 0.357824, 0.506308, 0.566082, 0.410452, 0.893481, 0.841993, 0.23095, 0.625385, 0.882701, 0.817725, 0.865872, 0.399528, 0.0597314, 0.588846, 0.599071, 0.203499, 0.424063, 0.404593, 0.871065, 0.918456, 0.252408, 0.659317, 0.928511, 0.643643, 0.529205, 0.176178, 0.68171, 0.145582, 0.614251, 0.0378445, 0.195719, 0.340559, 0.38689, 0.463108, 0.469367, 0.180748, 0.259177, 0.279443, 0.85095, 0.983037, 0.00437248, 0.567231, 0.163868, 0.814096, 0.918738, 0.361555, 0.298008, 0.809231, 
0.751634, 0.499868, 0.437117, 0.0794018, 0.671718, 0.980115, 0.474996, 0.663312, 0.42995, 0.255926, 0.373481, 0.433671, 0.934134, 0.552791, 0.778007, 0.496655, 0.815432, 0.743138, 0.647624, 0.62516, 0.422604, 0.344029, 0.987558, 0.749323, 0.934349, 0.705713, 0.721724, 0.139055, 0.190689, 0.0513739, 0.8871, 0.0976423, 0.0129031, 0.903139, 0.259316, 0.639287, 0.00283311, 0.676172, 0.0815618, 0.686434, 0.782848, 0.892786, 0.711916, 0.157514, 0.237505, 0.991095, 0.746823, 0.724759, 0.738336, 0.516411, 0.200567, 0.569235, 0.674083, 0.359822, 0.225426, 0.852373, 0.661127, 0.323018, 0.227818, 0.0834445, 0.796956, 0.914307, 0.061523, 0.333107, 0.825767, 0.682765, 0.559472, 0.934191, 0.482074, 0.437265, 0.887334, 0.745784, 0.167249, 0.491538, 0.221516, 0.737791, 0.741839, 0.909253, 0.056869, 0.232827, 0.5376, 0.173416, 0.941524, 0.993636, 0.700648, 0.0409675, 0.394662, 0.0675375, 0.298768, 0.953611, 0.232216, 0.969508, 0.0117371, 0.479528, 0.371552, 0.177228, 0.729354, 0.00619975, 0.940133, 0.974401, 0.385155, 0.00112071, 0.600007, 0.171806, 0.890442, 0.555444, 0.513182, 0.587277, 0.42137, 0.556605, 0.968129, 0.277854, 0.18552, 0.512399, 0.721237, 0.970731, 0.842254, 0.254123, 0.915771, 0.599749, 0.540368, 0.0909383, 0.222427, 0.00798128, 0.275542, 0.852573, 0.172977, 0.665911, 0.305373, 0.890036, 0.1658, 0.050611, 0.199428, 0.93813, 0.315431, 0.400092, 0.58941, 0.217099, 0.683975, 0.446791, 0.44217, 0.375851, 0.81193, 0.929381, 0.61943, 0.283778, 0.292124, 0.520079, 0.612808, 0.503171, 0.0519591, 0.781982, 0.0112063, 0.426082, 0.731963, 0.457992, 0.456833, 0.243453, 0.989789, 0.706306, 0.329734, 0.990201, 0.900563, 0.0176693, 0.610208, 0.328353, 0.515748, 0.487477, 0.727222, 0.244993, 0.109078, 0.9493, 0.164468, 0.623749, 0.550423, 0.837159, 0.614272, 0.98444, 0.202044, 0.974959, 0.213957, 0.0106738, 0.17913, 0.0573865, 0.898056, 0.400389, 0.228625, 0.100919, 0.571154, 0.557923, 0.588339, 0.713439, 0.954356, 0.612362, 0.31241, 0.108665, 0.483927, 0.134253, 0.0373658, 0.200006, 0.0884437, 0.875433, 0.0193375, 0.925027, 0.948458, 0.0449223, 0.470591, 0.468375, 0.766846, 0.986304, 0.0576001, 0.0450738, 0.8804, 0.856861, 0.376115, 0.832475, 0.277332, 0.462522, 0.396504, 0.129171, 0.0882361, 0.565923, 0.872913, 0.095828, 0.473219, 0.335848, 0.858329, 0.535445, 0.726678, 0.873368, 0.823086, 0.652207, 0.980642, 0.569979, 0.924064, 0.287433, 0.409364, 0.442375, 0.111637, 0.151033, 0.516042, 0.124917, 0.0960117, 0.810186, 0.999034, 0.660116, 0.811946, 0.982929, 0.798113, 0.319457, 0.109237, 0.997054, 0.103119, 0.888453, 0.66031, 0.963913, 0.336695, 0.406176, 0.771933, 0.364443, 0.339033, 0.229561, 0.883676, 0.562639, 0.0927408, 0.733701, 0.674673, 0.240753, 0.423767, 0.0745625, 0.424083, 0.0612876, 0.0606274, 0.187531, 0.486008, 0.682745, 0.509585, 0.431587, 0.553392, 0.188209, 0.958802, 0.60708, 0.864467, 0.28405, 0.313113, 0.257204, 0.401804, 0.48159, 0.943549, 0.341534, 0.978812, 0.345999, 0.853777, 0.505901, 0.336448, 0.348729, 0.0783527, 0.585875, 0.360183, 0.0983696, 0.203078, 0.638107, 0.636812, 0.622545, 0.301667, 0.778487, 0.46337, 0.955994, 0.365957, 0.519406, 0.0875206, 0.105982, 0.459095, 0.850821, 0.351398, 0.231456, 0.786247, 0.807717, 0.10981, 0.293254, 0.278256, 0.224314, 0.248935, 0.0625637, 0.25175, 0.859811, 0.475158, 0.648331, 0.186225, 0.728881, 0.473795, 0.0749256, 0.618376, 0.344138, 0.710286, 0.033892, 0.601635, 0.477196, 0.0579297, 0.414087, 0.999735, 0.467269, 0.57317, 0.487919, 0.688687, 0.99023, 0.952865, 0.342888, 0.162112, 0.779636, 0.606866, 0.879109, 0.102022, 0.136183, 
0.202889, 0.880669, 0.1599, 0.630564, 0.32756, 0.673156, 0.217449, 0.983035, 0.73512, 0.181484, 0.645252, 0.104394, 0.409139, 0.184051, 0.53203, 0.523733, 0.512527, 0.237084, 0.833945, 0.564631, 0.76993, 0.465717, 0.213226, 0.784222, 0.791822, 0.38873, 0.529419, 0.939541, 0.72214, 0.152588, 0.88955, 0.294117, 0.306257, 0.622878, 0.116946, 0.93452, 0.783403, 0.970702, 0.145569, 0.877434, 0.18653, 0.878954, 0.425119, 0.145386, 0.723857, 0.82325, 0.271551, 0.897703, 0.688842, 0.47597, 0.344448, 0.676271, 0.239445, 0.176618, 0.599697, 0.869274, 0.752876, 0.248275, 0.459451, 0.303438, 0.943959, 0.0233897, 0.689079, 0.131307, 0.276802, 0.252533, 0.216595, 0.260804, 0.615085, 0.165933, 0.932283, 0.0189436, 0.908477, 0.197883, 0.169859, 0.936382, 0.862876, 0.0519827, 0.946605, 0.860519, 0.837886, 0.807913, 0.259528, 0.75109, 0.319037, 0.945713, 0.614323, 0.0918232, 0.770758, 0.82073, 0.290179, 0.416657, 0.741327, 0.978016, 0.28442, 0.739228, 0.546107, 0.621335, 0.920388, 0.829804, 0.177259, 0.742716, 0.447198, 0.0543332, 0.571825, 0.307415, 0.00593027, 0.25037, 0.459257, 0.0344686, 0.856461, 0.415075, 0.192247, 0.882257, 0.782471, 0.078356, 0.478558, 0.983452, 0.665169, 0.907616, 0.575121, 0.952039, 0.583118, 0.317617, 0.508727, 0.09096, 0.863859, 0.509512, 0.0303329, 0.180813, 0.779974, 0.727272, 0.346672, 0.650674, 0.327377, 0.0122835, 0.43268, 0.0605496, 0.614263, 0.880262, 0.412614, 0.131595, 0.391251, 0.363229, 0.114194, 0.252032, 0.129373, 0.984332, 0.771638, 0.92286, 0.552292, 0.326887, 0.600196, 0.149717, 0.185568, 0.854102, 0.48371, 0.242598, 0.0710319, 0.41665, 0.579909, 0.0500394, 0.380013, 0.940424, 0.805382, 0.56579, 0.241516, 0.612358, 0.0355655, 0.426345, 0.13804, 0.575098, 0.526564, 0.45933, 0.0160067, 0.681902, 0.581776, 0.751469, 0.201621, 0.807885, 0.0914784, 0.559148, 0.446081, 0.477345, 0.606354, 0.7551, 0.22431, 0.450545, 0.80186, 0.67739, 0.603162, 0.282654, 0.40956, 0.088474, 0.179383, 0.0108372, 0.988068, 0.0397988, 0.859434, 0.948113, 0.586718, 0.806934, 0.545269, 0.546839, 0.990151, 0.319847, 0.767399, 0.0515536, 0.481835, 0.821892, 0.0410275, 0.48091, 0.758984, 0.0180316, 0.537854, 0.444937, 0.126727, 0.235564, 0.523965, 0.345516, 0.274046, 0.917508, 0.760029, 0.524916, 0.0662717, 0.0170895, 0.358614, 0.683861, 0.943013, 0.156716, 0.844151, 0.663587, 0.866454, 0.0168145, 0.826592, 0.82983, 0.207432, 0.547981, 0.407784, 0.94554, 0.184781, 0.721642, 0.454674, 0.00892544, 0.270488, 0.590793, 0.77615, 0.577345, 0.841434, 0.0208134, 0.316179, 0.654635, 0.130885, 0.856055, 0.531487, 0.837832, 0.892117, 0.303436, 0.892567, 0.257625, 0.345777, 0.439492, 0.339525, 0.680547, 0.924257, 0.556761, 0.360527, 0.0373072, 0.691515, 0.849526, 0.401483, 0.248336, 0.500587, 0.507617, 0.68883, 0.777038, 0.579384, 0.314063, 0.45054, 0.692744, 0.922073, 0.363166, 0.379201, 0.898792, 0.874305, 0.489274, 0.495039, 0.168172, 0.713977, 0.285693, 0.636691, 0.461037, 0.958287, 0.386788, 0.606482, 0.943823, 0.393971, 0.806899, 0.969267, 0.265483, 0.00688445, 0.126971, 0.889896, 0.163123, 0.180998, 0.659284, 0.277068, 0.738994, 0.261025, 0.620284, 0.981023, 0.878212, 0.584194, 0.433478, 0.172546, 0.239582, 0.900551, 0.731722, 0.72525, 0.0364797, 0.380943, 0.0560819, 0.636758, 0.379458, 0.496245, 0.201124, 0.525029, 0.097513, 0.713223, 0.777213, 0.321315, 0.588017, 0.137905, 0.0308744, 0.407071, 0.417575, 0.691682, 0.449768, 0.210941, 0.168387, 0.547847, 0.0190578, 0.358626, 0.217944, 0.266789, 0.384814, 0.178275, 0.319187, 0.176099, 0.648581, 0.132654, 0.59147, 0.441681, 0.193602, 0.64019, 0.224163, 
0.521791, 0.229483, 0.400564, 0.859051, 0.990699, 0.756986, 0.661818, 0.273199, 0.481812, 0.379571, 0.62626, 0.75063, 0.267153, 0.0848633, 0.57962, 0.153337, 0.505616, 0.816552, 0.72844, 0.873409, 0.0295758, 0.382443, 0.235175, 0.720607, 0.255155, 0.742532, 0.87929, 0.132569, 0.0199717, 0.658568, 0.425309, 0.163051, 0.57509, 0.512819, 0.882217, 0.110785, 0.121593, 0.499359, 0.501363, 0.0110796, 0.350897, 0.792638, 0.507977, 0.0209075, 0.276143, 0.508, 0.00724977, 0.103344, 0.209396, 0.0231938, 0.515563, 0.107003, 0.277002, 0.527344, 0.592236, 0.117529, 0.308302, 0.1221, 0.588145, 0.955425, 0.296622, 0.570674, 0.23232, 0.461135, 0.61222, 0.582919, 0.644999, 0.643474, 0.903756, 0.920346, 0.745006, 0.116656, 0.11924, 0.766236, 0.656377, 0.0764579, 0.108666, 0.240429, 0.0280635, 0.441572, 0.190907, 0.429092, 0.0417142, 0.390164, 0.856827, 0.039781, 0.569936, 0.573069, 0.430331, 0.791502, 0.660886, 0.659253, 0.477999, 0.255375, 0.842358, 0.0236607, 0.200562, 0.731403, 0.761028, 0.173075, 0.0320001, 0.596479, 0.00482521, 0.721248, 0.260674, 0.0856477, 0.505605, 0.684763, 0.274704, 0.314994, 0.0977564, 0.0548509, 0.428415, 0.755969, 0.202471, 0.54318, 0.762787, 0.239097, 0.502653, 0.0991959, 0.866603, 0.710436, 0.624354, 0.231825, 0.895833, 0.707724, 0.704006, 0.588331, 0.512995, 0.490282, 0.102971, 0.644206, 0.278309, 0.619492, 0.10739, 0.031569, 0.229596, 0.459631, 0.6857, 0.819448, 0.0975513, 0.148909, 0.526895, 0.499892, 0.753641, 0.627101, 0.794425, 0.665003, 0.219606, 0.597999, 0.538522, 0.062112, 0.383639, 0.236073, 0.59815, 0.540973, 0.908751, 0.7413, 0.2057, 0.70532, 0.245086, 0.530658, 0.134414, 0.195691, 0.276764, 0.944645, 0.239167, 0.146722, 0.97706, 0.396387, 0.193859, 0.735596, 0.458095, 0.590589, 0.220982, 0.713774, 0.577566, 0.864676, 0.699872, 0.154451, 0.929021, 0.951376, 0.506626, 0.164632, 0.595988, 0.0653519, 0.108536, 0.254216, 0.831912, 0.57108, 0.840132, 0.293258, 0.300906, 0.213836, 0.687519, 0.125429, 0.0154527, 0.0319853, 0.942009, 0.146573, 0.804681, 0.247562, 0.040849, 0.0813666, 0.297249, 0.176232, 0.295414, 0.551832, 0.649087, 0.780787, 0.247937, 0.597096, 0.0236449, 0.879439, 0.889063, 0.397344, 0.91092, 0.603655, 0.521426, 0.195081, 0.26693, 0.27918, 0.198366, 0.254383, 0.215563, 0.767551, 0.295591, 0.674947, 0.0420507, 0.423799, 0.825114, 0.950827, 0.622371, 0.712773, 0.245555, 0.576014, 0.414811, 0.288255, 0.119534, 0.62588, 0.700754, 0.749591, 0.956372, 0.648471, 0.592476, 0.838473, 0.842345, 0.4325, 0.47928, 0.455091, 0.926852, 0.783866, 0.377821, 0.963088, 0.0190461, 0.769527, 0.0576378, 0.902081, 0.409815, 0.564548, 0.136893, 0.809001, 0.432426, 0.975666, 0.973344, 0.611916, 0.963548, 0.340273, 0.873961, 0.36428, 0.740172, 0.467133, 0.524688, 0.934967, 0.370723, 0.361181, 0.356284, 0.000200186, 0.796003, 0.831472, 0.285829, 0.511629, 0.275481, 0.788226, 0.983344, 0.772781, 0.914294, 0.8378, 0.892115, 0.372784, 0.299782, 0.321983, 0.574391, 0.270408, 0.565983, 0.754949, 0.658373, 0.304438, 0.918812, 0.31163, 0.900469, 0.172526, 0.85195, 0.303503, 0.317889, 0.352573, 0.651841, 0.745108, 0.227289, 0.983212, 0.742816, 0.371624, 0.120159, 0.0976097, 0.930425, 0.458614, 0.845312, 0.714786, 0.199871, 0.113015, 0.26686, 0.431549, 0.0113646, 0.640045, 0.355533, 0.529051, 0.179699, 0.0987579, 0.999858, 0.0752601, 0.850005, 0.674929, 0.567078, 0.35619, 0.345429, 0.242207, 0.487314, 0.516147, 0.693737, 0.0315665, 0.546881, 0.989227, 0.437677, 0.398134, 0.189141, 0.236878, 0.563278, 0.221663, 0.177436, 0.881522, 0.0182119, 0.382613, 0.59822, 0.666022, 0.429921, 
0.738024, 0.0451276, 0.39517, 0.959361, 0.302131, 0.0510049, 0.0609704, 0.240774, 0.303435, 0.89799, 0.0217171, 0.463106, 0.37171, 0.636875, 0.436676, 0.601809, 0.733721, 0.861151, 0.64748, 0.437599, 0.935772, 0.0365896, 0.6243, 0.0631108, 0.36212, 0.220988, 0.47579, 0.688549, 0.991528, 0.344518, 0.197676, 0.473154, 0.174777, 0.682596, 0.560983, 0.655032, 0.302366, 0.274753, 0.245009, 0.404786, 0.99377, 0.426709, 0.845011, 0.970397, 0.931478, 0.444876, 0.0903494, 0.696205, 0.17419, 0.293931, 0.87711, 0.315322, 0.506828, 0.35034, 0.758811, 0.854096, 0.973935, 0.738458, 0.390207, 0.0589456, 0.966723, 0.0260099, 0.965222, 0.427509, 0.90527, 0.344511, 0.689796, 0.456483, 0.223026, 0.1372, 0.0515334, 0.640919, 0.351446, 0.477286, 0.865731, 0.978304, 0.736284, 0.312658, 0.16982, 0.777655, 0.136753, 0.993783, 0.283717, 0.206131, 0.549611, 0.445301, 0.542021, 0.238171, 0.842911, 0.362444, 0.75791, 0.836408, 0.292169, 0.885162, 0.93835, 0.144114, 0.0772436, 0.326158, 0.844657, 0.244554, 0.122619, 0.20847, 0.649231, 0.342979, 0.893815, 0.113022, 0.60074, 0.325661, 0.0136643, 0.525173, 0.132422, 0.187629, 0.332155, 0.0305176, 0.13807, 0.276103, 0.949142, 0.161537, 0.0181558, 0.935399, 0.229618, 0.683962, 0.213664, 0.426524, 0.583467, 0.65502, 0.470636, 0.859739, 0.316601, 0.667715, 0.138434, 0.359214, 0.0113959, 0.807722, 0.605412, 0.766416, 0.317377, 0.479528, 0.880543, 0.741589, 0.244249, 0.132463, 0.699367, 0.717938, 0.786699, 0.600406, 0.984877, 0.735642, 0.897786, 0.849399, 0.474007, 0.72506, 0.660477, 0.440065, 0.598694, 0.513666, 0.639673, 0.49764, 0.652242, 0.166539, 0.518156, 0.397441, 0.0180683, 0.368824, 0.0028388, 0.49821, 0.560417, 0.595594, 0.387247, 0.78124, 0.178909, 0.400339, 0.0589393, 0.536067, 0.886363, 0.844398, 0.123283, 0.212658, 0.201029, 0.734139, 0.167901, 0.0869218, 0.27254, 0.608849, 0.219371, 0.681524, 0.579072, 0.725842, 0.227678, 0.0397728, 0.348688, 0.483874, 0.441994, 0.83314, 0.741778, 0.862656, 0.289275, 0.549896, 0.690008, 0.40435, 0.332849, 0.057495, 0.190851, 0.0614963, 0.372094, 0.494503, 0.223638, 0.121088, 0.694385, 0.164, 0.978939, 0.0265477, 0.783247, 0.364793, 0.0140538, 0.113429, 0.0859375, 0.272075, 0.511651, 0.208955, 0.148247, 0.776097, 0.266488, 0.136438, 0.738454, 0.611663, 0.56791, 0.833214, 0.959146, 0.358978, 0.207743, 0.537063, 0.437717, 0.60199, 0.383474, 0.213916, 0.125251, 0.78066, 0.482704, 0.134592, 0.880776, 0.801872, 0.208513, 0.960464, 0.947722, 0.641671, 0.202998, 0.475369, 0.12731, 0.0263268, 0.650302, 0.363487, 0.603842, 0.0697654, 0.533586, 0.720898, 0.663448, 0.584509, 0.0694609, 0.924275, 0.822067, 0.375415, 0.799756, 0.694115, 0.580293, 0.334283, 0.0922166, 0.171261, 0.71009, 0.695641, 0.983752, 0.204798, 0.725317, 0.501215, 0.326378, 0.0791494, 0.801844, 0.0524501, 0.538012, 0.731976, 0.639471, 0.191962, 0.938879, 0.854176, 0.626389, 0.435476, 0.213822, 0.28105, 0.511943, 0.920372, 0.966166, 0.818484, 0.491012, 0.188083, 0.632422, 0.123717, 0.568356, 0.165152, 0.172649, 0.46527, 0.78615, 0.675015, 0.306201, 0.158993, 0.0792014, 0.967042, 0.729708, 0.488759, 0.417579, 0.836831, 0.0639718, 0.0951642, 0.407389, 0.247113, 0.057035, 0.91239, 0.768096, 0.133332, 0.06445, 0.319766, 0.0494294, 0.36631, 0.409032, 0.150527, 0.341765, 0.972424, 0.773659, 0.0102969, 0.0621577, 0.510706, 0.412811, 0.5055, 0.460341, 0.530126, 0.577187, 0.143287, 0.0889278, 0.24435, 0.954416, 0.333653, 0.793141, 0.422005, 0.360843, 0.186255, 0.144739, 0.663774, 0.248813, 0.194538, 0.957817, 0.891305, 0.201715, 0.449246, 0.286602, 0.438681, 0.063516, 0.86176, 
0.607669, 0.458845, 0.192139, 0.467602, 0.498938, 0.528959, 0.628244, 0.307133, 0.502113, 0.790074, 0.439137, 0.297278, 0.0855112, 0.993146, 0.341803, 0.429483, 0.489207, 0.097826, 0.387804, 0.105836, 0.832144, 0.686801, 0.353345, 0.270722, 0.453971, 0.166628, 0.360213, 0.586645, 0.592227, 0.87898, 0.619847, 0.470664, 0.494789, 0.124589, 0.814768, 0.105868, 0.570889, 0.53332, 0.0484772, 0.234767, 0.0405342, 0.312458, 0.28473, 0.960765, 0.0786981, 0.942417, 0.804688, 0.336259, 0.63046, 0.23329, 0.851628, 0.887926, 0.999308, 0.300862, 0.792434, 0.0593845, 0.426165, 0.474874, 0.432486, 0.646565, 0.225466, 0.332777, 0.908149, 0.719494, 0.284807, 0.835582, 0.311455, 0.0362688, 0.31152, 0.128829, 0.397832, 0.285144, 0.20932, 0.988974, 0.50815, 0.0308556, 0.302613, 0.0360845, 0.524814, 0.768544, 0.705382, 0.199072, 0.607121, 0.136406, 0.470687, 0.203016, 0.729325, 0.333073, 0.746358, 0.195277, 0.304061, 0.487599, 0.871167, 0.722015, 0.267789, 0.977434, 0.393249, 0.522215, 0.666793, 0.892122, 0.623572, 0.937274, 0.206294, 0.758486, 0.23332, 0.906251, 0.812943, 0.987073, 0.748163, 0.552509, 0.0403306, 0.812408, 0.628796, 0.653199, 0.349886, 0.51297, 0.401466, 0.746051, 0.644171, 0.919166, 0.453167, 0.544111, 0.864165, 0.227377, 0.78908, 0.275189, 0.635981, 0.716067, 0.586381, 0.632425, 0.399512, 0.105899, 0.260164, 0.107326, 0.780907, 0.443946, 0.470192, 0.540053, 0.0619289, 0.471238, 0.824157, 0.531286, 0.448794, 0.908165, 0.274989, 0.418133, 0.898903, 0.143515, 0.836127, 0.0344939, 0.662182, 0.179498, 0.327589, 0.796382, 0.810047, 0.734709, 0.881899, 0.47577, 0.215189, 0.00626095, 0.797087, 0.119078, 0.38015, 0.463575, 0.0306364, 0.873173, 0.806818, 0.573985, 0.648762, 0.430314, 0.512317, 0.605053, 0.330103, 0.648338, 0.995127, 0.764925, 0.836998, 0.410201, 0.871226, 0.388434, 0.644309, 0.849519, 0.0508844, 0.868112, 0.662296, 0.28919, 0.582254, 0.758399, 0.138326, 0.774859, 0.065396, 0.848847, 0.0188271, 0.283098, 0.622852, 0.778154, 0.880147, 0.260418, 0.940757, 0.815913, 0.524643, 0.330396, 0.276408, 0.205845, 0.84478, 0.392218, 0.735127, 0.043299, 0.849097, 0.518467, 0.586761, 0.250364, 0.500118, 0.662909, 0.934101, 0.00782292, 0.452582, 0.335459, 0.981512, 0.272505, 0.727543, 0.136702, 0.494222, 0.378457, 0.0540869, 0.631997, 0.0824607, 0.16458, 0.797114, 0.0161222, 0.269075, 0.326394, 0.782215, 0.861174, 0.581392, 0.754731, 0.0722561, 0.583109, 0.497678, 0.0217069, 0.442749, 0.274421, 0.740624, 0.871762, 0.593714, 0.817941, 0.920725, 0.663765, 0.885732, 0.607902, 0.819708, 0.927395, 0.774414, 0.59827, 0.463188, 0.709475, 0.514606, 0.983765, 0.958663, 0.0928098, 0.0947121, 0.0565686, 0.616457, 0.135232, 0.419006, 0.660529, 0.857965, 0.951692, 0.597543, 0.114092, 0.0280256, 0.40186, 0.346991, 0.0407271, 0.831748, 0.205171, 0.420474, 0.48008, 0.757137, 0.687575, 0.806059, 0.213349, 0.716053, 0.441288, 0.217007, 0.420978, 0.523437, 0.404101, 0.834459, 0.580157, 0.481358, 0.867783, 0.00923892, 0.6734, 0.813537, 0.0828359, 0.266422, 0.680127, 0.0949555, 0.753066, 0.293837, 0.484535, 0.642965, 0.73337, 0.813223, 0.0288107, 0.0860963, 0.766266, 0.983775, 0.343079, 0.782403, 0.0921881, 0.977474, 0.108858, 0.982138, 0.325952, 0.971741, 0.675473, 0.875188, 0.0120778, 0.662449, 0.594604, 0.399967, 0.58295, 0.927819, 0.420584, 0.212146, 0.120091, 0.447372, 0.192807, 0.166516, 0.810726, 0.639078, 0.504555, 0.895836, 0.260907, 0.331909, 0.308899, 0.676879, 0.41273, 0.00506582, 0.428657, 0.805133, 0.103065, 0.121083, 0.375458, 0.520363, 0.604543, 0.429635, 0.438148, 0.760166, 0.766464, 0.0438251, 0.740575, 
0.0293269, 0.183323, 0.00930769, 0.698414, 0.896991, 0.61929, 0.436696, 0.441205, 0.622654, 0.615977, 0.756678, 0.128649, 0.749992, 0.872171, 0.797274, 0.130622, 0.478863, 0.446768, 0.89785, 0.513121, 0.194588, 0.231796, 0.954301, 0.576324, 0.0562747, 0.61085, 0.577743, 0.545555, 0.363614, 0.843892, 0.400062, 0.717187, 0.740086, 0.528511, 0.779256, 0.745451, 0.589778, 0.977115, 0.251186, 0.916301, 0.761167, 0.0847637, 0.0230334, 0.14641, 0.725114, 0.618926, 0.432685, 0.561266, 0.128284, 0.887923, 0.045822, 0.493046, 0.143171, 0.671241, 0.679747, 0.335673, 0.115841, 0.93615, 0.339681, 0.247698, 0.0401278, 0.889537, 0.260072, 0.173812, 0.0365525, 0.204306, 0.84639, 0.832188, 0.141863, 0.0520722, 0.107015, 0.752642, 0.866039, 0.517256, 0.701401, 0.395515, 0.711873, 0.646394, 0.264936, 0.0187599, 0.369458, 0.904164, 0.0935391, 0.174715, 0.14083, 0.239559, 0.89748, 0.934013, 0.787458, 0.246227, 0.702091, 0.615122, 0.398795, 0.894223, 0.340275, 0.647777, 0.0492552, 0.907233, 0.504857, 0.501728, 0.626655, 0.648525, 0.194499, 0.0128158, 0.73601, 0.401778, 0.41494, 0.102481, 0.2945, 0.236058, 0.293323, 0.465413, 0.281178, 0.137138, 0.7493, 0.371651, 0.946542, 0.323555, 0.0745932, 0.157936, 0.998219, 0.877153, 0.125826, 0.324891, 0.34621, 0.241885, 0.676998, 0.931328, 0.930489, 0.250319, 0.602469, 0.834009, 0.346904, 0.101225, 0.546648, 0.554059, 0.376143, 0.295871, 0.833185, 0.204503, 0.186921, 0.863372, 0.822428, 0.603087, 0.921182, 0.704988, 0.142363, 0.221817, 0.639942, 0.279948, 0.502842, 0.754451, 0.218131, 0.848244, 0.500334, 0.724128, 0.524567, 0.380775, 0.143284, 0.412951, 0.667918, 0.515343, 0.381574, 0.080528, 0.262058, 0.890428, 0.312059, 0.464009, 0.584185, 0.52156, 0.21197, 0.651816, 0.167206, 0.884292, 0.720491, 0.917117, 0.821234, 0.167817, 0.857364, 0.149155, 0.268429, 0.775817, 0.427542, 0.892723, 0.352895, 0.66515, 0.474889, 0.375345, 0.225294, 0.804309, 0.875738, 0.486721, 0.122004, 0.722886, 0.297956, 0.0397858, 0.515044, 0.87911, 0.211498, 0.442214, 0.634027, 0.851447, 0.0666484, 0.0393785, 0.553852, 0.528016, 0.674943, 0.191433, 0.438034, 0.629693, 0.90824, 0.248927, 0.0244725, 0.905935, 0.566168, 0.33751, 0.178979, 0.300227, 0.601985, 0.873492, 0.738763, 0.502438, 0.319074, 0.113825, 0.183288, 0.899694, 0.767436, 0.468309, 0.709561, 0.86515, 0.487377, 0.21688, 0.684647, 0.576559, 0.863409, 0.550658, 0.566675, 0.882127, 0.742264, 0.798776, 0.0203523, 0.252537, 0.322721, 0.56767, 0.56067, 0.504041, 0.398241, 0.796125, 0.850756, 0.0350052, 0.644953, 0.674127, 0.206868, 0.860659, 0.199265, 0.857225, 0.429107, 0.822651, 0.665956, 0.00757618, 0.743728, 0.793881, 0.356099, 0.50414, 0.659538, 0.0998927, 0.790683, 0.0348981, 0.379323, 0.151468, 0.498949, 0.208541, 0.87533, 0.924126, 0.964347, 0.317048, 0.421789, 0.704327, 0.664026, 0.533839, 0.392701, 0.140075, 0.987913, 0.03069, 0.313169, 0.354796, 0.269842, 0.78199, 0.287759, 0.339711, 0.303852, 0.412044, 0.00941863, 0.975561, 0.854622, 0.160671, 0.154996, 0.864365, 0.835893, 0.248002, 0.618884, 0.713282, 0.00944035, 0.961391, 0.335907, 0.929753, 0.998946, 0.556517, 0.412419, 0.233943, 0.947435, 0.375575, 0.319604, 0.716044, 0.162663, 0.637383, 0.806768, 0.0856161, 0.956034, 0.245337, 0.551991, 0.851346, 0.254167, 0.744982, 0.564446, 0.429007, 0.870564, 0.376924, 0.904205, 0.885564, 0.214906, 0.183022, 0.925709, 0.357906, 0.288384, 0.605114, 0.614396, 0.396703, 0.0197939, 0.243897, 0.975216, 0.704497, 0.476525, 0.548474, 0.359391, 0.0163667, 0.666657, 0.311121, 0.0940198, 0.941416, 0.325115, 0.975641, 0.658675, 0.00407639, 0.365521, 
0.692277, 0.372779, 0.200639, 0.674376, 0.380635, 0.531602, 0.693364, 0.227682, 0.84193, 0.950013, 0.451081, 0.514958, 0.670572, 0.711082, 0.364604, 0.648132, 0.523288, 0.450453, 0.53099, 0.988543, 0.669159, 0.830524, 0.290807, 0.140858, 0.799549, 0.583342, 0.257869, 0.59006, 0.217784, 0.197247, 0.938884, 0.36196, 0.844371, 0.0248709, 0.883103, 0.370168, 0.398312, 0.311908, 0.47474, 0.822861, 0.707444, 0.9443, 0.239513, 0.771527, 0.297216, 0.811968, 0.469184, 0.306376, 0.400149, 0.147905, 0.682306, 0.293364, 0.364349, 0.688556, 0.585631, 0.805093, 0.424534, 0.0733822, 0.543836, 0.733804, 0.649467, 0.30935, 0.685181, 0.858836, 0.517044, 0.47701, 0.638474, 0.0184836, 0.225102, 0.315323, 0.797589, 0.0778301, 0.0475099, 0.87561, 0.75544, 0.904796, 0.945879, 0.028123, 0.344476, 0.386814, 0.896624, 0.798412, 0.766277, 0.939873, 0.12153, 0.217026, 0.527635, 0.175489, 0.494636, 0.904428, 0.655793, 0.541505, 0.642757, 0.972037, 0.130352, 0.911651, 0.223711, 0.0406087, 0.381063, 0.746494, 0.827658, 0.0772224, 0.272106, 0.987622, 0.0052656, 0.606502, 0.85071, 0.0231136, 0.0652533, 0.857008, 0.00692946, 0.0733086, 0.367627, 0.315969, 0.292009, 0.232548, 0.994711, 0.998285, 0.308188, 0.584667, 0.752991, 0.191141, 0.240298, 0.0564915, 0.605211, 0.0147917, 0.550111, 0.206021, 0.244289, 0.135031, 0.237319, 0.40131, 0.26777, 0.532085, 0.177655, 0.868724, 0.485011, 0.371253, 0.185282, 0.644731, 0.622103, 0.855553, 0.56232, 0.417923, 0.787789, 0.91939, 0.491437, 0.520566, 0.59465, 0.701258, 0.458588, 0.472785, 0.430358, 0.641993, 0.278947, 0.81911, 0.706891, 0.29567, 0.705872, 0.354766, 0.00397476, 0.866153, 0.226897, 0.505979, 0.164834, 0.809293, 0.149693, 0.523287, 0.740916, 0.132538, 0.931738, 0.0903063, 0.371996, 0.907401, 0.813538, 0.92406, 0.307826, 0.60004, 0.969938, 0.744216, 0.926652, 0.526372, 0.741083, 0.948732, 0.82016, 0.185162, 0.575103, 0.210604, 0.514917, 0.845905, 0.323327, 0.40058, 0.0702564, 0.92683, 0.293106, 0.57313, 0.577772, 0.290759, 0.159196, 0.683577, 0.834745, 0.949849, 0.255035, 0.222031, 0.821555, 0.158011, 0.254514, 0.951792, 0.312939, 0.623394, 0.0596259, 0.860424, 0.183174, 0.730661, 0.955133, 0.836548, 0.779002, 0.0728992, 0.383247, 0.452559, 0.655328, 0.622909, 0.308009, 0.599691, 0.475055, 0.49587, 0.310641, 0.138457, 0.551152, 0.426814, 0.300667, 0.703175, 0.352065, 0.538981, 0.860645, 0.90593, 0.127653, 0.710765, 0.822372, 0.208033, 0.445604, 0.188133, 0.589692, 0.726903, 0.465544, 0.00409995, 0.987988, 0.979211, 0.24932, 0.704111, 0.730765, 0.534288, 0.235308, 0.744655, 0.943391, 0.38814, 0.462258, 0.826034, 0.109031, 0.628534, 0.511483, 0.0223714, 0.0708374, 0.630432, 0.300738, 0.919946, 0.463439, 0.558466, 0.78058, 0.37532, 0.98128, 0.528785, 0.737494, 0.6098, 0.695515, 0.122953, 0.646288, 0.385564, 0.737697, 0.131926, 0.414746, 0.0422695, 0.574735, 0.49658, 0.800849, 0.687207, 0.792034, 0.0924931, 0.386161, 0.663946, 0.395455, 0.106021, 0.74884, 0.681213, 0.908718, 0.421009, 0.58296, 0.000902615, 0.963181, 0.664368, 0.495164, 0.611767, 0.765692, 0.0809122, 0.452475, 0.934372, 0.979904, 0.915181, 0.398806, 0.221098, 0.129262, 0.421468, 0.622208, 0.0266502, 0.0698893, 0.666035, 0.0788684, 0.644953, 0.905166, 0.32171, 0.158854, 0.264691, 0.776421, 0.54107, 0.437091, 0.534772, 0.127446, 0.98168, 0.750536, 0.477281, 0.0383675, 0.474524, 0.732434, 0.471001, 0.300117, 0.397921, 0.269747, 0.655277, 0.901436, 0.907577, 0.225375, 0.479693, 0.200836, 0.177513, 0.0870924, 0.722634, 0.892637, 0.784116, 0.921343, 0.826337, 0.287859, 0.023142, 0.854584, 0.507308, 0.198762, 0.997025, 
0.423951, 0.281525, 0.619001, 0.382824, 0.0785872, 0.468601, 0.511196, 0.346127, 0.952124, 0.148085, 0.437411, 0.945594, 0.395731, 0.160254, 0.489413, 0.754752, 0.3042, 0.653328, 0.387466, 0.548743, 0.42956, 0.739894, 0.832006, 0.138709, 0.0494386, 0.981079, 0.386781, 0.207027, 0.515991, 0.0338838, 0.842002, 0.399235, 0.95634, 0.715616, 0.5179, 0.959534, 0.559731, 0.0923886, 0.477529, 0.548104, 0.597878, 0.897928, 0.192379, 0.241017, 0.927968, 0.0923328, 0.469445, 0.319189, 0.766991, 0.672913, 0.802884, 0.335926, 0.454531, 0.549084, 0.483673, 0.493887, 0.413477, 0.651473, 0.875949, 0.922685, 0.880461, 0.779535, 0.967865, 0.122564, 0.0563977, 0.231148, 0.898896, 0.408888, 0.14527, 0.914878, 0.0976806, 0.56189, 0.61785, 0.668755, 0.481494, 0.799514, 0.508923, 0.0842405, 0.32201, 0.125059, 0.995587, 0.152596, 0.0496574, 0.261351, 0.0341027, 0.933704, 0.243774, 0.550941, 0.186483, 0.518109, 0.125313, 0.672872, 0.153328, 0.886958, 0.913331, 0.91518, 0.847563, 0.923794, 0.988247, 0.58427, 0.146684, 0.151417, 0.656044, 0.40859, 0.14817, 0.685165, 0.445984, 0.19485, 0.432712, 0.209893, 0.0505005, 0.790353, 0.5032, 0.081458, 0.0603039, 0.640263, 0.0914454, 0.777219, 0.460987, 0.138563, 0.382623, 0.842088, 0.641148, 0.54497, 0.326583, 0.0199233, 0.925389, 0.605686, 0.167242, 0.266449, 0.189608, 0.68873, 6.97044e-05, 0.272176, 0.534916, 0.286065, 0.699928, 0.0199234, 0.118455, 0.702812, 0.0438006, 0.602831, 0.301757, 0.0114333, 0.362431, 0.170829, 0.889331, 0.959898, 0.220713, 0.942668, 0.473341, 0.245121, 0.401105, 0.908635, 0.764165, 0.345782, 0.2635, 0.766043, 0.535833, 0.884238, 0.697501, 0.241719, 0.647971, 0.382307, 0.0776414, 0.543534, 0.625013, 0.303934, 0.942436, 0.958443, 0.100776, 0.251232, 0.603529, 0.396556, 0.923066, 0.548554, 0.220382, 0.548643, 0.274279, 0.164581, 0.95505, 0.25951, 0.261906, 0.577789, 0.741105, 0.43178, 0.671708, 0.331355, 0.39507, 0.410122, 0.310539, 0.631777, 0.990829, 0.458913, 0.384965, 0.000951788, 0.666651, 0.684922, 0.515917, 0.43763, 0.390086, 0.340545, 0.8425, 0.847068, 0.611617, 0.701035, 0.806373, 0.139428, 0.123705, 0.370887, 0.356397, 0.270458, 0.179828, 0.510626, 0.705079, 0.188944, 0.453232, 0.207811, 0.0473841, 0.876157, 0.352782, 0.992691, 0.990362, 0.614067, 0.341219, 0.109603, 0.434749, 0.409946, 0.926753, 0.927122, 0.196243, 0.78931, 0.242777, 0.0833232, 0.048814, 0.673235, 0.786945, 0.132571, 0.754882, 0.730221, 0.351982, 0.810955, 0.215565, 0.733762, 0.419478, 0.0429883, 0.0152969, 0.877608, 0.643404, 0.386359, 0.692136, 0.485291, 0.294757, 0.62227, 0.432487, 0.823588, 0.597304, 0.390734, 0.130017, 0.0323171, 0.0681704, 0.542906, 0.147647, 0.510956, 0.343039, 0.304513, 0.470067, 0.780405, 0.708418, 0.895061, 0.317382, 0.698365, 0.49651, 0.805618, 0.876528, 0.159333, 0.084484, 0.885828, 0.568193, 0.715078, 0.0726656, 0.720662, 0.242088, 0.407123, 0.740762, 0.100173, 0.344778, 0.183769, 0.447317, 0.274635, 0.796297, 0.660071, 0.587159, 0.136121, 0.0276473, 0.0709116, 0.836785, 0.274939, 0.87, 0.829027, 0.530006, 0.11207, 0.787696, 0.560119, 0.734515, 0.587682, 0.87197, 0.625314, 0.658184, 0.624214, 0.0705072, 0.297713, 0.76411, 0.877976, 0.045017, 0.263096, 0.297053, 0.609203, 0.441216, 0.742644, 0.327943, 0.288434, 0.101241, 0.259137, 0.565927, 0.268681, 0.10388, 0.615863, 0.883317, 0.522543, 0.460196, 0.504551, 0.0737397, 0.357484, 0.872328, 0.235187, 0.662268, 0.987824, 0.912386, 0.829971, 0.851458, 0.0526801, 0.520847, 0.366275, 0.227224, 0.551578, 0.0471393, 0.377022, 0.727276, 0.811789, 0.325827, 0.381726, 0.267145, 0.564409, 0.647686, 
0.797945, 0.387828, 0.481548, 0.50858, 0.915173, 0.12693, 0.455427, 0.449081, 0.384271, 0.374061, 0.097169, 0.420527, 0.245558, 0.49597, 0.179161, 0.182186, 0.284332, 0.862269, 0.905787, 0.824996, 0.0911025, 0.850194, 0.305712, 0.538812, 0.680032, 0.825729, 0.774434, 0.00497431, 0.739186, 0.89747, 0.350157, 0.839806, 0.162065, 0.0315357, 0.171089, 0.309725, 0.509352, 0.897451, 0.254073, 0.589491, 0.18473, 0.779303, 0.287892, 0.529953, 0.698098, 0.498781, 0.17027, 0.807804, 0.713066, 0.443851, 0.147509, 0.476324, 0.0849093, 0.293804, 0.316159, 0.851617, 0.785632, 0.278999, 0.922958, 0.918628, 0.00110848, 0.855476, 0.939888, 0.838905, 0.454722, 0.049078, 0.0690804, 0.811063, 0.237738, 0.285593, 0.724508, 0.752612, 0.465725, 0.543701, 0.245843, 0.613217, 0.889532, 0.783317, 0.923225, 0.363409, 0.861243, 0.94711, 0.845222, 0.297557, 0.828068, 0.162559, 0.895627, 0.482789, 0.956056, 0.960334, 0.525915, 0.864784, 0.243446, 0.132257, 0.0355159, 0.48747, 0.13702, 0.0487109, 0.958976, 0.682305, 0.697404, 0.517756, 0.271305, 0.348725, 0.514959, 0.146714, 0.706208, 0.65107, 0.496005, 0.363852, 0.899967, 0.815151, 0.974469, 0.0844418, 0.440561, 0.911199, 0.762465, 0.597601, 0.24989, 0.42485, 0.688378, 0.997864, 0.133362, 0.259394, 0.0406596, 0.0860533, 0.158273, 0.158292, 0.797543, 0.538996, 0.480008, 0.422689, 0.182056, 0.669835, 0.363054, 0.146077, 0.902395, 0.629381, 0.0684445, 0.366411, 0.905245, 0.95533, 0.787215, 0.90363, 0.421145, 0.389334, 0.812495, 0.655009, 0.873205, 0.454083, 0.371221, 0.417499, 0.137931, 0.798543, 0.713944, 0.34444, 0.591934, 0.568805, 0.305445, 0.0535376, 0.873742, 0.429269, 0.166098, 0.168313, 0.404932, 0.0374966, 0.0669818, 0.89088, 0.0404485, 0.443517, 0.260303, 0.19161, 0.765812, 0.365226, 0.611378, 0.235126, 0.363451, 0.892738, 0.529157, 0.651125, 0.153076, 0.649078, 0.969791, 0.276867, 0.753009, 0.198297, 0.683023, 0.052406, 0.0254117, 0.00338464, 0.973576, 0.674069, 0.836126, 0.182508, 0.996128, 0.424246, 0.782711, 0.621323, 0.897955, 0.267909, 0.127903, 0.956464, 0.223168, 0.85823, 0.726266, 0.384882, 0.133664, 0.527722, 0.780722, 0.687852, 0.360357, 0.668218, 0.408891, 0.668433, 0.239858, 0.364373, 0.746525, 0.0194728, 0.346705, 0.0229462, 0.458635, 0.268359, 0.901841, 0.668297, 0.306051, 0.256688, 0.134941, 0.56859, 0.604442, 0.454286, 0.507528, 0.847724, 0.214442, 0.392379, 0.632621, 0.906297, 0.458079, 0.433776, 0.30273, 0.884566, 0.421768, 0.935194, 0.0921849, 0.423245, 0.615781, 0.869782, 0.739985, 0.762898, 0.655259, 0.332713, 0.79574, 0.0642832, 0.691164, 0.511502, 0.80821, 0.970405, 0.416269, 0.291265, 0.652546, 0.635271, 0.807694, 0.0431208, 0.493358, 0.256506, 0.976263, 0.23967, 0.126852, 0.497923, 0.747758, 0.289456, 0.0884913, 0.715343, 0.261956, 0.829906, 0.0533743, 0.505959, 0.246809, 0.0982734, 0.187019, 0.253518, 0.565694, 0.882792, 0.261432, 0.96548, 0.565113, 0.573391, 0.954133, 0.0158304, 0.605275, 0.278867, 0.557482, 0.0823503, 0.975507, 0.917146, 0.659914, 0.545931, 0.159105, 0.0839864, 0.994018, 0.938603, 0.0114792, 0.381102, 0.592332, 0.493459, 0.623574, 0.209693, 0.23726, 0.0727749, 0.584468, 0.987599, 0.685465, 0.904302, 0.660279, 0.0156542, 0.880792, 0.995401, 0.432219, 0.766088, 0.951417, 0.29196, 0.250044, 0.644601, 0.185979, 0.278347, 0.560231, 0.503529, 0.669737, 0.204006, 0.659249, 0.588698, 0.258539, 0.917531, 0.526103, 0.294503, 0.749646, 0.893066, 0.0400344, 0.933276, 0.298697, 0.332565, 0.120745, 0.32405, 0.437418, 0.343717, 0.275915, 0.687726, 0.378401, 0.116547, 0.0586583, 0.954957, 0.354961, 0.459451, 0.381851, 0.242585, 
0.0527297, 0.387011, 0.78615, 0.679871, 0.0969096, 0.974251, 0.375427, 0.981451, 0.424016, 0.52282, 0.211821, 0.144911, 0.012798, 0.378344, 0.350074, 0.027395, 0.614111, 0.622761, 0.760705, 0.565331, 0.695602, 0.246383, 0.0715231, 0.665014, 0.342275, 0.446794, 0.632261, 0.8071, 0.230686, 0.472806, 0.495188, 0.0590397, 0.0258673, 0.994041, 0.0284946, 0.67796, 0.3943, 0.868992, 0.901415, 0.830276, 0.81449, 0.372918, 0.427919, 0.0609839, 0.498254, 0.928328, 0.969557, 0.00875862, 0.531724, 0.2854, 0.690292, 0.93172, 0.440842, 0.107637, 0.604463, 0.104505, 0.456251, 0.124982, 0.0394257, 0.90127, 0.178092, 0.270394, 0.674221, 0.410829, 0.341818, 0.681168, 0.190791, 0.254102, 0.245402, 0.680993, 0.119661, 0.880911, 0.0174896, 0.766244, 0.714052, 0.255102, 0.31572, 0.00887071, 0.335468, 0.0266612, 0.986794, 0.609976, 0.66684, 0.877991, 0.144678, 0.423934, 0.703842, 0.225452, 0.911991, 0.309387, 0.673259, 0.172029, 0.329916, 0.502351, 0.690981, 0.170024, 0.88533, 0.624785, 0.321363, 0.685606, 0.91955, 0.592923, 0.54156, 0.365244, 0.829578, 0.436584, 0.412874, 0.735137, 0.791498, 0.301722, 0.381989, 0.688878, 0.512202, 0.182662, 0.603295, 0.895481, 0.364057, 0.677434, 0.507175, 0.654368, 0.653867, 0.528956, 0.374771, 0.98773, 0.595307, 0.783905, 0.601861, 0.463332, 0.932498, 0.739323, 0.545642, 0.5791, 0.622483, 0.532126, 0.668479, 0.306928, 0.0195272, 0.453952, 0.614024, 0.410677, 0.0155925, 0.826704, 0.423931, 0.836678, 0.438924, 0.740696, 0.561482, 0.0893214, 0.288994, 0.571401, 0.677957, 0.380046, 0.276863, 0.342851, 0.0509712, 0.114583, 0.525371, 0.209344, 0.902236, 0.395247, 0.754452, 0.69313, 0.966128, 0.444688, 0.36747, 0.259551, 0.027126, 0.882527, 0.448274, 0.260347, 0.55595, 0.747249, 0.615728, 0.41376, 0.34802, 0.2875, 0.429296, 0.736503, 0.677609, 0.327682, 0.0887324, 0.331813, 0.711874, 0.918619, 0.872923, 0.0742797, 0.85766, 0.0707066, 0.534841, 0.921731, 0.0263108, 0.0598999, 0.37799, 0.499211, 0.676225, 0.464623, 0.253417, 0.130598, 0.924041, 0.0560727, 0.897775, 0.45905, 0.142797, 0.894595, 0.469825, 0.872596, 0.0691726, 0.14761, 0.644502, 0.427733, 0.694703, 0.823281, 0.869951, 0.71473, 0.493258, 0.536883, 0.255269, 0.293427, 0.831841, 0.680813, 0.0206072, 0.677639, 0.5316, 0.473076, 0.102568, 0.630431, 0.0832135, 0.289184, 0.798372, 0.231469, 0.0982872, 0.567834, 0.650897, 0.677726, 0.691671, 0.962228, 0.844364, 0.942152, 0.726276, 0.344664, 0.26788, 0.147159, 0.296549, 0.861301, 0.36695, 0.118315, 0.416061, 0.967727, 0.87995, 0.982524, 0.740762, 0.0335848, 0.851658, 0.698516, 0.865528, 0.369151, 0.183144, 0.629342, 0.775286, 0.265997, 0.0241675, 0.581314, 0.708552, 0.612795, 0.745523, 0.232616, 0.977005, 0.0635075, 0.763925, 0.468835, 0.213136, 0.00100157, 0.252305, 0.844795, 0.164959, 0.982506, 0.191049, 0.0285206, 0.487858, 0.178453, 0.270045, 0.825004, 0.207762, 0.170568, 0.948178, 0.0335208, 0.823119, 0.469793, 0.712395, 0.251105, 0.259527, 0.621902, 0.553753, 0.660528, 0.862246, 0.782147, 0.347054, 0.119024, 0.360907, 0.0204292, 0.972423, 0.626333, 0.78352, 0.775083, 0.782187, 0.584635, 0.630985, 0.146018, 0.208324, 0.206896, 0.59348, 0.934518, 0.506731, 0.546564, 0.469744, 0.244292, 0.539307, 0.335306, 0.309056, 0.232766, 0.898153, 0.0408301, 0.464669, 0.316146, 0.982274, 0.322839, 0.165663, 0.562336, 0.347879, 0.469302, 0.743295, 0.591931, 0.654187, 0.341286, 0.204763, 0.795382, 0.208074, 0.0120431, 0.590175, 0.775931, 0.676866, 0.324001, 0.399278, 0.183638, 0.585804, 0.689186, 0.757816, 0.147845, 0.347157, 0.0264234, 0.309659, 0.890931, 0.965132, 0.640717, 0.772183, 
0.443222, 0.221438, 0.142323, 0.0950673, 0.280891, 0.400968, 0.442616, 0.145378, 0.795143, 0.5753, 0.634607, 0.748196, 0.486195, 0.69369, 0.81566, 0.00878605, 0.994677, 0.781603, 0.854253, 0.487573, 0.11681, 0.187154, 0.482745, 0.428969, 0.540055, 0.71439, 0.765667, 0.470962, 0.986268, 0.977392, 0.817257, 0.582757, 0.182142, 0.391291, 0.561361, 0.704339, 0.941356, 0.0553637, 0.460717, 0.020215, 0.899266, 0.106616, 0.777086, 0.732166, 0.159011, 0.290419, 0.56689, 0.00779044, 0.892882, 0.133409, 0.101799, 0.186129, 0.6709, 0.115771, 0.945025, 0.360708, 0.26327, 0.84268, 0.0845632, 0.342377, 0.959354, 0.715668, 0.940337, 0.646759, 0.658464, 0.170854, 0.0401572, 0.898814, 0.428535, 0.846646, 0.0622173, 0.481759, 0.805686, 0.930578, 0.474452, 0.908406, 0.808462, 0.834165, 0.452788, 0.719645, 0.274831, 0.652777, 0.554878, 0.892961, 0.207743, 0.27354, 0.288798, 0.131028, 0.800323, 0.291063, 0.555386, 0.391184, 0.666883, 0.823652, 0.402678, 0.481177, 0.982194, 0.446464, 0.19942, 0.0584617, 0.252946, 0.128255, 0.850211, 0.21515, 0.492428, 0.694589, 0.741449, 0.546204, 0.696326, 0.446408, 0.112463, 0.666474, 0.939459, 0.385872, 0.628081, 0.0464086, 0.630165, 0.0892842, 0.605734, 0.768762, 0.10757, 0.17385, 0.561266, 0.182718, 0.114847, 0.290036, 0.321573, 0.355856, 0.340549, 0.602114, 0.184965, 0.420035, 0.962014, 0.0199467, 0.873264, 0.39637, 0.0883592, 0.815046, 0.335132, 0.926742, 0.498382, 0.755187, 0.8201, 0.118857, 0.970355, 0.496689, 0.656959, 0.345073, 0.755824, 0.106491, 0.909558, 0.929634, 0.851547, 0.674076, 0.791228, 0.31031, 0.506334, 0.788087, 0.0272701, 0.528601, 0.77785, 0.742108, 0.938537, 0.618618, 0.457377, 0.525991, 0.365887, 0.23779, 0.31306, 0.676006, 0.407159, 0.339719, 0.995224, 0.99883, 0.565039, 0.916343, 0.0829878, 0.0640497, 0.951538, 0.459887, 0.659082, 0.0152949, 0.793012, 0.120974, 0.860868, 0.547291, 0.619261, 0.125329, 0.296842, 0.95894, 0.213264, 0.0800837, 0.814934, 0.0128622, 0.977173, 0.473298, 0.692574, 0.362815, 0.511559, 0.765763, 0.337082, 0.292514, 0.759194, 0.658489, 0.413138, 0.208933, 0.447627, 0.704491, 0.418062, 0.100603, 0.0684665, 0.876975, 0.00887432, 0.494588, 0.206948, 0.822848, 0.675951, 0.51903, 0.0624831, 0.887231, 0.126942, 0.910038, 0.136196, 0.257017, 0.73025, 0.804383, 0.842607, 0.358013, 0.635618, 0.208468, 0.463756, 0.321807, 0.586821, 0.538498, 0.812904, 0.969209, 0.332872, 0.857647, 0.901719, 0.545676, 0.902948, 0.185258, 0.025943, 0.763974, 0.861203, 0.95305, 0.529824, 0.2482, 0.726529, 0.464223, 0.946856, 0.133834, 0.696507, 0.0994677, 0.89814, 0.748362, 0.0262708, 0.667601, 0.590863, 0.403922, 0.117761, 0.386821, 0.80546, 0.334043, 0.314829, 0.200278, 0.306342, 0.52356, 0.474637, 0.106998, 0.9278, 0.675965, 0.663403, 0.101462, 0.583559, 0.0462734, 0.0352671, 0.90461, 0.790007, 0.758405, 0.841091, 0.0926943, 0.137215, 0.368548, 0.0705626, 0.545258, 0.117584, 0.498161, 0.5439, 0.0980244, 0.359515, 0.554699, 0.666477, 0.204211, 0.189021, 0.905762, 0.951077, 0.117013, 0.697655, 0.611902, 0.44213, 0.715191, 0.806257, 0.954509, 0.809172, 0.270034, 0.204865, 0.943159, 0.061596, 0.246246, 0.344185, 0.703708, 0.472256, 0.0147259, 0.0013571, 0.538292, 0.225187, 0.168966, 0.136675, 0.712792, 0.870868, 0.0291048, 0.826463, 0.636307, 0.195244, 0.337975, 0.40808, 0.901738, 0.538553, 0.770498, 0.738043, 0.503139, 0.0658936, 0.979355, 0.402137, 0.673127, 0.531231, 0.628136, 0.490108, 0.303373, 0.329643, 0.181567, 0.707066, 0.961684, 0.260575, 0.862115, 0.457136, 0.565408, 0.701363, 0.512112, 0.271181, 0.575812, 0.163291, 0.0146806, 0.303805, 
0.0322961, 0.999351, 0.129029, 0.752638, 0.806291, 0.554292, 0.493333, 0.653076, 0.476907, 0.387074, 0.495866, 0.578622, 0.440685, 0.14802, 0.962741, 0.143877, 0.320731, 0.364887, 0.453192, 0.134608, 0.157618, 0.0746771, 0.521106, 0.353175, 0.271908, 0.265794, 0.511907, 0.65059, 0.628124, 0.125845, 0.448902, 0.0130005, 0.362455, 0.112936, 0.440093, 0.829233, 0.990599, 0.383463, 0.801014, 0.803162, 0.650828, 0.4055, 0.247662, 0.601687, 0.674877, 0.0954853, 0.373485, 0.259122, 0.350152, 0.504605, 0.0422898, 0.208451, 0.0145286, 0.481845, 0.650191, 0.26052, 0.523836, 0.961735, 0.42511, 0.508254, 0.635915, 0.866391, 0.597371, 0.894346, 0.34847, 0.564543, 0.783321, 0.165085, 0.482795, 0.480497, 0.312681, 0.699649, 0.405702, 0.299391, 0.393185, 0.580065, 0.942902, 0.36436, 0.135167, 0.860861, 0.437486, 0.349081, 0.0743343, 0.912358, 0.962811, 0.191964, 0.318093, 0.0134003, 0.388102, 0.491684, 0.94616, 0.489305, 0.674996, 0.684999, 0.300016, 0.657807, 0.85814, 0.0886809, 0.493227, 0.710839, 0.0607668, 0.0573066, 0.699759, 0.899117, 0.146274, 0.673067, 0.20316, 0.993279, 0.333727, 0.331023, 0.70526, 0.585366, 0.327836, 0.514168, 0.734879, 0.220123, 0.646333, 0.410052, 0.833534, 0.308118, 0.94664, 0.825775, 0.311175, 0.817723, 0.318438, 0.623486, 0.466668, 0.346387, 0.444601, 0.0431827, 0.226344, 0.712988, 0.742142, 0.998481, 0.169476, 0.743603, 0.956298, 0.714877, 0.849463, 0.878594, 0.712209, 0.180689, 0.970106, 0.258083, 0.0762421, 0.843759, 0.789727, 0.32206, 0.404498, 0.157956, 0.711342, 0.764646, 0.573514, 0.669096, 0.0169577, 0.98374, 0.872292, 0.927651, 0.920891, 0.421191, 0.739768, 0.48246, 0.0226258, 0.844997, 0.400884, 0.628137, 0.459454, 0.158492, 0.350767, 0.259643, 0.634974, 0.101789, 0.287066, 0.275365, 0.344751, 0.1319, 0.569736, 0.802776, 0.185886, 0.877973, 0.742424, 0.283848, 0.0628755, 0.548388, 0.968877, 0.196969, 0.993194, 0.737554, 0.421841, 0.133346, 0.323018, 0.678151, 0.539794, 0.528593, 0.621648, 0.800984, 0.362695, 0.666673, 0.297796, 0.35114, 0.952972, 0.996985, 0.804617, 0.769111, 0.211362, 0.071003, 0.273445, 0.389167, 0.672436, 0.524423, 0.360221, 0.220799, 0.417743, 0.600825, 0.861874, 0.325058, 0.37374, 0.978478, 0.743612, 0.515629, 0.446762, 0.140657, 0.974692, 0.589504, 0.307988, 0.357815, 0.074559, 0.595603, 0.174361, 0.516269, 0.857666, 0.921461, 0.3509, 0.914908, 0.385497, 0.594327, 0.132143, 0.00243453, 0.821895, 0.447849, 0.956253, 0.176787, 0.382489, 0.174103, 0.625296, 0.62577, 0.212753, 0.4215, 0.510081, 0.0451632, 0.787763, 0.0877071, 0.157109, 0.433643, 0.0546049, 0.126395, 0.764879, 0.401122, 0.314145, 0.721691, 0.189557, 0.953835, 0.397051, 0.274993, 0.639607, 0.00120164, 0.824579, 0.283382, 0.925793, 0.670764, 0.847156, 0.922985, 0.293448, 0.737972, 0.340344, 0.776504, 0.0259891, 0.0778378, 0.468436, 0.512705, 0.420573, 0.741593, 0.527772, 0.497272, 0.485672, 0.313134, 0.790152, 0.00858696, 0.865896, 0.345584, 0.261287, 0.00731203, 0.0949803, 0.776545, 0.774847, 0.331482, 0.418394, 0.915572, 0.309631, 0.224686, 0.183097, 0.60949, 0.308622, 0.216153, 0.115206, 0.974212, 0.763856, 0.0501892, 0.542696, 0.447503, 0.907984, 0.407614, 0.956497, 0.412391, 0.112248, 0.934585, 0.615769, 0.449886, 0.948145, 0.0341262, 0.791654, 0.876766, 0.918833, 0.677323, 0.573616, 0.68512, 0.063904, 0.744852, 0.889233, 0.300993, 0.355698, 0.51711, 0.191152, 0.39697, 0.135644, 0.379379, 0.263374, 0.005119, 0.801305, 0.148226, 0.129027, 0.75879, 0.926024, 0.077719, 0.12664, 0.784643, 0.422217, 0.483238, 0.86804, 0.982853, 0.0542705, 0.74908, 0.902672, 0.371814, 0.707943, 
0.923811, 0.38666, 0.0241011, 0.65662, 0.269029, 0.402267, 0.325552, 0.232426, 0.0581101, 0.905211, 0.675483, 0.0774212, 0.039581, 0.131205, 0.749617, 0.0566855, 0.739756, 0.06836, 0.00835598, 0.0254742, 0.0447429, 0.233396, 0.482392, 0.447588, 0.643709, 0.832638, 0.670232, 0.043071, 0.578531, 0.662282, 0.286894, 0.423193, 0.0552578, 0.852691, 0.713245, 0.470162, 0.668054, 0.667899, 0.574541, 0.131059, 0.91276, 0.818326, 0.11599, 0.851034, 0.593224, 0.155014, 0.514159, 0.592336, 0.562494, 0.196258, 0.252103, 0.340876, 0.911879, 0.175632, 0.546079, 0.182363, 0.454414, 0.604122, 0.297298, 0.653722, 0.761407, 0.47926, 0.816178, 0.235929, 0.639344, 0.739464, 0.938616, 0.286809, 0.327739, 0.461707, 0.208154, 0.482954, 0.0509468, 0.142706, 0.89284, 0.237463, 0.586393, 0.800339, 0.560887, 0.295807, 0.330318, 0.0670258, 0.351697, 0.381988, 0.94602, 0.591504, 0.3632, 0.717619, 0.697342, 0.250028, 0.693048, 0.222405, 0.665521, 0.334338, 0.84925, 0.15161, 0.21402, 0.370436, 0.613952, 0.285156, 0.399465, 0.146095, 0.0125152, 0.0118508, 0.0156256, 0.740666, 0.437696, 0.403523, 0.850049, 0.927105, 0.0708434, 0.127489, 0.397059, 0.850749, 0.0904292, 0.796167, 0.0869627, 0.552714, 0.516711, 0.629009, 0.704758, 0.101544, 0.780694, 0.622194, 0.969904, 0.247187, 0.825026, 0.809368, 0.690484, 0.925001, 0.564509, 0.0470862, 0.865373, 0.137115, 0.273268, 0.847244, 0.504767, 0.336853, 0.144934, 0.068843, 0.52291, 0.23538, 0.577555, 0.16187, 0.0370332, 0.72807, 0.432009, 0.426133, 0.462264, 0.939607, 0.235215, 0.811247, 0.556375, 0.87118, 0.569085, 0.878304, 0.983183, 0.852026, 0.46259, 0.307348, 0.963419, 0.843539, 0.845906, 0.587644, 0.765891, 0.991794, 0.809161, 0.429153, 0.14479, 0.211685, 0.194344, 0.692745, 0.973786, 0.33619, 0.0689954, 0.793661, 0.139201, 0.541078, 0.112971, 0.278525, 0.967265, 0.738958, 0.434821, 0.0140112, 0.963971, 0.590263, 0.114507, 0.635127, 0.677356, 0.893178, 0.315607, 0.955298, 0.00820178, 0.00425983, 0.0551395, 0.111696, 0.516583, 0.272322, 0.468827, 0.217675, 0.311107, 0.751291, 0.915763, 0.28836, 0.792332, 0.0851952, 0.0043993, 0.20719, 0.865429, 0.432253, 0.558345, 0.0984699, 0.967917, 0.491382, 0.782148, 0.699392, 0.392874, 0.468504, 0.924621, 0.5672, 0.111487, 0.814167, 0.00100346, 0.66217, 0.885473, 0.420846, 0.0417279, 0.695533, 0.117853, 0.234109, 0.731629, 0.736747, 0.560958, 0.470938, 0.982085, 0.962364, 0.327289, 0.704605, 0.595809, 0.757899, 0.270644, 0.783712, 0.639928, 0.550749, 0.735806, 0.0453388, 0.719945, 0.457517, 0.578951, 0.955582, 0.244602, 0.368397, 0.0534791, 0.619606, 0.936891, 0.909496, 0.544918, 0.830714, 0.472908, 0.741619, 0.747178, 0.944943, 0.890203, 0.653188, 0.820486, 0.885751, 0.942118, 0.995787, 0.722743, 0.215096, 0.275333, 0.450616, 0.0303903, 0.0745061, 0.348647, 0.927486, 0.730308, 0.508685, 0.869741, 0.000199261, 0.586344, 0.0536505, 0.0119296, 0.0923863, 0.545536, 0.0608853, 0.380569, 0.912787, 0.0686252, 0.371146, 0.466165, 0.926151, 0.608997, 0.753636, 0.664229, 0.383292, 0.852328, 0.571204, 0.036878, 0.25953, 0.563706, 0.894019, 0.857279, 0.816177, 0.714966, 0.574699, 0.860752, 0.972654, 0.784208, 0.756852, 0.457417, 0.717886, 0.713831, 0.153024, 0.644426, 0.695995, 0.286087, 0.980746, 0.843952, 0.982575, 0.856584, 0.209565, 0.481541, 0.169354, 0.69629, 0.209612, 0.327471, 0.0551323, 0.509702, 0.237023, 0.45776, 0.208377, 0.669919, 0.578195, 0.870464, 0.53013, 0.304846, 0.826025, 0.7725, 0.690408, 0.499287, 0.885399, 0.467045, 0.0825642, 0.439889, 0.53252, 0.298833, 0.677996, 0.351126, 0.570108, 0.735968, 0.185331, 0.572396, 0.0718335, 
0.043388, 0.960652, 0.244946, 0.840251, 0.0970145, 0.765337, 0.07533, 0.11174, 0.0798366, 0.563998, 0.547384, 0.685283, 0.698292, 0.0698455, 0.764944, 0.166621, 0.717051, 0.73781, 0.793447, 0.139141, 0.025704, 0.0299312, 0.127013, 0.665322, 0.252495, 0.702525, 0.461675, 0.788524, 0.892694, 0.109942, 0.756784, 0.182836, 0.155042, 0.266305, 0.508443, 0.285882, 0.784535, 0.818626, 0.928909, 0.333389, 0.758663, 0.313839, 0.301623, 0.360218, 0.310982, 0.927235, 0.35376, 0.552454, 0.290489, 0.02134, 0.225831, 0.122543, 0.108653, 0.938176, 0.282131, 0.757113, 0.66138, 0.134962, 0.887678, 0.229384, 0.300449, 0.337241, 0.668107, 0.575062, 0.393382, 0.0596951, 0.338745, 0.18376, 0.416311, 0.308475, 0.268968, 0.24063, 0.842395, 0.70662, 0.383827, 0.154508, 0.382982, 0.001046, 0.8198, 0.534798, 0.233568, 0.733703, 0.48442, 0.63067, 0.7776, 0.816091, 0.791681, 0.891629, 0.246947, 0.931949, 0.171218, 0.981435, 0.809544, 0.430647, 0.742869, 0.302689, 0.805772, 0.915023, 0.573546, 0.0655189, 0.819092, 0.647007, 0.367672, 0.436398, 0.497045, 0.27714, 0.586413, 0.291069, 0.359957, 0.422829, 0.189001, 0.291317, 0.472965, 0.985665, 0.202703, 0.874949, 0.915435, 0.124988, 0.314792, 0.81394, 0.303742, 0.35381, 0.122282, 0.745956, 0.209447, 0.257211, 0.927826, 0.907156, 0.461426, 0.106361, 0.954715, 0.644834, 0.663264, 0.723763, 0.802764, 0.452491, 0.690445, 0.0763885, 0.100535, 0.954263, 0.485459, 0.315302, 0.412006, 0.312095, 0.955058, 0.838809, 0.443168, 0.675165, 0.00597348, 0.396078, 0.926727, 0.668074, 0.215526, 0.776938, 0.932685, 0.179367, 0.0769971, 0.888411, 0.872208, 0.00631209, 0.278515, 0.491077, 0.878372, 0.701497, 0.265719, 0.592272, 0.415172, 0.867962, 0.859485, 0.587776, 0.178753, 0.490393, 0.470901, 0.677936, 0.763239, 0.332961, 0.953333, 0.942395, 0.0136726, 0.39762, 0.0989805, 0.617236, 0.155618, 0.440091, 0.91059, 0.814133, 0.756409, 0.975529, 0.572451, 0.844711, 0.950252, 0.990381, 0.761324, 0.180284, 0.206011, 0.0900117, 0.0221842, 0.331012, 0.270904, 0.958312, 0.389324, 0.324624, 0.301003, 0.0224642, 0.00124109, 0.874292, 0.717028, 0.295782, 0.0209107, 0.0302121, 0.892169, 0.503364, 0.320226, 0.272315, 0.65038, 0.784421, 0.0888919, 0.250147, 0.0424563, 0.560304, 0.432983, 0.501894, 0.437084, 0.489585, 0.314804, 0.183821, 0.181711, 0.0583365, 0.852978, 0.844904, 0.339829, 0.1833, 0.349819, 0.223401, 0.550625, 0.638733, 0.270964, 0.2392, 0.718568, 0.220275, 0.299293, 0.385766, 0.436482, 0.0886527, 0.173392, 0.18101, 0.300184, 0.122931, 0.23347, 0.674402, 0.19252, 0.8956, 0.748266, 0.746077, 0.533762, 0.246848, 0.219544, 0.689732, 0.295174, 0.244165, 0.203912, 0.925367, 0.998724, 0.727769, 0.372718, 0.559335, 0.731244, 0.775813, 0.818395, 0.410829, 0.0362289, 0.688645, 0.317817, 0.637866, 0.72063, 0.320848, 0.424657, 0.0157628, 0.14882, 0.345892, 0.162934, 0.193426, 0.00402687, 0.115824, 0.582711, 0.857726, 0.38583, 0.0702833, 0.35083, 0.0715505, 0.777583, 0.035525, 0.362197, 0.860771, 0.963233, 0.641075, 0.374879, 0.896295, 0.0165354, 0.91491, 0.830022, 0.49169, 0.583116, 0.577071, 0.133575, 0.443, 0.39942, 0.288246, 0.292011, 0.557059, 0.673048, 0.00417844, 0.211666, 0.0893447, 0.935207, 0.77357, 0.877631, 0.262308, 0.209458, 0.757559, 0.259865, 0.709038, 0.342092, 0.887604, 0.800483, 0.0608916, 0.0624347, 0.296387, 0.227653, 0.0905725, 0.96811, 0.876155, 0.712574, 0.475495, 0.837954, 0.82326, 0.901368, 0.978379, 0.374155, 0.3437, 0.820785, 0.852968, 0.554892, 0.45926, 0.714185, 0.580378, 0.530148, 0.254213, 0.950919, 0.314368, 0.424304, 0.388183, 0.244002, 0.306542, 0.570451, 0.164858, 
0.609034, 0.184189, 0.451433, 0.269775, 0.17716, 0.391834, 0.00178003, 0.891679, 0.671473, 0.711705, 0.117475, 0.783716, 0.915749, 0.984903, 0.263905, 0.655211, 0.347225, 0.361571, 0.412794, 0.00185498, 0.206817, 0.884916, 0.186854, 0.38828, 0.0828301, 0.375498, 0.666243, 0.658897, 0.876611, 0.412295, 0.697257, 0.115523, 0.316816, 0.476403, 0.766361, 0.852835, 0.306754, 0.22442, 0.0357394, 0.409191, 0.333454, 0.164335, 0.987705, 0.411213, 0.234983, 0.158057, 0.43634, 0.658609, 0.174127, 0.020123, 0.437261, 0.0184341, 0.89343, 0.79349, 0.980645, 0.692591, 0.148737, 0.330185, 0.754389, 0.642548, 0.817865, 0.880654, 0.422831, 0.446075, 0.21881, 0.984493, 0.0661338, 0.461624, 0.105358, 0.534487, 0.213864, 0.148391, 0.951034, 0.271586, 0.559914, 0.879341, 0.602874, 0.220543, 0.476639, 0.435103, 0.105406, 0.310206, 0.793802, 0.151404, 0.487077, 0.172264, 0.960269, 0.713114, 0.999871, 0.420094, 0.515271, 0.451883, 0.479245, 0.348492, 0.990736, 0.308586, 0.63651, 0.808384, 0.561693, 0.0413513, 0.681456, 0.610626, 0.534659, 0.496177, 0.3667, 0.900975, 0.894569, 0.79565, 0.0186271, 0.989815, 0.991297, 0.779106, 0.519002, 0.487239, 0.869723, 0.00462705, 0.184937, 0.668712, 0.917448, 0.725175, 0.562443, 0.428636, 0.303813, 0.647251, 0.921253, 0.47985, 0.0837471, 0.744928, 0.861585, 0.839156, 0.834023, 0.264347, 0.302752, 0.647836, 0.310652, 0.287339, 0.22281, 0.58143, 0.0492512, 0.388226, 0.636916, 0.701096, 0.45844, 0.612194, 0.0897263, 0.613092, 0.21763, 0.251719, 0.581469, 0.842173, 0.104907, 0.852448, 0.8234, 0.164362, 0.888597, 0.0204464, 0.835671, 0.761565, 0.556342, 0.796369, 0.586651, 0.669905, 0.670633, 0.182719, 0.77019, 0.404177, 0.553797, 0.414769, 0.961517, 0.743827, 0.58166, 0.514762, 0.0927369, 0.201122, 0.510801, 0.338126, 0.992696, 0.803498, 0.648466, 0.729269, 0.455494, 0.115187, 0.33076, 0.453352, 0.475997, 0.0203883, 0.594224, 0.0339375, 0.020838, 0.55651, 0.404537, 0.949745, 0.854326, 0.651165, 0.104005, 0.694183, 0.00834956, 0.78608, 0.815726, 0.459997, 0.122715, 0.765904, 0.40332, 0.685294, 0.494859, 0.746013, 0.174657, 0.503882, 0.560745, 0.223367, 0.625813, 0.415538, 0.399038, 0.395755, 0.275441, 0.36147, 0.201579, 0.401547, 0.408787, 0.884887, 0.616629, 0.889319, 0.656236, 0.938783, 0.0637644, 0.973361, 0.879687, 0.529342, 0.568434, 0.431322, 0.62737, 0.974513, 0.845337, 0.996259, 0.159408, 0.631976, 0.643702, 0.665687, 0.139823, 0.843702, 0.246161, 0.76084, 0.728958, 0.535128, 0.136276, 0.184521, 0.904256, 0.294465, 0.778551, 0.377506, 0.787573, 0.436552, 0.320794, 0.965536, 0.692609, 0.618321, 0.786625, 0.790937, 0.634893, 0.207891, 0.643817, 0.222708, 0.425659, 0.674078, 0.504861, 0.131684, 0.0188377, 0.937917, 0.293583, 0.334396, 0.266537, 0.92676, 0.326858, 0.525026, 0.266233, 0.0477181, 0.598097, 0.0946177, 0.684865, 0.384384, 0.699691, 0.278586, 0.401219, 0.0221756, 0.792018, 0.497166, 0.515238, 0.695679, 0.719962, 0.349851, 0.440469, 0.567658, 0.624902, 0.576608, 0.635863, 0.0877794, 0.94749, 0.273357, 0.763191, 0.958012, 0.434347, 0.983312, 0.422428, 0.193906, 0.190518, 0.40998, 0.114308, 0.774496, 0.120317, 0.660065, 0.344058, 0.376515, 0.00188368, 0.498159, 0.136415, 0.97349, 0.360057, 0.468954, 0.677129, 0.988489, 0.100758, 0.987906, 0.0793113, 0.699757, 0.254688, 0.0192848, 0.0246752, 0.314687, 0.519168, 0.949958, 0.199203, 0.660482, 0.364254, 0.665527, 0.0424076, 0.0105091, 0.807254, 0.778683, 0.47006, 0.749578, 0.122787, 0.379606, 0.320607, 0.239831, 0.992628, 0.400976, 0.513473, 0.469938, 0.166511, 0.318388, 0.0512289, 0.16053, 0.677713, 0.995624, 0.0188381, 
0.130674, 0.188496, 0.529163, 0.29398, 0.686658, 0.685019, 0.682503, 0.725215, 0.289632, 0.132913, 0.511885, 0.0148312, 0.820994, 0.652437, 0.130145, 0.911327, 0.627165, 0.103867, 0.13767, 0.110062, 0.406208, 0.590038, 0.0765644, 0.559362, 0.531457, 0.91515, 0.297759, 0.739756, 0.300428, 0.246681, 0.666189, 0.100013, 0.321947, 0.596659, 0.853213, 0.375101, 0.263643, 0.299012, 0.630002, 0.690559, 0.653102, 0.231595, 0.623702, 0.400381, 0.179103, 0.985133, 0.263407, 0.0276057, 0.792905, 0.94982, 0.788521, 0.444738, 0.0369177, 0.157686, 0.0208841, 0.338507, 0.235028, 0.135567, 0.898262, 0.663315, 0.304737, 0.401189, 0.501241, 0.346704, 0.560141, 0.881806, 0.243489, 0.639263, 0.599862, 0.253706, 0.854068, 0.133819, 0.405036, 0.311781, 0.401806, 0.872806, 0.321202, 0.752558, 0.175346, 0.378333, 0.323489, 0.383771, 0.858183, 0.0325355, 0.928269, 0.673296, 0.673749, 0.0901323, 0.72412, 0.649832, 0.135227, 0.35764, 0.288569, 0.254955, 0.443867, 0.517818, 0.297254, 0.590647, 0.107932, 0.449047, 0.882344, 0.225252, 0.433855, 0.762601, 0.696592, 0.944478, 0.811902, 0.446042, 0.739057, 0.916647, 0.560893, 0.481298, 0.0512294, 0.189685, 0.938124, 0.743038, 0.663919, 0.277515, 0.00490406, 0.105191, 0.745067, 0.610129, 0.875308, 0.0198431, 0.170976, 0.345018, 0.489012, 0.552749, 0.874204, 0.308591, 0.503163, 0.669615, 0.734265, 0.792719, 0.0165101, 0.760995, 0.821754, 0.223009, 0.757844, 0.830483, 0.132646, 0.61682, 0.837773, 0.0240728, 0.790019, 0.77177, 0.882421, 0.166526, 0.735692, 0.546135, 0.843767, 0.896736, 0.590712, 0.445507, 0.34464, 0.181952, 0.66194, 0.347118, 0.176585, 0.0663742, 0.581223, 0.470515, 0.0402068, 0.440101, 0.653776, 0.171315, 0.948476, 0.252551, 0.0267077, 0.214988, 0.906711, 0.658821, 0.0954548, 0.905208, 0.406649, 0.654212, 0.915519, 0.393234, 0.731812, 0.180086, 0.511531, 0.218638, 0.552076, 0.168674, 0.702122, 0.368917, 0.235562, 0.753486, 0.697328, 0.404258, 0.206343, 0.751382, 0.0744253, 0.776965, 0.0942195, 0.437631, 0.488783, 0.39511, 0.685955, 0.416878, 0.634365, 0.152642, 0.840603, 0.21257, 0.0627066, 0.353281, 0.884653, 0.151442, 0.193807, 0.063635, 0.631978, 0.149031, 0.655747, 0.888465, 0.900787, 0.929788, 0.851687, 0.715223, 0.342302, 0.90168, 0.137161, 0.46594, 0.816812, 0.965853, 0.765433, 0.369141, 0.463996, 0.964662, 0.732006, 0.173873, 0.882535, 0.648797, 0.0182048, 0.90321, 0.821726, 0.288565, 0.144074, 0.852497, 0.242443, 0.69388, 0.252887, 0.172532, 0.883051, 0.679245, 0.494158, 0.346952, 0.533348, 0.911159, 0.654162, 0.814161, 0.435313, 0.514984, 0.599249, 0.0797151, 0.805843, 0.65486, 0.931116, 0.816697, 0.581251, 0.586563, 0.836913, 0.898794, 0.95048, 0.313502, 0.387805, 0.630518, 0.418309, 0.310879, 0.866792, 0.470594, 0.207069, 0.699711, 0.794977, 0.242894, 0.478062, 0.642866, 0.530757, 0.97951, 0.654546, 0.9611, 0.424048, 0.461523, 0.819456, 0.464958, 0.680896, 0.818617, 0.60735, 0.873149, 0.554147, 0.948287, 0.677049, 0.628649, 0.389143, 0.219843, 0.206531, 0.382141, 0.97292, 0.436913, 0.527146, 0.584415, 0.638428, 0.964012, 0.67869, 0.743408, 0.364167, 0.935989, 0.319598, 0.97166, 0.531958, 0.226696, 0.676763, 0.908528, 0.616405, 0.678407, 0.183517, 0.449191, 0.0941468, 0.824995, 0.488591, 0.668747, 0.271305, 0.360296, 0.156978, 0.335918, 0.25947, 0.87536, 0.00516358, 0.634068, 0.327899, 0.106054, 0.845073, 0.81707, 0.388142, 0.70198, 0.0181476, 0.0736673, 0.187835, 0.512911, 0.175033, 0.97524, 0.849162, 0.14661, 0.928972, 0.732933, 0.844496, 0.751822, 0.919395, 0.542603, 0.356712, 0.742917, 0.647308, 0.462918, 0.434674, 0.553079, 0.0235603, 
0.632526, 0.735501, 0.422099, 0.443653, 0.0204179, 0.815156, 0.233638, 0.628604, 0.0116476, 0.153278, 0.837472, 0.830971, 0.363338, 0.861741, 0.160206, 0.394161, 0.175412, 0.137136, 0.26409, 0.517597, 0.689166, 0.37129, 0.442019, 0.0914687, 0.37218, 0.930533, 0.889082, 0.483711, 0.377526, 0.252849, 0.838257, 0.801246, 0.848188, 0.361704, 0.488574, 0.148598, 0.294871, 0.266487, 0.774182, 0.187339, 0.0941953, 0.164017, 0.936422, 0.50731, 0.074095, 0.854587, 0.649322, 0.345458, 0.445889, 0.81422, 0.244509, 0.682901, 0.749852, 0.111593, 0.213816, 0.452847, 0.39045, 0.0703317, 0.804879, 0.0454746, 0.11173, 0.541566, 0.757812, 0.901329, 0.1559, 0.437596, 0.0668889, 0.51318, 0.239489, 0.991694, 0.00890407, 0.776454, 0.480847, 0.867292, 0.178512, 0.487028, 0.394301, 0.312336, 0.706071, 0.0676136, 0.417828, 0.144306, 0.467616, 0.535511, 0.451438, 0.732587, 0.81661, 0.17244, 0.239007, 0.384814, 0.519206, 0.946842, 0.745977, 0.0109876, 0.417681, 0.973558, 0.434035, 0.264324, 0.423259, 0.180296, 0.0602802, 0.905666, 0.297782, 0.704718, 0.0523451, 0.398693, 0.312, 0.714856, 0.176388, 0.922254, 0.997358, 0.112195, 0.505402, 0.848362, 0.213936, 0.846666, 0.609299, 0.833808, 0.693092, 0.548613, 0.66066, 0.86805, 0.787334, 0.00247236, 0.402374, 0.378345, 0.372522, 0.963886, 0.938627, 0.364029, 0.555715, 0.450016, 0.896565, 0.786311, 0.628032, 0.768357, 0.220717, 0.0791959, 0.588705, 0.937854, 0.306802, 0.358237, 0.625344, 0.776782, 0.813199, 0.045975, 0.4, 0.233179, 0.983195, 0.305067, 0.0694271, 0.359581, 0.067716, 0.885032, 0.273982, 0.570772, 0.888602, 0.219557, 0.378408, 0.241539, 0.258022, 0.475525, 0.857438, 0.948276, 0.00303445, 0.0524761, 0.0211743, 0.909011, 0.621567, 0.679538, 0.126809, 0.972133, 0.108976, 0.0159989, 0.977282, 0.392157, 0.291527, 0.222998, 0.324908, 0.242965, 0.20673, 0.514523, 0.480699, 0.692138, 0.382866, 0.21629, 0.841046, 0.268402, 0.76586, 0.874557, 0.814671, 0.581949, 0.0393721, 0.376769, 0.0832735, 0.0114397, 0.462073, 0.0464791, 0.468276, 0.815788, 0.062347, 0.0712963, 0.579323, 0.550473, 0.811739, 0.48989, 0.681401, 0.185871, 0.0397111, 0.880335, 0.347317, 0.613371, 0.207223, 0.587868, 0.66271, 0.370936, 0.385676, 0.564103, 0.32319, 0.203513, 0.20325, 0.28876, 0.636782, 0.133005, 0.154924, 0.814053, 0.577972, 0.590152, 0.74786, 0.733135, 0.93969, 0.357774, 0.361943, 0.823137, 0.81813, 0.331541, 0.784539, 0.634332, 0.897261, 0.232585, 0.260344, 0.585753, 0.949643, 0.394603, 0.629096, 0.571147, 0.250446, 0.639285, 0.874115, 0.0845131, 0.901405, 0.448808, 0.879161, 0.853172, 0.657264, 0.455202, 0.674769, 0.792406, 0.221688, 0.210912, 0.504814, 0.64103, 0.040465, 0.551683, 0.0399134, 0.985258, 0.566315, 0.153864, 0.558317, 0.920538, 0.504803, 0.0154709, 0.930551, 0.699332, 0.359302, 0.597273, 0.749038, 0.54641, 0.475377, 0.532226, 0.282638, 0.686299, 0.904624, 0.711049, 0.286856, 0.165259, 0.793737, 0.947973, 0.442122, 0.585861, 0.00375097, 0.477529, 0.966832, 0.0255681, 0.850698, 0.208501, 0.157569, 0.908811, 0.427259, 0.585685, 0.358088, 0.820059, 0.636096, 0.678087, 0.111219, 0.658997, 0.0135983, 0.230142, 0.956578, 0.0798112, 0.792803, 0.00995865, 0.621058, 0.859901, 0.940691, 0.485983, 0.766231, 0.617424, 0.768601, 0.459799, 0.0779353, 0.470563, 0.414345, 0.882353, 0.838752, 0.969884, 0.365109, 0.427634, 0.842743, 0.629724, 0.376985, 0.291547, 0.205478, 0.918634, 0.0106153, 0.675243, 0.503538, 0.311834, 0.363575, 0.685241, 0.612219, 0.243538, 0.0523428, 0.255861, 0.726771, 0.547785, 0.657322, 0.822952, 0.531548, 0.509373, 0.400932, 0.486141, 0.24589, 0.523337, 0.620198, 
0.672751, 0.436069, 0.750501, 0.150979, 0.777565, 0.255165, 0.25547, 0.417277, 0.127728, 0.790892, 0.231964, 0.251233, 0.515786, 0.733995, 0.755056, 0.212614, 0.140287, 0.506838, 0.548005, 0.50844, 0.828861, 0.738998, 0.66115, 0.493259, 0.0879833, 0.0818416, 0.359093, 0.144662, 0.34801, 0.566687, 0.431741, 0.0395252, 0.972612, 0.173369, 0.282916, 0.0209703, 0.454247, 0.781002, 0.555626, 0.755128, 0.508045, 0.968286, 0.567804, 0.720546, 0.716028, 0.210509, 0.764112, 0.220195, 0.179263, 0.993463, 0.981587, 0.376688, 0.49655, 0.416868, 0.750882, 0.581115, 0.528931, 0.649702, 0.552172, 0.199754, 0.21184, 0.300756, 0.00206194, 0.790112, 0.661196, 0.407876, 0.121373, 0.0288388, 0.537629, 0.491145, 0.200387, 0.489174, 0.265322, 0.715038, 0.932368, 0.28523, 0.542864, 0.384893, 0.855611, 0.483112, 0.808174, 0.124034, 0.571804, 0.357685, 0.182198, 0.345442, 0.70914, 0.554292, 0.516797, 0.976609, 0.143118, 0.00196754, 0.389125, 0.304913, 0.896352, 0.137209, 0.897262, 0.174488, 0.945255, 0.738199, 0.446846, 0.651386, 0.103271, 0.015566, 0.669168, 0.111878, 0.720844, 0.571162, 0.414152, 0.632662, 0.124961, 0.117036, 0.547065, 0.769056, 0.212571, 0.127038, 0.653646, 0.600641, 0.894887, 0.391917, 0.510365, 0.0804136, 0.546756, 0.572669, 0.211586, 0.247839, 0.195546, 0.607518, 0.502233, 0.434904, 0.699243, 0.350656, 0.0359477, 0.491685, 0.0887684, 0.0941827, 0.722759, 0.115717, 0.266468, 0.631221, 0.157107, 0.198286, 0.226469, 0.159666, 0.128377, 0.218301, 0.361379, 0.4007, 0.533851, 0.557404, 0.0466327, 0.645502, 0.153742, 0.554059, 0.9425, 0.0661787, 0.0320266, 0.871934, 0.63689, 0.395157, 0.301419, 0.191002, 0.0203157, 0.362925, 0.418706, 0.109352, 0.127928, 0.735726, 0.85121, 0.217985, 0.72121, 0.606717, 0.969741, 0.350378, 0.945544, 0.665039, 0.496837, 0.745786, 0.686947, 0.558355, 0.651492, 0.196427, 0.381324, 0.481085, 0.270371, 0.737005, 0.542791, 0.664317, 0.845877, 0.106086, 0.599747, 0.805139, 0.314063, 0.920153, 0.890399, 0.0157717, 0.344151, 0.773477, 0.927122, 0.572308, 0.925828, 0.759559, 0.0634648, 0.278847, 0.786123, 0.896158, 0.79095, 0.0878904, 0.815251, 0.845217, 0.412765, 0.41615, 0.333214, 0.410225, 0.321003, 0.905244, 0.012862, 0.114231, 0.668392, 0.91973, 0.45456, 0.505116, 0.521361, 0.888924, 0.715811, 0.718758, 0.0619728, 0.0916167, 0.0335274, 0.0624644, 0.4784, 0.429709, 0.58511, 0.677507, 0.0806693, 0.591992, 0.246494, 0.967231, 0.527379, 0.561387, 0.253546, 0.0427814, 0.737071, 0.796151, 0.0255074, 0.105284, 0.827983, 0.535238, 0.844796, 0.841525, 0.591672, 0.0766255, 0.0881285, 0.704453, 0.671481, 0.768818, 0.111716, 0.71122, 0.950354, 0.899784, 0.433181, 0.40691, 0.503999, 0.422504, 0.581437, 0.797081, 0.531882, 0.0998619, 0.48154, 0.370861, 0.00221245, 0.410198, 0.391688, 0.947599, 0.228708, 0.0315346, 0.927713, 0.779427, 0.911971, 0.30753, 0.865715, 0.0593138, 0.350164, 0.301878, 0.72946, 0.431283, 0.357232, 0.212766, 0.71931, 0.515632, 0.816957, 0.191838, 0.775153, 0.432858, 0.959898, 0.634488, 0.622311, 0.599992, 0.512847, 0.997908, 0.939193, 0.581875, 0.560387, 0.78185, 0.285982, 0.280553, 0.308816, 0.134468, 0.321772, 0.110574, 0.489362, 0.872673, 0.0193961, 0.460953, 0.84901, 0.668887, 0.189664, 0.000549057, 0.161837, 0.72935, 0.709209, 0.00228495, 0.596407, 0.122947, 0.984808, 0.8726, 0.308435, 0.233373, 0.669192, 0.461376, 0.114877, 0.00838577, 0.926238, 0.315023, 0.307728, 0.753198, 0.415399, 0.349156, 0.421721, 0.0862714, 0.665752, 0.498863, 0.127277, 0.337532, 0.936022, 0.437087, 0.717495, 0.745443, 0.117998, 0.250283, 0.280285, 0.582203, 0.392373, 0.0847275, 
0.337317, 0.314002, 0.329854, 0.0470029, 0.407913, 0.977614, 0.688895, 0.559196, 0.334343, 0.510806, 0.209277, 0.737387, 0.226032, 0.0296062, 0.338569, 0.0356785, 0.378598, 0.479794, 0.963596, 0.357715, 0.494336, 0.0956445, 0.278095, 0.225147, 0.379982, 0.8188, 0.950299, 0.0545482, 0.635316, 0.0816566, 0.989249, 0.00408357, 0.905033, 0.367877, 0.0417027, 0.972755, 0.457357, 0.692385, 0.150804, 0.26182, 0.725535, 0.960612, 0.11631, 0.971702, 0.0398852, 0.726893, 0.594899, 0.653482, 0.785575, 0.983186, 0.900966, 0.765669, 0.654914, 0.916507, 0.928975, 0.553837, 0.457753, 0.58964, 0.00780929, 0.282989, 0.202425, 0.158142, 0.310862, 0.507121, 0.657615, 0.884899, 0.234023, 0.0502052, 0.0933516, 0.573893, 0.736496, 0.248577, 0.57518, 0.344975, 0.130153, 0.655958, 0.763193, 0.218772, 0.680001, 0.597525, 0.0542872, 0.409138, 0.573627, 0.120364, 0.738672, 0.725271, 0.936735, 0.224608, 0.188548, 0.496173, 0.929599, 0.00923076, 0.565452, 0.0899691, 0.919553, 0.475673, 0.318576, 0.220193, 0.535904, 0.347333, 0.443615, 0.145647, 0.258849, 0.855244, 0.357146, 0.624343, 0.692086, 0.846721, 0.961042, 0.148, 0.0624468, 0.559837, 0.334245, 0.728384, 0.0147112, 0.406731, 0.422222, 0.936485, 0.0063763, 0.209485, 0.709123, 0.625679, 0.80454, 0.830715, 0.860449, 0.583985, 0.764824, 0.287307, 0.885473, 0.57181, 0.155419, 0.153509, 0.188338, 0.398051, 0.0866237, 0.512296, 0.502992, 0.35682, 0.289414, 0.170634, 0.490584, 0.959767, 0.860544, 0.413453, 0.810313, 0.0113791, 0.519471, 0.731116, 0.199219, 0.81898, 0.374283, 0.783708, 0.931345, 0.294982, 0.120297, 0.276786, 0.982351, 0.649792, 0.466978, 0.67864, 0.878387, 0.358194, 0.113574, 0.792296, 0.685458, 0.182877, 0.898499, 0.724257, 0.0528478, 0.374871, 0.809123, 0.338506, 0.360038, 0.288583, 0.578386, 0.464609, 0.371332, 0.355915, 0.771267, 0.588021, 0.0448652, 0.875129, 0.513044, 0.33325, 0.715725, 0.247601, 0.141509, 0.831008, 0.98756, 0.649571, 0.86587, 0.163103, 0.340718, 0.865083, 0.0677505, 0.216519, 0.570092, 0.975661, 0.0302013, 0.975612, 0.942977, 0.00373243, 0.640335, 0.53276, 0.245543, 0.12357, 0.131662, 0.896766, 0.171882, 0.37124, 0.19251, 0.582918, 0.894718, 0.0180517, 0.368494, 0.356654, 0.293119, 0.437928, 0.99279, 0.587389, 0.373991, 0.118354, 0.703704, 0.214441, 0.944031, 0.757169, 0.459805, 0.710348, 0.290625, 0.642241, 0.908912, 0.597383, 0.847746, 0.266141, 0.954442, 0.786096, 0.584809, 0.872451, 0.189283, 0.590112, 0.103202, 0.561726, 0.22862, 0.865938, 0.571098, 0.296255, 0.722207, 0.804941, 0.428624, 0.240523, 0.545486, 0.597495, 0.793655, 0.976565, 0.941642, 0.929814, 0.246966, 0.763846, 0.914594, 0.306194, 0.82581, 0.946183, 0.1705, 0.569774, 0.340134, 0.506855, 0.99366, 0.66201, 0.526022, 0.00202823, 0.532165, 0.973131, 0.653596, 0.551471, 0.760754, 0.0511451, 0.797115, 0.455992, 0.317481, 0.212322, 0.352279, 0.314062, 0.595491, 0.397062, 0.652924, 0.790962, 0.976547, 0.0622074, 0.33661, 0.646198, 0.542818, 0.281868, 0.203457, 0.652231, 0.123915, 0.501028, 0.480124, 0.160761, 0.134233, 0.743564, 0.415994, 0.288894, 0.833903, 0.712921, 0.727619, 0.64625, 0.225502, 0.291045, 0.905906, 0.528167, 0.742347, 0.486641, 0.335503, 0.766211, 0.338197, 0.937293, 0.367452, 0.207395, 0.375009, 0.355053, 0.961887, 0.240556, 0.525981, 0.0458107, 0.0874091, 0.612792, 0.0990322, 0.745701, 0.147858, 0.312751, 0.911464, 0.324989, 0.495886, 0.348204, 0.426207, 0.863553, 0.581139, 0.965369, 0.595424, 0.699875, 0.773731, 0.341878, 0.930402, 0.279704, 0.401378, 0.747993, 0.435297, 0.768118, 0.563954, 0.016211, 0.884873, 0.500864, 0.918371, 0.763598, 
0.776229, 0.619362, 0.703753, 0.584146, 0.603731, 0.712628, 0.27401, 0.567779, 0.491112, 0.896256, 0.763065, 0.0339006, 0.848141, 0.6502, 0.73823, 0.541453, 0.350801, 0.0428536, 0.164055, 0.108277, 0.341676, 0.15516, 0.24938, 0.426811, 0.697442, 0.907769, 0.497783, 0.99617, 0.552364, 0.864926, 0.5084, 0.488835, 0.204708, 0.772011, 0.500959, 0.988587, 0.653905, 0.0344443, 0.518279, 0.231005, 0.618156, 0.50635, 0.58547, 0.416673, 0.876748, 0.318371, 0.708886, 0.49613, 0.700261, 0.93264, 0.116976, 0.486153, 0.192922, 0.85528, 0.86345, 0.520997, 0.02331, 0.458086, 0.223506, 0.577211, 0.552242, 0.0102861, 0.160216, 0.57318, 0.424467, 0.437013, 0.736506, 0.835531, 0.0998913, 0.0845583, 0.972808, 0.784317, 0.72042, 0.0886273, 0.42117, 0.395213, 0.234131, 0.885903, 0.169609, 0.451673, 0.0327804, 0.199422, 0.311461, 0.603402, 0.382815, 0.518431, 0.413673, 0.598206, 0.364313, 0.779445, 0.341969, 0.167242, 0.590968, 0.292988, 0.225162, 0.20739, 0.938427, 0.669011, 0.969483, 0.387452, 0.249334, 0.542592, 0.384693, 0.134744, 0.428328, 0.701529, 0.481345, 0.920091, 0.904572, 0.840509, 0.0919866, 0.206846, 0.852618, 0.584072, 0.955355, 0.741576, 0.629663, 0.612411, 0.667856, 0.845727, 0.675872, 0.594167, 0.975577, 0.8698, 0.158647, 0.703157, 0.584215, 0.00491956, 0.798266, 0.8218, 0.414095, 0.993723, 0.890489, 0.708743, 0.668226, 0.810432, 0.262654, 0.832021, 0.115476, 0.00390665, 0.362986, 0.492119, 0.870461, 0.269166, 0.172225, 0.564672, 0.400391, 0.740482, 0.906482, 0.328487, 0.989471, 0.151229, 0.859692, 0.25861, 0.0940253, 0.13692, 0.0757249, 0.998619, 0.842025, 0.322892, 0.286659, 0.381916, 0.985287, 0.515331, 0.916071, 0.528695, 0.211765, 0.390794, 0.496102, 0.12651, 0.0528892, 0.503706, 0.988945, 0.845423, 0.107962, 0.402534, 0.0852831, 0.066165, 0.829246, 0.828779, 0.536622, 0.289877, 0.872261, 0.524916, 0.931103, 0.00457239, 0.620804, 0.348598, 0.918016, 0.381111, 0.282309, 0.172538, 0.127281, 0.960613, 0.736746, 0.264226, 0.419735, 0.830129, 0.544679, 0.472383, 0.404834, 0.398699, 0.0316545, 0.892427, 0.50956, 0.457152, 0.529312, 0.9592, 0.888618, 0.804808, 0.159292, 0.436697, 0.209112, 0.422998, 0.426475, 0.663998, 0.292566, 0.161049, 0.703247, 0.495259, 0.343341, 0.251313, 0.77333, 0.0113886, 0.601211, 0.576168, 0.0706262, 0.360842, 0.647657, 0.856267, 0.411357, 0.908686, 0.35822, 0.325975, 0.825719, 0.765477, 0.801352, 0.203915, 0.63614, 0.228641, 0.770325, 0.946778, 0.818214, 0.914332, 0.905681, 0.352022, 0.994568, 0.614553, 0.952883, 0.335177, 0.926303, 0.969833, 0.0983308, 0.838996, 0.182769, 0.979836, 0.486662, 0.602633, 0.626398, 0.159378, 0.216179, 0.544848, 0.0878025, 0.244399, 0.546144, 0.966008, 0.621244, 0.96941, 0.603416, 0.33022, 0.803729, 0.289634, 0.259416, 0.931589, 0.638398, 0.0338544, 0.0435814, 0.257008, 0.812442, 0.727103, 0.126367, 0.682423, 0.458193, 0.00417282, 0.223973, 0.0692229, 0.62723, 0.0117099, 0.590087, 0.818927, 0.247285, 0.42548, 0.733347, 0.0664999, 0.39956, 0.883227, 0.695966, 0.816119, 0.0305324, 0.756018, 0.689714, 0.894324, 0.0645479, 0.196347, 0.155635, 0.672791, 0.344293, 0.423109, 0.415881, 0.204927, 0.665713, 0.579229, 0.658633, 0.845824, 0.985636, 0.426899, 0.807134, 0.839088, 0.0107139, 0.14249, 0.51986, 0.714971, 0.0845383, 0.571933, 0.590329, 0.107127, 0.438811, 0.165975, 0.76789, 0.290438, 0.705135, 0.026361, 0.529545, 0.823216, 0.388826, 0.615675, 0.517143, 0.566942, 0.703386, 0.444429, 0.6027, 0.200455, 0.722011, 0.422342, 0.181544, 0.279475, 0.582627, 0.498877, 0.42955, 0.426562, 0.103993, 0.285546, 0.414382, 0.265607, 0.541157, 0.223715, 
0.138597, 0.608746, 0.470125, 0.985783, 0.150817, 0.46392, 0.000200871, 0.787561, 0.0966264, 0.348749, 0.645341, 0.560252, 0.170749, 0.285654, 0.0573061, 0.626963, 0.665299, 0.949342, 0.368927, 0.616642, 0.899843, 0.560864, 0.0500526, 0.636431, 0.609077, 0.221059, 0.944734, 0.83399, 0.197986, 0.102107, 0.304868, 0.99269, 0.393463, 0.547096, 0.749555, 0.866702, 0.54016, 0.761694, 0.00293374, 0.259535, 0.912372, 0.271655, 0.816248, 0.868964, 0.0294961, 0.104257, 0.86573, 0.276186, 0.707999, 0.92271, 0.841975, 0.526159, 0.534731, 0.685051, 0.563805, 0.538375, 0.00772071, 0.285702, 0.187482, 0.54118, 0.971558, 0.88197, 0.894442, 0.934236, 0.878159, 0.599847, 0.540383, 0.816625, 0.00306986, 0.708378, 0.447841, 0.0120822, 0.323399, 0.465694, 0.471394, 0.863878, 0.430542, 0.463848, 0.297583, 0.863157, 0.564637, 0.355597, 0.793509, 0.147592, 0.953117, 0.950615, 0.643658, 0.940714, 0.0628789, 0.205232, 0.204428, 0.969652, 0.767973, 0.0415497, 0.932636, 0.0573255, 0.733433, 0.350563, 0.903375, 0.719947, 0.911418, 0.0659568, 0.259685, 0.610348, 0.0463574, 0.554181, 0.940194, 0.651812, 0.196356, 0.344841, 0.834639, 0.430794, 0.953926, 0.465358, 0.739098, 0.26821, 0.378973, 0.690384, 0.581762, 0.270614, 0.797406, 0.42754, 0.290747, 0.979932, 0.654372, 0.354304, 0.0665159, 0.429663, 0.565186, 0.0834735, 0.404492, 0.835542, 0.110146, 0.231637, 0.0152121, 0.763023, 0.0616657, 0.450835, 0.10849, 0.300411, 0.931281, 0.498653, 0.688545, 0.78672, 0.464066, 0.291882, 0.311086, 0.482199, 0.275523, 0.0585518, 0.668633, 0.508012, 0.127602, 0.51531, 0.718507, 0.769994, 0.672421, 0.879333, 0.274856, 0.874205, 0.243628, 0.329747, 0.706719, 0.951337, 0.39424, 0.319537, 0.615233, 0.666551, 0.140004, 0.986185, 0.24362, 0.260255, 0.775704, 0.318598, 0.634543, 0.373432, 0.23914, 0.960406, 0.700363, 0.0150745, 0.207158, 0.0973178, 0.172616, 0.133483, 0.854305, 0.968142, 0.998083, 0.729599, 0.737585, 0.00674988, 0.0817041, 0.506241, 0.544573, 0.615566, 0.858829, 0.45843, 0.792255, 0.183437, 0.863698, 0.648547, 0.13791, 0.392648, 0.921829, 0.562101, 0.709357, 0.650223, 0.425849, 0.447496, 0.464585, 0.318584, 0.239279, 0.894132, 0.0399246, 0.574067, 0.00205177, 0.339859, 0.272871, 0.711183, 0.870697, 0.674962, 0.529911, 0.676338, 0.71056, 0.658444, 0.29523, 0.23069, 0.817284, 0.0576263, 0.555746, 0.868494, 0.0366979, 0.742614, 0.865596, 0.159675, 0.37778, 0.17525, 0.425312, 0.847687, 0.37265, 0.474997, 0.838556, 0.698778, 0.435983, 0.241735, 0.639332, 0.090232, 0.815012, 0.469081, 0.090894, 0.759303, 0.644909, 0.382755, 0.509077, 0.905721, 0.848911, 0.00579584, 0.517374, 0.314455, 0.090473, 0.73679, 0.666728, 0.961911, 0.990351, 0.33643, 0.0883509, 0.300408, 0.0702269, 0.844034, 0.569015, 0.465901, 0.0748921, 0.197839, 0.380249, 0.759736, 0.405091, 0.220558, 0.805229, 0.958237, 0.745353, 0.725116, 0.596741, 0.604463, 0.415037, 0.42541, 0.498128, 0.186665, 0.405051, 0.28792, 0.0203449, 0.195992, 0.921243, 0.191876, 0.818855, 0.975756, 0.748671, 0.813616, 0.987465, 0.082691, 0.231378, 0.199104, 0.202423, 0.367678, 0.291812, 0.763446, 0.986363, 0.865835, 0.776336, 0.964528, 0.451582, 0.281876, 0.82759, 0.804063, 0.020807, 0.0420121, 0.869471, 0.897517, 0.842594, 0.538692, 0.639519, 0.00635103, 0.939893, 0.668518, 0.521784, 0.58292, 0.00522094, 0.664122, 0.16847, 0.321834, 0.331613, 0.83081, 0.395113, 0.781049, 0.693139, 0.348966, 0.150701, 0.664904, 0.654168, 0.996648, 0.0939377, 0.724414, 0.937661, 0.560264, 0.319581, 0.762135, 0.0923492, 0.965276, 0.0243539, 0.100302, 0.244656, 0.531123, 0.71056, 0.166116, 0.469231, 0.210765, 
0.206743, 0.962878, 0.607144, 0.305528, 0.453014, 0.727968, 0.176804, 0.116808, 0.00258623, 0.89623, 0.817662, 0.868491, 0.270113, 0.699267, 0.90892, 0.336316, 0.210581, 0.0204948, 0.791701, 0.31052, 0.24179, 0.553815, 0.590352, 0.723638, 0.912787, 0.402025, 0.36912, 0.141353, 0.351457, 0.0349154, 0.523278, 0.112241, 0.260141, 0.982615, 0.234799, 0.395018, 0.522477, 0.053424, 0.107475, 0.363252, 0.204558, 0.855529, 0.929576, 0.647918, 0.135676, 0.15198, 0.404128, 0.433395, 0.166823, 0.670806, 0.0906192, 0.851992, 0.694199, 0.671856, 0.597631, 0.20906, 0.179516, 0.0647193, 0.0296873, 0.834149, 0.136419, 0.706952, 0.664221, 0.548566, 0.401391, 0.526223, 0.224918, 0.445352, 0.0370809, 0.176695, 0.541017, 0.796263, 0.796275, 0.913017, 0.949191, 0.619724, 0.51641, 0.906705, 0.142056, 0.967786, 0.423085, 0.482451, 0.288789, 0.836869, 0.120046, 0.528131, 0.625834, 0.234306, 0.196148, 0.119733, 0.734559, 0.965229, 0.562937, 0.787852, 0.675926, 0.029867, 0.241398, 0.802881, 0.927945, 0.923412, 0.381762, 0.643686, 0.0761685, 0.838634, 0.467641, 0.681804, 0.63528, 0.653475, 0.975469, 0.380416, 0.320755, 0.993272, 0.548512, 0.135106, 0.923513, 0.465571, 0.3169, 0.167756, 0.257546, 0.433034, 0.739972, 0.960899, 0.608396, 0.930492, 0.735647, 0.463604, 0.362737, 0.884576, 0.926171, 0.0598129, 0.632085, 0.565815, 0.0310972, 0.752345, 0.818162, 0.308334, 0.175584, 0.750346, 0.0552021, 0.265004, 0.233168, 0.593114, 0.834025, 0.276204, 0.138598, 0.197715, 0.178756, 0.667517, 0.416278, 0.821277, 0.358993, 0.968456, 0.0335809, 0.266034, 0.882204, 0.522069, 0.630596, 0.211127, 0.740852, 0.747831, 0.0644943, 0.0345094, 0.329728, 0.41275, 0.21704, 0.490708, 0.985969, 0.392695, 0.907899, 0.643262, 0.0143452, 0.791734, 0.23178, 0.290934, 0.105063, 0.399584, 0.159166, 0.614385, 0.935482, 0.252265, 0.466793, 0.0633298, 0.622608, 0.764519, 0.392718, 0.438837, 0.389323, 0.591578, 0.989194, 0.527337, 0.660731, 0.889085, 0.619298, 0.600918, 0.379091, 0.741526, 0.0862965, 0.492049, 0.88669, 0.95967, 0.521622, 0.252513, 0.864143, 0.909911, 0.2173, 0.478902, 0.358323, 0.643247, 0.284693, 0.266228, 0.163657, 0.983907, 0.336515, 0.651207, 0.961555, 0.363472, 0.325315, 0.621677, 0.352404, 0.326053, 0.231271, 0.249802, 0.278977, 0.832877, 0.722816, 0.599307, 0.812224, 0.853521, 0.896083, 0.896791, 0.0556386, 0.532463, 0.0892749, 0.741247, 0.425154, 0.827661, 0.463396, 0.333345, 0.36799, 0.0731207, 0.998054, 0.756167, 0.985519, 0.155195, 0.574123, 0.128731, 0.15453, 0.663942, 0.15609, 0.647585, 0.497078, 0.138329, 0.20556, 0.707234, 0.0701173, 0.32703, 0.0736788, 0.151086, 0.034621, 0.775824, 0.395119, 0.491423, 0.811292, 0.555471, 0.399263, 0.896201, 0.168861, 0.580457, 0.837932, 0.670103, 0.050994, 0.293822, 0.124163, 0.339868, 0.979219, 0.544389, 0.519155, 0.508206, 0.184331, 0.584999, 0.730872, 0.980492, 0.536113, 0.616493, 0.275979, 0.0288866, 0.40827, 0.578146, 0.19725, 0.265708, 0.22907, 0.149651, 0.979306, 0.981751, 0.0337527, 0.940704, 0.636961, 0.66884, 0.397068, 0.600011, 0.322185, 0.247842, 0.903778, 0.465529, 0.0182291, 0.741215, 0.355892, 0.809661, 0.148362, 0.846179, 0.303348, 0.790928, 0.0170574, 0.607165, 0.876478, 0.00035118, 0.014497, 0.0108556, 0.919901, 0.994195, 0.163055, 0.90789, 0.909241, 0.701163, 0.157759, 0.389993, 0.65642, 0.604372, 0.822426, 0.741058, 0.516607, 0.996425, 0.672762, 0.20792, 0.565459, 0.693866, 0.289497, 0.925893, 0.928514, 0.86954, 0.775174, 0.112372, 0.134064, 0.43187, 0.779967, 0.741867, 0.813394, 0.928731, 0.498083, 0.917405, 0.498005, 0.789323, 0.586336, 0.44808, 0.104959, 
0.827014, 0.229818, 0.888603, 0.847413, 0.940034, 0.593459, 0.431248, 0.566338, 0.367776, 0.802815, 0.564667, 0.703991, 0.836249, 0.214939, 0.29843, 0.843575, 0.548418, 0.00429634, 0.260532, 0.783796, 0.0827653, 0.909126, 0.753129, 0.112608, 0.600541, 0.664346, 0.23046, 0.743698, 0.327555, 0.151545, 0.700316, 0.315323, 0.643648, 0.580651, 0.121066, 0.592839, 0.484781, 0.467554, 0.179179, 0.604391, 0.717236, 0.745805, 0.502522, 0.0460628, 0.447133, 0.580186, 0.93037, 0.321131, 0.100553, 0.954835, 0.267758, 0.502147, 0.534792, 0.211809, 0.160501, 0.171858, 0.88894, 0.798534, 0.194293, 0.400821, 0.0234603, 0.119979, 0.824951, 0.360874, 0.518219, 0.415773, 0.45019, 0.455195, 0.585127, 0.256568, 0.204331, 0.414101, 0.69395, 0.2381, 0.261185, 0.209752, 0.0115052, 0.112926, 0.943212, 0.574441, 0.514057, 0.544136, 0.576757, 0.946525, 0.855984, 0.366236, 0.616032, 0.825127, 0.854987, 0.464268, 0.305881, 0.10641, 0.686624, 0.43191, 0.839187, 0.375332, 0.915516, 0.764687, 0.680906, 0.909116, 0.631644, 0.324785, 0.758552, 0.500855, 0.236844, 0.986931, 0.0211137, 0.695021, 0.901116, 0.308871, 0.216388, 0.304568, 0.0111088, 0.617964, 0.244016, 0.895608, 0.116573, 0.38365, 0.109429, 0.440777, 0.376023, 0.630833, 0.0729521, 0.827809, 0.150113, 0.87315, 0.281883, 0.115363, 0.0853128, 0.933364, 0.65708, 0.017312, 0.872395, 0.247909, 0.868631, 0.779587, 0.454693, 0.431712, 0.325233, 0.901397, 0.367187, 0.718744, 0.407726, 0.792426, 0.254828, 0.267451, 0.809714, 0.88461, 0.54456, 0.824247, 0.518043, 0.800778, 0.445437, 0.014163, 0.64314, 0.79269, 0.436548, 0.204399, 0.311823, 0.656984, 0.0499323, 0.662787, 0.174967, 0.701401, 0.922061, 0.436045, 0.884565, 0.992981, 0.412914, 0.295808, 0.601825, 0.948338, 0.0759658, 0.902624, 0.40913, 0.284306, 0.651176, 0.86046, 0.0900332, 0.412146, 0.613414, 0.501614, 0.118202, 0.679276, 0.254333, 0.797349, 0.865418, 0.727564, 0.891665, 0.00370642, 0.566895, 0.701209, 0.995844, 0.030836, 0.813926, 0.501887, 0.260956, 0.184355, 0.258789, 0.960773, 0.941786, 0.707603, 0.324272, 0.728184, 0.412306, 0.114169, 0.95476, 0.224615, 0.515423, 0.348148, 0.501832, 0.879398, 0.744902, 0.723606, 0.101973, 0.773845, 0.947844, 0.414223, 0.842809, 0.10853, 0.0228851, 0.672875, 0.807232, 0.0990737, 0.293034, 0.0116935, 0.294261, 0.71371, 0.566896, 0.665978, 0.0574669, 0.383927, 0.292232, 0.659186, 0.25795, 0.667451, 0.922412, 0.65384, 0.299946, 0.225055, 0.904578, 0.571749, 0.438771, 0.13227, 0.584159, 0.327093, 0.838427, 0.252241, 0.608643, 0.637982, 0.284738, 0.60947, 0.23709, 0.895966, 0.130962, 0.881189, 0.16436, 0.490026, 0.348695, 0.344268, 0.611604, 0.0924971, 0.659324, 0.258519, 0.323436, 0.218884, 0.717656, 0.563147, 0.491481, 0.638029, 0.187785, 0.527152, 0.409818, 0.0796029, 0.550197, 0.106779, 0.863336, 0.89693, 0.672314, 0.461292, 0.534388, 0.838775, 0.760253, 0.794555, 0.175541, 0.46315, 0.0430115, 0.292334, 0.409738, 0.534336, 0.607629, 0.259045, 0.853545, 0.822796, 0.438134, 0.277195, 0.375651, 0.692985, 0.707428, 0.295825, 0.974419, 0.556508, 0.493008, 0.456633, 0.441367, 0.91749, 0.93552, 0.71973, 0.381575, 0.108175, 0.287838, 0.723426, 0.688044, 0.409911, 0.826705, 0.399584, 0.045195, 0.522587, 0.974563, 0.560133, 0.165759, 0.223891, 0.104598, 0.596698, 0.350797, 0.751825, 0.806949, 0.0926424, 0.9807, 0.234214, 0.476828, 0.627716, 0.334558, 0.659462, 0.292002, 0.215695, 0.478504, 0.842043, 0.208658, 0.574693, 0.706193, 0.182325, 0.514058, 0.742052, 0.430874, 0.655785, 0.776095, 0.427489, 0.573386, 0.132211, 0.724419, 0.374021, 0.642952, 0.509729, 0.174314, 0.152859, 
0.840252, 0.820897, 0.913226, 0.760157, 0.289457, 0.784954, 0.302112, 0.801207, 0.453753, 0.789927, 0.649221, 0.528698, 0.871934, 0.72262, 0.880094, 0.514637, 0.905289, 0.822404, 0.475876, 0.145079, 0.277701, 0.582699, 0.0200736, 0.699082, 0.623793, 0.549057, 0.379624, 0.984244, 0.970339, 0.497237, 0.381554, 0.359354, 0.00120478, 0.719147, 0.286087, 0.465576, 0.0458521, 0.667311, 0.509316, 0.871826, 0.437869, 0.568979, 0.380274, 0.165264, 0.59324, 0.399523, 0.182252, 0.345438, 0.586487, 0.114168, 0.84029, 0.208268, 0.782768, 0.271256, 0.813751, 0.778691, 0.386482, 0.445026, 0.344706, 0.295499, 0.917742, 0.017588, 0.442123, 0.89954, 0.651926, 0.6297, 0.0820331, 0.29287, 0.0730486, 0.469891, 0.930979, 0.751474, 0.914331, 0.540801, 0.561634, 0.848007, 0.373144, 0.313446, 0.0753001, 0.970079, 0.561994, 0.239609, 0.536868, 0.315583, 0.554862, 0.756354, 0.145848, 0.666259, 0.124349, 0.31953, 0.438672, 0.27094, 0.711085, 0.531289, 0.877004, 0.265989, 0.631045, 0.88132, 0.685856, 0.261297, 0.496775, 0.710571, 0.104709, 0.498863, 0.355969, 0.610158, 0.17301, 0.978776, 0.750499, 0.115035, 0.918091, 0.402919, 0.634628, 0.852173, 0.406181, 0.798554, 0.621079, 0.824542, 0.0592027, 0.792443, 0.931727, 0.218676, 0.739214, 0.680902, 0.662684, 0.584827, 0.0308682, 0.753073, 0.629906, 0.961798, 0.0154862, 0.509855, 0.50579, 0.154932, 0.445066, 0.797666, 0.0980351, 0.381174, 0.610172, 0.672341, 0.132992, 0.622815, 0.452504, 0.857514, 0.747917, 0.783866, 0.236015, 0.615881, 0.34953, 0.474907, 0.569256, 0.589659, 0.98423, 0.13752, 0.333018, 0.357911, 0.480973, 0.0646463, 0.793046, 0.644213, 0.974062, 0.707243, 0.273168, 0.491179, 0.548453, 0.508097, 0.322258, 0.719144, 0.219058, 0.254633, 0.452233, 0.763223, 0.21776, 0.850223, 0.575237, 0.776773, 0.796148, 0.292721, 0.547291, 0.34993, 0.540902, 0.247358, 0.632989, 0.658463, 0.648706, 0.468378, 0.41613, 0.0584094, 0.458238, 0.49945, 0.368629, 0.158795, 0.86675, 0.187539, 0.247226, 0.314224, 0.259569, 0.837135, 0.0354982, 0.312818, 0.674403, 0.265776, 0.682042, 0.259623, 0.933283, 0.912524, 0.619224, 0.472372, 0.474608, 0.38423, 0.760945, 0.220489, 0.783338, 0.00802285, 0.423839, 0.782985, 0.724448, 0.947651, 0.272443, 0.459552, 0.041788, 0.183062, 0.965342, 0.190206, 0.84062, 0.274621, 0.885556, 0.550416, 0.154001, 0.186097, 0.855902, 0.673944, 0.470926, 0.960764, 0.148265, 0.484413, 0.363845, 0.869619, 0.376051, 0.768723, 0.25102, 0.508597, 0.368859, 0.313424, 0.297966, 0.124184, 0.526412, 0.100375, 0.0850555, 0.209241, 0.985646, 0.467479, 0.741716, 0.880919, 0.239868, 0.125569, 0.766085, 0.414694, 0.155826, 0.661832, 0.898435, 0.32218, 0.182857, 0.588117, 0.905859, 0.222751, 0.713234, 0.960351, 0.570073, 0.26031, 0.501943, 0.0928177, 0.993911, 0.193006, 0.664554, 0.955191, 0.589961, 0.230473, 0.584139, 0.324643, 0.764747, 0.783914, 0.371301, 0.633118, 0.312599, 0.821442, 0.867296, 0.828298, 0.789692, 0.755405, 0.748992, 0.628563, 0.901995, 0.48306, 0.0464854, 0.0322784, 0.96636, 0.974842, 0.0860964, 0.440699, 0.161384, 0.865728, 0.0869808, 0.797345, 0.517103, 0.436557, 0.192385, 0.0218754, 0.701476, 0.0983975, 0.165935, 0.0729489, 0.661953, 0.124412, 0.973386, 0.526955, 0.710206, 0.917344, 0.944149, 0.568388, 0.933564, 0.286672, 0.980303, 0.907433, 0.875405, 0.743237, 0.402325, 0.0567592, 0.832722, 0.179377, 0.801224, 0.883203, 0.381213, 0.0258982, 0.117356, 0.960031, 0.984689, 0.625665, 0.0881564, 0.115545, 0.0249928, 0.497614, 0.234678, 0.227404, 0.400364, 0.233352, 0.585983, 0.946101, 0.717634, 0.938645, 0.923234, 0.626302, 0.755186, 0.822275, 0.931542, 
0.0205411, 0.278447, 0.635083, 0.13394, 0.555069, 0.350612, 0.128086, 0.889004, 0.676447, 0.636477, 0.225248, 0.556167, 0.377751, 0.574902, 0.424374, 0.253218, 0.60506, 0.726014, 0.384679, 0.380904, 0.57604, 0.176138, 0.613727, 0.88269, 0.620452, 0.942352, 0.559772, 0.914514, 0.0597413, 0.783225, 0.124257, 0.147811, 0.99078, 0.22325, 0.0241154, 0.308585, 0.462974, 0.550367, 0.949106, 0.580759, 0.253475, 0.758766, 0.7966, 0.801193, 0.537815, 0.148124, 0.958577, 0.370819, 0.877432, 0.271177, 0.61121, 0.169104, 0.279981, 0.721685, 0.424901, 0.462961, 0.74627, 0.537585, 0.869691, 0.420511, 0.458058, 0.799109, 0.344508, 0.169611, 0.0256076, 0.819881, 0.468481, 0.382126, 0.576242, 0.328457, 0.2908, 0.92422, 0.246153, 0.174499, 0.162325, 0.848084, 0.813024, 0.598717, 0.081141, 0.699845, 0.616717, 0.427305, 0.258407, 0.812078, 0.599774, 0.441849, 0.871452, 0.921642, 0.0925834, 0.663771, 0.806246, 0.0799521, 0.53988, 0.802581, 0.822449, 0.227303, 0.629688, 0.769725, 0.6312, 0.776404, 0.0563044, 0.668515, 0.10632, 0.482832, 0.664467, 0.17943, 0.155185, 0.498768, 0.624199, 0.891363, 0.413369, 0.329932, 0.972195, 0.919222, 0.0372321, 0.172684, 0.962581, 0.176423, 0.655037, 0.535664, 0.0590107, 0.150341, 0.532048, 0.327029, 0.119509, 0.288328, 0.421066, 0.550553, 0.0739355, 0.489525, 0.29824, 0.0957491, 0.26151, 0.2643, 0.13023, 0.0565316, 0.589529, 0.963704, 0.896324, 0.778048, 0.125465, 0.716692, 0.737858, 0.397445, 0.913826, 0.144997, 0.371797, 0.783237, 0.038625, 0.305142, 0.445822, 0.352947, 0.940223, 0.519356, 0.119558, 0.221171, 0.752677, 0.465037, 0.433192, 0.951326, 0.00174407, 0.000852478, 0.326694, 0.836998, 0.551499, 0.290006, 0.938063, 0.742529, 0.98151, 0.777035, 0.258349, 0.673026, 0.963895, 0.179765, 0.396741, 0.528501, 0.282647, 0.77735, 0.0371651, 0.905108, 0.540387, 0.0731417, 0.626844, 0.594055, 0.287718, 0.793243, 0.668974, 0.387761, 0.83005, 0.682245, 0.707271, 0.934249, 0.774444, 0.538811, 0.204792, 0.475236, 0.778051, 0.368142, 0.819584, 0.583842, 0.114981, 0.819753, 0.425623, 0.982429, 0.492697, 0.686498, 0.461096, 0.90238, 0.202892, 0.490347, 0.322927, 0.202239, 0.297956, 0.951061, 0.130693, 0.694167, 0.553036, 0.839263, 0.694583, 0.422681, 0.703303, 0.639276, 0.407163, 0.599419, 0.951824, 0.525206, 0.75019, 0.106609, 0.0389167, 0.489264, 0.225616, 0.0821625, 0.293815, 0.0319412, 0.655122, 0.168102, 0.81337, 0.806153, 0.802901, 0.660158, 0.00617115, 0.109757, 0.62557, 0.0245337, 0.311922, 0.398968, 0.480767, 0.580234, 0.680186, 0.824447, 0.775051, 0.112181, 0.575273, 0.868062, 0.93091, 0.615659, 0.186859, 0.231254, 0.66266, 0.168416, 0.615304, 0.612983, 0.911719, 0.498838, 0.955301, 0.993775, 0.40017, 0.540156, 0.873754, 0.0377319, 0.582286, 0.444224, 0.990661, 0.601351, 0.746164, 0.840231, 0.9379, 0.548204, 0.78896, 0.458155, 0.309218, 0.666261, 0.284855, 0.622254, 0.457914, 0.921579, 0.74251, 0.636525, 0.644134, 0.774124, 0.15566, 0.889489, 0.743138, 0.9135, 0.477348, 0.605992, 0.57831, 0.466674, 0.752416, 0.877237, 0.278309, 0.753585, 0.871845, 0.901358, 0.0586715, 0.434308, 0.117971, 0.755632, 0.00294837, 0.566496, 0.246941, 0.53662, 0.478259, 0.974853, 0.556342, 0.00517481, 0.523726, 0.684089, 0.739755, 0.486899, 0.544261, 0.264827, 0.877572, 0.87167, 0.262036, 0.139675, 0.515183, 0.157899, 0.913045, 0.798245, 0.0503126, 0.505807, 0.983327, 0.705224, 0.517085, 0.261182, 0.0446573, 0.227707, 0.352375, 0.0515031, 0.754243, 0.698194, 0.180565, 0.439551, 0.83348, 0.670013, 0.237351, 0.609419, 0.397373, 0.576492, 0.961343, 0.719049, 0.927943, 0.989281, 0.34384, 0.787242, 
0.538397, 0.347012, 0.0220672, 0.423566, 0.907935, 0.477733, 0.20742, 0.859281, 0.117473, 0.909488, 0.247089, 0.553861, 0.928132, 0.89112, 0.816372, 0.0238718, 0.752407, 0.438752, 0.122816, 0.665924, 0.277893, 0.554107, 0.284287, 0.70918, 0.297723, 0.260358, 0.0712148, 0.848991, 0.49641, 0.440257, 0.628057, 0.124644, 0.100055, 0.380851, 0.622258, 0.657038, 0.901209, 0.123963, 0.841385, 0.303333, 0.09721, 0.884663, 0.714303, 0.46114, 0.545124, 0.780385, 0.316296, 0.680153, 0.115062, 0.502013, 0.552295, 0.796584, 0.566508, 0.930178, 0.52719, 0.801343, 0.0935308, 0.27279, 0.595946, 0.174402, 0.770938, 0.322567, 0.170058, 0.970148, 0.757813, 0.0681377, 0.0450165, 0.602497, 0.782329, 0.950679, 0.782467, 0.0432937, 0.291854, 0.809315, 0.178888, 0.423167, 0.435534, 0.359823, 0.428627, 0.333091, 0.124443, 0.350348, 0.191246, 0.617864, 0.799096, 0.426804, 0.772329, 0.43208, 0.906347, 0.257781, 0.716487, 0.180127, 0.623993, 0.672109, 0.239313, 0.488359, 0.479487, 0.29074, 0.392749, 0.827346, 0.705263, 0.265863, 0.982821, 0.989852, 0.37734, 0.392601, 0.534571, 0.376238, 0.962671, 0.740355, 0.154218, 0.866869, 0.60661, 0.471131, 0.789162, 0.679582, 0.818826, 0.519382, 0.199361, 0.454289, 0.0818061, 0.67688, 0.430039, 0.215616, 0.812701, 0.776465, 0.243733, 0.461079, 0.424431, 0.64718, 0.867117, 0.639368, 0.455398, 0.363037, 0.137118, 0.754838, 0.932655, 0.0862599, 0.699841, 0.377795, 0.570325, 0.251683, 0.547029, 0.0242767, 0.371165, 0.55267, 0.869402, 0.0202103, 0.994045, 0.360752, 0.619197, 0.0180576, 0.56193, 0.957328, 0.246113, 0.868871, 0.242305, 0.546781, 0.906181, 0.643265, 0.433022, 0.663056, 0.0471264, 0.446656, 0.0022702, 0.666057, 0.0669204, 0.215709, 0.493289, 0.904332, 0.385009, 0.334148, 0.0627965, 0.954187, 0.0200881, 0.443694, 0.880873, 0.266442, 0.771994, 0.950973, 0.605418, 0.636895, 0.543338, 0.908259, 0.706086, 0.777986, 0.744501, 0.3056, 0.379276, 0.178968, 0.0348583, 0.658162, 0.24641, 0.966883, 0.961268, 0.656626, 0.415952, 0.993531, 0.974672, 0.412589, 0.347602, 0.820219, 0.254617, 0.562052, 0.645303, 0.0206252, 0.494782, 0.459434, 0.484668, 0.243249, 0.657745, 0.54916, 0.704609, 0.900557, 0.385821, 0.309218, 0.685758, 0.518557, 0.778011, 0.914133, 0.619028, 0.206377, 0.844328, 0.299949, 0.630885, 0.407831, 0.305057, 0.828275, 0.171423, 0.639153, 0.0211753, 0.484988, 0.26194, 0.631163, 0.913255, 0.450585, 0.715113, 0.825757, 0.0933041, 0.10379, 0.601412, 0.444863, 0.439811, 0.783182, 0.177187, 0.693553, 0.554622, 0.612014, 0.224615, 0.983296, 0.288059, 0.101529, 0.64367, 0.559435, 0.682705, 0.12073, 0.18114, 0.131592, 0.923007, 0.447648, 0.557072, 0.801549, 0.0532496, 0.793354, 0.937405, 0.731906, 0.112107, 0.783417, 0.740857, 0.542819, 0.562909, 0.499708, 0.666301, 0.0777642, 0.0500464, 0.938406, 0.457906, 0.857416, 0.494227, 0.53519, 0.192868, 0.175715, 0.309725, 0.691308, 0.483656, 0.779537, 0.798791, 0.794647, 0.71655, 0.133094, 0.732047, 0.797287, 0.522583, 0.764146, 0.715243, 0.4782, 0.25779, 0.605541, 0.235435, 0.743776, 0.945808, 0.663589, 0.965508, 0.6862, 0.640684, 0.536796, 0.94945, 0.680628, 0.916401, 0.0312525, 0.00242753, 0.610207, 0.261788, 0.0549341, 0.253116, 0.882896, 0.521907, 0.222643, 0.509067, 0.0546337, 0.445881, 0.534023, 0.690115, 0.714786, 0.613419, 0.524532, 0.374039, 0.868862, 0.841833, 0.580182, 0.626474, 0.204511, 0.137264, 0.232003, 0.747401, 0.189743, 0.297985, 0.714165, 0.711, 0.272648, 0.857174, 0.692015, 0.361292, 0.960424, 0.8561, 0.474445, 0.490332, 0.784748, 0.606424, 0.602914, 0.0474436, 0.699929, 0.0130786, 0.0139364, 0.864881, 0.529146, 
0.708899, 0.654413, 0.919844, 0.40379, 0.201353, 0.0891713, 0.893179, 0.602878, 0.580059, 0.94724, 0.837759, 0.140837, 0.850607, 0.0862325, 0.706289, 0.569529, 0.452111, 0.4408, 0.936718, 0.432451, 0.378841, 0.970505, 0.46226, 0.642439, 0.329122, 0.129285, 0.868897, 0.245408, 0.75177, 0.0482141, 0.691092, 0.305157, 0.142369, 0.111204, 0.760218, 0.533274, 0.705933, 0.0305452, 0.813882, 0.905629, 0.0983319, 0.153983, 0.449431, 0.583974, 0.084071, 0.953052, 0.0759673, 0.133519, 0.871546, 0.0149097, 0.362542, 0.243673, 0.902765, 0.596286, 0.496029, 0.419529, 0.178434, 0.544694, 0.572879, 0.00291786, 0.885841, 0.851934, 0.841815, 0.9137, 0.516152, 0.720424, 0.965318, 0.596111, 0.796458, 0.585497, 0.295585, 0.60769, 0.350821, 0.54872, 0.992727, 0.632096, 0.448442, 0.705593, 0.582052, 0.217162, 0.34433, 0.82011, 0.408773, 0.224744, 0.859523, 0.238972, 0.430238, 0.425408, 0.885283, 0.924164, 0.986667, 0.152407, 0.093156, 0.997408, 0.90935, 0.358718, 0.412118, 0.172524, 0.8443, 0.0170455, 0.329667, 0.237855, 0.220471, 0.746387, 0.886045, 0.902062, 0.104344, 0.672009, 0.184057, 0.754634, 0.58176, 0.252119, 0.312642, 0.700702, 0.583266, 0.67987, 0.253873, 0.790255, 0.538543, 0.536032, 0.817753, 0.181572, 0.620293, 0.442388, 0.0175395, 0.0635097, 0.939067, 0.834639, 0.341639, 0.295838, 0.551145, 0.846727, 0.656249, 0.89565, 0.187729, 0.4568, 0.180518, 0.078222, 0.594297, 0.377796, 0.828102, 0.381405, 0.674002, 0.695807, 0.870921, 0.330099, 0.488697, 0.594359, 0.964025, 0.824489, 0.611639, 0.569748, 0.171683, 0.828723, 0.672401, 0.319036, 0.589223, 0.335221, 0.899901, 0.670486, 0.648361, 0.693741, 0.0196683, 0.814988, 0.197128, 0.823435, 0.613918, 0.0564868, 0.538383, 0.543875, 0.43771, 0.554109, 0.425805, 0.924813, 0.526049, 0.211858, 0.449276, 0.215117, 0.311793, 0.729418, 0.695139, 0.6163, 0.0563601, 0.335898, 0.283512, 0.304323, 0.415524, 0.436292, 0.926189, 0.123186, 0.0775493, 0.914466, 0.491747, 0.699727, 0.534681, 0.151101, 0.790599, 0.910126, 0.616692, 0.0518067, 0.150926, 0.760992, 0.974495, 0.454028, 0.122438, 0.0878844, 0.584389, 0.823148, 0.242497, 0.543378, 0.00668937, 0.604642, 0.744074, 0.386286, 0.665456, 0.354583, 0.903609, 0.672396, 0.443259, 0.41297, 0.209507, 0.700784, 0.883033, 0.161942, 0.0380805, 0.547087, 0.603664, 0.227137, 0.786679, 0.991841, 0.802047, 0.958698, 0.834265, 0.0367579, 0.192053, 0.586892, 0.352888, 0.578534, 0.350098, 0.195541, 0.604039, 0.854957, 0.213435, 0.681606, 0.867585, 0.667161, 0.309064, 0.614989, 0.589832, 0.559675, 0.948118, 0.394093, 0.268181, 0.240831, 0.957907, 0.16221, 0.573655, 0.245809, 0.393561, 0.173222, 0.220485, 0.0905068, 0.976427, 0.553997, 0.674146, 0.522876, 0.693774, 0.422856, 0.271173, 0.590398, 0.206753, 0.320659, 0.0796568, 0.207148, 0.722146, 0.563162, 0.490106, 0.174794, 0.587179, 0.554677, 0.183096, 0.372976, 0.209802, 0.662634, 0.455443, 0.970574, 0.585647, 0.927992, 0.422745, 0.641704, 0.0534836, 0.248369, 0.95498, 0.420947, 0.130937, 0.819082, 0.861819, 0.336658, 0.835567, 0.833752, 0.182832, 0.0678623, 0.491491, 0.107036, 0.537015, 0.313987, 0.611333, 0.648266, 0.226446, 0.570042, 0.212956, 0.493341, 0.0463949, 0.153225, 0.309417, 0.349475, 0.484731, 0.619564, 0.30544, 0.014575, 0.433738, 0.696053, 0.336016, 0.566659, 0.894815, 0.579978, 0.0669475, 0.428598, 0.892975, 0.0402705, 0.785657, 0.94177, 0.091456, 0.233186, 0.196609, 0.233832, 0.881768, 0.639201, 0.908186, 0.84707, 0.0163121, 0.298263, 0.168425, 0.226827, 0.820643, 0.908381, 0.61557, 0.211805, 0.780831, 0.194359, 0.0705504, 0.582305, 0.732502, 0.985155, 0.0436473, 
0.00890268, 0.916767, 0.429273, 0.252931, 0.614374, 0.145934, 0.670483, 0.549671, 0.785289, 2.93743e-05, 0.137783, 0.938396, 0.175775, 0.414527, 0.237105, 0.241415, 0.646765, 0.457782, 0.0118686, 0.262429, 0.938411, 0.667787, 0.637551, 0.242764, 0.0024736, 0.393944, 0.857581, 0.432828, 0.68576, 0.462804, 0.464417, 0.127953, 0.40975, 0.308381, 0.92354, 0.341217, 0.369738, 0.819888, 0.0619571, 0.464809, 0.180899, 0.893546, 0.719092, 0.4782, 0.195465, 0.524857, 0.910797, 0.418687, 0.676008, 0.722386, 0.382214, 0.266871, 0.353728, 0.689718, 0.343171, 0.349328, 0.00456414, 0.23785, 0.777065, 0.130103, 0.553448, 0.958773, 0.583879, 0.105877, 0.297427, 0.706365, 0.0778059, 0.443982, 0.50586, 0.865588, 0.653115, 0.160897, 0.544656, 0.785115, 0.329742, 0.37091, 0.890802, 0.946412, 0.447079, 0.904402, 0.441834, 0.770383, 0.351896, 0.873653, 0.634946, 0.257757, 0.108805, 0.732341, 0.284968, 0.428779, 0.927488, 0.275254, 0.569032, 0.287706, 0.29044, 0.819326, 0.344959, 0.146929, 0.158939, 0.765505, 0.881878, 0.642616, 0.670341, 0.627242, 0.329185, 0.722018, 0.0301519, 0.337114, 0.653654, 0.842816, 0.0860576, 0.407936, 0.137672, 0.670754, 0.528559, 0.23469, 0.936103, 0.0203531, 0.475141, 0.117577, 0.533649, 0.870852, 0.667815, 0.73863, 0.137096, 0.133825, 0.0636257, 0.559701, 0.841388, 0.535773, 0.453817, 0.433776, 0.0598496, 0.513973, 0.5093, 0.0180754, 0.98395, 0.833081, 0.485592, 0.811734, 0.18781, 0.580459, 0.076301, 0.350521, 0.574399, 0.332405, 0.183742, 0.396749, 0.475677, 0.0206206, 0.206502, 0.414385, 0.161111, 0.546898, 0.233877, 0.824058, 0.654947, 0.570591, 0.254589, 0.623835, 0.667015, 0.199154, 0.406655, 0.432944, 0.797167, 0.628682, 0.061103, 0.793747, 0.629969, 0.222357, 0.758716, 0.961138, 0.367806, 0.252356, 0.805622, 0.681745, 0.414079, 0.42583, 0.616815, 0.510683, 0.856474, 0.186378, 0.741048, 0.171764, 0.208999, 0.924836, 0.232717, 0.811693, 0.942641, 0.745966, 0.708809, 0.301825, 0.717967, 0.370973, 0.833606, 0.32192, 0.334108, 0.569589, 0.397638, 0.743032, 0.724037, 0.422332, 0.672696, 0.437039, 0.294883, 0.797207, 0.974999, 0.0105471, 0.817756, 0.673138, 0.745646, 0.781353, 0.747134, 0.788207, 0.339826, 0.342609, 0.892701, 0.543509, 0.812048, 0.703814, 0.245834, 0.0604217, 0.66825, 0.736493, 0.914088, 0.0879902, 0.339839, 0.695309, 0.754951, 0.648037, 0.616954, 0.114367, 0.332567, 0.708419, 0.458937, 0.151418, 0.599519, 0.363702, 0.454054, 0.581712, 0.785629, 0.989615, 0.760428, 0.240069, 0.00655536, 0.944711, 0.433271, 0.902965, 0.901049, 0.275896, 0.499283, 0.79581, 0.537644, 0.678685, 0.730111, 0.805495, 0.488374, 0.190397, 0.611975, 0.914667, 0.560022, 0.458485, 0.173837, 0.7671, 0.756481, 0.342856, 0.809272, 0.835281, 0.817596, 0.716775, 0.798158, 0.285878, 0.813139, 0.776708, 0.381543, 0.184735, 0.848678, 0.666811, 0.368846, 0.0221904, 0.0784168, 0.973761, 0.304635, 0.563078, 0.444378, 0.529288, 0.950592, 0.366114, 0.461055, 0.994619, 0.461829, 0.025944, 0.113505, 0.253799, 0.439465, 0.262466, 0.63443, 0.295602, 0.130677, 0.791247, 0.232451, 0.326196, 0.348603, 0.100527, 0.14841, 0.16522, 0.919308, 0.379063, 0.44667, 0.824501, 0.379424, 0.416885, 0.182292, 0.713288, 0.597596, 0.569307, 0.800017, 0.68274, 0.455561, 0.238474, 0.300262, 0.169274, 0.753954, 0.38291, 0.579325, 0.375995, 0.470719, 0.0681019, 0.785805, 0.18757, 0.0616957, 0.17398, 0.996339, 0.218284, 0.434152, 0.0494018, 0.535415, 0.763225, 0.602223, 0.875339, 0.075351, 0.455301, 0.238316, 0.319386, 0.71196, 0.910622, 0.307543, 0.130202, 0.449451, 0.525929, 0.5612, 0.95917, 0.223535, 0.839161, 0.265646, 
0.0342633, 0.690053, 0.540579, 0.623804, 0.0897791, 0.510019, 0.953491, 0.0901769, 0.252437, 0.0026872, 0.554937, 0.920422, 0.799388, 0.848727, 0.217254, 0.380818, 0.221324, 0.0738856, 0.000122714, 0.437933, 0.52941, 0.299864, 0.785771, 0.0549682, 0.888237, 0.0667256, 0.232171, 0.465281, 0.0760895, 0.41759, 0.533014, 0.645651, 0.224713, 0.75912, 0.6478, 0.237161, 0.922038, 0.216678, 0.39973, 0.344895, 0.339365, 0.407429, 0.976207, 0.130966, 0.87633, 0.170724, 0.384754, 0.2943, 0.948898, 0.931034, 0.495715, 0.971691, 0.244995, 0.377086, 0.568675, 0.458, 0.38225, 0.502437, 0.205303, 0.342002, 0.692816, 0.675023, 0.356523, 0.342073, 0.385802, 0.501364, 0.5792, 0.474233, 0.998386, 0.781904, 0.954279, 0.906955, 0.253836, 0.148687, 0.506651, 0.868825, 0.17903, 0.47435, 0.554572, 0.0868281, 0.280746, 0.573773, 0.565284, 0.107667, 0.828001, 0.61411, 0.991063, 0.376834, 0.695195, 0.214454, 0.927982, 0.0260029, 0.447002, 0.27579, 0.180025, 0.540228, 0.373212, 0.231209, 0.639406, 0.743067, 0.980555, 0.804219, 0.020596, 0.527276, 0.155662, 0.0171594, 0.0934418, 0.359313, 0.632162, 0.635465, 0.741116, 0.0324823, 0.0609536, 0.143551, 0.332689, 0.327101, 0.656394, 0.743249, 0.824207, 0.838604, 0.206603, 0.60942, 0.0466874, 0.34153, 0.396803, 0.0863154, 0.938587, 0.115326, 0.832928, 0.76383, 0.0477996, 0.589591, 0.517357, 0.00153371, 0.230895, 0.0164503, 0.857105, 0.109765, 0.568341, 0.876423, 0.191798, 0.469835, 0.45924, 0.355071, 0.418041, 0.0948443, 0.829732, 0.661499, 0.506711, 0.544885, 0.31725, 0.107178, 0.47551, 0.502825, 0.387776, 0.622294, 0.426275, 0.0617072, 0.55118, 0.5217, 0.55258, 0.61173, 0.928234, 0.507576, 0.727792, 0.374178, 0.818812, 0.0222083, 0.323577, 0.947573, 0.68111, 0.996513, 0.863556, 0.864499, 0.981838, 0.418183, 0.874134, 0.650143, 0.326361, 0.295483, 0.57914, 0.543618, 0.632917, 0.775706, 0.746207, 0.849637, 0.429899, 0.351495, 0.268899, 0.767588, 0.714059, 0.499168, 0.469066, 0.25488, 0.93542, 0.744678, 0.376386, 0.821541, 0.0215523, 0.726469, 0.999386, 0.323576, 0.116576, 0.791993, 0.967267, 0.33244, 0.929941, 0.655886, 0.850531, 0.929637, 0.652231, 0.147383, 0.313992, 0.5893, 0.327567, 0.186722, 0.416446, 0.23665, 0.068951, 0.0902886, 0.206437, 0.466131, 0.994251, 0.640948, 0.477065, 0.625764, 0.114972, 0.082417, 0.780419, 0.093996, 0.328218, 0.934135, 0.660206, 0.805122, 0.547285, 0.413155, 0.851648, 0.958469, 0.105686, 0.999529, 0.764199, 0.232699, 0.534876, 0.900669, 0.336866, 0.117499, 0.737167, 0.410377, 0.24998, 0.416482, 0.293996, 0.504759, 0.156092, 0.767367, 0.256123, 0.372485, 0.149028, 0.158947, 0.911312, 0.5435, 0.558564, 0.644299, 0.991434, 0.515469, 0.911864, 0.357269, 0.481236, 0.132541, 0.801901, 0.874397, 0.375827, 0.273718, 0.778889, 0.266327, 0.201597, 0.941098, 0.402948, 0.0144779, 0.79957, 0.176809, 0.126298, 0.441617, 0.883792, 0.9105, 0.778926, 0.438753, 0.118097, 0.724643, 0.383404, 0.059517, 0.66822, 0.0947962, 0.314721, 0.184681, 0.674995, 0.0978911, 0.369492, 0.0086417, 0.0186125, 0.687743, 0.282032, 0.1884, 0.823876, 0.262698, 0.618405, 0.847498, 0.34163, 0.745071, 0.650708, 0.34235, 0.62163, 0.851984, 0.445671, 0.215015, 0.460495, 0.430149, 0.307774, 0.799374, 0.199636, 0.103411, 0.158742, 0.194763, 0.366357, 0.587163, 0.991611, 0.536064, 0.967775, 0.683945, 0.0165994, 0.685153, 0.169493, 0.820166, 0.674759, 0.0652209, 0.617074, 0.0719827, 0.733189, 0.577215, 0.548849, 0.214184, 0.329439, 0.489709, 0.240344, 0.773691, 0.462625, 0.832751, 0.49877, 0.905665, 0.300145, 0.227945, 0.319922, 0.796124, 0.336471, 0.26384, 0.172915, 0.301686, 0.907982, 
0.669673, 0.618241, 0.461905, 0.685511, 0.0330184, 0.353584, 0.756152, 0.907153, 0.45808, 0.762273, 0.969434, 0.197383, 0.530418, 0.646489, 0.601396, 0.929437, 0.279697, 0.942927, 0.72955, 0.855701, 0.834375, 0.699459, 0.0306069, 0.459645, 0.19041, 0.0968616, 0.646082, 0.401555, 0.0303573, 0.729982, 0.715356, 0.169817, 0.32259, 0.368546, 0.481556, 0.887325, 0.815447, 0.210569, 0.114777, 0.244363, 0.563674, 0.0993083, 0.926382, 0.579515, 0.604385, 0.165098, 0.978543, 0.986523, 0.0502906, 0.0766624, 0.539851, 0.403884, 0.182008, 0.389088, 0.273846, 0.858585, 0.593202, 0.929929, 0.940485, 0.970789, 0.168345, 0.593503, 0.722867, 0.559876, 0.0913571, 0.364557, 0.657325, 0.481733, 0.175144, 0.893575, 0.224465, 0.149952, 0.715092, 0.367577, 0.661431, 0.0943229, 0.887027, 0.96493, 0.493631, 0.0305581, 0.245535, 0.816682, 0.137657, 0.864905, 0.0327525, 0.841194, 0.0310711, 0.298508, 0.151638, 0.0939599, 0.760116, 0.867874, 0.825902, 0.28888, 0.0181206, 0.107323, 0.0196653, 0.676588, 0.96792, 0.245701, 0.417712, 0.215411, 0.794073, 0.753198, 0.866174, 0.757581, 0.160835, 0.962818, 0.368402, 0.757416, 0.317145, 0.945184, 0.523144, 0.961954, 0.211074, 0.995428, 0.689133, 0.388254, 0.163847, 0.132398, 0.578267, 0.688608, 0.448164, 0.149111, 0.670419, 0.770153, 0.265796, 0.119313, 0.00952636, 0.333354, 0.32457, 0.944522, 0.472341, 0.330589, 0.777376, 0.139584, 0.321974, 0.999896, 0.900484, 0.404496, 0.307012, 0.620225, 0.70123, 0.0738496, 0.187599, 0.834358, 0.0936031, 0.0677084, 0.77745, 0.57673, 0.514984, 0.676642, 0.907798, 0.281646, 0.732466, 0.236927, 0.718766, 0.30445, 0.182738, 0.136704, 0.369467, 0.804445, 0.539138, 0.237853, 0.695964, 0.00461763, 0.805089, 0.762751, 0.582421, 0.895735, 0.657319, 0.191711, 0.635133, 0.909233, 0.779651, 0.768555, 0.243218, 0.589038, 0.00521608, 0.212619, 0.618466, 0.345604, 0.512069, 0.400544, 0.721577, 0.078069, 0.803117, 0.681361, 0.145508, 0.445484, 0.13027, 0.975213, 0.335138, 0.676329, 0.584055, 0.328533, 0.777367, 0.375314, 0.500871, 0.274031, 0.758347, 0.850875, 0.0516321, 0.119776, 0.659553, 0.148233, 0.58129, 0.281835, 0.0653736, 0.609858, 0.785661, 0.427834, 0.509185, 0.554398, 0.0800974, 0.834723, 0.788448, 0.829264, 0.11242, 0.044501, 0.717812, 0.496454, 0.552878, 0.91177, 0.838499, 0.214569, 0.136391, 0.377353, 0.978933, 0.0964667, 0.770986, 0.0456969, 0.729294, 0.622665, 0.867686, 0.593344, 0.131652, 0.710065, 0.05886, 0.449821, 0.931761, 0.517155, 0.242112, 0.671254, 0.417985, 0.666281, 0.268961, 0.739002, 0.516142, 0.45308, 0.146306, 0.748715, 0.503656, 0.910697, 0.335632, 0.696612, 0.654792, 0.862619, 0.0682346, 0.770083, 0.978355, 0.401244, 0.00176029, 0.558156, 0.916832, 0.97322, 0.549838, 0.19869, 0.438463, 0.0156057, 0.83977, 0.105383, 0.299166, 0.99335, 0.581013, 0.0940476, 0.798757, 0.118182, 0.266796, 0.488131, 0.645558, 0.0384786, 0.304939, 0.35961, 0.986439, 0.843547, 0.3697, 0.852451, 0.794843, 0.0546904, 0.790721, 0.933048, 0.314777, 0.0438145, 0.931903, 0.61199, 0.78707, 0.275311, 0.894913, 0.260589, 0.489388, 0.99467, 0.961118, 0.330167, 0.875786, 0.0565952, 0.838825, 0.811146, 0.0279812, 0.544337, 0.103276, 0.0303973, 0.384458, 0.948577, 0.633818, 0.954165, 0.852837, 0.201179, 0.919404, 0.615282, 0.18759, 0.496785, 0.896411, 0.606767, 0.994157, 0.0166269, 0.50311, 0.495421, 0.300193, 0.524407, 0.123501, 0.970621, 0.705828, 0.424686, 0.236783, 0.233107, 0.0735741, 0.462477, 0.318771, 0.953457, 0.001118, 0.637781, 0.518401, 0.545005, 0.213403, 0.915296, 0.414932, 0.320413, 0.543889, 0.176325, 0.546838, 0.104671, 0.578876, 0.962853, 
0.817569, 0.00301906, 0.0428612, 0.164119, 0.502298, 0.538741, 0.401706, 0.587071, 0.662706, 0.359987, 0.343828, 0.503272, 0.672525, 0.338472, 0.59923, 0.489641, 0.800391, 0.266119, 0.773703, 0.026278, 0.450742, 0.0257071, 0.215092, 0.20344, 0.837094, 0.271702, 0.754091, 0.229151, 0.958711, 0.0210307, 0.245963, 0.238919, 0.584489, 0.923226, 0.493625, 0.63361, 0.394205, 0.171767, 0.203015, 0.276054, 0.577253, 0.195312, 0.999466, 0.0352015, 0.48984, 0.434269, 0.207206, 0.796542, 0.0923338, 0.505757, 0.9232, 0.620437, 0.584372, 0.785207, 0.736546, 0.208744, 0.477267, 0.909428, 0.406968, 0.119203, 0.0915666, 0.950761, 0.242588, 0.776786, 0.0918372, 0.47149, 0.838527, 0.267107, 0.336596, 0.276568, 0.91368, 0.357471, 0.714987, 0.860658, 0.199195, 0.738763, 0.475023, 0.572437, 0.0687734, 0.175211, 0.564366, 0.382863, 0.778606, 0.0920013, 0.951513, 0.763532, 0.878139, 0.639052, 0.518825, 0.389098, 0.745893, 0.896298, 0.845904, 0.627195, 0.440325, 0.661317, 0.0685766, 0.505553, 0.257586, 0.573869, 0.70068, 0.44151, 0.342163, 0.953214, 0.504308, 0.128572, 0.985638, 0.695045, 0.624689, 0.879732, 0.792932, 0.540714, 0.490834, 0.111954, 0.793822, 0.00736275, 0.400705, 0.783914, 0.282839, 0.163008, 0.968853, 0.331046, 0.0971297, 0.316015, 0.306883, 0.363191, 0.71868, 0.16235, 0.236233, 0.170476, 0.954109, 0.557757, 0.987404, 0.0342678, 0.301085, 0.130566, 8.99479e-06, 0.492714, 0.449972, 0.429922, 0.707314, 0.453587, 0.144354, 0.811004, 0.412175, 0.443095, 0.224612, 0.565932, 0.932614, 0.830064, 0.057281, 0.518328, 0.783682, 0.548909, 0.499433, 0.326037, 0.347833, 0.305309, 0.574449, 0.616677, 0.461319, 0.452558, 0.382742, 0.278652, 0.450522, 0.0402624, 0.783456, 0.929137, 0.888308, 0.249146, 0.457229, 0.822089, 0.548378, 0.805055, 0.391676, 0.691428, 0.644834, 0.844889, 0.353146, 0.0929275, 0.157428, 0.515319, 0.208275, 0.495904, 0.723555, 0.122964, 0.36962, 0.41214, 0.306746, 0.329619, 0.196846, 0.454874, 0.789508, 0.28634, 0.941138, 0.569559, 0.721007, 0.626095, 0.879066, 0.78383, 0.429235, 0.168256, 0.127681, 0.878632, 0.614135, 0.680096, 0.708349, 0.922148, 0.123667, 0.617842, 0.53446, 0.786944, 0.284737, 0.951959, 0.0950369, 0.190759, 0.139618, 0.533479, 0.63933, 0.268021, 0.798943, 0.337936, 0.405574, 0.943102, 0.714451, 0.83261, 0.443561, 0.260152, 0.942133, 0.853385, 0.189669, 0.573713, 0.752624, 0.00792817, 0.834036, 0.917539, 0.46164, 0.92198, 0.706737, 0.180156, 0.926683, 0.462895, 0.977159, 0.283718, 0.363913, 0.402255, 0.770423, 0.415978, 0.828699, 0.41718, 0.302832, 0.288217, 0.631554, 0.326558, 0.566064, 0.828802, 0.123279, 0.327377, 0.397165, 0.308697, 0.702286, 0.45401, 0.969257, 0.136402, 0.86909, 0.0581259, 0.843663, 0.0837092, 0.289501, 0.913223, 0.927754, 0.0428233, 0.728459, 0.242074, 0.798512, 0.342475, 0.0846666, 0.158902, 0.717872, 0.814997, 0.833233, 0.696543, 0.0359532, 0.156974, 0.32208, 0.410787, 0.561302, 0.399088, 0.305644, 0.931902, 0.95712, 0.701232, 0.685619, 0.955853, 0.538418, 0.646915, 0.146731, 0.65422, 0.418238, 0.0750841, 0.350242, 0.358565, 0.358744, 0.183739, 0.405738, 0.835397, 0.683115, 0.883714, 0.557802, 0.710694, 0.783548, 0.493122, 0.814222, 0.168881, 0.749967, 0.00642282, 0.861179, 0.118478, 0.80518, 0.87412, 0.899408, 0.933523, 0.481826, 0.521988, 0.414407, 0.651312, 0.81063, 0.359634, 0.592237, 0.495092, 0.186289, 0.140337, 0.989699, 0.719063, 0.295893, 0.86223, 0.645697, 0.119774, 0.673202, 0.123001, 0.937681, 0.0759322, 0.80301, 0.107765, 0.512171, 0.0884891, 0.496683, 0.526593, 0.925926, 0.142882, 0.535131, 0.59712, 0.602905, 0.272904, 0.909842, 
0.458215, 0.474302, 0.940236, 0.99925, 0.856123, 0.0648635, 0.33895, 0.0851604, 0.861585, 0.994629, 0.270295, 0.253986, 0.932597, 0.656535, 0.544405, 0.163579, 0.00565768, 0.425538, 0.217728, 0.94399, 0.533269, 0.364052, 0.980067, 0.812952, 0.959092, 0.93408, 0.435034, 0.129125, 0.744507, 0.751672, 0.448497, 0.778792, 0.695853, 0.747038, 0.0730961, 0.903926, 0.736134, 0.198268, 0.906906, 0.88627, 0.872989, 0.943695, 0.435863, 0.987455, 0.0591982, 0.924001, 0.255637, 0.323308, 0.318667, 0.873032, 0.867264, 0.48634, 0.294825, 0.804789, 0.183232, 0.643134, 0.617333, 0.983185, 0.689889, 0.762106, 0.235854, 0.923138, 0.0670237, 0.0938047, 0.129591, 0.114159, 0.210244, 0.125066, 0.724633, 0.791673, 0.985511, 0.787778, 0.14163, 0.528792, 0.21738, 0.612279, 0.888867, 0.193273, 0.665858, 0.918939, 0.467632, 0.955895, 0.762796, 0.440253, 0.331188, 0.529962, 0.773693, 0.400064, 0.337817, 0.477382, 0.83776, 0.754275, 0.653156, 0.330874, 0.416658, 0.163485, 0.564096, 0.589732, 0.114851, 0.558719, 0.0660067, 0.687986, 0.140014, 0.207164, 0.467089, 0.945626, 0.834166, 0.10679, 0.678549, 0.236083, 0.19538, 0.91674, 0.497997, 0.145263, 0.832838, 0.370289, 0.445591, 0.0804101, 0.994883, 0.600678, 0.785424, 0.556163, 0.944177, 0.672062, 0.913911, 0.719992, 0.21344, 0.424073, 0.504163, 0.179541, 0.694425, 0.704981, 0.719038, 0.664638, 0.675743, 0.00904641, 0.12478, 0.364036, 0.802474, 0.416197, 0.0907069, 0.350004, 0.737557, 0.854752, 0.979428, 0.6801, 0.00163546, 0.940433, 0.944515, 0.0478948, 0.828965, 0.0247498, 0.140715, 0.878443, 0.373219, 0.15744, 0.0684821, 0.238656, 0.62082, 0.396778, 0.75656, 0.183271, 0.199846, 0.859093, 0.729, 0.114022, 0.713058, 0.251534, 0.399791, 0.343449, 0.481608, 0.351112, 0.821257, 0.762525, 0.826417, 0.854871, 0.898275, 0.261042, 0.446316, 0.134348, 0.589654, 0.626137, 0.627859, 0.644243, 0.900311, 0.312348, 0.686388, 0.0638275, 0.942398, 0.997995, 0.519926, 0.108917, 0.474283, 0.459815, 0.0173454, 0.403841, 0.695965, 0.16122, 0.687384, 0.263314, 0.615264, 0.95124, 0.0679664, 0.440981, 0.374297, 0.8821, 0.764221, 0.201789, 0.749617, 0.333827, 0.367097, 0.072781, 0.299826, 0.828564, 0.0228201, 0.416227, 0.265307, 0.076264, 0.998028, 0.404334, 0.2468, 0.721791, 0.0487748, 0.840049, 0.122938, 0.473641, 0.18489, 0.8881, 0.890702, 0.402479, 0.359522, 0.166983, 0.12043, 0.104968, 0.7571, 0.316185, 0.427233, 0.456899, 0.789597, 0.306429, 0.686463, 0.914225, 0.148214, 0.386535, 0.661416, 0.0010132, 0.592672, 0.646657, 0.515067, 0.920167, 0.160818, 0.249605, 0.352428, 0.0756006, 0.753631, 0.803908, 0.562435, 0.944115, 0.516684, 0.667722, 0.722846, 0.0901999, 0.102956, 0.310629, 0.0718126, 0.939718, 0.0015867, 0.287318, 0.640484, 0.570949, 0.091604, 0.898546, 0.659228, 0.0322266, 0.803713, 0.0606192, 0.0440694, 0.66509, 0.0678347, 0.915303, 0.916844, 0.916624, 0.486296, 0.793855, 0.650598, 0.514116, 0.139537, 0.414233, 0.374154, 0.102838, 0.15141, 0.840178, 0.196955, 0.459359, 0.0665979, 0.371865, 0.847088, 0.865833, 0.938104, 0.299027, 0.841662, 0.327934, 0.845796, 0.148307, 0.207255, 0.807801, 0.447583, 0.491865, 0.667041, 0.802269, 0.797326, 0.198915, 0.649828, 0.483347, 0.267933, 0.836389, 0.356822, 0.847785, 0.929989, 0.514498, 0.181326, 0.165689, 0.948193, 0.728615, 0.594231, 0.812535, 0.174641, 0.950378, 0.482942, 0.967358, 0.0275906, 0.0311752, 0.614367, 0.970778, 0.164847, 0.209201, 0.66234, 0.102931, 0.799752, 0.543935, 0.630354, 0.873262, 0.544798, 0.660557, 0.891737, 0.567577, 0.939348, 0.78186, 0.0686499, 0.225852, 0.893362, 0.540483, 0.504786, 0.628149, 0.0838464, 
0.377603, 0.743475, 0.96083, 0.45757, 0.221401, 0.592731, 0.618268, 0.771072, 0.0765574, 0.0476341, 0.635026, 0.383667, 0.916712, 0.171473, 0.378175, 0.239185, 0.324705, 0.257922, 0.870206, 0.360061, 0.531246, 0.408343, 0.487231, 0.978677, 0.207524, 0.00673317, 0.899411, 0.737445, 0.844541, 0.433296, 0.610042, 0.538669, 0.310229, 0.586103, 0.0650336, 0.120229, 0.32721, 0.329803, 0.941069, 0.384618, 0.755424, 0.147826, 0.643201, 0.0737775, 0.665542, 0.461143, 0.0717486, 0.187379, 0.799762, 0.743815, 0.914302, 0.972338, 0.0624001, 0.143945, 0.935498, 0.167011, 0.627861, 0.0730065, 0.610473, 0.917775, 0.21292, 0.80276, 0.975205, 0.309135, 0.689936, 0.795602, 0.703413, 0.905654, 0.443426, 0.785175, 0.884934, 0.451337, 0.185263, 0.906834, 0.238523, 0.581382, 0.844035, 0.630555, 0.450307, 0.107293, 0.798729, 0.712429, 0.606065, 0.0508112, 0.751825, 0.293466, 0.696673, 0.687424, 0.949014, 0.635518, 0.260788, 0.664012, 0.503692, 0.254894, 0.47687, 0.453398, 0.879328, 0.258936, 0.0584603, 0.328501, 0.836817, 0.158116, 0.0338683, 0.948123, 0.565302, 0.866665, 0.320561, 0.385397, 0.147682, 0.061885, 0.475893, 0.687086, 0.878538, 0.149937, 0.439654, 0.45665, 0.69964, 0.27501, 0.55027, 0.168487, 0.75805, 0.935977, 0.580474, 0.878249, 0.566541, 0.58974, 0.252357, 0.74584, 0.785329, 0.471127, 0.0698617, 0.987713, 0.272423, 0.329089, 0.131877, 0.270149, 0.0129784, 0.684912, 0.833625, 0.219229, 0.0552668, 0.99617, 0.833919, 0.755143, 0.632745, 0.195254, 0.238367, 0.665154, 0.150129, 0.13481, 0.542367, 0.770813, 0.974124, 0.927066, 0.921943, 0.754261, 0.241053, 0.03664, 0.538456, 0.332408, 0.509011, 0.868528, 0.373223, 0.185156, 0.173616, 0.0959532, 0.717941, 0.498915, 0.985696, 0.287694, 0.597918, 0.731743, 0.675398, 0.698306, 0.439155, 0.518567, 0.895002, 0.512035, 0.569751, 0.409759, 0.253181, 0.0834103, 0.111383, 0.538176, 0.917993, 0.690836, 0.89454, 0.647424, 0.696855, 0.924478, 0.0391853, 0.279857, 0.737514, 0.441775, 0.0284395, 0.656948, 0.0281428, 0.119077, 0.164105, 0.605908, 0.936884, 0.318437, 0.111724, 0.322787, 0.982408, 0.97011, 0.72668, 0.099519, 0.144687, 0.617189, 0.925421, 0.674846, 0.222799, 0.47666, 0.928695, 0.935499, 0.828436, 0.46032, 0.24613, 0.417807, 0.746523, 0.638837, 0.342988, 0.110714, 0.550722, 0.759884, 0.60739, 0.692204, 0.0526183, 0.177483, 0.437292, 0.493735, 0.631113, 0.525569, 0.753399, 0.157499, 0.308851, 0.644157, 0.072486, 0.55973, 0.0907334, 0.857988, 0.656363, 0.841934, 0.953069, 0.632553, 0.747385, 0.417105, 0.153193, 0.248604, 0.868737, 0.929238, 0.163154, 0.0623951, 0.506774, 0.985715, 0.597852, 0.61086, 0.51979, 0.429638, 0.723483, 0.571642, 0.709711, 0.0975933, 0.39613, 0.438011, 0.449673, 0.44669, 0.91548, 0.833362, 0.808329, 0.530815, 0.0326424, 0.568272, 0.0961351, 0.461136, 0.226645, 0.657493, 0.951434, 0.395262, 0.0437565, 0.505344, 0.86249, 0.49411, 0.0507871, 0.14814, 0.340194, 0.265498, 0.0754856, 0.19997, 0.837598, 0.0296773, 0.936372, 0.779978, 0.495378, 0.367692, 0.479471, 0.805453, 0.485373, 0.0953336, 0.14572, 0.579649, 0.239343, 0.243335, 0.403262, 0.633629, 0.0682983, 0.712714, 0.341858, 0.501027, 0.772609, 0.35328, 0.625386, 0.385904, 0.614652, 0.811551, 0.176481, 0.316492, 0.816493, 0.819353, 0.948973, 0.298714, 0.721146, 0.780835, 0.878107, 0.572032, 0.830147, 0.519363, 0.958325, 0.527391, 0.716759, 0.501259, 0.493692, 0.872762, 0.628699, 0.804758, 0.33735, 0.862575, 0.614228, 0.868128, 0.179866, 0.153829, 0.735529, 0.310759, 0.75861, 0.955902, 0.996456, 0.0155119, 0.868818, 0.200885, 0.674882, 0.239315, 0.888505, 0.811764, 0.467074, 
0.0994684, 0.107193, 0.44433, 0.579151, 0.176343, 0.983351, 0.768789, 0.831046, 0.482872, 0.90771, 0.934568, 0.38876, 0.504796, 0.547039, 0.351825, 0.215924, 0.80159, 0.231762, 0.113694, 0.98638, 0.485296, 0.593716, 0.0785382, 0.744536, 0.735809, 0.597734, 0.701177, 0.718768, 0.72438, 0.794393, 0.332094, 0.686298, 0.354777, 0.498037, 0.965321, 0.793343, 0.601226, 0.59768, 0.546122, 0.854092, 0.542572, 0.416698, 0.882084, 0.449068, 0.0742358, 0.930291, 0.935775, 0.428394, 0.805404, 0.909318, 0.708399, 0.679808, 0.555537, 0.909858, 0.110007, 0.697008, 0.226286, 0.30794, 0.053389, 0.0916701, 0.168651, 0.675089, 0.137463, 0.361724, 0.664645, 0.695049, 0.332835, 0.23099, 0.864152, 0.419861, 0.083713, 0.990716, 0.00623633, 0.864072, 0.22218, 0.813389, 0.103877, 0.6216, 0.350807, 0.681206, 0.350022, 0.0559396, 0.609546, 0.103462, 0.451129, 0.173092, 0.928536, 0.761367, 0.625893, 0.999792, 0.794129, 0.610381, 0.611992, 0.759012, 0.861958, 0.0441375, 0.371323, 0.402609, 0.889682, 0.210105, 0.430555, 0.813706, 0.827418, 0.934669, 0.346201, 0.303894, 0.425123, 0.727389, 0.813788, 0.323004, 0.562072, 0.826403, 0.990877, 0.73152, 0.0854499, 0.302683, 0.326324, 0.885827, 0.407247, 0.965275, 0.990593, 0.607219, 0.784183, 0.877393, 0.17006, 0.806105, 0.436142, 0.43005, 0.210671, 0.583769, 0.271482, 0.679246, 0.739748, 0.223993, 0.73148, 0.460188, 0.341485, 0.179936, 0.114199, 0.562467, 0.210406, 0.274018, 0.0734009, 0.259328, 0.51934, 0.175807, 0.474026, 0.518619, 0.373289, 0.295022, 0.153322, 0.272974, 0.33554, 0.115575, 0.369072, 0.216294, 0.561812, 0.423946, 0.339774, 0.812142, 0.0389147, 0.728159, 0.406847, 0.0073503, 0.93889, 0.472488, 0.333231, 0.516892, 0.00982252, 0.0442528, 0.398625, 0.537754, 0.939892, 0.952943, 0.718147, 0.70743, 0.478117, 0.659833, 0.547758, 0.0547595, 0.0507752, 0.11535, 0.384005, 0.354978, 0.759896, 0.664564, 0.997026, 0.188457, 0.713732, 0.940001, 0.983527, 0.30942, 0.482499, 0.0291149, 0.0502972, 0.869711, 0.605827, 0.880391, 0.559943, 0.799703, 0.163051, 0.810344, 0.689329, 0.374283, 0.987522, 0.427716, 0.912901, 0.888009, 0.15132, 0.0403356, 0.483226, 0.151659, 0.534256, 0.199749, 0.334739, 0.294661, 0.0827428, 0.916822, 0.509013, 0.639294, 0.619049, 0.423424, 0.00960821, 0.152704, 0.584539, 0.97645, 0.830753, 0.563711, 0.756433, 0.206827, 0.452723, 0.46099, 0.615499, 0.891377, 0.14176, 0.441772, 0.7399, 0.963869, 0.351896, 0.797338, 0.717526, 0.558598, 0.200473, 0.0566216, 0.766157, 0.547293, 0.768586, 0.831479, 0.655012, 0.326318, 0.758386, 0.993428, 0.490431, 0.794475, 0.818778, 0.284037, 0.931828, 0.148299, 0.795698, 0.770279, 0.61012, 0.712323, 0.627313, 0.692585, 0.713857, 0.575104, 0.0705564, 0.384126, 0.813143, 0.870509, 0.260943, 0.0687908, 0.461427, 0.280083, 0.110856, 0.980626, 0.909306, 0.423537, 0.788898, 0.827466, 0.792313, 0.198289, 0.54203, 0.32424, 0.348408, 0.856534, 0.506747, 0.97225, 0.0550984, 0.0555453, 0.141956, 0.633462, 0.715796, 0.72804, 0.111788, 0.713685, 0.655457, 0.872259, 0.281724, 0.153277, 0.656518, 0.5761, 0.811273, 0.741021, 0.346319, 0.168153, 0.97488, 0.205865, 0.454107, 0.178165, 0.873168, 0.798471, 0.409507, 0.327403, 0.381364, 0.110272, 0.933571, 0.848571, 0.362224, 0.591039, 0.731441, 0.71913, 0.469547, 0.360276, 0.356864, 0.0109086, 0.109288, 0.51789, 0.232499, 0.302433, 0.533526, 0.633215, 0.389799, 0.534496, 0.673074, 0.80854, 0.192882, 0.319299, 0.107454, 0.251623, 0.00296579, 0.665672, 0.847694, 0.43587, 0.867145, 0.701412, 0.0231152, 0.461606, 0.199226, 0.533613, 0.765012, 0.820115, 0.706689, 0.734028, 0.0886849, 0.735587, 
0.600506, 0.823775, 0.0373341, 0.812563, 0.2348, 0.825182, 0.997076, 0.566187, 0.566244, 0.690071, 0.290938, 0.832857, 0.45413, 0.924864, 0.747998, 0.961952, 0.967087, 0.00745362, 0.886759, 0.229773, 0.0123162, 0.13285, 0.586293, 0.285238, 0.330433, 0.581045, 0.0791134, 0.335392, 0.196941, 0.2336, 0.895863, 0.0548771, 0.187895, 0.0521924, 0.394677, 0.96342, 0.0864298, 0.347099, 0.551305, 0.0642886, 0.363123, 0.311614, 0.0681629, 0.90542, 0.61787, 0.559809, 0.640655, 0.146883, 0.726802, 0.653222, 0.825089, 0.367256, 0.658711, 0.530255, 0.142158, 0.335159, 0.576883, 0.815347, 0.538353, 0.42229, 0.578867, 0.121993, 0.186007, 0.915189, 0.922611, 0.112167, 0.876108, 0.82817, 0.871104, 0.192003, 0.124431, 0.802664, 0.512239, 0.163601, 0.460128, 0.79289, 0.509116, 0.287871, 0.664928, 0.444367, 0.513435, 0.543775, 0.836336, 0.057896, 0.801931, 0.213453, 0.774068, 0.821788, 0.169053, 0.0413279, 0.765749, 0.321284, 0.726605, 0.761212, 0.304786, 0.296818, 0.720861, 0.200606, 0.326656, 0.17193, 0.691957, 0.226087, 0.444533, 0.906708, 0.671676, 0.429218, 0.363985, 0.184063, 0.738182, 0.461176, 0.137039, 0.357278, 0.521567, 0.908227, 0.0501428, 0.0269577, 0.0479135, 0.65737, 0.738163, 0.742666, 0.59487, 0.222037, 0.525651, 0.0985925, 0.241994, 0.331669, 0.206876, 0.654418, 0.550447, 0.28119, 0.00454971, 0.245784, 0.259463, 0.129567, 0.280369, 0.979685, 0.294002, 0.124807, 0.850743, 0.91553, 0.109716, 0.387582, 0.834544, 0.66271, 0.0471684, 0.64241, 0.835814, 0.86512, 0.325619, 0.31638, 0.279696, 0.881424, 0.487695, 0.776307, 0.84578, 0.537472, 0.877102, 0.140269, 0.747971, 0.897241, 0.379206, 0.220176, 0.349415, 0.0128732, 0.994032, 0.909643, 0.155572, 0.77627, 0.11818, 0.263707, 0.900637, 0.335768, 0.322971, 0.360847, 0.614925, 0.0892853, 0.881562, 0.17091, 0.941587, 0.930328, 0.304348, 0.970301, 0.84352, 0.763354, 0.562173, 0.351434, 0.581137, 0.685157, 0.781364, 0.035766, 0.761083, 0.152155, 0.71605, 0.976006, 0.969557, 0.293685, 0.0549842, 0.133562, 0.82613, 0.967592, 0.846296, 0.211786, 0.467714, 0.670399, 0.893941, 0.260086, 0.00617443, 0.736166, 0.336154, 0.924484, 0.0194528, 0.472163, 0.947271, 0.843659, 0.637149, 0.974735, 0.296876, 0.669006, 0.569546, 0.20116, 0.654917, 0.546547, 0.820704, 0.605349, 0.673724, 0.556966, 0.070143, 0.0166695, 0.467624, 0.660667, 0.494119, 0.857361, 0.884427, 0.805896, 0.269175, 0.318624, 0.0725401, 0.680126, 0.076112, 0.815093, 0.637355, 0.378459, 0.555372, 0.318617, 0.664214, 0.498975, 0.839348, 0.928194, 0.885326, 0.382787, 0.55833, 0.632732, 0.699359, 0.939815, 0.0526236, 0.084209, 0.243343, 0.679591, 0.121869, 0.37269, 0.0869808, 0.56071, 0.161165, 0.350729, 0.286705, 0.658271, 0.0525391, 0.460454, 0.952039, 0.763749, 0.252623, 0.514233, 0.630456, 0.11327, 0.142385, 0.09471, 0.910605, 0.159448, 0.025932, 0.193778, 0.734099, 0.347049, 0.715323, 0.331636, 0.492643, 0.172658, 0.544111, 0.678647, 0.82822, 0.890233, 0.538535, 0.193232, 0.628986, 0.637995, 0.181863, 0.503949, 0.46497, 0.272194, 0.581389, 0.531691, 0.804678, 0.678814, 0.799185, 0.779134, 0.299096, 0.280952, 0.524904, 0.422067, 0.86106, 0.739144, 0.437234, 0.410388, 0.270291, 0.754206, 0.559645, 0.25864, 0.0243176, 0.770453, 0.56095, 0.898787, 0.245929, 0.681202, 0.95394, 0.569529, 0.452469, 0.975237, 0.896771, 0.223286, 0.989632, 0.292195, 0.18598, 0.0775047, 0.0462231, 0.121657, 0.765414, 0.940138, 0.320731, 0.956966, 0.617806, 0.848577, 0.770346, 0.54212, 0.422321, 0.488888, 0.157264, 0.0614975, 0.235589, 0.31519, 0.615688, 0.0252402, 0.620932, 0.429205, 0.78533, 0.296248, 0.861138, 0.41389, 
0.87524, 0.411513, 0.888992, 0.191521, 0.177546, 0.770683, 0.412321, 0.99239, 0.168588, 0.152511, 0.68212, 0.931234, 0.395882, 0.834497, 0.313145, 0.826681, 0.71435, 0.20711, 0.170136, 0.527598, 0.591789, 0.655174, 0.908624, 0.637615, 0.847109, 0.0362419, 0.457725, 0.127153, 0.710776, 0.095789, 0.0364299, 0.971963, 0.503109, 0.907669, 0.00268601, 0.0376114, 0.330092, 0.00216109, 0.391222, 0.788424, 0.506559, 0.387133, 0.152886, 0.933285, 0.227926, 0.882869, 0.733733, 0.822269, 0.0970804, 0.255417, 0.15843, 0.301121, 0.801318, 0.703989, 0.438714, 0.813886, 0.647927, 0.818138, 0.840587, 0.979557, 0.64096, 0.148562, 0.241392, 0.805949, 0.188091, 0.983173, 0.629896, 0.0623323, 0.172154, 0.276237, 0.484383, 0.976235, 0.650926, 0.748822, 0.523162, 0.132491, 0.983323, 0.826655, 0.622063, 0.927223, 0.986423, 0.422319, 0.857705, 0.614299, 0.195418, 0.75345, 0.753493, 0.82528, 0.695535, 0.871959, 0.198182, 0.252445, 0.0848789, 0.0246484, 0.155478, 0.766652, 0.521933, 0.050651, 0.000431992, 0.366988, 0.361293, 0.215211, 0.0113326, 0.415247, 0.16039, 0.854401, 0.429871, 0.88418, 0.339746, 0.560085, 0.0304348, 0.0195183, 0.0423525, 0.39167, 0.416544, 0.57624, 0.960926, 0.722137, 0.548531, 0.972245, 0.0661971, 0.661878, 0.328153, 0.744898, 0.565517, 0.606903, 0.846644, 0.471272, 0.331511, 0.503534, 0.248501, 0.447479, 0.500011, 0.568788, 0.739181, 0.962357, 0.160933, 0.647045, 0.419411, 0.902111, 0.0363436, 0.876261, 0.992914, 0.201877, 0.491814, 0.935375, 0.495645, 0.480016, 0.250264, 0.746046, 0.387599, 0.332632, 0.699724, 0.160943, 0.18238, 0.157698, 0.0555704, 0.0943856, 0.815176, 0.95347, 0.438966, 0.719201, 0.146659, 0.499255, 0.647801, 0.592177, 0.223986, 0.121259, 0.212785, 0.682767, 0.662474, 0.744542, 0.145665, 0.0513408, 0.263725, 0.633071, 0.719178, 0.751036, 0.845591, 0.445084, 0.0600563, 0.484062, 0.0302693, 0.0467352, 0.290252, 0.568309, 0.130947, 0.00218117, 0.358267, 0.976421, 0.69907, 0.802502, 0.298183, 0.0968203, 0.179036, 0.928288, 0.104217, 0.414126, 0.850352, 0.421163, 0.54415, 0.166134, 0.902478, 0.17836, 0.405866, 0.50712, 0.335796, 0.715559, 0.135021, 0.105094, 0.787175, 0.367156, 0.395401, 0.164392, 0.39428, 0.604062, 0.947249, 0.884381, 0.654034, 0.423842, 0.013755, 0.455803, 0.836788, 0.998852, 0.891757, 0.806675, 0.183851, 0.501819, 0.0742693, 0.462696, 0.912391, 0.490791, 0.3255, 0.920401, 0.209304, 0.0509929, 0.00198485, 0.713042, 0.705819, 0.221963, 0.625561, 0.990062, 0.87923, 0.0403516, 0.902873, 0.587933, 0.74762, 0.219974, 0.522457, 0.651021, 0.185259, 0.401079, 0.527382, 0.41003, 0.255617, 0.204435, 0.184973, 0.23386, 0.764599, 0.814429, 0.232333, 0.949143, 0.285716, 0.571215, 0.194195, 0.127978, 0.220286, 0.699305, 0.301491, 0.780858, 0.456808, 0.381645, 0.580219, 0.913607, 0.18016, 0.551105, 0.599133, 0.8982, 0.288888, 0.129585, 0.155321, 0.906122, 0.132426, 0.627033, 0.0943135, 0.90147, 0.744035, 0.956401, 0.286716, 0.92634, 0.869586, 0.197875, 0.245068, 0.797152, 0.243978, 0.0029176, 0.536576, 0.448313, 0.578926, 0.576594, 0.0279766, 0.140589, 0.531467, 0.731435, 0.722225, 0.793909, 0.438038, 0.0416411, 0.356844, 0.802197, 0.879949, 0.78601, 0.54284, 0.284302, 0.082457, 0.297974, 0.432173, 0.516112, 0.362222, 0.805553, 0.910032, 0.14728, 0.317529, 0.40087, 0.259343, 0.567768, 0.541162, 0.135919, 0.924043, 0.389049, 0.126096, 0.0763522, 0.614717, 0.236133, 0.143414, 0.641401, 0.341851, 0.822335, 0.360423, 0.635841, 0.692367, 0.865551, 0.628575, 0.123744, 0.688649, 0.543258, 0.527071, 0.396863, 0.283506, 0.649493, 0.174779, 0.462425, 0.999697, 0.0520634, 0.155628, 
0.934503, 0.347624, 0.387869, 0.850368, 0.704923, 0.803748, 0.93635, 0.284214, 0.292087, 0.490883, 0.63007, 0.479258, 0.63344, 0.913521, 0.820823, 0.519035, 0.155867, 0.0218023, 0.435236, 0.440155, 0.312685, 0.923886, 0.729272, 0.779987, 0.41635, 0.995626, 0.655923, 0.113123, 0.107358, 0.309989, 0.890005, 0.0393405, 0.566035, 0.718976, 0.0419229, 0.806378, 0.184939, 0.718635, 0.426383, 0.199313, 0.597232, 0.207885, 0.0979516, 0.18374, 0.650931, 0.508527, 0.868066, 0.374399, 0.763201, 0.389844, 0.851939, 0.102532, 0.569876, 0.641274, 0.512953, 0.5503, 0.38177, 0.952295, 0.845207, 0.627583, 0.154942, 0.918327, 0.383177, 0.0323818, 0.118195, 0.597676, 0.74541, 0.756755, 0.29324, 0.236624, 0.324037, 0.313808, 0.785109, 0.436458, 0.166312, 0.269294, 0.657411, 0.342839, 0.791003, 0.413033, 0.095342, 0.562967, 0.929033, 0.372442, 0.318249, 0.457151, 0.0845246, 0.541658, 0.360231, 0.511668, 0.412251, 0.912032, 0.505069, 0.262456, 0.737895, 0.274547, 0.0442639, 0.926439, 0.948311, 0.691462, 0.442879, 0.8754, 0.245462, 0.964447, 0.153074, 0.873637, 0.885767, 0.438426, 0.724104, 0.189726, 0.894894, 0.685126, 0.66051, 0.421584, 0.60197, 0.663103, 0.256222, 0.0951405, 0.253675, 0.742905, 0.732114, 0.972861, 0.646337, 0.206869, 0.933182, 0.460643, 0.617571, 0.155458, 0.685424, 0.921464, 0.112495, 0.0326442, 0.777823, 0.114861, 0.930235, 0.518505, 0.828133, 0.472568, 0.187044, 0.437994, 0.0758454, 0.970258, 0.627685, 0.702015, 0.22676, 0.311245, 0.904505, 0.980784, 0.152715, 0.896043, 0.796668, 0.758406, 0.778774, 0.697944, 0.00945681, 0.396067, 0.238081, 0.571738, 0.488675, 0.825316, 0.61304, 0.791359, 0.102336, 0.233998, 0.800525, 0.71137, 0.336429, 0.163301, 0.557184, 0.586251, 0.672416, 0.244185, 0.399021, 0.637515, 0.958232, 0.483219, 0.337786, 0.136581, 0.473416, 0.215908, 0.18168, 0.693056, 0.23042, 0.883538, 0.374635, 0.211695, 0.595548, 0.820593, 0.344573, 0.282011, 0.454332, 0.327038, 0.000917402, 0.0549154, 0.365315, 0.439875, 0.0844648, 0.254007, 0.794073, 0.308154, 0.437934, 0.276972, 0.919355, 0.0261948, 0.769438, 0.2641, 0.198885, 0.966101, 0.559764, 0.123938, 0.199953, 0.0299818, 0.653197, 0.614793, 0.475555, 0.324656, 0.281644, 0.796215, 0.648613, 0.532154, 0.334535, 0.157352, 0.888127, 0.184127, 0.766378, 0.966347, 0.972827, 0.161384, 0.789465, 0.161954, 0.643703, 0.959713, 0.186375, 0.356035, 0.98124, 0.890085, 0.951176, 0.738523, 0.567806, 0.788211, 0.697128, 0.158079, 0.615107, 0.175742, 0.00087403, 0.682844, 0.770078, 0.900257, 0.603647, 0.256177, 0.292316, 0.0521886, 0.733072, 0.732903, 0.15466, 0.859994, 0.749214, 0.81277, 0.690821, 0.0871517, 0.536864, 0.61165, 0.911363, 0.291076, 0.990806, 0.465582, 0.42672, 0.650891, 0.346158, 0.365325, 0.65537, 0.155177, 0.342061, 0.766712, 0.324513, 0.677385, 0.454501, 0.619278, 0.585384, 0.277829, 0.780265, 0.205013, 0.115243, 0.475227, 0.798551, 0.347859, 0.513611, 0.91549, 0.346032, 0.476028, 0.940483, 0.896188, 0.472846, 0.834689, 0.933848, 0.0773943, 0.730155, 0.647934, 0.484447, 0.0799081, 0.602382, 0.846805, 0.667573, 0.839229, 0.826398, 0.605332, 0.618392, 0.0509072, 0.0207609, 0.224265, 0.909765, 0.198481, 0.868754, 0.374745, 0.878138, 0.813178, 0.141953, 0.67433, 0.707803, 0.241497, 0.55633, 0.0970334, 0.704567, 0.322991, 0.759508, 0.789822, 0.253608, 0.450525, 0.678693, 0.673853, 0.4521, 0.121972, 0.72688, 0.545481, 0.818882, 0.514336, 0.26292, 0.0151278, 0.00732674, 0.939697, 0.263827, 0.171886, 0.585323, 0.899954, 0.0765824, 0.633741, 0.111016, 0.404388, 0.0727737, 0.470588, 0.268918, 0.499059, 0.748549, 0.41052, 0.0837198, 
0.379509, 0.689658, 0.943127, 0.469812, 0.66954, 0.631608, 0.189718, 0.824274, 0.542368, 0.364657, 0.398373, 0.182024, 0.563191, 0.000463342, 0.053823, 0.482991, 0.678146, 0.855849, 0.253442, 0.230127, 0.869565, 0.832267, 0.143135, 0.865522, 0.122105, 0.697407, 0.393396, 0.594313, 0.723475, 0.507965, 0.163211, 0.353912, 0.170464, 0.69387, 0.677314, 0.0352086, 0.76777, 0.270768, 0.72994, 0.780155, 0.162973, 0.437177, 0.30326, 0.841017, 0.969776, 0.84691, 0.998957, 0.0803233, 0.34654, 0.626189, 0.256172, 0.169087, 0.011458, 0.141743, 0.985929, 0.305722, 0.42469, 0.770609, 0.895321, 0.345501, 0.0372501, 0.174137, 0.0892318, 0.149059, 0.217592, 0.980435, 0.372879, 0.801079, 0.611124, 0.55515, 0.71853, 0.777703, 0.282011, 0.677403, 0.284747, 0.893636, 0.526491, 0.578273, 0.605364, 0.536548, 0.876358, 0.977666, 0.82618, 0.327431, 0.43535, 0.976921, 0.365204, 0.360949, 0.661694, 0.3788, 0.303999, 0.833506, 0.769931, 0.984484, 0.830619, 0.596813, 0.256631, 0.342354, 0.211741, 0.480985, 0.156707, 0.194089, 0.294124, 0.0229276, 0.152847, 0.20838, 0.735298, 0.29585, 0.580515, 0.179788, 0.961631, 0.664593, 0.228788, 0.988589, 0.740765, 0.692283, 0.633887, 0.591812, 0.631869, 0.482088, 0.182103, 0.194961, 0.932069, 0.815949, 0.97663, 0.00321936, 0.416946, 0.751696, 0.276315, 0.42461, 0.162478, 0.122763, 0.976472, 0.197001, 0.249816, 0.0988475, 0.0492697, 0.170823, 0.308457, 0.566725, 0.969683, 0.82008, 0.443133, 0.290308, 0.712629, 0.104436, 0.271146, 0.433486, 0.162777, 0.295001, 0.00706051, 0.159164, 0.982884, 0.785968, 0.563684, 0.878506, 0.996777, 0.184828, 0.860534, 0.460421, 0.405588, 0.231332, 0.454473, 0.177382, 0.69441, 0.980233, 0.0406836, 0.770267, 0.490676, 0.228413, 0.484287, 0.106554, 0.76049, 0.572014, 0.435451, 0.31732, 0.036037, 0.846877, 0.618015, 0.478935, 0.386394, 0.878349, 0.921997, 0.425029, 0.0446865, 0.86969, 0.0922333, 0.196239, 0.756767, 0.560272, 0.807042, 0.309844, 0.74002, 0.381336, 0.794198, 0.544657, 0.384193, 0.884803, 0.364934, 0.241674, 0.207013, 0.7099, 0.58936, 0.876078, 0.398796, 0.508855, 0.352048, 0.837214, 0.108648, 0.733437, 0.141866, 0.713733, 0.481324, 0.99968, 0.279308, 0.693086, 0.12665, 0.680087, 0.913595, 0.979134, 0.929132, 0.088604, 0.0657521, 0.3873, 0.514501, 0.487289, 0.97546, 0.316114, 0.19143, 0.178553, 0.350775, 0.00432832, 0.197521, 0.611761, 0.738909, 0.35769, 0.888571, 0.985438, 0.196313, 0.227811, 0.230726, 0.491513, 0.590531, 0.588628, 0.597536, 0.127964, 0.709513, 0.998737, 0.657883, 0.380948, 0.638691, 0.935765, 0.724699, 0.634965, 0.0997543, 0.0848483, 0.563561, 0.224521, 0.68957, 0.018406, 0.973272, 0.245803, 0.0521846, 0.95593, 0.838984, 0.575486, 0.689339, 0.293988, 0.172232, 0.749088, 0.105492, 0.782773, 0.982242, 0.512855, 0.666952, 0.564991, 0.426079, 0.268583, 0.200784, 0.935242, 0.0378196, 0.447407, 0.547996, 0.965037, 0.934715, 0.440276, 0.958489, 0.173333, 0.123474, 0.0802499, 0.424981, 0.494363, 0.798343, 0.566251, 0.243821, 0.264189, 0.0951665, 0.537235, 0.53466, 0.976458, 0.268952, 0.749503, 0.154052, 0.436807, 0.25866, 0.958374, 0.899719, 0.439639, 0.0587805, 0.0208674, 0.917651, 0.921434, 0.165194, 0.760968, 0.671524, 0.0727238, 0.918638, 0.648259, 0.658971, 0.584858, 0.840813, 0.847814, 0.170104, 0.91862, 0.313914, 0.335469, 0.162633, 0.645348, 0.855711, 0.104972, 0.0329765, 0.396648, 0.400214, 0.603795, 0.384633, 0.859126, 0.0937945, 0.375272, 0.872602, 0.979902, 0.785618, 0.0772138, 0.540503, 0.09794, 0.17693, 0.641151, 0.763509, 0.631704, 0.794396, 0.120896, 0.070601, 0.157818, 0.489285, 0.119313, 0.715391, 0.540251, 
0.254123, 0.952205, 0.546296, 0.493261, 0.229487, 0.0423695, 0.413891, 0.619045, 0.0137289, 0.963241, 0.068086, 0.877313, 0.576835, 0.893131, 0.800452, 0.796079, 0.0524707, 0.363082, 0.933241, 0.237574, 0.997836, 0.441316, 0.79692, 0.809117, 0.967089, 0.799937, 0.398065, 0.00212683, 0.469694, 0.885177, 0.341553, 0.13186, 0.91821, 0.358324, 0.310943, 0.145477, 0.830015, 0.589568, 0.44769, 0.674439, 0.162346, 0.237803, 0.540769, 0.451391, 0.598529, 0.282071, 0.293301, 0.0604747, 0.510597, 0.73881, 0.228732, 0.0567351, 0.455859, 0.19745, 0.553058, 0.564941, 0.775575, 0.693834, 0.258503, 0.237224, 0.339459, 0.14242, 0.239592, 0.783003, 0.050519, 0.515439, 0.325062, 0.78496, 0.983896, 0.947207, 0.728585, 0.21937, 0.7966, 0.668894, 0.434569, 0.880538, 0.0181109, 0.577499, 0.379877, 0.168695, 0.123193, 0.935591, 0.106843, 0.157375, 0.413981, 0.661202, 0.418603, 0.301505, 0.12235, 0.217523, 0.291922, 0.390134, 0.534702, 0.670236, 0.890481, 0.20324, 0.206657, 0.552903, 0.392794, 0.902983, 0.962504, 0.797904, 0.635953, 0.292137, 0.180469, 0.484039, 0.39552, 0.744105, 0.913101, 0.0137772, 0.124353, 0.197006, 0.406842, 0.622996, 0.142235, 0.996856, 0.434001, 0.656566, 0.59283, 0.176573, 0.144062, 0.852539, 0.500089, 0.869166, 0.11969, 0.586362, 0.422721, 0.246049, 0.159578, 0.00497406, 0.740171, 0.0764762, 0.430388, 0.87774, 0.908127, 0.985444, 0.14588, 0.722973, 0.733219, 0.191227, 0.443189, 0.0640015, 0.517273, 0.0306847, 0.373219, 0.351447, 0.770752, 0.746587, 0.551821, 0.809159, 0.59491, 0.188787, 0.112883, 0.566536, 0.819566, 0.410144, 0.54271, 0.332758, 0.615145, 0.191363, 0.489615, 0.354987, 0.960338, 0.623565, 0.957175, 0.505025, 0.642512, 0.473598, 0.375494, 0.998481, 0.785355, 0.110431, 0.431049, 0.751209, 0.349683, 0.422449, 0.711056, 0.56415, 0.548511, 0.723139, 0.553406, 0.781189, 0.933916, 0.198305, 0.713925, 0.996284, 0.129174, 0.197931, 0.686833, 0.179496, 0.503734, 0.588643, 0.891453, 0.104466, 0.295562, 0.501657, 0.354877, 0.850596, 0.144988, 0.513696, 0.675075, 0.547987, 0.958786, 0.816923, 0.482653, 0.0852548, 0.71496, 0.112995, 0.0584912, 0.0640326, 0.443337, 0.0521254, 0.932508, 0.682979, 0.780754, 0.153132, 0.520997, 0.593065, 0.879744, 0.829637, 0.580643, 0.618082, 0.560511, 0.971666, 0.0485484, 0.734169, 0.770737, 0.93916, 0.755172, 0.105978, 0.488665, 0.199998, 0.971012, 0.230891, 0.429226, 0.469178, 0.0224203, 0.439181, 0.635167, 0.162624, 0.102113, 0.499927, 0.989104, 0.323647, 0.520266, 0.182815, 0.894651, 0.183042, 0.184294, 0.371266, 0.923951, 0.297685, 0.661793, 0.162735, 0.54869, 0.612413, 0.849253, 0.0796882, 0.93868, 0.905974, 0.201802, 0.467066, 0.015462, 0.145179, 0.895501, 0.0399703, 0.73783, 0.928015, 0.290037, 0.0127794, 0.660176, 0.768915, 0.0611624, 0.59375, 0.197471, 0.993766, 0.668543, 0.561358, 0.705228, 0.193655, 0.340477, 0.175667, 0.411412, 0.495274, 0.20456, 0.439602, 0.429848, 0.789995, 0.668581, 0.140415, 0.755966, 0.878249, 0.0220638, 0.623944, 0.79379, 0.950259, 0.253693, 0.434523, 0.950525, 0.149035, 0.700093, 0.780686, 0.744102, 0.173265, 0.627226, 0.549647, 0.0228237, 0.473883, 0.00742775, 0.0821427, 0.379757, 0.344386, 0.736901, 0.339416, 0.262134, 0.38348, 0.101302, 0.152379, 0.0496804, 0.152217, 0.249193, 0.363546, 0.962433, 0.692226, 0.23119, 0.268991, 0.250231, 0.0950519, 0.351426, 0.646594, 0.341355, 0.997766, 0.510319, 0.72302, 0.552213, 0.207419, 0.750046, 0.70985, 0.420369, 0.985432, 0.909247, 0.205197, 0.478539, 0.512852, 0.672696, 0.971522, 0.556539, 0.780706, 0.889015, 0.544833, 0.498118, 0.272023, 0.018144, 0.746029, 0.083547, 
0.980626, 0.699201, 0.461987, 0.624705, 0.563836, 0.708676, 0.492007, 0.687247, 0.993667, 0.446995, 0.197628, 0.599036, 0.0752622, 0.529617, 0.191455, 0.00515825, 0.127586, 0.312099, 0.223878, 0.00231979, 0.129675, 0.0727625, 0.909658, 0.524597, 0.937275, 0.827607, 0.736921, 0.494255, 0.291899, 0.453238, 0.396438, 0.0703756, 0.172434, 0.939525, 0.790865, 0.0328456, 0.588702, 0.217977, 0.412422, 0.460639, 0.465881, 0.0243425, 0.00895228, 0.0425363, 0.992284, 0.447183, 0.216484, 0.122755, 0.617058, 0.0431797, 0.692331, 0.818024, 0.825901, 0.88796, 0.0218601, 0.86947, 0.484876, 0.199033, 0.20534, 0.861005, 0.626784, 0.959516, 0.179813, 0.275457, 0.479256, 0.632075, 0.293118, 0.516963, 0.995624, 0.269653, 0.510676, 0.806218, 0.839128, 0.0744236, 0.00414232, 0.0808256, 0.290425, 0.850942, 0.251608, 0.432648, 0.254348, 0.729562, 0.130449, 0.0496187, 0.118903, 0.445543, 0.575739, 0.299474, 0.887744, 0.177957, 0.931709, 0.688859, 0.678725, 0.0825151, 0.561248, 0.0944602, 0.101246, 0.881836, 0.641342, 0.759043, 0.661163, 0.729505, 0.00116935, 0.951158, 0.651216, 0.582024, 0.183692, 0.0574782, 0.719584, 0.920926, 0.894845, 0.0250875, 0.236701, 0.0276926, 0.757403, 0.359298, 0.903896, 0.857549, 0.391836, 0.860684, 0.385672, 0.866655, 0.0525078, 0.199139, 0.734153, 0.113567, 0.962971, 0.696906, 0.109213, 0.732915, 0.519414, 0.254859, 0.563371, 0.802798, 0.703578, 0.219371, 0.326014, 0.51395, 0.0520437, 0.808046, 0.157353, 0.717751, 0.106514, 0.63996, 0.100496, 0.806815, 0.535318, 0.949571, 0.315457, 0.718858, 0.0144244, 0.493562, 0.679857, 0.537302, 0.257917, 0.248231, 0.970416, 0.215747, 0.260754, 0.705911, 0.337085, 0.0810561, 0.131216, 0.0964396, 0.0581134, 0.0652655, 0.367965, 0.570519, 0.543193, 0.485556, 0.877042, 0.68437, 0.0970229, 0.839031, 0.687555, 0.421574, 0.0072242, 0.112718, 0.376165, 0.256039, 0.905163, 0.481015, 0.976688, 0.44672, 0.505033, 0.827824, 0.871342, 0.612695, 0.368103, 0.316908, 0.0808598, 0.949908, 0.746749, 0.193154, 0.203643, 0.482257, 0.355387, 0.839368, 0.38212, 0.693222, 0.121367, 0.856845, 0.596884, 0.142164, 0.904448, 0.891239, 0.552777, 0.621616, 0.821323, 0.246115, 0.446838, 0.507474, 0.88037, 0.979186, 0.63586, 0.4092, 0.563701, 0.672052, 0.220747, 0.802606, 0.912292, 0.479805, 0.81934, 0.395447, 0.820209, 0.0807894, 0.0823812, 0.838224, 0.122338, 0.419926, 0.366944, 0.976147, 0.308975, 0.0502506, 0.164821, 0.508505, 0.05446, 0.417526, 0.674822, 0.451643, 0.738939, 0.907528, 0.139576, 0.815772, 0.604352, 0.335576, 0.629058, 0.739361, 0.339942, 0.654086, 0.983225, 0.800515, 0.421016, 0.242199, 0.705033, 0.667766, 0.607148, 0.883339, 0.129436, 0.960134, 0.416758, 0.782157, 0.575662, 0.310401, 0.280256, 0.0103829, 0.299996, 0.696368, 0.778568, 0.785373, 0.35414, 0.713419, 0.774064, 0.672779, 0.0335733, 0.0260403, 0.309257, 0.56561, 0.823762, 0.885718, 0.478146, 0.727463, 0.404819, 0.294524, 0.418617, 0.525707, 0.600867, 0.300731, 0.946569, 0.433926, 0.913954, 0.374384, 0.24482, 0.422946, 0.608803, 0.834739, 0.999752, 0.62248, 0.311682, 0.193417, 0.952556, 0.9488, 0.267526, 0.0338157, 0.334645, 0.70904, 0.713905, 0.845518, 0.213907, 0.321207, 0.15708, 0.65225, 0.746379, 0.192876, 0.503195, 0.77888, 0.070674, 0.316134, 0.72445, 0.695228, 0.152465, 0.545359, 0.400305, 0.498732, 0.216983, 0.484399, 0.0943073, 0.207141, 0.537247, 0.330498, 0.883461, 0.714721, 0.961275, 0.59487, 0.0636283, 0.737567, 0.6383, 0.483291, 0.247155, 0.857567, 0.631703, 0.429701, 0.454042, 0.714251, 0.979674, 0.665489, 0.53912, 0.523109, 0.0549938, 0.924437, 0.392357, 0.635091, 0.179913, 
0.23256, 0.721207, 0.490126, 0.984059, 0.825843, 0.748589, 0.554943, 0.340699, 0.133856, 0.446496, 0.667434, 0.103882, 0.195872, 0.294135, 0.935846, 0.0890971, 0.544157, 0.563039, 0.46606, 0.1378, 0.654452, 0.122944, 0.073661, 0.553047, 0.32019, 0.897619, 0.564044, 0.470212, 0.491478, 0.692478, 0.059955, 0.690505, 0.585268, 0.839648, 0.487905, 0.795444, 0.855615, 0.588132, 0.493212, 0.313429, 0.301619, 0.958473, 0.705264, 0.508996, 0.709552, 0.363808, 0.236151, 0.641981, 0.379334, 0.588468, 0.920788, 0.247678, 0.724616, 0.126874, 0.411534, 0.467776, 0.176375, 0.902795, 0.257912, 0.523172, 0.12205, 0.64428, 0.690845, 0.807008, 0.400226, 0.055123, 0.691599, 0.329486, 0.490543, 0.471629, 0.946182, 0.383887, 0.624581, 0.514031, 0.356098, 0.479177, 0.270793, 0.522778, 0.541909, 0.188393, 0.770944, 0.727937, 0.586735, 0.812681, 0.00823873, 0.780915, 0.842566, 0.97061, 0.177203, 0.508702, 0.62454, 0.342503, 0.483114, 0.722538, 0.80982, 0.639306, 0.664388, 0.225568, 0.414917, 0.693165, 0.569524, 0.35426, 0.312786, 0.726347, 0.0639119, 0.476121, 0.192057, 0.543966, 0.590539, 0.231263, 0.686497, 0.98829, 0.75142, 0.564079, 0.0198914, 0.0859048, 0.452964, 0.7643, 0.932101, 0.979583, 0.186903, 0.385352, 0.517286, 0.552364, 0.903872, 0.248989, 0.197119, 0.424309, 0.969977, 0.524458, 0.471119, 0.833608, 0.980825, 0.28423, 0.212905, 0.259927, 0.709319, 0.952739, 0.400923, 0.346101, 0.557426, 0.345971, 0.696839, 0.369703, 0.74033, 0.317726, 0.810623, 0.00095053, 0.511518, 0.55232, 0.852479, 0.186896, 0.984056, 0.35133, 0.733324, 0.333542, 0.891915, 0.892631, 0.859045, 0.629717, 0.95311, 0.973292, 0.870052, 0.376442, 0.955109, 0.878706, 0.831966, 0.904439, 0.090226, 0.457511, 0.508178, 0.757862, 0.315269, 0.299051, 0.00307837, 0.0503489, 0.461593, 0.383666, 0.9412, 0.62874, 0.322915, 0.594926, 0.234609, 0.975646, 0.386876, 0.144096, 0.197405, 0.017263, 0.00194336, 0.529636, 0.490175, 0.0740788, 0.730821, 0.934975, 0.0852962, 0.961476, 0.628018, 0.171702, 0.469954, 0.360609, 0.554185, 0.470818, 0.161318, 0.890747, 0.729567, 0.839131, 0.757475, 0.885114, 0.861754, 0.684986, 0.822687, 0.190117, 0.0288109, 0.101413, 0.356411, 0.606827, 0.984595, 0.250643, 0.562617, 0.509067, 0.606134, 0.143183, 0.753764, 0.42892, 0.00569317, 0.331559, 0.228103, 0.856878, 0.139, 0.802178, 0.652949, 0.0898696, 0.349402, 0.0576169, 0.501473, 0.957921, 0.404505, 0.285333, 0.363342, 0.624577, 0.210267, 0.534573, 0.711512, 0.414286, 0.437086, 0.21548, 0.673452, 0.172285, 0.789582, 0.171501, 0.545055, 0.368777, 0.840091, 0.0795685, 0.443115, 0.937032, 0.97631, 0.833763, 0.350579, 0.612087, 0.241638, 0.604022, 0.205365, 0.0910252, 0.500496, 0.0618943, 0.306671, 0.135412, 0.879524, 0.727931, 0.311255, 0.613369, 0.553155, 0.97048, 0.286825, 0.65849, 0.328945, 0.926423, 0.222118, 0.770134, 0.00497807, 0.241586, 0.371391, 0.0499316, 0.860676, 0.388414, 0.0490442, 0.841403, 0.788269, 0.00710306, 0.591672, 0.476099, 0.863898, 0.646663, 0.26467, 0.865229, 0.878215, 0.745007, 0.18261, 0.321436, 0.0869906, 0.121174, 0.386345, 0.367444, 0.336269, 0.749858, 0.297982, 0.698849, 0.853547, 0.935968, 0.94279, 0.950681, 0.197965, 0.396266, 0.129254, 0.931324, 0.523649, 0.499535, 0.509434, 0.386011, 0.546035, 0.683498, 0.811974, 0.667707, 0.0393593, 0.255919, 0.924579, 0.871657, 0.469282, 0.042623, 0.227752, 0.805485, 0.536926, 0.982236, 0.336951, 0.155744, 0.252055, 0.0254342, 0.727967, 0.272359, 0.47154, 0.953559, 0.0391744, 0.231566, 0.79129, 0.799607, 0.0838479, 0.33954, 0.943043, 0.791206, 0.953935, 0.534319, 0.243267, 0.029661, 0.485159, 
0.671489, 0.31243, 0.0255479, 0.0498638, 0.67887, 0.749428, 0.3063, 0.264987, 0.566766, 0.486102, 0.384858, 0.344514, 0.0420542, 0.740293, 0.456772, 0.555114, 0.5424, 0.343212, 0.54725, 0.634486, 0.501399, 0.788969, 0.647696, 0.711318, 0.21642, 0.552437, 0.860784, 0.483015, 0.102228, 0.0410136, 0.700498, 0.496406, 0.934693, 0.992023, 0.765688, 0.423802, 0.538972, 0.0667414, 0.390147, 0.11476, 0.598344, 0.638864, 0.0181589, 0.28639, 0.496866, 0.217171, 0.59036, 0.0793986, 0.899908, 0.830652, 0.164535, 0.614489, 0.125172, 0.702629, 0.765871, 0.0599793, 0.736008, 0.804631, 0.588334, 0.874484, 0.0219589, 0.665231, 0.021349, 0.763559, 0.271918, 0.895048, 0.360778, 0.154612, 0.756385, 0.791282, 0.761342, 0.48507, 0.333422, 0.561764, 0.337604, 0.863646, 0.569103, 0.225558, 0.959984, 0.302282, 0.482395, 0.492459, 0.546571, 0.0969414, 0.295959, 0.114857, 0.588218, 0.158047, 0.582859, 0.128798, 0.350742, 0.23493, 0.355987, 0.948633, 0.24425, 0.48679, 0.781629, 0.613991, 0.00259594, 0.832076, 0.226109, 0.353453, 0.293911, 0.53209, 0.280915, 0.312423, 0.853643, 0.735621, 0.786695, 0.558889, 0.826306, 0.824058, 0.865441, 0.691746, 0.984011, 0.953064, 0.768883, 0.075343, 0.644279, 0.606134, 0.637826, 0.96824, 0.867386, 0.662157, 0.282204, 0.854771, 0.575143, 0.760855, 0.425274, 0.526426, 0.51551, 0.173752, 0.250269, 0.0718145, 0.86036, 0.972755, 0.922556, 0.786071, 0.912873, 0.308962, 0.807096, 0.6535, 0.994705, 0.073716, 0.618295, 0.492507, 0.553491, 0.769549, 0.466125, 0.671833, 0.596665, 0.376966, 0.976303, 0.114262, 0.33294, 0.311872, 0.832442, 0.030041, 0.560114, 0.489543, 0.172893, 0.02074, 0.0487108, 0.709956, 0.714885, 0.499625, 0.505481, 0.0465617, 0.778833, 0.564333, 0.178409, 0.811857, 0.610094, 0.486218, 0.126869, 0.663186, 0.852589, 0.551538, 0.398321, 0.599403, 0.761502, 0.448972, 0.0349859, 0.381255, 0.459973, 0.357422, 0.591028, 0.551849, 0.780519, 0.399729, 0.992049, 0.610586, 0.167629, 0.913926, 0.00525993, 0.463541, 0.687116, 0.0301318, 0.998411, 0.303014, 0.389526, 0.191397, 0.0485435, 0.645825, 0.0644446, 0.545485, 0.803161, 0.527352, 0.0392867, 0.402408, 0.180068, 0.342006, 0.122349, 0.494184, 0.816004, 0.141213, 0.887642, 0.677055, 0.81867, 0.0713925, 0.294113, 0.243646, 0.0916459, 0.415963, 0.102229, 0.675766, 0.648715, 0.3249, 0.023914, 0.716525, 0.520458, 0.55829, 0.0192804, 0.792391, 0.327761, 0.00986895, 0.926089, 0.641364, 0.556168, 0.679491, 0.359651, 0.765137, 0.448557, 0.832623, 0.531892, 0.0401571, 0.820219, 0.4483, 0.399641, 0.0522186, 0.498818, 0.941872, 0.152986, 0.719836, 0.646759, 0.773646, 0.525147, 0.67968, 0.0677296, 0.414053, 0.191738, 0.137422, 0.112622, 0.36914, 0.935913, 0.118887, 0.431148, 0.79774, 0.266562, 0.29987, 0.487814, 0.632592, 0.728103, 0.788662, 0.928283, 0.680062, 0.92501, 0.572043, 0.837907, 0.377004, 0.260299, 0.419454, 0.434112, 0.812986, 0.867119, 0.906811, 0.929856, 0.252947, 0.365709, 0.176834, 0.270347, 0.821275, 0.788086, 0.746725, 0.527146, 0.773846, 0.34274, 0.972822, 0.419464, 0.873507, 0.508657, 0.930039, 0.0300526, 0.0276344, 0.742487, 0.920272, 0.667396, 0.636921, 0.356219, 0.421662, 0.97804, 0.795539, 0.691565, 0.449484, 0.927937, 0.446389, 0.31938, 0.963623, 0.977388, 0.267271, 0.981866, 0.178604, 0.927389, 0.692281, 0.398067, 0.26157, 0.275562, 0.740463, 0.0300114, 0.644454, 0.020473, 0.823367, 0.476134, 0.91428, 0.690258, 0.430381, 0.132247, 0.134522, 0.792323, 0.414874, 0.579072, 0.216163, 0.333439, 0.500119, 0.302189, 0.223479, 0.353756, 0.26351, 0.323033, 0.345635, 0.494739, 0.773011, 0.202248, 0.675009, 0.44436, 0.10568, 
0.792557, 0.273839, 0.771164, 0.217629, 0.0117101, 0.447494, 0.262039, 0.0729463, 0.168122, 0.932708, 0.523333, 0.661263, 0.588571, 0.436633, 0.464305, 0.792759, 0.0922854, 0.226091, 0.457019, 0.991399, 0.297617, 0.891445, 0.674486, 0.610306, 0.458768, 0.346884, 0.859144, 0.394789, 0.505957, 0.756246, 0.927334, 0.18868, 0.446175, 0.889549, 0.810579, 0.129489, 0.935933, 0.791653, 0.800206, 0.492832, 0.247432, 0.333932, 0.865335, 0.850728, 0.0397239, 0.430762, 0.678164, 0.607935, 0.812988, 0.472954, 0.654096, 0.195102, 0.589594, 0.259284, 0.0235512, 0.816587, 0.913337, 0.626546, 0.197613, 0.960908, 0.448535, 0.826306, 0.974263, 0.237144, 0.283808, 0.76009, 0.774733, 0.521747, 0.093365, 0.56867, 0.974468, 0.82523, 0.486585, 0.0876118, 0.844382, 0.184445, 0.902508, 0.914591, 0.315663, 0.223033, 0.0797762, 0.520933, 0.80171, 0.558458, 0.19027, 0.0600096, 0.837953, 0.27773, 0.311072, 0.689664, 0.961714, 0.178571, 0.0782305, 0.255316, 0.852218, 0.284925, 0.169079, 0.247419, 0.428893, 0.509164, 0.529034, 0.0932099, 0.532579, 0.00556066, 0.661644, 0.0307554, 0.588391, 0.179421, 0.0119119, 0.514301, 0.259695, 0.626493, 0.379885, 0.511277, 0.279113, 0.353348, 0.972152, 0.166663, 0.838509, 0.422474, 0.171259, 0.400788, 0.601623, 0.470386, 0.503779, 0.62779, 0.82932, 0.895982, 0.0824422, 0.789089, 0.727913, 0.474258, 0.0497131, 0.00955813, 0.252122, 0.890049, 0.337881, 0.484709, 0.273788, 0.999112, 0.740117, 0.404148, 0.190148, 0.903226, 0.491247, 0.303718, 0.118576, 0.880404, 0.710057, 0.814665, 0.056526, 0.873991, 0.807616, 0.7606, 0.0980763, 0.479651, 0.469628, 0.721994, 0.798353, 0.189775, 0.655126, 0.36509, 0.369786, 0.315095, 0.343931, 0.983116, 0.555985, 0.225449, 0.566013, 0.125261, 0.0206297, 0.899223, 0.397869, 0.0086109, 0.528844, 0.36998, 0.419304, 0.352928, 0.398114, 0.738457, 0.229685, 0.322922, 0.991783, 0.993516, 0.00377801, 0.661103, 0.680122, 0.948009, 0.924472, 0.246157, 0.993241, 0.395537, 0.130494, 0.379061, 0.482428, 0.606916, 0.342766, 0.693158, 0.90456, 0.303971, 0.784298, 0.821562, 0.0634914, 0.851069, 0.0973823, 0.972907, 0.873059, 0.820636, 0.629946, 0.118271, 0.211263, 0.720244, 0.337837, 0.928264, 0.318676, 0.131561, 0.40809, 0.0659858, 0.79075, 0.274425, 0.156597, 0.514231, 0.366287, 0.996598, 0.681998, 0.57301, 0.165393, 0.163006, 0.53473, 0.155232, 0.802874, 0.490654, 0.442575, 0.646104, 0.0417704, 0.400137, 0.842228, 0.255275, 0.282046, 0.268781, 0.100579, 0.949506, 0.651639, 0.33989, 0.28822, 0.796744, 0.960923, 0.811584, 0.205499, 0.250308, 0.674535, 0.863348, 0.802493, 0.281164, 0.288604, 0.801861, 0.756335, 0.182745, 0.26038, 0.639078, 0.0118065, 0.771779, 0.749096, 0.756532, 0.576785, 0.699314, 0.716363, 0.387166, 0.684195, 0.227072, 0.599258, 0.374418, 0.515709, 0.0850697, 0.216767, 0.388641, 0.35353, 0.390818, 0.488494, 0.875992, 0.55097, 0.979684, 0.66537, 0.322002, 0.956732, 0.989972, 0.759803, 0.68069, 0.302221, 0.524167, 0.193841, 0.561364, 0.881131, 0.126314, 0.221103, 0.831082, 0.732482, 0.306293, 0.25238, 0.518912, 0.664075, 0.854708, 0.410863, 0.422132, 0.790214, 0.624666, 0.826667, 0.742433, 0.643664, 0.257051, 0.291887, 0.30994, 0.984902, 0.34293, 0.417699, 0.973489, 0.18253, 0.335018, 0.645121, 0.853594, 0.142092, 0.00140872, 0.438017, 0.660029, 0.0439089, 0.760549, 0.17638, 0.867179, 0.568021, 0.248953, 0.203794, 0.929354, 0.796162, 0.150492, 0.294376, 0.0365475, 0.7358, 0.580934, 0.745314, 0.621556, 0.555663, 0.947511, 0.940511, 0.595953, 0.669709, 0.0868572, 0.368872, 0.529678, 0.761891, 0.79758, 0.840227, 0.285224, 0.909886, 0.446478, 0.330743, 
0.776215, 0.836051, 0.88733, 0.874798, 0.272198, 0.738723, 0.0737272, 0.918806, 0.127292, 0.524442, 0.132595, 0.787819, 0.517401, 0.642899, 0.378123, 0.841222, 0.585387, 0.621287, 0.0283291, 0.808297, 0.0812849, 0.850406, 0.460515, 0.767217, 0.322963, 0.192544, 0.716186, 0.193532, 0.484245, 0.673234, 0.847921, 0.225627, 0.848513, 0.40118, 0.849782, 0.418573, 0.347771, 0.855674, 0.0714761, 0.195996, 0.00437628, 0.352138, 0.473055, 0.70412, 0.621735, 0.178853, 0.371567, 0.653595, 0.189244, 0.351817, 0.899764, 0.235987, 0.221373, 0.458357, 0.340361, 0.684854, 0.408874, 0.777796, 0.969712, 0.85128, 0.348101, 0.801514, 0.458754, 0.713774, 0.682207, 0.416362, 0.954578, 0.0584264, 0.85257, 0.563311, 0.122204, 0.882302, 0.171371, 0.490891, 0.329193, 0.913678, 0.65997, 0.310574, 0.623693, 0.473249, 0.640358, 0.608075, 0.222066, 0.707867, 0.117805, 0.117652, 0.17752, 0.173416, 0.539982, 0.510867, 0.672772, 0.250098, 0.371021, 0.10894, 0.314708, 0.604465, 0.783892, 0.552285, 0.82579, 0.439523, 0.653172, 0.629957, 0.622902, 0.0065166, 0.841154, 0.0814163, 0.0985239, 0.593193, 0.22948, 0.531344, 0.481579, 0.767712, 0.166649, 0.841859, 0.683126, 0.867295, 0.832777, 0.973099, 0.0271803, 0.947553, 0.00621338, 0.0738052, 0.535525, 0.050823, 0.518615, 0.187707, 0.436181, 0.0177505, 0.947396, 0.319046, 0.0448127, 0.886991, 0.269284, 0.582218, 0.107443, 0.126828, 0.419803, 0.349143, 0.90281, 0.104078, 0.178948, 0.84054, 0.65621, 0.504473, 0.16552, 0.561898, 0.493207, 0.230607, 0.147987, 0.308696, 0.956418, 0.546902, 0.957827, 0.792695, 0.311232, 0.903448, 0.340009, 0.933647, 0.701222, 0.800572, 0.965257, 0.851995, 0.732689, 0.156221, 0.651537, 0.650012, 0.486839, 0.916366, 0.262605, 0.340967, 0.600215, 0.0109303, 0.884928, 0.705577, 0.662114, 0.463401, 0.305938, 0.598698, 0.156752, 0.0471922, 0.577322, 0.214467, 0.720244, 0.641074, 0.425668, 0.882058, 0.855877, 0.759533, 0.428873, 0.442939, 0.34863, 0.559511, 0.546673, 0.889679, 0.705352, 0.331606, 0.722972, 0.191987, 0.880071, 0.121, 0.229362, 0.900354, 0.160051, 0.416177, 0.746829, 0.373132, 0.907209, 0.476464, 0.981733, 0.442532, 0.783611, 0.437506, 0.388315, 0.615209, 0.107328, 0.0905414, 0.771929, 0.901086, 0.778449, 0.553645, 0.303516, 0.966506, 0.787031, 0.111003, 0.876478, 0.877366, 0.998614, 0.0275316, 0.126583, 0.333877, 0.320575, 0.695409, 0.367766, 0.0619515, 0.558233, 0.667851, 0.362001, 0.511192, 0.593832, 0.104918, 0.223187, 0.95935, 0.586756, 0.552799, 0.688142, 0.959247, 0.302578, 0.829863, 0.37575, 0.906485, 0.689028, 0.319606, 0.248993, 0.0570994, 0.725527, 0.0163315, 0.246428, 0.288259, 0.936057, 0.019056, 0.577446, 0.892901, 0.976875, 0.260174, 0.101546, 0.909216, 0.888497, 0.890482, 0.443225, 0.147186, 0.948733, 0.169857, 0.302271, 0.593198, 0.180348, 0.969808, 0.84065, 0.38528, 0.509264, 0.623012, 0.416222, 0.765992, 0.591265, 0.703121, 0.0857105, 0.129768, 0.345686, 0.911736, 0.521593, 0.583656, 0.495557, 0.524357, 0.611519, 0.0110056, 0.81857, 0.847559, 0.451589, 0.986858, 0.231394, 0.986387, 0.47108, 0.0798482, 0.0484913, 0.566229, 0.289017, 0.529059, 0.591231, 0.97866, 0.322908, 0.0274567, 0.296049, 0.974199, 0.0491088, 0.231948, 0.261758, 0.163036, 0.78452, 0.282729, 0.20247, 0.031378, 0.764975, 0.660229, 0.284101, 0.601626, 0.375652, 0.268876, 0.122623, 0.801724, 0.211906, 0.116186, 0.679825, 0.999403, 0.119206, 0.251004, 0.77768, 0.54229, 0.0891417, 0.691306, 0.0877097, 0.25466, 0.578414, 0.614318, 0.542089, 0.377539, 0.854583, 0.933576, 0.202052, 0.152883, 0.220008, 0.757031, 0.629259, 0.263746, 0.725689, 0.536366, 0.978988, 
0.777539, 0.813108, 0.817181, 0.530806, 0.0939171, 0.242994, 0.874139, 0.747708, 0.807063, 0.0350361, 0.533946, 0.319516, 0.680287, 0.436791, 0.739375, 0.740547, 0.154306, 0.701016, 0.00234878, 0.939335, 0.862891, 0.593673, 0.719357, 0.181157, 0.0885877, 0.485182, 0.700532, 0.114159, 0.956177, 0.0805812, 0.302443, 0.117464, 0.783196, 0.639125, 0.753739, 0.379389, 0.502846, 0.583664, 0.0196653, 0.250722, 0.39182, 0.721552, 0.66313, 0.295055, 0.342602, 0.889733, 0.388999, 0.861722, 0.831452, 0.988328, 0.477979, 0.415305, 0.526842, 0.71775, 0.810882, 0.688081, 0.388987, 0.201506, 0.0903916, 0.66571, 0.401052, 0.655103, 0.869451, 0.980343, 0.369992, 0.164526, 0.113849, 0.420296, 0.0916802, 0.928891, 0.538109, 0.956781, 0.343222, 0.298249, 0.0215474, 0.545035, 0.830245, 0.662835, 0.104736, 0.0493058, 0.93374, 0.0414065, 0.833949, 0.138226, 0.557654, 0.594962, 0.22009, 0.722154, 0.8246, 0.0997313, 0.599946, 0.708322, 0.198689, 0.614759, 0.469621, 0.984223, 0.770478, 0.595345, 0.0143708, 0.261983, 0.103563, 0.207683, 0.996445, 0.583387, 0.87197, 0.399747, 0.15654, 0.356559, 0.839196, 0.0784418, 0.671753, 0.392062, 0.205537, 0.0189803, 0.807179, 0.495353, 0.00446501, 0.964691, 0.229715, 0.971734, 0.7399, 0.398118, 0.189112, 0.725908, 0.0376952, 0.471589, 0.734895, 0.749929, 0.456986, 0.251562, 0.695605, 0.485735, 0.911215, 0.0309049, 0.462976, 0.234532, 0.275878, 0.828262, 0.147824, 0.829732, 0.367973, 0.271491, 0.671973, 0.725092, 0.575875, 0.314053, 0.283632, 0.776727, 0.567926, 0.487624, 0.00710749, 0.4581, 0.507153, 0.438787, 0.599484, 0.0630961, 0.446721, 0.147339, 0.880934, 0.764336, 0.533937, 0.705525, 0.424653, 0.742123, 0.0696696, 0.199586, 0.495463, 0.182899, 0.864884, 0.141033, 0.713732, 0.670125, 0.39876, 0.200447, 0.935647, 0.142121, 0.823798, 0.894973, 0.0688477, 0.444488, 0.237844, 0.652158, 0.0583972, 0.672934, 0.0409217, 0.449197, 0.998904, 0.017062, 0.527327, 0.594482, 0.919851, 0.642763, 0.0166157, 0.146743, 0.202536, 0.710072, 0.220063, 0.42262, 0.401871, 0.442955, 0.0388413, 0.825911, 0.455906, 0.645273, 0.552892, 0.421103, 0.255049, 0.211356, 0.0938391, 0.370987, 0.923417, 0.411729, 0.215158, 0.660066, 0.518895, 0.116835, 0.258864, 0.502941, 0.547548, 0.534453, 0.621126, 0.135573, 0.674448, 0.425275, 0.899835, 0.438131, 0.711096, 0.616808, 0.376458, 0.379622, 0.00695235, 0.652446, 0.864579, 0.82628, 0.069206, 0.261676, 0.148714, 0.232246, 0.795325, 0.213598, 0.575413, 0.0716856, 0.649749, 0.359545, 0.548619, 0.190285, 0.290353, 0.285725, 0.531256, 0.748506, 0.612982, 0.166108, 0.804547, 0.0111855, 0.930841, 0.5736, 0.951222, 0.990194, 0.149537, 0.857895, 0.866818, 0.48156, 0.697083, 0.892048, 0.106432, 0.282388, 0.106496, 0.644213, 0.0825565, 0.99033, 0.74448, 0.912348, 0.513386, 0.618006, 0.0383737, 0.629989, 0.319011, 0.0181079, 0.986454, 0.349811, 0.314267, 0.240873, 0.167084, 0.290668, 0.660774, 0.918395, 0.257655, 0.7115, 0.440883, 0.0940582, 0.212359, 0.338975, 0.533599, 0.62604, 0.165259, 0.864207, 0.664372, 0.441045, 0.633473, 0.430536, 0.293942, 0.467641, 0.330037, 0.211291, 0.855025, 0.724142, 0.746025, 0.381033, 0.145113, 0.661244, 0.00047397, 0.318483, 0.125864, 0.154241, 0.951409, 0.296145, 0.144645, 0.916007, 0.45091, 0.329749, 0.68544, 0.271914, 0.968292, 0.538024, 0.362295, 0.226466, 0.720538, 0.446668, 0.55843, 0.984257, 0.57587, 0.435172, 0.465055, 0.974807, 0.879132, 0.606958, 0.272897, 0.928581, 0.163117, 0.338888, 0.187483, 0.777955, 0.0344172, 0.847382, 0.0359137, 0.82887, 0.0367074, 0.985197, 0.761066, 0.763216, 0.307284, 0.436844, 0.348138, 0.655239, 
0.949047, 0.489637, 0.187679, 0.562968, 0.992744, 0.527728, 0.596024, 0.742456, 0.803745, 0.56077, 0.64699, 0.512688, 0.804106, 0.528222, 0.78619, 0.046336, 0.858301, 0.221091, 0.0275115, 0.614239, 0.584934, 0.894209, 0.433158, 0.397711, 0.54934, 0.238469, 0.756508, 0.461006, 0.536856, 0.61681, 0.904252, 0.957949, 0.00946035, 0.98205, 0.281272, 0.749333, 0.207199, 0.33586, 0.5621, 0.494833, 0.0953187, 0.264148, 0.978974, 0.624195, 0.422894, 0.574342, 0.586763, 0.407432, 0.671367, 0.691812, 0.0354768, 0.544334, 0.53725, 0.0523175, 0.931544, 0.696832, 0.535487, 0.562655, 0.978433, 0.9759, 0.6137, 0.88824, 0.634806, 0.91389, 0.802327, 0.788886, 0.29587, 0.359461, 0.330227, 0.192235, 0.33422, 0.166573, 0.410696, 0.173734, 0.914069, 0.295933, 0.119086, 0.408076, 0.538984, 0.142058, 0.00935989, 0.315463, 0.342517, 0.674444, 0.228584, 0.153145, 0.126419, 0.0146576, 0.543831, 0.399264, 0.266148, 0.897295, 0.42099, 0.45644, 0.951078, 0.933947, 0.152281, 0.893732, 0.0116767, 0.38166, 0.5368, 0.093994, 0.363905, 0.302806, 0.186361, 0.985696, 0.314611, 0.209669, 0.392243, 0.111831, 0.918114, 0.601192, 0.889127, 0.0664733, 0.877663, 0.237696, 0.48902, 0.00822174, 0.95089, 0.287086, 0.587423, 0.605854, 0.321565, 0.997006, 0.313941, 0.326961, 0.607774, 0.336284, 0.663275, 0.745426, 0.921719, 0.364348, 0.777875, 0.393044, 0.10163, 0.00308402, 0.0668498, 0.279901, 0.65965, 0.642138, 0.747309, 0.328212, 0.623957, 0.398941, 0.207494, 0.841148, 0.612283, 0.223398, 0.73807, 0.529274, 0.265768, 0.958483, 0.286796, 0.53327, 0.0956185, 0.52155, 0.912272, 0.812075, 0.76738, 0.724869, 0.0703279, 0.571684, 0.576001, 0.558419, 0.535236, 0.510872, 0.995674, 0.879704, 0.166254, 0.842393, 0.37984, 0.923193, 0.956935, 0.0477059, 0.0734965, 0.874884, 0.849077, 0.207027, 0.124655, 0.309133, 0.6932, 0.546135, 0.8569, 0.563165, 0.73295, 0.385436, 0.857465, 0.891117, 0.938905, 0.13618, 0.632439, 0.684117, 0.501954, 0.0890991, 0.288565, 0.17376, 0.0915308, 0.653107, 0.845979, 0.789127, 0.439814, 0.22862, 0.806013, 0.233774, 0.943114, 0.954302, 0.933807, 0.657556, 0.164121, 0.624513, 0.782399, 0.166331, 0.425038, 0.112441, 0.055798, 0.480522, 0.108632, 0.212424, 0.109991, 0.972284, 0.766122, 0.798159, 0.280962, 0.403135, 0.73327, 0.600336, 0.610574, 0.540811, 0.895212, 0.162785, 0.76752, 0.397826, 0.746574, 0.65947, 0.0890833, 0.174131, 0.348146, 0.89503, 0.787937, 0.829608, 0.263003, 0.521369, 0.879155, 0.269169, 0.142506, 0.0333792, 0.704816, 0.721753, 0.795799, 0.399332, 0.0642958, 0.838391, 0.203988, 0.703254, 0.578773, 0.260028, 0.244866, 0.0521185, 0.0643188, 0.902621, 0.333046, 0.34006, 0.763157, 0.438156, 0.995069, 0.503676, 0.0424489, 0.954542, 0.0501422, 0.72261, 0.180772, 0.405272, 0.905464, 0.397728, 0.481824, 0.219146, 0.18593, 0.361789, 0.540865, 0.212726, 0.0145311, 0.996108, 0.949028, 0.0313359, 0.892986, 0.209844, 0.0204217, 0.143729, 0.149781, 0.80839, 0.601906, 0.314669, 0.644023, 0.318997, 0.290432, 0.292971, 0.279199, 0.246305, 0.302936, 0.588377, 0.427576, 0.422404, 0.640749, 0.590413, 0.797506, 0.391294, 0.845834, 0.737661, 0.455596, 0.996297, 0.935487, 0.888209, 0.196764, 0.214778, 0.665268, 0.13161, 0.279638, 0.435887, 0.395731, 0.542431, 0.181147, 0.143058, 0.344884, 0.341706, 0.420025, 0.95141, 0.199822, 0.649671, 0.977817, 0.719646, 0.873828, 0.214876, 0.217572, 0.904443, 0.804816, 0.0704537, 0.332569, 0.244298, 0.30215, 0.0706434, 0.782896, 0.100429, 0.986729, 0.104373, 0.221871, 0.686011, 0.903187, 0.658629, 0.00593545, 0.66306, 0.510265, 0.824122, 0.66244, 0.845467, 0.979222, 0.107885, 0.158799, 
0.903068, 0.939234, 0.412399, 0.324726, 0.16968, 0.910127, 0.107273, 0.316888, 0.835599, 0.0330597, 0.573721, 0.86464, 0.717939, 0.649407, 0.89027, 0.806881, 0.663934, 0.571069, 0.949662, 0.857635, 0.0942166, 0.656745, 0.316127, 0.137846, 0.358969, 0.747496, 0.427922, 0.584548, 0.43169, 0.454915, 0.356048, 0.00170449, 0.26271, 0.0553734, 0.712526, 0.699642, 0.626845, 0.216316, 0.59791, 0.914564, 0.73967, 0.0456612, 0.0303278, 0.619759, 0.93395, 0.0345666, 0.34491, 0.161455, 0.433299, 0.898209, 0.591036, 0.0803389, 0.331712, 0.921816, 0.856827, 0.236875, 0.618282, 0.893639, 0.698095, 0.228753, 0.946864, 0.45136, 0.298134, 0.207007, 0.0854858, 0.119187, 0.635895, 0.52909, 0.69962, 0.127592, 0.15653, 0.23986, 0.733369, 0.544024, 0.0871906, 0.494791, 0.802796, 0.657784, 0.229278, 0.667806, 0.661302, 0.796142, 0.904963, 0.366172, 0.545317, 0.669708, 0.810318, 0.459827, 0.486483, 0.74267, 0.624436, 0.818295, 0.371175, 0.497552, 0.521131, 0.984321, 0.125459, 0.422566, 0.176894, 0.597355, 0.387584, 0.390039, 0.676318, 0.76024, 0.682459, 0.887529, 0.982262, 0.546939, 0.666575, 0.121634, 0.128986, 0.848621, 0.563456, 0.610312, 0.655875, 0.10287, 0.271705, 0.709657, 0.360677, 0.166403, 0.944628, 0.479802, 0.410072, 0.350689, 0.387629, 0.32527, 0.204866, 0.32971, 0.323529, 0.0405677, 0.715867, 0.373414, 0.0464384, 0.41172, 0.453869, 0.764397, 0.907818, 0.0541775, 0.399667, 0.238141, 0.697363, 0.459104, 0.250045, 0.838594, 0.50154, 0.573339, 0.159081, 0.844144, 0.0314762, 0.454688, 0.785239, 0.801135, 0.639101, 0.373761, 0.48864, 0.649401, 0.786162, 0.124549, 0.817864, 0.819934, 0.827829, 0.49073, 0.233411, 0.0887261, 0.494884, 0.578041, 0.633325, 0.430376, 0.480944, 0.530314, 0.969412, 0.826959, 0.884511, 0.820284, 0.233308, 0.999118, 0.0519965, 0.825901, 0.708105, 0.734174, 0.448959, 0.195161, 0.804685, 0.680453, 0.506802, 0.246122, 0.9991, 0.281302, 0.14217, 0.358793, 0.729983, 0.614269, 0.799955, 0.877031, 0.305935, 0.780001, 0.713362, 0.920822, 0.264835, 0.710924, 0.990316, 0.842847, 0.852389, 0.972967, 0.76547, 0.10173, 0.48786, 0.0459325, 0.255809, 0.956663, 0.180142, 0.998191, 0.997955, 0.185299, 0.602267, 0.491361, 0.875398, 0.0658466, 0.567658, 0.997795, 0.591734, 0.249337, 0.552591, 0.647966, 0.805961, 0.851738, 0.377059, 0.530383, 0.653499, 0.107055, 0.487349, 0.676637, 0.829513, 0.649088, 0.626673, 0.922129, 0.771083, 0.249981, 0.459453, 0.26202, 0.249224, 0.692789, 0.934312, 0.7581, 0.334683, 0.925708, 0.0535473, 0.549126, 0.174379, 0.890911, 0.469886, 0.0168745, 0.225596, 0.490898, 0.665503, 0.56331, 0.785783, 0.406198, 0.648971, 0.556845, 0.968865, 0.584668, 0.0468998, 0.303567, 0.1311, 0.638951, 0.717894, 0.915575, 0.173391, 0.671815, 0.366176, 0.741083, 0.560976, 0.528959, 0.503424, 0.313394, 0.317896, 0.122852, 0.790566, 0.588063, 0.867979, 0.901013, 0.286144, 0.519279, 0.268692, 0.789779, 0.986381, 0.147227, 0.483949, 0.29949, 0.9893, 0.183272, 0.651737, 0.0700821, 0.513175, 0.321574, 0.533289, 0.0188718, 0.283338, 0.658904, 0.450936, 0.308419, 0.238598, 0.491723, 0.3378, 0.674554, 0.317872, 0.833756, 0.404867, 0.858322, 0.951014, 0.462002, 0.213524, 0.907227, 0.966018, 0.943972, 0.240083, 0.824311, 0.0136299, 0.0415106, 0.211277, 0.823492, 0.0955109, 0.491925, 0.489097, 0.131855, 0.576084, 0.185541, 0.367526, 0.639359, 0.10945, 0.0304542, 0.134525, 0.79432, 0.803408, 0.75494, 0.265617, 0.272427, 0.898437, 0.179157, 0.418492, 0.0192533, 0.712957, 0.414789, 0.460179, 0.354575, 0.403837, 0.799037, 0.597008, 0.965766, 0.334546, 0.166894, 0.473735, 0.464255, 0.640725, 0.569866, 
0.333975, 0.219429, 0.153702, 0.753632, 0.451345, 0.43749, 0.994772, 0.956143, 0.0781375, 0.9015, 0.260371, 0.52886, 0.104433, 0.320454, 0.0454706, 0.856138, 0.426182, 0.69376, 0.766984, 0.84683, 0.490401, 0.682308, 0.0449822, 0.754187, 0.0369971, 0.911769, 0.0797669, 0.417506, 0.231764, 0.647801, 0.823268, 0.544316, 0.0549245, 0.225344, 0.793053, 0.820461, 0.931705, 0.50056, 0.459531, 0.192332, 0.799059, 0.620209, 0.0882766, 0.796886, 0.388648, 0.695499, 0.480631, 0.975325, 0.396253, 0.0500061, 0.13329, 0.765852, 0.0275982, 0.229545, 0.748629, 0.612383, 0.47132, 0.918444, 0.708658, 0.247371, 0.20317, 0.415411, 0.336757, 0.971761, 0.460052, 0.458069, 0.410259, 0.624179, 0.843347, 0.588644, 0.281138, 0.400356, 0.0439346, 0.376624, 0.92801, 0.287149, 0.0609871, 0.665996, 0.503455, 0.746663, 0.532239, 0.642501, 0.791451, 0.560526, 0.943232, 0.384689, 0.512636, 0.507541, 0.274697, 0.559578, 0.340455, 0.13262, 0.318885, 0.441004, 0.668159, 0.0839833, 0.894819, 0.141383, 0.158399, 0.78188, 0.923151, 0.0128941, 0.374007, 0.77666, 0.876198, 0.389595, 0.0871047, 0.996259, 0.845139, 0.687196, 0.121923, 0.445344, 0.963142, 0.117184, 0.909071, 0.582335, 0.0248982, 0.273772, 0.864425, 0.652238, 0.678229, 0.549301, 0.430469, 0.432778, 0.972329, 0.0430739, 0.230677, 0.378969, 0.770843, 0.571774, 0.253866, 0.232173, 0.771504, 0.379452, 0.650724, 0.885934, 0.759265, 0.685853, 0.209983, 0.991538, 0.312572, 0.124554, 0.93123, 0.465089, 0.426527, 0.364899, 0.444177, 0.538276, 0.981309, 0.509641, 0.680088, 0.688518, 0.722841, 0.0184349, 0.955936, 0.817036, 0.412555, 0.0763725, 0.523845, 0.00403837, 0.786148, 0.962713, 0.0550463, 0.960108, 0.411254, 0.537216, 0.32681, 0.10792, 0.477548, 0.262875, 0.423429, 0.898253, 0.690482, 0.871238, 0.569451, 0.48595, 0.927744, 0.389064, 0.480579, 0.610086, 0.339745, 0.967366, 0.247182, 0.629689, 0.479802, 0.289142, 0.957839, 0.290307, 0.0344763, 0.25645, 0.200464, 0.654806, 0.617545, 0.408059, 0.304512, 0.925965, 0.838382, 0.373956, 0.633223, 0.619079, 0.148073, 0.118666, 0.443222, 0.432855, 0.885924, 0.148833, 0.787847, 0.239637, 0.871192, 0.984566, 0.623271, 0.487293, 0.0352336, 0.457538, 0.928901, 0.850248, 0.431738, 0.36331, 0.645589, 0.498171, 0.794208, 0.0849365, 0.527237, 0.767714, 0.328182, 0.374523, 0.444516, 0.0559615, 0.0419167, 0.845386, 0.5857, 0.865954, 0.0309568, 0.453031, 0.00774489, 0.474677, 0.424881, 0.425867, 0.606955, 0.713672, 0.567942, 0.640633, 0.780415, 0.581821, 0.848634, 0.775869, 0.71623, 0.997316, 0.584945, 0.828567, 0.808222, 0.0395258, 0.594642, 0.764727, 0.997978, 0.24524, 0.373046, 0.176825, 0.308495, 0.766623, 0.467008, 0.606107, 0.288612, 0.231055, 0.808635, 0.472013, 0.368067, 0.696817, 0.278501, 0.101347, 0.0829357, 0.6004, 0.215608, 0.368665, 0.846323, 0.0981594, 0.313847, 0.433136, 0.689086, 0.824282, 0.727438, 0.108271, 0.748229, 0.337563, 0.425716, 0.868851, 0.180594, 0.860347, 0.978163, 0.191819, 0.936035, 0.873956, 0.588142, 0.240084, 0.282904, 0.998058, 0.115019, 0.666553, 0.573109, 0.651596, 0.736717, 0.175013, 0.603761, 0.50385, 0.607941, 0.0299177, 0.993083, 0.388408, 0.654143, 0.231389, 0.166839, 0.313691, 0.212742, 0.345719, 0.653166, 0.239532, 0.048652, 0.463448, 0.442667, 0.648099, 0.0208802, 0.614332, 0.737179, 0.167007, 0.0763713, 0.533591, 0.268109, 0.492427, 0.375391, 0.681049, 0.606853, 0.139883, 0.105637, 0.76219, 0.468941, 0.0529323, 0.828829, 0.937117, 0.433731, 0.289874, 0.892131, 0.19397, 0.0132927, 0.35071, 0.709076, 0.9679, 0.341529, 0.348603, 0.415816, 0.228108, 0.488989, 0.333788, 0.161957, 0.570714, 0.508283, 
0.465879, 0.277558, 0.794945, 0.0121472, 0.427985, 0.665044, 0.833469, 0.94267, 0.33806, 0.285193, 0.723311, 0.974406, 0.695304, 0.460532, 0.675224, 0.0901268, 0.647891, 0.962078, 0.758498, 0.0277073, 0.980495, 0.884625, 0.0327249, 0.768726, 0.724125, 0.745225, 0.00158805, 0.938205, 0.606819, 0.742826, 0.255673, 0.421963, 0.371387, 0.432566, 0.0239052, 0.885388, 0.181065, 0.920133, 0.506331, 0.233921, 0.168982, 0.561517, 0.58763, 0.431242, 0.487615, 0.745375, 0.371755, 0.775379, 0.290286, 0.498143, 0.0404679, 0.00843829, 0.488149, 0.801368, 0.606526, 0.118841, 0.488715, 0.458259, 0.900423, 0.257545, 0.743129, 0.0907666, 0.986963, 0.369976, 0.607443, 0.651521, 0.900842, 0.618528, 0.100974, 0.442052, 0.61976, 0.560051, 0.24266, 0.150083, 0.436379, 0.095813, 0.079459, 0.791146, 0.850643, 0.11496, 0.0699083, 0.143467, 0.054094, 0.287862, 0.368917, 0.665334, 0.278383, 0.762243, 0.453091, 0.861052, 0.862359, 0.418895, 0.373134, 0.0971329, 0.264926, 0.75727, 0.858821, 0.427618, 0.914274, 0.467849, 0.919047, 0.172397, 0.16596, 0.326933, 0.489623, 0.439691, 0.747162, 0.803337, 0.862075, 0.27494, 0.90832, 0.0928894, 0.0353712, 0.627672, 0.297624, 0.423108, 0.926387, 0.606557, 0.590447, 0.809819, 0.79839, 0.624967, 0.974975, 0.336289, 0.424392, 0.353217, 0.555949, 0.801565, 0.237361, 0.818772, 0.767745, 0.706867, 0.786984, 0.367558, 0.697085, 0.95369, 0.782313, 0.901393, 0.802397, 0.979802, 0.53119, 0.431988, 0.473957, 0.278091, 0.577054, 0.561467, 0.278046, 0.484842, 0.798062, 0.523014, 0.887974, 0.549188, 0.264482, 0.967209, 0.439549, 0.287108, 0.489645, 0.0468009, 0.576898, 0.975522, 0.720684, 0.754914, 0.227268, 0.352387, 0.928097, 0.0313694, 0.258029, 0.241354, 0.0709821, 0.954653, 0.0894702, 0.356928, 0.676296, 0.919929, 0.793819, 0.761619, 0.742586, 0.677755, 0.631662, 0.690294, 0.528917, 0.844844, 0.386891, 0.451643, 0.894426, 0.152653, 0.988929, 0.4543, 0.766829, 0.82622, 0.360857, 0.88979, 0.438321, 0.147023, 0.0806725, 0.995685, 0.463487, 0.953199, 0.289579, 0.808851, 0.146665, 0.745513, 0.231612, 0.838304, 0.10183, 0.286207, 0.882168, 0.532409, 0.477905, 0.540575, 0.873101, 0.190819, 0.497637, 0.655335, 0.32515, 0.904345, 0.330202, 0.743194, 0.143097, 0.0565959, 0.044624, 0.519861, 0.108242, 0.282962, 0.533308, 0.237235, 0.289047, 0.0643234, 0.0227788, 0.75774, 0.990309, 0.354389, 0.394248, 0.485096, 0.0518331, 0.175479, 0.0172035, 0.918304, 0.83998, 0.294807, 0.139766, 0.63909, 0.766865, 0.331524, 0.178873, 0.858263, 0.104528, 0.506799, 0.25516, 0.0557155, 0.0896441, 0.476819, 0.0679275, 0.523957, 0.623747, 0.101086, 0.707701, 0.347366, 0.40146, 0.926623, 0.793363, 0.5183, 0.353883, 0.0170278, 0.550552, 0.566898, 0.761358, 0.969432, 0.0830791, 0.409803, 0.233851, 0.461094, 0.61238, 0.663356, 0.838447, 0.0936641, 0.896317, 0.0342364, 0.605435, 0.594078, 0.142495, 0.224693, 0.266037, 0.968376, 0.250094, 0.634636, 0.152495, 0.374344, 0.766249, 0.514728, 0.236246, 0.394472, 0.714031, 0.394143, 0.298254, 0.706938, 0.16714, 0.651014, 0.933411, 0.678186, 0.666454, 0.357514, 0.694775, 0.693118, 0.67172, 0.198115, 0.332186, 0.629232, 0.784186, 0.37814, 0.771727, 0.447776, 0.705953, 0.496304, 0.814896, 0.656693, 0.53542, 0.261516, 0.406417, 0.531009, 0.0275669, 0.448455, 0.680984, 0.772053, 0.665164, 0.644186, 0.270284, 0.949715, 0.977336, 0.2282, 0.113808, 0.547055, 0.59781, 0.00346764, 0.659073, 0.967426, 0.723033, 0.570098, 0.959023, 0.758847, 0.0443661, 0.329846, 0.180227, 0.640039, 0.418957, 0.31487, 0.171225, 0.359142, 0.663463, 0.0361466, 0.466258, 0.355709, 0.0728117, 0.0561539, 0.302612, 
0.966088, 0.312772, 0.572468, 0.950502, 0.140537, 0.804575, 0.143191, 0.554696, 0.31913, 0.313319, 0.415355, 0.557689, 0.868706, 0.568624, 0.322493, 0.960727, 0.800806, 0.97337, 0.636029, 0.854868, 0.0316508, 0.537545, 0.62288, 0.126693, 0.638876, 0.22898, 0.88884, 0.322239, 0.408508, 0.876548, 0.283979, 0.229775, 0.197694, 0.756162, 0.605596, 0.475759, 0.716177, 0.313979, 0.824405, 0.431531, 0.93625, 0.29437, 0.57963, 0.942052, 0.0665976, 0.532284, 0.782766, 0.899503, 0.601734, 0.974714, 0.453663, 0.399994, 0.601611, 0.0351596, 0.350032, 0.950207, 0.964415, 0.958216, 0.639434, 0.420463, 0.185442, 0.382668, 0.504688, 0.523907, 0.402958, 0.790744, 0.411467, 0.731239, 0.899588, 0.0667593, 0.528711, 0.85476, 0.116003, 0.922713, 0.609319, 0.224816, 0.243179, 0.699852, 0.437211, 0.993828, 0.434946, 0.0437369, 0.40611, 0.188708, 0.147626, 0.956368, 0.375801, 0.739141, 0.0106513, 0.877191, 0.48731, 0.898436, 0.833442, 0.862006, 0.301964, 0.148368, 0.151504, 0.407424, 0.449227, 0.149134, 0.118857, 0.88912, 0.176878, 0.699557, 0.496226, 0.442326, 0.930008, 0.922029, 0.961842, 0.763233, 0.817566, 0.59639, 0.0646593, 0.537041, 0.860439, 0.801642, 0.030929, 0.190828, 0.41728, 0.622681, 0.632888, 0.59156, 0.769584, 0.37493, 0.17385, 0.0626788, 0.304964, 0.374021, 0.53998, 0.688026, 0.732396, 0.682658, 0.778569, 0.658455, 0.541922, 0.944647, 0.193992, 0.347534, 0.954737, 0.226359, 0.606846, 0.259203, 0.525089, 0.554629, 0.911144, 0.761243, 0.814348, 0.641373, 0.946439, 0.316011, 0.114762, 0.534172, 0.192506, 0.241857, 0.392134, 0.60636, 0.854794, 0.805252, 0.409614, 0.612887, 0.123575, 0.839406, 0.302633, 0.649842, 0.0356233, 0.742045, 0.413918, 0.745436, 0.459173, 0.297346, 0.169182, 0.793266, 0.0856126, 0.935399, 0.235959, 0.10074, 0.654248, 0.0952724, 0.663783, 0.996306, 0.709982, 0.905973, 0.888761, 0.720037, 0.25771, 0.848951, 0.976113, 0.63487, 0.565978, 0.908386, 0.273005, 0.848013, 0.978949, 0.908749, 0.601055, 0.186663, 0.992795, 0.425448, 0.967182, 0.602136, 0.643628, 0.0584813, 0.460012, 0.448426, 0.171141, 0.556465, 0.580555, 0.872362, 0.449653, 0.156139, 0.212328, 0.998929, 0.847401, 0.504401, 0.516766, 0.571697, 0.217583, 0.222284, 0.213078, 0.647338, 0.673887, 0.426729, 0.308536, 0.755475, 0.114233, 0.132511, 0.801619, 0.337967, 0.596502, 0.243033, 0.0515908, 0.982832, 0.349651, 0.427672, 0.97232, 0.259496, 0.291335, 0.570966, 0.664706, 0.301763, 0.633364, 0.54933, 0.877609, 0.125295, 0.984686, 0.28117, 0.886473, 0.218579, 0.323286, 0.287905, 0.880335, 0.559238, 0.726381, 0.238366, 0.41792, 0.995409, 0.322266, 0.0953773, 0.446585, 0.0716302, 0.721718, 0.746432, 0.961932, 0.112667, 0.757459, 0.654223, 0.848422, 0.697044, 0.423866, 0.929048, 0.111573, 0.582656, 0.0200577, 0.44937, 0.553144, 0.74263, 0.794523, 0.830143, 0.740351, 0.741544, 0.356784, 0.963254, 0.15918, 0.939537, 0.80688, 0.237186, 0.190782, 0.558923, 0.615213, 0.293705, 0.904548, 0.79284, 0.185429, 0.00810836, 0.721038, 0.564903, 0.268942, 0.257018, 0.0530519, 0.527247, 0.920222, 0.820124, 0.111895, 0.109811, 0.587158, 0.429519, 0.591644, 0.254192, 0.350801, 0.299972, 0.882022, 0.817647, 0.633054, 0.606266, 0.990079, 0.139097, 0.724972, 0.0772518, 0.67211, 0.700957, 0.202332, 0.481672, 0.90614, 0.310924, 0.19062, 0.0712553, 0.00612124, 0.782944, 0.0744189, 0.0263658, 0.305614, 0.248776, 0.0458998, 0.260601, 0.555472, 0.221336, 0.49202, 0.110911, 0.618794, 0.414334, 0.752921, 0.870457, 0.343256, 0.218376, 0.154645, 0.757857, 0.652319, 0.890323, 0.629723, 0.101674, 0.783192, 0.0596106, 0.144572, 0.474774, 0.174112, 0.195932, 
0.493227, 0.112646, 0.996225, 0.134407, 0.881545, 0.450861, 0.310567, 0.509461, 0.521029, 0.269803, 0.897753, 0.0652435, 0.791288, 0.408702, 0.77576, 0.4061, 0.034406, 0.302217, 0.270236, 0.168705, 0.992101, 0.521785, 0.662134, 0.308386, 0.196541, 0.663632, 0.98909, 0.146276, 0.0388228, 0.705712, 0.244185, 0.845867, 0.113058, 0.0871161, 0.212426, 0.379537, 0.207442, 0.889858, 0.125029, 0.596035, 0.0982994, 0.452979, 0.313197, 0.208453, 0.313032, 0.181714, 0.725375, 0.158508, 0.79157, 0.0516266, 0.233432, 0.884625, 0.536409, 0.56068, 0.00272319, 0.409232, 0.628506, 0.806752, 0.458667, 0.292, 0.469911, 0.616007, 0.0677015, 0.479823, 0.237522, 0.375919, 0.311802, 0.41963, 0.361113, 0.739867, 0.799933, 0.920616, 0.868819, 0.0492135, 0.805827, 0.348683, 0.735856, 0.439831, 0.48462, 0.648015, 0.52143, 0.919378, 0.641207, 0.362818, 0.585762, 0.655237, 0.825461, 0.45015, 0.102988, 0.989794, 0.264378, 0.935502, 0.861179, 0.704196, 0.414725, 0.944714, 0.172455, 0.303253, 0.816398, 0.279285, 0.905868, 0.659504, 0.811797, 0.745049, 0.5312, 0.853933, 0.334082, 0.541627, 0.242702, 0.347521, 0.889458, 0.0294332, 0.984726, 0.996691, 0.67806, 0.496936, 0.548464, 0.973525, 0.839863, 0.0487781, 0.111969, 0.466638, 0.886713, 0.965632, 0.976538, 0.292486, 0.660092, 0.255806, 0.0997183, 0.0844828, 0.432525, 0.930895, 0.808542, 0.374321, 0.418907, 0.145023, 0.238337, 0.621779, 0.647911, 0.408323, 0.110784, 0.680445, 0.167629, 0.633968, 0.38811, 0.445281, 0.0107516, 0.489654, 0.885342, 0.587052, 0.0225841, 0.976548, 0.129556, 0.56597, 0.102489, 0.432207, 0.743853, 0.120321, 0.494494, 0.225072, 0.224017, 0.395315, 0.257046, 0.387203, 0.423059, 0.554966, 0.737481, 0.377376, 0.489935, 0.173226, 0.861628, 0.0333025, 0.0932294, 0.00671683, 0.827265, 0.663642, 0.540409, 0.99617, 0.725174, 0.190944, 0.728644, 0.579892, 0.748321, 0.617871, 0.130579, 0.245324, 0.203039, 0.821786, 0.0717586, 0.704972, 0.966078, 0.898646, 0.398553, 0.973942, 0.461521, 0.0644376, 0.30631, 0.383316, 0.891258, 0.259913, 0.169668, 0.714982, 0.615635, 0.559467, 0.616616, 0.770833, 0.169926, 0.388598, 0.208993, 0.278535, 0.554226, 0.0827589, 0.706816, 0.126392, 0.690821, 0.676062, 0.121265, 0.896982, 0.0388845, 0.670777, 0.708964, 0.994758, 0.341129, 0.37529, 0.101514, 0.153307, 0.842892, 0.477475, 0.0283206, 0.947715, 0.905052, 0.0397956, 0.738682, 0.0172491, 0.398329, 0.207674, 0.819838, 0.503421, 0.781826, 0.135193, 0.294824, 0.816439, 0.763956, 0.720865, 0.834563, 0.162494, 0.301494, 0.10709, 0.370368, 0.927791, 0.500762, 0.14252, 0.741825, 0.278368, 0.0206278, 0.182151, 0.971885, 0.137581, 0.483364, 0.384977, 0.696589, 0.340396, 0.745152, 0.802406, 0.203352, 0.919363, 0.753524, 0.372803, 0.449986, 0.696786, 0.535036, 0.499219, 0.0314585, 0.647437, 0.727908, 0.921411, 0.32227, 0.957174, 0.272344, 0.282078, 0.445171, 0.144022, 0.299585, 0.813071, 0.678695, 0.567202, 0.812063, 0.405263, 0.9, 0.992405, 0.396305, 0.92526, 0.276566, 0.546944, 0.75437, 0.531069, 0.586935, 0.916611, 0.137257, 0.828234, 0.0554768, 0.450604, 0.998812, 0.294749, 0.163121, 0.282978, 0.334924, 0.259305, 0.15947, 0.0787569, 0.754918, 0.886703, 0.0111505, 0.906784, 0.050589, 0.22299, 0.066966, 0.374187, 0.169991, 0.0816437, 0.170185, 0.882675, 0.270558, 0.0356211, 0.376503, 0.715536, 0.350391, 0.162741, 0.288581, 0.635397, 0.566589, 0.534105, 0.475434, 0.178913, 0.928208, 0.252494, 0.615583, 0.251827, 0.19447, 0.961938, 0.305609, 0.593316, 0.847266, 0.744115, 0.612543, 0.24277, 0.20847, 0.477976, 0.0518278, 0.0152872, 0.631431, 0.0549163, 0.328825, 0.59685, 0.558903, 
0.683194, 0.255128, 0.115275, 0.952675, 0.264164, 0.193556, 0.246357, 0.198612, 0.681457, 0.270092, 0.714036, 0.817953, 0.636855, 0.283205, 0.131918, 0.841106, 0.123482, 0.028812, 0.995828, 0.384277, 0.843682, 0.764375, 0.567424, 0.941897, 0.951325, 0.18631, 0.667919, 0.738847, 0.62275, 0.964064, 0.995357, 0.425137, 0.906932, 0.688342, 0.0471274, 0.164173, 0.310808, 0.0994296, 0.318221, 0.293541, 0.00581898, 0.87577, 0.906742, 0.498474, 0.178821, 0.302167, 0.0999429, 0.135979, 0.0169123, 0.183307, 0.073467, 0.511984, 0.389584, 0.422023, 0.548747, 0.496203, 0.820108, 0.837884, 0.0116966, 0.75667, 0.306426, 0.446261, 0.740293, 0.0617691, 0.731056, 0.921502, 0.111565, 0.553342, 0.90181, 0.588966, 0.899782, 0.838532, 0.548925, 0.626024, 0.59153, 0.869307, 0.331242, 0.0256317, 0.757944, 0.374959, 0.695617, 0.340961, 0.834587, 0.998895, 0.879709, 0.437381, 0.99448, 0.735159, 0.659503, 0.5298, 0.93604, 0.186548, 0.0753599, 0.32131, 0.252669, 0.101402, 0.651554, 0.885193, 0.0628296, 0.919042, 0.110233, 0.70135, 0.309103, 0.884887, 0.243368, 0.720378, 0.0672109, 0.523772, 0.270488, 0.307305, 0.263874, 0.70498, 0.464827, 0.695951, 0.914524, 0.0221541, 0.287136, 0.3114, 0.389098, 0.385562, 0.975716, 0.170828, 0.217957, 0.927547, 0.910391, 0.653454, 0.0758241, 0.820644, 0.0665078, 0.110731, 0.882233, 0.20648, 0.952657, 0.32112, 0.952035, 0.224238, 0.927685, 0.680405, 0.139316, 0.378105, 0.381615, 0.594834, 0.471939, 0.408969, 0.491383, 0.454735, 0.768353, 0.353205, 0.80698, 0.251159, 0.0118578, 0.266174, 0.961116, 0.670582, 0.62263, 0.503359, 0.857653, 0.182818, 0.4892, 0.803616, 0.218005, 0.743287, 0.0763764, 0.908062, 0.328088, 0.842997, 0.59184, 0.559818, 0.0614147, 0.159875, 0.489821, 0.744524, 0.82447, 0.809519, 0.875425, 0.67424, 0.938252, 0.527641, 0.235542, 0.700584, 0.430199, 0.964745, 0.731701, 0.342589, 0.0273191, 0.0354802, 0.294863, 0.263651, 0.030499, 0.501893, 0.769643, 0.211326, 0.584002, 0.80953, 0.341599, 0.308817, 0.438602, 0.993219, 0.346702, 0.457499, 0.414148, 0.882384, 0.855597, 0.0641278, 0.42825, 0.273324, 0.4623, 0.668859, 0.63831, 0.857007, 0.845658, 0.146669, 0.211893, 0.502162, 0.799475, 0.65045, 0.305315, 0.274964, 0.182841, 0.954136, 0.463376, 0.834691, 0.0184708, 0.398692, 0.864279, 0.953113, 0.890904, 0.333008, 0.215403, 0.375784, 0.95605, 0.383674, 0.378765, 0.405168, 0.171666, 0.545179, 0.669923, 0.384985, 0.801676, 0.350565, 0.00337708, 0.631043, 0.0519745, 0.71121, 0.642583, 0.145279, 0.712888, 0.284956, 0.0921465, 0.577798, 0.34649, 0.937083, 0.929081, 0.612238, 0.490393, 0.0424106, 0.599653, 0.707806, 0.804232, 0.965361, 0.327246, 0.666762, 0.021784, 0.682931, 0.681838, 0.0724264, 0.205187, 0.668736, 0.473539, 0.961394, 0.729708, 0.024514, 0.566873, 0.497288, 0.558562, 0.138497, 0.941736, 0.972125, 0.111753, 0.288791, 0.912262, 0.559286, 0.185214, 0.7715, 0.989913, 0.376245, 0.060584, 0.154194, 0.978276, 0.882586, 0.0247684, 0.738887, 0.630539, 0.190555, 0.525523, 0.83807, 0.291769, 0.91042, 0.282253, 0.921474, 0.0236699, 0.277483, 0.104831, 0.939341, 0.476085, 0.689769, 0.736036, 0.206843, 0.382487, 0.0348795, 0.177103, 0.91728, 0.838084, 0.137021, 0.733489, 0.526792, 0.589004, 0.574598, 0.165783, 0.880822, 0.0123784, 0.288395, 0.101822, 0.0803773, 0.335963, 0.638068, 0.233806, 0.417383, 0.952079, 0.606481, 0.817961, 0.921173, 0.70529, 0.132949, 0.366304, 0.157502, 0.788171, 0.456362, 0.934255, 0.467461, 0.616087, 0.431466, 0.0107934, 0.0281284, 0.125773, 0.648653, 0.462969, 0.850999, 0.108886, 0.269924, 0.958876, 0.0607822, 0.377576, 0.0159777, 0.215239, 
0.551793, 0.605937, 0.769733, 0.902639, 0.422435, 0.850766, 0.738137, 0.242012, 0.739853, 0.295256, 0.072313, 0.00499006, 0.134117, 0.0619816, 0.384254, 0.779899, 0.414359, 0.0622828, 0.868238, 0.879185, 0.720135, 0.599743, 0.67316, 0.627264, 0.788426, 0.520692, 0.77885, 0.521608, 0.419226, 0.125957, 0.948685, 0.456223, 0.122047, 0.968073, 0.745772, 0.0444724, 0.404498, 0.703905, 0.553675, 0.332, 0.936517, 0.608138, 0.651625, 0.525357, 0.0418359, 0.896627, 0.0912335, 0.267571, 0.553597, 0.354797, 0.241745, 0.338559, 0.066604, 0.0190339, 0.894, 0.586077, 0.766166, 0.435573, 0.00517216, 0.644431, 0.920422, 0.30956, 0.739591, 0.614317, 0.153654, 0.314762, 0.737547, 0.413188, 0.764076, 0.930615, 0.109515, 0.146156, 0.669901, 0.704036, 0.444418, 0.162992, 0.974594, 0.71312, 0.0301947, 0.153717, 0.411245, 0.950341, 0.164568, 0.687654, 0.521892, 0.639889, 0.277799, 0.0132736, 0.157706, 0.910418, 0.936465, 0.573647, 0.267704, 0.00714697, 0.182185, 0.741549, 0.475334, 0.28607, 0.674605, 0.105207, 0.667732, 0.472204, 0.297792, 0.955608, 0.137354, 0.581651, 0.0227979, 0.434107, 0.593207, 0.117993, 0.382285, 0.578991, 0.00468433, 0.489849, 0.482814, 0.601705, 0.784944, 0.687895, 0.809286, 0.503371, 0.224533, 0.982967, 0.935806, 0.83864, 0.765722, 0.78486, 0.255872, 0.573153, 0.57346, 0.666158, 0.0866073, 0.608673, 0.8369, 0.802918, 0.380381, 0.80692, 0.930134, 0.545355, 0.826637, 0.0235991, 0.296427, 0.264481, 0.0334558, 0.940874, 0.644491, 0.145155, 0.931148, 0.516667, 0.737245, 0.426548, 0.456089, 0.275857, 0.826369, 0.0312548, 0.162226, 0.835581, 0.0551627, 0.235584, 0.737127, 0.757186, 0.535057, 0.388136, 0.804533, 0.520342, 0.239397, 0.0111184, 0.886874, 0.671199, 0.0102062, 0.525171, 0.54132, 0.197769, 0.966243, 0.242902, 0.30363, 0.483834, 0.420135, 0.799298, 0.688168, 0.079587, 0.545242, 0.378923, 0.388659, 0.544782, 0.562912, 0.756239, 0.241471, 0.084162, 0.917187, 0.724327, 0.594817, 0.357432, 0.197771, 0.421488, 0.60364, 0.153701, 0.216854, 0.854795, 0.793797, 0.864704, 0.249874, 0.392237, 0.0893547, 0.126951, 0.162292, 0.348888, 0.104106, 0.459023, 0.360549, 0.225123, 0.619885, 0.556321, 0.533507, 0.369956, 0.707384, 0.00900924, 0.571209, 0.369161, 0.433963, 0.125497, 0.447801, 0.998385, 0.00721151, 0.596754, 0.198993, 0.544076, 0.256515, 0.824956, 0.395937, 0.82036, 0.724246, 0.274992, 0.931312, 0.211844, 0.888995, 0.567783, 0.403347, 0.0233643, 0.305413, 0.18654, 0.202946, 0.277751, 0.00679857, 0.621933, 0.714503, 0.903398, 0.857239, 0.841092, 0.167112, 0.2957, 0.53974, 0.124685, 0.303459, 0.940138, 0.0796079, 0.508971, 0.384426, 0.855042, 0.285243, 0.549414, 0.476934, 0.781292, 0.48505, 0.27334, 0.0602828, 0.829215, 0.457946, 0.466487, 0.246539, 0.81927, 0.426943, 0.114767, 0.780408, 0.16347, 0.0984895, 0.877369, 0.457513, 0.212888, 0.284341, 0.487792, 0.509021, 0.895808, 0.76787, 0.440658, 0.363798, 0.681661, 0.474451, 0.600067, 0.954591, 0.0890766, 0.730658, 0.224916, 0.921279, 0.376547, 0.260774, 0.601968, 0.4237, 0.739528, 0.145402, 0.41468, 0.152907, 0.47141, 0.119075, 0.273265, 0.950216, 0.55966, 0.968888, 0.939802, 0.776441, 0.668253, 0.771813, 0.722996, 0.23648, 0.343948, 0.347981, 0.552765, 0.375207, 0.924972, 0.592547, 0.508824, 0.496403, 0.0940674, 0.944428, 0.56447, 0.301115, 0.889245, 0.0223575, 0.763835, 0.267003, 0.500808, 0.824617, 0.838881, 0.944627, 0.496405, 0.14692, 0.315796, 0.77764, 0.240432, 0.306096, 0.292526, 0.296885, 0.373744, 0.346349, 0.322487, 0.883683, 0.16284, 0.0647008, 0.329655, 0.410038, 0.560839, 0.828523, 0.851761, 0.369957, 0.864426, 0.871436, 
0.0633008, 0.16101, 0.615145, 0.148793, 0.322985, 0.330853, 0.868008, 0.43142, 0.811799, 0.135211, 0.458559, 0.108207, 0.695897, 0.315194, 0.637078, 0.563875, 0.597988, 0.427874, 0.689716, 0.471646, 0.725956, 0.622428, 0.471164, 0.823061, 0.505723, 0.297301, 0.0265635, 0.527678, 0.743072, 0.48214, 0.138272, 0.398495, 0.0859242, 0.460059, 0.942352, 0.898106, 0.798673, 0.187484, 0.357981, 0.431898, 0.41474, 0.394494, 0.884989, 0.993217, 0.713402, 0.680949, 0.753447, 0.262199, 0.475949, 0.581497, 0.997133, 0.387826, 0.403276, 0.275721, 0.380806, 0.209255, 0.60866, 0.623474, 0.543617, 0.241541, 0.425924, 0.428409, 0.397315, 0.09587, 0.620919, 0.126089, 0.38629, 0.610477, 0.374492, 0.338553, 0.218924, 0.806786, 0.832234, 0.80869, 0.294897, 0.376391, 0.153017, 0.653983, 0.208456, 0.726007, 0.602219, 0.791533, 0.33923, 0.97356, 0.577972, 0.265774, 0.206974, 0.962825, 0.950871, 0.0578961, 0.610349, 0.646963, 0.375467, 0.729135, 0.546659, 0.170292, 0.332371, 0.710321, 0.522126, 0.0930859, 0.264027, 0.470624, 0.581995, 0.676597, 0.280836, 0.689249, 0.787284, 0.949667, 0.642154, 0.417964, 0.455748, 0.208287, 0.859757, 0.481885, 0.804135, 0.056898, 0.693433, 0.160249, 0.837059, 0.0546828, 0.798152, 0.839746, 0.605187, 0.183213, 0.2599, 0.383068, 0.851459, 0.306443, 0.309147, 0.0381959, 0.719869, 0.242022, 0.22479, 0.994963, 0.366324, 0.717579, 0.217008, 0.431604, 0.594694, 0.624695, 0.814518, 0.961515, 0.475361, 0.578813, 0.84533, 0.305233, 0.0790036, 0.658456, 0.686757, 0.567089, 0.792823, 0.831184, 0.937264, 0.764091, 0.679602, 0.0103403, 0.948929, 0.241996, 0.657831, 0.453863, 0.714861, 0.241192, 0.924878, 0.632439, 0.105144, 0.788324, 0.810018, 0.210902, 0.550243, 0.486251, 0.0913733, 0.798959, 0.194691, 0.411302, 0.279607, 0.577979, 0.719798, 0.638684, 0.17349, 0.298712, 0.993729, 0.441379, 0.844718, 0.685629, 0.455068, 0.330483, 0.265889, 0.368193, 0.0688269, 0.138687, 0.627119, 0.355442, 0.805071, 0.807763, 0.673589, 0.000873119, 0.0354007, 0.230154, 0.831104, 0.833678, 0.469663, 0.214547, 0.758954, 0.590854, 0.341966, 0.875465, 0.478901, 0.2021, 0.666199, 0.600773, 0.00324002, 0.554362, 0.686026, 0.908726, 0.46423, 0.77122, 0.838512, 0.157614, 0.527408, 0.991745, 0.990287, 0.947701, 0.173036, 0.682941, 0.424896, 0.441374, 0.899804, 0.82589, 0.080801, 0.358268, 0.226601, 0.12377, 0.471976, 0.593304, 0.251538, 0.217325, 0.084385, 0.820333, 0.929372, 0.3018, 0.868119, 0.354941, 0.817628, 0.854257, 0.465488, 0.87711, 0.467136, 0.536533, 0.761141, 0.0176133, 0.824427, 0.35775, 0.883069, 0.926057, 0.468452, 0.736605, 0.595839, 0.863943, 0.714158, 0.7314, 0.893097, 0.438854, 0.327221, 0.791686, 0.5375, 0.87044, 0.509864, 0.707702, 0.246594, 0.600985, 0.769923, 0.937123, 0.487803, 0.162059, 0.945864, 0.783905, 0.935941, 0.466245, 0.857716, 0.507292, 0.899158, 0.673879, 0.673613, 0.288398, 0.6941, 0.738393, 0.55069, 0.976364, 0.118459, 0.237908, 0.31801, 0.695152, 0.806048, 0.996444, 0.271285, 0.430659, 0.0847206, 0.505409, 0.964534, 0.533697, 0.402601, 0.636051, 0.905861, 0.187011, 0.162622, 0.13625, 0.217484, 0.331014, 0.178526, 0.700494, 0.669568, 0.551995, 0.186994, 0.512293, 0.888964, 0.693861, 0.254946, 0.111767, 0.617156, 0.00432868, 0.736541, 0.538015, 0.924411, 0.703041, 0.356651, 0.82463, 0.338636, 0.337042, 0.589604, 0.446137, 0.928251, 0.223612, 0.308999, 0.336899, 0.448885, 0.973801, 0.697394, 0.0422027, 0.0897108, 0.399189, 0.703481, 0.543053, 0.72385, 0.159818, 0.782179, 0.304272, 0.826399, 0.0707951, 0.26092, 0.365618, 0.413427, 0.84302, 0.131124, 0.556202, 0.706646, 0.780119, 0.104328, 
0.946335, 0.0637464, 0.0732376, 0.0595677, 0.525762, 0.987167, 0.32953, 0.815905, 0.478352, 0.296297, 0.625132, 0.553781, 0.592135, 0.28049, 0.192426, 0.23416, 0.817321, 0.826737, 0.0622751, 0.576221, 0.270625, 0.899673, 0.699229, 0.466704, 0.721756, 0.969387, 0.805635, 0.169292, 0.939678, 0.470333, 0.951276, 0.605481, 0.258478, 0.482099, 0.709427, 0.666574, 0.685317, 0.601123, 0.850523, 0.159515, 0.781839, 0.895366, 0.200522, 0.31974, 0.0524494, 0.529465, 0.551828, 0.73694, 0.922469, 0.218472, 0.734122, 0.688273, 0.864047, 0.130662, 0.786044, 0.220167, 0.607686, 0.312601, 0.641538, 0.567144, 0.828449, 0.619842, 0.83648, 0.611485, 0.898507, 0.411619, 0.583162, 0.530753, 0.83583, 0.626444, 0.557886, 0.180592, 0.198511, 0.232223, 0.526427, 0.615557, 0.411498, 0.273077, 0.74293, 0.835388, 0.270267, 0.805291, 0.0972044, 0.241121, 0.736203, 0.663411, 0.363918, 0.458822, 0.80512, 0.605458, 0.375393, 0.862337, 0.0593959, 0.253904, 0.536793, 0.589653, 0.933257, 0.953445, 0.568967, 0.210733, 0.555606, 0.432325, 0.646601, 0.93889, 0.614317, 0.560362, 0.710996, 0.210809, 0.379712, 0.887, 0.163997, 0.56018, 0.0240339, 0.49022, 0.742717, 0.519842, 0.586268, 0.78603, 0.636755, 0.582241, 0.796528, 0.259907, 0.0745921, 0.655932, 0.0389202, 0.0726865, 0.864875, 0.312982, 0.452136, 0.7015, 0.247177, 0.27854, 0.0632686, 0.0813697, 0.279874, 0.586784, 0.461093, 0.0778263, 0.662647, 0.357942, 0.710333, 0.956567, 0.898341, 0.174392, 0.00585557, 0.884456, 0.278071, 0.685814, 0.560874, 0.94756, 0.24619, 0.854, 0.353335, 0.254453, 0.792681, 0.305647, 0.99284, 0.270542, 0.588071, 0.653879, 0.685535, 0.740965, 0.0128991, 0.972981, 0.982195, 0.784451, 0.123193, 0.432136, 0.529464, 0.763657, 0.226716, 0.210642, 0.417861, 0.656148, 0.579897, 0.856393, 0.218321, 0.771127, 0.0338039, 0.567034, 0.630268, 0.908676, 0.521216, 0.701863, 0.514884, 0.421301, 0.752762, 0.524206, 0.117344, 0.173013, 0.612242, 0.394583, 0.865125, 0.752344, 0.315942, 0.142906, 0.665908, 0.907319, 0.524857, 0.0720145, 0.193825, 0.889935, 0.315781, 0.207526, 0.418102, 0.116293, 0.271694, 0.733742, 0.187469, 0.694859, 0.83994, 0.948731, 0.0598494, 0.586697, 0.493972, 0.955683, 0.37149, 0.199311, 0.146326, 0.601498, 0.946194, 0.831491, 0.320438, 0.801112, 0.484785, 0.0333543, 0.74116, 0.226681, 0.0447326, 0.315809, 0.303605, 0.497916, 0.967853, 0.710093, 0.961844, 0.316589, 0.923962, 0.206211, 0.632804, 0.702471, 0.293214, 0.459488, 0.240221, 0.273956, 0.214241, 0.895807, 0.428641, 0.0928225, 0.264767, 0.997019, 0.616878, 0.680763, 0.0939435, 0.418914, 0.0206921, 0.388642, 0.503535, 0.993482, 0.274211, 0.604227, 0.20554, 0.0823207, 0.484587, 0.132433, 0.165187, 0.74246, 0.905328, 0.870367, 0.747256, 0.0635862, 0.301832, 0.420023, 0.538413, 0.796804, 0.275343, 0.671668, 0.422441, 0.153756, 0.107716, 0.520214, 0.680467, 0.621875, 0.0994287, 0.251441, 0.410875, 0.341878, 0.0305172, 0.893489, 0.542774, 0.881573, 0.159878, 0.978881, 0.530476, 0.611658, 0.782136, 0.62399, 0.753435, 0.238884, 0.9115, 0.241982, 0.619147, 0.842602, 0.209986, 0.741427, 0.477977, 0.339636, 0.688182, 0.194028, 0.726511, 0.743978, 0.0382991, 0.737422, 0.0462307, 0.12968, 0.467882, 0.0295334, 0.712377, 0.446366, 0.0486053, 0.30363, 0.410012, 0.74849, 0.349723, 0.890933, 0.670958, 0.639506, 0.993475, 0.399773, 0.984779, 0.551193, 0.743251, 0.596086, 0.118846, 0.197895, 0.403803, 0.159353, 0.643236, 0.127084, 0.483102, 0.490118, 0.918884, 0.181435, 0.747842, 0.677603, 0.667513, 0.238327, 0.67862, 0.658214, 0.612771, 0.296302, 0.166472, 0.161012, 0.588091, 0.197845, 0.513839, 
0.833909, 0.463192, 0.950884, 0.0533873, 0.432471, 0.863352, 0.822684, 0.0342464, 0.225652, 0.702801, 0.86235, 0.473653, 0.988191, 0.227529, 0.878562, 0.0485657, 0.0471784, 0.458532, 0.650269, 0.845933, 0.591122, 0.513426, 0.551219, 0.791989, 0.794158, 0.810032, 0.257106, 0.767354, 0.499332, 0.254888, 0.903956, 0.117117, 0.116458, 0.452096, 0.850185, 0.544318, 0.326435, 0.215906, 0.740467, 0.401712, 0.567547, 0.887705, 0.435517, 0.378752, 0.758246, 0.375751, 0.448469, 0.176343, 0.0144379, 0.624023, 0.489881, 0.593496, 0.989702, 0.244849, 0.175646, 0.173303, 0.586476, 0.99795, 0.79035, 0.374323, 0.698993, 0.921009, 0.290843, 0.183281, 0.4822, 0.576796, 0.789544, 0.0994564, 0.508556, 0.109639, 0.733261, 0.140442, 0.787, 0.254106, 0.0559873, 0.606764, 0.401688, 0.68709, 0.13815, 0.029515, 0.00818356, 0.782633, 0.181943, 0.0634527, 0.683833, 0.00813154, 0.486167, 0.854093, 0.191719, 0.86377, 0.609881, 0.515957, 0.487769, 0.0271336, 0.73167, 0.234502, 0.0613808, 0.961206, 0.999486, 0.823175, 0.300045, 0.354843, 0.246346, 0.123407, 0.347522, 0.354241, 0.694702, 0.323798, 0.0366013, 0.538566, 0.729588, 0.992623, 0.218945, 0.406589, 0.0438158, 0.155472, 0.970552, 0.409469, 0.153691, 0.0203969, 0.275005, 0.701389, 0.42303, 0.147195, 0.368979, 0.604133, 0.126673, 0.67592, 0.105526, 0.54339, 0.871531, 0.394669, 0.126213, 0.599392, 0.431891, 0.988939, 0.567489, 0.496111, 0.368918, 0.470483, 0.0421722, 0.675878, 0.603576, 0.44158, 0.992638, 0.836131, 0.420694, 0.741066, 0.403237, 0.574347, 0.648549, 0.250641, 0.798726, 0.891189, 0.0422597, 0.51827, 0.739092, 0.202508, 0.619545, 0.450979, 0.966478, 0.176743, 0.693801, 0.65698, 0.986479, 0.579998, 0.428425, 0.639208, 0.652359, 0.396176, 0.268982, 0.0115209, 0.689203, 0.0668897, 0.38583, 0.245746, 0.761784, 0.45692, 0.302772, 0.461184, 0.501472, 0.559463, 0.287875, 0.129634, 0.470875, 0.672356, 0.663933, 0.381709, 0.528953, 0.761458, 0.652946, 0.669969, 0.982658, 0.602676, 0.479083, 0.536523, 0.330208, 0.194745, 0.641579, 0.840228, 0.450018, 0.692135, 0.355857, 0.455585, 0.486695, 0.584968, 0.206563, 0.965777, 0.303506, 0.934809, 0.458971, 0.810761, 0.75679, 0.975268, 0.210766, 0.248278, 0.0802836, 0.694557, 0.559991, 0.919831, 0.898269, 0.423921, 0.402418, 0.862905, 0.383914, 0.088649, 0.355223, 0.0554185, 0.300398, 0.409473, 0.931162, 0.553423, 0.310724, 0.709629, 0.308529, 0.307363, 0.659964, 0.00663364, 0.258103, 0.360775, 0.663727, 0.490966, 0.940088, 0.396988, 0.564506, 0.315455, 0.959141, 0.057101, 0.0195982, 0.519149, 0.901276, 0.48174, 0.139353, 0.0851095, 0.0280393, 0.256546, 0.121875, 0.00495883, 0.999756, 0.657477, 0.571574, 0.5227, 0.694834, 0.946927, 0.366485, 0.434974, 0.478532, 0.412874, 0.867329, 0.0975095, 0.0271852, 0.778299, 0.565213, 0.53727, 0.290105, 0.791131, 0.0144325, 0.275715, 0.581405, 0.501518, 0.0129648, 0.297322, 0.275774, 0.0750361, 0.154392, 0.0974631, 0.309081, 0.172081, 0.848699, 0.840117, 0.0103903, 0.968982, 0.600323, 0.259561, 0.572473, 0.82728, 0.432188, 0.78679, 0.764765, 0.0234963, 0.280857, 0.206387, 0.519519, 0.241991, 0.586645, 0.045972, 0.089996, 0.133891, 0.608837, 0.00619712, 0.895718, 0.439825, 0.63276, 0.785436, 0.992793, 0.604488, 0.204594, 0.115584, 0.116164, 0.93579, 0.706941, 0.211937, 0.0692218, 0.3304, 0.318649, 0.710189, 0.982268, 0.116389, 0.298634, 0.411521, 0.954601, 0.72633, 0.550592, 0.251087, 0.462077, 0.817913, 0.384922, 0.762362, 0.322915, 0.389228, 0.225212, 0.713655, 0.349589, 0.764699, 0.166066, 0.374613, 0.508018, 0.58467, 0.736121, 0.68524, 0.160013, 0.364459, 0.120574, 0.190756, 
0.318128, 0.35933, 0.184048, 0.0399303, 0.980637, 0.560814, 0.303475, 0.797824, 0.816656, 0.394171, 0.031498, 0.373127, 0.553551, 0.547722, 0.311606, 0.814148, 0.82553, 0.506601, 0.102923, 0.624813, 0.991727, 0.936333, 0.487983, 0.170005, 0.669482, 0.921824, 0.350938, 0.117821, 0.710319, 0.244534, 0.927799, 0.126162, 0.322003, 0.900591, 0.717494, 0.551761, 0.806162, 0.835034, 0.763623, 0.108624, 0.00990392, 0.902354, 0.824175, 0.554519, 0.885359, 0.308364, 0.572235, 0.63138, 0.335896, 0.605877, 0.46389, 0.806253, 0.766758, 0.526007, 0.397714, 0.60308, 0.93283, 0.545794, 0.660899, 0.604516, 0.29167, 0.214183, 0.0573974, 0.229982, 0.979779, 0.368175, 0.434765, 0.415207, 0.418815, 0.354797, 0.403499, 0.871134, 0.644369, 0.644598, 0.471213, 0.956022, 0.431544, 0.586249, 0.139016, 0.702862, 0.187983, 0.944175, 0.589743, 0.901744, 0.875612, 0.146502, 0.581585, 0.0458156, 0.538324, 0.52507, 0.767346, 0.948295, 0.0187263, 0.305238, 0.544379, 0.814933, 0.391502, 0.130744, 0.953611, 0.394568, 0.272426, 0.649744, 0.861592, 0.835051, 0.482925, 0.245874, 0.290328, 0.287375, 0.837873, 0.0066875, 0.413453, 0.262712, 0.0880322, 0.243793, 0.82881, 0.6031, 0.306577, 0.42487, 0.969813, 0.914381, 0.00901938, 0.856536, 0.71473, 0.831734, 0.0200897, 0.247702, 0.746342, 0.306575, 0.206043, 0.574619, 0.828791, 0.723755, 0.93929, 0.852708, 0.405139, 0.414797, 0.542813, 0.290311, 0.125276, 0.931837, 0.0717161, 0.791285, 0.14805, 0.836408, 0.398334, 0.485683, 0.682892, 0.0932766, 0.404823, 0.644894, 0.346455, 0.427197, 0.259743, 0.597138, 0.0679476, 0.0289522, 0.849093, 0.350032, 0.454357, 0.603621, 0.907092, 0.404162, 0.351306, 0.270505, 0.653166, 0.769691, 0.938824, 0.473653, 0.434848, 0.342873, 0.318166, 0.543479, 0.856732, 0.153989, 0.879561, 0.870354, 0.422029, 0.839287, 0.124407, 0.465203, 0.460858, 0.989059, 0.679192, 0.946028, 0.846782, 0.918742, 0.314033, 0.49666, 0.0920023, 0.843132, 0.839722, 0.272321, 0.979717, 0.348738, 0.444116, 0.983432, 0.795065, 0.794573, 0.671099, 0.511916, 0.0563633, 0.559247, 0.533963, 0.50525, 0.711275, 0.802424, 0.917303, 0.122918, 0.80023, 0.559959, 0.257324, 0.13848, 0.171588, 0.0874861, 0.0871963, 0.72098, 0.515483, 0.279985, 0.00499612, 0.225363, 0.238013, 0.269871, 0.990479, 0.766968, 0.999129, 0.715754, 0.00825198, 0.37617, 0.665278, 0.747982, 0.828414, 0.71452, 0.469985, 0.859196, 0.535797, 0.775488, 0.415358, 0.720911, 0.368545, 0.401326, 0.745221, 0.153016, 0.578119, 0.548487, 0.0108803, 0.525463, 0.85297, 0.401923, 0.125322, 0.0132056, 0.673783, 0.392176, 0.0484537, 0.983614, 0.733353, 0.648133, 0.103747, 0.619905, 0.345302, 0.53134, 0.687227, 0.370661, 0.177329, 0.362335, 0.898738, 0.123485, 0.136967, 0.888547, 0.21683, 0.366909, 0.653203, 0.335775, 0.869464, 0.0816942, 0.902404, 0.457509, 0.530483, 0.803953, 0.0166294, 0.13743, 0.636046, 0.0234916, 0.713996, 0.333192, 0.0889455, 0.0801999, 0.722104, 0.823817, 0.418968, 0.642144, 0.537163, 0.63819, 0.32562, 0.221282, 0.62735, 0.0201243, 0.838979, 0.577024, 0.339198, 0.791407, 0.250698, 0.22916, 0.506478, 0.184422, 0.345273, 0.0775661, 0.735546, 0.783081, 0.563042, 0.52338, 0.578283, 0.92201, 0.331034, 0.417922, 0.818801, 0.356291, 0.542004, 0.544088, 0.803525, 0.212841, 0.731448, 0.612983, 0.748431, 0.253143, 0.231774, 0.294001, 0.593668, 0.265074, 0.553367, 0.866867, 0.18076, 0.662788, 0.629639, 0.73018, 0.467859, 0.808596, 0.644864, 0.022537, 0.0364958, 0.54453, 0.592496, 0.43437, 0.952427, 0.218378, 0.244185, 0.436451, 0.71122, 0.593662, 0.835231, 0.0332252, 0.378105, 0.497219, 0.984476, 0.0730146, 0.472074, 
0.48317, 0.139109, 0.593514, 0.340308, 0.619867, 0.28969, 0.00251761, 0.908032, 0.224354, 0.849769, 0.550466, 0.821944, 0.162849, 0.738565, 0.68601, 0.509678, 0.0200363, 0.507831, 0.866033, 0.638294, 0.116883, 0.843762, 0.889224, 0.260872, 0.18303, 0.130803, 0.769403, 0.414257, 0.0478247, 0.275858, 0.874808, 0.409034, 0.986896, 0.182275, 0.906189, 0.483678, 0.215844, 0.0907204, 0.0648673, 0.252829, 0.832949, 0.100708, 0.156126, 0.813764, 0.398981, 0.812887, 0.120614, 0.500278, 0.308537, 0.920879, 0.581683, 0.475571, 0.385586, 0.592255, 0.598037, 0.948205, 0.90025, 0.599848, 0.608096, 0.0367805, 0.990955, 0.68853, 0.548609, 0.294576, 0.30996, 0.883756, 0.967547, 0.218314, 0.511624, 0.590131, 0.0163507, 0.871621, 0.818298, 0.161421, 0.275694, 0.214449, 0.616178, 0.996522, 0.30145, 0.462069, 0.015384, 0.977253, 0.57646, 0.0811966, 0.271921, 0.924299, 0.106255, 0.0241342, 0.766667, 0.950522, 0.577713, 0.27764, 0.668152, 0.270199, 0.332724, 0.0121983, 0.600661, 0.555089, 0.530421, 0.72221, 0.380044, 0.653603, 0.89445, 0.0928122, 0.594942, 0.328926, 0.976484, 0.42039, 0.996746, 0.860078, 0.867956, 0.0398631, 0.806536, 0.0995371, 0.658456, 0.0472054, 0.66889, 0.540563, 0.532939, 0.865011, 0.401918, 0.820783, 0.0920071, 0.0658646, 0.253436, 0.263612, 0.669429, 0.296443, 0.706652, 0.351249, 0.91166, 0.913569, 0.788687, 0.234301, 0.831165, 0.980835, 0.203369, 0.373045, 0.423267, 0.670498, 0.802715, 0.936269, 0.78477, 0.915481, 0.113364, 0.601332, 0.310504, 0.743434, 0.765364, 0.603253, 0.483632, 0.125628, 0.289788, 0.997899, 0.503365, 0.726792, 0.800784, 0.450583, 0.269655, 0.444556, 0.555119, 0.93947, 0.533848, 0.0782537, 0.157712, 0.85099, 0.327261, 0.832598, 0.510364, 0.912637, 0.539609, 0.350554, 0.854618, 0.946954, 0.386823, 0.714464, 0.00204045, 0.968002, 0.48749, 0.562856, 0.427489, 0.848607, 0.493587, 0.826042, 0.445187, 0.0619741, 0.101287, 0.526013, 0.194595, 0.775672, 0.486149, 0.272355, 0.127834, 0.743296, 0.150596, 0.681096, 0.57281, 0.0672587, 0.255456, 0.841653, 0.872262, 0.649119, 0.123757, 0.179241, 0.875755, 0.647584, 0.705624, 0.648403, 0.0508813, 0.829607, 0.695577, 0.916696, 0.689075, 0.911448, 0.135606, 0.549444, 0.281353, 0.391995, 0.832494, 0.742705, 0.712778, 0.475071, 0.288333, 0.174372, 0.26146, 0.537422, 0.0596647, 0.608866, 0.423589, 0.609099, 0.143077, 0.919032, 0.902764, 0.595446, 0.380225, 0.321925, 0.647344, 0.812108, 0.969231, 0.761545, 0.619274, 0.187524, 0.799921, 0.307736, 0.389503, 0.250851, 0.576744, 0.470898, 0.520214, 0.247824, 0.875442, 0.458726, 0.0483422, 0.0345533, 0.246986, 0.764085, 0.0722482, 0.215028, 0.157917, 0.943235, 0.568253, 0.611883, 0.703953, 0.744177, 0.376982, 0.929422, 0.731934, 0.569088, 0.520327, 0.860078, 0.472155, 0.927979, 0.40246, 0.326026, 0.51597, 0.488182, 0.0903602, 0.810176, 0.98767, 0.306279, 0.222395, 0.449744, 0.341067, 0.923371, 0.805345, 0.880787, 0.354113, 0.520321, 0.125835, 0.354522, 0.886473, 0.879818, 0.166946, 0.800076, 0.493049, 0.755293, 0.121424, 0.36928, 0.475916, 0.60888, 0.419866, 0.486128, 0.953577, 0.0166693, 0.897612, 0.76408, 0.29579, 0.90232, 0.27467, 0.846143, 0.599024, 0.852155, 0.620326, 0.937455, 0.956604, 0.914655, 0.0814477, 0.467409, 0.0893654, 0.789564, 0.944074, 0.788553, 0.244569, 0.168005, 0.779147, 0.51142, 0.62443, 0.395262, 0.0596324, 0.654439, 0.168753, 0.99687, 0.657187, 0.760906, 0.915153, 0.467333, 0.436316, 0.528364, 0.482071, 0.810865, 0.561541, 0.835901, 0.61142, 0.748716, 0.00598633, 0.996771, 0.112488, 0.376406, 0.822818, 0.341099, 0.432226, 0.804915, 0.211529, 0.815314, 0.635673, 
0.109762, 0.288317, 0.798835, 0.995449, 0.973567, 0.656728, 0.689204, 0.0583982, 0.118429, 0.828065, 0.00536045, 0.138903, 0.209343, 0.0480099, 0.443857, 0.125573, 0.681736, 0.407285, 0.0818151, 0.375224, 0.190126, 0.868232, 0.93468, 0.822556, 0.127266, 0.104354, 0.508798, 0.343762, 0.966641, 0.994762, 0.520799, 0.283876, 0.185071, 0.569375, 0.195671, 0.38562, 0.875036, 0.954734, 0.266918, 0.280726, 0.385349, 0.134495, 0.420917, 0.463568, 0.252675, 0.310618, 0.712974, 0.172954, 0.451586, 0.208031, 0.374081, 0.507844, 0.45277, 0.52935, 0.38675, 0.181988, 0.627821, 0.296563, 0.458081, 0.679424, 0.107427, 0.957737, 0.499756, 0.510599, 0.511263, 0.204941, 0.783688, 0.748013, 0.313695, 0.822132, 0.877007, 0.932383, 0.57994, 0.511188, 0.744959, 0.231559, 0.98697, 0.686656, 0.349502, 0.727331, 0.0570541, 0.460354, 0.317294, 0.29985, 0.614703, 0.897589, 0.672334, 0.80712, 0.203369, 0.739073, 0.663163, 0.840658, 0.442582, 0.972734, 0.822226, 0.0408981, 0.60215, 0.469584, 0.439678, 0.12661, 0.695087, 0.738005, 0.730278, 0.848876, 0.615831, 0.978384, 0.539188, 0.34485, 0.244979, 0.79015, 0.202316, 0.493524, 0.749691, 0.888322, 0.942176, 0.967565, 0.343653, 0.772204, 0.516119, 0.0458385, 0.0860436, 0.669792, 0.528784, 0.0186552, 0.970551, 0.674466, 0.185799, 0.569726, 0.392632, 0.0565441, 0.884705, 0.436255, 0.205135, 0.0951112, 0.516395, 0.902937, 0.820844, 0.619476, 0.454277, 0.630864, 0.972527, 0.257479, 0.554272, 0.339491, 0.948593, 0.330156, 0.894974, 0.82963, 0.0822031, 0.408084, 0.223281, 0.916071, 0.689165, 0.464661, 0.634858, 0.181302, 0.229017, 0.210696, 0.151731, 0.0317992, 0.222419, 0.788384, 0.105893, 0.850989, 0.0346799, 0.0334926, 0.135013, 0.437604, 0.507569, 0.0101919, 0.19781, 0.554976, 0.412081, 0.75034, 0.0838736, 0.184646, 0.414339, 0.690666, 0.68731, 0.913875, 0.102824, 0.284373, 0.542406, 0.0448331, 0.68077, 0.290499, 0.263842, 0.479761, 0.275982, 0.294421, 0.876497, 0.0382377, 0.149754, 0.114487, 0.34663, 0.724447, 0.389409, 0.802181, 0.512255, 0.871142, 0.817144, 0.943827, 0.888949, 0.629429, 0.287183, 0.249371, 0.872029, 0.585722, 0.11813, 0.520707, 0.791212, 0.552112, 0.589188, 0.38551, 0.242621, 0.661368, 0.0682262, 0.634157, 0.443192, 0.203633, 0.201176, 0.584291, 0.950453, 0.259034, 0.761136, 0.772442, 0.611783, 0.862685, 0.913769, 0.772802, 0.908552, 0.248499, 0.516554, 0.896326, 0.192584, 0.666127, 0.291131, 0.67864, 0.450392, 0.798543, 0.856595, 0.274009, 0.811873, 0.621547, 0.252523, 0.863666, 0.964819, 0.911717, 0.223313, 0.529873, 0.464219, 0.846257, 0.592794, 0.347513, 0.259943, 0.875496, 0.314201, 0.0395176, 0.201387, 0.527071, 0.572934, 0.965283, 0.164273, 0.722183, 0.670461, 0.573421, 0.0891485, 0.390577, 0.114763, 0.349043, 0.614023, 0.199395, 0.767955, 0.963946, 0.997774, 0.871356, 0.422047, 0.703578, 0.295348, 0.613672, 0.29313, 0.836419, 0.0611616, 0.99903, 0.775352, 0.829162, 0.103054, 0.523498, 0.961165, 0.692672, 0.932918, 0.618085, 0.772023, 0.371329, 0.0404679, 0.928768, 0.111364, 0.0583651, 0.88597, 0.18057, 0.66822, 0.981161, 0.336133, 0.762795, 0.594076, 0.375663, 0.799212, 0.635779, 0.0445139, 0.0620577, 0.767288, 0.979813, 0.291096, 0.268214, 0.89948, 0.0465171, 0.186285, 0.775602, 0.260225, 0.933776, 0.462414, 0.831417, 0.520135, 0.124447, 0.695567, 0.831564, 0.0788556, 0.565818, 0.94302, 0.45306, 0.0770404, 0.302754, 0.423112, 0.962129, 0.514299, 0.36983, 0.491933, 0.656737, 0.165862, 0.130265, 0.298323, 0.132495, 0.615909, 0.0133612, 0.352362, 0.660888, 0.45423, 0.657297, 0.79193, 0.93095, 0.845148, 0.24055, 0.415642, 0.950532, 0.906417, 
0.38877, 0.276392, 0.191902, 0.824704, 0.291277, 0.370849, 0.416511, 0.829611, 0.0220368, 0.425669, 0.18961, 0.285454, 0.551285, 0.227607, 0.444496, 0.974809, 0.0600968, 0.211983, 0.581975, 0.00425177, 0.960302, 0.64041, 0.494323, 0.541578, 0.119043, 0.943748, 0.966992, 0.209228, 0.60463, 0.127989, 0.104585, 0.436124, 0.740095, 0.361377, 0.907628, 0.974453, 0.91908, 0.492144, 0.0683203, 0.419664, 0.345833, 0.212963, 0.250144, 0.632806, 0.910376, 0.451072, 0.871015, 0.611367, 0.871778, 0.910306, 0.119627, 0.984545, 0.318298, 0.868194, 0.818802, 0.575346, 0.149502, 0.254122, 0.175093, 0.415803, 0.843205, 0.803958, 0.554162, 0.713087, 0.379507, 0.664716, 0.618683, 0.98676, 0.941917, 0.694065, 0.339624, 0.0668197, 0.683359, 0.97611, 0.588092, 0.107186, 0.686056, 0.382191, 0.188653, 0.404279, 0.44653, 0.123, 0.0146833, 0.440021, 0.725741, 0.124184, 0.83591, 0.538041, 0.0555726, 0.591682, 0.346451, 0.293581, 0.0507477, 0.354474, 0.408784, 0.175109, 0.67453, 0.161961, 0.622366, 0.586914, 0.0823875, 0.727024, 0.70076, 0.888525, 0.930183, 0.821958, 0.589729, 0.943463, 0.710158, 0.96339, 0.622963, 0.58137, 0.9565, 0.927559, 0.885087, 0.870896, 0.234865, 0.551913, 0.723878, 0.827738, 0.970025, 0.216431, 0.722694, 0.230254, 0.130758, 0.217069, 0.321699, 0.236559, 0.734331, 0.248787, 0.24723, 0.870885, 0.9878, 0.265207, 0.563228, 0.766959, 0.852504, 0.280999, 0.00920061, 0.739933, 0.0973193, 0.921099, 0.81306, 0.335371, 0.715043, 0.696763, 0.223199, 0.257661, 0.384619, 0.384785, 0.0556174, 0.529579, 0.83923, 0.293077, 0.187327, 0.120192, 0.652666, 0.11147, 0.100827, 0.655984, 0.742341, 0.838797, 0.120838, 0.707987, 0.677343, 0.565744, 0.538032, 0.015161, 0.274831, 0.30208, 0.886585, 0.739963, 0.700467, 0.456345, 0.219709, 0.926903, 0.232933, 0.19907, 0.102168, 0.11887, 0.0908123, 0.366894, 0.789954, 0.460337, 0.322457, 0.883223, 0.948383, 0.784219, 0.863504, 0.764734, 0.759244, 0.988275, 0.864745, 0.953824, 0.31822, 0.314996, 0.882982, 0.815942, 0.485903, 0.0115252, 0.684831, 0.614017, 0.0873085, 0.52319, 0.0558119, 0.55041, 0.310008, 0.850893, 0.725259, 0.488155, 0.116138, 0.762803, 0.0302866, 0.645977, 0.287585, 0.21666, 0.309405, 0.674184, 0.816923, 0.905169, 0.101181, 0.123499, 0.351557, 0.0405134, 0.878338, 0.015469, 0.866496, 0.0844565, 0.490755, 0.669352, 0.085941, 0.152457, 0.238297, 0.472498, 0.438051, 0.757135, 0.149849, 0.431595, 0.912593, 0.089832, 0.634364, 0.0177313, 0.741192, 0.523192, 0.912263, 0.0871267, 0.627257, 0.935303, 0.0816778, 0.251181, 0.624053, 0.395471, 0.42887, 0.54241, 0.742145, 0.424883, 0.51269, 0.935702, 0.301565, 0.119601, 0.38729, 0.261783, 0.402373, 0.728986, 0.348084, 0.997016, 0.0222313, 0.698249, 0.034954, 0.185386, 0.95165, 0.546179, 0.651428, 0.26796, 0.0685543, 0.55737, 0.283411, 0.767661, 0.0244081, 0.685367, 0.947972, 0.034217, 0.726605, 0.698051, 0.426886, 0.801432, 0.756371, 0.407575, 0.836811, 0.686345, 0.859593, 0.523643, 0.351548, 0.765494, 0.176591, 0.516729, 0.777242, 0.108195, 0.81436, 0.277935, 0.877531, 0.708538, 0.730981, 0.540901, 0.184072, 0.845363, 0.0416249, 0.739659, 0.738858, 0.402735, 0.791638, 0.526591, 0.867771, 0.41814, 0.295131, 0.044962, 0.0655176, 0.0198664, 0.106658, 0.954147, 0.966123, 0.82762, 0.632706, 0.140776, 0.989987, 0.845963, 0.226568, 0.486297, 0.988705, 0.572034, 0.439592, 0.706967, 0.393107, 0.143573, 0.697269, 0.104599, 0.698352, 0.867078, 0.492137, 0.383308, 0.0374334, 0.731598, 0.932175, 0.816022, 0.76578, 0.125882, 0.377018, 0.138172, 0.622443, 0.232894, 0.396606, 0.378826, 0.468683, 0.603877, 0.51426, 0.918394, 
0.808697, 0.583117, 0.983041, 0.0401852, 0.279656, 0.634658, 0.0174982, 0.652553, 0.657667, 0.804523, 0.0146742, 0.0437468, 0.502127, 0.825505, 0.082082, 0.85776, 0.748285, 0.367666, 0.583369, 0.83577, 0.489429, 0.452757, 0.272175, 0.778049, 0.642109, 0.946722, 0.448775, 0.924783, 0.120213, 0.560099, 0.6108, 0.404566, 0.322061, 0.894217, 0.871752, 0.934318, 0.540806, 0.715612, 0.245321, 0.693621, 0.536857, 0.947538, 0.549564, 0.00690011, 0.581177, 0.159086, 0.578516, 0.277567, 0.0780366, 0.205541, 0.783508, 0.339408, 0.621534, 0.294725, 0.568251, 0.550272, 0.959998, 0.306617, 0.983857, 0.643051, 0.201416, 0.379511, 0.0911273, 0.00867392, 0.689255, 0.00360455, 0.89881, 0.969304, 0.730101, 0.141655, 0.669899, 0.454591, 0.547034, 0.143947, 0.187448, 0.773797, 0.261486, 0.561844, 0.839189, 0.824408, 0.169363, 0.253418, 0.819671, 0.622918, 0.631451, 0.685432, 0.399756, 0.124523, 0.922578, 0.506742, 0.127644, 0.673104, 0.231756, 0.429476, 0.651572, 0.147774, 0.694393, 0.845516, 0.917211, 0.93802, 0.312698, 0.810501, 0.123674, 0.203391, 0.0207843, 0.623855, 0.828925, 0.637242, 0.201973, 0.181628, 0.711198, 0.852358, 0.0979782, 0.986842, 0.391859, 0.303603, 0.900008, 0.929104, 0.921181, 0.530946, 0.259832, 0.648264, 0.569696, 0.436142, 0.546196, 0.0834616, 0.939629, 0.232199, 0.573199, 0.39586, 0.254669, 0.243346, 0.304382, 0.654493, 0.560111, 0.781565, 0.635758, 0.507616, 0.75214, 0.476468, 0.647811, 0.527588, 0.0332494, 0.366896, 0.531055, 0.0697664, 0.777532, 0.536011, 0.722944, 0.0900476, 0.57308, 0.913552, 0.176855, 0.906393, 0.694757, 0.437899, 0.286775, 0.3538, 0.187071, 0.404394, 0.130317, 0.865417, 0.317758, 0.725547, 0.652804, 0.305662, 0.320363, 0.332596, 0.17036, 0.152259, 0.691891, 0.560425, 0.189973, 0.540408, 0.577431, 0.720768, 0.402886, 0.285252, 0.266165, 0.474568, 0.116938, 0.540145, 0.420395, 0.830567, 0.420468, 0.253555, 0.950871, 0.100956, 0.614152, 0.519237, 0.139785, 0.455553, 0.183753, 0.986065, 0.411425, 0.305915, 0.429603, 0.841138, 0.561344, 0.436388, 0.610828, 0.15472, 0.675246, 0.853011, 0.965463, 0.621879, 0.406218, 0.329405, 0.717781, 0.433035, 0.0178493, 0.718941, 0.10781, 0.441911, 0.174089, 0.430148, 0.32203, 0.197737, 0.277149, 0.537757, 0.298766, 0.612162, 0.741896, 0.575298, 0.396661, 0.756537, 0.0430394, 0.109055, 0.536147, 0.770903, 0.364357, 0.22412, 0.554143, 0.905576, 0.403468, 0.905772, 0.4869, 0.640775, 0.643692, 0.274591, 0.943092, 0.225239, 0.276099, 0.531784, 0.0124671, 0.0565806, 0.266562, 0.0155241, 0.625282, 0.909237, 0.103068, 0.469269, 0.653213, 0.125399, 0.448836, 0.950663, 0.593978, 0.182179, 0.633481, 0.498007, 0.776594, 0.507905, 0.749718, 0.971869, 0.674292, 0.135901, 0.204592, 0.309865, 0.472272, 0.902697, 0.243614, 0.48535, 0.421191, 0.484195, 0.147626, 0.663167, 0.422793, 0.0577459, 0.0945983, 0.308984, 0.689708, 0.954331, 0.163813, 0.522568, 0.413598, 0.704285, 0.14746, 0.200504, 0.303418, 0.233206, 0.244352, 0.316168, 0.636593, 0.233694, 0.386377, 0.285191, 0.132191, 0.888384, 0.0673355, 0.273624, 0.710301, 0.497124, 0.211171, 0.245496, 0.260614, 0.751294, 0.134577, 0.988738, 0.0417233, 0.493367, 0.0692844, 0.0922724, 0.800404, 0.0343097, 0.34494, 0.339203, 0.384323, 0.958678, 0.795491, 0.447462, 0.321355, 0.517795, 0.587684, 0.344861, 0.259348, 0.960066, 0.469398, 0.142309, 0.706147, 0.354212, 0.707038, 0.0158585, 0.440891, 0.749678, 0.523209, 0.0270166, 0.131271, 0.386052, 0.802055, 0.394042, 0.853838, 0.135632, 0.155355, 0.553912, 0.276347, 0.929328, 0.264493, 0.570699, 0.437027, 0.7571, 0.825212, 0.940382, 0.108118, 0.38541, 
0.3061, 0.216764, 0.772938, 0.510621, 0.632783, 0.23145, 0.290395, 0.568904, 0.786351, 0.105085, 0.444512, 0.805947, 0.66873, 0.78683, 0.216537, 0.937454, 0.427521, 0.0749362, 0.560645, 0.447932, 0.2315, 0.143913, 0.822126, 0.246609, 0.881098, 0.850086, 0.686795, 0.00215024, 0.5587, 0.613078, 0.35141, 0.159527, 0.89901, 0.37716, 0.5231, 0.0457704, 0.808698, 0.301096, 0.218873, 0.236171, 0.114397, 0.619328, 0.217334, 0.47348, 0.858687, 0.202401, 0.370079, 0.618376, 0.23096, 0.524045, 0.185272, 0.128056, 0.745207, 0.232581, 0.247388, 0.830298, 0.522178, 0.751212, 0.595712, 0.957827, 0.484628, 0.210787, 0.531757, 0.487252, 0.231507, 0.0111242, 0.83636, 0.0214627, 0.735164, 0.793203, 0.357078, 0.744416, 0.336283, 0.0429804, 0.287745, 0.640177, 0.844866, 0.426414, 0.0408242, 0.0431777, 0.984603, 0.000617907, 0.232387, 0.416756, 0.419424, 0.471341, 0.685176, 0.201536, 0.899329, 0.407221, 0.824743, 0.314813, 0.976795, 0.179168, 0.190919, 0.0478787, 0.133257, 0.196867, 0.25424, 0.345138, 0.975666, 0.547967, 0.822544, 0.842198, 0.145387, 0.980333, 0.969722, 0.385827, 0.742174, 0.992562, 0.778518, 0.737936, 0.874408, 0.93019, 0.848756, 0.10286, 0.411697, 0.513535, 0.100505, 0.235844, 0.0712042, 0.780822, 0.65207, 0.106151, 0.843968, 0.137026, 0.19359, 0.470046, 0.685024, 0.801273, 0.443104, 0.840353, 0.303245, 0.94544, 0.626175, 0.313943, 0.371233, 0.510667, 0.753433, 0.472468, 0.482272, 0.355332, 0.423747, 0.303537, 0.745258, 0.322039, 0.899848, 0.42553, 0.516047, 0.190752, 0.130942, 0.305774, 0.442567, 0.827156, 0.42771, 0.838491, 0.561399, 0.98035, 0.922267, 0.467943, 0.535141, 0.83542, 0.0678746, 0.339785, 0.497421, 0.837094, 0.635399, 0.0376445, 0.290444, 0.953545, 0.815358, 0.015509, 0.0934704, 0.0117289, 0.471379, 0.211196, 0.50282, 0.304326, 0.677232, 0.464641, 0.263981, 0.406351, 0.501177, 0.90898, 0.123073, 0.456577, 0.616319, 0.329018, 0.36137, 0.235127, 0.449414, 0.0533181, 0.124245, 0.823626, 0.181028, 0.0965832, 0.515334, 0.0578094, 0.430617, 0.476579, 0.892971, 0.24295, 0.894233, 0.633983, 0.451852, 0.486397, 0.0788665, 0.314991, 0.961955, 0.721795, 0.925039, 0.136851, 0.285745, 0.0426938, 0.00889496, 0.379703, 0.435715, 0.306528, 0.800782, 0.510652, 0.700367, 0.415429, 0.499462, 0.659518, 0.615336, 0.0678577, 0.104654, 0.0371008, 0.726568, 0.523198, 0.581961, 0.47025, 0.212008, 0.935105, 0.620384, 0.634939, 0.535458, 0.0439939, 0.476947, 0.385057, 0.589417, 0.500308, 0.640718, 0.441176, 0.820168, 0.124298, 0.149802, 0.96686, 0.932195, 0.791948, 0.751393, 0.452534, 0.107434, 0.821382, 0.977205, 0.477527, 0.834642, 0.149939, 0.207407, 0.729512, 0.991264, 0.327756, 0.860948, 0.0246052, 0.582887, 0.528856, 0.0216231, 0.871167, 0.992184, 0.394165, 0.99151, 0.351569, 0.167225, 0.736738, 0.726891, 0.889942, 0.830547, 0.497418, 0.180943, 0.239298, 0.249516, 0.351323, 0.23906, 0.633892, 0.797763, 0.256025, 0.740394, 0.794716, 0.548886, 0.94153, 0.426771, 0.964263, 0.602134, 0.157692, 0.51221, 0.883211, 0.459695, 0.624687, 0.677619, 0.524757, 0.567082, 0.965132, 0.615383, 0.363414, 0.178103, 0.382568, 0.605458, 0.704619, 0.23453, 0.704259, 0.791189, 0.747455, 0.201132, 0.48362, 0.517318, 0.865407, 0.376693, 0.27156, 0.273983, 0.439398, 0.679456, 0.616666, 0.956534, 0.222882, 0.548331, 0.427016, 0.688625, 0.316475, 0.0797888, 0.197334, 0.825732, 0.046541, 0.667399, 0.995749, 0.747012, 0.193808, 0.12538, 0.340587, 0.460038, 0.090228, 0.169801, 0.593597, 0.0286974, 0.277012, 0.0333368, 0.555538, 0.64682, 0.664105, 0.112142, 0.972264, 0.589269, 0.437943, 0.077566, 0.637009, 0.399174, 0.289606, 
0.639596, 0.676414, 0.835179, 0.44786, 0.0174114, 0.392065, 0.12716, 0.821315, 0.0340378, 0.193213, 0.348501, 0.901849, 0.0942689, 0.735283, 0.00792822, 0.844442, 0.107897, 0.583495, 0.883682, 0.684787, 0.569863, 0.096334, 0.655823, 0.686633, 0.405986, 0.00363057, 0.440372, 0.0493766, 0.377997, 0.965714, 0.323141, 0.138127, 0.870226, 0.502981, 0.864036, 0.930167, 0.24043, 0.617038, 0.171409, 0.498145, 0.325839, 0.0213956, 0.62056, 0.948913, 0.378266, 0.0567162, 0.451742, 0.875486, 0.681645, 0.924886, 0.359523, 0.828532, 0.33314, 0.981575, 0.6529, 0.198985, 0.356704, 0.730095, 0.689353, 0.80525, 0.86304, 0.019015, 0.98754, 0.361893, 0.842485, 0.793509, 0.301799, 0.201418, 0.0551384, 0.509297, 0.193336, 0.171306, 0.830488, 0.12314, 0.109343, 0.916502, 0.589594, 0.957195, 0.710631, 0.353375, 0.839165, 0.31661, 0.810285, 0.89454, 0.426378, 0.268367, 0.54897, 0.945698, 0.102724, 0.132359, 0.993314, 0.624269, 0.633677, 0.304598, 0.865015, 0.737248, 0.887137, 0.635383, 0.790786, 0.986307, 0.0834649, 0.308325, 0.773805, 0.693736, 0.869477, 0.378664, 0.166997, 0.548414, 0.591115, 0.207419, 0.678926, 0.306532, 0.882389, 0.330757, 0.66696, 0.0226264, 0.293737, 0.549755, 0.190018, 0.428002, 0.0330888, 0.78409, 0.850576, 0.0957443, 0.75285, 0.183112, 0.318606, 0.679375, 0.0513686, 0.784358, 0.680503, 0.936123, 0.19679, 0.342259, 0.857399, 0.497498, 0.373449, 0.963185, 0.061194, 0.67867, 0.319073, 0.944838, 0.945182, 0.375355, 0.664758, 0.660519, 0.924169, 0.330604, 0.490298, 0.0174605, 0.814949, 0.557866, 0.631376, 0.738338, 0.428307, 0.967856, 0.545299, 0.840206, 0.680977, 0.156847, 0.734255, 0.367661, 0.642721, 0.508084, 0.887059, 0.217669, 0.90437, 0.983986, 0.560042, 0.932969, 0.313569, 0.855987, 0.538502, 0.275809, 0.883581, 0.620688, 0.0784743, 0.223647, 0.824707, 0.715881, 0.314945, 0.816051, 0.503688, 0.338043, 0.150112, 0.930396, 0.155001, 0.0774007, 0.0745277, 0.707445, 0.853576, 0.620116, 0.702835, 0.660482, 0.459573, 0.815874, 0.884924, 0.779751, 0.764061, 0.107455, 0.229721, 0.0151863, 0.932072, 0.666714, 0.500362, 0.68834, 0.597092, 0.708239, 0.970872, 0.857097, 0.685983, 0.955755, 0.190806, 0.0163263, 0.487328, 0.186357, 0.434234, 0.864356, 0.954898, 0.121264, 0.741515, 0.134672, 0.998451, 0.412249, 0.636198, 0.333825, 0.394477, 0.563415, 0.834848, 0.0771076, 0.835773, 0.56283, 0.0627276, 0.36464, 0.982207, 0.736576, 0.286206, 0.171741, 0.467778, 0.467378, 0.745612, 0.8679, 0.36728, 0.441325, 0.936111, 0.635793, 0.832643, 0.584554, 0.0834765, 0.614986, 0.34952, 0.0930057, 0.504816, 0.946701, 0.89807, 0.38719, 0.146971, 0.76105, 0.00644369, 0.463738, 0.826216, 0.18824, 0.289021, 0.803412, 0.743778, 0.0281653, 0.750498, 0.790439, 0.0265427, 0.289931, 0.268671, 0.946377, 0.167911, 0.820347, 0.763946, 0.201094, 0.144418, 0.413465, 0.667066, 0.557799, 0.935135, 0.675409, 0.0583699, 0.902752, 0.702898, 0.24328, 0.75703, 0.958257, 0.743549, 0.126296, 0.265276, 0.729513, 0.962756, 0.123353, 0.143048, 0.263312, 0.690127, 0.859566, 0.485524, 0.0239273, 0.00151717, 0.0723809, 0.602373, 0.762771, 0.640783, 0.475153, 0.192226, 0.904081, 0.375573, 0.953741, 0.281071, 0.614488, 0.396497, 0.295024, 0.650256, 0.201304, 0.500559, 0.439527, 0.453997, 0.0177716, 0.239765, 0.86825, 0.600484, 0.0629735, 0.179571, 0.813547, 0.422881, 0.635806, 0.957556, 0.151832, 0.571374, 0.897366, 0.110469, 0.798898, 0.881717, 0.31844, 0.645391, 0.912229, 0.0643488, 0.084613, 0.726527, 0.216498, 0.906139, 0.853988, 0.84643, 0.941913, 0.594186, 0.633076, 0.753172, 0.502716, 0.363552, 0.506002, 0.313169, 0.135435, 0.816577, 
0.667825, 0.627015, 0.251642, 0.781796, 0.856541, 0.294344, 0.119712, 0.356421, 0.362389, 0.146107, 0.910781, 0.762419, 0.39919, 0.336941, 0.92832, 0.415532, 0.089885, 0.516, 0.530227, 0.327444, 0.83888, 0.413443, 0.459605, 0.171019, 0.981721, 0.137748, 0.489884, 0.352895, 0.955072, 0.82157, 0.103319, 0.927083, 0.532581, 0.316902, 0.637615, 0.013524, 0.785173, 0.551048, 0.201483, 0.38202, 0.67241, 0.771687, 0.531025, 0.826922, 0.468332, 0.0624375, 0.0825748, 0.244726, 0.0474653, 0.295925, 0.19707, 0.963282, 0.0333516, 0.451775, 0.832708, 0.715401, 0.4051, 0.96577, 0.940278, 0.14701, 0.555778, 0.0758023, 0.0197387, 0.439182, 0.770812, 0.271119, 0.984473, 0.964287, 0.245488, 0.152525, 0.761959, 0.455694, 0.550487, 0.522703, 0.754432, 0.050449, 0.255882, 0.46119, 0.0253107, 0.761951, 0.267347, 0.485221, 0.620061, 0.543256, 0.95605, 0.947825, 0.0579034, 0.97709, 0.377694, 0.84545, 0.0448161, 0.613559, 0.105458, 0.0365728, 0.129131, 0.954268, 0.031975, 0.0117617, 0.889931, 0.594795, 0.510126, 0.0368811, 0.525887, 0.291737, 0.585558, 0.337933, 0.4318, 0.555003, 0.678292, 0.443403, 0.0961935, 0.0921452, 0.745526, 0.240156, 0.164814, 0.957537, 0.879883, 0.950124, 0.636305, 0.220515, 0.205567, 0.189646, 0.538752, 0.290674, 0.32938, 0.797885, 0.510492, 0.630532, 0.0980946, 0.240802, 0.898951, 0.831824, 0.255786, 0.87384, 0.566666, 0.102045, 0.334717, 0.973228, 0.431538, 0.148153, 0.528583, 0.680616, 0.376027, 0.119179, 0.366423, 0.920142, 0.556832, 0.519105, 0.206382, 0.757003, 0.021572, 0.612761, 0.451421, 0.0577189, 0.669513, 0.830178, 0.612823, 0.60906, 0.825019, 0.993897, 0.0701606, 0.153628, 0.61948, 0.808924, 0.423015, 0.98627, 0.671347, 0.225241, 0.267217, 0.658016, 0.681039, 0.60694, 0.54466, 0.417781, 0.0437905, 0.146063, 0.74165, 0.571657, 0.989927, 0.535368, 0.072208, 0.145779, 0.706269, 0.585279, 0.754385, 0.61125, 0.197921, 0.79298, 0.739187, 0.599358, 0.513894, 0.574557, 0.69983, 0.849722, 0.776265, 0.175063, 0.131493, 0.707275, 0.507535, 0.730282, 0.652986, 0.742082, 0.17585, 0.654827, 0.82512, 0.58306, 0.935568, 0.102209, 0.802158, 0.520895, 0.851372, 0.483173, 0.384515, 0.0963455, 0.366792, 0.91588, 0.157587, 0.636136, 0.819416, 0.684009, 0.98648, 0.0268828, 0.120039, 0.763116, 0.310086, 0.328987, 0.161907, 0.82533, 0.2567, 0.953775, 0.4694, 0.778376, 0.554404, 0.986563, 0.819607, 0.377642, 0.318059, 0.918451, 0.426894, 0.516104, 0.0522369, 0.126048, 0.660289, 0.484337, 0.956744, 0.286428, 0.0413375, 0.896751, 0.870654, 0.200405, 0.631848, 0.824929, 0.0500405, 0.627212, 0.434032, 0.564735, 0.89303, 0.623865, 0.0888084, 0.403164, 0.808939, 0.415461, 0.892036, 0.796837, 0.914876, 0.74146, 0.0978747, 0.936716, 0.89141, 0.0777628, 0.318249, 0.715764, 0.132868, 0.578082, 0.629962, 0.93883, 0.697048, 0.083048, 0.423943, 0.196325, 0.610782, 0.67452, 0.482867, 0.253195, 0.487306, 0.918178, 0.836293, 0.518131, 0.604459, 0.16633, 0.730498, 0.21196, 0.499088, 0.0243054, 0.398041, 0.937355, 0.313733, 0.0234008, 0.484782, 0.209581, 0.0270179, 0.538044, 0.714142, 0.372622, 0.227493, 0.706833, 0.960303, 0.429944, 0.840859, 0.405883, 0.381973, 0.0148768, 0.497663, 0.213393, 0.561021, 0.348902, 0.786089, 0.28272, 0.86897, 0.531537, 0.661053, 0.189288, 0.74971, 0.325308, 0.918797, 0.813701, 0.228766, 0.824648, 0.292886, 0.710634, 0.0932822, 0.215401, 0.0661168, 0.074035, 0.119914, 0.25693, 0.787931, 0.672028, 0.809371, 0.624077, 0.98712, 0.770035, 0.812618, 0.11387, 0.797945, 0.586221, 0.227731, 0.0190717, 0.911422, 0.289266, 0.104082, 0.772593, 0.365488, 0.933498, 0.286594, 0.0819389, 0.588701, 
0.812149, 0.308459, 0.281146, 0.633226, 0.580048, 0.437438, 0.398447, 0.129135, 0.4693, 0.155483, 0.55256, 0.639354, 0.900999, 0.441239, 0.951415, 0.993208, 0.0965534, 0.457934, 0.874072, 0.146623, 0.365402, 0.859546, 0.876866, 0.608724, 0.454491, 0.840641, 0.638527, 0.654233, 0.698724, 0.706963, 0.979879, 0.296499, 0.873044, 0.996017, 0.634044, 0.769548, 0.177342, 0.667196, 0.70298, 0.640204, 0.827305, 0.377461, 0.67522, 0.661258, 0.516612, 0.203282, 0.0711081, 0.313474, 0.281737, 0.307195, 0.047267, 0.0455222, 0.755168, 0.87994, 0.360752, 0.209819, 0.710788, 0.0622891, 0.617599, 0.229477, 0.863717, 0.852072, 0.115508, 0.736702, 0.190183, 0.152829, 0.0356765, 0.472027, 0.491483, 0.122155, 0.494228, 0.137175, 0.547976, 0.195646, 0.0121695, 0.73299, 0.309884, 0.560044, 0.987009, 0.430698, 0.00382474, 0.814072, 0.470584, 0.347878, 0.941325, 0.230163, 0.64553, 0.700458, 0.848281, 0.339537, 0.1447, 0.552574, 0.0186459, 0.461755, 0.598745, 0.185845, 0.228224, 0.782785, 0.269419, 0.560944, 0.59951, 0.325321, 0.418616, 0.69145, 0.107765, 0.0735108, 0.641734, 0.784847, 0.710308, 0.894437, 0.711628, 0.64592, 0.10606, 0.768545, 0.182833, 0.0521074, 0.373284, 0.836888, 0.812419, 0.428769, 0.0369999, 0.107018, 0.534173, 0.297168, 0.171209, 0.447395, 0.350044, 0.713297, 0.355536, 0.771001, 0.540365, 0.768313, 0.980241, 0.502351, 0.307796, 0.707297, 0.230677, 0.237729, 0.848202, 0.778981, 0.747103, 0.875742, 0.528053, 0.931245, 0.6015, 5.17358e-05, 0.643154, 0.974905, 0.506503, 0.538104, 0.794559, 0.464988, 0.775557, 0.509155, 0.383407, 0.334673, 0.252116, 0.934364, 0.508817, 0.316989, 0.249102, 0.482309, 0.421111, 0.5004, 0.946172, 0.275846, 0.00794803, 0.80494, 0.53359, 0.193383, 0.868671, 0.22597, 0.741639, 0.0730955, 0.418945, 0.391232, 0.798867, 0.611787, 0.961968, 0.740611, 0.259884, 0.318891, 0.807088, 0.735529, 0.856915, 0.475515, 0.36784, 0.574426, 0.41317, 0.252748, 0.869888, 0.265862, 0.586496, 0.279353, 0.936098, 0.999699, 0.578387, 0.12723, 0.368401, 0.634045, 0.748139, 0.409005, 0.88407, 0.0502159, 0.995984, 0.0912325, 0.0429103, 0.970043, 0.0216342, 0.0568862, 0.360242, 0.833031, 0.179521, 0.59828, 0.701973, 0.387729, 0.667839, 0.115342, 0.775766, 0.246466, 0.310886, 0.186815, 0.291886, 0.566824, 0.926555, 0.292389, 0.66239, 0.363306, 0.67926, 0.111689, 0.734432, 0.711458, 0.14954, 0.00754289, 0.62662, 0.825252, 0.149637, 0.947528, 0.459901, 0.045079, 0.939959, 0.433388, 0.70712, 0.961558, 0.95893, 0.721015, 0.874506, 0.832101, 0.282481, 0.90934, 0.761407, 0.392322, 0.159559, 0.94197, 0.870158, 0.03336, 0.256874, 0.0622876, 0.529925, 0.987597, 0.405906, 0.0755638, 0.528436, 0.783823, 0.528075, 0.703557, 0.92635, 0.560757, 0.490081, 0.703722, 0.937953, 0.359367, 0.793429, 0.410178, 0.670041, 0.80674, 0.00688808, 0.16676, 0.106225, 0.444607, 0.193202, 0.522418, 0.386556, 0.670946, 0.02345, 0.914917, 0.666931, 0.116329, 0.668862, 0.0387895, 0.528626, 0.759742, 0.0109455, 0.707786, 0.960788, 0.786994, 0.659212, 0.730779, 0.551935, 0.0823566, 0.352984, 0.0386925, 0.276281, 0.36703, 0.831537, 0.360835, 0.977842, 0.191179, 0.236736, 0.539506, 0.812653, 0.420959, 0.314764, 0.681369, 0.922514, 0.0198208, 0.254843, 0.290766, 0.0609381, 0.79257, 0.956871, 0.293394, 0.883664, 0.692588, 0.959669, 0.515316, 0.103004, 0.526106, 0.937301, 0.755371, 0.363085, 0.949461, 0.64215, 0.955915, 0.449413, 0.499907, 0.581976, 0.937035, 0.168617, 0.98217, 0.795014, 0.418159, 0.589456, 0.71694, 0.618336, 0.917788, 0.795414, 0.148801, 0.93661, 0.452789, 0.460439, 0.39141, 0.1485, 0.569739, 0.969689, 0.286177, 
0.288925, 0.121401, 0.689391, 0.800055, 0.58403, 0.0913524, 0.721822, 0.820777, 0.627701, 0.848366, 0.430608, 0.568319, 0.24714, 0.456631, 0.474619, 0.182914, 0.16318, 0.969156, 0.0715661, 0.357084, 0.489423, 0.441219, 0.641865, 0.0972819, 0.281715, 0.892366, 0.666108, 0.407355, 0.793999, 0.767004, 0.849207, 0.594093, 0.591909, 0.185094, 0.481622, 0.130339, 0.207815, 0.633715, 0.0950679, 0.304213, 0.777771, 0.566875, 0.650894, 0.908, 0.715623, 0.453709, 0.494873, 0.164986, 0.136478, 0.844892, 0.351003, 0.529236, 0.357621, 0.540397, 0.501937, 0.783928, 0.325222, 0.21133, 0.0124853, 0.0514517, 0.123201, 0.483255, 0.897414, 0.827372, 0.383265, 0.749082, 0.144005, 0.832858, 0.419486, 0.812362, 0.354209, 0.750605, 0.725897, 0.311671, 0.898322, 0.139518, 0.375711, 0.648342, 0.938626, 0.503861, 0.388468, 0.340517, 0.443478, 0.938442, 0.586266, 0.274055, 0.0277146, 0.27798, 0.134791, 0.323235, 0.460063, 0.206483, 0.990425, 0.285144, 0.0296279, 0.887272, 0.677299, 0.189501, 0.50649, 0.362349, 0.57537, 0.493371, 0.749639, 0.984641, 0.545836, 0.75374, 0.841115, 0.961771, 0.377901, 0.079646, 0.36752, 0.70832, 0.41948, 0.616419, 0.619567, 0.797896, 0.971316, 0.992549, 0.977303, 0.233725, 0.0465331, 0.443758, 0.68886, 0.809364, 0.720206, 0.473988, 0.0438189, 0.394437, 0.843318, 0.673354, 0.247272, 0.716884, 0.465691, 0.67913, 0.103581, 0.247044, 0.776397, 0.713336, 0.0857064, 0.733572, 0.00824115, 0.973825, 0.0054631, 0.690264, 0.131226, 0.202669, 0.419127, 0.0808313, 0.90791, 0.00845407, 0.553777, 0.333988, 0.10771, 0.864626, 0.900613, 0.358792, 0.213129, 0.9265, 0.916642, 0.305348, 0.0714905, 0.448046, 0.0165338, 0.781888, 0.657568, 0.380822, 0.285861, 0.747746, 0.336165, 0.122518, 0.109034, 0.196583, 0.53702, 0.332354, 0.675378, 0.457182, 0.586038, 0.0790538, 0.39616, 0.451382, 0.212597, 0.0578382, 0.394242, 0.250448, 0.192612, 0.987726, 0.149995, 0.48443, 0.22735, 0.0562007, 0.397844, 0.571009, 0.0596158, 0.533837, 0.772239, 0.37539, 0.636339, 0.084022, 0.910418, 0.385733, 0.842044, 0.358594, 0.927797, 0.0216316, 0.834033, 0.151185, 0.65671, 0.264563, 0.683936, 0.562267, 0.240411, 0.860032, 0.0446961, 0.936807, 0.477205, 0.860825, 0.2123, 0.284143, 0.0455541, 0.694883, 0.0617012, 0.575504, 0.269704, 0.346369, 0.339188, 0.480336, 0.985856, 0.0546475, 0.626681, 0.361976, 0.626888, 0.055417, 0.82409, 0.710263, 0.128441, 0.595352, 0.22604, 0.43445, 0.837705, 0.104783, 0.0511415, 0.190961, 0.596936, 0.053452, 0.304174, 0.57171, 0.353215, 0.010488, 0.246816, 0.0932122, 0.612738, 0.501998, 0.586356, 0.396924, 0.0936036, 0.924441, 0.505925, 0.262601, 0.32949, 0.00111347, 0.46364, 0.507593, 0.121674, 0.538844, 0.855018, 0.41841, 0.103038, 0.549434, 0.966196, 0.951207, 0.0533569, 0.445739, 0.378899, 0.160229, 0.638584, 0.873784, 0.57244, 0.935941, 0.258489, 0.0491446, 0.467041, 0.898167, 0.338616, 0.500514, 0.301561, 0.863336, 0.848966, 0.390039, 0.28318, 0.410614, 0.256398, 0.105856, 0.0358722, 0.961555, 0.341872, 0.891291, 0.659691, 0.798237, 0.568368, 0.181049, 0.953716, 0.66974, 0.321753, 0.730963, 0.800236, 0.615347, 0.926229, 0.771897, 0.718593, 0.760681, 0.967301, 0.773375, 0.860493, 0.443775, 0.436871, 0.244916, 0.970733, 0.0458495, 0.577526, 0.279178, 0.934203, 0.268333, 0.192285, 0.210131, 0.710649, 0.353909, 0.836996, 0.777894, 0.230633, 0.228066, 0.708844, 0.474627, 0.302571, 0.701639, 0.0487951, 0.379025, 0.836639, 0.684808, 0.994508, 0.926402, 0.300223, 0.53037, 0.905855, 0.0893086, 0.714568, 0.901778, 0.666019, 0.266441, 0.730149, 0.393682, 0.430029, 0.208461, 0.794102, 0.79503, 0.586481, 
0.903543, 0.241359, 0.456652, 0.00214802, 0.312227, 0.582326, 0.615416, 0.0558838, 0.923502, 0.706949, 0.559163, 0.948034, 0.328065, 0.951462, 0.868804, 0.147185, 0.91278, 0.779487, 0.816029, 0.639607, 0.305077, 0.918527, 0.197986, 0.294465, 0.892737, 0.539866, 0.420432, 0.122988, 0.835, 0.898758, 0.386068, 0.101431, 0.589552, 0.489902, 0.26309, 0.533791, 0.356569, 0.0118044, 0.953867, 0.26857, 0.186294, 0.0452774, 0.421917, 0.493784, 0.414785, 0.721879, 0.912289, 0.658226, 0.844909, 0.296203, 0.168926, 0.240677, 0.680888, 0.341393, 0.440625, 0.934391, 0.269084, 0.382079, 0.353045, 0.892814, 0.46388, 0.136527, 0.666674, 0.149175, 0.878254, 0.525858, 0.803983, 0.407269, 0.369408, 0.37673, 0.98668, 0.700055, 0.158113, 0.597962, 0.632887, 0.709538, 0.950499, 0.429259, 0.657987, 0.0603, 0.78455, 0.817818, 0.670467, 0.45188, 0.533857, 0.398086, 0.237219, 0.748755, 0.657383, 0.536506, 0.399018, 0.87733, 0.342198, 0.758642, 0.509341, 0.293924, 0.060145, 0.504314, 0.568298, 0.178665, 0.953087, 0.42456, 0.666576, 0.569727, 0.721221, 0.824107, 0.796511, 0.626571, 0.255025, 0.225761, 0.59357, 0.690361, 0.686626, 0.746304, 0.184284, 0.648282, 0.819329, 0.952076, 0.448158, 0.0332633, 0.852651, 0.531981, 0.896184, 0.737609, 0.560398, 0.353053, 0.307972, 0.485161, 0.300442, 0.942142, 0.73065, 0.979565, 0.927786, 0.118953, 0.528236, 0.659064, 0.284041, 0.624441, 0.229304, 0.73557, 0.880571, 0.339063, 0.708249, 0.826763, 0.210482, 0.136053, 0.0496121, 0.466992, 0.822229, 0.191336, 0.304231, 0.620374, 0.0859391, 0.223158, 0.2694, 0.193214, 0.261619, 0.014816, 0.17563, 0.554757, 0.674788, 0.741212, 0.766272, 0.914706, 0.615055, 0.160788, 0.637888, 0.430139, 0.464245, 0.380538, 0.892205, 0.335936, 0.921497, 0.846967, 0.00290858, 0.91668, 0.307019, 0.489716, 0.890401, 0.680415, 0.846589, 0.337138, 0.486051, 0.356143, 0.666638, 0.146973, 0.760682, 0.262908, 0.0927556, 0.390415, 0.960416, 0.195444, 0.515854, 0.310233, 0.52904, 0.765056, 0.512377, 0.989755, 0.487331, 0.367056, 0.0187709, 0.541553, 0.373764, 0.932688, 0.0389721, 0.484621, 0.199486, 0.400526, 0.0814984, 0.180155, 0.911501, 0.694136, 0.34408, 0.78081, 0.368966, 0.398434, 0.37802, 0.945652, 0.911507, 0.655942, 0.36852, 0.44883, 0.583638, 0.982306, 0.17161, 0.94737, 0.51886, 0.0805748, 0.981497, 0.46145, 0.0340455, 0.632266, 0.685372, 0.67774, 0.694415, 0.949281, 0.0680856, 0.513159, 0.0697832, 0.360516, 0.970561, 0.320675, 0.95767, 0.0560639, 0.139137, 0.886109, 0.880892, 0.968041, 0.616826, 0.553778, 0.379093, 0.538205, 0.513793, 0.958085, 0.959205, 0.907414, 0.932649, 0.22889, 0.98758, 0.0628116, 0.955119, 0.271644, 0.801689, 0.011347, 0.646311, 0.745771, 0.47517, 0.600875, 0.294792, 0.764504, 0.554181, 0.472776, 0.0642935, 0.606999, 0.775537, 0.408396, 0.281776, 0.340066, 0.0262481, 0.565226, 0.611996, 0.969395, 0.9418, 0.719319, 0.796495, 0.823326, 0.225035, 0.739689, 0.451779, 0.0521533, 0.449227, 0.959449, 0.961225, 0.660242, 0.920841, 0.416676, 0.797053, 0.655847, 0.608478, 0.900489, 0.165995, 0.738849, 0.27131, 0.801538, 0.94637, 0.370646, 0.679767, 0.494438, 0.378872, 0.35697, 0.359024, 0.303692, 0.275711, 0.323702, 0.512316, 0.371951, 0.533268, 0.685444, 0.40831, 0.771926, 0.748736, 0.540789, 0.642759, 0.538328, 0.647328, 0.291415, 0.874469, 0.834747, 0.857975, 0.968321, 0.990562, 0.263434, 0.639832, 0.441566, 0.0726332, 0.340444, 0.752048, 0.349807, 0.730568, 0.789266, 0.811853, 0.513046, 0.899613, 0.516308, 0.497525, 0.234687, 0.237778, 0.165675, 0.762917, 0.451826, 0.615823, 0.662717, 0.457509, 0.158498, 0.577316, 0.35561, 0.330589, 
0.700955, 0.841483, 0.163454, 0.193675, 0.662746, 0.991028, 0.736513, 0.560741, 0.216299, 0.708886, 0.122411, 0.901935, 0.113462, 0.504369, 0.0346855, 0.435608, 0.862252, 0.75051, 0.151968, 0.911389, 0.652589, 0.267185, 0.157826, 0.190982, 0.335694, 0.688365, 0.911853, 0.706414, 0.704084, 0.973135, 0.838225, 0.104534, 0.0721, 0.0749146, 0.326383, 0.87291, 0.402227, 0.401096, 0.412568, 0.0246242, 0.525776, 0.188117, 0.00442395, 0.494337, 0.519534, 0.110586, 0.173421, 0.20723, 0.318541, 0.73166, 0.681771, 0.870382, 0.710488, 0.206312, 0.15444, 0.147928, 0.97648, 0.596264, 0.363473, 0.606857, 0.88176, 0.745383, 0.923696, 0.965812, 0.753421, 0.641137, 0.611174, 0.459111, 0.66334, 0.954044, 0.489173, 0.948543, 0.150962, 0.0701563, 0.8108, 0.350131, 0.713544, 0.498818, 0.200341, 0.186501, 0.64419, 0.586686, 0.745698, 0.686946, 0.673159, 0.556574, 0.410714, 0.738815, 0.696961, 0.858431, 0.10813, 0.81019, 0.787369, 0.737881, 0.260082, 0.542611, 0.309025, 0.235043, 0.964064, 0.234988, 0.87565, 0.675234, 0.493421, 0.833309, 0.910046, 0.856748, 0.339225, 0.537402, 0.105541, 0.614716, 0.374233, 0.66333, 0.098417, 0.627339, 0.415579, 0.821638, 0.705974, 0.463912, 0.0648303, 0.954619, 0.350892, 0.126585, 0.970619, 0.883343, 0.712869, 0.203829, 0.0761472, 0.932658, 0.868495, 0.268359, 0.406852, 0.936335, 0.885286, 0.426577, 0.814333, 0.487315, 0.266312, 0.874065, 0.113086, 0.0879669, 0.358795, 0.731274, 0.780572, 0.762504, 0.396417, 0.0754197, 0.0542, 0.257103, 0.856874, 0.369736, 0.27032, 0.99483, 0.778053, 0.212175, 0.180389, 0.341515, 0.978491, 0.341037, 0.70969, 0.0857076, 0.897486, 0.113346, 0.940731, 0.503259, 0.766457, 0.272352, 0.251583, 0.19934, 0.747792, 0.244096, 0.355233, 0.747844, 0.238513, 0.74016, 0.735271, 0.488837, 0.937653, 0.974487, 0.888443, 0.251202, 0.571489, 0.864624, 0.638694, 0.971124, 0.943243, 0.702042, 0.779263, 0.257991, 0.234725, 0.065335, 0.924497, 0.227651, 0.597242, 0.893316, 0.720336, 0.697012, 0.733315, 0.628808, 0.801178, 0.168906, 0.414159, 0.26456, 0.418753, 0.0699098, 0.657538, 0.407016, 0.0388166, 0.0684677, 0.0342391, 0.790733, 0.9259, 0.796201, 0.705881, 0.0255235, 0.719836, 0.33844, 0.186822, 0.566064, 0.675268, 0.0149093, 0.606167, 0.350088, 0.10668, 0.446297, 0.0843774, 0.790409, 0.717251, 0.559988, 0.257403, 0.425087, 0.376849, 0.584526, 0.476961, 0.776508, 0.800178, 0.680013, 0.412751, 0.202657, 0.826877, 0.486285, 0.00149935, 0.390268, 0.758384, 0.905976, 0.768315, 0.253647, 0.983081, 0.00663012, 0.202434, 0.178113, 0.116399, 0.039723, 0.6561, 0.391824, 0.297508, 0.588924, 0.923464, 0.00537664, 0.836237, 0.964078, 0.828544, 0.150285, 0.963379, 0.966769, 0.570818, 0.181068, 0.17895, 0.909913, 0.865376, 0.40041, 0.849182, 0.743446, 0.970249, 0.00954235, 0.73294, 0.406437, 0.731526, 0.470572, 0.905462, 0.153149, 0.069615, 0.0179346, 0.196463, 0.115977, 0.577559, 0.370076, 0.357853, 0.229909, 0.43031, 0.865533, 0.0289437, 0.44876, 0.538148, 0.542429, 0.678139, 0.305213, 0.265633, 0.203731, 0.731977, 0.504254, 0.791669, 0.248444, 0.00427831, 0.728808, 0.422607, 0.0443968, 0.412837, 0.77262, 0.838393, 0.724063, 0.57939, 0.650184, 0.952033, 0.531606, 0.510049, 0.280466, 0.661898, 0.298809, 0.937598, 0.908492, 0.538353, 0.188075, 0.716075, 0.303379, 0.361017, 0.462339, 0.855308, 0.064749, 0.741943, 0.66316, 0.861273, 0.665325, 0.922275, 0.198738, 0.648602, 0.816523, 0.602193, 0.65667, 0.462498, 0.293423, 0.266133, 0.597445, 0.830623, 0.540784, 0.98207, 0.0532956, 0.676609, 0.167312, 0.646478, 0.841116, 0.891341, 0.48221, 0.0474615, 0.958032, 0.472501, 0.0507142, 
0.321953, 0.241055, 0.888323, 0.736603, 0.796845, 0.785677, 0.707954, 0.0422941, 0.491214, 0.695153, 0.431698, 0.0973597, 0.69562, 0.231038, 0.160969, 0.472323, 0.296825, 0.0143042, 0.647385, 0.472272, 0.0449773, 0.0375269, 0.340891, 0.182227, 0.0961762, 0.0278703, 0.0500822, 0.320766, 0.612475, 0.765363, 0.129653, 0.993632, 0.597538, 0.671239, 0.898884, 0.136976, 0.654171, 0.877248, 0.430103, 0.510388, 0.00281405, 0.579553, 0.675898, 0.283895, 0.547819, 0.797886, 0.956798, 0.916383, 0.0430432, 0.773556, 0.682857, 0.00983586, 0.959418, 0.380881, 0.413192, 0.909154, 0.671533, 0.465914, 0.158052, 0.534799, 0.0888263, 0.0101368, 0.928351, 0.0414402, 0.319122, 0.133462, 0.961109, 0.741489, 0.785996, 0.169425, 0.559779, 0.944418, 0.782808, 0.852151, 0.00149579, 0.205381, 0.446745, 0.842947, 0.29455, 0.787455, 0.0536224, 0.193258, 0.366233, 0.11789, 0.639724, 0.541375, 0.786406, 0.526812, 0.843084, 0.0228975, 0.0407159, 0.917178, 0.247129, 0.399214, 0.601143, 0.47783, 0.0326244, 0.0280982, 0.994886, 0.039664, 0.757133, 0.83938, 0.120956, 0.174113, 0.313512, 0.636826, 0.576719, 0.953172, 0.195074, 0.577762, 0.476535, 0.569366, 0.350372, 0.156191, 0.411399, 0.153506, 0.935683, 0.473002, 0.440206, 0.119016, 0.645946, 0.0209994, 0.504743, 0.675811, 0.379955, 0.335104, 0.805845, 0.115485, 0.324205, 0.347445, 0.398456, 0.0608475, 0.695133, 0.963092, 0.852476, 0.728553, 0.634305, 0.808109, 0.705899, 0.855941, 0.224381, 0.875126, 0.257128, 0.00800127, 0.129995, 0.999965, 0.713339, 0.110972, 0.237753, 0.0847136, 0.68729, 0.162604, 0.859784, 0.742029, 0.847837, 0.313516, 0.136476, 0.878395, 0.938595, 0.599178, 0.951067, 0.21406, 0.561472, 0.739648, 0.850327, 0.0271676, 0.0405028, 0.641079, 0.260596, 0.254249, 0.702193, 0.960725, 0.38681, 0.846363, 0.517851, 0.90948, 0.107653, 0.779072, 0.931403, 0.105728, 0.178626, 0.463024, 0.145339, 0.216668, 0.818549, 0.783054, 0.80364, 0.11403, 0.871478, 0.613073, 0.246428, 0.69674, 0.646755, 0.74023, 0.361362, 0.957374, 0.322948, 0.592186, 0.687559, 0.767892, 0.918306, 0.85567, 0.374087, 0.0485491, 0.415257, 0.571648, 0.956083, 0.543928, 0.366023, 0.228243, 0.439239, 0.732554, 0.162414, 0.893113, 0.442318, 0.873651, 0.486249, 0.975333, 0.021467, 0.0117069, 0.45745, 0.760263, 0.397957, 0.40605, 0.977974, 0.268662, 0.991085, 0.862039, 0.851756, 0.134341, 0.868184, 0.347163, 0.695905, 0.620275, 0.268942, 0.776129, 0.805826, 0.020793, 0.922417, 0.703328, 0.66502, 0.651994, 0.442992, 0.199149, 0.163923, 0.516534, 0.830496, 0.436574, 0.350105, 0.624314, 0.385537, 0.00687786, 0.0963305, 0.162069, 0.431528, 0.662414, 0.420444, 0.27102, 0.515764, 0.384239, 0.747993, 0.0230769, 0.301118, 0.183377, 0.717655, 0.425752, 0.605783, 0.259882, 0.543188, 0.952181, 0.853013, 0.382584, 0.279875, 0.337507, 0.790354, 0.140324, 0.00178103, 0.89052, 0.981606, 0.967049, 0.469, 0.270805, 0.892961, 0.441842, 0.258373, 0.2039, 0.750057, 0.132035, 0.970401, 0.245466, 0.277798, 0.621281, 0.256343, 0.289629, 0.214118, 0.733642, 0.226264, 0.223815, 0.933411, 0.0981471, 0.244844, 0.705185, 0.612502, 0.459645, 0.231061, 0.180095, 0.683949, 0.62148, 0.273106, 0.420026, 0.138759, 0.0501066, 0.21252, 0.694149, 0.119514, 0.974127, 0.571826, 0.0811982, 0.60171, 0.841024, 0.616945, 0.24451, 0.320739, 0.80335, 0.230985, 0.881866, 0.205061, 0.070866, 0.409333, 0.862503, 0.28329, 0.110689, 0.574015, 0.772147, 0.572682, 0.640735, 0.757784, 0.628274, 0.694181, 0.417125, 0.029089, 0.0211161, 0.759778, 0.699903, 0.505733, 0.346615, 0.165152, 0.966993, 0.974606, 0.358874, 0.21057, 0.596211, 0.428221, 0.695422, 
0.534236, 0.912104, 0.354949, 0.082759, 0.381062, 0.0662933, 0.515534, 0.170889, 0.215113, 0.279478, 0.507817, 0.842307, 0.341165, 0.0252222, 0.700538, 0.694553, 0.191996, 0.166709, 0.712132, 0.0224051, 0.77722, 0.930428, 0.373895, 0.895222, 0.994022, 0.657036, 0.172438, 0.784169, 0.419275, 0.791009, 0.777573, 0.39067, 0.624302, 0.359934, 0.612963, 0.706313, 0.111959, 0.0390335, 0.970666, 0.666958, 0.471559, 0.484552, 0.314426, 0.472257, 0.904414, 0.944772, 0.412989, 0.976617, 0.572601, 0.862967, 0.313269, 0.229798, 0.23277, 0.83076, 0.588171, 0.912323, 0.793586, 0.710813, 0.16191, 0.587905, 0.00747314, 0.634314, 0.98659, 0.0590751, 0.294304, 0.120698, 0.406781, 0.901869, 0.917492, 0.781419, 0.644313, 0.895011, 0.105034, 0.231572, 0.831534, 0.409516, 0.0972754, 0.128893, 0.0207831, 0.36514, 0.680242, 0.995298, 0.813521, 0.17838, 0.666933, 0.657365, 0.734068, 0.745543, 0.176226, 0.246687, 0.178081, 0.389388, 0.37581, 0.616925, 0.777974, 0.235395, 0.639782, 0.372604, 0.629282, 0.549173, 0.837367, 0.152404, 0.530772, 0.00985781, 0.240783, 0.159127, 0.586533, 0.742018, 0.102374, 0.570525, 0.414934, 0.944272, 0.3101, 0.7438, 0.424261, 0.982069, 0.159292, 0.594847, 0.481841, 0.350749, 0.940426, 0.593612, 0.749338, 0.238546, 0.779049, 0.494601, 0.111275, 0.494378, 0.428584, 0.296693, 0.53248, 0.153693, 0.088863, 0.529992, 0.136403, 0.222496, 0.0779707, 0.130267, 0.58797, 0.170469, 0.688227, 0.0272089, 0.476707, 0.51153, 0.197059, 0.205029, 0.107399, 0.631572, 0.218695, 0.145721, 0.562572, 0.639606, 0.919694, 0.277057, 0.473889, 0.553769, 0.806082, 0.533948, 0.982034, 0.575968, 0.44451, 0.0864928, 0.141039, 0.698115, 0.357452, 0.810979, 0.959997, 0.111074, 0.190409, 0.353476, 0.890042, 0.984756, 0.402445, 0.173425, 0.456301, 0.305938, 0.570571, 0.502218, 0.790741, 0.174554, 0.158844, 0.873969, 0.224072, 0.512449, 0.960338, 0.19284, 0.282728, 0.200631, 0.987078, 0.17059, 0.899308, 0.738105, 0.770904, 0.685526, 0.567407, 0.929639, 0.889502, 0.986333, 0.507396, 0.454575, 0.946174, 0.000128393, 0.97613, 0.247681, 0.223189, 0.130846, 0.503126, 0.00977749, 0.257345, 0.156397, 0.262276, 0.601337, 0.111028, 0.219943, 0.0214403, 0.879047, 0.974838, 0.526578, 0.995052, 0.573951, 0.738533, 0.745115, 0.576467, 0.478822, 0.345538, 0.952433, 0.429159, 0.965597, 0.0971296, 0.876261, 0.540817, 0.953571, 0.606756, 0.648853, 0.296477, 0.316441, 0.763382, 0.628519, 0.719841, 0.317647, 0.224754, 0.905354, 0.133716, 0.511366, 0.684844, 0.0750072, 0.60444, 0.197361, 0.058035, 0.362226, 0.92726, 0.435769, 0.405334, 0.15729, 0.747457, 0.11012, 0.923759, 0.631698, 0.529456, 0.480963, 0.997847, 0.0915321, 0.552318, 0.942094, 0.155205, 0.321001, 0.88759, 0.614815, 0.452906, 0.980156, 0.988733, 0.530218, 0.830658, 0.786017, 0.772253, 0.777924, 0.55863, 0.282608, 0.189867, 0.112464, 0.449778, 0.186788, 0.0719575, 0.451825, 0.634827, 0.408563, 0.28201, 0.590164, 0.624124, 0.290393, 0.411561, 0.505042, 0.226456, 0.100228, 0.155996, 0.200603, 0.626228, 0.824954, 0.629744, 0.544411, 0.311014, 0.862041, 0.427736, 0.539546, 0.845024, 0.316539, 0.93555, 0.0783485, 0.158411, 0.881053, 0.213352, 0.729507, 0.202396, 0.510654, 0.815964, 0.247689, 0.303486, 0.914875, 0.973396, 0.322303, 0.565204, 0.712887, 0.404316, 0.574151, 0.810808, 0.585602, 0.953374, 0.463926, 0.438665, 0.886745, 0.957678, 0.43447, 0.0935005, 0.0438857, 0.193685, 0.788244, 0.0893059, 0.969534, 0.0802372, 0.059717, 0.807384, 0.822331, 0.633693, 0.833317, 0.387917, 0.505188, 0.461769, 0.213152, 0.0342209, 0.325889, 0.138084, 0.559839, 0.70969, 0.366358, 0.295135, 
0.398068, 0.192014, 0.166762, 0.143056, 0.364799, 0.466608, 0.615921, 0.80872, 0.106005, 0.105853, 0.0739101, 0.880153, 0.411685, 0.31732, 0.959769, 0.154294, 0.756141, 0.63009, 0.141698, 0.49859, 0.42693, 0.739907, 0.0242807, 0.116376, 0.60273, 0.920794, 0.312465, 0.193958, 0.0546883, 0.277781, 0.954336, 0.12488, 0.454498, 0.989237, 0.179482, 0.0193954, 0.520011, 0.5112, 0.420542, 0.511111, 0.527766, 0.746887, 0.405658, 0.823256, 0.795496, 0.878712, 0.929097, 0.31443, 0.0562202, 0.542419, 0.365227, 0.621199, 0.390671, 0.842298, 0.0553187, 0.183106, 0.752693, 0.783371, 0.0239456, 0.401386, 0.6085, 0.487029, 0.414038, 0.106318, 0.587626, 0.600169, 0.863234, 0.564907, 0.878195, 0.858119, 0.800449, 0.304501, 0.349082, 0.85689, 0.240666, 0.914041, 0.920501, 0.0946677, 0.296726, 0.647105, 0.157593, 0.738604, 0.777838, 0.822456, 0.121005, 0.0835438, 0.00650387, 0.190033, 0.970416, 0.710227, 0.497362, 0.146237, 0.169611, 0.50219, 0.195827, 0.0658138, 0.917113, 0.693886, 0.617656, 0.668504, 0.0501968, 0.77622, 0.278322, 0.411264, 0.38033, 0.291897, 0.584225, 0.511621, 0.636816, 0.00325911, 0.552845, 0.487012, 0.295391, 0.351319, 0.330664, 0.504898, 0.268667, 0.273362, 0.158858, 0.342329, 0.346874, 0.586149, 0.415282, 0.367159, 0.281282, 0.394212, 0.649718, 0.701435, 0.575093, 0.987478, 0.543518, 0.60293, 0.585194, 0.0846587, 0.993948, 0.519088, 0.392095, 0.573498, 0.493422, 0.63178, 0.383882, 0.198191, 0.894283, 0.528548, 0.599863, 0.636292, 0.10397, 0.6732, 0.364287, 0.809858, 0.560692, 0.994631, 0.4384, 0.842604, 0.652818, 0.776153, 0.339616, 0.516514, 0.143501, 0.257708, 0.760231, 0.926892, 0.185963, 0.565086, 0.140491, 0.0228559, 0.00735798, 0.813714, 0.425085, 0.911598, 0.426543, 0.151993, 0.793393, 0.628017, 0.434025, 0.254041, 0.546806, 0.312753, 0.530425, 0.470881, 0.956417, 0.829595, 0.570299, 0.49159, 0.505353, 0.179936, 0.617646, 0.0823811, 0.132929, 0.5014, 0.290813, 0.957392, 0.797293, 0.304657, 0.727281, 0.333453, 0.641057, 0.111023, 0.0091141, 0.17541, 0.286548, 0.0893028, 0.00954593, 0.0441531, 0.880874, 0.831576, 0.216571, 0.339205, 0.411432, 0.815293, 0.69624, 0.636377, 0.0785996, 0.0102724, 0.591968, 0.496091, 0.847951, 0.609832, 0.0627377, 0.249463, 0.612134, 0.544395, 0.463748, 0.40182, 0.61142, 0.271422, 0.314641, 0.831937, 0.175664, 0.945095, 0.978263, 0.313427, 0.922018, 0.280094, 0.539594, 0.187436, 0.332567, 0.386781, 0.809014, 0.053213, 0.576812, 0.254369, 0.862251, 0.381249, 0.767665, 0.176679, 0.349803, 0.80332, 0.718744, 0.178139, 0.555278, 0.384449, 0.815755, 0.933792, 0.61947, 0.957829, 0.233526, 0.382213, 0.304502, 0.948054, 0.485502, 0.580438, 0.696949, 0.0937883, 0.728815, 0.676401, 0.305457, 0.157044, 0.136247, 0.626979, 0.563179, 0.188353, 0.454238, 0.288522, 0.755235, 0.866475, 0.979532, 0.91483, 0.601608, 0.74356, 0.904399, 0.814325, 0.334625, 0.440071, 0.349416, 0.156377, 0.22615, 0.208253, 0.275315, 0.94069, 0.297708, 0.801272, 0.236748, 0.785785, 0.139803, 0.163611, 0.788068, 0.120665, 0.10143, 0.964675, 0.807978, 0.375503, 0.719976, 0.0627234, 0.744448, 0.148521, 0.670968, 0.564889, 0.967662, 0.138617, 0.586044, 0.580505, 0.482541, 0.555217, 0.0987674, 0.111251, 0.162678, 0.231221, 0.0836387, 0.76811, 0.286222, 0.40711, 0.980047, 0.764288, 0.657563, 0.763509, 0.96356, 0.606738, 0.563903, 0.0720881, 0.306462, 0.556852, 0.976887, 0.995086, 0.70922, 0.112916, 0.321314, 0.944721, 0.212512, 0.796404, 0.935032, 0.656479, 0.265373, 0.671016, 0.0253296, 0.132415, 0.437462, 0.399299, 0.142063, 0.965011, 0.552034, 0.59333, 0.12919, 0.567946, 0.15816, 0.947786, 
0.462562, 0.496888, 0.734393, 0.598166, 0.00171491, 0.898537, 0.755027, 0.291732, 0.862137, 0.519213, 0.493989, 0.776933, 0.0761662, 0.613793, 0.493426, 0.159023, 0.235848, 0.589783, 0.332266, 0.849369, 0.651109, 0.127742, 0.29675, 0.273595, 0.687917, 0.442929, 0.931522, 0.561769, 0.0636195, 0.906886, 0.477365, 0.672938, 0.861644, 0.0969883, 0.0387793, 0.870088, 0.736888, 0.889562, 0.249274, 0.60556, 0.668005, 0.110343, 0.100921, 0.0862836, 0.703271, 0.629034, 0.485964, 0.417137, 0.393032, 0.0188766, 0.728669, 0.29715, 0.0372549, 0.670083, 0.984021, 0.919079, 0.565775, 0.355215, 0.207795, 0.278668, 0.466946, 0.344781, 0.250272, 0.400208, 0.399751, 0.590169, 0.789912, 0.85021, 0.12721, 0.851221, 0.182549, 0.319829, 0.0236746, 0.140023, 0.122837, 0.922413, 0.215876, 0.421003, 0.335499, 0.588046, 0.148991, 0.955343, 0.707926, 0.0355906, 0.383157, 0.115756, 0.781074, 0.624957, 0.852997, 0.819437, 0.688536, 0.540103, 0.00235164, 0.133666, 0.536905, 0.854726, 0.0536206, 0.0376954, 0.102685, 0.680753, 0.451366, 0.918893, 0.476345, 0.0675023, 0.760103, 0.0714962, 0.866499, 0.490291, 0.12542, 0.482161, 0.631525, 0.443632, 0.819449, 0.136307, 0.703275, 0.0271129, 0.47967, 0.38198, 0.592568, 0.934314, 0.716009, 0.832238, 0.786019, 0.555664, 0.00513192, 0.28334, 0.542588, 0.304002, 0.259903, 0.393253, 0.62914, 0.485006, 0.4082, 0.09165, 0.471402, 0.343116, 0.995312, 0.42106, 0.575573, 0.6356, 0.996344, 0.0922824, 0.0503404, 0.861474, 0.971098, 0.531618, 0.278897, 0.694201, 0.739111, 0.417865, 0.347274, 0.0560049, 0.776491, 0.29693, 0.352665, 0.851979, 0.462535, 0.925921, 0.635194, 0.749285, 0.474837, 0.0422809, 0.250704, 0.0542927, 0.960633, 0.504724, 0.71587, 0.512241, 0.579656, 0.274177, 0.59571, 0.0450952, 0.325116, 0.133309, 0.00706547, 0.0769446, 0.133744, 0.727747, 0.636273, 0.515981, 0.3346, 0.969261, 0.751505, 0.0726293, 0.090784, 0.992285, 0.297629, 0.156763, 0.11701, 0.798585, 0.153628, 0.876391, 0.60225, 0.0345976, 0.867972, 0.604693, 0.7169, 0.980062, 0.56994, 0.652737, 0.492232, 0.216526, 0.134844, 0.328194, 0.349324, 0.652535, 0.0983934, 0.267568, 0.476328, 0.940637, 0.618916, 0.645229, 0.564712, 0.361145, 0.719035, 0.7945, 0.35482, 0.832579, 0.395652, 0.961265, 0.444648, 0.569746, 0.842745, 0.608784, 0.506809, 0.333537, 0.789048, 0.228185, 0.0515458, 0.98826, 0.0079848, 0.912117, 0.650609, 0.193825, 0.409119, 0.290398, 0.529025, 0.485192, 0.665535, 0.513694, 0.305503, 0.431641, 0.155585, 0.349066, 0.194616, 0.0609608, 0.7737, 0.365058, 0.947661, 0.113823, 0.485191, 0.270988, 0.340072, 0.599013, 0.0691046, 0.0280304, 0.470698, 0.217867, 0.27945, 0.627204, 0.417909, 0.642555, 0.920911, 0.216709, 0.704156, 0.662401, 0.179275, 0.0822332, 0.169142, 0.382683, 0.0331443, 0.847433, 0.262524, 0.856625, 0.98401, 0.342673, 0.0682913, 0.0991218, 0.597256, 0.713984, 0.0756392, 0.156673, 0.741148, 0.529208, 0.550065, 0.956161, 0.209575, 0.621153, 0.0550376, 0.722213, 0.34307, 0.887517, 0.961557, 0.286025, 0.0360352, 0.963817, 0.57112, 0.792167, 0.874987, 0.936579, 0.458739, 0.221819, 0.0484225, 0.665387, 0.220781, 0.578058, 0.385384, 0.883892, 0.248328, 0.196923, 0.763805, 0.834794, 0.760918, 0.317117, 0.980345, 0.901547, 0.502061, 0.196929, 0.0528333, 0.189622, 0.67401, 0.194932, 0.819875, 0.626089, 0.99374, 0.724266, 0.00707075, 0.0384145, 0.377771, 0.958005, 0.739583, 0.0563767, 0.202651, 0.367577, 0.698997, 0.0820815, 0.882301, 0.616544, 0.0582296, 0.949367, 0.491598, 0.511587, 0.74732, 0.947975, 0.761361, 0.383735, 0.525491, 0.996303, 0.950501, 0.0831566, 0.885541, 0.450013, 0.010177, 0.825059, 
0.258228, 0.93181, 0.228712, 0.259199, 0.233653, 0.820137, 0.929304, 0.123893, 0.989354, 0.592356, 0.3745, 0.71626, 0.765341, 0.395999, 0.840106, 0.115831, 0.320817, 0.270444, 0.182095, 0.112124, 0.0783929, 0.499542, 0.30114, 0.596813, 0.468669, 0.889689, 0.478028, 0.543028, 0.272562, 0.595185, 0.667194, 0.60921, 0.29821, 0.26297, 0.66534, 0.865841, 0.12424, 0.908586, 0.934559, 0.156946, 0.96181, 0.342982, 0.0481719, 0.542999, 0.427977, 0.761714, 0.837432, 0.0189111, 0.334859, 0.763214, 0.959384, 0.00574281, 0.445965, 0.535224, 0.635295, 0.197553, 0.636564, 0.539617, 0.666011, 0.234288, 0.355765, 0.053203, 0.709216, 0.0888354, 0.475347, 0.464947, 0.202297, 0.797056, 0.392016, 0.936453, 0.308735, 0.502893, 0.675965, 0.940242, 0.757746, 0.555337, 0.534555, 0.0817575, 0.403504, 0.755465, 0.315231, 0.862395, 0.842581, 0.316213, 0.149369, 0.296974, 0.036262, 0.807712, 0.482911, 0.28549, 0.335535, 0.0817643, 0.636789, 0.404951, 0.469242, 0.308286, 0.25039, 0.237834, 0.586363, 0.133399, 0.886036, 0.776181, 0.465954, 0.0859602, 0.113114, 0.52154, 0.580957, 0.949964, 0.621371, 0.868865, 0.385465, 0.491927, 0.768945, 0.389671, 0.868688, 0.0854205, 0.549694, 0.139308, 0.424174, 0.0325483, 0.802686, 0.508515, 0.697088, 0.603539, 0.728966, 0.937695, 0.288556, 0.686755, 0.56678, 0.577736, 0.9149, 0.145155, 0.809056, 0.479846, 0.792542, 0.235605, 0.289845, 0.339546, 0.948304, 0.374576, 0.312527, 0.0241412, 0.0254658, 0.102198, 0.99887, 0.755931, 0.771815, 0.221894, 0.650446, 0.522338, 0.647594, 0.343701, 0.35394, 0.145472, 0.780817, 0.0928448, 0.920876, 0.0360954, 0.19768, 0.158536, 0.58765, 0.0930221, 0.478469, 0.80627, 0.61792, 0.367957, 0.749602, 0.54134, 0.431034, 0.0941319, 0.153414, 0.435797, 0.411997, 0.393457, 0.408773, 0.253482, 0.136788, 0.479941, 0.698831, 0.440438, 0.267989, 0.324515, 0.345774, 0.647049, 0.474003, 0.432191, 0.668224, 0.314087, 0.903054, 0.920958, 0.76844, 0.13128, 0.392562, 0.620915, 0.252017, 0.880444, 0.556165, 0.011176, 0.899954, 0.0411076, 0.0495049, 0.250841, 0.404541, 0.358772, 0.0987615, 0.718135, 0.475056, 0.344506, 0.933108, 0.610929, 0.0210743, 0.761055, 0.583257, 0.285506, 0.963131, 0.714188, 0.650956, 0.917127, 0.718439, 0.487927, 0.494666, 0.502551, 0.319778, 0.818145, 0.0373288, 0.500038, 0.643328, 0.343029, 0.0312736, 0.439563, 0.516751, 0.418804, 0.404099, 0.799441, 0.383659, 0.497928, 0.0884743, 0.834432, 0.448695, 0.174522, 0.969026, 0.0567128, 0.811041, 0.436852, 0.669133, 0.956817, 0.934233, 0.999377, 0.46376, 0.904531, 0.990048, 0.844536, 0.344221, 0.327283, 0.672179, 0.447807, 0.629699, 0.468759, 0.552807, 0.439826, 0.27493, 0.71894, 0.434387, 0.745172, 0.958573, 0.281854, 0.730554, 0.601443, 0.591778, 0.784222, 0.380494, 0.776479, 0.745254, 0.282708, 0.171382, 0.0901646, 0.580693, 0.0139402, 0.225617, 0.351103, 0.132169, 0.420177, 0.346412, 0.169704, 0.348524, 0.747227, 0.216859, 0.496962, 0.795404, 0.645949, 0.362256, 0.941042, 0.965377, 0.857509, 0.0768885, 0.00932251, 0.683445, 0.511655, 0.00601335, 0.120911, 0.161513, 0.211679, 0.787344, 0.874349, 0.717985, 0.502714, 0.166127, 0.265323, 0.702488, 0.680595, 0.834315, 0.033373, 0.945105, 0.688605, 0.733733, 0.799686, 0.110531, 0.957362, 0.872062, 0.537134, 0.594211, 0.543484, 0.559464, 0.664876, 0.121834, 0.208257, 0.623329, 0.231384, 0.351651, 0.28177, 0.677623, 0.159049, 0.0789706, 0.649168, 0.708874, 0.429019, 0.806167, 0.322563, 0.856619, 0.0595638, 0.955667, 0.877296, 0.63245, 0.554974, 0.997797, 0.655268, 0.0881875, 0.774129, 0.228393, 0.280366, 0.840425, 0.288769, 0.137836, 0.822214, 0.812979, 
0.268801, 0.431051, 0.743164, 0.449795, 0.0280965, 0.813509, 0.753704, 0.752745, 0.40483, 0.209138, 0.136398, 0.547444, 0.952821, 0.145824, 0.921534, 0.675708, 0.0968346, 0.429042, 0.90069, 0.880692, 0.660973, 0.762841, 0.043475, 0.548615, 0.351768, 0.487706, 0.837525, 0.858942, 0.426968, 0.693482, 0.834067, 0.178751, 0.916725, 0.312368, 0.952488, 0.336711, 0.571418, 0.886898, 0.640941, 0.1806, 0.55419, 0.590488, 0.83163, 0.14007, 0.403142, 0.887247, 0.342949, 0.88435, 0.171615, 0.209037, 0.268949, 0.0488915, 0.431707, 0.229817, 0.641149, 0.00553621, 0.0360574, 0.35453, 0.904074, 0.495435, 0.780787, 0.196379, 0.810491, 0.837112, 0.189245, 0.359259, 0.275542, 0.299573, 0.220301, 0.866422, 0.507245, 0.487767, 0.243448, 0.698851, 0.588256, 0.0844826, 0.907929, 0.819771, 0.342594, 0.215029, 0.41934, 0.00178745, 0.273484, 0.364893, 0.236331, 0.574799, 0.557957, 0.481694, 0.426242, 0.406354, 0.286845, 0.293724, 0.178113, 0.00349964, 0.932298, 0.545826, 0.777305, 0.10292, 0.167152, 0.30941, 0.785231, 0.0760898, 0.625054, 0.589052, 0.847903, 0.111181, 0.177771, 0.500524, 0.842363, 0.114009, 0.612185, 0.878975, 0.955185, 0.399446, 0.0946839, 0.540699, 0.915803, 0.442323, 0.56911, 0.55533, 0.401268, 0.284556, 0.101289, 0.983081, 0.835117, 0.606981, 0.372049, 0.496544, 0.226489, 0.279214, 0.914644, 0.937939, 0.281129, 0.389272, 0.968187, 0.635215, 0.487692, 0.909698, 0.844426, 0.1477, 0.226635, 0.521511, 0.370946, 0.150209, 0.729102, 0.367415, 0.761533, 0.615057, 0.692156, 0.667996, 0.0963341, 0.914547, 0.691541, 0.151564, 0.945305, 0.0662834, 0.366292, 0.551977, 0.291646, 0.679143, 0.851856, 0.791583, 0.503778, 0.023168, 0.412157, 0.247637, 0.912395, 0.00945087, 0.758841, 0.376842, 0.467647, 0.404122, 0.276024, 0.202679, 0.829769, 0.0542804, 0.265896, 0.631932, 0.761238, 0.277217, 0.316484, 0.497037, 0.117953, 0.268658, 0.438275, 0.908489, 0.463122, 0.633023, 0.00544212, 0.53295, 0.127467, 0.784857, 0.871983, 0.454343, 0.13975, 0.279066, 0.334803, 0.446106, 0.897049, 0.351662, 0.51381, 0.321255, 0.350486, 0.101246, 0.54997, 0.183608, 0.305128, 0.233735, 0.822038, 0.771658, 0.200972, 0.40196, 0.562487, 0.108847, 0.718786, 0.0755256, 0.961698, 0.585156, 0.970544, 0.361813, 0.987947, 0.344862, 0.964177, 0.014727, 0.0226188, 0.807721, 0.137678, 0.148848, 0.662738, 0.741941, 0.190003, 0.997506, 0.717802, 0.00164524, 0.200278, 0.512166, 0.930637, 0.228209, 0.220154, 0.441296, 0.425959, 0.693154, 0.421103, 0.106279, 0.521333, 0.347792, 0.023648, 0.67607, 0.418503, 0.139156, 0.650666, 0.168607, 0.539383, 0.793094, 0.177687, 0.295956, 0.926763, 0.126443, 0.478607, 0.199744, 0.936475, 0.710082, 0.088558, 0.223505, 0.853099, 0.271153, 0.934545, 0.604885, 0.523008, 0.430345, 0.632033, 0.676408, 0.492389, 0.494581, 0.315705, 0.407379, 0.417697, 0.675943, 0.168078, 0.382832, 0.379915, 0.269397, 0.761822, 0.981925, 0.146796, 0.799504, 0.381602, 0.705672, 0.422183, 0.210244, 0.0850634, 0.582337, 0.934588, 0.466793, 0.6366, 0.826037, 0.574077, 0.553228, 0.924906, 0.922825, 0.66238, 0.162232, 0.224305, 0.0519152, 0.855643, 0.366552, 0.152796, 0.00665957, 0.915834, 0.826795, 0.978109, 0.69574, 0.190423, 0.850852, 0.192516, 0.0265149, 0.523871, 0.855658, 0.277339, 0.476368, 0.0804248, 0.892567, 0.533843, 0.422648, 0.909866, 0.12791, 0.597845, 0.625991, 0.927966, 0.319578, 0.139636, 0.392265, 0.104043, 0.931437, 0.0622179, 0.616482, 0.184848, 0.301881, 0.633924, 0.848496, 0.011613, 0.558658, 0.742868, 0.176664, 0.498611, 0.744262, 0.0285374, 0.418136, 0.825301, 0.42586, 0.974819, 0.719124, 0.433044, 0.870553, 0.627664, 
0.854485, 0.518249, 0.590694, 0.704523, 0.450934, 0.0700942, 0.0874604, 0.337147, 0.492185, 0.8885, 0.621986, 0.168248, 0.837084, 0.944791, 0.719689, 0.927206, 0.844932, 0.989313, 0.724708, 0.653446, 0.365233, 0.0451572, 0.432647, 0.165102, 0.795004, 0.126218, 0.239627, 0.0155477, 0.935287, 0.989966, 0.30067, 0.337323, 0.612089, 0.935274, 0.847273, 0.291589, 0.726199, 0.768206, 0.872156, 0.87183, 0.354196, 0.634677, 0.445589, 0.397467, 0.869365, 0.347935, 0.638584, 0.508064, 0.0356273, 0.640255, 0.195039, 0.121921, 0.170327, 0.236788, 0.525543, 0.380477, 0.640126, 0.966145, 0.0275385, 0.483208, 0.380622, 0.621541, 0.642745, 0.557541, 0.822002, 0.303046, 0.416204, 0.779118, 0.387479, 0.96399, 0.464379, 0.864708, 0.614493, 0.801388, 0.146563, 0.713396, 0.0371329, 0.746244, 0.641826, 0.108604, 0.364927, 0.363469, 0.0106725, 0.823084, 0.851167, 0.273869, 0.227434, 0.435503, 0.0165272, 0.630164, 0.896852, 0.773881, 0.0413545, 0.558784, 0.483894, 0.774959, 0.0977034, 0.135825, 0.728994, 0.00489014, 0.599283, 0.261376, 0.341713, 0.687273, 0.203976, 0.152216, 0.63596, 0.821107, 0.599782, 0.0429922, 0.175607, 0.691483, 0.571165, 0.123619, 0.338721, 0.41125, 0.827582, 0.259272, 0.954404, 0.711268, 0.158892, 0.723477, 0.26951, 0.709728, 0.938119, 0.832301, 0.737552, 0.519661, 0.416426, 0.882457, 0.453152, 0.115849, 0.271428, 0.0334407, 0.936773, 0.508819, 0.555718, 0.377546, 0.517199, 0.864394, 0.0810964, 0.797491, 0.349751, 0.662266, 0.46162, 0.619854, 0.354706, 0.628023, 0.671643, 0.000773542, 0.846307, 0.542696, 0.592178, 0.0777423, 0.634771, 0.682495, 0.261238, 0.508481, 0.984641, 0.959791, 0.741369, 0.32138, 0.535256, 0.847494, 0.86677, 0.8867, 0.432765, 0.760854, 0.158056, 0.991061, 0.737626, 0.526399, 0.923892, 0.541165, 0.65592, 0.179625, 0.726484, 0.498102, 0.733978, 0.228333, 0.193556, 0.189515, 0.240356, 0.232233, 0.507678, 0.345721, 0.324308, 0.790667, 0.946143, 0.993343, 0.137192, 0.0727121, 0.716761, 0.691615, 0.478477, 0.394535, 0.115059, 0.474343, 0.762372, 0.0948848, 0.196792, 0.492499, 0.29865, 0.455808, 0.484853, 0.835931, 0.895271, 0.305977, 0.382993, 0.87606, 0.602305, 0.775395, 0.127797, 0.927046, 0.506469, 0.495949, 0.40417, 0.379146, 0.423219, 0.0446891, 0.398602, 0.689065, 0.847912, 0.550092, 0.89121, 0.00920063, 0.576135, 0.0883344, 0.592445, 0.956837, 0.828345, 0.174842, 0.807681, 0.429489, 0.505379, 0.807906, 0.536595, 0.478865, 0.0814109, 0.490502, 0.121416, 0.800612, 0.391844, 0.954796, 0.231231, 0.0557827, 0.887462, 0.998108, 0.25508, 0.83718, 0.405546, 0.906978, 0.235831, 0.574562, 0.2957, 0.188627, 0.185206, 0.819698, 0.693555, 0.442004, 0.169963, 0.426534, 0.425901, 0.543655, 0.969529, 0.933518, 0.145669, 0.759227, 0.915676, 0.273991, 0.729098, 0.31145, 0.0192084, 0.511204, 0.77779, 0.25558, 0.464314, 0.273419, 0.727192, 0.134382, 0.976244, 0.775359, 0.518023, 0.189192, 0.803747, 0.211738, 0.600633, 0.851521, 0.584827, 0.44586, 0.901889, 0.310346, 0.12561, 0.979792, 0.540059, 0.307323, 0.359533, 0.790244, 0.672675, 0.0738207, 0.945995, 0.354888, 0.714272, 0.614953, 0.580057, 0.0454896, 0.773029, 0.276525, 0.400251, 0.777706, 0.216194, 0.731417, 0.391679, 0.900286, 0.541262, 0.0339563, 0.392093, 0.702863, 0.434141, 0.942108, 0.110307, 0.927712, 0.777655, 0.119819, 0.400947, 0.535523, 0.279621, 0.994394, 0.496025, 0.45035, 0.737689, 0.304766, 0.789883, 0.691568, 0.0935304, 0.915671, 0.72731, 0.729875, 0.649525, 0.141318, 0.264691, 0.311853, 0.843073, 0.0702589, 0.689202, 0.469382, 0.0296724, 0.786056, 0.983078, 0.0617036, 0.888933, 0.608236, 0.338423, 0.862757, 
0.259319, 0.711308, 0.48213, 0.483573, 0.73203, 0.599131, 0.427597, 0.449767, 0.662143, 0.142532, 0.74296, 0.355283, 0.5316, 0.937622, 0.38181, 0.601194, 0.93952, 0.299073, 0.0300616, 0.636648, 0.564301, 0.0981288, 0.830984, 0.0247317, 0.70846, 0.443461, 0.168187, 0.751938, 0.0515943, 0.191639, 0.940248, 0.46713, 0.00027386, 0.688898, 0.138161, 0.815352, 0.645526, 0.728443, 0.774305, 0.284412, 0.112302, 0.824175, 0.709396, 0.473611, 0.546887, 0.133892, 0.615861, 0.408212, 0.343921, 0.503693, 0.823462, 0.00885317, 0.734236, 0.777598, 0.708333, 0.4036, 0.253607, 0.435411, 0.100474, 0.110289, 0.217749, 0.515388, 0.633986, 0.024931, 0.637958, 0.735677, 0.934728, 0.493779, 0.16506, 0.453233, 0.587303, 0.14519, 0.482511, 0.474612, 0.402911, 0.471623, 0.00747627, 0.0555732, 0.977537, 0.412581, 0.394904, 0.80255, 0.977978, 0.0657417, 0.644969, 0.842881, 0.30897, 0.263598, 0.626181, 0.459894, 0.621051, 0.0663734, 0.00782845, 0.634455, 0.798101, 0.760185, 0.452868, 0.627108, 0.755537, 0.630484, 0.575739, 0.637636, 0.916605, 0.285008, 0.0181095, 0.718673, 0.279337, 0.520797, 0.326475, 0.0360228, 0.0481964, 0.0094155, 0.624682, 0.978486, 0.300725, 0.560873, 0.857252, 0.898491, 0.0261796, 0.935442, 0.00226439, 0.369742, 0.901717, 0.199691, 0.49845, 0.724823, 0.539641, 0.541844, 0.436235, 0.217045, 0.480357, 0.505569, 0.8483, 0.966082, 0.244648, 0.378249, 0.188199, 0.795171, 0.945042, 0.190996, 0.607762, 0.107782, 0.77143, 0.564195, 0.895802, 0.281813, 0.214475, 0.424026, 0.948014, 0.630798, 0.936923, 0.884403, 0.409745, 0.585478, 0.793767, 0.476362, 0.379856, 0.717676, 0.412765, 0.276357, 0.990492, 0.272063, 0.178426, 0.695097, 0.336698, 0.993225, 0.988549, 0.557652, 0.65713, 0.293518, 0.208687, 0.172545, 0.96807, 0.934269, 0.230664, 0.896686, 0.314386, 0.527961, 0.462698, 0.797798, 0.151281, 0.638963, 0.295506, 0.349582, 0.883782, 0.672516, 0.621829, 0.285226, 0.759997, 0.757289, 0.543576, 0.0625032, 0.850267, 0.557823, 0.772413, 0.138459, 0.373712, 0.480045, 0.030689, 0.641374, 0.782408, 0.168057, 0.360421, 0.199623, 0.115377, 0.813596, 0.678358, 0.342444, 0.26582, 0.00591525, 0.283163, 0.407682, 0.785782, 0.0977254, 0.160728, 0.762813, 0.573479, 0.898984, 0.0405639, 0.420568, 0.275309, 0.623229, 0.341443, 0.765046, 0.864441, 0.27867, 0.0692433, 0.062564, 0.649539, 0.422946, 0.851217, 0.947537, 0.350746, 0.673897, 0.720034, 0.00890734, 0.963641, 0.852696, 0.550347, 0.0843111, 0.258905, 0.0346203, 0.0211018, 0.717017, 0.457505, 0.54584, 0.483439, 0.176523, 0.464043, 0.408288, 0.221654, 0.997748, 0.920986, 0.234864, 0.784818, 0.786941, 0.382804, 0.0270011, 0.241091, 0.106924, 0.40364, 0.447491, 0.366943, 0.796474, 0.186059, 0.503018, 0.735243, 0.903425, 0.975389, 0.678276, 0.495448, 0.604736, 0.197145, 0.961042, 0.647923, 0.686651, 0.0193707, 0.452604, 0.327127, 0.91481, 0.903362, 0.53074, 0.854814, 0.581991, 0.317361, 0.229803, 0.48692, 0.63633, 0.987766, 0.714006, 0.230898, 0.650067, 0.547455, 0.541696, 0.0752586, 0.713999, 0.245399, 0.231803, 0.582825, 0.710767, 0.593871, 0.131555, 0.900225, 0.519546, 0.485698, 0.805788, 0.956696, 0.109453, 0.633426, 0.0845869, 0.178138, 0.217805, 0.507283, 0.688616, 0.559781, 0.134245, 0.854137, 0.854356, 0.683975, 0.685215, 0.370101, 0.692283, 0.692915, 0.857771, 0.199484, 0.308744, 0.539065, 0.769612, 0.0965434, 0.412466, 0.584648, 0.56389, 0.902638, 0.255939, 0.599727, 0.72638, 0.805014, 0.733429, 0.0323149, 0.357784, 0.859215, 0.841399, 0.0476339, 0.354678, 0.373714, 0.268403, 0.633773, 0.643707, 0.431396, 0.356627, 0.319686, 0.884927, 0.43825, 0.123213, 
0.917867, 0.887923, 0.476999, 0.320888, 0.850616, 0.460518, 0.984096, 0.708742, 0.602617, 0.242342, 0.0822375, 0.769586, 0.859953, 0.986874, 0.271621, 0.381774, 0.580256, 0.806828, 0.653448, 0.175783, 0.720684, 0.604432, 0.810283, 0.82537, 0.777029, 0.190185, 0.881346, 0.123456, 0.4169, 0.312984, 0.71109, 0.379802, 0.702901, 0.91573, 0.138463, 0.893744, 0.904974, 0.148429, 0.868553, 0.021413, 0.325659, 0.321182, 0.458394, 0.9185, 0.657111, 0.391165, 0.423989, 0.692714, 0.204703, 0.548315, 0.337581, 0.597745, 0.118339, 0.58786, 0.27224, 0.377868, 0.518281, 0.669602, 0.114614, 0.453017, 0.65691, 0.260958, 0.429086, 0.919754, 0.632038, 0.464322, 0.427757, 0.390527, 0.698182, 0.529399, 0.535967, 0.860006, 0.86582, 0.87482, 0.414901, 0.828315, 0.790706, 0.584447, 0.137916, 0.0666782, 0.626311, 0.339679, 0.806634, 0.664283, 0.893367, 0.192582, 0.0922133, 0.0631515, 0.975556, 0.796498, 0.135178, 0.537959, 0.78077, 0.663352, 0.0141877, 0.825989, 0.0777304, 0.948731, 0.420816, 0.811672, 0.666337, 0.142219, 0.604513, 0.880968, 0.369619, 0.720551, 0.950459, 0.203459, 0.254215, 0.187391, 0.872013, 0.540719, 0.835673, 0.760819, 0.801522, 0.537615, 0.469476, 0.200005, 0.383686, 0.903821, 0.719678, 0.643503, 0.742222, 0.660802, 0.622005, 0.115279, 0.80868, 0.410083, 0.602116, 0.196751, 0.695222, 0.623775, 0.424857, 0.0827443, 0.302176, 0.818945, 0.54384, 0.937174, 0.66109, 0.218779, 0.231331, 0.745365, 0.698025, 0.99938, 0.836967, 0.741806, 0.577539, 0.34268, 0.863178, 0.116269, 0.904559, 0.155217, 0.703401, 0.170877, 0.254972, 0.819891, 0.728599, 0.7314, 0.0494436, 0.395706, 0.750079, 0.510133, 0.131256, 0.706587, 0.716959, 0.672644, 0.572409, 0.134055, 0.8016, 0.948409, 0.685077, 0.883509, 0.42391, 0.0802641, 0.396451, 0.680006, 0.796024, 0.737545, 0.779766, 0.874185, 0.185475, 0.224181, 0.726417, 0.262177, 0.983726, 0.998792, 0.601068, 0.0632906, 0.266559, 0.994812, 0.368607, 0.338329, 0.857998, 0.394432, 0.00395892, 0.403091, 0.975504, 0.23115, 0.882397, 0.817858, 0.40079, 0.304486, 0.19727, 0.0403942, 0.84796, 0.121088, 0.0914703, 0.970986, 0.187894, 0.260465, 0.0981842, 0.129235, 0.0053786, 0.202048, 0.431041, 0.182747, 0.118168, 0.327375, 0.134632, 0.0938664, 0.541778, 0.893052, 0.677389, 0.60834, 0.104955, 0.455115, 0.462641, 0.21083, 0.554428, 0.513763, 0.414927, 0.152386, 0.137165, 0.304633, 0.785922, 0.0616525, 0.872498, 0.526546, 0.243628, 0.59958, 0.763127, 0.392787, 0.239071, 0.497765, 0.938576, 0.758766, 0.105476, 0.583509, 0.107149, 0.072246, 0.0686736, 0.491876, 0.222793, 0.242647, 0.392899, 0.134979, 0.0411305, 0.521578, 0.297438, 0.809898, 0.0138163, 0.470257, 0.904504, 0.0581343, 0.797919, 0.116889, 0.775738, 0.835381, 0.353088, 0.0392369, 0.0841973, 0.620153, 0.839601, 0.659522, 0.698976, 0.210228, 0.0302424, 0.397398, 0.800551, 0.427025, 0.15325, 0.0813214, 0.433884, 0.0652032, 0.804778, 0.296914, 0.0291595, 0.437946, 0.131204, 0.29828, 0.220672, 0.467927, 0.177905, 0.290324, 0.0388322, 0.542294, 0.776009, 0.824298, 0.735709, 0.153466, 0.400214, 0.708594, 0.453441, 0.594539, 0.692301, 0.488494, 0.45268, 0.854562, 0.415268, 0.613101, 0.717661, 0.668751, 0.835263, 0.500444, 0.293188, 0.446861, 0.261365, 0.618792, 0.839053, 0.147956, 0.556072, 0.757753, 0.455075, 0.869194, 0.273371, 0.8442, 0.0187834, 0.650478, 0.766219, 0.00604557, 0.799275, 0.480036, 0.342347, 0.804639, 0.109157, 0.700395, 0.144913, 0.0896981, 0.104342, 0.996452, 0.802804, 0.904757, 0.267159, 0.268582, 0.257929, 0.64107, 0.0970939, 0.139546, 0.79182, 0.587922, 0.820792, 0.710431, 0.813237, 0.237078, 0.844224, 
0.844752, 0.775276, 0.279869, 0.527013, 0.00486709, 0.29619, 0.0297883, 0.596303, 0.617605, 0.795947, 0.97924, 0.77399, 0.800557, 0.621752, 0.24925, 0.00137245, 0.889115, 0.296036, 0.690818, 0.0984665, 0.733391, 0.993227, 0.00142601, 0.0876534, 0.626902, 0.783111, 0.391165, 0.348762, 0.0616425, 0.584172, 0.278297, 0.186677, 0.313632, 0.630907, 0.659842, 0.000195905, 0.0848633, 0.994714, 0.252779, 0.282139, 0.743638, 0.171873, 0.661953, 0.248161, 0.717851, 0.760843, 0.345918, 0.862501, 0.671703, 0.64076, 0.693063, 0.366835, 0.968806, 0.804512, 0.301244, 0.328024, 0.874884, 0.47607, 0.00138562, 0.956339, 0.654877, 0.213509, 0.301763, 0.66921, 0.327652, 0.0984915, 0.461783, 0.308599, 0.526057, 0.261795, 0.490774, 0.711595, 0.120919, 0.423112, 0.529085, 0.460559, 0.346745, 0.813399, 0.210967, 0.871596, 0.437, 0.0797882, 0.319964, 0.453486, 0.312109, 0.616004, 0.717271, 0.497556, 0.696008, 0.325173, 0.998081, 0.0344036, 0.470713, 0.595906, 0.503973, 0.768447, 0.0669863, 0.75852, 0.0691862, 0.904392, 0.526621, 0.454254, 0.282096, 0.61511, 0.520861, 0.929658, 0.316033, 0.122634, 0.778338, 0.781493, 0.699447, 0.374198, 0.300907, 0.908406, 0.420848, 0.292415, 0.365145, 0.260794, 0.827939, 0.0225802, 0.885312, 0.371748, 0.0942798, 0.163306, 0.345233, 0.584805, 0.00530463, 0.543839, 0.686881, 0.731238, 0.202015, 0.766038, 0.817129, 0.759973, 0.74491, 0.249627, 0.33974, 0.86768, 0.873101, 0.538603, 0.897788, 0.488417, 0.803839, 0.60539, 0.801472, 0.546895, 0.896886, 0.722128, 0.337904, 0.664771, 0.411652, 0.00687161, 0.162172, 0.883211, 0.82153, 0.814886, 0.45111, 0.0546196, 0.859811, 0.000270441, 0.793295, 0.873731, 0.942733, 0.186087, 0.70483, 0.963201, 0.600287, 0.455936, 0.540894, 0.988648, 0.849189, 0.708664, 0.0118315, 0.598502, 0.332059, 0.688875, 0.307857, 0.850157, 0.0282664, 0.142987, 0.630389, 0.627275, 0.247403, 0.324155, 0.990004, 0.982975, 0.0650682, 0.890491, 0.694133, 0.0698983, 0.00596018, 0.186113, 0.955229, 0.474459, 0.587411, 0.105645, 0.634679, 0.578154, 0.789058, 0.221689, 0.317176, 0.513465, 0.949536, 0.187694, 0.720225, 0.637694, 0.564973, 0.863555, 0.656051, 0.425104, 0.72279, 0.0150462, 0.769399, 0.995999, 0.383894, 0.884713, 0.82651, 0.133181, 0.343152, 0.432729, 0.379639, 0.797775, 0.577667, 0.993345, 0.715517, 0.0591047, 0.0951223, 0.291318, 0.567864, 0.611515, 0.326818, 0.0252071, 0.0706829, 0.550466, 0.229062, 0.368205, 0.0114132, 0.726769, 0.995834, 0.00206917, 0.361047, 0.229185, 0.704359, 0.708189, 0.763087, 0.418145, 0.166241, 0.467016, 0.199175, 0.749868, 0.230697, 0.919803, 0.173331, 0.703199, 0.238424, 0.36542, 0.733332, 0.865953, 0.406924, 0.376155, 0.504744, 0.324332, 0.795637, 0.876924, 0.150811, 0.284575, 0.606989, 0.668304, 0.773779, 0.400718, 0.461116, 0.0356158, 0.828637, 0.230266, 0.933551, 0.452046, 0.23424, 0.147727, 0.584319, 0.246715, 0.778469, 0.751298, 0.142385, 0.497668, 0.820541, 0.520754, 0.635979, 0.995609, 0.777778, 0.73501, 0.635293, 0.753446, 0.382544, 0.432099, 0.609674, 0.60688, 0.357035, 0.241933, 0.270505, 0.919522, 0.155695, 0.92069, 0.413791, 0.170884, 0.459599, 0.287268, 0.376801, 0.303515, 0.170855, 0.0423617, 0.842472, 0.495546, 0.404054, 0.0946646, 0.275245, 0.957405, 0.659572, 0.330991, 0.217615, 0.259105, 0.365855, 0.623508, 0.358096, 0.366259, 0.115222, 0.683045, 0.116525, 0.418933, 0.73935, 0.046771, 0.626346, 0.544816, 0.206997, 0.136698, 0.50891, 0.465636, 0.973117, 0.0879731, 0.496635, 0.0528215, 0.256656, 0.150501, 0.172589, 0.446358, 0.668215, 0.281295, 0.639564, 0.600982, 0.689498, 0.172988, 0.311158, 0.596166, 0.914397, 
0.691011, 0.828275, 0.751661, 0.126823, 0.595763, 0.476016, 0.438493, 0.85836, 0.055756, 0.1724, 0.354587, 0.36832, 0.0914993, 0.370013, 0.65743, 0.313155, 0.369982, 0.760313, 0.830683, 0.453527, 0.085066, 0.0648175, 0.732424, 0.986999, 0.77105, 0.173702, 0.0774072, 0.787711, 0.479358, 0.0513305, 0.439819, 0.868344, 0.730487, 0.293238, 0.420022, 0.124757, 0.0971248, 0.740998, 0.257308, 0.354677, 0.752762, 0.368821, 0.825613, 0.237673, 0.650889, 0.779095, 0.680979, 0.954989, 0.477473, 0.29367, 0.709763, 0.449627, 0.312359, 0.236185, 0.0620183, 0.0817302, 0.675886, 0.727033, 0.803278, 0.919081, 0.153602, 0.185638, 0.059088, 0.635481, 0.681467, 0.955608, 0.584436, 0.267903, 0.972799, 0.970777, 0.668493, 0.0944233, 0.904109, 0.32312, 0.523612, 0.38511, 0.668045, 0.368678, 0.391273, 0.497355, 0.0173796, 0.171289, 0.111696, 0.894711, 0.456682, 0.11279, 0.0153473, 0.736969, 0.660566, 0.0133941, 0.419917, 0.526311, 0.7516, 0.899376, 0.842206, 0.652725, 0.504881, 0.937657, 0.105858, 0.898005, 0.181238, 0.511456, 0.573173, 0.51929, 0.648309, 0.20973, 0.354556, 0.375805, 0.188542, 0.0110647, 0.960757, 0.050705, 0.0815619, 0.782723, 0.314808, 0.40585, 0.0328103, 0.456401, 0.222684, 0.266882, 0.679273, 0.116498, 0.105427, 0.375314, 0.886496, 0.133358, 0.58684, 0.983258, 0.105889, 0.234651, 0.525368, 0.659181, 0.193151, 0.781094, 0.975721, 0.75392, 0.539964, 0.574357, 0.389485, 0.00738067, 0.949821, 0.276828, 0.0109401, 0.10862, 0.744617, 0.526253, 0.331845, 0.613306, 0.260269, 0.129338, 0.171237, 0.00535067, 0.252849, 0.115543, 0.656546, 0.380222, 0.669431, 0.696468, 0.0788784, 0.269542, 0.731211, 0.891437, 0.686094, 0.643402, 0.946833, 0.852984, 0.855147, 0.0741065, 0.735505, 0.628903, 0.164329, 0.828951, 0.93577, 0.416256, 0.720396, 0.820992, 0.68111, 0.675366, 0.635796, 0.0421593, 0.558648, 0.397164, 0.841473, 0.357881, 0.101455, 0.625198, 0.317364, 0.0309509, 0.949349, 0.429982, 0.740296, 0.83634, 0.431143, 0.551933, 0.669405, 0.989428, 0.520325, 0.0122792, 0.347439, 0.792887, 0.14999, 0.835497, 0.0626093, 0.780253, 0.353592, 0.0868309, 0.851044, 0.532502, 0.290523, 0.0931025, 0.0303709, 0.286026, 0.347305, 0.710572, 0.868018, 0.382781, 0.672354, 0.422583, 0.267142, 0.0534792, 0.626161, 0.709761, 0.146052, 0.0549357, 0.25044, 0.545147, 0.817305, 0.528971, 0.214889, 0.933382, 0.677984, 0.212322, 0.617944, 0.200769, 0.63663, 0.473708, 0.843618, 0.0158016, 0.789755, 0.369974, 0.129889, 0.6002, 0.0639386, 0.26817, 0.192894, 0.47349, 0.128915, 0.531, 0.836731, 0.00176826, 0.171285, 0.301016, 0.491037, 0.684083, 0.442135, 0.62612, 0.334639, 0.630786, 0.108427, 0.922433, 0.211707, 0.934806, 0.353681, 0.846479, 0.224087, 0.0387647, 0.912567, 0.352177, 0.639788, 0.85912, 0.580208, 0.209145, 0.648709, 0.780819, 0.276763, 0.570481, 0.92891, 0.360862, 0.313299, 0.9281, 0.641601, 0.531437, 0.611739, 0.348233, 0.328984, 0.460753, 0.786445, 0.561262, 0.586937, 0.914711, 0.295035, 0.522733, 0.178518, 0.193203, 0.067031, 0.0952899, 0.44979, 0.451621, 0.300487, 0.726786, 0.534888, 0.156079, 0.180306, 0.568849, 0.609314, 0.63499, 0.75209, 0.641635, 0.791956, 0.674595, 0.0351817, 0.683243, 0.10263, 0.903683, 0.7759, 0.886257, 0.625032, 0.827247, 0.297826, 0.00176633, 0.619953, 0.464783, 0.921148, 0.67926, 0.528511, 0.577298, 0.163977, 0.264389, 0.789901, 0.0141889, 0.532587, 0.236263, 0.129726, 0.605237, 0.583784, 0.0723834, 0.738629, 0.687056, 0.0506192, 0.2465, 0.958154, 0.0971148, 0.78452, 0.985721, 0.310138, 0.540363, 0.168563, 0.881159, 0.547955, 0.194954, 0.0955894, 0.879779, 0.438003, 0.672806, 0.924577, 
[elided: 20 rows of comma-separated pseudo-random floating-point values in [0, 1) — randomly generated test tensor/fixture data carried verbatim in the repository diff; no file name, shape, or other structure is recoverable from this span]
0.750651, 0.595328, 0.163743, 0.760417, 0.69372, 0.74179, 0.950827, 0.20372, 0.085737, 0.812048, 0.852176, 0.906627, 0.380317, 0.00976704, 0.00843117, 0.333703, 0.718039, 0.585602, 0.119216, 0.085977, 0.218696, 0.479175, 0.981356, 0.403643, 0.195815, 0.470633, 0.599814, 0.642532, 0.0590197, 0.190236, 0.437582, 0.656899, 0.910241, 0.702452, 0.58384, 0.80942, 0.867268, 0.53488, 0.219692, 0.00706689, 0.663453, 0.808216, 0.601974, 0.376532, 0.83036, 0.629283, 0.883264, 0.193387, 0.0443994, 0.0761849, 0.997602, 0.197784, 0.699003, 0.896512, 0.171291, 0.109706, 0.142077, 0.40578, 0.95964, 0.248552, 0.285008, 0.830174, 0.964721, 0.775325, 0.321831, 0.604808, 0.754044, 0.64506, 0.562077, 0.542853, 0.513915, 0.448492, 0.196941, 0.656907, 0.0488933, 0.617255, 0.580532, 0.0304698, 0.282691, 0.925985, 0.278908, 0.608455, 0.247628, 0.264545, 0.874203, 0.0352378, 0.378687, 0.978938, 0.708673, 0.83408, 0.333302, 0.406912, 0.47241, 0.532175, 0.846942, 0.230808, 0.498074, 0.737747, 0.838437, 0.827828, 0.372765, 0.716195, 0.0031591, 0.60139, 0.239205, 0.362793, 0.368054, 0.0469707, 0.243922, 0.917699, 0.330605, 0.884813, 0.964747, 0.714459, 0.0288523, 0.872513, 0.892936, 0.110412, 0.671794, 0.0569587, 0.392471, 0.0335411, 0.546055, 0.620579, 0.996046, 0.629146, 0.720871, 0.424433, 0.18234, 0.970005, 0.37306, 0.193882, 0.16579, 0.67068, 0.344031, 0.673785, 0.677497, 0.116794, 0.612862, 0.430412, 0.0718545, 0.965153, 0.0675488, 0.420747, 0.0240714, 0.560629, 0.736834, 0.202314, 0.168625, 0.879575, 0.350342, 0.0633921, 0.320659, 0.651919, 0.93551, 0.0287724, 0.401608, 0.619234, 0.440252, 0.46345, 0.187329, 0.766523, 0.953905, 0.11476, 0.877794, 0.540893, 0.711903, 0.870188, 0.605244, 0.580351, 0.475272, 0.200008, 0.334535, 0.951009, 0.148014, 0.660032, 0.282946, 0.293743, 0.640482, 0.295294, 0.43565, 0.459423, 0.369859, 0.14906, 0.100024, 0.979403, 0.619681, 0.225737, 0.159481, 0.166316, 0.431837, 0.702767, 0.00762606, 0.382219, 0.821446, 0.665227, 0.665509, 0.393431, 0.174661, 0.247567, 0.352685, 0.0757953, 0.447083, 0.00220328, 0.94772, 0.172234, 0.659594, 0.182595, 0.658573, 0.926163, 0.395575, 0.833623, 0.586329, 0.446466, 0.248359, 0.43228, 0.286672, 0.637918, 0.818844, 0.505608, 0.924497, 0.876322, 0.486439, 0.492004, 0.116082, 0.419319, 0.371351, 0.112833, 0.799946, 0.813323, 0.627463, 0.626841, 0.212818, 0.080466, 0.45839, 0.936047, 0.330806, 0.396407, 0.128132, 0.337196, 0.508015, 0.276482, 0.469119, 0.416351, 0.418221, 0.256517, 0.433233, 0.0633748, 0.64523, 0.375232, 0.772746, 0.748978, 0.231822, 0.0306966, 0.482867, 0.600628, 0.639023, 0.564375, 0.263144, 0.690557, 0.978581, 0.779492, 0.344248, 0.563673, 0.919689, 0.139741, 0.746071, 0.48462, 0.771355, 0.508453, 0.877165, 0.881915, 0.329949, 0.271941, 0.992272, 0.955772, 0.592646, 0.0958913, 0.971883, 0.32395, 0.668041, 0.726392, 0.607352, 0.451372, 0.248852, 0.681079, 0.348441, 0.389822, 0.947261, 0.859747, 0.0752741, 0.00475409, 0.880169, 0.495632, 0.552549, 0.932897, 0.0370653, 0.158477, 0.396098, 0.336555, 0.147542, 0.244161, 0.800122, 0.161507, 0.317774, 0.747631, 0.105783, 0.913389, 0.217816, 0.689807, 0.407019, 0.387921, 0.796469, 0.778097, 0.99188, 0.842639, 0.973156, 0.0750759, 0.391345, 0.249398, 0.235551, 0.732391, 0.421792, 0.113257, 0.563915, 0.58631, 0.111841, 0.869968, 0.0037584, 0.941006, 0.686305, 0.589205, 0.320806, 0.729806, 0.580344, 0.939498, 0.209748, 0.925537, 0.189512, 0.311534, 0.942974, 0.611348, 0.755157, 0.968464, 0.94105, 0.154846, 0.0839139, 0.374204, 0.440558, 0.488214, 0.665687, 0.783799, 0.543538, 0.363276, 
0.600286, 0.387724, 0.768738, 0.244694, 0.98572, 0.11021, 0.674951, 0.360568, 0.0993691, 0.121901, 0.218621, 0.87908, 0.625172, 0.286251, 0.0200376, 0.130153, 0.309825, 0.509561, 0.624001, 0.0635901, 0.799018, 0.570478, 0.359543, 0.383777, 0.922699, 0.00686779, 0.699793, 0.7135, 0.52636, 0.183373, 0.200387, 0.414875, 0.638781, 0.685314, 0.696136, 0.654266, 0.635831, 0.8221, 0.0593308, 0.455524, 0.565491, 0.394954, 0.0464883, 0.623067, 0.949965, 0.902042, 0.947205, 0.1711, 0.937498, 0.0470314, 0.432514, 0.295752, 0.728123, 0.246107, 0.518614, 0.159337, 0.0283117, 0.680406, 0.30313, 0.944064, 0.603427, 0.584558, 0.279374, 0.221425, 0.0173622, 0.351158, 0.873431, 0.59189, 0.333288, 0.803731, 0.455546, 0.324609, 0.587345, 0.510321, 0.265491, 0.916252, 0.205887, 0.2905, 0.756927, 0.62592, 0.701707, 0.524933, 0.0732553, 0.701777, 0.365049, 0.557713, 0.00681914, 0.281786, 0.52879, 0.291129, 0.53505, 0.700351, 0.0604884, 0.340905, 0.778127, 0.329368, 0.778571, 0.584514, 0.407648, 0.39103, 0.395246, 0.670026, 0.638777, 0.969351, 0.500163, 0.680367, 0.508428, 0.115356, 0.804179, 0.953212, 0.572265, 0.371534, 0.103027, 0.0456428, 0.210913, 0.179688, 0.493465, 0.565033, 0.283612, 0.607693, 0.906922, 0.837819, 0.899815, 0.0851523, 0.626258, 0.41873, 0.667188, 0.273192, 0.653154, 0.46232, 0.586902, 0.907028, 0.989101, 0.396109, 0.242971, 0.711676, 0.0384654, 0.572655, 0.233847, 0.139856, 0.809284, 0.381491, 0.442258, 0.818172, 0.546706, 0.368972, 0.346516, 0.0577279, 0.212582, 0.44512, 0.943408, 0.812529, 0.258691, 0.994612, 0.622952, 0.85047, 0.978712, 0.555412, 0.785713, 0.0686208, 0.790245, 0.635581, 0.326273, 0.324096, 0.1216, 0.588925, 0.937101, 0.0580164, 0.532749, 0.753574, 0.298395, 0.708837, 0.925584, 0.314537, 0.0199974, 0.914857, 0.0709083, 0.365013, 0.766875, 0.253048, 0.446831, 0.739593, 0.943318, 0.948265, 0.854331, 0.964466, 0.591531, 0.763801, 0.927365, 0.071033, 0.904064, 0.224232, 0.593028, 0.508106, 0.866237, 0.17225, 0.218926, 0.529941, 0.0546165, 0.475684, 0.428893, 0.849621, 0.779129, 0.998162, 0.39752, 0.46901, 0.103009, 0.0545734, 0.815139, 0.357352, 0.271335, 0.713358, 0.100985, 0.464451, 0.163076, 0.401539, 0.438434, 0.549619, 0.479359, 0.851188, 0.699579, 0.745942, 0.425003, 0.481552, 0.143529, 0.952757, 0.983489, 0.617207, 0.534987, 0.481627, 0.45129, 0.727141, 0.438207, 0.716823, 0.475713, 0.6761, 0.191068, 0.0135497, 0.371372, 0.950274, 0.132142, 0.138519, 0.431744, 0.044696, 0.143351, 0.964098, 0.0737461, 0.2113, 0.851505, 0.495653, 0.431771, 0.725464, 0.130739, 0.915484, 0.146845, 0.285753, 0.998684, 0.707186, 0.00516941, 0.26057, 0.103101, 0.0908891, 0.290059, 0.672285, 0.0566395, 0.749191, 0.56508, 0.480546, 0.925915, 0.889116, 0.753385, 0.151551, 0.829278, 0.622423, 0.281648, 0.981675, 0.789335, 0.648073, 0.330195, 0.706532, 0.991695, 0.624727, 0.912806, 0.222264, 0.686186, 0.292185, 0.62477, 0.551049, 0.296795, 0.260681, 0.249491, 0.887398, 0.312407, 0.3111, 0.450064, 0.228129, 0.300622, 0.0268301, 0.357236, 0.372048, 0.394023, 0.611144, 0.1639, 0.263486, 0.52356, 0.373948, 0.755847, 0.342152, 0.409573, 0.53276, 0.668326, 0.889879, 0.0864506, 0.355495, 0.878126, 0.726512, 0.903876, 0.000187004, 0.358175, 0.140873, 0.775369, 0.018159, 0.196947, 0.095644, 0.105281, 0.645324, 0.428589, 0.0170538, 0.848981, 0.494392, 0.433215, 0.735048, 0.611888, 0.892693, 0.231126, 0.653006, 0.958474, 0.801838, 0.813979, 0.646147, 0.650979, 0.389706, 0.915836, 0.451289, 0.143266, 0.738046, 0.423176, 0.282744, 0.690591, 0.570777, 0.255477, 0.337051, 0.77689, 0.277745, 0.539835, 0.459001, 
0.909962, 0.265262, 0.34509, 0.321861, 0.0539542, 0.846826, 0.996099, 0.481874, 0.708116, 0.850162, 0.369266, 0.094268, 0.0749089, 0.958772, 0.514745, 0.121995, 0.636401, 0.62874, 0.163716, 0.969632, 0.981522, 0.473334, 0.496228, 0.0480764, 0.764567, 0.937061, 0.929395, 0.363888, 0.282947, 0.938167, 0.428342, 0.768651, 0.907915, 0.974172, 0.398318, 0.16123, 0.409964, 0.803203, 0.740767, 0.891692, 0.39842, 0.523639, 0.314543, 0.456075, 0.150667, 0.279852, 0.422093, 0.465474, 0.264575, 0.677157, 0.544893, 0.904432, 0.99908, 0.793004, 0.775158, 0.432617, 0.484001, 0.931907, 0.734817, 0.409538, 0.818343, 0.172727, 0.640275, 0.508192, 0.492913, 0.169585, 0.15526, 0.31094, 0.510842, 0.799051, 0.0744766, 0.95865, 0.0787125, 0.187659, 0.744356, 0.424918, 0.651432, 0.399407, 0.422122, 0.234407, 0.867176, 0.0031812, 0.576058, 0.0765145, 0.430805, 0.847064, 0.614349, 0.521165, 0.811179, 0.0851852, 0.863514, 0.540318, 0.190077, 0.876957, 0.0796464, 0.14774, 0.355131, 0.0387811, 0.844614, 0.428063, 0.737132, 0.591586, 0.497913, 0.904092, 0.235953, 0.787808, 0.147862, 0.0467231, 0.767655, 0.578869, 0.141525, 0.226493, 0.276889, 0.375777, 0.597596, 0.509915, 0.911535, 0.155048, 0.0807722, 0.593731, 0.475499, 0.534079, 0.326091, 0.456234, 0.469692, 0.386506, 0.376216, 0.130604, 0.871723, 0.63914, 0.269276, 0.26115, 0.0649642, 0.927679, 0.428795, 0.422493, 0.512882, 0.805651, 0.17752, 0.645434, 0.574177, 0.74434, 0.134506, 0.924828, 0.449199, 0.616417, 0.0897774, 0.563852, 0.0430616, 0.766246, 0.984203, 0.251402, 0.502739, 0.70647, 0.253403, 0.840979, 0.00160235, 0.626249, 0.688034, 0.276211, 0.0338151, 0.721406, 0.184782, 0.0933805, 0.469703, 0.0579994, 0.973281, 0.328976, 0.0224026, 0.0120294, 0.952645, 0.434664, 0.510599, 0.468921, 0.849224, 0.0732937, 0.307609, 0.105425, 0.930987, 0.830965, 0.143655, 0.827359, 0.24991, 0.547596, 0.58949, 0.630434, 0.272995, 0.910347, 0.553301, 0.398667, 0.37172, 0.565485, 0.312348, 0.0207811, 0.0938434, 0.431078, 0.874804, 0.0382148, 0.245888, 0.916753, 0.0281113, 0.975986, 0.928301, 0.898875, 0.284067, 0.455321, 0.519795, 0.410715, 0.728965, 0.509349, 0.0262562, 0.377225, 0.647666, 0.102608, 0.483928, 0.985199, 0.332942, 0.862284, 0.539911, 0.776392, 0.362682, 0.944709, 0.952443, 0.0163763, 0.0530153, 0.584523, 0.452316, 0.346231, 0.769532, 0.359455, 0.698313, 0.333678, 0.0630267, 0.0613665, 0.292989, 0.136602, 0.657469, 0.514898, 0.0925696, 0.245946, 0.399356, 0.00502625, 0.113463, 0.298903, 0.0883632, 0.593901, 0.940429, 0.715745, 0.557883, 0.442471, 0.600357, 0.558476, 0.381213, 0.713004, 0.700084, 0.63013, 0.853213, 0.459162, 0.514956, 0.191383, 0.415676, 0.554952, 0.36569, 0.562775, 0.398038, 0.34118, 0.379005, 0.0318352, 0.369871, 0.0846944, 0.571178, 0.662504, 0.0934849, 0.609795, 0.0044253, 0.248656, 0.713574, 0.145329, 0.375437, 0.997878, 0.0519915, 0.707906, 0.751402, 0.537598, 0.275107, 0.726846, 0.950406, 0.120217, 0.510785, 0.28646, 0.99745, 0.662108, 0.037174, 0.704815, 0.96889, 0.585646, 0.621016, 0.253529, 0.702981, 0.384114, 0.493708, 0.946507, 0.349568, 0.308275, 0.657793, 0.260429, 0.703277, 0.224442, 0.178678, 0.0533109, 0.408747, 0.222425, 0.259758, 0.201722, 0.481596, 0.461603, 0.583516, 0.548155, 0.557014, 0.950552, 0.438872, 0.339406, 0.982351, 0.545019, 0.876471, 0.379414, 0.817809, 0.116976, 0.740747, 0.541459, 0.650105, 0.010522, 0.411823, 0.790107, 0.581072, 0.493902, 0.178236, 0.0510262, 0.793219, 0.721524, 0.0202778, 0.967812, 0.442783, 0.585765, 0.506299, 0.455301, 0.982905, 0.218136, 0.367442, 0.711311, 0.951539, 0.511261, 0.559317, 
0.741668, 0.0389977, 0.0124546, 0.923073, 0.634987, 0.710564, 0.714429, 0.154461, 0.314793, 0.270273, 0.48032, 0.678968, 0.650322, 0.120074, 0.154, 0.863101, 0.660725, 0.989582, 0.851138, 0.171238, 0.468959, 0.564795, 0.0262704, 0.613367, 0.899521, 0.186535, 0.390592, 0.451016, 0.561485, 0.742754, 0.323051, 0.382154, 0.0792466, 0.735059, 0.606898, 0.294006, 0.912585, 0.5191, 0.117282, 0.878891, 0.0961379, 0.838315, 0.405403, 0.858181, 0.989743, 0.729755, 0.162535, 0.67851, 0.542451, 0.405107, 0.0865721, 0.707122, 0.15019, 0.240717, 0.39602, 0.613774, 0.487051, 0.670991, 0.57201, 0.674157, 0.371978, 0.34165, 0.825731, 0.420471, 0.581756, 0.685447, 0.377548, 0.361245, 0.640912, 0.874889, 0.109935, 0.718771, 0.38296, 0.608281, 0.491219, 0.216237, 0.729194, 0.340244, 0.603779, 0.921871, 0.329327, 0.208833, 0.924213, 0.421747, 0.531276, 0.00795173, 0.918173, 0.0789264, 0.130723, 0.910299, 0.0848694, 0.754063, 0.17522, 0.484633, 0.0412105, 0.0334952, 0.62661, 0.0299762, 0.371506, 0.524782, 0.650564, 0.70848, 0.506701, 0.200102, 0.215629, 0.0784514, 0.504449, 0.709497, 0.149314, 0.28348, 0.216472, 0.0615314, 0.71074, 0.717697, 0.105789, 0.88394, 0.257676, 0.912471, 0.853964, 0.0569171, 0.832451, 0.206953, 0.0555372, 0.18866, 0.120599, 0.79349, 0.0556901, 0.0996212, 0.669235, 0.929572, 0.743434, 0.3225, 0.588423, 0.883245, 0.458549, 0.667092, 0.362529, 0.133026, 0.891944, 0.292762, 0.0656984, 0.891433, 0.156739, 0.778279, 0.829316, 0.00918652, 0.754315, 0.40035, 0.0286117, 0.470567, 0.461861, 0.893933, 0.0322868, 0.86838, 0.960487, 0.747865, 0.164985, 0.543611, 0.924465, 0.0648404, 0.354452, 0.850454, 0.635748, 0.359546, 0.729677, 0.170065, 0.835594, 0.241911, 0.612096, 0.298257, 0.656033, 0.736473, 0.570684, 0.141063, 0.65175, 0.732718, 0.998333, 0.642224, 0.195743, 0.812069, 0.565661, 0.0111318, 0.864431, 0.850898, 0.0313304, 0.183694, 0.867757, 0.823855, 0.31423, 0.273361, 0.0860176, 0.983894, 0.0956444, 0.66443, 0.887831, 0.286919, 0.843797, 0.119932, 0.497116, 0.571003, 0.682796, 0.866104, 0.440773, 0.384498, 0.788672, 0.171808, 0.309293, 0.700145, 0.451478, 0.416702, 0.38524, 0.690447, 0.229804, 0.851363, 0.993086, 0.466875, 0.593496, 0.812165, 0.22422, 0.897163, 0.592888, 0.409687, 0.367272, 0.606949, 0.0418853, 0.902139, 0.515622, 0.879613, 0.0087846, 0.495049, 0.758792, 0.574896, 0.97937, 0.42588, 0.4043, 0.396314, 0.698346, 0.867443, 0.957644, 0.580143, 0.158789, 0.806535, 0.292822, 0.575212, 0.440745, 0.394654, 0.97793, 0.315495, 0.365574, 0.989251, 0.149957, 0.646115, 0.47268, 0.445343, 0.363087, 0.198103, 0.0885023, 0.590964, 0.786558, 0.757322, 0.875607, 0.415867, 0.233232, 0.408189, 0.328638, 0.98307, 0.689366, 0.271193, 0.40178, 0.916204, 0.396797, 0.736722, 0.487217, 0.53466, 0.418768, 0.493619, 0.526003, 0.207898, 0.3513, 0.764979, 0.184003, 0.630575, 0.284706, 0.812363, 0.89832, 0.810347, 0.157824, 0.414233, 0.834702, 0.482118, 0.613242, 0.798075, 0.405224, 0.102532, 0.362188, 0.771588, 0.783701, 0.306191, 0.458318, 0.21087, 0.843597, 0.048031, 0.025925, 0.119953, 0.110736, 0.00805772, 0.945727, 0.170829, 0.942903, 0.957509, 0.914612, 0.277685, 0.0998828, 0.157159, 0.477879, 0.620781, 0.454242, 0.09695, 0.800274, 0.669118, 0.0545194, 0.0747449, 0.959952, 0.561033, 0.35094, 0.640937, 0.72153, 0.940412, 0.493563, 0.782122, 0.0391054, 0.560916, 0.602838, 0.369561, 0.288648, 0.285488, 0.175487, 0.354876, 0.0420985, 0.385587, 0.91713, 0.256593, 0.310404, 0.218336, 0.340142, 0.154145, 0.274297, 0.306909, 0.643085, 0.727223, 0.015141, 0.473561, 0.637071, 0.586807, 0.57735, 0.994438, 
0.203348, 0.400903, 0.376024, 0.397213, 0.118403, 0.326235, 0.238752, 0.378954, 0.327215, 0.484799, 0.536918, 0.88017, 0.361464, 0.350449, 0.188614, 0.136302, 0.679481, 0.28402, 0.172178, 0.422458, 0.603042, 0.49143, 0.043025, 0.0261847, 0.257402, 0.455479, 0.72791, 0.28391, 0.116325, 0.432857, 0.336493, 0.0302461, 0.678545, 0.929686, 0.902836, 0.244853, 0.85614, 0.666575, 0.720534, 0.891851, 0.249178, 0.685972, 0.837344, 0.916753, 0.228053, 0.600715, 0.939043, 0.461278, 0.295011, 0.475544, 0.480572, 0.931025, 0.014823, 0.275961, 0.0728855, 0.184995, 0.435816, 0.131878, 0.102206, 0.63762, 0.00180148, 0.831552, 0.872646, 0.0593314, 0.388552, 0.809018, 0.618149, 0.288378, 0.0156647, 0.744867, 0.201755, 0.00170218, 0.737832, 0.447234, 0.239707, 0.764839, 0.355098, 0.917599, 0.0452707, 0.766898, 0.606773, 0.351303, 0.354268, 0.837914, 0.833773, 0.0415325, 0.689535, 0.970246, 0.245289, 0.170493, 0.785941, 0.611172, 0.99488, 0.255219, 0.144579, 0.667399, 0.143206, 0.787674, 0.886961, 0.263737, 0.183517, 0.390872, 0.56748, 0.078612, 0.774178, 0.0704361, 0.726794, 0.744586, 0.0216967, 0.946146, 0.027991, 0.545636, 0.46659, 0.812134, 0.386265, 0.955681, 0.662299, 0.481497, 0.430118, 0.996269, 0.957166, 0.433195, 0.933126, 0.997205, 0.41795, 0.830028, 0.778497, 0.871957, 0.872685, 0.627754, 0.590224, 0.503084, 0.26531, 0.842474, 0.985117, 0.813906, 0.697348, 0.567247, 0.348052, 0.539768, 0.885292, 0.83726, 0.647066, 0.504051, 0.853265, 0.684865, 0.432105, 0.564111, 0.479551, 0.660563, 0.191942, 0.535453, 0.0981221, 0.283992, 0.235373, 0.285301, 0.0336606, 0.0267226, 0.797876, 0.293447, 0.272971, 0.416199, 0.319188, 0.655346, 0.528362, 0.200321, 0.218261, 0.210511, 0.563691, 0.803145, 0.311688, 0.0458166, 0.781406, 0.414209, 0.723707, 0.279047, 0.563268, 0.903739, 0.975284, 0.358803, 0.116477, 0.928829, 0.0524135, 0.929989, 0.223963, 0.239982, 0.686052, 0.0841157, 0.491084, 0.947208, 0.686472, 0.659798, 0.70586, 0.325346, 0.593309, 0.526232, 0.691239, 0.880092, 0.871785, 0.00576415, 0.154296, 0.316782, 0.832757, 0.891824, 0.712168, 0.256573, 0.314953, 0.731322, 0.69323, 0.410329, 0.0598054, 0.0633719, 0.153841, 0.191406, 0.917982, 0.782984, 0.950621, 0.431821, 0.475662, 0.930644, 0.43351, 0.0779847, 0.143985, 0.275326, 0.980414, 0.0562501, 0.855428, 0.354228, 0.509527, 0.411792, 0.0680083, 0.364961, 0.0390038, 0.266477, 0.0988358, 0.846641, 0.882675, 0.309469, 0.0565254, 0.642614, 0.374367, 0.737144, 0.992361, 0.268723, 0.611044, 0.291866, 0.415973, 0.473206, 0.753897, 0.378031, 0.678003, 0.200336, 0.449631, 0.171387, 0.34159, 0.766647, 0.668858, 0.226873, 0.977119, 0.949211, 0.505311, 0.477781, 0.258112, 0.0192562, 0.829727, 0.199369, 0.0769564, 0.721004, 0.273987, 0.579016, 0.478694, 0.929445, 0.81796, 0.347308, 0.1142, 0.478117, 0.614233, 0.0158434, 0.926876, 0.464126, 0.583562, 0.126896, 0.913065, 0.15919, 0.593088, 0.271819, 0.85154, 0.293742, 0.435965, 0.118852, 0.0890801, 0.285569, 0.100301, 0.66851, 0.363812, 0.556509, 0.982328, 0.568594, 0.362003, 0.549044, 0.682929, 0.0830196, 0.219488, 0.32431, 0.306314, 0.845461, 0.317929, 0.513239, 0.469761, 0.0866943, 0.971634, 0.749946, 0.312276, 0.110921, 0.790912, 0.0906199, 0.365027, 0.693894, 0.290282, 0.113001, 0.624913, 0.988962, 0.66503, 0.351687, 0.776399, 0.16496, 0.976108, 0.768692, 0.950021, 0.0137268, 0.782994, 0.989928, 0.92683, 0.529316, 0.392654, 0.954002, 0.571183, 0.923659, 0.111812, 0.0981338, 0.122619, 0.622718, 0.591275, 0.273072, 0.814748, 0.0196948, 0.843352, 0.00885786, 0.193041, 0.584187, 0.777858, 0.952638, 0.0184829, 0.656321, 
0.0363939, 0.359728, 0.662433, 0.638927, 0.909029, 0.354032, 0.65834, 0.10391, 0.224123, 0.477069, 0.661853, 0.330598, 0.791712, 0.533738, 0.408965, 0.894608, 0.95827, 0.737298, 0.735627, 0.727397, 0.210358, 0.0177663, 0.802234, 0.839811, 0.382884, 0.525836, 0.634119, 0.541241, 0.517584, 0.0919362, 0.123339, 0.193696, 0.56937, 0.0724227, 0.539913, 0.60809, 0.275282, 0.202694, 0.0724591, 0.331825, 0.964982, 0.714297, 0.181775, 0.97801, 0.310005, 0.274382, 0.255482, 0.74804, 0.656908, 0.352639, 0.237788, 0.763843, 0.0898946, 0.0556271, 0.945395, 0.233283, 0.459549, 0.291393, 0.380366, 0.00761878, 0.754733, 0.445453, 0.383008, 0.953107, 0.801082, 0.77968, 0.778829, 0.00416506, 0.028506, 0.408037, 0.0167465, 0.231224, 0.0218634, 0.802864, 0.432155, 0.374266, 0.6581, 0.13919, 0.334472, 0.817559, 0.534311, 0.0921687, 0.885542, 0.921301, 0.335071, 0.196437, 0.336152, 0.305071, 0.000878531, 0.940655, 0.918713, 0.697764, 0.224071, 0.179097, 0.609544, 0.178313, 0.672489, 0.576281, 0.374879, 0.490673, 0.0209759, 0.977082, 0.977767, 0.645734, 0.264889, 0.425193, 0.876967, 0.513684, 0.893216, 0.942787, 0.958608, 0.240452, 0.454345, 0.378708, 0.630916, 0.597085, 0.873556, 0.561133, 0.601752, 0.999286, 0.10868, 0.935995, 0.419598, 0.619687, 0.21474, 0.375042, 0.946096, 0.37599, 0.572168, 0.256488, 0.430483, 0.689666, 0.447373, 0.344317, 0.31939, 0.745312, 0.184443, 0.810819, 0.688951, 0.886608, 0.823717, 0.535063, 0.503938, 0.923031, 0.217417, 0.521355, 0.963095, 0.857502, 0.103348, 0.764154, 0.317005, 0.202662, 0.171049, 0.00759843, 0.685194, 0.191648, 0.861524, 0.452244, 0.42509, 0.375843, 0.487236, 0.594071, 0.144758, 0.391594, 0.375469, 0.0870385, 0.847586, 0.185147, 0.596277, 0.183901, 0.00842619, 0.592075, 0.573614, 0.843781, 0.142902, 0.15364, 0.646929, 0.0892638, 0.622267, 0.891082, 0.96448, 0.694048, 0.58694, 0.709285, 0.724762, 0.577442, 0.539584, 0.169461, 0.257079, 0.182425, 0.790564, 0.271816, 0.478529, 0.111367, 0.411876, 0.107244, 0.885156, 0.140449, 0.340984, 0.669242, 0.409775, 0.417205, 0.222673, 0.716909, 0.278746, 0.0974982, 0.351324, 0.644261, 0.605993, 0.959356, 0.709013, 0.221732, 0.426471, 0.486874, 0.943151, 0.573228, 0.20139, 0.0056907, 0.56611, 0.712727, 0.914089, 0.43214, 0.712815, 0.815321, 0.0330771, 0.094264, 0.909579, 0.642416, 0.006873, 0.41765, 0.701491, 0.541806, 0.626139, 0.127387, 0.901576, 0.236642, 0.472672, 0.395314, 0.732611, 0.387834, 0.830261, 0.395911, 0.764832, 0.932663, 0.784981, 0.404438, 0.180541, 0.995126, 0.124877, 0.0750531, 0.775687, 0.152062, 0.599499, 0.0665797, 0.862486, 0.568055, 0.169529, 0.207695, 0.341386, 0.416456, 0.301757, 0.942727, 0.574895, 0.0317433, 0.449472, 0.834351, 0.00770673, 0.299701, 0.361295, 0.280399, 0.74131, 0.0203017, 0.794719, 0.334522, 0.237113, 0.311298, 0.294878, 0.478388, 0.421921, 0.157156, 0.31681, 0.314472, 0.606741, 0.811527, 0.796103, 0.310328, 0.151267, 0.100948, 0.557801, 0.496497, 0.243911, 0.718503, 0.971905, 0.883, 0.547776, 0.347316, 0.782998, 0.684509, 0.480502, 0.0783967, 0.0692244, 0.947015, 0.629444, 0.870759, 0.193753, 0.827108, 0.447125, 0.0796941, 0.755518, 0.00448889, 0.457882, 0.500657, 0.880133, 0.965818, 0.139194, 0.87915, 0.746309, 0.381925, 0.997604, 0.571539, 0.757676, 0.346344, 0.021274, 0.614877, 0.0489256, 0.812085, 0.649245, 0.65368, 0.59189, 0.411203, 0.135296, 0.144665, 0.538057, 0.75719, 0.0142835, 0.260769, 0.0672047, 0.613627, 0.287363, 0.264982, 0.129293, 0.361468, 0.0549015, 0.557387, 0.845291, 0.698132, 0.818284, 0.1126, 0.193457, 0.646869, 0.597663, 0.544439, 0.17282, 0.782441, 
0.770151, 0.709623, 0.343153, 0.552879, 0.548038, 0.752989, 0.549972, 0.672095, 0.87582, 0.955548, 0.311044, 0.912004, 0.249169, 0.753995, 0.702344, 0.906776, 0.63884, 0.202816, 0.245157, 0.903338, 0.687757, 0.68334, 0.36402, 0.586169, 0.178845, 0.088714, 0.855479, 0.831101, 0.236207, 0.342863, 0.0615278, 0.0731346, 0.891181, 0.76257, 0.816886, 0.310601, 0.535123, 0.459628, 0.551505, 0.571281, 0.709753, 0.0292223, 0.593478, 0.107417, 0.316914, 0.567733, 0.462745, 0.243221, 0.270869, 0.655028, 0.47336, 0.619956, 0.643538, 0.275546, 0.11702, 0.33149, 0.755853, 0.855251, 0.0386841, 0.0435289, 0.109853, 0.293621, 0.371091, 0.431532, 0.563657, 0.159055, 0.85303, 0.64411, 0.714508, 0.975522, 0.875886, 0.262888, 0.826408, 0.583355, 0.523709, 0.193719, 0.410391, 0.991841, 0.183163, 0.639219, 0.172018, 0.838954, 0.588928, 0.661519, 0.662144, 0.422971, 0.1109, 0.0253813, 0.47453, 0.640467, 0.793112, 0.557496, 0.379137, 0.999332, 0.858455, 0.309814, 0.134333, 0.0132461, 0.82194, 0.439918, 0.987648, 0.279281, 0.164631, 0.147135, 0.902111, 0.184585, 0.482819, 0.0707794, 0.159107, 0.30131, 0.993267, 0.245424, 0.37454, 0.35634, 0.122597, 0.938857, 0.693804, 0.348595, 0.92542, 0.0348975, 0.225595, 0.370385, 0.486661, 0.777579, 0.244736, 0.462099, 0.913458, 0.829255, 0.121728, 0.720604, 0.901608, 0.826562, 0.61733, 0.434164, 0.283134, 0.158365, 0.186713, 0.773638, 0.00277137, 0.815615, 0.11357, 0.449243, 0.918311, 0.671511, 0.406604, 0.172264, 0.864247, 0.28041, 0.201363, 0.533451, 0.254058, 0.588508, 0.587943, 0.552362, 0.340251, 0.501069, 0.175259, 0.717303, 0.152411, 0.485068, 0.860247, 0.316549, 0.0997113, 0.788903, 0.00377454, 0.897165, 0.731088, 0.72421, 0.983197, 0.709219, 0.722073, 0.900025, 0.948974, 0.837514, 0.686165, 0.457643, 0.245526, 0.189808, 0.118665, 0.209807, 0.763101, 0.715023, 0.598781, 0.965135, 0.868997, 0.223561, 0.670716, 0.99781, 0.145113, 0.266044, 0.719911, 0.58673, 0.529704, 0.404348, 0.728003, 0.889631, 0.524128, 0.472478, 0.40759, 0.46659, 0.257081, 0.657219, 0.37316, 0.133779, 0.939127, 0.644057, 0.925486, 0.0304648, 0.746252, 0.836801, 0.269247, 0.547866, 0.0164205, 0.714248, 0.253847, 0.724494, 0.77699, 0.0604745, 0.971237, 0.249226, 0.580107, 0.828316, 0.460406, 0.606842, 0.456018, 0.237932, 0.42174, 0.694471, 0.687645, 0.837182, 0.875367, 0.756231, 0.0400628, 0.789693, 0.14741, 0.371227, 0.575896, 0.926123, 0.459602, 0.581003, 0.385102, 0.154742, 0.37969, 0.22614, 0.308876, 0.850189, 0.163223, 0.599109, 0.373049, 0.856136, 0.371121, 0.965673, 0.577745, 0.639973, 0.123351, 0.870461, 0.754086, 0.236437, 0.720433, 0.0527095, 0.29122, 0.547433, 0.882622, 0.940694, 0.194832, 0.0750346, 0.0610123, 0.310287, 0.341471, 0.983267, 0.375502, 0.876803, 0.287406, 0.748471, 0.594963, 0.628531, 0.438567, 0.665441, 0.672503, 0.506363, 0.308025, 0.655934, 0.994362, 0.824733, 0.961195, 0.746385, 0.0960097, 0.310106, 0.453354, 0.381799, 0.109705, 0.104281, 0.868622, 0.501508, 0.11718, 0.500258, 0.948738, 0.471366, 0.780311, 0.532016, 0.49108, 0.549401, 0.00727923, 0.81396, 0.771862, 0.0687179, 0.210695, 0.560634, 0.701196, 0.200612, 0.241739, 0.264693, 0.375663, 0.00471052, 0.734523, 0.203789, 0.0913241, 0.425225, 0.876708, 0.0238135, 0.509967, 0.876341, 0.549541, 0.469744, 0.0518966, 0.160679, 0.0455441, 0.889455, 0.757726, 0.758576, 0.437118, 0.392305, 0.0345043, 0.92864, 0.732631, 0.548112, 0.373253, 0.617855, 0.339635, 0.0494614, 0.36108, 0.744314, 0.422427, 0.463968, 0.564526, 0.653425, 0.780431, 0.716295, 0.238265, 0.584077, 0.265653, 0.463058, 0.732914, 0.0885101, 0.291173, 
0.173535, 0.765263, 0.480756, 0.648483, 0.0151566, 0.414509, 0.220405, 0.50928, 0.0271092, 0.12019, 0.944946, 0.817961, 0.493538, 0.369312, 0.354326, 0.287443, 0.030618, 0.357926, 0.316122, 0.362009, 0.668179, 0.341988, 0.268399, 0.621757, 0.573012, 0.492584, 0.972075, 0.0498279, 0.281859, 0.228451, 0.393648, 0.324706, 0.709167, 0.964169, 0.233573, 0.34167, 0.609674, 0.0458664, 0.760891, 0.443914, 0.246326, 0.899665, 0.0959899, 0.49794, 0.477643, 0.65378, 0.732796, 0.321912, 0.620424, 0.941089, 0.913828, 0.72776, 0.0321257, 0.75628, 0.274313, 0.611542, 0.128138, 0.695211, 0.926982, 0.720654, 0.588312, 0.495734, 0.123597, 0.228186, 0.66672, 0.0573115, 0.260192, 0.213768, 0.769004, 0.967237, 0.382545, 0.834618, 0.0757735, 0.712371, 0.935933, 0.796569, 0.599636, 0.612858, 0.463462, 0.78504, 0.717045, 0.374879, 0.707674, 0.208134, 0.591191, 0.765945, 0.155521, 0.232395, 0.654584, 0.214316, 0.735305, 0.0839292, 0.848127, 0.710719, 0.941026, 0.874579, 0.769307, 0.301192, 0.197223, 0.217495, 0.625208, 0.924223, 0.109125, 0.321889, 0.316792, 0.179218, 0.716343, 0.280709, 0.72566, 0.739555, 0.264069, 0.657673, 0.853048, 0.803091, 0.820985, 0.651857, 0.931743, 0.510448, 0.387036, 0.93948, 0.739968, 0.156606, 0.774725, 0.724864, 0.838277, 0.340938, 0.197131, 0.455342, 0.208851, 0.0474198, 0.489681, 0.718457, 0.20933, 0.107909, 0.885783, 0.176625, 0.123576, 0.406788, 0.630986, 0.0527968, 0.147339, 0.826964, 0.76672, 0.911876, 0.663619, 0.195413, 0.979305, 0.587192, 0.736023, 0.878371, 0.872901, 0.704097, 0.165629, 0.0568425, 0.335889, 0.229509, 0.0537321, 0.334414, 0.692468, 0.260873, 0.0967046, 0.52543, 0.533514, 0.553588, 0.13187, 0.630808, 0.609632, 0.963756, 0.557915, 0.219282, 0.784697, 0.967045, 0.00652943, 0.736587, 0.0784029, 0.81302, 0.148816, 0.953596, 0.502145, 0.097795, 0.905445, 0.779644, 0.283433, 0.29729, 0.0683082, 0.574513, 0.251414, 0.467007, 0.228528, 0.469036, 0.807922, 0.0549555, 0.416164, 0.904429, 0.227143, 0.465156, 0.604122, 0.574088, 0.934791, 0.108433, 0.852014, 0.907469, 0.945096, 0.459356, 0.667031, 0.281934, 0.637253, 0.932625, 0.804203, 0.0843158, 0.963223, 0.887788, 0.305144, 0.944713, 0.776812, 0.594855, 0.478587, 0.0797217, 0.036862, 0.896295, 0.0297441, 0.0766876, 0.7874, 0.150145, 0.884818, 0.14161, 0.485735, 0.364699, 0.329101, 0.0938824, 0.708327, 0.931794, 0.665628, 0.892828, 0.439991, 0.6202, 0.169928, 0.925779, 0.018304, 0.739203, 0.940643, 0.505835, 0.243505, 0.0885657, 0.727978, 0.385604, 0.704526, 0.0057521, 0.237235, 0.363028, 0.787715, 0.880617, 0.535948, 0.459587, 0.856384, 0.0469387, 0.0326529, 0.898075, 0.361336, 0.908772, 0.558115, 0.6705, 0.739076, 0.673523, 0.54199, 0.7216, 0.706949, 0.4299, 0.693178, 0.118263, 0.627025, 0.246125, 0.452557, 0.819869, 0.939203, 0.465305, 0.626229, 0.107427, 0.286635, 0.214797, 0.521525, 0.472208, 0.754895, 0.375884, 0.148967, 0.498511, 0.774345, 0.83256, 0.812576, 0.249913, 0.802774, 0.87416, 0.120424, 0.530108, 0.829449, 0.813338, 0.580094, 0.897474, 0.515633, 0.857695, 0.353416, 0.426706, 0.786654, 0.676372, 0.774821, 0.623743, 0.0176362, 0.530675, 0.531131, 0.834187, 0.550216, 0.0224422, 0.0538156, 0.656118, 0.902216, 0.83654, 0.64387, 0.305327, 0.0589362, 0.496555, 0.562009, 0.343018, 0.847697, 0.182216, 0.0835304, 0.477091, 0.341358, 0.909489, 0.844192, 0.250927, 0.933091, 0.0520628, 0.40084, 0.672863, 0.87823, 0.442065, 0.0937743, 0.88828, 0.237486, 0.63274, 0.909439, 0.382021, 0.905333, 0.869442, 0.230645, 0.729319, 0.0606626, 0.631403, 0.194657, 0.10353, 0.722611, 0.877988, 0.629357, 0.183838, 0.649866, 
0.877232, 0.724609, 0.595428, 0.429649, 0.425882, 0.213661, 0.898753, 0.901602, 0.665194, 0.27595, 0.956716, 0.023234, 0.967866, 0.662391, 0.780798, 0.762026, 0.136363, 0.406325, 0.111442, 0.770045, 0.198666, 0.8796, 0.865666, 0.321917, 0.415935, 0.983409, 0.222184, 0.950112, 0.38876, 0.286987, 0.291493, 0.240819, 0.649202, 0.862471, 0.291304, 0.970219, 0.862215, 0.0320834, 0.192766, 0.590521, 0.635422, 0.9835, 0.228775, 0.188603, 0.0157858, 0.70571, 0.307928, 0.113621, 0.696907, 0.588957, 0.282671, 0.0120755, 0.166472, 0.958485, 0.157565, 0.0117145, 0.175625, 0.532964, 0.213409, 0.4184, 0.26797, 0.0135374, 0.190527, 0.848565, 0.087241, 0.171327, 0.929766, 0.352087, 0.163126, 0.194579, 0.708994, 0.59182, 0.201136, 0.264034, 0.126743, 0.0568339, 0.462297, 0.426802, 0.439699, 0.303216, 0.697761, 0.118959, 0.995954, 0.349346, 0.375857, 0.711591, 0.57804, 0.650406, 0.289833, 0.962389, 0.469556, 0.645988, 0.543234, 0.219211, 0.617346, 0.709983, 0.0910768, 0.391303, 0.981123, 0.805719, 0.805006, 0.798241, 0.485463, 0.173305, 0.9613, 0.00920269, 0.961719, 0.467961, 0.245859, 0.305431, 0.148351, 0.167167, 0.436692, 0.922042, 0.492883, 0.24934, 0.308159, 0.99099, 0.9951, 0.244618, 0.131421, 0.342017, 0.822735, 0.142372, 0.811411, 0.0409804, 0.721215, 0.861459, 0.766847, 0.284534, 0.264093, 0.675163, 0.664999, 0.665671, 0.745024, 0.771032, 0.483728, 0.898326, 0.521523, 0.249476, 0.258946, 0.257854, 0.0855351, 0.216374, 0.809722, 0.495059, 0.133953, 0.199373, 0.301335, 0.710782, 0.0216161, 0.274523, 0.260882, 0.545702, 0.625261, 0.748891, 0.800363, 0.534662, 0.284751, 0.757313, 0.964026, 0.256365, 0.321775, 0.209238, 0.988314, 0.762191, 0.337054, 0.861542, 0.235185, 0.476932, 0.437778, 0.914141, 0.402043, 0.380551, 0.905735, 0.638406, 0.773127, 0.309329, 0.0671007, 0.795017, 0.799532, 0.00364822, 0.369172, 0.875997, 0.600051, 0.882769, 0.396643, 0.189659, 0.725344, 0.825717, 0.2182, 0.786751, 0.506846, 0.242098, 0.672779, 0.317106, 0.627122, 0.509488, 0.675008, 0.89748, 0.219216, 0.145694, 0.290574, 0.718498, 0.983559, 0.420981, 0.119246, 0.663962, 0.470443, 0.667008, 0.579719, 0.571089, 0.997056, 0.626232, 0.773617, 0.939652, 0.446185, 0.111748, 0.967741, 0.652235, 0.397335, 0.494093, 0.51104, 0.397897, 0.670147, 0.846698, 0.0213984, 0.550662, 0.288313, 0.750895, 0.193161, 0.050176, 0.280056, 0.390065, 0.199181, 0.451561, 0.564859, 0.325755, 0.408679, 0.998351, 0.974254, 0.526062, 0.0957082, 0.219961, 0.812711, 0.958309, 0.272282, 0.161978, 0.338449, 0.557122, 0.870514, 0.628552, 0.840722, 0.0219039, 0.0575009, 0.185114, 0.0819644, 0.337032, 0.0216506, 0.982923, 0.891166, 0.0296416, 0.132156, 0.913832, 0.79417, 0.422663, 0.239595, 0.62447, 0.112625, 0.762766, 0.912618, 0.689301, 0.0103053, 0.84427, 0.89194, 0.0801654, 0.115514, 0.935909, 0.404519, 0.116629, 0.437316, 0.456131, 0.937935, 0.48319, 0.14409, 0.379065, 0.0453652, 0.579558, 0.1382, 0.282545, 0.602832, 0.983994, 0.862103, 0.428647, 0.6004, 0.783887, 0.89323, 0.787228, 0.280172, 0.404725, 0.333178, 0.702263, 0.177241, 0.731057, 0.195261, 0.258201, 0.35049, 0.72335, 0.509525, 0.153213, 0.0179785, 0.911976, 0.571066, 0.028932, 0.693651, 0.986674, 0.952026, 0.533778, 0.238231, 0.772806, 0.0107944, 0.285385, 0.104306, 0.58825, 0.499696, 0.775419, 0.316042, 0.972347, 0.61327, 0.0776558, 0.568497, 0.424756, 0.685905, 0.256827, 0.574769, 0.357528, 0.594186, 0.326758, 0.420383, 0.294078, 0.643391, 0.549081, 0.276275, 0.330507, 0.067916, 0.653988, 0.781768, 0.884367, 0.240856, 0.280531, 0.709184, 0.369147, 0.0581054, 0.486437, 0.290935, 0.105259, 
0.0815294, 0.866518, 0.650869, 0.703351, 0.869202, 0.468616, 0.568117, 0.975868, 0.271798, 0.0865298, 0.894777, 0.256255, 0.322548, 0.614776, 0.658665, 0.0100567, 0.249481, 0.422542, 0.3974, 0.499621, 0.50149, 0.879339, 0.649601, 0.455446, 0.658601, 0.719479, 0.837564, 0.0624449, 0.902218, 0.225339, 0.350457, 0.489363, 0.763642, 0.243925, 0.657255, 0.0705136, 0.0489285, 0.186975, 0.439029, 0.346806, 0.694161, 0.502898, 0.98227, 0.680688, 0.960858, 0.208254, 0.511671, 0.716765, 0.711505, 0.911834, 0.626555, 0.0469704, 0.673963, 0.218715, 0.339074, 0.968538, 0.257862, 0.942298, 0.538326, 0.492178, 0.165233, 0.691901, 0.903309, 0.524838, 0.926782, 0.193413, 0.802709, 0.711884, 0.0240081, 0.140502, 0.0563052, 0.584991, 0.319483, 0.0546981, 0.910044, 0.812915, 0.747399, 0.417452, 0.857698, 0.210939, 0.168606, 0.231105, 0.336397, 0.868238, 0.866334, 0.0218861, 0.990205, 0.246814, 0.644786, 0.269771, 0.392032, 0.239372, 0.437806, 0.772413, 0.0163668, 0.0532537, 0.920045, 0.33141, 0.514837, 0.871406, 0.397055, 0.498462, 0.591409, 0.0411478, 0.676721, 0.118477, 0.83862, 0.707243, 0.420312, 0.444369, 0.783343, 0.00486774, 0.869095, 0.132895, 0.899124, 0.428715, 0.2236, 0.961385, 0.273355, 0.138576, 0.852518, 0.500557, 0.480699, 0.44492, 0.784342, 0.661856, 0.695567, 0.572185, 0.111332, 0.256081, 0.819809, 0.322147, 0.404687, 0.943613, 0.706108, 0.951866, 0.261504, 0.782387, 0.760186, 0.0156952, 0.815611, 0.178297, 0.471005, 0.611869, 0.116486, 0.0850198, 0.104099, 0.604362, 0.745123, 0.15129, 0.548693, 0.445766, 0.479616, 0.253791, 0.765529, 0.558392, 0.847801, 0.898313, 0.627633, 0.0950122, 0.842727, 0.375165, 0.406892, 0.196276, 0.650598, 0.527738, 0.343633, 0.234852, 0.361932, 0.787421, 0.669053, 0.902753, 0.254946, 0.694282, 0.609353, 0.523103, 0.146424, 0.157292, 0.107735, 0.721125, 0.279951, 0.969449, 0.129756, 0.59466, 0.656208, 0.666184, 0.384704, 0.840437, 0.165976, 0.439087, 0.0224346, 0.563953, 0.496244, 0.486372, 0.923374, 0.00637722, 0.833891, 0.430868, 0.476303, 0.526871, 0.85095, 0.346899, 0.59411, 0.150974, 0.487017, 0.180168, 0.32126, 0.398172, 0.874478, 0.0562483, 0.645354, 0.520445, 0.576221, 0.145211, 0.132088, 0.639109, 0.835965, 0.712716, 0.101905, 0.962231, 0.904648, 0.245937, 0.338644, 0.768588, 0.219802, 0.785703, 0.684242, 0.00758828, 0.344332, 0.845255, 0.343437, 0.479491, 0.00184634, 0.867279, 0.692014, 0.431415, 0.333164, 0.726412, 0.620137, 0.706249, 0.7203, 0.470956, 0.816513, 0.695551, 0.791482, 0.770726, 0.395524, 0.624206, 0.815844, 0.0873979, 0.611708, 0.256229, 0.822988, 0.259085, 0.441877, 0.392015, 0.0981093, 0.88807, 0.019005, 0.108683, 0.651687, 0.100758, 0.325939, 0.269713, 0.948986, 0.825146, 0.564144, 0.770639, 0.485206, 0.416936, 0.0984107, 0.771148, 0.266221, 0.44929, 0.304169, 0.0864227, 0.119333, 0.609946, 0.352228, 0.803194, 0.0700682, 0.180989, 0.290626, 0.654246, 0.873521, 0.195073, 0.0980244, 0.32034, 0.14283, 0.213314, 0.572725, 0.811518, 0.904587, 0.431793, 0.472596, 0.701579, 0.165594, 0.788756, 0.55967, 0.412133, 0.673068, 0.475394, 0.423188, 0.998463, 0.134837, 0.440722, 0.283467, 0.519767, 0.827366, 0.616524, 0.269592, 0.0610548, 0.214388, 0.310935, 0.813268, 0.614236, 0.305517, 0.0430781, 0.620736, 0.641705, 0.895556, 0.3765, 0.982094, 0.845038, 0.981563, 0.744293, 0.291727, 0.00362085, 0.717874, 0.725714, 0.542053, 0.320815, 0.258247, 0.240783, 0.290881, 0.296439, 0.715697, 0.0383373, 0.384729, 0.428255, 0.454301, 0.434624, 0.666046, 0.186181, 0.230389, 0.706728, 0.559589, 0.941174, 0.54638, 0.252105, 0.210496, 0.370935, 0.469081, 0.65679, 
0.246057, 0.593924, 0.828955, 0.715542, 0.584473, 0.265623, 0.443921, 0.883346, 0.60509, 0.288227, 0.636306, 0.497498, 0.0708454, 0.835096, 0.627698, 0.328114, 0.316023, 0.5014, 0.921875, 0.54686, 0.0416384, 0.157481, 0.838317, 0.935189, 0.269667, 0.167253, 0.588981, 0.209506, 0.663613, 0.158796, 0.720612, 0.0420776, 0.36182, 0.501057, 0.195321, 0.655048, 0.320891, 0.495573, 0.589786, 0.931608, 0.467956, 0.0568821, 0.382032, 0.761145, 0.288557, 0.36101, 0.125653, 0.182641, 0.334124, 0.157218, 0.762846, 0.149435, 0.999255, 0.4778, 0.553885, 0.0677615, 0.931818, 0.016556, 0.803526, 0.0220922, 0.631252, 0.199443, 0.400167, 0.582883, 0.765045, 0.577627, 0.547879, 0.823248, 0.180415, 0.998187, 0.783894, 0.211209, 0.533494, 0.754303, 0.65733, 0.133927, 0.828414, 0.792878, 0.411378, 0.744205, 0.527533, 0.903261, 0.426149, 0.224438, 0.52951, 0.398703, 0.127338, 0.630412, 0.688104, 0.339014, 0.553921, 0.34712, 0.62277, 0.038325, 0.410101, 0.171616, 0.806647, 0.679862, 0.823435, 0.517721, 0.176172, 0.960146, 0.0800634, 0.487031, 0.0279664, 0.0173489, 0.0645565, 0.268324, 0.165998, 0.971805, 0.421874, 0.536972, 0.0730187, 0.845134, 0.3398, 0.305973, 0.0977254, 0.836375, 0.289279, 0.753972, 0.641238, 0.946321, 0.962601, 0.67325, 0.257082, 0.533891, 0.8623, 0.788637, 0.946002, 0.431869, 0.292219, 0.32183, 0.897805, 0.677521, 0.902471, 0.314869, 0.252362, 0.405313, 0.600214, 0.805096, 0.202731, 0.959067, 0.32637, 0.180819, 0.700432, 0.477366, 0.78468, 0.807295, 0.666499, 0.760154, 0.708467, 0.353349, 0.832866, 0.408311, 0.0210555, 0.297889, 0.612174, 0.759349, 0.589077, 0.993623, 0.142461, 0.334269, 0.560127, 0.848384, 0.151291, 0.404091, 0.0174518, 0.464505, 0.730424, 0.103645, 0.280192, 0.143918, 0.450896, 0.957061, 0.545256, 0.857519, 0.262689, 0.979739, 0.403343, 0.957957, 0.853007, 0.150011, 0.856873, 0.842149, 0.310354, 0.828174, 0.173182, 0.671715, 0.608794, 0.457571, 0.991531, 0.196842, 0.854958, 0.916562, 0.205036, 0.502097, 0.545073, 0.990717, 0.660022, 0.160164, 0.334036, 0.487408, 0.524314, 0.786715, 0.778241, 0.291536, 0.259401, 0.338952, 0.829521, 0.0694723, 0.375024, 0.141045, 0.580219, 0.487215, 0.409129, 0.557917, 0.241972, 0.316169, 0.910911, 0.686209, 0.00985684, 0.467838, 0.493742, 0.671694, 0.823553, 0.842008, 0.433344, 0.294061, 0.188521, 0.92429, 0.538474, 0.750357, 0.88262, 0.568984, 0.595121, 0.84028, 0.00421434, 0.776732, 0.335066, 0.409157, 0.0208589, 0.497187, 0.612949, 0.250027, 0.522101, 0.300015, 0.58257, 0.240747, 0.572784, 0.690404, 0.953512, 0.0912815, 0.0733098, 0.68624, 0.794803, 0.696063, 0.60953, 0.815455, 0.625078, 0.714355, 0.772728, 0.479029, 0.626334, 0.474357, 0.914993, 0.75467, 0.745836, 0.63729, 0.19669, 0.0905315, 0.893427, 0.849832, 0.912964, 0.382397, 0.71154, 0.808354, 0.893045, 0.456135, 0.393691, 0.863108, 0.761282, 0.991937, 0.573704, 0.690977, 0.3117, 0.567825, 0.306727, 0.326662, 0.491348, 0.399847, 0.77062, 0.405923, 0.918753, 0.258499, 0.902771, 0.73017, 0.637004, 0.585638, 0.605306, 0.115159, 0.118974, 0.432776, 0.488925, 0.66015, 0.350921, 0.18767, 0.973977, 0.723712, 0.339241, 0.379722, 0.536957, 0.624038, 0.497347, 0.944084, 0.159237, 0.777434, 0.289676, 0.511357, 0.39244, 0.415509, 0.890435, 0.595442, 0.47797, 0.466247, 0.66677, 0.451693, 0.580417, 0.25935, 0.643025, 0.380881, 0.944449, 0.650725, 0.165078, 0.702137, 0.704296, 0.752474, 0.936447, 0.868422, 0.262513, 0.0232052, 0.555611, 0.74614, 0.398948, 0.195334, 0.0348564, 0.583205, 0.423472, 0.0960633, 0.733525, 0.99676, 0.487536, 0.296982, 0.20456, 0.571804, 0.868799, 0.749744, 0.621992, 
0.759803, 0.0618532, 0.253088, 0.445758, 0.650843, 0.648296, 0.750919, 0.0334708, 0.297256, 0.310481, 0.532567, 0.404802, 0.377319, 0.542259, 0.12443, 0.790325, 0.444367, 0.452441, 0.217944, 0.424292, 0.115662, 0.61383, 0.245621, 0.747123, 0.830206, 0.369922, 0.865725, 0.651759, 0.461403, 0.145014, 0.225638, 0.346928, 0.0338172, 0.143913, 0.235113, 0.753127, 0.939304, 0.684814, 0.633211, 0.960311, 0.489829, 0.883591, 0.474476, 0.173168, 0.804425, 0.121522, 0.153494, 0.0543107, 0.204177, 0.602172, 0.536432, 0.323885, 0.439595, 0.0364584, 0.520795, 0.635875, 0.199897, 0.194836, 0.741096, 0.971576, 0.194318, 0.181178, 0.326358, 0.633951, 0.632444, 0.361302, 0.29806, 0.942541, 0.87561, 0.230811, 0.0783732, 0.657944, 0.96467, 0.170779, 0.45759, 0.261792, 0.567791, 0.403396, 0.628791, 0.323723, 0.227396, 0.506212, 0.872135, 0.210903, 0.194407, 0.54982, 0.577773, 0.461273, 0.450567, 0.976968, 0.508873, 0.450128, 0.881639, 0.688893, 0.0998914, 0.966041, 0.53252, 0.0757306, 0.502864, 0.275422, 0.687194, 0.348087, 0.35807, 0.171159, 0.972887, 0.863733, 0.813569, 0.51552, 0.197781, 0.802414, 0.524504, 0.533703, 0.619684, 0.333113, 0.0153569, 0.907485, 0.44377, 0.428598, 0.183857, 0.982795, 0.238431, 0.208793, 0.321625, 0.380313, 0.568526, 0.930941, 0.677121, 0.544023, 0.419881, 0.388237, 0.011026, 0.121431, 0.239208, 0.189497, 0.659145, 0.109031, 0.142288, 0.555917, 0.99295, 0.786536, 0.0710536, 0.768471, 0.161077, 0.719407, 0.513358, 0.472459, 0.130592, 0.878847, 0.372559, 0.987415, 0.819368, 0.896491, 0.293905, 0.983938, 0.549059, 0.398498, 0.596892, 0.997351, 0.691984, 0.0992515, 0.953706, 0.353985, 0.967347, 0.595026, 0.175495, 0.593134, 0.333987, 0.199959, 0.124917, 0.367774, 0.688183, 0.826602, 0.00830476, 0.875706, 0.41639, 0.337927, 0.448663, 0.243666, 0.707968, 0.88552, 0.1651, 0.309453, 0.0265487, 0.970021, 0.872214, 0.456927, 0.635171, 0.243996, 0.693179, 0.997546, 0.220315, 0.00512867, 0.503069, 0.980794, 0.546817, 0.83832, 0.572533, 0.859018, 0.129256, 0.64796, 0.531453, 0.0151519, 0.954558, 0.50729, 0.690222, 0.0997272, 0.254811, 0.410683, 0.578204, 0.764459, 0.252579, 0.725538, 0.393329, 0.326625, 0.918815, 0.227689, 0.538058, 0.445234, 0.778616, 0.793048, 0.00585421, 0.475651, 0.0352852, 0.858657, 0.290356, 0.37671, 0.469648, 0.251567, 0.624495, 0.450786, 0.374024, 0.145673, 0.67222, 0.351939, 0.813303, 0.56368, 0.215226, 0.0814795, 0.960962, 0.355566, 0.435692, 0.396444, 0.641797, 0.253165, 0.932764, 0.280643, 0.831266, 0.852321, 0.563509, 0.451012, 0.864254, 0.171573, 0.787336, 0.701232, 0.583738, 0.185954, 0.367937, 0.158504, 0.0161709, 0.86806, 0.939253, 0.267748, 0.352643, 0.426932, 0.189601, 0.911403, 0.50837, 0.732431, 0.985006, 0.269347, 0.958147, 0.855181, 0.883161, 0.956014, 0.974718, 0.707734, 0.52887, 0.991762, 0.0393665, 0.105822, 0.141936, 0.504212, 0.476858, 0.584185, 0.0764541, 0.180425, 0.558995, 0.895737, 0.768966, 0.165616, 0.304485, 0.575581, 0.874015, 0.398608, 0.0082093, 0.0736755, 0.0499443, 0.581033, 0.874252, 0.876927, 0.991794, 0.672979, 0.650433, 0.286385, 0.946385, 0.358745, 0.243017, 0.655395, 0.163383, 0.242879, 0.0232899, 0.464411, 0.409471, 0.679895, 0.997155, 0.029812, 0.159658, 0.370295, 0.27686, 0.211441, 0.936067, 0.155365, 0.83717, 0.603396, 0.61765, 0.493705, 0.945561, 0.893608, 0.258562, 0.736011, 0.144251, 0.364804, 0.282035, 0.709481, 0.193582, 0.313855, 0.2994, 0.742215, 0.360354, 0.981149, 0.350727, 0.402133, 0.388844, 0.583267, 0.070153, 0.21933, 0.716418, 0.249012, 0.0757469, 0.0186221, 0.646731, 0.0770813, 0.265132, 0.578124, 0.368505, 
0.645117, 0.513793, 0.838181, 0.00636151, 0.121437, 0.274747, 0.617832, 0.628392, 0.681296, 0.513797, 0.0434236, 0.154694, 0.975077, 0.590228, 0.987464, 0.402142, 0.714017, 0.563677, 0.624326, 0.658532, 0.847364, 0.379046, 0.666956, 0.211503, 0.164405, 0.0785213, 0.795591, 0.436383, 0.168083, 0.656521, 0.09148, 0.862772, 0.573072, 0.711908, 0.60611, 0.526567, 0.198538, 0.868665, 0.537465, 0.842959, 0.00195253, 0.827145, 0.977914, 0.479378, 0.255765, 0.377757, 0.266293, 0.264698, 0.374865, 0.406781, 0.788782, 0.457216, 0.607319, 0.663994, 0.932821, 0.688466, 0.920392, 0.82204, 0.905654, 0.299072, 0.98596, 0.259191, 0.359024, 0.511, 0.640021, 0.0260404, 0.228917, 0.480962, 0.0145337, 0.891536, 0.75293, 0.500486, 0.595336, 0.697801, 0.936754, 0.871415, 0.901875, 0.90638, 0.356738, 0.0370333, 0.842833, 0.246147, 0.663604, 0.986032, 0.848929, 0.805214, 0.400486, 0.834942, 0.0638208, 0.67463, 0.111669, 0.585896, 0.743536, 0.746684, 0.914686, 0.518284, 0.63239, 0.283595, 0.181878, 0.890666, 0.305306, 0.202127, 0.433876, 0.111278, 0.604247, 0.440199, 0.812756, 0.739672, 0.814564, 0.246007, 0.240815, 0.808788, 0.496448, 0.180202, 0.935318, 0.100369, 0.279289, 0.520393, 0.686972, 0.409546, 0.718325, 0.7967, 0.622775, 0.307297, 0.153473, 0.445662, 0.0112306, 0.823669, 0.69551, 0.398283, 0.711707, 0.053126, 0.14718, 0.894394, 0.0492904, 0.21273, 0.196796, 0.478175, 0.227983, 0.814148, 0.488083, 0.452984, 0.658198, 0.517192, 0.383138, 0.0411468, 0.658421, 0.254412, 0.834593, 0.78294, 0.639942, 0.360754, 0.849921, 0.433348, 0.0629977, 0.251698, 0.00425808, 0.0742739, 0.590643, 0.852995, 0.44001, 0.802642, 0.186821, 0.770974, 0.787275, 0.702866, 0.947383, 0.0997299, 0.738891, 0.119124, 0.482181, 0.281371, 0.586083, 0.991044, 0.356575, 0.342173, 0.873849, 0.974298, 0.30734, 0.181679, 0.754, 0.203723, 0.32117, 0.124177, 0.430788, 0.76273, 0.0252674, 0.571264, 0.620615, 0.864177, 0.144175, 0.758325, 0.644709, 0.952742, 0.461476, 0.401015, 0.439588, 0.520973, 0.165119, 0.845272, 0.382533, 0.697601, 0.298341, 0.517291, 0.687995, 0.831807, 0.406744, 0.00147834, 0.0503307, 0.0969225, 0.812963, 0.300985, 0.0614599, 0.296362, 0.255696, 0.469395, 0.302228, 0.682409, 0.855154, 0.318436, 0.731659, 0.898572, 0.196598, 0.709197, 0.0914218, 0.70073, 0.624826, 0.687569, 0.503143, 0.546211, 0.965121, 0.78538, 0.59169, 0.785625, 0.743999, 0.500571, 0.657924, 0.0492368, 0.031132, 0.373758, 0.517738, 0.559081, 0.509297, 0.338738, 0.512054, 0.754298, 0.384323, 0.525394, 0.923157, 0.188505, 0.297817, 0.0560293, 0.465045, 0.591416, 0.0247203, 0.890947, 0.614514, 0.684119, 0.751641, 0.333143, 0.995364, 0.699339, 0.847508, 0.909596, 0.210375, 0.0579457, 0.252702, 0.834395, 0.21708, 0.831192, 0.173178, 0.026883, 0.450337, 0.162823, 0.554947, 0.418359, 0.330839, 0.541603, 0.947367, 0.827405, 0.199467, 0.792635, 0.217672, 0.814948, 0.422815, 0.797286, 0.230456, 0.151905, 0.955378, 0.245243, 0.869898, 0.0846103, 0.966006, 0.358681, 0.231625, 0.0742771, 0.925698, 0.39544, 0.163441, 0.0563207, 0.375602, 0.192107, 0.983945, 0.0693443, 0.459649, 0.0862239, 0.0300678, 0.580992, 0.100373, 0.687713, 0.723878, 0.49037, 0.332304, 0.772052, 0.663358, 0.547988, 0.416127, 0.983808, 0.897234, 0.234652, 0.873977, 0.726734, 0.516347, 0.0135771, 0.0687126, 0.898031, 0.745826, 0.945414, 0.596578, 0.250961, 0.680425, 0.70184, 0.3849, 0.235968, 0.650414, 0.174933, 0.871153, 0.376684, 0.740842, 0.896868, 0.203744, 0.228205, 0.368293, 0.851204, 0.779779, 0.390117, 0.196098, 0.334587, 0.965125, 0.258294, 0.994362, 0.552795, 0.00458971, 0.627195, 
0.144484, 0.0193671, 0.172626, 0.863175, 0.681595, 0.424902, 0.995404, 0.866294, 0.594732, 0.0967612, 0.363474, 0.553648, 0.166862, 0.754552, 0.377603, 0.687499, 0.702658, 0.715975, 0.940931, 0.0106362, 0.403338, 0.644233, 0.518175, 0.588178, 0.778881, 0.97979, 0.932757, 0.285407, 0.0743181, 0.806927, 0.169634, 0.388293, 0.29852, 0.45683, 0.0809973, 0.995686, 0.796594, 0.442121, 0.355805, 0.286648, 0.292238, 0.337873, 0.596707, 0.419495, 0.945673, 0.0221655, 0.00437317, 0.258344, 0.594591, 0.435367, 0.240169, 0.0165289, 0.223482, 0.0831526, 0.834681, 0.10958, 0.00299531, 0.239054, 0.820969, 0.40539, 0.9016, 0.940475, 0.436835, 0.150688, 0.0904121, 0.168206, 0.997886, 0.072182, 0.26026, 0.911627, 0.17641, 0.862734, 0.969125, 0.647594, 0.0418974, 0.604534, 0.644808, 0.337931, 0.175766, 0.149241, 0.0669352, 0.498339, 0.588808, 0.207361, 0.633391, 0.676561, 0.332095, 0.392212, 0.386744, 0.313642, 0.581496, 0.587427, 0.812137, 0.838925, 0.553267, 0.239962, 0.883877, 0.277647, 0.615999, 0.277633, 0.654658, 0.569249, 0.264581, 0.285088, 0.919378, 0.310061, 0.154421, 0.81211, 0.0805758, 0.537273, 0.718268, 0.78967, 0.168712, 0.672929, 0.188122, 0.542312, 0.889333, 0.750834, 0.792864, 0.665908, 0.355375, 0.0248623, 0.859699, 0.901034, 0.978931, 0.979496, 0.611859, 0.626397, 0.109279, 0.536334, 0.403299, 0.261629, 0.214884, 0.23765, 0.365601, 0.292705, 0.757067, 0.668028, 0.777016, 0.437371, 0.959043, 0.928364, 0.623429, 0.278643, 0.767776, 0.606668, 0.41287, 0.525901, 0.20721, 0.824017, 0.00833184, 0.340564, 0.477853, 0.646877, 0.437629, 0.272634, 0.977585, 0.720644, 0.767572, 0.251215, 0.519238, 0.458834, 0.257776, 0.190404, 0.419795, 0.833873, 0.0831019, 0.23368, 0.200893, 0.789614, 0.879695, 0.0838073, 0.444594, 0.880399, 0.864939, 0.0978044, 0.66689, 0.973639, 0.918454, 0.0414451, 0.998649, 0.301783, 0.988848, 0.780013, 0.405024, 0.102775, 0.826986, 0.547494, 0.361771, 0.775435, 0.888387, 0.640246, 0.894718, 0.291769, 0.968296, 0.0865464, 0.374765, 0.886514, 0.686886, 0.00595553, 0.295335, 0.0480263, 0.312763, 0.128689, 0.779346, 0.541304, 0.611078, 0.182802, 0.678796, 0.851202, 0.377671, 0.601242, 0.0745997, 0.594458, 0.267097, 0.561975, 0.451893, 0.17634, 0.113208, 0.186267, 0.501896, 0.169807, 0.935481, 0.646633, 0.698092, 0.0125801, 0.89294, 0.0404446, 0.19095, 0.164982, 0.0281738, 0.764832, 0.199218, 0.335745, 0.847957, 0.704248, 0.0827675, 0.551416, 0.318433, 0.0174397, 0.974683, 0.355133, 0.659525, 0.299524, 0.0348649, 0.0830367, 0.892005, 0.484873, 0.35453, 0.303067, 0.0449598, 0.677799, 0.0645166, 0.210052, 0.949441, 0.769507, 0.369312, 0.131324, 0.585159, 0.432039, 0.857668, 0.587967, 0.905259, 0.230036, 0.324217, 0.97751, 0.484565, 0.629018, 0.225109, 0.273108, 0.0643733, 0.968138, 0.272667, 0.280672, 0.172835, 0.25218, 0.572744, 0.658715, 0.373569, 0.0822719, 0.467123, 0.478136, 0.441749, 0.928454, 0.675788, 0.0515112, 0.326865, 0.717996, 0.791608, 0.835636, 0.62661, 0.752345, 0.355879, 0.301332, 0.592752, 0.674709, 0.8171, 0.475421, 0.993677, 0.349471, 0.464499, 0.290476, 0.754061, 0.286752, 0.864602, 0.0794103, 0.744692, 0.902555, 0.243577, 0.541223, 0.17843, 0.990312, 0.398339, 0.645027, 0.548395, 0.843345, 0.648185, 0.435208, 0.886112, 0.4496, 0.906236, 0.411202, 0.590934, 0.954434, 0.235815, 0.281263, 0.548929, 0.153537, 0.184205, 0.7991, 0.677191, 0.436472, 0.644432, 0.266107, 0.611344, 0.479622, 0.51194, 0.219136, 0.853853, 0.436816, 0.6076, 0.59187, 0.491823, 0.839212, 0.0891322, 0.79803, 0.291623, 0.234777, 0.181739, 0.611303, 0.898885, 0.197685, 0.864132, 0.812749, 
0.881813, 0.532308, 0.775868, 0.86234, 0.203799, 0.11907, 0.63381, 0.525241, 0.917297, 0.822734, 0.0351789, 0.783906, 0.197773, 0.0485804, 0.730865, 0.154961, 0.362621, 0.572194, 0.351583, 0.708539, 0.477232, 0.584068, 0.677132, 0.428083, 0.546457, 0.77613, 0.312815, 0.775791, 0.100256, 0.738037, 0.709004, 0.548878, 0.761758, 0.958375, 0.964517, 0.887815, 0.612814, 0.498814, 0.164157, 0.170602, 0.235105, 0.487629, 0.307261, 0.596674, 0.947006, 0.473144, 0.397058, 0.153987, 0.92083, 0.535958, 0.0851835, 0.282776, 0.563342, 0.0861208, 0.311185, 0.867459, 0.810231, 0.131134, 0.769348, 0.221934, 0.908438, 0.355699, 0.0335063, 0.512362, 0.989534, 0.164223, 0.144575, 0.123998, 0.635566, 0.761381, 0.690244, 0.377104, 0.966984, 0.927097, 0.221802, 0.840417, 0.656228, 0.886511, 0.0520425, 0.933234, 0.960195, 0.410967, 0.0131046, 0.98945, 0.410679, 0.416786, 0.335773, 0.867022, 0.473227, 0.218017, 0.824167, 0.042242, 0.286993, 0.479034, 0.0468841, 0.0240143, 0.752881, 0.832831, 0.550008, 0.659603, 0.283631, 0.445006, 0.0618597, 0.795936, 0.605933, 0.622893, 0.118388, 0.0121302, 0.75759, 0.366031, 0.721477, 0.850805, 0.330059, 0.395251, 0.855987, 0.291648, 0.194895, 0.844415, 0.632958, 0.10251, 0.356481, 0.264689, 0.439693, 0.495715, 0.394411, 0.996949, 0.170269, 0.638766, 0.199071, 0.931374, 0.70971, 0.226326, 0.393906, 0.0564923, 0.0181645, 0.114254, 0.756949, 0.0379847, 0.0607238, 0.766953, 0.23202, 0.368807, 0.173989, 0.408378, 0.948673, 0.650679, 0.152183, 0.0212014, 0.0289645, 0.0329749, 0.284794, 0.0103853, 0.242957, 0.598746, 0.0915884, 0.416697, 0.397829, 0.110467, 0.245823, 0.405882, 0.99591, 0.21921, 0.610271, 0.807742, 0.299738, 0.64813, 0.275229, 0.526575, 0.585021, 0.846538, 0.828594, 0.107584, 0.758753, 0.45955, 0.203831, 0.907287, 0.742649, 0.375063, 0.846021, 0.429571, 0.606226, 0.687485, 0.911635, 0.37944, 0.630247, 0.92855, 0.744963, 0.0724244, 0.364864, 0.547149, 0.265658, 0.0431736, 0.640118, 0.409493, 0.416564, 0.965693, 0.868874, 0.681317, 0.813757, 0.456762, 0.807098, 0.135874, 0.728657, 0.109546, 0.884686, 0.296474, 0.278131, 0.702295, 0.0818165, 0.154129, 0.459227, 0.863289, 0.185109, 0.549708, 0.103023, 0.321497, 0.840481, 0.967839, 0.971399, 0.195059, 0.876058, 0.0751044, 0.80548, 0.0878057, 0.933578, 0.578939, 0.212132, 0.66614, 0.541055, 0.794425, 0.940204, 0.312221, 0.578872, 0.249227, 0.682941, 0.936471, 0.233682, 0.497073, 0.855049, 0.727995, 0.122327, 0.860892, 0.92598, 0.103458, 0.271832, 0.541797, 0.353715, 0.746916, 0.119324, 0.621795, 0.776785, 0.0376066, 0.606235, 0.98535, 0.540997, 0.403493, 0.516905, 0.242495, 0.317253, 0.44587, 0.427675, 0.451131, 0.825751, 0.432025, 0.917243, 0.142327, 0.793069, 0.247703, 0.171014, 0.15947, 0.772607, 0.232115, 0.514799, 0.483581, 0.577916, 0.665164, 0.959653, 0.898639, 0.862961, 0.414654, 0.131458, 0.171707, 0.221476, 0.927684, 0.943996, 0.914863, 0.970896, 0.446371, 0.938255, 0.666103, 0.0893337, 0.909624, 0.637013, 0.775433, 0.468151, 0.116681, 0.277571, 0.0764407, 0.162423, 0.384658, 0.920455, 0.387538, 0.779293, 0.593171, 0.372044, 0.476376, 0.176086, 0.524137, 0.0438486, 0.595312, 0.321621, 0.401834, 0.750315, 0.357228, 0.526045, 0.44901, 0.662365, 0.476719, 0.322529, 0.516499, 0.708421, 0.735793, 0.0104833, 0.00532161, 0.463197, 0.98893, 0.221206, 0.418423, 0.321729, 0.799445, 0.293395, 0.265432, 0.287705, 0.628185, 0.00118771, 0.25694, 0.0278366, 0.159475, 0.803109, 0.498007, 0.944893, 0.0641895, 0.717842, 0.641131, 0.441682, 0.0740112, 0.0526145, 0.887327, 0.938251, 0.0894169, 0.0215554, 0.197522, 0.752314, 0.639109, 
0.268944, 0.189877, 0.250059, 0.973319, 0.00784774, 0.182758, 0.0961477, 0.145629, 0.0574231, 0.289511, 0.427288, 0.59409, 0.809736, 0.593583, 0.826503, 0.0167115, 0.0395654, 0.175921, 0.918072, 0.988553, 0.195865, 0.0493576, 0.256882, 0.0993088, 0.267146, 0.370154, 0.612375, 0.240145, 0.368014, 0.974433, 0.817853, 0.255606, 0.5134, 0.888767, 0.376799, 0.301158, 0.807284, 0.674619, 0.710718, 0.407684, 0.364833, 0.683545, 0.551968, 0.0888487, 0.10451, 0.562518, 0.706546, 0.0930773, 0.0180842, 0.271144, 0.807563, 0.897958, 0.2587, 0.616013, 0.805699, 0.599381, 0.368372, 0.351136, 0.829284, 0.260464, 0.0157136, 0.516772, 0.42577, 0.189366, 0.951162, 0.870767, 0.96263, 0.717807, 0.0699912, 0.409069, 0.258555, 0.44689, 0.952556, 0.650453, 0.0565907, 0.944194, 0.794816, 0.349263, 0.111698, 0.0893996, 0.333501, 0.918001, 0.16194, 0.0164505, 0.899396, 0.361277, 0.556422, 0.99869, 0.182667, 0.442393, 0.996485, 0.710526, 0.292991, 0.498181, 0.609877, 0.486425, 0.313886, 0.949695, 0.989009, 0.863366, 0.155735, 0.0421055, 0.496073, 0.535372, 0.438363, 0.526073, 0.700835, 0.148732, 0.446968, 0.0329374, 0.193337, 0.155741, 0.714287, 0.411356, 0.865726, 0.681642, 0.162871, 0.574011, 0.631729, 0.896857, 0.669892, 0.825005, 0.0815891, 0.563653, 0.476689, 0.251805, 0.972645, 0.197786, 0.981898, 0.207308, 0.901507, 0.0791852, 0.124701, 0.418809, 0.83853, 0.512213, 0.0912239, 0.0111224, 0.300854, 0.897104, 0.849134, 0.489731, 0.0449163, 0.541635, 0.324326, 0.943505, 0.921968, 0.118138, 0.760508, 0.376111, 0.0268056, 0.173483, 0.0219624, 0.382466, 0.869401, 0.52782, 0.270667, 0.00334581, 0.65812, 0.494211, 0.941646, 0.80405, 0.507431, 0.183213, 0.403965, 0.243346, 0.857107, 0.348323, 0.0825182, 0.113305, 0.365823, 0.740004, 0.959118, 0.472712, 0.804029, 0.95695, 0.184744, 0.237458, 0.607636, 0.653327, 0.887677, 0.443347, 0.897552, 0.808931, 0.770809, 0.450934, 0.192356, 0.88196, 0.731589, 0.659181, 0.454563, 0.427396, 0.643558, 0.119666, 0.162486, 0.865478, 0.973537, 0.005524, 0.501481, 0.676537, 0.572447, 0.650847, 0.320487, 0.131539, 0.631316, 0.775465, 0.973387, 0.48632, 0.908495, 0.863751, 0.501378, 0.217811, 0.551988, 0.458329, 0.926665, 0.796882, 0.53953, 0.673495, 0.759102, 0.559385, 0.870888, 0.917354, 0.664628, 0.591642, 0.721897, 0.81984, 0.348603, 0.618429, 0.393732, 0.612639, 0.559268, 0.594818, 0.0240865, 0.919583, 0.951564, 0.912822, 0.834258, 0.227765, 0.858413, 0.558966, 0.99628, 0.210777, 0.190269, 0.489516, 0.886066, 0.797631, 0.721393, 0.648251, 0.123166, 0.292298, 0.342009, 0.978376, 0.340087, 0.705938, 0.340266, 0.735755, 0.386855, 0.924698, 0.364137, 0.909995, 0.0349718, 0.60813, 0.140911, 0.0576143, 0.461955, 0.563707, 0.118206, 0.568721, 0.390357, 0.201702, 0.828934, 0.212367, 0.604297, 0.340181, 0.35123, 0.143424, 0.795792, 0.566785, 0.805546, 0.770922, 0.719113, 0.0990554, 0.522208, 0.634463, 0.874989, 0.48286, 0.604899, 0.245499, 0.742334, 0.144995, 0.511141, 0.273659, 0.832553, 0.761872, 0.187341, 0.0158627, 0.68155, 0.612817, 0.29456, 0.387387, 0.823899, 0.620883, 0.673587, 0.196102, 0.328783, 0.0783226, 0.117904, 0.909313, 0.223046, 0.784899, 0.788061, 0.829547, 0.569972, 0.590199, 0.759869, 0.851828, 0.624583, 0.784306, 0.275525, 0.951656, 0.269522, 0.61056, 0.767513, 0.546942, 0.609999, 0.330411, 0.971002, 0.415065, 0.630046, 0.138175, 0.809245, 0.781137, 0.885986, 0.952628, 0.18252, 0.0135932, 0.925388, 0.802527, 0.711905, 0.459045, 0.469015, 0.454683, 0.803198, 0.738985, 0.589639, 0.729487, 0.0206505, 0.0471438, 0.555607, 0.647299, 0.710894, 0.0232273, 0.473536, 0.992742, 
0.138496, 0.551738, 0.395757, 0.0232328, 0.776831, 0.40743, 0.126116, 0.762352, 0.856934, 0.938773, 0.49775, 0.815219, 0.247492, 0.936613, 0.516516, 0.55747, 0.999511, 0.645613, 0.704286, 0.552449, 0.451766, 0.287689, 0.48226, 0.430813, 0.165118, 0.618992, 0.241117, 0.314003, 0.0530261, 0.728283, 0.376294, 0.125774, 0.137133, 0.151684, 0.828655, 0.241104, 0.893395, 0.908325, 0.991044, 0.639374, 0.962975, 0.891643, 0.271515, 0.423306, 0.84826, 0.677847, 0.288955, 0.772833, 0.922881, 0.58682, 0.769327, 0.389095, 0.431119, 0.777751, 0.148845, 0.961312, 0.659001, 0.0589023, 0.141155, 0.19504, 0.000765664, 0.816062, 0.427383, 0.250311, 0.715713, 0.857908, 0.298356, 0.810578, 0.413091, 0.0148862, 0.915761, 0.147426, 0.356771, 0.166736, 0.0924138, 0.400727, 0.262604, 0.740243, 0.543766, 0.941331, 0.298581, 0.045212, 0.998012, 0.270978, 0.364636, 0.573023, 0.443122, 0.835473, 0.155941, 0.104697, 0.907616, 0.99258, 0.798865, 0.26, 0.648707, 0.805339, 0.406667, 0.67507, 0.974242, 0.664198, 0.758657, 0.0178747, 0.738077, 0.241809, 0.659722, 0.440549, 0.282782, 0.306465, 0.0758013, 0.14522, 0.191062, 0.772218, 0.321738, 0.802261, 0.739133, 0.911742, 0.405674, 0.453852, 0.790895, 0.228438, 0.133695, 0.0672329, 0.271262, 0.602318, 0.496663, 0.839782, 0.765948, 0.762234, 0.572786, 0.678214, 0.367012, 0.054096, 0.634219, 0.389622, 0.140702, 0.262828, 0.736748, 0.109723, 0.403525, 0.347585, 0.218126, 0.661328, 0.216073, 0.839315, 0.389476, 0.173967, 0.6853, 0.972736, 0.316608, 0.91018, 0.733169, 0.836239, 0.79083, 0.289316, 0.900352, 0.337066, 0.19927, 0.252872, 0.0403512, 0.334056, 0.0458046, 0.0125463, 0.518202, 0.131157, 0.891323, 0.101934, 0.745387, 0.0230805, 0.840668, 0.509314, 0.817136, 0.846837, 0.179772, 0.110293, 0.863004, 0.0407414, 0.15709, 0.723, 0.749676, 0.599542, 0.539487, 0.475971, 0.903518, 0.562761, 0.176217, 0.237273, 0.183964, 0.532962, 0.593046, 0.408261, 0.17082, 0.281051, 0.722534, 0.821272, 0.287547, 0.273163, 0.736222, 0.270509, 0.289959, 0.992854, 0.502057, 0.599229, 0.132384, 0.629982, 0.995445, 0.437534, 0.366953, 0.374743, 0.170458, 0.602025, 0.467811, 0.144916, 0.938602, 0.0962815, 0.587758, 0.580466, 0.0733088, 0.418199, 0.247033, 0.625023, 0.32498, 0.975208, 0.204406, 0.637679, 0.6115, 0.0876326, 0.603851, 0.447662, 0.64321, 0.673376, 0.964428, 0.0946905, 0.195538, 0.878627, 0.338571, 0.0178136, 0.476829, 0.554766, 0.411815, 0.449492, 0.0729066, 0.969491, 0.359562, 0.494517, 0.484236, 0.919227, 0.600618, 0.625219, 0.794333, 0.805586, 0.666122, 0.965683, 0.528569, 0.891717, 0.140393, 0.79759, 0.577391, 0.763289, 0.20801, 0.177661, 0.371169, 0.548201, 0.846194, 0.732316, 0.986391, 0.0623603, 0.172577, 0.159146, 0.768465, 0.658948, 0.531107, 0.221383, 0.436848, 0.430568, 0.0506848, 0.700965, 0.515319, 0.699194, 0.20634, 0.210227, 0.3904, 0.857165, 0.157405, 0.961468, 0.495033, 0.184921, 0.846398, 0.330724, 0.634038, 0.189147, 0.450485, 0.187222, 0.404023, 0.00236388, 0.931786, 0.135573, 0.602142, 0.618866, 0.252505, 0.54096, 0.852662, 0.739679, 0.097013, 0.480278, 0.783236, 0.765232, 0.190613, 0.394705, 0.132664, 0.997013, 0.99244, 0.254413, 0.434662, 0.0958931, 0.317958, 0.274899, 0.0537962, 0.536602, 0.946362, 0.637584, 0.48559, 0.726569, 0.566276, 0.296169, 0.64028, 0.349356, 0.470348, 0.755729, 0.446221, 0.0269839, 0.0272818, 0.660063, 0.360886, 0.0748171, 0.233837, 0.522727, 0.147242, 0.679103, 0.0294322, 0.0387432, 0.103845, 0.968886, 0.713026, 0.583265, 0.525373, 0.332164, 0.733758, 0.127217, 0.599796, 0.964788, 0.981505, 0.164519, 0.730032, 0.858293, 0.659433, 
0.170366, 0.972842, 0.913257, 0.715129, 0.538941, 0.779474, 0.241045, 0.736057, 0.561962, 0.0228246, 0.896983, 0.948562, 0.558583, 0.019025, 0.158188, 0.873232, 0.242444, 0.943358, 0.375036, 0.246231, 0.665553, 0.113559, 0.682651, 0.451092, 0.763624, 0.958767, 0.520576, 0.860918, 0.606418, 0.771554, 0.691394, 0.00944814, 0.654227, 0.630449, 0.997169, 0.102747, 0.927235, 0.765837, 0.641497, 0.467332, 0.759331, 0.607432, 0.104646, 0.813646, 0.736717, 0.538414, 0.660265, 0.562663, 0.787445, 0.863065, 0.685598, 0.701698, 0.799293, 0.882257, 0.596097, 0.838928, 0.720528, 0.723564, 0.443364, 0.773785, 0.157988, 0.492769, 0.500118, 0.229944, 0.268233, 0.704697, 0.21838, 0.563423, 0.521119, 0.187359, 0.703179, 0.539207, 0.18224, 0.308922, 0.597369, 0.576366, 0.525489, 0.940259, 0.413666, 0.654303, 0.379866, 0.152853, 0.318109, 0.169557, 0.301647, 0.948932, 0.198906, 0.563176, 0.808374, 0.794709, 0.281876, 0.51802, 0.504197, 0.979889, 0.134977, 0.0674366, 0.571003, 0.0545859, 0.852502, 0.817411, 0.277611, 0.850023, 0.86635, 0.843949, 0.767062, 0.0243032, 0.832766, 0.103435, 0.766998, 0.0809732, 0.868137, 0.0436018, 0.767215, 0.944113, 0.26165, 0.535751, 0.891189, 0.613507, 0.230717, 0.826793, 0.90983, 0.902705, 0.2274, 0.0591369, 0.0952792, 0.76841, 0.271257, 0.892712, 0.858496, 0.559836, 0.746414, 0.649751, 0.923867, 0.267264, 0.476589, 0.98997, 0.382942, 0.643292, 0.980777, 0.235266, 0.0795356, 0.206256, 0.392724, 0.00524073, 0.726938, 0.22979, 0.175653, 0.370933, 0.754208, 0.518768, 0.0731024, 0.374884, 0.526815, 0.820789, 0.698781, 0.684112, 0.947069, 0.528096, 0.812364, 0.726435, 0.275394, 0.937842, 0.677265, 0.805396, 0.649637, 0.666419, 0.121323, 0.0619148, 0.726193, 0.8854, 0.835788, 0.411723, 0.550008, 0.193728, 0.201855, 0.484289, 0.227465, 0.620363, 0.098864, 0.847027, 0.889017, 0.961642, 0.284766, 0.71569, 0.210706, 0.268288, 0.654382, 0.933237, 0.608494, 0.297602, 0.985109, 0.341269, 0.574067, 0.951014, 0.381361, 0.0390001, 0.132829, 0.400454, 0.623617, 0.391841, 0.147822, 0.559094, 0.753036, 0.176367, 0.468801, 0.916402, 0.505478, 0.0937695, 0.3424, 0.265769, 0.84268, 0.799894, 0.612063, 0.615141, 0.652949, 0.514943, 0.659052, 0.381337, 0.0512221, 0.894979, 0.214731, 0.300755, 0.317488, 0.539459, 0.943168, 0.74588, 0.951298, 0.374936, 0.788679, 0.522414, 0.262568, 0.380122, 0.950226, 0.80637, 0.695311, 0.663997, 0.651776, 0.567625, 0.637554, 0.88283, 0.502462, 0.651799, 0.304513, 0.744262, 0.630791, 0.874436, 0.468431, 0.468395, 0.156895, 0.752278, 0.444929, 0.313451, 0.772185, 0.686227, 0.266354, 0.352239, 0.556416, 0.175538, 0.886954, 0.80055, 0.592855, 0.117733, 0.890135, 0.694178, 0.143462, 0.183786, 0.111361, 0.751949, 0.910558, 0.177295, 0.617773, 0.980424, 0.186026, 0.934027, 0.782383, 0.998444, 0.570814, 0.463394, 0.625836, 0.678522, 0.186423, 0.421975, 0.51969, 0.56891, 0.241427, 0.587216, 0.431899, 0.61606, 0.250613, 0.313678, 0.402734, 0.15466, 0.503842, 0.558041, 0.614553, 0.870482, 0.590434, 0.816333, 0.825288, 0.867169, 0.04316, 0.321845, 0.739151, 0.221427, 0.175579, 0.754401, 0.316374, 0.190673, 0.272849, 0.603555, 0.673068, 0.0751547, 0.268687, 0.216717, 0.867664, 0.468306, 0.817736, 0.375376, 0.747197, 0.262925, 0.884748, 0.481215, 0.531307, 0.40478, 0.727778, 0.232204, 0.0111088, 0.109155, 0.225445, 0.95549, 0.580218, 0.0179345, 0.230458, 0.90268, 0.128021, 0.606634, 0.212615, 0.643174, 0.420191, 0.966698, 0.148805, 0.936902, 0.427286, 0.922583, 0.954008, 0.489383, 0.595573, 0.862088, 0.972187, 0.766681, 0.154849, 0.939441, 0.342115, 0.317699, 0.824605, 0.534231, 
0.770833, 0.198245, 0.404023, 0.180527, 0.715574, 0.271331, 0.48273, 0.209219, 0.530061, 0.190801, 0.503903, 0.335641, 0.623987, 0.261913, 0.710136, 0.52045, 0.965192, 0.0952744, 0.121001, 0.031291, 0.641217, 0.498995, 0.98109, 0.553887, 0.115804, 0.0791406, 0.0481144, 0.451669, 0.504631, 0.139138, 0.305004, 0.706337, 0.667417, 0.824515, 0.863433, 0.758306, 0.838828, 0.0628382, 0.386401, 0.444192, 0.772499, 0.59096, 0.143982, 0.455908, 0.797249, 0.754216, 0.89061, 0.0942095, 0.499635, 0.913581, 0.599755, 0.249166, 0.564834, 0.398485, 0.0133317, 0.653655, 0.397257, 0.136592, 0.818902, 0.449319, 0.801893, 0.610453, 0.264237, 0.221726, 0.337326, 0.35534, 0.925313, 0.197837, 0.36576, 0.24177, 0.369551, 0.862368, 0.927855, 0.186939, 0.281315, 0.444484, 0.970839, 0.530399, 0.663522, 0.480791, 0.298823, 0.834414, 0.000362955, 0.394165, 0.076971, 0.0220796, 0.428804, 0.889852, 0.85644, 0.208431, 0.493519, 0.643436, 0.661069, 0.777199, 0.837695, 0.726576, 0.312783, 0.344399, 0.430382, 0.235437, 0.83473, 0.172753, 0.526748, 0.885962, 0.904429, 0.487373, 0.220492, 0.999547, 0.0381287, 0.0554077, 0.266235, 0.690878, 0.0281044, 0.742065, 0.212958, 0.194056, 0.224718, 0.566314, 0.0771658, 0.213819, 0.513215, 0.282513, 0.362161, 0.318223, 0.442504, 0.419344, 0.238926, 0.997876, 0.386875, 0.832378, 0.0568294, 0.950657, 0.294634, 0.594692, 0.936411, 0.143774, 0.912139, 0.0267354, 0.235006, 0.64102, 0.491119, 0.00979097, 0.128689, 0.112191, 0.698623, 0.314074, 0.018156, 0.894285, 0.51748, 0.494384, 0.121172, 0.676978, 0.220353, 0.551305, 0.360828, 0.591426, 0.477987, 0.856079, 0.568853, 0.32561, 0.326201, 0.436064, 0.724033, 0.0356362, 0.790356, 0.458894, 0.329052, 0.613057, 0.922875, 0.980869, 0.634058, 0.0913828, 0.471529, 0.678107, 0.72559, 0.619647, 0.285882, 0.723729, 0.215217, 0.381512, 0.522362, 0.999275, 0.941393, 0.5391, 0.346071, 0.11983, 0.938514, 0.893123, 0.149674, 0.216018, 0.62274, 0.590566, 0.453861, 0.585271, 0.159612, 0.832163, 0.688786, 0.878436, 0.768609, 0.0182591, 0.521624, 0.242915, 0.319346, 0.895387, 0.680631, 0.586943, 0.36178, 0.195096, 0.940498, 0.0713945, 0.784851, 0.452908, 0.014823, 0.380542, 0.769342, 0.334072, 0.542043, 0.073854, 0.831959, 0.192065, 0.772846, 0.0207625, 0.695405, 0.122613, 0.0689288, 0.0774109, 0.402734, 0.918181, 0.492536, 0.943891, 0.396396, 0.338269, 0.174756, 0.858506, 0.706656, 0.232598, 0.902888, 0.0232391, 0.933194, 0.840573, 0.432253, 0.994002, 0.237402, 0.540469, 0.890985, 0.600949, 0.0360909, 0.994874, 0.247599, 0.528897, 0.132192, 0.559757, 0.354476, 0.608939, 0.0500324, 0.221974, 0.725789, 0.916872, 0.975481, 0.47478, 0.476658, 0.718356, 0.557299, 0.507325, 0.638196, 0.371043, 0.169353, 0.94656, 0.00855371, 0.233763, 0.725029, 0.404322, 0.772414, 0.15335, 0.550963, 0.849622, 0.733453, 0.0758419, 0.683996, 0.503317, 0.970196, 0.521742, 0.68888, 0.468305, 0.0719073, 0.678399, 0.290667, 0.75198, 0.0459418, 0.157773, 0.245017, 0.637026, 0.984097, 0.739316, 0.740636, 0.514009, 0.565287, 0.688721, 0.913287, 0.382862, 0.967688, 0.0467683, 0.424222, 0.127091, 0.927243, 0.897798, 0.287813, 0.0502903, 0.686632, 0.683619, 0.814509, 0.0899549, 0.381423, 0.527956, 0.685924, 0.420243, 0.704938, 0.65919, 0.648959, 0.94251, 0.459588, 0.25664, 0.848237, 0.887238, 0.746489, 0.893294, 0.295475, 0.0295005, 0.371808, 0.0415862, 0.544484, 0.14464, 0.793934, 0.782912, 0.0538201, 0.557197, 0.0630537, 0.838322, 0.134248, 0.553618, 0.645032, 0.354804, 0.617168, 0.876732, 0.130592, 0.679223, 0.156673, 0.12631, 0.148912, 0.926549, 0.372323, 0.496342, 0.904119, 0.747807, 
0.696363, 0.275942, 0.596396, 0.375462, 0.012028, 0.548492, 0.989382, 0.475792, 0.459052, 0.100197, 0.258228, 0.912339, 0.810651, 0.868541, 0.680623, 0.0184828, 0.128553, 0.0682615, 0.761748, 0.993481, 0.716995, 0.273896, 0.452432, 0.442725, 0.699469, 0.324601, 0.248393, 0.531693, 0.774441, 0.942833, 0.184796, 0.129317, 0.90462, 0.692213, 0.248467, 0.585284, 0.988108, 0.0619278, 0.0377834, 0.696714, 0.932663, 0.302306, 0.480454, 0.239548, 0.267572, 0.34719, 0.0419092, 0.747019, 0.904019, 0.286885, 0.848883, 0.935396, 0.334755, 0.21997, 0.151156, 0.127164, 0.303261, 0.496119, 0.747873, 0.825951, 0.362257, 0.195735, 0.127633, 0.0361607, 0.22829, 0.264407, 0.254855, 0.61719, 0.820758, 0.930943, 0.22506, 0.0200303, 0.792105, 0.652014, 0.966935, 0.57851, 0.712762, 0.388077, 0.60813, 0.624291, 0.928777, 0.870221, 0.784416, 0.53137, 0.819399, 0.849431, 0.971249, 0.373368, 0.52572, 0.934896, 0.184968, 0.796003, 0.930155, 0.336169, 0.694969, 0.305976, 0.513695, 0.873715, 0.923034, 0.116645, 0.320457, 0.112744, 0.395056, 0.483268, 0.403615, 0.246422, 0.112713, 0.955845, 0.530588, 0.378185, 0.203345, 0.49863, 0.981093, 0.593413, 0.856221, 0.647784, 0.34789, 0.538461, 0.176787, 0.760408, 0.681924, 0.610988, 0.707563, 0.299869, 0.734003, 0.1127, 0.0834526, 0.0220461, 0.117956, 0.0304949, 0.320282, 0.515334, 0.132549, 0.405509, 0.761268, 0.180804, 0.585011, 0.675927, 0.933179, 0.583383, 0.181295, 0.858632, 0.61501, 0.115127, 0.160397, 0.286001, 0.0365904, 0.612176, 0.890251, 0.855244, 0.544736, 0.600654, 0.192526, 0.207142, 0.585061, 0.723241, 0.0305854, 0.906201, 0.300921, 0.658964, 0.968552, 0.278554, 0.0310787, 0.717203, 0.248136, 0.630781, 0.270253, 0.289819, 0.734087, 0.720263, 0.171219, 0.899631, 0.564664, 0.0258576, 0.519758, 0.644002, 0.701642, 0.869865, 0.786975, 0.717549, 0.0491178, 0.434058, 0.394163, 0.885417, 0.727599, 0.785278, 0.242689, 0.492116, 0.85453, 0.155595, 0.728303, 0.856938, 0.399704, 0.241284, 0.850065, 0.922071, 0.877973, 0.168211, 0.851038, 0.157012, 0.303181, 0.356058, 0.583792, 0.191925, 0.941222, 0.492563, 0.144931, 0.223785, 0.814394, 0.338385, 0.540184, 0.774187, 0.119114, 0.206181, 0.735758, 0.575481, 0.158476, 0.547546, 0.279155, 0.878953, 0.495945, 0.576014, 0.457666, 0.222405, 0.193601, 0.204385, 0.90942, 0.722909, 0.115831, 0.996332, 0.116538, 0.109814, 0.789295, 0.787046, 0.276958, 0.493615, 0.708681, 0.859758, 0.212426, 0.441307, 0.0215186, 0.382688, 0.349373, 0.830311, 0.364945, 0.78871, 0.092119, 0.411143, 0.180855, 0.643677, 0.274428, 0.928137, 0.301677, 0.238322, 0.631149, 0.876727, 0.617039, 0.819574, 0.320929, 0.566256, 0.091213, 0.720415, 0.451016, 0.841243, 0.0637418, 0.773849, 0.523216, 0.129316, 0.228553, 0.83839, 0.720857, 0.4508, 0.729326, 0.283889, 0.521687, 0.670042, 0.310788, 0.237641, 0.198172, 0.857647, 0.590305, 0.486897, 0.895551, 0.976329, 0.0461186, 0.764466, 0.156225, 0.150065, 0.121629, 0.339321, 0.864672, 0.0475929, 0.181843, 0.930669, 0.749646, 0.413713, 0.758736, 0.265734, 0.811058, 0.83143, 0.148884, 0.814754, 0.00183105, 0.304799, 0.281774, 0.381291, 0.598088, 0.963552, 0.423599, 0.30476, 0.107936, 0.817804, 0.930188, 0.0292939, 0.859812, 0.991556, 0.336604, 0.782314, 0.388782, 0.295328, 0.884533, 0.257462, 0.300516, 0.591447, 0.723904, 0.326247, 0.146774, 0.97647, 0.297485, 0.589497, 0.970214, 0.0410628, 0.921568, 0.62248, 0.339269, 0.495581, 0.0767454, 0.966806, 0.0589158, 0.396406, 0.995582, 0.745146, 0.0589678, 0.0342307, 0.431874, 0.567572, 0.336451, 0.244786, 0.460678, 0.862148, 0.97088, 0.511187, 0.802736, 0.30921, 0.617095, 
0.445207, 0.0750825, 0.394839, 0.222186, 0.367263, 0.00337537, 0.627881, 0.134698, 0.665422, 0.879893, 0.622672, 0.0302323, 0.885841, 0.537989, 0.911056, 0.278257, 0.716116, 0.269452, 0.979533, 0.410223, 0.618212, 0.317339, 0.719066, 0.905226, 0.95324, 0.957791, 0.250142, 0.522632, 0.360267, 0.357336, 0.776491, 0.882933, 0.100169, 0.116828, 0.166653, 0.768858, 0.850701, 0.0346909, 0.375351, 0.318612, 0.881048, 0.778074, 0.45394, 0.19884, 0.211993, 0.998809, 0.286914, 0.757266, 0.413899, 0.240068, 0.399836, 0.000750573, 0.158225, 0.279642, 0.761147, 0.492584, 0.684564, 0.0415782, 0.476355, 0.126179, 0.116378, 0.0221162, 0.97848, 0.891277, 0.45055, 0.782095, 0.156758, 0.794473, 0.618912, 0.531278, 0.602697, 0.399455, 0.702711, 0.263113, 0.0445142, 0.753808, 0.37428, 0.223015, 0.836329, 0.606861, 0.735915, 0.142172, 0.587444, 0.767676, 0.170938, 0.590825, 0.152644, 0.675136, 0.514966, 0.869304, 0.124112, 0.770372, 0.892914, 0.0584586, 0.0171729, 0.441062, 0.706832, 0.840641, 0.812824, 0.782498, 0.530312, 0.0831706, 0.606121, 0.082016, 0.988946, 0.872385, 0.134207, 0.35928, 0.712274, 0.225883, 0.218709, 0.332244, 0.338422, 0.117309, 0.372851, 0.463702, 0.45301, 0.0303854, 0.231422, 0.697024, 0.482024, 0.0043619, 0.884546, 0.530352, 0.722058, 0.657565, 0.596428, 0.221456, 0.47345, 0.447847, 0.486759, 0.913181, 0.218909, 0.837963, 0.381696, 0.285305, 0.795781, 0.876721, 0.726653, 0.877756, 0.25639, 0.326533, 0.0298693, 0.242767, 0.284716, 0.0232318, 0.408906, 0.192667, 0.874931, 0.000391752, 0.660105, 0.207311, 0.813546, 0.00368367, 0.867223, 0.68173, 0.451206, 0.20393, 0.130545, 0.513202, 0.373289, 0.04779, 0.591133, 0.360522, 0.828792, 0.204391, 0.261686, 0.840183, 0.753748, 0.950143, 0.54666, 0.394118, 0.625572, 0.318039, 0.074424, 0.168893, 0.760687, 0.81346, 0.447106, 0.127068, 0.881654, 0.533629, 0.0562974, 0.0354889, 0.865537, 0.685353, 0.326312, 0.0881701, 0.868933, 0.603044, 0.645942, 0.644711, 0.657063, 0.480383, 0.246866, 0.183549, 0.0892146, 0.441892, 0.531132, 0.954091, 0.159256, 0.0835811, 0.493946, 0.476517, 0.942527, 0.408721, 0.00653435, 0.0834318, 0.524329, 0.186832, 0.629579, 0.172714, 0.0858283, 0.0689363, 0.34424, 0.782379, 0.0185725, 0.584837, 0.174917, 0.487611, 0.445817, 0.886239, 0.468502, 0.476558, 0.229707, 0.129198, 0.272364, 0.0300387, 0.101355, 0.136276, 0.358289, 0.987152, 0.239242, 0.682611, 0.661904, 0.0805846, 0.14475, 0.775397, 0.910405, 0.1246, 0.222585, 0.058568, 0.072662, 0.85029, 0.368821, 0.360277, 0.569662, 0.465136, 0.295499, 0.757381, 0.561263, 0.152508, 0.579781, 0.762937, 0.865448, 0.315933, 0.0928943, 0.355187, 0.705349, 0.28899, 0.378469, 0.625726, 0.386859, 0.00989368, 0.203887, 0.379569, 0.405408, 0.269942, 0.574519, 0.546921, 0.307467, 0.83233, 0.27854, 0.484269, 0.506514, 0.766033, 0.202428, 0.705517, 0.468779, 0.462951, 0.917114, 0.0420313, 0.63035, 0.690599, 0.0984404, 0.365548, 0.498503, 0.0900206, 0.446141, 0.784607, 0.971757, 0.862731, 0.720257, 0.779566, 0.252187, 0.140158, 0.292969, 0.943034, 0.249158, 0.540571, 0.14069, 0.322399, 0.332654, 0.872873, 0.10853, 0.0938648, 0.195022, 0.108344, 0.647948, 0.0539708, 0.841677, 0.903013, 0.847462, 0.817227, 0.834615, 0.440096, 0.284655, 0.966755, 0.884198, 0.0708221, 0.448811, 0.114418, 0.655087, 0.550545, 0.619192, 0.382552, 0.635639, 0.0903912, 0.80223, 0.885799, 0.994593, 0.636088, 0.130848, 0.320617, 0.751163, 0.374591, 0.494888, 0.744976, 0.348855, 0.350868, 0.705441, 0.742399, 0.336918, 0.180846, 0.0451081, 0.68227, 0.515551, 0.0282521, 0.26814, 0.170132, 0.103779, 0.221597, 0.568927, 
0.237043, 0.541676, 0.74259, 0.492674, 0.763025, 0.511421, 0.85459, 0.776193, 0.690783, 0.492501, 0.419218, 0.140563, 0.171486, 0.772961, 0.983848, 0.565914, 0.830099, 0.688199, 0.469671, 0.425257, 0.248831, 0.229074, 0.853564, 0.589752, 0.947709, 0.729819, 0.236991, 0.551146, 0.225102, 0.181392, 0.706441, 0.0775105, 0.0841077, 0.35211, 0.978525, 0.218744, 0.750949, 0.704113, 0.449898, 0.966484, 0.0106887, 0.820635, 0.236401, 0.880151, 0.271564, 0.610231, 0.88292, 0.367338, 0.395313, 0.988624, 0.579237, 0.585322, 0.820054, 0.511555, 0.530661, 0.986911, 0.414232, 0.437382, 0.650508, 0.925384, 0.733178, 0.985954, 0.526044, 0.0999448, 0.223699, 0.661449, 0.275497, 0.595932, 0.0252661, 0.358223, 0.00490222, 0.00723672, 0.450841, 0.913175, 0.664549, 0.09219, 0.813527, 0.874422, 0.87645, 0.261922, 0.181881, 0.41414, 0.879238, 0.32461, 0.315214, 0.464158, 0.237772, 0.500516, 0.0593136, 0.878194, 0.0388181, 0.269684, 0.555233, 0.208229, 0.735903, 0.43728, 0.203055, 0.55765, 0.262298, 0.280388, 0.616353, 0.664842, 0.224197, 0.649632, 0.633621, 0.380696, 0.0245201, 0.483146, 0.221768, 0.441952, 0.959894, 0.0493276, 0.616725, 0.104681, 0.230533, 0.707058, 0.500384, 0.05228, 0.134377, 0.0405846, 0.369997, 0.747358, 0.345489, 0.926711, 0.307617, 0.229975, 0.713876, 0.644125, 0.119532, 0.878276, 0.992438, 0.958023, 0.439826, 0.681482, 0.419516, 0.488463, 0.180604, 0.0648086, 0.753004, 0.881174, 0.0430597, 0.792289, 0.384788, 0.165759, 0.953536, 0.941777, 0.103897, 0.358685, 0.223135, 0.700617, 0.645657, 0.199259, 0.724836, 0.258711, 0.771646, 0.623592, 0.7923, 0.525011, 0.79812, 0.917418, 0.0408786, 0.70516, 0.685949, 0.845311, 0.848584, 0.816048, 0.577728, 0.0632474, 0.0337705, 0.749029, 0.354352, 0.115613, 0.405688, 0.354145, 0.0960672, 0.545181, 0.128382, 0.707834, 0.777129, 0.455415, 0.291398, 0.202387, 0.312735, 0.231164, 0.915044, 0.105688, 0.992714, 0.475474, 0.547683, 0.536471, 0.12417, 0.890503, 0.969704, 0.398818, 0.652417, 0.43562, 0.459029, 0.526732, 0.623382, 0.328677, 0.629836, 0.921082, 0.18711, 0.214384, 0.233311, 0.226147, 0.797971, 0.534456, 0.256337, 0.689548, 0.193202, 0.503527, 0.132612, 0.0113974, 0.561125, 0.951401, 0.642326, 0.713237, 0.257228, 0.287788, 0.0727935, 0.992315, 0.492784, 0.753093, 0.787538, 0.302604, 0.276507, 0.421724, 0.53667, 0.521195, 0.914161, 0.477701, 0.667751, 0.834651, 0.719402, 0.622925, 0.610117, 0.0918504, 0.827973, 0.776474, 0.997031, 0.604986, 0.693104, 0.920856, 0.953291, 0.753704, 0.937652, 0.49416, 0.0343811, 0.210229, 0.718012, 0.00326219, 0.662774, 0.545044, 0.605222, 0.793254, 0.812442, 0.223717, 0.222193, 0.414127, 0.109099, 0.0470771, 0.81452, 0.52753, 0.40668, 0.862339, 0.45624, 0.403171, 0.949618, 0.0508641, 0.107696, 0.548259, 0.703493, 0.77282, 0.897441, 0.960316, 0.375285, 0.672975, 0.0594269, 0.211847, 0.596767, 0.375966, 0.264368, 0.153689, 0.43175, 0.334658, 0.792881, 0.712137, 0.255055, 0.835249, 0.19435, 0.523901, 0.747742, 0.021932, 0.825722, 0.12995, 0.746476, 0.0302897, 0.143445, 0.633426, 0.667043, 0.062095, 0.378326, 0.992086, 0.460239, 0.868868, 0.510366, 0.870358, 0.384342, 0.225636, 0.0825906, 0.0475967, 0.202343, 0.63943, 0.753755, 0.00888219, 0.92597, 0.4676, 0.526284, 0.312375, 0.526216, 0.663695, 0.0312666, 0.102075, 0.0150701, 0.693283, 0.997117, 0.954093, 0.815279, 0.763802, 0.725875, 0.292492, 0.344055, 0.627345, 0.522168, 0.621334, 0.333947, 0.1424, 0.165267, 0.499736, 0.746754, 0.79877, 0.776785, 0.557139, 0.0425215, 0.90292, 0.316732, 0.869703, 0.785483, 0.0158521, 0.94999, 0.484853, 0.980765, 0.216394, 0.536688, 
0.882527, 0.29969, 0.474124, 0.660068, 0.978133, 0.562514, 0.922584, 0.106834, 0.561946, 0.898387, 0.53585, 0.520597, 0.411147, 0.0280199, 0.0521744, 0.98895, 0.0385853, 0.0455102, 0.970508, 0.256327, 0.0389597, 0.624331, 0.274006, 0.378284, 0.33111, 0.482736, 0.923735, 0.0209944, 0.931935, 0.435454, 0.174434, 0.346234, 0.317617, 0.850623, 0.278118, 0.927707, 0.513877, 0.381132, 0.44689, 0.930011, 0.607872, 0.928808, 0.299398, 0.203065, 0.120989, 0.316432, 0.383152, 0.842135, 0.704556, 0.118635, 0.422995, 0.000466669, 0.147807, 0.502291, 0.993403, 0.601303, 0.222253, 0.0296877, 0.0284052, 0.87341, 0.233206, 0.0381863, 0.700907, 0.689801, 0.399134, 0.784999, 0.63014, 0.865453, 0.287433, 0.990328, 0.882574, 0.0774038, 0.778073, 0.484293, 0.738884, 0.684839, 0.468305, 0.0653757, 0.997647, 0.0303581, 0.0133819, 0.670839, 0.505136, 0.607782, 0.915422, 0.861656, 0.149417, 0.707365, 0.602442, 0.647925, 0.511412, 0.956392, 0.726319, 0.30591, 0.561806, 0.297597, 0.527965, 0.43694, 0.773655, 0.763751, 0.493458, 0.224437, 0.938869, 0.546626, 0.641176, 0.676299, 0.120913, 0.244841, 0.913733, 0.392493, 0.380461, 0.917425, 0.346866, 0.946269, 0.115818, 0.19961, 0.900466, 0.0815982, 0.91916, 0.461648, 0.865653, 0.983458, 0.810979, 0.762103, 0.384499, 0.572888, 0.156534, 0.281532, 0.862645, 0.278199, 0.337823, 0.407339, 0.193315, 0.384848, 0.986914, 0.819812, 0.828864, 0.658969, 0.424426, 0.0739355, 0.947452, 0.368272, 0.402025, 0.242601, 0.693957, 0.215303, 0.814995, 0.265798, 0.0264865, 0.286466, 0.015661, 0.583045, 0.291179, 0.669322, 0.511267, 0.229816, 0.131331, 0.730827, 0.74252, 0.93343, 0.720619, 0.612438, 0.259147, 0.112357, 0.586765, 0.0858606, 0.153671, 0.773048, 0.822479, 0.443573, 0.55486, 0.910416, 0.85707, 0.0148852, 0.853716, 0.228104, 0.513679, 0.951789, 0.727959, 0.200735, 0.210806, 0.125079, 0.228391, 0.209873, 0.237775, 0.108994, 0.446076, 0.221169, 0.834191, 0.98984, 0.0542308, 0.58488, 0.315972, 0.254531, 0.828329, 0.904021, 0.875414, 0.150553, 0.0298306, 0.215226, 0.586307, 0.417627, 0.479362, 0.172838, 0.5259, 0.908004, 0.0605249, 0.859353, 0.828878, 0.984935, 0.39564, 0.749108, 0.526107, 0.157768, 0.336767, 0.580207, 0.598838, 0.755481, 0.144988, 0.698507, 0.464909, 0.344876, 0.172977, 0.468696, 0.668333, 0.919779, 0.232349, 0.715044, 0.280844, 0.1119, 0.676872, 0.209419, 0.0207841, 0.770692, 0.610173, 0.803793, 0.47074, 0.186878, 0.269158, 0.749201, 0.540574, 0.915717, 0.523814, 0.571266, 0.10382, 0.811718, 0.731922, 0.468545, 0.708574, 0.323435, 0.610058, 0.45889, 0.356482, 0.981413, 0.812081, 0.802843, 0.140016, 0.27479, 0.879972, 0.976442, 0.0214847, 0.606645, 0.452834, 0.33259, 0.259304, 0.324395, 0.22969, 0.0908657, 0.994912, 0.00520821, 0.219624, 0.419453, 0.820987, 0.279137, 0.787163, 0.742862, 0.702888, 0.0585412, 0.17378, 0.803108, 0.967417, 0.554914, 0.676712, 0.801808, 0.203466, 0.917554, 0.547952, 0.110401, 0.566604, 0.000901547, 0.428874, 0.410979, 0.882269, 0.568531, 0.204807, 0.158472, 0.797748, 0.923136, 0.626076, 0.995379, 0.970147, 0.636004, 0.256138, 0.263984, 0.0188201, 0.836662, 0.891073, 0.815278, 0.213562, 0.265101, 0.287762, 0.853247, 0.162469, 0.27606, 0.396637, 0.0501615, 0.519138, 0.364286, 0.0357294, 0.34329, 0.43058, 0.134315, 0.399785, 0.243176, 0.897879, 0.0347835, 0.977129, 0.964701, 0.992374, 0.5317, 0.855217, 0.0626301, 0.720206, 0.157749, 0.331044, 0.119822, 0.361034, 0.657655, 0.734292, 0.487953, 0.878978, 0.348533, 0.361729, 0.655813, 0.495023, 0.407889, 0.922945, 0.903442, 0.38402, 0.512097, 0.429171, 0.506999, 0.895231, 0.182171, 0.618858, 
0.88438, 0.296033, 0.481725, 0.858442, 0.493233, 0.151025, 0.16046, 0.180032, 0.441936, 0.211194, 0.7617, 0.867114, 0.199935, 0.743809, 0.637003, 0.808767, 0.561247, 0.776601, 0.43555, 0.188716]}
@@ -256,4 +258,4 @@ input0 = { i86: [0.791841, 0.131271, 0.840444, 0.524133, 0.301398, 0.711596, 8.7
output0 = {i85: [0.605085, 0.0733608, 0.0200205, 0.114568, 0.178815, 0.0580576, 0.123144, 0.232838, 0.0916902, 0.111395, 0.138349]}
Example((input0, output0))
-
+"""
diff --git a/tests/nnapi/specs/V1_0/mobilenet_quantized.mod.py b/tests/nnapi/specs/V1_0/mobilenet_quantized.mod.py
index 3f9684950..3a49f0719 100644
--- a/tests/nnapi/specs/V1_0/mobilenet_quantized.mod.py
+++ b/tests/nnapi/specs/V1_0/mobilenet_quantized.mod.py
@@ -1,3 +1,5 @@
+# Commented out: test is too large; mobilenet can be tested using frameworktest instead.
+"""
# Passing weights via shared memory
Configuration.use_shm_for_weights = True
model = Model()
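For reference, the quantized operands in the hunk below use NNAPI's asymmetric uint8 encoding: the operand type string carries "{shape}, scale, zero_point", and a stored byte q decodes to the real value scale * (q - zero_point). A small illustration using the scale and zero point of the i84 parameter below (numpy is assumed here for convenience only; it is not part of the spec):

    import numpy as np

    # Scale and zero point taken from the i84 TENSOR_QUANT8_ASYMM operand below.
    scale, zero_point = 0.00593112036586, 66
    q = np.array([65, 66, 54], dtype=np.uint8)         # sample stored bytes
    real = scale * (q.astype(np.int32) - zero_point)   # dequantize
    print(real)  # approx [-0.00593, 0.0, -0.07117]; a byte equal to the zero point decodes to exactly 0.0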
@@ -222,40 +224,41 @@ i83 = Parameter("op83", "TENSOR_INT32", "{1001}, 0.000139550233143, 0", [-4628,
i84 = Parameter("op84", "TENSOR_QUANT8_ASYMM", "{1001, 1, 1, 1024}, 0.00593112036586, 66", [65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 65, 65, 65, 66, 66, 65, 65, 65, 66, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 66, 66, 65, 65, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 66, 65, 65, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 66, 65, 65, 66, 65, 65, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 66, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 66, 65, 66, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 65, 66, 65, 65, 66, 65, 65, 66, 66, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 66, 66, 65, 65, 65, 66, 65, 65, 65, 66, 66, 66, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 66, 66, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 66, 65, 65, 65, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 66, 65, 65, 66, 65, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 66, 66, 66, 65, 66, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 66, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 66, 66, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 66, 66, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 66, 65, 65, 66, 66, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 66, 65, 66, 66, 66, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 66, 65, 66, 65, 66, 65, 66, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 66, 66, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 66, 65, 66, 65, 66, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 
65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 66, 66, 65, 65, 66, 65, 65, 66, 65, 65, 65, 65, 65, 66, 66, 65, 65, 65, 66, 65, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 66, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 66, 65, 65, 65, 66, 65, 66, 65, 65, 65, 65, 65, 65, 65, 66, 65, 65, 65, 54, 62, 53, 66, 54, 50, 59, 72, 57, 50, 75, 65, 55, 72, 73, 57, 72, 61, 44, 69, 56, 64, 64, 58, 56, 52, 75, 70, 72, 50, 76, 65, 71, 71, 54, 62, 58, 88, 51, 56, 86, 54, 75, 114, 64, 74, 67, 72, 64, 67, 62, 55, 62, 89, 67, 57, 56, 78, 65, 75, 107, 101, 69, 75, 55, 71, 83, 71, 70, 85, 72, 66, 55, 74, 54, 68, 70, 58, 60, 78, 50, 58, 66, 90, 70, 63, 74, 100, 63, 57, 57, 67, 56, 57, 69, 64, 67, 98, 64, 62, 76, 48, 53, 57, 79, 78, 52, 50, 74, 56, 57, 63, 82, 67, 70, 67, 63, 84, 59, 66, 49, 64, 58, 53, 79, 57, 78, 60, 64, 50, 86, 60, 65, 65, 64, 55, 83, 62, 64, 82, 75, 64, 71, 67, 52, 78, 81, 60, 63, 63, 73, 52, 58, 46, 46, 64, 69, 45, 61, 83, 67, 57, 65, 66, 49, 56, 49, 54, 58, 62, 71, 49, 65, 61, 53, 63, 66, 80, 61, 54, 66, 58, 52, 53, 76, 57, 80, 68, 94, 67, 65, 51, 65, 64, 70, 69, 54, 77, 82, 37, 59, 75, 57, 51, 57, 51, 68, 54, 55, 70, 66, 57, 57, 61, 69, 64, 69, 65, 51, 65, 35, 51, 61, 60, 69, 90, 71, 65, 49, 47, 66, 71, 67, 68, 53, 59, 87, 59, 58, 79, 77, 80, 52, 49, 75, 78, 65, 66, 62, 82, 73, 57, 64, 69, 66, 77, 61, 77, 82, 44, 65, 56, 61, 49, 74, 87, 51, 78, 52, 62, 68, 75, 55, 74, 53, 54, 72, 65, 63, 64, 64, 63, 63, 58, 77, 68, 52, 61, 85, 53, 67, 67, 66, 52, 49, 58, 72, 65, 54, 51, 58, 77, 90, 79, 66, 62, 57, 101, 63, 61, 44, 56, 61, 55, 78, 51, 58, 63, 61, 65, 63, 47, 74, 55, 59, 72, 50, 68, 58, 74, 76, 73, 46, 64, 71, 63, 60, 69, 65, 84, 73, 75, 60, 125, 58, 58, 55, 59, 57, 64, 78, 52, 51, 66, 114, 76, 62, 59, 71, 66, 59, 48, 58, 56, 51, 97, 60, 47, 86, 69, 52, 63, 69, 48, 76, 73, 69, 58, 85, 70, 46, 63, 48, 68, 62, 72, 58, 78, 82, 77, 73, 92, 106, 55, 64, 54, 59, 53, 54, 64, 61, 70, 52, 57, 60, 46, 66, 61, 68, 63, 53, 56, 68, 50, 64, 73, 64, 59, 69, 78, 65, 67, 62, 60, 50, 67, 80, 70, 85, 61, 66, 60, 58, 59, 68, 75, 57, 56, 57, 58, 63, 66, 47, 76, 54, 68, 55, 54, 75, 69, 138, 85, 49, 51, 53, 85, 73, 73, 56, 54, 58, 73, 52, 58, 59, 55, 77, 84, 83, 52, 49, 65, 64, 57, 69, 59, 43, 64, 62, 65, 50, 59, 60, 60, 66, 59, 78, 47, 60, 55, 50, 70, 90, 55, 62, 60, 97, 49, 58, 72, 59, 64, 75, 74, 64, 63, 59, 66, 69, 64, 67, 70, 72, 60, 69, 138, 73, 63, 64, 62, 75, 59, 74, 94, 69, 84, 58, 61, 76, 65, 95, 60, 126, 61, 60, 76, 74, 79, 65, 65, 69, 70, 80, 59, 57, 65, 52, 88, 58, 55, 76, 79, 66, 49, 51, 66, 62, 80, 60, 77, 49, 96, 54, 56, 53, 52, 48, 80, 71, 55, 60, 86, 68, 73, 65, 65, 71, 79, 59, 69, 76, 86, 73, 98, 63, 45, 73, 106, 61, 63, 61, 84, 105, 80, 53, 50, 63, 103, 51, 92, 66, 50, 71, 66, 64, 65, 65, 59, 60, 72, 82, 64, 74, 65, 77, 105, 69, 60, 59, 79, 51, 72, 49, 75, 77, 58, 66, 60, 77, 63, 65, 68, 69, 58, 74, 55, 95, 83, 56, 67, 65, 68, 59, 53, 75, 65, 63, 60, 67, 68, 76, 68, 80, 56, 68, 45, 52, 64, 78, 66, 77, 75, 72, 106, 47, 78, 76, 77, 63, 60, 64, 75, 89, 123, 66, 68, 57, 71, 77, 60, 47, 76, 54, 60, 64, 65, 61, 100, 60, 72, 54, 49, 62, 80, 87, 62, 55, 51, 58, 57, 51, 77, 59, 65, 58, 60, 57, 93, 52, 69, 70, 64, 63, 63, 62, 87, 63, 75, 51, 60, 68, 59, 68, 60, 52, 
67, 69, 75, 51, 65, 59, 68, 64, 51, 79, 85, 104, 50, 59, 62, 72, 71, 65, 95, 56, 62, 58, 74, 103, 64, 68, 59, 110, 67, 57, 57, 58, 55, 51, 75, 75, 54, 70, 54, 64, 66, 61, 59, 71, 65, 55, 61, 65, 57, 69, 70, 76, 60, 70, 44, 64, 62, 59, 46, 74, 83, 66, 65, 80, 54, 73, 90, 61, 59, 68, 66, 58, 54, 53, 53, 70, 64, 54, 74, 56, 72, 74, 57, 61, 69, 72, 49, 65, 57, 102, 61, 71, 68, 70, 67, 54, 42, 79, 59, 81, 58, 42, 73, 53, 63, 52, 58, 66, 51, 61, 85, 55, 95, 66, 63, 106, 61, 57, 63, 60, 64, 72, 53, 63, 47, 49, 70, 64, 63, 67, 67, 71, 60, 61, 72, 50, 67, 64, 56, 70, 125, 46, 64, 81, 76, 59, 53, 55, 59, 77, 74, 66, 52, 64, 44, 72, 61, 39, 91, 62, 93, 61, 55, 54, 52, 70, 75, 58, 78, 62, 62, 71, 71, 62, 53, 77, 47, 47, 59, 81, 82, 69, 73, 55, 65, 67, 67, 83, 63, 64, 58, 69, 54, 54, 73, 52, 67, 60, 68, 56, 63, 66, 76, 84, 62, 68, 71, 65, 55, 55, 61, 64, 78, 64, 76, 53, 63, 68, 67, 55, 92, 71, 68, 58, 49, 100, 63, 73, 65, 54, 83, 64, 98, 58, 71, 97, 53, 56, 50, 56, 63, 61, 74, 50, 65, 66, 84, 72, 58, 57, 75, 76, 78, 46, 81, 101, 63, 70, 82, 62, 50, 53, 84, 56, 77, 62, 63, 71, 64, 67, 73, 70, 67, 57, 64, 68, 74, 72, 47, 62, 48, 86, 61, 60, 61, 82, 56, 67, 66, 64, 69, 59, 61, 68, 57, 57, 72, 89, 71, 48, 66, 68, 58, 46, 102, 74, 74, 69, 67, 59, 58, 92, 53, 64, 65, 75, 68, 51, 99, 79, 52, 54, 70, 63, 72, 80, 60, 70, 60, 80, 72, 64, 57, 73, 66, 68, 62, 77, 56, 62, 68, 61, 76, 59, 58, 54, 82, 85, 73, 70, 55, 58, 75, 67, 50, 61, 55, 50, 70, 124, 63, 66, 73, 62, 112, 71, 69, 61, 56, 54, 57, 60, 70, 58, 56, 73, 60, 56, 69, 60, 59, 68, 73, 60, 62, 74, 75, 53, 62, 81, 64, 69, 65, 58, 85, 54, 99, 60, 60, 93, 69, 47, 68, 68, 58, 62, 75, 58, 55, 67, 57, 64, 46, 72, 69, 53, 56, 56, 91, 63, 59, 54, 64, 61, 71, 58, 67, 66, 74, 110, 57, 88, 67, 48, 61, 66, 71, 71, 63, 62, 76, 78, 60, 60, 50, 88, 83, 52, 64, 60, 106, 71, 61, 58, 66, 71, 47, 65, 68, 96, 48, 65, 66, 60, 58, 60, 88, 56, 62, 60, 59, 71, 64, 55, 59, 66, 53, 58, 54, 67, 49, 61, 69, 65, 43, 60, 52, 58, 60, 61, 68, 54, 63, 67, 35, 87, 67, 68, 69, 59, 62, 54, 65, 58, 72, 95, 63, 71, 59, 61, 64, 56, 77, 77, 57, 57, 54, 60, 64, 96, 53, 77, 76, 72, 64, 57, 60, 69, 72, 76, 54, 63, 70, 67, 57, 70, 59, 54, 80, 61, 105, 56, 58, 56, 50, 75, 91, 65, 60, 90, 69, 59, 65, 70, 92, 100, 74, 45, 53, 69, 62, 118, 60, 107, 69, 57, 75, 44, 48, 59, 66, 56, 60, 67, 74, 50, 75, 86, 59, 73, 83, 78, 62, 57, 75, 44, 50, 49, 46, 62, 65, 67, 73, 60, 60, 61, 76, 53, 49, 56, 62, 76, 53, 67, 50, 62, 84, 61, 75, 62, 66, 69, 66, 79, 57, 78, 60, 66, 53, 68, 74, 74, 67, 78, 75, 69, 65, 63, 83, 84, 92, 68, 58, 56, 66, 71, 60, 54, 60, 67, 62, 51, 89, 75, 69, 61, 73, 81, 53, 65, 62, 78, 60, 120, 71, 58, 55, 62, 73, 78, 40, 56, 76, 70, 79, 51, 41, 72, 63, 68, 57, 66, 74, 69, 72, 63, 65, 65, 65, 68, 66, 56, 64, 73, 55, 55, 105, 63, 54, 64, 123, 105, 88, 72, 61, 74, 66, 60, 58, 93, 62, 71, 63, 54, 60, 69, 86, 64, 66, 73, 49, 63, 58, 65, 70, 73, 56, 54, 66, 64, 56, 62, 84, 56, 77, 70, 60, 63, 71, 69, 60, 51, 62, 58, 85, 46, 57, 62, 82, 67, 83, 54, 51, 39, 59, 60, 78, 63, 55, 132, 102, 55, 70, 54, 74, 80, 71, 64, 50, 50, 71, 63, 66, 62, 58, 54, 65, 71, 62, 67, 57, 46, 57, 59, 51, 54, 57, 64, 60, 55, 55, 64, 64, 64, 62, 63, 69, 52, 64, 61, 86, 83, 68, 63, 66, 68, 73, 73, 64, 54, 77, 56, 81, 95, 82, 62, 54, 52, 73, 68, 58, 93, 66, 67, 102, 61, 53, 53, 58, 60, 50, 71, 88, 86, 46, 67, 53, 54, 54, 101, 66, 83, 58, 53, 47, 79, 63, 65, 77, 61, 56, 58, 54, 63, 55, 75, 94, 54, 98, 67, 66, 55, 55, 65, 52, 58, 87, 67, 70, 74, 103, 59, 76, 60, 54, 73, 69, 82, 65, 62, 78, 49, 58, 54, 56, 70, 73, 63, 75, 50, 59, 70, 
67, 69, 57, 59, 143, 48, 56, 63, 68, 66, 62, 74, 63, 62, 76, 61, 75, 61, 82, 72, 55, 62, 59, 59, 55, 60, 94, 65, 91, 58, 85, 68, 75, 68, 60, 49, 61, 75, 57, 55, 70, 83, 70, 55, 68, 58, 75, 61, 80, 71, 60, 87, 74, 73, 157, 80, 57, 68, 77, 61, 73, 63, 48, 67, 69, 65, 85, 63, 85, 59, 69, 70, 77, 59, 50, 90, 58, 65, 70, 68, 83, 53, 93, 69, 68, 58, 59, 61, 63, 71, 62, 54, 42, 64, 49, 43, 48, 76, 70, 61, 55, 55, 63, 48, 80, 56, 43, 55, 61, 47, 48, 69, 60, 64, 61, 70, 51, 70, 68, 72, 63, 72, 64, 65, 63, 62, 84, 64, 54, 64, 77, 57, 89, 84, 68, 59, 58, 67, 55, 62, 61, 70, 102, 90, 65, 68, 61, 56, 55, 38, 74, 75, 57, 76, 54, 61, 79, 50, 59, 68, 87, 78, 57, 74, 54, 103, 76, 70, 66, 48, 63, 64, 49, 85, 57, 61, 86, 59, 58, 75, 53, 62, 50, 66, 61, 63, 58, 56, 65, 52, 71, 63, 55, 53, 68, 85, 54, 50, 49, 76, 59, 52, 56, 45, 58, 45, 55, 78, 111, 73, 53, 63, 64, 66, 105, 66, 60, 63, 59, 56, 44, 80, 61, 69, 58, 47, 54, 76, 50, 58, 50, 65, 69, 88, 68, 63, 79, 69, 56, 51, 69, 74, 63, 85, 60, 64, 102, 67, 63, 70, 53, 66, 75, 68, 76, 59, 70, 51, 65, 60, 56, 53, 59, 76, 64, 62, 72, 66, 63, 41, 48, 71, 51, 80, 69, 70, 77, 68, 91, 67, 60, 60, 76, 59, 59, 70, 54, 78, 69, 65, 58, 75, 56, 60, 43, 55, 80, 65, 44, 74, 62, 62, 61, 59, 62, 63, 65, 80, 58, 65, 71, 62, 68, 54, 99, 62, 52, 59, 75, 62, 87, 69, 57, 58, 80, 68, 60, 49, 58, 52, 80, 49, 62, 74, 61, 61, 64, 55, 54, 55, 52, 57, 81, 57, 82, 78, 59, 93, 44, 57, 70, 76, 61, 72, 56, 70, 74, 58, 61, 63, 69, 72, 101, 55, 48, 68, 57, 70, 74, 50, 58, 66, 69, 66, 70, 54, 81, 72, 86, 48, 63, 61, 60, 60, 71, 78, 63, 60, 50, 55, 74, 70, 60, 73, 61, 55, 73, 55, 48, 69, 52, 70, 61, 64, 56, 61, 101, 61, 54, 55, 72, 53, 64, 95, 59, 51, 68, 76, 78, 50, 49, 59, 69, 69, 55, 66, 57, 45, 63, 66, 60, 57, 54, 61, 73, 63, 81, 54, 55, 53, 81, 55, 44, 71, 57, 71, 65, 83, 41, 62, 59, 84, 79, 60, 50, 62, 64, 53, 68, 55, 56, 64, 77, 61, 55, 78, 49, 84, 75, 76, 62, 56, 77, 60, 64, 67, 75, 69, 62, 75, 58, 73, 47, 63, 56, 60, 58, 60, 64, 77, 95, 83, 65, 73, 63, 54, 52, 83, 57, 71, 64, 108, 91, 54, 80, 64, 73, 39, 65, 43, 65, 69, 72, 66, 57, 52, 64, 79, 45, 89, 70, 90, 59, 71, 72, 76, 57, 77, 75, 64, 92, 73, 60, 58, 49, 69, 55, 86, 82, 51, 114, 77, 85, 62, 74, 50, 94, 64, 58, 75, 46, 61, 62, 77, 62, 66, 68, 67, 69, 48, 79, 48, 82, 52, 60, 75, 63, 70, 61, 68, 63, 81, 60, 58, 50, 93, 73, 58, 66, 79, 69, 57, 99, 67, 53, 62, 64, 76, 60, 60, 47, 64, 59, 77, 60, 52, 56, 62, 73, 89, 88, 48, 81, 69, 63, 57, 67, 65, 74, 70, 75, 64, 73, 74, 51, 68, 50, 70, 76, 62, 80, 57, 77, 66, 54, 95, 55, 77, 61, 85, 62, 70, 65, 58, 40, 74, 57, 74, 69, 88, 63, 61, 66, 56, 51, 62, 60, 73, 89, 54, 63, 61, 61, 64, 65, 54, 56, 85, 49, 56, 76, 56, 78, 61, 48, 59, 74, 71, 68, 72, 62, 63, 74, 65, 74, 65, 102, 50, 53, 79, 49, 64, 70, 63, 62, 56, 61, 58, 54, 56, 89, 99, 93, 66, 71, 65, 47, 60, 54, 70, 47, 74, 61, 49, 61, 93, 46, 74, 52, 57, 64, 46, 114, 84, 75, 58, 79, 66, 73, 70, 110, 55, 60, 86, 61, 74, 50, 50, 50, 88, 59, 89, 71, 64, 54, 62, 58, 66, 64, 63, 58, 58, 65, 80, 56, 64, 69, 62, 71, 70, 73, 66, 67, 64, 53, 76, 75, 59, 62, 83, 83, 64, 61, 61, 82, 67, 107, 99, 61, 46, 46, 83, 61, 71, 58, 69, 81, 62, 71, 60, 67, 65, 49, 62, 47, 106, 65, 69, 58, 65, 62, 72, 56, 70, 71, 61, 74, 50, 61, 74, 70, 69, 96, 71, 74, 72, 80, 70, 60, 53, 58, 70, 73, 55, 52, 59, 101, 75, 77, 76, 42, 64, 62, 61, 63, 76, 47, 57, 48, 57, 66, 52, 73, 40, 65, 62, 67, 61, 63, 44, 46, 58, 66, 49, 65, 58, 57, 49, 65, 55, 77, 75, 57, 73, 58, 59, 59, 82, 94, 77, 41, 72, 70, 65, 93, 58, 61, 53, 61, 75, 56, 54, 52, 57, 52, 61, 53, 107, 53, 60, 66, 51, 54, 65, 
79, 105, 54, 79, 67, 52, 56, 121, 36, 117, 63, 151, 75, 61, 79, 61, 64, 63, 60, 69, 69, 59, 62, 74, 92, 69, 47, 70, 59, 60, 52, 52, 63, 57, 66, 79, 69, 79, 55, 62, 69, 63, 57, 83, 70, 62, 62, 50, 46, 61, 64, 95, 66, 74, 56, 52, 73, 67, 53, 51, 55, 54, 58, 76, 71, 61, 62, 81, 47, 87, 77, 64, 70, 54, 75, 78, 64, 65, 66, 79, 46, 64, 45, 50, 93, 59, 59, 76, 69, 61, 63, 54, 127, 72, 59, 47, 69, 57, 61, 84, 48, 74, 56, 69, 69, 72, 47, 77, 69, 76, 105, 67, 54, 61, 71, 62, 78, 54, 53, 70, 57, 65, 51, 60, 87, 62, 78, 56, 44, 61, 49, 62, 75, 68, 54, 70, 73, 62, 47, 83, 61, 57, 63, 99, 88, 66, 64, 72, 62, 89, 63, 66, 81, 68, 49, 34, 79, 68, 55, 91, 80, 64, 54, 58, 66, 70, 52, 87, 50, 61, 66, 60, 125, 56, 64, 81, 71, 66, 59, 49, 66, 78, 55, 83, 54, 76, 65, 48, 43, 54, 56, 75, 79, 78, 87, 87, 53, 67, 63, 59, 87, 51, 71, 58, 57, 68, 72, 79, 70, 61, 63, 62, 56, 63, 65, 72, 72, 61, 63, 64, 62, 54, 75, 78, 79, 57, 68, 67, 70, 63, 108, 60, 60, 57, 75, 107, 59, 58, 84, 91, 49, 76, 97, 74, 76, 60, 46, 90, 68, 76, 43, 58, 95, 55, 62, 98, 74, 36, 60, 64, 63, 64, 59, 62, 56, 51, 79, 136, 62, 72, 61, 65, 66, 74, 146, 61, 48, 56, 77, 76, 70, 78, 56, 65, 63, 63, 68, 48, 66, 76, 56, 55, 69, 50, 77, 49, 81, 50, 72, 44, 68, 43, 76, 77, 81, 99, 49, 84, 63, 100, 62, 61, 70, 63, 70, 75, 75, 58, 77, 71, 53, 51, 72, 63, 49, 62, 79, 68, 98, 81, 70, 73, 77, 65, 58, 59, 47, 50, 74, 52, 63, 68, 63, 61, 60, 57, 77, 60, 76, 50, 90, 65, 58, 65, 53, 68, 62, 51, 58, 55, 64, 64, 56, 73, 54, 65, 58, 50, 61, 84, 46, 60, 63, 129, 69, 80, 72, 45, 75, 65, 65, 66, 83, 53, 65, 62, 57, 55, 58, 64, 63, 58, 44, 68, 57, 82, 63, 71, 53, 41, 50, 61, 80, 59, 51, 48, 65, 42, 47, 78, 71, 44, 56, 67, 90, 70, 55, 49, 80, 65, 69, 91, 59, 60, 69, 67, 44, 74, 69, 59, 58, 57, 58, 62, 83, 65, 56, 56, 54, 70, 73, 75, 57, 69, 64, 79, 83, 111, 51, 76, 53, 64, 54, 107, 76, 79, 56, 62, 48, 72, 74, 69, 48, 47, 55, 73, 63, 68, 62, 85, 77, 55, 59, 54, 47, 70, 64, 45, 75, 66, 44, 48, 50, 83, 78, 68, 67, 77, 60, 59, 62, 51, 75, 41, 93, 65, 49, 63, 60, 64, 89, 62, 51, 92, 70, 53, 71, 61, 59, 61, 40, 53, 67, 54, 78, 71, 61, 62, 56, 58, 56, 50, 75, 57, 54, 60, 73, 70, 53, 74, 69, 41, 70, 64, 59, 66, 68, 62, 63, 72, 63, 62, 51, 73, 52, 57, 81, 67, 50, 49, 51, 57, 62, 80, 57, 77, 85, 54, 49, 58, 74, 64, 52, 51, 56, 53, 86, 65, 56, 64, 80, 63, 75, 61, 37, 64, 97, 65, 76, 57, 71, 55, 62, 64, 58, 71, 64, 64, 60, 58, 54, 58, 72, 64, 62, 57, 56, 59, 68, 65, 60, 49, 80, 58, 66, 68, 62, 68, 68, 67, 64, 55, 54, 57, 51, 60, 50, 81, 66, 51, 59, 62, 67, 55, 65, 70, 58, 63, 71, 60, 54, 53, 69, 86, 78, 57, 63, 68, 59, 55, 65, 80, 85, 60, 62, 59, 96, 63, 48, 63, 50, 56, 64, 72, 64, 64, 37, 54, 60, 82, 70, 65, 56, 69, 66, 66, 47, 62, 60, 67, 68, 63, 47, 58, 55, 87, 63, 67, 63, 65, 80, 74, 87, 58, 65, 58, 72, 48, 83, 56, 58, 73, 68, 55, 64, 79, 52, 67, 58, 56, 92, 59, 66, 50, 49, 78, 74, 77, 71, 62, 49, 80, 58, 55, 73, 75, 65, 64, 55, 81, 64, 82, 57, 67, 56, 61, 76, 75, 78, 70, 67, 69, 58, 60, 78, 62, 61, 80, 70, 79, 68, 61, 82, 61, 66, 69, 74, 60, 91, 54, 64, 52, 65, 77, 71, 64, 78, 62, 77, 69, 94, 76, 77, 51, 60, 100, 62, 46, 75, 81, 58, 70, 68, 60, 69, 78, 65, 76, 58, 106, 55, 64, 62, 56, 65, 62, 64, 54, 69, 68, 77, 56, 66, 63, 67, 57, 66, 86, 61, 54, 78, 73, 69, 56, 57, 60, 65, 65, 46, 91, 57, 86, 71, 61, 58, 98, 66, 53, 73, 64, 58, 80, 69, 58, 89, 72, 42, 71, 43, 81, 66, 73, 69, 73, 78, 62, 93, 71, 68, 64, 68, 72, 110, 78, 61, 106, 61, 66, 51, 56, 57, 74, 95, 57, 60, 70, 88, 57, 67, 63, 78, 52, 75, 54, 60, 76, 54, 67, 67, 101, 65, 75, 45, 51, 64, 57, 73, 91, 83, 65, 60, 63, 48, 
82, 71, 73, 67, 62, 89, 90, 68, 65, 107, 63, 65, 68, 69, 65, 69, 61, 57, 65, 65, 69, 81, 60, 78, 58, 82, 56, 60, 64, 69, 51, 58, 74, 88, 52, 64, 43, 61, 64, 64, 58, 71, 67, 67, 59, 67, 71, 78, 86, 57, 69, 99, 86, 72, 64, 73, 72, 52, 51, 67, 57, 55, 73, 59, 75, 61, 46, 74, 45, 57, 67, 54, 66, 73, 82, 65, 80, 65, 59, 49, 65, 72, 58, 53, 65, 65, 61, 71, 68, 43, 78, 66, 58, 67, 52, 61, 56, 45, 62, 64, 64, 66, 64, 62, 76, 57, 53, 59, 70, 55, 64, 60, 61, 54, 82, 110, 81, 72, 71, 60, 80, 56, 65, 57, 78, 53, 73, 66, 78, 62, 63, 57, 59, 62, 62, 62, 74, 46, 61, 66, 58, 77, 70, 44, 79, 68, 67, 89, 113, 92, 135, 54, 128, 62, 68, 79, 62, 61, 73, 74, 53, 53, 60, 63, 48, 62, 53, 70, 60, 57, 56, 53, 62, 50, 67, 51, 92, 51, 96, 79, 66, 68, 63, 64, 80, 72, 64, 63, 57, 61, 65, 53, 48, 78, 66, 50, 56, 66, 69, 64, 48, 76, 92, 70, 66, 51, 56, 81, 70, 53, 58, 64, 64, 107, 73, 59, 79, 71, 73, 69, 71, 61, 86, 59, 62, 60, 63, 69, 74, 57, 45, 59, 62, 130, 55, 56, 60, 49, 53, 59, 79, 54, 68, 79, 55, 49, 66, 65, 54, 56, 90, 99, 52, 61, 39, 54, 62, 64, 64, 71, 59, 80, 62, 73, 60, 88, 56, 61, 67, 52, 79, 69, 71, 57, 55, 78, 58, 55, 69, 63, 99, 81, 64, 64, 88, 78, 66, 63, 60, 73, 76, 54, 58, 78, 75, 67, 72, 89, 61, 60, 60, 57, 57, 60, 61, 87, 53, 54, 61, 50, 62, 51, 82, 133, 78, 51, 69, 65, 54, 59, 57, 70, 84, 68, 56, 71, 52, 73, 73, 83, 41, 57, 72, 84, 64, 46, 72, 63, 73, 58, 75, 58, 66, 62, 59, 50, 65, 59, 57, 65, 64, 60, 55, 56, 93, 70, 59, 81, 60, 76, 73, 72, 47, 102, 62, 88, 53, 52, 74, 67, 41, 115, 60, 73, 66, 99, 78, 65, 72, 66, 65, 55, 57, 52, 65, 68, 57, 58, 87, 62, 88, 65, 61, 69, 66, 50, 60, 65, 71, 78, 73, 69, 54, 67, 51, 57, 57, 93, 100, 66, 75, 70, 50, 64, 69, 139, 51, 65, 65, 48, 56, 68, 52, 73, 79, 67, 62, 54, 77, 61, 54, 74, 45, 65, 85, 84, 70, 67, 55, 64, 84, 68, 52, 56, 47, 58, 81, 81, 52, 60, 79, 79, 63, 78, 73, 76, 59, 114, 68, 61, 75, 64, 82, 75, 61, 59, 58, 60, 56, 86, 64, 70, 81, 58, 67, 51, 75, 57, 70, 55, 70, 55, 62, 85, 52, 57, 55, 82, 66, 41, 79, 76, 92, 43, 60, 59, 51, 63, 57, 45, 82, 70, 84, 51, 67, 61, 61, 64, 76, 69, 77, 72, 58, 67, 124, 55, 55, 55, 55, 91, 69, 67, 52, 58, 56, 69, 58, 76, 62, 73, 60, 60, 66, 55, 63, 89, 76, 43, 72, 65, 73, 70, 64, 55, 89, 69, 59, 30, 62, 49, 66, 74, 69, 53, 60, 50, 53, 67, 81, 66, 67, 62, 56, 70, 76, 61, 64, 82, 65, 80, 50, 69, 61, 61, 52, 88, 53, 63, 58, 66, 65, 74, 68, 52, 61, 66, 56, 78, 120, 60, 69, 57, 60, 47, 122, 61, 49, 58, 66, 61, 42, 71, 81, 68, 54, 51, 78, 71, 61, 56, 84, 69, 69, 67, 64, 70, 57, 68, 51, 51, 65, 59, 81, 73, 94, 92, 66, 65, 78, 66, 49, 56, 59, 61, 82, 68, 87, 44, 75, 67, 70, 51, 63, 73, 48, 61, 62, 90, 64, 66, 50, 74, 72, 55, 59, 75, 57, 58, 72, 56, 58, 64, 48, 66, 59, 55, 45, 42, 66, 84, 62, 53, 51, 58, 65, 63, 52, 73, 83, 60, 65, 64, 64, 57, 75, 68, 67, 66, 60, 92, 72, 60, 77, 47, 76, 65, 59, 80, 73, 78, 59, 58, 56, 62, 91, 62, 57, 69, 59, 66, 67, 59, 54, 47, 68, 69, 57, 64, 67, 55, 54, 62, 50, 69, 70, 65, 59, 81, 54, 55, 67, 52, 54, 56, 62, 79, 71, 62, 47, 67, 77, 53, 65, 58, 63, 56, 51, 65, 61, 70, 70, 53, 59, 76, 68, 71, 70, 58, 62, 66, 72, 85, 103, 64, 83, 54, 72, 70, 53, 66, 64, 54, 53, 65, 60, 64, 67, 70, 70, 66, 61, 66, 64, 71, 65, 55, 51, 71, 63, 64, 68, 52, 74, 47, 49, 73, 64, 83, 50, 54, 74, 67, 66, 54, 67, 61, 73, 67, 75, 42, 52, 66, 67, 69, 69, 57, 79, 68, 69, 48, 67, 62, 59, 69, 60, 71, 50, 60, 55, 60, 68, 70, 55, 59, 57, 50, 79, 68, 77, 64, 55, 99, 56, 66, 44, 71, 70, 75, 59, 67, 52, 75, 63, 62, 81, 58, 98, 60, 51, 65, 66, 55, 54, 51, 62, 71, 61, 51, 68, 86, 63, 58, 66, 57, 62, 72, 73, 71, 48, 78, 85, 55, 46, 53, 
54, 62, 64, 54, 54, 48, 60, 55, 102, 78, 69, 67, 56, 53, 51, 61, 78, 69, 71, 58, 51, 52, 55, 70, 95, 54, 52, 62, 58, 69, 65, 61, 68, 59, 51, 63, 91, 71, 99, 61, 70, 51, 61, 64, 62, 64, 74, 56, 87, 51, 79, 58, 55, 74, 58, 91, 59, 47, 53, 69, 69, 61, 67, 73, 74, 85, 65, 72, 65, 68, 92, 86, 63, 77, 71, 53, 73, 80, 60, 68, 63, 67, 64, 54, 79, 59, 47, 107, 57, 59, 59, 81, 66, 50, 68, 57, 68, 48, 49, 67, 78, 72, 67, 65, 86, 66, 63, 66, 83, 76, 47, 60, 65, 46, 108, 66, 48, 71, 65, 68, 75, 57, 83, 49, 54, 66, 67, 60, 62, 55, 97, 62, 56, 62, 56, 74, 83, 63, 79, 33, 60, 83, 69, 65, 116, 71, 66, 59, 58, 81, 61, 67, 68, 73, 61, 52, 75, 65, 66, 66, 62, 65, 62, 87, 39, 59, 63, 70, 60, 71, 61, 56, 67, 71, 77, 40, 62, 91, 83, 72, 64, 58, 52, 57, 60, 52, 53, 63, 63, 44, 61, 72, 68, 78, 90, 69, 73, 73, 47, 49, 44, 61, 64, 97, 48, 81, 53, 55, 61, 78, 72, 54, 52, 63, 81, 64, 65, 62, 72, 64, 64, 66, 65, 77, 57, 52, 58, 35, 62, 87, 57, 65, 56, 64, 40, 76, 72, 76, 69, 87, 55, 57, 54, 44, 60, 57, 67, 69, 66, 61, 54, 69, 74, 48, 116, 59, 77, 92, 59, 49, 65, 65, 68, 66, 55, 57, 59, 67, 86, 54, 67, 78, 69, 57, 53, 62, 50, 72, 64, 69, 46, 52, 60, 52, 61, 85, 56, 73, 54, 60, 98, 120, 50, 55, 67, 58, 59, 55, 58, 60, 60, 55, 56, 86, 52, 79, 73, 59, 64, 48, 53, 57, 52, 51, 53, 66, 57, 48, 61, 70, 71, 44, 91, 92, 51, 55, 57, 53, 70, 54, 48, 72, 58, 59, 87, 72, 118, 70, 64, 56, 91, 58, 66, 56, 119, 62, 65, 42, 40, 85, 66, 54, 57, 78, 53, 67, 56, 85, 68, 69, 55, 86, 61, 59, 68, 73, 57, 63, 82, 86, 63, 68, 70, 73, 71, 67, 73, 63, 88, 59, 67, 62, 50, 74, 59, 46, 107, 68, 61, 59, 61, 46, 57, 66, 67, 63, 74, 49, 62, 63, 47, 60, 61, 95, 65, 53, 58, 71, 77, 49, 70, 97, 85, 74, 67, 55, 64, 64, 96, 69, 74, 32, 68, 41, 82, 61, 63, 50, 63, 58, 70, 73, 71, 49, 59, 67, 60, 56, 72, 76, 71, 59, 60, 60, 46, 48, 73, 100, 68, 73, 60, 56, 61, 55, 63, 58, 63, 71, 58, 53, 64, 62, 65, 49, 75, 71, 55, 63, 68, 68, 83, 61, 62, 46, 46, 64, 72, 76, 49, 50, 56, 81, 55, 60, 44, 70, 55, 88, 59, 81, 52, 52, 42, 72, 70, 55, 79, 75, 113, 57, 58, 73, 77, 50, 65, 77, 56, 102, 65, 72, 86, 73, 74, 54, 62, 67, 60, 93, 68, 41, 42, 57, 74, 77, 114, 77, 64, 73, 62, 62, 59, 63, 62, 90, 59, 60, 59, 43, 50, 37, 51, 55, 55, 109, 66, 55, 68, 55, 54, 87, 48, 85, 72, 55, 79, 41, 49, 59, 57, 82, 79, 44, 68, 77, 53, 85, 82, 59, 51, 51, 66, 67, 70, 59, 62, 73, 57, 66, 59, 75, 58, 88, 54, 56, 46, 54, 57, 65, 69, 76, 63, 52, 57, 67, 51, 57, 58, 68, 64, 61, 64, 63, 64, 59, 73, 76, 59, 61, 59, 56, 56, 68, 70, 77, 49, 81, 66, 90, 67, 63, 65, 52, 74, 47, 83, 49, 38, 56, 83, 53, 56, 61, 65, 52, 44, 105, 112, 77, 44, 41, 77, 58, 52, 80, 64, 63, 52, 69, 58, 51, 61, 82, 69, 46, 53, 75, 92, 66, 53, 63, 49, 69, 51, 42, 72, 53, 62, 57, 69, 63, 75, 70, 51, 82, 56, 67, 59, 74, 60, 71, 59, 58, 103, 76, 63, 50, 55, 64, 71, 80, 60, 80, 63, 81, 77, 51, 62, 60, 74, 67, 61, 86, 57, 78, 65, 43, 75, 52, 65, 56, 66, 98, 64, 80, 34, 65, 66, 58, 58, 54, 66, 56, 62, 71, 51, 65, 63, 69, 70, 62, 63, 82, 61, 102, 76, 64, 104, 78, 47, 62, 132, 60, 48, 45, 70, 65, 61, 52, 60, 104, 64, 76, 83, 88, 64, 59, 99, 67, 63, 49, 51, 68, 67, 65, 61, 94, 67, 72, 68, 71, 49, 55, 61, 55, 60, 63, 54, 46, 74, 66, 69, 72, 60, 98, 71, 75, 70, 86, 64, 62, 72, 60, 61, 52, 56, 65, 99, 62, 49, 52, 54, 89, 76, 58, 42, 63, 58, 62, 66, 83, 70, 71, 104, 79, 55, 53, 42, 96, 64, 78, 65, 50, 64, 60, 71, 86, 68, 62, 90, 69, 55, 68, 68, 71, 46, 45, 83, 53, 62, 60, 59, 56, 63, 60, 74, 52, 102, 76, 98, 56, 61, 46, 99, 84, 53, 51, 84, 51, 64, 137, 67, 61, 51, 61, 64, 46, 60, 91, 73, 70, 56, 63, 112, 63, 59, 56, 52, 61, 65, 56, 78, 71, 
83, 57, 73, 64, 53, 77, 61, 77, 65, 65, 51, 43, 48, 67, 62, 51, 61, 71, 50, 56, 80, 68, 76, 50, 93, 77, 53, 59, 64, 91, 57, 91, 63, 61, 79, 60, 69, 45, 76, 77, 49, 63, 91, 37, 65, 83, 48, 73, 69, 63, 54, 79, 70, 50, 51, 71, 64, 77, 56, 75, 68, 51, 65, 78, 96, 51, 91, 63, 57, 52, 50, 86, 78, 97, 61, 57, 84, 66, 111, 81, 64, 78, 77, 56, 58, 53, 59, 65, 68, 47, 56, 57, 70, 46, 50, 52, 65, 44, 55, 71, 98, 70, 63, 40, 45, 57, 65, 54, 50, 62, 85, 148, 54, 49, 57, 60, 48, 73, 57, 56, 53, 50, 78, 66, 72, 57, 74, 44, 52, 58, 78, 80, 81, 60, 73, 90, 62, 66, 40, 66, 68, 63, 102, 65, 65, 65, 58, 73, 63, 67, 81, 61, 66, 54, 73, 60, 46, 66, 68, 68, 62, 70, 68, 87, 55, 74, 66, 57, 65, 54, 50, 54, 62, 57, 40, 67, 69, 87, 61, 75, 49, 55, 61, 57, 71, 46, 54, 66, 61, 52, 85, 61, 61, 88, 52, 61, 58, 49, 85, 74, 64, 76, 75, 86, 56, 89, 52, 78, 50, 74, 83, 59, 74, 58, 69, 56, 47, 81, 66, 51, 68, 45, 45, 64, 66, 76, 72, 74, 55, 83, 63, 51, 69, 84, 55, 62, 62, 54, 82, 51, 67, 46, 47, 57, 67, 59, 79, 87, 52, 58, 77, 59, 51, 56, 67, 52, 95, 53, 69, 60, 96, 100, 93, 93, 56, 57, 66, 87, 93, 68, 52, 50, 123, 50, 58, 65, 58, 85, 65, 72, 68, 64, 52, 56, 71, 50, 75, 67, 68, 65, 63, 76, 57, 52, 85, 103, 50, 65, 72, 63, 71, 83, 66, 59, 47, 61, 73, 61, 58, 72, 73, 76, 65, 51, 96, 82, 42, 68, 57, 68, 77, 92, 46, 45, 68, 69, 46, 88, 48, 66, 60, 48, 43, 65, 88, 59, 79, 40, 78, 50, 67, 66, 71, 36, 57, 100, 71, 70, 48, 56, 52, 56, 64, 63, 75, 51, 53, 58, 80, 61, 48, 77, 60, 51, 89, 66, 80, 48, 70, 115, 55, 82, 55, 57, 69, 44, 57, 72, 44, 57, 64, 54, 58, 63, 54, 68, 87, 54, 86, 83, 53, 81, 61, 59, 67, 63, 92, 46, 86, 96, 65, 64, 49, 71, 67, 56, 95, 63, 88, 60, 78, 70, 38, 72, 59, 66, 59, 50, 72, 52, 79, 70, 54, 56, 40, 65, 66, 87, 55, 60, 64, 32, 85, 140, 73, 51, 92, 60, 55, 63, 78, 59, 73, 54, 78, 86, 84, 57, 56, 68, 48, 63, 87, 51, 57, 88, 65, 60, 68, 42, 80, 53, 80, 58, 87, 87, 58, 60, 89, 59, 72, 58, 57, 77, 57, 58, 50, 51, 95, 65, 69, 53, 111, 89, 76, 61, 106, 43, 82, 60, 66, 64, 62, 50, 65, 68, 50, 89, 71, 72, 87, 72, 52, 78, 73, 83, 54, 86, 80, 57, 74, 45, 59, 55, 74, 70, 75, 63, 102, 58, 53, 58, 54, 49, 81, 59, 51, 65, 99, 75, 68, 59, 49, 47, 120, 70, 75, 71, 77, 46, 61, 90, 82, 68, 60, 98, 59, 69, 83, 65, 52, 49, 89, 76, 111, 57, 40, 57, 50, 68, 50, 61, 52, 67, 58, 77, 71, 59, 54, 47, 73, 74, 77, 59, 82, 54, 65, 48, 45, 53, 48, 65, 85, 95, 62, 48, 69, 51, 55, 77, 70, 96, 41, 56, 66, 50, 56, 55, 80, 59, 85, 52, 49, 41, 77, 75, 69, 66, 70, 96, 72, 55, 74, 45, 69, 65, 93, 85, 73, 80, 83, 78, 65, 64, 66, 50, 50, 35, 79, 68, 65, 55, 63, 75, 55, 67, 80, 42, 61, 61, 75, 92, 62, 96, 55, 65, 43, 61, 43, 126, 63, 52, 79, 85, 44, 65, 60, 75, 65, 87, 46, 68, 85, 56, 66, 65, 107, 71, 86, 50, 73, 66, 81, 68, 61, 47, 47, 63, 73, 43, 73, 51, 47, 77, 53, 80, 56, 55, 72, 79, 108, 65, 69, 59, 77, 49, 57, 68, 82, 56, 27, 81, 97, 60, 73, 89, 93, 66, 112, 59, 68, 66, 65, 94, 45, 50, 61, 78, 62, 67, 48, 67, 61, 82, 55, 58, 69, 78, 44, 82, 81, 62, 54, 74, 54, 34, 58, 73, 68, 68, 58, 56, 75, 65, 63, 58, 91, 45, 65, 49, 62, 52, 67, 54, 61, 52, 55, 72, 73, 89, 55, 56, 62, 43, 55, 62, 79, 61, 76, 61, 88, 75, 51, 56, 55, 49, 62, 55, 100, 61, 50, 45, 54, 46, 50, 45, 72, 79, 68, 43, 65, 54, 65, 63, 60, 62, 42, 77, 56, 61, 55, 64, 67, 88, 75, 47, 49, 69, 53, 87, 63, 59, 74, 58, 82, 39, 66, 72, 69, 67, 61, 91, 56, 55, 79, 46, 48, 45, 47, 74, 62, 74, 50, 78, 68, 56, 54, 62, 55, 62, 53, 42, 72, 64, 60, 92, 64, 73, 71, 63, 64, 90, 76, 97, 68, 72, 70, 55, 55, 57, 61, 48, 73, 83, 53, 66, 81, 73, 52, 72, 60, 97, 79, 47, 61, 55, 93, 49, 55, 67, 89, 68, 92, 43, 
55, 52, 66, 70, 85, 73, 55, 66, 64, 47, 56, 67, 78, 61, 66, 67, 58, 56, 74, 53, 82, 82, 75, 66, 55, 70, 57, 71, 57, 58, 70, 88, 72, 67, 59, 61, 83, 59, 61, 47, 58, 35, 66, 53, 67, 50, 74, 56, 67, 87, 53, 58, 56, 76, 63, 73, 53, 73, 50, 81, 90, 57, 54, 51, 59, 55, 57, 58, 68, 68, 75, 66, 76, 62, 50, 98, 102, 70, 73, 71, 65, 74, 72, 60, 62, 57, 57, 63, 79, 68, 73, 66, 76, 70, 91, 80, 75, 78, 50, 57, 72, 71, 61, 74, 88, 65, 50, 67, 62, 66, 88, 56, 70, 56, 63, 60, 55, 69, 78, 67, 55, 61, 74, 59, 63, 65, 57, 69, 64, 59, 67, 64, 61, 59, 81, 59, 76, 60, 58, 63, 75, 49, 52, 73, 53, 85, 78, 44, 65, 80, 60, 78, 65, 59, 74, 56, 51, 94, 62, 60, 78, 72, 67, 60, 66, 93, 53, 68, 92, 56, 66, 83, 62, 75, 69, 52, 64, 62, 61, 56, 61, 66, 64, 59, 77, 81, 65, 56, 52, 58, 71, 59, 58, 61, 63, 65, 63, 54, 41, 69, 59, 64, 50, 57, 52, 74, 103, 99, 67, 62, 60, 58, 61, 94, 53, 58, 63, 68, 122, 88, 62, 51, 68, 54, 70, 55, 54, 46, 57, 63, 48, 62, 58, 66, 61, 65, 93, 53, 55, 69, 77, 79, 66, 56, 58, 75, 86, 56, 53, 53, 61, 123, 58, 89, 62, 58, 59, 57, 70, 72, 69, 97, 61, 52, 53, 53, 58, 53, 76, 48, 59, 57, 65, 64, 104, 68, 67, 59, 58, 52, 61, 75, 66, 66, 71, 55, 65, 56, 63, 59, 67, 98, 88, 63, 63, 70, 59, 64, 67, 67, 62, 94, 58, 58, 57, 72, 49, 77, 59, 55, 64, 47, 58, 53, 57, 57, 58, 56, 69, 57, 118, 61, 60, 54, 70, 65, 73, 67, 51, 56, 93, 78, 62, 96, 58, 73, 87, 60, 70, 83, 58, 85, 75, 41, 55, 60, 66, 61, 56, 68, 61, 48, 73, 64, 68, 106, 59, 54, 63, 54, 82, 56, 52, 83, 60, 64, 59, 58, 80, 57, 61, 69, 67, 53, 69, 58, 56, 61, 60, 72, 67, 58, 55, 60, 73, 55, 55, 81, 70, 48, 58, 67, 64, 58, 55, 76, 109, 68, 76, 82, 77, 58, 59, 73, 58, 73, 67, 78, 58, 80, 57, 50, 61, 75, 62, 48, 68, 55, 61, 50, 72, 57, 55, 72, 55, 72, 51, 53, 61, 63, 61, 67, 61, 57, 65, 67, 69, 56, 55, 81, 58, 97, 69, 61, 61, 59, 74, 47, 52, 89, 61, 63, 56, 56, 64, 70, 75, 67, 47, 69, 72, 47, 64, 69, 58, 63, 55, 71, 60, 55, 61, 54, 67, 64, 63, 56, 98, 64, 83, 58, 61, 69, 72, 56, 58, 82, 72, 48, 57, 62, 50, 60, 65, 66, 61, 63, 59, 57, 62, 59, 57, 64, 102, 58, 62, 64, 55, 57, 39, 79, 58, 63, 59, 65, 80, 53, 68, 67, 58, 69, 73, 58, 61, 52, 59, 72, 64, 68, 88, 67, 67, 85, 57, 56, 69, 63, 87, 60, 72, 87, 66, 55, 59, 76, 102, 80, 71, 72, 58, 71, 50, 62, 56, 67, 72, 53, 58, 57, 54, 53, 56, 68, 65, 77, 52, 52, 71, 64, 62, 50, 87, 84, 65, 112, 95, 60, 62, 63, 67, 62, 68, 58, 65, 71, 70, 108, 61, 65, 62, 62, 51, 51, 55, 59, 79, 73, 61, 78, 46, 79, 52, 71, 72, 62, 82, 62, 66, 63, 61, 77, 71, 59, 55, 75, 55, 81, 47, 72, 85, 75, 66, 49, 52, 60, 63, 56, 75, 54, 73, 65, 54, 55, 53, 88, 60, 74, 50, 64, 62, 63, 47, 67, 66, 59, 60, 74, 104, 66, 62, 75, 67, 58, 53, 76, 61, 52, 68, 57, 69, 63, 54, 56, 75, 63, 48, 75, 60, 54, 90, 60, 57, 59, 55, 46, 62, 56, 59, 64, 84, 55, 63, 52, 69, 78, 58, 72, 56, 60, 59, 67, 76, 53, 72, 83, 60, 75, 50, 67, 68, 63, 68, 49, 82, 58, 83, 96, 82, 81, 48, 58, 65, 59, 63, 67, 64, 77, 61, 61, 65, 57, 66, 56, 68, 81, 62, 57, 71, 67, 56, 53, 58, 68, 82, 55, 50, 62, 57, 75, 74, 66, 69, 53, 58, 67, 58, 57, 66, 60, 61, 50, 68, 74, 91, 89, 73, 75, 64, 57, 53, 46, 74, 51, 58, 112, 67, 77, 61, 55, 64, 58, 42, 57, 63, 85, 61, 70, 92, 91, 62, 75, 93, 82, 48, 82, 58, 57, 62, 57, 57, 84, 81, 76, 58, 72, 57, 73, 63, 46, 57, 87, 61, 76, 70, 55, 80, 63, 78, 56, 59, 87, 84, 61, 72, 53, 72, 101, 66, 73, 53, 64, 59, 70, 52, 68, 55, 72, 69, 65, 70, 96, 74, 70, 83, 65, 88, 66, 69, 71, 66, 59, 51, 58, 58, 62, 61, 67, 56, 51, 109, 62, 67, 66, 58, 85, 58, 82, 75, 90, 69, 52, 71, 57, 55, 88, 98, 57, 96, 51, 53, 54, 79, 61, 62, 47, 51, 54, 88, 63, 68, 54, 74, 57, 83, 56, 
66, 73, 62, 62, 53, 67, 56, 98, 72, 62, 53, 62, 65, 65, 58, 111, 60, 64, 78, 58, 59, 52, 69, 69, 58, 56, 70, 75, 70, 55, 62, 64, 69, 73, 65, 59, 58, 117, 63, 63, 58, 65, 62, 55, 59, 64, 70, 58, 55, 82, 63, 46, 61, 73, 81, 70, 61, 83, 88, 59, 48, 90, 71, 59, 61, 80, 58, 65, 63, 62, 67, 74, 60, 54, 66, 65, 48, 81, 88, 64, 70, 64, 58, 94, 71, 84, 68, 69, 58, 90, 68, 65, 97, 71, 57, 58, 74, 64, 59, 66, 59, 51, 66, 63, 80, 50, 66, 59, 55, 86, 75, 54, 56, 59, 71, 53, 63, 43, 92, 57, 90, 78, 63, 65, 56, 61, 58, 59, 71, 77, 75, 63, 65, 64, 91, 63, 48, 64, 53, 62, 61, 62, 51, 64, 66, 60, 56, 61, 60, 53, 64, 52, 60, 63, 49, 66, 64, 65, 53, 62, 65, 57, 65, 83, 56, 59, 56, 55, 58, 59, 66, 65, 49, 65, 60, 60, 47, 62, 52, 67, 65, 84, 55, 68, 75, 53, 61, 61, 65, 69, 95, 54, 71, 63, 69, 65, 59, 75, 76, 78, 74, 71, 66, 61, 68, 55, 75, 53, 61, 99, 73, 97, 71, 62, 53, 60, 62, 106, 79, 57, 63, 68, 73, 47, 63, 73, 55, 56, 62, 64, 68, 61, 61, 58, 65, 64, 60, 50, 50, 76, 70, 63, 75, 82, 77, 62, 71, 67, 62, 70, 66, 74, 67, 106, 67, 65, 99, 54, 61, 59, 80, 59, 63, 61, 57, 50, 56, 78, 62, 56, 85, 64, 62, 52, 54, 66, 59, 60, 73, 58, 70, 62, 73, 76, 57, 56, 63, 63, 72, 74, 96, 70, 64, 58, 62, 57, 62, 81, 60, 55, 59, 96, 84, 88, 71, 100, 55, 62, 74, 61, 72, 66, 68, 53, 62, 70, 70, 58, 49, 68, 57, 70, 62, 66, 64, 59, 65, 51, 69, 60, 56, 60, 88, 55, 72, 66, 72, 64, 79, 77, 54, 73, 53, 80, 94, 63, 59, 83, 58, 60, 57, 55, 86, 71, 53, 66, 69, 56, 77, 62, 57, 70, 62, 103, 95, 76, 67, 61, 69, 66, 47, 63, 80, 61, 57, 69, 92, 60, 62, 61, 51, 71, 67, 71, 86, 81, 64, 48, 64, 68, 54, 73, 59, 53, 44, 75, 68, 124, 47, 86, 64, 52, 77, 55, 58, 69, 55, 68, 66, 43, 54, 57, 58, 75, 77, 63, 88, 65, 53, 57, 58, 53, 58, 57, 56, 60, 65, 56, 78, 61, 59, 60, 62, 56, 86, 84, 68, 60, 56, 64, 62, 64, 62, 108, 66, 66, 61, 56, 80, 53, 68, 57, 61, 78, 57, 53, 60, 71, 70, 64, 59, 51, 60, 76, 50, 72, 54, 56, 64, 63, 65, 75, 55, 59, 69, 68, 76, 62, 63, 57, 62, 54, 45, 82, 60, 65, 50, 150, 70, 96, 64, 60, 55, 78, 78, 78, 73, 62, 73, 62, 74, 91, 76, 83, 66, 64, 66, 62, 58, 62, 60, 55, 85, 74, 72, 57, 54, 96, 57, 55, 57, 60, 80, 63, 45, 57, 71, 56, 58, 58, 56, 52, 91, 67, 92, 83, 63, 71, 56, 71, 69, 59, 70, 52, 54, 84, 62, 55, 55, 54, 74, 57, 92, 67, 68, 68, 63, 74, 75, 68, 55, 63, 62, 65, 77, 60, 67, 94, 71, 60, 69, 63, 58, 60, 58, 69, 72, 52, 51, 64, 55, 62, 107, 77, 58, 61, 93, 58, 56, 60, 54, 57, 56, 50, 63, 68, 57, 59, 65, 93, 66, 48, 63, 51, 80, 55, 78, 83, 65, 89, 67, 123, 81, 56, 56, 63, 80, 74, 58, 55, 56, 62, 59, 75, 112, 57, 49, 62, 82, 70, 59, 101, 74, 64, 61, 65, 65, 56, 74, 62, 74, 58, 90, 61, 56, 62, 62, 51, 62, 60, 59, 65, 69, 75, 64, 70, 59, 56, 67, 50, 69, 68, 54, 71, 54, 60, 69, 65, 70, 65, 82, 58, 79, 52, 76, 61, 55, 71, 63, 58, 55, 63, 58, 88, 52, 66, 57, 48, 69, 60, 82, 55, 70, 62, 78, 67, 45, 56, 80, 61, 73, 49, 59, 74, 63, 69, 69, 50, 63, 87, 81, 62, 55, 61, 75, 55, 58, 87, 59, 58, 65, 93, 73, 96, 67, 61, 70, 78, 95, 53, 59, 55, 97, 51, 68, 93, 60, 57, 68, 72, 69, 107, 68, 55, 65, 72, 57, 74, 77, 52, 64, 55, 71, 64, 59, 59, 46, 69, 69, 70, 92, 55, 62, 65, 59, 62, 67, 60, 64, 87, 61, 51, 90, 65, 58, 106, 56, 53, 73, 70, 112, 105, 55, 59, 62, 69, 43, 64, 77, 61, 62, 57, 68, 86, 62, 56, 58, 58, 54, 55, 48, 68, 77, 66, 58, 69, 65, 57, 52, 54, 61, 56, 61, 55, 60, 57, 80, 61, 70, 68, 62, 63, 50, 65, 57, 49, 59, 68, 70, 66, 55, 61, 69, 65, 59, 49, 103, 63, 59, 63, 58, 62, 55, 64, 74, 45, 65, 62, 64, 53, 59, 67, 54, 55, 56, 72, 59, 66, 59, 66, 60, 55, 64, 75, 78, 69, 53, 56, 58, 142, 63, 57, 63, 56, 65, 98, 82, 57, 101, 61, 59, 75, 57, 
70, 81, 59, 67, 56, 67, 62, 48, 73, 70, 74, 76, 62, 58, 63, 70, 54, 68, 49, 60, 52, 61, 68, 70, 58, 70, 46, 70, 63, 46, 65, 53, 56, 58, 64, 52, 63, 56, 50, 76, 65, 81, 76, 62, 61, 65, 63, 60, 69, 52, 76, 71, 94, 54, 60, 57, 70, 62, 69, 62, 52, 61, 64, 61, 77, 51, 68, 61, 84, 57, 65, 59, 68, 69, 54, 66, 69, 53, 71, 63, 68, 59, 67, 48, 64, 65, 78, 94, 64, 67, 60, 50, 72, 48, 64, 54, 73, 69, 65, 64, 69, 70, 95, 67, 67, 55, 60, 59, 62, 62, 63, 64, 57, 109, 58, 81, 55, 71, 58, 64, 80, 55, 68, 58, 60, 66, 60, 63, 55, 51, 70, 63, 62, 60, 78, 68, 62, 52, 72, 50, 55, 59, 62, 98, 62, 43, 53, 77, 71, 69, 65, 107, 65, 55, 60, 71, 62, 66, 66, 81, 78, 64, 74, 69, 93, 63, 60, 59, 63, 61, 68, 57, 49, 57, 62, 67, 55, 57, 74, 70, 81, 71, 57, 59, 45, 68, 51, 60, 60, 48, 79, 56, 78, 56, 92, 53, 54, 77, 61, 67, 53, 57, 78, 59, 77, 50, 72, 59, 58, 65, 61, 75, 69, 75, 71, 52, 70, 59, 66, 56, 73, 86, 59, 54, 59, 62, 76, 59, 55, 84, 57, 60, 60, 61, 63, 66, 69, 71, 71, 60, 79, 49, 77, 80, 62, 78, 56, 68, 63, 72, 56, 77, 76, 62, 61, 55, 60, 56, 66, 67, 57, 73, 75, 63, 62, 56, 69, 59, 60, 60, 56, 63, 48, 63, 68, 70, 82, 61, 65, 63, 67, 57, 64, 86, 74, 75, 50, 55, 58, 39, 62, 58, 82, 73, 65, 81, 48, 61, 65, 72, 71, 91, 87, 82, 51, 84, 61, 65, 77, 63, 86, 63, 44, 50, 49, 69, 62, 64, 73, 72, 61, 71, 75, 55, 47, 63, 84, 71, 64, 80, 75, 70, 73, 67, 49, 78, 70, 53, 56, 68, 64, 89, 58, 79, 77, 54, 71, 67, 62, 67, 109, 67, 101, 84, 66, 76, 46, 77, 67, 52, 65, 73, 52, 66, 74, 61, 61, 94, 99, 69, 63, 70, 47, 55, 43, 62, 82, 52, 78, 87, 55, 81, 58, 50, 83, 70, 57, 52, 51, 71, 59, 58, 58, 99, 77, 62, 53, 86, 67, 44, 46, 58, 64, 55, 77, 54, 52, 61, 71, 64, 57, 48, 55, 59, 49, 64, 56, 47, 58, 59, 59, 77, 56, 71, 69, 59, 73, 52, 61, 78, 64, 119, 81, 80, 53, 73, 58, 72, 64, 50, 71, 58, 90, 70, 81, 69, 107, 56, 60, 92, 56, 54, 64, 60, 59, 66, 64, 71, 85, 62, 74, 65, 97, 53, 48, 52, 90, 65, 75, 71, 61, 51, 60, 78, 55, 58, 54, 71, 73, 53, 63, 56, 59, 53, 82, 94, 46, 56, 60, 51, 60, 61, 83, 53, 66, 56, 62, 69, 68, 60, 71, 51, 82, 65, 109, 90, 58, 52, 46, 68, 75, 57, 59, 104, 63, 59, 75, 87, 59, 62, 73, 64, 49, 48, 58, 69, 89, 50, 69, 81, 56, 59, 59, 55, 71, 59, 56, 81, 85, 63, 74, 62, 53, 64, 76, 53, 64, 81, 57, 61, 58, 46, 47, 58, 67, 74, 51, 81, 60, 55, 67, 50, 65, 62, 61, 73, 64, 68, 53, 82, 53, 40, 63, 87, 68, 64, 97, 62, 53, 49, 66, 60, 48, 55, 98, 61, 75, 89, 65, 40, 68, 54, 71, 59, 77, 57, 54, 71, 72, 71, 65, 59, 59, 68, 42, 52, 64, 63, 87, 69, 82, 62, 38, 58, 52, 57, 89, 48, 59, 58, 68, 72, 55, 44, 78, 62, 84, 55, 74, 89, 97, 64, 60, 55, 76, 77, 56, 64, 65, 66, 43, 57, 87, 71, 59, 55, 42, 63, 61, 49, 47, 58, 58, 61, 69, 37, 54, 62, 96, 63, 53, 62, 57, 90, 48, 72, 63, 55, 72, 59, 79, 78, 101, 69, 62, 79, 83, 50, 59, 66, 47, 63, 56, 61, 58, 73, 60, 74, 61, 65, 54, 54, 56, 72, 85, 63, 74, 62, 73, 69, 77, 56, 69, 60, 58, 54, 62, 70, 68, 75, 67, 79, 73, 57, 59, 50, 70, 82, 74, 71, 63, 54, 48, 116, 94, 60, 65, 81, 56, 78, 54, 57, 69, 30, 67, 96, 65, 45, 59, 35, 87, 62, 55, 59, 80, 92, 94, 88, 72, 72, 73, 68, 114, 86, 55, 78, 65, 85, 55, 49, 50, 71, 68, 61, 57, 136, 69, 57, 65, 82, 56, 66, 71, 69, 62, 80, 44, 65, 60, 69, 65, 56, 67, 84, 59, 56, 83, 65, 46, 54, 69, 65, 50, 65, 66, 64, 75, 49, 61, 54, 63, 70, 60, 49, 76, 71, 59, 76, 82, 70, 45, 78, 52, 56, 71, 73, 66, 49, 55, 68, 74, 79, 75, 64, 71, 60, 61, 52, 42, 66, 66, 51, 60, 48, 52, 73, 74, 53, 74, 67, 65, 77, 88, 52, 57, 48, 73, 64, 62, 53, 83, 69, 62, 64, 66, 70, 61, 67, 62, 59, 65, 45, 104, 58, 87, 49, 85, 84, 65, 82, 67, 53, 70, 82, 59, 67, 74, 61, 59, 70, 55, 55, 97, 63, 54, 
55, 52, 52, 89, 50, 59, 53, 71, 58, 91, 56, 45, 63, 72, 48, 94, 64, 70, 69, 48, 58, 63, 54, 67, 83, 76, 61, 52, 89, 60, 80, 113, 50, 66, 71, 71, 127, 67, 76, 60, 53, 81, 69, 57, 84, 72, 57, 64, 74, 66, 84, 58, 80, 51, 56, 54, 95, 65, 65, 55, 50, 43, 43, 56, 48, 60, 59, 40, 57, 58, 63, 71, 88, 88, 94, 66, 56, 65, 77, 66, 72, 66, 56, 75, 56, 66, 71, 103, 56, 67, 81, 94, 78, 80, 71, 62, 64, 78, 73, 60, 87, 67, 62, 52, 60, 56, 72, 94, 82, 59, 62, 58, 77, 61, 64, 61, 58, 56, 63, 56, 85, 65, 81, 50, 67, 145, 59, 61, 68, 28, 63, 74, 63, 61, 86, 42, 60, 68, 69, 73, 64, 54, 74, 57, 57, 60, 67, 58, 52, 62, 70, 52, 61, 70, 67, 60, 73, 74, 59, 63, 62, 58, 62, 69, 46, 56, 78, 65, 78, 58, 52, 88, 83, 63, 53, 68, 60, 71, 62, 69, 53, 63, 58, 53, 56, 62, 60, 47, 58, 59, 56, 86, 57, 83, 55, 80, 52, 71, 49, 59, 75, 54, 68, 80, 71, 63, 63, 77, 63, 59, 53, 67, 53, 56, 78, 48, 102, 74, 61, 67, 66, 67, 75, 54, 68, 67, 84, 51, 51, 57, 44, 69, 60, 59, 64, 56, 50, 64, 53, 67, 62, 65, 61, 65, 56, 53, 85, 60, 69, 58, 63, 63, 111, 65, 67, 57, 79, 54, 69, 51, 70, 57, 61, 63, 54, 84, 70, 61, 54, 52, 59, 52, 48, 71, 67, 81, 69, 61, 53, 64, 53, 75, 78, 81, 72, 59, 65, 56, 68, 52, 124, 59, 65, 57, 68, 64, 59, 70, 74, 69, 73, 50, 67, 98, 80, 67, 80, 86, 75, 67, 53, 57, 91, 70, 62, 64, 65, 65, 87, 75, 59, 62, 51, 54, 77, 45, 69, 42, 61, 57, 67, 58, 65, 94, 57, 64, 75, 52, 61, 78, 66, 82, 48, 46, 77, 73, 59, 66, 61, 63, 43, 68, 59, 80, 61, 67, 92, 69, 67, 51, 83, 57, 63, 57, 64, 72, 81, 64, 60, 57, 38, 61, 66, 47, 50, 68, 85, 66, 53, 99, 87, 52, 54, 67, 55, 68, 75, 65, 86, 72, 55, 69, 68, 58, 62, 76, 65, 48, 56, 65, 46, 102, 75, 52, 61, 71, 51, 58, 75, 54, 76, 61, 59, 58, 68, 68, 53, 66, 76, 55, 64, 48, 65, 61, 67, 51, 42, 55, 64, 89, 60, 86, 76, 73, 69, 72, 80, 58, 83, 61, 61, 62, 58, 65, 71, 80, 63, 60, 44, 74, 51, 64, 65, 63, 61, 75, 65, 54, 74, 56, 51, 69, 59, 55, 62, 83, 79, 66, 65, 65, 49, 94, 70, 74, 68, 62, 53, 63, 57, 76, 49, 59, 58, 75, 67, 67, 71, 55, 63, 118, 77, 86, 88, 78, 52, 66, 45, 71, 66, 63, 72, 61, 62, 75, 66, 69, 59, 65, 73, 59, 59, 55, 66, 70, 70, 83, 57, 60, 64, 80, 62, 55, 67, 75, 82, 67, 61, 53, 75, 63, 73, 68, 79, 74, 62, 63, 49, 57, 68, 74, 59, 49, 67, 53, 57, 58, 64, 68, 64, 69, 70, 64, 64, 86, 63, 59, 57, 80, 49, 64, 62, 80, 74, 56, 62, 63, 54, 63, 57, 70, 106, 69, 66, 56, 52, 77, 59, 67, 62, 49, 56, 99, 76, 58, 73, 56, 74, 72, 72, 66, 59, 78, 67, 60, 60, 66, 62, 72, 52, 84, 47, 85, 60, 60, 63, 87, 66, 82, 61, 55, 64, 66, 60, 69, 71, 56, 67, 74, 64, 61, 67, 53, 49, 77, 65, 63, 61, 66, 55, 47, 78, 63, 67, 60, 75, 55, 64, 60, 51, 56, 56, 59, 50, 97, 62, 79, 45, 57, 69, 70, 65, 64, 104, 75, 63, 69, 62, 58, 71, 59, 53, 76, 74, 57, 68, 112, 64, 53, 72, 70, 57, 56, 53, 71, 64, 77, 90, 96, 65, 77, 64, 52, 58, 92, 59, 59, 67, 59, 76, 80, 54, 80, 63, 79, 90, 77, 88, 57, 57, 67, 57, 56, 50, 69, 70, 70, 58, 77, 55, 70, 63, 66, 78, 67, 43, 67, 62, 52, 61, 72, 51, 75, 59, 97, 54, 68, 73, 66, 51, 88, 51, 74, 70, 76, 44, 66, 53, 72, 74, 58, 81, 60, 56, 50, 50, 57, 60, 71, 58, 67, 55, 73, 59, 52, 68, 111, 63, 67, 63, 62, 77, 62, 63, 87, 55, 56, 63, 87, 59, 84, 57, 61, 62, 89, 71, 66, 62, 84, 76, 65, 51, 96, 56, 76, 60, 67, 83, 81, 66, 69, 53, 69, 83, 76, 80, 57, 56, 116, 64, 57, 52, 51, 68, 60, 59, 66, 55, 58, 55, 80, 63, 85, 67, 78, 91, 95, 62, 74, 54, 60, 58, 59, 54, 58, 84, 62, 67, 57, 50, 68, 47, 59, 77, 55, 72, 49, 58, 71, 74, 76, 77, 54, 60, 68, 68, 59, 85, 80, 77, 66, 73, 49, 58, 55, 62, 77, 56, 67, 64, 57, 54, 55, 104, 83, 75, 52, 66, 75, 68, 61, 68, 53, 58, 42, 79, 83, 64, 68, 44, 84, 74, 58, 60, 60, 90, 
53, 90, 59, 56, 66, 65, 124, 68, 65, 70, 67, 90, 54, 54, 57, 68, 51, 59, 54, 124, 69, 61, 73, 69, 72, 72, 79, 57, 58, 83, 68, 54, 58, 64, 65, 63, 72, 56, 58, 75, 77, 56, 61, 64, 48, 77, 66, 57, 49, 60, 76, 59, 69, 68, 86, 67, 62, 50, 63, 74, 58, 57, 74, 59, 90, 61, 65, 41, 64, 65, 58, 44, 62, 94, 71, 65, 85, 74, 59, 55, 54, 71, 54, 49, 69, 70, 61, 63, 43, 43, 51, 66, 59, 61, 73, 93, 80, 52, 54, 74, 84, 58, 65, 58, 74, 71, 61, 61, 57, 71, 78, 47, 63, 59, 79, 57, 102, 47, 96, 58, 64, 107, 84, 70, 67, 58, 79, 64, 65, 52, 67, 52, 51, 61, 58, 71, 118, 57, 57, 65, 73, 60, 110, 69, 63, 69, 69, 59, 108, 63, 55, 49, 59, 67, 70, 74, 67, 70, 60, 63, 63, 79, 70, 79, 91, 58, 57, 68, 82, 56, 127, 68, 52, 58, 83, 113, 64, 84, 59, 54, 79, 66, 56, 59, 69, 61, 60, 73, 84, 55, 67, 63, 59, 75, 65, 79, 50, 66, 53, 55, 64, 79, 48, 72, 53, 62, 66, 56, 62, 56, 79, 84, 56, 116, 65, 61, 36, 55, 78, 48, 63, 61, 62, 56, 66, 78, 68, 43, 58, 68, 66, 56, 88, 48, 72, 59, 64, 56, 77, 54, 73, 69, 56, 54, 56, 72, 58, 59, 57, 83, 84, 63, 61, 55, 54, 58, 59, 71, 56, 61, 61, 67, 70, 92, 119, 105, 56, 47, 63, 60, 67, 63, 65, 67, 81, 65, 57, 61, 50, 62, 66, 53, 58, 68, 59, 63, 57, 50, 51, 59, 59, 48, 70, 67, 53, 73, 64, 67, 60, 70, 50, 66, 72, 65, 40, 75, 46, 63, 66, 55, 67, 48, 69, 59, 76, 63, 61, 65, 69, 62, 72, 65, 60, 72, 61, 50, 68, 76, 59, 55, 76, 61, 74, 56, 73, 56, 62, 54, 61, 66, 52, 66, 71, 63, 60, 58, 91, 60, 70, 59, 51, 63, 58, 80, 60, 106, 55, 62, 68, 55, 48, 81, 62, 85, 71, 62, 51, 59, 59, 72, 69, 52, 72, 59, 75, 59, 76, 59, 76, 74, 59, 54, 56, 70, 62, 72, 52, 61, 60, 63, 51, 99, 57, 67, 50, 69, 58, 60, 70, 69, 68, 49, 55, 54, 86, 73, 66, 73, 48, 55, 62, 62, 42, 69, 59, 63, 55, 62, 78, 65, 85, 84, 57, 58, 89, 51, 55, 60, 78, 120, 62, 55, 67, 64, 66, 52, 58, 80, 66, 60, 52, 69, 88, 57, 59, 74, 71, 62, 61, 55, 73, 57, 51, 37, 66, 61, 90, 54, 60, 63, 57, 68, 61, 51, 70, 60, 61, 65, 59, 84, 63, 56, 110, 77, 77, 65, 55, 64, 68, 49, 73, 68, 74, 101, 76, 64, 87, 68, 57, 74, 53, 62, 73, 68, 52, 67, 63, 59, 53, 66, 55, 53, 48, 61, 78, 70, 58, 52, 54, 56, 61, 67, 66, 54, 51, 67, 74, 65, 70, 75, 46, 54, 65, 64, 74, 60, 45, 57, 76, 87, 55, 67, 65, 54, 61, 57, 67, 68, 76, 53, 107, 70, 61, 59, 75, 60, 57, 68, 55, 66, 63, 59, 84, 78, 57, 51, 65, 50, 47, 56, 57, 48, 60, 44, 59, 57, 51, 57, 50, 80, 48, 63, 68, 72, 56, 58, 67, 58, 63, 56, 66, 60, 60, 81, 52, 82, 56, 71, 50, 65, 58, 62, 56, 79, 51, 57, 66, 73, 71, 66, 76, 67, 77, 64, 76, 64, 92, 68, 62, 64, 69, 54, 50, 66, 59, 51, 66, 66, 55, 88, 65, 58, 61, 69, 59, 71, 68, 78, 96, 67, 58, 77, 62, 61, 66, 59, 102, 69, 57, 83, 55, 71, 53, 72, 67, 83, 50, 56, 69, 55, 55, 65, 71, 53, 74, 51, 67, 54, 71, 65, 61, 65, 101, 64, 68, 70, 57, 50, 68, 65, 95, 51, 78, 57, 71, 67, 54, 60, 70, 70, 59, 61, 53, 72, 55, 63, 52, 55, 58, 65, 60, 60, 62, 73, 64, 71, 56, 78, 62, 61, 66, 60, 67, 64, 51, 54, 76, 55, 54, 103, 60, 66, 66, 64, 61, 61, 65, 63, 66, 69, 65, 76, 87, 63, 118, 70, 62, 61, 64, 59, 63, 62, 51, 54, 64, 54, 59, 71, 69, 78, 77, 61, 67, 57, 65, 49, 49, 69, 61, 52, 60, 56, 73, 68, 57, 73, 68, 63, 58, 79, 52, 65, 75, 79, 59, 61, 53, 75, 53, 61, 54, 73, 68, 51, 66, 56, 56, 71, 70, 59, 73, 63, 93, 120, 57, 63, 66, 73, 61, 65, 49, 91, 78, 55, 61, 90, 48, 71, 61, 83, 62, 63, 60, 59, 103, 71, 51, 78, 68, 62, 78, 55, 59, 45, 49, 75, 113, 72, 59, 61, 71, 64, 70, 64, 63, 59, 58, 64, 54, 65, 56, 63, 76, 58, 58, 86, 63, 63, 65, 47, 72, 57, 50, 61, 61, 55, 69, 63, 60, 46, 53, 62, 55, 60, 82, 57, 59, 58, 64, 61, 54, 60, 99, 54, 58, 68, 63, 65, 102, 63, 59, 73, 71, 53, 66, 55, 54, 57, 54, 63, 61, 62, 
78, 80, 71, 55, 70, 53, 63, 67, 68, 60, 70, 71, 65, 78, 74, 53, 47, 66, 61, 45, 83, 44, 65, 55, 107, 67, 73, 57, 55, 60, 79, 39, 66, 79, 71, 47, 63, 62, 91, 59, 76, 77, 59, 68, 56, 61, 56, 50, 58, 61, 87, 57, 51, 61, 104, 60, 60, 54, 54, 92, 60, 60, 63, 59, 64, 51, 63, 58, 47, 73, 78, 85, 68, 49, 74, 53, 50, 60, 52, 53, 72, 60, 55, 95, 61, 65, 58, 72, 47, 72, 80, 62, 74, 56, 70, 54, 82, 57, 77, 59, 70, 56, 55, 65, 83, 69, 53, 61, 73, 84, 56, 57, 61, 59, 74, 50, 62, 59, 60, 89, 81, 74, 62, 69, 64, 65, 81, 57, 66, 64, 69, 84, 66, 58, 64, 82, 107, 71, 83, 63, 63, 92, 43, 65, 69, 63, 66, 65, 104, 75, 53, 60, 63, 94, 64, 56, 62, 68, 57, 51, 83, 135, 67, 60, 48, 61, 80, 44, 74, 75, 52, 82, 50, 62, 63, 68, 53, 66, 61, 58, 75, 83, 63, 64, 66, 63, 67, 45, 59, 66, 74, 48, 58, 60, 59, 61, 91, 96, 68, 46, 62, 50, 55, 67, 64, 68, 75, 100, 71, 67, 73, 57, 65, 51, 65, 75, 67, 65, 54, 56, 91, 71, 79, 74, 68, 77, 72, 56, 66, 52, 60, 65, 56, 86, 67, 72, 72, 50, 78, 53, 74, 58, 88, 73, 58, 67, 96, 91, 61, 54, 57, 67, 88, 59, 77, 66, 66, 68, 108, 64, 96, 57, 79, 74, 83, 85, 57, 52, 83, 57, 53, 72, 72, 51, 86, 59, 77, 57, 96, 51, 55, 54, 47, 63, 83, 62, 76, 56, 57, 82, 63, 64, 52, 63, 78, 58, 75, 81, 61, 62, 54, 58, 63, 58, 85, 46, 87, 72, 58, 53, 69, 51, 117, 52, 62, 68, 67, 107, 58, 82, 56, 66, 59, 58, 59, 54, 61, 56, 54, 61, 82, 85, 81, 79, 61, 68, 59, 43, 57, 69, 60, 61, 55, 59, 70, 76, 50, 70, 53, 67, 65, 54, 57, 94, 48, 38, 60, 63, 102, 40, 65, 65, 49, 85, 74, 55, 57, 68, 81, 63, 51, 51, 60, 108, 101, 62, 64, 53, 71, 49, 74, 67, 84, 57, 83, 55, 58, 72, 78, 62, 54, 57, 77, 75, 63, 72, 69, 61, 69, 58, 70, 58, 67, 39, 68, 81, 124, 79, 63, 51, 53, 68, 63, 79, 59, 74, 76, 74, 73, 69, 51, 82, 62, 65, 59, 55, 57, 64, 55, 68, 69, 67, 67, 84, 75, 65, 53, 67, 76, 58, 52, 84, 71, 63, 57, 49, 68, 57, 60, 70, 81, 56, 61, 56, 70, 56, 59, 51, 90, 64, 52, 65, 77, 68, 57, 80, 75, 62, 65, 57, 68, 77, 87, 90, 61, 58, 77, 58, 51, 61, 64, 57, 66, 63, 74, 56, 76, 61, 79, 69, 54, 56, 72, 53, 68, 93, 56, 65, 57, 65, 59, 54, 76, 74, 59, 71, 66, 88, 52, 62, 62, 69, 67, 66, 57, 67, 50, 57, 64, 65, 64, 54, 79, 59, 58, 65, 53, 86, 66, 65, 59, 69, 55, 107, 84, 77, 53, 79, 62, 59, 59, 59, 68, 61, 62, 75, 92, 67, 50, 44, 51, 64, 51, 58, 95, 52, 44, 76, 59, 66, 58, 52, 54, 82, 96, 82, 91, 65, 52, 66, 54, 112, 62, 54, 59, 60, 55, 72, 56, 78, 59, 68, 55, 77, 106, 55, 55, 72, 59, 70, 73, 62, 66, 71, 59, 57, 55, 75, 72, 55, 84, 74, 63, 53, 58, 48, 52, 53, 67, 59, 88, 62, 78, 63, 91, 49, 71, 73, 41, 57, 53, 68, 87, 55, 49, 66, 78, 62, 78, 67, 58, 52, 66, 63, 66, 78, 67, 64, 86, 80, 59, 100, 67, 62, 59, 53, 59, 45, 53, 55, 66, 73, 69, 63, 57, 68, 56, 88, 65, 68, 82, 63, 67, 89, 42, 69, 61, 61, 70, 66, 70, 81, 71, 70, 68, 67, 72, 66, 53, 54, 64, 67, 75, 60, 64, 52, 73, 53, 67, 55, 70, 70, 61, 52, 62, 57, 59, 64, 59, 53, 68, 64, 66, 71, 51, 72, 63, 63, 50, 45, 56, 72, 50, 51, 64, 67, 70, 55, 67, 46, 54, 89, 86, 59, 73, 74, 64, 59, 65, 74, 82, 77, 61, 68, 65, 63, 76, 63, 56, 70, 65, 99, 74, 76, 66, 51, 67, 49, 91, 92, 62, 59, 86, 69, 56, 62, 45, 66, 70, 51, 67, 75, 53, 58, 61, 51, 70, 60, 64, 53, 84, 57, 78, 58, 59, 54, 52, 68, 76, 65, 63, 57, 62, 66, 78, 68, 60, 73, 89, 50, 67, 57, 66, 60, 61, 64, 85, 60, 67, 75, 47, 73, 53, 66, 59, 54, 77, 56, 72, 67, 71, 58, 65, 63, 72, 65, 82, 71, 55, 79, 66, 58, 55, 62, 100, 61, 53, 70, 56, 62, 58, 50, 60, 48, 62, 84, 64, 107, 56, 66, 60, 61, 60, 58, 72, 72, 60, 70, 72, 56, 53, 135, 75, 78, 60, 69, 77, 66, 58, 70, 63, 64, 94, 60, 105, 75, 80, 52, 55, 57, 71, 61, 55, 52, 64, 53, 77, 57, 67, 67, 80, 60, 73, 
62, 59, 62, 72, 56, 42, 53, 50, 66, 68, 58, 61, 65, 64, 84, 62, 67, 72, 78, 68, 65, 87, 63, 82, 71, 54, 60, 76, 68, 73, 83, 64, 62, 79, 66, 68, 71, 72, 60, 61, 69, 72, 55, 65, 46, 66, 71, 69, 61, 55, 90, 68, 62, 68, 74, 65, 62, 54, 64, 63, 50, 61, 62, 109, 59, 66, 80, 59, 59, 71, 75, 63, 77, 47, 61, 67, 59, 70, 81, 62, 70, 69, 62, 70, 107, 57, 79, 63, 48, 73, 57, 68, 58, 54, 64, 66, 61, 68, 49, 64, 98, 56, 80, 65, 49, 58, 38, 61, 55, 63, 96, 57, 63, 49, 54, 60, 58, 56, 46, 55, 68, 56, 67, 57, 64, 56, 77, 56, 65, 65, 62, 57, 85, 88, 74, 66, 61, 58, 48, 57, 63, 58, 48, 63, 61, 50, 75, 54, 65, 64, 85, 50, 67, 120, 60, 60, 62, 80, 55, 50, 57, 68, 62, 66, 77, 64, 64, 69, 55, 71, 53, 77, 69, 50, 92, 72, 85, 87, 69, 67, 85, 58, 62, 66, 83, 61, 71, 57, 58, 63, 67, 62, 65, 56, 65, 62, 96, 60, 52, 58, 93, 60, 73, 71, 60, 52, 87, 61, 66, 62, 86, 67, 76, 49, 102, 65, 65, 59, 62, 54, 59, 61, 77, 79, 82, 65, 55, 71, 59, 92, 85, 82, 69, 69, 58, 70, 58, 132, 75, 79, 61, 59, 56, 54, 59, 67, 64, 79, 46, 73, 58, 75, 52, 72, 58, 53, 74, 61, 91, 51, 70, 64, 69, 62, 55, 57, 83, 36, 61, 64, 52, 41, 118, 75, 54, 68, 85, 64, 70, 80, 68, 72, 79, 53, 53, 70, 61, 84, 56, 92, 61, 67, 78, 52, 89, 65, 65, 87, 54, 69, 64, 49, 85, 56, 45, 58, 65, 46, 63, 67, 80, 67, 55, 76, 80, 53, 64, 71, 69, 78, 63, 64, 54, 58, 59, 62, 73, 74, 59, 68, 58, 82, 57, 69, 72, 68, 57, 88, 48, 65, 63, 68, 75, 62, 59, 55, 83, 55, 72, 57, 75, 66, 58, 52, 57, 58, 67, 89, 70, 57, 66, 83, 90, 58, 75, 55, 60, 86, 68, 54, 75, 91, 46, 58, 81, 83, 63, 74, 49, 109, 50, 57, 66, 60, 60, 56, 67, 54, 71, 68, 57, 39, 56, 72, 73, 53, 54, 75, 59, 88, 52, 71, 61, 57, 84, 50, 84, 57, 73, 72, 56, 55, 55, 62, 70, 61, 49, 46, 58, 79, 74, 59, 60, 70, 59, 99, 65, 61, 60, 61, 67, 61, 82, 68, 57, 60, 66, 73, 49, 77, 46, 64, 74, 59, 70, 65, 78, 65, 63, 65, 57, 72, 72, 64, 96, 53, 54, 72, 125, 113, 63, 48, 68, 86, 62, 71, 60, 62, 69, 81, 102, 66, 62, 78, 76, 70, 69, 69, 59, 93, 72, 50, 78, 47, 71, 62, 72, 53, 72, 68, 60, 68, 71, 69, 98, 62, 93, 46, 65, 95, 57, 44, 76, 56, 62, 53, 53, 58, 58, 90, 58, 83, 53, 81, 64, 58, 64, 68, 53, 67, 59, 89, 66, 63, 65, 52, 74, 58, 55, 58, 55, 56, 72, 54, 53, 56, 51, 66, 69, 49, 60, 74, 70, 64, 55, 77, 57, 133, 66, 57, 58, 55, 50, 52, 57, 75, 63, 51, 57, 64, 73, 65, 60, 46, 82, 43, 56, 53, 71, 46, 57, 66, 70, 63, 75, 62, 57, 63, 42, 66, 76, 103, 59, 76, 65, 62, 78, 65, 66, 55, 71, 66, 64, 96, 77, 63, 59, 73, 74, 49, 84, 44, 48, 59, 60, 64, 53, 69, 53, 58, 66, 91, 63, 119, 107, 64, 57, 67, 49, 67, 101, 92, 62, 70, 61, 70, 61, 51, 67, 100, 57, 69, 53, 54, 61, 60, 67, 68, 108, 73, 93, 61, 64, 43, 41, 49, 45, 61, 70, 47, 57, 64, 56, 70, 63, 50, 54, 78, 47, 60, 64, 67, 49, 79, 83, 68, 72, 56, 73, 92, 63, 49, 60, 55, 57, 66, 74, 51, 57, 59, 60, 66, 50, 66, 71, 61, 52, 106, 53, 65, 57, 84, 58, 46, 60, 59, 77, 67, 72, 54, 58, 64, 66, 45, 88, 36, 66, 53, 58, 67, 67, 75, 144, 65, 60, 69, 62, 66, 42, 65, 60, 72, 55, 72, 49, 95, 64, 80, 65, 62, 71, 50, 66, 62, 62, 72, 55, 57, 57, 62, 50, 59, 53, 63, 55, 51, 79, 61, 57, 72, 52, 77, 72, 83, 70, 54, 105, 53, 60, 49, 53, 79, 48, 79, 54, 57, 60, 70, 57, 83, 57, 75, 69, 59, 61, 52, 59, 60, 57, 45, 69, 62, 72, 57, 59, 73, 69, 70, 66, 57, 64, 56, 79, 92, 57, 53, 98, 76, 60, 71, 60, 70, 82, 102, 72, 76, 79, 58, 61, 59, 50, 91, 85, 64, 61, 62, 56, 52, 88, 75, 62, 116, 54, 60, 67, 67, 63, 50, 59, 63, 59, 76, 60, 78, 70, 78, 66, 54, 62, 74, 60, 56, 54, 60, 76, 66, 122, 65, 71, 61, 55, 51, 72, 98, 60, 68, 63, 80, 70, 74, 62, 60, 63, 70, 66, 66, 50, 61, 69, 59, 73, 78, 51, 60, 46, 55, 65, 65, 90, 62, 65, 49, 95, 
96, 43, 65, 61, 70, 54, 76, 55, 76, 85, 68, 67, 58, 74, 58, 53, 62, 48, 67, 61, 64, 68, 74, 125, 82, 61, 59, 68, 71, 95, 81, 66, 58, 54, 58, 75, 68, 44, 54, 77, 71, 49, 62, 70, 56, 49, 65, 58, 60, 53, 47, 68, 61, 55, 64, 59, 54, 53, 52, 74, 53, 70, 85, 48, 59, 78, 71, 54, 77, 64, 68, 89, 74, 58, 57, 67, 69, 59, 57, 68, 61, 57, 69, 59, 53, 68, 69, 61, 71, 78, 51, 55, 64, 60, 58, 83, 63, 74, 55, 58, 60, 73, 78, 71, 52, 99, 61, 65, 48, 51, 54, 54, 57, 78, 68, 58, 71, 43, 68, 60, 56, 54, 79, 71, 109, 66, 62, 63, 61, 64, 70, 63, 60, 57, 61, 70, 70, 56, 64, 66, 48, 85, 47, 50, 48, 63, 71, 58, 62, 93, 49, 47, 64, 51, 65, 53, 61, 71, 87, 52, 68, 61, 79, 57, 52, 70, 63, 66, 76, 65, 73, 50, 57, 92, 77, 88, 62, 54, 66, 86, 62, 53, 82, 57, 53, 51, 65, 85, 54, 48, 55, 53, 68, 77, 55, 84, 62, 80, 78, 74, 56, 65, 70, 55, 86, 62, 55, 77, 69, 63, 69, 65, 39, 69, 71, 75, 52, 63, 90, 61, 71, 61, 64, 65, 61, 69, 73, 61, 55, 54, 59, 48, 65, 42, 64, 71, 64, 68, 55, 49, 56, 58, 43, 81, 63, 70, 50, 74, 54, 49, 63, 111, 68, 77, 60, 90, 76, 63, 54, 60, 52, 66, 87, 71, 58, 99, 93, 83, 79, 75, 60, 75, 57, 63, 44, 51, 62, 55, 64, 65, 57, 54, 60, 61, 76, 55, 78, 59, 56, 63, 61, 77, 58, 53, 93, 69, 65, 51, 78, 76, 67, 59, 59, 76, 66, 55, 109, 62, 63, 65, 59, 78, 61, 70, 50, 81, 65, 55, 62, 69, 64, 68, 53, 64, 65, 75, 62, 57, 62, 64, 49, 66, 72, 72, 60, 58, 75, 93, 70, 82, 66, 57, 62, 95, 52, 64, 53, 60, 68, 61, 77, 48, 71, 51, 86, 61, 58, 64, 61, 83, 55, 57, 68, 109, 55, 53, 61, 53, 97, 63, 56, 53, 80, 70, 63, 58, 41, 84, 74, 52, 51, 82, 85, 66, 84, 53, 78, 43, 57, 56, 52, 50, 68, 59, 71, 62, 86, 61, 71, 56, 49, 111, 82, 71, 63, 58, 48, 72, 64, 57, 72, 68, 77, 79, 60, 46, 64, 83, 47, 46, 66, 57, 54, 75, 76, 56, 46, 66, 61, 57, 77, 145, 69, 51, 51, 91, 69, 65, 62, 58, 57, 66, 67, 103, 65, 68, 45, 68, 65, 73, 62, 47, 55, 90, 72, 99, 64, 82, 55, 81, 81, 76, 69, 87, 55, 59, 68, 49, 68, 63, 74, 72, 119, 48, 65, 53, 85, 63, 68, 90, 66, 58, 72, 73, 63, 58, 47, 86, 65, 68, 64, 67, 66, 59, 60, 72, 48, 80, 64, 61, 58, 80, 61, 61, 58, 61, 51, 54, 60, 72, 68, 61, 51, 84, 53, 71, 49, 70, 58, 66, 67, 60, 60, 63, 83, 51, 60, 70, 87, 71, 45, 58, 45, 95, 50, 43, 59, 50, 60, 74, 118, 50, 125, 54, 62, 62, 58, 62, 65, 87, 96, 56, 57, 61, 44, 68, 87, 49, 65, 51, 98, 66, 66, 60, 60, 62, 55, 55, 56, 50, 84, 75, 102, 61, 93, 69, 69, 83, 69, 57, 62, 98, 51, 63, 60, 69, 73, 50, 72, 69, 61, 67, 89, 66, 90, 71, 50, 85, 65, 87, 63, 60, 62, 68, 75, 53, 59, 67, 66, 77, 60, 69, 80, 64, 46, 73, 41, 70, 59, 69, 42, 62, 49, 81, 57, 50, 53, 63, 59, 70, 59, 68, 61, 52, 58, 82, 68, 74, 67, 62, 72, 56, 61, 65, 71, 55, 76, 85, 49, 55, 55, 58, 63, 80, 54, 70, 77, 79, 55, 61, 90, 58, 65, 62, 91, 68, 64, 61, 66, 66, 58, 64, 57, 51, 53, 88, 54, 79, 55, 67, 48, 41, 81, 62, 68, 49, 81, 67, 107, 72, 64, 77, 72, 85, 55, 74, 71, 62, 55, 71, 52, 73, 67, 90, 55, 76, 82, 76, 51, 65, 53, 137, 64, 40, 77, 83, 51, 67, 74, 42, 46, 56, 84, 70, 93, 59, 58, 58, 48, 69, 79, 58, 54, 62, 76, 96, 68, 53, 78, 79, 60, 69, 93, 58, 58, 61, 56, 61, 63, 60, 61, 62, 65, 50, 66, 70, 56, 60, 64, 59, 54, 77, 61, 86, 52, 44, 57, 76, 53, 53, 55, 76, 65, 54, 49, 61, 97, 71, 55, 92, 51, 61, 55, 50, 63, 55, 60, 57, 73, 72, 76, 60, 62, 51, 71, 52, 44, 50, 64, 80, 73, 68, 58, 77, 99, 101, 74, 64, 45, 60, 54, 70, 94, 62, 50, 68, 72, 59, 70, 78, 63, 88, 63, 63, 57, 65, 76, 64, 74, 61, 83, 54, 67, 93, 54, 73, 40, 58, 95, 69, 82, 75, 69, 58, 59, 61, 53, 63, 75, 52, 54, 58, 71, 49, 86, 51, 74, 56, 51, 65, 108, 60, 63, 69, 54, 78, 59, 67, 59, 65, 68, 58, 66, 56, 71, 66, 71, 48, 77, 61, 59, 64, 56, 74, 
68, 55, 79, 58, 54, 66, 66, 92, 51, 99, 96, 58, 37, 65, 56, 71, 79, 60, 60, 89, 61, 66, 56, 74, 78, 71, 48, 75, 69, 84, 82, 60, 47, 75, 55, 62, 61, 87, 56, 69, 66, 49, 68, 49, 49, 65, 121, 98, 78, 64, 75, 66, 79, 83, 53, 61, 66, 60, 64, 53, 60, 50, 80, 44, 52, 69, 72, 60, 76, 68, 56, 59, 66, 52, 63, 63, 55, 49, 47, 70, 76, 61, 65, 69, 54, 63, 60, 69, 70, 57, 59, 54, 59, 81, 75, 73, 57, 75, 64, 59, 57, 67, 68, 79, 77, 64, 49, 77, 62, 69, 104, 56, 74, 56, 72, 65, 87, 62, 71, 52, 57, 68, 34, 83, 62, 63, 61, 60, 63, 40, 61, 59, 61, 59, 54, 62, 42, 60, 69, 93, 65, 100, 63, 56, 80, 57, 74, 63, 71, 71, 75, 51, 56, 60, 72, 60, 58, 65, 80, 72, 54, 85, 74, 80, 52, 56, 71, 64, 58, 75, 74, 62, 84, 64, 35, 66, 74, 57, 55, 51, 58, 65, 55, 53, 65, 79, 68, 46, 63, 75, 72, 72, 46, 74, 88, 65, 95, 66, 60, 81, 80, 62, 47, 45, 59, 64, 73, 56, 73, 53, 61, 78, 67, 57, 63, 69, 51, 52, 51, 57, 65, 71, 57, 54, 51, 56, 77, 51, 68, 65, 50, 57, 87, 61, 74, 69, 49, 73, 56, 59, 76, 60, 68, 87, 79, 122, 75, 53, 72, 65, 70, 73, 48, 64, 55, 67, 66, 34, 61, 53, 52, 61, 80, 56, 83, 83, 67, 64, 59, 60, 90, 58, 55, 48, 65, 70, 70, 60, 63, 57, 66, 77, 52, 63, 50, 67, 67, 64, 67, 62, 54, 69, 57, 61, 60, 80, 66, 50, 72, 81, 52, 63, 75, 54, 76, 84, 75, 65, 62, 58, 65, 61, 61, 63, 72, 64, 54, 91, 63, 66, 28, 50, 122, 57, 54, 68, 62, 55, 62, 57, 71, 72, 52, 52, 46, 99, 89, 53, 56, 57, 64, 76, 59, 69, 77, 56, 76, 70, 66, 63, 54, 46, 65, 83, 64, 58, 73, 61, 58, 84, 53, 76, 64, 73, 47, 46, 63, 60, 74, 60, 62, 57, 64, 37, 66, 71, 68, 60, 79, 65, 67, 58, 67, 48, 59, 50, 77, 59, 68, 68, 54, 108, 77, 53, 64, 59, 77, 62, 50, 60, 77, 54, 86, 57, 65, 79, 95, 80, 61, 50, 89, 65, 80, 64, 63, 78, 59, 48, 62, 67, 63, 62, 76, 66, 62, 58, 65, 63, 58, 68, 57, 79, 51, 68, 75, 52, 52, 61, 64, 67, 49, 54, 74, 116, 66, 79, 113, 87, 63, 40, 61, 71, 54, 88, 76, 55, 55, 60, 77, 62, 59, 50, 68, 60, 65, 72, 38, 67, 59, 60, 59, 53, 55, 58, 61, 54, 100, 90, 64, 68, 60, 91, 73, 78, 56, 63, 60, 66, 72, 67, 78, 91, 77, 48, 100, 41, 74, 70, 61, 56, 58, 54, 60, 62, 50, 98, 58, 59, 64, 78, 56, 90, 60, 55, 82, 59, 64, 54, 99, 66, 55, 73, 97, 61, 87, 66, 65, 77, 61, 53, 64, 56, 69, 73, 73, 64, 56, 73, 79, 68, 69, 112, 58, 63, 62, 58, 57, 44, 59, 61, 62, 55, 91, 73, 90, 61, 101, 66, 73, 47, 65, 67, 54, 81, 55, 55, 50, 62, 64, 63, 92, 123, 85, 55, 69, 79, 66, 65, 58, 57, 54, 50, 67, 71, 63, 51, 52, 77, 86, 82, 62, 56, 63, 48, 98, 54, 62, 72, 55, 92, 75, 87, 55, 66, 67, 67, 67, 59, 62, 80, 61, 62, 74, 75, 99, 69, 60, 53, 73, 76, 48, 52, 69, 62, 61, 63, 68, 56, 110, 68, 67, 61, 77, 59, 64, 42, 52, 73, 76, 67, 71, 97, 95, 64, 91, 72, 71, 60, 61, 43, 53, 73, 48, 79, 50, 62, 50, 68, 62, 57, 56, 53, 57, 51, 77, 61, 45, 64, 61, 72, 77, 53, 40, 86, 60, 69, 70, 61, 46, 88, 113, 69, 111, 68, 56, 80, 71, 82, 57, 98, 85, 53, 64, 36, 97, 72, 71, 50, 57, 70, 107, 73, 50, 61, 63, 70, 79, 75, 63, 60, 53, 59, 68, 41, 71, 66, 80, 67, 59, 95, 62, 79, 66, 58, 79, 89, 48, 65, 71, 67, 65, 82, 50, 59, 99, 61, 71, 101, 77, 130, 61, 76, 58, 62, 61, 59, 61, 70, 55, 63, 62, 66, 62, 68, 70, 51, 74, 62, 61, 71, 49, 74, 60, 54, 56, 58, 52, 68, 68, 67, 68, 74, 77, 70, 74, 62, 63, 63, 61, 59, 59, 50, 67, 57, 48, 63, 64, 67, 55, 67, 72, 55, 60, 62, 88, 80, 96, 74, 73, 57, 62, 52, 58, 65, 64, 64, 58, 53, 52, 52, 60, 62, 64, 50, 60, 75, 58, 49, 85, 60, 74, 64, 48, 87, 67, 80, 62, 60, 118, 61, 58, 76, 63, 83, 78, 80, 66, 86, 62, 58, 69, 55, 71, 75, 60, 60, 66, 57, 66, 71, 69, 50, 60, 63, 75, 83, 62, 60, 53, 60, 42, 45, 88, 55, 62, 46, 66, 58, 48, 83, 55, 62, 61, 60, 49, 77, 71, 56, 62, 60, 42, 65, 114, 
74, 61, 54, 67, 63, 63, 64, 54, 46, 88, 66, 58, 62, 55, 65, 76, 60, 58, 76, 57, 77, 72, 58, 81, 70, 56, 48, 49, 80, 52, 67, 56, 69, 87, 52, 56, 78, 83, 66, 51, 46, 77, 61, 62, 56, 74, 94, 60, 58, 95, 56, 57, 65, 67, 62, 70, 59, 65, 75, 67, 63, 68, 104, 59, 61, 67, 65, 56, 55, 85, 63, 45, 76, 44, 108, 57, 74, 56, 70, 79, 52, 61, 44, 56, 60, 53, 91, 65, 58, 100, 63, 71, 63, 74, 58, 98, 55, 94, 58, 82, 60, 72, 68, 68, 56, 50, 63, 50, 82, 68, 63, 78, 58, 66, 62, 63, 67, 103, 70, 72, 89, 62, 65, 41, 65, 67, 43, 75, 42, 74, 63, 51, 53, 65, 48, 70, 62, 91, 74, 72, 77, 56, 67, 76, 78, 53, 61, 46, 82, 74, 68, 82, 55, 89, 64, 72, 69, 74, 82, 59, 94, 52, 42, 52, 64, 57, 78, 69, 83, 82, 69, 77, 60, 80, 63, 82, 54, 69, 81, 59, 66, 73, 66, 47, 61, 70, 55, 85, 101, 83, 65, 67, 71, 90, 78, 54, 65, 83, 59, 39, 43, 65, 57, 73, 71, 55, 70, 59, 69, 75, 59, 69, 54, 72, 69, 60, 53, 59, 52, 70, 61, 76, 49, 85, 61, 84, 74, 59, 71, 68, 70, 66, 61, 57, 74, 64, 51, 73, 56, 61, 60, 68, 60, 56, 107, 63, 58, 69, 63, 56, 68, 83, 73, 52, 58, 59, 93, 58, 85, 61, 71, 82, 57, 56, 63, 58, 68, 64, 52, 72, 34, 80, 54, 47, 76, 51, 78, 60, 52, 77, 86, 97, 71, 66, 61, 81, 66, 65, 69, 65, 74, 60, 60, 64, 67, 59, 67, 55, 69, 57, 66, 60, 69, 68, 102, 66, 56, 73, 54, 81, 78, 63, 67, 78, 64, 54, 42, 72, 68, 56, 69, 54, 67, 72, 77, 59, 78, 67, 49, 63, 68, 59, 56, 59, 64, 59, 69, 82, 46, 96, 72, 64, 76, 98, 63, 60, 54, 53, 70, 66, 53, 72, 71, 56, 59, 56, 59, 70, 78, 64, 61, 63, 56, 64, 92, 64, 60, 90, 56, 67, 71, 50, 60, 58, 47, 61, 80, 53, 56, 68, 55, 50, 70, 71, 85, 57, 97, 68, 67, 59, 56, 76, 72, 51, 79, 48, 59, 57, 63, 54, 55, 64, 62, 52, 51, 119, 90, 50, 59, 66, 58, 64, 43, 70, 69, 61, 62, 61, 55, 90, 47, 61, 85, 57, 40, 76, 80, 70, 42, 64, 61, 42, 67, 63, 69, 64, 83, 66, 61, 71, 47, 72, 89, 55, 63, 67, 48, 61, 67, 70, 62, 69, 65, 72, 53, 82, 55, 63, 65, 68, 51, 53, 44, 114, 70, 54, 57, 64, 59, 67, 69, 56, 70, 79, 65, 59, 94, 80, 48, 57, 57, 41, 58, 63, 68, 75, 71, 70, 82, 41, 75, 66, 54, 78, 72, 66, 77, 70, 71, 73, 60, 52, 52, 80, 53, 71, 64, 53, 73, 61, 67, 66, 43, 56, 85, 50, 66, 53, 55, 70, 71, 57, 59, 70, 84, 50, 80, 77, 67, 77, 55, 85, 97, 47, 52, 68, 68, 75, 65, 61, 65, 54, 67, 101, 72, 65, 95, 85, 67, 60, 60, 57, 54, 60, 59, 61, 57, 57, 62, 60, 54, 51, 74, 84, 64, 91, 61, 85, 59, 67, 53, 38, 80, 59, 62, 98, 71, 77, 72, 69, 62, 56, 47, 75, 86, 52, 54, 100, 58, 67, 80, 63, 51, 42, 104, 77, 85, 59, 57, 56, 58, 71, 51, 52, 51, 59, 60, 55, 93, 71, 60, 53, 44, 65, 47, 63, 63, 96, 104, 47, 61, 45, 71, 62, 114, 55, 66, 48, 66, 56, 68, 41, 80, 82, 48, 76, 67, 67, 68, 51, 72, 63, 52, 57, 60, 70, 80, 47, 59, 45, 67, 51, 84, 68, 58, 63, 65, 60, 74, 79, 66, 67, 53, 93, 62, 73, 49, 69, 74, 63, 74, 46, 53, 57, 53, 68, 52, 52, 56, 61, 52, 57, 88, 77, 74, 70, 63, 62, 60, 57, 51, 66, 56, 93, 73, 71, 66, 71, 96, 53, 56, 50, 51, 62, 76, 87, 59, 56, 51, 69, 60, 109, 138, 52, 63, 63, 93, 81, 49, 62, 73, 49, 53, 62, 90, 53, 72, 56, 54, 80, 45, 49, 47, 61, 77, 83, 105, 52, 78, 68, 66, 60, 69, 55, 68, 89, 82, 42, 48, 52, 75, 45, 55, 97, 52, 76, 71, 83, 57, 76, 56, 52, 65, 50, 59, 60, 77, 76, 66, 82, 53, 69, 53, 54, 56, 68, 76, 53, 65, 68, 66, 63, 83, 68, 45, 60, 63, 55, 61, 76, 65, 56, 66, 63, 64, 73, 58, 55, 83, 52, 67, 75, 49, 78, 74, 68, 74, 58, 53, 86, 65, 52, 59, 53, 113, 72, 83, 74, 48, 59, 74, 124, 70, 113, 62, 76, 87, 50, 82, 66, 81, 94, 66, 69, 53, 85, 69, 82, 50, 75, 41, 93, 59, 50, 66, 57, 63, 65, 83, 60, 62, 81, 72, 71, 50, 54, 61, 77, 67, 68, 69, 59, 79, 64, 64, 73, 64, 70, 58, 66, 63, 64, 86, 67, 93, 95, 37, 58, 81, 99, 102, 64, 73, 
66, 48, 79, 48, 61, 63, 57, 57, 63, 63, 77, 63, 62, 48, 73, 61, 69, 59, 58, 83, 59, 60, 61, 45, 61, 66, 63, 66, 68, 70, 79, 68, 62, 67, 59, 66, 53, 64, 74, 55, 49, 52, 41, 81, 62, 78, 55, 57, 62, 62, 71, 72, 67, 64, 83, 64, 68, 50, 77, 67, 72, 60, 70, 58, 66, 57, 54, 41, 61, 75, 65, 59, 63, 68, 53, 56, 77, 56, 58, 55, 62, 75, 53, 67, 78, 74, 133, 82, 53, 91, 54, 82, 76, 64, 78, 61, 54, 54, 71, 63, 70, 67, 70, 58, 102, 64, 65, 54, 75, 83, 54, 51, 81, 71, 55, 52, 63, 72, 86, 56, 85, 60, 78, 55, 61, 48, 64, 58, 49, 73, 51, 59, 110, 71, 71, 55, 65, 50, 67, 78, 123, 71, 59, 58, 58, 78, 76, 60, 50, 50, 69, 76, 77, 70, 76, 93, 93, 73, 50, 73, 68, 86, 55, 48, 71, 67, 67, 62, 68, 66, 70, 55, 44, 53, 89, 43, 70, 55, 81, 51, 53, 71, 56, 57, 66, 69, 73, 97, 69, 61, 81, 75, 51, 51, 62, 62, 64, 59, 67, 66, 58, 81, 60, 105, 61, 55, 75, 54, 61, 50, 81, 61, 53, 89, 51, 81, 66, 67, 61, 59, 69, 74, 60, 67, 62, 70, 45, 56, 71, 49, 94, 56, 56, 52, 71, 73, 93, 86, 69, 60, 78, 61, 54, 69, 57, 49, 68, 65, 59, 84, 53, 49, 85, 52, 68, 50, 76, 61, 114, 62, 69, 83, 74, 60, 49, 64, 48, 71, 65, 71, 72, 57, 67, 55, 63, 53, 49, 67, 60, 73, 67, 71, 53, 54, 68, 54, 61, 58, 35, 81, 78, 71, 61, 66, 73, 47, 78, 78, 50, 60, 69, 75, 71, 57, 53, 61, 71, 61, 56, 73, 73, 62, 80, 53, 93, 51, 86, 85, 66, 65, 69, 72, 62, 66, 64, 60, 58, 59, 106, 92, 55, 53, 71, 60, 83, 52, 41, 77, 76, 59, 52, 70, 53, 63, 54, 76, 63, 64, 69, 78, 77, 68, 60, 62, 65, 55, 70, 79, 65, 68, 69, 54, 78, 76, 64, 72, 59, 80, 52, 55, 65, 50, 54, 59, 72, 72, 78, 55, 66, 79, 65, 52, 65, 59, 72, 119, 69, 65, 80, 70, 54, 63, 61, 53, 59, 41, 58, 53, 71, 84, 57, 72, 61, 62, 78, 62, 57, 80, 56, 74, 88, 52, 45, 104, 63, 57, 56, 55, 68, 63, 70, 47, 73, 59, 71, 103, 52, 77, 67, 61, 61, 55, 57, 66, 55, 37, 54, 66, 73, 60, 67, 54, 57, 82, 66, 62, 65, 56, 70, 53, 46, 72, 58, 59, 64, 64, 41, 61, 75, 65, 72, 61, 81, 70, 57, 44, 82, 66, 74, 80, 63, 77, 55, 65, 70, 74, 73, 62, 67, 74, 70, 36, 116, 71, 62, 56, 56, 62, 60, 54, 77, 46, 62, 61, 57, 50, 46, 71, 75, 58, 47, 49, 69, 60, 90, 58, 78, 50, 56, 81, 62, 68, 69, 58, 69, 61, 74, 72, 57, 62, 63, 34, 110, 92, 72, 61, 63, 48, 46, 68, 58, 37, 66, 59, 65, 79, 68, 52, 74, 64, 76, 80, 65, 79, 74, 62, 141, 103, 69, 49, 53, 60, 59, 46, 73, 63, 53, 54, 66, 70, 86, 75, 71, 74, 55, 58, 61, 43, 61, 83, 70, 60, 46, 62, 53, 60, 71, 108, 81, 48, 86, 46, 74, 50, 45, 74, 62, 65, 47, 64, 71, 57, 72, 67, 53, 42, 90, 53, 63, 79, 66, 88, 91, 69, 60, 46, 60, 60, 53, 79, 43, 66, 80, 61, 57, 65, 66, 55, 79, 74, 59, 61, 61, 82, 74, 68, 68, 62, 77, 68, 54, 59, 99, 63, 49, 76, 62, 89, 108, 58, 106, 55, 56, 82, 82, 62, 71, 98, 68, 63, 63, 40, 60, 94, 63, 78, 63, 62, 64, 39, 58, 59, 42, 87, 75, 66, 54, 100, 68, 86, 54, 64, 44, 71, 59, 67, 52, 77, 78, 57, 59, 66, 64, 78, 88, 46, 96, 83, 57, 67, 70, 66, 46, 57, 50, 75, 62, 63, 60, 47, 65, 54, 92, 83, 62, 69, 53, 77, 88, 67, 88, 72, 55, 68, 60, 73, 71, 73, 76, 53, 79, 69, 57, 81, 66, 64, 44, 64, 71, 73, 57, 63, 66, 69, 65, 95, 69, 72, 65, 73, 71, 61, 47, 55, 76, 103, 70, 64, 77, 32, 72, 67, 54, 53, 74, 52, 39, 26, 88, 69, 50, 58, 71, 67, 58, 84, 56, 80, 60, 41, 67, 42, 51, 60, 48, 38, 67, 47, 45, 53, 76, 58, 59, 88, 56, 93, 53, 78, 49, 80, 55, 64, 64, 83, 53, 68, 52, 51, 47, 82, 70, 56, 87, 76, 54, 74, 59, 56, 58, 62, 53, 59, 74, 69, 68, 53, 63, 47, 49, 73, 67, 89, 54, 69, 45, 69, 74, 70, 76, 69, 55, 68, 77, 84, 84, 43, 110, 80, 83, 107, 71, 60, 92, 53, 64, 72, 85, 76, 94, 77, 50, 69, 57, 49, 48, 78, 64, 62, 53, 64, 65, 74, 51, 57, 81, 61, 47, 62, 62, 61, 86, 60, 53, 56, 59, 95, 74, 58, 40, 87, 63, 49, 76, 
95, 67, 64, 65, 59, 68, 72, 65, 60, 83, 52, 67, 89, 66, 74, 78, 69, 62, 70, 91, 41, 79, 73, 47, 73, 51, 72, 59, 54, 59, 95, 58, 52, 66, 83, 76, 57, 112, 54, 59, 50, 75, 78, 57, 78, 70, 69, 49, 69, 65, 64, 132, 76, 52, 65, 82, 61, 78, 59, 74, 64, 60, 70, 77, 87, 93, 58, 80, 77, 79, 56, 73, 70, 72, 64, 81, 57, 70, 77, 86, 70, 96, 83, 79, 52, 39, 66, 68, 74, 113, 62, 47, 61, 58, 68, 80, 48, 70, 68, 75, 68, 80, 56, 97, 75, 57, 47, 61, 52, 81, 63, 94, 55, 42, 57, 47, 75, 110, 49, 80, 84, 86, 77, 66, 71, 73, 82, 55, 58, 74, 41, 58, 69, 75, 68, 68, 52, 88, 90, 71, 62, 61, 32, 86, 53, 84, 52, 43, 61, 70, 66, 64, 82, 84, 73, 66, 67, 79, 62, 51, 66, 63, 94, 57, 61, 68, 58, 70, 45, 32, 61, 69, 65, 49, 72, 82, 91, 54, 73, 61, 94, 48, 56, 90, 36, 50, 107, 56, 60, 61, 65, 57, 80, 57, 49, 61, 67, 48, 82, 76, 42, 39, 74, 49, 72, 62, 59, 61, 56, 61, 54, 73, 56, 70, 79, 52, 60, 94, 78, 125, 71, 49, 63, 67, 85, 51, 63, 76, 44, 94, 87, 70, 57, 75, 55, 77, 67, 61, 54, 52, 75, 85, 48, 116, 55, 81, 60, 67, 46, 75, 113, 63, 95, 62, 71, 51, 57, 96, 89, 109, 58, 69, 70, 47, 61, 63, 81, 66, 61, 56, 62, 70, 52, 76, 52, 65, 62, 75, 70, 56, 62, 74, 47, 73, 59, 72, 87, 78, 90, 55, 60, 41, 77, 154, 47, 66, 69, 90, 64, 62, 112, 70, 64, 56, 55, 56, 59, 59, 47, 68, 56, 56, 44, 62, 66, 59, 43, 65, 53, 77, 85, 65, 41, 52, 70, 59, 68, 50, 59, 91, 68, 73, 49, 80, 48, 78, 61, 48, 51, 61, 98, 49, 72, 60, 50, 58, 42, 75, 68, 66, 59, 70, 82, 95, 50, 60, 96, 67, 51, 59, 82, 56, 49, 55, 62, 82, 46, 62, 49, 56, 76, 73, 62, 60, 76, 63, 77, 78, 64, 54, 58, 64, 54, 69, 56, 73, 74, 77, 68, 59, 67, 64, 81, 64, 98, 74, 65, 54, 82, 89, 60, 72, 54, 74, 59, 60, 45, 63, 51, 72, 67, 45, 73, 71, 64, 58, 54, 87, 55, 76, 65, 57, 53, 55, 61, 104, 81, 65, 65, 68, 55, 70, 66, 51, 67, 58, 64, 55, 96, 75, 57, 61, 64, 59, 76, 74, 46, 51, 64, 70, 55, 78, 72, 58, 76, 69, 56, 80, 57, 85, 53, 52, 70, 69, 58, 63, 64, 68, 68, 56, 53, 68, 61, 72, 57, 70, 60, 69, 53, 59, 55, 74, 78, 73, 80, 67, 62, 52, 60, 105, 62, 72, 74, 57, 78, 57, 59, 55, 60, 68, 65, 59, 50, 66, 63, 61, 72, 49, 50, 86, 39, 60, 42, 61, 63, 53, 45, 67, 55, 66, 70, 69, 56, 58, 114, 73, 119, 77, 65, 70, 71, 54, 66, 64, 56, 55, 53, 45, 68, 77, 85, 56, 57, 45, 72, 75, 52, 64, 78, 61, 68, 64, 87, 63, 91, 57, 59, 47, 66, 59, 59, 66, 50, 71, 63, 55, 61, 61, 68, 72, 50, 62, 43, 55, 72, 51, 63, 68, 75, 72, 66, 78, 59, 74, 70, 50, 65, 73, 62, 65, 52, 56, 70, 66, 80, 95, 51, 54, 62, 60, 55, 73, 58, 79, 83, 56, 67, 61, 67, 55, 71, 54, 50, 77, 61, 78, 49, 89, 56, 60, 57, 54, 60, 95, 72, 49, 49, 61, 75, 52, 60, 97, 55, 51, 51, 82, 65, 55, 78, 81, 67, 80, 63, 70, 51, 77, 67, 67, 56, 62, 56, 63, 57, 58, 62, 60, 73, 87, 64, 64, 64, 61, 69, 79, 99, 78, 45, 49, 55, 57, 66, 48, 53, 83, 77, 65, 74, 51, 68, 49, 61, 64, 42, 56, 55, 86, 70, 121, 102, 59, 44, 64, 60, 50, 63, 66, 52, 57, 69, 60, 77, 85, 51, 65, 71, 45, 62, 75, 71, 91, 85, 72, 70, 60, 68, 75, 65, 78, 107, 61, 48, 67, 66, 57, 66, 64, 75, 49, 51, 74, 63, 74, 54, 87, 83, 62, 55, 73, 68, 64, 55, 54, 88, 74, 55, 74, 87, 61, 62, 58, 70, 67, 69, 87, 79, 40, 86, 56, 72, 79, 60, 55, 81, 65, 89, 60, 79, 80, 63, 55, 73, 82, 56, 90, 54, 55, 56, 78, 65, 126, 67, 84, 41, 67, 64, 86, 59, 59, 69, 58, 79, 53, 74, 56, 104, 61, 62, 72, 74, 55, 46, 60, 65, 55, 60, 85, 48, 70, 98, 74, 59, 66, 79, 59, 56, 77, 48, 56, 77, 70, 50, 41, 66, 50, 66, 83, 58, 84, 64, 52, 68, 68, 59, 60, 57, 49, 94, 59, 60, 52, 65, 67, 39, 71, 139, 84, 97, 53, 76, 64, 54, 44, 53, 72, 49, 78, 76, 46, 66, 77, 51, 66, 64, 57, 105, 69, 75, 80, 57, 50, 61, 52, 65, 100, 51, 63, 105, 51, 84, 50, 41, 78, 
112, 67, 69, 62, 48, 62, 57, 53, 51, 56, 49, 54, 50, 57, 58, 76, 87, 66, 63, 82, 69, 82, 69, 61, 87, 58, 76, 69, 105, 65, 52, 59, 72, 85, 68, 81, 64, 67, 72, 109, 65, 67, 63, 80, 50, 52, 60, 77, 66, 73, 57, 58, 63, 79, 52, 55, 79, 58, 51, 56, 74, 63, 81, 53, 87, 71, 61, 72, 60, 52, 71, 91, 128, 67, 51, 65, 63, 73, 53, 32, 55, 55, 52, 49, 62, 77, 67, 100, 74, 67, 64, 80, 49, 90, 68, 53, 64, 61, 53, 60, 66, 79, 64, 81, 50, 78, 73, 49, 102, 76, 76, 71, 68, 71, 55, 73, 74, 51, 69, 56, 57, 81, 75, 70, 107, 60, 48, 118, 61, 64, 74, 63, 59, 57, 53, 72, 58, 63, 91, 69, 78, 57, 81, 66, 70, 74, 71, 76, 54, 70, 67, 53, 53, 91, 62, 45, 58, 50, 47, 70, 57, 67, 63, 74, 59, 111, 64, 50, 53, 72, 61, 95, 62, 64, 85, 111, 113, 72, 80, 74, 71, 80, 65, 65, 87, 59, 71, 63, 74, 62, 54, 68, 58, 42, 62, 72, 65, 73, 60, 65, 61, 55, 48, 78, 69, 58, 81, 68, 69, 54, 66, 118, 75, 77, 70, 51, 60, 57, 52, 59, 87, 64, 56, 48, 84, 55, 60, 55, 69, 68, 60, 70, 70, 56, 75, 54, 65, 68, 72, 62, 44, 63, 59, 56, 53, 65, 72, 75, 67, 57, 82, 99, 58, 56, 65, 56, 73, 62, 60, 54, 48, 75, 52, 63, 86, 96, 62, 59, 54, 73, 78, 39, 64, 58, 66, 59, 68, 63, 53, 61, 60, 61, 56, 76, 58, 74, 92, 61, 49, 95, 63, 75, 59, 54, 66, 69, 63, 46, 51, 59, 58, 79, 64, 75, 67, 61, 55, 67, 48, 72, 68, 84, 75, 113, 60, 65, 89, 73, 80, 82, 43, 65, 63, 58, 105, 52, 78, 59, 86, 76, 52, 68, 52, 54, 59, 83, 63, 52, 57, 60, 76, 72, 75, 61, 53, 40, 83, 89, 141, 61, 50, 70, 70, 51, 88, 70, 61, 67, 50, 59, 85, 78, 66, 46, 49, 52, 48, 70, 65, 71, 67, 69, 78, 76, 100, 57, 72, 63, 75, 125, 46, 58, 77, 61, 52, 64, 81, 53, 45, 72, 53, 73, 54, 60, 81, 62, 55, 52, 67, 67, 63, 53, 56, 57, 74, 80, 77, 59, 73, 73, 50, 81, 65, 64, 73, 57, 45, 50, 47, 60, 62, 83, 77, 67, 49, 68, 56, 69, 59, 55, 52, 88, 64, 64, 55, 98, 74, 55, 66, 69, 114, 56, 53, 60, 61, 72, 58, 50, 71, 65, 66, 63, 59, 50, 67, 58, 76, 48, 73, 64, 52, 75, 64, 51, 59, 49, 61, 70, 52, 46, 62, 52, 65, 70, 59, 71, 68, 66, 65, 89, 44, 91, 56, 87, 63, 71, 57, 77, 72, 59, 82, 70, 68, 89, 64, 55, 68, 77, 59, 56, 61, 57, 63, 51, 61, 82, 68, 61, 80, 55, 72, 76, 63, 85, 83, 68, 63, 45, 85, 60, 62, 52, 111, 62, 54, 43, 45, 59, 61, 56, 49, 57, 64, 69, 75, 61, 51, 60, 61, 69, 63, 59, 75, 53, 80, 54, 43, 61, 53, 78, 54, 72, 68, 53, 59, 58, 67, 61, 82, 91, 59, 79, 74, 68, 66, 65, 58, 51, 77, 46, 41, 89, 61, 59, 54, 95, 57, 61, 72, 64, 58, 81, 67, 68, 79, 72, 56, 59, 82, 76, 58, 60, 46, 58, 60, 87, 62, 69, 79, 63, 68, 68, 64, 66, 63, 55, 48, 90, 54, 68, 66, 57, 65, 72, 72, 62, 50, 52, 61, 65, 56, 55, 66, 64, 86, 64, 60, 64, 64, 68, 58, 55, 67, 94, 50, 58, 59, 62, 65, 85, 65, 59, 82, 65, 72, 61, 47, 61, 65, 55, 54, 65, 49, 101, 76, 45, 63, 80, 60, 62, 62, 72, 72, 61, 66, 60, 91, 59, 61, 69, 44, 67, 72, 68, 59, 65, 64, 56, 69, 58, 65, 61, 54, 59, 107, 75, 103, 57, 68, 71, 101, 70, 54, 63, 74, 56, 61, 91, 54, 43, 46, 47, 78, 63, 75, 77, 68, 61, 42, 52, 61, 58, 77, 37, 58, 85, 65, 64, 65, 88, 61, 78, 58, 66, 83, 57, 76, 56, 68, 77, 46, 54, 49, 50, 71, 54, 67, 68, 75, 67, 65, 60, 68, 42, 58, 87, 52, 74, 54, 71, 49, 68, 64, 84, 45, 61, 80, 66, 68, 83, 67, 82, 61, 69, 53, 83, 79, 54, 68, 70, 60, 49, 71, 79, 76, 63, 67, 71, 55, 42, 55, 58, 53, 80, 50, 92, 70, 122, 79, 87, 67, 51, 84, 62, 72, 81, 86, 75, 69, 63, 84, 60, 57, 64, 81, 81, 72, 58, 69, 95, 67, 71, 63, 55, 57, 84, 57, 68, 73, 51, 58, 66, 56, 63, 88, 79, 53, 66, 67, 63, 48, 75, 70, 84, 46, 52, 76, 41, 90, 60, 57, 61, 82, 68, 63, 60, 59, 65, 85, 77, 69, 74, 97, 92, 57, 69, 90, 61, 71, 49, 76, 65, 55, 121, 89, 64, 59, 69, 66, 75, 73, 75, 63, 70, 87, 52, 47, 72, 69, 69, 66, 87, 
69, 54, 51, 59, 80, 68, 65, 60, 65, 59, 55, 55, 149, 63, 68, 72, 77, 60, 50, 75, 75, 79, 85, 77, 80, 63, 62, 60, 54, 67, 52, 78, 48, 69, 76, 60, 51, 67, 59, 48, 54, 47, 60, 74, 47, 118, 62, 78, 62, 49, 77, 59, 54, 50, 63, 52, 72, 76, 56, 59, 45, 64, 60, 72, 59, 66, 67, 69, 67, 97, 82, 79, 54, 68, 57, 62, 58, 50, 48, 90, 51, 94, 58, 58, 68, 97, 62, 79, 85, 54, 53, 72, 49, 61, 75, 44, 48, 56, 78, 53, 60, 69, 66, 57, 74, 43, 52, 50, 82, 94, 56, 91, 50, 81, 83, 59, 58, 91, 66, 91, 62, 62, 63, 68, 49, 61, 51, 61, 66, 85, 72, 54, 57, 49, 51, 65, 52, 65, 59, 65, 62, 61, 57, 57, 64, 72, 91, 81, 67, 59, 81, 60, 74, 70, 61, 70, 55, 78, 65, 63, 71, 45, 53, 35, 66, 60, 53, 76, 71, 37, 67, 52, 86, 69, 79, 61, 81, 66, 54, 59, 60, 80, 49, 71, 72, 92, 60, 52, 82, 63, 65, 64, 62, 62, 58, 65, 55, 55, 61, 54, 59, 55, 52, 51, 84, 52, 63, 65, 54, 74, 61, 59, 76, 51, 56, 60, 71, 67, 53, 74, 58, 71, 60, 93, 79, 74, 61, 70, 66, 55, 61, 96, 83, 49, 50, 50, 61, 77, 62, 52, 59, 71, 53, 47, 62, 57, 65, 93, 61, 42, 56, 56, 61, 65, 70, 59, 65, 48, 63, 68, 76, 62, 57, 95, 69, 71, 77, 64, 75, 56, 48, 71, 63, 90, 61, 56, 61, 44, 64, 45, 58, 70, 69, 62, 49, 60, 58, 49, 76, 97, 83, 65, 71, 63, 51, 63, 64, 49, 77, 68, 55, 54, 61, 69, 80, 47, 80, 56, 52, 98, 45, 61, 72, 61, 66, 61, 68, 67, 65, 60, 66, 77, 73, 85, 54, 91, 61, 75, 69, 59, 54, 54, 82, 66, 73, 61, 49, 87, 56, 86, 71, 46, 70, 80, 41, 53, 76, 67, 55, 70, 65, 64, 67, 60, 68, 52, 72, 82, 83, 70, 65, 72, 66, 55, 66, 61, 72, 81, 50, 52, 67, 54, 84, 83, 71, 61, 62, 54, 74, 69, 66, 60, 52, 86, 93, 125, 61, 55, 75, 53, 78, 75, 68, 52, 66, 83, 56, 117, 54, 63, 72, 69, 70, 62, 53, 52, 73, 63, 80, 64, 52, 63, 58, 72, 87, 74, 61, 61, 59, 81, 56, 58, 33, 58, 67, 52, 67, 69, 55, 61, 46, 79, 59, 59, 64, 134, 62, 62, 52, 117, 62, 43, 60, 71, 52, 49, 57, 72, 71, 71, 68, 50, 59, 74, 56, 69, 59, 58, 57, 56, 60, 48, 66, 55, 54, 66, 62, 67, 86, 61, 63, 52, 40, 76, 74, 70, 68, 78, 66, 66, 74, 55, 56, 55, 55, 92, 99, 79, 80, 73, 46, 87, 70, 70, 62, 67, 77, 70, 75, 76, 70, 58, 72, 60, 58, 38, 78, 95, 76, 83, 57, 61, 93, 93, 57, 73, 64, 71, 66, 86, 67, 61, 74, 62, 83, 67, 65, 74, 61, 72, 51, 68, 55, 68, 86, 60, 64, 70, 70, 62, 52, 47, 54, 69, 60, 56, 69, 65, 67, 60, 63, 46, 63, 40, 92, 67, 52, 52, 58, 58, 58, 51, 75, 58, 60, 70, 61, 58, 32, 73, 61, 73, 60, 50, 79, 57, 69, 115, 72, 52, 79, 62, 83, 70, 81, 61, 67, 61, 63, 64, 51, 57, 54, 63, 74, 51, 56, 73, 53, 76, 74, 89, 61, 56, 68, 54, 79, 62, 62, 81, 43, 78, 67, 53, 58, 55, 86, 43, 65, 73, 52, 69, 60, 71, 75, 62, 66, 67, 57, 96, 63, 61, 82, 68, 70, 113, 79, 61, 77, 71, 76, 62, 64, 65, 67, 49, 67, 54, 56, 60, 67, 61, 69, 64, 55, 68, 84, 54, 54, 59, 61, 74, 56, 61, 58, 74, 72, 69, 67, 74, 53, 48, 61, 74, 67, 71, 65, 61, 54, 57, 75, 61, 56, 61, 54, 56, 68, 60, 63, 64, 52, 76, 45, 59, 77, 60, 62, 55, 61, 73, 81, 111, 42, 59, 65, 59, 59, 48, 56, 38, 69, 47, 79, 110, 65, 52, 54, 84, 61, 51, 50, 56, 61, 57, 64, 74, 32, 66, 60, 55, 76, 73, 59, 81, 55, 54, 59, 100, 70, 58, 50, 59, 62, 78, 56, 59, 59, 54, 83, 57, 53, 60, 53, 59, 70, 53, 55, 51, 44, 58, 66, 68, 70, 62, 62, 64, 82, 71, 83, 69, 71, 62, 56, 55, 54, 90, 47, 64, 60, 54, 61, 83, 45, 59, 112, 51, 56, 56, 49, 83, 67, 64, 43, 62, 57, 92, 58, 57, 61, 60, 47, 43, 69, 82, 47, 73, 87, 42, 87, 44, 58, 70, 73, 67, 59, 57, 47, 54, 46, 70, 60, 53, 65, 61, 60, 57, 73, 76, 138, 71, 82, 69, 65, 79, 67, 61, 65, 79, 69, 86, 82, 52, 68, 60, 70, 58, 62, 66, 69, 65, 80, 59, 60, 54, 69, 42, 45, 53, 84, 67, 58, 60, 62, 78, 59, 78, 80, 49, 66, 62, 75, 74, 66, 71, 63, 69, 81, 45, 55, 92, 96, 39, 58, 69, 93, 
71, 88, 63, 79, 77, 54, 59, 54, 58, 73, 88, 59, 55, 82, 75, 76, 74, 53, 98, 54, 89, 57, 50, 81, 63, 76, 64, 39, 52, 51, 74, 64, 63, 69, 71, 52, 60, 70, 56, 50, 68, 26, 66, 58, 129, 54, 73, 65, 63, 81, 54, 49, 53, 62, 67, 73, 73, 56, 51, 75, 58, 80, 86, 84, 54, 79, 82, 71, 92, 74, 74, 60, 67, 76, 62, 65, 56, 56, 79, 72, 56, 59, 71, 68, 80, 63, 60, 45, 83, 88, 61, 54, 77, 66, 61, 72, 65, 74, 61, 62, 72, 55, 47, 57, 63, 59, 52, 58, 63, 98, 96, 71, 64, 82, 61, 71, 55, 63, 63, 72, 140, 64, 52, 82, 74, 67, 65, 76, 92, 77, 75, 54, 70, 77, 74, 64, 73, 83, 111, 60, 56, 57, 75, 78, 66, 35, 55, 79, 62, 60, 69, 116, 64, 49, 51, 59, 72, 58, 77, 92, 73, 75, 73, 71, 60, 56, 68, 62, 66, 86, 59, 67, 55, 49, 74, 54, 61, 58, 66, 57, 63, 59, 72, 72, 56, 79, 51, 74, 55, 72, 69, 59, 65, 59, 55, 46, 70, 54, 64, 56, 59, 71, 50, 68, 32, 90, 50, 67, 100, 80, 90, 73, 53, 51, 76, 59, 56, 65, 52, 75, 96, 57, 57, 58, 85, 55, 68, 74, 71, 54, 68, 55, 62, 55, 65, 67, 59, 85, 78, 56, 75, 72, 68, 69, 53, 86, 65, 52, 109, 44, 93, 70, 67, 39, 58, 75, 124, 62, 46, 95, 57, 76, 62, 61, 61, 51, 51, 51, 100, 62, 77, 67, 62, 64, 73, 57, 71, 73, 67, 58, 69, 70, 59, 48, 64, 52, 78, 68, 56, 63, 66, 57, 81, 80, 53, 54, 76, 54, 75, 58, 82, 64, 67, 70, 59, 54, 92, 79, 82, 62, 60, 44, 65, 56, 57, 86, 57, 59, 54, 71, 68, 65, 63, 72, 56, 69, 52, 71, 49, 66, 82, 70, 47, 66, 58, 38, 67, 86, 73, 62, 67, 64, 75, 99, 62, 66, 70, 50, 57, 62, 68, 60, 38, 67, 90, 75, 58, 53, 68, 74, 72, 51, 84, 96, 67, 48, 78, 60, 70, 62, 78, 54, 89, 85, 64, 53, 76, 71, 50, 53, 63, 63, 41, 60, 45, 71, 66, 45, 61, 61, 62, 68, 67, 90, 40, 63, 57, 54, 66, 68, 49, 49, 111, 68, 68, 95, 73, 83, 51, 62, 83, 49, 50, 56, 88, 71, 62, 68, 74, 95, 60, 63, 60, 53, 60, 62, 65, 44, 81, 86, 54, 66, 65, 67, 44, 54, 61, 67, 58, 63, 80, 64, 60, 51, 65, 98, 55, 84, 102, 64, 67, 76, 44, 74, 48, 78, 61, 79, 45, 68, 45, 68, 81, 39, 56, 63, 70, 58, 95, 52, 60, 79, 54, 67, 103, 50, 51, 68, 83, 54, 59, 79, 59, 64, 67, 62, 61, 63, 57, 65, 55, 62, 72, 76, 56, 62, 72, 54, 81, 67, 55, 58, 50, 79, 79, 71, 82, 70, 43, 53, 58, 82, 63, 83, 74, 56, 63, 71, 54, 65, 48, 56, 76, 81, 139, 78, 71, 72, 76, 56, 47, 52, 72, 58, 64, 57, 72, 61, 62, 84, 78, 59, 83, 68, 63, 52, 53, 53, 68, 49, 65, 64, 75, 79, 60, 47, 77, 60, 92, 54, 52, 35, 57, 59, 65, 57, 46, 54, 66, 114, 111, 48, 63, 81, 133, 74, 56, 68, 105, 63, 56, 60, 48, 64, 73, 46, 63, 64, 69, 85, 86, 63, 94, 64, 60, 57, 47, 66, 69, 59, 66, 43, 66, 54, 95, 64, 55, 70, 79, 66, 41, 58, 94, 81, 57, 53, 80, 81, 73, 45, 49, 62, 53, 59, 77, 80, 67, 62, 76, 63, 65, 85, 55, 53, 55, 59, 63, 58, 58, 58, 53, 63, 79, 64, 72, 63, 56, 73, 68, 51, 50, 65, 64, 73, 57, 68, 54, 72, 63, 85, 53, 64, 58, 79, 48, 54, 56, 59, 59, 75, 54, 64, 50, 73, 89, 55, 86, 58, 76, 45, 55, 48, 51, 66, 87, 96, 61, 75, 54, 58, 57, 60, 75, 74, 65, 50, 66, 60, 65, 68, 51, 73, 58, 88, 54, 47, 62, 54, 74, 59, 102, 58, 49, 70, 43, 60, 65, 49, 61, 60, 73, 73, 71, 62, 66, 64, 69, 65, 64, 76, 62, 70, 82, 84, 55, 56, 62, 61, 49, 60, 62, 53, 52, 51, 80, 52, 52, 60, 66, 62, 81, 72, 48, 54, 82, 60, 82, 70, 61, 53, 75, 50, 65, 64, 50, 63, 56, 68, 91, 57, 48, 44, 51, 62, 57, 64, 76, 58, 75, 55, 63, 47, 46, 68, 49, 73, 43, 69, 54, 79, 64, 62, 82, 101, 102, 85, 69, 80, 43, 61, 65, 53, 63, 57, 67, 92, 67, 78, 69, 51, 71, 72, 80, 45, 71, 68, 69, 78, 67, 74, 52, 77, 60, 50, 56, 67, 69, 67, 71, 99, 57, 70, 75, 55, 48, 54, 49, 54, 61, 54, 50, 92, 64, 68, 60, 66, 56, 61, 71, 66, 58, 68, 84, 51, 69, 61, 110, 74, 83, 74, 68, 58, 71, 55, 46, 71, 63, 88, 80, 59, 55, 63, 82, 73, 59, 58, 99, 65, 52, 82, 63, 81, 51, 
50, 82, 54, 61, 62, 53, 50, 89, 54, 63, 65, 75, 48, 55, 61, 60, 62, 75, 61, 58, 61, 73, 65, 60, 75, 46, 45, 55, 63, 77, 53, 84, 52, 39, 43, 61, 84, 54, 48, 59, 61, 72, 67, 63, 46, 65, 71, 58, 60, 80, 59, 57, 80, 59, 52, 74, 59, 50, 57, 87, 50, 84, 54, 64, 89, 63, 57, 46, 67, 56, 49, 69, 70, 66, 76, 51, 52, 53, 52, 65, 65, 64, 90, 96, 117, 70, 36, 97, 61, 59, 60, 79, 74, 64, 66, 62, 40, 82, 62, 68, 56, 59, 77, 84, 77, 56, 46, 67, 47, 87, 66, 50, 69, 54, 63, 68, 56, 69, 89, 44, 49, 98, 54, 60, 37, 54, 58, 74, 56, 71, 79, 50, 40, 86, 74, 55, 74, 70, 70, 67, 69, 67, 65, 54, 68, 74, 57, 78, 71, 57, 63, 65, 64, 53, 111, 55, 57, 61, 51, 63, 54, 68, 81, 100, 79, 54, 65, 57, 61, 66, 89, 70, 57, 66, 66, 68, 67, 57, 59, 53, 60, 68, 79, 78, 68, 103, 52, 59, 59, 45, 51, 61, 70, 69, 63, 66, 57, 59, 60, 54, 74, 61, 78, 74, 95, 51, 68, 61, 70, 60, 55, 57, 70, 71, 59, 61, 45, 54, 51, 55, 64, 61, 63, 64, 88, 62, 67, 48, 61, 57, 55, 69, 52, 50, 55, 67, 68, 51, 67, 58, 79, 66, 67, 42, 59, 48, 64, 94, 94, 43, 95, 71, 71, 52, 103, 79, 56, 56, 68, 71, 85, 151, 71, 59, 70, 89, 63, 69, 85, 60, 50, 71, 51, 54, 74, 72, 65, 53, 76, 94, 94, 57, 69, 56, 70, 52, 103, 71, 89, 58, 82, 91, 102, 63, 92, 59, 58, 65, 49, 83, 90, 60, 66, 67, 62, 60, 61, 68, 58, 84, 70, 54, 53, 54, 72, 76, 53, 84, 68, 45, 76, 81, 53, 61, 58, 46, 79, 83, 52, 74, 69, 63, 49, 47, 65, 59, 47, 73, 69, 63, 60, 56, 61, 51, 70, 54, 80, 50, 48, 82, 63, 97, 63, 83, 65, 57, 67, 71, 58, 61, 37, 81, 57, 55, 87, 82, 59, 63, 60, 72, 69, 51, 71, 42, 59, 49, 68, 63, 66, 55, 54, 54, 131, 55, 99, 58, 62, 92, 73, 101, 44, 87, 57, 62, 41, 61, 49, 75, 68, 72, 78, 77, 84, 53, 79, 54, 67, 57, 56, 107, 89, 69, 70, 57, 56, 72, 59, 52, 69, 70, 63, 77, 62, 74, 43, 80, 62, 78, 69, 80, 75, 65, 62, 42, 71, 75, 57, 54, 63, 62, 72, 60, 86, 55, 61, 66, 53, 42, 64, 43, 62, 53, 67, 52, 66, 77, 94, 72, 46, 73, 73, 69, 60, 97, 81, 44, 53, 58, 82, 58, 54, 70, 67, 68, 90, 57, 46, 70, 74, 46, 85, 69, 80, 74, 64, 65, 53, 88, 57, 91, 55, 70, 72, 76, 69, 93, 101, 47, 63, 60, 61, 65, 55, 83, 59, 42, 74, 68, 94, 52, 66, 70, 59, 60, 73, 78, 69, 60, 74, 62, 60, 89, 66, 61, 55, 63, 47, 31, 117, 68, 62, 86, 69, 69, 56, 60, 52, 59, 52, 72, 68, 64, 76, 75, 65, 66, 58, 73, 75, 79, 53, 80, 47, 94, 63, 49, 73, 56, 62, 65, 83, 110, 72, 51, 73, 45, 71, 56, 63, 82, 64, 66, 66, 66, 57, 94, 66, 66, 63, 72, 50, 74, 55, 66, 47, 76, 83, 64, 66, 94, 63, 81, 57, 60, 58, 62, 56, 52, 66, 62, 59, 56, 73, 67, 78, 68, 48, 45, 66, 55, 79, 83, 64, 69, 65, 80, 52, 55, 65, 107, 50, 56, 66, 58, 67, 80, 57, 54, 85, 55, 57, 47, 71, 66, 62, 64, 64, 53, 45, 63, 64, 53, 55, 63, 40, 81, 107, 77, 84, 71, 65, 69, 105, 71, 89, 68, 66, 52, 67, 58, 101, 63, 68, 93, 92, 136, 57, 68, 70, 59, 54, 74, 74, 67, 70, 51, 49, 54, 61, 56, 56, 47, 81, 69, 63, 61, 67, 63, 59, 58, 64, 63, 41, 66, 62, 68, 61, 84, 56, 72, 59, 80, 53, 53, 60, 57, 66, 80, 55, 74, 76, 81, 75, 71, 86, 103, 77, 52, 46, 114, 84, 44, 68, 71, 79, 51, 84, 70, 60, 43, 56, 55, 59, 95, 52, 82, 63, 57, 54, 55, 55, 56, 72, 87, 75, 83, 63, 60, 74, 81, 64, 71, 87, 77, 113, 49, 64, 74, 71, 57, 55, 68, 71, 77, 67, 68, 78, 65, 78, 62, 64, 70, 57, 58, 69, 62, 60, 59, 59, 68, 78, 49, 81, 72, 52, 60, 51, 66, 71, 54, 58, 49, 64, 68, 72, 85, 58, 63, 82, 44, 69, 74, 83, 49, 55, 63, 50, 63, 59, 53, 55, 65, 57, 75, 74, 61, 75, 57, 53, 74, 70, 69, 62, 66, 63, 55, 72, 61, 65, 84, 58, 66, 40, 67, 56, 49, 51, 52, 57, 66, 69, 106, 55, 57, 62, 85, 55, 66, 67, 66, 57, 54, 45, 57, 71, 56, 63, 56, 54, 80, 61, 64, 88, 55, 56, 59, 52, 62, 75, 77, 66, 71, 70, 66, 62, 73, 69, 49, 73, 61, 56, 57, 69, 
65, 86, 80, 56, 60, 80, 72, 84, 86, 58, 68, 68, 60, 65, 75, 66, 43, 59, 101, 61, 78, 58, 67, 61, 50, 44, 67, 47, 55, 64, 58, 51, 118, 62, 53, 57, 76, 56, 70, 66, 66, 79, 51, 49, 63, 60, 62, 105, 53, 52, 96, 62, 53, 42, 60, 59, 82, 57, 58, 73, 58, 123, 59, 67, 53, 54, 65, 55, 66, 72, 68, 64, 81, 59, 53, 83, 57, 67, 67, 71, 59, 77, 83, 61, 63, 67, 59, 65, 68, 76, 64, 46, 64, 52, 54, 74, 90, 70, 81, 55, 68, 54, 73, 57, 65, 67, 74, 59, 54, 57, 69, 65, 73, 61, 34, 56, 93, 69, 45, 55, 103, 60, 60, 56, 55, 82, 76, 70, 83, 50, 62, 69, 57, 63, 56, 58, 55, 66, 70, 79, 70, 73, 68, 46, 76, 51, 62, 62, 57, 64, 76, 42, 72, 51, 45, 77, 44, 72, 63, 66, 54, 72, 85, 67, 67, 81, 52, 97, 60, 72, 41, 52, 83, 42, 77, 59, 70, 80, 58, 62, 71, 65, 51, 63, 67, 64, 80, 80, 70, 62, 76, 51, 85, 58, 49, 56, 65, 72, 67, 55, 49, 54, 78, 61, 57, 56, 54, 71, 71, 53, 65, 56, 70, 52, 60, 56, 75, 88, 50, 52, 59, 54, 64, 56, 61, 118, 73, 54, 96, 67, 64, 72, 59, 64, 53, 55, 61, 67, 66, 72, 50, 54, 65, 56, 79, 52, 70, 80, 63, 66, 76, 59, 67, 67, 55, 72, 62, 72, 73, 83, 72, 67, 90, 97, 54, 76, 52, 69, 59, 67, 71, 61, 65, 54, 62, 67, 56, 62, 75, 64, 60, 67, 42, 88, 68, 51, 64, 47, 60, 46, 76, 85, 68, 164, 58, 66, 78, 58, 68, 66, 69, 55, 69, 62, 72, 72, 67, 105, 71, 78, 77, 51, 55, 65, 54, 59, 110, 88, 56, 56, 50, 59, 46, 50, 70, 61, 101, 60, 72, 81, 68, 59, 61, 70, 68, 66, 63, 55, 83, 58, 88, 79, 55, 75, 84, 32, 41, 67, 75, 50, 56, 68, 62, 64, 61, 67, 61, 70, 61, 58, 51, 64, 59, 52, 80, 78, 66, 57, 65, 59, 57, 65, 65, 59, 66, 58, 82, 59, 67, 61, 49, 74, 61, 45, 50, 68, 59, 69, 60, 61, 68, 82, 91, 52, 80, 55, 60, 64, 79, 52, 65, 69, 137, 72, 50, 81, 62, 61, 69, 69, 97, 65, 67, 81, 54, 56, 48, 61, 59, 62, 78, 59, 51, 73, 57, 95, 67, 57, 58, 78, 55, 60, 73, 109, 48, 48, 63, 59, 57, 47, 66, 71, 67, 59, 62, 66, 67, 60, 60, 70, 101, 72, 102, 48, 60, 54, 72, 58, 64, 57, 56, 59, 64, 57, 88, 58, 63, 67, 59, 53, 59, 62, 60, 60, 61, 48, 41, 65, 72, 65, 60, 51, 54, 65, 64, 66, 55, 64, 85, 61, 81, 96, 98, 52, 58, 60, 69, 70, 55, 68, 68, 64, 85, 74, 59, 71, 74, 47, 63, 100, 52, 66, 62, 61, 52, 64, 54, 72, 61, 61, 71, 59, 69, 64, 43, 48, 57, 64, 49, 57, 98, 117, 87, 71, 52, 75, 66, 76, 71, 53, 73, 70, 56, 72, 83, 62, 76, 53, 55, 79, 76, 64, 65, 70, 64, 73, 59, 62, 78, 46, 60, 56, 50, 63, 58, 90, 84, 73, 74, 62, 55, 79, 59, 59, 68, 61, 50, 60, 69, 67, 57, 35, 61, 44, 49, 60, 63, 74, 63, 77, 66, 55, 71, 77, 55, 58, 67, 69, 66, 49, 56, 53, 52, 65, 72, 82, 56, 53, 56, 53, 47, 57, 70, 62, 54, 76, 67, 63, 101, 67, 52, 60, 68, 69, 78, 117, 66, 77, 71, 58, 49, 69, 62, 64, 69, 72, 50, 61, 55, 66, 57, 62, 71, 55, 74, 57, 75, 62, 67, 71, 72, 64, 69, 61, 49, 76, 58, 62, 52, 66, 68, 66, 76, 80, 68, 56, 56, 63, 60, 72, 73, 106, 66, 70, 69, 65, 65, 56, 59, 55, 66, 65, 56, 42, 114, 63, 62, 79, 50, 71, 72, 59, 77, 72, 47, 56, 68, 63, 72, 69, 59, 71, 99, 53, 62, 62, 58, 56, 69, 55, 70, 70, 59, 55, 74, 58, 55, 83, 62, 56, 64, 54, 62, 65, 59, 64, 56, 73, 72, 71, 57, 54, 87, 40, 55, 63, 67, 75, 70, 51, 59, 71, 47, 68, 61, 60, 61, 73, 53, 72, 75, 56, 63, 82, 55, 73, 59, 57, 53, 72, 62, 56, 59, 74, 62, 56, 67, 84, 68, 59, 67, 60, 61, 77, 74, 53, 65, 65, 67, 71, 73, 78, 72, 54, 61, 57, 67, 69, 66, 77, 57, 55, 54, 103, 71, 53, 69, 46, 49, 48, 75, 70, 53, 54, 67, 64, 124, 73, 67, 62, 47, 56, 78, 60, 60, 48, 68, 58, 88, 87, 61, 65, 78, 90, 74, 58, 79, 52, 66, 68, 86, 58, 63, 69, 58, 77, 78, 57, 74, 67, 62, 66, 68, 47, 64, 58, 62, 54, 49, 55, 68, 76, 74, 76, 65, 54, 123, 77, 56, 69, 96, 55, 68, 62, 59, 64, 66, 47, 60, 61, 77, 74, 65, 60, 50, 69, 104, 66, 66, 63, 81, 57, 
51, 63, 71, 70, 82, 58, 53, 62, 80, 59, 81, 74, 51, 62, 53, 62, 59, 46, 72, 57, 64, 71, 53, 68, 54, 56, 69, 53, 81, 76, 69, 71, 53, 82, 76, 58, 57, 63, 65, 96, 80, 52, 64, 57, 61, 47, 62, 63, 60, 55, 63, 38, 67, 75, 89, 63, 79, 71, 88, 57, 74, 62, 57, 59, 59, 65, 76, 60, 61, 66, 63, 75, 81, 66, 81, 63, 72, 59, 77, 76, 69, 54, 52, 69, 42, 65, 57, 65, 90, 55, 46, 65, 64, 53, 83, 65, 61, 62, 60, 60, 34, 74, 43, 48, 74, 89, 44, 68, 68, 43, 70, 60, 54, 63, 57, 52, 48, 64, 65, 58, 46, 111, 71, 71, 49, 55, 76, 71, 59, 37, 61, 74, 81, 70, 51, 57, 76, 76, 49, 58, 83, 66, 85, 55, 81, 61, 73, 57, 78, 85, 60, 89, 56, 56, 76, 55, 51, 51, 67, 57, 68, 63, 75, 67, 68, 75, 49, 50, 87, 64, 56, 120, 37, 82, 69, 71, 51, 101, 78, 62, 64, 55, 63, 52, 60, 67, 79, 54, 54, 60, 94, 62, 84, 80, 87, 72, 73, 53, 66, 62, 58, 73, 55, 72, 65, 66, 51, 100, 77, 71, 72, 65, 65, 63, 53, 59, 59, 55, 72, 55, 59, 84, 55, 61, 87, 61, 53, 62, 66, 74, 63, 79, 65, 55, 81, 61, 67, 52, 64, 48, 61, 49, 51, 73, 66, 98, 72, 52, 64, 61, 60, 76, 88, 73, 75, 68, 119, 65, 81, 49, 62, 58, 65, 64, 85, 54, 68, 86, 50, 61, 58, 68, 86, 71, 51, 53, 75, 76, 54, 62, 69, 58, 95, 66, 61, 71, 47, 51, 63, 61, 71, 51, 65, 50, 56, 61, 62, 67, 59, 96, 78, 58, 59, 78, 75, 77, 78, 62, 66, 65, 77, 57, 112, 77, 73, 75, 58, 68, 59, 83, 86, 71, 43, 61, 71, 66, 52, 58, 62, 49, 42, 46, 61, 74, 47, 71, 61, 60, 66, 61, 51, 69, 69, 57, 48, 67, 72, 70, 60, 71, 49, 51, 68, 66, 67, 63, 61, 61, 53, 63, 61, 57, 61, 60, 68, 72, 66, 111, 54, 54, 84, 57, 59, 67, 66, 68, 57, 54, 84, 69, 58, 83, 104, 79, 75, 53, 97, 68, 60, 68, 76, 60, 44, 54, 44, 56, 66, 62, 77, 71, 82, 77, 64, 48, 58, 52, 76, 45, 77, 61, 69, 57, 87, 63, 73, 53, 59, 94, 53, 57, 54, 52, 61, 83, 72, 93, 66, 72, 80, 66, 74, 55, 75, 71, 73, 56, 56, 62, 80, 79, 66, 61, 61, 61, 84, 40, 50, 64, 49, 60, 50, 72, 62, 65, 80, 60, 51, 66, 68, 56, 45, 81, 55, 61, 70, 56, 47, 69, 108, 57, 72, 64, 54, 54, 71, 47, 60, 81, 46, 59, 52, 53, 65, 68, 68, 58, 86, 85, 57, 51, 57, 56, 64, 65, 70, 50, 66, 58, 56, 68, 58, 56, 72, 64, 72, 65, 39, 55, 60, 65, 58, 63, 67, 87, 84, 90, 71, 81, 63, 80, 50, 62, 52, 74, 64, 45, 61, 69, 69, 50, 80, 62, 79, 53, 71, 67, 62, 133, 78, 51, 60, 45, 50, 68, 138, 89, 82, 76, 59, 90, 82, 58, 71, 55, 63, 78, 55, 44, 69, 92, 68, 70, 70, 73, 71, 66, 77, 95, 60, 81, 61, 49, 75, 68, 72, 97, 60, 102, 62, 55, 72, 58, 82, 65, 61, 73, 92, 75, 65, 46, 60, 65, 82, 50, 81, 54, 58, 69, 69, 61, 63, 75, 45, 53, 49, 62, 50, 63, 53, 95, 59, 71, 74, 73, 54, 58, 62, 60, 74, 53, 69, 54, 80, 62, 49, 87, 65, 61, 69, 53, 85, 55, 70, 51, 85, 104, 49, 59, 72, 67, 54, 53, 76, 84, 53, 60, 70, 74, 94, 60, 78, 70, 72, 61, 52, 79, 63, 78, 58, 53, 62, 63, 68, 61, 80, 71, 59, 81, 71, 62, 67, 80, 86, 59, 97, 75, 66, 51, 79, 81, 69, 46, 47, 53, 54, 82, 45, 61, 55, 83, 56, 58, 99, 76, 59, 63, 56, 47, 54, 84, 60, 59, 57, 68, 62, 63, 69, 49, 76, 65, 63, 65, 76, 71, 75, 73, 56, 65, 51, 39, 71, 54, 63, 62, 74, 51, 64, 82, 62, 65, 62, 75, 97, 65, 63, 65, 53, 77, 54, 78, 72, 67, 45, 57, 57, 96, 76, 65, 68, 58, 54, 55, 86, 77, 52, 59, 77, 62, 56, 75, 64, 96, 65, 60, 84, 41, 127, 59, 56, 66, 86, 70, 77, 79, 91, 53, 74, 77, 64, 75, 72, 67, 41, 69, 52, 62, 50, 72, 67, 65, 56, 76, 79, 59, 65, 57, 55, 75, 57, 63, 54, 75, 63, 57, 64, 64, 58, 64, 48, 60, 40, 75, 57, 66, 76, 69, 56, 70, 50, 60, 46, 61, 64, 58, 58, 78, 110, 83, 70, 56, 61, 59, 74, 68, 79, 66, 77, 52, 47, 70, 72, 72, 57, 49, 79, 70, 70, 51, 62, 74, 63, 54, 82, 77, 43, 58, 77, 56, 58, 50, 53, 55, 75, 68, 55, 71, 64, 56, 43, 66, 60, 75, 62, 72, 66, 55, 64, 67, 69, 63, 59, 59, 72, 63, 65, 
62, 90, 71, 78, 51, 41, 63, 54, 71, 60, 49, 83, 80, 57, 49, 56, 58, 93, 61, 72, 75, 51, 72, 52, 81, 57, 69, 60, 71, 56, 55, 76, 43, 60, 65, 74, 55, 60, 63, 65, 77, 54, 71, 50, 86, 79, 79, 62, 52, 82, 77, 63, 94, 75, 64, 74, 48, 51, 74, 55, 67, 76, 84, 156, 65, 57, 58, 75, 69, 60, 53, 61, 52, 53, 82, 72, 63, 59, 70, 62, 70, 67, 52, 77, 61, 51, 50, 52, 69, 64, 47, 62, 56, 81, 59, 76, 78, 95, 68, 75, 57, 57, 57, 72, 81, 51, 73, 53, 91, 45, 64, 64, 64, 118, 64, 71, 60, 144, 54, 63, 51, 58, 67, 69, 69, 74, 57, 75, 68, 96, 53, 60, 75, 88, 74, 41, 69, 61, 58, 51, 63, 59, 64, 32, 78, 71, 77, 58, 73, 40, 56, 73, 85, 103, 71, 45, 58, 81, 58, 71, 85, 69, 75, 63, 89, 60, 64, 74, 75, 75, 60, 68, 63, 60, 91, 72, 51, 51, 64, 53, 56, 66, 54, 81, 47, 81, 82, 44, 57, 81, 40, 58, 58, 69, 54, 79, 91, 65, 83, 61, 46, 56, 58, 62, 47, 72, 56, 49, 58, 51, 57, 71, 85, 48, 72, 58, 52, 78, 71, 31, 61, 57, 61, 53, 60, 51, 53, 73, 81, 63, 73, 52, 64, 59, 56, 53, 45, 73, 58, 48, 84, 71, 52, 54, 62, 67, 66, 67, 71, 49, 75, 57, 57, 75, 77, 67, 78, 62, 83, 54, 87, 74, 56, 62, 63, 59, 60, 66, 65, 57, 60, 81, 67, 63, 54, 65, 67, 70, 44, 76, 59, 69, 75, 68, 52, 80, 53, 104, 49, 84, 65, 60, 64, 76, 84, 58, 63, 59, 70, 89, 64, 73, 65, 68, 56, 70, 77, 90, 67, 71, 75, 71, 64, 75, 70, 62, 65, 75, 65, 67, 66, 65, 79, 59, 65, 71, 64, 44, 49, 55, 65, 68, 66, 128, 56, 62, 53, 80, 60, 72, 71, 64, 152, 84, 69, 51, 46, 59, 62, 55, 60, 51, 62, 61, 56, 53, 67, 55, 53, 58, 81, 48, 55, 83, 60, 62, 65, 59, 56, 64, 68, 80, 54, 54, 62, 61, 61, 57, 61, 75, 75, 51, 64, 55, 59, 60, 54, 52, 61, 58, 74, 52, 67, 61, 50, 101, 72, 80, 56, 68, 71, 75, 53, 68, 89, 53, 79, 70, 68, 74, 67, 82, 58, 59, 61, 63, 54, 45, 84, 64, 80, 57, 41, 56, 62, 69, 62, 78, 72, 74, 69, 62, 54, 95, 63, 59, 78, 50, 82, 59, 65, 66, 50, 75, 47, 62, 57, 67, 66, 57, 75, 68, 67, 105, 50, 70, 52, 73, 71, 70, 58, 61, 50, 57, 54, 48, 85, 74, 93, 64, 57, 75, 61, 76, 63, 55, 64, 67, 51, 61, 60, 74, 64, 75, 56, 64, 84, 76, 54, 87, 72, 79, 58, 55, 60, 56, 60, 70, 69, 64, 59, 70, 71, 66, 74, 52, 112, 66, 61, 75, 60, 68, 66, 62, 79, 53, 73, 94, 67, 58, 44, 83, 41, 62, 98, 78, 63, 70, 67, 49, 60, 48, 65, 39, 54, 60, 71, 63, 75, 79, 58, 50, 58, 64, 66, 58, 65, 72, 59, 69, 68, 63, 52, 64, 67, 59, 64, 67, 73, 64, 64, 56, 105, 53, 65, 54, 83, 54, 54, 82, 51, 81, 58, 90, 50, 60, 68, 60, 39, 42, 63, 74, 58, 71, 70, 74, 64, 50, 72, 61, 50, 56, 66, 64, 50, 70, 66, 83, 65, 61, 61, 65, 51, 94, 52, 53, 50, 134, 56, 59, 72, 77, 50, 62, 54, 66, 49, 73, 68, 63, 57, 64, 66, 58, 42, 81, 45, 61, 65, 62, 60, 69, 64, 75, 76, 67, 71, 66, 66, 55, 70, 62, 61, 119, 77, 86, 61, 72, 58, 55, 58, 59, 48, 71, 63, 79, 73, 69, 59, 68, 67, 67, 83, 54, 84, 59, 66, 61, 63, 46, 55, 57, 77, 84, 62, 72, 70, 67, 64, 83, 87, 63, 75, 129, 66, 60, 76, 51, 75, 74, 65, 81, 61, 68, 83, 64, 63, 61, 58, 58, 44, 94, 59, 61, 60, 66, 84, 65, 71, 59, 83, 73, 50, 75, 98, 89, 86, 67, 60, 72, 63, 61, 83, 52, 66, 67, 57, 51, 70, 51, 66, 76, 52, 62, 68, 55, 60, 94, 59, 64, 52, 72, 63, 67, 70, 80, 76, 54, 68, 47, 68, 60, 58, 71, 73, 62, 61, 58, 65, 54, 62, 67, 73, 56, 73, 64, 48, 79, 69, 61, 66, 69, 45, 70, 45, 66, 63, 67, 80, 61, 51, 63, 47, 74, 46, 59, 66, 67, 49, 65, 91, 64, 64, 84, 61, 57, 62, 73, 65, 61, 65, 69, 47, 71, 66, 65, 104, 67, 64, 70, 65, 124, 47, 104, 36, 61, 49, 67, 63, 108, 51, 68, 56, 53, 64, 76, 59, 58, 51, 62, 62, 97, 60, 70, 58, 58, 64, 59, 57, 59, 59, 80, 46, 59, 69, 64, 75, 48, 71, 88, 63, 60, 61, 51, 52, 65, 67, 67, 47, 65, 53, 63, 84, 67, 68, 60, 55, 59, 62, 77, 81, 66, 63, 65, 63, 55, 52, 77, 117, 51, 57, 57, 89, 73, 
65, 59, 73, 61, 58, 59, 57, 48, 49, 61, 60, 51, 75, 60, 62, 54, 52, 62, 53, 65, 58, 53, 76, 55, 69, 80, 56, 58, 67, 59, 68, 68, 59, 74, 60, 71, 57, 72, 51, 63, 54, 55, 69, 65, 58, 75, 66, 78, 80, 52, 57, 62, 80, 78, 63, 60, 61, 40, 55, 75, 77, 91, 69, 69, 70, 47, 62, 61, 62, 82, 57, 58, 52, 48, 72, 56, 58, 57, 58, 72, 78, 84, 59, 70, 66, 73, 58, 61, 62, 59, 62, 97, 63, 58, 61, 61, 64, 77, 96, 88, 57, 65, 68, 56, 91, 67, 63, 57, 74, 61, 67, 59, 61, 62, 73, 69, 69, 63, 50, 55, 54, 75, 65, 61, 66, 74, 58, 88, 73, 77, 56, 65, 64, 66, 54, 66, 61, 62, 66, 52, 61, 98, 52, 72, 71, 62, 52, 49, 64, 71, 62, 42, 84, 75, 56, 50, 44, 82, 65, 61, 56, 68, 61, 53, 133, 62, 59, 75, 59, 56, 66, 97, 63, 64, 58, 58, 58, 87, 58, 69, 53, 61, 54, 55, 63, 95, 80, 66, 66, 53, 87, 81, 46, 46, 69, 68, 71, 54, 89, 52, 54, 85, 78, 130, 53, 55, 88, 68, 52, 65, 71, 50, 54, 69, 54, 69, 73, 51, 74, 57, 67, 64, 65, 80, 68, 68, 72, 63, 50, 61, 46, 65, 51, 75, 60, 58, 67, 59, 61, 69, 53, 80, 63, 50, 65, 63, 63, 63, 68, 94, 53, 70, 76, 116, 62, 81, 60, 110, 65, 64, 61, 61, 53, 47, 63, 65, 68, 51, 78, 67, 53, 82, 64, 62, 68, 66, 56, 65, 56, 67, 70, 78, 66, 95, 67, 62, 57, 66, 69, 85, 71, 55, 58, 67, 58, 62, 83, 74, 59, 70, 89, 61, 61, 66, 51, 61, 63, 61, 61, 62, 85, 61, 56, 65, 59, 58, 64, 64, 80, 59, 62, 70, 73, 55, 54, 94, 68, 68, 55, 56, 60, 57, 95, 59, 60, 76, 69, 65, 64, 76, 68, 53, 64, 58, 67, 68, 53, 77, 62, 65, 65, 80, 62, 66, 60, 78, 62, 56, 75, 54, 73, 59, 61, 78, 97, 61, 55, 66, 62, 91, 62, 62, 94, 60, 70, 70, 57, 82, 73, 76, 65, 47, 89, 61, 50, 74, 78, 73, 71, 37, 60, 68, 91, 63, 64, 75, 53, 58, 52, 68, 72, 45, 48, 50, 49, 69, 66, 66, 62, 67, 54, 87, 43, 52, 57, 62, 63, 72, 56, 62, 52, 52, 52, 69, 71, 59, 56, 78, 54, 64, 60, 66, 53, 45, 110, 77, 70, 88, 46, 83, 92, 76, 77, 59, 75, 55, 51, 71, 44, 79, 91, 68, 86, 56, 74, 57, 70, 71, 67, 61, 46, 50, 79, 51, 47, 70, 62, 53, 71, 59, 66, 63, 64, 115, 79, 87, 54, 86, 66, 71, 72, 55, 76, 76, 58, 71, 67, 53, 61, 74, 53, 53, 66, 94, 55, 51, 61, 62, 74, 59, 76, 63, 67, 56, 77, 64, 66, 76, 59, 78, 50, 48, 79, 71, 83, 71, 77, 36, 68, 81, 71, 61, 57, 67, 86, 56, 65, 57, 63, 91, 41, 84, 63, 77, 47, 90, 41, 82, 71, 59, 58, 60, 67, 56, 51, 64, 55, 84, 60, 97, 66, 101, 62, 54, 55, 70, 53, 65, 34, 60, 58, 74, 70, 51, 52, 59, 68, 62, 67, 60, 50, 74, 59, 71, 60, 47, 48, 78, 68, 55, 82, 46, 43, 70, 73, 59, 66, 68, 62, 75, 66, 69, 58, 78, 50, 59, 66, 56, 87, 75, 58, 71, 62, 69, 57, 59, 48, 61, 73, 50, 58, 59, 73, 80, 73, 39, 72, 53, 47, 51, 60, 62, 72, 65, 78, 67, 66, 40, 58, 96, 55, 49, 63, 69, 54, 52, 73, 58, 86, 49, 63, 74, 76, 68, 68, 72, 120, 53, 64, 92, 93, 53, 55, 60, 87, 66, 56, 51, 41, 50, 63, 76, 80, 42, 79, 79, 53, 62, 41, 74, 56, 63, 57, 49, 56, 49, 117, 61, 62, 67, 66, 50, 67, 76, 64, 51, 81, 46, 68, 62, 70, 65, 91, 94, 54, 58, 61, 75, 67, 64, 76, 58, 80, 68, 70, 71, 87, 53, 76, 74, 66, 84, 81, 66, 60, 75, 71, 71, 50, 50, 64, 86, 68, 67, 48, 46, 59, 55, 53, 74, 69, 60, 84, 67, 55, 61, 62, 72, 53, 58, 67, 59, 48, 63, 76, 51, 62, 105, 53, 62, 64, 73, 78, 66, 70, 54, 88, 87, 54, 47, 43, 76, 61, 57, 81, 76, 72, 53, 78, 73, 57, 62, 69, 71, 61, 69, 68, 47, 65, 76, 62, 51, 66, 59, 73, 54, 68, 77, 60, 59, 76, 84, 83, 67, 60, 77, 54, 59, 57, 58, 64, 54, 67, 54, 97, 82, 57, 68, 55, 102, 63, 71, 106, 90, 65, 48, 61, 51, 82, 64, 42, 47, 54, 128, 94, 50, 51, 51, 102, 69, 51, 97, 54, 62, 74, 43, 72, 61, 99, 74, 68, 60, 74, 83, 62, 69, 117, 59, 89, 58, 49, 50, 83, 60, 110, 70, 60, 59, 66, 62, 53, 66, 77, 58, 83, 53, 44, 65, 47, 66, 96, 97, 86, 86, 56, 84, 62, 98, 62, 60, 80, 74, 60, 77, 
74, 54, 85, 84, 79, 44, 59, 69, 56, 76, 39, 54, 70, 53, 67, 65, 76, 66, 74, 45, 52, 64, 86, 60, 84, 80, 56, 59, 80, 72, 79, 55, 59, 81, 68, 46, 71, 56, 67, 69, 91, 69, 61, 55, 73, 70, 67, 49, 67, 70, 52, 59, 44, 51, 63, 102, 82, 47, 48, 50, 77, 36, 113, 70, 70, 57, 60, 123, 50, 83, 62, 50, 64, 53, 57, 88, 90, 72, 57, 53, 41, 72, 65, 66, 55, 55, 68, 82, 87, 71, 61, 54, 74, 67, 61, 61, 77, 66, 48, 57, 64, 54, 62, 55, 69, 95, 61, 71, 61, 66, 65, 73, 51, 65, 50, 81, 64, 66, 67, 49, 72, 62, 62, 68, 61, 58, 73, 58, 55, 55, 66, 67, 63, 39, 50, 72, 45, 64, 74, 49, 63, 68, 56, 44, 57, 64, 61, 79, 62, 66, 57, 55, 64, 52, 48, 63, 62, 74, 44, 45, 52, 55, 66, 67, 73, 73, 61, 49, 64, 58, 54, 49, 59, 63, 70, 74, 53, 73, 59, 44, 72, 73, 81, 65, 49, 51, 90, 59, 47, 56, 73, 78, 96, 57, 77, 63, 58, 55, 57, 74, 60, 63, 51, 56, 77, 43, 72, 56, 64, 90, 56, 82, 75, 77, 90, 54, 55, 51, 83, 62, 51, 100, 98, 65, 52, 51, 93, 68, 69, 73, 65, 73, 72, 68, 80, 60, 90, 57, 114, 94, 64, 66, 69, 62, 67, 59, 75, 64, 77, 75, 63, 62, 66, 48, 69, 75, 68, 67, 65, 69, 61, 69, 69, 75, 54, 60, 45, 101, 74, 78, 61, 57, 58, 63, 50, 54, 49, 59, 97, 40, 46, 93, 80, 69, 75, 74, 76, 91, 66, 56, 70, 52, 60, 67, 61, 53, 74, 93, 83, 74, 70, 78, 58, 60, 85, 69, 57, 59, 52, 56, 57, 88, 43, 54, 57, 56, 54, 57, 68, 59, 59, 61, 81, 67, 58, 71, 48, 69, 58, 55, 91, 68, 73, 65, 70, 56, 77, 53, 62, 62, 78, 72, 58, 135, 45, 63, 73, 65, 43, 60, 63, 69, 76, 62, 55, 70, 66, 69, 67, 62, 64, 68, 66, 46, 58, 55, 70, 65, 55, 69, 66, 55, 52, 82, 53, 51, 63, 46, 55, 60, 58, 77, 51, 92, 69, 52, 59, 56, 72, 81, 61, 64, 60, 107, 60, 70, 59, 125, 75, 62, 56, 81, 51, 86, 61, 68, 73, 54, 66, 57, 73, 116, 50, 80, 62, 51, 73, 69, 69, 53, 104, 74, 53, 61, 75, 58, 61, 102, 49, 68, 57, 61, 105, 40, 57, 60, 97, 77, 63, 49, 69, 60, 75, 60, 61, 56, 59, 51, 79, 41, 62, 47, 71, 57, 79, 52, 48, 77, 64, 70, 57, 73, 59, 63, 59, 75, 82, 70, 54, 57, 52, 51, 58, 67, 51, 55, 67, 58, 67, 58, 54, 64, 72, 62, 49, 58, 60, 57, 74, 67, 61, 40, 59, 47, 55, 50, 61, 71, 56, 60, 69, 45, 69, 56, 76, 57, 66, 60, 68, 73, 75, 77, 85, 48, 58, 75, 57, 53, 57, 100, 69, 81, 53, 81, 63, 72, 56, 60, 65, 58, 70, 66, 72, 60, 77, 69, 70, 75, 44, 93, 77, 63, 64, 45, 73, 56, 81, 80, 49, 82, 55, 84, 58, 65, 62, 53, 79, 67, 56, 64, 68, 59, 91, 62, 77, 59, 87, 49, 63, 77, 72, 59, 61, 62, 63, 56, 54, 52, 59, 100, 57, 79, 55, 75, 67, 71, 56, 78, 47, 60, 39, 56, 55, 114, 45, 59, 69, 73, 60, 65, 69, 66, 72, 76, 62, 45, 60, 59, 108, 46, 77, 93, 51, 62, 62, 56, 83, 69, 53, 61, 48, 58, 131, 61, 81, 53, 60, 56, 56, 52, 68, 76, 56, 73, 69, 66, 69, 77, 86, 74, 81, 73, 61, 88, 51, 72, 46, 62, 47, 64, 61, 62, 65, 78, 47, 53, 78, 55, 62, 73, 43, 71, 72, 82, 72, 70, 61, 77, 68, 67, 74, 76, 58, 59, 71, 69, 64, 85, 64, 65, 65, 96, 58, 51, 44, 67, 68, 85, 78, 75, 48, 46, 48, 59, 53, 72, 61, 58, 62, 70, 81, 84, 57, 64, 53, 59, 60, 84, 73, 65, 64, 62, 73, 42, 60, 58, 72, 58, 60, 79, 57, 62, 66, 67, 69, 62, 55, 67, 87, 72, 66, 48, 69, 88, 63, 60, 61, 74, 63, 73, 79, 91, 41, 40, 60, 85, 83, 97, 52, 68, 59, 68, 74, 74, 62, 72, 51, 80, 79, 77, 48, 56, 54, 81, 70, 57, 65, 63, 83, 49, 60, 69, 57, 81, 58, 53, 77, 54, 86, 75, 66, 49, 75, 83, 82, 62, 130, 69, 64, 101, 53, 56, 81, 72, 73, 62, 55, 68, 73, 60, 58, 58, 68, 73, 65, 62, 60, 74, 67, 67, 67, 77, 52, 76, 44, 58, 68, 74, 62, 66, 51, 66, 55, 73, 75, 56, 70, 57, 73, 52, 79, 61, 59, 69, 46, 81, 55, 58, 65, 54, 39, 71, 67, 70, 81, 83, 67, 57, 55, 70, 45, 80, 79, 78, 126, 46, 54, 65, 69, 47, 63, 77, 51, 56, 83, 62, 71, 55, 80, 71, 55, 76, 54, 70, 50, 78, 53, 91, 65, 63, 49, 57, 55, 
45, 41, 51, 54, 83, 61, 63, 70, 66, 71, 61, 45, 56, 64, 76, 63, 62, 71, 107, 63, 69, 53, 91, 54, 65, 63, 58, 47, 43, 66, 59, 65, 56, 49, 53, 76, 87, 50, 39, 68, 79, 51, 100, 64, 54, 45, 39, 51, 56, 67, 43, 61, 50, 61, 68, 67, 64, 76, 58, 47, 61, 76, 55, 63, 43, 69, 54, 54, 85, 56, 57, 72, 61, 69, 75, 60, 86, 68, 63, 53, 138, 44, 45, 86, 64, 57, 69, 73, 79, 65, 89, 86, 74, 81, 77, 62, 81, 71, 66, 64, 42, 62, 58, 88, 72, 66, 66, 76, 58, 63, 72, 111, 56, 59, 55, 89, 75, 50, 53, 90, 68, 61, 111, 60, 65, 56, 65, 62, 73, 68, 68, 56, 46, 58, 76, 73, 48, 67, 74, 60, 74, 54, 80, 84, 67, 55, 70, 71, 57, 77, 64, 60, 48, 63, 61, 65, 80, 80, 82, 52, 54, 40, 56, 61, 60, 72, 62, 60, 76, 71, 100, 54, 83, 72, 58, 74, 66, 66, 56, 66, 79, 73, 75, 42, 67, 57, 49, 105, 68, 68, 51, 76, 47, 67, 48, 71, 48, 73, 62, 48, 72, 73, 85, 77, 56, 48, 40, 45, 91, 65, 60, 50, 51, 44, 64, 50, 80, 65, 54, 55, 79, 52, 81, 70, 84, 60, 55, 67, 88, 68, 48, 63, 60, 68, 54, 75, 53, 69, 65, 51, 65, 57, 62, 85, 80, 50, 63, 63, 59, 61, 58, 61, 55, 73, 53, 70, 91, 82, 43, 74, 63, 67, 53, 56, 60, 64, 80, 91, 63, 50, 58, 67, 73, 59, 55, 51, 66, 63, 59, 65, 59, 59, 63, 69, 58, 74, 70, 68, 55, 54, 82, 61, 64, 61, 54, 48, 62, 37, 53, 56, 76, 56, 49, 91, 59, 62, 63, 67, 59, 54, 67, 81, 43, 71, 67, 93, 60, 67, 87, 54, 56, 47, 81, 72, 71, 67, 58, 62, 77, 58, 58, 54, 64, 73, 54, 65, 73, 71, 60, 72, 84, 78, 80, 65, 67, 63, 63, 64, 72, 60, 73, 94, 71, 47, 84, 54, 55, 46, 72, 49, 68, 48, 111, 71, 58, 52, 78, 63, 71, 56, 57, 58, 60, 67, 52, 82, 63, 49, 71, 60, 70, 48, 59, 77, 71, 61, 55, 54, 63, 66, 54, 61, 88, 51, 47, 73, 56, 62, 86, 67, 76, 58, 55, 68, 70, 60, 85, 62, 71, 58, 98, 54, 64, 63, 49, 87, 55, 56, 67, 38, 58, 59, 54, 62, 52, 85, 65, 54, 74, 61, 58, 64, 58, 80, 59, 56, 62, 64, 89, 71, 52, 72, 61, 55, 57, 95, 47, 57, 43, 65, 69, 50, 70, 49, 57, 72, 47, 58, 80, 75, 57, 65, 49, 67, 56, 58, 86, 84, 53, 68, 73, 102, 53, 80, 62, 54, 63, 73, 66, 66, 72, 60, 60, 79, 140, 91, 78, 75, 50, 59, 82, 61, 83, 61, 72, 63, 56, 108, 57, 80, 70, 69, 88, 53, 54, 63, 82, 57, 101, 64, 68, 57, 69, 69, 59, 64, 64, 55, 58, 61, 71, 57, 57, 69, 69, 49, 54, 57, 68, 62, 67, 71, 78, 50, 110, 76, 47, 76, 89, 75, 61, 56, 55, 51, 76, 51, 85, 70, 59, 84, 68, 61, 67, 58, 69, 69, 55, 77, 77, 58, 67, 91, 58, 57, 60, 57, 84, 76, 55, 59, 70, 71, 71, 68, 59, 91, 68, 68, 67, 55, 74, 40, 74, 50, 62, 49, 60, 57, 78, 66, 68, 64, 65, 63, 64, 76, 48, 64, 65, 90, 60, 59, 53, 66, 66, 69, 40, 58, 75, 66, 57, 74, 55, 83, 115, 79, 98, 62, 92, 55, 59, 83, 74, 61, 57, 59, 90, 62, 72, 61, 61, 49, 73, 72, 93, 59, 65, 73, 63, 82, 56, 65, 66, 62, 60, 62, 64, 56, 85, 57, 67, 56, 55, 60, 52, 76, 60, 77, 55, 61, 67, 51, 52, 72, 65, 84, 52, 72, 49, 53, 55, 54, 74, 68, 41, 44, 97, 53, 78, 49, 66, 111, 55, 64, 87, 50, 69, 56, 49, 70, 72, 69, 86, 65, 72, 63, 56, 59, 50, 60, 62, 61, 45, 69, 70, 65, 65, 69, 91, 75, 50, 65, 62, 77, 55, 75, 80, 60, 65, 55, 72, 95, 45, 73, 60, 80, 61, 68, 59, 64, 54, 62, 67, 51, 72, 73, 84, 85, 66, 52, 70, 64, 65, 51, 76, 92, 72, 58, 64, 60, 59, 62, 75, 76, 86, 88, 65, 62, 65, 60, 82, 51, 70, 103, 55, 60, 56, 58, 78, 73, 76, 47, 43, 61, 61, 51, 79, 69, 61, 84, 61, 63, 82, 65, 84, 55, 65, 77, 52, 56, 73, 60, 71, 64, 55, 66, 53, 89, 62, 68, 74, 75, 61, 55, 84, 78, 57, 59, 54, 68, 60, 62, 80, 61, 78, 70, 99, 69, 78, 52, 63, 46, 84, 67, 70, 58, 54, 67, 57, 55, 95, 79, 75, 60, 81, 65, 54, 51, 80, 56, 72, 52, 83, 60, 90, 52, 64, 53, 73, 72, 61, 62, 69, 67, 58, 56, 67, 51, 53, 96, 71, 45, 75, 87, 72, 75, 81, 53, 57, 74, 62, 67, 83, 55, 95, 39, 56, 67, 39, 69, 45, 58, 59, 73, 
49, 54, 56, 64, 64, 60, 58, 48, 67, 63, 80, 62, 72, 57, 48, 65, 48, 45, 49, 69, 69, 78, 54, 75, 75, 74, 46, 65, 79, 55, 55, 71, 71, 47, 54, 64, 64, 61, 78, 72, 70, 71, 76, 121, 71, 61, 97, 56, 62, 66, 72, 88, 58, 54, 35, 54, 62, 68, 70, 68, 76, 68, 65, 70, 78, 56, 48, 62, 59, 67, 43, 66, 61, 66, 57, 67, 74, 58, 62, 73, 75, 64, 49, 46, 70, 48, 49, 60, 56, 63, 66, 60, 84, 65, 51, 78, 61, 70, 53, 83, 80, 70, 78, 48, 77, 57, 78, 55, 71, 53, 66, 78, 56, 60, 86, 78, 56, 70, 64, 51, 78, 56, 46, 62, 55, 67, 74, 57, 70, 70, 60, 65, 51, 57, 65, 54, 45, 60, 60, 67, 69, 59, 58, 49, 114, 48, 60, 66, 53, 75, 51, 47, 64, 81, 55, 76, 60, 74, 55, 61, 72, 55, 69, 69, 54, 40, 87, 53, 69, 67, 55, 83, 45, 92, 52, 52, 51, 51, 61, 59, 91, 70, 70, 64, 71, 39, 75, 65, 75, 77, 52, 78, 86, 52, 78, 99, 83, 64, 46, 59, 60, 61, 62, 52, 49, 51, 61, 77, 60, 77, 58, 59, 43, 96, 55, 45, 64, 63, 74, 55, 151, 67, 66, 65, 74, 59, 68, 82, 79, 72, 67, 71, 61, 47, 66, 77, 49, 65, 73, 84, 68, 50, 51, 68, 53, 47, 66, 67, 66, 61, 56, 106, 56, 69, 65, 83, 72, 57, 73, 55, 64, 72, 69, 59, 33, 58, 63, 41, 75, 58, 62, 57, 78, 70, 77, 54, 72, 70, 48, 65, 78, 62, 46, 76, 50, 51, 73, 66, 70, 83, 70, 42, 56, 52, 52, 65, 66, 63, 76, 74, 67, 63, 78, 74, 68, 73, 70, 66, 72, 69, 106, 77, 72, 65, 73, 74, 75, 64, 64, 75, 78, 39, 56, 59, 69, 68, 59, 56, 52, 68, 53, 35, 68, 62, 55, 65, 73, 69, 62, 75, 53, 120, 69, 91, 68, 78, 78, 55, 115, 76, 56, 75, 73, 79, 64, 64, 90, 52, 82, 67, 72, 66, 77, 65, 73, 60, 59, 72, 98, 69, 42, 54, 55, 57, 63, 50, 57, 60, 69, 52, 75, 61, 72, 57, 92, 86, 60, 44, 54, 72, 64, 73, 73, 62, 49, 75, 125, 68, 82, 81, 66, 83, 37, 92, 66, 56, 83, 85, 63, 69, 53, 82, 84, 70, 59, 70, 52, 60, 51, 90, 66, 63, 49, 73, 66, 51, 49, 93, 59, 56, 65, 67, 59, 59, 69, 51, 54, 65, 66, 46, 82, 75, 82, 88, 47, 73, 65, 77, 56, 58, 64, 75, 65, 53, 101, 92, 67, 63, 53, 74, 63, 57, 77, 45, 79, 70, 80, 74, 70, 43, 78, 60, 54, 72, 68, 54, 74, 55, 84, 62, 60, 64, 72, 79, 62, 50, 56, 81, 70, 62, 60, 67, 50, 82, 71, 64, 40, 55, 51, 59, 50, 80, 92, 67, 53, 48, 66, 76, 79, 44, 67, 78, 64, 73, 69, 61, 64, 72, 79, 65, 85, 69, 53, 75, 72, 55, 71, 54, 95, 55, 57, 66, 68, 70, 57, 60, 70, 60, 61, 71, 56, 66, 74, 61, 54, 50, 68, 76, 63, 83, 72, 66, 61, 65, 62, 62, 45, 59, 64, 59, 63, 58, 64, 54, 83, 89, 75, 77, 59, 77, 50, 67, 93, 68, 68, 47, 57, 66, 81, 58, 59, 77, 62, 74, 73, 97, 67, 48, 66, 58, 59, 69, 73, 66, 56, 92, 78, 64, 69, 69, 70, 69, 74, 87, 42, 55, 50, 91, 51, 90, 63, 80, 74, 93, 72, 76, 61, 58, 57, 58, 61, 59, 85, 93, 158, 109, 71, 72, 85, 62, 50, 67, 53, 58, 68, 54, 78, 84, 59, 70, 72, 59, 59, 65, 87, 45, 57, 66, 60, 70, 68, 76, 62, 80, 75, 51, 75, 50, 72, 61, 66, 79, 66, 88, 56, 65, 58, 52, 38, 64, 72, 80, 67, 74, 109, 75, 50, 63, 132, 48, 71, 65, 56, 81, 68, 57, 75, 49, 73, 76, 56, 69, 46, 50, 85, 49, 93, 67, 60, 53, 58, 87, 70, 54, 40, 79, 73, 59, 77, 62, 87, 49, 65, 72, 85, 57, 64, 70, 64, 70, 70, 62, 35, 45, 85, 68, 64, 85, 36, 58, 58, 50, 61, 46, 74, 47, 82, 62, 50, 60, 64, 45, 63, 63, 74, 48, 64, 94, 77, 43, 70, 90, 59, 52, 88, 58, 56, 60, 54, 59, 56, 72, 72, 45, 58, 80, 65, 71, 56, 53, 83, 59, 79, 76, 84, 55, 85, 75, 58, 68, 73, 70, 59, 89, 63, 70, 71, 62, 43, 75, 55, 64, 74, 62, 81, 74, 72, 51, 63, 58, 67, 71, 52, 58, 58, 99, 73, 55, 76, 63, 81, 58, 59, 48, 70, 52, 45, 51, 56, 36, 69, 113, 80, 42, 68, 57, 54, 47, 65, 65, 66, 51, 65, 47, 66, 64, 59, 58, 54, 49, 40, 73, 51, 53, 60, 76, 74, 66, 59, 67, 58, 50, 71, 61, 64, 56, 77, 62, 72, 55, 56, 70, 64, 65, 73, 60, 59, 59, 87, 51, 52, 71, 67, 70, 51, 64, 65, 74, 86, 60, 66, 71, 75, 57, 61, 
40, 89, 68, 68, 58, 62, 61, 109, 70, 115, 62, 61, 60, 63, 59, 54, 67, 52, 70, 78, 63, 52, 58, 75, 90, 91, 54, 59, 51, 50, 72, 44, 64, 59, 76, 56, 79, 67, 65, 53, 67, 58, 75, 54, 43, 59, 66, 65, 74, 62, 58, 53, 73, 77, 66, 71, 79, 67, 52, 58, 109, 70, 72, 84, 103, 47, 66, 60, 84, 69, 63, 60, 75, 74, 76, 71, 49, 66, 79, 60, 65, 62, 71, 57, 50, 41, 63, 76, 66, 62, 64, 79, 72, 89, 85, 66, 56, 93, 58, 61, 89, 77, 82, 73, 54, 53, 61, 67, 76, 51, 64, 55, 89, 59, 85, 86, 56, 65, 87, 128, 66, 94, 52, 60, 83, 71, 59, 156, 76, 86, 76, 59, 56, 72, 66, 58, 52, 68, 75, 77, 64, 65, 57, 41, 53, 51, 71, 79, 74, 50, 52, 48, 63, 70, 59, 42, 65, 65, 73, 68, 65, 55, 70, 83, 72, 71, 47, 61, 38, 87, 56, 46, 69, 70, 55, 67, 51, 63, 55, 66, 75, 67, 124, 63, 63, 84, 60, 82, 64, 79, 69, 64, 59, 70, 63, 61, 81, 62, 72, 75, 69, 77, 68, 71, 53, 66, 62, 72, 62, 34, 68, 62, 62, 146, 55, 59, 97, 44, 70, 68, 59, 80, 70, 81, 63, 81, 61, 75, 76, 66, 72, 60, 52, 42, 52, 76, 62, 42, 64, 64, 70, 70, 43, 40, 70, 62, 64, 64, 85, 59, 68, 62, 47, 73, 75, 51, 64, 58, 44, 74, 53, 59, 66, 72, 58, 67, 79, 88, 75, 80, 84, 60, 78, 58, 49, 51, 54, 52, 68, 66, 59, 52, 66, 84, 56, 85, 58, 56, 66, 56, 70, 64, 64, 58, 66, 68, 69, 52, 87, 58, 56, 84, 68, 61, 60, 76, 57, 42, 64, 76, 68, 60, 90, 70, 71, 48, 73, 50, 61, 77, 62, 61, 55, 71, 77, 63, 50, 77, 86, 66, 81, 60, 81, 65, 83, 51, 69, 67, 58, 71, 73, 60, 61, 57, 70, 67, 59, 49, 59, 81, 53, 61, 124, 57, 47, 57, 71, 65, 64, 172, 74, 82, 83, 71, 77, 68, 54, 64, 64, 54, 53, 59, 55, 72, 56, 61, 62, 71, 102, 76, 61, 66, 64, 61, 78, 60, 79, 68, 54, 55, 87, 60, 112, 46, 52, 44, 52, 60, 63, 62, 48, 91, 57, 56, 57, 56, 50, 85, 83, 107, 66, 77, 100, 65, 60, 57, 51, 62, 67, 70, 55, 67, 73, 49, 64, 48, 73, 81, 50, 55, 50, 59, 55, 60, 78, 65, 57, 78, 68, 69, 80, 56, 47, 54, 44, 67, 67, 68, 75, 72, 52, 65, 66, 76, 86, 53, 98, 76, 77, 77, 65, 56, 62, 69, 33, 54, 58, 80, 65, 53, 73, 67, 75, 53, 61, 79, 71, 52, 47, 75, 97, 52, 89, 81, 61, 66, 77, 115, 76, 64, 80, 54, 81, 71, 82, 69, 61, 49, 55, 54, 90, 90, 66, 82, 64, 61, 57, 97, 66, 52, 56, 72, 52, 83, 79, 63, 67, 61, 59, 86, 56, 42, 52, 52, 77, 77, 53, 86, 53, 50, 63, 45, 67, 51, 57, 53, 67, 61, 65, 96, 54, 30, 66, 63, 53, 86, 85, 65, 58, 77, 65, 54, 65, 63, 60, 62, 69, 47, 66, 62, 52, 66, 79, 60, 53, 76, 76, 46, 62, 75, 80, 69, 61, 66, 65, 70, 79, 46, 60, 54, 68, 97, 44, 57, 74, 75, 67, 55, 50, 75, 67, 73, 69, 71, 66, 72, 57, 47, 45, 75, 72, 80, 42, 76, 72, 64, 66, 54, 78, 69, 63, 47, 65, 39, 57, 56, 62, 104, 55, 60, 73, 78, 102, 58, 57, 66, 63, 61, 70, 89, 58, 67, 90, 58, 61, 60, 72, 62, 77, 45, 59, 80, 62, 68, 77, 66, 77, 70, 41, 47, 74, 61, 56, 57, 81, 50, 59, 76, 76, 101, 58, 41, 53, 43, 85, 52, 46, 73, 70, 62, 73, 45, 70, 63, 75, 64, 56, 60, 46, 55, 47, 64, 84, 85, 57, 63, 60, 39, 65, 58, 71, 52, 56, 52, 80, 93, 47, 59, 70, 62, 69, 53, 63, 63, 62, 77, 74, 52, 62, 54, 64, 77, 68, 51, 59, 49, 43, 95, 61, 70, 49, 59, 75, 109, 54, 63, 54, 85, 71, 67, 72, 40, 49, 62, 57, 78, 62, 65, 55, 66, 59, 70, 56, 43, 52, 68, 63, 59, 57, 58, 70, 67, 133, 76, 67, 58, 55, 63, 75, 45, 84, 92, 53, 107, 62, 73, 56, 89, 43, 66, 59, 48, 61, 70, 67, 57, 80, 52, 56, 55, 50, 83, 62, 82, 59, 72, 47, 59, 75, 42, 51, 66, 71, 76, 42, 59, 59, 71, 49, 101, 90, 61, 85, 67, 56, 60, 63, 61, 89, 66, 90, 78, 51, 103, 84, 60, 76, 129, 52, 68, 59, 68, 74, 61, 69, 72, 78, 64, 54, 82, 57, 47, 77, 76, 65, 51, 66, 76, 65, 59, 64, 78, 63, 52, 90, 71, 71, 52, 51, 78, 67, 50, 48, 58, 80, 74, 65, 72, 54, 65, 70, 64, 56, 57, 88, 73, 47, 56, 63, 57, 60, 46, 68, 75, 61, 61, 61, 58, 76, 58, 64, 82, 
87, 73, 73, 62, 102, 71, 59, 64, 62, 83, 67, 61, 68, 62, 70, 55, 71, 63, 58, 71, 64, 64, 81, 82, 74, 63, 55, 77, 56, 83, 99, 62, 74, 66, 72, 62, 56, 62, 60, 48, 55, 57, 53, 64, 55, 73, 61, 60, 71, 55, 78, 75, 79, 52, 47, 54, 51, 66, 42, 53, 69, 70, 65, 52, 73, 57, 63, 49, 58, 71, 57, 59, 49, 48, 72, 72, 97, 60, 108, 54, 55, 75, 94, 66, 48, 65, 61, 55, 45, 64, 66, 63, 59, 47, 49, 59, 65, 75, 55, 56, 57, 53, 85, 79, 55, 68, 56, 63, 69, 65, 48, 62, 69, 69, 74, 58, 67, 64, 39, 46, 61, 78, 60, 58, 57, 69, 48, 57, 67, 63, 64, 65, 66, 90, 68, 87, 64, 57, 74, 61, 65, 55, 64, 70, 68, 73, 59, 46, 62, 79, 69, 149, 76, 46, 74, 73, 59, 63, 71, 56, 97, 76, 66, 75, 48, 49, 79, 65, 50, 69, 57, 73, 64, 74, 57, 75, 65, 59, 102, 49, 63, 59, 72, 56, 58, 61, 54, 66, 50, 74, 58, 67, 59, 50, 71, 71, 58, 63, 90, 73, 67, 53, 115, 89, 50, 78, 69, 68, 78, 60, 62, 65, 75, 82, 71, 77, 53, 72, 61, 77, 68, 54, 58, 73, 59, 56, 51, 74, 50, 78, 70, 66, 58, 50, 68, 64, 78, 47, 56, 84, 72, 53, 60, 56, 70, 76, 60, 71, 53, 72, 81, 90, 70, 60, 77, 40, 97, 76, 69, 69, 80, 110, 66, 54, 75, 63, 67, 54, 48, 134, 76, 77, 85, 75, 67, 69, 65, 68, 54, 49, 74, 83, 78, 64, 79, 66, 61, 71, 73, 63, 51, 69, 56, 57, 60, 64, 70, 58, 67, 69, 77, 64, 64, 60, 51, 53, 64, 56, 60, 56, 62, 62, 54, 72, 56, 68, 89, 52, 66, 60, 60, 64, 49, 76, 130, 66, 56, 80, 76, 65, 72, 63, 79, 67, 64, 66, 77, 63, 70, 59, 79, 83, 98, 59, 53, 69, 59, 52, 60, 68, 54, 48, 71, 62, 72, 80, 67, 67, 75, 57, 54, 96, 70, 56, 51, 50, 57, 74, 46, 55, 60, 56, 38, 62, 67, 65, 71, 73, 70, 86, 87, 57, 107, 74, 68, 45, 69, 56, 64, 77, 72, 61, 71, 47, 73, 88, 62, 66, 69, 73, 62, 55, 57, 63, 59, 78, 63, 86, 79, 84, 63, 61, 65, 68, 55, 73, 60, 50, 66, 72, 52, 55, 74, 60, 90, 112, 78, 60, 76, 61, 63, 70, 77, 76, 61, 59, 57, 57, 72, 78, 60, 50, 49, 54, 64, 93, 63, 86, 46, 64, 70, 68, 47, 60, 100, 65, 67, 46, 59, 64, 60, 73, 67, 69, 70, 54, 63, 53, 66, 75, 69, 40, 59, 65, 66, 50, 62, 91, 60, 60, 68, 62, 37, 60, 79, 69, 57, 70, 58, 56, 62, 67, 56, 65, 82, 63, 48, 67, 96, 64, 47, 169, 68, 46, 56, 75, 77, 56, 77, 80, 54, 90, 62, 44, 60, 46, 49, 53, 68, 56, 122, 63, 63, 57, 79, 73, 65, 57, 79, 62, 59, 55, 102, 70, 89, 50, 88, 55, 63, 68, 56, 66, 79, 85, 51, 60, 59, 71, 54, 105, 89, 76, 68, 51, 65, 60, 56, 90, 58, 60, 62, 54, 59, 72, 72, 65, 64, 79, 45, 63, 61, 68, 52, 66, 60, 56, 83, 76, 60, 61, 78, 73, 77, 65, 80, 47, 51, 51, 72, 74, 66, 64, 78, 75, 52, 84, 76, 58, 68, 78, 71, 96, 67, 68, 65, 65, 66, 60, 53, 98, 58, 69, 63, 68, 48, 77, 54, 70, 77, 53, 68, 67, 64, 87, 97, 61, 52, 60, 65, 108, 73, 81, 69, 57, 71, 77, 84, 69, 78, 63, 64, 51, 73, 64, 66, 80, 62, 61, 71, 82, 72, 106, 53, 72, 62, 55, 72, 56, 45, 77, 57, 55, 60, 89, 83, 55, 78, 67, 50, 52, 72, 56, 59, 52, 63, 60, 78, 65, 86, 73, 62, 77, 55, 49, 67, 59, 64, 87, 83, 64, 52, 79, 79, 64, 57, 66, 53, 77, 52, 73, 71, 62, 62, 61, 82, 58, 58, 58, 95, 73, 49, 78, 56, 70, 62, 57, 89, 54, 51, 63, 57, 73, 50, 63, 61, 62, 58, 58, 62, 64, 69, 88, 71, 62, 70, 81, 86, 64, 54, 76, 49, 62, 55, 70, 52, 46, 44, 61, 59, 43, 51, 66, 56, 43, 52, 58, 71, 60, 72, 107, 61, 73, 87, 85, 83, 54, 57, 54, 78, 57, 79, 101, 66, 68, 98, 67, 65, 57, 56, 72, 58, 65, 74, 87, 62, 75, 66, 50, 57, 64, 52, 88, 57, 52, 53, 76, 74, 67, 52, 69, 65, 89, 67, 47, 71, 75, 60, 62, 36, 85, 65, 68, 104, 48, 58, 66, 68, 68, 54, 65, 44, 64, 58, 59, 69, 81, 53, 83, 77, 54, 51, 82, 64, 56, 56, 55, 84, 73, 74, 51, 73, 78, 70, 48, 60, 46, 80, 63, 66, 59, 62, 67, 78, 61, 73, 58, 47, 54, 73, 106, 66, 61, 54, 64, 81, 116, 65, 66, 57, 78, 65, 61, 74, 78, 53, 64, 58, 56, 66, 59, 58, 71, 66, 
56, 68, 96, 63, 50, 68, 59, 74, 93, 52, 53, 59, 64, 72, 63, 64, 42, 104, 77, 61, 55, 81, 68, 57, 61, 64, 64, 67, 79, 63, 75, 46, 67, 66, 63, 62, 53, 51, 72, 68, 74, 78, 67, 65, 74, 67, 69, 64, 52, 58, 64, 66, 60, 43, 68, 59, 66, 62, 91, 63, 73, 81, 64, 53, 51, 66, 69, 80, 90, 56, 55, 54, 61, 60, 74, 101, 48, 63, 91, 86, 68, 88, 69, 54, 71, 72, 60, 52, 61, 60, 60, 39, 48, 55, 59, 75, 49, 55, 60, 50, 48, 39, 54, 57, 66, 56, 59, 66, 89, 85, 111, 70, 52, 85, 56, 63, 51, 45, 67, 73, 82, 59, 63, 60, 69, 51, 64, 53, 70, 53, 76, 63, 78, 75, 58, 77, 55, 49, 90, 73, 75, 59, 80, 60, 69, 60, 62, 81, 80, 57, 87, 71, 65, 54, 61, 57, 54, 50, 63, 63, 57, 63, 51, 96, 105, 97, 55, 69, 104, 70, 59, 73, 75, 68, 76, 90, 49, 66, 66, 55, 64, 63, 63, 77, 80, 59, 60, 57, 54, 61, 69, 68, 80, 62, 60, 46, 53, 67, 64, 76, 71, 51, 58, 78, 94, 47, 71, 59, 99, 55, 74, 49, 95, 58, 59, 77, 79, 47, 93, 71, 53, 55, 66, 60, 58, 55, 53, 63, 58, 76, 56, 44, 92, 52, 54, 79, 83, 70, 67, 59, 67, 66, 61, 78, 50, 65, 66, 68, 76, 73, 64, 51, 99, 66, 58, 72, 66, 59, 77, 57, 61, 54, 104, 63, 57, 68, 52, 60, 56, 83, 57, 60, 59, 90, 62, 59, 70, 72, 59, 47, 60, 48, 120, 59, 58, 59, 61, 61, 91, 55, 104, 55, 61, 58, 63, 58, 78, 61, 95, 72, 59, 64, 62, 60, 48, 61, 95, 68, 58, 69, 70, 61, 53, 63, 59, 55, 92, 43, 49, 58, 58, 109, 53, 54, 103, 64, 61, 72, 53, 48, 60, 70, 46, 51, 59, 61, 72, 52, 48, 76, 75, 52, 52, 76, 49, 61, 48, 66, 60, 79, 58, 53, 55, 56, 56, 67, 54, 78, 66, 59, 63, 78, 55, 51, 60, 51, 59, 57, 77, 57, 72, 66, 59, 91, 65, 64, 59, 59, 70, 53, 61, 50, 67, 83, 52, 73, 93, 58, 80, 61, 64, 64, 69, 92, 79, 57, 65, 56, 66, 70, 63, 63, 74, 96, 56, 77, 85, 65, 76, 68, 61, 52, 68, 64, 51, 57, 50, 59, 50, 63, 77, 55, 138, 63, 88, 56, 107, 56, 57, 49, 54, 52, 50, 50, 56, 61, 61, 71, 50, 71, 55, 58, 60, 75, 80, 63, 73, 93, 75, 116, 73, 64, 73, 51, 80, 62, 74, 48, 58, 68, 67, 58, 61, 75, 69, 75, 120, 63, 58, 62, 67, 55, 81, 51, 57, 57, 53, 40, 67, 85, 62, 55, 65, 56, 64, 88, 77, 60, 81, 80, 49, 68, 55, 54, 76, 76, 65, 99, 71, 57, 61, 83, 59, 58, 53, 77, 89, 51, 68, 68, 83, 68, 53, 60, 56, 82, 100, 56, 90, 72, 90, 65, 90, 67, 102, 61, 66, 47, 67, 86, 84, 72, 72, 66, 78, 83, 66, 77, 61, 76, 58, 51, 77, 63, 62, 116, 139, 59, 50, 87, 69, 61, 77, 78, 63, 69, 69, 67, 74, 57, 63, 48, 63, 51, 103, 80, 62, 67, 70, 62, 57, 78, 66, 70, 61, 61, 55, 56, 77, 56, 70, 86, 70, 81, 51, 53, 45, 72, 66, 65, 59, 58, 51, 69, 52, 69, 64, 60, 58, 63, 56, 63, 50, 61, 71, 66, 73, 56, 66, 69, 72, 47, 48, 89, 64, 50, 51, 59, 49, 68, 51, 61, 41, 50, 79, 106, 81, 72, 75, 57, 57, 61, 45, 68, 50, 72, 102, 85, 84, 36, 63, 54, 98, 124, 72, 60, 52, 80, 51, 88, 68, 67, 64, 56, 79, 51, 57, 64, 57, 67, 65, 58, 69, 62, 66, 78, 59, 67, 70, 56, 56, 82, 67, 73, 53, 48, 56, 63, 76, 55, 93, 54, 68, 71, 73, 60, 63, 70, 51, 56, 62, 60, 60, 75, 59, 56, 42, 57, 56, 50, 61, 54, 55, 55, 75, 58, 57, 65, 62, 54, 72, 51, 51, 63, 61, 67, 56, 71, 76, 44, 84, 67, 57, 70, 82, 61, 67, 79, 62, 61, 71, 57, 76, 68, 76, 58, 50, 71, 61, 76, 55, 64, 80, 56, 69, 39, 56, 49, 67, 70, 59, 82, 84, 51, 51, 81, 64, 65, 79, 51, 53, 58, 60, 77, 40, 58, 71, 73, 53, 62, 65, 94, 91, 50, 89, 66, 55, 63, 68, 52, 62, 60, 58, 66, 61, 67, 62, 59, 57, 72, 63, 72, 69, 96, 59, 54, 83, 80, 88, 95, 69, 62, 66, 60, 53, 52, 50, 56, 64, 68, 78, 55, 65, 47, 64, 58, 69, 60, 60, 79, 64, 111, 86, 97, 64, 76, 57, 49, 58, 61, 73, 44, 53, 72, 90, 66, 51, 70, 55, 62, 62, 48, 72, 63, 56, 65, 67, 63, 49, 77, 81, 59, 54, 63, 72, 56, 62, 63, 66, 50, 52, 55, 65, 68, 55, 80, 85, 76, 79, 69, 70, 64, 88, 66, 60, 57, 64, 49, 55, 68, 63, 60, 65, 
59, 60, 73, 65, 61, 83, 67, 85, 56, 88, 44, 61, 71, 58, 57, 58, 66, 114, 62, 54, 74, 74, 67, 95, 72, 68, 47, 94, 75, 71, 49, 59, 68, 90, 84, 65, 53, 65, 74, 70, 52, 59, 75, 94, 65, 56, 53, 68, 82, 76, 75, 60, 51, 65, 90, 50, 64, 56, 55, 44, 69, 71, 102, 65, 51, 62, 63, 64, 73, 67, 57, 76, 63, 90, 59, 61, 58, 60, 54, 84, 59, 83, 61, 58, 64, 74, 85, 79, 54, 58, 65, 57, 75, 66, 70, 67, 62, 69, 84, 59, 58, 55, 68, 97, 59, 76, 55, 58, 64, 84, 51, 73, 55, 62, 80, 60, 69, 75, 57, 62, 70, 62, 53, 69, 58, 70, 53, 69, 48, 60, 51, 66, 57, 70, 57, 54, 41, 56, 55, 48, 67, 90, 66, 92, 90, 64, 78, 58, 69, 80, 61, 64, 52, 60, 92, 55, 64, 56, 73, 74, 57, 65, 55, 52, 58, 68, 55, 73, 70, 62, 63, 56, 58, 53, 58, 60, 53, 59, 51, 65, 64, 89, 75, 111, 58, 68, 61, 52, 58, 44, 78, 48, 54, 80, 49, 59, 55, 51, 58, 71, 55, 72, 83, 77, 64, 62, 54, 74, 54, 54, 66, 54, 63, 61, 59, 83, 77, 52, 58, 99, 59, 65, 78, 53, 63, 52, 56, 73, 69, 47, 80, 65, 57, 76, 74, 78, 82, 60, 72, 53, 90, 72, 58, 63, 64, 58, 72, 83, 60, 65, 59, 125, 91, 59, 78, 65, 70, 63, 48, 64, 83, 66, 53, 59, 60, 65, 52, 61, 55, 78, 78, 55, 57, 65, 61, 52, 70, 42, 66, 82, 91, 68, 51, 62, 69, 59, 82, 49, 105, 58, 72, 67, 68, 51, 78, 52, 52, 63, 81, 92, 63, 54, 88, 56, 69, 77, 54, 65, 84, 105, 68, 60, 79, 87, 61, 77, 62, 75, 60, 46, 58, 65, 54, 78, 70, 75, 64, 59, 86, 48, 53, 58, 71, 80, 59, 62, 101, 60, 76, 65, 74, 96, 60, 71, 59, 73, 74, 60, 58, 55, 61, 71, 67, 72, 83, 53, 63, 57, 61, 74, 72, 57, 68, 81, 32, 60, 64, 63, 67, 62, 64, 86, 62, 61, 52, 73, 98, 68, 55, 69, 62, 74, 79, 43, 81, 61, 81, 66, 77, 64, 74, 57, 68, 59, 65, 63, 82, 79, 66, 68, 74, 77, 69, 104, 55, 60, 70, 61, 64, 59, 64, 72, 56, 88, 63, 77, 56, 73, 69, 65, 47, 71, 76, 95, 65, 63, 53, 89, 53, 100, 62, 69, 49, 54, 63, 57, 62, 76, 82, 50, 80, 63, 72, 47, 58, 59, 87, 108, 74, 47, 59, 82, 51, 66, 58, 76, 65, 68, 71, 79, 57, 41, 103, 64, 57, 90, 59, 52, 63, 55, 73, 70, 74, 53, 54, 53, 72, 61, 73, 51, 88, 63, 59, 60, 75, 56, 83, 59, 51, 51, 60, 52, 75, 53, 61, 67, 61, 61, 63, 62, 66, 58, 89, 71, 59, 67, 58, 69, 63, 60, 57, 45, 86, 60, 63, 74, 64, 57, 77, 51, 65, 67, 111, 71, 61, 77, 72, 56, 74, 85, 62, 72, 66, 59, 79, 44, 111, 56, 59, 58, 62, 64, 51, 76, 70, 60, 83, 80, 66, 66, 69, 62, 79, 103, 58, 73, 65, 57, 62, 58, 59, 76, 53, 93, 65, 86, 94, 63, 73, 58, 43, 61, 56, 71, 50, 65, 61, 58, 69, 84, 61, 59, 55, 54, 53, 78, 59, 67, 72, 69, 113, 76, 65, 66, 46, 58, 55, 74, 71, 61, 55, 70, 62, 52, 53, 52, 50, 119, 57, 49, 52, 80, 52, 78, 91, 76, 60, 53, 71, 56, 64, 56, 53, 77, 58, 59, 60, 79, 75, 62, 70, 74, 80, 51, 59, 60, 79, 59, 69, 59, 62, 70, 68, 66, 59, 45, 82, 88, 61, 59, 59, 49, 82, 67, 78, 63, 82, 49, 66, 64, 57, 69, 53, 64, 82, 89, 63, 70, 63, 69, 66, 94, 69, 55, 42, 71, 63, 63, 56, 56, 64, 57, 53, 71, 77, 61, 107, 116, 70, 75, 47, 68, 65, 53, 66, 57, 55, 79, 81, 52, 55, 63, 60, 68, 54, 84, 61, 58, 75, 72, 51, 68, 51, 59, 50, 62, 83, 65, 62, 67, 81, 59, 81, 49, 71, 66, 59, 56, 64, 66, 69, 67, 49, 57, 59, 52, 47, 58, 66, 81, 66, 59, 48, 70, 82, 52, 53, 43, 64, 50, 76, 73, 90, 74, 71, 88, 37, 54, 68, 53, 64, 70, 51, 71, 55, 92, 84, 54, 58, 65, 56, 61, 59, 70, 57, 63, 70, 71, 66, 70, 58, 56, 63, 73, 125, 70, 50, 84, 61, 59, 63, 68, 43, 52, 76, 83, 108, 57, 50, 54, 85, 54, 55, 76, 94, 76, 66, 64, 77, 69, 75, 43, 85, 64, 54, 69, 63, 68, 62, 123, 63, 55, 57, 74, 56, 54, 68, 61, 58, 60, 71, 56, 55, 60, 61, 55, 71, 76, 45, 67, 89, 58, 65, 61, 70, 118, 78, 68, 62, 58, 61, 77, 40, 90, 80, 72, 91, 35, 64, 59, 64, 84, 83, 65, 61, 69, 52, 58, 76, 52, 62, 66, 69, 80, 68, 85, 67, 70, 69, 72, 57, 61, 41, 
66, 70, 91, 70, 51, 54, 64, 57, 62, 68, 65, 63, 59, 97, 73, 68, 77, 51, 60, 76, 50, 92, 74, 58, 61, 68, 66, 56, 63, 88, 74, 89, 107, 72, 64, 79, 81, 67, 55, 50, 71, 54, 65, 69, 85, 60, 64, 78, 71, 46, 72, 90, 59, 87, 55, 76, 70, 91, 67, 68, 81, 66, 60, 75, 69, 75, 83, 58, 55, 62, 61, 66, 63, 60, 71, 61, 55, 78, 59, 80, 85, 56, 44, 59, 71, 58, 55, 68, 60, 86, 61, 53, 70, 72, 59, 58, 74, 60, 70, 59, 51, 91, 52, 55, 72, 70, 54, 57, 70, 69, 50, 65, 37, 58, 58, 68, 68, 89, 87, 54, 75, 60, 48, 60, 69, 60, 59, 70, 62, 48, 60, 51, 74, 54, 45, 64, 63, 36, 88, 55, 54, 63, 52, 64, 66, 61, 93, 61, 83, 86, 66, 54, 53, 93, 60, 50, 64, 70, 85, 60, 53, 65, 57, 59, 71, 47, 80, 72, 48, 84, 62, 79, 53, 60, 71, 80, 62, 57, 54, 68, 53, 66, 61, 59, 65, 68, 72, 46, 80, 78, 64, 71, 80, 38, 81, 72, 44, 63, 82, 72, 62, 64, 67, 111, 71, 54, 65, 61, 66, 51, 55, 50, 54, 54, 86, 88, 62, 61, 78, 58, 52, 85, 54, 56, 53, 48, 56, 95, 63, 54, 64, 61, 87, 65, 53, 43, 52, 53, 48, 60, 59, 63, 61, 49, 68, 58, 84, 63, 59, 55, 75, 49, 53, 69, 55, 58, 51, 65, 60, 80, 69, 64, 81, 81, 66, 53, 67, 84, 60, 68, 58, 70, 58, 62, 74, 63, 52, 57, 64, 59, 68, 58, 104, 48, 84, 61, 62, 64, 70, 63, 74, 69, 58, 50, 49, 51, 59, 67, 75, 83, 63, 74, 80, 78, 69, 72, 64, 66, 75, 50, 74, 61, 77, 59, 47, 38, 78, 72, 63, 44, 59, 54, 90, 50, 56, 56, 70, 88, 56, 67, 57, 55, 69, 67, 50, 53, 63, 72, 56, 61, 75, 66, 68, 64, 70, 63, 76, 54, 85, 61, 55, 67, 60, 60, 60, 61, 66, 73, 44, 65, 71, 64, 57, 51, 100, 58, 79, 53, 50, 61, 53, 71, 68, 75, 59, 58, 57, 94, 58, 56, 69, 102, 62, 56, 56, 72, 39, 65, 77, 60, 63, 55, 107, 85, 65, 57, 48, 73, 67, 64, 62, 78, 66, 52, 55, 55, 45, 72, 70, 67, 70, 53, 64, 62, 66, 60, 49, 81, 65, 58, 98, 72, 70, 68, 104, 54, 70, 60, 71, 77, 81, 87, 69, 68, 55, 66, 70, 70, 50, 59, 64, 59, 60, 79, 65, 62, 86, 66, 55, 77, 51, 51, 61, 75, 54, 64, 97, 70, 63, 62, 67, 61, 75, 60, 62, 48, 71, 73, 53, 67, 71, 75, 78, 59, 58, 73, 70, 101, 56, 77, 68, 64, 76, 57, 65, 64, 65, 53, 69, 59, 64, 68, 57, 64, 68, 62, 66, 60, 80, 65, 69, 73, 52, 51, 63, 73, 67, 57, 75, 70, 71, 68, 87, 57, 74, 46, 82, 94, 66, 71, 71, 73, 72, 60, 56, 60, 68, 56, 68, 53, 52, 78, 53, 59, 83, 57, 69, 72, 80, 57, 84, 58, 57, 46, 98, 71, 58, 69, 56, 50, 66, 73, 57, 89, 86, 48, 57, 61, 49, 77, 76, 60, 48, 55, 109, 69, 65, 63, 58, 69, 72, 46, 70, 64, 65, 69, 91, 89, 101, 76, 60, 67, 63, 57, 68, 63, 68, 69, 89, 83, 66, 74, 62, 61, 59, 62, 67, 61, 72, 76, 73, 51, 55, 101, 64, 58, 71, 53, 60, 70, 60, 71, 63, 51, 71, 58, 56, 52, 56, 47, 62, 75, 58, 56, 56, 71, 69, 65, 71, 67, 61, 54, 70, 75, 58, 73, 61, 68, 64, 53, 71, 58, 62, 66, 65, 64, 58, 68, 52, 65, 68, 64, 70, 75, 68, 68, 57, 66, 46, 59, 53, 74, 56, 61, 89, 51, 67, 67, 52, 68, 99, 59, 64, 57, 66, 70, 51, 63, 58, 62, 66, 70, 61, 52, 69, 95, 83, 70, 60, 65, 55, 74, 64, 51, 67, 69, 62, 68, 55, 86, 65, 67, 63, 49, 95, 52, 62, 64, 57, 63, 59, 55, 61, 78, 59, 55, 60, 44, 65, 62, 70, 70, 86, 57, 61, 64, 49, 69, 68, 56, 90, 92, 74, 70, 61, 52, 66, 77, 75, 57, 55, 54, 68, 68, 53, 55, 72, 82, 136, 60, 46, 65, 71, 63, 56, 83, 79, 82, 57, 88, 65, 64, 67, 64, 62, 77, 64, 74, 76, 106, 78, 78, 57, 92, 67, 52, 50, 68, 74, 59, 85, 67, 49, 78, 54, 58, 48, 69, 63, 54, 54, 67, 50, 89, 56, 66, 65, 78, 133, 47, 67, 56, 68, 61, 70, 74, 80, 73, 67, 70, 55, 65, 68, 66, 68, 65, 52, 74, 60, 68, 62, 62, 60, 52, 68, 65, 69, 103, 155, 59, 77, 61, 75, 62, 64, 76, 57, 63, 75, 65, 51, 59, 52, 82, 59, 70, 76, 65, 69, 86, 69, 69, 78, 60, 64, 58, 58, 67, 62, 84, 60, 61, 55, 65, 69, 68, 73, 62, 70, 54, 55, 50, 65, 64, 54, 67, 62, 68, 62, 64, 56, 70, 62, 71, 
68, 80, 64, 56, 104, 60, 60, 67, 52, 66, 63, 69, 74, 59, 64, 51, 53, 59, 65, 55, 80, 70, 97, 85, 65, 59, 75, 60, 54, 59, 70, 58, 55, 73, 69, 68, 76, 74, 71, 62, 70, 106, 61, 84, 55, 49, 49, 52, 65, 53, 81, 75, 71, 87, 60, 70, 65, 70, 56, 58, 90, 62, 66, 73, 70, 53, 72, 59, 55, 68, 68, 62, 72, 66, 84, 73, 63, 71, 63, 59, 72, 72, 72, 60, 65, 54, 79, 80, 70, 54, 70, 75, 60, 55, 77, 66, 65, 51, 69, 77, 78, 58, 88, 84, 47, 63, 69, 56, 71, 63, 63, 62, 73, 75, 87, 60, 76, 57, 68, 62, 58, 62, 75, 67, 56, 75, 72, 54, 61, 71, 78, 59, 67, 50, 57, 64, 89, 68, 63, 70, 66, 55, 75, 62, 80, 49, 54, 64, 56, 63, 76, 59, 95, 120, 71, 70, 78, 58, 58, 83, 51, 58, 70, 51, 68, 56, 58, 61, 68, 71, 73, 62, 78, 81, 63, 71, 69, 75, 57, 62, 57, 51, 65, 41, 67, 67, 60, 48, 66, 59, 69, 80, 60, 63, 74, 65, 48, 87, 66, 60, 55, 69, 75, 53, 44, 60, 70, 63, 78, 58, 76, 50, 79, 80, 50, 72, 72, 57, 70, 98, 92, 77, 72, 81, 58, 71, 59, 61, 56, 53, 63, 68, 99, 76, 73, 73, 62, 84, 69, 65, 64, 57, 55, 59, 64, 53, 60, 74, 64, 59, 60, 60, 65, 57, 78, 59, 66, 82, 66, 63, 64, 67, 65, 81, 78, 60, 64, 63, 54, 65, 72, 69, 69, 56, 72, 53, 50, 96, 74, 49, 62, 50, 110, 42, 60, 69, 83, 61, 78, 75, 58, 60, 74, 71, 61, 60, 68, 64, 73, 69, 58, 50, 67, 56, 79, 71, 70, 56, 77, 59, 63, 59, 65, 62, 87, 78, 63, 58, 68, 71, 66, 66, 48, 47, 87, 82, 66, 63, 75, 86, 64, 48, 61, 77, 79, 68, 59, 57, 61, 60, 53, 73, 85, 67, 69, 67, 70, 79, 62, 61, 87, 66, 77, 61, 98, 58, 58, 62, 66, 66, 55, 55, 73, 63, 68, 48, 72, 68, 64, 71, 68, 41, 65, 71, 61, 61, 61, 65, 83, 79, 54, 67, 64, 66, 79, 59, 76, 60, 58, 72, 83, 64, 53, 66, 68, 62, 75, 61, 67, 66, 69, 70, 42, 74, 61, 72, 59, 64, 59, 59, 81, 71, 75, 79, 57, 69, 48, 62, 54, 67, 51, 76, 62, 76, 74, 57, 59, 56, 62, 69, 86, 64, 55, 65, 62, 69, 54, 75, 55, 78, 60, 62, 56, 67, 73, 67, 58, 54, 56, 60, 96, 56, 96, 48, 62, 56, 72, 63, 70, 64, 79, 65, 63, 53, 55, 63, 80, 68, 60, 61, 66, 65, 63, 59, 54, 62, 79, 69, 64, 66, 110, 60, 106, 82, 74, 56, 76, 70, 60, 74, 60, 62, 52, 106, 59, 71, 54, 73, 72, 54, 78, 101, 56, 50, 57, 60, 63, 62, 57, 67, 54, 69, 63, 47, 50, 75, 54, 65, 63, 65, 64, 79, 70, 85, 53, 61, 58, 61, 82, 72, 79, 69, 70, 54, 61, 67, 44, 47, 61, 52, 55, 62, 82, 65, 63, 105, 67, 75, 101, 61, 100, 69, 62, 46, 51, 74, 60, 87, 54, 55, 67, 64, 85, 60, 71, 54, 59, 61, 97, 62, 50, 120, 76, 61, 53, 75, 60, 49, 64, 76, 74, 78, 57, 53, 62, 62, 71, 64, 55, 63, 82, 82, 65, 80, 71, 70, 56, 68, 52, 49, 95, 69, 61, 51, 76, 59, 55, 69, 77, 93, 93, 77, 61, 69, 56, 70, 80, 49, 58, 80, 73, 46, 72, 62, 55, 48, 56, 63, 52, 64, 77, 48, 58, 104, 46, 54, 63, 64, 76, 59, 76, 86, 54, 74, 58, 48, 59, 60, 59, 61, 137, 53, 81, 60, 64, 55, 73, 78, 57, 56, 66, 46, 82, 65, 60, 56, 65, 65, 56, 57, 54, 72, 56, 105, 63, 96, 57, 65, 55, 65, 83, 66, 78, 45, 71, 52, 56, 73, 54, 56, 69, 64, 54, 91, 61, 68, 63, 60, 93, 80, 55, 75, 60, 54, 66, 84, 88, 53, 57, 57, 115, 62, 67, 45, 74, 64, 132, 68, 85, 68, 69, 89, 60, 60, 63, 83, 66, 55, 107, 64, 62, 58, 78, 60, 75, 62, 57, 63, 58, 67, 64, 71, 67, 107, 75, 59, 56, 56, 56, 67, 74, 70, 43, 68, 74, 64, 61, 64, 49, 61, 60, 68, 68, 68, 59, 57, 65, 64, 47, 62, 52, 59, 98, 56, 59, 66, 73, 66, 64, 75, 49, 64, 81, 65, 39, 77, 60, 74, 61, 54, 73, 68, 85, 60, 61, 95, 67, 71, 60, 63, 56, 64, 69, 67, 49, 64, 64, 60, 65, 108, 67, 47, 54, 60, 77, 65, 50, 59, 79, 72, 56, 46, 61, 63, 67, 68, 64, 84, 45, 59, 81, 79, 103, 78, 67, 67, 76, 59, 66, 54, 65, 75, 66, 72, 85, 64, 75, 48, 70, 67, 64, 57, 62, 57, 81, 71, 57, 55, 60, 69, 60, 51, 81, 42, 62, 60, 72, 52, 76, 64, 74, 71, 101, 56, 60, 54, 68, 66, 87, 74, 53, 100, 85, 
61, 67, 57, 62, 71, 119, 79, 62, 59, 64, 63, 80, 60, 40, 52, 52, 66, 59, 79, 65, 55, 69, 75, 40, 58, 47, 92, 69, 82, 47, 67, 56, 68, 73, 80, 56, 65, 67, 62, 52, 59, 57, 60, 65, 71, 76, 79, 79, 60, 61, 59, 54, 60, 64, 63, 104, 72, 81, 66, 66, 64, 90, 92, 69, 63, 55, 57, 55, 57, 70, 65, 62, 52, 61, 69, 99, 61, 51, 61, 68, 61, 55, 51, 30, 75, 72, 75, 79, 70, 47, 55, 59, 93, 50, 92, 56, 72, 72, 56, 66, 69, 77, 45, 75, 66, 74, 82, 62, 64, 57, 73, 92, 58, 58, 61, 71, 57, 71, 80, 53, 73, 76, 64, 62, 71, 85, 66, 58, 44, 70, 59, 57, 70, 63, 59, 73, 83, 60, 53, 57, 63, 52, 72, 110, 54, 48, 58, 92, 77, 53, 85, 69, 75, 55, 71, 66, 73, 68, 66, 71, 48, 85, 56, 77, 83, 72, 84, 83, 64, 66, 58, 67, 52, 82, 70, 55, 70, 54, 65, 60, 75, 74, 113, 47, 71, 63, 53, 63, 57, 72, 57, 78, 63, 64, 87, 62, 59, 70, 60, 70, 66, 61, 57, 52, 58, 93, 69, 50, 45, 73, 61, 81, 77, 49, 66, 99, 55, 70, 64, 87, 49, 68, 64, 51, 62, 83, 57, 57, 64, 57, 86, 87, 59, 50, 62, 78, 64, 78, 71, 68, 62, 50, 60, 80, 63, 59, 57, 58, 67, 84, 65, 48, 69, 61, 54, 66, 104, 58, 84, 51, 76, 72, 77, 61, 83, 52, 63, 55, 57, 67, 72, 64, 56, 100, 55, 69, 62, 94, 64, 55, 63, 80, 65, 65, 75, 48, 56, 58, 60, 53, 103, 52, 62, 71, 75, 56, 70, 61, 61, 63, 68, 63, 112, 58, 89, 53, 53, 78, 82, 61, 70, 70, 85, 59, 67, 64, 69, 83, 60, 58, 55, 65, 54, 72, 58, 54, 51, 69, 77, 58, 80, 71, 105, 65, 69, 81, 72, 77, 82, 54, 65, 62, 59, 65, 63, 57, 63, 69, 64, 61, 44, 57, 58, 157, 60, 70, 41, 63, 57, 66, 73, 89, 98, 76, 71, 54, 43, 65, 52, 69, 64, 59, 64, 58, 89, 84, 66, 59, 82, 64, 63, 101, 61, 72, 84, 61, 69, 57, 72, 44, 82, 67, 68, 69, 63, 61, 84, 67, 80, 89, 60, 64, 57, 62, 58, 77, 67, 57, 69, 57, 48, 42, 47, 43, 52, 94, 50, 58, 57, 69, 86, 73, 71, 75, 77, 76, 63, 34, 63, 63, 64, 78, 73, 59, 75, 61, 35, 36, 54, 94, 70, 47, 69, 50, 61, 57, 89, 61, 42, 70, 59, 79, 65, 60, 48, 74, 91, 71, 54, 63, 100, 71, 50, 59, 102, 54, 69, 53, 70, 65, 60, 59, 50, 75, 53, 94, 56, 60, 78, 69, 55, 51, 85, 58, 75, 49, 77, 87, 81, 56, 43, 56, 65, 49, 65, 53, 50, 66, 57, 73, 64, 59, 77, 72, 78, 75, 75, 58, 61, 61, 69, 70, 108, 61, 50, 65, 55, 76, 61, 111, 88, 54, 61, 45, 74, 78, 45, 117, 64, 53, 44, 77, 59, 99, 72, 51, 76, 70, 59, 62, 67, 77, 49, 79, 54, 79, 60, 55, 49, 94, 61, 63, 58, 82, 69, 55, 61, 51, 74, 67, 83, 48, 65, 93, 58, 60, 101, 68, 72, 65, 63, 46, 58, 65, 54, 47, 58, 51, 72, 53, 98, 50, 64, 58, 49, 54, 59, 75, 74, 78, 53, 84, 70, 65, 72, 66, 66, 63, 66, 61, 91, 40, 67, 76, 64, 74, 59, 58, 70, 66, 76, 62, 99, 74, 54, 56, 59, 51, 77, 62, 60, 73, 55, 105, 55, 53, 58, 71, 48, 96, 51, 47, 56, 68, 66, 45, 53, 85, 55, 57, 41, 93, 74, 56, 71, 61, 66, 86, 72, 57, 55, 72, 66, 66, 69, 77, 56, 55, 83, 56, 59, 68, 100, 43, 59, 55, 55, 56, 77, 60, 56, 57, 69, 88, 59, 76, 51, 45, 96, 77, 59, 59, 76, 65, 52, 68, 72, 90, 75, 72, 55, 78, 74, 82, 60, 47, 62, 53, 99, 70, 44, 37, 79, 72, 55, 62, 49, 74, 83, 55, 70, 76, 83, 70, 81, 75, 83, 72, 61, 79, 72, 92, 56, 69, 57, 58, 69, 89, 81, 68, 61, 82, 51, 59, 63, 56, 59, 61, 60, 49, 88, 68, 55, 64, 67, 72, 56, 71, 75, 80, 77, 61, 91, 81, 85, 65, 62, 77, 80, 65, 77, 38, 76, 64, 66, 71, 71, 72, 50, 79, 67, 63, 47, 59, 62, 69, 51, 65, 73, 92, 74, 78, 90, 65, 56, 59, 57, 76, 72, 69, 75, 66, 53, 60, 54, 58, 78, 55, 63, 85, 79, 72, 53, 66, 82, 63, 74, 52, 61, 66, 52, 63, 62, 65, 97, 70, 56, 61, 53, 70, 56, 67, 92, 79, 80, 94, 70, 55, 68, 83, 60, 60, 73, 47, 59, 44, 72, 57, 44, 77, 95, 64, 69, 60, 72, 71, 66, 60, 58, 76, 58, 101, 70, 57, 49, 77, 72, 58, 53, 74, 57, 59, 71, 61, 59, 88, 60, 67, 59, 96, 69, 61, 55, 95, 59, 69, 58, 59, 68, 83, 57, 74, 62, 
55, 111, 65, 64, 106, 57, 71, 53, 64, 85, 53, 56, 69, 80, 56, 68, 50, 68, 69, 57, 78, 52, 96, 54, 69, 65, 44, 52, 56, 84, 60, 48, 79, 58, 66, 61, 65, 69, 50, 54, 80, 48, 64, 55, 77, 62, 65, 69, 57, 69, 48, 74, 66, 82, 64, 57, 67, 49, 67, 66, 70, 46, 54, 88, 52, 86, 66, 69, 74, 65, 66, 75, 75, 61, 76, 41, 64, 67, 56, 48, 67, 52, 77, 70, 99, 82, 69, 57, 70, 73, 87, 78, 58, 47, 69, 65, 56, 50, 71, 83, 77, 74, 71, 66, 90, 69, 68, 89, 84, 81, 53, 60, 72, 72, 52, 53, 70, 62, 66, 63, 59, 59, 76, 67, 59, 52, 45, 86, 67, 69, 56, 50, 56, 67, 90, 72, 67, 55, 55, 59, 57, 75, 70, 61, 89, 78, 131, 57, 63, 77, 79, 51, 55, 55, 66, 75, 48, 49, 51, 60, 61, 73, 69, 83, 60, 63, 55, 72, 47, 73, 83, 100, 63, 50, 64, 54, 75, 64, 67, 58, 64, 88, 49, 56, 59, 53, 52, 54, 69, 87, 52, 79, 65, 53, 56, 49, 70, 111, 63, 51, 46, 61, 62, 98, 79, 55, 53, 81, 59, 71, 89, 39, 68, 82, 57, 72, 79, 53, 56, 85, 66, 67, 66, 72, 73, 112, 122, 66, 67, 37, 81, 56, 90, 65, 56, 78, 86, 61, 66, 56, 62, 48, 54, 56, 82, 48, 59, 77, 60, 57, 76, 75, 62, 65, 62, 58, 55, 69, 77, 61, 67, 71, 62, 86, 61, 69, 59, 68, 82, 49, 69, 48, 58, 64, 60, 78, 70, 70, 65, 60, 54, 62, 67, 64, 62, 63, 73, 57, 55, 83, 63, 64, 68, 48, 75, 67, 56, 61, 52, 55, 50, 62, 98, 38, 89, 88, 58, 71, 64, 66, 79, 73, 67, 74, 70, 72, 70, 87, 43, 64, 52, 61, 70, 143, 69, 43, 55, 63, 66, 73, 91, 55, 69, 121, 99, 56, 56, 51, 44, 90, 49, 64, 52, 93, 48, 44, 74, 63, 62, 47, 90, 73, 81, 60, 50, 59, 69, 56, 42, 49, 81, 78, 71, 37, 55, 76, 56, 65, 50, 55, 65, 71, 56, 46, 67, 78, 71, 59, 47, 65, 60, 40, 52, 76, 65, 61, 97, 60, 59, 75, 91, 58, 55, 52, 88, 66, 59, 90, 55, 63, 62, 81, 59, 66, 110, 62, 61, 70, 49, 57, 72, 66, 67, 64, 104, 102, 58, 72, 93, 67, 78, 66, 51, 64, 69, 62, 47, 58, 75, 53, 62, 70, 59, 67, 63, 86, 44, 55, 56, 69, 63, 55, 63, 93, 61, 58, 70, 69, 70, 61, 74, 84, 89, 98, 56, 99, 61, 50, 79, 57, 55, 54, 70, 64, 44, 58, 72, 67, 80, 79, 58, 57, 61, 104, 68, 63, 78, 62, 67, 61, 57, 95, 69, 72, 61, 59, 74, 70, 63, 55, 75, 65, 58, 83, 67, 51, 54, 83, 88, 67, 43, 93, 74, 49, 60, 44, 73, 74, 55, 72, 68, 71, 52, 71, 79, 74, 72, 73, 86, 56, 66, 55, 68, 75, 58, 51, 73, 66, 53, 39, 63, 68, 68, 54, 66, 52, 69, 60, 73, 91, 93, 55, 84, 56, 49, 75, 66, 62, 53, 71, 64, 56, 65, 71, 68, 68, 59, 49, 73, 65, 58, 63, 59, 62, 82, 68, 80, 58, 88, 56, 48, 53, 60, 61, 60, 73, 64, 63, 61, 70, 65, 59, 75, 56, 48, 66, 39, 77, 61, 75, 63, 55, 72, 75, 64, 84, 74, 92, 60, 74, 63, 74, 76, 113, 53, 71, 65, 94, 68, 73, 77, 91, 58, 64, 63, 57, 86, 51, 65, 72, 73, 61, 54, 67, 69, 96, 58, 75, 65, 63, 56, 97, 82, 57, 64, 49, 71, 78, 53, 58, 50, 61, 63, 66, 72, 71, 77, 54, 66, 78, 61, 66, 57, 49, 59, 42, 62, 64, 65, 45, 70, 52, 68, 58, 56, 88, 72, 45, 89, 53, 55, 46, 59, 57, 41, 72, 66, 47, 50, 42, 83, 63, 56, 56, 54, 79, 70, 79, 55, 64, 59, 55, 81, 50, 68, 78, 61, 89, 66, 34, 72, 54, 60, 51, 78, 76, 79, 63, 70, 78, 81, 60, 72, 61, 59, 64, 60, 78, 47, 89, 71, 54, 59, 67, 61, 67, 62, 72, 67, 44, 69, 64, 83, 58, 60, 76, 46, 65, 59, 65, 87, 59, 65, 78, 64, 70, 63, 63, 65, 82, 52, 54, 62, 61, 80, 50, 63, 54, 71, 65, 48, 68, 79, 68, 59, 54, 46, 56, 59, 79, 63, 53, 70, 69, 49, 67, 98, 64, 79, 90, 62, 59, 91, 80, 75, 93, 46, 71, 36, 69, 66, 53, 62, 53, 67, 62, 65, 64, 51, 78, 70, 69, 61, 90, 86, 59, 46, 57, 56, 70, 69, 44, 75, 55, 111, 103, 76, 71, 53, 59, 67, 69, 69, 81, 70, 52, 55, 51, 69, 69, 58, 55, 91, 59, 58, 79, 65, 60, 46, 69, 58, 71, 98, 78, 74, 55, 84, 58, 64, 88, 72, 84, 62, 64, 63, 60, 60, 77, 70, 51, 62, 71, 92, 62, 60, 69, 61, 59, 80, 63, 53, 46, 93, 59, 62, 82, 77, 52, 81, 62, 64, 67, 44, 53, 
67, 68, 86, 88, 68, 67, 50, 81, 59, 51, 60, 66, 86, 61, 57, 109, 68, 72, 61, 83, 66, 59, 63, 58, 62, 63, 66, 76, 79, 64, 70, 88, 66, 52, 72, 56, 77, 50, 84, 79, 61, 60, 73, 55, 70, 54, 58, 78, 55, 71, 85, 61, 64, 50, 79, 95, 62, 64, 44, 54, 60, 77, 58, 66, 47, 62, 56, 70, 72, 68, 77, 49, 62, 71, 52, 63, 87, 71, 69, 66, 66, 81, 93, 61, 75, 72, 56, 47, 57, 78, 62, 80, 84, 63, 60, 54, 60, 77, 53, 53, 62, 73, 96, 71, 50, 57, 84, 57, 86, 64, 74, 59, 44, 76, 49, 63, 78, 66, 68, 67, 52, 63, 60, 66, 87, 70, 101, 63, 57, 60, 71, 66, 60, 63, 63, 46, 69, 69, 83, 48, 66, 91, 84, 51, 95, 68, 57, 66, 63, 67, 73, 62, 57, 75, 56, 61, 68, 65, 52, 61, 55, 60, 75, 55, 62, 62, 66, 57, 53, 57, 63, 60, 65, 78, 66, 57, 57, 78, 79, 70, 60, 87, 63, 72, 52, 63, 64, 58, 79, 57, 54, 89, 60, 71, 74, 65, 58, 65, 50, 69, 61, 96, 65, 63, 62, 76, 56, 63, 74, 63, 67, 67, 67, 67, 48, 78, 76, 67, 62, 63, 71, 70, 59, 48, 57, 69, 89, 78, 59, 68, 70, 65, 78, 61, 76, 65, 63, 66, 56, 57, 80, 62, 107, 67, 96, 48, 60, 71, 71, 51, 69, 51, 64, 47, 53, 74, 65, 78, 83, 50, 58, 55, 68, 54, 62, 65, 64, 75, 62, 93, 72, 57, 47, 61, 78, 60, 77, 69, 77, 67, 64, 64, 71, 58, 69, 70, 117, 68, 69, 59, 96, 45, 63, 94, 54, 73, 65, 67, 69, 72, 49, 69, 71, 70, 76, 84, 64, 72, 66, 64, 64, 83, 68, 67, 67, 79, 73, 52, 58, 80, 64, 77, 51, 63, 45, 82, 89, 56, 53, 74, 56, 73, 68, 56, 54, 70, 64, 85, 53, 62, 74, 69, 41, 85, 75, 70, 88, 58, 68, 67, 83, 80, 49, 46, 67, 55, 63, 57, 57, 74, 74, 70, 57, 72, 81, 95, 128, 70, 54, 62, 75, 53, 65, 72, 47, 62, 70, 62, 61, 55, 57, 63, 68, 55, 88, 70, 66, 71, 61, 56, 89, 72, 71, 56, 71, 67, 82, 60, 70, 49, 52, 74, 57, 72, 68, 59, 62, 47, 81, 68, 68, 55, 52, 80, 56, 50, 63, 63, 56, 69, 58, 63, 77, 67, 58, 66, 69, 60, 46, 60, 69, 91, 60, 60, 89, 57, 49, 68, 70, 63, 66, 73, 71, 58, 109, 88, 73, 70, 78, 63, 59, 63, 70, 57, 66, 86, 80, 67, 77, 65, 65, 38, 83, 135, 57, 57, 68, 62, 58, 69, 81, 59, 56, 85, 92, 88, 68, 60, 55, 92, 59, 63, 84, 84, 49, 64, 61, 64, 68, 72, 55, 58, 62, 64, 73, 55, 78, 63, 97, 73, 64, 60, 60, 72, 43, 61, 59, 53, 62, 54, 62, 56, 55, 74, 57, 61, 55, 56, 68, 63, 59, 59, 47, 80, 95, 89, 52, 55, 70, 64, 83, 67, 94, 66, 70, 66, 49, 75, 72, 74, 59, 69, 47, 71, 71, 46, 67, 65, 71, 51, 78, 66, 59, 67, 88, 71, 65, 71, 66, 70, 55, 62, 45, 65, 74, 79, 52, 76, 68, 57, 57, 66, 52, 71, 59, 106, 45, 60, 68, 72, 69, 65, 46, 99, 77, 56, 83, 55, 55, 63, 57, 72, 75, 107, 106, 71, 54, 71, 84, 62, 60, 64, 60, 58, 60, 73, 82, 68, 80, 52, 66, 60, 61, 104, 84, 83, 61, 66, 68, 86, 68, 50, 67, 44, 47, 85, 64, 71, 75, 63, 51, 60, 77, 77, 66, 65, 58, 75, 65, 69, 52, 75, 78, 64, 53, 56, 66, 57, 62, 61, 53, 81, 48, 63, 91, 61, 71, 64, 70, 61, 79, 70, 56, 70, 47, 47, 66, 62, 50, 54, 62, 64, 55, 60, 56, 58, 55, 53, 93, 90, 73, 52, 80, 59, 56, 58, 77, 59, 67, 72, 72, 74, 60, 59, 57, 61, 41, 59, 52, 51, 78, 62, 62, 65, 57, 78, 57, 59, 91, 58, 84, 94, 56, 73, 67, 78, 52, 48, 76, 71, 89, 68, 74, 56, 62, 65, 63, 48, 59, 83, 66, 84, 53, 85, 60, 58, 76, 92, 44, 66, 64, 69, 69, 74, 49, 61, 71, 73, 62, 67, 69, 77, 61, 43, 44, 57, 75, 85, 62, 70, 62, 82, 59, 76, 70, 104, 43, 63, 54, 60, 51, 57, 64, 61, 44, 62, 93, 91, 46, 54, 69, 60, 63, 73, 48, 47, 70, 45, 62, 87, 63, 53, 48, 66, 76, 44, 68, 60, 54, 48, 52, 58, 55, 70, 71, 50, 63, 78, 80, 64, 69, 67, 66, 51, 67, 60, 54, 50, 56, 84, 55, 82, 65, 89, 44, 85, 62, 59, 57, 91, 66, 53, 74, 61, 58, 72, 67, 61, 62, 52, 70, 51, 63, 69, 93, 47, 94, 63, 67, 67, 76, 66, 73, 60, 71, 48, 71, 61, 58, 52, 81, 87, 61, 57, 63, 80, 70, 70, 72, 72, 66, 55, 71, 65, 86, 59, 54, 43, 45, 77, 54, 53, 59, 66, 63, 
56, 53, 59, 58, 69, 57, 76, 62, 73, 68, 76, 52, 42, 48, 79, 70, 63, 83, 76, 57, 88, 51, 57, 49, 56, 82, 62, 48, 64, 66, 48, 64, 59, 66, 73, 60, 67, 90, 80, 54, 99, 70, 62, 77, 69, 62, 71, 77, 61, 68, 60, 48, 60, 60, 47, 63, 56, 78, 61, 94, 83, 69, 64, 57, 57, 64, 51, 58, 50, 62, 60, 76, 76, 56, 61, 67, 72, 76, 74, 55, 66, 65, 63, 62, 78, 53, 69, 52, 59, 128, 57, 67, 52, 56, 70, 77, 58, 75, 72, 62, 71, 71, 62, 69, 94, 78, 78, 47, 54, 75, 58, 63, 68, 62, 68, 54, 47, 71, 64, 56, 44, 58, 81, 59, 79, 64, 60, 57, 78, 69, 66, 82, 67, 84, 44, 65, 54, 95, 56, 64, 64, 47, 49, 61, 66, 83, 70, 74, 59, 67, 64, 60, 62, 66, 58, 55, 53, 53, 66, 62, 63, 64, 100, 92, 54, 64, 52, 61, 66, 59, 67, 58, 50, 71, 77, 88, 45, 62, 66, 63, 68, 66, 64, 62, 75, 67, 51, 66, 72, 63, 68, 57, 53, 57, 65, 53, 67, 69, 43, 66, 65, 63, 76, 78, 63, 67, 100, 65, 48, 66, 67, 69, 80, 56, 88, 67, 65, 80, 43, 71, 53, 71, 71, 65, 79, 70, 69, 72, 57, 127, 69, 58, 71, 56, 65, 69, 104, 64, 66, 61, 65, 61, 86, 62, 62, 61, 63, 118, 54, 56, 59, 82, 56, 63, 64, 58, 55, 61, 70, 54, 62, 69, 74, 55, 50, 63, 70, 72, 67, 76, 59, 54, 58, 67, 87, 61, 61, 57, 51, 72, 108, 62, 70, 88, 57, 64, 102, 85, 53, 81, 69, 78, 80, 57, 64, 62, 57, 73, 53, 76, 63, 71, 60, 51, 84, 71, 60, 68, 62, 82, 62, 80, 55, 87, 68, 70, 65, 63, 68, 95, 43, 52, 60, 75, 63, 72, 65, 66, 67, 63, 59, 60, 76, 50, 68, 46, 69, 49, 64, 65, 80, 65, 54, 70, 91, 71, 95, 74, 79, 92, 79, 68, 58, 56, 55, 81, 54, 55, 51, 62, 66, 71, 80, 75, 65, 98, 54, 68, 87, 83, 58, 65, 47, 58, 52, 71, 63, 55, 48, 55, 55, 56, 61, 79, 51, 75, 58, 52, 75, 59, 67, 63, 50, 67, 72, 72, 57, 64, 85, 66, 78, 61, 59, 61, 91, 66, 53, 60, 77, 71, 61, 56, 52, 55, 60, 50, 51, 56, 75, 61, 87, 71, 58, 64, 73, 151, 66, 71, 60, 58, 67, 79, 49, 60, 59, 88, 82, 83, 53, 50, 52, 71, 45, 56, 54, 78, 56, 55, 52, 73, 63, 68, 48, 69, 52, 72, 67, 68, 65, 50, 80, 72, 75, 66, 76, 58, 56, 91, 67, 66, 47, 90, 54, 67, 57, 70, 57, 52, 68, 59, 55, 65, 57, 82, 88, 57, 94, 61, 64, 64, 55, 64, 73, 83, 60, 52, 66, 61, 60, 73, 54, 70, 61, 79, 31, 65, 78, 67, 66, 49, 61, 110, 63, 60, 84, 74, 49, 62, 72, 71, 58, 53, 67, 47, 68, 88, 69, 72, 61, 71, 60, 75, 58, 56, 68, 43, 54, 91, 49, 74, 37, 67, 59, 58, 53, 68, 54, 66, 46, 75, 65, 48, 63, 47, 61, 66, 54, 102, 79, 62, 78, 62, 51, 59, 64, 74, 75, 72, 59, 66, 57, 53, 72, 83, 62, 61, 46, 62, 75, 67, 60, 55, 76, 61, 66, 57, 77, 59, 56, 75, 62, 70, 67, 67, 67, 64, 40, 71, 63, 61, 84, 87, 106, 54, 59, 63, 99, 69, 70, 94, 67, 77, 64, 71, 114, 84, 56, 67, 57, 57, 51, 66, 79, 60, 63, 75, 62, 53, 64, 75, 60, 60, 64, 68, 69, 69, 74, 72, 53, 70, 70, 68, 53, 48, 73, 62, 61, 55, 67, 64, 69, 70, 64, 58, 56, 140, 68, 78, 73, 67, 57, 63, 93, 77, 65, 63, 60, 67, 77, 78, 60, 59, 54, 57, 82, 78, 67, 89, 49, 66, 57, 54, 59, 91, 60, 56, 72, 63, 63, 60, 58, 54, 65, 86, 79, 76, 53, 70, 61, 57, 62, 65, 58, 57, 71, 58, 73, 77, 60, 71, 58, 69, 53, 70, 71, 70, 49, 60, 52, 77, 61, 63, 64, 71, 73, 59, 61, 92, 56, 77, 52, 95, 65, 56, 41, 47, 71, 47, 61, 57, 69, 70, 68, 61, 78, 63, 65, 66, 61, 56, 64, 68, 53, 67, 65, 69, 57, 56, 84, 62, 65, 68, 67, 63, 78, 63, 65, 59, 95, 63, 67, 50, 104, 71, 69, 47, 67, 66, 89, 59, 63, 114, 52, 67, 63, 105, 62, 54, 70, 76, 86, 59, 94, 62, 75, 67, 60, 58, 52, 60, 63, 53, 98, 66, 82, 88, 56, 56, 56, 51, 62, 56, 57, 75, 76, 65, 75, 73, 77, 71, 70, 61, 66, 55, 84, 57, 76, 63, 75, 61, 55, 67, 74, 55, 67, 61, 77, 71, 87, 83, 58, 62, 69, 106, 105, 48, 62, 75, 64, 47, 58, 66, 76, 60, 53, 54, 52, 64, 74, 65, 66, 59, 60, 83, 61, 54, 102, 74, 45, 71, 56, 62, 70, 62, 69, 64, 63, 63, 86, 62, 60, 61, 57, 
59, 73, 53, 66, 59, 53, 58, 71, 55, 64, 56, 68, 51, 69, 62, 51, 55, 60, 94, 59, 58, 50, 76, 49, 56, 60, 62, 80, 64, 72, 64, 71, 100, 64, 50, 80, 62, 72, 72, 56, 88, 65, 53, 66, 71, 120, 46, 75, 108, 61, 73, 62, 73, 68, 51, 70, 77, 59, 66, 56, 64, 71, 67, 77, 120, 73, 71, 73, 57, 73, 61, 79, 98, 64, 58, 84, 82, 72, 46, 61, 72, 54, 72, 72, 67, 62, 53, 62, 68, 63, 88, 65, 70, 66, 73, 91, 85, 56, 60, 65, 66, 67, 70, 54, 70, 73, 66, 73, 64, 56, 52, 63, 55, 56, 57, 62, 70, 48, 75, 74, 76, 53, 67, 63, 58, 58, 51, 65, 74, 68, 61, 74, 64, 61, 61, 69, 71, 69, 70, 69, 62, 52, 63, 52, 62, 60, 63, 52, 63, 60, 58, 52, 61, 55, 52, 68, 63, 77, 52, 59, 82, 78, 76, 61, 76, 67, 72, 54, 60, 65, 35, 71, 53, 64, 60, 82, 76, 47, 54, 72, 63, 141, 85, 60, 62, 61, 95, 63, 53, 62, 86, 87, 65, 70, 46, 55, 47, 66, 89, 55, 70, 89, 53, 78, 58, 57, 89, 69, 59, 83, 59, 65, 67, 52, 53, 54, 65, 52, 58, 69, 60, 66, 62, 101, 56, 48, 71, 56, 41, 107, 66, 54, 84, 73, 86, 58, 62, 70, 46, 70, 68, 55, 59, 66, 63, 58, 40, 100, 49, 61, 72, 73, 71, 85, 47, 69, 46, 55, 70, 58, 86, 58, 63, 65, 75, 55, 55, 70, 75, 56, 62, 86, 58, 49, 90, 48, 48, 62, 86, 51, 62, 54, 51, 66, 65, 66, 68, 93, 62, 62, 89, 62, 56, 44, 66, 54, 52, 58, 75, 63, 65, 58, 62, 65, 97, 67, 59, 52, 45, 58, 63, 50, 41, 72, 62, 85, 101, 71, 52, 71, 55, 63, 53, 53, 65, 76, 49, 75, 75, 56, 56, 56, 67, 62, 56, 54, 81, 86, 92, 56, 61, 68, 73, 54, 65, 62, 84, 92, 74, 68, 55, 69, 53, 85, 48, 59, 64, 58, 64, 62, 58, 70, 38, 56, 78, 75, 66, 57, 70, 38, 102, 65, 51, 94, 51, 62, 96, 86, 51, 72, 75, 78, 43, 104, 51, 72, 74, 60, 136, 57, 65, 62, 93, 68, 67, 84, 62, 66, 54, 75, 55, 44, 65, 101, 64, 56, 82, 91, 52, 50, 65, 61, 68, 56, 86, 112, 63, 70, 76, 78, 62, 137, 61, 58, 48, 66, 60, 57, 62, 63, 67, 72, 80, 65, 60, 72, 76, 69, 57, 58, 84, 50, 84, 59, 68, 58, 47, 55, 69, 75, 78, 47, 86, 57, 67, 64, 62, 60, 58, 67, 72, 62, 81, 64, 94, 74, 54, 73, 58, 55, 63, 63, 67, 71, 67, 66, 67, 60, 61, 78, 48, 69, 61, 47, 42, 73, 51, 66, 68, 77, 62, 91, 70, 70, 64, 67, 78, 79, 47, 62, 54, 62, 60, 74, 54, 95, 56, 60, 60, 70, 114, 57, 60, 64, 53, 82, 58, 51, 64, 59, 66, 65, 49, 61, 74, 61, 79, 63, 59, 48, 63, 50, 75, 70, 61, 80, 79, 76, 53, 94, 61, 86, 88, 70, 62, 54, 70, 58, 54, 49, 69, 55, 64, 65, 60, 62, 61, 69, 53, 74, 57, 56, 73, 56, 78, 73, 96, 53, 109, 68, 73, 40, 71, 69, 90, 60, 85, 56, 86, 73, 58, 73, 65, 74, 66, 86, 54, 49, 52, 70, 68, 41, 62, 44, 65, 53, 88, 64, 73, 70, 66, 73, 60, 69, 82, 57, 60, 62, 63, 60, 57, 61, 66, 58, 59, 65, 82, 69, 62, 101, 61, 39, 71, 56, 77, 106, 75, 42, 59, 99, 61, 71, 74, 58, 79, 61, 78, 65, 48, 72, 82, 64, 59, 85, 56, 51, 38, 93, 66, 57, 53, 63, 63, 62, 70, 88, 71, 47, 86, 58, 62, 50, 61, 57, 60, 65, 72, 49, 90, 65, 68, 75, 75, 64, 63, 54, 69, 48, 50, 45, 62, 72, 64, 60, 50, 65, 64, 74, 93, 66, 93, 51, 57, 57, 61, 54, 58, 65, 63, 53, 61, 69, 66, 58, 69, 101, 50, 70, 73, 72, 63, 90, 63, 70, 68, 60, 100, 78, 71, 52, 54, 87, 55, 95, 62, 70, 57, 59, 61, 80, 60, 66, 52, 59, 53, 67, 70, 42, 57, 74, 61, 82, 59, 100, 56, 49, 67, 71, 60, 80, 74, 76, 44, 67, 55, 59, 59, 54, 56, 61, 60, 61, 74, 67, 75, 57, 63, 65, 88, 81, 70, 62, 59, 62, 55, 57, 101, 66, 72, 62, 97, 62, 63, 49, 68, 55, 48, 70, 55, 72, 55, 70, 76, 58, 62, 56, 149, 48, 65, 57, 57, 70, 87, 69, 62, 75, 59, 58, 64, 73, 63, 73, 67, 53, 65, 78, 61, 65, 65, 50, 51, 55, 59, 84, 80, 77, 54, 58, 67, 71, 39, 66, 66, 63, 60, 59, 63, 50, 68, 74, 68, 56, 56, 65, 63, 77, 64, 69, 61, 63, 59, 49, 58, 71, 82, 103, 68, 67, 56, 60, 57, 58, 58, 58, 61, 91, 66, 74, 73, 75, 79, 47, 75, 81, 64, 83, 74, 65, 54, 65, 64, 81, 
49, 64, 51, 85, 68, 54, 62, 53, 74, 81, 60, 81, 53, 77, 90, 57, 70, 46, 63, 72, 66, 67, 64, 72, 47, 51, 64, 66, 61, 62, 75, 61, 119, 65, 66, 107, 67, 54, 78, 44, 93, 62, 72, 70, 50, 75, 66, 65, 82, 87, 89, 77, 64, 66, 68, 64, 79, 65, 56, 60, 72, 64, 69, 62, 64, 57, 59, 59, 68, 76, 68, 75, 77, 72, 83, 62, 61, 66, 68, 78, 57, 52, 77, 78, 67, 83, 66, 45, 62, 81, 56, 62, 71, 55, 89, 60, 56, 65, 62, 58, 62, 55, 89, 49, 62, 70, 69, 64, 63, 58, 84, 80, 63, 76, 58, 69, 87, 69, 59, 74, 53, 76, 83, 58, 52, 69, 61, 61, 60, 63, 69, 70, 57, 60, 64, 54, 68, 85, 67, 64, 69, 73, 73, 63, 48, 74, 64, 48, 69, 56, 68, 66, 71, 56, 45, 74, 47, 64, 88, 60, 69, 78, 66, 61, 65, 66, 61, 60, 72, 65, 68, 64, 65, 76, 83, 85, 64, 58, 53, 59, 64, 53, 73, 54, 65, 66, 61, 62, 76, 61, 62, 86, 66, 75, 62, 72, 73, 57, 58, 65, 49, 64, 71, 59, 52, 71, 94, 65, 54, 68, 41, 71, 60, 62, 77, 70, 55, 69, 58, 54, 64, 50, 81, 76, 84, 60, 58, 67, 44, 56, 87, 66, 56, 62, 57, 59, 64, 42, 69, 72, 65, 59, 57, 53, 70, 65, 71, 49, 62, 88, 95, 55, 56, 61, 58, 65, 62, 66, 49, 94, 72, 72, 62, 54, 80, 106, 62, 63, 52, 49, 72, 61, 59, 54, 67, 74, 64, 63, 102, 53, 55, 84, 78, 83, 57, 65, 44, 63, 73, 61, 52, 51, 79, 61, 69, 59, 56, 51, 83, 54, 59, 83, 82, 74, 65, 70, 60, 57, 71, 56, 66, 73, 57, 82, 65, 70, 49, 77, 68, 39, 67, 64, 52, 93, 68, 72, 77, 77, 54, 51, 67, 65, 64, 60, 81, 63, 75, 60, 50, 63, 67, 83, 75, 75, 58, 54, 66, 62, 81, 54, 65, 116, 76, 39, 67, 89, 56, 60, 62, 68, 60, 61, 65, 75, 71, 84, 64, 62, 53, 62, 86, 63, 38, 63, 69, 62, 70, 71, 50, 64, 54, 66, 58, 48, 71, 61, 52, 69, 86, 72, 47, 91, 60, 64, 90, 91, 59, 56, 52, 53, 58, 63, 70, 75, 75, 80, 62, 107, 58, 72, 94, 42, 62, 69, 60, 83, 68, 90, 54, 90, 99, 55, 57, 62, 56, 72, 56, 86, 69, 63, 55, 64, 68, 68, 78, 67, 47, 75, 46, 73, 68, 82, 68, 66, 54, 67, 109, 47, 68, 52, 84, 63, 61, 50, 51, 60, 64, 66, 92, 62, 49, 66, 70, 55, 51, 60, 56, 59, 53, 75, 64, 60, 68, 65, 56, 61, 57, 59, 57, 58, 62, 61, 69, 71, 64, 70, 58, 59, 87, 75, 57, 64, 58, 94, 70, 57, 65, 59, 70, 63, 87, 51, 60, 70, 64, 59, 57, 75, 58, 69, 64, 53, 71, 53, 66, 73, 63, 60, 82, 74, 62, 64, 78, 82, 77, 123, 61, 63, 59, 60, 111, 70, 85, 66, 71, 49, 66, 54, 54, 58, 52, 72, 72, 59, 73, 65, 61, 75, 75, 73, 68, 85, 72, 65, 51, 54, 72, 102, 77, 66, 52, 80, 70, 116, 64, 55, 71, 58, 55, 55, 74, 58, 77, 85, 69, 47, 72, 52, 45, 74, 68, 86, 87, 54, 72, 62, 54, 66, 58, 59, 65, 88, 61, 69, 88, 47, 62, 56, 54, 64, 57, 56, 85, 65, 68, 51, 74, 53, 54, 55, 74, 70, 62, 63, 65, 85, 77, 69, 39, 58, 61, 63, 63, 72, 53, 68, 52, 66, 81, 69, 56, 64, 41, 58, 81, 52, 56, 59, 45, 74, 54, 47, 59, 53, 59, 59, 53, 87, 82, 53, 67, 50, 82, 73, 57, 96, 58, 71, 60, 88, 57, 52, 43, 48, 69, 70, 61, 102, 59, 61, 68, 115, 54, 63, 49, 68, 72, 82, 55, 54, 57, 76, 70, 57, 61, 58, 64, 65, 89, 58, 71, 86, 87, 57, 58, 53, 59, 57, 68, 88, 59, 68, 67, 75, 69, 65, 45, 67, 98, 70, 67, 62, 62, 66, 58, 67, 69, 49, 74, 56, 55, 45, 68, 83, 68, 57, 72, 83, 60, 57, 26, 48, 56, 65, 66, 86, 57, 46, 60, 48, 51, 54, 69, 90, 73, 60, 69, 67, 50, 59, 81, 88, 83, 51, 84, 70, 54, 60, 61, 64, 70, 65, 65, 86, 62, 62, 73, 40, 75, 72, 82, 96, 74, 60, 70, 68, 70, 46, 49, 75, 63, 88, 65, 68, 52, 66, 55, 58, 60, 73, 68, 80, 58, 69, 59, 65, 67, 72, 62, 66, 69, 81, 55, 57, 44, 52, 67, 50, 72, 58, 52, 58, 61, 66, 55, 56, 68, 58, 81, 70, 70, 50, 67, 91, 67, 65, 80, 60, 63, 59, 48, 73, 76, 72, 42, 58, 72, 42, 68, 53, 95, 58, 64, 64, 74, 75, 81, 68, 63, 50, 67, 71, 82, 110, 61, 70, 62, 80, 59, 56, 76, 55, 71, 97, 60, 58, 75, 111, 59, 71, 54, 80, 74, 64, 54, 92, 71, 64, 58, 90, 55, 59, 64, 83, 74, 
141, 46, 70, 55, 62, 56, 59, 88, 63, 60, 69, 82, 74, 40, 57, 55, 71, 102, 40, 72, 64, 72, 76, 65, 58, 56, 56, 90, 82, 64, 73, 58, 63, 58, 72, 91, 57, 99, 63, 60, 57, 74, 56, 50, 52, 54, 69, 67, 49, 58, 64, 63, 68, 89, 60, 71, 63, 71, 66, 66, 64, 61, 65, 46, 66, 56, 58, 59, 77, 52, 60, 71, 72, 73, 53, 52, 53, 53, 57, 65, 50, 60, 60, 78, 125, 68, 57, 39, 59, 74, 56, 95, 61, 46, 56, 78, 72, 57, 64, 63, 52, 61, 61, 78, 70, 55, 72, 67, 51, 52, 61, 64, 50, 42, 81, 69, 74, 63, 59, 57, 92, 50, 92, 69, 66, 65, 50, 66, 55, 55, 85, 53, 59, 55, 49, 82, 70, 51, 74, 44, 55, 65, 65, 79, 54, 56, 69, 62, 58, 55, 56, 54, 60, 72, 63, 70, 85, 66, 59, 78, 74, 62, 74, 68, 69, 63, 68, 44, 57, 79, 58, 66, 67, 49, 79, 51, 49, 54, 74, 90, 58, 72, 56, 86, 80, 50, 70, 72, 64, 63, 69, 66, 44, 71, 59, 68, 46, 60, 70, 65, 67, 59, 93, 85, 62, 54, 52, 53, 51, 56, 55, 69, 78, 75, 53, 56, 44, 57, 65, 61, 73, 59, 63, 54, 56, 65, 66, 70, 71, 70, 75, 72, 60, 65, 60, 53, 57, 50, 45, 70, 49, 49, 111, 52, 73, 93, 56, 75, 64, 73, 77, 57, 78, 59, 62, 70, 67, 62, 65, 58, 85, 47, 62, 58, 62, 68, 51, 57, 61, 72, 59, 53, 69, 67, 64, 62, 101, 66, 74, 97, 51, 64, 72, 81, 70, 59, 47, 54, 85, 73, 59, 73, 70, 50, 67, 54, 75, 100, 92, 73, 52, 54, 80, 74, 58, 74, 55, 62, 86, 62, 69, 67, 89, 79, 57, 66, 48, 43, 60, 66, 53, 73, 68, 61, 61, 73, 86, 69, 46, 58, 66, 49, 57, 82, 58, 52, 51, 65, 73, 46, 52, 59, 49, 94, 73, 139, 58, 59, 77, 54, 64, 46, 51, 72, 75, 85, 89, 78, 69, 60, 55, 77, 49, 61, 60, 57, 64, 61, 54, 62, 73, 64, 73, 57, 61, 64, 66, 68, 72, 59, 64, 69, 57, 68, 58, 64, 61, 61, 68, 72, 64, 59, 78, 57, 60, 47, 66, 58, 62, 63, 53, 93, 66, 79, 58, 60, 62, 58, 53, 72, 80, 55, 71, 68, 60, 71, 44, 70, 74, 57, 91, 78, 78, 105, 67, 54, 78, 58, 56, 53, 101, 68, 60, 67, 52, 56, 65, 77, 41, 59, 65, 58, 82, 61, 66, 55, 56, 54, 48, 57, 78, 64, 71, 60, 47, 46, 87, 81, 50, 66, 107, 57, 58, 67, 60, 70, 77, 74, 70, 62, 48, 62, 55, 66, 77, 60, 63, 69, 63, 59, 92, 54, 60, 57, 45, 62, 63, 71, 65, 67, 61, 64, 60, 62, 60, 62, 49, 55, 62, 62, 55, 62, 62, 64, 78, 61, 78, 63, 63, 64, 66, 40, 78, 64, 54, 65, 57, 59, 61, 63, 65, 54, 57, 57, 63, 61, 59, 50, 89, 52, 64, 64, 64, 64, 122, 54, 44, 66, 69, 67, 60, 53, 60, 84, 83, 98, 65, 74, 53, 80, 84, 64, 92, 74, 62, 99, 64, 74, 53, 71, 78, 68, 77, 71, 60, 51, 85, 36, 65, 66, 49, 62, 63, 60, 58, 52, 48, 55, 67, 51, 63, 65, 52, 63, 93, 43, 59, 71, 60, 77, 66, 63, 65, 56, 74, 68, 60, 78, 64, 83, 55, 92, 52, 66, 56, 56, 52, 62, 66, 60, 58, 53, 64, 59, 45, 66, 51, 79, 63, 61, 53, 75, 77, 68, 64, 50, 60, 47, 70, 67, 51, 54, 76, 48, 76, 44, 62, 64, 61, 81, 46, 65, 58, 64, 76, 74, 55, 58, 63, 66, 74, 64, 68, 68, 84, 68, 64, 92, 47, 58, 54, 83, 73, 65, 134, 55, 64, 63, 67, 131, 61, 61, 61, 89, 60, 110, 59, 52, 61, 53, 78, 48, 63, 68, 66, 66, 62, 60, 84, 62, 58, 50, 58, 62, 60, 55, 70, 78, 52, 89, 76, 70, 105, 56, 70, 74, 64, 74, 60, 97, 80, 86, 75, 96, 59, 57, 60, 60, 64, 75, 51, 81, 60, 79, 58, 53, 64, 52, 59, 52, 47, 54, 106, 71, 56, 64, 67, 60, 56, 66, 69, 63, 76, 79, 61, 77, 62, 56, 55, 54, 63, 54, 78, 66, 60, 73, 63, 65, 64, 55, 81, 53, 75, 68, 57, 61, 83, 63, 69, 97, 59, 59, 61, 60, 59, 52, 55, 64, 66, 77, 47, 83, 61, 59, 81, 60, 93, 65, 79, 59, 60, 86, 79, 54, 87, 94, 54, 54, 64, 82, 54, 59, 58, 68, 82, 106, 72, 65, 67, 51, 52, 46, 61, 72, 60, 55, 67, 68, 73, 83, 96, 73, 107, 56, 67, 75, 64, 63, 76, 59, 63, 59, 53, 60, 71, 57, 71, 68, 73, 76, 49, 53, 117, 49, 67, 68, 92, 85, 61, 95, 55, 49, 66, 49, 50, 75, 64, 57, 76, 68, 41, 57, 55, 62, 51, 75, 85, 60, 61, 60, 75, 73, 58, 70, 75, 66, 56, 71, 64, 56, 58, 83, 53, 59, 
61, 56, 54, 84, 78, 70, 62, 65, 62, 53, 56, 58, 64, 73, 52, 70, 82, 61, 75, 65, 59, 73, 86, 69, 68, 55, 111, 85, 59, 61, 66, 51, 62, 69, 71, 63, 74, 58, 64, 48, 60, 51, 56, 77, 77, 72, 56, 48, 52, 48, 67, 69, 89, 65, 60, 65, 55, 56, 82, 61, 61, 63, 58, 93, 59, 71, 64, 73, 72, 56, 58, 60, 54, 71, 72, 56, 53, 61, 74, 75, 58, 66, 61, 73, 59, 73, 73, 89, 62, 59, 62, 57, 73, 55, 51, 70, 53, 65, 68, 66, 54, 48, 79, 79, 53, 78, 75, 64, 74, 56, 63, 60, 62, 109, 65, 65, 60, 63, 62, 59, 54, 76, 88, 52, 52, 52, 99, 45, 69, 58, 63, 70, 66, 59, 78, 67, 54, 61, 98, 68, 86, 60, 55, 60, 63, 62, 60, 90, 64, 75, 65, 51, 75, 64, 54, 47, 57, 65, 39, 106, 65, 54, 61, 64, 53, 70, 87, 73, 72, 70, 70, 65, 49, 74, 75, 66, 76, 49, 68, 81, 48, 71, 88, 67, 63, 65, 74, 60, 69, 58, 47, 51, 70, 144, 60, 78, 70, 73, 62, 48, 53, 61, 88, 55, 58, 66, 52, 60, 73, 52, 65, 65, 55, 73, 58, 63, 75, 60, 52, 67, 49, 108, 64, 65, 64, 62, 73, 57, 68, 68, 81, 64, 69, 71, 57, 53, 61, 69, 61, 59, 62, 64, 84, 53, 55, 60, 69, 68, 70, 63, 73, 70, 119, 57, 61, 57, 61, 77, 51, 58, 54, 76, 80, 63, 49, 58, 61, 76, 56, 69, 59, 73, 49, 67, 62, 73, 52, 58, 78, 58, 74, 63, 60, 67, 62, 51, 61, 67, 55, 64, 87, 65, 72, 48, 69, 75, 73, 69, 68, 68, 96, 71, 73, 86, 58, 93, 78, 64, 53, 76, 83, 109, 56, 79, 80, 61, 89, 61, 75, 70, 66, 59, 73, 51, 56, 55, 70, 80, 67, 58, 67, 65, 64, 60, 50, 81, 64, 71, 64, 81, 82, 66, 51, 79, 55, 62, 63, 39, 81, 59, 62, 59, 66, 72, 55, 48, 56, 81, 78, 57, 59, 62, 76, 66, 61, 59, 58, 55, 86, 49, 53, 63, 53, 65, 80, 56, 57, 60, 61, 70, 65, 72, 59, 62, 46, 90, 74, 55, 57, 68, 85, 74, 95, 61, 86, 63, 70, 55, 71, 61, 74, 74, 66, 60, 70, 61, 53, 62, 69, 59, 61, 72, 64, 67, 100, 53, 58, 61, 72, 61, 53, 65, 107, 70, 80, 61, 53, 74, 59, 72, 60, 71, 60, 67, 76, 54, 59, 61, 64, 63, 57, 53, 90, 73, 66, 78, 65, 49, 57, 60, 56, 65, 80, 91, 48, 56, 64, 60, 66, 63, 63, 71, 58, 81, 71, 61, 60, 66, 83, 72, 82, 59, 55, 61, 62, 76, 69, 75, 58, 48, 49, 54, 56, 68, 64, 44, 67, 76, 62, 57, 65, 74, 58, 55, 65, 65, 41, 53, 72, 57, 108, 77, 83, 77, 60, 68, 78, 67, 62, 87, 78, 62, 82, 62, 51, 70, 61, 66, 90, 52, 63, 93, 54, 61, 59, 69, 67, 58, 57, 53, 52, 63, 67, 56, 65, 61, 68, 55, 60, 65, 70, 67, 55, 77, 54, 73, 57, 65, 66, 59, 61, 54, 61, 58, 58, 82, 63, 102, 76, 63, 79, 42, 64, 64, 56, 61, 55, 70, 53, 55, 55, 68, 63, 51, 67, 47, 56, 67, 64, 57, 74, 60, 61, 87, 63, 72, 66, 69, 59, 49, 46, 60, 66, 66, 77, 61, 81, 79, 42, 71, 97, 42, 46, 107, 76, 97, 62, 68, 78, 60, 62, 50, 48, 61, 62, 60, 70, 69, 64, 56, 71, 86, 56, 41, 99, 59, 56, 97, 43, 73, 77, 74, 88, 53, 65, 63, 89, 49, 61, 60, 58, 55, 68, 67, 52, 64, 63, 55, 85, 96, 72, 38, 56, 61, 68, 63, 84, 72, 51, 58, 70, 74, 70, 89, 60, 77, 66, 76, 74, 63, 66, 60, 48, 66, 85, 63, 67, 116, 60, 86, 59, 70, 69, 75, 77, 60, 55, 57, 50, 83, 63, 54, 111, 64, 74, 82, 56, 58, 65, 61, 52, 60, 45, 54, 77, 56, 82, 70, 73, 54, 95, 74, 55, 71, 66, 45, 75, 63, 57, 60, 64, 75, 53, 62, 44, 51, 60, 58, 87, 79, 56, 91, 86, 57, 73, 73, 59, 105, 47, 62, 77, 102, 59, 73, 60, 61, 56, 59, 82, 65, 65, 68, 83, 66, 76, 50, 83, 62, 59, 60, 87, 60, 110, 63, 55, 85, 68, 67, 78, 61, 53, 58, 82, 66, 76, 89, 61, 86, 45, 62, 77, 71, 40, 64, 75, 59, 64, 65, 56, 60, 99, 92, 65, 62, 71, 77, 64, 69, 67, 56, 54, 49, 55, 56, 88, 55, 64, 52, 67, 135, 48, 61, 63, 51, 70, 66, 72, 51, 73, 77, 73, 60, 82, 86, 85, 51, 79, 35, 82, 54, 58, 57, 58, 63, 46, 60, 70, 71, 58, 55, 71, 82, 105, 64, 96, 69, 48, 51, 70, 84, 71, 64, 62, 99, 72, 67, 60, 67, 61, 66, 85, 96, 61, 45, 56, 66, 53, 52, 53, 55, 38, 52, 91, 54, 50, 75, 58, 74, 63, 54, 65, 62, 76, 76, 
66, 62, 79, 90, 56, 67, 59, 63, 69, 65, 49, 52, 57, 59, 50, 68, 64, 65, 57, 72, 50, 85, 77, 56, 82, 80, 53, 72, 67, 67, 58, 48, 67, 56, 59, 59, 82, 78, 64, 62, 76, 68, 83, 43, 103, 78, 66, 51, 58, 64, 59, 59, 68, 67, 62, 74, 62, 78, 67, 73, 72, 56, 57, 34, 56, 62, 80, 75, 57, 64, 67, 41, 40, 69, 62, 47, 98, 93, 54, 61, 58, 44, 59, 62, 65, 121, 64, 55, 50, 72, 48, 40, 59, 105, 57, 86, 56, 74, 91, 74, 62, 102, 51, 106, 57, 81, 56, 93, 41, 66, 44, 71, 51, 63, 71, 104, 79, 64, 98, 56, 51, 86, 84, 75, 60, 51, 77, 81, 64, 79, 64, 67, 42, 82, 75, 99, 73, 47, 62, 62, 55, 60, 75, 75, 78, 66, 71, 74, 56, 55, 65, 76, 58, 66, 55, 29, 64, 65, 69, 66, 51, 62, 60, 52, 60, 65, 61, 84, 56, 58, 69, 72, 54, 70, 70, 66, 75, 56, 49, 53, 99, 56, 90, 101, 79, 79, 61, 80, 74, 43, 98, 69, 78, 54, 69, 66, 62, 77, 64, 70, 69, 82, 64, 71, 63, 65, 50, 50, 73, 60, 60, 61, 58, 82, 49, 69, 97, 57, 91, 71, 56, 63, 68, 61, 54, 84, 62, 82, 76, 59, 62, 62, 56, 66, 73, 76, 59, 61, 76, 55, 64, 61, 55, 96, 55, 62, 87, 66, 87, 62, 71, 61, 54, 75, 66, 63, 85, 62, 80, 68, 54, 62, 56, 62, 51, 67, 87, 58, 64, 42, 59, 46, 45, 64, 53, 65, 67, 57, 57, 24, 108, 55, 83, 95, 59, 58, 57, 66, 54, 57, 62, 87, 67, 47, 69, 99, 74, 53, 44, 57, 47, 60, 62, 73, 54, 67, 55, 63, 50, 88, 80, 69, 59, 92, 64, 66, 56, 55, 65, 65, 65, 63, 70, 74, 67, 69, 65, 59, 70, 55, 64, 57, 60, 79, 72, 54, 74, 69, 67, 94, 73, 79, 66, 44, 58, 52, 77, 65, 59, 51, 65, 56, 75, 77, 46, 83, 68, 55, 63, 50, 54, 82, 63, 66, 75, 50, 60, 31, 55, 49, 67, 57, 73, 42, 51, 61, 62, 61, 47, 60, 57, 88, 58, 67, 67, 56, 52, 44, 98, 59, 55, 79, 88, 63, 79, 76, 71, 60, 67, 66, 81, 65, 51, 43, 47, 56, 61, 53, 68, 79, 94, 64, 53, 54, 61, 66, 59, 55, 43, 87, 69, 55, 76, 70, 70, 77, 57, 41, 58, 63, 100, 66, 69, 60, 47, 63, 68, 80, 47, 48, 43, 45, 56, 59, 79, 68, 57, 76, 49, 58, 67, 74, 69, 107, 96, 63, 81, 51, 97, 64, 72, 57, 62, 47, 57, 60, 53, 60, 82, 83, 73, 98, 66, 56, 65, 77, 70, 48, 81, 87, 66, 48, 79, 97, 52, 53, 69, 58, 53, 66, 67, 65, 56, 90, 43, 76, 66, 68, 66, 68, 58, 73, 70, 65, 45, 63, 67, 48, 60, 97, 60, 82, 48, 68, 56, 58, 64, 86, 74, 58, 65, 68, 58, 56, 61, 90, 55, 49, 67, 55, 55, 46, 87, 54, 106, 60, 68, 70, 68, 79, 73, 76, 51, 84, 75, 74, 110, 89, 84, 61, 50, 62, 68, 73, 101, 40, 60, 60, 65, 69, 70, 55, 85, 73, 60, 58, 61, 66, 65, 92, 46, 53, 59, 108, 60, 53, 68, 78, 62, 52, 80, 49, 48, 48, 48, 54, 70, 74, 48, 51, 70, 62, 97, 76, 47, 63, 51, 59, 59, 55, 89, 75, 61, 54, 73, 69, 39, 61, 51, 45, 82, 60, 65, 57, 48, 89, 52, 100, 49, 41, 64, 63, 57, 59, 77, 95, 59, 77, 75, 54, 63, 80, 49, 50, 69, 69, 76, 60, 58, 74, 89, 87, 81, 65, 60, 74, 62, 63, 62, 35, 66, 58, 92, 69, 56, 49, 66, 69, 54, 57, 44, 56, 66, 73, 70, 65, 69, 59, 89, 53, 63, 39, 58, 45, 64, 69, 78, 71, 58, 59, 79, 68, 73, 64, 58, 62, 63, 54, 67, 67, 49, 77, 61, 63, 57, 53, 62, 89, 47, 52, 70, 65, 77, 76, 56, 60, 71, 67, 70, 59, 65, 70, 50, 77, 45, 62, 61, 51, 60, 65, 62, 76, 63, 86, 66, 58, 39, 82, 63, 50, 86, 34, 79, 64, 63, 55, 46, 60, 51, 72, 68, 68, 52, 76, 59, 89, 60, 47, 59, 64, 62, 67, 100, 87, 65, 60, 85, 66, 77, 80, 66, 62, 63, 54, 57, 62, 75, 63, 62, 49, 61, 57, 60, 48, 53, 69, 57, 44, 62, 51, 58, 65, 64, 66, 75, 75, 63, 67, 60, 69, 73, 59, 76, 74, 73, 73, 54, 59, 79, 55, 68, 70, 67, 78, 59, 52, 55, 86, 49, 66, 55, 69, 62, 72, 92, 60, 59, 80, 65, 54, 62, 75, 71, 72, 74, 77, 58, 75, 54, 88, 55, 57, 106, 54, 58, 58, 50, 63, 61, 57, 117, 64, 65, 53, 61, 67, 55, 62, 50, 59, 72, 71, 80, 54, 96, 68, 58, 61, 65, 73, 63, 61, 65, 59, 103, 57, 60, 83, 41, 68, 63, 78, 84, 81, 62, 62, 55, 103, 65, 60, 70, 53, 59, 
53, 55, 60, 51, 56, 80, 51, 49, 80, 71, 65, 50, 61, 62, 48, 72, 64, 82, 61, 92, 57, 58, 61, 62, 48, 64, 68, 66, 73, 84, 79, 73, 49, 45, 67, 65, 58, 72, 100, 54, 63, 54, 56, 66, 53, 55, 63, 62, 74, 68, 41, 66, 59, 51, 60, 54, 59, 45, 49, 52, 55, 91, 59, 64, 55, 74, 119, 64, 61, 54, 62, 74, 67, 57, 64, 62, 54, 73, 54, 62, 75, 56, 62, 70, 65, 116, 61, 55, 63, 64, 88, 64, 61, 63, 61, 71, 87, 72, 63, 108, 61, 65, 51, 71, 57, 42, 75, 63, 64, 67, 46, 88, 69, 61, 57, 57, 105, 60, 48, 55, 53, 70, 99, 75, 45, 65, 69, 60, 64, 80, 56, 65, 61, 56, 49, 71, 63, 64, 73, 50, 63, 64, 79, 72, 54, 55, 88, 57, 61, 70, 65, 65, 71, 72, 64, 58, 63, 67, 66, 81, 62, 85, 83, 72, 75, 65, 73, 59, 62, 57, 78, 56, 55, 63, 67, 62, 103, 57, 69, 70, 57, 66, 68, 84, 49, 97, 97, 54, 82, 49, 66, 50, 82, 77, 55, 58, 62, 58, 46, 57, 58, 72, 75, 74, 62, 61, 69, 64, 58, 57, 51, 69, 59, 90, 75, 60, 72, 101, 80, 59, 65, 75, 60, 75, 62, 85, 67, 63, 76, 51, 57, 65, 57, 55, 77, 63, 63, 66, 53, 109, 62, 67, 87, 79, 56, 55, 92, 71, 67, 58, 46, 61, 54, 66, 65, 52, 64, 63, 69, 79, 53, 55, 71, 120, 54, 75, 56, 87, 71, 62, 65, 72, 60, 73, 67, 60, 68, 59, 78, 65, 65, 62, 68, 65, 57, 84, 55, 59, 52, 65, 53, 89, 56, 75, 80, 57, 72, 85, 58, 71, 91, 63, 80, 54, 97, 97, 60, 65, 77, 72, 64, 68, 81, 68, 76, 59, 61, 53, 65, 56, 62, 81, 79, 40, 98, 46, 68, 57, 96, 98, 43, 72, 62, 93, 91, 80, 85, 57, 57, 104, 92, 86, 55, 60, 72, 52, 63, 68, 69, 65, 95, 62, 54, 69, 57, 58, 52, 60, 72, 87, 53, 59, 57, 78, 70, 72, 97, 63, 100, 61, 60, 55, 59, 54, 62, 61, 62, 68, 63, 77, 88, 60, 69, 54, 94, 60, 68, 72, 69, 82, 59, 71, 59, 78, 77, 67, 68, 59, 60, 87, 82, 69, 71, 48, 55, 66, 67, 62, 55, 51, 55, 60, 68, 62, 60, 73, 64, 79, 85, 52, 74, 123, 73, 51, 69, 55, 50, 71, 56, 53, 61, 58, 40, 58, 68, 63, 55, 72, 56, 76, 84, 64, 68, 64, 66, 74, 74, 69, 46, 57, 60, 58, 57, 52, 80, 54, 85, 58, 51, 68, 55, 51, 55, 65, 91, 58, 57, 67, 66, 65, 53, 47, 58, 95, 74, 59, 77, 55, 59, 62, 73, 57, 57, 81, 54, 58, 85, 57, 69, 52, 57, 56, 73, 64, 52, 62, 57, 59, 65, 65, 57, 46, 77, 59, 63, 74, 52, 58, 52, 56, 63, 75, 46, 57, 61, 57, 54, 77, 70, 62, 58, 70, 54, 62, 49, 52, 55, 72, 65, 76, 51, 65, 96, 105, 61, 62, 88, 60, 80, 72, 63, 67, 75, 80, 76, 58, 48, 71, 46, 53, 60, 93, 49, 47, 62, 65, 57, 58, 61, 70, 57, 70, 72, 78, 57, 72, 60, 60, 66, 55, 62, 106, 51, 57, 61, 58, 101, 52, 67, 68, 54, 74, 55, 76, 79, 62, 59, 54, 62, 58, 80, 60, 105, 56, 42, 80, 45, 72, 72, 92, 86, 62, 49, 88, 68, 65, 58, 64, 65, 93, 72, 73, 86, 62, 74, 54, 49, 58, 67, 61, 61, 60, 70, 55, 98, 70, 57, 55, 57, 56, 67, 64, 63, 45, 70, 60, 54, 62, 74, 44, 62, 65, 64, 61, 49, 48, 62, 57, 77, 78, 53, 61, 46, 49, 70, 66, 57, 84, 68, 58, 51, 69, 87, 74, 58, 53, 76, 67, 86, 55, 81, 62, 55, 68, 54, 80, 82, 56, 48, 59, 83, 55, 50, 65, 58, 59, 56, 56, 80, 83, 66, 62, 56, 56, 76, 64, 64, 77, 57, 59, 72, 64, 67, 71, 68, 73, 48, 69, 67, 61, 75, 49, 65, 58, 88, 89, 56, 49, 84, 64, 57, 62, 58, 68, 67, 81, 49, 52, 63, 54, 81, 61, 53, 71, 56, 86, 56, 52, 86, 67, 49, 59, 50, 67, 63, 78, 62, 55, 61, 80, 65, 71, 80, 70, 46, 61, 54, 63, 89, 64, 72, 57, 78, 50, 54, 65, 48, 63, 65, 48, 67, 57, 100, 52, 60, 46, 53, 59, 59, 53, 58, 63, 80, 81, 78, 63, 47, 74, 58, 74, 83, 56, 57, 67, 50, 63, 51, 69, 58, 62, 49, 74, 54, 56, 63, 72, 65, 60, 67, 74, 52, 55, 47, 53, 58, 69, 59, 43, 71, 89, 63, 89, 63, 50, 84, 66, 61, 60, 65, 63, 55, 51, 65, 57, 56, 60, 63, 72, 98, 61, 53, 64, 58, 84, 97, 68, 54, 61, 57, 73, 47, 72, 82, 80, 56, 45, 64, 59, 91, 58, 103, 65, 75, 60, 63, 60, 59, 70, 62, 53, 58, 59, 65, 65, 57, 48, 94, 48, 57, 71, 87, 49, 64, 82, 
79, 69, 53, 63, 65, 51, 64, 59, 64, 65, 60, 54, 69, 53, 61, 52, 63, 83, 65, 49, 55, 66, 49, 60, 102, 68, 63, 65, 101, 69, 75, 67, 64, 83, 66, 92, 48, 57, 42, 56, 50, 64, 65, 57, 41, 71, 62, 78, 121, 55, 66, 61, 73, 65, 75, 55, 57, 69, 50, 73, 59, 81, 64, 57, 68, 67, 61, 67, 49, 72, 67, 55, 66, 78, 63, 69, 62, 63, 74, 66, 72, 71, 68, 64, 60, 71, 63, 55, 63, 60, 57, 55, 72, 57, 86, 83, 53, 61, 80, 79, 54, 74, 77, 60, 69, 65, 64, 50, 67, 101, 74, 74, 55, 68, 58, 67, 49, 76, 51, 64, 67, 60, 47, 63, 78, 74, 57, 70, 60, 63, 54, 68, 80, 64, 90, 55, 61, 79, 58, 56, 64, 68, 64, 59, 54, 55, 73, 81, 93, 59, 45, 63, 56, 55, 98, 53, 52, 54, 80, 64, 96, 65, 65, 50, 135, 59, 54, 72, 75, 61, 68, 64, 58, 67, 62, 72, 95, 99, 106, 77, 57, 43, 61, 72, 65, 65, 63, 54, 69, 66, 67, 61, 59, 50, 57, 56, 64, 66, 62, 66, 61, 67, 64, 71, 71, 96, 52, 72, 79, 64, 43, 57, 87, 54, 79, 55, 62, 60, 70, 56, 62, 58, 44, 80, 100, 69, 65, 68, 51, 99, 60, 62, 62, 67, 75, 63, 54, 54, 47, 53, 62, 107, 59, 75, 98, 70, 77, 63, 59, 76, 61, 61, 68, 65, 80, 61, 68, 62, 73, 76, 57, 61, 74, 70, 51, 55, 64, 70, 57, 53, 100, 60, 56, 68, 64, 67, 70, 72, 64, 64, 63, 65, 58, 62, 73, 59, 41, 74, 65, 77, 64, 56, 51, 66, 77, 95, 88, 72, 64, 63, 60, 49, 58, 56, 53, 76, 66, 55, 67, 64, 54, 58, 64, 54, 84, 106, 64, 59, 59, 92, 85, 66, 63, 63, 67, 56, 67, 85, 74, 56, 68, 75, 54, 61, 44, 61, 58, 81, 61, 60, 58, 47, 70, 47, 59, 51, 93, 70, 53, 67, 70, 75, 87, 69, 43, 68, 70, 67, 41, 71, 67, 60, 59, 55, 55, 62, 69, 67, 60, 62, 62, 70, 60, 63, 72, 96, 65, 64, 89, 53, 85, 73, 64, 53, 90, 51, 71, 65, 63, 66, 60, 76, 51, 64, 61, 82, 64, 60, 65, 69, 55, 50, 67, 67, 70, 62, 54, 44, 63, 62, 63, 34, 62, 79, 52, 70, 59, 64, 53, 57, 85, 62, 71, 58, 55, 70, 69, 70, 59, 51, 46, 50, 38, 58, 59, 59, 58, 71, 58, 79, 64, 80, 62, 65, 64, 63, 64, 111, 52, 71, 76, 57, 66, 63, 65, 71, 67, 75, 48, 63, 71, 54, 70, 54, 70, 72, 71, 76, 58, 58, 77, 60, 70, 61, 53, 70, 73, 71, 66, 66, 69, 52, 90, 70, 74, 70, 73, 64, 64, 58, 58, 65, 68, 49, 60, 63, 77, 69, 89, 65, 64, 78, 44, 73, 64, 69, 77, 56, 59, 62, 62, 71, 65, 52, 72, 65, 62, 73, 75, 54, 81, 63, 66, 61, 50, 64, 60, 58, 74, 76, 66, 75, 60, 107, 70, 62, 70, 70, 57, 54, 66, 58, 53, 67, 48, 64, 90, 86, 68, 70, 64, 57, 68, 68, 78, 69, 61, 65, 61, 73, 82, 79, 59, 54, 65, 69, 70, 59, 71, 68, 64, 68, 58, 94, 55, 56, 59, 62, 52, 46, 94, 53, 84, 88, 61, 67, 74, 55, 59, 63, 56, 61, 56, 53, 72, 55, 81, 58, 58, 52, 60, 63, 60, 63, 63, 49, 63, 97, 48, 64, 66, 73, 67, 87, 70, 67, 64, 61, 67, 74, 52, 49, 51, 66, 59, 75, 53, 105, 60, 68, 54, 49, 56, 78, 51, 73, 54, 76, 105, 60, 60, 56, 79, 57, 91, 60, 48, 56, 66, 45, 59, 65, 58, 81, 59, 72, 66, 82, 58, 56, 50, 62, 69, 57, 70, 61, 61, 62, 79, 72, 99, 73, 68, 56, 61, 58, 55, 51, 66, 63, 59, 49, 72, 52, 89, 70, 79, 67, 76, 79, 44, 77, 41, 52, 78, 72, 53, 56, 67, 79, 62, 60, 56, 108, 84, 39, 56, 57, 47, 62, 63, 71, 67, 87, 40, 69, 89, 73, 53, 49, 76, 90, 62, 53, 56, 76, 74, 71, 53, 78, 55, 81, 62, 52, 60, 78, 64, 72, 55, 65, 99, 74, 60, 49, 54, 62, 78, 73, 69, 93, 74, 54, 74, 51, 58, 70, 60, 55, 66, 68, 71, 54, 57, 98, 92, 90, 53, 90, 82, 63, 54, 69, 61, 64, 64, 68, 55, 69, 76, 70, 96, 46, 63, 70, 60, 71, 58, 62, 95, 53, 62, 70, 51, 84, 72, 66, 93, 56, 76, 57, 66, 63, 87, 62, 58, 93, 61, 59, 77, 48, 63, 55, 62, 69, 45, 69, 72, 65, 41, 57, 44, 58, 89, 59, 65, 62, 66, 66, 65, 64, 68, 75, 116, 65, 57, 55, 57, 54, 66, 70, 77, 63, 76, 76, 52, 69, 43, 60, 54, 92, 54, 55, 64, 54, 55, 62, 54, 77, 52, 65, 47, 56, 68, 60, 79, 79, 68, 55, 59, 71, 69, 46, 61, 61, 44, 75, 148, 64, 51, 65, 48, 70, 49, 77, 
80, 59, 57, 100, 65, 93, 60, 78, 87, 56, 56, 55, 159, 61, 49, 54, 54, 63, 68, 83, 94, 52, 70, 56, 70, 71, 67, 49, 95, 66, 68, 84, 63, 60, 49, 63, 55, 60, 82, 115, 57, 63, 73, 38, 74, 63, 74, 42, 56, 86, 71, 67, 66, 77, 53, 35, 70, 74, 70, 53, 85, 44, 58, 68, 80, 59, 66, 53, 76, 64, 57, 60, 56, 56, 73, 45, 63, 66, 67, 59, 67, 44, 83, 51, 50, 65, 44, 86, 76, 57, 70, 90, 77, 47, 87, 74, 64, 61, 56, 60, 51, 51, 41, 51, 43, 94, 110, 70, 64, 76, 84, 64, 72, 60, 71, 54, 63, 61, 74, 55, 66, 88, 110, 113, 52, 69, 83, 57, 58, 51, 96, 62, 88, 70, 83, 59, 57, 60, 96, 79, 60, 56, 59, 66, 67, 75, 69, 93, 97, 54, 79, 41, 61, 54, 64, 61, 60, 49, 65, 64, 55, 67, 61, 61, 82, 72, 62, 56, 56, 66, 46, 59, 57, 52, 89, 84, 78, 61, 71, 107, 57, 53, 65, 71, 57, 60, 38, 50, 122, 60, 63, 68, 96, 58, 62, 56, 71, 77, 89, 85, 66, 68, 57, 63, 80, 52, 71, 72, 66, 75, 61, 73, 78, 74, 53, 61, 61, 62, 59, 64, 66, 63, 59, 46, 59, 63, 57, 65, 55, 59, 64, 58, 63, 92, 44, 64, 64, 49, 68, 84, 47, 65, 51, 56, 41, 69, 59, 48, 60, 61, 74, 54, 58, 66, 62, 87, 57, 63, 75, 60, 72, 60, 59, 66, 64, 51, 53, 87, 60, 48, 61, 64, 67, 61, 52, 63, 79, 64, 64, 57, 61, 93, 61, 68, 61, 70, 54, 87, 66, 71, 69, 60, 69, 80, 74, 58, 59, 76, 70, 59, 57, 69, 51, 78, 75, 71, 61, 91, 68, 54, 63, 45, 80, 53, 65, 54, 125, 51, 75, 75, 67, 70, 63, 53, 49, 56, 36, 52, 74, 66, 67, 66, 49, 73, 46, 73, 54, 57, 71, 84, 69, 66, 63, 68, 66, 53, 63, 43, 62, 51, 72, 58, 51, 79, 59, 45, 65, 65, 60, 59, 56, 117, 67, 71, 70, 58, 63, 58, 71, 53, 63, 57, 67, 65, 67, 51, 67, 61, 69, 52, 53, 104, 64, 58, 60, 74, 60, 86, 66, 74, 59, 45, 59, 66, 77, 57, 60, 78, 93, 52, 61, 73, 63, 55, 62, 72, 74, 67, 63, 71, 69, 45, 59, 62, 56, 68, 59, 59, 51, 56, 72, 94, 68, 59, 87, 54, 57, 54, 76, 70, 76, 56, 63, 68, 67, 83, 78, 64, 76, 70, 75, 59, 76, 66, 77, 68, 82, 59, 78, 77, 59, 77, 48, 70, 72, 90, 73, 60, 57, 78, 57, 61, 76, 76, 60, 58, 42, 63, 77, 63, 68, 88, 55, 80, 56, 71, 40, 57, 77, 47, 68, 77, 64, 98, 73, 43, 80, 55, 61, 73, 61, 61, 51, 55, 73, 84, 36, 54, 62, 78, 63, 63, 73, 71, 54, 91, 62, 70, 71, 64, 46, 69, 61, 81, 71, 74, 51, 54, 54, 69, 67, 77, 64, 52, 72, 73, 102, 66, 66, 89, 50, 55, 52, 76, 49, 79, 61, 63, 69, 69, 77, 58, 61, 82, 72, 66, 62, 64, 71, 70, 50, 54, 52, 56, 51, 87, 55, 67, 74, 52, 51, 83, 78, 60, 49, 55, 59, 62, 55, 64, 51, 72, 84, 44, 88, 63, 71, 63, 92, 59, 55, 69, 65, 50, 53, 92, 60, 66, 95, 90, 82, 86, 71, 56, 74, 63, 55, 63, 61, 55, 77, 54, 92, 77, 66, 54, 61, 56, 69, 56, 76, 56, 64, 72, 71, 63, 65, 64, 66, 81, 69, 71, 54, 57, 61, 90, 55, 32, 64, 58, 49, 56, 67, 105, 61, 61, 66, 72, 64, 52, 109, 78, 87, 58, 84, 78, 56, 52, 61, 53, 46, 76, 45, 57, 68, 63, 61, 89, 62, 52, 68, 93, 54, 52, 75, 47, 67, 38, 62, 59, 62, 46, 61, 52, 64, 78, 56, 59, 74, 71, 67, 71, 56, 54, 62, 62, 61, 54, 55, 70, 69, 55, 52, 71, 56, 54, 71, 116, 47, 68, 71, 48, 73, 50, 82, 81, 61, 53, 74, 67, 78, 64, 63, 86, 79, 54, 79, 62, 60, 71, 73, 62, 80, 53, 45, 60, 53, 69, 76, 73, 68, 50, 77, 70, 61, 57, 65, 68, 56, 73, 61, 59, 61, 71, 51, 77, 70, 81, 68, 51, 64, 47, 51, 89, 67, 62, 59, 64, 66, 116, 56, 84, 55, 40, 65, 55, 63, 42, 73, 81, 68, 68, 57, 66, 75, 88, 63, 56, 90, 47, 57, 24, 43, 47, 77, 46, 62, 66, 65, 72, 69, 78, 63, 51, 71, 60, 51, 74, 65, 61, 46, 58, 70, 57, 62, 75, 42, 68, 60, 73, 48, 55, 46, 68, 53, 63, 69, 58, 76, 53, 65, 55, 72, 53, 73, 53, 93, 65, 54, 45, 54, 75, 54, 59, 60, 66, 46, 61, 48, 72, 84, 44, 72, 62, 56, 73, 59, 65, 49, 58, 64, 52, 66, 71, 56, 65, 54, 74, 48, 57, 93, 103, 58, 79, 66, 51, 72, 51, 77, 68, 62, 66, 65, 68, 58, 61, 74, 61, 81, 53, 85, 73, 60, 71, 
79, 95, 81, 68, 73, 41, 56, 82, 64, 67, 59, 49, 77, 59, 167, 53, 61, 58, 82, 46, 54, 70, 59, 65, 87, 65, 68, 43, 58, 59, 69, 54, 43, 57, 79, 61, 74, 64, 44, 66, 96, 92, 58, 56, 56, 62, 59, 49, 44, 64, 78, 43, 116, 65, 78, 44, 95, 62, 57, 53, 66, 64, 44, 66, 62, 63, 86, 54, 68, 44, 101, 80, 67, 73, 78, 58, 65, 72, 62, 60, 69, 84, 77, 53, 78, 62, 51, 90, 81, 65, 56, 65, 64, 65, 86, 70, 79, 58, 68, 85, 57, 89, 71, 61, 50, 60, 60, 57, 83, 59, 65, 70, 62, 56, 57, 69, 53, 64, 55, 53, 67, 77, 70, 52, 65, 65, 55, 61, 53, 47, 55, 65, 94, 61, 65, 64, 46, 61, 118, 49, 54, 59, 55, 63, 59, 65, 63, 86, 112, 71, 80, 59, 75, 71, 75, 55, 57, 64, 90, 49, 92, 64, 97, 49, 47, 44, 50, 59, 67, 52, 61, 52, 59, 73, 61, 57, 68, 47, 53, 76, 57, 65, 52, 60, 83, 63, 50, 66, 72, 90, 59, 50, 71, 65, 73, 62, 75, 73, 76, 66, 78, 78, 58, 56, 64, 66, 62, 76, 48, 63, 72, 81, 91, 53, 56, 45, 58, 75, 68, 56, 59, 85, 66, 66, 74, 63, 46, 75, 74, 59, 69, 66, 60, 56, 80, 97, 78, 64, 78, 80, 51, 55, 41, 54, 51, 71, 62, 40, 73, 79, 66, 77, 74, 60, 66, 65, 54, 71, 67, 75, 84, 67, 74, 69, 83, 56, 54, 73, 62, 50, 76, 62, 93, 53, 43, 56, 55, 57, 81, 76, 57, 54, 55, 61, 51, 68, 57, 83, 85, 73, 64, 96, 52, 80, 52, 80, 61, 45, 65, 56, 60, 54, 58, 69, 62, 67, 53, 68, 57, 58, 60, 71, 52, 84, 67, 103, 87, 51, 83, 64, 45, 69, 60, 61, 53, 52, 65, 72, 62, 52, 88, 84, 49, 58, 68, 83, 58, 70, 57, 67, 63, 80, 77, 54, 56, 68, 75, 73, 45, 89, 59, 94, 65, 69, 46, 62, 96, 105, 52, 89, 70, 50, 65, 63, 94, 76, 56, 51, 55, 41, 64, 77, 44, 66, 48, 61, 77, 70, 64, 56, 79, 47, 48, 63, 66, 70, 43, 66, 44, 63, 71, 70, 70, 62, 47, 50, 66, 74, 49, 57, 101, 80, 57, 77, 79, 55, 75, 94, 56, 59, 57, 59, 66, 68, 57, 74, 82, 54, 65, 144, 77, 85, 56, 47, 60, 57, 85, 49, 59, 62, 49, 63, 68, 87, 105, 72, 60, 71, 84, 60, 58, 60, 51, 93, 99, 61, 59, 59, 95, 73, 38, 59, 62, 41, 83, 49, 78, 61, 84, 89, 50, 75, 68, 54, 54, 62, 61, 66, 61, 55, 66, 53, 68, 53, 52, 98, 71, 51, 58, 53, 41, 58, 57, 65, 67, 50, 60, 61, 83, 64, 88, 35, 46, 79, 66, 60, 53, 48, 64, 60, 66, 72, 69, 74, 64, 77, 62, 81, 53, 67, 80, 58, 105, 49, 88, 68, 55, 85, 65, 54, 74, 80, 52, 62, 58, 68, 57, 58, 46, 47, 67, 76, 60, 79, 72, 65, 80, 64, 60, 105, 66, 52, 72, 58, 62, 68, 58, 58, 52, 74, 53, 57, 70, 63, 62, 50, 74, 71, 58, 79, 69, 58, 62, 85, 117, 61, 60, 68, 78, 73, 54, 56, 65, 77, 67, 57, 84, 61, 61, 56, 60, 46, 69, 51, 55, 59, 72, 110, 75, 60, 74, 79, 52, 49, 70, 64, 48, 65, 62, 65, 53, 56, 48, 38, 57, 62, 55, 69, 62, 73, 66, 91, 59, 69, 83, 62, 65, 83, 51, 69, 65, 66, 49, 71, 72, 72, 73, 68, 60, 63, 101, 63, 77, 68, 63, 68, 72, 49, 81, 57, 60, 82, 59, 71, 58, 75, 53, 73, 87, 90, 64, 66, 66, 70, 76, 87, 61, 60, 87, 52, 61, 82, 51, 75, 57, 72, 90, 70, 90, 48, 66, 43, 86, 74, 51, 49, 50, 72, 93, 58, 43, 52, 69, 56, 102, 49, 44, 54, 47, 83, 65, 73, 61, 59, 74, 59, 46, 69, 55, 61, 54, 53, 66, 74, 45, 70, 61, 73, 62, 72, 56, 88, 75, 58, 72, 56, 57, 67, 73, 73, 72, 50, 70, 58, 85, 50, 46, 75, 62, 117, 89, 106, 49, 60, 58, 63, 61, 53, 84, 55, 66, 67, 73, 64, 61, 86, 55, 59, 45, 78, 100, 56, 68, 68, 46, 54, 56, 35, 48, 83, 48, 50, 81, 56, 64, 65, 57, 70, 74, 60, 69, 57, 86, 56, 63, 46, 51, 54, 79, 63, 133, 72, 94, 61, 69, 71, 70, 62, 53, 72, 63, 73, 60, 50, 79, 52, 71, 89, 85, 54, 51, 66, 54, 59, 74, 68, 48, 68, 55, 63, 53, 64, 63, 72, 63, 74, 56, 57, 97, 70, 50, 62, 62, 85, 63, 61, 67, 50, 55, 50, 96, 70, 64, 82, 95, 69, 69, 72, 59, 66, 87, 74, 60, 83, 66, 79, 79, 68, 86, 61, 44, 95, 102, 61, 63, 65, 78, 74, 71, 57, 70, 70, 65, 53, 76, 70, 63, 72, 64, 54, 57, 53, 62, 54, 51, 62, 43, 58, 92, 48, 58, 85, 50, 
55, 70, 88, 66, 85, 66, 63, 56, 67, 58, 65, 68, 61, 53, 53, 70, 60, 57, 105, 80, 51, 52, 95, 65, 61, 48, 66, 80, 50, 64, 57, 40, 95, 69, 80, 60, 48, 106, 42, 48, 60, 83, 57, 96, 55, 66, 70, 84, 58, 74, 65, 51, 82, 79, 57, 78, 83, 76, 58, 46, 73, 61, 68, 68, 62, 88, 56, 59, 62, 45, 65, 70, 70, 64, 45, 57, 81, 52, 82, 64, 64, 73, 46, 73, 65, 67, 70, 60, 77, 70, 61, 56, 70, 57, 56, 65, 57, 62, 55, 36, 75, 54, 60, 55, 69, 64, 63, 47, 99, 72, 64, 64, 75, 63, 71, 71, 79, 66, 47, 80, 42, 57, 70, 63, 49, 70, 44, 54, 65, 67, 79, 56, 79, 63, 65, 73, 57, 101, 44, 55, 65, 96, 87, 63, 68, 70, 81, 74, 72, 58, 52, 53, 61, 69, 64, 81, 82, 56, 102, 61, 74, 67, 68, 27, 65, 74, 44, 67, 45, 62, 47, 63, 86, 77, 60, 40, 59, 63, 62, 64, 69, 86, 65, 72, 70, 58, 56, 59, 75, 48, 65, 44, 70, 87, 51, 53, 72, 71, 53, 62, 68, 67, 48, 61, 55, 68, 61, 54, 66, 60, 75, 67, 74, 79, 65, 78, 80, 79, 52, 56, 56, 62, 73, 75, 70, 70, 66, 103, 60, 70, 49, 76, 58, 74, 80, 43, 81, 50, 75, 49, 60, 54, 58, 67, 51, 71, 71, 82, 42, 66, 46, 82, 57, 69, 68, 69, 71, 50, 70, 89, 60, 87, 58, 76, 51, 70, 49, 64, 84, 61, 73, 66, 66, 121, 89, 72, 81, 34, 89, 77, 48, 79, 113, 50, 69, 58, 74, 75, 61, 72, 69, 79, 70, 135, 40, 84, 69, 75, 70, 62, 78, 67, 58, 39, 64, 76, 93, 59, 106, 46, 62, 59, 52, 62, 59, 78, 80, 53, 83, 60, 67, 70, 62, 58, 71, 61, 74, 91, 79, 107, 83, 87, 40, 62, 89, 45, 110, 46, 72, 80, 70, 40, 73, 62, 64, 122, 70, 59, 51, 73, 46, 65, 60, 74, 63, 73, 124, 46, 62, 46, 53, 55, 44, 76, 51, 75, 78, 57, 37, 46, 53, 81, 116, 78, 64, 77, 72, 81, 70, 70, 46, 46, 56, 60, 61, 64, 54, 46, 52, 79, 88, 89, 73, 78, 103, 62, 51, 58, 70, 55, 71, 54, 55, 52, 109, 78, 66, 62, 58, 60, 64, 49, 82, 68, 60, 73, 67, 46, 70, 58, 68, 55, 68, 78, 62, 57, 69, 45, 85, 90, 71, 49, 52, 52, 69, 48, 82, 48, 58, 58, 60, 72, 81, 49, 71, 69, 75, 46, 60, 71, 67, 59, 62, 70, 68, 70, 51, 77, 56, 69, 54, 52, 43, 33, 67, 107, 89, 71, 63, 59, 71, 53, 52, 41, 45, 61, 71, 61, 61, 69, 58, 79, 64, 73, 70, 66, 58, 56, 63, 51, 60, 83, 57, 54, 62, 69, 63, 51, 63, 56, 37, 84, 69, 77, 89, 69, 63, 60, 69, 80, 47, 55, 71, 68, 51, 57, 62, 59, 59, 54, 47, 104, 60, 67, 79, 68, 56, 84, 59, 75, 60, 78, 55, 57, 52, 97, 64, 64, 66, 52, 64, 64, 70, 59, 66, 88, 100, 39, 76, 65, 63, 70, 63, 55, 57, 60, 77, 86, 77, 93, 61, 57, 77, 46, 78, 142, 58, 63, 65, 51, 62, 46, 47, 53, 43, 40, 44, 66, 68, 74, 64, 56, 70, 51, 57, 60, 72, 55, 54, 57, 50, 67, 56, 45, 68, 44, 53, 106, 74, 77, 61, 69, 73, 56, 93, 49, 65, 95, 60, 77, 47, 92, 52, 64, 58, 77, 83, 67, 48, 61, 46, 63, 54, 75, 77, 81, 65, 67, 72, 70, 58, 82, 99, 58, 68, 73, 66, 64, 60, 55, 57, 52, 67, 52, 67, 80, 53, 47, 72, 60, 77, 68, 62, 50, 81, 47, 42, 69, 58, 68, 50, 56, 68, 67, 75, 72, 64, 67, 75, 71, 66, 88, 87, 67, 74, 46, 104, 50, 66, 76, 82, 80, 42, 72, 86, 71, 86, 109, 67, 52, 52, 101, 87, 69, 81, 53, 61, 61, 77, 78, 73, 83, 84, 63, 81, 51, 82, 73, 108, 68, 73, 53, 76, 69, 39, 58, 57, 114, 59, 73, 78, 71, 86, 55, 71, 57, 76, 91, 43, 51, 49, 70, 46, 127, 38, 86, 62, 72, 67, 61, 46, 68, 86, 43, 56, 69, 82, 83, 54, 58, 83, 65, 49, 60, 71, 84, 70, 58, 55, 66, 87, 76, 78, 55, 62, 64, 67, 79, 58, 71, 84, 90, 86, 83, 63, 40, 64, 86, 41, 69, 89, 58, 57, 78, 47, 55, 66, 57, 53, 87, 76, 57, 64, 89, 49, 54, 42, 59, 71, 45, 53, 52, 66, 48, 64, 69, 78, 104, 67, 109, 67, 58, 79, 65, 69, 57, 76, 60, 77, 47, 55, 49, 60, 69, 50, 49, 70, 63, 61, 57, 56, 72, 70, 77, 55, 65, 56, 45, 67, 116, 60, 64, 53, 84, 64, 57, 67, 59, 55, 72, 72, 56, 59, 79, 75, 58, 72, 53, 82, 54, 72, 58, 77, 64, 48, 54, 47, 49, 74, 91, 66, 63, 60, 63, 65, 69, 125, 58, 68, 68, 64, 
40, 62, 67, 50, 83, 94, 60, 64, 58, 82, 77, 79, 75, 54, 62, 66, 61, 46, 60, 87, 71, 68, 57, 68, 74, 71, 80, 72, 49, 59, 67, 51, 99, 106, 64, 61, 70, 51, 57, 89, 63, 71, 68, 72, 116, 64, 66, 67, 46, 43, 57, 67, 48, 70, 83, 45, 62, 75, 23, 68, 94, 72, 74, 66, 93, 63, 74, 45, 56, 83, 53, 78, 67, 92, 54, 71, 71, 67, 91, 46, 71, 90, 60, 58, 37, 55, 111, 49, 70, 57, 63, 58, 65, 84, 50, 84, 68, 84, 49, 74, 76, 56, 69, 47, 64, 113, 65, 73, 50, 50, 70, 59, 63, 53, 106, 58, 61, 76, 66, 54, 63, 59, 134, 77, 84, 68, 73, 58, 56, 66, 73, 59, 51, 46, 54, 60, 88, 41, 99, 73, 103, 77, 67, 66, 80, 64, 82, 90, 76, 63, 55, 42, 61, 52, 56, 54, 82, 56, 58, 57, 88, 76, 66, 77, 59, 74, 59, 55, 63, 47, 54, 53, 59, 62, 87, 75, 73, 66, 90, 72, 84, 67, 80, 43, 59, 59, 89, 103, 43, 64, 52, 70, 88, 63, 48, 69, 64, 86, 67, 78, 57, 58, 71, 72, 65, 84, 61, 70, 76, 62, 63, 65, 79, 57, 45, 62, 99, 57, 80, 57, 66, 77, 84, 54, 69, 52, 65, 64, 55, 74, 48, 50, 52, 69, 64, 69, 58, 52, 47, 47, 79, 76, 62, 65, 57, 92, 60, 94, 83, 64, 41, 80, 61, 51, 63, 85, 61, 64, 64, 61, 70, 55, 55, 92, 88, 52, 72, 61, 51, 74, 77, 55, 69, 36, 57, 68, 57, 67, 64, 69, 85, 61, 64, 57, 66, 78, 52, 51, 56, 49, 59, 72, 56, 57, 56, 56, 72, 65, 60, 64, 45, 78, 59, 62, 81, 73, 61, 82, 65, 52, 58, 63, 73, 87, 62, 52, 75, 66, 57, 70, 79, 61, 87, 71, 54, 48, 52, 67, 92, 70, 72, 91, 49, 55, 59, 56, 92, 75, 113, 62, 65, 71, 47, 49, 50, 46, 64, 63, 68, 78, 51, 57, 45, 66, 71, 70, 83, 53, 67, 78, 66, 52, 57, 47, 58, 72, 58, 87, 62, 66, 57, 60, 58, 56, 55, 64, 61, 67, 57, 64, 113, 67, 101, 70, 59, 43, 77, 58, 75, 57, 49, 54, 61, 69, 72, 50, 67, 83, 69, 57, 68, 68, 69, 70, 77, 79, 87, 48, 63, 66, 69, 62, 50, 66, 59, 47, 102, 94, 51, 49, 63, 52, 87, 73, 68, 59, 72, 51, 66, 59, 86, 58, 67, 52, 46, 60, 122, 67, 106, 73, 68, 70, 69, 77, 65, 79, 75, 53, 67, 61, 58, 68, 63, 55, 73, 56, 67, 64, 68, 61, 53, 57, 59, 74, 56, 64, 85, 65, 76, 57, 68, 78, 55, 75, 66, 58, 51, 60, 70, 67, 78, 52, 58, 56, 59, 64, 68, 52, 57, 53, 74, 35, 59, 53, 57, 72, 78, 82, 54, 96, 69, 81, 71, 92, 58, 64, 60, 58, 72, 44, 54, 56, 48, 79, 64, 89, 78, 59, 61, 61, 96, 51, 37, 81, 82, 62, 87, 53, 74, 45, 54, 61, 57, 101, 99, 50, 72, 67, 82, 65, 77, 73, 77, 53, 65, 63, 73, 47, 68, 72, 72, 56, 79, 77, 63, 58, 73, 91, 66, 60, 88, 54, 66, 48, 55, 74, 66, 65, 83, 59, 60, 89, 84, 70, 66, 60, 39, 78, 65, 69, 66, 116, 63, 72, 50, 80, 73, 67, 74, 47, 71, 50, 64, 57, 47, 55, 79, 51, 72, 78, 77, 65, 66, 53, 53, 59, 60, 45, 64, 54, 85, 62, 67, 83, 62, 64, 57, 53, 53, 63, 60, 46, 81, 74, 71, 64, 66, 56, 51, 67, 69, 64, 57, 89, 68, 54, 69, 61, 52, 71, 93, 65, 58, 63, 96, 60, 57, 81, 64, 74, 95, 58, 73, 55, 63, 49, 63, 67, 49, 78, 65, 68, 83, 45, 90, 57, 41, 63, 59, 77, 109, 63, 66, 75, 90, 63, 73, 77, 81, 59, 61, 69, 54, 54, 60, 63, 92, 68, 67, 62, 73, 65, 79, 55, 61, 59, 74, 76, 80, 45, 68, 66, 46, 69, 45, 62, 64, 73, 80, 50, 69, 93, 57, 62, 71, 73, 67, 59, 74, 57, 68, 61, 97, 77, 52, 50, 67, 68, 58, 59, 40, 60, 60, 66, 86, 64, 54, 59, 64, 63, 74, 54, 90, 63, 60, 78, 46, 68, 56, 60, 57, 73, 105, 68, 109, 71, 106, 71, 74, 58, 62, 59, 85, 48, 85, 56, 67, 79, 68, 73, 97, 41, 65, 52, 78, 61, 44, 67, 77, 61, 76, 67, 74, 64, 61, 68, 82, 65, 67, 53, 61, 75, 30, 75, 83, 66, 64, 93, 58, 84, 50, 78, 41, 65, 63, 68, 53, 69, 65, 66, 74, 44, 68, 43, 62, 64, 48, 104, 67, 77, 56, 61, 79, 71, 67, 42, 72, 63, 63, 90, 61, 61, 43, 74, 70, 72, 55, 54, 68, 54, 70, 56, 66, 48, 84, 56, 48, 63, 70, 80, 63, 79, 86, 97, 61, 67, 74, 69, 78, 56, 117, 70, 71, 52, 51, 60, 62, 68, 67, 40, 72, 60, 64, 61, 58, 62, 49, 47, 63, 79, 52, 60, 65, 
58, 51, 76, 53, 51, 95, 58, 65, 79, 49, 85, 87, 54, 79, 96, 59, 59, 48, 68, 76, 98, 103, 60, 62, 76, 55, 62, 77, 75, 57, 69, 57, 66, 84, 62, 57, 60, 77, 57, 65, 61, 81, 52, 67, 68, 89, 69, 66, 56, 70, 75, 86, 71, 50, 64, 59, 67, 56, 68, 66, 62, 48, 71, 60, 63, 65, 87, 67, 60, 115, 51, 47, 79, 90, 62, 88, 54, 52, 77, 53, 81, 66, 55, 53, 48, 60, 90, 65, 42, 56, 57, 54, 70, 51, 59, 73, 70, 59, 78, 114, 91, 80, 63, 59, 53, 85, 78, 59, 82, 60, 99, 89, 76, 69, 63, 51, 56, 59, 98, 59, 62, 85, 50, 58, 59, 73, 54, 59, 62, 101, 86, 57, 69, 56, 71, 59, 94, 69, 66, 55, 74, 55, 88, 69, 82, 55, 73, 74, 64, 59, 81, 78, 61, 92, 99, 57, 41, 66, 48, 68, 122, 74, 48, 60, 81, 70, 66, 88, 60, 54, 71, 49, 57, 74, 58, 73, 51, 75, 72, 60, 70, 51, 93, 70, 65, 116, 84, 56, 58, 62, 71, 62, 50, 69, 54, 53, 62, 57, 72, 68, 51, 80, 57, 64, 70, 68, 59, 66, 71, 81, 78, 60, 59, 60, 70, 84, 87, 65, 81, 69, 81, 68, 53, 83, 77, 79, 66, 61, 73, 47, 62, 63, 61, 79, 61, 41, 61, 65, 67, 66, 65, 42, 48, 68, 63, 33, 59, 68, 64, 63, 47, 67, 62, 62, 78, 100, 75, 54, 55, 59, 44, 69, 63, 91, 67, 71, 63, 50, 59, 70, 71, 52, 61, 75, 61, 64, 61, 84, 67, 66, 62, 67, 48, 97, 50, 59, 51, 52, 61, 50, 56, 47, 50, 67, 54, 58, 65, 65, 46, 63, 100, 53, 94, 70, 98, 66, 53, 113, 74, 74, 52, 79, 58, 78, 60, 73, 48, 77, 76, 87, 60, 70, 73, 56, 58, 54, 88, 74, 61, 58, 57, 68, 69, 63, 75, 67, 70, 56, 52, 62, 59, 80, 52, 66, 45, 68, 96, 71, 50, 84, 64, 59, 70, 76, 64, 51, 90, 65, 83, 45, 52, 90, 51, 100, 73, 67, 74, 85, 93, 61, 78, 61, 73, 77, 78, 68, 52, 70, 62, 70, 50, 83, 42, 58, 80, 78, 67, 57, 69, 69, 57, 48, 38, 81, 53, 60, 61, 73, 75, 41, 39, 76, 65, 57, 83, 70, 45, 60, 68, 56, 79, 57, 71, 81, 63, 56, 43, 68, 61, 62, 54, 65, 63, 80, 61, 57, 54, 51, 67, 57, 49, 38, 67, 79, 74, 56, 83, 50, 81, 64, 64, 60, 75, 42, 77, 60, 82, 48, 110, 81, 67, 57, 60, 62, 57, 58, 66, 47, 51, 44, 68, 63, 48, 66, 58, 81, 61, 65, 59, 87, 66, 45, 57, 78, 63, 64, 68, 103, 59, 143, 52, 59, 66, 62, 73, 68, 51, 70, 59, 68, 47, 62, 54, 46, 79, 68, 53, 79, 45, 78, 70, 78, 52, 58, 47, 54, 73, 63, 69, 60, 69, 48, 42, 46, 54, 59, 59, 70, 57, 65, 76, 109, 117, 62, 57, 59, 59, 55, 64, 80, 48, 79, 65, 46, 88, 53, 72, 50, 103, 62, 50, 36, 60, 59, 71, 80, 60, 46, 49, 75, 96, 60, 84, 46, 57, 95, 57, 83, 88, 41, 39, 61, 74, 79, 66, 67, 66, 58, 57, 50, 81, 58, 61, 94, 77, 55, 48, 94, 53, 70, 54, 54, 65, 75, 61, 76, 48, 76, 66, 76, 49, 66, 54, 60, 52, 68, 67, 65, 68, 71, 54, 69, 61, 43, 77, 52, 66, 96, 75, 95, 59, 87, 95, 52, 77, 49, 44, 58, 75, 59, 55, 85, 62, 79, 46, 85, 64, 60, 65, 65, 65, 63, 73, 65, 55, 52, 71, 69, 56, 41, 75, 55, 86, 69, 71, 84, 80, 73, 58, 66, 55, 49, 49, 64, 84, 56, 83, 78, 63, 62, 78, 85, 66, 61, 72, 78, 54, 64, 60, 77, 84, 78, 70, 82, 51, 84, 72, 70, 59, 55, 48, 62, 48, 74, 60, 51, 60, 71, 69, 40, 71, 55, 61, 78, 108, 79, 56, 62, 50, 85, 59, 66, 58, 69, 72, 58, 47, 58, 63, 60, 78, 74, 79, 105, 80, 62, 55, 53, 123, 70, 76, 79, 76, 60, 63, 67, 70, 61, 49, 86, 46, 75, 77, 51, 58, 68, 42, 66, 58, 71, 90, 100, 54, 64, 65, 72, 64, 43, 46, 64, 64, 81, 82, 84, 85, 85, 61, 63, 53, 69, 63, 53, 65, 87, 76, 53, 55, 73, 66, 42, 60, 69, 56, 53, 94, 64, 139, 70, 56, 66, 62, 96, 66, 57, 75, 80, 70, 56, 47, 91, 65, 64, 61, 63, 46, 73, 65, 55, 61, 37, 54, 60, 45, 59, 54, 51, 57, 62, 57, 55, 64, 56, 71, 75, 77, 63, 52, 42, 72, 81, 50, 79, 74, 53, 65, 51, 86, 86, 54, 60, 75, 63, 76, 55, 57, 58, 45, 46, 70, 96, 65, 90, 62, 54, 39, 67, 37, 62, 65, 46, 51, 61, 97, 51, 62, 65, 65, 37, 59, 76, 66, 54, 49, 74, 59, 66, 70, 57, 64, 62, 66, 45, 52, 71, 58, 84, 74, 78, 72, 65, 69, 78, 75, 
67, 73, 66, 81, 45, 64, 81, 53, 59, 65, 77, 54, 132, 49, 80, 67, 101, 65, 56, 62, 63, 54, 86, 66, 52, 66, 58, 60, 123, 54, 64, 57, 53, 78, 68, 60, 63, 73, 53, 65, 74, 60, 71, 57, 75, 69, 49, 69, 57, 65, 64, 63, 66, 58, 64, 79, 61, 74, 51, 62, 48, 63, 75, 57, 64, 62, 44, 57, 88, 53, 73, 61, 66, 61, 52, 106, 61, 77, 70, 89, 77, 59, 56, 61, 66, 71, 58, 69, 56, 60, 40, 78, 39, 47, 60, 55, 67, 68, 57, 66, 82, 62, 95, 67, 53, 51, 83, 68, 70, 65, 73, 67, 80, 59, 59, 74, 94, 81, 83, 68, 81, 54, 57, 64, 58, 56, 69, 77, 79, 60, 62, 82, 50, 59, 77, 63, 66, 97, 46, 68, 61, 47, 62, 61, 58, 38, 96, 75, 66, 65, 27, 94, 78, 69, 61, 59, 76, 51, 76, 60, 66, 93, 54, 58, 64, 61, 76, 71, 78, 74, 51, 85, 72, 55, 59, 56, 42, 48, 72, 55, 74, 51, 64, 61, 64, 57, 55, 63, 57, 65, 72, 61, 70, 74, 64, 51, 51, 62, 60, 68, 76, 64, 47, 78, 72, 73, 63, 76, 62, 50, 102, 59, 56, 83, 130, 68, 76, 71, 57, 54, 56, 45, 76, 56, 55, 56, 72, 71, 52, 49, 67, 63, 56, 84, 69, 71, 72, 61, 58, 49, 93, 53, 88, 74, 64, 76, 69, 68, 57, 70, 62, 91, 58, 47, 63, 72, 65, 53, 55, 84, 57, 60, 51, 66, 66, 72, 62, 62, 73, 93, 53, 102, 71, 78, 64, 76, 64, 48, 64, 109, 58, 58, 62, 60, 94, 88, 79, 74, 68, 76, 65, 65, 71, 63, 64, 80, 53, 57, 68, 50, 44, 126, 66, 44, 50, 80, 57, 69, 81, 76, 61, 65, 59, 43, 52, 43, 66, 64, 59, 104, 45, 71, 104, 83, 49, 50, 90, 94, 55, 57, 56, 66, 47, 69, 54, 68, 73, 55, 55, 97, 80, 63, 59, 59, 65, 63, 52, 63, 50, 58, 59, 70, 47, 53, 77, 59, 71, 78, 69, 61, 85, 62, 54, 73, 76, 54, 80, 79, 54, 53, 57, 63, 44, 91, 66, 66, 72, 59, 62, 108, 56, 51, 43, 58, 63, 70, 57, 73, 56, 61, 54, 49, 59, 66, 74, 100, 86, 67, 60, 53, 81, 46, 40, 45, 58, 77, 81, 79, 75, 69, 71, 60, 67, 55, 75, 54, 52, 64, 60, 60, 96, 70, 44, 63, 89, 84, 61, 62, 73, 93, 48, 63, 59, 84, 92, 74, 67, 69, 53, 43, 64, 82, 71, 51, 76, 51, 70, 85, 51, 95, 56, 74, 55, 94, 69, 76, 70, 47, 49, 78, 98, 64, 85, 62, 77, 88, 75, 59, 52, 74, 52, 47, 59, 49, 55, 71, 54, 66, 60, 66, 71, 43, 55, 48, 72, 56, 60, 62, 67, 63, 96, 62, 40, 61, 61, 56, 59, 71, 77, 81, 51, 56, 49, 61, 56, 64, 90, 81, 61, 79, 77, 70, 77, 81, 51, 64, 64, 61, 68, 67, 67, 53, 59, 84, 58, 59, 86, 76, 58, 52, 64, 76, 82, 70, 74, 34, 40, 81, 90, 60, 62, 83, 74, 57, 68, 55, 73, 67, 54, 66, 92, 56, 49, 70, 54, 66, 71, 65, 64, 53, 60, 55, 126, 66, 67, 54, 85, 74, 55, 55, 56, 56, 68, 64, 65, 71, 66, 69, 89, 51, 52, 65, 71, 70, 67, 68, 88, 67, 70, 105, 91, 85, 63, 75, 71, 61, 101, 72, 58, 40, 60, 68, 116, 51, 65, 69, 54, 86, 40, 55, 72, 60, 62, 67, 60, 70, 66, 58, 87, 70, 77, 51, 60, 67, 93, 55, 95, 73, 63, 62, 65, 67, 65, 73, 59, 75, 63, 63, 67, 55, 62, 47, 57, 48, 58, 53, 46, 56, 69, 84, 69, 73, 52, 77, 80, 53, 60, 58, 67, 68, 72, 67, 67, 77, 79, 52, 60, 81, 86, 65, 77, 86, 51, 67, 55, 85, 66, 59, 42, 87, 71, 64, 62, 61, 52, 65, 63, 71, 57, 69, 67, 72, 61, 55, 61, 52, 69, 77, 145, 104, 60, 60, 59, 54, 44, 62, 64, 43, 69, 65, 55, 68, 78, 57, 52, 71, 63, 47, 147, 59, 56, 69, 73, 52, 61, 64, 83, 58, 39, 67, 51, 59, 66, 36, 123, 60, 68, 76, 49, 69, 76, 59, 73, 59, 68, 98, 53, 74, 59, 56, 51, 71, 127, 85, 66, 59, 63, 58, 63, 78, 72, 60, 52, 86, 72, 64, 90, 74, 54, 81, 68, 64, 47, 50, 79, 92, 66, 50, 65, 68, 55, 82, 72, 58, 60, 76, 78, 53, 82, 55, 79, 50, 59, 66, 59, 67, 71, 66, 64, 57, 61, 75, 41, 54, 79, 64, 80, 59, 69, 36, 77, 77, 106, 75, 66, 60, 75, 70, 55, 62, 71, 62, 46, 55, 54, 64, 54, 59, 54, 88, 42, 59, 55, 56, 60, 105, 79, 82, 50, 72, 108, 53, 63, 57, 98, 86, 63, 57, 57, 61, 70, 39, 76, 65, 77, 54, 60, 80, 61, 70, 76, 71, 69, 93, 66, 61, 59, 64, 52, 56, 63, 73, 64, 73, 65, 65, 48, 80, 58, 53, 98, 83, 
93, 54, 89, 50, 38, 68, 73, 63, 58, 67, 59, 76, 123, 54, 62, 67, 102, 61, 88, 60, 62, 55, 85, 75, 59, 62, 94, 60, 67, 61, 64, 71, 88, 62, 56, 81, 75, 55, 46, 60, 78, 53, 69, 63, 42, 57, 69, 63, 53, 55, 72, 55, 48, 67, 60, 66, 59, 83, 42, 59, 66, 75, 71, 74, 55, 60, 73, 72, 45, 64, 69, 77, 50, 90, 69, 42, 52, 88, 69, 57, 56, 47, 52, 62, 65, 63, 58, 45, 59, 87, 82, 81, 52, 59, 61, 60, 52, 88, 101, 58, 79, 69, 68, 46, 69, 65, 46, 58, 69, 49, 62, 80, 78, 60, 56, 74, 53, 74, 63, 57, 70, 73, 41, 44, 68, 59, 52, 99, 49, 65, 71, 82, 56, 62, 67, 73, 72, 53, 56, 75, 76, 68, 67, 62, 70, 79, 67, 73, 62, 64, 61, 66, 109, 54, 80, 44, 91, 51, 68, 61, 62, 64, 60, 51, 73, 68, 55, 91, 66, 63, 46, 70, 73, 67, 79, 54, 51, 69, 87, 67, 81, 38, 52, 76, 67, 72, 67, 65, 67, 75, 50, 60, 78, 65, 66, 58, 49, 83, 86, 51, 57, 58, 85, 90, 64, 66, 58, 66, 62, 70, 61, 40, 64, 67, 68, 66, 52, 52, 40, 35, 45, 101, 79, 66, 54, 61, 45, 53, 61, 76, 63, 65, 71, 98, 61, 62, 65, 64, 61, 66, 49, 64, 61, 76, 70, 63, 61, 66, 84, 62, 62, 73, 62, 66, 71, 63, 64, 77, 58, 41, 80, 53, 71, 64, 73, 75, 55, 47, 68, 70, 61, 78, 47, 63, 77, 53, 69, 58, 89, 104, 57, 61, 68, 81, 61, 62, 67, 54, 74, 81, 65, 85, 75, 67, 79, 77, 63, 66, 62, 86, 74, 100, 59, 68, 55, 56, 56, 76, 76, 65, 71, 72, 48, 74, 80, 65, 55, 59, 74, 81, 48, 55, 83, 59, 65, 61, 68, 49, 72, 64, 70, 49, 56, 52, 55, 72, 54, 61, 78, 67, 87, 55, 60, 58, 57, 73, 64, 57, 84, 51, 95, 66, 87, 59, 81, 60, 74, 67, 70, 85, 55, 42, 69, 56, 67, 52, 70, 58, 57, 33, 60, 68, 54, 61, 70, 75, 60, 74, 68, 60, 61, 95, 81, 70, 79, 66, 52, 56, 55, 61, 64, 74, 68, 76, 55, 64, 89, 59, 61, 70, 56, 78, 73, 56, 64, 82, 57, 44, 77, 57, 70, 48, 51, 95, 56, 85, 65, 62, 52, 58, 46, 61, 71, 50, 61, 57, 68, 70, 51, 62, 62, 56, 59, 55, 82, 47, 56, 68, 47, 58, 65, 55, 70, 69, 56, 47, 47, 60, 56, 80, 50, 69, 58, 67, 65, 67, 57, 67, 65, 70, 59, 66, 70, 57, 80, 48, 65, 64, 77, 70, 50, 56, 54, 71, 40, 65, 66, 65, 56, 57, 57, 83, 72, 56, 88, 70, 61, 72, 56, 71, 60, 55, 79, 57, 72, 67, 54, 54, 46, 65, 68, 50, 63, 65, 66, 65, 59, 59, 78, 69, 48, 58, 55, 61, 48, 43, 74, 80, 97, 79, 88, 88, 54, 51, 62, 72, 77, 50, 59, 57, 57, 80, 66, 64, 57, 74, 55, 92, 80, 63, 87, 75, 79, 58, 90, 59, 54, 80, 62, 66, 83, 88, 64, 47, 62, 59, 53, 63, 77, 56, 76, 61, 56, 57, 65, 58, 82, 48, 75, 76, 49, 87, 59, 72, 47, 66, 56, 89, 46, 68, 86, 65, 84, 89, 60, 67, 62, 50, 84, 63, 65, 57, 70, 71, 61, 55, 68, 64, 68, 63, 57, 65, 85, 61, 77, 65, 91, 64, 67, 69, 53, 77, 53, 118, 61, 84, 82, 78, 56, 62, 78, 49, 61, 70, 52, 65, 62, 64, 76, 58, 81, 56, 68, 61, 62, 60, 50, 57, 62, 71, 69, 60, 65, 63, 64, 77, 76, 57, 60, 81, 72, 64, 63, 69, 71, 93, 75, 63, 67, 63, 69, 71, 51, 57, 53, 70, 52, 72, 73, 45, 52, 34, 55, 63, 60, 61, 70, 64, 64, 95, 50, 53, 54, 50, 66, 53, 67, 55, 66, 66, 56, 67, 60, 68, 98, 56, 53, 72, 76, 68, 56, 64, 54, 54, 93, 76, 83, 65, 95, 45, 62, 77, 55, 72, 57, 89, 76, 46, 65, 66, 68, 65, 64, 109, 68, 75, 64, 77, 56, 60, 63, 78, 71, 56, 76, 64, 57, 62, 39, 57, 98, 52, 64, 63, 101, 59, 72, 108, 48, 63, 59, 52, 60, 60, 57, 58, 54, 80, 64, 57, 94, 76, 60, 84, 55, 68, 75, 66, 64, 51, 63, 66, 64, 150, 70, 65, 76, 40, 49, 77, 55, 72, 62, 75, 64, 77, 60, 51, 64, 66, 56, 82, 60, 78, 74, 49, 76, 56, 71, 67, 58, 52, 76, 57, 52, 70, 53, 61, 52, 53, 55, 72, 61, 54, 67, 54, 50, 68, 70, 68, 42, 66, 67, 70, 62, 70, 63, 65, 60, 55, 51, 61, 69, 70, 74, 55, 85, 69, 78, 62, 126, 72, 66, 68, 56, 46, 48, 63, 60, 70, 68, 53, 72, 58, 78, 73, 69, 48, 61, 59, 141, 56, 69, 60, 60, 82, 46, 72, 72, 63, 67, 50, 72, 63, 66, 87, 62, 65, 41, 58, 52, 64, 81, 49, 79, 
71, 108, 86, 60, 57, 63, 81, 81, 52, 54, 70, 49, 56, 63, 86, 57, 58, 73, 53, 76, 71, 70, 61, 114, 61, 101, 57, 59, 101, 59, 62, 70, 68, 66, 83, 64, 57, 63, 104, 96, 63, 70, 68, 83, 52, 60, 66, 62, 67, 70, 65, 72, 63, 58, 72, 63, 83, 65, 48, 73, 61, 61, 57, 57, 80, 53, 54, 59, 52, 55, 77, 60, 63, 123, 82, 66, 64, 65, 49, 60, 73, 64, 60, 76, 62, 88, 81, 65, 65, 73, 62, 70, 54, 93, 51, 54, 54, 76, 67, 77, 60, 61, 65, 54, 65, 54, 59, 54, 53, 47, 72, 63, 55, 55, 57, 51, 72, 58, 67, 70, 70, 75, 60, 45, 63, 64, 66, 51, 75, 57, 61, 59, 65, 64, 45, 55, 64, 55, 77, 74, 59, 62, 49, 82, 79, 52, 70, 54, 45, 86, 65, 62, 77, 61, 42, 104, 50, 78, 62, 65, 53, 73, 83, 68, 63, 72, 62, 69, 65, 62, 61, 42, 63, 61, 58, 63, 42, 64, 64, 47, 50, 64, 66, 60, 60, 57, 92, 63, 60, 61, 72, 58, 63, 60, 63, 69, 93, 79, 57, 51, 68, 97, 86, 98, 66, 105, 76, 53, 76, 100, 58, 60, 44, 40, 66, 69, 78, 50, 58, 52, 52, 64, 56, 60, 55, 83, 61, 62, 70, 61, 83, 64, 74, 58, 81, 52, 62, 67, 77, 75, 74, 59, 70, 56, 73, 51, 88, 60, 63, 55, 73, 66, 62, 79, 52, 70, 75, 80, 52, 56, 72, 69, 72, 69, 52, 97, 124, 57, 62, 75, 84, 75, 49, 78, 54, 58, 51, 57, 82, 63, 69, 77, 72, 51, 52, 75, 64, 68, 66, 73, 71, 69, 56, 76, 50, 55, 62, 52, 46, 76, 72, 58, 50, 65, 67, 52, 67, 72, 64, 88, 64, 62, 57, 69, 65, 38, 58, 81, 64, 66, 53, 50, 64, 76, 61, 76, 49, 49, 68, 59, 90, 58, 71, 78, 57, 62, 57, 69, 91, 64, 62, 61, 63, 62, 52, 57, 64, 91, 54, 60, 67, 59, 68, 67, 48, 54, 84, 51, 71, 53, 72, 77, 59, 57, 58, 57, 70, 69, 55, 50, 59, 67, 69, 50, 56, 61, 56, 67, 61, 89, 67, 68, 56, 80, 72, 61, 53, 79, 66, 60, 92, 49, 57, 61, 62, 63, 85, 80, 54, 62, 70, 56, 56, 64, 59, 70, 54, 48, 42, 96, 65, 77, 75, 81, 63, 64, 63, 65, 63, 64, 58, 60, 80, 45, 59, 54, 71, 65, 92, 87, 50, 74, 71, 67, 66, 54, 65, 57, 73, 66, 57, 70, 63, 103, 81, 60, 63, 82, 57, 120, 75, 61, 63, 58, 70, 68, 56, 36, 55, 62, 48, 50, 60, 60, 59, 90, 58, 56, 52, 46, 53, 66, 58, 73, 60, 56, 59, 68, 66, 50, 59, 59, 78, 61, 90, 52, 70, 58, 69, 86, 61, 86, 51, 66, 66, 90, 64, 64, 73, 61, 56, 60, 87, 78, 57, 62, 79, 63, 51, 51, 53, 68, 62, 82, 73, 74, 65, 61, 62, 61, 53, 78, 62, 64, 68, 47, 61, 55, 53, 68, 65, 42, 77, 68, 60, 80, 57, 72, 61, 51, 84, 58, 56, 61, 55, 65, 60, 53, 47, 67, 79, 90, 120, 62, 63, 55, 55, 54, 63, 48, 69, 74, 62, 64, 74, 84, 63, 88, 66, 73, 63, 47, 89, 67, 57, 75, 68, 60, 50, 55, 49, 74, 76, 63, 55, 80, 54, 79, 60, 61, 78, 47, 72, 46, 52, 87, 95, 92, 48, 73, 72, 55, 64, 74, 66, 98, 63, 62, 67, 106, 70, 59, 62, 59, 55, 60, 55, 65, 55, 75, 85, 61, 48, 63, 95, 67, 70, 57, 64, 72, 67, 68, 63, 50, 62, 63, 60, 65, 61, 53, 57, 53, 79, 74, 60, 53, 62, 63, 59, 63, 86, 66, 45, 47, 63, 55, 75, 57, 72, 53, 68, 68, 69, 112, 80, 51, 41, 59, 50, 47, 65, 62, 65, 53, 79, 69, 111, 75, 64, 70, 82, 55, 57, 71, 64, 82, 64, 83, 71, 74, 56, 73, 67, 57, 69, 42, 59, 78, 55, 60, 57, 52, 49, 82, 63, 74, 61, 50, 57, 48, 87, 67, 61, 74, 57, 66, 75, 59, 65, 66, 82, 54, 124, 60, 67, 61, 62, 62, 87, 53, 77, 55, 70, 56, 63, 64, 53, 71, 88, 59, 57, 76, 62, 71, 65, 63, 67, 70, 54, 85, 65, 54, 55, 54, 97, 64, 78, 59, 73, 56, 98, 53, 58, 49, 57, 51, 59, 71, 67, 61, 57, 87, 66, 52, 72, 63, 57, 61, 73, 54, 58, 73, 55, 59, 68, 60, 44, 63, 79, 64, 70, 69, 62, 68, 36, 53, 71, 53, 64, 76, 66, 67, 67, 61, 88, 49, 53, 55, 68, 62, 64, 63, 42, 73, 58, 71, 65, 68, 54, 44, 73, 78, 62, 69, 80, 68, 74, 59, 50, 63, 60, 70, 59, 63, 63, 66, 57, 71, 71, 72, 60, 59, 50, 119, 67, 83, 62, 59, 96, 82, 62, 54, 64, 62, 79, 59, 57, 66, 45, 88, 46, 69, 71, 63, 72, 78, 67, 51, 39, 70, 87, 64, 61, 55, 53, 59, 71, 63, 77, 70, 57, 64, 54, 71, 
111, 75, 71, 62, 62, 74, 68, 57, 63, 60, 58, 61, 67, 78, 66, 74, 73, 60, 66, 73, 52, 52, 61, 65, 59, 59, 49, 97, 71, 62, 69, 52, 77, 56, 63, 77, 69, 58, 92, 57, 59, 66, 53, 68, 55, 89, 56, 67, 60, 74, 71, 69, 68, 62, 52, 55, 79, 58, 81, 57, 58, 64, 52, 50, 82, 52, 66, 69, 64, 72, 67, 77, 109, 57, 64, 51, 69, 67, 60, 62, 71, 61, 68, 88, 53, 70, 63, 95, 52, 75, 72, 68, 81, 57, 60, 100, 65, 67, 58, 69, 50, 71, 58, 54, 59, 82, 99, 59, 65, 67, 72, 66, 92, 58, 75, 85, 48, 83, 80, 78, 59, 63, 77, 80, 117, 63, 72, 69, 69, 57, 58, 67, 50, 81, 73, 59, 58, 92, 61, 66, 63, 55, 76, 55, 57, 72, 66, 41, 51, 73, 65, 60, 57, 53, 83, 65, 89, 64, 72, 89, 66, 63, 54, 71, 57, 65, 58, 82, 52, 57, 87, 73, 75, 59, 62, 87, 60, 56, 74, 65, 71, 72, 60, 39, 72, 59, 64, 64, 58, 54, 56, 65, 56, 72, 64, 58, 57, 69, 78, 99, 66, 77, 48, 68, 57, 67, 81, 59, 44, 74, 56, 57, 85, 66, 76, 55, 60, 85, 56, 52, 70, 64, 64, 70, 75, 66, 61, 61, 68, 51, 56, 72, 51, 62, 62, 98, 69, 51, 83, 92, 67, 43, 55, 62, 48, 66, 66, 81, 54, 63, 78, 72, 49, 77, 66, 52, 54, 64, 61, 53, 42, 68, 51, 58, 59, 59, 58, 92, 76, 64, 65, 62, 72, 62, 44, 66, 57, 110, 72, 68, 90, 79, 45, 63, 58, 66, 54, 63, 79, 107, 90, 78, 71, 75, 52, 71, 83, 66, 61, 64, 43, 60, 60, 62, 64, 51, 46, 48, 66, 62, 54, 112, 54, 92, 56, 79, 91, 45, 67, 58, 52, 61, 81, 85, 61, 63, 54, 70, 71, 68, 85, 56, 60, 53, 62, 64, 76, 72, 67, 59, 75, 81, 56, 56, 45, 58, 54, 68, 77, 67, 63, 80, 72, 63, 51, 70, 64, 89, 63, 64, 62, 61, 67, 82, 53, 59, 62, 59, 67, 70, 65, 67, 64, 65, 77, 99, 75, 47, 80, 74, 81, 131, 61, 71, 48, 63, 58, 112, 52, 69, 94, 72, 84, 70, 69, 56, 81, 67, 62, 66, 89, 71, 74, 71, 64, 65, 54, 68, 63, 51, 56, 73, 47, 70, 67, 76, 62, 74, 72, 65, 50, 70, 62, 66, 60, 58, 88, 68, 67, 61, 102, 75, 52, 85, 71, 48, 71, 57, 74, 55, 43, 68, 47, 53, 78, 81, 71, 70, 67, 64, 56, 92, 55, 67, 75, 63, 42, 62, 61, 69, 69, 78, 55, 66, 66, 64, 81, 86, 53, 64, 83, 80, 77, 62, 60, 61, 80, 58, 66, 71, 71, 51, 72, 55, 68, 64, 56, 114, 66, 54, 67, 59, 63, 85, 63, 52, 88, 98, 70, 59, 82, 58, 90, 73, 54, 68, 71, 60, 63, 57, 53, 64, 51, 69, 73, 59, 55, 72, 54, 46, 64, 51, 40, 67, 71, 61, 54, 102, 61, 57, 65, 60, 62, 66, 65, 80, 64, 65, 73, 116, 49, 92, 67, 62, 60, 45, 60, 58, 76, 59, 57, 58, 70, 94, 66, 65, 83, 81, 77, 70, 69, 57, 64, 68, 63, 57, 72, 82, 58, 53, 67, 55, 57, 75, 57, 56, 92, 62, 54, 64, 51, 74, 66, 79, 69, 62, 48, 51, 65, 82, 84, 70, 60, 78, 71, 64, 65, 72, 69, 68, 67, 72, 60, 95, 89, 57, 71, 54, 53, 46, 74, 68, 52, 57, 54, 66, 82, 61, 57, 50, 84, 64, 58, 64, 84, 69, 47, 52, 63, 70, 88, 46, 57, 55, 68, 45, 70, 69, 71, 60, 73, 54, 67, 48, 59, 69, 52, 51, 76, 61, 51, 85, 60, 78, 48, 45, 71, 54, 81, 50, 70, 83, 82, 59, 76, 78, 58, 54, 56, 57, 85, 86, 91, 56, 76, 55, 47, 86, 52, 66, 67, 48, 59, 61, 59, 69, 63, 66, 76, 80, 61, 60, 57, 61, 71, 57, 59, 64, 93, 62, 84, 48, 65, 59, 57, 87, 60, 58, 73, 73, 116, 58, 82, 69, 58, 53, 75, 80, 60, 77, 58, 60, 111, 68, 87, 88, 51, 87, 70, 52, 76, 69, 68, 87, 78, 87, 79, 63, 58, 62, 52, 69, 61, 54, 104, 68, 54, 93, 78, 76, 66, 75, 53, 75, 69, 66, 56, 60, 54, 69, 72, 62, 58, 47, 74, 75, 68, 57, 56, 99, 53, 75, 64, 61, 68, 54, 57, 52, 77, 91, 48, 85, 83, 63, 64, 63, 61, 58, 63, 54, 54, 58, 61, 75, 69, 93, 69, 59, 66, 66, 73, 79, 76, 56, 69, 75, 54, 92, 57, 41, 61, 61, 59, 80, 58, 61, 68, 58, 60, 73, 56, 52, 52, 88, 55, 57, 113, 73, 62, 86, 56, 69, 61, 58, 78, 53, 79, 67, 76, 77, 62, 49, 71, 72, 60, 55, 59, 64, 51, 114, 61, 72, 52, 56, 70, 56, 57, 74, 67, 73, 60, 67, 66, 59, 71, 67, 64, 73, 83, 48, 73, 63, 71, 53, 73, 59, 59, 69, 54, 47, 44, 48, 67, 55, 
76, 46, 62, 68, 51, 74, 76, 58, 75, 66, 68, 64, 51, 73, 60, 64, 57, 50, 69, 61, 70, 52, 48, 69, 51, 71, 66, 61, 72, 68, 57, 71, 63, 54, 62, 57, 64, 74, 74, 58, 79, 59, 69, 42, 60, 55, 52, 82, 53, 58, 69, 55, 57, 48, 55, 77, 69, 85, 73, 65, 62, 68, 59, 92, 54, 71, 69, 70, 66, 66, 65, 53, 70, 61, 68, 76, 39, 89, 86, 85, 70, 64, 60, 60, 64, 74, 97, 63, 56, 69, 65, 66, 65, 72, 77, 64, 74, 70, 56, 76, 63, 62, 52, 87, 41, 57, 74, 90, 83, 68, 64, 60, 71, 53, 68, 76, 56, 49, 111, 105, 125, 59, 70, 79, 67, 78, 53, 55, 69, 68, 55, 116, 85, 68, 83, 57, 46, 56, 72, 63, 80, 63, 73, 67, 59, 79, 61, 49, 81, 57, 70, 113, 81, 61, 61, 62, 63, 53, 70, 48, 61, 69, 62, 113, 62, 60, 54, 43, 38, 64, 83, 83, 73, 54, 54, 40, 79, 57, 73, 44, 65, 61, 57, 81, 59, 89, 64, 47, 44, 56, 68, 77, 44, 68, 57, 38, 75, 47, 68, 64, 61, 58, 72, 65, 57, 80, 50, 61, 85, 63, 65, 46, 72, 80, 60, 62, 89, 47, 68, 64, 70, 71, 64, 114, 63, 51, 67, 68, 59, 50, 54, 56, 62, 49, 87, 83, 71, 44, 82, 56, 52, 39, 65, 55, 58, 56, 47, 57, 81, 74, 84, 52, 50, 81, 44, 76, 77, 59, 69, 74, 73, 86, 61, 60, 59, 61, 73, 53, 53, 69, 82, 65, 88, 56, 59, 61, 86, 70, 95, 90, 79, 53, 79, 115, 60, 82, 40, 79, 74, 78, 50, 90, 53, 63, 58, 44, 75, 61, 64, 64, 68, 86, 57, 87, 64, 66, 54, 60, 61, 62, 104, 50, 62, 56, 45, 78, 62, 63, 61, 70, 68, 62, 60, 100, 50, 50, 76, 52, 52, 58, 59, 50, 56, 58, 97, 87, 70, 64, 109, 50, 52, 48, 74, 68, 59, 62, 70, 60, 60, 77, 66, 46, 47, 68, 139, 51, 62, 62, 65, 43, 85, 53, 67, 61, 60, 52, 45, 50, 67, 59, 59, 73, 71, 54, 63, 58, 51, 63, 56, 63, 71, 52, 64, 53, 70, 63, 67, 65, 80, 54, 49, 67, 59, 65, 78, 67, 76, 55, 60, 56, 59, 101, 76, 66, 55, 67, 63, 50, 70, 75, 65, 71, 62, 55, 63, 74, 73, 73, 86, 66, 69, 74, 79, 95, 73, 59, 57, 52, 73, 48, 87, 50, 66, 68, 65, 48, 98, 82, 65, 70, 68, 60, 58, 72, 56, 76, 60, 65, 59, 56, 63, 71, 66, 56, 75, 48, 50, 71, 56, 67, 57, 76, 70, 71, 59, 65, 109, 65, 76, 54, 109, 54, 57, 63, 65, 61, 103, 59, 58, 47, 67, 63, 62, 72, 48, 66, 52, 41, 48, 68, 70, 59, 71, 70, 65, 56, 92, 80, 71, 64, 63, 80, 67, 64, 63, 65, 60, 78, 52, 71, 65, 65, 63, 79, 71, 91, 41, 78, 57, 67, 60, 78, 74, 51, 45, 59, 67, 59, 89, 53, 74, 53, 71, 64, 70, 62, 58, 59, 54, 59, 62, 62, 60, 60, 51, 53, 70, 49, 91, 72, 64, 68, 64, 54, 64, 68, 68, 113, 46, 60, 66, 53, 69, 44, 71, 88, 118, 76, 63, 76, 57, 62, 63, 43, 105, 75, 54, 31, 65, 55, 68, 49, 60, 59, 64, 91, 78, 72, 66, 62, 86, 66, 91, 51, 95, 81, 65, 54, 75, 77, 63, 66, 66, 49, 62, 85, 59, 52, 70, 58, 76, 57, 71, 66, 65, 61, 78, 75, 54, 69, 62, 45, 54, 66, 58, 79, 71, 58, 52, 62, 59, 80, 61, 64, 53, 81, 67, 58, 54, 64, 60, 56, 92, 60, 68, 57, 64, 74, 59, 79, 59, 124, 54, 75, 74, 54, 61, 60, 40, 66, 72, 77, 84, 50, 73, 60, 75, 55, 69, 79, 76, 49, 69, 66, 78, 62, 59, 96, 83, 65, 73, 62, 73, 44, 59, 67, 49, 45, 47, 80, 70, 41, 60, 72, 68, 47, 55, 82, 56, 40, 63, 62, 53, 65, 55, 75, 57, 56, 63, 72, 72, 63, 72, 49, 93, 83, 61, 68, 57, 55, 49, 76, 65, 96, 47, 70, 70, 68, 81, 56, 52, 75, 58, 67, 82, 66, 62, 65, 59, 62, 82, 50, 63, 76, 69, 62, 90, 68, 64, 86, 60, 92, 88, 70, 50, 78, 57, 61, 67, 86, 85, 53, 62, 54, 141, 81, 77, 74, 70, 62, 74, 61, 58, 59, 50, 61, 75, 80, 89, 66, 79, 58, 66, 89, 68, 60, 49, 45, 69, 56, 62, 56, 61, 118, 75, 63, 61, 55, 72, 95, 59, 53, 62, 60, 64, 62, 68, 71, 72, 82, 65, 77, 52, 63, 53, 100, 52, 67, 68, 67, 53, 65, 54, 57, 70, 114, 63, 59, 61, 51, 77, 65, 66, 54, 82, 66, 90, 61, 43, 59, 60, 63, 62, 67, 76, 57, 59, 82, 178, 46, 75, 49, 71, 54, 48, 57, 68, 65, 82, 66, 46, 94, 51, 79, 58, 87, 67, 64, 73, 59, 76, 78, 72, 97, 58, 85, 42, 73, 85, 75, 75, 80, 
87, 56, 88, 53, 86, 77, 89, 73, 69, 49, 70, 87, 53, 82, 64, 51, 63, 75, 55, 64, 66, 53, 45, 73, 55, 68, 58, 61, 64, 139, 71, 63, 62, 64, 70, 49, 63, 71, 91, 49, 69, 76, 62, 59, 60, 79, 47, 79, 73, 58, 70, 73, 69, 65, 65, 73, 62, 72, 63, 68, 69, 72, 64, 53, 70, 66, 48, 60, 70, 44, 70, 70, 55, 46, 58, 99, 60, 65, 80, 64, 50, 50, 75, 91, 71, 63, 64, 75, 58, 63, 61, 60, 61, 77, 64, 57, 79, 86, 61, 66, 70, 62, 66, 55, 68, 98, 55, 83, 67, 53, 59, 69, 56, 62, 56, 62, 71, 38, 68, 62, 120, 40, 84, 58, 62, 78, 75, 70, 63, 55, 56, 54, 67, 47, 64, 82, 69, 56, 53, 95, 52, 68, 40, 54, 59, 68, 56, 88, 49, 55, 89, 44, 57, 78, 66, 56, 82, 59, 62, 47, 50, 59, 77, 63, 56, 75, 71, 70, 54, 72, 55, 66, 67, 67, 66, 71, 54, 67, 76, 58, 43, 93, 72, 61, 73, 75, 65, 63, 62, 65, 76, 56, 83, 76, 63, 51, 58, 47, 70, 71, 67, 47, 60, 49, 66, 77, 55, 64, 58, 67, 46, 63, 81, 63, 65, 39, 54, 66, 56, 97, 82, 102, 68, 62, 70, 62, 65, 75, 59, 54, 69, 103, 38, 54, 76, 97, 63, 70, 64, 63, 58, 80, 69, 45, 75, 61, 53, 65, 63, 54, 65, 60, 74, 72, 72, 62, 66, 72, 56, 73, 120, 119, 90, 81, 88, 56, 91, 73, 79, 87, 81, 62, 59, 73, 72, 38, 82, 53, 82, 44, 50, 56, 77, 69, 92, 70, 65, 57, 60, 72, 62, 68, 58, 68, 48, 80, 59, 56, 41, 62, 74, 68, 51, 34, 63, 114, 53, 54, 54, 62, 88, 72, 56, 73, 95, 89, 75, 53, 79, 59, 58, 82, 64, 57, 63, 59, 44, 75, 72, 64, 63, 71, 73, 51, 61, 63, 38, 65, 71, 61, 57, 47, 64, 64, 49, 62, 76, 86, 72, 84, 63, 78, 65, 60, 58, 67, 46, 63, 100, 71, 90, 66, 51, 72, 58, 66, 50, 54, 65, 89, 73, 45, 36, 74, 68, 61, 56, 95, 70, 47, 70, 63, 50, 63, 62, 65, 62, 65, 63, 56, 81, 65, 59, 41, 71, 69, 62, 77, 83, 62, 55, 63, 76, 60, 48, 69, 53, 54, 60, 92, 56, 62, 79, 65, 57, 61, 67, 53, 66, 84, 55, 85, 55, 76, 61, 92, 61, 66, 58, 56, 75, 68, 64, 71, 59, 71, 53, 68, 72, 56, 71, 65, 78, 71, 67, 82, 78, 47, 47, 53, 68, 74, 96, 73, 56, 58, 66, 68, 75, 70, 64, 87, 65, 49, 69, 72, 60, 57, 75, 48, 77, 75, 71, 61, 83, 58, 83, 74, 55, 58, 71, 62, 52, 63, 59, 53, 75, 51, 59, 51, 59, 62, 57, 71, 48, 126, 132, 60, 64, 64, 66, 63, 61, 61, 60, 52, 67, 70, 56, 51, 73, 77, 76, 64, 70, 92, 68, 71, 48, 59, 49, 59, 81, 61, 55, 65, 68, 58, 56, 64, 61, 74, 68, 70, 57, 48, 70, 65, 58, 71, 62, 65, 91, 76, 69, 66, 78, 58, 52, 66, 45, 72, 55, 76, 50, 45, 48, 63, 91, 92, 100, 61, 57, 75, 61, 69, 44, 64, 74, 71, 61, 59, 79, 76, 65, 70, 78, 73, 80, 58, 63, 81, 77, 67, 60, 60, 45, 91, 62, 86, 63, 57, 45, 76, 59, 64, 49, 70, 60, 60, 57, 62, 63, 60, 55, 63, 69, 64, 117, 71, 60, 56, 77, 39, 67, 47, 54, 58, 78, 64, 51, 61, 64, 58, 82, 83, 60, 64, 65, 81, 53, 67, 76, 43, 85, 54, 52, 74, 56, 88, 80, 66, 53, 71, 64, 82, 59, 56, 62, 72, 75, 61, 57, 61, 49, 51, 90, 49, 81, 55, 67, 108, 62, 69, 57, 57, 57, 46, 55, 72, 47, 59, 69, 54, 86, 66, 54, 71, 47, 76, 52, 54, 101, 55, 68, 62, 61, 76, 58, 77, 64, 48, 70, 54, 39, 65, 54, 76, 65, 105, 69, 56, 70, 55, 74, 44, 78, 69, 113, 84, 76, 66, 74, 74, 56, 70, 79, 95, 74, 47, 52, 29, 60, 67, 55, 59, 84, 58, 90, 73, 60, 74, 97, 63, 60, 69, 73, 81, 79, 68, 56, 63, 50, 73, 72, 46, 72, 81, 55, 71, 76, 63, 85, 68, 67, 68, 54, 75, 92, 66, 56, 62, 78, 70, 71, 43, 53, 52, 67, 77, 79, 65, 73, 69, 113, 65, 59, 74, 54, 43, 62, 53, 57, 77, 63, 53, 65, 63, 73, 63, 67, 57, 64, 85, 71, 76, 41, 64, 51, 66, 66, 62, 55, 59, 71, 60, 78, 58, 69, 61, 54, 67, 70, 57, 68, 66, 54, 70, 50, 70, 60, 67, 89, 45, 79, 61, 46, 72, 67, 54, 69, 57, 67, 77, 82, 54, 57, 76, 76, 61, 79, 67, 57, 47, 46, 71, 74, 76, 46, 45, 62, 71, 53, 75, 71, 59, 57, 66, 72, 44, 67, 57, 70, 74, 64, 87, 61, 49, 65, 56, 62, 74, 53, 63, 71, 63, 59, 69, 72, 61, 55, 53, 56, 57, 
59, 81, 80, 87, 38, 67, 69, 57, 50, 88, 79, 49, 50, 77, 63, 58, 67, 92, 48, 69, 56, 76, 91, 57, 67, 51, 76, 77, 49, 77, 66, 60, 70, 73, 55, 50, 106, 46, 57, 66, 47, 94, 54, 51, 71, 51, 61, 57, 55, 67, 70, 134, 75, 60, 53, 54, 53, 75, 76, 53, 54, 64, 71, 47, 66, 103, 59, 41, 53, 59, 58, 68, 63, 77, 58, 69, 55, 48, 57, 55, 69, 64, 55, 71, 54, 52, 64, 67, 82, 54, 73, 54, 63, 52, 66, 69, 65, 61, 51, 56, 57, 54, 63, 48, 72, 75, 154, 64, 71, 58, 68, 59, 68, 68, 49, 65, 57, 65, 73, 87, 57, 40, 55, 87, 50, 46, 58, 61, 94, 63, 66, 40, 48, 68, 57, 60, 87, 74, 89, 64, 51, 66, 66, 69, 74, 85, 64, 68, 69, 73, 52, 82, 66, 63, 78, 81, 56, 70, 68, 62, 51, 84, 55, 47, 54, 61, 78, 52, 54, 125, 53, 70, 67, 71, 71, 61, 50, 56, 91, 50, 57, 79, 81, 48, 75, 73, 53, 69, 72, 69, 66, 76, 76, 57, 39, 53, 47, 53, 58, 53, 59, 37, 61, 69, 63, 96, 62, 53, 72, 63, 75, 57, 31, 71, 63, 71, 52, 85, 50, 64, 61, 63, 71, 63, 73, 61, 59, 73, 73, 70, 60, 60, 54, 46, 63, 68, 52, 70, 48, 58, 67, 53, 70, 59, 78, 68, 67, 84, 73, 113, 54, 70, 91, 68, 72, 59, 54, 84, 50, 77, 117, 88, 76, 70, 55, 91, 73, 52, 56, 70, 88, 55, 63, 54, 65, 72, 81, 70, 58, 127, 69, 49, 59, 67, 54, 60, 73, 103, 91, 58, 76, 55, 42, 56, 50, 51, 63, 66, 90, 76, 44, 72, 55, 59, 57, 79, 80, 67, 58, 92, 62, 74, 60, 54, 71, 51, 64, 57, 49, 60, 69, 64, 87, 72, 68, 69, 59, 69, 72, 84, 68, 55, 51, 72, 48, 68, 55, 58, 109, 54, 67, 60, 61, 64, 67, 61, 66, 56, 50, 58, 72, 50, 84, 47, 56, 45, 65, 68, 51, 99, 61, 64, 57, 68, 46, 60, 77, 66, 61, 47, 58, 49, 53, 52, 63, 73, 51, 62, 69, 73, 62, 88, 71, 60, 63, 92, 67, 75, 93, 60, 56, 39, 77, 56, 77, 61, 67, 73, 56, 102, 72, 61, 53, 61, 59, 76, 64, 67, 76, 62, 61, 75, 42, 60, 55, 68, 44, 70, 80, 54, 59, 43, 97, 61, 60, 53, 75, 69, 67, 62, 57, 72, 53, 44, 56, 62, 48, 55, 64, 70, 61, 43, 74, 63, 59, 127, 55, 61, 45, 64, 44, 61, 70, 61, 74, 55, 96, 66, 61, 65, 54, 46, 57, 61, 63, 51, 51, 60, 55, 60, 54, 59, 53, 49, 55, 69, 45, 60, 67, 58, 82, 90, 49, 55, 63, 49, 69, 62, 57, 67, 78, 66, 59, 57, 55, 87, 60, 65, 47, 57, 69, 62, 65, 59, 66, 53, 54, 63, 54, 75, 45, 52, 69, 88, 71, 72, 51, 102, 58, 66, 73, 71, 51, 64, 87, 60, 72, 89, 55, 67, 51, 73, 62, 48, 67, 63, 62, 114, 61, 69, 65, 74, 56, 90, 60, 62, 65, 55, 64, 75, 49, 81, 67, 49, 54, 66, 75, 72, 54, 70, 65, 73, 61, 55, 83, 109, 61, 65, 28, 99, 58, 71, 67, 85, 74, 65, 52, 66, 63, 46, 79, 76, 68, 57, 72, 88, 62, 54, 48, 58, 54, 69, 99, 76, 73, 57, 64, 61, 111, 56, 51, 87, 58, 58, 57, 79, 56, 68, 63, 58, 74, 83, 53, 70, 88, 61, 83, 79, 56, 83, 50, 66, 84, 59, 66, 75, 55, 54, 90, 71, 58, 54, 74, 68, 70, 124, 100, 49, 60, 75, 72, 73, 65, 57, 61, 73, 78, 56, 54, 68, 60, 65, 57, 77, 56, 74, 58, 60, 54, 62, 79, 76, 68, 81, 74, 65, 67, 84, 53, 68, 58, 43, 56, 94, 68, 76, 90, 78, 72, 75, 73, 79, 78, 69, 62, 65, 68, 55, 73, 72, 60, 56, 74, 57, 65, 64, 63, 65, 87, 48, 79, 71, 67, 60, 71, 65, 50, 51, 71, 69, 70, 58, 32, 76, 66, 77, 74, 65, 84, 66, 61, 63, 83, 67, 59, 67, 50, 68, 77, 61, 94, 68, 49, 67, 97, 50, 49, 70, 65, 61, 49, 55, 80, 62, 66, 57, 53, 69, 131, 64, 84, 55, 60, 83, 55, 78, 58, 48, 51, 55, 63, 75, 54, 68, 55, 50, 62, 63, 67, 64, 65, 50, 57, 72, 82, 92, 74, 64, 60, 88, 55, 57, 81, 59, 61, 59, 39, 80, 67, 69, 62, 83, 76, 66, 40, 47, 68, 55, 85, 78, 92, 88, 55, 79, 68, 49, 60, 56, 64, 67, 75, 68, 95, 53, 121, 80, 58, 56, 65, 70, 59, 38, 102, 56, 57, 65, 57, 69, 69, 68, 68, 59, 46, 48, 63, 66, 54, 84, 67, 101, 70, 53, 60, 57, 63, 52, 71, 67, 103, 62, 59, 70, 50, 59, 73, 71, 66, 69, 62, 86, 61, 62, 56, 76, 57, 62, 67, 63, 47, 85, 62, 60, 82, 45, 76, 60, 70, 61, 77, 64, 81, 54, 67, 
71, 76, 69, 39, 73, 70, 93, 54, 71, 70, 83, 71, 53, 54, 105, 94, 103, 57, 51, 102, 52, 79, 56, 52, 77, 59, 50, 64, 44, 56, 61, 77, 55, 59, 59, 64, 55, 118, 70, 53, 63, 62, 59, 70, 53, 99, 62, 87, 55, 58, 90, 56, 66, 72, 55, 68, 76, 57, 65, 58, 60, 57, 44, 87, 58, 67, 57, 60, 54, 58, 45, 77, 54, 53, 57, 66, 63, 63, 81, 46, 52, 60, 42, 56, 59, 58, 53, 51, 74, 54, 59, 61, 66, 70, 87, 47, 61, 55, 57, 59, 60, 68, 65, 51, 74, 71, 61, 54, 61, 49, 55, 61, 78, 57, 59, 63, 69, 60, 70, 69, 58, 61, 97, 59, 61, 50, 73, 94, 48, 90, 71, 82, 60, 57, 55, 61, 76, 67, 55, 64, 88, 61, 69, 69, 74, 70, 63, 62, 55, 49, 84, 84, 50, 48, 47, 46, 51, 76, 83, 64, 45, 53, 45, 94, 56, 64, 78, 82, 95, 42, 97, 56, 50, 68, 56, 61, 67, 86, 66, 62, 56, 58, 101, 70, 63, 58, 43, 63, 54, 56, 59, 50, 135, 53, 67, 76, 42, 58, 56, 66, 49, 69, 61, 93, 77, 67, 73, 80, 72, 55, 63, 71, 57, 64, 40, 71, 53, 63, 76, 66, 74, 79, 68, 53, 64, 76, 73, 58, 59, 95, 50, 76, 65, 65, 55, 60, 57, 56, 70, 62, 64, 68, 54, 66, 83, 64, 71, 116, 85, 52, 60, 57, 57, 55, 127, 60, 68, 51, 67, 86, 97, 56, 70, 48, 104, 61, 57, 57, 81, 103, 69, 74, 57, 58, 49, 65, 64, 66, 61, 82, 76, 64, 64, 80, 53, 56, 64, 51, 55, 65, 85, 63, 78, 52, 72, 53, 72, 93, 63, 80, 61, 79, 71, 65, 60, 70, 68, 52, 71, 64, 112, 52, 53, 51, 52, 71, 61, 54, 52, 99, 62, 69, 72, 64, 56, 69, 88, 55, 62, 71, 63, 64, 89, 52, 74, 51, 35, 63, 63, 59, 72, 52, 63, 53, 61, 68, 84, 60, 52, 63, 53, 69, 73, 80, 83, 74, 76, 56, 58, 64, 68, 43, 62, 79, 85, 55, 59, 58, 78, 40, 60, 52, 60, 69, 65, 70, 50, 60, 62, 60, 75, 56, 91, 59, 61, 76, 76, 67, 61, 83, 85, 61, 63, 60, 79, 98, 70, 80, 78, 60, 68, 89, 64, 64, 63, 63, 52, 62, 65, 78, 54, 66, 78, 51, 61, 61, 63, 77, 67, 67, 50, 61, 80, 51, 74, 61, 46, 41, 86, 60, 77, 64, 72, 53, 65, 60, 64, 78, 56, 56, 52, 67, 58, 64, 97, 53, 44, 54, 63, 73, 53, 73, 56, 48, 57, 66, 63, 61, 76, 50, 73, 56, 63, 87, 67, 69, 64, 76, 83, 59, 60, 68, 64, 59, 88, 65, 49, 64, 57, 48, 69, 69, 67, 79, 47, 54, 62, 62, 53, 79, 77, 49, 71, 91, 66, 62, 72, 74, 66, 62, 84, 89, 90, 55, 55, 68, 65, 81, 65, 81, 66, 66, 60, 63, 59, 55, 62, 60, 70, 67, 46, 60, 114, 37, 68, 69, 55, 40, 60, 75, 74, 64, 51, 46, 65, 65, 56, 68, 58, 51, 74, 101, 112, 119, 73, 74, 71, 55, 57, 63, 58, 57, 62, 56, 91, 67, 51, 125, 52, 48, 58, 80, 76, 96, 57, 92, 116, 59, 63, 54, 57, 67, 59, 65, 90, 72, 85, 59, 66, 52, 58, 56, 61, 58, 67, 63, 66, 68, 63, 66, 59, 39, 69, 61, 46, 97, 56, 55, 49, 64, 33, 72, 50, 55, 81, 62, 81, 60, 84, 81, 53, 60, 42, 44, 67, 54, 54, 47, 36, 70, 69, 61, 72, 30, 51, 54, 72, 56, 80, 58, 77, 83, 62, 60, 58, 72, 51, 60, 56, 80, 67, 87, 48, 61, 85, 63, 84, 60, 52, 67, 68, 46, 55, 50, 65, 74, 53, 77, 81, 91, 58, 73, 57, 64, 63, 84, 73, 60, 69, 64, 68, 66, 62, 72, 63, 71, 78, 90, 84, 81, 69, 64, 93, 76, 70, 73, 59, 67, 69, 47, 53, 53, 103, 92, 63, 40, 62, 60, 67, 77, 71, 68, 93, 59, 50, 62, 82, 37, 61, 69, 58, 64, 60, 68, 79, 52, 70, 79, 65, 65, 59, 66, 59, 56, 49, 67, 60, 68, 100, 55, 51, 64, 85, 127, 54, 57, 64, 52, 52, 62, 50, 66, 71, 59, 67, 69, 69, 67, 50, 64, 68, 63, 79, 71, 78, 74, 50, 85, 69, 67, 44, 79, 48, 44, 57, 69, 62, 59, 65, 63, 62, 65, 55, 69, 64, 68, 58, 150, 61, 67, 68, 70, 59, 119, 63, 68, 62, 69, 58, 55, 51, 58, 65, 65, 69, 65, 70, 55, 70, 80, 46, 60, 72, 54, 55, 82, 60, 73, 54, 101, 74, 65, 58, 54, 53, 54, 60, 77, 61, 56, 45, 67, 55, 57, 63, 73, 98, 70, 72, 74, 49, 70, 93, 73, 64, 67, 55, 61, 55, 100, 61, 48, 74, 74, 56, 73, 76, 49, 41, 76, 76, 54, 51, 62, 73, 72, 46, 55, 80, 90, 55, 65, 58, 64, 79, 43, 61, 66, 74, 69, 66, 70, 66, 63, 79, 68, 53, 53, 55, 72, 65, 59, 
73, 53, 66, 83, 90, 49, 83, 102, 57, 72, 75, 64, 73, 76, 51, 74, 70, 98, 63, 57, 53, 64, 76, 61, 53, 58, 70, 79, 74, 63, 46, 72, 49, 59, 47, 82, 62, 89, 67, 68, 66, 62, 75, 64, 66, 71, 59, 52, 65, 53, 60, 49, 64, 65, 69, 58, 71, 56, 44, 52, 105, 60, 70, 63, 60, 55, 62, 77, 63, 98, 67, 63, 59, 73, 76, 53, 75, 38, 67, 47, 56, 78, 48, 55, 61, 50, 81, 32, 53, 54, 56, 66, 69, 66, 55, 62, 71, 67, 89, 45, 63, 82, 64, 69, 67, 51, 64, 93, 63, 60, 58, 49, 74, 67, 117, 68, 74, 55, 50, 68, 73, 57, 54, 52, 59, 67, 61, 68, 71, 68, 72, 66, 65, 77, 61, 88, 77, 54, 76, 72, 69, 73, 67, 71, 46, 82, 95, 59, 64, 61, 62, 68, 73, 71, 45, 50, 82, 86, 71, 56, 81, 56, 78, 61, 62, 59, 91, 59, 66, 76, 60, 54, 61, 64, 78, 63, 75, 52, 68, 54, 60, 57, 64, 65, 61, 73, 67, 75, 69, 60, 70, 60, 85, 57, 85, 86, 58, 62, 78, 70, 121, 41, 59, 74, 66, 62, 85, 48, 45, 72, 67, 67, 71, 65, 54, 47, 58, 49, 82, 87, 40, 89, 77, 73, 82, 76, 62, 86, 67, 58, 51, 53, 41, 67, 54, 51, 72, 51, 77, 80, 62, 61, 78, 49, 70, 48, 63, 52, 74, 64, 62, 58, 56, 62, 55, 64, 62, 81, 80, 56, 59, 48, 78, 57, 72, 61, 61, 66, 49, 76, 60, 53, 68, 58, 65, 58, 67, 73, 48, 53, 45, 55, 75, 75, 61, 46, 75, 58, 55, 62, 76, 67, 82, 65, 71, 57, 78, 52, 71, 56, 85, 80, 97, 61, 53, 98, 72, 85, 51, 83, 72, 69, 57, 55, 66, 69, 68, 59, 73, 106, 69, 66, 60, 78, 88, 69, 48, 57, 54, 50, 62, 64, 55, 64, 125, 66, 60, 46, 73, 58, 91, 74, 76, 85, 52, 52, 83, 53, 74, 70, 65, 69, 63, 55, 69, 54, 112, 78, 75, 67, 57, 63, 55, 63, 75, 49, 108, 74, 50, 59, 51, 79, 56, 76, 74, 65, 62, 102, 57, 52, 60, 58, 56, 73, 69, 46, 53, 55, 74, 173, 59, 71, 68, 67, 47, 66, 76, 66, 60, 70, 56, 53, 93, 50, 76, 61, 82, 72, 48, 77, 54, 88, 51, 59, 87, 45, 57, 50, 62, 97, 69, 64, 72, 81, 63, 59, 60, 58, 63, 76, 60, 56, 57, 57, 82, 56, 75, 61, 64, 75, 62, 55, 53, 67, 76, 50, 64, 68, 77, 57, 55, 47, 121, 61, 64, 61, 71, 70, 58, 69, 53, 94, 52, 71, 67, 75, 60, 77, 54, 71, 61, 63, 53, 61, 53, 70, 77, 74, 84, 62, 88, 72, 61, 58, 74, 50, 57, 79, 69, 56, 58, 43, 62, 62, 88, 65, 66, 76, 88, 77, 100, 96, 49, 72, 74, 59, 98, 86, 49, 60, 70, 74, 62, 48, 56, 69, 71, 59, 71, 64, 63, 57, 75, 56, 66, 63, 49, 90, 62, 58, 60, 61, 96, 65, 73, 53, 76, 51, 80, 78, 65, 57, 57, 105, 60, 56, 66, 74, 69, 74, 63, 55, 84, 62, 62, 62, 61, 46, 71, 64, 60, 65, 90, 78, 51, 80, 53, 56, 52, 87, 62, 59, 57, 69, 64, 69, 65, 51, 64, 71, 61, 66, 32, 78, 73, 69, 59, 58, 66, 41, 69, 80, 56, 50, 53, 74, 60, 62, 72, 41, 74, 64, 70, 58, 75, 60, 70, 93, 55, 47, 67, 87, 76, 64, 68, 68, 77, 58, 69, 63, 75, 56, 73, 59, 55, 70, 51, 49, 60, 107, 77, 70, 52, 47, 78, 64, 63, 62, 65, 48, 58, 68, 60, 53, 86, 67, 54, 61, 49, 60, 75, 73, 42, 50, 57, 44, 72, 52, 77, 70, 65, 74, 84, 49, 98, 89, 66, 60, 66, 57, 62, 136, 48, 54, 63, 60, 57, 69, 56, 75, 58, 98, 52, 71, 81, 100, 69, 70, 42, 75, 68, 121, 61, 59, 61, 71, 122, 88, 87, 116, 63, 75, 75, 66, 62, 73, 69, 69, 54, 61, 50, 59, 45, 71, 77, 47, 59, 49, 73, 60, 56, 76, 40, 77, 70, 50, 48, 68, 78, 69, 55, 77, 65, 65, 61, 56, 52, 80, 51, 68, 55, 96, 78, 69, 60, 71, 59, 75, 71, 47, 88, 86, 56, 57, 66, 72, 94, 52, 68, 47, 78, 48, 62, 67, 52, 100, 72, 70, 60, 74, 74, 77, 75, 46, 52, 89, 67, 47, 62, 48, 58, 53, 59, 69, 54, 64, 92, 61, 74, 67, 44, 72, 67, 60, 59, 64, 67, 54, 51, 67, 81, 67, 39, 69, 69, 66, 61, 77, 63, 58, 58, 73, 61, 65, 68, 83, 75, 51, 68, 72, 74, 64, 85, 61, 68, 90, 64, 71, 71, 58, 62, 76, 69, 52, 54, 83, 38, 65, 63, 80, 66, 52, 81, 93, 103, 74, 59, 77, 71, 74, 67, 110, 86, 57, 51, 56, 80, 60, 51, 65, 58, 73, 58, 50, 63, 61, 60, 74, 77, 51, 45, 54, 64, 60, 77, 96, 114, 89, 66, 80, 60, 65, 77, 
82, 58, 78, 60, 75, 57, 64, 67, 59, 60, 65, 74, 72, 53, 61, 61, 96, 63, 85, 84, 90, 60, 68, 55, 73, 43, 83, 59, 72, 119, 75, 50, 73, 61, 71, 62, 63, 73, 48, 53, 68, 65, 72, 97, 71, 68, 56, 58, 74, 87, 70, 53, 52, 56, 90, 83, 58, 44, 58, 72, 66, 68, 49, 60, 57, 61, 81, 85, 63, 77, 69, 56, 50, 61, 69, 51, 55, 66, 54, 47, 80, 56, 70, 85, 54, 69, 49, 60, 88, 58, 55, 52, 52, 70, 71, 59, 81, 57, 55, 61, 64, 103, 65, 77, 49, 90, 65, 56, 50, 50, 57, 95, 80, 67, 56, 51, 56, 60, 78, 78, 77, 69, 68, 66, 59, 57, 63, 64, 51, 44, 56, 61, 46, 60, 69, 44, 56, 55, 67, 60, 64, 52, 71, 52, 66, 62, 48, 63, 58, 51, 53, 55, 90, 63, 51, 67, 53, 77, 58, 72, 58, 70, 61, 75, 56, 59, 56, 76, 78, 67, 69, 62, 52, 63, 75, 90, 106, 72, 57, 64, 60, 57, 62, 88, 61, 67, 59, 88, 49, 56, 58, 63, 66, 53, 53, 65, 54, 58, 60, 65, 53, 60, 61, 65, 55, 54, 59, 71, 79, 54, 67, 57, 74, 54, 66, 68, 54, 70, 64, 58, 74, 69, 58, 63, 57, 66, 65, 54, 54, 98, 52, 57, 50, 64, 80, 57, 70, 79, 72, 65, 46, 55, 63, 57, 53, 48, 56, 70, 55, 63, 53, 54, 87, 74, 66, 56, 69, 70, 73, 104, 89, 69, 70, 63, 67, 74, 60, 68, 56, 85, 52, 67, 54, 64, 50, 42, 71, 114, 74, 62, 59, 59, 63, 51, 65, 93, 57, 56, 71, 56, 59, 55, 65, 64, 58, 52, 47, 76, 53, 73, 64, 80, 61, 61, 62, 60, 58, 43, 63, 76, 76, 62, 69, 70, 75, 60, 64, 85, 108, 57, 55, 58, 56, 60, 66, 52, 58, 71, 64, 83, 82, 66, 72, 55, 59, 66, 62, 59, 81, 75, 55, 55, 63, 71, 65, 60, 47, 67, 61, 72, 67, 53, 52, 55, 78, 55, 56, 75, 62, 80, 85, 79, 58, 75, 72, 69, 67, 55, 48, 79, 60, 114, 47, 69, 61, 69, 67, 62, 63, 64, 60, 89, 85, 85, 78, 58, 68, 68, 55, 61, 63, 74, 84, 61, 65, 92, 58, 49, 71, 66, 59, 81, 88, 59, 50, 60, 62, 60, 59, 62, 84, 93, 68, 53, 70, 59, 79, 53, 69, 70, 55, 57, 73, 87, 50, 67, 68, 54, 65, 74, 75, 106, 65, 65, 55, 56, 58, 69, 57, 64, 56, 63, 61, 55, 59, 67, 79, 109, 50, 54, 64, 64, 53, 72, 67, 46, 55, 61, 62, 58, 57, 73, 50, 68, 60, 58, 62, 71, 79, 86, 62, 85, 59, 67, 90, 65, 49, 47, 60, 59, 58, 80, 63, 60, 60, 58, 69, 61, 52, 74, 79, 75, 85, 80, 55, 75, 61, 62, 73, 57, 62, 109, 47, 58, 53, 62, 70, 46, 93, 78, 61, 59, 85, 50, 51, 71, 58, 75, 71, 58, 63, 51, 74, 66, 58, 82, 58, 71, 91, 64, 55, 84, 55, 56, 59, 58, 77, 71, 68, 84, 104, 67, 45, 87, 72, 70, 67, 64, 86, 49, 56, 63, 57, 64, 58, 70, 57, 51, 56, 88, 64, 63, 53, 57, 64, 65, 60, 63, 57, 77, 62, 70, 71, 79, 73, 60, 62, 67, 66, 62, 70, 39, 77, 83, 76, 50, 65, 94, 56, 70, 57, 55, 61, 67, 64, 134, 65, 57, 64, 52, 57, 65, 97, 89, 72, 62, 91, 47, 61, 63, 55, 49, 71, 42, 57, 54, 69, 64, 68, 65, 51, 79, 90, 44, 79, 71, 60, 58, 67, 95, 47, 71, 78, 65, 66, 70, 59, 55, 62, 64, 71, 96, 51, 75, 54, 71, 68, 67, 68, 53, 63, 61, 79, 58, 63, 57, 67, 66, 58, 77, 53, 69, 62, 74, 57, 83, 100, 56, 56, 67, 69, 52, 69, 77, 67, 59, 62, 52, 67, 58, 62, 75, 72, 52, 59, 88, 57, 57, 67, 72, 75, 53, 66, 73, 53, 51, 59, 66, 51, 58, 55, 86, 86, 83, 98, 55, 53, 67, 61, 57, 101, 66, 63, 66, 82, 65, 71, 71, 55, 56, 78, 56, 61, 53, 75, 61, 86, 53, 50, 58, 84, 62, 78, 59, 78, 87, 61, 73, 88, 70, 72, 92, 63, 57, 52, 67, 69, 54, 70, 68, 101, 81, 53, 71, 65, 57, 64, 59, 73, 59, 64, 63, 65, 67, 101, 105, 82, 61, 63, 63, 44, 69, 56, 73, 66, 42, 58, 57, 71, 68, 55, 56, 58, 70, 50, 68, 73, 60, 60, 70, 68, 60, 65, 58, 82, 59, 60, 92, 53, 56, 67, 61, 64, 78, 62, 78, 62, 64, 59, 71, 68, 48, 78, 46, 56, 63, 76, 59, 58, 62, 61, 65, 57, 54, 67, 45, 60, 60, 58, 63, 58, 43, 92, 45, 65, 63, 55, 66, 67, 81, 70, 72, 62, 72, 75, 72, 111, 84, 74, 55, 83, 53, 59, 78, 72, 67, 63, 57, 57, 55, 49, 63, 120, 79, 59, 48, 66, 67, 57, 62, 52, 62, 65, 69, 56, 60, 61, 59, 75, 63, 53, 55, 79, 54, 
65, 85, 69, 67, 87, 55, 55, 65, 60, 54, 74, 62, 66, 55, 64, 60, 58, 59, 69, 56, 56, 52, 58, 110, 56, 58, 73, 61, 71, 58, 51, 57, 60, 68, 64, 68, 68, 64, 53, 73, 74, 68, 65, 62, 86, 76, 80, 60, 65, 65, 61, 56, 70, 87, 60, 59, 72, 62, 69, 67, 68, 66, 59, 86, 57, 87, 62, 61, 49, 79, 77, 84, 56, 62, 50, 49, 59, 61, 62, 84, 44, 77, 82, 65, 72, 69, 76, 62, 72, 58, 68, 76, 75, 69, 60, 63, 67, 70, 45, 88, 71, 77, 60, 69, 60, 59, 63, 74, 88, 76, 70, 69, 72, 53, 66, 52, 59, 75, 71, 74, 51, 62, 65, 56, 75, 103, 57, 57, 71, 62, 76, 66, 64, 80, 92, 60, 75, 68, 59, 58, 79, 64, 67, 61, 61, 60, 56, 54, 59, 53, 73, 63, 88, 55, 57, 71, 74, 99, 57, 64, 47, 56, 66, 69, 69, 53, 65, 83, 70, 74, 59, 67, 103, 76, 69, 65, 76, 77, 53, 54, 51, 65, 83, 55, 67, 86, 71, 77, 64, 67, 67, 63, 50, 59, 85, 46, 61, 67, 72, 77, 43, 57, 97, 59, 73, 60, 93, 78, 57, 62, 66, 88, 65, 67, 57, 54, 61, 49, 54, 63, 56, 54, 77, 62, 66, 72, 85, 52, 65, 67, 63, 66, 63, 70, 61, 50, 52, 51, 57, 64, 71, 95, 47, 61, 55, 72, 47, 59, 57, 62, 66, 42, 64, 66, 59, 59, 76, 61, 72, 68, 57, 62, 72, 56, 64, 59, 69, 67, 57, 88, 52, 90, 85, 79, 60, 73, 68, 61, 60, 72, 81, 66, 79, 68, 68, 47, 61, 72, 62, 54, 49, 62, 65, 55, 63, 56, 79, 80, 61, 75, 64, 59, 70, 68, 76, 59, 60, 68, 57, 74, 82, 67, 66, 63, 56, 86, 68, 59, 59, 54, 64, 53, 62, 76, 57, 54, 63, 73, 58, 68, 69, 61, 60, 48, 47, 64, 51, 60, 67, 93, 67, 59, 67, 57, 67, 59, 69, 58, 56, 103, 94, 100, 62, 66, 51, 66, 58, 60, 70, 59, 71, 62, 60, 66, 50, 65, 61, 59, 64, 72, 55, 58, 60, 64, 83, 68, 53, 69, 61, 42, 69, 72, 72, 63, 58, 61, 64, 69, 49, 56, 59, 60, 76, 51, 49, 80, 54, 65, 74, 56, 55, 64, 71, 63, 73, 63, 56, 90, 72, 65, 59, 64, 53, 59, 78, 54, 55, 61, 53, 67, 86, 78, 63, 58, 75, 56, 57, 62, 101, 61, 74, 56, 61, 79, 58, 52, 50, 53, 63, 68, 66, 106, 65, 73, 63, 46, 69, 84, 67, 61, 95, 67, 87, 50, 86, 77, 59, 67, 115, 85, 63, 70, 64, 59, 63, 55, 59, 75, 65, 64, 69, 57, 44, 75, 91, 71, 55, 63, 63, 63, 48, 55, 64, 70, 63, 57, 92, 61, 61, 58, 63, 51, 50, 60, 74, 70, 58, 42, 42, 70, 78, 61, 65, 45, 63, 103, 49, 70, 58, 56, 68, 117, 49, 82, 74, 68, 50, 59, 70, 78, 65, 74, 89, 88, 93, 61, 47, 68, 57, 59, 81, 73, 95, 67, 74, 67, 51, 48, 54, 66, 61, 59, 69, 47, 64, 62, 64, 51, 78, 80, 89, 79, 67, 61, 70, 62, 90, 64, 51, 55, 69, 50, 39, 98, 67, 74, 62, 55, 48, 96, 90, 91, 65, 58, 60, 51, 53, 70, 66, 67, 61, 77, 66, 57, 57, 64, 90, 83, 68, 69, 61, 72, 56, 94, 46, 65, 53, 48, 53, 61, 69, 70, 57, 63, 58, 56, 54, 70, 68, 76, 57, 93, 71, 77, 68, 63, 52, 50, 66, 59, 71, 79, 76, 63, 83, 69, 60, 59, 62, 66, 69, 87, 75, 67, 63, 67, 80, 50, 54, 71, 65, 109, 74, 56, 80, 59, 59, 53, 58, 82, 63, 70, 56, 107, 52, 62, 63, 76, 71, 62, 75, 59, 76, 68, 54, 76, 70, 63, 73, 64, 85, 74, 55, 59, 56, 77, 77, 62, 40, 60, 106, 62, 77, 80, 56, 72, 90, 82, 78, 71, 72, 55, 78, 58, 55, 59, 58, 61, 78, 91, 51, 60, 49, 49, 72, 60, 59, 59, 52, 44, 56, 83, 60, 61, 57, 64, 61, 57, 84, 54, 62, 42, 74, 64, 53, 49, 86, 102, 57, 52, 63, 55, 54, 67, 75, 139, 70, 58, 61, 61, 58, 58, 80, 80, 48, 53, 74, 56, 58, 60, 75, 66, 67, 57, 71, 62, 55, 68, 86, 65, 71, 71, 88, 57, 78, 59, 73, 55, 87, 59, 51, 52, 69, 56, 59, 60, 58, 66, 48, 63, 64, 56, 53, 95, 80, 62, 69, 71, 60, 52, 63, 60, 80, 56, 47, 61, 84, 70, 50, 61, 60, 44, 86, 66, 57, 56, 95, 49, 76, 63, 54, 71, 59, 75, 51, 51, 53, 64, 61, 51, 88, 65, 57, 70, 76, 94, 65, 62, 59, 67, 67, 84, 70, 84, 63, 54, 92, 63, 79, 66, 75, 62, 55, 78, 90, 46, 65, 69, 59, 46, 99, 66, 61, 69, 48, 39, 57, 82, 61, 48, 66, 55, 61, 53, 55, 56, 86, 54, 82, 60, 81, 60, 79, 53, 75, 41, 60, 72, 91, 89, 67, 71, 88, 75, 57, 
81, 113, 77, 52, 59, 63, 58, 67, 87, 69, 58, 66, 62, 59, 60, 63, 72, 57, 71, 91, 109, 56, 79, 61, 95, 58, 79, 62, 66, 64, 63, 54, 60, 61, 58, 50, 66, 49, 60, 62, 73, 66, 65, 63, 74, 62, 71, 66, 64, 71, 56, 53, 63, 51, 60, 56, 52, 56, 75, 63, 92, 64, 72, 61, 70, 85, 70, 54, 63, 59, 54, 69, 68, 58, 59, 59, 69, 67, 63, 65, 62, 64, 87, 64, 66, 72, 69, 75, 68, 62, 53, 65, 55, 76, 86, 68, 95, 65, 54, 72, 45, 64, 60, 58, 65, 64, 59, 59, 54, 63, 54, 89, 50, 110, 51, 54, 76, 109, 65, 46, 64, 69, 66, 63, 58, 66, 57, 63, 44, 76, 64, 70, 63, 87, 65, 51, 87, 90, 65, 57, 88, 68, 83, 63, 56, 58, 55, 89, 69, 74, 88, 62, 81, 57, 63, 72, 89, 56, 57, 78, 70, 63, 118, 80, 69, 52, 65, 54, 63, 66, 56, 57, 64, 71, 63, 73, 50, 79, 73, 39, 85, 60, 55, 89, 63, 58, 94, 60, 81, 72, 78, 52, 73, 40, 71, 69, 73, 65, 65, 60, 67, 61, 64, 62, 91, 44, 72, 75, 87, 70, 97, 63, 59, 53, 58, 61, 56, 59, 74, 91, 60, 66, 74, 63, 73, 62, 74, 64, 78, 49, 70, 99, 66, 58, 62, 48, 80, 64, 67, 73, 66, 53, 88, 62, 61, 50, 77, 64, 56, 60, 58, 65, 76, 55, 66, 64, 66, 73, 59, 71, 66, 59, 71, 72, 68, 65, 64, 83, 52, 50, 47, 82, 66, 55, 74, 68, 72, 62, 57, 63, 76, 66, 70, 59, 79, 63, 75, 55, 54, 52, 58, 51, 79, 48, 60, 43, 101, 55, 68, 70, 51, 80, 59, 70, 50, 71, 59, 91, 84, 47, 59, 98, 122, 61, 66, 69, 63, 66, 61, 60, 73, 59, 60, 73, 58, 80, 51, 58, 63, 71, 69, 53, 76, 68, 57, 60, 65, 73, 51, 60, 85, 77, 66, 78, 73, 86, 45, 55, 64, 75, 46, 59, 63, 70, 56, 55, 57, 56, 57, 78, 58, 72, 59, 78, 60, 64, 55, 53, 56, 82, 71, 66, 56, 58, 51, 63, 53, 48, 65, 66, 98, 79, 67, 52, 80, 61, 52, 62, 54, 67, 72, 58, 64, 68, 65, 51, 65, 68, 56, 63, 76, 75, 52, 58, 51, 69, 78, 66, 98, 62, 86, 81, 76, 66, 61, 65, 70, 55, 64, 57, 53, 53, 58, 58, 66, 67, 65, 58, 51, 56, 55, 58, 69, 111, 59, 71, 54, 60, 86, 56, 69, 75, 83, 89, 68, 66, 49, 63, 48, 77, 61, 53, 62, 67, 60, 58, 78, 78, 51, 58, 56, 66, 67, 62, 59, 72, 60, 61, 69, 54, 66, 67, 60, 54, 77, 50, 60, 69, 68, 80, 54, 51, 65, 49, 61, 70, 48, 59, 95, 73, 69, 60, 53, 54, 52, 60, 61, 58, 69, 70, 49, 58, 56, 59, 90, 65, 76, 70, 77, 59, 79, 80, 52, 77, 64, 50, 62, 55, 74, 55, 62, 98, 70, 56, 61, 62, 74, 59, 58, 66, 56, 69, 60, 67, 38, 60, 69, 73, 55, 51, 85, 60, 65, 49, 58, 69, 88, 55, 73, 59, 71, 62, 78, 66, 61, 56, 60, 99, 57, 88, 59, 71, 69, 64, 57, 62, 56, 72, 57, 58, 64, 64, 91, 53, 41, 53, 85, 64, 58, 58, 68, 71, 94, 63, 99, 61, 66, 62, 64, 96, 57, 66, 64, 87, 78, 62, 67, 93, 83, 62, 57, 67, 58, 53, 53, 59, 78, 68, 87, 57, 54, 40, 69, 78, 87, 60, 56, 70, 58, 63, 70, 57, 56, 61, 51, 39, 52, 61, 64, 65, 71, 53, 71, 50, 60, 54, 57, 65, 47, 68, 78, 60, 58, 60, 86, 66, 57, 56, 59, 56, 106, 79, 72, 57, 57, 64, 83, 60, 74, 61, 69, 60, 74, 79, 63, 62, 73, 68, 63, 107, 63, 68, 86, 57, 66, 62, 45, 50, 61, 64, 77, 65, 72, 75, 54, 52, 55, 79, 69, 73, 69, 52, 50, 71, 51, 61, 63, 59, 66, 52, 77, 91, 83, 64, 70, 71, 58, 63, 56, 83, 78, 63, 62, 62, 84, 51, 60, 70, 58, 56, 59, 47, 59, 65, 61, 90, 100, 54, 60, 60, 60, 56, 67, 79, 58, 56, 30, 61, 66, 71, 55, 69, 54, 52, 69, 74, 61, 86, 75, 52, 88, 49, 87, 118, 67, 65, 59, 72, 82, 72, 72, 64, 60, 77, 52, 53, 59, 62, 61, 66, 72, 90, 58, 55, 62, 77, 91, 60, 55, 59, 123, 58, 79, 58, 62, 62, 73, 58, 60, 62, 68, 77, 92, 69, 63, 72, 49, 60, 60, 75, 69, 66, 65, 50, 81, 66, 62, 69, 53, 52, 70, 65, 56, 54, 87, 59, 73, 69, 64, 64, 74, 90, 72, 70, 47, 59, 65, 58, 51, 47, 51, 80, 66, 61, 82, 52, 74, 65, 69, 48, 62, 57, 94, 81, 62, 59, 61, 52, 43, 84, 58, 77, 59, 85, 47, 70, 57, 60, 71, 60, 58, 49, 62, 70, 44, 72, 75, 62, 56, 63, 54, 66, 48, 65, 144, 66, 68, 70, 75, 65, 42, 76, 64, 77, 56, 
66, 74, 66, 90, 57, 76, 79, 71, 50, 68, 63, 84, 62, 58, 67, 59, 76, 72, 76, 91, 61, 69, 63, 62, 53, 50, 61, 54, 74, 67, 56, 47, 65, 66, 60, 68, 68, 75, 64, 63, 64, 83, 62, 72, 60, 63, 61, 56, 96, 62, 52, 74, 56, 64, 56, 67, 55, 61, 77, 67, 72, 68, 68, 63, 77, 65, 61, 72, 66, 58, 58, 62, 54, 67, 89, 65, 55, 62, 78, 93, 56, 51, 73, 65, 51, 72, 82, 59, 38, 62, 72, 58, 84, 62, 63, 65, 69, 68, 79, 80, 72, 63, 55, 64, 92, 67, 60, 72, 68, 75, 57, 49, 53, 51, 89, 56, 55, 68, 78, 53, 49, 63, 77, 63, 44, 49, 68, 59, 53, 74, 62, 54, 121, 70, 76, 44, 74, 82, 47, 75, 67, 77, 52, 70, 61, 62, 69, 66, 68, 70, 51, 61, 64, 55, 68, 70, 58, 71, 93, 117, 50, 60, 69, 65, 66, 86, 48, 85, 48, 64, 65, 80, 72, 65, 57, 74, 73, 63, 75, 72, 50, 50, 52, 68, 59, 95, 59, 63, 77, 62, 57, 73, 75, 65, 66, 56, 52, 73, 56, 93, 62, 52, 64, 54, 91, 61, 60, 64, 65, 56, 92, 76, 48, 63, 69, 58, 61, 53, 65, 60, 60, 60, 92, 56, 64, 56, 76, 52, 66, 53, 75, 62, 67, 72, 57, 80, 62, 68, 69, 67, 81, 65, 71, 88, 56, 64, 65, 60, 75, 80, 60, 68, 92, 56, 53, 63, 113, 60, 57, 60, 68, 58, 64, 60, 59, 56, 57, 60, 66, 65, 60, 81, 35, 51, 83, 61, 64, 65, 45, 75, 68, 53, 74, 53, 65, 57, 65, 81, 61, 75, 55, 68, 76, 64, 63, 51, 69, 52, 76, 56, 65, 121, 54, 66, 67, 56, 76, 58, 79, 49, 58, 61, 69, 56, 55, 63, 72, 50, 85, 56, 57, 55, 65, 54, 58, 74, 57, 66, 62, 66, 66, 80, 48, 79, 84, 59, 71, 78, 68, 54, 53, 76, 60, 88, 63, 64, 80, 72, 54, 79, 61, 76, 63, 65, 52, 69, 60, 56, 77, 78, 64, 91, 67, 74, 55, 59, 75, 78, 52, 55, 69, 65, 68, 75, 57, 75, 92, 90, 62, 58, 61, 62, 59, 59, 65, 76, 71, 57, 61, 57, 65, 52, 65, 68, 58, 81, 73, 65, 68, 59, 66, 74, 67, 69, 60, 65, 80, 60, 59, 60, 64, 81, 72, 56, 66, 62, 49, 61, 62, 74, 73, 62, 66, 60, 68, 65, 58, 42, 56, 97, 75, 93, 90, 62, 74, 100, 76, 62, 64, 75, 67, 64, 58, 54, 70, 56, 104, 88, 54, 47, 106, 48, 58, 67, 74, 51, 53, 67, 63, 53, 60, 68, 50, 57, 66, 52, 40, 65, 58, 59, 53, 82, 82, 83, 55, 50, 56, 72, 76, 59, 52, 58, 60, 60, 83, 76, 68, 68, 55, 61, 59, 60, 77, 56, 58, 58, 52, 72, 64, 63, 66, 65, 86, 54, 68, 50, 76, 79, 67, 104, 60, 78, 56, 70, 57, 54, 56, 56, 63, 114, 90, 87, 58, 62, 60, 52, 63, 56, 54, 48, 82, 62, 54, 66, 67, 65, 73, 68, 69, 56, 63, 56, 64, 73, 66, 69, 62, 69, 59, 78, 63, 62, 60, 70, 57, 67, 71, 74, 65, 63, 75, 57, 56, 61, 58, 56, 62, 78, 47, 67, 54, 60, 60, 47, 71, 76, 67, 66, 61, 55, 55, 56, 82, 58, 84, 113, 75, 60, 63, 50, 71, 71, 60, 49, 99, 60, 56, 64, 74, 60, 60, 79, 57, 59, 70, 67, 58, 72, 64, 56, 64, 57, 60, 64, 62, 63, 58, 53, 71, 63, 57, 80, 61, 57, 69, 63, 52, 99, 90, 52, 60, 65, 61, 67, 65, 63, 59, 69, 58, 59, 57, 68, 62, 56, 64, 61, 66, 61, 66, 60, 105, 73, 83, 68, 57, 62, 61, 53, 63, 55, 65, 57, 66, 57, 64, 53, 58, 60, 57, 55, 70, 73, 63, 56, 60, 60, 63, 72, 52, 69, 84, 46, 67, 63, 52, 98, 55, 72, 75, 67, 59, 54, 62, 64, 65, 71, 58, 63, 82, 80, 60, 72, 57, 59, 51, 50, 79, 58, 75, 62, 65, 74, 64, 74, 61, 54, 45, 61, 73, 62, 66, 61, 76, 44, 59, 55, 62, 56, 98, 45, 68, 68, 99, 53, 57, 36, 125, 80, 57, 80, 88, 80, 81, 64, 56, 44, 54, 85, 67, 78, 62, 66, 103, 88, 59, 72, 55, 68, 58, 57, 60, 51, 56, 56, 77, 54, 57, 85, 60, 67, 105, 68, 81, 67, 69, 54, 57, 76, 57, 66, 56, 63, 62, 91, 66, 59, 59, 61, 58, 127, 76, 75, 59, 62, 58, 67, 63, 68, 69, 58, 54, 62, 79, 68, 53, 73, 59, 57, 71, 69, 88, 53, 61, 76, 51, 63, 82, 60, 84, 77, 42, 70, 60, 52, 71, 70, 72, 61, 65, 63, 72, 61, 101, 57, 84, 60, 72, 69, 63, 63, 61, 99, 63, 44, 81, 45, 50, 57, 90, 73, 64, 56, 85, 62, 59, 51, 83, 62, 59, 86, 65, 60, 65, 67, 83, 90, 75, 59, 49, 54, 50, 61, 69, 56, 53, 50, 44, 66, 62, 64, 76, 72, 76, 61, 
51, 72, 89, 58, 61, 74, 62, 61, 74, 65, 57, 66, 82, 72, 53, 67, 68, 61, 49, 78, 58, 55, 65, 60, 88, 93, 65, 63, 64, 64, 64, 80, 58, 58, 57, 99, 51, 47, 52, 55, 60, 65, 61, 57, 69, 68, 75, 55, 88, 62, 50, 54, 70, 59, 62, 61, 71, 69, 72, 71, 69, 60, 60, 50, 58, 81, 52, 53, 63, 81, 61, 66, 57, 65, 96, 64, 63, 76, 48, 54, 66, 68, 71, 57, 56, 66, 74, 61, 57, 80, 54, 66, 94, 74, 59, 57, 69, 98, 78, 55, 69, 59, 58, 57, 71, 67, 58, 56, 47, 60, 67, 62, 56, 79, 91, 63, 65, 58, 46, 62, 52, 77, 54, 70, 48, 76, 72, 69, 77, 152, 61, 55, 64, 76, 65, 72, 65, 55, 62, 78, 83, 68, 59, 64, 68, 64, 58, 55, 69, 59, 56, 81, 65, 70, 61, 65, 70, 72, 74, 79, 56, 56, 76, 49, 67, 53, 53, 60, 57, 54, 51, 60, 54, 57, 70, 68, 63, 97, 78, 77, 57, 78, 59, 54, 58, 78, 61, 58, 60, 67, 61, 51, 65, 79, 62, 71, 50, 52, 61, 54, 89, 78, 71, 64, 64, 63, 61, 78, 59, 73, 65, 59, 56, 61, 82, 59, 64, 57, 72, 84, 54, 57, 61, 69, 78, 78, 61, 74, 57, 60, 84, 63, 88, 60, 69, 71, 53, 127, 59, 73, 69, 65, 57, 60, 109, 60, 59, 62, 88, 56, 66, 58, 74, 93, 66, 51, 73, 63, 75, 64, 63, 57, 73, 56, 78, 64, 54, 73, 69, 87, 67, 59, 88, 63, 65, 59, 85, 70, 54, 75, 66, 56, 55, 74, 58, 60, 63, 76, 71, 72, 60, 62, 54, 63, 67, 77, 66, 70, 67, 113, 62, 59, 55, 85, 75, 80, 56, 85, 64, 62, 74, 67, 73, 62, 84, 66, 84, 50, 50, 81, 76, 49, 57, 55, 66, 81, 55, 63, 72, 58, 58, 53, 56, 67, 67, 70, 61, 62, 53, 85, 60, 75, 58, 63, 84, 54, 58, 57, 77, 56, 90, 67, 62, 60, 70, 84, 56, 74, 61, 66, 55, 70, 70, 59, 61, 72, 72, 59, 54, 53, 61, 56, 46, 83, 74, 93, 66, 55, 67, 76, 72, 52, 56, 82, 66, 47, 63, 67, 84, 66, 42, 60, 81, 63, 56, 69, 121, 70, 72, 69, 55, 53, 50, 60, 57, 53, 55, 62, 75, 76, 59, 67, 78, 72, 65, 62, 81, 57, 66, 67, 62, 70, 66, 55, 71, 58, 54, 71, 61, 78, 66, 63, 65, 51, 68, 66, 76, 76, 57, 85, 54, 122, 52, 74, 60, 57, 59, 64, 63, 59, 60, 59, 56, 61, 68, 58, 73, 83, 72, 63, 64, 50, 52, 54, 61, 72, 84, 61, 87, 54, 78, 78, 74, 87, 66, 66, 50, 55, 60, 53, 60, 66, 63, 95, 70, 63, 63, 72, 80, 70, 58, 63, 63, 70, 64, 86, 63, 67, 49, 61, 71, 98, 44, 63, 61, 83, 67, 78, 96, 74, 88, 58, 62, 54, 59, 71, 75, 56, 57, 59, 55, 53, 62, 83, 53, 53, 69, 67, 63, 57, 64, 101, 63, 65, 62, 50, 72, 50, 61, 55, 73, 74, 63, 79, 47, 53, 79, 64, 61, 64, 62, 85, 56, 71, 74, 60, 66, 57, 52, 55, 64, 46, 64, 71, 59, 60, 59, 62, 56, 64, 55, 79, 58, 61, 80, 111, 78, 68, 76, 46, 70, 58, 92, 60, 59, 57, 76, 67, 64, 59, 111, 76, 67, 58, 64, 67, 59, 84, 42, 52, 73, 61, 56, 87, 55, 61, 66, 57, 59, 58, 50, 87, 74, 55, 59, 81, 67, 49, 66, 49, 93, 65, 69, 51, 56, 64, 64, 53, 58, 66, 55, 55, 62, 70, 59, 67, 63, 63, 70, 55, 64, 59, 71, 76, 78, 70, 60, 67, 64, 54, 69, 59, 67, 61, 63, 58, 60, 66, 60, 81, 76, 66, 53, 59, 53, 57, 56, 65, 65, 81, 87, 63, 52, 50, 71, 54, 64, 103, 64, 63, 46, 49, 64, 63, 81, 68, 70, 56, 64, 76, 80, 86, 69, 46, 65, 44, 66, 72, 63, 62, 51, 64, 68, 53, 65, 67, 57, 69, 58, 53, 47, 55, 62, 57, 58, 79, 65, 69, 73, 57, 81, 68, 67, 65, 58, 60, 64, 48, 69, 51, 59, 59, 63, 87, 51, 65, 58, 64, 74, 64, 71, 41, 58, 62, 91, 52, 57, 49, 57, 64, 81, 59, 49, 78, 60, 64, 61, 73, 60, 68, 58, 80, 63, 82, 57, 87, 64, 92, 85, 79, 59, 56, 63, 60, 71, 61, 72, 71, 62, 69, 55, 57, 55, 73, 58, 85, 61, 65, 53, 51, 83, 73, 89, 67, 49, 64, 69, 67, 64, 57, 77, 64, 53, 55, 63, 56, 54, 49, 64, 62, 76, 54, 80, 63, 99, 62, 77, 51, 51, 43, 67, 66, 71, 62, 54, 59, 59, 65, 70, 53, 59, 63, 57, 62, 62, 59, 58, 63, 69, 53, 75, 65, 45, 64, 67, 57, 96, 59, 62, 51, 71, 68, 51, 55, 49, 72, 59, 58, 53, 74, 81, 77, 56, 91, 51, 57, 65, 63, 61, 56, 66, 64, 69, 64, 58, 83, 93, 54, 48, 59, 57, 61, 54, 59, 52, 66, 
86, 71, 40, 69, 95, 70, 77, 64, 63, 60, 97, 58, 51, 60, 65, 59, 65, 49, 58, 46, 48, 63, 55, 48, 56, 53, 66, 50, 57, 64, 54, 74, 68, 59, 61, 44, 49, 63, 71, 76, 38, 66, 56, 106, 77, 57, 60, 86, 102, 67, 57, 58, 50, 77, 57, 89, 52, 85, 53, 65, 58, 59, 62, 53, 57, 67, 78, 119, 55, 57, 53, 81, 59, 62, 91, 85, 90, 58, 38, 56, 58, 69, 58, 75, 81, 50, 60, 65, 74, 58, 67, 47, 55, 84, 68, 66, 58, 63, 62, 67, 48, 59, 83, 109, 59, 60, 54, 52, 62, 56, 64, 60, 66, 72, 66, 59, 61, 58, 70, 105, 56, 60, 65, 70, 63, 51, 62, 55, 78, 65, 81, 53, 70, 69, 72, 64, 63, 47, 88, 56, 75, 71, 47, 92, 53, 63, 107, 59, 60, 67, 72, 68, 63, 85, 58, 63, 65, 52, 66, 57, 75, 86, 57, 50, 97, 62, 73, 60, 60, 53, 60, 62, 62, 117, 57, 67, 61, 56, 52, 74, 85, 76, 59, 63, 73, 57, 53, 67, 56, 66, 73, 57, 49, 67, 49, 61, 52, 62, 48, 86, 88, 64, 61, 77, 64, 50, 65, 90, 70, 48, 70, 83, 97, 64, 86, 61, 61, 59, 59, 58, 67, 61, 84, 108, 86, 61, 60, 71, 57, 66, 55, 58, 64, 66, 60, 63, 96, 69, 67, 59, 63, 69, 75, 77, 66, 59, 67, 64, 75, 60, 51, 57, 59, 50, 67, 63, 79, 63, 60, 64, 64, 65, 56, 56, 64, 61, 60, 132, 57, 58, 48, 60, 83, 45, 79, 75, 96, 46, 55, 62, 60, 55, 71, 78, 59, 65, 59, 52, 107, 62, 75, 60, 68, 83, 85, 65, 54, 54, 57, 45, 53, 60, 67, 52, 94, 55, 49, 74, 56, 55, 61, 82, 79, 97, 83, 71, 78, 56, 49, 64, 47, 52, 56, 59, 66, 57, 64, 54, 61, 70, 65, 58, 53, 62, 79, 57, 67, 61, 80, 71, 63, 54, 66, 60, 68, 65, 50, 65, 44, 56, 85, 57, 103, 67, 83, 76, 59, 90, 65, 63, 76, 59, 74, 42, 74, 54, 70, 53, 53, 57, 63, 56, 61, 88, 88, 57, 95, 64, 70, 73, 67, 65, 99, 62, 65, 43, 94, 90, 68, 79, 68, 73, 60, 63, 63, 72, 53, 62, 77, 47, 47, 62, 64, 47, 71, 71, 64, 65, 59, 66, 112, 88, 54, 71, 49, 91, 55, 93, 55, 64, 65, 53, 71, 72, 83, 70, 62, 50, 87, 63, 61, 66, 90, 78, 59, 77, 86, 96, 60, 72, 68, 61, 67, 74, 52, 74, 55, 48, 59, 61, 55, 62, 53, 89, 73, 67, 74, 54, 55, 76, 55, 60, 69, 54, 57, 70, 75, 52, 57, 67, 76, 56, 87, 68, 62, 89, 60, 94, 68, 57, 53, 60, 83, 57, 59, 54, 116, 63, 71, 64, 72, 65, 63, 58, 64, 60, 95, 44, 53, 69, 55, 60, 63, 61, 64, 73, 49, 108, 57, 68, 97, 64, 67, 80, 70, 60, 78, 55, 61, 58, 62, 43, 82, 45, 54, 77, 80, 96, 68, 65, 82, 72, 67, 52, 110, 61, 68, 74, 56, 74, 68, 62, 62, 56, 56, 68, 71, 55, 67, 52, 70, 55, 59, 86, 64, 76, 59, 47, 63, 46, 49, 54, 66, 73, 68, 50, 54, 55, 63, 47, 64, 68, 68, 71, 57, 66, 67, 50, 68, 135, 60, 54, 69, 54, 83, 63, 67, 58, 51, 52, 74, 56, 80, 67, 62, 64, 62, 67, 56, 61, 81, 70, 34, 60, 47, 64, 73, 72, 75, 74, 69, 72, 71, 58, 70, 80, 77, 77, 61, 91, 60, 49, 63, 73, 89, 71, 64, 85, 49, 76, 48, 67, 64, 58, 70, 56, 79, 84, 68, 73, 80, 71, 90, 63, 87, 47, 61, 68, 68, 64, 89, 51, 60, 50, 66, 70, 64, 52, 52, 61, 53, 66, 73, 63, 71, 54, 67, 59, 69, 57, 67, 65, 66, 84, 63, 65, 50, 64, 59, 63, 85, 58, 64, 63, 84, 72, 64, 47, 67, 68, 73, 80, 58, 52, 72, 61, 57, 52, 65, 96, 52, 51, 64, 56, 57, 46, 55, 98, 74, 69, 71, 45, 65, 82, 57, 69, 65, 66, 75, 52, 80, 59, 78, 93, 99, 102, 76, 44, 81, 58, 50, 63, 69, 59, 64, 77, 59, 68, 54, 71, 68, 76, 59, 65, 67, 55, 70, 56, 53, 62, 73, 64, 65, 59, 92, 65, 79, 67, 77, 67, 61, 68, 88, 68, 68, 66, 68, 44, 68, 66, 76, 51, 62, 75, 77, 71, 63, 58, 60, 64, 74, 57, 96, 76, 46, 56, 57, 67, 51, 79, 76, 60, 49, 80, 84, 59, 51, 54, 52, 55, 80, 102, 47, 60, 56, 58, 60, 62, 93, 53, 56, 58, 50, 93, 69, 87, 61, 62, 60, 58, 73, 57, 63, 87, 75, 63, 60, 56, 59, 76, 56, 56, 47, 66, 56, 57, 80, 65, 71, 56, 54, 72, 70, 54, 78, 70, 68, 51, 53, 56, 60, 46, 58, 74, 76, 83, 63, 58, 47, 72, 69, 56, 60, 59, 81, 56, 98, 65, 59, 78, 58, 77, 57, 63, 66, 63, 57, 79, 52, 86, 62, 49, 74, 113, 
52, 74, 58, 64, 53, 71, 63, 53, 57, 60, 68, 73, 76, 56, 58, 62, 68, 52, 73, 76, 63, 85, 85, 57, 54, 67, 67, 58, 60, 63, 66, 74, 74, 61, 55, 64, 71, 54, 70, 57, 58, 62, 86, 53, 55, 57, 59, 53, 73, 70, 67, 68, 57, 69, 65, 69, 55, 67, 54, 52, 57, 55, 50, 62, 72, 59, 66, 49, 49, 57, 76, 61, 68, 75, 64, 60, 64, 60, 50, 87, 51, 60, 63, 69, 91, 65, 49, 48, 59, 59, 60, 54, 92, 75, 58, 52, 63, 84, 66, 72, 83, 52, 52, 66, 59, 53, 74, 56, 136, 45, 59, 67, 69, 80, 56, 64, 53, 53, 113, 74, 57, 64, 76, 54, 71, 59, 62, 47, 57, 54, 73, 78, 67, 63, 49, 60, 65, 65, 56, 78, 46, 103, 72, 47, 111, 63, 67, 76, 51, 58, 54, 54, 61, 63, 59, 72, 60, 70, 56, 45, 61, 56, 59, 85, 58, 84, 96, 67, 62, 79, 72, 60, 49, 85, 61, 60, 61, 70, 52, 72, 55, 76, 60, 64, 85, 68, 61, 44, 51, 65, 60, 69, 77, 59, 43, 81, 66, 60, 50, 92, 54, 69, 68, 54, 63, 78, 55, 68, 70, 108, 75, 45, 74, 64, 65, 61, 69, 72, 62, 102, 61, 77, 57, 63, 54, 56, 74, 54, 68, 59, 55, 74, 72, 61, 67, 72, 64, 42, 79, 51, 79, 72, 46, 90, 73, 71, 62, 63, 72, 55, 42, 59, 58, 49, 64, 47, 67, 58, 68, 66, 66, 63, 63, 56, 68, 59, 67, 59, 76, 84, 61, 66, 65, 49, 58, 92, 66, 61, 86, 52, 40, 66, 77, 78, 76, 66, 65, 58, 68, 71, 60, 51, 62, 99, 77, 65, 40, 68, 52, 64, 82, 58, 63, 73, 77, 63, 50, 74, 83, 58, 92, 71, 62, 54, 68, 57, 51, 74, 60, 73, 56, 81, 75, 78, 61, 50, 100, 65, 54, 50, 89, 65, 65, 51, 66, 81, 57, 79, 64, 72, 81, 77, 57, 69, 76, 63, 61, 60, 73, 66, 64, 50, 127, 68, 68, 35, 75, 68, 49, 61, 62, 75, 62, 41, 62, 51, 54, 66, 62, 61, 72, 63, 66, 63, 74, 78, 79, 86, 58, 52, 63, 94, 53, 73, 79, 53, 51, 63, 61, 54, 70, 63, 60, 48, 58, 60, 55, 51, 71, 51, 73, 63, 64, 83, 58, 73, 63, 116, 62, 65, 76, 71, 66, 62, 77, 70, 37, 62, 70, 80, 65, 52, 60, 62, 64, 55, 62, 52, 84, 59, 67, 85, 47, 64, 81, 46, 62, 47, 78, 64, 49, 83, 84, 55, 101, 64, 79, 71, 52, 57, 56, 59, 76, 67, 53, 83, 79, 62, 59, 81, 53, 64, 53, 51, 65, 63, 68, 62, 64, 77, 78, 64, 72, 61, 70, 60, 80, 91, 53, 66, 61, 84, 72, 63, 51, 64, 58, 43, 63, 74, 52, 71, 73, 51, 54, 91, 65, 72, 62, 55, 66, 73, 71, 101, 33, 75, 72, 72, 81, 75, 57, 61, 108, 77, 72, 87, 66, 85, 89, 60, 61, 74, 73, 61, 67, 57, 73, 59, 54, 56, 68, 37, 56, 67, 67, 107, 65, 48, 57, 66, 43, 55, 56, 67, 67, 79, 84, 84, 60, 62, 102, 83, 86, 54, 74, 80, 89, 70, 62, 60, 60, 53, 83, 51, 49, 84, 69, 73, 58, 62, 65, 51, 66, 79, 53, 68, 65, 77, 60, 78, 72, 59, 39, 93, 54, 63, 59, 71, 75, 49, 90, 56, 70, 63, 64, 60, 48, 70, 68, 76, 63, 67, 100, 87, 68, 51, 71, 82, 56, 62, 54, 57, 107, 54, 56, 75, 70, 77, 75, 74, 94, 63, 62, 74, 52, 87, 61, 63, 65, 65, 65, 62, 62, 62, 49, 60, 85, 57, 51, 58, 67, 76, 51, 52, 76, 66, 52, 50, 74, 74, 53, 57, 46, 70, 82, 60, 62, 63, 47, 70, 71, 73, 49, 59, 51, 62, 64, 57, 91, 73, 62, 82, 71, 51, 130, 53, 52, 49, 67, 43, 71, 80, 78, 67, 45, 67, 76, 101, 66, 60, 77, 77, 69, 65, 57, 66, 91, 53, 66, 64, 59, 47, 76, 69, 47, 54, 69, 78, 61, 78, 81, 61, 50, 59, 77, 63, 63, 81, 60, 138, 60, 64, 55, 74, 65, 47, 61, 90, 62, 47, 53, 46, 76, 101, 61, 66, 54, 48, 60, 58, 91, 60, 58, 68, 68, 72, 80, 100, 76, 92, 51, 75, 54, 59, 70, 57, 59, 64, 71, 74, 72, 75, 51, 90, 52, 64, 67, 87, 67, 100, 69, 62, 44, 59, 45, 85, 73, 68, 88, 63, 66, 61, 85, 74, 68, 61, 37, 70, 29, 60, 68, 53, 56, 68, 86, 53, 65, 83, 65, 59, 61, 57, 53, 73, 71, 61, 63, 52, 72, 68, 59, 55, 54, 50, 47, 66, 58, 51, 66, 63, 58, 64, 68, 77, 69, 72, 51, 63, 100, 65, 79, 71, 75, 64, 63, 56, 39, 68, 80, 68, 62, 79, 49, 52, 66, 56, 52, 70, 68, 62, 34, 83, 75, 93, 63, 53, 47, 84, 75, 79, 63, 53, 57, 78, 86, 42, 52, 61, 69, 55, 48, 61, 70, 59, 66, 69, 69, 59, 58, 65, 59, 47, 75, 
60, 82, 90, 105, 65, 62, 75, 97, 48, 50, 76, 62, 71, 64, 80, 55, 67, 57, 63, 86, 68, 85, 61, 76, 51, 76, 92, 63, 73, 72, 64, 54, 66, 52, 62, 60, 67, 55, 59, 76, 65, 81, 68, 54, 44, 67, 81, 79, 59, 54, 76, 64, 65, 66, 74, 85, 82, 74, 50, 60, 75, 72, 57, 64, 65, 40, 78, 62, 65, 54, 70, 82, 63, 44, 58, 46, 70, 50, 59, 53, 76, 64, 64, 56, 81, 59, 63, 63, 90, 59, 79, 44, 56, 56, 80, 83, 74, 65, 56, 47, 61, 53, 72, 54, 60, 52, 50, 55, 65, 54, 54, 66, 37, 72, 88, 85, 92, 73, 56, 61, 59, 61, 63, 65, 68, 50, 98, 74, 72, 67, 59, 80, 85, 70, 59, 82, 111, 58, 58, 75, 70, 53, 45, 52, 50, 70, 50, 78, 58, 80, 68, 72, 48, 68, 64, 61, 48, 80, 66, 71, 106, 58, 112, 67, 75, 73, 70, 58, 72, 56, 78, 68, 48, 77, 67, 77, 66, 74, 56, 59, 73, 58, 86, 74, 67, 55, 55, 99, 56, 76, 74, 79, 79, 61, 55, 77, 69, 66, 56, 71, 78, 79, 54, 68, 55, 64, 74, 69, 63, 113, 60, 90, 71, 53, 57, 62, 57, 61, 68, 62, 69, 93, 61, 64, 57, 104, 77, 63, 75, 61, 95, 85, 72, 55, 63, 53, 69, 70, 49, 63, 85, 87, 61, 55, 72, 84, 68, 54, 45, 89, 56, 82, 87, 82, 58, 70, 59, 54, 61, 77, 51, 61, 64, 67, 78, 59, 72, 94, 71, 63, 69, 85, 68, 64, 49, 72, 69, 57, 69, 58, 70, 73, 74, 56, 66, 67, 76, 63, 55, 66, 65, 69, 62, 61, 67, 57, 58, 55, 51, 54, 55, 63, 85, 60, 55, 54, 56, 61, 54, 58, 71, 69, 56, 59, 81, 48, 70, 56, 58, 54, 94, 62, 51, 57, 89, 73, 57, 62, 77, 64, 50, 59, 65, 97, 56, 63, 92, 53, 58, 63, 78, 60, 65, 48, 50, 65, 71, 72, 55, 85, 63, 81, 73, 61, 67, 79, 83, 66, 87, 87, 41, 64, 60, 79, 86, 81, 66, 55, 71, 81, 51, 58, 59, 64, 59, 62, 84, 61, 55, 66, 56, 65, 66, 65, 103, 54, 52, 68, 67, 104, 59, 56, 57, 60, 55, 57, 59, 69, 68, 60, 48, 61, 70, 51, 59, 95, 79, 59, 70, 61, 65, 96, 69, 58, 66, 54, 57, 67, 112, 53, 59, 59, 63, 75, 102, 54, 75, 56, 65, 63, 80, 72, 51, 69, 60, 62, 63, 76, 49, 68, 50, 64, 61, 61, 80, 78, 98, 47, 82, 52, 55, 61, 56, 51, 78, 51, 72, 61, 71, 64, 56, 68, 59, 81, 91, 70, 113, 46, 59, 59, 51, 77, 97, 69, 67, 50, 82, 71, 75, 57, 53, 53, 76, 62, 70, 54, 64, 68, 60, 63, 58, 55, 61, 113, 63, 68, 51, 70, 59, 67, 52, 70, 74, 56, 56, 83, 72, 54, 45, 66, 73, 60, 48, 75, 55, 72, 94, 58, 100, 55, 53, 54, 65, 62, 53, 62, 57, 71, 63, 65, 61, 86, 74, 60, 64, 65, 60, 61, 60, 56, 77, 60, 61, 60, 50, 63, 63, 60, 80, 58, 61, 59, 54, 74, 65, 69, 116, 59, 67, 61, 97, 60, 62, 51, 72, 75, 58, 61, 73, 66, 82, 58, 71, 60, 52, 82, 58, 57, 53, 59, 74, 49, 68, 64, 55, 109, 54, 78, 82, 63, 58, 88, 69, 67, 85, 75, 51, 87, 80, 49, 69, 62, 65, 58, 55, 66, 69, 55, 63, 63, 70, 52, 89, 66, 59, 90, 56, 58, 65, 53, 69, 54, 76, 49, 59, 51, 67, 52, 61, 57, 91, 66, 49, 55, 71, 59, 151, 130, 51, 64, 45, 60, 69, 57, 65, 67, 50, 71, 59, 62, 55, 61, 67, 69, 62, 95, 62, 62, 56, 85, 85, 63, 89, 74, 52, 62, 65, 49, 69, 57, 60, 66, 72, 89, 57, 61, 57, 72, 76, 47, 55, 68, 45, 58, 65, 59, 72, 59, 90, 82, 56, 70, 77, 107, 53, 68, 116, 63, 99, 60, 83, 76, 64, 77, 78, 106, 58, 59, 78, 73, 59, 51, 51, 77, 72, 71, 104, 65, 66, 50, 70, 54, 72, 63, 115, 81, 66, 67, 65, 75, 66, 69, 54, 66, 66, 94, 88, 50, 87, 55, 60, 53, 67, 77, 55, 62, 65, 79, 62, 62, 56, 62, 49, 85, 68, 66, 65, 67, 101, 68, 64, 67, 67, 82, 68, 62, 60, 74, 66, 66, 62, 64, 64, 58, 78, 57, 70, 61, 56, 58, 58, 80, 74, 54, 56, 76, 67, 60, 54, 77, 56, 68, 84, 55, 64, 69, 70, 116, 61, 78, 64, 102, 65, 82, 65, 64, 49, 53, 78, 68, 57, 52, 68, 80, 60, 116, 58, 76, 65, 58, 52, 70, 63, 47, 61, 56, 78, 74, 54, 60, 62, 69, 62, 55, 79, 88, 60, 74, 58, 69, 74, 65, 59, 60, 62, 42, 87, 50, 64, 64, 78, 70, 60, 75, 69, 87, 67, 45, 80, 60, 78, 66, 53, 67, 50, 76, 58, 84, 66, 61, 60, 72, 64, 72, 64, 64, 64, 73, 57, 71, 58, 65, 
75, 68, 65, 49, 89, 56, 72, 59, 74, 67, 65, 67, 61, 50, 98, 68, 64, 54, 47, 63, 60, 61, 43, 109, 82, 60, 103, 62, 84, 59, 56, 77, 76, 56, 81, 54, 106, 82, 59, 88, 57, 55, 69, 78, 57, 71, 54, 65, 84, 59, 57, 65, 63, 57, 58, 59, 76, 53, 61, 68, 59, 79, 78, 48, 94, 57, 52, 95, 58, 71, 57, 75, 83, 68, 54, 70, 68, 65, 53, 67, 63, 100, 62, 72, 78, 62, 59, 62, 64, 54, 61, 57, 81, 60, 63, 51, 54, 87, 81, 66, 65, 68, 65, 59, 59, 46, 78, 50, 57, 82, 50, 86, 97, 61, 56, 96, 78, 56, 75, 39, 58, 63, 63, 59, 50, 50, 95, 48, 63, 68, 87, 56, 70, 73, 63, 57, 60, 53, 84, 51, 57, 61, 55, 59, 72, 81, 61, 61, 62, 54, 66, 56, 70, 56, 64, 58, 67, 66, 67, 69, 52, 66, 51, 75, 73, 62, 63, 94, 44, 59, 59, 60, 64, 92, 60, 66, 62, 67, 60, 80, 62, 89, 72, 63, 56, 54, 59, 74, 109, 69, 56, 58, 78, 61, 62, 54, 51, 58, 66, 78, 87, 67, 63, 46, 54, 69, 68, 66, 66, 68, 58, 60, 48, 64, 90, 62, 58, 54, 66, 82, 75, 94, 66, 41, 71, 63, 62, 103, 51, 56, 72, 65, 74, 67, 63, 80, 80, 110, 62, 84, 74, 47, 66, 55, 70, 52, 63, 56, 54, 64, 71, 56, 62, 59, 66, 66, 74, 99, 66, 57, 127, 64, 57, 81, 56, 52, 67, 67, 69, 50, 77, 68, 69, 53, 66, 65, 65, 57, 98, 61, 53, 71, 92, 78, 93, 92, 57, 51, 65, 72, 57, 49, 55, 45, 61, 76, 59, 55, 49, 69, 61, 87, 112, 71, 47, 68, 60, 58, 56, 72, 56, 60, 83, 55, 66, 65, 64, 65, 55, 59, 63, 63, 52, 70, 56, 68, 54, 60, 59, 65, 72, 75, 77, 62, 80, 75, 73, 50, 60, 56, 86, 55, 62, 56, 61, 63, 62, 59, 63, 64, 60, 60, 59, 72, 52, 76, 94, 64, 66, 69, 61, 68, 55, 55, 69, 59, 54, 89, 75, 50, 67, 64, 70, 57, 79, 66, 57, 61, 69, 59, 69, 97, 62, 64, 86, 60, 62, 52, 63, 51, 55, 66, 85, 80, 91, 49, 39, 53, 80, 99, 76, 70, 53, 87, 69, 62, 50, 74, 68, 72, 53, 68, 69, 65, 72, 72, 50, 51, 74, 48, 55, 61, 52, 63, 71, 61, 62, 70, 54, 56, 81, 57, 51, 43, 55, 61, 79, 50, 47, 61, 51, 69, 55, 56, 53, 73, 56, 76, 57, 95, 85, 57, 90, 63, 66, 60, 85, 73, 83, 84, 69, 58, 75, 42, 65, 64, 105, 90, 65, 60, 58, 80, 65, 64, 60, 55, 63, 74, 84, 64, 77, 63, 85, 53, 58, 67, 68, 63, 96, 89, 97, 78, 97, 90, 78, 48, 66, 55, 66, 63, 70, 90, 77, 68, 64, 70, 57, 44, 53, 72, 57, 69, 61, 74, 74, 63, 66, 60, 74, 61, 58, 46, 51, 48, 79, 58, 110, 65, 52, 74, 57, 59, 77, 57, 55, 61, 69, 48, 59, 63, 62, 74, 74, 69, 62, 67, 58, 72, 54, 56, 62, 60, 60, 93, 57, 62, 57, 80, 63, 65, 58, 67, 61, 45, 69, 68, 94, 57, 59, 122, 71, 68, 68, 83, 56, 83, 101, 69, 62, 90, 59, 64, 69, 66, 65, 54, 71, 62, 74, 67, 70, 66, 64, 74, 42, 59, 74, 64, 46, 52, 64, 71, 71, 64, 96, 66, 60, 56, 44, 67, 79, 54, 68, 87, 48, 59, 66, 58, 51, 72, 54, 61, 69, 74, 67, 65, 80, 55, 55, 47, 75, 60, 54, 47, 61, 107, 61, 69, 48, 73, 78, 90, 72, 44, 78, 58, 82, 72, 41, 65, 54, 88, 85, 68, 74, 64, 58, 50, 61, 103, 48, 60, 63, 65, 112, 44, 59, 76, 57, 60, 58, 60, 53, 83, 80, 51, 50, 56, 55, 64, 69, 47, 79, 115, 79, 64, 51, 68, 58, 57, 113, 61, 54, 63, 77, 68, 61, 83, 101, 53, 57, 79, 92, 58, 68, 62, 90, 51, 60, 69, 64, 87, 52, 67, 61, 71, 65, 77, 67, 67, 63, 47, 53, 68, 70, 54, 71, 93, 47, 49, 73, 60, 53, 71, 76, 51, 67, 62, 74, 96, 60, 60, 54, 51, 44, 54, 66, 52, 64, 71, 69, 79, 72, 92, 75, 64, 52, 87, 65, 73, 84, 68, 65, 81, 83, 71, 54, 65, 67, 53, 62, 57, 66, 72, 85, 50, 71, 60, 70, 79, 77, 74, 56, 61, 57, 59, 90, 67, 75, 48, 76, 45, 46, 52, 59, 110, 91, 52, 68, 90, 48, 88, 71, 59, 74, 96, 65, 57, 59, 62, 70, 56, 56, 63, 48, 83, 52, 68, 65, 79, 70, 60, 62, 64, 40, 53, 66, 58, 80, 60, 58, 65, 50, 75, 115, 58, 55, 72, 72, 67, 88, 74, 103, 51, 57, 75, 71, 78, 70, 85, 48, 67, 52, 40, 58, 60, 61, 58, 62, 58, 87, 45, 78, 58, 51, 56, 76, 48, 71, 84, 69, 66, 62, 84, 62, 64, 90, 45, 83, 74, 69, 62, 77, 
66, 60, 82, 65, 50, 65, 78, 62, 52, 49, 48, 51, 100, 66, 82, 64, 84, 52, 62, 57, 56, 56, 60, 52, 48, 57, 65, 68, 57, 70, 50, 57, 58, 54, 70, 58, 52, 90, 64, 54, 37, 81, 74, 65, 61, 55, 58, 44, 39, 66, 58, 73, 52, 52, 100, 83, 72, 63, 74, 45, 67, 69, 68, 61, 55, 69, 58, 82, 68, 49, 69, 65, 40, 79, 126, 67, 82, 73, 58, 73, 62, 65, 72, 57, 58, 78, 58, 56, 71, 62, 60, 75, 49, 57, 79, 49, 76, 61, 69, 52, 57, 59, 84, 69, 58, 69, 56, 67, 55, 80, 66, 72, 53, 75, 58, 74, 52, 58, 64, 98, 78, 50, 64, 90, 59, 58, 68, 66, 61, 51, 62, 49, 56, 63, 67, 58, 52, 59, 47, 39, 74, 66, 47, 56, 67, 53, 70, 67, 57, 57, 52, 61, 90, 64, 49, 54, 41, 57, 92, 90, 67, 47, 51, 77, 71, 60, 67, 99, 61, 81, 68, 65, 79, 58, 50, 76, 50, 56, 79, 72, 76, 69, 88, 76, 85, 54, 53, 78, 53, 87, 59, 72, 81, 60, 41, 57, 60, 87, 71, 56, 76, 71, 64, 52, 91, 51, 52, 60, 62, 61, 62, 59, 76, 48, 69, 70, 66, 64, 58, 50, 64, 83, 117, 56, 89, 61, 58, 47, 79, 52, 77, 72, 92, 55, 85, 68, 56, 49, 71, 63, 67, 72, 63, 56, 59, 56, 68, 66, 73, 55, 61, 63, 84, 70, 52, 57, 73, 84, 76, 51, 50, 64, 70, 60, 78, 76, 67, 54, 73, 49, 47, 60, 61, 50, 71, 59, 82, 59, 60, 75, 63, 70, 45, 53, 69, 57, 83, 57, 60, 73, 84, 55, 67, 94, 49, 58, 77, 54, 85, 70, 71, 55, 67, 65, 51, 63, 53, 58, 79, 62, 68, 60, 57, 64, 59, 54, 76, 56, 60, 55, 75, 59, 57, 98, 54, 55, 38, 70, 66, 78, 112, 72, 63, 49, 72, 59, 81, 86, 53, 76, 79, 63, 49, 68, 70, 65, 68, 64, 72, 84, 51, 67, 63, 84, 69, 61, 57, 75, 48, 56, 68, 60, 64, 69, 56, 60, 80, 65, 83, 63, 45, 67, 65, 66, 54, 52, 106, 66, 58, 74, 43, 78, 51, 60, 67, 48, 61, 57, 59, 53, 61, 48, 56, 86, 67, 59, 58, 68, 41, 74, 67, 67, 42, 57, 63, 52, 90, 61, 52, 96, 74, 64, 68, 56, 62, 69, 76, 56, 65, 108, 65, 65, 48, 90, 47, 65, 69, 67, 42, 49, 81, 55, 63, 56, 64, 63, 51, 72, 58, 56, 65, 110, 89, 81, 65, 119, 80, 77, 59, 54, 86, 60, 59, 54, 93, 81, 44, 66, 37, 62, 58, 51, 67, 106, 54, 95, 98, 50, 63, 55, 50, 56, 63, 73, 69, 87, 73, 65, 45, 70, 54, 47, 65, 61, 75, 52, 60, 64, 55, 57, 50, 51, 70, 50, 54, 58, 81, 23, 60, 53, 55, 61, 63, 59, 76, 85, 85, 63, 57, 73, 71, 67, 54, 61, 68, 56, 78, 62, 53, 60, 59, 28, 63, 65, 73, 66, 60, 75, 57, 80, 49, 59, 56, 72, 63, 70, 64, 74, 67, 70, 62, 69, 64, 52, 71, 55, 75, 59, 67, 58, 65, 61, 54, 63, 68, 70, 83, 67, 55, 80, 43, 62, 50, 59, 80, 55, 45, 53, 63, 51, 56, 61, 66, 68, 53, 63, 40, 59, 62, 85, 56, 65, 56, 83, 76, 90, 67, 80, 56, 106, 47, 59, 67, 68, 47, 52, 61, 47, 63, 59, 60, 70, 54, 61, 67, 82, 54, 57, 75, 64, 55, 64, 109, 72, 66, 60, 62, 66, 57, 77, 68, 53, 60, 62, 83, 73, 83, 70, 83, 73, 46, 64, 57, 45, 89, 77, 97, 54, 86, 78, 66, 65, 79, 64, 76, 69, 56, 103, 74, 75, 67, 63, 66, 76, 69, 54, 68, 69, 60, 80, 65, 77, 61, 66, 68, 59, 71, 108, 53, 76, 52, 79, 62, 66, 61, 86, 52, 59, 50, 66, 57, 60, 77, 71, 67, 51, 61, 58, 114, 69, 94, 60, 66, 54, 52, 60, 47, 62, 67, 63, 60, 75, 56, 69, 65, 87, 53, 92, 54, 58, 90, 59, 58, 61, 63, 66, 65, 67, 54, 78, 52, 60, 77, 80, 56, 77, 66, 47, 64, 54, 43, 58, 101, 65, 79, 73, 43, 54, 62, 66, 52, 58, 58, 66, 65, 99, 54, 54, 69, 77, 59, 77, 58, 60, 76, 66, 71, 60, 43, 65, 64, 87, 51, 87, 61, 77, 57, 87, 43, 62, 61, 71, 61, 58, 53, 62, 61, 63, 71, 40, 60, 76, 68, 55, 49, 54, 49, 71, 59, 61, 90, 53, 60, 102, 67, 84, 80, 88, 60, 72, 93, 62, 76, 61, 88, 58, 64, 48, 97, 65, 64, 98, 57, 56, 64, 52, 78, 73, 68, 58, 47, 86, 65, 72, 67, 85, 86, 75, 81, 53, 86, 67, 56, 63, 72, 61, 68, 58, 51, 69, 66, 64, 61, 56, 65, 76, 79, 64, 82, 60, 74, 83, 66, 57, 75, 81, 55, 59, 58, 59, 66, 70, 69, 51, 55, 76, 64, 78, 58, 73, 63, 63, 68, 60, 74, 77, 59, 50, 60, 75, 73, 86, 41, 61, 
53, 69, 49, 54, 95, 98, 55, 54, 73, 72, 70, 56, 69, 61, 52, 57, 67, 72, 71, 63, 63, 70, 67, 73, 61, 73, 51, 61, 62, 66, 68, 65, 58, 77, 68, 72, 61, 75, 70, 64, 59, 80, 47, 56, 43, 64, 47, 60, 53, 51, 55, 66, 59, 74, 88, 74, 83, 69, 67, 57, 43, 61, 73, 54, 55, 60, 68, 72, 61, 59, 63, 58, 72, 67, 66, 56, 68, 55, 53, 47, 61, 87, 71, 50, 70, 64, 60, 72, 94, 56, 70, 62, 66, 52, 65, 81, 55, 65, 53, 61, 58, 64, 71, 62, 63, 66, 66, 81, 68, 55, 63, 68, 69, 59, 63, 58, 72, 73, 66, 85, 68, 69, 55, 80, 105, 77, 54, 54, 54, 59, 59, 90, 56, 76, 70, 72, 68, 57, 63, 96, 54, 83, 55, 86, 58, 56, 67, 70, 68, 70, 82, 62, 88, 52, 65, 86, 60, 53, 73, 70, 84, 65, 68, 49, 57, 53, 61, 54, 69, 65, 59, 56, 71, 51, 54, 65, 72, 62, 54, 81, 86, 71, 69, 87, 69, 68, 61, 51, 94, 85, 55, 91, 46, 73, 61, 59, 73, 71, 81, 63, 76, 62, 51, 50, 73, 60, 50, 89, 76, 66, 67, 79, 79, 69, 59, 68, 101, 66, 66, 62, 60, 53, 108, 68, 56, 64, 59, 59, 72, 72, 54, 68, 62, 65, 49, 76, 58, 53, 93, 64, 61, 54, 44, 70, 59, 62, 74, 71, 67, 58, 58, 69, 64, 53, 50, 65, 53, 64, 57, 131, 96, 59, 48, 56, 66, 74, 62, 65, 76, 82, 62, 58, 70, 61, 51, 67, 59, 56, 67, 66, 61, 66, 70, 50, 61, 79, 59, 56, 74, 47, 61, 75, 70, 64, 69, 74, 71, 44, 49, 65, 90, 52, 54, 72, 67, 73, 71, 73, 45, 131, 94, 68, 66, 61, 64, 51, 65, 66, 110, 63, 74, 57, 90, 37, 66, 66, 46, 78, 73, 72, 76, 65, 108, 73, 80, 81, 64, 74, 51, 70, 71, 54, 51, 104, 74, 54, 60, 73, 66, 59, 45, 65, 57, 57, 63, 139, 58, 46, 44, 68, 87, 69, 140, 69, 54, 66, 62, 65, 72, 50, 59, 61, 53, 66, 74, 77, 63, 60, 59, 71, 46, 66, 54, 54, 64, 66, 59, 65, 79, 72, 84, 64, 66, 78, 71, 71, 66, 69, 68, 75, 94, 57, 70, 52, 74, 47, 65, 56, 59, 54, 67, 46, 64, 61, 49, 56, 81, 62, 66, 61, 74, 87, 68, 66, 55, 64, 51, 58, 70, 79, 80, 59, 59, 71, 58, 54, 66, 59, 62, 66, 74, 60, 59, 72, 51, 70, 68, 55, 53, 61, 72, 67, 55, 64, 65, 61, 41, 68, 56, 60, 62, 69, 77, 76, 63, 79, 60, 62, 70, 70, 56, 70, 49, 66, 98, 68, 74, 82, 51, 71, 76, 60, 58, 76, 54, 67, 57, 63, 59, 87, 58, 57, 60, 52, 75, 62, 70, 54, 69, 51, 59, 67, 71, 67, 76, 87, 69, 71, 59, 76, 62, 61, 95, 85, 53, 65, 67, 64, 55, 54, 55, 55, 69, 57, 55, 68, 79, 52, 62, 56, 81, 59, 50, 60, 75, 83, 62, 90, 49, 58, 64, 100, 78, 65, 53, 56, 65, 69, 91, 55, 68, 51, 52, 62, 63, 50, 56, 66, 68, 80, 68, 64, 70, 43, 66, 85, 80, 65, 51, 50, 61, 56, 63, 68, 89, 49, 69, 56, 59, 52, 59, 91, 125, 64, 62, 71, 67, 60, 62, 53, 81, 47, 60, 62, 84, 58, 75, 63, 57, 67, 89, 51, 94, 66, 72, 68, 64, 49, 74, 88, 76, 58, 82, 59, 69, 64, 67, 66, 51, 59, 65, 61, 84, 61, 68, 67, 57, 66, 62, 58, 51, 62, 62, 69, 46, 56, 71, 59, 58, 80, 50, 69, 75, 66, 78, 57, 91, 70, 64, 57, 55, 55, 54, 42, 68, 58, 58, 61, 59, 65, 48, 56, 93, 59, 64, 67, 70, 82, 56, 47, 53, 60, 47, 59, 63, 68, 68, 68, 57, 62, 59, 56, 78, 54, 60, 46, 77, 78, 77, 52, 80, 57, 73, 65, 70, 72, 54, 51, 100, 54, 66, 57, 64, 63, 72, 70, 61, 70, 62, 60, 59, 67, 72, 92, 48, 49, 60, 58, 58, 90, 75, 70, 60, 67, 42, 76, 45, 62, 56, 90, 68, 79, 59, 53, 64, 62, 63, 39, 49, 53, 57, 59, 72, 60, 84, 70, 66, 42, 47, 81, 61, 73, 78, 56, 59, 57, 82, 71, 63, 43, 71, 64, 76, 45, 71, 64, 59, 61, 54, 74, 76, 59, 64, 49, 94, 68, 83, 62, 51, 52, 58, 49, 63, 51, 66, 53, 60, 78, 69, 57, 64, 68, 59, 59, 55, 61, 58, 50, 67, 82, 55, 75, 62, 78, 55, 55, 71, 96, 55, 66, 60, 58, 74, 55, 60, 62, 51, 56, 65, 47, 69, 70, 42, 58, 77, 71, 56, 80, 61, 50, 70, 75, 61, 75, 86, 59, 62, 70, 65, 59, 67, 65, 63, 68, 79, 65, 46, 82, 69, 59, 63, 54, 79, 55, 58, 69, 69, 76, 55, 62, 61, 83, 53, 58, 83, 54, 79, 67, 75, 55, 69, 61, 91, 60, 61, 57, 71, 39, 55, 65, 72, 78, 55, 68, 50, 
108, 59, 54, 65, 94, 72, 75, 55, 54, 68, 59, 54, 62, 82, 68, 62, 66, 60, 76, 63, 51, 64, 63, 58, 75, 68, 50, 66, 54, 62, 72, 75, 64, 90, 65, 77, 90, 75, 54, 37, 91, 51, 64, 51, 60, 78, 56, 69, 60, 83, 62, 68, 52, 53, 61, 75, 74, 69, 77, 55, 69, 72, 76, 61, 65, 62, 59, 65, 48, 56, 75, 47, 87, 57, 74, 65, 70, 48, 64, 62, 85, 82, 73, 63, 56, 50, 82, 69, 67, 63, 55, 89, 75, 63, 107, 64, 61, 67, 84, 76, 57, 56, 78, 87, 63, 70, 64, 63, 58, 57, 74, 63, 82, 55, 67, 48, 70, 60, 82, 51, 60, 66, 70, 70, 63, 82, 65, 83, 91, 83, 66, 54, 70, 53, 63, 46, 88, 54, 63, 63, 70, 52, 69, 58, 152, 68, 65, 58, 46, 86, 55, 50, 68, 84, 49, 74, 63, 57, 59, 87, 73, 74, 66, 67, 86, 63, 61, 61, 67, 56, 69, 63, 61, 53, 73, 66, 69, 76, 47, 55, 64, 56, 38, 48, 63, 64, 72, 64, 53, 73, 55, 55, 51, 83, 64, 51, 56, 68, 63, 56, 79, 68, 56, 58, 70, 72, 70, 82, 71, 58, 59, 60, 74, 63, 63, 81, 68, 58, 39, 58, 73, 56, 50, 56, 46, 67, 86, 93, 65, 57, 89, 58, 40, 70, 96, 46, 82, 74, 80, 74, 50, 71, 68, 103, 60, 61, 59, 58, 49, 54, 73, 105, 69, 78, 66, 55, 86, 65, 61, 47, 71, 57, 110, 86, 62, 68, 64, 65, 63, 60, 64, 77, 50, 66, 63, 69, 63, 60, 63, 76, 83, 59, 77, 47, 70, 62, 68, 76, 81, 74, 48, 82, 67, 63, 68, 84, 67, 56, 65, 46, 62, 82, 80, 74, 54, 58, 71, 60, 57, 78, 50, 71, 56, 68, 66, 61, 69, 54, 57, 76, 70, 58, 49, 57, 56, 66, 64, 48, 55, 64, 81, 62, 102, 54, 66, 86, 77, 80, 68, 69, 66, 61, 44, 58, 45, 64, 57, 103, 61, 95, 46, 74, 50, 57, 56, 73, 61, 61, 47, 78, 53, 76, 45, 67, 64, 66, 61, 67, 63, 95, 78, 46, 55, 75, 47, 45, 65, 55, 62, 51, 60, 57, 54, 59, 58, 75, 63, 66, 49, 77, 60, 83, 58, 52, 80, 40, 58, 56, 90, 97, 63, 60, 68, 58, 54, 55, 58, 51, 65, 66, 58, 72, 78, 57, 52, 57, 63, 61, 61, 70, 65, 84, 66, 63, 75, 78, 70, 86, 63, 65, 69, 61, 64, 56, 65, 82, 60, 55, 50, 60, 56, 48, 53, 77, 83, 70, 85, 63, 109, 59, 81, 73, 68, 75, 75, 50, 76, 122, 51, 62, 65, 60, 71, 69, 80, 90, 50, 61, 88, 68, 48, 79, 48, 108, 58, 53, 66, 54, 64, 84, 68, 74, 134, 61, 64, 71, 54, 62, 60, 73, 83, 72, 57, 81, 134, 72, 61, 50, 61, 64, 73, 143, 62, 43, 71, 53, 56, 94, 57, 63, 77, 54, 52, 68, 74, 57, 69, 50, 46, 61, 71, 58, 80, 69, 63, 63, 69, 66, 65, 79, 54, 54, 74, 83, 89, 59, 62, 59, 72, 58, 53, 53, 82, 65, 54, 61, 74, 50, 66, 67, 71, 71, 63, 67, 42, 113, 59, 64, 108, 55, 63, 53, 48, 63, 55, 70, 55, 94, 63, 46, 54, 54, 82, 79, 55, 83, 61, 73, 77, 53, 84, 71, 64, 66, 52, 56, 60, 65, 79, 56, 46, 62, 57, 85, 49, 57, 61, 57, 59, 71, 84, 67, 69, 63, 60, 62, 69, 63, 72, 57, 66, 64, 70, 116, 52, 65, 53, 44, 60, 52, 91, 75, 91, 58, 75, 59, 70, 58, 93, 69, 59, 51, 67, 56, 52, 69, 62, 56, 50, 76, 66, 89, 50, 73, 59, 64, 57, 73, 79, 56, 45, 90, 80, 56, 68, 64, 63, 72, 77, 52, 57, 76, 57, 56, 67, 64, 71, 71, 69, 63, 66, 57, 71, 50, 103, 61, 65, 105, 58, 62, 120, 54, 52, 73, 69, 65, 65, 72, 58, 76, 74, 65, 52, 62, 54, 72, 78, 58, 67, 62, 61, 56, 86, 58, 107, 96, 53, 55, 92, 68, 74, 77, 74, 58, 79, 58, 64, 82, 63, 54, 69, 91, 72, 56, 47, 51, 73, 64, 59, 55, 78, 61, 83, 52, 46, 60, 70, 60, 51, 73, 62, 50, 88, 49, 69, 67, 67, 67, 73, 63, 59, 67, 65, 44, 57, 78, 61, 63, 56, 77, 47, 63, 62, 47, 80, 73, 60, 78, 69, 62, 75, 76, 67, 59, 55, 80, 72, 62, 95, 64, 65, 67, 72, 55, 83, 73, 61, 75, 64, 78, 61, 67, 61, 64, 57, 74, 48, 68, 69, 69, 52, 61, 80, 44, 68, 55, 86, 77, 74, 62, 56, 67, 69, 80, 71, 83, 61, 62, 76, 67, 63, 63, 60, 66, 60, 60, 60, 91, 76, 61, 61, 55, 59, 58, 70, 74, 65, 75, 106, 62, 94, 61, 60, 69, 65, 55, 60, 58, 52, 59, 47, 95, 65, 69, 62, 107, 70, 62, 38, 62, 55, 72, 49, 56, 70, 57, 58, 73, 86, 63, 58, 72, 54, 54, 65, 77, 56, 52, 65, 53, 70, 74, 58, 
55, 53, 72, 84, 48, 62, 57, 60, 77, 60, 60, 65, 61, 59, 53, 59, 83, 59, 69, 84, 94, 62, 83, 58, 64, 61, 85, 55, 56, 59, 92, 59, 69, 57, 55, 89, 64, 57, 55, 94, 62, 63, 55, 52, 102, 65, 69, 47, 76, 63, 56, 80, 58, 56, 56, 101, 65, 55, 61, 60, 63, 62, 84, 84, 71, 58, 53, 56, 82, 87, 60, 93, 63, 66, 48, 81, 54, 70, 69, 65, 101, 90, 54, 69, 88, 85, 91, 48, 50, 47, 56, 58, 49, 72, 76, 60, 51, 70, 57, 70, 71, 70, 76, 62, 65, 71, 73, 72, 51, 56, 54, 74, 63, 69, 64, 64, 67, 86, 74, 57, 55, 55, 76, 79, 50, 60, 74, 53, 73, 71, 70, 53, 60, 72, 82, 81, 79, 64, 57, 58, 67, 109, 86, 54, 77, 71, 58, 48, 68, 51, 67, 65, 58, 51, 58, 78, 61, 88, 58, 65, 74, 71, 75, 54, 73, 117, 78, 55, 67, 48, 75, 59, 55, 65, 71, 76, 61, 41, 59, 57, 105, 43, 66, 61, 63, 67, 58, 57, 76, 70, 72, 89, 73, 65, 66, 55, 75, 53, 75, 62, 53, 69, 58, 72, 86, 64, 58, 99, 53, 67, 76, 93, 58, 44, 56, 48, 57, 54, 71, 54, 82, 55, 68, 64, 64, 64, 78, 100, 49, 62, 70, 58, 140, 73, 59, 102, 70, 73, 55, 66, 61, 92, 55, 59, 77, 65, 56, 53, 51, 65, 71, 65, 49, 57, 77, 66, 97, 58, 59, 58, 72, 70, 59, 72, 81, 81, 77, 54, 74, 59, 61, 58, 66, 58, 48, 86, 58, 50, 52, 67, 68, 61, 61, 63, 67, 96, 63, 56, 60, 70, 59, 66, 55, 74, 59, 50, 63, 111, 54, 69, 81, 61, 75, 89, 65, 48, 57, 59, 58, 57, 80, 54, 62, 76, 65, 58, 59, 54, 71, 58, 55, 67, 62, 71, 70, 59, 58, 71, 57, 64, 52, 58, 60, 57, 76, 65, 75, 62, 69, 80, 50, 83, 55, 46, 57, 66, 73, 62, 65, 68, 54, 61, 96, 68, 61, 62, 54, 55, 49, 70, 74, 57, 66, 86, 83, 61, 75, 63, 60, 61, 45, 52, 50, 68, 74, 98, 89, 65, 71, 94, 67, 60, 74, 62, 69, 59, 64, 78, 54, 59, 80, 63, 81, 56, 79, 57, 56, 61, 65, 76, 61, 64, 61, 69, 74, 55, 55, 75, 71, 58, 62, 62, 50, 52, 77, 61, 51, 67, 62, 59, 56, 58, 55, 76, 68, 70, 60, 62, 63, 51, 42, 55, 43, 58, 58, 76, 68, 67, 51, 63, 55, 75, 89, 49, 60, 78, 54, 55, 81, 65, 71, 73, 77, 54, 56, 49, 57, 63, 49, 85, 109, 101, 84, 83, 76, 75, 61, 57, 99, 60, 57, 65, 58, 62, 55, 71, 64, 70, 68, 59, 68, 52, 72, 58, 57, 57, 72, 78, 92, 66, 63, 74, 103, 56, 83, 61, 83, 60, 47, 58, 57, 77, 74, 66, 56, 63, 62, 68, 55, 88, 68, 51, 73, 61, 54, 72, 67, 61, 52, 67, 66, 50, 52, 70, 48, 56, 64, 77, 53, 60, 72, 96, 75, 54, 66, 65, 47, 55, 72, 57, 77, 57, 61, 52, 75, 68, 62, 83, 52, 73, 76, 57, 87, 63, 91, 62, 73, 71, 66, 53, 42, 65, 62, 70, 61, 62, 66, 58, 71, 54, 53, 76, 64, 60, 62, 68, 59, 53, 83, 51, 69, 79, 113, 51, 76, 109, 77, 67, 61, 51, 66, 66, 56, 69, 63, 64, 52, 65, 118, 56, 83, 60, 64, 63, 59, 48, 63, 63, 82, 73, 57, 64, 103, 52, 48, 70, 74, 92, 43, 59, 51, 76, 51, 76, 77, 57, 56, 64, 48, 82, 42, 84, 73, 39, 57, 92, 65, 64, 54, 59, 49, 64, 113, 43, 76, 69, 65, 62, 62, 48, 57, 75, 53, 63, 66, 76, 67, 83, 76, 59, 69, 65, 68, 54, 70, 61, 64, 62, 64, 89, 71, 69, 60, 76, 59, 82, 59, 98, 51, 53, 98, 64, 72, 65, 86, 64, 56, 54, 68, 57, 48, 61, 73, 74, 62, 66, 68, 67, 60, 74, 56, 62, 64, 60, 66, 55, 94, 97, 69, 66, 56, 72, 59, 62, 56, 71, 48, 65, 64, 72, 84, 45, 65, 72, 48, 68, 77, 60, 68, 60, 62, 51, 75, 59, 48, 73, 55, 69, 55, 49, 89, 65, 79, 83, 54, 51, 74, 71, 54, 77, 54, 53, 88, 63, 58, 71, 72, 81, 68, 53, 74, 71, 76, 57, 63, 89, 51, 50, 53, 79, 63, 82, 66, 52, 61, 72, 86, 49, 54, 93, 85, 58, 65, 56, 61, 65, 60, 60, 51, 78, 57, 73, 70, 76, 91, 73, 62, 84, 43, 51, 69, 68, 64, 58, 51, 56, 81, 114, 53, 66, 74, 67, 63, 53, 78, 64, 60, 80, 67, 67, 64, 98, 57, 58, 46, 68, 61, 67, 62, 69, 58, 67, 44, 59, 52, 66, 64, 81, 67, 73, 78, 74, 58, 74, 72, 83, 52, 58, 64, 68, 52, 80, 71, 101, 71, 81, 48, 78, 64, 51, 58, 97, 90, 65, 69, 59, 54, 69, 63, 75, 79, 76, 64, 55, 73, 53, 58, 53, 87, 67, 84, 63, 58, 
65, 66, 50, 54, 45, 57, 78, 71, 64, 43, 88, 75, 134, 66, 67, 57, 57, 65, 66, 39, 55, 64, 70, 114, 57, 70, 65, 67, 67, 73, 58, 89, 64, 62, 53, 56, 76, 60, 81, 58, 45, 83, 58, 42, 46, 76, 61, 59, 50, 61, 66, 62, 64, 66, 59, 55, 59, 80, 78, 67, 56, 62, 54, 69, 52, 53, 49, 63, 52, 54, 58, 89, 78, 40, 94, 67, 39, 50, 43, 77, 60, 66, 70, 68, 63, 83, 46, 54, 66, 61, 73, 63, 56, 65, 60, 71, 56, 54, 69, 42, 51, 61, 60, 71, 50, 58, 77, 68, 76, 65, 65, 57, 70, 47, 58, 77, 66, 52, 96, 66, 64, 58, 70, 96, 53, 59, 111, 161, 69, 63, 59, 64, 65, 67, 47, 58, 56, 66, 73, 62, 60, 50, 107, 59, 45, 65, 63, 57, 55, 83, 46, 61, 63, 64, 67, 69, 61, 72, 51, 74, 62, 67, 72, 67, 40, 71, 70, 56, 59, 52, 62, 81, 44, 60, 46, 73, 76, 68, 71, 61, 69, 62, 68, 78, 71, 110, 77, 58, 54, 80, 59, 80, 35, 68, 56, 79, 84, 69, 46, 78, 62, 66, 64, 86, 93, 55, 43, 64, 51, 85, 105, 60, 82, 63, 109, 50, 55, 55, 60, 52, 80, 58, 84, 53, 71, 77, 75, 53, 57, 88, 23, 94, 72, 40, 64, 52, 109, 63, 85, 49, 55, 44, 73, 99, 89, 57, 60, 79, 61, 60, 100, 71, 57, 68, 79, 67, 57, 76, 57, 69, 49, 54, 61, 58, 69, 117, 62, 70, 93, 76, 56, 77, 42, 55, 77, 60, 65, 67, 71, 66, 64, 60, 82, 62, 40, 73, 74, 51, 64, 66, 57, 69, 65, 67, 60, 62, 50, 65, 44, 62, 81, 61, 86, 80, 89, 45, 70, 94, 50, 58, 60, 55, 46, 82, 61, 85, 52, 70, 63, 86, 56, 63, 57, 56, 62, 68, 56, 48, 61, 71, 45, 56, 61, 54, 58, 64, 63, 57, 74, 53, 62, 45, 34, 61, 61, 54, 47, 69, 81, 67, 95, 57, 100, 46, 79, 46, 71, 59, 81, 50, 70, 62, 61, 58, 62, 70, 72, 64, 99, 64, 66, 58, 72, 69, 62, 51, 80, 47, 72, 66, 55, 70, 114, 82, 77, 75, 66, 66, 50, 54, 66, 56, 83, 77, 48, 78, 56, 51, 75, 73, 47, 62, 62, 80, 81, 51, 58, 73, 68, 53, 55, 60, 64, 64, 87, 56, 58, 64, 57, 66, 75, 78, 48, 40, 58, 110, 75, 61, 71, 52, 49, 50, 68, 50, 49, 73, 48, 66, 93, 69, 52, 87, 66, 72, 75, 50, 69, 54, 52, 82, 92, 45, 54, 51, 70, 46, 80, 85, 55, 55, 67, 53, 59, 59, 60, 61, 66, 73, 76, 73, 66, 57, 72, 66, 67, 88, 67, 63, 41, 88, 77, 46, 63, 90, 73, 64, 44, 66, 48, 85, 60, 44, 64, 92, 109, 58, 58, 68, 61, 66, 36, 74, 76, 45, 61, 54, 78, 79, 56, 65, 57, 75, 93, 52, 80, 69, 104, 71, 54, 69, 106, 87, 74, 71, 58, 50, 71, 48, 74, 86, 53, 53, 53, 46, 68, 59, 73, 92, 62, 66, 98, 59, 57, 63, 71, 77, 61, 63, 62, 61, 71, 62, 65, 59, 77, 62, 50, 55, 61, 60, 83, 67, 77, 73, 71, 52, 76, 76, 76, 84, 55, 70, 67, 72, 72, 57, 71, 75, 51, 51, 57, 77, 47, 101, 81, 68, 49, 69, 62, 52, 74, 58, 64, 67, 58, 57, 68, 62, 44, 52, 58, 85, 77, 58, 45, 43, 63, 47, 67, 51, 62, 58, 106, 74, 62, 76, 58, 81, 62, 69, 60, 81, 60, 43, 59, 78, 58, 110, 78, 85, 55, 60, 61, 75, 67, 75, 68, 82, 78, 65, 59, 46, 73, 30, 52, 63, 96, 42, 94, 65, 48, 80, 63, 55, 67, 99, 48, 70, 74, 63, 51, 76, 57, 49, 71, 67, 63, 55, 80, 63, 58, 86, 52, 97, 68, 78, 65, 90, 56, 65, 82, 78, 65, 47, 62, 72, 70, 63, 67, 49, 74, 55, 69, 66, 46, 55, 65, 74, 53, 59, 73, 49, 93, 77, 75, 84, 85, 85, 68, 80, 68, 47, 91, 66, 51, 71, 74, 79, 67, 67, 85, 45, 93, 63, 64, 51, 132, 76, 65, 48, 67, 84, 76, 56, 82, 84, 66, 72, 45, 114, 39, 68, 76, 77, 88, 77, 63, 93, 82, 67, 106, 74, 64, 79, 55, 57, 66, 48, 64, 53, 88, 40, 52, 59, 73, 52, 72, 92, 54, 72, 79, 65, 61, 63, 63, 55, 109, 70, 50, 51, 61, 113, 53, 102, 71, 43, 63, 76, 25, 63, 65, 72, 70, 67, 78, 84, 60, 53, 41, 66, 46, 45, 37, 30, 51, 55, 76, 73, 90, 84, 77, 79, 41, 76, 69, 56, 50, 58, 90, 55, 111, 63, 46, 63, 65, 63, 45, 57, 51, 70, 52, 54, 43, 88, 74, 78, 44, 82, 48, 66, 48, 71, 77, 61, 68, 87, 72, 82, 72, 111, 65, 71, 52, 72, 100, 49, 78, 93, 46, 58, 50, 93, 80, 65, 66, 62, 79, 63, 58, 75, 93, 64, 57, 44, 69, 48, 68, 77, 47, 56, 56, 
38, 54, 56, 59, 72, 61, 55, 50, 71, 72, 55, 61, 63, 65, 71, 73, 75, 50, 54, 59, 56, 69, 86, 65, 65, 97, 71, 77, 83, 60, 72, 57, 50, 56, 82, 52, 69, 64, 63, 53, 55, 70, 59, 56, 68, 54, 53, 73, 57, 85, 55, 54, 54, 48, 55, 77, 60, 65, 86, 73, 66, 64, 75, 55, 67, 49, 82, 49, 63, 58, 55, 52, 50, 64, 74, 55, 55, 64, 63, 78, 50, 51, 61, 59, 72, 59, 60, 104, 64, 66, 67, 58, 63, 66, 81, 54, 65, 66, 64, 59, 49, 55, 57, 66, 68, 60, 55, 54, 86, 96, 55, 49, 63, 60, 94, 59, 64, 102, 61, 76, 67, 70, 65, 64, 76, 53, 50, 64, 60, 77, 92, 71, 62, 70, 68, 71, 68, 64, 57, 46, 58, 68, 60, 53, 65, 63, 68, 76, 97, 57, 67, 52, 75, 60, 91, 55, 77, 63, 66, 65, 56, 52, 77, 65, 63, 58, 52, 62, 69, 64, 70, 72, 75, 68, 44, 62, 63, 64, 58, 72, 97, 74, 80, 64, 56, 110, 57, 64, 63, 38, 59, 54, 72, 82, 43, 68, 68, 76, 64, 73, 79, 68, 84, 49, 65, 49, 54, 63, 56, 71, 86, 65, 59, 58, 97, 40, 48, 74, 73, 71, 77, 61, 77, 69, 64, 77, 67, 72, 48, 86, 63, 72, 71, 42, 53, 75, 54, 89, 71, 67, 57, 66, 80, 86, 69, 70, 57, 55, 99, 70, 71, 79, 76, 38, 42, 78, 68, 71, 51, 76, 79, 62, 68, 59, 60, 65, 59, 55, 68, 46, 63, 69, 74, 66, 70, 50, 63, 70, 74, 72, 70, 45, 65, 54, 71, 69, 47, 45, 98, 44, 45, 52, 62, 58, 62, 66, 75, 56, 65, 74, 71, 57, 72, 56, 75, 73, 60, 60, 73, 61, 61, 88, 57, 69, 64, 59, 70, 56, 70, 62, 80, 68, 58, 51, 71, 74, 53, 77, 46, 72, 69, 52, 70, 69, 76, 69, 62, 48, 116, 86, 75, 68, 55, 57, 69, 72, 55, 69, 107, 85, 71, 71, 62, 45, 70, 55, 71, 67, 42, 58, 50, 100, 88, 69, 58, 49, 66, 71, 70, 47, 73, 82, 55, 70, 62, 42, 74, 79, 60, 56, 77, 49, 69, 66, 84, 64, 67, 52, 66, 82, 86, 40, 63, 79, 61, 61, 65, 68, 64, 64, 49, 80, 61, 87, 65, 66, 74, 75, 55, 57, 77, 68, 60, 90, 76, 81, 54, 57, 46, 61, 63, 72, 59, 77, 64, 75, 66, 66, 74, 70, 67, 68, 62, 69, 66, 68, 80, 80, 88, 68, 60, 79, 64, 72, 63, 60, 51, 53, 44, 54, 64, 49, 83, 65, 116, 74, 64, 60, 56, 79, 75, 84, 62, 68, 92, 66, 79, 86, 57, 76, 62, 65, 88, 87, 60, 68, 87, 97, 37, 53, 40, 71, 115, 73, 63, 57, 64, 78, 54, 62, 59, 82, 76, 67, 52, 62, 84, 66, 68, 67, 59, 50, 106, 57, 71, 100, 58, 65, 66, 51, 67, 71, 54, 73, 64, 88, 53, 68, 68, 91, 71, 50, 36, 89, 55, 78, 50, 78, 47, 58, 83, 70, 50, 56, 74, 72, 84, 49, 89, 86, 56, 68, 67, 45, 44, 61, 74, 57, 59, 58, 56, 83, 61, 89, 71, 75, 62, 75, 47, 82, 68, 61, 61, 85, 62, 68, 60, 66, 59, 58, 57, 68, 69, 94, 59, 83, 65, 53, 95, 67, 65, 86, 78, 66, 52, 76, 59, 48, 55, 97, 66, 55, 62, 57, 63, 72, 49, 58, 74, 76, 61, 65, 84, 80, 62, 55, 58, 68, 99, 66, 49, 66, 100, 74, 63, 71, 47, 95, 90, 103, 63, 66, 61, 58, 67, 66, 71, 69, 72, 57, 78, 55, 69, 58, 71, 65, 50, 54, 68, 63, 81, 54, 58, 53, 59, 72, 58, 76, 43, 71, 65, 43, 66, 70, 77, 61, 75, 45, 73, 76, 68, 46, 61, 77, 56, 56, 61, 57, 64, 81, 50, 65, 67, 53, 61, 100, 71, 79, 71, 56, 74, 79, 54, 69, 65, 56, 55, 89, 44, 54, 63, 73, 56, 75, 64, 65, 64, 62, 68, 60, 70, 73, 67, 54, 43, 48, 60, 67, 65, 91, 74, 76, 56, 76, 82, 61, 83, 66, 63, 96, 65, 59, 81, 53, 81, 66, 67, 53, 48, 56, 65, 58, 70, 63, 80, 45, 66, 67, 66, 58, 62, 66, 66, 55, 63, 79, 53, 68, 59, 46, 71, 63, 63, 81, 61, 73, 50, 84, 45, 68, 103, 57, 54, 54, 66, 66, 92, 74, 57, 78, 69, 61, 71, 66, 76, 63, 66, 69, 69, 63, 65, 57, 67, 71, 51, 70, 60, 52, 54, 88, 74, 47, 67, 66, 45, 63, 61, 70, 65, 55, 56, 66, 81, 94, 56, 80, 59, 81, 74, 63, 81, 46, 69, 76, 67, 64, 63, 66, 79, 75, 80, 89, 79, 46, 57, 113, 88, 82, 57, 55, 61, 75, 59, 58, 68, 70, 81, 69, 55, 68, 61, 109, 93, 70, 95, 54, 67, 69, 42, 86, 61, 60, 59, 54, 74, 80, 77, 65, 81, 57, 50, 90, 63, 99, 100, 60, 75, 61, 58, 64, 63, 80, 76, 52, 77, 70, 96, 56, 69, 99, 91, 92, 55, 58, 
71, 53, 71, 65, 71, 51, 45, 62, 62, 69, 67, 89, 68, 73, 68, 46, 71, 36, 75, 92, 61, 52, 54, 58, 56, 69, 67, 56, 66, 67, 36, 60, 75, 64, 56, 54, 53, 66, 47, 78, 100, 52, 58, 59, 52, 51, 63, 61, 61, 62, 76, 90, 64, 69, 55, 85, 50, 49, 85, 51, 61, 77, 81, 62, 63, 48, 81, 60, 36, 60, 59, 90, 52, 70, 74, 59, 61, 80, 26, 57, 54, 58, 72, 70, 63, 78, 58, 46, 58, 59, 65, 73, 69, 85, 84, 70, 85, 48, 66, 74, 63, 52, 67, 51, 62, 34, 75, 57, 100, 65, 64, 75, 52, 83, 74, 52, 59, 87, 67, 63, 56, 58, 64, 88, 57, 66, 85, 60, 68, 64, 63, 57, 53, 51, 75, 53, 61, 55, 94, 63, 64, 55, 69, 46, 65, 68, 55, 55, 60, 66, 64, 41, 58, 56, 83, 36, 65, 60, 67, 92, 74, 62, 79, 53, 55, 85, 59, 68, 66, 62, 65, 52, 37, 68, 55, 85, 87, 70, 55, 65, 65, 76, 66, 92, 64, 92, 58, 57, 70, 55, 124, 55, 67, 56, 63, 51, 52, 79, 87, 65, 83, 73, 88, 62, 58, 59, 70, 92, 73, 65, 60, 75, 69, 62, 63, 68, 77, 89, 60, 59, 78, 66, 68, 58, 53, 48, 75, 58, 86, 65, 77, 75, 63, 62, 55, 47, 58, 61, 64, 81, 58, 61, 75, 74, 83, 52, 57, 57, 73, 93, 69, 66, 103, 104, 64, 71, 47, 81, 61, 80, 93, 63, 61, 78, 47, 114, 66, 72, 92, 63, 54, 73, 82, 55, 68, 80, 65, 50, 86, 52, 55, 51, 85, 53, 66, 91, 58, 75, 60, 70, 50, 52, 87, 53, 74, 51, 47, 62, 58, 72, 59, 99, 67, 58, 65, 61, 95, 67, 73, 64, 64, 74, 87, 66, 85, 59, 71, 67, 65, 53, 73, 59, 97, 52, 62, 69, 103, 61, 77, 61, 67, 74, 62, 51, 61, 57, 82, 77, 52, 61, 44, 72, 60, 84, 55, 52, 112, 63, 43, 60, 83, 69, 69, 77, 64, 66, 67, 39, 68, 69, 87, 54, 67, 77, 53, 63, 72, 84, 63, 59, 60, 57, 52, 51, 60, 46, 73, 70, 92, 70, 61, 60, 53, 41, 60, 81, 61, 66, 59, 47, 58, 78, 62, 63, 68, 94, 66, 85, 60, 42, 46, 65, 58, 58, 49, 52, 64, 62, 56, 83, 65, 46, 97, 111, 62, 58, 56, 79, 58, 73, 65, 66, 75, 64, 59, 64, 64, 55, 69, 81, 57, 69, 76, 70, 60, 71, 53, 75, 80, 59, 69, 58, 57, 66, 80, 44, 73, 86, 58, 65, 58, 106, 70, 50, 70, 58, 51, 70, 77, 70, 70, 64, 77, 67, 60, 59, 132, 59, 78, 59, 85, 48, 83, 85, 76, 65, 65, 67, 63, 44, 58, 61, 77, 73, 64, 81, 93, 51, 92, 64, 63, 80, 67, 68, 62, 66, 72, 69, 69, 123, 55, 88, 68, 50, 67, 63, 61, 64, 52, 52, 36, 86, 48, 48, 64, 76, 74, 54, 67, 68, 69, 47, 65, 59, 82, 61, 82, 83, 70, 84, 75, 69, 62, 58, 78, 75, 70, 58, 75, 59, 53, 92, 75, 71, 58, 67, 91, 54, 73, 73, 67, 61, 78, 61, 71, 97, 52, 79, 54, 40, 44, 69, 64, 64, 58, 55, 59, 64, 62, 60, 74, 54, 96, 47, 95, 93, 78, 94, 55, 52, 55, 93, 58, 45, 84, 78, 70, 69, 60, 61, 66, 46, 59, 55, 77, 50, 52, 52, 67, 67, 61, 63, 42, 44, 74, 61, 96, 54, 58, 80, 60, 64, 55, 106, 58, 53, 53, 61, 59, 59, 62, 73, 75, 74, 46, 69, 60, 77, 89, 49, 56, 81, 78, 59, 67, 67, 51, 55, 106, 80, 58, 76, 104, 66, 57, 55, 85, 55, 64, 59, 46, 57, 75, 50, 58, 60, 64, 69, 61, 38, 64, 59, 64, 58, 47, 73, 60, 90, 67, 58, 81, 62, 59, 66, 50, 42, 52, 44, 105, 61, 65, 64, 56, 63, 89, 74, 41, 61, 61, 78, 66, 74, 69, 58, 63, 48, 106, 52, 67, 58, 68, 59, 100, 70, 64, 47, 61, 55, 68, 65, 65, 63, 61, 85, 59, 67, 55, 65, 57, 73, 63, 53, 55, 44, 67, 76, 42, 64, 41, 130, 61, 68, 52, 69, 63, 59, 61, 86, 73, 59, 56, 65, 89, 92, 57, 46, 43, 84, 87, 70, 64, 94, 47, 70, 74, 53, 64, 69, 59, 50, 66, 77, 63, 87, 64, 47, 65, 69, 74, 58, 60, 66, 55, 52, 63, 65, 62, 55, 61, 65, 69, 47, 70, 71, 46, 70, 74, 60, 53, 52, 66, 65, 62, 57, 87, 77, 58, 62, 45, 82, 54, 65, 55, 50, 63, 55, 62, 54, 44, 85, 73, 81, 72, 55, 62, 66, 65, 51, 69, 72, 77, 91, 55, 61, 64, 65, 63, 66, 56, 65, 63, 72, 56, 59, 59, 66, 57, 64, 73, 74, 77, 84, 48, 62, 69, 70, 45, 43, 67, 58, 62, 69, 52, 49, 59, 79, 61, 78, 61, 61, 66, 52, 61, 94, 68, 84, 71, 48, 67, 71, 60, 79, 69, 46, 38, 56, 133, 99, 58, 59, 51, 71, 47, 
59, 56, 70, 70, 75, 68, 68, 53, 81, 73, 55, 52, 65, 77, 62, 60, 60, 64, 58, 54, 68, 79, 43, 72, 61, 78, 63, 48, 69, 80, 84, 96, 69, 78, 63, 49, 57, 93, 63, 69, 63, 79, 71, 78, 68, 74, 100, 42, 68, 83, 72, 83, 59, 64, 61, 48, 51, 61, 61, 76, 61, 73, 53, 74, 63, 116, 58, 63, 65, 66, 80, 57, 67, 45, 89, 71, 62, 67, 64, 62, 59, 67, 77, 72, 63, 54, 72, 48, 57, 78, 64, 62, 45, 72, 55, 72, 62, 54, 74, 77, 59, 48, 57, 76, 74, 62, 61, 101, 74, 59, 64, 64, 60, 69, 66, 86, 56, 101, 54, 54, 51, 75, 59, 73, 60, 77, 67, 54, 43, 54, 71, 82, 52, 68, 44, 58, 71, 71, 56, 58, 54, 90, 67, 62, 66, 50, 101, 57, 59, 56, 75, 56, 72, 55, 91, 72, 44, 56, 58, 64, 48, 43, 49, 69, 58, 63, 47, 68, 58, 58, 57, 103, 61, 65, 52, 102, 87, 85, 74, 56, 64, 90, 67, 64, 43, 59, 41, 68, 57, 64, 58, 51, 73, 78, 56, 62, 65, 77, 71, 53, 108, 64, 48, 58, 63, 71, 67, 70, 67, 80, 59, 76, 74, 100, 48, 51, 60, 65, 53, 60, 68, 55, 58, 62, 64, 54, 59, 65, 56, 62, 58, 63, 68, 65, 66, 76, 81, 59, 59, 71, 56, 56, 53, 83, 54, 48, 62, 82, 78, 97, 60, 42, 73, 53, 68, 77, 71, 83, 60, 54, 66, 45, 60, 67, 56, 84, 61, 96, 57, 68, 59, 54, 79, 61, 83, 65, 122, 66, 54, 62, 70, 94, 83, 122, 58, 63, 63, 61, 48, 59, 64, 67, 65, 47, 48, 49, 55, 72, 51, 65, 60, 60, 55, 116, 71, 61, 69, 60, 63, 67, 76, 65, 62, 59, 85, 56, 68, 60, 72, 68, 89, 61, 49, 58, 50, 59, 54, 66, 57, 57, 59, 62, 72, 79, 57, 63, 69, 39, 47, 76, 51, 40, 74, 64, 57, 64, 59, 66, 68, 46, 59, 58, 68, 50, 81, 65, 85, 64, 76, 60, 66, 66, 54, 52, 64, 73, 77, 58, 64, 95, 62, 49, 69, 72, 86, 54, 80, 56, 53, 56, 89, 71, 53, 57, 71, 63, 53, 54, 48, 108, 46, 76, 64, 60, 88, 56, 48, 71, 75, 62, 88, 56, 59, 77, 61, 61, 47, 59, 61, 79, 55, 62, 73, 45, 61, 57, 72, 55, 69, 58, 49, 62, 41, 60, 69, 80, 65, 58, 83, 71, 63, 90, 70, 63, 52, 57, 84, 60, 68, 72, 57, 71, 60, 61, 42, 49, 118, 51, 84, 73, 69, 45, 59, 75, 54, 74, 53, 59, 57, 72, 70, 80, 62, 94, 65, 59, 100, 105, 63, 45, 58, 51, 69, 54, 61, 53, 59, 55, 55, 46, 55, 64, 52, 48, 76, 81, 59, 79, 74, 66, 45, 105, 74, 60, 64, 75, 51, 68, 81, 69, 67, 105, 62, 69, 72, 51, 67, 73, 69, 83, 56, 64, 65, 81, 89, 63, 40, 75, 65, 74, 80, 55, 58, 94, 61, 70, 108, 60, 81, 57, 65, 55, 59, 43, 60, 51, 77, 84, 51, 55, 66, 53, 74, 63, 56, 67, 59, 70, 61, 62, 93, 90, 84, 76, 90, 54, 62, 80, 87, 70, 58, 57, 61, 72, 70, 63, 59, 53, 62, 51, 43, 72, 80, 91, 48, 58, 71, 84, 67, 76, 57, 51, 54, 74, 50, 71, 70, 67, 56, 51, 62, 85, 139, 86, 70, 60, 70, 62, 59, 84, 69, 54, 53, 46, 73, 76, 75, 89, 59, 66, 59, 60, 65, 60, 73, 73, 69, 66, 71, 61, 76, 55, 71, 64, 55, 57, 98, 58, 46, 62, 59, 65, 55, 51, 61, 64, 61, 78, 65, 69, 75, 62, 80, 54, 57, 66, 46, 82, 55, 63, 58, 72, 66, 58, 57, 62, 51, 62, 101, 61, 55, 66, 49, 74, 67, 70, 60, 84, 75, 60, 58, 55, 53, 59, 59, 47, 61, 62, 72, 88, 61, 58, 81, 66, 73, 61, 55, 66, 64, 56, 65, 67, 68, 70, 72, 61, 71, 55, 108, 80, 51, 57, 46, 67, 69, 58, 58, 83, 59, 64, 55, 79, 73, 87, 70, 62, 97, 47, 82, 64, 67, 74, 73, 105, 61, 51, 55, 67, 68, 59, 90, 60, 57, 53, 63, 69, 46, 86, 69, 82, 61, 67, 75, 47, 64, 76, 57, 61, 65, 53, 64, 86, 81, 56, 39, 63, 54, 67, 83, 47, 59, 65, 69, 82, 69, 77, 63, 75, 107, 62, 72, 90, 43, 60, 58, 54, 71, 45, 55, 56, 80, 60, 80, 126, 66, 54, 45, 107, 65, 52, 53, 77, 63, 46, 63, 115, 45, 61, 58, 82, 57, 70, 55, 61, 61, 71, 69, 45, 46, 65, 48, 66, 66, 68, 63, 61, 53, 64, 58, 81, 52, 71, 74, 67, 74, 93, 61, 59, 58, 67, 70, 73, 60, 80, 66, 73, 77, 102, 65, 77, 55, 67, 57, 54, 71, 71, 66, 60, 50, 70, 53, 100, 50, 69, 50, 49, 57, 58, 89, 72, 64, 68, 47, 61, 50, 72, 80, 41, 72, 70, 119, 91, 64, 83, 70, 64, 56, 85, 47, 57, 88, 
56, 66, 85, 65, 68, 61, 69, 57, 60, 55, 73, 67, 61, 56, 63, 54, 46, 58, 98, 66, 69, 60, 65, 75, 48, 58, 72, 76, 60, 93, 80, 70, 64, 70, 63, 57, 51, 69, 63, 58, 66, 68, 89, 66, 101, 63, 59, 56, 89, 65, 52, 66, 71, 65, 73, 52, 63, 57, 61, 102, 63, 72, 54, 53, 63, 45, 67, 64, 71, 61, 49, 65, 78, 66, 73, 60, 62, 73, 70, 49, 83, 62, 42, 50, 66, 87, 59, 55, 60, 68, 66, 94, 80, 53, 59, 65, 86, 75, 50, 97, 70, 59, 83, 64, 69, 93, 76, 64, 52, 70, 50, 47, 87, 76, 68, 66, 67, 77, 68, 68, 66, 70, 57, 66, 59, 67, 49, 51, 57, 85, 69, 70, 52, 65, 81, 70, 96, 56, 58, 80, 51, 48, 61, 49, 69, 45, 74, 55, 69, 72, 67, 102, 60, 57, 92, 90, 65, 69, 77, 69, 64, 58, 49, 98, 73, 72, 75, 69, 78, 55, 79, 92, 52, 73, 61, 74, 51, 95, 62, 61, 66, 48, 54, 88, 82, 81, 106, 59, 47, 47, 63, 65, 51, 49, 52, 68, 49, 56, 49, 54, 60, 55, 57, 80, 66, 62, 84, 71, 108, 51, 58, 57, 55, 62, 63, 72, 56, 48, 68, 62, 63, 55, 62, 61, 59, 72, 67, 59, 68, 43, 55, 67, 64, 48, 56, 61, 104, 59, 60, 55, 46, 59, 73, 47, 52, 51, 62, 52, 79, 67, 62, 61, 72, 89, 54, 53, 65, 103, 64, 62, 72, 58, 51, 64, 55, 57, 80, 74, 57, 72, 59, 73, 42, 64, 68, 64, 63, 56, 65, 65, 58, 98, 62, 59, 78, 60, 69, 77, 62, 109, 58, 52, 68, 61, 84, 67, 44, 63, 76, 60, 57, 42, 52, 76, 68, 48, 64, 48, 78, 80, 62, 49, 52, 63, 87, 51, 73, 60, 81, 76, 66, 73, 65, 66, 100, 53, 126, 82, 73, 69, 48, 59, 74, 72, 76, 77, 109, 57, 63, 66, 100, 73, 65, 55, 58, 66, 97, 52, 89, 58, 48, 74, 58, 82, 59, 58, 82, 62, 65, 84, 67, 47, 65, 49, 46, 65, 85, 47, 49, 69, 66, 65, 62, 65, 64, 87, 65, 64, 80, 61, 73, 77, 70, 57, 48, 62, 57, 76, 55, 66, 52, 60, 78, 79, 57, 55, 77, 79, 78, 51, 66, 80, 54, 66, 54, 58, 71, 62, 68, 58, 112, 65, 82, 69, 48, 56, 62, 60, 60, 64, 79, 58, 95, 72, 77, 67, 78, 69, 72, 82, 71, 52, 63, 67, 60, 67, 63, 53, 69, 82, 126, 69, 52, 87, 50, 55, 71, 61, 69, 92, 67, 72, 63, 70, 66, 72, 64, 46, 65, 63, 66, 55, 74, 80, 70, 48, 68, 51, 61, 63, 64, 66, 67, 56, 41, 52, 70, 73, 79, 67, 73, 70, 53, 74, 65, 54, 43, 69, 58, 59, 65, 77, 79, 72, 92, 43, 69, 61, 62, 60, 59, 69, 59, 40, 71, 63, 71, 46, 84, 74, 75, 139, 71, 93, 77, 51, 57, 55, 80, 78, 85, 61, 77, 76, 61, 77, 49, 54, 56, 63, 64, 57, 37, 122, 79, 42, 82, 68, 53, 66, 68, 91, 66, 62, 72, 53, 65, 85, 74, 72, 76, 78, 59, 60, 69, 72, 60, 54, 64, 79, 59, 43, 56, 63, 49, 58, 60, 61, 53, 63, 53, 78, 50, 86, 71, 62, 72, 56, 74, 68, 47, 65, 64, 52, 49, 68, 74, 77, 69, 60, 134, 60, 89, 70, 47, 52, 80, 68, 54, 51, 55, 58, 64, 44, 66, 50, 85, 88, 57, 94, 57, 84, 55, 55, 73, 42, 59, 64, 66, 93, 46, 70, 61, 75, 71, 50, 68, 70, 76, 41, 63, 64, 53, 52, 77, 66, 81, 70, 60, 53, 52, 59, 66, 57, 77, 67, 61, 61, 47, 64, 67, 62, 70, 65, 66, 54, 60, 67, 63, 73, 54, 69, 113, 80, 62, 69, 74, 54, 83, 66, 53, 63, 66, 34, 63, 65, 60, 80, 64, 113, 60, 86, 61, 61, 69, 88, 74, 64, 61, 60, 70, 61, 67, 70, 59, 49, 83, 58, 70, 74, 58, 76, 71, 51, 89, 54, 61, 82, 57, 57, 87, 49, 54, 74, 74, 65, 74, 90, 46, 62, 73, 67, 51, 83, 64, 65, 57, 67, 59, 71, 52, 59, 75, 59, 62, 66, 71, 80, 55, 74, 60, 95, 60, 74, 69, 74, 69, 55, 67, 68, 80, 74, 65, 44, 54, 81, 75, 96, 81, 90, 63, 60, 84, 63, 65, 57, 56, 52, 52, 52, 84, 50, 71, 48, 73, 93, 68, 50, 65, 58, 65, 52, 79, 72, 70, 122, 58, 71, 78, 61, 57, 54, 46, 70, 95, 65, 56, 58, 47, 65, 49, 113, 69, 60, 55, 65, 81, 46, 92, 59, 64, 67, 69, 87, 68, 85, 87, 60, 71, 81, 49, 74, 69, 66, 83, 67, 55, 69, 53, 77, 51, 97, 54, 69, 80, 65, 77, 41, 64, 66, 48, 96, 64, 42, 68, 44, 56, 84, 68, 50, 59, 50, 80, 54, 56, 71, 60, 56, 103, 64, 58, 107, 52, 48, 65, 85, 55, 54, 62, 64, 87, 60, 84, 55, 64, 65, 52, 55, 55, 69, 63, 57, 62, 
71, 71, 72, 64, 36, 60, 71, 75, 61, 75, 71, 49, 51, 65, 72, 56, 58, 48, 56, 50, 57, 67, 65, 79, 65, 71, 65, 56, 62, 58, 50, 68, 62, 60, 63, 69, 94, 76, 62, 55, 64, 61, 69, 102, 73, 85, 93, 71, 83, 62, 74, 68, 60, 71, 48, 68, 64, 62, 63, 95, 68, 68, 52, 84, 47, 74, 76, 80, 87, 72, 69, 53, 80, 67, 52, 73, 73, 92, 63, 73, 63, 52, 49, 81, 72, 61, 57, 72, 54, 57, 75, 82, 80, 70, 53, 53, 57, 85, 59, 59, 73, 46, 47, 80, 50, 59, 51, 76, 64, 72, 66, 62, 61, 74, 80, 61, 56, 72, 52, 82, 60, 54, 66, 53, 73, 76, 60, 52, 67, 66, 96, 67, 63, 53, 63, 75, 98, 58, 66, 72, 52, 73, 71, 78, 65, 73, 59, 63, 58, 54, 51, 72, 54, 49, 61, 88, 64, 75, 63, 55, 76, 90, 62, 67, 80, 62, 60, 54, 32, 68, 67, 68, 43, 66, 60, 73, 62, 58, 63, 41, 56, 73, 69, 87, 66, 60, 74, 62, 102, 54, 57, 75, 55, 61, 83, 98, 51, 86, 48, 60, 64, 56, 61, 60, 57, 48, 60, 60, 124, 65, 99, 60, 57, 73, 74, 80, 75, 61, 86, 77, 71, 51, 80, 62, 62, 62, 44, 44, 57, 81, 69, 47, 35, 65, 70, 57, 63, 89, 50, 72, 41, 63, 64, 89, 84, 63, 88, 53, 75, 74, 61, 68, 76, 64, 57, 51, 84, 60, 75, 66, 107, 69, 45, 76, 81, 58, 54, 51, 71, 76, 72, 54, 71, 77, 58, 62, 50, 46, 61, 73, 50, 80, 50, 66, 63, 53, 52, 75, 92, 67, 49, 49, 80, 73, 61, 40, 91, 65, 84, 58, 60, 66, 63, 62, 69, 83, 62, 75, 70, 61, 46, 55, 71, 62, 56, 72, 67, 59, 60, 64, 58, 64, 34, 72, 93, 53, 45, 91, 74, 67, 64, 52, 71, 82, 50, 91, 69, 65, 86, 68, 74, 51, 61, 91, 60, 56, 68, 60, 78, 120, 73, 42, 75, 66, 98, 79, 96, 72, 89, 53, 69, 56, 73, 75, 76, 55, 71, 71, 98, 68, 70, 59, 52, 78, 51, 56, 43, 72, 73, 70, 58, 58, 95, 64, 51, 77, 59, 56, 90, 67, 65, 63, 107, 64, 41, 73, 72, 63, 57, 108, 53, 44, 51, 49, 65, 67, 65, 67, 59, 57, 59, 64, 90, 56, 57, 50, 69, 71, 64, 84, 69, 47, 67, 88, 66, 71, 60, 69, 61, 54, 67, 69, 47, 75, 109, 78, 69, 59, 61, 50, 43, 56, 67, 49, 46, 77, 63, 68, 65, 69, 60, 75, 69, 98, 65, 81, 53, 51, 59, 75, 74, 73, 69, 84, 72, 46, 64, 46, 75, 48, 67, 91, 64, 64, 49, 64, 115, 53, 61, 51, 76, 55, 70, 70, 55, 58, 82, 68, 62, 69, 71, 62, 49, 59, 66, 66, 80, 72, 74, 57, 57, 79, 54, 51, 45, 56, 54, 55, 62, 49, 58, 76, 76, 65, 55, 67, 96, 69, 54, 92, 69, 56, 63, 68, 64, 60, 49, 86, 48, 57, 62, 75, 61, 98, 65, 82, 50, 58, 65, 63, 53, 63, 65, 63, 48, 59, 53, 67, 54, 69, 53, 114, 85, 59, 50, 101, 61, 79, 59, 70, 58, 73, 42, 73, 47, 68, 81, 66, 73, 59, 62, 58, 69, 60, 79, 60, 80, 52, 44, 85, 93, 91, 69, 57, 56, 71, 71, 63, 59, 59, 95, 82, 53, 68, 69, 65, 66, 96, 75, 63, 106, 55, 66, 50, 64, 46, 47, 71, 56, 55, 77, 79, 57, 57, 55, 67, 65, 49, 58, 71, 55, 88, 78, 57, 65, 56, 61, 77, 56, 57, 72, 67, 78, 74, 65, 64, 68, 87, 51, 69, 67, 79, 62, 76, 46, 62, 67, 78, 65, 57, 55, 47, 102, 110, 87, 84, 58, 59, 77, 78, 56, 55, 70, 74, 129, 98, 63, 53, 68, 82, 62, 61, 66, 67, 104, 66, 47, 98, 74, 55, 51, 68, 69, 59, 83, 106, 63, 56, 77, 72, 62, 53, 74, 80, 54, 68, 49, 71, 72, 49, 61, 56, 64, 70, 50, 75, 49, 52, 47, 83, 86, 67, 64, 48, 66, 65, 58, 57, 72, 48, 41, 64, 60, 41, 52, 42, 52, 71, 70, 54, 73, 81, 66, 98, 90, 63, 74, 73, 74, 62, 73, 66, 52, 44, 64, 82, 62, 115, 68, 62, 85, 66, 85, 69, 75, 63, 70, 66, 66, 75, 58, 71, 73, 57, 64, 73, 51, 65, 67, 63, 57, 107, 53, 46, 42, 74, 52, 92, 68, 62, 77, 58, 51, 59, 58, 66, 68, 53, 69, 74, 58, 57, 65, 77, 61, 58, 69, 49, 58, 69, 63, 54, 64, 84, 71, 79, 74, 71, 63, 90, 68, 69, 71, 71, 71, 69, 61, 51, 71, 58, 107, 61, 33, 63, 59, 65, 71, 75, 52, 60, 33, 60, 54, 64, 90, 90, 63, 66, 74, 55, 52, 57, 87, 52, 70, 69, 60, 56, 86, 56, 63, 92, 59, 73, 53, 53, 78, 80, 55, 72, 71, 77, 61, 74, 69, 47, 57, 76, 57, 47, 82, 57, 52, 63, 46, 51, 60, 81, 69, 95, 72, 53, 61, 
63, 73, 59, 72, 67, 48, 71, 65, 95, 59, 57, 53, 78, 60, 80, 104, 121, 89, 59, 79, 64, 44, 46, 57, 60, 69, 73, 62, 99, 63, 87, 67, 71, 82, 62, 75, 63, 65, 61, 60, 82, 69, 75, 131, 74, 59, 62, 69, 59, 44, 46, 70, 68, 72, 53, 41, 66, 57, 63, 60, 63, 45, 76, 51, 81, 56, 86, 50, 65, 69, 72, 100, 66, 51, 56, 61, 64, 65, 57, 62, 89, 53, 60, 74, 44, 66, 76, 59, 57, 54, 66, 78, 67, 56, 61, 49, 46, 57, 56, 96, 62, 28, 59, 66, 48, 87, 59, 85, 110, 61, 66, 77, 63, 53, 61, 56, 84, 76, 35, 93, 73, 62, 65, 82, 51, 74, 66, 81, 71, 53, 74, 68, 68, 77, 53, 83, 59, 78, 55, 61, 66, 59, 51, 42, 41, 58, 53, 70, 81, 69, 41, 56, 64, 61, 52, 71, 56, 58, 69, 56, 62, 54, 87, 66, 87, 88, 54, 73, 50, 89, 64, 60, 50, 78, 64, 58, 70, 86, 78, 48, 57, 50, 66, 61, 106, 74, 65, 77, 69, 58, 73, 63, 72, 72, 66, 54, 59, 47, 54, 78, 69, 59, 58, 56, 85, 68, 82, 58, 82, 88, 73, 59, 54, 67, 55, 66, 57, 60, 67, 64, 54, 65, 72, 78, 80, 87, 60, 55, 73, 60, 84, 62, 46, 77, 58, 40, 76, 72, 90, 53, 51, 66, 54, 49, 66, 49, 47, 53, 59, 65, 45, 64, 73, 72, 54, 69, 80, 72, 70, 60, 63, 68, 66, 49, 76, 80, 73, 62, 53, 55, 80, 127, 57, 63, 73, 55, 69, 68, 75, 70, 66, 63, 94, 63, 63, 68, 68, 50, 61, 61, 64, 50, 58, 75, 63, 75, 78, 63, 70, 66, 72, 53, 75, 44, 76, 60, 76, 62, 65, 87, 57, 61, 89, 56, 66, 88, 67, 49, 67, 58, 82, 61, 61, 58, 67, 69, 93, 64, 47, 88, 67, 62, 51, 55, 72, 56, 87, 56, 71, 78, 57, 57, 64, 74, 52, 71, 55, 64, 59, 95, 56, 75, 48, 50, 68, 74, 63, 61, 55, 75, 47, 57, 61, 75, 68, 68, 67, 81, 66, 88, 60, 52, 54, 74, 75, 53, 59, 78, 79, 69, 77, 62, 73, 60, 63, 60, 77, 71, 62, 45, 47, 65, 62, 82, 56, 62, 58, 76, 64, 75, 53, 103, 68, 81, 65, 92, 82, 60, 63, 66, 56, 65, 65, 73, 65, 52, 60, 58, 45, 58, 57, 57, 76, 69, 45, 54, 63, 47, 90, 79, 63, 64, 75, 69, 62, 71, 46, 89, 57, 94, 71, 60, 59, 57, 51, 64, 64, 65, 54, 56, 51, 50, 74, 93, 76, 67, 78, 55, 72, 84, 47, 49, 52, 48, 76, 61, 59, 66, 113, 84, 62, 51, 53, 65, 65, 56, 77, 71, 65, 71, 57, 67, 76, 82, 93, 58, 92, 45, 55, 70, 90, 69, 59, 58, 63, 89, 68, 73, 46, 62, 92, 64, 74, 74, 76, 73, 48, 45, 59, 62, 73, 49, 59, 53, 83, 49, 66, 56, 57, 69, 72, 59, 49, 97, 52, 46, 67, 63, 55, 74, 58, 46, 71, 97, 66, 62, 63, 68, 48, 46, 100, 72, 81, 68, 66, 68, 95, 60, 82, 50, 76, 61, 51, 74, 52, 72, 71, 59, 55, 39, 78, 68, 56, 56, 60, 75, 94, 75, 78, 65, 42, 94, 64, 54, 69, 88, 58, 96, 64, 82, 50, 62, 69, 77, 79, 42, 69, 54, 49, 63, 86, 50, 54, 77, 68, 57, 111, 50, 78, 57, 71, 71, 71, 74, 51, 81, 69, 53, 78, 69, 73, 45, 51, 73, 64, 44, 67, 124, 48, 51, 40, 56, 63, 52, 68, 49, 62, 78, 79, 71, 67, 51, 58, 60, 76, 47, 71, 57, 58, 66, 60, 64, 77, 63, 104, 53, 47, 62, 62, 70, 70, 71, 62, 71, 61, 72, 62, 66, 59, 81, 63, 72, 62, 63, 62, 55, 68, 65, 73, 97, 79, 44, 58, 65, 46, 72, 70, 60, 53, 65, 92, 64, 57, 76, 43, 56, 46, 71, 62, 51, 66, 77, 81, 67, 67, 38, 69, 55, 67, 60, 56, 77, 66, 60, 65, 70, 58, 64, 79, 71, 63, 53, 61, 78, 56, 58, 57, 62, 61, 57, 76, 70, 91, 55, 65, 69, 49, 66, 62, 54, 70, 67, 58, 64, 76, 51, 50, 56, 69, 72, 59, 65, 59, 68, 66, 63, 67, 44, 61, 62, 67, 68, 64, 53, 86, 53, 59, 59, 95, 67, 86, 57, 64, 64, 82, 57, 65, 64, 53, 73, 57, 53, 76, 82, 66, 68, 50, 80, 57, 51, 82, 73, 69, 66, 60, 98, 63, 82, 69, 59, 49, 55, 68, 112, 63, 52, 113, 65, 62, 80, 57, 78, 69, 63, 66, 83, 76, 61, 73, 52, 48, 57, 73, 48, 78, 112, 70, 81, 60, 68, 67, 65, 69, 82, 56, 69, 78, 72, 76, 63, 56, 77, 52, 79, 68, 49, 77, 51, 54, 68, 54, 105, 58, 71, 64, 42, 61, 59, 57, 76, 80, 92, 60, 74, 52, 80, 48, 60, 63, 62, 63, 57, 60, 50, 70, 51, 65, 58, 88, 46, 76, 77, 99, 59, 111, 84, 72, 60, 114, 56, 78, 73, 56, 60, 
65, 66, 53, 68, 80, 62, 67, 76, 49, 90, 64, 57, 55, 74, 58, 92, 64, 61, 63, 59, 53, 81, 98, 70, 68, 75, 62, 65, 100, 42, 48, 75, 65, 61, 98, 75, 78, 66, 60, 59, 73, 53, 74, 58, 72, 59, 59, 61, 69, 69, 57, 51, 65, 71, 68, 71, 54, 65, 57, 69, 68, 71, 48, 55, 66, 67, 57, 67, 65, 56, 66, 58, 74, 76, 56, 60, 50, 126, 55, 65, 64, 72, 52, 67, 42, 54, 57, 68, 92, 81, 62, 85, 91, 49, 56, 60, 60, 49, 60, 92, 50, 50, 67, 74, 69, 72, 74, 60, 68, 66, 48, 68, 83, 79, 84, 60, 44, 60, 62, 66, 68, 66, 59, 59, 59, 59, 48, 85, 57, 53, 65, 61, 64, 60, 44, 70, 66, 81, 60, 68, 78, 49, 68, 60, 56, 107, 60, 64, 90, 78, 81, 51, 73, 59, 77, 80, 68, 62, 58, 57, 102, 53, 49, 43, 76, 61, 49, 69, 86, 69, 84, 62, 81, 100, 49, 36, 75, 67, 76, 46, 75, 80, 45, 100, 64, 59, 52, 45, 53, 53, 79, 66, 93, 69, 62, 73, 43, 87, 51, 77, 69, 62, 59, 89, 82, 68, 68, 54, 65, 58, 54, 89, 65, 73, 70, 50, 71, 66, 61, 56, 65, 56, 60, 46, 73, 59, 73, 44, 64, 79, 67, 61, 63, 54, 67, 60, 72, 71, 55, 68, 63, 123, 55, 52, 63, 80, 45, 47, 60, 53, 74, 51, 59, 88, 55, 62, 62, 89, 62, 77, 71, 68, 75, 82, 59, 76, 69, 75, 63, 65, 66, 71, 56, 63, 60, 55, 99, 77, 55, 56, 89, 63, 53, 66, 64, 69, 52, 60, 74, 61, 60, 76, 68, 82, 64, 75, 62, 70, 85, 58, 48, 70, 88, 65, 73, 67, 76, 80, 64, 82, 50, 70, 72, 80, 77, 58, 61, 90, 54, 44, 49, 72, 67, 41, 111, 76, 77, 57, 51, 77, 53, 60, 63, 51, 58, 53, 67, 43, 73, 61, 79, 69, 56, 62, 60, 67, 65, 55, 68, 64, 57, 60, 58, 78, 63, 59, 83, 58, 70, 55, 59, 63, 79, 50, 60, 74, 79, 93, 75, 63, 42, 61, 55, 64, 56, 50, 66, 63, 104, 63, 78, 54, 82, 66, 63, 51, 80, 84, 57, 53, 73, 53, 72, 55, 81, 74, 65, 69, 73, 72, 70, 64, 72, 83, 68, 54, 65, 84, 55, 66, 63, 106, 88, 59, 48, 79, 56, 55, 72, 36, 56, 56, 82, 63, 80, 71, 79, 53, 59, 47, 79, 62, 59, 74, 46, 54, 86, 99, 53, 42, 66, 81, 58, 56, 69, 47, 54, 68, 56, 99, 67, 57, 57, 61, 65, 53, 61, 58, 78, 59, 42, 64, 65, 97, 63, 59, 64, 49, 59, 73, 83, 63, 61, 58, 47, 68, 88, 65, 45, 62, 59, 55, 69, 48, 83, 74, 67, 57, 60, 78, 67, 62, 79, 48, 66, 51, 56, 91, 74, 55, 78, 55, 62, 71, 64, 66, 56, 64, 62, 70, 60, 50, 53, 76, 72, 52, 65, 69, 61, 60, 51, 58, 71, 68, 76, 63, 65, 66, 73, 65, 60, 70, 72, 59, 61, 61, 76, 70, 73, 63, 89, 118, 63, 52, 50, 95, 61, 63, 67, 63, 78, 58, 55, 75, 67, 55, 68, 67, 95, 54, 55, 62, 59, 70, 58, 69, 54, 62, 80, 79, 69, 73, 66, 78, 54, 71, 89, 75, 65, 62, 80, 64, 78, 70, 59, 72, 61, 72, 57, 54, 57, 69, 55, 69, 73, 74, 64, 71, 56, 67, 64, 63, 60, 68, 85, 64, 50, 66, 83, 55, 54, 69, 67, 72, 54, 63, 72, 69, 79, 84, 60, 61, 59, 63, 79, 79, 69, 76, 55, 53, 94, 52, 64, 51, 60, 65, 56, 89, 69, 66, 63, 54, 78, 64, 53, 92, 55, 65, 61, 73, 74, 71, 67, 41, 37, 49, 65, 61, 62, 77, 69, 62, 47, 116, 53, 67, 111, 92, 55, 67, 53, 62, 60, 106, 70, 47, 69, 79, 61, 55, 74, 60, 96, 60, 59, 67, 58, 50, 77, 52, 72, 56, 57, 82, 56, 53, 56, 53, 66, 59, 72, 73, 58, 57, 60, 57, 93, 73, 59, 64, 66, 80, 80, 54, 67, 80, 73, 57, 48, 78, 61, 61, 67, 58, 52, 57, 55, 74, 66, 53, 66, 68, 59, 68, 76, 63, 80, 68, 74, 67, 84, 94, 52, 68, 77, 81, 74, 58, 89, 68, 61, 53, 57, 53, 69, 47, 59, 107, 74, 64, 67, 76, 79, 60, 46, 109, 82, 63, 48, 58, 68, 40, 66, 92, 56, 63, 63, 70, 49, 63, 71, 58, 54, 71, 56, 122, 39, 56, 72, 46, 61, 72, 56, 106, 75, 55, 65, 86, 58, 73, 70, 68, 78, 62, 56, 53, 71, 61, 59, 54, 75, 80, 99, 67, 69, 71, 63, 53, 57, 61, 56, 52, 70, 64, 49, 61, 78, 61, 62, 75, 76, 56, 84, 44, 72, 63, 51, 60, 59, 62, 66, 64, 83, 63, 52, 53, 74, 68, 98, 45, 67, 61, 65, 60, 61, 70, 77, 55, 57, 62, 70, 67, 67, 60, 64, 82, 83, 61, 63, 57, 69, 63, 59, 62, 55, 59, 60, 63, 68, 60, 72, 102, 54, 73, 
63, 59, 51, 60, 62, 61, 56, 53, 70, 88, 62, 85, 88, 71, 53, 63, 66, 78, 64, 88, 55, 67, 60, 53, 52, 67, 65, 66, 57, 57, 69, 60, 75, 55, 58, 70, 62, 76, 72, 64, 75, 71, 56, 65, 90, 59, 88, 60, 56, 79, 56, 58, 72, 65, 99, 116, 67, 73, 62, 65, 52, 80, 71, 57, 59, 56, 65, 61, 58, 48, 95, 56, 69, 64, 64, 60, 70, 65, 62, 60, 53, 50, 46, 56, 80, 75, 64, 62, 51, 59, 76, 69, 66, 84, 64, 76, 61, 68, 80, 58, 73, 63, 81, 66, 46, 72, 53, 64, 63, 76, 51, 50, 66, 63, 77, 63, 66, 72, 62, 51, 64, 60, 73, 74, 75, 53, 73, 43, 100, 53, 67, 65, 62, 55, 59, 59, 59, 49, 49, 72, 69, 65, 58, 63, 69, 58, 64, 86, 62, 71, 56, 46, 70, 61, 61, 58, 56, 60, 56, 61, 62, 72, 54, 71, 73, 77, 76, 65, 45, 67, 62, 55, 75, 62, 59, 60, 109, 62, 63, 70, 87, 52, 63, 80, 65, 75, 71, 75, 63, 61, 63, 53, 68, 50, 55, 62, 55, 63, 70, 57, 95, 72, 87, 47, 63, 95, 87, 55, 61, 92, 69, 66, 69, 61, 64, 75, 53, 87, 53, 53, 64, 52, 54, 47, 55, 70, 69, 79, 84, 70, 55, 82, 60, 67, 63, 58, 63, 52, 57, 55, 65, 50, 66, 53, 68, 33, 63, 58, 47, 67, 66, 50, 62, 58, 66, 62, 58, 61, 63, 57, 79, 64, 51, 69, 61, 55, 51, 65, 55, 65, 70, 66, 87, 59, 79, 59, 52, 70, 66, 62, 68, 56, 57, 59, 56, 54, 64, 65, 68, 67, 67, 64, 67, 62, 77, 69, 57, 76, 52, 54, 60, 64, 53, 62, 54, 56, 71, 79, 69, 74, 69, 53, 88, 59, 66, 52, 49, 70, 66, 77, 70, 60, 50, 97, 86, 69, 54, 49, 77, 55, 61, 80, 47, 61, 51, 89, 59, 70, 61, 76, 62, 59, 71, 46, 66, 85, 65, 52, 87, 64, 57, 79, 77, 53, 70, 71, 58, 76, 59, 65, 64, 64, 63, 75, 65, 62, 57, 46, 46, 97, 55, 48, 83, 56, 70, 77, 59, 39, 66, 49, 83, 40, 53, 67, 97, 70, 69, 60, 73, 68, 58, 69, 82, 113, 62, 50, 75, 71, 60, 60, 43, 65, 56, 78, 75, 85, 46, 70, 60, 66, 43, 61, 73, 69, 64, 60, 69, 65, 52, 54, 54, 110, 43, 60, 64, 81, 50, 76, 57, 89, 81, 79, 62, 59, 60, 48, 59, 95, 88, 117, 52, 65, 71, 75, 75, 49, 79, 63, 64, 60, 71, 77, 65, 57, 101, 72, 59, 59, 104, 64, 74, 57, 62, 54, 114, 101, 55, 57, 61, 44, 67, 56, 66, 59, 93, 62, 71, 68, 55, 61, 66, 76, 71, 59, 73, 63, 107, 96, 58, 66, 57, 57, 47, 71, 51, 61, 75, 51, 78, 64, 67, 77, 64, 70, 52, 74, 70, 81, 71, 75, 70, 66, 58, 48, 64, 65, 72, 63, 91, 70, 65, 65, 73, 71, 64, 68, 64, 51, 53, 53, 61, 80, 65, 72, 65, 87, 49, 71, 56, 53, 64, 52, 45, 74, 64, 98, 44, 46, 66, 66, 61, 96, 75, 58, 96, 66, 52, 62, 55, 69, 84, 90, 45, 64, 77, 57, 57, 47, 50, 115, 60, 60, 66, 46, 61, 55, 51, 58, 85, 71, 54, 49, 60, 89, 50, 55, 63, 56, 56, 49, 73, 73, 70, 56, 61, 83, 64, 61, 54, 53, 65, 80, 60, 83, 64, 62, 88, 67, 75, 49, 55, 66, 55, 55, 79, 76, 52, 68, 56, 62, 71, 65, 70, 70, 60, 64, 70, 53, 72, 44, 71, 67, 61, 58, 52, 54, 60, 47, 58, 61, 59, 82, 75, 72, 55, 65, 63, 90, 53, 67, 90, 64, 67, 75, 58, 53, 89, 74, 128, 56, 43, 62, 76, 71, 53, 48, 70, 79, 78, 80, 63, 61, 66, 78, 58, 74, 66, 59, 55, 118, 60, 76, 73, 43, 62, 67, 64, 54, 76, 58, 56, 71, 70, 49, 69, 74, 58, 58, 68, 87, 63, 77, 62, 59, 69, 61, 52, 69, 60, 58, 55, 40, 53, 72, 60, 78, 69, 89, 78, 73, 80, 77, 53, 84, 53, 67, 55, 57, 77, 58, 51, 76, 69, 72, 63, 73, 57, 59, 59, 75, 141, 64, 56, 66, 66, 77, 68, 68, 64, 51, 53, 46, 58, 63, 57, 61, 67, 53, 55, 58, 67, 59, 53, 69, 56, 71, 104, 78, 121, 60, 105, 57, 84, 73, 59, 52, 62, 67, 116, 58, 72, 86, 52, 79, 56, 80, 62, 51, 71, 53, 59, 81, 66, 61, 116, 77, 57, 64, 62, 82, 55, 76, 62, 46, 63, 54, 37, 59, 75, 55, 55, 70, 86, 45, 62, 76, 73, 59, 51, 73, 78, 55, 65, 75, 55, 78, 51, 87, 80, 64, 70, 96, 46, 55, 63, 69, 61, 90, 66, 67, 73, 74, 66, 83, 82, 76, 51, 53, 69, 53, 49, 73, 69, 51, 71, 59, 83, 80, 61, 91, 77, 72, 54, 62, 38, 60, 79, 63, 70, 62, 58, 90, 66, 54, 78, 51, 75, 67, 70, 62, 109, 49, 55, 
73, 51, 58, 95, 66, 59, 58, 50, 62, 70, 53, 72, 63, 69, 55, 81, 62, 83, 53, 58, 69, 53, 64, 74, 66, 58, 76, 67, 54, 96, 75, 52, 57, 66, 57, 61, 61, 63, 71, 54, 51, 40, 67, 75, 66, 73, 49, 51, 68, 58, 55, 65, 64, 68, 76, 89, 75, 54, 58, 73, 62, 63, 53, 55, 61, 78, 56, 68, 55, 53, 57, 66, 49, 68, 65, 64, 62, 113, 64, 62, 55, 61, 55, 57, 78, 62, 69, 94, 60, 61, 67, 81, 66, 55, 61, 70, 45, 42, 115, 65, 57, 64, 71, 57, 58, 78, 54, 71, 72, 77, 62, 62, 53, 64, 75, 61, 66, 57, 63, 66, 72, 67, 52, 49, 59, 108, 57, 59, 59, 53, 47, 100, 60, 67, 68, 67, 81, 75, 48, 48, 53, 68, 75, 50, 53, 56, 69, 73, 82, 100, 58, 56, 68, 58, 69, 62, 60, 51, 53, 58, 55, 75, 50, 64, 55, 66, 59, 81, 64, 55, 72, 81, 72, 59, 57, 55, 49, 74, 80, 63, 55, 63, 58, 51, 50, 61, 70, 57, 63, 78, 49, 40, 62, 60, 65, 79, 57, 58, 66, 74, 58, 62, 64, 72, 59, 66, 72, 62, 55, 54, 64, 101, 61, 59, 54, 54, 119, 83, 93, 54, 73, 65, 60, 47, 52, 55, 57, 63, 72, 76, 57, 74, 63, 74, 67, 53, 73, 72, 99, 67, 65, 65, 87, 73, 92, 54, 72, 61, 68, 68, 72, 93, 57, 64, 58, 68, 56, 66, 61, 74, 71, 61, 71, 67, 50, 44, 54, 51, 110, 52, 55, 64, 72, 79, 58, 65, 53, 60, 56, 63, 60, 48, 66, 77, 61, 63, 37, 55, 70, 71, 65, 66, 65, 68, 66, 47, 81, 60, 57, 66, 72, 61, 57, 103, 64, 78, 74, 73, 66, 89, 59, 76, 63, 86, 70, 58, 58, 114, 80, 52, 66, 97, 73, 69, 55, 55, 61, 81, 58, 92, 63, 79, 64, 79, 58, 66, 71, 61, 59, 67, 62, 65, 72, 64, 62, 75, 76, 69, 109, 67, 57, 66, 88, 64, 57, 58, 52, 100, 63, 77, 70, 62, 70, 58, 58, 80, 55, 66, 76, 56, 62, 58, 78, 58, 67, 51, 78, 61, 65, 49, 91, 63, 60, 63, 52, 81, 57, 74, 59, 68, 61, 62, 66, 52, 83, 55, 57, 90, 66, 61, 51, 62, 58, 59, 65, 50, 80, 64, 62, 66, 67, 51, 72, 74, 102, 59, 54, 59, 91, 68, 58, 72, 57, 43, 58, 54, 51, 66, 57, 70, 60, 68, 55, 56, 63, 71, 59, 53, 58, 56, 50, 49, 73, 69, 75, 47, 65, 64, 87, 64, 58, 53, 54, 62, 69, 61, 64, 81, 57, 62, 69, 50, 82, 66, 77, 69, 65, 59, 62, 64, 57, 59, 82, 87, 53, 51, 66, 54, 68, 63, 52, 79, 55, 113, 81, 63, 75, 83, 65, 75, 57, 63, 51, 54, 65, 53, 69, 72, 42, 58, 65, 63, 65, 63, 45, 55, 62, 64, 81, 80, 52, 56, 51, 72, 87, 57, 119, 74, 49, 58, 59, 69, 64, 64, 75, 63, 63, 70, 64, 56, 55, 77, 53, 81, 60, 83, 85, 51, 117, 71, 85, 58, 64, 60, 52, 96, 56, 73, 41, 73, 83, 54, 51, 82, 57, 66, 54, 60, 58, 61, 72, 68, 71, 53, 72, 61, 56, 64, 58, 69, 57, 61, 67, 68, 57, 111, 76, 52, 53, 85, 86, 73, 54, 62, 60, 62, 67, 55, 65, 68, 64, 92, 74, 88, 77, 72, 50, 63, 52, 52, 56, 73, 70, 49, 52, 116, 62, 46, 94, 123, 73, 68, 60, 69, 54, 86, 68, 73, 93, 87, 54, 66, 65, 54, 59, 59, 63, 55, 60, 41, 66, 47, 53, 68, 58, 77, 76, 67, 64, 70, 57, 80, 68, 80, 64, 63, 62, 78, 59, 69, 59, 38, 67, 61, 63, 62, 105, 56, 50, 60, 77, 68, 65, 50, 79, 51, 60, 93, 64, 76, 57, 89, 62, 73, 56, 61, 75, 68, 60, 56, 85, 68, 83, 54, 55, 59, 74, 58, 84, 74, 49, 69, 91, 76, 58, 60, 70, 48, 84, 50, 56, 69, 55, 54, 95, 75, 65, 64, 94, 62, 86, 70, 52, 66, 79, 72, 67, 52, 60, 55, 85, 83, 64, 44, 68, 62, 83, 41, 63, 67, 82, 59, 67, 71, 61, 67, 61, 58, 60, 83, 47, 65, 68, 59, 53, 69, 51, 67, 67, 81, 62, 96, 65, 71, 53, 67, 53, 49, 65, 50, 69, 74, 61, 58, 69, 50, 64, 59, 58, 50, 65, 48, 62, 59, 51, 75, 72, 52, 57, 60, 72, 60, 61, 60, 45, 76, 70, 61, 109, 62, 55, 78, 65, 56, 83, 61, 63, 77, 70, 46, 60, 72, 84, 64, 65, 79, 57, 64, 61, 54, 55, 62, 62, 87, 81, 55, 72, 82, 63, 52, 62, 82, 70, 60, 59, 88, 61, 69, 84, 60, 58, 63, 65, 71, 65, 60, 42, 89, 49, 91, 64, 74, 77, 71, 60, 58, 61, 48, 102, 71, 55, 68, 67, 65, 76, 77, 72, 89, 71, 83, 65, 56, 58, 69, 56, 47, 69, 78, 61, 95, 68, 67, 65, 53, 74, 69, 54, 45, 63, 57, 65, 59, 56, 
60, 66, 69, 76, 76, 77, 58, 62, 69, 82, 72, 73, 66, 93, 71, 63, 80, 65, 85, 64, 65, 58, 50, 94, 62, 62, 76, 48, 78, 54, 61, 68, 66, 57, 59, 62, 77, 67, 49, 77, 69, 51, 62, 73, 61, 71, 95, 70, 62, 74, 61, 66, 83, 58, 68, 58, 82, 63, 52, 55, 61, 67, 55, 70, 58, 60, 76, 53, 69, 73, 72, 53, 74, 57, 75, 63, 65, 127, 61, 63, 96, 62, 61, 69, 60, 65, 56, 51, 52, 67, 62, 44, 68, 73, 61, 66, 64, 67, 74, 41, 81, 73, 64, 61, 63, 60, 67, 59, 65, 113, 54, 58, 56, 41, 55, 61, 63, 57, 58, 78, 58, 64, 48, 96, 58, 73, 64, 60, 64, 62, 66, 66, 57, 60, 67, 47, 59, 65, 97, 86, 54, 58, 56, 92, 69, 48, 81, 54, 92, 83, 47, 59, 70, 76, 62, 64, 69, 55, 66, 66, 61, 55, 46, 57, 64, 68, 55, 70, 79, 57, 68, 53, 73, 79, 79, 65, 73, 74, 64, 75, 50, 64, 58, 62, 76, 50, 56, 59, 73, 53, 44, 72, 77, 53, 78, 60, 91, 54, 75, 64, 79, 66, 59, 71, 69, 84, 79, 47, 56, 75, 65, 72, 91, 60, 66, 43, 50, 51, 42, 54, 80, 74, 68, 74, 54, 64, 44, 52, 68, 56, 82, 59, 81, 62, 93, 56, 62, 61, 51, 61, 68, 88, 66, 76, 60, 77, 101, 50, 53, 70, 107, 68, 37, 58, 62, 73, 60, 54, 66, 58, 62, 71, 59, 58, 60, 52, 50, 58, 54, 82, 48, 70, 71, 71, 53, 56, 56, 80, 64, 80, 81, 56, 72, 60, 50, 84, 79, 62, 50, 81, 67, 46, 62, 60, 52, 57, 39, 62, 72, 57, 73, 63, 94, 62, 61, 47, 84, 70, 57, 63, 75, 88, 64, 63, 74, 56, 70, 66, 72, 36, 63, 68, 55, 80, 35, 71, 76, 58, 70, 62, 64, 71, 57, 91, 70, 63, 60, 51, 76, 51, 43, 47, 62, 94, 75, 52, 58, 62, 65, 73, 51, 72, 87, 114, 67, 60, 64, 91, 71, 78, 65, 75, 49, 92, 75, 61, 54, 56, 67, 78, 71, 75, 78, 68, 47, 62, 57, 70, 65, 75, 65, 56, 82, 49, 56, 49, 58, 80, 59, 68, 64, 68, 74, 76, 55, 59, 73, 73, 78, 60, 96, 53, 58, 73, 60, 56, 53, 58, 65, 55, 92, 72, 61, 69, 61, 52, 112, 101, 54, 49, 48, 67, 70, 58, 51, 76, 96, 88, 60, 71, 71, 43, 71, 53, 77, 58, 64, 90, 56, 85, 65, 86, 60, 66, 72, 53, 59, 95, 56, 51, 78, 72, 43, 75, 80, 66, 82, 58, 56, 70, 59, 41, 59, 57, 39, 54, 64, 57, 72, 64, 73, 71, 61, 72, 56, 59, 58, 66, 70, 55, 65, 73, 92, 62, 43, 76, 83, 52, 63, 51, 60, 66, 67, 62, 56, 59, 70, 72, 49, 123, 68, 81, 85, 93, 58, 74, 67, 56, 77, 60, 49, 53, 52, 66, 95, 74, 66, 59, 66, 54, 69, 56, 57, 79, 65, 68, 60, 75, 67, 52, 102, 60, 40, 73, 69, 69, 82, 71, 76, 55, 84, 71, 62, 50, 77, 54, 59, 35, 58, 84, 64, 73, 72, 64, 65, 97, 54, 54, 77, 55, 60, 51, 62, 67, 79, 80, 57, 62, 67, 68, 97, 65, 43, 68, 73, 65, 97, 104, 89, 54, 62, 69, 86, 56, 72, 61, 80, 56, 62, 98, 88, 59, 115, 59, 49, 72, 84, 108, 59, 53, 85, 67, 57, 122, 66, 138, 56, 61, 71, 63, 69, 58, 68, 60, 47, 69, 55, 47, 70, 82, 52, 66, 55, 66, 100, 46, 61, 86, 92, 54, 60, 50, 76, 99, 54, 75, 67, 75, 72, 72, 43, 74, 77, 59, 70, 86, 54, 75, 48, 52, 51, 61, 60, 66, 66, 55, 63, 70, 66, 102, 58, 69, 71, 44, 80, 52, 73, 59, 72, 61, 64, 72, 85, 45, 63, 48, 87, 84, 92, 84, 61, 89, 71, 76, 64, 85, 74, 80, 72, 82, 59, 54, 70, 53, 66, 74, 49, 52, 71, 52, 40, 58, 62, 65, 66, 66, 71, 70, 57, 66, 69, 54, 66, 78, 56, 72, 85, 61, 66, 63, 79, 63, 60, 59, 63, 68, 85, 80, 47, 77, 73, 86, 53, 78, 52, 87, 100, 86, 55, 59, 61, 48, 82, 61, 62, 52, 105, 57, 62, 79, 52, 58, 88, 56, 67, 56, 83, 52, 65, 58, 53, 62, 79, 70, 69, 55, 64, 68, 64, 77, 74, 66, 66, 54, 46, 79, 63, 60, 60, 64, 87, 58, 78, 44, 60, 54, 53, 84, 63, 61, 56, 60, 90, 41, 52, 66, 58, 48, 60, 60, 81, 45, 67, 71, 77, 70, 81, 63, 75, 72, 55, 56, 67, 68, 45, 65, 64, 83, 66, 58, 53, 53, 49, 70, 65, 43, 58, 50, 59, 72, 57, 56, 69, 114, 59, 52, 52, 66, 51, 56, 60, 57, 57, 55, 81, 50, 70, 102, 75, 89, 57, 74, 73, 58, 69, 48, 47, 62, 47, 69, 56, 68, 61, 43, 73, 76, 62, 66, 57, 47, 64, 73, 74, 58, 64, 56, 54, 69, 70, 63, 68, 57, 54, 
67, 64, 52, 65, 54, 62, 94, 57, 81, 44, 66, 56, 55, 60, 62, 96, 65, 56, 61, 76, 76, 48, 50, 58, 66, 46, 70, 57, 75, 59, 96, 52, 69, 48, 81, 63, 62, 70, 70, 75, 58, 56, 60, 72, 65, 51, 109, 66, 79, 68, 50, 54, 72, 58, 68, 47, 56, 64, 77, 67, 68, 65, 67, 68, 59, 89, 68, 70, 60, 52, 69, 63, 52, 58, 54, 73, 64, 50, 77, 68, 92, 62, 56, 61, 108, 73, 57, 93, 70, 59, 91, 47, 69, 73, 61, 55, 49, 107, 63, 51, 45, 102, 70, 49, 54, 56, 63, 79, 68, 61, 84, 110, 120, 67, 72, 40, 93, 79, 55, 70, 72, 81, 70, 59, 63, 54, 59, 105, 57, 57, 55, 88, 75, 69, 78, 78, 68, 63, 50, 61, 70, 79, 74, 86, 68, 67, 49, 110, 66, 84, 98, 52, 39, 60, 84, 74, 59, 62, 86, 46, 62, 42, 70, 48, 54, 78, 83, 51, 66, 59, 52, 52, 57, 59, 55, 59, 38, 61, 69, 76, 51, 75, 78, 75, 48, 50, 60, 52, 83, 65, 70, 78, 39, 71, 76, 55, 51, 64, 54, 52, 53, 60, 81, 89, 65, 96, 59, 64, 74, 75, 63, 63, 116, 52, 72, 68, 55, 54, 56, 80, 68, 117, 58, 47, 54, 85, 60, 67, 66, 73, 59, 66, 86, 47, 65, 72, 55, 65, 73, 55, 84, 66, 64, 57, 65, 72, 62, 85, 58, 53, 111, 67, 63, 75, 80, 46, 70, 49, 67, 60, 52, 77, 52, 67, 98, 57, 67, 68, 74, 65, 49, 83, 72, 55, 57, 83, 55, 71, 60, 94, 100, 73, 68, 56, 65, 96, 57, 70, 62, 54, 60, 54, 60, 71, 63, 60, 94, 75, 64, 53, 32, 51, 69, 85, 103, 69, 64, 67, 57, 55, 53, 57, 72, 67, 69, 65, 68, 91, 74, 52, 71, 64, 47, 65, 67, 53, 53, 68, 66, 63, 54, 63, 56, 62, 39, 57, 69, 59, 36, 54, 57, 68, 57, 60, 74, 52, 61, 81, 60, 47, 57, 69, 55, 91, 72, 60, 71, 69, 84, 69, 67, 78, 71, 71, 55, 48, 62, 79, 50, 69, 48, 69, 63, 72, 49, 61, 63, 62, 54, 65, 101, 59, 61, 89, 51, 106, 69, 81, 58, 68, 60, 70, 69, 58, 82, 67, 65, 57, 65, 78, 53, 86, 63, 55, 61, 77, 58, 107, 62, 61, 83, 36, 72, 69, 58, 68, 65, 65, 65, 46, 60, 72, 78, 54, 58, 79, 76, 63, 65, 74, 62, 79, 64, 56, 59, 67, 50, 95, 57, 59, 53, 70, 61, 59, 45, 56, 55, 40, 66, 60, 63, 70, 60, 55, 72, 72, 60, 70, 66, 71, 72, 66, 80, 67, 57, 106, 60, 69, 45, 62, 63, 58, 80, 74, 75, 72, 82, 59, 49, 77, 52, 65, 73, 64, 70, 82, 67, 50, 100, 67, 54, 63, 55, 78, 60, 68, 56, 87, 119, 80, 69, 46, 83, 58, 64, 74, 67, 73, 91, 53, 81, 65, 62, 81, 64, 70, 65, 58, 79, 60, 60, 54, 70, 61, 71, 58, 53, 77, 48, 56, 75, 53, 72, 65, 56, 60, 72, 73, 72, 59, 76, 80, 64, 65, 56, 55, 69, 63, 55, 58, 110, 69, 66, 66, 70, 68, 62, 95, 74, 62, 75, 77, 75, 59, 70, 68, 79, 96, 55, 71, 98, 89, 45, 51, 64, 52, 50, 64, 59, 64, 53, 64, 74, 69, 62, 65, 58, 64, 92, 72, 72, 62, 76, 75, 77, 106, 57, 65, 57, 72, 58, 72, 59, 71, 60, 106, 54, 68, 54, 62, 65, 80, 69, 76, 57, 57, 58, 51, 57, 68, 54, 85, 61, 86, 94, 52, 57, 62, 54, 62, 96, 65, 64, 68, 56, 56, 65, 62, 79, 53, 77, 46, 69, 52, 66, 56, 50, 54, 59, 77, 40, 82, 72, 53, 72, 74, 54, 98, 113, 63, 54, 63, 94, 54, 48, 51, 67, 68, 71, 60, 61, 69, 65, 65, 55, 75, 60, 88, 60, 50, 54, 80, 71, 66, 44, 57, 71, 77, 52, 60, 69, 68, 71, 61, 55, 60, 102, 66, 45, 68, 55, 56, 46, 85, 73, 62, 56, 70, 68, 78, 54, 84, 56, 69, 64, 64, 73, 100, 100, 100, 60, 49, 63, 71, 52, 60, 58, 57, 78, 59, 97, 59, 51, 61, 82, 45, 71, 83, 83, 64, 61, 60, 65, 74, 81, 61, 75, 72, 68, 59, 77, 66, 62, 74, 61, 67, 43, 83, 65, 47, 59, 69, 92, 63, 61, 54, 75, 55, 52, 89, 53, 79, 70, 71, 60, 76, 70, 61, 58, 69, 77, 58, 66, 70, 66, 57, 83, 55, 64, 39, 34, 90, 58, 59, 39, 63, 58, 67, 84, 64, 88, 57, 93, 61, 60, 66, 34, 89, 65, 64, 53, 50, 62, 62, 58, 44, 48, 75, 68, 97, 81, 65, 97, 64, 62, 69, 63, 96, 65, 66, 61, 77, 70, 83, 100, 62, 76, 36, 59, 62, 55, 76, 61, 61, 57, 73, 74, 73, 69, 50, 82, 75, 59, 52, 65, 63, 68, 42, 72, 62, 67, 67, 90, 61, 56, 63, 70, 61, 70, 70, 64, 57, 128, 51, 72, 80, 80, 69, 69, 59, 62, 
63, 52, 49, 75, 56, 56, 122, 42, 86, 79, 118, 53, 57, 49, 64, 79, 63, 57, 120, 66, 63, 71, 85, 62, 63, 53, 69, 64, 66, 84, 52, 84, 60, 86, 73, 68, 60, 62, 57, 57, 65, 52, 60, 80, 70, 66, 68, 56, 98, 67, 63, 70, 77, 69, 50, 62, 67, 50, 75, 66, 117, 49, 63, 66, 66, 73, 54, 63, 65, 70, 56, 55, 57, 70, 84, 83, 50, 69, 68, 53, 53, 73, 68, 66, 75, 56, 53, 62, 49, 94, 66, 53, 61, 124, 63, 48, 78, 74, 74, 58, 53, 60, 61, 51, 59, 47, 60, 72, 55, 70, 61, 63, 66, 67, 48, 64, 63, 55, 60, 52, 69, 59, 91, 70, 53, 71, 60, 66, 58, 65, 69, 67, 59, 85, 65, 58, 60, 77, 48, 78, 87, 61, 66, 47, 58, 63, 77, 69, 69, 57, 62, 72, 59, 60, 57, 65, 50, 77, 90, 58, 52, 55, 59, 85, 61, 71, 65, 54, 55, 55, 64, 73, 59, 68, 73, 63, 81, 52, 71, 66, 68, 61, 69, 52, 62, 69, 47, 47, 62, 76, 60, 44, 63, 76, 55, 59, 49, 73, 47, 108, 89, 65, 69, 82, 72, 61, 50, 74, 74, 69, 53, 67, 60, 68, 53, 61, 49, 73, 62, 64, 76, 67, 79, 77, 68, 75, 73, 65, 71, 67, 77, 68, 45, 96, 59, 64, 64, 76, 65, 65, 112, 70, 52, 105, 63, 38, 53, 60, 76, 56, 67, 66, 67, 59, 70, 91, 67, 64, 63, 71, 41, 72, 50, 62, 55, 73, 57, 65, 53, 46, 66, 64, 68, 49, 76, 67, 83, 66, 85, 65, 48, 67, 48, 57, 73, 67, 59, 65, 85, 65, 121, 70, 41, 57, 72, 60, 66, 52, 86, 60, 66, 60, 54, 55, 61, 83, 63, 57, 65, 66, 100, 72, 118, 63, 67, 53, 58, 64, 58, 56, 68, 70, 49, 54, 60, 79, 70, 73, 62, 69, 83, 81, 64, 76, 57, 55, 48, 53, 53, 55, 90, 78, 62, 71, 51, 71, 63, 65, 84, 67, 59, 55, 64, 73, 56, 76, 59, 64, 64, 76, 60, 95, 92, 69, 92, 60, 56, 66, 63, 70, 71, 63, 67, 58, 62, 68, 63, 79, 77, 52, 52, 54, 58, 58, 55, 65, 66, 79, 90, 65, 66, 63, 69, 86, 51, 62, 68, 91, 70, 59, 49, 58, 65, 66, 71, 54, 50, 51, 52, 59, 72, 50, 82, 64, 62, 64, 61, 52, 50, 48, 66, 53, 45, 101, 70, 56, 76, 78, 63, 52, 98, 76, 92, 54, 63, 75, 61, 66, 66, 66, 71, 63, 68, 36, 78, 61, 67, 64, 89, 49, 55, 65, 65, 54, 53, 60, 61, 64, 51, 43, 69, 71, 65, 88, 66, 66, 47, 55, 44, 54, 58, 63, 62, 65, 96, 73, 86, 44, 82, 97, 55, 71, 53, 48, 64, 79, 74, 68, 72, 45, 58, 72, 44, 82, 68, 65, 74, 56, 57, 66, 55, 73, 73, 66, 87, 60, 66, 73, 50, 72, 62, 48, 90, 65, 116, 62, 70, 46, 73, 43, 60, 79, 72, 61, 65, 54, 71, 50, 70, 87, 66, 60, 89, 63, 83, 64, 63, 57, 52, 74, 68, 61, 79, 85, 79, 59, 71, 65, 71, 68, 75, 58, 66, 79, 54, 72, 76, 66, 43, 60, 63, 54, 61, 47, 67, 53, 70, 60, 62, 41, 65, 66, 50, 36, 62, 70, 82, 63, 61, 63, 106, 48, 57, 59, 63, 57, 71, 64, 49, 65, 82, 49, 50, 54, 70, 72, 65, 56, 80, 58, 73, 64, 61, 53, 65, 62, 65, 62, 55, 64, 87, 69, 68, 58, 80, 125, 59, 73, 86, 56, 46, 64, 79, 65, 52, 67, 69, 73, 58, 62, 74, 44, 76, 61, 76, 75, 78, 90, 72, 54, 66, 40, 60, 60, 88, 73, 70, 51, 73, 70, 56, 79, 64, 68, 53, 57, 76, 70, 70, 53, 64, 67, 63, 85, 79, 63, 59, 99, 71, 65, 65, 78, 74, 63, 61, 84, 79, 86, 66, 65, 69, 49, 47, 56, 48, 57, 46, 75, 69, 75, 70, 76, 63, 58, 56, 61, 36, 64, 44, 59, 67, 79, 65, 57, 68, 59, 58, 75, 89, 57, 70, 39, 68, 81, 69, 58, 78, 71, 65, 66, 60, 45, 72, 67, 62, 82, 65, 71, 97, 80, 84, 69, 63, 84, 36, 53, 58, 62, 123, 61, 89, 55, 84, 70, 62, 60, 58, 56, 52, 70, 65, 55, 55, 50, 88, 64, 47, 67, 88, 59, 62, 66, 59, 69, 59, 79, 76, 56, 46, 70, 70, 62, 57, 45, 65, 84, 61, 50, 47, 82, 71, 71, 79, 114, 55, 71, 40, 57, 93, 60, 60, 66, 112, 45, 65, 60, 66, 63, 48, 87, 60, 74, 76, 58, 52, 43, 68, 57, 46, 72, 58, 48, 67, 64, 55, 66, 68, 61, 65, 55, 66, 66, 61, 38, 58, 52, 64, 74, 68, 57, 51, 59, 64, 77, 64, 55, 54, 64, 54, 73, 36, 59, 59, 69, 75, 63, 58, 53, 80, 51, 64, 43, 56, 53, 48, 59, 66, 81, 58, 61, 69, 105, 67, 80, 57, 51, 78, 79, 77, 66, 59, 31, 64, 69, 85, 60, 101, 61, 67, 56, 58, 29, 65, 
60, 87, 74, 77, 76, 63, 107, 59, 81, 68, 67, 67, 51, 51, 59, 77, 81, 64, 59, 72, 62, 60, 104, 55, 67, 57, 56, 64, 73, 69, 70, 59, 54, 70, 87, 58, 66, 67, 56, 57, 57, 59, 57, 75, 88, 63, 53, 61, 54, 57, 57, 57, 88, 58, 68, 64, 54, 64, 89, 70, 56, 57, 52, 48, 49, 57, 66, 57, 61, 77, 54, 75, 66, 74, 66, 52, 63, 46, 58, 84, 51, 48, 75, 80, 52, 66, 54, 53, 68, 82, 76, 95, 63, 62, 32, 86, 76, 74, 60, 112, 60, 55, 70, 66, 55, 66, 54, 76, 72, 66, 63, 55, 81, 78, 65, 65, 71, 59, 60, 61, 59, 69, 76, 66, 57, 52, 45, 48, 57, 65, 84, 66, 100, 53, 72, 82, 64, 60, 59, 50, 43, 46, 65, 66, 73, 81, 57, 110, 69, 60, 78, 67, 60, 53, 42, 73, 100, 52, 45, 67, 57, 69, 51, 71, 71, 58, 64, 56, 63, 60, 65, 98, 62, 61, 56, 90, 56, 66, 73, 68, 53, 71, 63, 36, 100, 75, 77, 48, 70, 53, 46, 53, 60, 26, 72, 77, 73, 72, 43, 60, 56, 67, 72, 63, 60, 66, 97, 63, 54, 76, 85, 65, 62, 67, 73, 43, 86, 62, 123, 52, 50, 68, 71, 75, 61, 61, 62, 64, 57, 64, 63, 66, 47, 82, 106, 75, 59, 58, 75, 66, 70, 72, 41, 70, 59, 50, 72, 72, 74, 64, 63, 71, 85, 65, 71, 68, 66, 61, 45, 57, 77, 60, 73, 63, 115, 58, 77, 49, 68, 59, 86, 68, 68, 54, 64, 55, 47, 62, 59, 86, 67, 67, 57, 62, 69, 71, 69, 76, 53, 93, 72, 68, 99, 79, 60, 79, 53, 57, 62, 65, 56, 90, 65, 67, 71, 55, 87, 55, 48, 66, 62, 56, 83, 54, 87, 68, 51, 51, 64, 77, 84, 67, 76, 54, 57, 53, 70, 77, 63, 61, 58, 51, 55, 52, 61, 64, 59, 80, 70, 77, 61, 78, 61, 59, 80, 51, 55, 56, 110, 80, 66, 44, 60, 70, 75, 70, 72, 69, 75, 85, 86, 63, 53, 69, 95, 62, 60, 55, 64, 56, 64, 71, 102, 48, 91, 61, 63, 56, 58, 80, 63, 64, 61, 67, 63, 60, 70, 64, 70, 70, 55, 66, 75, 62, 53, 86, 71, 64, 80, 89, 92, 81, 77, 66, 65, 92, 57, 64, 79, 60, 66, 85, 61, 45, 51, 63, 62, 66, 53, 65, 55, 62, 81, 56, 73, 61, 52, 78, 77, 77, 50, 106, 58, 89, 51, 68, 70, 62, 79, 80, 66, 62, 60, 66, 58, 65, 61, 58, 76, 60, 58, 52, 61, 84, 58, 61, 81, 64, 56, 57, 94, 44, 112, 69, 69, 68, 66, 56, 49, 54, 60, 73, 93, 67, 76, 62, 68, 27, 58, 48, 90, 55, 70, 75, 52, 69, 67, 48, 60, 46, 52, 74, 57, 61, 69, 76, 66, 61, 58, 57, 86, 42, 81, 107, 48, 63, 71, 56, 101, 54, 46, 68, 61, 57, 57, 61, 108, 67, 51, 51, 58, 68, 83, 59, 63, 66, 92, 103, 67, 64, 61, 71, 63, 83, 69, 72, 64, 78, 64, 70, 61, 72, 80, 61, 61, 60, 76, 55, 73, 66, 82, 54, 50, 66, 53, 76, 60, 80, 60, 84, 61, 62, 62, 63, 67, 61, 74, 66, 76, 57, 48, 54, 61, 66, 70, 68, 69, 73, 65, 65, 78, 52, 67, 63, 64, 61, 66, 62, 72, 75, 77, 69, 57, 65, 52, 66, 73, 70, 65, 65, 56, 45, 67, 73, 82, 54, 59, 30, 54, 60, 62, 47, 57, 44, 55, 59, 68, 67, 104, 78, 75, 67, 69, 53, 100, 69, 63, 76, 58, 75, 58, 60, 64, 63, 49, 66, 63, 97, 87, 81, 69, 52, 55, 62, 60, 48, 67, 66, 61, 41, 68, 78, 71, 97, 59, 84, 71, 45, 42, 87, 75, 44, 64, 67, 46, 69, 62, 69, 49, 85, 90, 74, 59, 54, 67, 73, 57, 61, 60, 41, 77, 56, 72, 61, 53, 81, 73, 50, 58, 76, 64, 71, 52, 62, 75, 60, 49, 53, 57, 89, 57, 73, 49, 49, 61, 71, 67, 80, 69, 62, 54, 76, 62, 67, 90, 67, 73, 66, 63, 62, 51, 56, 69, 81, 73, 84, 68, 80, 63, 79, 76, 74, 76, 59, 56, 62, 62, 83, 59, 89, 71, 43, 53, 75, 69, 57, 53, 51, 42, 91, 78, 66, 67, 71, 64, 67, 126, 140, 68, 61, 69, 41, 60, 71, 75, 80, 52, 62, 75, 98, 57, 69, 59, 87, 99, 67, 55, 100, 103, 87, 53, 68, 66, 53, 74, 68, 56, 62, 54, 67, 64, 58, 56, 51, 68, 62, 73, 66, 58, 58, 45, 57, 72, 63, 61, 70, 68, 63, 62, 60, 79, 41, 67, 65, 70, 60, 65, 56, 56, 54, 73, 58, 55, 50, 63, 62, 48, 48, 57, 74, 66, 62, 57, 85, 49, 62, 63, 104, 63, 103, 59, 54, 65, 76, 88, 64, 69, 56, 116, 99, 63, 81, 41, 48, 95, 59, 59, 86, 56, 70, 67, 68, 62, 62, 69, 66, 61, 49, 53, 68, 66, 60, 61, 75, 68, 133, 79, 54, 56, 67, 69, 
76, 56, 76, 66, 65, 71, 55, 64, 71, 70, 48, 50, 61, 66, 72, 70, 86, 50, 66, 50, 68, 63, 55, 70, 80, 59, 68, 82, 51, 74, 65, 81, 100, 63, 67, 72, 61, 73, 61, 68, 54, 75, 77, 53, 86, 42, 71, 72, 71, 60, 57, 65, 74, 56, 52, 73, 49, 62, 79, 52, 102, 42, 56, 64, 79, 64, 57, 75, 52, 60, 66, 78, 67, 72, 60, 63, 89, 51, 68, 61, 49, 59, 62, 61, 61, 84, 57, 73, 68, 86, 73, 68, 75, 54, 62, 65, 73, 62, 67, 54, 68, 60, 77, 66, 68, 69, 60, 56, 66, 79, 54, 48, 60, 55, 98, 80, 60, 63, 61, 56, 78, 57, 98, 65, 53, 79, 79, 53, 56, 65, 65, 74, 46, 55, 86, 73, 29, 81, 44, 62, 67, 102, 68, 36, 64, 62, 66, 40, 56, 85, 65, 76, 56, 60, 65, 59, 56, 85, 141, 55, 64, 58, 62, 55, 52, 82, 49, 56, 72, 50, 55, 81, 50, 51, 58, 87, 61, 46, 80, 85, 55, 64, 51, 67, 52, 64, 62, 57, 63, 66, 44, 78, 79, 76, 63, 59, 60, 78, 67, 73, 62, 50, 62, 54, 60, 67, 63, 84, 64, 62, 62, 57, 65, 54, 61, 71, 53, 67, 59, 73, 51, 90, 54, 52, 59, 42, 53, 73, 45, 77, 46, 70, 58, 76, 60, 70, 100, 116, 60, 67, 80, 54, 83, 71, 53, 70, 61, 98, 59, 59, 61, 119, 80, 79, 79, 68, 56, 72, 55, 54, 53, 65, 57, 62, 73, 84, 65, 63, 113, 48, 60, 59, 45, 53, 68, 43, 59, 81, 48, 57, 62, 68, 76, 62, 55, 66, 45, 59, 78, 57, 110, 80, 89, 48, 61, 49, 47, 60, 56, 78, 72, 53, 59, 62, 67, 63, 71, 57, 80, 64, 63, 52, 108, 61, 86, 87, 67, 77, 55, 60, 67, 63, 61, 73, 61, 58, 51, 85, 59, 55, 61, 64, 65, 86, 53, 49, 89, 60, 53, 65, 62, 73, 80, 73, 58, 87, 56, 63, 46, 61, 64, 64, 66, 59, 73, 75, 75, 49, 63, 55, 73, 57, 70, 65, 66, 59, 72, 65, 74, 49, 75, 71, 68, 50, 60, 75, 66, 63, 77, 53, 62, 67, 59, 60, 66, 86, 70, 76, 76, 48, 59, 73, 72, 46, 66, 78, 46, 62, 62, 79, 76, 59, 75, 50, 115, 65, 60, 69, 55, 54, 71, 74, 70, 57, 61, 68, 51, 57, 69, 44, 66, 72, 58, 41, 93, 80, 74, 48, 34, 85, 74, 48, 58, 51, 49, 58, 58, 54, 67, 61, 80, 65, 35, 67, 62, 52, 53, 57, 55, 71, 65, 82, 70, 50, 105, 76, 49, 129, 43, 74, 77, 78, 56, 81, 70, 74, 62, 58, 56, 59, 55, 50, 54, 68, 80, 77, 83, 56, 49, 86, 60, 55, 51, 72, 68, 47, 69, 70, 56, 46, 60, 74, 103, 62, 77, 51, 66, 71, 62, 78, 59, 87, 52, 60, 75, 92, 62, 120, 71, 56, 93, 68, 61, 68, 58, 64, 59, 70, 60, 70, 57, 66, 59, 52, 64, 63, 66, 47, 78, 64, 53, 53, 64, 51, 68, 69, 66, 54, 39, 55, 58, 51, 56, 114, 59, 74, 61, 82, 81, 66, 60, 63, 69, 70, 74, 67, 55, 54, 73, 75, 51, 69, 58, 70, 62, 59, 68, 64, 65, 59, 55, 72, 76, 94, 80, 56, 71, 62, 67, 63, 53, 53, 73, 36, 59, 59, 72, 76, 68, 62, 64, 49, 73, 68, 93, 75, 96, 54, 54, 36, 58, 82, 73, 68, 52, 60, 84, 58, 61, 49, 51, 84, 59, 76, 61, 68, 74, 72, 61, 87, 74, 67, 76, 82, 55, 54, 60, 79, 57, 93, 75, 56, 82, 52, 76, 55, 69, 68, 60, 62, 59, 53, 84, 72, 63, 85, 68, 71, 56, 78, 75, 62, 69, 88, 66, 72, 88, 78, 58, 84, 60, 66, 68, 79, 77, 62, 71, 48, 79, 66, 78, 54, 47, 46, 66, 62, 72, 76, 67, 65, 61, 67, 88, 68, 50, 73, 78, 69, 65, 63, 77, 73, 50, 89, 75, 49, 75, 52, 53, 71, 70, 72, 63, 62, 72, 70, 85, 63, 73, 70, 63, 109, 61, 53, 45, 65, 87, 63, 64, 70, 61, 72, 67, 102, 55, 49, 73, 56, 75, 58, 86, 85, 64, 68, 78, 67, 63, 48, 64, 63, 60, 59, 69, 52, 61, 61, 47, 74, 84, 60, 109, 72, 81, 63, 91, 72, 68, 73, 50, 77, 45, 84, 71, 86, 55, 78, 59, 79, 60, 54, 63, 71, 73, 56, 52, 65, 29, 59, 46, 53, 103, 70, 73, 65, 42, 83, 78, 63, 60, 83, 47, 51, 58, 79, 60, 61, 73, 65, 83, 82, 77, 50, 54, 58, 80, 57, 63, 64, 71, 53, 68, 44, 65, 80, 81, 57, 59, 59, 68, 59, 56, 59, 70, 66, 45, 50, 67, 62, 69, 67, 65, 72, 53, 55, 55, 67, 53, 71, 51, 68, 55, 58, 52, 55, 72, 52, 61, 53, 67, 57, 80, 49, 74, 57, 59, 64, 62, 82, 62, 68, 77, 77, 59, 40, 71, 69, 69, 52, 86, 57, 64, 80, 74, 65, 45, 45, 76, 60, 53, 50, 65, 69, 83, 
61, 57, 59, 77, 85, 71, 48, 77, 54, 42, 56, 67, 53, 73, 64, 54, 100, 129, 71, 80, 52, 64, 65, 65, 62, 70, 48, 82, 82, 93, 67, 54, 65, 91, 107, 52, 58, 95, 103, 79, 70, 64, 51, 75, 77, 72, 72, 52, 51, 66, 65, 53, 49, 95, 55, 62, 80, 71, 83, 69, 46, 79, 61, 60, 77, 46, 51, 65, 65, 50, 80, 56, 72, 55, 63, 62, 76, 88, 66, 56, 67, 75, 54, 52, 70, 69, 64, 70, 68, 50, 59, 81, 49, 65, 69, 47, 56, 107, 58, 96, 50, 73, 66, 51, 64, 52, 70, 58, 105, 96, 71, 87, 38, 69, 81, 63, 83, 55, 55, 68, 54, 54, 70, 60, 60, 63, 67, 48, 92, 69, 74, 76, 49, 79, 57, 114, 67, 72, 44, 71, 59, 92, 71, 96, 36, 65, 65, 68, 70, 87, 63, 77, 59, 57, 64, 60, 50, 101, 63, 79, 61, 76, 72, 71, 59, 68, 69, 57, 56, 54, 72, 90, 62, 82, 53, 64, 83, 69, 76, 65, 72, 67, 71, 93, 79, 54, 50, 53, 77, 63, 83, 68, 75, 74, 61, 54, 61, 62, 46, 97, 78, 98, 52, 55, 68, 76, 61, 59, 68, 52, 51, 57, 79, 61, 42, 66, 94, 66, 52, 71, 61, 63, 52, 60, 42, 62, 66, 52, 96, 66, 79, 66, 49, 86, 53, 56, 59, 62, 60, 69, 60, 73, 63, 65, 66, 56, 67, 59, 58, 63, 59, 62, 50, 47, 72, 72, 64, 60, 82, 59, 71, 74, 64, 82, 73, 55, 67, 57, 72, 46, 54, 54, 66, 66, 59, 94, 42, 55, 54, 49, 81, 67, 75, 59, 55, 55, 42, 68, 50, 75, 105, 52, 64, 51, 77, 53, 48, 41, 93, 135, 56, 49, 57, 67, 80, 52, 79, 66, 55, 61, 53, 39, 64, 55, 59, 64, 45, 59, 25, 68, 70, 69, 53, 69, 57, 73, 75, 74, 66, 70, 54, 53, 90, 51, 60, 74, 66, 57, 65, 80, 67, 58, 41, 101, 82, 73, 92, 66, 89, 89, 68, 64, 66, 48, 52, 69, 58, 69, 54, 63, 71, 58, 59, 75, 65, 87, 60, 50, 62, 59, 63, 55, 66, 59, 58, 83, 75, 101, 103, 72, 63, 80, 70, 79, 65, 59, 61, 53, 74, 58, 68, 54, 100, 58, 85, 91, 41, 63, 68, 74, 56, 63, 51, 70, 72, 70, 96, 47, 81, 104, 43, 76, 50, 38, 62, 73, 63, 65, 70, 66, 53, 63, 61, 76, 48, 84, 83, 65, 70, 81, 66, 109, 60, 89, 68, 79, 57, 75, 64, 69, 91, 77, 65, 60, 58, 45, 79, 86, 60, 64, 51, 82, 46, 91, 53, 97, 69, 74, 65, 72, 70, 77, 56, 55, 60, 57, 52, 52, 69, 46, 81, 65, 69, 45, 68, 48, 62, 74, 58, 62, 60, 69, 71, 67, 69, 65, 53, 74, 60, 44, 67, 66, 52, 60, 68, 50, 77, 60, 62, 49, 51, 54, 60, 75, 78, 69, 71, 74, 63, 60, 54, 61, 56, 45, 58, 55, 79, 54, 84, 62, 40, 48, 72, 74, 52, 69, 78, 45, 71, 68, 69, 60, 67, 60, 68, 56, 82, 63, 60, 58, 70, 89, 50, 49, 61, 89, 65, 52, 91, 62, 62, 70, 61, 41, 77, 62, 75, 63, 63, 57, 45, 67, 68, 76, 83, 67, 66, 71, 60, 59, 73, 67, 73, 66, 60, 66, 57, 76, 57, 58, 45, 88, 60, 55, 64, 67, 49, 56, 68, 62, 70, 72, 72, 59, 54, 101, 78, 65, 116, 65, 47, 61, 67, 59, 83, 64, 84, 58, 62, 82, 56, 63, 63, 74, 54, 63, 83, 61, 61, 69, 66, 54, 56, 64, 56, 61, 71, 61, 88, 60, 64, 65, 86, 85, 50, 70, 53, 62, 57, 78, 75, 71, 90, 76, 71, 74, 91, 55, 141, 60, 49, 74, 57, 74, 66, 55, 79, 50, 62, 65, 70, 45, 77, 61, 57, 62, 58, 57, 67, 75, 45, 63, 67, 61, 55, 69, 50, 69, 53, 48, 65, 74, 63, 97, 110, 71, 79, 72, 68, 80, 82, 67, 57, 69, 64, 42, 62, 64, 63, 63, 85, 54, 61, 57, 74, 67, 74, 67, 68, 70, 68, 80, 55, 72, 85, 71, 75, 78, 71, 69, 44, 74, 42, 59, 59, 64, 69, 57, 48, 61, 64, 69, 89, 68, 82, 85, 47, 90, 44, 65, 53, 58, 88, 48, 65, 48, 63, 77, 62, 51, 70, 63, 108, 67, 64, 55, 85, 76, 45, 66, 90, 43, 74, 76, 72, 59, 53, 63, 54, 65, 82, 47, 69, 75, 76, 57, 75, 51, 48, 87, 55, 54, 73, 73, 87, 55, 67, 74, 65, 63, 84, 66, 65, 50, 80, 72, 77, 74, 51, 48, 113, 56, 60, 61, 72, 76, 47, 54, 75, 68, 91, 90, 54, 52, 60, 61, 60, 74, 82, 58, 74, 73, 55, 77, 85, 55, 48, 62, 57, 63, 39, 59, 43, 61, 54, 62, 46, 68, 60, 51, 68, 69, 57, 86, 58, 62, 63, 61, 69, 73, 52, 69, 90, 54, 42, 54, 71, 61, 71, 66, 58, 68, 69, 66, 89, 66, 46, 61, 62, 83, 66, 91, 92, 75, 43, 64, 65, 84, 60, 82, 55, 38, 51, 
64, 49, 59, 67, 57, 58, 60, 59, 70, 81, 56, 51, 71, 59, 51, 103, 78, 57, 49, 52, 74, 59, 44, 85, 70, 67, 64, 71, 56, 106, 74, 44, 64, 55, 52, 58, 43, 55, 77, 94, 89, 63, 67, 70, 53, 62, 104, 49, 83, 68, 55, 43, 71, 63, 53, 74, 46, 67, 48, 67, 57, 61, 71, 60, 59, 68, 61, 49, 59, 55, 70, 60, 57, 59, 53, 61, 49, 76, 71, 87, 65, 76, 63, 72, 57, 72, 71, 98, 73, 49, 65, 55, 65, 54, 61, 70, 74, 50, 61, 53, 69, 45, 86, 77, 82, 69, 84, 56, 71, 62, 62, 63, 101, 84, 59, 66, 78, 82, 63, 116, 58, 62, 92, 42, 89, 63, 76, 59, 63, 82, 59, 57, 68, 76, 92, 55, 80, 64, 77, 66, 74, 53, 51, 93, 74, 57, 78, 94, 71, 72, 67, 66, 71, 71, 68, 62, 56, 53, 47, 54, 56, 69, 67, 58, 52, 73, 85, 48, 64, 70, 68, 49, 103, 47, 71, 48, 92, 62, 69, 66, 83, 72, 55, 73, 99, 58, 66, 59, 51, 87, 37, 62, 53, 61, 65, 58, 72, 53, 73, 48, 71, 42, 60, 62, 76, 44, 56, 62, 46, 62, 92, 71, 65, 61, 63, 55, 69, 60, 55, 65, 50, 71, 51, 51, 57, 73, 80, 57, 78, 54, 50, 75, 56, 65, 71, 67, 60, 70, 86, 58, 73, 61, 51, 88, 60, 50, 57, 81, 55, 58, 65, 73, 54, 57, 68, 54, 56, 71, 60, 84, 73, 70, 80, 69, 84, 51, 55, 65, 71, 74, 54, 48, 70, 50, 59, 55, 84, 63, 86, 51, 63, 69, 60, 55, 54, 68, 56, 54, 61, 93, 52, 75, 47, 55, 66, 65, 70, 61, 69, 56, 71, 45, 79, 82, 53, 55, 73, 64, 61, 62, 68, 61, 53, 57, 97, 67, 63, 62, 97, 66, 73, 66, 78, 55, 61, 59, 64, 63, 70, 87, 67, 64, 81, 72, 80, 70, 42, 56, 56, 60, 72, 53, 74, 70, 56, 57, 65, 70, 72, 87, 65, 65, 46, 64, 61, 102, 103, 75, 83, 63, 56, 52, 50, 57, 71, 49, 73, 55, 59, 101, 59, 60, 62, 63, 56, 73, 62, 76, 53, 44, 70, 70, 78, 71, 54, 49, 61, 74, 89, 74, 89, 51, 49, 53, 49, 68, 58, 57, 64, 66, 75, 59, 48, 62, 77, 67, 75, 61, 50, 43, 62, 94, 113, 63, 52, 72, 83, 67, 70, 75, 86, 62, 55, 56, 83, 129, 99, 49, 49, 61, 65, 58, 57, 76, 71, 60, 50, 63, 70, 68, 67, 79, 65, 79, 52, 73, 62, 60, 46, 52, 51, 70, 77, 67, 67, 75, 53, 68, 69, 65, 54, 66, 63, 82, 58, 76, 70, 59, 49, 72, 61, 73, 49, 69, 80, 72, 89, 64, 65, 72, 68, 49, 62, 63, 71, 88, 51, 61, 70, 59, 70, 35, 65, 55, 60, 67, 87, 71, 95, 54, 68, 74, 87, 60, 79, 56, 88, 85, 61, 58, 77, 93, 47, 44, 50, 64, 75, 63, 63, 56, 53, 46, 67, 84, 70, 55, 55, 86, 46, 56, 83, 55, 60, 71, 57, 65, 81, 64, 67, 73, 63, 140, 50, 62, 55, 46, 81, 61, 62, 59, 73, 59, 79, 62, 72, 82, 97, 58, 77, 78, 64, 61, 89, 61, 55, 74, 68, 77, 74, 57, 61, 60, 72, 63, 64, 88, 75, 74, 72, 49, 57, 85, 55, 77, 46, 48, 69, 68, 96, 69, 56, 61, 71, 63, 56, 67, 86, 61, 72, 69, 67, 57, 60, 70, 57, 73, 70, 65, 70, 58, 61, 61, 59, 58, 41, 64, 65, 58, 67, 62, 32, 59, 65, 59, 51, 74, 74, 55, 75, 58, 68, 69, 94, 60, 77, 64, 72, 53, 56, 72, 58, 61, 87, 76, 66, 80, 55, 78, 65, 51, 66, 56, 53, 71, 47, 63, 80, 40, 66, 55, 59, 58, 80, 60, 60, 56, 48, 64, 48, 57, 56, 87, 71, 63, 51, 52, 71, 61, 82, 64, 112, 59, 64, 61, 74, 71, 59, 61, 88, 143, 71, 69, 58, 43, 110, 70, 106, 69, 48, 68, 77, 57, 62, 60, 64, 56, 81, 65, 65, 73, 49, 71, 64, 61, 73, 70, 62, 43, 71, 70, 84, 64, 52, 74, 48, 54, 57, 59, 67, 80, 60, 56, 55, 76, 60, 43, 66, 44, 64, 53, 64, 69, 63, 60, 69, 58, 54, 47, 60, 76, 50, 77, 50, 61, 55, 52, 73, 63, 66, 64, 61, 64, 68, 66, 73, 60, 70, 69, 49, 66, 61, 52, 65, 83, 72, 68, 51, 64, 74, 44, 107, 43, 62, 64, 59, 87, 78, 68, 68, 51, 94, 58, 57, 66, 76, 52, 59, 57, 51, 51, 64, 69, 96, 46, 74, 94, 85, 62, 78, 68, 68, 62, 49, 63, 63, 64, 76, 94, 65, 59, 44, 61, 80, 66, 63, 71, 56, 58, 61, 54, 71, 81, 62, 78, 52, 94, 73, 61, 40, 55, 72, 57, 75, 62, 74, 58, 69, 61, 78, 69, 96, 38, 71, 62, 66, 53, 54, 51, 60, 105, 53, 55, 69, 64, 58, 59, 103, 66, 66, 77, 69, 63, 75, 56, 79, 50, 59, 107, 64, 101, 103, 88, 66, 
75, 64, 54, 75, 56, 58, 59, 82, 61, 49, 60, 52, 61, 49, 83, 82, 66, 50, 43, 77, 53, 61, 57, 70, 66, 66, 90, 69, 71, 116, 70, 68, 72, 64, 55, 51, 63, 54, 72, 74, 53, 46, 65, 59, 87, 62, 85, 67, 73, 134, 65, 84, 52, 64, 51, 85, 71, 53, 74, 72, 67, 67, 51, 68, 54, 52, 63, 61, 61, 77, 60, 58, 70, 52, 66, 90, 62, 93, 49, 57, 110, 63, 62, 93, 66, 70, 78, 77, 53, 58, 57, 65, 59, 88, 71, 66, 70, 41, 55, 81, 64, 55, 66, 62, 55, 67, 58, 66, 57, 65, 57, 54, 58, 49, 51, 76, 49, 70, 70, 73, 56, 76, 72, 60, 64, 61, 79, 57, 56, 65, 60, 82, 65, 61, 54, 57, 60, 94, 62, 53, 63, 70, 60, 77, 58, 46, 71, 60, 49, 57, 87, 65, 51, 66, 70, 63, 108, 56, 79, 70, 58, 74, 45, 56, 72, 102, 52, 66, 67, 80, 117, 67, 58, 72, 65, 71, 44, 78, 68, 66, 81, 121, 66, 40, 51, 61, 65, 51, 68, 67, 59, 89, 45, 61, 65, 57, 76, 65, 50, 60, 52, 58, 64, 79, 58, 55, 52, 72, 58, 75, 68, 49, 62, 60, 58, 58, 60, 57, 84, 62, 68, 69, 68, 52, 44, 70, 70, 75, 105, 71, 80, 52, 102, 69, 67, 69, 68, 95, 48, 63, 57, 59, 64, 62, 68, 61, 42, 51, 45, 64, 73, 58, 52, 129, 67, 52, 47, 72, 100, 62, 73, 75, 64, 99, 54, 75, 38, 95, 55, 80, 71, 57, 84, 60, 59, 90, 58, 76, 56, 96, 63, 63, 72, 55, 79, 67, 53, 68, 97, 66, 51, 60, 55, 53, 62, 69, 59, 52, 62, 67, 50, 66, 52, 53, 49, 55, 66, 107, 54, 59, 46, 67, 67, 108, 62, 62, 72, 89, 73, 49, 65, 90, 57, 58, 61, 66, 68, 68, 38, 69, 91, 47, 47, 64, 58, 60, 37, 66, 50, 61, 73, 58, 62, 71, 56, 58, 51, 65, 77, 77, 53, 58, 71, 50, 52, 62, 62, 71, 65, 58, 69, 56, 75, 50, 71, 78, 97, 96, 77, 78, 65, 52, 63, 47, 70, 63, 84, 56, 68, 72, 71, 70, 50, 43, 72, 65, 53, 39, 74, 80, 61, 63, 53, 57, 65, 67, 42, 60, 71, 54, 73, 68, 65, 73, 54, 82, 87, 72, 74, 71, 77, 71, 52, 56, 72, 64, 52, 58, 64, 63, 61, 56, 74, 61, 56, 73, 64, 52, 71, 64, 69, 54, 50, 55, 76, 59, 61, 61, 57, 68, 50, 64, 74, 70, 90, 59, 59, 92, 58, 65, 50, 55, 44, 48, 61, 76, 78, 64, 68, 77, 58, 58, 72, 58, 70, 76, 60, 63, 75, 48, 97, 78, 58, 63, 75, 72, 62, 53, 51, 63, 60, 66, 56, 54, 59, 74, 72, 64, 69, 58, 51, 60, 64, 47, 76, 57, 106, 78, 63, 51, 64, 49, 69, 68, 67, 81, 75, 77, 90, 53, 99, 63, 47, 58, 40, 56, 56, 60, 75, 54, 76, 69, 51, 61, 66, 60, 49, 63, 94, 74, 69, 92, 73, 60, 57, 73, 85, 55, 60, 63, 57, 64, 77, 51, 78, 75, 61, 66, 55, 52, 47, 55, 94, 74, 56, 69, 69, 85, 56, 71, 76, 80, 46, 62, 57, 62, 141, 110, 47, 59, 73, 46, 56, 58, 56, 57, 74, 61, 66, 62, 57, 64, 57, 62, 98, 62, 62, 58, 78, 60, 60, 60, 63, 59, 57, 82, 92, 56, 66, 63, 56, 64, 70, 65, 102, 61, 87, 73, 80, 53, 76, 58, 48, 57, 65, 80, 74, 92, 62, 66, 59, 78, 63, 58, 58, 59, 81, 76, 84, 63, 74, 69, 57, 62, 76, 46, 52, 67, 82, 89, 75, 70, 77, 72, 63, 53, 53, 89, 60, 41, 65, 80, 91, 84, 67, 61, 51, 49, 74, 70, 65, 62, 77, 94, 99, 59, 70, 61, 67, 49, 60, 76, 56, 54, 68, 54, 66, 62, 56, 57, 74, 64, 107, 74, 60, 58, 49, 81, 53, 54, 58, 79, 79, 72, 72, 73, 52, 75, 57, 66, 78, 66, 61, 70, 76, 89, 94, 74, 62, 67, 83, 55, 54, 78, 58, 52, 78, 75, 48, 78, 70, 90, 61, 51, 61, 64, 71, 70, 67, 74, 61, 69, 53, 58, 59, 98, 55, 88, 60, 46, 57, 64, 55, 68, 68, 63, 65, 53, 58, 72, 49, 59, 62, 57, 36, 64, 57, 58, 55, 54, 57, 44, 60, 54, 75, 56, 70, 58, 61, 54, 60, 56, 72, 107, 51, 75, 60, 55, 79, 53, 63, 61, 72, 67, 76, 65, 56, 48, 51, 67, 70, 58, 64, 62, 89, 69, 57, 79, 58, 56, 58, 63, 61, 77, 45, 65, 65, 57, 49, 59, 54, 61, 79, 68, 60, 79, 62, 79, 63, 77, 62, 68, 68, 40, 67, 74, 63, 54, 51, 56, 114, 62, 52, 78, 60, 77, 60, 72, 68, 58, 56, 75, 67, 79, 60, 49, 70, 57, 54, 53, 64, 52, 57, 63, 87, 71, 65, 67, 60, 55, 67, 88, 68, 65, 54, 58, 90, 88, 42, 64, 50, 65, 75, 63, 120, 73, 62, 59, 68, 69, 106, 110, 64, 
62, 69, 57, 62, 63, 63, 105, 95, 73, 69, 102, 64, 45, 66, 59, 71, 59, 64, 77, 68, 52, 54, 94, 69, 82, 67, 74, 86, 68, 61, 59, 50, 65, 63, 81, 52, 78, 52, 51, 49, 71, 69, 59, 74, 71, 91, 75, 66, 59, 58, 56, 76, 64, 67, 91, 64, 54, 59, 70, 76, 68, 62, 64, 54, 65, 59, 57, 50, 67, 54, 54, 68, 61, 59, 55, 55, 47, 77, 56, 67, 81, 52, 69, 47, 64, 71, 61, 79, 83, 73, 91, 57, 53, 57, 70, 56, 52, 63, 45, 62, 65, 58, 68, 63, 47, 59, 60, 82, 58, 61, 57, 70, 70, 49, 64, 55, 79, 110, 50, 81, 68, 72, 68, 64, 50, 92, 69, 89, 58, 51, 57, 76, 54, 72, 83, 74, 55, 101, 74, 47, 53, 62, 63, 51, 77, 67, 53, 60, 60, 67, 51, 63, 66, 76, 79, 51, 71, 81, 91, 86, 58, 53, 74, 59, 67, 67, 55, 55, 75, 89, 89, 69, 47, 61, 72, 47, 52, 58, 74, 56, 69, 60, 66, 63, 67, 60, 55, 87, 53, 58, 50, 43, 68, 59, 51, 55, 74, 75, 51, 58, 61, 58, 61, 80, 93, 65, 69, 46, 70, 71, 65, 73, 71, 61, 70, 62, 49, 84, 86, 67, 57, 62, 74, 60, 88, 55, 49, 68, 68, 42, 61, 55, 61, 55, 61, 68, 53, 70, 51, 62, 63, 55, 70, 62, 65, 54, 49, 53, 68, 64, 84, 59, 68, 75, 62, 73, 51, 80, 67, 63, 61, 75, 63, 42, 64, 74, 59, 66, 57, 70, 94, 71, 79, 67, 65, 73, 55, 55, 104, 61, 71, 71, 58, 74, 71, 64, 74, 57, 64, 60, 78, 53, 67, 65, 73, 60, 57, 103, 62, 73, 62, 66, 98, 77, 57, 84, 58, 64, 89, 47, 69, 106, 72, 58, 59, 58, 46, 82, 61, 59, 54, 71, 121, 66, 67, 69, 71, 66, 117, 77, 105, 68, 83, 65, 73, 92, 64, 71, 73, 44, 48, 90, 65, 92, 68, 76, 66, 74, 58, 54, 54, 59, 66, 69, 67, 63, 64, 60, 59, 70, 86, 69, 60, 56, 76, 60, 50, 58, 57, 80, 57, 65, 76, 61, 70, 62, 63, 50, 63, 73, 80, 81, 98, 67, 64, 55, 77, 65, 54, 55, 70, 61, 62, 53, 53, 66, 63, 72, 74, 63, 90, 133, 45, 66, 89, 67, 72, 66, 56, 52, 68, 74, 67, 55, 58, 55, 69, 34, 51, 61, 58, 70, 64, 66, 53, 58, 70, 58, 61, 79, 79, 99, 60, 82, 67, 60, 60, 78, 75, 71, 66, 53, 59, 59, 59, 55, 62, 86, 69, 56, 51, 48, 64, 61, 123, 69, 75, 51, 71, 56, 58, 58, 57, 62, 60, 64, 66, 60, 69, 59, 72, 69, 63, 65, 51, 42, 79, 62, 65, 76, 60, 75, 59, 49, 64, 48, 69, 75, 76, 56, 59, 49, 57, 73, 65, 59, 54, 56, 66, 56, 65, 60, 53, 96, 60, 55, 86, 65, 105, 56, 62, 69, 67, 52, 61, 84, 69, 64, 72, 65, 69, 51, 63, 62, 77, 55, 65, 47, 69, 48, 82, 71, 65, 64, 71, 67, 72, 69, 51, 77, 57, 41, 59, 72, 83, 53, 80, 62, 69, 83, 86, 77, 64, 69, 84, 66, 59, 60, 50, 82, 99, 53, 54, 71, 60, 60, 58, 62, 74, 62, 67, 49, 54, 79, 74, 72, 56, 48, 55, 70, 71, 62, 71, 79, 54, 56, 60, 79, 47, 53, 51, 55, 63, 88, 52, 63, 68, 78, 60, 61, 97, 72, 70, 80, 80, 59, 72, 59, 59, 58, 64, 34, 69, 51, 63, 60, 44, 60, 66, 53, 65, 64, 78, 62, 36, 67, 68, 59, 60, 56, 48, 72, 68, 86, 117, 72, 63, 68, 105, 66, 78, 61, 66, 74, 56, 75, 58, 64, 65, 68, 48, 75, 70, 68, 54, 70, 68, 46, 76, 93, 48, 69, 50, 64, 77, 92, 69, 64, 59, 72, 66, 55, 89, 74, 65, 49, 66, 65, 57, 61, 69, 78, 65, 55, 67, 84, 107, 62, 89, 66, 73, 80, 71, 62, 83, 56, 61, 55, 61, 44, 67, 49, 106, 50, 59, 67, 52, 141, 71, 54, 63, 67, 62, 66, 89, 68, 70, 60, 53, 85, 68, 70, 66, 76, 70, 55, 53, 53, 61, 57, 48, 51, 58, 55, 120, 58, 75, 55, 55, 69, 43, 59, 60, 57, 51, 80, 54, 71, 61, 67, 54, 57, 72, 54, 49, 59, 59, 70, 95, 51, 96, 59, 79, 65, 54, 62, 53, 54, 61, 52, 52, 56, 80, 64, 73, 72, 84, 59, 75, 66, 68, 59, 64, 65, 69, 54, 70, 61, 44, 73, 65, 52, 67, 57, 67, 52, 53, 47, 77, 71, 59, 64, 56, 56, 79, 60, 71, 58, 58, 60, 69, 61, 73, 73, 71, 66, 59, 63, 78, 57, 84, 69, 63, 81, 61, 72, 60, 63, 70, 60, 93, 76, 81, 66, 84, 74, 58, 47, 61, 65, 60, 60, 58, 55, 108, 68, 70, 76, 72, 68, 67, 74, 68, 80, 50, 68, 62, 113, 67, 65, 51, 67, 65, 65, 62, 54, 68, 64, 73, 63, 72, 53, 69, 67, 61, 82, 90, 53, 110, 87, 66, 88, 71, 
59, 65, 71, 81, 51, 89, 67, 58, 79, 75, 66, 52, 96, 61, 71, 67, 63, 58, 62, 60, 57, 77, 66, 57, 46, 35, 76, 96, 65, 77, 52, 76, 98, 53, 68, 59, 56, 56, 70, 62, 52, 76, 52, 54, 98, 82, 56, 53, 54, 57, 61, 49, 54, 80, 57, 68, 65, 48, 59, 70, 74, 72, 57, 57, 71, 51, 77, 45, 50, 51, 61, 49, 76, 97, 71, 66, 59, 114, 58, 64, 55, 69, 82, 66, 59, 55, 73, 75, 128, 59, 72, 51, 84, 77, 64, 76, 70, 59, 62, 73, 65, 56, 56, 65, 60, 54, 51, 56, 89, 61, 79, 71, 72, 104, 65, 79, 50, 59, 50, 61, 61, 62, 57, 68, 47, 58, 74, 104, 71, 71, 66, 64, 97, 76, 69, 77, 57, 46, 103, 94, 83, 82, 55, 104, 49, 63, 56, 69, 52, 68, 44, 65, 56, 76, 76, 77, 75, 51, 74, 68, 67, 67, 70, 55, 80, 72, 77, 47, 53, 55, 66, 73, 82, 80, 74, 49, 59, 62, 61, 98, 57, 70, 65, 58, 47, 69, 75, 53, 75, 72, 70, 52, 65, 87, 62, 85, 70, 63, 61, 65, 69, 80, 67, 64, 43, 73, 111, 59, 57, 54, 41, 62, 67, 88, 60, 56, 69, 67, 72, 51, 60, 66, 74, 85, 84, 97, 59, 60, 88, 77, 56, 67, 70, 64, 90, 72, 50, 87, 65, 47, 50, 83, 57, 64, 71, 96, 65, 48, 52, 72, 82, 65, 59, 56, 57, 63, 61, 45, 59, 49, 64, 64, 78, 101, 70, 60, 55, 56, 89, 51, 64, 64, 73, 50, 81, 70, 57, 55, 74, 67, 62, 87, 48, 69, 56, 53, 89, 59, 57, 58, 61, 67, 57, 48, 73, 51, 58, 83, 66, 70, 51, 48, 55, 74, 68, 77, 48, 104, 63, 69, 54, 79, 57, 79, 64, 50, 69, 69, 54, 61, 62, 79, 59, 75, 57, 56, 99, 61, 93, 62, 60, 41, 93, 65, 61, 79, 62, 51, 93, 75, 66, 55, 120, 85, 67, 76, 60, 61, 68, 51, 62, 77, 78, 88, 70, 70, 55, 99, 85, 61, 64, 68, 93, 105, 57, 113, 84, 59, 69, 62, 97, 64, 66, 50, 53, 105, 64, 65, 66, 59, 54, 73, 47, 55, 90, 59, 64, 70, 48, 52, 44, 75, 53, 57, 48, 61, 57, 51, 66, 47, 62, 50, 60, 56, 81, 48, 47, 74, 48, 70, 66, 55, 61, 90, 47, 75, 58, 64, 45, 75, 61, 84, 62, 60, 55, 72, 60, 68, 68, 63, 60, 45, 89, 81, 84, 53, 89, 71, 57, 62, 59, 52, 68, 58, 64, 83, 97, 59, 63, 58, 58, 71, 55, 37, 64, 58, 46, 60, 69, 48, 62, 38, 92, 69, 74, 93, 79, 61, 52, 101, 62, 65, 60, 80, 74, 68, 66, 69, 55, 69, 53, 65, 61, 53, 65, 45, 47, 64, 41, 56, 62, 50, 65, 55, 52, 63, 72, 57, 68, 61, 54, 52, 57, 48, 80, 56, 119, 54, 41, 65, 79, 50, 96, 51, 86, 94, 61, 67, 61, 66, 64, 62, 58, 59, 52, 99, 81, 77, 71, 70, 43, 70, 57, 54, 61, 76, 59, 62, 54, 101, 80, 48, 107, 68, 66, 80, 62, 38, 64, 53, 51, 77, 64, 57, 75, 69, 65, 54, 58, 61, 51, 59, 68, 65, 53, 115, 64, 47, 44, 57, 68, 59, 61, 64, 60, 73, 64, 59, 58, 78, 48, 65, 56, 89, 68, 82, 47, 62, 70, 81, 60, 62, 60, 60, 54, 119, 81, 99, 83, 68, 65, 64, 73, 65, 86, 99, 48, 55, 113, 58, 43, 69, 62, 67, 54, 45, 64, 49, 75, 49, 41, 49, 63, 63, 74, 64, 64, 49, 83, 68, 98, 76, 65, 61, 65, 55, 56, 105, 51, 54, 54, 83, 83, 62, 86, 58, 49, 53, 75, 76, 66, 65, 67, 54, 67, 63, 46, 60, 58, 54, 58, 48, 66, 52, 113, 63, 54, 57, 61, 52, 72, 49, 61, 62, 86, 62, 68, 77, 84, 96, 50, 53, 92, 60, 67, 64, 55, 65, 62, 58, 52, 71, 57, 77, 65, 71, 60, 67, 74, 67, 74, 90, 61, 69, 62, 91, 73, 68, 71, 58, 60, 57, 61, 45, 55, 74, 58, 42, 52, 75, 81, 67, 73, 71, 57, 59, 63, 70, 76, 60, 79, 77, 81, 49, 53, 71, 63, 85, 53, 64, 64, 55, 55, 71, 66, 59, 51, 47, 87, 64, 63, 58, 69, 57, 52, 88, 69, 72, 67, 49, 56, 59, 73, 53, 70, 96, 78, 57, 60, 64, 74, 50, 71, 60, 80, 93, 54, 60, 80, 76, 77, 67, 41, 97, 44, 58, 53, 74, 103, 45, 49, 52, 43, 56, 56, 81, 58, 77, 58, 68, 59, 60, 58, 57, 61, 63, 56, 70, 67, 62, 86, 67, 74, 72, 55, 80, 81, 75, 80, 90, 83, 61, 82, 69, 65, 66, 66, 97, 68, 58, 48, 55, 81, 54, 89, 68, 64, 73, 92, 96, 63, 63, 59, 44, 68, 65, 87, 79, 93, 118, 73, 56, 55, 68, 54, 52, 39, 67, 58, 98, 48, 58, 69, 55, 59, 66, 64, 52, 85, 62, 74, 86, 74, 57, 69, 79, 46, 73, 82, 66, 58, 95, 
65, 70, 65, 60, 68, 47, 100, 54, 53, 55, 66, 69, 44, 51, 43, 59, 85, 52, 54, 46, 39, 91, 56, 64, 56, 68, 77, 87, 78, 68, 88, 59, 55, 50, 54, 56, 65, 73, 76, 66, 51, 72, 64, 74, 57, 48, 79, 54, 59, 82, 51, 72, 61, 75, 54, 79, 79, 59, 71, 81, 53, 53, 48, 68, 70, 95, 47, 50, 55, 48, 63, 91, 84, 53, 75, 63, 67, 66, 56, 59, 69, 63, 62, 61, 79, 81, 68, 114, 88, 61, 77, 57, 57, 56, 54, 70, 53, 91, 58, 45, 85, 61, 56, 91, 57, 104, 49, 55, 59, 53, 73, 61, 76, 78, 89, 71, 88, 57, 90, 44, 57, 66, 57, 62, 48, 58, 75, 58, 58, 59, 64, 64, 64, 55, 58, 91, 68, 63, 69, 56, 61, 52, 71, 109, 61, 65, 59, 59, 49, 67, 40, 52, 42, 72, 75, 71, 66, 58, 44, 76, 60, 104, 58, 63, 62, 56, 78, 86, 67, 91, 61, 52, 67, 85, 57, 88, 61, 46, 54, 70, 59, 53, 71, 91, 67, 57, 74, 53, 54, 83, 74, 43, 74, 76, 64, 76, 53, 64, 52, 49, 70, 68, 58, 66, 75, 67, 57, 71, 90, 61, 59, 64, 59, 66, 73, 55, 66, 51, 68, 78, 58, 62, 66, 67, 56, 52, 47, 54, 73, 98, 129, 56, 71, 81, 72, 83, 80, 76, 46, 67, 52, 91, 56, 69, 71, 55, 62, 56, 59, 94, 75, 66, 57, 57, 56, 57, 90, 45, 58, 57, 99, 67, 67, 77, 49, 64, 70, 70, 70, 67, 58, 55, 54, 63, 98, 56, 61, 84, 55, 57, 60, 68, 56, 59, 66, 81, 55, 70, 54, 61, 76, 57, 54, 53, 56, 52, 60, 57, 53, 56, 58, 52, 48, 67, 67, 67, 63, 62, 74, 68, 60, 93, 62, 77, 60, 94, 70, 42, 64, 60, 59, 56, 65, 69, 58, 64, 54, 65, 62, 64, 65, 93, 43, 55, 73, 52, 60, 65, 69, 64, 61, 74, 58, 68, 63, 69, 58, 47, 75, 111, 74, 74, 44, 74, 78, 65, 53, 61, 70, 61, 97, 53, 57, 44, 67, 67, 61, 53, 61, 72, 77, 67, 74, 56, 65, 58, 88, 60, 100, 76, 68, 63, 54, 102, 61, 59, 81, 70, 63, 68, 88, 82, 60, 50, 54, 74, 56, 89, 75, 52, 89, 76, 63, 58, 65, 75, 65, 38, 90, 74, 43, 66, 72, 58, 83, 64, 64, 51, 61, 65, 77, 70, 84, 76, 61, 68, 58, 62, 67, 44, 62, 63, 55, 45, 74, 61, 53, 99, 61, 76, 55, 63, 67, 56, 47, 89, 65, 76, 64, 81, 52, 71, 62, 58, 67, 68, 74, 74, 59, 67, 59, 62, 66, 83, 59, 64, 56, 62, 44, 60, 52, 87, 47, 71, 68, 82, 55, 103, 50, 72, 58, 73, 73, 65, 57, 79, 66, 86, 121, 58, 85, 53, 54, 56, 61, 87, 56, 54, 57, 57, 76, 76, 73, 61, 59, 58, 70, 69, 69, 47, 71, 72, 67, 69, 58, 58, 53, 86, 77, 64, 63, 74, 56, 64, 68, 76, 60, 70, 66, 70, 42, 56, 76, 78, 52, 57, 64, 69, 53, 45, 89, 81, 63, 44, 51, 60, 77, 49, 67, 74, 67, 62, 57, 43, 58, 66, 46, 58, 61, 53, 74, 58, 42, 66, 54, 61, 72, 56, 73, 77, 45, 81, 60, 55, 83, 70, 69, 62, 45, 67, 124, 67, 53, 53, 75, 64, 58, 65, 36, 62, 54, 82, 50, 48, 107, 77, 49, 65, 58, 55, 55, 59, 57, 65, 37, 97, 48, 73, 43, 54, 54, 127, 69, 63, 48, 133, 124, 62, 93, 65, 67, 57, 63, 59, 38, 72, 56, 61, 57, 61, 62, 68, 67, 54, 63, 63, 52, 68, 60, 74, 74, 52, 88, 89, 67, 73, 62, 56, 83, 57, 41, 65, 67, 64, 69, 61, 81, 37, 73, 56, 72, 82, 77, 76, 54, 98, 52, 72, 51, 69, 61, 44, 54, 65, 69, 82, 52, 98, 77, 74, 60, 79, 74, 73, 48, 74, 69, 75, 63, 69, 53, 64, 75, 52, 69, 65, 53, 53, 65, 52, 80, 47, 78, 62, 61, 80, 59, 84, 56, 51, 61, 52, 84, 51, 55, 62, 50, 80, 59, 72, 61, 68, 58, 52, 55, 56, 71, 42, 72, 39, 83, 52, 53, 62, 51, 78, 50, 111, 67, 109, 108, 42, 60, 49, 94, 97, 64, 70, 50, 65, 52, 58, 54, 54, 55, 63, 64, 49, 74, 76, 65, 49, 60, 47, 45, 57, 50, 51, 72, 65, 67, 74, 69, 82, 63, 51, 70, 60, 60, 69, 73, 60, 59, 60, 60, 63, 74, 64, 56, 60, 69, 84, 54, 55, 87, 56, 81, 58, 76, 43, 58, 62, 66, 59, 68, 91, 64, 66, 44, 67, 57, 80, 54, 61, 60, 63, 70, 50, 71, 30, 74, 82, 64, 67, 70, 66, 52, 82, 70, 55, 65, 51, 72, 49, 45, 46, 70, 75, 52, 69, 61, 59, 51, 110, 56, 78, 62, 74, 72, 48, 67, 78, 68, 81, 75, 54, 65, 74, 58, 73, 42, 60, 54, 50, 65, 83, 52, 69, 109, 60, 66, 63, 69, 60, 76, 69, 74, 69, 75, 58, 61, 65, 56, 
61, 63, 78, 69, 57, 77, 51, 51, 70, 58, 71, 52, 66, 81, 71, 54, 66, 76, 82, 52, 74, 100, 57, 62, 97, 43, 61, 49, 55, 65, 86, 69, 70, 69, 96, 96, 66, 93, 60, 65, 83, 71, 74, 68, 69, 55, 75, 37, 95, 50, 59, 66, 59, 55, 64, 72, 66, 68, 68, 65, 72, 84, 78, 64, 51, 92, 62, 59, 63, 68, 83, 70, 42, 71, 66, 70, 76, 89, 66, 82, 59, 94, 78, 70, 55, 58, 54, 60, 81, 66, 56, 108, 87, 70, 64, 63, 60, 47, 77, 59, 58, 75, 59, 69, 55, 78, 68, 50, 114, 68, 54, 63, 63, 81, 64, 68, 58, 82, 65, 63, 87, 60, 49, 56, 66, 65, 63, 70, 58, 70, 49, 53, 72, 56, 74, 60, 74, 84, 60, 73, 80, 67, 97, 62, 71, 62, 95, 77, 60, 70, 59, 91, 55, 63, 57, 58, 59, 68, 61, 40, 92, 53, 78, 71, 55, 77, 69, 80, 64, 75, 79, 64, 49, 92, 52, 61, 57, 42, 66, 51, 58, 75, 53, 63, 60, 76, 79, 73, 44, 67, 87, 67, 66, 61, 54, 57, 71, 61, 53, 78, 45, 50, 57, 71, 71, 80, 129, 58, 63, 73, 62, 59, 75, 66, 75, 53, 54, 75, 67, 58, 77, 71, 53, 65, 57, 70, 71, 67, 65, 51, 63, 100, 59, 65, 67, 57, 77, 45, 85, 51, 51, 61, 63, 78, 84, 80, 108, 59, 83, 52, 56, 64, 63, 61, 69, 72, 77, 65, 65, 47, 42, 66, 81, 71, 67, 50, 75, 116, 77, 59, 72, 50, 59, 66, 67, 54, 55, 59, 61, 78, 65, 92, 72, 84, 40, 73, 72, 72, 54, 77, 56, 105, 65, 55, 65, 70, 64, 63, 68, 52, 56, 55, 67, 94, 44, 93, 54, 49, 87, 65, 84, 76, 71, 52, 62, 83, 69, 73, 67, 51, 48, 70, 87, 58, 71, 64, 69, 50, 78, 70, 60, 82, 54, 53, 61, 72, 72, 57, 65, 53, 84, 56, 124, 60, 84, 46, 65, 62, 82, 58, 75, 53, 53, 60, 56, 73, 91, 42, 81, 43, 83, 67, 79, 64, 50, 62, 39, 58, 69, 88, 62, 119, 81, 47, 57, 81, 33, 68, 91, 71, 39, 74, 62, 48, 70, 63, 63, 55, 50, 69, 70, 106, 39, 80, 68, 99, 95, 50, 86, 67, 56, 61, 53, 58, 46, 64, 71, 67, 83, 75, 78, 84, 49, 67, 53, 99, 65, 74, 80, 62, 64, 51, 68, 62, 76, 30, 89, 51, 52, 55, 49, 71, 62, 46, 134, 78, 68, 74, 77, 51, 66, 65, 75, 59, 55, 57, 61, 63, 42, 54, 67, 85, 55, 95, 77, 77, 66, 71, 71, 51, 53, 62, 83, 89, 81, 57, 77, 61, 58, 73, 68, 108, 54, 62, 64, 101, 69, 54, 60, 35, 67, 63, 75, 37, 87, 83, 49, 67, 71, 74, 59, 57, 60, 58, 59, 56, 61, 77, 54, 79, 69, 72, 58, 65, 44, 56, 62, 49, 63, 35, 49, 85, 74, 70, 49, 64, 79, 89, 66, 65, 86, 47, 72, 48, 73, 58, 57, 67, 61, 58, 57, 56, 58, 62, 67, 70, 54, 66, 87, 56, 71, 69, 56, 59, 68, 33, 85, 66, 82, 60, 70, 101, 75, 51, 55, 62, 61, 59, 109, 57, 74, 89, 82, 59, 57, 61, 72, 62, 66, 68, 66, 61, 69, 59, 66, 74, 63, 79, 86, 45, 80, 69, 78, 69, 61, 34, 92, 63, 63, 60, 96, 53, 66, 55, 52, 70, 101, 64, 73, 83, 70, 58, 61, 69, 97, 59, 68, 56, 43, 69, 49, 72, 57, 82, 63, 58, 70, 61, 111, 66, 74, 90, 51, 40, 81, 89, 51, 41, 62, 95, 46, 52, 75, 46, 52, 63, 70, 54, 71, 68, 57, 52, 60, 41, 70, 89, 109, 55, 38, 86, 55, 67, 92, 52, 49, 50, 70, 66, 69, 58, 45, 73, 60, 58, 70, 66, 61, 58, 78, 50, 83, 93, 59, 47, 47, 62, 50, 84, 54, 71, 53, 58, 65, 65, 66, 64, 55, 93, 79, 67, 67, 58, 86, 56, 55, 61, 75, 59, 70, 42, 72, 58, 107, 68, 56, 68, 80, 56, 51, 76, 66, 74, 68, 51, 56, 103, 66, 73, 48, 53, 62, 121, 53, 93, 41, 72, 46, 76, 70, 58, 60, 67, 60, 64, 50, 70, 76, 55, 53, 65, 76, 62, 54, 62, 80, 102, 95, 57, 76, 76, 63, 63, 62, 62, 92, 65, 105, 40, 58, 76, 89, 52, 75, 64, 53, 76, 72, 53, 70, 65, 68, 74, 67, 67, 57, 61, 63, 46, 69, 85, 73, 49, 69, 66, 58, 78, 56, 58, 57, 73, 65, 55, 60, 117, 67, 88, 71, 74, 50, 55, 53, 69, 40, 65, 47, 61, 57, 70, 49, 57, 74, 92, 59, 52, 51, 57, 81, 74, 89, 62, 81, 54, 89, 65, 48, 40, 72, 67, 89, 38, 70, 84, 60, 54, 57, 55, 68, 83, 54, 72, 107, 51, 59, 64, 62, 74, 80, 62, 105, 54, 43, 62, 46, 114, 100, 70, 55, 37, 91, 65, 72, 74, 61, 61, 36, 50, 90, 60, 48, 47, 84, 58, 75, 61, 74, 70, 53, 50, 129, 64, 72, 
51, 85, 46, 55, 58, 96, 62, 85, 58, 111, 65, 68, 48, 61, 72, 63, 63, 56, 69, 79, 66, 74, 60, 61, 71, 66, 57, 68, 72, 44, 53, 52, 69, 81, 77, 71, 72, 62, 61, 102, 56, 53, 65, 62, 59, 51, 66, 55, 81, 48, 46, 68, 66, 67, 69, 61, 49, 51, 80, 67, 73, 88, 68, 83, 64, 77, 62, 48, 61, 41, 59, 58, 88, 50, 51, 66, 76, 61, 89, 84, 76, 77, 59, 59, 58, 89, 41, 82, 72, 68, 79, 69, 102, 64, 74, 72, 65, 74, 61, 45, 75, 71, 56, 80, 65, 61, 89, 93, 71, 68, 61, 71, 68, 44, 57, 81, 67, 58, 79, 71, 68, 87, 65, 60, 50, 73, 63, 51, 69, 57, 77, 76, 60, 79, 60, 66, 57, 50, 65, 68, 60, 65, 81, 66, 52, 65, 53, 62, 62, 56, 49, 84, 62, 70, 49, 66, 67, 67, 74, 66, 56, 45, 53, 71, 49, 63, 60, 68, 60, 64, 62, 50, 79, 63, 118, 83, 63, 77, 58, 63, 69, 57, 72, 54, 55, 57, 84, 75, 60, 60, 64, 87, 84, 56, 92, 82, 51, 52, 45, 63, 61, 61, 55, 35, 53, 61, 79, 76, 81, 53, 72, 54, 40, 62, 66, 62, 52, 47, 36, 67, 43, 53, 48, 59, 65, 95, 57, 71, 65, 109, 55, 30, 57, 44, 76, 49, 90, 65, 76, 38, 41, 62, 58, 64, 71, 49, 50, 50, 77, 84, 97, 95, 62, 90, 82, 41, 65, 52, 54, 64, 70, 48, 71, 53, 73, 54, 60, 51, 65, 63, 57, 71, 87, 63, 63, 63, 79, 67, 71, 57, 74, 54, 66, 71, 89, 44, 63, 63, 81, 73, 67, 60, 54, 78, 57, 64, 63, 50, 68, 74, 55, 61, 80, 71, 65, 74, 66, 54, 61, 64, 57, 60, 87, 110, 79, 74, 54, 71, 51, 65, 72, 63, 56, 65, 78, 65, 75, 54, 49, 56, 67, 61, 67, 61, 66, 94, 40, 73, 61, 96, 58, 75, 68, 76, 61, 60, 58, 51, 56, 58, 78, 68, 60, 71, 60, 81, 48, 62, 81, 64, 69, 63, 56, 80, 73, 58, 81, 59, 58, 65, 63, 74, 60, 55, 85, 61, 71, 80, 77, 63, 104, 92, 66, 44, 54, 44, 57, 54, 73, 83, 54, 58, 50, 66, 66, 62, 70, 54, 55, 57, 87, 63, 75, 100, 60, 69, 55, 59, 60, 62, 59, 41, 49, 68, 62, 57, 63, 61, 64, 54, 56, 59, 74, 56, 74, 78, 69, 50, 53, 58, 64, 54, 69, 53, 74, 79, 65, 48, 42, 55, 64, 57, 51, 76, 65, 78, 91, 72, 74, 63, 62, 68, 58, 52, 59, 68, 62, 75, 66, 60, 97, 89, 81, 51, 61, 49, 52, 63, 59, 50, 65, 61, 47, 50, 56, 77, 70, 62, 58, 71, 55, 72, 56, 65, 55, 62, 54, 67, 73, 62, 70, 70, 84, 50, 68, 63, 71, 58, 61, 79, 54, 88, 77, 72, 61, 68, 51, 56, 70, 56, 51, 68, 77, 63, 115, 66, 50, 62, 59, 73, 67, 103, 61, 58, 76, 76, 71, 63, 68, 62, 59, 60, 55, 61, 49, 83, 79, 71, 69, 55, 61, 81, 66, 58, 74, 69, 72, 70, 66, 84, 97, 57, 52, 73, 94, 69, 62, 61, 95, 51, 56, 61, 59, 73, 86, 51, 51, 75, 69, 76, 102, 49, 68, 71, 58, 66, 61, 118, 66, 58, 54, 52, 56, 74, 64, 72, 68, 70, 57, 60, 63, 59, 93, 90, 95, 75, 78, 79, 78, 77, 66, 67, 74, 57, 66, 95, 61, 73, 62, 53, 61, 90, 72, 62, 56, 88, 58, 52, 85, 49, 67, 71, 77, 65, 61, 67, 73, 65, 50, 70, 56, 79, 56, 59, 56, 76, 62, 82, 57, 47, 74, 95, 54, 63, 74, 67, 68, 49, 47, 62, 62, 58, 82, 51, 68, 63, 63, 48, 80, 56, 61, 62, 63, 44, 56, 57, 72, 60, 71, 50, 65, 59, 76, 65, 74, 64, 68, 72, 60, 53, 61, 79, 60, 57, 118, 70, 51, 73, 65, 75, 51, 64, 62, 71, 82, 74, 67, 76, 68, 54, 66, 86, 68, 53, 106, 44, 92, 51, 61, 73, 64, 71, 70, 62, 58, 64, 66, 59, 40, 51, 54, 80, 76, 84, 75, 93, 74, 92, 58, 114, 62, 54, 58, 45, 63, 57, 59, 75, 57, 70, 75, 64, 39, 77, 55, 90, 56, 85, 69, 59, 52, 53, 67, 66, 74, 67, 66, 61, 59, 46, 54, 49, 68, 87, 79, 48, 66, 61, 72, 64, 61, 86, 87, 66, 57, 53, 67, 61, 61, 64, 80, 111, 61, 80, 58, 73, 88, 45, 86, 43, 65, 69, 63, 56, 68, 72, 60, 69, 56, 67, 66, 77, 74, 54, 74, 79, 60, 62, 69, 75, 75, 71, 63, 64, 58, 59, 58, 74, 42, 63, 84, 82, 75, 49, 83, 69, 51, 63, 69, 60, 85, 65, 62, 63, 77, 54, 66, 57, 95, 79, 57, 58, 43, 58, 63, 52, 54, 63, 80, 67, 59, 65, 63, 61, 64, 61, 66, 77, 60, 47, 47, 74, 56, 51, 84, 64, 54, 57, 81, 73, 88, 71, 73, 80, 56, 75, 62, 55, 115, 50, 90, 80, 74, 66, 
67, 50, 69, 43, 96, 114, 89, 77, 53, 71, 84, 49, 67, 97, 67, 113, 45, 51, 84, 71, 59, 58, 81, 77, 53, 75, 74, 66, 63, 58, 63, 57, 65, 59, 78, 62, 59, 80, 84, 62, 71, 61, 55, 54, 81, 60, 66, 58, 57, 60, 71, 62, 70, 72, 71, 62, 93, 51, 63, 62, 55, 105, 66, 75, 49, 55, 60, 70, 42, 55, 73, 45, 67, 74, 66, 78, 61, 55, 54, 93, 79, 74, 63, 99, 45, 50, 72, 78, 53, 70, 54, 57, 50, 72, 80, 51, 90, 60, 84, 82, 87, 54, 58, 65, 77, 71, 70, 38, 58, 64, 71, 75, 82, 55, 64, 73, 57, 54, 63, 96, 46, 82, 57, 50, 60, 45, 65, 62, 69, 73, 67, 78, 72, 53, 69, 63, 61, 120, 68, 94, 40, 50, 58, 61, 53, 49, 68, 55, 65, 79, 45, 71, 67, 80, 63, 60, 58, 45, 65, 49, 60, 73, 78, 54, 70, 47, 54, 55, 56, 58, 58, 65, 72, 64, 45, 47, 74, 65, 51, 87, 61, 66, 65, 70, 63, 50, 57, 46, 70, 63, 61, 88, 73, 58, 79, 64, 58, 49, 53, 71, 63, 66, 52, 49, 74, 91, 63, 55, 72, 53, 78, 57, 71, 73, 62, 65, 67, 90, 72, 54, 63, 81, 53, 75, 51, 79, 54, 124, 79, 47, 52, 71, 98, 61, 80, 47, 60, 72, 87, 67, 55, 76, 69, 71, 90, 56, 73, 54, 60, 58, 65, 83, 72, 97, 50, 100, 57, 73, 55, 75, 75, 63, 59, 56, 66, 58, 50, 67, 50, 73, 66, 66, 56, 70, 40, 70, 66, 63, 78, 82, 57, 69, 53, 93, 67, 61, 64, 69, 48, 49, 64, 73, 66, 79, 60, 49, 97, 54, 57, 58, 60, 57, 76, 82, 83, 58, 56, 51, 50, 71, 64, 67, 58, 77, 72, 69, 59, 80, 46, 72, 56, 70, 81, 96, 68, 66, 86, 58, 61, 52, 44, 93, 86, 67, 75, 50, 78, 67, 73, 51, 48, 60, 91, 75, 83, 63, 57, 60, 102, 73, 60, 75, 63, 89, 63, 71, 73, 75, 74, 65, 63, 80, 53, 58, 52, 54, 72, 57, 65, 59, 60, 107, 65, 53, 97, 57, 61, 65, 48, 56, 60, 82, 68, 59, 64, 71, 49, 61, 67, 80, 118, 78, 66, 79, 70, 49, 74, 65, 73, 63, 64, 45, 44, 66, 67, 59, 67, 58, 55, 54, 62, 71, 72, 47, 60, 62, 64, 81, 77, 74, 74, 52, 54, 81, 85, 65, 70, 38, 41, 81, 63, 89, 67, 78, 60, 67, 65, 58, 58, 53, 100, 42, 61, 64, 50, 72, 61, 70, 86, 63, 66, 54, 49, 41, 55, 66, 53, 71, 78, 58, 63, 54, 72, 67, 73, 61, 60, 70, 63, 63, 50, 59, 78, 56, 72, 77, 65, 79, 66, 64, 57, 68, 64, 71, 62, 52, 79, 70, 53, 112, 55, 88, 65, 79, 54, 48, 65, 61, 57, 75, 78, 84, 81, 91, 55, 80, 42, 66, 60, 52, 61, 65, 46, 47, 60, 44, 72, 59, 55, 70, 72, 64, 56, 48, 80, 68, 54, 109, 66, 74, 75, 59, 79, 50, 63, 51, 90, 64, 53, 62, 87, 62, 63, 47, 65, 50, 57, 63, 57, 52, 52, 76, 64, 40, 48, 58, 75, 53, 54, 51, 44, 95, 74, 66, 76, 65, 53, 65, 67, 67, 69, 93, 69, 70, 75, 55, 58, 65, 77, 51, 56, 74, 51, 67, 49, 50, 50, 60, 79, 67, 48, 65, 73, 55, 69, 69, 57, 64, 88, 65, 60, 48, 49, 47, 52, 59, 50, 102, 55, 75, 52, 66, 64, 58, 60, 63, 61, 67, 59, 52, 57, 59, 63, 91, 75, 93, 61, 74, 60, 62, 66, 59, 65, 43, 41, 43, 59, 73, 98, 117, 49, 67, 66, 109, 65, 82, 48, 52, 88, 53, 54, 69, 66, 83, 108, 64, 79, 64, 57, 69, 84, 54, 53, 68, 70, 74, 73, 71, 70, 57, 78, 58, 71, 81, 92, 49, 54, 78, 72, 77, 55, 44, 70, 70, 75, 55, 84, 57, 60, 45, 64, 70, 67, 52, 83, 43, 63, 63, 82, 52, 66, 54, 75, 68, 63, 85, 56, 59, 63, 94, 74, 65, 48, 72, 52, 64, 66, 82, 71, 70, 57, 66, 72, 39, 70, 99, 60, 100, 66, 75, 61, 64, 61, 59, 67, 63, 54, 67, 66, 71, 71, 64, 71, 40, 50, 108, 58, 79, 50, 66, 58, 72, 62, 61, 101, 59, 66, 46, 67, 71, 59, 74, 69, 50, 70, 70, 73, 64, 83, 63, 47, 64, 107, 43, 57, 81, 79, 54, 65, 79, 63, 58, 76, 66, 70, 72, 81, 54, 60, 90, 65, 71, 53, 69, 84, 59, 51, 63, 79, 61, 60, 70, 72, 79, 58, 50, 68, 80, 38, 76, 71, 71, 42, 62, 52, 75, 65, 61, 79, 47, 71, 77, 60, 67, 64, 76, 65, 78, 46, 96, 80, 58, 64, 61, 50, 53, 46, 75, 84, 61, 63, 58, 71, 66, 80, 54, 62, 76, 47, 96, 57, 77, 88, 75, 67, 43, 55, 60, 68, 49, 62, 71, 61, 54, 39, 72, 61, 60, 57, 93, 61, 55, 61, 49, 79, 69, 75, 54, 60, 84, 63, 61, 59, 
63, 61, 68, 57, 86, 77, 63, 61, 54, 58, 67, 78, 77, 68, 81, 87, 53, 57, 55, 55, 53, 91, 52, 82, 50, 60, 90, 55, 83, 70, 65, 49, 62, 61, 66, 54, 90, 56, 94, 56, 70, 41, 72, 67, 51, 98, 79, 70, 85, 62, 64, 91, 93, 83, 84, 75, 52, 59, 98, 50, 61, 82, 64, 64, 56, 53, 53, 83, 62, 63, 56, 69, 72, 54, 62, 58, 57, 59, 65, 94, 47, 94, 53, 63, 75, 71, 80, 106, 59, 57, 67, 66, 57, 76, 59, 90, 81, 65, 46, 63, 51, 52, 90, 104, 77, 66, 65, 78, 61, 75, 62, 59, 53, 53, 50, 101, 77, 37, 42, 70, 43, 71, 78, 72, 53, 82, 56, 79, 63, 86, 50, 66, 72, 60, 61, 74, 72, 60, 53, 47, 49, 68, 76, 55, 61, 43, 61, 55, 48, 65, 68, 76, 65, 64, 94, 82, 104, 45, 55, 74, 64, 68, 63, 63, 60, 75, 70, 59, 70, 61, 48, 54, 69, 50, 64, 72, 63, 58, 47, 70, 58, 73, 60, 58, 50, 80, 63, 47, 66, 95, 60, 45, 50, 85, 51, 62, 67, 65, 66, 56, 63, 70, 70, 52, 64, 65, 77, 78, 80, 75, 61, 69, 83, 45, 94, 83, 66, 63, 76, 66, 64, 69, 33, 63, 68, 66, 58, 58, 64, 67, 64, 64, 44, 75, 45, 61, 64, 49, 71, 70, 74, 51, 66, 66, 77, 51, 84, 70, 89, 66, 79, 55, 61, 70, 52, 63, 54, 61, 63, 64, 74, 65, 54, 47, 66, 51, 58, 65, 50, 75, 65, 71, 65, 89, 56, 67, 74, 61, 126, 68, 93, 59, 41, 40, 65, 80, 75, 54, 112, 57, 52, 56, 72, 68, 64, 64, 81, 53, 64, 64, 72, 76, 65, 56, 57, 67, 64, 60, 63, 62, 58, 69, 66, 70, 65, 64, 72, 60, 73, 71, 106, 55, 64, 73, 74, 61, 44, 66, 60, 58, 62, 54, 58, 65, 68, 68, 68, 63, 43, 74, 65, 75, 54, 46, 55, 64, 63, 72, 64, 48, 59, 62, 68, 72, 59, 64, 56, 71, 80, 76, 60, 55, 109, 56, 43, 64, 89, 80, 41, 59, 62, 75, 66, 81, 81, 68, 63, 75, 65, 66, 63, 59, 69, 74, 60, 62, 53, 62, 58, 69, 80, 69, 70, 70, 73, 60, 60, 69, 51, 72, 69, 48, 63, 67, 73, 82, 66, 69, 51, 84, 73, 86, 82, 68, 62, 57, 58, 96, 56, 54, 60, 59, 44, 55, 65, 66, 78, 81, 65, 52, 51, 52, 70, 54, 61, 48, 58, 56, 70, 48, 92, 84, 54, 74, 60, 88, 72, 72, 95, 77, 70, 70, 62, 47, 57, 70, 65, 86, 88, 52, 56, 63, 55, 95, 90, 44, 68, 61, 57, 69, 69, 62, 89, 72, 47, 64, 61, 66, 69, 57, 39, 55, 48, 85, 54, 50, 79, 76, 61, 68, 65, 61, 52, 86, 41, 64, 42, 68, 57, 75, 74, 77, 60, 68, 65, 79, 60, 60, 60, 67, 21, 57, 73, 59, 61, 67, 60, 93, 66, 74, 55, 88, 73, 64, 57, 70, 63, 90, 74, 81, 37, 107, 104, 75, 51, 63, 73, 59, 49, 74, 54, 55, 75, 69, 58, 62, 54, 59, 47, 59, 64, 54, 78, 80, 39, 67, 64, 60, 47, 76, 71, 83, 58, 47, 92, 70, 65, 54, 60, 69, 66, 103, 62, 59, 70, 76, 61, 64, 56, 47, 65, 86, 80, 66, 60, 61, 75, 46, 69, 57, 53, 71, 75, 49, 71, 73, 49, 54, 78, 68, 59, 77, 32, 75, 69, 79, 60, 86, 79, 69, 60, 73, 59, 65, 75, 60, 61, 56, 62, 55, 54, 69, 77, 78, 68, 63, 49, 64, 72, 68, 110, 57, 86, 62, 49, 73, 51, 55, 52, 84, 65, 72, 81, 81, 58, 51, 55, 65, 64, 83, 64, 73, 89, 65, 59, 66, 50, 81, 52, 54, 72, 78, 88, 71, 66, 78, 80, 69, 85, 81, 107, 76, 46, 68, 71, 68, 66, 83, 68, 80, 80, 55, 45, 73, 77, 59, 68, 57, 62, 65, 69, 62, 67, 52, 50, 59, 62, 66, 51, 72, 59, 49, 62, 54, 49, 70, 55, 55, 81, 71, 68, 84, 62, 58, 68, 70, 71, 55, 54, 74, 64, 67, 50, 61, 56, 83, 77, 80, 70, 65, 67, 68, 95, 67, 56, 63, 60, 60, 50, 66, 57, 60, 57, 52, 48, 49, 57, 73, 79, 95, 81, 43, 59, 57, 64, 65, 58, 70, 55, 58, 91, 61, 63, 74, 58, 64, 90, 59, 91, 64, 63, 83, 56, 57, 64, 72, 83, 64, 59, 65, 71, 81, 74, 87, 75, 57, 80, 66, 62, 58, 76, 74, 58, 69, 75, 39, 56, 58, 80, 53, 108, 62, 62, 59, 74, 69, 69, 66, 51, 68, 69, 57, 56, 59, 62, 65, 66, 50, 65, 69, 69, 69, 73, 59, 58, 69, 97, 64, 49, 60, 53, 100, 65, 81, 69, 61, 69, 65, 56, 58, 64, 70, 65, 56, 72, 60, 63, 52, 65, 79, 50, 52, 99, 61, 72, 60, 110, 68, 63, 60, 52, 68, 60, 63, 51, 66, 64, 50, 59, 50, 89, 68, 61, 62, 56, 47, 57, 52, 65, 66, 51, 59, 75, 69, 
72, 69, 67, 62, 63, 63, 78, 79, 43, 70, 70, 70, 62, 69, 55, 60, 86, 59, 77, 63, 52, 69, 83, 61, 93, 85, 75, 69, 54, 67, 57, 71, 77, 61, 56, 55, 66, 56, 69, 70, 53, 72, 67, 64, 72, 44, 67, 72, 71, 59, 54, 72, 58, 65, 73, 62, 78, 60, 62, 67, 60, 81, 65, 87, 62, 49, 74, 63, 82, 61, 65, 56, 58, 61, 65, 69, 72, 60, 53, 74, 80, 77, 79, 64, 60, 67, 62, 65, 88, 68, 61, 47, 62, 64, 69, 80, 81, 47, 80, 90, 56, 107, 73, 63, 64, 56, 67, 73, 66, 59, 49, 72, 79, 87, 54, 70, 74, 74, 69, 65, 47, 47, 64, 60, 46, 56, 68, 59, 55, 64, 84, 52, 56, 52, 85, 72, 53, 54, 58, 83, 52, 55, 80, 68, 58, 75, 69, 56, 67, 73, 72, 75, 56, 83, 66, 77, 65, 53, 55, 84, 57, 43, 74, 58, 73, 59, 61, 48, 69, 63, 76, 66, 69, 61, 53, 59, 72, 67, 58, 56, 47, 62, 73, 61, 66, 50, 96, 69, 70, 61, 61, 43, 41, 70, 64, 61, 66, 72, 63, 68, 72, 59, 41, 70, 50, 56, 73, 45, 71, 54, 62, 77, 70, 92, 65, 63, 56, 71, 51, 60, 105, 71, 67, 62, 53, 59, 73, 108, 72, 61, 62, 73, 56, 59, 66, 58, 46, 65, 72, 53, 46, 44, 67, 69, 59, 69, 73, 67, 75, 97, 63, 56, 53, 69, 63, 82, 70, 69, 58, 60, 50, 66, 66, 57, 103, 61, 66, 55, 54, 74, 61, 56, 78, 70, 55, 127, 57, 47, 70, 61, 49, 56, 63, 89, 55, 73, 55, 77, 66, 68, 65, 71, 103, 86, 46, 64, 59, 78, 66, 57, 79, 62, 60, 61, 58, 95, 56, 81, 57, 59, 75, 68, 67, 61, 71, 61, 63, 80, 86, 67, 59, 59, 33, 74, 48, 61, 65, 85, 62, 83, 75, 58, 60, 61, 69, 63, 50, 46, 60, 43, 64, 66, 58, 55, 74, 53, 65, 64, 89, 86, 63, 72, 67, 85, 56, 55, 60, 68, 58, 79, 68, 64, 58, 66, 58, 105, 70, 52, 80, 67, 116, 48, 59, 63, 64, 50, 60, 65, 67, 49, 62, 61, 71, 64, 68, 71, 60, 81, 54, 63, 59, 63, 98, 95, 73, 68, 43, 66, 55, 69, 72, 72, 65, 51, 56, 72, 57, 62, 51, 76, 62, 58, 63, 74, 46, 60, 80, 53, 121, 71, 67, 46, 78, 59, 62, 63, 70, 71, 80, 61, 81, 62, 68, 65, 54, 78, 58, 62, 79, 62, 58, 65, 66, 52, 63, 47, 68, 61, 54, 77, 59, 68, 66, 59, 56, 90, 87, 60, 63, 75, 39, 50, 55, 72, 68, 76, 56, 76, 77, 70, 53, 80, 66, 66, 73, 63, 63, 77, 69, 58, 69, 58, 62, 62, 54, 54, 62, 66, 59, 116, 64, 54, 71, 48, 64, 59, 68, 72, 43, 74, 60, 81, 73, 64, 74, 98, 67, 60, 63, 62, 61, 65, 68, 69, 84, 80, 61, 44, 75, 69, 52, 76, 80, 62, 66, 49, 67, 60, 55, 73, 62, 95, 71, 79, 62, 45, 52, 47, 69, 61, 59, 77, 63, 59, 67, 55, 61, 57, 61, 57, 67, 62, 69, 77, 49, 54, 69, 66, 70, 71, 91, 84, 86, 55, 64, 64, 56, 60, 65, 59, 92, 51, 66, 55, 63, 87, 74, 65, 93, 69, 69, 70, 52, 70, 70, 58, 51, 53, 52, 53, 73, 71, 62, 60, 70, 62, 70, 48, 59, 61, 68, 67, 102, 71, 52, 55, 56, 63, 77, 51, 62, 90, 78, 62, 58, 62, 72, 55, 59, 77, 97, 84, 67, 60, 52, 58, 66, 76, 57, 59, 74, 69, 55, 56, 55, 39, 91, 73, 61, 76, 76, 59, 63, 75, 59, 98, 66, 57, 62, 59, 66, 64, 77, 70, 63, 50, 66, 80, 78, 65, 63, 72, 91, 66, 67, 79, 111, 95, 69, 71, 77, 86, 86, 64, 79, 71, 67, 67, 63, 52, 54, 65, 99, 89, 57, 64, 64, 70, 82, 68, 46, 69, 54, 62, 82, 55, 73, 55, 104, 69, 78, 72, 64, 66, 64, 58, 71, 73, 69, 57, 61, 60, 66, 54, 62, 58, 67, 65, 53, 55, 75, 42, 75, 84, 86, 68, 61, 60, 54, 61, 73, 66, 76, 74, 67, 62, 52, 78, 67, 55, 81, 52, 86, 58, 53, 54, 79, 51, 61, 68, 89, 62, 55, 82, 59, 52, 57, 52, 64, 55, 69, 53, 59, 61, 66, 59, 62, 68, 68, 71, 53, 58, 67, 81, 85, 68, 84, 56, 54, 79, 70, 62, 57, 69, 74, 56, 60, 51, 52, 74, 81, 98, 77, 61, 62, 65, 92, 105, 66, 68, 48, 88, 63, 56, 72, 66, 59, 55, 75, 62, 75, 63, 60, 70, 56, 60, 72, 52, 52, 91, 59, 69, 63, 65, 77, 57, 74, 52, 85, 69, 62, 77, 51, 60, 48, 56, 69, 74, 69, 70, 59, 55, 57, 51, 66, 41, 81, 58, 56, 69, 60, 60, 60, 53, 70, 65, 63, 73, 92, 65, 73, 42, 58, 64, 83, 55, 91, 72, 83, 58, 51, 50, 86, 68, 66, 71, 66, 66, 112, 62, 107, 64, 68, 68, 
45, 51, 68, 49, 54, 101, 58, 63, 60, 81, 62, 71, 75, 62, 60, 62, 69, 76, 58, 101, 72, 57, 65, 69, 56, 54, 50, 73, 63, 51, 79, 69, 74, 66, 61, 68, 72, 61, 66, 63, 59, 49, 61, 103, 64, 76, 89, 56, 53, 81, 53, 53, 62, 62, 79, 72, 97, 87, 63, 69, 87, 80, 47, 57, 74, 62, 56, 62, 65, 59, 48, 60, 86, 58, 47, 64, 58, 68, 64, 68, 62, 69, 81, 65, 58, 62, 51, 99, 69, 75, 53, 71, 70, 56, 83, 40, 68, 57, 53, 68, 91, 63, 58, 52, 56, 57, 56, 52, 94, 66, 43, 57, 121, 70, 60, 58, 63, 70, 63, 56, 63, 66, 60, 58, 60, 70, 59, 52, 65, 66, 53, 51, 59, 69, 71, 54, 65, 67, 78, 55, 55, 56, 68, 43, 65, 68, 104, 82, 59, 67, 53, 68, 76, 63, 51, 54, 90, 61, 51, 57, 55, 69, 69, 46, 112, 72, 53, 73, 84, 64, 67, 74, 45, 59, 67, 69, 69, 65, 48, 56, 68, 65, 48, 77, 65, 46, 71, 76, 56, 64, 67, 88, 62, 54, 85, 71, 68, 139, 63, 48, 57, 70, 50, 83, 77, 60, 60, 62, 68, 62, 63, 68, 68, 68, 65, 60, 71, 65, 62, 55, 76, 64, 73, 58, 66, 58, 62, 54, 96, 73, 79, 57, 61, 58, 49, 81, 70, 69, 88, 83, 61, 88, 70, 53, 61, 68, 64, 55, 56, 75, 56, 64, 72, 80, 65, 46, 84, 79, 97, 71, 47, 69, 66, 67, 64, 63, 73, 58, 73, 65, 73, 51, 65, 58, 67, 55, 73, 56, 54, 67, 56, 80, 65, 56, 72, 54, 108, 53, 72, 69, 53, 85, 78, 65, 74, 57, 60, 87, 68, 91, 75, 53, 74, 62, 95, 82, 55, 70, 71, 79, 63, 71, 60, 55, 50, 63, 68, 54, 59, 72, 54, 63, 49, 63, 61, 58, 52, 49, 69, 52, 56, 102, 68, 56, 62, 55, 73, 60, 53, 73, 52, 64, 52, 70, 59, 70, 63, 48, 57, 78, 49, 70, 75, 76, 69, 48, 74, 84, 65, 54, 78, 51, 73, 71, 70, 77, 61, 98, 64, 72, 56, 75, 60, 72, 53, 70, 73, 65, 75, 63, 83, 62, 64, 70, 62, 69, 66, 71, 83, 68, 92, 70, 53, 63, 65, 64, 61, 64, 55, 53, 66, 51, 84, 67, 90, 77, 73, 71, 51, 75, 55, 61, 55, 71, 56, 74, 51, 67, 63, 63, 58, 52, 53, 68, 74, 100, 69, 84, 62, 63, 59, 72, 75, 85, 64, 64, 72, 68, 70, 53, 74, 70, 77, 64, 67, 91, 77, 69, 71, 59, 56, 63, 58, 55, 63, 57, 63, 97, 87, 67, 56, 54, 82, 61, 47, 78, 62, 80, 54, 67, 51, 86, 62, 60, 66, 58, 68, 70, 73, 69, 61, 59, 56, 56, 65, 57, 79, 47, 70, 82, 69, 89, 101, 80, 60, 68, 54, 64, 64, 44, 70, 87, 47, 79, 55, 110, 67, 54, 64, 61, 105, 57, 55, 53, 54, 41, 37, 58, 64, 63, 65, 68, 54, 59, 52, 63, 57, 102, 66, 70, 65, 65, 90, 68, 80, 58, 61, 73, 59, 77, 51, 65, 64, 62, 55, 70, 76, 66, 66, 45, 60, 57, 45, 73, 61, 58, 67, 62, 122, 55, 72, 71, 124, 62, 55, 60, 55, 61, 56, 57, 66, 70, 104, 71, 47, 83, 62, 56, 109, 76, 55, 56, 69, 52, 69, 74, 72, 53, 64, 82, 51, 69, 58, 57, 71, 94, 86, 64, 52, 81, 77, 63, 60, 61, 62, 41, 52, 57, 77, 63, 56, 101, 63, 68, 71, 58, 51, 77, 50, 41, 75, 45, 67, 60, 60, 44, 60, 84, 66, 148, 71, 71, 78, 45, 76, 78, 65, 64, 51, 60, 58, 60, 47, 49, 57, 91, 52, 64, 68, 73, 60, 65, 86, 62, 91, 56, 60, 69, 81, 62, 75, 68, 69, 72, 73, 66, 74, 63, 60, 74, 65, 74, 73, 99, 51, 69, 69, 57, 63, 55, 69, 64, 64, 87, 48, 65, 69, 81, 70, 74, 85, 57, 71, 67, 66, 70, 54, 48, 93, 62, 64, 43, 51, 67, 58, 72, 62, 69, 37, 51, 73, 63, 71, 67, 53, 64, 53, 66, 67, 68, 66, 54, 73, 68, 68, 74, 55, 55, 63, 57, 56, 74, 65, 78, 70, 41, 64, 55, 61, 75, 61, 68, 98, 65, 61, 76, 55, 56, 72, 59, 46, 61, 65, 69, 63, 77, 43, 55, 60, 111, 58, 73, 60, 57, 58, 58, 54, 74, 41, 79, 58, 85, 59, 60, 74, 66, 92, 70, 57, 61, 71, 57, 73, 54, 55, 61, 50, 57, 56, 65, 78, 67, 66, 52, 71, 64, 74, 63, 54, 61, 59, 57, 46, 76, 67, 53, 72, 64, 54, 83, 79, 69, 71, 74, 56, 80, 62, 53, 70, 70, 54, 47, 69, 58, 47, 76, 51, 64, 60, 70, 58, 52, 68, 62, 57, 54, 67, 53, 60, 64, 85, 62, 71, 80, 88, 49, 70, 65, 63, 38, 86, 78, 75, 49, 59, 64, 55, 70, 84, 83, 65, 59, 76, 89, 84, 65, 38, 54, 60, 56, 70, 61, 61, 57, 73, 63, 54, 69, 44, 71, 71, 71, 99, 
56, 74, 56, 64, 45, 59, 75, 72, 63, 72, 64, 83, 63, 52, 60, 61, 89, 52, 73, 64, 63, 73, 44, 69, 69, 55, 63, 83, 63, 64, 109, 89, 57, 66, 63, 64, 60, 60, 62, 68, 65, 67, 77, 68, 42, 70, 68, 59, 94, 70, 59, 65, 74, 80, 99, 58, 73, 72, 68, 76, 64, 62, 79, 53, 56, 65, 70, 61, 61, 54, 48, 64, 62, 68, 61, 56, 65, 76, 77, 80, 62, 70, 60, 84, 74, 85, 60, 75, 80, 61, 64, 68, 57, 67, 53, 69, 62, 70, 60, 63, 62, 68, 52, 62, 53, 62, 75, 50, 45, 81, 58, 76, 50, 51, 56, 92, 55, 72, 57, 58, 58, 65, 55, 72, 53, 68, 61, 68, 110, 85, 57, 67, 89, 70, 43, 88, 56, 94, 59, 61, 55, 54, 64, 73, 67, 62, 66, 58, 75, 66, 59, 44, 67, 78, 61, 75, 53, 72, 56, 71, 80, 53, 52, 73, 74, 71, 62, 60, 62, 51, 58, 62, 65, 69, 70, 71, 75, 81, 64, 48, 77, 77, 56, 65, 66, 65, 67, 76, 70, 56, 62, 67, 34, 59, 70, 78, 59, 43, 70, 54, 52, 68, 74, 52, 71, 66, 66, 76, 68, 67, 61, 74, 67, 64, 67, 59, 79, 47, 56, 68, 60, 50, 61, 50, 62, 85, 67, 59, 92, 50, 88, 65, 79, 61, 62, 54, 43, 71, 73, 70, 62, 47, 55, 64, 51, 64, 77, 83, 58, 75, 57, 66, 60, 72, 45, 80, 54, 58, 66, 63, 79, 74, 64, 61, 65, 68, 68, 62, 56, 60, 38, 65, 60, 73, 61, 63, 71, 70, 61, 66, 55, 71, 65, 65, 51, 80, 69, 56, 59, 64, 72, 74, 83, 50, 56, 47, 59, 54, 54, 86, 77, 98, 56, 82, 64, 101, 103, 87, 46, 82, 97, 66, 64, 48, 71, 72, 75, 64, 64, 71, 67, 67, 76, 69, 66, 65, 66, 60, 65, 68, 49, 57, 115, 61, 76, 71, 68, 57, 50, 81, 58, 68, 83, 62, 73, 88, 68, 57, 61, 60, 57, 86, 76, 56, 73, 53, 86, 57, 75, 60, 66, 61, 59, 73, 46, 66, 77, 82, 64, 51, 79, 74, 49, 54, 72, 59, 89, 77, 62, 113, 84, 59, 109, 73, 50, 55, 63, 54, 67, 60, 60, 59, 74, 57, 88, 71, 65, 83, 72, 76, 71, 70, 72, 80, 53, 70, 56, 86, 64, 55, 63, 52, 51, 68, 68, 55, 74, 67, 66, 68, 57, 71, 58, 77, 60, 47, 80, 55, 71, 82, 66, 74, 62, 60, 72, 46, 63, 45, 71, 68, 71, 70, 61, 69, 82, 55, 70, 60, 72, 56, 74, 64, 65, 61, 67, 65, 59, 74, 70, 40, 54, 69, 70, 55, 79, 61, 75, 73, 64, 49, 40, 55, 65, 66, 58, 69, 48, 60, 55, 75, 48, 63, 60, 61, 58, 56, 65, 74, 64, 64, 59, 76, 68, 51, 71, 67, 61, 55, 58, 46, 68, 94, 66, 70, 75, 71, 60, 58, 117, 54, 54, 62, 57, 77, 61, 62, 69, 72, 63, 55, 58, 70, 77, 57, 53, 40, 57, 57, 55, 57, 60, 64, 68, 57, 61, 80, 75, 79, 56, 74, 65, 76, 56, 64, 55, 94, 55, 59, 82, 65, 47, 57, 60, 68, 56, 45, 120, 48, 64, 60, 61, 65, 63, 63, 81, 58, 74, 67, 69, 53, 73, 72, 59, 73, 80, 67, 49, 89, 67, 61, 83, 61, 63, 57, 71, 61, 88, 52, 48, 80, 68, 56, 67, 61, 75, 83, 69, 70, 88, 90, 55, 77, 52, 59, 56, 63, 73, 48, 70, 62, 67, 65, 61, 56, 56, 52, 45, 84, 69, 65, 68, 57, 61, 61, 44, 77, 61, 66, 80, 86, 69, 57, 54, 69, 80, 54, 58, 58, 47, 65, 52, 79, 50, 46, 88, 59, 53, 70, 72, 75, 70, 117, 55, 72, 57, 74, 71, 61, 55, 72, 61, 72, 68, 63, 60, 59, 60, 62, 75, 109, 55, 68, 72, 101, 83, 82, 61, 60, 90, 74, 49, 69, 78, 67, 65, 82, 64, 48, 60, 64, 65, 58, 65, 58, 67, 60, 55, 88, 50, 117, 78, 51, 72, 94, 63, 77, 54, 80, 67, 62, 72, 78, 89, 51, 57, 67, 61, 62, 55, 81, 54, 49, 65, 37, 62, 50, 63, 56, 80, 61, 69, 72, 100, 63, 62, 79, 65, 66, 69, 67, 73, 88, 59, 58, 65, 101, 53, 63, 82, 72, 57, 53, 73, 67, 68, 85, 51, 41, 86, 65, 73, 74, 62, 72, 67, 78, 55, 49, 63, 71, 98, 51, 68, 56, 64, 66, 69, 48, 74, 69, 53, 58, 82, 71, 58, 94, 108, 70, 64, 60, 80, 68, 59, 51, 76, 62, 72, 59, 65, 50, 68, 65, 63, 71, 70, 52, 63, 76, 48, 67, 55, 98, 73, 65, 69, 74, 40, 57, 71, 63, 49, 60, 59, 76, 65, 56, 66, 70, 71, 49, 71, 61, 64, 55, 71, 53, 52, 76, 60, 55, 74, 75, 64, 85, 64, 54, 71, 72, 54, 53, 59, 77, 66, 72, 70, 73, 91, 77, 66, 78, 58, 72, 66, 52, 58, 69, 79, 76, 67, 77, 51, 69, 89, 61, 60, 57, 71, 63, 61, 58, 55, 47, 47, 44, 
76, 82, 69, 64, 63, 65, 55, 60, 97, 66, 60, 67, 62, 73, 64, 62, 51, 65, 82, 68, 69, 71, 69, 68, 52, 56, 75, 67, 58, 53, 52, 52, 61, 59, 57, 68, 78, 59, 83, 89, 69, 62, 71, 77, 59, 57, 67, 72, 68, 63, 70, 68, 54, 70, 63, 69, 64, 77, 81, 68, 54, 55, 73, 86, 86, 59, 68, 94, 90, 75, 68, 66, 65, 82, 66, 55, 56, 75, 52, 102, 53, 55, 68, 54, 77, 53, 66, 60, 54, 78, 64, 93, 66, 64, 56, 45, 68, 45, 74, 57, 52, 46, 91, 75, 73, 52, 54, 48, 64, 72, 71, 72, 68, 72, 76, 82, 52, 63, 49, 58, 60, 67, 45, 62, 65, 57, 60, 59, 63, 73, 59, 64, 61, 66, 91, 77, 67, 48, 73, 97, 87, 69, 53, 79, 68, 74, 74, 49, 57, 81, 69, 61, 58, 67, 56, 67, 62, 62, 56, 61, 49, 70, 55, 63, 61, 59, 66, 70, 65, 76, 61, 57, 63, 90, 59, 57, 76, 53, 78, 75, 50, 64, 61, 67, 39, 55, 57, 47, 64, 69, 70, 64, 60, 87, 78, 75, 57, 66, 47, 64, 64, 63, 74, 51, 62, 76, 70, 53, 47, 57, 55, 61, 66, 68, 65, 61, 74, 77, 60, 50, 67, 65, 60, 73, 57, 83, 73, 91, 77, 44, 59, 72, 54, 79, 62, 57, 65, 51, 51, 67, 54, 67, 52, 54, 78, 66, 56, 46, 46, 95, 62, 57, 69, 58, 68, 70, 90, 71, 50, 56, 60, 66, 64, 95, 49, 68, 78, 66, 74, 74, 76, 54, 73, 51, 59, 46, 61, 89, 64, 50, 51, 65, 51, 59, 60, 69, 81, 68, 67, 67, 52, 69, 72, 72, 68, 49, 67, 73, 67, 67, 67, 39, 68, 75, 59, 64, 47, 91, 63, 68, 76, 65, 29, 66, 49, 63, 43, 58, 72, 61, 70, 86, 66, 70, 76, 75, 44, 57, 70, 69, 58, 81, 45, 81, 77, 68, 59, 88, 73, 72, 77, 66, 67, 63, 55, 73, 69, 75, 64, 66, 64, 72, 68, 78, 71, 59, 84, 63, 74, 87, 59, 69, 59, 104, 71, 35, 78, 70, 75, 70, 71, 65, 79, 50, 58, 58, 47, 72, 56, 75, 60, 51, 59, 57, 52, 51, 79, 51, 69, 88, 63, 80, 76, 81, 64, 80, 85, 48, 58, 67, 68, 79, 60, 62, 83, 47, 57, 66, 52, 54, 71, 58, 63, 85, 67, 65, 52, 77, 73, 72, 72, 57, 71, 67, 120, 79, 57, 76, 69, 57, 62, 81, 61, 65, 75, 63, 62, 71, 64, 57, 70, 77, 72, 48, 66, 117, 102, 82, 51, 65, 68, 64, 47, 87, 55, 74, 66, 38, 76, 58, 60, 59, 74, 70, 56, 80, 70, 82, 66, 68, 53, 73, 109, 60, 83, 66, 97, 78, 63, 64, 51, 43, 80, 67, 85, 108, 76, 67, 68, 70, 60, 60, 74, 68, 60, 47, 47, 55, 67, 58, 68, 58, 56, 56, 62, 47, 75, 74, 71, 56, 75, 52, 41, 64, 63, 44, 94, 94, 54, 96, 86, 65, 101, 64, 68, 61, 82, 42, 62, 56, 57, 45, 76, 67, 96, 67, 66, 77, 81, 70, 68, 58, 64, 111, 59, 81, 54, 86, 75, 64, 58, 80, 64, 66, 53, 64, 55, 63, 86, 68, 68, 60, 61, 71, 60, 68, 87, 66, 68, 47, 74, 56, 73, 67, 58, 63, 75, 66, 63, 51, 99, 75, 47, 78, 72, 71, 52, 53, 75, 67, 57, 71, 69, 62, 80, 37, 56, 78, 53, 46, 53, 69, 75, 64, 46, 56, 91, 52, 42, 65, 48, 59, 62, 46, 60, 67, 48, 71, 77, 80, 99, 68, 69, 79, 56, 51, 65, 60, 73, 69, 60, 61, 46, 86, 64, 38, 56, 50, 66, 70, 32, 83, 60, 60, 65, 54, 60, 49, 87, 47, 54, 81, 62, 73, 61, 82, 53, 79, 49, 71, 64, 49, 45, 51, 65, 60, 61, 58, 75, 63, 59, 40, 68, 69, 86, 62, 80, 55, 53, 91, 67, 57, 67, 50, 55, 89, 68, 58, 96, 44, 109, 52, 60, 57, 61, 83, 75, 57, 76, 44, 78, 55, 62, 74, 69, 61, 83, 53, 68, 57, 62, 58, 86, 65, 92, 72, 60, 83, 70, 67, 58, 58, 73, 48, 73, 64, 78, 76, 60, 56, 60, 51, 66, 59, 71, 74, 67, 52, 56, 67, 76, 67, 57, 56, 62, 45, 69, 58, 64, 65, 88, 74, 57, 55, 70, 58, 33, 72, 56, 37, 57, 59, 79, 61, 54, 60, 80, 83, 67, 73, 69, 73, 68, 71, 72, 71, 63, 63, 46, 53, 58, 63, 52, 62, 73, 72, 113, 59, 67, 81, 70, 117, 65, 76, 67, 62, 61, 65, 65, 47, 97, 83, 76, 58, 65, 59, 62, 60, 76, 88, 75, 62, 64, 97, 104, 45, 57, 68, 83, 76, 74, 66, 66, 61, 64, 55, 63, 52, 82, 55, 54, 43, 64, 66, 64, 71, 53, 88, 62, 100, 57, 46, 79, 72, 53, 69, 58, 58, 59, 60, 72, 63, 80, 65, 57, 73, 71, 67, 69, 68, 71, 46, 45, 50, 64, 66, 61, 65, 65, 37, 39, 51, 77, 69, 61, 65, 82, 99, 68, 78, 53, 58, 59, 51, 57, 
136, 76, 68, 76, 65, 88, 49, 81, 72, 84, 57, 73, 56, 88, 78, 67, 84, 98, 64, 52, 62, 59, 77, 78, 59, 115, 61, 34, 74, 73, 65, 69, 80, 80, 60, 64, 64, 64, 75, 61, 78, 96, 73, 75, 49, 65, 49, 59, 75, 120, 69, 73, 69, 46, 57, 57, 54, 66, 72, 86, 75, 65, 78, 33, 58, 54, 80, 57, 86, 69, 46, 70, 69, 64, 64, 44, 61, 87, 70, 75, 59, 76, 67, 34, 43, 73, 56, 65, 60, 62, 82, 48, 53, 64, 58, 50, 77, 65, 45, 61, 45, 60, 69, 52, 77, 54, 54, 52, 70, 65, 52, 69, 55, 78, 72, 71, 75, 68, 62, 67, 70, 73, 74, 64, 54, 79, 64, 80, 52, 76, 47, 63, 58, 48, 69, 65, 80, 62, 99, 65, 59, 54, 66, 34, 50, 60, 74, 89, 71, 78, 80, 72, 64, 69, 61, 73, 61, 73, 62, 58, 60, 60, 58, 43, 65, 85, 63, 58, 55, 57, 59, 68, 88, 69, 57, 59, 70, 54, 98, 62, 65, 64, 60, 62, 55, 92, 84, 50, 37, 67, 39, 52, 59, 70, 84, 78, 57, 77, 80, 76, 71, 82, 73, 68, 54, 48, 88, 105, 59, 60, 67, 61, 59, 63, 59, 61, 49, 77, 90, 70, 62, 64, 52, 48, 46, 59, 61, 86, 64, 64, 95, 48, 56, 60, 61, 66, 89, 73, 76, 81, 78, 101, 88, 75, 65, 55, 70, 69, 62, 60, 74, 61, 55, 56, 60, 61, 65, 56, 69, 70, 83, 65, 64, 68, 58, 69, 71, 63, 79, 53, 66, 64, 46, 83, 74, 62, 100, 71, 79, 50, 61, 56, 85, 62, 81, 69, 58, 50, 75, 73, 49, 63, 55, 86, 71, 60, 64, 44, 68, 62, 46, 54, 76, 65, 51, 69, 64, 57, 76, 67, 51, 62, 65, 65, 65, 87, 66, 71, 83, 64, 59, 59, 55, 68, 46, 55, 62, 74, 70, 73, 77, 76, 95, 111, 79, 62, 60, 65, 84, 43, 55, 82, 63, 61, 74, 67, 74, 38, 70, 62, 60, 82, 68, 58, 61, 63, 95, 76, 70, 66, 63, 50, 73, 54, 76, 82, 94, 61, 83, 61, 69, 64, 84, 65, 61, 68, 62, 54, 65, 59, 57, 51, 84, 60, 50, 64, 64, 45, 107, 66, 93, 54, 41, 66, 83, 107, 58, 42, 65, 54, 54, 62, 88, 69, 68, 63, 62, 66, 57, 85, 56, 73, 58, 44, 140, 50, 78, 70, 67, 48, 63, 56, 65, 56, 85, 109, 57, 53, 76, 82, 45, 66, 71, 63, 58, 64, 60, 57, 48, 62, 52, 58, 67, 65, 55, 64, 72, 67, 64, 52, 61, 54, 60, 48, 76, 58, 84, 54, 63, 77, 84, 59, 89, 91, 72, 50, 59, 49, 70, 74, 53, 51, 92, 62, 83, 58, 29, 51, 54, 54, 75, 54, 57, 71, 70, 59, 51, 92, 42, 75, 68, 60, 90, 60, 57, 59, 78, 64, 55, 51, 47, 73, 60, 51, 63, 70, 58, 67, 56, 71, 57, 65, 70, 50, 48, 69, 76, 60, 47, 51, 91, 64, 60, 45, 51, 69, 70, 44, 112, 76, 59, 57, 62, 62, 80, 47, 62, 85, 75, 45, 68, 59, 64, 63, 66, 62, 51, 55, 48, 74, 62, 55, 77, 77, 56, 48, 49, 59, 94, 69, 70, 68, 73, 60, 90, 54, 110, 55, 56, 78, 65, 52, 68, 52, 58, 85, 58, 66, 85, 61, 74, 80, 83, 56, 96, 82, 79, 63, 82, 55, 52, 58, 84, 87, 61, 60, 47, 64, 63, 70, 60, 61, 55, 62, 80, 77, 70, 60, 63, 60, 70, 100, 62, 73, 60, 70, 63, 81, 51, 62, 69, 77, 68, 67, 81, 69, 59, 53, 69, 64, 71, 73, 42, 56, 50, 66, 67, 88, 88, 60, 53, 78, 59, 47, 64, 43, 60, 70, 29, 75, 40, 51, 51, 50, 68, 80, 63, 104, 109, 55, 79, 97, 57, 69, 68, 67, 68, 63, 43, 70, 78, 80, 89, 41, 53, 74, 98, 87, 78, 54, 69, 48, 68, 66, 74, 61, 92, 80, 54, 44, 96, 66, 63, 41, 52, 55, 64, 65, 56, 91, 49, 62, 67, 51, 39, 59, 75, 61, 74, 37, 52, 70, 49, 79, 59, 54, 80, 65, 48, 75, 53, 51, 74, 64, 44, 68, 59, 87, 55, 87, 70, 77, 56, 81, 66, 67, 81, 62, 55, 80, 46, 73, 63, 74, 48, 60, 70, 65, 61, 64, 64, 52, 98, 49, 68, 63, 61, 70, 56, 64, 58, 79, 73, 52, 41, 72, 53, 51, 71, 70, 55, 59, 69, 68, 72, 86, 59, 69, 70, 55, 79, 67, 75, 55, 55, 62, 67, 78, 64, 62, 51, 52, 77, 66, 61, 67, 78, 57, 68, 57, 49, 50, 66, 76, 57, 73, 55, 74, 68, 60, 85, 54, 71, 62, 72, 80, 65, 71, 53, 63, 76, 78, 90, 55, 110, 71, 48, 62, 69, 48, 64, 61, 70, 65, 77, 53, 57, 76, 65, 68, 66, 74, 84, 74, 34, 67, 28, 77, 68, 55, 60, 89, 64, 82, 56, 55, 51, 45, 77, 63, 45, 55, 62, 53, 59, 80, 53, 53, 64, 57, 52, 56, 68, 51, 83, 55, 60, 79, 44, 60, 56, 69, 74, 73, 
74, 44, 65, 89, 61, 54, 61, 78, 61, 62, 53, 53, 47, 81, 68, 55, 65, 79, 93, 54, 66, 48, 64, 74, 57, 48, 105, 94, 41, 69, 83, 61, 77, 63, 71, 86, 57, 43, 47, 42, 54, 69, 88, 58, 70, 63, 61, 62, 71, 78, 65, 69, 50, 67, 59, 57, 87, 48, 59, 53, 69, 62, 77, 46, 74, 93, 65, 74, 61, 56, 101, 103, 64, 71, 66, 62, 63, 35, 58, 87, 73, 95, 83, 81, 55, 83, 95, 128, 66, 47, 63, 69, 60, 54, 57, 43, 61, 64, 62, 82, 67, 79, 87, 70, 117, 65, 52, 61, 75, 60, 65, 64, 75, 88, 67, 34, 56, 56, 56, 110, 40, 52, 40, 82, 59, 57, 89, 62, 50, 52, 55, 66, 56, 44, 75, 38, 66, 55, 73, 79, 46, 108, 53, 59, 62, 56, 69, 71, 61, 72, 75, 99, 75, 56, 46, 55, 57, 86, 56, 60, 71, 62, 55, 72, 68, 71, 61, 73, 45, 37, 43, 84, 46, 48, 72, 70, 79, 89, 50, 66, 60, 85, 63, 53, 81, 53, 45, 65, 63, 45, 64, 89, 76, 79, 52, 73, 41, 84, 64, 63, 58, 59, 53, 70, 75, 57, 54, 72, 52, 79, 71, 84, 54, 69, 46, 60, 82, 66, 70, 61, 90, 58, 61, 80, 50, 79, 57, 54, 63, 78, 45, 67, 59, 55, 70, 78, 58, 52, 77, 71, 49, 46, 73, 57, 60, 87, 69, 63, 56, 94, 70, 55, 66, 65, 60, 94, 81, 49, 91, 63, 72, 44, 53, 42, 57, 65, 61, 92, 76, 65, 58, 72, 58, 72, 75, 65, 50, 64, 64, 71, 69, 71, 65, 59, 68, 72, 61, 89, 62, 76, 66, 69, 62, 82, 60, 54, 69, 69, 61, 53, 71, 43, 65, 49, 94, 95, 62, 63, 53, 43, 54, 59, 67, 71, 65, 68, 65, 62, 77, 65, 56, 59, 56, 89, 66, 31, 61, 68, 43, 40, 63, 57, 64, 54, 52, 50, 51, 74, 70, 63, 64, 72, 71, 78, 76, 84, 34, 36, 51, 65, 44, 65, 59, 58, 62, 94, 56, 77, 51, 59, 63, 70, 63, 69, 55, 66, 95, 58, 70, 39, 52, 64, 86, 54, 66, 70, 48, 55, 47, 85, 73, 86, 62, 64, 55, 73, 62, 48, 62, 60, 49, 63, 54, 64, 74, 64, 76, 80, 60, 70, 63, 55, 64, 53, 65, 62, 79, 106, 59, 86, 82, 72, 72, 64, 47, 77, 73, 68, 43, 73, 71, 80, 53, 78, 53, 47, 85, 78, 63, 49, 62, 71, 52, 63, 86, 74, 80, 59, 49, 62, 75, 47, 134, 52, 39, 97, 51, 63, 76, 75, 59, 61, 55, 66, 62, 80, 57, 33, 46, 67, 64, 56, 66, 100, 113, 51, 56, 62, 53, 54, 68, 52, 75, 55, 67, 75, 57, 61, 77, 73, 83, 72, 66, 75, 78, 63, 43, 50, 75, 59, 91, 44, 60, 83, 51, 72, 71, 73, 58, 57, 70, 76, 50, 46, 64, 80, 62, 69, 77, 82, 61, 87, 50, 64, 43, 85, 58, 60, 73, 76, 95, 100, 67, 57, 69, 55, 68, 67, 64, 64, 74, 43, 50, 69, 62, 87, 64, 81, 72, 57, 105, 48, 58, 57, 64, 78, 54, 72, 80, 69, 52, 51, 77, 66, 71, 75, 63, 67, 63, 59, 75, 54, 37, 54, 56, 42, 55, 68, 47, 61, 67, 67, 74, 103, 56, 73, 53, 62, 65, 86, 55, 65, 67, 44, 58, 54, 59, 54, 69, 63, 67, 63, 56, 65, 61, 58, 74, 65, 69, 82, 79, 85, 67, 58, 61, 48, 73, 59, 50, 48, 98, 62, 60, 57, 60, 53, 78, 37, 74, 56, 104, 88, 68, 61, 63, 73, 64, 101, 68, 56, 74, 60, 53, 68, 88, 83, 55, 63, 72, 56, 71, 85, 65, 48, 78, 69, 89, 64, 62, 54, 45, 68, 65, 57, 87, 59, 68, 72, 56, 81, 93, 57, 59, 55, 83, 73, 57, 61, 59, 54, 58, 61, 73, 98, 73, 87, 49, 72, 93, 68, 77, 62, 59, 59, 55, 50, 60, 88, 53, 57, 32, 70, 40, 85, 61, 65, 71, 66, 48, 65, 81, 59, 78, 56, 44, 88, 54, 49, 107, 60, 48, 67, 83, 57, 99, 66, 81, 51, 77, 72, 82, 69, 72, 80, 53, 55, 73, 52, 66, 60, 95, 52, 92, 73, 59, 81, 103, 47, 80, 69, 70, 81, 52, 76, 46, 57, 63, 89, 51, 58, 53, 67, 96, 65, 55, 42, 80, 76, 46, 52, 87, 74, 65, 40, 79, 62, 81, 63, 38, 87, 54, 43, 63, 54, 46, 59, 50, 95, 64, 51, 65, 86, 61, 56, 59, 57, 93, 57, 72, 70, 56, 51, 65, 70, 61, 67, 76, 91, 55, 49, 63, 55, 77, 68, 65, 91, 93, 60, 56, 98, 81, 63, 50, 72, 54, 61, 59, 60, 82, 86, 67, 80, 55, 70, 72, 97, 80, 82, 48, 54, 64, 67, 102, 60, 44, 72, 72, 56, 61, 83, 79, 46, 66, 60, 102, 54, 69, 89, 53, 82, 67, 53, 63, 86, 62, 56, 44, 117, 73, 54, 76, 59, 65, 61, 62, 59, 76, 58, 69, 55, 67, 89, 75, 46, 97, 87, 46, 77, 57, 65, 64, 66, 79, 
61, 61, 58, 82, 74, 53, 93, 62, 59, 50, 60, 93, 56, 66, 45, 79, 50, 74, 64, 64, 73, 76, 69, 80, 39, 72, 83, 55, 74, 97, 51, 56, 59, 57, 72, 66, 67, 87, 65, 75, 64, 53, 61, 81, 90, 84, 61, 73, 49, 91, 82, 58, 64, 61, 75, 65, 68, 56, 68, 76, 55, 104, 46, 61, 57, 67, 64, 64, 67, 70, 64, 89, 66, 68, 80, 49, 70, 59, 66, 69, 65, 39, 67, 66, 52, 51, 79, 88, 89, 59, 66, 26, 46, 47, 59, 58, 64, 68, 61, 75, 64, 66, 64, 54, 51, 43, 43, 51, 57, 67, 81, 65, 59, 60, 70, 59, 64, 67, 78, 56, 74, 48, 75, 44, 79, 92, 80, 62, 62, 50, 49, 66, 64, 46, 82, 83, 63, 55, 62, 80, 72, 68, 47, 59, 55, 69, 42, 73, 57, 65, 58, 71, 57, 64, 69, 72, 57, 99, 66, 65, 52, 62, 65, 102, 71, 52, 55, 67, 72, 84, 69, 54, 52, 65, 75, 84, 55, 53, 90, 83, 80, 74, 84, 60, 61, 61, 50, 66, 67, 47, 110, 116, 81, 67, 75, 67, 55, 68, 58, 46, 61, 48, 67, 72, 44, 68, 61, 66, 32, 76, 69, 63, 68, 61, 60, 58, 55, 54, 55, 48, 68, 92, 58, 70, 76, 69, 91, 65, 43, 58, 68, 86, 57, 89, 67, 60, 67, 52, 52, 60, 67, 68, 67, 62, 78, 57, 63, 60, 63, 58, 80, 58, 82, 54, 93, 82, 55, 77, 54, 58, 60, 61, 87, 47, 55, 47, 72, 51, 60, 53, 56, 77, 54, 62, 65, 74, 54, 83, 63, 72, 94, 57, 68, 57, 53, 53, 54, 45, 74, 75, 82, 78, 61, 88, 55, 67, 59, 69, 53, 88, 65, 66, 71, 78, 64, 61, 63, 62, 71, 74, 84, 57, 52, 80, 85, 88, 94, 51, 75, 57, 60, 41, 64, 80, 50, 58, 72, 78, 67, 84, 84, 65, 64, 64, 63, 55, 58, 90, 97, 56, 71, 58, 63, 45, 64, 56, 75, 73, 69, 64, 69, 74, 105, 119, 58, 68, 54, 78, 77, 66, 87, 82, 71, 48, 71, 71, 74, 66, 64, 91, 52, 72, 55, 73, 41, 73, 54, 69, 85, 54, 52, 55, 45, 50, 51, 51, 67, 57, 67, 54, 79, 56, 60, 71, 73, 68, 112, 70, 91, 80, 66, 78, 75, 73, 81, 48, 59, 57, 96, 59, 98, 92, 64, 74, 94, 59, 57, 63, 66, 57, 56, 43, 110, 55, 54, 65, 62, 59, 40, 55, 46, 81, 75, 58, 111, 61, 70, 71, 76, 62, 66, 75, 62, 49, 44, 52, 76, 64, 55, 50, 60, 65, 62, 67, 58, 58, 57, 58, 57, 54, 71, 67, 63, 58, 75, 70, 67, 70, 65, 68, 89, 62, 81, 61, 48, 50, 88, 58, 57, 59, 99, 64, 49, 78, 49, 73, 63, 63, 63, 66, 81, 118, 63, 75, 81, 55, 67, 56, 74, 76, 56, 69, 56, 45, 78, 69, 56, 62, 60, 71, 73, 55, 71, 63, 61, 68, 61, 65, 112, 73, 66, 56, 72, 72, 79, 53, 141, 80, 71, 63, 96, 59, 55, 53, 57, 86, 63, 69, 47, 68, 68, 76, 67, 53, 66, 61, 111, 75, 61, 77, 60, 62, 72, 62, 119, 63, 76, 76, 64, 61, 64, 49, 75, 53, 62, 58, 61, 57, 79, 71, 70, 86, 62, 64, 54, 84, 72, 107, 58, 73, 68, 58, 50, 70, 60, 54, 68, 59, 79, 57, 64, 62, 66, 69, 55, 83, 70, 69, 68, 72, 65, 62, 65, 56, 90, 54, 53, 47, 49, 90, 56, 51, 64, 45, 63, 55, 46, 64, 58, 57, 57, 59, 60, 67, 92, 70, 55, 69, 66, 77, 59, 75, 84, 68, 65, 54, 58, 103, 83, 69, 68, 66, 60, 59, 67, 75, 65, 55, 60, 60, 68, 68, 64, 56, 60, 50, 51, 57, 55, 76, 65, 56, 51, 75, 68, 89, 87, 56, 76, 44, 68, 63, 46, 63, 74, 86, 68, 68, 48, 67, 76, 64, 65, 63, 68, 50, 77, 59, 48, 87, 79, 68, 61, 55, 80, 67, 75, 54, 58, 49, 63, 62, 56, 88, 82, 87, 78, 68, 56, 63, 75, 70, 62, 56, 79, 68, 116, 63, 71, 81, 65, 65, 114, 58, 48, 66, 63, 63, 63, 76, 53, 99, 60, 77, 54, 68, 62, 79, 61, 80, 99, 99, 46, 62, 62, 75, 49, 81, 59, 49, 46, 58, 72, 72, 53, 64, 70, 70, 80, 63, 56, 73, 78, 61, 59, 88, 58, 69, 51, 85, 65, 72, 55, 99, 67, 55, 71, 58, 46, 55, 67, 57, 75, 63, 61, 77, 68, 92, 82, 60, 57, 99, 55, 61, 53, 66, 63, 67, 64, 112, 64, 83, 58, 52, 57, 86, 62, 103, 59, 83, 59, 60, 76, 60, 68, 83, 63, 61, 66, 64, 119, 68, 62, 58, 52, 63, 57, 66, 62, 63, 61, 71, 62, 80, 53, 55, 99, 59, 69, 52, 69, 68, 63, 56, 65, 70, 69, 62, 47, 58, 55, 60, 80, 81, 71, 42, 64, 64, 62, 66, 66, 70, 67, 64, 67, 111, 75, 108, 66, 71, 46, 76, 53, 49, 63, 54, 79, 68, 56, 69, 82, 58, 69, 
49, 74, 60, 58, 68, 62, 66, 65, 72, 55, 81, 66, 88, 58, 76, 67, 79, 65, 65, 38, 51, 62, 72, 71, 102, 61, 53, 68, 53, 50, 62, 70, 56, 67, 60, 52, 74, 64, 73, 68, 56, 74, 57, 64, 54, 55, 65, 74, 89, 52, 77, 67, 64, 70, 72, 67, 56, 79, 111, 72, 54, 53, 67, 63, 52, 80, 67, 47, 58, 70, 71, 59, 62, 52, 59, 73, 66, 61, 58, 62, 55, 57, 62, 67, 55, 43, 65, 63, 61, 70, 67, 52, 52, 69, 41, 74, 62, 86, 67, 81, 79, 66, 62, 86, 58, 61, 64, 70, 68, 63, 84, 63, 90, 63, 60, 143, 63, 62, 76, 55, 66, 63, 59, 51, 90, 62, 61, 88, 88, 58, 68, 71, 55, 59, 57, 67, 78, 58, 116, 47, 66, 49, 74, 62, 58, 56, 68, 54, 51, 60, 58, 87, 58, 71, 69, 102, 66, 88, 78, 67, 57, 42, 70, 67, 49, 71, 49, 53, 71, 56, 68, 49, 74, 73, 83, 58, 58, 78, 53, 71, 76, 64, 57, 85, 66, 66, 70, 58, 64, 39, 106, 55, 57, 73, 82, 79, 67, 73, 61, 88, 70, 77, 80, 102, 79, 57, 56, 58, 70, 69, 53, 64, 66, 54, 59, 49, 71, 68, 94, 63, 67, 100, 57, 62, 52, 70, 64, 71, 62, 84, 59, 58, 56, 53, 47, 72, 60, 61, 65, 81, 46, 51, 105, 68, 63, 69, 70, 55, 88, 51, 56, 88, 57, 77, 58, 67, 67, 58, 62, 68, 52, 84, 66, 81, 61, 65, 57, 60, 54, 73, 74, 55, 61, 64, 63, 54, 59, 75, 57, 69, 66, 47, 64, 46, 61, 77, 68, 70, 61, 64, 80, 66, 74, 78, 72, 79, 61, 57, 62, 61, 63, 67, 79, 48, 90, 75, 52, 63, 88, 66, 52, 45, 78, 83, 60, 73, 52, 56, 57, 63, 65, 101, 55, 65, 55, 75, 41, 68, 71, 54, 51, 59, 65, 60, 55, 57, 67, 59, 70, 46, 69, 58, 93, 55, 64, 61, 70, 79, 63, 70, 52, 57, 73, 78, 52, 72, 61, 84, 60, 75, 71, 68, 108, 77, 62, 71, 81, 71, 82, 83, 61, 56, 54, 59, 63, 56, 68, 70, 57, 80, 83, 57, 59, 72, 58, 84, 72, 61, 67, 49, 98, 73, 63, 58, 58, 76, 81, 64, 87, 45, 64, 112, 80, 54, 59, 56, 79, 64, 62, 72, 65, 70, 43, 80, 63, 61, 56, 57, 86, 57, 61, 75, 61, 111, 61, 78, 70, 57, 51, 72, 70, 46, 90, 73, 59, 53, 61, 68, 83, 96, 61, 69, 60, 63, 60, 86, 67, 59, 56, 84, 92, 66, 62, 61, 46, 58, 55, 65, 65, 103, 59, 57, 56, 64, 49, 56, 55, 73, 62, 60, 64, 61, 60, 71, 82, 83, 55, 65, 71, 71, 57, 60, 66, 60, 61, 71, 75, 73, 67, 57, 56, 63, 56, 54, 110, 69, 69, 58, 59, 59, 61, 70, 87, 48, 69, 69, 52, 67, 62, 77, 48, 73, 74, 63, 72, 62, 70, 61, 66, 58, 58, 62, 77, 46, 68, 63, 60, 58, 60, 75, 55, 75, 89, 65, 49, 54, 64, 67, 82, 67, 69, 62, 74, 63, 57, 65, 77, 59, 65, 72, 66, 70, 58, 86, 58, 65, 84, 55, 55, 54, 69, 64, 51, 61, 60, 54, 90, 71, 49, 68, 77, 69, 56, 74, 42, 70, 62, 61, 58, 64, 51, 58, 97, 64, 59, 55, 62, 63, 111, 82, 71, 56, 80, 68, 60, 51, 69, 54, 50, 66, 66, 55, 62, 70, 80, 94, 72, 57, 50, 77, 79, 67, 81, 80, 55, 71, 74, 50, 63, 60, 51, 99, 60, 68, 49, 66, 43, 55, 50, 63, 60, 56, 121, 56, 69, 59, 65, 129, 71, 115, 70, 58, 63, 65, 80, 64, 60, 55, 61, 52, 69, 76, 67, 62, 68, 40, 56, 60, 77, 85, 62, 63, 55, 75, 77, 59, 71, 49, 47, 82, 57, 65, 101, 50, 53, 59, 75, 62, 43, 76, 86, 76, 76, 80, 56, 81, 62, 107, 72, 62, 60, 54, 59, 63, 60, 78, 67, 64, 62, 75, 68, 69, 55, 63, 52, 69, 69, 64, 58, 55, 62, 71, 86, 45, 74, 57, 51, 71, 61, 56, 57, 54, 60, 62, 56, 57, 52, 62, 75, 66, 79, 56, 45, 68, 64, 57, 63, 64, 64, 59, 74, 65, 55, 76, 73, 92, 61, 60, 47, 67, 114, 71, 88, 63, 57, 89, 56, 59, 62, 72, 53, 70, 90, 69, 78, 77, 89, 46, 43, 61, 62, 61, 94, 65, 53, 57, 60, 60, 101, 53, 61, 43, 58, 74, 60, 97, 53, 55, 59, 42, 67, 66, 57, 77, 45, 47, 75, 72, 65, 43, 60, 50, 92, 52, 65, 68, 62, 76, 58, 56, 54, 51, 64, 53, 69, 72, 92, 60, 104, 92, 75, 58, 60, 67, 68, 67, 67, 46, 69, 61, 95, 71, 64, 56, 82, 85, 59, 48, 95, 53, 68, 64, 68, 38, 63, 62, 76, 100, 67, 68, 57, 50, 79, 59, 60, 109, 57, 98, 59, 72, 64, 54, 54, 76, 54, 69, 66, 66, 61, 62, 59, 63, 51, 47, 67, 60, 69, 61, 64, 58, 59, 62, 
97, 64, 62, 75, 66, 57, 59, 47, 78, 102, 66, 53, 65, 85, 73, 57, 66, 72, 67, 62, 56, 74, 73, 62, 109, 55, 50, 106, 90, 59, 69, 48, 90, 65, 40, 50, 58, 70, 44, 65, 64, 52, 76, 67, 83, 59, 73, 60, 65, 67, 74, 72, 78, 62, 71, 52, 55, 53, 85, 72, 68, 57, 61, 55, 66, 75, 89, 55, 74, 62, 57, 73, 54, 64, 61, 78, 75, 64, 70, 59, 62, 80, 80, 79, 63, 62, 53, 58, 60, 60, 65, 67, 73, 67, 79, 66, 54, 72, 49, 80, 46, 61, 60, 65, 66, 83, 78, 85, 26, 74, 61, 58, 71, 56, 38, 84, 49, 67, 66, 64, 58, 58, 60, 64, 105, 57, 64, 85, 69, 54, 72, 46, 77, 71, 67, 108, 63, 47, 61, 106, 60, 74, 66, 64, 56, 56, 70, 74, 57, 55, 73, 53, 64, 64, 74, 61, 69, 56, 58, 67, 58, 54, 44, 46, 51, 56, 91, 74, 90, 53, 40, 52, 100, 57, 109, 78, 97, 84, 60, 68, 77, 72, 62, 69, 60, 77, 65, 86, 84, 62, 121, 81, 77, 79, 73, 61, 64, 98, 61, 66, 71, 69, 77, 47, 26, 64, 56, 75, 52, 56, 66, 65, 54, 61, 51, 68, 66, 68, 70, 51, 51, 74, 60, 56, 60, 60, 59, 64, 61, 62, 61, 82, 91, 69, 95, 56, 54, 65, 59, 43, 70, 57, 64, 68, 86, 109, 69, 50, 67, 65, 48, 72, 69, 58, 66, 48, 54, 108, 52, 105, 66, 61, 66, 63, 51, 51, 69, 106, 65, 64, 47, 53, 69, 66, 61, 53, 69, 56, 58, 61, 67, 57, 67, 56, 52, 83, 68, 83, 60, 63, 64, 60, 64, 77, 64, 70, 72, 68, 72, 130, 96, 66, 61, 57, 59, 73, 61, 59, 65, 65, 49, 64, 64, 67, 80, 67, 90, 60, 76, 66, 54, 66, 63, 63, 73, 63, 64, 60, 57, 63, 63, 49, 96, 64, 57, 70, 102, 72, 96, 60, 65, 63, 57, 81, 67, 49, 93, 63, 74, 53, 44, 57, 43, 59, 74, 69, 60, 64, 68, 78, 69, 74, 58, 68, 66, 68, 62, 69, 55, 78, 45, 55, 62, 61, 66, 68, 60, 81, 56, 65, 68, 66, 31, 71, 62, 70, 66, 54, 88, 51, 70, 82, 62, 63, 53, 67, 60, 79, 62, 66, 54, 69, 56, 95, 67, 69, 74, 48, 61, 51, 60, 64, 84, 53, 71, 53, 69, 56, 66, 51, 54, 64, 65, 67, 77, 52, 58, 55, 69, 70, 77, 61, 67, 65, 85, 38, 53, 42, 70, 54, 57, 54, 60, 63, 62, 85, 103, 70, 60, 76, 63, 84, 58, 62, 87, 39, 73, 80, 79, 62, 59, 82, 81, 59, 47, 59, 51, 69, 56, 64, 61, 72, 65, 56, 65, 53, 69, 61, 55, 54, 52, 62, 76, 71, 54, 76, 49, 58, 54, 71, 67, 73, 56, 71, 87, 69, 50, 64, 56, 58, 82, 69, 74, 56, 54, 66, 63, 81, 82, 74, 100, 92, 52, 53, 57, 78, 50, 53, 61, 72, 53, 73, 63, 78, 56, 75, 50, 65, 60, 61, 63, 56, 85, 63, 62, 55, 77, 55, 84, 69, 76, 62, 63, 83, 59, 65, 50, 62, 68, 68, 72, 55, 61, 55, 66, 54, 55, 54, 64, 61, 51, 50, 54, 77, 70, 88, 58, 54, 62, 72, 46, 77, 56, 71, 58, 53, 77, 65, 73, 68, 69, 93, 59, 61, 49, 61, 67, 84, 55, 56, 50, 89, 60, 57, 60, 85, 76, 67, 78, 76, 63, 68, 64, 88, 63, 54, 64, 66, 66, 75, 62, 66, 104, 59, 58, 71, 59, 69, 69, 69, 68, 79, 52, 73, 56, 65, 56, 65, 51, 61, 59, 89, 55, 75, 49, 61, 68, 55, 59, 65, 61, 50, 52, 69, 99, 50, 61, 92, 67, 78, 62, 56, 60, 52, 55, 60, 39, 64, 50, 55, 70, 55, 66, 63, 52, 67, 62, 70, 65, 65, 60, 66, 63, 63, 103, 60, 49, 61, 52, 96, 73, 72, 67, 65, 62, 65, 58, 73, 57, 66, 64, 20, 55, 74, 74, 77, 75, 69, 57, 63, 64, 52, 69, 65, 70, 69, 56, 68, 68, 58, 96, 55, 63, 111, 72, 61, 49, 62, 76, 60, 67, 62, 69, 70, 58, 62, 68, 64, 47, 66, 131, 67, 103, 61, 73, 51, 58, 67, 70, 70, 76, 51, 58, 53, 64, 74, 69, 66, 61, 69, 45, 62, 69, 72, 70, 44, 66, 71, 66, 69, 52, 74, 88, 75, 58, 63, 62, 67, 72, 59, 69, 60, 56, 48, 64, 76, 58, 66, 56, 60, 65, 75, 62, 77, 56, 72, 67, 60, 50, 54, 75, 64, 65, 75, 60, 63, 43, 85, 60, 64, 69, 68, 58, 64, 77, 75, 50, 67, 78, 45, 72, 62, 73, 56, 69, 69, 60, 47, 65, 64, 70, 50, 66, 46, 61, 82, 64, 58, 67, 65, 70, 49, 68, 69, 58, 70, 84, 48, 84, 52, 70, 75, 68, 59, 68, 61, 73, 76, 89, 64, 73, 65, 54, 50, 73, 53, 63, 68, 58, 57, 58, 65, 61, 62, 40, 64, 57, 53, 56, 46, 72, 59, 79, 78, 50, 89, 78, 80, 72, 62, 60, 74, 76, 
64, 48, 68, 84, 77, 109, 79, 51, 80, 85, 72, 62, 76, 66, 54, 63, 80, 59, 67, 69, 62, 114, 78, 62, 80, 97, 51, 66, 69, 89, 72, 86, 70, 60, 75, 62, 52, 61, 99, 61, 61, 71, 68, 83, 68, 51, 71, 66, 62, 50, 77, 73, 62, 61, 54, 64, 71, 76, 60, 59, 58, 63, 82, 62, 103, 92, 76, 63, 54, 69, 46, 49, 63, 66, 42, 49, 58, 63, 69, 58, 66, 76, 79, 63, 60, 65, 67, 67, 60, 78, 51, 75, 70, 66, 53, 68, 56, 62, 63, 63, 57, 63, 61, 46, 66, 48, 67, 80, 110, 62, 77, 59, 64, 61, 51, 56, 51, 54, 53, 72, 53, 62, 60, 79, 56, 76, 60, 66, 58, 60, 69, 56, 52, 52, 69, 40, 70, 60, 85, 68, 84, 62, 65, 63, 73, 65, 61, 60, 70, 101, 83, 60, 69, 107, 48, 93, 75, 75, 43, 62, 58, 60, 61, 79, 73, 83, 82, 73, 77, 65, 62, 65, 61, 104, 70, 62, 76, 55, 64, 53, 74, 55, 63, 62, 58, 69, 73, 62, 76, 63, 58, 55, 72, 85, 43, 41, 42, 74, 62, 67, 75, 74, 58, 66, 65, 64, 72, 64, 93, 60, 67, 74, 53, 57, 68, 81, 63, 70, 56, 76, 71, 54, 69, 68, 82, 75, 71, 79, 77, 78, 64, 63, 67, 58, 51, 63, 65, 61, 53, 61, 72, 59, 65, 67, 67, 56, 61, 72, 65, 60, 48, 57, 62, 69, 86, 64, 100, 72, 73, 51, 54, 72, 59, 59, 62, 70, 51, 56, 71, 55, 70, 87, 115, 73, 61, 57, 86, 61, 63, 60, 63, 58, 87, 60, 70, 48, 57, 83, 78, 66, 67, 70, 47, 60, 59, 54, 64, 92, 55, 74, 56, 56, 76, 75, 59, 53, 64, 65, 59, 68, 65, 71, 61, 70, 82, 61, 74, 101, 64, 66, 51, 48, 73, 65, 56, 60, 70, 81, 60, 67, 85, 55, 58, 86, 69, 59, 71, 86, 70, 64, 65, 81, 65, 64, 68, 60, 67, 70, 62, 72, 79, 58, 64, 88, 64, 71, 75, 69, 50, 48, 49, 56, 76, 67, 65, 57, 77, 64, 103, 52, 70, 58, 50, 72, 79, 72, 49, 63, 65, 56, 64, 72, 52, 59, 80, 49, 97, 66, 67, 59, 74, 61, 73, 48, 103, 54, 61, 46, 124, 66, 97, 93, 50, 67, 71, 91, 59, 51, 88, 63, 63, 63, 72, 90, 76, 91, 83, 61, 79, 63, 58, 70, 67, 91, 65, 53, 57, 65, 63, 51, 74, 62, 56, 69, 64, 57, 67, 57, 64, 78, 102, 79, 56, 59, 70, 53, 74, 60, 70, 56, 83, 61, 113, 70, 64, 52, 26, 55, 51, 96, 51, 69, 70, 82, 57, 71, 49, 51, 107, 47, 61, 75, 80, 63, 90, 63, 56, 72, 71, 58, 54, 69, 51, 64, 56, 85, 58, 88, 49, 90, 50, 80, 64, 43, 58, 73, 90, 71, 62, 61, 60, 61, 59, 80, 87, 43, 57, 56, 58, 54, 87, 62, 92, 60, 86, 74, 63, 69, 65, 53, 72, 54, 54, 49, 73, 55, 61, 68, 68, 53, 67, 55, 55, 67, 99, 62, 83, 43, 58, 60, 59, 53, 110, 68, 69, 85, 68, 57, 83, 78, 116, 59, 61, 42, 75, 71, 64, 52, 70, 66, 82, 54, 64, 56, 47, 52, 66, 62, 38, 48, 65, 83, 54, 81, 65, 67, 74, 67, 54, 66, 69, 52, 53, 73, 70, 56, 90, 52, 62, 61, 53, 72, 53, 49, 61, 58, 62, 71, 90, 66, 62, 85, 72, 95, 59, 66, 50, 88, 58, 49, 60, 59, 67, 79, 75, 47, 47, 76, 78, 63, 63, 62, 65, 44, 60, 60, 56, 88, 60, 60, 55, 68, 116, 50, 59, 68, 56, 67, 64, 58, 68, 51, 67, 52, 62, 74, 81, 63, 62, 81, 60, 62, 81, 70, 68, 61, 65, 78, 56, 49, 67, 83, 84, 63, 75, 66, 54, 56, 59, 69, 59, 51, 59, 68, 83, 64, 77, 51, 67, 49, 62, 77, 68, 71, 59, 109, 69, 62, 87, 63, 56, 49, 49, 58, 92, 50, 54, 77, 59, 59, 44, 41, 64, 62, 86, 62, 53, 67, 54, 50, 62, 59, 41, 65, 62, 76, 96, 79, 62, 63, 46, 44, 61, 103, 66, 66, 62, 49, 51, 76, 55, 69, 71, 49, 67, 49, 59, 47, 71, 66, 61, 64, 67, 57, 57, 63, 59, 69, 57, 90, 107, 67, 51, 63, 63, 61, 76, 56, 58, 62, 46, 62, 98, 69, 57, 73, 82, 64, 58, 73, 72, 64, 73, 45, 97, 63, 46, 77, 59, 66, 52, 60, 80, 63, 57, 64, 70, 63, 108, 63, 106, 61, 77, 88, 60, 61, 76, 50, 67, 42, 75, 80, 69, 58, 52, 63, 54, 66, 77, 60, 86, 61, 63, 81, 65, 58, 56, 72, 64, 51, 37, 58, 70, 86, 55, 74, 51, 61, 70, 70, 49, 70, 73, 60, 74, 98, 64, 67, 113, 47, 58, 74, 60, 57, 59, 86, 94, 63, 62, 66, 44, 79, 55, 75, 70, 61, 73, 50, 42, 57, 63, 58, 63, 61, 48, 78, 75, 56, 67, 62, 66, 119, 62, 105, 67, 67, 60, 49, 58, 77, 59, 77, 
75, 54, 83, 80, 64, 57, 80, 58, 61, 75, 48, 75, 73, 80, 68, 45, 66, 49, 62, 79, 67, 89, 54, 53, 61, 57, 78, 93, 57, 67, 85, 62, 59, 50, 64, 62, 72, 56, 69, 65, 58, 66, 60, 65, 58, 85, 75, 59, 45, 55, 68, 58, 54, 74, 61, 70, 75, 78, 73, 54, 66, 67, 59, 74, 95, 62, 50, 49, 64, 76, 58, 69, 54, 80, 47, 68, 60, 54, 70, 92, 74, 63, 58, 55, 57, 71, 60, 79, 77, 58, 59, 76, 56, 60, 119, 61, 46, 50, 56, 55, 73, 79, 52, 52, 82, 57, 57, 80, 57, 59, 54, 53, 51, 101, 72, 67, 53, 59, 78, 67, 75, 61, 70, 66, 74, 61, 55, 70, 59, 57, 71, 70, 51, 71, 75, 64, 70, 56, 68, 48, 68, 44, 51, 78, 92, 80, 100, 122, 65, 67, 71, 88, 62, 50, 46, 66, 55, 60, 83, 57, 61, 63, 88, 104, 61, 77, 76, 95, 61, 55, 76, 88, 56, 94, 66, 76, 89, 72, 66, 60, 131, 71, 60, 72, 42, 72, 47, 57, 71, 65, 50, 70, 68, 54, 70, 46, 58, 74, 48, 97, 67, 56, 61, 81, 93, 67, 100, 76, 74, 60, 43, 79, 66, 79, 72, 69, 70, 92, 92, 70, 68, 63, 66, 54, 57, 68, 64, 83, 75, 55, 68, 78, 55, 64, 58, 70, 62, 48, 68, 80, 71, 69, 57, 80, 63, 43, 54, 54, 61, 69, 97, 69, 44, 66, 71, 77, 61, 67, 83, 67, 65, 60, 62, 52, 61, 63, 51, 48, 64, 56, 69, 63, 46, 49, 58, 72, 72, 69, 55, 59, 67, 60, 85, 74, 73, 54, 59, 84, 62, 68, 58, 73, 59, 76, 68, 98, 70, 73, 63, 72, 52, 47, 76, 58, 70, 66, 61, 112, 76, 54, 101, 67, 68, 61, 65, 91, 59, 42, 61, 46, 62, 49, 50, 62, 85, 65, 60, 69, 65, 67, 64, 76, 65, 44, 79, 87, 79, 81, 78, 61, 68, 64, 82, 69, 82, 64, 49, 58, 64, 84, 77, 66, 69, 67, 63, 61, 82, 76, 76, 52, 67, 78, 56, 66, 100, 58, 75, 75, 70, 49, 69, 63, 66, 59, 55, 66, 61, 74, 55, 74, 60, 79, 66, 62, 62, 48, 73, 63, 65, 62, 75, 79, 64, 58, 72, 79, 64, 87, 77, 72, 66, 60, 66, 72, 62, 79, 59, 71, 56, 64, 75, 69, 60, 76, 92, 64, 63, 63, 93, 59, 58, 62, 107, 65, 71, 44, 70, 65, 57, 80, 67, 88, 60, 53, 65, 79, 50, 57, 42, 88, 90, 56, 61, 73, 55, 65, 66, 72, 57, 83, 65, 61, 71, 59, 62, 79, 62, 49, 93, 86, 53, 65, 59, 60, 60, 48, 63, 70, 57, 85, 72, 78, 68, 70, 75, 82, 71, 64, 65, 51, 52, 78, 57, 71, 68, 59, 79, 43, 58, 60, 49, 52, 93, 60, 60, 67, 63, 71, 54, 65, 53, 59, 67, 77, 83, 54, 50, 55, 67, 42, 100, 71, 64, 70, 62, 69, 61, 67, 70, 62, 46, 69, 46, 56, 36, 68, 68, 81, 73, 70, 69, 57, 68, 77, 69, 94, 91, 56, 77, 53, 105, 53, 78, 116, 70, 63, 47, 76, 61, 61, 65, 67, 46, 57, 67, 65, 70, 86, 59, 66, 51, 64, 98, 56, 67, 81, 51, 68, 58, 86, 115, 62, 64, 66, 58, 59, 52, 64, 78, 64, 66, 80, 105, 81, 72, 59, 50, 49, 50, 56, 58, 46, 95, 67, 77, 56, 80, 68, 55, 68, 68, 77, 57, 61, 55, 70, 80, 55, 64, 58, 75, 66, 48, 78, 55, 64, 65, 66, 75, 79, 48, 66, 57, 47, 61, 59, 61, 98, 64, 68, 66, 68, 77, 47, 57, 62, 53, 54, 142, 61, 59, 71, 53, 63, 86, 86, 83, 62, 67, 78, 65, 63, 87, 62, 72, 75, 83, 62, 48, 67, 67, 59, 94, 52, 65, 88, 88, 49, 61, 47, 63, 71, 53, 57, 57, 68, 98, 113, 65, 59, 69, 68, 69, 65, 101, 56, 66, 68, 58, 57, 47, 75, 84, 85, 62, 64, 61, 63, 55, 61, 67, 81, 61, 57, 56, 50, 51, 61, 76, 57, 42, 59, 73, 63, 56, 57, 68, 71, 79, 64, 64, 65, 55, 65, 87, 63, 66, 68, 100, 65, 64, 60, 65, 86, 64, 70, 56, 55, 58, 52, 61, 55, 60, 58, 63, 73, 82, 61, 57, 72, 59, 57, 60, 67, 44, 75, 79, 64, 100, 53, 48, 63, 57, 59, 56, 54, 65, 76, 54, 70, 52, 65, 77, 79, 103, 67, 63, 64, 65, 66, 62, 67, 64, 63, 84, 63, 69, 65, 51, 87, 54, 84, 54, 68, 81, 61, 59, 56, 57, 60, 48, 51, 73, 61, 63, 77, 68, 57, 50, 77, 71, 83, 55, 61, 61, 64, 80, 62, 77, 73, 69, 55, 51, 75, 73, 69, 68, 75, 68, 50, 93, 87, 53, 62, 65, 65, 47, 59, 61, 70, 63, 48, 55, 62, 58, 65, 95, 55, 75, 52, 71, 57, 80, 48, 49, 74, 60, 65, 86, 81, 44, 78, 63, 65, 55, 95, 55, 50, 54, 77, 52, 54, 52, 51, 82, 66, 72, 63, 54, 51, 63, 68, 42, 
64, 59, 65, 60, 71, 61, 62, 55, 97, 95, 63, 61, 81, 73, 73, 88, 59, 79, 56, 62, 73, 68, 72, 77, 55, 63, 58, 56, 75, 71, 50, 73, 52, 68, 59, 44, 63, 67, 58, 74, 76, 46, 65, 49, 90, 62, 55, 91, 69, 91, 67, 63, 70, 69, 56, 51, 54, 43, 56, 62, 59, 59, 65, 63, 59, 52, 57, 74, 66, 101, 65, 50, 88, 87, 74, 66, 87, 50, 41, 66, 53, 70, 70, 64, 48, 55, 69, 59, 65, 59, 51, 71, 70, 55, 56, 51, 51, 104, 57, 69, 49, 57, 49, 54, 64, 127, 62, 62, 95, 65, 60, 81, 64, 69, 57, 44, 60, 88, 51, 68, 55, 59, 66, 65, 58, 70, 58, 58, 77, 50, 104, 63, 115, 53, 66, 69, 59, 77, 81, 82, 56, 57, 77, 82, 75, 82, 59, 85, 62, 58, 60, 65, 65, 70, 62, 76, 55, 59, 72, 49, 74, 71, 93, 68, 49, 55, 64, 86, 79, 75, 55, 49, 81, 55, 71, 54, 62, 67, 58, 93, 61, 62, 60, 68, 66, 50, 81, 54, 53, 47, 81, 77, 54, 77, 83, 70, 68, 71, 64, 69, 56, 74, 45, 70, 70, 97, 75, 38, 57, 76, 61, 66, 74, 59, 87, 63, 60, 59, 69, 65, 62, 72, 59, 61, 72, 58, 57, 80, 90, 60, 59, 74, 69, 69, 60, 87, 60, 50, 56, 61, 63, 64, 52, 70, 64, 79, 59, 72, 78, 75, 64, 60, 64, 60, 77, 82, 53, 63, 67, 68, 55, 65, 70, 68, 74, 64, 79, 63, 63, 53, 57, 41, 77, 59, 59, 77, 58, 63, 61, 54, 68, 57, 58, 90, 80, 78, 60, 115, 114, 81, 65, 60, 83, 55, 62, 58, 57, 64, 77, 98, 48, 69, 72, 89, 123, 67, 67, 68, 104, 51, 51, 80, 82, 69, 55, 55, 68, 86, 72, 73, 55, 56, 56, 69, 91, 57, 69, 64, 68, 66, 66, 64, 73, 75, 72, 52, 72, 65, 84, 57, 56, 59, 65, 59, 77, 73, 63, 85, 54, 92, 74, 57, 68, 56, 66, 54, 51, 61, 80, 78, 54, 69, 50, 57, 82, 71, 64, 65, 59, 76, 77, 58, 71, 58, 53, 60, 83, 59, 71, 64, 79, 73, 78, 65, 52, 45, 62, 63, 55, 67, 63, 95, 39, 48, 58, 61, 63, 81, 57, 93, 71, 78, 72, 61, 59, 54, 58, 50, 56, 67, 47, 74, 72, 52, 35, 74, 52, 57, 69, 65, 62, 70, 58, 87, 59, 75, 67, 72, 68, 65, 66, 56, 79, 78, 75, 62, 76, 65, 48, 50, 77, 48, 58, 54, 58, 47, 81, 68, 89, 87, 61, 117, 63, 58, 66, 50, 68, 60, 58, 48, 56, 43, 67, 67, 57, 90, 71, 47, 82, 67, 63, 66, 75, 66, 54, 60, 83, 63, 62, 84, 74, 72, 64, 100, 77, 64, 71, 60, 47, 60, 72, 76, 59, 55, 81, 58, 62, 75, 60, 78, 68, 50, 78, 62, 88, 72, 67, 60, 72, 60, 64, 78, 81, 77, 70, 64, 52, 73, 71, 76, 78, 73, 54, 42, 54, 61, 53, 62, 66, 49, 66, 83, 77, 59, 54, 66, 84, 73, 92, 57, 75, 56, 79, 72, 77, 66, 82, 58, 71, 54, 53, 86, 65, 52, 71, 94, 76, 74, 66, 75, 79, 65, 69, 95, 55, 87, 58, 71, 79, 45, 68, 61, 60, 66, 79, 51, 82, 64, 52, 65, 86, 93, 57, 62, 84, 50, 57, 59, 59, 66, 61, 65, 26, 50, 72, 48, 85, 81, 79, 55, 73, 76, 65, 49, 57, 73, 45, 68, 89, 63, 64, 68, 64, 56, 85, 63, 72, 50, 67, 46, 50, 76, 47, 57, 58, 64, 56, 61, 67, 57, 66, 52, 56, 83, 65, 61, 58, 95, 62, 64, 57, 71, 62, 54, 76, 75, 42, 63, 75, 103, 59, 50, 57, 58, 67, 68, 50, 67, 70, 60, 64, 63, 65, 58, 68, 55, 53, 71, 77, 55, 66, 59, 77, 61, 50, 90, 68, 78, 59, 56, 62, 108, 52, 80, 51, 73, 53, 59, 61, 53, 63, 63, 60, 64, 75, 60, 59, 75, 74, 66, 58, 74, 64, 87, 76, 66, 77, 63, 70, 64, 59, 52, 52, 67, 46, 71, 57, 54, 69, 72, 74, 56, 68, 98, 60, 62, 62, 84, 42, 66, 57, 60, 59, 72, 65, 70, 60, 85, 50, 51, 74, 65, 70, 75, 57, 62, 74, 63, 63, 47, 78, 75, 50, 63, 52, 60, 68, 72, 69, 54, 106, 55, 71, 59, 63, 63, 86, 81, 108, 78, 72, 48, 65, 65, 75, 85, 60, 60, 78, 49, 74, 53, 57, 77, 54, 73, 60, 88, 60, 52, 70, 51, 53, 80, 75, 75, 67, 45, 65, 63, 68, 59, 65, 50, 63, 56, 58, 69, 60, 58, 62, 62, 52, 75, 60, 61, 78, 76, 43, 57, 63, 65, 68, 59, 69, 100, 71, 68, 78, 78, 70, 73, 69, 99, 40, 73, 83, 47, 72, 58, 68, 57, 62, 63, 47, 55, 59, 73, 67, 46, 55, 60, 54, 62, 60, 65, 76, 66, 63, 52, 56, 56, 63, 56, 75, 68, 73, 57, 60, 40, 86, 64, 51, 53, 90, 67, 79, 61, 58, 55, 51, 58, 55, 68, 
70, 64, 93, 63, 66, 66, 70, 45, 41, 63, 77, 62, 53, 85, 71, 81, 58, 59, 71, 57, 63, 67, 59, 64, 69, 74, 80, 57, 81, 63, 71, 108, 62, 59, 55, 79, 52, 53, 65, 57, 68, 60, 57, 57, 65, 72, 68, 71, 69, 67, 65, 106, 59, 58, 76, 59, 61, 52, 53, 58, 64, 66, 60, 70, 66, 74, 71, 60, 75, 58, 54, 72, 71, 74, 69, 66, 66, 67, 79, 57, 71, 61, 51, 63, 57, 62, 59, 78, 57, 65, 61, 62, 69, 62, 52, 61, 56, 67, 35, 72, 56, 65, 56, 95, 66, 72, 52, 52, 51, 55, 63, 63, 65, 67, 64, 72, 73, 58, 84, 57, 41, 57, 59, 75, 71, 56, 78, 82, 54, 44, 71, 66, 54, 69, 67, 65, 55, 72, 62, 54, 65, 79, 55, 66, 55, 67, 59, 68, 65, 93, 47, 62, 97, 71, 66, 70, 64, 68, 66, 73, 93, 54, 71, 87, 78, 46, 49, 45, 60, 85, 68, 84, 52, 49, 49, 63, 70, 53, 69, 66, 70, 55, 53, 68, 77, 56, 76, 90, 81, 72, 76, 50, 59, 73, 70, 65, 61, 56, 63, 66, 82, 54, 62, 51, 59, 59, 75, 64, 60, 89, 50, 56, 62, 67, 75, 58, 62, 83, 62, 93, 71, 63, 55, 51, 58, 51, 72, 55, 61, 76, 49, 56, 55, 69, 71, 61, 68, 97, 59, 70, 58, 61, 72, 74, 62, 115, 68, 81, 46, 72, 61, 75, 63, 65, 47, 54, 61, 75, 72, 63, 62, 63, 66, 59, 33, 61, 73, 70, 86, 62, 53, 64, 44, 63, 56, 51, 64, 50, 55, 54, 58, 77, 62, 61, 53, 93, 59, 86, 69, 52, 67, 56, 58, 65, 71, 66, 61, 69, 60, 70, 44, 60, 77, 119, 67, 50, 72, 61, 52, 48, 62, 60, 65, 75, 119, 59, 57, 67, 58, 100, 57, 72, 69, 57, 73, 61, 51, 64, 63, 52, 47, 62, 69, 61, 48, 49, 81, 64, 56, 80, 54, 89, 71, 61, 63, 70, 55, 74, 65, 54, 71, 52, 47, 46, 53, 71, 61, 46, 107, 76, 56, 41, 74, 62, 55, 82, 66, 62, 63, 92, 74, 67, 53, 64, 78, 71, 66, 55, 49, 87, 64, 76, 67, 55, 71, 80, 48, 64, 63, 72, 67, 61, 67, 63, 77, 77, 59, 68, 79, 79, 66, 59, 63, 61, 57, 66, 66, 96, 58, 72, 65, 77, 60, 73, 58, 44, 59, 47, 51, 95, 58, 58, 69, 63, 46, 54, 70, 78, 59, 59, 67, 81, 83, 56, 74, 62, 70, 58, 45, 62, 72, 71, 68, 86, 80, 58, 86, 94, 53, 59, 62, 63, 69, 54, 41, 69, 110, 64, 76, 53, 95, 97, 69, 71, 59, 59, 82, 56, 65, 58, 91, 71, 68, 79, 56, 72, 62, 61, 57, 36, 79, 54, 64, 69, 50, 49, 62, 56, 85, 100, 74, 84, 61, 68, 85, 46, 58, 68, 95, 82, 84, 85, 61, 64, 83, 104, 73, 91, 66, 59, 51, 57, 83, 58, 54, 63, 56, 83, 54, 64, 85, 85, 68, 89, 83, 61, 80, 74, 41, 119, 58, 76, 59, 75, 58, 68, 68, 104, 94, 67, 80, 60, 87, 50, 69, 71, 43, 62, 57, 50, 71, 62, 57, 56, 59, 82, 57, 56, 45, 74, 56, 67, 48, 64, 101, 60, 62, 47, 83, 64, 81, 43, 73, 52, 53, 70, 58, 59, 81, 58, 57, 62, 56, 63, 61, 64, 59, 57, 81, 63, 66, 84, 77, 59, 100, 64, 73, 68, 47, 66, 57, 74, 80, 57, 61, 54, 43, 88, 78, 97, 69, 60, 62, 100, 58, 76, 81, 73, 59, 73, 71, 63, 83, 76, 74, 69, 61, 58, 70, 58, 68, 57, 57, 78, 60, 60, 59, 64, 68, 58, 60, 55, 58, 81, 74, 74, 62, 71, 52, 49, 60, 74, 62, 85, 50, 50, 53, 64, 81, 62, 58, 63, 79, 62, 118, 60, 70, 59, 80, 63, 87, 66, 56, 73, 56, 66, 69, 65, 72, 43, 57, 42, 53, 73, 77, 76, 73, 66, 91, 68, 66, 46, 71, 75, 60, 90, 61, 67, 45, 70, 72, 48, 68, 73, 56, 60, 66, 61, 55, 50, 83, 56, 64, 57, 79, 64, 71, 57, 63, 69, 51, 61, 68, 70, 61, 89, 72, 54, 70, 73, 82, 50, 48, 46, 67, 57, 81, 58, 81, 60, 78, 82, 55, 59, 71, 47, 47, 65, 63, 60, 35, 44, 46, 67, 75, 75, 46, 82, 73, 67, 61, 109, 74, 43, 76, 68, 83, 80, 139, 52, 45, 80, 58, 49, 68, 64, 71, 60, 65, 57, 61, 69, 48, 69, 60, 55, 63, 70, 80, 62, 58, 52, 63, 56, 60, 72, 101, 85, 43, 58, 65, 49, 51, 55, 62, 68, 85, 66, 64, 95, 58, 60, 52, 58, 65, 93, 79, 66, 79, 63, 51, 116, 63, 68, 80, 103, 53, 63, 57, 64, 58, 69, 54, 110, 66, 75, 37, 62, 84, 101, 47, 68, 66, 41, 68, 54, 58, 59, 71, 84, 57, 63, 72, 44, 51, 64, 73, 61, 55, 61, 57, 69, 62, 102, 57, 68, 66, 63, 64, 50, 79, 54, 47, 65, 87, 73, 86, 56, 87, 53, 72, 63, 
77, 59, 66, 73, 80, 57, 71, 77, 58, 56, 84, 56, 45, 71, 107, 80, 51, 64, 64, 64, 64, 60, 58, 54, 61, 68, 55, 92, 74, 67, 57, 79, 71, 47, 65, 50, 86, 58, 87, 66, 63, 69, 62, 77, 80, 93, 63, 70, 100, 67, 53, 60, 55, 58, 63, 56, 59, 78, 61, 49, 82, 72, 67, 76, 56, 54, 71, 73, 69, 67, 70, 55, 52, 84, 62, 72, 71, 70, 48, 70, 77, 74, 52, 61, 61, 102, 67, 70, 55, 59, 61, 55, 66, 64, 51, 73, 75, 75, 71, 51, 61, 72, 70, 57, 66, 56, 65, 58, 51, 87, 76, 55, 49, 72, 72, 62, 43, 80, 62, 61, 70, 72, 65, 97, 81, 63, 66, 52, 70, 38, 59, 67, 36, 59, 73, 57, 105, 65, 72, 61, 81, 67, 70, 67, 59, 73, 54, 68, 74, 87, 72, 61, 55, 52, 100, 74, 51, 61, 69, 60, 68, 56, 68, 63, 41, 61, 57, 62, 56, 61, 73, 80, 57, 57, 50, 84, 68, 64, 64, 67, 51, 53, 69, 46, 64, 73, 58, 57, 44, 73, 67, 94, 118, 55, 51, 54, 61, 68, 53, 75, 69, 51, 62, 60, 41, 101, 54, 56, 55, 83, 81, 62, 51, 70, 93, 43, 62, 63, 56, 55, 54, 60, 86, 59, 58, 66, 45, 63, 60, 68, 88, 65, 69, 68, 82, 67, 66, 57, 67, 60, 68, 88, 92, 67, 60, 86, 57, 83, 56, 58, 57, 67, 62, 86, 70, 55, 53, 70, 67, 59, 67, 74, 65, 65, 72, 46, 76, 64, 73, 71, 56, 64, 59, 62, 61, 54, 74, 99, 66, 62, 46, 64, 70, 69, 72, 64, 63, 60, 52, 76, 67, 63, 55, 55, 125, 45, 66, 81, 54, 52, 60, 63, 71, 68, 65, 66, 57, 76, 63, 62, 70, 66, 52, 62, 60, 42, 51, 75, 61, 67, 58, 55, 80, 76, 72, 81, 81, 63, 70, 77, 54, 32, 67, 57, 47, 61, 59, 70, 59, 72, 53, 68, 63, 52, 56, 55, 66, 65, 83, 54, 74, 60, 67, 69, 60, 69, 71, 71, 57, 66, 58, 56, 74, 62, 56, 63, 54, 64, 55, 57, 57, 63, 56, 62, 72, 102, 69, 68, 63, 81, 53, 51, 54, 74, 66, 82, 58, 37, 74, 60, 50, 59, 66, 84, 65, 60, 59, 37, 59, 66, 69, 71, 81, 71, 77, 80, 65, 76, 86, 69, 62, 63, 49, 44, 84, 87, 61, 71, 74, 44, 58, 43, 67, 72, 65, 73, 61, 59, 59, 76, 76, 75, 78, 97, 63, 60, 65, 55, 62, 55, 66, 57, 76, 53, 62, 64, 67, 62, 52, 74, 74, 61, 70, 52, 66, 59, 52, 82, 62, 65, 94, 98, 83, 78, 53, 71, 60, 61, 69, 67, 59, 54, 53, 62, 53, 68, 68, 63, 66, 47, 65, 81, 51, 62, 59, 64, 84, 58, 73, 46, 63, 68, 67, 58, 63, 41, 51, 62, 57, 75, 33, 61, 71, 50, 62, 54, 62, 48, 44, 59, 76, 73, 73, 49, 67, 70, 70, 49, 54, 66, 40, 80, 44, 53, 53, 69, 74, 57, 59, 48, 65, 69, 52, 79, 65, 65, 59, 63, 77, 58, 74, 45, 57, 51, 62, 54, 71, 58, 118, 106, 43, 68, 59, 74, 53, 66, 70, 58, 58, 74, 66, 69, 72, 85, 77, 61, 67, 70, 57, 114, 72, 73, 67, 51, 62, 76, 69, 78, 60, 66, 55, 54, 75, 59, 59, 79, 76, 61, 72, 93, 76, 59, 59, 61, 49, 58, 49, 59, 71, 59, 85, 98, 88, 74, 73, 64, 64, 64, 69, 84, 63, 71, 50, 57, 63, 62, 84, 77, 46, 62, 63, 55, 69, 63, 54, 80, 61, 55, 80, 54, 77, 68, 50, 59, 66, 60, 50, 57, 68, 60, 65, 87, 65, 85, 69, 71, 80, 79, 58, 63, 64, 66, 55, 61, 85, 66, 69, 70, 92, 71, 75, 61, 95, 90, 87, 50, 86, 60, 57, 54, 56, 57, 71, 88, 49, 106, 78, 56, 74, 51, 65, 62, 79, 60, 68, 128, 67, 60, 54, 73, 71, 93, 60, 53, 51, 50, 74, 83, 77, 58, 50, 64, 60, 55, 56, 83, 54, 78, 66, 63, 102, 65, 67, 69, 61, 79, 63, 54, 64, 68, 74, 71, 49, 59, 61, 71, 80, 52, 80, 53, 46, 77, 69, 34, 65, 61, 60, 80, 43, 72, 63, 75, 59, 94, 49, 63, 68, 60, 64, 73, 62, 70, 54, 63, 90, 57, 62, 71, 77, 67, 69, 74, 79, 50, 54, 57, 66, 54, 55, 57, 58, 61, 68, 61, 65, 64, 46, 56, 51, 53, 72, 86, 68, 75, 67, 45, 60, 51, 73, 64, 66, 60, 53, 57, 60, 73, 43, 77, 68, 46, 81, 48, 60, 66, 46, 74, 80, 70, 71, 48, 70, 58, 75, 57, 118, 55, 62, 62, 63, 66, 79, 56, 69, 84, 90, 59, 72, 68, 83, 78, 94, 72, 52, 70, 54, 64, 73, 73, 62, 61, 75, 71, 75, 61, 60, 73, 54, 84, 53, 70, 47, 76, 72, 52, 55, 56, 65, 60, 66, 57, 63, 69, 80, 66, 49, 67, 57, 58, 47, 76, 58, 54, 79, 62, 54, 77, 51, 55, 59, 75, 55, 53, 66, 60, 
65, 67, 53, 51, 60, 68, 96, 84, 90, 51, 60, 71, 56, 52, 41, 63, 57, 57, 68, 73, 63, 54, 54, 66, 79, 55, 68, 67, 58, 90, 64, 92, 69, 52, 72, 61, 83, 56, 77, 81, 88, 37, 66, 68, 65, 61, 71, 62, 79, 80, 59, 75, 64, 75, 109, 57, 64, 63, 62, 34, 50, 65, 55, 72, 52, 60, 56, 52, 73, 74, 65, 71, 58, 57, 58, 55, 82, 60, 75, 108, 58, 46, 93, 62, 62, 61, 88, 47, 82, 78, 72, 72, 68, 70, 59, 71, 80, 69, 56, 79, 57, 50, 83, 63, 72, 96, 66, 83, 71, 90, 65, 61, 74, 52, 59, 70, 60, 79, 56, 61, 69, 58, 62, 55, 51, 77, 88, 61, 55, 59, 85, 49, 76, 78, 74, 74, 73, 69, 84, 56, 78, 57, 69, 71, 59, 61, 69, 61, 45, 51, 94, 71, 112, 62, 72, 79, 45, 59, 62, 54, 77, 73, 68, 76, 61, 68, 91, 68, 48, 54, 59, 74, 55, 79, 70, 79, 70, 60, 61, 61, 60, 68, 45, 64, 64, 73, 72, 66, 69, 71, 53, 65, 75, 79, 65, 71, 60, 64, 70, 53, 46, 57, 94, 65, 68, 73, 52, 70, 39, 52, 72, 67, 47, 61, 105, 64, 87, 59, 56, 75, 67, 73, 64, 85, 64, 63, 73, 67, 67, 50, 64, 57, 59, 46, 59, 51, 47, 70, 56, 65, 73, 108, 70, 73, 69, 75, 77, 60, 59, 78, 73, 64, 82, 78, 51, 58, 66, 66, 67, 57, 78, 64, 120, 73, 99, 64, 50, 51, 51, 87, 65, 69, 47, 68, 61, 60, 67, 57, 74, 68, 45, 74, 60, 63, 64, 72, 66, 66, 81, 66, 79, 80, 50, 62, 59, 52, 82, 62, 76, 82, 65, 62, 56, 65, 75, 64, 71, 88, 63, 49, 78, 67, 69, 33, 79, 74, 80, 50, 61, 50, 59, 55, 47, 69, 81, 66, 69, 55, 56, 65, 79, 100, 85, 53, 48, 44, 59, 62, 70, 68, 60, 60, 38, 64, 58, 71, 61, 58, 55, 69, 58, 53, 61, 65, 56, 57, 89, 49, 58, 68, 61, 95, 86, 61, 67, 87, 71, 71, 73, 58, 53, 73, 47, 50, 59, 59, 72, 80, 91, 74, 67, 71, 51, 60, 43, 85, 76, 54, 66, 76, 50, 65, 44, 66, 52, 68, 82, 64, 62, 55, 68, 51, 67, 60, 79, 72, 45, 81, 101, 60, 64, 60, 79, 74, 70, 75, 56, 66, 72, 58, 79, 84, 63, 84, 60, 60, 62, 68, 70, 64, 53, 70, 54, 54, 101, 68, 58, 72, 76, 55, 78, 66, 69, 73, 65, 58, 79, 67, 94, 73, 82, 67, 99, 63, 52, 68, 66, 102, 77, 69, 77, 70, 65, 76, 54, 75, 72, 60, 54, 85, 75, 87, 58, 65, 85, 75, 75, 59, 54, 65, 42, 46, 54, 65, 57, 50, 66, 62, 66, 51, 70, 51, 49, 76, 56, 75, 54, 62, 58, 63, 56, 68, 59, 66, 53, 76, 57, 67, 51, 68, 58, 63, 40, 62, 58, 91, 84, 62, 71, 101, 60, 55, 64, 53, 75, 44, 65, 92, 72, 73, 75, 69, 58, 81, 57, 54, 73, 83, 51, 110, 65, 87, 58, 55, 70, 73, 58, 75, 65, 43, 55, 69, 72, 30, 86, 50, 60, 58, 74, 50, 55, 82, 32, 70, 66, 47, 49, 73, 56, 75, 83, 79, 94, 64, 59, 70, 75, 54, 81, 67, 60, 44, 65, 81, 79, 75, 71, 70, 64, 69, 60, 55, 72, 57, 54, 65, 54, 74, 72, 62, 94, 65, 57, 71, 61, 64, 66, 48, 66, 75, 58, 59, 83, 69, 68, 70, 61, 69, 57, 68, 71, 69, 54, 51, 59, 61, 75, 73, 59, 73, 61, 60, 59, 62, 84, 57, 68, 62, 71, 51, 72, 71, 63, 42, 58, 63, 81, 54, 54, 56, 65, 71, 62, 32, 59, 66, 62, 58, 76, 80, 56, 117, 49, 55, 67, 62, 69, 42, 68, 46, 60, 69, 78, 61, 62, 60, 68, 87, 83, 65, 45, 76, 73, 62, 61, 67, 63, 63, 69, 59, 75, 64, 62, 68, 94, 84, 61, 62, 47, 53, 58, 94, 56, 51, 78, 82, 66, 61, 67, 123, 111, 69, 53, 74, 51, 70, 78, 50, 80, 60, 64, 46, 60, 56, 56, 52, 86, 56, 53, 75, 65, 65, 63, 69, 68, 65, 51, 65, 58, 85, 74, 67, 66, 87, 56, 63, 88, 60, 82, 69, 55, 61, 60, 67, 54, 62, 55, 47, 64, 61, 74, 59, 63, 51, 68, 54, 62, 79, 58, 35, 73, 67, 59, 77, 44, 58, 61, 48, 55, 73, 68, 72, 81, 69, 44, 64, 99, 77, 75, 60, 62, 92, 63, 70, 63, 58, 58, 68, 67, 55, 64, 46, 68, 84, 87, 60, 94, 86, 87, 63, 45, 48, 65, 102, 57, 87, 87, 64, 67, 98, 59, 88, 69, 56, 59, 69, 71, 62, 60, 70, 75, 70, 69, 67, 76, 58, 64, 60, 65, 64, 52, 80, 63, 85, 91, 82, 45, 69, 56, 78, 73, 49, 76, 74, 68, 86, 84, 47, 65, 53, 59, 72, 55, 41, 55, 65, 96, 97, 84, 60, 87, 103, 45, 49, 63, 65, 53, 67, 77, 61, 60, 82, 67, 67, 
66, 55, 57, 46, 61, 71, 74, 75, 58, 56, 58, 68, 84, 64, 83, 71, 75, 70, 56, 60, 73, 73, 85, 65, 52, 64, 43, 99, 58, 82, 74, 61, 61, 63, 58, 99, 62, 75, 67, 57, 92, 55, 90, 44, 72, 66, 58, 63, 57, 64, 65, 59, 50, 72, 80, 49, 50, 61, 65, 67, 72, 52, 89, 62, 52, 67, 48, 78, 74, 67, 65, 81, 54, 59, 64, 71, 84, 76, 76, 85, 63, 59, 54, 70, 72, 62, 75, 42, 62, 66, 50, 55, 46, 107, 61, 63, 63, 83, 102, 70, 81, 76, 62, 72, 58, 54, 97, 61, 58, 59, 68, 55, 70, 58, 64, 58, 66, 61, 71, 45, 69, 82, 68, 40, 79, 110, 58, 75, 61, 72, 45, 46, 55, 69, 46, 72, 71, 43, 73, 65, 65, 70, 51, 60, 53, 72, 59, 89, 107, 53, 65, 45, 94, 42, 69, 82, 73, 57, 75, 79, 79, 79, 65, 64, 49, 52, 73, 91, 68, 67, 43, 74, 65, 58, 69, 66, 61, 67, 59, 76, 90, 42, 42, 84, 62, 64, 61, 58, 68, 113, 53, 71, 66, 73, 103, 70, 75, 67, 59, 85, 61, 53, 62, 65, 53, 68, 53, 74, 65, 126, 46, 64, 64, 75, 76, 65, 61, 56, 64, 107, 52, 55, 84, 62, 66, 69, 70, 64, 105, 48, 84, 73, 78, 66, 53, 63, 91, 59, 49, 51, 49, 48, 60, 67, 54, 59, 67, 61, 56, 80, 49, 88, 70, 67, 53, 64, 87, 83, 77, 60, 56, 75, 71, 48, 62, 74, 68, 81, 57, 61, 98, 45, 50, 103, 51, 66, 70, 71, 70, 79, 57, 57, 72, 64, 58, 77, 61, 61, 59, 89, 102, 79, 38, 64, 44, 45, 60, 72, 56, 43, 68, 47, 72, 60, 55, 78, 84, 65, 55, 64, 65, 58, 55, 59, 74, 71, 71, 40, 60, 56, 59, 71, 53, 64, 83, 69, 49, 66, 82, 57, 64, 87, 49, 92, 70, 45, 47, 60, 91, 66, 63, 58, 74, 53, 63, 50, 60, 64, 52, 93, 76, 61, 54, 68, 61, 74, 100, 62, 62, 66, 65, 80, 71, 77, 69, 91, 76, 50, 81, 56, 81, 75, 62, 61, 70, 57, 85, 68, 61, 65, 69, 58, 55, 65, 42, 70, 54, 58, 65, 95, 68, 58, 70, 53, 76, 52, 51, 59, 72, 51, 62, 67, 104, 56, 70, 52, 60, 71, 75, 77, 59, 77, 73, 83, 56, 78, 77, 62, 57, 46, 60, 63, 86, 53, 63, 57, 45, 70, 58, 83, 74, 63, 70, 45, 74, 59, 97, 71, 73, 64, 59, 49, 63, 79, 68, 54, 48, 67, 50, 64, 59, 44, 67, 52, 87, 54, 71, 54, 58, 75, 60, 70, 58, 50, 67, 85, 79, 46, 94, 63, 56, 65, 63, 49, 40, 63, 53, 63, 82, 57, 42, 55, 61, 57, 68, 45, 68, 65, 70, 72, 78, 62, 54, 62, 80, 92, 70, 59, 71, 101, 53, 61, 71, 55, 51, 54, 68, 61, 49, 67, 63, 61, 69, 65, 71, 54, 73, 76, 72, 56, 45, 89, 54, 113, 68, 102, 60, 77, 79, 53, 55, 57, 61, 106, 51, 77, 85, 82, 77, 61, 45, 75, 71, 43, 55, 69, 52, 52, 64, 82, 70, 59, 48, 66, 60, 99, 61, 53, 78, 60, 62, 53, 53, 55, 53, 63, 64, 89, 67, 82, 79, 93, 69, 48, 95, 71, 74, 96, 78, 51, 56, 86, 53, 58, 72, 54, 80, 59, 52, 65, 56, 73, 98, 60, 54, 62, 84, 94, 78, 50, 64, 68, 66, 66, 57, 55, 107, 66, 65, 49, 45, 75, 109, 72, 54, 82, 58, 44, 50, 68, 47, 55, 55, 61, 68, 71, 50, 60, 66, 67, 57, 85, 67, 47, 62, 69, 49, 56, 50, 49, 62, 74, 63, 102, 64, 95, 78, 71, 63, 53, 67, 60, 55, 83, 61, 91, 49, 62, 81, 68, 92, 69, 95, 62, 67, 68, 54, 93, 84, 56, 52, 52, 60, 94, 70, 79, 72, 58, 60, 59, 64, 67, 80, 65, 51, 75, 66, 80, 62, 85, 47, 59, 75, 56, 59, 57, 61, 91, 57, 55, 62, 65, 52, 58, 76, 67, 61, 69, 59, 81, 66, 73, 53, 55, 75, 66, 81, 56, 97, 78, 51, 84, 60, 73, 81, 70, 59, 72, 70, 63, 50, 80, 54, 67, 73, 63, 59, 67, 54, 63, 60, 97, 62, 61, 55, 57, 67, 75, 56, 74, 66, 54, 83, 65, 54, 60, 61, 86, 54, 81, 67, 57, 50, 66, 53, 50, 82, 64, 74, 51, 68, 69, 67, 73, 84, 54, 57, 64, 47, 56, 46, 64, 57, 70, 56, 55, 57, 62, 65, 85, 45, 67, 66, 99, 69, 77, 59, 53, 77, 69, 63, 62, 80, 74, 61, 60, 49, 70, 55, 75, 70, 82, 75, 83, 65, 49, 68, 65, 73, 67, 55, 55, 74, 54, 59, 61, 69, 60, 130, 58, 61, 85, 63, 64, 72, 84, 58, 54, 52, 62, 79, 88, 61, 68, 62, 63, 56, 50, 56, 105, 89, 61, 71, 62, 44, 79, 70, 71, 81, 64, 61, 44, 69, 70, 63, 86, 68, 59, 60, 88, 45, 73, 55, 44, 60, 45, 72, 78, 50, 73, 68, 61, 59, 
78, 66, 69, 45, 64, 77, 56, 78, 64, 61, 53, 59, 69, 40, 80, 48, 60, 59, 67, 66, 81, 72, 115, 53, 117, 70, 53, 51, 56, 59, 58, 61, 59, 66, 56, 58, 68, 57, 73, 75, 98, 81, 65, 79, 82, 73, 60, 55, 61, 64, 76, 68, 58, 72, 61, 104, 64, 104, 81, 77, 66, 83, 65, 80, 37, 50, 54, 75, 63, 59, 98, 62, 83, 90, 68, 72, 74, 59, 68, 54, 58, 79, 116, 64, 54, 51, 50, 73, 75, 77, 56, 63, 71, 69, 57, 59, 59, 52, 74, 77, 68, 57, 68, 66, 50, 72, 61, 75, 55, 57, 75, 79, 62, 77, 72, 84, 46, 68, 84, 62, 60, 65, 63, 60, 78, 61, 55, 70, 71, 65, 35, 71, 56, 63, 61, 58, 71, 66, 91, 70, 76, 60, 55, 61, 76, 71, 67, 63, 80, 46, 66, 80, 64, 57, 70, 74, 58, 61, 61, 59, 66, 72, 37, 83, 54, 60, 56, 65, 57, 64, 56, 79, 72, 60, 53, 67, 70, 63, 79, 66, 79, 48, 53, 58, 57, 75, 75, 53, 79, 50, 50, 79, 71, 48, 52, 61, 83, 51, 63, 43, 68, 65, 62, 95, 72, 55, 79, 67, 61, 45, 67, 91, 59, 61, 79, 50, 64, 81, 60, 85, 73, 56, 75, 69, 65, 65, 45, 70, 75, 70, 54, 66, 45, 61, 67, 58, 52, 48, 59, 55, 69, 62, 58, 85, 71, 65, 66, 69, 63, 93, 75, 76, 68, 64, 64, 64, 64, 69, 66, 76, 71, 66, 49, 65, 67, 82, 60, 85, 48, 75, 68, 52, 62, 64, 51, 55, 63, 71, 62, 62, 69, 51, 66, 54, 61, 68, 69, 65, 78, 58, 48, 50, 88, 61, 68, 84, 65, 65, 47, 74, 71, 63, 67, 70, 76, 63, 78, 49, 91, 57, 86, 53, 103, 89, 88, 69, 58, 74, 59, 59, 66, 87, 55, 65, 92, 54, 71, 71, 47, 58, 68, 75, 61, 47, 61, 104, 58, 62, 53, 50, 38, 64, 56, 64, 61, 54, 54, 63, 79, 54, 84, 59, 53, 75, 77, 51, 83, 62, 56, 51, 118, 49, 54, 118, 57, 48, 69, 75, 63, 79, 73, 56, 61, 71, 90, 52, 79, 90, 56, 69, 78, 56, 63, 65, 74, 84, 62, 59, 97, 69, 77, 62, 74, 73, 69, 59, 83, 65, 81, 58, 66, 62, 77, 72, 68, 66, 68, 68, 70, 59, 68, 60, 67, 62, 93, 51, 75, 47, 62, 90, 57, 67, 67, 104, 63, 67, 63, 86, 46, 54, 69, 113, 55, 107, 58, 70, 68, 61, 62, 67, 71, 65, 55, 57, 63, 56, 70, 89, 48, 69, 56, 77, 64, 63, 57, 77, 74, 54, 48, 70, 95, 79, 73, 67, 56, 69, 75, 54, 57, 81, 66, 64, 60, 74, 71, 62, 69, 99, 75, 54, 66, 66, 66, 78, 55, 48, 43, 46, 63, 82, 61, 64, 64, 58, 63, 72, 55, 69, 62, 71, 53, 58, 49, 87, 57, 78, 105, 67, 70, 71, 70, 48, 67, 48, 72, 63, 53, 52, 54, 57, 50, 66, 73, 69, 48, 65, 72, 75, 65, 74, 64, 66, 46, 73, 75, 86, 76, 61, 73, 82, 65, 50, 55, 88, 63, 72, 65, 69, 88, 76, 53, 63, 86, 47, 86, 58, 61, 86, 76, 67, 51, 73, 56, 80, 65, 57, 72, 58, 54, 69, 76, 77, 72, 60, 55, 53, 63, 104, 57, 55, 66, 67, 50, 76, 60, 63, 58, 85, 87, 62, 74, 46, 75, 75, 71, 65, 73, 56, 54, 66, 57, 61, 65, 56, 67, 101, 77, 57, 47, 69, 73, 99, 69, 82, 65, 40, 53, 60, 75, 72, 71, 56, 65, 48, 50, 48, 61, 62, 40, 53, 75, 52, 65, 77, 55, 69, 53, 48, 52, 64, 81, 65, 77, 68, 55, 69, 40, 44, 61, 72, 60, 47, 79, 60, 39, 67, 51, 65, 59, 66, 39, 60, 79, 47, 57, 43, 93, 61, 65, 62, 54, 78, 65, 82, 51, 94, 90, 76, 72, 51, 52, 63, 54, 93, 71, 99, 72, 53, 63, 49, 55, 62, 70, 49, 67, 80, 69, 61, 60, 96, 50, 73, 103, 53, 65, 69, 63, 71, 66, 48, 70, 62, 70, 62, 53, 79, 47, 66, 81, 59, 55, 86, 81, 60, 58, 58, 68, 73, 67, 95, 62, 61, 62, 93, 45, 55, 57, 61, 92, 57, 74, 62, 48, 56, 67, 56, 70, 81, 61, 51, 94, 59, 77, 93, 75, 88, 64, 67, 62, 61, 39, 44, 54, 58, 59, 56, 56, 55, 74, 71, 64, 71, 49, 58, 62, 52, 74, 60, 67, 74, 70, 55, 51, 75, 80, 68, 63, 63, 55, 63, 72, 70, 68, 100, 72, 62, 103, 82, 51, 68, 74, 61, 72, 49, 63, 61, 55, 53, 68, 74, 63, 70, 72, 63, 54, 60, 58, 54, 65, 59, 80, 64, 54, 56, 65, 56, 40, 58, 62, 72, 83, 93, 70, 68, 73, 53, 69, 62, 66, 82, 76, 60, 68, 60, 32, 71, 48, 64, 56, 53, 63, 90, 82, 53, 81, 55, 58, 68, 67, 61, 67, 96, 57, 57, 78, 78, 81, 66, 56, 69, 63, 70, 58, 42, 60, 55, 56, 75, 56, 62, 69, 65, 120, 63, 
80, 66, 66, 58, 63, 67, 58, 79, 75, 81, 64, 72, 78, 51, 114, 65, 43, 89, 44, 74, 65, 58, 48, 77, 73, 54, 48, 51, 91, 86, 77, 50, 65, 50, 61, 62, 64, 64, 71, 57, 60, 59, 44, 61, 45, 54, 62, 70, 79, 60, 59, 57, 75, 59, 46, 75, 61, 51, 64, 86, 54, 80, 54, 54, 67, 60, 64, 72, 98, 69, 59, 60, 59, 68, 49, 71, 98, 78, 58, 60, 72, 65, 50, 74, 60, 146, 59, 66, 60, 47, 77, 62, 76, 75, 96, 58, 60, 71, 70, 53, 68, 78, 48, 46, 56, 58, 61, 44, 54, 52, 66, 57, 57, 56, 72, 84, 67, 49, 70, 65, 63, 66, 78, 79, 69, 68, 56, 57, 74, 46, 51, 59, 52, 90, 49, 50, 38, 51, 75, 73, 55, 68, 64, 75, 64, 65, 51, 51, 81, 54, 81, 70, 89, 80, 64, 57, 40, 46, 56, 64, 72, 58, 87, 57, 102, 64, 21, 69, 79, 74, 54, 81, 78, 36, 72, 54, 68, 46, 66, 63, 85, 90, 64, 101, 68, 58, 64, 61, 68, 43, 57, 62, 71, 89, 75, 82, 74, 68, 83, 63, 55, 74, 70, 69, 61, 61, 47, 73, 65, 74, 75, 55, 77, 95, 71, 52, 77, 62, 47, 77, 90, 72, 113, 72, 66, 61, 53, 89, 73, 56, 60, 76, 81, 73, 80, 79, 60, 48, 57, 71, 63, 75, 71, 61, 64, 55, 64, 54, 69, 77, 57, 82, 52, 105, 61, 69, 54, 55, 55, 57, 43, 51, 61, 75, 75, 68, 65, 56, 58, 45, 43, 73, 50, 57, 52, 53, 63, 93, 62, 60, 74, 56, 63, 63, 80, 79, 78, 55, 54, 66, 79, 102, 45, 58, 78, 67, 75, 63, 70, 82, 62, 71, 70, 63, 57, 64, 75, 69, 54, 64, 49, 76, 55, 62, 65, 63, 55, 59, 90, 63, 59, 80, 69, 88, 49, 55, 65, 60, 71, 67, 69, 82, 69, 61, 68, 60, 91, 56, 65, 55, 69, 43, 61, 89, 66, 52, 59, 56, 81, 43, 57, 119, 50, 60, 94, 57, 57, 87, 68, 85, 60, 105, 67, 70, 59, 56, 72, 74, 98, 37, 61, 61, 64, 62, 78, 81, 74, 63, 58, 76, 44, 41, 67, 62, 71, 39, 67, 36, 71, 45, 68, 71, 52, 79, 63, 61, 56, 61, 70, 49, 55, 52, 55, 58, 75, 98, 74, 79, 80, 71, 79, 51, 64, 64, 51, 61, 70, 82, 66, 54, 78, 95, 60, 68, 74, 64, 68, 63, 76, 92, 62, 67, 62, 59, 72, 76, 48, 69, 50, 93, 74, 91, 59, 84, 65, 61, 75, 54, 69, 55, 83, 67, 70, 95, 56, 71, 57, 60, 67, 71, 77, 70, 44, 75, 93, 63, 53, 67, 49, 48, 58, 57, 63, 65, 62, 115, 80, 80, 76, 57, 75, 58, 64, 63, 60, 42, 61, 51, 78, 73, 87, 56, 61, 83, 61, 60, 84, 76, 59, 62, 117, 69, 72, 104, 52, 66, 68, 72, 72, 67, 83, 68, 63, 56, 74, 56, 68, 68, 71, 55, 69, 51, 73, 61, 59, 65, 69, 51, 58, 72, 83, 53, 103, 85, 92, 50, 46, 95, 57, 54, 60, 49, 70, 73, 88, 74, 64, 60, 78, 62, 67, 88, 71, 66, 81, 52, 62, 59, 65, 65, 66, 66, 63, 77, 58, 66, 56, 37, 66, 74, 86, 68, 53, 113, 69, 85, 82, 47, 57, 52, 64, 61, 56, 63, 58, 89, 70, 77, 54, 73, 76, 70, 67, 50, 64, 80, 63, 66, 58, 46, 66, 72, 71, 65, 52, 67, 28, 42, 73, 62, 60, 65, 45, 75, 64, 77, 72, 55, 70, 57, 54, 60, 73, 61, 72, 86, 63, 55, 66, 49, 50, 42, 59, 48, 63, 67, 47, 47, 77, 63, 76, 72, 44, 89, 67, 81, 50, 63, 65, 55, 51, 48, 89, 39, 54, 57, 64, 41, 88, 38, 64, 63, 71, 77, 53, 76, 61, 121, 54, 53, 64, 66, 80, 71, 87, 81, 86, 81, 76, 65, 54, 63, 47, 104, 51, 55, 77, 69, 66, 57, 75, 59, 65, 52, 82, 57, 88, 62, 73, 54, 56, 53, 76, 59, 59, 83, 73, 74, 75, 84, 77, 45, 53, 42, 66, 51, 69, 99, 72, 61, 75, 53, 70, 70, 94, 64, 68, 61, 59, 73, 57, 60, 63, 59, 59, 55, 56, 75, 58, 63, 61, 66, 60, 49, 80, 100, 60, 70, 85, 61, 76, 66, 48, 83, 54, 74, 66, 64, 87, 68, 49, 51, 63, 44, 72, 56, 69, 76, 67, 47, 60, 73, 64, 58, 54, 60, 59, 51, 62, 57, 68, 59, 48, 82, 57, 47, 56, 50, 58, 69, 61, 64, 75, 69, 79, 57, 66, 54, 57, 66, 43, 59, 53, 98, 50, 78, 91, 52, 42, 78, 57, 87, 63, 83, 67, 70, 67, 69, 47, 79, 55, 63, 54, 91, 63, 110, 61, 86, 62, 70, 35, 69, 60, 70, 50, 80, 67, 61, 68, 105, 75, 73, 118, 64, 56, 76, 74, 65, 45, 73, 68, 65, 91, 70, 52, 62, 49, 57, 55, 46, 67, 54, 66, 50, 38, 69, 69, 46, 66, 77, 73, 66, 74, 69, 54, 48, 62, 62, 74, 64, 81, 87, 60, 
43, 53, 56, 51, 47, 72, 72, 88, 78, 83, 87, 72, 66, 75, 69, 81, 70, 65, 57, 64, 64, 47, 53, 67, 60, 54, 78, 64, 58, 34, 69, 65, 60, 57, 65, 60, 54, 97, 61, 64, 75, 46, 64, 62, 50, 101, 73, 62, 62, 65, 74, 59, 59, 79, 72, 70, 68, 99, 55, 48, 74, 57, 68, 56, 52, 70, 79, 43, 75, 113, 57, 49, 62, 63, 57, 71, 59, 73, 69, 48, 59, 40, 65, 72, 79, 59, 62, 65, 61, 62, 70, 48, 75, 50, 62, 85, 60, 63, 82, 77, 50, 60, 70, 43, 70, 45, 75, 91, 48, 71, 67, 75, 70, 71, 61, 46, 61, 51, 65, 57, 67, 64, 88, 104, 52, 51, 55, 68, 83, 59, 60, 58, 53, 58, 81, 52, 67, 68, 59, 100, 66, 79, 63, 46, 62, 43, 51, 74, 54, 75, 54, 56, 49, 82, 77, 59, 46, 76, 72, 66, 79, 60, 48, 57, 72, 59, 75, 85, 62, 66, 57, 82, 77, 72, 75, 71, 51, 61, 65, 78, 61, 64, 57, 69, 57, 74, 58, 101, 40, 66, 48, 48, 76, 48, 69, 41, 87, 65, 72, 57, 68, 85, 63, 80, 58, 49, 52, 62, 66, 84, 70, 67, 53, 58, 49, 60, 59, 47, 65, 56, 60, 85, 72, 52, 89, 83, 112, 72, 74, 65, 64, 56, 70, 41, 66, 69, 55, 63, 59, 60, 68, 76, 64, 56, 80, 63, 52, 108, 75, 80, 47, 72, 67, 68, 58, 82, 68, 66, 64, 58, 77, 60, 65, 72, 95, 65, 55, 70, 50, 65, 69, 79, 57, 50, 53, 73, 70, 69, 62, 54, 67, 66, 63, 62, 73, 57, 97, 90, 71, 68, 61, 54, 74, 61, 57, 68, 60, 57, 72, 66, 56, 117, 59, 63, 81, 48, 65, 125, 90, 84, 60, 55, 55, 78, 71, 70, 59, 75, 72, 72, 72, 73, 39, 63, 80, 81, 53, 65, 57, 78, 54, 69, 124, 59, 79, 64, 101, 75, 61, 70, 67, 77, 83, 54, 41, 60, 60, 65, 72, 59, 68, 81, 62, 51, 79, 77, 67, 73, 59, 71, 66, 54, 68, 97, 59, 78, 70, 37, 61, 83, 65, 63, 69, 78, 66, 54, 64, 57, 70, 58, 60, 48, 54, 103, 70, 61, 69, 78, 79, 53, 68, 57, 59, 59, 67, 64, 65, 71, 78, 66, 76, 54, 52, 61, 54, 51, 67, 62, 48, 73, 57, 33, 75, 82, 53, 53, 65, 63, 54, 53, 79, 55, 67, 64, 61, 93, 52, 65, 92, 53, 82, 82, 83, 48, 67, 50, 70, 69, 61, 82, 42, 65, 55, 67, 65, 76, 65, 66, 46, 72, 84, 54, 76, 64, 74, 81, 79, 66, 72, 67, 59, 75, 72, 73, 60, 78, 57, 58, 45, 64, 50, 61, 70, 70, 43, 68, 70, 85, 84, 63, 73, 66, 86, 87, 60, 58, 54, 52, 57, 43, 63, 86, 127, 54, 56, 41, 92, 104, 67, 63, 93, 47, 77, 84, 84, 55, 52, 51, 78, 48, 66, 61, 65, 52, 81, 65, 56, 72, 59, 51, 76, 66, 62, 63, 93, 82, 78, 51, 53, 69, 56, 84, 58, 59, 59, 71, 68, 56, 58, 62, 46, 62, 70, 50, 52, 47, 43, 44, 72, 67, 53, 58, 89, 70, 69, 60, 61, 62, 83, 44, 76, 53, 59, 54, 54, 50, 60, 40, 65, 57, 78, 61, 61, 61, 61, 59, 69, 52, 56, 63, 68, 48, 60, 61, 54, 48, 63, 57, 52, 75, 53, 71, 65, 117, 87, 75, 48, 68, 57, 65, 55, 64, 68, 56, 82, 58, 50, 54, 55, 83, 92, 78, 61, 66, 62, 90, 61, 46, 92, 66, 83, 67, 46, 66, 44, 86, 79, 65, 75, 67, 74, 94, 67, 57, 63, 65, 49, 48, 55, 59, 58, 100, 54, 80, 44, 63, 84, 74, 48, 52, 77, 61, 74, 75, 46, 65, 72, 68, 60, 55, 58, 80, 64, 56, 72, 89, 76, 69, 58, 92, 91, 49, 61, 57, 61, 59, 73, 75, 56, 79, 58, 56, 77, 76, 57, 56, 59, 82, 76, 66, 79, 49, 75, 54, 73, 62, 49, 60, 64, 65, 78, 38, 68, 76, 69, 62, 60, 93, 61, 53, 65, 49, 61, 70, 51, 46, 65, 55, 60, 54, 80, 62, 52, 62, 80, 57, 57, 54, 94, 60, 66, 63, 71, 94, 77, 41, 60, 76, 60, 59, 71, 86, 59, 61, 50, 73, 71, 46, 57, 61, 65, 58, 84, 53, 79, 65, 60, 77, 74, 70, 60, 56, 68, 69, 41, 89, 80, 53, 66, 94, 60, 63, 70, 70, 76, 51, 53, 69, 62, 68, 59, 60, 70, 67, 52, 62, 83, 46, 67, 73, 75, 60, 58, 49, 40, 52, 49, 56, 72, 60, 57, 78, 61, 69, 61, 94, 50, 76, 70, 57, 54, 76, 58, 66, 69, 46, 68, 69, 73, 74, 55, 49, 61, 81, 109, 55, 88, 60, 68, 73, 51, 61, 54, 85, 64, 58, 76, 60, 55, 57, 76, 55, 62, 74, 67, 63, 49, 55, 77, 57, 92, 67, 55, 73, 59, 51, 58, 58, 106, 63, 70, 66, 61, 45, 72, 67, 59, 63, 71, 58, 69, 66, 50, 86, 80, 53, 49, 69, 58, 75, 63, 62, 69, 43, 
66, 90, 62, 51, 73, 66, 61, 66, 71, 60, 62, 83, 94, 69, 61, 89, 55, 75, 72, 52, 82, 67, 32, 51, 71, 75, 55, 58, 87, 56, 76, 67, 66, 50, 66, 72, 86, 84, 62, 72, 55, 48, 55, 65, 46, 74, 60, 67, 70, 37, 90, 69, 79, 62, 74, 74, 72, 51, 61, 61, 55, 65, 52, 67, 47, 60, 53, 62, 60, 65, 64, 60, 75, 54, 54, 53, 58, 65, 94, 61, 64, 65, 71, 83, 50, 65, 98, 62, 67, 43, 60, 45, 82, 58, 60, 83, 71, 58, 95, 56, 111, 72, 73, 64, 52, 76, 51, 66, 35, 65, 70, 86, 71, 78, 67, 68, 59, 68, 50, 71, 51, 65, 66, 64, 63, 71, 90, 66, 75, 60, 76, 52, 49, 64, 56, 57, 63, 80, 56, 85, 70, 60, 50, 61, 49, 56, 52, 78, 73, 42, 57, 67, 65, 85, 70, 45, 68, 69, 68, 57, 73, 63, 86, 59, 59, 59, 50, 61, 69, 74, 69, 59, 65, 69, 65, 62, 64, 58, 66, 87, 56, 77, 83, 54, 59, 63, 57, 75, 50, 66, 64, 71, 59, 90, 69, 96, 50, 53, 58, 68, 79, 64, 59, 96, 56, 69, 62, 71, 70, 80, 69, 74, 63, 94, 77, 65, 58, 42, 103, 57, 62, 52, 54, 55, 66, 70, 70, 63, 78, 80, 70, 61, 74, 74, 76, 64, 85, 74, 48, 69, 51, 63, 74, 61, 87, 71, 61, 67, 67, 64, 64, 71, 80, 73, 59, 68, 81, 62, 73, 84, 67, 52, 66, 73, 49, 64, 82, 70, 97, 67, 69, 46, 50, 98, 70, 60, 71, 57, 63, 80, 67, 62, 73, 43, 64, 69, 56, 70, 88, 71, 53, 81, 60, 58, 62, 59, 56, 94, 68, 64, 59, 42, 54, 57, 67, 57, 74, 91, 87, 65, 67, 71, 74, 81, 67, 59, 64, 70, 62, 65, 78, 73, 46, 56, 73, 83, 59, 55, 63, 69, 53, 61, 49, 57, 58, 62, 78, 76, 71, 80, 83, 42, 69, 48, 50, 89, 101, 61, 47, 47, 57, 49, 62, 54, 58, 60, 53, 80, 46, 69, 59, 53, 85, 53, 62, 58, 70, 56, 58, 45, 98, 78, 74, 78, 37, 57, 70, 79, 55, 70, 82, 64, 61, 60, 69, 72, 81, 61, 53, 88, 70, 69, 64, 57, 64, 77, 65, 58, 59, 58, 56, 61, 70, 45, 82, 90, 61, 62, 64, 103, 70, 58, 61, 66, 31, 67, 70, 76, 45, 56, 77, 93, 50, 63, 55, 65, 78, 57, 70, 91, 48, 61, 36, 79, 74, 67, 63, 59, 82, 77, 61, 55, 57, 56, 58, 56, 72, 56, 64, 57, 67, 80, 72, 60, 61, 81, 95, 65, 55, 64, 71, 55, 57, 92, 80, 57, 70, 80, 50, 65, 60, 108, 87, 61, 66, 58, 56, 51, 64, 63, 81, 78, 60, 47, 58, 57, 67, 51, 73, 59, 54, 62, 68, 52, 54, 57, 74, 64, 77, 106, 59, 56, 99, 67, 77, 48, 57, 69, 54, 63, 69, 82, 96, 50, 56, 55, 75, 56, 90, 64, 69, 65, 60, 62, 76, 67, 72, 71, 64, 89, 53, 71, 55, 68, 55, 73, 39, 63, 64, 53, 97, 80, 63, 84, 70, 67, 48, 58, 63, 65, 82, 45, 50, 47, 61, 73, 52, 66, 75, 69, 63, 71, 57, 103, 62, 56, 66, 44, 66, 63, 70, 73, 63, 34, 61, 71, 63, 76, 71, 54, 63, 95, 65, 69, 56, 59, 66, 64, 56, 49, 64, 60, 80, 59, 60, 64, 61, 81, 74, 64, 65, 69, 63, 73, 98, 52, 45, 72, 66, 44, 50, 64, 90, 58, 59, 77, 75, 63, 71, 79, 67, 59, 59, 69, 64, 45, 78, 56, 71, 61, 91, 61, 61, 62, 57, 52, 69, 91, 56, 55, 47, 43, 54, 54, 63, 62, 62, 65, 73, 43, 60, 78, 70, 44, 70, 54, 51, 46, 73, 66, 78, 63, 122, 64, 49, 49, 47, 60, 65, 65, 68, 74, 94, 59, 77, 42, 57, 58, 75, 67, 80, 63, 70, 60, 51, 97, 61, 62, 53, 58, 68, 71, 59, 73, 54, 72, 68, 51, 82, 51, 68, 61, 53, 56, 119, 56, 68, 56, 87, 66, 75, 54, 64, 48, 62, 81, 64, 61, 72, 51, 55, 68, 78, 69, 65, 59, 52, 60, 66, 71, 63, 50, 64, 101, 65, 53, 61, 74, 80, 54, 53, 73, 70, 73, 72, 81, 52, 48, 41, 97, 48, 45, 51, 73, 81, 75, 59, 59, 65, 78, 50, 61, 85, 52, 69, 85, 79, 89, 72, 66, 90, 71, 73, 64, 91, 54, 74, 66, 67, 68, 56, 50, 97, 69, 64, 72, 43, 41, 56, 62, 64, 55, 65, 84, 67, 39, 69, 57, 69, 57, 71, 54, 54, 58, 68, 58, 116, 61, 63, 49, 63, 67, 75, 64, 54, 85, 61, 74, 68, 69, 58, 71, 57, 54, 67, 61, 66, 60, 66, 69, 57, 75, 81, 51, 82, 85, 67, 63, 69, 65, 86, 79, 70, 58, 68, 62, 53, 81, 68, 53, 53, 111, 72, 61, 74, 70, 77, 50, 55, 64, 83, 72, 79, 52, 63, 59, 55, 65, 74, 61, 59, 92, 58, 84, 59, 77, 59, 64, 41, 67, 58, 60, 87, 54, 41, 
79, 88, 74, 57, 58, 57, 58, 63, 63, 47, 73, 102, 45, 57, 57, 52, 65, 80, 58, 76, 56, 72, 81, 56, 102, 68, 88, 59, 60, 93, 94, 73, 61, 73, 50, 65, 60, 55, 66, 78, 60, 93, 50, 71, 70, 56, 57, 54, 73, 60, 74, 79, 77, 99, 60, 68, 53, 58, 81, 51, 78, 66, 61, 84, 75, 65, 55, 67, 67, 50, 56, 55, 69, 56, 64, 63, 80, 63, 66, 33, 46, 57, 72, 124, 70, 68, 71, 61, 58, 56, 48, 85, 54, 65, 71, 69, 79, 53, 65, 65, 85, 66, 64, 64, 43, 48, 109, 94, 66, 61, 79, 96, 88, 70, 69, 73, 60, 58, 68, 55, 57, 70, 65, 60, 64, 70, 72, 59, 74, 72, 63, 70, 62, 75, 69, 64, 94, 76, 65, 62, 67, 74, 62, 53, 57, 58, 79, 64, 50, 61, 53, 110, 49, 54, 65, 53, 100, 63, 67, 54, 70, 61, 62, 54, 67, 53, 62, 79, 68, 68, 50, 57, 79, 59, 72, 49, 76, 57, 49, 53, 60, 70, 68, 70, 76, 56, 68, 94, 56, 103, 68, 52, 61, 70, 77, 68, 47, 78, 68, 69, 65, 87, 64, 59, 58, 55, 57, 47, 77, 74, 60, 70, 54, 47, 76, 61, 68, 80, 84, 57, 69, 47, 64, 70, 59, 127, 84, 67, 61, 68, 73, 47, 60, 71, 76, 62, 59, 52, 70, 75, 60, 53, 68, 69, 79, 69, 58, 71, 41, 60, 94, 75, 56, 62, 78, 69, 60, 55, 69, 92, 74, 54, 64, 69, 46, 63, 66, 75, 57, 65, 60, 73, 56, 60, 76, 61, 51, 76, 43, 93, 59, 57, 68, 84, 69, 63, 74, 63, 75, 70, 84, 69, 71, 67, 36, 64, 65, 79, 66, 62, 74, 82, 64, 62, 63, 64, 52, 80, 75, 67, 77, 68, 79, 79, 40, 58, 61, 67, 79, 77, 63, 76, 60, 52, 76, 65, 65, 67, 65, 59, 86, 68, 45, 63, 92, 66, 66, 68, 79, 64, 55, 66, 83, 58, 86, 81, 96, 97, 76, 77, 71, 59, 43, 71, 58, 76, 78, 116, 55, 57, 58, 72, 65, 71, 75, 65, 50, 85, 85, 67, 65, 70, 72, 82, 71, 57, 68, 57, 98, 75, 66, 83, 73, 49, 68, 57, 66, 65, 68, 61, 66, 83, 84, 62, 52, 86, 55, 56, 67, 63, 68, 83, 79, 67, 64, 75, 78, 76, 53, 70, 53, 47, 101, 72, 61, 74, 74, 105, 67, 58, 66, 98, 59, 64, 72, 56, 59, 91, 103, 60, 91, 73, 82, 62, 80, 69, 58, 88, 70, 56, 52, 65, 66, 57, 74, 41, 75, 31, 57, 75, 76, 60, 67, 54, 74, 55, 69, 64, 51, 70, 79, 63, 63, 72, 79, 81, 62, 51, 53, 61, 65, 64, 59, 52, 54, 62, 53, 62, 44, 65, 58, 83, 62, 65, 87, 55, 64, 69, 45, 63, 66, 69, 65, 59, 72, 71, 70, 65, 63, 50, 66, 66, 70, 64, 67, 72, 62, 89, 57, 101, 75, 95, 48, 50, 74, 62, 52, 56, 48, 72, 65, 60, 75, 54, 73, 79, 85, 55, 70, 63, 65, 76, 70, 59, 47, 61, 86, 55, 63, 56, 67, 52, 57, 90, 46, 51, 69, 58, 68, 65, 65, 98, 78, 62, 70, 51, 80, 60, 66, 68, 60, 95, 64, 61, 63, 50, 72, 59, 99, 61, 56, 77, 46, 75, 84, 63, 61, 48, 58, 66, 74, 59, 48, 63, 91, 70, 61, 64, 41, 88, 75, 79, 70, 68, 62, 63, 64, 80, 51, 63, 60, 64, 96, 67, 71, 71, 55, 60, 48, 62, 98, 64, 60, 65, 74, 57, 62, 59, 82, 97, 87, 72, 59, 57, 63, 52, 36, 69, 64, 61, 61, 91, 56, 55, 75, 60, 62, 47, 44, 67, 71, 94, 56, 61, 61, 59, 68, 60, 59, 58, 106, 69, 64, 52, 79, 59, 65, 58, 71, 58, 72, 54, 68, 74, 66, 59, 60, 72, 100, 44, 67, 70, 56, 63, 50, 56, 53, 56, 53, 58, 47, 59, 63, 53, 66, 70, 50, 61, 59, 90, 81, 68, 69, 71, 59, 71, 58, 55, 71, 60, 72, 66, 83, 53, 47, 49, 61, 65, 82, 62, 81, 65, 65, 67, 77, 50, 48, 63, 63, 67, 79, 82, 51, 64, 63, 54, 89, 74, 72, 54, 84, 63, 48, 52, 51, 53, 85, 74, 64, 42, 62, 56, 69, 99, 105, 60, 56, 61, 56, 72, 76, 74, 71, 78, 72, 64, 78, 72, 71, 79, 62, 90, 70, 62, 71, 65, 72, 48, 77, 56, 55, 36, 46, 54, 59, 69, 74, 60, 64, 49, 62, 59, 55, 65, 58, 64, 57, 62, 45, 62, 59, 63, 71, 65, 59, 67, 81, 46, 51, 62, 46, 96, 70, 77, 65, 58, 48, 60, 50, 53, 66, 99, 52, 77, 67, 62, 63, 68, 65, 59, 58, 55, 69, 78, 92, 47, 55, 59, 65, 48, 65, 58, 64, 52, 61, 38, 74, 54, 72, 72, 60, 64, 66, 120, 63, 68, 70, 79, 61, 65, 68, 64, 66, 74, 55, 59, 57, 60, 61, 58, 82, 78, 84, 47, 64, 67, 74, 88, 58, 63, 78, 57, 89, 72, 66, 67, 91, 55, 54, 51, 93, 62, 73, 70, 
62, 57, 63, 48, 93, 68, 65, 66, 74, 69, 63, 75, 101, 63, 82, 38, 61, 63, 75, 67, 62, 73, 91, 57, 63, 90, 55, 65, 52, 54, 61, 82, 60, 101, 57, 74, 63, 83, 60, 66, 69, 81, 52, 88, 78, 56, 57, 64, 54, 66, 38, 78, 79, 68, 80, 52, 57, 70, 52, 83, 60, 117, 56, 63, 74, 44, 60, 53, 60, 56, 73, 67, 65, 77, 61, 59, 72, 57, 62, 78, 83, 47, 70, 67, 62, 59, 63, 85, 50, 52, 69, 64, 59, 51, 80, 95, 72, 49, 60, 64, 56, 57, 60, 82, 57, 96, 100, 60, 50, 52, 83, 81, 66, 62, 80, 61, 53, 86, 68, 58, 56, 62, 70, 63, 74, 48, 56, 57, 57, 58, 56, 55, 55, 81, 46, 71, 57, 62, 62, 49, 61, 75, 48, 61, 68, 67, 67, 63, 64, 43, 64, 95, 71, 72, 59, 61, 60, 63, 62, 69, 62, 70, 80, 60, 55, 66, 70, 63, 60, 68, 66, 67, 64, 53, 68, 59, 62, 100, 62, 69, 57, 66, 65, 51, 50, 50, 80, 69, 71, 73, 70, 67, 69, 128, 71, 63, 62, 53, 59, 76, 84, 58, 55, 77, 78, 67, 63, 77, 45, 57, 70, 50, 57, 70, 47, 54, 82, 49, 67, 65, 79, 54, 55, 100, 75, 80, 67, 73, 53, 69, 67, 47, 59, 88, 81, 74, 83, 54, 65, 59, 55, 68, 63, 59, 78, 55, 54, 87, 40, 61, 54, 76, 75, 58, 81, 65, 56, 90, 58, 62, 68, 60, 59, 74, 82, 67, 60, 48, 64, 90, 64, 54, 54, 84, 53, 71, 93, 61, 51, 66, 64, 78, 69, 77, 78, 76, 53, 62, 72, 74, 62, 78, 82, 64, 59, 66, 80, 59, 66, 53, 48, 61, 84, 66, 75, 70, 76, 116, 85, 55, 67, 64, 96, 70, 69, 62, 64, 65, 69, 62, 68, 54, 58, 54, 88, 54, 63, 67, 51, 85, 53, 69, 62, 65, 67, 65, 97, 74, 40, 61, 80, 68, 65, 59, 66, 60, 69, 71, 87, 68, 62, 62, 67, 71, 48, 53, 64, 71, 98, 62, 67, 62, 61, 63, 53, 113, 51, 73, 66, 80, 82, 76, 70, 60, 78, 64, 54, 56, 74, 65, 76, 65, 65, 57, 65, 58, 75, 56, 51, 61, 99, 59, 44, 61, 50, 54, 63, 61, 57, 84, 53, 64, 63, 55, 73, 76, 54, 41, 56, 63, 65, 62, 56, 59, 54, 56, 65, 75, 64, 104, 59, 61, 75, 72, 63, 55, 59, 69, 84, 68, 71, 68, 60, 62, 60, 58, 49, 58, 64, 50, 58, 60, 62, 59, 60, 65, 60, 68, 71, 59, 67, 60, 66, 58, 84, 78, 55, 59, 64, 86, 63, 57, 51, 66, 92, 66, 67, 58, 57, 66, 84, 64, 43, 62, 80, 58, 64, 75, 86, 60, 60, 71, 77, 76, 67, 61, 79, 71, 96, 90, 57, 52, 85, 51, 62, 72, 67, 66, 63, 87, 67, 67, 64, 64, 75, 68, 51, 64, 70, 65, 70, 80, 52, 86, 65, 76, 74, 68, 63, 76, 65, 68, 60, 66, 54, 78, 69, 67, 65, 69, 53, 79, 70, 71, 77, 71, 68, 67, 66, 68, 60, 63, 90, 65, 66, 70, 79, 41, 89, 46, 43, 70, 95, 65, 62, 67, 79, 66, 55, 56, 59, 59, 72, 67, 55, 99, 63, 61, 97, 57, 83, 60, 60, 64, 69, 65, 59, 44, 73, 77, 69, 70, 75, 68, 78, 59, 75, 44, 52, 82, 67, 61, 71, 52, 66, 69, 77, 61, 62, 50, 68, 46, 57, 53, 63, 78, 84, 38, 81, 51, 64, 64, 76, 65, 38, 69, 69, 59, 61, 57, 64, 43, 71, 68, 95, 50, 63, 56, 72, 56, 51, 63, 74, 59, 61, 67, 66, 70, 66, 44, 57, 64, 62, 70, 61, 85, 62, 94, 81, 104, 66, 56, 72, 64, 58, 69, 60, 49, 62, 66, 62, 61, 71, 84, 58, 68, 44, 66, 79, 62, 69, 53, 73, 57, 46, 75, 65, 68, 59, 89, 64, 75, 69, 95, 55, 53, 52, 66, 68, 99, 73, 73, 63, 69, 73, 74, 66, 54, 61, 83, 54, 97, 71, 62, 64, 54, 84, 54, 98, 62, 51, 90, 46, 64, 59, 74, 77, 57, 58, 68, 57, 68, 70, 55, 60, 88, 64, 73, 41, 67, 72, 83, 57, 65, 54, 66, 72, 76, 64, 68, 52, 54, 53, 79, 68, 63, 45, 78, 66, 61, 75, 84, 63, 73, 56, 62, 55, 61, 52, 74, 68, 73, 53, 59, 94, 65, 77, 64, 65, 57, 72, 85, 71, 48, 76, 74, 70, 62, 42, 54, 63, 67, 65, 68, 57, 53, 47, 52, 55, 63, 90, 58, 67, 55, 76, 77, 68, 58, 76, 80, 61, 57, 63, 61, 66, 59, 53, 60, 84, 53, 53, 46, 68, 60, 37, 75, 71, 62, 73, 88, 70, 64, 73, 58, 54, 52, 65, 52, 43, 75, 71, 64, 74, 74, 57, 60, 55, 52, 58, 67, 36, 59, 64, 63, 66, 54, 68, 63, 52, 71, 58, 48, 59, 47, 96, 52, 67, 60, 74, 77, 81, 68, 57, 71, 66, 67, 61, 67, 65, 66, 89, 65, 74, 53, 79, 55, 53, 59, 70, 57, 71, 64, 64, 82, 66, 61, 
56, 56, 103, 63, 59, 63, 63, 74, 75, 56, 92, 56, 71, 71, 60, 70, 51, 55, 70, 61, 78, 60, 57, 57, 66, 112, 57, 74, 58, 74, 103, 62, 73, 66, 65, 81, 60, 62, 54, 63, 68, 63, 67, 74, 86, 51, 56, 89, 62, 72, 81, 64, 71, 71, 56, 111, 56, 77, 90, 66, 77, 60, 97, 60, 74, 73, 58, 85, 52, 49, 58, 76, 49, 72, 68, 57, 60, 38, 53, 70, 62, 55, 49, 54, 61, 71, 70, 62, 89, 87, 63, 55, 86, 69, 68, 70, 68, 86, 56, 51, 65, 110, 48, 59, 56, 45, 60, 64, 52, 49, 67, 87, 60, 82, 54, 61, 57, 65, 51, 55, 49, 55, 92, 52, 82, 98, 55, 53, 65, 66, 67, 73, 64, 59, 75, 69, 47, 48, 67, 57, 49, 61, 58, 84, 78, 57, 60, 59, 77, 56, 70, 69, 95, 55, 77, 51, 53, 71, 54, 87, 70, 62, 60, 62, 64, 69, 74, 57, 72, 66, 65, 83, 72, 63, 78, 63, 55, 59, 52, 59, 56, 99, 43, 83, 65, 86, 97, 56, 34, 80, 73, 68, 79, 68, 68, 66, 58, 67, 71, 117, 64, 63, 76, 53, 58, 76, 56, 60, 67, 62, 59, 87, 70, 68, 61, 66, 58, 90, 107, 70, 51, 63, 66, 52, 45, 86, 91, 56, 62, 54, 66, 71, 59, 86, 66, 84, 68, 54, 55, 87, 39, 95, 56, 111, 73, 61, 64, 57, 80, 48, 62, 89, 57, 55, 70, 81, 54, 81, 72, 73, 82, 61, 57, 88, 68, 85, 63, 64, 68, 64, 54, 61, 58, 70, 70, 48, 60, 49, 65, 59, 65, 61, 61, 54, 83, 63, 73, 64, 70, 108, 73, 65, 72, 69, 65, 64, 74, 74, 49, 70, 51, 57, 47, 70, 81, 63, 71, 38, 66, 63, 72, 68, 57, 78, 52, 89, 84, 59, 67, 70, 76, 94, 60, 68, 75, 78, 66, 65, 60, 58, 63, 107, 79, 69, 65, 54, 66, 64, 60, 54, 62, 73, 91, 56, 60, 102, 54, 56, 55, 68, 71, 57, 86, 60, 65, 59, 66, 47, 84, 65, 57, 76, 54, 66, 91, 67, 67, 68, 79, 61, 68, 69, 57, 67, 71, 55, 67, 70, 62, 63, 72, 70, 81, 69, 70, 53, 86, 69, 63, 73, 62, 80, 67, 50, 53, 82, 65, 58, 45, 57, 71, 51, 65, 65, 75, 51, 67, 109, 56, 56, 54, 57, 55, 57, 59, 71, 52, 64, 49, 69, 55, 65, 65, 78, 70, 85, 72, 78, 66, 57, 74, 66, 69, 59, 51, 53, 66, 62, 55, 68, 78, 49, 80, 61, 54, 112, 62, 76, 54, 72, 84, 44, 61, 63, 81, 61, 54, 78, 59, 55, 56, 60, 81, 61, 59, 72, 60, 66, 70, 90, 60, 71, 55, 62, 97, 66, 54, 40, 65, 66, 67, 55, 65, 77, 61, 70, 55, 71, 75, 72, 69, 60, 79, 48, 68, 105, 75, 76, 86, 73, 61, 63, 88, 80, 54, 62, 83, 60, 66, 76, 53, 63, 48, 66, 54, 84, 53, 58, 60, 58, 50, 61, 61, 56, 74, 68, 94, 66, 107, 74, 66, 63, 89, 81, 54, 61, 62, 58, 64, 68, 53, 48, 71, 64, 64, 50, 59, 67, 62, 61, 59, 46, 63, 63, 64, 71, 66, 98, 71, 67, 84, 80, 55, 71, 54, 74, 68, 62, 50, 48, 66, 73, 45, 47, 73, 62, 60, 61, 67, 49, 65, 61, 74, 50, 56, 57, 77, 71, 68, 41, 64, 56, 65, 58, 69, 45, 68, 59, 44, 38, 64, 56, 60, 74, 52, 59, 72, 86, 66, 62, 52, 64, 77, 63, 92, 54, 86, 78, 65, 61, 86, 62, 70, 59, 102, 56, 71, 106, 70, 54, 65, 70, 61, 59, 58, 74, 68, 106, 65, 63, 67, 77, 68, 70, 57, 69, 65, 75, 76, 61, 68, 69, 76, 47, 62, 58, 68, 73, 65, 53, 64, 58, 63, 50, 65, 84, 70, 62, 64, 79, 71, 63, 68, 67, 49, 72, 62, 77, 61, 53, 81, 71, 85, 72, 84, 85, 79, 64, 58, 37, 90, 53, 48, 59, 60, 79, 50, 77, 65, 65, 62, 75, 56, 63, 60, 91, 90, 48, 85, 66, 76, 65, 72, 67, 73, 69, 57, 55, 61, 66, 56, 72, 64, 90, 75, 77, 60, 69, 72, 59, 58, 61, 78, 64, 58, 55, 51, 52, 68, 75, 57, 63, 55, 66, 67, 61, 72, 90, 74, 58, 63, 65, 45, 52, 62, 45, 83, 91, 65, 74, 60, 63, 94, 55, 65, 83, 108, 68, 57, 66, 75, 59, 60, 67, 63, 59, 70, 65, 58, 55, 47, 71, 70, 93, 63, 86, 76, 91, 76, 71, 59, 109, 58, 61, 80, 61, 70, 57, 57, 68, 57, 63, 71, 66, 60, 89, 75, 40, 54, 66, 69, 65, 67, 55, 54, 63, 60, 46, 66, 74, 55, 88, 46, 67, 77, 56, 68, 58, 64, 62, 53, 55, 65, 64, 64, 60, 65, 57, 65, 99, 68, 60, 67, 59, 55, 60, 92, 73, 70, 58, 54, 64, 69, 76, 61, 56, 62, 62, 63, 50, 72, 52, 74, 71, 53, 69, 58, 73, 60, 50, 68, 57, 71, 51, 62, 48, 49, 62, 66, 75, 123, 92, 70, 
58, 61, 68, 61, 64, 83, 77, 57, 83, 74, 63, 49, 62, 67, 84, 56, 65, 45, 57, 53, 61, 87, 52, 54, 56, 50, 65, 61, 60, 56, 62, 66, 82, 62, 68, 62, 79, 76, 71, 64, 64, 67, 61, 73, 56, 98, 62, 51, 62, 60, 65, 66, 60, 58, 64, 45, 58, 62, 56, 69, 73, 60, 65, 98, 58, 68, 74, 59, 60, 74, 104, 49, 58, 61, 67, 74, 50, 73, 65, 58, 76, 58, 60, 60, 74, 63, 55, 71, 59, 58, 51, 66, 76, 58, 62, 64, 72, 64, 59, 59, 68, 54, 65, 55, 58, 73, 73, 60, 55, 54, 45, 73, 62, 62, 73, 81, 60, 54, 71, 79, 54, 55, 56, 51, 61, 89, 61, 76, 67, 61, 83, 48, 67, 60, 60, 49, 75, 71, 64, 71, 53, 91, 57, 101, 81, 61, 104, 71, 75, 59, 65, 86, 64, 83, 74, 57, 84, 91, 63, 59, 52, 62, 56, 70, 62, 74, 89, 57, 74, 94, 68, 102, 66, 78, 53, 67, 75, 62, 55, 72, 65, 63, 81, 51, 49, 84, 78, 49, 64, 54, 77, 56, 48, 68, 64, 56, 99, 72, 59, 78, 77, 64, 62, 90, 64, 80, 65, 65, 84, 47, 54, 60, 62, 55, 67, 58, 77, 55, 78, 71, 77, 66, 55, 55, 58, 54, 58, 65, 55, 72, 62, 60, 78, 82, 60, 42, 66, 54, 90, 70, 64, 62, 105, 66, 57, 47, 68, 66, 70, 73, 59, 62, 65, 72, 66, 58, 50, 69, 82, 66, 66, 56, 67, 58, 73, 64, 73, 71, 68, 64, 57, 72, 79, 48, 56, 68, 72, 95, 62, 63, 58, 66, 51, 67, 84, 85, 48, 81, 61, 66, 62, 71, 61, 90, 70, 59, 47, 59, 61, 56, 57, 122, 72, 66, 52, 68, 78, 70, 49, 54, 58, 67, 74, 51, 59, 60, 90, 53, 80, 52, 59, 78, 85, 80, 78, 62, 75, 60, 54, 64, 51, 101, 63, 99, 65, 57, 64, 59, 63, 75, 103, 73, 65, 57, 58, 55, 45, 72, 63, 69, 59, 64, 73, 58, 57, 64, 66, 67, 76, 51, 91, 58, 59, 66, 57, 42, 92, 80, 68, 62, 66, 79, 61, 65, 78, 71, 67, 49, 71, 43, 69, 71, 75, 60, 127, 62, 62, 55, 69, 60, 51, 70, 59, 75, 78, 59, 76, 66, 53, 70, 73, 59, 93, 60, 53, 76, 73, 53, 48, 62, 75, 79, 59, 64, 59, 67, 74, 53, 53, 65, 89, 57, 67, 62, 62, 52, 54, 67, 67, 78, 96, 68, 68, 68, 81, 68, 56, 65, 54, 51, 68, 57, 55, 57, 64, 62, 52, 69, 71, 66, 63, 65, 61, 56, 56, 62, 59, 74, 60, 58, 87, 77, 62, 72, 57, 59, 68, 53, 54, 57, 72, 73, 125, 51, 57, 56, 53, 70, 70, 68, 45, 58, 67, 106, 81, 65, 38, 58, 62, 67, 48, 66, 77, 68, 66, 65, 67, 67, 74, 96, 67, 50, 93, 59, 59, 47, 69, 60, 80, 53, 99, 83, 67, 82, 74, 54, 54, 70, 62, 51, 64, 66, 65, 91, 39, 100, 62, 73, 61, 66, 60, 64, 59, 66, 86, 69, 72, 66, 63, 65, 72, 64, 57, 62, 55, 49, 57, 65, 82, 50, 61, 65, 60, 57, 58, 102, 55, 73, 70, 69, 52, 61, 65, 61, 56, 57, 72, 55, 66, 56, 83, 75, 68, 59, 66, 54, 60, 77, 44, 65, 70, 72, 68, 74, 73, 65, 81, 70, 56, 52, 68, 66, 71, 61, 61, 83, 82, 54, 64, 63, 82, 66, 50, 61, 54, 59, 52, 54, 71, 62, 63, 82, 63, 66, 72, 62, 72, 65, 84, 69, 58, 72, 64, 61, 73, 64, 64, 60, 70, 58, 55, 57, 67, 78, 61, 62, 95, 83, 55, 76, 54, 58, 63, 55, 117, 67, 66, 59, 59, 51, 52, 63, 63, 55, 62, 64, 94, 52, 105, 70, 64, 54, 65, 67, 60, 69, 61, 63, 65, 102, 54, 49, 64, 55, 73, 49, 48, 58, 92, 54, 31, 50, 68, 84, 123, 64, 61, 52, 57, 67, 60, 75, 74, 69, 65, 59, 71, 57, 85, 50, 77, 84, 67, 60, 76, 66, 83, 43, 65, 60, 68, 76, 68, 54, 69, 54, 58, 58, 94, 76, 75, 45, 55, 73, 67, 55, 81, 62, 56, 57, 63, 64, 61, 80, 63, 58, 63, 85, 67, 57, 77, 55, 57, 96, 40, 61, 57, 68, 58, 71, 62, 58, 87, 103, 57, 92, 62, 104, 62, 57, 71, 57, 60, 41, 54, 84, 47, 91, 115, 65, 51, 63, 57, 71, 55, 87, 50, 62, 79, 56, 76, 68, 61, 72, 71, 59, 59, 66, 58, 57, 58, 70, 70, 83, 66, 62, 62, 76, 63, 62, 70, 63, 80, 56, 63, 46, 48, 62, 52, 51, 116, 75, 68, 79, 72, 70, 94, 63, 77, 51, 56, 64, 76, 59, 63, 78, 80, 88, 69, 59, 59, 79, 59, 72, 65, 74, 65, 55, 62, 56, 70, 70, 60, 70, 78, 59, 65, 106, 37, 76, 66, 69, 62, 47, 51, 71, 62, 58, 79, 58, 78, 74, 68, 71, 62, 78, 62, 57, 73, 77, 79, 64, 65, 57, 38, 58, 55, 51, 75, 60, 68, 67, 63, 
58, 89, 69, 62, 75, 58, 60, 61, 62, 59, 37, 64, 69, 60, 63, 68, 64, 53, 65, 83, 82, 52, 62, 61, 55, 62, 70, 60, 48, 63, 56, 64, 68, 78, 68, 64, 64, 66, 64, 60, 49, 52, 111, 94, 70, 87, 55, 73, 60, 88, 61, 71, 79, 76, 59, 66, 67, 63, 51, 60, 80, 62, 65, 71, 60, 79, 67, 49, 56, 56, 52, 85, 62, 100, 56, 97, 61, 83, 67, 88, 62, 62, 73, 63, 77, 89, 54, 65, 68, 74, 67, 54, 61, 51, 68, 69, 57, 74, 69, 66, 44, 58, 94, 63, 94, 76, 57, 62, 53, 76, 52, 44, 52, 51, 73, 66, 64, 66, 53, 60, 60, 55, 63, 54, 53, 61, 53, 60, 59, 63, 74, 72, 67, 75, 57, 51, 60, 54, 84, 68, 63, 62, 46, 73, 65, 67, 67, 79, 61, 68, 87, 62, 61, 61, 58, 74, 69, 71, 61, 74, 70, 67, 89, 74, 74, 74, 54, 69, 70, 62, 63, 66, 67, 49, 45, 64, 48, 81, 62, 65, 60, 68, 75, 58, 67, 60, 49, 59, 46, 58, 69, 72, 83, 59, 46, 73, 57, 60, 50, 69, 57, 59, 67, 68, 78, 50, 49, 68, 73, 62, 52, 67, 61, 62, 67, 65, 60, 52, 53, 83, 64, 79, 61, 53, 59, 60, 65, 66, 62, 61, 61, 53, 59, 51, 62, 72, 70, 62, 63, 59, 74, 55, 61, 68, 72, 78, 55, 59, 65, 42, 59, 61, 73, 71, 61, 48, 90, 99, 46, 84, 68, 65, 65, 75, 61, 73, 103, 49, 75, 62, 68, 49, 73, 67, 66, 70, 73, 106, 57, 60, 85, 64, 58, 53, 56, 56, 51, 60, 62, 74, 60, 56, 88, 65, 83, 59, 63, 79, 62, 52, 60, 57, 60, 71, 65, 72, 69, 53, 46, 59, 74, 69, 103, 69, 51, 56, 64, 58, 57, 83, 60, 61, 59, 69, 43, 76, 70, 60, 59, 60, 68, 84, 61, 67, 67, 71, 57, 119, 69, 72, 59, 52, 55, 74, 96, 69, 44, 75, 60, 75, 80, 59, 57, 88, 51, 63, 64, 58, 54, 89, 95, 64, 65, 57, 63, 63, 66, 84, 68, 45, 79, 59, 64, 64, 66, 62, 64, 66, 68, 92, 62, 67, 68, 66, 64, 60, 56, 69, 64, 75, 64, 63, 63, 69, 62, 66, 61, 57, 65, 54, 46, 52, 76, 69, 88, 66, 81, 79, 68, 64, 70, 56, 62, 60, 51, 57, 102, 56, 66, 56, 66, 56, 73, 62, 75, 75, 77, 77, 64, 53, 63, 55, 71, 68, 78, 48, 66, 55, 61, 61, 55, 49, 106, 71, 75, 60, 67, 69, 83, 54, 53, 103, 63, 76, 63, 64, 85, 63, 63, 58, 73, 55, 48, 76, 66, 69, 54, 55, 70, 72, 60, 78, 92, 66, 99, 54, 70, 73, 60, 87, 85, 129, 57, 60, 74, 61, 65, 86, 60, 61, 62, 54, 53, 72, 52, 55, 65, 57, 65, 65, 78, 51, 66, 61, 56, 54, 53, 95, 86, 53, 52, 58, 69, 59, 60, 65, 62, 59, 45, 46, 107, 59, 65, 64, 63, 120, 72, 62, 52, 62, 69, 126, 64, 58, 70, 63, 59, 65, 68, 52, 63, 34, 81, 56, 56, 62, 72, 55, 73, 54, 57, 60, 100, 71, 50, 61, 68, 57, 71, 49, 64, 60, 68, 58, 70, 66, 53, 69, 61, 43, 70, 110, 67, 57, 84, 68, 39, 54, 63, 56, 71, 79, 63, 77, 60, 55, 78, 58, 70, 61, 65, 64, 78, 54, 67, 50, 65, 83, 67, 58, 52, 90, 57, 88, 43, 41, 58, 69, 64, 56, 57, 50, 69, 126, 68, 55, 63, 68, 68, 63, 53, 50, 58, 57, 31, 61, 58, 65, 66, 62, 59, 51, 68, 68, 59, 75, 74, 52, 61, 80, 76, 101, 60, 99, 61, 59, 76, 65, 54, 77, 64, 85, 59, 63, 73, 77, 69, 54, 60, 94, 59, 66, 55, 47, 65, 67, 66, 86, 83, 57, 63, 69, 53, 64, 91, 61, 53, 81, 50, 49, 70, 60, 53, 66, 58, 72, 52, 74, 79, 96, 50, 58, 72, 53, 58, 52, 82, 57, 57, 66, 71, 86, 70, 46, 65, 55, 60, 75, 60, 61, 76, 66, 71, 74, 62, 64, 65, 61, 45, 64, 61, 75, 74, 55, 68, 63, 66, 69, 60, 82, 81, 85, 86, 70, 67, 58, 61, 70, 60, 67, 67, 65, 51, 58, 93, 62, 69, 59, 56, 52, 84, 59, 61, 76, 60, 60, 90, 70, 59, 76, 69, 76, 52, 73, 56, 74, 76, 58, 66, 64, 66, 54, 68, 81, 69, 71, 81, 77, 57, 57, 64, 57, 60, 62, 66, 69, 78, 59, 53, 64, 67, 55, 69, 43, 57, 63, 53, 39, 67, 71, 66, 57, 50, 64, 66, 92, 79, 68, 65, 63, 65, 52, 49, 80, 72, 72, 68, 52, 49, 68, 65, 53, 49, 67, 63, 49, 58, 75, 61, 71, 82, 79, 62, 87, 59, 57, 55, 48, 58, 100, 70, 58, 75, 90, 49, 62, 69, 57, 47, 58, 72, 45, 63, 71, 121, 53, 55, 75, 73, 77, 66, 81, 62, 54, 54, 67, 70, 75, 66, 59, 63, 55, 54, 65, 102, 63, 82, 60, 71, 59, 81, 51, 56, 
65, 82, 56, 58, 62, 60, 64, 62, 53, 63, 62, 77, 76, 73, 71, 64, 55, 57, 67, 51, 53, 73, 61, 72, 72, 64, 45, 54, 43, 76, 60, 60, 85, 88, 62, 87, 85, 74, 70, 61, 52, 58, 95, 56, 63, 54, 57, 58, 68, 56, 60, 80, 52, 76, 58, 68, 63, 71, 74, 70, 72, 55, 59, 145, 69, 63, 62, 54, 67, 57, 68, 82, 105, 62, 64, 60, 65, 50, 71, 61, 56, 64, 66, 69, 87, 51, 52, 61, 62, 72, 66, 60, 47, 91, 52, 63, 76, 59, 52, 73, 67, 43, 70, 62, 62, 62, 74, 81, 62, 60, 53, 83, 64, 88, 46, 63, 64, 48, 62, 90, 71, 67, 57, 60, 55, 58, 73, 71, 66, 69, 62, 50, 96, 66, 54, 48, 62, 52, 73, 55, 57, 90, 55, 78, 64, 82, 56, 72, 82, 54, 59, 89, 67, 49, 61, 63, 59, 46, 50, 74, 57, 106, 93, 70, 67, 62, 87, 49, 65, 81, 52, 72, 69, 68, 75, 81, 47, 50, 55, 67, 58, 71, 76, 81, 71, 50, 78, 48, 50, 78, 102, 73, 60, 64, 102, 71, 42, 67, 54, 81, 73, 59, 54, 70, 48, 59, 54, 73, 63, 63, 72, 69, 63, 53, 67, 58, 60, 61, 77, 53, 56, 70, 72, 62, 57, 51, 63, 87, 64, 80, 59, 77, 66, 41, 68, 76, 50, 66, 67, 59, 58, 62, 92, 68, 72, 83, 76, 47, 65, 59, 58, 47, 68, 72, 71, 83, 66, 69, 55, 81, 46, 54, 64, 54, 85, 72, 61, 66, 78, 79, 63, 82, 46, 44, 59, 50, 65, 70, 70, 71, 70, 53, 71, 79, 75, 48, 57, 94, 82, 71, 51, 79, 56, 49, 57, 61, 61, 76, 59, 71, 45, 80, 121, 81, 49, 63, 63, 59, 73, 67, 73, 77, 57, 53, 77, 95, 56, 64, 55, 74, 50, 120, 56, 71, 67, 62, 57, 60, 68, 97, 57, 72, 78, 45, 53, 130, 49, 80, 56, 123, 71, 61, 72, 67, 62, 59, 129, 53, 51, 52, 62, 55, 80, 74, 66, 58, 81, 51, 62, 50, 58, 70, 56, 63, 46, 64, 67, 69, 40, 68, 68, 70, 66, 62, 68, 69, 69, 58, 68, 82, 79, 65, 64, 58, 61, 71, 65, 53, 65, 65, 79, 69, 55, 109, 63, 69, 54, 75, 67, 68, 65, 88, 66, 79, 60, 55, 86, 95, 54, 63, 82, 77, 71, 55, 56, 63, 57, 64, 71, 68, 58, 56, 57, 60, 63, 85, 99, 61, 117, 64, 67, 46, 54, 45, 68, 66, 56, 65, 56, 59, 88, 66, 76, 79, 99, 62, 61, 71, 84, 53, 83, 90, 59, 68, 58, 45, 68, 76, 98, 65, 48, 65, 54, 44, 61, 67, 58, 53, 78, 54, 79, 59, 74, 54, 60, 75, 57, 41, 87, 68, 51, 103, 62, 49, 84, 60, 54, 77, 67, 62, 66, 80, 54, 52, 57, 68, 70, 57, 81, 56, 131, 61, 46, 63, 77, 44, 51, 67, 69, 58, 61, 52, 79, 64, 84, 96, 90, 76, 55, 60, 54, 54, 66, 60, 38, 64, 67, 53, 59, 95, 84, 78, 52, 70, 72, 71, 80, 59, 53, 73, 52, 89, 57, 63, 64, 70, 53, 71, 79, 97, 57, 79, 57, 67, 76, 68, 56, 68, 56, 90, 82, 46, 100, 48, 70, 50, 79, 65, 49, 68, 61, 49, 54, 58, 70, 61, 49, 63, 82, 48, 58, 74, 67, 77, 31, 62, 63, 64, 62, 68, 65, 59, 72, 82, 84, 52, 62, 63, 59, 76, 71, 53, 55, 70, 54, 50, 63, 64, 106, 65, 52, 86, 65, 66, 37, 71, 81, 66, 59, 59, 62, 90, 76, 72, 66, 47, 68, 64, 41, 68, 49, 49, 44, 90, 66, 61, 73, 57, 58, 82, 101, 72, 90, 66, 62, 76, 72, 71, 96, 85, 63, 100, 90, 79, 65, 65, 65, 68, 85, 55, 97, 86, 68, 134, 60, 51, 49, 57, 54, 64, 60, 82, 49, 81, 65, 58, 62, 64, 67, 54, 94, 58, 53, 52, 51, 52, 78, 58, 73, 71, 47, 45, 68, 93, 55, 97, 75, 67, 57, 57, 88, 69, 45, 73, 64, 49, 68, 70, 58, 79, 73, 57, 77, 72, 65, 55, 77, 86, 60, 62, 59, 51, 51, 87, 52, 68, 82, 42, 52, 66, 61, 71, 86, 50, 45, 83, 59, 55, 67, 77, 63, 62, 68, 55, 60, 59, 86, 68, 54, 64, 58, 74, 70, 61, 94, 50, 55, 65, 67, 75, 79, 66, 58, 73, 58, 77, 40, 56, 54, 50, 64, 93, 61, 65, 61, 73, 62, 66, 77, 64, 67, 67, 69, 57, 59, 55, 65, 77, 68, 77, 69, 57, 70, 54, 63, 56, 54, 57, 76, 65, 63, 59, 63, 86, 76, 82, 49, 55, 59, 82, 53, 55, 57, 69, 59, 55, 62, 70, 63, 65, 84, 71, 59, 72, 60, 75, 61, 70, 46, 62, 130, 67, 58, 80, 54, 48, 56, 51, 53, 50, 58, 63, 30, 79, 62, 69, 54, 63, 89, 88, 52, 87, 55, 51, 56, 71, 93, 66, 70, 94, 64, 60, 73, 53, 60, 71, 71, 58, 56, 69, 65, 83, 68, 77, 68, 73, 56, 60, 59, 55, 64, 60, 67, 
87, 86, 53, 75, 54, 47, 69, 58, 76, 74, 55, 57, 72, 64, 61, 55, 62, 76, 70, 66, 48, 87, 60, 67, 77, 53, 45, 63, 51, 74, 69, 55, 74, 52, 89, 45, 64, 65, 65, 65, 65, 51, 54, 84, 87, 79, 62, 63, 59, 78, 66, 72, 65, 60, 49, 90, 60, 73, 61, 63, 57, 54, 58, 68, 82, 50, 62, 57, 100, 59, 102, 47, 72, 65, 82, 59, 60, 64, 61, 72, 105, 78, 62, 56, 54, 63, 66, 79, 55, 58, 74, 50, 61, 63, 50, 54, 53, 63, 68, 78, 85, 80, 68, 54, 65, 68, 43, 58, 40, 62, 69, 64, 49, 76, 55, 80, 58, 47, 54, 72, 73, 71, 56, 74, 58, 94, 59, 65, 67, 68, 74, 86, 79, 66, 64, 69, 86, 78, 55, 52, 65, 58, 61, 73, 66, 63, 75, 59, 89, 56, 32, 65, 139, 53, 46, 64, 57, 67, 65, 60, 69, 59, 54, 46, 59, 55, 88, 69, 60, 51, 56, 69, 56, 50, 55, 16, 66, 65, 60, 58, 69, 67, 53, 75, 51, 64, 73, 76, 83, 63, 71, 68, 61, 80, 63, 79, 62, 79, 57, 62, 63, 40, 58, 94, 88, 54, 67, 69, 73, 55, 88, 63, 110, 55, 57, 59, 61, 73, 57, 75, 80, 74, 50, 76, 56, 63, 117, 70, 54, 62, 77, 59, 68, 72, 52, 56, 64, 67, 65, 55, 72, 57, 102, 75, 61, 89, 70, 87, 54, 68, 81, 58, 95, 63, 81, 86, 57, 67, 72, 49, 58, 57, 70, 43, 65, 48, 69, 76, 66, 78, 70, 73, 65, 68, 59, 61, 76, 58, 51, 69, 57, 65, 69, 64, 63, 115, 72, 92, 70, 46, 72, 64, 51, 84, 64, 58, 62, 62, 51, 70, 60, 64, 75, 55, 70, 70, 39, 81, 67, 90, 82, 48, 60, 74, 57, 72, 72, 57, 62, 85, 76, 69, 61, 80, 51, 59, 61, 78, 65, 70, 69, 52, 91, 79, 57, 83, 55, 72, 61, 65, 61, 61, 55, 51, 64, 48, 83, 62, 65, 54, 66, 67, 62, 48, 57, 71, 70, 72, 68, 73, 76, 61, 61, 70, 42, 56, 76, 63, 63, 69, 77, 58, 45, 63, 49, 80, 73, 51, 73, 60, 83, 69, 65, 83, 92, 74, 83, 68, 58, 61, 49, 55, 63, 61, 84, 43, 69, 59, 65, 54, 49, 58, 68, 73, 55, 67, 59, 55, 58, 73, 48, 62, 62, 62, 78, 83, 59, 66, 76, 64, 62, 82, 75, 59, 66, 51, 64, 67, 70, 89, 106, 48, 49, 51, 67, 57, 62, 85, 59, 62, 107, 73, 65, 55, 59, 66, 75, 47, 60, 90, 71, 64, 76, 65, 77, 67, 63, 55, 56, 67, 66, 47, 68, 78, 86, 55, 66, 110, 86, 64, 72, 52, 63, 73, 63, 62, 84, 80, 102, 96, 73, 64, 63, 61, 50, 60, 78, 49, 57, 67, 77, 60, 71, 48, 65, 64, 61, 86, 89, 59, 66, 90, 57, 79, 66, 97, 67, 71, 62, 76, 49, 53, 66, 69, 58, 55, 67, 65, 68, 71, 48, 57, 84, 62, 59, 71, 87, 66, 58, 45, 37, 71, 64, 58, 63, 58, 49, 68, 88, 48, 80, 77, 61, 55, 60, 69, 82, 86, 95, 60, 57, 69, 58, 74, 86, 65, 51, 54, 70, 59, 83, 70, 57, 76, 71, 59, 59, 42, 67, 39, 65, 58, 67, 41, 48, 67, 53, 76, 64, 66, 62, 48, 59, 75, 71, 58, 89, 79, 51, 90, 64, 58, 65, 88, 92, 65, 64, 66, 88, 75, 80, 75, 60, 58, 55, 54, 57, 62, 74, 93, 73, 81, 57, 57, 66, 92, 65, 70, 65, 113, 58, 67, 58, 69, 67, 68, 98, 62, 96, 61, 56, 78, 55, 69, 95, 58, 70, 64, 64, 61, 52, 74, 66, 74, 64, 57, 80, 58, 72, 60, 68, 91, 89, 55, 50, 71, 43, 49, 64, 42, 62, 70, 71, 55, 63, 117, 54, 73, 54, 55, 83, 49, 59, 54, 74, 68, 56, 60, 53, 82, 107, 53, 62, 65, 59, 75, 65, 59, 63, 79, 53, 58, 67, 56, 53, 111, 62, 68, 78, 71, 51, 56, 65, 60, 58, 56, 77, 70, 62, 57, 56, 60, 67, 43, 69, 69, 70, 57, 71, 65, 51, 68, 79, 69, 50, 55, 63, 79, 60, 51, 69, 106, 65, 67, 54, 57, 45, 79, 54, 73, 73, 61, 55, 67, 78, 52, 56, 89, 45, 72, 76, 54, 55, 55, 61, 59, 74, 71, 78, 59, 60, 70, 45, 58, 95, 52, 86, 73, 49, 65, 52, 90, 58, 78, 65, 78, 67, 58, 61, 80, 68, 67, 64, 50, 51, 63, 38, 45, 65, 73, 69, 70, 67, 62, 55, 57, 85, 60, 75, 63, 75, 84, 96, 50, 68, 71, 75, 98, 47, 61, 31, 68, 91, 65, 75, 44, 62, 53, 55, 71, 65, 47, 51, 64, 105, 62, 71, 57, 69, 85, 59, 60, 71, 54, 55, 75, 75, 60, 64, 70, 62, 54, 42, 70, 66, 61, 73, 59, 82, 65, 41, 66, 55, 53, 70, 62, 64, 70, 85, 68, 64, 52, 54, 41, 42, 66, 47, 61, 66, 64, 54, 64, 78, 67, 59, 58, 98, 44, 60, 64, 53, 57, 84, 68, 
72, 70, 66, 58, 59, 70, 52, 58, 66, 69, 52, 108, 69, 80, 59, 73, 72, 60, 83, 72, 52, 51, 122, 57, 53, 79, 71, 53, 62, 61, 69, 66, 53, 67, 48, 94, 54, 63, 79, 54, 73, 84, 60, 73, 88, 52, 48, 75, 72, 88, 47, 61, 44, 59, 85, 59, 97, 59, 42, 56, 45, 61, 71, 55, 56, 110, 61, 52, 61, 63, 65, 111, 54, 82, 81, 58, 71, 58, 58, 66, 50, 66, 81, 75, 81, 42, 61, 92, 76, 55, 71, 65, 61, 64, 43, 76, 70, 40, 82, 93, 58, 49, 56, 80, 58, 56, 62, 66, 61, 56, 54, 66, 75, 61, 54, 68, 69, 92, 72, 47, 52, 63, 71, 46, 64, 81, 65, 90, 59, 59, 60, 56, 76, 44, 69, 45, 59, 72, 72, 85, 60, 81, 68, 66, 61, 68, 77, 61, 64, 95, 83, 27, 60, 78, 60, 58, 58, 70, 50, 54, 47, 40, 68, 50, 53, 75, 42, 65, 51, 83, 69, 62, 83, 46, 62, 81, 55, 68, 79, 48, 57, 68, 91, 129, 64, 57, 67, 69, 68, 47, 70, 71, 63, 58, 48, 59, 61, 57, 60, 70, 57, 63, 79, 50, 78, 66, 69, 69, 97, 59, 99, 58, 67, 77, 57, 57, 83, 47, 55, 72, 83, 57, 54, 67, 53, 135, 70, 58, 51, 53, 56, 65, 54, 66, 71, 65, 41, 59, 61, 72, 102, 59, 58, 57, 63, 87, 45, 67, 50, 75, 67, 53, 59, 58, 72, 68, 83, 100, 71, 64, 60, 71, 64, 72, 60, 107, 70, 65, 66, 58, 82, 70, 46, 76, 56, 80, 64, 48, 59, 51, 54, 65, 61, 53, 64, 50, 70, 53, 85, 61, 72, 53, 131, 64, 48, 64, 69, 66, 54, 60, 66, 57, 66, 69, 64, 49, 73, 68, 68, 53, 38, 89, 58, 55, 81, 46, 49, 72, 51, 61, 86, 73, 72, 62, 69, 61, 64, 70, 67, 60, 53, 69, 66, 41, 65, 61, 89, 66, 66, 54, 78, 60, 74, 73, 82, 55, 66, 48, 64, 61, 97, 73, 77, 70, 61, 61, 62, 70, 79, 94, 69, 61, 64, 80, 76, 74, 106, 51, 60, 72, 36, 70, 78, 57, 50, 47, 69, 48, 63, 59, 67, 55, 111, 52, 69, 70, 61, 60, 71, 81, 95, 64, 78, 58, 41, 71, 80, 61, 51, 73, 62, 77, 76, 59, 66, 43, 68, 73, 73, 63, 63, 52, 52, 76, 50, 64, 83, 64, 74, 69, 56, 47, 67, 49, 71, 120, 82, 86, 58, 55, 58, 93, 63, 51, 68, 70, 50, 82, 45, 71, 59, 60, 93, 61, 90, 62, 52, 80, 72, 56, 64, 54, 76, 80, 55, 70, 55, 54, 73, 84, 80, 51, 55, 67, 58, 66, 54, 49, 62, 93, 64, 63, 58, 64, 43, 60, 57, 77, 70, 42, 48, 57, 58, 69, 83, 55, 70, 66, 69, 66, 79, 64, 50, 66, 51, 50, 59, 58, 69, 86, 67, 86, 74, 70, 49, 67, 72, 78, 63, 63, 51, 101, 37, 60, 88, 46, 58, 67, 69, 74, 54, 58, 69, 79, 84, 64, 70, 96, 62, 148, 81, 72, 65, 63, 97, 74, 58, 58, 70, 62, 57, 64, 54, 88, 59, 62, 79, 75, 62, 85, 73, 45, 58, 74, 65, 62, 76, 62, 46, 79, 56, 81, 68, 55, 53, 65, 64, 58, 54, 99, 77, 71, 96, 47, 54, 68, 66, 68, 67, 79, 115, 52, 58, 81, 57, 76, 51, 60, 70, 101, 53, 80, 73, 53, 47, 61, 52, 52, 56, 70, 62, 53, 43, 44, 64, 56, 66, 97, 66, 77, 82, 53, 64, 76, 60, 79, 102, 65, 96, 99, 78, 85, 59, 61, 65, 70, 67, 61, 64, 59, 82, 52, 83, 63, 53, 63, 72, 73, 72, 67, 63, 68, 60, 79, 77, 76, 70, 72, 74, 56, 57, 70, 81, 76, 60, 81, 60, 57, 82, 51, 42, 49, 66, 62, 74, 51, 69, 78, 62, 59, 58, 59, 51, 52, 46, 55, 49, 52, 86, 69, 68, 48, 55, 65, 51, 45, 91, 59, 96, 74, 72, 93, 45, 73, 91, 83, 71, 55, 63, 49, 70, 65, 47, 51, 64, 34, 70, 31, 57, 66, 66, 91, 61, 46, 83, 52, 64, 68, 73, 56, 73, 68, 42, 78, 59, 62, 67, 84, 52, 74, 47, 67, 78, 79, 69, 73, 103, 80, 95, 62, 63, 60, 59, 57, 55, 60, 67, 71, 84, 77, 57, 69, 67, 54, 68, 74, 77, 69, 57, 127, 56, 65, 50, 86, 58, 52, 83, 56, 78, 65, 55, 56, 85, 65, 52, 44, 74, 61, 54, 76, 56, 71, 63, 66, 64, 74, 56, 71, 48, 41, 52, 98, 120, 75, 62, 74, 49, 66, 66, 68, 61, 60, 64, 68, 59, 83, 60, 73, 82, 58, 73, 58, 70, 79, 61, 59, 52, 62, 49, 75, 99, 51, 61, 71, 67, 76, 40, 54, 61, 85, 40, 73, 67, 57, 59, 117, 57, 61, 87, 54, 81, 63, 71, 57, 61, 72, 67, 61, 50, 31, 54, 78, 49, 78, 52, 63, 57, 60, 73, 45, 62, 55, 51, 68, 76, 100, 88, 71, 59, 55, 52, 89, 66, 71, 73, 42, 47, 66, 59, 59, 47, 67, 46, 
57, 76, 74, 71, 82, 74, 59, 63, 55, 63, 62, 57, 71, 75, 62, 41, 53, 62, 78, 52, 39, 75, 41, 81, 67, 57, 55, 77, 102, 48, 52, 62, 52, 77, 63, 38, 93, 77, 55, 46, 74, 55, 58, 82, 60, 71, 55, 55, 65, 88, 56, 70, 56, 64, 70, 101, 66, 67, 69, 67, 59, 71, 63, 66, 78, 66, 64, 53, 63, 84, 47, 70, 66, 71, 74, 86, 78, 52, 60, 41, 65, 110, 58, 57, 83, 92, 81, 52, 65, 60, 70, 70, 59, 82, 102, 48, 66, 49, 94, 73, 59, 30, 58, 57, 62, 60, 71, 66, 58, 70, 43, 51, 55, 64, 67, 83, 51, 63, 76, 83, 73, 56, 60, 74, 54, 57, 72, 41, 46, 78, 57, 64, 71, 59, 65, 73, 65, 46, 75, 63, 59, 65, 66, 68, 51, 47, 79, 65, 61, 50, 66, 79, 91, 70, 93, 64, 113, 55, 54, 88, 48, 70, 56, 99, 57, 40, 53, 65, 56, 66, 67, 69, 87, 60, 50, 60, 97, 54, 94, 65, 84, 60, 81, 60, 52, 59, 60, 52, 88, 68, 97, 47, 81, 70, 51, 96, 50, 111, 61, 75, 58, 43, 61, 66, 56, 56, 90, 73, 74, 60, 59, 96, 99, 60, 60, 91, 56, 53, 46, 71, 81, 57, 69, 57, 65, 74, 36, 71, 93, 42, 49, 96, 75, 55, 74, 44, 64, 67, 53, 90, 93, 59, 80, 78, 64, 34, 58, 61, 62, 61, 72, 60, 70, 83, 74, 89, 72, 74, 75, 88, 43, 83, 66, 76, 65, 53, 94, 75, 63, 77, 66, 68, 59, 70, 74, 55, 79, 72, 81, 58, 54, 71, 80, 49, 67, 67, 58, 63, 74, 66, 99, 63, 40, 82, 64, 75, 56, 59, 71, 53, 71, 54, 53, 89, 51, 74, 63, 58, 48, 69, 88, 57, 39, 101, 84, 59, 81, 47, 51, 56, 55, 68, 77, 65, 123, 56, 69, 73, 89, 56, 63, 52, 67, 69, 102, 64, 73, 76, 71, 67, 57, 73, 71, 59, 60, 56, 93, 90, 52, 56, 68, 54, 64, 65, 58, 51, 85, 66, 68, 52, 60, 49, 84, 61, 56, 54, 52, 71, 63, 68, 52, 63, 45, 67, 77, 59, 73, 80, 135, 61, 70, 77, 121, 67, 75, 38, 49, 72, 71, 59, 83, 73, 49, 118, 79, 60, 62, 68, 79, 54, 68, 60, 74, 69, 75, 48, 59, 66, 94, 57, 53, 64, 62, 118, 57, 49, 69, 56, 68, 41, 71, 64, 51, 89, 72, 45, 73, 45, 77, 104, 94, 67, 55, 81, 69, 64, 91, 87, 64, 50, 68, 72, 73, 86, 61, 62, 75, 58, 121, 62, 56, 85, 64, 59, 65, 61, 83, 50, 55, 55, 65, 54, 71, 85, 82, 62, 53, 66, 71, 49, 55, 69, 67, 58, 70, 73, 77, 69, 79, 56, 47, 55, 65, 69, 91, 63, 76, 61, 66, 56, 45, 65, 80, 85, 49, 65, 62, 61, 65, 55, 51, 45, 65, 68, 56, 66, 57, 53, 62, 65, 76, 103, 75, 80, 54, 48, 72, 79, 63, 60, 58, 64, 57, 52, 87, 64, 58, 23, 62, 59, 41, 70, 54, 109, 79, 47, 77, 81, 50, 126, 53, 56, 60, 79, 60, 40, 54, 76, 96, 73, 68, 67, 78, 73, 56, 71, 63, 41, 69, 63, 77, 72, 79, 51, 74, 57, 72, 53, 87, 67, 124, 57, 48, 43, 64, 77, 77, 64, 42, 74, 55, 87, 58, 75, 52, 69, 69, 73, 79, 55, 54, 64, 70, 52, 58, 75, 66, 58, 54, 78, 67, 38, 58, 50, 88, 63, 48, 65, 87, 64, 65, 55, 101, 73, 66, 75, 76, 63, 54, 52, 74, 64, 75, 57, 82, 49, 56, 78, 60, 69, 52, 66, 77, 87, 65, 56, 66, 56, 70, 54, 72, 59, 52, 60, 78, 54, 63, 37, 57, 50, 51, 54, 62, 68, 64, 59, 48, 44, 64, 55, 74, 64, 61, 52, 66, 65, 62, 74, 80, 70, 68, 32, 57, 112, 67, 63, 72, 82, 52, 70, 86, 68, 64, 58, 54, 59, 54, 53, 67, 57, 49, 67, 60, 76, 65, 61, 66, 61, 57, 75, 69, 82, 74, 54, 56, 53, 77, 73, 53, 101, 82, 79, 47, 71, 76, 57, 55, 55, 63, 49, 67, 64, 112, 71, 59, 74, 62, 57, 70, 78, 68, 91, 52, 56, 61, 74, 58, 58, 74, 47, 84, 68, 58, 94, 70, 49, 48, 64, 69, 73, 58, 72, 61, 54, 70, 62, 61, 94, 73, 80, 54, 59, 53, 69, 69, 75, 61, 82, 60, 64, 70, 56, 52, 42, 111, 69, 48, 55, 75, 66, 45, 71, 93, 72, 72, 51, 58, 53, 55, 61, 84, 84, 64, 67, 51, 44, 44, 59, 58, 73, 88, 59, 63, 76, 51, 76, 51, 69, 71, 73, 63, 54, 59, 72, 59, 65, 63, 61, 84, 82, 95, 60, 55, 76, 69, 55, 57, 80, 73, 73, 54, 66, 69, 59, 70, 72, 49, 65, 70, 62, 66, 67, 88, 94, 79, 53, 61, 93, 62, 50, 99, 59, 45, 66, 48, 56, 55, 56, 61, 65, 96, 81, 59, 59, 55, 57, 69, 50, 50, 59, 68, 64, 41, 80, 91, 79, 66, 48, 49, 87, 64, 70, 
45, 75, 55, 58, 60, 56, 41, 59, 78, 59, 66, 59, 65, 104, 52, 90, 69, 68, 100, 70, 101, 82, 96, 69, 53, 52, 89, 59, 88, 60, 77, 54, 57, 90, 72, 62, 69, 66, 80, 58, 64, 66, 61, 52, 53, 49, 80, 64, 55, 61, 54, 83, 93, 68, 65, 69, 53, 61, 89, 58, 84, 57, 60, 58, 73, 54, 60, 71, 66, 77, 71, 79, 56, 60, 66, 52, 63, 69, 86, 86, 59, 52, 51, 58, 57, 54, 67, 70, 67, 58, 65, 71, 48, 71, 69, 58, 75, 44, 73, 104, 66, 52, 64, 71, 64, 57, 73, 98, 60, 73, 60, 91, 61, 74, 59, 61, 57, 55, 83, 54, 67, 74, 59, 84, 71, 63, 69, 61, 88, 56, 51, 59, 81, 59, 73, 63, 87, 80, 79, 67, 61, 63, 69, 64, 53, 59, 54, 62, 55, 68, 64, 71, 56, 48, 58, 64, 74, 85, 61, 58, 64, 87, 60, 78, 68, 62, 70, 67, 82, 71, 85, 68, 67, 54, 61, 63, 73, 58, 68, 60, 69, 82, 77, 79, 49, 122, 65, 46, 83, 72, 74, 58, 73, 66, 75, 72, 86, 67, 89, 88, 70, 76, 113, 50, 79, 48, 58, 59, 78, 61, 63, 62, 59, 60, 77, 44, 83, 69, 70, 58, 61, 52, 60, 73, 78, 58, 61, 85, 65, 53, 48, 102, 78, 62, 51, 69, 60, 48, 38, 64, 77, 72, 61, 56, 66, 56, 49, 68, 56, 70, 113, 65, 56, 70, 65, 92, 60, 43, 85, 66, 55, 54, 62, 72, 69, 70, 72, 47, 82, 89, 40, 65, 49, 99, 74, 65, 60, 82, 70, 66, 60, 77, 68, 82, 63, 58, 87, 61, 68, 96, 68, 62, 77, 70, 41, 69, 52, 81, 41, 68, 79, 68, 73, 55, 73, 103, 56, 71, 58, 101, 44, 81, 73, 57, 66, 66, 63, 66, 47, 57, 65, 79, 54, 78, 68, 58, 49, 64, 66, 60, 71, 76, 77, 86, 53, 72, 62, 69, 62, 60, 50, 66, 55, 73, 55, 91, 61, 56, 80, 81, 65, 88, 37, 72, 71, 70, 75, 58, 61, 44, 66, 58, 55, 70, 63, 62, 65, 57, 50, 73, 78, 76, 50, 65, 97, 78, 49, 58, 65, 51, 70, 64, 70, 67, 61, 51, 75, 71, 75, 65, 77, 65, 72, 85, 56, 70, 65, 58, 67, 81, 54, 65, 62, 68, 71, 56, 78, 59, 74, 66, 93, 62, 61, 62, 69, 74, 61, 84, 74, 86, 83, 48, 76, 67, 56, 67, 68, 73, 75, 43, 69, 71, 80, 48, 62, 51, 72, 83, 87, 47, 54, 48, 60, 47, 53, 67, 54, 65, 60, 52, 56, 57, 74, 68, 74, 87, 74, 69, 99, 79, 57, 69, 60, 63, 91, 69, 44, 59, 76, 65, 80, 78, 77, 73, 68, 64, 52, 67, 64, 49, 71, 38, 71, 69, 59, 52, 51, 55, 87, 102, 64, 56, 83, 60, 65, 78, 69, 64, 58, 63, 75, 58, 55, 73, 79, 70, 55, 78, 48, 46, 73, 71, 72, 57, 50, 51, 82, 72, 48, 88, 131, 51, 54, 46, 124, 50, 65, 61, 66, 57, 59, 57, 58, 60, 67, 118, 62, 67, 70, 59, 55, 69, 47, 65, 58, 56, 64, 54, 60, 58, 69, 75, 76, 72, 50, 84, 62, 46, 63, 82, 30, 48, 69, 67, 64, 72, 104, 63, 44, 57, 70, 120, 72, 76, 62, 56, 48, 74, 39, 70, 49, 61, 65, 62, 56, 80, 87, 71, 75, 62, 99, 51, 69, 49, 65, 64, 61, 86, 75, 80, 76, 71, 51, 79, 63, 66, 78, 66, 76, 48, 59, 79, 52, 61, 66, 67, 47, 58, 59, 65, 52, 63, 50, 56, 65, 66, 82, 53, 79, 51, 40, 63, 62, 67, 68, 75, 66, 56, 66, 58, 88, 60, 59, 49, 70, 62, 69, 70, 51, 43, 72, 65, 66, 107, 73, 61, 67, 61, 45, 83, 80, 67, 63, 63, 61, 39, 48, 62, 60, 64, 58, 84, 64, 65, 68, 87, 56, 53, 81, 59, 61, 128, 53, 61, 45, 54, 73, 56, 57, 70, 72, 78, 66, 67, 74, 76, 57, 62, 68, 56, 70, 92, 60, 65, 57, 57, 75, 69, 47, 51, 70, 70, 130, 66, 49, 60, 73, 66, 65, 73, 45, 59, 70, 68, 54, 76, 58, 50, 49, 60, 60, 88, 81, 53, 77, 79, 72, 72, 69, 68, 50, 46, 111, 57, 77, 52, 92, 56, 61, 81, 75, 57, 68, 56, 79, 61, 78, 68, 82, 65, 59, 50, 63, 60, 50, 48, 82, 62, 80, 90, 46, 71, 53, 89, 46, 87, 61, 75, 67, 68, 69, 52, 47, 63, 69, 44, 55, 50, 74, 61, 80, 67, 70, 50, 67, 81, 63, 65, 69, 61, 59, 56, 73, 56, 76, 72, 54, 76, 50, 65, 74, 73, 37, 48, 60, 107, 82, 65, 66, 87, 55, 74, 78, 72, 71, 49, 64, 72, 63, 55, 53, 70, 78, 54, 78, 76, 60, 42, 53, 75, 52, 49, 73, 85, 92, 62, 58, 53, 74, 75, 76, 89, 64, 66, 48, 56, 68, 77, 66, 39, 73, 64, 54, 43, 121, 68, 61, 71, 58, 60, 54, 68, 96, 102, 74, 57, 60, 79, 58, 73, 48, 57, 58, 
55, 69, 87, 66, 54, 46, 60, 66, 67, 72, 51, 71, 57, 60, 60, 66, 99, 55, 53, 54, 67, 61, 45, 44, 63, 56, 70, 84, 75, 67, 52, 61, 48, 83, 68, 51, 70, 78, 66, 60, 91, 106, 41, 61, 61, 85, 56, 75, 81, 83, 51, 58, 64, 53, 59, 54, 47, 70, 68, 84, 73, 54, 57, 64, 60, 82, 73, 68, 43, 57, 58, 59, 46, 67, 50, 59, 73, 88, 69, 95, 52, 44, 74, 62, 63, 68, 66, 35, 85, 62, 68, 63, 65, 89, 93, 46, 59, 67, 96, 62, 69, 70, 86, 76, 65, 46, 92, 50, 41, 74, 61, 52, 63, 55, 83, 51, 64, 57, 65, 70, 52, 69, 62, 57, 36, 80, 68, 66, 80, 79, 65, 63, 59, 57, 85, 74, 58, 59, 73, 64, 58, 59, 60, 69, 61, 73, 79, 54, 86, 48, 64, 56, 59, 87, 79, 69, 68, 68, 77, 112, 70, 89, 58, 87, 59, 65, 41, 96, 46, 101, 58, 80, 76, 67, 56, 67, 68, 57, 57, 82, 58, 63, 59, 70, 70, 52, 63, 84, 70, 75, 88, 47, 73, 82, 52, 49, 88, 48, 65, 93, 74, 96, 69, 61, 70, 72, 62, 49, 52, 68, 66, 48, 67, 65, 54, 68, 44, 48, 63, 67, 54, 72, 68, 45, 53, 60, 61, 83, 63, 58, 92, 59, 81, 59, 86, 65, 46, 65, 52, 76, 101, 49, 57, 98, 58, 48, 63, 82, 89, 40, 48, 67, 83, 56, 78, 82, 76, 69, 52, 61, 58, 41, 86, 56, 57, 79, 45, 57, 46, 73, 64, 66, 68, 79, 44, 76, 58, 53, 72, 87, 59, 73, 62, 44, 63, 71, 53, 73, 78, 67, 58, 58, 89, 71, 54, 73, 69, 58, 71, 82, 62, 75, 61, 52, 71, 57, 61, 62, 73, 48, 63, 75, 56, 59, 68, 51, 55, 59, 70, 61, 41, 74, 70, 61, 73, 62, 114, 55, 56, 65, 72, 68, 62, 75, 63, 69, 90, 62, 72, 76, 60, 72, 55, 64, 58, 67, 48, 44, 61, 51, 66, 60, 49, 94, 59, 51, 55, 70, 63, 67, 70, 85, 72, 73, 76, 77, 74, 66, 71, 68, 62, 63, 81, 68, 71, 62, 71, 68, 70, 62, 66, 81, 59, 67, 81, 65, 58, 54, 60, 68, 71, 128, 69, 60, 70, 75, 98, 72, 63, 64, 68, 60, 84, 71, 63, 72, 84, 77, 56, 54, 108, 36, 60, 53, 87, 68, 59, 47, 54, 65, 47, 63, 58, 83, 44, 70, 56, 74, 57, 68, 79, 56, 65, 70, 55, 69, 67, 67, 53, 81, 68, 55, 58, 65, 65, 60, 86, 54, 63, 45, 126, 63, 59, 48, 45, 68, 59, 72, 60, 72, 53, 40, 60, 53, 78, 85, 61, 47, 69, 74, 54, 62, 65, 72, 77, 65, 77, 75, 58, 64, 60, 64, 61, 66, 70, 64, 81, 67, 67, 68, 88, 66, 77, 56, 49, 66, 67, 85, 68, 90, 50, 65, 70, 45, 52, 59, 64, 55, 60, 74, 79, 58, 63, 54, 71, 109, 71, 67, 59, 62, 74, 56, 64, 73, 67, 61, 60, 47, 70, 81, 70, 54, 78, 59, 73, 55, 86, 88, 60, 80, 76, 59, 53, 92, 56, 48, 73, 83, 54, 67, 72, 103, 50, 49, 59, 49, 60, 62, 67, 62, 83, 87, 79, 63, 66, 58, 57, 61, 68, 55, 63, 72, 63, 72, 50, 49, 54, 57, 77, 93, 51, 87, 70, 56, 66, 38, 52, 72, 53, 49, 38, 59, 61, 70, 66, 74, 84, 107, 57, 110, 75, 56, 50, 70, 77, 59, 68, 51, 75, 44, 48, 56, 89, 53, 61, 67, 70, 71, 59, 63, 69, 79, 73, 79, 58, 48, 51, 74, 50, 67, 96, 74, 42, 51, 68, 77, 57, 64, 73, 50, 60, 83, 54, 60, 76, 55, 59, 54, 72, 57, 53, 54, 75, 57, 61, 59, 64, 94, 72, 78, 61, 114, 66, 61, 57, 116, 54, 63, 75, 59, 67, 68, 72, 61, 49, 63, 116, 53, 50, 63, 59, 75, 59, 67, 63, 71, 62, 68, 56, 62, 72, 59, 45, 64, 67, 62, 69, 77, 70, 64, 87, 54, 48, 79, 60, 55, 59, 85, 58, 76, 66, 58, 123, 67, 47, 47, 79, 54, 53, 65, 50, 60, 57, 64, 85, 71, 87, 89, 51, 74, 64, 121, 49, 72, 80, 74, 47, 82, 86, 83, 62, 61, 92, 50, 57, 80, 78, 67, 70, 60, 58, 41, 45, 50, 67, 76, 69, 72, 47, 50, 70, 63, 62, 65, 55, 65, 73, 64, 46, 54, 46, 64, 67, 57, 97, 57, 81, 84, 48, 79, 50, 66, 57, 56, 63, 65, 85, 52, 80, 88, 37, 73, 57, 61, 95, 53, 86, 66, 57, 63, 49, 76, 44, 56, 60, 58, 39, 57, 55, 71, 65, 68, 98, 44, 72, 70, 84, 57, 66, 94, 61, 51, 127, 51, 67, 61, 60, 59, 91, 70, 59, 57, 46, 68, 68, 61, 82, 81, 51, 69, 68, 84, 56, 67, 59, 71, 54, 68, 55, 60, 69, 82, 63, 117, 65, 60, 51, 56, 48, 63, 84, 43, 81, 79, 61, 60, 77, 55, 53, 51, 75, 53, 66, 51, 60, 55, 54, 54, 66, 50, 66, 58, 61, 71, 
82, 53, 53, 87, 53, 55, 62, 74, 74, 64, 69, 76, 51, 60, 64, 71, 66, 86, 50, 50, 56, 46, 69, 67, 58, 63, 63, 70, 47, 80, 73, 56, 75, 67, 94, 63, 45, 58, 50, 53, 64, 66, 67, 72, 47, 64, 57, 74, 54, 87, 56, 68, 79, 82, 80, 42, 48, 39, 59, 78, 70, 59, 52, 83, 68, 72, 89, 97, 64, 82, 87, 62, 102, 52, 57, 49, 96, 62, 73, 60, 80, 71, 57, 82, 53, 64, 50, 64, 61, 69, 59, 59, 94, 69, 53, 59, 48, 66, 47, 62, 85, 76, 80, 77, 58, 84, 75, 67, 90, 65, 66, 52, 85, 62, 61, 56, 55, 71, 58, 67, 50, 106, 48, 52, 83, 53, 50, 65, 87, 77, 99, 64, 60, 55, 74, 65, 51, 68, 64, 97, 68, 54, 60, 76, 49, 59, 77, 59, 53, 48, 68, 52, 86, 54, 77, 65, 60, 69, 62, 63, 61, 56, 69, 43, 55, 67, 76, 62, 81, 57, 61, 55, 53, 92, 57, 63, 66, 64, 51, 71, 75, 83, 56, 56, 55, 68, 60, 42, 62, 53, 42, 57, 56, 56, 59, 58, 44, 64, 96, 94, 65, 63, 95, 73, 73, 58, 77, 54, 74, 55, 62, 76, 74, 67, 57, 65, 61, 64, 73, 84, 61, 70, 71, 40, 75, 83, 64, 70, 93, 55, 98, 75, 70, 67, 89, 67, 65, 53, 84, 86, 101, 72, 107, 55, 44, 51, 107, 54, 63, 82, 64, 66, 65, 55, 61, 63, 64, 76, 73, 68, 79, 80, 48, 46, 62, 75, 83, 42, 82, 88, 70, 60, 74, 85, 85, 55, 84, 64, 48, 51, 52, 61, 65, 45, 73, 62, 67, 60, 76, 64, 65, 62, 76, 83, 59, 67, 46, 73, 61, 121, 61, 65, 72, 109, 85, 75, 76, 71, 50, 79, 56, 97, 90, 73, 46, 60, 74, 67, 52, 88, 68, 51, 66, 72, 64, 62, 57, 107, 44, 70, 73, 64, 80, 85, 51, 51, 86, 63, 86, 68, 79, 105, 87, 58, 59, 97, 50, 49, 73, 54, 52, 69, 80, 55, 72, 64, 68, 68, 66, 71, 64, 68, 55, 67, 70, 69, 66, 77, 63, 66, 99, 57, 53, 68, 77, 60, 64, 38, 78, 56, 89, 64, 56, 51, 52, 75, 103, 91, 75, 69, 65, 38, 49, 60, 57, 60, 66, 77, 69, 54, 90, 52, 82, 41, 69, 96, 64, 62, 49, 77, 81, 43, 64, 52, 60, 80, 81, 76, 87, 102, 67, 50, 64, 62, 63, 64, 67, 68, 65, 63, 50, 75, 52, 70, 58, 59, 78, 64, 52, 76, 61, 77, 58, 36, 62, 62, 74, 48, 58, 71, 54, 78, 74, 35, 64, 77, 74, 59, 70, 58, 59, 69, 52, 70, 74, 56, 106, 70, 54, 70, 67, 57, 70, 60, 60, 60, 54, 71, 64, 58, 48, 54, 49, 76, 62, 48, 43, 57, 72, 73, 80, 53, 62, 79, 53, 53, 69, 80, 64, 75, 65, 78, 52, 62, 62, 76, 62, 69, 79, 58, 67, 63, 91, 57, 44, 52, 72, 67, 74, 62, 96, 53, 63, 75, 65, 66, 60, 62, 70, 48, 61, 89, 64, 41, 68, 60, 89, 62, 64, 87, 64, 54, 81, 65, 65, 63, 75, 63, 51, 70, 121, 53, 74, 51, 68, 62, 74, 61, 72, 63, 60, 61, 49, 92, 65, 56, 56, 54, 68, 76, 71, 79, 70, 91, 72, 59, 61, 73, 75, 61, 56, 68, 71, 47, 67, 56, 79, 74, 50, 54, 116, 46, 57, 47, 48, 61, 52, 56, 66, 53, 63, 59, 52, 57, 88, 71, 63, 47, 77, 50, 65, 53, 54, 77, 81, 64, 82, 82, 80, 63, 58, 49, 66, 96, 69, 68, 66, 79, 54, 58, 72, 56, 70, 60, 72, 65, 63, 57, 87, 82, 69, 64, 83, 78, 66, 54, 64, 54, 57, 63, 73, 48, 71, 46, 56, 109, 62, 67, 71, 64, 56, 64, 63, 40, 52, 64, 55, 61, 70, 57, 49, 73, 50, 110, 71, 69, 63, 63, 54, 71, 66, 55, 68, 69, 71, 55, 83, 76, 61, 80, 67, 64, 48, 62, 57, 51, 69, 78, 74, 64, 71, 41, 54, 57, 66, 65, 62, 63, 56, 52, 64, 74, 62, 52, 52, 83, 54, 72, 85, 74, 74, 62, 88, 81, 56, 54, 77, 52, 57, 78, 62, 74, 80, 71, 62, 69, 74, 82, 75, 114, 69, 51, 69, 61, 59, 54, 65, 61, 59, 79, 69, 56, 79, 53, 72, 73, 59, 47, 65, 75, 64, 94, 71, 51, 58, 65, 66, 61, 73, 78, 77, 66, 71, 62, 43, 65, 68, 65, 63, 63, 57, 66, 55, 65, 105, 65, 79, 70, 64, 67, 49, 66, 85, 68, 48, 90, 53, 81, 79, 63, 59, 69, 52, 65, 58, 62, 59, 93, 64, 68, 72, 50, 56, 66, 62, 67, 87, 63, 68, 76, 58, 74, 73, 58, 65, 46, 96, 54, 70, 45, 60, 59, 55, 69, 60, 74, 85, 119, 51, 59, 67, 58, 65, 76, 63, 62, 73, 69, 53, 48, 62, 86, 64, 75, 60, 73, 65, 65, 78, 78, 52, 53, 52, 58, 57, 66, 49, 43, 56, 62, 52, 46, 63, 60, 60, 84, 75, 103, 60, 45, 54, 64, 81, 
65, 48, 61, 53, 60, 59, 66, 54, 68, 54, 64, 57, 53, 57, 77, 56, 53, 62, 84, 62, 54, 60, 64, 55, 66, 90, 82, 60, 88, 70, 91, 76, 88, 62, 72, 66, 53, 52, 79, 63, 60, 73, 63, 78, 62, 51, 54, 87, 65, 50, 56, 62, 56, 66, 67, 51, 58, 52, 74, 56, 63, 59, 54, 59, 70, 61, 34, 63, 58, 69, 57, 59, 52, 65, 56, 61, 59, 55, 73, 76, 51, 82, 60, 61, 59, 57, 58, 66, 56, 67, 71, 70, 48, 60, 52, 58, 70, 53, 80, 61, 55, 71, 71, 68, 95, 61, 64, 64, 78, 65, 92, 74, 89, 83, 66, 65, 77, 69, 54, 60, 56, 57, 64, 84, 57, 67, 63, 63, 44, 71, 66, 72, 51, 64, 77, 59, 53, 53, 86, 52, 62, 72, 61, 48, 58, 76, 56, 77, 82, 65, 60, 72, 77, 57, 79, 59, 54, 66, 56, 67, 82, 63, 50, 60, 89, 66, 96, 68, 65, 79, 67, 83, 67, 74, 59, 71, 58, 62, 67, 55, 64, 93, 80, 58, 53, 45, 66, 68, 78, 89, 67, 82, 73, 101, 50, 78, 54, 56, 58, 56, 63, 66, 56, 46, 79, 53, 55, 71, 60, 67, 59, 60, 68, 57, 61, 55, 61, 70, 66, 61, 75, 65, 70, 63, 49, 60, 71, 56, 55, 67, 63, 68, 67, 63, 56, 80, 60, 81, 59, 89, 81, 64, 96, 98, 60, 66, 74, 63, 61, 86, 93, 63, 67, 63, 49, 64, 63, 77, 78, 55, 59, 76, 67, 59, 58, 62, 80, 78, 60, 77, 63, 85, 78, 67, 57, 68, 67, 55, 49, 54, 50, 66, 46, 97, 83, 55, 56, 62, 91, 41, 50, 50, 65, 76, 85, 65, 61, 58, 78, 128, 67, 51, 62, 59, 65, 51, 66, 75, 117, 141, 59, 59, 51, 51, 68, 96, 72, 68, 58, 77, 59, 66, 107, 57, 56, 74, 59, 73, 52, 57, 89, 59, 72, 60, 56, 43, 82, 70, 66, 60, 62, 67, 68, 74, 60, 112, 70, 71, 77, 70, 52, 62, 60, 60, 51, 74, 86, 68, 51, 59, 61, 74, 65, 62, 49, 57, 63, 58, 55, 72, 56, 62, 55, 59, 70, 82, 47, 63, 68, 84, 51, 66, 61, 76, 70, 58, 46, 57, 73, 70, 67, 60, 63, 65, 48, 63, 73, 87, 56, 58, 76, 90, 57, 42, 63, 67, 55, 55, 83, 50, 53, 73, 59, 51, 50, 62, 67, 48, 73, 58, 91, 62, 75, 50, 44, 105, 56, 60, 77, 66, 55, 69, 68, 107, 53, 69, 55, 73, 72, 57, 97, 82, 81, 96, 69, 64, 62, 81, 70, 67, 76, 66, 78, 55, 78, 74, 68, 64, 62, 61, 73, 60, 87, 69, 72, 66, 71, 55, 69, 57, 94, 56, 102, 59, 92, 73, 65, 62, 64, 63, 55, 64, 56, 67, 98, 70, 67, 56, 69, 104, 55, 83, 60, 56, 77, 58, 49, 54, 61, 52, 59, 65, 75, 55, 58, 57, 61, 56, 72, 73, 64, 55, 60, 60, 63, 57, 57, 60, 71, 68, 70, 50, 67, 87, 74, 43, 64, 91, 59, 58, 81, 63, 69, 74, 63, 51, 62, 53, 60, 80, 66, 69, 71, 77, 60, 72, 70, 52, 55, 61, 64, 70, 62, 77, 62, 59, 90, 94, 63, 62, 51, 44, 66, 65, 64, 46, 63, 57, 49, 79, 68, 72, 57, 61, 65, 51, 64, 54, 78, 81, 69, 68, 76, 63, 55, 59, 79, 67, 58, 71, 49, 74, 81, 62, 67, 55, 64, 64, 59, 55, 74, 77, 54, 62, 49, 95, 60, 121, 57, 67, 76, 66, 76, 65, 70, 65, 85, 50, 59, 50, 55, 78, 69, 79, 56, 65, 69, 59, 57, 54, 60, 57, 60, 77, 129, 66, 51, 69, 45, 77, 57, 76, 47, 60, 60, 70, 63, 73, 66, 69, 64, 86, 43, 82, 62, 57, 57, 129, 66, 60, 72, 65, 71, 55, 57, 52, 58, 61, 71, 64, 87, 72, 59, 65, 84, 76, 61, 87, 63, 68, 50, 63, 64, 68, 53, 95, 59, 59, 50, 80, 69, 71, 73, 69, 76, 87, 57, 68, 82, 69, 66, 84, 53, 75, 50, 60, 55, 46, 66, 78, 58, 53, 54, 71, 69, 93, 65, 45, 55, 69, 78, 57, 65, 56, 95, 62, 59, 57, 64, 68, 77, 72, 87, 80, 72, 53, 60, 64, 55, 83, 66, 57, 52, 67, 64, 83, 84, 66, 61, 75, 57, 89, 74, 65, 63, 59, 78, 64, 54, 75, 59, 76, 60, 95, 90, 74, 64, 90, 76, 70, 48, 63, 53, 56, 63, 64, 65, 59, 50, 42, 64, 64, 50, 58, 63, 51, 58, 67, 50, 73, 68, 58, 83, 83, 61, 62, 86, 57, 50, 73, 51, 65, 76, 73, 74, 62, 64, 65, 63, 107, 61, 71, 62, 60, 67, 69, 73, 41, 52, 84, 97, 80, 74, 58, 41, 57, 50, 72, 68, 63, 62, 66, 69, 64, 66, 91, 76, 66, 55, 63, 57, 66, 56, 73, 58, 53, 70, 56, 70, 59, 49, 59, 31, 52, 59, 58, 62, 66, 69, 65, 83, 73, 65, 67, 70, 72, 49, 57, 56, 59, 67, 52, 81, 82, 51, 65, 81, 71, 99, 70, 80, 44, 65, 60, 74, 
73, 62, 67, 56, 56, 50, 69, 57, 67, 55, 55, 55, 54, 63, 64, 62, 71, 54, 60, 73, 57, 60, 57, 67, 109, 56, 61, 56, 67, 101, 61, 71, 73, 57, 55, 60, 55, 74, 87, 58, 56, 57, 67, 75, 64, 73, 80, 71, 76, 52, 60, 61, 76, 60, 68, 57, 45, 50, 47, 70, 55, 61, 57, 83, 55, 70, 44, 70, 37, 68, 47, 72, 53, 55, 60, 68, 50, 81, 56, 70, 57, 64, 59, 64, 71, 63, 57, 59, 69, 56, 57, 66, 59, 85, 80, 54, 46, 49, 54, 74, 66, 66, 72, 58, 69, 57, 60, 54, 72, 53, 72, 61, 89, 67, 74, 62, 48, 68, 85, 60, 81, 62, 69, 62, 75, 49, 67, 61, 75, 59, 71, 68, 57, 69, 66, 98, 59, 61, 50, 80, 51, 60, 66, 64, 67, 68, 78, 86, 56, 53, 66, 54, 78, 58, 54, 65, 61, 70, 60, 69, 64, 65, 58, 73, 61, 46, 51, 47, 71, 84, 101, 55, 59, 67, 45, 75, 72, 72, 67, 56, 115, 99, 73, 63, 60, 76, 61, 72, 64, 63, 63, 69, 87, 40, 63, 60, 83, 78, 65, 59, 68, 72, 77, 49, 63, 102, 54, 46, 69, 54, 74, 56, 73, 57, 57, 87, 67, 49, 65, 69, 39, 56, 77, 121, 80, 48, 68, 56, 72, 85, 68, 55, 72, 67, 68, 65, 57, 74, 76, 71, 55, 58, 97, 65, 51, 71, 63, 41, 62, 59, 66, 61, 82, 47, 73, 58, 61, 66, 49, 70, 66, 63, 93, 68, 74, 46, 77, 68, 61, 61, 51, 63, 69, 62, 69, 60, 74, 60, 55, 61, 50, 52, 66, 67, 53, 74, 83, 65, 52, 74, 55, 79, 48, 51, 76, 86, 66, 89, 67, 65, 67, 49, 98, 69, 72, 67, 56, 64, 59, 72, 46, 68, 57, 85, 54, 95, 70, 52, 62, 68, 60, 79, 84, 58, 69, 62, 58, 48, 51, 61, 59, 56, 78, 65, 96, 64, 57, 48, 72, 81, 52, 59, 62, 64, 83, 60, 76, 75, 80, 49, 55, 56, 70, 59, 90, 54, 57, 62, 63, 41, 61, 55, 55, 60, 64, 71, 62, 88, 74, 105, 56, 66, 79, 87, 82, 55, 73, 59, 68, 61, 70, 52, 67, 69, 60, 76, 58, 67, 69, 81, 66, 59, 55, 58, 52, 64, 63, 66, 54, 64, 121, 65, 66, 63, 53, 55, 46, 71, 59, 90, 74, 79, 67, 44, 56, 56, 53, 61, 66, 63, 81, 68, 64, 61, 64, 81, 67, 57, 64, 56, 65, 66, 54, 73, 65, 72, 62, 63, 83, 52, 62, 69, 73, 58, 60, 65, 38, 52, 60, 66, 79, 47, 75, 62, 79, 61, 58, 78, 91, 62, 73, 61, 59, 83, 76, 78, 63, 57, 66, 47, 62, 50, 73, 64, 71, 76, 54, 59, 60, 56, 64, 60, 60, 82, 79, 60, 73, 80, 74, 58, 59, 45, 65, 79, 78, 53, 69, 65, 56, 62, 71, 63, 78, 61, 57, 58, 64, 64, 82, 64, 62, 98, 76, 77, 69, 43, 59, 54, 83, 60, 70, 84, 70, 67, 73, 59, 74, 62, 64, 61, 65, 69, 60, 46, 70, 58, 55, 59, 54, 78, 78, 80, 61, 87, 77, 75, 56, 77, 61, 99, 56, 62, 60, 50, 71, 67, 65, 55, 53, 45, 68, 54, 53, 63, 74, 58, 69, 49, 78, 95, 48, 78, 52, 95, 64, 84, 56, 53, 71, 56, 67, 62, 60, 84, 75, 79, 69, 72, 71, 56, 65, 72, 90, 74, 51, 65, 60, 66, 51, 59, 62, 81, 72, 68, 64, 64, 48, 59, 56, 69, 57, 57, 53, 51, 54, 64, 55, 60, 83, 50, 80, 66, 72, 79, 73, 67, 55, 55, 67, 64, 72, 52, 55, 109, 67, 50, 58, 57, 70, 85, 48, 57, 46, 52, 79, 56, 80, 70, 76, 72, 92, 47, 71, 74, 73, 56, 57, 62, 68, 47, 81, 64, 86, 50, 70, 63, 103, 75, 56, 89, 75, 56, 68, 71, 63, 61, 67, 55, 59, 72, 75, 57, 65, 48, 91, 74, 64, 47, 75, 96, 76, 82, 73, 56, 55, 104, 59, 79, 82, 85, 57, 59, 57, 61, 68, 80, 71, 50, 88, 78, 72, 67, 58, 59, 61, 83, 80, 53, 68, 37, 58, 106, 64, 103, 67, 82, 69, 45, 67, 67, 71, 79, 67, 69, 67, 66, 58, 60, 55, 48, 103, 63, 67, 70, 60, 62, 64, 68, 70, 43, 59, 61, 53, 57, 62, 71, 54, 66, 42, 61, 55, 57, 82, 63, 71, 60, 62, 77, 95, 69, 76, 64, 54, 89, 73, 62, 57, 53, 50, 63, 64, 65, 89, 54, 62, 78, 63, 57, 49, 67, 55, 58, 47, 61, 102, 65, 65, 60, 61, 63, 71, 47, 45, 54, 50, 49, 61, 73, 60, 68, 63, 86, 59, 64, 49, 64, 64, 71, 69, 65, 68, 66, 66, 82, 59, 65, 41, 70, 82, 59, 62, 63, 86, 65, 65, 61, 64, 65, 50, 83, 81, 59, 82, 52, 56, 75, 51, 68, 70, 81, 53, 66, 60, 52, 62, 111, 63, 66, 62, 63, 56, 51, 75, 78, 72, 67, 76, 62, 59, 68, 69, 73, 57, 82, 69, 64, 60, 57, 64, 82, 80, 105, 58, 76, 
57, 129, 49, 82, 67, 55, 56, 55, 56, 78, 63, 54, 108, 80, 86, 67, 56, 68, 63, 58, 57, 71, 73, 52, 59, 79, 66, 63, 73, 63, 59, 71, 81, 74, 43, 71, 64, 43, 69, 58, 58, 62, 65, 81, 65, 54, 69, 57, 72, 82, 74, 65, 69, 45, 93, 58, 60, 54, 78, 58, 58, 74, 53, 62, 65, 66, 60, 71, 67, 61, 67, 62, 64, 60, 92, 64, 61, 60, 60, 52, 79, 67, 64, 81, 70, 54, 49, 94, 83, 83, 77, 100, 54, 51, 55, 56, 66, 51, 53, 64, 76, 80, 73, 54, 47, 70, 70, 93, 56, 56, 61, 86, 66, 49, 85, 77, 72, 48, 76, 66, 82, 51, 64, 54, 59, 58, 64, 121, 55, 70, 60, 56, 59, 53, 61, 73, 86, 64, 70, 72, 59, 83, 67, 114, 76, 59, 68, 65, 65, 75, 69, 77, 86, 52, 52, 67, 73, 65, 61, 50, 67, 58, 65, 54, 94, 59, 48, 68, 72, 76, 68, 54, 65, 50, 59, 46, 52, 63, 77, 86, 58, 62, 64, 61, 58, 63, 70, 77, 54, 54, 65, 58, 102, 66, 75, 58, 55, 54, 77, 53, 57, 47, 64, 57, 85, 83, 68, 67, 77, 61, 55, 59, 51, 65, 71, 68, 54, 56, 63, 52, 68, 54, 73, 59, 57, 53, 55, 57, 52, 54, 55, 83, 59, 71, 74, 64, 53, 46, 41, 91, 61, 58, 62, 64, 70, 61, 53, 82, 57, 69, 57, 70, 66, 75, 58, 61, 76, 52, 79, 70, 68, 57, 78, 63, 83, 78, 58, 62, 66, 61, 79, 59, 73, 76, 66, 110, 62, 73, 78, 64, 55, 69, 59, 73, 46, 59, 74, 67, 73, 76, 67, 59, 85, 69, 57, 78, 62, 60, 61, 79, 61, 69, 53, 59, 55, 47, 55, 50, 72, 77, 100, 52, 72, 59, 64, 65, 78, 57, 52, 73, 100, 79, 74, 51, 56, 103, 67, 78, 87, 50, 78, 57, 72, 62, 65, 60, 83, 60, 50, 74, 63, 69, 80, 65, 64, 96, 72, 63, 62, 53, 64, 60, 74, 53, 72, 89, 52, 71, 60, 70, 44, 51, 68, 110, 54, 58, 61, 57, 45, 82, 65, 62, 72, 72, 65, 88, 48, 84, 79, 70, 56, 56, 85, 77, 65, 53, 63, 52, 60, 67, 64, 56, 80, 51, 64, 83, 92, 51, 60, 82, 64, 70, 86, 73, 56, 61, 61, 69, 64, 69, 53, 59, 60, 54, 54, 56, 74, 54, 44, 65, 56, 49, 61, 75, 76, 75, 85, 94, 67, 70, 64, 60, 70, 60, 76, 63, 62, 93, 53, 67, 53, 60, 79, 60, 69, 81, 64, 76, 61, 65, 70, 64, 47, 59, 65, 77, 75, 58, 54, 48, 68, 103, 90, 68, 64, 59, 65, 63, 58, 63, 37, 58, 92, 93, 76, 47, 52, 62, 57, 103, 63, 50, 64, 73, 74, 60, 74, 55, 71, 64, 55, 65, 73, 59, 62, 74, 76, 64, 62, 56, 55, 71, 59, 72, 59, 69, 53, 69, 78, 125, 61, 67, 79, 56, 65, 70, 62, 54, 70, 56, 76, 62, 77, 53, 42, 70, 72, 64, 81, 65, 68, 47, 68, 53, 48, 75, 69, 67, 69, 71, 110, 70, 52, 59, 50, 58, 65, 75, 52, 57, 75, 75, 59, 57, 69, 56, 56, 63, 57, 75, 68, 57, 57, 65, 62, 61, 73, 55, 86, 73, 58, 65, 67, 76, 80, 47, 69, 56, 57, 60, 58, 57, 79, 69, 71, 58, 53, 68, 84, 56, 83, 60, 58, 42, 81, 61, 62, 77, 60, 71, 36, 64, 69, 58, 67, 68, 68, 54, 61, 91, 59, 54, 76, 61, 61, 59, 66, 62, 70, 55, 48, 62, 51, 83, 47, 58, 86, 59, 70, 51, 61, 65, 58, 76, 70, 73, 67, 76, 49, 50, 44, 60, 57, 61, 59, 67, 80, 69, 80, 57, 66, 77, 80, 73, 81, 62, 71, 59, 89, 68, 67, 123, 100, 49, 65, 56, 58, 62, 68, 64, 40, 54, 60, 57, 53, 75, 57, 61, 52, 92, 70, 57, 70, 87, 62, 65, 67, 77, 67, 94, 43, 57, 63, 70, 88, 69, 77, 62, 49, 44, 62, 56, 71, 47, 65, 71, 72, 56, 70, 75, 56, 71, 46, 61, 46, 49, 44, 58, 90, 57, 58, 61, 67, 52, 65, 54, 63, 55, 69, 58, 65, 66, 57, 53, 58, 63, 63, 75, 70, 52, 59, 97, 73, 52, 68, 60, 65, 70, 64, 74, 56, 55, 60, 63, 62, 48, 52, 62, 86, 61, 73, 73, 60, 70, 68, 70, 63, 63, 55, 59, 71, 65, 56, 90, 72, 83, 67, 61, 62, 73, 48, 47, 60, 50, 62, 75, 55, 68, 67, 52, 98, 70, 70, 77, 66, 42, 52, 72, 84, 62, 60, 51, 65, 56, 63, 57, 100, 83, 43, 82, 71, 64, 53, 51, 77, 61, 55, 59, 59, 76, 78, 52, 47, 61, 98, 98, 51, 73, 65, 83, 61, 75, 66, 61, 67, 107, 58, 66, 79, 78, 63, 56, 51, 54, 81, 85, 69, 70, 90, 80, 62, 68, 66, 49, 81, 59, 98, 67, 55, 45, 60, 91, 43, 96, 58, 70, 58, 57, 75, 44, 68, 72, 73, 68, 60, 47, 72, 51, 73, 64, 82, 64, 68, 61, 
64, 62, 69, 79, 51, 46, 61, 60, 73, 69, 51, 56, 93, 81, 58, 69, 62, 65, 47, 58, 71, 67, 59, 80, 108, 56, 54, 54, 60, 93, 59, 67, 55, 64, 55, 71, 62, 63, 73, 55, 70, 58, 53, 64, 54, 67, 64, 57, 61, 60, 87, 71, 60, 68, 63, 58, 70, 53, 56, 83, 51, 62, 65, 48, 50, 55, 66, 65, 59, 78, 75, 50, 47, 63, 57, 58, 57, 65, 50, 67, 59, 64, 54, 72, 82, 67, 51, 71, 84, 73, 50, 52, 58, 45, 66, 92, 72, 56, 73, 64, 66, 64, 64, 54, 76, 55, 71, 69, 56, 51, 68, 107, 53, 60, 63, 77, 84, 58, 59, 79, 78, 97, 82, 56, 55, 80, 70, 49, 60, 78, 55, 66, 46, 73, 68, 82, 77, 95, 76, 53, 65, 119, 61, 71, 66, 57, 53, 71, 63, 93, 71, 58, 114, 67, 80, 51, 76, 70, 63, 60, 49, 58, 54, 56, 67, 90, 50, 53, 74, 65, 54, 67, 74, 93, 37, 70, 40, 66, 69, 41, 65, 54, 59, 66, 65, 62, 59, 63, 70, 84, 62, 60, 63, 51, 67, 64, 60, 49, 81, 64, 53, 85, 65, 60, 60, 60, 72, 63, 61, 49, 66, 60, 70, 70, 71, 62, 54, 56, 51, 50, 79, 76, 52, 64, 78, 77, 43, 75, 59, 69, 85, 73, 61, 63, 68, 78, 62, 69, 47, 64, 64, 64, 77, 62, 69, 74, 63, 93, 43, 61, 50, 77, 75, 68, 93, 53, 60, 80, 75, 80, 58, 83, 59, 55, 79, 59, 74, 98, 46, 76, 66, 66, 57, 55, 72, 68, 73, 50, 64, 93, 59, 57, 57, 105, 79, 59, 64, 51, 59, 68, 66, 71, 91, 57, 53, 52, 79, 60, 55, 57, 66, 78, 80, 65, 106, 63, 56, 48, 62, 92, 63, 73, 74, 61, 52, 55, 67, 61, 99, 79, 59, 58, 76, 60, 60, 56, 62, 78, 54, 48, 66, 77, 83, 53, 54, 53, 187, 54, 62, 85, 52, 50, 56, 73, 58, 79, 60, 66, 67, 66, 72, 37, 63, 56, 54, 64, 55, 71, 51, 68, 56, 68, 51, 76, 61, 52, 78, 66, 65, 58, 65, 65, 68, 79, 57, 59, 76, 75, 74, 46, 48, 65, 62, 66, 64, 58, 57, 76, 64, 72, 62, 69, 90, 66, 76, 59, 57, 60, 107, 59, 46, 54, 55, 50, 72, 54, 53, 67, 80, 58, 55, 45, 61, 68, 56, 53, 55, 53, 72, 75, 56, 67, 60, 77, 59, 129, 61, 67, 72, 59, 59, 70, 63, 59, 74, 62, 62, 73, 72, 55, 71, 64, 67, 59, 56, 46, 58, 62, 55, 65, 88, 69, 65, 80, 53, 59, 58, 81, 60, 63, 111, 83, 59, 60, 64, 61, 92, 60, 70, 44, 59, 65, 91, 43, 80, 46, 88, 47, 58, 50, 83, 81, 89, 57, 74, 88, 78, 53, 51, 61, 51, 68, 62, 56, 81, 75, 72, 71, 54, 76, 60, 95, 59, 117, 50, 56, 64, 71, 48, 74, 62, 50, 57, 58, 68, 73, 71, 60, 58, 73, 67, 63, 77, 85, 75, 84, 75, 59, 50, 61, 69, 61, 85, 59, 57, 40, 51, 54, 83, 70, 60, 69, 91, 68, 57, 54, 58, 67, 57, 56, 82, 71, 55, 58, 51, 66, 72, 60, 67, 74, 81, 62, 66, 60, 56, 62, 77, 87, 44, 77, 56, 111, 54, 50, 79, 98, 57, 84, 66, 97, 68, 66, 97, 55, 60, 66, 47, 59, 71, 54, 54, 65, 65, 74, 79, 98, 64, 69, 63, 68, 69, 88, 59, 58, 77, 58, 48, 60, 76, 56, 46, 63, 101, 59, 70, 54, 50, 58, 52, 99, 51, 44, 78, 72, 75, 57, 70, 83, 61, 61, 65, 56, 65, 46, 95, 59, 61, 82, 49, 60, 46, 42, 75, 49, 59, 69, 53, 65, 66, 98, 56, 70, 61, 73, 73, 62, 67, 72, 75, 56, 55, 70, 60, 49, 63, 59, 74, 58, 90, 68, 49, 52, 59, 58, 43, 70, 71, 55, 58, 63, 109, 57, 66, 52, 81, 51, 71, 55, 58, 65, 82, 82, 71, 59, 49, 108, 54, 96, 57, 60, 61, 60, 53, 46, 68, 73, 81, 57, 54, 63, 49, 43, 77, 76, 61, 54, 61, 69, 53, 77, 64, 50, 57, 63, 66, 74, 50, 58, 48, 63, 59, 57, 79, 78, 73, 53, 56, 77, 94, 53, 50, 47, 64, 73, 58, 76, 71, 56, 52, 60, 82, 63, 79, 60, 52, 66, 62, 93, 83, 67, 61, 53, 67, 89, 75, 59, 81, 64, 66, 67, 64, 86, 77, 87, 56, 58, 52, 71, 67, 47, 59, 63, 56, 66, 67, 54, 77, 54, 88, 52, 56, 82, 68, 80, 71, 50, 77, 53, 90, 63, 67, 113, 77, 70, 68, 53, 56, 55, 50, 46, 59, 46, 63, 55, 50, 70, 55, 59, 66, 72, 69, 69, 52, 70, 92, 60, 60, 70, 89, 111, 63, 61, 63, 81, 56, 55, 84, 68, 78, 66, 72, 57, 63, 79, 54, 70, 61, 46, 86, 112, 111, 73, 61, 103, 76, 56, 54, 54, 71, 63, 56, 55, 73, 74, 45, 53, 103, 51, 81, 67, 66, 77, 68, 56, 51, 49, 75, 72, 57, 93, 65, 72, 60, 
65, 55, 68, 61, 49, 67, 66, 48, 85, 66, 82, 64, 76, 81, 60, 86, 65, 67, 59, 59, 61, 47, 62, 58, 72, 58, 60, 80, 44, 55, 88, 71, 39, 71, 50, 72, 53, 64, 51, 39, 52, 45, 54, 70, 60, 50, 88, 103, 73, 73, 49, 61, 68, 61, 60, 76, 48, 60, 50, 74, 63, 48, 66, 91, 62, 46, 76, 68, 64, 48, 93, 59, 66, 58, 57, 44, 65, 77, 62, 53, 62, 88, 73, 58, 47, 79, 95, 66, 81, 55, 54, 64, 88, 53, 90, 49, 82, 67, 47, 53, 60, 47, 74, 65, 74, 70, 61, 55, 61, 58, 58, 59, 43, 80, 71, 60, 57, 64, 82, 60, 80, 55, 74, 58, 68, 66, 72, 61, 68, 74, 72, 78, 42, 75, 85, 69, 37, 116, 77, 61, 67, 59, 44, 50, 66, 63, 57, 71, 67, 61, 65, 56, 56, 55, 66, 76, 60, 85, 78, 67, 51, 70, 55, 60, 61, 62, 61, 40, 67, 78, 74, 67, 67, 58, 58, 80, 100, 80, 83, 62, 65, 67, 65, 57, 57, 74, 66, 84, 56, 41, 72, 110, 72, 78, 60, 52, 64, 77, 56, 48, 69, 61, 52, 55, 49, 59, 77, 63, 55, 60, 74, 58, 73, 75, 82, 47, 45, 57, 45, 54, 64, 81, 86, 64, 54, 71, 55, 55, 73, 73, 69, 76, 65, 53, 51, 57, 74, 70, 45, 79, 49, 195, 61, 82, 67, 71, 70, 64, 69, 76, 70, 70, 88, 58, 73, 72, 55, 66, 65, 57, 90, 95, 43, 84, 56, 57, 77, 70, 68, 55, 67, 54, 52, 69, 60, 58, 87, 61, 108, 75, 71, 55, 114, 50, 53, 59, 54, 78, 62, 68, 78, 68, 75, 113, 75, 95, 49, 60, 62, 56, 57, 76, 54, 40, 56, 58, 105, 65, 62, 75, 71, 38, 69, 57, 68, 68, 60, 57, 33, 59, 45, 57, 80, 66, 75, 82, 65, 86, 81, 47, 67, 74, 44, 51, 67, 90, 45, 56, 68, 76, 62, 65, 72, 65, 72, 49, 50, 59, 60, 55, 80, 56, 66, 44, 68, 65, 61, 82, 80, 70, 58, 69, 58, 64, 56, 67, 61, 67, 68, 64, 51, 76, 70, 54, 95, 79, 67, 61, 51, 79, 67, 64, 78, 60, 55, 63, 71, 49, 49, 70, 46, 67, 101, 63, 55, 64, 58, 53, 90, 49, 61, 82, 89, 53, 77, 55, 57, 75, 93, 71, 40, 63, 70, 66, 60, 50, 52, 49, 69, 81, 72, 58, 65, 61, 114, 55, 61, 72, 59, 56, 54, 62, 62, 93, 57, 81, 50, 68, 58, 63, 72, 73, 58, 62, 55, 93, 71, 57, 56, 66, 65, 69, 68, 69, 59, 51, 64, 65, 60, 93, 79, 74, 73, 76, 74, 55, 79, 77, 68, 58, 58, 74, 68, 76, 58, 58, 52, 47, 61, 56, 69, 57, 62, 92, 68, 70, 69, 47, 79, 54, 57, 75, 78, 63, 63, 65, 64, 65, 70, 66, 48, 74, 59, 68, 60, 65, 58, 65, 77, 73, 72, 58, 78, 86, 77, 66, 37, 45, 60, 63, 75, 59, 69, 64, 44, 68, 69, 61, 77, 61, 58, 58, 78, 80, 64, 79, 58, 46, 70, 106, 67, 57, 66, 71, 50, 74, 48, 72, 92, 57, 91, 62, 58, 54, 69, 40, 42, 58, 63, 65, 67, 65, 75, 67, 66, 44, 65, 61, 67, 79, 65, 54, 71, 50, 70, 94, 82, 71, 53, 52, 75, 61, 41, 74, 54, 63, 46, 63, 69, 64, 68, 80, 65, 48, 62, 78, 62, 67, 75, 56, 66, 100, 95, 40, 51, 59, 86, 64, 82, 72, 62, 72, 53, 100, 72, 83, 60, 107, 58, 62, 60, 59, 84, 116, 61, 68, 63, 69, 59, 75, 69, 77, 63, 52, 54, 57, 66, 57, 67, 60, 78, 63, 78, 56, 98, 90, 44, 65, 62, 55, 71, 49, 60, 68, 49, 64, 62, 82, 69, 81, 81, 71, 57, 74, 86, 46, 62, 64, 47, 65, 54, 75, 55, 77, 86, 59, 61, 65, 53, 76, 72, 49, 62, 88, 59, 47, 75, 65, 62, 58, 75, 73, 65, 78, 59, 57, 72, 68, 74, 71, 61, 68, 58, 75, 56, 64, 73, 55, 67, 81, 88, 69, 87, 63, 61, 65, 87, 67, 84, 58, 76, 73, 60, 88, 64, 88, 51, 60, 83, 59, 43, 52, 59, 75, 52, 67, 66, 65, 68, 63, 48, 71, 51, 85, 77, 66, 73, 77, 61, 72, 55, 67, 63, 84, 79, 61, 63, 63, 60, 53, 110, 63, 36, 65, 68, 60, 64, 67, 52, 67, 61, 65, 66, 36, 53, 64, 54, 66, 66, 56, 59, 48, 53, 51, 52, 61, 67, 51, 66, 57, 106, 60, 80, 66, 54, 91, 68, 50, 49, 74, 64, 47, 62, 90, 58, 52, 70, 65, 69, 43, 62, 52, 57, 52, 66, 66, 59, 61, 56, 63, 64, 125, 59, 56, 64, 61, 70, 62, 61, 55, 72, 64, 63, 87, 59, 76, 57, 64, 70, 69, 55, 65, 53, 69, 61, 71, 75, 73, 76, 53, 63, 64, 62, 71, 65, 64, 49, 77, 69, 79, 77, 54, 59, 54, 81, 71, 66, 67, 64, 53, 68, 64, 53, 69, 55, 63, 76, 58, 85, 108, 61, 59, 56, 
70, 82, 55, 62, 86, 84, 35, 66, 70, 72, 73, 72, 58, 64, 54, 61, 82, 54, 66, 63, 53, 93, 79, 55, 75, 67, 59, 74, 62, 69, 108, 91, 61, 71, 57, 44, 48, 79, 67, 77, 59, 54, 50, 58, 62, 78, 77, 56, 64, 64, 49, 68, 61, 58, 50, 71, 92, 69, 68, 105, 71, 52, 74, 65, 51, 53, 57, 63, 61, 69, 66, 55, 56, 58, 51, 61, 75, 73, 64, 47, 61, 47, 78, 54, 67, 93, 106, 110, 68, 61, 60, 87, 60, 46, 69, 55, 55, 50, 71, 93, 53, 93, 62, 56, 72, 59, 50, 104, 46, 62, 51, 79, 47, 53, 61, 53, 43, 77, 72, 61, 66, 73, 50, 80, 62, 59, 80, 64, 44, 67, 72, 75, 58, 57, 66, 65, 55, 81, 59, 76, 51, 63, 70, 65, 59, 59, 70, 64, 70, 87, 48, 71, 60, 59, 76, 65, 81, 72, 81, 49, 54, 70, 56, 67, 96, 57, 67, 58, 77, 69, 59, 99, 72, 72, 70, 68, 52, 67, 73, 63, 65, 65, 66, 63, 64, 56, 80, 72, 88, 50, 57, 60, 87, 97, 54, 70, 55, 77, 53, 41, 66, 48, 60, 51, 74, 63, 72, 83, 52, 61, 49, 62, 74, 65, 70, 65, 75, 83, 56, 63, 57, 47, 83, 97, 58, 72, 73, 77, 66, 85, 90, 65, 66, 103, 63, 71, 76, 64, 70, 72, 47, 69, 53, 74, 59, 69, 78, 70, 95, 67, 52, 62, 68, 73, 54, 73, 72, 65, 71, 103, 66, 107, 58, 66, 61, 60, 69, 71, 55, 83, 51, 69, 46, 73, 68, 77, 49, 60, 104, 63, 58, 63, 68, 71, 88, 86, 65, 65, 70, 52, 67, 61, 45, 55, 60, 75, 55, 57, 68, 60, 63, 45, 75, 59, 58, 63, 82, 58, 67, 49, 58, 83, 52, 62, 69, 69, 59, 56, 49, 106, 69, 59, 101, 64, 59, 76, 70, 51, 53, 49, 60, 47, 101, 78, 66, 52, 51, 54, 73, 54, 62, 49, 62, 61, 67, 50, 44, 61, 67, 69, 62, 36, 75, 73, 80, 63, 57, 70, 60, 65, 73, 71, 61, 59, 61, 65, 61, 65, 72, 70, 58, 53, 84, 61, 62, 46, 69, 69, 47, 69, 84, 62, 42, 79, 69, 57, 51, 48, 50, 74, 56, 55, 62, 77, 60, 74, 59, 59, 62, 55, 61, 62, 72, 64, 93, 72, 77, 56, 67, 72, 55, 78, 52, 71, 50, 66, 63, 67, 82, 113, 97, 77, 62, 110, 72, 70, 55, 60, 54, 68, 72, 90, 73, 69, 102, 63, 71, 65, 55, 80, 59, 60, 63, 50, 63, 58, 52, 84, 50, 77, 60, 55, 61, 80, 98, 63, 62, 58, 55, 58, 60, 56, 57, 60, 66, 74, 64, 61, 76, 43, 67, 51, 80, 38, 56, 72, 85, 66, 54, 61, 64, 48, 61, 48, 59, 88, 60, 54, 67, 56, 54, 63, 84, 63, 83, 70, 70, 48, 66, 73, 69, 76, 41, 48, 47, 57, 64, 58, 62, 85, 47, 62, 50, 65, 66, 65, 55, 63, 63, 58, 54, 62, 60, 63, 96, 76, 59, 50, 66, 67, 53, 57, 63, 83, 52, 73, 81, 74, 70, 51, 54, 67, 72, 81, 55, 48, 68, 66, 70, 96, 63, 57, 68, 66, 67, 63, 62, 76, 43, 60, 68, 52, 53, 59, 62, 105, 64, 66, 75, 74, 50, 59, 69, 55, 90, 61, 57, 68, 89, 57, 60, 58, 66, 50, 57, 38, 88, 67, 89, 58, 49, 79, 65, 77, 60, 57, 55, 54, 68, 60, 85, 58, 78, 53, 83, 74, 66, 75, 62, 63, 58, 47, 64, 70, 65, 61, 65, 59, 39, 67, 59, 67, 50, 57, 47, 81, 77, 68, 73, 85, 56, 62, 86, 51, 44, 71, 62, 61, 62, 80, 59, 63, 68, 43, 60, 51, 50, 50, 47, 75, 64, 51, 67, 68, 69, 68, 77, 50, 57, 63, 54, 68, 57, 62, 53, 54, 56, 59, 67, 92, 47, 57, 56, 63, 71, 66, 71, 71, 48, 53, 111, 54, 70, 60, 79, 63, 74, 59, 85, 66, 61, 66, 82, 66, 49, 63, 78, 67, 54, 54, 47, 59, 54, 59, 63, 57, 68, 69, 75, 62, 63, 60, 68, 38, 64, 60, 51, 67, 65, 64, 77, 67, 68, 61, 72, 41, 105, 64, 50, 74, 66, 67, 81, 69, 55, 65, 57, 51, 59, 66, 57, 68, 110, 100, 61, 60, 63, 86, 117, 73, 64, 75, 62, 69, 93, 64, 44, 53, 85, 71, 70, 46, 57, 51, 73, 60, 53, 78, 61, 60, 67, 59, 81, 64, 84, 49, 67, 68, 63, 66, 71, 80, 55, 69, 69, 113, 66, 64, 66, 63, 61, 49, 75, 80, 76, 93, 71, 78, 42, 77, 69, 55, 71, 59, 69, 71, 56, 84, 71, 73, 55, 72, 71, 59, 93, 56, 58, 70, 77, 65, 52, 110, 71, 54, 83, 63, 72, 55, 46, 71, 63, 84, 68, 70, 70, 55, 66, 63, 53, 88, 53, 75, 86, 63, 67, 108, 78, 49, 101, 45, 42, 80, 66, 60, 85, 63, 89, 87, 64, 79, 64, 71, 78, 61, 68, 66, 65, 88, 70, 79, 88, 60, 50, 71, 53, 76, 49, 70, 65, 76, 53, 93, 
62, 75, 56, 74, 54, 54, 69, 62, 79, 63, 88, 41, 91, 83, 47, 63, 69, 48, 64, 103, 63, 67, 74, 65, 61, 57, 42, 54, 67, 77, 57, 66, 62, 37, 77, 48, 65, 67, 65, 57, 81, 55, 57, 68, 49, 69, 71, 64, 67, 69, 54, 76, 67, 39, 69, 76, 66, 65, 62, 76, 70, 75, 88, 60, 38, 79, 55, 44, 56, 68, 69, 40, 62, 62, 45, 69, 72, 56, 62, 57, 110, 59, 94, 42, 75, 59, 51, 41, 58, 77, 66, 69, 86, 60, 69, 81, 68, 87, 47, 49, 64, 77, 70, 74, 44, 56, 62, 95, 85, 74, 78, 67, 41, 57, 55, 52, 87, 60, 68, 71, 76, 49, 47, 55, 70, 68, 62, 71, 72, 75, 56, 56, 71, 61, 79, 82, 48, 52, 77, 75, 46, 79, 53, 66, 69, 71, 96, 63, 44, 72, 56, 59, 68, 57, 59, 68, 60, 74, 79, 58, 62, 65, 53, 90, 73, 72, 72, 48, 62, 69, 55, 55, 81, 77, 64, 57, 70, 81, 57, 65, 43, 64, 59, 40, 65, 52, 66, 70, 98, 48, 67, 74, 69, 75, 62, 44, 68, 64, 83, 78, 58, 89, 67, 52, 66, 52, 83, 75, 66, 50, 59, 67, 64, 68, 67, 58, 65, 79, 60, 66, 81, 61, 62, 62, 74, 74, 48, 84, 75, 99, 49, 75, 63, 64, 64, 59, 78, 84, 93, 81, 63, 58, 62, 61, 55, 70, 78, 48, 79, 111, 83, 62, 67, 59, 60, 44, 62, 50, 47, 88, 85, 47, 58, 54, 51, 55, 80, 46, 72, 58, 65, 75, 82, 49, 69, 60, 77, 71, 53, 59, 46, 66, 62, 61, 68, 48, 70, 57, 76, 70, 59, 65, 75, 73, 71, 47, 48, 66, 112, 70, 83, 51, 42, 70, 71, 88, 46, 37, 65, 50, 76, 75, 65, 78, 78, 75, 61, 60, 54, 67, 59, 65, 55, 67, 66, 75, 65, 56, 66, 58, 84, 54, 56, 68, 63, 80, 64, 71, 69, 49, 52, 67, 74, 49, 60, 54, 67, 75, 69, 75, 63, 69, 47, 65, 80, 61, 68, 66, 60, 74, 66, 44, 60, 63, 78, 69, 57, 79, 78, 94, 65, 66, 63, 50, 70, 73, 66, 77, 67, 86, 51, 38, 63, 63, 96, 80, 62, 74, 64, 57, 58, 58, 62, 77, 42, 58, 78, 44, 75, 47, 59, 59, 58, 79, 73, 81, 64, 54, 62, 60, 81, 85, 98, 62, 48, 59, 71, 76, 59, 62, 96, 64, 59, 54, 73, 77, 77, 56, 40, 47, 75, 60, 71, 61, 65, 69, 77, 55, 68, 59, 60, 55, 46, 68, 71, 59, 60, 70, 75, 57, 82, 79, 79, 57, 59, 54, 82, 59, 80, 49, 62, 67, 65, 58, 79, 59, 80, 64, 65, 68, 75, 48, 62, 48, 75, 53, 110, 73, 47, 53, 62, 54, 56, 80, 63, 55, 56, 60, 72, 49, 75, 74, 67, 49, 75, 53, 51, 65, 57, 53, 62, 57, 66, 48, 65, 60, 56, 68, 68, 63, 56, 92, 82, 55, 58, 60, 68, 70, 56, 78, 80, 55, 73, 66, 59, 71, 69, 56, 47, 58, 61, 72, 82, 71, 58, 97, 67, 74, 64, 48, 60, 67, 56, 66, 47, 92, 104, 68, 56, 67, 66, 78, 67, 68, 64, 48, 64, 60, 53, 62, 39, 103, 82, 54, 54, 98, 71, 69, 55, 71, 67, 68, 77, 87, 75, 82, 100, 86, 100, 62, 74, 46, 69, 63, 69, 71, 46, 75, 76, 91, 53, 62, 65, 78, 80, 48, 59, 57, 115, 61, 52, 110, 59, 50, 57, 74, 78, 53, 84, 56, 50, 79, 87, 72, 59, 77, 85, 76, 97, 71, 53, 46, 80, 58, 56, 77, 78, 43, 66, 66, 51, 52, 62, 46, 60, 52, 89, 67, 76, 70, 51, 51, 84, 46, 62, 50, 39, 49, 59, 73, 66, 91, 58, 68, 65, 71, 54, 71, 56, 54, 72, 66, 52, 69, 57, 77, 71, 68, 55, 66, 56, 88, 54, 74, 57, 84, 89, 60, 82, 58, 70, 63, 62, 64, 82, 68, 59, 61, 54, 64, 73, 70, 47, 109, 57, 66, 79, 47, 56, 99, 73, 68, 81, 74, 63, 74, 57, 95, 68, 99, 44, 59, 67, 64, 70, 71, 65, 69, 38, 61, 39, 72, 58, 75, 83, 61, 56, 88, 73, 56, 107, 56, 68, 51, 93, 86, 62, 52, 66, 54, 51, 73, 48, 71, 52, 51, 71, 59, 61, 99, 68, 55, 74, 59, 63, 58, 59, 54, 67, 62, 45, 49, 65, 69, 63, 53, 62, 74, 64, 44, 56, 60, 57, 68, 69, 66, 47, 62, 65, 60, 55, 44, 66, 77, 74, 78, 71, 55, 63, 68, 65, 82, 49, 50, 63, 61, 69, 69, 64, 76, 49, 86, 88, 49, 53, 66, 66, 80, 77, 62, 65, 84, 65, 79, 51, 57, 73, 58, 72, 56, 48, 89, 127, 53, 57, 69, 57, 65, 61, 44, 71, 97, 69, 70, 86, 62, 84, 46, 64, 63, 53, 61, 87, 67, 60, 55, 60, 58, 54, 65, 75, 50, 61, 60, 61, 95, 50, 51, 87, 62, 58, 80, 75, 70, 47, 64, 33, 77, 68, 61, 61, 90, 52, 77, 76, 55, 83, 75, 59, 78, 66, 87, 53, 59, 124, 
66, 60, 54, 65, 93, 91, 31, 88, 61, 63, 62, 54, 64, 65, 78, 88, 62, 68, 71, 76, 64, 97, 59, 67, 61, 65, 60, 54, 63, 63, 67, 94, 50, 57, 60, 55, 61, 72, 62, 68, 95, 64, 127, 44, 58, 55, 63, 69, 53, 62, 67, 66, 57, 45, 82, 57, 53, 70, 65, 52, 65, 52, 74, 82, 80, 86, 66, 54, 54, 73, 51, 68, 71, 60, 63, 67, 39, 48, 47, 65, 56, 112, 62, 46, 75, 101, 69, 59, 71, 79, 62, 62, 51, 65, 45, 68, 66, 44, 87, 74, 114, 65, 61, 66, 82, 68, 44, 93, 60, 72, 66, 52, 60, 58, 86, 65, 59, 69, 79, 76, 72, 78, 57, 73, 74, 61, 69, 81, 67, 66, 59, 57, 60, 83, 57, 50, 61, 63, 68, 86, 114, 51, 71, 46, 49, 60, 78, 68, 58, 67, 73, 91, 67, 58, 63, 76, 62, 61, 79, 64, 76, 58, 77, 83, 64, 65, 131, 60, 53, 61, 65, 52, 49, 94, 67, 70, 60, 60, 66, 61, 40, 66, 63, 65, 72, 79, 74, 82, 101, 70, 53, 63, 62, 62, 60, 52, 59, 73, 103, 60, 55, 87, 51, 60, 62, 58, 68, 65, 56, 64, 85, 58, 48, 54, 72, 54, 57, 72, 59, 119, 53, 79, 73, 63, 63, 68, 76, 52, 69, 64, 81, 76, 70, 54, 71, 54, 70, 58, 57, 83, 58, 69, 73, 61, 69, 88, 70, 54, 59, 64, 62, 79, 45, 68, 67, 97, 49, 74, 47, 73, 57, 78, 65, 75, 83, 56, 76, 68, 52, 72, 54, 72, 105, 76, 59, 60, 71, 59, 45, 81, 73, 68, 64, 71, 73, 61, 70, 72, 43, 68, 71, 79, 50, 71, 64, 58, 56, 98, 54, 78, 59, 63, 84, 44, 58, 110, 62, 72, 65, 63, 46, 98, 78, 58, 67, 62, 63, 60, 57, 40, 81, 89, 57, 63, 49, 45, 63, 70, 63, 60, 80, 69, 62, 70, 66, 68, 72, 96, 71, 68, 84, 73, 56, 64, 62, 56, 57, 34, 56, 77, 42, 45, 51, 53, 46, 64, 60, 76, 82, 85, 60, 59, 65, 64, 53, 64, 48, 65, 68, 50, 63, 101, 83, 39, 67, 78, 87, 95, 75, 48, 69, 56, 101, 66, 65, 82, 57, 70, 121, 77, 60, 61, 79, 53, 61, 58, 49, 67, 81, 56, 51, 84, 52, 51, 56, 72, 64, 97, 64, 61, 67, 99, 51, 50, 65, 71, 72, 52, 66, 48, 75, 52, 51, 50, 83, 58, 65, 65, 42, 48, 62, 65, 90, 65, 50, 50, 50, 72, 71, 88, 51, 87, 51, 58, 43, 62, 73, 70, 67, 87, 57, 59, 77, 92, 56, 75, 56, 64, 58, 64, 56, 73, 68, 53, 67, 69, 55, 64, 107, 77, 64, 52, 75, 54, 83, 63, 55, 70, 66, 53, 70, 82, 52, 47, 48, 82, 68, 74, 77, 80, 50, 58, 42, 68, 62, 66, 70, 60, 73, 55, 57, 67, 52, 66, 76, 55, 59, 76, 99, 61, 104, 107, 54, 51, 95, 72, 54, 64, 64, 60, 91, 73, 60, 68, 90, 60, 65, 75, 67, 77, 60, 61, 52, 73, 60, 94, 59, 36, 55, 70, 76, 51, 78, 68, 85, 58, 63, 58, 69, 43, 63, 65, 57, 84, 58, 84, 65, 68, 49, 121, 64, 73, 72, 69, 49, 68, 66, 55, 62, 72, 56, 61, 72, 72, 52, 58, 69, 52, 53, 76, 60, 78, 61, 82, 66, 62, 76, 60, 66, 47, 59, 80, 45, 53, 58, 71, 56, 71, 71, 51, 69, 68, 57, 75, 71, 52, 88, 61, 56, 75, 73, 59, 71, 63, 73, 73, 53, 44, 65, 64, 67, 68, 52, 57, 58, 77, 59, 63, 73, 72, 72, 50, 71, 77, 55, 44, 65, 65, 66, 62, 60, 55, 46, 59, 63, 85, 79, 54, 59, 47, 73, 64, 60, 83, 59, 61, 57, 77, 72, 74, 59, 58, 66, 58, 71, 50, 65, 45, 57, 53, 60, 53, 67, 58, 89, 63, 64, 56, 55, 56, 49, 70, 68, 69, 80, 99, 61, 86, 65, 65, 86, 66, 85, 46, 59, 64, 57, 59, 80, 72, 110, 100, 78, 60, 78, 65, 50, 60, 55, 70, 84, 50, 94, 59, 47, 91, 65, 69, 45, 61, 61, 69, 62, 64, 62, 82, 58, 81, 99, 69, 58, 67, 57, 57, 74, 54, 72, 73, 58, 59, 102, 81, 59, 59, 51, 59, 78, 74, 57, 65, 76, 107, 77, 61, 69, 63, 39, 79, 52, 60, 76, 55, 58, 63, 71, 46, 70, 53, 50, 73, 67, 62, 56, 58, 71, 62, 70, 96, 49, 68, 57, 91, 64, 72, 61, 58, 65, 59, 58, 41, 64, 58, 68, 57, 70, 62, 65, 60, 53, 77, 46, 58, 67, 55, 64, 92, 65, 50, 61, 59, 62, 76, 55, 71, 61, 59, 58, 54, 72, 52, 86, 57, 54, 70, 72, 69, 69, 52, 62, 59, 64, 48, 59, 62, 61, 62, 66, 66, 77, 71, 52, 65, 67, 58, 67, 61, 77, 49, 125, 77, 57, 50, 65, 49, 52, 83, 64, 86, 46, 49, 44, 57, 70, 47, 60, 70, 78, 74, 57, 57, 59, 54, 78, 64, 74, 64, 62, 76, 58, 59, 70, 42, 54, 
62, 62, 88, 80, 57, 126, 74, 62, 63, 60, 59, 74, 73, 65, 49, 62, 90, 29, 79, 62, 57, 62, 59, 50, 60, 59, 80, 62, 60, 66, 63, 68, 55, 62, 54, 53, 52, 74, 52, 61, 64, 53, 54, 63, 55, 63, 67, 62, 60, 62, 54, 69, 71, 65, 78, 68, 66, 96, 85, 61, 59, 71, 45, 50, 78, 72, 64, 81, 62, 54, 70, 86, 68, 61, 67, 53, 63, 63, 56, 67, 48, 60, 57, 70, 46, 68, 73, 82, 72, 60, 63, 78, 93, 60, 57, 50, 54, 56, 70, 67, 53, 58, 61, 61, 77, 46, 68, 73, 55, 77, 50, 69, 55, 72, 61, 64, 57, 96, 61, 53, 74, 56, 39, 49, 55, 75, 67, 76, 58, 93, 79, 53, 59, 75, 58, 45, 68, 75, 55, 47, 87, 60, 49, 53, 63, 66, 92, 68, 76, 78, 57, 53, 61, 55, 60, 60, 86, 68, 64, 56, 76, 43, 83, 58, 65, 63, 54, 54, 62, 78, 71, 68, 65, 69, 54, 71, 59, 53, 57, 64, 75, 136, 87, 108, 49, 49, 62, 58, 62, 76, 69, 57, 60, 49, 50, 66, 69, 75, 58, 79, 54, 73, 84, 69, 69, 71, 57, 64, 58, 78, 59, 74, 79, 68, 68, 64, 99, 50, 65, 54, 75, 68, 83, 66, 55, 69, 62, 65, 54, 69, 56, 68, 65, 55, 74, 58, 65, 55, 61, 74, 64, 79, 58, 69, 85, 66, 70, 48, 59, 57, 61, 65, 66, 66, 59, 47, 56, 69, 65, 79, 80, 56, 63, 64, 66, 83, 63, 78, 75, 47, 78, 86, 70, 58, 68, 68, 47, 76, 59, 80, 116, 101, 65, 64, 54, 61, 61, 72, 61, 64, 50, 50, 68, 58, 88, 69, 54, 58, 55, 59, 68, 59, 69, 60, 50, 57, 72, 90, 67, 63, 75, 61, 67, 52, 76, 48, 57, 65, 55, 48, 55, 66, 72, 69, 70, 64, 79, 61, 88, 48, 68, 56, 78, 97, 60, 72, 63, 77, 76, 77, 65, 83, 75, 56, 62, 54, 69, 70, 48, 87, 66, 74, 67, 54, 56, 61, 67, 46, 63, 57, 98, 62, 55, 87, 55, 60, 62, 60, 62, 68, 78, 81, 66, 72, 59, 62, 56, 71, 61, 56, 49, 59, 57, 53, 53, 69, 76, 97, 50, 60, 87, 72, 63, 64, 78, 63, 84, 53, 99, 74, 65, 68, 84, 56, 64, 80, 51, 58, 77, 50, 64, 62, 68, 66, 129, 85, 55, 57, 67, 61, 59, 65, 83, 81, 81, 59, 76, 53, 79, 59, 58, 66, 74, 65, 54, 66, 75, 69, 76, 81, 57, 72, 78, 78, 58, 92, 92, 56, 75, 74, 60, 54, 60, 86, 68, 50, 56, 57, 55, 56, 68, 61, 79, 54, 51, 65, 48, 84, 99, 66, 58, 64, 41, 86, 72, 62, 59, 52, 103, 71, 59, 54, 79, 63, 54, 70, 50, 65, 52, 42, 58, 51, 67, 69, 62, 54, 60, 52, 59, 56, 76, 62, 62, 63, 72, 56, 58, 66, 57, 83, 56, 68, 61, 78, 57, 75, 57, 98, 60, 64, 72, 74, 57, 65, 88, 74, 88, 79, 67, 76, 60, 84, 71, 55, 63, 83, 60, 62, 65, 83, 67, 46, 46, 88, 67, 65, 90, 84, 70, 76, 58, 87, 73, 57, 52, 58, 58, 61, 61, 64, 56, 101, 61, 58, 56, 68, 55, 64, 55, 69, 49, 74, 56, 68, 64, 67, 55, 62, 82, 66, 74, 49, 65, 75, 57, 50, 54, 49, 76, 76, 38, 52, 57, 73, 88, 71, 66, 59, 68, 58, 65, 55, 54, 62, 59, 64, 64, 62, 81, 86, 78, 53, 51, 72, 50, 57, 43, 54, 103, 61, 59, 72, 74, 45, 73, 51, 88, 66, 63, 85, 63, 79, 76, 63, 68, 52, 62, 57, 60, 86, 71, 60, 69, 73, 72, 95, 61, 76, 69, 57, 53, 100, 58, 70, 55, 87, 62, 71, 55, 56, 81, 60, 68, 59, 46, 95, 81, 54, 74, 61, 53, 73, 44, 67, 72, 52, 83, 63, 66, 48, 56, 57, 49, 98, 65, 78, 66, 68, 65, 50, 69, 78, 65, 72, 66, 70, 84, 90, 63, 70, 131, 59, 48, 58, 73, 74, 81, 63, 53, 59, 46, 55, 53, 59, 79, 59, 63, 55, 62, 70, 68, 66, 108, 57, 72, 50, 62, 61, 55, 47, 44, 74, 49, 77, 82, 65, 59, 61, 62, 53, 42, 62, 75, 68, 51, 72, 55, 57, 105, 69, 85, 64, 63, 69, 58, 66, 101, 92, 77, 57, 72, 67, 64, 84, 54, 55, 66, 63, 63, 62, 76, 85, 60, 66, 75, 64, 83, 60, 76, 53, 59, 58, 61, 86, 57, 70, 72, 56, 71, 67, 69, 82, 55, 69, 97, 56, 59, 79, 91, 56, 60, 83, 83, 73, 62, 80, 69, 69, 55, 58, 59, 61, 63, 51, 52, 68, 62, 61, 62, 60, 61, 58, 92, 100, 59, 67, 45, 81, 68, 61, 60, 97, 65, 56, 69, 61, 56, 64, 57, 94, 64, 90, 67, 96, 63, 89, 60, 57, 58, 61, 59, 74, 51, 70, 47, 64, 67, 60, 70, 54, 62, 75, 73, 58, 74, 53, 48, 49, 56, 65, 58, 89, 61, 53, 63, 59, 51, 81, 73, 65, 67, 73, 66, 57, 56, 
61, 69, 56, 78, 67, 75, 53, 52, 76, 91, 49, 58, 90, 77, 73, 49, 58, 78, 56, 84, 63, 51, 61, 60, 59, 64, 65, 86, 57, 54, 59, 55, 91, 73, 59, 74, 58, 98, 69, 69, 60, 58, 53, 52, 44, 61, 73, 74, 65, 67, 48, 56, 67, 67, 55, 63, 61, 45, 51, 59, 45, 84, 53, 60, 66, 62, 52, 61, 86, 61, 74, 52, 57, 56, 65, 60, 68, 65, 63, 78, 61, 54, 76, 54, 65, 69, 67, 94, 66, 64, 79, 80, 73, 54, 73, 56, 55, 61, 59, 61, 60, 59, 86, 74, 129, 59, 69, 45, 56, 58, 59, 136, 56, 74, 59, 52, 67, 66, 58, 60, 60, 48, 53, 90, 55, 68, 63, 54, 66, 56, 87, 60, 60, 70, 52, 71, 69, 63, 45, 75, 60, 92, 44, 58, 106, 65, 84, 68, 61, 64, 66, 84, 56, 64, 59, 78, 47, 69, 76, 66, 53, 51, 76, 69, 72, 73, 62, 53, 49, 63, 69, 51, 62, 52, 59, 68, 65, 64, 67, 69, 57, 61, 65, 78, 47, 69, 61, 90, 57, 64, 39, 73, 67, 55, 54, 51, 75, 57, 65, 62, 61, 64, 81, 68, 38, 61, 103, 57, 64, 58, 88, 44, 61, 62, 79, 67, 67, 61, 62, 53, 49, 56, 49, 67, 55, 59, 80, 64, 69, 60, 56, 70, 62, 43, 59, 57, 67, 71, 71, 51, 60, 65, 67, 65, 60, 74, 65, 66, 44, 57, 88, 56, 78, 67, 73, 64, 80, 49, 58, 72, 61, 61, 61, 63, 76, 65, 74, 71, 62, 77, 63, 59, 61, 60, 64, 92, 77, 71, 59, 101, 73, 74, 65, 56, 86, 66, 71, 55, 81, 65, 50, 69, 62, 72, 56, 94, 69, 52, 69, 70, 57, 63, 54, 60, 58, 76, 52, 65, 78, 66, 64, 69, 67, 59, 52, 65, 59, 72, 71, 82, 122, 62, 104, 59, 61, 73, 59, 59, 74, 63, 69, 68, 44, 48, 67, 48, 67, 47, 48, 69, 65, 61, 69, 63, 65, 50, 61, 51, 69, 50, 78, 68, 66, 68, 63, 101, 66, 57, 100, 90, 53, 93, 61, 77, 61, 74, 71, 61, 69, 61, 64, 62, 64, 60, 59, 63, 68, 67, 89, 34, 89, 72, 57, 71, 81, 84, 78, 69, 71, 58, 70, 71, 64, 72, 49, 70, 75, 66, 61, 67, 68, 51, 53, 47, 94, 62, 81, 69, 82, 64, 58, 41, 74, 62, 45, 47, 77, 64, 77, 103, 106, 115, 61, 57, 55, 48, 73, 60, 73, 44, 56, 89, 68, 87, 62, 52, 59, 54, 87, 57, 55, 44, 53, 58, 59, 80, 58, 74, 83, 59, 61, 74, 59, 81, 62, 58, 85, 60, 51, 69, 65, 58, 55, 71, 60, 67, 68, 71, 58, 62, 69, 73, 67, 75, 51, 68, 74, 87, 41, 58, 87, 43, 60, 77, 62, 71, 44, 67, 84, 66, 71, 68, 54, 73, 56, 69, 58, 57, 67, 57, 95, 56, 75, 66, 61, 55, 68, 59, 65, 70, 48, 60, 74, 52, 58, 56, 78, 66, 66, 76, 66, 62, 67, 77, 57, 51, 63, 49, 80, 57, 60, 69, 67, 96, 55, 77, 55, 108, 61, 70, 59, 63, 56, 67, 49, 70, 72, 63, 64, 45, 57, 53, 60, 88, 80, 57, 66, 52, 73, 60, 66, 61, 62, 58, 61, 55, 78, 59, 74, 49, 66, 65, 63, 57, 80, 62, 69, 75, 79, 56, 59, 72, 102, 70, 46, 86, 57, 66, 60, 50, 49, 70, 76, 80, 57, 62, 54, 59, 62, 57, 85, 92, 80, 53, 62, 62, 67, 50, 49, 55, 66, 59, 80, 72, 61, 65, 62, 85, 68, 54, 73, 68, 62, 66, 66, 53, 61, 64, 66, 84, 53, 59, 73, 54, 67, 90, 66, 47, 63, 69, 45, 68, 62, 52, 65, 48, 50, 56, 73, 54, 47, 72, 104, 61, 71, 45, 80, 43, 93, 67, 90, 54, 65, 98, 72, 95, 70, 74, 65, 66, 85, 73, 58, 90, 70, 55, 64, 50, 56, 58, 64, 64, 62, 50, 90, 77, 63, 116, 67, 66, 122, 68, 76, 64, 53, 68, 50, 62, 79, 61, 70, 58, 77, 61, 63, 55, 65, 64, 49, 57, 64, 54, 47, 75, 74, 75, 66, 64, 63, 68, 97, 51, 69, 54, 87, 60, 55, 61, 61, 56, 73, 55, 67, 64, 77, 66, 65, 62, 65, 55, 51, 61, 68, 56, 68, 50, 69, 70, 52, 108, 62, 63, 65, 58, 68, 65, 57, 92, 70, 39, 55, 78, 63, 60, 82, 78, 63, 52, 102, 67, 62, 56, 46, 74, 67, 62, 58, 80, 62, 73, 80, 65, 64, 67, 101, 63, 73, 61, 70, 58, 115, 71, 59, 49, 91, 49, 50, 60, 58, 63, 51, 53, 60, 59, 83, 52, 63, 59, 63, 50, 89, 59, 52, 80, 60, 85, 67, 50, 64, 63, 56, 60, 113, 83, 66, 50, 62, 45, 45, 66, 93, 67, 73, 54, 49, 111, 50, 48, 56, 114, 53, 54, 66, 77, 55, 71, 71, 47, 68, 67, 68, 64, 66, 54, 63, 66, 62, 88, 49, 82, 48, 71, 51, 55, 57, 62, 48, 61, 71, 50, 61, 79, 90, 105, 46, 72, 74, 74, 59, 74, 83, 59, 
55, 60, 64, 66, 56, 82, 71, 87, 55, 62, 77, 53, 52, 79, 67, 60, 40, 63, 62, 62, 61, 59, 62, 66, 71, 53, 78, 68, 80, 57, 78, 64, 79, 92, 59, 61, 57, 57, 63, 68, 74, 53, 59, 65, 73, 73, 57, 54, 66, 74, 50, 101, 53, 55, 83, 64, 67, 62, 74, 77, 49, 60, 72, 73, 67, 72, 62, 56, 52, 62, 55, 68, 72, 60, 69, 66, 51, 57, 74, 73, 61, 55, 61, 65, 70, 60, 56, 64, 71, 57, 62, 64, 93, 63, 66, 85, 68, 61, 119, 77, 88, 52, 77, 58, 60, 64, 67, 55, 62, 62, 59, 53, 70, 65, 54, 82, 61, 76, 55, 81, 64, 44, 68, 73, 64, 87, 45, 53, 83, 76, 65, 58, 91, 62, 94, 81, 51, 47, 46, 50, 44, 55, 65, 70, 73, 56, 74, 66, 64, 91, 58, 95, 75, 53, 99, 64, 61, 66, 61, 46, 58, 38, 55, 50, 67, 50, 70, 63, 57, 71, 59, 85, 33, 61, 64, 48, 60, 69, 71, 68, 53, 53, 64, 56, 60, 60, 59, 43, 73, 78, 55, 57, 54, 68, 60, 71, 52, 59, 94, 54, 51, 60, 55, 78, 68, 51, 69, 59, 55, 61, 63, 70, 58, 51, 54, 55, 67, 60, 64, 96, 58, 49, 58, 49, 74, 56, 74, 64, 66, 82, 65, 69, 64, 66, 65, 60, 76, 54, 64, 89, 63, 71, 80, 49, 52, 88, 112, 89, 61, 47, 44, 62, 73, 100, 55, 66, 39, 53, 91, 58, 52, 62, 62, 67, 67, 84, 62, 82, 48, 66, 73, 67, 56, 67, 45, 69, 57, 70, 50, 71, 60, 67, 43, 88, 59, 66, 81, 79, 82, 61, 71, 61, 74, 103, 67, 39, 72, 62, 72, 84, 59, 64, 70, 47, 83, 71, 58, 50, 55, 77, 62, 58, 67, 58, 46, 56, 70, 57, 52, 72, 64, 65, 55, 62, 104, 41, 58, 64, 55, 83, 74, 60, 56, 59, 92, 80, 56, 66, 61, 70, 75, 58, 61, 67, 55, 65, 55, 58, 78, 69, 69, 55, 103, 66, 76, 59, 60, 67, 73, 107, 61, 79, 57, 60, 66, 64, 56, 62, 56, 54, 81, 64, 67, 75, 44, 55, 56, 58, 68, 86, 65, 71, 59, 63, 59, 60, 56, 76, 64, 66, 60, 57, 70, 70, 65, 69, 49, 57, 85, 61, 91, 63, 60, 65, 72, 78, 65, 61, 69, 66, 52, 75, 56, 56, 78, 94, 72, 51, 60, 70, 60, 93, 97, 70, 60, 78, 76, 82, 68, 90, 64, 65, 64, 90, 63, 72, 56, 69, 36, 86, 75, 71, 57, 99, 57, 55, 70, 89, 64, 61, 59, 79, 58, 78, 37, 44, 62, 49, 78, 62, 80, 66, 88, 60, 79, 93, 56, 51, 54, 62, 70, 63, 58, 67, 58, 92, 85, 48, 68, 83, 77, 42, 57, 66, 58, 70, 71, 70, 49, 66, 54, 51, 46, 64, 65, 60, 80, 68, 64, 69, 48, 51, 62, 108, 59, 79, 61, 74, 65, 55, 88, 49, 61, 80, 49, 54, 70, 56, 72, 69, 71, 57, 74, 46, 63, 66, 62, 78, 71, 69, 31, 64, 63, 51, 60, 72, 94, 62, 97, 73, 70, 59, 62, 48, 73, 80, 79, 55, 61, 60, 69, 69, 67, 55, 55, 93, 40, 56, 73, 61, 88, 40, 67, 44, 67, 75, 64, 64, 58, 73, 47, 44, 54, 78, 58, 57, 58, 60, 53, 59, 113, 51, 68, 54, 75, 69, 56, 59, 78, 77, 61, 64, 51, 65, 68, 71, 58, 60, 95, 69, 49, 48, 61, 60, 48, 58, 48, 62, 59, 80, 98, 57, 53, 49, 75, 57, 77, 52, 70, 83, 74, 61, 56, 78, 61, 56, 72, 55, 67, 49, 82, 61, 53, 56, 73, 74, 63, 54, 62, 50, 67, 106, 68, 67, 63, 64, 60, 86, 72, 66, 83, 69, 75, 89, 54, 74, 64, 61, 55, 54, 51, 71, 55, 60, 86, 68, 62, 115, 91, 74, 55, 47, 87, 72, 66, 60, 70, 57, 55, 75, 48, 64, 77, 58, 66, 55, 50, 71, 54, 69, 53, 75, 76, 56, 62, 66, 57, 61, 61, 79, 61, 75, 54, 63, 59, 76, 66, 59, 66, 59, 51, 77, 51, 83, 66, 51, 69, 58, 58, 53, 49, 54, 66, 56, 107, 59, 64, 84, 49, 55, 68, 65, 66, 88, 121, 46, 57, 58, 68, 63, 55, 57, 62, 51, 60, 53, 56, 62, 73, 83, 60, 83, 71, 57, 65, 69, 65, 59, 67, 79, 67, 58, 155, 60, 64, 52, 51, 73, 44, 67, 76, 57, 47, 66, 77, 67, 67, 63, 67, 70, 85, 65, 80, 60, 51, 64, 64, 79, 67, 45, 105, 61, 82, 62, 70, 52, 60, 76, 65, 63, 57, 67, 69, 49, 50, 69, 69, 80, 60, 71, 101, 74, 58, 50, 69, 56, 65, 45, 66, 77, 78, 48, 45, 74, 92, 76, 68, 102, 60, 84, 52, 58, 66, 66, 78, 66, 63, 69, 65, 55, 53, 61, 82, 67, 55, 89, 58, 70, 55, 76, 58, 55, 65, 63, 79, 72, 71, 66, 54, 86, 55, 62, 67, 60, 75, 51, 74, 69, 79, 40, 49, 80, 63, 51, 81, 83, 51, 60, 69, 66, 65, 84, 48, 67, 
72, 58, 69, 93, 64, 61, 79, 63, 77, 55, 77, 62, 79, 38, 62, 79, 78, 61, 54, 71, 49, 46, 49, 45, 63, 61, 63, 73, 66, 74, 59, 48, 58, 66, 53, 58, 45, 66, 62, 66, 62, 70, 75, 78, 65, 55, 59, 53, 80, 78, 62, 64, 102, 60, 63, 62, 91, 54, 76, 58, 64, 50, 71, 65, 56, 68, 63, 96, 56, 54, 46, 72, 56, 76, 65, 70, 63, 49, 89, 67, 84, 66, 76, 65, 63, 41, 69, 77, 93, 72, 71, 63, 66, 89, 58, 50, 87, 109, 71, 73, 57, 70, 76, 45, 96, 56, 49, 85, 53, 56, 73, 49, 58, 100, 49, 54, 60, 41, 67, 65, 56, 60, 68, 67, 84, 52, 48, 79, 74, 60, 55, 60, 75, 83, 70, 53, 58, 62, 79, 53, 64, 89, 65, 61, 60, 67, 64, 51, 45, 48, 75, 55, 98, 47, 64, 58, 53, 75, 64, 82, 61, 62, 59, 47, 58, 69, 47, 70, 63, 50, 59, 56, 78, 81, 53, 48, 49, 65, 64, 61, 58, 54, 70, 49, 59, 67, 63, 56, 63, 57, 50, 79, 81, 61, 82, 63, 55, 62, 53, 103, 66, 60, 96, 64, 51, 46, 67, 64, 86, 56, 65, 83, 64, 66, 64, 66, 73, 63, 74, 52, 52, 68, 64, 107, 53, 53, 66, 55, 72, 55, 58, 61, 65, 69, 70, 63, 71, 83, 71, 87, 56, 50, 47, 66, 49, 51, 60, 86, 67, 73, 52, 53, 81, 118, 77, 59, 63, 68, 60, 55, 70, 62, 46, 72, 76, 75, 70, 76, 64, 69, 67, 60, 74, 54, 59, 83, 64, 71, 49, 56, 61, 53, 51, 68, 73, 70, 77, 72, 86, 90, 49, 79, 51, 56, 67, 35, 63, 57, 79, 99, 58, 67, 47, 50, 66, 60, 68, 56, 78, 54, 56, 86, 70, 62, 64, 75, 68, 60, 75, 146, 59, 69, 66, 83, 59, 64, 93, 60, 60, 52, 75, 76, 52, 69, 59, 62, 53, 72, 63, 58, 57, 52, 61, 66, 55, 60, 60, 60, 44, 44, 65, 101, 49, 69, 69, 92, 76, 63, 45, 59, 48, 57, 98, 68, 80, 64, 64, 52, 83, 55, 66, 73, 47, 71, 61, 58, 84, 66, 52, 78, 76, 54, 49, 93, 95, 64, 75, 61, 77, 58, 54, 55, 74, 85, 68, 81, 47, 91, 70, 69, 50, 59, 81, 50, 56, 80, 58, 74, 57, 79, 61, 46, 63, 51, 54, 61, 57, 55, 60, 73, 56, 54, 54, 90, 70, 58, 81, 55, 73, 58, 73, 62, 60, 117, 56, 49, 62, 61, 70, 61, 64, 71, 67, 66, 91, 79, 69, 59, 64, 77, 72, 66, 65, 57, 83, 60, 57, 56, 69, 62, 53, 70, 59, 70, 93, 62, 44, 50, 48, 51, 45, 56, 55, 92, 109, 44, 49, 59, 77, 69, 68, 55, 75, 55, 72, 53, 64, 60, 68, 54, 70, 56, 50, 63, 53, 60, 92, 65, 77, 124, 57, 61, 44, 57, 60, 44, 61, 49, 81, 72, 57, 56, 71, 80, 55, 63, 78, 71, 58, 85, 51, 57, 53, 68, 50, 56, 66, 80, 76, 69, 67, 56, 65, 70, 71, 72, 47, 56, 63, 61, 80, 54, 57, 65, 58, 68, 59, 49, 65, 71, 61, 75, 65, 63, 73, 83, 58, 54, 85, 65, 50, 78, 95, 42, 58, 44, 71, 79, 48, 60, 71, 44, 46, 76, 67, 61, 76, 86, 59, 61, 59, 66, 57, 85, 64, 66, 66, 106, 62, 51, 57, 63, 65, 66, 56, 44, 59, 64, 75, 62, 68, 69, 69, 73, 86, 79, 49, 65, 65, 59, 94, 83, 62, 74, 48, 95, 60, 70, 61, 69, 72, 65, 57, 84, 91, 65, 72, 61, 85, 63, 67, 79, 50, 59, 65, 72, 74, 57, 55, 72, 55, 58, 78, 101, 67, 65, 76, 51, 67, 50, 84, 56, 65, 65, 59, 65, 70, 66, 66, 70, 75, 53, 68, 76, 72, 67, 72, 61, 61, 53, 41, 67, 62, 73, 56, 69, 98, 73, 87, 62, 60, 64, 49, 67, 60, 77, 85, 63, 68, 56, 69, 60, 60, 79, 54, 56, 64, 69, 73, 67, 75, 51, 82, 52, 58, 64, 63, 61, 62, 63, 55, 51, 43, 71, 60, 83, 55, 62, 71, 68, 99, 64, 80, 63, 76, 60, 61, 59, 117, 65, 64, 70, 65, 67, 52, 78, 74, 52, 96, 60, 92, 60, 60, 56, 61, 58, 59, 76, 51, 73, 98, 48, 69, 70, 57, 69, 51, 49, 50, 59, 61, 76, 63, 93, 60, 84, 78, 56, 70, 50, 78, 63, 48, 55, 59, 68, 62, 57, 54, 61, 68, 101, 59, 61, 73, 74, 66, 75, 63, 57, 67, 69, 60, 97, 50, 52, 51, 57, 62, 61, 72, 71, 52, 72, 64, 60, 67, 104, 99, 89, 59, 58, 65, 74, 53, 69, 48, 60, 73, 75, 66, 63, 71, 66, 54, 53, 59, 61, 66, 59, 64, 67, 47, 63, 52, 64, 64, 67, 64, 86, 64, 54, 58, 60, 61, 83, 39, 51, 62, 60, 65, 65, 64, 54, 63, 64, 63, 58, 74, 51, 46, 46, 61, 56, 123, 63, 66, 68, 62, 49, 75, 55, 72, 88, 123, 62, 61, 59, 78, 57, 63, 81, 79, 95, 
57, 70, 78, 75, 69, 91, 51, 48, 48, 71, 90, 56, 72, 54, 52, 78, 74, 56, 158, 62, 60, 53, 49, 69, 61, 47, 57, 52, 71, 61, 72, 59, 69, 50, 54, 67, 90, 56, 94, 67, 52, 80, 69, 72, 74, 65, 102, 81, 59, 58, 53, 58, 55, 47, 75, 92, 38, 75, 63, 62, 71, 71, 65, 51, 57, 68, 85, 82, 67, 58, 77, 65, 57, 60, 45, 70, 88, 62, 74, 67, 69, 59, 63, 86, 60, 91, 61, 54, 70, 97, 58, 63, 62, 84, 62, 47, 56, 71, 84, 57, 54, 92, 53, 71, 56, 78, 60, 59, 68, 56, 94, 53, 71, 48, 43, 85, 77, 62, 87, 72, 65, 56, 67, 63, 81, 69, 76, 79, 60, 70, 77, 67, 75, 65, 53, 65, 71, 78, 56, 40, 54, 59, 63, 78, 57, 65, 83, 100, 40, 51, 51, 50, 84, 55, 76, 67, 72, 65, 60, 73, 64, 61, 61, 63, 75, 69, 75, 47, 51, 58, 59, 44, 75, 46, 73, 55, 77, 56, 67, 60, 46, 70, 46, 52, 51, 77, 69, 75, 74, 101, 70, 51, 99, 51, 61, 65, 81, 59, 54, 65, 60, 63, 88, 66, 54, 61, 75, 82, 50, 55, 50, 62, 73, 66, 67, 74, 61, 67, 44, 63, 95, 48, 68, 62, 60, 69, 77, 72, 76, 59, 65, 47, 77, 103, 83, 67, 55, 110, 60, 68, 49, 53, 62, 67, 92, 56, 55, 75, 59, 44, 53, 63, 55, 56, 63, 53, 65, 63, 50, 59, 59, 55, 74, 53, 97, 64, 58, 45, 72, 61, 96, 76, 77, 71, 65, 93, 51, 83, 91, 77, 50, 85, 65, 59, 69, 97, 60, 70, 45, 49, 48, 62, 76, 62, 66, 69, 67, 51, 60, 70, 71, 45, 57, 61, 62, 74, 56, 77, 95, 38, 47, 61, 67, 62, 54, 75, 72, 64, 66, 55, 65, 58, 60, 72, 62, 72, 72, 63, 73, 39, 51, 81, 71, 53, 65, 74, 56, 73, 61, 101, 74, 83, 59, 82, 66, 53, 60, 63, 70, 62, 54, 64, 59, 50, 64, 54, 73, 49, 60, 41, 63, 66, 62, 108, 59, 67, 62, 79, 57, 60, 66, 60, 77, 68, 78, 53, 56, 63, 54, 75, 64, 52, 57, 53, 63, 59, 53, 55, 72, 70, 41, 55, 58, 115, 59, 56, 56, 64, 69, 59, 70, 70, 67, 54, 56, 72, 52, 65, 78, 65, 66, 68, 59, 79, 74, 62, 70, 80, 49, 64, 81, 63, 57, 77, 70, 57, 76, 49, 87, 82, 63, 82, 67, 58, 88, 55, 61, 50, 70, 79, 52, 82, 58, 67, 76, 62, 71, 62, 52, 61, 71, 103, 74, 75, 50, 72, 45, 66, 63, 142, 46, 62, 64, 49, 70, 72, 79, 47, 89, 61, 51, 50, 50, 55, 66, 83, 62, 74, 54, 66, 58, 46, 69, 70, 62, 56, 72, 69, 62, 46, 87, 75, 78, 50, 66, 87, 77, 73, 59, 82, 55, 76, 65, 63, 63, 66, 55, 82, 53, 56, 83, 69, 54, 66, 65, 58, 60, 55, 61, 90, 68, 58, 46, 75, 77, 53, 43, 67, 66, 86, 65, 67, 73, 86, 63, 55, 77, 95, 73, 57, 68, 68, 57, 67, 54, 71, 65, 55, 69, 86, 73, 73, 77, 51, 51, 69, 64, 68, 63, 61, 55, 39, 59, 70, 82, 67, 59, 52, 49, 50, 51, 49, 72, 114, 58, 59, 94, 58, 44, 56, 65, 68, 59, 63, 89, 87, 57, 65, 55, 79, 44, 62, 67, 51, 70, 61, 46, 64, 85, 73, 58, 52, 56, 58, 52, 72, 69, 83, 69, 65, 91, 60, 72, 56, 74, 56, 65, 41, 71, 63, 79, 60, 81, 52, 66, 77, 68, 60, 47, 86, 64, 56, 59, 104, 65, 68, 61, 86, 58, 76, 60, 80, 70, 94, 54, 72, 84, 64, 60, 57, 58, 75, 68, 49, 58, 80, 89, 62, 68, 78, 70, 68, 50, 70, 96, 56, 64, 90, 73, 55, 64, 70, 71, 51, 71, 66, 51, 29, 62, 71, 85, 73, 51, 61, 44, 69, 69, 62, 75, 66, 75, 46, 59, 72, 76, 79, 65, 61, 66, 64, 58, 78, 84, 52, 74, 62, 73, 81, 66, 73, 72, 73, 57, 66, 55, 52, 80, 83, 65, 78, 70, 56, 80, 65, 84, 71, 83, 82, 60, 65, 58, 83, 73, 87, 56, 67, 66, 64, 57, 54, 61, 53, 62, 58, 113, 72, 59, 78, 63, 65, 101, 70, 68, 69, 57, 82, 56, 69, 75, 79, 55, 60, 55, 48, 59, 60, 51, 72, 62, 61, 61, 66, 60, 65, 60, 62, 80, 65, 49, 68, 67, 58, 90, 80, 63, 56, 64, 65, 71, 53, 72, 51, 56, 86, 64, 62, 52, 76, 57, 51, 50, 59, 68, 43, 84, 56, 43, 82, 94, 60, 62, 56, 61, 49, 67, 81, 76, 50, 106, 67, 73, 62, 65, 39, 56, 63, 95, 50, 61, 64, 63, 57, 63, 57, 47, 55, 69, 51, 77, 62, 64, 57, 94, 73, 90, 79, 59, 75, 56, 51, 61, 68, 61, 69, 83, 66, 58, 52, 52, 73, 87, 56, 59, 62, 75, 33, 58, 79, 62, 50, 92, 53, 61, 72, 82, 66, 44, 62, 82, 58, 53, 65, 62, 62, 
65, 54, 77, 71, 85, 45, 117, 69, 77, 64, 69, 67, 68, 62, 65, 75, 75, 68, 76, 53, 61, 73, 70, 79, 63, 72, 74, 61, 57, 78, 54, 63, 63, 86, 64, 75, 52, 114, 59, 58, 73, 69, 55, 58, 70, 48, 96, 51, 64, 86, 46, 66, 61, 66, 66, 55, 93, 62, 60, 77, 59, 56, 46, 65, 67, 84, 71, 54, 50, 72, 65, 77, 87, 70, 80, 87, 80, 61, 56, 46, 83, 47, 59, 45, 64, 66, 70, 95, 57, 73, 54, 91, 43, 47, 54, 55, 51, 74, 72, 59, 44, 76, 54, 45, 76, 67, 62, 62, 51, 47, 47, 68, 76, 48, 63, 57, 53, 74, 59, 48, 68, 77, 64, 63, 56, 66, 69, 64, 62, 67, 108, 59, 76, 52, 72, 65, 51, 55, 66, 70, 47, 67, 70, 68, 61, 83, 70, 59, 57, 74, 53, 75, 72, 64, 70, 127, 52, 52, 139, 95, 46, 55, 79, 57, 60, 60, 81, 66, 57, 62, 56, 98, 48, 67, 69, 67, 85, 59, 73, 66, 101, 69, 58, 53, 78, 47, 56, 47, 55, 66, 65, 58, 76, 55, 51, 64, 72, 61, 83, 58, 67, 51, 50, 52, 63, 51, 52, 80, 53, 39, 72, 64, 75, 66, 56, 96, 45, 88, 58, 67, 57, 74, 57, 61, 72, 78, 63, 46, 76, 71, 59, 56, 56, 86, 68, 58, 67, 58, 115, 93, 55, 106, 53, 62, 50, 61, 72, 75, 60, 52, 85, 68, 56, 58, 49, 107, 84, 54, 74, 57, 63, 50, 45, 54, 85, 61, 57, 55, 67, 64, 97, 65, 66, 59, 54, 69, 63, 59, 56, 65, 58, 51, 70, 64, 59, 93, 65, 78, 54, 55, 83, 43, 66, 61, 81, 88, 81, 59, 73, 66, 41, 61, 86, 64, 81, 73, 69, 74, 46, 51, 62, 78, 64, 59, 50, 63, 72, 52, 44, 71, 53, 87, 84, 66, 60, 68, 72, 58, 56, 46, 84, 89, 116, 51, 55, 65, 81, 60, 36, 56, 63, 47, 98, 67, 59, 68, 59, 63, 71, 73, 49, 95, 59, 73, 50, 71, 62, 60, 81, 51, 96, 49, 32, 56, 72, 70, 74, 61, 58, 53, 73, 67, 55, 63, 70, 63, 53, 71, 68, 67, 66, 67, 64, 64, 92, 57, 74, 58, 68, 52, 51, 65, 54, 63, 73, 51, 53, 67, 70, 63, 45, 71, 61, 52, 77, 93, 63, 69, 78, 47, 94, 69, 67, 63, 62, 81, 72, 64, 63, 63, 73, 76, 61, 59, 71, 66, 59, 64, 52, 53, 58, 67, 84, 56, 64, 75, 51, 52, 55, 64, 74, 63, 71, 62, 50, 68, 46, 63, 58, 57, 105, 76, 67, 73, 53, 68, 59, 60, 69, 50, 63, 59, 54, 59, 61, 94, 55, 53, 63, 61, 73, 65, 54, 69, 78, 53, 88, 61, 60, 67, 73, 56, 64, 49, 62, 62, 55, 79, 65, 66, 52, 65, 106, 59, 68, 62, 63, 61, 59, 63, 58, 83, 80, 52, 61, 49, 101, 65, 66, 58, 62, 58, 51, 67, 48, 65, 65, 59, 66, 60, 66, 65, 68, 59, 78, 66, 63, 50, 64, 63, 52, 63, 72, 97, 54, 60, 63, 73, 66, 47, 70, 74, 47, 61, 61, 99, 80, 63, 51, 85, 84, 69, 55, 80, 45, 59, 64, 59, 67, 68, 67, 46, 73, 66, 51, 59, 57, 69, 63, 54, 72, 66, 74, 71, 51, 52, 56, 68, 76, 66, 53, 66, 94, 69, 64, 48, 61, 45, 73, 63, 53, 82, 65, 68, 59, 105, 68, 74, 62, 127, 57, 77, 46, 57, 60, 70, 77, 75, 84, 73, 68, 111, 78, 80, 64, 39, 67, 64, 63, 51, 72, 51, 70, 98, 65, 54, 64, 72, 67, 58, 58, 62, 82, 73, 62, 87, 71, 60, 76, 74, 64, 91, 82, 67, 75, 61, 58, 81, 84, 60, 73, 63, 75, 64, 61, 63, 72, 87, 72, 76, 90, 58, 63, 98, 52, 100, 49, 54, 56, 83, 63, 69, 58, 66, 60, 64, 65, 85, 76, 58, 58, 70, 89, 66, 55, 82, 57, 47, 63, 81, 49, 69, 48, 66, 57, 63, 59, 61, 69, 73, 66, 64, 84, 94, 63, 58, 70, 66, 67, 72, 63, 61, 65, 99, 55, 88, 75, 52, 62, 55, 63, 71, 58, 59, 69, 63, 65, 68, 62, 99, 58, 76, 72, 55, 62, 59, 61, 59, 61, 70, 69, 57, 59, 80, 64, 64, 66, 73, 51, 69, 75, 64, 64, 68, 108, 62, 56, 68, 84, 81, 58, 53, 49, 74, 61, 54, 64, 53, 63, 59, 68, 56, 69, 65, 66, 59, 84, 66, 63, 106, 65, 46, 87, 51, 66, 68, 57, 74, 61, 57, 60, 66, 62, 85, 67, 87, 42, 64, 56, 98, 57, 58, 62, 66, 54, 67, 71, 84, 40, 68, 46, 59, 65, 56, 56, 86, 60, 63, 101, 58, 74, 54, 69, 50, 74, 60, 49, 63, 69, 80, 67, 58, 64, 57, 62, 69, 61, 64, 83, 58, 51, 93, 89, 64, 54, 60, 90, 57, 70, 76, 54, 55, 59, 75, 58, 54, 56, 58, 46, 84, 70, 57, 53, 100, 79, 71, 56, 72, 73, 56, 65, 88, 54, 60, 52, 67, 53, 71, 52, 86, 79, 44, 82, 
51, 93, 66, 58, 56, 58, 64, 58, 78, 59, 70, 75, 84, 75, 51, 74, 58, 77, 51, 47, 67, 75, 83, 64, 61, 48, 70, 56, 64, 91, 85, 69, 69, 61, 62, 73, 74, 80, 70, 61, 112, 70, 78, 70, 59, 55, 49, 76, 51, 58, 68, 70, 58, 62, 49, 74, 54, 61, 60, 54, 91, 54, 61, 74, 86, 58, 51, 68, 58, 72, 69, 64, 66, 95, 65, 95, 63, 75, 61, 66, 71, 67, 86, 70, 63, 70, 60, 58, 51, 61, 88, 98, 70, 81, 67, 62, 64, 72, 62, 86, 60, 51, 73, 63, 74, 73, 65, 63, 63, 56, 85, 63, 56, 61, 62, 55, 60, 87, 67, 58, 59, 89, 53, 67, 84, 59, 65, 57, 71, 55, 61, 55, 79, 58, 83, 102, 71, 70, 55, 55, 70, 73, 56, 68, 85, 56, 65, 93, 59, 59, 57, 63, 82, 95, 55, 57, 73, 64, 61, 74, 77, 67, 55, 63, 57, 74, 66, 93, 65, 60, 71, 61, 60, 43, 56, 61, 45, 49, 110, 70, 73, 61, 55, 64, 50, 61, 62, 62, 76, 43, 101, 63, 57, 59, 55, 39, 58, 61, 54, 41, 57, 46, 65, 61, 77, 62, 59, 52, 47, 51, 73, 54, 54, 66, 63, 83, 65, 86, 62, 57, 47, 61, 56, 68, 73, 58, 75, 59, 75, 54, 61, 71, 84, 60, 63, 67, 82, 52, 80, 50, 47, 70, 89, 76, 67, 62, 58, 60, 64, 49, 71, 74, 54, 69, 67, 78, 52, 55, 85, 49, 58, 104, 72, 50, 73, 55, 86, 51, 57, 88, 109, 115, 61, 85, 61, 70, 52, 57, 50, 80, 72, 73, 66, 63, 69, 46, 52, 62, 68, 59, 67, 71, 74, 76, 67, 57, 101, 89, 53, 131, 70, 66, 80, 59, 53, 75, 56, 59, 64, 47, 55, 64, 48, 56, 65, 54, 70, 95, 67, 59, 81, 52, 72, 62, 59, 90, 65, 92, 44, 71, 58, 73, 71, 72, 71, 63, 35, 55, 66, 82, 65, 72, 68, 54, 49, 58, 60, 77, 40, 58, 62, 99, 73, 72, 49, 42, 53, 60, 55, 69, 87, 68, 52, 63, 74, 52, 89, 54, 64, 62, 57, 68, 66, 53, 63, 63, 53, 60, 82, 79, 75, 65, 77, 61, 67, 54, 81, 69, 65, 73, 47, 90, 63, 81, 65, 67, 84, 58, 53, 63, 71, 62, 69, 55, 67, 60, 52, 66, 78, 57, 71, 78, 86, 54, 64, 51, 52, 68, 71, 62, 55, 57, 61, 74, 71, 58, 69, 58, 92, 73, 53, 65, 60, 93, 70, 71, 67, 86, 50, 64, 57, 63, 44, 66, 70, 55, 72, 80, 67, 55, 71, 44, 64, 55, 44, 71, 71, 54, 77, 73, 63, 62, 87, 104, 70, 63, 46, 65, 57, 74, 75, 52, 64, 97, 60, 64, 55, 77, 57, 52, 55, 63, 78, 86, 61, 55, 45, 72, 103, 54, 63, 80, 69, 69, 70, 63, 80, 49, 67, 67, 56, 83, 49, 61, 70, 66, 55, 71, 66, 60, 59, 66, 64, 60, 102, 58, 68, 78, 90, 69, 87, 77, 57, 47, 65, 90, 48, 73, 77, 62, 51, 45, 45, 61, 49, 62, 65, 56, 84, 65, 53, 58, 52, 56, 55, 55, 67, 81, 100, 57, 61, 98, 62, 71, 77, 50, 86, 61, 83, 61, 49, 57, 86, 65, 49, 93, 47, 55, 43, 45, 60, 72, 85, 56, 61, 59, 60, 65, 46, 51, 72, 60, 59, 55, 66, 85, 68, 76, 65, 46, 62, 70, 72, 71, 79, 65, 73, 64, 57, 64, 56, 49, 59, 54, 71, 70, 68, 66, 54, 71, 71, 67, 87, 46, 65, 58, 52, 59, 52, 59, 103, 51, 45, 55, 71, 70, 45, 59, 64, 67, 57, 69, 55, 66, 54, 49, 67, 60, 58, 57, 72, 43, 51, 57, 91, 54, 47, 56, 71, 74, 57, 86, 53, 81, 64, 72, 68, 75, 56, 90, 92, 84, 66, 63, 67, 77, 62, 64, 60, 72, 48, 90, 51, 61, 107, 59, 74, 61, 59, 53, 60, 63, 67, 63, 52, 71, 65, 66, 93, 55, 74, 49, 57, 59, 59, 74, 71, 60, 100, 50, 70, 70, 76, 67, 61, 79, 65, 89, 82, 81, 69, 54, 79, 61, 64, 50, 74, 61, 64, 72, 91, 72, 55, 64, 56, 76, 63, 74, 65, 59, 102, 54, 79, 78, 70, 48, 82, 56, 68, 68, 131, 71, 66, 41, 60, 73, 52, 61, 60, 72, 67, 74, 49, 60, 73, 74, 61, 72, 64, 50, 62, 66, 50, 45, 56, 68, 45, 59, 80, 41, 75, 53, 77, 60, 56, 57, 88, 77, 55, 91, 68, 51, 79, 60, 71, 91, 61, 62, 61, 83, 48, 67, 61, 57, 70, 51, 68, 63, 66, 52, 78, 58, 58, 75, 66, 81, 55, 71, 68, 65, 66, 65, 60, 52, 94, 61, 61, 59, 87, 66, 55, 65, 47, 89, 60, 42, 70, 83, 77, 64, 63, 89, 53, 65, 76, 78, 59, 64, 65, 60, 62, 80, 79, 64, 83, 60, 61, 43, 73, 63, 63, 70, 78, 69, 63, 47, 74, 71, 60, 74, 63, 82, 83, 66, 67, 67, 55, 62, 51, 55, 60, 62, 46, 64, 57, 62, 50, 72, 71, 73, 68, 83, 61, 77, 
62, 68, 57, 87, 58, 78, 65, 59, 53, 55, 63, 101, 57, 62, 72, 71, 70, 46, 55, 78, 74, 58, 69, 52, 49, 52, 64, 74, 50, 51, 70, 57, 55, 62, 72, 76, 60, 76, 59, 88, 61, 64, 47, 49, 47, 65, 61, 72, 55, 75, 76, 51, 71, 54, 70, 54, 62, 70, 71, 58, 53, 61, 57, 50, 64, 58, 67, 66, 68, 59, 62, 65, 91, 59, 56, 73, 48, 53, 62, 55, 70, 85, 92, 86, 45, 60, 56, 60, 31, 57, 78, 78, 81, 59, 80, 64, 59, 74, 71, 65, 53, 76, 60, 95, 74, 59, 59, 74, 64, 68, 47, 70, 48, 72, 64, 61, 47, 58, 75, 68, 74, 68, 58, 78, 62, 77, 74, 68, 73, 63, 60, 48, 53, 62, 70, 57, 58, 67, 77, 72, 55, 83, 74, 66, 54, 76, 85, 60, 66, 67, 53, 49, 65, 67, 61, 60, 71, 78, 73, 54, 92, 49, 87, 83, 47, 78, 75, 51, 49, 65, 75, 80, 72, 70, 62, 70, 76, 45, 59, 52, 57, 58, 78, 56, 66, 64, 58, 75, 60, 56, 73, 70, 69, 58, 51, 68, 61, 63, 69, 69, 78, 60, 70, 61, 44, 65, 76, 84, 62, 69, 54, 61, 55, 62, 62, 59, 54, 73, 59, 48, 64, 55, 53, 77, 73, 82, 53, 150, 70, 58, 51, 65, 56, 84, 73, 73, 79, 71, 61, 56, 75, 57, 70, 68, 61, 65, 43, 70, 75, 70, 54, 58, 79, 73, 68, 79, 62, 76, 81, 76, 69, 62, 61, 59, 70, 72, 60, 81, 66, 67, 70, 78, 63, 75, 50, 71, 77, 70, 63, 65, 80, 50, 57, 66, 77, 58, 72, 55, 52, 69, 81, 63, 88, 69, 74, 67, 73, 57, 64, 60, 94, 64, 77, 70, 68, 60, 61, 68, 62, 72, 84, 76, 70, 63, 78, 77, 51, 61, 61, 57, 63, 71, 65, 58, 80, 43, 69, 57, 59, 69, 75, 78, 75, 55, 57, 78, 47, 63, 54, 67, 46, 72, 67, 52, 76, 60, 69, 51, 60, 77, 51, 58, 51, 65, 63, 50, 100, 59, 84, 60, 69, 66, 62, 57, 69, 80, 56, 57, 76, 75, 94, 89, 68, 63, 47, 62, 76, 60, 78, 77, 48, 91, 50, 52, 50, 78, 66, 73, 76, 61, 74, 76, 62, 84, 77, 71, 45, 57, 43, 60, 82, 64, 60, 47, 81, 76, 59, 81, 51, 71, 99, 62, 80, 75, 49, 62, 75, 71, 94, 51, 77, 110, 69, 58, 71, 92, 105, 79, 61, 64, 68, 72, 70, 65, 66, 58, 64, 69, 66, 109, 57, 73, 73, 81, 95, 71, 56, 65, 73, 54, 68, 70, 79, 62, 72, 62, 64, 65, 61, 84, 59, 68, 89, 73, 63, 60, 51, 71, 101, 59, 70, 59, 106, 77, 60, 57, 51, 52, 59, 114, 70, 80, 41, 76, 73, 77, 63, 46, 63, 65, 52, 69, 40, 78, 56, 62, 74, 90, 58, 74, 77, 75, 58, 63, 60, 52, 65, 67, 57, 70, 76, 61, 60, 66, 59, 64, 53, 52, 62, 65, 64, 73, 70, 53, 85, 60, 66, 58, 68, 71, 70, 56, 40, 66, 76, 65, 49, 63, 52, 78, 67, 58, 65, 64, 73, 79, 56, 61, 84, 59, 60, 68, 59, 68, 79, 57, 64, 84, 73, 62, 66, 58, 50, 61, 72, 67, 68, 59, 64, 68, 71, 69, 72, 84, 50, 52, 68, 68, 60, 77, 55, 56, 70, 59, 53, 67, 78, 59, 82, 67, 83, 66, 66, 70, 53, 87, 76, 68, 50, 90, 47, 52, 64, 68, 63, 46, 73, 59, 42, 70, 53, 52, 57, 64, 52, 57, 67, 91, 59, 84, 74, 61, 78, 76, 42, 56, 94, 65, 63, 47, 75, 53, 51, 81, 58, 68, 80, 81, 50, 45, 92, 85, 60, 60, 69, 75, 52, 85, 68, 63, 48, 66, 86, 52, 85, 69, 59, 68, 65, 59, 56, 64, 62, 73, 81, 58, 56, 77, 65, 78, 59, 50, 46, 62, 58, 63, 58, 62, 53, 71, 71, 68, 65, 69, 66, 65, 44, 63, 88, 69, 77, 64, 56, 80, 49, 59, 60, 46, 72, 63, 65, 56, 53, 49, 69, 72, 47, 61, 48, 82, 73, 76, 76, 66, 73, 48, 53, 53, 57, 87, 69, 70, 65, 71, 84, 65, 75, 70, 74, 65, 61, 71, 54, 76, 89, 63, 56, 70, 53, 87, 86, 78, 54, 58, 69, 56, 65, 63, 83, 42, 68, 70, 52, 65, 65, 44, 59, 63, 64, 54, 92, 58, 71, 61, 68, 61, 67, 68, 67, 66, 78, 69, 68, 94, 66, 60, 64, 70, 64, 59, 75, 47, 57, 60, 69, 63, 57, 76, 62, 52, 59, 68, 76, 60, 74, 58, 60, 65, 56, 166, 75, 76, 67, 57, 60, 55, 61, 51, 72, 64, 61, 60, 63, 70, 72, 71, 74, 64, 66, 74, 62, 69, 82, 72, 72, 69, 47, 64, 89, 69, 61, 59, 66, 79, 75, 57, 72, 52, 88, 78, 47, 70, 70, 61, 60, 68, 48, 74, 50, 54, 62, 62, 93, 63, 55, 74, 62, 56, 66, 70, 76, 61, 63, 62, 76, 86, 56, 74, 67, 67, 61, 61, 61, 77, 58, 94, 57, 51, 66, 53, 72, 67, 62, 67, 51, 73, 
56, 48, 63, 85, 71, 60, 53, 58, 90, 58, 49, 64, 83, 68, 81, 54, 64, 58, 62, 54, 57, 84, 71, 53, 74, 74, 66, 79, 52, 59, 65, 62, 69, 71, 63, 56, 74, 78, 55, 90, 56, 76, 85, 83, 60, 53, 64, 62, 63, 78, 57, 65, 56, 81, 65, 51, 56, 63, 77, 66, 56, 71, 81, 70, 79, 70, 69, 105, 67, 51, 61, 70, 68, 94, 73, 77, 45, 77, 62, 82, 74, 63, 68, 67, 82, 70, 73, 63, 57, 83, 58, 53, 72, 73, 55, 63, 79, 71, 75, 64, 64, 54, 64, 49, 38, 66, 63, 60, 72, 53, 52, 66, 59, 77, 93, 70, 63, 74, 70, 45, 58, 56, 68, 71, 68, 59, 70, 65, 59, 51, 58, 64, 84, 86, 64, 54, 56, 84, 62, 50, 55, 94, 56, 58, 76, 67, 71, 76, 53, 101, 49, 71, 72, 94, 75, 60, 58, 66, 65, 48, 69, 55, 56, 64, 84, 71, 74, 55, 67, 84, 50, 69, 66, 59, 73, 50, 58, 63, 53, 60, 74, 47, 80, 64, 51, 71, 53, 60, 78, 62, 71, 63, 70, 53, 49, 63, 66, 57, 65, 46, 56, 55, 63, 76, 77, 66, 59, 63, 54, 63, 78, 49, 74, 74, 52, 59, 53, 66, 67, 72, 55, 104, 58, 72, 62, 64, 70, 84, 103, 66, 72, 63, 68, 57, 68, 72, 65, 57, 53, 61, 56, 78, 69, 52, 68, 66, 73, 41, 75, 69, 59, 69, 95, 66, 54, 48, 84, 74, 49, 71, 52, 104, 77, 70, 64, 66, 70, 59, 74, 76, 51, 49, 60, 59, 63, 83, 75, 69, 70, 110, 64, 82, 82, 77, 77, 73, 77, 58, 60, 71, 50, 60, 68, 47, 83, 84, 61, 56, 52, 66, 72, 95, 88, 81, 54, 51, 52, 67, 90, 71, 58, 59, 69, 53, 67, 33, 60, 48, 57, 47, 77, 93, 73, 44, 53, 75, 72, 55, 61, 61, 48, 72, 69, 69, 60, 61, 51, 67, 63, 72, 62, 70, 51, 61, 58, 53, 84, 54, 82, 56, 65, 76, 57, 71, 59, 60, 68, 58, 65, 59, 52, 68, 66, 82, 67, 149, 80, 55, 60, 66, 72, 78, 66, 71, 53, 44, 45, 60, 65, 73, 72, 41, 49, 64, 41, 74, 82, 74, 63, 67, 62, 72, 59, 74, 66, 61, 73, 64, 60, 88, 35, 52, 82, 90, 84, 81, 92, 60, 56, 79, 70, 60, 62, 77, 56, 81, 62, 60, 68, 51, 83, 68, 80, 66, 58, 55, 53, 87, 64, 71, 85, 73, 61, 48, 79, 78, 69, 66, 76, 49, 82, 56, 69, 62, 50, 65, 51, 79, 67, 75, 61, 50, 77, 61, 54, 65, 58, 57, 51, 50, 61, 92, 59, 53, 69, 60, 74, 86, 79, 62, 49, 74, 67, 61, 48, 73, 69, 58, 47, 81, 61, 54, 102, 59, 62, 62, 59, 61, 61, 66, 76, 77, 58, 56, 76, 60, 85, 62, 58, 58, 63, 69, 85, 76, 63, 50, 57, 43, 67, 71, 50, 75, 50, 71, 86, 59, 62, 59, 60, 80, 43, 55, 111, 61, 65, 81, 71, 58, 69, 64, 56, 71, 82, 76, 78, 78, 47, 60, 75, 62, 83, 56, 101, 54, 55, 95, 69, 69, 98, 84, 71, 62, 69, 62, 64, 67, 62, 63, 82, 82, 60, 64, 47, 61, 78, 62, 61, 72, 83, 66, 55, 39, 65, 55, 77, 40, 83, 111, 63, 76, 66, 71, 79, 81, 58, 52, 67, 71, 52, 72, 56, 64, 62, 69, 65, 57, 67, 44, 72, 41, 84, 50, 66, 60, 54, 73, 119, 57, 59, 63, 97, 77, 64, 69, 69, 59, 43, 127, 66, 87, 52, 73, 94, 74, 67, 56, 106, 64, 60, 52, 60, 94, 75, 51, 59, 101, 66, 95, 49, 61, 48, 75, 48, 63, 78, 62, 59, 79, 80, 79, 74, 82, 65, 68, 61, 67, 61, 68, 84, 61, 64, 70, 65, 59, 57, 70, 62, 64, 49, 67, 75, 100, 84, 48, 63, 87, 61, 60, 64, 61, 76, 75, 69, 88, 54, 62, 91, 55, 59, 73, 50, 55, 51, 80, 40, 74, 58, 51, 62, 57, 58, 78, 80, 71, 61, 43, 56, 68, 69, 45, 67, 79, 66, 60, 73, 77, 56, 71, 68, 69, 79, 88, 43, 51, 87, 73, 60, 57, 60, 67, 63, 66, 40, 64, 85, 49, 70, 68, 75, 68, 70, 50, 61, 79, 66, 49, 64, 73, 67, 65, 68, 70, 62, 79, 73, 57, 62, 67, 70, 46, 86, 69, 67, 92, 80, 65, 54, 66, 82, 50, 67, 66, 61, 71, 51, 94, 58, 44, 99, 81, 70, 58, 67, 71, 68, 76, 62, 58, 67, 82, 81, 58, 54, 68, 72, 63, 86, 69, 46, 74, 52, 66, 80, 56, 57, 50, 77, 66, 54, 72, 76, 61, 48, 75, 52, 59, 49, 76, 75, 56, 70, 62, 68, 56, 67, 51, 96, 69, 80, 41, 60, 80, 65, 79, 48, 56, 79, 73, 70, 54, 69, 53, 59, 73, 55, 71, 48, 71, 65, 70, 69, 69, 61, 43, 74, 81, 67, 86, 81, 71, 55, 58, 84, 59, 66, 68, 64, 55, 65, 66, 53, 64, 91, 67, 79, 64, 69, 96, 62, 96, 66, 72, 60, 63, 
49, 48, 75, 63, 80, 66, 54, 78, 58, 61, 71, 58, 52, 33, 83, 61, 58, 50, 54, 56, 68, 52, 70, 66, 79, 54, 62, 83, 51, 58, 55, 85, 59, 39, 80, 59, 52, 78, 89, 60, 81, 50, 57, 48, 48, 46, 64, 77, 66, 75, 59, 72, 57, 162, 96, 66, 39, 55, 58, 82, 60, 59, 89, 56, 63, 63, 63, 55, 57, 67, 61, 67, 79, 62, 38, 84, 103, 74, 80, 60, 64, 61, 92, 75, 68, 46, 69, 66, 77, 59, 63, 65, 59, 84, 63, 73, 55, 54, 43, 60, 77, 78, 61, 42, 59, 53, 81, 69, 72, 60, 45, 66, 65, 53, 79, 65, 64, 54, 68, 59, 48, 67, 79, 66, 69, 75, 62, 92, 59, 97, 66, 38, 74, 71, 66, 78, 58, 63, 72, 70, 45, 54, 41, 74, 82, 90, 90, 58, 60, 63, 52, 55, 60, 66, 58, 69, 62, 59, 49, 64, 43, 69, 46, 62, 67, 71, 86, 66, 64, 54, 47, 67, 65, 53, 60, 62, 70, 53, 86, 57, 69, 60, 54, 108, 47, 45, 83, 69, 50, 58, 55, 68, 64, 49, 58, 44, 41, 89, 89, 71, 59, 60, 72, 77, 74, 80, 58, 105, 77, 66, 77, 46, 51, 93, 59, 60, 54, 55, 69, 65, 27, 75, 61, 59, 89, 66, 68, 62, 60, 58, 73, 67, 56, 78, 59, 66, 39, 61, 116, 65, 55, 65, 64, 74, 75, 64, 71, 76, 82, 74, 52, 71, 66, 53, 59, 68, 64, 69, 63, 70, 76, 52, 74, 65, 46, 51, 72, 69, 80, 60, 61, 70, 47, 98, 67, 59, 74, 61, 57, 70, 58, 72, 67, 62, 47, 62, 70, 72, 70, 71, 56, 77, 78, 59, 64, 66, 55, 55, 80, 68, 76, 63, 77, 60, 67, 57, 51, 58, 96, 77, 61, 72, 95, 66, 62, 71, 73, 89, 62, 72, 69, 71, 56, 83, 61, 65, 59, 68, 81, 61, 50, 78, 70, 69, 58, 61, 68, 55, 66, 104, 58, 78, 49, 56, 63, 60, 46, 62, 62, 52, 51, 60, 71, 74, 63, 56, 68, 56, 61, 63, 40, 77, 52, 60, 56, 49, 51, 76, 66, 68, 71, 84, 72, 59, 64, 59, 62, 61, 57, 54, 63, 64, 96, 64, 38, 82, 95, 67, 65, 52, 80, 90, 83, 66, 72, 55, 59, 86, 74, 54, 55, 74, 106, 59, 68, 64, 67, 62, 78, 59, 75, 55, 55, 66, 59, 78, 73, 68, 47, 54, 61, 64, 69, 52, 91, 83, 62, 67, 72, 63, 82, 70, 59, 65, 59, 60, 48, 63, 54, 50, 82, 61, 70, 77, 70, 64, 74, 55, 86, 49, 62, 86, 50, 67, 98, 46, 65, 65, 90, 63, 65, 71, 84, 81, 59, 54, 55, 58, 57, 58, 54, 93, 52, 44, 52, 67, 58, 60, 70, 61, 74, 98, 65, 82, 63, 70, 37, 72, 53, 61, 67, 68, 52, 89, 57, 55, 72, 56, 66, 79, 62, 67, 46, 67, 59, 86, 78, 56, 54, 87, 57, 82, 75, 72, 58, 68, 57, 60, 89, 55, 68, 57, 63, 57, 75, 66, 66, 57, 54, 72, 69, 96, 50, 66, 75, 91, 67, 62, 62, 58, 71, 81, 85, 67, 73, 78, 72, 63, 76, 102, 81, 57, 77, 73, 81, 53, 60, 57, 60, 70, 65, 51, 51, 84, 86, 47, 60, 66, 65, 45, 71, 51, 48, 86, 59, 56, 92, 65, 70, 56, 60, 86, 64, 51, 65, 73, 61, 60, 80, 70, 51, 68, 71, 65, 80, 83, 63, 53, 59, 61, 95, 79, 53, 68, 74, 61, 57, 77, 82, 76, 79, 57, 40, 75, 48, 57, 76, 58, 71, 80, 75, 56, 68, 56, 65, 70, 44, 63, 60, 67, 58, 76, 69, 81, 58, 65, 104, 66, 112, 99, 54, 64, 70, 70, 65, 70, 63, 69, 58, 92, 62, 78, 57, 61, 44, 61, 62, 67, 70, 62, 63, 67, 59, 65, 63, 51, 64, 61, 77, 66, 56, 65, 66, 67, 59, 77, 65, 57, 75, 66, 54, 52, 81, 73, 58, 73, 53, 52, 102, 55, 61, 58, 72, 60, 104, 62, 65, 57, 64, 64, 97, 80, 63, 63, 69, 52, 73, 67, 81, 63, 70, 74, 63, 60, 64, 53, 40, 122, 62, 74, 61, 65, 67, 53, 63, 90, 67, 54, 57, 80, 81, 65, 62, 60, 65, 64, 57, 60, 54, 63, 56, 96, 62, 57, 54, 62, 73, 81, 70, 59, 84, 78, 72, 82, 55, 61, 65, 88, 52, 71, 58, 67, 67, 68, 74, 63, 60, 54, 72, 60, 46, 51, 77, 64, 54, 97, 64, 96, 74, 56, 73, 65, 64, 71, 58, 70, 72, 60, 83, 60, 72, 61, 71, 71, 65, 52, 61, 76, 70, 58, 47, 55, 76, 71, 64, 69, 52, 60, 77, 59, 52, 61, 52, 62, 65, 68, 65, 103, 67, 57, 56, 72, 67, 80, 62, 66, 65, 54, 67, 52, 57, 61, 68, 77, 47, 69, 67, 68, 72, 54, 68, 67, 87, 59, 81, 68, 54, 66, 60, 51, 57, 48, 80, 56, 107, 60, 65, 64, 68, 67, 68, 65, 64, 68, 73, 63, 84, 65, 69, 81, 60, 49, 89, 72, 60, 84, 60, 75, 57, 61, 82, 56, 73, 60, 62, 
[large raw data block: a comma-separated array of integer values (roughly 28-165) carried verbatim in the diff payload; collapsed here as it contains no recoverable structure, captions, or identifiers]
64, 67, 90, 76, 75, 59, 74, 73, 64, 48, 63, 63, 59, 63, 64, 55, 62, 49, 69, 82, 56, 69, 70, 59, 59, 77, 71, 75, 75, 47, 46, 67, 72, 59, 88, 65, 86, 61, 50, 64, 68, 68, 60, 68, 75, 61, 87, 45, 67, 74, 61, 72, 63, 62, 54, 63, 63, 64, 62, 76, 57, 57, 63, 49, 46, 71, 70, 55, 71, 59, 101, 61, 79, 70, 67, 71, 73, 100, 77, 58, 95, 58, 81, 82, 63, 65, 84, 57, 57, 62, 83, 62, 73, 103, 56, 43, 48, 56, 63, 62, 65, 89, 62, 53, 65, 53, 61, 81, 60, 62, 105, 72, 63, 53, 62, 34, 90, 61, 58, 58, 58, 65, 68, 71, 58, 64, 61, 83, 58, 81, 65, 48, 61, 85, 99, 60, 58, 72, 56, 61, 57, 55, 68, 59, 63, 58, 58, 89, 64, 59, 52, 53, 67, 78, 65, 55, 67, 56, 72, 65, 68, 60, 74, 62, 104, 81, 69, 88, 65, 86, 49, 60, 71, 141, 69, 57, 75, 61, 57, 52, 60, 84, 65, 50, 80, 55, 67, 64, 60, 63, 53, 66, 79, 58, 57, 59, 57, 61, 52, 64, 59, 51, 49, 57, 65, 85, 67, 53, 61, 80, 54, 63, 55, 58, 52, 64, 54, 62, 66, 47, 47, 59, 46, 68, 60, 58, 61, 73, 60, 78, 54, 90, 57, 69, 82, 63, 59, 77, 76, 60, 58, 60, 62, 60, 66, 72, 53, 67, 116, 64, 62, 61, 55, 99, 48, 82, 65, 60, 63, 55, 120, 67, 68, 70, 73, 66, 70, 71, 64, 60, 56, 60, 86, 74, 83, 64, 111, 72, 71, 61, 70, 60, 57, 52, 51, 81, 79, 60, 64, 50, 64, 54, 67, 62, 74, 66, 63, 115, 69, 80, 56, 60, 62, 56, 56, 59, 46, 62, 97, 58, 60, 70, 57, 65, 61, 55, 62, 67, 69, 51, 75, 81, 76, 68, 81, 71, 63, 134, 82, 57, 52, 59, 84, 81, 51, 60, 79, 62, 77, 66, 65, 83, 66, 61, 62, 66, 56, 60, 51, 76, 63, 62, 79, 59, 50, 67, 71, 64, 54, 66, 48, 55, 60, 99, 85, 48, 75, 60, 78, 110, 49, 59, 71, 88, 93, 81, 67, 64, 51, 52, 95, 67, 84, 58, 75, 61, 62, 75, 63, 67, 63, 46, 74, 75, 55, 69, 74, 68, 81, 71, 55, 71, 55, 67, 61, 50, 69, 55, 60, 106, 84, 78, 66, 45, 76, 54, 56, 71, 71, 76, 72, 84, 65, 79, 69, 49, 66, 74, 48, 67, 66, 53, 51, 68, 55, 62, 76, 63, 47, 54, 58, 59, 96, 48, 60, 74, 69, 65, 66, 66, 65, 56, 74, 70, 60, 67, 68, 48, 73, 53, 63, 60, 66, 49, 68, 63, 48, 59, 58, 72, 62, 53, 59, 77, 80, 57, 58, 43, 74, 88, 61, 59, 57, 43, 87, 61, 47, 63, 83, 66, 64, 63, 80, 117, 71, 64, 70, 58, 54, 60, 52, 52, 67, 63, 61, 103, 83, 51, 83, 69, 57, 62, 62, 57, 61, 53, 60, 54, 64, 67, 92, 56, 73, 66, 63, 64, 62, 63, 68, 62, 71, 71, 58, 80, 58, 67, 55, 65, 67, 51, 83, 50, 73, 54, 55, 72, 80, 41, 44, 58, 66, 70, 59, 50, 64, 71, 71, 76, 72, 62, 89, 72, 61, 75, 60, 69, 83, 67, 61, 59, 60, 68, 73, 50, 62, 67, 60, 76, 67, 59, 60, 78, 75, 57, 62, 118, 62, 68, 68, 125, 59, 53, 52, 58, 83, 53, 56, 85, 69, 63, 59, 56, 72, 72, 58, 51, 64, 65, 57, 60, 57, 58, 45, 62, 57, 61, 62, 62, 56, 64, 88, 56, 63, 63, 59, 57, 79, 60, 55, 66, 60, 67, 58, 63, 69, 65, 62, 63, 79, 62, 53, 57, 73, 52, 60, 77, 78, 68, 65, 76, 54, 60, 69, 60, 64, 57, 60, 52, 50, 64, 56, 78, 64, 61, 60, 64, 57, 71, 59, 58, 70, 59, 59, 77, 55, 74, 53, 68, 114, 69, 94, 63, 65, 88, 60, 64, 78, 45, 69, 74, 57, 68, 62, 55, 108, 63, 55, 61, 71, 65, 80, 64, 54, 56, 59, 63, 77, 68, 52, 58, 60, 92, 62, 67, 48, 65, 82, 48, 76, 63, 65, 59, 64, 61, 68, 66, 57, 57, 72, 76, 57, 56, 64, 44, 64, 61, 55, 65, 61, 60, 59, 54, 62, 66, 62, 57, 62, 69, 61, 45, 69, 71, 93, 66, 62, 63, 88, 33, 61, 67, 61, 60, 66, 56, 64, 62, 65, 37, 54, 64, 67, 73, 66, 62, 55, 61, 44, 52, 67, 61, 60, 89, 64, 57, 65, 69, 57, 63, 66, 63, 72, 65, 69, 53, 64, 66, 72, 80, 65, 56, 54, 61, 61, 52, 59, 103, 106, 60, 70, 59, 66, 55, 57, 68, 62, 63, 95, 65, 54, 69, 62, 81, 59, 56, 65, 67, 61, 70, 59, 56, 59, 62, 74, 54, 59, 67, 106, 77, 86, 68, 93, 63, 106, 38, 80, 61, 59, 62, 60, 61, 60, 54, 63, 79, 83, 59, 95, 63, 62, 51, 47, 59, 61, 68, 63, 62, 68, 56, 57, 53, 57, 68, 65, 82, 83, 65, 60, 75, 112, 54, 53, 66, 
67, 65, 63, 51, 80, 63, 68, 75, 71, 78, 65, 64, 78, 56, 64, 49, 55, 58, 63, 106, 62, 58, 79, 65, 55, 53, 56, 66, 53, 75, 52, 72, 69, 60, 84, 89, 52, 69, 55, 94, 62, 76, 70, 93, 67, 62, 64, 107, 78, 85, 57, 75, 87, 111, 58, 69, 56, 78, 48, 46, 63, 75, 101, 80, 67, 63, 63, 76, 60, 61, 57, 57, 65, 64, 68, 56, 61, 56, 98, 64, 67, 65, 68, 67, 86, 70, 52, 62, 72, 67, 53, 60, 62, 58, 62, 59, 59, 69, 73, 52, 48, 51, 53, 64, 62, 54, 65, 62, 59, 64, 64, 67, 94, 69, 81, 66, 75, 51, 53, 59, 69, 106, 70, 57, 57, 62, 65, 59, 62, 66, 87, 59, 55, 70, 74, 50, 51, 64, 60, 77, 63, 58, 75, 51, 60, 63, 81, 68, 57, 57, 76, 53, 70, 49, 67, 109, 62, 57, 65, 65, 103, 64, 72, 67, 56, 51, 63, 61, 43, 78, 85, 55, 60, 48, 41, 58, 81, 68, 65, 68, 62, 70, 68, 63, 77, 70, 89, 71, 44, 54, 54, 92, 62, 74, 68, 64, 54, 83, 85, 60, 48, 52, 65, 62, 56, 75, 72, 46, 72, 80, 74, 57, 66, 68, 50, 82, 62, 61, 72, 80, 68, 68, 66, 61, 54, 74, 69, 46, 79, 65, 61, 75, 63, 74, 46, 42, 58, 67, 52, 86, 62, 63, 46, 66, 57, 54, 77, 65, 46, 58, 47, 63, 66, 66, 65, 74, 69, 60, 88, 81, 61, 81, 69, 84, 71, 62, 53, 72, 70, 57, 49, 75, 60, 70, 59, 77, 74, 64, 59, 65, 66, 57, 59, 63, 53, 68, 78, 72, 49, 107, 75, 60, 76, 57, 57, 60, 43, 73, 59, 69, 61, 69, 74, 50, 50, 66, 77, 48, 58, 68, 81, 52, 53, 66, 43, 154, 55, 75, 73, 52, 54, 75, 65, 61, 71, 60, 52, 73, 56, 75, 63, 61, 65, 55, 74, 72, 69, 53, 80, 65, 55, 67, 66, 60, 71, 59, 65, 46, 91, 95, 53, 87, 74, 61, 73, 58, 59, 69, 73, 53, 71, 69, 48, 58, 48, 68, 69, 70, 84, 59, 58, 69, 65, 53, 60, 70, 65, 50, 58, 68, 81, 71, 55, 53, 58, 60, 59, 67, 58, 74, 49, 58, 59, 47, 74, 72, 59, 68, 60, 44, 87, 70, 50, 59, 73, 77, 69, 67, 70, 69, 68, 62, 80, 80, 60, 80, 87, 59, 62, 65, 51, 76, 67, 61, 64, 94, 63, 61, 45, 65, 62, 69, 69, 55, 79, 72, 93, 52, 60, 90, 79, 58, 57, 62, 55, 50, 74, 64, 106, 60, 74, 59, 62, 89, 55, 62, 68, 55, 75, 72, 63, 68, 43, 71, 58, 93, 97, 67, 87, 104, 64, 61, 56, 49, 69, 69, 80, 47, 52, 62, 48, 68, 59, 78, 79, 50, 93, 61, 64, 79, 60, 68, 51, 66, 66, 73, 53, 43, 68, 45, 67, 48, 75, 87, 75, 64, 57, 56, 62, 61, 63, 75, 72, 59, 73, 53, 104, 50, 85, 95, 57, 65, 56, 44, 76, 58, 51, 59, 63, 81, 60, 49, 66, 62, 65, 68, 67, 88, 58, 56, 61, 54, 60, 52, 50, 52, 61, 84, 44, 47, 62, 65, 55, 89, 54, 59, 57, 68, 69, 66, 48, 87, 57, 52, 64, 88, 43, 73, 88, 70, 60, 52, 51, 58, 60, 59, 59, 83, 58, 54, 74, 69, 64, 41, 72, 66, 60, 67, 56, 68, 72, 62, 65, 65, 61, 69, 70, 56, 84, 78, 92, 56, 87, 67, 60, 56, 60, 52, 76, 69, 49, 60, 46, 65, 50, 57, 54, 57, 57, 55, 55, 97, 76, 70, 54, 63, 65, 72, 56, 52, 75, 52, 64, 90, 48, 97, 74, 83, 85, 69, 61, 65, 42, 57, 62, 54, 54, 65, 59, 66, 58, 51, 60, 63, 53, 59, 75, 98, 42, 90, 70, 56, 116, 52, 60, 68, 47, 59, 78, 88, 66, 62, 52, 69, 55, 66, 72, 72, 52, 68, 69, 57, 40, 81, 80, 59, 55, 39, 81, 55, 114, 88, 102, 72, 69, 66, 62, 52, 92, 73, 93, 63, 75, 60, 80, 78, 67, 63, 57, 70, 74, 67, 73, 83, 61, 68, 52, 57, 74, 68, 82, 75, 56, 70, 62, 65, 67, 53, 74, 72, 66, 119, 64, 49, 62, 53, 82, 51, 81, 58, 78, 56, 54, 57, 75, 82, 59, 94, 57, 87, 67, 65, 87, 78, 66, 56, 70, 58, 61, 56, 73, 72, 73, 63, 85, 45, 56, 62, 70, 66, 58, 69, 59, 32, 53, 69, 74, 73, 69, 75, 73, 72, 62, 56, 47, 65, 60, 95, 76, 79, 63, 62, 54, 79, 57, 82, 59, 61, 61, 66, 52, 61, 65, 92, 44, 60, 62, 65, 51, 61, 80, 57, 79, 65, 43, 66, 56, 67, 69, 57, 62, 89, 63, 46, 64, 89, 69, 70, 62, 104, 63, 45, 60, 58, 60, 62, 60, 61, 59, 65, 68, 47, 72, 72, 63, 70, 82, 56, 49, 58, 61, 56, 45, 60, 52, 75, 63, 64, 103, 75, 62, 69, 78, 43, 85, 56, 63, 80, 72, 70, 66, 66, 66, 47, 84, 64, 68, 63, 69, 52, 58, 52, 65, 87, 
60, 57, 50, 101, 53, 94, 74, 57, 60, 99, 71, 61, 81, 61, 78, 69, 60, 59, 68, 61, 71, 78, 114, 59, 55, 53, 68, 65, 67, 52, 62, 47, 47, 69, 53, 72, 61, 57, 61, 79, 71, 97, 95, 58, 57, 82, 59, 88, 78, 46, 64, 80, 68, 48, 76, 59, 62, 60, 77, 106, 44, 72, 75, 52, 44, 53, 65, 58, 85, 73, 62, 75, 79, 70, 46, 73, 59, 67, 64, 85, 64, 72, 70, 48, 60, 78, 51, 58, 77, 63, 53, 78, 66, 82, 111, 79, 49, 54, 96, 98, 68, 54, 66, 57, 70, 65, 76, 51, 74, 79, 74, 76, 85, 61, 63, 89, 61, 60, 54, 56, 77, 72, 60, 57, 68, 79, 58, 54, 65, 52, 55, 64, 64, 62, 56, 70, 60, 54, 53, 56, 70, 68, 71, 55, 67, 75, 66, 45, 62, 62, 75, 50, 78, 46, 70, 61, 59, 71, 61, 77, 79, 70, 83, 65, 83, 82, 73, 45, 79, 66, 70, 66, 49, 76, 52, 57, 58, 48, 65, 110, 61, 75, 64, 47, 55, 97, 71, 60, 72, 69, 63, 67, 53, 48, 59, 62, 73, 80, 72, 64, 70, 70, 53, 92, 74, 67, 51, 51, 73, 62, 55, 76, 46, 57, 88, 94, 72, 68, 73, 54, 64, 63, 62, 69, 59, 69, 58, 44, 62, 69, 61, 71, 66, 44, 68, 55, 67, 66, 48, 52, 59, 61, 55, 76, 102, 64, 60, 56, 70, 60, 62, 74, 89, 68, 52, 77, 65, 58, 60, 54, 46, 63, 35, 73, 59, 70, 44, 73, 70, 59, 74, 54, 66, 75, 67, 70, 62, 60, 61, 75, 55, 57, 69, 51, 78, 58, 69, 67, 71, 81, 61, 63, 74, 69, 59, 85, 54, 72, 53, 55, 74, 63, 68, 66, 63, 56, 71, 52, 78, 83, 71, 82, 62, 56, 60, 73, 86, 54, 82, 65, 48, 59, 56, 93, 70, 61, 66, 76, 53, 52, 55, 69, 74, 69, 61, 53, 72, 64, 55, 69, 65, 71, 62, 51, 44, 51, 62, 72, 94, 63, 67, 76, 83, 68, 59, 70, 59, 80, 63, 82, 44, 64, 73, 59, 67, 64, 64, 87, 79, 54, 72, 70, 97, 78, 71, 99, 73, 57, 60, 65, 52, 76, 85, 75, 53, 59, 73, 73, 60, 64, 67, 54, 57, 60, 55, 63, 63, 54, 92, 60, 69, 50, 52, 61, 69, 57, 56, 81, 79, 71, 58, 84, 69, 62, 53, 73, 76, 92, 73, 63, 48, 55, 79, 73, 67, 46, 75, 59, 58, 93, 60, 63, 69, 64, 60, 56, 64, 68, 84, 78, 68, 61, 56, 46, 66, 76, 84, 88, 78, 77, 70, 69, 76, 53, 64, 143, 82, 68, 53, 79, 51, 74, 62, 65, 53, 65, 69, 70, 62, 55, 52, 59, 82, 50, 73, 58, 67, 65, 43, 58, 71, 58, 76, 64, 53, 51, 65, 57, 94, 56, 65, 83, 61, 67, 54, 57, 72, 75, 71, 71, 44, 60, 58, 69, 79, 70, 55, 57, 55, 52, 59, 78, 52, 72, 59, 76, 95, 72, 57, 86, 68, 76, 62, 76, 62, 71, 65, 83, 71, 59, 72, 67, 63, 56, 47, 66, 58, 70, 60, 53, 66, 66, 125, 79, 68, 86, 53, 63, 63, 68, 67, 64, 67, 62, 80, 75, 87, 78, 108, 69, 63, 78, 44, 57, 53, 75, 67, 85, 70, 57, 61, 52, 54, 68, 46, 56, 55, 56, 81, 80, 83, 53, 65, 44, 59, 70, 71, 63, 53, 57, 103, 59, 52, 63, 59, 61, 73, 49, 68, 60, 53, 56, 75, 59, 104, 65, 75, 68, 68, 102, 63, 58, 51, 65, 68, 66, 64, 72, 68, 60, 65, 61, 59, 57, 53, 69, 68, 60, 71, 55, 55, 55, 51, 61, 91, 67, 57, 71, 82, 80, 73, 54, 62, 58, 54, 107, 62, 43, 69, 65, 66, 73, 42, 40, 59, 53, 104, 85, 76, 71, 76, 45, 67, 83, 68, 70, 59, 51, 52, 83, 62, 73, 65, 42, 77, 86, 58, 67, 49, 79, 68, 49, 61, 56, 55, 48, 54, 74, 36, 74, 70, 67, 76, 71, 76, 64, 61, 66, 67, 59, 62, 57, 64, 61, 56, 78, 58, 71, 79, 62, 63, 63, 68, 66, 80, 69, 80, 54, 55, 53, 51, 58, 49, 60, 101, 53, 65, 53, 54, 52, 73, 71, 79, 56, 68, 63, 53, 94, 73, 80, 88, 71, 77, 60, 65, 57, 71, 67, 62, 72, 62, 65, 57, 69, 44, 65, 87, 54, 63, 43, 64, 65, 65, 65, 59, 84, 84, 65, 51, 54, 69, 64, 50, 66, 67, 120, 45, 70, 58, 72, 65, 59, 77, 60, 72, 59, 65, 123, 67, 60, 64, 71, 60, 61, 62, 56, 72, 64, 61, 57, 52, 60, 94, 65, 49, 52, 56, 102, 57, 95, 74, 63, 64, 67, 63, 81, 72, 58, 73, 66, 72, 46, 64, 50, 103, 53, 54, 59, 107, 76, 65, 73, 70, 65, 60, 55, 74, 78, 68, 58, 67, 55, 74, 65, 64, 83, 58, 61, 61, 60, 75, 75, 54, 24, 68, 52, 64, 73, 55, 68, 56, 78, 71, 56, 59, 68, 53, 106, 57, 58, 75, 117, 71, 65, 65, 57, 86, 62, 66, 87, 57, 75, 65, 
61, 71, 69, 54, 65, 70, 59, 65, 64, 51, 61, 64, 54, 60, 48, 56, 68, 59, 60, 96, 69, 58, 55, 51, 65, 70, 70, 73, 83, 66, 60, 64, 75, 63, 66, 59, 72, 78, 71, 83, 76, 51, 68, 78, 50, 70, 50, 64, 60, 58, 77, 69, 59, 81, 70, 66, 50, 61, 63, 65, 74, 69, 58, 58, 71, 70, 67, 70, 59, 62, 54, 66, 67, 47, 68, 68, 66, 69, 73, 71, 87, 72, 93, 54, 67, 64, 75, 55, 47, 68, 54, 80, 83, 55, 78, 72, 63, 69, 51, 65, 68, 71, 60, 84, 58, 77, 63, 63, 57, 41, 82, 62, 62, 63, 61, 115, 56, 60, 68, 68, 56, 68, 58, 53, 89, 83, 50, 82, 77, 58, 47, 75, 54, 75, 59, 74, 66, 59, 64, 68, 61, 56, 77, 57, 68, 48, 76, 61, 72, 58, 71, 87, 68, 57, 69, 85, 62, 46, 54, 54, 70, 58, 58, 76, 77, 59, 53, 88, 67, 54, 58, 79, 70, 61, 63, 59, 52, 69, 68, 34, 69, 65, 53, 63, 84, 68, 57, 58, 72, 69, 68, 60, 64, 61, 64, 61, 43, 72, 59, 71, 64, 52, 54, 69, 73, 96, 61, 61, 51, 62, 60, 64, 74, 58, 64, 95, 68, 55, 69, 57, 89, 58, 68, 71, 58, 48, 63, 53, 57, 64, 78, 61, 61, 55, 77, 79, 64, 56, 74, 78, 58, 84, 86, 73, 45, 63, 47, 63, 69, 58, 62, 47, 77, 62, 52, 64, 46, 107, 65, 66, 53, 57, 100, 47, 72, 52, 46, 85, 71, 73, 57, 73, 57, 63, 76, 53, 66, 83, 53, 43, 67, 49, 73, 58, 55, 62, 61, 62, 65, 65, 65, 54, 48, 82, 61, 66, 65, 80, 52, 59, 67, 56, 61, 53, 62, 44, 69, 46, 72, 75, 58, 71, 56, 73, 48, 78, 81, 65, 65, 66, 89, 55, 72, 49, 112, 68, 56, 69, 75, 78, 76, 61, 94, 89, 133, 54, 76, 62, 74, 49, 57, 58, 61, 80, 84, 63, 87, 79, 82, 66, 67, 82, 68, 73, 85, 54, 65, 55, 61, 72, 59, 76, 57, 64, 67, 73, 70, 81, 51, 74, 54, 65, 46, 57, 76, 85, 49, 107, 65, 70, 57, 56, 50, 64, 60, 44, 60, 74, 68, 66, 58, 67, 43, 77, 88, 60, 66, 69, 70, 77, 81, 66, 63, 81, 43, 75, 51, 67, 64, 69, 96, 105, 60, 59, 71, 54, 57, 71, 67, 53, 67, 55, 67, 62, 63, 70, 50, 62, 59, 67, 70, 61, 64, 91, 36, 55, 62, 56, 61, 74, 63, 50, 55, 85, 58, 83, 63, 64, 64, 54, 53, 73, 59, 61, 63, 73, 82, 65, 57, 98, 54, 54, 55, 70, 74, 60, 45, 55, 70, 52, 70, 60, 74, 70, 64, 52, 47, 59, 79, 56, 65, 60, 79, 58, 55, 73, 61, 47, 52, 70, 64, 57, 73, 58, 75, 76, 60, 47, 75, 65, 94, 57, 58, 88, 58, 71, 56, 82, 53, 84, 58, 55, 68, 67, 86, 79, 115, 65, 52, 53, 57, 64, 100, 117, 110, 60, 68, 77, 62, 55, 83, 64, 52, 55, 83, 104, 57, 70, 76, 61, 35, 53, 68, 73, 66, 75, 75, 61, 82, 75, 79, 61, 60, 62, 54, 61, 59, 71, 90, 60, 61, 49, 61, 62, 66, 70, 68, 69, 53, 56, 74, 85, 80, 57, 56, 47, 77, 93, 63, 62, 69, 75, 59, 76, 65, 59, 51, 79, 97, 51, 55, 81, 58, 59, 58, 59, 137, 78, 62, 76, 67, 62, 59, 65, 74, 49, 77, 63, 73, 54, 67, 52, 54, 56, 57, 67, 58, 60, 81, 76, 57, 65, 38, 68, 55, 62, 47, 54, 77, 70, 72, 65, 74, 72, 65, 57, 56, 53, 55, 72, 82, 79, 51, 56, 69, 68, 76, 64, 75, 67, 63, 57, 67, 53, 67, 70, 60, 84, 60, 79, 63, 50, 95, 63, 55, 81, 79, 63, 89, 54, 65, 53, 108, 54, 58, 60, 86, 61, 57, 61, 67, 75, 62, 113, 55, 69, 54, 44, 48, 60, 83, 76, 47, 58, 57, 57, 70, 88, 69, 65, 64, 60, 62, 79, 61, 61, 58, 52, 74, 64, 58, 32, 70, 54, 82, 67, 69, 58, 42, 73, 96, 76, 79, 50, 63, 51, 62, 66, 103, 85, 77, 76, 55, 53, 66, 60, 75, 67, 54, 67, 85, 41, 41, 117, 68, 86, 93, 68, 68, 70, 98, 88, 60, 60, 40, 65, 87, 44, 56, 74, 71, 78, 54, 58, 93, 76, 70, 71, 70, 50, 75, 48, 65, 85, 63, 79, 42, 61, 61, 77, 67, 44, 68, 67, 75, 84, 51, 46, 40, 40, 59, 68, 61, 73, 47, 79, 73, 94, 46, 85, 60, 69, 65, 68, 61, 79, 70, 74, 70, 65, 132, 81, 66, 56, 62, 86, 66, 54, 33, 64, 69, 95, 57, 58, 62, 48, 62, 57, 85, 69, 74, 73, 97, 37, 75, 63, 59, 55, 83, 66, 60, 76, 51, 65, 79, 50, 78, 76, 46, 55, 57, 81, 59, 64, 63, 67, 82, 56, 52, 69, 60, 58, 63, 50, 52, 79, 59, 64, 51, 66, 59, 55, 55, 52, 61, 80, 70, 67, 55, 65, 51, 52, 76, 61, 
57, 66, 56, 53, 64, 66, 59, 49, 68, 52, 60, 53, 72, 47, 48, 49, 51, 64, 97, 74, 67, 69, 90, 96, 54, 51, 64, 61, 53, 65, 116, 78, 81, 70, 51, 64, 64, 45, 56, 56, 65, 77, 55, 40, 83, 82, 87, 81, 59, 58, 82, 58, 57, 53, 78, 76, 57, 80, 71, 103, 49, 57, 64, 59, 70, 50, 58, 82, 63, 76, 68, 62, 90, 86, 56, 51, 47, 84, 65, 95, 47, 94, 84, 76, 75, 87, 90, 60, 87, 85, 69, 60, 74, 50, 43, 57, 77, 79, 60, 78, 72, 65, 125, 54, 66, 70, 53, 61, 65, 59, 72, 56, 61, 59, 65, 68, 77, 63, 64, 71, 48, 47, 66, 50, 99, 62, 63, 61, 109, 61, 76, 41, 55, 91, 71, 67, 62, 72, 57, 54, 64, 38, 59, 68, 40, 73, 73, 66, 57, 74, 58, 65, 68, 66, 65, 45, 49, 90, 51, 72, 71, 61, 70, 71, 61, 75, 54, 56, 62, 50, 76, 55, 74, 54, 51, 69, 54, 64, 58, 85, 82, 63, 73, 48, 62, 53, 63, 61, 64, 49, 69, 64, 48, 60, 47, 62, 46, 30, 54, 73, 91, 94, 60, 69, 46, 59, 52, 58, 74, 71, 60, 67, 56, 63, 66, 49, 59, 101, 57, 85, 60, 55, 87, 62, 46, 71, 97, 80, 72, 89, 55, 77, 93, 52, 51, 68, 43, 51, 74, 69, 70, 59, 43, 87, 60, 67, 56, 79, 68, 58, 73, 72, 81, 70, 53, 69, 61, 56, 59, 80, 83, 52, 61, 75, 77, 69, 65, 68, 107, 69, 68, 72, 77, 68, 70, 63, 61, 58, 55, 57, 80, 62, 56, 61, 55, 68, 47, 58, 64, 70, 102, 77, 60, 75, 49, 84, 61, 56, 66, 55, 53, 62, 69, 97, 77, 51, 73, 97, 59, 64, 58, 68, 82, 65, 96, 51, 58, 59, 73, 58, 58, 58, 62, 57, 56, 71, 66, 51, 47, 56, 85, 58, 81, 53, 72, 82, 74, 65, 58, 49, 66, 76, 44, 44, 86, 78, 58, 73, 65, 73, 64, 63, 62, 86, 52, 90, 65, 69, 71, 78, 66, 74, 91, 64, 55, 71, 45, 70, 71, 68, 66, 68, 68, 87, 102, 83, 71, 68, 77, 83, 65, 69, 78, 66, 58, 60, 69, 78, 74, 58, 61, 59, 55, 95, 53, 73, 53, 81, 75, 64, 54, 61, 64, 62, 54, 62, 49, 57, 58, 60, 71, 70, 58, 60, 50, 73, 102, 108, 50, 53, 67, 63, 51, 72, 44, 48, 43, 70, 85, 71, 65, 67, 50, 86, 48, 51, 49, 61, 51, 63, 61, 52, 72, 63, 58, 77, 72, 52, 51, 65, 81, 63, 50, 62, 73, 93, 87, 56, 76, 81, 84, 60, 68, 65, 115, 63, 72, 55, 79, 68, 89, 63, 107, 60, 77, 52, 46, 60, 81, 75, 72, 63, 63, 83, 74, 52, 56, 66, 52, 72, 67, 54, 60, 73, 63, 62, 76, 67, 62, 58, 65, 55, 63, 61, 62, 41, 60, 56, 90, 58, 56, 57, 61, 71, 48, 80, 65, 71, 75, 57, 60, 64, 48, 50, 69, 99, 61, 72, 63, 58, 68, 59, 61, 77, 56, 72, 51, 94, 48, 54, 82, 56, 80, 59, 66, 65, 56, 68, 56, 79, 62, 74, 66, 63, 57, 58, 69, 63, 51, 59, 81, 59, 70, 77, 83, 75, 58, 68, 64, 69, 77, 77, 67, 52, 54, 51, 75, 49, 45, 68, 64, 92, 82, 93, 60, 65, 69, 68, 44, 76, 61, 113, 63, 67, 53, 54, 64, 76, 73, 73, 74, 61, 54, 69, 56, 72, 70, 80, 61, 76, 63, 67, 75, 69, 58, 81, 45, 50, 87, 73, 73, 57, 60, 65, 53, 44, 49, 57, 64, 74, 53, 80, 88, 79, 88, 48, 80, 73, 61, 57, 79, 55, 55, 65, 66, 61, 57, 49, 60, 68, 57, 58, 64, 66, 71, 55, 81, 68, 70, 61, 74, 60, 67, 68, 56, 48, 61, 85, 53, 60, 63, 56, 63, 40, 78, 56, 66, 48, 54, 93, 41, 64, 58, 60, 86, 69, 64, 60, 66, 62, 60, 63, 78, 69, 53, 62, 99, 60, 67, 53, 63, 65, 53, 63, 54, 75, 76, 57, 78, 72, 88, 67, 54, 79, 55, 70, 64, 62, 59, 55, 52, 53, 56, 83, 63, 57, 55, 67, 63, 67, 68, 62, 68, 53, 55, 62, 164, 78, 51, 58, 72, 67, 73, 64, 56, 78, 51, 71, 55, 75, 62, 53, 57, 59, 63, 63, 72, 42, 64, 65, 64, 54, 73, 47, 62, 63, 59, 63, 47, 62, 67, 58, 84, 86, 62, 73, 80, 55, 43, 76, 63, 59, 58, 56, 59, 68, 58, 61, 51, 64, 82, 58, 92, 55, 52, 60, 49, 75, 48, 61, 65, 70, 80, 57, 76, 68, 61, 55, 87, 65, 65, 67, 82, 48, 60, 53, 84, 56, 54, 62, 71, 106, 65, 102, 80, 62, 66, 60, 76, 75, 81, 76, 58, 78, 54, 55, 90, 41, 56, 88, 65, 72, 55, 73, 64, 51, 50, 65, 60, 49, 73, 74, 57, 50, 56, 87, 57, 81, 54, 67, 101, 68, 64, 67, 66, 60, 41, 53, 64, 80, 59, 77, 60, 76, 71, 52, 55, 55, 53, 76, 64, 88, 59, 92, 
58, 92, 80, 57, 49, 85, 92, 75, 62, 50, 67, 56, 76, 67, 49, 61, 73, 85, 63, 59, 67, 47, 50, 40, 52, 65, 64, 55, 52, 63, 51, 56, 72, 61, 58, 71, 74, 57, 46, 61, 88, 89, 83, 72, 64, 64, 65, 69, 71, 58, 60, 75, 79, 110, 73, 64, 71, 61, 65, 62, 62, 66, 60, 60, 68, 46, 97, 56, 64, 41, 53, 75, 73, 73, 71, 67, 65, 76, 59, 43, 75, 62, 74, 50, 70, 56, 62, 68, 61, 62, 76, 70, 41, 51, 78, 78, 79, 57, 52, 61, 76, 75, 89, 67, 68, 56, 68, 73, 64, 72, 43, 74, 64, 56, 64, 60, 65, 71, 49, 67, 50, 69, 56, 59, 61, 57, 76, 61, 54, 59, 76, 75, 62, 51, 62, 60, 74, 65, 69, 77, 55, 64, 78, 70, 62, 63, 71, 61, 82, 75, 57, 49, 64, 53, 68, 67, 55, 57, 115, 59, 61, 66, 81, 47, 61, 53, 65, 67, 69, 61, 104, 72, 114, 48, 78, 50, 78, 72, 69, 89, 49, 81, 58, 77, 81, 62, 78, 67, 63, 71, 70, 64, 61, 84, 68, 69, 49, 52, 48, 108, 72, 44, 74, 64, 47, 58, 79, 88, 65, 81, 55, 39, 96, 70, 96, 69, 60, 85, 55, 89, 65, 76, 39, 59, 69, 103, 74, 89, 68, 85, 49, 40, 53, 58, 52, 64, 64, 81, 52, 70, 61, 58, 105, 49, 54, 73, 57, 58, 75, 88, 68, 65, 55, 39, 58, 67, 85, 84, 64, 67, 58, 61, 58, 57, 79, 59, 61, 82, 89, 67, 61, 76, 59, 58, 57, 70, 83, 96, 79, 69, 48, 45, 84, 48, 80, 56, 73, 66, 59, 57, 65, 65, 64, 71, 79, 67, 54, 67, 42, 67, 62, 85, 60, 66, 67, 71, 72, 57, 64, 63, 80, 50, 80, 63, 68, 59, 68, 51, 58, 45, 54, 82, 56, 64, 63, 64, 72, 63, 77, 57, 47, 53, 74, 57, 61, 48, 75, 83, 56, 55, 73, 39, 59, 54, 59, 63, 54, 88, 48, 75, 70, 59, 61, 70, 66, 60, 56, 79, 81, 68, 75, 39, 94, 61, 63, 82, 120, 60, 67, 77, 65, 87, 78, 79, 51, 67, 73, 62, 53, 73, 84, 63, 65, 65, 77, 55, 52, 63, 65, 64, 69, 51, 58, 58, 84, 116, 61, 45, 82, 53, 80, 78, 67, 71, 70, 101, 58, 68, 76, 48, 64, 81, 48, 63, 53, 66, 64, 57, 77, 78, 71, 62, 75, 58, 79, 63, 81, 56, 65, 57, 85, 75, 55, 77, 51, 57, 50, 63, 62, 61, 60, 76, 53, 80, 60, 55, 75, 71, 58, 50, 56, 72, 75, 63, 99, 48, 60, 59, 51, 63, 75, 71, 85, 79, 58, 69, 63, 64, 67, 83, 71, 52, 69, 55, 64, 76, 72, 42, 61, 77, 65, 55, 60, 67, 58, 78, 58, 49, 57, 53, 67, 52, 75, 77, 62, 88, 55, 69, 76, 77, 94, 52, 76, 71, 64, 59, 71, 52, 56, 61, 67, 55, 70, 54, 78, 112, 73, 73, 63, 78, 85, 59, 58, 54, 67, 60, 75, 54, 72, 72, 76, 49, 56, 74, 55, 79, 72, 98, 99, 58, 60, 63, 85, 60, 57, 46, 62, 61, 64, 67, 72, 56, 72, 65, 49, 71, 83, 66, 62, 58, 57, 69, 59, 78, 49, 68, 72, 50, 64, 73, 59, 56, 50, 60, 48, 81, 57, 70, 87, 62, 91, 74, 57, 74, 52, 67, 54, 72, 75, 59, 64, 61, 57, 48, 75, 70, 78, 51, 60, 55, 108, 59, 63, 69, 83, 54, 92, 48, 52, 52, 96, 59, 79, 42, 87, 58, 67, 55, 63, 57, 54, 55, 60, 48, 76, 64, 57, 54, 48, 55, 56, 59, 72, 64, 61, 83, 83, 52, 95, 80, 44, 72, 61, 67, 75, 73, 50, 75, 55, 70, 73, 57, 49, 58, 70, 90, 66, 75, 72, 63, 54, 73, 69, 65, 64, 69, 60, 71, 76, 50, 75, 71, 93, 81, 71, 62, 73, 60, 65, 77, 56, 54, 95, 42, 61, 45, 65, 51, 56, 69, 73, 63, 53, 99, 51, 75, 56, 64, 86, 60, 100, 62, 47, 55, 63, 64, 82, 66, 55, 58, 59, 73, 88, 47, 57, 65, 67, 52, 52, 58, 54, 62, 90, 57, 69, 53, 53, 58, 62, 69, 92, 64, 62, 81, 63, 65, 77, 61, 56, 59, 56, 62, 56, 61, 50, 50, 54, 99, 79, 50, 51, 53, 83, 68, 47, 56, 36, 49, 64, 81, 76, 59, 53, 55, 53, 50, 56, 66, 56, 99, 57, 67, 76, 82, 61, 58, 75, 62, 53, 68, 63, 72, 68, 52, 66, 70, 52, 52, 64, 70, 57, 53, 60, 60, 84, 58, 111, 56, 44, 55, 73, 60, 62, 66, 45, 62, 63, 63, 54, 56, 52, 58, 63, 75, 56, 61, 62, 68, 76, 99, 67, 56, 59, 82, 51, 65, 64, 67, 70, 67, 64, 66, 78, 48, 73, 64, 84, 56, 49, 70, 67, 58, 48, 82, 72, 49, 62, 90, 62, 68, 63, 73, 59, 67, 65, 69, 57, 56, 46, 59, 67, 59, 74, 52, 56, 89, 78, 81, 93, 70, 50, 47, 77, 63, 48, 57, 109, 74, 73, 60, 54, 48, 81, 57, 63, 
45, 59, 92, 67, 54, 68, 73, 65, 93, 51, 60, 55, 81, 88, 75, 66, 65, 69, 59, 82, 65, 70, 56, 70, 82, 56, 41, 60, 65, 66, 78, 66, 61, 60, 59, 59, 58, 71, 61, 68, 84, 67, 63, 49, 70, 66, 62, 51, 71, 50, 62, 60, 65, 36, 51, 86, 78, 67, 78, 73, 54, 73, 68, 94, 57, 66, 99, 60, 62, 67, 59, 78, 68, 62, 53, 60, 64, 69, 70, 67, 55, 93, 60, 57, 61, 51, 65, 61, 72, 62, 86, 58, 73, 60, 59, 91, 75, 69, 73, 46, 54, 58, 59, 74, 68, 71, 55, 71, 65, 62, 59, 72, 67, 64, 59, 87, 72, 36, 64, 62, 46, 83, 59, 66, 79, 56, 92, 64, 58, 87, 55, 67, 52, 59, 93, 75, 89, 73, 72, 46, 67, 68, 76, 57, 85, 122, 77, 72, 59, 55, 53, 70, 134, 62, 56, 59, 77, 71, 40, 58, 70, 57, 62, 55, 61, 74, 53, 62, 64, 52, 70, 98, 67, 51, 70, 110, 69, 55, 64, 50, 80, 93, 62, 68, 67, 63, 51, 78, 60, 62, 70, 46, 89, 58, 85, 42, 63, 58, 42, 51, 58, 55, 56, 60, 63, 81, 62, 58, 65, 56, 66, 83, 56, 61, 67, 74, 81, 61, 61, 75, 87, 73, 60, 42, 53, 44, 66, 66, 67, 100, 51, 56, 46, 65, 63, 56, 49, 84, 47, 54, 47, 58, 51, 73, 84, 73, 59, 65, 63, 55, 60, 63, 64, 79, 71, 55, 66, 74, 41, 62, 76, 49, 55, 61, 59, 64, 50, 58, 58, 53, 63, 63, 65, 46, 60, 67, 69, 90, 72, 61, 59, 88, 58, 59, 75, 61, 70, 67, 70, 61, 57, 63, 72, 47, 59, 54, 54, 70, 58, 54, 70, 57, 50, 57, 69, 52, 104, 58, 50, 70, 86, 120, 55, 66, 61, 68, 67, 60, 103, 50, 70, 53, 83, 55, 60, 45, 67, 55, 57, 74, 57, 46, 75, 69, 71, 56, 66, 50, 91, 60, 55, 58, 67, 54, 66, 86, 84, 87, 78, 73, 65, 88, 82, 55, 54, 66, 68, 53, 65, 77, 76, 87, 70, 61, 113, 62, 54, 71, 72, 86, 79, 71, 71, 82, 77, 66, 55, 68, 59, 67, 56, 85, 57, 82, 52, 66, 55, 48, 66, 56, 68, 62, 63, 57, 82, 63, 72, 86, 69, 52, 84, 69, 56, 94, 68, 62, 58, 114, 82, 60, 47, 53, 76, 51, 52, 120, 63, 79, 59, 85, 61, 68, 55, 72, 71, 64, 60, 69, 37, 86, 72, 60, 58, 61, 88, 58, 83, 45, 67, 61, 68, 62, 68, 44, 86, 54, 63, 46, 64, 77, 79, 57, 62, 50, 54, 61, 84, 68, 63, 55, 71, 54, 99, 58, 114, 66, 42, 71, 63, 65, 67, 78, 57, 76, 59, 73, 64, 32, 62, 79, 60, 58, 48, 68, 44, 56, 62, 41, 53, 51, 70, 85, 74, 57, 50, 85, 41, 78, 53, 68, 80, 100, 67, 62, 56, 89, 40, 58, 66, 70, 74, 70, 83, 70, 78, 71, 57, 67, 58, 65, 69, 44, 85, 57, 58, 71, 69, 61, 59, 98, 69, 49, 43, 65, 123, 75, 72, 60, 46, 95, 57, 59, 53, 118, 99, 73, 56, 43, 88, 60, 82, 64, 79, 55, 70, 75, 67, 57, 57, 70, 55, 53, 73, 53, 68, 67, 66, 60, 55, 54, 91, 54, 70, 71, 70, 81, 57, 57, 69, 53, 60, 67, 59, 52, 57, 54, 73, 69, 63, 59, 75, 64, 71, 53, 52, 78, 54, 89, 59, 94, 64, 70, 84, 64, 65, 70, 50, 66, 51, 55, 54, 55, 69, 62, 68, 78, 66, 66, 78, 54, 54, 53, 86, 76, 68, 52, 60, 65, 69, 56, 48, 47, 65, 59, 101, 88, 51, 54, 30, 93, 70, 61, 73, 59, 63, 88, 80, 64, 58, 71, 57, 77, 46, 48, 114, 63, 68, 90, 73, 71, 54, 70, 48, 53, 54, 64, 63, 67, 55, 59, 58, 67, 75, 51, 68, 73, 63, 87, 84, 72, 78, 75, 51, 75, 64, 68, 76, 55, 55, 68, 47, 71, 49, 61, 62, 76, 53, 54, 63, 67, 54, 67, 69, 59, 72, 66, 44, 44, 68, 68, 62, 63, 57, 47, 75, 70, 65, 68, 75, 71, 44, 63, 51, 48, 64, 68, 77, 63, 65, 37, 60, 55, 60, 57, 48, 53, 63, 51, 56, 111, 58, 51, 60, 93, 49, 60, 67, 50, 72, 66, 79, 68, 45, 108, 48, 69, 66, 76, 63, 64, 62, 56, 74, 77, 84, 74, 54, 57, 49, 56, 66, 67, 71, 57, 61, 76, 59, 61, 79, 59, 93, 48, 71, 68, 67, 98, 65, 64, 78, 57, 57, 54, 60, 68, 103, 84, 106, 70, 88, 67, 44, 62, 57, 78, 53, 77, 51, 63, 71, 75, 39, 99, 62, 80, 55, 61, 90, 71, 69, 59, 65, 66, 69, 60, 58, 65, 48, 70, 48, 41, 55, 52, 53, 72, 70, 70, 74, 56, 58, 89, 69, 68, 61, 62, 64, 79, 68, 64, 71, 68, 46, 66, 73, 54, 63, 113, 72, 43, 67, 54, 57, 56, 74, 74, 87, 65, 60, 52, 54, 71, 66, 71, 53, 42, 52, 55, 67, 54, 63, 73, 62, 67, 81, 
51, 76, 50, 73, 68, 64, 56, 50, 63, 66, 48, 46, 63, 76, 61, 52, 84, 64, 59, 44, 59, 44, 68, 72, 57, 59, 46, 58, 70, 90, 64, 67, 67, 71, 77, 61, 89, 82, 74, 40, 81, 67, 54, 57, 56, 44, 49, 90, 61, 59, 58, 64, 116, 63, 54, 68, 77, 70, 72, 67, 59, 71, 76, 53, 66, 47, 53, 58, 70, 59, 52, 52, 53, 61, 80, 85, 70, 58, 111, 56, 75, 62, 75, 75, 65, 63, 68, 70, 69, 99, 85, 54, 54, 80, 51, 68, 56, 59, 67, 62, 70, 83, 58, 54, 37, 67, 78, 71, 46, 65, 85, 60, 60, 77, 55, 68, 77, 58, 45, 74, 63, 63, 110, 34, 84, 43, 69, 90, 52, 54, 56, 46, 114, 67, 44, 77, 84, 62, 81, 38, 56, 60, 58, 67, 79, 61, 49, 67, 71, 68, 56, 73, 58, 64, 95, 70, 73, 49, 71, 62, 79, 69, 72, 65, 47, 60, 57, 55, 66, 58, 62, 58, 53, 59, 57, 62, 82, 73, 65, 52, 63, 91, 56, 64, 48, 78, 73, 62, 60, 51, 34, 67, 65, 95, 74, 46, 79, 67, 61, 48, 46, 51, 71, 70, 73, 58, 73, 69, 79, 65, 66, 66, 61, 60, 56, 53, 96, 102, 65, 87, 61, 82, 55, 37, 57, 59, 65, 55, 64, 59, 58, 61, 69, 102, 44, 65, 72, 64, 80, 53, 57, 62, 86, 74, 47, 67, 76, 69, 58, 62, 64, 64, 70, 54, 69, 75, 81, 64, 54, 78, 52, 57, 96, 58, 55, 81, 61, 78, 76, 58, 74, 41, 57, 72, 70, 65, 70, 81, 68, 82, 53, 48, 70, 65, 63, 73, 103, 80, 67, 67, 53, 58, 70, 52, 67, 55, 70, 60, 63, 59, 63, 86, 48, 79, 61, 63, 60, 48, 41, 65, 78, 76, 68, 82, 87, 62, 72, 65, 71, 71, 57, 90, 85, 64, 83, 68, 53, 70, 71, 74, 56, 103, 65, 97, 69, 73, 56, 53, 73, 48, 63, 57, 53, 77, 76, 71, 91, 114, 51, 72, 63, 78, 56, 36, 85, 43, 45, 72, 60, 54, 59, 57, 67, 74, 53, 58, 78, 76, 72, 63, 70, 69, 84, 73, 71, 45, 64, 75, 86, 75, 55, 62, 67, 64, 64, 59, 53, 56, 68, 56, 73, 51, 68, 61, 71, 53, 74, 58, 61, 51, 71, 68, 45, 56, 60, 64, 55, 67, 61, 90, 62, 86, 75, 61, 60, 66, 54, 54, 48, 70, 53, 73, 68, 66, 82, 55, 80, 91, 54, 66, 50, 62, 65, 82, 70, 62, 68, 58, 58, 73, 77, 64, 42, 55, 61, 56, 61, 83, 65, 69, 103, 80, 106, 73, 45, 58, 87, 64, 57, 85, 69, 76, 50, 75, 63, 59, 60, 62, 53, 83, 72, 46, 70, 64, 39, 77, 54, 63, 71, 47, 51, 63, 52, 52, 48, 62, 95, 69, 67, 77, 54, 52, 76, 81, 62, 81, 53, 59, 66, 83, 45, 47, 51, 74, 61, 112, 68, 71, 86, 73, 60, 55, 66, 68, 59, 80, 63, 66, 60, 64, 57, 60, 87, 63, 60, 52, 59, 49, 65, 54, 55, 72, 82, 79, 68, 72, 86, 70, 35, 76, 65, 60, 67, 66, 70, 78, 60, 54, 92, 74, 71, 73, 66, 45, 66, 75, 97, 59, 71, 74, 61, 45, 59, 86, 40, 71, 55, 75, 62, 76, 47, 60, 49, 57, 74, 81, 62, 54, 51, 85, 61, 54, 52, 64, 53, 103, 53, 78, 59, 69, 62, 50, 68, 65, 52, 58, 83, 86, 89, 47, 70, 53, 57, 88, 61, 94, 65, 61, 54, 38, 70, 63, 76, 59, 78, 76, 42, 65, 65, 70, 101, 67, 64, 67, 54, 68, 60, 72, 51, 54, 68, 58, 77, 58, 65, 69, 70, 69, 59, 60, 64, 64, 67, 63, 58, 93, 102, 68, 61, 50, 68, 46, 62, 49, 63, 74, 81, 71, 80, 77, 53, 67, 68, 89, 46, 62, 95, 60, 75, 71, 59, 55, 70, 54, 66, 152, 70, 63, 96, 55, 62, 53, 70, 67, 78, 92, 80, 70, 71, 48, 74, 53, 63, 77, 70, 53, 74, 76, 50, 60, 84, 66, 97, 71, 67, 56, 47, 64, 74, 59, 60, 97, 56, 75, 79, 51, 87, 54, 63, 60, 78, 90, 86, 68, 85, 70, 60, 67, 60, 75, 60, 75, 53, 49, 65, 59, 71, 73, 58, 60, 76, 71, 58, 58, 59, 91, 53, 66, 62, 57, 59, 82, 54, 58, 77, 99, 70, 70, 73, 57, 66, 76, 68, 69, 37, 77, 62, 80, 62, 55, 85, 58, 59, 77, 79, 81, 87, 48, 78, 95, 100, 71, 74, 61, 70, 53, 55, 64, 67, 56, 95, 51, 69, 66, 87, 111, 78, 66, 88, 49, 61, 59, 74, 67, 49, 60, 67, 73, 46, 68, 71, 62, 60, 78, 77, 67, 79, 57, 74, 54, 55, 68, 60, 60, 51, 54, 66, 88, 82, 60, 80, 54, 81, 53, 59, 53, 61, 75, 62, 53, 79, 60, 64, 69, 69, 44, 76, 71, 59, 58, 76, 55, 56, 56, 74, 73, 67, 74, 64, 67, 81, 67, 44, 38, 57, 70, 56, 71, 58, 48, 66, 78, 63, 60, 65, 95, 41, 73, 57, 51, 117, 59, 40, 55, 
118, 50, 75, 65, 64, 52, 76, 95, 61, 58, 81, 66, 52, 89, 59, 50, 69, 54, 138, 67, 103, 85, 72, 74, 72, 57, 48, 59, 59, 59, 49, 48, 53, 56, 73, 97, 60, 70, 62, 78, 46, 47, 68, 71, 61, 47, 59, 77, 62, 56, 67, 115, 75, 75, 69, 77, 68, 59, 65, 53, 63, 70, 59, 51, 69, 79, 44, 55, 68, 64, 69, 68, 72, 63, 74, 66, 73, 64, 85, 68, 67, 57, 63, 59, 74, 109, 50, 86, 55, 70, 33, 62, 58, 60, 60, 80, 62, 69, 69, 65, 48, 46, 60, 69, 45, 65, 63, 77, 80, 76, 62, 56, 69, 59, 53, 81, 62, 75, 68, 52, 80, 62, 53, 68, 61, 86, 77, 59, 78, 63, 40, 77, 67, 48, 59, 69, 82, 34, 64, 98, 79, 56, 69, 59, 96, 69, 70, 48, 76, 55, 60, 81, 58, 53, 54, 53, 75, 62, 48, 59, 61, 61, 45, 56, 63, 60, 60, 63, 65, 79, 114, 69, 56, 72, 54, 53, 104, 70, 78, 69, 66, 58, 47, 82, 56, 67, 64, 63, 57, 60, 56, 62, 105, 81, 101, 62, 70, 76, 76, 88, 94, 69, 72, 47, 61, 60, 50, 60, 68, 78, 69, 81, 66, 75, 48, 69, 50, 61, 70, 59, 62, 91, 56, 67, 82, 51, 58, 71, 93, 69, 78, 65, 95, 69, 64, 67, 74, 54, 67, 62, 64, 88, 67, 69, 66, 66, 46, 49, 52, 65, 47, 57, 58, 52, 55, 69, 65, 54, 60, 99, 60, 50, 53, 49, 78, 59, 72, 76, 90, 54, 56, 62, 153, 80, 75, 77, 72, 70, 67, 51, 62, 67, 61, 64, 59, 80, 56, 59, 66, 56, 55, 82, 64, 73, 54, 73, 45, 62, 65, 59, 79, 50, 57, 70, 72, 52, 72, 61, 91, 65, 65, 62, 49, 52, 54, 54, 64, 60, 74, 57, 59, 87, 61, 65, 59, 59, 46, 63, 67, 47, 71, 53, 68, 68, 52, 61, 78, 77, 66, 59, 106, 53, 79, 55, 77, 50, 56, 75, 64, 77, 81, 66, 72, 61, 78, 66, 64, 82, 48, 105, 73, 64, 65, 56, 64, 85, 56, 64, 70, 69, 52, 61, 96, 59, 61, 56, 61, 61, 62, 68, 49, 63, 66, 47, 79, 65, 61, 47, 71, 64, 60, 54, 74, 72, 63, 56, 83, 47, 75, 61, 62, 47, 81, 93, 65, 85, 51, 74, 57, 34, 60, 51, 74, 54, 63, 59, 71, 80, 65, 57, 66, 90, 65, 56, 68, 76, 86, 98, 39, 52, 57, 61, 88, 68, 57, 58, 65, 59, 66, 50, 81, 61, 61, 75, 76, 70, 64, 61, 51, 62, 41, 50, 60, 63, 75, 56, 69, 89, 70, 54, 74, 68, 55, 58, 63, 44, 50, 63, 83, 90, 61, 75, 41, 85, 62, 59, 63, 43, 64, 69, 57, 57, 76, 76, 53, 47, 131, 77, 63, 71, 48, 78, 53, 49, 54, 91, 79, 56, 57, 56, 58, 54, 72, 72, 75, 52, 64, 59, 73, 55, 47, 64, 52, 68, 65, 63, 64, 59, 65, 72, 48, 70, 68, 65, 69, 52, 50, 47, 63, 56, 59, 79, 66, 51, 61, 59, 85, 61, 70, 59, 46, 73, 75, 54, 60, 59, 68, 68, 62, 67, 60, 58, 56, 65, 58, 68, 41, 73, 69, 71, 66, 62, 70, 71, 47, 44, 61, 55, 62, 53, 66, 41, 60, 53, 57, 58, 51, 63, 107, 75, 65, 70, 49, 62, 64, 52, 47, 56, 66, 71, 88, 58, 130, 70, 83, 54, 55, 48, 59, 42, 56, 65, 62, 55, 102, 76, 84, 61, 58, 55, 71, 47, 57, 56, 76, 101, 64, 61, 55, 103, 62, 67, 63, 92, 50, 65, 66, 73, 61, 91, 91, 62, 88, 80, 54, 78, 78, 75, 63, 70, 72, 78, 73, 50, 52, 127, 78, 63, 73, 91, 51, 60, 61, 61, 74, 26, 72, 75, 65, 59, 51, 69, 101, 55, 42, 44, 84, 62, 57, 54, 73, 77, 65, 59, 67, 66, 69, 84, 63, 66, 53, 49, 60, 60, 73, 56, 60, 63, 60, 69, 67, 42, 66, 55, 51, 43, 73, 53, 54, 56, 65, 56, 61, 60, 91, 83, 62, 71, 61, 74, 58, 74, 72, 39, 69, 69, 64, 51, 68, 57, 53, 80, 70, 63, 59, 52, 63, 81, 60, 77, 71, 67, 61, 53, 55, 64, 59, 74, 72, 90, 57, 46, 51, 77, 74, 60, 42, 60, 69, 56, 64, 53, 76, 65, 58, 70, 56, 74, 56, 55, 70, 63, 53, 55, 57, 50, 58, 71, 43, 63, 51, 54, 87, 65, 70, 53, 66, 104, 79, 75, 96, 65, 77, 71, 52, 64, 127, 77, 58, 59, 49, 72, 66, 57, 62, 66, 51, 57, 97, 75, 98, 55, 64, 70, 62, 74, 65, 63, 59, 70, 55, 63, 58, 64, 68, 91, 78, 54, 66, 50, 46, 62, 62, 67, 91, 105, 83, 65, 95, 78, 64, 57, 77, 65, 55, 63, 64, 93, 46, 63, 84, 87, 53, 62, 92, 49, 69, 66, 79, 61, 85, 101, 63, 98, 57, 58, 77, 59, 66, 62, 61, 66, 59, 76, 70, 59, 87, 86, 45, 60, 96, 62, 63, 64, 87, 59, 61, 66, 72, 79, 79, 60, 
65, 88, 68, 85, 84, 70, 61, 65, 59, 60, 65, 53, 63, 95, 66, 66, 69, 76, 47, 51, 70, 83, 78, 84, 63, 75, 65, 58, 57, 58, 82, 61, 53, 91, 51, 61, 61, 56, 33, 53, 64, 48, 72, 55, 79, 55, 76, 63, 66, 81, 66, 54, 82, 92, 65, 52, 48, 74, 69, 66, 56, 68, 66, 51, 62, 66, 68, 63, 64, 75, 61, 97, 58, 83, 63, 68, 52, 88, 74, 77, 82, 59, 72, 64, 64, 56, 64, 60, 90, 64, 64, 57, 62, 68, 124, 108, 62, 56, 57, 51, 57, 56, 58, 48, 62, 72, 66, 59, 52, 54, 71, 65, 62, 49, 75, 62, 62, 64, 106, 65, 53, 60, 66, 73, 76, 74, 84, 62, 57, 71, 66, 75, 51, 61, 78, 59, 57, 62, 98, 52, 72, 63, 94, 50, 52, 66, 52, 41, 82, 48, 67, 47, 84, 60, 57, 47, 49, 49, 58, 69, 75, 63, 92, 57, 69, 59, 68, 67, 56, 66, 64, 60, 71, 59, 72, 64, 76, 41, 62, 80, 60, 74, 76, 53, 61, 75, 79, 57, 51, 57, 84, 64, 67, 85, 63, 55, 62, 80, 50, 75, 66, 52, 66, 84, 73, 64, 47, 61, 39, 47, 68, 68, 60, 67, 65, 60, 65, 63, 87, 59, 134, 58, 59, 68, 53, 59, 69, 96, 71, 97, 64, 71, 62, 77, 63, 56, 59, 53, 50, 57, 64, 52, 59, 63, 50, 59, 81, 80, 67, 65, 83, 68, 62, 69, 78, 48, 86, 56, 53, 72, 77, 56, 58, 90, 81, 59, 81, 65, 75, 53, 84, 78, 68, 60, 71, 82, 57, 69, 60, 75, 64, 69, 61, 82, 67, 65, 56, 56, 50, 65, 75, 52, 83, 65, 86, 55, 74, 70, 55, 62, 73, 61, 59, 71, 63, 49, 58, 77, 54, 68, 74, 74, 70, 76, 100, 79, 55, 59, 92, 51, 73, 55, 56, 62, 59, 51, 69, 84, 55, 63, 71, 47, 51, 57, 109, 51, 55, 61, 58, 53, 76, 69, 74, 75, 66, 71, 77, 60, 81, 49, 58, 75, 46, 50, 47, 92, 75, 77, 62, 47, 83, 57, 87, 75, 53, 52, 66, 61, 84, 60, 81, 70, 68, 55, 93, 73, 89, 58, 85, 62, 68, 100, 56, 64, 52, 64, 66, 73, 82, 62, 74, 66, 59, 71, 64, 74, 66, 50, 61, 54, 52, 116, 50, 74, 53, 104, 80, 67, 54, 74, 81, 65, 42, 61, 144, 79, 53, 61, 63, 85, 62, 60, 66, 46, 88, 75, 68, 80, 71, 58, 58, 70, 56, 54, 59, 57, 65, 78, 61, 44, 76, 61, 76, 57, 54, 65, 83, 66, 46, 68, 75, 66, 65, 48, 59, 61, 78, 70, 72, 63, 45, 80, 97, 70, 48, 63, 74, 62, 73, 59, 90, 55, 52, 58, 57, 65, 57, 79, 66, 75, 70, 72, 68, 63, 72, 52, 79, 54, 55, 74, 57, 68, 64, 79, 50, 63, 81, 73, 72, 61, 72, 78, 84, 48, 54, 58, 66, 78, 71, 83, 63, 46, 58, 60, 92, 64, 60, 75, 74, 62, 58, 71, 54, 52, 69, 52, 54, 55, 68, 62, 52, 81, 58, 52, 70, 87, 58, 47, 111, 82, 75, 62, 52, 60, 46, 60, 72, 66, 77, 66, 83, 59, 48, 59, 69, 64, 75, 51, 52, 68, 59, 94, 54, 44, 68, 75, 63, 74, 92, 60, 60, 47, 61, 75, 73, 61, 72, 74, 60, 72, 52, 59, 73, 60, 69, 71, 39, 55, 63, 54, 62, 67, 60, 61, 72, 64, 74, 45, 112, 59, 71, 57, 38, 69, 78, 67, 55, 91, 52, 78, 61, 60, 55, 66, 56, 80, 65, 67, 71, 74, 66, 61, 52, 65, 66, 63, 61, 78, 68, 63, 58, 67, 93, 65, 87, 59, 57, 52, 104, 53, 73, 52, 67, 71, 55, 67, 53, 64, 69, 44, 93, 72, 79, 66, 48, 49, 53, 85, 53, 64, 88, 79, 63, 61, 68, 64, 73, 48, 62, 65, 72, 63, 65, 53, 89, 68, 71, 76, 72, 57, 63, 58, 59, 105, 46, 53, 45, 75, 56, 71, 75, 68, 53, 74, 61, 60, 62, 73, 62, 77, 79, 71, 76, 67, 75, 67, 64, 80, 63, 68, 60, 53, 67, 51, 51, 60, 51, 66, 131, 57, 103, 64, 50, 65, 59, 83, 63, 56, 49, 77, 54, 60, 64, 95, 84, 96, 44, 60, 75, 50, 56, 44, 47, 61, 71, 105, 76, 69, 69, 65, 61, 68, 93, 51, 42, 64, 52, 72, 60, 57, 42, 103, 64, 113, 57, 58, 53, 68, 68, 71, 65, 44, 68, 45, 53, 63, 52, 60, 65, 74, 95, 61, 56, 79, 72, 68, 74, 60, 59, 73, 92, 68, 65, 92, 67, 69, 54, 81, 55, 75, 63, 74, 69, 76, 56, 54, 59, 74, 67, 60, 83, 47, 95, 66, 69, 47, 48, 58, 60, 65, 56, 65, 58, 52, 74, 57, 92, 61, 71, 59, 102, 66, 60, 65, 61, 76, 73, 48, 113, 67, 57, 61, 61, 50, 80, 66, 100, 64, 69, 63, 49, 69, 71, 74, 51, 61, 55, 76, 64, 66, 93, 60, 62, 53, 66, 63, 46, 61, 65, 55, 57, 59, 68, 68, 67, 46, 60, 52, 59, 49, 76, 65, 57, 
70, 75, 91, 81, 61, 76, 66, 73, 62, 56, 64, 67, 67, 87, 77, 75, 69, 63, 50, 63, 67, 72, 80, 58, 72, 55, 51, 70, 101, 61, 72, 70, 80, 70, 68, 46, 125, 84, 65, 58, 46, 105, 63, 53, 61, 104, 52, 57, 75, 52, 39, 84, 81, 65, 78, 49, 58, 82, 78, 40, 61, 65, 59, 55, 97, 42, 47, 76, 66, 53, 63, 74, 63, 66, 97, 69, 74, 57, 63, 69, 46, 40, 53, 55, 71, 75, 49, 66, 58, 60, 80, 73, 63, 57, 77, 69, 64, 60, 62, 65, 60, 60, 58, 37, 71, 58, 62, 82, 44, 90, 74, 54, 68, 61, 62, 51, 59, 65, 62, 65, 65, 59, 57, 76, 62, 57, 76, 58, 60, 61, 65, 53, 73, 102, 66, 50, 63, 60, 80, 76, 58, 62, 69, 52, 63, 40, 59, 59, 40, 62, 71, 52, 52, 55, 63, 63, 69, 58, 54, 85, 56, 50, 80, 58, 66, 69, 79, 60, 63, 43, 74, 58, 67, 66, 131, 83, 62, 57, 59, 60, 87, 54, 57, 77, 59, 83, 57, 58, 66, 69, 118, 62, 62, 65, 82, 67, 68, 45, 99, 63, 87, 70, 75, 74, 55, 55, 60, 70, 56, 64, 46, 56, 74, 53, 96, 64, 57, 80, 110, 75, 49, 74, 54, 61, 63, 50, 61, 64, 62, 67, 74, 79, 59, 54, 50, 54, 61, 55, 51, 49, 43, 65, 52, 120, 68, 59, 82, 77, 60, 70, 70, 55, 66, 67, 97, 63, 62, 75, 64, 78, 58, 53, 98, 53, 56, 72, 53, 70, 61, 107, 54, 63, 61, 51, 62, 49, 61, 55, 61, 118, 52, 77, 56, 83, 83, 69, 79, 59, 59, 64, 66, 39, 57, 44, 61, 61, 70, 83, 66, 70, 64, 83, 67, 68, 76, 71, 60, 54, 70, 67, 65, 65, 57, 63, 72, 102, 68, 52, 65, 64, 65, 51, 95, 52, 73, 61, 70, 87, 62, 68, 59, 57, 57, 57, 55, 56, 46, 79, 42, 73, 84, 59, 69, 54, 75, 67, 48, 66, 68, 78, 67, 93, 72, 68, 67, 80, 60, 76, 52, 82, 64, 137, 66, 60, 62, 69, 68, 73, 69, 54, 94, 52, 64, 86, 74, 63, 50, 66, 77, 55, 68, 52, 80, 51, 54, 53, 48, 67, 73, 76, 66, 105, 71, 56, 62, 51, 47, 80, 104, 72, 61, 56, 74, 54, 78, 60, 67, 93, 76, 49, 71, 61, 60, 72, 75, 76, 67, 69, 64, 56, 71, 58, 65, 71, 84, 63, 62, 77, 67, 49, 50, 59, 55, 67, 74, 91, 45, 69, 57, 84, 74, 67, 47, 84, 68, 52, 54, 51, 63, 52, 62, 93, 72, 67, 62, 69, 57, 68, 61, 74, 54, 60, 42, 59, 80, 57, 57, 57, 63, 57, 61, 73, 53, 62, 60, 90, 63, 52, 73, 67, 54, 68, 85, 52, 63, 44, 52, 63, 55, 61, 71, 60, 67, 47, 49, 60, 72, 52, 70, 75, 62, 62, 73, 91, 65, 53, 43, 52, 57, 64, 47, 90, 59, 67, 58, 69, 58, 82, 64, 101, 60, 49, 61, 47, 68, 63, 75, 41, 55, 82, 56, 57, 55, 58, 62, 68, 108, 63, 65, 79, 59, 61, 128, 57, 59, 72, 97, 55, 49, 75, 64, 56, 52, 67, 69, 154, 73, 77, 62, 74, 64, 60, 50, 54, 64, 62, 56, 69, 58, 72, 61, 59, 70, 55, 61, 75, 57, 93, 70, 71, 69, 73, 76, 66, 60, 60, 66, 72, 56, 63, 66, 72, 72, 50, 62, 65, 58, 54, 70, 66, 53, 73, 83, 74, 57, 62, 87, 60, 60, 78, 52, 87, 48, 63, 70, 59, 73, 53, 79, 77, 56, 52, 65, 63, 62, 70, 57, 69, 58, 66, 48, 67, 64, 65, 87, 60, 75, 56, 60, 66, 63, 66, 78, 61, 49, 76, 60, 67, 83, 72, 49, 62, 50, 54, 52, 96, 54, 49, 61, 81, 59, 80, 98, 75, 72, 105, 67, 74, 73, 42, 59, 78, 80, 59, 77, 72, 72, 65, 57, 65, 99, 66, 69, 61, 55, 51, 68, 78, 103, 73, 84, 46, 56, 55, 54, 68, 45, 61, 57, 64, 63, 62, 81, 42, 66, 45, 70, 55, 73, 84, 57, 60, 57, 65, 60, 52, 65, 62, 71, 71, 46, 74, 76, 66, 57, 85, 69, 50, 67, 57, 63, 52, 65, 47, 59, 62, 73, 83, 70, 85, 45, 71, 62, 63, 60, 50, 86, 63, 72, 84, 59, 64, 107, 71, 80, 69, 57, 64, 50, 66, 71, 59, 64, 53, 62, 57, 60, 45, 94, 83, 63, 43, 56, 63, 80, 74, 63, 36, 95, 72, 77, 57, 57, 78, 64, 62, 48, 65, 76, 71, 67, 94, 63, 63, 55, 69, 66, 73, 64, 76, 59, 47, 54, 47, 53, 56, 87, 60, 51, 74, 56, 84, 58, 60, 72, 56, 62, 85, 48, 62, 77, 56, 82, 83, 122, 64, 45, 48, 77, 74, 43, 48, 55, 52, 66, 54, 59, 78, 68, 63, 59, 55, 65, 68, 60, 105, 58, 65, 86, 54, 53, 58, 60, 95, 68, 63, 78, 68, 81, 74, 52, 92, 60, 52, 65, 67, 76, 59, 60, 51, 56, 54, 62, 87, 85, 46, 80, 59, 57, 62, 57, 58, 
78, 63, 71, 67, 85, 75, 62, 82, 73, 73, 53, 102, 47, 58, 75, 71, 45, 61, 53, 66, 108, 56, 62, 76, 61, 57, 58, 44, 63, 69, 55, 68, 88, 63, 62, 68, 51, 63, 65, 99, 60, 59, 66, 69, 59, 47, 81, 54, 58, 67, 99, 57, 67, 72, 57, 69, 67, 66, 60, 47, 62, 67, 77, 64, 69, 66, 66, 85, 56, 49, 45, 81, 69, 60, 56, 55, 65, 68, 62, 61, 56, 64, 69, 55, 58, 54, 71, 65, 59, 88, 74, 75, 64, 53, 61, 58, 76, 77, 90, 71, 60, 61, 75, 90, 50, 87, 61, 71, 68, 61, 77, 67, 78, 68, 75, 78, 64, 66, 47, 71, 83, 63, 53, 67, 61, 45, 76, 59, 60, 60, 53, 71, 63, 70, 65, 53, 69, 71, 94, 60, 67, 54, 58, 83, 93, 87, 89, 69, 55, 62, 61, 72, 57, 73, 58, 111, 83, 73, 72, 62, 62, 63, 44, 73, 95, 71, 61, 55, 60, 67, 100, 58, 68, 67, 65, 67, 64, 63, 76, 65, 84, 69, 92, 94, 79, 56, 67, 107, 50, 64, 60, 61, 65, 75, 86, 70, 54, 84, 62, 59, 69, 49, 57, 65, 86, 54, 52, 78, 64, 56, 54, 62, 51, 45, 76, 62, 116, 78, 61, 62, 70, 70, 60, 62, 70, 81, 71, 58, 50, 50, 61, 61, 60, 78, 73, 68, 81, 67, 72, 57, 62, 59, 76, 70, 53, 48, 69, 69, 52, 81, 81, 107, 58, 52, 49, 66, 73, 48, 71, 55, 59, 60, 83, 58, 42, 69, 40, 64, 55, 62, 67, 49, 80, 76, 59, 91, 76, 64, 72, 63, 54, 73, 66, 63, 50, 61, 59, 78, 103, 74, 50, 55, 68, 77, 70, 60, 47, 43, 59, 53, 74, 67, 67, 69, 81, 56, 61, 61, 65, 56, 61, 69, 60, 82, 89, 65, 119, 65, 67, 55, 67, 70, 54, 58, 85, 63, 67, 71, 66, 61, 60, 76, 52, 51, 64, 71, 103, 72, 75, 85, 99, 76, 70, 77, 53, 81, 61, 62, 62, 55, 70, 63, 44, 53, 62, 63, 73, 55, 78, 73, 73, 61, 69, 67, 62, 52, 61, 64, 62, 60, 51, 52, 62, 57, 58, 54, 67, 76, 55, 63, 56, 60, 71, 61, 86, 90, 69, 58, 69, 78, 80, 61, 55, 70, 59, 56, 118, 61, 57, 65, 54, 66, 66, 57, 89, 60, 55, 74, 74, 72, 58, 74, 96, 52, 64, 74, 66, 69, 64, 80, 85, 64, 63, 54, 59, 75, 67, 67, 63, 36, 55, 80, 50, 58, 63, 67, 61, 72, 62, 88, 66, 78, 88, 76, 36, 60, 75, 60, 53, 56, 72, 58, 54, 62, 78, 52, 67, 65, 67, 49, 94, 66, 71, 59, 57, 55, 60, 54, 63, 88, 55, 72, 68, 48, 61, 70, 74, 65, 56, 120, 59, 66, 71, 62, 57, 72, 96, 60, 73, 58, 75, 77, 67, 64, 73, 56, 69, 94, 82, 71, 56, 74, 54, 79, 81, 69, 82, 65, 94, 62, 79, 69, 73, 58, 58, 39, 65, 48, 52, 59, 82, 72, 70, 64, 61, 57, 78, 89, 76, 55, 105, 86, 53, 34, 89, 53, 54, 91, 64, 75, 64, 61, 59, 66, 73, 64, 46, 53, 53, 66, 54, 89, 68, 84, 76, 35, 63, 58, 76, 53, 47, 57, 73, 58, 59, 61, 55, 75, 74, 63, 81, 66, 39, 65, 52, 42, 62, 95, 57, 80, 52, 91, 62, 65, 48, 65, 79, 61, 47, 50, 67, 57, 59, 83, 112, 55, 55, 86, 70, 62, 80, 93, 68, 72, 63, 66, 66, 51, 66, 53, 66, 62, 65, 61, 55, 52, 69, 105, 65, 86, 78, 61, 74, 60, 56, 73, 73, 72, 50, 63, 49, 57, 63, 58, 65, 86, 41, 66, 59, 40, 71, 72, 47, 49, 61, 71, 71, 52, 58, 62, 76, 56, 60, 101, 56, 70, 48, 54, 95, 64, 57, 112, 58, 69, 53, 67, 69, 55, 64, 70, 62, 51, 62, 55, 60, 69, 53, 59, 77, 66, 61, 74, 69, 64, 119, 90, 54, 48, 137, 70, 59, 88, 77, 60, 53, 62, 62, 145, 39, 48, 59, 69, 55, 86, 52, 83, 73, 53, 72, 62, 82, 50, 48, 69, 65, 65, 54, 52, 78, 78, 51, 45, 62, 77, 55, 48, 63, 70, 79, 76, 54, 78, 57, 82, 57, 54, 49, 61, 65, 64, 69, 55, 51, 70, 47, 65, 72, 71, 61, 62, 62, 50, 59, 55, 78, 134, 44, 69, 58, 66, 53, 96, 87, 68, 66, 83, 58, 67, 70, 52, 82, 62, 74, 56, 53, 54, 76, 96, 56, 57, 49, 55, 58, 76, 120, 74, 58, 105, 54, 61, 65, 68, 88, 51, 50, 53, 60, 94, 87, 69, 67, 92, 59, 71, 71, 56, 65, 79, 65, 69, 36, 67, 51, 61, 62, 60, 44, 58, 116, 81, 59, 71, 50, 67, 57, 55, 51, 61, 63, 79, 74, 56, 87, 40, 60, 68, 60, 72, 53, 69, 69, 47, 44, 48, 75, 68, 67, 65, 143, 55, 74, 116, 50, 69, 70, 76, 63, 68, 53, 65, 53, 69, 66, 74, 111, 64, 74, 71, 56, 68, 73, 56, 52, 79, 84, 70, 73, 69, 87, 88, 62, 66, 
42, 61, 61, 63, 86, 110, 61, 47, 57, 64, 93, 59, 64, 73, 75, 78, 69, 94, 102, 70, 80, 73, 106, 61, 68, 59, 52, 73, 72, 57, 60, 47, 68, 82, 78, 100, 61, 44, 80, 45, 101, 60, 66, 54, 54, 49, 68, 70, 68, 80, 62, 65, 68, 54, 57, 70, 37, 59, 83, 72, 46, 95, 53, 62, 55, 44, 64, 66, 63, 63, 94, 71, 53, 51, 75, 67, 96, 58, 65, 51, 28, 48, 89, 44, 86, 71, 69, 58, 51, 56, 54, 77, 64, 55, 53, 56, 54, 66, 39, 66, 64, 43, 89, 73, 57, 70, 65, 56, 53, 84, 64, 67, 65, 56, 57, 66, 147, 64, 60, 58, 50, 86, 44, 56, 73, 72, 78, 63, 46, 54, 47, 47, 63, 111, 76, 101, 77, 68, 64, 63, 60, 72, 68, 64, 87, 69, 49, 64, 96, 63, 43, 60, 83, 75, 59, 66, 40, 71, 62, 70, 67, 71, 64, 61, 65, 97, 51, 88, 59, 53, 67, 62, 64, 70, 73, 56, 38, 66, 50, 59, 67, 54, 56, 76, 64, 78, 69, 58, 70, 47, 58, 54, 85, 97, 101, 62, 64, 60, 74, 71, 90, 56, 66, 77, 56, 39, 65, 68, 71, 69, 58, 54, 68, 76, 65, 58, 72, 62, 67, 73, 72, 62, 76, 79, 69, 65, 52, 52, 59, 63, 63, 118, 55, 71, 75, 50, 86, 66, 75, 67, 56, 65, 65, 65, 60, 63, 53, 78, 64, 78, 63, 53, 59, 71, 66, 69, 58, 79, 76, 49, 68, 56, 104, 32, 70, 58, 49, 103, 56, 61, 59, 61, 67, 60, 50, 71, 72, 64, 61, 57, 100, 64, 49, 76, 55, 62, 61, 97, 78, 70, 63, 63, 51, 62, 82, 61, 53, 56, 49, 67, 52, 53, 77, 43, 64, 64, 75, 45, 79, 74, 55, 66, 59, 88, 91, 78, 51, 48, 55, 42, 86, 67, 75, 49, 55, 105, 70, 38, 87, 107, 34, 70, 58, 71, 63, 59, 59, 74, 51, 33, 61, 100, 68, 81, 85, 63, 67, 56, 84, 89, 46, 68, 77, 49, 79, 46, 49, 62, 62, 88, 89, 44, 55, 80, 77, 72, 74, 69, 89, 49, 61, 81, 73, 55, 60, 62, 55, 42, 62, 56, 40, 52, 70, 54, 63, 50, 52, 40, 69, 72, 73, 50, 59, 114, 83, 76, 68, 80, 69, 50, 76, 67, 67, 75, 83, 61, 49, 73, 83, 29, 66, 47, 83, 57, 97, 57, 57, 83, 40, 56, 51, 67, 62, 64, 47, 78, 76, 72, 68, 73, 68, 39, 66, 72, 69, 72, 67, 67, 77, 53, 46, 57, 69, 86, 59, 61, 76, 69, 90, 63, 74, 61, 51, 56, 56, 73, 60, 70, 111, 87, 75, 78, 90, 44, 71, 44, 49, 87, 54, 103, 51, 57, 67, 72, 54, 51, 59, 107, 84, 48, 48, 35, 61, 48, 65, 56, 95, 59, 60, 63, 58, 49, 61, 53, 61, 51, 69, 89, 60, 87, 55, 56, 61, 72, 57, 62, 67, 60, 84, 91, 64, 55, 69, 81, 59, 64, 67, 59, 56, 78, 76, 46, 61, 62, 57, 69, 48, 78, 73, 87, 85, 75, 51, 66, 65, 69, 119, 81, 92, 52, 79, 67, 60, 91, 63, 61, 55, 62, 57, 62, 57, 61, 61, 64, 66, 53, 65, 56, 56, 49, 58, 59, 58, 56, 73, 75, 63, 75, 65, 49, 78, 85, 79, 70, 57, 58, 53, 89, 77, 53, 61, 63, 67, 53, 60, 51, 38, 78, 60, 67, 56, 66, 61, 64, 78, 58, 76, 45, 87, 62, 69, 61, 49, 65, 70, 70, 58, 45, 56, 72, 61, 52, 49, 74, 64, 47, 55, 46, 65, 73, 63, 51, 81, 52, 56, 70, 55, 60, 36, 72, 74, 56, 54, 62, 51, 65, 58, 78, 89, 57, 66, 43, 47, 70, 64, 104, 91, 75, 67, 60, 60, 91, 47, 56, 78, 62, 67, 68, 82, 73, 46, 65, 68, 63, 55, 69, 59, 82, 51, 58, 79, 73, 62, 53, 53, 97, 74, 65, 52, 46, 78, 60, 52, 66, 66, 53, 98, 60, 72, 64, 60, 73, 70, 69, 60, 64, 59, 60, 51, 85, 55, 59, 82, 56, 102, 58, 74, 76, 75, 59, 69, 53, 65, 62, 64, 57, 50, 60, 76, 90, 66, 60, 71, 58, 84, 62, 60, 59, 98, 68, 46, 70, 58, 50, 63, 55, 73, 68, 52, 57, 49, 61, 47, 80, 62, 61, 92, 57, 49, 84, 52, 73, 74, 66, 71, 64, 77, 55, 72, 116, 70, 60, 94, 60, 56, 105, 92, 73, 61, 60, 70, 69, 59, 85, 67, 59, 64, 97, 59, 43, 46, 77, 73, 50, 83, 43, 58, 83, 62, 68, 54, 88, 71, 69, 74, 61, 54, 54, 60, 48, 74, 76, 62, 94, 69, 52, 74, 75, 63, 65, 49, 74, 47, 65, 60, 49, 82, 68, 77, 66, 80, 50, 68, 59, 68, 66, 91, 61, 87, 62, 72, 54, 66, 71, 75, 85, 72, 65, 81, 143, 66, 63, 82, 68, 50, 78, 62, 69, 57, 73, 61, 64, 56, 56, 53, 55, 56, 56, 52, 55, 75, 72, 82, 53, 89, 57, 68, 59, 92, 86, 61, 68, 78, 86, 63, 73, 83, 66, 50, 64, 52, 55, 
60, 60, 60, 91, 64, 60, 58, 53, 64, 72, 73, 65, 47, 50, 48, 65, 65, 61, 58, 67, 75, 71, 64, 77, 48, 70, 90, 115, 74, 76, 56, 73, 77, 53, 56, 51, 57, 59, 58, 56, 80, 88, 69, 75, 116, 61, 78, 72, 68, 64, 74, 73, 74, 72, 47, 50, 79, 85, 45, 75, 73, 69, 67, 78, 92, 42, 66, 69, 73, 83, 80, 76, 61, 55, 64, 45, 63, 67, 96, 49, 69, 109, 77, 56, 54, 61, 60, 58, 105, 65, 55, 59, 65, 57, 58, 44, 55, 63, 53, 83, 47, 80, 58, 79, 95, 68, 62, 58, 66, 70, 69, 98, 96, 54, 56, 84, 53, 64, 65, 55, 63, 74, 70, 59, 70, 60, 61, 58, 80, 100, 46, 62, 84, 84, 85, 49, 60, 51, 64, 60, 73, 66, 54, 63, 50, 51, 66, 52, 60, 52, 65, 61, 34, 54, 81, 79, 70, 75, 57, 63, 50, 55, 63, 55, 69, 50, 73, 62, 61, 81, 61, 110, 59, 71, 57, 117, 54, 66, 70, 71, 60, 48, 66, 59, 105, 52, 64, 77, 48, 67, 68, 74, 64, 60, 63, 67, 69, 80, 55, 62, 82, 67, 62, 83, 64, 87, 69, 54, 55, 52, 75, 53, 56, 77, 57, 60, 63, 64, 38, 81, 71, 53, 68, 70, 68, 72, 109, 70, 56, 66, 66, 61, 57, 54, 66, 69, 70, 75, 69, 89, 54, 85, 81, 61, 77, 78, 83, 68, 77, 83, 74, 96, 50, 57, 62, 67, 73, 60, 67, 62, 61, 56, 76, 84, 60, 76, 64, 76, 76, 67, 51, 59, 72, 82, 50, 79, 63, 68, 60, 54, 58, 64, 63, 52, 64, 61, 63, 62, 73, 64, 82, 50, 62, 74, 43, 70, 63, 57, 59, 44, 63, 53, 78, 53, 68, 62, 51, 55, 78, 49, 63, 69, 66, 86, 71, 95, 80, 72, 66, 58, 63, 53, 56, 75, 106, 67, 63, 85, 42, 63, 65, 71, 47, 104, 60, 74, 75, 51, 61, 58, 42, 73, 62, 47, 57, 62, 57, 67, 50, 72, 70, 83, 64, 94, 70, 76, 71, 69, 56, 75, 59, 53, 70, 64, 56, 46, 77, 57, 91, 91, 50, 57, 64, 78, 59, 77, 58, 67, 71, 71, 59, 50, 69, 56, 53, 48, 87, 57, 64, 73, 90, 54, 64, 57, 74, 55, 53, 60, 55, 63, 74, 49, 52, 73, 53, 56, 73, 80, 72, 56, 47, 61, 66, 59, 66, 62, 65, 61, 59, 48, 61, 62, 84, 48, 66, 66, 63, 62, 64, 69, 58, 66, 60, 74, 78, 75, 53, 64, 43, 85, 52, 64, 62, 67, 83, 74, 68, 76, 66, 70, 65, 66, 49, 79, 56, 66, 47, 57, 68, 65, 61, 74, 86, 69, 62, 87, 51, 93, 61, 65, 74, 97, 73, 63, 50, 63, 61, 50, 61, 62, 61, 52, 64, 53, 60, 91, 59, 42, 111, 61, 63, 76, 81, 62, 48, 70, 68, 98, 84, 50, 76, 40, 65, 50, 65, 66, 58, 93, 68, 54, 75, 67, 58, 66, 77, 66, 73, 49, 40, 52, 61, 65, 63, 54, 52, 54, 58, 65, 57, 54, 56, 52, 72, 32, 53, 82, 78, 58, 90, 54, 64, 59, 50, 60, 52, 83, 69, 45, 91, 72, 57, 58, 65, 67, 55, 62, 82, 82, 65, 74, 105, 76, 66, 60, 66, 61, 83, 63, 59, 58, 43, 41, 77, 53, 66, 67, 65, 62, 59, 58, 72, 81, 67, 80, 72, 65, 41, 77, 67, 77, 70, 57, 68, 48, 77, 83, 91, 55, 51, 54, 63, 58, 73, 73, 55, 59, 55, 76, 75, 52, 48, 95, 73, 93, 81, 74, 79, 66, 82, 59, 78, 59, 63, 72, 48, 60, 58, 73, 57, 73, 54, 56, 77, 74, 42, 59, 64, 54, 128, 51, 64, 60, 64, 61, 34, 43, 58, 63, 66, 50, 46, 57, 55, 61, 79, 66, 53, 77, 67, 62, 57, 67, 44, 52, 66, 66, 48, 63, 68, 49, 69, 77, 52, 53, 65, 63, 66, 59, 86, 70, 83, 70, 61, 60, 49, 108, 60, 68, 73, 74, 68, 47, 51, 42, 77, 68, 68, 65, 60, 51, 50, 72, 63, 93, 42, 96, 98, 68, 58, 37, 47, 62, 82, 115, 49, 83, 49, 67, 61, 48, 66, 51, 50, 73, 108, 58, 64, 54, 51, 68, 57, 71, 58, 62, 60, 60, 70, 110, 58, 80, 66, 46, 49, 76, 57, 59, 65, 62, 61, 56, 77, 62, 71, 84, 60, 64, 72, 66, 65, 79, 55, 65, 69, 75, 70, 56, 63, 77, 65, 68, 47, 69, 72, 59, 58, 66, 44, 60, 59, 62, 65, 83, 63, 46, 67, 86, 78, 89, 90, 65, 44, 70, 64, 74, 40, 66, 83, 56, 51, 69, 79, 66, 86, 57, 68, 65, 55, 74, 58, 87, 46, 76, 62, 56, 73, 75, 51, 80, 58, 60, 64, 68, 64, 53, 74, 69, 48, 77, 104, 67, 71, 51, 56, 64, 47, 75, 63, 88, 71, 59, 67, 76, 61, 54, 70, 60, 87, 59, 64, 59, 52, 69, 60, 54, 86, 57, 51, 53, 62, 80, 73, 92, 55, 60, 75, 54, 79, 80, 54, 73, 94, 54, 40, 65, 81, 58, 55, 55, 50, 51, 58, 60, 69, 45, 
65, 80, 76, 57, 81, 60, 65, 74, 41, 74, 80, 37, 80, 47, 68, 76, 55, 117, 76, 48, 74, 57, 60, 83, 107, 74, 50, 55, 60, 61, 80, 59, 52, 57, 54, 98, 61, 61, 44, 53, 71, 58, 44, 90, 60, 83, 79, 54, 90, 72, 49, 76, 74, 67, 50, 45, 49, 62, 59, 80, 60, 93, 53, 50, 73, 89, 80, 86, 66, 65, 63, 67, 56, 58, 63, 77, 49, 72, 105, 54, 47, 62, 60, 58, 95, 69, 50, 65, 66, 50, 58, 68, 67, 77, 37, 57, 66, 143, 50, 82, 59, 63, 68, 78, 62, 74, 49, 56, 65, 63, 52, 61, 50, 54, 55, 57, 54, 64, 51, 58, 54, 41, 74, 62, 67, 64, 77, 56, 66, 81, 76, 86, 69, 72, 70, 86, 59, 68, 55, 69, 71, 53, 57, 82, 48, 61, 66, 74, 39, 59, 66, 63, 57, 51, 58, 69, 65, 51, 76, 64, 90, 72, 84, 83, 57, 90, 69, 103, 42, 71, 58, 61, 54, 68, 70, 53, 66, 58, 71, 64, 63, 86, 53, 63, 126, 64, 64, 73, 68, 62, 70, 48, 56, 61, 49, 67, 57, 76, 64, 55, 71, 50, 72, 70, 85, 65, 71, 71, 86, 83, 68, 60, 60, 42, 66, 54, 56, 69, 104, 63, 59, 105, 55, 52, 58, 67, 57, 71, 91, 47, 67, 65, 69, 51, 36, 58, 55, 61, 60, 84, 60, 80, 63, 48, 56, 56, 56, 57, 66, 76, 53, 107, 79, 38, 50, 89, 54, 62, 42, 64, 61, 55, 70, 59, 84, 48, 55, 62, 60, 99, 63, 54, 85, 76, 89, 58, 63, 48, 102, 96, 58, 70, 66, 57, 67, 64, 79, 62, 73, 65, 61, 47, 79, 58, 65, 74, 73, 94, 60, 37, 62, 71, 63, 60, 72, 58, 82, 76, 48, 74, 69, 122, 77, 79, 71, 117, 62, 84, 62, 80, 65, 58, 61, 66, 82, 66, 62, 51, 59, 52, 63, 84, 57, 58, 41, 59, 76, 62, 45, 75, 78, 81, 66, 62, 62, 66, 66, 60, 55, 49, 72, 68, 45, 52, 56, 51, 66, 52, 45, 59, 68, 52, 71, 53, 72, 66, 116, 56, 56, 63, 71, 63, 65, 54, 60, 69, 57, 69, 56, 77, 48, 70, 54, 71, 68, 60, 58, 45, 47, 45, 60, 48, 56, 52, 65, 86, 52, 73, 76, 78, 58, 45, 73, 78, 63, 60, 59, 73, 83, 63, 66, 72, 59, 79, 68, 74, 66, 76, 64, 53, 61, 58, 42, 77, 51, 56, 81, 54, 71, 89, 93, 63, 71, 64, 74, 70, 58, 68, 68, 50, 50, 53, 90, 44, 68, 68, 61, 52, 57, 51, 57, 68, 73, 78, 61, 83, 54, 62, 59, 63, 57, 52, 71, 102, 102, 62, 53, 63, 71, 57, 67, 68, 64, 112, 69, 75, 65, 89, 68, 52, 44, 70, 46, 49, 61, 65, 64, 83, 54, 61, 60, 70, 58, 59, 55, 57, 80, 60, 63, 64, 76, 68, 65, 79, 46, 72, 76, 85, 83, 117, 61, 73, 83, 80, 61, 55, 81, 54, 63, 89, 58, 66, 60, 66, 51, 80, 76, 79, 50, 55, 59, 65, 84, 64, 56, 74, 58, 57, 72, 69, 62, 76, 42, 77, 65, 56, 74, 63, 80, 64, 70, 57, 51, 69, 61, 66, 58, 59, 67, 51, 51, 71, 79, 62, 72, 46, 80, 54, 71, 68, 56, 46, 59, 69, 67, 65, 69, 67, 69, 62, 56, 85, 54, 65, 75, 56, 78, 53, 70, 56, 67, 62, 45, 70, 85, 61, 57, 65, 57, 52, 62, 68, 91, 76, 74, 67, 61, 94, 64, 53, 69, 95, 68, 49, 61, 42, 89, 54, 75, 93, 77, 65, 67, 62, 80, 103, 74, 74, 106, 61, 61, 56, 44, 52, 55, 51, 66, 96, 50, 64, 59, 67, 56, 50, 64, 71, 71, 80, 58, 96, 87, 69, 78, 69, 50, 51, 70, 49, 80, 61, 49, 77, 64, 73, 54, 66, 72, 52, 69, 67, 65, 68, 64, 62, 60, 92, 73, 63, 85, 63, 66, 33, 63, 67, 57, 55, 67, 68, 82, 57, 57, 60, 58, 49, 57, 75, 66, 51, 60, 93, 78, 59, 62, 85, 59, 81, 48, 62, 58, 68, 69, 83, 60, 46, 57, 56, 71, 59, 55, 68, 75, 73, 42, 57, 82, 74, 52, 83, 66, 78, 51, 53, 89, 35, 74, 53, 79, 50, 84, 65, 82, 58, 55, 52, 65, 55, 39, 83, 87, 79, 63, 66, 51, 84, 59, 74, 50, 60, 77, 55, 85, 57, 75, 56, 65, 59, 57, 40, 75, 55, 66, 59, 56, 46, 52, 62, 58, 66, 127, 60, 79, 67, 67, 67, 56, 74, 61, 49, 61, 56, 71, 61, 54, 61, 77, 52, 68, 41, 64, 55, 59, 46, 58, 69, 56, 52, 95, 57, 57, 60, 72, 52, 58, 61, 49, 79, 66, 43, 89, 62, 74, 53, 78, 61, 70, 96, 66, 54, 56, 69, 83, 81, 72, 71, 75, 60, 60, 64, 41, 53, 61, 83, 46, 76, 55, 110, 82, 79, 60, 50, 67, 55, 70, 120, 56, 89, 65, 74, 53, 57, 61, 71, 60, 50, 92, 57, 61, 64, 84, 57, 62, 61, 60, 84, 53, 53, 57, 99, 69, 91, 58, 45, 53, 
65, 72, 69, 90, 57, 64, 74, 58, 61, 82, 90, 60, 63, 66, 61, 86, 56, 50, 76, 75, 62, 73, 35, 57, 83, 66, 68, 58, 71, 62, 43, 62, 77, 78, 69, 59, 63, 74, 51, 60, 74, 54, 55, 78, 62, 71, 66, 60, 77, 57, 70, 58, 45, 58, 56, 85, 51, 77, 72, 51, 59, 48, 65, 72, 69, 55, 59, 65, 60, 58, 59, 75, 65, 67, 51, 50, 61, 69, 56, 78, 63, 64, 56, 53, 63, 72, 63, 41, 74, 77, 66, 75, 75, 77, 65, 95, 51, 55, 76, 76, 52, 65, 111, 66, 95, 85, 80, 59, 84, 64, 61, 73, 65, 86, 67, 68, 61, 60, 68, 64, 62, 58, 63, 77, 58, 55, 57, 81, 66, 58, 73, 50, 67, 88, 81, 57, 50, 57, 66, 51, 69, 75, 57, 58, 67, 95, 95, 77, 49, 76, 96, 57, 59, 66, 88, 61, 71, 71, 62, 82, 85, 61, 66, 61, 72, 56, 71, 70, 64, 72, 66, 89, 60, 71, 61, 53, 46, 58, 58, 68, 55, 50, 53, 62, 71, 47, 58, 55, 68, 68, 80, 70, 70, 68, 80, 52, 61, 55, 58, 96, 77, 56, 76, 62, 78, 71, 61, 64, 62, 64, 68, 65, 49, 65, 59, 59, 91, 87, 63, 78, 81, 61, 99, 74, 91, 82, 124, 57, 50, 75, 62, 69, 75, 61, 78, 79, 65, 58, 55, 69, 72, 63, 57, 44, 54, 57, 73, 68, 45, 59, 56, 65, 97, 73, 89, 70, 85, 58, 39, 54, 56, 59, 72, 95, 61, 74, 52, 70, 61, 62, 56, 60, 69, 65, 61, 50, 69, 90, 59, 55, 53, 64, 92, 78, 51, 74, 71, 45, 58, 63, 95, 58, 45, 67, 100, 82, 83, 80, 52, 89, 53, 56, 76, 71, 54, 80, 61, 59, 59, 69, 66, 111, 87, 64, 77, 72, 53, 42, 54, 82, 53, 69, 85, 70, 110, 54, 52, 76, 55, 67, 58, 68, 42, 76, 50, 64, 79, 56, 57, 72, 59, 52, 61, 68, 72, 86, 74, 54, 69, 72, 50, 78, 78, 57, 66, 74, 64, 56, 50, 56, 51, 69, 62, 73, 76, 66, 99, 55, 54, 77, 64, 55, 63, 91, 81, 76, 59, 85, 82, 56, 73, 58, 77, 59, 84, 54, 74, 67, 95, 70, 68, 80, 66, 56, 67, 93, 105, 58, 67, 43, 67, 87, 47, 99, 78, 54, 69, 79, 58, 55, 76, 70, 70, 63, 138, 80, 87, 72, 116, 69, 65, 53, 71, 65, 76, 54, 55, 66, 56, 73, 49, 85, 67, 63, 62, 69, 58, 64, 45, 63, 61, 74, 56, 101, 63, 70, 60, 45, 66, 64, 92, 57, 64, 63, 72, 56, 43, 71, 53, 54, 52, 66, 72, 74, 53, 58, 72, 64, 58, 64, 76, 72, 60, 84, 64, 53, 72, 56, 72, 68, 87, 53, 64, 70, 60, 63, 54, 46, 62, 93, 73, 53, 61, 48, 90, 68, 63, 68, 59, 67, 74, 57, 59, 78, 97, 85, 49, 67, 65, 63, 67, 82, 66, 58, 84, 51, 84, 84, 61, 60, 76, 63, 60, 51, 65, 53, 77, 60, 56, 45, 87, 61, 65, 59, 72, 75, 54, 62, 65, 69, 56, 55, 61, 59, 64, 68, 52, 70, 70, 57, 51, 65, 58, 55, 52, 59, 63, 45, 61, 72, 64, 66, 76, 52, 56, 61, 54, 88, 70, 51, 61, 71, 60, 72, 60, 62, 70, 50, 109, 127, 68, 57, 81, 49, 68, 63, 65, 69, 68, 65, 63, 68, 72, 60, 60, 63, 47, 68, 63, 70, 62, 73, 74, 79, 57, 49, 88, 64, 57, 102, 44, 73, 74, 61, 82, 60, 109, 68, 73, 61, 91, 56, 64, 59, 63, 74, 69, 49, 74, 68, 62, 69, 63, 77, 51, 46, 67, 57, 55, 60, 57, 48, 63, 84, 61, 66, 70, 53, 76, 60, 95, 56, 65, 84, 63, 66, 68, 59, 67, 62, 50, 104, 66, 54, 68, 69, 64, 97, 57, 60, 66, 68, 74, 68, 69, 54, 91, 70, 68, 77, 77, 52, 54, 54, 66, 67, 64, 62, 102, 67, 75, 59, 112, 73, 84, 46, 59, 70, 65, 53, 70, 59, 55, 54, 102, 100, 54, 66, 49, 62, 84, 68, 67, 68, 69, 82, 60, 58, 63, 90, 55, 74, 53, 62, 49, 97, 49, 52, 62, 50, 71, 54, 69, 65, 90, 83, 58, 66, 89, 72, 45, 66, 69, 62, 69, 66, 68, 78, 64, 80, 49, 87, 58, 65, 60, 60, 54, 81, 65, 49, 43, 55, 95, 59, 70, 60, 61, 77, 73, 67, 60, 60, 61, 54, 58, 61, 78, 58, 63, 70, 70, 89, 54, 60, 50, 110, 66, 65, 66, 65, 62, 88, 54, 77, 56, 90, 64, 65, 57, 46, 59, 63, 51, 74, 46, 59, 61, 59, 55, 71, 49, 51, 63, 73, 86, 72, 51, 81, 57, 76, 69, 67, 74, 60, 61, 66, 60, 61, 40, 65, 56, 72, 58, 73, 87, 69, 49, 49, 60, 67, 60, 64, 81, 68, 92, 78, 70, 61, 61, 60, 97, 79, 144, 73, 56, 72, 87, 56, 49, 57, 65, 61, 55, 69, 55, 84, 84, 69, 59, 130, 64, 65, 76, 47, 58, 55, 67, 76, 62, 84, 55, 62, 76, 
61, 91, 68, 79, 62, 78, 79, 53, 68, 85, 56, 60, 75, 58, 67, 63, 57, 60, 65, 53, 57, 50, 71, 66, 51, 53, 59, 52, 85, 53, 49, 58, 75, 72, 62, 60, 73, 70, 52, 57, 68, 90, 84, 96, 74, 56, 57, 71, 67, 49, 68, 61, 55, 86, 62, 58, 67, 71, 66, 83, 73, 61, 65, 54, 56, 72, 61, 61, 79, 70, 82, 61, 48, 61, 65, 47, 56, 66, 63, 69, 67, 76, 69, 51, 73, 70, 57, 65, 82, 63, 63, 57, 58, 64, 70, 63, 59, 57, 81, 60, 49, 56, 51, 81, 63, 72, 48, 63, 60, 63, 64, 79, 83, 78, 64, 69, 66, 63, 81, 60, 63, 59, 60, 58, 66, 79, 70, 69, 33, 71, 50, 68, 76, 48, 53, 63, 76, 79, 66, 51, 73, 57, 72, 74, 71, 48, 69, 67, 58, 71, 58, 79, 76, 77, 69, 78, 64, 61, 54, 51, 50, 71, 60, 43, 61, 96, 60, 74, 59, 73, 60, 81, 54, 73, 52, 47, 53, 63, 52, 58, 63, 54, 73, 58, 70, 60, 61, 61, 59, 56, 57, 53, 42, 78, 56, 38, 81, 69, 55, 56, 54, 58, 51, 56, 50, 67, 81, 92, 74, 47, 63, 65, 61, 66, 71, 65, 51, 65, 70, 60, 60, 89, 60, 59, 67, 49, 78, 65, 69, 64, 53, 83, 58, 71, 55, 75, 63, 63, 48, 58, 73, 85, 73, 120, 53, 94, 63, 62, 76, 84, 61, 58, 51, 76, 73, 55, 58, 72, 73, 67, 52, 68, 57, 73, 53, 65, 69, 78, 58, 69, 61, 46, 68, 51, 76, 88, 46, 54, 68, 58, 61, 44, 51, 50, 74, 59, 86, 92, 61, 69, 54, 88, 55, 70, 72, 95, 58, 55, 48, 83, 79, 84, 63, 66, 85, 81, 78, 60, 77, 67, 78, 52, 71, 57, 78, 63, 52, 66, 57, 74, 63, 75, 75, 58, 91, 59, 54, 62, 72, 66, 68, 66, 59, 56, 72, 89, 64, 81, 59, 43, 70, 65, 68, 78, 64, 55, 73, 57, 72, 77, 80, 73, 45, 62, 52, 71, 58, 109, 72, 61, 70, 73, 56, 102, 55, 60, 90, 118, 48, 64, 70, 85, 58, 76, 52, 53, 79, 77, 53, 64, 64, 61, 52, 64, 61, 70, 59, 61, 59, 76, 64, 58, 73, 59, 52, 95, 60, 66, 59, 66, 60, 60, 52, 56, 65, 54, 87, 67, 62, 52, 75, 69, 66, 45, 59, 60, 56, 73, 74, 69, 69, 70, 56, 65, 60, 66, 57, 59, 66, 58, 59, 86, 63, 38, 72, 93, 70, 72, 84, 66, 91, 64, 48, 96, 52, 50, 84, 57, 72, 46, 78, 68, 96, 82, 71, 66, 81, 110, 62, 64, 68, 61, 61, 85, 51, 100, 77, 62, 63, 60, 62, 61, 61, 57, 56, 69, 52, 57, 44, 55, 65, 68, 71, 57, 75, 65, 82, 89, 60, 50, 75, 78, 73, 64, 56, 52, 69, 84, 57, 50, 60, 112, 54, 65, 103, 66, 70, 97, 51, 69, 74, 67, 74, 57, 70, 60, 97, 62, 75, 67, 59, 52, 56, 88, 94, 72, 69, 66, 62, 83, 68, 64, 71, 66, 58, 66, 118, 82, 61, 57, 48, 43, 105, 51, 71, 73, 63, 62, 55, 65, 60, 64, 39, 63, 57, 127, 55, 78, 74, 108, 53, 73, 63, 55, 56, 63, 65, 63, 59, 73, 49, 66, 99, 68, 62, 67, 86, 65, 57, 73, 52, 54, 82, 59, 94, 55, 70, 62, 53, 78, 71, 76, 57, 62, 61, 74, 54, 59, 76, 65, 63, 60, 59, 49, 60, 67, 59, 74, 78, 51, 60, 69, 67, 65, 102, 59, 71, 68, 61, 67, 64, 87, 52, 56, 53, 67, 55, 49, 70, 57, 65, 68, 66, 60, 46, 78, 60, 68, 63, 60, 42, 63, 91, 76, 72, 89, 61, 42, 56, 44, 64, 62, 51, 66, 60, 81, 70, 77, 58, 63, 63, 52, 69, 69, 59, 86, 54, 58, 58, 62, 61, 69, 60, 56, 64, 83, 72, 80, 61, 74, 96, 55, 72, 61, 78, 49, 80, 94, 62, 57, 83, 67, 66, 63, 67, 71, 67, 54, 57, 47, 58, 53, 62, 65, 58, 65, 49, 59, 73, 58, 49, 74, 86, 77, 68, 63, 58, 58, 57, 107, 89, 51, 49, 57, 58, 72, 68, 66, 59, 59, 62, 48, 66, 72, 58, 53, 62, 63, 60, 78, 60, 75, 72, 62, 97, 57, 75, 65, 57, 53, 96, 65, 57, 65, 84, 45, 54, 99, 62, 40, 60, 57, 58, 72, 60, 54, 73, 61, 50, 49, 89, 61, 48, 51, 59, 90, 66, 50, 63, 60, 79, 65, 50, 72, 59, 43, 45, 72, 45, 61, 51, 65, 66, 57, 67, 62, 78, 63, 54, 67, 83, 96, 93, 49, 68, 69, 52, 76, 49, 70, 67, 61, 39, 76, 55, 73, 65, 85, 82, 55, 55, 74, 61, 69, 52, 74, 55, 61, 41, 99, 59, 56, 67, 85, 57, 90, 53, 62, 65, 93, 89, 44, 70, 47, 66, 97, 93, 69, 58, 63, 49, 60, 55, 70, 54, 44, 67, 50, 75, 49, 89, 62, 71, 57, 46, 78, 92, 54, 65, 90, 66, 66, 80, 67, 47, 69, 74, 74, 56, 85, 66, 45, 75, 53, 63, 
62, 56, 72, 68, 88, 67, 64, 59, 72, 65, 75, 48, 78, 65, 53, 49, 52, 55, 85, 51, 78, 59, 45, 67, 49, 102, 55, 53, 51, 67, 83, 55, 70, 59, 66, 67, 63, 64, 54, 61, 61, 72, 59, 61, 102, 56, 54, 93, 59, 97, 47, 68, 60, 73, 68, 58, 61, 63, 55, 69, 71, 74, 54, 68, 67, 60, 54, 56, 61, 77, 78, 56, 69, 58, 90, 53, 62, 70, 66, 57, 63, 62, 61, 79, 61, 77, 65, 69, 66, 47, 89, 57, 45, 62, 72, 65, 76, 55, 61, 48, 69, 56, 57, 63, 74, 75, 66, 93, 143, 61, 74, 79, 55, 87, 61, 65, 48, 58, 61, 59, 57, 55, 69, 82, 52, 124, 68, 71, 62, 63, 75, 65, 78, 72, 71, 73, 61, 70, 88, 52, 136, 60, 60, 46, 74, 103, 65, 69, 84, 87, 72, 51, 60, 57, 51, 75, 63, 69, 50, 61, 59, 87, 80, 57, 49, 65, 79, 54, 45, 68, 43, 59, 55, 59, 69, 62, 83, 62, 52, 56, 80, 79, 63, 70, 70, 60, 94, 68, 77, 58, 58, 60, 66, 43, 53, 61, 80, 112, 46, 58, 52, 50, 73, 94, 71, 61, 62, 61, 97, 55, 75, 56, 68, 72, 56, 59, 71, 65, 70, 75, 99, 66, 57, 56, 57, 58, 65, 66, 67, 53, 67, 40, 69, 53, 77, 69, 59, 65, 72, 59, 71, 57, 60, 83, 63, 47, 73, 64, 65, 61, 67, 52, 64, 57, 52, 65, 42, 65, 63, 70, 65, 70, 70, 64, 48, 101, 74, 71, 88, 45, 77, 64, 62, 64, 69, 82, 53, 82, 61, 46, 59, 62, 55, 75, 56, 67, 65, 46, 76, 60, 63, 66, 68, 60, 58, 55, 56, 66, 86, 55, 78, 52, 54, 56, 69, 59, 65, 66, 58, 64, 64, 58, 57, 85, 53, 57, 64, 47, 71, 50, 69, 60, 82, 66, 86, 82, 57, 74, 46, 51, 71, 55, 62, 41, 61, 113, 56, 52, 72, 56, 62, 58, 58, 55, 56, 53, 70, 78, 75, 63, 71, 66, 96, 48, 85, 56, 80, 65, 59, 87, 57, 70, 71, 45, 52, 85, 70, 91, 85, 58, 69, 75, 53, 57, 49, 60, 52, 57, 75, 63, 74, 64, 58, 57, 68, 76, 63, 63, 49, 74, 54, 65, 74, 97, 70, 90, 51, 44, 86, 94, 66, 90, 77, 59, 57, 90, 91, 57, 67, 48, 75, 74, 63, 77, 50, 56, 67, 60, 37, 83, 63, 75, 58, 66, 64, 61, 99, 71, 57, 62, 66, 64, 49, 74, 72, 64, 78, 71, 72, 61, 64, 107, 69, 58, 61, 65, 97, 80, 88, 54, 83, 67, 52, 61, 58, 62, 56, 69, 63, 67, 63, 65, 45, 66, 74, 61, 57, 59, 73, 55, 77, 55, 62, 68, 65, 56, 64, 60, 46, 74, 51, 66, 70, 68, 69, 54, 78, 72, 62, 74, 76, 63, 62, 67, 65, 66, 71, 73, 78, 76, 82, 57, 70, 50, 46, 73, 79, 65, 47, 83, 56, 74, 73, 72, 88, 69, 62, 62, 67, 66, 71, 44, 71, 67, 49, 58, 71, 68, 62, 84, 53, 63, 80, 61, 92, 57, 67, 59, 56, 57, 60, 48, 54, 72, 64, 63, 62, 56, 63, 65, 78, 56, 42, 63, 59, 50, 79, 76, 80, 66, 69, 60, 68, 60, 71, 62, 89, 56, 56, 78, 92, 66, 51, 54, 81, 68, 54, 67, 50, 76, 64, 51, 60, 82, 78, 59, 58, 67, 61, 63, 65, 105, 119, 62, 73, 58, 60, 91, 61, 68, 70, 74, 78, 53, 68, 67, 60, 58, 62, 78, 57, 66, 62, 70, 72, 83, 72, 55, 56, 67, 65, 46, 71, 62, 63, 62, 63, 68, 67, 60, 54, 64, 67, 65, 66, 49, 81, 59, 58, 63, 67, 59, 52, 62, 60, 55, 91, 65, 58, 60, 71, 74, 72, 68, 78, 76, 54, 75, 70, 49, 61, 86, 67, 55, 60, 54, 95, 61, 67, 43, 60, 88, 68, 62, 60, 77, 61, 76, 56, 58, 63, 113, 66, 72, 61, 60, 62, 51, 71, 56, 66, 56, 51, 70, 121, 81, 83, 93, 107, 67, 50, 75, 55, 68, 50, 71, 59, 81, 62, 69, 67, 61, 62, 92, 70, 62, 57, 55, 52, 57, 61, 80, 66, 54, 59, 90, 75, 46, 82, 120, 68, 63, 58, 64, 46, 73, 80, 56, 67, 61, 76, 65, 68, 68, 62, 61, 67, 84, 60, 54, 66, 57, 75, 59, 73, 65, 59, 58, 55, 65, 59, 74, 75, 65, 74, 72, 60, 62, 62, 72, 70, 63, 89, 53, 58, 60, 60, 67, 45, 72, 96, 73, 75, 73, 65, 58, 71, 67, 57, 63, 55, 61, 87, 61, 67, 60, 58, 61, 52, 60, 70, 71, 59, 50, 88, 63, 76, 64, 61, 59, 61, 60, 59, 55, 70, 76, 51, 67, 67, 63, 71, 60, 68, 92, 54, 58, 118, 66, 89, 83, 62, 76, 58, 59, 69, 62, 66, 58, 61, 72, 80, 50, 72, 52, 55, 84, 59, 68, 60, 85, 66, 72, 60, 87, 57, 75, 68, 76, 115, 94, 82, 49, 55, 62, 65, 55, 69, 59, 52, 54, 52, 70, 78, 75, 76, 83, 56, 61, 91, 96, 62, 50, 92, 79, 
66, 55, 53, 87, 62, 75, 65, 66, 54, 75, 58, 87, 85, 62, 57, 73, 63, 64, 85, 63, 63, 69, 67, 48, 58, 78, 58, 67, 56, 74, 71, 84, 55, 53, 62, 78, 63, 68, 63, 75, 57, 63, 63, 62, 72, 65, 60, 58, 69, 51, 68, 64, 89, 50, 70, 58, 52, 87, 72, 64, 54, 59, 77, 60, 54, 68, 67, 52, 77, 57, 60, 59, 80, 59, 63, 68, 60, 61, 75, 58, 67, 56, 49, 70, 87, 72, 60, 60, 79, 71, 66, 60, 52, 64, 74, 71, 79, 65, 71, 70, 63, 74, 64, 75, 39, 70, 67, 67, 58, 66, 69, 69, 56, 57, 51, 83, 56, 53, 74, 60, 42, 114, 60, 60, 59, 53, 33, 49, 55, 57, 83, 65, 77, 70, 46, 67, 54, 56, 54, 58, 73, 70, 69, 51, 61, 67, 64, 65, 52, 94, 46, 51, 63, 60, 78, 71, 46, 78, 58, 46, 55, 56, 55, 60, 75, 99, 65, 56, 50, 73, 76, 85, 67, 63, 48, 60, 54, 65, 52, 54, 61, 126, 50, 62, 99, 70, 95, 59, 76, 102, 86, 56, 71, 95, 90, 57, 50, 66, 81, 53, 87, 58, 60, 87, 57, 63, 52, 52, 67, 58, 62, 59, 77, 60, 61, 70, 57, 49, 64, 76, 58, 69, 52, 47, 72, 59, 69, 68, 67, 57, 74, 72, 53, 69, 58, 98, 80, 69, 72, 50, 74, 72, 68, 76, 106, 75, 58, 58, 97, 77, 74, 72, 62, 52, 71, 53, 61, 72, 67, 54, 59, 87, 62, 82, 54, 155, 52, 71, 65, 103, 74, 77, 66, 76, 61, 44, 81, 54, 68, 56, 96, 66, 58, 57, 55, 68, 60, 69, 74, 46, 59, 58, 67, 76, 60, 71, 61, 71, 74, 59, 56, 59, 57, 56, 51, 64, 51, 66, 61, 58, 59, 49, 59, 60, 71, 66, 84, 75, 78, 58, 68, 89, 89, 68, 51, 109, 87, 54, 77, 68, 62, 72, 55, 71, 79, 56, 54, 64, 58, 90, 61, 59, 66, 58, 58, 83, 72, 49, 76, 58, 63, 54, 66, 81, 77, 60, 68, 66, 60, 79, 68, 53, 53, 65, 50, 86, 45, 54, 72, 50, 59, 79, 59, 69, 56, 107, 41, 57, 65, 75, 71, 75, 69, 41, 80, 75, 59, 51, 53, 55, 39, 81, 73, 65, 52, 61, 66, 98, 84, 56, 71, 79, 53, 60, 50, 72, 67, 69, 94, 59, 61, 49, 51, 56, 89, 137, 57, 65, 73, 57, 62, 42, 57, 75, 65, 64, 83, 48, 81, 69, 80, 53, 63, 78, 75, 51, 68, 78, 86, 54, 104, 72, 80, 79, 46, 55, 90, 67, 29, 59, 42, 59, 58, 66, 68, 64, 81, 85, 56, 70, 73, 70, 44, 66, 69, 84, 81, 71, 102, 66, 45, 56, 66, 60, 56, 83, 82, 82, 71, 51, 60, 69, 77, 53, 72, 46, 54, 63, 48, 59, 71, 55, 93, 61, 47, 57, 58, 49, 54, 89, 52, 69, 61, 53, 70, 77, 91, 59, 63, 56, 46, 70, 62, 65, 64, 54, 54, 63, 63, 46, 76, 57, 76, 55, 56, 66, 77, 70, 125, 56, 56, 97, 74, 67, 55, 72, 72, 80, 67, 65, 58, 61, 60, 73, 52, 69, 74, 84, 53, 50, 59, 49, 72, 62, 67, 59, 63, 58, 85, 63, 49, 71, 44, 61, 69, 70, 103, 100, 60, 60, 74, 58, 68, 88, 79, 77, 53, 77, 65, 63, 46, 91, 60, 65, 64, 51, 53, 76, 44, 66, 110, 68, 57, 52, 69, 58, 68, 74, 53, 56, 82, 63, 79, 65, 61, 43, 64, 71, 57, 65, 49, 58, 83, 69, 66, 78, 51, 67, 59, 55, 58, 56, 52, 68, 60, 75, 46, 89, 72, 75, 59, 71, 70, 70, 60, 80, 72, 92, 59, 78, 78, 68, 69, 69, 91, 74, 90, 75, 74, 51, 94, 68, 68, 70, 67, 60, 74, 88, 68, 66, 82, 87, 55, 89, 72, 60, 62, 74, 60, 71, 51, 62, 57, 63, 76, 93, 65, 60, 75, 74, 61, 70, 50, 63, 52, 44, 55, 70, 54, 67, 50, 69, 38, 68, 63, 69, 70, 73, 67, 62, 61, 70, 79, 51, 80, 70, 49, 50, 85, 70, 85, 79, 58, 50, 66, 73, 96, 76, 66, 84, 86, 63, 55, 73, 80, 57, 60, 60, 51, 85, 81, 91, 59, 73, 84, 73, 58, 114, 60, 40, 66, 47, 74, 60, 66, 74, 53, 84, 70, 58, 59, 68, 50, 79, 59, 61, 81, 60, 61, 59, 49, 57, 65, 41, 58, 62, 59, 60, 61, 55, 62, 48, 59, 49, 52, 82, 72, 76, 74, 64, 61, 46, 56, 42, 47, 82, 66, 88, 51, 51, 62, 62, 59, 54, 95, 66, 52, 52, 53, 62, 39, 62, 74, 63, 64, 70, 125, 66, 80, 69, 79, 62, 52, 67, 44, 56, 71, 44, 51, 49, 70, 84, 86, 56, 60, 64, 61, 99, 71, 67, 95, 51, 66, 60, 93, 50, 61, 57, 50, 57, 59, 54, 68, 53, 53, 56, 93, 55, 78, 63, 76, 68, 44, 74, 88, 62, 80, 58, 64, 53, 67, 54, 52, 58, 63, 63, 63, 66, 81, 46, 68, 61, 57, 54, 62, 55, 57, 70, 61, 62, 63, 67, 72, 62, 
62, 69, 61, 62, 76, 50, 68, 71, 59, 56, 62, 64, 50, 57, 61, 65, 69, 53, 63, 67, 49, 57, 54, 63, 57, 73, 72, 63, 81, 70, 57, 70, 65, 68, 78, 62, 83, 86, 56, 61, 57, 94, 88, 58, 69, 51, 70, 112, 72, 71, 57, 79, 77, 61, 56, 59, 51, 67, 65, 62, 75, 52, 56, 88, 55, 44, 57, 75, 54, 54, 71, 46, 79, 58, 49, 50, 67, 52, 70, 52, 77, 62, 82, 50, 54, 91, 56, 57, 59, 67, 52, 89, 55, 67, 78, 37, 60, 61, 69, 72, 51, 45, 64, 54, 90, 64, 69, 64, 54, 50, 59, 58, 71, 63, 84, 59, 66, 54, 60, 61, 71, 62, 81, 64, 40, 93, 68, 55, 70, 72, 44, 79, 82, 79, 65, 82, 77, 84, 86, 68, 63, 57, 76, 66, 80, 86, 70, 81, 81, 68, 96, 74, 52, 54, 96, 100, 61, 67, 62, 68, 54, 44, 65, 62, 52, 63, 71, 83, 61, 69, 57, 51, 80, 73, 75, 56, 55, 44, 92, 58, 64, 75, 53, 68, 57, 80, 65, 66, 50, 62, 72, 56, 66, 52, 66, 81, 54, 77, 76, 56, 54, 64, 50, 58, 56, 60, 65, 58, 59, 62, 60, 50, 73, 79, 87, 54, 96, 53, 60, 58, 69, 77, 46, 54, 79, 58, 67, 63, 52, 57, 68, 72, 56, 60, 103, 66, 72, 85, 70, 58, 94, 67, 61, 61, 75, 49, 73, 56, 59, 77, 71, 62, 57, 63, 68, 66, 64, 64, 58, 78, 56, 48, 78, 81, 64, 61, 74, 66, 52, 63, 45, 64, 64, 68, 68, 72, 82, 52, 75, 57, 57, 62, 70, 64, 65, 94, 64, 71, 56, 82, 74, 63, 71, 75, 60, 53, 51, 59, 39, 73, 55, 64, 68, 61, 59, 75, 80, 63, 50, 67, 67, 70, 48, 64, 47, 69, 54, 101, 63, 75, 55, 55, 55, 48, 52, 57, 66, 73, 68, 60, 51, 55, 48, 59, 53, 52, 63, 48, 45, 63, 62, 69, 71, 72, 60, 69, 106, 43, 63, 58, 72, 53, 60, 55, 77, 55, 57, 68, 70, 44, 67, 59, 65, 51, 66, 70, 73, 56, 48, 77, 60, 62, 64, 58, 57, 83, 64, 88, 42, 69, 56, 76, 63, 60, 61, 76, 63, 79, 91, 59, 100, 59, 76, 69, 60, 68, 82, 82, 67, 59, 83, 60, 91, 80, 59, 51, 53, 73, 18, 114, 61, 55, 64, 65, 40, 60, 64, 77, 57, 49, 36, 60, 60, 58, 68, 74, 62, 57, 73, 115, 69, 56, 56, 82, 66, 60, 41, 58, 56, 99, 54, 51, 72, 59, 72, 51, 71, 71, 53, 74, 71, 73, 62, 71, 79, 61, 56, 59, 45, 80, 64, 66, 73, 60, 61, 80, 57, 57, 112, 61, 66, 57, 55, 72, 56, 60, 96, 83, 64, 89, 67, 78, 63, 83, 62, 66, 55, 83, 71, 61, 55, 72, 65, 47, 64, 61, 80, 64, 93, 60, 55, 61, 63, 86, 73, 65, 75, 61, 50, 59, 63, 62, 62, 77, 86, 84, 84, 45, 60, 61, 79, 71, 66, 106, 65, 72, 62, 74, 112, 53, 52, 61, 72, 57, 84, 60, 56, 84, 63, 58, 60, 72, 70, 59, 61, 57, 59, 86, 52, 62, 74, 56, 69, 68, 65, 103, 38, 67, 47, 56, 60, 56, 53, 59, 67, 73, 62, 72, 87, 65, 86, 60, 44, 61, 70, 54, 73, 98, 77, 92, 119, 74, 65, 68, 68, 61, 64, 62, 69, 66, 45, 42, 51, 64, 85, 52, 89, 90, 68, 99, 71, 51, 52, 51, 48, 46, 48, 87, 62, 66, 62, 65, 63, 48, 74, 60, 49, 61, 72, 59, 50, 50, 53, 68, 48, 65, 70, 89, 57, 69, 77, 69, 115, 66, 60, 101, 55, 43, 55, 69, 81, 68, 50, 50, 72, 66, 59, 83, 64, 75, 60, 48, 69, 68, 78, 63, 52, 70, 54, 61, 70, 56, 81, 58, 92, 72, 67, 53, 78, 62, 37, 59, 65, 77, 91, 54, 73, 59, 50, 68, 69, 65, 60, 70, 82, 63, 96, 61, 82, 66, 66, 57, 75, 55, 74, 66, 61, 101, 59, 69, 61, 56, 78, 58, 64, 59, 60, 80, 64, 52, 54, 48, 63, 60, 77, 80, 59, 69, 51, 59, 66, 61, 92, 74, 77, 63, 76, 66, 57, 69, 70, 49, 59, 59, 65, 49, 62, 64, 76, 86, 52, 83, 65, 70, 75, 62, 107, 56, 69, 55, 59, 67, 62, 78, 67, 50, 77, 70, 55, 47, 53, 67, 74, 48, 66, 53, 69, 63, 63, 64, 94, 70, 70, 58, 63, 73, 66, 63, 77, 48, 55, 59, 71, 69, 95, 104, 109, 62, 60, 60, 70, 59, 102, 59, 62, 53, 67, 63, 62, 66, 50, 34, 55, 75, 51, 57, 66, 79, 66, 65, 71, 87, 111, 57, 81, 65, 71, 50, 62, 66, 58, 60, 55, 48, 60, 69, 66, 71, 48, 63, 57, 60, 62, 52, 58, 59, 56, 70, 52, 61, 46, 56, 54, 60, 110, 59, 73, 69, 67, 75, 59, 72, 57, 54, 91, 48, 55, 61, 63, 60, 66, 64, 54, 54, 48, 67, 69, 45, 55, 92, 81, 75, 75, 60, 62, 63, 39, 66, 69, 76, 57, 65, 75, 
49, 56, 47, 84, 78, 66, 58, 69, 62, 64, 68, 82, 80, 64, 84, 54, 84, 68, 62, 63, 73, 60, 56, 71, 76, 68, 73, 69, 85, 103, 92, 85, 92, 64, 69, 67, 69, 50, 64, 50, 84, 54, 79, 72, 60, 72, 75, 58, 86, 83, 74, 62, 64, 76, 66, 63, 74, 71, 76, 52, 54, 64, 73, 78, 70, 52, 62, 67, 46, 51, 55, 58, 67, 84, 100, 94, 63, 60, 64, 81, 67, 57, 56, 46, 79, 62, 56, 59, 64, 95, 61, 102, 133, 57, 52, 85, 80, 58, 52, 76, 66, 63, 83, 79, 62, 65, 62, 56, 71, 75, 77, 57, 58, 64, 65, 66, 69, 60, 84, 56, 73, 55, 60, 57, 60, 107, 49, 50, 62, 64, 47, 57, 63, 61, 80, 60, 74, 66, 48, 63, 72, 69, 63, 58, 64, 91, 77, 70, 75, 72, 83, 55, 83, 66, 65, 61, 59, 70, 61, 86, 68, 71, 60, 66, 35, 57, 75, 58, 69, 77, 77, 52, 62, 87, 58, 60, 65, 61, 86, 73, 78, 69, 60, 59, 48, 68, 68, 65, 77, 60, 103, 83, 69, 109, 49, 54, 66, 49, 58, 59, 66, 52, 65, 52, 44, 68, 66, 99, 72, 51, 55, 62, 61, 60, 82, 71, 60, 65, 62, 59, 78, 59, 71, 92, 71, 61, 85, 72, 49, 70, 54, 64, 71, 70, 68, 63, 52, 66, 99, 77, 68, 65, 38, 73, 78, 79, 62, 60, 58, 77, 74, 47, 69, 67, 58, 74, 85, 60, 104, 76, 57, 74, 57, 49, 106, 79, 70, 67, 73, 67, 66, 77, 46, 58, 56, 59, 60, 74, 63, 67, 49, 70, 74, 79, 58, 74, 74, 68, 65, 44, 58, 39, 52, 53, 54, 58, 67, 86, 55, 70, 58, 71, 65, 54, 56, 45, 61, 69, 62, 71, 66, 68, 67, 60, 58, 55, 75, 49, 52, 69, 76, 71, 61, 66, 73, 72, 76, 74, 61, 53, 80, 73, 58, 85, 61, 51, 80, 76, 95, 70, 79, 53, 77, 72, 68, 69, 54, 50, 61, 56, 57, 60, 55, 54, 75, 55, 47, 60, 54, 62, 55, 80, 63, 60, 60, 77, 53, 69, 87, 49, 68, 62, 55, 60, 78, 68, 68, 65, 70, 69, 54, 91, 55, 85, 106, 51, 68, 72, 62, 51, 60, 49, 59, 79, 59, 90, 82, 54, 55, 59, 96, 54, 82, 61, 58, 63, 54, 50, 61, 58, 63, 91, 65, 80, 84, 71, 57, 51, 65, 67, 71, 107, 54, 53, 54, 65, 57, 75, 66, 69, 68, 57, 62, 65, 63, 64, 57, 55, 66, 58, 83, 73, 53, 61, 68, 50, 62, 37, 78, 72, 63, 39, 53, 84, 57, 74, 81, 70, 95, 61, 56, 71, 118, 56, 64, 74, 60, 57, 53, 70, 68, 46, 68, 88, 76, 57, 99, 73, 58, 57, 51, 46, 75, 95, 56, 97, 63, 56, 53, 72, 46, 90, 56, 64, 70, 57, 72, 100, 69, 61, 53, 65, 73, 58, 58, 57, 60, 52, 70, 60, 61, 75, 78, 61, 66, 51, 93, 70, 59, 45, 80, 89, 87, 72, 55, 53, 31, 57, 66, 95, 60, 59, 56, 76, 68, 56, 69, 73, 63, 90, 72, 51, 73, 81, 58, 54, 85, 61, 48, 53, 60, 61, 59, 50, 76, 72, 61, 61, 68, 60, 68, 58, 52, 52, 67, 68, 83, 70, 100, 60, 78, 75, 80, 78, 51, 71, 58, 66, 67, 64, 89, 79, 58, 57, 65, 68, 74, 52, 80, 68, 99, 80, 78, 78, 69, 70, 62, 69, 64, 66, 72, 76, 71, 65, 72, 65, 60, 55, 70, 75, 73, 57, 54, 61, 62, 51, 53, 60, 75, 59, 67, 66, 96, 58, 58, 63, 58, 53, 69, 57, 81, 59, 84, 64, 60, 62, 65, 55, 46, 60, 64, 65, 50, 52, 55, 63, 64, 62, 74, 64, 73, 59, 62, 63, 37, 105, 55, 67, 68, 64, 56, 63, 80, 72, 78, 52, 51, 62, 64, 111, 57, 49, 57, 63, 59, 46, 55, 59, 64, 75, 52, 57, 62, 76, 67, 56, 59, 66, 94, 72, 67, 50, 57, 58, 54, 58, 116, 52, 46, 66, 49, 52, 54, 47, 57, 63, 59, 65, 68, 57, 65, 71, 65, 61, 72, 66, 61, 50, 61, 47, 73, 47, 58, 86, 56, 56, 53, 64, 60, 94, 55, 90, 53, 47, 57, 64, 62, 93, 56, 49, 68, 60, 53, 70, 53, 65, 55, 69, 62, 77, 65, 83, 55, 60, 69, 47, 57, 72, 76, 59, 66, 59, 69, 66, 48, 63, 52, 53, 88, 61, 46, 49, 72, 54, 55, 59, 81, 52, 67, 66, 78, 55, 58, 64, 43, 69, 71, 82, 76, 71, 69, 86, 68, 67, 55, 53, 62, 70, 56, 64, 70, 53, 48, 83, 53, 105, 66, 68, 63, 65, 74, 46, 76, 53, 73, 60, 65, 64, 70, 67, 62, 54, 58, 61, 80, 65, 65, 75, 62, 68, 57, 61, 52, 72, 78, 74, 78, 54, 43, 81, 59, 63, 62, 67, 63, 63, 99, 81, 60, 77, 59, 72, 62, 52, 62, 81, 58, 46, 65, 54, 67, 68, 50, 60, 67, 44, 53, 50, 57, 63, 77, 39, 62, 60, 79, 62, 62, 55, 57, 54, 68, 83, 78, 59, 
66, 61, 78, 96, 64, 62, 62, 66, 50, 85, 65, 81, 61, 83, 67, 72, 55, 45, 53, 55, 78, 40, 61, 44, 78, 80, 70, 66, 58, 56, 62, 70, 59, 94, 90, 63, 73, 62, 68, 67, 63, 135, 73, 113, 77, 77, 48, 80, 83, 67, 71, 70, 56, 76, 55, 68, 50, 59, 67, 55, 121, 68, 67, 54, 71, 58, 58, 73, 52, 74, 58, 90, 56, 54, 47, 70, 50, 69, 65, 68, 92, 59, 70, 61, 56, 66, 70, 57, 60, 61, 51, 60, 73, 81, 79, 60, 69, 75, 65, 77, 54, 73, 59, 77, 77, 63, 79, 72, 69, 53, 68, 95, 65, 65, 54, 67, 72, 46, 65, 55, 62, 64, 63, 88, 63, 61, 91, 58, 63, 85, 84, 57, 73, 36, 73, 66, 52, 39, 75, 67, 57, 79, 97, 94, 52, 71, 68, 75, 52, 82, 103, 65, 77, 54, 65, 66, 60, 61, 69, 80, 61, 81, 52, 94, 76, 60, 56, 58, 60, 92, 45, 76, 70, 54, 97, 53, 64, 75, 47, 63, 94, 54, 84, 69, 49, 56, 53, 49, 58, 59, 35, 41, 54, 69, 63, 58, 48, 63, 76, 67, 54, 73, 76, 63, 90, 110, 82, 74, 43, 69, 52, 69, 51, 60, 68, 65, 62, 46, 73, 66, 60, 73, 61, 43, 67, 59, 72, 66, 72, 73, 79, 99, 78, 54, 52, 67, 71, 52, 61, 54, 49, 63, 84, 73, 54, 111, 47, 53, 55, 94, 77, 54, 53, 68, 63, 100, 58, 52, 97, 59, 110, 67, 69, 63, 55, 105, 87, 61, 88, 65, 55, 71, 60, 59, 77, 93, 62, 81, 62, 77, 55, 84, 82, 72, 54, 54, 55, 59, 70, 81, 78, 76, 74, 61, 47, 70, 86, 64, 57, 56, 73, 54, 53, 52, 76, 66, 62, 63, 66, 89, 73, 80, 51, 58, 63, 51, 67, 66, 75, 58, 71, 70, 64, 65, 49, 66, 47, 54, 46, 61, 74, 71, 66, 69, 65, 55, 63, 90, 49, 66, 48, 94, 68, 81, 65, 45, 70, 55, 66, 62, 61, 65, 62, 49, 63, 71, 60, 42, 56, 54, 79, 79, 67, 66, 76, 56, 106, 57, 82, 63, 68, 70, 63, 54, 66, 58, 60, 61, 60, 62, 68, 57, 59, 87, 61, 87, 57, 50, 87, 61, 61, 89, 83, 53, 59, 51, 103, 74, 53, 70, 63, 50, 65, 63, 67, 100, 60, 63, 55, 71, 72, 70, 70, 48, 66, 62, 93, 55, 61, 93, 86, 59, 40, 82, 62, 65, 98, 82, 53, 63, 85, 77, 62, 55, 39, 66, 50, 132, 65, 59, 48, 69, 69, 69, 58, 68, 67, 70, 67, 60, 66, 46, 79, 54, 69, 59, 66, 58, 58, 56, 73, 52, 56, 53, 67, 48, 72, 75, 62, 67, 105, 66, 58, 62, 62, 57, 59, 65, 76, 67, 62, 67, 81, 73, 74, 72, 52, 52, 63, 67, 60, 70, 82, 53, 61, 73, 65, 87, 81, 76, 56, 61, 74, 52, 61, 78, 67, 57, 76, 51, 80, 58, 62, 62, 56, 79, 78, 79, 58, 68, 65, 104, 125, 64, 54, 62, 41, 76, 68, 49, 55, 57, 57, 62, 62, 57, 83, 59, 69, 75, 92, 49, 66, 72, 53, 62, 75, 71, 55, 59, 67, 55, 63, 72, 67, 60, 62, 69, 69, 49, 66, 72, 67, 73, 51, 54, 91, 59, 56, 66, 51, 67, 48, 58, 52, 47, 71, 52, 46, 60, 68, 70, 86, 53, 54, 78, 50, 66, 63, 58, 57, 80, 62, 47, 67, 64, 62, 70, 80, 55, 65, 62, 64, 72, 82, 76, 45, 57, 58, 63, 72, 64, 64, 82, 57, 66, 48, 70, 56, 65, 52, 54, 73, 63, 66, 59, 59, 55, 61, 59, 75, 73, 69, 55, 69, 66, 92, 109, 65, 68, 65, 56, 82, 62, 47, 83, 67, 52, 59, 61, 64, 83, 68, 61, 59, 53, 117, 86, 75, 61, 58, 57, 80, 82, 71, 70, 41, 57, 43, 87, 59, 67, 54, 75, 58, 42, 71, 54, 67, 67, 64, 56, 69, 66, 82, 83, 98, 52, 57, 62, 77, 71, 77, 63, 63, 54, 56, 60, 93, 82, 61, 57, 73, 54, 87, 52, 61, 66, 91, 61, 90, 71, 64, 68, 81, 53, 54, 62, 62, 50, 98, 61, 58, 60, 68, 71, 58, 66, 69, 63, 46, 103, 61, 100, 42, 63, 59, 69, 45, 73, 83, 86, 69, 115, 45, 72, 62, 62, 63, 72, 65, 88, 59, 60, 74, 45, 68, 63, 62, 50, 87, 83, 66, 68, 55, 77, 60, 62, 51, 84, 52, 52, 109, 121, 62, 65, 79, 80, 78, 100, 65, 63, 63, 65, 63, 68, 55, 56, 90, 54, 61, 67, 76, 79, 97, 89, 62, 60, 80, 57, 56, 64, 69, 108, 66, 60, 65, 52, 67, 65, 65, 65, 68, 55, 64, 55, 65, 68, 59, 54, 73, 76, 64, 60, 62, 63, 51, 53, 65, 83, 50, 47, 54, 50, 58, 87, 123, 90, 61, 52, 47, 66, 38, 63, 77, 45, 54, 65, 70, 89, 65, 56, 67, 73, 63, 64, 48, 53, 77, 67, 63, 55, 60, 56, 68, 82, 53, 59, 62, 47, 56, 60, 64, 59, 70, 68, 56, 73, 57, 48, 66, 63, 
61, 38, 70, 49, 77, 104, 48, 62, 60, 65, 69, 41, 63, 58, 57, 49, 56, 60, 56, 71, 72, 59, 47, 59, 51, 75, 65, 60, 55, 92, 66, 47, 55, 71, 56, 65, 62, 55, 106, 86, 68, 67, 64, 61, 61, 77, 79, 69, 73, 70, 69, 71, 52, 58, 74, 66, 58, 58, 76, 65, 41, 137, 64, 79, 67, 69, 65, 55, 69, 60, 58, 58, 63, 68, 61, 64, 76, 45, 59, 64, 50, 47, 53, 48, 60, 45, 84, 76, 57, 56, 58, 60, 52, 64, 65, 66, 65, 75, 60, 70, 78, 65, 62, 76, 78, 56, 73, 61, 55, 104, 59, 62, 80, 82, 62, 56, 59, 63, 66, 52, 54, 50, 65, 67, 52, 73, 70, 53, 77, 75, 47, 48, 66, 73, 56, 71, 56, 64, 63, 60, 49, 64, 75, 67, 45, 91, 53, 34, 77, 57, 44, 70, 79, 77, 69, 55, 86, 58, 54, 67, 61, 87, 60, 66, 54, 69, 68, 67, 82, 65, 59, 65, 66, 60, 55, 70, 63, 49, 58, 58, 53, 63, 61, 121, 53, 99, 65, 56, 61, 83, 71, 63, 64, 63, 72, 66, 55, 75, 69, 60, 92, 60, 65, 87, 87, 57, 74, 51, 65, 59, 61, 70, 62, 109, 67, 58, 62, 61, 68, 69, 48, 77, 69, 50, 83, 41, 78, 58, 82, 64, 76, 42, 58, 44, 67, 91, 58, 67, 80, 67, 59, 68, 57, 78, 74, 60, 58, 59, 64, 64, 81, 65, 58, 84, 59, 62, 72, 64, 58, 81, 56, 34, 57, 64, 70, 56, 57, 42, 91, 53, 58, 54, 57, 56, 84, 67, 65, 65, 54, 75, 62, 58, 55, 93, 61, 75, 54, 69, 63, 58, 75, 53, 52, 106, 42, 62, 80, 64, 68, 55, 65, 70, 62, 71, 58, 59, 46, 48, 66, 48, 65, 105, 71, 75, 87, 78, 50, 68, 56, 53, 63, 75, 59, 54, 62, 71, 55, 38, 76, 62, 61, 63, 60, 72, 61, 70, 50, 66, 62, 50, 55, 64, 38, 61, 56, 61, 71, 70, 79, 74, 76, 81, 62, 71, 84, 49, 83, 63, 74, 59, 64, 50, 78, 65, 65, 50, 70, 56, 78, 80, 77, 80, 75, 92, 58, 51, 50, 65, 62, 54, 66, 52, 66, 62, 54, 74, 74, 75, 61, 55, 59, 66, 67, 70, 55, 72, 60, 92, 64, 74, 64, 72, 86, 57, 61, 62, 86, 104, 55, 59, 64, 93, 67, 56, 51, 51, 58, 56, 66, 76, 66, 62, 60, 70, 68, 63, 43, 83, 39, 68, 61, 64, 113, 55, 68, 54, 69, 59, 61, 79, 53, 100, 65, 61, 60, 63, 71, 61, 66, 66, 56, 79, 58, 53, 56, 50, 71, 66, 63, 54, 86, 64, 43, 55, 80, 68, 51, 52, 51, 73, 94, 52, 45, 90, 55, 92, 56, 72, 64, 50, 90, 69, 61, 85, 62, 73, 81, 46, 45, 70, 91, 57, 51, 54, 59, 69, 47, 71, 59, 47, 61, 57, 45, 96, 83, 71, 72, 57, 120, 63, 69, 56, 54, 67, 80, 59, 56, 62, 71, 57, 56, 66, 69, 57, 46, 59, 61, 72, 70, 58, 54, 49, 77, 63, 57, 71, 77, 66, 96, 65, 67, 64, 59, 70, 69, 73, 60, 82, 56, 63, 60, 61, 51, 97, 58, 47, 60, 79, 76, 78, 59, 92, 84, 49, 53, 89, 66, 54, 82, 61, 65, 81, 76, 72, 53, 90, 50, 83, 62, 73, 78, 58, 52, 61, 55, 60, 63, 78, 92, 58, 62, 67, 67, 63, 63, 69, 73, 63, 54, 63, 83, 50, 58, 55, 64, 50, 51, 83, 55, 87, 64, 52, 105, 51, 63, 94, 69, 61, 62, 64, 57, 49, 77, 45, 80, 86, 82, 50, 71, 54, 70, 59, 61, 61, 46, 48, 69, 62, 75, 67, 51, 57, 59, 70, 85, 58, 44, 62, 72, 50, 56, 64, 66, 53, 69, 56, 59, 81, 74, 56, 49, 56, 71, 120, 80, 87, 45, 68, 73, 63, 68, 54, 57, 66, 62, 71, 70, 66, 93, 58, 67, 60, 74, 67, 64, 54, 54, 50, 60, 63, 70, 66, 56, 72, 90, 61, 53, 40, 53, 82, 54, 61, 57, 77, 69, 51, 58, 60, 63, 52, 67, 58, 57, 71, 65, 51, 67, 55, 56, 45, 70, 53, 45, 59, 92, 58, 75, 49, 43, 74, 43, 59, 62, 92, 62, 66, 74, 55, 44, 57, 71, 54, 70, 80, 56, 101, 73, 82, 63, 63, 68, 71, 61, 66, 66, 75, 47, 53, 36, 57, 69, 74, 50, 47, 63, 49, 47, 85, 54, 78, 43, 70, 53, 61, 67, 74, 70, 62, 54, 83, 63, 94, 56, 60, 55, 71, 54, 64, 57, 70, 76, 62, 69, 62, 59, 63, 50, 120, 124, 64, 49, 72, 94, 70, 46, 74, 62, 74, 30, 71, 101, 79, 56, 78, 78, 91, 74, 67, 59, 60, 90, 66, 55, 77, 60, 62, 74, 76, 58, 72, 49, 60, 67, 58, 69, 48, 91, 59, 59, 84, 51, 63, 67, 67, 62, 70, 67, 70, 71, 51, 58, 50, 80, 77, 62, 63, 72, 66, 66, 53, 72, 52, 83, 85, 62, 69, 68, 83, 66, 51, 37, 61, 54, 59, 68, 68, 89, 69, 69, 62, 52, 104, 50, 81, 
66, 70, 58, 62, 62, 69, 60, 47, 76, 61, 73, 61, 95, 56, 60, 64, 53, 59, 81, 57, 70, 64, 84, 64, 67, 63, 83, 67, 62, 76, 93, 67, 83, 40, 47, 65, 63, 103, 60, 79, 47, 54, 63, 56, 89, 64, 90, 53, 73, 60, 73, 72, 75, 70, 54, 72, 60, 52, 60, 61, 57, 72, 72, 60, 74, 71, 61, 62, 57, 74, 57, 61, 60, 70, 84, 64, 56, 58, 89, 60, 73, 42, 49, 51, 58, 51, 69, 78, 78, 98, 64, 54, 61, 63, 121, 64, 46, 69, 86, 67, 71, 61, 68, 49, 55, 65, 59, 90, 65, 53, 58, 67, 54, 55, 64, 64, 66, 68, 67, 123, 83, 79, 74, 92, 62, 92, 72, 67, 72, 75, 64, 60, 80, 57, 73, 85, 59, 58, 72, 63, 76, 49, 49, 57, 74, 94, 54, 53, 74, 42, 66, 70, 79, 64, 39, 113, 73, 69, 55, 74, 75, 64, 56, 72, 82, 73, 68, 60, 71, 95, 61, 84, 70, 59, 76, 55, 56, 63, 78, 91, 74, 68, 71, 59, 53, 67, 96, 73, 59, 61, 69, 76, 62, 56, 51, 56, 47, 71, 57, 97, 47, 77, 65, 88, 76, 56, 79, 73, 58, 51, 89, 52, 56, 66, 47, 67, 76, 71, 68, 66, 42, 47, 49, 72, 81, 49, 60, 85, 85, 58, 70, 54, 81, 82, 50, 49, 88, 91, 64, 58, 62, 45, 69, 61, 48, 50, 68, 49, 51, 65, 83, 49, 47, 83, 83, 59, 91, 56, 67, 68, 57, 57, 48, 46, 72, 59, 52, 62, 57, 64, 74, 65, 74, 44, 61, 70, 43, 64, 44, 60, 74, 52, 66, 70, 59, 81, 54, 55, 71, 60, 64, 70, 76, 38, 66, 51, 86, 55, 63, 53, 61, 70, 59, 83, 46, 57, 92, 69, 62, 44, 86, 61, 53, 55, 63, 69, 63, 46, 60, 59, 69, 80, 69, 106, 60, 127, 85, 64, 61, 62, 52, 51, 76, 82, 90, 58, 55, 63, 67, 73, 83, 68, 55, 54, 72, 93, 44, 61, 60, 67, 72, 55, 58, 83, 66, 50, 76, 51, 72, 55, 56, 68, 73, 55, 55, 52, 68, 54, 64, 63, 64, 57, 67, 61, 88, 75, 47, 67, 73, 63, 49, 45, 70, 85, 72, 85, 46, 54, 64, 54, 54, 69, 65, 83, 70, 48, 69, 65, 72, 45, 68, 61, 62, 75, 75, 49, 57, 59, 117, 60, 79, 55, 69, 56, 65, 67, 68, 71, 80, 85, 94, 60, 52, 74, 61, 76, 48, 65, 61, 74, 56, 54, 82, 77, 82, 69, 53, 66, 74, 61, 59, 65, 68, 50, 71, 58, 64, 53, 71, 66, 86, 61, 54, 55, 77, 57, 54, 40, 62, 51, 63, 86, 120, 58, 60, 62, 59, 78, 63, 72, 59, 65, 82, 67, 68, 74, 50, 67, 67, 73, 59, 58, 54, 62, 56, 70, 65, 63, 103, 65, 55, 67, 58, 60, 41, 85, 58, 64, 85, 91, 75, 70, 59, 81, 68, 43, 60, 63, 57, 55, 67, 62, 67, 109, 64, 54, 56, 93, 81, 57, 62, 79, 67, 70, 62, 68, 79, 67, 65, 64, 72, 53, 69, 90, 60, 73, 69, 71, 90, 57, 95, 61, 77, 49, 56, 68, 72, 89, 65, 53, 58, 91, 65, 72, 48, 47, 54, 68, 59, 64, 62, 48, 77, 92, 75, 67, 62, 98, 58, 50, 79, 64, 103, 57, 84, 63, 57, 75, 64, 76, 46, 94, 54, 60, 74, 77, 55, 79, 77, 61, 71, 49, 62, 75, 82, 62, 55, 67, 55, 67, 64, 111, 58, 49, 80, 62, 48, 58, 57, 46, 49, 77, 79, 83, 62, 86, 57, 93, 64, 68, 76, 62, 59, 84, 64, 84, 66, 81, 71, 81, 87, 60, 75, 63, 60, 55, 48, 67, 56, 55, 62, 72, 79, 102, 62, 85, 55, 80, 106, 65, 88, 67, 70, 58, 50, 79, 70, 67, 48, 68, 84, 68, 59, 67, 42, 85, 49, 64, 92, 80, 51, 73, 64, 58, 52, 67, 57, 78, 79, 49, 95, 83, 55, 83, 68, 68, 90, 50, 79, 70, 46, 55, 85, 80, 52, 69, 56, 53, 70, 56, 63, 64, 55, 59, 77, 80, 54, 57, 80, 46, 73, 62, 62, 47, 62, 70, 55, 74, 54, 59, 66, 60, 46, 75, 61, 81, 55, 68, 76, 53, 70, 90, 51, 57, 63, 88, 65, 59, 68, 59, 51, 51, 88, 63, 65, 70, 85, 54, 55, 85, 77, 78, 104, 56, 53, 76, 57, 56, 57, 67, 68, 75, 58, 51, 75, 73, 84, 46, 67, 54, 64, 68, 76, 60, 89, 42, 81, 79, 52, 78, 61, 65, 51, 71, 63, 59, 77, 61, 60, 61, 61, 64, 55, 54, 60, 78, 66, 94, 62, 68, 64, 47, 48, 91, 70, 65, 94, 74, 69, 51, 66, 66, 84, 67, 59, 63, 76, 43, 66, 52, 52, 62, 59, 62, 77, 61, 56, 67, 59, 63, 58, 58, 69, 87, 86, 60, 50, 53, 61, 64, 69, 66, 64, 72, 61, 53, 67, 56, 85, 73, 48, 67, 55, 59, 47, 59, 63, 73, 50, 54, 65, 63, 77, 69, 79, 52, 53, 100, 80, 62, 59, 74, 64, 122, 65, 91, 61, 58, 75, 56, 53, 56, 81, 59, 
77, 83, 108, 86, 63, 63, 57, 63, 78, 71, 64, 61, 39, 66, 55, 46, 53, 74, 55, 71, 57, 73, 67, 70, 60, 61, 63, 55, 71, 73, 82, 69, 98, 51, 72, 68, 77, 57, 70, 65, 44, 55, 59, 65, 72, 67, 65, 70, 77, 64, 71, 78, 60, 69, 70, 58, 79, 84, 57, 82, 80, 63, 46, 77, 81, 61, 64, 123, 55, 94, 60, 48, 82, 60, 49, 75, 64, 59, 63, 55, 66, 51, 88, 64, 101, 76, 68, 80, 51, 60, 41, 69, 85, 48, 62, 66, 55, 58, 65, 89, 53, 53, 48, 47, 81, 85, 98, 88, 70, 92, 50, 42, 71, 58, 52, 62, 52, 73, 82, 50, 60, 87, 55, 72, 61, 84, 61, 51, 60, 58, 75, 52, 61, 58, 57, 57, 84, 98, 45, 66, 71, 67, 33, 60, 72, 85, 46, 57, 74, 68, 67, 67, 58, 42, 47, 56, 71, 65, 99, 80, 71, 65, 84, 45, 72, 58, 41, 93, 61, 93, 68, 60, 61, 59, 69, 57, 74, 50, 73, 70, 49, 50, 61, 57, 71, 59, 59, 76, 52, 55, 73, 61, 76, 55, 79, 68, 63, 69, 68, 98, 73, 62, 65, 52, 73, 43, 61, 52, 65, 63, 57, 62, 75, 68, 81, 45, 76, 73, 54, 72, 51, 49, 79, 62, 88, 79, 65, 57, 60, 85, 49, 51, 53, 54, 114, 75, 70, 65, 80, 92, 79, 53, 115, 67, 80, 68, 91, 75, 73, 62, 104, 61, 60, 61, 79, 83, 60, 78, 62, 61, 58, 66, 82, 79, 63, 52, 69, 45, 70, 46, 50, 49, 52, 62, 73, 36, 65, 57, 72, 40, 74, 61, 60, 80, 93, 67, 57, 64, 47, 64, 68, 76, 64, 75, 81, 70, 49, 66, 67, 69, 49, 108, 54, 62, 61, 57, 105, 70, 62, 94, 80, 75, 56, 61, 61, 57, 62, 66, 84, 70, 46, 74, 69, 57, 66, 60, 135, 109, 56, 67, 61, 61, 88, 75, 70, 88, 41, 62, 46, 80, 49, 91, 79, 54, 82, 68, 82, 87, 43, 83, 95, 72, 78, 91, 64, 61, 73, 66, 65, 61, 87, 80, 81, 52, 78, 62, 79, 76, 54, 57, 65, 82, 55, 83, 70, 67, 65, 53, 62, 64, 73, 49, 54, 61, 55, 79, 56, 77, 61, 64, 61, 84, 60, 71, 66, 66, 63, 49, 59, 69, 91, 95, 80, 54, 52, 37, 64, 52, 63, 111, 65, 59, 67, 62, 59, 73, 57, 62, 47, 85, 68, 44, 62, 62, 46, 140, 57, 64, 68, 62, 39, 60, 51, 61, 61, 86, 38, 66, 60, 50, 98, 53, 61, 65, 57, 65, 58, 58, 58, 63, 44, 61, 48, 53, 66, 53, 73, 64, 65, 71, 58, 57, 63, 95, 44, 64, 114, 60, 96, 59, 76, 51, 66, 67, 69, 72, 72, 54, 52, 61, 85, 60, 62, 61, 87, 89, 56, 76, 73, 59, 47, 48, 63, 74, 65, 78, 66, 90, 53, 72, 76, 62, 86, 56, 51, 54, 79, 71, 79, 72, 56, 61, 66, 47, 56, 96, 65, 71, 67, 73, 63, 51, 48, 87, 71, 52, 61, 67, 67, 48, 62, 63, 55, 66, 43, 61, 80, 94, 106, 64, 73, 43, 74, 57, 51, 63, 47, 78, 92, 74, 74, 85, 53, 72, 66, 66, 75, 49, 56, 74, 55, 90, 57, 52, 58, 52, 73, 64, 65, 67, 61, 52, 69, 98, 45, 63, 61, 64, 63, 53, 80, 69, 72, 81, 72, 52, 63, 61, 50, 72, 64, 59, 79, 44, 83, 35, 63, 61, 55, 63, 60, 64, 92, 76, 66, 71, 56, 87, 59, 52, 54, 57, 69, 75, 89, 74, 60, 63, 51, 88, 66, 62, 57, 68, 63, 94, 59, 52, 51, 78, 75, 62, 61, 72, 79, 64, 74, 67, 46, 71, 88, 46, 72, 91, 49, 70, 58, 65, 51, 93, 55, 104, 52, 126, 55, 60, 64, 68, 67, 78, 48, 61, 60, 68, 67, 108, 53, 63, 41, 71, 35, 77, 70, 82, 73, 68, 79, 87, 65, 77, 45, 57, 50, 57, 89, 68, 97, 103, 85, 37, 43, 74, 51, 87, 55, 59, 46, 54, 90, 75, 58, 72, 75, 40, 50, 62, 43, 59, 51, 66, 76, 105, 85, 55, 77, 43, 63, 95, 87, 62, 66, 60, 76, 82, 65, 68, 75, 71, 103, 60, 68, 102, 95, 72, 62, 59, 78, 61, 75, 58, 68, 59, 66, 53, 48, 43, 56, 44, 42, 57, 63, 73, 103, 80, 76, 82, 42, 65, 56, 61, 59, 67, 71, 82, 60, 92, 56, 66, 61, 106, 57, 66, 43, 91, 48, 74, 63, 55, 43, 64, 71, 59, 69, 88, 65, 117, 83, 49, 44, 90, 90, 54, 55, 48, 60, 53, 62, 70, 68, 45, 66, 39, 64, 50, 70, 77, 53, 56, 108, 76, 64, 63, 63, 46, 60, 51, 79, 44, 144, 70, 68, 52, 65, 65, 69, 67, 65, 66, 96, 71, 59, 119, 64, 58, 83, 67, 54, 62, 76, 78, 48, 69, 51, 58, 56, 54, 85, 63, 60, 67, 66, 60, 73, 79, 65, 71, 59, 89, 73, 52, 62, 47, 66, 66, 74, 104, 61, 70, 65, 69, 48, 42, 78, 58, 46, 71, 64, 62, 105, 62, 59, 
57, 77, 76, 66, 60, 53, 63, 73, 57, 66, 43, 75, 67, 78, 60, 73, 79, 71, 51, 81, 50, 80, 72, 57, 62, 73, 58, 61, 78, 56, 48, 59, 61, 71, 51, 57, 52, 54, 64, 61, 60, 83, 63, 40, 52, 50, 68, 45, 65, 60, 102, 69, 79, 68, 75, 64, 101, 62, 82, 55, 82, 72, 59, 56, 76, 108, 59, 83, 59, 63, 62, 51, 91, 53, 49, 71, 77, 83, 60, 58, 51, 51, 70, 81, 63, 75, 57, 62, 77, 55, 73, 45, 56, 43, 58, 75, 37, 70, 78, 55, 82, 72, 84, 59, 60, 62, 81, 84, 71, 62, 44, 86, 68, 50, 69, 51, 62, 52, 38, 55, 47, 66, 63, 64, 45, 78, 66, 65, 48, 53, 57, 59, 66, 57, 74, 70, 63, 76, 74, 76, 58, 63, 55, 58, 75, 71, 78, 64, 69, 65, 66, 55, 66, 59, 62, 62, 64, 73, 57, 64, 46, 67, 73, 68, 61, 53, 92, 65, 77, 115, 55, 66, 66, 56, 70, 52, 52, 57, 51, 98, 59, 58, 51, 49, 58, 54, 87, 70, 60, 74, 60, 67, 46, 73, 63, 75, 58, 57, 58, 63, 67, 50, 59, 60, 61, 60, 56, 48, 72, 56, 67, 86, 61, 48, 52, 61, 58, 58, 57, 93, 76, 53, 69, 52, 52, 56, 58, 66, 59, 59, 50, 62, 67, 63, 67, 85, 66, 59, 76, 59, 76, 59, 57, 101, 91, 62, 59, 52, 49, 76, 58, 62, 98, 62, 68, 66, 58, 60, 61, 74, 72, 67, 66, 68, 63, 68, 101, 100, 64, 48, 62, 91, 67, 60, 48, 84, 59, 70, 55, 54, 70, 63, 55, 59, 56, 127, 74, 48, 64, 66, 77, 66, 80, 62, 52, 78, 66, 51, 59, 63, 88, 99, 69, 67, 53, 84, 73, 55, 81, 61, 68, 55, 70, 65, 65, 69, 72, 52, 58, 69, 95, 58, 67, 62, 69, 67, 57, 74, 57, 63, 67, 58, 60, 65, 123, 50, 80, 72, 75, 71, 51, 77, 62, 69, 43, 83, 63, 67, 54, 70, 68, 53, 51, 60, 82, 41, 60, 61, 81, 62, 91, 75, 85, 53, 77, 59, 54, 84, 69, 53, 52, 62, 67, 75, 55, 76, 66, 61, 51, 59, 88, 72, 51, 60, 52, 99, 68, 69, 59, 53, 53, 73, 73, 80, 76, 49, 75, 44, 45, 64, 60, 74, 72, 49, 82, 73, 52, 64, 64, 65, 68, 66, 67, 63, 63, 82, 71, 54, 61, 51, 68, 53, 62, 44, 57, 58, 47, 66, 59, 62, 60, 49, 92, 64, 83, 64, 43, 68, 58, 46, 59, 60, 53, 91, 62, 52, 60, 95, 66, 61, 71, 83, 65, 89, 55, 62, 56, 58, 40, 73, 61, 68, 96, 59, 54, 67, 70, 65, 61, 69, 64, 57, 75, 68, 53, 52, 63, 64, 63, 77, 78, 65, 57, 65, 73, 64, 43, 59, 68, 70, 66, 73, 67, 78, 86, 67, 105, 133, 67, 61, 71, 61, 67, 114, 73, 74, 72, 56, 65, 61, 62, 86, 64, 70, 65, 74, 61, 93, 56, 60, 70, 49, 67, 69, 61, 59, 63, 55, 59, 56, 144, 56, 68, 56, 54, 63, 69, 62, 103, 64, 79, 81, 72, 84, 80, 64, 55, 56, 83, 133, 57, 61, 53, 86, 63, 59, 71, 104, 57, 77, 35, 63, 57, 68, 67, 62, 58, 46, 78, 74, 63, 71, 64, 66, 54, 52, 50, 63, 65, 70, 71, 62, 63, 55, 76, 66, 63, 87, 61, 86, 59, 59, 52, 68, 69, 54, 63, 60, 67, 82, 58, 75, 59, 58, 65, 53, 53, 61, 50, 68, 64, 65, 79, 56, 48, 57, 64, 52, 58, 49, 57, 64, 56, 69, 69, 93, 69, 56, 78, 71, 53, 74, 54, 50, 62, 70, 55, 58, 63, 59, 66, 47, 67, 55, 47, 55, 59, 66, 54, 71, 61, 57, 63, 56, 58, 43, 71, 64, 58, 75, 60, 59, 61, 54, 55, 58, 72, 65, 71, 45, 64, 77, 54, 55, 63, 77, 62, 67, 73, 46, 66, 74, 53, 57, 79, 109, 56, 74, 72, 61, 59, 59, 58, 61, 61, 55, 59, 80, 70, 71, 72, 68, 58, 64, 58, 62, 59, 94, 59, 58, 63, 56, 73, 67, 79, 54, 55, 64, 70, 60, 66, 61, 72, 70, 63, 54, 83, 66, 47, 65, 86, 60, 47, 50, 60, 79, 62, 76, 73, 67, 61, 56, 47, 82, 53, 66, 136, 65, 64, 66, 47, 67, 66, 61, 77, 40, 56, 73, 75, 67, 68, 70, 81, 75, 65, 81, 64, 71, 59, 78, 84, 56, 62, 72, 58, 60, 57, 61, 70, 44, 60, 83, 49, 53, 70, 74, 67, 68, 57, 58, 63, 50, 62, 78, 62, 62, 107, 68, 55, 58, 118, 67, 58, 83, 59, 73, 65, 99, 61, 90, 56, 91, 51, 70, 74, 53, 72, 59, 107, 88, 59, 78, 48, 55, 61, 71, 59, 56, 67, 73, 91, 66, 54, 48, 65, 68, 70, 80, 57, 67, 101, 57, 52, 61, 48, 59, 74, 67, 71, 48, 75, 51, 65, 50, 67, 90, 66, 52, 49, 64, 61, 72, 65, 78, 64, 82, 67, 68, 73, 62, 67, 76, 57, 58, 68, 54, 63, 61, 56, 68, 58, 74, 62, 
53, 65, 60, 64, 66, 61, 76, 70, 76, 59, 83, 83, 49, 54, 74, 74, 43, 58, 75, 60, 57, 66, 34, 46, 55, 55, 53, 70, 144, 57, 51, 57, 83, 60, 64, 73, 57, 57, 65, 52, 70, 63, 64, 63, 69, 65, 63, 54, 51, 87, 59, 72, 59, 77, 55, 61, 67, 62, 62, 73, 69, 62, 85, 67, 52, 84, 60, 63, 74, 87, 65, 73, 63, 76, 64, 65, 49, 58, 70, 64, 65, 55, 75, 58, 66, 52, 93, 66, 54, 56, 73, 59, 62, 66, 86, 85, 48, 64, 91, 71, 66, 56, 57, 84, 57, 57, 71, 54, 61, 64, 63, 82, 68, 80, 116, 68, 85, 51, 68, 73, 69, 62, 89, 65, 51, 52, 61, 57, 56, 96, 84, 59, 73, 45, 86, 77, 81, 78, 63, 57, 51, 54, 56, 58, 58, 71, 55, 64, 58, 74, 92, 66, 77, 79, 59, 72, 70, 60, 69, 70, 78, 74, 64, 92, 57, 69, 65, 85, 59, 123, 61, 61, 56, 52, 63, 63, 54, 50, 71, 59, 93, 57, 91, 57, 57, 90, 63, 64, 54, 58, 66, 78, 51, 79, 59, 64, 64, 73, 77, 71, 59, 51, 58, 70, 76, 82, 56, 68, 68, 77, 59, 80, 58, 58, 55, 59, 55, 56, 61, 64, 87, 78, 54, 82, 70, 69, 72, 83, 64, 68, 63, 67, 53, 71, 77, 66, 48, 58, 46, 51, 62, 70, 56, 62, 67, 67, 62, 70, 76, 61, 73, 72, 51, 78, 75, 67, 54, 61, 52, 82, 65, 46, 49, 54, 66, 47, 86, 59, 48, 82, 50, 51, 56, 96, 68, 67, 69, 59, 52, 51, 83, 66, 38, 75, 56, 59, 53, 63, 71, 68, 68, 48, 61, 64, 64, 82, 74, 71, 76, 65, 68, 56, 68, 65, 71, 69, 60, 56, 50, 81, 82, 65, 71, 73, 62, 66, 53, 61, 54, 69, 68, 66, 60, 49, 56, 57, 70, 74, 79, 70, 64, 69, 73, 81, 100, 64, 64, 75, 61, 58, 56, 63, 61, 60, 79, 73, 66, 71, 66, 70, 61, 53, 88, 58, 64, 81, 58, 66, 49, 53, 68, 71, 57, 62, 60, 66, 71, 73, 60, 65, 61, 73, 61, 62, 64, 61, 62, 56, 61, 56, 86, 76, 61, 61, 52, 61, 71, 74, 61, 68, 68, 74, 55, 58, 56, 60, 68, 77, 92, 55, 69, 73, 66, 66, 62, 72, 62, 63, 61, 61, 59, 69, 76, 65, 45, 68, 84, 62, 71, 63, 71, 83, 52, 68, 70, 53, 69, 88, 60, 74, 63, 54, 76, 81, 55, 64, 68, 70, 76, 64, 73, 70, 53, 70, 67, 54, 66, 55, 60, 81, 60, 56, 65, 69, 52, 65, 70, 63, 56, 58, 65, 69, 54, 60, 61, 96, 69, 74, 62, 59, 68, 64, 55, 59, 59, 104, 83, 63, 62, 88, 60, 61, 71, 63, 71, 59, 78, 75, 65, 65, 62, 61, 69, 68, 72, 103, 64, 84, 92, 66, 65, 55, 62, 64, 65, 61, 61, 66, 62, 61, 57, 55, 67, 60, 56, 65, 60, 59, 57, 58, 71, 71, 75, 58, 61, 56, 55, 95, 87, 57, 55, 55, 66, 61, 69, 49, 66, 56, 69, 64, 60, 59, 70, 71, 76, 69, 58, 59, 57, 71, 65, 59, 84, 44, 45, 75, 58, 64, 59, 74, 57, 65, 80, 88, 113, 86, 63, 57, 46, 53, 60, 65, 54, 73, 71, 63, 83, 100, 67, 90, 58, 59, 68, 57, 64, 61, 63, 66, 65, 59, 76, 63, 62, 60, 64, 59, 74, 62, 71, 60, 60, 61, 73, 75, 72, 79, 60, 59, 56, 65, 58, 65, 57, 62, 55, 61, 81, 65, 61, 61, 64, 68, 60, 63, 91, 70, 52, 63, 71, 54, 85, 57, 61, 67, 64, 61, 61, 92, 97, 55, 62, 71, 55, 57, 60, 56, 73, 63, 66, 60, 62, 61, 76, 59, 75, 60, 59, 52, 62, 57, 60, 65, 55, 70, 49, 65, 76, 64, 66, 69, 70, 69, 61, 74, 75, 68, 60, 77, 72, 69, 66, 55, 83, 62, 58, 54, 82, 81, 64, 71, 120, 58, 58, 60, 110, 66, 60, 69, 76, 51, 58, 66, 56, 73, 66, 66, 59, 62, 63, 66, 103, 88, 56, 58, 81, 58, 58, 61, 60, 58, 78, 67, 64, 60, 67, 71, 58, 83, 72, 65, 59, 57, 80, 55, 66, 64, 65, 59, 62, 63, 67, 69, 66, 58, 63, 61, 56, 74, 111, 59, 82, 69, 67, 73, 54, 76, 64, 64, 56, 64, 60, 58, 61, 64, 64, 61, 69, 63, 67, 66, 52, 66, 58, 69, 74, 65, 72, 61, 68, 74, 66, 59, 53, 60, 71, 67, 59, 69, 70, 106, 61, 64, 71, 67, 62, 61, 68, 78, 70, 53, 79, 62, 49, 60, 79, 58, 58, 71, 64, 85, 67, 65, 61, 69, 44, 80, 56, 127, 66, 75, 67, 66, 69, 65, 60, 71, 90, 70, 65, 86, 75, 106, 57, 56, 61, 58, 62, 78, 55, 58, 59, 67, 59, 64, 66, 70, 70, 56, 53, 76, 54, 63, 58, 87, 59, 62, 64, 63, 80, 69, 64, 72, 72, 64, 55, 60, 59, 62, 60, 58, 59, 65, 58, 51, 55, 63, 67, 62, 56, 66, 61, 58, 66, 78, 73, 
59, 52, 70, 62, 83, 49, 67, 76, 58, 56, 60, 68, 66, 67, 71, 67, 56, 67, 76, 100, 60, 66, 59, 63, 58, 73, 53, 67, 70, 54, 62, 60, 69, 62, 70, 63, 106, 60, 55, 59, 76, 68, 64, 60, 62, 55, 55, 67, 62, 105, 52, 68, 95, 45, 64, 85, 82, 55, 66, 55, 66, 64, 59, 70, 78, 63, 91, 87, 68, 98, 78, 63, 73, 69, 69, 64, 67, 63, 71, 66, 60, 66, 56, 59, 56, 60, 67, 67, 71, 63, 100, 68, 59, 83, 68, 88, 83, 75, 42, 57, 62, 63, 60, 55, 80, 58, 66, 66, 61, 52, 54, 73, 58, 57, 58, 70, 64, 63, 58, 63, 55, 58, 87, 58, 67, 63, 59, 63, 66, 47, 55, 63, 55, 76, 57, 58, 51, 62, 67, 63, 65, 58, 56, 83, 71, 62, 73, 74, 63, 74, 57, 61, 92, 58, 57, 70, 55, 60, 60, 55, 58, 77, 61, 63, 58, 62, 73, 58, 62, 65, 65, 65, 63, 60, 67, 86, 64, 122, 75, 59, 68, 87, 64, 61, 78, 68, 65, 59, 69, 64, 66, 62, 74, 62, 91, 54, 73, 99, 93, 53, 65, 73, 86, 72, 57, 50, 58, 64, 68, 62, 73, 71, 65, 67, 56, 51, 62, 46, 63, 58, 65, 56, 62, 74, 60, 62, 82, 60, 60, 59, 63, 59, 68, 65, 50, 60, 67, 68, 50, 73, 54, 75, 76, 86, 75, 62, 51, 59, 52, 92, 58, 48, 97, 77, 83, 60, 61, 62, 60, 57, 66, 58, 76, 61, 71, 65, 66, 69, 74, 66, 64, 55, 68, 56, 59, 85, 97, 65, 57, 67, 69, 58, 60, 58, 73, 70, 57, 72, 72, 77, 56, 58, 74, 66, 56, 65, 76, 69, 65, 54, 77, 63, 61, 74, 59, 59, 67, 62, 83, 63, 63, 57, 71, 67, 53, 69, 58, 66, 51, 66, 58, 59, 53, 67, 62, 62, 65, 67, 73, 56, 77, 66, 67, 56, 71, 72, 59, 75, 63, 55, 58, 78, 64, 55, 62, 67, 56, 68, 52, 67, 58, 64, 65, 65, 69, 61, 58, 78, 52, 53, 70, 69, 65, 56, 69, 80, 66, 77, 57, 82, 63, 59, 69, 68, 59, 81, 71, 65, 70, 60, 54, 65, 71, 48, 60, 64, 61, 46, 75, 89, 87, 53, 73, 58, 57, 61, 103, 53, 62, 65, 93, 63, 67, 72, 82, 56, 60, 69, 77, 60, 54, 74, 94, 88, 76, 60, 44, 58, 69, 89, 61, 84, 44, 91, 62, 65, 83, 55, 80, 70, 37, 78, 69, 62, 41, 93, 70, 62, 57, 63, 76, 68, 68, 77, 87, 56, 66, 47, 47, 105, 63, 65, 60, 94, 62, 83, 59, 63, 79, 98, 80, 88, 54, 79, 55, 56, 57, 65, 58, 60, 62, 74, 62, 62, 68, 70, 68, 68, 75, 63, 70, 52, 58, 84, 59, 50, 53, 74, 56, 64, 50, 87, 70, 54, 72, 71, 54, 73, 79, 63, 52, 65, 72, 57, 62, 78, 55, 63, 69, 103, 34, 100, 53, 69, 72, 57, 76, 57, 52, 80, 49, 64, 52, 63, 80, 59, 81, 75, 67, 59, 116, 67, 62, 91, 65, 72, 70, 56, 63, 64, 44, 60, 58, 102, 60, 59, 86, 69, 60, 71, 68, 59, 57, 64, 68, 65, 61, 49, 47, 67, 60, 61, 77, 72, 78, 81, 59, 70, 69, 36, 46, 68, 83, 63, 76, 81, 75, 66, 56, 62, 57, 82, 50, 66, 74, 78, 50, 46, 102, 47, 68, 62, 53, 61, 85, 75, 69, 35, 66, 86, 70, 60, 56, 38, 75, 70, 64, 86, 66, 47, 81, 48, 83, 54, 49, 45, 60, 58, 105, 64, 59, 77, 67, 70, 56, 52, 61, 60, 68, 66, 62, 57, 73, 51, 41, 70, 53, 82, 49, 53, 60, 59, 48, 63, 66, 61, 53, 106, 67, 72, 76, 50, 92, 66, 36, 55, 63, 41, 117, 72, 64, 61, 60, 67, 60, 61, 57, 62, 71, 63, 58, 76, 101, 39, 74, 63, 70, 60, 98, 84, 51, 128, 81, 53, 97, 65, 66, 63, 61, 48, 57, 49, 50, 52, 55, 76, 58, 70, 86, 59, 84, 60, 74, 73, 60, 56, 80, 92, 55, 87, 56, 47, 66, 63, 85, 59, 69, 51, 61, 67, 68, 50, 75, 70, 62, 60, 54, 59, 49, 45, 71, 56, 65, 52, 64, 71, 64, 68, 68, 65, 52, 83, 81, 62, 72, 47, 78, 67, 67, 97, 64, 75, 73, 62, 76, 104, 72, 72, 77, 78, 47, 49, 81, 44, 103, 57, 64, 85, 72, 65, 69, 57, 77, 98, 61, 66, 131, 57, 71, 77, 73, 74, 38, 64, 115, 78, 75, 56, 61, 54, 73, 54, 78, 72, 52, 71, 80, 58, 47, 46, 50, 63, 52, 40, 47, 55, 62, 64, 60, 48, 63, 81, 82, 60, 60, 42, 66, 51, 62, 70, 41, 81, 84, 88, 58, 67, 63, 51, 50, 53, 78, 81, 76, 115, 69, 67, 69, 73, 68, 72, 55, 60, 56, 88, 71, 47, 56, 61, 80, 110, 54, 63, 44, 75, 70, 53, 81, 65, 55, 51, 56, 64, 66, 104, 58, 70, 42, 62, 54, 59, 66, 46, 70, 57, 62, 64, 83, 65, 71, 56, 55, 69, 
80, 54, 51, 61, 66, 102, 104, 51, 62, 73, 64, 72, 57, 78, 52, 53, 61, 79, 88, 60, 57, 49, 45, 87, 77, 68, 73, 63, 65, 70, 75, 61, 45, 64, 61, 85, 61, 82, 68, 49, 52, 65, 74, 71, 73, 66, 87, 73, 65, 59, 58, 55, 70, 78, 77, 81, 64, 69, 64, 58, 69, 66, 63, 79, 65, 62, 54, 56, 63, 53, 121, 107, 58, 67, 73, 59, 62, 59, 71, 79, 41, 53, 66, 75, 56, 89, 49, 62, 80, 79, 63, 46, 93, 51, 64, 100, 77, 68, 49, 62, 53, 64, 58, 45, 84, 64, 90, 82, 62, 41, 54, 61, 52, 65, 57, 71, 82, 76, 75, 89, 39, 47, 52, 77, 66, 56, 68, 62, 65, 74, 92, 55, 55, 58, 48, 69, 56, 68, 71, 53, 53, 72, 57, 61, 69, 81, 70, 66, 63, 66, 74, 78, 54, 40, 60, 48, 55, 72, 49, 84, 73, 72, 51, 57, 58, 52, 63, 60, 58, 64, 86, 69, 74, 64, 81, 66, 107, 62, 57, 63, 64, 68, 63, 72, 68, 64, 44, 95, 62, 63, 48, 78, 78, 67, 70, 67, 61, 71, 66, 64, 74, 52, 64, 86, 60, 86, 53, 65, 75, 65, 43, 55, 78, 58, 68, 76, 75, 54, 60, 59, 69, 73, 92, 71, 77, 91, 66, 100, 47, 43, 92, 52, 69, 70, 62, 45, 81, 61, 75, 65, 62, 61, 49, 76, 61, 64, 67, 58, 55, 70, 47, 58, 78, 73, 88, 58, 51, 70, 75, 69, 65, 59, 66, 57, 65, 68, 64, 65, 78, 122, 85, 67, 77, 75, 50, 51, 47, 69, 67, 58, 55, 60, 72, 80, 54, 49, 48, 73, 74, 43, 70, 46, 55, 64, 68, 60, 50, 54, 56, 83, 78, 66, 88, 60, 76, 57, 68, 51, 70, 65, 69, 56, 63, 81, 61, 60, 69, 61, 98, 82, 61, 61, 74, 57, 59, 80, 65, 55, 59, 52, 56, 55, 69, 67, 88, 63, 59, 48, 65, 63, 65, 49, 86, 58, 60, 66, 76, 66, 128, 68, 64, 49, 41, 52, 66, 51, 52, 63, 59, 61, 52, 62, 81, 52, 70, 80, 65, 95, 66, 55, 71, 59, 81, 72, 74, 59, 95, 57, 60, 66, 70, 73, 71, 56, 79, 81, 59, 84, 90, 59, 61, 71, 58, 62, 53, 85, 71, 62, 55, 56, 87, 67, 64, 66, 62, 64, 58, 64, 56, 52, 122, 54, 51, 86, 79, 98, 89, 62, 51, 63, 72, 73, 53, 67, 59, 77, 59, 87, 66, 56, 99, 62, 68, 54, 77, 61, 74, 63, 66, 75, 54, 57, 92, 71, 87, 77, 77, 68, 83, 80, 44, 60, 54, 74, 67, 58, 72, 77, 49, 61, 65, 70, 62, 74, 86, 57, 51, 120, 72, 82, 57, 58, 67, 61, 66, 61, 47, 60, 55, 70, 65, 42, 57, 62, 64, 73, 50, 75, 66, 71, 61, 70, 50, 57, 55, 55, 80, 58, 48, 69, 88, 113, 96, 73, 53, 61, 58, 65, 50, 83, 75, 53, 70, 71, 63, 76, 56, 67, 64, 114, 122, 57, 75, 72, 74, 55, 61, 67, 53, 66, 83, 64, 92, 48, 69, 72, 63, 55, 64, 82, 55, 70, 77, 40, 54, 66, 78, 72, 72, 70, 92, 72, 48, 60, 88, 61, 66, 68, 79, 61, 55, 88, 56, 36, 102, 53, 64, 83, 82, 67, 75, 80, 34, 67, 73, 62, 60, 64, 84, 91, 89, 68, 51, 60, 56, 64, 56, 49, 61, 72, 59, 71, 90, 42, 68, 59, 73, 82, 78, 61, 56, 51, 108, 102, 75, 58, 62, 74, 54, 62, 55, 53, 57, 54, 56, 55, 59, 59, 103, 63, 54, 63, 56, 66, 106, 73, 58, 66, 65, 46, 73, 63, 65, 56, 68, 68, 117, 50, 63, 75, 62, 95, 59, 85, 57, 48, 59, 67, 68, 66, 84, 66, 79, 45, 78, 53, 69, 65, 91, 40, 43, 91, 62, 67, 49, 67, 56, 83, 61, 67, 64, 92, 66, 46, 53, 87, 49, 54, 67, 68, 73, 53, 76, 47, 83, 50, 58, 60, 66, 58, 56, 47, 69, 59, 61, 107, 76, 69, 64, 70, 95, 66, 68, 47, 70, 59, 69, 69, 56, 63, 55, 54, 74, 59, 55, 79, 67, 74, 46, 59, 64, 39, 51, 69, 81, 62, 51, 42, 56, 53, 61, 74, 55, 67, 63, 79, 74, 63, 52, 70, 65, 73, 44, 51, 70, 78, 43, 74, 73, 56, 65, 71, 55, 66, 61, 64, 60, 40, 53, 51, 59, 103, 73, 60, 58, 75, 58, 54, 79, 48, 59, 49, 68, 59, 51, 84, 57, 58, 62, 49, 77, 52, 59, 56, 77, 69, 74, 60, 95, 57, 68, 56, 73, 69, 45, 59, 56, 107, 41, 49, 57, 68, 56, 60, 73, 73, 59, 87, 62, 53, 97, 86, 72, 82, 60, 70, 62, 67, 66, 74, 51, 85, 62, 75, 63, 62, 55, 67, 70, 60, 69, 85, 59, 77, 77, 60, 72, 67, 63, 64, 61, 61, 75, 73, 55, 61, 76, 52, 54, 80, 57, 67, 54, 75, 50, 73, 65, 63, 101, 65, 63, 57, 61, 40, 62, 52, 74, 71, 39, 73, 46, 55, 56, 55, 53, 53, 61, 73, 61, 71, 58, 76, 
61, 67, 45, 60, 120, 66, 67, 60, 57, 52, 51, 59, 55, 82, 66, 67, 65, 66, 55, 41, 95, 67, 62, 81, 71, 53, 54, 80, 66, 65, 81, 95, 81, 75, 65, 71, 63, 54, 77, 57, 81, 61, 56, 55, 54, 85, 53, 114, 78, 96, 104, 101, 58, 56, 56, 69, 63, 107, 66, 62, 60, 66, 54, 64, 46, 39, 63, 67, 82, 81, 63, 72, 70, 62, 79, 74, 65, 79, 55, 66, 62, 50, 74, 60, 73, 58, 75, 60, 63, 43, 49, 67, 50, 85, 75, 68, 59, 76, 80, 45, 53, 67, 66, 62, 55, 48, 62, 71, 58, 46, 56, 62, 67, 66, 73, 66, 60, 61, 56, 45, 63, 47, 45, 88, 57, 61, 67, 87, 67, 67, 60, 58, 90, 62, 76, 66, 58, 64, 72, 65, 40, 52, 70, 74, 55, 63, 58, 67, 80, 74, 56, 73, 56, 60, 65, 58, 53, 87, 61, 65, 68, 55, 81, 54, 68, 76, 107, 48, 62, 86, 54, 77, 89, 67, 66, 60, 72, 92, 79, 84, 66, 59, 53, 53, 59, 67, 83, 73, 94, 73, 63, 78, 69, 61, 63, 58, 75, 56, 91, 44, 52, 65, 64, 68, 73, 63, 70, 54, 63, 55, 56, 68, 86, 85, 66, 58, 74, 90, 64, 97, 71, 78, 53, 121, 69, 63, 43, 54, 45, 87, 65, 54, 64, 51, 65, 72, 79, 107, 62, 41, 58, 78, 57, 78, 93, 64, 57, 64, 60, 71, 75, 77, 62, 54, 71, 60, 59, 67, 47, 62, 67, 60, 48, 61, 49, 81, 50, 53, 80, 67, 58, 75, 56, 60, 68, 61, 41, 80, 55, 49, 73, 63, 65, 57, 52, 55, 52, 104, 67, 53, 85, 69, 83, 58, 107, 73, 72, 71, 79, 54, 63, 36, 74, 50, 59, 64, 54, 80, 57, 53, 75, 63, 69, 48, 62, 53, 47, 57, 64, 45, 61, 73, 62, 52, 64, 87, 63, 84, 74, 74, 67, 70, 67, 69, 89, 53, 54, 56, 54, 61, 61, 68, 61, 52, 69, 64, 46, 90, 59, 91, 64, 50, 55, 68, 72, 62, 46, 72, 64, 69, 78, 63, 87, 46, 71, 51, 70, 102, 60, 56, 75, 59, 94, 65, 60, 40, 59, 81, 69, 70, 71, 66, 57, 67, 63, 64, 48, 49, 86, 79, 53, 63, 82, 61, 65, 59, 66, 58, 71, 73, 43, 63, 52, 53, 72, 61, 57, 72, 77, 94, 72, 87, 65, 62, 64, 73, 65, 72, 47, 46, 58, 59, 64, 53, 59, 56, 73, 59, 62, 78, 59, 61, 99, 63, 58, 71, 54, 59, 55, 62, 57, 83, 65, 77, 68, 51, 66, 55, 73, 47, 82, 62, 78, 44, 65, 62, 71, 49, 61, 62, 109, 61, 86, 65, 68, 63, 68, 55, 61, 58, 68, 56, 51, 39, 51, 58, 92, 64, 62, 61, 60, 63, 83, 48, 89, 74, 56, 54, 63, 79, 97, 70, 49, 60, 62, 53, 76, 81, 96, 50, 76, 55, 47, 56, 57, 63, 92, 59, 61, 52, 56, 53, 68, 74, 106, 77, 55, 56, 71, 67, 64, 72, 68, 59, 68, 60, 35, 79, 57, 82, 64, 65, 62, 71, 49, 70, 50, 61, 65, 73, 54, 86, 86, 52, 73, 67, 75, 46, 72, 72, 64, 57, 74, 62, 52, 47, 76, 61, 80, 56, 62, 68, 61, 62, 64, 57, 73, 82, 44, 46, 63, 73, 54, 48, 63, 54, 53, 63, 73, 52, 68, 71, 60, 60, 72, 71, 81, 62, 47, 64, 67, 81, 68, 59, 105, 83, 74, 101, 76, 61, 60, 61, 75, 52, 81, 71, 63, 61, 74, 53, 67, 56, 61, 82, 56, 82, 69, 61, 63, 62, 84, 60, 61, 57, 49, 48, 69, 46, 56, 65, 75, 47, 66, 71, 79, 66, 75, 61, 65, 65, 65, 80, 93, 52, 71, 59, 45, 68, 57, 61, 55, 73, 81, 64, 61, 63, 62, 71, 86, 65, 65, 55, 77, 88, 84, 52, 66, 65, 59, 49, 77, 78, 82, 50, 63, 78, 56, 110, 67, 80, 78, 61, 46, 108, 66, 50, 78, 62, 62, 67, 54, 55, 73, 68, 66, 45, 110, 63, 66, 68, 65, 65, 41, 47, 81, 41, 55, 64, 47, 64, 60, 63, 63, 93, 89, 72, 87, 69, 62, 78, 77, 67, 66, 51, 51, 82, 69, 74, 72, 69, 76, 52, 71, 89, 82, 72, 67, 76, 72, 71, 42, 63, 53, 74, 72, 61, 61, 73, 68, 45, 49, 60, 69, 79, 58, 57, 61, 67, 57, 67, 49, 57, 54, 55, 90, 49, 82, 72, 71, 79, 67, 54, 55, 79, 59, 71, 63, 67, 66, 53, 82, 62, 79, 53, 66, 38, 92, 55, 59, 67, 59, 57, 49, 82, 67, 58, 91, 53, 74, 47, 64, 129, 93, 76, 57, 67, 107, 61, 62, 80, 88, 65, 57, 73, 56, 76, 59, 63, 93, 70, 55, 53, 68, 59, 73, 61, 63, 83, 58, 59, 73, 52, 91, 50, 63, 74, 69, 61, 68, 58, 67, 74, 69, 59, 67, 74, 60, 62, 52, 64, 46, 67, 60, 67, 66, 58, 75, 54, 72, 85, 59, 50, 65, 53, 60, 82, 98, 67, 71, 73, 58, 54, 79, 53, 62, 67, 63, 58, 64, 62, 61, 83, 55, 
76, 64, 74, 62, 64, 57, 53, 67, 71, 60, 71, 78, 59, 77, 56, 61, 60, 68, 74, 75, 69, 61, 69, 64, 63, 51, 59, 78, 45, 70, 63, 77, 79, 56, 53, 67, 70, 53, 65, 62, 64, 46, 57, 66, 62, 54, 91, 42, 72, 59, 65, 67, 62, 57, 59, 74, 58, 58, 60, 54, 66, 55, 56, 96, 70, 74, 53, 61, 53, 47, 70, 62, 67, 81, 69, 90, 79, 58, 86, 84, 67, 79, 66, 60, 54, 62, 68, 68, 62, 61, 60, 51, 53, 52, 71, 66, 64, 63, 53, 49, 63, 60, 58, 60, 70, 49, 81, 71, 72, 52, 78, 88, 47, 63, 64, 63, 77, 85, 66, 45, 57, 67, 91, 71, 64, 73, 72, 87, 58, 75, 51, 66, 68, 66, 77, 76, 77, 46, 60, 46, 48, 62, 60, 74, 61, 61, 81, 63, 64, 88, 86, 56, 63, 50, 66, 55, 45, 68, 54, 48, 154, 76, 70, 60, 65, 63, 59, 57, 59, 81, 56, 48, 54, 79, 66, 78, 61, 61, 59, 47, 52, 94, 67, 59, 70, 48, 74, 48, 90, 83, 107, 67, 63, 60, 56, 55, 120, 76, 68, 77, 57, 58, 74, 93, 65, 91, 71, 79, 70, 73, 54, 70, 61, 57, 62, 96, 69, 68, 63, 68, 76, 65, 66, 59, 60, 51, 52, 67, 58, 74, 71, 67, 57, 57, 68, 55, 67, 79, 70, 52, 73, 56, 63, 42, 65, 73, 47, 69, 88, 63, 74, 69, 72, 83, 92, 73, 56, 63, 60, 59, 78, 52, 99, 75, 73, 54, 59, 31, 63, 64, 67, 62, 80, 84, 65, 44, 86, 82, 67, 47, 53, 59, 66, 130, 72, 63, 78, 57, 55, 73, 64, 45, 67, 67, 66, 67, 61, 54, 61, 117, 54, 73, 56, 97, 63, 64, 53, 56, 75, 66, 65, 65, 54, 67, 69, 84, 72, 56, 35, 66, 60, 58, 63, 69, 75, 54, 60, 47, 61, 64, 57, 69, 70, 63, 60, 76, 65, 66, 79, 59, 68, 79, 78, 62, 57, 84, 69, 61, 48, 66, 61, 47, 64, 103, 84, 91, 57, 80, 64, 82, 59, 62, 98, 52, 55, 60, 75, 76, 61, 60, 66, 60, 54, 62, 60, 103, 72, 47, 110, 119, 68, 82, 106, 70, 61, 47, 62, 43, 67, 65, 118, 55, 61, 58, 50, 52, 55, 82, 54, 49, 55, 64, 87, 60, 52, 78, 62, 87, 62, 56, 48, 59, 70, 60, 73, 61, 96, 55, 50, 64, 80, 51, 55, 60, 72, 71, 81, 63, 80, 51, 87, 68, 106, 67, 63, 72, 62, 96, 69, 61, 56, 70, 70, 67, 56, 70, 51, 56, 73, 107, 53, 56, 58, 87, 63, 101, 82, 56, 67, 98, 85, 78, 49, 56, 72, 68, 63, 63, 59, 50, 86, 46, 57, 49, 68, 58, 59, 75, 85, 62, 105, 49, 58, 102, 65, 56, 73, 67, 46, 64, 80, 70, 74, 58, 62, 83, 68, 38, 55, 56, 69, 51, 56, 60, 60, 51, 59, 108, 71, 52, 110, 89, 75, 89, 81, 73, 68, 74, 64, 55, 67, 54, 68, 43, 65, 63, 60, 52, 73, 61, 75, 54, 68, 67, 61, 64, 46, 65, 58, 57, 57, 53, 63, 67, 51, 65, 79, 42, 56, 71, 72, 41, 77, 77, 56, 68, 50, 62, 72, 60, 62, 55, 82, 62, 65, 68, 74, 75, 39, 64, 59, 58, 91, 59, 72, 62, 48, 64, 75, 55, 69, 53, 85, 72, 74, 76, 77, 66, 64, 58, 60, 57, 72, 74, 80, 75, 60, 61, 84, 55, 72, 52, 71, 81, 66, 60, 48, 67, 83, 61, 70, 47, 69, 82, 61, 56, 63, 35, 72, 53, 80, 71, 60, 60, 46, 50, 71, 61, 65, 55, 83, 107, 65, 85, 60, 57, 59, 54, 59, 56, 42, 69, 48, 80, 57, 69, 58, 58, 90, 92, 63, 72, 64, 59, 56, 53, 52, 74, 53, 79, 65, 80, 82, 67, 72, 66, 109, 63, 75, 53, 65, 83, 60, 65, 68, 53, 83, 62, 84, 75, 53, 64, 58, 96, 67, 58, 67, 63, 50, 65, 56, 57, 56, 107, 62, 68, 74, 60, 86, 77, 66, 76, 80, 49, 50, 63, 76, 66, 84, 54, 78, 62, 68, 60, 67, 59, 59, 66, 64, 45, 47, 78, 68, 79, 60, 74, 43, 75, 56, 59, 61, 58, 63, 82, 53, 60, 83, 49, 60, 64, 75, 65, 68, 52, 54, 104, 54, 62, 52, 73, 52, 102, 57, 62, 60, 69, 64, 63, 88, 75, 64, 60, 57, 64, 60, 61, 73, 54, 62, 61, 60, 57, 64, 86, 60, 82, 64, 101, 57, 68, 59, 51, 48, 57, 65, 54, 63, 62, 50, 68, 73, 59, 52, 63, 69, 57, 67, 58, 63, 49, 59, 63, 46, 86, 66, 64, 58, 55, 70, 70, 69, 65, 72, 61, 65, 50, 66, 59, 101, 60, 52, 50, 60, 56, 43, 65, 52, 65, 62, 83, 56, 62, 56, 68, 74, 53, 69, 95, 73, 46, 57, 80, 75, 56, 57, 57, 58, 67, 69, 63, 50, 59, 52, 74, 99, 122, 63, 83, 70, 86, 60, 91, 53, 62, 64, 61, 65, 73, 54, 60, 63, 65, 71, 62, 62, 42, 66, 50, 83, 74, 71, 67, 
57, 47, 79, 62, 56, 70, 61, 59, 64, 52, 71, 64, 73, 50, 73, 75, 53, 53, 52, 65, 73, 61, 60, 72, 50, 70, 63, 72, 54, 83, 55, 58, 49, 53, 58, 73, 56, 58, 62, 76, 66, 50, 75, 72, 95, 57, 75, 53, 61, 58, 58, 51, 78, 65, 65, 68, 61, 58, 68, 78, 72, 81, 78, 73, 58, 74, 63, 56, 73, 66, 71, 67, 78, 54, 90, 98, 48, 73, 85, 71, 65, 80, 64, 66, 54, 72, 68, 64, 65, 73, 78, 67, 69, 48, 58, 68, 89, 88, 62, 91, 58, 90, 59, 72, 72, 71, 51, 69, 86, 56, 77, 63, 77, 81, 54, 62, 58, 90, 72, 65, 61, 73, 63, 54, 77, 86, 76, 74, 80, 62, 51, 59, 65, 54, 75, 72, 68, 54, 59, 68, 84, 53, 62, 66, 70, 56, 66, 65, 56, 65, 72, 66, 88, 66, 80, 79, 70, 74, 72, 58, 58, 94, 40, 78, 64, 51, 68, 63, 61, 61, 53, 54, 66, 71, 53, 75, 94, 65, 67, 59, 86, 53, 64, 62, 62, 74, 51, 66, 77, 72, 58, 51, 76, 78, 66, 58, 68, 67, 66, 69, 61, 52, 65, 58, 69, 95, 79, 55, 102, 68, 73, 69, 48, 61, 63, 80, 79, 47, 85, 71, 82, 58, 53, 77, 62, 50, 54, 66, 86, 63, 59, 84, 59, 74, 72, 58, 67, 47, 69, 56, 58, 60, 70, 49, 73, 56, 88, 61, 83, 64, 62, 62, 60, 62, 61, 58, 67, 56, 85, 68, 57, 63, 80, 73, 123, 72, 60, 34, 67, 60, 66, 61, 60, 68, 82, 93, 79, 87, 71, 51, 57, 45, 74, 65, 55, 69, 98, 81, 70, 70, 61, 81, 54, 47, 57, 57, 64, 59, 55, 87, 52, 55, 114, 83, 61, 56, 62, 63, 56, 63, 68, 69, 67, 47, 81, 83, 70, 61, 55, 67, 74, 50, 74, 80, 76, 53, 51, 56, 78, 70, 85, 71, 115, 58, 63, 65, 56, 69, 93, 60, 63, 60, 78, 58, 79, 74, 81, 56, 78, 41, 57, 44, 51, 57, 58, 61, 51, 79, 88, 49, 84, 75, 67, 85, 63, 69, 55, 59, 63, 74, 59, 67, 52, 101, 59, 60, 76, 63, 64, 64, 72, 67, 75, 41, 60, 74, 64, 57, 59, 64, 67, 47, 78, 58, 66, 71, 100, 51, 41, 48, 61, 62, 55, 48, 60, 53, 68, 57, 90, 73, 87, 66, 78, 71, 96, 65, 66, 57, 67, 73, 46, 62, 75, 48, 62, 84, 68, 69, 47, 51, 74, 57, 46, 74, 67, 70, 66, 64, 81, 65, 68, 53, 76, 71, 61, 73, 61, 63, 62, 69, 82, 73, 72, 46, 57, 57, 67, 85, 63, 59, 61, 74, 51, 56, 57, 64, 52, 57, 57, 63, 72, 54, 86, 66, 83, 63, 74, 55, 92, 90, 58, 70, 69, 75, 75, 57, 58, 82, 66, 96, 87, 44, 60, 56, 56, 76, 77, 57, 53, 58, 84, 55, 41, 67, 103, 66, 54, 70, 52, 68, 53, 70, 63, 57, 67, 58, 49, 62, 75, 79, 124, 131, 66, 47, 64, 57, 61, 58, 48, 58, 92, 56, 114, 68, 41, 52, 56, 76, 42, 58, 54, 58, 57, 58, 77, 64, 52, 65, 66, 77, 64, 66, 59, 69, 61, 54, 67, 78, 96, 63, 60, 65, 75, 99, 67, 64, 97, 65, 60, 66, 89, 58, 60, 72, 83, 103, 58, 73, 44, 101, 82, 70, 70, 70, 58, 64, 69, 82, 65, 59, 63, 61, 58, 52, 63, 57, 57, 92, 77, 59, 67, 91, 58, 75, 63, 52, 75, 95, 99, 69, 58, 63, 83, 57, 83, 67, 56, 102, 53, 59, 102, 51, 89, 56, 65, 70, 75, 78, 73, 71, 69, 69, 82, 111, 56, 70, 77, 56, 78, 78, 60, 68, 49, 74, 71, 94, 62, 53, 69, 77, 85, 54, 77, 64, 72, 105, 54, 54, 63, 64, 56, 66, 67, 56, 54, 65, 59, 51, 69, 72, 50, 51, 52, 73, 42, 64, 63, 64, 78, 71, 61, 65, 54, 60, 67, 61, 43, 96, 55, 58, 70, 69, 79, 44, 69, 91, 61, 66, 66, 54, 58, 62, 50, 50, 60, 62, 73, 57, 77, 58, 60, 71, 85, 58, 78, 80, 58, 50, 79, 49, 45, 51, 33, 56, 100, 60, 67, 61, 74, 57, 58, 64, 57, 93, 78, 102, 72, 74, 59, 76, 64, 59, 44, 89, 59, 67, 63, 54, 47, 48, 80, 52, 65, 54, 72, 55, 85, 60, 56, 95, 69, 65, 83, 79, 56, 73, 93, 63, 55, 67, 55, 83, 71, 141, 66, 66, 69, 53, 65, 55, 53, 74, 55, 58, 52, 62, 56, 52, 70, 55, 66, 49, 62, 75, 72, 64, 61, 64, 45, 65, 52, 104, 59, 63, 67, 69, 50, 56, 44, 57, 88, 46, 83, 84, 73, 61, 86, 62, 59, 58, 52, 63, 67, 54, 71, 65, 59, 61, 62, 62, 53, 71, 58, 55, 49, 93, 64, 86, 62, 66, 66, 87, 82, 56, 65, 57, 60, 57, 62, 50, 82, 64, 55, 64, 64, 46, 60, 66, 56, 69, 64, 54, 62, 58, 61, 66, 55, 61, 78, 93, 82, 61, 74, 68, 59, 71, 55, 68, 69, 62, 102, 68, 60, 
71, 68, 67, 66, 86, 62, 63, 56, 45, 43, 64, 77, 52, 62, 101, 62, 84, 60, 70, 61, 64, 59, 62, 68, 61, 57, 103, 76, 66, 66, 66, 63, 55, 54, 56, 64, 71, 59, 72, 67, 79, 51, 56, 55, 70, 79, 70, 55, 64, 60, 68, 78, 64, 48, 71, 64, 71, 53, 44, 63, 56, 48, 73, 75, 77, 45, 61, 72, 56, 59, 76, 61, 62, 58, 67, 88, 62, 51, 61, 63, 67, 54, 46, 59, 82, 60, 69, 90, 104, 51, 51, 70, 70, 59, 65, 55, 68, 67, 60, 74, 72, 66, 55, 66, 67, 53, 63, 42, 62, 69, 55, 93, 73, 58, 79, 70, 56, 48, 75, 61, 74, 54, 68, 59, 83, 65, 60, 51, 75, 63, 61, 66, 57, 62, 76, 88, 72, 63, 55, 60, 73, 86, 68, 84, 44, 51, 66, 59, 45, 73, 70, 66, 52, 57, 64, 58, 79, 72, 51, 61, 56, 81, 66, 82, 66, 53, 48, 66, 50, 53, 85, 59, 60, 57, 64, 60, 74, 60, 67, 54, 57, 71, 47, 59, 68, 51, 56, 63, 51, 73, 54, 72, 59, 69, 63, 52, 72, 46, 65, 56, 56, 68, 55, 35, 54, 63, 85, 65, 71, 50, 57, 79, 72, 54, 63, 69, 69, 63, 62, 61, 60, 56, 66, 59, 76, 78, 65, 55, 57, 64, 61, 79, 71, 68, 63, 64, 55, 72, 69, 64, 74, 59, 75, 62, 75, 71, 47, 65, 90, 59, 53, 54, 64, 61, 61, 65, 67, 67, 62, 52, 62, 64, 87, 62, 57, 57, 75, 70, 81, 111, 62, 66, 83, 59, 56, 66, 76, 63, 63, 64, 55, 62, 61, 114, 68, 64, 58, 70, 63, 89, 59, 58, 55, 71, 79, 62, 66, 60, 69, 53, 62, 65, 66, 42, 55, 63, 53, 104, 84, 63, 76, 65, 96, 44, 54, 50, 68, 61, 73, 87, 64, 60, 68, 79, 56, 56, 81, 60, 64, 77, 62, 82, 52, 61, 69, 61, 75, 85, 61, 60, 61, 64, 62, 49, 57, 88, 52, 75, 66, 62, 68, 44, 59, 44, 87, 98, 67, 64, 46, 80, 46, 53, 54, 54, 64, 105, 79, 61, 71, 78, 55, 61, 56, 61, 75, 62, 67, 75, 77, 67, 73, 51, 55, 76, 54, 52, 58, 60, 52, 55, 73, 75, 64, 84, 82, 60, 56, 62, 62, 63, 61, 66, 70, 69, 57, 82, 57, 66, 63, 56, 57, 65, 83, 106, 71, 57, 93, 54, 63, 64, 46, 63, 55, 46, 67, 63, 66, 60, 83, 65, 57, 68, 47, 70, 71, 78, 61, 69, 56, 84, 87, 64, 60, 65, 77, 51, 69, 60, 60, 77, 101, 70, 62, 68, 52, 60, 53, 107, 69, 63, 54, 66, 53, 65, 74, 60, 76, 68, 56, 83, 73, 80, 58, 110, 49, 67, 58, 70, 76, 82, 58, 69, 56, 54, 71, 65, 52, 47, 85, 67, 52, 55, 64, 59, 56, 55, 57, 63, 61, 68, 68, 56, 64, 51, 71, 61, 59, 60, 66, 72, 69, 54, 67, 53, 78, 69, 49, 76, 57, 56, 53, 65, 68, 55, 88, 54, 57, 75, 62, 76, 69, 67, 65, 81, 45, 64, 62, 75, 51, 91, 60, 125, 65, 67, 54, 65, 62, 59, 64, 65, 56, 54, 92, 70, 69, 61, 61, 59, 62, 63, 54, 43, 75, 54, 90, 70, 62, 58, 105, 56, 55, 64, 64, 141, 81, 84, 82, 70, 65, 80, 60, 119, 58, 38, 66, 68, 52, 57, 77, 67, 71, 100, 74, 108, 65, 57, 65, 62, 88, 89, 63, 74, 62, 76, 58, 50, 56, 82, 59, 71, 74, 72, 59, 79, 71, 70, 64, 64, 67, 61, 74, 66, 59, 79, 54, 56, 72, 60, 78, 54, 69, 97, 70, 73, 52, 58, 62, 113, 58, 50, 67, 53, 61, 54, 67, 41, 69, 60, 74, 58, 53, 79, 53, 63, 109, 77, 74, 49, 108, 98, 76, 115, 60, 64, 85, 96, 56, 37, 51, 66, 60, 49, 83, 101, 65, 89, 59, 57, 52, 61, 61, 73, 47, 77, 57, 81, 58, 47, 69, 62, 91, 65, 57, 67, 68, 69, 76, 75, 71, 55, 73, 59, 72, 55, 69, 73, 59, 98, 46, 56, 67, 73, 77, 55, 64, 59, 88, 82, 60, 49, 84, 66, 60, 67, 50, 60, 75, 69, 61, 58, 49, 75, 59, 75, 61, 68, 83, 74, 65, 50, 56, 67, 63, 56, 44, 48, 64, 47, 45, 95, 102, 91, 67, 95, 63, 65, 63, 50, 88, 64, 110, 84, 70, 84, 49, 63, 53, 54, 65, 48, 88, 62, 89, 85, 105, 63, 67, 60, 66, 56, 82, 59, 75, 58, 72, 87, 72, 81, 70, 93, 64, 58, 72, 89, 71, 64, 88, 61, 53, 71, 54, 70, 59, 56, 55, 66, 51, 53, 44, 64, 62, 81, 46, 51, 69, 72, 70, 65, 97, 61, 38, 62, 75, 59, 56, 48, 86, 66, 63, 58, 73, 86, 67, 55, 50, 68, 72, 55, 61, 77, 51, 82, 51, 60, 54, 66, 71, 77, 66, 51, 66, 61, 66, 59, 61, 60, 67, 71, 63, 64, 78, 49, 47, 80, 63, 48, 59, 48, 56, 92, 69, 75, 53, 73, 47, 57, 78, 58, 61, 64, 56, 75, 
79, 103, 95, 62, 78, 50, 54, 49, 85, 63, 60, 66, 70, 75, 83, 62, 67, 50, 69, 70, 66, 90, 39, 52, 57, 60, 70, 61, 78, 62, 51, 69, 86, 77, 59, 48, 108, 73, 60, 53, 70, 66, 68, 58, 72, 60, 58, 66, 63, 68, 73, 54, 66, 71, 57, 60, 57, 86, 62, 66, 46, 80, 121, 69, 56, 76, 55, 50, 77, 103, 62, 52, 89, 82, 62, 70, 52, 75, 62, 63, 56, 59, 68, 68, 55, 67, 101, 69, 63, 54, 52, 62, 49, 76, 73, 50, 67, 65, 68, 81, 85, 80, 57, 51, 65, 67, 60, 52, 67, 75, 59, 53, 75, 61, 76, 62, 73, 54, 55, 60, 50, 48, 76, 77, 47, 60, 55, 77, 53, 64, 59, 59, 56, 40, 59, 62, 56, 70, 54, 48, 69, 82, 52, 56, 66, 45, 79, 64, 70, 50, 45, 58, 61, 40, 64, 64, 67, 65, 65, 37, 56, 55, 48, 70, 45, 68, 73, 65, 66, 43, 80, 71, 69, 77, 52, 49, 56, 54, 74, 88, 52, 50, 65, 78, 69, 59, 60, 57, 54, 54, 63, 57, 66, 64, 78, 65, 64, 56, 69, 69, 58, 56, 70, 54, 81, 63, 60, 97, 61, 61, 63, 75, 53, 72, 98, 91, 83, 79, 47, 60, 75, 67, 48, 61, 59, 52, 65, 56, 53, 77, 76, 53, 75, 94, 71, 56, 52, 62, 62, 53, 58, 81, 67, 78, 49, 71, 59, 52, 70, 52, 75, 74, 50, 70, 48, 50, 86, 72, 49, 68, 48, 58, 73, 63, 64, 60, 58, 63, 58, 84, 53, 51, 75, 64, 61, 53, 60, 47, 48, 56, 73, 54, 41, 60, 70, 65, 48, 70, 78, 50, 72, 85, 59, 51, 62, 51, 47, 50, 82, 52, 56, 57, 84, 64, 69, 51, 78, 41, 32, 72, 83, 61, 129, 69, 65, 56, 64, 56, 53, 59, 65, 61, 82, 79, 50, 68, 65, 64, 44, 59, 47, 75, 45, 47, 99, 66, 60, 62, 58, 91, 61, 57, 43, 102, 67, 55, 57, 70, 60, 61, 71, 71, 124, 64, 57, 56, 58, 148, 81, 50, 66, 85, 80, 70, 98, 71, 73, 72, 78, 62, 82, 54, 48, 62, 54, 86, 71, 72, 102, 59, 60, 58, 63, 63, 105, 54, 59, 79, 60, 61, 42, 54, 75, 71, 75, 66, 112, 72, 41, 75, 53, 63, 56, 50, 54, 50, 113, 79, 67, 47, 61, 51, 63, 70, 60, 58, 68, 60, 56, 101, 60, 67, 68, 63, 54, 70, 45, 62, 65, 53, 65, 46, 65, 61, 79, 64, 50, 73, 107, 63, 67, 72, 79, 60, 54, 61, 96, 59, 52, 63, 28, 63, 58, 46, 52, 51, 50, 69, 75, 87, 70, 55, 71, 48, 85, 53, 63, 65, 85, 73, 87, 50, 60, 56, 56, 74, 74, 58, 69, 66, 72, 51, 56, 94, 72, 53, 48, 73, 78, 62, 53, 41, 65, 79, 100, 63, 79, 60, 70, 80, 52, 73, 47, 73, 83, 71, 59, 47, 63, 57, 72, 60, 57, 64, 56, 63, 70, 79, 103, 91, 75, 64, 46, 64, 67, 75, 76, 52, 53, 65, 81, 81, 55, 59, 60, 60, 85, 45, 52, 58, 62, 87, 47, 66, 49, 62, 56, 87, 68, 58, 46, 69, 79, 76, 57, 64, 55, 58, 90, 53, 85, 74, 51, 60, 69, 61, 60, 62, 52, 79, 83, 54, 77, 68, 55, 78, 53, 73, 60, 52, 64, 56, 78, 78, 70, 92, 54, 72, 70, 82, 56, 78, 67, 48, 58, 50, 59, 71, 61, 85, 55, 55, 74, 77, 59, 43, 49, 68, 42, 77, 73, 73, 69, 82, 50, 77, 38, 63, 59, 68, 75, 60, 58, 71, 50, 82, 59, 64, 58, 49, 87, 69, 64, 53, 54, 54, 51, 60, 54, 61, 40, 127, 54, 73, 56, 78, 58, 63, 69, 52, 63, 61, 99, 75, 58, 75, 73, 46, 64, 75, 61, 56, 63, 54, 60, 54, 52, 72, 50, 65, 62, 63, 56, 55, 108, 38, 38, 49, 73, 98, 58, 53, 51, 65, 63, 82, 76, 62, 80, 37, 65, 57, 57, 71, 49, 55, 41, 57, 64, 77, 61, 67, 51, 42, 61, 117, 62, 68, 71, 61, 64, 69, 97, 74, 94, 55, 59, 63, 67, 56, 49, 59, 73, 59, 61, 101, 54, 67, 89, 61, 61, 52, 63, 74, 63, 54, 74, 66, 73, 51, 93, 72, 61, 63, 51, 66, 49, 83, 51, 55, 64, 55, 79, 60, 51, 80, 70, 47, 73, 65, 64, 56, 75, 62, 67, 49, 99, 62, 80, 75, 49, 61, 78, 69, 54, 47, 63, 74, 62, 88, 57, 64, 74, 69, 102, 71, 56, 113, 54, 59, 80, 65, 79, 79, 53, 73, 59, 81, 79, 67, 69, 69, 70, 100, 86, 68, 57, 57, 71, 64, 97, 70, 62, 67, 58, 58, 64, 97, 63, 60, 67, 75, 62, 66, 43, 54, 63, 88, 58, 57, 81, 60, 62, 59, 74, 81, 63, 56, 43, 68, 80, 54, 74, 53, 58, 41, 50, 82, 45, 66, 70, 71, 64, 61, 72, 67, 65, 90, 64, 69, 75, 65, 86, 53, 69, 53, 94, 59, 63, 65, 78, 63, 63, 60, 67, 69, 78, 52, 65, 60, 60, 82, 
55, 60, 66, 73, 81, 58, 69, 54, 60, 72, 68, 66, 95, 75, 79, 56, 45, 68, 62, 52, 63, 75, 79, 49, 63, 59, 56, 76, 67, 92, 92, 42, 62, 71, 65, 58, 63, 73, 74, 81, 65, 54, 70, 60, 74, 69, 65, 53, 69, 74, 71, 68, 55, 68, 55, 73, 128, 64, 55, 103, 55, 72, 72, 59, 51, 61, 58, 62, 50, 57, 56, 67, 109, 66, 64, 50, 47, 53, 62, 53, 67, 51, 58, 59, 64, 84, 45, 95, 74, 56, 51, 57, 68, 73, 132, 62, 72, 58, 58, 54, 49, 76, 66, 74, 58, 50, 58, 91, 63, 58, 76, 66, 66, 80, 59, 98, 51, 57, 60, 83, 51, 63, 55, 52, 77, 110, 57, 70, 47, 73, 69, 71, 50, 83, 65, 58, 132, 63, 64, 50, 68, 76, 66, 87, 66, 69, 58, 63, 54, 55, 71, 65, 51, 76, 70, 64, 60, 52, 51, 59, 65, 65, 106, 64, 68, 44, 57, 64, 53, 60, 70, 65, 55, 63, 66, 76, 86, 66, 52, 68, 69, 54, 63, 54, 74, 74, 64, 66, 52, 45, 75, 72, 79, 53, 57, 75, 52, 70, 86, 55, 90, 70, 66, 91, 64, 59, 50, 68, 63, 59, 61, 59, 66, 61, 59, 60, 80, 59, 64, 68, 56, 68, 66, 51, 69, 59, 57, 64, 49, 59, 108, 64, 90, 57, 58, 50, 60, 56, 66, 55, 58, 88, 67, 60, 65, 71, 66, 57, 113, 67, 94, 84, 62, 42, 61, 63, 55, 61, 82, 77, 70, 56, 73, 55, 67, 61, 61, 44, 93, 55, 59, 65, 45, 73, 71, 54, 46, 64, 69, 52, 76, 74, 70, 52, 65, 60, 67, 74, 102, 87, 94, 49, 68, 56, 52, 56, 67, 58, 79, 59, 51, 54, 59, 67, 72, 57, 56, 66, 63, 52, 62, 68, 64, 55, 55, 72, 58, 60, 52, 66, 84, 80, 59, 69, 63, 46, 60, 72, 57, 53, 70, 61, 55, 58, 63, 73, 51, 65, 75, 66, 57, 35, 64, 62, 70, 67, 76, 71, 64, 56, 59, 44, 75, 55, 82, 57, 51, 64, 79, 49, 54, 100, 62, 70, 55, 64, 61, 62, 62, 73, 67, 62, 56, 66, 63, 59, 51, 61, 32, 67, 67, 71, 69, 67, 81, 77, 67, 67, 94, 48, 63, 53, 56, 80, 51, 68, 56, 45, 50, 67, 53, 75, 64, 74, 35, 56, 56, 55, 61, 110, 71, 62, 59, 54, 68, 49, 57, 66, 46, 79, 53, 52, 67, 47, 69, 69, 75, 61, 58, 71, 108, 70, 60, 74, 53, 55, 53, 48, 78, 48, 58, 59, 69, 70, 87, 58, 49, 55, 67, 57, 104, 74, 83, 57, 99, 53, 54, 55, 42, 63, 46, 51, 80, 63, 86, 106, 42, 50, 47, 58, 61, 52, 64, 68, 76, 68, 64, 74, 49, 63, 73, 68, 61, 48, 86, 101, 74, 75, 62, 64, 58, 74, 87, 61, 100, 78, 54, 86, 70, 106, 74, 69, 76, 73, 59, 69, 58, 73, 63, 83, 76, 77, 81, 49, 84, 66, 53, 60, 63, 72, 55, 67, 59, 56, 62, 69, 77, 57, 64, 63, 70, 61, 51, 60, 61, 52, 58, 50, 60, 48, 52, 84, 53, 54, 61, 100, 92, 51, 68, 74, 74, 68, 62, 60, 54, 42, 62, 64, 56, 74, 69, 54, 55, 66, 64, 60, 63, 61, 44, 67, 65, 114, 82, 62, 56, 76, 95, 65, 75, 72, 96, 65, 71, 47, 68, 46, 66, 46, 57, 60, 49, 61, 65, 70, 54, 61, 62, 71, 60, 62, 98, 79, 66, 64, 52, 68, 52, 59, 111, 55, 64, 89, 62, 50, 68, 64, 41, 91, 53, 75, 75, 72, 78, 65, 63, 64, 80, 46, 52, 69, 76, 105, 67, 52, 65, 60, 42, 59, 54, 65, 55, 58, 73, 60, 59, 45, 80, 69, 50, 110, 83, 59, 112, 62, 63, 61, 61, 55, 52, 62, 66, 69, 78, 68, 58, 51, 71, 52, 137, 58, 78, 80, 62, 65, 55, 47, 57, 57, 53, 67, 73, 77, 55, 58, 81, 93, 81, 81, 59, 54, 68, 55, 79, 57, 59, 75, 48, 67, 69, 61, 92, 74, 56, 61, 63, 64, 77, 70, 73, 63, 52, 54, 56, 58, 61, 52, 75, 129, 72, 51, 70, 68, 60, 64, 50, 44, 53, 60, 71, 61, 72, 43, 56, 73, 66, 61, 76, 72, 66, 67, 64, 85, 61, 67, 86, 88, 63, 73, 68, 113, 46, 101, 79, 66, 79, 57, 56, 47, 45, 61, 65, 56, 78, 56, 58, 57, 78, 56, 70, 66, 62, 80, 88, 62, 118, 52, 61, 75, 53, 60, 54, 62, 67, 69, 47, 81, 67, 60, 53, 41, 58, 73, 82, 94, 76, 66, 61, 61, 89, 58, 52, 52, 58, 69, 62, 91, 81, 63, 54, 55, 51, 53, 72, 61, 53, 50, 79, 53, 45, 61, 80, 84, 58, 58, 53, 69, 48, 45, 55, 92, 67, 70, 45, 75, 122, 79, 71, 59, 74, 63, 52, 59, 57, 73, 55, 46, 85, 72, 81, 80, 64, 76, 109, 82, 76, 60, 80, 69, 49, 57, 75, 66, 48, 58, 71, 64, 72, 59, 54, 50, 59, 59, 66, 60, 64, 63, 51, 62, 82, 61, 61, 
67, 83, 56, 46, 66, 43, 50, 58, 60, 84, 91, 61, 52, 62, 86, 67, 85, 58, 63, 58, 48, 48, 78, 69, 52, 63, 57, 64, 66, 83, 54, 72, 81, 96, 62, 94, 66, 71, 69, 63, 60, 54, 60, 61, 52, 79, 68, 48, 54, 61, 61, 55, 54, 101, 59, 64, 76, 62, 74, 47, 63, 64, 69, 60, 64, 51, 64, 50, 52, 66, 57, 56, 72, 51, 59, 62, 52, 59, 112, 70, 59, 63, 92, 66, 65, 63, 52, 50, 78, 74, 82, 62, 84, 86, 73, 70, 78, 71, 81, 68, 57, 87, 63, 67, 56, 98, 77, 69, 55, 61, 89, 83, 63, 60, 58, 70, 61, 59, 56, 69, 84, 58, 67, 128, 66, 59, 55, 56, 59, 63, 55, 63, 99, 53, 68, 74, 69, 67, 55, 69, 56, 35, 63, 70, 41, 74, 50, 83, 66, 67, 83, 42, 80, 86, 49, 60, 55, 76, 69, 54, 63, 60, 70, 64, 50, 66, 54, 65, 65, 58, 76, 88, 66, 72, 64, 69, 113, 72, 64, 57, 78, 56, 60, 52, 101, 53, 73, 67, 68, 81, 86, 67, 72, 68, 64, 65, 82, 78, 62, 77, 84, 60, 63, 65, 81, 60, 59, 55, 76, 83, 45, 55, 72, 83, 78, 67, 69, 70, 66, 56, 58, 75, 71, 71, 67, 54, 47, 102, 60, 62, 58, 73, 46, 68, 53, 57, 55, 82, 68, 68, 57, 55, 60, 58, 75, 94, 58, 54, 59, 58, 49, 60, 60, 116, 61, 101, 71, 60, 42, 55, 65, 66, 84, 97, 60, 90, 54, 57, 58, 69, 60, 53, 72, 62, 54, 66, 50, 49, 49, 79, 53, 55, 47, 59, 67, 77, 67, 60, 67, 65, 63, 54, 67, 80, 52, 70, 57, 81, 88, 53, 52, 57, 68, 104, 63, 80, 87, 56, 58, 48, 55, 58, 64, 57, 51, 63, 75, 62, 43, 68, 78, 66, 64, 60, 51, 58, 50, 59, 63, 56, 65, 45, 96, 73, 56, 110, 58, 52, 63, 61, 56, 92, 74, 69, 65, 65, 80, 82, 49, 69, 53, 58, 49, 54, 55, 82, 62, 67, 72, 81, 62, 62, 61, 57, 72, 51, 58, 77, 86, 76, 47, 52, 56, 135, 63, 66, 69, 52, 61, 46, 69, 71, 62, 72, 60, 67, 71, 74, 62, 70, 64, 70, 55, 74, 71, 55, 50, 70, 98, 86, 54, 109, 46, 74, 62, 64, 54, 57, 61, 64, 66, 65, 48, 59, 70, 58, 57, 55, 50, 39, 69, 81, 62, 66, 39, 72, 65, 72, 54, 51, 91, 92, 89, 88, 61, 84, 64, 59, 75, 70, 69, 62, 66, 71, 38, 50, 50, 56, 67, 58, 83, 68, 75, 59, 65, 77, 67, 61, 63, 59, 59, 74, 55, 57, 67, 99, 71, 74, 64, 58, 111, 61, 51, 58, 47, 58, 71, 73, 80, 68, 58, 46, 59, 85, 84, 82, 60, 68, 58, 61, 68, 63, 54, 69, 66, 53, 59, 66, 75, 66, 47, 69, 56, 59, 92, 54, 84, 60, 79, 68, 80, 73, 67, 57, 59, 61, 61, 53, 56, 70, 68, 82, 58, 80, 58, 71, 57, 52, 63, 55, 67, 103, 63, 49, 93, 61, 70, 51, 77, 75, 66, 85, 64, 61, 82, 63, 62, 54, 68, 44, 66, 57, 66, 55, 89, 58, 45, 55, 93, 54, 69, 57, 53, 60, 64, 60, 57, 51, 70, 62, 61, 77, 49, 54, 62, 94, 41, 47, 64, 55, 58, 67, 86, 84, 86, 66, 62, 92, 71, 56, 72, 64, 47, 59, 67, 70, 58, 64, 54, 69, 78, 64, 65, 50, 63, 56, 50, 64, 65, 70, 51, 50, 99, 54, 52, 81, 52, 52, 68, 58, 56, 49, 67, 54, 69, 59, 71, 76, 65, 62, 70, 56, 58, 70, 54, 57, 70, 90, 65, 60, 68, 89, 97, 67, 86, 67, 59, 76, 61, 55, 58, 55, 53, 66, 90, 57, 77, 68, 82, 56, 58, 68, 67, 77, 67, 50, 59, 62, 54, 80, 81, 57, 66, 73, 69, 51, 58, 50, 52, 60, 59, 68, 57, 69, 63, 59, 67, 43, 98, 89, 59, 68, 78, 49, 63, 52, 63, 59, 48, 75, 66, 58, 71, 71, 92, 53, 97, 68, 67, 58, 53, 67, 56, 61, 115, 75, 58, 63, 52, 95, 52, 50, 63, 95, 97, 64, 65, 61, 62, 70, 53, 106, 66, 56, 79, 65, 58, 47, 73, 54, 57, 46, 59, 67, 70, 57, 60, 69, 69, 76, 68, 83, 70, 41, 64, 83, 59, 64, 60, 62, 61, 41, 55, 76, 64, 66, 82, 71, 51, 59, 33, 58, 75, 45, 83, 52, 61, 59, 46, 76, 53, 62, 43, 53, 51, 51, 73, 45, 109, 48, 79, 75, 77, 60, 64, 87, 53, 69, 60, 52, 65, 65, 76, 66, 63, 68, 64, 49, 61, 70, 52, 82, 63, 69, 82, 84, 60, 48, 59, 50, 66, 94, 52, 69, 48, 58, 60, 90, 63, 61, 65, 50, 70, 67, 60, 56, 70, 45, 53, 79, 66, 68, 64, 66, 69, 74, 71, 95, 61, 61, 52, 63, 44, 54, 67, 79, 50, 65, 75, 43, 63, 64, 78, 58, 70, 65, 68, 82, 89, 58, 53, 64, 67, 46, 58, 60, 62, 61, 55, 68, 53, 80, 63, 57, 
67, 66, 66, 59, 59, 63, 45, 68, 113, 36, 87, 59, 52, 43, 72, 66, 67, 70, 53, 57, 53, 67, 78, 54, 79, 106, 54, 75, 76, 76, 61, 50, 69, 91, 63, 56, 64, 51, 70, 71, 57, 80, 51, 79, 43, 72, 72, 60, 43, 98, 63, 54, 91, 71, 61, 43, 74, 48, 65, 72, 52, 54, 57, 57, 93, 65, 74, 73, 67, 74, 60, 61, 49, 56, 43, 95, 43, 66, 71, 67, 52, 54, 57, 61, 68, 64, 82, 96, 60, 74, 69, 61, 58, 124, 68, 58, 60, 72, 61, 56, 67, 67, 55, 90, 57, 51, 53, 58, 87, 60, 63, 68, 66, 66, 70, 62, 59, 65, 66, 59, 52, 58, 42, 57, 51, 64, 78, 52, 74, 64, 53, 69, 59, 72, 50, 66, 66, 59, 61, 58, 75, 69, 51, 79, 79, 64, 58, 52, 48, 57, 98, 61, 63, 52, 44, 76, 55, 53, 57, 61, 71, 65, 62, 58, 63, 59, 58, 87, 58, 59, 84, 63, 57, 101, 60, 54, 54, 84, 69, 66, 60, 73, 54, 67, 83, 74, 59, 83, 61, 54, 71, 77, 71, 118, 52, 64, 57, 68, 81, 60, 53, 60, 80, 51, 73, 58, 72, 62, 63, 67, 59, 50, 66, 68, 48, 58, 71, 50, 89, 70, 57, 76, 56, 67, 68, 62, 71, 49, 58, 61, 64, 82, 50, 53, 57, 62, 74, 39, 58, 85, 57, 52, 63, 57, 64, 90, 81, 51, 64, 67, 64, 71, 62, 62, 89, 68, 70, 65, 71, 57, 64, 51, 53, 147, 60, 60, 53, 80, 59, 54, 67, 91, 97, 61, 87, 61, 57, 72, 60, 85, 62, 59, 61, 65, 64, 87, 80, 67, 56, 71, 72, 68, 54, 61, 42, 56, 102, 55, 67, 65, 77, 69, 58, 83, 80, 59, 48, 72, 56, 67, 79, 104, 67, 85, 57, 65, 102, 80, 73, 61, 85, 58, 57, 64, 40, 54, 69, 70, 67, 74, 96, 61, 74, 58, 67, 62, 45, 67, 62, 55, 69, 50, 61, 65, 68, 67, 78, 71, 75, 60, 56, 40, 52, 67, 64, 117, 86, 57, 64, 73, 51, 48, 71, 61, 62, 66, 77, 75, 94, 51, 52, 45, 45, 67, 59, 64, 39, 70, 87, 43, 73, 60, 75, 66, 52, 61, 56, 66, 72, 50, 64, 50, 48, 133, 64, 54, 73, 56, 61, 47, 69, 59, 59, 67, 65, 81, 66, 88, 82, 61, 72, 74, 66, 68, 62, 70, 71, 60, 43, 96, 41, 51, 83, 54, 59, 64, 57, 51, 68, 64, 68, 52, 62, 84, 59, 68, 71, 62, 49, 49, 95, 59, 55, 104, 64, 52, 66, 60, 69, 66, 58, 54, 64, 56, 59, 71, 69, 79, 80, 62, 46, 65, 62, 62, 56, 64, 66, 61, 60, 61, 68, 55, 84, 40, 56, 51, 62, 102, 65, 62, 58, 87, 56, 38, 62, 60, 70, 67, 60, 80, 72, 72, 94, 77, 63, 79, 55, 72, 54, 62, 52, 78, 72, 56, 51, 69, 57, 84, 72, 68, 66, 54, 80, 115, 43, 55, 72, 62, 54, 56, 52, 73, 74, 71, 68, 49, 70, 57, 50, 72, 64, 95, 64, 55, 62, 47, 77, 70, 81, 78, 57, 122, 71, 48, 54, 52, 61, 78, 65, 61, 48, 72, 72, 53, 65, 65, 50, 60, 63, 58, 77, 90, 82, 61, 99, 61, 72, 61, 46, 77, 70, 92, 70, 73, 59, 88, 75, 52, 50, 57, 64, 68, 66, 47, 63, 56, 53, 63, 53, 61, 80, 68, 75, 66, 61, 80, 62, 50, 54, 52, 86, 56, 59, 65, 75, 81, 67, 58, 54, 55, 57, 47, 53, 52, 66, 63, 61, 39, 74, 62, 68, 71, 42, 90, 57, 68, 58, 58, 66, 68, 51, 48, 81, 68, 47, 59, 65, 78, 73, 54, 75, 73, 59, 46, 64, 60, 72, 62, 59, 49, 89, 64, 61, 71, 81, 58, 83, 69, 57, 70, 63, 71, 52, 85, 54, 59, 64, 55, 80, 59, 58, 75, 50, 50, 52, 66, 58, 59, 67, 82, 82, 54, 66, 60, 77, 60, 63, 58, 53, 79, 52, 71, 87, 73, 50, 78, 68, 70, 44, 64, 64, 52, 68, 60, 80, 88, 81, 58, 74, 90, 59, 54, 53, 83, 46, 57, 64, 61, 79, 66, 54, 58, 71, 59, 75, 55, 64, 78, 56, 54, 64, 76, 49, 77, 65, 59, 58, 65, 59, 54, 60, 52, 63, 83, 70, 90, 69, 56, 72, 57, 81, 68, 59, 42, 53, 67, 59, 55, 60, 71, 60, 59, 75, 68, 70, 47, 60, 88, 57, 58, 88, 51, 61, 92, 68, 70, 65, 94, 66, 65, 73, 64, 106, 51, 63, 69, 69, 63, 58, 55, 64, 57, 68, 68, 64, 67, 80, 48, 64, 64, 74, 60, 62, 79, 62, 48, 53, 74, 60, 67, 61, 57, 87, 49, 83, 64, 84, 67, 42, 50, 47, 46, 58, 72, 69, 54, 68, 61, 48, 77, 46, 84, 72, 60, 63, 68, 71, 56, 41, 63, 44, 64, 73, 69, 95, 56, 63, 80, 81, 65, 70, 60, 72, 57, 137, 80, 79, 63, 53, 66, 71, 61, 52, 117, 76, 61, 78, 82, 67, 62, 44, 86, 64, 61, 53, 61, 90, 59, 50, 72, 69, 54, 64, 66, 
69, 61, 65, 64, 47, 58, 65, 50, 65, 39, 86, 76, 77, 66, 60, 53, 85, 61, 81, 63, 48, 59, 68, 58, 68, 58, 43, 71, 57, 60, 95, 62, 56, 70, 65, 60, 52, 63, 61, 49, 43, 63, 57, 50, 104, 58, 76, 60, 89, 63, 59, 84, 62, 69, 50, 71, 60, 78, 61, 56, 61, 93, 59, 54, 44, 53, 76, 104, 61, 57, 60, 82, 58, 72, 54, 44, 60, 68, 52, 67, 61, 76, 53, 119, 77, 49, 77, 63, 74, 83, 74, 57, 75, 101, 63, 54, 62, 53, 71, 91, 73, 59, 80, 79, 63, 75, 64, 57, 72, 49, 73, 75, 60, 46, 74, 48, 88, 82, 44, 66, 62, 73, 62, 50, 111, 60, 73, 76, 55, 56, 73, 52, 57, 58, 73, 83, 78, 67, 78, 74, 60, 67, 55, 96, 83, 60, 58, 86, 97, 60, 77, 56, 69, 89, 57, 56, 63, 79, 65, 71, 92, 63, 85, 70, 52, 103, 70, 51, 78, 53, 73, 63, 50, 75, 79, 72, 68, 69, 69, 64, 52, 72, 77, 77, 58, 72, 44, 62, 88, 96, 67, 76, 83, 90, 24, 66, 59, 61, 53, 71, 62, 64, 61, 47, 72, 72, 64, 60, 60, 79, 61, 70, 47, 59, 61, 87, 64, 54, 59, 72, 70, 59, 63, 62, 63, 75, 62, 89, 82, 52, 90, 65, 55, 104, 81, 55, 59, 88, 61, 62, 72, 65, 65, 57, 60, 82, 52, 46, 55, 46, 55, 73, 50, 59, 99, 70, 71, 57, 89, 57, 77, 68, 63, 59, 86, 65, 59, 58, 73, 80, 74, 68, 76, 89, 52, 52, 64, 57, 65, 55, 79, 57, 70, 52, 58, 75, 47, 68, 52, 78, 102, 59, 75, 62, 29, 58, 67, 70, 65, 51, 79, 55, 65, 57, 65, 73, 67, 69, 65, 60, 69, 55, 82, 51, 72, 79, 80, 97, 55, 97, 67, 56, 61, 52, 61, 71, 60, 68, 58, 65, 52, 59, 65, 87, 63, 60, 41, 62, 53, 61, 61, 47, 73, 68, 54, 49, 39, 71, 83, 55, 77, 54, 51, 56, 55, 73, 59, 56, 108, 50, 50, 68, 62, 64, 69, 86, 69, 56, 57, 73, 86, 56, 55, 70, 72, 69, 83, 74, 72, 72, 49, 77, 66, 60, 53, 93, 72, 50, 77, 76, 52, 72, 54, 66, 54, 62, 47, 60, 71, 58, 51, 77, 48, 110, 54, 68, 61, 70, 63, 78, 62, 81, 60, 52, 72, 69, 72, 51, 49, 68, 59, 48, 63, 42, 48, 107, 69, 75, 59, 68, 72, 65, 69, 89, 57, 57, 110, 64, 64, 67, 69, 59, 50, 88, 62, 49, 62, 68, 71, 64, 50, 65, 62, 61, 73, 58, 108, 66, 59, 62, 100, 48, 90, 48, 88, 69, 63, 62, 74, 68, 79, 69, 82, 52, 63, 59, 70, 64, 56, 77, 81, 63, 65, 54, 80, 83, 84, 68, 57, 57, 39, 85, 72, 63, 45, 50, 62, 70, 57, 80, 55, 100, 46, 52, 54, 58, 49, 72, 106, 56, 89, 56, 73, 73, 79, 57, 51, 68, 64, 61, 60, 66, 67, 52, 92, 44, 72, 52, 39, 57, 52, 67, 73, 124, 57, 88, 63, 65, 58, 77, 75, 67, 74, 85, 62, 51, 51, 71, 141, 73, 64, 57, 53, 71, 79, 45, 64, 86, 73, 60, 54, 68, 54, 64, 74, 75, 56, 61, 63, 65, 45, 58, 71, 70, 47, 70, 65, 53, 65, 50, 72, 64, 67, 64, 67, 75, 78, 50, 73, 56, 57, 72, 72, 52, 58, 64, 55, 95, 64, 60, 55, 71, 69, 52, 56, 68, 64, 74, 55, 58, 52, 52, 77, 72, 69, 75, 58, 71, 59, 49, 58, 62, 48, 75, 59, 52, 80, 79, 44, 80, 76, 66, 71, 65, 69, 72, 53, 73, 50, 67, 55, 56, 53, 65, 64, 66, 59, 92, 89, 63, 55, 72, 71, 130, 60, 42, 78, 57, 57, 62, 75, 60, 99, 71, 69, 62, 68, 66, 62, 64, 53, 95, 59, 53, 56, 49, 55, 69, 90, 74, 48, 137, 68, 61, 59, 77, 46, 73, 74, 51, 47, 63, 67, 59, 59, 42, 36, 71, 62, 74, 93, 109, 109, 48, 70, 60, 71, 65, 63, 59, 48, 100, 54, 33, 59, 103, 46, 52, 52, 62, 45, 62, 62, 74, 59, 68, 65, 44, 33, 52, 72, 54, 70, 45, 53, 80, 71, 59, 82, 75, 76, 69, 71, 95, 67, 67, 56, 68, 78, 70, 82, 74, 73, 65, 71, 78, 66, 77, 64, 80, 59, 72, 61, 93, 60, 52, 56, 64, 69, 61, 58, 64, 81, 90, 60, 67, 56, 65, 80, 80, 59, 90, 63, 60, 61, 74, 83, 45, 57, 52, 85, 64, 55, 60, 66, 67, 60, 72, 55, 80, 53, 76, 90, 63, 88, 49, 70, 69, 67, 48, 64, 93, 58, 63, 83, 60, 49, 79, 63, 57, 77, 94, 62, 71, 65, 77, 67, 55, 58, 76, 59, 54, 79, 72, 68, 67, 71, 49, 112, 52, 61, 62, 58, 52, 83, 49, 54, 54, 62, 82, 72, 66, 59, 101, 55, 85, 64, 69, 69, 76, 67, 51, 49, 62, 53, 78, 61, 75, 90, 55, 67, 94, 82, 90, 62, 67, 57, 75, 43, 60, 59, 71, 
57, 45, 76, 82, 64, 45, 48, 60, 91, 55, 72, 83, 108, 82, 73, 76, 48, 75, 59, 49, 71, 56, 63, 57, 50, 70, 108, 70, 89, 59, 50, 66, 62, 58, 46, 110, 48, 65, 64, 60, 122, 51, 75, 62, 48, 76, 69, 47, 96, 52, 57, 75, 63, 49, 78, 55, 62, 59, 42, 63, 71, 65, 64, 46, 47, 66, 56, 98, 47, 78, 60, 43, 44, 57, 71, 63, 55, 76, 73, 60, 66, 56, 79, 46, 70, 53, 76, 74, 55, 47, 87, 69, 60, 63, 47, 63, 89, 60, 53, 78, 80, 49, 73, 65, 100, 70, 60, 52, 50, 65, 50, 56, 69, 71, 62, 79, 65, 60, 60, 64, 44, 96, 64, 55, 50, 58, 43, 59, 82, 67, 74, 52, 79, 65, 80, 52, 65, 65, 72, 63, 51, 61, 69, 66, 41, 73, 60, 61, 59, 68, 59, 55, 69, 58, 59, 81, 65, 63, 59, 58, 75, 77, 77, 72, 69, 64, 52, 61, 79, 52, 41, 75, 75, 66, 52, 46, 88, 97, 62, 65, 68, 54, 86, 44, 58, 63, 73, 75, 62, 61, 54, 64, 50, 76, 49, 61, 58, 54, 66, 49, 68, 61, 65, 55, 49, 33, 58, 81, 51, 111, 42, 60, 72, 92, 51, 65, 45, 58, 54, 70, 52, 80, 58, 45, 75, 71, 73, 47, 44, 50, 80, 79, 82, 90, 70, 107, 70, 78, 61, 72, 59, 69, 76, 67, 54, 64, 51, 59, 75, 64, 62, 53, 63, 56, 45, 64, 62, 48, 83, 60, 68, 63, 77, 63, 94, 64, 129, 51, 78, 74, 61, 84, 71, 54, 56, 56, 68, 53, 52, 90, 74, 57, 70, 46, 77, 58, 100, 62, 62, 74, 66, 41, 69, 65, 51, 58, 80, 46, 57, 64, 51, 57, 66, 67, 35, 59, 71, 88, 53, 83, 56, 65, 49, 88, 62, 90, 69, 62, 42, 46, 65, 57, 64, 57, 85, 65, 103, 47, 71, 50, 63, 99, 71, 71, 73, 58, 58, 70, 46, 57, 68, 60, 70, 65, 67, 55, 66, 60, 65, 59, 48, 74, 50, 66, 126, 51, 72, 99, 46, 57, 73, 68, 73, 78, 60, 62, 72, 73, 53, 55, 58, 64, 38, 74, 88, 112, 62, 50, 56, 66, 65, 66, 61, 78, 61, 72, 56, 60, 58, 96, 91, 93, 76, 67, 70, 77, 72, 63, 64, 62, 61, 66, 74, 63, 63, 51, 80, 78, 56, 64, 56, 71, 59, 70, 51, 49, 69, 75, 47, 59, 51, 74, 67, 74, 56, 48, 86, 57, 60, 60, 85, 59, 43, 73, 62, 58, 59, 41, 86, 53, 67, 59, 87, 64, 69, 53, 69, 46, 45, 53, 65, 86, 90, 51, 66, 70, 65, 36, 58, 74, 66, 57, 59, 83, 57, 75, 60, 65, 44, 91, 73, 51, 46, 68, 79, 73, 61, 66, 55, 64, 74, 67, 69, 59, 61, 43, 56, 85, 67, 54, 60, 58, 68, 75, 53, 85, 81, 47, 58, 58, 52, 59, 75, 51, 82, 66, 52, 55, 61, 74, 63, 67, 69, 67, 65, 84, 42, 72, 64, 56, 56, 78, 53, 57, 75, 58, 47, 99, 74, 62, 78, 59, 47, 69, 58, 58, 66, 67, 75, 62, 66, 51, 61, 79, 63, 102, 51, 58, 61, 56, 57, 50, 70, 91, 87, 35, 61, 79, 64, 54, 82, 64, 64, 56, 65, 69, 63, 94, 62, 60, 60, 44, 78, 88, 63, 82, 54, 36, 51, 67, 78, 65, 122, 83, 58, 36, 67, 55, 54, 88, 59, 64, 56, 65, 74, 70, 52, 50, 87, 57, 70, 84, 82, 93, 75, 72, 64, 54, 60, 62, 57, 58, 72, 58, 66, 52, 59, 89, 55, 67, 51, 70, 91, 58, 77, 56, 74, 70, 71, 83, 57, 63, 60, 81, 58, 58, 51, 52, 76, 72, 61, 58, 66, 67, 59, 97, 51, 47, 80, 48, 102, 72, 78, 74, 96, 69, 63, 67, 68, 90, 60, 56, 58, 68, 68, 84, 55, 49, 78, 51, 48, 90, 69, 71, 68, 117, 51, 64, 70, 86, 63, 84, 59, 55, 49, 73, 52, 66, 76, 78, 86, 56, 75, 79, 66, 69, 58, 49, 73, 85, 58, 55, 55, 47, 106, 58, 63, 61, 59, 72, 62, 52, 44, 65, 57, 72, 58, 63, 68, 92, 54, 51, 51, 57, 71, 94, 68, 78, 65, 60, 62, 57, 51, 47, 58, 52, 47, 61, 75, 62, 76, 57, 63, 66, 65, 55, 57, 71, 47, 56, 50, 57, 55, 70, 50, 48, 64, 64, 71, 80, 54, 82, 65, 48, 73, 45, 50, 79, 66, 72, 57, 63, 60, 62, 88, 60, 55, 39, 68, 67, 63, 61, 61, 57, 69, 73, 68, 76, 57, 66, 51, 74, 56, 69, 54, 142, 94, 57, 56, 80, 74, 59, 69, 92, 62, 69, 71, 78, 72, 41, 67, 61, 55, 86, 50, 82, 84, 51, 76, 55, 55, 56, 49, 111, 69, 95, 62, 97, 84, 52, 55, 67, 56, 87, 62, 59, 59, 55, 50, 60, 60, 70, 80, 81, 105, 72, 95, 56, 54, 59, 62, 53, 62, 90, 76, 78, 65, 114, 58, 39, 62, 69, 64, 62, 65, 58, 60, 70, 48, 65, 58, 52, 73, 63, 63, 64, 58, 58, 47, 55, 65, 69, 67, 
36, 64, 97, 88, 82, 36, 78, 60, 75, 46, 92, 49, 51, 54, 63, 53, 79, 66, 77, 69, 69, 79, 58, 111, 41, 69, 75, 73, 62, 70, 54, 66, 54, 60, 65, 64, 57, 81, 38, 81, 119, 63, 60, 57, 58, 75, 52, 57, 70, 77, 71, 50, 58, 62, 71, 58, 62, 68, 60, 68, 73, 59, 68, 88, 50, 55, 58, 65, 67, 51, 100, 60, 80, 66, 64, 58, 89, 55, 80, 117, 42, 58, 80, 80, 53, 66, 44, 67, 66, 83, 51, 83, 56, 92, 79, 59, 65, 48, 68, 60, 73, 62, 45, 63, 60, 65, 66, 76, 88, 51, 65, 65, 93, 57, 74, 92, 64, 58, 67, 58, 68, 66, 64, 88, 67, 64, 74, 61, 49, 63, 95, 74, 57, 58, 75, 70, 83, 74, 65, 62, 56, 61, 62, 58, 99, 47, 66, 53, 75, 83, 59, 68, 82, 80, 80, 65, 47, 62, 59, 77, 67, 51, 62, 73, 63, 85, 52, 67, 49, 87, 54, 47, 70, 68, 66, 72, 97, 55, 45, 59, 73, 126, 76, 90, 63, 66, 51, 79, 54, 71, 68, 55, 78, 61, 62, 68, 71, 62, 66, 57, 53, 80, 51, 55, 57, 50, 52, 67, 91, 48, 77, 67, 56, 53, 66, 81, 53, 50, 59, 54, 65, 49, 53, 91, 57, 80, 51, 73, 40, 63, 65, 59, 69, 57, 51, 53, 63, 56, 65, 68, 68, 92, 72, 76, 78, 56, 62, 51, 73, 53, 61, 62, 61, 82, 69, 60, 68, 50, 73, 56, 58, 70, 94, 62, 68, 52, 53, 58, 74, 50, 43, 62, 46, 70, 67, 72, 60, 65, 63, 67, 56, 51, 72, 79, 66, 64, 66, 56, 68, 48, 59, 71, 60, 103, 68, 74, 53, 68, 45, 72, 63, 79, 88, 65, 66, 78, 61, 74, 71, 69, 58, 78, 66, 81, 70, 47, 70, 68, 68, 69, 76, 70, 67, 78, 53, 70, 69, 79, 75, 56, 61, 69, 63, 56, 74, 57, 46, 55, 77, 75, 63, 62, 66, 54, 60, 52, 58, 54, 67, 56, 136, 68, 78, 63, 83, 56, 59, 60, 71, 71, 56, 56, 88, 56, 68, 67, 100, 79, 42, 67, 54, 69, 54, 94, 62, 69, 102, 72, 67, 62, 68, 71, 86, 56, 53, 75, 69, 57, 50, 49, 58, 52, 69, 58, 58, 65, 69, 72, 66, 98, 72, 64, 59, 63, 55, 44, 60, 112, 54, 67, 68, 63, 70, 62, 69, 71, 60, 70, 70, 76, 63, 90, 63, 56, 46, 69, 74, 59, 56, 61, 65, 77, 50, 66, 64, 63, 74, 82, 62, 50, 72, 56, 75, 67, 61, 54, 54, 70, 89, 63, 60, 65, 54, 57, 54, 53, 94, 71, 59, 55, 70, 50, 82, 61, 84, 69, 58, 81, 41, 70, 72, 80, 112, 59, 75, 69, 62, 75, 74, 62, 59, 67, 64, 76, 63, 57, 61, 85, 64, 49, 69, 51, 49, 73, 60, 59, 64, 68, 99, 45, 57, 61, 72, 68, 42, 53, 76, 58, 72, 67, 34, 65, 54, 56, 54, 50, 111, 58, 65, 58, 64, 51, 55, 86, 47, 86, 72, 52, 58, 57, 59, 79, 65, 67, 72, 59, 52, 90, 61, 62, 53, 66, 71, 43, 50, 53, 55, 77, 66, 48, 70, 58, 70, 53, 53, 64, 62, 67, 69, 44, 65, 60, 71, 56, 61, 60, 56, 46, 60, 65, 58, 57, 82, 48, 76, 60, 62, 51, 49, 69, 45, 64, 39, 63, 87, 70, 64, 61, 43, 67, 62, 53, 71, 72, 65, 80, 75, 68, 89, 63, 73, 64, 57, 63, 55, 57, 62, 50, 54, 57, 98, 50, 63, 31, 70, 55, 83, 80, 74, 75, 56, 81, 55, 64, 60, 63, 60, 53, 71, 90, 54, 77, 58, 59, 82, 57, 85, 64, 57, 53, 55, 50, 59, 75, 81, 54, 59, 57, 68, 56, 61, 75, 75, 72, 70, 58, 69, 71, 62, 65, 79, 60, 57, 61, 52, 63, 64, 52, 105, 59, 49, 56, 65, 53, 77, 67, 64, 63, 61, 57, 76, 53, 63, 61, 100, 75, 119, 67, 64, 54, 60, 63, 57, 57, 95, 109, 48, 63, 55, 61, 55, 69, 79, 61, 61, 60, 44, 63, 86, 82, 74, 72, 73, 61, 57, 86, 82, 61, 60, 72, 64, 53, 69, 99, 50, 68, 51, 79, 67, 59, 68, 68, 50, 72, 54, 76, 54, 71, 60, 107, 60, 65, 70, 81, 73, 72, 62, 71, 60, 66, 59, 62, 78, 58, 53, 75, 63, 58, 64, 71, 65, 58, 60, 66, 59, 77, 65, 70, 38, 72, 61, 73, 66, 76, 55, 52, 71, 50, 41, 67, 69, 74, 57, 72, 66, 47, 77, 53, 50, 51, 82, 80, 63, 69, 78, 68, 87, 64, 62, 79, 65, 54, 86, 62, 68, 52, 44, 76, 55, 60, 61, 67, 50, 59, 66, 45, 135, 52, 64, 63, 65, 52, 66, 58, 45, 57, 74, 51, 69, 65, 54, 109, 66, 75, 74, 69, 60, 68, 64, 62, 68, 67, 73, 78, 62, 105, 59, 61, 71, 66, 58, 61, 67, 66, 68, 66, 60, 78, 70, 69, 92, 51, 59, 54, 36, 67, 90, 68, 70, 53, 56, 73, 54, 53, 51, 48, 63, 50, 69, 54, 74, 53, 53, 
68, 54, 61, 57, 54, 52, 73, 63, 63, 44, 63, 60, 50, 53, 58, 75, 56, 61, 59, 75, 60, 65, 109, 87, 61, 85, 66, 66, 63, 50, 73, 64, 85, 43, 70, 47, 56, 72, 62, 74, 83, 63, 59, 61, 77, 78, 65, 62, 57, 77, 54, 63, 80, 141, 59, 52, 50, 63, 63, 57, 73, 94, 57, 56, 99, 58, 64, 56, 66, 61, 53, 62, 47, 79, 86, 64, 63, 73, 53, 70, 50, 113, 53, 59, 58, 75, 61, 70, 61, 54, 72, 54, 63, 54, 69, 30, 44, 62, 79, 44, 73, 56, 89, 71, 97, 72, 70, 68, 64, 57, 60, 120, 51, 71, 64, 103, 58, 56, 72, 57, 57, 64, 65, 53, 78, 60, 91, 61, 71, 80, 63, 59, 80, 55, 56, 57, 64, 59, 72, 65, 65, 66, 57, 55, 75, 86, 65, 44, 49, 67, 69, 93, 77, 73, 66, 58, 65, 53, 70, 83, 63, 56, 68, 70, 91, 78, 55, 65, 69, 60, 74, 64, 63, 69, 79, 67, 76, 72, 52, 76, 91, 69, 79, 60, 66, 58, 82, 63, 72, 57, 61, 84, 63, 69, 42, 68, 74, 56, 77, 62, 68, 66, 88, 54, 57, 59, 48, 83, 89, 66, 46, 53, 63, 78, 76, 65, 76, 67, 56, 68, 82, 69, 42, 76, 61, 47, 51, 69, 57, 40, 52, 71, 57, 63, 73, 64, 63, 59, 66, 52, 79, 54, 65, 61, 70, 66, 86, 74, 61, 60, 65, 78, 41, 62, 54, 61, 82, 64, 63, 67, 74, 65, 83, 51, 71, 69, 55, 84, 61, 48, 55, 65, 53, 88, 50, 64, 58, 52, 80, 55, 71, 68, 63, 59, 66, 61, 59, 69, 66, 65, 71, 68, 68, 67, 83, 68, 45, 60, 61, 79, 54, 54, 52, 66, 62, 64, 68, 55, 62, 66, 112, 64, 50, 60, 57, 46, 74, 112, 57, 62, 65, 87, 127, 65, 86, 50, 61, 71, 49, 54, 57, 56, 60, 101, 57, 53, 61, 67, 83, 89, 63, 88, 52, 58, 67, 51, 65, 67, 57, 100, 93, 65, 59, 73, 64, 82, 63, 72, 59, 63, 61, 52, 85, 68, 66, 60, 54, 74, 55, 55, 59, 61, 79, 73, 62, 61, 78, 54, 39, 60, 44, 73, 60, 60, 71, 86, 72, 66, 58, 59, 59, 81, 79, 65, 89, 48, 80, 63, 71, 78, 50, 60, 96, 71, 62, 64, 69, 58, 53, 55, 64, 57, 60, 50, 61, 85, 71, 70, 82, 69, 58, 55, 82, 67, 67, 67, 47, 51, 54, 51, 54, 89, 59, 78, 69, 67, 60, 66, 67, 57, 49, 86, 59, 66, 115, 58, 70, 71, 63, 56, 70, 61, 60, 57, 71, 57, 62, 58, 56, 70, 55, 76, 59, 49, 79, 47, 58, 48, 51, 65, 63, 61, 70, 67, 66, 77, 53, 62, 81, 67, 54, 64, 85, 68, 48, 51, 85, 53, 76, 66, 71, 93, 93, 81, 70, 54, 43, 60, 82, 74, 75, 79, 71, 59, 57, 69, 61, 50, 59, 71, 51, 81, 59, 61, 44, 60, 62, 92, 68, 73, 58, 43, 65, 98, 63, 66, 63, 75, 61, 81, 55, 61, 65, 66, 77, 60, 61, 58, 75, 59, 61, 69, 40, 62, 65, 54, 63, 57, 59, 82, 92, 57, 61, 58, 53, 65, 65, 51, 52, 66, 100, 77, 84, 56, 62, 60, 65, 72, 63, 64, 67, 62, 55, 85, 90, 55, 56, 46, 134, 57, 77, 74, 72, 48, 64, 70, 68, 42, 57, 76, 47, 62, 63, 65, 53, 65, 60, 66, 71, 74, 70, 59, 64, 60, 73, 83, 26, 78, 91, 52, 58, 62, 50, 56, 59, 57, 70, 65, 49, 66, 81, 61, 69, 55, 52, 61, 78, 46, 72, 54, 52, 63, 60, 53, 54, 50, 76, 62, 56, 50, 58, 44, 59, 90, 63, 55, 58, 45, 60, 68, 65, 73, 60, 74, 75, 55, 78, 83, 66, 67, 89, 55, 57, 47, 56, 57, 68, 54, 62, 58, 56, 71, 79, 80, 69, 70, 42, 65, 66, 58, 65, 69, 58, 68, 74, 58, 67, 80, 43, 54, 65, 63, 66, 65, 58, 52, 69, 62, 50, 56, 56, 63, 59, 65, 68, 56, 62, 74, 94, 90, 74, 57, 40, 87, 84, 66, 55, 88, 57, 57, 71, 68, 59, 59, 56, 57, 52, 65, 61, 73, 62, 61, 67, 83, 69, 66, 54, 70, 57, 47, 59, 70, 59, 68, 70, 77, 64, 71, 62, 48, 65, 64, 82, 75, 59, 61, 73, 75, 55, 78, 78, 80, 71, 43, 45, 62, 70, 55, 83, 99, 68, 64, 57, 71, 86, 70, 58, 78, 71, 51, 80, 59, 67, 73, 73, 80, 45, 61, 46, 63, 57, 67, 78, 60, 67, 69, 55, 67, 61, 59, 69, 57, 52, 77, 52, 67, 82, 52, 69, 63, 70, 37, 52, 77, 64, 66, 59, 60, 51, 69, 53, 75, 56, 80, 68, 54, 69, 101, 58, 67, 70, 74, 69, 50, 74, 63, 75, 48, 68, 54, 64, 57, 57, 60, 61, 60, 70, 57, 53, 78, 67, 71, 57, 58, 58, 67, 75, 73, 56, 46, 74, 61, 57, 61, 62, 70, 64, 70, 48, 73, 56, 55, 45, 60, 61, 77, 55, 84, 71, 58, 133, 85, 66, 61, 68, 
(diff body: a very large run of raw comma-separated integer data — array contents from a file in this commit, with no surviving file name or hunk markers — kept as a single placeholder line)
62, 64, 69, 57, 73, 85, 69, 60, 90, 70, 60, 65, 66, 61, 61, 70, 46, 63, 69, 65, 70, 50, 124, 62, 58, 53, 70, 57, 59, 66, 91, 65, 84, 76, 57, 65, 57, 65, 62, 56, 56, 71, 62, 82, 65, 67, 57, 85, 59, 74, 92, 55, 64, 62, 61, 73, 57, 124, 57, 67, 63, 76, 58, 63, 74, 60, 80, 66, 67, 74, 64, 68, 63, 66, 69, 66, 63, 66, 90, 50, 61, 71, 53, 55, 80, 56, 82, 61, 62, 81, 99, 67, 48, 59, 52, 75, 75, 50, 75, 61, 67, 70, 68, 62, 56, 52, 86, 66, 58, 66, 59, 64, 72, 60, 61, 81, 54, 45, 69, 62, 60, 60, 61, 74, 58, 67, 86, 60, 62, 121, 77, 45, 74, 66, 66, 64, 73, 47, 70, 61, 58, 90, 58, 55, 62, 60, 58, 66, 62, 77, 77, 67, 52, 64, 72, 77, 66, 53, 65, 58, 70, 54, 51, 63, 52, 56, 75, 61, 48, 67, 51, 63, 66, 79, 66, 49, 96, 71, 62, 76, 70, 49, 70, 66, 53, 60, 66, 49, 54, 50, 77, 70, 55, 78, 60, 53, 60, 64, 91, 75, 66, 69, 66, 69, 65, 57, 55, 60, 54, 63, 64, 57, 78, 67, 56, 61, 76, 56, 65, 53, 74, 62, 52, 68, 68, 71, 57, 53, 65, 78, 87, 58, 94, 74, 66, 67, 45, 76, 70, 65, 62, 72, 61, 64, 64, 60, 55, 61, 59, 69, 75, 52, 92, 59, 68, 59, 56, 61, 55, 60, 74, 73, 57, 62, 61, 65, 63, 53, 69, 62, 61, 67, 62, 53, 61, 72, 57, 50, 61, 100, 63, 62, 61, 77, 63, 63, 101, 58, 67, 63, 79, 61, 78, 75, 75, 62, 86, 71, 51, 61, 60, 55, 52, 66, 61, 69, 54, 60, 68, 53, 113, 49, 80, 51, 73, 48, 70, 73, 61, 66, 69, 82, 55, 57, 52, 50, 59, 80, 78, 65, 58, 82, 68, 60, 64, 74, 60, 48, 60, 47, 48, 58, 68, 56, 62, 55, 56, 55, 52, 72, 81, 56, 62, 53, 53, 60, 76, 57, 67, 58, 55, 73, 48, 69, 60, 94, 82, 79, 58, 65, 54, 38, 63, 46, 66, 60, 71, 98, 61, 51, 64, 91, 72, 55, 60, 48, 54, 79, 62, 44, 55, 56, 60, 64, 83, 78, 54, 69, 59, 52, 50, 73, 49, 61, 71, 54, 72, 84, 86, 72, 60, 54, 82, 76, 78, 95, 60, 71, 82, 36, 57, 98, 54, 60, 71, 66, 58, 59, 69, 88, 55, 73, 41, 63, 58, 84, 66, 50, 67, 70, 54, 59, 61, 61, 79, 82, 64, 77, 74, 53, 73, 54, 61, 60, 73, 61, 58, 60, 101, 48, 47, 51, 49, 91, 53, 49, 53, 78, 69, 72, 64, 60, 77, 68, 64, 67, 65, 53, 67, 63, 58, 45, 56, 73, 60, 62, 73, 92, 75, 71, 60, 60, 50, 59, 41, 53, 83, 86, 91, 57, 74, 79, 64, 60, 42, 60, 50, 75, 75, 73, 88, 76, 84, 71, 63, 54, 68, 70, 68, 74, 63, 59, 68, 74, 53, 97, 49, 49, 63, 63, 54, 54, 61, 80, 49, 57, 62, 76, 56, 65, 60, 73, 64, 71, 57, 76, 55, 63, 54, 61, 70, 69, 64, 58, 58, 71, 43, 63, 73, 70, 55, 63, 48, 58, 45, 61, 69, 60, 101, 62, 85, 70, 72, 61, 69, 54, 82, 81, 93, 77, 77, 49, 63, 158, 74, 67, 62, 69, 71, 79, 62, 43, 48, 60, 49, 64, 74, 66, 77, 53, 38, 80, 70, 52, 45, 61, 70, 83, 55, 59, 58, 83, 59, 100, 68, 65, 59, 52, 79, 47, 50, 67, 70, 88, 64, 87, 53, 70, 71, 84, 53, 64, 74, 76, 65, 38, 72, 57, 80, 59, 55, 61, 73, 60, 59, 46, 56, 52, 159, 70, 46, 71, 63, 63, 72, 68, 72, 47, 43, 59, 76, 79, 75, 70, 83, 126, 53, 68, 66, 67, 50, 64, 83, 52, 90, 79, 100, 50, 61, 48, 73, 39, 67, 89, 57, 62, 75, 62, 52, 72, 50, 57, 78, 53, 70, 55, 65, 71, 65, 73, 78, 47, 56, 52, 59, 57, 58, 48, 64, 54, 71, 61, 51, 75, 71, 71, 82, 78, 66, 76, 88, 76, 73, 63, 71, 90, 71, 58, 63, 74, 84, 74, 64, 55, 67, 78, 60, 62, 71, 67, 85, 75, 78, 74, 70, 84, 79, 77, 127, 62, 75, 69, 63, 52, 38, 65, 66, 87, 63, 60, 61, 96, 49, 58, 70, 42, 56, 73, 67, 66, 62, 59, 71, 68, 50, 58, 49, 79, 56, 73, 48, 60, 66, 68, 62, 84, 66, 55, 60, 75, 68, 82, 55, 90, 78, 63, 60, 51, 90, 63, 92, 57, 71, 60, 57, 98, 143, 62, 62, 63, 78, 64, 58, 42, 56, 69, 63, 45, 71, 64, 95, 72, 45, 72, 50, 56, 55, 70, 62, 60, 53, 67, 52, 71, 59, 84, 64, 63, 64, 65, 61, 60, 79, 58, 58, 72, 69, 60, 57, 62, 84, 68, 48, 70, 68, 60, 74, 40, 65, 67, 77, 65, 56, 58, 62, 80, 71, 67, 53, 105, 61, 48, 62, 60, 77, 61, 56, 44, 56, 61, 60, 150, 96, 81, 
68, 94, 62, 64, 63, 64, 45, 50, 50, 76, 62, 61, 76, 63, 74, 62, 61, 71, 61, 70, 64, 87, 85, 69, 91, 53, 49, 73, 63, 119, 60, 51, 55, 47, 88, 50, 57, 51, 66, 59, 71, 82, 84, 52, 82, 61, 86, 55, 68, 71, 54, 64, 55, 57, 68, 76, 58, 58, 66, 92, 56, 54, 85, 73, 61, 86, 79, 55, 71, 45, 98, 93, 55, 72, 60, 54, 61, 64, 75, 56, 62, 75, 49, 61, 56, 66, 73, 72, 74, 76, 49, 67, 62, 62, 54, 68, 60, 59, 63, 71, 61, 53, 90, 63, 78, 80, 59, 52, 36, 80, 66, 66, 56, 48, 122, 69, 107, 79, 57, 84, 45, 81, 68, 56, 89, 60, 78, 74, 81, 71, 71, 69, 67, 83, 77, 56, 72, 64, 49, 53, 85, 80, 49, 59, 62, 67, 94, 84, 52, 55, 77, 63, 73, 79, 60, 56, 86, 66, 56, 70, 86, 71, 89, 53, 58, 86, 55, 52, 62, 73, 46, 68, 48, 74, 75, 67, 59, 70, 63, 71, 82, 73, 77, 60, 53, 65, 77, 68, 66, 72, 62, 76, 65, 69, 92, 61, 56, 49, 77, 110, 80, 52, 47, 50, 93, 67, 124, 60, 78, 65, 56, 56, 83, 66, 72, 80, 81, 74, 68, 52, 70, 62, 53, 51, 84, 74, 75, 43, 91, 57, 58, 60, 77, 71, 65, 70, 101, 72, 77, 60, 70, 63, 65, 77, 56, 81, 68, 53, 75, 53, 61, 68, 57, 66, 58, 61, 59, 53, 51, 91, 56, 67, 80, 47, 76, 67, 61, 65, 67, 61, 69, 78, 63, 77, 68, 77, 63, 69, 101, 71, 66, 57, 63, 67, 52, 62, 83, 46, 72, 50, 56, 64, 74, 53, 67, 44, 55, 64, 61, 54, 68, 57, 42, 73, 70, 56, 67, 67, 74, 108, 73, 47, 58, 76, 62, 55, 95, 63, 71, 74, 53, 60, 56, 93, 57, 43, 56, 50, 53, 76, 56, 81, 54, 60, 99, 54, 85, 62, 107, 49, 65, 68, 72, 58, 49, 61, 56, 72, 61, 62, 44, 52, 60, 60, 52, 35, 78, 67, 66, 56, 69, 57, 43, 64, 63, 54, 63, 98, 82, 69, 60, 62, 69, 75, 56, 72, 66, 65, 78, 62, 60, 93, 51, 65, 59, 71, 47, 60, 56, 94, 61, 71, 120, 84, 73, 51, 53, 44, 89, 48, 71, 63, 52, 65, 68, 87, 63, 63, 52, 46, 79, 73, 85, 54, 83, 50, 64, 79, 42, 84, 77, 54, 56, 65, 43, 52, 44, 76, 55, 89, 64, 62, 85, 54, 91, 44, 92, 59, 61, 64, 51, 48, 73, 79, 74, 67, 79, 43, 66, 72, 78, 46, 85, 67, 72, 59, 56, 76, 59, 56, 68, 43, 47, 67, 77, 61, 45, 53, 84, 100, 63, 65, 78, 71, 43, 69, 59, 64, 63, 55, 64, 78, 60, 100, 62, 55, 64, 70, 55, 47, 83, 51, 52, 81, 60, 64, 65, 68, 71, 59, 53, 108, 64, 58, 53, 70, 51, 84, 82, 90, 59, 58, 62, 77, 67, 57, 89, 70, 55, 54, 67, 56, 78, 91, 63, 54, 86, 75, 114, 65, 86, 57, 80, 71, 66, 41, 47, 79, 71, 57, 65, 45, 80, 75, 59, 78, 58, 51, 106, 55, 73, 63, 52, 74, 58, 53, 65, 61, 79, 47, 81, 69, 71, 59, 92, 66, 65, 93, 50, 57, 67, 65, 88, 47, 58, 80, 65, 60, 63, 31, 74, 66, 91, 86, 75, 70, 60, 46, 73, 51, 67, 52, 82, 49, 65, 64, 78, 72, 54, 64, 58, 67, 75, 71, 63, 57, 77, 78, 81, 57, 82, 103, 46, 63, 58, 65, 61, 53, 48, 51, 36, 61, 92, 81, 51, 89, 79, 88, 66, 62, 55, 48, 67, 78, 53, 54, 61, 95, 88, 63, 73, 62, 71, 70, 66, 85, 80, 57, 82, 57, 75, 91, 85, 72, 79, 53, 40, 64, 56, 81, 99, 77, 52, 56, 75, 74, 86, 66, 64, 58, 59, 64, 63, 51, 50, 62, 49, 89, 59, 66, 72, 62, 54, 52, 66, 48, 50, 59, 64, 59, 73, 48, 59, 55, 54, 46, 67, 66, 84, 59, 66, 55, 47, 77, 68, 54, 57, 62, 51, 66, 79, 56, 58, 46, 69, 52, 64, 65, 53, 62, 69, 70, 110, 96, 156, 47, 64, 103, 79, 50, 76, 54, 96, 62, 97, 68, 70, 66, 54, 67, 81, 58, 60, 50, 37, 72, 66, 106, 65, 62, 41, 52, 97, 115, 52, 76, 61, 60, 51, 54, 76, 63, 57, 51, 75, 49, 56, 51, 55, 75, 71, 56, 76, 59, 75, 80, 75, 63, 66, 62, 56, 92, 64, 72, 54, 68, 77, 68, 120, 60, 79, 73, 58, 51, 52, 58, 87, 72, 46, 67, 77, 56, 60, 75, 73, 80, 67, 62, 93, 83, 47, 68, 102, 79, 59, 73, 96, 62, 58, 49, 70, 60, 81, 53, 65, 52, 48, 71, 47, 64, 73, 65, 60, 81, 77, 77, 67, 45, 61, 67, 108, 47, 69, 57, 87, 69, 56, 63, 55, 54, 97, 76, 55, 91, 56, 77, 71, 56, 67, 91, 54, 66, 62, 63, 74, 56, 68, 66, 43, 57, 75, 108, 47, 59, 56, 73, 44, 58, 71, 68, 52, 72, 
73, 66, 39, 63, 60, 70, 107, 57, 77, 53, 59, 57, 42, 86, 56, 55, 56, 78, 59, 57, 47, 55, 49, 74, 48, 86, 59, 60, 69, 71, 55, 70, 55, 57, 61, 127, 59, 84, 53, 71, 59, 64, 87, 74, 63, 67, 60, 79, 96, 59, 70, 70, 73, 61, 69, 77, 80, 84, 66, 56, 50, 61, 65, 78, 97, 65, 58, 63, 60, 80, 53, 50, 40, 74, 45, 72, 63, 80, 73, 51, 59, 70, 87, 55, 72, 71, 58, 65, 63, 79, 58, 66, 59, 66, 70, 65, 74, 65, 45, 71, 52, 78, 64, 76, 62, 59, 85, 62, 70, 73, 64, 62, 93, 60, 74, 51, 44, 43, 56, 56, 68, 75, 63, 54, 87, 51, 72, 85, 66, 59, 52, 58, 58, 69, 73, 62, 69, 51, 63, 73, 47, 46, 53, 70, 77, 45, 67, 59, 75, 49, 92, 62, 54, 46, 60, 55, 78, 56, 57, 74, 63, 70, 64, 77, 53, 62, 58, 105, 70, 71, 90, 62, 41, 61, 76, 50, 35, 65, 78, 91, 57, 56, 63, 40, 60, 68, 54, 75, 63, 64, 52, 54, 90, 44, 48, 65, 57, 35, 150, 71, 38, 50, 48, 63, 57, 99, 71, 60, 72, 64, 72, 56, 68, 57, 60, 54, 66, 65, 61, 68, 76, 63, 96, 61, 58, 69, 88, 57, 53, 58, 41, 88, 72, 66, 78, 66, 47, 87, 54, 84, 67, 42, 50, 84, 78, 70, 69, 52, 86, 36, 58, 61, 59, 61, 53, 67, 97, 51, 75, 66, 87, 47, 49, 66, 73, 60, 64, 62, 73, 53, 60, 55, 58, 57, 58, 75, 50, 77, 77, 57, 55, 92, 65, 63, 68, 75, 61, 65, 75, 66, 70, 55, 62, 66, 105, 76, 81, 62, 68, 65, 50, 64, 55, 85, 74, 68, 58, 56, 78, 59, 43, 64, 50, 48, 70, 62, 72, 48, 63, 83, 87, 60, 79, 70, 54, 64, 57, 76, 66, 56, 85, 65, 40, 46, 75, 68, 56, 68, 64, 58, 52, 69, 66, 51, 71, 72, 75, 59, 61, 47, 82, 72, 84, 62, 50, 60, 56, 66, 69, 80, 66, 40, 76, 57, 99, 58, 54, 61, 67, 61, 102, 67, 70, 97, 54, 65, 79, 45, 121, 54, 68, 81, 90, 67, 53, 61, 42, 68, 64, 46, 47, 75, 95, 92, 51, 62, 70, 84, 77, 45, 71, 61, 74, 77, 49, 54, 44, 87, 82, 60, 48, 64, 53, 66, 72, 88, 62, 62, 73, 49, 80, 59, 52, 63, 68, 70, 53, 64, 56, 71, 73, 80, 50, 52, 50, 63, 63, 73, 56, 75, 54, 49, 56, 67, 67, 71, 54, 44, 91, 51, 69, 52, 59, 60, 57, 98, 57, 51, 35, 65, 54, 54, 78, 72, 48, 56, 53, 67, 63, 69, 57, 60, 46, 64, 63, 102, 55, 46, 56, 66, 94, 73, 81, 61, 70, 54, 44, 56, 48, 58, 60, 88, 55, 73, 57, 48, 54, 68, 75, 59, 60, 50, 102, 68, 65, 44, 57, 75, 50, 51, 69, 52, 118, 45, 75, 75, 66, 59, 62, 82, 56, 67, 70, 51, 52, 72, 69, 65, 48, 40, 71, 63, 59, 73, 83, 52, 45, 67, 42, 51, 55, 63, 78, 56, 67, 58, 54, 60, 54, 53, 59, 69, 56, 101, 67, 68, 70, 59, 77, 36, 66, 101, 60, 55, 66, 58, 55, 64, 82, 71, 85, 54, 64, 81, 65, 53, 77, 65, 73, 78, 55, 75, 62, 57, 59, 55, 46, 64, 74, 52, 103, 45, 81, 84, 115, 55, 68, 57, 65, 82, 72, 70, 74, 70, 67, 77, 53, 76, 36, 77, 58, 75, 62, 76, 39, 83, 60, 77, 85, 69, 79, 48, 59, 81, 48, 68, 58, 70, 59, 64, 43, 43, 74, 64, 47, 83, 62, 63, 77, 62, 93, 62, 59, 62, 60, 52, 58, 55, 62, 47, 63, 79, 61, 79, 63, 105, 76, 55, 76, 90, 58, 61, 82, 52, 65, 72, 64, 66, 69, 50, 75, 67, 67, 79, 47, 57, 57, 56, 65, 71, 63, 49, 64, 61, 61, 53, 83, 65, 59, 69, 65, 57, 66, 62, 68, 62, 60, 43, 46, 67, 98, 61, 54, 110, 77, 93, 72, 64, 49, 94, 59, 80, 59, 56, 50, 63, 61, 72, 97, 67, 64, 72, 60, 56, 97, 40, 66, 43, 48, 83, 72, 64, 54, 61, 74, 72, 65, 65, 51, 52, 91, 62, 62, 57, 48, 61, 55, 66, 65, 62, 68, 86, 59, 56, 78, 50, 77, 72, 63, 57, 56, 59, 55, 51, 77, 75, 68, 57, 69, 125, 97, 75, 62, 60, 56, 63, 64, 66, 68, 58, 66, 64, 61, 65, 57, 70, 80, 61, 81, 44, 77, 50, 66, 85, 60, 65, 51, 68, 56, 75, 54, 83, 77, 64, 53, 62, 96, 67, 54, 88, 64, 50, 55, 58, 55, 34, 72, 60, 58, 74, 53, 57, 68, 60, 60, 57, 65, 63, 49, 59, 76, 65, 51, 66, 63, 49, 62, 66, 49, 57, 77, 83, 60, 80, 59, 69, 85, 54, 59, 67, 75, 45, 63, 62, 71, 61, 70, 45, 81, 45, 71, 74, 65, 66, 74, 168, 71, 71, 64, 48, 78, 74, 88, 64, 52, 47, 55, 55, 51, 54, 101, 63, 72, 62, 
63, 55, 66, 66, 63, 97, 70, 73, 63, 97, 88, 76, 64, 63, 44, 53, 95, 58, 57, 77, 61, 84, 46, 77, 69, 62, 58, 94, 45, 71, 87, 72, 70, 61, 60, 60, 65, 65, 73, 68, 53, 55, 46, 40, 64, 68, 147, 34, 62, 57, 55, 69, 69, 60, 72, 86, 62, 67, 64, 50, 61, 56, 82, 86, 69, 58, 96, 64, 72, 63, 70, 59, 81, 65, 61, 51, 56, 60, 74, 34, 54, 63, 52, 59, 85, 57, 65, 63, 62, 61, 82, 49, 66, 51, 33, 69, 124, 68, 51, 67, 78, 60, 64, 60, 72, 63, 53, 73, 79, 88, 48, 83, 66, 49, 103, 53, 65, 101, 82, 48, 55, 63, 66, 66, 82, 79, 78, 48, 52, 57, 59, 66, 72, 79, 59, 51, 61, 59, 38, 44, 99, 67, 47, 61, 54, 67, 132, 64, 54, 84, 60, 54, 62, 54, 84, 70, 66, 59, 71, 78, 56, 86, 67, 56, 77, 80, 64, 66, 76, 99, 61, 50, 60, 56, 44, 79, 93, 66, 63, 64, 67, 77, 54, 71, 71, 60, 63, 48, 62, 69, 66, 72, 84, 53, 71, 58, 70, 63, 78, 59, 62, 51, 69, 38, 135, 55, 64, 42, 60, 113, 77, 74, 75, 68, 45, 45, 69, 42, 49, 65, 84, 48, 59, 49, 64, 71, 42, 70, 110, 75, 64, 76, 63, 57, 66, 53, 58, 55, 82, 67, 56, 59, 58, 88, 62, 69, 82, 58, 90, 68, 70, 63, 73, 59, 48, 69, 69, 63, 84, 58, 56, 50, 64, 56, 60, 60, 34, 79, 41, 59, 61, 57, 74, 76, 103, 63, 56, 64, 73, 128, 47, 76, 57, 139, 57, 74, 32, 57, 50, 68, 53, 40, 59, 72, 74, 61, 124, 66, 74, 66, 81, 58, 65, 62, 68, 62, 69, 91, 63, 74, 88, 105, 48, 74, 36, 54, 75, 60, 53, 79, 81, 64, 83, 69, 63, 66, 61, 76, 48, 74, 60, 88, 60, 76, 47, 83, 45, 69, 56, 50, 62, 66, 60, 44, 86, 64, 28, 77, 72, 73, 62, 69, 75, 100, 64, 59, 61, 58, 59, 70, 51, 78, 81, 45, 75, 64, 77, 66, 47, 81, 56, 52, 56, 49, 71, 53, 74, 75, 58, 50, 59, 47, 67, 88, 68, 67, 71, 75, 78, 70, 83, 69, 59, 54, 58, 67, 122, 43, 86, 78, 72, 62, 44, 65, 64, 61, 87, 62, 49, 73, 50, 70, 75, 67, 81, 72, 107, 73, 58, 65, 65, 59, 86, 52, 80, 60, 84, 83, 68, 72, 77, 57, 58, 68, 63, 69, 73, 62, 60, 79, 59, 84, 46, 63, 54, 50, 72, 76, 46, 82, 53, 70, 63, 69, 51, 87, 71, 56, 94, 92, 67, 64, 78, 57, 81, 40, 66, 64, 90, 48, 85, 95, 74, 83, 62, 55, 38, 70, 55, 72, 59, 82, 56, 73, 67, 78, 69, 68, 102, 58, 81, 53, 65, 78, 58, 59, 77, 57, 55, 58, 72, 91, 66, 50, 67, 72, 70, 58, 81, 79, 64, 79, 70, 65, 71, 50, 61, 55, 73, 54, 66, 81, 74, 55, 76, 68, 58, 68, 65, 63, 77, 66, 68, 82, 55, 55, 50, 65, 70, 57, 60, 68, 54, 63, 80, 52, 59, 68, 55, 73, 54, 76, 62, 61, 59, 56, 63, 60, 60, 74, 92, 54, 81, 63, 62, 68, 63, 64, 77, 70, 60, 75, 51, 68, 77, 53, 63, 55, 58, 66, 73, 77, 81, 86, 63, 100, 76, 60, 64, 69, 56, 50, 71, 61, 68, 67, 77, 65, 72, 64, 58, 57, 71, 82, 77, 82, 59, 66, 63, 73, 71, 71, 66, 52, 64, 64, 68, 69, 69, 65, 89, 77, 70, 60, 60, 58, 60, 55, 61, 56, 63, 50, 77, 73, 87, 78, 58, 83, 55, 62, 64, 64, 86, 69, 61, 75, 59, 55, 56, 52, 99, 74, 73, 61, 69, 62, 53, 53, 67, 56, 67, 64, 62, 60, 75, 58, 53, 68, 55, 58, 71, 64, 62, 71, 131, 52, 58, 67, 71, 74, 74, 62, 63, 55, 59, 61, 62, 77, 91, 66, 74, 65, 57, 81, 73, 43, 62, 54, 66, 47, 55, 69, 65, 61, 63, 66, 62, 57, 74, 63, 62, 68, 43, 59, 54, 56, 57, 73, 69, 66, 62, 56, 67, 72, 61, 69, 65, 64, 61, 60, 59, 50, 58, 79, 57, 84, 49, 57, 53, 56, 62, 60, 74, 59, 51, 75, 48, 61, 62, 56, 91, 66, 76, 54, 80, 66, 56, 73, 55, 62, 52, 72, 54, 77, 55, 75, 75, 61, 65, 61, 61, 64, 59, 58, 71, 64, 58, 52, 50, 62, 50, 56, 91, 49, 64, 73, 71, 66, 58, 72, 55, 64, 75, 58, 58, 45, 78, 56, 78, 71, 64, 84, 61, 60, 53, 65, 60, 57, 82, 55, 125, 65, 67, 83, 57, 55, 62, 58, 81, 70, 60, 72, 53, 68, 61, 67, 53, 73, 58, 59, 135, 55, 71, 60, 84, 61, 63, 67, 70, 60, 64, 58, 65, 68, 56, 58, 64, 59, 48, 59, 54, 74, 70, 66, 69, 55, 69, 62, 64, 62, 67, 61, 60, 70, 63, 96, 68, 63, 59, 98, 53, 74, 58, 69, 55, 53, 59, 66, 72, 60, 72, 59, 70, 
63, 65, 56, 62, 58, 73, 65, 56, 55, 59, 56, 54, 70, 60, 61, 76, 72, 56, 58, 61, 65, 60, 90, 84, 63, 52, 79, 70, 51, 89, 77, 70, 77, 57, 108, 63, 68, 78, 89, 85, 62, 60, 63, 60, 54, 61, 79, 60, 61, 56, 64, 57, 83, 56, 71, 52, 59, 57, 58, 86, 66, 45, 90, 76, 69, 77, 56, 54, 57, 54, 73, 60, 80, 67, 57, 81, 68, 59, 61, 81, 73, 73, 72, 62, 63, 70, 73, 56, 88, 62, 62, 62, 66, 56, 67, 76, 70, 59, 65, 58, 78, 59, 69, 64, 55, 71, 54, 66, 67, 60, 61, 53, 87, 67, 77, 60, 53, 67, 57, 73, 70, 63, 93, 151, 63, 61, 54, 63, 61, 68, 80, 82, 57, 69, 59, 67, 59, 54, 55, 80, 66, 71, 56, 71, 64, 79, 57, 53, 68, 66, 68, 70, 69, 86, 65, 59, 65, 63, 60, 49, 60, 63, 69, 96, 69, 60, 49, 45, 74, 75, 64, 61, 64, 81, 82, 110, 60, 58, 55, 60, 76, 60, 64, 56, 61, 67, 58, 105, 61, 74, 54, 57, 68, 73, 66, 71, 58, 57, 52, 64, 51, 67, 74, 57, 73, 63, 55, 57, 50, 59, 72, 77, 70, 79, 101, 62, 66, 55, 62, 61, 56, 51, 62, 79, 57, 66, 69, 78, 63, 84, 59, 64, 82, 71, 68, 94, 59, 60, 72, 120, 73, 68, 64, 86, 61, 60, 62, 56, 54, 69, 63, 81, 71, 84, 58, 68, 63, 67, 68, 107, 94, 61, 76, 90, 59, 68, 68, 62, 58, 60, 65, 71, 64, 47, 57, 72, 59, 59, 85, 82, 60, 71, 50, 48, 70, 57, 70, 109, 74, 71, 53, 65, 58, 74, 85, 59, 71, 60, 57, 89, 60, 65, 69, 96, 69, 65, 78, 55, 75, 100, 50, 58, 68, 65, 59, 60, 87, 66, 62, 63, 70, 69, 105, 51, 67, 58, 61, 61, 62, 73, 95, 75, 95, 58, 76, 57, 60, 80, 69, 60, 57, 65, 63, 63, 70, 63, 58, 68, 62, 70, 57, 68, 73, 80, 59, 64, 63, 52, 60, 81, 66, 50, 82, 45, 57, 70, 61, 57, 65, 64, 87, 61, 69, 60, 58, 54, 60, 106, 44, 75, 81, 103, 73, 58, 63, 65, 56, 68, 84, 87, 54, 75, 70, 63, 66, 70, 56, 54, 68, 67, 60, 61, 66, 64, 49, 52, 74, 57, 76, 58, 54, 64, 91, 60, 63, 64, 61, 63, 70, 75, 85, 63, 59, 61, 62, 87, 67, 63, 61, 59, 63, 63, 53, 58, 100, 60, 93, 45, 64, 59, 58, 70, 57, 67, 51, 61, 67, 65, 59, 81, 69, 58, 72, 49, 50, 65, 71, 80, 57, 61, 75, 94, 57, 82, 68, 43, 76, 56, 64, 72, 60, 67, 66, 60, 57, 65, 68, 64, 65, 57, 62, 57, 73, 51, 51, 55, 70, 56, 76, 87, 71, 58, 51, 62, 52, 58, 57, 52, 55, 78, 61, 91, 68, 60, 55, 62, 53, 70, 88, 61, 52, 68, 80, 65, 72, 75, 60, 66, 91, 56, 71, 59, 55, 73, 90, 66, 57, 78, 64, 63, 75, 84, 72, 57, 67, 77, 50, 69, 68, 55, 81, 68, 64, 85, 53, 63, 60, 57, 59, 62, 70, 66, 60, 65, 77, 69, 68, 55, 55, 61, 71, 64, 58, 60, 59, 50, 54, 55, 76, 67, 58, 68, 62, 76, 58, 57, 75, 50, 66, 59, 70, 53, 65, 71, 63, 65, 49, 63, 56, 53, 64, 63, 73, 75, 58, 62, 62, 57, 60, 64, 74, 68, 82, 61, 61, 74, 74, 65, 66, 61, 96, 79, 55, 63, 72, 54, 57, 56, 60, 78, 71, 65, 60, 66, 67, 57, 58, 66, 66, 59, 49, 66, 62, 72, 56, 60, 72, 56, 65, 54, 67, 59, 71, 64, 57, 74, 72, 56, 60, 67, 67, 56, 68, 61, 54, 64, 97, 78, 55, 72, 51, 87, 65, 46, 57, 99, 72, 51, 58, 93, 49, 59, 54, 68, 57, 69, 54, 77, 57, 63, 69, 75, 56, 68, 56, 60, 74, 69, 59, 67, 66, 71, 51, 81, 67, 61, 58, 62, 61, 45, 59, 85, 81, 78, 66, 74, 56, 74, 82, 74, 62, 57, 65, 82, 50, 83, 67, 80, 62, 55, 60, 91, 56, 59, 79, 60, 49, 57, 67, 68, 57, 72, 91, 64, 69, 68, 65, 68, 77, 63, 73, 52, 57, 56, 62, 80, 60, 72, 64, 69, 70, 67, 59, 64, 60, 62, 48, 67, 68, 58, 73, 57, 51, 54, 59, 65, 52, 79, 61, 53, 62, 81, 75, 65, 55, 79, 52, 73, 69, 61, 56, 55, 58, 87, 62, 80, 96, 54, 60, 71, 78, 81, 69, 69, 64, 63, 71, 63, 97, 54, 74, 41, 67, 60, 68, 74, 58, 63, 61, 78, 67, 64, 80, 49, 59, 154, 70, 58, 68, 69, 61, 63, 65, 62, 61, 65, 64, 68, 61, 70, 53, 77, 63, 71, 80, 70, 60, 57, 86, 77, 53, 58, 66, 59, 63, 64, 61, 55, 61, 45, 76, 63, 65, 64, 49, 86, 82, 73, 67, 93, 62, 61, 61, 63, 54, 62, 63, 62, 56, 65, 76, 57, 58, 61, 76, 73, 69, 78, 70, 67, 61, 58, 55, 57, 63, 
67, 66, 54, 52, 72, 52, 112, 73, 67, 58, 73, 49, 56, 56, 61, 71, 59, 90, 69, 79, 49, 70, 84, 76, 67, 60, 62, 60, 57, 90, 62, 56, 66, 57, 61, 78, 61, 72, 83, 71, 58, 54, 57, 63, 52, 60, 58, 57, 77, 54, 75, 89, 64, 69, 89, 58, 79, 68, 66, 62, 81, 84, 83, 59, 64, 58, 141, 93, 74, 72, 57, 70, 53, 67, 46, 57, 70, 68, 49, 59, 46, 58, 87, 109, 60, 64, 114, 67, 56, 57, 67, 65, 64, 60, 70, 55, 57, 60, 43, 67, 54, 65, 75, 55, 62, 46, 56, 61, 62, 68, 63, 55, 70, 85, 53, 81, 56, 63, 70, 60, 72, 66, 73, 68, 55, 69, 50, 67, 75, 76, 71, 83, 61, 78, 99, 81, 66, 55, 59, 61, 75, 65, 81, 64, 61, 55, 86, 67, 57, 82, 67, 80, 61, 57, 107, 92, 56, 57, 61, 72, 66, 61, 55, 70, 70, 75, 69, 76, 94, 58, 68, 78, 62, 81, 66, 73, 82, 49, 80, 55, 69, 73, 55, 58, 64, 82, 64, 66, 58, 63, 75, 82, 53, 61, 52, 59, 73, 70, 83, 54, 56, 74, 86, 70, 83, 70, 68, 56, 65, 59, 67, 87, 77, 47, 59, 68, 55, 47, 74, 71, 58, 46, 67, 61, 54, 58, 52, 102, 64, 72, 71, 53, 51, 81, 63, 43, 52, 69, 79, 67, 62, 58, 50, 86, 62, 56, 53, 66, 98, 54, 61, 62, 56, 81, 63, 48, 61, 63, 45, 74, 73, 88, 149, 67, 69, 62, 100, 56, 60, 58, 71, 44, 67, 57, 67, 61, 87, 67, 60, 72, 53, 60, 61, 54, 53, 58, 59, 64, 77, 60, 51, 53, 56, 55, 59, 59, 65, 65, 54, 67, 65, 73, 70, 74, 62, 60, 63, 67, 73, 67, 51, 68, 50, 73, 62, 60, 72, 60, 66, 58, 71, 66, 56, 60, 64, 72, 73, 60, 79, 76, 64, 76, 76, 66, 56, 68, 90, 54, 60, 79, 57, 89, 63, 65, 65, 58, 67, 61, 80, 71, 83, 60, 60, 63, 67, 68, 63, 58, 57, 67, 83, 73, 68, 60, 54, 58, 57, 49, 89, 65, 58, 92, 61, 85, 106, 57, 102, 56, 85, 55, 82, 79, 55, 59, 53, 61, 59, 56, 81, 58, 65, 55, 79, 78, 81, 63, 72, 64, 75, 53, 56, 77, 56, 70, 56, 57, 53, 58, 48, 57, 68, 64, 63, 60, 60, 58, 70, 50, 78, 56, 65, 58, 59, 76, 59, 65, 66, 72, 82, 85, 77, 77, 87, 57, 55, 81, 51, 76, 45, 53, 60, 52, 58, 78, 52, 82, 57, 60, 68, 73, 59, 68, 64, 72, 59, 54, 51, 57, 87, 69, 50, 127, 69, 65, 66, 70, 59, 90, 56, 75, 62, 78, 81, 57, 55, 66, 76, 51, 65, 60, 59, 66, 54, 54, 55, 55, 62, 64, 81, 69, 67, 54, 110, 65, 56, 56, 58, 53, 52, 62, 49, 62, 53, 75, 58, 57, 76, 70, 59, 71, 75, 59, 55, 49, 79, 61, 74, 60, 57, 65, 75, 64, 77, 60, 60, 63, 69, 61, 81, 72, 50, 61, 64, 53, 63, 61, 64, 66, 78, 49, 72, 86, 63, 74, 58, 120, 53, 70, 61, 69, 54, 49, 58, 48, 56, 69, 71, 57, 65, 79, 54, 66, 81, 55, 74, 50, 58, 65, 60, 95, 89, 56, 69, 93, 60, 51, 62, 89, 80, 69, 41, 59, 50, 63, 69, 60, 52, 49, 72, 103, 74, 65, 59, 53, 62, 75, 68, 58, 64, 85, 68, 60, 68, 58, 63, 86, 68, 67, 49, 60, 62, 52, 65, 49, 65, 86, 63, 62, 74, 78, 94, 55, 77, 57, 63, 90, 62, 68, 66, 77, 53, 65, 62, 48, 70, 67, 73, 49, 62, 56, 66, 70, 60, 54, 67, 62, 63, 64, 102, 59, 59, 75, 70, 62, 69, 64, 69, 56, 60, 75, 63, 70, 47, 52, 69, 60, 82, 49, 57, 75, 61, 68, 60, 54, 92, 67, 58, 77, 61, 65, 91, 58, 80, 70, 83, 50, 63, 57, 77, 75, 78, 57, 64, 71, 68, 55, 65, 51, 60, 45, 68, 63, 70, 74, 61, 60, 55, 58, 73, 62, 60, 66, 67, 66, 66, 118, 74, 53, 65, 61, 67, 59, 61, 62, 67, 71, 59, 62, 53, 70, 74, 67, 69, 58, 49, 64, 65, 56, 59, 72, 56, 50, 62, 62, 63, 65, 71, 66, 65, 100, 76, 68, 91, 59, 75, 60, 60, 56, 68, 73, 67, 97, 58, 84, 55, 73, 72, 56, 61, 62, 49, 57, 79, 67, 49, 50, 71, 57, 86, 55, 51, 52, 73, 58, 72, 88, 53, 56, 64, 61, 57, 48, 64, 61, 70, 77, 82, 59, 93, 58, 75, 50, 60, 49, 96, 54, 50, 73, 61, 54, 68, 69, 61, 56, 67, 54, 66, 57, 66, 51, 63, 89, 72, 58, 55, 53, 52, 64, 41, 64, 64, 61, 55, 76, 74, 52, 64, 50, 64, 50, 69, 60, 58, 84, 64, 61, 63, 58, 81, 64, 64, 63, 67, 68, 74, 52, 67, 73, 57, 60, 48, 116, 75, 101, 61, 47, 48, 46, 75, 39, 38, 53, 61, 67, 58, 83, 82, 46, 68, 68, 62, 61, 53, 69, 
42, 44, 59, 56, 76, 62, 56, 74, 68, 70, 84, 85, 41, 54, 62, 67, 57, 60, 83, 53, 73, 71, 68, 57, 53, 51, 54, 62, 65, 70, 49, 84, 60, 82, 77, 72, 73, 68, 64, 51, 66, 86, 99, 49, 77, 48, 64, 57, 85, 60, 90, 82, 57, 71, 72, 63, 54, 59, 43, 93, 62, 48, 80, 49, 77, 64, 62, 62, 65, 33, 107, 63, 56, 55, 61, 71, 68, 77, 56, 62, 46, 68, 72, 82, 58, 68, 59, 66, 58, 87, 53, 54, 63, 51, 73, 68, 73, 90, 61, 57, 44, 54, 53, 82, 76, 73, 85, 38, 45, 70, 95, 96, 86, 58, 59, 72, 55, 54, 66, 63, 50, 62, 59, 68, 74, 62, 72, 82, 158, 85, 75, 62, 98, 60, 65, 97, 61, 72, 72, 76, 64, 77, 75, 105, 62, 62, 61, 81, 56, 71, 59, 81, 52, 49, 89, 58, 70, 55, 50, 60, 54, 69, 57, 63, 62, 53, 80, 65, 71, 58, 55, 62, 63, 69, 75, 56, 70, 65, 71, 111, 52, 45, 72, 93, 51, 124, 75, 93, 57, 86, 83, 89, 58, 55, 70, 48, 62, 63, 53, 67, 39, 91, 48, 51, 87, 58, 85, 52, 66, 80, 48, 67, 60, 66, 61, 114, 58, 70, 72, 82, 71, 102, 43, 61, 62, 81, 58, 71, 67, 66, 71, 75, 49, 70, 52, 66, 64, 55, 47, 64, 43, 91, 54, 57, 62, 80, 57, 72, 68, 65, 57, 70, 50, 75, 49, 54, 74, 63, 70, 54, 69, 73, 80, 60, 47, 66, 55, 66, 73, 54, 61, 88, 64, 88, 54, 58, 72, 52, 77, 53, 59, 64, 33, 70, 71, 78, 69, 76, 54, 49, 57, 54, 59, 70, 81, 55, 54, 63, 61, 46, 54, 81, 65, 65, 50, 75, 86, 57, 56, 52, 57, 59, 56, 65, 67, 50, 93, 45, 78, 80, 65, 43, 44, 70, 56, 63, 74, 58, 75, 66, 50, 54, 63, 50, 68, 56, 87, 58, 57, 60, 65, 78, 55, 63, 41, 66, 90, 51, 72, 55, 67, 68, 66, 69, 62, 90, 62, 70, 49, 67, 56, 72, 62, 73, 49, 80, 48, 35, 64, 69, 66, 60, 64, 73, 59, 57, 60, 44, 61, 68, 57, 58, 65, 67, 53, 58, 58, 75, 84, 66, 69, 48, 54, 53, 74, 68, 52, 40, 54, 80, 56, 140, 88, 89, 63, 80, 61, 38, 57, 62, 63, 51, 54, 78, 64, 49, 67, 82, 95, 70, 70, 57, 70, 60, 65, 71, 81, 56, 63, 56, 72, 106, 70, 79, 55, 73, 85, 63, 58, 74, 65, 106, 62, 65, 68, 77, 58, 70, 70, 46, 68, 76, 62, 59, 58, 63, 61, 43, 51, 61, 52, 67, 78, 47, 48, 46, 113, 64, 61, 72, 63, 65, 79, 74, 67, 62, 75, 64, 54, 55, 62, 70, 77, 63, 55, 65, 121, 54, 60, 65, 68, 57, 54, 97, 68, 80, 65, 70, 59, 69, 58, 60, 55, 67, 72, 63, 70, 58, 81, 59, 46, 58, 61, 67, 60, 64, 65, 67, 93, 66, 69, 72, 57, 72, 39, 73, 73, 46, 75, 75, 63, 65, 81, 91, 74, 80, 58, 61, 69, 70, 63, 80, 58, 72, 64, 73, 74, 59, 55, 54, 61, 66, 55, 54, 62, 60, 59, 79, 65, 55, 70, 57, 51, 64, 56, 87, 144, 54, 60, 79, 62, 63, 83, 58, 67, 123, 62, 53, 60, 55, 51, 75, 88, 50, 75, 82, 66, 74, 71, 59, 82, 51, 69, 50, 89, 92, 72, 61, 64, 53, 47, 79, 67, 73, 79, 59, 73, 74, 66, 68, 70, 98, 46, 60, 68, 62, 66, 55, 91, 47, 53, 52, 59, 62, 111, 59, 65, 63, 85, 87, 59, 54, 69, 64, 69, 64, 62, 49, 67, 71, 76, 61, 56, 57, 48, 42, 53, 67, 66, 70, 56, 81, 62, 65, 70, 79, 57, 54, 47, 55, 72, 91, 68, 59, 53, 71, 74, 82, 77, 65, 86, 59, 64, 69, 87, 61, 39, 62, 66, 77, 58, 73, 42, 57, 53, 63, 61, 108, 52, 68, 68, 74, 65, 67, 56, 56, 80, 63, 61, 103, 80, 81, 55, 103, 72, 79, 77, 68, 44, 58, 66, 62, 90, 71, 65, 55, 78, 56, 72, 54, 63, 53, 74, 73, 83, 50, 67, 55, 46, 59, 59, 76, 65, 61, 63, 65, 113, 74, 64, 68, 65, 70, 46, 51, 76, 53, 59, 78, 51, 70, 50, 75, 58, 90, 62, 62, 76, 68, 104, 59, 52, 76, 60, 55, 56, 83, 52, 53, 63, 51, 80, 59, 85, 83, 74, 33, 61, 74, 77, 75, 52, 75, 59, 56, 64, 51, 58, 59, 64, 60, 61, 71, 76, 70, 69, 49, 73, 56, 59, 57, 64, 60, 58, 69, 84, 81, 57, 89, 75, 63, 68, 65, 51, 64, 64, 87, 108, 50, 82, 94, 49, 52, 67, 57, 70, 48, 53, 79, 66, 84, 48, 49, 102, 64, 86, 67, 58, 64, 71, 78, 57, 146, 69, 91, 75, 60, 67, 95, 56, 63, 50, 45, 76, 62, 52, 68, 63, 63, 54, 72, 45, 62, 47, 91, 56, 55, 77, 61, 58, 83, 74, 60, 61, 55, 63, 64, 55, 66, 101, 64, 80, 89, 
65, 49, 65, 61, 67, 63, 71, 75, 59, 56, 77, 83, 58, 48, 58, 58, 56, 74, 71, 64, 42, 49, 67, 55, 69, 74, 84, 44, 58, 60, 85, 64, 55, 72, 57, 73, 59, 83, 46, 88, 88, 61, 57, 82, 80, 61, 75, 62, 90, 79, 55, 91, 84, 42, 78, 83, 92, 65, 58, 67, 80, 56, 63, 52, 54, 70, 78, 54, 92, 50, 87, 80, 66, 80, 45, 57, 67, 64, 57, 50, 57, 87, 63, 53, 82, 76, 75, 64, 82, 53, 69, 88, 53, 51, 58, 51, 63, 60, 60, 53, 57, 58, 60, 44, 58, 65, 45, 62, 53, 44, 65, 73, 73, 50, 60, 58, 90, 74, 78, 115, 75, 59, 41, 72, 62, 63, 68, 69, 60, 55, 75, 58, 52, 67, 63, 63, 82, 61, 61, 58, 66, 70, 62, 58, 66, 65, 68, 66, 75, 61, 47, 66, 76, 90, 74, 71, 59, 58, 109, 72, 71, 81, 68, 74, 52, 52, 71, 51, 74, 44, 61, 79, 79, 55, 66, 57, 64, 58, 52, 76, 84, 44, 60, 79, 46, 59, 44, 57, 56, 56, 70, 59, 85, 68, 47, 77, 63, 62, 85, 66, 78, 89, 54, 75, 67, 81, 65, 69, 51, 69, 59, 75, 94, 69, 76, 49, 82, 32, 67, 62, 55, 53, 59, 71, 81, 74, 50, 55, 58, 59, 61, 80, 73, 62, 54, 69, 62, 71, 60, 67, 81, 61, 58, 70, 71, 56, 117, 60, 73, 57, 77, 47, 45, 100, 89, 63, 77, 69, 64, 69, 68, 50, 43, 51, 68, 48, 46, 54, 76, 60, 119, 59, 58, 61, 52, 66, 62, 99, 58, 53, 55, 106, 59, 48, 69, 52, 53, 46, 55, 54, 84, 81, 72, 79, 51, 72, 64, 53, 88, 62, 62, 42, 52, 48, 46, 63, 78, 56, 83, 70, 75, 64, 50, 66, 57, 79, 57, 60, 85, 53, 54, 81, 40, 59, 50, 45, 58, 77, 67, 81, 54, 44, 60, 60, 50, 47, 61, 115, 57, 71, 70, 101, 87, 59, 68, 55, 65, 76, 73, 79, 47, 62, 74, 54, 66, 73, 82, 59, 46, 82, 81, 56, 81, 63, 66, 74, 58, 56, 53, 57, 60, 68, 113, 69, 69, 74, 43, 70, 60, 43, 93, 67, 68, 83, 82, 43, 56, 66, 35, 92, 79, 54, 56, 69, 58, 68, 55, 61, 57, 103, 93, 56, 47, 68, 59, 55, 75, 68, 81, 50, 68, 79, 55, 71, 82, 68, 59, 65, 71, 47, 84, 81, 56, 95, 82, 64, 78, 97, 59, 68, 55, 54, 95, 65, 80, 62, 59, 67, 58, 65, 42, 60, 65, 90, 44, 68, 69, 102, 73, 56, 56, 53, 62, 47, 67, 63, 63, 44, 88, 56, 80, 67, 63, 61, 68, 53, 76, 50, 47, 83, 67, 70, 69, 67, 73, 68, 62, 75, 68, 68, 77, 53, 78, 82, 64, 80, 74, 54, 74, 61, 48, 53, 63, 79, 54, 37, 70, 75, 64, 61, 61, 92, 57, 86, 94, 66, 56, 56, 69, 70, 54, 68, 74, 57, 58, 77, 61, 57, 65, 51, 57, 63, 68, 73, 78, 49, 94, 79, 110, 59, 52, 45, 70, 65, 78, 73, 49, 60, 42, 66, 45, 45, 92, 141, 105, 68, 30, 54, 63, 72, 55, 66, 75, 73, 67, 63, 61, 57, 71, 68, 85, 33, 48, 59, 79, 75, 61, 82, 77, 57, 71, 94, 64, 122, 75, 64, 44, 85, 54, 60, 58, 105, 62, 86, 56, 39, 76, 56, 59, 78, 71, 74, 71, 50, 63, 71, 61, 68, 51, 56, 63, 54, 61, 91, 60, 61, 78, 50, 145, 52, 60, 50, 57, 60, 60, 70, 69, 82, 61, 54, 69, 71, 68, 77, 81, 82, 55, 79, 119, 69, 56, 51, 67, 51, 58, 65, 81, 56, 53, 53, 61, 52, 59, 68, 72, 54, 55, 66, 52, 68, 92, 50, 69, 56, 90, 65, 50, 98, 75, 89, 63, 68, 51, 59, 59, 78, 82, 56, 75, 62, 85, 52, 57, 70, 46, 44, 73, 56, 51, 58, 87, 57, 59, 66, 69, 92, 99, 65, 79, 80, 64, 67, 67, 59, 79, 58, 61, 56, 60, 70, 82, 74, 108, 52, 66, 71, 75, 80, 125, 55, 71, 75, 80, 45, 46, 70, 60, 88, 71, 59, 71, 63, 53, 65, 71, 66, 66, 65, 66, 59, 56, 70, 53, 60, 55, 69, 54, 93, 81, 62, 60, 70, 64, 72, 61, 70, 73, 58, 63, 91, 77, 51, 80, 47, 63, 72, 70, 49, 78, 61, 110, 63, 56, 75, 72, 79, 146, 79, 58, 60, 43, 77, 59, 85, 54, 79, 54, 68, 71, 66, 74, 59, 64, 61, 64, 67, 59, 86, 60, 61, 74, 74, 68, 66, 76, 70, 64, 71, 49, 64, 63, 74, 65, 72, 52, 58, 68, 71, 55, 69, 94, 29, 60, 62, 74, 51, 58, 71, 66, 58, 99, 61, 77, 68, 57, 82, 63, 73, 62, 111, 87, 68, 63, 51, 59, 71, 52, 42, 54, 50, 53, 98, 104, 82, 70, 72, 48, 90, 88, 63, 70, 61, 77, 85, 99, 81, 63, 61, 88, 58, 56, 86, 88, 43, 51, 86, 82, 103, 73, 82, 65, 83, 49, 91, 48, 61, 45, 62, 109, 76, 58, 42, 
70, 66, 53, 67, 63, 86, 56, 65, 85, 74, 60, 80, 86, 59, 68, 53, 67, 93, 114, 60, 59, 99, 66, 67, 101, 51, 57, 72, 72, 82, 70, 64, 60, 66, 41, 56, 77, 60, 61, 92, 59, 80, 66, 61, 56, 61, 66, 68, 53, 59, 57, 80, 51, 59, 60, 43, 50, 69, 63, 62, 76, 64, 54, 62, 75, 54, 89, 56, 60, 49, 60, 71, 52, 49, 50, 60, 121, 69, 104, 58, 52, 74, 52, 78, 71, 70, 84, 48, 45, 61, 70, 59, 85, 63, 74, 49, 57, 75, 71, 79, 66, 61, 53, 96, 61, 57, 53, 60, 52, 79, 60, 59, 60, 48, 70, 89, 35, 74, 75, 77, 45, 54, 59, 75, 82, 56, 74, 65, 83, 46, 71, 55, 56, 51, 43, 70, 65, 59, 94, 56, 62, 52, 91, 56, 75, 77, 52, 75, 100, 59, 62, 83, 57, 71, 61, 56, 67, 41, 63, 64, 68, 56, 68, 52, 61, 61, 62, 60, 64, 56, 59, 58, 68, 63, 76, 68, 39, 79, 67, 83, 73, 50, 58, 74, 55, 65, 74, 66, 62, 54, 82, 93, 61, 68, 65, 65, 69, 58, 63, 56, 71, 49, 68, 80, 55, 84, 66, 89, 62, 69, 48, 57, 75, 45, 52, 86, 77, 48, 58, 71, 73, 72, 63, 51, 83, 76, 47, 93, 82, 82, 75, 58, 56, 44, 86, 56, 60, 62, 67, 70, 74, 46, 71, 67, 55, 66, 64, 83, 67, 62, 71, 69, 64, 70, 61, 53, 56, 57, 76, 60, 73, 65, 65, 54, 70, 55, 68, 104, 63, 61, 58, 58, 73, 68, 72, 64, 67, 71, 62, 65, 77, 70, 59, 65, 71, 72, 81, 84, 55, 55, 81, 61, 71, 63, 77, 49, 82, 64, 79, 69, 59, 97, 79, 58, 55, 49, 82, 64, 62, 69, 117, 50, 83, 59, 49, 64, 51, 55, 56, 62, 60, 59, 63, 64, 70, 76, 51, 58, 71, 56, 52, 67, 68, 63, 68, 60, 72, 73, 79, 60, 60, 75, 45, 57, 65, 68, 60, 57, 63, 61, 56, 54, 57, 58, 147, 66, 51, 69, 69, 51, 71, 69, 70, 56, 70, 59, 60, 50, 60, 75, 58, 61, 64, 77, 72, 71, 63, 86, 70, 65, 70, 64, 83, 82, 76, 68, 70, 57, 48, 59, 54, 71, 52, 66, 64, 77, 95, 51, 72, 64, 69, 66, 64, 60, 62, 61, 57, 64, 52, 60, 67, 53, 53, 62, 52, 55, 64, 62, 79, 66, 64, 54, 69, 75, 55, 56, 64, 72, 62, 63, 62, 57, 73, 64, 80, 63, 56, 65, 53, 71, 52, 59, 61, 61, 64, 87, 69, 58, 64, 58, 59, 63, 57, 64, 66, 110, 60, 62, 58, 65, 58, 60, 57, 65, 59, 61, 69, 54, 63, 51, 65, 64, 87, 52, 62, 62, 55, 56, 72, 57, 68, 59, 56, 59, 54, 102, 55, 53, 61, 66, 128, 87, 53, 65, 62, 77, 70, 65, 60, 61, 63, 61, 68, 56, 63, 60, 68, 81, 68, 55, 100, 71, 67, 85, 71, 62, 67, 78, 73, 60, 51, 62, 76, 60, 63, 72, 62, 60, 62, 67, 64, 62, 82, 72, 59, 57, 58, 58, 55, 59, 63, 69, 58, 83, 83, 68, 60, 66, 72, 65, 62, 58, 64, 67, 61, 76, 71, 57, 58, 92, 61, 57, 82, 64, 84, 85, 57, 96, 67, 62, 75, 66, 78, 91, 61, 65, 59, 65, 96, 80, 60, 74, 75, 67, 58, 53, 79, 57, 61, 76, 50, 65, 63, 47, 60, 67, 73, 66, 59, 67, 51, 75, 53, 64, 49, 67, 51, 63, 72, 78, 55, 68, 57, 59, 60, 77, 72, 69, 55, 67, 58, 55, 82, 68, 71, 81, 79, 80, 71, 90, 50, 69, 91, 58, 47, 71, 54, 64, 60, 55, 74, 60, 74, 70, 53, 78, 61, 71, 83, 60, 52, 57, 48, 59, 83, 73, 72, 51, 54, 66, 59, 64, 57, 106, 58, 67, 58, 68, 56, 77, 71, 91, 104, 75, 65, 65, 60, 60, 85, 81, 64, 70, 78, 80, 85, 76, 118, 65, 75, 59, 99, 56, 59, 56, 63, 56, 92, 63, 62, 61, 61, 72, 66, 63, 65, 51, 60, 75, 79, 55, 62, 59, 75, 82, 54, 75, 71, 55, 60, 67, 69, 59, 56, 64, 65, 89, 55, 70, 56, 61, 67, 90, 77, 62, 62, 53, 75, 58, 83, 67, 72, 52, 78, 61, 65, 67, 71, 71, 41, 50, 66, 63, 50, 56, 60, 61, 56, 64, 54, 54, 57, 75, 77, 77, 54, 115, 59, 62, 61, 67, 99, 57, 57, 89, 71, 108, 76, 72, 69, 54, 65, 72, 53, 66, 65, 57, 71, 79, 86, 78, 60, 53, 83, 70, 48, 83, 79, 63, 72, 56, 74, 45, 61, 67, 76, 73, 54, 59, 57, 59, 60, 55, 79, 95, 79, 46, 57, 60, 58, 60, 71, 68, 80, 57, 62, 67, 89, 68, 99, 70, 46, 72, 63, 99, 67, 76, 57, 65, 61, 65, 57, 58, 74, 65, 62, 51, 57, 52, 80, 68, 89, 65, 49, 60, 62, 65, 82, 61, 71, 52, 73, 75, 76, 81, 53, 84, 67, 66, 60, 66, 68, 79, 64, 66, 69, 75, 60, 93, 50, 49, 62, 61, 67, 67, 
63, 87, 66, 80, 56, 65, 71, 59, 58, 76, 79, 67, 50, 62, 75, 56, 94, 63, 61, 50, 60, 69, 76, 60, 62, 60, 68, 61, 92, 60, 55, 67, 51, 60, 80, 72, 59, 67, 56, 62, 55, 59, 61, 53, 57, 57, 51, 68, 55, 57, 58, 68, 89, 53, 115, 84, 65, 56, 72, 71, 68, 72, 47, 63, 59, 66, 61, 89, 100, 90, 61, 64, 58, 53, 66, 62, 83, 64, 64, 66, 59, 67, 63, 76, 81, 78, 61, 66, 49, 74, 79, 56, 81, 80, 68, 65, 60, 75, 63, 87, 63, 57, 59, 69, 54, 68, 66, 59, 61, 71, 78, 50, 83, 63, 73, 56, 65, 64, 63, 69, 68, 64, 71, 63, 66, 50, 58, 49, 53, 69, 70, 56, 60, 64, 64, 71, 82, 65, 66, 52, 58, 90, 66, 63, 61, 52, 59, 73, 55, 92, 68, 51, 58, 70, 71, 52, 62, 99, 64, 54, 61, 89, 98, 65, 63, 61, 58, 73, 58, 58, 59, 55, 98, 68, 57, 60, 51, 61, 68, 71, 59, 61, 57, 48, 80, 56, 69, 64, 70, 58, 53, 55, 64, 60, 54, 57, 55, 67, 68, 83, 101, 48, 53, 90, 66, 73, 58, 61, 57, 59, 55, 57, 54, 62, 59, 63, 64, 64, 56, 55, 63, 63, 65, 60, 82, 53, 71, 66, 72, 71, 61, 89, 59, 91, 70, 110, 68, 60, 58, 64, 43, 57, 65, 67, 70, 59, 70, 70, 50, 59, 58, 66, 81, 55, 70, 80, 54, 68, 71, 72, 58, 55, 65, 63, 84, 62, 63, 53, 65, 62, 62, 67, 55, 58, 90, 74, 97, 68, 57, 53, 60, 66, 68, 62, 57, 98, 57, 69, 65, 71, 67, 56, 59, 61, 62, 66, 65, 70, 48, 65, 55, 76, 71, 55, 62, 77, 52, 65, 55, 72, 64, 70, 53, 77, 54, 64, 60, 61, 69, 73, 54, 67, 75, 60, 63, 86, 49, 54, 76, 62, 86, 54, 66, 64, 73, 66, 106, 64, 57, 64, 94, 61, 67, 75, 64, 64, 63, 73, 54, 66, 60, 67, 59, 77, 61, 75, 65, 64, 68, 55, 73, 75, 63, 68, 69, 61, 54, 60, 73, 67, 74, 57, 76, 65, 58, 57, 53, 61, 71, 74, 52, 81, 52, 96, 53, 100, 66, 60, 71, 58, 55, 94, 61, 64, 97, 57, 64, 52, 62, 59, 75, 80, 68, 87, 55, 56, 65, 104, 83, 67, 56, 65, 66, 80, 52, 56, 76, 51, 80, 79, 54, 66, 80, 59, 56, 45, 49, 58, 65, 58, 66, 74, 65, 62, 53, 69, 103, 62, 72, 48, 85, 81, 66, 73, 57, 61, 83, 51, 70, 67, 76, 58, 68, 73, 51, 67, 47, 60, 64, 50, 58, 99, 74, 79, 88, 108, 78, 76, 45, 44, 84, 57, 117, 57, 88, 58, 53, 74, 39, 66, 97, 61, 44, 58, 56, 72, 58, 49, 69, 67, 58, 74, 60, 66, 104, 50, 59, 63, 58, 73, 76, 66, 61, 60, 60, 63, 57, 76, 46, 68, 74, 54, 57, 74, 55, 79, 51, 57, 70, 80, 65, 58, 64, 59, 68, 62, 65, 53, 60, 71, 53, 71, 49, 75, 55, 79, 85, 61, 55, 77, 76, 74, 87, 56, 62, 67, 66, 61, 63, 51, 67, 86, 100, 58, 62, 53, 69, 64, 63, 43, 54, 63, 56, 75, 60, 65, 54, 68, 49, 62, 56, 41, 58, 78, 82, 63, 63, 73, 67, 78, 49, 49, 67, 48, 55, 82, 69, 57, 58, 49, 50, 59, 71, 74, 68, 56, 52, 74, 55, 71, 58, 46, 72, 80, 49, 55, 51, 110, 86, 54, 91, 81, 79, 73, 47, 60, 74, 56, 69, 100, 51, 52, 73, 63, 53, 70, 83, 68, 83, 67, 93, 75, 57, 56, 52, 64, 56, 73, 56, 64, 43, 83, 52, 64, 65, 53, 55, 68, 58, 53, 102, 49, 62, 81, 81, 68, 65, 61, 58, 57, 90, 48, 91, 68, 76, 69, 53, 60, 71, 89, 51, 61, 87, 65, 52, 60, 70, 75, 94, 51, 75, 53, 56, 52, 74, 62, 53, 50, 61, 52, 56, 75, 68, 84, 72, 51, 57, 80, 73, 99, 65, 44, 68, 66, 84, 64, 67, 37, 70, 56, 74, 78, 44, 84, 48, 56, 63, 66, 65, 66, 66, 57, 53, 56, 66, 92, 69, 81, 55, 91, 64, 69, 82, 66, 54, 56, 54, 56, 54, 47, 58, 56, 50, 53, 49, 47, 59, 61, 84, 61, 72, 81, 38, 71, 40, 107, 59, 59, 55, 52, 82, 41, 69, 52, 67, 41, 97, 89, 80, 55, 79, 72, 54, 52, 63, 76, 58, 52, 71, 49, 41, 51, 75, 58, 77, 59, 75, 63, 80, 54, 63, 59, 82, 58, 68, 86, 63, 75, 66, 59, 59, 79, 56, 61, 52, 50, 65, 51, 74, 65, 82, 85, 80, 73, 52, 64, 53, 61, 60, 47, 58, 46, 71, 117, 63, 84, 152, 62, 61, 48, 123, 60, 57, 67, 58, 57, 88, 44, 77, 70, 45, 43, 99, 67, 52, 58, 48, 59, 85, 71, 72, 73, 53, 65, 66, 75, 66, 54, 69, 70, 59, 58, 57, 60, 55, 88, 99, 72, 65, 75, 53, 68, 59, 81, 49, 60, 70, 52, 79, 43, 64, 54, 73, 63, 
70, 70, 70, 89, 77, 59, 101, 75, 68, 69, 55, 47, 56, 53, 68, 59, 56, 53, 65, 78, 70, 73, 56, 74, 50, 69, 107, 55, 56, 89, 78, 68, 57, 57, 63, 63, 66, 73, 81, 49, 54, 75, 69, 77, 70, 59, 48, 56, 84, 58, 70, 81, 89, 54, 70, 74, 50, 68, 98, 78, 52, 62, 49, 60, 71, 47, 74, 53, 46, 60, 90, 83, 58, 97, 71, 72, 63, 84, 57, 57, 109, 42, 81, 68, 113, 61, 62, 63, 69, 66, 62, 56, 63, 92, 65, 72, 57, 103, 73, 61, 77, 60, 64, 88, 67, 76, 83, 67, 88, 86, 74, 87, 58, 57, 123, 94, 67, 53, 65, 69, 75, 65, 64, 59, 64, 54, 76, 84, 56, 52, 65, 62, 67, 75, 46, 89, 47, 57, 76, 97, 69, 54, 73, 71, 62, 55, 69, 70, 43, 50, 63, 79, 99, 54, 54, 76, 73, 60, 55, 76, 55, 49, 68, 74, 106, 59, 64, 66, 97, 64, 64, 90, 112, 70, 57, 73, 50, 61, 58, 63, 52, 61, 56, 50, 78, 74, 64, 63, 69, 62, 61, 67, 91, 59, 64, 69, 53, 46, 62, 68, 71, 68, 67, 70, 48, 81, 71, 48, 117, 56, 53, 58, 48, 59, 47, 60, 55, 55, 83, 78, 71, 62, 73, 55, 45, 74, 63, 69, 58, 67, 64, 62, 64, 49, 54, 90, 57, 62, 75, 110, 71, 64, 54, 48, 56, 60, 64, 55, 75, 57, 105, 49, 66, 67, 82, 72, 61, 60, 60, 53, 62, 78, 58, 42, 63, 54, 52, 60, 101, 57, 64, 61, 57, 56, 59, 66, 75, 57, 62, 60, 73, 49, 66, 68, 103, 69, 77, 63, 41, 103, 52, 69, 66, 72, 58, 68, 58, 55, 44, 61, 62, 74, 70, 65, 53, 44, 55, 90, 67, 58, 54, 70, 70, 71, 60, 57, 47, 97, 86, 56, 74, 49, 59, 70, 89, 91, 65, 73, 40, 73, 63, 86, 72, 57, 64, 53, 52, 71, 62, 92, 66, 48, 49, 66, 83, 72, 70, 68, 50, 45, 68, 66, 55, 68, 57, 84, 117, 69, 64, 76, 52, 72, 76, 61, 68, 57, 56, 76, 66, 71, 60, 62, 60, 54, 64, 53, 76, 70, 56, 58, 63, 74, 53, 63, 54, 56, 69, 75, 107, 54, 82, 58, 52, 145, 71, 60, 56, 65, 54, 72, 55, 55, 51, 87, 59, 47, 65, 49, 67, 74, 50, 71, 61, 54, 63, 53, 55, 82, 71, 54, 64, 67, 100, 60, 56, 94, 65, 51, 74, 66, 83, 80, 45, 64, 49, 57, 62, 55, 74, 55, 75, 86, 67, 79, 37, 65, 53, 65, 62, 67, 63, 58, 52, 52, 62, 65, 59, 60, 77, 83, 59, 71, 54, 58, 66, 57, 51, 56, 55, 53, 66, 54, 58, 54, 52, 50, 54, 80, 63, 55, 91, 53, 55, 58, 60, 74, 51, 70, 61, 56, 68, 60, 55, 70, 69, 56, 54, 59, 56, 66, 81, 67, 59, 54, 59, 72, 58, 57, 67, 59, 40, 60, 55, 66, 53, 60, 70, 61, 63, 59, 55, 32, 59, 54, 64, 77, 77, 119, 35, 109, 68, 67, 47, 55, 80, 73, 78, 122, 62, 141, 56, 101, 64, 58, 87, 46, 75, 52, 56, 58, 78, 68, 57, 76, 70, 63, 55, 70, 58, 73, 102, 60, 63, 56, 75, 64, 51, 62, 71, 55, 57, 57, 55, 80, 85, 54, 55, 80, 64, 63, 68, 59, 59, 68, 85, 60, 57, 52, 69, 49, 56, 79, 103, 50, 59, 62, 52, 79, 56, 74, 60, 74, 72, 101, 72, 64, 59, 79, 76, 56, 74, 57, 62, 65, 112, 49, 58, 58, 61, 62, 58, 49, 100, 52, 82, 57, 66, 72, 58, 64, 130, 59, 96, 66, 52, 108, 67, 53, 69, 76, 59, 61, 82, 74, 68, 54, 71, 73, 51, 67, 75, 62, 61, 56, 56, 55, 55, 69, 79, 69, 63, 66, 59, 61, 84, 41, 59, 49, 42, 89, 47, 60, 59, 77, 47, 81, 68, 69, 35, 52, 63, 49, 75, 68, 73, 86, 84, 52, 111, 94, 86, 103, 58, 42, 72, 66, 50, 58, 54, 47, 70, 54, 72, 61, 60, 57, 69, 67, 56, 93, 53, 97, 80, 75, 101, 63, 74, 71, 80, 59, 72, 78, 62, 84, 48, 58, 56, 77, 83, 49, 59, 53, 64, 62, 65, 73, 52, 67, 60, 73, 74, 63, 42, 62, 72, 67, 78, 135, 71, 73, 46, 72, 81, 76, 64, 71, 50, 61, 63, 80, 73, 55, 60, 34, 47, 69, 61, 86, 58, 65, 63, 66, 91, 51, 58, 60, 83, 73, 79, 73, 44, 85, 73, 51, 67, 50, 73, 89, 72, 110, 73, 69, 101, 76, 67, 83, 55, 68, 53, 83, 50, 72, 43, 69, 77, 94, 52, 72, 70, 59, 82, 51, 59, 72, 53, 81, 58, 70, 84, 118, 53, 77, 63, 75, 52, 61, 66, 57, 65, 62, 86, 63, 70, 60, 97, 68, 60, 47, 67, 31, 80, 70, 52, 100, 72, 90, 58, 59, 56, 75, 71, 46, 75, 29, 78, 81, 58, 63, 60, 98, 58, 78, 66, 69, 72, 52, 90, 47, 86, 51, 61, 62, 63, 54, 68, 69, 56, 90, 78, 
81, 54, 64, 51, 82, 50, 45, 86, 62, 64, 40, 78, 63, 83, 49, 51, 63, 73, 53, 51, 62, 112, 58, 72, 65, 66, 58, 52, 78, 71, 53, 82, 75, 73, 75, 80, 54, 63, 97, 45, 58, 63, 77, 65, 59, 54, 76, 63, 64, 57, 57, 61, 69, 58, 118, 57, 67, 72, 66, 74, 66, 85, 63, 68, 79, 50, 78, 50, 61, 54, 72, 77, 52, 87, 69, 51, 60, 69, 65, 61, 71, 58, 53, 48, 57, 67, 78, 59, 83, 84, 67, 69, 71, 53, 69, 46, 77, 47, 56, 66, 47, 50, 87, 46, 70, 64, 58, 58, 63, 63, 64, 63, 65, 129, 64, 47, 60, 63, 79, 47, 63, 58, 64, 53, 68, 74, 60, 55, 65, 68, 79, 80, 93, 59, 73, 81, 54, 49, 56, 64, 71, 57, 87, 83, 78, 71, 39, 58, 69, 52, 78, 54, 81, 64, 55, 60, 74, 50, 60, 61, 65, 69, 41, 67, 51, 74, 70, 57, 69, 69, 73, 70, 74, 64, 56, 83, 55, 74, 58, 54, 51, 76, 80, 60, 63, 73, 64, 64, 67, 54, 63, 57, 61, 84, 60, 71, 70, 74, 51, 47, 123, 77, 71, 54, 87, 77, 61, 52, 60, 59, 51, 64, 72, 62, 63, 57, 62, 51, 63, 60, 70, 81, 41, 45, 60, 83, 60, 57, 65, 53, 50, 51, 84, 47, 52, 83, 60, 88, 79, 62, 57, 72, 64, 64, 84, 49, 60, 69, 78, 69, 51, 60, 67, 54, 72, 60, 80, 73, 68, 57, 64, 57, 47, 88, 40, 51, 97, 71, 75, 84, 65, 69, 85, 94, 60, 57, 47, 56, 54, 115, 79, 57, 74, 79, 65, 74, 67, 60, 82, 77, 36, 60, 58, 64, 49, 60, 50, 70, 89, 64, 38, 56, 75, 68, 103, 91, 60, 57, 46, 55, 50, 67, 62, 70, 73, 63, 74, 65, 56, 62, 62, 67, 56, 65, 77, 66, 51, 44, 70, 74, 69, 61, 61, 68, 70, 93, 81, 45, 59, 54, 56, 64, 65, 59, 69, 81, 56, 70, 69, 58, 50, 63, 69, 75, 87, 51, 80, 89, 61, 87, 60, 58, 58, 37, 78, 96, 76, 92, 77, 61, 68, 64, 69, 65, 74, 85, 85, 52, 51, 91, 77, 68, 65, 67, 53, 96, 55, 78, 72, 39, 76, 55, 64, 62, 56, 48, 76, 66, 61, 35, 66, 74, 74, 41, 53, 78, 53, 79, 60, 74, 63, 58, 52, 93, 53, 66, 82, 60, 50, 74, 47, 54, 75, 72, 67, 67, 52, 62, 75, 84, 59, 56, 52, 60, 58, 62, 66, 53, 54, 47, 125, 52, 79, 52, 67, 80, 68, 63, 61, 51, 61, 79, 63, 78, 82, 46, 87, 64, 82, 66, 72, 57, 54, 59, 60, 61, 73, 53, 49, 57, 62, 52, 60, 99, 70, 56, 72, 79, 70, 57, 86, 53, 56, 81, 68, 59, 59, 57, 66, 35, 56, 64, 62, 58, 64, 77, 56, 78, 68, 81, 82, 55, 66, 73, 53, 54, 106, 84, 79, 95, 75, 50, 50, 59, 75, 80, 61, 62, 46, 80, 66, 96, 52, 84, 76, 67, 72, 44, 96, 64, 54, 62, 72, 62, 47, 53, 48, 61, 61, 62, 70, 64, 41, 65, 78, 67, 45, 58, 84, 48, 56, 69, 68, 85, 64, 41, 89, 54, 54, 57, 65, 89, 44, 86, 38, 70, 82, 58, 66, 85, 58, 85, 67, 53, 52, 69, 59, 53, 56, 89, 63, 74, 71, 57, 68, 53, 61, 61, 62, 71, 73, 80, 65, 67, 71, 56, 89, 48, 51, 163, 89, 60, 81, 54, 60, 85, 51, 68, 65, 65, 51, 47, 42, 57, 49, 60, 49, 69, 67, 51, 70, 53, 62, 75, 43, 80, 69, 85, 79, 52, 75, 71, 80, 70, 64, 64, 84, 58, 63, 52, 55, 86, 65, 73, 57, 62, 59, 84, 71, 90, 65, 58, 65, 65, 51, 63, 50, 98, 61, 81, 64, 56, 63, 71, 58, 84, 112, 69, 54, 70, 60, 46, 74, 63, 62, 52, 49, 60, 68, 38, 52, 53, 71, 92, 61, 58, 56, 58, 53, 85, 86, 59, 71, 105, 82, 49, 54, 49, 64, 60, 62, 57, 59, 57, 56, 51, 129, 64, 62, 61, 66, 50, 71, 63, 73, 52, 80, 67, 76, 64, 54, 63, 66, 73, 62, 64, 62, 59, 83, 83, 58, 54, 115, 73, 73, 85, 49, 93, 49, 65, 63, 78, 99, 54, 79, 64, 62, 68, 58, 73, 68, 60, 63, 112, 103, 75, 58, 55, 86, 57, 90, 68, 59, 106, 69, 67, 63, 93, 63, 77, 66, 64, 73, 71, 57, 54, 87, 58, 65, 51, 61, 64, 58, 65, 59, 52, 54, 84, 76, 73, 52, 54, 65, 79, 63, 59, 54, 79, 64, 77, 52, 57, 69, 47, 69, 60, 80, 67, 75, 63, 63, 63, 62, 60, 54, 65, 91, 54, 65, 60, 85, 77, 76, 59, 55, 60, 84, 69, 56, 38, 52, 59, 99, 56, 65, 61, 98, 72, 92, 59, 44, 71, 65, 50, 61, 59, 56, 129, 63, 84, 77, 67, 67, 70, 83, 68, 91, 56, 72, 82, 74, 57, 43, 65, 73, 50, 48, 50, 51, 53, 78, 71, 65, 68, 53, 51, 71, 68, 58, 70, 47, 69, 102, 60, 
58, 62, 34, 77, 112, 74, 94, 90, 74, 62, 58, 75, 56, 70, 97, 69, 44, 56, 70, 72, 70, 77, 85, 75, 65, 79, 90, 46, 47, 66, 47, 59, 54, 74, 101, 59, 61, 67, 73, 60, 50, 57, 62, 74, 72, 70, 57, 68, 60, 55, 67, 58, 62, 70, 56, 55, 60, 75, 110, 63, 70, 65, 65, 71, 70, 70, 64, 55, 69, 73, 64, 62, 69, 79, 44, 62, 54, 52, 62, 63, 54, 69, 61, 96, 70, 78, 64, 58, 55, 58, 92, 67, 121, 51, 62, 70, 70, 91, 50, 71, 64, 55, 59, 51, 49, 57, 63, 50, 64, 46, 49, 108, 55, 56, 82, 61, 61, 75, 48, 65, 65, 93, 81, 89, 61, 72, 48, 85, 60, 67, 55, 58, 57, 54, 56, 75, 81, 62, 56, 58, 44, 95, 63, 54, 72, 79, 59, 73, 66, 72, 65, 45, 62, 90, 80, 65, 85, 64, 81, 71, 118, 61, 73, 53, 51, 64, 55, 43, 66, 86, 56, 51, 72, 61, 58, 57, 39, 50, 53, 60, 64, 53, 83, 63, 75, 108, 62, 62, 56, 68, 52, 69, 73, 58, 63, 56, 70, 74, 84, 65, 65, 63, 78, 63, 58, 59, 68, 61, 51, 63, 59, 58, 54, 53, 49, 54, 70, 69, 58, 46, 65, 61, 69, 78, 64, 62, 47, 67, 57, 78, 55, 74, 74, 68, 59, 95, 53, 61, 58, 84, 72, 72, 55, 59, 84, 90, 25, 70, 47, 40, 49, 90, 81, 61, 60, 58, 73, 68, 53, 82, 63, 95, 69, 58, 56, 73, 66, 66, 52, 55, 104, 65, 62, 55, 63, 46, 69, 61, 74, 68, 60, 72, 56, 61, 91, 57, 75, 55, 47, 55, 66, 57, 41, 78, 44, 63, 57, 65, 72, 48, 62, 61, 44, 66, 65, 38, 58, 84, 61, 66, 77, 85, 63, 46, 59, 64, 53, 87, 48, 47, 62, 49, 87, 59, 63, 82, 56, 46, 66, 77, 68, 76, 93, 82, 64, 46, 73, 56, 65, 66, 62, 59, 64, 56, 58, 71, 93, 87, 73, 63, 67, 51, 61, 83, 84, 45, 56, 52, 106, 65, 128, 72, 64, 63, 50, 69, 64, 64, 53, 59, 82, 43, 70, 51, 85, 74, 64, 65, 53, 52, 62, 59, 84, 55, 59, 60, 60, 70, 70, 62, 77, 84, 29, 69, 77, 62, 87, 65, 48, 63, 70, 76, 75, 77, 65, 38, 62, 40, 69, 64, 63, 62, 71, 58, 102, 72, 74, 81, 53, 77, 63, 55, 81, 81, 77, 58, 63, 51, 51, 56, 91, 50, 57, 70, 91, 71, 53, 51, 77, 70, 61, 68, 63, 63, 55, 109, 54, 67, 52, 58, 72, 68, 55, 87, 71, 86, 61, 108, 80, 65, 68, 61, 54, 54, 69, 64, 69, 64, 72, 59, 50, 65, 84, 50, 60, 80, 82, 60, 39, 62, 87, 79, 73, 62, 84, 53, 57, 62, 57, 59, 68, 60, 46, 70, 62, 70, 69, 54, 54, 67, 66, 64, 55, 45, 57, 69, 49, 64, 57, 53, 52, 74, 125, 40, 79, 67, 79, 56, 71, 59, 79, 55, 57, 59, 60, 65, 89, 73, 87, 41, 79, 45, 61, 67, 69, 72, 68, 49, 41, 65, 60, 63, 53, 56, 104, 109, 71, 57, 69, 57, 55, 75, 52, 50, 75, 49, 69, 44, 88, 93, 72, 71, 76, 59, 74, 69, 88, 64, 37, 49, 65, 69, 62, 65, 82, 53, 65, 76, 63, 66, 44, 55, 73, 70, 77, 71, 85, 61, 63, 57, 60, 57, 78, 71, 66, 65, 65, 75, 57, 34, 72, 54, 76, 53, 97, 105, 83, 76, 55, 61, 55, 59, 53, 76, 73, 104, 44, 68, 66, 70, 58, 70, 64, 66, 71, 65, 61, 52, 53, 56, 105, 51, 59, 82, 48, 80, 52, 72, 60, 63, 80, 61, 58, 50, 79, 100, 63, 90, 67, 51, 74, 69, 65, 85, 51, 58, 65, 63, 76, 92, 60, 66, 68, 57, 48, 87, 62, 61, 71, 40, 55, 74, 59, 48, 58, 50, 68, 58, 66, 73, 78, 57, 85, 67, 61, 53, 41, 73, 69, 74, 73, 63, 53, 62, 96, 69, 47, 51, 77, 63, 57, 52, 40, 58, 52, 75, 62, 53, 67, 55, 65, 59, 80, 62, 65, 67, 55, 43, 77, 47, 69, 46, 63, 57, 63, 55, 56, 75, 71, 79, 48, 125, 52, 81, 79, 61, 89, 46, 76, 71, 71, 64, 52, 74, 75, 64, 56, 67, 58, 49, 59, 81, 83, 56, 70, 46, 73, 61, 68, 104, 78, 63, 54, 70, 82, 69, 74, 60, 79, 77, 61, 70, 62, 69, 65, 54, 62, 66, 81, 42, 38, 64, 69, 68, 62, 58, 39, 113, 88, 54, 56, 69, 66, 85, 47, 63, 43, 59, 57, 62, 65, 64, 62, 65, 61, 56, 100, 56, 61, 72, 62, 59, 63, 60, 50, 58, 71, 70, 81, 71, 58, 53, 71, 79, 73, 64, 68, 64, 62, 58, 41, 64, 71, 132, 75, 75, 62, 75, 66, 72, 90, 63, 55, 68, 60, 72, 90, 68, 80, 96, 52, 51, 49, 63, 40, 105, 61, 93, 75, 64, 64, 69, 43, 70, 59, 63, 35, 61, 37, 59, 64, 60, 65, 68, 72, 66, 64, 61, 67, 72, 73, 74, 
43, 48, 58, 93, 77, 57, 56, 55, 48, 73, 55, 57, 84, 79, 47, 59, 66, 62, 58, 59, 75, 65, 75, 55, 57, 63, 69, 74, 42, 57, 70, 59, 56, 80, 59, 63, 45, 59, 66, 55, 43, 82, 79, 61, 57, 47, 77, 58, 52, 103, 76, 57, 62, 67, 58, 69, 50, 79, 68, 62, 62, 70, 57, 88, 65, 57, 69, 51, 41, 54, 75, 80, 71, 89, 65, 56, 55, 56, 50, 47, 55, 94, 79, 61, 54, 60, 68, 53, 62, 83, 118, 57, 55, 65, 55, 61, 91, 61, 99, 73, 84, 62, 65, 68, 74, 82, 66, 63, 114, 54, 51, 67, 75, 68, 63, 54, 53, 67, 53, 63, 50, 48, 56, 44, 87, 65, 63, 61, 67, 87, 66, 58, 60, 60, 66, 45, 57, 51, 61, 62, 87, 62, 71, 48, 53, 88, 68, 62, 90, 89, 62, 58, 71, 69, 67, 70, 59, 89, 47, 67, 68, 58, 72, 56, 107, 68, 61, 72, 65, 64, 62, 63, 73, 72, 63, 64, 46, 63, 77, 98, 72, 66, 50, 60, 81, 52, 64, 59, 60, 67, 89, 57, 56, 82, 54, 71, 64, 47, 53, 80, 83, 65, 63, 71, 72, 65, 76, 67, 60, 69, 59, 71, 62, 65, 57, 53, 71, 71, 43, 75, 72, 71, 40, 78, 70, 66, 68, 65, 63, 75, 59, 68, 104, 62, 57, 57, 58, 43, 60, 63, 81, 88, 60, 53, 66, 55, 64, 64, 64, 64, 64, 120, 77, 86, 60, 64, 61, 73, 61, 73, 52, 94, 47, 83, 55, 71, 56, 59, 52, 54, 57, 60, 51, 58, 71, 68, 71, 67, 69, 89, 59, 73, 72, 76, 59, 75, 78, 47, 46, 94, 69, 46, 67, 81, 55, 60, 61, 89, 56, 65, 52, 76, 76, 50, 91, 73, 86, 60, 83, 58, 58, 77, 56, 62, 55, 53, 56, 62, 59, 63, 45, 66, 77, 57, 50, 76, 73, 49, 73, 56, 53, 73, 68, 59, 69, 62, 65, 69, 58, 58, 76, 52, 58, 62, 53, 61, 70, 72, 46, 67, 65, 69, 67, 59, 70, 77, 67, 69, 51, 56, 71, 70, 52, 70, 76, 66, 54, 57, 54, 49, 74, 72, 53, 69, 58, 53, 57, 135, 44, 61, 72, 59, 79, 55, 64, 65, 59, 66, 75, 73, 54, 98, 49, 66, 81, 66, 80, 67, 53, 61, 54, 87, 51, 54, 77, 57, 66, 70, 58, 62, 75, 83, 66, 69, 55, 47, 54, 57, 66, 58, 64, 47, 55, 84, 58, 74, 66, 60, 63, 81, 92, 50, 65, 76, 70, 74, 58, 75, 79, 96, 118, 49, 59, 69, 48, 70, 54, 95, 84, 60, 58, 55, 72, 69, 61, 62, 48, 57, 60, 52, 84, 62, 86, 67, 67, 70, 59, 51, 55, 60, 58, 59, 49, 59, 39, 72, 61, 63, 77, 73, 67, 64, 77, 70, 59, 54, 59, 66, 64, 102, 61, 54, 67, 90, 64, 60, 77, 56, 72, 64, 49, 65, 99, 65, 60, 93, 71, 74, 67, 63, 70, 62, 63, 59, 93, 40, 66, 70, 51, 60, 84, 58, 54, 84, 54, 56, 59, 53, 58, 45, 61, 71, 43, 58, 61, 61, 68, 69, 63, 69, 77, 68, 61, 51, 86, 53, 76, 67, 84, 73, 69, 69, 65, 78, 79, 67, 69, 68, 74, 64, 60, 78, 62, 51, 58, 54, 51, 79, 55, 67, 77, 66, 61, 92, 55, 72, 68, 49, 62, 51, 55, 67, 65, 70, 53, 72, 70, 130, 62, 69, 62, 54, 56, 42, 56, 79, 52, 81, 67, 45, 83, 66, 56, 74, 55, 71, 84, 50, 58, 67, 62, 66, 35, 57, 65, 84, 56, 59, 48, 58, 39, 77, 65, 91, 75, 83, 77, 79, 49, 72, 61, 64, 84, 66, 75, 84, 58, 63, 81, 74, 59, 76, 68, 67, 65, 61, 47, 77, 73, 62, 67, 53, 53, 60, 57, 69, 62, 63, 76, 57, 74, 57, 78, 136, 58, 71, 64, 52, 97, 63, 63, 47, 63, 64, 51, 54, 76, 62, 58, 59, 80, 67, 62, 81, 76, 62, 62, 50, 98, 63, 50, 76, 56, 65, 64, 64, 60, 67, 80, 45, 50, 86, 63, 183, 58, 71, 50, 75, 76, 61, 65, 64, 73, 56, 50, 67, 66, 64, 36, 97, 52, 67, 60, 51, 58, 61, 70, 62, 53, 58, 60, 53, 57, 47, 47, 88, 75, 67, 90, 71, 78, 112, 115, 61, 59, 61, 106, 88, 67, 64, 54, 57, 61, 72, 65, 72, 63, 66, 76, 92, 57, 52, 68, 69, 93, 74, 69, 58, 73, 55, 67, 34, 44, 74, 80, 54, 82, 73, 62, 80, 72, 77, 61, 72, 53, 46, 62, 62, 46, 77, 60, 58, 84, 45, 93, 50, 33, 75, 83, 58, 64, 69, 54, 54, 64, 58, 70, 50, 71, 54, 46, 67, 73, 65, 63, 76, 73, 61, 65, 71, 55, 78, 58, 57, 93, 78, 54, 63, 70, 35, 71, 64, 62, 48, 59, 47, 50, 78, 73, 58, 61, 60, 73, 94, 62, 80, 76, 77, 115, 73, 49, 72, 65, 52, 53, 53, 81, 69, 82, 57, 70, 56, 77, 84, 53, 67, 59, 64, 72, 78, 53, 67, 85, 132, 68, 73, 50, 88, 64, 70, 57, 61, 46, 67, 
54, 75, 54, 76, 55, 61, 57, 93, 65, 65, 53, 42, 74, 80, 60, 73, 78, 58, 57, 162, 63, 87, 61, 59, 69, 58, 57, 56, 59, 52, 46, 63, 65, 99, 71, 63, 80, 57, 65, 55, 66, 55, 78, 45, 53, 107, 61, 74, 38, 104, 88, 48, 53, 49, 47, 58, 84, 92, 60, 70, 58, 66, 68, 78, 54, 106, 111, 62, 57, 49, 78, 56, 50, 75, 74, 90, 52, 69, 70, 71, 119, 70, 63, 58, 61, 72, 77, 57, 47, 53, 64, 80, 67, 62, 75, 55, 68, 69, 58, 57, 73, 54, 72, 73, 62, 80, 85, 58, 65, 61, 72, 52, 66, 57, 60, 69, 66, 65, 73, 81, 56, 66, 55, 53, 90, 73, 60, 59, 55, 49, 69, 80, 60, 61, 55, 61, 61, 71, 69, 78, 84, 71, 57, 52, 46, 73, 64, 63, 73, 71, 66, 55, 67, 59, 57, 77, 58, 72, 57, 75, 56, 49, 65, 62, 68, 54, 65, 68, 61, 44, 79, 53, 91, 88, 61, 66, 65, 74, 72, 73, 59, 98, 77, 68, 58, 101, 59, 59, 50, 50, 62, 52, 85, 68, 64, 73, 49, 48, 76, 89, 55, 65, 68, 70, 72, 41, 70, 75, 54, 66, 63, 64, 74, 38, 57, 39, 54, 68, 74, 62, 65, 58, 72, 58, 76, 49, 56, 68, 78, 64, 53, 76, 68, 78, 54, 75, 55, 60, 79, 54, 50, 91, 65, 56, 60, 82, 67, 74, 65, 54, 61, 57, 61, 74, 53, 64, 54, 72, 57, 57, 56, 46, 76, 66, 43, 59, 81, 52, 61, 70, 52, 54, 65, 55, 77, 51, 92, 66, 76, 75, 81, 71, 80, 60, 85, 71, 69, 76, 68, 56, 62, 64, 72, 98, 69, 75, 50, 85, 46, 81, 80, 47, 70, 49, 51, 47, 61, 58, 57, 48, 70, 98, 66, 53, 62, 69, 81, 55, 56, 62, 68, 37, 54, 67, 54, 74, 72, 62, 63, 40, 66, 70, 76, 69, 50, 88, 89, 62, 52, 66, 76, 79, 60, 69, 45, 84, 67, 60, 85, 64, 50, 47, 68, 63, 59, 61, 58, 48, 62, 78, 77, 46, 47, 65, 66, 62, 67, 64, 95, 62, 59, 46, 70, 63, 80, 62, 64, 97, 70, 93, 45, 55, 92, 71, 73, 68, 80, 66, 63, 73, 70, 52, 84, 70, 58, 66, 65, 66, 49, 69, 54, 50, 66, 55, 55, 72, 80, 66, 59, 52, 53, 71, 59, 79, 43, 71, 51, 80, 94, 79, 39, 60, 76, 55, 88, 71, 54, 55, 56, 72, 52, 75, 67, 81, 51, 54, 55, 54, 90, 60, 49, 41, 66, 65, 84, 74, 78, 52, 105, 83, 52, 76, 66, 62, 49, 90, 66, 53, 73, 56, 86, 68, 69, 78, 101, 56, 62, 66, 69, 60, 61, 121, 73, 59, 65, 68, 53, 55, 69, 63, 49, 64, 58, 63, 65, 73, 104, 66, 68, 68, 60, 53, 46, 74, 52, 99, 72, 70, 95, 58, 51, 67, 70, 53, 69, 80, 58, 62, 81, 56, 60, 73, 55, 47, 76, 62, 60, 80, 88, 65, 58, 72, 78, 54, 56, 74, 79, 81, 74, 83, 44, 69, 56, 53, 81, 68, 69, 54, 56, 70, 58, 77, 60, 44, 79, 72, 53, 62, 90, 70, 68, 50, 54, 75, 101, 94, 99, 64, 63, 59, 55, 43, 57, 62, 51, 79, 71, 66, 54, 55, 74, 87, 66, 54, 67, 49, 70, 73, 74, 70, 44, 76, 68, 53, 56, 81, 67, 53, 78, 69, 74, 83, 53, 53, 61, 60, 46, 71, 71, 69, 58, 70, 51, 81, 58, 49, 64, 74, 113, 94, 107, 61, 82, 87, 65, 67, 80, 68, 67, 46, 65, 66, 81, 81, 74, 75, 64, 49, 59, 74, 56, 63, 80, 77, 62, 67, 86, 65, 95, 69, 74, 61, 76, 62, 50, 69, 69, 56, 64, 54, 66, 54, 67, 54, 67, 64, 85, 79, 70, 59, 81, 65, 55, 80, 81, 59, 92, 55, 80, 52, 67, 52, 89, 71, 121, 64, 78, 55, 65, 58, 68, 70, 66, 107, 59, 56, 83, 58, 65, 56, 71, 64, 69, 62, 44, 74, 53, 92, 67, 65, 71, 72, 56, 55, 67, 35, 71, 59, 94, 60, 57, 57, 58, 60, 85, 69, 80, 62, 85, 78, 79, 79, 52, 83, 67, 46, 75, 46, 71, 53, 56, 74, 59, 91, 60, 98, 66, 53, 86, 52, 70, 74, 56, 86, 65, 60, 126, 82, 78, 80, 81, 70, 82, 77, 56, 69, 72, 63, 75, 60, 58, 66, 73, 70, 64, 77, 58, 42, 56, 64, 68, 73, 78, 87, 70, 56, 73, 46, 59, 74, 47, 69, 53, 47, 73, 73, 53, 94, 70, 46, 80, 40, 58, 93, 108, 64, 55, 81, 61, 76, 58, 107, 67, 79, 37, 60, 56, 73, 60, 48, 51, 78, 69, 74, 54, 72, 50, 87, 47, 84, 62, 58, 119, 63, 81, 69, 62, 76, 52, 71, 51, 62, 66, 77, 36, 106, 68, 56, 67, 68, 45, 45, 52, 70, 94, 83, 62, 72, 52, 77, 59, 95, 61, 65, 30, 122, 57, 50, 85, 57, 62, 63, 57, 51, 62, 72, 56, 56, 74, 68, 82, 56, 73, 130, 50, 72, 60, 66, 79, 67, 61, 71, 56, 
63, 44, 64, 77, 86, 87, 88, 54, 56, 87, 69, 59, 121, 74, 60, 50, 47, 66, 66, 67, 39, 43, 67, 68, 62, 96, 48, 52, 47, 42, 68, 62, 81, 47, 43, 74, 56, 73, 60, 43, 61, 64, 68, 55, 69, 56, 55, 70, 53, 60, 89, 58, 63, 80, 48, 38, 51, 57, 69, 71, 44, 59, 62, 56, 64, 81, 67, 72, 88, 62, 84, 95, 71, 61, 62, 69, 55, 68, 49, 42, 55, 105, 61, 51, 81, 54, 57, 48, 101, 55, 82, 68, 91, 63, 65, 73, 51, 67, 63, 48, 77, 48, 50, 66, 78, 45, 63, 99, 57, 55, 71, 38, 51, 58, 58, 59, 90, 76, 49, 57, 72, 58, 60, 88, 71, 65, 67, 99, 52, 71, 81, 75, 67, 84, 82, 53, 83, 72, 46, 47, 64, 54, 53, 68, 65, 56, 63, 64, 85, 49, 65, 44, 61, 52, 78, 29, 52, 76, 73, 63, 68, 77, 65, 51, 46, 63, 51, 79, 60, 69, 65, 66, 63, 44, 63, 66, 63, 53, 53, 66, 58, 73, 72, 69, 52, 50, 58, 56, 43, 80, 59, 45, 78, 59, 51, 46, 44, 67, 66, 62, 51, 46, 57, 77, 56, 64, 71, 69, 72, 62, 91, 59, 84, 81, 48, 61, 96, 65, 30, 58, 65, 89, 53, 118, 81, 38, 66, 61, 67, 79, 77, 60, 50, 58, 58, 47, 67, 59, 59, 56, 66, 55, 62, 69, 70, 55, 62, 99, 47, 60, 81, 64, 66, 59, 82, 54, 61, 75, 93, 56, 64, 66, 55, 104, 57, 55, 50, 48, 58, 101, 73, 59, 65, 59, 66, 68, 92, 54, 64, 56, 85, 61, 77, 75, 86, 63, 53, 59, 62, 80, 61, 60, 62, 68, 71, 122, 33, 51, 88, 68, 76, 40, 67, 49, 64, 51, 55, 100, 54, 64, 53, 73, 58, 87, 59, 67, 65, 72, 95, 63, 60, 71, 84, 106, 45, 87, 53, 61, 48, 66, 50, 47, 87, 60, 56, 65, 53, 47, 58, 50, 97, 54, 55, 83, 61, 48, 106, 63, 75, 116, 90, 65, 78, 63, 59, 68, 61, 58, 114, 49, 54, 88, 79, 47, 50, 85, 83, 63, 59, 50, 88, 43, 67, 57, 45, 78, 65, 57, 67, 62, 48, 76, 43, 108, 53, 71, 81, 75, 54, 58, 95, 82, 95, 44, 36, 55, 56, 55, 56, 64, 70, 70, 85, 67, 111, 60, 47, 52, 56, 83, 51, 40, 87, 57, 90, 57, 67, 81, 56, 59, 89, 61, 85, 68, 68, 73, 41, 63, 88, 63, 51, 83, 70, 69, 90, 67, 50, 72, 35, 77, 71, 70, 48, 62, 51, 58, 75, 81, 64, 101, 68, 71, 68, 73, 64, 44, 65, 79, 43, 56, 83, 40, 94, 61, 56, 74, 54, 67, 65, 64, 108, 54, 74, 64, 67, 63, 66, 76, 67, 43, 60, 99, 53, 70, 70, 71, 65, 85, 58, 64, 51, 67, 53, 69, 55, 102, 60, 65, 61, 68, 52, 44, 63, 73, 53, 61, 72, 69, 66, 95, 63, 88, 43, 57, 86, 53, 65, 54, 47, 69, 69, 99, 69, 60, 66, 49, 84, 62, 59, 56, 57, 42, 78, 59, 49, 45, 60, 68, 53, 61, 72, 89, 49, 45, 76, 61, 72, 49, 41, 44, 78, 70, 77, 77, 67, 51, 68, 81, 62, 99, 95, 98, 63, 66, 77, 59, 63, 67, 54, 79, 53, 64, 78, 68, 54, 68, 73, 109, 69, 57, 63, 60, 42, 64, 57, 51, 45, 70, 80, 69, 76, 66, 55, 69, 46, 58, 70, 63, 63, 43, 40, 59, 76, 75, 67, 68, 44, 74, 76, 55, 80, 82, 79, 66, 61, 74, 65, 69, 73, 66, 82, 66, 77, 88, 63, 61, 65, 74, 69, 59, 66, 59, 63, 71, 57, 52, 64, 56, 68, 59, 55, 50, 88, 66, 82, 59, 82, 72, 44, 59, 55, 62, 83, 66, 68, 65, 80, 54, 75, 56, 50, 65, 96, 66, 60, 60, 54, 80, 69, 59, 59, 46, 66, 74, 80, 79, 85, 93, 65, 58, 62, 63, 84, 65, 66, 40, 39, 64, 60, 48, 53, 53, 70, 59, 94, 77, 88, 58, 93, 67, 63, 73, 68, 50, 76, 106, 71, 59, 86, 38, 72, 45, 68, 58, 53, 54, 61, 100, 61, 94, 75, 38, 86, 66, 73, 64, 63, 50, 66, 64, 65, 75, 56, 98, 74, 69, 81, 94, 56, 55, 66, 70, 51, 85, 50, 62, 77, 75, 51, 67, 58, 63, 74, 64, 86, 87, 50, 60, 81, 100, 36, 60, 66, 54, 54, 70, 84, 79, 57, 60, 70, 67, 39, 87, 63, 62, 73, 78, 98, 64, 60, 59, 73, 50, 67, 48, 58, 74, 53, 50, 84, 75, 55, 80, 57, 63, 71, 82, 62, 88, 85, 68, 58, 66, 59, 47, 46, 63, 61, 43, 65, 46, 82, 73, 51, 69, 87, 63, 47, 58, 55, 81, 64, 75, 61, 79, 91, 52, 58, 55, 61, 50, 83, 62, 53, 46, 86, 69, 89, 78, 63, 49, 55, 50, 69, 65, 46, 94, 40, 70, 79, 71, 51, 74, 86, 62, 57, 67, 53, 58, 62, 77, 59, 90, 63, 55, 97, 62, 68, 61, 58, 73, 68, 86, 48, 90, 76, 47, 58, 74, 69, 71, 35, 
64, 88, 61, 51, 64, 49, 72, 81, 57, 54, 48, 56, 51, 56, 60, 60, 55, 72, 51, 88, 52, 67, 72, 52, 74, 56, 69, 78, 65, 48, 61, 49, 68, 54, 55, 57, 59, 54, 44, 62, 65, 79, 73, 49, 59, 75, 59, 77, 68, 69, 72, 52, 57, 71, 68, 108, 70, 72, 58, 62, 39, 82, 38, 59, 70, 64, 71, 61, 86, 41, 69, 70, 51, 105, 71, 69, 58, 83, 93, 62, 62, 66, 52, 38, 66, 79, 59, 107, 68, 71, 55, 64, 116, 86, 66, 73, 66, 98, 47, 54, 46, 57, 47, 74, 55, 80, 72, 68, 97, 90, 36, 95, 53, 72, 68, 84, 81, 59, 67, 39, 76, 59, 47, 54, 74, 57, 45, 64, 46, 91, 63, 88, 66, 66, 67, 75, 71, 86, 73, 77, 87, 53, 86, 59, 51, 59, 63, 75, 49, 89, 81, 55, 77, 66, 63, 54, 59, 52, 70, 65, 59, 69, 78, 45, 50, 75, 37, 73, 32, 89, 57, 107, 96, 72, 59, 52, 50, 78, 68, 51, 66, 63, 73, 59, 76, 73, 65, 66, 63, 69, 66, 73, 39, 60, 52, 73, 53, 83, 46, 71, 70, 50, 63, 69, 48, 82, 45, 67, 73, 49, 87, 71, 71, 68, 42, 51, 50, 62, 86, 82, 50, 90, 44, 77, 65, 88, 74, 76, 48, 43, 50, 73, 90, 75, 53, 82, 62, 67, 66, 83, 89, 54, 65, 50, 66, 61, 75, 61, 48, 61, 56, 54, 51, 58, 61, 66, 48, 87, 56, 54, 50, 66, 59, 59, 91, 58, 57, 126, 79, 66, 41, 47, 65, 63, 77, 52, 56, 48, 93, 64, 64, 70, 75, 55, 63, 54, 60, 64, 73, 68, 60, 81, 78, 73, 90, 78, 46, 59, 55, 75, 56, 63, 58, 64, 72, 44, 70, 44, 73, 71, 72, 58, 59, 66, 73, 71, 97, 59, 55, 59, 71, 57, 43, 82, 65, 53, 69, 59, 69, 109, 142, 67, 77, 103, 66, 64, 97, 69, 57, 98, 53, 56, 53, 57, 58, 67, 55, 52, 48, 59, 58, 91, 60, 61, 54, 67, 82, 80, 77, 58, 73, 53, 61, 76, 53, 46, 72, 58, 56, 51, 72, 45, 67, 60, 90, 56, 58, 71, 68, 76, 74, 97, 51, 67, 83, 68, 57, 51, 92, 59, 58, 76, 81, 52, 72, 83, 51, 59, 58, 86, 74, 72, 57, 54, 78, 57, 95, 71, 54, 47, 70, 54, 79, 63, 51, 62, 54, 53, 55, 75, 65, 73, 56, 73, 89, 68, 62, 57, 81, 42, 65, 45, 52, 87, 72, 62, 69, 104, 65, 50, 57, 62, 70, 52, 61, 69, 63, 58, 56, 40, 73, 61, 40, 52, 56, 58, 83, 55, 33, 93, 84, 42, 70, 61, 81, 64, 94, 68, 67, 57, 54, 73, 71, 98, 40, 59, 69, 66, 75, 56, 66, 35, 39, 41, 58, 70, 50, 41, 52, 70, 82, 64, 62, 69, 72, 70, 64, 108, 87, 71, 53, 51, 48, 68, 77, 50, 59, 86, 57, 75, 43, 66, 51, 53, 43, 61, 45, 66, 72, 65, 82, 77, 60, 84, 76, 68, 105, 91, 56, 65, 64, 75, 64, 112, 54, 65, 68, 78, 45, 64, 50, 69, 109, 81, 97, 56, 36, 83, 52, 66, 86, 65, 54, 79, 97, 58, 85, 59, 34, 93, 75, 61, 66, 57, 52, 70, 68, 79, 50, 67, 55, 72, 65, 40, 62, 40, 62, 63, 66, 53, 68, 71, 97, 42, 128, 73, 90, 60, 49, 57, 53, 104, 72, 75, 72, 65, 43, 35, 58, 48, 64, 50, 59, 47, 49, 59, 76, 55, 52, 62, 59, 87, 50, 64, 39, 51, 59, 66, 58, 47, 59, 75, 49, 43, 93, 83, 72, 70, 51, 67, 63, 75, 69, 56, 57, 63, 67, 107, 45, 67, 54, 80, 69, 74, 70, 58, 66, 53, 82, 58, 61, 66, 55, 96, 58, 70, 43, 83, 66, 61, 70, 74, 67, 64, 57, 72, 60, 66, 57, 41, 51, 87, 48, 88, 62, 54, 75, 45, 35, 57, 62, 80, 43, 68, 55, 65, 72, 73, 64, 56, 71, 73, 65, 83, 68, 76, 76, 88, 56, 65, 67, 64, 75, 53, 56, 61, 70, 44, 74, 85, 64, 53, 126, 56, 64, 49, 63, 66, 37, 59, 73, 60, 60, 64, 88, 60, 69, 44, 88, 105, 77, 51, 59, 60, 75, 58, 64, 97, 59, 76, 115, 58, 72, 50, 73, 46, 54, 78, 88, 53, 46, 77, 55, 59, 93, 40, 75, 81, 66, 87, 50, 57, 65, 73, 52, 62, 63, 62, 45, 52, 56, 60, 62, 73, 83, 54, 90, 73, 104, 68, 97, 65, 63, 69, 79, 82, 73, 47, 62, 66, 50, 47, 96, 73, 81, 50, 69, 92, 48, 56, 87, 65, 71, 70, 66, 41, 57, 66, 54, 70, 63, 51, 57, 64, 70, 67, 74, 75, 54, 58, 72, 67, 49, 75, 55, 64, 49, 86, 68, 75, 68, 82, 80, 58, 69, 48, 68, 45, 62, 84, 47, 66, 58, 90, 58, 65, 81, 56, 59, 88, 42, 56, 50, 55, 90, 51, 66, 45, 54, 50, 75, 80, 71, 78, 63, 104, 52, 62, 66, 63, 50, 73, 54, 50, 65, 67, 37, 94, 57, 130, 75, 68, 84, 
57, 54, 58, 78, 94, 84, 58, 89, 67, 84, 100, 57, 51, 84, 54, 62, 95, 55, 54, 79, 57, 97, 65, 73, 63, 76, 68, 61, 61, 71, 69, 64, 62, 93, 59, 59, 150, 54, 66, 68, 74, 84, 45, 69, 56, 82, 38, 60, 56, 40, 104, 62, 94, 87, 67, 61, 50, 74, 46, 53, 48, 59, 49, 40, 62, 53, 69, 49, 64, 63, 63, 88, 76, 58, 65, 74, 87, 64, 49, 71, 49, 56, 72, 53, 46, 76, 94, 86, 77, 70, 56, 66, 76, 110, 58, 58, 51, 73, 69, 61, 61, 59, 70, 55, 47, 62, 63, 89, 49, 58, 77, 76, 66, 62, 54, 66, 98, 64, 56, 65, 80, 54, 71, 72, 56, 52, 56, 40, 62, 78, 89, 70, 65, 36, 75, 74, 79, 54, 71, 50, 81, 39, 54, 65, 66, 66, 59, 77, 76, 82, 67, 63, 56, 60, 92, 78, 69, 44, 56, 57, 63, 54, 64, 58, 61, 62, 49, 57, 66, 44, 89, 61, 67, 120, 79, 74, 72, 72, 70, 62, 49, 71, 56, 71, 86, 51, 64, 48, 60, 83, 71, 61, 85, 68, 58, 55, 77, 82, 63, 68, 120, 86, 57, 94, 68, 60, 57, 65, 56, 79, 78, 83, 62, 69, 50, 57, 43, 63, 71, 75, 85, 74, 50, 60, 59, 49, 75, 37, 94, 79, 64, 59, 37, 66, 87, 84, 49, 82, 64, 67, 83, 85, 71, 64, 67, 63, 47, 59, 60, 56, 55, 61, 55, 44, 57, 66, 76, 58, 80, 77, 71, 65, 68, 63, 77, 81, 86, 54, 69, 89, 60, 74, 65, 80, 82, 66, 70, 72, 75, 59, 46, 63, 57, 47, 69, 58, 63, 63, 46, 70, 74, 79, 62, 64, 61, 76, 66, 49, 86, 61, 60, 55, 68, 73, 95, 106, 55, 53, 62, 61, 87, 50, 79, 76, 72, 62, 54, 61, 55, 48, 62, 62, 106, 65, 66, 57, 101, 49, 56, 76, 56, 114, 65, 134, 96, 51, 54, 54, 50, 61, 70, 65, 49, 53, 39, 69, 71, 82, 40, 88, 104, 82, 107, 75, 63, 78, 63, 83, 51, 57, 62, 95, 83, 65, 73, 52, 57, 63, 64, 72, 87, 56, 74, 56, 51, 74, 56, 70, 83, 75, 69, 65, 56, 53, 53, 57, 64, 66, 78, 76, 59, 62, 73, 85, 54, 39, 75, 62, 75, 68, 62, 70, 77, 49, 60, 66, 69, 70, 51, 60, 53, 61, 62, 61, 77, 76, 69, 53, 73, 72, 52, 71, 69, 65, 66, 63, 55, 61, 119, 53, 76, 57, 63, 72, 82, 71, 59, 77, 60, 52, 50, 52, 56, 80, 54, 62, 56, 94, 67, 65, 62, 53, 54, 60, 101, 72, 69, 49, 63, 66, 54, 73, 53, 68, 57, 64, 68, 62, 76, 87, 62, 79, 39, 79, 57, 58, 55, 84, 61, 46, 74, 79, 69, 85, 64, 68, 58, 58, 50, 69, 69, 59, 59, 63, 54, 67, 128, 48, 57, 54, 50, 77, 59, 83, 62, 73, 63, 80, 76, 55, 58, 52, 68, 79, 51, 103, 53, 66, 57, 66, 56, 91, 76, 41, 166, 63, 48, 55, 61, 60, 89, 57, 62, 64, 46, 63, 59, 57, 75, 76, 45, 52, 60, 133, 54, 49, 71, 77, 86, 67, 59, 57, 66, 85, 62, 46, 53, 71, 51, 72, 69, 56, 54, 87, 62, 64, 61, 54, 43, 56, 54, 52, 75, 63, 74, 72, 70, 57, 72, 45, 65, 70, 52, 76, 63, 71, 55, 47, 84, 57, 68, 54, 57, 64, 108, 76, 82, 55, 60, 64, 140, 50, 90, 68, 55, 67, 55, 54, 70, 63, 59, 50, 84, 74, 48, 51, 56, 71, 58, 66, 63, 61, 77, 57, 66, 72, 48, 91, 86, 59, 48, 62, 69, 75, 81, 71, 52, 65, 61, 35, 74, 50, 67, 83, 72, 41, 77, 70, 40, 39, 55, 71, 60, 71, 54, 85, 70, 79, 68, 68, 61, 54, 62, 55, 54, 61, 87, 63, 76, 56, 61, 61, 56, 57, 67, 68, 58, 63, 52, 89, 55, 60, 74, 55, 54, 51, 87, 62, 69, 54, 68, 60, 65, 59, 49, 72, 73, 60, 54, 73, 67, 60, 50, 77, 85, 72, 74, 58, 57, 74, 50, 49, 71, 46, 78, 43, 59, 59, 66, 62, 65, 70, 64, 64, 61, 72, 77, 76, 58, 87, 55, 108, 66, 66, 74, 63, 71, 62, 40, 87, 105, 80, 53, 79, 61, 59, 64, 72, 93, 60, 64, 59, 61, 69, 57, 62, 72, 89, 51, 62, 59, 62, 56, 57, 76, 70, 86, 54, 65, 80, 48, 63, 71, 77, 55, 49, 66, 72, 41, 77, 60, 68, 35, 71, 81, 78, 60, 73, 68, 59, 65, 63, 57, 68, 76, 72, 58, 70, 52, 54, 56, 70, 54, 69, 67, 48, 59, 68, 71, 85, 87, 60, 49, 72, 55, 49, 81, 68, 87, 73, 62, 70, 70, 66, 71, 75, 57, 99, 76, 78, 44, 56, 80, 76, 64, 63, 84, 69, 84, 64, 38, 57, 64, 66, 48, 71, 72, 52, 59, 62, 53, 59, 71, 66, 68, 53, 62, 55, 72, 69, 59, 81, 68, 86, 62, 54, 101, 62, 91, 99, 65, 94, 64, 86, 70, 87, 50, 62, 51, 60, 69, 44, 
69, 81, 79, 81, 68, 48, 73, 74, 82, 85, 63, 57, 45, 82, 68, 64, 68, 68, 82, 65, 62, 82, 79, 68, 58, 68, 53, 67, 66, 48, 55, 101, 61, 85, 62, 57, 69, 50, 51, 65, 60, 63, 56, 80, 62, 63, 50, 61, 82, 92, 58, 63, 57, 54, 43, 76, 98, 57, 79, 66, 55, 63, 63, 62, 61, 67, 47, 77, 74, 60, 57, 74, 50, 69, 75, 77, 88, 46, 65, 62, 69, 59, 58, 78, 123, 62, 79, 46, 61, 113, 76, 51, 89, 79, 49, 69, 61, 55, 61, 88, 83, 52, 65, 89, 81, 95, 61, 60, 65, 84, 66, 66, 61, 74, 76, 58, 73, 75, 66, 68, 59, 75, 44, 68, 59, 65, 44, 54, 127, 69, 97, 66, 58, 89, 58, 72, 104, 57, 63, 71, 45, 81, 64, 57, 42, 100, 129, 56, 53, 67, 67, 68, 109, 74, 67, 86, 57, 64, 71, 59, 53, 55, 53, 67, 64, 105, 43, 67, 72, 57, 65, 66, 40, 68, 70, 62, 76, 53, 64, 55, 65, 89, 51, 64, 63, 71, 56, 68, 52, 82, 62, 68, 66, 57, 78, 51, 62, 63, 53, 47, 75, 46, 76, 45, 47, 84, 63, 69, 66, 58, 52, 82, 61, 70, 77, 105, 103, 60, 57, 55, 54, 159, 56, 57, 64, 69, 47, 48, 102, 63, 79, 63, 49, 60, 75, 32, 74, 65, 71, 71, 77, 64, 44, 68, 71, 76, 66, 71, 51, 73, 67, 52, 58, 101, 61, 88, 55, 80, 128, 71, 50, 64, 58, 80, 49, 64, 62, 54, 66, 79, 64, 81, 68, 77, 68, 45, 71, 58, 71, 79, 82, 44, 47, 47, 72, 69, 69, 42, 62, 60, 56, 42, 37, 73, 73, 79, 59, 56, 94, 77, 52, 43, 78, 58, 55, 54, 83, 69, 90, 70, 68, 84, 55, 53, 65, 64, 53, 65, 69, 101, 61, 75, 69, 70, 105, 38, 59, 82, 60, 59, 99, 52, 65, 67, 64, 67, 69, 88, 69, 56, 58, 52, 59, 78, 57, 52, 76, 66, 62, 63, 66, 71, 67, 80, 99, 56, 50, 81, 55, 95, 45, 76, 62, 104, 54, 59, 72, 73, 86, 62, 67, 62, 55, 82, 64, 70, 76, 53, 66, 75, 63, 80, 58, 70, 87, 80, 54, 82, 49, 66, 51, 63, 45, 61, 55, 52, 53, 58, 49, 55, 60, 38, 80, 82, 74, 67, 94, 71, 59, 43, 73, 54, 64, 93, 69, 100, 49, 63, 66, 50, 81, 69, 61, 49, 76, 77, 89, 73, 82, 82, 60, 65, 95, 54, 48, 50, 64, 105, 54, 79, 60, 64, 126, 65, 65, 88, 68, 83, 60, 42, 113, 53, 47, 53, 60, 49, 55, 43, 64, 67, 64, 77, 59, 64, 57, 79, 62, 56, 80, 58, 72, 81, 57, 65, 71, 83, 65, 98, 63, 48, 64, 80, 57, 64, 56, 117, 74, 62, 91, 83, 65, 75, 65, 78, 54, 81, 93, 83, 75, 59, 73, 60, 56, 57, 71, 61, 57, 85, 76, 51, 71, 64, 49, 67, 70, 82, 43, 50, 50, 69, 59, 69, 58, 83, 60, 60, 73, 94, 65, 59, 74, 71, 61, 51, 44, 56, 86, 45, 87, 94, 70, 70, 63, 63, 56, 58, 56, 61, 77, 71, 60, 77, 97, 61, 50, 58, 51, 70, 58, 78, 80, 64, 60, 55, 94, 42, 48, 72, 51, 84, 62, 54, 62, 86, 57, 71, 53, 68, 68, 39, 97, 64, 72, 70, 34, 73, 61, 57, 84, 48, 58, 56, 47, 66, 69, 73, 51, 57, 51, 70, 65, 91, 84, 80, 54, 60, 56, 64, 55, 52, 54, 85, 54, 68, 118, 49, 51, 86, 63, 87, 44, 66, 51, 107, 82, 81, 110, 61, 39, 66, 64, 48, 67, 54, 54, 65, 69, 68, 98, 62, 81, 46, 65, 63, 56, 91, 61, 71, 64, 71, 69, 51, 67, 96, 56, 63, 67, 69, 46, 70, 74, 95, 51, 71, 69, 75, 63, 48, 69, 37, 67, 76, 79, 62, 52, 59, 57, 75, 63, 38, 65, 80, 64, 52, 84, 52, 53, 68, 89, 79, 61, 75, 81, 62, 70, 76, 55, 67, 70, 86, 58, 64, 51, 53, 61, 46, 56, 67, 68, 77, 79, 43, 51, 66, 47, 50, 40, 62, 47, 55, 52, 80, 72, 93, 58, 80, 88, 35, 64, 73, 52, 57, 89, 49, 60, 65, 96, 70, 48, 54, 46, 61, 54, 59, 60, 61, 45, 75, 56, 60, 56, 84, 43, 94, 49, 54, 83, 79, 67, 52, 73, 82, 54, 74, 61, 60, 66, 82, 73, 75, 69, 47, 49, 66, 49, 78, 67, 53, 60, 97, 66, 51, 70, 63, 74, 73, 98, 109, 90, 58, 48, 51, 71, 42, 82, 69, 49, 84, 50, 83, 59, 40, 101, 69, 61, 68, 68, 104, 73, 62, 54, 67, 60, 63, 83, 50, 35, 54, 69, 106, 80, 66, 58, 43, 83, 65, 71, 60, 91, 44, 79, 64, 63, 80, 62, 81, 60, 70, 35, 60, 53, 47, 77, 72, 41, 64, 57, 67, 79, 66, 65, 48, 51, 73, 76, 48, 82, 79, 41, 57, 55, 86, 60, 100, 82, 68, 38, 57, 47, 61, 51, 62, 54, 65, 53, 76, 67, 103, 64, 46, 
56, 50, 47, 67, 67, 61, 81, 54, 83, 75, 65, 57, 92, 74, 83, 80, 83, 55, 80, 85, 86, 51, 64, 51, 106, 96, 68, 57, 40, 76, 74, 92, 81, 49, 32, 51, 57, 70, 60, 71, 54, 80, 38, 50, 60, 64, 55, 55, 38, 52, 42, 66, 58, 79, 73, 63, 83, 67, 62, 61, 65, 64, 93, 96, 76, 71, 59, 64, 43, 66, 60, 48, 51, 80, 73, 70, 59, 63, 49, 85, 69, 52, 64, 89, 75, 77, 72, 90, 52, 69, 58, 51, 82, 68, 75, 43, 75, 63, 52, 63, 58, 44, 62, 80, 64, 50, 67, 64, 92, 73, 72, 65, 64, 44, 53, 73, 61, 53, 57, 56, 64, 82, 33, 60, 60, 58, 43, 67, 88, 68, 79, 44, 52, 40, 58, 74, 59, 57, 43, 47, 60, 49, 87, 62, 49, 39, 92, 56, 60, 79, 64, 56, 58, 44, 75, 52, 69, 58, 42, 68, 66, 58, 84, 58, 71, 60, 65, 89, 84, 88, 101, 83, 66, 44, 39, 66, 85, 63, 106, 83, 66, 92, 100, 70, 63, 58, 109, 75, 46, 76, 66, 55, 75, 83, 55, 75, 55, 60, 71, 59, 69, 40, 64, 63, 80, 48, 80, 62, 54, 87, 58, 74, 67, 45, 58, 74, 65, 89, 53, 51, 54, 64, 59, 70, 56, 43, 62, 51, 55, 59, 61, 78, 82, 62, 83, 83, 58, 75, 58, 86, 72, 60, 95, 49, 74, 73, 63, 91, 56, 59, 44, 60, 64, 60, 63, 32, 62, 54, 52, 64, 75, 51, 72, 68, 71, 66, 95, 50, 79, 68, 64, 60, 55, 74, 31, 72, 80, 66, 50, 62, 64, 57, 54, 52, 53, 113, 85, 52, 55, 74, 79, 61, 40, 50, 56, 88, 65, 71, 71, 67, 82, 55, 60, 70, 68, 84, 87, 58, 50, 58, 59, 67, 62, 61, 58, 48, 66, 68, 64, 135, 65, 64, 61, 61, 89, 54, 77, 116, 72, 70, 61, 71, 70, 55, 56, 121, 61, 56, 123, 89, 71, 51, 64, 88, 66, 51, 60, 42, 71, 44, 77, 58, 44, 62, 97, 72, 107, 91, 81, 71, 59, 78, 71, 73, 84, 68, 81, 59, 46, 59, 48, 87, 51, 89, 84, 50, 91, 70, 52, 67, 70, 82, 89, 65, 98, 34, 69, 63, 56, 60, 66, 59, 94, 76, 71, 55, 60, 57, 56, 73, 66, 76, 54, 48, 50, 68, 61, 76, 71, 64, 59, 113, 58, 74, 94, 52, 73, 79, 62, 47, 75, 49, 48, 54, 69, 54, 74, 78, 78, 58, 57, 75, 61, 81, 38, 64, 51, 66, 82, 72, 55, 81, 79, 52, 72, 72, 77, 32, 61, 44, 46, 70, 67, 47, 74, 44, 72, 51, 49, 82, 59, 67, 59, 88, 53, 54, 78, 53, 62, 61, 66, 63, 102, 79, 58, 67, 82, 64, 66, 71, 66, 52, 63, 45, 59, 60, 74, 72, 65, 74, 45, 72, 72, 52, 65, 55, 126, 92, 64, 83, 64, 62, 73, 63, 57, 63, 64, 71, 55, 79, 61, 86, 66, 46, 42, 55, 50, 62, 64, 62, 66, 57, 59, 35, 74, 56, 77, 64, 67, 55, 51, 71, 90, 62, 61, 43, 54, 87, 56, 75, 65, 52, 71, 52, 80, 42, 56, 57, 64, 48, 78, 57, 69, 47, 47, 64, 49, 53, 53, 84, 82, 66, 68, 44, 102, 58, 59, 53, 63, 38, 52, 103, 53, 63, 69, 60, 82, 57, 77, 87, 70, 74, 56, 72, 51, 64, 50, 50, 56, 59, 68, 70, 74, 73, 65, 42, 69, 57, 61, 85, 56, 64, 87, 59, 82, 78, 48, 65, 49, 56, 77, 69, 47, 67, 61, 38, 48, 57, 79, 65, 85, 42, 60, 53, 62, 73, 69, 51, 38, 85, 51, 66, 56, 72, 65, 56, 59, 66, 65, 68, 68, 76, 78, 80, 57, 92, 64, 66, 89, 95, 97, 57, 69, 55, 47, 52, 66, 73, 73, 67, 60, 72, 56, 61, 61, 37, 46, 75, 63, 54, 64, 84, 64, 53, 60, 81, 86, 67, 77, 63, 68, 58, 43, 58, 61, 56, 51, 60, 70, 66, 67, 26, 82, 54, 63, 47, 76, 64, 50, 64, 66, 77, 80, 54, 70, 63, 58, 98, 60, 61, 67, 76, 64, 40, 55, 62, 43, 56, 75, 67, 46, 77, 91, 70, 44, 50, 75, 56, 62, 103, 52, 58, 53, 53, 48, 65, 50, 50, 44, 76, 74, 47, 82, 71, 58, 52, 73, 86, 51, 59, 61, 59, 63, 70, 57, 86, 63, 53, 66, 63, 61, 57, 46, 57, 54, 56, 88, 65, 76, 62, 84, 69, 93, 45, 43, 52, 63, 54, 88, 55, 97, 81, 86, 46, 66, 75, 156, 113, 62, 57, 52, 85, 78, 76, 122, 70, 61, 60, 51, 59, 68, 51, 49, 61, 56, 56, 92, 67, 74, 76, 72, 73, 114, 60, 64, 76, 88, 73, 65, 55, 66, 78, 77, 88, 57, 60, 59, 83, 72, 68, 76, 61, 66, 77, 47, 59, 62, 61, 63, 71, 54, 67, 59, 51, 58, 63, 99, 80, 60, 53, 63, 66, 45, 46, 55, 58, 79, 70, 45, 58, 72, 60, 63, 57, 79, 97, 104, 48, 57, 68, 59, 39, 44, 62, 52, 71, 66, 67, 58, 62, 90, 80, 52, 
54, 50, 58, 55, 68, 116, 78, 64, 61, 61, 53, 73, 75, 72, 58, 55, 46, 68, 73, 74, 61, 63, 48, 82, 57, 57, 57, 59, 58, 49, 60, 61, 60, 104, 51, 56, 65, 69, 69, 67, 69, 62, 77, 79, 56, 66, 49, 52, 54, 109, 57, 68, 63, 69, 67, 62, 60, 59, 66, 58, 55, 67, 69, 103, 52, 70, 50, 104, 47, 66, 77, 68, 44, 60, 58, 36, 63, 82, 130, 55, 88, 75, 69, 56, 58, 50, 65, 63, 79, 95, 62, 82, 69, 60, 83, 62, 94, 92, 56, 80, 61, 63, 61, 73, 66, 79, 53, 52, 78, 71, 58, 89, 78, 50, 52, 74, 59, 64, 79, 75, 51, 45, 100, 92, 77, 26, 66, 83, 59, 63, 55, 96, 66, 69, 69, 56, 73, 78, 41, 55, 54, 66, 68, 82, 77, 54, 58, 68, 61, 74, 77, 58, 92, 63, 61, 74, 58, 78, 76, 51, 47, 68, 88, 63, 59, 60, 77, 61, 60, 85, 61, 68, 84, 128, 56, 74, 81, 56, 58, 90, 70, 74, 71, 61, 103, 78, 67, 69, 70, 88, 85, 96, 79, 52, 74, 75, 43, 75, 93, 64, 73, 80, 61, 47, 78, 62, 60, 83, 61, 73, 102, 70, 44, 77, 50, 71, 62, 75, 48, 57, 80, 80, 60, 64, 51, 78, 63, 72, 55, 69, 69, 46, 55, 48, 96, 69, 56, 68, 79, 93, 83, 68, 54, 87, 58, 62, 85, 68, 65, 45, 72, 47, 67, 75, 52, 61, 61, 51, 89, 58, 55, 52, 60, 63, 62, 56, 55, 66, 71, 91, 59, 66, 73, 72, 63, 82, 41, 109, 80, 81, 60, 62, 69, 56, 84, 63, 53, 61, 80, 62, 61, 95, 72, 51, 65, 67, 78, 94, 66, 72, 173, 71, 51, 48, 82, 68, 78, 49, 155, 62, 73, 73, 65, 74, 73, 68, 61, 57, 66, 71, 68, 114, 58, 55, 95, 62, 72, 72, 64, 70, 93, 57, 54, 70, 90, 72, 101, 61, 56, 65, 62, 46, 80, 73, 56, 83, 66, 55, 53, 50, 44, 53, 30, 64, 80, 77, 50, 49, 52, 92, 54, 55, 82, 46, 62, 64, 65, 44, 55, 57, 62, 71, 53, 81, 88, 65, 70, 77, 64, 54, 96, 54, 64, 53, 97, 106, 66, 77, 45, 40, 72, 105, 67, 97, 50, 63, 78, 77, 74, 46, 63, 74, 46, 44, 59, 82, 64, 56, 57, 62, 58, 66, 50, 59, 59, 75, 72, 52, 50, 67, 39, 63, 47, 60, 37, 55, 57, 53, 63, 38, 56, 54, 41, 84, 62, 70, 131, 85, 67, 75, 80, 51, 68, 61, 69, 76, 73, 62, 76, 73, 53, 65, 76, 56, 54, 61, 50, 49, 75, 50, 81, 96, 72, 62, 72, 59, 67, 87, 84, 76, 75, 60, 76, 83, 77, 53, 108, 52, 65, 67, 74, 98, 49, 55, 46, 87, 61, 47, 77, 75, 67, 74, 51, 67, 59, 66, 87, 52, 75, 63, 150, 63, 66, 79, 58, 60, 72, 101, 58, 57, 63, 64, 55, 96, 52, 71, 62, 110, 79, 62, 69, 65, 47, 72, 63, 67, 56, 124, 75, 62, 57, 63, 60, 70, 44, 61, 56, 52, 83, 82, 60, 60, 41, 44, 53, 67, 63, 94, 46, 66, 60, 63, 72, 43, 87, 60, 76, 53, 67, 57, 55, 56, 79, 73, 66, 58, 71, 65, 71, 80, 51, 74, 57, 60, 53, 61, 58, 65, 61, 69, 54, 86, 63, 49, 62, 58, 75, 64, 83, 69, 82, 83, 54, 52, 58, 69, 47, 71, 68, 53, 63, 79, 74, 62, 88, 49, 65, 61, 79, 51, 83, 62, 64, 66, 77, 69, 81, 68, 52, 56, 78, 57, 46, 87, 63, 63, 78, 71, 69, 71, 75, 50, 49, 67, 89, 69, 71, 63, 51, 55, 60, 50, 69, 47, 87, 56, 81, 58, 78, 107, 41, 63, 68, 59, 82, 129, 88, 80, 62, 59, 52, 58, 70, 63, 88, 58, 64, 70, 61, 63, 71, 69, 57, 68, 49, 57, 89, 46, 82, 73, 60, 66, 71, 53, 61, 42, 108, 79, 53, 67, 73, 65, 58, 77, 44, 55, 101, 56, 46, 57, 58, 53, 62, 58, 62, 80, 60, 58, 48, 68, 58, 79, 66, 58, 54, 95, 57, 68, 75, 57, 64, 65, 66, 53, 54, 54, 64, 70, 62, 62, 78, 71, 65, 67, 57, 57, 64, 68, 63, 69, 111, 71, 56, 56, 66, 85, 94, 68, 66, 68, 56, 71, 62, 46, 63, 117, 53, 45, 53, 76, 48, 47, 70, 81, 55, 142, 83, 65, 65, 60, 48, 79, 55, 66, 48, 57, 53, 75, 61, 64, 68, 59, 80, 52, 77, 96, 57, 76, 57, 65, 58, 69, 70, 69, 62, 51, 66, 61, 63, 63, 73, 52, 70, 58, 65, 84, 87, 73, 59, 45, 62, 70, 64, 65, 66, 74, 62, 57, 79, 60, 53, 64, 55, 69, 58, 60, 51, 53, 69, 51, 62, 92, 67, 63, 68, 82, 56, 89, 91, 46, 78, 74, 65, 46, 67, 80, 72, 62, 75, 54, 184, 78, 67, 53, 48, 43, 55, 49, 44, 63, 57, 67, 67, 48, 72, 72, 63, 59, 63, 86, 96, 60, 88, 97, 68, 75, 61, 54, 65, 39, 40, 
88, 52, 36, 53, 79, 88, 54, 62, 67, 57, 68, 73, 72, 110, 56, 86, 68, 91, 79, 53, 63, 62, 49, 67, 54, 54, 72, 72, 82, 82, 66, 83, 88, 73, 60, 46, 75, 53, 62, 51, 64, 56, 64, 62, 89, 88, 68, 49, 68, 72, 84, 61, 65, 52, 47, 50, 62, 64, 49, 56, 54, 63, 71, 75, 65, 64, 58, 65, 48, 55, 57, 55, 87, 83, 64, 48, 137, 79, 50, 62, 60, 49, 67, 75, 77, 62, 60, 66, 64, 65, 61, 78, 57, 62, 43, 63, 78, 63, 79, 70, 40, 68, 69, 67, 66, 74, 57, 84, 70, 72, 94, 42, 60, 62, 56, 60, 80, 59, 74, 57, 73, 71, 55, 69, 59, 76, 70, 83, 50, 55, 72, 88, 67, 93, 52, 69, 60, 62, 44, 61, 61, 66, 78, 61, 72, 72, 42, 67, 76, 82, 69, 60, 63, 73, 63, 49, 61, 75, 97, 67, 55, 48, 62, 53, 62, 72, 72, 57, 99, 58, 49, 83, 59, 64, 45, 63, 78, 51, 72, 48, 50, 70, 66, 41, 69, 56, 64, 68, 52, 55, 68, 44, 62, 67, 58, 81, 56, 74, 81, 81, 63, 42, 43, 59, 66, 47, 60, 62, 64, 70, 68, 75, 67, 59, 59, 61, 81, 61, 54, 53, 56, 71, 50, 61, 62, 74, 58, 66, 48, 64, 64, 42, 71, 64, 74, 60, 74, 72, 53, 43, 58, 53, 63, 69, 78, 36, 67, 53, 75, 73, 73, 63, 45, 65, 63, 86, 74, 60, 47, 69, 72, 67, 55, 77, 43, 88, 59, 100, 92, 56, 77, 75, 59, 77, 62, 66, 48, 58, 68, 44, 51, 60, 83, 55, 59, 72, 59, 46, 83, 33, 73, 67, 50, 53, 55, 54, 47, 62, 66, 66, 59, 80, 61, 88, 66, 40, 69, 54, 69, 67, 66, 58, 51, 61, 49, 56, 73, 58, 77, 55, 57, 87, 84, 74, 57, 80, 60, 61, 64, 64, 56, 66, 66, 159, 55, 91, 61, 64, 49, 65, 83, 49, 69, 91, 72, 65, 85, 64, 60, 63, 79, 55, 59, 53, 59, 63, 49, 65, 64, 47, 80, 63, 55, 66, 62, 81, 73, 32, 88, 63, 52, 61, 55, 62, 45, 59, 47, 57, 88, 70, 70, 48, 57, 55, 55, 51, 62, 57, 66, 66, 54, 83, 76, 57, 55, 82, 66, 63, 69, 60, 90, 67, 57, 60, 83, 61, 54, 64, 57, 70, 86, 55, 67, 52, 82, 57, 88, 88, 55, 67, 66, 57, 69, 58, 58, 61, 73, 60, 52, 77, 97, 94, 61, 55, 54, 70, 60, 64, 52, 55, 74, 71, 62, 71, 62, 60, 62, 56, 42, 50, 128, 88, 80, 99, 85, 59, 49, 67, 47, 81, 71, 58, 68, 97, 48, 64, 78, 95, 55, 80, 59, 87, 42, 60, 78, 89, 84, 40, 62, 53, 75, 66, 50, 71, 77, 63, 69, 79, 56, 81, 68, 89, 67, 57, 51, 74, 70, 55, 69, 63, 54, 66, 46, 60, 93, 79, 65, 78, 55, 62, 66, 62, 59, 85, 71, 54, 69, 46, 60, 69, 42, 49, 78, 62, 67, 82, 59, 81, 70, 79, 60, 86, 51, 58, 78, 57, 61, 62, 78, 89, 47, 70, 50, 54, 66, 50, 69, 71, 133, 88, 53, 55, 65, 62, 52, 61, 71, 53, 79, 79, 57, 61, 150, 72, 67, 129, 63, 59, 50, 59, 94, 56, 88, 63, 61, 57, 71, 67, 89, 78, 72, 65, 59, 53, 61, 69, 51, 58, 91, 63, 78, 60, 50, 52, 71, 64, 48, 55, 54, 40, 71, 74, 110, 64, 63, 105, 63, 93, 67, 71, 62, 74, 66, 72, 87, 54, 62, 76, 49, 44, 83, 49, 70, 61, 52, 65, 63, 78, 50, 70, 50, 86, 66, 61, 66, 50, 72, 47, 71, 91, 72, 81, 55, 71, 67, 65, 51, 71, 70, 54, 84, 96, 80, 58, 55, 78, 63, 64, 56, 65, 54, 84, 68, 70, 71, 77, 62, 70, 54, 71, 97, 47, 52, 74, 50, 59, 73, 65, 61, 42, 74, 50, 65, 71, 55, 78, 35, 60, 54, 66, 122, 62, 58, 82, 56, 37, 63, 57, 50, 73, 62, 55, 48, 69, 63, 83, 69, 52, 46, 63, 65, 40, 57, 63, 72, 69, 67, 56, 59, 71, 55, 49, 68, 70, 90, 74, 55, 62, 63, 66, 55, 66, 63, 74, 83, 45, 55, 60, 39, 68, 83, 80, 68, 73, 72, 58, 60, 72, 73, 72, 62, 60, 64, 78, 69, 46, 44, 51, 52, 79, 81, 37, 69, 81, 67, 67, 89, 51, 63, 68, 55, 78, 38, 94, 56, 54, 53, 55, 65, 71, 40, 45, 63, 51, 70, 66, 57, 66, 66, 85, 82, 72, 96, 66, 47, 82, 50, 65, 138, 59, 76, 63, 51, 70, 55, 67, 45, 66, 60, 56, 73, 50, 84, 84, 48, 60, 73, 58, 66, 81, 58, 59, 55, 62, 77, 72, 44, 71, 50, 103, 69, 62, 89, 58, 73, 55, 76, 67, 89, 79, 82, 68, 87, 50, 52, 63, 59, 51, 60, 76, 60, 55, 49, 63, 52, 64, 80, 52, 108, 46, 67, 90, 50, 62, 65, 87, 68, 70, 69, 79, 52, 41, 52, 47, 66, 57, 61, 60, 87, 57, 81, 62, 72, 93, 61, 
68, 68, 64, 80, 83, 64, 54, 74, 51, 65, 51, 32, 54, 75, 62, 50, 60, 68, 68, 72, 82, 95, 69, 132, 95, 53, 68, 58, 72, 59, 70, 74, 44, 74, 55, 50, 98, 51, 53, 69, 59, 54, 66, 61, 68, 71, 67, 83, 73, 55, 53, 45, 63, 66, 53, 56, 52, 44, 69, 51, 68, 59, 62, 57, 116, 82, 98, 59, 76, 61, 76, 57, 68, 61, 61, 46, 80, 74, 70, 68, 78, 79, 56, 65, 52, 50, 83, 58, 57, 98, 63, 65, 62, 57, 53, 98, 64, 44, 64, 70, 57, 63, 58, 51, 52, 47, 62, 52, 207, 79, 92, 61, 55, 77, 65, 68, 63, 75, 84, 115, 69, 67, 63, 79, 50, 65, 51, 118, 82, 66, 65, 65, 63, 83, 59, 64, 83, 54, 61, 71, 46, 53, 70, 72, 52, 51, 59, 44, 68, 52, 81, 61, 103, 58, 80, 47, 80, 58, 67, 53, 71, 65, 71, 49, 41, 81, 89, 55, 70, 47, 53, 56, 74, 62, 65, 62, 62, 56, 65, 69, 58, 60, 81, 110, 65, 73, 57, 65, 66, 67, 76, 66, 70, 49, 51, 53, 58, 56, 55, 53, 61, 66, 54, 62, 59, 67, 70, 57, 76, 63, 77, 54, 57, 56, 42, 121, 56, 88, 65, 48, 41, 55, 68, 75, 58, 62, 78, 57, 63, 51, 71, 85, 67, 45, 66, 64, 65, 44, 86, 42, 59, 87, 53, 36, 81, 62, 51, 48, 58, 125, 64, 61, 65, 50, 50, 82, 60, 56, 66, 52, 62, 55, 61, 59, 76, 77, 71, 53, 67, 58, 81, 54, 98, 89, 68, 67, 49, 45, 77, 71, 68, 88, 68, 61, 84, 70, 80, 67, 87, 60, 43, 70, 76, 81, 74, 55, 62, 91, 64, 65, 81, 50, 62, 47, 48, 85, 77, 93, 54, 48, 68, 58, 65, 70, 51, 62, 68, 85, 63, 59, 63, 58, 71, 77, 53, 49, 75, 59, 65, 67, 54, 57, 59, 79, 74, 60, 58, 64, 67, 83, 66, 52, 62, 65, 41, 49, 100, 54, 64, 76, 66, 62, 57, 66, 47, 71, 52, 89, 84, 50, 58, 52, 72, 76, 59, 74, 67, 49, 59, 48, 58, 68, 56, 77, 50, 93, 60, 59, 50, 61, 61, 50, 66, 67, 85, 55, 73, 62, 64, 52, 81, 75, 77, 63, 67, 71, 68, 52, 54, 74, 52, 59, 56, 68, 100, 61, 81, 114, 53, 70, 67, 79, 107, 51, 51, 91, 45, 72, 85, 55, 58, 56, 66, 67, 82, 88, 35, 52, 43, 66, 67, 59, 41, 68, 43, 45, 50, 81, 48, 65, 62, 54, 63, 47, 50, 66, 73, 43, 65, 72, 71, 65, 63, 55, 45, 69, 45, 85, 45, 53, 105, 87, 59, 89, 64, 47, 53, 58, 52, 68, 63, 54, 104, 67, 91, 55, 46, 52, 55, 64, 33, 73, 88, 52, 71, 76, 74, 48, 80, 70, 61, 79, 63, 85, 74, 57, 82, 56, 53, 43, 53, 54, 43, 51, 71, 59, 48, 76, 69, 57, 80, 59, 49, 69, 55, 52, 68, 70, 40, 58, 59, 49, 47, 31, 79, 65, 79, 71, 80, 64, 95, 58, 71, 53, 60, 53, 69, 61, 61, 65, 64, 41, 46, 75, 49, 61, 55, 51, 48, 63, 60, 57, 74, 89, 68, 56, 108, 67, 83, 62, 77, 53, 70, 63, 44, 63, 65, 43, 58, 86, 96, 53, 64, 50, 67, 57, 77, 67, 69, 75, 73, 103, 79, 53, 57, 68, 62, 75, 71, 113, 102, 77, 60, 81, 54, 90, 71, 58, 67, 47, 53, 86, 98, 61, 66, 64, 84, 58, 70, 41, 96, 61, 76, 70, 59, 83, 29, 38, 68, 83, 56, 71, 62, 67, 76, 43, 74, 44, 51, 61, 97, 71, 74, 64, 54, 77, 64, 79, 45, 55, 41, 56, 66, 79, 92, 47, 54, 71, 60, 86, 64, 53, 51, 60, 44, 66, 73, 67, 74, 55, 77, 87, 55, 55, 88, 64, 75, 108, 57, 64, 108, 68, 70, 74, 36, 54, 68, 67, 92, 49, 52, 50, 58, 61, 51, 67, 70, 108, 92, 57, 69, 74, 82, 52, 55, 66, 45, 62, 59, 57, 67, 186, 77, 75, 86, 51, 59, 61, 56, 101, 66, 85, 69, 73, 56, 71, 55, 74, 71, 86, 47, 57, 65, 67, 72, 36, 54, 55, 52, 109, 61, 46, 55, 55, 57, 52, 42, 51, 55, 73, 78, 99, 46, 53, 77, 64, 65, 43, 77, 51, 71, 185, 67, 69, 76, 59, 68, 50, 69, 105, 73, 89, 77, 58, 62, 43, 69, 86, 57, 51, 75, 73, 77, 76, 72, 47, 65, 59, 55, 91, 86, 48, 92, 66, 46, 54, 56, 63, 57, 77, 108, 81, 76, 72, 54, 63, 53, 71, 70, 48, 69, 77, 81, 56, 68, 81, 85, 67, 64, 88, 55, 59, 85, 63, 72, 66, 55, 62, 54, 76, 107, 74, 76, 58, 61, 57, 80, 51, 59, 71, 64, 65, 70, 61, 59, 62, 58, 74, 56, 71, 62, 63, 59, 61, 61, 61, 63, 52, 54, 70, 64, 87, 71, 52, 67, 61, 60, 56, 48, 55, 87, 56, 52, 60, 73, 56, 79, 66, 69, 83, 64, 63, 53, 68, 66, 78, 62, 61, 106, 69, 70, 
60, 68, 68, 71, 80, 60, 62, 84, 87, 55, 57, 53, 58, 59, 52, 59, 57, 70, 61, 64, 70, 69, 113, 67, 56, 136, 60, 58, 63, 78, 79, 66, 48, 47, 61, 65, 89, 58, 59, 60, 71, 55, 70, 54, 54, 51, 60, 58, 69, 61, 64, 74, 69, 64, 71, 65, 57, 59, 77, 57, 81, 91, 75, 81, 68, 71, 47, 71, 68, 62, 90, 81, 65, 68, 53, 54, 60, 48, 67, 72, 60, 82, 66, 79, 59, 110, 56, 64, 82, 69, 62, 52, 67, 55, 60, 82, 77, 68, 53, 69, 62, 69, 65, 63, 50, 50, 65, 66, 56, 73, 52, 63, 73, 61, 60, 24, 62, 79, 70, 53, 56, 71, 97, 76, 61, 86, 64, 103, 62, 61, 64, 96, 119, 56, 60, 72, 76, 68, 60, 70, 61, 80, 61, 73, 50, 62, 55, 58, 60, 53, 87, 46, 73, 66, 44, 78, 51, 63, 73, 49, 64, 59, 52, 78, 81, 69, 95, 55, 94, 58, 49, 81, 62, 74, 117, 96, 64, 61, 73, 57, 66, 59, 57, 63, 66, 61, 53, 51, 33, 95, 97, 60, 52, 68, 71, 78, 56, 55, 79, 59, 90, 71, 60, 82, 76, 63, 58, 66, 71, 63, 70, 53, 72, 58, 56, 67, 48, 63, 63, 57, 59, 77, 67, 78, 49, 58, 50, 71, 58, 55, 48, 68, 63, 56, 38, 74, 59, 66, 68, 58, 63, 70, 68, 57, 66, 57, 63, 76, 61, 59, 57, 61, 45, 74, 49, 80, 58, 72, 65, 58, 68, 62, 53, 60, 57, 55, 64, 62, 48, 77, 83, 69, 57, 66, 54, 71, 79, 119, 60, 42, 69, 52, 76, 60, 54, 28, 77, 70, 70, 86, 75, 60, 66, 48, 52, 56, 77, 52, 66, 80, 64, 55, 56, 56, 57, 52, 57, 70, 97, 82, 60, 62, 64, 74, 63, 83, 56, 70, 55, 69, 60, 53, 66, 68, 59, 85, 66, 76, 66, 55, 63, 66, 66, 55, 87, 70, 63, 80, 72, 82, 57, 75, 75, 61, 63, 77, 55, 44, 71, 62, 69, 78, 59, 70, 62, 62, 65, 86, 53, 47, 65, 65, 58, 85, 71, 73, 80, 64, 60, 62, 77, 63, 69, 62, 54, 95, 56, 51, 64, 61, 98, 46, 72, 62, 60, 71, 68, 69, 64, 74, 64, 83, 73, 60, 61, 61, 48, 64, 67, 53, 71, 65, 56, 121, 68, 48, 59, 60, 45, 66, 57, 68, 40, 84, 61, 61, 62, 61, 68, 61, 74, 57, 92, 78, 67, 58, 52, 62, 61, 60, 79, 67, 53, 90, 64, 79, 85, 56, 64, 66, 55, 73, 70, 64, 61, 59, 87, 64, 93, 67, 67, 61, 50, 54, 59, 62, 48, 70, 55, 66, 61, 71, 52, 68, 78, 49, 50, 64, 46, 80, 53, 66, 61, 63, 59, 47, 63, 48, 57, 53, 57, 58, 56, 47, 79, 65, 76, 71, 78, 69, 75, 59, 86, 75, 63, 58, 54, 66, 59, 51, 57, 61, 44, 66, 67, 54, 54, 58, 60, 65, 75, 77, 59, 60, 56, 76, 69, 60, 62, 57, 56, 59, 68, 53, 70, 50, 67, 53, 37, 65, 76, 76, 57, 52, 47, 55, 77, 56, 69, 58, 53, 63, 75, 59, 63, 63, 78, 61, 66, 63, 58, 75, 75, 79, 91, 84, 51, 64, 64, 53, 65, 58, 58, 62, 65, 69, 61, 42, 61, 60, 52, 89, 56, 69, 67, 71, 54, 71, 73, 67, 61, 116, 54, 64, 68, 84, 62, 76, 88, 60, 69, 68, 87, 78, 67, 54, 73, 86, 74, 59, 71, 69, 78, 58, 63, 53, 52, 81, 74, 70, 67, 115, 55, 53, 47, 60, 50, 61, 71, 60, 69, 57, 74, 67, 66, 61, 68, 79, 79, 109, 58, 53, 73, 75, 55, 75, 54, 65, 69, 55, 62, 61, 66, 78, 66, 89, 78, 72, 51, 45, 52, 71, 57, 72, 67, 63, 65, 70, 78, 60, 59, 63, 56, 68, 64, 88, 64, 73, 61, 68, 73, 65, 60, 87, 54, 72, 56, 74, 76, 74, 64, 54, 65, 71, 73, 50, 71, 59, 64, 85, 83, 59, 57, 53, 69, 60, 64, 48, 54, 77, 68, 82, 57, 67, 81, 106, 63, 82, 68, 73, 80, 80, 56, 58, 70, 110, 57, 46, 61, 58, 61, 78, 58, 71, 63, 77, 56, 56, 53, 68, 74, 89, 67, 64, 69, 96, 68, 55, 67, 68, 60, 74, 76, 67, 79, 59, 66, 52, 44, 61, 69, 95, 65, 53, 71, 54, 63, 72, 54, 68, 57, 74, 56, 50, 57, 77, 62, 53, 73, 81, 68, 66, 65, 58, 56, 78, 62, 63, 57, 82, 73, 54, 57, 64, 68, 55, 93, 65, 54, 72, 83, 58, 71, 72, 57, 77, 48, 73, 93, 56, 70, 59, 67, 46, 69, 77, 87, 61, 77, 83, 52, 82, 68, 36, 52, 67, 59, 62, 54, 58, 58, 66, 57, 87, 63, 56, 49, 74, 72, 77, 67, 119, 57, 62, 64, 81, 76, 63, 56, 59, 103, 66, 61, 71, 52, 59, 59, 63, 48, 63, 70, 60, 86, 82, 81, 52, 80, 64, 51, 58, 59, 62, 66, 63, 89, 65, 77, 104, 64, 68, 60, 67, 117, 76, 91, 57, 71, 49, 54, 58, 74, 62, 67, 62, 
68, 64, 53, 66, 69, 60, 69, 64, 55, 54, 85, 66, 65, 69, 61, 49, 58, 60, 102, 70, 62, 59, 60, 74, 69, 68, 68, 73, 72, 60, 76, 82, 65, 59, 64, 68, 62, 55, 64, 64, 72, 54, 59, 59, 73, 55, 62, 84, 53, 86, 44, 50, 75, 63, 62, 49, 51, 51, 66, 74, 61, 85, 81, 59, 84, 43, 64, 46, 108, 53, 67, 52, 66, 92, 69, 70, 64, 67, 62, 77, 86, 62, 76, 95, 67, 92, 73, 58, 71, 65, 61, 87, 75, 72, 97, 59, 64, 51, 50, 83, 79, 46, 49, 63, 62, 63, 60, 116, 79, 55, 61, 75, 55, 57, 85, 87, 72, 73, 66, 58, 80, 68, 61, 52, 100, 54, 72, 60, 77, 53, 59, 90, 51, 57, 48, 74, 54, 46, 45, 70, 73, 86, 60, 45, 59, 68, 69, 64, 55, 68, 67, 63, 53, 50, 73, 69, 72, 64, 59, 49, 72, 57, 75, 63, 53, 51, 82, 70, 52, 69, 67, 91, 58, 64, 60, 60, 47, 62, 80, 72, 85, 55, 73, 52, 56, 45, 67, 72, 67, 70, 83, 77, 57, 58, 66, 63, 64, 80, 77, 64, 90, 60, 73, 40, 64, 53, 68, 57, 86, 77, 69, 63, 72, 94, 64, 53, 61, 97, 64, 60, 56, 56, 59, 72, 85, 75, 83, 66, 55, 75, 89, 66, 57, 99, 65, 56, 71, 87, 71, 113, 86, 59, 78, 55, 65, 42, 64, 89, 63, 72, 69, 58, 70, 66, 71, 59, 63, 64, 36, 83, 65, 65, 77, 86, 63, 60, 65, 70, 72, 51, 44, 61, 80, 58, 61, 71, 55, 63, 84, 80, 57, 60, 56, 61, 69, 65, 91, 75, 56, 70, 91, 53, 54, 52, 64, 80, 71, 71, 61, 62, 62, 60, 67, 44, 64, 48, 52, 58, 83, 65, 39, 54, 40, 64, 91, 58, 55, 79, 90, 52, 81, 61, 63, 60, 71, 69, 68, 76, 63, 75, 58, 57, 57, 57, 65, 58, 71, 59, 47, 43, 70, 59, 65, 58, 51, 65, 56, 69, 54, 51, 77, 63, 59, 93, 60, 58, 66, 73, 69, 42, 66, 90, 47, 56, 70, 83, 54, 52, 75, 61, 78, 65, 93, 69, 54, 66, 60, 62, 42, 61, 62, 96, 71, 63, 54, 58, 70, 75, 64, 50, 55, 64, 82, 58, 55, 57, 58, 59, 62, 80, 67, 64, 54, 67, 56, 48, 93, 61, 67, 65, 61, 71, 70, 55, 54, 73, 67, 104, 66, 54, 64, 56, 72, 63, 70, 81, 86, 60, 69, 52, 65, 80, 57, 54, 69, 71, 61, 107, 67, 74, 60, 50, 62, 43, 85, 58, 59, 51, 66, 66, 60, 57, 84, 44, 45, 67, 69, 58, 64, 50, 67, 66, 57, 52, 112, 120, 89, 74, 44, 61, 58, 73, 96, 67, 74, 60, 78, 71, 60, 56, 61, 59, 73, 97, 60, 60, 55, 55, 96, 81, 73, 58, 79, 53, 54, 58, 55, 69, 51, 49, 39, 55, 47, 68, 54, 50, 59, 69, 49, 79, 77, 71, 94, 98, 55, 63, 57, 73, 49, 60, 70, 73, 78, 53, 57, 64, 66, 60, 64, 71, 64, 49, 64, 76, 50, 59, 68, 78, 98, 52, 55, 80, 77, 44, 67, 78, 79, 59, 74, 77, 79, 62, 104, 62, 68, 60, 80, 74, 49, 69, 50, 71, 73, 54, 54, 61, 61, 57, 76, 68, 65, 63, 36, 65, 61, 88, 71, 66, 46, 51, 61, 64, 68, 56, 93, 56, 71, 65, 60, 79, 73, 66, 63, 80, 57, 124, 57, 55, 53, 63, 70, 62, 73, 46, 63, 67, 78, 57, 60, 89, 63, 58, 81, 66, 59, 58, 59, 97, 63, 65, 84, 54, 74, 61, 52, 45, 69, 44, 58, 71, 51, 58, 73, 47, 76, 70, 55, 88, 71, 64, 104, 63, 65, 47, 67, 44, 33, 81, 72, 67, 61, 76, 38, 62, 55, 60, 74, 81, 80, 76, 75, 49, 44, 59, 73, 50, 57, 62, 45, 76, 94, 59, 72, 54, 73, 62, 66, 73, 57, 65, 85, 69, 57, 65, 59, 79, 58, 81, 46, 56, 63, 78, 71, 54, 70, 49, 63, 72, 72, 63, 61, 63, 44, 65, 63, 55, 58, 83, 53, 65, 45, 98, 76, 61, 70, 82, 70, 70, 63, 54, 86, 65, 39, 59, 63, 96, 86, 69, 64, 54, 56, 59, 66, 57, 68, 90, 66, 113, 75, 76, 61, 51, 56, 66, 61, 55, 57, 58, 94, 74, 71, 88, 45, 58, 96, 54, 81, 59, 51, 64, 57, 39, 64, 60, 56, 75, 73, 55, 47, 70, 36, 81, 63, 64, 47, 71, 47, 81, 56, 67, 69, 79, 73, 106, 64, 70, 57, 74, 62, 63, 93, 75, 52, 75, 51, 70, 97, 65, 75, 49, 62, 57, 63, 71, 94, 83, 60, 64, 64, 51, 46, 91, 67, 66, 64, 95, 84, 54, 53, 57, 58, 64, 91, 58, 52, 71, 59, 66, 62, 67, 62, 64, 72, 99, 73, 63, 45, 68, 113, 55, 57, 60, 99, 112, 63, 75, 87, 68, 49, 62, 70, 60, 74, 73, 65, 62, 59, 64, 88, 82, 65, 75, 91, 67, 71, 45, 72, 54, 76, 51, 82, 66, 57, 81, 60, 88, 65, 68, 77, 69, 65, 70, 53, 86, 
62, 72, 82, 74, 68, 48, 70, 60, 57, 50, 46, 57, 77, 69, 72, 66, 68, 77, 71, 39, 53, 83, 60, 56, 65, 52, 62, 67, 67, 40, 61, 57, 88, 47, 52, 58, 48, 63, 61, 59, 75, 108, 63, 102, 75, 82, 64, 58, 62, 49, 107, 66, 49, 60, 93, 84, 66, 78, 64, 82, 70, 60, 48, 53, 55, 61, 62, 68, 75, 65, 63, 63, 58, 76, 51, 58, 77, 66, 60, 77, 57, 63, 53, 75, 56, 79, 79, 74, 74, 64, 59, 59, 64, 73, 71, 84, 69, 48, 50, 54, 75, 63, 89, 48, 61, 55, 66, 71, 62, 90, 88, 63, 69, 70, 67, 60, 68, 87, 60, 74, 65, 43, 123, 78, 78, 87, 65, 62, 58, 93, 58, 55, 53, 58, 72, 60, 84, 46, 54, 79, 68, 66, 73, 79, 64, 77, 73, 67, 70, 55, 106, 61, 52, 64, 75, 58, 57, 52, 53, 71, 45, 72, 53, 134, 57, 56, 97, 85, 65, 82, 84, 68, 60, 60, 77, 47, 70, 54, 72, 68, 60, 58, 54, 77, 79, 83, 58, 71, 112, 86, 64, 58, 87, 68, 54, 57, 63, 76, 74, 64, 67, 52, 66, 70, 59, 88, 61, 57, 63, 83, 53, 90, 64, 83, 34, 63, 63, 62, 59, 65, 132, 92, 90, 64, 59, 89, 60, 72, 59, 42, 51, 90, 118, 60, 45, 60, 55, 84, 81, 90, 69, 88, 66, 55, 79, 55, 73, 79, 75, 65, 84, 53, 97, 92, 88, 91, 47, 65, 64, 68, 56, 68, 56, 69, 57, 49, 114, 79, 83, 67, 59, 58, 31, 106, 56, 61, 81, 48, 57, 50, 48, 65, 47, 78, 83, 34, 77, 45, 79, 53, 74, 55, 69, 63, 39, 59, 54, 57, 62, 57, 61, 86, 66, 58, 70, 70, 65, 53, 51, 90, 44, 66, 46, 96, 80, 65, 68, 65, 72, 52, 56, 61, 39, 58, 62, 72, 80, 54, 49, 71, 62, 67, 63, 63, 48, 78, 60, 80, 78, 90, 53, 62, 52, 71, 83, 52, 65, 68, 78, 56, 66, 79, 52, 57, 72, 62, 94, 64, 91, 67, 69, 101, 62, 56, 62, 89, 69, 77, 74, 61, 73, 57, 73, 67, 84, 77, 61, 62, 43, 94, 61, 74, 51, 57, 55, 43, 57, 65, 69, 76, 74, 68, 72, 57, 74, 55, 60, 66, 87, 57, 62, 81, 67, 53, 53, 64, 75, 64, 65, 65, 80, 70, 82, 43, 63, 56, 67, 63, 48, 68, 74, 84, 65, 59, 61, 75, 63, 69, 45, 52, 56, 66, 53, 64, 55, 95, 104, 60, 60, 51, 70, 56, 65, 51, 82, 96, 68, 84, 103, 147, 66, 76, 63, 42, 81, 60, 64, 56, 58, 52, 66, 76, 51, 53, 59, 70, 66, 71, 57, 63, 65, 45, 46, 56, 55, 62, 55, 77, 70, 49, 76, 85, 49, 40, 57, 70, 62, 49, 98, 65, 46, 65, 68, 65, 49, 59, 56, 63, 49, 65, 60, 76, 96, 67, 50, 66, 48, 70, 58, 41, 104, 67, 58, 56, 67, 80, 71, 62, 57, 64, 58, 58, 78, 64, 47, 73, 50, 58, 48, 58, 58, 68, 81, 65, 57, 75, 63, 83, 64, 54, 64, 47, 75, 52, 69, 65, 89, 65, 65, 52, 82, 59, 71, 87, 57, 65, 85, 57, 84, 90, 36, 80, 52, 60, 49, 57, 82, 53, 61, 65, 65, 70, 86, 131, 69, 77, 83, 50, 42, 54, 78, 63, 69, 68, 64, 91, 69, 74, 51, 85, 81, 59, 67, 68, 52, 46, 58, 71, 73, 70, 75, 65, 70, 67, 74, 58, 74, 69, 62, 57, 57, 84, 96, 69, 74, 75, 73, 60, 53, 56, 81, 77, 60, 85, 53, 55, 50, 81, 84, 45, 69, 75, 65, 76, 66, 67, 82, 104, 122, 62, 58, 63, 49, 46, 66, 67, 69, 63, 59, 40, 56, 59, 82, 59, 62, 55, 92, 72, 57, 85, 60, 51, 76, 83, 55, 73, 62, 61, 73, 55, 77, 61, 61, 65, 67, 71, 32, 66, 82, 67, 71, 74, 67, 79, 46, 54, 58, 66, 51, 81, 66, 60, 49, 56, 63, 67, 56, 69, 40, 75, 48, 45, 67, 107, 89, 56, 49, 58, 74, 65, 62, 65, 69, 62, 63, 123, 57, 84, 75, 59, 53, 42, 46, 44, 70, 69, 95, 54, 49, 64, 62, 64, 59, 70, 56, 45, 76, 77, 66, 54, 61, 81, 52, 101, 67, 61, 72, 57, 62, 88, 77, 88, 67, 66, 45, 61, 34, 62, 52, 53, 50, 58, 80, 89, 115, 54, 55, 75, 71, 50, 89, 103, 84, 64, 68, 70, 43, 54, 54, 52, 86, 77, 79, 56, 66, 50, 100, 69, 46, 73, 59, 51, 46, 47, 46, 51, 72, 67, 73, 50, 77, 44, 49, 87, 82, 58, 57, 38, 111, 91, 73, 56, 72, 61, 76, 63, 63, 57, 85, 47, 58, 92, 91, 44, 50, 80, 85, 66, 46, 65, 58, 55, 37, 66, 78, 68, 71, 64, 63, 43, 72, 66, 92, 94, 62, 55, 75, 64, 79, 50, 59, 61, 49, 53, 87, 87, 83, 65, 82, 63, 69, 73, 95, 65, 50, 79, 116, 62, 69, 74, 46, 51, 58, 72, 70, 52, 60, 85, 49, 60, 94, 95, 
64, 69, 79, 54, 62, 73, 67, 64, 78, 63, 56, 80, 68, 57, 46, 71, 75, 80, 60, 81, 99, 72, 82, 47, 59, 81, 68, 80, 68, 43, 68, 56, 95, 115, 65, 64, 55, 54, 61, 64, 90, 64, 74, 75, 93, 59, 69, 70, 77, 54, 60, 56, 68, 68, 50, 63, 56, 76, 48, 46, 52, 50, 40, 76, 114, 59, 53, 50, 74, 61, 62, 71, 74, 63, 100, 66, 46, 54, 64, 55, 50, 58, 60, 55, 71, 85, 53, 70, 68, 67, 61, 56, 66, 52, 52, 68, 53, 79, 63, 54, 90, 94, 50, 63, 71, 58, 59, 101, 92, 85, 63, 100, 55, 77, 53, 46, 61, 55, 74, 54, 60, 78, 56, 104, 71, 80, 48, 67, 73, 72, 81, 74, 65, 52, 67, 59, 79, 102, 58, 61, 41, 58, 74, 71, 64, 49, 49, 91, 90, 69, 70, 54, 99, 76, 71, 59, 71, 87, 102, 70, 56, 92, 58, 71, 76, 69, 82, 46, 67, 58, 74, 92, 51, 41, 68, 79, 63, 88, 63, 65, 70, 56, 42, 66, 45, 65, 55, 55, 90, 70, 58, 66, 74, 97, 60, 65, 93, 65, 58, 72, 69, 92, 74, 66, 66, 75, 77, 91, 61, 60, 94, 54, 62, 63, 64, 79, 71, 64, 80, 85, 74, 67, 64, 43, 73, 66, 75, 65, 62, 76, 50, 45, 52, 64, 66, 81, 67, 51, 44, 90, 78, 78, 64, 79, 66, 58, 40, 68, 53, 71, 55, 62, 69, 67, 76, 64, 71, 88, 61, 67, 42, 56, 133, 60, 64, 78, 88, 77, 68, 65, 113, 81, 73, 65, 79, 55, 92, 60, 67, 80, 62, 50, 46, 57, 73, 82, 46, 55, 94, 50, 67, 86, 82, 51, 56, 47, 61, 69, 79, 89, 60, 47, 57, 40, 58, 83, 57, 62, 62, 83, 62, 49, 56, 48, 83, 34, 68, 80, 78, 63, 52, 103, 67, 72, 62, 73, 68, 52, 60, 98, 41, 47, 59, 61, 67, 92, 55, 32, 74, 64, 51, 79, 72, 40, 60, 89, 75, 62, 59, 65, 85, 55, 62, 74, 68, 92, 49, 55, 64, 60, 79, 71, 67, 79, 100, 64, 74, 67, 54, 72, 64, 56, 70, 60, 68, 41, 50, 55, 75, 71, 66, 68, 60, 56, 75, 72, 59, 39, 76, 92, 46, 86, 49, 64, 98, 86, 55, 67, 101, 65, 71, 54, 64, 56, 68, 51, 56, 56, 59, 65, 78, 42, 91, 78, 68, 32, 59, 64, 101, 67, 66, 64, 68, 68, 67, 66, 69, 70, 54, 63, 71, 77, 38, 59, 73, 78, 69, 70, 44, 55, 42, 79, 65, 63, 75, 79, 78, 67, 78, 66, 56, 60, 51, 86, 67, 70, 57, 54, 72, 68, 72, 56, 65, 50, 59, 55, 49, 49, 75, 65, 62, 77, 63, 69, 49, 58, 73, 85, 73, 59, 58, 51, 69, 96, 82, 71, 56, 54, 60, 56, 44, 68, 79, 75, 60, 71, 61, 57, 56, 69, 52, 81, 64, 75, 96, 62, 51, 92, 88, 68, 66, 56, 61, 57, 78, 62, 62, 84, 65, 62, 57, 76, 45, 83, 70, 50, 44, 51, 55, 79, 50, 72, 53, 49, 56, 80, 59, 84, 79, 61, 61, 67, 68, 41, 51, 67, 77, 65, 57, 58, 67, 41, 68, 72, 70, 84, 77, 56, 67, 64, 76, 74, 56, 77, 82, 68, 83, 47, 60, 54, 53, 63, 108, 71, 61, 66, 81, 79, 77, 65, 70, 66, 67, 85, 56, 110, 51, 67, 73, 67, 44, 62, 45, 84, 95, 39, 81, 79, 57, 65, 115, 69, 68, 56, 61, 67, 68, 64, 75, 51, 68, 62, 86, 61, 57, 57, 72, 57, 83, 94, 77, 65, 74, 56, 80, 63, 84, 80, 100, 62, 78, 59, 46, 55, 94, 85, 47, 70, 57, 97, 57, 73, 52, 85, 67, 59, 44, 34, 51, 87, 72, 72, 55, 63, 71, 52, 56, 84, 54, 40, 65, 93, 62, 79, 71, 61, 54, 61, 83, 88, 73, 70, 72, 77, 72, 83, 69, 71, 60, 82, 78, 47, 79, 58, 52, 87, 64, 60, 56, 58, 55, 58, 45, 94, 54, 92, 65, 88, 81, 57, 60, 49, 67, 58, 98, 51, 81, 69, 65, 60, 61, 54, 60, 62, 67, 71, 76, 129, 59, 62, 47, 66, 81, 58, 62, 75, 73, 95, 105, 39, 57, 91, 69, 76, 65, 83, 79, 70, 54, 70, 57, 47, 60, 67, 67, 93, 45, 27, 54, 66, 67, 84, 53, 79, 58, 62, 90, 42, 64, 68, 57, 73, 75, 72, 59, 66, 73, 43, 73, 62, 90, 66, 71, 84, 48, 58, 58, 49, 50, 72, 89, 83, 68, 61, 64, 51, 58, 64, 49, 77, 47, 58, 71, 61, 80, 83, 95, 55, 82, 68, 55, 60, 84, 68, 56, 84, 74, 61, 77, 57, 89, 33, 61, 75, 65, 60, 69, 122, 59, 55, 63, 49, 85, 61, 84, 36, 53, 56, 45, 65, 77, 70, 59, 46, 71, 61, 81, 45, 33, 79, 86, 99, 127, 70, 80, 93, 59, 72, 86, 51, 39, 59, 61, 70, 43, 49, 58, 93, 60, 53, 75, 64, 50, 67, 48, 76, 48, 43, 77, 48, 70, 59, 65, 61, 62, 57, 58, 75, 42, 73, 86, 63, 75, 
79, 59, 50, 62, 58, 69, 68, 60, 55, 60, 73, 94, 53, 57, 58, 58, 92, 71, 55, 65, 57, 64, 48, 68, 59, 63, 68, 84, 65, 50, 53, 75, 60, 66, 67, 78, 36, 62, 64, 70, 78, 69, 73, 68, 77, 85, 58, 59, 60, 63, 51, 60, 47, 53, 101, 61, 66, 66, 51, 49, 38, 74, 62, 79, 76, 77, 54, 68, 62, 77, 57, 150, 75, 49, 60, 58, 48, 59, 71, 94, 54, 64, 69, 76, 45, 77, 55, 79, 65, 55, 81, 88, 92, 105, 53, 71, 41, 66, 82, 64, 70, 71, 69, 83, 64, 75, 58, 77, 63, 72, 54, 54, 57, 55, 53, 62, 75, 68, 59, 59, 55, 66, 70, 88, 89, 49, 127, 58, 57, 64, 67, 74, 70, 67, 63, 67, 64, 51, 78, 77, 70, 66, 61, 62, 50, 84, 54, 58, 65, 67, 49, 96, 45, 73, 109, 60, 79, 70, 96, 74, 57, 60, 82, 82, 57, 63, 82, 74, 78, 59, 46, 67, 58, 38, 100, 59, 63, 59, 74, 64, 61, 65, 64, 48, 47, 53, 70, 73, 60, 88, 61, 98, 62, 81, 62, 63, 72, 74, 48, 75, 59, 65, 77, 52, 56, 63, 75, 64, 63, 88, 68, 53, 87, 65, 72, 54, 67, 57, 36, 101, 53, 43, 88, 66, 57, 87, 71, 67, 66, 76, 78, 69, 51, 93, 49, 75, 50, 77, 99, 61, 56, 59, 64, 58, 63, 54, 77, 60, 60, 67, 85, 76, 44, 69, 91, 64, 37, 55, 52, 37, 61, 70, 50, 64, 85, 45, 53, 61, 115, 49, 52, 79, 44, 64, 48, 81, 97, 73, 78, 67, 66, 73, 85, 53, 60, 56, 63, 78, 56, 66, 90, 60, 89, 82, 73, 96, 65, 54, 44, 65, 49, 77, 62, 66, 60, 67, 81, 35, 81, 59, 62, 45, 84, 56, 67, 62, 62, 53, 67, 68, 64, 74, 62, 52, 52, 45, 56, 66, 64, 69, 112, 104, 52, 60, 60, 84, 73, 84, 58, 82, 67, 68, 80, 91, 108, 40, 60, 84, 91, 53, 66, 45, 46, 38, 84, 68, 67, 52, 79, 70, 82, 61, 85, 61, 94, 70, 58, 49, 82, 42, 46, 95, 57, 117, 83, 80, 77, 85, 85, 82, 93, 60, 46, 48, 74, 80, 63, 62, 68, 74, 57, 68, 106, 61, 53, 65, 62, 53, 98, 52, 69, 71, 57, 63, 54, 70, 74, 55, 47, 145, 59, 88, 48, 90, 67, 57, 65, 65, 80, 68, 35, 63, 78, 75, 73, 51, 46, 86, 66, 48, 52, 68, 74, 56, 63, 102, 41, 80, 69, 74, 79, 78, 46, 66, 66, 69, 66, 72, 58, 68, 74, 74, 55, 50, 55, 58, 78, 63, 65, 57, 71, 52, 74, 89, 81, 48, 71, 74, 53, 63, 52, 64, 66, 71, 51, 89, 64, 76, 66, 49, 71, 71, 88, 42, 67, 59, 48, 74, 49, 42, 59, 78, 54, 68, 95, 61, 60, 52, 67, 59, 84, 59, 67, 61, 73, 47, 66, 43, 63, 73, 62, 72, 72, 72, 83, 63, 48, 65, 46, 71, 81, 62, 62, 54, 59, 42, 79, 74, 58, 88, 63, 56, 60, 60, 46, 70, 59, 65, 53, 68, 69, 97, 77, 64, 46, 87, 54, 62, 62, 56, 60, 64, 49, 67, 76, 68, 57, 71, 53, 71, 70, 90, 56, 62, 50, 83, 66, 72, 66, 54, 78, 57, 93, 61, 46, 67, 76, 70, 130, 58, 71, 92, 81, 94, 69, 62, 61, 53, 43, 60, 56, 59, 69, 57, 83, 54, 64, 55, 60, 79, 52, 77, 82, 78, 57, 69, 60, 48, 103, 58, 52, 66, 83, 88, 72, 55, 64, 58, 49, 60, 65, 47, 55, 54, 72, 75, 59, 90, 58, 70, 61, 67, 109, 70, 64, 66, 80, 71, 60, 55, 108, 58, 109, 55, 76, 73, 59, 75, 101, 57, 56, 44, 76, 59, 120, 75, 73, 73, 124, 72, 60, 59, 90, 66, 71, 69, 48, 75, 80, 87, 58, 50, 77, 51, 81, 72, 71, 61, 52, 62, 69, 58, 97, 75, 60, 58, 65, 81, 65, 64, 58, 54, 69, 60, 56, 42, 59, 83, 47, 71, 87, 47, 63, 83, 61, 63, 69, 72, 52, 66, 49, 69, 62, 66, 47, 57, 48, 84, 57, 72, 72, 70, 67, 67, 83, 82, 67, 89, 73, 59, 62, 73, 44, 50, 68, 64, 56, 75, 55, 57, 68, 53, 77, 122, 50, 61, 45, 64, 77, 86, 57, 61, 49, 64, 82, 70, 50, 64, 65, 45, 47, 71, 64, 52, 62, 76, 55, 70, 73, 85, 51, 59, 58, 67, 58, 48, 61, 57, 68, 72, 58, 74, 66, 74, 64, 56, 51, 50, 67, 49, 72, 40, 66, 62, 55, 55, 54, 55, 72, 78, 60, 68, 56, 57, 47, 45, 87, 72, 63, 73, 58, 52, 56, 57, 50, 78, 75, 53, 108, 71, 83, 71, 79, 69, 62, 55, 50, 78, 76, 65, 77, 80, 63, 66, 85, 49, 57, 68, 53, 125, 67, 91, 67, 58, 48, 64, 60, 111, 65, 75, 60, 48, 68, 45, 62, 52, 51, 65, 70, 52, 56, 50, 44, 79, 66, 78, 80, 67, 40, 66, 79, 57, 66, 60, 53, 66, 72, 64, 67, 49, 51, 57, 
66, 53, 54, 46, 55, 101, 76, 78, 88, 53, 47, 53, 90, 71, 75, 61, 52, 63, 67, 65, 63, 67, 42, 60, 41, 72, 55, 63, 97, 58, 65, 53, 77, 65, 71, 54, 71, 65, 62, 55, 53, 69, 63, 56, 62, 66, 49, 60, 72, 83, 96, 57, 69, 86, 52, 52, 67, 49, 67, 85, 118, 54, 76, 65, 65, 69, 74, 94, 77, 70, 56, 55, 65, 58, 49, 89, 91, 80, 58, 72, 61, 60, 76, 83, 63, 84, 76, 92, 55, 63, 63, 55, 53, 56, 50, 51, 55, 95, 66, 62, 68, 67, 77, 84, 67, 66, 66, 59, 54, 120, 73, 59, 63, 56, 72, 60, 66, 57, 73, 64, 59, 57, 58, 73, 91, 65, 70, 75, 54, 62, 51, 65, 56, 67, 50, 71, 58, 56, 64, 57, 67, 67, 48, 60, 62, 55, 63, 79, 92, 50, 111, 47, 71, 74, 67, 36, 99, 85, 59, 56, 66, 49, 63, 45, 72, 58, 59, 57, 59, 57, 74, 55, 60, 73, 73, 59, 55, 75, 62, 62, 53, 64, 75, 76, 49, 79, 81, 91, 63, 83, 73, 73, 51, 83, 48, 86, 59, 62, 65, 119, 82, 57, 77, 52, 70, 82, 76, 70, 47, 49, 85, 77, 95, 69, 60, 78, 58, 65, 64, 64, 51, 52, 79, 47, 61, 88, 59, 49, 102, 77, 58, 68, 58, 52, 50, 63, 76, 69, 66, 71, 59, 105, 52, 57, 51, 52, 60, 61, 103, 114, 62, 56, 62, 90, 55, 82, 56, 83, 58, 53, 67, 70, 69, 46, 80, 74, 68, 43, 61, 69, 65, 109, 71, 56, 56, 63, 82, 57, 66, 66, 54, 101, 64, 46, 65, 73, 65, 68, 63, 55, 64, 61, 64, 68, 86, 53, 60, 60, 60, 88, 85, 89, 63, 62, 72, 58, 70, 61, 70, 67, 67, 104, 81, 63, 87, 55, 77, 85, 83, 84, 62, 61, 67, 91, 55, 47, 61, 69, 61, 62, 71, 59, 60, 77, 68, 59, 68, 54, 60, 46, 70, 74, 49, 60, 61, 71, 50, 61, 49, 99, 63, 70, 54, 59, 76, 94, 82, 42, 61, 73, 52, 61, 59, 66, 64, 33, 61, 80, 74, 50, 58, 69, 56, 69, 45, 73, 71, 51, 58, 79, 64, 44, 73, 65, 60, 59, 56, 62, 70, 77, 65, 94, 59, 70, 57, 64, 61, 95, 58, 74, 55, 53, 71, 97, 51, 54, 67, 70, 61, 53, 79, 74, 58, 63, 69, 57, 66, 59, 75, 78, 64, 55, 66, 67, 63, 79, 63, 109, 63, 65, 70, 55, 51, 61, 47, 63, 79, 53, 54, 188, 80, 66, 78, 48, 65, 57, 67, 64, 53, 54, 115, 56, 67, 68, 43, 64, 75, 69, 59, 57, 53, 61, 52, 58, 67, 81, 60, 128, 48, 58, 65, 52, 62, 65, 59, 51, 53, 58, 66, 65, 58, 67, 43, 53, 57, 63, 84, 58, 81, 65, 62, 122, 77, 77, 74, 58, 63, 60, 79, 45, 52, 63, 80, 46, 56, 69, 69, 65, 64, 52, 90, 86, 48, 64, 114, 69, 53, 47, 81, 100, 74, 67, 56, 55, 55, 47, 55, 86, 70, 62, 59, 58, 55, 64, 61, 115, 74, 68, 68, 74, 95, 66, 59, 70, 65, 63, 69, 51, 63, 53, 53, 75, 92, 64, 66, 93, 60, 65, 81, 47, 79, 66, 59, 57, 67, 78, 95, 77, 69, 72, 57, 82, 59, 90, 53, 55, 54, 74, 44, 70, 69, 65, 70, 67, 83, 58, 56, 67, 70, 72, 61, 68, 52, 46, 61, 49, 39, 69, 48, 41, 74, 53, 83, 71, 120, 84, 53, 102, 60, 102, 55, 59, 63, 64, 63, 73, 62, 67, 50, 85, 60, 48, 57, 45, 64, 88, 59, 51, 49, 76, 66, 58, 95, 86, 62, 58, 84, 71, 61, 67, 69, 59, 60, 79, 61, 66, 59, 55, 51, 75, 53, 61, 77, 64, 67, 75, 65, 91, 76, 65, 68, 78, 61, 86, 67, 76, 83, 63, 67, 49, 73, 57, 64, 48, 68, 53, 63, 47, 55, 73, 54, 61, 59, 66, 83, 65, 80, 42, 69, 96, 67, 63, 59, 69, 62, 59, 75, 54, 63, 55, 56, 62, 55, 88, 70, 71, 53, 50, 87, 61, 70, 78, 75, 52, 78, 52, 76, 61, 57, 64, 120, 89, 74, 51, 45, 56, 50, 60, 67, 65, 45, 68, 64, 63, 73, 63, 60, 55, 68, 77, 63, 77, 61, 62, 63, 55, 57, 75, 55, 57, 69, 67, 67, 61, 50, 73, 75, 59, 73, 71, 56, 70, 71, 96, 60, 45, 49, 67, 57, 65, 71, 48, 51, 53, 55, 83, 93, 68, 65, 82, 53, 71, 62, 86, 51, 72, 69, 61, 66, 76, 59, 67, 79, 60, 59, 57, 126, 74, 81, 64, 79, 61, 51, 50, 73, 63, 66, 76, 60, 62, 172, 69, 65, 100, 49, 70, 64, 38, 57, 53, 74, 61, 56, 75, 118, 57, 51, 62, 56, 88, 105, 62, 96, 65, 61, 73, 65, 58, 64, 112, 63, 97, 59, 60, 91, 52, 98, 60, 69, 62, 69, 81, 112, 45, 49, 65, 69, 56, 63, 74, 62, 56, 69, 56, 82, 49, 44, 72, 78, 76, 70, 66, 45, 70, 53, 59, 68, 84, 70, 79, 67, 
[large block of raw comma-separated integer data from the diff omitted]
41, 60, 67, 67, 63, 60, 55, 61, 82, 67, 59, 71, 62, 78, 64, 60, 84, 60, 51, 51, 53, 80, 55, 57, 99, 79, 84, 67, 55, 54, 72, 44, 60, 49, 68, 51, 55, 51, 73, 62, 73, 57, 44, 46, 51, 53, 133, 61, 66, 70, 67, 58, 55, 67, 38, 72, 63, 85, 43, 62, 52, 130, 79, 60, 76, 56, 61, 61, 67, 45, 68, 92, 79, 43, 56, 82, 79, 73, 66, 73, 47, 72, 70, 88, 64, 68, 58, 71, 86, 42, 82, 87, 112, 67, 83, 56, 40, 65, 55, 49, 38, 82, 59, 53, 52, 77, 83, 98, 61, 65, 58, 55, 50, 68, 64, 61, 66, 84, 72, 74, 55, 53, 63, 75, 67, 73, 97, 91, 73, 71, 52, 61, 63, 49, 69, 58, 63, 92, 79, 73, 57, 47, 95, 60, 73, 56, 62, 83, 57, 79, 52, 88, 66, 61, 74, 52, 65, 51, 85, 68, 86, 60, 64, 63, 79, 55, 72, 98, 66, 57, 45, 91, 74, 72, 64, 69, 55, 86, 63, 61, 41, 82, 65, 54, 97, 70, 72, 80, 69, 59, 46, 65, 53, 57, 47, 50, 58, 58, 73, 43, 61, 44, 52, 83, 54, 61, 60, 115, 61, 63, 76, 61, 75, 66, 46, 71, 44, 70, 52, 65, 41, 57, 44, 54, 63, 52, 58, 59, 62, 82, 69, 80, 60, 53, 79, 68, 76, 65, 68, 70, 71, 73, 92, 46, 73, 66, 58, 77, 45, 75, 55, 53, 83, 60, 101, 59, 96, 86, 73, 87, 57, 59, 64, 49, 55, 65, 73, 63, 85, 48, 43, 55, 60, 71, 56, 74, 40, 53, 51, 37, 43, 48, 60, 46, 70, 71, 84, 53, 107, 48, 46, 76, 55, 71, 65, 84, 81, 83, 53, 70, 70, 49, 45, 52, 77, 61, 77, 52, 92, 89, 102, 66, 67, 53, 53, 48, 53, 108, 26, 51, 54, 60, 84, 60, 70, 64, 47, 46, 62, 72, 55, 86, 61, 53, 38, 66, 70, 79, 47, 52, 62, 54, 38, 112, 39, 60, 55, 62, 58, 62, 75, 35, 71, 82, 76, 63, 69, 59, 78, 64, 52, 81, 76, 62, 78, 46, 61, 83, 53, 63, 84, 49, 48, 53, 60, 77, 86, 47, 92, 50, 63, 85, 67, 66, 68, 48, 53, 82, 97, 47, 66, 46, 55, 46, 53, 52, 57, 46, 80, 51, 71, 69, 83, 64, 55, 103, 86, 50, 59, 70, 73, 69, 62, 97, 102, 72, 80, 55, 71, 63, 57, 58, 65, 62, 74, 78, 61, 66, 59, 60, 54, 56, 90, 70, 51, 73, 73, 68, 65, 69, 130, 47, 53, 60, 68, 86, 40, 97, 81, 61, 56, 76, 94, 63, 82, 56, 70, 72, 66, 61, 56, 89, 122, 52, 59, 78, 49, 70, 68, 63, 51, 53, 56, 57, 80, 53, 62, 60, 70, 67, 75, 75, 60, 56, 70, 55, 70, 92, 73, 89, 78, 101, 72, 54, 81, 73, 62, 71, 53, 71, 67, 56, 51, 47, 56, 57, 61, 57, 56, 98, 86, 42, 64, 61, 63, 84, 70, 44, 58, 72, 65, 63, 96, 54, 83, 52, 72, 76, 71, 50, 95, 60, 92, 72, 69, 93, 53, 50, 67, 82, 67, 73, 64, 63, 76, 57, 117, 65, 74, 55, 74, 80, 59, 60, 59, 57, 62, 52, 52, 70, 72, 67, 72, 67, 80, 93, 74, 42, 54, 62, 77, 50, 79, 101, 59, 50, 59, 66, 44, 75, 71, 55, 46, 87, 85, 41, 49, 66, 77, 86, 74, 65, 44, 56, 80, 51, 78, 66, 91, 79, 69, 55, 53, 83, 99, 143, 52, 60, 71, 68, 79, 70, 50, 45, 72, 44, 62, 62, 38, 93, 58, 59, 47, 50, 60, 87, 53, 69, 84, 63, 43, 59, 27, 65, 91, 50, 80, 53, 55, 65, 85, 53, 53, 60, 75, 70, 60, 63, 72, 53, 54, 90, 91, 51, 55, 74, 36, 40, 92, 65, 59, 82, 70, 59, 68, 64, 53, 71, 71, 45, 67, 66, 78, 85, 90, 15, 55, 60, 48, 84, 85, 52, 68, 71, 113, 71, 77, 61, 72, 70, 48, 55, 75, 76, 73, 78, 57, 85, 92, 49, 89, 42, 69, 60, 75, 56, 62, 102, 106, 59, 55, 53, 69, 58, 125, 59, 78, 67, 71, 70, 55, 96, 77, 80, 82, 92, 47, 82, 65, 66, 144, 76, 74, 52, 52, 56, 69, 76, 80, 53, 65, 50, 63, 79, 62, 89, 76, 75, 68, 78, 103, 37, 85, 69, 59, 67, 61, 44, 81, 40, 48, 76, 50, 45, 62, 66, 45, 52, 70, 66, 71, 69, 68, 62, 59, 91, 81, 79, 75, 45, 66, 60, 63, 121, 59, 47, 95, 77, 78, 104, 92, 58, 46, 85, 77, 72, 39, 62, 128, 45, 52, 69, 78, 72, 64, 48, 48, 61, 52, 65, 75, 66, 57, 96, 60, 102, 57, 60, 74, 58, 71, 58, 48, 73, 74, 59, 72, 39, 58, 93, 67, 51, 50, 50, 54, 35, 89, 59, 49, 67, 89, 76, 77, 51, 62, 72, 34, 56, 47, 55, 82, 34, 57, 70, 72, 65, 58, 101, 105, 65, 62, 115, 72, 61, 123, 62, 54, 94, 42, 67, 71, 85, 46, 49, 69, 63, 59, 81, 57, 58, 74, 
69, 59, 54, 85, 76, 79, 50, 105, 75, 69, 81, 45, 57, 60, 52, 65, 94, 57, 83, 45, 99, 70, 104, 76, 104, 40, 66, 69, 69, 59, 57, 51, 73, 69, 68, 71, 67, 76, 45, 64, 70, 80, 73, 64, 74, 72, 88, 90, 60, 63, 86, 62, 48, 70, 79, 56, 87, 70, 95, 69, 56, 60, 51, 83, 65, 81, 59, 74, 49, 81, 70, 60, 66, 67, 65, 66, 79, 64, 88, 64, 75, 87, 49, 58, 79, 69, 101, 65, 51, 79, 53, 69, 76, 64, 65, 75, 39, 56, 81, 52, 48, 88, 38, 48, 60, 73, 47, 74, 55, 62, 71, 74, 62, 127, 77, 49, 63, 69, 85, 49, 78, 88, 58, 85, 80, 58, 60, 75, 60, 108, 84, 120, 66, 51, 132, 72, 62, 65, 67, 93, 65, 43, 83, 41, 68, 36, 74, 93, 54, 48, 54, 64, 110, 52, 85, 70, 88, 51, 61, 56, 47, 40, 62, 54, 50, 49, 63, 96, 76, 70, 55, 64, 81, 65, 61, 53, 46, 58, 69, 43, 66, 63, 50, 61, 66, 66, 105, 54, 51, 63, 62, 83, 69, 47, 74, 61, 59, 60, 68, 69, 41, 100, 64, 59, 58, 59, 59, 59, 38, 74, 85, 69, 63, 54, 60, 56, 73, 60, 65, 62, 48, 82, 55, 65, 67, 75, 85, 65, 50, 87, 67, 66, 61, 83, 78, 32, 62, 71, 57, 44, 75, 50, 82, 65, 85, 59, 62, 101, 75, 66, 66, 62, 65, 43, 49, 50, 46, 77, 60, 57, 43, 47, 68, 79, 84, 67, 106, 52, 71, 67, 55, 71, 74, 76, 52, 86, 49, 65, 76, 67, 59, 61, 43, 75, 55, 62, 81, 48, 51, 54, 58, 84, 76, 50, 70, 56, 71, 57, 66, 62, 78, 72, 72, 52, 49, 56, 49, 77, 56, 62, 54, 68, 88, 61, 44, 66, 59, 130, 69, 61, 49, 71, 63, 52, 35, 67, 56, 79, 94, 64, 33, 76, 61, 81, 61, 63, 76, 81, 63, 74, 104, 64, 89, 69, 63, 69, 59, 43, 73, 59, 81, 54, 84, 44, 41, 68, 43, 65, 70, 67, 41, 75, 51, 38, 62, 66, 68, 81, 60, 54, 70, 75, 95, 91, 40, 65, 43, 67, 58, 59, 47, 38, 69, 60, 69, 76, 61, 57, 50, 63, 82, 62, 55, 74, 51, 53, 65, 58, 65, 71, 69, 84, 62, 62, 75, 53, 62, 70, 106, 61, 73, 46, 39, 58, 57, 80, 89, 60, 54, 41, 59, 48, 80, 68, 60, 51, 53, 44, 63, 55, 56, 57, 78, 48, 57, 59, 120, 53, 54, 39, 41, 56, 75, 64, 60, 57, 51, 56, 66, 60, 92, 59, 65, 51, 66, 86, 70, 53, 59, 99, 59, 52, 74, 65, 40, 61, 63, 48, 64, 52, 79, 76, 54, 55, 56, 63, 95, 46, 46, 60, 83, 65, 58, 86, 125, 78, 53, 59, 55, 60, 65, 23, 48, 98, 64, 45, 65, 45, 63, 49, 62, 38, 77, 96, 58, 78, 26, 66, 50, 51, 47, 69, 56, 68, 121, 53, 94, 71, 56, 88, 97, 50, 55, 89, 87, 80, 61, 96, 77, 41, 78, 68, 55, 74, 39, 53, 51, 54, 58, 80, 70, 55, 51, 66, 47, 51, 48, 85, 74, 70, 78, 66, 43, 83, 70, 68, 87, 51, 50, 57, 74, 52, 152, 55, 47, 106, 69, 66, 71, 87, 63, 82, 54, 54, 57, 77, 64, 67, 70, 74, 58, 47, 55, 48, 59, 57, 78, 56, 56, 67, 53, 60, 62, 63, 40, 53, 73, 88, 68, 65, 53, 65, 60, 59, 58, 71, 69, 57, 52, 58, 58, 71, 61, 63, 64, 69, 66, 58, 55, 38, 73, 59, 106, 47, 74, 58, 70, 65, 53, 69, 72, 67, 65, 139, 69, 84, 73, 64, 83, 69, 34, 60, 49, 58, 57, 60, 76, 83, 62, 54, 61, 60, 47, 58, 64, 72, 58, 64, 59, 48, 65, 49, 72, 84, 55, 73, 95, 40, 68, 55, 56, 64, 55, 75, 71, 52, 61, 75, 68, 44, 55, 57, 72, 54, 66, 95, 84, 57, 75, 57, 85, 91, 48, 56, 75, 49, 61, 43, 68, 58, 67, 79, 60, 64, 87, 58, 54, 60, 61, 42, 65, 95, 59, 70, 56, 61, 76, 67, 57, 76, 71, 52, 87, 73, 61, 77, 65, 46, 80, 53, 61, 68, 92, 65, 66, 78, 65, 64, 53, 78, 100, 62, 55, 88, 45, 68, 70, 55, 67, 61, 65, 84, 87, 69, 52, 55, 55, 68, 70, 69, 96, 53, 50, 71, 77, 59, 47, 61, 49, 76, 83, 95, 65, 69, 59, 69, 65, 59, 63, 79, 63, 86, 64, 54, 50, 57, 89, 67, 64, 54, 84, 68, 66, 73, 70, 69, 59, 93, 79, 63, 62, 86, 92, 58, 38, 48, 42, 57, 82, 43, 61, 86, 60, 62, 59, 71, 48, 51, 66, 66, 63, 104, 74, 60, 66, 67, 52, 45, 91, 66, 53, 78, 70, 59, 67, 68, 77, 61, 49, 57, 69, 127, 80, 85, 56, 69, 29, 69, 76, 45, 47, 65, 74, 35, 63, 66, 71, 54, 63, 54, 74, 81, 61, 62, 37, 60, 64, 76, 47, 50, 77, 58, 42, 57, 68, 79, 51, 57, 73, 72, 54, 103, 
80, 78, 86, 75, 39, 72, 73, 54, 65, 52, 65, 83, 70, 69, 44, 82, 71, 89, 53, 61, 74, 69, 55, 51, 65, 70, 133, 74, 61, 69, 82, 75, 52, 75, 68, 62, 49, 68, 63, 61, 76, 116, 68, 65, 61, 77, 65, 64, 76, 56, 64, 99, 52, 65, 81, 56, 55, 69, 104, 68, 86, 63, 79, 80, 61, 65, 66, 50, 70, 60, 53, 94, 59, 68, 55, 72, 69, 73, 63, 65, 68, 55, 65, 82, 79, 125, 68, 70, 53, 82, 70, 79, 71, 52, 59, 49, 58, 62, 59, 118, 70, 56, 56, 55, 72, 78, 76, 82, 73, 65, 61, 69, 74, 53, 61, 71, 80, 69, 58, 52, 77, 80, 76, 58, 54, 75, 78, 83, 106, 59, 53, 73, 78, 63, 61, 75, 62, 67, 31, 84, 69, 48, 54, 72, 50, 60, 52, 62, 120, 52, 71, 55, 68, 73, 71, 55, 73, 68, 85, 94, 54, 60, 99, 69, 57, 64, 77, 60, 76, 64, 54, 75, 57, 65, 63, 64, 84, 83, 49, 61, 73, 60, 60, 66, 77, 55, 62, 82, 71, 64, 62, 56, 76, 57, 75, 67, 61, 72, 57, 57, 45, 55, 52, 58, 89, 78, 83, 61, 70, 57, 72, 62, 54, 68, 60, 76, 56, 69, 64, 69, 71, 59, 61, 103, 62, 71, 51, 60, 81, 65, 57, 73, 64, 63, 72, 49, 76, 59, 73, 60, 56, 52, 98, 76, 80, 71, 54, 75, 61, 58, 61, 60, 67, 63, 71, 49, 60, 74, 73, 76, 79, 53, 50, 65, 72, 72, 48, 77, 42, 68, 61, 78, 66, 87, 72, 46, 53, 55, 83, 71, 60, 69, 64, 61, 61, 82, 60, 74, 53, 69, 67, 67, 60, 47, 72, 61, 55, 58, 55, 49, 77, 71, 78, 55, 72, 76, 43, 47, 108, 66, 61, 79, 59, 70, 66, 53, 85, 62, 59, 68, 59, 86, 69, 48, 57, 68, 53, 50, 63, 50, 41, 63, 85, 54, 65, 49, 60, 97, 65, 48, 53, 44, 58, 93, 78, 65, 80, 75, 88, 62, 68, 44, 69, 80, 47, 71, 70, 61, 62, 110, 66, 70, 62, 63, 65, 57, 72, 74, 63, 90, 79, 83, 61, 62, 53, 59, 57, 67, 67, 67, 59, 59, 60, 72, 72, 62, 73, 54, 51, 43, 64, 57, 74, 52, 58, 63, 54, 67, 70, 53, 62, 61, 55, 54, 67, 68, 71, 55, 53, 62, 74, 71, 65, 51, 94, 101, 58, 52, 65, 44, 53, 61, 71, 77, 52, 46, 56, 57, 64, 77, 67, 77, 87, 63, 56, 76, 55, 54, 55, 73, 87, 67, 59, 56, 80, 92, 67, 50, 61, 60, 63, 63, 68, 54, 69, 79, 65, 67, 46, 59, 54, 48, 62, 78, 83, 66, 63, 92, 68, 64, 83, 74, 73, 70, 58, 71, 44, 104, 76, 61, 57, 54, 100, 77, 55, 52, 53, 69, 46, 58, 66, 44, 57, 73, 45, 62, 43, 70, 67, 64, 74, 66, 59, 79, 59, 61, 62, 71, 63, 45, 66, 57, 68, 70, 77, 64, 56, 38, 70, 38, 55, 107, 68, 59, 64, 61, 73, 66, 63, 76, 56, 69, 80, 55, 76, 59, 47, 67, 73, 69, 69, 78, 82, 83, 82, 63, 77, 51, 72, 55, 85, 62, 53, 69, 77, 61, 65, 98, 81, 72, 65, 39, 54, 67, 78, 59, 43, 94, 65, 88, 66, 57, 61, 60, 59, 57, 51, 64, 45, 59, 58, 72, 59, 67, 52, 63, 51, 72, 65, 55, 68, 64, 57, 137, 53, 53, 52, 52, 74, 50, 75, 86, 72, 56, 57, 62, 72, 69, 58, 55, 65, 61, 60, 59, 54, 69, 104, 69, 55, 69, 44, 94, 65, 68, 96, 52, 60, 49, 87, 69, 73, 82, 60, 78, 62, 46, 72, 85, 71, 62, 69, 95, 97, 88, 76, 104, 68, 73, 66, 68, 62, 59, 133, 70, 66, 75, 57, 58, 61, 66, 75, 56, 73, 83, 85, 72, 63, 59, 48, 63, 67, 57, 40, 61, 70, 43, 64, 65, 60, 54, 74, 63, 54, 60, 48, 65, 62, 86, 92, 84, 67, 50, 65, 58, 51, 81, 69, 61, 61, 55, 62, 58, 55, 62, 74, 48, 42, 59, 84, 97, 67, 74, 77, 70, 77, 64, 59, 64, 54, 78, 81, 61, 94, 84, 58, 72, 65, 83, 63, 68, 69, 47, 63, 86, 67, 58, 62, 79, 81, 103, 71, 46, 62, 71, 57, 71, 76, 106, 88, 70, 62, 66, 52, 59, 44, 101, 82, 92, 68, 63, 69, 87, 77, 47, 75, 40, 98, 68, 68, 71, 61, 78, 111, 54, 55, 132, 42, 61, 62, 54, 60, 77, 55, 61, 68, 61, 88, 63, 94, 58, 77, 49, 54, 63, 57, 47, 57, 44, 60, 59, 64, 67, 67, 83, 44, 59, 61, 67, 81, 52, 55, 91, 67, 54, 39, 66, 65, 76, 68, 65, 98, 82, 60, 40, 55, 46, 65, 80, 67, 69, 57, 65, 54, 68, 72, 56, 43, 68, 55, 65, 60, 62, 46, 65, 60, 55, 71, 65, 47, 77, 59, 61, 71, 79, 58, 52, 72, 67, 44, 63, 61, 49, 49, 67, 56, 92, 56, 65, 61, 58, 55, 69, 64, 48, 60, 65, 68, 99, 65, 61, 48, 73, 84, 
62, 59, 62, 68, 66, 63, 65, 59, 61, 84, 58, 51, 78, 66, 73, 66, 80, 92, 61, 57, 61, 76, 62, 62, 47, 58, 46, 67, 79, 52, 87, 68, 63, 56, 73, 65, 40, 51, 100, 54, 78, 65, 103, 56, 67, 63, 53, 80, 60, 72, 56, 51, 67, 43, 62, 83, 78, 64, 80, 63, 52, 58, 49, 50, 62, 77, 56, 74, 47, 57, 54, 64, 80, 77, 88, 81, 53, 63, 57, 46, 57, 55, 61, 72, 75, 54, 45, 87, 66, 60, 71, 61, 132, 66, 61, 72, 70, 82, 77, 70, 71, 69, 59, 84, 68, 76, 62, 57, 62, 71, 71, 63, 68, 70, 70, 70, 53, 67, 55, 75, 53, 69, 41, 59, 70, 56, 62, 85, 53, 56, 72, 74, 51, 72, 64, 60, 74, 89, 69, 68, 77, 77, 76, 82, 88, 36, 62, 44, 60, 53, 67, 54, 92, 72, 66, 62, 49, 55, 68, 65, 55, 55, 55, 61, 70, 49, 39, 54, 54, 63, 50, 73, 70, 53, 73, 76, 61, 63, 60, 65, 69, 73, 64, 72, 87, 55, 62, 74, 57, 66, 60, 58, 81, 60, 61, 65, 50, 72, 54, 70, 66, 86, 76, 79, 70, 58, 52, 58, 58, 64, 54, 61, 89, 53, 71, 56, 52, 65, 70, 82, 66, 68, 71, 58, 77, 66, 95, 63, 63, 74, 95, 63, 74, 36, 71, 61, 63, 70, 58, 58, 76, 75, 75, 71, 89, 61, 75, 84, 64, 58, 67, 58, 50, 64, 95, 56, 47, 66, 71, 58, 59, 49, 54, 60, 70, 59, 53, 84, 65, 72, 64, 65, 53, 68, 59, 52, 59, 59, 61, 41, 67, 41, 52, 51, 67, 67, 46, 80, 66, 82, 58, 72, 58, 67, 69, 48, 59, 76, 61, 74, 54, 72, 60, 69, 65, 53, 59, 66, 62, 53, 60, 71, 46, 77, 53, 83, 66, 42, 90, 46, 72, 58, 68, 58, 78, 81, 63, 49, 87, 74, 87, 66, 55, 68, 46, 77, 57, 53, 45, 72, 64, 56, 49, 55, 74, 82, 53, 63, 80, 65, 84, 86, 59, 62, 56, 55, 67, 66, 57, 85, 55, 57, 73, 51, 65, 65, 58, 62, 60, 57, 55, 75, 49, 62, 58, 61, 71, 79, 75, 60, 46, 66, 54, 68, 92, 68, 54, 61, 41, 68, 62, 56, 78, 61, 57, 78, 72, 64, 50, 58, 87, 76, 66, 63, 62, 49, 60, 59, 65, 63, 94, 82, 66, 50, 68, 45, 66, 65, 66, 60, 72, 57, 76, 45, 68, 64, 58, 75, 84, 42, 66, 77, 62, 56, 44, 53, 95, 51, 75, 62, 67, 77, 83, 61, 69, 64, 53, 64, 89, 62, 84, 69, 79, 67, 66, 61, 60, 58, 65, 48, 61, 55, 51, 61, 52, 77, 72, 75, 72, 112, 69, 53, 76, 65, 60, 62, 83, 59, 64, 73, 56, 69, 50, 50, 70, 75, 67, 74, 60, 72, 82, 40, 42, 73, 51, 52, 50, 56, 60, 55, 84, 71, 59, 45, 67, 61, 59, 78, 54, 70, 90, 65, 60, 63, 63, 81, 55, 80, 75, 63, 64, 65, 53, 86, 78, 56, 60, 58, 71, 57, 57, 51, 84, 55, 73, 68, 65, 67, 48, 70, 75, 65, 63, 59, 61, 60, 73, 63, 45, 58, 60, 66, 69, 68, 48, 72, 84, 61, 59, 55, 70, 67, 52, 66, 65, 69, 75, 58, 56, 88, 43, 71, 74, 62, 63, 51, 61, 59, 70, 87, 56, 65, 79, 61, 69, 57, 72, 65, 89, 64, 53, 75, 65, 59, 50, 110, 78, 71, 67, 58, 70, 89, 51, 74, 64, 76, 74, 69, 52, 60, 66, 100, 58, 54, 66, 40, 73, 71, 68, 41, 48, 58, 80, 67, 91, 55, 55, 74, 63, 58, 58, 60, 102, 55, 74, 67, 58, 52, 65, 65, 63, 63, 67, 42, 65, 58, 72, 59, 61, 47, 59, 57, 66, 77, 59, 61, 58, 66, 92, 49, 66, 62, 64, 134, 66, 67, 63, 60, 85, 61, 49, 71, 52, 78, 53, 83, 71, 55, 70, 56, 97, 78, 95, 86, 64, 53, 60, 72, 89, 160, 66, 68, 57, 83, 83, 108, 60, 78, 86, 65, 51, 56, 69, 63, 65, 71, 68, 70, 84, 50, 52, 55, 68, 64, 64, 66, 62, 75, 51, 64, 107, 56, 72, 69, 64, 66, 80, 51, 65, 56, 48, 67, 56, 64, 95, 54, 61, 55, 64, 55, 74, 50, 62, 63, 63, 65, 65, 74, 69, 54, 70, 51, 93, 54, 68, 62, 67, 59, 56, 66, 73, 55, 65, 83, 56, 64, 59, 57, 52, 77, 58, 78, 64, 60, 51, 64, 80, 81, 67, 61, 56, 60, 91, 93, 55, 53, 70, 49, 51, 56, 66, 49, 58, 57, 56, 59, 59, 43, 76, 56, 71, 93, 107, 44, 70, 58, 68, 62, 67, 77, 50, 60, 77, 46, 58, 48, 55, 68, 59, 82, 75, 62, 68, 60, 94, 77, 72, 81, 48, 71, 62, 52, 45, 51, 88, 78, 96, 57, 55, 62, 57, 64, 63, 52, 51, 72, 37, 53, 53, 69, 75, 76, 70, 51, 59, 64, 63, 69, 73, 99, 52, 74, 48, 69, 64, 68, 62, 92, 62, 40, 65, 64, 72, 49, 87, 92, 49, 57, 56, 76, 53, 71, 75, 80, 81, 
69, 79, 56, 61, 95, 64, 79, 72, 80, 83, 80, 72, 60, 53, 64, 55, 66, 75, 53, 75, 57, 59, 76, 57, 112, 82, 59, 64, 78, 57, 100, 75, 58, 64, 68, 67, 78, 53, 77, 61, 55, 55, 64, 68, 74, 46, 53, 63, 67, 99, 73, 65, 51, 56, 54, 89, 57, 69, 53, 55, 76, 43, 64, 61, 77, 64, 88, 47, 60, 42, 72, 54, 53, 68, 55, 61, 76, 86, 59, 72, 65, 64, 52, 56, 69, 120, 79, 57, 108, 64, 59, 68, 50, 79, 61, 47, 68, 70, 65, 55, 51, 47, 77, 72, 52, 51, 47, 51, 54, 67, 63, 50, 76, 65, 63, 53, 62, 58, 60, 109, 58, 95, 75, 68, 69, 84, 50, 61, 116, 66, 97, 64, 57, 86, 64, 57, 60, 52, 109, 70, 81, 68, 63, 52, 48, 70, 76, 77, 55, 58, 56, 45, 72, 67, 67, 55, 53, 59, 53, 57, 63, 65, 60, 73, 38, 81, 60, 72, 76, 66, 82, 63, 56, 82, 64, 130, 60, 74, 72, 51, 54, 70, 51, 71, 54, 58, 46, 58, 53, 60, 59, 67, 65, 73, 70, 56, 70, 61, 46, 48, 57, 54, 83, 56, 60, 67, 53, 83, 116, 65, 60, 82, 93, 89, 58, 48, 79, 53, 68, 74, 55, 100, 82, 56, 80, 72, 47, 67, 94, 41, 60, 66, 72, 62, 57, 59, 56, 46, 66, 60, 52, 56, 60, 91, 75, 59, 53, 71, 55, 51, 75, 48, 49, 59, 59, 58, 69, 52, 58, 67, 78, 69, 68, 72, 55, 80, 59, 67, 51, 65, 59, 118, 68, 50, 69, 64, 65, 66, 85, 62, 87, 74, 44, 83, 70, 61, 51, 97, 62, 70, 68, 91, 55, 56, 68, 84, 73, 54, 54, 79, 86, 149, 80, 74, 51, 62, 83, 64, 43, 79, 78, 49, 95, 66, 78, 67, 62, 56, 61, 71, 58, 65, 50, 78, 58, 83, 53, 53, 70, 53, 73, 55, 42, 67, 52, 60, 60, 53, 80, 66, 59, 52, 53, 51, 98, 58, 72, 71, 59, 76, 64, 83, 59, 62, 69, 82, 88, 69, 84, 83, 78, 52, 54, 56, 70, 45, 56, 64, 51, 44, 53, 68, 67, 53, 47, 65, 64, 74, 66, 47, 71, 109, 84, 69, 55, 69, 65, 94, 43, 72, 67, 65, 79, 77, 55, 65, 69, 61, 53, 71, 52, 120, 64, 59, 56, 55, 55, 50, 84, 61, 87, 62, 66, 47, 90, 46, 54, 69, 60, 60, 70, 70, 56, 60, 55, 59, 82, 55, 80, 60, 52, 89, 82, 63, 79, 67, 53, 60, 67, 72, 50, 91, 56, 65, 88, 77, 60, 61, 83, 89, 62, 61, 61, 57, 137, 79, 62, 57, 71, 69, 68, 55, 55, 63, 56, 79, 68, 71, 66, 65, 53, 52, 67, 118, 62, 69, 33, 50, 50, 115, 62, 61, 60, 67, 63, 66, 47, 62, 63, 100, 68, 56, 57, 44, 54, 60, 55, 55, 80, 63, 64, 60, 61, 62, 52, 69, 66, 62, 86, 71, 49, 86, 66, 55, 56, 61, 74, 48, 56, 75, 74, 63, 52, 56, 63, 56, 114, 65, 57, 45, 43, 64, 68, 75, 66, 62, 58, 66, 68, 56, 60, 54, 61, 67, 74, 78, 60, 72, 44, 59, 97, 74, 61, 120, 87, 103, 63, 57, 58, 73, 73, 61, 67, 59, 56, 88, 43, 60, 38, 59, 58, 70, 100, 84, 73, 45, 75, 62, 76, 55, 48, 61, 50, 53, 65, 80, 65, 73, 71, 53, 43, 53, 53, 62, 79, 67, 49, 56, 55, 51, 33, 51, 71, 49, 72, 64, 67, 62, 46, 74, 62, 69, 60, 64, 63, 66, 80, 63, 68, 50, 55, 113, 65, 51, 65, 64, 53, 116, 76, 63, 113, 61, 51, 60, 40, 150, 61, 80, 54, 40, 53, 53, 50, 87, 69, 49, 84, 57, 61, 70, 99, 59, 82, 63, 66, 66, 60, 70, 85, 64, 70, 68, 75, 67, 70, 55, 35, 31, 65, 80, 52, 47, 71, 62, 60, 78, 53, 47, 63, 53, 126, 88, 50, 59, 57, 68, 66, 87, 88, 47, 72, 60, 63, 65, 56, 55, 71, 83, 76, 59, 68, 70, 74, 62, 85, 58, 66, 78, 72, 47, 66, 53, 66, 46, 49, 60, 43, 39, 71, 87, 80, 48, 44, 30, 69, 61, 83, 68, 89, 59, 57, 98, 68, 51, 86, 47, 50, 45, 66, 63, 58, 75, 63, 59, 47, 51, 50, 54, 54, 61, 77, 55, 101, 88, 65, 68, 55, 67, 62, 89, 54, 90, 70, 36, 50, 73, 75, 52, 72, 57, 50, 66, 46, 99, 61, 56, 63, 67, 75, 62, 52, 55, 61, 71, 59, 75, 67, 86, 95, 91, 74, 78, 60, 71, 78, 57, 62, 61, 70, 66, 73, 53, 111, 69, 72, 44, 39, 38, 84, 76, 90, 40, 86, 69, 54, 59, 69, 59, 54, 88, 40, 76, 62, 48, 68, 59, 74, 72, 71, 61, 70, 83, 54, 73, 86, 61, 70, 97, 68, 75, 81, 62, 51, 65, 51, 56, 63, 111, 67, 66, 62, 73, 77, 110, 86, 65, 59, 52, 77, 68, 75, 47, 53, 66, 77, 54, 64, 57, 55, 56, 56, 104, 56, 74, 73, 51, 46, 79, 52, 43, 
64, 94, 58, 81, 61, 60, 63, 80, 83, 67, 89, 52, 52, 74, 68, 43, 67, 41, 65, 71, 59, 60, 63, 51, 64, 69, 59, 93, 45, 60, 53, 63, 68, 74, 51, 53, 54, 79, 66, 48, 59, 62, 78, 51, 72, 75, 56, 47, 60, 117, 53, 83, 59, 54, 67, 108, 53, 92, 64, 59, 70, 62, 87, 53, 61, 70, 60, 70, 66, 66, 40, 67, 55, 76, 68, 66, 83, 81, 67, 59, 56, 51, 73, 57, 64, 35, 51, 52, 64, 43, 51, 61, 65, 136, 66, 79, 73, 88, 73, 41, 65, 72, 56, 90, 71, 63, 36, 64, 91, 107, 84, 68, 67, 58, 66, 68, 61, 52, 76, 72, 64, 73, 62, 84, 52, 77, 64, 70, 61, 50, 56, 91, 62, 64, 75, 61, 89, 49, 76, 79, 56, 63, 61, 49, 57, 65, 59, 50, 70, 77, 102, 99, 63, 95, 64, 60, 96, 68, 55, 85, 88, 53, 77, 87, 69, 85, 102, 74, 64, 71, 68, 63, 70, 47, 57, 73, 136, 65, 66, 93, 96, 76, 61, 54, 78, 57, 62, 105, 74, 71, 70, 84, 80, 66, 55, 57, 73, 73, 52, 109, 34, 51, 36, 69, 72, 67, 80, 89, 53, 63, 70, 69, 56, 58, 69, 113, 73, 56, 41, 60, 90, 54, 50, 44, 116, 67, 60, 55, 94, 113, 63, 50, 69, 52, 74, 64, 65, 78, 48, 65, 64, 65, 69, 56, 57, 53, 56, 71, 73, 75, 95, 76, 61, 45, 95, 67, 60, 59, 78, 72, 88, 61, 71, 67, 67, 70, 69, 69, 43, 57, 60, 104, 62, 66, 89, 55, 78, 73, 65, 59, 71, 84, 55, 58, 71, 49, 54, 64, 82, 79, 49, 51, 66, 56, 57, 67, 62, 86, 57, 66, 90, 50, 55, 57, 68, 88, 65, 73, 85, 59, 48, 67, 106, 60, 54, 76, 95, 55, 78, 61, 53, 53, 57, 64, 70, 59, 65, 64, 51, 57, 59, 65, 74, 44, 55, 54, 67, 72, 51, 57, 61, 39, 75, 58, 62, 55, 64, 72, 54, 65, 50, 52, 61, 64, 68, 88, 60, 52, 53, 83, 40, 60, 88, 73, 70, 66, 89, 57, 58, 107, 79, 56, 52, 50, 100, 73, 52, 58, 44, 71, 57, 56, 84, 52, 84, 42, 67, 60, 48, 62, 51, 56, 67, 65, 62, 57, 48, 54, 47, 79, 65, 93, 76, 63, 71, 59, 93, 71, 58, 85, 76, 82, 79, 42, 74, 46, 77, 55, 95, 54, 56, 51, 58, 74, 41, 63, 74, 70, 44, 85, 119, 52, 65, 63, 78, 62, 61, 88, 67, 55, 116, 87, 56, 70, 65, 95, 39, 60, 59, 61, 85, 70, 63, 69, 45, 55, 54, 70, 63, 52, 54, 89, 51, 53, 67, 86, 48, 82, 36, 50, 74, 59, 59, 68, 108, 93, 64, 69, 56, 56, 68, 61, 76, 66, 45, 75, 53, 67, 59, 69, 152, 58, 60, 53, 61, 56, 60, 61, 71, 64, 57, 54, 70, 50, 60, 76, 80, 65, 73, 48, 53, 64, 73, 91, 66, 73, 61, 81, 46, 75, 74, 50, 86, 91, 65, 61, 58, 76, 66, 56, 58, 61, 76, 71, 54, 92, 63, 61, 85, 60, 85, 81, 56, 52, 46, 57, 93, 69, 49, 75, 56, 53, 54, 53, 67, 67, 71, 68, 50, 69, 65, 69, 47, 87, 76, 56, 61, 86, 55, 50, 95, 58, 56, 46, 44, 63, 58, 59, 89, 79, 54, 78, 78, 62, 83, 64, 60, 62, 55, 55, 45, 56, 78, 56, 69, 85, 68, 86, 77, 57, 53, 70, 57, 56, 48, 44, 104, 45, 100, 79, 39, 67, 65, 71, 56, 41, 54, 67, 46, 48, 76, 76, 56, 64, 51, 53, 53, 56, 71, 79, 56, 68, 53, 58, 52, 74, 66, 73, 82, 67, 61, 60, 57, 95, 66, 50, 56, 63, 50, 50, 81, 45, 78, 62, 70, 60, 57, 55, 63, 49, 59, 84, 70, 85, 41, 59, 64, 50, 56, 76, 57, 64, 65, 53, 71, 45, 50, 59, 44, 59, 48, 141, 55, 75, 57, 73, 45, 100, 58, 39, 59, 62, 57, 63, 70, 89, 65, 28, 65, 72, 88, 58, 52, 55, 64, 56, 67, 52, 51, 73, 79, 56, 100, 81, 133, 55, 45, 78, 68, 49, 42, 99, 54, 49, 67, 70, 46, 66, 64, 56, 51, 56, 65, 54, 63, 80, 92, 58, 79, 51, 90, 35, 53, 69, 106, 41, 63, 68, 80, 63, 59, 50, 52, 65, 78, 69, 66, 58, 53, 66, 61, 46, 70, 65, 44, 84, 58, 64, 55, 67, 61, 42, 75, 43, 92, 81, 72, 71, 74, 57, 85, 66, 62, 84, 70, 47, 70, 62, 96, 54, 90, 88, 57, 62, 85, 77, 63, 92, 52, 51, 69, 60, 57, 122, 63, 72, 67, 78, 81, 50, 56, 76, 62, 56, 66, 68, 71, 42, 83, 101, 49, 54, 66, 91, 67, 64, 59, 45, 63, 56, 64, 59, 87, 77, 68, 62, 70, 45, 69, 84, 52, 42, 65, 66, 72, 68, 55, 107, 83, 71, 40, 45, 53, 76, 55, 67, 71, 69, 67, 65, 92, 75, 60, 70, 71, 75, 131, 84, 164, 50, 62, 57, 46, 45, 66, 67, 44, 62, 77, 46, 
70, 53, 93, 43, 82, 81, 57, 81, 62, 53, 64, 65, 66, 78, 44, 82, 71, 64, 42, 40, 78, 65, 101, 93, 50, 32, 89, 59, 70, 89, 64, 61, 64, 70, 71, 64, 42, 57, 57, 52, 55, 68, 39, 92, 51, 63, 69, 68, 65, 63, 71, 50, 92, 90, 62, 64, 41, 54, 53, 74, 70, 68, 59, 58, 71, 54, 66, 63, 51, 90, 101, 53, 93, 64, 78, 58, 59, 74, 89, 89, 35, 65, 84, 85, 48, 60, 54, 65, 63, 56, 60, 53, 62, 50, 115, 72, 62, 65, 65, 51, 63, 45, 57, 64, 100, 73, 94, 63, 70, 73, 47, 70, 57, 68, 80, 65, 57, 53, 64, 63, 69, 77, 64, 101, 63, 64, 61, 67, 59, 92, 70, 74, 67, 51, 64, 65, 78, 62, 70, 71, 53, 46, 55, 77, 69, 42, 56, 74, 78, 84, 57, 87, 72, 69, 45, 67, 57, 61, 52, 46, 86, 61, 83, 48, 69, 79, 66, 59, 67, 71, 71, 67, 52, 75, 42, 71, 82, 71, 64, 65, 60, 57, 37, 54, 41, 78, 60, 112, 56, 85, 61, 52, 75, 55, 60, 60, 82, 48, 74, 51, 56, 74, 68, 74, 52, 46, 80, 36, 70, 65, 80, 67, 80, 81, 61, 58, 61, 70, 68, 64, 71, 63, 61, 66, 80, 67, 82, 97, 72, 98, 79, 74, 76, 60, 48, 63, 72, 94, 54, 83, 65, 81, 53, 55, 56, 61, 53, 63, 77, 42, 78, 98, 73, 59, 61, 70, 53, 93, 98, 72, 72, 74, 56, 116, 60, 55, 97, 65, 47, 42, 83, 58, 90, 68, 76, 82, 85, 76, 67, 69, 73, 71, 61, 60, 60, 66, 57, 66, 52, 94, 65, 63, 70, 61, 64, 81, 47, 54, 75, 57, 47, 45, 61, 72, 58, 55, 59, 94, 67, 59, 71, 52, 85, 54, 45, 59, 64, 34, 72, 100, 65, 89, 71, 88, 109, 76, 56, 57, 54, 72, 59, 67, 56, 72, 74, 66, 62, 77, 85, 41, 61, 57, 67, 55, 69, 71, 61, 73, 57, 50, 73, 57, 69, 95, 44, 63, 77, 78, 97, 64, 38, 55, 50, 53, 89, 60, 54, 109, 67, 69, 67, 55, 120, 57, 78, 51, 80, 69, 65, 53, 67, 59, 64, 77, 68, 64, 62, 60, 94, 88, 69, 75, 57, 75, 65, 83, 63, 71, 77, 70, 49, 86, 72, 151, 58, 62, 83, 57, 58, 57, 73, 55, 57, 78, 52, 58, 65, 53, 63, 77, 81, 62, 55, 61, 47, 83, 67, 52, 55, 53, 72, 76, 49, 78, 57, 70, 73, 60, 68, 65, 81, 79, 53, 79, 77, 55, 76, 57, 69, 56, 80, 68, 85, 59, 67, 46, 58, 61, 56, 66, 58, 48, 60, 66, 46, 64, 88, 59, 59, 69, 52, 58, 58, 47, 53, 52, 45, 81, 72, 100, 86, 64, 81, 66, 67, 54, 56, 67, 53, 75, 64, 92, 58, 54, 70, 63, 65, 53, 79, 65, 73, 55, 67, 68, 55, 69, 70, 73, 47, 46, 61, 57, 60, 105, 85, 62, 42, 48, 48, 73, 130, 55, 56, 66, 72, 54, 77, 94, 73, 55, 43, 51, 77, 97, 67, 88, 45, 75, 56, 88, 46, 55, 67, 85, 59, 101, 49, 77, 69, 70, 72, 70, 59, 61, 55, 59, 47, 53, 68, 66, 57, 73, 90, 85, 51, 60, 66, 58, 73, 67, 54, 56, 27, 68, 73, 90, 78, 43, 46, 71, 47, 70, 62, 58, 90, 66, 65, 95, 62, 48, 65, 68, 50, 41, 101, 52, 50, 60, 51, 77, 53, 72, 57, 51, 59, 85, 67, 57, 67, 90, 59, 66, 57, 76, 47, 77, 73, 54, 70, 70, 53, 59, 66, 60, 73, 69, 54, 66, 73, 68, 62, 47, 58, 55, 69, 66, 70, 65, 62, 63, 51, 76, 55, 72, 88, 41, 57, 87, 71, 53, 72, 60, 60, 67, 86, 77, 57, 50, 96, 62, 75, 83, 45, 63, 70, 37, 66, 67, 47, 69, 85, 75, 69, 59, 68, 68, 66, 97, 69, 65, 71, 44, 82, 64, 67, 71, 105, 63, 94, 64, 91, 59, 58, 66, 63, 42, 61, 49, 60, 73, 55, 65, 81, 71, 54, 69, 64, 76, 50, 57, 61, 66, 56, 66, 61, 53, 43, 49, 70, 52, 84, 58, 57, 65, 68, 63, 58, 104, 57, 51, 50, 64, 56, 78, 75, 38, 61, 58, 52, 78, 51, 66, 66, 53, 67, 68, 94, 40, 55, 55, 46, 75, 39, 63, 68, 65, 61, 52, 71, 66, 65, 52, 53, 79, 58, 68, 53, 80, 65, 55, 64, 63, 74, 63, 67, 70, 66, 60, 68, 82, 72, 73, 68, 76, 53, 58, 56, 45, 81, 55, 63, 60, 61, 55, 60, 64, 72, 46, 63, 62, 67, 76, 69, 79, 79, 70, 77, 116, 65, 31, 48, 68, 80, 62, 65, 65, 65, 59, 83, 79, 64, 49, 56, 71, 67, 88, 71, 42, 60, 65, 76, 59, 83, 59, 61, 56, 69, 69, 62, 74, 47, 66, 68, 60, 92, 72, 59, 66, 51, 99, 68, 74, 55, 53, 80, 43, 36, 66, 77, 86, 68, 47, 60, 51, 52, 71, 69, 77, 78, 65, 59, 63, 80, 75, 65, 63, 68, 64, 54, 52, 49, 63, 47, 
63, 70, 117, 58, 42, 64, 61, 55, 51, 71, 61, 45, 64, 50, 39, 80, 79, 58, 51, 85, 72, 69, 60, 66, 61, 75, 58, 44, 64, 87, 82, 68, 67, 87, 58, 84, 54, 53, 83, 59, 64, 56, 65, 66, 63, 82, 66, 48, 80, 97, 55, 71, 92, 67, 75, 79, 63, 73, 70, 54, 58, 96, 58, 65, 42, 62, 56, 49, 64, 74, 90, 86, 71, 42, 66, 63, 100, 58, 55, 59, 90, 39, 56, 57, 75, 63, 63, 69, 64, 90, 54, 69, 58, 65, 63, 63, 89, 85, 62, 51, 59, 117, 111, 59, 53, 69, 134, 60, 48, 39, 78, 101, 45, 72, 46, 60, 81, 52, 65, 85, 82, 66, 56, 63, 83, 91, 56, 99, 71, 67, 73, 66, 80, 58, 119, 70, 67, 64, 64, 57, 57, 59, 47, 102, 68, 70, 86, 68, 60, 60, 51, 63, 57, 61, 57, 80, 69, 66, 62, 66, 74, 86, 44, 77, 86, 50, 68, 75, 60, 56, 200, 69, 58, 57, 49, 60, 50, 65, 47, 63, 58, 75, 68, 61, 57, 58, 51, 60, 61, 66, 64, 76, 73, 68, 43, 89, 65, 69, 69, 73, 65, 68, 62, 76, 59, 59, 65, 64, 70, 53, 96, 59, 72, 70, 67, 62, 67, 73, 44, 42, 54, 66, 61, 72, 62, 77, 88, 74, 113, 66, 78, 58, 69, 51, 60, 86, 49, 62, 59, 54, 77, 55, 56, 97, 39, 48, 46, 85, 80, 65, 78, 71, 69, 63, 54, 85, 69, 81, 57, 75, 82, 55, 86, 55, 58, 63, 60, 53, 71, 62, 76, 50, 46, 69, 78, 94, 58, 73, 61, 55, 70, 87, 68, 79, 86, 68, 63, 82, 84, 69, 88, 73, 76, 59, 72, 96, 55, 53, 84, 61, 61, 68, 94, 56, 67, 65, 77, 66, 53, 59, 41, 49, 62, 92, 70, 69, 69, 55, 63, 56, 55, 75, 42, 64, 72, 48, 94, 58, 68, 49, 45, 83, 66, 52, 77, 64, 54, 66, 72, 72, 65, 48, 72, 64, 72, 64, 44, 73, 63, 45, 51, 45, 75, 83, 93, 60, 56, 68, 52, 72, 49, 59, 65, 63, 61, 64, 61, 61, 65, 55, 80, 77, 68, 40, 63, 58, 77, 62, 53, 109, 59, 61, 58, 82, 59, 125, 68, 63, 70, 81, 64, 74, 53, 62, 61, 64, 75, 67, 90, 69, 70, 68, 79, 65, 49, 63, 88, 66, 72, 74, 64, 74, 68, 68, 70, 60, 65, 66, 66, 58, 67, 118, 50, 65, 53, 57, 59, 76, 53, 56, 64, 63, 58, 64, 65, 65, 81, 54, 43, 66, 58, 58, 69, 62, 63, 80, 67, 57, 61, 94, 71, 66, 153, 58, 50, 59, 46, 67, 67, 53, 71, 80, 69, 49, 71, 87, 64, 65, 48, 47, 70, 67, 76, 72, 70, 67, 70, 61, 81, 62, 57, 76, 67, 62, 54, 69, 72, 65, 78, 47, 71, 76, 61, 47, 59, 57, 79, 75, 55, 85, 66, 61, 60, 50, 55, 59, 56, 68, 75, 69, 63, 64, 58, 65, 71, 57, 50, 53, 68, 64, 79, 57, 61, 49, 78, 72, 64, 67, 78, 61, 50, 66, 75, 63, 67, 94, 71, 68, 53, 65, 51, 64, 64, 56, 72, 63, 49, 66, 64, 68, 71, 66, 65, 55, 70, 71, 57, 62, 65, 66, 61, 57, 66, 64, 53, 46, 57, 57, 59, 80, 54, 103, 75, 50, 57, 56, 69, 108, 74, 58, 64, 68, 59, 62, 55, 87, 69, 55, 51, 59, 78, 54, 53, 81, 84, 66, 73, 54, 77, 68, 63, 69, 68, 66, 54, 75, 58, 73, 56, 91, 61, 66, 63, 75, 67, 54, 72, 74, 55, 61, 55, 72, 59, 58, 58, 73, 75, 58, 50, 69, 44, 56, 67, 136, 52, 89, 72, 56, 83, 61, 74, 89, 65, 70, 42, 42, 51, 54, 61, 66, 72, 79, 73, 74, 55, 66, 86, 68, 53, 74, 58, 56, 65, 62, 69, 85, 52, 46, 79, 70, 79, 60, 62, 70, 70, 65, 56, 57, 64, 81, 74, 66, 56, 55, 61, 61, 70, 93, 62, 57, 66, 60, 65, 64, 63, 66, 88, 68, 72, 68, 67, 58, 78, 62, 61, 56, 72, 59, 80, 62, 61, 63, 80, 66, 70, 62, 68, 74, 64, 72, 55, 69, 54, 68, 56, 69, 58, 57, 71, 55, 76, 62, 60, 51, 55, 82, 69, 60, 61, 61, 60, 67, 94, 67, 58, 91, 51, 57, 64, 74, 61, 68, 50, 67, 72, 70, 60, 63, 86, 62, 82, 65, 58, 77, 73, 47, 59, 78, 47, 56, 80, 76, 56, 69, 62, 64, 54, 56, 68, 68, 108, 70, 80, 66, 55, 54, 69, 60, 65, 66, 66, 62, 60, 63, 72, 72, 68, 78, 61, 59, 52, 60, 66, 63, 55, 63, 62, 66, 75, 62, 72, 64, 70, 61, 69, 68, 79, 71, 73, 63, 55, 69, 54, 60, 68, 35, 56, 66, 80, 73, 71, 61, 62, 72, 67, 67, 61, 54, 60, 67, 54, 62, 76, 62, 79, 67, 72, 71, 89, 66, 55, 51, 61, 69, 70, 71, 54, 66, 73, 67, 71, 68, 56, 60, 58, 52, 64, 87, 89, 79, 54, 59, 60, 63, 46, 72, 64, 104, 59, 94, 57, 55, 81, 68, 
68, 55, 65, 59, 77, 59, 79, 75, 57, 69, 52, 125, 50, 101, 41, 63, 63, 69, 56, 69, 80, 74, 59, 65, 68, 56, 57, 51, 60, 76, 68, 63, 79, 62, 93, 56, 72, 59, 57, 57, 75, 74, 62, 63, 69, 68, 79, 58, 50, 59, 70, 52, 67, 58, 68, 57, 66, 58, 57, 68, 61, 66, 73, 83, 77, 76, 56, 63, 68, 58, 71, 59, 64, 63, 68, 50, 66, 64, 77, 71, 60, 54, 56, 58, 67, 63, 68, 53, 55, 61, 54, 70, 67, 95, 70, 69, 57, 74, 71, 74, 86, 62, 145, 51, 65, 63, 72, 67, 75, 77, 64, 64, 61, 74, 69, 62, 67, 81, 69, 58, 76, 71, 58, 82, 44, 60, 65, 57, 66, 68, 54, 61, 67, 90, 75, 64, 58, 57, 54, 49, 62, 58, 65, 83, 51, 61, 61, 60, 76, 51, 81, 59, 68, 67, 49, 65, 64, 65, 62, 66, 75, 66, 44, 67, 62, 65, 88, 53, 69, 81, 60, 61, 54, 56, 71, 72, 60, 67, 87, 54, 57, 63, 62, 68, 57, 87, 55, 62, 65, 66, 64, 46, 55, 76, 52, 65, 65, 61, 49, 66, 101, 65, 53, 56, 55, 71, 62, 56, 102, 86, 60, 80, 63, 58, 65, 56, 54, 76, 52, 60, 68, 55, 58, 59, 103, 62, 67, 61, 59, 67, 60, 64, 66, 51, 47, 70, 52, 69, 70, 57, 49, 62, 59, 57, 68, 70, 74, 55, 85, 63, 84, 72, 66, 56, 58, 63, 52, 87, 58, 60, 69, 75, 56, 75, 71, 68, 71, 57, 54, 78, 58, 68, 64, 64, 62, 59, 65, 77, 61, 83, 62, 102, 60, 65, 63, 63, 56, 52, 70, 66, 62, 59, 52, 68, 62, 87, 51, 58, 71, 55, 57, 57, 50, 52, 52, 91, 63, 59, 69, 57, 61, 71, 64, 47, 69, 54, 68, 72, 57, 71, 70, 83, 59, 69, 54, 48, 68, 62, 64, 50, 58, 78, 58, 66, 68, 67, 53, 62, 60, 59, 80, 61, 59, 55, 53, 50, 66, 63, 71, 71, 69, 67, 76, 52, 69, 59, 55, 53, 56, 60, 65, 54, 58, 68, 63, 50, 58, 72, 45, 125, 56, 70, 47, 56, 83, 73, 61, 61, 56, 67, 60, 73, 61, 58, 65, 64, 57, 62, 75, 65, 57, 87, 42, 72, 81, 52, 43, 66, 69, 69, 55, 43, 82, 62, 61, 100, 47, 80, 67, 74, 61, 58, 109, 85, 91, 70, 68, 59, 63, 52, 70, 66, 72, 52, 73, 64, 56, 75, 52, 60, 67, 64, 69, 56, 53, 67, 111, 71, 70, 71, 86, 60, 89, 89, 63, 60, 42, 55, 65, 65, 69, 63, 73, 64, 60, 68, 75, 58, 94, 45, 85, 56, 73, 42, 68, 63, 78, 78, 78, 66, 73, 65, 64, 67, 56, 76, 73, 66, 72, 62, 61, 57, 61, 46, 72, 60, 81, 100, 74, 98, 64, 55, 64, 72, 74, 67, 68, 67, 74, 89, 79, 51, 75, 57, 67, 69, 68, 52, 56, 50, 79, 69, 48, 70, 75, 61, 71, 52, 53, 81, 64, 66, 83, 79, 57, 78, 66, 60, 60, 57, 96, 77, 75, 91, 48, 72, 97, 82, 50, 71, 61, 55, 74, 62, 50, 61, 62, 81, 78, 77, 48, 60, 44, 44, 78, 49, 62, 57, 85, 69, 66, 144, 64, 53, 62, 77, 61, 76, 77, 64, 69, 54, 56, 52, 64, 82, 55, 84, 64, 60, 77, 92, 58, 75, 68, 80, 105, 48, 76, 96, 50, 58, 148, 49, 61, 66, 78, 63, 82, 50, 55, 71, 62, 65, 52, 85, 73, 73, 62, 71, 74, 64, 71, 62, 61, 59, 56, 72, 75, 87, 62, 70, 77, 48, 52, 51, 92, 71, 64, 68, 65, 138, 67, 70, 79, 77, 64, 50, 52, 58, 64, 76, 55, 57, 60, 66, 58, 58, 55, 52, 57, 72, 76, 69, 55, 69, 65, 74, 46, 59, 50, 53, 71, 60, 52, 60, 68, 54, 55, 63, 43, 61, 93, 64, 80, 117, 73, 58, 76, 67, 53, 61, 53, 66, 54, 64, 77, 47, 65, 69, 77, 73, 60, 82, 65, 63, 60, 69, 40, 65, 67, 50, 76, 50, 62, 56, 65, 68, 64, 61, 62, 85, 55, 69, 59, 67, 92, 66, 59, 62, 61, 57, 58, 52, 62, 69, 80, 72, 86, 45, 96, 60, 55, 65, 101, 92, 62, 70, 69, 78, 73, 72, 59, 81, 57, 54, 76, 92, 61, 68, 57, 67, 73, 68, 59, 50, 59, 58, 60, 44, 73, 83, 63, 48, 60, 55, 68, 81, 53, 57, 44, 65, 57, 116, 67, 60, 72, 48, 73, 71, 65, 61, 79, 73, 52, 47, 65, 62, 55, 66, 88, 66, 69, 73, 57, 76, 66, 58, 68, 76, 79, 57, 81, 93, 53, 80, 59, 70, 64, 85, 75, 66, 60, 73, 65, 58, 60, 49, 72, 71, 66, 58, 59, 73, 58, 57, 77, 62, 57, 79, 69, 69, 69, 66, 53, 58, 61, 82, 48, 59, 56, 63, 56, 60, 58, 74, 59, 59, 87, 73, 66, 56, 57, 71, 65, 56, 78, 59, 48, 70, 70, 51, 88, 73, 52, 67, 77, 72, 34, 72, 50, 58, 74, 94, 87, 54, 61, 46, 55, 77, 46, 48, 69, 74, 
97, 78, 67, 62, 69, 32, 71, 63, 64, 50, 55, 64, 67, 42, 91, 68, 59, 80, 46, 63, 58, 77, 58, 63, 53, 70, 52, 55, 54, 62, 90, 63, 65, 64, 67, 61, 142, 67, 61, 58, 54, 70, 61, 57, 48, 56, 63, 49, 62, 71, 65, 60, 54, 60, 65, 54, 51, 74, 51, 54, 73, 74, 74, 75, 72, 76, 42, 74, 57, 80, 61, 53, 95, 62, 64, 55, 75, 64, 52, 53, 73, 89, 58, 66, 83, 60, 54, 61, 82, 60, 52, 54, 65, 63, 68, 77, 66, 65, 43, 67, 65, 42, 75, 78, 69, 93, 63, 69, 61, 57, 89, 58, 75, 66, 76, 64, 53, 59, 79, 80, 68, 51, 78, 78, 73, 68, 86, 47, 74, 49, 66, 54, 57, 103, 69, 86, 69, 49, 65, 68, 53, 81, 70, 63, 68, 63, 74, 70, 63, 57, 43, 83, 67, 50, 63, 68, 65, 62, 59, 52, 51, 48, 58, 61, 68, 73, 91, 58, 62, 59, 58, 63, 72, 85, 66, 59, 110, 82, 59, 68, 77, 57, 66, 56, 76, 82, 59, 61, 70, 58, 66, 54, 56, 63, 42, 55, 58, 58, 97, 65, 70, 59, 68, 62, 43, 61, 67, 55, 64, 67, 68, 70, 66, 57, 71, 60, 67, 55, 72, 52, 59, 91, 75, 68, 48, 72, 74, 53, 60, 62, 60, 52, 73, 90, 52, 47, 55, 111, 39, 73, 107, 73, 109, 51, 81, 69, 71, 53, 52, 68, 75, 52, 56, 65, 62, 48, 68, 80, 53, 42, 102, 60, 43, 71, 68, 49, 64, 62, 72, 71, 77, 58, 65, 79, 66, 80, 58, 74, 68, 87, 61, 97, 70, 69, 43, 76, 84, 63, 65, 55, 77, 58, 44, 63, 47, 54, 66, 54, 57, 52, 91, 49, 66, 63, 67, 90, 64, 56, 105, 68, 47, 63, 53, 55, 58, 58, 58, 99, 83, 83, 81, 65, 79, 52, 76, 61, 57, 51, 68, 59, 63, 47, 74, 60, 53, 41, 66, 65, 74, 45, 66, 61, 64, 66, 64, 58, 58, 72, 117, 57, 71, 71, 84, 61, 76, 56, 57, 53, 72, 39, 59, 61, 73, 57, 71, 69, 67, 75, 60, 54, 69, 53, 64, 58, 70, 55, 77, 47, 74, 70, 52, 81, 64, 57, 60, 65, 61, 94, 61, 65, 105, 62, 54, 69, 72, 54, 73, 68, 67, 45, 95, 39, 64, 84, 87, 53, 69, 67, 60, 63, 83, 75, 61, 65, 58, 73, 85, 72, 75, 73, 54, 81, 68, 70, 51, 47, 70, 95, 61, 64, 51, 58, 63, 48, 80, 76, 78, 68, 54, 57, 40, 62, 67, 72, 75, 78, 67, 66, 48, 63, 46, 71, 82, 60, 60, 80, 69, 77, 67, 51, 69, 88, 58, 84, 82, 70, 51, 59, 67, 65, 55, 55, 74, 68, 35, 67, 59, 72, 63, 57, 69, 71, 75, 73, 76, 56, 62, 65, 44, 88, 85, 62, 62, 48, 87, 63, 60, 62, 55, 83, 73, 46, 60, 74, 79, 56, 59, 61, 114, 114, 62, 65, 75, 66, 89, 58, 61, 65, 48, 69, 74, 63, 73, 61, 45, 63, 108, 65, 66, 69, 61, 61, 53, 78, 61, 55, 75, 53, 72, 73, 68, 69, 74, 86, 74, 62, 69, 62, 80, 69, 36, 74, 54, 68, 84, 64, 54, 58, 42, 81, 55, 77, 47, 85, 82, 65, 69, 54, 80, 66, 60, 71, 67, 44, 88, 46, 52, 75, 58, 65, 54, 55, 69, 76, 67, 126, 59, 69, 61, 55, 77, 55, 80, 31, 57, 45, 76, 62, 63, 85, 41, 65, 91, 67, 55, 49, 40, 73, 63, 80, 69, 57, 60, 52, 74, 55, 76, 60, 99, 65, 69, 60, 77, 61, 77, 93, 51, 62, 60, 67, 62, 67, 72, 60, 59, 52, 62, 88, 62, 91, 110, 47, 64, 55, 34, 75, 56, 62, 79, 75, 62, 56, 80, 77, 40, 62, 66, 54, 104, 77, 70, 70, 66, 56, 109, 66, 78, 66, 58, 55, 55, 80, 85, 61, 65, 108, 61, 74, 65, 66, 40, 76, 66, 64, 47, 60, 73, 68, 52, 57, 67, 54, 39, 86, 69, 49, 63, 72, 72, 98, 74, 33, 38, 46, 54, 58, 86, 63, 113, 66, 73, 70, 80, 61, 64, 63, 77, 58, 49, 54, 56, 50, 45, 49, 60, 63, 75, 61, 66, 65, 65, 61, 66, 70, 50, 51, 73, 47, 63, 62, 78, 84, 70, 67, 53, 67, 73, 62, 73, 53, 43, 62, 76, 74, 57, 77, 42, 81, 66, 67, 63, 74, 56, 65, 108, 75, 52, 68, 36, 41, 68, 61, 65, 73, 99, 68, 49, 58, 61, 59, 61, 67, 72, 75, 94, 58, 65, 67, 57, 60, 57, 63, 67, 64, 126, 56, 48, 35, 112, 52, 48, 81, 90, 50, 82, 51, 108, 57, 53, 85, 73, 76, 72, 106, 49, 66, 64, 56, 77, 57, 78, 59, 71, 65, 63, 67, 85, 73, 63, 70, 74, 53, 81, 55, 72, 55, 84, 66, 80, 71, 64, 50, 62, 66, 50, 77, 82, 56, 71, 96, 53, 63, 57, 68, 57, 76, 64, 74, 61, 58, 64, 70, 64, 62, 71, 31, 68, 62, 59, 82, 63, 57, 65, 53, 48, 52, 80, 63, 60, 78, 81, 53, 
56, 65, 57, 65, 74, 75, 69, 61, 68, 68, 60, 56, 64, 76, 104, 42, 59, 79, 50, 66, 48, 56, 87, 80, 79, 50, 77, 63, 64, 87, 77, 75, 42, 71, 83, 64, 101, 71, 69, 70, 73, 83, 62, 103, 73, 62, 65, 66, 61, 69, 55, 75, 100, 52, 87, 59, 72, 72, 58, 54, 63, 82, 84, 56, 65, 64, 45, 45, 53, 66, 55, 100, 49, 60, 72, 74, 85, 54, 66, 54, 49, 60, 68, 54, 73, 56, 72, 70, 50, 59, 76, 74, 58, 64, 57, 63, 80, 47, 58, 74, 59, 63, 74, 58, 45, 53, 75, 53, 62, 44, 49, 93, 44, 57, 60, 70, 60, 80, 38, 132, 86, 46, 75, 58, 88, 90, 75, 66, 65, 59, 57, 60, 80, 62, 67, 127, 55, 58, 53, 65, 69, 75, 51, 71, 69, 61, 59, 50, 79, 75, 60, 65, 94, 70, 76, 71, 76, 140, 64, 74, 63, 75, 66, 65, 57, 58, 69, 49, 78, 47, 73, 76, 61, 66, 43, 66, 52, 61, 75, 71, 52, 97, 50, 60, 65, 70, 48, 70, 87, 67, 42, 47, 59, 72, 72, 72, 52, 77, 41, 55, 60, 77, 60, 59, 79, 75, 75, 50, 59, 66, 75, 54, 50, 49, 64, 72, 72, 41, 57, 73, 58, 73, 75, 75, 83, 74, 85, 57, 91, 56, 61, 59, 71, 72, 94, 75, 66, 75, 67, 69, 80, 69, 68, 61, 73, 41, 85, 68, 70, 81, 55, 60, 55, 59, 55, 58, 61, 47, 63, 73, 55, 77, 72, 90, 80, 71, 54, 45, 98, 51, 51, 88, 56, 59, 56, 98, 48, 74, 67, 92, 66, 61, 63, 52, 48, 47, 101, 70, 82, 80, 71, 55, 74, 61, 57, 67, 72, 73, 64, 51, 70, 89, 39, 60, 76, 74, 65, 62, 47, 74, 61, 66, 53, 73, 56, 56, 31, 49, 49, 61, 65, 84, 90, 69, 88, 82, 66, 79, 48, 76, 53, 78, 51, 51, 69, 90, 62, 76, 67, 82, 68, 130, 85, 35, 72, 60, 71, 73, 60, 59, 74, 60, 83, 85, 52, 67, 81, 58, 71, 72, 58, 60, 79, 64, 85, 56, 137, 54, 69, 66, 71, 63, 60, 81, 44, 75, 54, 58, 92, 64, 54, 75, 50, 68, 62, 45, 65, 65, 74, 61, 59, 62, 66, 68, 67, 79, 48, 86, 53, 64, 127, 61, 99, 52, 56, 37, 61, 70, 49, 49, 85, 71, 59, 90, 86, 72, 70, 62, 56, 39, 59, 60, 61, 52, 52, 68, 53, 72, 70, 62, 68, 62, 77, 65, 58, 66, 78, 68, 54, 59, 65, 67, 100, 72, 100, 50, 80, 61, 73, 58, 74, 50, 63, 71, 44, 49, 58, 63, 103, 100, 60, 86, 68, 83, 100, 47, 106, 57, 87, 82, 61, 50, 75, 79, 57, 49, 79, 82, 67, 56, 60, 49, 73, 55, 64, 46, 70, 40, 65, 57, 72, 47, 79, 70, 73, 53, 72, 47, 71, 82, 59, 71, 81, 87, 60, 69, 57, 45, 49, 70, 43, 58, 54, 63, 74, 63, 56, 72, 70, 51, 67, 66, 40, 41, 51, 85, 75, 44, 63, 61, 117, 59, 69, 80, 49, 92, 100, 67, 60, 92, 82, 51, 55, 49, 92, 62, 79, 63, 49, 82, 51, 84, 54, 61, 73, 92, 98, 76, 64, 50, 61, 49, 57, 76, 79, 47, 70, 66, 85, 86, 56, 56, 47, 65, 62, 58, 74, 56, 56, 52, 57, 59, 63, 64, 53, 52, 62, 59, 49, 44, 58, 65, 77, 60, 74, 47, 65, 75, 68, 55, 75, 72, 53, 84, 66, 82, 93, 58, 65, 55, 80, 65, 58, 63, 78, 53, 61, 71, 51, 65, 47, 99, 58, 65, 51, 43, 71, 127, 73, 58, 52, 57, 65, 52, 83, 35, 65, 57, 62, 62, 51, 46, 75, 57, 49, 89, 64, 75, 67, 110, 95, 61, 48, 106, 63, 56, 130, 38, 61, 62, 82, 84, 64, 60, 44, 66, 52, 65, 62, 46, 95, 59, 57, 53, 54, 46, 56, 70, 58, 51, 41, 55, 58, 59, 62, 58, 66, 57, 53, 60, 61, 68, 70, 50, 90, 73, 69, 58, 69, 64, 57, 62, 60, 47, 95, 73, 78, 53, 71, 61, 51, 73, 53, 90, 82, 64, 92, 74, 70, 86, 57, 62, 58, 72, 55, 75, 94, 111, 48, 54, 75, 52, 52, 45, 60, 87, 79, 59, 68, 59, 109, 50, 72, 67, 49, 81, 57, 38, 71, 54, 89, 57, 46, 77, 64, 79, 75, 64, 59, 82, 66, 67, 88, 57, 79, 69, 74, 87, 67, 46, 88, 62, 126, 59, 57, 57, 80, 65, 69, 55, 32, 44, 62, 63, 56, 85, 86, 57, 51, 70, 66, 62, 64, 74, 53, 73, 72, 59, 76, 63, 102, 52, 81, 75, 78, 79, 48, 70, 92, 46, 77, 78, 69, 55, 71, 74, 53, 51, 66, 61, 72, 104, 77, 73, 59, 83, 56, 56, 69, 74, 84, 86, 68, 81, 58, 85, 80, 68, 50, 59, 73, 51, 66, 77, 74, 51, 43, 88, 81, 50, 52, 68, 83, 65, 47, 40, 60, 69, 52, 66, 55, 67, 56, 65, 63, 75, 71, 73, 96, 66, 81, 45, 83, 79, 81, 49, 56, 60, 76, 66, 31, 
74, 83, 77, 96, 72, 77, 86, 56, 58, 62, 91, 66, 65, 64, 52, 75, 53, 58, 42, 39, 88, 74, 51, 65, 54, 59, 59, 49, 65, 79, 57, 139, 33, 61, 65, 64, 109, 76, 49, 83, 47, 98, 51, 66, 71, 49, 69, 73, 66, 68, 64, 67, 73, 50, 56, 63, 61, 66, 106, 68, 52, 92, 100, 89, 69, 54, 48, 56, 52, 62, 71, 44, 51, 56, 60, 64, 72, 74, 66, 79, 52, 54, 29, 77, 53, 66, 60, 57, 65, 61, 82, 67, 82, 68, 44, 79, 60, 71, 51, 64, 64, 69, 68, 65, 80, 81, 63, 60, 39, 72, 34, 73, 67, 60, 75, 91, 52, 72, 63, 61, 74, 37, 51, 76, 49, 39, 40, 54, 90, 60, 65, 60, 73, 48, 87, 93, 69, 51, 27, 57, 48, 75, 55, 45, 64, 44, 44, 31, 57, 134, 85, 56, 42, 81, 64, 38, 82, 60, 62, 80, 65, 57, 73, 41, 69, 51, 70, 74, 64, 63, 84, 57, 54, 86, 58, 58, 70, 74, 81, 56, 74, 79, 74, 72, 68, 65, 68, 79, 57, 67, 59, 93, 75, 63, 54, 90, 80, 57, 65, 50, 90, 60, 63, 65, 83, 70, 48, 68, 70, 61, 63, 58, 67, 61, 46, 64, 56, 39, 91, 65, 68, 51, 56, 83, 45, 61, 55, 53, 63, 59, 50, 47, 72, 61, 120, 44, 89, 92, 61, 54, 63, 92, 63, 81, 61, 63, 84, 65, 49, 79, 68, 84, 67, 58, 47, 76, 68, 57, 45, 82, 87, 84, 55, 40, 60, 49, 80, 63, 66, 66, 67, 60, 53, 81, 87, 66, 56, 90, 42, 43, 60, 51, 68, 50, 62, 55, 58, 76, 73, 70, 59, 50, 50, 62, 82, 50, 76, 62, 65, 74, 98, 53, 85, 76, 58, 62, 65, 55, 88, 65, 65, 70, 92, 48, 66, 78, 70, 118, 68, 73, 52, 46, 64, 92, 75, 49, 66, 67, 76, 55, 74, 55, 73, 85, 64, 71, 70, 78, 49, 84, 65, 57, 75, 42, 46, 61, 72, 42, 46, 52, 71, 59, 88, 96, 57, 61, 75, 54, 63, 77, 78, 55, 77, 125, 80, 106, 70, 52, 54, 49, 58, 67, 66, 81, 66, 69, 51, 54, 61, 121, 64, 54, 61, 51, 82, 111, 60, 87, 70, 60, 71, 58, 57, 104, 43, 68, 45, 50, 42, 63, 72, 62, 58, 58, 72, 61, 69, 67, 60, 68, 69, 65, 68, 74, 48, 59, 64, 75, 53, 78, 52, 41, 65, 77, 65, 70, 55, 75, 64, 74, 49, 94, 66, 63, 118, 81, 64, 73, 64, 61, 56, 69, 49, 84, 69, 54, 70, 83, 93, 89, 73, 66, 54, 62, 58, 72, 67, 85, 61, 68, 51, 79, 56, 69, 76, 53, 55, 48, 88, 72, 64, 62, 79, 43, 64, 80, 53, 69, 71, 86, 64, 60, 63, 56, 91, 61, 64, 73, 80, 79, 53, 62, 82, 75, 59, 62, 87, 97, 58, 80, 87, 74, 74, 68, 55, 62, 74, 56, 89, 79, 62, 65, 65, 76, 58, 88, 69, 58, 56, 61, 69, 76, 68, 60, 63, 65, 52, 79, 69, 55, 50, 42, 73, 120, 59, 77, 85, 58, 59, 46, 41, 64, 68, 81, 48, 57, 67, 81, 59, 59, 69, 63, 58, 69, 77, 67, 63, 64, 75, 59, 52, 88, 77, 50, 61, 57, 68, 65, 35, 76, 69, 62, 70, 59, 82, 80, 76, 71, 70, 65, 60, 63, 59, 65, 42, 49, 49, 65, 69, 60, 56, 56, 88, 110, 74, 48, 53, 73, 48, 67, 55, 74, 73, 55, 50, 60, 62, 54, 67, 72, 51, 51, 67, 64, 67, 70, 40, 76, 55, 66, 74, 37, 70, 75, 63, 43, 58, 43, 36, 93, 52, 119, 70, 79, 49, 76, 61, 102, 61, 68, 54, 75, 63, 45, 60, 80, 43, 65, 47, 74, 74, 52, 75, 54, 61, 42, 66, 45, 58, 53, 59, 60, 50, 59, 83, 51, 85, 68, 48, 59, 49, 64, 80, 85, 53, 76, 96, 56, 95, 65, 123, 109, 90, 58, 56, 86, 48, 62, 69, 47, 89, 72, 29, 105, 79, 76, 57, 92, 54, 49, 65, 95, 68, 51, 64, 85, 69, 68, 80, 68, 87, 72, 72, 76, 57, 70, 68, 57, 113, 42, 72, 48, 58, 59, 45, 49, 68, 54, 65, 69, 44, 73, 59, 91, 138, 82, 67, 81, 94, 50, 94, 51, 35, 52, 49, 52, 55, 58, 75, 60, 46, 37, 62, 62, 57, 45, 84, 82, 79, 66, 83, 104, 77, 69, 82, 59, 43, 53, 70, 62, 53, 88, 56, 59, 97, 48, 56, 65, 68, 72, 64, 63, 53, 76, 62, 83, 152, 66, 69, 55, 48, 87, 58, 78, 59, 48, 74, 47, 66, 55, 56, 64, 65, 83, 62, 53, 41, 52, 82, 50, 64, 75, 59, 45, 69, 69, 71, 89, 50, 78, 68, 85, 57, 70, 63, 73, 72, 103, 42, 66, 107, 92, 85, 69, 58, 67, 54, 38, 69, 86, 72, 51, 69, 64, 58, 44, 52, 80, 61, 113, 110, 70, 56, 73, 80, 49, 66, 92, 83, 54, 83, 51, 76, 51, 45, 47, 95, 76, 67, 58, 82, 44, 47, 62, 72, 70, 53, 71, 72, 54, 50, 
89, 53, 56, 161, 58, 80, 57, 80, 61, 44, 62, 66, 77, 46, 87, 70, 78, 59, 84, 62, 63, 80, 51, 63, 68, 52, 97, 51, 64, 76, 57, 55, 73, 58, 68, 70, 79, 78, 64, 94, 45, 61, 60, 62, 71, 94, 68, 89, 74, 95, 54, 63, 55, 59, 100, 48, 80, 68, 64, 60, 79, 60, 102, 66, 56, 59, 62, 99, 64, 119, 63, 61, 77, 54, 71, 65, 68, 72, 46, 47, 71, 45, 78, 79, 70, 73, 63, 55, 98, 73, 56, 62, 47, 63, 62, 49, 70, 66, 89, 88, 46, 48, 65, 59, 54, 155, 53, 60, 60, 65, 83, 57, 42, 48, 67, 59, 79, 75, 71, 72, 43, 58, 63, 57, 57, 65, 53, 59, 94, 63, 74, 71, 66, 53, 52, 76, 59, 72, 75, 45, 72, 79, 62, 50, 63, 69, 89, 65, 62, 76, 86, 62, 54, 71, 41, 50, 47, 67, 86, 61, 82, 75, 59, 66, 78, 58, 58, 53, 57, 70, 72, 71, 55, 44, 59, 57, 58, 57, 77, 62, 72, 68, 60, 54, 68, 80, 59, 70, 45, 69, 79, 84, 75, 72, 79, 68, 60, 81, 67, 57, 55, 53, 65, 64, 69, 57, 90, 65, 58, 75, 58, 69, 68, 79, 67, 65, 98, 81, 76, 72, 72, 67, 63, 44, 78, 56, 71, 50, 72, 61, 71, 65, 66, 76, 70, 65, 57, 59, 62, 49, 69, 48, 85, 74, 60, 55, 34, 76, 65, 52, 67, 62, 50, 40, 84, 51, 72, 66, 84, 84, 62, 59, 54, 65, 76, 68, 64, 54, 53, 59, 69, 86, 76, 63, 91, 46, 110, 67, 65, 69, 61, 75, 59, 58, 58, 65, 52, 73, 69, 57, 55, 54, 59, 86, 67, 71, 88, 59, 60, 73, 53, 63, 70, 58, 56, 66, 62, 54, 49, 89, 67, 67, 64, 56, 80, 64, 51, 58, 81, 78, 93, 54, 44, 65, 61, 80, 80, 55, 60, 66, 63, 54, 61, 86, 57, 74, 70, 71, 75, 57, 53, 59, 70, 74, 63, 145, 64, 80, 58, 57, 49, 75, 66, 92, 59, 61, 66, 43, 48, 71, 64, 60, 74, 51, 69, 61, 51, 46, 112, 61, 81, 84, 58, 66, 68, 83, 59, 89, 52, 89, 76, 106, 64, 58, 58, 80, 61, 151, 54, 66, 69, 57, 42, 61, 48, 60, 59, 80, 45, 57, 64, 70, 52, 95, 81, 63, 54, 57, 60, 58, 49, 55, 76, 54, 61, 69, 72, 52, 52, 66, 76, 63, 53, 55, 72, 70, 72, 56, 59, 64, 77, 60, 56, 53, 74, 85, 57, 61, 65, 59, 60, 73, 78, 68, 68, 76, 66, 66, 63, 50, 48, 63, 76, 63, 65, 51, 72, 60, 55, 78, 80, 69, 45, 67, 72, 54, 56, 82, 68, 87, 113, 55, 58, 52, 81, 53, 68, 74, 49, 68, 56, 85, 104, 45, 58, 63, 73, 65, 78, 69, 57, 67, 70, 63, 66, 69, 59, 60, 64, 86, 53, 50, 55, 50, 28, 51, 57, 70, 68, 55, 53, 43, 91, 69, 65, 62, 72, 70, 57, 51, 62, 72, 74, 70, 75, 70, 47, 72, 48, 58, 89, 65, 47, 68, 68, 79, 65, 70, 76, 80, 78, 70, 57, 75, 68, 72, 68, 51, 69, 43, 65, 32, 66, 64, 48, 63, 69, 53, 49, 56, 72, 78, 57, 74, 84, 85, 63, 62, 56, 78, 45, 53, 74, 65, 54, 70, 76, 72, 117, 61, 57, 63, 23, 62, 94, 66, 68, 56, 63, 58, 71, 70, 71, 64, 73, 59, 64, 43, 61, 82, 79, 79, 78, 71, 69, 64, 64, 98, 69, 74, 62, 81, 78, 67, 64, 50, 76, 56, 62, 39, 90, 74, 52, 53, 65, 63, 86, 48, 60, 47, 68, 64, 105, 47, 52, 57, 52, 62, 87, 55, 47, 68, 61, 106, 81, 55, 67, 73, 69, 51, 114, 53, 84, 58, 78, 65, 55, 62, 61, 54, 68, 52, 68, 50, 66, 68, 69, 55, 86, 86, 60, 43, 78, 63, 72, 68, 53, 64, 79, 62, 60, 107, 38, 91, 62, 66, 85, 57, 60, 59, 70, 65, 44, 59, 81, 53, 68, 89, 104, 53, 80, 55, 66, 82, 65, 61, 44, 74, 56, 69, 53, 66, 59, 79, 64, 54, 62, 97, 65, 48, 59, 58, 62, 78, 94, 70, 74, 120, 58, 60, 56, 58, 52, 55, 70, 45, 59, 66, 64, 70, 73, 59, 64, 53, 53, 70, 60, 59, 47, 56, 60, 63, 52, 50, 67, 81, 106, 77, 81, 69, 64, 60, 66, 65, 86, 71, 64, 61, 68, 80, 59, 75, 37, 77, 83, 111, 83, 52, 54, 116, 87, 83, 53, 44, 50, 80, 122, 72, 67, 70, 68, 72, 89, 58, 46, 82, 70, 60, 77, 67, 84, 81, 64, 53, 77, 70, 59, 63, 78, 57, 54, 58, 71, 73, 57, 80, 64, 69, 145, 80, 56, 58, 68, 55, 68, 38, 68, 43, 77, 53, 66, 66, 64, 48, 60, 84, 79, 52, 77, 50, 62, 48, 48, 70, 72, 66, 55, 73, 69, 75, 81, 73, 79, 37, 67, 64, 76, 50, 74, 47, 74, 56, 63, 58, 94, 61, 68, 73, 78, 54, 64, 75, 72, 70, 67, 58, 59, 63, 68, 64, 65, 57, 
59, 63, 68, 54, 69, 58, 61, 69, 76, 48, 59, 53, 63, 65, 72, 67, 45, 89, 60, 63, 78, 99, 107, 62, 71, 64, 79, 116, 61, 82, 70, 51, 57, 58, 80, 73, 57, 49, 83, 58, 63, 75, 65, 57, 57, 59, 78, 87, 46, 73, 80, 58, 62, 84, 110, 44, 47, 59, 74, 57, 76, 65, 72, 73, 58, 58, 57, 71, 57, 71, 59, 59, 67, 61, 57, 46, 83, 64, 120, 72, 66, 69, 80, 68, 57, 51, 61, 52, 93, 123, 86, 69, 76, 50, 56, 51, 63, 76, 63, 85, 58, 43, 116, 47, 79, 43, 104, 56, 52, 57, 75, 65, 67, 73, 80, 58, 69, 76, 64, 69, 74, 81, 56, 87, 71, 85, 75, 60, 79, 77, 75, 55, 85, 65, 58, 78, 68, 68, 68, 79, 56, 67, 80, 57, 94, 70, 43, 65, 84, 58, 50, 63, 86, 66, 53, 45, 64, 63, 58, 62, 64, 69, 74, 66, 57, 65, 63, 64, 82, 55, 53, 64, 68, 95, 66, 80, 55, 78, 56, 59, 73, 66, 51, 87, 72, 61, 77, 56, 67, 73, 68, 107, 76, 55, 60, 67, 65, 54, 61, 68, 71, 42, 53, 65, 83, 69, 63, 56, 106, 61, 60, 60, 58, 64, 40, 73, 64, 68, 76, 62, 71, 76, 63, 70, 54, 73, 52, 57, 75, 68, 48, 61, 66, 73, 57, 134, 74, 55, 83, 87, 69, 63, 50, 62, 63, 78, 74, 61, 79, 67, 50, 98, 71, 68, 68, 56, 61, 62, 67, 45, 57, 75, 46, 64, 69, 85, 69, 144, 75, 81, 57, 66, 54, 45, 66, 54, 53, 68, 78, 64, 72, 67, 54, 56, 69, 64, 57, 75, 68, 84, 102, 86, 71, 55, 68, 55, 62, 53, 70, 91, 72, 70, 62, 60, 65, 122, 55, 70, 44, 75, 46, 77, 72, 74, 58, 55, 65, 47, 89, 61, 80, 79, 87, 95, 62, 88, 95, 76, 68, 83, 56, 49, 57, 66, 70, 80, 71, 50, 93, 72, 63, 55, 65, 55, 69, 66, 62, 63, 59, 63, 65, 49, 69, 70, 73, 68, 83, 68, 96, 60, 71, 58, 65, 65, 53, 82, 57, 78, 57, 62, 52, 68, 67, 52, 53, 55, 76, 62, 62, 91, 76, 64, 48, 70, 64, 66, 65, 104, 91, 87, 66, 62, 45, 85, 46, 53, 69, 71, 72, 68, 65, 50, 62, 83, 59, 71, 59, 68, 65, 67, 60, 76, 55, 56, 51, 116, 57, 51, 73, 56, 58, 81, 70, 80, 76, 83, 88, 72, 55, 49, 71, 64, 77, 62, 78, 66, 76, 79, 70, 54, 61, 70, 50, 53, 59, 64, 77, 67, 63, 62, 88, 65, 65, 75, 67, 59, 56, 59, 66, 68, 51, 89, 60, 67, 62, 52, 71, 58, 70, 59, 59, 62, 52, 51, 49, 67, 57, 61, 64, 71, 52, 66, 80, 75, 66, 66, 69, 62, 63, 69, 68, 81, 56, 116, 46, 76, 66, 48, 55, 56, 66, 69, 77, 61, 55, 50, 54, 62, 60, 59, 68, 60, 63, 59, 63, 65, 66, 61, 73, 57, 52, 71, 59, 86, 76, 69, 66, 65, 75, 70, 62, 61, 66, 65, 58, 58, 59, 46, 68, 51, 62, 80, 58, 70, 80, 55, 63, 68, 91, 84, 59, 61, 68, 61, 53, 71, 64, 67, 65, 54, 73, 56, 82, 59, 61, 72, 67, 55, 59, 79, 69, 72, 55, 59, 60, 69, 54, 72, 64, 55, 69, 81, 73, 61, 73, 80, 69, 72, 51, 57, 55, 48, 55, 53, 59, 74, 76, 63, 90, 78, 50, 56, 63, 74, 67, 64, 72, 73, 68, 52, 61, 68, 51, 74, 95, 74, 65, 61, 62, 65, 48, 61, 58, 44, 78, 58, 68, 53, 63, 66, 59, 68, 57, 122, 46, 65, 68, 54, 91, 69, 53, 78, 53, 65, 70, 60, 54, 61, 60, 64, 70, 65, 54, 61, 84, 55, 76, 65, 59, 75, 52, 76, 74, 67, 54, 55, 62, 63, 73, 71, 51, 67, 88, 93, 54, 71, 53, 60, 69, 61, 85, 63, 64, 62, 83, 55, 60, 79, 76, 59, 66, 75, 73, 63, 79, 61, 75, 69, 75, 72, 60, 78, 65, 50, 63, 92, 49, 64, 80, 58, 65, 74, 67, 60, 52, 64, 53, 44, 61, 70, 73, 77, 67, 59, 74, 59, 97, 69, 65, 64, 45, 75, 61, 81, 50, 64, 58, 57, 58, 64, 69, 61, 62, 76, 62, 62, 75, 58, 51, 88, 57, 43, 66, 51, 59, 46, 58, 69, 65, 77, 60, 76, 46, 78, 66, 57, 81, 64, 41, 88, 61, 62, 61, 75, 62, 57, 128, 66, 62, 70, 57, 67, 75, 78, 34, 57, 69, 71, 68, 84, 46, 42, 82, 44, 64, 122, 73, 49, 75, 64, 71, 61, 49, 62, 77, 71, 76, 50, 83, 67, 50, 56, 58, 66, 71, 49, 98, 64, 72, 56, 49, 58, 102, 79, 91, 53, 55, 68, 51, 55, 62, 80, 63, 143, 81, 57, 60, 57, 63, 77, 83, 65, 76, 59, 47, 59, 63, 53, 67, 68, 65, 70, 71, 77, 55, 70, 101, 78, 80, 104, 83, 60, 65, 62, 59, 44, 59, 80, 56, 55, 71, 98, 76, 63, 73, 82, 52, 59, 57, 76, 66, 59, 60, 
72, 67, 55, 57, 87, 64, 46, 55, 84, 62, 60, 61, 70, 73, 62, 51, 92, 53, 58, 96, 44, 53, 63, 70, 66, 59, 72, 70, 59, 59, 76, 60, 69, 69, 96, 76, 72, 67, 51, 71, 59, 55, 75, 65, 55, 66, 75, 68, 61, 44, 79, 70, 58, 63, 91, 70, 71, 70, 61, 65, 66, 54, 72, 94, 64, 69, 59, 68, 53, 64, 62, 89, 55, 66, 68, 79, 58, 65, 57, 62, 78, 81, 57, 95, 67, 80, 83, 50, 67, 55, 74, 76, 68, 79, 72, 62, 81, 75, 56, 64, 72, 80, 44, 50, 50, 64, 61, 69, 57, 57, 61, 55, 68, 58, 58, 60, 66, 54, 57, 74, 68, 55, 77, 61, 68, 70, 81, 77, 69, 65, 69, 80, 79, 53, 61, 71, 55, 72, 55, 62, 50, 56, 59, 79, 70, 55, 57, 59, 64, 59, 72, 67, 61, 62, 93, 54, 69, 66, 70, 62, 55, 55, 68, 72, 71, 59, 56, 59, 56, 45, 53, 65, 64, 84, 55, 67, 66, 73, 66, 68, 60, 117, 65, 58, 63, 57, 50, 82, 52, 67, 64, 69, 67, 76, 63, 57, 49, 51, 61, 41, 52, 59, 81, 66, 68, 63, 56, 88, 64, 42, 76, 62, 61, 61, 52, 47, 60, 63, 74, 79, 62, 70, 61, 59, 40, 51, 59, 58, 49, 75, 64, 68, 60, 62, 77, 66, 60, 59, 57, 46, 55, 58, 67, 54, 81, 69, 76, 61, 61, 58, 102, 63, 82, 70, 52, 57, 98, 71, 73, 77, 75, 63, 64, 53, 51, 82, 46, 58, 60, 67, 64, 78, 95, 39, 58, 70, 78, 69, 71, 75, 42, 71, 41, 145, 48, 58, 74, 46, 61, 73, 62, 45, 55, 69, 60, 67, 61, 93, 54, 65, 55, 79, 76, 61, 60, 59, 58, 77, 61, 52, 43, 73, 63, 66, 69, 58, 43, 71, 61, 71, 60, 45, 71, 68, 84, 67, 61, 49, 56, 60, 63, 96, 86, 63, 62, 62, 70, 73, 75, 58, 46, 32, 46, 89, 62, 76, 52, 79, 55, 57, 63, 72, 56, 72, 68, 59, 70, 57, 79, 59, 63, 42, 57, 48, 52, 78, 103, 63, 65, 75, 62, 58, 73, 71, 52, 53, 69, 68, 71, 68, 50, 65, 78, 51, 61, 66, 48, 59, 63, 59, 69, 48, 56, 93, 58, 39, 41, 83, 74, 55, 47, 58, 72, 62, 81, 78, 63, 51, 80, 49, 48, 59, 79, 70, 58, 73, 65, 54, 76, 64, 71, 66, 57, 59, 69, 79, 59, 69, 50, 107, 47, 61, 78, 68, 73, 64, 51, 59, 82, 64, 66, 69, 48, 97, 91, 52, 54, 59, 55, 71, 55, 35, 57, 56, 80, 68, 62, 69, 74, 47, 75, 52, 60, 62, 70, 61, 82, 82, 65, 71, 57, 72, 62, 61, 47, 61, 61, 71, 60, 59, 62, 62, 64, 65, 47, 81, 79, 54, 58, 93, 57, 53, 76, 76, 56, 58, 57, 57, 97, 69, 78, 62, 62, 67, 65, 64, 65, 61, 65, 64, 70, 61, 74, 69, 60, 49, 52, 52, 93, 67, 64, 95, 36, 47, 53, 58, 71, 78, 119, 59, 55, 72, 57, 63, 70, 111, 77, 65, 51, 74, 55, 77, 65, 62, 61, 46, 64, 69, 62, 69, 52, 50, 80, 68, 54, 78, 55, 59, 87, 60, 116, 67, 84, 76, 64, 83, 60, 66, 65, 48, 64, 64, 62, 72, 86, 82, 68, 65, 54, 61, 62, 66, 68, 58, 73, 73, 55, 63, 52, 81, 67, 57, 75, 82, 105, 59, 54, 73, 59, 58, 61, 108, 56, 61, 61, 58, 83, 49, 94, 77, 72, 62, 50, 51, 57, 89, 58, 55, 59, 86, 56, 57, 76, 49, 70, 69, 89, 45, 120, 58, 72, 60, 69, 58, 62, 75, 74, 55, 70, 64, 59, 63, 68, 73, 70, 95, 71, 60, 49, 66, 53, 96, 58, 52, 55, 78, 52, 67, 60, 54, 58, 124, 70, 75, 69, 73, 62, 58, 62, 72, 73, 53, 50, 60, 50, 60, 58, 64, 61, 78, 69, 75, 70, 75, 72, 76, 63, 38, 57, 116, 52, 50, 117, 73, 52, 72, 59, 50, 50, 57, 76, 55, 50, 66, 67, 57, 91, 44, 69, 55, 60, 56, 76, 60, 59, 67, 49, 40, 86, 70, 44, 74, 52, 92, 64, 86, 50, 73, 84, 77, 67, 108, 68, 55, 63, 72, 78, 62, 81, 59, 69, 70, 73, 49, 71, 77, 69, 58, 60, 54, 64, 56, 82, 85, 62, 37, 47, 91, 50, 48, 46, 71, 62, 74, 52, 34, 57, 61, 68, 86, 89, 69, 75, 79, 70, 78, 54, 63, 103, 78, 53, 54, 64, 61, 68, 39, 63, 63, 56, 52, 115, 80, 81, 73, 86, 52, 61, 63, 73, 57, 45, 52, 60, 56, 66, 78, 63, 96, 61, 66, 52, 48, 46, 66, 59, 57, 97, 65, 56, 64, 91, 57, 60, 56, 46, 65, 64, 60, 57, 58, 61, 77, 59, 46, 62, 61, 71, 60, 50, 64, 52, 80, 49, 56, 68, 51, 64, 55, 73, 95, 51, 74, 60, 70, 68, 84, 54, 71, 58, 52, 83, 66, 65, 34, 55, 81, 76, 87, 53, 90, 71, 78, 52, 80, 51, 80, 55, 89, 70, 60, 69, 49, 91, 77, 
54, 79, 46, 67, 71, 108, 59, 55, 71, 64, 68, 70, 72, 63, 71, 72, 77, 65, 90, 69, 50, 73, 88, 57, 69, 65, 50, 60, 38, 63, 55, 61, 81, 68, 66, 76, 57, 88, 77, 66, 78, 73, 68, 54, 65, 64, 64, 57, 78, 58, 75, 62, 106, 70, 62, 58, 77, 52, 76, 60, 72, 69, 87, 54, 78, 71, 60, 81, 67, 79, 91, 57, 52, 57, 62, 63, 67, 77, 79, 96, 40, 66, 64, 67, 61, 64, 87, 59, 52, 70, 55, 67, 64, 48, 106, 67, 89, 51, 69, 59, 75, 65, 65, 70, 56, 64, 77, 68, 39, 87, 59, 72, 56, 83, 56, 77, 91, 71, 76, 56, 64, 53, 69, 69, 49, 45, 61, 47, 71, 58, 66, 96, 43, 55, 67, 63, 117, 61, 50, 58, 58, 87, 73, 81, 57, 48, 95, 55, 59, 62, 80, 64, 57, 98, 55, 64, 61, 56, 50, 70, 105, 64, 95, 62, 72, 95, 54, 78, 77, 67, 83, 60, 85, 54, 63, 65, 73, 50, 55, 58, 54, 54, 65, 70, 93, 64, 67, 57, 88, 64, 88, 57, 53, 96, 70, 60, 44, 46, 44, 72, 49, 73, 74, 59, 60, 60, 55, 48, 56, 59, 61, 75, 66, 48, 53, 76, 62, 45, 75, 55, 56, 79, 68, 83, 56, 69, 49, 73, 58, 57, 63, 57, 39, 53, 77, 52, 82, 59, 62, 64, 60, 67, 32, 83, 84, 60, 52, 82, 64, 74, 91, 61, 51, 64, 65, 59, 54, 83, 60, 59, 58, 56, 58, 65, 67, 47, 69, 79, 77, 67, 148, 70, 68, 74, 93, 64, 67, 78, 75, 89, 58, 73, 41, 81, 90, 66, 64, 49, 77, 54, 79, 70, 59, 58, 76, 53, 46, 76, 101, 49, 52, 45, 72, 81, 77, 44, 66, 85, 90, 58, 91, 80, 57, 71, 73, 61, 66, 51, 78, 59, 59, 58, 65, 63, 55, 71, 72, 67, 75, 61, 91, 56, 64, 87, 68, 70, 51, 77, 80, 62, 48, 56, 47, 50, 68, 50, 73, 57, 71, 68, 49, 73, 58, 72, 65, 57, 66, 80, 45, 60, 62, 61, 77, 74, 52, 58, 64, 77, 75, 55, 71, 80, 60, 79, 64, 79, 67, 76, 96, 79, 71, 79, 70, 46, 69, 57, 65, 82, 57, 61, 81, 73, 73, 59, 71, 74, 71, 45, 76, 70, 87, 91, 64, 42, 89, 47, 72, 41, 61, 50, 62, 57, 69, 75, 39, 63, 77, 63, 66, 75, 44, 63, 85, 52, 55, 48, 59, 86, 74, 65, 94, 72, 45, 60, 57, 49, 82, 70, 59, 84, 72, 87, 50, 68, 54, 66, 66, 61, 84, 75, 65, 76, 71, 66, 65, 66, 50, 77, 60, 63, 73, 61, 61, 45, 75, 65, 67, 48, 58, 63, 50, 39, 71, 55, 98, 62, 69, 88, 56, 72, 82, 62, 57, 86, 96, 43, 61, 62, 49, 76, 65, 54, 78, 70, 70, 77, 41, 64, 91, 91, 65, 61, 57, 90, 53, 79, 83, 67, 37, 53, 56, 73, 55, 69, 47, 68, 80, 46, 69, 51, 69, 74, 42, 65, 61, 62, 73, 75, 58, 53, 42, 52, 71, 61, 131, 49, 64, 65, 51, 63, 50, 52, 73, 51, 71, 68, 74, 59, 66, 61, 67, 42, 54, 57, 85, 74, 67, 59, 79, 49, 71, 69, 86, 61, 85, 65, 62, 61, 57, 61, 58, 88, 76, 71, 66, 64, 85, 80, 64, 64, 54, 64, 94, 57, 77, 91, 66, 81, 71, 61, 84, 74, 76, 83, 75, 44, 58, 69, 66, 56, 53, 71, 68, 89, 61, 67, 61, 62, 55, 54, 74, 47, 65, 72, 71, 63, 86, 62, 48, 60, 57, 81, 65, 68, 64, 64, 70, 64, 69, 46, 62, 64, 64, 67, 51, 82, 77, 92, 66, 60, 85, 59, 79, 46, 66, 77, 62, 56, 68, 73, 73, 76, 66, 71, 53, 71, 104, 56, 62, 62, 57, 50, 60, 79, 73, 62, 37, 55, 55, 76, 64, 70, 50, 45, 58, 58, 78, 64, 73, 55, 57, 113, 65, 87, 62, 57, 42, 74, 60, 56, 71, 60, 96, 72, 110, 59, 66, 64, 73, 86, 54, 43, 56, 80, 49, 71, 86, 73, 55, 55, 70, 79, 83, 69, 64, 80, 53, 63, 55, 67, 81, 77, 62, 56, 54, 68, 65, 57, 84, 68, 59, 75, 61, 53, 51, 59, 66, 67, 53, 64, 62, 88, 58, 60, 80, 57, 53, 65, 51, 50, 66, 68, 87, 70, 65, 60, 79, 69, 69, 59, 64, 58, 115, 56, 54, 63, 51, 62, 64, 61, 48, 71, 62, 72, 59, 50, 69, 68, 64, 72, 99, 47, 78, 84, 57, 92, 73, 75, 49, 76, 90, 60, 51, 67, 62, 63, 58, 92, 46, 79, 82, 57, 61, 68, 66, 54, 54, 59, 72, 76, 57, 106, 59, 79, 104, 85, 61, 60, 78, 79, 41, 81, 63, 68, 54, 48, 64, 46, 52, 60, 51, 97, 59, 58, 90, 78, 65, 38, 54, 59, 55, 47, 61, 42, 51, 65, 59, 69, 73, 62, 63, 68, 78, 60, 68, 48, 78, 63, 65, 55, 80, 76, 59, 76, 71, 99, 66, 68, 67, 62, 77, 57, 65, 69, 65, 56, 54, 81, 51, 59, 110, 54, 76, 104, 
56, 62, 64, 62, 53, 64, 50, 50, 48, 79, 56, 69, 63, 49, 74, 64, 49, 78, 96, 78, 83, 45, 75, 46, 56, 62, 67, 64, 53, 68, 55, 82, 64, 80, 62, 67, 69, 56, 73, 60, 76, 62, 64, 70, 55, 92, 62, 73, 52, 50, 57, 55, 53, 57, 81, 74, 64, 69, 61, 64, 40, 60, 72, 66, 62, 93, 73, 64, 112, 93, 66, 59, 66, 77, 53, 57, 102, 93, 71, 42, 51, 56, 42, 47, 53, 73, 67, 51, 81, 56, 71, 52, 53, 56, 73, 60, 78, 62, 87, 92, 70, 82, 45, 67, 64, 66, 64, 85, 51, 72, 61, 102, 46, 90, 71, 64, 76, 48, 52, 60, 54, 85, 47, 67, 80, 56, 72, 84, 59, 62, 74, 75, 64, 56, 59, 59, 53, 34, 57, 72, 52, 63, 67, 65, 57, 70, 77, 49, 61, 56, 59, 95, 51, 51, 45, 70, 61, 79, 48, 52, 68, 71, 37, 83, 66, 54, 67, 89, 63, 52, 64, 64, 58, 57, 97, 69, 68, 71, 63, 56, 71, 38, 53, 61, 70, 86, 42, 59, 55, 73, 68, 55, 66, 52, 60, 73, 52, 55, 57, 76, 55, 55, 42, 74, 66, 58, 60, 67, 58, 80, 75, 57, 62, 83, 57, 50, 57, 62, 46, 56, 81, 79, 74, 58, 73, 77, 48, 72, 73, 83, 75, 59, 72, 85, 83, 48, 111, 71, 56, 63, 82, 50, 72, 58, 60, 55, 66, 88, 51, 51, 52, 83, 80, 51, 63, 62, 84, 63, 76, 70, 88, 68, 46, 76, 41, 42, 84, 87, 55, 64, 80, 63, 65, 42, 81, 98, 78, 74, 50, 50, 103, 62, 69, 67, 59, 72, 80, 58, 60, 56, 51, 74, 59, 84, 76, 59, 72, 79, 56, 50, 86, 81, 81, 82, 58, 100, 76, 76, 68, 70, 56, 78, 76, 62, 68, 49, 69, 56, 50, 100, 51, 86, 71, 51, 30, 60, 82, 52, 74, 56, 97, 72, 54, 67, 61, 75, 55, 60, 67, 54, 33, 63, 62, 53, 52, 57, 67, 64, 78, 62, 71, 61, 77, 80, 82, 96, 93, 64, 53, 71, 79, 66, 67, 100, 65, 42, 82, 67, 53, 59, 81, 83, 71, 59, 85, 89, 50, 49, 53, 80, 65, 59, 63, 75, 51, 66, 58, 99, 63, 70, 42, 78, 58, 60, 73, 63, 63, 63, 63, 52, 58, 57, 68, 81, 56, 69, 67, 53, 74, 27, 83, 76, 37, 74, 66, 64, 52, 62, 62, 66, 64, 73, 61, 77, 72, 63, 66, 57, 60, 76, 50, 55, 72, 62, 68, 71, 71, 47, 63, 86, 71, 55, 58, 79, 35, 82, 61, 59, 64, 54, 97, 161, 61, 66, 69, 82, 65, 73, 64, 79, 53, 56, 64, 77, 56, 58, 66, 36, 38, 75, 77, 68, 85, 64, 71, 29, 42, 69, 66, 64, 53, 99, 51, 70, 52, 67, 72, 45, 81, 62, 46, 60, 64, 92, 58, 84, 75, 75, 65, 64, 76, 64, 49, 121, 61, 49, 70, 82, 69, 69, 75, 68, 63, 78, 116, 77, 63, 63, 45, 63, 56, 88, 91, 56, 56, 52, 53, 49, 66, 54, 46, 84, 88, 64, 67, 73, 52, 77, 81, 60, 66, 97, 68, 58, 92, 82, 83, 67, 89, 59, 63, 97, 114, 57, 68, 54, 70, 53, 51, 52, 54, 68, 57, 59, 49, 83, 58, 83, 51, 58, 58, 73, 49, 67, 72, 62, 60, 68, 62, 62, 66, 58, 65, 56, 60, 58, 55, 59, 65, 110, 56, 50, 69, 66, 56, 65, 72, 71, 58, 57, 90, 78, 67, 87, 57, 90, 70, 66, 50, 74, 61, 79, 41, 60, 91, 59, 53, 106, 85, 63, 81, 55, 63, 72, 64, 52, 63, 71, 60, 84, 63, 45, 78, 72, 70, 74, 54, 59, 68, 76, 71, 45, 65, 67, 59, 56, 61, 56, 38, 68, 80, 63, 52, 63, 87, 87, 60, 67, 76, 49, 51, 48, 85, 57, 68, 58, 55, 64, 59, 69, 90, 61, 53, 62, 77, 50, 63, 75, 63, 82, 84, 61, 54, 41, 57, 63, 84, 73, 62, 72, 59, 89, 55, 55, 69, 59, 55, 63, 52, 60, 85, 66, 59, 57, 54, 81, 56, 71, 58, 61, 67, 73, 54, 62, 63, 75, 61, 56, 68, 41, 70, 69, 74, 65, 80, 52, 70, 83, 62, 53, 82, 63, 55, 61, 87, 79, 57, 49, 48, 62, 41, 64, 94, 75, 53, 69, 69, 62, 60, 50, 81, 82, 61, 65, 62, 63, 58, 40, 54, 41, 39, 54, 63, 72, 61, 55, 69, 54, 76, 68, 58, 119, 55, 57, 83, 56, 78, 69, 67, 49, 71, 60, 50, 69, 64, 66, 54, 76, 66, 72, 63, 116, 66, 54, 58, 54, 126, 41, 64, 72, 55, 80, 47, 76, 81, 54, 49, 62, 66, 77, 58, 71, 77, 65, 74, 54, 78, 69, 46, 60, 76, 68, 60, 64, 70, 52, 62, 93, 63, 72, 76, 59, 96, 71, 70, 72, 67, 112, 53, 85, 70, 41, 72, 79, 70, 82, 65, 74, 71, 56, 63, 91, 66, 55, 64, 53, 87, 74, 75, 58, 64, 69, 72, 47, 64, 74, 73, 63, 87, 75, 57, 68, 100, 76, 73, 51, 56, 56, 67, 80, 93, 59, 54, 59, 
79, 49, 54, 72, 84, 52, 81, 76, 46, 72, 62, 92, 64, 79, 53, 61, 77, 72, 71, 64, 52, 68, 49, 88, 45, 79, 91, 49, 86, 50, 74, 68, 61, 71, 43, 79, 61, 73, 68, 76, 66, 50, 58, 59, 66, 80, 71, 58, 71, 36, 60, 62, 66, 69, 59, 80, 52, 62, 61, 64, 66, 60, 53, 67, 66, 81, 58, 53, 97, 75, 57, 68, 75, 54, 48, 53, 74, 48, 84, 56, 50, 72, 52, 62, 71, 76, 73, 77, 82, 60, 85, 63, 90, 63, 69, 88, 57, 55, 53, 103, 77, 73, 64, 109, 76, 70, 86, 66, 80, 63, 60, 49, 61, 56, 107, 51, 69, 59, 82, 58, 53, 51, 66, 64, 52, 75, 53, 66, 61, 79, 60, 63, 51, 65, 62, 49, 71, 83, 52, 75, 76, 62, 73, 80, 59, 50, 85, 64, 64, 77, 67, 50, 62, 72, 71, 61, 65, 54, 65, 59, 47, 77, 84, 75, 88, 91, 69, 64, 76, 61, 92, 58, 62, 74, 59, 65, 45, 55, 79, 76, 63, 86, 60, 54, 74, 71, 71, 47, 71, 62, 67, 58, 87, 73, 68, 46, 66, 95, 113, 60, 61, 55, 107, 60, 44, 68, 87, 51, 70, 59, 75, 65, 64, 55, 84, 45, 80, 75, 77, 64, 70, 71, 72, 70, 78, 71, 43, 64, 55, 61, 67, 78, 55, 60, 62, 67, 60, 65, 46, 57, 48, 86, 58, 83, 71, 60, 68, 50, 59, 58, 69, 79, 92, 64, 75, 80, 58, 59, 70, 62, 85, 80, 87, 60, 67, 74, 80, 67, 63, 68, 65, 72, 69, 57, 68, 72, 84, 61, 61, 63, 73, 76, 58, 39, 71, 50, 83, 80, 64, 67, 61, 81, 69, 64, 40, 54, 64, 55, 46, 78, 82, 67, 67, 65, 65, 84, 33, 64, 60, 62, 83, 55, 64, 61, 64, 53, 68, 42, 68, 64, 89, 64, 77, 71, 58, 58, 57, 57, 116, 46, 71, 54, 51, 66, 35, 50, 65, 76, 59, 66, 47, 73, 93, 62, 57, 73, 70, 67, 75, 48, 60, 71, 48, 63, 67, 51, 69, 61, 64, 75, 59, 65, 45, 56, 74, 64, 65, 62, 43, 61, 105, 90, 62, 55, 68, 56, 81, 63, 71, 58, 58, 61, 78, 54, 50, 59, 61, 50, 45, 71, 71, 45, 42, 68, 87, 65, 66, 55, 60, 64, 60, 54, 62, 92, 122, 49, 76, 52, 54, 89, 75, 75, 63, 69, 63, 62, 74, 63, 72, 64, 86, 95, 46, 67, 66, 64, 52, 71, 52, 86, 81, 52, 48, 57, 45, 50, 62, 74, 81, 73, 43, 62, 46, 89, 103, 60, 65, 60, 72, 60, 61, 80, 72, 66, 50, 79, 47, 63, 65, 53, 77, 67, 47, 63, 43, 64, 90, 66, 60, 48, 97, 55, 60, 69, 66, 60, 68, 71, 45, 62, 113, 71, 51, 60, 64, 80, 84, 53, 64, 65, 72, 76, 68, 40, 63, 59, 61, 59, 56, 50, 35, 62, 80, 55, 56, 70, 63, 64, 67, 88, 57, 68, 63, 60, 63, 52, 58, 71, 67, 57, 60, 39, 55, 69, 83, 71, 67, 59, 67, 51, 53, 44, 75, 65, 67, 64, 41, 45, 55, 66, 52, 66, 69, 61, 68, 64, 64, 91, 69, 52, 60, 61, 76, 62, 45, 56, 62, 63, 93, 59, 71, 91, 60, 64, 85, 60, 69, 98, 73, 72, 68, 62, 70, 76, 94, 85, 75, 76, 66, 74, 65, 59, 53, 65, 71, 70, 58, 57, 70, 56, 62, 59, 59, 54, 86, 73, 66, 73, 68, 58, 78, 74, 76, 99, 51, 60, 51, 76, 94, 68, 80, 49, 72, 67, 61, 71, 72, 63, 52, 75, 48, 80, 39, 67, 66, 97, 59, 63, 60, 62, 67, 64, 68, 94, 51, 70, 44, 59, 54, 69, 86, 58, 88, 84, 83, 63, 68, 70, 83, 70, 59, 105, 53, 65, 51, 66, 89, 70, 62, 58, 60, 62, 58, 68, 77, 63, 56, 52, 57, 57, 66, 62, 80, 69, 72, 70, 48, 76, 101, 61, 77, 66, 56, 58, 68, 56, 49, 62, 68, 60, 69, 81, 70, 69, 59, 111, 45, 55, 100, 64, 83, 82, 61, 54, 43, 74, 49, 60, 69, 58, 62, 67, 59, 98, 66, 57, 69, 75, 88, 65, 62, 73, 48, 64, 64, 57, 72, 50, 65, 52, 47, 84, 65, 55, 70, 65, 60, 67, 46, 54, 58, 59, 70, 64, 63, 76, 61, 51, 53, 60, 53, 76, 76, 59, 46, 83, 100, 77, 69, 80, 72, 66, 61, 65, 63, 81, 64, 58, 70, 54, 52, 59, 51, 120, 85, 61, 49, 58, 66, 94, 49, 64, 63, 62, 68, 70, 63, 71, 93, 68, 60, 54, 48, 49, 58, 60, 69, 56, 69, 76, 71, 57, 81, 54, 70, 62, 69, 63, 76, 47, 52, 67, 46, 73, 58, 82, 68, 57, 54, 67, 57, 58, 63, 70, 57, 69, 75, 72, 75, 84, 53, 49, 59, 48, 63, 74, 58, 76, 78, 64, 57, 77, 47, 53, 71, 57, 50, 62, 93, 84, 52, 86, 66, 117, 57, 55, 52, 69, 60, 49, 61, 43, 71, 89, 77, 67, 73, 56, 46, 59, 86, 57, 62, 58, 76, 75, 67, 55, 50, 62, 61, 57, 67, 55, 
113, 63, 59, 73, 57, 67, 59, 71, 68, 68, 71, 75, 73, 52, 69, 55, 54, 59, 74, 69, 55, 89, 69, 53, 49, 63, 67, 72, 74, 69, 59, 47, 68, 56, 73, 74, 57, 65, 38, 60, 62, 81, 104, 55, 78, 58, 66, 105, 52, 64, 68, 65, 79, 63, 47, 62, 64, 60, 78, 73, 49, 70, 83, 90, 60, 63, 73, 65, 87, 72, 48, 60, 58, 54, 64, 63, 60, 55, 57, 57, 52, 65, 70, 47, 59, 62, 66, 50, 59, 54, 62, 66, 48, 92, 70, 53, 60, 66, 66, 54, 86, 78, 58, 67, 58, 62, 64, 60, 75, 69, 63, 58, 68, 59, 66, 55, 69, 96, 39, 75, 69, 53, 76, 60, 48, 79, 52, 77, 44, 67, 75, 91, 57, 59, 59, 66, 77, 77, 65, 76, 63, 81, 40, 81, 60, 57, 49, 73, 90, 53, 50, 63, 67, 52, 49, 58, 73, 68, 68, 61, 51, 63, 70, 85, 81, 52, 68, 103, 52, 79, 69, 68, 50, 52, 77, 70, 58, 77, 46, 50, 69, 51, 51, 63, 52, 60, 62, 53, 72, 56, 55, 46, 54, 68, 61, 69, 56, 69, 65, 73, 60, 59, 60, 86, 46, 67, 78, 50, 51, 44, 68, 63, 74, 83, 54, 62, 70, 69, 57, 63, 77, 62, 67, 45, 56, 78, 51, 44, 78, 67, 73, 92, 78, 67, 79, 49, 55, 53, 87, 62, 67, 43, 49, 61, 80, 68, 73, 58, 75, 35, 56, 71, 69, 65, 71, 62, 64, 76, 74, 74, 49, 46, 73, 70, 60, 49, 72, 84, 73, 65, 58, 75, 65, 86, 75, 91, 71, 114, 65, 98, 77, 50, 55, 64, 54, 62, 56, 42, 50, 72, 70, 76, 62, 64, 59, 56, 58, 46, 69, 60, 56, 72, 79, 55, 75, 71, 65, 77, 65, 66, 63, 57, 63, 64, 55, 33, 46, 92, 45, 88, 47, 72, 68, 65, 51, 129, 51, 87, 75, 48, 67, 75, 63, 70, 41, 89, 102, 73, 58, 68, 56, 60, 69, 57, 56, 82, 64, 77, 77, 85, 65, 81, 67, 68, 72, 51, 75, 67, 46, 31, 56, 84, 81, 94, 64, 76, 64, 70, 59, 77, 61, 68, 82, 60, 61, 62, 100, 59, 49, 75, 53, 65, 56, 64, 76, 92, 67, 60, 57, 67, 82, 62, 45, 58, 74, 75, 59, 66, 60, 64, 58, 105, 70, 75, 82, 69, 61, 43, 99, 84, 62, 96, 66, 74, 54, 65, 75, 52, 62, 66, 63, 44, 44, 78, 87, 58, 51, 69, 62, 84, 51, 76, 57, 75, 71, 53, 84, 46, 67, 86, 81, 60, 71, 52, 59, 55, 49, 61, 74, 66, 63, 56, 63, 76, 57, 71, 62, 59, 86, 39, 66, 82, 51, 59, 62, 45, 77, 65, 79, 41, 55, 68, 68, 51, 50, 62, 71, 86, 45, 60, 67, 77, 71, 75, 51, 56, 47, 99, 70, 73, 81, 83, 67, 58, 69, 64, 68, 61, 110, 83, 53, 68, 42, 78, 86, 56, 54, 59, 66, 73, 74, 80, 67, 81, 82, 55, 70, 107, 84, 60, 55, 53, 74, 62, 59, 69, 79, 76, 71, 73, 48, 64, 55, 78, 68, 59, 57, 66, 71, 47, 134, 52, 67, 102, 43, 64, 84, 63, 57, 70, 62, 69, 75, 59, 53, 70, 56, 72, 71, 75, 54, 60, 41, 59, 52, 57, 67, 72, 41, 52, 66, 87, 56, 63, 63, 38, 72, 75, 76, 78, 113, 68, 45, 63, 69, 66, 61, 59, 71, 51, 61, 41, 68, 75, 75, 87, 93, 83, 79, 82, 64, 64, 59, 100, 86, 57, 67, 68, 51, 54, 64, 76, 59, 73, 39, 109, 62, 67, 51, 54, 60, 70, 64, 76, 71, 70, 62, 93, 53, 79, 55, 72, 61, 80, 74, 59, 62, 78, 86, 66, 75, 56, 49, 59, 74, 67, 55, 63, 53, 57, 83, 45, 77, 50, 63, 68, 51, 51, 72, 75, 62, 57, 59, 63, 61, 75, 60, 56, 48, 78, 64, 61, 51, 158, 67, 104, 45, 69, 55, 71, 49, 67, 63, 62, 67, 54, 64, 57, 70, 70, 46, 79, 49, 58, 42, 84, 52, 50, 71, 68, 64, 52, 89, 60, 83, 61, 49, 61, 58, 79, 58, 62, 65, 73, 54, 58, 52, 57, 61, 61, 68, 66, 64, 89, 68, 59, 61, 60, 55, 64, 62, 76, 74, 66, 39, 55, 60, 56, 71, 64, 55, 49, 66, 52, 72, 92, 54, 68, 67, 67, 72, 60, 54, 47, 74, 46, 76, 42, 63, 53, 69, 47, 77, 79, 62, 82, 60, 81, 76, 58, 51, 67, 64, 71, 66, 79, 53, 92, 54, 69, 65, 98, 62, 60, 55, 95, 63, 45, 53, 52, 65, 82, 71, 69, 74, 68, 94, 55, 61, 95, 77, 80, 70, 60, 52, 52, 60, 62, 60, 57, 65, 57, 82, 57, 79, 93, 53, 54, 104, 54, 68, 79, 72, 41, 55, 75, 68, 68, 70, 55, 70, 55, 61, 78, 45, 62, 57, 69, 62, 45, 52, 66, 73, 64, 57, 76, 81, 76, 71, 47, 67, 68, 66, 66, 69, 78, 64, 65, 108, 98, 56, 79, 55, 42, 67, 57, 60, 79, 74, 57, 51, 58, 68, 60, 56, 55, 65, 74, 61, 67, 63, 72, 46, 
53, 57, 66, 72, 65, 71, 54, 57, 61, 68, 63, 63, 70, 62, 54, 77, 54, 70, 65, 69, 58, 64, 57, 107, 62, 79, 65, 78, 53, 69, 49, 68, 59, 63, 60, 59, 78, 84, 61, 76, 81, 107, 82, 110, 65, 64, 65, 49, 172, 49, 57, 60, 73, 94, 68, 77, 65, 50, 61, 68, 59, 55, 82, 66, 50, 62, 81, 62, 45, 58, 66, 74, 76, 80, 103, 62, 61, 78, 56, 69, 78, 68, 53, 61, 48, 75, 86, 56, 49, 70, 58, 52, 54, 73, 76, 78, 111, 69, 55, 67, 51, 76, 83, 53, 64, 56, 67, 57, 102, 79, 83, 75, 52, 81, 76, 54, 67, 55, 65, 56, 67, 59, 81, 55, 62, 53, 65, 64, 106, 45, 58, 39, 69, 108, 57, 64, 64, 60, 60, 69, 52, 56, 68, 52, 52, 58, 46, 91, 66, 61, 76, 61, 59, 76, 65, 63, 43, 66, 86, 56, 98, 44, 60, 48, 53, 59, 41, 61, 89, 89, 60, 85, 47, 54, 51, 66, 64, 63, 72, 84, 65, 80, 112, 48, 46, 49, 55, 57, 72, 69, 63, 39, 46, 81, 52, 61, 54, 70, 62, 51, 47, 62, 67, 69, 65, 71, 83, 54, 72, 65, 69, 56, 55, 55, 59, 89, 56, 86, 85, 67, 64, 72, 77, 73, 62, 67, 65, 74, 87, 56, 63, 95, 72, 73, 58, 62, 61, 47, 71, 58, 75, 51, 55, 58, 56, 65, 66, 65, 56, 91, 48, 48, 84, 50, 52, 51, 60, 64, 78, 58, 89, 56, 79, 86, 69, 49, 56, 63, 68, 85, 59, 68, 64, 51, 51, 54, 62, 59, 57, 45, 50, 84, 71, 84, 79, 79, 62, 78, 86, 78, 75, 72, 68, 65, 60, 45, 39, 69, 51, 61, 57, 60, 54, 91, 90, 70, 74, 52, 136, 75, 61, 67, 58, 59, 63, 53, 77, 67, 70, 66, 47, 61, 55, 46, 62, 75, 83, 61, 64, 48, 50, 64, 42, 62, 86, 66, 76, 87, 56, 51, 62, 68, 60, 59, 66, 70, 61, 57, 149, 95, 52, 45, 57, 60, 70, 51, 54, 63, 53, 66, 54, 88, 72, 88, 51, 77, 45, 79, 74, 78, 55, 90, 58, 92, 70, 68, 84, 54, 59, 49, 67, 68, 55, 91, 77, 83, 106, 72, 50, 85, 68, 66, 79, 61, 54, 65, 71, 120, 63, 52, 49, 45, 41, 42, 65, 56, 63, 53, 69, 40, 64, 57, 67, 68, 60, 84, 61, 39, 50, 53, 70, 61, 93, 59, 74, 58, 56, 62, 47, 88, 57, 63, 46, 66, 60, 57, 62, 63, 63, 64, 82, 63, 46, 103, 72, 65, 67, 58, 62, 54, 71, 66, 53, 53, 66, 81, 61, 52, 57, 59, 81, 70, 85, 59, 64, 61, 68, 64, 62, 56, 76, 63, 52, 61, 68, 76, 88, 44, 67, 76, 66, 77, 54, 71, 49, 66, 76, 72, 67, 78, 71, 73, 47, 72, 55, 51, 63, 48, 54, 58, 74, 95, 69, 54, 67, 58, 69, 90, 59, 88, 53, 72, 65, 83, 63, 88, 50, 58, 70, 55, 68, 91, 62, 46, 60, 50, 69, 73, 57, 57, 42, 59, 60, 61, 54, 52, 61, 75, 70, 57, 99, 54, 54, 53, 87, 65, 83, 77, 64, 66, 69, 90, 60, 55, 54, 50, 56, 58, 94, 66, 66, 67, 109, 62, 65, 61, 59, 63, 45, 59, 69, 58, 56, 67, 72, 64, 64, 67, 73, 59, 65, 109, 97, 87, 66, 60, 75, 46, 58, 62, 64, 74, 70, 83, 59, 52, 71, 82, 65, 72, 64, 71, 55, 59, 59, 76, 75, 72, 72, 60, 75, 71, 98, 50, 74, 77, 78, 60, 64, 49, 53, 69, 45, 50, 66, 70, 55, 51, 68, 64, 57, 69, 53, 62, 79, 56, 51, 105, 59, 65, 69, 88, 63, 65, 67, 50, 55, 88, 41, 84, 87, 70, 87, 66, 55, 83, 64, 49, 61, 55, 58, 70, 65, 69, 61, 69, 48, 63, 70, 81, 63, 36, 74, 73, 58, 47, 66, 52, 71, 58, 87, 42, 75, 64, 43, 66, 61, 55, 109, 45, 71, 80, 74, 47, 65, 64, 55, 55, 54, 63, 59, 69, 101, 63, 55, 66, 84, 61, 54, 58, 71, 75, 56, 49, 105, 71, 76, 55, 78, 52, 59, 78, 76, 73, 75, 72, 67, 78, 76, 60, 79, 62, 69, 67, 54, 92, 76, 62, 52, 70, 60, 64, 64, 50, 51, 71, 60, 48, 65, 62, 71, 59, 65, 63, 58, 58, 69, 48, 56, 86, 66, 64, 52, 65, 68, 63, 111, 121, 81, 41, 67, 48, 79, 54, 61, 65, 64, 54, 56, 97, 103, 63, 66, 49, 70, 61, 44, 55, 98, 67, 79, 65, 62, 76, 71, 55, 62, 53, 51, 83, 71, 76, 69, 62, 96, 61, 51, 61, 59, 68, 57, 35, 50, 66, 64, 61, 87, 75, 64, 68, 71, 74, 58, 50, 67, 59, 64, 57, 73, 53, 65, 57, 82, 53, 42, 99, 56, 58, 51, 82, 78, 56, 57, 91, 117, 56, 64, 55, 52, 63, 73, 52, 54, 68, 49, 76, 50, 62, 66, 73, 78, 79, 72, 56, 78, 63, 72, 80, 57, 49, 58, 75, 112, 79, 71, 68, 69, 64, 63, 50, 72, 67, 
57, 62, 84, 56, 68, 66, 67, 59, 64, 95, 57, 62, 71, 71, 67, 49, 93, 86, 62, 42, 67, 59, 86, 48, 54, 86, 58, 39, 71, 62, 82, 80, 95, 60, 78, 76, 70, 59, 64, 62, 76, 94, 64, 53, 51, 50, 73, 49, 71, 38, 70, 69, 77, 50, 66, 67, 78, 68, 46, 63, 74, 58, 51, 77, 68, 80, 60, 58, 57, 51, 54, 50, 47, 75, 62, 73, 97, 58, 59, 75, 85, 67, 58, 63, 57, 81, 53, 75, 67, 63, 59, 92, 61, 67, 52, 68, 52, 53, 71, 59, 48, 57, 47, 56, 71, 70, 89, 50, 50, 68, 62, 62, 75, 52, 66, 83, 76, 37, 56, 51, 80, 57, 44, 70, 68, 64, 52, 77, 81, 66, 48, 72, 57, 58, 82, 75, 51, 58, 56, 52, 69, 56, 56, 49, 72, 43, 64, 36, 57, 74, 65, 60, 65, 56, 116, 61, 52, 67, 68, 44, 68, 50, 52, 59, 48, 127, 77, 84, 55, 66, 49, 55, 58, 57, 109, 46, 69, 81, 59, 37, 71, 64, 69, 64, 62, 85, 79, 92, 64, 81, 87, 53, 82, 59, 71, 67, 77, 57, 49, 64, 99, 66, 70, 70, 78, 61, 58, 73, 78, 64, 58, 111, 66, 83, 59, 50, 56, 76, 51, 70, 74, 56, 46, 101, 65, 74, 57, 60, 69, 68, 59, 64, 64, 62, 77, 61, 55, 75, 63, 82, 62, 66, 64, 79, 60, 57, 84, 94, 64, 55, 47, 53, 55, 62, 48, 71, 111, 43, 42, 87, 66, 92, 83, 64, 53, 79, 71, 74, 99, 59, 57, 55, 59, 62, 70, 65, 65, 73, 73, 62, 54, 57, 76, 68, 92, 65, 58, 61, 55, 77, 66, 70, 66, 74, 78, 68, 62, 76, 78, 57, 65, 54, 82, 89, 58, 73, 48, 51, 72, 55, 53, 59, 61, 81, 69, 68, 51, 62, 49, 66, 56, 48, 54, 61, 51, 49, 127, 86, 89, 70, 67, 73, 54, 77, 45, 90, 55, 58, 58, 52, 83, 69, 65, 80, 98, 103, 75, 60, 65, 87, 58, 66, 75, 63, 59, 51, 62, 52, 86, 53, 53, 89, 51, 60, 73, 59, 72, 58, 54, 78, 63, 62, 130, 91, 65, 98, 72, 64, 106, 70, 81, 64, 70, 71, 48, 69, 56, 66, 74, 42, 63, 62, 70, 62, 74, 53, 86, 53, 76, 71, 55, 69, 56, 60, 51, 56, 54, 65, 93, 75, 74, 70, 45, 58, 60, 68, 85, 59, 83, 57, 84, 37, 73, 73, 50, 55, 54, 88, 62, 55, 83, 112, 55, 60, 71, 42, 89, 63, 64, 94, 56, 75, 59, 98, 53, 48, 105, 64, 46, 71, 63, 47, 73, 80, 71, 63, 71, 68, 57, 58, 70, 41, 71, 45, 82, 55, 50, 54, 46, 42, 59, 53, 68, 69, 62, 62, 70, 49, 92, 76, 73, 60, 48, 71, 68, 58, 65, 70, 74, 81, 51, 62, 66, 73, 58, 46, 53, 80, 106, 105, 60, 59, 63, 53, 57, 88, 71, 57, 80, 72, 66, 57, 60, 92, 48, 53, 47, 59, 67, 63, 65, 53, 88, 68, 82, 72, 65, 110, 67, 58, 56, 67, 53, 70, 109, 66, 83, 69, 67, 61, 82, 70, 64, 66, 58, 91, 50, 53, 52, 51, 48, 52, 79, 87, 53, 64, 84, 55, 64, 67, 68, 64, 90, 62, 60, 71, 62, 58, 82, 95, 79, 66, 72, 64, 72, 46, 52, 62, 67, 89, 67, 94, 60, 49, 59, 63, 65, 62, 64, 83, 31, 55, 71, 68, 54, 46, 72, 74, 77, 65, 56, 48, 72, 97, 65, 67, 86, 41, 45, 72, 63, 81, 69, 85, 70, 66, 49, 70, 58, 91, 30, 60, 62, 51, 69, 71, 83, 53, 84, 58, 52, 55, 58, 32, 70, 62, 77, 63, 64, 72, 45, 60, 50, 75, 88, 57, 55, 76, 51, 48, 63, 73, 85, 68, 64, 79, 68, 80, 64, 74, 63, 74, 62, 64, 66, 66, 55, 60, 51, 89, 76, 68, 51, 70, 70, 70, 78, 71, 50, 48, 58, 68, 87, 80, 50, 41, 39, 42, 58, 62, 65, 78, 72, 70, 54, 64, 61, 64, 49, 71, 47, 57, 62, 68, 67, 51, 102, 67, 69, 63, 48, 69, 60, 57, 72, 53, 67, 54, 69, 51, 58, 54, 56, 54, 69, 50, 66, 67, 55, 82, 87, 152, 62, 52, 59, 65, 81, 55, 53, 80, 56, 61, 61, 70, 72, 58, 60, 97, 53, 47, 46, 54, 104, 75, 73, 66, 52, 53, 67, 49, 72, 58, 60, 69, 72, 74, 62, 74, 67, 69, 88, 57, 63, 55, 68, 68, 61, 68, 56, 55, 44, 84, 60, 83, 54, 54, 58, 57, 57, 91, 81, 81, 60, 76, 61, 83, 81, 54, 74, 92, 56, 69, 69, 58, 54, 61, 56, 71, 63, 68, 73, 54, 56, 72, 69, 76, 71, 82, 57, 51, 57, 75, 76, 61, 58, 60, 93, 65, 73, 51, 77, 120, 70, 81, 63, 60, 57, 73, 128, 66, 76, 71, 88, 44, 65, 51, 64, 54, 77, 62, 58, 62, 98, 59, 54, 54, 67, 49, 85, 47, 61, 59, 59, 72, 82, 56, 72, 49, 59, 97, 106, 55, 52, 65, 61, 45, 63, 57, 80, 64, 135, 60, 60, 
57, 75, 84, 71, 59, 72, 53, 62, 75, 59, 67, 69, 93, 59, 64, 78, 62, 73, 66, 62, 74, 60, 61, 41, 77, 81, 52, 86, 65, 76, 69, 58, 53, 47, 64, 55, 53, 44, 62, 85, 69, 66, 84, 63, 60, 40, 94, 72, 65, 49, 62, 56, 62, 74, 49, 75, 89, 60, 62, 47, 90, 104, 62, 108, 54, 51, 51, 48, 78, 59, 67, 55, 63, 74, 54, 52, 111, 56, 74, 44, 59, 59, 61, 55, 60, 64, 72, 70, 50, 50, 60, 98, 72, 53, 113, 60, 55, 75, 80, 78, 57, 82, 74, 78, 52, 69, 65, 63, 73, 73, 73, 56, 58, 57, 52, 84, 63, 62, 69, 54, 82, 51, 54, 69, 54, 73, 75, 65, 64, 61, 52, 87, 70, 57, 82, 66, 65, 67, 56, 75, 63, 56, 59, 84, 62, 81, 62, 57, 67, 52, 93, 67, 67, 49, 79, 65, 64, 80, 83, 66, 53, 70, 77, 61, 41, 53, 80, 98, 71, 64, 74, 62, 65, 78, 45, 48, 39, 82, 59, 63, 70, 85, 60, 63, 59, 52, 41, 78, 42, 52, 55, 68, 64, 70, 47, 72, 73, 62, 64, 68, 87, 75, 42, 74, 87, 54, 70, 86, 75, 49, 73, 62, 69, 64, 63, 52, 72, 92, 77, 57, 77, 66, 58, 82, 91, 73, 91, 71, 65, 72, 58, 65, 64, 73, 69, 46, 66, 75, 51, 65, 65, 45, 53, 74, 61, 56, 83, 67, 55, 48, 71, 59, 73, 65, 64, 67, 88, 74, 91, 86, 51, 74, 59, 71, 83, 51, 61, 71, 61, 85, 84, 57, 60, 67, 68, 64, 61, 49, 58, 54, 62, 60, 79, 68, 49, 68, 51, 55, 68, 63, 86, 97, 88, 75, 70, 78, 60, 72, 55, 80, 51, 67, 50, 61, 67, 52, 123, 65, 74, 51, 77, 53, 52, 65, 61, 120, 42, 56, 68, 81, 62, 72, 58, 56, 69, 56, 55, 42, 49, 61, 52, 92, 54, 84, 59, 55, 73, 90, 75, 54, 79, 105, 61, 51, 46, 72, 73, 58, 61, 54, 74, 68, 94, 89, 83, 79, 72, 73, 62, 48, 102, 66, 92, 63, 51, 63, 63, 65, 60, 70, 46, 51, 77, 80, 67, 73, 80, 53, 75, 70, 73, 80, 90, 65, 43, 72, 53, 55, 50, 68, 69, 67, 72, 43, 67, 75, 41, 87, 71, 69, 58, 73, 83, 68, 87, 64, 73, 60, 101, 63, 79, 70, 64, 60, 66, 63, 80, 58, 64, 58, 51, 63, 59, 68, 66, 51, 69, 62, 58, 63, 59, 62, 92, 63, 63, 60, 48, 67, 49, 65, 73, 72, 69, 77, 68, 48, 63, 65, 67, 101, 66, 53, 57, 48, 86, 69, 70, 71, 53, 54, 53, 48, 65, 66, 57, 75, 90, 125, 67, 53, 63, 52, 68, 60, 66, 71, 81, 60, 70, 64, 78, 59, 61, 84, 75, 89, 89, 58, 81, 72, 68, 57, 84, 57, 65, 72, 48, 74, 55, 62, 67, 80, 72, 55, 85, 67, 61, 72, 57, 54, 65, 62, 65, 76, 73, 56, 62, 62, 54, 86, 66, 64, 62, 61, 61, 58, 51, 52, 78, 60, 58, 88, 79, 64, 68, 60, 65, 69, 62, 52, 66, 55, 55, 67, 59, 62, 71, 53, 65, 69, 59, 80, 46, 64, 62, 83, 69, 66, 69, 65, 47, 78, 52, 70, 68, 60, 66, 48, 79, 63, 69, 89, 98, 65, 60, 65, 52, 77, 67, 86, 94, 63, 44, 66, 88, 61, 52, 92, 65, 85, 46, 100, 45, 52, 56, 60, 74, 52, 103, 64, 54, 55, 66, 67, 54, 66, 55, 52, 43, 70, 58, 44, 65, 66, 74, 78, 54, 98, 70, 84, 103, 66, 59, 68, 72, 61, 58, 58, 72, 80, 76, 66, 63, 57, 67, 67, 71, 30, 69, 61, 56, 48, 65, 57, 58, 53, 52, 64, 56, 89, 49, 66, 62, 60, 90, 64, 71, 64, 57, 54, 41, 63, 62, 59, 82, 44, 72, 86, 91, 63, 54, 53, 64, 61, 49, 91, 74, 69, 87, 58, 57, 50, 71, 55, 65, 73, 75, 58, 53, 42, 59, 81, 67, 78, 96, 70, 64, 81, 62, 48, 65, 86, 67, 96, 87, 55, 63, 59, 68, 91, 54, 60, 53, 62, 53, 66, 51, 39, 49, 66, 44, 50, 84, 58, 63, 59, 61, 68, 50, 63, 70, 53, 69, 50, 70, 82, 62, 59, 75, 65, 69, 61, 58, 42, 56, 50, 73, 73, 43, 59, 54, 75, 55, 68, 50, 60, 92, 48, 63, 74, 85, 74, 41, 57, 58, 55, 79, 59, 77, 77, 69, 65, 42, 42, 64, 81, 87, 75, 51, 62, 59, 66, 80, 88, 61, 66, 62, 47, 95, 58, 64, 56, 58, 77, 51, 47, 86, 61, 76, 88, 54, 53, 64, 53, 77, 58, 70, 54, 62, 57, 80, 59, 55, 52, 48, 92, 48, 96, 64, 80, 66, 71, 78, 52, 57, 59, 58, 60, 71, 59, 60, 55, 55, 80, 62, 56, 74, 82, 79, 58, 64, 82, 57, 57, 55, 82, 62, 72, 56, 91, 55, 62, 72, 53, 47, 62, 43, 83, 69, 74, 45, 76, 64, 59, 61, 53, 75, 63, 51, 63, 77, 76, 142, 79, 50, 60, 44, 69, 57, 48, 77, 59, 66, 63, 54, 
80, 75, 53, 61, 44, 68, 88, 82, 115, 68, 86, 84, 62, 54, 57, 57, 51, 78, 51, 57, 59, 60, 82, 67, 65, 56, 62, 73, 66, 63, 73, 74, 76, 60, 63, 67, 57, 71, 72, 51, 67, 65, 71, 42, 78, 47, 79, 66, 68, 58, 70, 51, 97, 63, 70, 62, 53, 66, 58, 76, 61, 67, 88, 65, 46, 78, 62, 56, 59, 50, 65, 93, 94, 74, 52, 56, 83, 78, 60, 73, 55, 77, 95, 60, 79, 66, 83, 71, 69, 74, 43, 58, 69, 51, 84, 71, 69, 71, 80, 71, 67, 63, 79, 54, 69, 61, 55, 69, 59, 53, 67, 63, 77, 81, 73, 67, 43, 53, 58, 53, 56, 64, 65, 59, 69, 89, 105, 65, 48, 45, 86, 56, 89, 64, 64, 72, 111, 58, 67, 79, 63, 68, 85, 49, 79, 67, 74, 90, 61, 68, 58, 131, 50, 67, 74, 73, 79, 63, 56, 60, 61, 36, 75, 79, 104, 68, 72, 69, 50, 58, 65, 48, 76, 47, 63, 75, 62, 64, 56, 62, 58, 87, 84, 56, 61, 55, 59, 94, 60, 57, 64, 54, 52, 53, 80, 82, 63, 61, 62, 73, 79, 81, 76, 68, 63, 56, 97, 42, 69, 58, 90, 64, 48, 64, 53, 78, 70, 67, 63, 76, 46, 61, 57, 44, 64, 70, 70, 44, 76, 75, 63, 67, 66, 53, 65, 58, 56, 74, 61, 59, 48, 76, 57, 62, 67, 57, 62, 69, 128, 65, 60, 63, 75, 110, 64, 76, 58, 62, 45, 54, 65, 74, 73, 42, 42, 90, 69, 61, 67, 59, 50, 65, 54, 57, 59, 68, 64, 54, 121, 43, 65, 63, 64, 75, 67, 72, 59, 79, 86, 58, 87, 68, 64, 80, 81, 57, 65, 68, 65, 61, 44, 51, 70, 70, 58, 46, 98, 67, 54, 55, 63, 57, 47, 75, 54, 51, 71, 86, 73, 44, 49, 83, 76, 90, 73, 61, 58, 56, 59, 92, 96, 60, 83, 90, 73, 54, 54, 74, 61, 51, 63, 51, 54, 67, 62, 61, 66, 69, 73, 59, 68, 68, 100, 60, 60, 65, 65, 56, 57, 56, 57, 42, 56, 73, 65, 62, 62, 69, 62, 59, 59, 61, 57, 81, 74, 66, 57, 60, 54, 57, 78, 64, 61, 74, 62, 77, 72, 72, 70, 58, 61, 65, 87, 65, 58, 61, 60, 72, 48, 63, 71, 75, 66, 65, 68, 67, 65, 88, 65, 64, 48, 72, 50, 60, 66, 57, 59, 72, 59, 98, 63, 71, 58, 52, 68, 71, 57, 105, 98, 62, 99, 75, 59, 52, 63, 73, 46, 51, 70, 57, 80, 65, 65, 66, 99, 72, 73, 64, 55, 54, 96, 55, 64, 66, 72, 58, 49, 73, 57, 75, 62, 72, 71, 52, 62, 56, 68, 54, 68, 64, 55, 67, 48, 65, 80, 63, 78, 72, 70, 104, 66, 88, 61, 55, 67, 65, 78, 80, 61, 70, 50, 91, 61, 60, 62, 59, 71, 57, 59, 58, 74, 99, 56, 90, 54, 65, 55, 56, 77, 76, 59, 86, 51, 57, 72, 62, 67, 82, 66, 45, 63, 53, 80, 77, 65, 78, 70, 64, 61, 65, 82, 54, 60, 104, 57, 61, 49, 57, 62, 64, 62, 74, 57, 72, 65, 63, 65, 66, 62, 81, 50, 64, 59, 76, 60, 62, 85, 73, 58, 79, 53, 64, 61, 82, 62, 62, 50, 61, 72, 54, 72, 61, 67, 60, 70, 60, 68, 52, 73, 80, 58, 53, 81, 49, 76, 68, 67, 68, 49, 55, 67, 54, 57, 52, 79, 65, 61, 65, 60, 61, 60, 58, 67, 48, 62, 57, 67, 63, 62, 54, 56, 61, 69, 61, 54, 81, 85, 65, 63, 94, 92, 53, 85, 52, 49, 60, 69, 64, 57, 69, 80, 94, 57, 58, 74, 65, 75, 71, 66, 58, 54, 61, 83, 52, 62, 61, 77, 59, 58, 70, 76, 64, 61, 96, 63, 60, 64, 57, 57, 61, 57, 56, 78, 64, 81, 78, 73, 59, 121, 83, 79, 63, 58, 67, 67, 75, 60, 61, 53, 55, 52, 52, 64, 64, 73, 63, 67, 51, 70, 49, 58, 57, 64, 50, 82, 71, 69, 70, 55, 59, 71, 68, 81, 67, 57, 77, 46, 63, 41, 68, 91, 67, 63, 67, 71, 55, 48, 73, 71, 88, 71, 53, 42, 64, 75, 52, 59, 64, 61, 71, 68, 73, 63, 59, 55, 71, 78, 66, 85, 100, 75, 57, 50, 63, 75, 64, 57, 82, 74, 67, 54, 66, 67, 70, 65, 62, 66, 59, 55, 44, 66, 70, 65, 69, 70, 63, 92, 81, 54, 60, 56, 59, 83, 70, 79, 57, 63, 77, 61, 48, 56, 60, 68, 82, 65, 62, 60, 71, 70, 61, 70, 64, 68, 65, 72, 70, 72, 56, 68, 94, 75, 63, 59, 51, 75, 61, 61, 70, 71, 58, 51, 74, 67, 55, 66, 60, 58, 64, 46, 65, 55, 64, 60, 123, 81, 62, 81, 49, 78, 59, 60, 50, 71, 80, 86, 59, 70, 125, 73, 38, 66, 74, 68, 82, 78, 64, 71, 49, 81, 52, 76, 87, 114, 59, 58, 54, 74, 67, 59, 102, 74, 67, 63, 88, 79, 52, 72, 69, 63, 57, 115, 55, 45, 52, 67, 64, 71, 57, 67, 59, 85, 109, 83, 
49, 56, 67, 55, 64, 56, 105, 60, 83, 78, 55, 77, 62, 71, 73, 57, 58, 90, 62, 62, 67, 70, 79, 68, 58, 59, 50, 58, 60, 66, 45, 51, 58, 69, 65, 55, 71, 52, 66, 77, 70, 50, 81, 54, 47, 63, 69, 79, 81, 57, 59, 63, 64, 72, 63, 74, 57, 77, 49, 61, 62, 65, 75, 75, 46, 63, 59, 54, 68, 58, 54, 69, 101, 60, 66, 79, 68, 75, 73, 77, 66, 62, 80, 90, 49, 75, 55, 62, 94, 66, 65, 67, 79, 67, 75, 63, 80, 67, 62, 58, 56, 69, 75, 59, 64, 54, 57, 56, 67, 64, 58, 83, 51, 63, 83, 65, 67, 83, 73, 66, 62, 60, 66, 74, 56, 54, 60, 79, 62, 80, 52, 57, 65, 70, 54, 68, 54, 57, 65, 63, 62, 61, 62, 63, 62, 65, 62, 42, 56, 52, 53, 47, 64, 53, 60, 71, 76, 62, 51, 70, 70, 69, 62, 61, 55, 65, 64, 71, 105, 51, 68, 61, 70, 59, 63, 76, 69, 60, 64, 85, 41, 88, 79, 60, 84, 68, 66, 70, 66, 59, 60, 60, 36, 65, 54, 110, 73, 82, 60, 61, 67, 64, 48, 63, 50, 60, 63, 68, 45, 66, 70, 75, 63, 54, 76, 64, 66, 66, 78, 49, 52, 82, 61, 64, 68, 87, 63, 76, 51, 56, 74, 61, 67, 61, 49, 60, 66, 48, 61, 67, 64, 67, 63, 70, 61, 93, 62, 83, 46, 159, 51, 70, 64, 67, 52, 60, 60, 70, 67, 52, 83, 63, 67, 57, 95, 61, 68, 59, 59, 60, 45, 52, 109, 67, 73, 57, 66, 67, 59, 63, 56, 67, 43, 88, 92, 68, 52, 57, 81, 57, 63, 68, 82, 57, 50, 64, 77, 74, 58, 49, 55, 56, 118, 83, 55, 66, 52, 88, 99, 59, 46, 64, 57, 76, 68, 61, 93, 52, 77, 70, 77, 81, 73, 53, 63, 60, 64, 64, 79, 75, 64, 62, 52, 62, 91, 49, 48, 42, 80, 66, 68, 45, 77, 78, 64, 62, 53, 51, 72, 69, 78, 60, 89, 61, 64, 79, 67, 55, 77, 61, 63, 60, 71, 76, 51, 55, 60, 53, 54, 71, 54, 67, 52, 72, 60, 64, 69, 88, 61, 80, 85, 56, 51, 55, 95, 69, 88, 66, 49, 58, 57, 52, 58, 84, 82, 68, 59, 66, 54, 73, 55, 69, 74, 67, 65, 80, 53, 51, 60, 54, 56, 42, 69, 60, 69, 83, 62, 67, 85, 89, 67, 59, 52, 69, 62, 62, 65, 81, 79, 56, 66, 76, 64, 61, 66, 57, 72, 77, 58, 72, 87, 57, 60, 61, 115, 64, 82, 76, 58, 60, 52, 60, 85, 54, 63, 71, 61, 66, 68, 61, 71, 67, 39, 64, 85, 67, 58, 48, 95, 61, 75, 85, 66, 61, 49, 58, 75, 57, 63, 71, 60, 76, 96, 61, 69, 74, 66, 66, 44, 51, 63, 48, 74, 53, 66, 55, 64, 71, 51, 51, 53, 58, 83, 57, 62, 83, 53, 51, 60, 106, 53, 81, 71, 70, 48, 73, 61, 63, 87, 55, 74, 44, 104, 70, 65, 65, 85, 58, 54, 31, 54, 65, 62, 73, 75, 62, 55, 60, 77, 58, 59, 57, 120, 56, 64, 72, 82, 41, 82, 87, 55, 59, 53, 55, 62, 71, 68, 45, 74, 70, 112, 79, 72, 65, 65, 63, 78, 69, 55, 76, 100, 63, 57, 54, 62, 63, 57, 59, 63, 90, 57, 81, 62, 77, 75, 84, 80, 58, 65, 72, 66, 51, 66, 65, 64, 67, 63, 89, 41, 69, 50, 102, 68, 41, 58, 107, 65, 55, 67, 78, 67, 50, 62, 63, 71, 66, 88, 55, 45, 64, 71, 54, 143, 66, 75, 55, 78, 69, 52, 73, 61, 63, 46, 57, 87, 74, 80, 64, 71, 67, 45, 78, 62, 49, 76, 66, 54, 42, 43, 58, 57, 73, 78, 85, 64, 73, 61, 61, 64, 59, 96, 67, 65, 58, 59, 68, 68, 77, 54, 72, 55, 49, 62, 68, 71, 60, 71, 48, 70, 67, 51, 47, 52, 61, 79, 57, 92, 80, 59, 61, 56, 80, 79, 76, 54, 62, 57, 61, 60, 56, 69, 67, 73, 60, 66, 59, 51, 62, 60, 63, 57, 60, 39, 61, 89, 58, 86, 60, 66, 62, 60, 60, 53, 51, 64, 65, 69, 58, 64, 81, 66, 62, 51, 69, 52, 54, 57, 60, 58, 62, 70, 68, 55, 53, 90, 53, 66, 49, 69, 55, 74, 60, 61, 69, 77, 45, 88, 54, 46, 83, 61, 48, 75, 62, 50, 55, 60, 63, 46, 82, 58, 82, 102, 65, 76, 76, 61, 66, 79, 70, 55, 77, 57, 65, 67, 72, 56, 94, 69, 59, 70, 66, 49, 57, 71, 44, 58, 64, 59, 63, 71, 63, 64, 63, 92, 81, 53, 60, 63, 46, 83, 53, 57, 99, 95, 67, 73, 67, 80, 47, 52, 72, 68, 56, 74, 66, 60, 76, 65, 53, 64, 59, 79, 101, 34, 69, 80, 41, 78, 54, 106, 67, 54, 38, 73, 74, 67, 94, 63, 59, 54, 55, 71, 56, 64, 92, 73, 72, 71, 62, 60, 80, 51, 80, 47, 63, 61, 45, 58, 85, 62, 60, 49, 75, 67, 61, 68, 45, 129, 40, 46, 120, 56, 
80, 60, 61, 61, 57, 60, 69, 60, 78, 60, 71, 61, 67, 69, 62, 77, 86, 73, 91, 53, 60, 61, 56, 76, 56, 61, 50, 52, 79, 53, 53, 54, 82, 55, 50, 66, 67, 79, 70, 63, 51, 74, 75, 65, 67, 59, 63, 62, 70, 60, 94, 61, 55, 48, 70, 63, 53, 112, 76, 57, 42, 78, 112, 49, 53, 76, 62, 72, 86, 70, 75, 84, 57, 59, 53, 75, 58, 49, 53, 58, 54, 49, 59, 53, 65, 44, 80, 50, 76, 92, 62, 55, 69, 76, 68, 59, 65, 62, 88, 76, 57, 64, 61, 64, 69, 81, 64, 70, 52, 67, 60, 70, 54, 86, 56, 60, 115, 62, 75, 75, 56, 46, 56, 79, 59, 51, 64, 58, 87, 63, 62, 52, 68, 60, 36, 66, 52, 64, 51, 69, 67, 54, 58, 67, 63, 103, 48, 56, 57, 52, 65, 67, 54, 67, 68, 82, 55, 75, 88, 95, 57, 52, 69, 51, 109, 54, 91, 57, 69, 64, 72, 52, 67, 75, 66, 67, 88, 64, 94, 79, 58, 57, 63, 67, 46, 62, 57, 33, 72, 48, 50, 63, 60, 82, 53, 74, 52, 77, 83, 62, 69, 59, 53, 56, 61, 72, 83, 63, 47, 64, 55, 46, 53, 66, 54, 75, 62, 70, 64, 44, 71, 54, 87, 81, 56, 64, 59, 59, 43, 60, 62, 76, 71, 78, 59, 77, 47, 53, 59, 65, 58, 76, 93, 57, 69, 43, 80, 85, 38, 57, 49, 56, 70, 55, 61, 82, 57, 52, 74, 59, 61, 65, 31, 67, 55, 59, 62, 55, 60, 77, 66, 61, 73, 75, 65, 56, 75, 62, 57, 71, 56, 59, 82, 66, 60, 83, 66, 78, 61, 58, 71, 90, 66, 76, 82, 55, 59, 56, 104, 58, 74, 92, 73, 55, 69, 56, 65, 65, 55, 48, 103, 76, 54, 67, 44, 77, 50, 73, 79, 54, 63, 65, 78, 62, 84, 60, 67, 62, 63, 74, 58, 76, 57, 63, 79, 53, 89, 97, 62, 69, 69, 60, 62, 69, 64, 46, 62, 66, 58, 53, 41, 57, 54, 74, 85, 73, 62, 45, 46, 52, 88, 88, 60, 60, 104, 54, 52, 118, 48, 57, 50, 82, 57, 46, 56, 64, 90, 69, 55, 71, 74, 51, 61, 72, 64, 69, 51, 72, 88, 108, 51, 86, 109, 55, 88, 53, 67, 62, 62, 40, 55, 66, 64, 58, 75, 67, 62, 73, 50, 110, 63, 41, 44, 62, 56, 111, 60, 52, 121, 72, 55, 64, 56, 62, 66, 67, 50, 47, 66, 78, 49, 77, 53, 84, 51, 77, 56, 54, 55, 47, 58, 49, 59, 51, 90, 64, 56, 70, 85, 64, 48, 70, 49, 57, 51, 41, 85, 55, 69, 65, 69, 53, 62, 70, 62, 49, 69, 59, 60, 58, 53, 81, 49, 48, 61, 60, 65, 48, 53, 53, 48, 59, 68, 52, 75, 67, 57, 78, 65, 92, 72, 71, 52, 60, 74, 50, 55, 65, 57, 47, 63, 52, 55, 77, 56, 83, 58, 59, 47, 68, 76, 55, 60, 78, 89, 124, 65, 59, 79, 57, 83, 56, 91, 47, 91, 50, 69, 59, 53, 93, 55, 60, 61, 68, 53, 52, 121, 65, 68, 65, 67, 56, 57, 86, 72, 56, 35, 72, 80, 71, 63, 95, 88, 60, 58, 75, 79, 59, 59, 24, 89, 53, 78, 53, 89, 59, 57, 76, 75, 54, 45, 79, 96, 93, 69, 65, 72, 66, 97, 68, 58, 43, 91, 75, 64, 45, 70, 81, 70, 89, 46, 63, 64, 49, 50, 70, 61, 55, 72, 116, 48, 56, 120, 62, 93, 53, 75, 80, 75, 63, 70, 66, 63, 73, 49, 80, 63, 53, 47, 57, 59, 72, 52, 81, 72, 61, 52, 77, 76, 81, 62, 76, 61, 60, 76, 50, 75, 63, 66, 74, 54, 49, 81, 61, 83, 101, 74, 74, 47, 54, 60, 60, 70, 66, 83, 45, 64, 70, 82, 70, 88, 66, 49, 68, 72, 47, 63, 73, 48, 61, 90, 69, 71, 58, 75, 56, 65, 90, 86, 71, 87, 66, 73, 40, 86, 87, 62, 56, 58, 65, 72, 59, 56, 80, 79, 63, 58, 51, 76, 43, 51, 57, 52, 70, 62, 73, 50, 74, 56, 62, 62, 69, 45, 97, 71, 62, 88, 91, 65, 69, 56, 76, 67, 55, 54, 71, 74, 137, 63, 63, 59, 71, 46, 66, 67, 67, 125, 70, 66, 57, 62, 68, 35, 57, 60, 56, 63, 62, 61, 63, 57, 56, 84, 74, 95, 65, 62, 59, 63, 46, 67, 95, 51, 63, 58, 57, 58, 39, 59, 56, 79, 71, 59, 92, 94, 64, 61, 45, 60, 65, 83, 58, 54, 104, 52, 47, 52, 73, 79, 53, 52, 61, 65, 52, 71, 68, 73, 67, 63, 76, 72, 61, 60, 79, 64, 74, 60, 62, 70, 46, 57, 66, 66, 63, 57, 70, 81, 59, 56, 76, 98, 64, 63, 71, 55, 91, 56, 66, 58, 107, 79, 96, 56, 56, 56, 37, 58, 73, 72, 52, 61, 58, 65, 58, 82, 57, 76, 68, 58, 67, 52, 64, 46, 55, 51, 66, 87, 58, 48, 46, 72, 70, 95, 50, 61, 72, 47, 72, 64, 54, 112, 49, 51, 77, 66, 82, 52, 55, 68, 70, 69, 91, 
56, 73, 50, 68, 90, 105, 115, 64, 65, 59, 64, 62, 64, 57, 71, 71, 54, 64, 51, 62, 70, 67, 48, 73, 54, 76, 61, 100, 54, 74, 61, 73, 64, 63, 81, 66, 63, 47, 68, 75, 57, 81, 88, 76, 67, 52, 76, 43, 60, 82, 51, 70, 64, 56, 57, 54, 42, 62, 65, 59, 71, 52, 67, 60, 36, 44, 57, 60, 64, 65, 60, 56, 70, 58, 52, 66, 63, 78, 49, 47, 50, 70, 73, 41, 70, 63, 63, 55, 67, 74, 58, 56, 46, 47, 65, 89, 98, 61, 75, 43, 70, 66, 65, 66, 47, 67, 69, 59, 73, 48, 78, 97, 54, 60, 78, 67, 66, 52, 68, 68, 45, 63, 68, 75, 76, 59, 95, 63, 70, 52, 113, 64, 73, 94, 63, 45, 71, 99, 47, 67, 58, 64, 84, 53, 89, 84, 44, 77, 58, 68, 64, 63, 78, 67, 58, 47, 40, 60, 62, 106, 70, 56, 81, 64, 72, 76, 64, 66, 77, 49, 68, 70, 94, 43, 53, 70, 74, 66, 61, 47, 52, 43, 53, 111, 49, 70, 66, 71, 76, 71, 62, 58, 58, 81, 68, 69, 77, 66, 58, 59, 72, 80, 58, 81, 82, 59, 77, 73, 75, 54, 60, 68, 73, 84, 67, 63, 53, 57, 59, 65, 63, 62, 71, 59, 56, 78, 60, 54, 56, 103, 51, 109, 82, 71, 49, 59, 63, 48, 47, 86, 74, 46, 46, 62, 55, 63, 50, 56, 46, 57, 64, 46, 73, 44, 60, 52, 61, 70, 52, 60, 69, 67, 62, 55, 92, 88, 58, 66, 80, 60, 58, 72, 66, 76, 64, 59, 67, 95, 75, 73, 72, 54, 65, 61, 74, 47, 62, 46, 50, 69, 89, 67, 47, 79, 60, 86, 57, 61, 74, 54, 75, 78, 72, 51, 50, 111, 97, 58, 75, 49, 63, 73, 61, 46, 65, 68, 69, 63, 67, 56, 77, 56, 63, 63, 58, 70, 67, 105, 80, 51, 56, 77, 78, 58, 59, 70, 62, 64, 71, 55, 60, 64, 73, 87, 67, 55, 38, 63, 76, 110, 57, 58, 65, 49, 89, 62, 61, 65, 62, 75, 54, 58, 50, 42, 86, 70, 64, 107, 65, 62, 80, 106, 104, 66, 68, 59, 80, 74, 38, 64, 102, 53, 60, 56, 91, 63, 78, 70, 80, 64, 86, 64, 56, 62, 55, 66, 78, 69, 43, 56, 79, 60, 70, 66, 55, 75, 62, 60, 54, 50, 101, 99, 83, 90, 66, 51, 58, 77, 75, 64, 64, 43, 67, 70, 81, 39, 72, 55, 32, 56, 48, 62, 100, 57, 77, 54, 65, 56, 60, 74, 48, 70, 67, 74, 63, 59, 68, 77, 50, 50, 80, 52, 53, 60, 62, 103, 68, 54, 38, 62, 54, 64, 67, 61, 54, 64, 71, 71, 45, 71, 84, 78, 46, 50, 56, 62, 95, 58, 48, 99, 47, 51, 65, 45, 55, 71, 69, 56, 74, 71, 60, 65, 61, 51, 88, 79, 81, 53, 62, 62, 53, 45, 70, 62, 63, 66, 105, 71, 55, 70, 89, 87, 62, 69, 58, 79, 64, 69, 58, 83, 59, 66, 80, 63, 80, 69, 81, 63, 81, 65, 50, 77, 92, 61, 82, 64, 81, 71, 89, 56, 47, 58, 52, 38, 73, 72, 60, 56, 67, 104, 58, 76, 60, 64, 75, 77, 72, 58, 52, 56, 67, 45, 68, 90, 66, 69, 73, 70, 49, 87, 66, 68, 69, 62, 38, 114, 62, 61, 84, 107, 56, 53, 53, 59, 60, 63, 60, 58, 91, 35, 79, 63, 74, 77, 85, 89, 54, 60, 65, 67, 33, 53, 65, 75, 57, 69, 64, 73, 65, 64, 53, 84, 60, 67, 76, 118, 75, 71, 58, 71, 65, 57, 109, 48, 63, 53, 87, 57, 47, 52, 46, 79, 60, 74, 59, 65, 57, 65, 70, 70, 55, 43, 55, 78, 65, 56, 48, 66, 56, 69, 48, 58, 77, 83, 63, 68, 55, 75, 68, 66, 60, 60, 66, 66, 70, 52, 52, 48, 74, 60, 61, 59, 76, 109, 60, 71, 75, 62, 113, 71, 71, 59, 59, 77, 61, 60, 56, 66, 48, 61, 80, 75, 43, 50, 63, 64, 96, 69, 75, 65, 55, 51, 62, 64, 53, 68, 63, 75, 52, 91, 76, 62, 52, 95, 61, 54, 62, 41, 52, 49, 45, 61, 66, 52, 70, 82, 46, 81, 69, 53, 70, 69, 53, 61, 73, 55, 45, 55, 61, 75, 58, 65, 94, 62, 70, 62, 63, 64, 53, 58, 60, 76, 48, 65, 74, 49, 59, 62, 78, 48, 54, 61, 66, 81, 55, 75, 72, 70, 77, 71, 56, 55, 54, 62, 60, 69, 58, 96, 77, 82, 65, 49, 56, 105, 88, 72, 73, 62, 67, 57, 58, 78, 60, 49, 107, 46, 60, 72, 75, 57, 53, 54, 62, 59, 78, 57, 73, 68, 83, 45, 58, 57, 64, 56, 67, 61, 65, 57, 52, 68, 55, 79, 82, 56, 64, 62, 61, 74, 69, 78, 69, 57, 53, 51, 79, 56, 67, 56, 63, 125, 56, 54, 81, 66, 64, 60, 66, 67, 65, 60, 50, 63, 61, 70, 48, 86, 72, 58, 103, 123, 58, 68, 74, 48, 70, 56, 68, 72, 73, 57, 77, 53, 72, 72, 70, 72, 56, 72, 62, 58, 56, 
64, 100, 66, 83, 69, 79, 48, 67, 60, 47, 68, 53, 77, 55, 64, 51, 60, 55, 44, 65, 61, 62, 57, 66, 60, 59, 65, 72, 76, 76, 57, 77, 77, 53, 54, 58, 62, 53, 61, 65, 77, 93, 60, 52, 69, 60, 51, 64, 68, 68, 70, 63, 64, 44, 76, 116, 53, 49, 47, 77, 74, 72, 70, 67, 68, 70, 62, 79, 71, 75, 55, 79, 76, 63, 59, 67, 59, 66, 56, 61, 40, 98, 67, 60, 77, 85, 62, 48, 66, 43, 49, 50, 64, 51, 116, 61, 103, 73, 64, 56, 146, 48, 58, 81, 64, 81, 58, 78, 65, 64, 106, 49, 83, 62, 59, 73, 85, 133, 57, 58, 57, 51, 162, 61, 54, 79, 57, 68, 69, 50, 69, 64, 89, 88, 46, 141, 45, 60, 74, 59, 105, 54, 65, 74, 61, 52, 69, 57, 73, 60, 69, 88, 69, 71, 53, 49, 55, 59, 64, 76, 55, 55, 65, 50, 67, 71, 61, 53, 59, 53, 77, 60, 60, 55, 80, 56, 51, 52, 57, 74, 62, 57, 63, 66, 54, 51, 65, 46, 68, 62, 63, 85, 53, 60, 56, 75, 65, 72, 87, 71, 66, 71, 69, 68, 86, 97, 77, 70, 55, 50, 52, 104, 66, 47, 65, 71, 72, 60, 64, 64, 72, 98, 76, 92, 40, 48, 66, 56, 57, 75, 59, 58, 75, 72, 75, 55, 65, 65, 50, 66, 77, 56, 101, 55, 37, 51, 65, 60, 92, 60, 52, 60, 89, 67, 59, 54, 58, 65, 62, 59, 65, 72, 47, 57, 65, 97, 66, 60, 56, 71, 74, 62, 82, 66, 52, 62, 57, 67, 62, 71, 56, 62, 71, 71, 76, 64, 67, 46, 59, 73, 48, 60, 58, 63, 68, 64, 70, 57, 71, 86, 66, 68, 67, 70, 59, 47, 66, 61, 52, 57, 70, 53, 53, 69, 53, 69, 67, 69, 67, 73, 74, 61, 101, 43, 58, 65, 62, 92, 54, 75, 46, 99, 45, 58, 77, 59, 61, 54, 76, 71, 76, 67, 56, 66, 58, 59, 66, 45, 87, 49, 59, 68, 66, 70, 76, 68, 154, 51, 69, 59, 59, 65, 59, 73, 63, 71, 61, 70, 70, 49, 73, 64, 61, 68, 77, 71, 74, 70, 67, 49, 68, 85, 62, 55, 61, 82, 58, 55, 68, 70, 62, 61, 72, 52, 47, 66, 54, 96, 61, 55, 52, 73, 70, 62, 61, 60, 56, 78, 54, 57, 85, 52, 61, 56, 75, 61, 103, 75, 60, 55, 67, 134, 64, 50, 64, 76, 69, 60, 63, 69, 69, 64, 52, 67, 55, 61, 49, 54, 71, 68, 54, 74, 61, 68, 62, 73, 69, 66, 59, 82, 68, 58, 76, 57, 76, 89, 76, 46, 49, 58, 100, 68, 82, 61, 68, 80, 69, 61, 63, 59, 56, 61, 63, 99, 75, 75, 71, 79, 65, 59, 55, 70, 75, 75, 57, 50, 83, 67, 79, 71, 114, 61, 54, 104, 80, 101, 73, 76, 66, 54, 54, 57, 66, 70, 59, 81, 61, 52, 64, 67, 69, 94, 82, 61, 50, 48, 61, 74, 56, 65, 66, 50, 56, 54, 56, 35, 53, 54, 48, 85, 61, 59, 48, 69, 89, 83, 66, 63, 64, 57, 57, 62, 51, 97, 59, 87, 66, 65, 60, 76, 63, 107, 73, 63, 62, 60, 53, 55, 60, 71, 59, 61, 56, 57, 57, 67, 58, 79, 47, 52, 98, 60, 48, 56, 62, 55, 55, 58, 68, 58, 50, 61, 65, 50, 62, 45, 79, 87, 60, 53, 63, 85, 59, 55, 83, 55, 70, 76, 70, 67, 68, 72, 64, 65, 54, 63, 61, 48, 58, 62, 56, 66, 69, 73, 80, 64, 50, 68, 73, 49, 57, 59, 65, 51, 81, 42, 46, 60, 44, 52, 61, 73, 37, 62, 75, 60, 59, 109, 63, 124, 51, 72, 75, 81, 73, 64, 73, 77, 59, 53, 64, 64, 77, 71, 95, 62, 50, 65, 52, 55, 56, 80, 65, 56, 66, 61, 102, 54, 81, 88, 86, 69, 48, 71, 85, 58, 64, 54, 78, 63, 77, 74, 86, 65, 76, 74, 92, 64, 70, 69, 87, 60, 64, 63, 66, 71, 68, 52, 76, 79, 95, 47, 59, 63, 69, 69, 89, 63, 52, 60, 53, 59, 70, 55, 55, 56, 73, 67, 54, 75, 79, 53, 76, 55, 117, 112, 69, 57, 73, 35, 174, 43, 65, 76, 67, 61, 39, 38, 63, 53, 55, 107, 68, 73, 38, 69, 65, 67, 55, 64, 72, 56, 47, 63, 64, 44, 58, 60, 85, 58, 69, 69, 54, 48, 73, 56, 103, 47, 40, 59, 53, 61, 79, 46, 99, 73, 82, 58, 40, 46, 60, 73, 48, 37, 52, 55, 63, 55, 71, 48, 68, 71, 65, 80, 47, 57, 51, 59, 76, 61, 106, 43, 77, 79, 59, 89, 68, 103, 63, 66, 68, 42, 32, 61, 42, 63, 53, 64, 76, 77, 58, 74, 52, 66, 76, 75, 78, 69, 39, 83, 54, 61, 53, 52, 71, 86, 60, 54, 82, 96, 49, 43, 68, 65, 49, 64, 95, 46, 78, 73, 107, 58, 52, 62, 48, 79, 86, 55, 63, 62, 66, 31, 70, 71, 71, 59, 51, 52, 60, 72, 48, 72, 53, 49, 90, 51, 103, 45, 70, 61, 
67, 75, 55, 78, 69, 86, 61, 31, 52, 61, 58, 66, 69, 89, 75, 69, 62, 40, 80, 88, 61, 69, 41, 50, 64, 77, 57, 59, 63, 55, 57, 63, 67, 92, 57, 67, 60, 57, 74, 47, 98, 79, 73, 51, 49, 48, 85, 67, 73, 65, 81, 107, 78, 58, 88, 117, 39, 69, 55, 53, 66, 65, 74, 65, 64, 67, 102, 50, 66, 61, 48, 96, 59, 29, 69, 62, 69, 57, 84, 62, 55, 55, 48, 106, 60, 35, 63, 59, 63, 102, 71, 62, 66, 57, 43, 62, 95, 91, 68, 82, 67, 86, 81, 72, 49, 71, 74, 57, 46, 55, 50, 44, 67, 51, 59, 81, 77, 54, 80, 34, 42, 70, 84, 45, 74, 54, 43, 61, 76, 54, 50, 55, 78, 68, 68, 65, 51, 55, 62, 72, 65, 73, 64, 81, 35, 118, 62, 71, 55, 84, 73, 50, 55, 60, 56, 63, 60, 75, 64, 61, 49, 102, 63, 79, 94, 73, 50, 58, 72, 92, 74, 62, 50, 52, 63, 73, 71, 57, 51, 71, 73, 60, 85, 55, 58, 47, 48, 61, 65, 82, 64, 61, 69, 75, 73, 69, 73, 52, 99, 83, 62, 60, 50, 79, 64, 73, 106, 80, 75, 69, 88, 57, 40, 61, 69, 45, 67, 75, 90, 67, 50, 70, 82, 60, 44, 57, 68, 90, 55, 61, 65, 73, 55, 48, 76, 92, 95, 66, 76, 51, 34, 65, 75, 57, 56, 71, 59, 59, 67, 83, 57, 87, 59, 49, 62, 58, 71, 89, 52, 88, 63, 38, 61, 82, 65, 57, 39, 76, 39, 38, 72, 31, 62, 71, 57, 90, 127, 67, 58, 53, 95, 63, 77, 54, 71, 63, 86, 61, 49, 62, 52, 99, 62, 69, 46, 62, 48, 54, 72, 65, 58, 54, 39, 64, 47, 67, 71, 67, 59, 121, 56, 63, 75, 66, 70, 67, 66, 70, 66, 115, 72, 61, 78, 59, 55, 71, 52, 79, 68, 70, 70, 62, 64, 66, 44, 73, 86, 71, 74, 48, 81, 53, 55, 47, 50, 56, 63, 40, 56, 47, 41, 53, 76, 55, 54, 62, 68, 81, 49, 93, 44, 83, 81, 103, 71, 54, 91, 65, 88, 45, 54, 67, 82, 52, 58, 72, 83, 58, 74, 60, 72, 75, 74, 77, 73, 71, 42, 66, 45, 71, 74, 62, 58, 76, 94, 74, 61, 68, 102, 41, 70, 96, 42, 78, 65, 64, 76, 66, 60, 78, 64, 55, 56, 114, 63, 41, 93, 84, 72, 66, 54, 57, 63, 58, 46, 65, 58, 106, 69, 67, 56, 65, 67, 52, 66, 58, 64, 62, 74, 70, 77, 75, 47, 43, 67, 83, 99, 58, 79, 53, 43, 83, 58, 68, 118, 65, 57, 96, 52, 53, 51, 47, 88, 55, 61, 67, 54, 76, 68, 70, 58, 53, 50, 61, 55, 80, 61, 50, 60, 73, 154, 55, 73, 87, 56, 61, 75, 50, 77, 60, 110, 63, 91, 90, 62, 48, 82, 51, 87, 43, 68, 49, 68, 72, 70, 66, 48, 45, 57, 49, 68, 78, 84, 61, 57, 71, 64, 49, 46, 52, 65, 68, 72, 76, 41, 63, 51, 54, 72, 67, 70, 119, 61, 58, 47, 60, 65, 60, 44, 60, 49, 83, 70, 73, 59, 55, 93, 60, 78, 74, 76, 73, 62, 87, 87, 68, 51, 46, 67, 65, 79, 62, 50, 52, 99, 45, 67, 74, 84, 69, 54, 59, 60, 91, 73, 54, 55, 53, 86, 60, 50, 61, 34, 79, 62, 65, 85, 62, 64, 43, 63, 61, 51, 50, 72, 74, 55, 61, 62, 68, 104, 66, 76, 92, 78, 74, 55, 73, 60, 45, 118, 75, 59, 74, 117, 55, 76, 52, 54, 68, 71, 79, 78, 66, 100, 79, 45, 57, 78, 65, 104, 63, 66, 53, 82, 75, 117, 53, 61, 63, 52, 83, 73, 69, 54, 83, 81, 67, 52, 80, 63, 111, 55, 50, 44, 77, 50, 75, 72, 89, 87, 54, 69, 53, 48, 58, 90, 63, 85, 61, 75, 76, 75, 86, 59, 72, 54, 62, 72, 74, 46, 58, 49, 52, 38, 55, 57, 44, 86, 39, 101, 95, 62, 88, 54, 55, 64, 75, 50, 51, 60, 75, 59, 59, 73, 80, 94, 55, 48, 75, 60, 66, 68, 101, 42, 85, 60, 66, 55, 112, 60, 59, 75, 59, 55, 74, 69, 72, 48, 85, 57, 61, 45, 64, 69, 35, 57, 71, 49, 54, 67, 57, 48, 73, 51, 75, 58, 57, 60, 74, 82, 70, 50, 94, 53, 65, 85, 61, 57, 82, 61, 64, 48, 86, 85, 58, 56, 82, 65, 62, 52, 54, 50, 69, 69, 62, 69, 123, 94, 49, 76, 84, 77, 69, 47, 56, 71, 68, 60, 61, 56, 81, 72, 89, 75, 50, 67, 48, 45, 63, 63, 59, 57, 54, 69, 73, 60, 56, 64, 87, 78, 68, 66, 67, 57, 73, 31, 64, 63, 67, 77, 59, 61, 93, 59, 80, 93, 58, 68, 62, 93, 46, 73, 64, 62, 45, 84, 46, 47, 98, 37, 50, 83, 51, 65, 61, 67, 67, 38, 47, 67, 63, 77, 66, 50, 62, 67, 71, 57, 81, 76, 62, 64, 55, 61, 70, 53, 57, 60, 62, 64, 41, 58, 40, 55, 91, 65, 77, 69, 53, 55, 
52, 51, 76, 58, 50, 78, 65, 73, 76, 63, 64, 58, 50, 73, 70, 70, 67, 147, 74, 55, 71, 65, 67, 57, 67, 69, 73, 76, 58, 126, 72, 52, 71, 60, 71, 76, 74, 87, 71, 72, 60, 58, 82, 79, 55, 65, 65, 82, 88, 73, 60, 78, 80, 74, 55, 58, 39, 57, 60, 54, 65, 77, 46, 46, 46, 66, 66, 57, 62, 54, 59, 63, 64, 53, 66, 64, 64, 55, 94, 49, 83, 66, 67, 66, 58, 61, 54, 96, 35, 77, 56, 71, 61, 60, 58, 79, 46, 49, 88, 56, 52, 67, 53, 81, 81, 74, 62, 89, 69, 40, 44, 67, 59, 65, 90, 78, 54, 73, 79, 68, 69, 51, 88, 70, 84, 41, 56, 79, 67, 51, 60, 60, 69, 50, 63, 93, 59, 47, 88, 72, 89, 71, 62, 59, 62, 83, 69, 64, 54, 75, 62, 65, 57, 72, 69, 74, 55, 73, 39, 56, 98, 48, 39, 59, 59, 72, 60, 55, 62, 63, 94, 60, 70, 73, 56, 83, 48, 60, 106, 60, 78, 81, 92, 63, 65, 93, 69, 84, 53, 63, 41, 98, 77, 74, 53, 62, 72, 67, 62, 60, 45, 92, 56, 53, 85, 76, 67, 74, 42, 43, 49, 72, 49, 54, 48, 81, 81, 82, 73, 65, 62, 72, 77, 44, 60, 72, 77, 39, 74, 51, 77, 53, 86, 40, 58, 80, 43, 82, 59, 41, 75, 59, 43, 71, 80, 71, 65, 49, 65, 69, 52, 56, 87, 60, 73, 56, 39, 70, 62, 86, 70, 60, 68, 27, 47, 68, 69, 71, 74, 66, 70, 40, 57, 68, 46, 53, 63, 58, 50, 68, 43, 51, 55, 74, 63, 59, 41, 42, 47, 69, 55, 111, 57, 52, 58, 47, 60, 63, 54, 68, 42, 84, 66, 67, 77, 74, 104, 70, 56, 78, 60, 69, 70, 69, 57, 83, 72, 80, 49, 49, 59, 62, 67, 84, 100, 63, 48, 48, 79, 80, 91, 63, 50, 66, 89, 93, 113, 68, 46, 46, 75, 57, 61, 46, 62, 71, 81, 72, 60, 50, 91, 65, 76, 49, 69, 38, 71, 90, 40, 56, 119, 72, 64, 67, 54, 51, 62, 56, 51, 86, 65, 47, 73, 82, 78, 87, 72, 70, 71, 59, 43, 55, 59, 66, 88, 55, 56, 66, 86, 87, 54, 76, 61, 53, 66, 81, 86, 46, 66, 40, 83, 51, 95, 69, 70, 44, 79, 76, 52, 64, 75, 75, 76, 84, 56, 33, 65, 78, 59, 60, 43, 63, 59, 63, 61, 55, 68, 86, 38, 55, 59, 68, 66, 67, 91, 44, 66, 86, 67, 64, 98, 73, 60, 65, 63, 87, 97, 62, 61, 50, 104, 74, 66, 77, 70, 77, 59, 96, 45, 74, 52, 64, 59, 86, 53, 49, 62, 75, 112, 71, 56, 78, 62, 63, 45, 90, 64, 53, 89, 84, 57, 72, 47, 155, 52, 48, 57, 53, 72, 67, 58, 58, 62, 77, 54, 59, 83, 81, 45, 39, 69, 54, 74, 65, 95, 70, 56, 62, 67, 77, 62, 64, 69, 68, 59, 65, 51, 56, 59, 82, 53, 87, 89, 61, 74, 70, 39, 59, 60, 78, 76, 60, 75, 71, 81, 59, 65, 62, 48, 70, 86, 105, 81, 39, 63, 79, 78, 58, 63, 67, 49, 75, 43, 36, 103, 50, 63, 69, 107, 82, 64, 65, 78, 78, 39, 46, 58, 44, 56, 76, 63, 76, 82, 51, 81, 65, 70, 67, 81, 43, 55, 49, 60, 62, 70, 73, 86, 90, 70, 76, 51, 49, 82, 88, 65, 80, 78, 55, 83, 78, 98, 37, 75, 75, 77, 83, 78, 55, 86, 100, 56, 62, 72, 28, 69, 78, 64, 57, 93, 71, 67, 44, 67, 67, 52, 68, 98, 97, 61, 67, 70, 79, 70, 60, 68, 64, 74, 60, 61, 67, 65, 75, 110, 96, 68, 69, 79, 58, 93, 60, 60, 77, 72, 77, 79, 105, 53, 93, 64, 103, 59, 87, 68, 77, 62, 64, 69, 66, 69, 57, 63, 70, 76, 69, 62, 58, 86, 79, 61, 54, 86, 48, 71, 44, 73, 64, 80, 43, 63, 61, 70, 46, 54, 70, 59, 71, 76, 72, 62, 45, 72, 67, 81, 66, 68, 63, 52, 67, 71, 52, 77, 74, 72, 60, 65, 81, 65, 59, 62, 55, 66, 53, 68, 60, 63, 57, 78, 76, 67, 55, 72, 87, 66, 85, 57, 61, 49, 73, 84, 66, 75, 66, 74, 66, 61, 68, 91, 83, 58, 90, 59, 87, 39, 95, 58, 48, 73, 60, 65, 64, 51, 68, 52, 85, 74, 89, 74, 59, 63, 69, 44, 65, 89, 96, 64, 48, 70, 84, 59, 92, 48, 53, 73, 77, 56, 74, 45, 71, 59, 60, 39, 99, 76, 64, 83, 64, 65, 52, 67, 68, 63, 65, 135, 57, 62, 54, 70, 89, 72, 58, 88, 63, 60, 81, 63, 62, 53, 50, 59, 71, 68, 61, 102, 62, 126, 76, 79, 55, 78, 54, 83, 65, 57, 57, 83, 57, 45, 42, 72, 66, 31, 58, 51, 73, 47, 56, 47, 56, 80, 55, 64, 74, 80, 49, 61, 56, 84, 63, 60, 104, 59, 74, 63, 78, 36, 47, 52, 63, 77, 64, 62, 77, 76, 70, 62, 44, 61, 66, 63, 62, 60, 65, 99, 
58, 58, 39, 68, 67, 57, 79, 75, 78, 42, 67, 158, 59, 76, 62, 48, 61, 74, 78, 53, 83, 57, 66, 55, 60, 66, 61, 74, 73, 100, 66, 66, 84, 53, 58, 86, 44, 53, 59, 56, 73, 84, 72, 78, 57, 69, 58, 83, 53, 127, 57, 51, 77, 53, 55, 47, 57, 63, 72, 57, 61, 71, 64, 68, 49, 63, 63, 58, 59, 56, 67, 66, 79, 57, 65, 58, 68, 55, 85, 64, 57, 55, 52, 58, 50, 68, 84, 71, 64, 64, 49, 68, 142, 57, 63, 60, 67, 72, 74, 55, 67, 59, 70, 47, 69, 67, 81, 47, 56, 54, 75, 73, 67, 90, 63, 87, 72, 99, 59, 64, 52, 49, 56, 64, 67, 67, 51, 81, 49, 57, 72, 51, 91, 61, 65, 70, 75, 72, 58, 53, 69, 59, 68, 94, 58, 48, 62, 64, 46, 56, 74, 41, 72, 65, 46, 71, 62, 64, 65, 59, 54, 48, 58, 45, 55, 84, 41, 86, 84, 82, 45, 71, 65, 53, 55, 77, 69, 69, 77, 60, 49, 64, 68, 66, 77, 55, 42, 57, 58, 54, 72, 74, 61, 66, 25, 77, 66, 77, 77, 90, 69, 66, 101, 108, 48, 54, 77, 65, 82, 56, 52, 101, 85, 59, 63, 55, 45, 81, 66, 62, 50, 70, 52, 43, 77, 43, 72, 91, 55, 65, 53, 57, 89, 63, 67, 57, 70, 71, 65, 58, 44, 75, 77, 64, 54, 74, 70, 100, 47, 56, 68, 102, 64, 64, 106, 64, 69, 52, 66, 42, 62, 99, 87, 66, 68, 73, 66, 38, 63, 67, 53, 78, 74, 64, 62, 47, 78, 70, 52, 79, 78, 79, 53, 67, 95, 69, 74, 81, 59, 60, 60, 69, 61, 48, 79, 59, 63, 73, 58, 64, 67, 66, 54, 53, 64, 45, 91, 52, 63, 72, 67, 52, 52, 81, 64, 72, 47, 69, 67, 52, 56, 63, 64, 61, 93, 72, 61, 58, 67, 61, 68, 66, 65, 60, 68, 55, 75, 78, 71, 44, 76, 71, 68, 85, 48, 77, 54, 63, 83, 51, 83, 56, 67, 80, 71, 69, 55, 80, 89, 81, 62, 60, 51, 78, 75, 77, 57, 56, 106, 131, 116, 56, 51, 73, 44, 58, 64, 67, 62, 76, 57, 67, 73, 72, 57, 68, 54, 54, 68, 95, 55, 54, 76, 66, 64, 84, 60, 68, 74, 74, 51, 53, 74, 68, 56, 64, 79, 65, 55, 70, 63, 83, 59, 74, 57, 58, 41, 52, 50, 61, 61, 86, 49, 66, 65, 62, 60, 84, 74, 51, 59, 49, 56, 56, 50, 45, 60, 58, 67, 45, 48, 77, 57, 64, 94, 89, 45, 61, 57, 65, 93, 50, 81, 63, 38, 82, 121, 65, 61, 53, 49, 96, 58, 58, 64, 69, 69, 66, 62, 60, 67, 93, 52, 66, 64, 52, 69, 59, 54, 70, 62, 49, 90, 75, 79, 75, 72, 71, 49, 80, 77, 56, 48, 80, 55, 58, 69, 67, 73, 86, 87, 66, 67, 66, 68, 57, 65, 75, 67, 59, 81, 60, 94, 78, 49, 60, 52, 53, 64, 106, 67, 65, 104, 65, 62, 52, 71, 61, 57, 50, 66, 88, 70, 73, 71, 68, 81, 80, 54, 65, 57, 64, 61, 88, 64, 59, 72, 47, 62, 51, 92, 67, 52, 62, 59, 63, 42, 90, 81, 60, 74, 109, 78, 75, 78, 50, 43, 35, 46, 63, 62, 84, 55, 81, 81, 72, 62, 57, 79, 48, 69, 48, 59, 59, 50, 63, 61, 95, 58, 72, 54, 60, 60, 67, 53, 58, 65, 65, 64, 70, 34, 71, 90, 39, 91, 94, 63, 85, 58, 87, 66, 63, 51, 60, 67, 56, 71, 81, 62, 63, 87, 61, 68, 58, 76, 63, 68, 67, 58, 85, 64, 60, 67, 75, 54, 55, 61, 68, 80, 58, 40, 65, 55, 61, 55, 61, 57, 65, 64, 60, 69, 47, 58, 63, 61, 50, 66, 54, 53, 102, 62, 79, 50, 86, 49, 70, 64, 55, 74, 60, 47, 62, 86, 63, 90, 55, 89, 42, 56, 60, 54, 47, 59, 118, 47, 52, 57, 52, 52, 51, 86, 60, 49, 64, 71, 58, 67, 62, 61, 71, 66, 64, 93, 63, 99, 73, 82, 78, 95, 55, 46, 63, 67, 52, 56, 77, 63, 59, 55, 78, 66, 46, 62, 49, 72, 51, 64, 84, 60, 74, 60, 39, 65, 54, 57, 70, 92, 58, 64, 87, 68, 67, 70, 49, 55, 56, 97, 63, 58, 72, 67, 57, 58, 52, 90, 73, 67, 44, 70, 124, 57, 88, 49, 89, 53, 57, 66, 77, 102, 57, 76, 57, 43, 51, 60, 99, 74, 80, 62, 83, 31, 78, 63, 57, 56, 56, 61, 55, 63, 54, 44, 48, 50, 58, 53, 68, 52, 99, 76, 71, 51, 86, 74, 61, 81, 66, 70, 113, 42, 55, 64, 69, 91, 47, 94, 66, 61, 67, 56, 61, 54, 52, 75, 57, 68, 35, 78, 92, 62, 58, 57, 69, 69, 80, 57, 77, 62, 61, 52, 61, 69, 54, 82, 72, 64, 42, 62, 81, 66, 67, 51, 57, 57, 70, 53, 106, 70, 77, 80, 69, 91, 44, 59, 44, 62, 53, 55, 58, 84, 57, 65, 78, 90, 146, 69, 61, 78, 60, 65, 52, 66, 55, 
62, 71, 78, 69, 40, 49, 77, 68, 76, 72, 85, 62, 48, 47, 79, 62, 66, 86, 72, 72, 68, 51, 64, 65, 64, 82, 54, 84, 68, 71, 106, 93, 81, 56, 76, 76, 52, 63, 78, 63, 58, 58, 51, 70, 50, 97, 83, 63, 49, 63, 58, 57, 38, 57, 66, 86, 45, 46, 69, 32, 37, 46, 47, 59, 52, 72, 48, 71, 43, 64, 68, 66, 73, 66, 61, 54, 60, 87, 63, 72, 75, 75, 68, 53, 60, 59, 54, 53, 62, 76, 48, 73, 59, 69, 54, 60, 74, 62, 89, 120, 51, 79, 54, 53, 73, 52, 75, 52, 51, 73, 75, 38, 96, 49, 59, 46, 61, 142, 64, 58, 84, 61, 70, 51, 74, 66, 70, 56, 90, 92, 57, 68, 64, 57, 50, 50, 70, 56, 69, 68, 57, 65, 95, 60, 99, 60, 50, 58, 57, 56, 59, 49, 60, 76, 57, 72, 59, 59, 57, 56, 64, 62, 80, 54, 72, 84, 49, 72, 58, 56, 45, 61, 48, 74, 48, 95, 56, 50, 59, 52, 69, 55, 58, 51, 72, 70, 63, 79, 68, 68, 55, 51, 65, 56, 58, 73, 85, 74, 64, 62, 49, 65, 55, 76, 83, 50, 97, 82, 84, 62, 49, 20, 56, 38, 57, 74, 61, 44, 81, 68, 88, 62, 66, 47, 49, 65, 68, 52, 60, 91, 53, 92, 46, 68, 60, 38, 59, 51, 57, 76, 61, 68, 83, 71, 39, 76, 52, 74, 71, 63, 61, 69, 62, 45, 68, 87, 54, 85, 63, 59, 90, 42, 68, 60, 79, 62, 63, 54, 61, 98, 97, 55, 60, 63, 73, 69, 57, 53, 46, 53, 51, 102, 79, 70, 62, 80, 47, 51, 57, 56, 71, 48, 62, 82, 65, 57, 44, 62, 64, 62, 68, 60, 79, 75, 73, 97, 61, 94, 76, 71, 57, 60, 76, 58, 44, 52, 64, 77, 57, 53, 109, 66, 52, 46, 66, 73, 51, 67, 63, 60, 53, 57, 73, 63, 79, 85, 65, 58, 127, 67, 38, 51, 79, 51, 87, 70, 65, 50, 75, 60, 72, 82, 50, 59, 65, 64, 65, 47, 82, 72, 72, 54, 107, 59, 71, 93, 36, 52, 37, 55, 44, 83, 57, 74, 53, 52, 78, 86, 71, 49, 56, 60, 49, 79, 51, 64, 67, 56, 52, 35, 60, 43, 92, 65, 53, 63, 64, 75, 58, 77, 54, 68, 63, 77, 58, 74, 78, 55, 41, 61, 67, 50, 66, 55, 74, 45, 60, 79, 68, 60, 67, 58, 52, 65, 68, 73, 50, 83, 72, 70, 67, 84, 57, 57, 148, 91, 63, 43, 58, 76, 101, 105, 52, 58, 60, 57, 58, 71, 68, 65, 58, 101, 74, 83, 53, 77, 50, 83, 53, 79, 51, 59, 69, 37, 36, 45, 67, 77, 61, 85, 68, 52, 52, 57, 50, 95, 68, 57, 49, 64, 71, 43, 44, 63, 78, 65, 61, 61, 53, 86, 103, 74, 69, 85, 68, 63, 62, 69, 70, 81, 85, 69, 92, 61, 76, 48, 112, 66, 49, 114, 55, 119, 53, 59, 61, 52, 79, 61, 42, 55, 54, 61, 70, 65, 79, 50, 50, 80, 67, 48, 55, 69, 80, 54, 45, 79, 86, 92, 57, 36, 68, 42, 54, 74, 59, 58, 49, 75, 48, 45, 80, 57, 44, 48, 73, 55, 66, 72, 68, 89, 70, 78, 83, 68, 52, 69, 58, 86, 76, 55, 52, 54, 75, 70, 51, 50, 79, 55, 68, 82, 90, 66, 57, 88, 45, 49, 71, 51, 64, 50, 71, 64, 53, 69, 81, 70, 64, 61, 36, 66, 73, 64, 75, 70, 54, 59, 74, 71, 71, 81, 66, 64, 73, 71, 56, 54, 71, 62, 57, 77, 54, 54, 73, 72, 65, 82, 60, 96, 61, 61, 61, 110, 124, 54, 69, 60, 67, 56, 55, 40, 56, 63, 87, 61, 90, 49, 40, 61, 65, 93, 88, 66, 62, 79, 55, 74, 76, 43, 72, 76, 60, 61, 46, 66, 59, 92, 45, 87, 76, 77, 71, 54, 52, 64, 69, 75, 100, 49, 67, 63, 77, 84, 62, 70, 80, 46, 62, 77, 78, 78, 50, 78, 64, 49, 46, 40, 63, 97, 69, 47, 53, 59, 63, 74, 70, 56, 71, 67, 59, 80, 84, 42, 73, 67, 115, 65, 93, 60, 51, 46, 70, 64, 54, 69, 126, 90, 53, 70, 55, 92, 66, 62, 70, 62, 59, 58, 62, 49, 47, 64, 72, 59, 75, 114, 84, 52, 59, 106, 62, 67, 42, 84, 69, 51, 55, 58, 69, 55, 83, 62, 68, 64, 64, 55, 74, 44, 44, 75, 56, 59, 64, 85, 54, 54, 59, 46, 94, 64, 95, 78, 67, 64, 96, 83, 78, 73, 77, 61, 73, 94, 64, 63, 65, 65, 145, 64, 81, 91, 63, 53, 49, 51, 64, 63, 68, 56, 67, 61, 63, 41, 66, 74, 37, 123, 82, 71, 53, 79, 71, 52, 51, 66, 71, 49, 74, 67, 54, 56, 71, 82, 86, 86, 93, 66, 40, 59, 77, 65, 71, 55, 72, 75, 91, 53, 67, 71, 69, 63, 76, 61, 67, 74, 55, 59, 46, 74, 54, 58, 65, 68, 73, 71, 65, 76, 55, 77, 88, 76, 68, 62, 44, 63, 45, 89, 53, 50, 53, 48, 50, 70, 46, 60, 
74, 59, 78, 57, 82, 106, 54, 76, 78, 54, 63, 115, 55, 72, 56, 68, 84, 60, 56, 62, 63, 49, 54, 61, 78, 72, 68, 65, 60, 62, 69, 62, 106, 53, 62, 67, 66, 72, 79, 79, 81, 66, 100, 43, 54, 77, 72, 76, 57, 85, 52, 62, 42, 68, 101, 106, 47, 54, 75, 72, 69, 58, 70, 66, 56, 70, 90, 51, 67, 34, 47, 55, 93, 79, 93, 40, 67, 71, 63, 62, 73, 69, 86, 98, 75, 55, 49, 47, 60, 74, 102, 56, 57, 78, 86, 64, 98, 50, 62, 69, 60, 61, 87, 65, 64, 74, 51, 60, 63, 65, 85, 75, 62, 107, 32, 83, 67, 49, 55, 66, 53, 74, 50, 75, 53, 61, 45, 40, 107, 89, 60, 41, 50, 60, 105, 62, 64, 73, 75, 74, 53, 61, 55, 68, 65, 72, 58, 57, 77, 65, 57, 54, 66, 72, 99, 64, 57, 67, 51, 44, 97, 99, 48, 56, 63, 91, 45, 76, 63, 110, 61, 73, 93, 62, 68, 85, 97, 47, 65, 46, 62, 71, 73, 70, 57, 65, 104, 61, 96, 78, 65, 54, 56, 92, 77, 72, 46, 113, 60, 46, 84, 75, 67, 60, 44, 98, 51, 41, 86, 57, 117, 60, 70, 59, 63, 79, 62, 53, 68, 97, 50, 53, 77, 78, 63, 62, 61, 75, 87, 68, 51, 74, 58, 54, 77, 58, 56, 70, 52, 84, 60, 76, 55, 69, 81, 54, 84, 58, 62, 60, 72, 71, 54, 62, 107, 64, 59, 59, 71, 72, 62, 86, 47, 88, 58, 110, 79, 54, 57, 77, 64, 55, 59, 53, 62, 63, 69, 64, 76, 50, 45, 79, 65, 51, 58, 74, 54, 73, 68, 69, 59, 97, 59, 56, 55, 95, 62, 40, 49, 59, 56, 62, 62, 77, 75, 48, 61, 63, 58, 73, 49, 59, 114, 75, 49, 52, 56, 53, 71, 43, 49, 51, 63, 47, 51, 85, 75, 70, 64, 94, 94, 52, 70, 61, 66, 61, 75, 87, 83, 63, 76, 72, 73, 52, 66, 90, 84, 69, 85, 67, 55, 55, 53, 63, 72, 45, 59, 64, 57, 72, 62, 54, 69, 46, 67, 59, 78, 81, 74, 75, 55, 62, 73, 53, 61, 47, 69, 60, 72, 51, 122, 95, 55, 69, 55, 56, 54, 81, 67, 79, 45, 67, 53, 61, 61, 64, 48, 142, 61, 46, 66, 75, 49, 66, 62, 86, 64, 70, 58, 52, 92, 99, 46, 68, 75, 87, 88, 39, 56, 60, 43, 47, 63, 44, 86, 70, 45, 61, 82, 101, 78, 43, 69, 109, 54, 65, 73, 64, 64, 52, 67, 51, 59, 81, 47, 58, 62, 100, 45, 60, 91, 57, 70, 67, 86, 61, 72, 79, 53, 46, 68, 82, 73, 71, 61, 60, 47, 57, 52, 64, 60, 87, 115, 51, 63, 46, 68, 59, 64, 72, 56, 60, 92, 62, 60, 89, 56, 81, 60, 76, 94, 76, 90, 84, 63, 83, 46, 53, 63, 54, 70, 59, 66, 39, 87, 97, 68, 57, 47, 54, 43, 71, 102, 60, 60, 56, 65, 48, 42, 70, 77, 72, 52, 57, 64, 86, 78, 47, 68, 58, 55, 50, 56, 67, 72, 73, 51, 79, 53, 81, 62, 51, 51, 77, 47, 88, 67, 46, 51, 58, 63, 51, 54, 64, 69, 110, 33, 51, 69, 57, 54, 64, 57, 42, 78, 69, 64, 64, 53, 58, 52, 97, 31, 53, 59, 67, 49, 97, 77, 97, 54, 66, 55, 87, 58, 54, 61, 59, 57, 67, 67, 81, 58, 52, 72, 66, 50, 65, 97, 72, 97, 53, 65, 55, 76, 38, 44, 44, 89, 62, 48, 60, 95, 69, 68, 51, 77, 63, 58, 69, 99, 63, 60, 77, 59, 80, 71, 59, 78, 61, 77, 72, 55, 57, 53, 53, 64, 106, 63, 51, 92, 63, 89, 84, 57, 60, 44, 60, 58, 63, 52, 69, 66, 58, 58, 77, 54, 68, 38, 63, 95, 59, 60, 83, 102, 88, 47, 66, 87, 51, 105, 86, 67, 94, 64, 104, 61, 61, 71, 66, 72, 63, 61, 71, 63, 64, 52, 62, 40, 58, 77, 40, 55, 54, 110, 72, 52, 76, 82, 75, 68, 61, 51, 86, 63, 74, 68, 62, 63, 72, 68, 62, 60, 49, 55, 61, 75, 85, 72, 67, 128, 54, 61, 75, 67, 45, 51, 45, 62, 75, 57, 58, 69, 41, 72, 74, 76, 67, 84, 87, 81, 50, 62, 82, 65, 103, 52, 51, 79, 62, 54, 64, 61, 73, 75, 45, 75, 63, 51, 92, 98, 79, 34, 57, 81, 140, 53, 50, 52, 85, 91, 48, 71, 49, 80, 66, 54, 73, 68, 63, 112, 51, 116, 67, 70, 50, 72, 76, 70, 86, 99, 71, 79, 74, 72, 91, 71, 62, 69, 34, 72, 67, 64, 64, 34, 77, 76, 69, 84, 60, 49, 51, 76, 78, 55, 79, 63, 69, 67, 63, 56, 72, 63, 58, 57, 94, 76, 66, 61, 64, 47, 72, 65, 50, 46, 52, 51, 67, 84, 96, 56, 85, 59, 52, 82, 74, 70, 49, 78, 70, 64, 38, 86, 59, 68, 55, 60, 53, 63, 54, 82, 42, 85, 52, 52, 52, 98, 64, 62, 99, 69, 59, 62, 56, 65, 56, 61, 68, 80, 81, 
68, 59, 58, 60, 63, 56, 69, 76, 69, 57, 49, 40, 68, 53, 71, 38, 61, 48, 46, 66, 70, 70, 80, 78, 61, 96, 50, 48, 65, 57, 75, 53, 57, 80, 68, 63, 59, 54, 67, 57, 57, 60, 67, 65, 67, 60, 54, 57, 45, 49, 68, 65, 107, 81, 51, 71, 40, 73, 67, 61, 88, 75, 66, 74, 66, 60, 79, 60, 86, 47, 61, 64, 79, 68, 62, 69, 80, 91, 64, 59, 66, 100, 56, 65, 58, 62, 63, 64, 69, 57, 62, 76, 48, 66, 61, 60, 42, 82, 59, 55, 55, 71, 74, 62, 118, 97, 67, 69, 68, 52, 62, 59, 49, 50, 57, 84, 61, 34, 77, 87, 62, 51, 62, 65, 54, 36, 77, 46, 59, 85, 59, 77, 86, 81, 66, 54, 58, 56, 64, 53, 32, 68, 87, 52, 59, 46, 51, 88, 46, 62, 43, 52, 61, 96, 66, 51, 85, 55, 66, 58, 55, 56, 82, 60, 59, 87, 85, 61, 77, 74, 73, 63, 67, 82, 90, 87, 90, 75, 75, 79, 62, 45, 76, 70, 33, 63, 54, 56, 65, 76, 73, 48, 70, 48, 50, 43, 74, 67, 63, 60, 55, 83, 46, 57, 64, 65, 64, 60, 73, 59, 56, 93, 52, 44, 85, 76, 69, 46, 108, 54, 65, 59, 97, 74, 49, 61, 64, 54, 71, 71, 68, 68, 81, 60, 55, 81, 75, 51, 71, 66, 50, 45, 81, 58, 112, 67, 52, 79, 67, 76, 63, 59, 69, 66, 52, 61, 53, 83, 53, 65, 59, 89, 57, 85, 53, 66, 82, 68, 60, 70, 68, 79, 80, 77, 56, 59, 76, 48, 67, 65, 73, 77, 79, 54, 59, 59, 60, 71, 83, 64, 56, 53, 69, 63, 63, 73, 54, 62, 53, 63, 47, 70, 64, 79, 66, 93, 52, 78, 53, 84, 74, 91, 71, 191, 51, 73, 77, 68, 85, 49, 105, 60, 65, 66, 68, 75, 59, 61, 77, 64, 60, 64, 59, 58, 64, 58, 57, 62, 60, 66, 61, 67, 55, 63, 69, 62, 54, 82, 48, 54, 81, 63, 55, 78, 78, 57, 51, 52, 57, 57, 63, 62, 61, 81, 90, 67, 71, 69, 64, 64, 113, 51, 87, 63, 70, 65, 67, 63, 48, 53, 76, 85, 75, 56, 47, 62, 89, 56, 67, 40, 65, 71, 58, 74, 52, 62, 76, 60, 50, 60, 74, 49, 77, 52, 72, 54, 59, 78, 70, 78, 69, 61, 56, 69, 65, 49, 70, 58, 39, 93, 91, 58, 88, 42, 38, 45, 41, 60, 79, 72, 57, 58, 64, 59, 75, 84, 59, 41, 48, 66, 77, 64, 55, 58, 47, 73, 60, 72, 98, 69, 65, 48, 59, 70, 49, 55, 74, 63, 66, 58, 83, 39, 54, 60, 72, 71, 61, 67, 73, 88, 74, 50, 65, 65, 73, 58, 65, 74, 83, 62, 66, 76, 48, 60, 107, 68, 80, 66, 65, 62, 80, 69, 63, 68, 62, 42, 58, 75, 60, 58, 70, 51, 65, 54, 52, 51, 67, 64, 70, 65, 80, 68, 65, 43, 73, 64, 74, 77, 77, 67, 51, 66, 63, 64, 56, 69, 48, 71, 67, 70, 71, 78, 66, 57, 51, 63, 55, 69, 90, 51, 56, 50, 80, 68, 49, 51, 54, 81, 52, 70, 71, 65, 68, 75, 48, 58, 62, 81, 65, 79, 55, 64, 77, 75, 113, 77, 70, 60, 64, 52, 48, 54, 75, 66, 60, 59, 73, 68, 76, 70, 73, 65, 66, 72, 64, 51, 61, 73, 70, 54, 75, 68, 66, 65, 78, 73, 64, 61, 78, 81, 58, 64, 81, 51, 88, 78, 82, 72, 60, 65, 77, 59, 59, 75, 58, 46, 51, 63, 55, 62, 67, 49, 60, 58, 56, 60, 54, 70, 108, 65, 62, 55, 79, 94, 86, 112, 57, 50, 54, 75, 64, 64, 86, 63, 71, 74, 81, 58, 61, 62, 63, 78, 67, 82, 62, 68, 58, 55, 66, 67, 67, 47, 59, 61, 71, 57, 69, 70, 58, 94, 68, 86, 84, 59, 131, 67, 61, 57, 77, 74, 59, 46, 86, 63, 68, 91, 63, 77, 95, 51, 79, 78, 60, 68, 67, 69, 52, 60, 57, 65, 56, 67, 79, 58, 44, 86, 64, 56, 53, 58, 49, 77, 72, 62, 60, 57, 51, 56, 66, 47, 68, 71, 54, 57, 69, 88, 63, 49, 69, 104, 50, 81, 60, 65, 59, 76, 79, 86, 72, 61, 68, 118, 62, 61, 66, 87, 57, 71, 68, 79, 67, 76, 73, 107, 54, 64, 71, 60, 55, 66, 69, 66, 43, 87, 65, 80, 67, 43, 69, 76, 83, 48, 55, 49, 78, 59, 58, 63, 61, 51, 59, 69, 72, 56, 53, 39, 80, 57, 50, 70, 85, 55, 60, 80, 68, 62, 80, 85, 79, 56, 62, 60, 48, 63, 102, 64, 59, 68, 58, 69, 56, 49, 47, 67, 63, 74, 83, 70, 67, 55, 83, 54, 65, 53, 60, 52, 86, 61, 58, 57, 74, 57, 67, 65, 65, 53, 81, 62, 62, 68, 73, 57, 50, 60, 55, 56, 65, 75, 63, 55, 77, 56, 81, 53, 74, 73, 74, 65, 71, 61, 50, 64, 54, 81, 78, 62, 60, 78, 74, 69, 59, 65, 73, 59, 76, 66, 54, 62, 45, 74, 67, 68, 53, 73, 
84, 61, 68, 74, 65, 63, 78, 60, 72, 49, 68, 46, 72, 61, 62, 58, 86, 87, 73, 44, 76, 68, 64, 63, 81, 82, 59, 71, 71, 76, 73, 58, 69, 126, 45, 47, 47, 57, 59, 57, 88, 53, 75, 82, 42, 68, 90, 65, 56, 94, 79, 59, 73, 54, 70, 66, 44, 68, 84, 55, 49, 73, 73, 66, 53, 62, 64, 57, 83, 65, 69, 62, 68, 80, 62, 59, 99, 72, 47, 81, 61, 62, 63, 71, 48, 60, 72, 61, 50, 70, 48, 74, 73, 59, 59, 59, 58, 59, 84, 75, 59, 42, 70, 61, 79, 71, 65, 54, 56, 84, 74, 95, 66, 40, 70, 49, 49, 53, 85, 55, 49, 74, 59, 75, 58, 52, 87, 66, 112, 66, 93, 68, 61, 76, 63, 57, 61, 86, 58, 67, 82, 69, 68, 72, 53, 76, 89, 65, 68, 67, 76, 62, 60, 75, 61, 59, 67, 66, 65, 138, 67, 77, 74, 54, 62, 60, 82, 81, 67, 44, 75, 45, 81, 77, 58, 61, 73, 71, 56, 52, 76, 56, 54, 80, 48, 51, 57, 35, 48, 55, 53, 53, 96, 60, 58, 90, 54, 96, 70, 65, 67, 90, 59, 74, 63, 58, 74, 63, 79, 55, 75, 69, 70, 62, 56, 63, 73, 89, 58, 66, 73, 59, 69, 80, 64, 63, 56, 71, 59, 76, 68, 56, 68, 70, 78, 53, 63, 71, 53, 47, 84, 55, 70, 71, 70, 54, 77, 59, 92, 53, 62, 77, 94, 65, 72, 91, 74, 60, 78, 63, 60, 63, 72, 82, 55, 57, 69, 71, 65, 82, 62, 67, 59, 40, 52, 104, 65, 66, 66, 69, 84, 85, 70, 62, 65, 64, 65, 78, 81, 61, 63, 88, 58, 71, 58, 62, 71, 64, 65, 90, 60, 53, 77, 74, 77, 89, 73, 67, 43, 49, 56, 71, 65, 51, 61, 65, 58, 74, 71, 50, 62, 66, 70, 95, 83, 65, 86, 75, 54, 50, 74, 89, 71, 76, 67, 61, 54, 66, 61, 55, 55, 66, 83, 69, 61, 72, 72, 106, 109, 64, 76, 132, 70, 71, 83, 74, 91, 57, 71, 64, 57, 60, 65, 69, 96, 107, 52, 58, 104, 56, 81, 49, 57, 66, 50, 57, 79, 76, 76, 70, 59, 83, 43, 81, 60, 46, 51, 58, 56, 39, 73, 62, 56, 69, 80, 55, 55, 56, 88, 65, 71, 67, 70, 92, 72, 72, 65, 62, 57, 100, 53, 71, 63, 101, 73, 48, 71, 53, 73, 58, 95, 66, 78, 64, 73, 67, 67, 51, 97, 91, 68, 67, 68, 63, 67, 69, 60, 75, 57, 49, 54, 74, 66, 87, 72, 74, 61, 55, 49, 72, 59, 62, 56, 70, 68, 49, 76, 65, 74, 68, 53, 63, 61, 140, 79, 56, 67, 48, 90, 61, 49, 66, 61, 60, 50, 47, 80, 68, 48, 75, 47, 63, 56, 72, 77, 65, 70, 82, 57, 68, 51, 61, 63, 58, 88, 59, 76, 44, 56, 71, 61, 43, 39, 77, 66, 86, 72, 90, 65, 79, 72, 45, 66, 59, 51, 68, 57, 75, 79, 64, 64, 68, 50, 66, 75, 54, 91, 53, 70, 51, 63, 53, 46, 88, 56, 63, 75, 92, 53, 63, 84, 74, 73, 63, 78, 45, 64, 51, 65, 74, 48, 49, 57, 56, 53, 71, 75, 66, 52, 67, 82, 52, 67, 67, 61, 92, 63, 42, 69, 58, 79, 74, 83, 69, 53, 81, 73, 52, 81, 53, 51, 69, 41, 51, 51, 57, 53, 58, 67, 49, 72, 68, 96, 76, 53, 55, 66, 72, 63, 71, 65, 49, 52, 57, 49, 70, 59, 63, 79, 63, 69, 83, 66, 90, 71, 46, 84, 48, 71, 82, 40, 77, 69, 60, 53, 72, 59, 64, 59, 60, 60, 70, 76, 55, 63, 52, 73, 105, 76, 80, 75, 52, 52, 54, 66, 62, 57, 79, 65, 54, 55, 72, 62, 63, 58, 70, 75, 76, 69, 88, 80, 66, 57, 55, 65, 65, 66, 56, 58, 53, 53, 60, 60, 58, 86, 62, 72, 70, 78, 87, 112, 66, 55, 63, 63, 50, 59, 41, 68, 57, 87, 84, 58, 64, 50, 54, 53, 75, 72, 77, 66, 61, 55, 84, 53, 66, 88, 66, 71, 78, 40, 62, 44, 72, 65, 56, 81, 54, 67, 52, 75, 50, 61, 70, 61, 60, 67, 56, 69, 149, 76, 62, 49, 87, 66, 72, 59, 70, 71, 69, 57, 59, 76, 63, 55, 63, 71, 81, 55, 58, 56, 62, 73, 51, 83, 53, 80, 68, 69, 55, 71, 48, 68, 53, 71, 69, 75, 70, 63, 69, 51, 53, 57, 59, 58, 67, 48, 88, 68, 64, 63, 62, 65, 59, 45, 66, 55, 84, 73, 68, 59, 67, 57, 70, 81, 63, 70, 52, 45, 78, 44, 90, 58, 67, 89, 62, 72, 72, 59, 70, 65, 59, 75, 55, 81, 74, 65, 115, 67, 61, 89, 57, 57, 61, 67, 68, 144, 65, 64, 48, 45, 71, 66, 36, 71, 68, 38, 75, 73, 59, 83, 61, 66, 64, 74, 52, 60, 55, 86, 84, 64, 80, 78, 60, 63, 71, 67, 95, 48, 58, 64, 66, 44, 47, 40, 50, 59, 62, 44, 69, 66, 47, 71, 82, 63, 61, 53, 72, 58, 42, 62, 78, 74, 71, 
60, 70, 55, 55, 73, 74, 75, 76, 73, 54, 49, 68, 62, 61, 60, 68, 51, 62, 69, 78, 44, 57, 53, 51, 86, 37, 67, 49, 71, 72, 74, 48, 64, 59, 57, 109, 53, 76, 60, 67, 64, 45, 72, 62, 52, 69, 97, 62, 76, 51, 63, 76, 52, 74, 55, 58, 84, 76, 67, 64, 84, 55, 62, 76, 73, 64, 56, 58, 75, 95, 73, 65, 58, 66, 66, 78, 75, 55, 98, 51, 56, 77, 70, 75, 98, 45, 53, 58, 64, 75, 72, 85, 72, 61, 51, 56, 74, 51, 64, 56, 90, 41, 68, 69, 58, 69, 71, 77, 49, 86, 60, 71, 50, 51, 68, 47, 76, 71, 71, 62, 72, 46, 47, 54, 65, 59, 51, 62, 71, 55, 78, 69, 72, 60, 68, 56, 59, 71, 50, 58, 61, 66, 58, 68, 49, 53, 55, 59, 64, 62, 60, 106, 47, 56, 80, 58, 80, 89, 76, 65, 56, 55, 67, 60, 62, 53, 58, 70, 71, 69, 71, 82, 44, 79, 73, 60, 57, 50, 71, 54, 80, 48, 79, 88, 63, 67, 70, 63, 53, 51, 50, 74, 111, 66, 63, 63, 55, 49, 66, 42, 50, 71, 53, 72, 99, 63, 74, 66, 48, 53, 52, 93, 52, 81, 53, 77, 53, 64, 81, 44, 65, 63, 85, 71, 61, 71, 51, 57, 58, 68, 109, 71, 63, 27, 60, 76, 78, 78, 93, 68, 71, 54, 67, 82, 55, 69, 93, 79, 71, 70, 88, 80, 54, 56, 79, 66, 78, 52, 56, 66, 80, 78, 67, 59, 47, 76, 74, 55, 39, 61, 48, 49, 57, 54, 58, 91, 88, 53, 60, 62, 48, 63, 58, 84, 62, 77, 57, 60, 89, 68, 53, 98, 75, 64, 80, 62, 80, 71, 71, 67, 45, 55, 54, 70, 70, 57, 69, 61, 42, 61, 69, 65, 45, 60, 66, 50, 62, 75, 55, 70, 63, 60, 67, 75, 52, 96, 93, 53, 48, 61, 46, 73, 65, 48, 51, 76, 92, 64, 61, 52, 73, 74, 67, 77, 96, 72, 83, 80, 52, 67, 54, 67, 92, 74, 66, 70, 49, 65, 81, 98, 94, 58, 65, 47, 96, 54, 42, 84, 68, 55, 58, 65, 102, 53, 71, 75, 55, 67, 68, 51, 38, 62, 86, 66, 82, 76, 53, 68, 103, 60, 53, 44, 77, 74, 60, 76, 72, 61, 67, 71, 46, 52, 53, 66, 55, 67, 57, 62, 58, 54, 68, 70, 68, 76, 114, 63, 41, 104, 53, 47, 75, 51, 79, 97, 70, 77, 70, 49, 64, 58, 85, 91, 64, 58, 71, 58, 79, 71, 77, 73, 74, 46, 75, 54, 77, 43, 57, 60, 71, 57, 52, 71, 82, 70, 56, 49, 60, 107, 48, 81, 48, 67, 66, 77, 85, 72, 43, 60, 64, 68, 58, 75, 85, 62, 48, 66, 59, 68, 68, 64, 61, 53, 55, 66, 107, 50, 83, 76, 81, 59, 71, 68, 65, 65, 71, 72, 59, 60, 92, 93, 67, 66, 55, 53, 66, 59, 88, 53, 124, 59, 61, 46, 64, 93, 57, 84, 67, 69, 64, 74, 59, 70, 60, 61, 57, 36, 78, 61, 56, 60, 168, 53, 75, 100, 106, 97, 89, 78, 56, 75, 52, 68, 91, 64, 94, 54, 60, 70, 68, 74, 57, 83, 58, 73, 61, 55, 58, 61, 58, 81, 64, 78, 72, 74, 70, 79, 66, 58, 107, 73, 65, 61, 67, 70, 60, 67, 65, 49, 59, 64, 45, 56, 64, 93, 59, 71, 61, 76, 55, 65, 68, 94, 68, 62, 74, 49, 66, 38, 62, 59, 68, 71, 65, 54, 79, 76, 67, 54, 82, 78, 76, 86, 68, 69, 77, 45, 68, 64, 66, 62, 66, 60, 62, 66, 59, 66, 73, 67, 69, 80, 62, 64, 61, 62, 60, 77, 69, 54, 65, 59, 67, 50, 71, 56, 55, 61, 55, 65, 57, 54, 66, 55, 59, 60, 97, 68, 61, 61, 65, 47, 59, 62, 48, 66, 60, 58, 59, 57, 48, 87, 63, 56, 67, 58, 50, 76, 68, 50, 52, 54, 62, 68, 47, 84, 62, 50, 75, 45, 60, 61, 60, 63, 106, 60, 65, 59, 62, 72, 57, 51, 69, 60, 65, 62, 63, 73, 60, 49, 73, 63, 73, 72, 66, 72, 56, 69, 77, 57, 69, 54, 59, 60, 78, 58, 60, 68, 61, 63, 64, 66, 78, 61, 84, 78, 58, 67, 83, 74, 78, 58, 70, 66, 60, 76, 76, 135, 54, 84, 64, 52, 71, 84, 69, 79, 63, 56, 81, 71, 68, 76, 58, 64, 72, 105, 62, 60, 46, 63, 55, 75, 91, 81, 131, 63, 55, 62, 77, 95, 61, 52, 54, 63, 64, 58, 67, 62, 94, 55, 58, 57, 50, 48, 54, 70, 70, 65, 63, 94, 59, 51, 62, 64, 76, 58, 66, 72, 55, 56, 76, 62, 60, 59, 61, 48, 58, 68, 65, 48, 69, 70, 61, 62, 64, 76, 75, 55, 64, 60, 69, 75, 62, 63, 61, 61, 63, 64, 60, 72, 53, 55, 74, 53, 59, 67, 61, 54, 72, 70, 52, 60, 59, 71, 77, 71, 55, 85, 69, 68, 51, 72, 54, 70, 60, 57, 56, 62, 75, 63, 57, 61, 63, 63, 53, 74, 68, 52, 72, 70, 63, 67, 65, 50, 67, 58, 
80, 62, 73, 63, 68, 61, 49, 70, 61, 68, 58, 61, 64, 67, 59, 71, 81, 64, 64, 55, 71, 80, 58, 54, 66, 62, 76, 56, 55, 51, 62, 58, 64, 59, 53, 79, 59, 74, 66, 64, 89, 65, 62, 50, 39, 61, 58, 47, 67, 91, 59, 56, 74, 57, 81, 64, 69, 70, 61, 76, 77, 66, 59, 54, 59, 56, 70, 56, 55, 49, 63, 84, 61, 72, 56, 57, 74, 52, 77, 64, 42, 154, 70, 64, 48, 78, 77, 65, 61, 63, 81, 77, 60, 51, 66, 79, 67, 62, 79, 118, 56, 73, 63, 64, 72, 52, 71, 59, 81, 93, 57, 92, 62, 62, 80, 50, 67, 54, 94, 47, 58, 53, 54, 64, 58, 61, 49, 81, 65, 78, 88, 88, 50, 74, 58, 74, 52, 62, 72, 56, 77, 57, 73, 49, 56, 73, 52, 75, 66, 65, 47, 61, 55, 45, 62, 66, 73, 70, 68, 81, 40, 60, 66, 79, 64, 63, 66, 59, 68, 63, 59, 57, 60, 75, 52, 52, 58, 66, 78, 58, 70, 63, 60, 86, 52, 71, 39, 57, 67, 61, 70, 63, 45, 73, 64, 59, 63, 56, 58, 56, 63, 55, 71, 75, 51, 88, 110, 89, 71, 93, 67, 68, 105, 59, 92, 72, 66, 66, 81, 74, 64, 65, 70, 59, 54, 65, 73, 69, 66, 67, 98, 58, 77, 139, 58, 61, 60, 59, 69, 64, 73, 66, 54, 59, 51, 61, 65, 79, 63, 68, 71, 59, 66, 65, 52, 54, 61, 78, 63, 84, 68, 73, 45, 80, 73, 122, 73, 66, 73, 76, 71, 81, 47, 59, 64, 57, 52, 92, 59, 64, 68, 55, 61, 60, 57, 61, 67, 53, 109, 78, 53, 69, 51, 61, 59, 62, 57, 57, 44, 59, 69, 87, 55, 58, 73, 62, 74, 77, 55, 60, 86, 57, 69, 56, 62, 59, 73, 70, 92, 57, 40, 53, 68, 61, 58, 71, 62, 68, 53, 68, 74, 121, 58, 68, 68, 74, 67, 59, 119, 53, 61, 65, 62, 65, 54, 62, 63, 75, 51, 67, 73, 60, 46, 63, 63, 54, 63, 54, 53, 66, 59, 60, 73, 58, 72, 67, 74, 68, 67, 53, 62, 66, 64, 84, 62, 44, 78, 55, 68, 68, 73, 65, 74, 67, 61, 61, 57, 72, 66, 59, 66, 148, 72, 69, 68, 62, 76, 79, 59, 66, 57, 63, 53, 64, 47, 62, 59, 72, 74, 56, 65, 62, 72, 58, 71, 64, 67, 88, 60, 60, 57, 73, 47, 60, 58, 64, 79, 67, 69, 60, 114, 78, 57, 62, 60, 60, 56, 77, 74, 74, 63, 57, 55, 68, 59, 83, 72, 58, 80, 66, 49, 53, 58, 60, 49, 59, 84, 48, 72, 67, 72, 68, 70, 81, 73, 72, 44, 51, 52, 57, 90, 70, 47, 64, 53, 71, 67, 60, 61, 58, 64, 41, 75, 70, 77, 85, 81, 64, 100, 54, 61, 89, 72, 65, 83, 68, 59, 69, 98, 74, 69, 85, 73, 61, 64, 67, 71, 64, 90, 64, 61, 94, 64, 61, 57, 62, 90, 61, 74, 71, 47, 70, 62, 72, 71, 62, 59, 67, 61, 60, 51, 59, 75, 69, 57, 57, 55, 75, 68, 61, 56, 41, 67, 71, 57, 61, 90, 57, 52, 63, 75, 54, 61, 64, 69, 48, 64, 75, 44, 57, 64, 86, 77, 58, 71, 52, 57, 44, 62, 76, 57, 68, 44, 40, 49, 56, 59, 79, 86, 54, 65, 66, 66, 61, 76, 58, 68, 73, 64, 69, 66, 69, 62, 53, 85, 61, 57, 76, 68, 77, 65, 58, 63, 59, 59, 63, 54, 61, 72, 55, 78, 62, 60, 61, 63, 78, 94, 61, 58, 53, 63, 63, 64, 57, 63, 61, 67, 56, 33, 74, 55, 59, 79, 65, 62, 56, 42, 70, 51, 66, 62, 55, 68, 72, 57, 78, 69, 56, 87, 65, 84, 59, 56, 59, 53, 77, 54, 61, 56, 59, 79, 78, 69, 53, 45, 67, 52, 61, 68, 78, 52, 93, 45, 46, 65, 58, 85, 47, 51, 85, 101, 58, 58, 57, 68, 87, 55, 57, 66, 71, 62, 65, 70, 60, 60, 64, 53, 77, 76, 62, 65, 61, 65, 62, 44, 69, 75, 60, 85, 70, 82, 59, 63, 106, 56, 67, 63, 47, 69, 60, 100, 67, 102, 65, 74, 72, 52, 73, 85, 75, 64, 51, 70, 60, 56, 60, 63, 52, 77, 69, 74, 79, 60, 52, 59, 63, 59, 77, 67, 71, 52, 83, 75, 60, 72, 61, 59, 61, 95, 64, 51, 58, 81, 64, 62, 74, 50, 89, 62, 43, 62, 66, 49, 54, 92, 75, 47, 66, 76, 71, 68, 59, 64, 51, 54, 77, 55, 56, 75, 68, 48, 70, 72, 67, 76, 61, 75, 63, 53, 63, 62, 43, 60, 59, 58, 94, 60, 68, 84, 77, 66, 87, 67, 73, 76, 70, 63, 47, 71, 58, 61, 75, 64, 59, 59, 47, 79, 53, 83, 69, 42, 82, 75, 75, 73, 61, 60, 59, 67, 65, 59, 60, 80, 65, 73, 80, 66, 74, 53, 59, 59, 50, 65, 66, 60, 71, 61, 49, 55, 56, 53, 61, 61, 62, 42, 66, 62, 55, 101, 76, 55, 49, 46, 77, 55, 69, 51, 68, 51, 79, 41, 74, 66, 158, 
40, 66, 52, 61, 56, 148, 73, 59, 52, 61, 64, 74, 82, 45, 71, 62, 60, 76, 82, 81, 63, 73, 52, 70, 71, 65, 105, 63, 69, 60, 88, 77, 62, 64, 60, 73, 56, 44, 75, 72, 43, 49, 56, 63, 61, 76, 70, 58, 58, 67, 59, 77, 76, 69, 63, 69, 74, 74, 45, 48, 73, 89, 50, 83, 61, 47, 54, 42, 74, 79, 76, 72, 64, 67, 77, 66, 53, 82, 55, 73, 83, 59, 57, 28, 61, 55, 49, 57, 52, 60, 68, 61, 82, 54, 82, 71, 84, 47, 58, 57, 70, 80, 49, 55, 65, 53, 68, 56, 91, 53, 102, 51, 59, 50, 64, 72, 60, 67, 78, 61, 52, 59, 58, 69, 73, 64, 68, 73, 54, 68, 62, 52, 75, 78, 66, 65, 89, 58, 61, 62, 57, 64, 57, 64, 54, 71, 69, 63, 52, 64, 63, 52, 92, 63, 55, 68, 74, 83, 69, 51, 75, 51, 35, 67, 51, 45, 75, 66, 53, 73, 58, 56, 84, 62, 48, 60, 61, 78, 80, 44, 67, 63, 60, 62, 59, 67, 68, 67, 68, 64, 76, 100, 68, 49, 69, 68, 43, 65, 101, 65, 53, 74, 68, 60, 73, 58, 53, 55, 68, 74, 51, 62, 69, 58, 44, 75, 75, 58, 70, 59, 72, 64, 100, 61, 51, 46, 57, 98, 63, 57, 73, 65, 55, 69, 53, 56, 75, 47, 65, 66, 64, 66, 78, 57, 65, 61, 71, 75, 98, 75, 54, 50, 51, 86, 63, 66, 54, 81, 77, 111, 46, 54, 56, 68, 57, 66, 71, 71, 68, 50, 65, 52, 54, 53, 32, 60, 58, 65, 70, 58, 64, 57, 70, 75, 115, 59, 63, 69, 62, 77, 47, 75, 60, 56, 60, 61, 57, 46, 56, 48, 57, 60, 93, 73, 50, 72, 52, 56, 54, 71, 84, 60, 64, 76, 52, 73, 78, 65, 66, 65, 70, 81, 59, 133, 57, 72, 61, 55, 70, 55, 78, 97, 61, 54, 59, 78, 74, 66, 74, 93, 71, 75, 112, 96, 59, 48, 75, 60, 83, 63, 53, 70, 75, 80, 77, 89, 60, 58, 80, 78, 63, 84, 66, 81, 54, 62, 58, 57, 85, 81, 60, 79, 67, 54, 49, 68, 68, 76, 67, 61, 75, 61, 76, 61, 86, 61, 68, 130, 65, 54, 59, 62, 80, 66, 66, 66, 59, 53, 55, 107, 65, 62, 65, 57, 59, 68, 89, 55, 55, 69, 52, 62, 40, 59, 65, 57, 69, 73, 75, 55, 67, 112, 59, 52, 69, 52, 58, 53, 70, 66, 51, 68, 69, 51, 75, 59, 57, 59, 61, 68, 58, 49, 57, 61, 60, 50, 103, 71, 54, 88, 82, 58, 60, 47, 92, 66, 71, 44, 75, 70, 72, 75, 54, 43, 61, 64, 86, 65, 65, 56, 67, 89, 65, 38, 64, 84, 70, 79, 71, 34, 61, 64, 49, 59, 55, 78, 69, 92, 69, 56, 75, 64, 51, 81, 60, 65, 75, 65, 57, 57, 58, 48, 60, 63, 89, 72, 55, 88, 80, 51, 71, 90, 51, 55, 102, 74, 79, 65, 81, 71, 55, 54, 52, 51, 73, 51, 49, 75, 60, 106, 46, 86, 56, 97, 58, 58, 65, 74, 82, 67, 65, 75, 66, 41, 59, 76, 68, 54, 54, 68, 62, 79, 80, 67, 66, 52, 63, 64, 87, 66, 59, 47, 68, 59, 58, 73, 64, 59, 71, 57, 65, 93, 71, 69, 74, 59, 77, 57, 79, 64, 75, 71, 63, 64, 67, 62, 74, 51, 78, 61, 60, 67, 51, 62, 84, 56, 67, 98, 53, 101, 58, 56, 52, 60, 82, 59, 77, 65, 99, 46, 61, 94, 61, 43, 62, 81, 70, 58, 65, 68, 68, 58, 60, 63, 64, 58, 89, 74, 66, 59, 68, 76, 67, 64, 47, 50, 68, 61, 56, 62, 57, 64, 44, 94, 50, 51, 58, 49, 73, 64, 138, 51, 57, 53, 51, 65, 64, 62, 78, 60, 71, 80, 66, 55, 56, 64, 55, 61, 58, 79, 61, 48, 61, 53, 56, 68, 73, 89, 70, 64, 78, 76, 48, 56, 59, 73, 65, 76, 52, 76, 64, 43, 77, 72, 84, 69, 92, 68, 66, 74, 60, 48, 67, 57, 72, 93, 77, 100, 48, 65, 64, 58, 56, 107, 56, 63, 51, 62, 52, 60, 57, 60, 61, 56, 91, 52, 63, 57, 72, 59, 57, 60, 66, 71, 56, 66, 66, 45, 58, 72, 84, 65, 69, 60, 53, 58, 51, 52, 69, 80, 72, 71, 83, 69, 72, 57, 53, 45, 73, 66, 57, 62, 76, 46, 43, 69, 49, 70, 46, 62, 89, 67, 52, 66, 52, 87, 98, 54, 74, 50, 106, 53, 43, 59, 56, 79, 55, 96, 45, 93, 62, 49, 47, 66, 50, 48, 82, 71, 50, 83, 110, 79, 91, 81, 35, 70, 68, 76, 60, 111, 73, 69, 41, 68, 62, 78, 77, 51, 68, 64, 64, 104, 50, 75, 78, 52, 63, 60, 100, 62, 59, 51, 74, 105, 90, 51, 78, 68, 127, 70, 101, 79, 56, 61, 71, 68, 66, 66, 57, 61, 72, 58, 65, 67, 61, 68, 61, 102, 58, 71, 47, 73, 75, 63, 55, 65, 70, 57, 66, 53, 47, 66, 81, 70, 44, 42, 91, 67, 74, 79, 63, 
62, 80, 68, 86, 61, 67, 82, 75, 75, 77, 54, 53, 51, 67, 66, 63, 73, 73, 79, 59, 79, 54, 72, 85, 66, 49, 52, 61, 93, 62, 66, 72, 73, 73, 48, 68, 44, 44, 64, 64, 53, 56, 48, 73, 68, 112, 100, 65, 65, 54, 81, 70, 60, 56, 56, 92, 46, 68, 65, 49, 89, 60, 51, 57, 60, 66, 56, 76, 58, 61, 51, 76, 54, 59, 67, 75, 46, 64, 57, 66, 68, 50, 57, 49, 66, 67, 57, 47, 68, 63, 137, 47, 74, 81, 57, 70, 64, 70, 87, 45, 56, 68, 63, 74, 46, 71, 59, 46, 47, 55, 126, 61, 88, 76, 52, 88, 36, 52, 75, 48, 69, 55, 66, 80, 59, 63, 70, 78, 81, 68, 59, 57, 65, 66, 66, 67, 83, 58, 70, 64, 78, 56, 52, 65, 37, 56, 73, 55, 77, 74, 67, 65, 51, 63, 45, 66, 80, 81, 54, 61, 74, 61, 56, 87, 55, 54, 74, 72, 57, 68, 60, 65, 69, 61, 66, 83, 55, 59, 63, 62, 71, 108, 64, 80, 52, 59, 63, 49, 78, 52, 54, 55, 77, 57, 112, 108, 46, 60, 35, 57, 61, 66, 54, 78, 77, 62, 62, 67, 72, 69, 58, 61, 87, 81, 43, 75, 77, 54, 48, 80, 73, 79, 54, 67, 75, 89, 50, 97, 64, 85, 71, 77, 49, 56, 66, 46, 58, 51, 108, 43, 61, 58, 53, 56, 60, 44, 77, 68, 52, 59, 53, 61, 64, 80, 71, 63, 56, 72, 81, 85, 76, 63, 66, 68, 121, 62, 62, 53, 46, 48, 29, 70, 84, 94, 52, 59, 66, 74, 60, 50, 54, 57, 67, 60, 82, 85, 85, 49, 69, 53, 61, 55, 48, 54, 58, 57, 60, 72, 86, 102, 67, 37, 61, 54, 70, 65, 85, 60, 115, 77, 54, 38, 55, 91, 63, 58, 99, 70, 47, 74, 56, 62, 53, 66, 74, 70, 56, 71, 51, 93, 77, 56, 62, 57, 74, 109, 55, 60, 76, 55, 65, 54, 95, 52, 58, 69, 46, 57, 50, 113, 62, 63, 66, 48, 63, 63, 94, 74, 66, 109, 78, 58, 173, 51, 59, 54, 60, 57, 74, 65, 93, 48, 52, 57, 41, 53, 61, 79, 66, 51, 45, 55, 64, 77, 70, 70, 67, 80, 46, 58, 68, 84, 112, 82, 50, 35, 51, 45, 80, 75, 43, 56, 53, 61, 50, 70, 75, 74, 66, 73, 57, 89, 53, 50, 50, 40, 98, 68, 72, 48, 44, 76, 80, 118, 53, 54, 48, 55, 40, 66, 92, 65, 67, 60, 69, 64, 47, 57, 51, 64, 71, 60, 56, 54, 69, 61, 52, 54, 87, 72, 48, 60, 60, 58, 61, 61, 54, 62, 67, 56, 79, 67, 77, 58, 78, 71, 67, 100, 101, 40, 47, 72, 67, 48, 60, 50, 63, 61, 86, 73, 67, 90, 53, 80, 71, 80, 58, 58, 67, 76, 53, 57, 69, 61, 68, 65, 71, 68, 64, 80, 52, 56, 63, 54, 65, 45, 42, 85, 90, 59, 62, 51, 62, 94, 75, 52, 66, 106, 49, 47, 76, 53, 51, 48, 68, 79, 98, 56, 59, 71, 76, 78, 64, 78, 70, 49, 53, 80, 55, 57, 55, 53, 46, 30, 57, 51, 81, 57, 59, 51, 50, 53, 38, 71, 47, 54, 62, 92, 68, 63, 94, 82, 49, 87, 54, 66, 69, 78, 84, 49, 72, 51, 60, 48, 52, 73, 64, 40, 89, 60, 68, 91, 63, 50, 82, 52, 51, 88, 111, 79, 71, 63, 58, 72, 56, 57, 61, 71, 69, 59, 59, 156, 73, 62, 77, 65, 77, 68, 70, 97, 67, 51, 90, 42, 72, 61, 56, 84, 47, 61, 50, 44, 58, 65, 48, 97, 67, 51, 57, 36, 87, 85, 60, 109, 58, 75, 43, 75, 68, 55, 60, 65, 62, 64, 67, 71, 120, 63, 51, 94, 59, 60, 71, 54, 118, 58, 60, 56, 56, 74, 62, 63, 65, 85, 111, 74, 58, 118, 68, 53, 90, 64, 71, 43, 69, 65, 57, 67, 58, 96, 68, 66, 61, 50, 66, 51, 76, 54, 76, 60, 79, 63, 66, 66, 85, 62, 118, 40, 73, 57, 79, 80, 49, 55, 86, 63, 60, 66, 50, 74, 61, 65, 69, 59, 86, 70, 67, 60, 97, 68, 67, 60, 66, 68, 30, 72, 72, 57, 58, 59, 66, 44, 71, 51, 52, 79, 53, 57, 62, 44, 78, 54, 56, 62, 54, 65, 54, 72, 62, 63, 70, 66, 56, 54, 78, 73, 72, 69, 78, 68, 83, 68, 77, 59, 64, 62, 54, 87, 61, 67, 53, 75, 49, 42, 67, 62, 73, 55, 85, 67, 55, 80, 81, 78, 62, 46, 56, 55, 65, 90, 77, 86, 78, 80, 66, 51, 88, 92, 57, 78, 77, 63, 66, 63, 47, 45, 56, 77, 91, 74, 69, 54, 57, 70, 77, 71, 102, 59, 51, 89, 56, 53, 79, 63, 65, 55, 65, 71, 46, 99, 77, 56, 43, 69, 57, 88, 76, 71, 120, 68, 84, 68, 77, 64, 82, 64, 47, 52, 62, 68, 67, 60, 67, 62, 67, 86, 59, 56, 101, 62, 86, 71, 43, 47, 65, 60, 81, 62, 68, 53, 59, 56, 56, 89, 50, 95, 40, 77, 62, 53, 62, 
69, 70, 81, 100, 72, 59, 56, 39, 54, 69, 73, 67, 68, 61, 83, 55, 61, 69, 56, 61, 72, 66, 55, 68, 58, 49, 65, 62, 67, 52, 66, 77, 59, 66, 64, 47, 83, 85, 68, 85, 61, 60, 95, 59, 62, 58, 73, 73, 84, 71, 84, 61, 69, 72, 47, 55, 66, 72, 57, 62, 74, 54, 113, 82, 56, 39, 71, 57, 55, 58, 73, 53, 63, 75, 56, 68, 60, 68, 77, 67, 87, 70, 63, 61, 54, 73, 81, 64, 89, 61, 71, 58, 57, 71, 48, 89, 48, 75, 81, 60, 82, 56, 65, 65, 40, 78, 49, 97, 69, 55, 45, 74, 46, 64, 66, 68, 72, 99, 83, 69, 77, 82, 75, 62, 79, 58, 135, 65, 64, 60, 73, 68, 67, 80, 70, 69, 73, 64, 100, 52, 62, 59, 56, 156, 63, 48, 80, 36, 61, 60, 71, 51, 66, 67, 51, 84, 57, 52, 63, 62, 69, 79, 55, 56, 69, 61, 87, 54, 52, 67, 53, 83, 90, 70, 64, 61, 61, 64, 76, 45, 61, 46, 54, 68, 73, 65, 90, 57, 77, 61, 68, 72, 58, 54, 69, 75, 71, 78, 56, 70, 81, 53, 55, 70, 56, 67, 59, 52, 70, 56, 70, 70, 91, 54, 60, 63, 62, 78, 59, 84, 48, 45, 53, 82, 58, 65, 46, 66, 39, 69, 46, 89, 57, 47, 78, 53, 53, 61, 70, 60, 59, 53, 65, 62, 57, 72, 87, 50, 131, 107, 70, 59, 73, 84, 52, 54, 65, 80, 64, 52, 64, 61, 63, 71, 74, 60, 90, 86, 59, 47, 68, 82, 50, 42, 58, 73, 43, 66, 69, 65, 47, 42, 43, 65, 67, 61, 63, 49, 66, 77, 65, 82, 60, 60, 75, 71, 76, 68, 84, 75, 71, 68, 69, 41, 78, 81, 42, 44, 70, 110, 102, 70, 67, 88, 67, 58, 98, 61, 73, 83, 67, 45, 50, 73, 49, 60, 71, 61, 69, 58, 65, 64, 51, 101, 74, 82, 62, 106, 79, 60, 69, 77, 52, 60, 64, 112, 87, 61, 56, 58, 87, 81, 69, 91, 66, 67, 70, 89, 80, 68, 80, 67, 57, 81, 77, 55, 66, 64, 58, 73, 51, 72, 68, 105, 87, 78, 60, 68, 58, 68, 58, 95, 74, 56, 76, 61, 53, 68, 53, 45, 70, 62, 64, 74, 74, 80, 72, 63, 63, 69, 74, 54, 70, 65, 67, 50, 67, 141, 66, 59, 62, 50, 68, 58, 53, 57, 74, 76, 56, 52, 54, 69, 42, 64, 44, 81, 62, 62, 150, 78, 70, 52, 54, 54, 79, 62, 77, 55, 77, 79, 56, 134, 50, 59, 78, 69, 89, 80, 87, 73, 51, 70, 69, 69, 89, 48, 65, 51, 49, 83, 90, 93, 55, 48, 54, 95, 65, 78, 76, 65, 50, 61, 58, 54, 72, 57, 54, 66, 58, 74, 67, 70, 82, 57, 61, 85, 61, 60, 44, 56, 51, 72, 51, 71, 73, 76, 71, 54, 72, 67, 61, 55, 79, 61, 79, 86, 49, 56, 63, 66, 63, 89, 57, 105, 52, 59, 63, 64, 56, 39, 43, 73, 82, 72, 55, 83, 71, 60, 52, 73, 61, 76, 72, 80, 59, 62, 84, 64, 59, 73, 66, 60, 49, 66, 67, 57, 54, 82, 68, 66, 47, 60, 84, 80, 60, 50, 49, 59, 53, 59, 47, 72, 64, 45, 70, 77, 50, 62, 93, 60, 70, 71, 69, 83, 53, 56, 53, 88, 96, 57, 59, 60, 100, 62, 61, 48, 70, 71, 57, 78, 56, 50, 42, 80, 49, 62, 64, 59, 58, 61, 71, 52, 72, 118, 68, 54, 60, 59, 72, 78, 97, 55, 76, 61, 67, 66, 68, 56, 54, 58, 64, 80, 73, 53, 57, 52, 77, 54, 50, 62, 52, 76, 62, 57, 75, 64, 95, 63, 56, 44, 73, 55, 53, 75, 70, 54, 77, 59, 70, 63, 60, 70, 93, 80, 49, 45, 54, 97, 52, 62, 48, 80, 66, 102, 85, 77, 46, 82, 59, 72, 63, 50, 66, 85, 140, 60, 62, 53, 56, 62, 62, 72, 50, 82, 55, 69, 69, 121, 56, 69, 60, 68, 57, 75, 79, 69, 92, 52, 60, 49, 56, 51, 72, 79, 96, 46, 85, 83, 66, 53, 78, 62, 43, 59, 56, 56, 73, 72, 63, 51, 58, 53, 66, 74, 60, 75, 77, 87, 55, 60, 82, 72, 56, 60, 69, 78, 85, 56, 54, 68, 60, 63, 76, 52, 77, 61, 69, 61, 73, 82, 65, 81, 85, 85, 58, 84, 76, 78, 59, 59, 63, 60, 48, 76, 52, 66, 43, 64, 66, 79, 83, 74, 48, 78, 51, 56, 75, 65, 61, 95, 87, 64, 56, 69, 84, 63, 56, 45, 48, 79, 59, 49, 80, 77, 52, 47, 66, 69, 45, 65, 58, 72, 111, 54, 51, 80, 72, 78, 64, 67, 106, 48, 67, 43, 55, 61, 77, 57, 53, 71, 70, 54, 70, 63, 73, 68, 55, 58, 65, 76, 62, 95, 42, 74, 55, 91, 59, 59, 68, 67, 65, 87, 82, 51, 97, 62, 73, 81, 68, 58, 68, 60, 66, 61, 60, 26, 72, 46, 78, 56, 59, 46, 51, 56, 58, 39, 50, 54, 67, 66, 55, 78, 79, 63, 61, 67, 82, 81, 81, 65, 51, 70, 
57, 53, 65, 67, 42, 72, 78, 79, 68, 90, 80, 57, 62, 61, 51, 55, 56, 93, 48, 60, 63, 71, 58, 95, 64, 76, 63, 89, 74, 58, 51, 73, 69, 99, 42, 64, 54, 66, 55, 55, 63, 94, 62, 49, 66, 71, 78, 92, 58, 66, 77, 59, 65, 61, 79, 65, 57, 57, 59, 59, 42, 68, 87, 57, 59, 48, 63, 69, 66, 80, 68, 43, 65, 69, 69, 50, 113, 60, 73, 46, 68, 57, 40, 77, 65, 68, 64, 89, 58, 58, 74, 62, 71, 62, 73, 49, 62, 62, 64, 83, 65, 60, 80, 65, 54, 61, 70, 73, 51, 50, 69, 75, 57, 43, 85, 53, 67, 74, 56, 51, 59, 53, 74, 58, 61, 73, 70, 63, 72, 67, 56, 41, 67, 60, 61, 59, 63, 97, 65, 79, 64, 67, 82, 48, 54, 55, 66, 71, 77, 61, 60, 42, 73, 80, 74, 59, 69, 86, 45, 71, 101, 81, 73, 54, 50, 42, 50, 64, 63, 56, 49, 79, 64, 77, 51, 58, 60, 52, 65, 66, 75, 51, 78, 67, 65, 61, 66, 61, 82, 85, 49, 57, 56, 68, 83, 70, 50, 56, 56, 69, 83, 60, 55, 66, 45, 67, 62, 91, 71, 60, 34, 74, 68, 81, 82, 62, 64, 88, 79, 67, 100, 90, 68, 67, 49, 59, 51, 76, 58, 141, 61, 40, 65, 56, 69, 71, 59, 94, 70, 53, 60, 69, 72, 57, 66, 56, 82, 85, 80, 53, 60, 46, 66, 53, 47, 68, 68, 65, 72, 67, 52, 69, 66, 89, 67, 71, 59, 71, 89, 69, 71, 78, 67, 68, 67, 66, 75, 67, 62, 71, 47, 73, 63, 68, 70, 52, 55, 89, 66, 42, 82, 79, 68, 60, 62, 57, 70, 58, 69, 53, 68, 47, 65, 62, 68, 73, 61, 79, 50, 65, 80, 70, 69, 73, 65, 71, 81, 76, 39, 69, 75, 67, 76, 59, 54, 83, 82, 64, 58, 56, 113, 53, 62, 57, 55, 71, 50, 53, 62, 64, 93, 79, 51, 85, 65, 61, 74, 61, 36, 61, 68, 67, 68, 61, 48, 54, 106, 74, 55, 52, 65, 80, 68, 80, 59, 78, 84, 56, 67, 59, 51, 71, 56, 72, 62, 59, 65, 88, 99, 67, 93, 81, 61, 48, 71, 78, 69, 69, 64, 94, 82, 85, 60, 52, 73, 61, 70, 59, 70, 73, 51, 55, 78, 49, 60, 35, 51, 76, 71, 76, 52, 58, 82, 62, 111, 56, 59, 60, 82, 62, 44, 83, 54, 78, 70, 90, 71, 68, 71, 67, 82, 74, 61, 59, 64, 50, 60, 65, 77, 93, 55, 54, 38, 60, 87, 69, 80, 81, 56, 85, 64, 62, 60, 56, 62, 66, 54, 43, 65, 48, 75, 46, 52, 41, 74, 63, 67, 68, 51, 51, 54, 51, 72, 61, 76, 53, 80, 76, 59, 42, 53, 45, 71, 71, 69, 61, 62, 73, 52, 50, 68, 46, 77, 60, 74, 58, 52, 80, 58, 74, 57, 55, 51, 65, 46, 62, 64, 80, 65, 82, 62, 88, 62, 82, 86, 66, 80, 50, 63, 51, 63, 83, 61, 67, 60, 74, 53, 91, 51, 105, 70, 64, 65, 74, 60, 146, 91, 61, 73, 47, 41, 71, 70, 63, 84, 52, 70, 57, 75, 79, 55, 69, 71, 78, 86, 65, 53, 104, 67, 72, 80, 58, 83, 77, 68, 58, 76, 72, 60, 77, 75, 35, 73, 81, 62, 50, 70, 74, 63, 52, 71, 59, 57, 62, 50, 77, 64, 44, 52, 75, 56, 46, 69, 68, 59, 102, 66, 51, 71, 69, 53, 95, 74, 72, 91, 68, 67, 81, 67, 58, 95, 42, 72, 91, 73, 57, 57, 64, 78, 59, 77, 63, 70, 45, 57, 34, 57, 71, 72, 80, 58, 57, 38, 77, 78, 59, 86, 72, 50, 78, 63, 91, 62, 45, 63, 103, 58, 58, 75, 61, 53, 45, 55, 64, 91, 68, 75, 54, 47, 68, 63, 65, 60, 69, 64, 37, 53, 78, 135, 71, 64, 48, 60, 54, 73, 75, 61, 60, 118, 55, 68, 90, 58, 62, 27, 69, 43, 55, 70, 81, 76, 72, 87, 60, 98, 74, 66, 69, 71, 51, 63, 63, 85, 78, 86, 46, 52, 77, 61, 74, 54, 51, 66, 88, 63, 75, 41, 64, 66, 75, 63, 88, 64, 66, 74, 43, 51, 56, 44, 62, 87, 100, 54, 71, 45, 78, 95, 94, 66, 69, 52, 54, 52, 103, 81, 60, 65, 50, 103, 63, 106, 83, 80, 85, 66, 50, 77, 68, 52, 75, 37, 58, 99, 59, 63, 47, 65, 63, 60, 51, 55, 103, 60, 57, 66, 50, 75, 61, 67, 64, 67, 71, 48, 74, 69, 62, 112, 60, 57, 47, 53, 74, 55, 67, 82, 59, 63, 61, 99, 64, 54, 73, 53, 74, 65, 75, 50, 38, 70, 69, 60, 66, 58, 49, 68, 61, 73, 61, 70, 60, 57, 71, 64, 65, 89, 70, 59, 64, 75, 69, 55, 59, 39, 56, 55, 58, 71, 51, 77, 73, 67, 56, 48, 58, 84, 70, 66, 72, 59, 77, 48, 60, 64, 69, 59, 58, 57, 58, 58, 48, 50, 86, 59, 76, 65, 82, 52, 65, 74, 111, 67, 97, 73, 58, 69, 122, 56, 52, 53, 57, 65, 47, 67, 
67, 63, 60, 57, 53, 53, 68, 79, 89, 62, 56, 50, 57, 46, 77, 54, 60, 57, 61, 75, 67, 65, 59, 67, 58, 66, 64, 73, 111, 56, 89, 73, 63, 55, 75, 76, 70, 78, 72, 52, 102, 69, 56, 80, 56, 58, 54, 74, 48, 72, 71, 61, 71, 76, 52, 55, 74, 41, 62, 56, 68, 72, 94, 53, 49, 161, 98, 60, 68, 79, 68, 75, 61, 73, 35, 48, 72, 89, 65, 75, 64, 56, 76, 58, 54, 72, 43, 62, 71, 95, 91, 73, 44, 59, 64, 51, 57, 80, 55, 71, 65, 72, 57, 64, 50, 58, 35, 85, 75, 55, 72, 57, 70, 63, 57, 91, 57, 50, 49, 50, 55, 55, 57, 63, 47, 69, 46, 54, 67, 59, 71, 63, 52, 75, 74, 62, 62, 73, 75, 62, 53, 72, 73, 72, 73, 53, 83, 67, 59, 67, 73, 63, 68, 65, 106, 45, 63, 86, 57, 52, 46, 40, 57, 74, 69, 87, 62, 80, 36, 36, 63, 93, 77, 52, 72, 61, 52, 133, 59, 86, 46, 55, 73, 74, 73, 43, 83, 63, 88, 61, 59, 70, 69, 77, 66, 80, 120, 40, 88, 63, 71, 49, 68, 66, 86, 52, 71, 67, 57, 85, 62, 81, 85, 66, 57, 77, 59, 51, 84, 68, 56, 49, 60, 64, 79, 64, 84, 44, 65, 69, 39, 67, 55, 76, 93, 55, 53, 73, 72, 81, 75, 74, 72, 100, 89, 78, 59, 53, 69, 53, 73, 62, 51, 74, 78, 58, 42, 117, 61, 64, 58, 75, 58, 50, 71, 69, 62, 77, 66, 40, 67, 76, 64, 52, 84, 60, 46, 55, 60, 56, 86, 46, 73, 66, 73, 60, 64, 74, 53, 65, 68, 73, 58, 67, 56, 65, 58, 89, 66, 111, 58, 86, 70, 80, 53, 60, 55, 64, 49, 60, 69, 64, 53, 58, 60, 45, 55, 57, 46, 63, 88, 61, 93, 67, 37, 62, 49, 72, 80, 79, 91, 67, 62, 54, 69, 85, 79, 56, 54, 85, 59, 79, 79, 79, 51, 60, 58, 60, 75, 90, 55, 77, 67, 61, 54, 68, 51, 66, 52, 57, 72, 64, 59, 63, 52, 79, 74, 61, 67, 70, 69, 71, 50, 65, 79, 52, 68, 53, 57, 53, 80, 160, 45, 71, 59, 92, 65, 86, 67, 67, 73, 55, 50, 65, 59, 74, 76, 93, 52, 66, 67, 33, 54, 69, 87, 57, 75, 67, 43, 76, 61, 64, 67, 68, 80, 70, 68, 103, 84, 62, 63, 45, 68, 60, 63, 60, 64, 62, 78, 76, 43, 66, 75, 75, 56, 59, 80, 88, 50, 63, 66, 52, 79, 65, 59, 90, 61, 45, 57, 69, 83, 83, 53, 68, 73, 58, 61, 59, 75, 45, 60, 81, 84, 44, 40, 63, 84, 78, 80, 49, 23, 62, 68, 81, 56, 81, 70, 59, 67, 48, 26, 66, 63, 60, 85, 48, 64, 78, 91, 54, 67, 68, 62, 47, 62, 68, 88, 74, 73, 61, 47, 59, 87, 65, 59, 72, 88, 75, 72, 71, 60, 77, 74, 48, 67, 59, 57, 79, 75, 67, 62, 46, 54, 64, 59, 92, 86, 62, 88, 69, 103, 42, 68, 47, 58, 62, 59, 59, 66, 62, 55, 57, 56, 51, 59, 65, 74, 69, 60, 60, 64, 76, 74, 60, 51, 88, 56, 81, 58, 69, 45, 65, 80, 60, 104, 68, 76, 64, 82, 73, 69, 52, 79, 96, 71, 53, 95, 74, 64, 68, 82, 56, 61, 52, 59, 60, 85, 87, 59, 68, 91, 67, 79, 53, 66, 58, 48, 43, 40, 77, 76, 86, 68, 75, 61, 85, 46, 78, 71, 63, 52, 58, 47, 62, 41, 118, 69, 50, 69, 69, 48, 75, 67, 49, 75, 62, 59, 72, 91, 67, 58, 67, 62, 64, 69, 52, 80, 49, 53, 58, 67, 65, 57, 84, 47, 51, 62, 63, 70, 60, 79, 66, 53, 43, 89, 59, 65, 67, 69, 81, 82, 77, 87, 70, 55, 61, 70, 68, 65, 61, 77, 74, 75, 48, 63, 54, 60, 76, 41, 70, 56, 64, 49, 70, 69, 47, 49, 47, 67, 46, 46, 51, 59, 60, 65, 52, 63, 82, 65, 92, 70, 74, 63, 54, 54, 70, 62, 47, 84, 68, 68, 64, 72, 78, 72, 68, 58, 65, 48, 52, 66, 79, 70, 53, 64, 80, 55, 61, 46, 42, 95, 53, 53, 80, 63, 48, 110, 59, 63, 133, 58, 96, 50, 75, 61, 34, 61, 55, 107, 87, 101, 69, 57, 73, 73, 85, 73, 71, 60, 46, 59, 109, 55, 57, 90, 65, 79, 62, 57, 60, 52, 59, 49, 52, 57, 61, 68, 59, 56, 50, 67, 51, 55, 99, 75, 74, 96, 79, 91, 64, 64, 59, 60, 68, 71, 53, 48, 73, 49, 55, 75, 57, 51, 59, 69, 69, 89, 67, 43, 85, 49, 75, 77, 72, 58, 43, 81, 72, 86, 67, 51, 68, 73, 77, 101, 84, 68, 70, 55, 61, 62, 71, 42, 81, 65, 86, 74, 47, 62, 62, 65, 64, 68, 70, 60, 57, 53, 81, 57, 67, 53, 88, 53, 65, 64, 70, 59, 171, 66, 51, 60, 74, 66, 66, 62, 66, 57, 69, 48, 57, 58, 55, 52, 68, 47, 59, 46, 74, 34, 72, 49, 47, 60, 
76, 58, 94, 41, 56, 55, 81, 104, 90, 73, 41, 76, 63, 59, 67, 51, 74, 112, 64, 65, 135, 69, 66, 36, 55, 78, 52, 72, 46, 46, 63, 65, 68, 65, 62, 97, 71, 69, 65, 87, 49, 44, 49, 69, 65, 79, 67, 56, 74, 75, 54, 61, 69, 54, 61, 71, 58, 56, 74, 53, 63, 66, 71, 85, 66, 44, 44, 50, 66, 62, 60, 57, 53, 77, 67, 68, 72, 59, 65, 47, 73, 70, 52, 56, 77, 56, 74, 83, 80, 48, 57, 83, 86, 97, 33, 70, 56, 57, 52, 63, 66, 64, 64, 71, 90, 54, 60, 49, 59, 42, 59, 80, 81, 57, 78, 55, 69, 43, 77, 59, 57, 77, 71, 60, 48, 48, 66, 209, 50, 95, 97, 76, 57, 61, 66, 69, 62, 73, 61, 61, 48, 62, 74, 60, 47, 45, 79, 68, 59, 76, 64, 59, 75, 79, 58, 81, 56, 64, 51, 66, 72, 51, 93, 88, 64, 64, 65, 79, 71, 109, 57, 66, 47, 84, 64, 71, 68, 66, 52, 74, 45, 36, 51, 92, 81, 66, 60, 79, 80, 57, 85, 75, 57, 88, 72, 68, 47, 81, 79, 54, 87, 83, 61, 72, 86, 86, 86, 59, 44, 52, 66, 42, 68, 58, 53, 65, 59, 48, 71, 51, 74, 65, 78, 49, 66, 53, 67, 68, 77, 53, 63, 73, 67, 53, 55, 67, 72, 48, 57, 47, 59, 92, 78, 64, 81, 62, 61, 59, 104, 68, 62, 50, 67, 56, 70, 66, 71, 64, 55, 71, 62, 75, 96, 59, 62, 55, 77, 63, 60, 74, 60, 63, 63, 71, 70, 69, 64, 58, 49, 96, 72, 74, 72, 66, 68, 50, 63, 65, 106, 69, 64, 41, 83, 68, 93, 61, 43, 49, 53, 78, 62, 89, 70, 54, 29, 47, 64, 71, 46, 68, 59, 52, 59, 71, 70, 72, 91, 63, 70, 56, 88, 71, 66, 73, 64, 73, 59, 49, 61, 83, 64, 66, 67, 54, 70, 97, 74, 75, 54, 55, 83, 62, 78, 79, 54, 100, 69, 72, 58, 61, 57, 99, 148, 92, 80, 74, 60, 71, 82, 55, 78, 61, 70, 87, 56, 88, 77, 74, 46, 62, 51, 52, 44, 70, 78, 82, 59, 49, 53, 62, 57, 62, 92, 63, 76, 83, 53, 68, 85, 44, 64, 63, 65, 71, 65, 70, 54, 57, 45, 57, 49, 34, 68, 77, 56, 63, 65, 59, 50, 78, 89, 43, 61, 59, 94, 49, 67, 65, 52, 82, 76, 59, 77, 95, 69, 86, 81, 104, 57, 49, 47, 38, 90, 67, 103, 87, 80, 69, 78, 80, 55, 54, 70, 50, 75, 63, 81, 104, 67, 79, 71, 53, 50, 64, 77, 72, 65, 95, 52, 48, 57, 78, 46, 60, 74, 45, 79, 58, 65, 53, 78, 54, 88, 74, 76, 48, 88, 64, 46, 55, 62, 111, 83, 61, 67, 46, 61, 58, 60, 60, 47, 65, 73, 61, 77, 60, 72, 54, 52, 79, 88, 43, 75, 69, 56, 76, 88, 69, 69, 62, 51, 63, 121, 58, 60, 51, 82, 82, 54, 61, 68, 50, 63, 124, 48, 95, 82, 54, 54, 73, 57, 51, 72, 60, 72, 56, 61, 53, 75, 61, 69, 78, 66, 68, 44, 61, 56, 65, 60, 42, 77, 48, 35, 56, 74, 81, 97, 79, 50, 71, 76, 76, 59, 88, 70, 53, 58, 72, 86, 67, 89, 91, 62, 88, 59, 108, 64, 53, 46, 52, 73, 69, 60, 64, 49, 95, 71, 68, 84, 66, 73, 89, 31, 63, 55, 66, 62, 64, 51, 77, 89, 72, 54, 66, 81, 40, 56, 62, 50, 57, 73, 74, 74, 68, 53, 84, 80, 55, 46, 65, 50, 68, 67, 39, 51, 65, 49, 85, 68, 89, 61, 55, 73, 60, 60, 89, 61, 103, 45, 53, 53, 54, 46, 91, 65, 65, 63, 56, 88, 58, 68, 90, 70, 58, 85, 76, 54, 78, 67, 78, 63, 75, 76, 70, 53, 30, 50, 66, 59, 111, 82, 63, 87, 51, 58, 70, 99, 57, 74, 63, 69, 30, 71, 57, 65, 61, 84, 73, 59, 56, 73, 64, 75, 88, 61, 50, 58, 70, 59, 101, 66, 63, 48, 99, 58, 64, 43, 93, 62, 53, 70, 71, 51, 61, 56, 44, 64, 73, 44, 66, 53, 59, 49, 48, 73, 65, 59, 88, 65, 84, 59, 59, 62, 65, 73, 63, 63, 70, 92, 51, 39, 72, 63, 72, 67, 61, 138, 73, 70, 64, 71, 56, 60, 51, 82, 65, 70, 62, 56, 74, 40, 61, 62, 70, 37, 58, 56, 150, 65, 67, 57, 50, 98, 57, 48, 64, 58, 54, 71, 86, 63, 103, 51, 68, 52, 49, 79, 42, 58, 66, 53, 66, 68, 58, 91, 64, 45, 56, 77, 50, 75, 60, 54, 65, 68, 64, 45, 131, 75, 57, 64, 70, 89, 80, 67, 58, 55, 46, 44, 77, 60, 60, 83, 58, 93, 82, 67, 59, 90, 56, 58, 70, 66, 68, 71, 54, 82, 78, 79, 86, 66, 77, 74, 62, 81, 67, 48, 63, 80, 61, 46, 45, 62, 68, 78, 51, 65, 56, 51, 51, 70, 76, 74, 75, 49, 63, 85, 91, 64, 71, 56, 70, 85, 69, 52, 42, 68, 78, 60, 82, 36, 65, 68, 
57, 62, 50, 53, 53, 69, 43, 60, 66, 53, 69, 50, 47, 83, 60, 62, 57, 52, 98, 69, 58, 73, 47, 65, 44, 108, 71, 50, 65, 84, 72, 65, 43, 76, 73, 59, 55, 45, 93, 41, 56, 88, 73, 58, 75, 56, 58, 50, 82, 64, 78, 53, 106, 82, 108, 59, 66, 54, 64, 98, 56, 70, 77, 72, 77, 69, 49, 76, 75, 46, 40, 39, 55, 108, 56, 35, 55, 67, 51, 62, 61, 68, 123, 49, 78, 59, 54, 62, 95, 57, 68, 60, 73, 45, 65, 92, 60, 56, 61, 55, 42, 63, 65, 64, 70, 57, 63, 49, 71, 75, 44, 90, 66, 51, 44, 63, 52, 60, 92, 102, 70, 57, 57, 74, 65, 101, 77, 63, 61, 57, 83, 68, 60, 70, 75, 115, 60, 60, 57, 84, 76, 81, 92, 55, 59, 55, 70, 66, 55, 87, 70, 66, 63, 61, 60, 60, 68, 53, 50, 54, 52, 74, 57, 70, 77, 60, 52, 60, 76, 65, 72, 69, 54, 53, 66, 81, 83, 89, 65, 92, 76, 54, 50, 65, 71, 60, 65, 66, 61, 63, 100, 40, 53, 47, 50, 61, 60, 80, 67, 62, 107, 50, 68, 62, 60, 72, 52, 69, 58, 80, 60, 46, 76, 61, 59, 75, 76, 76, 66, 74, 54, 80, 54, 56, 65, 70, 64, 86, 69, 65, 77, 45, 48, 60, 59, 53, 70, 73, 52, 80, 74, 87, 57, 74, 62, 107, 59, 63, 67, 56, 58, 57, 114, 76, 55, 72, 69, 54, 47, 74, 70, 73, 61, 65, 57, 63, 84, 67, 55, 69, 67, 66, 88, 49, 64, 57, 50, 76, 67, 109, 85, 77, 66, 78, 76, 79, 54, 61, 58, 59, 32, 61, 43, 54, 57, 94, 90, 76, 75, 57, 65, 69, 61, 66, 51, 54, 55, 64, 76, 59, 60, 52, 81, 52, 53, 40, 65, 71, 69, 57, 102, 70, 90, 73, 67, 62, 55, 70, 50, 61, 54, 56, 52, 57, 61, 86, 94, 52, 81, 98, 58, 77, 68, 71, 52, 67, 65, 59, 63, 68, 45, 56, 79, 100, 59, 46, 65, 79, 60, 63, 41, 76, 78, 102, 81, 55, 76, 79, 66, 77, 55, 79, 46, 99, 83, 106, 53, 68, 71, 71, 45, 47, 50, 66, 50, 55, 75, 69, 76, 72, 72, 51, 49, 44, 71, 53, 68, 77, 88, 37, 79, 62, 58, 68, 66, 49, 53, 67, 55, 41, 45, 59, 70, 88, 57, 93, 75, 99, 47, 71, 72, 58, 79, 73, 86, 73, 77, 75, 53, 71, 83, 86, 58, 53, 69, 67, 55, 57, 79, 55, 59, 73, 51, 83, 66, 62, 63, 69, 93, 69, 79, 75, 107, 116, 48, 57, 77, 88, 52, 61, 64, 74, 73, 40, 51, 42, 75, 71, 54, 63, 57, 97, 71, 63, 42, 65, 67, 69, 60, 77, 71, 56, 71, 75, 68, 58, 45, 80, 75, 66, 51, 78, 49, 47, 67, 67, 37, 71, 75, 50, 86, 59, 80, 71, 67, 64, 51, 81, 64, 69, 67, 68, 76, 77, 58, 131, 49, 69, 52, 64, 73, 93, 52, 70, 53, 73, 64, 61, 70, 63, 53, 60, 69, 57, 59, 54, 50, 43, 63, 74, 42, 73, 74, 71, 46, 58, 49, 98, 55, 64, 45, 53, 41, 64, 60, 66, 47, 91, 80, 64, 47, 54, 56, 32, 77, 65, 65, 65, 60, 97, 56, 69, 87, 58, 61, 46, 74, 74, 85, 65, 66, 77, 71, 62, 80, 89, 95, 54, 59, 63, 64, 83, 62, 76, 58, 77, 59, 54, 70, 65, 81, 75, 70, 54, 63, 83, 97, 67, 60, 60, 65, 56, 70, 72, 68, 75, 108, 57, 55, 68, 90, 52, 70, 59, 64, 53, 60, 77, 60, 67, 67, 74, 81, 69, 57, 60, 69, 68, 48, 57, 57, 73, 74, 57, 70, 60, 66, 56, 75, 57, 51, 90, 58, 58, 53, 62, 85, 51, 64, 72, 47, 53, 70, 68, 55, 46, 63, 94, 60, 56, 64, 64, 57, 56, 71, 77, 89, 61, 69, 56, 47, 57, 92, 72, 70, 67, 70, 67, 68, 59, 84, 53, 55, 89, 66, 69, 78, 53, 83, 101, 67, 64, 59, 39, 62, 43, 57, 54, 52, 77, 64, 84, 102, 63, 56, 64, 70, 55, 55, 61, 65, 68, 65, 60, 111, 59, 69, 88, 77, 48, 74, 70, 109, 68, 65, 77, 45, 90, 53, 52, 92, 87, 70, 70, 60, 63, 54, 55, 64, 74, 95, 53, 56, 64, 56, 75, 73, 62, 61, 50, 61, 86, 55, 48, 59, 63, 57, 68, 55, 66, 67, 64, 59, 72, 52, 44, 69, 82, 83, 54, 62, 86, 73, 62, 57, 51, 53, 52, 68, 64, 106, 61, 65, 72, 51, 103, 49, 49, 84, 104, 62, 71, 57, 60, 86, 69, 64, 66, 63, 50, 67, 74, 64, 69, 92, 55, 77, 65, 65, 62, 53, 59, 68, 66, 57, 72, 50, 124, 92, 66, 51, 60, 64, 65, 55, 77, 112, 51, 53, 94, 85, 45, 60, 70, 59, 71, 59, 61, 78, 57, 53, 54, 68, 69, 111, 68, 81, 44, 56, 62, 50, 52, 80, 53, 70, 59, 71, 52, 49, 63, 108, 70, 68, 56, 70, 65, 62, 60, 102, 64, 
55, 75, 97, 66, 56, 68, 60, 70, 65, 31, 60, 73, 78, 58, 55, 55, 56, 62, 122, 55, 52, 45, 62, 62, 75, 56, 60, 61, 51, 55, 51, 64, 61, 50, 61, 61, 56, 52, 46, 67, 63, 89, 56, 63, 86, 69, 69, 68, 95, 75, 74, 47, 57, 70, 78, 52, 56, 63, 82, 52, 87, 77, 67, 70, 59, 48, 59, 113, 63, 68, 55, 58, 51, 37, 62, 70, 76, 68, 69, 51, 56, 56, 46, 47, 83, 66, 65, 65, 76, 58, 71, 46, 65, 55, 61, 72, 52, 59, 87, 77, 83, 61, 65, 60, 61, 64, 90, 61, 56, 45, 61, 72, 51, 106, 43, 53, 73, 56, 51, 56, 61, 86, 80, 45, 67, 52, 72, 55, 64, 69, 73, 82, 65, 83, 67, 58, 76, 59, 63, 46, 64, 63, 65, 60, 54, 53, 52, 95, 104, 77, 42, 64, 72, 53, 77, 54, 79, 46, 49, 47, 46, 65, 59, 57, 64, 72, 55, 87, 81, 69, 57, 63, 63, 62, 46, 67, 50, 65, 63, 43, 64, 67, 54, 83, 110, 69, 66, 59, 37, 129, 58, 73, 77, 72, 61, 75, 66, 49, 81, 55, 62, 59, 76, 61, 79, 66, 55, 56, 48, 55, 82, 78, 78, 50, 67, 45, 53, 74, 73, 49, 68, 56, 76, 79, 49, 46, 58, 90, 63, 67, 87, 74, 55, 73, 58, 36, 69, 71, 58, 58, 55, 71, 71, 65, 81, 70, 48, 90, 57, 41, 45, 57, 56, 62, 51, 52, 66, 70, 64, 79, 46, 55, 77, 68, 56, 61, 83, 48, 58, 54, 68, 59, 68, 53, 44, 62, 50, 74, 57, 65, 72, 73, 56, 69, 65, 67, 67, 71, 55, 108, 53, 95, 75, 72, 63, 64, 60, 77, 53, 51, 69, 68, 55, 61, 72, 77, 52, 60, 65, 66, 60, 72, 65, 84, 57, 56, 78, 74, 69, 47, 57, 59, 66, 70, 68, 93, 51, 59, 68, 57, 77, 55, 59, 53, 49, 78, 53, 77, 56, 61, 68, 72, 63, 52, 97, 65, 56, 69, 61, 65, 89, 68, 73, 49, 83, 85, 52, 52, 50, 70, 66, 72, 78, 69, 68, 68, 68, 73, 97, 46, 64, 61, 73, 95, 79, 54, 68, 66, 45, 53, 57, 57, 58, 55, 54, 89, 79, 57, 122, 66, 56, 79, 54, 76, 57, 63, 70, 103, 80, 69, 60, 34, 56, 55, 56, 79, 61, 57, 70, 80, 88, 65, 50, 64, 51, 83, 50, 81, 77, 89, 68, 57, 66, 55, 79, 61, 76, 85, 61, 81, 71, 63, 59, 59, 57, 55, 83, 59, 63, 96, 60, 53, 81, 74, 83, 70, 53, 94, 60, 87, 43, 57, 72, 87, 59, 60, 61, 140, 46, 72, 49, 65, 60, 65, 60, 53, 70, 55, 76, 57, 56, 66, 53, 79, 48, 65, 54, 67, 51, 72, 42, 63, 63, 88, 98, 79, 58, 53, 81, 59, 62, 50, 54, 47, 53, 80, 56, 64, 55, 62, 73, 59, 96, 47, 70, 67, 61, 92, 80, 75, 58, 71, 80, 88, 72, 71, 59, 63, 82, 61, 56, 71, 65, 86, 64, 49, 68, 76, 51, 81, 71, 64, 64, 52, 61, 75, 58, 97, 46, 59, 65, 80, 78, 71, 54, 51, 50, 75, 78, 103, 68, 75, 48, 74, 63, 64, 67, 66, 103, 58, 73, 67, 56, 50, 52, 64, 53, 56, 72, 61, 72, 78, 56, 59, 75, 64, 70, 89, 58, 88, 81, 77, 61, 81, 77, 60, 59, 69, 75, 66, 72, 56, 69, 41, 80, 71, 85, 65, 56, 63, 67, 89, 47, 90, 71, 83, 65, 69, 47, 89, 63, 57, 60, 66, 73, 63, 68, 82, 69, 80, 67, 74, 65, 66, 56, 53, 64, 61, 56, 64, 54, 69, 65, 50, 59, 43, 79, 80, 59, 61, 70, 63, 65, 69, 104, 69, 58, 62, 42, 98, 65, 96, 55, 60, 62, 62, 48, 77, 52, 71, 71, 82, 61, 54, 67, 55, 47, 84, 53, 75, 50, 63, 67, 64, 65, 69, 72, 55, 75, 70, 65, 54, 105, 60, 44, 51, 54, 91, 54, 54, 61, 60, 53, 33, 71, 67, 75, 60, 66, 53, 75, 84, 65, 72, 101, 63, 57, 60, 83, 62, 69, 87, 47, 56, 74, 69, 62, 52, 109, 58, 42, 58, 68, 64, 82, 68, 59, 61, 60, 71, 60, 55, 54, 66, 81, 49, 52, 60, 49, 84, 71, 44, 60, 65, 86, 75, 57, 55, 66, 53, 85, 48, 69, 102, 76, 70, 70, 56, 87, 70, 76, 63, 61, 55, 52, 36, 66, 76, 79, 73, 75, 62, 55, 72, 79, 66, 75, 80, 54, 61, 59, 69, 73, 78, 66, 68, 65, 58, 57, 59, 58, 89, 64, 55, 55, 49, 110, 60, 73, 81, 106, 92, 79, 83, 67, 67, 49, 69, 47, 56, 71, 80, 83, 63, 62, 58, 85, 71, 52, 56, 60, 79, 75, 103, 45, 54, 56, 71, 70, 59, 41, 79, 43, 74, 69, 57, 72, 66, 61, 86, 97, 63, 55, 73, 83, 57, 66, 61, 61, 67, 60, 70, 56, 42, 55, 77, 42, 78, 51, 50, 64, 55, 66, 113, 42, 59, 46, 65, 54, 68, 56, 50, 56, 52, 64, 75, 60, 89, 71, 65, 72, 57, 65, 
46, 59, 94, 66, 78, 96, 66, 57, 65, 58, 68, 43, 61, 66, 61, 69, 49, 54, 70, 56, 80, 59, 63, 47, 60, 50, 68, 62, 64, 66, 84, 60, 78, 59, 54, 57, 61, 63, 50, 64, 86, 58, 47, 64, 67, 47, 68, 69, 120, 85, 55, 55, 53, 59, 69, 60, 53, 70, 96, 45, 68, 68, 70, 56, 52, 57, 58, 86, 93, 57, 65, 34, 64, 54, 54, 70, 54, 70, 47, 171, 67, 60, 73, 62, 50, 53, 66, 70, 51, 64, 59, 61, 60, 63, 64, 37, 57, 131, 57, 66, 64, 59, 76, 63, 59, 61, 78, 56, 48, 40, 70, 66, 75, 49, 61, 55, 72, 48, 55, 60, 62, 61, 59, 67, 63, 70, 79, 68, 48, 108, 148, 77, 57, 56, 55, 70, 39, 57, 58, 59, 61, 62, 61, 41, 77, 53, 56, 85, 74, 57, 69, 94, 85, 66, 95, 70, 50, 65, 54, 111, 114, 46, 75, 58, 48, 54, 50, 77, 50, 69, 79, 58, 64, 46, 68, 46, 60, 74, 56, 85, 75, 64, 66, 91, 93, 81, 81, 71, 64, 98, 75, 95, 53, 65, 66, 67, 70, 71, 69, 62, 55, 77, 59, 76, 58, 50, 72, 105, 47, 74, 92, 55, 65, 74, 54, 62, 32, 64, 57, 47, 59, 63, 74, 69, 71, 43, 63, 63, 60, 55, 57, 87, 59, 34, 54, 60, 54, 56, 49, 81, 62, 63, 70, 70, 141, 48, 75, 70, 66, 58, 48, 65, 72, 88, 85, 61, 73, 69, 59, 71, 62, 72, 76, 121, 75, 81, 66, 76, 80, 66, 59, 63, 78, 51, 155, 67, 50, 68, 64, 74, 82, 72, 49, 81, 71, 58, 57, 57, 63, 51, 74, 69, 68, 78, 103, 70, 107, 58, 55, 54, 64, 61, 68, 62, 83, 47, 76, 73, 63, 61, 63, 93, 74, 81, 75, 55, 71, 67, 118, 68, 50, 49, 70, 46, 62, 54, 57, 63, 64, 55, 73, 66, 54, 68, 63, 55, 57, 70, 65, 50, 54, 79, 36, 61, 71, 61, 81, 66, 58, 67, 62, 99, 63, 82, 56, 72, 65, 64, 47, 58, 57, 98, 74, 94, 143, 143, 65, 53, 63, 67, 48, 77, 71, 60, 77, 108, 64, 68, 82, 67, 63, 93, 57, 72, 72, 65, 103, 72, 93, 111, 57, 64, 56, 56, 50, 99, 51, 78, 73, 52, 46, 54, 72, 50, 61, 74, 82, 89, 78, 41, 69, 62, 80, 73, 65, 68, 54, 50, 64, 79, 64, 74, 62, 74, 69, 76, 62, 58, 49, 29, 82, 58, 62, 80, 59, 54, 56, 64, 50, 44, 106, 63, 90, 59, 67, 50, 50, 77, 54, 68, 50, 40, 70, 84, 66, 84, 52, 55, 32, 56, 74, 46, 51, 67, 56, 72, 74, 68, 62, 68, 53, 57, 44, 53, 56, 58, 60, 80, 56, 56, 83, 62, 57, 65, 60, 74, 71, 57, 61, 62, 65, 68, 50, 50, 73, 63, 69, 66, 56, 67, 87, 64, 59, 57, 41, 50, 66, 59, 63, 77, 88, 60, 76, 54, 56, 79, 58, 89, 43, 71, 70, 57, 66, 77, 60, 58, 71, 52, 43, 60, 66, 66, 63, 53, 78, 60, 79, 90, 58, 61, 66, 37, 66, 66, 54, 64, 67, 61, 53, 60, 55, 58, 60, 50, 49, 71, 63, 81, 47, 50, 52, 53, 34, 65, 49, 67, 62, 78, 67, 82, 55, 62, 50, 59, 77, 94, 65, 69, 68, 65, 56, 54, 73, 44, 50, 85, 71, 77, 54, 60, 53, 80, 69, 64, 48, 63, 69, 45, 71, 104, 66, 55, 64, 51, 76, 71, 53, 81, 50, 145, 89, 67, 57, 61, 43, 65, 65, 51, 90, 68, 70, 64, 66, 64, 59, 79, 62, 67, 67, 58, 44, 60, 62, 65, 60, 50, 34, 68, 50, 49, 84, 49, 54, 57, 68, 115, 56, 94, 62, 74, 48, 67, 53, 81, 88, 68, 50, 60, 92, 84, 101, 63, 158, 74, 41, 56, 55, 82, 88, 73, 66, 54, 63, 78, 62, 58, 59, 64, 76, 57, 65, 53, 62, 49, 60, 56, 48, 61, 69, 94, 70, 71, 62, 73, 49, 57, 59, 54, 42, 59, 48, 58, 56, 65, 64, 94, 63, 77, 60, 61, 54, 54, 80, 45, 69, 66, 53, 70, 65, 63, 60, 61, 39, 113, 67, 80, 104, 67, 77, 56, 61, 67, 63, 76, 48, 64, 65, 56, 57, 63, 127, 61, 54, 73, 52, 79, 67, 52, 81, 61, 50, 53, 88, 64, 70, 106, 65, 45, 71, 69, 90, 56, 42, 56, 66, 58, 75, 110, 84, 61, 89, 44, 67, 63, 79, 56, 78, 59, 80, 70, 62, 66, 66, 61, 64, 67, 66, 78, 55, 72, 62, 64, 59, 45, 50, 57, 57, 60, 56, 68, 57, 55, 83, 72, 77, 53, 61, 84, 63, 77, 66, 62, 59, 64, 59, 107, 65, 72, 58, 73, 85, 69, 79, 63, 61, 53, 78, 45, 47, 64, 108, 50, 64, 44, 68, 61, 42, 66, 55, 65, 65, 57, 42, 53, 53, 63, 65, 69, 58, 75, 59, 60, 53, 56, 58, 67, 69, 58, 55, 70, 57, 68, 75, 49, 52, 57, 52, 56, 77, 58, 89, 60, 70, 51, 63, 73, 61, 53, 56, 72, 
68, 72, 49, 52, 78, 72, 54, 64, 53, 55, 63, 72, 105, 118, 70, 99, 56, 64, 83, 64, 65, 90, 58, 54, 53, 65, 67, 53, 112, 47, 64, 48, 66, 92, 83, 61, 55, 62, 53, 82, 48, 75, 96, 57, 76, 53, 62, 87, 68, 78, 69, 45, 65, 87, 84, 69, 65, 88, 84, 57, 62, 65, 78, 76, 72, 81, 59, 77, 52, 66, 59, 49, 52, 70, 67, 81, 57, 53, 70, 92, 62, 74, 64, 77, 67, 58, 60, 62, 37, 65, 60, 59, 71, 86, 33, 58, 86, 61, 78, 64, 63, 49, 89, 62, 66, 69, 76, 87, 78, 65, 54, 57, 75, 84, 80, 55, 55, 60, 65, 120, 61, 60, 70, 66, 57, 60, 62, 45, 62, 75, 63, 73, 57, 71, 75, 66, 48, 73, 92, 42, 70, 53, 64, 51, 65, 69, 52, 89, 61, 64, 65, 50, 68, 55, 63, 65, 53, 51, 81, 66, 55, 85, 52, 55, 42, 62, 51, 60, 69, 64, 68, 59, 83, 63, 61, 64, 83, 60, 53, 69, 75, 83, 72, 69, 77, 68, 73, 67, 54, 68, 61, 67, 40, 76, 57, 56, 90, 45, 48, 61, 69, 63, 60, 67, 72, 99, 71, 68, 71, 55, 66, 59, 66, 75, 47, 70, 88, 62, 64, 55, 64, 75, 74, 53, 77, 62, 49, 60, 65, 64, 51, 66, 59, 56, 50, 63, 64, 71, 82, 63, 79, 97, 57, 86, 61, 59, 68, 58, 112, 60, 107, 54, 59, 43, 81, 51, 64, 60, 72, 73, 83, 72, 83, 56, 55, 57, 64, 56, 69, 53, 83, 64, 67, 53, 67, 66, 69, 61, 72, 66, 72, 77, 65, 65, 78, 128, 178, 72, 69, 66, 71, 61, 53, 57, 91, 62, 119, 65, 67, 85, 51, 59, 58, 54, 58, 68, 61, 68, 79, 66, 49, 93, 62, 56, 77, 87, 53, 57, 52, 77, 71, 63, 70, 75, 63, 55, 66, 100, 63, 63, 65, 62, 57, 58, 73, 67, 61, 68, 63, 47, 62, 50, 62, 66, 57, 67, 73, 100, 65, 69, 77, 74, 68, 51, 91, 70, 73, 53, 82, 63, 74, 51, 60, 56, 68, 48, 62, 78, 68, 56, 57, 53, 85, 68, 73, 84, 76, 85, 62, 53, 65, 69, 77, 52, 73, 69, 60, 59, 58, 58, 67, 84, 56, 53, 65, 55, 72, 55, 66, 70, 81, 63, 67, 60, 59, 47, 64, 59, 65, 66, 99, 58, 64, 73, 58, 52, 73, 53, 83, 62, 67, 60, 67, 84, 70, 52, 41, 46, 64, 65, 73, 53, 65, 59, 58, 85, 71, 62, 59, 74, 51, 60, 54, 67, 42, 68, 65, 73, 42, 54, 57, 92, 66, 49, 44, 64, 52, 58, 50, 65, 71, 66, 69, 123, 60, 50, 67, 75, 60, 95, 55, 76, 59, 91, 68, 78, 60, 58, 52, 59, 65, 92, 81, 65, 91, 65, 66, 60, 67, 67, 61, 78, 70, 61, 58, 50, 53, 53, 34, 48, 66, 51, 54, 60, 44, 65, 67, 71, 79, 49, 64, 65, 49, 54, 90, 50, 51, 63, 54, 77, 54, 64, 67, 68, 60, 56, 55, 57, 92, 57, 81, 70, 78, 49, 44, 67, 47, 56, 54, 82, 63, 58, 66, 63, 63, 76, 70, 64, 70, 88, 63, 66, 69, 50, 96, 57, 76, 68, 68, 46, 64, 67, 61, 66, 73, 54, 70, 75, 77, 81, 74, 62, 67, 70, 75, 79, 55, 83, 75, 50, 66, 77, 77, 57, 61, 69, 74, 76, 52, 56, 97, 68, 68, 53, 49, 77, 55, 73, 57, 56, 65, 49, 52, 62, 64, 87, 58, 63, 74, 83, 95, 65, 69, 71, 86, 60, 60, 67, 51, 78, 60, 61, 66, 59, 56, 118, 72, 58, 59, 52, 58, 58, 80, 56, 59, 55, 65, 65, 61, 59, 61, 75, 56, 60, 53, 73, 57, 55, 47, 75, 69, 50, 59, 69, 84, 63, 66, 59, 85, 87, 74, 64, 67, 65, 74, 68, 81, 70, 80, 55, 70, 74, 90, 68, 69, 90, 60, 68, 78, 63, 45, 58, 58, 91, 69, 94, 92, 65, 69, 58, 82, 53, 63, 74, 47, 53, 67, 60, 58, 58, 51, 65, 68, 68, 73, 65, 67, 59, 78, 63, 85, 48, 60, 34, 65, 55, 56, 88, 76, 82, 45, 69, 87, 63, 72, 81, 93, 63, 92, 56, 70, 65, 82, 64, 64, 109, 56, 69, 39, 73, 60, 70, 46, 90, 67, 92, 65, 65, 51, 58, 82, 78, 96, 59, 64, 64, 61, 58, 107, 80, 64, 84, 51, 56, 69, 73, 71, 73, 67, 31, 67, 70, 71, 54, 46, 72, 82, 56, 58, 74, 67, 60, 71, 57, 60, 34, 70, 73, 80, 68, 50, 70, 37, 51, 47, 72, 70, 65, 55, 77, 66, 55, 68, 67, 73, 67, 75, 61, 72, 60, 92, 54, 60, 59, 77, 91, 85, 77, 70, 66, 61, 57, 54, 67, 66, 71, 53, 70, 67, 90, 76, 58, 86, 94, 68, 64, 51, 78, 74, 48, 59, 64, 64, 60, 70, 66, 74, 77, 46, 88, 56, 76, 68, 70, 59, 68, 71, 94, 62, 91, 70, 100, 51, 66, 71, 79, 55, 45, 69, 75, 79, 64, 58, 82, 69, 62, 77, 75, 55, 44, 59, 76, 72, 64, 
43, 50, 77, 66, 48, 53, 61, 72, 63, 82, 69, 67, 58, 58, 46, 52, 64, 70, 52, 65, 60, 61, 69, 55, 89, 68, 48, 69, 61, 118, 77, 59, 75, 69, 54, 74, 68, 66, 50, 59, 70, 93, 85, 92, 63, 61, 42, 56, 70, 65, 76, 67, 72, 58, 60, 60, 60, 57, 52, 52, 61, 69, 89, 72, 62, 63, 61, 76, 64, 53, 75, 57, 67, 51, 64, 52, 53, 59, 86, 62, 76, 73, 58, 55, 51, 43, 44, 44, 72, 63, 63, 66, 84, 63, 66, 64, 58, 74, 62, 48, 69, 80, 71, 68, 51, 85, 65, 44, 53, 61, 65, 57, 54, 74, 43, 77, 59, 62, 84, 62, 66, 60, 47, 79, 56, 74, 57, 57, 61, 75, 51, 54, 56, 66, 52, 55, 67, 62, 81, 48, 52, 57, 67, 70, 63, 68, 41, 67, 62, 57, 60, 78, 65, 61, 68, 72, 73, 58, 76, 59, 78, 98, 68, 69, 75, 68, 69, 50, 58, 55, 53, 66, 66, 75, 62, 68, 58, 70, 91, 77, 66, 72, 73, 68, 65, 50, 81, 71, 75, 41, 48, 57, 53, 77, 99, 97, 56, 69, 59, 72, 56, 85, 70, 58, 53, 72, 81, 53, 76, 66, 60, 62, 97, 74, 75, 68, 66, 80, 62, 61, 60, 62, 67, 77, 56, 65, 63, 68, 80, 73, 62, 68, 56, 62, 70, 56, 68, 49, 66, 65, 110, 90, 65, 71, 64, 62, 70, 59, 72, 61, 66, 69, 85, 81, 58, 91, 80, 67, 78, 74, 56, 62, 56, 74, 77, 50, 58, 55, 49, 61, 73, 50, 60, 51, 56, 61, 51, 52, 57, 68, 66, 52, 73, 60, 67, 58, 54, 79, 60, 37, 60, 82, 63, 63, 54, 85, 59, 72, 65, 60, 66, 35, 59, 55, 88, 69, 47, 64, 50, 82, 73, 73, 56, 63, 56, 66, 56, 70, 88, 60, 63, 55, 57, 69, 55, 56, 52, 65, 79, 48, 66, 68, 76, 67, 81, 61, 51, 64, 62, 66, 75, 57, 34, 70, 71, 75, 102, 113, 70, 50, 87, 82, 47, 59, 59, 72, 74, 65, 59, 67, 72, 63, 56, 42, 69, 70, 59, 55, 52, 61, 55, 86, 57, 97, 77, 76, 64, 70, 92, 66, 60, 69, 52, 59, 60, 88, 75, 56, 96, 78, 53, 67, 58, 73, 49, 81, 66, 70, 111, 90, 44, 51, 51, 82, 66, 56, 132, 70, 59, 71, 68, 70, 63, 94, 72, 46, 55, 54, 93, 55, 51, 68, 45, 83, 85, 65, 72, 78, 45, 57, 83, 71, 84, 46, 63, 66, 73, 63, 77, 71, 67, 74, 42, 49, 52, 84, 53, 60, 57, 55, 61, 76, 45, 58, 53, 61, 49, 64, 67, 61, 87, 51, 90, 69, 58, 60, 47, 64, 49, 72, 66, 42, 73, 60, 76, 62, 83, 63, 66, 69, 55, 65, 68, 62, 56, 45, 60, 60, 66, 51, 60, 66, 73, 65, 66, 53, 83, 73, 68, 83, 101, 53, 61, 91, 61, 72, 59, 49, 60, 70, 72, 76, 58, 68, 79, 71, 80, 47, 80, 84, 61, 75, 58, 70, 73, 54, 63, 57, 65, 57, 72, 54, 143, 81, 81, 60, 52, 55, 29, 63, 78, 63, 65, 77, 56, 54, 58, 68, 99, 51, 62, 58, 52, 79, 56, 45, 46, 56, 91, 55, 60, 69, 68, 58, 68, 60, 56, 61, 66, 93, 68, 78, 32, 62, 64, 70, 51, 64, 53, 78, 103, 78, 53, 66, 49, 48, 55, 61, 62, 74, 66, 51, 71, 53, 73, 73, 60, 61, 63, 50, 94, 79, 59, 65, 88, 104, 60, 48, 71, 50, 65, 61, 59, 82, 54, 58, 51, 82, 57, 76, 69, 59, 50, 75, 63, 70, 120, 79, 58, 37, 73, 77, 63, 62, 90, 64, 50, 77, 56, 86, 64, 72, 65, 74, 56, 76, 65, 74, 56, 76, 61, 72, 76, 73, 67, 55, 59, 93, 66, 91, 48, 99, 56, 60, 79, 69, 73, 77, 115, 64, 67, 75, 60, 70, 71, 54, 68, 56, 59, 58, 72, 91, 94, 58, 75, 64, 50, 73, 52, 54, 64, 49, 59, 47, 43, 49, 55, 61, 76, 83, 65, 65, 74, 64, 56, 66, 56, 42, 66, 58, 61, 49, 61, 90, 75, 48, 54, 70, 63, 63, 71, 67, 48, 55, 62, 75, 56, 58, 57, 76, 55, 56, 48, 86, 78, 74, 65, 86, 92, 67, 66, 61, 85, 60, 63, 75, 56, 59, 34, 72, 39, 59, 99, 59, 65, 57, 93, 54, 67, 71, 69, 39, 61, 54, 72, 90, 58, 68, 63, 59, 62, 56, 68, 97, 67, 73, 60, 66, 76, 70, 60, 97, 49, 71, 62, 64, 72, 61, 66, 62, 56, 62, 47, 57, 61, 65, 58, 45, 65, 64, 61, 66, 55, 56, 54, 76, 83, 80, 55, 53, 63, 51, 67, 72, 64, 125, 59, 84, 53, 47, 62, 73, 71, 69, 52, 73, 66, 81, 82, 66, 68, 55, 101, 67, 71, 49, 76, 71, 67, 65, 70, 51, 55, 58, 55, 51, 74, 66, 60, 71, 57, 61, 57, 69, 59, 62, 79, 65, 52, 65, 97, 58, 70, 71, 76, 75, 45, 53, 61, 65, 72, 75, 68, 64, 75, 101, 60, 67, 65, 70, 80, 53, 58, 108, 
66, 65, 64, 70, 88, 69, 61, 48, 65, 46, 89, 77, 49, 62, 69, 53, 75, 67, 78, 66, 65, 68, 125, 84, 61, 57, 76, 95, 50, 65, 66, 63, 73, 57, 59, 75, 70, 70, 67, 56, 74, 46, 64, 74, 54, 51, 54, 69, 48, 94, 61, 56, 67, 57, 76, 56, 56, 75, 68, 64, 81, 81, 71, 80, 73, 55, 54, 68, 76, 70, 76, 66, 63, 55, 77, 58, 113, 70, 60, 67, 60, 48, 63, 70, 57, 79, 67, 56, 62, 56, 65, 46, 56, 49, 66, 81, 64, 56, 64, 55, 73, 61, 60, 65, 61, 81, 47, 68, 67, 62, 69, 115, 61, 54, 56, 58, 75, 60, 56, 68, 58, 73, 58, 65, 60, 46, 65, 62, 77, 68, 58, 86, 63, 56, 37, 65, 71, 80, 68, 57, 62, 59, 53, 76, 70, 63, 72, 84, 49, 56, 79, 54, 60, 61, 83, 65, 100, 66, 59, 92, 44, 51, 40, 68, 49, 78, 62, 98, 74, 79, 58, 71, 85, 73, 65, 42, 61, 57, 61, 57, 64, 85, 80, 65, 67, 81, 85, 68, 56, 56, 60, 87, 62, 58, 63, 81, 74, 67, 69, 60, 60, 72, 49, 66, 56, 60, 51, 66, 55, 70, 54, 57, 68, 62, 69, 72, 49, 65, 70, 81, 50, 82, 66, 61, 57, 95, 65, 62, 61, 75, 54, 83, 65, 61, 46, 67, 83, 61, 69, 124, 58, 56, 72, 80, 62, 57, 60, 67, 54, 69, 130, 70, 81, 78, 66, 77, 66, 56, 69, 59, 58, 71, 68, 89, 58, 72, 65, 66, 62, 72, 74, 77, 64, 67, 63, 44, 69, 66, 59, 69, 72, 62, 84, 66, 65, 102, 60, 55, 65, 60, 98, 63, 69, 46, 63, 68, 71, 62, 64, 61, 58, 64, 72, 59, 72, 59, 71, 54, 78, 74, 95, 55, 62, 64, 71, 70, 56, 59, 68, 85, 104, 107, 63, 51, 74, 52, 66, 66, 59, 66, 76, 63, 85, 58, 77, 48, 60, 76, 68, 80, 65, 67, 58, 83, 80, 68, 72, 74, 67, 58, 59, 58, 72, 49, 55, 60, 54, 45, 72, 55, 104, 61, 52, 63, 58, 65, 69, 59, 86, 61, 64, 75, 68, 63, 69, 60, 69, 65, 76, 74, 60, 82, 52, 62, 59, 49, 68, 77, 50, 51, 73, 124, 122, 75, 78, 68, 64, 61, 52, 56, 60, 60, 65, 55, 69, 65, 63, 65, 79, 66, 71, 77, 66, 68, 69, 76, 48, 81, 54, 47, 63, 90, 51, 49, 58, 78, 59, 93, 67, 64, 62, 83, 67, 54, 74, 60, 118, 61, 76, 58, 70, 54, 67, 65, 57, 77, 50, 67, 72, 53, 62, 75, 57, 56, 59, 58, 67, 55, 63, 66, 59, 53, 76, 66, 54, 60, 60, 67, 58, 66, 80, 71, 63, 63, 75, 67, 71, 66, 63, 64, 58, 61, 55, 67, 74, 61, 64, 63, 63, 62, 70, 54, 61, 67, 68, 76, 55, 79, 84, 73, 94, 62, 68, 82, 73, 53, 61, 61, 64, 78, 68, 65, 64, 67, 62, 65, 50, 58, 134, 120, 59, 60, 52, 84, 78, 46, 60, 65, 66, 77, 66, 43, 84, 61, 68, 72, 119, 55, 48, 61, 86, 63, 89, 62, 62, 58, 63, 53, 53, 53, 66, 74, 55, 48, 79, 70, 75, 65, 76, 81, 70, 66, 72, 73, 70, 48, 54, 83, 69, 64, 71, 52, 58, 137, 46, 94, 68, 66, 63, 51, 56, 79, 72, 67, 99, 74, 62, 62, 50, 65, 73, 70, 47, 55, 55, 68, 68, 45, 58, 69, 55, 62, 60, 51, 47, 59, 61, 48, 78, 55, 55, 52, 70, 49, 91, 63, 57, 62, 75, 52, 77, 63, 70, 67, 66, 67, 58, 65, 60, 60, 54, 80, 61, 58, 74, 62, 73, 67, 60, 66, 72, 57, 65, 74, 66, 55, 68, 65, 82, 51, 66, 54, 69, 43, 52, 67, 52, 46, 72, 71, 71, 65, 59, 73, 74, 68, 55, 56, 57, 53, 68, 58, 59, 56, 65, 92, 57, 60, 81, 56, 81, 59, 61, 80, 51, 63, 68, 64, 45, 63, 50, 93, 60, 36, 70, 75, 50, 70, 69, 63, 61, 54, 68, 75, 62, 59, 70, 84, 60, 79, 59, 76, 55, 60, 83, 57, 71, 65, 64, 71, 60, 64, 76, 87, 61, 69, 53, 76, 55, 67, 44, 52, 88, 95, 66, 93, 63, 140, 60, 68, 121, 65, 59, 64, 67, 64, 105, 53, 57, 68, 60, 78, 65, 67, 80, 69, 70, 74, 56, 66, 70, 108, 57, 69, 63, 84, 65, 63, 60, 67, 64, 74, 51, 90, 68, 75, 59, 63, 56, 62, 75, 69, 61, 53, 61, 52, 107, 58, 62, 65, 72, 81, 65, 57, 68, 59, 53, 95, 57, 58, 62, 41, 65, 69, 66, 57, 72, 58, 67, 58, 58, 63, 59, 49, 64, 63, 70, 54, 46, 55, 61, 57, 57, 66, 61, 61, 67, 48, 69, 76, 67, 64, 63, 63, 74, 53, 64, 52, 63, 65, 81, 59, 74, 70, 60, 94, 60, 74, 69, 109, 70, 65, 57, 59, 48, 67, 57, 59, 59, 45, 52, 59, 60, 60, 90, 70, 54, 76, 76, 45, 69, 53, 73, 76, 64, 57, 57, 64, 47, 110, 75, 64, 
64, 70, 60, 88, 63, 67, 54, 89, 54, 65, 71, 56, 100, 72, 69, 61, 56, 66, 57, 46, 57, 73, 57, 110, 76, 85, 79, 62, 45, 56, 61, 77, 71, 57, 73, 64, 70, 57, 51, 59, 56, 68, 78, 72, 48, 37, 53, 58, 75, 68, 83, 60, 71, 50, 65, 63, 54, 71, 63, 63, 52, 47, 63, 54, 72, 67, 59, 51, 57, 72, 67, 80, 57, 80, 71, 54, 48, 70, 101, 45, 69, 86, 58, 96, 70, 76, 68, 67, 67, 70, 80, 55, 72, 68, 71, 60, 84, 75, 69, 64, 80, 97, 65, 69, 83, 79, 47, 62, 69, 81, 69, 80, 98, 49, 65, 45, 75, 55, 71, 55, 46, 53, 63, 58, 73, 63, 75, 50, 46, 52, 63, 61, 58, 56, 52, 75, 83, 56, 67, 74, 71, 63, 50, 84, 56, 84, 85, 83, 62, 150, 63, 60, 63, 48, 61, 67, 42, 67, 50, 92, 66, 59, 54, 85, 62, 62, 73, 74, 50, 53, 63, 63, 53, 52, 47, 68, 52, 46, 58, 60, 64, 86, 85, 62, 54, 68, 63, 58, 77, 72, 71, 62, 78, 74, 60, 71, 65, 67, 60, 71, 66, 113, 81, 73, 95, 56, 60, 61, 66, 58, 90, 55, 65, 46, 57, 60, 57, 66, 88, 99, 57, 52, 126, 53, 61, 70, 60, 70, 79, 78, 78, 74, 48, 74, 56, 66, 49, 42, 65, 66, 77, 58, 64, 56, 50, 74, 59, 75, 57, 72, 66, 64, 51, 64, 66, 49, 64, 74, 62, 83, 53, 57, 52, 66, 78, 65, 61, 84, 58, 57, 53, 57, 45, 60, 72, 83, 57, 57, 78, 92, 51, 57, 52, 63, 52, 69, 86, 68, 66, 63, 38, 88, 63, 58, 75, 92, 67, 72, 56, 74, 47, 62, 58, 62, 59, 60, 68, 66, 81, 49, 69, 67, 62, 64, 45, 62, 77, 66, 60, 86, 77, 96, 87, 58, 72, 82, 67, 60, 61, 59, 58, 83, 59, 66, 81, 54, 92, 57, 81, 65, 50, 75, 60, 51, 76, 86, 57, 73, 46, 48, 60, 51, 75, 64, 68, 72, 68, 55, 69, 78, 64, 73, 73, 71, 63, 54, 62, 58, 55, 69, 71, 84, 65, 91, 59, 62, 59, 43, 67, 59, 61, 69, 56, 45, 51, 73, 74, 107, 53, 74, 60, 60, 54, 67, 89, 58, 74, 67, 73, 79, 70, 51, 60, 65, 59, 68, 60, 72, 65, 54, 96, 66, 52, 71, 54, 51, 61, 47, 52, 67, 64, 57, 52, 72, 45, 52, 58, 55, 63, 55, 64, 61, 53, 43, 89, 71, 70, 91, 77, 64, 57, 56, 61, 76, 81, 83, 70, 58, 70, 70, 56, 72, 72, 61, 68, 61, 57, 56, 65, 61, 79, 64, 53, 68, 59, 78, 78, 78, 55, 59, 49, 67, 53, 75, 52, 61, 56, 62, 57, 80, 52, 64, 67, 54, 51, 67, 50, 70, 57, 65, 67, 68, 66, 72, 64, 54, 56, 76, 129, 71, 74, 53, 54, 62, 66, 75, 97, 53, 64, 65, 104, 62, 69, 73, 49, 69, 69, 74, 71, 82, 85, 53, 88, 70, 54, 62, 52, 78, 64, 42, 84, 69, 77, 65, 75, 65, 70, 61, 45, 59, 62, 64, 53, 85, 46, 43, 46, 72, 72, 89, 69, 71, 63, 56, 51, 83, 66, 88, 55, 58, 93, 91, 65, 64, 54, 55, 92, 63, 59, 79, 54, 81, 58, 57, 54, 47, 53, 65, 74, 55, 60, 52, 81, 70, 66, 54, 82, 64, 43, 57, 116, 55, 59, 80, 76, 72, 63, 87, 82, 45, 58, 54, 66, 69, 75, 77, 56, 80, 66, 63, 50, 56, 54, 75, 57, 63, 124, 62, 71, 57, 61, 72, 83, 80, 52, 79, 63, 73, 103, 69, 67, 41, 47, 54, 69, 59, 58, 84, 56, 69, 39, 48, 68, 72, 72, 64, 50, 81, 61, 52, 58, 69, 58, 96, 78, 79, 61, 85, 72, 70, 60, 55, 73, 52, 82, 67, 56, 98, 74, 53, 72, 71, 91, 54, 67, 59, 56, 53, 69, 66, 47, 85, 51, 75, 64, 82, 54, 63, 85, 51, 60, 70, 63, 75, 66, 66, 63, 70, 59, 84, 59, 57, 57, 77, 57, 61, 58, 73, 75, 61, 74, 64, 61, 69, 75, 68, 84, 41, 71, 70, 72, 76, 73, 64, 67, 50, 71, 62, 66, 39, 52, 69, 61, 52, 59, 71, 74, 73, 53, 72, 64, 73, 86, 68, 78, 54, 63, 65, 59, 63, 57, 67, 64, 56, 53, 50, 42, 70, 57, 69, 62, 43, 53, 83, 84, 57, 67, 52, 43, 66, 83, 81, 66, 54, 44, 48, 87, 69, 51, 71, 65, 65, 53, 79, 43, 66, 64, 66, 64, 62, 67, 76, 65, 76, 65, 69, 68, 57, 98, 59, 59, 68, 84, 54, 60, 70, 82, 68, 52, 68, 61, 58, 60, 77, 92, 77, 94, 66, 55, 68, 80, 54, 60, 74, 86, 60, 63, 43, 70, 100, 47, 65, 64, 70, 76, 55, 79, 79, 60, 57, 69, 74, 64, 94, 74, 84, 63, 74, 52, 60, 51, 77, 50, 61, 58, 77, 87, 66, 57, 74, 73, 54, 59, 66, 52, 79, 74, 64, 59, 57, 87, 71, 87, 75, 63, 49, 90, 61, 77, 67, 87, 74, 57, 105, 58, 
64, 60, 61, 69, 79, 56, 59, 77, 76, 50, 69, 54, 69, 85, 87, 54, 52, 58, 60, 64, 76, 77, 93, 69, 67, 49, 55, 74, 64, 48, 49, 57, 53, 61, 68, 57, 82, 71, 69, 77, 81, 70, 77, 70, 74, 67, 68, 53, 43, 54, 54, 84, 46, 62, 52, 60, 56, 61, 59, 58, 60, 51, 61, 59, 58, 57, 58, 62, 73, 52, 63, 54, 60, 74, 71, 58, 46, 74, 59, 86, 67, 60, 99, 47, 73, 89, 55, 64, 73, 64, 73, 56, 77, 69, 59, 71, 69, 82, 74, 47, 80, 68, 75, 76, 72, 57, 65, 49, 63, 58, 46, 89, 127, 67, 50, 67, 69, 67, 74, 62, 56, 65, 59, 77, 60, 51, 71, 86, 89, 59, 69, 56, 49, 50, 87, 91, 58, 60, 79, 60, 76, 74, 68, 67, 75, 61, 76, 60, 79, 85, 85, 52, 57, 69, 53, 79, 59, 53, 46, 71, 74, 56, 59, 100, 57, 70, 104, 70, 28, 88, 65, 48, 70, 77, 74, 78, 59, 57, 62, 64, 71, 67, 54, 75, 71, 62, 70, 80, 72, 75, 67, 67, 117, 114, 51, 60, 52, 51, 60, 51, 47, 55, 47, 73, 57, 62, 72, 68, 76, 79, 71, 73, 69, 60, 76, 79, 67, 48, 46, 76, 54, 56, 40, 64, 50, 50, 52, 59, 65, 54, 71, 131, 43, 54, 69, 67, 72, 49, 49, 81, 57, 71, 54, 62, 84, 60, 49, 46, 62, 60, 56, 62, 50, 90, 68, 67, 59, 60, 82, 41, 39, 41, 52, 76, 56, 50, 62, 62, 37, 62, 62, 70, 72, 62, 66, 97, 67, 48, 42, 64, 72, 70, 55, 49, 67, 57, 41, 91, 51, 69, 77, 61, 56, 44, 60, 72, 72, 84, 71, 67, 62, 73, 62, 67, 88, 35, 49, 87, 50, 65, 68, 62, 47, 82, 80, 69, 75, 79, 75, 63, 62, 53, 38, 57, 63, 43, 55, 69, 71, 68, 71, 68, 57, 67, 53, 57, 57, 51, 72, 45, 72, 72, 59, 91, 79, 64, 59, 62, 83, 58, 58, 42, 55, 71, 53, 46, 66, 67, 84, 60, 54, 68, 74, 56, 65, 53, 65, 64, 39, 43, 62, 66, 63, 66, 57, 50, 64, 149, 58, 78, 72, 64, 45, 54, 64, 43, 73, 69, 63, 75, 97, 132, 63, 30, 64, 74, 57, 86, 39, 99, 59, 61, 72, 66, 69, 81, 61, 46, 63, 61, 73, 49, 61, 42, 39, 73, 40, 63, 79, 78, 69, 59, 80, 56, 63, 81, 82, 65, 129, 76, 49, 58, 71, 65, 56, 63, 61, 46, 58, 92, 53, 59, 119, 36, 55, 84, 61, 88, 49, 64, 65, 63, 70, 71, 64, 96, 67, 57, 66, 74, 68, 54, 59, 96, 46, 77, 51, 77, 58, 68, 66, 72, 57, 67, 104, 56, 74, 86, 63, 67, 35, 60, 56, 78, 61, 46, 55, 62, 50, 78, 70, 68, 84, 69, 61, 64, 73, 52, 61, 74, 95, 74, 65, 48, 62, 60, 90, 80, 63, 88, 63, 69, 63, 63, 60, 60, 77, 56, 88, 66, 44, 63, 51, 69, 96, 69, 60, 59, 68, 80, 53, 78, 55, 60, 95, 62, 51, 61, 60, 89, 72, 61, 67, 80, 90, 53, 62, 59, 60, 69, 55, 41, 67, 60, 52, 76, 52, 69, 50, 78, 83, 62, 79, 78, 68, 58, 58, 51, 72, 85, 56, 54, 43, 90, 77, 102, 83, 50, 109, 144, 49, 66, 60, 54, 52, 79, 70, 74, 58, 63, 47, 43, 70, 89, 63, 67, 48, 69, 75, 36, 54, 49, 73, 60, 58, 53, 88, 87, 60, 59, 61, 42, 57, 60, 55, 50, 54, 58, 76, 47, 63, 75, 68, 73, 57, 89, 84, 106, 70, 74, 49, 73, 80, 79, 55, 74, 52, 87, 52, 50, 79, 65, 74, 69, 45, 64, 77, 71, 69, 58, 61, 57, 82, 52, 76, 58, 96, 88, 77, 54, 85, 62, 70, 62, 65, 48, 75, 60, 67, 58, 62, 80, 65, 84, 70, 79, 79, 45, 65, 44, 79, 65, 81, 65, 60, 62, 79, 58, 76, 59, 77, 68, 79, 66, 70, 48, 72, 52, 88, 83, 70, 83, 86, 76, 73, 64, 68, 62, 73, 42, 84, 57, 53, 71, 75, 63, 58, 48, 70, 42, 96, 87, 58, 60, 49, 71, 74, 76, 56, 103, 60, 64, 58, 72, 64, 93, 72, 68, 48, 68, 113, 53, 77, 67, 58, 55, 64, 68, 67, 55, 72, 44, 59, 65, 59, 58, 67, 55, 78, 56, 74, 65, 38, 76, 57, 58, 76, 81, 72, 58, 44, 96, 75, 71, 74, 83, 68, 71, 92, 46, 65, 56, 83, 66, 65, 59, 78, 61, 81, 64, 54, 47, 87, 74, 47, 62, 49, 64, 83, 61, 69, 85, 59, 62, 70, 41, 36, 70, 75, 76, 78, 60, 54, 43, 64, 71, 75, 69, 53, 67, 71, 47, 64, 79, 81, 53, 63, 52, 97, 54, 65, 77, 74, 62, 50, 61, 61, 65, 74, 47, 86, 61, 55, 78, 68, 78, 58, 53, 41, 68, 63, 50, 47, 54, 65, 61, 56, 53, 55, 80, 101, 62, 53, 49, 55, 50, 63, 67, 50, 62, 41, 56, 58, 56, 50, 41, 60, 42, 75, 63, 34, 58, 47, 89, 
92, 74, 73, 57, 72, 43, 45, 79, 84, 80, 64, 84, 56, 47, 80, 65, 59, 60, 88, 78, 50, 65, 62, 85, 66, 54, 81, 64, 87, 47, 73, 41, 69, 91, 48, 68, 111, 55, 55, 57, 88, 57, 49, 64, 63, 65, 64, 58, 63, 63, 67, 75, 68, 78, 41, 67, 78, 66, 108, 44, 62, 63, 63, 64, 61, 70, 86, 48, 88, 46, 81, 65, 58, 62, 74, 44, 76, 58, 75, 62, 70, 65, 120, 66, 48, 52, 62, 58, 59, 55, 26, 46, 66, 57, 70, 59, 76, 54, 54, 54, 65, 62, 51, 65, 77, 78, 46, 63, 66, 79, 84, 68, 93, 49, 42, 75, 60, 61, 83, 55, 69, 98, 68, 56, 71, 68, 61, 43, 47, 77, 87, 54, 64, 67, 57, 62, 64, 79, 62, 54, 57, 69, 67, 64, 106, 78, 54, 88, 45, 59, 42, 65, 72, 50, 72, 59, 35, 85, 52, 62, 60, 74, 56, 61, 65, 55, 73, 52, 58, 69, 53, 60, 72, 49, 61, 71, 57, 85, 55, 75, 66, 57, 82, 44, 48, 75, 64, 50, 69, 101, 73, 69, 84, 53, 54, 70, 58, 68, 83, 66, 95, 65, 141, 81, 64, 69, 36, 84, 71, 75, 60, 70, 78, 81, 57, 94, 70, 61, 84, 57, 66, 87, 60, 47, 51, 49, 78, 66, 60, 77, 49, 49, 65, 78, 96, 86, 70, 66, 72, 64, 70, 67, 65, 61, 58, 83, 143, 67, 72, 75, 75, 55, 87, 67, 94, 43, 48, 72, 81, 73, 58, 65, 88, 77, 97, 50, 51, 46, 111, 83, 46, 44, 40, 70, 75, 60, 80, 47, 57, 55, 76, 99, 80, 61, 51, 63, 60, 70, 85, 90, 140, 66, 64, 45, 58, 58, 82, 52, 61, 68, 63, 63, 63, 77, 56, 70, 66, 69, 57, 77, 42, 81, 60, 70, 60, 70, 47, 62, 79, 71, 62, 59, 79, 72, 77, 63, 71, 89, 59, 64, 60, 99, 55, 53, 94, 64, 57, 54, 58, 73, 62, 53, 65, 47, 58, 74, 59, 68, 73, 71, 64, 68, 97, 59, 83, 55, 58, 64, 62, 70, 63, 59, 69, 73, 68, 72, 64, 79, 63, 51, 59, 59, 55, 54, 64, 58, 75, 61, 76, 73, 63, 60, 76, 56, 57, 58, 82, 92, 55, 65, 86, 64, 57, 74, 66, 64, 82, 66, 54, 55, 61, 59, 61, 68, 79, 65, 46, 64, 62, 79, 45, 52, 64, 62, 81, 86, 66, 64, 75, 56, 82, 71, 72, 88, 52, 68, 86, 53, 65, 74, 65, 80, 102, 77, 74, 73, 51, 68, 65, 77, 63, 62, 73, 70, 76, 70, 64, 59, 109, 66, 67, 60, 66, 68, 62, 63, 50, 64, 47, 56, 59, 58, 47, 68, 64, 57, 86, 56, 59, 66, 57, 53, 61, 58, 55, 72, 70, 60, 79, 53, 96, 62, 54, 59, 77, 61, 63, 100, 75, 68, 79, 72, 55, 55, 46, 68, 63, 43, 60, 58, 72, 118, 73, 81, 59, 62, 87, 66, 53, 52, 144, 66, 51, 62, 57, 73, 65, 60, 68, 58, 71, 60, 62, 85, 60, 97, 81, 86, 65, 63, 56, 60, 56, 73, 63, 61, 87, 81, 54, 53, 64, 59, 55, 73, 66, 64, 91, 98, 55, 64, 107, 45, 47, 84, 74, 68, 64, 56, 74, 113, 57, 61, 62, 63, 72, 68, 65, 64, 95, 59, 86, 77, 63, 78, 61, 69, 64, 73, 56, 62, 73, 121, 55, 61, 62, 56, 70, 57, 58, 37, 106, 63, 90, 83, 59, 60, 50, 75, 62, 48, 79, 69, 65, 57, 38, 64, 57, 54, 58, 93, 65, 58, 74, 54, 61, 65, 60, 71, 55, 57, 73, 48, 61, 67, 60, 56, 74, 73, 63, 87, 57, 75, 55, 80, 56, 67, 68, 75, 56, 62, 74, 76, 63, 78, 75, 69, 60, 83, 60, 70, 56, 79, 78, 58, 64, 63, 57, 58, 57, 63, 62, 78, 63, 54, 55, 72, 84, 57, 57, 60, 64, 63, 44, 70, 61, 68, 62, 43, 50, 71, 64, 55, 51, 73, 69, 63, 68, 65, 74, 71, 51, 45, 73, 54, 55, 67, 81, 80, 59, 68, 64, 54, 86, 65, 72, 53, 91, 63, 63, 56, 77, 58, 70, 71, 70, 66, 56, 84, 58, 66, 57, 53, 125, 83, 54, 66, 51, 78, 56, 45, 69, 60, 54, 62, 65, 56, 60, 96, 82, 76, 42, 55, 69, 66, 68, 66, 64, 69, 53, 54, 63, 68, 53, 45, 45, 90, 95, 60, 74, 66, 73, 66, 55, 74, 54, 73, 89, 72, 61, 68, 69, 58, 49, 67, 61, 86, 71, 46, 53, 63, 75, 79, 66, 46, 67, 66, 61, 54, 63, 78, 59, 69, 60, 70, 61, 63, 59, 58, 52, 79, 80, 107, 51, 69, 70, 50, 78, 64, 60, 47, 74, 68, 62, 72, 87, 51, 55, 69, 59, 62, 65, 63, 59, 42, 64, 57, 51, 79, 73, 70, 66, 61, 64, 57, 50, 42, 66, 75, 58, 64, 49, 53, 67, 59, 46, 61, 66, 99, 40, 71, 61, 51, 62, 77, 54, 72, 44, 62, 57, 86, 58, 80, 58, 68, 81, 68, 50, 68, 57, 66, 50, 53, 45, 59, 67, 54, 65, 53, 58, 65, 62, 57, 76, 
51, 57, 82, 46, 78, 54, 55, 70, 81, 61, 66, 72, 51, 57, 65, 53, 67, 56, 64, 60, 53, 88, 63, 54, 61, 91, 83, 59, 65, 73, 63, 82, 63, 67, 59, 73, 50, 84, 86, 63, 67, 54, 56, 57, 65, 57, 70, 69, 72, 75, 56, 64, 119, 62, 52, 66, 58, 54, 57, 59, 50, 61, 59, 55, 60, 61, 75, 74, 67, 77, 63, 62, 56, 72, 73, 60, 139, 72, 84, 84, 54, 65, 72, 60, 110, 63, 61, 59, 73, 63, 57, 59, 68, 51, 76, 60, 94, 65, 95, 83, 65, 61, 65, 65, 56, 94, 68, 98, 84, 63, 54, 62, 69, 80, 60, 69, 53, 83, 88, 66, 59, 68, 64, 62, 70, 70, 46, 70, 75, 94, 55, 45, 76, 57, 53, 63, 58, 53, 79, 65, 80, 66, 61, 70, 76, 73, 71, 87, 44, 79, 57, 80, 67, 71, 68, 46, 64, 72, 56, 67, 62, 73, 62, 61, 56, 86, 76, 60, 61, 54, 73, 36, 93, 73, 65, 64, 88, 68, 68, 74, 79, 71, 53, 55, 58, 74, 61, 59, 60, 48, 66, 52, 81, 69, 68, 62, 54, 57, 60, 69, 80, 54, 62, 66, 68, 68, 75, 84, 61, 66, 115, 63, 70, 80, 58, 70, 71, 53, 59, 59, 69, 77, 50, 70, 81, 61, 78, 68, 67, 55, 73, 62, 60, 55, 65, 75, 76, 77, 55, 73, 57, 74, 78, 68, 69, 76, 64, 54, 61, 80, 59, 75, 54, 48, 70, 58, 79, 75, 108, 64, 64, 60, 49, 65, 74, 70, 77, 55, 34, 56, 73, 74, 73, 58, 70, 46, 59, 58, 98, 126, 87, 56, 75, 64, 66, 34, 69, 64, 62, 89, 50, 78, 59, 62, 65, 62, 82, 56, 69, 68, 66, 77, 76, 82, 52, 122, 60, 92, 63, 76, 62, 53, 64, 97, 64, 56, 61, 111, 61, 63, 61, 80, 60, 61, 70, 77, 83, 75, 77, 55, 63, 66, 67, 59, 60, 66, 56, 82, 71, 53, 68, 59, 53, 60, 76, 85, 60, 64, 63, 60, 61, 59, 55, 75, 61, 71, 58, 85, 57, 65, 48, 71, 64, 83, 59, 76, 65, 54, 84, 64, 61, 63, 68, 69, 74, 68, 64, 74, 52, 53, 64, 48, 69, 86, 70, 67, 85, 67, 62, 63, 67, 88, 71, 69, 61, 74, 47, 54, 66, 68, 59, 64, 54, 90, 63, 64, 61, 79, 65, 67, 63, 71, 62, 66, 63, 47, 45, 76, 58, 78, 62, 62, 58, 59, 63, 58, 50, 53, 69, 55, 70, 61, 86, 66, 64, 94, 56, 67, 65, 49, 68, 77, 48, 58, 81, 49, 116, 59, 86, 62, 74, 45, 62, 68, 84, 67, 46, 68, 61, 74, 72, 55, 58, 88, 67, 77, 57, 68, 57, 73, 82, 56, 72, 62, 79, 53, 52, 56, 62, 65, 72, 117, 88, 54, 70, 74, 91, 66, 65, 64, 81, 50, 58, 38, 64, 67, 76, 68, 72, 95, 61, 58, 60, 62, 73, 67, 65, 58, 66, 65, 38, 58, 59, 48, 68, 71, 46, 59, 59, 72, 68, 78, 68, 59, 64, 51, 64, 50, 48, 70, 77, 53, 63, 57, 50, 80, 56, 33, 70, 73, 51, 58, 60, 73, 78, 62, 74, 63, 67, 52, 67, 60, 61, 53, 81, 93, 86, 77, 73, 64, 82, 65, 60, 81, 63, 63, 93, 60, 46, 45, 58, 68, 94, 50, 73, 98, 61, 46, 63, 51, 66, 53, 70, 69, 77, 78, 50, 82, 49, 61, 63, 40, 57, 88, 55, 56, 52, 71, 80, 66, 72, 85, 51, 69, 62, 48, 52, 83, 79, 42, 70, 72, 63, 84, 60, 49, 55, 50, 82, 53, 61, 77, 55, 69, 44, 55, 72, 56, 71, 72, 71, 37, 91, 72, 56, 74, 102, 46, 55, 54, 63, 59, 51, 73, 71, 64, 72, 51, 74, 53, 65, 79, 67, 48, 62, 77, 82, 76, 65, 50, 81, 67, 89, 56, 55, 96, 60, 81, 73, 66, 70, 60, 68, 92, 85, 93, 65, 75, 101, 57, 83, 36, 59, 80, 65, 62, 78, 60, 51, 49, 56, 77, 62, 74, 79, 57, 69, 55, 55, 62, 87, 63, 65, 55, 87, 90, 70, 75, 53, 60, 67, 65, 91, 61, 53, 57, 75, 58, 38, 68, 66, 62, 69, 62, 61, 82, 65, 113, 47, 83, 69, 67, 74, 45, 78, 45, 72, 64, 38, 74, 52, 95, 83, 76, 60, 75, 85, 58, 76, 62, 91, 71, 46, 61, 67, 69, 54, 60, 63, 86, 70, 78, 85, 66, 64, 49, 64, 74, 69, 59, 45, 52, 68, 36, 101, 60, 59, 73, 57, 73, 62, 74, 74, 37, 91, 69, 52, 77, 79, 65, 46, 80, 77, 71, 65, 54, 53, 94, 37, 54, 61, 58, 49, 64, 55, 79, 66, 44, 58, 56, 76, 50, 72, 59, 71, 62, 45, 70, 59, 62, 72, 49, 34, 66, 66, 60, 64, 67, 85, 99, 100, 59, 50, 52, 42, 72, 51, 53, 69, 117, 64, 52, 75, 68, 59, 57, 106, 94, 87, 108, 74, 71, 57, 61, 55, 106, 55, 77, 81, 86, 54, 47, 91, 80, 167, 56, 74, 58, 46, 75, 66, 63, 59, 51, 71, 50, 87, 72, 66, 46, 53, 57, 100, 
70, 59, 70, 58, 55, 53, 57, 57, 104, 86, 77, 63, 80, 48, 63, 74, 47, 63, 34, 65, 76, 64, 56, 88, 60, 88, 93, 66, 72, 78, 67, 55, 71, 54, 64, 64, 90, 61, 69, 66, 57, 59, 45, 64, 82, 81, 68, 151, 71, 62, 61, 103, 52, 43, 88, 54, 65, 67, 108, 69, 86, 41, 60, 71, 73, 87, 75, 56, 60, 50, 49, 58, 102, 78, 43, 83, 67, 80, 85, 61, 60, 69, 59, 79, 51, 85, 60, 91, 48, 55, 61, 62, 66, 74, 103, 49, 61, 58, 49, 45, 60, 58, 49, 56, 52, 76, 51, 89, 60, 63, 60, 57, 59, 60, 63, 69, 121, 82, 90, 48, 55, 63, 53, 69, 69, 35, 112, 93, 61, 78, 108, 60, 65, 70, 46, 63, 84, 82, 51, 101, 50, 63, 77, 69, 69, 83, 80, 71, 69, 58, 68, 63, 68, 60, 87, 86, 125, 57, 63, 66, 62, 66, 63, 91, 71, 71, 49, 72, 75, 71, 74, 71, 65, 58, 68, 79, 66, 73, 97, 60, 73, 51, 61, 70, 54, 83, 64, 67, 63, 54, 56, 56, 62, 42, 28, 45, 68, 39, 76, 75, 58, 52, 49, 76, 43, 59, 71, 48, 61, 50, 42, 53, 85, 89, 49, 61, 52, 67, 68, 73, 51, 82, 85, 73, 65, 58, 61, 50, 100, 55, 40, 58, 78, 83, 62, 81, 52, 71, 63, 48, 73, 51, 83, 83, 61, 57, 50, 68, 48, 55, 75, 70, 55, 66, 75, 49, 70, 52, 85, 60, 70, 57, 59, 78, 55, 64, 82, 55, 56, 80, 76, 52, 70, 63, 63, 86, 46, 49, 57, 56, 64, 54, 77, 60, 76, 46, 89, 55, 65, 95, 48, 68, 54, 131, 58, 58, 66, 96, 68, 66, 61, 71, 59, 73, 54, 83, 69, 56, 53, 55, 72, 73, 48, 75, 84, 62, 67, 58, 73, 49, 52, 64, 36, 51, 93, 68, 54, 56, 105, 74, 61, 77, 76, 57, 59, 71, 70, 57, 78, 88, 68, 70, 68, 63, 60, 59, 82, 50, 46, 56, 51, 52, 62, 36, 70, 79, 56, 61, 52, 46, 85, 40, 55, 51, 85, 80, 43, 56, 69, 88, 64, 55, 58, 57, 48, 90, 80, 73, 69, 61, 74, 89, 85, 81, 66, 68, 60, 60, 47, 56, 59, 95, 88, 76, 75, 46, 62, 49, 62, 67, 54, 79, 62, 68, 65, 47, 59, 44, 81, 86, 68, 58, 61, 71, 55, 83, 50, 91, 81, 95, 54, 61, 108, 64, 59, 63, 60, 57, 40, 65, 68, 66, 75, 59, 57, 69, 62, 55, 58, 72, 55, 66, 57, 84, 87, 45, 52, 64, 78, 50, 64, 42, 73, 61, 76, 59, 46, 30, 79, 47, 50, 93, 48, 70, 43, 64, 55, 75, 74, 70, 85, 77, 47, 68, 72, 78, 89, 64, 134, 73, 62, 51, 77, 80, 62, 56, 61, 90, 70, 59, 54, 54, 58, 44, 88, 39, 70, 84, 48, 81, 48, 72, 56, 48, 71, 49, 79, 70, 91, 71, 73, 52, 46, 55, 66, 71, 60, 52, 54, 43, 55, 99, 78, 78, 74, 91, 57, 60, 74, 64, 70, 70, 60, 57, 74, 62, 64, 60, 71, 50, 60, 53, 59, 56, 71, 47, 64, 66, 86, 53, 67, 79, 53, 78, 83, 73, 68, 66, 51, 48, 59, 75, 50, 80, 68, 81, 91, 48, 58, 37, 68, 71, 76, 90, 71, 75, 54, 53, 78, 57, 48, 47, 73, 49, 58, 58, 67, 99, 50, 76, 62, 50, 66, 36, 78, 87, 77, 58, 52, 60, 79, 71, 71, 58, 60, 61, 65, 67, 45, 65, 41, 31, 52, 58, 71, 81, 44, 55, 51, 61, 79, 76, 74, 79, 75, 65, 48, 66, 61, 53, 89, 95, 67, 69, 50, 67, 87, 57, 76, 77, 81, 59, 88, 39, 56, 68, 81, 48, 59, 61, 67, 74, 58, 61, 55, 75, 72, 61, 58, 48, 76, 57, 60, 56, 85, 73, 65, 55, 61, 54, 82, 55, 68, 60, 78, 41, 59, 43, 69, 71, 54, 44, 66, 70, 61, 49, 101, 44, 74, 71, 47, 66, 67, 45, 76, 60, 100, 71, 83, 68, 69, 73, 50, 43, 84, 40, 66, 78, 75, 52, 67, 61, 70, 60, 40, 97, 73, 84, 63, 53, 60, 75, 54, 86, 92, 74, 88, 61, 49, 98, 66, 67, 83, 82, 72, 57, 69, 56, 68, 77, 38, 76, 77, 55, 77, 65, 83, 96, 59, 113, 64, 77, 83, 75, 64, 82, 52, 72, 82, 48, 37, 65, 57, 62, 76, 61, 58, 85, 75, 69, 66, 58, 62, 90, 60, 64, 53, 113, 53, 104, 69, 71, 36, 79, 78, 37, 96, 50, 51, 62, 47, 49, 46, 56, 67, 71, 64, 57, 67, 69, 71, 88, 61, 81, 49, 80, 87, 71, 73, 67, 63, 48, 74, 54, 73, 66, 79, 79, 51, 65, 94, 48, 80, 51, 86, 59, 71, 91, 60, 65, 59, 72, 87, 57, 69, 67, 52, 71, 49, 76, 60, 65, 66, 67, 63, 92, 58, 53, 46, 78, 53, 58, 74, 59, 70, 68, 56, 73, 63, 43, 121, 67, 69, 63, 69, 56, 73, 85, 100, 62, 55, 47, 53, 57, 70, 42, 66, 96, 58, 59, 64, 75, 51, 
83, 57, 70, 30, 83, 35, 65, 85, 78, 81, 60, 79, 72, 72, 46, 64, 69, 45, 50, 65, 74, 69, 49, 51, 65, 57, 67, 69, 99, 71, 63, 77, 76, 63, 84, 61, 65, 54, 40, 71, 56, 40, 74, 48, 76, 68, 74, 36, 99, 57, 68, 65, 51, 48, 85, 80, 51, 62, 63, 53, 77, 90, 55, 94, 51, 86, 69, 50, 77, 59, 63, 70, 77, 69, 58, 74, 91, 52, 63, 65, 49, 54, 52, 64, 62, 39, 95, 57, 57, 60, 84, 45, 57, 70, 67, 49, 62, 87, 50, 72, 52, 48, 50, 104, 88, 118, 60, 97, 52, 64, 68, 146, 59, 103, 61, 81, 75, 110, 57, 66, 73, 48, 158, 57, 60, 84, 75, 84, 63, 60, 68, 81, 56, 87, 85, 70, 65, 55, 41, 74, 97, 104, 63, 74, 51, 74, 56, 55, 63, 46, 53, 64, 62, 88, 58, 43, 54, 61, 46, 51, 89, 96, 59, 58, 59, 57, 64, 69, 70, 74, 72, 55, 58, 67, 70, 66, 66, 50, 45, 57, 45, 78, 46, 56, 50, 71, 63, 75, 156, 90, 62, 54, 102, 52, 45, 82, 70, 73, 66, 81, 59, 44, 55, 57, 62, 53, 98, 63, 81, 80, 84, 58, 61, 67, 63, 61, 61, 80, 80, 82, 62, 57, 86, 73, 92, 56, 73, 65, 72, 56, 66, 68, 107, 60, 79, 65, 82, 62, 60, 53, 57, 74, 86, 57, 57, 64, 51, 69, 68, 63, 83, 77, 62, 73, 74, 58, 72, 140, 88, 58, 62, 50, 60, 65, 68, 105, 96, 69, 71, 26, 71, 62, 70, 65, 72, 49, 65, 68, 47, 39, 149, 70, 53, 69, 73, 105, 85, 59, 87, 58, 62, 121, 51, 73, 85, 45, 86, 96, 38, 54, 63, 61, 49, 85, 63, 49, 62, 80, 69, 58, 68, 41, 53, 54, 58, 71, 51, 63, 51, 64, 52, 55, 64, 64, 49, 67, 84, 55, 63, 52, 82, 53, 66, 70, 47, 56, 66, 72, 45, 81, 56, 66, 65, 68, 92, 36, 55, 87, 71, 55, 75, 57, 73, 50, 103, 72, 53, 79, 70, 53, 62, 48, 66, 95, 67, 43, 61, 50, 40, 93, 60, 60, 74, 61, 67, 87, 84, 38, 81, 65, 65, 81, 56, 63, 62, 50, 65, 41, 63, 69, 71, 64, 63, 41, 55, 72, 54, 70, 62, 70, 65, 86, 55, 78, 91, 48, 81, 62, 48, 55, 60, 88, 60, 53, 73, 60, 82, 61, 50, 45, 77, 79, 47, 55, 82, 88, 48, 44, 70, 54, 78, 57, 50, 72, 58, 63, 77, 59, 77, 66, 76, 63, 87, 60, 50, 63, 66, 63, 74, 73, 61, 74, 61, 68, 72, 69, 40, 83, 59, 82, 47, 65, 90, 67, 47, 84, 60, 57, 75, 76, 65, 53, 58, 52, 85, 58, 65, 63, 71, 50, 52, 58, 84, 71, 69, 62, 46, 102, 76, 66, 49, 59, 75, 73, 85, 64, 80, 66, 64, 86, 57, 63, 71, 76, 59, 64, 54, 76, 95, 82, 62, 71, 41, 52, 56, 49, 82, 59, 73, 82, 69, 78, 72, 60, 69, 68, 67, 81, 67, 57, 65, 86, 51, 82, 63, 49, 65, 42, 52, 46, 74, 78, 71, 70, 93, 73, 59, 69, 81, 69, 78, 85, 73, 58, 63, 72, 85, 64, 95, 54, 110, 61, 53, 102, 74, 79, 99, 77, 57, 42, 64, 79, 86, 45, 78, 49, 61, 52, 64, 60, 59, 62, 55, 52, 43, 97, 56, 41, 43, 48, 58, 72, 50, 61, 76, 60, 59, 83, 60, 68, 56, 43, 74, 50, 76, 46, 57, 71, 65, 65, 116, 90, 78, 64, 63, 70, 67, 65, 66, 144, 72, 65, 65, 75, 53, 69, 106, 62, 88, 82, 52, 71, 64, 72, 56, 62, 76, 67, 70, 76, 64, 47, 57, 51, 54, 71, 61, 60, 67, 48, 58, 95, 67, 58, 56, 73, 98, 59, 56, 60, 64, 54, 77, 70, 57, 94, 73, 40, 61, 47, 98, 58, 74, 48, 50, 55, 65, 57, 68, 63, 71, 82, 75, 42, 47, 53, 66, 61, 70, 90, 40, 67, 93, 66, 55, 49, 70, 72, 61, 52, 52, 63, 72, 56, 67, 96, 77, 49, 40, 91, 77, 64, 61, 59, 88, 61, 44, 50, 78, 69, 70, 61, 63, 60, 73, 53, 72, 60, 76, 86, 64, 63, 39, 83, 41, 61, 75, 76, 81, 94, 61, 94, 59, 97, 91, 90, 63, 43, 52, 88, 81, 49, 55, 56, 71, 50, 66, 88, 70, 64, 70, 73, 59, 83, 72, 67, 46, 58, 61, 86, 42, 81, 80, 72, 70, 72, 88, 65, 62, 61, 63, 50, 67, 65, 62, 64, 57, 70, 38, 62, 90, 86, 99, 108, 79, 51, 56, 64, 71, 57, 45, 64, 65, 71, 62, 57, 81, 63, 51, 80, 94, 63, 69, 65, 71, 61, 75, 55, 36, 63, 69, 53, 52, 47, 64, 40, 136, 69, 55, 96, 74, 47, 67, 39, 53, 57, 57, 81, 45, 44, 71, 103, 69, 68, 60, 51, 61, 53, 40, 51, 49, 48, 49, 69, 68, 64, 90, 53, 71, 51, 68, 70, 69, 51, 42, 57, 64, 83, 67, 81, 71, 53, 64, 46, 61, 78, 62, 60, 48, 76, 54, 71, 
74, 61, 56, 53, 21, 97, 47, 71, 71, 89, 56, 56, 94, 79, 73, 65, 82, 46, 52, 85, 88, 60, 70, 87, 60, 67, 63, 62, 61, 59, 42, 47, 64, 53, 86, 83, 96, 44, 75, 118, 86, 71, 49, 61, 64, 68, 75, 74, 61, 55, 99, 76, 58, 77, 53, 74, 77, 60, 82, 52, 43, 77, 56, 91, 57, 61, 54, 65, 54, 64, 68, 105, 72, 59, 72, 63, 49, 57, 46, 65, 103, 49, 39, 72, 56, 70, 80, 70, 48, 53, 65, 61, 69, 81, 61, 72, 62, 85, 82, 71, 58, 46, 47, 74, 56, 101, 65, 53, 49, 65, 61, 78, 57, 77, 60, 37, 80, 62, 55, 84, 65, 65, 60, 80, 96, 91, 79, 62, 45, 63, 49, 57, 92, 68, 56, 57, 59, 53, 60, 57, 72, 60, 52, 48, 64, 73, 60, 71, 83, 103, 55, 72, 86, 85, 77, 76, 52, 82, 65, 85, 60, 38, 64, 54, 67, 72, 79, 63, 42, 81, 72, 77, 46, 62, 65, 50, 60, 108, 58, 85, 58, 48, 61, 57, 56, 72, 61, 90, 107, 47, 121, 70, 60, 56, 60, 66, 94, 61, 64, 80, 89, 59, 76, 82, 64, 66, 81, 89, 57, 51, 77, 44, 60, 72, 76, 60, 75, 72, 54, 59, 85, 65, 29, 64, 58, 46, 65, 60, 49, 50, 60, 46, 78, 57, 177, 139, 54, 57, 50, 89, 66, 74, 85, 61, 50, 50, 59, 42, 61, 51, 81, 45, 64, 74, 73, 77, 79, 78, 70, 62, 115, 71, 61, 51, 75, 89, 62, 69, 70, 74, 78, 53, 60, 71, 71, 67, 51, 92, 71, 73, 53, 77, 34, 64, 60, 42, 53, 56, 65, 72, 64, 65, 65, 53, 91, 68, 83, 77, 62, 53, 53, 68, 51, 77, 78, 68, 58, 52, 58, 68, 64, 50, 57, 63, 48, 51, 46, 62, 61, 55, 87, 40, 60, 62, 70, 62, 46, 81, 90, 88, 65, 60, 61, 46, 65, 67, 38, 32, 67, 47, 39, 60, 61, 53, 86, 58, 48, 65, 75, 39, 95, 49, 61, 50, 49, 65, 62, 53, 59, 47, 60, 53, 47, 70, 60, 42, 84, 69, 83, 85, 85, 53, 53, 67, 55, 69, 50, 57, 56, 59, 83, 64, 69, 42, 70, 53, 54, 83, 80, 53, 50, 48, 61, 75, 80, 56, 54, 68, 48, 43, 65, 57, 65, 44, 58, 46, 87, 45, 67, 126, 41, 68, 92, 70, 73, 63, 77, 86, 66, 57, 118, 60, 70, 63, 43, 65, 52, 109, 61, 58, 81, 101, 51, 55, 51, 65, 71, 90, 54, 87, 38, 40, 88, 89, 37, 78, 80, 42, 73, 50, 63, 50, 70, 94, 75, 59, 61, 74, 67, 80, 60, 72, 58, 63, 66, 83, 60, 98, 82, 54, 67, 50, 59, 56, 94, 61, 46, 75, 78, 62, 81, 58, 64, 79, 41, 47, 62, 66, 96, 77, 66, 71, 75, 54, 49, 65, 43, 80, 55, 65, 81, 73, 60, 70, 107, 52, 55, 63, 59, 83, 80, 53, 58, 62, 51, 72, 64, 59, 55, 74, 62, 102, 42, 97, 39, 60, 87, 59, 69, 81, 59, 63, 85, 63, 61, 68, 54, 76, 105, 60, 63, 110, 51, 83, 53, 56, 83, 67, 72, 42, 74, 91, 52, 75, 59, 43, 94, 78, 65, 39, 83, 98, 27, 66, 61, 79, 79, 34, 49, 71, 86, 61, 58, 61, 72, 68, 58, 58, 74, 55, 83, 78, 41, 56, 69, 54, 57, 50, 35, 69, 53, 55, 49, 64, 62, 67, 69, 60, 53, 69, 105, 43, 64, 51, 67, 55, 77, 57, 90, 95, 75, 36, 95, 59, 81, 79, 58, 60, 63, 57, 62, 64, 91, 65, 59, 55, 70, 69, 69, 75, 61, 162, 84, 77, 60, 56, 52, 57, 73, 130, 86, 64, 62, 81, 73, 62, 59, 87, 52, 76, 58, 77, 95, 68, 49, 92, 46, 52, 67, 75, 122, 53, 52, 43, 52, 56, 83, 111, 67, 49, 71, 68, 43, 84, 72, 51, 79, 73, 63, 48, 76, 55, 57, 85, 57, 78, 77, 55, 68, 58, 114, 55, 22, 64, 65, 76, 87, 40, 107, 46, 59, 34, 47, 59, 91, 73, 61, 52, 73, 60, 48, 95, 37, 73, 76, 86, 70, 55, 61, 70, 55, 60, 78, 86, 77, 60, 75, 66, 82, 66, 62, 64, 52, 79, 66, 55, 98, 68, 59, 69, 39, 67, 69, 76, 69, 78, 71, 50, 65, 50, 79, 54, 58, 61, 51, 64, 54, 70, 59, 65, 51, 55, 92, 75, 62, 61, 71, 82, 80, 70, 50, 80, 53, 64, 74, 51, 59, 67, 49, 55, 68, 71, 54, 111, 75, 61, 56, 51, 57, 76, 35, 61, 110, 63, 66, 96, 55, 36, 93, 81, 60, 72, 56, 80, 73, 58, 71, 51, 55, 69, 69, 75, 77, 83, 54, 50, 80, 71, 58, 79, 100, 69, 82, 70, 54, 66, 70, 68, 73, 49, 83, 165, 64, 55, 40, 68, 59, 71, 82, 61, 73, 84, 83, 70, 58, 62, 49, 67, 74, 69, 62, 67, 57, 58, 67, 58, 75, 35, 105, 67, 63, 56, 78, 60, 60, 61, 60, 69, 80, 56, 107, 54, 106, 57, 58, 80, 81, 52, 63, 57, 
52, 73, 94, 76, 86, 68, 88, 83, 55, 59, 59, 70, 69, 58, 63, 43, 54, 41, 54, 74, 70, 63, 57, 55, 65, 51, 74, 82, 62, 81, 83, 53, 74, 49, 100, 86, 74, 63, 58, 56, 78, 73, 93, 101, 46, 73, 75, 72, 70, 66, 54, 56, 74, 71, 62, 61, 52, 58, 63, 62, 59, 61, 63, 70, 60, 66, 62, 57, 84, 61, 91, 86, 57, 45, 56, 53, 61, 71, 59, 74, 52, 57, 62, 79, 51, 60, 65, 62, 77, 65, 73, 64, 57, 63, 74, 61, 52, 67, 62, 40, 110, 64, 76, 82, 70, 83, 50, 84, 61, 61, 68, 74, 42, 78, 77, 75, 63, 74, 63, 86, 52, 109, 53, 54, 78, 61, 48, 54, 51, 63, 93, 67, 68, 75, 70, 76, 67, 129, 85, 52, 90, 64, 61, 66, 89, 72, 63, 77, 64, 42, 65, 71, 64, 69, 55, 68, 60, 71, 68, 57, 65, 37, 61, 83, 42, 60, 68, 81, 62, 69, 54, 101, 54, 64, 58, 25, 97, 63, 98, 60, 76, 73, 72, 72, 86, 75, 93, 50, 56, 65, 74, 69, 63, 44, 75, 66, 64, 55, 60, 58, 64, 68, 51, 109, 61, 64, 70, 80, 61, 71, 66, 56, 43, 52, 84, 61, 47, 68, 69, 69, 59, 74, 59, 78, 58, 81, 77, 66, 79, 59, 69, 59, 61, 57, 63, 64, 72, 127, 65, 97, 46, 61, 71, 34, 53, 56, 70, 74, 59, 81, 47, 84, 53, 74, 52, 65, 59, 50, 104, 68, 79, 70, 79, 85, 67, 67, 69, 78, 38, 80, 44, 57, 33, 77, 73, 59, 62, 60, 57, 61, 68, 52, 68, 78, 66, 69, 57, 81, 55, 65, 65, 68, 69, 49, 85, 64, 47, 56, 58, 115, 64, 58, 65, 48, 57, 69, 101, 73, 73, 72, 58, 81, 72, 57, 94, 64, 67, 60, 78, 63, 57, 71, 69, 69, 67, 114, 55, 59, 96, 72, 53, 62, 41, 97, 59, 44, 70, 95, 62, 39, 71, 76, 75, 52, 40, 71, 56, 76, 50, 58, 51, 62, 63, 63, 68, 58, 90, 52, 42, 47, 120, 53, 62, 63, 65, 59, 82, 83, 69, 68, 69, 67, 52, 54, 71, 51, 52, 62, 74, 81, 43, 53, 96, 70, 50, 60, 72, 54, 57, 56, 67, 66, 54, 56, 60, 71, 57, 72, 102, 74, 61, 65, 71, 43, 54, 79, 35, 46, 111, 96, 62, 68, 48, 57, 72, 42, 46, 35, 64, 69, 44, 62, 79, 71, 74, 58, 53, 122, 64, 44, 87, 49, 56, 73, 50, 61, 69, 60, 80, 62, 81, 73, 53, 65, 54, 86, 50, 71, 64, 62, 79, 49, 106, 76, 59, 67, 72, 63, 94, 62, 88, 63, 60, 78, 92, 72, 44, 61, 61, 40, 72, 91, 91, 52, 40, 84, 66, 69, 56, 58, 103, 59, 53, 63, 66, 72, 53, 48, 57, 54, 63, 66, 69, 56, 82, 95, 43, 61, 93, 57, 54, 72, 71, 76, 63, 61, 77, 88, 74, 62, 58, 81, 65, 50, 66, 60, 98, 64, 44, 58, 51, 52, 67, 73, 85, 61, 85, 68, 74, 75, 83, 43, 49, 48, 60, 54, 73, 49, 63, 54, 87, 63, 49, 66, 82, 84, 59, 83, 55, 73, 44, 60, 45, 71, 54, 68, 109, 71, 59, 76, 51, 46, 74, 63, 81, 62, 99, 63, 100, 50, 63, 72, 50, 49, 62, 63, 70, 50, 35, 58, 74, 79, 94, 66, 38, 58, 49, 71, 63, 81, 60, 60, 65, 90, 61, 58, 53, 58, 41, 65, 69, 37, 65, 64, 103, 54, 70, 60, 65, 70, 57, 58, 44, 69, 51, 53, 80, 63, 61, 57, 74, 53, 75, 54, 71, 84, 82, 69, 47, 60, 64, 65, 60, 52, 87, 47, 75, 81, 46, 76, 58, 50, 147, 53, 78, 102, 55, 82, 37, 55, 37, 55, 72, 49, 62, 56, 105, 72, 45, 86, 86, 42, 91, 86, 95, 55, 64, 85, 73, 61, 69, 61, 56, 105, 78, 62, 63, 43, 85, 60, 58, 80, 92, 61, 123, 67, 70, 72, 80, 60, 51, 60, 63, 62, 56, 73, 53, 52, 75, 69, 48, 91, 55, 85, 58, 62, 71, 48, 75, 59, 57, 56, 74, 62, 81, 63, 60, 81, 71, 56, 41, 61, 81, 70, 56, 55, 50, 70, 97, 85, 63, 77, 59, 48, 97, 45, 74, 109, 75, 58, 45, 90, 60, 104, 58, 68, 74, 81, 128, 52, 49, 57, 54, 82, 57, 56, 46, 145, 62, 69, 71, 60, 52, 62, 51, 37, 84, 64, 74, 66, 83, 50, 78, 58, 55, 62, 68, 59, 67, 67, 88, 56, 52, 81, 70, 81, 52, 41, 56, 66, 53, 46, 71, 66, 134, 59, 44, 56, 73, 53, 54, 65, 62, 67, 68, 55, 52, 74, 75, 89, 128, 45, 54, 61, 80, 78, 102, 94, 59, 70, 60, 83, 48, 66, 60, 62, 57, 71, 65, 61, 45, 69, 78, 77, 54, 67, 89, 50, 69, 53, 70, 70, 57, 63, 75, 48, 65, 73, 93, 68, 74, 60, 60, 80, 63, 64, 67, 54, 55, 68, 61, 45, 35, 61, 44, 72, 66, 49, 69, 93, 87, 95, 86, 85, 85, 57, 63, 65, 48, 57, 
73, 69, 64, 53, 66, 69, 61, 116, 107, 68, 53, 55, 56, 78, 71, 71, 62, 76, 61, 70, 49, 81, 55, 44, 57, 76, 67, 58, 111, 50, 65, 93, 74, 72, 62, 57, 86, 61, 63, 38, 66, 54, 92, 55, 78, 61, 59, 46, 64, 80, 59, 52, 50, 60, 63, 91, 61, 63, 51, 67, 68, 67, 74, 68, 74, 64, 53, 77, 67, 95, 71, 61, 71, 68, 67, 74, 58, 66, 68, 54, 56, 56, 73, 66, 82, 49, 62, 83, 81, 59, 78, 77, 62, 72, 56, 69, 57, 52, 58, 67, 56, 55, 78, 75, 60, 45, 73, 68, 67, 46, 54, 56, 90, 64, 77, 103, 66, 76, 47, 85, 53, 78, 37, 62, 54, 62, 54, 66, 51, 62, 63, 70, 84, 77, 64, 48, 42, 73, 57, 60, 58, 69, 96, 81, 68, 68, 87, 56, 44, 52, 51, 55, 71, 57, 56, 65, 49, 83, 52, 53, 50, 67, 84, 82, 79, 70, 64, 74, 60, 58, 75, 114, 56, 71, 66, 42, 89, 45, 65, 64, 55, 69, 64, 59, 53, 61, 74, 39, 87, 68, 52, 61, 103, 62, 66, 97, 52, 73, 48, 53, 85, 56, 63, 84, 68, 61, 45, 82, 61, 89, 42, 82, 79, 65, 76, 86, 59, 70, 56, 63, 95, 54, 61, 59, 80, 93, 52, 69, 69, 55, 50, 52, 76, 67, 75, 81, 58, 84, 66, 75, 66, 57, 83, 67, 94, 43, 67, 89, 57, 73, 102, 86, 70, 100, 53, 58, 32, 61, 80, 55, 67, 73, 87, 62, 57, 82, 53, 33, 88, 47, 54, 69, 86, 84, 79, 49, 63, 60, 82, 82, 75, 65, 63, 80, 55, 92, 77, 48, 65, 63, 50, 62, 70, 60, 65, 114, 55, 54, 48, 57, 41, 91, 73, 73, 43, 76, 71, 58, 73, 99, 78, 73, 68, 51, 69, 66, 81, 80, 48, 58, 59, 65, 51, 54, 64, 70, 59, 60, 45, 54, 61, 56, 43, 67, 79, 98, 69, 52, 45, 92, 66, 78, 65, 71, 72, 68, 88, 41, 79, 56, 92, 46, 67, 76, 62, 60, 73, 68, 58, 57, 85, 52, 65, 71, 86, 55, 79, 65, 76, 51, 53, 62, 67, 97, 63, 99, 60, 61, 57, 56, 76, 84, 64, 64, 66, 73, 48, 60, 67, 68, 72, 71, 59, 64, 83, 76, 48, 69, 46, 57, 52, 29, 82, 55, 65, 60, 66, 61, 52, 93, 54, 54, 74, 63, 51, 71, 82, 61, 66, 53, 42, 49, 55, 84, 59, 50, 87, 55, 63, 72, 45, 83, 64, 50, 61, 80, 63, 68, 79, 77, 63, 65, 74, 76, 53, 63, 79, 71, 55, 76, 54, 68, 55, 71, 62, 62, 60, 81, 37, 32, 59, 33, 56, 70, 53, 80, 75, 76, 71, 67, 50, 58, 68, 58, 61, 87, 44, 65, 55, 72, 62, 64, 55, 42, 75, 75, 67, 66, 60, 45, 49, 68, 51, 70, 67, 72, 48, 52, 72, 58, 56, 67, 64, 53, 58, 64, 61, 58, 67, 64, 53, 50, 99, 69, 51, 67, 81, 55, 116, 62, 36, 91, 89, 68, 66, 64, 57, 70, 75, 77, 88, 78, 52, 67, 70, 63, 70, 59, 57, 56, 56, 44, 49, 92, 70, 60, 57, 54, 63, 60, 68, 122, 66, 66, 41, 56, 48, 56, 94, 62, 41, 59, 86, 146, 84, 64, 68, 56, 62, 47, 47, 79, 69, 61, 36, 93, 71, 40, 53, 63, 73, 66, 70, 100, 53, 47, 71, 57, 54, 92, 57, 61, 72, 42, 79, 77, 69, 57, 59, 50, 65, 49, 48, 70, 54, 48, 88, 80, 52, 66, 60, 47, 55, 65, 42, 68, 65, 54, 59, 82, 78, 58, 64, 69, 62, 58, 81, 89, 81, 39, 48, 81, 69, 63, 78, 64, 90, 70, 51, 60, 34, 86, 59, 79, 52, 71, 112, 47, 36, 50, 100, 55, 53, 80, 59, 49, 82, 48, 81, 80, 58, 82, 59, 95, 56, 79, 78, 91, 78, 64, 46, 89, 54, 64, 102, 55, 80, 59, 80, 90, 49, 63, 142, 53, 64, 75, 92, 72, 60, 85, 107, 74, 65, 57, 41, 47, 49, 72, 74, 86, 65, 100, 60, 65, 79, 66, 75, 73, 50, 44, 36, 79, 59, 82, 66, 83, 60, 64, 80, 52, 52, 48, 43, 57, 73, 63, 57, 90, 89, 72, 85, 93, 64, 52, 42, 70, 67, 47, 81, 75, 62, 58, 76, 61, 59, 75, 69, 53, 72, 67, 39, 66, 86, 75, 80, 50, 51, 56, 120, 66, 54, 50, 60, 74, 74, 71, 82, 54, 74, 71, 51, 78, 51, 118, 78, 48, 80, 78, 65, 54, 49, 51, 65, 74, 50, 70, 91, 67, 68, 59, 76, 44, 74, 66, 55, 68, 60, 93, 58, 76, 67, 41, 54, 79, 49, 57, 75, 62, 77, 63, 57, 57, 72, 78, 52, 76, 67, 84, 90, 71, 53, 81, 63, 73, 61, 56, 74, 51, 55, 73, 45, 67, 44, 61, 55, 68, 97, 58, 89, 64, 78, 82, 68, 85, 58, 51, 53, 72, 39, 75, 70, 58, 73, 61, 53, 76, 59, 37, 84, 33, 104, 53, 63, 75, 56, 53, 69, 91, 62, 36, 77, 81, 107, 68, 52, 51, 53, 58, 73, 67, 50, 46, 68, 
60, 80, 60, 89, 62, 59, 62, 56, 49, 83, 82, 59, 60, 81, 63, 71, 67, 89, 50, 54, 96, 91, 62, 40, 68, 56, 91, 69, 43, 53, 65, 59, 32, 90, 74, 61, 80, 83, 64, 54, 70, 61, 50, 62, 53, 94, 60, 50, 100, 43, 79, 44, 52, 66, 100, 62, 61, 70, 71, 72, 72, 46, 52, 55, 66, 64, 82, 60, 64, 63, 49, 81, 74, 108, 94, 79, 67, 114, 46, 90, 58, 57, 76, 63, 47, 62, 73, 90, 93, 40, 61, 112, 64, 65, 65, 68, 56, 101, 54, 78, 73, 86, 64, 68, 49, 73, 71, 61, 71, 79, 79, 53, 61, 45, 57, 58, 64, 62, 64, 52, 84, 58, 70, 52, 99, 94, 76, 90, 56, 51, 67, 55, 68, 73, 77, 76, 81, 56, 66, 56, 55, 91, 75, 76, 78, 77, 62, 54, 49, 44, 76, 73, 81, 63, 49, 68, 78, 74, 51, 64, 55, 74, 85, 86, 77, 81, 55, 56, 62, 45, 81, 52, 90, 55, 96, 56, 51, 53, 96, 64, 54, 67, 47, 59, 65, 62, 114, 43, 57, 76, 56, 52, 64, 59, 92, 43, 81, 76, 125, 55, 73, 45, 68, 63, 90, 73, 63, 49, 47, 70, 66, 97, 78, 58, 68, 76, 76, 84, 68, 40, 83, 89, 47, 45, 74, 63, 38, 70, 69, 47, 89, 80, 47, 142, 72, 77, 84, 93, 64, 73, 46, 97, 72, 48, 85, 64, 63, 93, 79, 78, 92, 75, 61, 66, 62, 65, 61, 61, 64, 61, 65, 48, 53, 77, 51, 52, 73, 63, 62, 71, 54, 60, 45, 53, 74, 65, 59, 59, 66, 60, 58, 72, 62, 62, 59, 65, 83, 77, 64, 60, 70, 70, 81, 60, 71, 60, 62, 60, 65, 80, 58, 84, 54, 80, 66, 63, 54, 73, 55, 83, 73, 55, 99, 52, 54, 71, 66, 72, 84, 53, 56, 65, 53, 87, 74, 65, 49, 64, 93, 53, 89, 57, 74, 66, 74, 63, 45, 65, 65, 54, 60, 105, 62, 52, 56, 65, 63, 54, 75, 66, 55, 77, 83, 63, 55, 53, 51, 54, 64, 66, 61, 54, 51, 91, 96, 62, 53, 82, 65, 83, 74, 57, 69, 69, 103, 78, 92, 60, 61, 103, 80, 70, 58, 79, 77, 76, 61, 74, 76, 75, 56, 73, 46, 67, 66, 71, 53, 66, 104, 58, 75, 50, 49, 98, 76, 53, 89, 57, 61, 50, 38, 64, 65, 43, 92, 81, 79, 62, 84, 71, 68, 66, 51, 92, 44, 83, 49, 67, 101, 63, 64, 70, 66, 69, 68, 55, 77, 65, 103, 109, 74, 69, 65, 56, 57, 61, 48, 62, 62, 75, 68, 73, 74, 64, 69, 59, 77, 47, 83, 51, 51, 78, 71, 68, 63, 54, 58, 62, 99, 113, 72, 55, 80, 54, 50, 73, 73, 73, 51, 59, 56, 80, 71, 73, 72, 74, 59, 50, 65, 55, 65, 58, 85, 73, 126, 79, 43, 105, 68, 55, 48, 37, 53, 65, 55, 46, 62, 60, 56, 78, 75, 81, 72, 60, 84, 60, 52, 56, 64, 58, 71, 39, 63, 59, 76, 43, 61, 56, 56, 63, 56, 79, 54, 84, 59, 81, 100, 51, 53, 85, 70, 51, 73, 54, 53, 56, 67, 83, 54, 52, 66, 78, 57, 63, 48, 68, 73, 72, 72, 57, 81, 61, 62, 55, 62, 41, 65, 58, 56, 58, 72, 39, 77, 46, 63, 63, 62, 75, 64, 62, 68, 91, 51, 55, 86, 57, 63, 61, 69, 60, 73, 162, 100, 56, 62, 63, 56, 63, 88, 53, 80, 102, 91, 71, 64, 58, 63, 64, 59, 45, 77, 51, 64, 84, 66, 74, 80, 108, 71, 54, 71, 43, 58, 68, 64, 55, 82, 71, 67, 77, 70, 55, 102, 119, 59, 62, 72, 53, 51, 84, 51, 60, 73, 56, 75, 73, 72, 55, 54, 67, 62, 53, 72, 59, 80, 51, 57, 53, 77, 59, 65, 101, 61, 63, 63, 61, 78, 60, 72, 51, 77, 68, 63, 77, 68, 54, 53, 63, 52, 69, 63, 58, 57, 79, 111, 56, 70, 142, 50, 73, 61, 55, 39, 76, 91, 62, 82, 53, 40, 59, 52, 56, 71, 47, 67, 51, 60, 58, 70, 51, 76, 59, 62, 66, 48, 61, 48, 55, 67, 69, 66, 55, 55, 75, 68, 66, 59, 80, 92, 73, 72, 65, 57, 62, 55, 56, 60, 81, 44, 80, 67, 35, 64, 49, 69, 52, 40, 65, 67, 54, 56, 48, 42, 81, 59, 61, 63, 70, 84, 71, 65, 63, 65, 65, 63, 91, 63, 61, 73, 80, 75, 68, 58, 68, 65, 98, 69, 53, 64, 57, 49, 73, 68, 58, 69, 73, 60, 64, 55, 75, 75, 49, 60, 57, 67, 64, 76, 83, 55, 52, 55, 68, 66, 49, 68, 60, 77, 57, 66, 66, 79, 76, 68, 60, 73, 59, 75, 74, 73, 46, 62, 72, 62, 47, 49, 57, 65, 61, 84, 42, 64, 60, 68, 61, 57, 59, 72, 39, 58, 59, 66, 50, 82, 73, 56, 52, 45, 76, 80, 74, 68, 74, 76, 78, 76, 91, 70, 69, 60, 56, 57, 58, 65, 49, 61, 45, 57, 98, 62, 62, 70, 53, 89, 63, 63, 56, 64, 67, 55, 52, 78, 
42, 68, 46, 61, 50, 74, 104, 59, 71, 84, 86, 92, 75, 59, 68, 55, 61, 64, 59, 72, 48, 77, 74, 62, 74, 60, 72, 63, 47, 60, 75, 74, 59, 53, 79, 67, 75, 67, 58, 71, 65, 67, 78, 82, 60, 69, 87, 52, 101, 71, 91, 70, 66, 73, 65, 70, 51, 58, 79, 76, 51, 62, 73, 45, 80, 70, 68, 65, 58, 69, 62, 65, 49, 42, 71, 58, 90, 65, 59, 88, 45, 59, 63, 73, 42, 86, 53, 57, 58, 75, 66, 65, 80, 74, 96, 51, 71, 53, 52, 70, 90, 45, 54, 61, 92, 83, 102, 49, 56, 102, 73, 98, 54, 69, 91, 62, 57, 83, 75, 60, 101, 68, 66, 49, 62, 75, 69, 53, 81, 80, 54, 55, 98, 59, 63, 39, 39, 59, 42, 112, 59, 47, 53, 71, 54, 80, 72, 84, 59, 46, 78, 72, 78, 51, 72, 66, 74, 75, 53, 80, 62, 50, 57, 89, 62, 70, 67, 64, 64, 61, 72, 66, 91, 69, 65, 81, 65, 52, 58, 59, 59, 72, 95, 82, 78, 102, 58, 55, 64, 66, 76, 78, 56, 49, 71, 59, 63, 50, 62, 74, 53, 63, 62, 56, 80, 73, 62, 77, 81, 56, 76, 66, 57, 50, 62, 73, 71, 57, 52, 52, 76, 72, 92, 75, 67, 65, 43, 60, 66, 68, 63, 68, 59, 52, 60, 68, 49, 69, 65, 47, 70, 79, 62, 48, 70, 69, 72, 77, 78, 55, 72, 71, 74, 66, 61, 70, 65, 72, 53, 47, 63, 66, 61, 77, 83, 65, 71, 70, 62, 71, 54, 50, 62, 49, 69, 64, 57, 66, 63, 64, 65, 80, 67, 74, 51, 56, 54, 70, 72, 65, 66, 66, 112, 57, 88, 61, 58, 79, 102, 81, 97, 46, 66, 73, 77, 63, 62, 72, 66, 112, 117, 62, 66, 66, 68, 61, 112, 58, 61, 74, 63, 61, 65, 63, 73, 60, 81, 63, 54, 73, 61, 56, 66, 69, 67, 87, 86, 50, 50, 68, 62, 82, 56, 69, 41, 80, 68, 59, 63, 67, 72, 63, 74, 75, 44, 73, 85, 59, 61, 53, 64, 55, 50, 66, 63, 41, 67, 84, 49, 79, 55, 56, 60, 76, 59, 59, 61, 85, 59, 75, 90, 60, 69, 70, 54, 183, 66, 56, 58, 52, 82, 61, 79, 46, 62, 70, 54, 76, 66, 79, 67, 74, 89, 46, 43, 50, 42, 56, 51, 54, 59, 57, 54, 64, 72, 123, 52, 68, 63, 68, 58, 64, 96, 57, 72, 70, 66, 57, 55, 79, 64, 69, 80, 69, 53, 49, 61, 67, 70, 77, 62, 70, 63, 68, 52, 64, 46, 71, 63, 75, 67, 68, 55, 60, 61, 124, 57, 58, 59, 87, 60, 80, 99, 60, 63, 68, 61, 68, 55, 58, 67, 70, 53, 52, 60, 49, 50, 61, 70, 70, 54, 54, 74, 48, 61, 61, 85, 65, 70, 57, 56, 62, 51, 60, 61, 65, 66, 50, 49, 74, 69, 74, 82, 65, 79, 55, 67, 72, 66, 103, 69, 67, 54, 83, 72, 58, 87, 60, 76, 59, 80, 56, 64, 59, 58, 72, 78, 61, 66, 47, 67, 63, 61, 55, 56, 175, 79, 62, 70, 57, 75, 100, 60, 63, 59, 86, 45, 61, 61, 88, 57, 75, 67, 69, 65, 64, 65, 89, 64, 72, 63, 62, 75, 61, 61, 53, 70, 71, 61, 54, 65, 67, 54, 58, 57, 56, 54, 75, 55, 70, 62, 52, 55, 56, 66, 73, 57, 58, 56, 97, 83, 60, 58, 72, 66, 58, 68, 61, 75, 53, 102, 70, 95, 57, 57, 54, 61, 64, 66, 62, 80, 67, 65, 67, 41, 53, 59, 82, 61, 57, 61, 61, 72, 80, 66, 63, 62, 65, 124, 79, 71, 79, 39, 66, 75, 58, 57, 54, 65, 61, 55, 63, 58, 77, 51, 54, 81, 95, 63, 57, 55, 79, 66, 75, 55, 67, 66, 54, 56, 66, 60, 66, 56, 63, 51, 62, 62, 90, 51, 56, 66, 55, 55, 71, 64, 64, 61, 65, 59, 66, 59, 64, 68, 53, 59, 67, 62, 71, 74, 65, 62, 61, 78, 65, 55, 64, 54, 52, 64, 75, 65, 70, 72, 56, 70, 67, 75, 51, 66, 58, 66, 57, 61, 96, 47, 56, 64, 64, 58, 58, 60, 56, 78, 71, 71, 49, 60, 62, 89, 70, 60, 58, 57, 55, 58, 61, 76, 55, 54, 83, 67, 92, 67, 72, 59, 61, 56, 65, 71, 64, 72, 66, 60, 62, 55, 76, 68, 64, 70, 63, 64, 75, 66, 70, 94, 66, 63, 66, 59, 67, 56, 67, 64, 69, 57, 88, 64, 64, 45, 66, 60, 63, 62, 61, 67, 69, 91, 82, 67, 63, 66, 70, 69, 57, 60, 58, 60, 55, 60, 62, 48, 56, 60, 65, 64, 58, 59, 55, 54, 62, 72, 61, 64, 63, 61, 57, 67, 59, 61, 57, 69, 60, 64, 56, 73, 54, 68, 91, 55, 76, 65, 58, 72, 73, 57, 90, 62, 57, 87, 62, 85, 58, 58, 73, 81, 58, 60, 63, 53, 50, 71, 93, 72, 83, 68, 59, 64, 59, 78, 88, 61, 65, 60, 56, 59, 73, 65, 80, 64, 81, 66, 59, 58, 49, 62, 61, 73, 64, 52, 52, 55, 57, 70, 
45, 66, 54, 78, 50, 56, 55, 64, 62, 83, 64, 74, 44, 79, 52, 74, 51, 58, 57, 104, 62, 64, 70, 82, 69, 75, 70, 61, 65, 73, 60, 70, 76, 49, 73, 59, 68, 78, 70, 56, 60, 64, 48, 63, 60, 63, 66, 70, 55, 53, 65, 57, 74, 53, 69, 61, 70, 62, 55, 64, 56, 63, 58, 54, 61, 65, 58, 52, 106, 76, 53, 104, 62, 66, 58, 61, 80, 56, 73, 65, 66, 73, 54, 71, 80, 99, 60, 64, 58, 67, 79, 44, 57, 65, 77, 78, 84, 54, 73, 75, 59, 71, 55, 69, 59, 67, 57, 51, 52, 75, 52, 64, 50, 60, 56, 57, 68, 73, 58, 58, 61, 65, 65, 61, 61, 66, 49, 152, 59, 62, 42, 71, 74, 59, 66, 91, 55, 69, 49, 62, 57, 71, 72, 46, 67, 67, 64, 85, 67, 54, 91, 47, 64, 55, 61, 70, 65, 73, 61, 55, 84, 67, 48, 81, 53, 61, 54, 69, 96, 57, 63, 63, 110, 46, 73, 60, 73, 59, 91, 50, 78, 73, 99, 52, 61, 65, 53, 65, 53, 60, 57, 64, 67, 74, 98, 63, 70, 63, 58, 76, 56, 76, 49, 63, 47, 69, 69, 50, 67, 49, 76, 65, 70, 92, 60, 56, 81, 64, 70, 59, 71, 54, 116, 64, 60, 68, 55, 66, 64, 65, 55, 63, 70, 59, 57, 59, 101, 46, 61, 78, 93, 68, 84, 90, 108, 75, 99, 55, 83, 55, 109, 75, 61, 74, 65, 63, 60, 90, 90, 71, 69, 53, 87, 59, 70, 63, 63, 57, 78, 68, 87, 58, 50, 50, 80, 50, 74, 58, 64, 61, 66, 68, 60, 59, 92, 61, 47, 90, 66, 60, 69, 58, 69, 81, 66, 61, 57, 69, 85, 71, 73, 59, 54, 51, 66, 61, 51, 58, 71, 80, 81, 159, 52, 61, 70, 63, 72, 56, 61, 72, 103, 52, 52, 61, 51, 70, 67, 115, 85, 62, 58, 63, 55, 61, 71, 61, 73, 45, 73, 97, 92, 59, 50, 77, 67, 62, 56, 94, 57, 66, 99, 57, 58, 71, 72, 85, 83, 88, 58, 63, 85, 48, 63, 62, 74, 52, 84, 81, 51, 71, 52, 67, 65, 59, 83, 75, 81, 74, 40, 83, 54, 77, 66, 67, 57, 49, 67, 49, 68, 61, 59, 66, 64, 54, 47, 58, 64, 81, 63, 56, 61, 58, 62, 66, 52, 49, 68, 74, 70, 77, 65, 55, 74, 78, 76, 62, 56, 50, 68, 79, 60, 78, 70, 66, 66, 50, 67, 80, 62, 55, 80, 85, 50, 71, 157, 54, 83, 70, 68, 41, 144, 81, 88, 46, 81, 78, 51, 86, 59, 74, 87, 70, 55, 79, 85, 61, 77, 61, 67, 64, 65, 71, 110, 62, 51, 69, 45, 73, 66, 90, 53, 64, 69, 54, 52, 62, 59, 59, 70, 60, 51, 60, 61, 57, 66, 74, 72, 57, 66, 57, 76, 67, 55, 62, 73, 56, 75, 49, 68, 66, 76, 67, 79, 70, 63, 90, 70, 50, 74, 77, 80, 48, 72, 61, 62, 60, 71, 61, 62, 68, 66, 63, 59, 62, 69, 57, 60, 63, 62, 59, 62, 59, 58, 116, 80, 55, 58, 52, 67, 68, 79, 61, 55, 84, 52, 75, 81, 69, 60, 71, 66, 63, 83, 54, 68, 74, 76, 75, 69, 59, 72, 48, 72, 62, 56, 73, 55, 65, 54, 69, 64, 73, 102, 48, 51, 70, 52, 68, 61, 86, 45, 78, 68, 76, 63, 73, 81, 65, 60, 50, 45, 61, 52, 67, 67, 71, 47, 64, 76, 52, 69, 71, 52, 66, 72, 55, 62, 77, 47, 59, 63, 55, 108, 60, 64, 63, 58, 58, 79, 67, 54, 37, 65, 59, 54, 81, 66, 53, 62, 54, 52, 56, 76, 67, 46, 73, 59, 56, 63, 69, 66, 56, 58, 61, 66, 62, 87, 80, 71, 72, 62, 57, 55, 76, 89, 64, 43, 79, 41, 66, 56, 57, 57, 57, 53, 67, 52, 56, 69, 73, 62, 60, 53, 68, 65, 61, 49, 48, 59, 65, 67, 63, 72, 47, 65, 49, 59, 69, 58, 67, 61, 79, 62, 77, 61, 66, 59, 65, 64, 58, 66, 68, 74, 60, 73, 49, 62, 65, 56, 58, 56, 65, 66, 56, 78, 69, 62, 66, 47, 73, 62, 54, 53, 60, 63, 47, 77, 56, 63, 58, 69, 67, 63, 57, 56, 69, 79, 48, 58, 52, 54, 73, 85, 83, 65, 50, 63, 64, 71, 97, 71, 45, 76, 70, 74, 59, 56, 65, 72, 76, 74, 87, 56, 77, 69, 65, 56, 56, 74, 60, 71, 63, 56, 47, 73, 60, 54, 53, 53, 55, 70, 93, 62, 63, 55, 88, 54, 65, 57, 60, 74, 105, 58, 55, 66, 42, 55, 65, 71, 56, 72, 120, 78, 61, 56, 72, 79, 52, 68, 66, 58, 55, 67, 55, 53, 90, 56, 70, 64, 49, 71, 52, 53, 63, 60, 50, 72, 55, 59, 47, 92, 146, 70, 60, 61, 58, 51, 43, 90, 58, 75, 67, 59, 71, 78, 52, 86, 65, 92, 66, 60, 67, 95, 65, 62, 66, 111, 82, 61, 59, 55, 55, 62, 62, 56, 50, 92, 100, 76, 63, 79, 85, 60, 76, 58, 90, 52, 52, 60, 63, 61, 57, 77, 
52, 71, 49, 62, 50, 66, 61, 61, 74, 49, 35, 49, 45, 70, 56, 66, 61, 87, 57, 69, 91, 81, 78, 68, 60, 55, 85, 96, 74, 55, 68, 62, 67, 77, 67, 63, 85, 82, 52, 61, 79, 60, 74, 58, 63, 67, 59, 47, 74, 48, 59, 97, 59, 59, 77, 61, 68, 48, 67, 58, 49, 60, 57, 53, 67, 70, 50, 68, 56, 62, 67, 78, 76, 63, 79, 68, 44, 73, 62, 49, 60, 100, 58, 66, 56, 63, 54, 51, 62, 65, 65, 61, 69, 61, 62, 74, 63, 64, 54, 64, 61, 66, 52, 54, 60, 73, 81, 62, 66, 63, 61, 91, 66, 48, 69, 71, 65, 84, 59, 77, 92, 53, 62, 48, 68, 78, 69, 69, 68, 83, 62, 74, 79, 57, 50, 89, 58, 53, 49, 69, 45, 72, 68, 66, 59, 60, 46, 86, 63, 46, 72, 68, 66, 80, 55, 68, 79, 89, 87, 60, 55, 55, 58, 67, 63, 65, 63, 48, 52, 70, 59, 65, 56, 60, 53, 67, 76, 74, 60, 58, 51, 71, 65, 86, 47, 56, 67, 60, 63, 86, 80, 63, 56, 63, 76, 55, 70, 77, 72, 77, 64, 61, 57, 62, 54, 57, 58, 81, 51, 57, 44, 81, 76, 159, 68, 96, 51, 69, 59, 52, 65, 59, 74, 53, 67, 81, 60, 60, 79, 62, 73, 69, 44, 72, 62, 81, 57, 59, 80, 79, 59, 83, 60, 55, 46, 55, 65, 60, 46, 98, 61, 61, 52, 64, 87, 64, 72, 88, 57, 72, 75, 74, 69, 58, 79, 65, 63, 38, 65, 63, 52, 54, 59, 70, 77, 116, 55, 46, 62, 43, 55, 54, 75, 83, 67, 62, 52, 69, 62, 75, 70, 101, 62, 53, 58, 64, 65, 47, 53, 65, 66, 47, 67, 44, 56, 56, 73, 78, 74, 70, 61, 46, 54, 70, 47, 62, 81, 61, 71, 66, 51, 49, 65, 98, 73, 71, 74, 84, 36, 113, 62, 101, 63, 60, 66, 89, 76, 58, 54, 61, 65, 92, 54, 55, 70, 65, 58, 75, 69, 62, 76, 68, 57, 56, 62, 96, 68, 71, 53, 65, 61, 52, 86, 58, 67, 53, 67, 72, 66, 53, 64, 60, 81, 88, 49, 81, 52, 58, 72, 64, 65, 61, 61, 60, 83, 77, 69, 93, 59, 65, 55, 67, 58, 104, 63, 70, 59, 54, 52, 109, 67, 58, 77, 61, 51, 98, 57, 63, 65, 39, 80, 44, 74, 67, 89, 76, 73, 60, 67, 51, 62, 68, 63, 60, 113, 51, 75, 86, 56, 63, 67, 63, 66, 64, 87, 47, 66, 74, 50, 70, 65, 57, 86, 71, 52, 68, 55, 59, 59, 60, 75, 54, 105, 74, 64, 71, 68, 62, 55, 78, 63, 64, 68, 71, 73, 66, 75, 54, 52, 67, 62, 69, 89, 69, 77, 55, 55, 91, 63, 58, 53, 58, 61, 80, 68, 85, 72, 52, 59, 47, 73, 80, 66, 47, 57, 72, 63, 109, 75, 75, 66, 71, 64, 65, 66, 57, 54, 56, 79, 56, 69, 63, 61, 57, 87, 74, 89, 56, 78, 64, 55, 50, 70, 65, 106, 70, 60, 67, 64, 60, 92, 66, 70, 77, 48, 68, 50, 69, 63, 51, 47, 75, 97, 49, 54, 69, 52, 84, 61, 65, 53, 80, 65, 83, 82, 66, 49, 121, 63, 93, 61, 112, 51, 89, 52, 50, 57, 53, 69, 57, 101, 58, 61, 88, 54, 76, 57, 60, 148, 67, 62, 64, 54, 55, 72, 71, 67, 58, 53, 50, 70, 66, 53, 42, 59, 54, 51, 43, 37, 82, 63, 72, 51, 66, 66, 57, 63, 95, 72, 70, 61, 44, 71, 76, 56, 112, 73, 60, 59, 74, 58, 78, 110, 70, 86, 65, 70, 73, 55, 62, 45, 53, 69, 65, 73, 65, 38, 56, 90, 68, 81, 61, 88, 65, 63, 52, 55, 54, 44, 65, 61, 63, 75, 54, 57, 76, 65, 76, 104, 57, 54, 63, 78, 77, 73, 58, 68, 60, 62, 70, 66, 52, 50, 61, 74, 63, 75, 74, 69, 72, 59, 80, 90, 115, 74, 82, 54, 73, 56, 72, 56, 47, 57, 69, 60, 58, 83, 55, 74, 73, 63, 60, 77, 65, 63, 53, 62, 97, 57, 61, 65, 65, 78, 38, 71, 70, 53, 52, 61, 68, 81, 70, 69, 56, 54, 50, 65, 53, 55, 52, 72, 65, 59, 57, 66, 70, 82, 57, 58, 57, 40, 61, 68, 66, 72, 47, 70, 57, 61, 76, 52, 71, 65, 69, 55, 54, 65, 55, 74, 73, 69, 66, 52, 55, 61, 65, 72, 54, 76, 59, 45, 49, 108, 59, 72, 58, 56, 65, 56, 90, 99, 53, 67, 57, 60, 69, 57, 60, 77, 57, 27, 87, 69, 53, 45, 62, 53, 55, 45, 57, 58, 69, 60, 60, 93, 65, 80, 54, 79, 59, 68, 51, 44, 67, 78, 87, 81, 63, 70, 71, 58, 59, 70, 75, 67, 49, 69, 57, 95, 75, 45, 63, 66, 60, 90, 52, 62, 66, 57, 45, 63, 78, 66, 57, 55, 47, 80, 63, 70, 55, 47, 56, 66, 95, 74, 48, 62, 59, 62, 55, 100, 54, 63, 111, 67, 63, 60, 70, 61, 86, 71, 49, 49, 99, 63, 55, 134, 72, 50, 71, 62, 77, 
72, 69, 76, 60, 55, 75, 63, 74, 58, 73, 52, 92, 86, 84, 98, 49, 49, 70, 80, 76, 50, 66, 63, 82, 82, 66, 60, 73, 63, 57, 62, 70, 51, 76, 53, 52, 64, 51, 75, 53, 70, 66, 109, 56, 68, 45, 56, 79, 62, 60, 65, 68, 58, 50, 58, 57, 57, 70, 58, 58, 66, 50, 69, 59, 81, 57, 88, 48, 50, 67, 70, 65, 79, 81, 107, 51, 90, 64, 68, 77, 58, 46, 68, 42, 100, 67, 69, 54, 46, 61, 56, 53, 66, 61, 57, 46, 59, 79, 85, 53, 71, 88, 63, 62, 76, 91, 56, 60, 55, 41, 69, 67, 74, 65, 68, 49, 50, 62, 57, 46, 64, 73, 68, 83, 74, 92, 62, 72, 60, 63, 95, 60, 51, 59, 68, 45, 62, 60, 77, 69, 56, 57, 70, 35, 76, 85, 58, 71, 48, 78, 62, 60, 62, 65, 62, 83, 112, 97, 62, 57, 70, 73, 56, 81, 82, 94, 57, 60, 66, 72, 59, 58, 70, 64, 63, 60, 56, 44, 65, 63, 69, 51, 75, 55, 68, 67, 59, 41, 88, 51, 62, 54, 36, 64, 64, 75, 67, 80, 64, 50, 50, 99, 90, 79, 61, 55, 44, 66, 47, 56, 61, 72, 62, 49, 76, 88, 100, 73, 69, 61, 62, 63, 66, 49, 59, 57, 94, 151, 91, 37, 78, 58, 59, 83, 87, 61, 49, 75, 55, 63, 89, 60, 66, 55, 88, 56, 71, 89, 75, 64, 52, 66, 61, 95, 65, 67, 73, 122, 75, 107, 66, 47, 54, 92, 61, 89, 65, 80, 55, 52, 57, 89, 55, 59, 59, 95, 61, 57, 66, 77, 108, 53, 52, 62, 62, 44, 69, 73, 68, 67, 65, 61, 79, 55, 79, 69, 50, 76, 64, 63, 53, 72, 83, 64, 60, 90, 81, 64, 81, 97, 66, 71, 60, 58, 64, 42, 83, 57, 76, 64, 54, 67, 54, 39, 97, 50, 104, 67, 48, 54, 56, 63, 86, 65, 74, 54, 59, 64, 78, 59, 56, 73, 58, 65, 57, 56, 55, 58, 57, 61, 47, 62, 44, 63, 58, 114, 93, 50, 80, 64, 82, 58, 53, 64, 60, 57, 50, 56, 83, 54, 55, 74, 69, 92, 35, 57, 82, 49, 70, 73, 76, 46, 56, 55, 55, 60, 68, 69, 59, 65, 49, 58, 65, 59, 116, 63, 63, 60, 72, 66, 78, 65, 90, 67, 65, 52, 63, 75, 83, 71, 57, 45, 74, 55, 51, 53, 81, 65, 66, 67, 59, 45, 63, 70, 78, 70, 70, 59, 94, 82, 75, 50, 71, 64, 47, 50, 47, 68, 64, 62, 75, 78, 55, 73, 78, 64, 49, 40, 82, 72, 71, 79, 64, 62, 52, 49, 82, 55, 110, 67, 101, 69, 68, 73, 66, 67, 88, 92, 127, 54, 60, 49, 73, 73, 71, 70, 85, 77, 84, 51, 104, 78, 79, 78, 54, 57, 69, 53, 64, 78, 61, 87, 61, 70, 43, 55, 44, 74, 58, 63, 51, 71, 62, 61, 57, 59, 48, 75, 60, 71, 80, 94, 53, 49, 54, 65, 90, 59, 62, 77, 55, 59, 49, 59, 86, 52, 76, 66, 80, 68, 53, 82, 56, 56, 111, 58, 59, 77, 65, 65, 65, 51, 84, 55, 46, 76, 66, 47, 67, 90, 63, 59, 76, 95, 55, 44, 43, 83, 88, 62, 49, 68, 62, 54, 69, 53, 76, 79, 62, 57, 62, 62, 70, 61, 79, 71, 43, 65, 62, 50, 57, 68, 68, 73, 83, 63, 50, 55, 82, 84, 59, 87, 103, 71, 72, 54, 52, 70, 64, 80, 62, 64, 56, 60, 102, 50, 67, 48, 117, 62, 87, 62, 67, 60, 68, 63, 57, 60, 65, 76, 52, 79, 79, 70, 67, 102, 70, 59, 52, 70, 73, 43, 52, 63, 60, 73, 75, 62, 81, 75, 48, 84, 82, 85, 64, 99, 72, 78, 76, 61, 66, 65, 91, 71, 79, 46, 65, 85, 35, 86, 69, 75, 89, 63, 41, 68, 63, 87, 72, 65, 67, 52, 73, 48, 51, 36, 51, 82, 61, 78, 64, 74, 60, 69, 65, 70, 60, 88, 61, 82, 64, 66, 71, 75, 44, 63, 68, 63, 47, 50, 70, 87, 59, 60, 96, 118, 68, 64, 78, 57, 58, 43, 58, 37, 67, 59, 75, 65, 60, 32, 74, 57, 60, 58, 51, 45, 67, 95, 55, 59, 36, 48, 50, 61, 58, 86, 51, 62, 58, 42, 67, 75, 62, 54, 60, 52, 71, 60, 74, 70, 65, 50, 113, 96, 80, 63, 69, 69, 72, 54, 45, 60, 50, 60, 76, 62, 63, 57, 61, 61, 71, 64, 47, 60, 73, 43, 52, 72, 68, 82, 67, 76, 78, 77, 59, 44, 56, 86, 55, 48, 61, 67, 75, 73, 66, 55, 95, 55, 84, 100, 71, 62, 58, 58, 76, 70, 56, 84, 70, 54, 64, 89, 67, 56, 61, 71, 90, 61, 56, 65, 60, 67, 79, 69, 74, 54, 65, 60, 28, 56, 46, 47, 65, 62, 52, 43, 79, 45, 69, 49, 74, 90, 85, 61, 59, 107, 87, 66, 67, 61, 71, 80, 51, 84, 78, 76, 63, 70, 72, 59, 47, 44, 73, 56, 65, 67, 60, 89, 81, 56, 103, 49, 74, 65, 81, 57, 105, 57, 80, 57, 102, 
71, 60, 68, 67, 91, 65, 57, 59, 90, 63, 58, 57, 58, 70, 53, 55, 52, 51, 58, 79, 86, 30, 60, 56, 78, 82, 46, 61, 80, 83, 68, 58, 68, 62, 91, 55, 60, 41, 64, 65, 52, 61, 72, 62, 78, 59, 60, 68, 68, 64, 90, 80, 65, 77, 76, 62, 87, 50, 66, 67, 61, 49, 76, 84, 51, 47, 58, 68, 103, 81, 68, 56, 74, 68, 70, 70, 65, 91, 53, 69, 77, 59, 61, 52, 85, 71, 59, 54, 84, 79, 70, 67, 69, 50, 56, 78, 54, 60, 49, 68, 63, 53, 71, 73, 52, 64, 86, 42, 58, 78, 64, 56, 53, 87, 79, 51, 61, 78, 70, 60, 61, 48, 72, 57, 89, 77, 64, 60, 81, 61, 61, 49, 68, 79, 69, 74, 57, 79, 65, 44, 56, 46, 75, 79, 75, 51, 71, 62, 67, 49, 49, 76, 68, 81, 49, 59, 62, 93, 66, 48, 46, 58, 76, 72, 62, 67, 70, 62, 62, 74, 93, 59, 90, 55, 69, 56, 52, 78, 55, 59, 64, 87, 77, 65, 82, 60, 70, 68, 70, 58, 57, 83, 53, 80, 54, 77, 41, 73, 31, 85, 64, 41, 55, 72, 54, 111, 63, 77, 79, 54, 111, 58, 71, 60, 58, 115, 54, 60, 66, 76, 70, 35, 66, 76, 70, 81, 65, 62, 50, 67, 98, 59, 51, 45, 74, 60, 66, 71, 63, 60, 50, 53, 60, 74, 83, 51, 69, 61, 64, 47, 71, 58, 59, 74, 53, 73, 70, 77, 54, 91, 70, 52, 92, 62, 69, 48, 59, 66, 50, 60, 43, 51, 68, 88, 56, 71, 74, 54, 69, 86, 63, 79, 76, 62, 70, 97, 57, 68, 66, 72, 71, 103, 62, 61, 45, 53, 55, 63, 64, 86, 79, 60, 49, 108, 40, 58, 69, 75, 80, 57, 49, 68, 46, 67, 45, 82, 74, 79, 67, 70, 69, 57, 61, 65, 51, 35, 67, 50, 66, 65, 80, 71, 58, 46, 91, 75, 53, 86, 69, 119, 68, 67, 67, 61, 89, 57, 56, 38, 59, 71, 51, 92, 100, 66, 86, 59, 111, 83, 62, 56, 73, 60, 47, 59, 60, 35, 68, 62, 36, 79, 59, 99, 64, 70, 65, 72, 69, 77, 51, 68, 61, 44, 52, 77, 81, 78, 92, 60, 41, 58, 48, 65, 76, 57, 56, 95, 69, 114, 75, 74, 56, 69, 68, 70, 83, 88, 74, 66, 57, 72, 41, 76, 72, 65, 58, 43, 63, 78, 61, 97, 46, 49, 74, 87, 72, 62, 74, 86, 47, 64, 72, 50, 74, 50, 58, 43, 53, 54, 62, 41, 69, 73, 55, 70, 63, 66, 37, 72, 52, 71, 54, 77, 68, 67, 77, 73, 72, 91, 70, 72, 57, 52, 74, 73, 73, 67, 68, 57, 79, 75, 63, 51, 67, 54, 73, 64, 84, 58, 100, 93, 63, 58, 56, 104, 62, 52, 58, 63, 109, 77, 49, 49, 170, 89, 62, 71, 65, 57, 55, 68, 94, 64, 77, 61, 49, 70, 66, 57, 110, 51, 69, 68, 64, 76, 79, 50, 60, 49, 68, 71, 77, 92, 65, 64, 78, 50, 53, 74, 39, 86, 68, 62, 57, 78, 68, 63, 64, 83, 60, 63, 61, 40, 51, 73, 98, 156, 62, 58, 74, 85, 88, 42, 64, 47, 53, 62, 83, 57, 46, 63, 73, 65, 64, 66, 73, 48, 62, 65, 73, 69, 57, 46, 77, 59, 83, 63, 65, 39, 69, 69, 65, 72, 74, 79, 90, 56, 49, 67, 61, 61, 55, 38, 75, 64, 76, 71, 58, 62, 79, 28, 71, 52, 49, 49, 65, 52, 103, 86, 85, 73, 70, 62, 65, 59, 65, 96, 71, 50, 66, 84, 57, 93, 122, 103, 76, 89, 65, 57, 84, 93, 53, 64, 51, 69, 50, 74, 98, 64, 51, 44, 74, 61, 80, 67, 51, 56, 104, 72, 53, 74, 66, 40, 48, 55, 47, 79, 57, 98, 57, 80, 67, 69, 71, 43, 74, 73, 98, 47, 75, 56, 44, 63, 72, 99, 75, 78, 78, 59, 52, 54, 74, 46, 61, 69, 60, 55, 66, 56, 66, 82, 70, 84, 91, 52, 73, 77, 52, 39, 88, 65, 60, 41, 65, 49, 55, 59, 76, 74, 76, 73, 75, 82, 72, 64, 61, 59, 72, 77, 60, 62, 73, 54, 57, 79, 71, 69, 28, 78, 78, 58, 80, 62, 65, 66, 73, 73, 67, 71, 67, 78, 74, 86, 58, 60, 72, 80, 72, 73, 58, 55, 71, 59, 59, 50, 66, 66, 127, 42, 66, 71, 66, 55, 71, 76, 62, 63, 67, 108, 54, 53, 76, 81, 63, 52, 51, 68, 76, 77, 85, 68, 70, 58, 60, 59, 78, 56, 65, 67, 71, 82, 103, 90, 72, 50, 55, 58, 90, 60, 53, 63, 51, 61, 73, 62, 66, 49, 74, 72, 66, 53, 67, 60, 74, 66, 53, 55, 69, 89, 91, 59, 77, 78, 54, 55, 51, 88, 53, 54, 63, 94, 68, 64, 71, 74, 66, 60, 87, 66, 64, 61, 57, 67, 66, 66, 83, 50, 63, 71, 58, 59, 78, 65, 105, 57, 62, 72, 86, 77, 54, 96, 56, 62, 63, 69, 51, 94, 70, 59, 78, 78, 67, 52, 59, 73, 69, 63, 55, 70, 72, 56, 53, 78, 
56, 64, 60, 66, 66, 100, 52, 51, 49, 54, 53, 67, 71, 66, 49, 70, 71, 74, 64, 78, 44, 71, 73, 60, 79, 87, 63, 63, 99, 59, 62, 46, 69, 74, 60, 62, 49, 61, 47, 83, 74, 60, 69, 75, 72, 70, 40, 80, 66, 64, 63, 53, 71, 72, 71, 85, 94, 62, 64, 80, 63, 55, 47, 59, 61, 57, 72, 65, 57, 61, 44, 54, 61, 81, 58, 57, 93, 68, 76, 55, 69, 52, 80, 61, 43, 79, 52, 55, 81, 62, 51, 49, 63, 60, 60, 77, 70, 65, 50, 61, 46, 71, 72, 63, 35, 56, 49, 65, 78, 51, 68, 67, 68, 74, 74, 64, 57, 74, 57, 68, 80, 57, 72, 66, 47, 55, 87, 46, 45, 66, 51, 65, 110, 65, 70, 55, 49, 73, 62, 81, 48, 53, 46, 82, 76, 81, 39, 62, 52, 63, 50, 60, 57, 60, 46, 63, 51, 56, 66, 78, 55, 51, 70, 46, 54, 69, 49, 69, 74, 59, 95, 38, 84, 74, 50, 63, 74, 102, 60, 55, 70, 88, 71, 58, 67, 59, 55, 83, 72, 66, 62, 65, 51, 68, 50, 75, 54, 61, 66, 53, 55, 58, 55, 73, 53, 50, 60, 96, 72, 56, 42, 83, 65, 59, 67, 54, 53, 73, 70, 74, 96, 62, 87, 76, 78, 76, 72, 76, 66, 56, 89, 58, 85, 61, 69, 67, 55, 57, 78, 71, 87, 86, 84, 69, 98, 54, 71, 66, 55, 49, 88, 60, 62, 61, 64, 79, 31, 59, 66, 72, 95, 74, 68, 60, 73, 72, 63, 47, 77, 92, 79, 61, 78, 54, 63, 64, 60, 48, 74, 63, 57, 65, 65, 69, 54, 72, 53, 61, 79, 61, 60, 67, 61, 69, 56, 63, 59, 56, 70, 75, 57, 43, 69, 77, 62, 75, 59, 73, 62, 86, 55, 39, 81, 57, 84, 63, 73, 104, 59, 79, 65, 57, 56, 51, 78, 53, 83, 48, 57, 56, 45, 68, 86, 47, 60, 71, 64, 60, 66, 53, 44, 63, 66, 87, 55, 97, 52, 52, 68, 40, 50, 63, 72, 63, 69, 54, 71, 64, 60, 62, 63, 86, 55, 54, 60, 58, 56, 41, 55, 69, 62, 48, 50, 56, 71, 72, 71, 66, 53, 66, 58, 76, 53, 59, 67, 97, 86, 137, 76, 90, 62, 70, 55, 71, 53, 60, 85, 127, 72, 60, 85, 53, 71, 65, 83, 77, 68, 67, 60, 49, 58, 67, 72, 76, 45, 62, 78, 63, 71, 57, 81, 44, 66, 63, 72, 68, 85, 56, 67, 41, 65, 72, 49, 56, 93, 56, 45, 135, 76, 44, 141, 75, 47, 56, 68, 60, 55, 54, 56, 54, 69, 83, 49, 63, 50, 68, 69, 58, 74, 52, 65, 83, 82, 68, 63, 71, 73, 88, 76, 57, 65, 66, 69, 62, 98, 60, 76, 76, 59, 67, 53, 60, 64, 75, 61, 67, 56, 70, 72, 69, 56, 58, 50, 64, 53, 47, 44, 46, 60, 82, 72, 51, 91, 41, 79, 60, 50, 64, 61, 59, 109, 73, 65, 74, 62, 56, 64, 95, 74, 61, 89, 58, 59, 60, 60, 73, 78, 64, 50, 55, 65, 56, 72, 71, 48, 89, 57, 89, 83, 94, 49, 75, 62, 49, 68, 73, 61, 68, 63, 56, 70, 75, 65, 66, 67, 59, 107, 59, 54, 59, 65, 65, 66, 65, 42, 74, 72, 74, 87, 52, 70, 61, 62, 67, 64, 52, 66, 57, 54, 77, 74, 70, 61, 61, 88, 51, 63, 71, 57, 71, 53, 64, 63, 71, 71, 71, 61, 71, 54, 70, 39, 65, 49, 119, 46, 70, 58, 47, 64, 61, 73, 76, 71, 63, 53, 82, 59, 75, 71, 64, 53, 100, 66, 75, 90, 67, 68, 78, 51, 83, 71, 59, 78, 51, 64, 51, 62, 52, 87, 90, 66, 90, 71, 61, 88, 55, 67, 63, 72, 59, 72, 54, 51, 72, 72, 72, 68, 72, 60, 64, 76, 67, 65, 75, 80, 53, 49, 56, 55, 49, 55, 65, 68, 61, 70, 53, 87, 70, 91, 62, 142, 57, 69, 58, 110, 51, 74, 66, 60, 76, 61, 61, 58, 61, 68, 73, 30, 144, 97, 78, 71, 86, 62, 63, 50, 75, 68, 56, 53, 84, 83, 74, 61, 42, 74, 68, 126, 31, 64, 72, 79, 78, 62, 68, 60, 52, 51, 68, 61, 58, 67, 79, 74, 46, 97, 57, 65, 56, 49, 55, 42, 41, 69, 42, 56, 58, 47, 58, 63, 69, 69, 50, 70, 98, 76, 62, 50, 56, 53, 60, 53, 94, 50, 57, 64, 53, 59, 87, 62, 70, 71, 55, 51, 47, 91, 75, 80, 48, 55, 47, 61, 81, 81, 56, 61, 84, 73, 56, 68, 70, 56, 58, 87, 59, 43, 42, 61, 68, 91, 62, 55, 59, 63, 79, 103, 56, 77, 55, 45, 71, 56, 80, 71, 69, 66, 62, 59, 92, 78, 74, 85, 63, 98, 48, 52, 82, 55, 59, 55, 68, 41, 57, 60, 58, 61, 61, 44, 57, 70, 59, 69, 85, 81, 74, 59, 75, 56, 76, 107, 62, 59, 63, 61, 83, 71, 73, 78, 54, 56, 69, 82, 75, 66, 71, 65, 94, 57, 82, 60, 63, 69, 71, 71, 63, 70, 57, 70, 74, 84, 56, 40, 57, 62, 85, 
72, 75, 66, 63, 46, 52, 58, 84, 74, 75, 60, 66, 65, 65, 55, 49, 62, 77, 45, 74, 93, 58, 51, 57, 74, 73, 59, 95, 60, 55, 63, 71, 60, 64, 57, 52, 51, 68, 63, 67, 73, 54, 69, 65, 88, 98, 49, 55, 78, 41, 77, 73, 54, 51, 72, 49, 60, 63, 79, 57, 69, 70, 94, 62, 68, 46, 62, 60, 57, 71, 55, 54, 56, 56, 56, 71, 60, 65, 65, 68, 44, 70, 59, 69, 53, 57, 113, 81, 107, 66, 72, 77, 57, 82, 71, 63, 41, 65, 94, 63, 67, 46, 58, 33, 56, 59, 57, 54, 67, 78, 54, 73, 70, 60, 70, 73, 46, 63, 90, 56, 64, 59, 56, 63, 72, 78, 64, 59, 58, 62, 50, 76, 69, 69, 65, 69, 74, 86, 76, 66, 56, 51, 73, 76, 67, 76, 52, 57, 65, 68, 80, 52, 36, 108, 65, 68, 87, 57, 64, 59, 56, 67, 85, 51, 65, 85, 53, 46, 80, 52, 60, 61, 57, 64, 132, 44, 69, 66, 66, 60, 52, 55, 62, 71, 57, 64, 64, 63, 57, 97, 60, 78, 51, 62, 64, 66, 88, 72, 74, 79, 60, 68, 62, 62, 61, 63, 56, 66, 88, 82, 71, 81, 61, 61, 59, 53, 51, 40, 66, 56, 57, 59, 77, 48, 53, 53, 67, 90, 70, 54, 148, 60, 58, 57, 77, 74, 97, 65, 57, 53, 91, 73, 41, 49, 71, 45, 58, 75, 74, 103, 49, 56, 76, 71, 78, 65, 68, 69, 47, 73, 72, 69, 56, 98, 63, 65, 63, 57, 72, 62, 60, 47, 81, 66, 42, 47, 69, 85, 48, 76, 57, 56, 50, 53, 59, 47, 71, 50, 79, 61, 77, 51, 73, 66, 72, 73, 64, 51, 57, 77, 75, 49, 55, 62, 47, 56, 65, 56, 64, 35, 43, 95, 59, 67, 90, 56, 64, 64, 47, 57, 92, 63, 70, 59, 81, 78, 63, 66, 57, 61, 47, 70, 69, 80, 50, 62, 69, 48, 68, 52, 61, 108, 87, 91, 63, 61, 65, 79, 53, 51, 100, 85, 51, 72, 44, 60, 67, 66, 73, 54, 68, 51, 83, 74, 69, 60, 70, 60, 56, 63, 64, 60, 80, 48, 67, 76, 85, 58, 73, 66, 58, 63, 70, 81, 70, 50, 49, 59, 65, 71, 74, 82, 51, 68, 57, 59, 79, 63, 52, 56, 45, 75, 65, 70, 59, 65, 73, 62, 74, 60, 52, 49, 39, 74, 44, 81, 63, 113, 57, 54, 67, 65, 59, 75, 93, 83, 85, 73, 71, 55, 60, 60, 61, 56, 54, 74, 61, 68, 56, 57, 61, 68, 48, 68, 48, 60, 63, 60, 73, 53, 52, 42, 54, 64, 81, 57, 60, 55, 54, 72, 82, 67, 72, 53, 68, 70, 55, 61, 63, 68, 49, 98, 76, 60, 60, 82, 71, 48, 59, 57, 37, 57, 141, 65, 52, 68, 55, 53, 68, 83, 85, 53, 57, 53, 82, 67, 84, 66, 59, 65, 74, 58, 73, 90, 68, 72, 65, 60, 44, 65, 92, 54, 64, 68, 57, 63, 78, 100, 59, 61, 48, 70, 97, 71, 39, 63, 57, 93, 76, 53, 60, 77, 60, 68, 70, 86, 65, 51, 90, 62, 53, 55, 67, 63, 49, 71, 51, 64, 59, 70, 47, 82, 70, 74, 42, 52, 62, 40, 41, 64, 63, 76, 58, 65, 91, 55, 51, 68, 56, 68, 76, 63, 83, 96, 134, 49, 56, 61, 62, 64, 60, 58, 70, 64, 80, 58, 62, 75, 61, 68, 78, 58, 69, 47, 50, 49, 46, 75, 69, 62, 60, 71, 76, 62, 69, 81, 101, 38, 89, 81, 66, 75, 75, 57, 73, 104, 77, 70, 89, 79, 54, 69, 67, 90, 50, 51, 51, 69, 53, 64, 71, 53, 85, 59, 61, 51, 63, 62, 60, 73, 72, 62, 56, 74, 49, 56, 86, 58, 106, 59, 56, 58, 69, 77, 89, 64, 45, 49, 57, 94, 59, 53, 63, 88, 69, 63, 59, 62, 55, 56, 51, 54, 76, 73, 74, 42, 65, 70, 55, 65, 52, 86, 68, 59, 72, 58, 56, 89, 64, 49, 54, 72, 56, 70, 58, 54, 60, 62, 64, 74, 64, 65, 121, 73, 77, 57, 65, 62, 87, 69, 112, 67, 80, 76, 77, 61, 54, 43, 54, 89, 64, 63, 74, 48, 68, 73, 64, 73, 72, 60, 56, 68, 57, 79, 52, 62, 60, 52, 61, 71, 59, 74, 95, 68, 64, 61, 100, 59, 95, 59, 113, 61, 95, 59, 59, 63, 45, 59, 57, 65, 44, 38, 53, 69, 59, 50, 54, 51, 71, 64, 55, 56, 69, 48, 59, 57, 80, 68, 95, 98, 104, 76, 53, 65, 69, 51, 72, 54, 101, 67, 73, 65, 77, 67, 56, 53, 90, 56, 66, 59, 56, 65, 55, 64, 52, 48, 75, 90, 75, 74, 59, 60, 51, 60, 76, 64, 69, 67, 57, 81, 76, 90, 66, 86, 51, 61, 83, 76, 65, 69, 118, 83, 62, 55, 83, 47, 28, 69, 67, 74, 86, 77, 155, 55, 57, 57, 64, 64, 67, 76, 61, 77, 64, 59, 58, 56, 65, 72, 78, 70, 52, 66, 59, 48, 50, 68, 60, 58, 66, 57, 103, 48, 72, 61, 60, 71, 75, 66, 48, 61, 60, 47, 
71, 61, 56, 69, 53, 55, 74, 55, 70, 55, 51, 60, 84, 50, 63, 52, 91, 53, 43, 47, 57, 54, 54, 72, 64, 81, 90, 66, 55, 63, 76, 67, 52, 56, 62, 41, 55, 98, 41, 61, 54, 54, 71, 69, 66, 54, 72, 65, 69, 73, 53, 56, 89, 44, 80, 64, 66, 72, 77, 72, 72, 56, 96, 63, 69, 134, 75, 65, 65, 55, 67, 64, 60, 46, 57, 90, 51, 58, 104, 59, 51, 63, 51, 69, 67, 57, 64, 79, 44, 63, 48, 83, 47, 70, 82, 66, 52, 71, 47, 86, 97, 58, 34, 76, 52, 68, 66, 47, 57, 62, 56, 82, 80, 65, 53, 79, 70, 45, 52, 67, 71, 59, 56, 70, 43, 80, 59, 87, 58, 112, 99, 51, 91, 63, 58, 56, 61, 84, 121, 53, 89, 64, 66, 65, 102, 94, 72, 57, 57, 66, 84, 67, 70, 73, 83, 49, 62, 65, 52, 77, 68, 48, 66, 58, 52, 52, 60, 89, 54, 46, 100, 101, 45, 57, 61, 59, 76, 65, 60, 59, 65, 62, 61, 43, 86, 76, 43, 58, 69, 62, 84, 63, 53, 57, 64, 75, 66, 36, 82, 62, 92, 55, 43, 66, 57, 59, 78, 92, 67, 78, 65, 82, 53, 62, 77, 63, 62, 71, 72, 48, 58, 68, 52, 87, 83, 78, 75, 69, 52, 58, 75, 66, 64, 56, 143, 65, 72, 71, 50, 85, 90, 60, 61, 62, 70, 81, 53, 64, 61, 65, 78, 106, 76, 79, 85, 82, 44, 64, 70, 81, 50, 59, 90, 66, 66, 72, 75, 69, 49, 102, 45, 61, 79, 88, 79, 93, 67, 61, 61, 64, 55, 64, 58, 49, 53, 56, 68, 64, 73, 82, 94, 71, 43, 72, 61, 72, 88, 76, 71, 49, 92, 65, 69, 53, 57, 78, 97, 61, 71, 65, 65, 55, 47, 60, 54, 55, 43, 65, 98, 68, 53, 67, 58, 55, 64, 104, 62, 69, 91, 84, 74, 52, 43, 54, 67, 60, 61, 77, 49, 59, 72, 83, 59, 69, 66, 59, 57, 66, 68, 75, 82, 78, 38, 77, 58, 56, 47, 56, 127, 60, 68, 59, 57, 69, 56, 92, 70, 96, 44, 34, 56, 63, 86, 54, 57, 84, 67, 73, 68, 56, 52, 66, 86, 68, 94, 69, 52, 65, 40, 59, 72, 60, 51, 51, 68, 49, 45, 71, 96, 76, 68, 75, 60, 55, 75, 58, 53, 69, 89, 40, 68, 50, 86, 63, 68, 57, 60, 76, 79, 90, 60, 70, 44, 43, 81, 57, 55, 94, 56, 48, 50, 55, 85, 91, 52, 58, 51, 79, 73, 67, 62, 48, 66, 48, 65, 55, 59, 65, 69, 68, 89, 59, 63, 75, 66, 72, 70, 65, 83, 80, 53, 46, 47, 49, 80, 68, 79, 73, 69, 56, 25, 99, 75, 57, 111, 61, 72, 61, 63, 62, 42, 81, 79, 55, 73, 47, 63, 46, 74, 57, 63, 76, 76, 79, 101, 66, 70, 86, 64, 48, 66, 44, 81, 68, 62, 75, 61, 77, 52, 44, 87, 42, 52, 77, 72, 44, 53, 43, 57, 89, 61, 50, 70, 66, 75, 56, 50, 85, 77, 66, 56, 39, 72, 142, 65, 109, 108, 55, 85, 77, 56, 84, 75, 89, 61, 72, 52, 75, 66, 44, 71, 60, 67, 54, 65, 66, 66, 48, 94, 39, 107, 65, 60, 57, 70, 50, 53, 60, 56, 61, 57, 53, 77, 58, 60, 38, 62, 69, 65, 71, 68, 82, 71, 60, 76, 65, 61, 56, 76, 54, 67, 67, 56, 51, 59, 67, 52, 55, 66, 46, 65, 70, 136, 115, 66, 71, 42, 79, 70, 62, 54, 77, 61, 87, 40, 63, 56, 55, 74, 58, 60, 52, 70, 93, 79, 65, 61, 71, 69, 108, 72, 59, 51, 51, 64, 50, 52, 73, 39, 71, 60, 65, 101, 96, 39, 54, 71, 77, 65, 74, 73, 50, 59, 57, 49, 46, 80, 83, 45, 62, 44, 90, 65, 55, 59, 73, 57, 48, 81, 80, 40, 57, 60, 60, 43, 51, 66, 71, 55, 50, 68, 76, 64, 42, 66, 42, 80, 82, 39, 58, 51, 67, 90, 69, 67, 62, 35, 54, 69, 72, 67, 62, 76, 71, 66, 58, 73, 54, 71, 69, 69, 68, 62, 72, 69, 75, 74, 58, 64, 57, 122, 76, 83, 71, 53, 84, 66, 62, 57, 71, 53, 64, 86, 77, 102, 87, 82, 67, 89, 56, 52, 74, 68, 66, 90, 100, 68, 57, 67, 63, 66, 90, 56, 59, 80, 49, 85, 63, 59, 72, 46, 65, 65, 71, 78, 95, 63, 66, 93, 79, 64, 76, 85, 89, 75, 50, 69, 55, 57, 45, 66, 72, 82, 56, 69, 109, 81, 72, 58, 83, 63, 65, 54, 49, 63, 77, 71, 62, 59, 51, 67, 73, 60, 92, 63, 95, 48, 69, 64, 80, 73, 53, 40, 57, 49, 65, 69, 56, 68, 65, 72, 52, 72, 71, 62, 78, 62, 60, 117, 75, 62, 48, 66, 54, 69, 65, 51, 75, 54, 81, 75, 81, 49, 60, 82, 48, 45, 71, 59, 71, 72, 101, 57, 75, 57, 73, 34, 62, 52, 53, 56, 71, 63, 49, 47, 79, 66, 61, 74, 56, 69, 68, 58, 69, 62, 67, 105, 57, 44, 65, 63, 
67, 63, 53, 56, 53, 72, 60, 72, 65, 86, 112, 66, 91, 75, 72, 69, 68, 62, 96, 93, 53, 58, 62, 38, 66, 53, 108, 75, 94, 54, 41, 55, 61, 61, 60, 71, 71, 81, 60, 63, 69, 76, 53, 56, 79, 72, 79, 63, 52, 52, 48, 59, 47, 44, 101, 58, 73, 63, 59, 75, 91, 88, 94, 66, 75, 63, 62, 50, 56, 45, 83, 46, 52, 88, 87, 71, 67, 64, 54, 55, 77, 100, 97, 70, 78, 83, 77, 93, 80, 51, 50, 80, 88, 50, 56, 60, 57, 51, 77, 53, 64, 79, 72, 68, 51, 53, 43, 48, 55, 62, 73, 61, 63, 95, 63, 87, 60, 63, 48, 53, 64, 56, 77, 67, 58, 57, 55, 52, 93, 64, 42, 73, 59, 70, 95, 95, 53, 71, 35, 60, 40, 85, 69, 69, 81, 51, 103, 78, 79, 57, 59, 54, 48, 49, 75, 63, 52, 61, 65, 83, 39, 57, 57, 62, 49, 87, 45, 57, 84, 67, 60, 71, 92, 66, 70, 63, 63, 68, 46, 84, 70, 79, 66, 62, 79, 61, 67, 52, 58, 65, 63, 43, 68, 54, 69, 78, 61, 62, 55, 113, 83, 55, 69, 57, 71, 70, 96, 50, 105, 90, 54, 77, 51, 41, 107, 66, 75, 59, 57, 85, 57, 62, 66, 55, 82, 97, 50, 77, 57, 69, 56, 73, 64, 66, 53, 62, 85, 79, 41, 62, 71, 59, 81, 79, 51, 66, 95, 68, 67, 85, 139, 37, 64, 65, 78, 70, 68, 49, 72, 66, 61, 70, 42, 48, 72, 68, 59, 71, 58, 108, 56, 71, 56, 76, 72, 65, 70, 65, 40, 70, 88, 47, 47, 88, 53, 76, 62, 77, 67, 73, 55, 57, 101, 67, 54, 68, 58, 53, 52, 73, 86, 72, 48, 48, 55, 64, 65, 94, 96, 69, 60, 65, 62, 60, 54, 53, 43, 46, 71, 67, 51, 64, 74, 61, 70, 81, 95, 56, 49, 72, 60, 48, 60, 52, 97, 47, 75, 57, 58, 75, 77, 63, 54, 62, 60, 52, 52, 57, 73, 62, 46, 77, 60, 64, 71, 80, 67, 51, 64, 53, 61, 68, 48, 57, 56, 55, 70, 67, 61, 69, 65, 66, 83, 101, 44, 61, 48, 64, 61, 63, 67, 61, 68, 55, 53, 64, 80, 44, 74, 56, 63, 45, 52, 123, 40, 58, 70, 54, 74, 71, 58, 72, 82, 49, 56, 79, 73, 81, 62, 68, 109, 79, 112, 54, 66, 62, 61, 55, 53, 51, 60, 52, 67, 58, 75, 51, 58, 50, 72, 105, 53, 60, 45, 63, 74, 57, 55, 92, 49, 65, 69, 56, 62, 82, 69, 92, 79, 91, 66, 61, 62, 64, 62, 142, 60, 48, 92, 61, 64, 67, 58, 38, 64, 68, 66, 55, 40, 56, 68, 49, 44, 48, 73, 50, 65, 83, 54, 49, 43, 145, 47, 69, 67, 60, 105, 52, 72, 52, 62, 57, 52, 63, 59, 117, 82, 63, 58, 55, 61, 65, 52, 44, 43, 82, 51, 54, 76, 65, 56, 56, 79, 78, 78, 70, 79, 61, 48, 45, 74, 77, 48, 104, 54, 56, 54, 53, 77, 76, 71, 83, 85, 83, 57, 51, 81, 81, 51, 50, 65, 71, 43, 87, 84, 55, 76, 61, 66, 50, 83, 59, 91, 61, 55, 54, 80, 57, 59, 53, 74, 106, 89, 72, 83, 55, 57, 55, 136, 62, 88, 56, 50, 74, 64, 97, 66, 62, 67, 64, 60, 41, 57, 66, 63, 56, 70, 63, 60, 64, 57, 52, 67, 48, 54, 62, 82, 41, 95, 78, 84, 76, 67, 91, 55, 76, 75, 66, 51, 53, 67, 84, 57, 57, 62, 53, 62, 89, 47, 57, 93, 58, 73, 54, 67, 59, 67, 71, 113, 74, 89, 69, 48, 59, 98, 71, 76, 60, 75, 89, 74, 71, 67, 56, 90, 45, 59, 48, 63, 54, 48, 49, 59, 122, 70, 58, 53, 54, 50, 54, 62, 90, 53, 66, 59, 61, 25, 120, 55, 57, 54, 62, 71, 101, 70, 79, 52, 75, 72, 155, 61, 56, 70, 72, 69, 86, 72, 63, 65, 50, 55, 62, 47, 57, 72, 51, 53, 59, 65, 73, 54, 63, 51, 77, 85, 69, 87, 57, 78, 43, 59, 37, 61, 92, 62, 65, 68, 72, 52, 52, 67, 72, 60, 88, 68, 66, 64, 59, 56, 74, 38, 99, 70, 58, 68, 88, 46, 64, 76, 65, 62, 126, 54, 75, 48, 55, 60, 113, 59, 54, 64, 53, 61, 73, 76, 52, 69, 47, 63, 55, 49, 67, 58, 50, 53, 58, 40, 91, 64, 78, 60, 44, 64, 74, 65, 78, 75, 83, 98, 52, 65, 80, 50, 60, 72, 64, 73, 53, 53, 106, 75, 50, 69, 64, 78, 66, 57, 60, 63, 65, 59, 106, 69, 88, 92, 128, 62, 86, 67, 67, 58, 67, 55, 66, 51, 70, 53, 47, 63, 59, 69, 107, 59, 70, 80, 63, 77, 45, 45, 55, 66, 63, 83, 57, 76, 64, 129, 52, 79, 44, 86, 51, 66, 50, 46, 51, 66, 51, 42, 82, 60, 72, 49, 69, 64, 67, 39, 45, 57, 63, 84, 68, 83, 49, 70, 70, 95, 56, 73, 57, 54, 92, 69, 64, 36, 93, 73, 82, 62, 50, 83, 50, 
77, 88, 46, 53, 117, 90, 63, 68, 70, 88, 58, 75, 65, 86, 58, 56, 64, 73, 55, 67, 46, 119, 56, 79, 100, 80, 66, 51, 53, 69, 75, 67, 58, 54, 66, 47, 46, 58, 76, 66, 84, 66, 55, 73, 48, 57, 54, 64, 78, 58, 50, 65, 68, 86, 65, 34, 74, 83, 80, 64, 70, 48, 66, 75, 51, 70, 81, 50, 60, 65, 73, 55, 89, 60, 91, 62, 63, 71, 67, 60, 86, 66, 76, 82, 68, 60, 66, 70, 65, 71, 42, 58, 47, 82, 48, 85, 54, 67, 56, 45, 85, 68, 84, 103, 80, 62, 51, 59, 53, 56, 57, 54, 56, 41, 58, 54, 55, 85, 53, 72, 66, 50, 59, 85, 62, 43, 62, 51, 82, 72, 65, 48, 77, 64, 55, 87, 54, 59, 50, 54, 55, 69, 69, 68, 71, 62, 54, 102, 113, 63, 67, 67, 65, 58, 63, 77, 55, 59, 69, 54, 87, 88, 64, 50, 60, 89, 104, 59, 46, 46, 64, 67, 77, 61, 77, 55, 83, 54, 80, 50, 58, 37, 86, 39, 59, 45, 54, 65, 60, 75, 74, 49, 72, 51, 74, 91, 97, 54, 79, 61, 79, 64, 60, 58, 116, 73, 53, 63, 71, 66, 78, 51, 87, 60, 128, 58, 57, 50, 75, 78, 58, 85, 58, 84, 58, 62, 69, 67, 57, 49, 74, 80, 58, 84, 53, 67, 72, 41, 80, 87, 63, 89, 38, 78, 82, 44, 77, 60, 55, 67, 56, 56, 77, 72, 88, 62, 63, 56, 42, 63, 55, 58, 59, 88, 61, 74, 50, 75, 79, 55, 50, 64, 60, 84, 58, 94, 74, 56, 74, 104, 65, 89, 83, 61, 50, 63, 78, 60, 72, 79, 54, 59, 66, 60, 47, 58, 51, 55, 51, 53, 78, 54, 112, 75, 53, 57, 84, 43, 51, 52, 60, 58, 47, 67, 56, 67, 68, 61, 52, 62, 41, 55, 53, 58, 73, 60, 57, 40, 70, 53, 52, 67, 50, 76, 55, 71, 46, 61, 57, 44, 61, 84, 64, 53, 95, 48, 41, 58, 49, 49, 93, 65, 86, 52, 71, 60, 67, 62, 78, 70, 87, 62, 68, 56, 102, 68, 123, 65, 83, 53, 56, 71, 55, 125, 76, 55, 54, 86, 47, 71, 71, 64, 56, 100, 59, 59, 47, 53, 55, 52, 59, 90, 117, 87, 65, 67, 58, 105, 58, 60, 55, 84, 78, 66, 81, 57, 46, 50, 62, 61, 46, 63, 50, 64, 60, 64, 63, 70, 25, 106, 73, 57, 55, 67, 75, 72, 48, 59, 75, 51, 55, 96, 55, 60, 67, 74, 80, 56, 74, 76, 64, 73, 73, 68, 76, 63, 66, 80, 97, 73, 37, 52, 83, 65, 61, 52, 114, 68, 51, 84, 75, 113, 62, 75, 68, 75, 57, 65, 66, 57, 73, 79, 49, 62, 69, 66, 44, 72, 68, 78, 68, 67, 84, 57, 64, 86, 65, 64, 68, 69, 48, 77, 61, 65, 77, 58, 45, 67, 53, 55, 78, 62, 52, 59, 73, 73, 61, 76, 69, 80, 59, 67, 70, 51, 76, 67, 62, 55, 84, 50, 44, 50, 74, 70, 68, 52, 75, 62, 48, 93, 80, 53, 91, 61, 68, 82, 67, 82, 63, 61, 52, 65, 44, 53, 52, 62, 54, 58, 85, 56, 56, 51, 71, 101, 92, 73, 50, 52, 47, 48, 59, 114, 62, 67, 72, 76, 63, 62, 57, 47, 65, 70, 75, 70, 64, 69, 65, 73, 80, 71, 69, 74, 55, 66, 52, 65, 70, 90, 61, 71, 54, 62, 78, 81, 65, 65, 70, 50, 71, 60, 81, 94, 53, 62, 101, 116, 67, 64, 65, 62, 81, 66, 62, 67, 69, 57, 64, 47, 58, 62, 81, 66, 70, 54, 56, 57, 87, 77, 50, 75, 80, 47, 58, 53, 77, 58, 58, 59, 79, 53, 67, 60, 51, 59, 59, 74, 65, 60, 71, 79, 66, 50, 74, 74, 55, 54, 67, 38, 81, 59, 68, 70, 61, 66, 78, 85, 53, 75, 87, 53, 79, 64, 60, 78, 66, 56, 58, 81, 69, 82, 60, 66, 71, 62, 54, 65, 58, 63, 61, 78, 61, 60, 60, 43, 58, 55, 77, 37, 73, 53, 61, 76, 62, 59, 39, 66, 106, 61, 46, 69, 64, 59, 76, 82, 60, 63, 49, 74, 56, 69, 61, 72, 76, 72, 80, 80, 51, 75, 76, 41, 78, 42, 84, 58, 73, 50, 75, 77, 83, 61, 54, 75, 68, 57, 91, 74, 113, 80, 70, 105, 64, 58, 47, 78, 55, 78, 55, 83, 64, 57, 79, 51, 62, 74, 64, 73, 57, 65, 65, 70, 55, 50, 71, 53, 97, 86, 56, 91, 88, 57, 70, 69, 69, 62, 56, 64, 59, 60, 101, 75, 86, 70, 68, 48, 62, 42, 74, 56, 65, 53, 104, 57, 79, 55, 49, 72, 48, 48, 72, 55, 77, 61, 68, 85, 69, 71, 81, 76, 66, 61, 45, 68, 80, 52, 76, 44, 69, 65, 51, 80, 68, 75, 104, 74, 71, 67, 77, 42, 51, 71, 51, 79, 59, 52, 73, 60, 92, 73, 47, 64, 65, 62, 83, 87, 73, 76, 76, 62, 65, 74, 75, 57, 66, 66, 37, 64, 85, 58, 94, 52, 89, 66, 54, 74, 81, 61, 58, 56, 83, 52, 65, 
64, 56, 62, 47, 56, 58, 52, 72, 76, 61, 67, 68, 58, 59, 60, 67, 66, 73, 55, 49, 51, 76, 53, 65, 91, 76, 79, 72, 95, 57, 51, 73, 42, 50, 79, 55, 115, 63, 43, 77, 70, 62, 63, 77, 65, 77, 89, 69, 83, 51, 72, 52, 94, 46, 52, 70, 76, 67, 56, 76, 100, 68, 59, 48, 47, 75, 72, 83, 45, 52, 59, 72, 52, 53, 42, 48, 70, 71, 70, 53, 84, 46, 84, 52, 43, 82, 68, 61, 60, 59, 69, 73, 52, 76, 80, 68, 78, 77, 65, 90, 69, 84, 49, 57, 63, 87, 65, 53, 61, 101, 52, 45, 77, 72, 46, 51, 54, 75, 57, 69, 64, 45, 68, 59, 55, 66, 88, 95, 68, 56, 65, 71, 50, 79, 74, 55, 65, 66, 74, 60, 67, 73, 67, 78, 66, 47, 85, 72, 78, 53, 64, 57, 73, 50, 70, 82, 75, 87, 67, 61, 58, 40, 64, 59, 63, 45, 66, 67, 43, 66, 74, 63, 67, 49, 82, 49, 64, 111, 53, 56, 61, 55, 58, 68, 87, 60, 76, 64, 73, 64, 77, 73, 99, 60, 59, 46, 65, 91, 43, 60, 61, 66, 67, 57, 61, 83, 53, 58, 68, 43, 73, 58, 77, 50, 45, 66, 66, 77, 49, 61, 55, 65, 73, 88, 66, 58, 78, 51, 73, 56, 56, 67, 66, 53, 130, 48, 69, 70, 55, 72, 74, 64, 71, 46, 84, 68, 70, 66, 73, 58, 74, 81, 96, 68, 69, 78, 70, 74, 63, 63, 72, 55, 73, 54, 70, 96, 57, 64, 59, 69, 55, 79, 48, 86, 56, 97, 92, 61, 63, 92, 73, 101, 64, 78, 91, 61, 56, 82, 61, 49, 56, 53, 64, 49, 64, 63, 61, 73, 64, 53, 57, 48, 63, 68, 63, 71, 79, 100, 62, 65, 68, 54, 67, 45, 74, 77, 68, 63, 60, 64, 43, 71, 52, 71, 71, 51, 103, 62, 75, 52, 55, 75, 54, 97, 86, 68, 69, 67, 82, 51, 48, 98, 72, 52, 50, 77, 71, 67, 74, 71, 53, 59, 58, 79, 65, 63, 50, 64, 61, 75, 58, 60, 84, 54, 77, 76, 53, 67, 72, 77, 68, 60, 69, 40, 43, 41, 72, 79, 64, 76, 89, 59, 76, 78, 62, 72, 67, 81, 56, 65, 60, 61, 65, 68, 50, 58, 53, 45, 72, 75, 53, 64, 50, 68, 42, 59, 47, 67, 57, 51, 61, 55, 71, 54, 132, 71, 77, 81, 58, 73, 62, 60, 60, 62, 62, 68, 42, 71, 65, 75, 46, 56, 53, 71, 61, 72, 73, 64, 58, 88, 66, 76, 62, 75, 84, 77, 63, 50, 83, 61, 45, 70, 45, 73, 70, 60, 69, 59, 61, 73, 75, 58, 85, 54, 71, 64, 67, 70, 87, 64, 61, 87, 59, 57, 76, 71, 59, 72, 68, 58, 69, 74, 41, 48, 57, 75, 69, 48, 79, 50, 71, 55, 61, 55, 60, 39, 70, 79, 76, 52, 60, 64, 93, 54, 59, 47, 58, 80, 42, 62, 49, 66, 63, 84, 62, 79, 64, 71, 83, 84, 74, 76, 85, 60, 50, 59, 74, 51, 82, 60, 69, 52, 75, 63, 71, 56, 57, 73, 48, 55, 60, 69, 64, 71, 71, 63, 67, 79, 56, 62, 64, 67, 62, 59, 60, 58, 71, 56, 89, 67, 70, 73, 51, 72, 73, 61, 69, 64, 83, 55, 53, 86, 67, 54, 54, 60, 77, 69, 59, 98, 68, 56, 53, 48, 86, 67, 60, 48, 67, 63, 69, 67, 102, 60, 63, 52, 56, 61, 63, 59, 78, 62, 49, 65, 39, 65, 62, 77, 61, 52, 69, 56, 52, 59, 78, 68, 71, 50, 82, 62, 48, 51, 58, 68, 60, 78, 53, 61, 61, 60, 93, 58, 61, 51, 61, 70, 53, 84, 51, 76, 58, 87, 58, 73, 61, 63, 57, 58, 57, 64, 61, 78, 49, 49, 59, 56, 69, 77, 64, 63, 66, 61, 72, 72, 80, 50, 69, 74, 82, 54, 95, 51, 93, 76, 72, 62, 63, 71, 58, 139, 59, 88, 56, 75, 80, 60, 66, 74, 62, 57, 62, 57, 72, 69, 64, 34, 74, 60, 60, 85, 60, 60, 63, 48, 111, 80, 53, 63, 63, 58, 47, 59, 62, 62, 59, 62, 63, 69, 121, 78, 69, 56, 68, 59, 54, 96, 70, 60, 57, 59, 56, 74, 58, 54, 157, 54, 58, 51, 58, 52, 56, 55, 58, 70, 48, 60, 61, 70, 66, 65, 72, 68, 74, 58, 73, 67, 70, 75, 97, 66, 96, 60, 56, 57, 75, 56, 75, 65, 64, 66, 71, 77, 46, 61, 81, 61, 62, 68, 79, 70, 129, 51, 63, 62, 56, 77, 67, 61, 71, 34, 74, 53, 53, 94, 57, 65, 61, 65, 83, 67, 73, 71, 53, 71, 33, 63, 54, 65, 80, 63, 62, 54, 77, 50, 66, 72, 57, 88, 61, 58, 74, 73, 57, 58, 38, 88, 80, 67, 73, 72, 52, 71, 71, 57, 55, 141, 68, 73, 61, 58, 67, 75, 63, 69, 61, 94, 68, 44, 47, 58, 50, 59, 56, 53, 89, 62, 54, 50, 55, 63, 58, 68, 75, 83, 42, 62, 56, 62, 79, 75, 51, 62, 57, 62, 69, 73, 59, 59, 70, 55, 73, 72, 54, 66, 
62, 89, 68, 90, 70, 57, 71, 63, 75, 59, 55, 54, 55, 61, 50, 64, 75, 48, 55, 64, 97, 76, 64, 72, 53, 64, 88, 75, 63, 57, 61, 75, 62, 114, 46, 56, 56, 56, 70, 74, 71, 63, 75, 56, 65, 66, 58, 59, 62, 75, 101, 75, 78, 68, 98, 63, 83, 53, 69, 52, 51, 135, 30, 68, 68, 59, 63, 57, 68, 71, 67, 44, 71, 75, 58, 57, 77, 58, 71, 67, 44, 58, 53, 85, 53, 58, 69, 55, 77, 50, 63, 53, 52, 79, 66, 61, 64, 45, 78, 58, 77, 59, 65, 41, 48, 63, 63, 96, 87, 75, 74, 57, 86, 72, 87, 66, 65, 53, 77, 81, 75, 61, 62, 73, 55, 71, 102, 63, 80, 82, 71, 56, 98, 68, 55, 66, 55, 57, 59, 53, 86, 125, 63, 97, 61, 62, 64, 52, 60, 92, 93, 66, 66, 75, 72, 64, 65, 68, 68, 80, 69, 52, 68, 62, 69, 57, 55, 46, 63, 61, 47, 66, 56, 69, 63, 68, 58, 59, 67, 65, 64, 65, 59, 60, 65, 69, 63, 56, 56, 74, 58, 77, 76, 107, 58, 74, 27, 50, 69, 68, 47, 89, 58, 80, 60, 82, 55, 52, 49, 61, 48, 65, 73, 68, 59, 51, 68, 54, 56, 59, 52, 64, 62, 60, 68, 68, 63, 84, 70, 62, 79, 65, 65, 53, 59, 62, 57, 72, 95, 66, 57, 56, 74, 85, 41, 55, 74, 62, 54, 102, 60, 62, 58, 64, 74, 45, 73, 98, 78, 69, 75, 65, 50, 50, 52, 56, 71, 61, 52, 47, 63, 47, 72, 77, 51, 42, 57, 66, 70, 59, 65, 52, 78, 55, 69, 52, 68, 95, 72, 89, 69, 54, 58, 55, 61, 94, 87, 58, 63, 76, 77, 60, 92, 58, 137, 69, 63, 57, 53, 74, 65, 72, 70, 59, 49, 59, 68, 68, 56, 65, 58, 77, 59, 60, 44, 53, 79, 71, 58, 86, 49, 79, 65, 51, 49, 61, 63, 59, 58, 71, 60, 84, 50, 68, 57, 72, 72, 51, 83, 51, 51, 119, 68, 56, 61, 46, 64, 53, 57, 56, 50, 62, 84, 69, 82, 68, 50, 64, 72, 75, 87, 62, 65, 81, 84, 58, 51, 52, 90, 99, 65, 62, 61, 79, 68, 69, 53, 54, 37, 76, 68, 68, 60, 66, 67, 51, 44, 64, 87, 120, 54, 42, 67, 64, 70, 33, 52, 76, 51, 59, 61, 79, 69, 65, 44, 58, 58, 56, 63, 62, 60, 64, 62, 65, 61, 81, 60, 52, 51, 80, 65, 61, 69, 58, 54, 50, 60, 54, 79, 98, 127, 69, 69, 66, 69, 62, 82, 65, 69, 93, 59, 78, 55, 65, 67, 63, 83, 55, 62, 72, 55, 55, 60, 67, 39, 77, 43, 61, 59, 54, 60, 61, 70, 62, 53, 58, 63, 69, 69, 59, 61, 82, 58, 56, 66, 79, 58, 46, 74, 59, 58, 50, 62, 50, 58, 70, 71, 53, 55, 60, 58, 60, 67, 55, 71, 54, 49, 69, 78, 107, 68, 40, 71, 59, 71, 59, 59, 77, 56, 63, 72, 65, 118, 74, 65, 79, 61, 88, 81, 55, 68, 76, 82, 58, 52, 61, 54, 61, 72, 59, 84, 65, 64, 72, 60, 63, 76, 57, 52, 74, 66, 108, 77, 47, 53, 50, 50, 53, 75, 80, 80, 63, 73, 62, 63, 91, 62, 77, 58, 60, 65, 70, 67, 61, 54, 82, 70, 64, 74, 71, 54, 61, 57, 73, 77, 63, 65, 89, 58, 53, 57, 52, 54, 64, 68, 110, 60, 67, 60, 80, 62, 53, 85, 77, 60, 76, 51, 61, 66, 61, 56, 58, 53, 54, 57, 71, 54, 60, 55, 65, 75, 52, 58, 64, 54, 104, 62, 67, 77, 57, 66, 58, 73, 60, 63, 69, 56, 82, 64, 51, 96, 74, 61, 58, 45, 80, 59, 61, 87, 56, 65, 49, 66, 71, 84, 64, 77, 57, 59, 40, 85, 64, 51, 61, 64, 62, 68, 61, 78, 54, 67, 59, 71, 59, 71, 73, 77, 50, 82, 101, 85, 67, 54, 57, 49, 62, 71, 66, 59, 66, 58, 70, 52, 75, 46, 62, 38, 94, 79, 72, 71, 57, 67, 65, 54, 116, 61, 77, 71, 61, 76, 79, 70, 85, 65, 50, 52, 56, 55, 100, 66, 79, 69, 60, 68, 66, 47, 55, 62, 67, 101, 48, 79, 70, 66, 62, 57, 77, 64, 66, 45, 60, 50, 95, 48, 63, 77, 72, 76, 64, 58, 49, 73, 70, 46, 55, 64, 57, 70, 56, 57, 72, 74, 49, 75, 66, 56, 66, 76, 65, 63, 58, 75, 88, 66, 87, 89, 82, 72, 53, 58, 61, 54, 58, 56, 74, 70, 67, 62, 69, 59, 52, 68, 79, 113, 83, 65, 49, 69, 70, 54, 60, 67, 63, 61, 61, 55, 53, 66, 51, 64, 54, 61, 71, 41, 87, 63, 64, 78, 81, 45, 59, 43, 55, 85, 68, 58, 67, 93, 67, 62, 57, 59, 74, 88, 69, 66, 62, 52, 58, 69, 86, 110, 69, 57, 51, 60, 65, 61, 65, 56, 69, 56, 64, 58, 55, 68, 44, 76, 43, 56, 61, 69, 70, 71, 59, 67, 70, 48, 51, 102, 63, 56, 63, 60, 56, 79, 47, 51, 67, 56, 70, 52, 
86, 43, 67, 50, 48, 82, 59, 67, 63, 66, 51, 73, 70, 62, 63, 93, 55, 69, 52, 62, 67, 60, 77, 55, 72, 62, 75, 57, 75, 76, 61, 60, 68, 71, 70, 53, 53, 70, 62, 56, 86, 48, 66, 78, 70, 79, 36, 61, 71, 57, 86, 66, 73, 71, 50, 63, 64, 74, 77, 57, 72, 85, 69, 50, 79, 55, 65, 58, 61, 44, 70, 62, 48, 59, 60, 98, 83, 50, 75, 54, 73, 58, 105, 70, 62, 59, 50, 106, 67, 65, 83, 47, 54, 74, 65, 71, 77, 85, 62, 68, 66, 60, 111, 72, 45, 57, 62, 55, 81, 63, 63, 58, 63, 73, 72, 66, 52, 59, 57, 51, 71, 63, 32, 60, 59, 51, 79, 87, 57, 51, 79, 59, 80, 75, 57, 61, 91, 64, 40, 76, 67, 57, 87, 52, 49, 79, 67, 56, 62, 73, 47, 77, 75, 70, 65, 57, 64, 66, 43, 74, 83, 68, 60, 98, 61, 47, 71, 78, 49, 57, 75, 70, 71, 64, 58, 67, 66, 128, 44, 67, 102, 51, 66, 71, 56, 67, 66, 71, 61, 51, 79, 55, 53, 55, 43, 66, 85, 66, 96, 60, 78, 59, 67, 73, 82, 56, 67, 62, 61, 64, 72, 56, 73, 56, 67, 71, 49, 82, 52, 64, 67, 71, 68, 91, 55, 54, 57, 50, 77, 66, 43, 64, 54, 60, 52, 70, 99, 83, 58, 67, 59, 59, 62, 103, 87, 63, 76, 62, 68, 79, 61, 73, 50, 55, 56, 69, 55, 65, 67, 69, 83, 75, 61, 68, 87, 58, 62, 57, 56, 85, 58, 61, 45, 75, 77, 69, 66, 60, 81, 73, 60, 78, 56, 44, 60, 61, 41, 64, 73, 66, 72, 89, 54, 54, 87, 61, 31, 83, 66, 52, 77, 66, 74, 63, 63, 59, 71, 77, 56, 94, 65, 47, 97, 59, 46, 55, 58, 77, 79, 65, 60, 51, 50, 56, 93, 61, 71, 65, 52, 77, 54, 75, 83, 78, 70, 57, 62, 57, 65, 69, 58, 54, 78, 53, 69, 61, 58, 57, 48, 73, 68, 67, 63, 44, 65, 59, 78, 68, 59, 68, 76, 66, 74, 65, 74, 89, 57, 50, 59, 60, 75, 67, 102, 59, 52, 64, 63, 65, 119, 72, 71, 78, 68, 76, 61, 75, 60, 58, 59, 89, 51, 66, 71, 128, 56, 62, 69, 53, 88, 82, 68, 60, 71, 53, 69, 68, 57, 44, 97, 53, 76, 68, 79, 60, 60, 71, 83, 55, 70, 72, 66, 78, 84, 50, 60, 60, 94, 78, 65, 58, 91, 59, 60, 114, 75, 60, 67, 56, 68, 50, 63, 65, 71, 71, 63, 42, 67, 55, 49, 66, 70, 61, 63, 83, 66, 46, 58, 80, 69, 60, 54, 54, 68, 44, 69, 63, 59, 47, 72, 45, 76, 58, 94, 59, 69, 56, 79, 64, 80, 65, 54, 70, 65, 88, 70, 44, 107, 94, 69, 73, 62, 68, 76, 59, 59, 58, 92, 63, 75, 95, 60, 58, 58, 55, 55, 53, 54, 56, 46, 71, 77, 63, 60, 49, 74, 100, 74, 30, 69, 61, 66, 76, 55, 60, 48, 63, 109, 42, 79, 78, 59, 64, 56, 93, 55, 56, 67, 57, 75, 59, 60, 67, 74, 35, 56, 68, 67, 62, 54, 94, 49, 77, 57, 112, 52, 66, 65, 50, 61, 65, 86, 72, 54, 71, 58, 62, 56, 76, 52, 58, 55, 87, 69, 69, 55, 84, 59, 73, 61, 64, 57, 83, 40, 68, 73, 81, 63, 62, 62, 58, 66, 77, 67, 75, 68, 61, 76, 57, 60, 73, 60, 62, 53, 93, 79, 44, 57, 69, 60, 83, 62, 52, 95, 69, 74, 53, 80, 56, 133, 94, 48, 57, 67, 80, 61, 78, 64, 50, 53, 57, 63, 62, 61, 75, 76, 97, 54, 61, 63, 59, 62, 81, 109, 58, 49, 71, 56, 53, 81, 68, 64, 55, 59, 56, 80, 85, 67, 54, 50, 72, 49, 68, 48, 65, 55, 98, 70, 64, 92, 82, 66, 67, 70, 66, 46, 61, 74, 82, 62, 61, 100, 87, 62, 58, 70, 62, 64, 53, 43, 54, 90, 73, 59, 70, 66, 75, 85, 88, 40, 71, 42, 64, 74, 82, 57, 47, 66, 60, 46, 60, 68, 86, 67, 54, 53, 75, 54, 67, 59, 65, 99, 79, 54, 70, 65, 71, 63, 71, 76, 90, 64, 66, 69, 73, 78, 95, 69, 62, 59, 69, 71, 63, 74, 53, 76, 63, 78, 56, 69, 71, 103, 69, 87, 63, 68, 63, 63, 63, 65, 55, 53, 63, 54, 65, 49, 51, 57, 74, 48, 68, 135, 74, 75, 84, 72, 58, 58, 57, 78, 55, 52, 67, 64, 61, 60, 58, 95, 42, 66, 57, 76, 77, 57, 81, 59, 65, 61, 81, 58, 73, 87, 56, 52, 81, 50, 91, 60, 63, 59, 58, 77, 54, 64, 57, 74, 56, 64, 53, 83, 67, 95, 67, 59, 70, 68, 61, 58, 60, 69, 60, 84, 68, 61, 56, 60, 62, 76, 56, 88, 68, 64, 66, 75, 66, 80, 48, 61, 62, 68, 66, 55, 66, 76, 53, 55, 55, 64, 70, 55, 67, 79, 62, 70, 60, 66, 81, 52, 59, 53, 71, 72, 74, 81, 58, 56, 74, 53, 67, 58, 62, 56, 63, 72, 67, 
88, 64, 60, 78, 54, 70, 76, 57, 61, 62, 68, 90, 55, 70, 76, 67, 53, 63, 58, 75, 56, 63, 68, 52, 79, 60, 67, 61, 56, 56, 56, 63, 61, 82, 63, 57, 51, 87, 118, 58, 89, 73, 43, 112, 61, 62, 59, 74, 50, 62, 66, 80, 70, 94, 58, 53, 73, 53, 78, 67, 83, 66, 67, 77, 67, 61, 63, 48, 70, 64, 39, 78, 57, 67, 73, 69, 81, 80, 101, 58, 53, 62, 70, 61, 62, 67, 44, 55, 63, 66, 60, 46, 77, 63, 79, 48, 72, 56, 75, 55, 57, 86, 50, 47, 66, 43, 62, 81, 71, 52, 61, 52, 60, 79, 61, 73, 67, 75, 82, 72, 53, 62, 70, 112, 61, 79, 62, 74, 68, 57, 59, 72, 63, 58, 49, 61, 50, 53, 76, 65, 60, 58, 57, 51, 61, 64, 62, 79, 71, 77, 37, 56, 59, 73, 63, 69, 69, 85, 72, 50, 76, 54, 57, 72, 77, 131, 72, 48, 59, 71, 49, 67, 79, 77, 69, 82, 65, 92, 57, 63, 99, 50, 70, 56, 59, 73, 59, 76, 52, 56, 66, 63, 72, 59, 71, 61, 53, 51, 90, 50, 53, 43, 83, 111, 77, 99, 58, 70, 50, 74, 59, 52, 77, 62, 74, 66, 77, 57, 54, 40, 58, 67, 99, 55, 41, 57, 48, 74, 64, 59, 62, 69, 75, 67, 58, 69, 67, 61, 109, 53, 67, 61, 81, 73, 69, 77, 75, 68, 55, 53, 62, 101, 58, 51, 49, 77, 61, 57, 75, 55, 69, 44, 68, 68, 51, 50, 71, 79, 72, 57, 41, 59, 60, 52, 73, 60, 62, 68, 95, 61, 66, 57, 130, 80, 60, 60, 68, 72, 69, 81, 50, 77, 79, 66, 54, 66, 82, 55, 56, 70, 61, 82, 67, 58, 57, 76, 91, 76, 60, 74, 75, 54, 117, 66, 57, 54, 44, 64, 74, 58, 90, 55, 57, 64, 81, 87, 71, 50, 62, 45, 46, 47, 62, 75, 52, 97, 76, 56, 60, 71, 46, 62, 76, 108, 56, 71, 53, 63, 90, 64, 62, 46, 55, 76, 62, 64, 59, 70, 75, 61, 58, 50, 63, 71, 71, 53, 56, 82, 75, 62, 70, 70, 75, 58, 65, 74, 63, 62, 53, 67, 64, 53, 66, 62, 74, 74, 66, 53, 66, 56, 69, 76, 55, 66, 72, 89, 65, 78, 58, 92, 59, 62, 59, 63, 74, 43, 63, 92, 52, 67, 82, 56, 63, 96, 73, 57, 57, 57, 70, 54, 78, 95, 67, 118, 126, 54, 59, 50, 51, 48, 56, 58, 68, 55, 53, 52, 58, 63, 80, 87, 71, 65, 61, 95, 39, 79, 60, 64, 72, 65, 78, 75, 54, 51, 66, 43, 70, 77, 78, 72, 103, 80, 78, 82, 46, 53, 50, 61, 45, 72, 70, 54, 72, 75, 108, 82, 91, 58, 67, 48, 75, 85, 62, 103, 69, 60, 54, 75, 74, 55, 75, 86, 80, 54, 71, 66, 81, 70, 64, 72, 55, 60, 83, 66, 63, 79, 66, 64, 67, 65, 71, 60, 61, 53, 63, 106, 56, 66, 61, 80, 74, 76, 68, 74, 64, 61, 38, 53, 84, 61, 66, 71, 61, 57, 52, 67, 57, 58, 57, 71, 74, 61, 55, 55, 59, 57, 64, 54, 72, 56, 66, 76, 74, 71, 78, 58, 66, 85, 65, 91, 67, 62, 76, 62, 99, 73, 62, 57, 68, 74, 91, 52, 70, 68, 58, 59, 50, 75, 85, 66, 66, 80, 66, 67, 78, 54, 59, 54, 86, 57, 63, 77, 67, 53, 44, 70, 59, 57, 72, 93, 57, 54, 45, 61, 76, 75, 68, 72, 72, 69, 78, 67, 72, 66, 61, 60, 55, 60, 62, 71, 82, 61, 38, 55, 75, 55, 65, 71, 52, 78, 51, 108, 73, 50, 66, 72, 60, 62, 109, 51, 76, 50, 46, 62, 63, 68, 61, 75, 58, 52, 61, 72, 59, 73, 64, 56, 50, 37, 50, 63, 70, 60, 67, 83, 52, 59, 58, 78, 53, 68, 65, 47, 69, 55, 61, 54, 74, 49, 64, 119, 55, 64, 43, 66, 56, 62, 72, 67, 107, 66, 58, 68, 70, 79, 55, 60, 81, 41, 59, 62, 74, 45, 43, 56, 65, 78, 50, 65, 76, 58, 88, 68, 76, 58, 61, 62, 69, 71, 61, 102, 65, 122, 48, 64, 53, 66, 54, 73, 53, 77, 75, 67, 71, 71, 75, 76, 71, 59, 57, 50, 63, 69, 56, 64, 73, 40, 85, 58, 72, 72, 59, 52, 58, 46, 70, 54, 65, 59, 71, 74, 56, 62, 57, 58, 58, 44, 41, 57, 79, 58, 56, 65, 61, 63, 68, 59, 85, 92, 76, 63, 65, 71, 80, 50, 66, 70, 63, 50, 90, 93, 86, 57, 107, 74, 75, 65, 55, 63, 61, 71, 58, 52, 60, 76, 67, 74, 57, 84, 60, 76, 61, 146, 62, 54, 59, 68, 74, 59, 61, 79, 63, 74, 60, 65, 71, 62, 76, 88, 53, 60, 66, 58, 120, 58, 59, 60, 52, 65, 59, 67, 164, 113, 67, 64, 66, 60, 48, 51, 54, 91, 80, 54, 78, 64, 60, 47, 50, 51, 62, 68, 66, 87, 57, 54, 46, 65, 45, 56, 60, 71, 65, 55, 70, 56, 55, 44, 113, 53, 68, 64, 42, 
57, 57, 120, 46, 62, 53, 59, 76, 53, 42, 56, 53, 62, 56, 62, 57, 59, 51, 105, 33, 64, 46, 56, 53, 50, 74, 55, 76, 66, 95, 63, 60, 101, 87, 67, 62, 62, 80, 54, 59, 67, 76, 67, 64, 67, 70, 107, 72, 92, 66, 64, 64, 58, 59, 94, 58, 63, 76, 59, 61, 49, 58, 48, 70, 50, 44, 63, 59, 64, 54, 66, 70, 77, 58, 63, 68, 78, 63, 56, 53, 69, 60, 68, 57, 60, 47, 52, 65, 83, 59, 71, 90, 64, 69, 60, 65, 79, 87, 73, 70, 69, 61, 59, 74, 61, 66, 83, 80, 53, 59, 64, 63, 56, 64, 63, 58, 79, 62, 78, 77, 77, 66, 68, 56, 57, 56, 68, 67, 74, 77, 58, 71, 91, 79, 60, 60, 64, 85, 83, 69, 52, 71, 82, 73, 56, 62, 60, 69, 71, 50, 62, 56, 63, 52, 78, 85, 57, 47, 67, 69, 68, 75, 64, 70, 73, 60, 57, 99, 149, 59, 64, 130, 62, 69, 69, 50, 79, 63, 58, 77, 63, 80, 62, 110, 98, 55, 64, 68, 68, 77, 64, 57, 76, 71, 64, 52, 60, 71, 65, 79, 52, 63, 54, 72, 82, 48, 84, 52, 128, 62, 45, 70, 67, 116, 79, 72, 50, 60, 67, 79, 60, 64, 70, 41, 70, 68, 29, 56, 93, 64, 60, 50, 75, 56, 62, 58, 74, 73, 66, 61, 64, 88, 67, 68, 64, 58, 64, 58, 65, 83, 53, 61, 94, 69, 105, 67, 78, 74, 65, 66, 63, 94, 81, 66, 57, 70, 43, 63, 82, 75, 61, 68, 78, 72, 56, 64, 57, 77, 67, 68, 65, 62, 66, 70, 58, 60, 52, 64, 57, 68, 83, 86, 49, 64, 40, 51, 106, 79, 97, 61, 69, 90, 66, 63, 64, 83, 47, 74, 45, 55, 69, 64, 60, 87, 63, 72, 45, 60, 72, 54, 65, 71, 58, 66, 67, 59, 50, 69, 83, 69, 73, 66, 76, 80, 59, 74, 81, 61, 72, 65, 61, 74, 133, 58, 56, 77, 79, 51, 63, 58, 71, 80, 46, 71, 94, 64, 100, 58, 55, 53, 51, 71, 60, 50, 60, 57, 52, 71, 56, 68, 64, 56, 52, 65, 54, 87, 60, 82, 56, 69, 70, 90, 74, 61, 61, 63, 64, 38, 64, 50, 54, 82, 61, 67, 61, 70, 56, 47, 48, 72, 54, 71, 55, 54, 124, 61, 74, 68, 59, 55, 76, 66, 73, 57, 71, 57, 47, 56, 63, 60, 67, 50, 62, 65, 66, 63, 58, 52, 64, 61, 81, 65, 51, 63, 56, 49, 66, 71, 77, 61, 62, 60, 59, 53, 56, 84, 70, 82, 64, 54, 54, 49, 67, 69, 65, 65, 46, 52, 46, 74, 67, 81, 53, 56, 78, 81, 82, 56, 52, 64, 54, 67, 67, 66, 67, 60, 68, 67, 57, 72, 55, 60, 61, 55, 60, 126, 67, 59, 53, 64, 76, 63, 62, 54, 51, 63, 64, 63, 53, 56, 67, 68, 43, 52, 56, 65, 73, 55, 88, 66, 54, 69, 68, 60, 47, 56, 56, 73, 72, 68, 94, 58, 59, 62, 65, 62, 66, 61, 67, 61, 61, 112, 66, 71, 67, 56, 70, 57, 53, 74, 69, 75, 60, 56, 60, 71, 48, 57, 57, 77, 72, 69, 43, 53, 78, 40, 46, 84, 62, 60, 55, 51, 56, 69, 68, 44, 58, 86, 50, 74, 55, 69, 49, 68, 65, 49, 66, 67, 73, 69, 46, 57, 69, 60, 62, 64, 42, 45, 54, 63, 55, 112, 79, 56, 52, 77, 81, 47, 53, 55, 65, 66, 62, 65, 63, 64, 63, 48, 66, 63, 58, 71, 59, 66, 72, 65, 51, 49, 60, 56, 70, 55, 76, 36, 75, 65, 115, 60, 71, 50, 66, 50, 88, 61, 57, 61, 69, 55, 64, 57, 52, 41, 72, 69, 64, 77, 79, 94, 64, 50, 57, 54, 87, 66, 73, 68, 74, 60, 63, 83, 62, 56, 73, 66, 74, 61, 62, 54, 78, 48, 51, 62, 64, 72, 66, 55, 57, 66, 68, 75, 64, 50, 59, 65, 58, 69, 84, 51, 56, 64, 56, 57, 65, 55, 59, 56, 84, 61, 64, 62, 79, 53, 73, 64, 85, 59, 59, 56, 63, 62, 62, 81, 65, 57, 72, 47, 69, 70, 76, 60, 68, 66, 63, 53, 76, 77, 59, 61, 74, 55, 100, 65, 52, 70, 139, 66, 77, 60, 66, 97, 47, 46, 62, 62, 60, 80, 86, 74, 73, 62, 66, 62, 67, 68, 109, 69, 59, 65, 49, 63, 51, 73, 61, 59, 61, 58, 67, 57, 65, 58, 67, 51, 60, 72, 96, 76, 57, 85, 68, 53, 58, 142, 64, 55, 44, 93, 55, 70, 85, 59, 46, 65, 49, 121, 78, 62, 55, 66, 58, 122, 64, 56, 58, 50, 62, 71, 57, 64, 63, 67, 64, 84, 69, 74, 48, 69, 69, 73, 70, 66, 62, 63, 99, 67, 76, 86, 73, 60, 63, 73, 55, 69, 67, 79, 70, 87, 64, 63, 64, 50, 63, 67, 64, 69, 64, 61, 57, 56, 67, 63, 62, 54, 57, 70, 60, 54, 79, 68, 60, 77, 56, 69, 47, 61, 56, 52, 58, 42, 64, 39, 75, 57, 74, 75, 69, 62, 72, 63, 100, 63, 64, 57, 
71, 60, 54, 69, 83, 75, 53, 65, 59, 84, 73, 53, 51, 63, 78, 64, 65, 67, 71, 63, 69, 65, 65, 65, 96, 56, 53, 63, 72, 54, 74, 78, 59, 45, 55, 46, 59, 77, 59, 75, 62, 57, 54, 72, 75, 59, 65, 54, 77, 54, 94, 60, 78, 57, 66, 47, 56, 52, 77, 58, 48, 70, 54, 50, 58, 51, 60, 74, 61, 46, 52, 53, 84, 60, 69, 60, 70, 77, 73, 47, 61, 64, 74, 66, 65, 67, 72, 59, 62, 68, 64, 61, 52, 68, 72, 54, 53, 45, 56, 60, 69, 55, 100, 68, 70, 58, 53, 85, 89, 106, 66, 58, 64, 73, 57, 56, 58, 76, 48, 76, 65, 49, 53, 64, 78, 61, 56, 93, 66, 57, 63, 59, 82, 83, 79, 53, 57, 54, 72, 72, 45, 70, 60, 57, 57, 58, 59, 75, 54, 51, 56, 64, 74, 55, 76, 67, 72, 61, 56, 56, 68, 52, 77, 71, 79, 68, 47, 50, 63, 61, 52, 73, 42, 71, 60, 69, 63, 60, 84, 91, 54, 60, 163, 82, 65, 105, 72, 57, 54, 62, 67, 51, 54, 48, 87, 60, 66, 56, 58, 64, 73, 70, 55, 60, 53, 67, 69, 61, 88, 52, 65, 63, 47, 79, 60, 68, 48, 70, 69, 48, 58, 63, 66, 52, 68, 67, 60, 66, 63, 56, 58, 89, 60, 67, 70, 80, 60, 65, 122, 127, 62, 85, 77, 75, 64, 70, 59, 72, 68, 52, 70, 41, 61, 52, 81, 49, 58, 77, 80, 62, 39, 59, 89, 58, 66, 59, 52, 64, 64, 67, 94, 62, 40, 69, 67, 62, 60, 72, 62, 58, 60, 140, 105, 42, 55, 46, 57, 63, 68, 59, 56, 81, 67, 58, 51, 80, 66, 75, 63, 62, 60, 77, 71, 63, 69, 69, 66, 67, 78, 45, 61, 52, 59, 54, 48, 76, 62, 60, 69, 66, 51, 43, 50, 61, 63, 65, 64, 66, 55, 68, 66, 74, 76, 73, 57, 63, 82, 63, 65, 65, 73, 78, 69, 59, 66, 56, 48, 71, 76, 55, 65, 63, 65, 61, 62, 74, 72, 59, 65, 82, 65, 56, 82, 60, 67, 64, 60, 55, 61, 52, 66, 66, 55, 51, 69, 79, 70, 60, 62, 76, 104, 154, 48, 60, 45, 62, 59, 60, 62, 58, 67, 98, 65, 78, 58, 70, 78, 59, 59, 60, 44, 72, 56, 80, 91, 63, 68, 69, 66, 62, 53, 89, 60, 47, 84, 53, 98, 74, 102, 69, 57, 71, 72, 65, 57, 59, 64, 63, 70, 48, 61, 52, 103, 60, 62, 57, 56, 64, 57, 92, 68, 66, 66, 66, 82, 59, 84, 81, 56, 71, 69, 75, 69, 62, 63, 72, 68, 69, 69, 53, 64, 68, 78, 66, 72, 61, 74, 66, 77, 51, 74, 64, 54, 83, 55, 51, 66, 63, 66, 82, 71, 53, 59, 136, 50, 65, 62, 74, 68, 68, 54, 57, 88, 66, 75, 58, 68, 53, 61, 61, 60, 89, 71, 58, 76, 56, 67, 62, 71, 56, 84, 60, 58, 66, 67, 57, 48, 70, 69, 66, 76, 86, 77, 161, 78, 45, 74, 45, 47, 69, 52, 54, 54, 56, 72, 72, 47, 68, 67, 63, 70, 58, 67, 90, 77, 69, 54, 68, 61, 63, 51, 57, 71, 65, 61, 68, 68, 72, 60, 56, 56, 74, 66, 61, 83, 104, 64, 71, 56, 76, 58, 88, 64, 110, 56, 59, 49, 66, 61, 57, 67, 53, 66, 73, 47, 76, 81, 78, 48, 50, 58, 54, 58, 63, 54, 69, 73, 51, 72, 58, 72, 54, 62, 76, 47, 58, 48, 72, 58, 57, 63, 62, 80, 62, 65, 73, 54, 59, 81, 52, 82, 66, 57, 72, 47, 61, 56, 68, 59, 91, 62, 70, 73, 85, 65, 70, 66, 70, 67, 65, 74, 67, 55, 76, 65, 66, 63, 51, 67, 68, 104, 69, 72, 59, 77, 91, 60, 91, 63, 50, 67, 65, 64, 63, 70, 72, 53, 89, 54, 50, 66, 60, 76, 46, 81, 44, 105, 69, 97, 58, 62, 56, 63, 123, 66, 68, 66, 73, 66, 63, 68, 84, 55, 62, 82, 75, 67, 90, 61, 67, 42, 62, 45, 102, 60, 65, 79, 85, 67, 64, 64, 57, 79, 52, 61, 69, 59, 60, 93, 66, 65, 56, 67, 74, 63, 66, 55, 64, 48, 74, 80, 66, 104, 64, 74, 57, 95, 61, 79, 61, 61, 62, 63, 66, 71, 69, 69, 75, 67, 60, 78, 62, 61, 110, 55, 64, 66, 40, 76, 52, 66, 50, 65, 72, 61, 96, 42, 56, 88, 61, 55, 71, 75, 59, 70, 62, 86, 79, 54, 63, 71, 51, 67, 52, 67, 54, 95, 65, 73, 50, 75, 40, 47, 72, 74, 59, 74, 61, 75, 63, 59, 75, 64, 68, 49, 55, 79, 56, 52, 49, 55, 67, 67, 57, 60, 54, 93, 70, 60, 56, 54, 71, 50, 97, 63, 56, 67, 67, 70, 88, 68, 67, 66, 83, 46, 49, 73, 62, 66, 68, 56, 58, 62, 61, 49, 56, 71, 63, 92, 61, 60, 68, 63, 84, 56, 56, 55, 80, 54, 65, 62, 61, 70, 69, 81, 54, 68, 58, 90, 53, 79, 61, 61, 56, 65, 63, 71, 54, 62, 63, 57, 94, 76, 
78, 67, 85, 59, 60, 58, 122, 67, 71, 59, 89, 85, 69, 59, 67, 64, 74, 68, 85, 51, 65, 75, 92, 62, 79, 84, 69, 64, 70, 74, 67, 52, 58, 60, 54, 59, 53, 86, 72, 67, 57, 67, 59, 52, 57, 50, 58, 65, 65, 52, 63, 59, 68, 54, 62, 66, 80, 64, 51, 58, 96, 66, 55, 82, 65, 51, 60, 63, 44, 69, 71, 70, 130, 62, 71, 57, 67, 54, 62, 58, 108, 70, 96, 65, 69, 72, 89, 122, 61, 67, 48, 54, 48, 67, 49, 66, 58, 70, 68, 67, 61, 69, 91, 82, 84, 68, 58, 52, 101, 79, 50, 76, 56, 74, 75, 59, 77, 60, 81, 61, 63, 54, 99, 112, 68, 77, 62, 128, 82, 66, 62, 59, 59, 57, 86, 54, 48, 58, 75, 51, 67, 54, 63, 79, 73, 105, 62, 62, 59, 75, 52, 59, 96, 67, 66, 51, 61, 58, 49, 68, 50, 56, 61, 62, 55, 83, 52, 50, 61, 38, 72, 68, 55, 61, 86, 56, 67, 68, 87, 75, 52, 52, 67, 64, 55, 46, 61, 71, 63, 59, 68, 77, 62, 71, 62, 69, 50, 64, 77, 49, 91, 58, 62, 79, 68, 54, 62, 61, 90, 60, 52, 69, 59, 53, 40, 87, 93, 68, 85, 80, 60, 55, 55, 60, 69, 53, 74, 49, 67, 84, 59, 54, 51, 81, 91, 72, 73, 67, 65, 69, 61, 59, 104, 52, 60, 65, 58, 64, 54, 73, 65, 70, 107, 67, 73, 61, 82, 49, 68, 50, 67, 71, 43, 39, 56, 79, 59, 57, 70, 70, 65, 59, 65, 60, 70, 66, 63, 35, 71, 59, 41, 66, 75, 63, 64, 88, 51, 91, 36, 73, 75, 79, 91, 65, 60, 65, 47, 65, 53, 49, 75, 58, 58, 60, 64, 59, 93, 94, 79, 57, 49, 60, 56, 86, 61, 54, 97, 69, 46, 77, 64, 75, 74, 57, 60, 76, 63, 73, 45, 55, 51, 70, 57, 54, 68, 67, 55, 56, 64, 55, 52, 85, 78, 67, 76, 66, 50, 67, 51, 59, 52, 60, 68, 60, 77, 70, 59, 93, 59, 79, 50, 70, 104, 55, 67, 67, 69, 85, 53, 54, 69, 78, 66, 57, 61, 68, 53, 46, 61, 50, 56, 75, 54, 64, 89, 59, 59, 67, 66, 61, 54, 61, 70, 52, 58, 76, 59, 62, 80, 65, 62, 75, 52, 91, 57, 87, 68, 59, 47, 60, 58, 44, 60, 82, 47, 77, 62, 64, 53, 81, 68, 62, 70, 71, 55, 54, 58, 58, 62, 99, 67, 72, 59, 80, 58, 55, 58, 81, 58, 67, 56, 70, 71, 65, 76, 86, 78, 57, 85, 61, 43, 65, 58, 77, 48, 49, 58, 68, 90, 61, 63, 59, 60, 56, 61, 78, 55, 95, 104, 43, 62, 62, 74, 80, 75, 56, 42, 61, 70, 70, 65, 50, 69, 61, 66, 44, 67, 67, 62, 66, 83, 79, 68, 44, 62, 69, 75, 83, 39, 67, 70, 73, 68, 61, 46, 64, 72, 117, 93, 61, 60, 60, 60, 50, 58, 60, 81, 61, 85, 63, 72, 61, 51, 51, 71, 57, 59, 90, 55, 72, 72, 74, 51, 63, 57, 66, 66, 62, 80, 41, 68, 66, 69, 50, 88, 55, 78, 60, 65, 73, 60, 56, 69, 87, 62, 55, 62, 65, 60, 66, 73, 61, 56, 87, 59, 49, 83, 60, 65, 57, 59, 90, 87, 87, 79, 65, 46, 56, 81, 45, 58, 70, 67, 73, 64, 50, 56, 65, 71, 64, 57, 48, 69, 39, 73, 79, 65, 62, 78, 56, 55, 63, 61, 63, 45, 59, 62, 64, 59, 57, 77, 59, 71, 65, 92, 74, 71, 71, 48, 71, 56, 58, 54, 107, 56, 62, 61, 56, 58, 79, 58, 85, 55, 47, 60, 56, 127, 58, 54, 87, 75, 54, 76, 116, 58, 93, 68, 58, 81, 88, 65, 62, 67, 60, 60, 80, 65, 94, 64, 127, 62, 87, 78, 74, 81, 74, 71, 63, 60, 71, 70, 64, 63, 64, 76, 73, 64, 54, 65, 62, 73, 46, 82, 40, 71, 55, 56, 50, 70, 55, 65, 54, 82, 48, 89, 66, 67, 67, 57, 67, 55, 48, 65, 47, 50, 70, 62, 59, 67, 84, 62, 69, 73, 96, 69, 58, 74, 53, 58, 49, 58, 45, 70, 56, 70, 56, 47, 70, 58, 90, 59, 45, 63, 50, 58, 50, 66, 68, 83, 75, 61, 80, 60, 81, 90, 78, 85, 66, 67, 73, 65, 51, 62, 63, 56, 50, 64, 53, 70, 57, 108, 73, 87, 64, 55, 73, 63, 103, 59, 61, 74, 71, 69, 81, 75, 60, 51, 53, 47, 40, 57, 66, 66, 57, 87, 70, 71, 43, 61, 64, 70, 86, 51, 78, 80, 78, 51, 58, 64, 94, 64, 62, 111, 69, 58, 90, 76, 80, 76, 61, 78, 70, 91, 62, 71, 61, 47, 104, 61, 52, 55, 86, 48, 92, 71, 84, 81, 53, 67, 66, 67, 56, 60, 67, 62, 60, 43, 52, 47, 55, 67, 61, 90, 75, 105, 74, 65, 68, 66, 55, 45, 60, 53, 65, 52, 30, 101, 63, 62, 66, 51, 98, 78, 51, 64, 97, 81, 67, 79, 62, 54, 62, 77, 73, 48, 49, 70, 60, 69, 41, 73, 61, 81, 
62, 89, 49, 60, 47, 46, 66, 70, 63, 81, 75, 85, 78, 72, 53, 58, 67, 53, 55, 66, 56, 50, 76, 62, 51, 61, 96, 142, 58, 55, 67, 66, 104, 74, 51, 67, 70, 52, 67, 82, 64, 54, 66, 53, 56, 58, 72, 69, 98, 114, 43, 61, 70, 82, 72, 51, 58, 54, 63, 59, 60, 57, 60, 60, 75, 58, 56, 76, 57, 98, 56, 79, 62, 73, 84, 64, 62, 54, 56, 53, 70, 69, 71, 67, 62, 63, 75, 67, 62, 59, 63, 63, 50, 67, 79, 65, 61, 85, 61, 56, 64, 62, 61, 63, 56, 52, 61, 72, 51, 43, 55, 72, 72, 151, 76, 51, 47, 71, 66, 72, 74, 69, 70, 59, 70, 62, 109, 65, 81, 84, 66, 52, 67, 71, 71, 99, 66, 59, 51, 41, 49, 58, 55, 78, 73, 77, 78, 70, 63, 69, 48, 81, 54, 74, 54, 57, 83, 66, 79, 70, 82, 83, 52, 90, 53, 81, 65, 109, 66, 86, 64, 79, 40, 76, 66, 60, 54, 77, 91, 81, 57, 71, 55, 50, 46, 67, 72, 49, 53, 58, 86, 78, 45, 67, 91, 59, 52, 89, 67, 55, 54, 74, 57, 69, 82, 69, 57, 62, 78, 51, 62, 94, 88, 67, 81, 74, 60, 39, 80, 68, 53, 62, 67, 69, 66, 63, 45, 55, 100, 68, 48, 71, 63, 60, 70, 75, 78, 52, 62, 73, 56, 77, 83, 53, 70, 62, 67, 89, 54, 75, 74, 105, 62, 69, 55, 64, 58, 69, 66, 76, 67, 68, 95, 52, 50, 51, 50, 69, 54, 61, 68, 64, 66, 61, 68, 79, 72, 82, 76, 59, 58, 57, 80, 46, 94, 77, 60, 95, 138, 62, 48, 83, 59, 102, 85, 72, 49, 100, 77, 63, 73, 61, 51, 64, 74, 91, 40, 83, 76, 73, 58, 66, 87, 64, 80, 51, 47, 66, 92, 63, 68, 104, 57, 53, 74, 52, 72, 68, 69, 80, 99, 64, 80, 67, 61, 123, 68, 69, 61, 52, 63, 50, 54, 60, 77, 60, 61, 72, 74, 86, 53, 59, 58, 64, 63, 75, 75, 61, 61, 72, 78, 76, 52, 35, 66, 75, 74, 74, 60, 97, 103, 108, 55, 57, 72, 46, 51, 74, 56, 57, 67, 77, 70, 49, 99, 52, 66, 42, 60, 66, 54, 96, 44, 78, 57, 94, 64, 60, 49, 60, 87, 84, 45, 92, 73, 50, 68, 68, 55, 87, 63, 83, 90, 61, 62, 91, 68, 71, 61, 55, 60, 81, 72, 45, 69, 84, 65, 71, 56, 43, 60, 61, 68, 56, 56, 49, 61, 74, 43, 70, 66, 55, 50, 48, 60, 72, 68, 82, 47, 77, 62, 72, 54, 66, 62, 79, 58, 53, 76, 43, 78, 74, 64, 50, 114, 71, 65, 55, 81, 56, 65, 87, 69, 75, 55, 69, 76, 62, 64, 82, 92, 61, 56, 83, 80, 92, 69, 64, 69, 59, 57, 68, 68, 73, 72, 61, 100, 74, 66, 79, 67, 50, 56, 67, 59, 57, 73, 68, 97, 50, 76, 59, 70, 69, 77, 62, 105, 64, 93, 62, 66, 77, 75, 50, 67, 54, 69, 59, 60, 75, 49, 51, 63, 59, 74, 56, 64, 46, 55, 108, 69, 81, 54, 44, 54, 59, 60, 72, 66, 78, 51, 76, 74, 65, 69, 48, 66, 68, 55, 68, 75, 72, 76, 59, 57, 70, 65, 78, 48, 70, 92, 74, 67, 85, 70, 70, 48, 63, 68, 57, 67, 98, 64, 76, 84, 70, 73, 59, 77, 77, 74, 58, 60, 58, 67, 53, 48, 78, 63, 76, 60, 95, 75, 70, 58, 82, 73, 71, 87, 76, 50, 70, 52, 57, 62, 53, 70, 59, 61, 63, 70, 55, 81, 72, 50, 54, 77, 69, 64, 34, 52, 50, 64, 59, 76, 50, 57, 82, 37, 45, 82, 59, 64, 83, 75, 54, 87, 73, 77, 59, 84, 96, 75, 59, 64, 48, 93, 61, 74, 65, 70, 64, 38, 74, 78, 59, 54, 98, 49, 68, 62, 66, 73, 61, 79, 42, 67, 78, 69, 69, 72, 45, 64, 60, 55, 59, 78, 91, 65, 66, 90, 80, 66, 27, 54, 76, 52, 65, 70, 54, 63, 54, 115, 70, 54, 62, 62, 68, 52, 98, 47, 45, 63, 60, 85, 62, 61, 55, 67, 69, 59, 77, 68, 75, 56, 52, 55, 62, 81, 37, 75, 86, 93, 61, 76, 68, 70, 55, 75, 59, 57, 33, 74, 65, 55, 52, 57, 83, 74, 55, 55, 62, 50, 59, 60, 65, 86, 65, 62, 60, 74, 83, 61, 53, 53, 92, 56, 62, 74, 55, 68, 83, 107, 90, 61, 64, 106, 60, 72, 77, 52, 57, 36, 67, 91, 68, 60, 56, 40, 72, 58, 62, 55, 87, 66, 71, 54, 79, 49, 89, 65, 58, 65, 87, 68, 64, 67, 71, 70, 60, 66, 43, 84, 136, 58, 69, 54, 66, 58, 57, 66, 34, 72, 57, 80, 73, 63, 69, 69, 32, 79, 61, 54, 74, 53, 57, 57, 65, 60, 50, 67, 63, 79, 80, 59, 55, 63, 96, 66, 53, 83, 58, 67, 48, 50, 128, 52, 67, 64, 50, 50, 74, 84, 68, 49, 75, 81, 63, 42, 78, 70, 70, 89, 83, 48, 71, 42, 62, 63, 62, 59, 115, 
64, 72, 52, 50, 68, 38, 105, 77, 74, 50, 88, 68, 52, 63, 68, 73, 71, 72, 61, 69, 63, 58, 50, 58, 89, 70, 88, 70, 57, 63, 58, 51, 65, 54, 72, 72, 61, 51, 56, 72, 71, 64, 80, 69, 92, 52, 74, 66, 71, 64, 46, 58, 76, 57, 84, 53, 62, 70, 59, 112, 65, 69, 63, 69, 76, 58, 67, 73, 74, 47, 35, 53, 80, 65, 70, 42, 76, 84, 73, 68, 60, 94, 66, 75, 130, 76, 46, 47, 73, 60, 68, 83, 78, 61, 72, 62, 76, 77, 66, 53, 85, 58, 78, 75, 61, 62, 82, 59, 51, 70, 58, 68, 96, 93, 61, 41, 73, 53, 67, 75, 69, 88, 55, 90, 51, 91, 64, 57, 72, 59, 72, 51, 82, 74, 47, 142, 80, 76, 55, 51, 63, 54, 69, 71, 74, 62, 73, 71, 52, 60, 66, 86, 66, 55, 72, 58, 65, 59, 71, 44, 79, 59, 96, 72, 95, 115, 93, 79, 52, 50, 51, 48, 93, 59, 70, 63, 58, 71, 98, 60, 68, 60, 73, 63, 38, 92, 73, 58, 61, 66, 64, 73, 41, 71, 76, 66, 69, 108, 77, 65, 57, 78, 64, 56, 48, 44, 52, 67, 65, 82, 70, 78, 53, 75, 71, 46, 60, 72, 52, 66, 63, 93, 74, 48, 85, 51, 49, 51, 54, 69, 51, 62, 64, 50, 50, 58, 49, 63, 74, 65, 33, 92, 61, 65, 80, 59, 62, 51, 107, 66, 67, 81, 93, 84, 63, 39, 59, 56, 66, 67, 69, 74, 85, 57, 93, 57, 94, 67, 88, 59, 58, 51, 88, 62, 72, 85, 80, 67, 49, 70, 56, 50, 46, 51, 56, 52, 48, 67, 46, 59, 49, 60, 57, 103, 62, 54, 71, 67, 70, 70, 57, 53, 48, 73, 93, 63, 86, 72, 85, 51, 72, 92, 60, 78, 59, 74, 72, 59, 75, 54, 72, 68, 63, 75, 92, 50, 64, 52, 53, 48, 80, 59, 75, 54, 65, 68, 68, 78, 61, 69, 57, 65, 75, 40, 77, 69, 59, 52, 61, 63, 61, 61, 62, 75, 79, 56, 61, 62, 86, 67, 56, 56, 55, 91, 51, 57, 73, 88, 92, 65, 100, 64, 72, 82, 53, 61, 69, 47, 39, 46, 57, 60, 71, 60, 61, 48, 69, 59, 48, 78, 47, 60, 57, 62, 91, 50, 66, 54, 55, 54, 77, 40, 70, 57, 59, 55, 75, 63, 52, 48, 60, 69, 91, 45, 53, 61, 49, 81, 56, 65, 84, 60, 90, 81, 63, 63, 68, 59, 52, 46, 45, 64, 74, 102, 61, 51, 75, 56, 53, 57, 71, 42, 59, 62, 55, 64, 57, 109, 62, 63, 56, 49, 50, 48, 67, 67, 69, 77, 85, 92, 81, 52, 44, 40, 67, 67, 55, 55, 77, 59, 57, 62, 62, 65, 90, 46, 91, 70, 57, 81, 66, 68, 60, 27, 88, 50, 96, 62, 72, 69, 64, 109, 70, 71, 66, 56, 93, 69, 68, 65, 63, 83, 66, 63, 49, 72, 68, 38, 53, 50, 103, 60, 49, 93, 64, 73, 67, 76, 62, 68, 86, 64, 49, 62, 68, 58, 75, 52, 59, 58, 57, 58, 64, 73, 73, 66, 54, 65, 61, 67, 52, 109, 54, 68, 43, 95, 59, 67, 66, 96, 62, 66, 77, 55, 63, 59, 68, 61, 84, 74, 63, 61, 78, 79, 68, 56, 79, 42, 68, 61, 59, 46, 58, 56, 67, 94, 80, 53, 77, 72, 109, 59, 70, 56, 60, 64, 38, 56, 65, 83, 53, 72, 63, 58, 72, 68, 59, 81, 60, 71, 54, 52, 56, 66, 67, 85, 68, 87, 30, 69, 51, 62, 54, 102, 58, 73, 69, 74, 66, 52, 65, 95, 75, 109, 65, 73, 53, 64, 66, 66, 83, 83, 40, 60, 70, 48, 72, 76, 82, 64, 70, 56, 55, 57, 61, 43, 58, 65, 42, 64, 90, 75, 62, 57, 70, 79, 50, 59, 64, 66, 94, 55, 72, 59, 51, 70, 58, 41, 70, 51, 67, 63, 61, 60, 65, 56, 66, 59, 75, 73, 73, 74, 93, 57, 73, 51, 68, 64, 58, 67, 69, 65, 79, 80, 62, 52, 87, 64, 51, 52, 61, 68, 71, 66, 73, 60, 55, 53, 72, 54, 69, 71, 65, 55, 68, 93, 47, 88, 74, 58, 71, 93, 74, 78, 59, 73, 64, 77, 84, 66, 79, 52, 48, 63, 62, 60, 41, 67, 53, 58, 70, 49, 54, 68, 42, 34, 59, 65, 63, 59, 67, 78, 57, 64, 66, 109, 67, 77, 69, 49, 57, 55, 80, 57, 77, 56, 73, 51, 67, 88, 93, 69, 86, 50, 56, 103, 64, 40, 53, 44, 63, 46, 88, 48, 70, 48, 74, 67, 109, 64, 72, 57, 54, 58, 79, 73, 60, 74, 68, 55, 69, 65, 82, 55, 68, 57, 51, 55, 80, 59, 46, 58, 50, 53, 72, 54, 53, 86, 71, 77, 61, 65, 87, 61, 61, 62, 71, 102, 63, 59, 71, 72, 63, 67, 65, 98, 67, 53, 61, 64, 45, 59, 90, 44, 106, 67, 59, 62, 63, 64, 57, 59, 42, 76, 59, 99, 73, 62, 65, 75, 64, 53, 59, 63, 73, 67, 53, 69, 60, 62, 72, 83, 61, 51, 68, 54, 68, 54, 48, 49, 86, 88, 64, 
58, 55, 57, 44, 48, 58, 81, 64, 55, 70, 73, 62, 55, 66, 73, 65, 57, 54, 64, 55, 47, 63, 51, 77, 58, 66, 45, 52, 60, 53, 63, 87, 43, 79, 53, 105, 67, 61, 69, 56, 60, 49, 41, 71, 41, 48, 60, 58, 61, 84, 73, 45, 60, 82, 49, 58, 100, 64, 60, 91, 70, 109, 71, 82, 64, 69, 60, 73, 56, 74, 57, 55, 60, 61, 72, 51, 63, 61, 67, 76, 59, 65, 72, 80, 43, 82, 64, 64, 66, 55, 54, 51, 53, 58, 64, 58, 65, 59, 54, 58, 47, 60, 81, 72, 72, 75, 116, 62, 64, 92, 52, 71, 60, 64, 53, 47, 71, 58, 50, 80, 51, 52, 53, 79, 64, 60, 75, 99, 95, 97, 66, 53, 75, 61, 82, 70, 47, 53, 48, 57, 80, 47, 79, 50, 64, 65, 115, 63, 67, 62, 56, 54, 57, 59, 67, 63, 63, 77, 80, 63, 63, 65, 49, 46, 65, 53, 63, 48, 58, 67, 61, 56, 55, 72, 50, 70, 63, 49, 57, 64, 73, 67, 57, 117, 106, 48, 70, 67, 64, 76, 64, 69, 67, 64, 80, 56, 61, 71, 73, 55, 81, 63, 78, 59, 95, 64, 90, 59, 41, 60, 60, 53, 89, 98, 58, 78, 63, 111, 63, 67, 57, 54, 66, 56, 47, 81, 77, 63, 68, 56, 58, 61, 67, 75, 74, 49, 71, 53, 66, 72, 70, 66, 83, 86, 52, 54, 70, 62, 47, 70, 57, 87, 79, 70, 52, 68, 48, 59, 63, 101, 89, 66, 82, 42, 62, 72, 73, 60, 55, 56, 76, 64, 73, 76, 76, 51, 115, 113, 70, 48, 62, 72, 74, 44, 71, 75, 63, 65, 47, 62, 58, 67, 60, 52, 93, 71, 63, 58, 57, 63, 59, 72, 67, 109, 88, 44, 84, 64, 57, 55, 65, 62, 65, 65, 62, 65, 53, 44, 76, 49, 56, 65, 59, 71, 85, 56, 46, 50, 53, 62, 49, 73, 66, 62, 57, 51, 71, 86, 123, 71, 50, 73, 58, 57, 47, 50, 66, 72, 70, 70, 64, 60, 90, 54, 47, 62, 88, 61, 66, 60, 56, 63, 100, 77, 55, 62, 56, 46, 60, 57, 62, 52, 81, 64, 79, 85, 61, 76, 50, 68, 58, 80, 54, 51, 61, 68, 59, 80, 50, 83, 59, 59, 45, 57, 56, 58, 91, 74, 58, 58, 58, 51, 75, 101, 67, 70, 48, 106, 51, 72, 54, 88, 78, 71, 57, 54, 53, 79, 63, 64, 72, 87, 78, 50, 56, 63, 58, 113, 54, 65, 58, 78, 83, 58, 62, 60, 78, 62, 69, 61, 77, 55, 80, 61, 65, 69, 52, 81, 65, 54, 80, 63, 63, 58, 41, 157, 72, 59, 63, 71, 67, 67, 42, 59, 62, 65, 65, 62, 106, 50, 74, 67, 57, 61, 54, 87, 61, 58, 65, 55, 44, 64, 67, 56, 81, 76, 60, 86, 76, 68, 62, 80, 53, 39, 66, 53, 64, 94, 66, 71, 72, 57, 73, 65, 70, 73, 53, 67, 79, 70, 90, 94, 67, 54, 58, 52, 59, 134, 73, 94, 60, 74, 59, 69, 59, 49, 55, 48, 75, 74, 57, 79, 70, 46, 48, 52, 61, 47, 82, 40, 64, 57, 72, 51, 65, 59, 61, 53, 62, 57, 46, 93, 86, 72, 62, 100, 46, 53, 50, 54, 61, 51, 96, 59, 62, 48, 62, 66, 82, 65, 72, 70, 69, 64, 59, 65, 65, 60, 74, 82, 61, 63, 65, 87, 73, 59, 63, 63, 66, 66, 69, 65, 63, 53, 58, 79, 58, 65, 67, 60, 60, 74, 47, 60, 67, 67, 65, 58, 61, 78, 78, 57, 56, 52, 48, 66, 69, 50, 62, 61, 91, 70, 114, 56, 61, 60, 86, 63, 73, 73, 63, 64, 64, 64, 60, 56, 72, 77, 70, 45, 60, 77, 62, 63, 54, 60, 66, 60, 58, 67, 85, 72, 71, 52, 71, 104, 62, 60, 72, 91, 59, 62, 78, 55, 57, 61, 74, 54, 59, 69, 44, 63, 62, 66, 69, 83, 64, 57, 64, 54, 90, 90, 69, 81, 52, 88, 58, 82, 55, 49, 64, 56, 67, 68, 59, 60, 103, 72, 93, 57, 69, 72, 83, 60, 90, 75, 72, 63, 55, 60, 56, 55, 63, 72, 67, 65, 75, 59, 67, 59, 72, 60, 39, 49, 90, 66, 61, 77, 108, 69, 86, 60, 51, 72, 64, 46, 61, 60, 71, 65, 51, 65, 66, 65, 77, 65, 72, 46, 45, 56, 59, 51, 62, 56, 95, 56, 48, 55, 67, 64, 65, 48, 58, 67, 62, 59, 74, 89, 58, 56, 83, 60, 72, 63, 95, 52, 53, 72, 45, 60, 65, 54, 45, 59, 67, 60, 67, 84, 56, 68, 43, 73, 79, 58, 66, 50, 42, 59, 52, 60, 50, 60, 55, 63, 51, 54, 60, 70, 76, 58, 59, 66, 61, 95, 67, 47, 72, 70, 58, 77, 56, 62, 74, 66, 63, 62, 115, 50, 59, 60, 58, 84, 50, 57, 78, 48, 67, 68, 65, 59, 61, 36, 87, 92, 71, 64, 66, 83, 73, 66, 63, 69, 90, 69, 63, 60, 63, 59, 46, 66, 52, 50, 50, 72, 62, 65, 69, 64, 65, 53, 60, 65, 69, 68, 59, 69, 63, 79, 56, 49, 81, 58, 
68, 75, 64, 62, 64, 70, 56, 68, 74, 61, 67, 70, 60, 68, 72, 60, 58, 67, 66, 71, 64, 58, 66, 68, 75, 41, 65, 69, 47, 79, 61, 54, 57, 61, 49, 84, 90, 53, 58, 55, 52, 61, 75, 63, 45, 46, 65, 72, 62, 67, 61, 67, 55, 74, 61, 78, 63, 41, 57, 96, 55, 61, 68, 60, 59, 62, 66, 74, 58, 62, 78, 97, 87, 68, 60, 63, 70, 82, 48, 69, 61, 68, 60, 64, 71, 61, 56, 59, 76, 80, 70, 78, 63, 68, 49, 130, 66, 64, 117, 70, 77, 51, 56, 51, 52, 76, 69, 71, 82, 61, 51, 48, 57, 92, 72, 61, 59, 75, 47, 63, 65, 59, 81, 54, 64, 76, 50, 67, 65, 61, 69, 71, 56, 73, 60, 71, 75, 53, 67, 44, 84, 59, 63, 87, 92, 74, 67, 66, 67, 59, 96, 70, 62, 56, 66, 67, 45, 60, 64, 73, 90, 75, 48, 59, 62, 66, 55, 52, 51, 77, 61, 77, 58, 57, 55, 78, 71, 64, 61, 60, 64, 72, 86, 73, 67, 56, 57, 76, 49, 84, 50, 72, 46, 54, 71, 67, 72, 49, 69, 63, 44, 89, 76, 65, 75, 66, 68, 66, 63, 56, 67, 59, 67, 55, 104, 61, 73, 86, 74, 85, 57, 77, 65, 54, 53, 79, 62, 62, 66, 56, 64, 55, 75, 137, 61, 74, 73, 55, 65, 61, 58, 64, 71, 65, 57, 70, 88, 48, 72, 54, 57, 61, 66, 53, 62, 57, 90, 59, 66, 75, 56, 74, 69, 105, 58, 61, 121, 62, 72, 70, 67, 53, 66, 69, 69, 61, 46, 64, 70, 47, 62, 77, 71, 49, 60, 64, 57, 56, 65, 71, 69, 65, 58, 80, 49, 55, 81, 88, 61, 78, 54, 81, 40, 91, 56, 89, 75, 59, 69, 72, 56, 74, 69, 103, 58, 61, 58, 53, 43, 56, 53, 53, 53, 52, 85, 45, 103, 62, 56, 61, 67, 58, 93, 64, 62, 79, 83, 63, 63, 62, 59, 60, 60, 54, 47, 57, 85, 94, 57, 99, 68, 188, 67, 55, 56, 77, 62, 69, 63, 69, 41, 60, 61, 63, 48, 70, 67, 67, 56, 77, 65, 57, 69, 63, 126, 106, 66, 67, 74, 64, 66, 49, 49, 74, 67, 61, 71, 61, 103, 72, 57, 68, 46, 62, 58, 63, 70, 113, 57, 71, 60, 61, 57, 70, 52, 41, 40, 60, 60, 64, 54, 56, 79, 69, 66, 59, 47, 62, 53, 65, 73, 55, 49, 81, 75, 61, 74, 57, 59, 68, 65, 50, 64, 78, 77, 71, 66, 109, 64, 68, 53, 61, 54, 74, 96, 64, 65, 59, 76, 66, 72, 54, 57, 81, 58, 53, 58, 65, 67, 83, 54, 67, 54, 73, 66, 54, 74, 64, 54, 82, 67, 71, 49, 70, 79, 71, 59, 59, 61, 57, 55, 67, 47, 68, 78, 60, 53, 68, 70, 63, 59, 59, 71, 85, 82, 65, 80, 90, 71, 55, 70, 54, 52, 67, 51, 71, 62, 68, 55, 51, 133, 51, 48, 69, 69, 59, 69, 58, 95, 62, 64, 57, 74, 77, 69, 62, 67, 58, 63, 73, 76, 89, 63, 91, 71, 64, 63, 64, 53, 70, 98, 94, 63, 50, 53, 78, 73, 60, 69, 60, 45, 64, 61, 55, 65, 61, 59, 60, 67, 62, 68, 76, 66, 64, 63, 66, 76, 55, 68, 62, 65, 57, 58, 66, 68, 64, 50, 38, 57, 58, 66, 42, 60, 76, 40, 49, 75, 50, 55, 68, 70, 58, 59, 72, 59, 65, 62, 44, 58, 65, 61, 68, 93, 151, 75, 52, 55, 53, 54, 70, 73, 51, 53, 82, 86, 58, 64, 124, 65, 57, 62, 73, 48, 59, 51, 79, 59, 54, 62, 58, 53, 52, 66, 69, 60, 78, 51, 65, 66, 70, 94, 74, 69, 124, 83, 77, 64, 68, 63, 71, 58, 85, 68, 62, 84, 90, 82, 76, 49, 66, 51, 47, 77, 79, 80, 55, 68, 54, 63, 69, 40, 67, 95, 44, 73, 62, 70, 53, 57, 75, 63, 95, 81, 87, 56, 70, 66, 51, 108, 61, 66, 73, 63, 60, 57, 49, 73, 92, 92, 106, 64, 71, 66, 65, 55, 62, 55, 88, 38, 57, 84, 83, 57, 62, 102, 67, 33, 90, 51, 63, 68, 69, 67, 45, 61, 101, 81, 73, 77, 63, 65, 55, 81, 66, 53, 53, 77, 87, 59, 56, 54, 118, 59, 78, 50, 54, 65, 70, 72, 61, 33, 65, 58, 83, 72, 64, 52, 72, 102, 86, 61, 52, 42, 60, 58, 54, 73, 58, 76, 62, 43, 52, 57, 68, 49, 55, 52, 83, 68, 91, 57, 73, 74, 59, 70, 45, 57, 58, 68, 44, 73, 49, 77, 53, 55, 80, 61, 44, 67, 59, 100, 79, 40, 50, 69, 43, 71, 45, 67, 61, 57, 64, 63, 70, 56, 105, 47, 86, 63, 52, 78, 55, 50, 90, 57, 38, 53, 62, 54, 77, 74, 136, 76, 45, 45, 65, 62, 116, 36, 81, 61, 50, 82, 72, 69, 67, 54, 69, 54, 75, 59, 94, 72, 48, 85, 105, 78, 71, 60, 67, 47, 57, 57, 66, 52, 67, 65, 104, 73, 56, 102, 73, 53, 69, 83, 60, 66, 70, 60, 72, 53, 
56, 61, 84, 78, 57, 58, 61, 35, 55, 78, 63, 66, 47, 79, 68, 84, 95, 79, 61, 63, 74, 73, 65, 78, 73, 61, 58, 46, 80, 61, 55, 59, 42, 32, 110, 74, 56, 55, 51, 75, 65, 49, 74, 72, 105, 83, 98, 64, 75, 71, 55, 50, 60, 42, 101, 75, 62, 50, 64, 71, 71, 61, 52, 90, 49, 71, 78, 71, 64, 48, 85, 41, 52, 63, 69, 68, 50, 58, 41, 57, 97, 50, 58, 49, 60, 85, 43, 68, 54, 67, 98, 69, 63, 68, 48, 53, 64, 84, 69, 67, 52, 59, 61, 55, 63, 74, 62, 88, 88, 87, 101, 48, 57, 52, 69, 60, 71, 69, 125, 62, 61, 66, 54, 62, 48, 73, 53, 51, 54, 57, 53, 71, 75, 80, 74, 65, 58, 56, 65, 74, 85, 85, 69, 65, 87, 69, 64, 79, 63, 53, 62, 95, 65, 46, 63, 58, 65, 68, 75, 83, 79, 60, 63, 56, 95, 73, 64, 55, 62, 81, 84, 68, 54, 53, 65, 61, 53, 74, 78, 95, 70, 78, 68, 52, 55, 64, 63, 92, 52, 64, 61, 51, 57, 93, 68, 62, 52, 71, 72, 59, 70, 83, 59, 57, 46, 68, 66, 54, 71, 69, 79, 64, 78, 68, 52, 75, 70, 60, 78, 71, 80, 65, 54, 56, 83, 72, 38, 60, 61, 88, 57, 65, 61, 76, 48, 71, 101, 48, 80, 41, 56, 55, 55, 84, 64, 71, 62, 69, 57, 60, 38, 63, 64, 68, 84, 58, 58, 75, 66, 82, 76, 80, 72, 44, 59, 61, 73, 60, 55, 72, 80, 51, 80, 52, 69, 54, 57, 67, 55, 74, 64, 43, 52, 63, 84, 49, 52, 73, 80, 41, 64, 53, 73, 73, 53, 83, 63, 61, 67, 64, 84, 77, 57, 94, 49, 62, 60, 59, 53, 71, 58, 68, 71, 52, 53, 62, 57, 84, 47, 68, 63, 54, 58, 103, 104, 58, 65, 79, 74, 76, 52, 37, 58, 64, 62, 47, 93, 87, 58, 81, 62, 45, 50, 53, 68, 66, 55, 80, 82, 70, 68, 72, 99, 83, 81, 110, 50, 57, 81, 87, 72, 49, 49, 43, 65, 75, 69, 97, 62, 89, 72, 49, 46, 57, 48, 85, 42, 51, 48, 67, 86, 72, 89, 75, 67, 57, 55, 41, 67, 96, 67, 56, 72, 40, 45, 70, 58, 77, 76, 63, 82, 132, 57, 75, 46, 65, 69, 69, 65, 66, 91, 54, 56, 94, 78, 67, 61, 71, 59, 63, 81, 63, 69, 61, 81, 81, 57, 53, 89, 75, 66, 83, 82, 57, 76, 65, 90, 83, 53, 65, 52, 64, 62, 69, 90, 36, 64, 70, 82, 67, 52, 88, 68, 48, 66, 57, 65, 34, 55, 71, 57, 69, 53, 48, 65, 52, 70, 95, 72, 61, 79, 81, 90, 60, 57, 90, 47, 44, 42, 67, 53, 48, 59, 55, 67, 65, 81, 59, 71, 46, 55, 70, 68, 44, 48, 59, 62, 65, 70, 66, 72, 58, 64, 76, 36, 68, 83, 59, 79, 58, 62, 65, 53, 92, 82, 62, 64, 47, 52, 50, 66, 53, 72, 68, 63, 68, 74, 47, 67, 78, 59, 61, 41, 105, 61, 63, 88, 73, 66, 83, 78, 64, 114, 94, 86, 54, 57, 87, 76, 65, 66, 81, 67, 67, 57, 70, 55, 53, 50, 58, 49, 63, 60, 94, 52, 67, 41, 78, 102, 67, 58, 111, 62, 76, 64, 77, 77, 43, 41, 76, 63, 57, 77, 47, 62, 81, 55, 47, 91, 82, 56, 52, 68, 66, 75, 81, 43, 59, 73, 72, 63, 72, 46, 47, 57, 62, 44, 52, 58, 70, 80, 67, 53, 67, 84, 46, 68, 59, 73, 74, 55, 51, 71, 53, 95, 80, 73, 46, 102, 84, 65, 63, 86, 83, 60, 87, 76, 65, 46, 85, 56, 61, 53, 84, 64, 96, 57, 58, 68, 52, 62, 53, 54, 73, 37, 68, 59, 65, 56, 49, 63, 61, 53, 56, 64, 62, 69, 76, 73, 65, 38, 64, 64, 64, 60, 71, 84, 64, 68, 69, 49, 82, 72, 77, 54, 82, 56, 73, 73, 70, 63, 67, 55, 68, 73, 75, 50, 56, 70, 67, 66, 65, 61, 63, 54, 64, 67, 64, 91, 87, 51, 61, 55, 66, 83, 59, 79, 44, 57, 87, 106, 89, 91, 82, 69, 55, 56, 74, 72, 88, 54, 58, 60, 68, 73, 71, 51, 62, 68, 56, 35, 53, 53, 93, 65, 54, 40, 112, 56, 67, 84, 71, 58, 54, 66, 54, 73, 31, 49, 47, 53, 53, 64, 65, 63, 55, 74, 50, 71, 60, 81, 62, 59, 86, 57, 55, 72, 148, 72, 92, 48, 66, 68, 49, 74, 61, 62, 75, 50, 65, 56, 70, 67, 54, 72, 76, 77, 112, 77, 55, 66, 65, 48, 76, 58, 58, 83, 53, 58, 95, 55, 57, 55, 56, 58, 69, 64, 71, 65, 67, 76, 44, 79, 38, 67, 43, 66, 53, 85, 55, 50, 68, 51, 59, 72, 73, 76, 62, 59, 93, 63, 68, 65, 54, 58, 59, 90, 63, 59, 79, 85, 49, 65, 63, 63, 70, 51, 68, 53, 43, 70, 159, 91, 63, 62, 69, 84, 55, 47, 53, 81, 49, 64, 115, 64, 46, 43, 58, 44, 63, 60, 55, 68, 
56, 89, 63, 60, 61, 56, 69, 52, 81, 63, 67, 59, 85, 66, 84, 81, 75, 88, 55, 72, 79, 43, 38, 63, 88, 58, 69, 74, 65, 81, 79, 66, 52, 49, 97, 79, 68, 75, 43, 66, 50, 72, 61, 66, 65, 53, 71, 96, 65, 46, 64, 69, 61, 65, 63, 103, 68, 80, 52, 48, 74, 75, 71, 68, 59, 45, 91, 65, 70, 43, 48, 56, 69, 57, 84, 82, 86, 67, 60, 38, 59, 74, 65, 63, 79, 80, 65, 51, 73, 77, 66, 46, 70, 61, 65, 63, 90, 53, 47, 67, 72, 76, 45, 51, 96, 86, 83, 78, 52, 80, 71, 56, 71, 79, 107, 71, 50, 57, 66, 48, 38, 52, 24, 52, 56, 85, 60, 63, 83, 81, 84, 45, 68, 88, 90, 63, 37, 48, 56, 83, 96, 67, 67, 82, 73, 55, 68, 55, 57, 67, 69, 114, 72, 67, 61, 68, 67, 54, 45, 65, 78, 59, 69, 60, 65, 39, 53, 47, 83, 69, 62, 44, 78, 80, 64, 78, 59, 49, 92, 51, 35, 32, 102, 68, 85, 68, 50, 64, 54, 48, 57, 64, 69, 98, 63, 46, 75, 70, 70, 38, 56, 89, 40, 67, 80, 47, 56, 88, 58, 67, 61, 68, 57, 57, 56, 62, 85, 52, 92, 78, 93, 65, 74, 59, 62, 56, 74, 48, 47, 80, 86, 58, 58, 95, 70, 58, 44, 68, 66, 45, 64, 80, 65, 71, 58, 63, 60, 73, 66, 62, 65, 56, 84, 72, 69, 67, 63, 59, 84, 57, 68, 78, 63, 87, 72, 88, 67, 73, 66, 104, 90, 76, 85, 80, 72, 90, 82, 77, 70, 62, 83, 75, 88, 60, 67, 56, 66, 69, 63, 55, 74, 91, 80, 38, 61, 97, 74, 82, 67, 51, 68, 61, 57, 58, 94, 65, 79, 73, 62, 63, 67, 84, 59, 48, 84, 63, 69, 49, 115, 75, 82, 79, 56, 55, 54, 50, 62, 70, 62, 55, 45, 74, 50, 81, 79, 56, 80, 68, 50, 83, 66, 64, 48, 64, 69, 65, 61, 64, 87, 78, 55, 108, 58, 53, 44, 64, 62, 83, 86, 67, 54, 61, 58, 92, 81, 56, 49, 77, 41, 46, 32, 52, 48, 108, 70, 64, 38, 67, 66, 68, 64, 48, 65, 79, 60, 53, 89, 97, 73, 62, 59, 59, 67, 52, 68, 70, 55, 92, 57, 42, 81, 50, 62, 68, 64, 35, 87, 75, 55, 77, 74, 53, 47, 48, 90, 59, 82, 71, 79, 53, 106, 64, 45, 72, 67, 65, 81, 30, 68, 67, 32, 57, 41, 53, 77, 53, 59, 66, 65, 75, 83, 34, 71, 79, 105, 70, 52, 63, 38, 61, 78, 73, 77, 76, 46, 56, 71, 67, 72, 60, 72, 52, 80, 68, 65, 76, 50, 52, 70, 40, 105, 75, 94, 64, 50, 73, 72, 81, 66, 63, 83, 66, 67, 79, 66, 92, 63, 88, 54, 73, 57, 68, 57, 74, 102, 49, 59, 91, 76, 58, 75, 44, 75, 66, 58, 85, 41, 65, 57, 50, 53, 51, 97, 75, 83, 51, 57, 87, 77, 61, 70, 65, 46, 107, 50, 56, 51, 49, 55, 48, 64, 63, 74, 73, 50, 65, 76, 66, 52, 56, 57, 74, 58, 64, 67, 54, 64, 62, 69, 89, 73, 51, 47, 70, 74, 62, 71, 63, 64, 69, 50, 72, 79, 78, 95, 55, 72, 58, 74, 79, 59, 50, 71, 46, 80, 75, 59, 54, 68, 76, 75, 76, 50, 51, 59, 57, 62, 63, 59, 63, 60, 64, 47, 71, 66, 54, 76, 89, 69, 34, 63, 64, 59, 88, 68, 54, 51, 69, 53, 74, 70, 77, 71, 61, 63, 66, 71, 81, 77, 72, 85, 82, 82, 57, 62, 43, 59, 42, 50, 42, 80, 54, 60, 50, 57, 60, 75, 74, 53, 64, 78, 68, 75, 78, 82, 87, 55, 81, 63, 68, 71, 58, 65, 47, 58, 73, 105, 52, 61, 70, 44, 102, 60, 71, 74, 51, 78, 55, 66, 119, 67, 55, 88, 68, 74, 44, 58, 57, 82, 64, 71, 83, 87, 66, 81, 51, 58, 71, 76, 65, 60, 58, 81, 70, 67, 66, 80, 68, 59, 66, 93, 34, 90, 77, 57, 45, 47, 51, 74, 58, 79, 60, 61, 71, 55, 100, 59, 73, 64, 51, 69, 61, 69, 51, 60, 79, 72, 65, 82, 72, 50, 61, 69, 89, 69, 61, 64, 63, 80, 79, 57, 81, 53, 71, 71, 113, 42, 48, 65, 50, 84, 65, 63, 64, 57, 86, 87, 108, 68, 68, 77, 64, 62, 82, 62, 89, 55, 64, 53, 89, 68, 42, 62, 59, 92, 74, 79, 77, 63, 60, 86, 49, 83, 106, 91, 60, 102, 66, 57, 61, 70, 57, 78, 75, 56, 63, 53, 58, 47, 47, 50, 54, 56, 109, 94, 47, 65, 68, 69, 75, 59, 45, 59, 77, 56, 54, 50, 59, 45, 61, 39, 53, 51, 41, 99, 56, 64, 73, 61, 76, 58, 85, 60, 39, 47, 81, 59, 62, 61, 49, 60, 62, 70, 54, 75, 54, 51, 72, 61, 66, 69, 60, 79, 68, 69, 53, 82, 88, 74, 79, 88, 64, 60, 50, 50, 64, 112, 52, 62, 65, 61, 63, 60, 50, 63, 84, 80, 73, 84, 100, 72, 99, 
65, 64, 56, 72, 59, 73, 69, 69, 94, 74, 57, 53, 52, 38, 92, 63, 58, 80, 65, 94, 91, 90, 70, 58, 66, 68, 73, 76, 75, 81, 52, 83, 47, 91, 93, 82, 59, 55, 56, 61, 88, 57, 76, 64, 56, 64, 71, 82, 62, 79, 90, 66, 68, 108, 59, 62, 61, 61, 73, 52, 116, 68, 70, 57, 48, 82, 64, 61, 62, 55, 69, 87, 57, 66, 67, 65, 47, 55, 78, 92, 52, 75, 79, 69, 76, 78, 58, 47, 50, 59, 54, 62, 95, 80, 65, 70, 59, 83, 46, 103, 46, 86, 70, 38, 59, 75, 57, 67, 58, 69, 47, 54, 43, 57, 50, 81, 70, 66, 85, 73, 55, 72, 51, 63, 68, 53, 53, 35, 55, 75, 69, 57, 64, 89, 52, 74, 59, 51, 59, 63, 59, 97, 54, 82, 52, 66, 77, 69, 72, 66, 50, 72, 68, 78, 52, 63, 49, 92, 52, 56, 58, 59, 49, 61, 64, 83, 63, 88, 54, 60, 57, 65, 78, 69, 62, 58, 52, 55, 47, 80, 77, 54, 94, 58, 69, 68, 53, 73, 76, 69, 80, 61, 61, 82, 58, 48, 60, 71, 61, 66, 74, 83, 61, 59, 93, 89, 55, 45, 51, 70, 63, 61, 61, 57, 66, 47, 110, 58, 58, 91, 49, 62, 77, 61, 66, 85, 56, 85, 64, 54, 63, 66, 71, 50, 50, 73, 80, 60, 61, 75, 64, 48, 76, 59, 58, 75, 66, 72, 63, 43, 53, 66, 65, 70, 81, 59, 51, 71, 59, 50, 68, 69, 55, 55, 66, 70, 64, 83, 83, 76, 55, 53, 68, 71, 61, 67, 60, 64, 67, 64, 54, 59, 75, 69, 60, 53, 81, 51, 57, 63, 98, 57, 77, 66, 71, 68, 62, 61, 56, 77, 66, 76, 61, 62, 73, 96, 64, 84, 67, 57, 61, 54, 101, 70, 58, 74, 79, 54, 54, 53, 72, 55, 63, 51, 46, 55, 65, 68, 64, 75, 58, 59, 51, 61, 80, 89, 58, 68, 67, 48, 60, 60, 60, 72, 74, 59, 56, 82, 65, 70, 82, 71, 57, 63, 65, 62, 46, 69, 64, 58, 60, 62, 53, 54, 78, 66, 81, 77, 59, 61, 76, 62, 49, 88, 67, 47, 59, 87, 67, 77, 78, 68, 70, 59, 60, 69, 55, 64, 87, 48, 56, 66, 55, 57, 94, 55, 72, 64, 64, 60, 64, 91, 50, 54, 55, 53, 52, 63, 75, 74, 49, 66, 62, 57, 66, 58, 69, 81, 79, 57, 69, 74, 65, 82, 70, 61, 49, 69, 64, 76, 116, 52, 60, 67, 53, 56, 70, 56, 90, 50, 116, 80, 72, 62, 67, 62, 57, 50, 62, 52, 46, 63, 64, 46, 78, 64, 53, 55, 43, 86, 56, 84, 65, 64, 57, 43, 58, 45, 49, 89, 53, 71, 55, 29, 70, 60, 83, 150, 63, 72, 81, 58, 61, 108, 51, 80, 51, 111, 51, 96, 62, 71, 53, 45, 87, 64, 82, 55, 63, 73, 112, 73, 57, 84, 75, 48, 52, 61, 74, 78, 63, 79, 98, 66, 57, 86, 56, 64, 94, 67, 54, 67, 101, 55, 64, 69, 61, 72, 65, 72, 93, 63, 73, 57, 77, 62, 55, 71, 61, 59, 80, 65, 53, 88, 73, 54, 98, 58, 49, 63, 76, 54, 80, 112, 66, 61, 55, 56, 77, 63, 81, 83, 55, 72, 51, 48, 77, 55, 79, 66, 61, 63, 87, 51, 75, 68, 59, 48, 65, 69, 83, 80, 64, 85, 73, 62, 80, 64, 76, 65, 53, 66, 70, 65, 54, 58, 50, 69, 43, 56, 87, 77, 44, 60, 57, 41, 54, 58, 48, 52, 73, 78, 58, 64, 78, 81, 60, 72, 76, 68, 65, 52, 51, 57, 59, 55, 96, 58, 69, 106, 62, 55, 53, 66, 40, 92, 52, 57, 64, 76, 65, 78, 57, 62, 60, 50, 66, 57, 46, 54, 69, 47, 48, 70, 65, 88, 52, 63, 69, 54, 100, 69, 74, 56, 66, 50, 69, 59, 59, 59, 63, 62, 85, 72, 52, 78, 61, 56, 52, 47, 56, 70, 67, 51, 65, 56, 70, 67, 75, 89, 59, 54, 60, 72, 57, 60, 80, 69, 70, 57, 71, 66, 103, 58, 75, 62, 65, 60, 61, 67, 81, 49, 79, 41, 75, 59, 72, 77, 49, 86, 72, 51, 55, 65, 44, 98, 54, 55, 74, 65, 47, 66, 53, 55, 48, 56, 83, 62, 59, 62, 54, 57, 70, 51, 70, 90, 63, 58, 96, 68, 59, 88, 70, 73, 60, 59, 80, 52, 66, 91, 49, 46, 58, 54, 81, 77, 61, 78, 62, 75, 69, 86, 62, 77, 65, 58, 65, 63, 63, 85, 69, 69, 59, 49, 53, 53, 70, 76, 72, 60, 60, 46, 54, 73, 59, 57, 75, 73, 54, 56, 69, 62, 62, 68, 51, 61, 63, 77, 56, 52, 64, 71, 48, 78, 65, 49, 63, 69, 69, 66, 54, 69, 65, 57, 72, 70, 90, 51, 61, 87, 60, 83, 59, 63, 77, 52, 75, 57, 60, 70, 66, 76, 63, 58, 67, 52, 67, 65, 69, 65, 46, 60, 63, 49, 79, 89, 65, 71, 52, 79, 61, 67, 53, 85, 68, 64, 82, 40, 63, 99, 55, 49, 65, 75, 67, 70, 60, 66, 52, 88, 62, 56, 56, 75, 
95, 80, 69, 57, 90, 69, 74, 56, 61, 69, 78, 67, 73, 70, 65, 79, 69, 77, 54, 100, 96, 66, 68, 76, 65, 87, 57, 101, 50, 43, 55, 63, 112, 71, 41, 68, 69, 60, 75, 80, 99, 63, 74, 57, 70, 60, 71, 60, 66, 67, 109, 64, 66, 68, 58, 78, 62, 77, 82, 61, 79, 64, 54, 58, 72, 75, 66, 66, 74, 59, 66, 75, 80, 67, 58, 55, 62, 53, 66, 58, 65, 52, 54, 55, 73, 43, 68, 54, 56, 74, 60, 76, 71, 79, 67, 84, 67, 64, 65, 75, 57, 50, 64, 51, 68, 66, 44, 55, 58, 66, 67, 51, 62, 66, 67, 65, 64, 75, 64, 74, 59, 83, 60, 63, 68, 61, 79, 59, 86, 63, 58, 63, 62, 56, 46, 52, 53, 68, 67, 57, 87, 60, 40, 65, 81, 56, 79, 51, 64, 61, 82, 59, 70, 75, 53, 27, 77, 66, 59, 85, 56, 40, 66, 74, 63, 69, 99, 67, 64, 65, 56, 58, 75, 68, 92, 72, 54, 87, 47, 67, 68, 53, 79, 60, 72, 75, 43, 76, 61, 153, 82, 68, 67, 51, 64, 58, 92, 70, 78, 69, 77, 80, 96, 58, 71, 76, 74, 57, 79, 61, 63, 58, 73, 59, 76, 53, 67, 59, 68, 54, 70, 46, 118, 51, 57, 80, 63, 52, 91, 57, 70, 67, 67, 73, 127, 69, 56, 66, 52, 50, 74, 62, 77, 57, 64, 54, 68, 62, 51, 77, 60, 90, 71, 71, 60, 62, 63, 86, 65, 91, 58, 80, 68, 77, 58, 57, 75, 50, 76, 66, 115, 62, 49, 66, 66, 75, 69, 76, 56, 60, 64, 55, 65, 58, 68, 68, 60, 80, 56, 55, 69, 119, 66, 87, 73, 73, 51, 71, 85, 70, 77, 54, 64, 56, 59, 96, 60, 91, 72, 69, 54, 59, 63, 73, 66, 44, 57, 51, 71, 54, 68, 55, 81, 68, 47, 77, 70, 60, 70, 49, 81, 78, 56, 68, 59, 49, 57, 85, 55, 45, 72, 67, 55, 51, 62, 63, 63, 69, 68, 71, 67, 64, 58, 75, 63, 62, 66, 65, 65, 68, 64, 70, 64, 68, 51, 57, 56, 70, 55, 50, 40, 57, 63, 68, 83, 47, 68, 53, 67, 79, 91, 56, 71, 64, 108, 54, 73, 55, 58, 70, 53, 68, 70, 50, 81, 53, 67, 73, 52, 71, 66, 70, 72, 87, 68, 53, 60, 57, 55, 53, 45, 73, 62, 62, 53, 68, 80, 65, 73, 66, 52, 45, 86, 55, 56, 78, 50, 43, 65, 58, 40, 70, 67, 54, 60, 125, 53, 67, 57, 70, 56, 80, 48, 96, 54, 68, 56, 79, 81, 50, 54, 59, 57, 58, 119, 50, 50, 58, 81, 56, 73, 55, 54, 78, 72, 62, 69, 90, 66, 57, 60, 61, 60, 52, 90, 59, 71, 46, 82, 53, 38, 57, 71, 55, 67, 64, 60, 45, 55, 69, 57, 91, 57, 56, 75, 50, 83, 62, 41, 56, 86, 61, 94, 64, 62, 90, 55, 48, 61, 59, 52, 62, 70, 46, 102, 91, 88, 53, 54, 74, 78, 54, 38, 63, 83, 44, 40, 63, 67, 66, 69, 70, 59, 76, 67, 61, 60, 63, 68, 58, 60, 54, 64, 59, 51, 52, 60, 45, 52, 69, 62, 66, 56, 83, 69, 70, 69, 69, 87, 57, 61, 81, 83, 52, 58, 54, 71, 67, 69, 91, 69, 79, 55, 83, 77, 81, 57, 60, 115, 76, 66, 53, 65, 51, 82, 56, 66, 77, 63, 55, 79, 39, 64, 49, 70, 68, 68, 74, 55, 79, 58, 60, 57, 87, 26, 65, 66, 81, 86, 51, 71, 59, 74, 70, 77, 58, 97, 56, 68, 56, 57, 55, 84, 108, 61, 57, 62, 73, 66, 89, 56, 57, 61, 81, 76, 66, 75, 85, 82, 74, 71, 116, 47, 88, 89, 73, 71, 61, 50, 65, 62, 73, 64, 74, 50, 51, 65, 61, 118, 73, 77, 52, 92, 61, 69, 74, 58, 89, 69, 61, 50, 64, 62, 71, 74, 55, 82, 60, 66, 64, 93, 59, 56, 88, 69, 72, 66, 60, 63, 65, 59, 57, 125, 64, 69, 63, 54, 66, 72, 63, 68, 58, 58, 66, 71, 67, 87, 75, 48, 54, 63, 84, 74, 41, 63, 49, 49, 61, 59, 70, 67, 53, 62, 59, 63, 56, 70, 61, 58, 55, 62, 54, 62, 60, 66, 100, 71, 71, 66, 43, 60, 62, 69, 66, 76, 71, 70, 74, 50, 70, 78, 65, 62, 56, 52, 40, 55, 46, 52, 58, 86, 60, 50, 69, 60, 93, 56, 66, 68, 64, 35, 65, 70, 59, 88, 64, 65, 59, 67, 77, 54, 66, 83, 74, 61, 79, 78, 68, 73, 56, 52, 51, 82, 50, 94, 77, 79, 68, 63, 112, 39, 58, 85, 62, 77, 59, 69, 62, 68, 58, 69, 59, 73, 62, 74, 176, 45, 44, 72, 63, 60, 49, 68, 44, 51, 55, 52, 67, 57, 77, 51, 55, 45, 44, 110, 63, 73, 52, 76, 69, 74, 70, 60, 67, 48, 54, 75, 57, 65, 79, 55, 90, 55, 63, 47, 72, 50, 61, 54, 69, 54, 75, 54, 92, 53, 79, 50, 71, 92, 66, 52, 65, 45, 31, 45, 57, 55, 102, 64, 53, 91, 100, 
90, 46, 61, 67, 88, 68, 54, 58, 57, 72, 63, 56, 66, 51, 52, 94, 57, 69, 81, 77, 68, 56, 90, 55, 49, 72, 48, 67, 78, 49, 61, 76, 59, 70, 68, 56, 71, 61, 45, 69, 66, 82, 137, 76, 62, 79, 58, 52, 60, 43, 62, 78, 78, 46, 67, 54, 100, 66, 86, 76, 67, 93, 79, 112, 61, 72, 74, 79, 55, 51, 141, 56, 65, 88, 82, 57, 72, 69, 79, 58, 93, 67, 51, 57, 65, 59, 61, 65, 65, 69, 57, 56, 75, 48, 71, 61, 54, 40, 57, 59, 80, 59, 54, 58, 57, 54, 84, 54, 63, 57, 86, 77, 55, 51, 63, 73, 59, 67, 63, 58, 75, 126, 76, 50, 59, 59, 56, 70, 47, 85, 52, 55, 64, 61, 53, 63, 50, 88, 49, 72, 70, 77, 67, 75, 60, 41, 77, 65, 49, 101, 62, 81, 59, 51, 55, 108, 65, 70, 60, 78, 47, 50, 57, 62, 87, 62, 64, 46, 61, 66, 89, 57, 159, 65, 62, 71, 80, 65, 51, 63, 116, 89, 54, 74, 64, 60, 102, 66, 53, 58, 63, 71, 71, 61, 74, 63, 66, 65, 53, 104, 70, 60, 65, 73, 52, 78, 60, 58, 69, 71, 68, 63, 66, 62, 94, 54, 65, 59, 70, 69, 67, 63, 81, 47, 118, 54, 56, 70, 56, 66, 93, 75, 70, 65, 65, 91, 64, 65, 67, 47, 63, 73, 73, 63, 76, 68, 58, 74, 80, 74, 75, 79, 61, 72, 68, 61, 51, 93, 66, 58, 53, 45, 78, 75, 52, 54, 64, 60, 65, 63, 49, 41, 56, 62, 81, 70, 49, 68, 49, 80, 53, 66, 43, 60, 61, 65, 85, 72, 70, 79, 64, 50, 69, 66, 54, 79, 82, 64, 101, 63, 59, 65, 95, 71, 58, 53, 73, 54, 59, 40, 61, 65, 67, 68, 79, 42, 63, 64, 113, 60, 68, 66, 50, 84, 76, 61, 68, 56, 47, 71, 56, 81, 65, 71, 49, 65, 64, 75, 72, 75, 66, 61, 71, 68, 55, 77, 60, 56, 74, 71, 73, 81, 51, 68, 42, 64, 62, 58, 53, 62, 72, 74, 64, 76, 42, 69, 62, 64, 68, 66, 97, 76, 75, 66, 69, 62, 67, 84, 70, 66, 55, 45, 77, 41, 64, 49, 60, 52, 65, 54, 67, 68, 47, 59, 57, 58, 73, 68, 80, 81, 59, 58, 69, 104, 56, 65, 59, 64, 80, 57, 81, 57, 61, 53, 61, 47, 53, 55, 62, 60, 59, 73, 72, 62, 76, 57, 71, 104, 49, 68, 58, 60, 76, 88, 70, 58, 62, 55, 74, 46, 74, 55, 46, 57, 55, 63, 45, 53, 62, 73, 82, 65, 76, 61, 57, 63, 77, 55, 62, 88, 60, 63, 57, 101, 56, 62, 55, 79, 62, 70, 61, 49, 66, 64, 69, 88, 51, 58, 86, 61, 57, 65, 59, 63, 74, 57, 62, 55, 51, 62, 77, 70, 65, 62, 59, 77, 62, 72, 52, 70, 46, 64, 104, 83, 50, 56, 47, 68, 63, 48, 62, 90, 53, 75, 71, 71, 58, 73, 56, 53, 61, 60, 79, 55, 70, 63, 58, 91, 52, 66, 80, 88, 52, 56, 59, 59, 57, 68, 63, 73, 63, 60, 70, 47, 74, 66, 49, 48, 63, 141, 48, 61, 60, 48, 69, 46, 55, 112, 56, 63, 69, 65, 60, 71, 64, 67, 73, 66, 64, 58, 63, 57, 91, 60, 61, 74, 58, 66, 73, 81, 65, 48, 48, 69, 62, 72, 60, 87, 79, 63, 60, 51, 89, 65, 61, 64, 71, 73, 79, 68, 81, 62, 64, 82, 69, 60, 56, 60, 53, 80, 50, 58, 96, 88, 65, 68, 57, 61, 60, 54, 52, 91, 72, 65, 76, 63, 63, 53, 57, 79, 68, 68, 53, 57, 71, 60, 43, 57, 56, 56, 82, 62, 66, 101, 66, 69, 78, 58, 52, 97, 60, 59, 64, 55, 70, 67, 54, 81, 55, 79, 65, 70, 73, 65, 55, 62, 67, 100, 69, 67, 61, 51, 58, 76, 62, 63, 80, 67, 53, 29, 67, 58, 56, 73, 56, 64, 75, 77, 49, 88, 60, 78, 78, 68, 69, 57, 60, 60, 60, 72, 51, 62, 52, 63, 65, 63, 70, 68, 52, 68, 54, 57, 77, 62, 64, 69, 66, 81, 79, 74, 51, 72, 61, 82, 54, 73, 78, 76, 68, 66, 62, 62, 69, 67, 77, 57, 60, 59, 56, 66, 73, 52, 84, 75, 66, 82, 62, 116, 71, 62, 59, 70, 56, 58, 53, 64, 52, 74, 65, 40, 87, 69, 61, 52, 63, 45, 128, 85, 58, 51, 67, 67, 56, 62, 73, 80, 99, 88, 34, 71, 82, 83, 53, 71, 93, 65, 42, 50, 82, 77, 72, 54, 67, 52, 95, 51, 55, 57, 62, 74, 65, 71, 62, 52, 79, 88, 56, 84, 60, 63, 63, 61, 120, 93, 64, 63, 67, 58, 49, 81, 73, 94, 79, 47, 76, 66, 62, 62, 66, 62, 90, 74, 88, 81, 77, 69, 60, 60, 58, 64, 60, 78, 59, 82, 66, 53, 63, 51, 67, 63, 79, 58, 61, 61, 52, 60, 50, 76, 64, 82, 77, 56, 66, 80, 65, 63, 70, 59, 58, 65, 48, 68, 48, 82, 70, 69, 66, 45, 80, 60, 51, 67, 
61, 84, 49, 53, 53, 66, 61, 70, 56, 69, 69, 72, 61, 52, 78, 62, 60, 67, 62, 61, 63, 71, 57, 62, 73, 68, 50, 68, 57, 66, 50, 69, 55, 82, 57, 58, 93, 110, 86, 86, 65, 58, 58, 68, 56, 67, 77, 64, 61, 54, 66, 47, 91, 66, 56, 60, 52, 61, 71, 55, 79, 60, 98, 56, 94, 59, 42, 65, 68, 83, 65, 66, 89, 63, 67, 58, 49, 57, 59, 78, 59, 74, 57, 77, 60, 60, 52, 75, 65, 54, 88, 60, 54, 60, 60, 62, 67, 60, 69, 64, 65, 62, 51, 63, 70, 47, 56, 65, 60, 63, 69, 65, 95, 55, 73, 58, 67, 79, 78, 67, 55, 63, 65, 59, 56, 60, 60, 68, 108, 89, 73, 54, 55, 72, 68, 91, 67, 81, 64, 54, 71, 68, 38, 56, 84, 68, 78, 78, 75, 55, 64, 66, 49, 60, 68, 52, 61, 57, 86, 65, 65, 88, 71, 49, 63, 89, 65, 67, 58, 59, 77, 53, 44, 56, 61, 65, 66, 60, 56, 55, 72, 94, 66, 66, 75, 70, 81, 76, 63, 54, 64, 58, 52, 114, 69, 67, 74, 58, 59, 64, 63, 47, 70, 82, 91, 64, 52, 58, 80, 60, 65, 56, 120, 79, 73, 59, 59, 65, 63, 71, 75, 52, 68, 75, 110, 74, 61, 72, 63, 72, 92, 58, 59, 54, 67, 54, 59, 61, 51, 42, 59, 61, 76, 84, 69, 67, 71, 66, 65, 54, 66, 57, 51, 60, 59, 56, 70, 59, 68, 87, 64, 64, 57, 89, 57, 64, 62, 54, 71, 84, 66, 67, 81, 70, 60, 61, 68, 51, 78, 54, 55, 46, 64, 58, 69, 81, 58, 64, 88, 61, 66, 62, 74, 135, 73, 68, 61, 61, 70, 61, 60, 98, 50, 65, 51, 56, 64, 58, 51, 61, 76, 44, 53, 52, 75, 64, 50, 61, 93, 63, 137, 74, 54, 78, 70, 42, 50, 54, 87, 80, 81, 70, 67, 68, 61, 46, 77, 63, 43, 61, 91, 79, 91, 61, 64, 60, 46, 66, 59, 70, 63, 66, 65, 61, 57, 60, 56, 55, 55, 66, 52, 61, 83, 71, 86, 87, 69, 74, 81, 59, 73, 58, 64, 65, 68, 69, 61, 59, 74, 77, 99, 97, 44, 63, 56, 106, 64, 70, 56, 55, 79, 60, 73, 70, 64, 79, 58, 64, 66, 89, 62, 68, 92, 56, 63, 66, 75, 59, 59, 69, 75, 57, 47, 69, 83, 70, 66, 67, 83, 55, 74, 58, 63, 73, 68, 95, 60, 62, 45, 60, 59, 105, 78, 65, 67, 70, 70, 66, 56, 62, 57, 57, 87, 59, 70, 80, 51, 78, 56, 58, 65, 52, 59, 88, 58, 56, 71, 70, 60, 71, 73, 54, 68, 70, 69, 74, 55, 86, 75, 66, 61, 59, 71, 57, 52, 46, 66, 68, 63, 40, 86, 56, 67, 66, 60, 67, 64, 66, 71, 57, 58, 74, 76, 55, 52, 59, 69, 60, 53, 69, 64, 65, 59, 82, 78, 62, 64, 66, 66, 61, 57, 61, 90, 37, 69, 61, 67, 67, 66, 48, 45, 64, 53, 64, 57, 64, 57, 56, 49, 59, 105, 62, 80, 66, 86, 70, 69, 49, 64, 59, 61, 73, 78, 45, 57, 81, 55, 60, 57, 54, 92, 58, 61, 102, 58, 78, 76, 82, 53, 65, 61, 51, 92, 88, 60, 65, 86, 79, 75, 63, 73, 56, 77, 45, 67, 59, 79, 60, 46, 84, 70, 83, 67, 132, 42, 74, 54, 65, 51, 64, 69, 60, 55, 53, 71, 58, 60, 62, 80, 66, 55, 68, 59, 72, 59, 54, 56, 70, 81, 65, 73, 56, 71, 44, 58, 49, 70, 47, 65, 51, 56, 65, 67, 59, 67, 88, 69, 57, 69, 59, 71, 68, 50, 69, 67, 54, 77, 67, 43, 57, 58, 66, 62, 64, 72, 90, 85, 74, 55, 90, 95, 57, 43, 91, 66, 48, 51, 67, 65, 55, 49, 59, 107, 53, 66, 102, 95, 88, 49, 61, 60, 95, 79, 75, 79, 88, 70, 83, 72, 68, 63, 83, 57, 87, 64, 66, 61, 58, 51, 50, 45, 117, 55, 68, 68, 43, 41, 53, 58, 58, 48, 66, 86, 71, 68, 42, 50, 55, 79, 56, 67, 59, 126, 68, 66, 111, 74, 54, 73, 57, 71, 65, 78, 67, 65, 55, 66, 63, 38, 68, 52, 84, 59, 82, 62, 61, 55, 68, 78, 59, 49, 62, 59, 71, 63, 47, 55, 79, 69, 78, 55, 58, 69, 58, 85, 71, 69, 57, 66, 76, 92, 71, 56, 58, 65, 64, 71, 73, 71, 53, 64, 68, 56, 65, 72, 113, 57, 56, 74, 74, 64, 74, 57, 56, 66, 56, 47, 61, 55, 76, 58, 71, 99, 77, 58, 93, 57, 62, 52, 90, 56, 80, 64, 74, 75, 60, 61, 81, 81, 68, 65, 63, 63, 69, 70, 79, 64, 45, 61, 72, 54, 42, 61, 128, 60, 58, 57, 70, 69, 49, 58, 58, 81, 64, 66, 68, 53, 56, 56, 80, 80, 64, 71, 60, 50, 82, 61, 54, 73, 67, 60, 66, 48, 67, 92, 83, 74, 63, 65, 59, 57, 75, 83, 78, 60, 67, 81, 56, 57, 61, 65, 42, 74, 50, 101, 61, 48, 104, 68, 46, 44, 127, 66, 
54, 48, 72, 53, 55, 60, 53, 58, 71, 56, 61, 59, 56, 52, 58, 123, 62, 84, 59, 58, 99, 75, 78, 72, 56, 86, 67, 60, 87, 77, 87, 83, 63, 55, 38, 61, 74, 93, 48, 78, 79, 61, 69, 65, 66, 59, 60, 67, 71, 67, 76, 59, 60, 78, 55, 84, 100, 59, 105, 58, 59, 73, 68, 63, 53, 57, 73, 77, 79, 59, 82, 67, 56, 55, 61, 75, 54, 85, 67, 61, 63, 80, 75, 71, 55, 54, 65, 68, 60, 67, 55, 63, 70, 56, 54, 55, 60, 66, 67, 70, 55, 58, 83, 93, 57, 63, 45, 65, 53, 136, 55, 55, 56, 54, 63, 73, 62, 84, 48, 76, 73, 61, 56, 61, 54, 62, 54, 61, 56, 49, 60, 64, 94, 63, 65, 80, 102, 73, 52, 47, 43, 71, 72, 76, 47, 64, 69, 48, 74, 69, 59, 60, 64, 58, 80, 70, 60, 59, 62, 63, 55, 64, 52, 77, 62, 70, 77, 74, 48, 57, 56, 74, 51, 73, 74, 57, 60, 78, 70, 73, 76, 84, 41, 46, 63, 88, 57, 59, 63, 56, 84, 61, 70, 83, 53, 129, 63, 70, 54, 60, 71, 62, 93, 62, 68, 59, 65, 129, 62, 54, 58, 62, 50, 62, 45, 77, 64, 54, 70, 50, 51, 55, 67, 112, 40, 66, 64, 61, 56, 63, 79, 59, 88, 62, 36, 68, 49, 59, 64, 80, 63, 85, 46, 86, 56, 70, 79, 63, 68, 63, 79, 71, 67, 74, 48, 54, 67, 51, 64, 53, 84, 53, 83, 84, 84, 67, 64, 79, 54, 59, 71, 55, 71, 57, 53, 62, 62, 69, 54, 53, 51, 73, 54, 64, 53, 48, 69, 69, 73, 62, 61, 92, 71, 52, 60, 55, 58, 134, 77, 64, 45, 65, 78, 63, 60, 64, 68, 37, 63, 55, 70, 65, 63, 70, 72, 66, 60, 66, 59, 58, 74, 66, 75, 65, 80, 46, 49, 92, 55, 63, 64, 64, 77, 59, 67, 74, 81, 63, 56, 58, 58, 92, 56, 60, 64, 65, 81, 83, 67, 114, 64, 54, 74, 90, 77, 60, 74, 46, 53, 56, 56, 55, 66, 37, 82, 81, 58, 97, 67, 55, 81, 69, 76, 55, 58, 71, 66, 68, 71, 76, 53, 61, 71, 53, 64, 70, 67, 60, 41, 95, 74, 89, 62, 69, 64, 64, 72, 65, 69, 79, 57, 85, 67, 52, 81, 46, 60, 49, 64, 65, 58, 61, 63, 70, 57, 57, 88, 74, 75, 58, 47, 54, 55, 82, 76, 59, 67, 67, 59, 83, 132, 77, 63, 67, 66, 62, 84, 66, 57, 69, 69, 79, 90, 51, 56, 61, 64, 64, 58, 58, 82, 80, 80, 55, 57, 37, 62, 52, 83, 56, 60, 64, 71, 82, 90, 81, 47, 76, 55, 64, 63, 44, 53, 51, 72, 62, 61, 65, 95, 59, 79, 68, 61, 54, 57, 80, 43, 39, 57, 52, 55, 79, 84, 58, 58, 100, 53, 70, 46, 84, 71, 50, 62, 77, 47, 69, 72, 67, 94, 44, 66, 65, 67, 65, 59, 59, 57, 57, 82, 60, 54, 81, 63, 68, 56, 67, 64, 69, 53, 102, 55, 90, 57, 71, 61, 85, 64, 39, 58, 50, 52, 60, 53, 91, 76, 81, 56, 106, 60, 83, 74, 59, 55, 66, 57, 67, 77, 71, 59, 77, 97, 45, 46, 46, 65, 61, 58, 76, 84, 62, 77, 67, 52, 59, 59, 53, 65, 58, 63, 75, 60, 72, 63, 81, 66, 52, 61, 70, 63, 78, 104, 61, 61, 138, 60, 57, 60, 48, 58, 77, 65, 67, 59, 57, 71, 51, 58, 59, 52, 56, 81, 61, 56, 62, 77, 68, 79, 55, 90, 55, 32, 76, 52, 54, 55, 45, 67, 90, 65, 47, 90, 47, 56, 41, 52, 78, 83, 60, 71, 55, 66, 85, 45, 74, 59, 61, 58, 91, 51, 58, 68, 58, 62, 63, 63, 110, 54, 56, 60, 104, 57, 59, 63, 68, 63, 67, 64, 84, 79, 63, 78, 58, 89, 71, 62, 55, 84, 59, 67, 71, 58, 56, 54, 54, 79, 62, 84, 65, 83, 52, 67, 56, 64, 62, 51, 50, 71, 88, 40, 55, 36, 61, 61, 78, 48, 47, 78, 81, 63, 73, 45, 71, 95, 66, 77, 63, 62, 60, 43, 62, 49, 47, 65, 66, 87, 67, 69, 75, 46, 61, 83, 122, 84, 55, 55, 58, 88, 84, 51, 52, 60, 56, 60, 63, 91, 50, 48, 62, 60, 61, 48, 63, 64, 69, 64, 48, 57, 19, 44, 51, 56, 61, 68, 73, 85, 64, 64, 65, 48, 72, 89, 41, 68, 75, 61, 65, 56, 52, 81, 52, 77, 86, 74, 58, 89, 70, 66, 72, 60, 69, 65, 57, 51, 53, 75, 54, 73, 41, 52, 75, 64, 52, 70, 51, 63, 83, 55, 85, 62, 46, 56, 67, 63, 55, 74, 47, 62, 96, 82, 67, 62, 63, 77, 45, 65, 61, 76, 54, 55, 74, 62, 80, 97, 52, 41, 45, 63, 63, 80, 74, 83, 68, 56, 84, 70, 51, 55, 79, 55, 62, 52, 104, 58, 73, 62, 60, 69, 110, 78, 55, 66, 48, 52, 65, 64, 70, 68, 72, 85, 59, 67, 57, 52, 69, 54, 68, 58, 89, 93, 59, 62, 71, 67, 
75, 72, 61, 91, 66, 79, 48, 61, 57, 67, 80, 52, 71, 70, 90, 56, 71, 112, 51, 81, 82, 66, 60, 79, 83, 58, 78, 69, 55, 57, 61, 54, 54, 54, 69, 53, 58, 80, 65, 52, 57, 67, 56, 83, 72, 35, 56, 51, 74, 53, 73, 65, 62, 74, 55, 62, 73, 79, 69, 63, 85, 51, 52, 65, 70, 53, 80, 60, 68, 60, 68, 56, 50, 48, 70, 59, 62, 52, 86, 63, 41, 41, 55, 90, 56, 71, 62, 84, 55, 65, 85, 77, 56, 56, 64, 61, 64, 73, 62, 54, 87, 66, 88, 77, 97, 57, 66, 56, 67, 50, 77, 39, 52, 54, 51, 48, 73, 103, 70, 65, 77, 54, 63, 55, 71, 66, 50, 74, 65, 70, 66, 50, 62, 80, 49, 48, 73, 62, 61, 63, 62, 113, 56, 58, 78, 80, 48, 62, 76, 61, 43, 60, 72, 72, 56, 65, 64, 69, 51, 76, 71, 77, 42, 58, 69, 68, 85, 83, 81, 63, 65, 91, 59, 85, 90, 80, 63, 52, 65, 90, 83, 91, 100, 41, 75, 69, 68, 57, 69, 60, 42, 73, 71, 89, 63, 44, 67, 58, 67, 63, 79, 71, 62, 63, 47, 31, 46, 49, 71, 68, 64, 68, 62, 43, 44, 62, 56, 75, 87, 55, 56, 71, 62, 55, 63, 56, 80, 59, 56, 59, 70, 45, 56, 92, 71, 63, 121, 43, 70, 77, 49, 71, 96, 58, 53, 87, 59, 71, 76, 61, 47, 71, 54, 73, 80, 84, 60, 75, 93, 59, 54, 62, 77, 67, 52, 53, 74, 70, 60, 53, 74, 70, 85, 90, 43, 64, 49, 95, 48, 88, 40, 66, 66, 63, 66, 85, 58, 75, 69, 63, 62, 55, 85, 55, 70, 71, 39, 55, 52, 86, 78, 78, 47, 49, 88, 52, 54, 79, 63, 61, 68, 69, 69, 56, 76, 70, 60, 66, 55, 68, 74, 58, 78, 68, 60, 75, 68, 83, 58, 57, 78, 69, 56, 86, 76, 73, 54, 71, 74, 69, 61, 75, 61, 69, 55, 60, 70, 80, 77, 75, 63, 67, 68, 70, 65, 57, 67, 75, 65, 61, 44, 61, 63, 86, 46, 68, 70, 52, 64, 50, 35, 58, 78, 59, 76, 68, 68, 59, 77, 54, 67, 76, 64, 69, 52, 51, 77, 55, 96, 76, 55, 47, 56, 56, 66, 49, 75, 87, 73, 50, 56, 51, 60, 51, 45, 54, 64, 68, 74, 68, 68, 47, 56, 65, 78, 65, 83, 72, 63, 75, 88, 49, 80, 49, 66, 44, 75, 83, 84, 57, 53, 73, 76, 90, 67, 66, 78, 81, 79, 61, 93, 74, 87, 70, 51, 60, 72, 58, 84, 54, 45, 62, 65, 87, 65, 59, 67, 60, 40, 44, 68, 51, 67, 60, 70, 90, 70, 22, 76, 56, 79, 59, 68, 47, 87, 75, 61, 49, 45, 55, 74, 36, 47, 70, 74, 87, 70, 76, 75, 60, 73, 84, 41, 83, 85, 102, 53, 77, 75, 48, 60, 66, 54, 50, 74, 69, 61, 89, 61, 59, 37, 74, 65, 62, 73, 77, 43, 64, 54, 67, 72, 72, 51, 44, 60, 68, 71, 79, 69, 52, 64, 55, 74, 59, 58, 36, 71, 55, 91, 52, 86, 96, 60, 72, 61, 77, 42, 67, 53, 45, 57, 69, 44, 72, 52, 54, 45, 75, 41, 61, 61, 62, 68, 67, 51, 71, 66, 75, 55, 58, 62, 46, 82, 57, 70, 52, 56, 58, 89, 84, 63, 61, 58, 45, 59, 98, 90, 72, 64, 65, 68, 69, 78, 84, 65, 86, 69, 61, 54, 56, 69, 73, 48, 62, 82, 87, 71, 67, 51, 73, 66, 65, 55, 63, 63, 90, 66, 73, 108, 59, 56, 70, 51, 90, 76, 66, 71, 74, 55, 62, 86, 26, 63, 61, 72, 38, 53, 66, 73, 82, 57, 73, 79, 43, 45, 81, 104, 78, 81, 54, 59, 72, 76, 75, 63, 31, 62, 72, 80, 60, 96, 77, 42, 69, 78, 82, 78, 50, 69, 62, 56, 63, 47, 59, 63, 77, 61, 74, 70, 54, 95, 63, 43, 55, 81, 96, 58, 54, 50, 79, 63, 85, 69, 34, 81, 44, 49, 55, 53, 78, 64, 52, 65, 55, 47, 70, 74, 80, 66, 71, 55, 71, 72, 68, 63, 60, 58, 59, 80, 94, 75, 48, 56, 46, 73, 49, 56, 52, 65, 60, 62, 75, 48, 62, 68, 64, 60, 57, 41, 59, 49, 60, 57, 61, 57, 75, 70, 61, 72, 75, 64, 73, 68, 71, 62, 71, 72, 48, 55, 68, 49, 83, 76, 75, 50, 89, 62, 60, 47, 43, 79, 59, 65, 71, 58, 88, 78, 84, 51, 57, 54, 62, 46, 51, 69, 54, 57, 157, 53, 79, 40, 70, 66, 47, 71, 71, 72, 67, 56, 58, 50, 62, 52, 53, 68, 69, 67, 85, 63, 64, 73, 62, 54, 61, 72, 106, 73, 61, 138, 81, 43, 48, 72, 80, 68, 70, 62, 60, 67, 57, 58, 52, 70, 61, 71, 66, 81, 61, 98, 38, 35, 63, 78, 77, 61, 71, 62, 62, 65, 54, 54, 65, 103, 79, 70, 72, 55, 104, 99, 49, 54, 61, 68, 69, 72, 63, 61, 62, 61, 55, 58, 40, 75, 61, 53, 73, 48, 65, 59, 69, 60, 68, 71, 71, 54, 
71, 68, 59, 72, 43, 52, 63, 61, 55, 52, 60, 86, 62, 73, 90, 68, 99, 89, 56, 62, 65, 53, 55, 66, 68, 72, 62, 47, 99, 46, 83, 83, 69, 76, 59, 51, 61, 70, 92, 55, 76, 63, 62, 104, 61, 61, 66, 62, 51, 89, 36, 61, 93, 61, 65, 58, 59, 79, 72, 78, 65, 56, 80, 62, 55, 75, 84, 70, 54, 54, 74, 57, 50, 57, 72, 97, 48, 84, 77, 73, 83, 77, 76, 92, 73, 70, 47, 108, 66, 57, 55, 100, 67, 72, 67, 66, 52, 72, 58, 81, 56, 64, 90, 68, 82, 61, 84, 52, 55, 65, 66, 49, 50, 55, 130, 57, 80, 71, 70, 56, 73, 63, 48, 56, 51, 72, 57, 59, 73, 65, 61, 64, 72, 50, 74, 48, 55, 87, 64, 82, 62, 70, 59, 88, 78, 78, 92, 71, 102, 80, 91, 73, 60, 71, 40, 62, 72, 68, 73, 60, 72, 65, 75, 82, 66, 54, 84, 75, 75, 52, 47, 65, 45, 64, 68, 51, 60, 64, 96, 52, 50, 61, 86, 80, 57, 67, 47, 69, 59, 68, 77, 73, 62, 72, 62, 67, 68, 68, 63, 55, 64, 70, 68, 61, 63, 60, 68, 55, 63, 61, 56, 82, 74, 69, 52, 92, 60, 60, 75, 94, 72, 48, 53, 55, 70, 65, 62, 73, 58, 63, 57, 61, 61, 65, 48, 56, 54, 87, 19, 85, 50, 64, 59, 70, 66, 62, 47, 83, 63, 44, 75, 64, 54, 65, 68, 63, 58, 50, 64, 64, 62, 115, 56, 81, 56, 55, 52, 62, 64, 62, 32, 55, 65, 59, 61, 50, 79, 76, 71, 84, 54, 65, 59, 62, 70, 58, 57, 57, 80, 57, 75, 73, 73, 53, 50, 63, 73, 115, 56, 57, 55, 62, 54, 79, 48, 53, 99, 64, 65, 77, 47, 65, 53, 33, 63, 80, 55, 54, 65, 67, 66, 60, 76, 76, 40, 78, 62, 65, 112, 81, 79, 69, 43, 68, 77, 67, 67, 91, 45, 67, 78, 67, 46, 62, 59, 97, 43, 57, 87, 57, 55, 74, 51, 86, 59, 56, 43, 61, 98, 57, 61, 70, 74, 62, 59, 62, 61, 63, 61, 66, 74, 65, 86, 46, 49, 54, 72, 66, 74, 70, 53, 70, 59, 50, 57, 55, 35, 67, 82, 70, 69, 82, 48, 65, 59, 70, 63, 64, 78, 84, 65, 76, 56, 58, 70, 60, 59, 45, 62, 79, 71, 44, 72, 60, 56, 69, 55, 75, 76, 74, 63, 47, 75, 72, 88, 64, 77, 91, 63, 55, 77, 73, 77, 76, 55, 70, 64, 54, 78, 69, 52, 72, 48, 52, 61, 41, 49, 63, 65, 54, 57, 51, 73, 48, 49, 62, 66, 73, 94, 60, 56, 48, 77, 74, 58, 110, 48, 106, 66, 59, 67, 89, 55, 60, 60, 55, 64, 63, 64, 71, 77, 61, 72, 47, 61, 107, 67, 71, 82, 72, 47, 58, 66, 96, 78, 66, 52, 91, 74, 65, 70, 56, 85, 65, 77, 50, 52, 54, 46, 60, 66, 58, 65, 80, 63, 74, 66, 62, 71, 61, 49, 58, 72, 59, 50, 85, 51, 70, 54, 79, 72, 77, 53, 91, 56, 67, 71, 73, 53, 65, 62, 62, 40, 63, 84, 47, 83, 59, 70, 66, 66, 44, 58, 64, 64, 66, 50, 65, 54, 73, 51, 49, 32, 63, 66, 81, 109, 78, 60, 81, 71, 61, 58, 91, 51, 63, 70, 96, 62, 56, 63, 53, 37, 49, 73, 65, 60, 49, 60, 83, 112, 67, 48, 55, 43, 59, 64, 60, 51, 49, 67, 58, 60, 83, 55, 88, 67, 61, 66, 82, 63, 72, 80, 47, 52, 64, 59, 79, 49, 61, 64, 75, 91, 69, 60, 77, 66, 55, 91, 70, 51, 62, 49, 70, 77, 90, 72, 64, 69, 65, 55, 84, 59, 67, 66, 45, 82, 80, 67, 50, 44, 70, 59, 56, 76, 63, 50, 64, 80, 68, 89, 73, 56, 80, 53, 78, 71, 80, 63, 52, 83, 75, 61, 50, 61, 57, 72, 73, 63, 59, 66, 60, 62, 88, 66, 59, 78, 75, 66, 60, 66, 52, 64, 93, 66, 107, 64, 63, 54, 58, 65, 74, 46, 63, 66, 56, 63, 91, 72, 55, 62, 48, 48, 26, 82, 67, 62, 53, 63, 82, 63, 69, 47, 49, 79, 89, 83, 59, 64, 93, 70, 46, 58, 88, 168, 51, 63, 80, 74, 81, 73, 57, 63, 49, 64, 73, 67, 70, 56, 56, 56, 73, 81, 57, 72, 50, 81, 73, 57, 66, 44, 67, 72, 57, 57, 66, 98, 69, 51, 61, 81, 48, 61, 66, 82, 70, 57, 71, 77, 77, 61, 58, 68, 54, 56, 59, 74, 64, 67, 69, 60, 85, 52, 63, 69, 57, 53, 77, 63, 65, 72, 57, 72, 63, 73, 64, 62, 71, 82, 49, 84, 74, 61, 64, 63, 63, 63, 71, 54, 67, 78, 73, 61, 79, 62, 71, 58, 124, 85, 58, 40, 81, 70, 78, 72, 74, 63, 53, 61, 66, 58, 83, 64, 53, 54, 80, 67, 71, 49, 74, 79, 90, 63, 55, 88, 76, 62, 85, 64, 45, 60, 48, 73, 53, 85, 86, 47, 77, 51, 57, 58, 59, 63, 79, 55, 64, 66, 103, 62, 60, 42, 60, 59, 67, 
59, 54, 63, 50, 59, 59, 55, 54, 50, 74, 62, 66, 78, 51, 79, 67, 46, 46, 70, 49, 56, 67, 101, 59, 66, 75, 63, 100, 66, 59, 45, 74, 60, 82, 67, 83, 88, 75, 70, 46, 104, 70, 64, 82, 62, 96, 60, 62, 102, 113, 65, 77, 69, 60, 109, 81, 53, 66, 62, 76, 53, 60, 52, 59, 50, 57, 70, 73, 53, 73, 69, 66, 67, 57, 86, 82, 52, 65, 75, 66, 94, 80, 68, 66, 72, 73, 67, 50, 69, 47, 62, 62, 66, 63, 70, 51, 61, 59, 103, 69, 48, 58, 62, 101, 55, 57, 64, 64, 61, 63, 59, 56, 53, 93, 70, 63, 77, 58, 69, 52, 61, 56, 59, 110, 56, 56, 72, 75, 58, 85, 60, 84, 50, 58, 76, 63, 92, 65, 68, 60, 56, 81, 58, 66, 51, 56, 52, 72, 63, 52, 61, 63, 68, 47, 58, 75, 58, 75, 71, 48, 81, 82, 62, 79, 75, 65, 72, 63, 54, 67, 70, 50, 67, 60, 61, 87, 58, 69, 77, 70, 65, 83, 77, 72, 80, 69, 63, 58, 63, 52, 67, 60, 83, 68, 48, 45, 51, 103, 67, 63, 76, 91, 64, 66, 50, 80, 77, 57, 57, 59, 64, 75, 49, 82, 69, 51, 57, 73, 64, 92, 73, 76, 62, 69, 60, 83, 50, 88, 63, 89, 54, 72, 62, 71, 45, 74, 60, 65, 75, 62, 74, 69, 59, 66, 64, 81, 73, 82, 68, 44, 67, 57, 77, 67, 60, 51, 47, 55, 66, 54, 65, 76, 44, 62, 78, 57, 67, 65, 46, 54, 69, 68, 54, 58, 70, 65, 54, 83, 72, 54, 69, 131, 64, 118, 69, 64, 62, 78, 55, 55, 63, 66, 64, 59, 58, 62, 57, 58, 74, 55, 70, 63, 76, 54, 65, 54, 52, 52, 52, 48, 72, 86, 54, 66, 63, 57, 65, 73, 70, 85, 63, 70, 48, 55, 58, 58, 66, 42, 111, 54, 59, 40, 73, 69, 57, 64, 59, 70, 70, 62, 66, 65, 52, 83, 77, 61, 58, 55, 57, 65, 55, 71, 66, 64, 41, 59, 63, 86, 58, 60, 65, 68, 95, 79, 73, 82, 78, 62, 71, 58, 57, 66, 72, 61, 82, 46, 61, 58, 39, 51, 66, 61, 43, 91, 62, 55, 52, 73, 54, 67, 87, 73, 55, 75, 47, 59, 59, 77, 73, 78, 60, 75, 63, 55, 58, 75, 77, 56, 79, 72, 57, 69, 57, 100, 62, 50, 100, 57, 62, 54, 62, 61, 51, 63, 57, 71, 84, 59, 53, 100, 62, 75, 82, 77, 55, 59, 54, 73, 76, 57, 62, 62, 63, 65, 40, 74, 61, 87, 59, 62, 78, 64, 65, 78, 68, 75, 59, 56, 51, 68, 61, 54, 55, 66, 108, 63, 71, 57, 54, 57, 71, 69, 73, 65, 72, 52, 45, 69, 77, 93, 70, 41, 64, 59, 61, 62, 52, 68, 42, 70, 52, 72, 100, 55, 81, 80, 44, 58, 55, 82, 48, 55, 90, 55, 68, 82, 74, 65, 61, 60, 54, 80, 80, 91, 69, 57, 57, 41, 66, 65, 66, 63, 54, 73, 81, 58, 68, 61, 65, 88, 60, 57, 58, 62, 58, 64, 75, 44, 46, 61, 66, 58, 77, 61, 76, 61, 66, 59, 67, 62, 65, 61, 60, 55, 66, 55, 68, 47, 68, 74, 53, 73, 70, 53, 59, 62, 63, 62, 65, 55, 50, 95, 63, 99, 57, 61, 63, 77, 59, 76, 58, 54, 57, 63, 80, 55, 79, 57, 73, 40, 63, 59, 60, 76, 54, 92, 64, 55, 47, 106, 64, 55, 61, 108, 78, 117, 56, 45, 66, 68, 67, 67, 83, 63, 54, 71, 78, 67, 52, 68, 80, 50, 118, 86, 64, 80, 61, 71, 52, 54, 44, 58, 69, 64, 47, 64, 54, 73, 57, 53, 43, 70, 54, 68, 54, 55, 62, 55, 56, 68, 82, 85, 73, 67, 80, 68, 63, 53, 97, 58, 69, 53, 63, 64, 55, 68, 64, 65, 54, 73, 77, 63, 51, 77, 57, 75, 67, 67, 59, 57, 66, 67, 60, 60, 65, 51, 61, 53, 74, 61, 64, 81, 36, 65, 80, 86, 80, 65, 52, 59, 73, 60, 66, 75, 51, 51, 58, 56, 58, 62, 80, 62, 62, 56, 56, 74, 57, 55, 61, 61, 65, 66, 58, 74, 85, 59, 69, 62, 56, 64, 83, 82, 58, 67, 52, 64, 80, 52, 65, 74, 102, 53, 59, 99, 65, 54, 106, 75, 45, 55, 77, 60, 70, 71, 59, 80, 68, 52, 56, 71, 62, 70, 51, 71, 71, 56, 80, 74, 70, 55, 58, 57, 66, 64, 61, 76, 55, 82, 83, 64, 82, 69, 63, 82, 113, 68, 73, 57, 76, 61, 56, 65, 57, 62, 66, 66, 58, 71, 72, 73, 64, 58, 61, 51, 52, 53, 56, 65, 67, 56, 84, 62, 51, 68, 57, 70, 53, 64, 97, 62, 54, 91, 52, 77, 60, 86, 61, 59, 58, 44, 112, 62, 70, 72, 62, 71, 54, 64, 69, 70, 62, 64, 83, 62, 66, 75, 69, 66, 65, 55, 47, 84, 98, 66, 60, 59, 73, 71, 68, 52, 50, 49, 98, 49, 68, 92, 80, 52, 72, 52, 58, 50, 55, 65, 74, 69, 71, 69, 76, 60, 62, 102, 
75, 74, 68, 67, 61, 68, 58, 51, 68, 88, 68, 70, 68, 64, 83, 68, 62, 77, 56, 64, 56, 86, 61, 58, 69, 71, 62, 95, 60, 100, 68, 72, 71, 38, 60, 80, 57, 72, 59, 67, 75, 63, 70, 59, 66, 69, 51, 81, 46, 68, 48, 48, 64, 59, 54, 47, 55, 60, 53, 64, 51, 61, 58, 73, 64, 69, 54, 73, 47, 55, 65, 65, 54, 59, 53, 61, 98, 75, 64, 52, 75, 49, 80, 62, 55, 57, 64, 65, 53, 68, 51, 73, 57, 87, 66, 53, 74, 64, 78, 45, 64, 64, 55, 60, 59, 61, 78, 71, 80, 55, 66, 50, 58, 59, 68, 63, 67, 77, 88, 50, 78, 76, 72, 69, 82, 50, 77, 56, 77, 73, 34, 51, 59, 70, 58, 75, 69, 74, 77, 63, 71, 79, 63, 95, 76, 61, 58, 104, 58, 69, 43, 61, 45, 70, 56, 84, 77, 62, 62, 74, 60, 47, 54, 60, 53, 73, 56, 53, 118, 60, 65, 33, 83, 68, 107, 62, 82, 58, 55, 77, 68, 99, 52, 67, 45, 90, 47, 104, 47, 57, 63, 59, 59, 79, 49, 65, 52, 60, 52, 62, 53, 83, 58, 70, 63, 79, 113, 82, 90, 82, 75, 93, 76, 60, 61, 96, 58, 55, 54, 101, 77, 47, 116, 70, 61, 74, 37, 50, 59, 89, 37, 98, 54, 49, 53, 71, 71, 70, 51, 55, 128, 64, 47, 73, 78, 80, 71, 77, 68, 74, 98, 79, 54, 47, 61, 54, 68, 49, 111, 68, 80, 53, 64, 94, 59, 74, 68, 61, 53, 90, 99, 50, 75, 65, 139, 52, 69, 75, 49, 46, 54, 50, 76, 83, 76, 44, 65, 48, 70, 62, 64, 38, 73, 86, 39, 64, 50, 49, 119, 46, 68, 62, 83, 67, 46, 58, 94, 72, 52, 76, 65, 49, 70, 73, 65, 76, 73, 55, 70, 81, 89, 108, 62, 52, 66, 56, 51, 63, 75, 56, 45, 54, 66, 67, 60, 61, 70, 50, 71, 55, 68, 58, 50, 58, 76, 68, 64, 50, 53, 60, 60, 102, 72, 68, 70, 56, 54, 78, 101, 43, 54, 56, 42, 50, 63, 53, 67, 73, 46, 76, 47, 61, 65, 72, 54, 109, 50, 73, 61, 84, 116, 66, 48, 126, 61, 53, 66, 91, 58, 86, 54, 41, 60, 65, 56, 50, 34, 73, 84, 71, 68, 86, 66, 53, 60, 66, 61, 57, 75, 63, 70, 66, 54, 49, 60, 72, 74, 66, 55, 107, 44, 100, 49, 54, 63, 44, 86, 41, 75, 79, 75, 60, 49, 31, 80, 65, 74, 63, 61, 69, 40, 62, 66, 60, 68, 52, 79, 73, 64, 76, 74, 60, 63, 56, 90, 96, 68, 63, 65, 53, 93, 67, 107, 81, 101, 65, 60, 69, 54, 50, 51, 69, 51, 72, 71, 74, 97, 57, 57, 77, 64, 54, 56, 88, 57, 72, 61, 53, 66, 77, 103, 68, 34, 47, 55, 81, 93, 106, 66, 62, 89, 68, 54, 69, 62, 43, 76, 55, 90, 46, 69, 114, 59, 69, 50, 57, 65, 64, 68, 78, 59, 91, 75, 109, 96, 87, 65, 60, 70, 48, 70, 66, 50, 75, 62, 59, 78, 80, 35, 53, 70, 85, 64, 62, 56, 77, 75, 53, 57, 80, 46, 71, 55, 57, 59, 73, 56, 59, 47, 49, 99, 60, 75, 86, 44, 78, 80, 92, 63, 72, 56, 62, 43, 67, 64, 54, 70, 54, 67, 45, 88, 59, 63, 59, 56, 39, 68, 81, 65, 47, 80, 37, 109, 73, 73, 69, 50, 84, 92, 60, 62, 89, 72, 68, 78, 46, 59, 52, 98, 73, 78, 60, 53, 65, 68, 39, 94, 61, 58, 45, 75, 59, 68, 77, 69, 54, 66, 64, 69, 74, 74, 53, 59, 88, 72, 84, 98, 75, 70, 129, 129, 40, 41, 75, 58, 55, 81, 71, 88, 69, 50, 68, 48, 66, 35, 44, 106, 82, 63, 58, 48, 62, 84, 78, 35, 49, 77, 76, 77, 56, 55, 68, 45, 39, 38, 66, 66, 84, 71, 66, 59, 60, 61, 68, 44, 30, 101, 83, 56, 85, 54, 53, 42, 64, 71, 59, 50, 57, 38, 61, 82, 69, 59, 65, 49, 69, 57, 63, 79, 89, 47, 47, 99, 76, 107, 96, 88, 89, 53, 46, 63, 64, 63, 53, 51, 68, 66, 38, 47, 39, 72, 54, 91, 105, 62, 111, 52, 66, 74, 57, 87, 52, 50, 46, 49, 52, 80, 84, 87, 64, 44, 57, 64, 61, 61, 66, 49, 99, 68, 85, 62, 44, 74, 66, 69, 84, 90, 43, 49, 39, 79, 58, 82, 47, 51, 72, 66, 72, 128, 43, 58, 63, 76, 71, 57, 46, 64, 79, 55, 77, 50, 63, 61, 147, 59, 50, 59, 69, 40, 55, 51, 57, 69, 105, 76, 51, 53, 64, 50, 47, 68, 63, 72, 69, 64, 42, 61, 47, 67, 69, 92, 50, 52, 29, 74, 40, 68, 68, 76, 57, 71, 50, 47, 74, 91, 69, 67, 65, 60, 58, 68, 72, 62, 59, 62, 51, 49, 80, 52, 86, 53, 61, 69, 61, 53, 50, 49, 68, 96, 90, 41, 72, 82, 56, 82, 82, 59, 104, 48, 55, 53, 91, 58, 38, 61, 52, 57, 50, 73, 
62, 54, 81, 68, 64, 88, 79, 62, 46, 43, 72, 65, 66, 76, 56, 46, 60, 71, 72, 48, 52, 113, 66, 64, 57, 71, 68, 89, 43, 60, 99, 63, 72, 55, 45, 79, 62, 108, 77, 44, 85, 61, 47, 79, 67, 76, 56, 32, 42, 72, 71, 48, 69, 78, 74, 73, 67, 65, 55, 74, 70, 56, 71, 57, 80, 58, 67, 70, 27, 86, 53, 37, 54, 52, 29, 79, 35, 44, 69, 71, 58, 84, 90, 58, 66, 42, 41, 52, 57, 47, 45, 84, 61, 54, 64, 101, 76, 63, 60, 52, 47, 79, 80, 54, 62, 66, 56, 59, 60, 68, 53, 105, 56, 71, 75, 69, 54, 39, 66, 61, 66, 49, 48, 74, 82, 87, 60, 68, 95, 44, 69, 79, 45, 64, 107, 52, 92, 90, 66, 94, 62, 45, 84, 133, 63, 47, 61, 75, 53, 67, 55, 61, 84, 78, 72, 48, 50, 63, 70, 79, 52, 59, 65, 61, 55, 83, 62, 68, 77, 61, 68, 33, 91, 53, 72, 59, 68, 51, 67, 64, 70, 93, 82, 81, 66, 75, 72, 71, 54, 57, 69, 66, 93, 76, 59, 55, 56, 80, 54, 51, 64, 59, 132, 51, 102, 69, 80, 61, 51, 66, 54, 52, 47, 47, 78, 114, 59, 58, 48, 47, 55, 51, 54, 64, 68, 42, 50, 81, 71, 67, 61, 50, 66, 58, 60, 60, 68, 77, 70, 72, 58, 35, 68, 49, 71, 61, 53, 75, 53, 66, 73, 79, 51, 67, 29, 66, 77, 65, 70, 69, 63, 57, 96, 33, 47, 67, 59, 63, 75, 45, 68, 55, 85, 67, 60, 80, 54, 54, 87, 58, 74, 68, 65, 54, 51, 52, 54, 74, 52, 55, 70, 77, 57, 58, 65, 66, 96, 51, 48, 63, 58, 79, 55, 72, 61, 54, 99, 58, 62, 64, 42, 82, 59, 59, 68, 56, 62, 75, 74, 53, 70, 79, 70, 95, 59, 59, 132, 56, 71, 67, 44, 61, 35, 63, 74, 97, 86, 59, 56, 68, 49, 53, 72, 65, 70, 62, 76, 68, 62, 64, 40, 85, 59, 68, 76, 64, 55, 62, 55, 50, 36, 48, 73, 62, 68, 51, 65, 66, 68, 104, 100, 55, 49, 95, 75, 54, 58, 50, 76, 50, 50, 70, 61, 97, 89, 45, 78, 57, 59, 78, 55, 57, 50, 59, 70, 57, 63, 68, 61, 58, 77, 52, 71, 55, 56, 88, 84, 51, 64, 65, 46, 71, 62, 62, 61, 49, 69, 67, 117, 73, 66, 52, 69, 57, 75, 45, 63, 85, 50, 74, 63, 65, 57, 69, 64, 53, 58, 51, 77, 56, 57, 59, 61, 48, 71, 63, 62, 66, 58, 72, 88, 50, 63, 61, 78, 49, 72, 48, 83, 51, 62, 57, 71, 60, 77, 85, 54, 59, 58, 63, 60, 50, 66, 45, 68, 57, 48, 41, 84, 67, 68, 53, 73, 56, 40, 87, 81, 75, 55, 73, 62, 53, 72, 65, 58, 55, 72, 57, 73, 69, 101, 93, 82, 81, 49, 68, 42, 63, 89, 64, 49, 53, 112, 62, 74, 52, 60, 57, 75, 60, 97, 29, 61, 69, 111, 67, 53, 56, 60, 94, 64, 65, 54, 52, 61, 79, 62, 68, 73, 135, 120, 71, 77, 57, 71, 70, 50, 63, 49, 63, 83, 76, 68, 65, 61, 51, 63, 86, 75, 82, 51, 43, 78, 74, 54, 62, 53, 62, 80, 54, 65, 60, 74, 56, 62, 58, 49, 59, 79, 63, 57, 78, 84, 83, 63, 47, 56, 72, 51, 50, 43, 84, 72, 71, 94, 78, 62, 85, 70, 52, 69, 64, 81, 49, 59, 87, 62, 80, 57, 78, 67, 69, 75, 59, 70, 69, 71, 112, 69, 82, 79, 64, 51, 51, 64, 43, 73, 58, 57, 60, 58, 69, 51, 86, 60, 69, 62, 71, 62, 68, 87, 104, 80, 53, 62, 64, 63, 55, 68, 54, 73, 91, 55, 98, 65, 52, 71, 59, 51, 61, 50, 61, 70, 55, 54, 89, 64, 61, 57, 62, 47, 60, 86, 61, 60, 72, 108, 57, 79, 67, 64, 54, 90, 52, 59, 53, 49, 67, 72, 63, 74, 70, 92, 44, 74, 74, 102, 86, 60, 67, 63, 63, 65, 72, 64, 45, 67, 65, 65, 46, 61, 78, 62, 70, 71, 62, 53, 68, 84, 71, 65, 73, 85, 43, 71, 72, 67, 66, 69, 64, 44, 104, 62, 62, 54, 67, 69, 47, 45, 54, 57, 48, 66, 64, 67, 49, 69, 96, 74, 61, 64, 73, 55, 61, 81, 85, 49, 64, 63, 54, 56, 86, 47, 89, 69, 63, 63, 73, 64, 48, 61, 47, 54, 64, 63, 60, 60, 81, 63, 58, 54, 60, 79, 84, 67, 58, 68, 83, 90, 48, 61, 54, 79, 95, 65, 67, 71, 63, 74, 65, 65, 78, 64, 56, 55, 61, 90, 51, 74, 53, 62, 71, 53, 60, 78, 53, 67, 60, 63, 55, 74, 63, 86, 55, 137, 98, 64, 61, 56, 72, 64, 71, 53, 66, 77, 56, 58, 79, 65, 86, 76, 65, 76, 40, 55, 46, 69, 49, 43, 93, 78, 56, 70, 73, 75, 64, 51, 83, 70, 82, 70, 67, 76, 70, 51, 62, 64, 71, 66, 52, 130, 61, 84, 54, 83, 62, 58, 83, 57, 61, 47, 60, 
102, 57, 59, 59, 58, 62, 63, 62, 67, 70, 109, 49, 53, 101, 56, 59, 57, 72, 76, 54, 62, 61, 58, 68, 65, 50, 56, 76, 67, 68, 80, 58, 69, 58, 51, 63, 52, 72, 49, 70, 58, 52, 65, 75, 48, 121, 51, 55, 81, 71, 52, 54, 60, 70, 68, 46, 66, 56, 67, 84, 102, 56, 59, 65, 70, 46, 44, 51, 51, 62, 122, 62, 66, 52, 64, 93, 47, 55, 67, 73, 65, 37, 50, 57, 75, 75, 76, 57, 47, 67, 95, 60, 57, 63, 71, 50, 63, 74, 51, 65, 90, 87, 85, 81, 127, 71, 73, 64, 52, 62, 40, 83, 60, 83, 69, 59, 67, 101, 73, 43, 69, 53, 63, 64, 72, 72, 66, 64, 61, 69, 75, 72, 57, 76, 56, 53, 65, 87, 66, 59, 79, 64, 62, 72, 63, 96, 73, 53, 54, 55, 65, 59, 72, 71, 70, 69, 71, 42, 60, 63, 49, 56, 69, 106, 58, 63, 50, 58, 69, 57, 55, 90, 116, 102, 62, 101, 56, 59, 52, 57, 75, 75, 76, 54, 83, 68, 54, 61, 60, 57, 57, 58, 55, 55, 65, 61, 68, 65, 42, 71, 59, 73, 93, 55, 57, 60, 57, 68, 65, 49, 76, 83, 74, 65, 71, 68, 89, 55, 91, 63, 52, 57, 56, 61, 63, 61, 86, 56, 51, 75, 65, 66, 61, 57, 70, 85, 89, 60, 61, 73, 57, 59, 63, 65, 125, 52, 54, 60, 71, 59, 56, 76, 80, 63, 64, 77, 66, 83, 55, 58, 54, 43, 66, 61, 63, 81, 100, 72, 72, 69, 66, 43, 77, 63, 107, 61, 64, 69, 74, 70, 50, 56, 46, 59, 63, 60, 89, 62, 61, 71, 65, 59, 46, 76, 52, 63, 72, 58, 50, 65, 71, 81, 54, 72, 51, 57, 53, 51, 106, 69, 58, 60, 66, 76, 111, 74, 61, 56, 65, 75, 54, 71, 57, 66, 72, 54, 61, 44, 49, 90, 67, 63, 55, 46, 84, 69, 60, 67, 74, 106, 74, 66, 57, 69, 86, 51, 39, 67, 78, 66, 50, 69, 56, 55, 100, 69, 70, 61, 44, 70, 63, 91, 74, 70, 50, 81, 80, 78, 78, 50, 65, 65, 76, 46, 66, 58, 54, 62, 82, 68, 62, 46, 58, 60, 62, 45, 63, 63, 65, 64, 76, 69, 68, 52, 65, 120, 45, 88, 65, 106, 68, 63, 80, 59, 65, 67, 52, 58, 87, 64, 55, 85, 64, 56, 57, 67, 81, 60, 58, 60, 52, 61, 125, 55, 81, 61, 71, 57, 61, 57, 68, 116, 58, 69, 67, 62, 54, 90, 57, 56, 76, 60, 53, 76, 52, 50, 104, 60, 60, 45, 71, 58, 51, 53, 55, 66, 71, 65, 71, 69, 81, 62, 62, 78, 52, 60, 68, 52, 73, 60, 55, 59, 59, 68, 69, 51, 77, 61, 66, 63, 72, 57, 90, 49, 68, 67, 58, 46, 67, 72, 59, 68, 65, 94, 57, 55, 69, 81, 66, 56, 65, 65, 72, 60, 87, 49, 94, 106, 45, 50, 56, 56, 62, 67, 74, 66, 67, 92, 86, 69, 66, 81, 68, 55, 52, 75, 63, 65, 71, 85, 66, 62, 80, 49, 63, 52, 54, 53, 79, 73, 77, 69, 67, 59, 65, 61, 64, 73, 79, 109, 58, 51, 73, 80, 55, 55, 81, 62, 62, 68, 76, 58, 79, 64, 66, 58, 60, 72, 62, 69, 53, 62, 70, 55, 67, 55, 65, 79, 52, 59, 60, 63, 60, 62, 84, 56, 58, 61, 70, 74, 65, 57, 81, 90, 66, 61, 60, 101, 43, 59, 54, 61, 55, 76, 60, 58, 58, 84, 62, 51, 71, 58, 58, 64, 62, 67, 61, 42, 57, 60, 60, 114, 46, 59, 51, 51, 66, 70, 45, 68, 117, 67, 55, 60, 49, 62, 61, 62, 65, 54, 66, 63, 88, 56, 63, 55, 127, 65, 73, 63, 53, 66, 67, 72, 67, 70, 90, 76, 61, 65, 74, 55, 66, 81, 74, 53, 80, 55, 56, 50, 89, 59, 68, 64, 72, 79, 52, 53, 72, 65, 66, 53, 69, 88, 56, 53, 76, 54, 50, 85, 61, 72, 60, 57, 54, 66, 58, 54, 60, 57, 64, 48, 68, 70, 55, 67, 52, 62, 49, 62, 125, 64, 59, 57, 74, 46, 66, 65, 68, 58, 79, 68, 84, 66, 90, 50, 52, 66, 63, 49, 61, 58, 78, 61, 61, 53, 61, 62, 65, 76, 54, 63, 58, 77, 62, 58, 72, 64, 83, 45, 69, 70, 83, 78, 76, 85, 58, 70, 67, 55, 66, 68, 70, 68, 57, 53, 55, 60, 68, 57, 53, 68, 71, 53, 65, 67, 60, 60, 60, 53, 63, 65, 69, 71, 49, 66, 81, 56, 66, 57, 68, 61, 49, 53, 46, 71, 62, 53, 56, 40, 66, 65, 59, 52, 63, 71, 77, 59, 66, 65, 80, 65, 69, 84, 70, 71, 67, 57, 48, 68, 57, 59, 78, 88, 61, 55, 67, 55, 57, 66, 55, 63, 55, 64, 67, 60, 68, 71, 61, 79, 65, 65, 69, 57, 64, 60, 69, 76, 61, 58, 68, 57, 52, 86, 66, 51, 70, 82, 60, 52, 77, 72, 79, 65, 57, 60, 82, 61, 65, 81, 76, 76, 61, 66, 63, 94, 123, 63, 59, 65, 
86, 56, 73, 64, 61, 62, 72, 84, 58, 79, 80, 52, 89, 98, 60, 79, 59, 64, 67, 52, 71, 55, 62, 80, 62, 74, 52, 87, 68, 60, 52, 50, 47, 77, 65, 60, 66, 60, 57, 65, 59, 105, 64, 70, 75, 70, 62, 58, 63, 72, 65, 77, 63, 76, 54, 86, 67, 60, 74, 75, 76, 56, 58, 64, 57, 55, 46, 54, 79, 61, 74, 67, 68, 55, 55, 58, 75, 53, 77, 66, 52, 57, 63, 61, 61, 49, 62, 87, 60, 73, 71, 77, 58, 81, 58, 56, 75, 29, 53, 95, 54, 62, 66, 52, 68, 52, 60, 68, 70, 56, 73, 75, 51, 52, 49, 58, 66, 60, 65, 62, 68, 52, 76, 52, 69, 49, 60, 80, 72, 59, 61, 60, 64, 84, 67, 111, 59, 65, 59, 65, 60, 56, 78, 69, 48, 69, 47, 63, 114, 100, 67, 76, 86, 79, 66, 52, 80, 69, 66, 48, 60, 51, 64, 64, 64, 63, 57, 81, 62, 68, 52, 66, 52, 88, 74, 68, 88, 53, 72, 67, 86, 73, 66, 68, 68, 66, 63, 63, 73, 86, 90, 52, 63, 50, 61, 72, 55, 61, 76, 79, 78, 70, 76, 58, 100, 61, 62, 111, 47, 73, 69, 103, 87, 55, 67, 72, 47, 57, 55, 83, 64, 64, 62, 57, 61, 55, 62, 65, 63, 67, 80, 61, 62, 61, 59, 53, 56, 59, 65, 70, 64, 56, 60, 66, 76, 70, 54, 65, 98, 64, 64, 80, 63, 58, 60, 73, 60, 65, 65, 57, 98, 58, 46, 35, 71, 60, 63, 62, 74, 79, 71, 73, 74, 43, 72, 65, 60, 63, 92, 65, 93, 58, 60, 71, 66, 62, 51, 72, 60, 66, 56, 63, 64, 66, 62, 77, 48, 71, 77, 66, 61, 74, 70, 57, 90, 76, 65, 56, 73, 58, 60, 64, 59, 55, 74, 82, 59, 51, 62, 53, 53, 49, 85, 64, 69, 65, 68, 63, 75, 73, 64, 53, 64, 120, 84, 54, 71, 77, 78, 81, 81, 60, 67, 69, 71, 58, 57, 72, 71, 62, 102, 56, 67, 59, 92, 50, 67, 50, 57, 56, 66, 49, 85, 60, 52, 67, 62, 51, 72, 57, 87, 67, 68, 60, 64, 86, 58, 61, 55, 56, 75, 65, 57, 62, 63, 64, 62, 69, 76, 56, 59, 70, 60, 67, 66, 76, 61, 55, 76, 53, 68, 65, 75, 59, 62, 84, 96, 61, 61, 62, 73, 60, 52, 56, 77, 65, 61, 60, 61, 69, 60, 65, 64, 63, 52, 79, 83, 61, 95, 67, 65, 68, 50, 54, 58, 75, 72, 62, 82, 57, 80, 64, 61, 53, 85, 67, 75, 59, 65, 65, 63, 58, 64, 76, 67, 51, 60, 70, 80, 46, 94, 50, 63, 56, 62, 67, 101, 51, 50, 59, 62, 58, 65, 58, 77, 58, 65, 72, 57, 57, 105, 69, 63, 80, 52, 64, 65, 67, 64, 54, 53, 52, 60, 69, 52, 66, 63, 68, 60, 79, 48, 58, 77, 70, 74, 57, 58, 74, 52, 49, 49, 72, 59, 67, 48, 92, 56, 62, 69, 84, 69, 68, 59, 93, 52, 62, 56, 63, 58, 61, 68, 60, 68, 53, 58, 88, 72, 53, 68, 46, 56, 68, 66, 55, 63, 80, 58, 60, 68, 58, 92, 61, 70, 110, 62, 67, 63, 83, 89, 57, 79, 56, 67, 63, 104, 54, 67, 54, 50, 75, 53, 55, 65, 67, 35, 57, 37, 39, 74, 60, 91, 72, 100, 61, 61, 53, 53, 50, 74, 46, 69, 81, 45, 64, 57, 74, 54, 47, 44, 125, 53, 50, 76, 65, 101, 123, 72, 55, 57, 82, 70, 60, 105, 59, 53, 60, 45, 70, 66, 79, 43, 57, 69, 52, 60, 65, 53, 57, 59, 62, 76, 62, 91, 47, 82, 73, 69, 68, 73, 78, 91, 66, 81, 73, 58, 39, 59, 75, 49, 57, 79, 78, 80, 58, 75, 53, 54, 60, 63, 72, 61, 80, 53, 66, 76, 108, 76, 57, 66, 53, 65, 69, 50, 54, 48, 66, 56, 67, 91, 63, 106, 53, 69, 89, 51, 49, 40, 74, 63, 49, 69, 70, 49, 73, 59, 77, 94, 58, 80, 80, 117, 73, 65, 67, 54, 71, 79, 67, 76, 53, 65, 72, 55, 72, 55, 107, 45, 62, 49, 78, 48, 44, 72, 75, 87, 71, 73, 104, 89, 126, 58, 40, 79, 74, 85, 64, 75, 77, 63, 90, 61, 99, 54, 50, 62, 53, 63, 99, 75, 90, 49, 73, 52, 66, 91, 45, 51, 42, 55, 47, 63, 61, 62, 76, 64, 67, 69, 42, 51, 92, 46, 56, 59, 56, 55, 72, 59, 57, 52, 70, 59, 71, 47, 43, 63, 55, 66, 97, 60, 93, 74, 63, 57, 51, 57, 59, 63, 59, 59, 56, 62, 73, 62, 80, 50, 59, 76, 50, 72, 80, 81, 45, 53, 56, 75, 71, 81, 65, 68, 57, 86, 55, 61, 74, 41, 66, 55, 83, 72, 67, 72, 90, 57, 80, 54, 76, 47, 65, 60, 113, 56, 50, 73, 53, 63, 55, 71, 62, 91, 50, 60, 56, 45, 58, 76, 86, 56, 67, 70, 100, 52, 60, 74, 72, 46, 119, 80, 55, 72, 56, 82, 57, 82, 72, 91, 77, 47, 55, 45, 79, 54, 89, 
69, 89, 62, 85, 66, 47, 66, 83, 58, 46, 62, 61, 58, 75, 63, 74, 59, 68, 62, 37, 45, 55, 58, 65, 62, 71, 85, 65, 73, 60, 56, 51, 56, 67, 64, 66, 59, 64, 75, 72, 46, 71, 59, 53, 50, 59, 53, 90, 47, 64, 56, 43, 61, 75, 68, 74, 62, 102, 75, 63, 117, 60, 61, 90, 62, 76, 60, 102, 58, 75, 59, 74, 61, 88, 56, 76, 57, 85, 63, 142, 66, 87, 72, 86, 51, 49, 62, 61, 49, 60, 98, 65, 115, 52, 37, 90, 65, 91, 92, 53, 84, 64, 55, 47, 70, 58, 72, 65, 68, 59, 67, 61, 65, 67, 68, 72, 55, 63, 100, 53, 54, 64, 40, 91, 60, 57, 64, 60, 52, 103, 69, 40, 66, 52, 39, 79, 61, 75, 94, 111, 42, 35, 68, 83, 56, 83, 114, 57, 57, 56, 70, 67, 69, 88, 64, 50, 79, 66, 89, 64, 64, 109, 59, 82, 64, 67, 66, 55, 38, 46, 65, 72, 82, 62, 62, 70, 59, 74, 58, 67, 75, 69, 71, 74, 53, 42, 54, 70, 59, 93, 70, 65, 53, 77, 82, 57, 63, 57, 58, 58, 74, 84, 79, 64, 50, 106, 109, 57, 53, 56, 72, 63, 44, 56, 52, 82, 59, 63, 61, 60, 71, 51, 65, 106, 91, 66, 50, 92, 55, 75, 52, 60, 66, 59, 53, 65, 104, 57, 48, 57, 63, 56, 61, 72, 68, 56, 56, 74, 54, 91, 56, 55, 48, 54, 65, 54, 54, 95, 43, 71, 75, 93, 58, 71, 28, 65, 52, 71, 64, 68, 73, 66, 72, 56, 65, 83, 87, 57, 69, 66, 96, 84, 73, 78, 81, 68, 88, 63, 46, 60, 58, 54, 62, 70, 72, 73, 66, 66, 36, 72, 67, 61, 65, 55, 69, 61, 87, 57, 71, 70, 48, 42, 70, 65, 75, 86, 57, 49, 95, 54, 46, 48, 56, 63, 58, 64, 46, 97, 75, 33, 63, 81, 68, 54, 56, 54, 58, 47, 79, 68, 73, 58, 67, 59, 46, 94, 56, 47, 44, 45, 73, 70, 108, 66, 61, 93, 67, 82, 62, 57, 57, 54, 51, 71, 73, 68, 69, 61, 46, 58, 62, 75, 79, 58, 53, 89, 56, 67, 54, 63, 51, 88, 74, 91, 59, 63, 57, 83, 65, 57, 57, 73, 78, 66, 66, 58, 58, 59, 51, 62, 60, 64, 67, 53, 50, 56, 59, 83, 60, 47, 83, 73, 65, 70, 70, 54, 59, 69, 54, 67, 76, 53, 56, 90, 63, 96, 61, 76, 82, 60, 65, 47, 68, 70, 56, 43, 66, 68, 59, 73, 75, 46, 60, 84, 74, 77, 72, 71, 49, 54, 103, 64, 75, 61, 52, 58, 66, 63, 62, 64, 62, 62, 79, 69, 53, 56, 78, 79, 49, 65, 40, 89, 77, 51, 36, 66, 47, 70, 49, 63, 59, 67, 60, 52, 59, 49, 55, 56, 81, 54, 73, 60, 59, 65, 82, 65, 77, 65, 45, 71, 42, 94, 56, 50, 53, 65, 56, 58, 82, 82, 61, 94, 31, 109, 58, 79, 39, 64, 71, 87, 66, 45, 66, 46, 53, 59, 82, 62, 57, 69, 59, 57, 53, 85, 59, 46, 57, 83, 45, 126, 64, 63, 57, 59, 74, 57, 60, 44, 72, 56, 51, 45, 57, 97, 66, 124, 62, 58, 87, 58, 74, 26, 69, 68, 74, 68, 76, 157, 75, 53, 81, 36, 65, 70, 33, 52, 67, 88, 58, 115, 69, 54, 69, 52, 84, 47, 51, 72, 57, 52, 56, 95, 56, 64, 65, 84, 68, 47, 54, 69, 66, 48, 55, 48, 59, 85, 52, 68, 62, 90, 63, 55, 84, 50, 62, 46, 79, 47, 70, 91, 81, 67, 49, 51, 83, 50, 47, 74, 63, 75, 65, 49, 62, 56, 73, 115, 75, 50, 69, 86, 74, 54, 59, 81, 65, 70, 66, 76, 70, 87, 72, 75, 57, 68, 72, 89, 80, 53, 72, 69, 75, 62, 70, 78, 85, 69, 66, 62, 47, 72, 78, 69, 58, 61, 85, 81, 62, 51, 54, 102, 73, 63, 84, 61, 69, 64, 71, 77, 54, 75, 95, 64, 50, 72, 70, 72, 72, 46, 52, 66, 48, 59, 65, 46, 52, 66, 88, 72, 63, 52, 56, 54, 72, 56, 35, 76, 61, 65, 60, 48, 80, 52, 52, 67, 58, 79, 58, 71, 46, 78, 79, 74, 81, 90, 69, 44, 49, 67, 83, 56, 80, 42, 67, 76, 51, 81, 79, 73, 67, 79, 69, 70, 69, 51, 52, 46, 62, 66, 57, 77, 64, 85, 55, 60, 79, 50, 46, 67, 70, 163, 44, 75, 81, 45, 66, 65, 46, 84, 56, 69, 70, 68, 60, 64, 62, 56, 65, 74, 58, 40, 65, 63, 43, 69, 60, 57, 57, 58, 51, 61, 59, 86, 70, 76, 55, 75, 55, 61, 82, 76, 61, 84, 104, 70, 67, 67, 83, 55, 59, 64, 77, 53, 68, 78, 81, 88, 76, 71, 48, 48, 56, 56, 59, 87, 67, 61, 72, 54, 58, 62, 63, 61, 71, 69, 56, 46, 112, 50, 66, 72, 48, 56, 51, 55, 53, 61, 70, 56, 49, 70, 86, 63, 71, 53, 70, 53, 87, 49, 93, 70, 93, 87, 81, 71, 59, 61, 67, 60, 50, 95, 72, 58, 
49, 80, 73, 48, 78, 66, 54, 57, 72, 55, 40, 66, 59, 59, 67, 73, 72, 60, 55, 67, 46, 60, 57, 65, 56, 46, 70, 64, 50, 64, 59, 64, 56, 57, 57, 57, 70, 68, 68, 48, 65, 56, 54, 58, 78, 72, 114, 48, 64, 67, 65, 74, 67, 70, 49, 56, 52, 57, 55, 70, 53, 71, 55, 60, 87, 78, 86, 75, 70, 69, 51, 53, 61, 54, 56, 57, 70, 46, 67, 55, 34, 68, 41, 89, 75, 93, 67, 66, 76, 69, 63, 60, 65, 73, 64, 60, 58, 59, 50, 62, 81, 58, 74, 85, 45, 73, 58, 91, 55, 61, 63, 51, 33, 61, 60, 72, 64, 56, 47, 61, 65, 81, 78, 62, 64, 60, 66, 64, 60, 59, 69, 60, 63, 64, 48, 70, 60, 81, 81, 61, 114, 48, 58, 54, 70, 59, 69, 75, 71, 51, 94, 90, 49, 52, 60, 61, 52, 93, 60, 65, 64, 44, 67, 70, 71, 50, 64, 55, 89, 64, 69, 45, 90, 51, 65, 62, 71, 67, 80, 63, 37, 57, 57, 106, 83, 60, 27, 147, 53, 70, 52, 79, 61, 88, 87, 81, 64, 71, 64, 113, 83, 59, 70, 54, 75, 54, 45, 54, 73, 61, 90, 75, 67, 64, 61, 43, 83, 71, 46, 68, 60, 57, 60, 65, 59, 46, 47, 94, 77, 80, 55, 71, 66, 61, 59, 79, 59, 79, 47, 67, 88, 40, 58, 93, 55, 53, 65, 77, 74, 54, 59, 59, 49, 68, 46, 75, 70, 71, 47, 67, 48, 73, 76, 72, 78, 68, 92, 86, 60, 67, 57, 63, 72, 54, 57, 70, 52, 61, 39, 69, 57, 54, 78, 68, 65, 34, 68, 80, 60, 38, 51, 67, 73, 59, 68, 51, 57, 54, 57, 56, 50, 63, 69, 82, 59, 74, 64, 69, 53, 50, 60, 64, 63, 57, 54, 60, 76, 73, 64, 54, 52, 73, 66, 73, 83, 52, 93, 74, 53, 72, 52, 47, 57, 57, 50, 94, 83, 53, 55, 51, 63, 56, 56, 76, 69, 104, 62, 55, 52, 70, 76, 78, 70, 88, 58, 59, 80, 64, 69, 47, 62, 63, 46, 58, 41, 91, 74, 75, 63, 55, 81, 65, 63, 79, 102, 64, 60, 52, 54, 58, 60, 49, 67, 65, 74, 48, 72, 49, 68, 96, 65, 100, 48, 55, 71, 91, 49, 41, 51, 68, 63, 60, 80, 83, 63, 53, 64, 102, 81, 51, 60, 40, 67, 76, 56, 67, 46, 71, 68, 74, 71, 84, 54, 49, 51, 64, 50, 63, 70, 79, 57, 74, 66, 67, 64, 63, 74, 65, 57, 57, 52, 99, 65, 56, 62, 41, 53, 72, 74, 73, 68, 76, 55, 56, 61, 63, 73, 69, 53, 73, 52, 87, 58, 52, 73, 52, 90, 82, 55, 73, 58, 68, 59, 55, 53, 49, 57, 56, 60, 53, 62, 80, 65, 74, 75, 64, 57, 103, 63, 65, 84, 46, 70, 76, 53, 66, 66, 76, 44, 59, 67, 71, 51, 79, 43, 68, 75, 58, 63, 76, 70, 58, 55, 91, 70, 68, 59, 68, 62, 56, 52, 68, 60, 96, 77, 90, 91, 101, 53, 47, 65, 57, 66, 107, 71, 74, 53, 52, 50, 54, 81, 90, 59, 54, 62, 79, 56, 76, 65, 80, 78, 64, 63, 87, 62, 45, 67, 75, 51, 83, 47, 53, 62, 67, 57, 77, 67, 56, 61, 61, 78, 113, 46, 46, 37, 57, 77, 60, 63, 63, 78, 114, 41, 76, 74, 78, 60, 33, 83, 89, 91, 63, 66, 44, 52, 54, 53, 68, 78, 45, 69, 78, 75, 65, 38, 55, 151, 38, 53, 55, 59, 57, 74, 64, 72, 29, 71, 49, 56, 70, 49, 60, 71, 78, 83, 82, 63, 81, 44, 55, 52, 47, 71, 43, 81, 66, 69, 75, 75, 69, 73, 113, 106, 54, 61, 62, 75, 78, 54, 89, 48, 66, 53, 76, 60, 58, 71, 65, 101, 49, 69, 85, 55, 75, 59, 49, 77, 67, 68, 72, 63, 47, 66, 82, 63, 71, 48, 55, 80, 57, 70, 54, 61, 75, 57, 55, 59, 69, 75, 48, 70, 51, 78, 86, 78, 102, 53, 72, 80, 67, 94, 74, 59, 64, 78, 47, 48, 70, 67, 80, 61, 54, 105, 69, 50, 58, 46, 63, 75, 89, 81, 56, 63, 134, 61, 58, 58, 142, 88, 92, 53, 49, 60, 66, 49, 78, 82, 102, 54, 66, 53, 69, 94, 75, 65, 86, 55, 65, 71, 55, 65, 69, 70, 72, 64, 61, 61, 63, 51, 68, 74, 80, 55, 66, 59, 50, 74, 72, 65, 77, 75, 71, 66, 50, 71, 89, 66, 67, 60, 72, 55, 57, 89, 54, 81, 70, 91, 46, 67, 94, 52, 67, 59, 62, 74, 84, 51, 60, 103, 54, 55, 79, 44, 59, 64, 77, 73, 58, 68, 57, 94, 74, 54, 62, 68, 87, 65, 61, 56, 102, 78, 113, 54, 57, 52, 71, 52, 58, 78, 70, 67, 46, 72, 57, 86, 57, 95, 52, 68, 50, 85, 56, 49, 74, 61, 50, 90, 85, 61, 57, 54, 74, 65, 71, 42, 75, 70, 79, 80, 63, 68, 71, 80, 53, 77, 66, 38, 49, 57, 67, 81, 73, 107, 125, 51, 88, 98, 47, 52, 44, 51, 
48, 48, 79, 62, 66, 54, 56, 57, 87, 70, 69, 70, 44, 82, 80, 81, 78, 68, 64, 48, 48, 46, 56, 67, 52, 102, 53, 65, 68, 62, 57, 77, 78, 79, 61, 112, 60, 43, 84, 73, 83, 56, 71, 56, 55, 77, 64, 43, 69, 79, 44, 85, 58, 87, 58, 66, 59, 87, 47, 84, 48, 62, 94, 67, 71, 68, 49, 91, 71, 70, 49, 39, 50, 67, 66, 51, 40, 53, 55, 49, 80, 93, 60, 68, 70, 61, 49, 68, 56, 84, 78, 59, 67, 62, 40, 97, 51, 71, 77, 66, 67, 83, 59, 83, 57, 49, 73, 60, 74, 107, 84, 82, 58, 81, 58, 76, 58, 65, 56, 68, 50, 53, 53, 83, 70, 79, 91, 71, 54, 78, 70, 74, 132, 66, 68, 52, 73, 49, 97, 66, 78, 91, 51, 91, 96, 52, 46, 57, 58, 53, 61, 50, 58, 38, 66, 91, 45, 53, 58, 50, 68, 63, 95, 60, 48, 43, 69, 64, 68, 67, 51, 71, 103, 52, 79, 56, 64, 68, 69, 58, 62, 67, 54, 59, 45, 88, 89, 73, 45, 50, 84, 57, 87, 60, 65, 59, 100, 80, 58, 112, 60, 60, 65, 40, 68, 50, 68, 53, 81, 75, 68, 83, 65, 110, 55, 84, 76, 50, 36, 75, 58, 81, 52, 71, 65, 65, 84, 87, 75, 56, 82, 57, 68, 66, 65, 57, 59, 65, 84, 62, 70, 56, 73, 57, 51, 58, 51, 68, 61, 67, 66, 74, 56, 72, 58, 72, 67, 48, 61, 57, 63, 70, 68, 71, 57, 74, 80, 49, 53, 53, 56, 53, 68, 80, 91, 82, 81, 50, 67, 85, 65, 79, 69, 65, 62, 76, 83, 59, 55, 66, 52, 87, 54, 56, 69, 57, 72, 54, 68, 54, 49, 72, 64, 53, 91, 75, 70, 64, 70, 74, 75, 57, 70, 52, 77, 64, 61, 42, 65, 53, 69, 59, 95, 64, 82, 54, 60, 58, 71, 44, 73, 85, 50, 49, 79, 75, 51, 81, 55, 61, 73, 61, 109, 53, 69, 61, 50, 51, 73, 86, 60, 61, 55, 77, 52, 82, 84, 90, 76, 98, 81, 62, 68, 101, 72, 65, 67, 68, 65, 55, 100, 70, 64, 58, 43, 80, 51, 69, 54, 81, 48, 61, 51, 89, 67, 61, 54, 50, 54, 46, 55, 67, 58, 52, 53, 59, 76, 51, 65, 59, 55, 78, 66, 68, 61, 52, 67, 81, 59, 74, 50, 69, 52, 63, 42, 51, 43, 65, 54, 66, 63, 69, 61, 65, 51, 45, 51, 61, 59, 66, 41, 74, 66, 67, 89, 79, 72, 72, 79, 66, 83, 65, 60, 53, 72, 82, 83, 51, 80, 50, 73, 51, 40, 43, 56, 62, 86, 73, 51, 61, 67, 85, 72, 83, 55, 96, 57, 64, 57, 64, 55, 78, 61, 72, 77, 78, 45, 68, 78, 67, 48, 74, 79, 64, 49, 49, 91, 70, 62, 78, 56, 71, 47, 60, 63, 60, 64, 57, 83, 66, 68, 51, 74, 56, 82, 63, 78, 55, 98, 74, 72, 66, 65, 36, 73, 54, 115, 53, 68, 59, 74, 81, 58, 54, 55, 57, 56, 73, 39, 80, 64, 65, 74, 53, 76, 71, 60, 72, 47, 67, 76, 64, 55, 81, 71, 66, 51, 71, 53, 63, 39, 65, 69, 62, 78, 71, 49, 63, 75, 55, 87, 56, 55, 42, 65, 61, 59, 50, 111, 73, 55, 67, 68, 52, 54, 75, 67, 96, 79, 68, 70, 80, 57, 52, 53, 55, 57, 74, 64, 61, 51, 73, 63, 70, 90, 92, 47, 90, 68, 62, 99, 51, 55, 87, 55, 50, 71, 71, 76, 67, 72, 60, 89, 48, 74, 82, 69, 71, 75, 79, 50, 61, 43, 84, 67, 51, 77, 74, 63, 66, 59, 77, 53, 43, 61, 75, 67, 66, 69, 64, 66, 51, 75, 71, 64, 66, 68, 70, 51, 69, 54, 40, 69, 47, 76, 64, 63, 94, 61, 67, 83, 75, 69, 54, 70, 62, 73, 60, 79, 70, 73, 60, 83, 76, 84, 51, 84, 77, 77, 84, 68, 81, 69, 63, 62, 49, 84, 60, 83, 71, 69, 101, 64, 78, 46, 84, 77, 73, 65, 60, 71, 70, 73, 55, 71, 64, 69, 51, 44, 57, 95, 67, 81, 77, 72, 62, 68, 53, 72, 94, 66, 66, 55, 58, 64, 69, 52, 53, 50, 76, 51, 67, 68, 65, 60, 93, 60, 76, 83, 74, 57, 72, 83, 50, 45, 88, 47, 71, 57, 80, 55, 86, 56, 52, 59, 69, 62, 54, 65, 77, 84, 68, 92, 75, 63, 63, 68, 58, 58, 64, 55, 39, 53, 65, 74, 74, 44, 101, 76, 70, 69, 77, 56, 59, 64, 65, 80, 58, 61, 45, 63, 63, 73, 100, 84, 59, 89, 86, 96, 67, 52, 83, 55, 63, 95, 85, 57, 68, 79, 62, 72, 60, 56, 48, 39, 60, 70, 85, 74, 54, 56, 61, 93, 77, 56, 91, 91, 62, 71, 53, 60, 68, 70, 65, 64, 79, 48, 60, 58, 56, 39, 57, 63, 64, 56, 71, 76, 76, 58, 70, 69, 46, 107, 60, 80, 69, 78, 64, 59, 74, 59, 62, 43, 53, 61, 65, 84, 71, 90, 57, 64, 79, 51, 57, 67, 78, 67, 72, 75, 63, 66, 49, 64, 80, 
66, 61, 57, 58, 53, 52, 44, 66, 73, 96, 73, 76, 69, 62, 75, 58, 83, 78, 67, 72, 63, 72, 54, 61, 75, 39, 76, 59, 50, 56, 57, 98, 62, 52, 49, 57, 84, 66, 63, 83, 69, 87, 44, 67, 65, 57, 49, 73, 60, 55, 62, 84, 50, 53, 53, 73, 65, 66, 54, 74, 63, 63, 50, 58, 77, 69, 69, 71, 63, 84, 72, 70, 60, 73, 61, 85, 84, 77, 67, 62, 68, 61, 53, 67, 77, 62, 75, 85, 69, 74, 50, 58, 59, 56, 112, 73, 46, 55, 63, 63, 70, 74, 77, 65, 84, 54, 57, 59, 72, 57, 59, 41, 68, 57, 64, 36, 77, 61, 87, 69, 62, 60, 54, 84, 60, 50, 64, 80, 81, 61, 88, 41, 62, 56, 62, 86, 60, 62, 51, 59, 70, 56, 65, 56, 62, 76, 69, 95, 74, 131, 66, 66, 67, 55, 70, 70, 85, 65, 54, 62, 60, 59, 58, 98, 54, 66, 70, 45, 53, 91, 59, 56, 72, 77, 56, 59, 82, 75, 70, 53, 69, 63, 77, 73, 77, 67, 101, 59, 63, 75, 53, 70, 61, 69, 57, 65, 68, 63, 61, 63, 69, 50, 80, 86, 63, 51, 74, 74, 87, 56, 68, 81, 59, 60, 81, 56, 70, 82, 64, 70, 79, 60, 68, 91, 44, 55, 69, 73, 50, 64, 47, 63, 73, 75, 87, 47, 88, 75, 69, 53, 63, 52, 29, 65, 54, 58, 50, 67, 57, 58, 64, 86, 53, 59, 58, 102, 96, 95, 53, 48, 92, 68, 44, 83, 48, 66, 58, 76, 68, 51, 52, 68, 63, 87, 68, 95, 65, 57, 54, 65, 54, 109, 66, 61, 72, 56, 101, 71, 59, 68, 76, 69, 68, 46, 75, 64, 66, 75, 65, 61, 69, 62, 50, 47, 54, 49, 80, 67, 84, 65, 83, 78, 73, 55, 78, 91, 55, 78, 58, 94, 64, 64, 59, 57, 61, 93, 78, 60, 34, 72, 33, 86, 72, 53, 74, 56, 54, 66, 63, 62, 93, 48, 62, 72, 45, 77, 68, 57, 74, 103, 85, 59, 56, 77, 49, 60, 57, 81, 49, 67, 52, 100, 52, 49, 58, 56, 68, 59, 79, 83, 50, 81, 79, 74, 72, 69, 72, 57, 61, 67, 58, 73, 58, 62, 64, 57, 73, 50, 47, 74, 86, 68, 52, 57, 94, 78, 53, 65, 53, 67, 57, 86, 62, 68, 58, 52, 63, 92, 82, 103, 49, 64, 64, 74, 38, 39, 67, 81, 81, 49, 57, 61, 73, 82, 51, 77, 54, 65, 101, 65, 62, 66, 47, 42, 61, 60, 56, 79, 39, 56, 53, 61, 82, 83, 60, 100, 48, 72, 49, 58, 77, 54, 70, 75, 72, 65, 73, 79, 55, 68, 62, 51, 63, 54, 44, 79, 73, 65, 45, 62, 77, 82, 55, 82, 55, 80, 67, 64, 54, 101, 57, 78, 81, 48, 54, 70, 72, 56, 87, 52, 67, 54, 49, 84, 91, 49, 57, 67, 63, 81, 67, 64, 39, 54, 127, 91, 62, 73, 78, 99, 69, 58, 71, 65, 61, 54, 48, 78, 63, 63, 67, 51, 97, 78, 59, 57, 69, 80, 71, 89, 81, 97, 60, 58, 51, 76, 73, 55, 56, 70, 43, 72, 83, 61, 64, 95, 66, 64, 73, 76, 67, 57, 68, 37, 39, 49, 71, 73, 52, 66, 58, 67, 80, 45, 53, 68, 53, 81, 97, 61, 53, 76, 59, 60, 60, 42, 88, 66, 84, 67, 66, 58, 68, 72, 58, 69, 86, 63, 46, 54, 87, 60, 57, 69, 104, 71, 76, 67, 58, 64, 62, 58, 86, 115, 50, 37, 67, 61, 68, 50, 63, 85, 55, 38, 43, 63, 61, 66, 63, 50, 60, 59, 75, 89, 76, 68, 71, 67, 71, 57, 63, 65, 51, 53, 96, 72, 66, 50, 79, 66, 59, 56, 99, 78, 94, 75, 45, 47, 44, 69, 48, 50, 93, 50, 84, 63, 51, 35, 70, 44, 89, 68, 69, 51, 68, 55, 59, 85, 88, 70, 62, 65, 71, 62, 48, 138, 51, 92, 61, 68, 67, 62, 62, 70, 68, 46, 87, 60, 51, 50, 67, 77, 72, 77, 48, 72, 60, 44, 49, 63, 53, 42, 67, 73, 82, 78, 62, 60, 62, 75, 61, 68, 78, 71, 95, 62, 76, 67, 64, 46, 52, 71, 71, 52, 80, 63, 55, 67, 85, 64, 104, 75, 41, 67, 60, 59, 42, 54, 60, 72, 45, 83, 70, 52, 66, 47, 64, 71, 56, 41, 74, 58, 54, 57, 64, 38, 95, 75, 81, 47, 73, 47, 59, 63, 90, 36, 71, 79, 52, 35, 72, 65, 60, 57, 53, 58, 59, 62, 89, 50, 62, 75, 40, 82, 56, 66, 72, 61, 75, 101, 63, 54, 64, 65, 88, 56, 55, 112, 49, 54, 64, 68, 57, 58, 52, 53, 97, 64, 112, 63, 68, 44, 99, 75, 104, 85, 102, 69, 81, 43, 60, 83, 60, 69, 75, 67, 77, 60, 56, 71, 69, 60, 68, 53, 66, 36, 83, 48, 62, 51, 71, 60, 67, 61, 59, 61, 75, 46, 70, 59, 69, 70, 61, 65, 93, 74, 59, 68, 62, 93, 63, 46, 63, 49, 61, 41, 48, 60, 41, 48, 107, 67, 69, 56, 55, 133, 58, 63, 59, 63, 86, 64, 
75, 61, 68, 58, 62, 57, 71, 77, 70, 57, 62, 69, 68, 77, 63, 55, 74, 66, 50, 67, 79, 65, 50, 68, 80, 83, 76, 74, 57, 65, 78, 69, 61, 78, 78, 54, 72, 69, 57, 68, 64, 60, 62, 54, 71, 98, 95, 72, 63, 58, 64, 87, 77, 61, 74, 71, 74, 49, 76, 47, 105, 37, 43, 59, 58, 63, 83, 64, 68, 54, 71, 64, 48, 58, 100, 67, 67, 60, 89, 94, 69, 79, 55, 48, 77, 84, 72, 62, 68, 57, 54, 72, 55, 46, 61, 74, 65, 85, 60, 34, 80, 49, 66, 80, 85, 56, 79, 58, 81, 61, 79, 54, 50, 41, 80, 72, 69, 63, 53, 51, 74, 78, 69, 59, 64, 68, 53, 51, 65, 72, 77, 66, 47, 67, 53, 69, 64, 75, 61, 68, 79, 71, 80, 50, 66, 73, 54, 62, 55, 54, 54, 45, 74, 61, 72, 51, 81, 59, 64, 77, 78, 61, 80, 46, 63, 63, 53, 48, 50, 52, 109, 53, 70, 71, 59, 55, 58, 75, 56, 55, 41, 75, 70, 88, 82, 94, 52, 53, 81, 73, 85, 63, 64, 57, 94, 67, 49, 80, 69, 81, 55, 65, 66, 61, 74, 83, 63, 67, 70, 90, 76, 75, 96, 50, 48, 64, 64, 77, 53, 59, 58, 53, 56, 58, 82, 92, 113, 58, 53, 106, 87, 54, 45, 52, 83, 58, 63, 78, 61, 54, 84, 65, 70, 76, 66, 59, 42, 68, 51, 82, 69, 50, 49, 52, 87, 61, 69, 102, 60, 45, 48, 43, 79, 80, 58, 93, 70, 57, 56, 63, 64, 48, 57, 77, 84, 67, 98, 67, 88, 66, 46, 70, 68, 83, 113, 80, 97, 48, 58, 103, 67, 90, 38, 62, 62, 72, 78, 53, 57, 68, 68, 74, 98, 69, 75, 75, 72, 69, 47, 60, 57, 65, 57, 59, 74, 117, 58, 68, 70, 52, 76, 55, 55, 69, 52, 80, 46, 78, 82, 75, 38, 68, 86, 49, 46, 44, 62, 78, 57, 78, 45, 56, 59, 111, 94, 37, 78, 80, 38, 55, 52, 66, 51, 73, 48, 59, 81, 70, 66, 77, 78, 58, 55, 84, 60, 49, 55, 44, 83, 83, 66, 49, 82, 91, 52, 61, 114, 72, 63, 57, 75, 51, 75, 62, 80, 45, 90, 53, 66, 65, 41, 62, 59, 65, 68, 95, 81, 47, 123, 58, 132, 93, 53, 73, 62, 102, 60, 53, 47, 70, 63, 100, 77, 78, 71, 54, 39, 50, 66, 59, 63, 51, 75, 70, 63, 60, 67, 62, 75, 67, 89, 77, 39, 55, 48, 81, 77, 57, 54, 63, 69, 73, 66, 80, 26, 55, 84, 66, 48, 50, 82, 49, 52, 59, 108, 60, 60, 51, 84, 88, 56, 41, 71, 78, 61, 56, 61, 80, 63, 54, 72, 68, 70, 39, 46, 66, 50, 63, 77, 56, 58, 80, 75, 70, 51, 66, 59, 57, 76, 77, 70, 74, 92, 64, 72, 76, 33, 111, 67, 52, 74, 66, 59, 79, 81, 52, 57, 69, 77, 81, 50, 88, 67, 56, 55, 53, 60, 92, 58, 44, 59, 63, 40, 119, 75, 64, 72, 62, 74, 45, 60, 58, 56, 91, 73, 63, 72, 92, 94, 60, 45, 60, 71, 54, 52, 42, 49, 52, 53, 62, 102, 83, 50, 76, 56, 58, 67, 80, 39, 68, 54, 46, 67, 53, 66, 56, 59, 49, 74, 55, 88, 72, 63, 68, 96, 88, 108, 49, 57, 73, 82, 94, 74, 93, 40, 77, 76, 58, 31, 72, 45, 53, 112, 62, 66, 68, 56, 61, 59, 100, 66, 67, 55, 66, 70, 67, 78, 45, 40, 47, 56, 48, 44, 64, 53, 40, 53, 64, 58, 47, 57, 63, 53, 71, 88, 61, 87, 79, 92, 45, 85, 92, 73, 55, 76, 94, 65, 58, 83, 69, 106, 59, 78, 74, 86, 65, 52, 86, 60, 61, 87, 60, 64, 54, 80, 65, 50, 41, 69, 102, 45, 56, 58, 59, 57, 86, 62, 78, 42, 86, 79, 82, 80, 62, 57, 46, 65, 67, 77, 63, 67, 87, 61, 44, 57, 46, 87, 44, 77, 69, 37, 52, 56, 71, 58, 50, 87, 69, 62, 64, 42, 29, 53, 59, 78, 60, 105, 65, 79, 58, 97, 42, 54, 70, 64, 67, 66, 75, 55, 118, 77, 59, 60, 54, 68, 65, 65, 60, 79, 67, 74, 61, 69, 98, 106, 62, 57, 45, 101, 38, 60, 61, 68, 86, 68, 76, 56, 79, 57, 59, 82, 75, 111, 44, 71, 35, 61, 35, 61, 19, 65, 67, 60, 85, 52, 46, 56, 54, 78, 87, 64, 106, 68, 64, 60, 56, 49, 86, 54, 70, 108, 55, 82, 90, 125, 68, 52, 40, 99, 60, 29, 94, 66, 74, 50, 55, 56, 58, 59, 78, 77, 60, 67, 67, 48, 94, 78, 76, 64, 55, 38, 67, 66, 68, 97, 54, 92, 74, 44, 53, 42, 50, 67, 62, 69, 120, 66, 61, 61, 67, 68, 49, 73, 64, 64, 58, 47, 91, 47, 70, 71, 74, 62, 77, 55, 61, 50, 78, 54, 80, 73, 68, 46, 49, 78, 50, 99, 80, 53, 78, 49, 91, 82, 100, 125, 38, 75, 68, 57, 80, 104, 82, 58, 61, 80, 74, 48, 58, 100, 
90, 70, 60, 75, 61, 96, 82, 60, 48, 53, 68, 57, 52, 70, 83, 62, 94, 39, 75, 67, 61, 77, 54, 91, 69, 61, 50, 65, 58, 70, 79, 57, 65, 50, 80, 49, 49, 51, 80, 74, 84, 52, 55, 45, 85, 50, 72, 67, 48, 52, 69, 60, 47, 64, 84, 93, 72, 54, 87, 74, 67, 70, 64, 81, 60, 60, 55, 78, 79, 110, 65, 68, 90, 52, 60, 44, 71, 66, 75, 48, 30, 68, 47, 65, 54, 79, 65, 46, 80, 50, 95, 42, 84, 64, 64, 67, 70, 60, 74, 64, 79, 71, 68, 62, 77, 60, 75, 43, 54, 58, 71, 71, 98, 89, 70, 55, 54, 70, 64, 48, 63, 85, 41, 29, 61, 40, 45, 87, 87, 58, 70, 60, 63, 75, 66, 62, 68, 75, 96, 81, 66, 48, 65, 81, 67, 67, 43, 69, 83, 59, 68, 68, 66, 65, 90, 72, 70, 89, 56, 41, 50, 93, 57, 64, 56, 83, 74, 44, 103, 52, 64, 65, 51, 54, 50, 65, 66, 44, 60, 62, 56, 79, 72, 68, 69, 85, 71, 68, 65, 56, 96, 73, 58, 71, 87, 66, 53, 80, 61, 77, 85, 84, 52, 80, 58, 38, 78, 63, 64, 70, 57, 69, 64, 46, 40, 70, 48, 56, 51, 53, 80, 70, 59, 71, 71, 73, 84, 80, 60, 80, 60, 52, 49, 66, 84, 69, 78, 67, 55, 68, 74, 56, 62, 61, 77, 79, 59, 85, 88, 88, 76, 65, 102, 55, 53, 109, 73, 75, 80, 92, 72, 58, 58, 62, 48, 80, 71, 77, 50, 63, 53, 62, 54, 63, 30, 61, 77, 58, 54, 65, 65, 75, 57, 48, 55, 53, 47, 57, 79, 47, 50, 69, 49, 86, 53, 67, 69, 72, 58, 67, 79, 89, 69, 68, 62, 50, 65, 64, 54, 70, 78, 60, 55, 41, 68, 56, 54, 42, 90, 54, 51, 67, 42, 89, 54, 83, 48, 71, 52, 78, 56, 79, 71, 75, 91, 82, 70, 54, 62, 67, 71, 58, 62, 90, 70, 43, 46, 59, 57, 77, 58, 72, 51, 50, 53, 67, 56, 72, 70, 67, 53, 68, 59, 75, 95, 71, 77, 36, 40, 84, 50, 72, 58, 43, 70, 55, 77, 63, 94, 73, 59, 45, 58, 46, 61, 71, 59, 70, 44, 55, 79, 48, 66, 55, 81, 68, 51, 64, 61, 47, 49, 53, 67, 75, 102, 73, 82, 68, 81, 58, 64, 77, 74, 71, 64, 54, 69, 73, 55, 65, 79, 62, 52, 102, 59, 103, 44, 51, 40, 41, 46, 69, 64, 44, 59, 79, 72, 48, 57, 74, 68, 61, 67, 53, 64, 71, 86, 66, 58, 80, 50, 52, 66, 68, 60, 76, 70, 61, 56, 58, 56, 104, 60, 83, 72, 44, 52, 62, 65, 63, 91, 62, 130, 76, 71, 61, 75, 67, 90, 43, 58, 80, 69, 81, 97, 75, 55, 59, 79, 69, 81, 56, 62, 43, 70, 77, 59, 84, 57, 79, 50, 79, 65, 109, 73, 70, 54, 75, 68, 54, 62, 64, 51, 52, 83, 61, 89, 69, 63, 74, 82, 78, 54, 98, 44, 63, 72, 54, 83, 79, 52, 63, 71, 62, 60, 58, 57, 63, 72, 51, 59, 59, 73, 68, 70, 68, 52, 53, 79, 74, 74, 70, 65, 75, 56, 62, 50, 59, 84, 62, 98, 72, 74, 44, 55, 62, 46, 60, 91, 54, 64, 65, 91, 45, 68, 65, 73, 45, 72, 64, 75, 106, 54, 80, 69, 60, 81, 80, 63, 57, 74, 65, 74, 63, 57, 79, 63, 76, 68, 59, 113, 38, 50, 91, 54, 54, 54, 60, 72, 95, 82, 54, 54, 62, 88, 51, 61, 56, 60, 59, 52, 68, 70, 63, 71, 51, 64, 73, 60, 48, 80, 47, 65, 54, 99, 56, 86, 61, 57, 80, 42, 90, 63, 68, 55, 45, 66, 89, 53, 65, 55, 55, 49, 57, 70, 69, 82, 59, 78, 79, 54, 73, 83, 61, 68, 64, 83, 104, 59, 56, 69, 62, 62, 61, 81, 71, 59, 75, 59, 59, 72, 74, 48, 66, 58, 80, 57, 61, 62, 81, 70, 50, 46, 51, 61, 67, 76, 54, 67, 53, 40, 71, 62, 68, 56, 57, 82, 58, 64, 58, 57, 58, 61, 62, 80, 70, 65, 55, 70, 90, 48, 72, 82, 100, 75, 55, 60, 55, 66, 69, 61, 86, 87, 92, 88, 59, 62, 47, 81, 57, 72, 65, 54, 72, 111, 93, 69, 77, 84, 64, 52, 58, 72, 62, 69, 48, 68, 45, 66, 50, 61, 47, 68, 59, 67, 59, 69, 64, 80, 56, 89, 51, 56, 72, 79, 60, 49, 71, 58, 60, 63, 66, 34, 93, 63, 71, 58, 66, 61, 60, 79, 51, 74, 57, 68, 50, 56, 63, 66, 63, 58, 60, 51, 68, 80, 57, 92, 57, 67, 63, 80, 57, 95, 59, 52, 89, 103, 57, 49, 55, 65, 64, 67, 68, 55, 74, 43, 78, 86, 70, 35, 77, 54, 75, 91, 74, 69, 59, 86, 108, 70, 59, 72, 55, 45, 52, 95, 66, 80, 56, 70, 65, 75, 53, 76, 75, 77, 73, 70, 68, 52, 60, 66, 51, 89, 59, 50, 64, 63, 70, 54, 61, 44, 78, 33, 54, 63, 49, 73, 63, 51, 118, 60, 77, 
79, 54, 49, 84, 65, 56, 87, 46, 54, 52, 54, 55, 77, 62, 62, 68, 90, 85, 57, 71, 62, 73, 101, 50, 68, 52, 100, 81, 49, 60, 55, 54, 54, 88, 72, 84, 57, 68, 69, 82, 78, 74, 56, 65, 94, 65, 69, 49, 76, 46, 50, 63, 53, 82, 57, 73, 72, 80, 82, 70, 78, 61, 53, 49, 40, 70, 46, 42, 79, 61, 115, 67, 41, 38, 59, 62, 58, 83, 92, 57, 64, 56, 59, 66, 64, 50, 67, 69, 68, 57, 69, 55, 53, 66, 62, 60, 59, 71, 51, 79, 78, 57, 62, 69, 62, 71, 76, 84, 68, 95, 93, 63, 70, 136, 52, 86, 71, 72, 44, 69, 72, 72, 61, 65, 72, 70, 59, 53, 84, 58, 69, 59, 68, 88, 68, 88, 33, 52, 66, 66, 76, 63, 62, 75, 63, 69, 79, 69, 71, 60, 43, 69, 43, 65, 53, 71, 76, 69, 67, 55, 113, 45, 69, 77, 55, 56, 40, 23, 93, 62, 60, 70, 69, 67, 67, 94, 69, 63, 45, 59, 85, 61, 72, 69, 68, 66, 62, 77, 51, 75, 54, 76, 50, 51, 62, 76, 58, 51, 53, 52, 77, 73, 63, 87, 72, 69, 86, 65, 48, 73, 68, 65, 71, 66, 62, 67, 55, 86, 90, 59, 51, 80, 60, 40, 96, 94, 50, 54, 59, 61, 62, 72, 71, 55, 49, 67, 71, 60, 59, 59, 47, 46, 58, 70, 57, 70, 62, 88, 84, 58, 51, 66, 59, 75, 59, 94, 67, 56, 71, 64, 81, 66, 61, 73, 111, 56, 55, 71, 71, 74, 58, 57, 126, 80, 69, 55, 56, 105, 73, 78, 77, 90, 70, 59, 90, 58, 71, 79, 54, 63, 54, 59, 72, 57, 61, 34, 82, 76, 58, 55, 49, 59, 63, 51, 78, 71, 58, 56, 47, 48, 52, 82, 77, 63, 51, 60, 69, 49, 37, 88, 76, 55, 52, 55, 63, 80, 103, 82, 74, 113, 54, 66, 78, 53, 51, 48, 55, 60, 80, 76, 86, 56, 58, 101, 68, 68, 75, 74, 81, 54, 61, 64, 67, 67, 78, 36, 55, 59, 86, 54, 48, 48, 69, 91, 51, 61, 68, 67, 65, 59, 71, 52, 52, 78, 69, 76, 63, 47, 65, 73, 93, 64, 80, 71, 88, 51, 42, 71, 47, 60, 45, 72, 93, 71, 54, 38, 83, 70, 94, 62, 54, 38, 53, 64, 55, 82, 48, 72, 64, 59, 86, 48, 74, 104, 72, 67, 81, 73, 90, 60, 88, 70, 75, 69, 94, 87, 89, 79, 58, 54, 61, 93, 77, 71, 60, 62, 149, 58, 79, 59, 66, 93, 61, 45, 54, 49, 65, 60, 96, 69, 90, 73, 52, 48, 58, 93, 106, 54, 70, 64, 59, 94, 60, 88, 43, 66, 83, 93, 51, 61, 61, 66, 57, 48, 67, 53, 57, 64, 56, 75, 83, 85, 50, 55, 59, 59, 63, 53, 69, 73, 64, 70, 65, 68, 67, 61, 55, 53, 67, 105, 62, 55, 42, 59, 59, 66, 61, 64, 82, 71, 49, 68, 64, 66, 58, 71, 56, 73, 50, 82, 73, 86, 69, 51, 59, 64, 74, 79, 81, 43, 50, 70, 64, 75, 68, 63, 111, 60, 53, 52, 59, 63, 72, 67, 75, 71, 63, 81, 43, 55, 68, 63, 69, 55, 65, 63, 55, 77, 67, 48, 56, 70, 63, 54, 68, 82, 65, 98, 73, 55, 57, 69, 123, 75, 62, 72, 61, 87, 62, 54, 77, 58, 83, 68, 51, 47, 63, 45, 80, 63, 67, 87, 72, 87, 48, 67, 37, 80, 59, 62, 83, 70, 58, 81, 64, 55, 74, 128, 69, 70, 47, 65, 75, 52, 88, 60, 51, 59, 78, 63, 50, 63, 61, 58, 47, 74, 48, 77, 81, 54, 56, 79, 52, 59, 40, 63, 65, 70, 64, 62, 83, 76, 79, 69, 70, 63, 59, 53, 44, 71, 91, 67, 99, 63, 60, 61, 64, 71, 76, 57, 61, 67, 99, 54, 76, 110, 49, 87, 64, 94, 65, 69, 45, 54, 85, 72, 71, 48, 48, 61, 49, 71, 84, 76, 52, 67, 91, 67, 60, 58, 60, 56, 53, 66, 65, 72, 57, 58, 53, 59, 54, 66, 84, 66, 87, 61, 65, 63, 50, 83, 64, 73, 75, 69, 57, 86, 67, 71, 70, 124, 65, 64, 66, 58, 58, 60, 91, 56, 76, 83, 68, 61, 72, 51, 58, 62, 68, 65, 69, 69, 71, 49, 56, 72, 63, 87, 69, 42, 57, 55, 70, 78, 63, 59, 63, 56, 67, 48, 83, 108, 69, 64, 33, 53, 66, 45, 63, 67, 54, 54, 64, 59, 80, 61, 107, 41, 75, 69, 58, 59, 54, 76, 93, 73, 63, 73, 66, 62, 75, 76, 55, 58, 68, 55, 64, 67, 54, 45, 61, 60, 67, 74, 61, 75, 61, 76, 81, 56, 71, 151, 90, 52, 60, 64, 46, 48, 77, 78, 70, 96, 65, 65, 62, 37, 74, 64, 41, 41, 58, 73, 91, 60, 74, 74, 65, 90, 55, 67, 72, 62, 54, 62, 59, 64, 92, 69, 63, 63, 47, 89, 83, 89, 72, 56, 100, 94, 61, 64, 55, 57, 76, 81, 54, 76, 69, 68, 53, 51, 75, 74, 74, 62, 72, 72, 64, 68, 69, 57, 49, 60, 74, 61, 
52, 81, 49, 55, 61, 63, 53, 55, 88, 77, 65, 50, 61, 66, 63, 55, 75, 45, 69, 56, 89, 82, 40, 54, 57, 48, 62, 65, 76, 79, 83, 64, 63, 87, 52, 70, 47, 62, 68, 65, 56, 59, 59, 56, 61, 67, 61, 70, 60, 117, 56, 67, 74, 79, 63, 63, 57, 59, 86, 65, 73, 54, 65, 77, 66, 73, 77, 102, 74, 66, 64, 62, 76, 43, 91, 82, 58, 48, 73, 63, 64, 73, 90, 58, 91, 42, 60, 77, 80, 59, 29, 63, 55, 58, 30, 53, 61, 66, 90, 65, 74, 64, 47, 52, 65, 52, 72, 50, 62, 56, 59, 61, 51, 67, 100, 63, 59, 59, 56, 59, 111, 57, 79, 58, 73, 66, 69, 52, 83, 77, 65, 51, 78, 75, 64, 51, 58, 42, 66, 107, 61, 56, 75, 83, 71, 50, 62, 64, 72, 61, 47, 62, 65, 94, 70, 67, 50, 85, 96, 41, 99, 55, 70, 55, 96, 55, 57, 68, 79, 69, 63, 64, 51, 74, 70, 33, 61, 63, 56, 77, 85, 85, 68, 52, 63, 52, 80, 68, 43, 53, 48, 48, 47, 71, 75, 70, 116, 52, 58, 74, 86, 82, 70, 74, 76, 50, 91, 54, 74, 47, 76, 81, 51, 51, 76, 71, 68, 74, 83, 75, 66, 80, 47, 79, 77, 62, 70, 60, 74, 89, 65, 89, 63, 77, 70, 59, 60, 90, 63, 62, 60, 63, 66, 65, 67, 61, 63, 36, 60, 98, 75, 62, 62, 67, 90, 71, 67, 57, 45, 64, 52, 66, 78, 73, 47, 68, 60, 68, 82, 47, 60, 58, 74, 60, 60, 49, 54, 44, 58, 66, 56, 66, 58, 46, 64, 70, 67, 81, 72, 72, 57, 36, 69, 68, 78, 57, 52, 77, 60, 51, 62, 59, 69, 56, 60, 79, 62, 65, 62, 58, 58, 70, 71, 49, 60, 57, 52, 52, 62, 57, 64, 73, 60, 60, 48, 43, 68, 64, 64, 77, 68, 72, 58, 56, 52, 57, 75, 80, 64, 61, 65, 68, 66, 97, 35, 70, 91, 57, 64, 54, 72, 44, 70, 51, 66, 51, 103, 60, 60, 61, 57, 58, 79, 62, 56, 40, 68, 82, 59, 76, 53, 59, 74, 69, 134, 68, 65, 68, 62, 50, 95, 54, 67, 69, 114, 48, 52, 66, 54, 58, 72, 74, 69, 69, 59, 63, 58, 59, 57, 56, 103, 47, 54, 69, 70, 65, 69, 79, 64, 74, 53, 50, 72, 66, 56, 55, 60, 90, 72, 99, 57, 63, 69, 46, 57, 61, 52, 72, 64, 56, 65, 70, 46, 91, 107, 71, 51, 55, 98, 47, 75, 48, 64, 72, 62, 73, 65, 60, 69, 71, 52, 63, 66, 63, 44, 56, 78, 53, 63, 67, 63, 84, 68, 64, 60, 61, 71, 61, 70, 59, 74, 71, 47, 54, 54, 84, 61, 59, 73, 89, 50, 48, 82, 57, 63, 66, 68, 51, 93, 57, 59, 62, 60, 63, 54, 66, 69, 57, 51, 55, 59, 56, 58, 68, 52, 79, 53, 67, 72, 66, 70, 113, 42, 76, 83, 47, 53, 47, 67, 52, 106, 90, 54, 96, 73, 65, 80, 49, 63, 39, 78, 87, 74, 49, 96, 49, 62, 76, 97, 65, 70, 67, 64, 98, 67, 75, 63, 64, 57, 86, 56, 51, 68, 64, 56, 44, 95, 81, 48, 48, 72, 68, 67, 78, 63, 63, 56, 51, 60, 68, 72, 74, 73, 55, 67, 104, 70, 60, 50, 55, 60, 51, 80, 66, 80, 64, 67, 65, 74, 63, 57, 72, 66, 53, 71, 47, 71, 55, 80, 64, 52, 59, 44, 58, 61, 67, 66, 70, 61, 65, 44, 66, 76, 63, 51, 78, 91, 69, 56, 47, 53, 49, 75, 68, 74, 52, 75, 68, 57, 65, 81, 84, 56, 66, 67, 61, 67, 70, 56, 72, 83, 51, 47, 62, 61, 85, 65, 67, 57, 51, 75, 64, 54, 51, 78, 51, 67, 70, 93, 63, 70, 71, 71, 61, 44, 65, 77, 50, 78, 64, 96, 43, 73, 47, 69, 65, 82, 58, 75, 67, 74, 73, 67, 54, 53, 41, 58, 64, 60, 34, 74, 46, 63, 66, 80, 73, 88, 68, 68, 62, 75, 39, 75, 81, 68, 61, 34, 56, 79, 51, 72, 53, 60, 72, 61, 65, 54, 59, 59, 90, 63, 69, 50, 66, 60, 60, 55, 50, 69, 57, 63, 56, 55, 75, 60, 64, 79, 71, 60, 70, 63, 55, 67, 51, 54, 78, 56, 64, 60, 75, 61, 65, 44, 67, 66, 54, 68, 55, 27, 66, 77, 79, 63, 53, 60, 66, 66, 70, 78, 64, 71, 52, 57, 58, 73, 64, 67, 84, 57, 62, 82, 66, 59, 61, 54, 51, 65, 55, 72, 58, 50, 54, 71, 65, 58, 74, 65, 52, 47, 38, 82, 52, 51, 66, 93, 61, 49, 100, 64, 98, 73, 75, 61, 62, 60, 95, 99, 59, 59, 74, 93, 80, 62, 45, 41, 53, 53, 66, 65, 70, 40, 54, 73, 86, 109, 48, 39, 57, 67, 60, 69, 52, 66, 53, 62, 105, 68, 79, 46, 79, 68, 60, 83, 82, 62, 64, 79, 75, 81, 82, 61, 58, 76, 83, 63, 30, 66, 58, 92, 53, 59, 73, 98, 60, 83, 69, 77, 57, 78, 67, 86, 78, 63, 60, 
103, 68, 65, 81, 70, 80, 62, 63, 59, 77, 72, 63, 64, 82, 67, 46, 67, 68, 73, 60, 70, 42, 55, 67, 82, 66, 68, 67, 104, 44, 85, 65, 68, 45, 86, 62, 61, 58, 65, 48, 42, 74, 81, 89, 69, 86, 62, 92, 43, 70, 64, 65, 41, 67, 53, 62, 59, 60, 57, 58, 45, 84, 77, 100, 69, 64, 58, 60, 75, 48, 55, 61, 77, 64, 55, 78, 85, 82, 47, 68, 56, 60, 67, 67, 79, 55, 54, 69, 89, 104, 53, 54, 77, 70, 70, 63, 70, 53, 69, 50, 71, 46, 46, 85, 68, 71, 59, 62, 59, 52, 52, 46, 79, 71, 69, 57, 95, 75, 72, 58, 62, 54, 61, 68, 48, 65, 49, 49, 92, 72, 76, 91, 64, 57, 81, 71, 77, 55, 81, 81, 55, 72, 112, 61, 73, 46, 69, 77, 69, 68, 64, 75, 43, 42, 81, 52, 64, 69, 64, 44, 51, 85, 52, 56, 55, 66, 89, 58, 50, 74, 71, 57, 71, 32, 76, 88, 60, 92, 57, 61, 63, 107, 63, 63, 56, 75, 65, 89, 76, 53, 71, 58, 68, 56, 63, 56, 72, 61, 78, 86, 67, 56, 73, 79, 83, 76, 74, 57, 47, 84, 55, 47, 56, 66, 34, 82, 56, 46, 55, 58, 57, 61, 61, 58, 60, 109, 61, 75, 42, 47, 65, 55, 58, 88, 62, 56, 77, 76, 68, 58, 74, 50, 60, 72, 68, 50, 51, 54, 59, 69, 74, 67, 70, 61, 59, 61, 61, 72, 102, 64, 72, 59, 60, 54, 69, 85, 60, 63, 66, 59, 58, 66, 74, 59, 107, 60, 65, 68, 61, 82, 65, 64, 58, 62, 54, 65, 62, 87, 75, 78, 67, 55, 67, 59, 65, 75, 73, 51, 67, 66, 65, 72, 63, 121, 55, 104, 96, 101, 75, 58, 78, 56, 55, 49, 46, 52, 66, 87, 73, 63, 68, 101, 60, 80, 69, 61, 47, 56, 70, 45, 83, 76, 53, 50, 59, 85, 80, 72, 102, 57, 68, 58, 71, 66, 60, 67, 53, 47, 61, 42, 65, 65, 70, 71, 79, 70, 64, 100, 71, 69, 63, 66, 71, 75, 51, 68, 71, 56, 81, 88, 67, 73, 47, 58, 58, 72, 54, 73, 69, 80, 60, 68, 59, 49, 82, 65, 59, 61, 66, 45, 75, 62, 67, 75, 57, 57, 102, 128, 38, 70, 70, 60, 64, 44, 57, 47, 67, 62, 78, 73, 75, 61, 42, 60, 50, 51, 61, 77, 50, 71, 67, 57, 53, 75, 60, 64, 62, 62, 75, 47, 83, 52, 74, 90, 76, 69, 68, 91, 75, 89, 75, 54, 62, 82, 53, 61, 57, 69, 57, 63, 55, 71, 66, 54, 50, 57, 97, 72, 58, 61, 73, 52, 62, 72, 47, 124, 70, 56, 61, 66, 55, 58, 71, 58, 59, 57, 61, 45, 58, 52, 66, 60, 71, 65, 74, 59, 61, 77, 51, 53, 54, 65, 54, 50, 79, 52, 52, 63, 68, 67, 69, 49, 63, 92, 56, 81, 55, 80, 70, 77, 78, 76, 66, 84, 55, 66, 57, 65, 88, 100, 80, 80, 64, 42, 105, 56, 48, 81, 65, 79, 65, 47, 59, 71, 62, 42, 45, 81, 66, 58, 59, 79, 58, 73, 78, 63, 65, 63, 103, 61, 71, 77, 72, 56, 67, 74, 63, 71, 85, 70, 58, 71, 74, 63, 73, 64, 45, 106, 78, 64, 61, 82, 78, 56, 71, 100, 58, 52, 49, 66, 83, 62, 60, 73, 69, 61, 59, 68, 57, 51, 65, 54, 68, 75, 105, 73, 61, 55, 70, 48, 71, 64, 70, 57, 83, 60, 92, 51, 49, 76, 52, 81, 66, 57, 53, 52, 68, 89, 67, 59, 76, 43, 35, 79, 80, 68, 63, 68, 51, 67, 65, 87, 82, 40, 59, 75, 63, 70, 98, 86, 75, 57, 86, 63, 69, 73, 74, 66, 76, 64, 65, 67, 84, 64, 55, 52, 75, 53, 68, 69, 54, 74, 66, 103, 48, 66, 60, 59, 55, 61, 54, 62, 55, 52, 67, 63, 84, 45, 63, 58, 64, 72, 69, 44, 51, 57, 59, 95, 65, 47, 78, 75, 46, 61, 66, 67, 65, 79, 121, 74, 57, 53, 79, 73, 70, 65, 70, 69, 74, 58, 50, 62, 77, 69, 55, 76, 87, 71, 52, 61, 59, 69, 81, 65, 71, 75, 64, 52, 59, 57, 113, 77, 60, 61, 53, 54, 68, 59, 59, 69, 55, 59, 57, 65, 59, 75, 66, 33, 48, 72, 53, 53, 45, 74, 69, 65, 66, 76, 54, 60, 58, 89, 87, 46, 81, 46, 55, 67, 98, 58, 66, 100, 57, 63, 51, 60, 61, 61, 68, 58, 90, 57, 59, 74, 79, 66, 53, 65, 70, 53, 57, 76, 41, 84, 80, 56, 51, 57, 61, 63, 98, 48, 55, 51, 81, 54, 60, 76, 53, 60, 66, 54, 95, 59, 72, 55, 57, 93, 51, 102, 62, 56, 65, 67, 49, 60, 68, 52, 86, 63, 59, 38, 72, 50, 86, 59, 63, 62, 76, 66, 46, 66, 59, 77, 60, 53, 70, 45, 67, 55, 58, 71, 59, 47, 87, 71, 91, 70, 74, 61, 52, 66, 59, 62, 74, 77, 52, 81, 83, 73, 70, 54, 60, 87, 60, 94, 83, 56, 70, 75, 63, 
71, 45, 60, 77, 89, 58, 54, 53, 38, 88, 54, 63, 59, 57, 48, 69, 48, 58, 73, 104, 77, 64, 57, 52, 63, 76, 61, 52, 66, 56, 102, 70, 75, 68, 59, 61, 74, 74, 67, 69, 62, 81, 57, 70, 58, 67, 97, 60, 42, 101, 72, 67, 76, 58, 44, 130, 82, 46, 59, 53, 77, 46, 73, 49, 81, 65, 54, 58, 66, 61, 62, 70, 74, 73, 61, 65, 71, 60, 101, 93, 76, 60, 64, 57, 68, 67, 60, 72, 54, 72, 45, 56, 105, 92, 58, 51, 77, 83, 63, 79, 71, 63, 65, 52, 74, 58, 68, 66, 79, 53, 74, 53, 77, 90, 70, 54, 71, 58, 59, 80, 56, 65, 42, 47, 81, 52, 53, 60, 51, 53, 55, 87, 49, 69, 68, 47, 47, 47, 99, 49, 39, 72, 72, 82, 68, 75, 71, 61, 80, 68, 50, 68, 67, 52, 61, 57, 67, 53, 54, 49, 60, 70, 73, 72, 73, 70, 67, 70, 55, 45, 60, 69, 64, 74, 67, 57, 78, 64, 58, 41, 60, 71, 57, 62, 58, 120, 77, 71, 65, 62, 56, 126, 45, 73, 47, 115, 66, 67, 63, 84, 59, 58, 79, 68, 54, 51, 60, 78, 83, 80, 59, 77, 75, 55, 58, 50, 65, 40, 57, 63, 107, 50, 46, 85, 43, 43, 98, 59, 56, 66, 69, 64, 72, 67, 56, 55, 54, 55, 79, 52, 64, 51, 56, 61, 51, 61, 71, 71, 64, 61, 70, 52, 78, 69, 51, 65, 80, 54, 37, 80, 99, 88, 58, 52, 52, 61, 75, 66, 49, 70, 62, 48, 74, 92, 61, 55, 60, 64, 76, 66, 70, 62, 76, 70, 78, 61, 75, 74, 54, 39, 58, 54, 64, 62, 91, 66, 72, 49, 43, 78, 60, 58, 73, 69, 59, 61, 56, 63, 68, 47, 73, 76, 49, 63, 61, 55, 74, 55, 78, 46, 73, 48, 64, 56, 62, 98, 68, 59, 78, 83, 66, 59, 71, 85, 61, 51, 68, 71, 51, 74, 47, 66, 48, 63, 110, 69, 69, 73, 55, 65, 75, 64, 74, 62, 65, 58, 75, 66, 56, 84, 54, 79, 59, 82, 41, 61, 52, 67, 107, 57, 43, 65, 55, 73, 57, 73, 56, 48, 57, 55, 80, 52, 76, 70, 62, 70, 48, 83, 69, 59, 57, 63, 62, 55, 58, 61, 61, 37, 60, 74, 81, 81, 84, 49, 76, 60, 88, 56, 49, 55, 114, 88, 63, 68, 72, 67, 37, 67, 100, 72, 79, 66, 96, 62, 64, 141, 80, 82, 50, 65, 62, 71, 81, 101, 62, 82, 82, 101, 80, 85, 70, 67, 64, 55, 64, 86, 80, 47, 55, 71, 70, 41, 68, 101, 59, 62, 51, 52, 56, 91, 52, 64, 55, 65, 92, 45, 72, 61, 52, 71, 77, 70, 62, 57, 78, 79, 65, 64, 60, 61, 116, 78, 53, 81, 68, 98, 52, 69, 90, 52, 70, 68, 62, 57, 48, 79, 58, 57, 98, 48, 64, 99, 66, 69, 77, 64, 59, 61, 74, 59, 55, 79, 52, 74, 87, 63, 71, 41, 68, 49, 59, 68, 64, 53, 47, 65, 69, 55, 65, 69, 50, 71, 59, 55, 110, 45, 63, 76, 74, 71, 87, 89, 70, 63, 54, 57, 79, 45, 70, 53, 69, 82, 57, 53, 57, 72, 81, 53, 55, 68, 71, 71, 67, 70, 67, 50, 68, 70, 74, 68, 61, 62, 66, 75, 51, 44, 66, 53, 78, 44, 47, 52, 84, 66, 73, 62, 67, 103, 71, 59, 51, 61, 72, 78, 56, 54, 100, 67, 68, 80, 56, 52, 55, 55, 60, 70, 60, 76, 70, 71, 58, 72, 49, 81, 131, 55, 56, 58, 67, 80, 74, 66, 63, 64, 73, 77, 84, 55, 48, 89, 92, 65, 74, 77, 62, 53, 57, 62, 54, 43, 34, 86, 59, 82, 61, 86, 104, 65, 79, 72, 60, 72, 84, 60, 49, 47, 53, 60, 66, 54, 72, 68, 58, 80, 66, 55, 70, 77, 98, 63, 49, 61, 70, 56, 61, 68, 62, 63, 59, 64, 63, 49, 49, 96, 61, 97, 85, 86, 63, 69, 42, 118, 62, 60, 51, 67, 68, 114, 66, 60, 65, 68, 67, 63, 60, 63, 57, 61, 68, 93, 39, 61, 54, 51, 61, 58, 53, 52, 59, 67, 71, 55, 77, 62, 77, 54, 51, 63, 67, 46, 56, 62, 78, 89, 61, 50, 46, 92, 50, 56, 57, 71, 55, 58, 72, 60, 61, 67, 68, 56, 62, 86, 80, 41, 59, 54, 61, 60, 70, 63, 75, 37, 62, 67, 48, 106, 66, 70, 66, 46, 61, 40, 65, 76, 106, 43, 70, 63, 44, 65, 54, 53, 72, 49, 71, 74, 74, 66, 75, 70, 88, 60, 125, 70, 68, 65, 48, 60, 50, 65, 95, 73, 93, 58, 105, 60, 51, 50, 51, 67, 44, 77, 62, 47, 81, 55, 78, 48, 80, 62, 50, 45, 53, 74, 65, 78, 57, 63, 67, 59, 50, 65, 48, 55, 53, 80, 86, 67, 87, 44, 72, 81, 61, 93, 60, 87, 64, 66, 94, 50, 79, 61, 60, 66, 64, 74, 54, 74, 75, 63, 63, 54, 71, 67, 51, 56, 67, 48, 61, 63, 57, 64, 80, 74, 55, 49, 68, 67, 69, 66, 68, 
51, 66, 53, 60, 49, 52, 73, 57, 61, 65, 61, 61, 62, 76, 59, 43, 78, 63, 64, 90, 38, 67, 79, 58, 55, 102, 62, 61, 68, 61, 56, 44, 69, 75, 68, 65, 76, 74, 83, 77, 59, 53, 64, 65, 57, 65, 62, 55, 89, 54, 70, 68, 66, 46, 82, 73, 79, 48, 51, 65, 91, 64, 85, 73, 91, 58, 71, 58, 54, 70, 65, 71, 77, 61, 69, 32, 65, 68, 59, 58, 74, 60, 61, 45, 69, 55, 65, 56, 46, 55, 62, 58, 58, 69, 54, 49, 80, 63, 78, 75, 51, 51, 51, 68, 52, 65, 57, 77, 63, 55, 56, 60, 64, 69, 57, 63, 71, 57, 58, 55, 69, 67, 63, 68, 66, 67, 67, 50, 67, 90, 53, 82, 62, 59, 40, 75, 59, 82, 73, 55, 79, 80, 51, 56, 62, 61, 68, 91, 54, 73, 68, 45, 45, 66, 68, 73, 51, 55, 47, 72, 52, 87, 79, 75, 69, 69, 55, 54, 66, 58, 61, 35, 72, 55, 55, 64, 58, 59, 65, 55, 58, 90, 80, 60, 81, 64, 90, 55, 56, 53, 70, 70, 55, 52, 67, 62, 72, 61, 63, 63, 84, 46, 52, 57, 76, 66, 54, 92, 44, 76, 49, 70, 58, 77, 68, 67, 61, 46, 95, 46, 58, 82, 43, 56, 58, 64, 82, 81, 70, 59, 68, 51, 74, 80, 54, 51, 124, 89, 56, 40, 62, 67, 86, 65, 47, 62, 69, 53, 100, 65, 61, 54, 61, 58, 79, 80, 75, 61, 82, 62, 72, 95, 66, 40, 78, 63, 64, 53, 62, 50, 60, 66, 69, 75, 105, 73, 64, 72, 66, 62, 85, 66, 72, 59, 66, 56, 121, 47, 71, 70, 65, 78, 63, 91, 78, 80, 62, 73, 62, 83, 58, 49, 50, 81, 69, 54, 55, 60, 72, 66, 68, 60, 63, 65, 79, 63, 95, 76, 66, 46, 71, 44, 59, 59, 51, 63, 60, 83, 66, 99, 63, 53, 59, 63, 64, 71, 52, 55, 65, 68, 61, 69, 65, 67, 45, 62, 58, 81, 76, 84, 78, 60, 76, 80, 70, 61, 48, 72, 57, 70, 80, 86, 67, 73, 53, 46, 65, 73, 69, 75, 68, 58, 66, 67, 86, 88, 66, 60, 55, 82, 69, 68, 77, 71, 73, 59, 63, 74, 64, 85, 57, 43, 71, 90, 71, 47, 67, 83, 50, 64, 73, 74, 70, 69, 65, 63, 63, 59, 54, 46, 63, 62, 59, 55, 67, 77, 56, 65, 84, 64, 75, 66, 52, 64, 49, 69, 58, 61, 119, 75, 62, 68, 55, 71, 52, 76, 67, 67, 50, 72, 58, 72, 54, 70, 60, 42, 70, 65, 58, 80, 67, 51, 64, 55, 61, 72, 59, 62, 61, 75, 57, 58, 57, 72, 74, 54, 80, 56, 62, 74, 69, 85, 58, 66, 61, 62, 62, 71, 69, 47, 55, 65, 61, 71, 61, 62, 75, 77, 71, 67, 93, 70, 62, 50, 66, 58, 88, 65, 67, 64, 73, 80, 64, 63, 77, 63, 67, 56, 59, 72, 55, 129, 62, 87, 73, 52, 68, 91, 74, 67, 69, 67, 87, 73, 82, 62, 79, 44, 70, 74, 65, 67, 87, 51, 49, 66, 67, 71, 55, 58, 81, 75, 38, 68, 64, 59, 65, 64, 66, 64, 69, 58, 49, 53, 68, 56, 70, 57, 60, 63, 91, 54, 69, 74, 52, 61, 70, 47, 68, 60, 69, 80, 50, 59, 97, 74, 46, 70, 87, 66, 75, 95, 69, 57, 62, 74, 64, 79, 42, 92, 64, 70, 80, 86, 64, 69, 62, 61, 90, 65, 76, 74, 63, 91, 67, 87, 55, 59, 68, 78, 48, 68, 61, 60, 51, 73, 53, 69, 59, 77, 62, 77, 67, 48, 125, 55, 60, 75, 78, 59, 49, 59, 66, 68, 52, 55, 73, 51, 68, 74, 75, 56, 74, 82, 67, 50, 70, 79, 65, 63, 60, 52, 59, 60, 86, 72, 69, 54, 59, 53, 56, 91, 68, 62, 71, 78, 59, 84, 65, 80, 53, 73, 65, 65, 53, 72, 66, 64, 58, 85, 65, 69, 53, 135, 74, 50, 58, 58, 71, 63, 73, 77, 65, 70, 48, 77, 82, 74, 71, 61, 62, 51, 63, 64, 57, 64, 63, 58, 64, 65, 72, 51, 56, 64, 52, 65, 62, 58, 58, 97, 69, 74, 70, 72, 58, 75, 58, 69, 57, 69, 68, 73, 77, 67, 53, 57, 63, 63, 59, 66, 39, 56, 120, 67, 64, 69, 64, 68, 75, 72, 78, 135, 86, 60, 67, 85, 67, 73, 55, 71, 90, 60, 61, 72, 74, 57, 62, 61, 62, 62, 59, 56, 56, 64, 64, 66, 55, 70, 58, 66, 65, 54, 68, 66, 41, 69, 56, 75, 63, 86, 59, 78, 44, 63, 72, 74, 80, 78, 59, 58, 71, 65, 53, 65, 66, 98, 63, 63, 51, 47, 64, 56, 95, 96, 65, 82, 71, 50, 52, 67, 87, 63, 57, 95, 66, 59, 86, 59, 67, 76, 97, 57, 62, 74, 102, 65, 59, 60, 43, 51, 49, 65, 43, 77, 69, 96, 50, 74, 78, 73, 65, 64, 63, 39, 58, 73, 69, 57, 64, 62, 68, 50, 63, 61, 43, 60, 64, 60, 56, 59, 75, 62, 66, 80, 77, 104, 55, 63, 66, 74, 68, 58, 57, 62, 54, 
57, 60, 74, 66, 49, 75, 56, 97, 68, 60, 68, 43, 75, 79, 54, 60, 78, 65, 84, 75, 72, 64, 72, 71, 73, 72, 58, 91, 82, 51, 63, 52, 48, 76, 74, 65, 60, 50, 70, 99, 90, 68, 65, 68, 63, 85, 94, 58, 84, 86, 74, 68, 75, 49, 73, 82, 67, 80, 51, 62, 69, 52, 64, 75, 68, 52, 61, 55, 62, 53, 61, 71, 67, 42, 83, 38, 68, 55, 42, 62, 70, 59, 67, 67, 60, 48, 64, 59, 103, 52, 86, 74, 60, 99, 68, 68, 45, 73, 59, 102, 68, 60, 63, 61, 64, 69, 62, 53, 50, 55, 66, 66, 56, 54, 75, 55, 65, 56, 76, 46, 67, 47, 53, 71, 86, 77, 67, 64, 70, 48, 51, 62, 58, 59, 57, 52, 71, 72, 50, 87, 69, 90, 63, 55, 62, 58, 74, 72, 69, 81, 61, 56, 69, 75, 76, 131, 68, 42, 61, 76, 52, 48, 63, 45, 30, 66, 77, 66, 55, 72, 58, 66, 66, 44, 55, 66, 66, 47, 65, 58, 55, 66, 65, 78, 49, 56, 64, 95, 79, 59, 73, 72, 87, 73, 52, 54, 60, 83, 65, 60, 71, 67, 41, 61, 56, 58, 65, 71, 58, 94, 91, 99, 82, 57, 73, 58, 68, 53, 76, 68, 59, 64, 47, 65, 41, 60, 49, 72, 75, 66, 88, 69, 75, 87, 53, 52, 76, 72, 60, 55, 60, 74, 70, 77, 66, 56, 87, 49, 72, 62, 69, 57, 61, 68, 51, 74, 98, 55, 67, 49, 72, 63, 52, 64, 81, 81, 89, 53, 66, 57, 59, 42, 60, 61, 66, 69, 72, 60, 60, 84, 78, 62, 45, 91, 67, 109, 85, 64, 73, 84, 92, 71, 58, 46, 60, 52, 124, 72, 65, 83, 59, 94, 54, 59, 147, 67, 64, 58, 68, 86, 52, 71, 56, 70, 58, 83, 71, 54, 39, 80, 70, 63, 64, 100, 160, 62, 43, 57, 65, 85, 70, 62, 62, 58, 79, 61, 54, 50, 60, 79, 38, 55, 88, 56, 77, 65, 64, 70, 88, 54, 49, 56, 63, 68, 55, 57, 90, 100, 74, 69, 79, 89, 59, 57, 73, 63, 50, 66, 62, 71, 87, 87, 55, 75, 63, 55, 59, 70, 51, 72, 63, 65, 67, 55, 70, 53, 72, 59, 57, 66, 51, 76, 69, 74, 53, 77, 52, 76, 63, 63, 67, 42, 71, 67, 54, 51, 62, 69, 67, 51, 71, 66, 70, 73, 84, 88, 69, 62, 70, 86, 53, 63, 65, 63, 60, 74, 46, 75, 65, 64, 69, 69, 69, 73, 58, 67, 64, 76, 57, 73, 58, 56, 78, 84, 68, 56, 65, 58, 58, 92, 48, 64, 46, 82, 63, 101, 55, 49, 66, 75, 72, 68, 64, 60, 86, 74, 65, 61, 63, 73, 69, 63, 60, 44, 80, 45, 54, 76, 68, 69, 59, 72, 58, 52, 55, 65, 67, 62, 65, 43, 58, 62, 73, 55, 74, 53, 28, 91, 49, 66, 69, 78, 48, 67, 48, 56, 63, 53, 50, 75, 46, 92, 65, 58, 74, 76, 72, 70, 51, 78, 64, 78, 55, 54, 70, 46, 95, 44, 55, 53, 60, 70, 77, 62, 51, 57, 73, 51, 65, 64, 58, 70, 52, 57, 72, 52, 49, 61, 70, 71, 71, 85, 59, 57, 80, 68, 109, 75, 77, 44, 137, 63, 53, 55, 67, 44, 58, 60, 48, 59, 70, 61, 63, 65, 62, 81, 70, 61, 66, 46, 79, 64, 50, 92, 56, 82, 45, 78, 65, 110, 87, 58, 77, 79, 62, 71, 60, 65, 55, 52, 65, 56, 74, 65, 55, 52, 59, 70, 70, 62, 53, 70, 70, 69, 54, 62, 71, 75, 91, 70, 62, 58, 76, 75, 62, 55, 69, 78, 61, 95, 58, 55, 61, 54, 77, 59, 67, 67, 115, 59, 57, 72, 58, 51, 60, 78, 61, 70, 74, 53, 61, 58, 90, 79, 58, 83, 53, 61, 71, 94, 63, 71, 62, 54, 79, 65, 57, 59, 59, 65, 74, 87, 59, 48, 49, 74, 62, 71, 101, 52, 62, 58, 84, 55, 62, 83, 88, 72, 72, 65, 76, 80, 53, 66, 57, 64, 79, 48, 79, 70, 66, 62, 58, 52, 59, 54, 48, 83, 66, 59, 67, 67, 65, 68, 58, 64, 61, 65, 87, 79, 68, 68, 92, 118, 46, 69, 72, 63, 73, 72, 61, 55, 72, 80, 56, 69, 70, 67, 58, 65, 73, 62, 68, 82, 61, 126, 53, 67, 46, 41, 66, 88, 65, 65, 48, 86, 81, 57, 63, 60, 71, 103, 43, 60, 70, 66, 57, 78, 63, 49, 52, 76, 70, 71, 79, 88, 64, 63, 58, 63, 45, 49, 57, 54, 57, 64, 74, 59, 73, 101, 55, 69, 72, 88, 50, 71, 77, 63, 65, 49, 80, 71, 71, 78, 96, 54, 61, 62, 47, 52, 99, 56, 61, 56, 69, 61, 52, 50, 77, 66, 101, 41, 81, 58, 89, 60, 77, 52, 72, 97, 62, 53, 73, 72, 76, 68, 61, 73, 59, 62, 79, 71, 62, 74, 132, 45, 82, 78, 53, 66, 72, 54, 63, 100, 74, 48, 52, 58, 57, 62, 58, 49, 70, 48, 70, 65, 89, 79, 72, 49, 79, 48, 60, 48, 90, 49, 31, 54, 57, 74, 60, 66, 85, 
75, 67, 61, 69, 60, 60, 98, 65, 56, 62, 70, 54, 70, 73, 44, 63, 54, 55, 102, 52, 62, 69, 63, 59, 72, 54, 65, 46, 51, 79, 81, 74, 61, 55, 88, 48, 55, 95, 55, 65, 83, 50, 103, 83, 71, 100, 63, 63, 103, 67, 58, 74, 62, 68, 67, 55, 68, 48, 61, 67, 56, 45, 66, 54, 63, 51, 52, 65, 75, 58, 70, 105, 65, 59, 75, 56, 74, 76, 66, 63, 48, 69, 61, 77, 71, 53, 62, 67, 48, 58, 59, 63, 64, 77, 96, 58, 60, 61, 66, 62, 43, 96, 124, 73, 135, 49, 87, 62, 47, 72, 57, 71, 73, 53, 51, 80, 98, 56, 62, 62, 71, 53, 80, 70, 68, 63, 70, 69, 63, 69, 52, 76, 67, 52, 69, 59, 71, 66, 57, 53, 69, 63, 48, 85, 69, 69, 63, 60, 61, 70, 68, 110, 51, 128, 59, 61, 60, 56, 54, 60, 61, 52, 54, 61, 89, 65, 67, 57, 64, 64, 53, 77, 50, 64, 64, 73, 60, 46, 52, 52, 70, 65, 52, 54, 62, 58, 42, 59, 62, 66, 67, 86, 58, 70, 58, 60, 69, 66, 50, 66, 73, 70, 71, 58, 86, 74, 73, 107, 51, 57, 58, 75, 63, 51, 67, 64, 61, 81, 74, 59, 66, 81, 54, 76, 72, 99, 63, 46, 56, 67, 65, 60, 39, 57, 49, 60, 43, 74, 61, 78, 64, 70, 61, 85, 51, 57, 68, 52, 71, 78, 86, 80, 57, 54, 83, 96, 107, 61, 50, 73, 60, 64, 69, 66, 79, 54, 63, 73, 39, 23, 57, 57, 68, 58, 56, 59, 65, 91, 59, 47, 70, 88, 71, 60, 55, 62, 53, 61, 63, 47, 40, 58, 61, 67, 43, 55, 74, 76, 77, 61, 96, 46, 57, 60, 46, 76, 41, 65, 59, 75, 42, 72, 71, 64, 50, 74, 79, 65, 55, 78, 75, 45, 70, 61, 57, 76, 64, 52, 79, 79, 79, 68, 68, 57, 60, 55, 137, 54, 64, 65, 71, 71, 58, 59, 85, 78, 52, 75, 57, 87, 74, 47, 70, 51, 71, 73, 56, 62, 52, 60, 59, 78, 97, 76, 74, 63, 59, 98, 44, 80, 62, 43, 62, 67, 64, 71, 50, 61, 65, 58, 84, 73, 70, 67, 69, 71, 54, 64, 55, 75, 78, 71, 70, 53, 64, 61, 75, 69, 77, 61, 81, 42, 73, 70, 70, 60, 55, 64, 71, 62, 108, 72, 63, 49, 66, 69, 58, 79, 74, 47, 81, 76, 43, 61, 47, 59, 47, 69, 78, 56, 73, 60, 73, 60, 66, 68, 50, 40, 68, 110, 61, 81, 78, 68, 56, 69, 63, 42, 70, 56, 82, 62, 58, 57, 68, 70, 62, 65, 73, 57, 76, 53, 66, 66, 81, 70, 86, 71, 77, 75, 52, 50, 73, 67, 82, 96, 90, 40, 62, 51, 64, 68, 48, 56, 58, 83, 77, 65, 30, 57, 56, 61, 83, 64, 44, 68, 52, 61, 61, 52, 59, 49, 56, 79, 67, 75, 60, 67, 55, 68, 48, 66, 48, 57, 48, 68, 59, 62, 53, 64, 63, 50, 57, 52, 55, 59, 101, 59, 64, 69, 62, 46, 65, 83, 76, 59, 88, 91, 62, 71, 78, 62, 49, 67, 73, 64, 134, 77, 47, 60, 67, 58, 70, 82, 95, 74, 48, 72, 102, 61, 56, 76, 62, 67, 60, 81, 59, 62, 101, 69, 51, 80, 60, 83, 78, 66, 56, 88, 52, 72, 95, 56, 65, 58, 62, 61, 101, 99, 75, 50, 57, 68, 60, 84, 76, 51, 53, 67, 67, 82, 66, 52, 65, 67, 55, 60, 48, 86, 54, 77, 56, 76, 71, 82, 57, 58, 60, 50, 117, 66, 86, 54, 60, 60, 63, 66, 63, 60, 79, 66, 65, 62, 64, 73, 61, 56, 48, 78, 62, 69, 50, 78, 71, 71, 60, 71, 67, 79, 61, 60, 62, 48, 72, 60, 73, 63, 55, 68, 68, 80, 50, 71, 59, 63, 69, 62, 66, 88, 62, 69, 61, 49, 48, 80, 62, 66, 51, 65, 69, 42, 50, 56, 83, 56, 46, 66, 54, 64, 77, 74, 57, 78, 58, 71, 49, 76, 56, 61, 66, 53, 87, 65, 52, 66, 73, 53, 59, 83, 52, 47, 52, 47, 69, 60, 43, 55, 50, 73, 61, 94, 67, 58, 67, 58, 69, 66, 72, 54, 86, 55, 61, 69, 82, 62, 80, 55, 76, 64, 73, 49, 70, 74, 59, 81, 55, 66, 77, 61, 81, 62, 49, 58, 63, 65, 87, 66, 60, 56, 87, 67, 50, 54, 58, 50, 57, 90, 64, 47, 63, 66, 56, 57, 53, 65, 55, 68, 60, 67, 74, 67, 63, 96, 82, 49, 68, 62, 58, 71, 74, 53, 60, 57, 83, 85, 55, 63, 59, 63, 64, 56, 82, 74, 61, 58, 57, 62, 66, 64, 63, 61, 61, 70, 80, 73, 66, 49, 104, 78, 59, 59, 37, 68, 74, 51, 69, 56, 48, 86, 49, 57, 100, 89, 52, 68, 46, 77, 60, 77, 55, 87, 55, 54, 64, 64, 90, 55, 72, 73, 76, 54, 71, 103, 48, 78, 61, 71, 40, 58, 68, 60, 93, 55, 64, 63, 70, 73, 65, 67, 56, 52, 73, 56, 64, 93, 58, 78, 61, 57, 76, 41, 65, 58, 82, 
62, 70, 61, 74, 63, 63, 65, 77, 57, 43, 62, 61, 74, 53, 72, 63, 60, 62, 55, 64, 98, 67, 67, 55, 58, 68, 62, 65, 55, 79, 61, 74, 72, 56, 77, 102, 64, 87, 52, 70, 59, 67, 60, 67, 72, 80, 54, 79, 47, 52, 63, 62, 82, 56, 60, 55, 66, 76, 64, 63, 50, 52, 83, 46, 76, 64, 88, 74, 64, 61, 96, 57, 62, 62, 59, 63, 92, 65, 57, 77, 67, 66, 37, 53, 63, 48, 68, 64, 64, 99, 69, 60, 74, 72, 88, 72, 137, 61, 70, 55, 73, 66, 60, 73, 67, 78, 89, 62, 61, 64, 57, 85, 97, 54, 64, 96, 47, 78, 73, 62, 75, 67, 76, 50, 62, 56, 61, 83, 65, 47, 60, 76, 141, 48, 68, 59, 57, 94, 80, 58, 82, 63, 88, 87, 57, 73, 63, 80, 74, 68, 89, 58, 53, 57, 91, 70, 56, 80, 64, 62, 70, 62, 65, 61, 89, 71, 83, 59, 71, 57, 55, 54, 87, 63, 61, 55, 60, 54, 81, 57, 74, 77, 61, 73, 64, 62, 61, 69, 57, 53, 64, 62, 67, 56, 75, 47, 65, 54, 65, 53, 75, 54, 56, 73, 80, 50, 44, 63, 60, 65, 82, 57, 53, 68, 67, 62, 46, 83, 64, 78, 65, 66, 59, 58, 78, 93, 72, 79, 55, 57, 67, 66, 48, 65, 79, 66, 68, 74, 52, 59, 58, 78, 73, 59, 74, 62, 59, 91, 50, 62, 55, 89, 81, 65, 59, 80, 60, 44, 44, 84, 60, 41, 76, 87, 57, 95, 44, 52, 64, 84, 57, 82, 53, 73, 82, 73, 64, 62, 38, 63, 47, 57, 65, 128, 73, 93, 85, 74, 83, 80, 82, 55, 68, 44, 75, 45, 77, 62, 53, 39, 50, 63, 75, 59, 62, 70, 67, 82, 111, 79, 59, 53, 46, 60, 74, 69, 64, 53, 49, 55, 55, 60, 60, 86, 80, 53, 89, 67, 82, 54, 60, 63, 69, 82, 80, 65, 59, 77, 60, 55, 53, 53, 90, 54, 82, 57, 55, 45, 54, 81, 70, 67, 66, 125, 70, 44, 79, 70, 60, 63, 51, 56, 66, 61, 63, 63, 58, 79, 38, 60, 74, 52, 75, 48, 57, 50, 50, 60, 78, 61, 74, 65, 57, 57, 50, 56, 72, 71, 59, 54, 53, 54, 60, 62, 82, 63, 80, 48, 69, 73, 65, 58, 90, 99, 53, 58, 64, 64, 71, 73, 66, 53, 84, 85, 54, 64, 64, 162, 59, 54, 47, 89, 79, 52, 63, 79, 57, 67, 66, 55, 59, 64, 53, 99, 64, 58, 64, 51, 69, 50, 136, 54, 53, 58, 88, 67, 49, 63, 45, 78, 78, 91, 62, 60, 103, 81, 56, 86, 84, 61, 85, 68, 60, 61, 36, 73, 49, 61, 49, 64, 66, 67, 59, 55, 79, 56, 57, 63, 54, 92, 48, 61, 70, 57, 54, 70, 52, 62, 59, 68, 48, 63, 66, 75, 67, 40, 72, 51, 75, 62, 62, 96, 63, 91, 74, 63, 59, 66, 54, 64, 75, 46, 56, 40, 41, 65, 63, 68, 49, 47, 60, 59, 107, 77, 58, 70, 51, 56, 76, 83, 95, 57, 95, 60, 40, 51, 55, 57, 39, 60, 70, 75, 83, 39, 51, 60, 75, 46, 46, 69, 64, 69, 83, 56, 77, 53, 69, 51, 71, 69, 86, 91, 66, 58, 78, 46, 60, 64, 70, 75, 63, 55, 57, 65, 63, 62, 64, 73, 63, 60, 67, 54, 83, 81, 57, 61, 79, 95, 55, 59, 53, 59, 48, 61, 79, 92, 84, 62, 76, 63, 71, 58, 55, 73, 58, 70, 58, 48, 59, 59, 68, 78, 58, 72, 52, 60, 73, 65, 49, 72, 61, 78, 88, 75, 56, 61, 60, 66, 72, 70, 70, 60, 49, 63, 73, 67, 77, 77, 60, 89, 71, 96, 95, 68, 89, 65, 57, 73, 72, 78, 52, 63, 78, 70, 64, 71, 66, 67, 58, 62, 91, 70, 60, 77, 57, 57, 64, 79, 57, 78, 80, 76, 56, 90, 56, 61, 54, 61, 53, 52, 59, 76, 48, 68, 76, 53, 40, 60, 55, 73, 75, 61, 89, 61, 96, 51, 63, 60, 76, 52, 62, 80, 59, 73, 37, 46, 65, 66, 51, 61, 65, 70, 61, 57, 97, 54, 69, 66, 61, 63, 97, 69, 80, 56, 63, 56, 69, 105, 75, 81, 61, 73, 57, 93, 59, 42, 67, 70, 49, 71, 67, 59, 53, 67, 76, 71, 91, 49, 60, 100, 77, 81, 69, 63, 63, 78, 54, 55, 54, 59, 75, 53, 39, 60, 57, 75, 44, 71, 45, 65, 84, 80, 67, 43, 49, 63, 61, 59, 53, 53, 60, 56, 62, 64, 77, 65, 53, 74, 58, 50, 68, 61, 63, 84, 66, 73, 80, 89, 109, 50, 78, 51, 64, 80, 95, 65, 74, 49, 76, 56, 76, 78, 40, 72, 75, 56, 91, 63, 86, 105, 60, 67, 67, 57, 53, 90, 56, 65, 61, 47, 56, 63, 57, 71, 43, 54, 58, 61, 88, 68, 76, 62, 58, 56, 59, 80, 48, 45, 86, 58, 94, 109, 48, 65, 61, 61, 76, 71, 79, 64, 74, 99, 64, 52, 92, 66, 66, 72, 43, 148, 53, 50, 67, 57, 74, 71, 72, 71, 72, 67, 30, 81, 75, 57, 
66, 80, 66, 76, 75, 61, 55, 83, 78, 61, 98, 68, 86, 95, 65, 52, 56, 52, 60, 62, 81, 57, 104, 77, 65, 51, 67, 74, 61, 78, 92, 56, 61, 54, 87, 75, 55, 64, 49, 101, 63, 55, 100, 77, 79, 67, 55, 63, 94, 71, 57, 59, 59, 87, 52, 72, 59, 56, 62, 65, 62, 55, 68, 55, 59, 63, 57, 58, 70, 43, 61, 58, 64, 68, 88, 40, 68, 51, 70, 67, 57, 63, 56, 79, 51, 71, 58, 69, 50, 62, 64, 89, 57, 66, 72, 81, 81, 48, 68, 68, 57, 73, 59, 59, 63, 70, 61, 79, 68, 62, 75, 78, 61, 54, 47, 71, 58, 56, 54, 52, 52, 52, 66, 61, 57, 59, 74, 54, 66, 67, 51, 47, 58, 83, 65, 67, 82, 62, 90, 72, 59, 53, 53, 58, 70, 61, 90, 54, 39, 77, 71, 62, 56, 90, 81, 67, 82, 54, 59, 95, 58, 81, 40, 102, 63, 61, 68, 43, 74, 56, 59, 51, 88, 64, 73, 64, 81, 74, 59, 54, 81, 49, 58, 76, 63, 57, 60, 58, 68, 67, 69, 57, 68, 52, 60, 60, 83, 60, 52, 66, 63, 63, 80, 47, 75, 66, 58, 49, 58, 83, 79, 61, 62, 61, 42, 91, 79, 66, 67, 64, 72, 70, 77, 72, 61, 53, 58, 50, 46, 55, 101, 44, 50, 62, 71, 57, 56, 74, 53, 61, 52, 70, 87, 89, 60, 59, 46, 67, 69, 66, 66, 67, 52, 74, 52, 82, 71, 74, 55, 67, 64, 62, 47, 74, 59, 63, 64, 77, 70, 61, 54, 48, 59, 75, 93, 76, 66, 54, 52, 57, 48, 67, 66, 58, 114, 73, 75, 51, 77, 49, 63, 56, 58, 78, 50, 90, 52, 60, 62, 68, 77, 49, 64, 77, 64, 55, 60, 102, 60, 43, 67, 62, 54, 74, 66, 57, 66, 69, 51, 79, 82, 75, 62, 58, 70, 59, 70, 63, 59, 49, 71, 66, 74, 44, 57, 70, 75, 88, 53, 106, 70, 98, 106, 63, 93, 69, 58, 70, 81, 68, 70, 63, 63, 71, 77, 64, 47, 39, 71, 65, 56, 72, 79, 68, 76, 82, 61, 54, 57, 81, 60, 44, 55, 43, 62, 71, 89, 57, 66, 79, 80, 80, 56, 58, 67, 77, 67, 51, 57, 80, 72, 69, 52, 48, 61, 81, 88, 100, 58, 90, 58, 90, 67, 56, 50, 52, 63, 70, 56, 70, 57, 62, 58, 95, 109, 84, 54, 47, 45, 55, 47, 103, 68, 59, 57, 47, 62, 58, 65, 61, 94, 64, 51, 68, 60, 77, 51, 53, 62, 80, 63, 73, 57, 77, 64, 79, 40, 72, 56, 50, 38, 50, 60, 67, 45, 57, 58, 58, 51, 81, 73, 83, 63, 73, 42, 91, 57, 51, 54, 78, 61, 50, 66, 70, 80, 55, 64, 85, 76, 81, 61, 58, 78, 66, 69, 66, 77, 71, 57, 59, 71, 75, 53, 75, 71, 60, 54, 34, 59, 85, 67, 47, 71, 80, 62, 72, 109, 43, 56, 70, 63, 67, 50, 66, 46, 75, 85, 59, 85, 87, 42, 56, 81, 40, 60, 59, 74, 46, 56, 59, 58, 54, 62, 67, 68, 67, 64, 58, 84, 76, 83, 62, 66, 65, 71, 72, 41, 57, 58, 58, 71, 80, 60, 54, 72, 64, 61, 71, 67, 71, 84, 62, 74, 58, 73, 67, 67, 50, 69, 42, 69, 60, 56, 64, 64, 64, 78, 55, 72, 50, 57, 81, 81, 62, 60, 53, 65, 65, 59, 62, 54, 60, 64, 61, 85, 68, 65, 62, 69, 72, 56, 64, 53, 68, 67, 80, 52, 54, 72, 64, 71, 73, 70, 58, 49, 74, 52, 62, 70, 65, 78, 63, 65, 56, 93, 47, 73, 81, 60, 56, 68, 63, 72, 52, 73, 50, 76, 70, 62, 80, 47, 66, 56, 56, 81, 84, 46, 53, 74, 63, 61, 81, 74, 63, 61, 70, 79, 65, 66, 118, 55, 57, 112, 50, 76, 53, 78, 69, 66, 79, 64, 103, 49, 54, 65, 54, 56, 55, 53, 48, 74, 52, 78, 57, 71, 63, 88, 63, 61, 80, 60, 57, 70, 65, 62, 65, 57, 79, 56, 62, 66, 62, 61, 56, 56, 77, 53, 79, 74, 56, 59, 60, 56, 54, 60, 46, 151, 69, 59, 65, 53, 63, 77, 61, 63, 58, 67, 61, 107, 55, 55, 56, 82, 124, 54, 64, 64, 52, 63, 81, 64, 62, 58, 53, 68, 68, 84, 71, 64, 67, 94, 63, 60, 67, 56, 66, 57, 56, 61, 61, 58, 59, 67, 57, 40, 62, 63, 62, 95, 77, 74, 58, 72, 65, 74, 42, 94, 62, 56, 82, 65, 62, 59, 59, 73, 65, 63, 68, 69, 69, 61, 68, 58, 101, 80, 78, 69, 80, 109, 79, 53, 66, 66, 86, 69, 88, 62, 56, 68, 92, 65, 60, 64, 71, 55, 76, 50, 64, 47, 80, 52, 68, 63, 68, 58, 69, 59, 66, 51, 72, 55, 57, 71, 64, 59, 62, 61, 59, 77, 53, 56, 66, 60, 58, 69, 85, 51, 88, 98, 52, 57, 48, 60, 85, 55, 57, 63, 57, 59, 69, 75, 70, 62, 54, 68, 61, 65, 61, 75, 75, 58, 81, 69, 47, 76, 82, 57, 91, 66, 59, 71, 49, 58, 
57, 54, 65, 46, 72, 54, 70, 55, 62, 79, 55, 85, 66, 66, 57, 64, 57, 65, 80, 69, 73, 84, 74, 60, 69, 78, 81, 83, 61, 59, 80, 69, 54, 69, 62, 76, 65, 56, 63, 71, 68, 57, 59, 69, 50, 58, 52, 65, 56, 68, 75, 60, 71, 67, 62, 64, 53, 71, 55, 66, 71, 55, 62, 60, 70, 64, 75, 74, 70, 55, 73, 60, 60, 50, 60, 67, 75, 63, 82, 77, 57, 73, 46, 72, 83, 72, 67, 66, 49, 64, 58, 56, 90, 64, 48, 64, 93, 64, 62, 59, 64, 68, 62, 69, 43, 95, 50, 59, 52, 57, 60, 78, 84, 54, 62, 63, 62, 58, 46, 78, 73, 70, 83, 67, 51, 65, 64, 56, 62, 61, 48, 61, 57, 49, 66, 86, 64, 74, 92, 62, 76, 55, 78, 60, 42, 51, 44, 55, 79, 58, 103, 81, 63, 52, 76, 90, 61, 53, 63, 77, 64, 48, 72, 65, 80, 68, 66, 55, 46, 76, 62, 58, 47, 74, 52, 77, 87, 58, 61, 69, 55, 59, 80, 51, 52, 59, 99, 77, 60, 75, 76, 81, 59, 61, 67, 62, 70, 57, 61, 61, 68, 54, 69, 81, 51, 73, 67, 59, 73, 61, 101, 67, 43, 65, 63, 57, 63, 66, 101, 78, 52, 41, 49, 74, 68, 62, 75, 60, 74, 76, 66, 55, 49, 78, 72, 70, 60, 67, 116, 50, 66, 66, 66, 43, 77, 48, 55, 59, 56, 82, 47, 61, 68, 65, 83, 68, 57, 79, 51, 93, 75, 66, 62, 90, 80, 66, 67, 56, 65, 55, 80, 60, 57, 87, 59, 69, 64, 56, 69, 76, 63, 51, 69, 60, 58, 61, 74, 102, 64, 81, 69, 62, 57, 59, 80, 60, 70, 61, 62, 90, 61, 71, 86, 62, 73, 80, 48, 63, 53, 60, 93, 52, 70, 69, 63, 59, 48, 75, 47, 59, 71, 44, 71, 67, 54, 61, 65, 61, 87, 85, 69, 72, 62, 59, 51, 77, 76, 64, 63, 64, 71, 75, 77, 70, 57, 60, 57, 77, 61, 92, 68, 53, 69, 68, 75, 57, 54, 69, 57, 50, 52, 97, 58, 45, 56, 54, 52, 51, 53, 44, 66, 52, 76, 48, 70, 53, 75, 77, 73, 67, 68, 58, 57, 53, 68, 69, 63, 61, 71, 57, 77, 98, 46, 57, 58, 61, 57, 89, 70, 49, 67, 84, 62, 50, 58, 53, 69, 52, 43, 59, 64, 71, 83, 57, 78, 58, 70, 66, 60, 55, 58, 48, 69, 62, 50, 53, 66, 56, 54, 90, 72, 49, 60, 68, 65, 63, 58, 56, 65, 59, 61, 55, 71, 66, 65, 57, 72, 90, 91, 72, 59, 61, 79, 44, 76, 53, 56, 58, 55, 73, 69, 56, 60, 56, 75, 51, 64, 113, 65, 59, 60, 76, 70, 69, 57, 96, 43, 70, 87, 78, 50, 60, 62, 65, 65, 60, 59, 74, 60, 63, 72, 64, 67, 97, 65, 82, 63, 68, 64, 48, 58, 55, 64, 75, 68, 62, 60, 68, 67, 68, 74, 58, 64, 61, 68, 40, 57, 58, 59, 60, 50, 53, 58, 60, 79, 56, 51, 66, 62, 67, 59, 105, 71, 100, 75, 68, 47, 83, 68, 44, 52, 83, 54, 60, 65, 71, 45, 60, 57, 87, 73, 67, 65, 61, 66, 52, 63, 50, 70, 78, 69, 62, 58, 66, 63, 65, 66, 60, 60, 42, 74, 62, 65, 85, 68, 70, 58, 62, 57, 122, 68, 61, 59, 71, 66, 80, 67, 40, 61, 62, 47, 61, 81, 74, 53, 75, 103, 61, 81, 79, 61, 74, 69, 75, 72, 42, 69, 60, 70, 61, 66, 98, 74, 36, 62, 71, 57, 53, 70, 78, 85, 54, 93, 68, 72, 52, 59, 84, 68, 62, 67, 58, 72, 47, 55, 54, 83, 72, 55, 51, 57, 75, 69, 60, 58, 51, 61, 47, 55, 65, 70, 84, 66, 69, 62, 52, 67, 56, 80, 54, 74, 67, 108, 90, 60, 50, 91, 49, 97, 57, 133, 78, 65, 55, 47, 58, 70, 61, 54, 72, 67, 59, 57, 68, 67, 67, 62, 63, 50, 56, 68, 39, 67, 62, 67, 54, 58, 71, 54, 67, 46, 60, 73, 63, 69, 58, 69, 81, 67, 56, 64, 59, 59, 52, 64, 75, 50, 87, 54, 99, 59, 54, 68, 65, 68, 60, 66, 55, 59, 58, 53, 70, 69, 74, 79, 52, 58, 59, 71, 55, 59, 48, 62, 59, 65, 60, 73, 68, 74, 77, 58, 63, 82, 50, 79, 75, 60, 65, 61, 83, 62, 42, 56, 61, 69, 103, 66, 84, 73, 66, 60, 58, 74, 87, 63, 57, 65, 68, 72, 60, 65, 57, 42, 57, 84, 60, 63, 83, 60, 64, 53, 67, 69, 58, 62, 35, 71, 68, 64, 64, 87, 50, 70, 60, 101, 56, 69, 83, 75, 59, 68, 56, 77, 59, 62, 59, 59, 57, 58, 71, 63, 60, 58, 74, 67, 49, 74, 60, 124, 61, 58, 84, 52, 51, 56, 71, 78, 66, 69, 65, 66, 90, 72, 48, 60, 62, 57, 54, 63, 55, 76, 80, 78, 68, 69, 93, 69, 76, 76, 55, 69, 71, 56, 109, 56, 64, 77, 60, 77, 80, 52, 72, 75, 67, 69, 65, 62, 63, 55, 62, 59, 64, 82, 128, 
128, 59, 92, 65, 65, 60, 53, 66, 46, 66, 64, 50, 65, 58, 54, 61, 75, 49, 55, 65, 55, 64, 66, 58, 71, 72, 49, 91, 57, 68, 65, 70, 52, 66, 60, 57, 30, 56, 73, 49, 55, 106, 77, 62, 57, 49, 41, 51, 97, 57, 70, 61, 58, 61, 139, 64, 59, 53, 62, 58, 85, 61, 64, 71, 58, 82, 64, 59, 77, 59, 59, 58, 56, 90, 55, 70, 77, 64, 61, 63, 60, 61, 89, 79, 66, 57, 60, 57, 69, 60, 60, 59, 76, 57, 63, 79, 58, 61, 58, 67, 73, 61, 76, 70, 68, 62, 77, 58, 65, 70, 77, 53, 70, 80, 66, 74, 67, 62, 66, 60, 71, 65, 78, 56, 74, 59, 63, 62, 104, 107, 71, 85, 72, 54, 103, 64, 74, 52, 30, 57, 59, 63, 73, 97, 67, 82, 36, 76, 75, 60, 65, 68, 50, 91, 57, 51, 72, 64, 45, 61, 59, 68, 69, 90, 80, 57, 52, 75, 65, 74, 68, 58, 60, 73, 58, 76, 69, 69, 59, 61, 37, 55, 58, 95, 62, 64, 72, 55, 64, 46, 70, 49, 71, 58, 56, 59, 69, 55, 65, 48, 54, 69, 81, 58, 65, 71, 74, 65, 54, 70, 68, 68, 86, 62, 81, 94, 57, 70, 61, 65, 41, 84, 49, 82, 44, 80, 56, 66, 68, 60, 56, 72, 64, 75, 54, 84, 54, 80, 76, 67, 67, 56, 71, 66, 62, 99, 77, 68, 56, 59, 54, 63, 59, 69, 61, 78, 72, 50, 76, 62, 91, 57, 67, 53, 58, 61, 82, 68, 58, 44, 70, 57, 60, 66, 60, 62, 56, 53, 60, 82, 53, 54, 101, 49, 59, 67, 62, 55, 57, 84, 57, 82, 46, 60, 75, 57, 85, 55, 44, 59, 66, 66, 80, 58, 72, 63, 84, 62, 65, 81, 68, 68, 58, 82, 60, 59, 85, 68, 70, 83, 63, 70, 67, 59, 60, 51, 73, 56, 71, 71, 49, 57, 44, 78, 69, 67, 61, 67, 62, 92, 52, 76, 46, 67, 61, 73, 37, 57, 53, 54, 61, 61, 56, 69, 63, 28, 57, 69, 51, 62, 53, 64, 78, 72, 68, 55, 52, 52, 72, 86, 68, 65, 72, 85, 55, 67, 81, 59, 60, 59, 50, 103, 65, 89, 66, 54, 66, 66, 69, 60, 57, 63, 61, 79, 67, 59, 48, 85, 88, 75, 62, 45, 53, 114, 76, 60, 51, 73, 64, 49, 73, 51, 87, 64, 59, 57, 74, 64, 61, 61, 61, 57, 69, 57, 53, 86, 54, 59, 65, 63, 67, 78, 64, 54, 56, 72, 66, 56, 60, 87, 61, 56, 115, 73, 64, 86, 52, 59, 62, 58, 77, 48, 64, 66, 48, 59, 83, 60, 64, 60, 79, 72, 58, 72, 57, 62, 58, 39, 59, 63, 63, 71, 71, 103, 54, 54, 85, 70, 54, 58, 62, 81, 59, 59, 59, 60, 70, 69, 47, 80, 63, 62, 52, 64, 73, 60, 65, 60, 56, 51, 61, 66, 74, 59, 63, 64, 67, 71, 66, 101, 53, 56, 69, 49, 79, 74, 95, 59, 61, 77, 65, 54, 59, 70, 59, 65, 75, 58, 90, 42, 100, 83, 79, 59, 62, 55, 64, 75, 62, 60, 57, 34, 62, 78, 65, 54, 60, 63, 73, 87, 60, 57, 61, 66, 68, 60, 49, 76, 62, 66, 52, 71, 65, 54, 61, 48, 74, 52, 60, 70, 81, 60, 83, 55, 58, 70, 58, 61, 57, 63, 60, 53, 51, 64, 46, 64, 91, 62, 66, 70, 69, 65, 58, 70, 100, 62, 63, 73, 83, 86, 60, 64, 49, 59, 97, 60, 51, 88, 67, 72, 76, 69, 74, 89, 55, 67, 66, 54, 61, 60, 59, 55, 81, 58, 55, 105, 76, 59, 64, 59, 75, 99, 59, 115, 66, 78, 49, 126, 50, 70, 72, 56, 58, 75, 80, 67, 67, 55, 63, 89, 67, 59, 75, 68, 115, 66, 68, 56, 55, 73, 65, 88, 69, 93, 75, 61, 61, 63, 64, 62, 59, 98, 63, 57, 61, 51, 54, 56, 61, 66, 54, 62, 63, 55, 69, 52, 56, 70, 52, 51, 74, 49, 59, 61, 61, 66, 69, 66, 76, 60, 60, 66, 51, 56, 59, 119, 53, 66, 75, 81, 47, 60, 61, 100, 74, 48, 77, 65, 63, 57, 69, 77, 67, 63, 59, 61, 60, 85, 84, 60, 86, 60, 71, 74, 54, 47, 94, 59, 48, 62, 58, 54, 62, 70, 60, 58, 57, 76, 60, 89, 57, 82, 72, 62, 78, 60, 52, 50, 57, 84, 59, 75, 88, 52, 61, 45, 61, 58, 89, 78, 60, 77, 63, 60, 62, 56, 78, 69, 63, 57, 99, 67, 64, 53, 60, 75, 60, 68, 58, 60, 63, 63, 76, 60, 50, 56, 58, 42, 65, 112, 67, 72, 69, 62, 54, 99, 71, 68, 54, 72, 78, 60, 81, 59, 54, 83, 54, 86, 60, 83, 49, 67, 51, 49, 62, 109, 62, 41, 47, 50, 60, 86, 59, 85, 65, 48, 61, 68, 70, 88, 52, 73, 44, 86, 48, 59, 62, 41, 70, 61, 77, 57, 103, 61, 53, 70, 84, 65, 69, 100, 62, 64, 60, 77, 95, 73, 60, 72, 43, 50, 67, 78, 69, 37, 74, 55, 58, 59, 65, 53, 91, 51, 
85, 71, 70, 77, 66, 74, 77, 79, 88, 72, 63, 78, 76, 67, 76, 60, 61, 52, 89, 61, 63, 91, 103, 62, 59, 47, 62, 52, 67, 55, 67, 80, 60, 61, 69, 43, 72, 84, 67, 58, 71, 71, 54, 81, 106, 52, 67, 59, 61, 87, 83, 72, 54, 68, 46, 107, 78, 46, 90, 84, 70, 99, 49, 88, 37, 52, 71, 55, 65, 59, 82, 51, 65, 67, 74, 55, 53, 67, 66, 55, 48, 49, 73, 94, 67, 61, 57, 89, 61, 55, 62, 54, 63, 70, 48, 55, 74, 54, 52, 60, 82, 116, 48, 79, 61, 54, 49, 60, 44, 94, 59, 73, 39, 56, 96, 46, 79, 48, 69, 71, 71, 50, 85, 74, 71, 68, 57, 50, 70, 61, 81, 58, 68, 55, 66, 47, 55, 59, 55, 64, 94, 69, 66, 70, 63, 64, 67, 79, 61, 70, 64, 72, 96, 76, 44, 53, 62, 79, 71, 64, 44, 68, 88, 79, 56, 53, 72, 54, 63, 65, 54, 56, 47, 77, 85, 61, 55, 56, 67, 42, 76, 89, 52, 82, 63, 61, 75, 55, 81, 102, 63, 41, 82, 91, 67, 61, 52, 51, 82, 61, 58, 37, 82, 93, 75, 55, 81, 58, 66, 66, 70, 84, 52, 77, 67, 67, 66, 58, 50, 58, 76, 46, 84, 61, 66, 59, 56, 70, 72, 81, 54, 52, 56, 47, 76, 106, 62, 73, 77, 80, 62, 78, 78, 78, 66, 93, 67, 65, 54, 42, 75, 46, 99, 50, 50, 62, 56, 100, 75, 67, 56, 52, 54, 91, 70, 83, 53, 68, 56, 56, 56, 47, 56, 75, 67, 52, 72, 69, 70, 66, 63, 58, 110, 58, 54, 58, 67, 76, 74, 63, 62, 63, 74, 56, 72, 50, 64, 47, 74, 74, 56, 52, 63, 88, 59, 66, 64, 91, 59, 74, 70, 66, 50, 66, 60, 58, 51, 52, 56, 82, 57, 69, 54, 57, 53, 39, 44, 91, 70, 57, 83, 40, 92, 81, 66, 79, 72, 87, 83, 43, 47, 56, 71, 66, 57, 55, 64, 75, 71, 74, 88, 82, 67, 84, 74, 67, 66, 56, 51, 65, 64, 56, 59, 65, 57, 77, 64, 70, 69, 56, 65, 81, 43, 43, 45, 49, 82, 49, 65, 64, 63, 58, 63, 63, 85, 73, 67, 72, 76, 67, 65, 63, 57, 44, 65, 77, 76, 59, 54, 44, 98, 59, 74, 60, 138, 55, 55, 90, 82, 48, 64, 69, 46, 57, 52, 57, 62, 63, 61, 50, 102, 57, 69, 68, 52, 70, 74, 95, 99, 58, 69, 69, 85, 49, 55, 70, 80, 62, 69, 84, 87, 81, 57, 85, 72, 43, 45, 57, 47, 72, 41, 46, 65, 75, 68, 87, 61, 65, 86, 61, 66, 55, 87, 57, 64, 57, 30, 72, 64, 69, 80, 83, 63, 77, 76, 65, 56, 99, 56, 75, 56, 49, 79, 56, 74, 64, 99, 41, 86, 57, 57, 65, 69, 57, 57, 64, 87, 81, 62, 54, 68, 63, 90, 82, 57, 67, 73, 50, 55, 59, 43, 92, 59, 92, 91, 62, 48, 65, 85, 49, 124, 51, 61, 68, 58, 74, 49, 70, 68, 48, 73, 57, 102, 86, 79, 53, 85, 91, 85, 41, 54, 74, 69, 62, 69, 81, 73, 55, 97, 65, 64, 71, 53, 38, 72, 48, 86, 55, 63, 74, 67, 39, 52, 67, 71, 71, 45, 54, 69, 83, 84, 87, 72, 48, 52, 76, 48, 64, 55, 68, 57, 64, 60, 80, 69, 68, 66, 56, 57, 54, 58, 43, 64, 71, 63, 48, 79, 73, 60, 53, 60, 59, 51, 56, 54, 91, 66, 49, 49, 76, 60, 67, 87, 59, 53, 71, 84, 83, 77, 56, 65, 62, 59, 58, 56, 89, 101, 39, 45, 62, 78, 52, 64, 67, 73, 53, 50, 58, 51, 56, 48, 57, 73, 72, 79, 56, 66, 47, 52, 58, 89, 58, 59, 53, 47, 62, 81, 70, 53, 62, 62, 52, 58, 56, 59, 80, 73, 57, 47, 81, 66, 63, 53, 55, 80, 57, 49, 64, 59, 52, 52, 74, 76, 64, 76, 72, 61, 89, 52, 64, 76, 53, 59, 79, 73, 82, 57, 59, 50, 66, 71, 48, 59, 93, 68, 56, 70, 51, 75, 54, 43, 66, 57, 61, 79, 77, 62, 55, 55, 76, 77, 64, 66, 74, 70, 59, 46, 65, 81, 75, 70, 75, 83, 74, 57, 104, 59, 63, 53, 108, 61, 56, 79, 110, 56, 85, 60, 66, 41, 53, 57, 50, 59, 52, 101, 76, 106, 68, 55, 62, 61, 61, 57, 56, 77, 46, 62, 60, 48, 56, 54, 50, 92, 73, 65, 63, 132, 66, 81, 52, 60, 54, 72, 89, 57, 45, 74, 58, 63, 55, 63, 64, 59, 111, 54, 64, 60, 64, 82, 46, 60, 71, 90, 56, 64, 63, 63, 76, 70, 68, 64, 42, 66, 65, 81, 61, 66, 51, 89, 69, 64, 47, 87, 65, 73, 61, 57, 50, 59, 89, 49, 43, 69, 107, 75, 56, 62, 64, 96, 76, 60, 45, 66, 64, 68, 68, 75, 73, 67, 68, 106, 94, 51, 89, 74, 42, 76, 53, 89, 51, 73, 81, 51, 69, 61, 70, 69, 55, 60, 71, 52, 60, 56, 52, 54, 45, 69, 65, 53, 46, 83, 81, 58, 
59, 53, 64, 93, 63, 77, 61, 112, 65, 62, 38, 63, 84, 58, 55, 63, 66, 40, 77, 63, 60, 55, 52, 50, 60, 63, 47, 81, 55, 43, 89, 52, 43, 63, 52, 50, 63, 72, 54, 84, 55, 60, 64, 65, 61, 65, 64, 56, 60, 63, 66, 106, 64, 74, 55, 55, 47, 49, 78, 60, 68, 78, 54, 74, 66, 50, 84, 107, 75, 77, 88, 63, 61, 53, 61, 58, 57, 61, 76, 38, 98, 60, 63, 56, 72, 60, 47, 77, 75, 61, 62, 45, 47, 58, 69, 55, 69, 55, 55, 66, 72, 69, 79, 89, 86, 47, 55, 47, 42, 62, 59, 49, 82, 84, 64, 59, 56, 65, 39, 72, 58, 43, 66, 56, 77, 56, 73, 50, 56, 50, 52, 64, 61, 124, 53, 46, 60, 96, 61, 47, 67, 74, 52, 47, 54, 69, 66, 45, 62, 58, 67, 93, 65, 63, 53, 37, 51, 75, 62, 70, 51, 50, 48, 69, 57, 53, 74, 80, 63, 67, 51, 72, 73, 85, 74, 54, 62, 49, 75, 58, 76, 77, 48, 78, 65, 55, 58, 53, 53, 61, 69, 76, 60, 91, 80, 74, 70, 51, 99, 68, 65, 135, 54, 50, 75, 59, 50, 43, 88, 64, 56, 62, 54, 56, 71, 49, 68, 75, 67, 56, 66, 70, 66, 57, 92, 80, 53, 60, 68, 42, 65, 73, 53, 70, 42, 62, 55, 92, 69, 58, 68, 65, 71, 61, 56, 69, 58, 58, 81, 38, 77, 64, 62, 53, 56, 49, 79, 49, 67, 57, 61, 76, 68, 104, 63, 74, 46, 66, 90, 62, 54, 46, 66, 69, 73, 63, 53, 93, 56, 62, 73, 68, 55, 65, 63, 81, 61, 55, 64, 63, 55, 64, 80, 60, 58, 27, 76, 50, 80, 62, 65, 63, 92, 76, 58, 101, 57, 68, 55, 74, 45, 61, 72, 73, 40, 58, 60, 56, 67, 72, 55, 61, 69, 66, 56, 56, 96, 65, 62, 83, 77, 61, 51, 65, 54, 61, 64, 47, 63, 85, 74, 63, 64, 61, 73, 81, 61, 60, 47, 56, 68, 90, 67, 55, 76, 71, 76, 49, 51, 57, 113, 73, 59, 82, 74, 97, 76, 52, 69, 74, 73, 95, 52, 58, 73, 70, 66, 74, 63, 50, 51, 71, 51, 66, 68, 76, 54, 56, 68, 60, 64, 83, 55, 51, 70, 46, 72, 63, 59, 87, 53, 66, 83, 68, 60, 58, 56, 92, 61, 58, 55, 90, 58, 44, 83, 94, 58, 75, 98, 56, 75, 60, 49, 68, 49, 63, 50, 65, 56, 67, 55, 60, 52, 66, 73, 50, 48, 77, 72, 50, 46, 82, 60, 79, 72, 60, 57, 53, 53, 82, 82, 76, 57, 44, 80, 60, 60, 59, 55, 54, 81, 50, 74, 79, 60, 50, 85, 74, 67, 68, 51, 71, 63, 58, 68, 53, 82, 67, 48, 63, 75, 72, 65, 68, 51, 63, 61, 79, 68, 69, 69, 73, 63, 73, 80, 69, 59, 47, 36, 73, 72, 60, 68, 87, 80, 48, 65, 52, 60, 52, 57, 61, 64, 69, 71, 86, 104, 86, 63, 64, 72, 66, 55, 64, 47, 60, 78, 82, 56, 60, 66, 55, 64, 56, 51, 58, 60, 58, 64, 83, 67, 69, 62, 65, 74, 60, 53, 60, 54, 66, 57, 51, 68, 137, 78, 74, 70, 64, 54, 63, 82, 68, 88, 78, 58, 81, 56, 77, 65, 40, 72, 69, 73, 48, 96, 64, 61, 63, 91, 81, 64, 68, 74, 64, 50, 83, 66, 64, 56, 61, 61, 42, 88, 51, 72, 57, 58, 58, 51, 63, 62, 80, 80, 73, 88, 56, 64, 98, 61, 81, 88, 54, 66, 59, 75, 65, 73, 60, 43, 55, 97, 61, 47, 64, 64, 67, 71, 60, 72, 80, 66, 56, 47, 84, 74, 49, 58, 71, 71, 46, 60, 91, 55, 50, 42, 61, 97, 50, 74, 74, 55, 39, 76, 61, 54, 69, 66, 66, 60, 86, 71, 60, 76, 57, 55, 51, 39, 46, 70, 54, 64, 72, 63, 61, 72, 71, 49, 90, 73, 56, 49, 58, 64, 68, 40, 71, 53, 57, 70, 72, 58, 61, 51, 88, 54, 65, 64, 54, 51, 47, 71, 66, 54, 46, 47, 57, 68, 53, 81, 106, 69, 73, 67, 80, 63, 59, 87, 62, 48, 94, 55, 47, 108, 78, 76, 54, 72, 83, 72, 64, 63, 60, 50, 62, 77, 59, 69, 82, 53, 78, 80, 60, 55, 85, 60, 59, 58, 63, 80, 52, 58, 82, 73, 67, 63, 79, 53, 104, 58, 46, 53, 65, 55, 70, 56, 83, 67, 95, 69, 94, 71, 54, 53, 65, 64, 74, 81, 75, 60, 64, 60, 69, 72, 49, 79, 55, 88, 62, 58, 58, 71, 88, 70, 60, 69, 49, 70, 64, 53, 64, 62, 58, 67, 65, 60, 56, 97, 56, 65, 46, 52, 68, 53, 55, 69, 60, 76, 53, 70, 48, 75, 61, 56, 61, 95, 70, 73, 63, 57, 57, 70, 63, 50, 61, 70, 66, 59, 64, 74, 51, 83, 51, 58, 79, 75, 63, 63, 59, 64, 63, 70, 87, 63, 92, 56, 75, 62, 59, 59, 56, 57, 61, 60, 51, 50, 58, 75, 63, 66, 51, 58, 75, 72, 67, 64, 66, 79, 56, 59, 78, 65, 56, 75, 59, 48, 
65, 68, 63, 43, 76, 64, 79, 90, 76, 63, 97, 68, 73, 71, 49, 84, 49, 61, 46, 80, 59, 69, 49, 105, 76, 74, 78, 66, 68, 67, 51, 63, 64, 69, 66, 69, 78, 80, 37, 71, 63, 60, 74, 72, 43, 62, 79, 106, 67, 56, 57, 103, 50, 79, 68, 66, 69, 60, 53, 86, 74, 60, 78, 60, 71, 66, 39, 72, 57, 58, 63, 85, 61, 56, 60, 49, 87, 58, 77, 102, 86, 63, 49, 64, 94, 49, 64, 73, 71, 46, 70, 61, 80, 56, 56, 53, 54, 79, 79, 62, 51, 59, 57, 74, 74, 69, 84, 55, 57, 56, 68, 66, 77, 62, 68, 91, 72, 61, 110, 55, 55, 62, 50, 47, 61, 73, 62, 52, 53, 53, 85, 51, 65, 63, 73, 52, 131, 77, 64, 56, 61, 64, 69, 68, 79, 94, 59, 104, 70, 55, 58, 66, 70, 36, 49, 67, 65, 71, 62, 66, 66, 72, 44, 83, 71, 58, 81, 46, 72, 75, 60, 52, 67, 84, 65, 69, 51, 56, 64, 76, 63, 61, 65, 84, 51, 81, 77, 73, 66, 82, 77, 66, 94, 81, 65, 57, 45, 63, 50, 59, 61, 59, 57, 61, 59, 81, 64, 54, 99, 53, 62, 82, 59, 62, 105, 53, 54, 54, 61, 62, 63, 58, 91, 48, 56, 63, 75, 67, 64, 50, 61, 79, 63, 83, 66, 147, 61, 72, 60, 86, 73, 67, 38, 86, 86, 55, 53, 51, 49, 67, 61, 66, 59, 51, 60, 45, 48, 66, 49, 61, 67, 59, 36, 50, 64, 67, 63, 70, 65, 62, 62, 51, 76, 71, 64, 50, 62, 56, 82, 72, 46, 55, 61, 75, 66, 53, 60, 54, 72, 66, 66, 79, 76, 66, 67, 80, 66, 38, 80, 64, 39, 57, 53, 61, 58, 63, 71, 63, 69, 70, 49, 65, 73, 54, 58, 35, 69, 56, 63, 70, 75, 66, 62, 85, 64, 78, 78, 89, 54, 56, 45, 37, 86, 56, 63, 54, 44, 84, 66, 79, 83, 80, 59, 55, 69, 68, 67, 64, 73, 51, 69, 61, 65, 67, 65, 65, 64, 54, 55, 61, 76, 59, 76, 43, 69, 55, 75, 39, 84, 67, 93, 59, 73, 47, 63, 75, 65, 45, 86, 55, 63, 66, 70, 86, 83, 57, 64, 58, 71, 77, 53, 54, 88, 55, 58, 60, 75, 78, 65, 62, 46, 69, 70, 55, 59, 70, 81, 64, 71, 65, 60, 56, 68, 72, 80, 53, 53, 55, 86, 76, 47, 59, 53, 70, 50, 62, 54, 53, 56, 62, 66, 69, 58, 57, 49, 57, 86, 78, 81, 105, 133, 65, 57, 58, 66, 66, 70, 77, 57, 60, 62, 69, 62, 71, 82, 63, 50, 75, 48, 58, 71, 74, 52, 79, 71, 45, 84, 77, 67, 62, 60, 38, 46, 71, 51, 70, 61, 44, 54, 50, 69, 48, 77, 57, 45, 64, 53, 64, 78, 97, 65, 93, 64, 66, 71, 89, 55, 79, 57, 59, 55, 43, 73, 70, 80, 63, 57, 82, 44, 88, 57, 56, 69, 59, 24, 66, 71, 68, 76, 70, 82, 82, 77, 79, 58, 76, 56, 69, 74, 69, 58, 64, 54, 22, 52, 81, 101, 61, 64, 43, 71, 66, 84, 48, 77, 46, 69, 104, 61, 48, 79, 76, 62, 53, 72, 75, 64, 63, 75, 56, 85, 67, 63, 45, 75, 53, 75, 69, 58, 56, 63, 64, 61, 68, 47, 74, 83, 54, 62, 57, 68, 85, 49, 63, 84, 67, 56, 63, 79, 53, 62, 76, 91, 45, 63, 76, 50, 61, 52, 46, 61, 73, 83, 42, 82, 65, 68, 66, 51, 74, 77, 67, 63, 65, 63, 65, 80, 73, 52, 72, 65, 76, 58, 68, 56, 76, 70, 67, 67, 71, 55, 74, 41, 62, 74, 63, 80, 57, 81, 66, 73, 69, 77, 70, 52, 76, 75, 75, 93, 57, 87, 66, 63, 106, 80, 46, 65, 60, 71, 58, 49, 61, 94, 69, 67, 52, 57, 56, 48, 50, 54, 52, 73, 58, 56, 77, 59, 63, 59, 70, 57, 58, 79, 71, 69, 69, 64, 76, 85, 67, 67, 52, 73, 61, 70, 64, 54, 69, 75, 67, 49, 45, 80, 67, 77, 82, 90, 56, 70, 35, 51, 89, 54, 56, 50, 61, 82, 59, 90, 74, 92, 47, 68, 65, 57, 66, 108, 74, 46, 67, 53, 60, 64, 38, 56, 59, 54, 60, 64, 73, 56, 114, 58, 51, 67, 73, 45, 75, 75, 64, 68, 55, 76, 55, 66, 55, 70, 71, 73, 92, 43, 96, 63, 81, 65, 65, 50, 68, 91, 72, 82, 55, 52, 81, 60, 67, 73, 49, 60, 53, 110, 68, 68, 65, 68, 83, 79, 54, 61, 62, 45, 64, 41, 55, 55, 53, 54, 91, 50, 79, 33, 91, 70, 49, 91, 57, 55, 66, 57, 165, 74, 94, 64, 70, 46, 53, 93, 74, 95, 67, 80, 96, 68, 71, 54, 65, 56, 76, 59, 53, 70, 58, 45, 76, 67, 60, 60, 73, 65, 63, 64, 66, 70, 86, 60, 64, 50, 121, 49, 74, 70, 80, 76, 46, 54, 70, 64, 61, 53, 80, 90, 63, 65, 69, 50, 61, 65, 81, 71, 78, 52, 76, 78, 73, 90, 55, 65, 62, 69, 76, 67, 67, 62, 71, 
75, 62, 56, 66, 61, 69, 74, 63, 57, 50, 53, 59, 72, 65, 85, 77, 70, 68, 56, 72, 58, 81, 70, 75, 79, 63, 75, 62, 78, 100, 72, 75, 61, 68, 53, 63, 77, 65, 79, 75, 88, 69, 64, 47, 52, 68, 49, 33, 80, 74, 65, 69, 64, 49, 62, 52, 88, 71, 77, 76, 67, 59, 100, 64, 51, 57, 65, 54, 58, 59, 79, 77, 61, 74, 56, 70, 63, 53, 70, 56, 49, 59, 54, 73, 79, 78, 82, 57, 61, 59, 86, 57, 78, 59, 48, 66, 110, 88, 69, 41, 60, 70, 65, 63, 74, 54, 81, 71, 60, 71, 91, 56, 74, 64, 66, 64, 64, 63, 56, 64, 44, 59, 82, 57, 54, 66, 56, 58, 77, 70, 79, 81, 59, 76, 72, 71, 79, 62, 61, 96, 52, 57, 64, 70, 74, 59, 62, 59, 62, 77, 64, 97, 77, 68, 92, 47, 58, 73, 65, 49, 67, 67, 69, 72, 56, 58, 55, 77, 66, 90, 91, 79, 55, 53, 61, 58, 46, 75, 71, 96, 58, 76, 58, 51, 68, 43, 61, 73, 80, 76, 59, 67, 58, 56, 49, 61, 64, 66, 68, 71, 80, 67, 51, 67, 67, 100, 60, 71, 73, 69, 54, 78, 79, 77, 59, 55, 58, 50, 59, 66, 66, 72, 62, 44, 77, 67, 78, 47, 63, 65, 61, 86, 59, 60, 50, 85, 101, 65, 45, 81, 54, 70, 55, 66, 54, 61, 79, 77, 58, 66, 49, 57, 53, 72, 56, 61, 59, 83, 60, 62, 51, 81, 66, 50, 56, 80, 51, 75, 98, 76, 56, 51, 49, 64, 64, 60, 57, 70, 65, 66, 68, 70, 50, 74, 53, 55, 70, 59, 42, 60, 77, 72, 62, 60, 56, 75, 76, 49, 91, 55, 71, 59, 60, 47, 59, 72, 70, 54, 68, 96, 53, 74, 74, 58, 76, 60, 65, 96, 88, 60, 58, 74, 72, 74, 138, 57, 55, 75, 68, 55, 66, 57, 83, 52, 71, 68, 54, 64, 57, 68, 81, 52, 53, 66, 50, 59, 30, 43, 64, 46, 77, 58, 62, 76, 71, 54, 67, 60, 55, 72, 74, 70, 81, 59, 61, 65, 66, 62, 60, 80, 54, 60, 67, 86, 65, 62, 60, 66, 52, 61, 60, 70, 60, 61, 57, 58, 55, 57, 53, 50, 59, 61, 79, 69, 65, 76, 57, 50, 54, 53, 62, 71, 52, 74, 80, 103, 60, 105, 76, 54, 65, 69, 79, 48, 56, 62, 73, 39, 97, 60, 70, 59, 63, 94, 67, 78, 78, 58, 60, 67, 56, 73, 62, 56, 68, 58, 87, 63, 60, 81, 84, 48, 59, 67, 62, 67, 58, 68, 68, 63, 79, 61, 85, 56, 58, 63, 61, 58, 77, 60, 54, 60, 57, 63, 60, 59, 75, 60, 64, 59, 78, 66, 45, 73, 45, 63, 56, 54, 66, 63, 80, 50, 59, 59, 58, 84, 75, 80, 74, 62, 91, 49, 63, 67, 56, 55, 64, 57, 79, 75, 67, 58, 63, 65, 62, 58, 54, 80, 41, 62, 75, 54, 61, 49, 68, 71, 67, 65, 56, 77, 127, 63, 56, 62, 58, 61, 64, 54, 51, 61, 80, 68, 85, 58, 71, 60, 112, 65, 87, 69, 79, 63, 66, 60, 66, 76, 71, 86, 61, 73, 72, 54, 60, 64, 53, 76, 69, 62, 66, 64, 59, 59, 58, 57, 55, 61, 64, 68, 72, 61, 54, 61, 84, 63, 49, 65, 59, 57, 70, 69, 63, 63, 60, 53, 58, 118, 61, 69, 62, 62, 53, 52, 70, 59, 73, 66, 72, 70, 58, 59, 63, 62, 59, 62, 69, 54, 69, 59, 65, 91, 70, 64, 58, 48, 87, 51, 73, 64, 71, 55, 64, 54, 70, 83, 68, 57, 85, 57, 69, 65, 56, 54, 76, 63, 78, 66, 69, 69, 58, 69, 61, 59, 73, 59, 60, 70, 56, 65, 47, 72, 76, 55, 59, 91, 68, 66, 68, 87, 84, 63, 53, 62, 69, 51, 64, 78, 58, 76, 74, 51, 79, 60, 63, 56, 60, 64, 54, 67, 67, 78, 63, 104, 69, 60, 60, 78, 47, 81, 65, 71, 81, 70, 60, 74, 63, 52, 60, 65, 42, 76, 68, 64, 65, 60, 91, 61, 70, 101, 71, 61, 57, 70, 45, 74, 53, 74, 63, 69, 59, 63, 56, 57, 71, 69, 70, 70, 67, 76, 81, 56, 57, 66, 66, 58, 86, 59, 52, 46, 55, 58, 71, 47, 71, 82, 112, 164, 70, 56, 59, 62, 65, 91, 70, 58, 61, 66, 72, 75, 68, 79, 66, 62, 74, 65, 75, 71, 48, 71, 56, 74, 58, 80, 72, 53, 60, 59, 63, 77, 67, 63, 66, 58, 73, 61, 53, 142, 82, 127, 60, 56, 74, 54, 53, 58, 62, 59, 62, 55, 87, 62, 51, 68, 69, 65, 117, 65, 42, 58, 69, 45, 64, 60, 59, 69, 101, 74, 99, 72, 64, 71, 66, 71, 66, 64, 64, 66, 55, 64, 63, 59, 61, 70, 62, 63, 64, 78, 61, 72, 82, 67, 52, 67, 84, 66, 58, 70, 81, 57, 59, 64, 59, 62, 52, 60, 71, 58, 79, 72, 53, 68, 64, 54, 71, 59, 67, 59, 59, 78, 52, 57, 65, 61, 78, 47, 72, 66, 52, 68, 64, 47, 57, 74, 68, 91, 
51, 85, 76, 54, 80, 58, 48, 53, 61, 54, 49, 59, 81, 53, 64, 66, 71, 67, 71, 74, 63, 68, 67, 64, 69, 67, 50, 66, 68, 63, 101, 68, 70, 94, 63, 71, 63, 69, 89, 66, 52, 52, 57, 54, 54, 51, 58, 66, 56, 59, 67, 63, 71, 66, 57, 54, 66, 51, 55, 87, 56, 58, 52, 79, 59, 98, 80, 54, 80, 59, 71, 56, 68, 46, 51, 63, 57, 73, 86, 69, 62, 62, 65, 64, 73, 63, 67, 62, 84, 67, 66, 57, 60, 58, 80, 67, 50, 104, 61, 62, 58, 57, 57, 64, 61, 79, 71, 69, 69, 54, 61, 55, 71, 55, 71, 56, 55, 60, 79, 57, 44, 64, 68, 55, 78, 56, 71, 75, 78, 68, 84, 60, 81, 59, 69, 104, 57, 65, 73, 86, 62, 62, 78, 53, 72, 69, 60, 58, 69, 59, 60, 57, 53, 61, 58, 67, 60, 66, 63, 67, 53, 69, 78, 69, 45, 53, 62, 53, 69, 81, 57, 53, 78, 73, 58, 81, 77, 70, 75, 66, 74, 57, 72, 56, 96, 70, 62, 69, 55, 51, 68, 65, 58, 54, 85, 40, 75, 56, 61, 73, 93, 94, 52, 59, 69, 50, 72, 90, 77, 59, 51, 68, 69, 61, 76, 52, 63, 54, 111, 95, 67, 57, 64, 63, 59, 62, 74, 62, 62, 71, 56, 57, 46, 53, 66, 65, 62, 56, 71, 74, 52, 44, 59, 57, 39, 48, 105, 63, 57, 74, 59, 60, 69, 82, 69, 58, 79, 58, 70, 54, 65, 95, 71, 46, 64, 54, 79, 64, 49, 57, 57, 75, 88, 52, 64, 91, 67, 71, 56, 75, 81, 74, 73, 81, 83, 64, 86, 71, 125, 48, 60, 40, 49, 62, 58, 63, 53, 77, 84, 69, 57, 68, 59, 33, 64, 73, 64, 57, 60, 64, 77, 52, 57, 57, 54, 44, 92, 38, 62, 57, 46, 57, 51, 48, 93, 66, 49, 73, 64, 68, 57, 66, 65, 64, 65, 66, 60, 66, 60, 54, 90, 108, 76, 63, 83, 51, 56, 62, 53, 61, 57, 62, 93, 59, 51, 80, 56, 79, 87, 72, 63, 69, 93, 81, 46, 71, 46, 163, 61, 44, 60, 60, 63, 65, 72, 62, 55, 89, 83, 50, 68, 82, 68, 97, 52, 69, 63, 57, 50, 50, 75, 61, 58, 61, 47, 100, 106, 82, 68, 61, 67, 54, 70, 57, 70, 59, 47, 50, 71, 50, 97, 131, 87, 73, 75, 53, 50, 47, 65, 61, 66, 59, 58, 59, 54, 54, 56, 98, 74, 52, 75, 68, 58, 73, 67, 118, 66, 70, 53, 60, 80, 77, 84, 70, 58, 56, 79, 63, 53, 62, 59, 54, 68, 74, 72, 58, 54, 53, 91, 56, 63, 62, 64, 64, 62, 60, 60, 70, 67, 49, 63, 84, 132, 68, 71, 81, 75, 74, 47, 57, 72, 55, 57, 50, 54, 59, 64, 60, 107, 55, 118, 70, 74, 61, 74, 61, 70, 53, 63, 65, 53, 57, 50, 68, 63, 49, 63, 63, 63, 56, 69, 89, 57, 71, 49, 72, 71, 42, 61, 69, 60, 58, 71, 66, 41, 49, 71, 43, 82, 55, 77, 63, 57, 105, 65, 51, 83, 60, 72, 47, 76, 79, 46, 57, 51, 78, 69, 64, 62, 73, 71, 61, 81, 62, 61, 74, 72, 86, 52, 55, 77, 84, 75, 79, 71, 53, 46, 56, 67, 115, 80, 77, 65, 73, 49, 107, 64, 80, 60, 37, 58, 57, 64, 97, 116, 73, 63, 67, 55, 47, 62, 62, 69, 70, 60, 72, 58, 39, 52, 59, 55, 73, 57, 56, 71, 68, 54, 77, 76, 45, 58, 73, 83, 73, 72, 63, 50, 76, 56, 71, 66, 66, 75, 61, 79, 78, 74, 75, 57, 49, 68, 134, 50, 69, 51, 64, 67, 62, 74, 62, 75, 69, 65, 71, 82, 78, 65, 67, 68, 66, 70, 60, 62, 46, 70, 62, 52, 59, 52, 47, 62, 79, 62, 68, 48, 142, 78, 71, 58, 66, 57, 58, 65, 71, 51, 65, 66, 41, 55, 65, 54, 77, 72, 63, 79, 77, 49, 74, 57, 66, 44, 61, 68, 65, 57, 98, 49, 37, 58, 74, 61, 60, 32, 50, 52, 94, 47, 51, 65, 57, 61, 77, 51, 60, 63, 96, 66, 52, 78, 79, 72, 53, 84, 51, 58, 70, 69, 75, 73, 87, 56, 52, 66, 58, 81, 76, 40, 72, 59, 75, 58, 54, 57, 49, 80, 59, 97, 52, 70, 57, 80, 67, 65, 85, 72, 57, 70, 47, 52, 51, 54, 61, 57, 71, 33, 61, 93, 75, 50, 66, 57, 58, 62, 69, 46, 75, 54, 62, 68, 77, 59, 60, 63, 80, 72, 76, 69, 42, 92, 53, 53, 56, 53, 71, 95, 59, 78, 55, 56, 91, 60, 47, 70, 82, 62, 55, 86, 59, 56, 68, 64, 62, 61, 108, 77, 50, 42, 67, 60, 53, 66, 69, 69, 54, 62, 64, 95, 60, 45, 69, 68, 86, 70, 83, 72, 142, 113, 64, 66, 69, 57, 58, 54, 97, 58, 94, 78, 61, 63, 60, 128, 51, 52, 59, 76, 72, 63, 58, 55, 75, 67, 58, 60, 73, 57, 57, 69, 85, 67, 71, 45, 56, 50, 72, 62, 69, 50, 58, 57, 51, 46, 75, 
73, 46, 63, 78, 52, 55, 78, 52, 53, 64, 50, 78, 54, 70, 72, 71, 80, 65, 67, 56, 55, 73, 58, 51, 58, 51, 66, 46, 51, 87, 78, 82, 60, 58, 66, 64, 84, 58, 73, 92, 38, 62, 73, 110, 53, 43, 126, 71, 88, 84, 68, 63, 95, 51, 73, 81, 70, 53, 48, 73, 55, 48, 50, 45, 60, 61, 68, 73, 59, 49, 51, 76, 78, 83, 47, 41, 68, 58, 86, 39, 51, 74, 49, 86, 70, 67, 58, 82, 62, 49, 76, 37, 61, 66, 85, 64, 96, 62, 57, 54, 71, 81, 52, 65, 59, 48, 53, 64, 62, 74, 84, 71, 39, 69, 71, 71, 51, 69, 57, 76, 96, 58, 56, 57, 58, 62, 67, 54, 33, 78, 81, 57, 57, 59, 107, 67, 73, 66, 57, 60, 60, 49, 79, 70, 87, 69, 77, 59, 62, 119, 51, 51, 98, 72, 61, 56, 56, 71, 77, 73, 46, 49, 56, 55, 128, 57, 63, 69, 77, 58, 76, 63, 66, 66, 56, 84, 61, 63, 69, 61, 69, 75, 67, 115, 74, 57, 71, 52, 62, 59, 67, 64, 63, 52, 58, 65, 50, 95, 72, 72, 67, 81, 105, 58, 61, 55, 61, 74, 63, 46, 56, 65, 81, 62, 67, 79, 48, 67, 56, 55, 66, 82, 61, 46, 62, 61, 68, 89, 47, 70, 50, 65, 73, 88, 51, 65, 50, 74, 53, 56, 122, 93, 67, 68, 63, 94, 72, 75, 47, 71, 74, 56, 44, 61, 62, 67, 58, 70, 76, 66, 68, 52, 70, 55, 74, 81, 87, 53, 51, 58, 85, 71, 62, 59, 42, 58, 63, 74, 69, 68, 66, 74, 81, 54, 62, 58, 53, 46, 52, 67, 50, 52, 70, 65, 82, 55, 102, 63, 58, 66, 119, 50, 46, 46, 61, 59, 85, 69, 52, 74, 72, 76, 62, 45, 84, 68, 54, 52, 53, 82, 78, 61, 86, 43, 63, 58, 99, 48, 46, 62, 63, 49, 65, 67, 69, 64, 66, 56, 90, 56, 81, 62, 60, 53, 65, 57, 72, 77, 55, 79, 64, 58, 58, 56, 56, 83, 69, 80, 84, 75, 75, 82, 52, 61, 68, 63, 77, 62, 71, 57, 84, 84, 60, 49, 78, 59, 53, 74, 70, 60, 60, 60, 62, 71, 85, 65, 66, 58, 50, 57, 46, 60, 56, 83, 61, 32, 59, 64, 100, 64, 55, 28, 66, 102, 61, 52, 71, 69, 98, 89, 82, 74, 62, 104, 96, 76, 48, 96, 77, 52, 64, 117, 65, 61, 62, 65, 60, 71, 46, 85, 66, 108, 59, 84, 79, 54, 60, 98, 76, 75, 64, 83, 46, 50, 62, 60, 76, 56, 110, 57, 57, 56, 82, 58, 71, 44, 62, 53, 102, 54, 71, 70, 52, 52, 102, 66, 95, 59, 49, 65, 56, 61, 87, 92, 63, 63, 57, 60, 81, 78, 94, 68, 80, 57, 79, 65, 66, 66, 53, 68, 47, 51, 86, 48, 59, 65, 86, 66, 60, 60, 57, 57, 71, 67, 51, 68, 49, 68, 63, 85, 59, 44, 63, 55, 113, 73, 82, 57, 90, 48, 62, 97, 55, 52, 46, 52, 60, 49, 39, 84, 71, 75, 76, 67, 81, 63, 62, 67, 94, 108, 42, 53, 73, 65, 55, 33, 84, 70, 46, 59, 58, 40, 111, 80, 63, 59, 79, 77, 71, 50, 83, 48, 48, 97, 52, 85, 63, 87, 47, 95, 72, 63, 68, 81, 46, 75, 94, 55, 53, 69, 69, 58, 46, 59, 50, 68, 72, 49, 83, 40, 77, 54, 52, 61, 66, 79, 88, 67, 61, 59, 67, 48, 63, 51, 69, 66, 52, 55, 70, 45, 44, 49, 93, 44, 63, 82, 78, 60, 92, 50, 68, 61, 41, 76, 47, 82, 83, 84, 83, 56, 53, 58, 79, 63, 84, 67, 42, 66, 75, 55, 53, 68, 69, 66, 67, 63, 58, 58, 43, 62, 61, 52, 54, 59, 52, 68, 55, 79, 57, 81, 39, 73, 57, 50, 93, 61, 68, 63, 102, 46, 70, 53, 59, 66, 71, 68, 83, 61, 62, 86, 66, 80, 109, 88, 75, 64, 80, 62, 84, 51, 63, 76, 48, 45, 52, 63, 55, 80, 60, 69, 61, 52, 60, 61, 74, 77, 66, 63, 71, 72, 68, 122, 45, 57, 69, 47, 68, 90, 72, 54, 54, 62, 80, 60, 48, 82, 63, 43, 61, 61, 56, 48, 72, 93, 87, 78, 74, 74, 81, 57, 73, 67, 68, 47, 69, 60, 136, 64, 68, 65, 60, 52, 41, 87, 70, 57, 65, 62, 77, 120, 46, 68, 103, 54, 65, 68, 84, 45, 58, 94, 88, 73, 60, 93, 37, 62, 68, 73, 63, 74, 53, 64, 92, 58, 57, 61, 98, 39, 91, 54, 81, 67, 58, 72, 63, 74, 83, 55, 58, 55, 73, 72, 46, 107, 76, 59, 61, 87, 70, 44, 52, 65, 59, 48, 55, 86, 64, 58, 104, 50, 75, 74, 67, 63, 65, 61, 51, 47, 92, 64, 53, 127, 54, 67, 76, 57, 85, 76, 57, 55, 67, 68, 53, 55, 57, 54, 52, 81, 86, 55, 57, 89, 84, 51, 88, 56, 58, 78, 63, 58, 90, 49, 82, 84, 125, 59, 61, 62, 61, 83, 63, 58, 58, 53, 76, 64, 57, 64, 55, 40, 
108, 61, 72, 27, 71, 53, 72, 101, 83, 67, 70, 62, 49, 65, 61, 56, 81, 59, 66, 91, 44, 62, 38, 58, 76, 70, 58, 85, 51, 69, 68, 45, 67, 62, 76, 57, 50, 63, 48, 67, 83, 81, 78, 81, 54, 59, 75, 56, 34, 67, 62, 84, 59, 73, 63, 63, 56, 68, 59, 83, 69, 42, 56, 107, 42, 81, 56, 40, 65, 73, 83, 57, 78, 67, 51, 96, 59, 96, 83, 50, 74, 57, 60, 70, 49, 77, 55, 64, 53, 58, 53, 51, 69, 81, 69, 68, 49, 98, 76, 52, 85, 58, 59, 70, 55, 67, 52, 58, 52, 56, 70, 67, 141, 87, 76, 108, 56, 51, 88, 64, 66, 50, 77, 69, 74, 44, 34, 60, 75, 64, 69, 68, 57, 69, 59, 75, 92, 53, 62, 54, 48, 65, 97, 66, 83, 71, 63, 47, 54, 70, 95, 61, 62, 67, 48, 46, 58, 92, 57, 53, 69, 58, 86, 110, 78, 54, 80, 64, 59, 64, 67, 61, 81, 104, 58, 87, 79, 60, 73, 78, 116, 75, 74, 91, 50, 67, 72, 66, 44, 58, 21, 72, 128, 68, 56, 64, 38, 75, 60, 95, 64, 39, 67, 54, 79, 75, 76, 45, 96, 47, 51, 65, 60, 71, 97, 74, 46, 82, 56, 103, 51, 77, 59, 42, 73, 70, 88, 47, 71, 70, 84, 85, 41, 62, 45, 57, 62, 39, 63, 76, 47, 67, 50, 39, 100, 60, 52, 57, 68, 76, 48, 55, 39, 58, 81, 51, 91, 70, 76, 93, 72, 75, 52, 94, 55, 67, 58, 57, 67, 64, 68, 50, 58, 72, 71, 67, 67, 55, 66, 74, 62, 74, 54, 64, 90, 73, 56, 58, 90, 56, 67, 100, 89, 99, 53, 104, 58, 60, 71, 59, 73, 61, 75, 95, 56, 65, 52, 76, 63, 70, 55, 63, 76, 38, 48, 61, 79, 66, 59, 70, 65, 86, 60, 67, 45, 65, 67, 78, 60, 83, 50, 69, 64, 62, 69, 73, 60, 72, 69, 60, 81, 57, 106, 64, 65, 30, 48, 61, 64, 65, 41, 61, 67, 57, 74, 81, 62, 81, 69, 61, 49, 70, 66, 79, 56, 99, 66, 54, 68, 51, 90, 77, 55, 69, 71, 78, 46, 63, 81, 52, 70, 102, 69, 55, 39, 78, 58, 95, 55, 78, 55, 69, 70, 61, 68, 74, 59, 70, 52, 66, 77, 69, 65, 56, 57, 71, 62, 61, 67, 63, 66, 46, 59, 85, 69, 51, 39, 60, 53, 60, 75, 47, 54, 77, 41, 70, 55, 64, 64, 65, 43, 64, 86, 52, 69, 75, 53, 63, 60, 78, 45, 51, 51, 54, 62, 50, 65, 54, 80, 54, 62, 70, 57, 42, 54, 60, 74, 73, 86, 59, 84, 78, 38, 59, 76, 39, 76, 60, 59, 69, 48, 78, 88, 54, 65, 54, 56, 52, 57, 63, 56, 49, 71, 75, 57, 48, 61, 70, 72, 61, 83, 87, 75, 70, 76, 81, 45, 56, 62, 80, 79, 95, 51, 99, 49, 88, 75, 62, 66, 59, 51, 43, 60, 54, 62, 71, 67, 63, 45, 84, 53, 89, 68, 108, 72, 72, 72, 57, 66, 58, 52, 68, 60, 72, 65, 67, 51, 51, 59, 69, 75, 65, 73, 51, 65, 55, 72, 60, 81, 61, 85, 67, 52, 62, 64, 98, 70, 55, 78, 47, 71, 58, 63, 59, 100, 60, 59, 63, 60, 55, 63, 67, 82, 89, 74, 84, 69, 61, 72, 60, 61, 85, 52, 82, 69, 59, 64, 58, 58, 73, 72, 74, 44, 62, 71, 59, 92, 67, 95, 66, 67, 53, 56, 66, 66, 60, 81, 79, 72, 69, 64, 70, 54, 53, 61, 66, 63, 52, 64, 64, 76, 73, 61, 77, 61, 60, 62, 54, 71, 64, 59, 67, 59, 65, 61, 81, 58, 68, 70, 63, 73, 65, 61, 66, 65, 62, 83, 77, 54, 59, 51, 56, 94, 75, 70, 56, 67, 73, 69, 53, 63, 56, 80, 51, 64, 42, 61, 57, 66, 71, 75, 58, 93, 76, 55, 51, 75, 63, 59, 62, 65, 70, 65, 86, 64, 67, 64, 77, 62, 101, 61, 58, 86, 87, 56, 60, 52, 54, 51, 48, 70, 74, 62, 84, 54, 61, 85, 74, 53, 62, 60, 57, 65, 67, 51, 58, 57, 58, 82, 60, 70, 59, 61, 61, 55, 76, 78, 62, 51, 82, 80, 53, 57, 85, 58, 63, 65, 92, 89, 84, 61, 52, 70, 44, 74, 65, 61, 62, 62, 60, 64, 54, 57, 84, 74, 61, 60, 56, 67, 61, 67, 68, 51, 46, 57, 57, 73, 65, 76, 56, 64, 64, 65, 64, 73, 54, 56, 70, 89, 74, 66, 60, 69, 64, 70, 56, 55, 71, 75, 54, 58, 76, 68, 62, 59, 90, 62, 68, 66, 67, 55, 44, 75, 57, 64, 91, 53, 74, 67, 67, 60, 58, 54, 131, 67, 65, 57, 55, 62, 59, 63, 58, 59, 68, 60, 62, 58, 69, 73, 74, 55, 61, 68, 56, 72, 71, 57, 56, 65, 64, 61, 79, 59, 72, 61, 104, 72, 58, 65, 53, 66, 71, 62, 62, 59, 61, 57, 50, 62, 67, 61, 59, 72, 59, 72, 73, 51, 69, 48, 61, 97, 58, 70, 68, 62, 56, 62, 65, 63, 70, 63, 65, 64, 
66, 72, 66, 58, 64, 69, 53, 80, 56, 66, 51, 75, 59, 62, 65, 95, 57, 63, 62, 66, 63, 68, 65, 73, 97, 70, 66, 58, 100, 68, 53, 54, 67, 59, 79, 55, 59, 51, 62, 56, 97, 66, 45, 79, 66, 60, 68, 63, 90, 63, 62, 56, 64, 56, 61, 52, 59, 65, 60, 64, 65, 75, 64, 75, 107, 99, 67, 66, 61, 61, 66, 59, 64, 54, 69, 56, 76, 58, 61, 70, 55, 55, 55, 52, 91, 90, 56, 82, 80, 82, 68, 69, 61, 68, 53, 57, 66, 74, 57, 57, 86, 66, 54, 77, 70, 61, 60, 58, 62, 75, 64, 51, 65, 61, 70, 66, 64, 73, 57, 60, 59, 59, 70, 66, 72, 69, 69, 52, 64, 62, 54, 64, 77, 54, 67, 62, 60, 69, 67, 78, 68, 53, 65, 63, 60, 57, 63, 58, 74, 64, 71, 55, 68, 61, 58, 58, 63, 55, 60, 59, 74, 75, 67, 75, 64, 68, 50, 59, 63, 58, 56, 143, 54, 63, 66, 82, 52, 61, 65, 85, 71, 59, 61, 70, 56, 106, 43, 63, 81, 81, 92, 75, 58, 90, 60, 70, 69, 75, 95, 57, 56, 57, 90, 66, 70, 84, 50, 58, 63, 84, 70, 76, 60, 67, 62, 59, 45, 70, 59, 89, 56, 62, 53, 63, 49, 69, 60, 76, 55, 55, 60, 43, 85, 55, 40, 79, 65, 65, 65, 43, 52, 61, 59, 67, 64, 78, 63, 71, 74, 54, 63, 57, 54, 100, 66, 68, 71, 67, 67, 66, 93, 77, 67, 80, 49, 86, 74, 58, 52, 50, 82, 55, 65, 59, 73, 57, 82, 59, 69, 55, 90, 64, 80, 57, 60, 61, 59, 82, 68, 68, 70, 76, 67, 86, 67, 66, 56, 74, 68, 55, 112, 58, 79, 60, 74, 59, 61, 59, 74, 61, 53, 76, 72, 101, 69, 56, 73, 60, 70, 66, 53, 65, 65, 65, 116, 67, 53, 58, 56, 65, 67, 73, 71, 73, 81, 64, 67, 66, 53, 60, 73, 67, 67, 56, 50, 61, 50, 71, 87, 70, 56, 122, 57, 82, 64, 66, 67, 65, 47, 91, 61, 59, 67, 61, 60, 62, 78, 52, 64, 56, 70, 66, 76, 44, 63, 70, 53, 48, 49, 82, 61, 45, 74, 77, 60, 51, 61, 118, 56, 52, 65, 66, 86, 55, 66, 52, 61, 55, 65, 59, 56, 71, 61, 55, 60, 56, 61, 68, 58, 82, 64, 66, 60, 57, 130, 68, 77, 62, 65, 81, 65, 73, 62, 45, 83, 64, 73, 69, 54, 89, 56, 70, 60, 71, 54, 56, 65, 68, 86, 84, 87, 59, 64, 64, 62, 63, 71, 70, 75, 66, 55, 67, 62, 55, 64, 60, 58, 73, 67, 63, 58, 60, 87, 68, 51, 70, 64, 68, 60, 60, 66, 77, 88, 76, 56, 74, 81, 63, 55, 64, 83, 81, 57, 75, 62, 67, 63, 61, 63, 53, 57, 58, 70, 64, 58, 55, 59, 56, 62, 84, 71, 65, 56, 78, 55, 61, 60, 67, 61, 51, 67, 60, 90, 65, 71, 82, 67, 65, 63, 61, 59, 66, 73, 69, 66, 101, 64, 55, 54, 72, 67, 53, 59, 83, 76, 63, 83, 60, 59, 54, 57, 68, 54, 74, 69, 54, 57, 59, 68, 69, 87, 61, 63, 65, 70, 65, 67, 60, 60, 73, 60, 60, 84, 61, 57, 73, 57, 52, 69, 68, 64, 67, 57, 75, 60, 57, 55, 62, 68, 56, 64, 68, 108, 65, 60, 60, 56, 61, 101, 70, 59, 62, 56, 65, 69, 63, 64, 52, 63, 68, 62, 54, 71, 60, 78, 64, 57, 59, 62, 96, 99, 69, 60, 83, 64, 74, 56, 72, 47, 57, 58, 53, 60, 69, 74, 60, 82, 59, 68, 74, 63, 57, 71, 57, 69, 58, 82, 62, 70, 61, 83, 69, 52, 61, 60, 62, 65, 57, 116, 61, 56, 59, 78, 75, 61, 78, 58, 92, 72, 61, 51, 55, 70, 84, 57, 39, 71, 65, 50, 74, 56, 54, 58, 102, 53, 59, 72, 59, 74, 57, 136, 55, 57, 74, 69, 57, 50, 59, 45, 65, 47, 69, 57, 50, 85, 67, 62, 61, 84, 99, 102, 44, 77, 65, 55, 61, 62, 74, 79, 49, 87, 84, 73, 70, 56, 89, 54, 86, 51, 63, 55, 58, 62, 102, 50, 67, 50, 69, 96, 65, 89, 79, 76, 64, 82, 58, 43, 23, 81, 67, 81, 76, 59, 98, 56, 67, 51, 52, 85, 69, 62, 70, 57, 44, 68, 62, 70, 60, 72, 81, 55, 87, 45, 66, 67, 61, 85, 58, 51, 73, 54, 94, 69, 77, 52, 57, 66, 58, 60, 34, 51, 78, 60, 51, 71, 60, 60, 72, 76, 55, 85, 92, 64, 57, 43, 68, 63, 94, 87, 69, 53, 39, 58, 57, 54, 50, 105, 60, 67, 53, 50, 64, 71, 59, 85, 44, 60, 93, 48, 63, 96, 81, 75, 63, 55, 51, 64, 60, 67, 81, 84, 72, 68, 54, 51, 61, 66, 56, 53, 54, 92, 91, 71, 54, 55, 61, 56, 53, 48, 81, 68, 112, 61, 60, 52, 64, 57, 52, 69, 56, 65, 67, 63, 68, 59, 77, 84, 60, 75, 50, 61, 73, 48, 55, 83, 44, 55, 57, 58, 64, 106, 60, 75, 
49, 60, 66, 56, 66, 62, 68, 65, 69, 80, 56, 55, 54, 54, 47, 78, 41, 65, 52, 58, 86, 64, 62, 55, 61, 109, 102, 50, 65, 60, 40, 58, 52, 65, 66, 48, 70, 77, 67, 61, 64, 69, 56, 68, 51, 54, 70, 59, 31, 67, 55, 89, 62, 88, 54, 50, 57, 84, 100, 48, 76, 64, 50, 72, 67, 82, 68, 63, 62, 62, 72, 73, 67, 58, 66, 66, 65, 68, 53, 72, 84, 66, 48, 60, 62, 53, 65, 77, 61, 43, 56, 51, 41, 54, 51, 70, 58, 63, 61, 37, 43, 82, 133, 80, 52, 52, 67, 75, 80, 60, 59, 51, 68, 64, 95, 67, 53, 65, 62, 52, 66, 128, 76, 99, 58, 65, 60, 56, 43, 74, 66, 62, 71, 60, 71, 103, 53, 59, 90, 80, 73, 66, 68, 81, 57, 49, 74, 58, 75, 51, 61, 46, 64, 83, 80, 35, 57, 58, 46, 56, 69, 83, 63, 51, 71, 66, 64, 74, 58, 61, 86, 56, 43, 59, 54, 72, 97, 61, 73, 70, 51, 63, 102, 55, 73, 81, 57, 60, 92, 49, 64, 50, 39, 74, 57, 62, 71, 47, 71, 75, 72, 63, 59, 57, 52, 68, 113, 60, 60, 64, 88, 69, 56, 61, 72, 65, 70, 73, 57, 65, 52, 88, 88, 64, 50, 103, 75, 59, 46, 86, 81, 55, 85, 49, 69, 39, 84, 80, 60, 57, 83, 62, 71, 45, 93, 60, 56, 89, 42, 62, 64, 68, 87, 62, 64, 59, 58, 59, 58, 60, 103, 58, 69, 79, 45, 65, 57, 54, 76, 61, 35, 76, 48, 49, 66, 59, 48, 37, 54, 84, 58, 44, 76, 57, 71, 52, 47, 37, 71, 87, 41, 54, 62, 78, 66, 75, 60, 64, 95, 67, 79, 106, 51, 61, 60, 65, 76, 57, 57, 61, 38, 44, 69, 65, 69, 61, 50, 65, 56, 42, 60, 60, 55, 57, 80, 62, 50, 80, 63, 41, 69, 48, 56, 62, 66, 72, 37, 52, 57, 83, 75, 70, 62, 94, 46, 77, 52, 72, 58, 78, 50, 55, 69, 57, 78, 68, 55, 71, 59, 85, 87, 62, 64, 72, 82, 64, 48, 55, 74, 70, 65, 91, 53, 68, 85, 54, 70, 66, 47, 67, 78, 68, 58, 70, 54, 57, 79, 56, 89, 59, 60, 51, 65, 63, 101, 46, 55, 96, 68, 49, 42, 67, 35, 46, 83, 55, 67, 54, 83, 51, 78, 72, 66, 66, 68, 58, 44, 76, 58, 68, 62, 64, 73, 84, 59, 37, 68, 66, 51, 72, 105, 62, 85, 52, 57, 46, 75, 44, 58, 79, 64, 95, 60, 109, 44, 48, 72, 65, 40, 53, 59, 60, 52, 77, 65, 90, 71, 70, 72, 67, 48, 71, 60, 81, 63, 65, 77, 58, 65, 81, 59, 74, 47, 85, 54, 74, 110, 62, 75, 70, 71, 58, 44, 46, 60, 75, 57, 43, 48, 80, 58, 75, 65, 55, 80, 59, 79, 83, 89, 69, 74, 50, 71, 81, 87, 56, 74, 59, 69, 73, 75, 68, 66, 46, 88, 68, 75, 58, 61, 55, 141, 61, 69, 60, 67, 58, 44, 56, 85, 54, 71, 69, 71, 55, 58, 62, 71, 58, 54, 82, 95, 51, 47, 53, 67, 56, 62, 67, 60, 58, 56, 74, 56, 110, 49, 74, 60, 44, 74, 68, 91, 65, 37, 77, 87, 61, 77, 54, 85, 46, 62, 44, 41, 77, 68, 73, 37, 62, 53, 105, 69, 70, 59, 77, 58, 71, 79, 56, 62, 86, 86, 107, 66, 71, 52, 54, 47, 61, 46, 65, 69, 47, 72, 90, 70, 64, 55, 69, 53, 51, 78, 105, 64, 46, 72, 52, 60, 72, 101, 103, 116, 80, 78, 52, 65, 59, 78, 68, 64, 84, 86, 64, 67, 72, 90, 55, 62, 59, 75, 79, 64, 67, 60, 60, 62, 69, 84, 81, 81, 60, 70, 79, 60, 109, 61, 64, 66, 54, 57, 67, 47, 64, 90, 72, 91, 79, 95, 52, 60, 39, 63, 81, 71, 81, 51, 73, 57, 115, 70, 52, 26, 64, 56, 59, 57, 68, 50, 54, 61, 63, 53, 53, 42, 42, 51, 85, 89, 39, 56, 45, 36, 67, 64, 89, 72, 86, 62, 40, 63, 71, 61, 59, 82, 112, 69, 56, 49, 57, 48, 70, 66, 69, 70, 83, 62, 68, 73, 52, 53, 76, 48, 59, 49, 66, 43, 63, 70, 51, 47, 68, 70, 67, 74, 36, 70, 75, 86, 90, 63, 74, 58, 51, 64, 65, 84, 77, 62, 54, 88, 92, 78, 49, 41, 68, 83, 65, 72, 68, 42, 48, 76, 59, 74, 57, 61, 67, 56, 86, 53, 62, 50, 46, 68, 51, 59, 50, 64, 63, 72, 56, 70, 65, 49, 59, 64, 112, 114, 41, 78, 88, 138, 60, 55, 94, 81, 56, 80, 97, 75, 70, 65, 64, 56, 61, 57, 51, 69, 53, 71, 46, 69, 52, 68, 59, 63, 88, 80, 62, 87, 63, 58, 79, 91, 71, 54, 65, 46, 70, 63, 47, 58, 45, 87, 60, 89, 58, 64, 72, 82, 37, 58, 100, 56, 64, 80, 88, 47, 58, 47, 52, 53, 57, 73, 46, 48, 57, 60, 58, 56, 45, 62, 66, 59, 85, 66, 67, 68, 57, 57, 60, 61, 
73, 47, 60, 53, 67, 67, 67, 58, 56, 62, 100, 67, 71, 67, 59, 70, 57, 66, 60, 54, 131, 56, 118, 70, 57, 79, 53, 56, 65, 52, 66, 64, 56, 59, 59, 52, 85, 63, 57, 56, 56, 58, 58, 47, 144, 54, 82, 48, 63, 67, 66, 99, 49, 99, 85, 58, 54, 73, 65, 68, 83, 77, 66, 64, 93, 55, 56, 60, 106, 60, 45, 68, 55, 73, 61, 63, 66, 88, 68, 62, 63, 67, 68, 90, 52, 60, 63, 53, 63, 49, 76, 70, 47, 63, 60, 76, 73, 76, 119, 101, 58, 69, 83, 127, 75, 63, 52, 43, 74, 69, 62, 53, 61, 47, 91, 65, 65, 63, 70, 77, 63, 61, 68, 43, 55, 89, 64, 67, 63, 68, 63, 81, 49, 75, 55, 73, 85, 57, 62, 57, 59, 73, 73, 51, 54, 62, 51, 49, 61, 42, 77, 53, 61, 88, 50, 63, 59, 61, 63, 65, 54, 85, 87, 107, 45, 71, 72, 55, 100, 74, 59, 90, 78, 64, 59, 92, 57, 82, 62, 68, 67, 83, 56, 63, 60, 65, 63, 61, 73, 68, 67, 56, 82, 65, 74, 55, 46, 56, 79, 58, 58, 64, 59, 70, 55, 86, 75, 58, 81, 50, 60, 86, 70, 68, 67, 101, 64, 53, 65, 60, 101, 54, 74, 64, 58, 67, 61, 59, 54, 70, 79, 66, 111, 68, 47, 103, 57, 65, 41, 60, 70, 63, 59, 65, 73, 62, 64, 65, 74, 58, 119, 90, 53, 62, 56, 49, 59, 69, 70, 73, 54, 75, 80, 72, 59, 61, 75, 76, 118, 57, 70, 63, 65, 72, 59, 64, 65, 60, 74, 98, 67, 66, 65, 53, 60, 60, 76, 59, 64, 65, 56, 56, 74, 64, 66, 70, 58, 79, 62, 74, 133, 64, 56, 62, 66, 57, 70, 55, 66, 60, 75, 70, 63, 68, 39, 64, 58, 70, 51, 56, 61, 58, 63, 63, 68, 69, 79, 64, 57, 58, 66, 71, 60, 40, 69, 93, 58, 62, 57, 58, 89, 65, 53, 57, 67, 61, 55, 66, 68, 56, 72, 80, 65, 60, 66, 73, 45, 69, 68, 57, 52, 53, 66, 47, 62, 63, 66, 52, 61, 60, 61, 54, 66, 79, 57, 93, 62, 58, 64, 63, 57, 67, 76, 67, 65, 70, 62, 82, 61, 55, 75, 58, 65, 66, 59, 53, 61, 67, 68, 105, 54, 87, 78, 53, 56, 65, 67, 42, 62, 57, 52, 57, 130, 86, 92, 85, 60, 59, 86, 66, 63, 60, 71, 57, 69, 47, 64, 78, 58, 71, 82, 53, 80, 76, 67, 82, 60, 53, 63, 70, 80, 69, 68, 51, 64, 68, 125, 66, 70, 62, 58, 70, 60, 63, 105, 57, 58, 58, 65, 75, 66, 69, 59, 46, 62, 75, 67, 60, 58, 71, 58, 76, 98, 70, 61, 58, 74, 60, 52, 71, 58, 76, 52, 120, 66, 58, 44, 50, 54, 49, 40, 71, 50, 78, 84, 72, 64, 58, 54, 56, 78, 60, 42, 60, 77, 62, 100, 70, 62, 62, 65, 63, 74, 66, 90, 58, 70, 88, 54, 69, 67, 70, 66, 101, 60, 62, 53, 51, 66, 92, 77, 71, 51, 76, 74, 58, 72, 72, 65, 47, 55, 66, 76, 49, 60, 87, 73, 48, 58, 86, 88, 40, 64, 47, 76, 83, 53, 49, 58, 43, 61, 74, 65, 60, 62, 58, 67, 60, 62, 63, 80, 60, 59, 76, 59, 57, 51, 78, 47, 65, 52, 51, 57, 46, 69, 62, 66, 48, 54, 55, 68, 47, 76, 73, 75, 72, 56, 68, 90, 66, 68, 81, 56, 53, 48, 33, 84, 55, 58, 57, 58, 96, 55, 65, 55, 59, 55, 103, 62, 54, 50, 54, 58, 62, 75, 63, 55, 68, 50, 57, 64, 85, 79, 81, 66, 54, 38, 61, 66, 51, 60, 66, 68, 84, 52, 70, 58, 66, 62, 107, 80, 83, 71, 55, 62, 94, 67, 57, 55, 67, 80, 62, 60, 76, 61, 64, 75, 76, 60, 68, 72, 54, 107, 91, 66, 77, 52, 67, 67, 73, 68, 50, 59, 46, 72, 61, 52, 68, 56, 63, 62, 56, 63, 66, 78, 56, 77, 68, 57, 51, 62, 52, 58, 51, 58, 51, 62, 90, 65, 43, 55, 123, 116, 72, 61, 68, 54, 60, 61, 64, 65, 55, 64, 60, 79, 62, 58, 63, 62, 60, 42, 74, 54, 64, 76, 69, 58, 70, 75, 70, 57, 56, 51, 69, 78, 72, 56, 68, 83, 63, 78, 52, 78, 67, 57, 70, 66, 63, 51, 63, 75, 69, 62, 68, 81, 77, 83, 61, 35, 61, 51, 49, 63, 57, 57, 54, 52, 54, 47, 107, 59, 54, 62, 63, 60, 56, 74, 52, 43, 62, 53, 56, 66, 60, 69, 58, 56, 50, 129, 62, 69, 63, 73, 53, 58, 59, 78, 65, 88, 57, 57, 50, 68, 55, 81, 63, 95, 61, 57, 67, 72, 66, 51, 58, 46, 63, 59, 61, 67, 75, 61, 91, 70, 46, 73, 65, 59, 46, 84, 69, 68, 52, 56, 66, 60, 56, 61, 105, 76, 66, 50, 74, 86, 61, 93, 58, 73, 71, 53, 58, 56, 58, 56, 70, 66, 55, 60, 70, 60, 84, 56, 59, 79, 59, 64, 60, 52, 51, 59, 64, 
61, 73, 68, 69, 74, 73, 64, 82, 83, 67, 58, 62, 88, 75, 50, 59, 56, 66, 50, 51, 63, 54, 95, 53, 68, 58, 61, 71, 69, 70, 58, 103, 66, 55, 56, 64, 60, 66, 78, 49, 72, 54, 90, 63, 69, 40, 53, 63, 57, 74, 76, 68, 65, 95, 54, 85, 55, 51, 63, 51, 66, 61, 56, 50, 58, 64, 70, 74, 60, 47, 110, 55, 72, 75, 51, 50, 64, 67, 71, 65, 51, 80, 58, 55, 54, 69, 76, 79, 70, 77, 75, 70, 88, 58, 63, 86, 65, 58, 68, 81, 87, 106, 67, 60, 57, 70, 78, 66, 72, 63, 68, 84, 61, 60, 65, 63, 50, 73, 103, 64, 51, 139, 79, 70, 62, 81, 97, 55, 64, 64, 59, 87, 59, 65, 59, 74, 56, 74, 77, 77, 57, 83, 97, 91, 61, 73, 52, 57, 55, 95, 74, 52, 66, 63, 52, 114, 73, 60, 91, 72, 59, 73, 67, 67, 54, 74, 69, 71, 58, 64, 69, 59, 63, 64, 70, 55, 66, 64, 51, 51, 83, 62, 42, 66, 69, 69, 62, 52, 90, 53, 49, 55, 124, 71, 43, 91, 52, 112, 52, 71, 71, 48, 79, 50, 51, 42, 55, 73, 111, 70, 75, 58, 55, 70, 67, 70, 55, 87, 73, 59, 53, 73, 63, 47, 39, 85, 81, 92, 61, 53, 83, 56, 62, 37, 63, 61, 49, 88, 63, 62, 53, 63, 58, 59, 81, 67, 68, 42, 95, 44, 59, 68, 70, 60, 48, 64, 50, 78, 76, 76, 72, 64, 59, 63, 61, 45, 51, 83, 64, 74, 48, 57, 56, 72, 73, 54, 61, 65, 71, 53, 70, 62, 54, 71, 81, 64, 91, 65, 69, 67, 50, 61, 51, 54, 65, 60, 80, 101, 56, 58, 54, 50, 58, 69, 79, 47, 66, 76, 79, 62, 56, 86, 75, 61, 47, 66, 64, 55, 68, 85, 59, 66, 64, 60, 56, 64, 72, 64, 57, 59, 74, 80, 88, 100, 78, 48, 64, 62, 67, 49, 65, 73, 80, 56, 62, 78, 75, 86, 51, 53, 107, 80, 64, 101, 62, 82, 52, 67, 78, 47, 49, 54, 60, 42, 61, 56, 79, 81, 60, 57, 54, 59, 55, 153, 82, 57, 47, 84, 63, 71, 71, 98, 41, 63, 64, 71, 71, 64, 49, 76, 60, 57, 76, 57, 79, 94, 70, 47, 51, 65, 80, 84, 64, 54, 59, 64, 76, 65, 75, 61, 79, 56, 62, 68, 54, 60, 62, 60, 48, 75, 60, 62, 58, 48, 71, 67, 64, 73, 65, 86, 73, 63, 52, 44, 76, 96, 76, 77, 62, 55, 53, 65, 58, 35, 59, 66, 73, 59, 52, 61, 72, 80, 49, 93, 61, 55, 70, 68, 60, 65, 47, 66, 58, 59, 85, 55, 56, 64, 56, 65, 60, 76, 64, 58, 56, 77, 119, 47, 64, 69, 61, 80, 44, 53, 57, 76, 64, 62, 58, 35, 61, 67, 89, 79, 66, 55, 51, 59, 81, 67, 63, 62, 69, 76, 82, 46, 103, 60, 72, 56, 90, 54, 86, 65, 69, 67, 55, 69, 47, 72, 57, 79, 70, 61, 50, 60, 64, 66, 61, 81, 68, 48, 75, 65, 118, 74, 69, 47, 71, 51, 65, 52, 72, 77, 91, 50, 68, 42, 63, 74, 71, 61, 69, 97, 65, 68, 63, 94, 73, 55, 73, 77, 41, 72, 80, 51, 64, 60, 68, 72, 76, 65, 75, 63, 65, 70, 60, 56, 80, 72, 43, 42, 60, 80, 64, 55, 58, 54, 60, 63, 82, 82, 61, 58, 60, 65, 79, 63, 68, 107, 80, 59, 59, 69, 76, 68, 67, 68, 70, 53, 58, 100, 60, 69, 66, 69, 71, 80, 73, 74, 61, 48, 71, 73, 81, 77, 65, 68, 63, 81, 74, 68, 98, 41, 62, 69, 59, 55, 64, 59, 69, 69, 83, 76, 56, 51, 72, 53, 55, 74, 67, 73, 64, 57, 74, 61, 51, 60, 78, 55, 72, 70, 81, 72, 63, 60, 85, 55, 61, 49, 53, 81, 57, 66, 44, 61, 60, 54, 64, 53, 62, 82, 64, 72, 36, 52, 104, 65, 60, 76, 67, 57, 90, 55, 69, 61, 83, 139, 61, 61, 98, 67, 41, 64, 69, 58, 67, 61, 63, 56, 64, 62, 73, 87, 108, 69, 75, 61, 59, 79, 57, 74, 59, 64, 48, 54, 62, 69, 63, 58, 79, 65, 65, 64, 50, 72, 84, 84, 70, 68, 73, 84, 37, 42, 61, 59, 52, 69, 64, 53, 59, 58, 116, 133, 68, 67, 74, 57, 43, 68, 66, 65, 44, 87, 60, 67, 78, 93, 65, 53, 77, 52, 60, 73, 59, 48, 67, 74, 77, 74, 41, 43, 68, 52, 71, 63, 54, 70, 60, 54, 71, 51, 54, 46, 71, 72, 49, 75, 49, 85, 61, 74, 64, 70, 59, 67, 77, 70, 66, 81, 87, 61, 71, 68, 61, 63, 49, 60, 72, 63, 76, 47, 81, 50, 54, 51, 60, 77, 59, 44, 53, 64, 57, 67, 47, 56, 63, 92, 58, 63, 76, 62, 93, 54, 64, 63, 70, 72, 67, 52, 89, 67, 67, 63, 58, 66, 45, 67, 54, 72, 74, 52, 54, 70, 51, 51, 46, 57, 105, 60, 57, 60, 105, 79, 63, 70, 55, 89, 72, 43, 90, 81, 
65, 67, 59, 53, 72, 54, 56, 63, 106, 112, 75, 70, 59, 60, 69, 72, 65, 66, 74, 77, 62, 76, 61, 51, 88, 59, 65, 56, 64, 56, 41, 65, 41, 90, 87, 49, 89, 66, 92, 55, 70, 62, 48, 65, 65, 87, 53, 67, 63, 63, 53, 56, 71, 58, 51, 53, 53, 64, 79, 68, 83, 67, 50, 60, 74, 48, 68, 55, 63, 88, 72, 70, 91, 58, 58, 60, 91, 79, 46, 103, 66, 56, 35, 70, 67, 61, 74, 53, 73, 69, 54, 58, 63, 63, 73, 87, 62, 63, 56, 68, 60, 48, 63, 39, 89, 55, 107, 64, 55, 60, 61, 57, 76, 76, 52, 47, 55, 85, 75, 75, 52, 52, 69, 68, 48, 52, 67, 61, 91, 75, 57, 47, 54, 71, 44, 66, 65, 56, 74, 54, 82, 56, 115, 76, 68, 64, 65, 46, 61, 65, 63, 89, 62, 65, 61, 49, 62, 83, 69, 78, 70, 48, 57, 61, 48, 74, 87, 76, 62, 65, 68, 75, 64, 66, 71, 61, 63, 60, 66, 86, 68, 72, 93, 83, 97, 62, 70, 70, 51, 46, 80, 50, 67, 76, 74, 64, 110, 69, 45, 52, 60, 57, 63, 68, 48, 67, 48, 68, 64, 72, 93, 79, 93, 56, 70, 97, 73, 43, 69, 68, 71, 74, 59, 66, 64, 60, 85, 86, 60, 54, 54, 52, 53, 93, 58, 57, 54, 53, 73, 64, 75, 56, 61, 75, 77, 97, 66, 57, 102, 85, 83, 60, 51, 61, 68, 46, 72, 43, 81, 62, 85, 83, 61, 60, 64, 67, 75, 49, 60, 68, 50, 72, 55, 78, 66, 63, 56, 54, 62, 84, 71, 56, 59, 52, 69, 71, 78, 71, 71, 52, 56, 66, 66, 72, 70, 57, 103, 63, 77, 60, 63, 77, 80, 82, 58, 57, 70, 58, 72, 68, 54, 71, 76, 110, 65, 43, 70, 58, 82, 65, 67, 60, 58, 62, 72, 65, 59, 77, 85, 61, 64, 67, 64, 89, 62, 71, 66, 59, 73, 74, 60, 65, 60, 66, 57, 66, 57, 75, 47, 81, 57, 85, 66, 60, 73, 63, 55, 58, 64, 64, 67, 44, 72, 71, 75, 69, 84, 67, 86, 71, 67, 82, 61, 50, 79, 51, 69, 49, 61, 93, 57, 43, 73, 70, 80, 57, 60, 108, 54, 64, 64, 58, 77, 61, 49, 56, 77, 79, 68, 66, 82, 58, 101, 52, 67, 77, 66, 63, 71, 78, 68, 74, 61, 82, 57, 61, 59, 70, 75, 55, 51, 71, 81, 57, 72, 77, 59, 63, 53, 59, 62, 62, 83, 82, 56, 90, 65, 66, 62, 65, 46, 94, 67, 72, 73, 63, 62, 47, 88, 64, 63, 61, 73, 64, 72, 57, 63, 72, 61, 54, 80, 61, 77, 74, 41, 76, 50, 65, 66, 58, 73, 60, 62, 33, 61, 69, 44, 65, 75, 63, 59, 67, 66, 53, 53, 69, 64, 91, 69, 60, 52, 75, 60, 57, 79, 58, 69, 86, 57, 67, 86, 62, 62, 75, 63, 78, 56, 64, 77, 53, 58, 73, 75, 65, 57, 74, 78, 64, 58, 65, 79, 61, 64, 56, 43, 60, 76, 102, 58, 60, 68, 36, 53, 71, 66, 79, 73, 95, 62, 60, 67, 60, 70, 80, 60, 78, 49, 44, 46, 78, 53, 82, 63, 53, 70, 76, 67, 59, 47, 57, 62, 70, 77, 51, 62, 76, 81, 66, 60, 47, 55, 49, 60, 85, 148, 60, 65, 62, 44, 56, 99, 71, 71, 80, 70, 64, 94, 70, 52, 72, 51, 59, 73, 71, 57, 57, 49, 106, 67, 90, 92, 99, 69, 64, 66, 59, 94, 66, 74, 98, 42, 83, 80, 57, 57, 93, 68, 70, 60, 85, 49, 70, 57, 59, 72, 64, 68, 78, 65, 54, 57, 63, 65, 54, 61, 65, 67, 65, 43, 58, 65, 67, 75, 58, 60, 46, 69, 74, 60, 64, 87, 51, 74, 89, 50, 59, 59, 67, 64, 66, 55, 69, 62, 63, 64, 81, 72, 56, 58, 85, 61, 57, 79, 59, 58, 69, 57, 71, 75, 66, 63, 57, 77, 77, 57, 73, 58, 64, 63, 67, 74, 83, 51, 68, 55, 68, 61, 73, 64, 58, 49, 59, 67, 54, 52, 65, 60, 57, 45, 61, 62, 64, 67, 59, 68, 65, 58, 62, 58, 77, 76, 69, 67, 92, 54, 89, 99, 54, 62, 60, 52, 48, 71, 61, 73, 67, 68, 58, 65, 84, 51, 56, 60, 66, 82, 67, 47, 69, 75, 64, 46, 62, 65, 80, 66, 65, 53, 82, 58, 55, 62, 51, 51, 60, 52, 58, 59, 60, 69, 59, 75, 63, 73, 61, 57, 61, 68, 63, 56, 54, 63, 68, 65, 47, 67, 74, 94, 54, 67, 64, 86, 51, 64, 104, 63, 57, 67, 40, 81, 108, 46, 74, 62, 71, 60, 60, 76, 98, 61, 78, 75, 112, 58, 60, 51, 63, 74, 77, 66, 49, 66, 49, 89, 57, 55, 65, 49, 56, 60, 59, 55, 65, 78, 74, 74, 42, 73, 58, 68, 58, 66, 76, 69, 85, 57, 67, 66, 53, 74, 55, 74, 69, 58, 75, 58, 49, 74, 74, 60, 74, 80, 64, 63, 72, 66, 71, 74, 55, 60, 63, 59, 53, 59, 56, 75, 68, 87, 81, 71, 62, 69, 64, 79, 68, 
72, 61, 54, 74, 66, 79, 69, 52, 56, 61, 53, 56, 55, 61, 71, 68, 72, 53, 59, 72, 80, 65, 49, 63, 65, 58, 68, 65, 56, 74, 72, 80, 57, 71, 61, 47, 92, 58, 75, 96, 65, 70, 76, 70, 38, 71, 66, 68, 65, 70, 59, 82, 66, 66, 95, 54, 64, 69, 50, 58, 62, 65, 36, 72, 70, 87, 50, 76, 43, 69, 52, 50, 67, 72, 66, 59, 81, 51, 63, 60, 54, 55, 81, 56, 64, 81, 83, 69, 56, 77, 66, 54, 93, 59, 62, 52, 74, 72, 38, 73, 58, 79, 73, 60, 60, 76, 49, 73, 57, 58, 62, 63, 71, 55, 61, 60, 70, 57, 71, 66, 65, 43, 69, 54, 75, 59, 66, 64, 73, 111, 62, 68, 69, 62, 42, 60, 74, 104, 59, 59, 60, 52, 47, 63, 62, 50, 62, 82, 61, 64, 58, 80, 101, 42, 57, 68, 50, 60, 59, 64, 85, 44, 72, 56, 63, 64, 77, 66, 61, 51, 70, 53, 56, 56, 56, 65, 56, 69, 71, 54, 59, 79, 53, 71, 61, 66, 51, 71, 62, 72, 98, 76, 89, 62, 52, 58, 75, 73, 56, 59, 62, 71, 74, 77, 49, 79, 56, 78, 45, 62, 70, 59, 40, 82, 81, 49, 52, 55, 82, 55, 66, 53, 70, 34, 62, 88, 69, 80, 68, 61, 68, 51, 74, 70, 51, 64, 65, 86, 56, 61, 59, 79, 53, 80, 70, 75, 61, 77, 45, 89, 64, 62, 43, 81, 76, 67, 67, 57, 56, 73, 93, 66, 57, 102, 53, 51, 63, 47, 58, 74, 71, 79, 70, 54, 60, 53, 77, 58, 74, 70, 45, 72, 64, 52, 71, 59, 144, 63, 76, 83, 48, 68, 62, 82, 56, 53, 48, 90, 108, 76, 64, 52, 54, 53, 71, 64, 55, 89, 55, 72, 51, 71, 65, 73, 73, 65, 56, 56, 61, 115, 62, 63, 65, 98, 67, 80, 73, 70, 75, 59, 65, 115, 59, 66, 55, 52, 57, 60, 60, 55, 55, 59, 93, 70, 50, 66, 48, 58, 69, 76, 51, 60, 73, 72, 78, 50, 61, 68, 63, 52, 77, 61, 39, 69, 76, 76, 60, 66, 57, 65, 77, 64, 66, 65, 90, 63, 82, 63, 64, 74, 70, 75, 55, 58, 56, 69, 67, 78, 73, 56, 73, 51, 64, 50, 78, 72, 56, 59, 55, 78, 73, 58, 53, 56, 89, 44, 46, 53, 70, 53, 42, 62, 70, 60, 59, 61, 62, 45, 57, 79, 58, 104, 60, 59, 53, 49, 57, 73, 68, 57, 70, 60, 64, 92, 54, 58, 76, 76, 89, 63, 75, 79, 60, 47, 63, 75, 50, 58, 68, 82, 75, 62, 50, 66, 55, 67, 70, 124, 73, 57, 57, 55, 59, 59, 53, 72, 68, 78, 67, 65, 59, 51, 63, 52, 60, 58, 51, 63, 62, 68, 56, 69, 77, 65, 64, 70, 60, 56, 66, 81, 79, 47, 75, 52, 76, 52, 57, 52, 54, 46, 84, 52, 68, 71, 72, 52, 54, 64, 57, 75, 45, 77, 115, 60, 65, 56, 59, 64, 75, 46, 84, 63, 64, 64, 65, 64, 70, 84, 59, 63, 50, 64, 41, 80, 50, 65, 52, 68, 59, 76, 59, 53, 77, 84, 62, 53, 67, 59, 68, 52, 82, 92, 73, 89, 79, 93, 57, 60, 40, 59, 43, 64, 70, 69, 72, 66, 69, 92, 64, 58, 58, 62, 65, 61, 41, 76, 50, 67, 58, 74, 66, 58, 69, 66, 53, 44, 60, 78, 56, 76, 48, 59, 56, 72, 49, 57, 47, 73, 107, 80, 61, 62, 55, 79, 67, 57, 83, 70, 61, 68, 50, 54, 46, 47, 59, 54, 77, 46, 43, 71, 89, 63, 89, 75, 56, 66, 69, 61, 64, 84, 68, 46, 90, 50, 74, 52, 53, 39, 70, 65, 69, 86, 66, 57, 82, 119, 68, 78, 53, 62, 58, 64, 66, 73, 69, 58, 68, 60, 59, 85, 55, 66, 73, 63, 69, 56, 60, 51, 59, 61, 78, 81, 74, 78, 67, 62, 65, 61, 62, 69, 58, 59, 77, 66, 63, 52, 78, 61, 65, 75, 72, 48, 62, 67, 60, 65, 71, 69, 69, 58, 81, 50, 121, 60, 59, 50, 65, 69, 65, 56, 53, 84, 97, 41, 64, 106, 37, 66, 62, 76, 62, 109, 86, 64, 55, 56, 68, 69, 70, 60, 46, 64, 76, 64, 57, 91, 52, 71, 87, 46, 63, 61, 61, 48, 94, 58, 69, 55, 112, 83, 73, 63, 59, 64, 63, 57, 63, 99, 62, 86, 51, 50, 90, 60, 65, 62, 53, 116, 145, 56, 68, 54, 99, 60, 43, 58, 78, 103, 63, 63, 75, 49, 71, 58, 59, 81, 66, 62, 86, 79, 57, 67, 74, 66, 60, 43, 68, 77, 85, 55, 88, 59, 60, 89, 47, 48, 102, 70, 75, 56, 41, 46, 64, 78, 67, 55, 70, 56, 59, 42, 78, 51, 63, 44, 62, 67, 61, 57, 77, 57, 74, 50, 47, 71, 71, 73, 49, 56, 56, 56, 59, 52, 54, 63, 59, 60, 53, 67, 79, 54, 61, 64, 59, 75, 54, 44, 62, 57, 72, 71, 53, 71, 65, 31, 70, 78, 64, 93, 61, 55, 89, 56, 72, 58, 86, 92, 70, 62, 59, 52, 44, 50, 58, 
71, 67, 53, 75, 96, 53, 59, 68, 49, 61, 88, 73, 66, 59, 65, 75, 82, 64, 73, 68, 52, 59, 70, 70, 80, 66, 76, 77, 83, 66, 44, 63, 53, 80, 59, 63, 76, 67, 77, 70, 51, 59, 73, 59, 107, 53, 57, 56, 64, 84, 63, 53, 45, 66, 53, 49, 49, 53, 70, 56, 64, 63, 74, 84, 67, 58, 70, 80, 70, 52, 48, 39, 91, 72, 62, 61, 78, 63, 94, 71, 59, 62, 111, 68, 81, 49, 54, 60, 86, 75, 71, 97, 51, 78, 58, 119, 62, 75, 76, 73, 80, 74, 63, 53, 56, 50, 63, 61, 53, 61, 68, 55, 75, 71, 70, 78, 62, 62, 74, 65, 56, 84, 56, 59, 73, 78, 71, 50, 82, 60, 66, 64, 91, 68, 67, 95, 65, 77, 56, 78, 82, 63, 76, 83, 48, 74, 36, 112, 72, 55, 63, 62, 46, 58, 50, 86, 59, 68, 111, 49, 59, 89, 58, 59, 68, 59, 101, 46, 72, 83, 62, 69, 52, 65, 88, 69, 76, 56, 116, 53, 57, 64, 55, 61, 53, 58, 70, 83, 64, 51, 88, 49, 71, 47, 60, 70, 64, 58, 62, 49, 85, 57, 62, 55, 65, 60, 64, 59, 58, 57, 64, 78, 65, 55, 68, 62, 63, 81, 80, 53, 67, 66, 62, 101, 73, 65, 67, 71, 64, 83, 52, 63, 72, 57, 59, 69, 62, 50, 75, 70, 84, 64, 60, 59, 77, 61, 56, 136, 98, 63, 64, 59, 82, 53, 67, 70, 67, 83, 70, 62, 60, 56, 70, 61, 47, 57, 65, 49, 68, 67, 70, 67, 91, 59, 103, 59, 46, 59, 54, 58, 52, 46, 60, 100, 60, 65, 65, 65, 109, 50, 61, 63, 76, 58, 64, 77, 84, 71, 84, 56, 56, 43, 86, 78, 64, 76, 62, 63, 66, 57, 61, 58, 59, 57, 59, 62, 78, 65, 67, 61, 61, 54, 65, 65, 72, 69, 53, 68, 64, 50, 77, 69, 58, 84, 69, 72, 91, 69, 62, 74, 49, 64, 92, 62, 76, 97, 70, 79, 82, 63, 69, 67, 67, 98, 56, 63, 66, 59, 53, 69, 86, 60, 56, 83, 83, 59, 65, 67, 78, 65, 54, 103, 59, 48, 77, 77, 83, 72, 48, 83, 54, 69, 97, 62, 55, 67, 64, 43, 95, 71, 47, 60, 75, 56, 51, 48, 119, 72, 53, 77, 62, 65, 72, 50, 66, 55, 62, 87, 52, 65, 63, 72, 63, 77, 55, 62, 65, 62, 70, 90, 52, 78, 77, 50, 59, 109, 54, 74, 66, 70, 74, 66, 71, 64, 69, 60, 81, 60, 43, 67, 49, 46, 69, 54, 64, 65, 68, 71, 60, 68, 52, 72, 72, 67, 86, 70, 64, 62, 56, 61, 55, 122, 61, 87, 85, 58, 49, 66, 44, 77, 71, 75, 62, 70, 70, 56, 52, 84, 105, 59, 55, 72, 75, 64, 47, 71, 72, 63, 54, 62, 66, 59, 57, 67, 67, 68, 106, 60, 52, 50, 84, 66, 80, 106, 76, 66, 62, 82, 69, 39, 110, 81, 74, 50, 63, 80, 54, 60, 61, 71, 46, 69, 62, 66, 84, 55, 42, 67, 61, 98, 71, 51, 65, 65, 76, 45, 47, 87, 65, 83, 74, 55, 69, 80, 71, 49, 62, 60, 77, 89, 67, 58, 71, 53, 82, 57, 48, 47, 79, 48, 50, 66, 81, 40, 36, 67, 88, 72, 55, 69, 69, 62, 54, 43, 53, 52, 68, 87, 65, 66, 60, 57, 55, 71, 68, 51, 51, 58, 95, 44, 60, 83, 109, 57, 52, 96, 50, 67, 74, 48, 54, 95, 55, 56, 96, 78, 37, 62, 76, 75, 29, 69, 103, 50, 56, 88, 95, 49, 57, 61, 99, 65, 64, 61, 95, 92, 57, 90, 44, 48, 81, 68, 87, 86, 87, 38, 45, 80, 66, 74, 50, 50, 73, 60, 63, 92, 83, 59, 91, 57, 62, 103, 53, 50, 112, 55, 57, 77, 71, 69, 62, 65, 75, 76, 84, 79, 59, 55, 68, 61, 49, 76, 64, 77, 76, 50, 62, 67, 43, 85, 71, 90, 50, 77, 60, 67, 74, 62, 47, 47, 59, 47, 68, 66, 48, 83, 57, 61, 51, 66, 73, 75, 64, 52, 65, 65, 70, 54, 52, 110, 43, 84, 70, 59, 44, 50, 85, 56, 69, 51, 77, 40, 47, 68, 80, 95, 58, 58, 35, 60, 93, 61, 65, 73, 64, 58, 71, 48, 60, 64, 70, 60, 72, 69, 69, 70, 75, 74, 56, 76, 63, 50, 70, 58, 85, 47, 69, 62, 66, 65, 91, 49, 104, 57, 71, 54, 70, 47, 56, 72, 71, 47, 72, 57, 73, 48, 83, 86, 58, 57, 68, 81, 65, 38, 64, 53, 60, 68, 53, 68, 87, 69, 70, 56, 78, 73, 80, 76, 78, 51, 61, 65, 61, 59, 52, 56, 56, 64, 144, 76, 63, 59, 57, 81, 70, 55, 69, 66, 96, 62, 91, 78, 52, 64, 60, 56, 52, 73, 90, 110, 67, 76, 90, 59, 69, 59, 67, 66, 61, 111, 82, 65, 73, 63, 68, 48, 85, 70, 65, 97, 75, 58, 119, 101, 82, 60, 72, 96, 64, 154, 67, 68, 56, 66, 61, 60, 70, 49, 69, 45, 55, 74, 62, 37, 63, 95, 64, 82, 74, 62, 
51, 55, 47, 60, 63, 71, 99, 65, 52, 42, 55, 82, 57, 60, 68, 69, 58, 56, 45, 80, 59, 48, 71, 47, 66, 67, 56, 45, 92, 70, 59, 68, 62, 55, 57, 60, 90, 67, 63, 57, 47, 73, 59, 74, 96, 61, 83, 56, 74, 65, 52, 56, 73, 52, 71, 56, 53, 63, 53, 65, 67, 54, 87, 70, 82, 47, 59, 58, 95, 73, 70, 57, 87, 46, 52, 74, 65, 64, 55, 67, 107, 86, 87, 46, 74, 119, 58, 52, 53, 60, 63, 68, 95, 55, 74, 70, 45, 55, 107, 47, 88, 47, 73, 72, 97, 68, 69, 92, 54, 59, 59, 58, 77, 64, 76, 62, 54, 52, 51, 38, 60, 74, 54, 57, 80, 73, 54, 81, 55, 53, 62, 54, 54, 57, 71, 63, 111, 69, 64, 44, 52, 62, 49, 89, 64, 62, 55, 69, 44, 54, 65, 69, 89, 60, 88, 52, 70, 64, 63, 65, 56, 55, 42, 66, 54, 54, 56, 53, 70, 54, 80, 51, 66, 69, 68, 68, 61, 48, 61, 74, 75, 60, 71, 66, 68, 58, 58, 60, 76, 58, 64, 61, 75, 40, 79, 62, 70, 39, 104, 53, 65, 53, 65, 71, 82, 38, 80, 74, 35, 68, 77, 64, 63, 65, 62, 75, 64, 65, 43, 70, 49, 28, 88, 65, 62, 60, 47, 48, 57, 72, 44, 71, 63, 89, 79, 64, 77, 52, 50, 68, 69, 68, 56, 119, 77, 64, 58, 59, 72, 61, 73, 62, 60, 67, 68, 62, 51, 72, 79, 54, 74, 52, 74, 46, 74, 76, 61, 45, 53, 63, 57, 55, 49, 107, 79, 107, 62, 45, 72, 70, 56, 81, 57, 89, 65, 69, 61, 61, 60, 59, 62, 85, 64, 52, 73, 58, 68, 63, 68, 52, 56, 53, 65, 87, 60, 61, 112, 54, 97, 62, 68, 73, 72, 80, 55, 83, 70, 92, 61, 97, 74, 79, 63, 61, 55, 47, 67, 63, 79, 62, 74, 53, 71, 70, 52, 48, 42, 52, 45, 60, 39, 57, 53, 79, 49, 60, 75, 62, 79, 73, 75, 62, 62, 64, 67, 41, 78, 69, 62, 62, 51, 49, 52, 61, 49, 62, 70, 71, 92, 121, 60, 112, 42, 57, 56, 68, 51, 82, 62, 53, 67, 75, 52, 47, 64, 67, 52, 135, 69, 55, 57, 78, 62, 77, 64, 44, 99, 63, 87, 73, 66, 57, 66, 54, 68, 67, 70, 76, 48, 71, 53, 60, 65, 75, 51, 72, 72, 59, 73, 62, 43, 72, 58, 66, 60, 54, 44, 51, 56, 61, 62, 67, 91, 74, 82, 34, 67, 49, 79, 56, 73, 82, 63, 75, 61, 44, 83, 89, 68, 95, 69, 97, 58, 56, 52, 63, 74, 36, 54, 51, 81, 110, 72, 60, 61, 48, 62, 65, 72, 109, 77, 41, 77, 105, 90, 72, 75, 73, 58, 63, 59, 54, 60, 81, 58, 75, 52, 57, 74, 59, 66, 67, 42, 89, 104, 50, 57, 69, 58, 67, 81, 62, 73, 71, 49, 74, 53, 99, 62, 49, 53, 71, 80, 72, 74, 49, 60, 77, 54, 63, 60, 56, 65, 63, 48, 60, 62, 67, 60, 73, 76, 83, 59, 68, 54, 48, 56, 77, 76, 72, 47, 66, 72, 79, 77, 57, 41, 63, 93, 62, 80, 65, 59, 65, 61, 75, 73, 60, 62, 77, 60, 63, 73, 80, 58, 69, 63, 80, 75, 74, 61, 80, 48, 59, 76, 81, 68, 58, 65, 51, 72, 63, 62, 57, 59, 46, 54, 58, 58, 63, 78, 51, 58, 64, 85, 51, 100, 47, 50, 61, 54, 82, 73, 52, 84, 70, 71, 56, 64, 100, 62, 70, 63, 69, 52, 63, 70, 66, 71, 53, 80, 65, 52, 58, 54, 54, 72, 61, 101, 60, 59, 82, 55, 59, 67, 79, 73, 80, 64, 85, 70, 61, 52, 61, 95, 73, 66, 80, 47, 70, 71, 74, 70, 77, 71, 57, 64, 64, 91, 57, 72, 58, 76, 53, 47, 56, 64, 65, 96, 67, 76, 84, 67, 50, 59, 63, 78, 61, 69, 78, 55, 70, 46, 86, 69, 57, 50, 61, 58, 64, 70, 76, 70, 63, 76, 59, 77, 79, 62, 61, 69, 64, 88, 74, 45, 79, 69, 100, 59, 72, 76, 74, 47, 82, 50, 68, 58, 76, 56, 109, 66, 63, 67, 60, 60, 105, 49, 66, 49, 70, 59, 67, 56, 61, 41, 58, 67, 67, 63, 93, 59, 66, 55, 42, 76, 64, 62, 53, 57, 62, 63, 48, 63, 42, 51, 80, 64, 75, 60, 73, 48, 63, 65, 78, 74, 55, 57, 90, 66, 79, 60, 65, 47, 55, 56, 76, 63, 77, 57, 55, 88, 55, 63, 54, 63, 74, 43, 61, 82, 77, 79, 73, 47, 81, 37, 53, 59, 59, 46, 61, 67, 60, 73, 81, 83, 63, 62, 76, 98, 40, 53, 108, 61, 62, 70, 74, 78, 74, 74, 57, 91, 67, 57, 62, 62, 45, 68, 60, 64, 52, 72, 60, 58, 77, 94, 56, 69, 101, 70, 65, 89, 71, 62, 56, 60, 78, 65, 85, 65, 48, 58, 55, 79, 81, 45, 67, 52, 55, 55, 58, 63, 49, 73, 106, 71, 72, 70, 55, 56, 43, 66, 74, 63, 65, 36, 68, 68, 55, 60, 57, 61, 57, 
41, 49, 61, 68, 51, 71, 72, 62, 62, 76, 60, 52, 84, 49, 50, 46, 68, 85, 53, 61, 84, 55, 64, 62, 47, 66, 48, 88, 73, 50, 56, 45, 88, 66, 60, 96, 83, 67, 63, 69, 70, 57, 60, 61, 64, 51, 60, 66, 61, 75, 64, 84, 76, 51, 66, 82, 61, 63, 45, 68, 67, 79, 59, 32, 57, 84, 63, 66, 53, 80, 88, 71, 64, 80, 87, 71, 60, 46, 49, 61, 60, 45, 80, 90, 62, 55, 78, 68, 62, 61, 66, 61, 75, 74, 69, 69, 65, 68, 87, 55, 83, 61, 70, 49, 66, 60, 55, 92, 90, 49, 70, 42, 66, 57, 63, 49, 45, 81, 48, 98, 65, 52, 52, 55, 90, 49, 70, 77, 79, 67, 67, 64, 84, 59, 63, 56, 108, 82, 80, 89, 52, 73, 62, 76, 107, 66, 67, 80, 62, 60, 51, 109, 81, 76, 67, 100, 60, 69, 49, 62, 79, 54, 66, 58, 57, 48, 67, 67, 60, 61, 53, 52, 79, 42, 58, 40, 67, 43, 76, 67, 75, 47, 56, 63, 73, 60, 57, 67, 60, 60, 62, 73, 63, 75, 80, 87, 88, 64, 52, 80, 58, 78, 84, 48, 69, 80, 47, 60, 54, 78, 61, 66, 70, 57, 72, 69, 101, 73, 84, 59, 79, 51, 44, 63, 90, 85, 55, 74, 56, 83, 82, 96, 62, 58, 50, 76, 81, 69, 61, 74, 64, 75, 74, 88, 85, 45, 62, 73, 81, 71, 48, 61, 68, 66, 67, 40, 67, 76, 70, 78, 53, 74, 61, 63, 87, 55, 66, 83, 47, 69, 87, 84, 79, 69, 97, 72, 62, 48, 45, 67, 73, 64, 80, 68, 71, 69, 68, 49, 72, 52, 50, 66, 83, 56, 74, 69, 57, 63, 52, 67, 61, 45, 86, 74, 66, 51, 71, 65, 60, 63, 70, 63, 86, 73, 91, 61, 41, 64, 56, 69, 56, 58, 45, 65, 78, 53, 61, 57, 39, 48, 62, 107, 59, 61, 74, 50, 90, 59, 60, 62, 82, 103, 68, 65, 91, 62, 62, 66, 51, 74, 49, 67, 62, 52, 45, 60, 56, 83, 67, 85, 79, 62, 67, 72, 42, 74, 93, 48, 68, 80, 57, 65, 66, 51, 61, 66, 95, 77, 56, 63, 59, 89, 70, 45, 76, 71, 75, 50, 68, 64, 90, 68, 61, 63, 65, 52, 58, 46, 63, 54, 52, 43, 63, 62, 79, 75, 70, 51, 78, 69, 39, 46, 94, 67, 54, 61, 68, 53, 63, 82, 96, 67, 79, 71, 78, 73, 58, 51, 66, 64, 47, 74, 61, 71, 37, 83, 56, 58, 48, 56, 48, 92, 68, 62, 65, 61, 105, 49, 45, 56, 58, 55, 49, 52, 71, 84, 49, 74, 65, 90, 89, 63, 83, 64, 85, 73, 80, 63, 52, 111, 72, 81, 64, 121, 92, 54, 66, 70, 66, 55, 58, 82, 36, 64, 88, 85, 56, 102, 89, 53, 110, 59, 65, 70, 53, 71, 73, 62, 88, 59, 62, 69, 97, 64, 79, 59, 46, 77, 63, 89, 66, 55, 48, 82, 67, 62, 88, 62, 82, 55, 76, 65, 70, 69, 47, 81, 54, 61, 87, 56, 58, 75, 72, 67, 63, 98, 67, 55, 96, 60, 64, 62, 79, 63, 62, 58, 57, 63, 84, 52, 77, 64, 76, 58, 58, 80, 69, 64, 80, 57, 57, 85, 53, 77, 73, 55, 57, 84, 82, 60, 55, 85, 65, 58, 54, 79, 67, 57, 69, 45, 65, 51, 68, 78, 75, 59, 81, 53, 54, 60, 80, 59, 53, 81, 70, 55, 86, 62, 78, 47, 76, 55, 54, 67, 50, 77, 61, 60, 64, 61, 63, 67, 71, 61, 93, 54, 61, 73, 54, 65, 60, 68, 59, 78, 57, 60, 53, 54, 74, 110, 65, 65, 59, 65, 35, 75, 57, 65, 60, 74, 50, 59, 63, 66, 61, 60, 69, 56, 63, 65, 68, 76, 56, 77, 78, 62, 66, 45, 83, 72, 54, 54, 76, 65, 50, 86, 78, 47, 63, 63, 78, 62, 46, 64, 76, 109, 68, 84, 78, 65, 64, 63, 69, 62, 47, 71, 74, 67, 63, 63, 61, 79, 83, 72, 73, 70, 66, 58, 55, 68, 54, 59, 71, 65, 56, 103, 55, 53, 57, 62, 77, 67, 56, 79, 54, 76, 63, 70, 54, 84, 69, 61, 45, 70, 53, 53, 65, 62, 72, 84, 81, 67, 72, 78, 42, 57, 54, 103, 64, 54, 34, 46, 65, 53, 60, 52, 70, 57, 75, 71, 53, 65, 94, 77, 79, 77, 77, 87, 79, 48, 67, 59, 53, 97, 71, 66, 53, 73, 93, 38, 60, 49, 70, 52, 33, 37, 86, 83, 76, 61, 62, 51, 69, 74, 81, 70, 76, 76, 56, 64, 81, 60, 55, 65, 79, 70, 68, 78, 61, 47, 77, 39, 48, 70, 50, 70, 70, 80, 61, 84, 53, 49, 53, 80, 63, 34, 44, 105, 46, 54, 67, 62, 66, 70, 74, 67, 100, 95, 55, 76, 43, 87, 62, 46, 43, 98, 62, 93, 60, 58, 79, 71, 103, 46, 55, 54, 72, 55, 68, 84, 98, 49, 84, 65, 51, 53, 57, 60, 43, 61, 69, 85, 61, 60, 72, 52, 53, 76, 52, 71, 71, 56, 63, 53, 73, 67, 64, 69, 66, 65, 72, 42, 60, 51, 
63, 70, 62, 122, 62, 50, 61, 46, 119, 99, 51, 71, 71, 68, 89, 74, 61, 47, 72, 52, 58, 95, 62, 72, 63, 63, 79, 61, 71, 92, 72, 90, 56, 69, 83, 67, 92, 83, 59, 40, 63, 53, 79, 62, 62, 56, 64, 53, 80, 65, 53, 73, 59, 109, 62, 60, 52, 94, 56, 42, 72, 67, 72, 53, 58, 44, 54, 66, 63, 54, 38, 111, 53, 72, 72, 63, 55, 57, 74, 59, 66, 50, 72, 65, 69, 82, 85, 94, 85, 63, 60, 74, 90, 88, 82, 71, 53, 61, 58, 67, 46, 85, 90, 56, 92, 59, 96, 58, 83, 69, 62, 53, 46, 60, 68, 69, 65, 66, 88, 76, 57, 50, 70, 106, 89, 97, 65, 78, 77, 62, 71, 121, 110, 66, 73, 60, 74, 67, 79, 78, 67, 77, 70, 55, 43, 68, 132, 85, 71, 150, 65, 72, 60, 85, 94, 37, 46, 86, 77, 68, 82, 97, 71, 41, 72, 62, 57, 58, 62, 93, 84, 90, 62, 84, 63, 55, 53, 105, 30, 49, 58, 60, 52, 103, 100, 63, 66, 80, 45, 52, 61, 74, 60, 49, 94, 85, 79, 58, 77, 62, 88, 74, 72, 77, 69, 50, 79, 68, 62, 81, 46, 56, 67, 57, 87, 71, 67, 84, 58, 65, 46, 57, 53, 66, 56, 67, 40, 47, 72, 56, 50, 74, 71, 56, 65, 77, 57, 46, 51, 82, 76, 73, 63, 39, 62, 35, 47, 59, 40, 64, 63, 114, 44, 58, 69, 60, 49, 69, 64, 79, 65, 63, 69, 78, 92, 73, 74, 84, 61, 48, 55, 48, 69, 58, 51, 98, 60, 80, 55, 56, 61, 53, 60, 70, 67, 61, 58, 63, 71, 48, 47, 63, 52, 72, 69, 53, 53, 74, 79, 52, 73, 42, 74, 72, 51, 51, 58, 68, 50, 88, 84, 57, 66, 57, 81, 58, 104, 61, 56, 37, 70, 93, 87, 60, 65, 58, 84, 80, 76, 79, 56, 53, 79, 76, 61, 80, 77, 77, 52, 60, 73, 49, 53, 78, 64, 93, 47, 62, 62, 91, 53, 55, 64, 53, 48, 89, 61, 59, 69, 61, 79, 55, 62, 70, 79, 54, 74, 73, 61, 53, 57, 59, 63, 57, 65, 66, 52, 66, 67, 78, 40, 69, 85, 63, 51, 60, 41, 72, 55, 77, 53, 72, 72, 53, 58, 81, 84, 49, 65, 75, 95, 50, 54, 73, 93, 57, 57, 70, 84, 61, 54, 59, 65, 72, 70, 97, 50, 79, 62, 63, 76, 76, 53, 49, 56, 44, 74, 39, 97, 63, 84, 63, 65, 72, 81, 65, 88, 68, 52, 40, 66, 52, 61, 68, 59, 76, 90, 43, 57, 74, 57, 69, 50, 56, 43, 59, 72, 76, 43, 63, 48, 71, 73, 70, 101, 61, 71, 60, 68, 64, 62, 47, 61, 54, 73, 58, 66, 68, 59, 39, 72, 70, 62, 56, 67, 63, 74, 60, 44, 50, 77, 70, 65, 60, 58, 89, 53, 83, 54, 65, 63, 46, 79, 75, 63, 99, 40, 78, 69, 53, 71, 68, 109, 87, 49, 55, 46, 48, 47, 63, 92, 64, 87, 77, 55, 67, 72, 60, 64, 51, 59, 44, 56, 74, 71, 81, 50, 101, 83, 67, 79, 70, 83, 95, 63, 65, 51, 69, 53, 54, 72, 59, 64, 69, 84, 34, 35, 69, 53, 68, 66, 38, 77, 78, 60, 86, 70, 53, 66, 78, 41, 108, 86, 61, 69, 69, 57, 71, 52, 64, 61, 56, 57, 61, 97, 75, 75, 61, 66, 65, 84, 62, 56, 57, 59, 59, 76, 59, 59, 58, 53, 89, 56, 68, 63, 59, 61, 90, 67, 78, 73, 53, 64, 66, 57, 67, 57, 63, 54, 59, 70, 47, 53, 80, 63, 74, 66, 52, 62, 60, 92, 81, 60, 53, 44, 45, 57, 66, 77, 76, 76, 54, 74, 121, 58, 50, 53, 48, 48, 75, 56, 59, 65, 78, 63, 61, 82, 69, 37, 68, 69, 84, 53, 74, 57, 75, 78, 61, 76, 57, 62, 48, 81, 62, 91, 82, 54, 53, 70, 65, 78, 70, 38, 50, 69, 57, 80, 71, 84, 50, 66, 92, 61, 60, 78, 53, 61, 71, 98, 105, 56, 57, 75, 78, 74, 46, 84, 78, 73, 69, 63, 43, 66, 55, 48, 79, 72, 59, 84, 61, 72, 68, 40, 69, 63, 62, 75, 50, 80, 69, 97, 81, 59, 68, 100, 41, 62, 60, 87, 54, 76, 53, 64, 67, 65, 54, 56, 48, 45, 68, 93, 56, 56, 96, 67, 71, 64, 87, 64, 59, 62, 77, 62, 64, 69, 134, 57, 48, 66, 70, 65, 68, 59, 63, 71, 84, 66, 80, 68, 99, 54, 75, 63, 84, 74, 56, 63, 64, 65, 62, 43, 120, 60, 60, 71, 51, 81, 56, 46, 65, 62, 62, 59, 56, 63, 62, 58, 101, 68, 61, 72, 64, 39, 67, 78, 52, 57, 57, 61, 83, 64, 77, 132, 92, 56, 72, 77, 51, 82, 53, 67, 50, 74, 100, 69, 57, 72, 60, 54, 54, 60, 42, 69, 50, 57, 61, 75, 92, 49, 60, 84, 47, 61, 80, 76, 71, 67, 73, 53, 72, 62, 61, 85, 59, 83, 70, 52, 67, 74, 85, 96, 53, 70, 60, 64, 62, 53, 68, 63, 56, 70, 50, 
[Elided: a large machine-generated block of comma-separated integer values carried in the diff body (raw numeric data payload from the changed files); it contains no human-readable content and is omitted here.]
63, 74, 83, 52, 46, 86, 67, 66, 56, 81, 75, 97, 90, 53, 58, 68, 55, 51, 52, 49, 68, 51, 74, 57, 93, 66, 66, 67, 66, 30, 58, 92, 50, 81, 62, 75, 55, 62, 65, 63, 60, 60, 67, 61, 63, 78, 75, 99, 73, 74, 48, 48, 74, 60, 84, 84, 61, 73, 34, 43, 67, 57, 54, 74, 82, 76, 78, 71, 63, 78, 70, 38, 69, 88, 55, 97, 81, 38, 58, 71, 51, 89, 68, 55, 105, 64, 51, 65, 54, 66, 75, 52, 57, 59, 87, 64, 53, 59, 83, 71, 64, 42, 61, 80, 60, 59, 73, 73, 59, 75, 65, 49, 59, 91, 47, 73, 61, 55, 88, 67, 62, 75, 106, 90, 77, 54, 73, 60, 99, 61, 65, 48, 59, 88, 81, 66, 41, 58, 78, 74, 59, 65, 65, 44, 86, 67, 76, 59, 71, 78, 55, 61, 75, 53, 51, 36, 76, 64, 36, 92, 61, 82, 71, 104, 73, 60, 90, 69, 84, 58, 52, 110, 87, 52, 60, 60, 80, 96, 69, 61, 62, 79, 58, 57, 60, 114, 84, 63, 37, 60, 67, 77, 55, 63, 69, 89, 69, 54, 82, 78, 49, 72, 67, 66, 64, 79, 61, 46, 61, 54, 54, 56, 48, 83, 61, 97, 54, 87, 73, 90, 69, 83, 56, 84, 90, 56, 50, 81, 79, 48, 50, 52, 146, 55, 69, 72, 93, 71, 60, 38, 62, 126, 76, 80, 83, 60, 71, 76, 51, 53, 68, 84, 48, 39, 51, 79, 84, 51, 88, 45, 70, 59, 71, 55, 67, 65, 74, 51, 55, 77, 68, 50, 71, 74, 67, 85, 114, 57, 58, 61, 50, 66, 78, 57, 102, 53, 71, 54, 78, 60, 51, 63, 49, 53, 48, 55, 40, 82, 50, 60, 61, 63, 81, 80, 69, 78, 68, 85, 84, 84, 93, 78, 41, 57, 52, 52, 46, 60, 67, 74, 47, 95, 68, 38, 49, 39, 65, 54, 87, 64, 65, 64, 51, 65, 83, 52, 55, 58, 69, 79, 63, 49, 47, 71, 80, 76, 51, 43, 56, 63, 73, 55, 75, 55, 78, 58, 60, 61, 71, 55, 65, 93, 42, 61, 42, 66, 97, 86, 70, 63, 60, 84, 58, 69, 55, 45, 102, 71, 50, 41, 62, 100, 77, 56, 67, 69, 92, 68, 45, 67, 77, 68, 59, 86, 61, 42, 62, 54, 125, 77, 104, 136, 67, 42, 53, 58, 78, 61, 89, 48, 56, 58, 82, 88, 58, 58, 66, 64, 115, 52, 41, 72, 80, 49, 58, 77, 49, 61, 65, 51, 45, 84, 45, 82, 63, 71, 99, 81, 52, 76, 69, 100, 56, 55, 82, 61, 50, 79, 51, 75, 51, 30, 82, 62, 73, 60, 58, 86, 47, 55, 37, 76, 33, 41, 66, 67, 58, 65, 71, 120, 76, 82, 62, 54, 64, 31, 59, 63, 67, 53, 34, 61, 59, 68, 67, 89, 59, 66, 51, 141, 55, 52, 40, 85, 56, 45, 41, 90, 56, 110, 56, 61, 71, 63, 66, 52, 56, 60, 61, 56, 90, 64, 71, 68, 58, 35, 75, 56, 58, 48, 54, 60, 84, 55, 67, 55, 36, 57, 63, 84, 62, 72, 41, 42, 61, 49, 63, 50, 47, 47, 80, 31, 46, 54, 52, 55, 57, 77, 88, 48, 98, 53, 68, 72, 100, 93, 67, 65, 48, 128, 56, 47, 69, 71, 96, 56, 80, 58, 55, 41, 44, 67, 58, 91, 60, 64, 44, 97, 45, 71, 60, 61, 89, 95, 52, 62, 62, 101, 68, 94, 62, 90, 44, 69, 56, 48, 56, 46, 68, 69, 60, 110, 57, 71, 49, 76, 49, 45, 76, 60, 39, 65, 51, 61, 53, 87, 64, 39, 53, 58, 69, 47, 91, 79, 66, 57, 66, 103, 40, 75, 41, 102, 52, 68, 46, 64, 62, 49, 56, 25, 84, 48, 60, 93, 65, 97, 87, 53, 80, 115, 48, 71, 69, 72, 110, 47, 72, 44, 53, 98, 62, 53, 39, 54, 66, 64, 71, 68, 91, 52, 54, 93, 68, 73, 80, 55, 72, 65, 66, 44, 76, 31, 88, 53, 61, 84, 60, 69, 77, 47, 58, 49, 99, 75, 72, 67, 61, 43, 94, 53, 47, 61, 89, 71, 46, 65, 61, 72, 57, 66, 71, 82, 75, 53, 108, 65, 61, 63, 66, 62, 76, 74, 54, 106, 84, 69, 102, 51, 60, 53, 46, 59, 42, 69, 45, 71, 63, 72, 62, 58, 93, 54, 75, 54, 67, 34, 84, 54, 80, 69, 87, 88, 61, 66, 86, 74, 73, 62, 61, 58, 84, 59, 73, 86, 59, 77, 77, 71, 58, 96, 67, 62, 70, 60, 51, 84, 63, 99, 48, 49, 87, 76, 66, 53, 91, 55, 80, 41, 73, 70, 55, 73, 65, 87, 80, 50, 51, 73, 62, 60, 48, 65, 62, 51, 66, 67, 60, 51, 63, 68, 48, 71, 84, 86, 88, 57, 75, 74, 53, 86, 70, 57, 79, 72, 66, 72, 66, 46, 69, 83, 73, 92, 68, 67, 83, 71, 60, 62, 44, 82, 102, 61, 58, 81, 41, 80, 55, 53, 62, 78, 62, 68, 85, 55, 59, 71, 74, 59, 53, 54, 51, 77, 44, 135, 65, 47, 46, 57, 99, 66, 56, 52, 99, 72, 70, 82, 65, 74, 53, 90, 
70, 62, 55, 66, 54, 91, 82, 85, 71, 71, 72, 76, 47, 62, 82, 44, 81, 69, 62, 57, 57, 69, 71, 80, 48, 58, 64, 57, 60, 76, 57, 61, 66, 36, 70, 64, 60, 57, 69, 61, 46, 113, 55, 87, 58, 58, 84, 71, 55, 50, 52, 55, 70, 60, 59, 75, 60, 76, 58, 43, 84, 56, 71, 50, 80, 66, 66, 102, 61, 75, 55, 35, 79, 71, 66, 49, 80, 35, 68, 78, 40, 60, 67, 81, 68, 50, 64, 63, 60, 56, 74, 68, 65, 113, 43, 61, 75, 62, 82, 59, 86, 76, 62, 34, 90, 54, 47, 59, 78, 63, 70, 85, 81, 47, 65, 75, 73, 53, 54, 40, 74, 73, 56, 70, 82, 77, 84, 65, 50, 57, 64, 82, 84, 65, 82, 79, 87, 63, 57, 50, 60, 60, 77, 43, 92, 51, 49, 66, 63, 78, 80, 57, 64, 44, 72, 57, 58, 63, 71, 67, 66, 60, 67, 63, 51, 84, 54, 47, 57, 56, 47, 58, 56, 99, 68, 73, 72, 56, 87, 66, 42, 66, 52, 62, 109, 76, 73, 86, 63, 74, 74, 70, 52, 109, 49, 56, 40, 81, 80, 77, 55, 76, 58, 70, 59, 75, 81, 56, 125, 119, 77, 91, 63, 40, 73, 65, 79, 58, 64, 53, 58, 58, 46, 69, 60, 58, 98, 56, 76, 58, 60, 62, 76, 56, 75, 58, 68, 60, 45, 57, 88, 46, 53, 55, 60, 64, 68, 70, 50, 70, 56, 59, 73, 54, 75, 69, 63, 36, 57, 63, 78, 76, 49, 77, 58, 58, 62, 61, 64, 69, 53, 52, 62, 77, 83, 72, 58, 84, 78, 60, 65, 88, 79, 68, 67, 92, 69, 55, 78, 90, 59, 50, 65, 51, 63, 69, 102, 80, 82, 90, 47, 110, 73, 72, 80, 81, 65, 143, 63, 85, 62, 61, 79, 87, 50, 70, 117, 81, 57, 65, 78, 54, 65, 55, 79, 81, 66, 57, 40, 75, 85, 43, 55, 52, 58, 70, 73, 68, 110, 49, 59, 73, 80, 65, 56, 50, 99, 51, 62, 58, 54, 58, 58, 86, 81, 75, 48, 55, 73, 86, 78, 61, 57, 64, 43, 58, 44, 109, 90, 94, 66, 59, 68, 69, 58, 63, 49, 42, 44, 69, 48, 70, 60, 46, 64, 84, 71, 51, 64, 41, 75, 65, 68, 80, 54, 51, 56, 72, 84, 63, 68, 48, 75, 44, 75, 90, 67, 65, 54, 67, 41, 70, 58, 65, 84, 61, 64, 63, 70, 92, 77, 71, 39, 47, 49, 80, 52, 63, 98, 49, 88, 64, 45, 89, 67, 70, 70, 52, 59, 44, 72, 79, 55, 66, 112, 70, 47, 53, 61, 60, 74, 120, 61, 71, 67, 54, 65, 58, 79, 66, 57, 62, 46, 88, 47, 49, 64, 56, 52, 67, 56, 54, 43, 67, 61, 60, 56, 44, 76, 46, 95, 55, 83, 41, 61, 68, 65, 60, 85, 56, 75, 66, 57, 67, 56, 56, 54, 69, 63, 42, 35, 52, 35, 64, 68, 65, 67, 63, 115, 71, 41, 75, 55, 75, 47, 81, 87, 57, 75, 89, 85, 71, 42, 74, 42, 87, 53, 56, 96, 78, 65, 61, 78, 48, 78, 42, 52, 71, 52, 81, 53, 89, 85, 88, 59, 69, 59, 63, 69, 61, 74, 79, 84, 73, 56, 70, 71, 55, 58, 66, 53, 65, 51, 94, 50, 65, 53, 62, 73, 46, 94, 77, 71, 52, 73, 85, 56, 71, 70, 65, 60, 70, 53, 64, 47, 50, 74, 59, 50, 55, 65, 73, 40, 50, 58, 53, 61, 68, 58, 78, 64, 62, 93, 87, 54, 68, 82, 56, 71, 72, 62, 60, 72, 57, 66, 56, 52, 69, 68, 75, 67, 69, 56, 62, 81, 77, 30, 59, 67, 52, 81, 38, 56, 58, 66, 49, 59, 60, 56, 35, 56, 53, 74, 60, 102, 82, 71, 62, 82, 72, 84, 52, 58, 57, 98, 97, 57, 56, 81, 75, 77, 52, 72, 60, 69, 66, 85, 80, 58, 73, 48, 67, 51, 84, 110, 62, 62, 47, 49, 57, 54, 71, 83, 45, 101, 63, 36, 44, 33, 67, 84, 56, 57, 44, 71, 68, 80, 48, 50, 42, 47, 87, 65, 111, 59, 62, 65, 90, 86, 52, 39, 60, 34, 68, 73, 66, 62, 54, 92, 108, 49, 86, 59, 62, 60, 45, 79, 73, 60, 59, 87, 63, 82, 76, 64, 60, 70, 39, 66, 73, 76, 71, 88, 65, 55, 46, 80, 55, 58, 58, 64, 55, 92, 46, 107, 65, 78, 56, 56, 33, 72, 56, 56, 60, 55, 49, 72, 84, 82, 67, 65, 82, 90, 71, 50, 80, 35, 54, 84, 76, 39, 44, 57, 56, 83, 51, 73, 64, 83, 63, 63, 92, 45, 39, 71, 82, 67, 71, 62, 58, 73, 43, 67, 53, 56, 55, 53, 68, 78, 82, 60, 83, 67, 63, 54, 82, 70, 53, 67, 39, 90, 55, 61, 51, 64, 46, 87, 56, 68, 94, 62, 60, 67, 71, 62, 51, 69, 82, 53, 79, 77, 55, 71, 83, 52, 52, 89, 108, 49, 99, 78, 39, 60, 89, 77, 63, 53, 61, 49, 47, 58, 59, 46, 100, 70, 65, 82, 74, 59, 73, 86, 48, 50, 70, 51, 64, 62, 61, 65, 59, 50, 68, 
62, 44, 55, 48, 91, 61, 74, 76, 59, 67, 59, 61, 60, 97, 60, 61, 65, 63, 59, 48, 70, 53, 89, 81, 76, 74, 59, 70, 42, 63, 54, 56, 62, 53, 40, 62, 78, 62, 85, 79, 88, 61, 76, 81, 77, 67, 59, 70, 74, 54, 64, 42, 53, 65, 56, 81, 48, 64, 77, 92, 62, 86, 70, 71, 64, 70, 68, 80, 72, 80, 62, 68, 68, 70, 72, 74, 80, 69, 68, 67, 62, 69, 55, 105, 86, 75, 59, 72, 64, 56, 47, 62, 59, 54, 83, 61, 71, 66, 68, 58, 52, 66, 60, 62, 42, 70, 57, 51, 53, 83, 51, 57, 59, 65, 63, 71, 93, 57, 63, 70, 60, 62, 67, 53, 60, 76, 66, 67, 59, 81, 76, 66, 70, 74, 69, 66, 74, 75, 60, 50, 74, 49, 62, 76, 64, 59, 47, 53, 58, 50, 66, 54, 54, 58, 66, 112, 53, 52, 57, 72, 56, 58, 80, 52, 53, 87, 57, 51, 55, 59, 47, 62, 68, 51, 73, 65, 72, 59, 90, 58, 81, 50, 68, 60, 57, 65, 50, 75, 62, 55, 51, 59, 55, 70, 79, 76, 60, 64, 60, 77, 81, 65, 70, 60, 68, 83, 86, 50, 107, 60, 74, 63, 63, 49, 52, 48, 94, 71, 74, 75, 67, 61, 55, 55, 54, 64, 56, 69, 67, 42, 64, 59, 58, 65, 90, 70, 80, 81, 73, 83, 50, 88, 46, 58, 73, 59, 69, 58, 70, 56, 63, 45, 66, 53, 49, 109, 64, 63, 43, 66, 44, 80, 76, 58, 56, 68, 73, 65, 78, 53, 75, 59, 68, 67, 63, 55, 70, 91, 68, 62, 50, 64, 60, 53, 76, 61, 83, 69, 66, 90, 79, 71, 93, 80, 69, 57, 69, 76, 69, 58, 60, 50, 51, 89, 54, 54, 66, 41, 50, 67, 68, 63, 75, 82, 63, 44, 106, 146, 54, 60, 76, 59, 66, 57, 73, 53, 75, 46, 76, 63, 50, 70, 62, 63, 63, 57, 69, 96, 64, 61, 73, 58, 56, 69, 57, 65, 50, 57, 67, 96, 47, 52, 52, 80, 67, 75, 63, 48, 66, 66, 82, 95, 64, 63, 58, 47, 55, 61, 85, 55, 70, 63, 59, 93, 81, 75, 63, 63, 62, 73, 72, 71, 84, 53, 57, 81, 66, 57, 54, 63, 68, 74, 81, 106, 58, 90, 64, 113, 56, 55, 54, 63, 60, 72, 65, 55, 62, 65, 86, 52, 55, 61, 50, 75, 74, 64, 62, 86, 54, 50, 67, 44, 93, 75, 63, 60, 63, 72, 59, 70, 72, 63, 63, 55, 74, 41, 61, 69, 71, 88, 53, 55, 70, 53, 86, 53, 45, 76, 73, 72, 63, 57, 57, 50, 72, 71, 51, 55, 52, 66, 64, 65, 59, 67, 64, 52, 46, 54, 67, 50, 41, 85, 59, 70, 71, 72, 52, 62, 125, 59, 63, 54, 54, 60, 49, 53, 65, 79, 67, 49, 62, 62, 66, 46, 56, 75, 93, 65, 84, 77, 67, 63, 71, 57, 75, 62, 43, 61, 50, 38, 56, 68, 61, 63, 59, 62, 80, 70, 53, 82, 71, 74, 64, 66, 61, 99, 62, 69, 43, 73, 68, 48, 61, 47, 60, 60, 64, 67, 66, 58, 48, 76, 77, 68, 74, 72, 65, 77, 90, 72, 64, 66, 66, 76, 57, 78, 66, 65, 76, 58, 59, 88, 67, 54, 59, 76, 65, 69, 55, 64, 140, 67, 73, 45, 73, 83, 73, 51, 65, 58, 62, 75, 59, 67, 89, 78, 86, 54, 72, 59, 55, 56, 63, 64, 53, 87, 43, 56, 54, 64, 67, 74, 117, 77, 41, 44, 54, 57, 66, 74, 77, 98, 60, 44, 66, 68, 71, 59, 61, 75, 72, 65, 60, 81, 81, 75, 53, 77, 56, 77, 66, 57, 60, 77, 75, 73, 65, 48, 96, 60, 54, 118, 66, 71, 68, 99, 73, 56, 75, 64, 86, 57, 71, 45, 59, 75, 54, 43, 91, 48, 67, 53, 85, 47, 56, 72, 62, 61, 63, 63, 61, 66, 93, 57, 74, 61, 67, 59, 94, 66, 53, 58, 71, 75, 67, 50, 43, 67, 77, 51, 74, 49, 82, 67, 61, 64, 57, 64, 46, 62, 87, 64, 60, 59, 65, 70, 60, 86, 68, 64, 78, 58, 88, 92, 59, 66, 54, 65, 93, 61, 74, 51, 86, 60, 85, 93, 55, 52, 87, 56, 86, 61, 92, 59, 72, 62, 49, 57, 77, 66, 60, 64, 79, 54, 43, 97, 64, 52, 101, 48, 81, 70, 40, 67, 61, 83, 52, 49, 95, 55, 59, 49, 58, 65, 62, 53, 66, 60, 73, 47, 95, 72, 78, 74, 39, 51, 60, 70, 64, 63, 71, 54, 41, 61, 72, 52, 76, 77, 58, 60, 88, 93, 76, 62, 60, 47, 70, 72, 60, 58, 81, 82, 96, 46, 56, 53, 62, 61, 69, 66, 80, 58, 50, 64, 64, 60, 57, 47, 60, 100, 57, 56, 75, 43, 54, 50, 105, 82, 54, 52, 61, 71, 64, 68, 57, 79, 81, 74, 76, 95, 94, 74, 62, 78, 72, 101, 61, 66, 77, 96, 65, 77, 41, 58, 57, 115, 67, 52, 61, 49, 57, 50, 81, 101, 54, 67, 59, 53, 56, 86, 54, 68, 57, 50, 75, 59, 56, 61, 76, 57, 67, 102, 62, 
100, 55, 80, 88, 57, 65, 80, 67, 53, 97, 68, 52, 50, 75, 66, 92, 66, 56, 66, 59, 56, 65, 60, 114, 77, 70, 58, 55, 53, 62, 75, 78, 103, 50, 74, 82, 61, 66, 60, 58, 60, 73, 69, 60, 78, 60, 46, 73, 55, 45, 86, 71, 63, 61, 73, 59, 60, 76, 92, 54, 75, 48, 42, 53, 70, 62, 52, 58, 81, 51, 64, 64, 63, 78, 87, 66, 57, 53, 49, 52, 50, 62, 59, 72, 79, 72, 59, 61, 64, 81, 64, 74, 47, 61, 81, 62, 71, 58, 52, 48, 71, 61, 74, 69, 56, 68, 61, 62, 70, 68, 61, 67, 109, 70, 63, 69, 65, 76, 74, 68, 51, 90, 74, 63, 63, 55, 60, 63, 64, 61, 83, 56, 50, 60, 47, 79, 68, 46, 55, 68, 89, 61, 81, 52, 65, 36, 70, 82, 51, 63, 46, 91, 57, 86, 60, 75, 66, 56, 71, 74, 89, 66, 63, 61, 54, 63, 71, 61, 72, 67, 76, 61, 70, 51, 52, 72, 46, 61, 62, 73, 129, 60, 66, 78, 65, 68, 73, 79, 80, 70, 57, 56, 61, 70, 64, 70, 45, 54, 58, 90, 93, 71, 64, 63, 52, 63, 57, 83, 41, 66, 91, 72, 53, 67, 51, 68, 76, 79, 69, 76, 64, 70, 80, 62, 57, 63, 55, 83, 69, 77, 59, 53, 58, 62, 70, 55, 79, 59, 60, 59, 52, 59, 78, 67, 46, 76, 62, 72, 52, 70, 71, 58, 61, 65, 50, 87, 120, 60, 66, 49, 55, 65, 71, 66, 68, 49, 57, 61, 70, 98, 56, 55, 62, 58, 76, 53, 57, 83, 76, 58, 93, 79, 64, 74, 59, 57, 84, 74, 85, 52, 75, 58, 60, 67, 61, 104, 51, 50, 41, 64, 67, 58, 82, 59, 70, 71, 69, 55, 73, 48, 82, 46, 69, 75, 58, 66, 68, 66, 66, 72, 49, 74, 57, 44, 73, 51, 53, 60, 55, 74, 60, 63, 80, 73, 84, 61, 56, 52, 64, 57, 85, 98, 61, 72, 69, 59, 49, 62, 85, 61, 61, 63, 54, 61, 74, 93, 78, 72, 64, 79, 62, 50, 44, 79, 58, 78, 81, 62, 138, 65, 51, 66, 64, 50, 111, 149, 52, 84, 52, 75, 100, 71, 53, 73, 56, 44, 69, 48, 66, 66, 75, 69, 76, 73, 57, 58, 63, 57, 73, 55, 45, 54, 47, 45, 74, 75, 109, 56, 66, 76, 67, 74, 67, 72, 61, 59, 64, 68, 78, 59, 55, 60, 60, 63, 52, 81, 67, 56, 56, 79, 54, 53, 58, 74, 55, 72, 65, 64, 72, 53, 99, 74, 58, 55, 59, 82, 63, 70, 63, 61, 49, 69, 65, 71, 72, 80, 48, 64, 67, 82, 89, 57, 63, 41, 66, 82, 70, 67, 52, 77, 57, 65, 61, 67, 42, 46, 54, 59, 69, 76, 56, 74, 74, 64, 70, 60, 61, 72, 61, 54, 61, 59, 74, 108, 68, 62, 71, 71, 83, 81, 54, 80, 53, 59, 53, 59, 75, 65, 66, 57, 55, 74, 51, 68, 84, 72, 68, 70, 64, 85, 99, 59, 65, 46, 51, 88, 59, 64, 67, 68, 61, 62, 77, 63, 87, 70, 75, 83, 59, 56, 77, 94, 53, 64, 61, 56, 59, 74, 52, 81, 65, 72, 62, 50, 61, 64, 79, 59, 56, 65, 50, 67, 64, 50, 66, 56, 56, 53, 66, 62, 107, 71, 67, 65, 63, 61, 66, 53, 84, 63, 103, 106, 64, 67, 43, 67, 65, 54, 44, 64, 66, 53, 55, 57, 55, 67, 65, 66, 62, 63, 67, 57, 48, 72, 62, 121, 97, 63, 58, 47, 57, 48, 63, 56, 51, 66, 63, 70, 78, 77, 57, 111, 92, 63, 73, 79, 85, 62, 48, 56, 64, 74, 66, 73, 69, 69, 44, 63, 53, 58, 94, 58, 66, 51, 63, 57, 63, 68, 77, 74, 75, 71, 80, 52, 60, 44, 60, 76, 40, 57, 53, 69, 56, 107, 48, 47, 34, 56, 55, 94, 78, 64, 60, 63, 61, 57, 82, 79, 84, 64, 65, 62, 62, 79, 49, 66, 58, 63, 63, 72, 50, 62, 65, 51, 58, 67, 79, 62, 67, 60, 75, 69, 50, 62, 75, 59, 58, 80, 61, 70, 55, 65, 62, 58, 71, 65, 60, 79, 56, 58, 54, 76, 85, 63, 64, 73, 65, 62, 64, 109, 62, 67, 77, 49, 63, 77, 105, 61, 55, 49, 58, 71, 65, 60, 51, 66, 68, 44, 42, 86, 36, 51, 43, 73, 63, 68, 77, 68, 56, 57, 71, 61, 63, 62, 57, 62, 65, 57, 53, 68, 61, 90, 85, 58, 105, 123, 56, 75, 67, 63, 51, 68, 62, 64, 44, 63, 56, 68, 59, 75, 51, 65, 76, 70, 67, 67, 64, 69, 66, 62, 48, 65, 56, 76, 58, 83, 73, 71, 60, 49, 52, 89, 61, 78, 59, 62, 77, 64, 72, 50, 67, 53, 54, 76, 59, 56, 71, 63, 76, 78, 50, 57, 59, 72, 61, 63, 48, 52, 44, 85, 69, 56, 44, 56, 66, 62, 58, 60, 66, 50, 66, 84, 46, 71, 70, 56, 51, 89, 66, 82, 60, 55, 70, 71, 67, 81, 55, 64, 81, 66, 59, 71, 64, 70, 61, 97, 55, 55, 78, 69, 66, 52, 63, 
69, 62, 63, 54, 62, 66, 74, 94, 81, 80, 70, 57, 61, 40, 64, 54, 81, 96, 73, 63, 57, 61, 54, 68, 69, 66, 63, 40, 51, 56, 68, 43, 77, 73, 62, 95, 46, 52, 54, 69, 71, 62, 64, 67, 60, 66, 100, 64, 52, 73, 52, 67, 49, 69, 59, 63, 59, 70, 79, 94, 46, 59, 69, 67, 65, 61, 69, 90, 65, 61, 64, 57, 57, 74, 76, 58, 74, 76, 64, 59, 74, 71, 55, 53, 65, 50, 70, 53, 59, 54, 58, 63, 49, 63, 71, 52, 68, 48, 66, 68, 46, 104, 54, 64, 78, 46, 88, 64, 61, 56, 59, 62, 49, 53, 55, 55, 62, 53, 71, 65, 44, 57, 76, 101, 76, 49, 67, 87, 67, 87, 64, 56, 52, 63, 81, 68, 62, 87, 69, 62, 136, 73, 63, 68, 63, 53, 68, 73, 60, 58, 57, 136, 63, 68, 70, 55, 60, 66, 78, 54, 64, 59, 71, 96, 59, 58, 63, 61, 94, 50, 68, 55, 64, 56, 55, 53, 62, 69, 68, 72, 59, 59, 50, 57, 62, 64, 59, 76, 70, 69, 96, 65, 56, 64, 66, 63, 75, 57, 47, 78, 78, 66, 75, 83, 78, 64, 54, 49, 78, 57, 63, 74, 69, 69, 58, 89, 118, 85, 103, 86, 61, 59, 58, 64, 69, 64, 82, 52, 58, 63, 65, 57, 65, 57, 60, 93, 68, 62, 53, 58, 107, 82, 50, 77, 74, 59, 53, 60, 84, 68, 85, 84, 67, 64, 42, 62, 68, 67, 59, 61, 96, 58, 72, 80, 84, 48, 82, 84, 60, 62, 52, 68, 51, 53, 59, 81, 65, 76, 74, 94, 67, 75, 76, 84, 79, 42, 49, 70, 75, 66, 85, 51, 70, 102, 60, 43, 69, 70, 102, 59, 41, 60, 48, 58, 79, 71, 69, 75, 42, 47, 91, 56, 67, 93, 43, 63, 142, 60, 74, 52, 60, 38, 61, 59, 77, 43, 64, 75, 66, 44, 52, 60, 53, 62, 42, 56, 103, 62, 59, 87, 58, 96, 73, 82, 65, 59, 68, 65, 93, 86, 59, 79, 65, 51, 58, 60, 61, 55, 54, 55, 51, 70, 45, 60, 90, 57, 51, 75, 73, 101, 77, 64, 49, 58, 66, 66, 69, 58, 69, 51, 64, 62, 44, 59, 66, 89, 85, 56, 69, 54, 51, 55, 64, 59, 52, 69, 76, 62, 130, 60, 58, 52, 59, 60, 95, 77, 52, 57, 64, 80, 113, 74, 87, 58, 54, 109, 51, 62, 50, 52, 56, 52, 81, 93, 58, 98, 63, 75, 111, 42, 59, 75, 61, 73, 71, 54, 51, 55, 60, 83, 59, 67, 79, 64, 67, 49, 70, 95, 42, 54, 50, 49, 46, 63, 79, 48, 52, 56, 74, 80, 50, 51, 59, 53, 59, 56, 45, 60, 56, 64, 54, 61, 78, 58, 75, 86, 68, 44, 61, 60, 71, 68, 65, 61, 51, 44, 69, 122, 86, 50, 43, 63, 57, 98, 99, 38, 97, 53, 49, 76, 74, 55, 66, 67, 68, 67, 77, 95, 95, 43, 50, 62, 65, 57, 48, 63, 63, 67, 64, 61, 55, 62, 60, 58, 62, 57, 52, 52, 59, 60, 54, 85, 87, 69, 69, 56, 62, 62, 53, 76, 61, 69, 69, 78, 61, 53, 43, 56, 66, 52, 99, 71, 57, 66, 63, 87, 67, 50, 58, 76, 48, 62, 49, 101, 57, 51, 64, 69, 63, 75, 84, 110, 64, 77, 85, 64, 61, 73, 74, 45, 70, 64, 47, 74, 59, 82, 79, 48, 98, 113, 61, 82, 61, 65, 55, 67, 53, 47, 108, 60, 60, 38, 49, 66, 103, 74, 70, 73, 80, 72, 73, 48, 48, 77, 48, 51, 59, 66, 62, 59, 49, 67, 40, 90, 69, 64, 76, 87, 56, 85, 61, 62, 69, 82, 79, 91, 88, 63, 68, 91, 70, 49, 75, 73, 87, 72, 64, 52, 58, 71, 84, 55, 69, 66, 66, 67, 74, 69, 96, 110, 60, 46, 62, 56, 74, 70, 53, 66, 67, 72, 66, 62, 65, 64, 71, 45, 107, 51, 76, 98, 79, 60, 63, 61, 52, 77, 61, 61, 48, 103, 57, 77, 74, 64, 67, 49, 95, 71, 88, 77, 62, 71, 73, 89, 54, 83, 48, 52, 60, 67, 55, 68, 65, 45, 57, 57, 63, 54, 82, 51, 77, 66, 64, 49, 43, 62, 51, 56, 65, 67, 54, 80, 74, 61, 48, 69, 65, 74, 65, 62, 60, 46, 58, 35, 50, 53, 68, 66, 83, 47, 55, 64, 51, 57, 75, 58, 63, 74, 74, 63, 77, 84, 86, 63, 49, 67, 62, 82, 53, 50, 85, 54, 65, 64, 70, 64, 48, 76, 79, 50, 64, 52, 58, 148, 93, 53, 43, 47, 78, 65, 49, 60, 39, 79, 68, 70, 75, 72, 62, 79, 69, 73, 63, 87, 70, 60, 59, 101, 74, 90, 70, 50, 75, 87, 72, 62, 76, 64, 68, 80, 69, 45, 56, 59, 65, 63, 63, 67, 68, 72, 86, 64, 63, 51, 62, 90, 74, 51, 56, 84, 77, 55, 59, 62, 76, 60, 61, 65, 52, 61, 47, 65, 90, 35, 48, 83, 68, 77, 71, 66, 56, 57, 54, 64, 59, 65, 51, 74, 67, 113, 51, 53, 59, 57, 50, 66, 52, 58, 48, 65, 59, 
61, 48, 30, 66, 56, 54, 99, 52, 53, 71, 63, 73, 65, 55, 72, 79, 69, 67, 64, 55, 76, 49, 76, 51, 96, 63, 42, 65, 53, 70, 49, 60, 75, 57, 65, 76, 96, 83, 68, 59, 65, 58, 93, 71, 62, 54, 66, 54, 55, 53, 68, 71, 75, 67, 41, 62, 77, 64, 73, 101, 85, 59, 71, 71, 70, 65, 60, 76, 44, 60, 68, 41, 58, 54, 63, 51, 96, 96, 61, 74, 51, 52, 46, 72, 49, 70, 105, 75, 67, 59, 58, 58, 78, 85, 70, 83, 72, 75, 55, 60, 66, 52, 59, 62, 73, 74, 55, 68, 69, 59, 53, 103, 60, 46, 76, 45, 63, 51, 56, 94, 62, 58, 62, 38, 74, 57, 84, 73, 59, 81, 63, 74, 55, 61, 53, 68, 64, 66, 67, 55, 56, 37, 72, 63, 113, 72, 58, 66, 78, 67, 53, 70, 59, 46, 50, 57, 79, 51, 55, 59, 66, 60, 80, 65, 66, 36, 65, 69, 49, 57, 52, 61, 67, 60, 78, 67, 58, 56, 55, 80, 65, 81, 61, 64, 41, 65, 63, 44, 83, 59, 43, 46, 58, 52, 67, 43, 72, 72, 74, 58, 54, 62, 65, 75, 57, 50, 50, 57, 60, 55, 52, 56, 80, 81, 100, 74, 45, 56, 62, 58, 53, 67, 65, 40, 43, 50, 62, 79, 59, 33, 68, 57, 56, 56, 86, 97, 46, 60, 59, 61, 62, 92, 49, 118, 98, 65, 66, 48, 58, 40, 70, 52, 74, 126, 69, 85, 61, 52, 91, 51, 78, 65, 58, 48, 85, 71, 71, 63, 64, 74, 59, 49, 109, 73, 85, 58, 75, 56, 78, 97, 115, 108, 122, 67, 78, 153, 69, 54, 77, 69, 71, 76, 68, 58, 67, 52, 43, 65, 77, 74, 49, 109, 69, 48, 52, 58, 75, 82, 83, 64, 95, 82, 70, 72, 74, 54, 51, 54, 59, 61, 76, 59, 66, 40, 95, 58, 59, 56, 67, 42, 66, 81, 72, 65, 58, 75, 67, 58, 66, 60, 87, 86, 80, 77, 60, 71, 64, 73, 44, 54, 64, 79, 77, 58, 77, 44, 67, 107, 73, 64, 47, 71, 67, 68, 47, 73, 56, 77, 71, 64, 61, 56, 57, 52, 81, 62, 55, 64, 53, 84, 44, 51, 62, 62, 57, 55, 69, 51, 57, 68, 70, 57, 78, 59, 52, 51, 68, 79, 92, 67, 53, 61, 86, 53, 97, 63, 52, 48, 54, 72, 58, 81, 84, 73, 70, 56, 58, 74, 53, 78, 58, 70, 93, 52, 61, 75, 50, 61, 59, 55, 64, 57, 80, 55, 87, 62, 56, 88, 61, 57, 61, 48, 59, 79, 64, 55, 67, 49, 64, 73, 60, 70, 53, 63, 66, 68, 51, 61, 72, 75, 55, 76, 70, 81, 68, 53, 60, 63, 69, 64, 57, 54, 72, 65, 61, 112, 62, 59, 67, 30, 92, 108, 68, 55, 59, 57, 42, 82, 59, 70, 49, 77, 61, 72, 59, 91, 59, 68, 65, 81, 37, 59, 62, 63, 66, 61, 59, 68, 66, 69, 69, 79, 61, 69, 53, 69, 72, 68, 67, 60, 54, 65, 51, 83, 60, 62, 56, 52, 92, 99, 71, 60, 79, 67, 73, 81, 51, 97, 73, 56, 74, 67, 57, 61, 63, 95, 44, 57, 64, 61, 68, 93, 66, 56, 64, 64, 61, 59, 46, 45, 67, 73, 63, 74, 53, 61, 76, 144, 47, 105, 65, 66, 37, 69, 62, 71, 62, 66, 60, 65, 50, 61, 86, 51, 45, 60, 52, 55, 67, 77, 63, 63, 56, 64, 60, 80, 66, 61, 61, 45, 70, 83, 63, 52, 55, 72, 98, 62, 59, 82, 94, 63, 65, 82, 69, 61, 59, 45, 63, 66, 64, 44, 66, 68, 63, 36, 103, 64, 79, 39, 70, 55, 61, 64, 60, 55, 58, 48, 74, 81, 149, 64, 46, 66, 75, 71, 62, 49, 65, 64, 53, 62, 60, 56, 91, 56, 49, 68, 81, 71, 76, 64, 68, 78, 65, 58, 65, 73, 74, 51, 75, 62, 51, 65, 68, 77, 61, 57, 54, 78, 59, 66, 104, 56, 60, 51, 63, 56, 55, 70, 86, 56, 63, 57, 68, 73, 77, 68, 51, 52, 82, 77, 73, 69, 81, 73, 76, 59, 56, 99, 53, 68, 92, 66, 38, 60, 82, 71, 65, 61, 67, 110, 65, 61, 55, 51, 68, 66, 50, 64, 82, 64, 67, 82, 74, 70, 55, 53, 64, 63, 53, 69, 50, 72, 93, 54, 63, 59, 70, 62, 71, 107, 67, 65, 66, 55, 44, 87, 85, 80, 66, 43, 67, 63, 71, 64, 51, 54, 82, 57, 48, 66, 80, 64, 49, 72, 34, 62, 57, 33, 68, 62, 59, 58, 73, 68, 50, 70, 63, 67, 73, 83, 75, 69, 54, 75, 63, 74, 64, 63, 62, 57, 80, 64, 67, 48, 75, 87, 51, 61, 86, 75, 56, 54, 60, 86, 63, 61, 63, 78, 73, 56, 83, 56, 58, 48, 82, 65, 80, 56, 69, 57, 55, 75, 60, 49, 77, 51, 48, 57, 59, 55, 60, 79, 91, 61, 60, 78, 67, 62, 52, 56, 61, 71, 62, 73, 63, 55, 50, 66, 50, 83, 98, 100, 55, 71, 78, 72, 56, 60, 87, 53, 52, 54, 69, 62, 57, 64, 68, 70, 58, 99, 
56, 62, 75, 61, 56, 67, 58, 59, 59, 128, 55, 68, 76, 41, 78, 87, 59, 61, 61, 60, 52, 54, 59, 65, 47, 66, 50, 72, 59, 45, 55, 55, 58, 67, 57, 49, 45, 49, 66, 67, 59, 58, 64, 94, 69, 67, 61, 85, 82, 68, 59, 54, 66, 90, 70, 67, 38, 54, 73, 59, 71, 61, 88, 59, 59, 70, 58, 72, 95, 76, 67, 54, 60, 61, 69, 53, 89, 53, 81, 56, 63, 86, 76, 76, 79, 93, 60, 75, 51, 102, 63, 56, 60, 53, 59, 52, 52, 64, 74, 81, 58, 65, 70, 54, 66, 58, 49, 65, 71, 66, 53, 52, 70, 77, 62, 53, 65, 60, 80, 68, 76, 56, 47, 67, 58, 74, 52, 59, 66, 64, 61, 57, 66, 56, 63, 97, 66, 69, 79, 94, 73, 54, 71, 83, 69, 63, 88, 72, 53, 61, 59, 66, 68, 85, 69, 71, 60, 57, 57, 94, 66, 52, 50, 86, 66, 67, 67, 67, 60, 66, 45, 65, 64, 65, 73, 96, 60, 69, 52, 79, 63, 66, 73, 54, 74, 56, 57, 73, 63, 75, 73, 65, 58, 56, 56, 63, 81, 79, 107, 59, 58, 70, 62, 61, 65, 102, 115, 54, 70, 109, 69, 65, 74, 80, 88, 78, 63, 87, 61, 50, 65, 78, 74, 65, 112, 60, 55, 54, 55, 70, 41, 89, 57, 69, 74, 48, 70, 62, 60, 79, 57, 62, 61, 69, 103, 58, 49, 59, 43, 57, 76, 78, 93, 71, 87, 62, 71, 63, 67, 103, 53, 62, 58, 57, 51, 67, 79, 115, 55, 67, 70, 68, 80, 34, 52, 104, 59, 77, 61, 48, 70, 68, 59, 58, 50, 112, 76, 71, 101, 52, 49, 88, 63, 47, 80, 55, 66, 63, 50, 78, 76, 74, 54, 81, 69, 57, 51, 60, 63, 63, 80, 73, 70, 75, 80, 61, 66, 67, 45, 68, 89, 105, 55, 64, 69, 63, 61, 71, 89, 74, 66, 59, 54, 67, 98, 64, 70, 54, 47, 48, 63, 97, 51, 53, 54, 72, 52, 71, 54, 60, 76, 41, 64, 57, 79, 108, 105, 77, 102, 61, 92, 73, 54, 56, 55, 64, 45, 33, 97, 48, 64, 57, 50, 51, 51, 56, 75, 70, 47, 77, 99, 53, 62, 58, 47, 66, 50, 52, 69, 64, 71, 72, 128, 82, 66, 67, 52, 60, 82, 62, 57, 73, 60, 91, 102, 67, 86, 63, 59, 52, 56, 75, 64, 58, 57, 66, 64, 82, 61, 53, 43, 50, 52, 61, 76, 84, 78, 60, 65, 74, 57, 47, 60, 64, 52, 58, 46, 76, 71, 49, 64, 78, 57, 58, 60, 62, 51, 67, 74, 58, 77, 41, 69, 62, 89, 82, 55, 58, 66, 57, 42, 73, 58, 81, 98, 74, 62, 54, 67, 75, 53, 68, 50, 76, 61, 107, 60, 54, 65, 83, 67, 78, 61, 95, 58, 68, 60, 72, 73, 58, 61, 105, 66, 76, 120, 67, 56, 57, 66, 58, 55, 82, 60, 68, 58, 53, 71, 55, 51, 70, 69, 123, 49, 111, 74, 55, 58, 76, 61, 92, 58, 66, 82, 64, 75, 62, 55, 80, 75, 59, 62, 96, 68, 77, 62, 54, 64, 54, 63, 80, 80, 76, 76, 52, 63, 97, 74, 70, 68, 63, 67, 61, 47, 72, 82, 81, 60, 61, 60, 67, 39, 60, 74, 64, 47, 54, 53, 65, 54, 43, 63, 82, 71, 65, 54, 60, 57, 62, 77, 54, 67, 55, 69, 60, 62, 60, 69, 61, 54, 60, 76, 73, 84, 61, 42, 64, 157, 67, 68, 45, 77, 60, 64, 61, 54, 53, 57, 54, 85, 51, 67, 60, 60, 50, 84, 73, 63, 67, 61, 53, 53, 67, 53, 71, 55, 63, 47, 71, 67, 74, 118, 77, 61, 65, 67, 88, 51, 52, 88, 73, 58, 78, 53, 76, 70, 99, 58, 86, 52, 48, 96, 66, 52, 91, 60, 73, 58, 63, 53, 71, 75, 52, 85, 59, 69, 63, 41, 59, 60, 78, 66, 65, 71, 50, 73, 49, 41, 76, 63, 65, 70, 63, 64, 65, 80, 61, 72, 68, 144, 81, 64, 41, 62, 63, 49, 87, 57, 56, 60, 65, 59, 73, 49, 70, 56, 74, 48, 55, 59, 51, 78, 85, 73, 66, 69, 44, 60, 75, 57, 115, 65, 59, 56, 58, 81, 77, 64, 92, 60, 52, 64, 66, 47, 63, 72, 63, 87, 66, 55, 43, 76, 70, 62, 74, 62, 69, 65, 78, 68, 65, 73, 47, 51, 50, 64, 55, 66, 49, 74, 56, 74, 61, 61, 77, 59, 70, 64, 87, 66, 73, 64, 46, 71, 73, 80, 87, 54, 60, 70, 63, 62, 62, 92, 68, 60, 89, 46, 57, 89, 46, 76, 59, 53, 69, 53, 68, 46, 73, 77, 59, 42, 62, 60, 60, 59, 51, 67, 66, 73, 67, 53, 93, 70, 74, 69, 75, 53, 62, 77, 62, 70, 77, 49, 46, 70, 69, 53, 63, 65, 79, 77, 46, 55, 71, 62, 80, 65, 64, 55, 77, 71, 62, 77, 71, 86, 57, 66, 76, 68, 75, 64, 55, 49, 68, 74, 62, 59, 57, 67, 124, 66, 72, 68, 57, 73, 92, 47, 89, 67, 50, 62, 57, 69, 69, 82, 79, 54, 68, 59, 73, 79, 
49, 81, 63, 61, 70, 69, 64, 64, 103, 54, 74, 63, 74, 62, 62, 56, 74, 56, 72, 61, 85, 78, 54, 72, 68, 63, 63, 74, 57, 56, 72, 72, 54, 55, 43, 71, 68, 83, 65, 71, 63, 52, 73, 55, 49, 99, 66, 62, 73, 58, 54, 60, 68, 66, 79, 71, 57, 58, 64, 74, 60, 55, 57, 62, 72, 69, 62, 73, 78, 50, 88, 62, 58, 67, 56, 75, 88, 77, 60, 71, 77, 50, 59, 65, 88, 69, 65, 65, 63, 61, 95, 54, 64, 66, 64, 65, 61, 87, 76, 62, 79, 54, 68, 46, 71, 60, 79, 53, 54, 69, 71, 63, 61, 55, 68, 68, 69, 74, 52, 58, 68, 56, 65, 75, 67, 80, 69, 63, 62, 54, 51, 50, 59, 69, 77, 65, 64, 68, 82, 80, 58, 60, 54, 84, 67, 66, 60, 69, 69, 73, 74, 49, 55, 89, 53, 48, 67, 56, 88, 77, 64, 60, 61, 65, 67, 39, 64, 96, 70, 66, 64, 91, 73, 62, 65, 64, 64, 61, 50, 84, 69, 52, 96, 56, 52, 76, 75, 53, 68, 61, 46, 112, 52, 66, 93, 73, 78, 56, 65, 66, 68, 64, 90, 91, 58, 66, 50, 72, 30, 59, 67, 51, 82, 54, 69, 55, 173, 59, 58, 66, 60, 72, 83, 81, 64, 75, 56, 66, 58, 40, 74, 70, 70, 67, 59, 72, 80, 78, 74, 73, 50, 41, 74, 46, 51, 59, 77, 70, 57, 57, 61, 59, 85, 71, 63, 73, 68, 78, 75, 66, 58, 57, 46, 58, 63, 72, 98, 60, 56, 54, 71, 69, 79, 65, 54, 69, 65, 76, 63, 86, 55, 69, 41, 42, 70, 75, 53, 65, 51, 70, 51, 68, 74, 57, 58, 64, 70, 49, 86, 79, 76, 64, 61, 60, 80, 81, 58, 56, 43, 62, 50, 70, 56, 72, 36, 62, 51, 56, 72, 67, 46, 86, 64, 75, 63, 45, 63, 51, 47, 63, 43, 40, 55, 63, 59, 74, 65, 51, 82, 52, 86, 54, 61, 42, 72, 55, 65, 50, 61, 60, 54, 71, 83, 62, 81, 63, 72, 64, 67, 41, 73, 58, 77, 64, 52, 72, 47, 75, 54, 109, 52, 64, 55, 100, 58, 76, 69, 67, 73, 77, 46, 77, 58, 81, 116, 49, 69, 64, 60, 65, 64, 82, 51, 58, 56, 78, 58, 50, 48, 57, 76, 65, 65, 54, 65, 70, 51, 45, 55, 59, 62, 72, 65, 71, 59, 69, 76, 77, 67, 50, 51, 69, 59, 57, 60, 43, 71, 63, 48, 59, 68, 56, 62, 59, 58, 72, 50, 78, 60, 67, 128, 49, 88, 83, 70, 44, 52, 61, 75, 115, 82, 61, 72, 65, 60, 88, 49, 52, 51, 36, 98, 72, 68, 55, 60, 67, 51, 57, 81, 71, 55, 60, 77, 68, 69, 57, 60, 61, 66, 48, 73, 61, 54, 58, 84, 51, 65, 70, 65, 67, 68, 57, 70, 65, 68, 66, 83, 77, 53, 59, 66, 42, 66, 66, 57, 47, 81, 59, 66, 45, 60, 58, 73, 70, 85, 61, 112, 69, 45, 107, 45, 56, 63, 64, 75, 47, 70, 67, 71, 109, 50, 50, 55, 49, 64, 73, 65, 48, 70, 73, 47, 65, 76, 72, 49, 48, 75, 65, 59, 68, 57, 70, 83, 76, 75, 59, 65, 75, 70, 53, 64, 60, 52, 65, 76, 59, 63, 53, 39, 46, 86, 52, 68, 55, 58, 62, 54, 73, 65, 58, 66, 67, 77, 93, 96, 76, 62, 50, 63, 92, 51, 66, 58, 58, 99, 59, 53, 61, 75, 73, 59, 61, 44, 71, 44, 87, 74, 42, 75, 77, 55, 56, 60, 63, 43, 64, 70, 70, 92, 67, 44, 63, 61, 64, 53, 69, 97, 121, 59, 38, 80, 59, 59, 93, 52, 69, 63, 65, 61, 66, 83, 95, 64, 62, 96, 45, 74, 43, 77, 55, 63, 54, 73, 58, 51, 60, 77, 58, 70, 70, 60, 112, 61, 70, 73, 60, 49, 67, 53, 65, 69, 53, 64, 72, 59, 60, 84, 66, 46, 72, 71, 59, 55, 68, 54, 44, 57, 62, 91, 74, 75, 85, 60, 72, 57, 73, 52, 48, 52, 56, 51, 65, 62, 54, 87, 68, 84, 67, 65, 59, 64, 62, 69, 92, 60, 79, 62, 71, 52, 92, 62, 74, 77, 56, 62, 64, 65, 61, 91, 69, 57, 72, 60, 41, 60, 51, 64, 73, 71, 75, 63, 55, 66, 78, 51, 60, 62, 64, 73, 65, 55, 70, 63, 71, 66, 60, 82, 48, 51, 44, 74, 69, 62, 60, 59, 71, 90, 65, 62, 54, 111, 64, 62, 54, 63, 67, 66, 71, 73, 69, 57, 73, 60, 57, 56, 74, 54, 70, 63, 52, 62, 69, 70, 76, 58, 72, 61, 83, 91, 74, 80, 58, 68, 73, 73, 63, 61, 52, 67, 56, 88, 61, 61, 47, 70, 61, 68, 77, 54, 60, 83, 78, 75, 57, 41, 66, 48, 64, 52, 79, 73, 60, 57, 53, 61, 56, 73, 60, 65, 50, 60, 55, 64, 62, 50, 49, 80, 68, 69, 57, 58, 70, 59, 59, 48, 62, 80, 84, 56, 62, 48, 63, 76, 60, 54, 83, 62, 81, 51, 68, 56, 57, 67, 68, 71, 77, 85, 69, 82, 54, 66, 57, 57, 61, 61, 
72, 66, 51, 58, 60, 58, 82, 46, 55, 57, 76, 72, 68, 83, 63, 63, 52, 61, 74, 61, 50, 100, 61, 71, 67, 87, 74, 67, 85, 66, 66, 53, 46, 73, 61, 59, 67, 60, 74, 60, 57, 59, 68, 56, 57, 50, 133, 46, 75, 58, 49, 55, 91, 56, 45, 79, 65, 51, 98, 50, 66, 106, 63, 69, 62, 65, 61, 58, 99, 66, 59, 76, 62, 62, 71, 62, 62, 75, 75, 66, 59, 73, 60, 54, 50, 61, 61, 64, 67, 52, 56, 61, 83, 55, 64, 55, 65, 60, 74, 63, 72, 68, 63, 83, 69, 68, 65, 73, 66, 58, 84, 95, 59, 58, 58, 62, 60, 90, 61, 65, 69, 82, 44, 75, 138, 62, 78, 63, 59, 54, 62, 50, 58, 63, 52, 73, 53, 72, 94, 82, 87, 67, 63, 58, 118, 82, 59, 58, 57, 59, 89, 63, 63, 69, 48, 88, 68, 67, 61, 83, 68, 69, 73, 80, 63, 60, 55, 50, 49, 57, 60, 63, 66, 67, 60, 63, 70, 82, 56, 68, 62, 73, 54, 93, 60, 61, 65, 59, 72, 67, 70, 81, 74, 57, 55, 52, 53, 58, 51, 97, 67, 56, 58, 69, 76, 80, 67, 67, 81, 65, 75, 53, 69, 68, 58, 55, 77, 63, 62, 87, 70, 68, 57, 68, 89, 69, 69, 63, 79, 68, 64, 71, 53, 57, 71, 56, 51, 51, 80, 59, 53, 58, 64, 92, 48, 65, 67, 78, 64, 48, 56, 71, 55, 59, 71, 57, 58, 79, 53, 50, 59, 43, 54, 59, 59, 47, 54, 82, 50, 57, 63, 80, 52, 65, 94, 79, 51, 52, 72, 52, 51, 57, 44, 61, 65, 61, 65, 43, 81, 71, 74, 91, 87, 54, 57, 77, 73, 67, 71, 90, 104, 73, 59, 72, 76, 66, 56, 68, 70, 70, 61, 92, 53, 64, 59, 70, 56, 61, 52, 72, 69, 38, 69, 88, 45, 68, 61, 52, 63, 80, 52, 63, 48, 67, 90, 41, 82, 59, 59, 54, 39, 73, 71, 60, 56, 61, 57, 57, 59, 100, 47, 81, 54, 70, 64, 60, 79, 55, 59, 68, 63, 65, 57, 55, 40, 56, 55, 72, 62, 60, 77, 72, 61, 75, 49, 41, 72, 34, 64, 79, 63, 60, 72, 53, 94, 79, 80, 64, 69, 78, 75, 58, 89, 51, 68, 81, 70, 67, 61, 71, 61, 62, 63, 78, 71, 38, 58, 55, 52, 57, 66, 52, 53, 99, 63, 62, 54, 44, 64, 66, 101, 53, 64, 91, 81, 55, 83, 67, 72, 63, 64, 69, 70, 60, 102, 71, 64, 72, 64, 59, 74, 55, 74, 65, 65, 74, 48, 61, 65, 76, 88, 55, 57, 50, 59, 40, 87, 90, 61, 68, 54, 61, 67, 73, 65, 65, 58, 75, 53, 76, 60, 65, 55, 53, 67, 64, 53, 73, 75, 51, 59, 55, 57, 54, 75, 51, 61, 68, 57, 68, 68, 52, 73, 73, 94, 59, 80, 59, 60, 54, 66, 87, 55, 50, 68, 77, 100, 75, 49, 57, 69, 60, 61, 66, 60, 56, 55, 67, 62, 65, 82, 62, 83, 60, 69, 73, 60, 77, 53, 106, 58, 67, 65, 46, 57, 72, 58, 61, 66, 85, 72, 59, 80, 60, 82, 88, 80, 56, 71, 54, 66, 45, 49, 55, 63, 65, 60, 55, 117, 52, 71, 91, 65, 49, 69, 84, 62, 65, 64, 57, 75, 78, 70, 66, 82, 65, 68, 55, 66, 70, 89, 50, 52, 52, 61, 48, 45, 74, 79, 61, 103, 85, 69, 68, 78, 56, 59, 70, 65, 77, 59, 68, 50, 68, 45, 85, 103, 71, 62, 69, 55, 54, 53, 106, 61, 63, 80, 75, 61, 63, 61, 54, 61, 76, 68, 108, 56, 65, 61, 59, 79, 65, 60, 53, 75, 58, 58, 61, 66, 48, 72, 68, 50, 56, 79, 67, 61, 71, 77, 56, 59, 76, 50, 77, 51, 87, 70, 61, 65, 60, 49, 106, 66, 51, 70, 59, 67, 52, 68, 56, 56, 62, 51, 66, 74, 75, 62, 85, 56, 66, 67, 63, 68, 61, 86, 53, 73, 49, 46, 65, 87, 75, 60, 63, 68, 64, 59, 126, 70, 61, 67, 63, 62, 58, 92, 67, 55, 68, 75, 83, 55, 60, 81, 62, 83, 63, 76, 69, 66, 56, 92, 60, 67, 75, 68, 67, 41, 66, 112, 53, 57, 61, 57, 56, 66, 60, 58, 58, 61, 69, 74, 95, 59, 94, 81, 61, 80, 49, 67, 69, 72, 67, 105, 62, 119, 63, 61, 57, 64, 62, 100, 67, 78, 68, 62, 62, 65, 67, 54, 74, 79, 64, 64, 64, 39, 59, 45, 61, 68, 54, 100, 74, 69, 57, 58, 55, 75, 51, 66, 63, 55, 71, 78, 50, 60, 48, 41, 75, 76, 66, 64, 56, 83, 63, 59, 66, 63, 57, 52, 66, 62, 93, 59, 51, 71, 59, 54, 52, 68, 64, 58, 71, 68, 85, 80, 63, 49, 59, 62, 71, 68, 79, 76, 61, 64, 77, 63, 50, 70, 65, 60, 50, 84, 95, 74, 61, 68, 67, 107, 65, 65, 49, 60, 83, 75, 68, 52, 39, 58, 58, 65, 50, 79, 90, 69, 46, 46, 54, 55, 69, 49, 82, 58, 63, 55, 54, 67, 47, 68, 68, 54, 64, 
58, 55, 66, 49, 84, 55, 46, 69, 64, 47, 69, 74, 64, 62, 107, 56, 62, 78, 68, 60, 58, 89, 65, 63, 58, 63, 65, 64, 119, 63, 79, 66, 71, 82, 68, 78, 73, 58, 75, 88, 76, 72, 54, 73, 55, 56, 62, 53, 72, 79, 87, 76, 57, 69, 98, 67, 64, 64, 49, 78, 62, 60, 76, 50, 58, 59, 88, 49, 71, 79, 55, 70, 61, 78, 53, 72, 93, 37, 93, 67, 62, 54, 73, 69, 41, 66, 58, 88, 54, 61, 62, 69, 57, 41, 44, 64, 67, 63, 72, 69, 59, 73, 54, 71, 45, 76, 67, 68, 60, 62, 76, 58, 49, 60, 60, 70, 65, 55, 60, 77, 104, 67, 71, 87, 56, 65, 83, 63, 109, 71, 50, 67, 55, 43, 64, 75, 68, 78, 62, 69, 55, 91, 62, 91, 66, 66, 57, 63, 63, 75, 58, 46, 82, 62, 82, 67, 66, 65, 57, 73, 95, 101, 38, 97, 58, 55, 42, 68, 57, 61, 39, 71, 65, 66, 63, 44, 77, 66, 66, 61, 102, 71, 53, 40, 52, 67, 42, 64, 46, 56, 89, 67, 67, 65, 58, 73, 68, 65, 61, 60, 77, 65, 56, 61, 71, 63, 62, 54, 107, 60, 71, 64, 67, 54, 66, 65, 86, 97, 57, 60, 58, 56, 58, 49, 69, 87, 69, 97, 66, 66, 47, 66, 68, 72, 49, 67, 63, 56, 58, 63, 50, 63, 76, 68, 65, 58, 65, 84, 70, 53, 54, 57, 65, 84, 57, 81, 45, 77, 70, 48, 70, 52, 75, 71, 70, 74, 72, 85, 62, 49, 63, 76, 75, 51, 48, 56, 53, 45, 68, 51, 66, 55, 67, 45, 55, 57, 60, 50, 75, 69, 72, 48, 64, 77, 80, 74, 62, 79, 85, 70, 58, 46, 104, 69, 47, 66, 71, 53, 62, 54, 65, 78, 55, 75, 50, 65, 59, 70, 69, 62, 70, 72, 55, 52, 51, 69, 85, 75, 64, 45, 67, 81, 57, 61, 72, 66, 50, 55, 60, 57, 75, 62, 73, 60, 83, 54, 70, 71, 71, 66, 66, 44, 74, 57, 82, 78, 64, 54, 67, 58, 66, 54, 71, 79, 77, 59, 66, 75, 81, 90, 66, 50, 56, 55, 65, 60, 72, 67, 64, 79, 59, 55, 78, 67, 58, 66, 47, 60, 57, 50, 53, 59, 54, 74, 70, 60, 69, 57, 54, 81, 59, 50, 79, 58, 98, 77, 53, 72, 64, 60, 77, 73, 74, 80, 54, 82, 35, 54, 58, 91, 86, 66, 63, 71, 51, 56, 64, 63, 87, 80, 66, 81, 76, 47, 66, 66, 66, 68, 50, 48, 74, 50, 94, 67, 49, 52, 44, 57, 48, 71, 79, 69, 76, 84, 65, 56, 57, 74, 55, 63, 68, 67, 62, 80, 63, 115, 68, 53, 51, 67, 57, 65, 66, 83, 70, 65, 80, 54, 57, 60, 79, 53, 73, 68, 80, 69, 69, 60, 48, 59, 56, 59, 55, 60, 81, 53, 79, 81, 53, 59, 60, 74, 54, 62, 64, 65, 61, 72, 58, 49, 74, 56, 59, 65, 65, 66, 58, 44, 64, 71, 57, 64, 64, 68, 76, 78, 70, 50, 77, 63, 52, 71, 55, 67, 96, 111, 67, 76, 75, 58, 58, 60, 61, 60, 41, 62, 86, 54, 60, 77, 62, 73, 77, 66, 55, 63, 66, 60, 74, 58, 57, 71, 76, 88, 86, 67, 72, 78, 67, 57, 77, 63, 59, 89, 59, 79, 71, 55, 60, 67, 57, 65, 63, 48, 61, 79, 64, 69, 50, 85, 54, 44, 88, 54, 123, 68, 73, 66, 67, 66, 55, 74, 60, 100, 81, 48, 57, 59, 67, 53, 62, 71, 68, 68, 52, 69, 81, 60, 80, 78, 62, 65, 59, 72, 65, 70, 86, 64, 81, 63, 72, 46, 60, 47, 54, 102, 100, 56, 66, 67, 79, 73, 61, 55, 66, 60, 63, 86, 49, 52, 75, 56, 60, 56, 59, 50, 64, 69, 56, 80, 87, 77, 81, 46, 70, 81, 65, 46, 79, 63, 66, 58, 65, 63, 69, 84, 64, 66, 72, 43, 55, 60, 60, 54, 67, 59, 76, 68, 87, 58, 62, 69, 71, 47, 58, 78, 53, 72, 73, 60, 58, 67, 65, 41, 79, 73, 84, 81, 41, 53, 62, 70, 57, 51, 63, 77, 72, 53, 67, 59, 102, 98, 68, 48, 60, 42, 79, 81, 63, 76, 114, 73, 65, 77, 60, 67, 87, 83, 65, 66, 73, 65, 64, 88, 62, 63, 45, 68, 75, 64, 52, 58, 64, 55, 91, 76, 65, 63, 51, 65, 64, 68, 70, 56, 52, 72, 62, 89, 87, 93, 55, 64, 80, 72, 52, 68, 74, 62, 50, 59, 61, 68, 56, 62, 72, 53, 62, 62, 71, 76, 54, 54, 72, 73, 50, 68, 67, 61, 61, 74, 40, 67, 66, 60, 58, 73, 64, 57, 73, 84, 67, 68, 69, 76, 99, 67, 70, 82, 65, 83, 84, 64, 74, 89, 56, 67, 52, 109, 61, 53, 69, 90, 56, 70, 53, 80, 49, 59, 54, 42, 50, 55, 40, 77, 65, 60, 73, 79, 65, 68, 62, 48, 55, 63, 61, 62, 97, 80, 38, 54, 62, 43, 54, 50, 71, 68, 53, 129, 55, 58, 79, 59, 61, 79, 82, 60, 80, 77, 81, 71, 50, 51, 71, 58, 
52, 68, 57, 55, 49, 63, 91, 73, 73, 62, 45, 64, 69, 57, 77, 56, 44, 69, 55, 44, 54, 61, 55, 55, 72, 54, 55, 66, 54, 94, 52, 66, 83, 76, 65, 52, 50, 79, 143, 69, 63, 69, 81, 63, 58, 65, 62, 55, 66, 79, 65, 55, 51, 58, 74, 61, 61, 69, 58, 71, 61, 91, 75, 70, 57, 76, 69, 94, 46, 53, 73, 76, 48, 49, 63, 62, 57, 56, 56, 64, 119, 53, 55, 68, 52, 57, 74, 66, 62, 57, 60, 61, 58, 57, 70, 110, 54, 80, 95, 58, 136, 62, 61, 69, 59, 61, 60, 65, 106, 69, 77, 81, 57, 72, 56, 54, 42, 53, 78, 49, 55, 71, 48, 85, 64, 84, 49, 66, 61, 61, 58, 69, 56, 46, 63, 74, 72, 60, 63, 81, 73, 64, 49, 49, 66, 55, 72, 83, 60, 57, 105, 50, 58, 72, 65, 59, 114, 69, 92, 65, 81, 59, 62, 62, 60, 65, 59, 62, 59, 63, 52, 66, 52, 66, 47, 52, 72, 71, 112, 58, 46, 57, 58, 61, 45, 61, 74, 64, 76, 73, 65, 109, 64, 63, 58, 82, 66, 75, 72, 55, 64, 70, 57, 51, 74, 61, 67, 79, 52, 92, 78, 65, 58, 103, 47, 60, 48, 52, 51, 49, 65, 57, 61, 102, 70, 49, 46, 70, 63, 66, 62, 66, 77, 61, 62, 81, 65, 58, 70, 68, 71, 61, 57, 60, 44, 67, 66, 58, 49, 54, 62, 71, 53, 65, 52, 53, 138, 55, 62, 62, 53, 63, 54, 56, 64, 61, 88, 105, 66, 54, 57, 55, 81, 53, 49, 54, 66, 46, 77, 71, 64, 60, 69, 58, 52, 68, 68, 60, 54, 70, 59, 48, 64, 53, 58, 74, 55, 128, 48, 41, 72, 65, 48, 71, 85, 52, 66, 63, 74, 68, 77, 61, 73, 55, 83, 41, 42, 55, 77, 63, 64, 52, 69, 82, 64, 45, 73, 75, 55, 81, 77, 70, 63, 70, 68, 56, 76, 62, 62, 71, 63, 65, 98, 64, 52, 84, 68, 58, 60, 55, 64, 69, 68, 58, 57, 58, 74, 56, 49, 64, 45, 60, 72, 62, 57, 85, 51, 69, 85, 69, 63, 59, 54, 79, 54, 69, 68, 63, 64, 60, 56, 70, 55, 69, 85, 113, 69, 52, 53, 65, 73, 78, 57, 51, 46, 55, 67, 70, 56, 73, 56, 61, 58, 60, 114, 63, 45, 92, 79, 76, 69, 64, 60, 56, 52, 57, 65, 73, 76, 71, 55, 74, 87, 89, 62, 88, 75, 63, 91, 54, 59, 45, 75, 70, 71, 59, 71, 72, 71, 113, 91, 61, 79, 67, 41, 54, 58, 60, 58, 60, 69, 65, 64, 74, 83, 82, 69, 52, 63, 62, 69, 51, 56, 97, 76, 71, 55, 110, 92, 64, 69, 74, 66, 47, 66, 84, 57, 59, 49, 45, 80, 88, 63, 91, 46, 58, 63, 74, 74, 77, 53, 69, 60, 58, 69, 55, 73, 60, 61, 97, 74, 48, 54, 49, 69, 65, 60, 61, 97, 78, 72, 67, 71, 72, 54, 54, 37, 56, 49, 53, 64, 51, 62, 64, 39, 62, 71, 66, 80, 76, 62, 71, 61, 67, 70, 52, 75, 85, 60, 64, 62, 67, 65, 76, 68, 61, 43, 41, 36, 50, 44, 71, 60, 76, 69, 46, 80, 82, 82, 74, 46, 70, 75, 62, 78, 131, 70, 72, 62, 58, 106, 56, 65, 64, 68, 56, 60, 95, 75, 51, 70, 101, 73, 84, 72, 67, 65, 67, 56, 64, 50, 67, 62, 64, 72, 69, 70, 79, 78, 68, 34, 64, 57, 58, 87, 67, 77, 63, 60, 73, 67, 70, 43, 64, 73, 63, 66, 42, 58, 39, 55, 77, 62, 66, 72, 59, 50, 57, 70, 75, 59, 58, 57, 66, 86, 46, 74, 73, 57, 81, 65, 76, 71, 81, 70, 82, 66, 76, 70, 58, 72, 95, 71, 68, 74, 53, 60, 55, 72, 68, 78, 56, 54, 106, 48, 58, 56, 91, 80, 82, 77, 77, 51, 56, 70, 55, 79, 77, 101, 56, 57, 76, 68, 66, 76, 65, 57, 68, 72, 61, 96, 71, 68, 80, 50, 60, 52, 57, 56, 51, 57, 60, 83, 62, 64, 60, 61, 56, 60, 94, 43, 68, 99, 59, 92, 67, 57, 68, 47, 80, 48, 97, 58, 58, 80, 54, 63, 81, 62, 87, 57, 68, 107, 78, 60, 71, 70, 74, 75, 43, 54, 66, 74, 61, 75, 56, 65, 46, 67, 59, 60, 48, 66, 80, 93, 62, 42, 70, 83, 50, 77, 63, 75, 51, 64, 77, 69, 71, 77, 82, 62, 56, 67, 61, 99, 49, 60, 85, 69, 66, 58, 91, 65, 90, 50, 68, 68, 53, 70, 56, 63, 76, 64, 60, 55, 55, 76, 67, 60, 74, 80, 92, 64, 62, 69, 89, 80, 79, 53, 70, 64, 103, 69, 72, 56, 57, 73, 68, 56, 76, 62, 51, 60, 67, 71, 55, 86, 70, 57, 72, 78, 70, 54, 68, 55, 74, 81, 54, 91, 82, 56, 55, 73, 68, 69, 45, 65, 49, 79, 72, 62, 67, 60, 49, 82, 77, 55, 61, 80, 63, 51, 47, 56, 58, 71, 56, 74, 50, 63, 68, 68, 91, 53, 65, 64, 50, 70, 72, 90, 89, 74, 51, 
65, 63, 61, 65, 88, 70, 37, 51, 43, 69, 69, 58, 52, 50, 62, 51, 90, 63, 48, 69, 92, 80, 72, 118, 82, 56, 51, 95, 62, 59, 84, 45, 48, 51, 54, 75, 63, 70, 67, 68, 83, 55, 79, 78, 60, 68, 62, 57, 48, 60, 53, 92, 60, 65, 68, 76, 98, 49, 76, 64, 50, 44, 67, 53, 58, 69, 75, 87, 64, 60, 82, 69, 67, 61, 65, 134, 91, 52, 60, 67, 63, 55, 60, 54, 64, 57, 82, 62, 42, 64, 55, 60, 48, 68, 74, 74, 93, 68, 59, 63, 50, 61, 50, 62, 64, 58, 77, 91, 72, 89, 72, 70, 55, 70, 62, 48, 58, 80, 93, 58, 60, 55, 42, 87, 57, 54, 80, 51, 71, 69, 58, 84, 96, 64, 44, 58, 60, 46, 61, 62, 80, 54, 74, 62, 66, 65, 77, 46, 58, 58, 80, 56, 71, 63, 59, 60, 44, 69, 54, 75, 75, 62, 64, 76, 59, 73, 69, 66, 100, 83, 59, 48, 62, 58, 99, 64, 62, 46, 71, 68, 44, 62, 60, 63, 76, 85, 79, 62, 62, 60, 62, 69, 59, 63, 79, 61, 63, 71, 73, 48, 54, 79, 61, 72, 83, 69, 68, 92, 80, 65, 88, 53, 51, 56, 117, 84, 68, 78, 77, 80, 62, 59, 20, 88, 62, 108, 61, 64, 44, 67, 62, 88, 41, 54, 115, 51, 69, 86, 54, 68, 66, 70, 62, 62, 78, 63, 59, 63, 86, 59, 43, 67, 64, 66, 56, 64, 85, 77, 54, 68, 67, 52, 78, 72, 57, 63, 68, 56, 47, 60, 42, 76, 61, 74, 92, 63, 64, 68, 72, 66, 95, 69, 108, 62, 59, 62, 66, 91, 54, 89, 63, 48, 65, 50, 63, 68, 51, 57, 64, 81, 58, 68, 82, 55, 52, 61, 67, 54, 66, 74, 71, 86, 55, 48, 89, 81, 48, 61, 71, 57, 57, 63, 68, 98, 58, 50, 54, 65, 42, 69, 57, 78, 87, 64, 69, 67, 81, 61, 78, 56, 83, 58, 66, 136, 46, 54, 61, 75, 77, 60, 64, 87, 74, 60, 60, 80, 46, 50, 63, 44, 70, 76, 69, 59, 48, 58, 40, 49, 68, 73, 50, 77, 79, 70, 72, 61, 52, 70, 75, 69, 52, 60, 74, 49, 58, 121, 72, 65, 71, 47, 56, 67, 51, 78, 45, 66, 84, 60, 52, 61, 61, 63, 91, 45, 66, 63, 57, 54, 72, 69, 90, 39, 65, 40, 94, 45, 75, 68, 46, 49, 80, 59, 65, 71, 60, 89, 57, 98, 52, 74, 81, 68, 75, 54, 64, 52, 49, 55, 85, 69, 72, 78, 75, 65, 58, 78, 72, 53, 70, 57, 71, 47, 52, 70, 64, 83, 67, 62, 54, 54, 77, 44, 103, 75, 54, 78, 68, 73, 81, 60, 69, 64, 73, 78, 57, 83, 62, 54, 47, 55, 54, 53, 59, 81, 97, 56, 60, 59, 46, 61, 53, 60, 84, 84, 56, 63, 51, 68, 51, 56, 51, 64, 47, 53, 56, 66, 51, 65, 47, 64, 85, 49, 57, 71, 82, 57, 61, 80, 82, 71, 50, 79, 85, 61, 70, 57, 76, 42, 64, 56, 49, 71, 58, 81, 72, 43, 68, 79, 59, 91, 36, 61, 66, 71, 48, 52, 61, 70, 65, 50, 56, 50, 108, 71, 78, 81, 54, 67, 56, 74, 64, 64, 66, 53, 74, 68, 78, 69, 74, 86, 75, 56, 46, 79, 61, 32, 72, 60, 68, 47, 72, 55, 66, 51, 43, 52, 71, 74, 76, 58, 81, 59, 71, 49, 80, 61, 79, 87, 64, 80, 79, 64, 71, 61, 74, 71, 59, 62, 58, 53, 76, 61, 69, 55, 53, 52, 62, 61, 66, 71, 65, 78, 73, 39, 63, 98, 88, 43, 52, 92, 57, 79, 56, 58, 77, 69, 52, 57, 56, 42, 63, 53, 76, 59, 66, 55, 67, 71, 41, 67, 68, 59, 67, 72, 68, 57, 50, 96, 58, 55, 59, 129, 65, 66, 53, 63, 48, 68, 50, 70, 65, 84, 71, 88, 61, 62, 88, 99, 89, 66, 103, 68, 58, 55, 57, 63, 58, 68, 74, 66, 80, 51, 48, 72, 47, 45, 79, 34, 55, 46, 64, 64, 44, 54, 74, 46, 94, 70, 51, 64, 69, 64, 49, 72, 58, 55, 88, 54, 50, 51, 66, 74, 77, 73, 41, 71, 57, 90, 85, 75, 69, 62, 68, 61, 95, 105, 91, 79, 61, 64, 72, 47, 67, 71, 45, 39, 72, 64, 87, 48, 51, 63, 75, 46, 71, 65, 57, 62, 126, 49, 57, 67, 58, 62, 46, 60, 93, 43, 47, 58, 69, 85, 74, 55, 71, 65, 91, 67, 50, 73, 61, 50, 61, 52, 72, 64, 47, 64, 66, 70, 52, 53, 37, 50, 72, 48, 55, 94, 67, 80, 64, 72, 55, 63, 49, 58, 73, 68, 115, 73, 69, 81, 57, 52, 86, 72, 83, 72, 45, 86, 103, 97, 78, 66, 68, 81, 64, 72, 60, 77, 61, 68, 77, 57, 66, 73, 49, 50, 61, 70, 87, 66, 91, 86, 81, 81, 68, 71, 58, 51, 27, 46, 46, 76, 133, 53, 82, 73, 74, 76, 65, 100, 51, 57, 89, 46, 68, 69, 77, 74, 60, 60, 81, 82, 83, 50, 56, 49, 70, 61, 62, 58, 54, 99, 
76, 64, 55, 66, 70, 69, 109, 52, 66, 69, 73, 62, 71, 70, 66, 115, 94, 62, 64, 60, 56, 60, 73, 60, 54, 91, 55, 64, 70, 70, 64, 59, 100, 37, 74, 58, 65, 63, 62, 109, 41, 74, 56, 61, 66, 36, 66, 62, 59, 90, 104, 49, 59, 90, 68, 48, 68, 63, 80, 47, 61, 60, 56, 67, 64, 61, 66, 63, 52, 75, 50, 66, 77, 47, 62, 64, 59, 47, 77, 39, 62, 56, 60, 81, 83, 73, 43, 79, 63, 63, 68, 68, 68, 89, 63, 42, 68, 67, 62, 81, 54, 61, 53, 44, 67, 58, 145, 65, 77, 64, 62, 55, 89, 93, 67, 58, 75, 64, 90, 67, 79, 56, 61, 85, 63, 65, 77, 47, 56, 47, 59, 48, 72, 89, 61, 75, 80, 67, 59, 54, 74, 71, 49, 77, 100, 63, 65, 59, 52, 71, 60, 98, 74, 43, 51, 69, 68, 63, 64, 78, 81, 69, 90, 85, 76, 54, 68, 90, 52, 59, 41, 63, 41, 90, 65, 59, 75, 50, 52, 76, 61, 52, 34, 50, 67, 60, 51, 70, 62, 57, 87, 65, 51, 67, 50, 56, 63, 44, 56, 46, 62, 38, 46, 52, 49, 103, 83, 63, 67, 80, 88, 61, 63, 71, 124, 48, 55, 56, 53, 61, 78, 55, 90, 61, 84, 59, 48, 43, 39, 63, 57, 75, 62, 70, 77, 54, 66, 84, 90, 87, 65, 56, 73, 72, 54, 52, 110, 56, 56, 64, 67, 67, 78, 53, 59, 65, 81, 66, 73, 57, 64, 98, 63, 116, 63, 54, 55, 64, 58, 60, 69, 69, 71, 117, 54, 55, 96, 58, 82, 73, 100, 80, 52, 69, 49, 73, 67, 76, 61, 66, 164, 66, 58, 50, 79, 81, 60, 58, 67, 58, 66, 55, 61, 53, 65, 45, 69, 57, 65, 70, 49, 57, 87, 53, 56, 63, 69, 77, 61, 64, 68, 73, 53, 46, 53, 52, 75, 80, 80, 59, 76, 57, 73, 44, 65, 59, 82, 84, 74, 56, 61, 64, 48, 53, 94, 55, 47, 75, 71, 81, 73, 69, 115, 58, 46, 89, 67, 65, 70, 57, 66, 46, 72, 62, 72, 84, 85, 69, 50, 76, 112, 104, 62, 56, 49, 99, 68, 76, 63, 64, 55, 68, 104, 51, 56, 61, 50, 65, 69, 62, 62, 66, 64, 66, 57, 93, 44, 109, 85, 61, 109, 46, 62, 76, 78, 56, 74, 50, 51, 71, 56, 51, 75, 60, 68, 75, 98, 79, 107, 57, 70, 57, 52, 62, 118, 70, 59, 46, 74, 64, 71, 52, 51, 67, 66, 60, 70, 57, 73, 80, 81, 76, 68, 55, 87, 77, 56, 71, 86, 59, 60, 72, 53, 63, 69, 80, 67, 85, 82, 52, 78, 58, 60, 74, 64, 52, 66, 57, 74, 67, 53, 62, 75, 71, 57, 60, 54, 55, 63, 61, 67, 68, 77, 61, 48, 57, 54, 82, 73, 37, 81, 63, 53, 76, 63, 47, 72, 57, 65, 72, 44, 33, 75, 67, 52, 80, 66, 70, 75, 57, 70, 103, 69, 61, 58, 58, 62, 49, 73, 82, 50, 56, 60, 118, 63, 54, 85, 57, 76, 65, 59, 58, 64, 53, 68, 61, 48, 48, 69, 76, 70, 68, 65, 62, 58, 59, 50, 78, 72, 78, 77, 62, 56, 74, 67, 64, 80, 66, 54, 64, 55, 50, 60, 58, 78, 71, 76, 56, 57, 65, 103, 82, 93, 52, 57, 66, 66, 63, 56, 60, 76, 48, 48, 87, 50, 56, 61, 52, 58, 51, 88, 67, 66, 64, 68, 82, 67, 61, 75, 57, 54, 70, 61, 38, 54, 53, 67, 72, 61, 79, 47, 100, 48, 58, 54, 57, 63, 102, 53, 62, 68, 63, 76, 86, 62, 59, 58, 37, 50, 67, 52, 62, 95, 119, 43, 55, 56, 59, 54, 72, 85, 52, 48, 64, 63, 61, 62, 94, 71, 61, 86, 75, 77, 71, 72, 88, 84, 59, 69, 54, 69, 76, 68, 71, 57, 46, 72, 55, 62, 59, 75, 62, 89, 77, 80, 77, 67, 48, 64, 76, 56, 80, 59, 89, 42, 57, 50, 80, 55, 76, 55, 68, 78, 67, 71, 70, 52, 79, 77, 61, 57, 70, 97, 58, 94, 78, 66, 70, 47, 66, 52, 72, 87, 70, 67, 62, 59, 73, 49, 56, 61, 178, 49, 47, 66, 73, 49, 67, 59, 63, 49, 49, 66, 72, 65, 69, 69, 62, 87, 70, 84, 48, 63, 62, 64, 58, 69, 47, 81, 55, 60, 57, 66, 57, 67, 59, 66, 81, 63, 55, 58, 44, 64, 54, 53, 71, 72, 50, 34, 60, 59, 65, 54, 47, 71, 51, 71, 74, 99, 69, 49, 81, 48, 67, 60, 75, 65, 65, 76, 81, 85, 64, 65, 65, 61, 78, 83, 72, 58, 86, 58, 68, 64, 59, 87, 65, 61, 64, 65, 55, 78, 58, 63, 75, 75, 58, 47, 72, 69, 66, 69, 57, 85, 69, 57, 63, 65, 71, 41, 57, 58, 54, 40, 68, 90, 65, 50, 59, 56, 69, 74, 77, 71, 64, 73, 84, 97, 87, 59, 64, 67, 110, 53, 44, 69, 62, 72, 91, 72, 55, 66, 65, 45, 61, 63, 69, 51, 68, 68, 47, 59, 72, 60, 64, 66, 60, 82, 56, 74, 69, 70, 
49, 55, 67, 55, 85, 72, 95, 75, 94, 80, 67, 74, 58, 74, 65, 51, 77, 66, 66, 69, 70, 37, 82, 56, 60, 44, 60, 54, 61, 60, 73, 78, 54, 50, 74, 45, 67, 82, 39, 51, 51, 56, 61, 69, 72, 98, 62, 69, 68, 70, 59, 64, 124, 78, 58, 43, 76, 75, 46, 59, 68, 63, 83, 58, 37, 68, 59, 49, 77, 46, 64, 55, 72, 63, 71, 57, 67, 58, 62, 54, 59, 44, 57, 93, 57, 49, 56, 44, 65, 50, 78, 47, 46, 67, 45, 88, 65, 63, 55, 81, 39, 70, 78, 60, 60, 77, 68, 69, 66, 73, 57, 65, 65, 56, 68, 59, 114, 62, 105, 63, 82, 58, 67, 57, 88, 57, 69, 81, 59, 48, 57, 62, 65, 79, 64, 51, 69, 108, 63, 61, 52, 68, 69, 62, 66, 60, 55, 57, 68, 56, 54, 61, 57, 72, 69, 60, 70, 91, 66, 81, 88, 83, 101, 67, 74, 66, 57, 80, 73, 47, 68, 56, 52, 91, 68, 65, 63, 77, 67, 65, 70, 63, 66, 75, 64, 77, 94, 50, 69, 62, 69, 76, 69, 52, 68, 59, 64, 61, 45, 78, 45, 122, 66, 72, 64, 73, 53, 71, 80, 48, 63, 75, 50, 77, 60, 54, 66, 68, 53, 69, 58, 75, 65, 79, 69, 34, 49, 81, 68, 75, 64, 64, 75, 85, 72, 67, 58, 63, 72, 65, 79, 71, 83, 65, 56, 77, 76, 56, 65, 90, 51, 53, 93, 60, 62, 76, 61, 57, 51, 61, 70, 65, 57, 78, 63, 69, 69, 67, 82, 72, 76, 81, 88, 82, 77, 67, 38, 66, 81, 58, 55, 31, 76, 60, 64, 59, 53, 62, 85, 51, 72, 84, 60, 60, 68, 62, 65, 65, 63, 69, 92, 83, 54, 72, 58, 61, 58, 65, 64, 72, 58, 73, 79, 98, 60, 53, 72, 84, 47, 55, 53, 53, 56, 91, 56, 50, 68, 67, 86, 64, 39, 53, 57, 130, 41, 81, 66, 62, 63, 75, 65, 51, 61, 49, 54, 64, 69, 83, 75, 80, 70, 66, 61, 72, 87, 58, 56, 61, 95, 47, 87, 63, 88, 61, 42, 61, 65, 67, 50, 56, 84, 60, 42, 68, 50, 60, 62, 58, 142, 85, 62, 59, 69, 64, 79, 75, 56, 71, 54, 54, 75, 81, 56, 75, 43, 56, 83, 54, 80, 76, 75, 61, 65, 56, 64, 71, 57, 63, 52, 81, 68, 59, 59, 46, 127, 76, 69, 103, 52, 43, 68, 58, 62, 64, 67, 60, 60, 70, 37, 64, 44, 61, 74, 70, 40, 101, 65, 55, 89, 54, 63, 48, 52, 66, 69, 76, 61, 63, 56, 77, 46, 69, 68, 64, 84, 58, 59, 60, 64, 69, 78, 81, 47, 49, 56, 71, 72, 59, 65, 63, 73, 65, 79, 71, 60, 61, 73, 61, 49, 52, 50, 64, 78, 59, 66, 56, 73, 69, 73, 65, 38, 67, 52, 57, 67, 65, 67, 70, 66, 98, 71, 54, 71, 68, 64, 40, 88, 61, 64, 51, 53, 60, 64, 41, 54, 63, 78, 88, 75, 57, 76, 66, 75, 62, 91, 59, 59, 73, 45, 96, 72, 65, 65, 86, 52, 66, 51, 80, 56, 55, 31, 81, 88, 43, 43, 50, 65, 35, 64, 63, 53, 50, 61, 90, 74, 45, 62, 51, 43, 36, 58, 73, 66, 58, 41, 55, 79, 52, 59, 65, 57, 57, 50, 41, 54, 126, 60, 52, 54, 69, 71, 48, 53, 51, 67, 55, 84, 68, 69, 74, 62, 121, 70, 79, 59, 62, 56, 62, 74, 48, 37, 52, 66, 57, 58, 61, 91, 59, 62, 70, 47, 62, 65, 71, 63, 65, 44, 63, 56, 111, 55, 77, 45, 57, 47, 68, 88, 52, 87, 88, 59, 49, 62, 67, 103, 82, 69, 75, 65, 45, 69, 60, 46, 78, 80, 46, 82, 55, 116, 66, 81, 66, 53, 44, 58, 64, 50, 67, 51, 75, 63, 31, 66, 65, 66, 53, 75, 46, 73, 62, 76, 86, 67, 60, 83, 73, 117, 61, 63, 58, 57, 71, 88, 57, 93, 76, 58, 74, 57, 54, 57, 87, 69, 43, 74, 54, 55, 76, 57, 58, 52, 56, 62, 67, 86, 61, 82, 75, 52, 62, 67, 76, 71, 59, 39, 48, 74, 50, 53, 40, 82, 44, 77, 73, 61, 72, 48, 72, 62, 67, 49, 73, 119, 67, 53, 66, 99, 82, 85, 71, 50, 68, 59, 43, 70, 89, 41, 75, 126, 89, 109, 50, 66, 59, 95, 49, 69, 47, 93, 56, 68, 36, 56, 49, 57, 63, 52, 64, 71, 59, 96, 51, 64, 47, 79, 82, 46, 62, 67, 42, 62, 58, 58, 51, 97, 66, 44, 55, 72, 58, 56, 52, 52, 49, 55, 60, 68, 69, 50, 74, 64, 50, 67, 65, 61, 81, 52, 55, 49, 52, 79, 69, 140, 83, 81, 53, 49, 65, 60, 56, 68, 62, 59, 107, 83, 62, 55, 56, 89, 46, 90, 58, 48, 60, 57, 59, 57, 64, 97, 74, 52, 113, 42, 44, 66, 57, 86, 45, 88, 68, 62, 73, 63, 42, 57, 66, 79, 102, 61, 63, 55, 56, 65, 72, 59, 68, 52, 53, 48, 81, 54, 72, 64, 85, 49, 83, 63, 67, 67, 79, 91, 64, 
52, 61, 72, 61, 43, 63, 79, 54, 49, 60, 65, 66, 51, 90, 53, 44, 65, 53, 59, 31, 51, 70, 88, 47, 60, 55, 71, 53, 62, 71, 61, 66, 76, 58, 72, 67, 99, 67, 59, 78, 70, 71, 63, 83, 75, 77, 56, 60, 74, 83, 59, 67, 58, 66, 45, 68, 62, 91, 57, 68, 59, 89, 46, 64, 70, 56, 40, 77, 61, 115, 68, 54, 60, 61, 55, 67, 45, 38, 70, 81, 52, 84, 35, 81, 74, 56, 83, 72, 81, 49, 64, 71, 65, 65, 76, 100, 56, 57, 138, 95, 61, 72, 116, 94, 67, 60, 68, 35, 54, 71, 35, 66, 55, 57, 85, 60, 40, 82, 75, 65, 77, 54, 47, 70, 62, 66, 55, 74, 50, 83, 71, 65, 84, 50, 68, 81, 63, 84, 67, 61, 64, 67, 75, 88, 109, 64, 52, 88, 75, 57, 48, 46, 77, 67, 70, 55, 62, 76, 88, 55, 78, 92, 63, 76, 119, 64, 62, 69, 66, 71, 70, 65, 74, 58, 65, 59, 58, 42, 79, 60, 61, 69, 56, 53, 61, 64, 74, 60, 72, 70, 53, 70, 83, 66, 80, 65, 49, 66, 86, 57, 66, 55, 71, 65, 94, 41, 66, 51, 62, 63, 51, 61, 62, 60, 65, 72, 80, 72, 63, 55, 84, 47, 55, 66, 64, 70, 48, 67, 51, 69, 98, 96, 43, 75, 83, 41, 123, 130, 43, 76, 75, 79, 81, 81, 63, 86, 70, 63, 47, 64, 58, 64, 61, 59, 69, 63, 79, 58, 55, 50, 97, 63, 74, 92, 72, 49, 78, 95, 76, 65, 53, 68, 52, 84, 70, 65, 63, 99, 61, 84, 50, 77, 79, 116, 70, 49, 73, 87, 63, 70, 57, 41, 57, 60, 41, 68, 54, 81, 62, 68, 66, 58, 92, 64, 63, 41, 49, 43, 74, 60, 89, 56, 84, 85, 75, 54, 48, 67, 54, 65, 69, 61, 74, 64, 89, 51, 73, 88, 64, 55, 55, 65, 68, 79, 68, 88, 74, 69, 70, 72, 64, 39, 42, 63, 57, 59, 59, 93, 57, 61, 72, 58, 53, 99, 61, 52, 52, 83, 67, 66, 66, 78, 59, 120, 54, 59, 62, 47, 70, 79, 85, 60, 41, 54, 70, 58, 69, 40, 56, 60, 89, 60, 67, 75, 74, 66, 53, 88, 62, 62, 84, 96, 66, 49, 72, 71, 97, 60, 70, 63, 67, 53, 90, 64, 62, 75, 77, 65, 56, 51, 83, 55, 118, 73, 69, 66, 69, 82, 78, 53, 62, 76, 83, 58, 64, 86, 71, 61, 63, 48, 60, 54, 76, 61, 55, 59, 38, 66, 42, 62, 41, 62, 93, 100, 75, 64, 58, 73, 73, 55, 56, 45, 65, 69, 57, 56, 62, 75, 52, 75, 70, 55, 79, 86, 100, 87, 59, 78, 71, 73, 86, 61, 59, 82, 88, 43, 85, 58, 60, 69, 81, 46, 70, 60, 50, 62, 68, 85, 52, 73, 66, 57, 50, 64, 124, 69, 76, 66, 51, 61, 87, 63, 49, 63, 48, 69, 83, 76, 61, 43, 48, 63, 95, 66, 54, 62, 32, 63, 52, 50, 74, 61, 46, 59, 54, 85, 57, 63, 60, 66, 53, 97, 42, 74, 72, 107, 82, 74, 73, 58, 64, 75, 56, 92, 41, 47, 57, 54, 50, 92, 59, 54, 71, 71, 77, 80, 63, 84, 59, 52, 70, 59, 79, 93, 68, 90, 86, 90, 66, 78, 64, 64, 51, 110, 70, 55, 51, 54, 72, 49, 45, 59, 43, 35, 70, 80, 54, 61, 55, 61, 70, 93, 83, 66, 102, 60, 64, 75, 64, 77, 69, 59, 56, 48, 47, 49, 58, 62, 67, 57, 39, 68, 61, 76, 61, 120, 81, 48, 69, 63, 71, 67, 78, 61, 65, 62, 53, 85, 62, 62, 73, 61, 55, 95, 69, 61, 62, 64, 89, 57, 63, 79, 77, 73, 64, 48, 72, 59, 71, 83, 62, 62, 47, 64, 54, 79, 68, 68, 60, 58, 59, 54, 87, 82, 76, 93, 51, 84, 84, 74, 73, 104, 51, 66, 67, 67, 80, 67, 66, 71, 52, 64, 60, 54, 90, 36, 72, 60, 70, 77, 61, 62, 66, 57, 56, 74, 59, 64, 58, 62, 94, 64, 44, 50, 50, 70, 68, 58, 81, 66, 76, 61, 62, 69, 62, 58, 62, 67, 65, 78, 78, 70, 78, 71, 64, 73, 54, 71, 74, 72, 68, 55, 64, 72, 63, 53, 51, 61, 90, 48, 60, 48, 69, 70, 54, 59, 50, 66, 71, 72, 55, 66, 58, 59, 71, 78, 67, 111, 65, 68, 68, 48, 47, 68, 77, 50, 59, 57, 40, 55, 70, 80, 64, 57, 59, 55, 62, 64, 65, 83, 67, 62, 81, 78, 67, 68, 53, 67, 44, 63, 53, 60, 76, 67, 58, 54, 59, 67, 58, 50, 64, 58, 75, 54, 59, 60, 56, 58, 165, 63, 90, 81, 66, 61, 52, 62, 56, 77, 57, 39, 68, 85, 78, 51, 68, 74, 61, 83, 62, 64, 47, 56, 51, 64, 77, 57, 57, 66, 65, 70, 82, 64, 74, 90, 61, 69, 73, 53, 69, 58, 69, 65, 69, 51, 67, 72, 55, 63, 55, 76, 66, 74, 64, 58, 58, 65, 67, 64, 71, 73, 63, 61, 73, 86, 63, 81, 59, 55, 98, 59, 99, 60, 62, 
76, 76, 61, 57, 55, 62, 62, 86, 69, 109, 63, 70, 54, 46, 64, 63, 91, 64, 63, 91, 49, 76, 77, 57, 48, 53, 71, 62, 82, 65, 51, 65, 54, 60, 59, 64, 73, 65, 66, 91, 65, 69, 44, 63, 88, 62, 63, 63, 82, 43, 62, 52, 60, 51, 46, 69, 53, 84, 61, 80, 90, 80, 53, 72, 66, 66, 65, 61, 64, 60, 29, 51, 64, 58, 73, 63, 65, 70, 53, 60, 61, 48, 73, 63, 56, 74, 74, 59, 51, 80, 51, 52, 53, 84, 70, 70, 51, 64, 73, 80, 50, 66, 63, 77, 60, 79, 62, 71, 71, 96, 71, 74, 55, 64, 63, 48, 60, 67, 49, 73, 78, 67, 55, 56, 52, 69, 65, 61, 66, 63, 68, 70, 61, 50, 87, 58, 74, 66, 70, 60, 72, 76, 74, 55, 44, 60, 63, 57, 73, 70, 53, 56, 65, 44, 86, 63, 66, 47, 62, 60, 62, 44, 52, 64, 82, 53, 76, 42, 85, 49, 61, 72, 48, 66, 62, 79, 63, 55, 57, 53, 75, 52, 67, 60, 104, 81, 50, 57, 52, 55, 63, 50, 62, 78, 60, 59, 51, 65, 54, 52, 62, 68, 65, 64, 49, 63, 63, 62, 81, 62, 53, 46, 64, 59, 50, 87, 81, 81, 59, 68, 67, 75, 107, 61, 90, 70, 51, 54, 60, 83, 72, 73, 59, 57, 48, 77, 76, 70, 60, 66, 50, 73, 53, 68, 71, 43, 58, 65, 71, 55, 57, 79, 68, 55, 64, 68, 68, 76, 58, 87, 63, 69, 79, 63, 57, 77, 46, 68, 47, 66, 81, 34, 58, 71, 38, 50, 48, 59, 53, 65, 76, 62, 49, 46, 85, 64, 53, 54, 71, 67, 73, 68, 62, 73, 80, 79, 61, 66, 66, 64, 50, 60, 63, 67, 60, 79, 50, 68, 62, 56, 61, 58, 55, 76, 63, 86, 54, 72, 58, 86, 81, 56, 66, 67, 53, 90, 64, 58, 56, 42, 79, 66, 72, 47, 70, 62, 68, 83, 64, 69, 62, 68, 89, 61, 51, 68, 57, 66, 67, 75, 79, 70, 75, 68, 60, 82, 105, 83, 91, 48, 53, 74, 79, 56, 64, 49, 70, 64, 67, 73, 68, 68, 79, 56, 104, 74, 63, 86, 79, 73, 73, 105, 63, 79, 59, 67, 75, 71, 56, 67, 77, 69, 69, 73, 60, 48, 68, 86, 65, 65, 76, 78, 57, 62, 60, 73, 71, 67, 66, 51, 59, 91, 75, 57, 68, 84, 70, 72, 89, 118, 64, 74, 63, 65, 63, 63, 52, 60, 52, 65, 83, 88, 97, 64, 108, 97, 85, 73, 60, 78, 50, 70, 62, 66, 42, 62, 66, 70, 63, 85, 53, 80, 65, 54, 68, 70, 62, 62, 41, 59, 79, 72, 67, 62, 74, 58, 42, 69, 58, 86, 55, 45, 65, 72, 64, 53, 69, 78, 59, 76, 70, 69, 60, 54, 60, 58, 62, 55, 52, 71, 59, 74, 63, 75, 62, 56, 57, 64, 42, 64, 64, 83, 103, 74, 60, 77, 57, 73, 72, 69, 109, 57, 51, 63, 60, 41, 56, 51, 70, 60, 67, 52, 92, 85, 63, 60, 73, 54, 69, 62, 51, 47, 89, 77, 53, 71, 84, 73, 79, 42, 59, 67, 68, 72, 61, 61, 56, 70, 76, 65, 43, 54, 70, 35, 68, 78, 62, 56, 53, 48, 59, 41, 69, 73, 53, 65, 85, 87, 67, 50, 68, 57, 89, 92, 58, 61, 75, 57, 63, 57, 82, 87, 68, 69, 52, 63, 70, 99, 50, 58, 76, 55, 65, 67, 65, 83, 80, 61, 66, 68, 53, 58, 69, 55, 55, 59, 73, 71, 90, 65, 53, 68, 59, 53, 61, 43, 42, 59, 72, 66, 50, 59, 67, 59, 65, 62, 82, 141, 59, 38, 54, 102, 66, 63, 62, 72, 84, 60, 77, 66, 53, 54, 63, 63, 73, 94, 64, 75, 70, 66, 63, 67, 67, 63, 56, 51, 88, 65, 59, 63, 84, 83, 42, 63, 82, 50, 72, 58, 90, 73, 56, 76, 83, 73, 71, 59, 72, 61, 61, 64, 57, 51, 50, 53, 48, 62, 52, 74, 72, 56, 63, 45, 87, 67, 63, 82, 61, 66, 66, 66, 62, 63, 74, 77, 48, 36, 80, 60, 50, 53, 58, 55, 66, 71, 67, 67, 68, 77, 60, 41, 82, 67, 65, 95, 66, 68, 71, 58, 89, 74, 86, 65, 65, 75, 66, 55, 55, 84, 70, 96, 65, 63, 73, 66, 55, 69, 73, 73, 55, 70, 81, 86, 69, 60, 61, 63, 57, 62, 70, 54, 110, 52, 41, 59, 102, 64, 76, 75, 81, 70, 52, 59, 70, 57, 64, 69, 81, 75, 57, 67, 87, 85, 55, 70, 98, 57, 64, 58, 63, 59, 89, 49, 28, 51, 73, 77, 70, 70, 67, 53, 66, 60, 65, 81, 74, 75, 69, 40, 72, 60, 75, 72, 83, 60, 69, 53, 70, 44, 51, 69, 78, 68, 58, 76, 57, 57, 66, 86, 44, 45, 53, 68, 71, 67, 67, 57, 75, 75, 68, 74, 58, 83, 64, 99, 50, 67, 81, 72, 71, 68, 80, 60, 60, 69, 58, 70, 57, 59, 61, 62, 126, 56, 64, 53, 60, 62, 70, 92, 71, 83, 81, 68, 65, 57, 52, 125, 62, 69, 78, 36, 67, 55, 91, 62, 
55, 76, 52, 75, 81, 70, 38, 57, 57, 58, 66, 63, 55, 59, 66, 84, 54, 62, 77, 67, 88, 70, 70, 62, 58, 81, 58, 82, 78, 58, 63, 89, 65, 65, 75, 70, 90, 81, 63, 71, 57, 76, 59, 71, 64, 62, 63, 73, 64, 69, 78, 96, 82, 59, 75, 67, 53, 55, 60, 74, 56, 70, 45, 67, 107, 62, 60, 72, 75, 53, 71, 119, 60, 70, 65, 53, 83, 60, 60, 61, 56, 79, 70, 56, 66, 82, 75, 68, 54, 65, 74, 51, 66, 61, 70, 57, 38, 73, 69, 91, 49, 94, 70, 76, 76, 95, 47, 57, 77, 87, 62, 57, 68, 72, 58, 51, 56, 56, 73, 70, 63, 69, 78, 57, 59, 90, 49, 74, 61, 73, 72, 63, 40, 52, 64, 66, 57, 62, 60, 73, 48, 33, 39, 77, 60, 47, 67, 45, 61, 65, 65, 70, 76, 70, 72, 64, 91, 53, 51, 98, 73, 102, 67, 67, 64, 76, 58, 54, 66, 70, 84, 71, 47, 52, 53, 66, 55, 68, 62, 51, 67, 76, 65, 47, 77, 60, 39, 55, 84, 95, 51, 63, 80, 72, 64, 72, 67, 73, 58, 65, 63, 66, 66, 60, 54, 46, 61, 60, 62, 82, 83, 55, 64, 70, 85, 61, 64, 55, 58, 82, 64, 71, 80, 89, 52, 59, 63, 50, 66, 58, 53, 63, 40, 71, 70, 67, 63, 117, 62, 64, 58, 68, 61, 57, 74, 111, 52, 69, 86, 42, 65, 75, 67, 65, 66, 50, 69, 65, 67, 75, 67, 48, 85, 64, 54, 48, 92, 80, 86, 70, 43, 76, 55, 46, 71, 67, 75, 97, 70, 57, 62, 57, 65, 66, 58, 57, 87, 60, 59, 68, 61, 94, 48, 61, 55, 67, 55, 49, 52, 61, 62, 102, 75, 72, 65, 62, 71, 50, 53, 64, 73, 66, 37, 106, 65, 49, 62, 79, 108, 71, 79, 57, 63, 57, 89, 75, 58, 93, 58, 51, 56, 57, 78, 85, 77, 74, 86, 73, 62, 58, 63, 57, 58, 60, 71, 78, 67, 59, 49, 48, 63, 58, 88, 68, 59, 56, 45, 69, 51, 94, 61, 51, 81, 66, 77, 133, 60, 59, 63, 71, 57, 58, 62, 75, 72, 62, 76, 64, 44, 56, 53, 68, 55, 55, 59, 79, 75, 55, 71, 87, 75, 41, 55, 75, 67, 67, 80, 51, 74, 86, 61, 77, 49, 70, 63, 73, 55, 70, 70, 62, 65, 61, 58, 64, 58, 63, 52, 64, 48, 59, 81, 74, 69, 64, 60, 98, 53, 50, 85, 59, 59, 58, 67, 57, 36, 69, 63, 67, 63, 60, 49, 63, 60, 50, 61, 77, 57, 73, 49, 67, 55, 55, 60, 78, 50, 56, 66, 53, 62, 58, 61, 57, 48, 74, 54, 52, 61, 67, 50, 77, 59, 123, 60, 59, 52, 85, 62, 89, 41, 60, 61, 55, 63, 70, 71, 89, 78, 100, 72, 74, 76, 58, 66, 91, 75, 53, 52, 57, 48, 80, 70, 81, 69, 70, 89, 59, 64, 101, 57, 67, 47, 42, 103, 63, 77, 58, 68, 54, 51, 64, 63, 77, 52, 86, 68, 54, 65, 70, 73, 71, 62, 93, 54, 86, 69, 73, 57, 56, 58, 59, 56, 83, 56, 74, 92, 65, 38, 70, 66, 71, 57, 68, 74, 54, 41, 79, 58, 66, 76, 64, 68, 75, 52, 49, 95, 66, 68, 65, 76, 45, 77, 63, 39, 57, 65, 69, 40, 47, 49, 51, 72, 82, 68, 61, 73, 67, 75, 76, 61, 58, 73, 76, 68, 68, 59, 69, 81, 80, 55, 85, 66, 69, 75, 64, 80, 57, 37, 84, 71, 56, 55, 75, 62, 77, 68, 57, 76, 76, 97, 62, 69, 53, 70, 67, 39, 98, 69, 67, 61, 62, 79, 66, 57, 40, 82, 77, 80, 45, 102, 53, 63, 68, 48, 56, 51, 56, 66, 55, 74, 68, 55, 88, 76, 48, 67, 63, 52, 47, 64, 69, 55, 49, 56, 58, 58, 60, 49, 54, 59, 50, 72, 84, 63, 72, 75, 57, 73, 70, 91, 77, 93, 58, 68, 52, 60, 99, 69, 52, 57, 71, 50, 52, 51, 69, 73, 61, 83, 76, 58, 57, 63, 82, 78, 96, 61, 57, 64, 50, 87, 87, 70, 71, 53, 111, 52, 50, 89, 67, 85, 116, 82, 89, 63, 55, 68, 74, 48, 51, 68, 40, 100, 42, 50, 53, 59, 88, 52, 55, 68, 48, 49, 68, 67, 58, 50, 53, 68, 89, 61, 64, 48, 51, 72, 79, 61, 71, 59, 71, 55, 77, 77, 55, 87, 77, 70, 63, 60, 55, 79, 81, 46, 50, 73, 55, 63, 64, 57, 79, 49, 81, 48, 82, 76, 81, 66, 65, 77, 74, 50, 69, 42, 52, 66, 47, 89, 101, 62, 101, 94, 51, 55, 60, 65, 65, 55, 72, 53, 33, 81, 86, 58, 77, 71, 48, 54, 58, 88, 75, 59, 60, 55, 77, 68, 77, 72, 102, 70, 48, 62, 76, 55, 71, 54, 63, 69, 52, 57, 62, 69, 88, 62, 62, 65, 71, 68, 59, 57, 55, 76, 77, 55, 72, 105, 74, 84, 72, 56, 61, 95, 54, 88, 56, 42, 69, 51, 56, 47, 71, 69, 48, 57, 58, 46, 75, 73, 68, 59, 57, 144, 62, 45, 72, 83, 
38, 67, 62, 53, 61, 77, 55, 43, 63, 86, 59, 66, 58, 63, 94, 68, 71, 72, 70, 57, 60, 56, 62, 57, 64, 77, 87, 54, 55, 75, 60, 60, 60, 46, 70, 65, 49, 46, 42, 49, 69, 69, 62, 68, 57, 76, 73, 74, 70, 80, 46, 50, 47, 76, 50, 73, 72, 71, 44, 62, 66, 36, 66, 67, 68, 59, 67, 68, 65, 46, 58, 62, 60, 62, 63, 76, 56, 66, 56, 63, 66, 55, 84, 52, 91, 58, 58, 91, 49, 88, 75, 72, 50, 67, 72, 100, 61, 42, 83, 79, 58, 37, 62, 50, 108, 58, 74, 44, 56, 70, 71, 55, 75, 69, 76, 51, 69, 69, 108, 70, 56, 68, 78, 58, 71, 91, 50, 74, 44, 74, 86, 75, 54, 60, 70, 29, 58, 75, 80, 96, 48, 57, 47, 66, 42, 70, 80, 64, 51, 75, 69, 56, 68, 63, 47, 49, 67, 40, 53, 82, 72, 63, 69, 71, 70, 116, 72, 71, 66, 133, 75, 89, 67, 71, 104, 73, 65, 65, 68, 60, 45, 64, 57, 58, 89, 64, 65, 51, 78, 79, 85, 65, 68, 75, 63, 58, 77, 67, 69, 48, 80, 68, 58, 57, 50, 48, 68, 55, 63, 52, 42, 42, 40, 72, 85, 66, 56, 51, 72, 85, 78, 56, 43, 64, 76, 83, 77, 87, 67, 79, 48, 74, 84, 86, 63, 81, 58, 74, 78, 68, 46, 72, 70, 79, 50, 78, 72, 51, 69, 64, 59, 54, 73, 67, 53, 48, 60, 58, 72, 55, 68, 57, 70, 30, 76, 115, 56, 152, 66, 48, 61, 58, 67, 35, 60, 68, 56, 79, 59, 79, 89, 63, 63, 53, 59, 56, 68, 52, 61, 58, 82, 68, 72, 64, 68, 66, 66, 51, 68, 66, 68, 61, 73, 54, 66, 63, 61, 48, 46, 60, 77, 62, 42, 78, 63, 62, 58, 57, 57, 56, 76, 64, 65, 50, 45, 34, 86, 59, 75, 86, 64, 64, 58, 87, 64, 55, 68, 74, 105, 50, 53, 60, 92, 57, 59, 55, 58, 55, 71, 54, 100, 58, 63, 57, 60, 56, 71, 64, 57, 70, 76, 57, 49, 73, 48, 65, 43, 85, 61, 47, 78, 56, 60, 73, 78, 55, 58, 74, 105, 82, 69, 71, 63, 50, 89, 65, 57, 70, 80, 57, 35, 71, 59, 63, 65, 62, 72, 55, 59, 55, 67, 95, 58, 50, 59, 58, 87, 50, 85, 58, 80, 55, 56, 63, 59, 69, 58, 28, 52, 47, 51, 63, 75, 75, 52, 75, 43, 60, 62, 57, 61, 89, 69, 78, 51, 59, 62, 71, 52, 75, 76, 67, 58, 88, 79, 52, 53, 75, 49, 54, 55, 62, 67, 46, 95, 64, 68, 61, 78, 68, 63, 82, 45, 68, 62, 52, 74, 53, 82, 53, 65, 56, 71, 49, 54, 55, 70, 68, 88, 59, 63, 79, 61, 77, 61, 73, 48, 68, 70, 65, 80, 91, 60, 64, 52, 84, 57, 60, 64, 57, 85, 65, 77, 95, 58, 80, 49, 65, 118, 46, 47, 92, 61, 62, 48, 73, 69, 84, 58, 61, 62, 75, 89, 65, 75, 65, 44, 59, 53, 57, 49, 60, 64, 71, 58, 91, 53, 55, 52, 68, 58, 62, 50, 66, 76, 67, 41, 52, 74, 44, 91, 51, 66, 64, 118, 100, 54, 48, 111, 60, 79, 67, 71, 75, 76, 91, 58, 60, 68, 60, 53, 71, 69, 69, 73, 51, 69, 58, 49, 44, 63, 50, 64, 52, 135, 63, 67, 38, 57, 58, 71, 47, 42, 73, 62, 72, 56, 87, 122, 67, 55, 94, 100, 65, 53, 51, 67, 50, 53, 59, 60, 48, 64, 80, 54, 63, 65, 86, 46, 60, 81, 60, 73, 66, 74, 99, 55, 70, 52, 70, 65, 98, 79, 70, 49, 60, 52, 47, 60, 99, 53, 59, 62, 66, 86, 54, 66, 64, 58, 76, 65, 52, 89, 81, 77, 58, 55, 69, 41, 43, 64, 74, 67, 83, 56, 48, 66, 45, 60, 59, 46, 75, 58, 49, 44, 47, 54, 74, 94, 49, 68, 63, 65, 67, 41, 54, 68, 74, 73, 63, 58, 81, 46, 103, 61, 44, 73, 64, 37, 71, 64, 63, 93, 61, 59, 83, 87, 53, 59, 87, 64, 58, 88, 62, 77, 76, 74, 68, 101, 58, 54, 77, 66, 62, 60, 52, 92, 64, 106, 56, 70, 48, 54, 63, 63, 58, 52, 63, 89, 65, 41, 79, 75, 45, 54, 75, 49, 79, 68, 43, 77, 111, 62, 62, 82, 56, 49, 58, 54, 53, 69, 67, 82, 61, 58, 52, 58, 84, 74, 81, 59, 76, 59, 53, 64, 55, 63, 66, 65, 78, 82, 72, 78, 76, 68, 60, 63, 46, 68, 62, 63, 83, 64, 62, 78, 86, 63, 59, 79, 56, 59, 115, 71, 54, 72, 76, 35, 61, 68, 56, 72, 74, 65, 93, 69, 73, 86, 45, 60, 51, 45, 75, 69, 43, 90, 59, 62, 57, 54, 57, 55, 34, 51, 62, 90, 59, 51, 60, 71, 52, 45, 42, 93, 46, 74, 76, 68, 69, 49, 77, 55, 62, 69, 44, 96, 57, 65, 75, 53, 64, 78, 63, 61, 85, 75, 67, 71, 31, 74, 65, 54, 59, 57, 63, 101, 116, 86, 60, 84, 68, 60, 80, 
63, 68, 66, 73, 60, 75, 60, 63, 54, 58, 59, 40, 80, 100, 59, 63, 52, 62, 61, 61, 64, 69, 79, 48, 70, 71, 66, 69, 53, 71, 73, 76, 64, 89, 45, 62, 56, 52, 72, 55, 80, 56, 52, 49, 69, 59, 29, 81, 72, 70, 51, 90, 58, 39, 61, 65, 84, 93, 46, 69, 60, 53, 75, 83, 67, 60, 73, 120, 52, 63, 61, 42, 73, 83, 85, 73, 77, 57, 65, 47, 83, 59, 49, 51, 68, 68, 56, 51, 68, 77, 77, 44, 61, 69, 52, 71, 73, 58, 63, 60, 56, 73, 60, 58, 65, 82, 72, 88, 49, 73, 46, 53, 63, 69, 56, 82, 61, 61, 75, 65, 81, 110, 58, 66, 73, 64, 75, 49, 73, 71, 56, 60, 73, 63, 60, 72, 51, 60, 50, 37, 46, 68, 78, 51, 66, 71, 49, 71, 79, 75, 61, 66, 52, 88, 69, 61, 77, 67, 78, 45, 61, 61, 62, 85, 68, 61, 60, 41, 63, 54, 40, 55, 69, 65, 45, 84, 88, 81, 86, 58, 71, 48, 74, 52, 81, 54, 39, 63, 54, 84, 50, 68, 71, 54, 69, 59, 91, 66, 56, 76, 59, 73, 69, 71, 48, 65, 60, 62, 72, 63, 72, 69, 83, 50, 49, 43, 66, 55, 54, 63, 56, 68, 86, 66, 50, 45, 71, 71, 71, 65, 102, 95, 85, 44, 66, 66, 54, 48, 62, 59, 77, 60, 73, 65, 68, 78, 70, 76, 40, 67, 53, 97, 77, 57, 72, 83, 96, 62, 42, 55, 59, 64, 122, 82, 51, 87, 71, 83, 54, 61, 145, 60, 57, 50, 55, 63, 58, 69, 64, 76, 52, 73, 49, 66, 35, 65, 87, 59, 45, 96, 146, 53, 53, 55, 63, 57, 71, 57, 61, 63, 75, 58, 65, 75, 44, 65, 42, 46, 82, 70, 80, 68, 69, 79, 102, 51, 60, 57, 67, 78, 61, 55, 96, 101, 56, 66, 68, 72, 43, 58, 97, 67, 63, 74, 68, 63, 71, 91, 63, 83, 55, 49, 56, 68, 68, 54, 60, 67, 49, 63, 71, 67, 69, 40, 69, 73, 51, 72, 91, 70, 47, 59, 56, 82, 66, 76, 73, 64, 61, 61, 73, 66, 65, 64, 54, 63, 64, 67, 52, 69, 74, 81, 47, 47, 69, 64, 48, 62, 60, 59, 51, 67, 58, 62, 59, 73, 57, 61, 70, 41, 65, 68, 45, 89, 66, 62, 61, 67, 73, 80, 72, 64, 71, 61, 73, 89, 64, 76, 48, 74, 52, 103, 65, 67, 50, 72, 72, 71, 81, 72, 79, 78, 69, 82, 87, 63, 47, 63, 67, 57, 63, 57, 63, 65, 66, 69, 52, 66, 55, 55, 67, 57, 60, 61, 51, 48, 81, 60, 76, 59, 69, 69, 39, 81, 45, 75, 65, 47, 68, 74, 59, 61, 57, 78, 45, 80, 58, 82, 64, 60, 70, 72, 68, 50, 58, 67, 70, 67, 63, 50, 79, 56, 98, 52, 59, 72, 68, 69, 65, 67, 71, 65, 61, 40, 57, 47, 61, 58, 46, 62, 77, 69, 62, 51, 51, 76, 67, 58, 50, 52, 75, 71, 85, 73, 84, 42, 140, 54, 61, 48, 56, 49, 86, 58, 52, 68, 67, 71, 82, 67, 77, 77, 67, 58, 59, 61, 83, 73, 58, 62, 54, 79, 64, 82, 65, 117, 99, 67, 71, 73, 80, 59, 67, 54, 47, 62, 59, 44, 75, 75, 60, 37, 63, 62, 55, 75, 74, 73, 57, 60, 41, 47, 66, 88, 101, 67, 64, 62, 70, 87, 46, 47, 70, 91, 53, 90, 67, 52, 56, 57, 65, 61, 62, 59, 100, 59, 70, 62, 63, 57, 66, 53, 49, 72, 57, 55, 59, 62, 84, 57, 51, 91, 56, 54, 69, 106, 69, 49, 59, 61, 57, 49, 56, 63, 46, 69, 73, 89, 63, 61, 54, 77, 67, 77, 79, 84, 59, 65, 72, 61, 62, 79, 73, 65, 64, 88, 61, 47, 59, 80, 47, 59, 68, 77, 85, 62, 71, 65, 56, 54, 33, 59, 44, 86, 59, 65, 61, 61, 71, 63, 51, 55, 72, 63, 79, 74, 67, 83, 90, 129, 59, 83, 70, 60, 62, 71, 49, 68, 68, 84, 62, 66, 80, 55, 55, 56, 77, 59, 70, 88, 64, 124, 51, 63, 65, 50, 54, 99, 76, 61, 41, 97, 67, 40, 32, 59, 68, 78, 62, 61, 66, 69, 51, 57, 78, 55, 71, 86, 59, 70, 79, 99, 71, 61, 73, 58, 62, 51, 44, 67, 63, 52, 76, 58, 69, 81, 77, 54, 83, 81, 89, 100, 77, 57, 93, 50, 78, 50, 67, 74, 82, 60, 61, 68, 52, 40, 103, 49, 79, 68, 75, 61, 52, 76, 80, 77, 94, 46, 81, 55, 76, 52, 74, 60, 72, 94, 62, 71, 69, 56, 78, 73, 58, 70, 67, 71, 56, 54, 50, 59, 129, 50, 70, 75, 63, 61, 71, 57, 53, 106, 66, 59, 54, 57, 71, 54, 68, 61, 71, 45, 57, 59, 90, 85, 84, 65, 56, 48, 60, 63, 62, 55, 60, 40, 51, 66, 38, 67, 75, 40, 74, 53, 69, 87, 65, 103, 66, 51, 70, 65, 58, 63, 67, 54, 66, 57, 77, 77, 58, 57, 70, 65, 67, 73, 62, 69, 79, 44, 91, 74, 84, 52, 71, 
86, 52, 77, 81, 60, 77, 90, 47, 94, 75, 79, 86, 59, 44, 89, 64, 67, 70, 56, 56, 62, 66, 78, 70, 69, 55, 64, 49, 78, 55, 71, 44, 48, 65, 75, 55, 70, 85, 83, 79, 70, 47, 77, 74, 64, 64, 44, 73, 66, 79, 61, 64, 53, 54, 64, 70, 58, 35, 65, 85, 108, 47, 42, 77, 76, 67, 51, 76, 126, 73, 138, 72, 72, 68, 59, 62, 57, 66, 59, 54, 57, 96, 106, 58, 56, 67, 58, 61, 57, 66, 54, 64, 54, 55, 56, 52, 73, 78, 73, 55, 66, 50, 66, 42, 59, 49, 70, 54, 52, 83, 77, 52, 73, 57, 64, 66, 50, 110, 66, 134, 74, 55, 52, 68, 65, 44, 52, 67, 77, 64, 98, 69, 62, 57, 65, 66, 45, 78, 67, 60, 58, 67, 66, 55, 62, 61, 66, 62, 66, 70, 82, 45, 48, 81, 55, 80, 68, 89, 56, 67, 53, 58, 80, 54, 47, 65, 68, 67, 77, 57, 85, 61, 66, 76, 50, 58, 48, 62, 63, 52, 63, 55, 71, 76, 75, 61, 76, 74, 52, 73, 51, 112, 55, 69, 60, 60, 58, 69, 57, 63, 67, 52, 52, 84, 52, 60, 72, 51, 56, 86, 58, 45, 70, 51, 88, 67, 45, 62, 72, 56, 63, 82, 70, 79, 73, 75, 43, 25, 71, 67, 67, 57, 72, 56, 54, 52, 67, 70, 59, 66, 55, 62, 88, 65, 53, 62, 72, 65, 38, 53, 73, 77, 64, 67, 79, 43, 77, 35, 58, 69, 52, 80, 57, 103, 60, 71, 44, 75, 67, 57, 63, 89, 75, 58, 57, 63, 46, 77, 49, 57, 65, 58, 66, 76, 62, 75, 79, 56, 39, 59, 84, 78, 51, 47, 98, 53, 66, 49, 76, 73, 91, 67, 54, 69, 79, 66, 65, 58, 76, 102, 49, 42, 90, 80, 55, 50, 80, 47, 38, 75, 70, 85, 68, 55, 65, 74, 50, 51, 53, 54, 72, 83, 51, 59, 87, 64, 77, 78, 54, 80, 69, 84, 72, 109, 82, 48, 55, 55, 63, 38, 66, 59, 68, 59, 46, 58, 48, 115, 76, 68, 59, 54, 56, 70, 113, 64, 77, 72, 59, 86, 43, 58, 61, 59, 71, 73, 76, 68, 69, 54, 69, 70, 61, 72, 66, 85, 55, 90, 63, 56, 46, 57, 59, 67, 74, 80, 61, 70, 57, 66, 104, 98, 51, 45, 71, 74, 62, 53, 55, 77, 66, 54, 45, 76, 61, 77, 83, 44, 73, 69, 64, 65, 107, 58, 51, 53, 101, 113, 63, 72, 60, 60, 45, 46, 55, 53, 64, 67, 55, 47, 51, 62, 53, 67, 69, 85, 113, 64, 67, 46, 66, 57, 90, 107, 67, 74, 52, 61, 52, 44, 40, 78, 72, 47, 80, 61, 84, 56, 73, 62, 51, 64, 55, 51, 53, 72, 53, 57, 86, 57, 64, 67, 54, 69, 62, 60, 49, 98, 65, 52, 67, 63, 61, 45, 76, 77, 86, 49, 67, 63, 81, 75, 67, 111, 59, 75, 62, 65, 67, 56, 70, 65, 49, 72, 76, 45, 60, 50, 62, 64, 54, 76, 49, 92, 49, 83, 68, 62, 91, 61, 59, 39, 42, 69, 53, 52, 59, 54, 90, 51, 66, 55, 58, 136, 63, 73, 76, 58, 64, 71, 80, 105, 66, 63, 68, 48, 35, 42, 75, 78, 54, 68, 36, 51, 51, 52, 59, 58, 59, 70, 57, 108, 125, 50, 59, 70, 44, 51, 89, 83, 72, 67, 72, 56, 62, 55, 60, 49, 55, 56, 65, 51, 70, 101, 50, 44, 69, 54, 59, 60, 46, 68, 45, 64, 78, 46, 50, 127, 86, 68, 63, 78, 83, 43, 64, 87, 70, 76, 86, 80, 69, 101, 71, 94, 57, 60, 53, 55, 63, 66, 52, 39, 62, 47, 48, 107, 64, 63, 64, 53, 77, 70, 63, 61, 49, 108, 53, 41, 48, 97, 85, 66, 52, 49, 67, 48, 88, 37, 64, 64, 78, 46, 61, 59, 54, 52, 57, 74, 54, 64, 50, 61, 42, 86, 56, 63, 69, 76, 65, 82, 54, 68, 73, 69, 59, 51, 73, 61, 83, 49, 88, 74, 61, 65, 51, 64, 42, 59, 59, 74, 71, 73, 49, 60, 67, 66, 134, 47, 85, 94, 90, 66, 48, 50, 59, 54, 60, 67, 75, 90, 63, 66, 50, 67, 73, 93, 83, 59, 73, 60, 58, 37, 61, 58, 63, 63, 80, 86, 74, 66, 63, 90, 72, 56, 50, 73, 65, 50, 51, 59, 75, 52, 55, 60, 56, 54, 93, 75, 51, 66, 72, 62, 64, 73, 76, 75, 65, 46, 67, 43, 41, 53, 49, 59, 151, 53, 65, 74, 66, 72, 56, 97, 51, 96, 48, 53, 43, 42, 67, 93, 59, 68, 53, 68, 58, 62, 51, 73, 51, 59, 69, 45, 81, 39, 89, 59, 101, 59, 67, 53, 68, 62, 53, 51, 68, 81, 74, 52, 64, 51, 71, 65, 110, 104, 79, 37, 45, 57, 78, 58, 58, 58, 56, 57, 59, 88, 74, 45, 60, 78, 51, 52, 100, 46, 50, 55, 54, 79, 61, 70, 60, 103, 65, 56, 61, 80, 79, 68, 59, 73, 53, 86, 54, 58, 57, 57, 61, 44, 69, 68, 75, 55, 60, 71, 139, 78, 82, 67, 87, 
61, 100, 70, 71, 88, 66, 94, 52, 58, 46, 74, 54, 65, 51, 64, 46, 78, 35, 76, 69, 68, 63, 90, 78, 85, 80, 79, 47, 78, 75, 84, 76, 89, 50, 56, 90, 44, 48, 62, 97, 58, 68, 63, 49, 63, 85, 87, 64, 117, 69, 105, 55, 65, 85, 94, 47, 63, 68, 47, 72, 56, 64, 65, 51, 54, 86, 66, 65, 67, 75, 68, 91, 63, 108, 79, 67, 86, 56, 80, 67, 74, 93, 58, 56, 55, 45, 60, 58, 62, 58, 61, 56, 42, 49, 60, 55, 60, 61, 64, 63, 106, 67, 59, 52, 74, 69, 60, 62, 69, 60, 53, 47, 99, 90, 68, 79, 78, 50, 46, 44, 99, 58, 64, 67, 37, 51, 65, 42, 62, 58, 60, 56, 73, 52, 63, 78, 60, 52, 77, 47, 57, 104, 48, 70, 64, 59, 42, 68, 53, 70, 78, 55, 75, 86, 78, 56, 60, 68, 65, 59, 76, 78, 92, 114, 101, 61, 72, 38, 50, 85, 47, 72, 73, 59, 81, 94, 63, 45, 67, 48, 58, 58, 50, 100, 102, 77, 58, 58, 69, 46, 71, 138, 50, 83, 67, 48, 66, 69, 54, 67, 55, 72, 65, 58, 49, 72, 59, 46, 68, 71, 60, 77, 54, 84, 60, 42, 95, 45, 77, 63, 75, 95, 92, 92, 70, 79, 48, 53, 68, 69, 78, 63, 54, 55, 74, 74, 78, 63, 59, 50, 61, 67, 53, 54, 52, 73, 52, 74, 79, 47, 54, 47, 60, 71, 81, 71, 75, 104, 76, 85, 59, 81, 85, 52, 55, 54, 57, 57, 61, 59, 56, 50, 62, 71, 62, 107, 50, 57, 56, 69, 56, 55, 66, 67, 67, 59, 62, 65, 83, 71, 79, 65, 50, 42, 124, 56, 67, 98, 70, 45, 59, 50, 76, 103, 64, 45, 63, 99, 51, 59, 73, 81, 57, 68, 40, 58, 51, 105, 83, 66, 62, 52, 71, 56, 64, 55, 66, 70, 36, 81, 55, 79, 63, 56, 82, 51, 61, 55, 50, 49, 62, 47, 65, 81, 68, 66, 55, 50, 94, 59, 69, 64, 73, 83, 73, 49, 70, 61, 63, 67, 69, 66, 67, 51, 65, 46, 44, 71, 61, 89, 73, 91, 82, 60, 66, 61, 56, 81, 69, 41, 88, 48, 98, 60, 55, 52, 63, 70, 67, 48, 80, 68, 85, 69, 50, 56, 36, 67, 60, 82, 75, 88, 55, 45, 51, 41, 115, 48, 65, 67, 69, 48, 52, 84, 61, 108, 82, 103, 127, 52, 56, 63, 98, 63, 78, 86, 53, 61, 55, 61, 71, 23, 67, 81, 44, 51, 53, 61, 72, 94, 54, 65, 60, 61, 67, 47, 60, 58, 71, 53, 58, 67, 61, 54, 70, 63, 56, 98, 52, 60, 92, 51, 71, 70, 64, 84, 68, 64, 53, 81, 108, 113, 40, 76, 67, 77, 64, 63, 58, 66, 76, 62, 53, 54, 50, 57, 97, 76, 55, 65, 63, 73, 67, 68, 58, 75, 91, 67, 59, 56, 70, 83, 49, 57, 51, 68, 61, 62, 51, 86, 128, 72, 61, 57, 67, 63, 90, 83, 62, 63, 57, 38, 82, 57, 60, 65, 92, 59, 72, 77, 59, 73, 71, 81, 96, 86, 160, 65, 56, 46, 61, 51, 88, 54, 64, 49, 72, 52, 51, 82, 84, 73, 45, 60, 53, 50, 66, 64, 53, 47, 61, 68, 72, 70, 67, 73, 88, 52, 65, 70, 69, 77, 83, 70, 53, 49, 87, 88, 67, 62, 46, 35, 50, 61, 76, 45, 59, 60, 44, 38, 67, 54, 58, 81, 59, 68, 66, 79, 55, 114, 76, 51, 63, 62, 58, 57, 65, 71, 63, 48, 66, 99, 46, 65, 75, 56, 46, 81, 91, 48, 86, 63, 51, 67, 64, 51, 77, 36, 54, 61, 57, 80, 58, 70, 87, 57, 50, 71, 89, 46, 62, 66, 58, 67, 62, 80, 70, 44, 61, 123, 61, 57, 62, 75, 77, 62, 63, 68, 56, 93, 104, 75, 55, 80, 70, 73, 71, 64, 60, 62, 55, 53, 61, 87, 75, 74, 54, 59, 81, 61, 71, 69, 58, 59, 71, 64, 77, 45, 84, 90, 72, 49, 53, 61, 35, 77, 63, 104, 59, 72, 61, 67, 86, 87, 67, 52, 59, 76, 64, 70, 48, 69, 53, 86, 51, 55, 40, 83, 50, 43, 53, 66, 107, 82, 61, 46, 78, 48, 62, 57, 68, 76, 60, 72, 54, 82, 36, 81, 58, 88, 72, 56, 60, 77, 63, 69, 51, 58, 43, 45, 82, 70, 58, 74, 57, 48, 63, 55, 75, 124, 56, 56, 78, 64, 57, 51, 57, 61, 60, 86, 74, 71, 104, 53, 52, 60, 61, 56, 80, 105, 61, 64, 64, 54, 65, 49, 66, 48, 71, 55, 55, 57, 56, 78, 72, 70, 71, 63, 74, 88, 66, 93, 64, 65, 56, 69, 71, 48, 51, 58, 68, 51, 62, 71, 54, 77, 54, 83, 72, 63, 57, 66, 78, 68, 73, 57, 71, 68, 67, 75, 63, 118, 77, 56, 81, 74, 40, 59, 71, 80, 65, 68, 57, 60, 50, 48, 58, 53, 74, 53, 62, 50, 69, 56, 61, 55, 60, 62, 59, 77, 74, 66, 51, 71, 60, 66, 60, 50, 53, 52, 59, 51, 62, 64, 71, 59, 85, 68, 70, 63, 
88, 63, 60, 60, 62, 54, 65, 55, 58, 51, 52, 52, 52, 54, 62, 65, 46, 47, 72, 71, 68, 91, 68, 70, 56, 80, 64, 63, 54, 59, 69, 40, 56, 59, 129, 56, 64, 78, 53, 60, 66, 44, 53, 65, 81, 61, 55, 82, 66, 41, 62, 85, 59, 90, 98, 65, 78, 112, 65, 52, 81, 54, 100, 68, 66, 48, 88, 65, 59, 73, 55, 87, 57, 52, 104, 82, 60, 64, 60, 67, 80, 70, 52, 55, 92, 66, 68, 73, 93, 69, 46, 74, 88, 61, 51, 76, 90, 61, 75, 65, 70, 65, 44, 71, 71, 56, 79, 64, 41, 74, 75, 52, 88, 49, 56, 61, 55, 47, 60, 55, 48, 48, 47, 78, 56, 94, 97, 98, 59, 49, 67, 65, 74, 84, 45, 101, 57, 74, 59, 73, 67, 65, 62, 58, 81, 54, 91, 51, 47, 122, 55, 74, 58, 72, 52, 87, 49, 100, 86, 58, 73, 51, 55, 65, 51, 63, 88, 89, 38, 67, 64, 55, 84, 67, 68, 85, 83, 59, 76, 41, 126, 89, 82, 68, 70, 81, 63, 105, 54, 68, 52, 78, 64, 99, 82, 41, 62, 70, 71, 56, 58, 68, 81, 106, 75, 66, 59, 56, 97, 67, 56, 59, 66, 41, 96, 65, 52, 58, 57, 60, 66, 45, 49, 65, 55, 56, 62, 55, 47, 64, 48, 45, 61, 65, 66, 77, 69, 59, 56, 51, 64, 66, 66, 73, 72, 74, 58, 47, 103, 50, 55, 63, 84, 55, 116, 63, 90, 76, 79, 50, 67, 83, 62, 62, 69, 51, 85, 69, 99, 89, 52, 75, 55, 125, 86, 54, 76, 67, 52, 59, 62, 85, 55, 49, 71, 63, 80, 68, 100, 54, 63, 55, 67, 55, 63, 91, 56, 75, 63, 57, 68, 61, 48, 52, 60, 69, 100, 54, 44, 101, 111, 71, 77, 55, 59, 66, 47, 54, 57, 49, 68, 66, 68, 56, 58, 63, 66, 57, 65, 51, 42, 88, 44, 74, 56, 67, 44, 80, 61, 41, 55, 66, 58, 59, 67, 91, 78, 51, 67, 70, 74, 62, 84, 51, 60, 59, 58, 42, 60, 77, 58, 70, 70, 77, 77, 80, 38, 65, 56, 78, 50, 68, 60, 70, 85, 67, 47, 65, 71, 64, 63, 62, 65, 64, 64, 57, 81, 55, 54, 77, 52, 65, 73, 74, 57, 62, 51, 82, 51, 66, 59, 78, 87, 63, 55, 54, 78, 69, 69, 70, 51, 63, 53, 61, 72, 43, 63, 73, 53, 53, 63, 59, 108, 49, 57, 40, 54, 78, 43, 41, 54, 75, 66, 101, 78, 51, 77, 73, 98, 90, 65, 71, 60, 73, 59, 86, 59, 58, 67, 52, 59, 63, 79, 59, 34, 85, 54, 56, 69, 61, 71, 63, 69, 58, 83, 75, 72, 82, 70, 60, 54, 75, 89, 75, 87, 65, 55, 75, 60, 74, 70, 77, 72, 66, 57, 51, 56, 62, 54, 49, 57, 61, 57, 54, 77, 47, 65, 58, 51, 88, 77, 41, 63, 85, 67, 73, 96, 57, 78, 53, 111, 63, 54, 71, 69, 79, 68, 68, 67, 79, 50, 58, 74, 72, 70, 60, 76, 79, 54, 107, 52, 65, 64, 71, 82, 84, 53, 47, 56, 94, 54, 51, 49, 67, 71, 43, 53, 62, 69, 62, 122, 97, 69, 70, 52, 90, 65, 86, 70, 56, 63, 71, 60, 74, 66, 54, 78, 64, 67, 49, 81, 50, 59, 71, 72, 45, 44, 63, 83, 65, 47, 58, 55, 78, 71, 61, 69, 73, 52, 59, 99, 76, 68, 59, 74, 70, 92, 61, 56, 68, 59, 68, 51, 75, 75, 83, 49, 71, 64, 64, 70, 73, 55, 54, 64, 57, 56, 76, 47, 53, 62, 46, 59, 65, 80, 113, 56, 60, 84, 68, 53, 81, 87, 68, 82, 104, 59, 42, 54, 54, 76, 53, 48, 49, 58, 76, 51, 57, 73, 64, 78, 154, 65, 103, 72, 65, 101, 67, 69, 86, 72, 43, 100, 62, 58, 53, 93, 37, 68, 58, 48, 85, 74, 71, 70, 58, 95, 53, 51, 68, 43, 72, 35, 57, 87, 47, 111, 64, 80, 68, 97, 56, 41, 35, 78, 71, 66, 78, 93, 60, 146, 69, 67, 49, 69, 50, 87, 76, 91, 137, 54, 63, 72, 72, 81, 95, 45, 66, 42, 50, 53, 51, 86, 56, 76, 65, 159, 61, 72, 72, 65, 69, 45, 59, 78, 54, 68, 87, 59, 56, 70, 100, 55, 65, 86, 57, 79, 39, 112, 47, 52, 57, 80, 64, 91, 90, 95, 53, 67, 62, 50, 79, 29, 58, 55, 104, 64, 89, 74, 81, 57, 74, 40, 57, 54, 75, 57, 58, 53, 53, 59, 66, 44, 46, 64, 67, 43, 57, 68, 64, 65, 59, 68, 91, 77, 57, 71, 66, 65, 98, 78, 70, 55, 68, 60, 61, 49, 70, 89, 63, 85, 82, 71, 71, 116, 58, 58, 39, 31, 73, 67, 72, 73, 88, 73, 58, 62, 79, 64, 66, 45, 66, 76, 64, 87, 84, 102, 59, 50, 76, 59, 96, 49, 53, 40, 61, 74, 73, 70, 71, 54, 89, 60, 39, 59, 45, 53, 56, 58, 117, 78, 59, 64, 85, 47, 48, 72, 55, 93, 66, 99, 113, 61, 64, 47, 56, 74, 61, 46, 
54, 68, 38, 83, 70, 67, 93, 60, 81, 86, 68, 63, 61, 93, 64, 74, 56, 58, 42, 56, 59, 67, 82, 66, 67, 94, 43, 79, 60, 69, 60, 86, 73, 66, 84, 42, 64, 48, 54, 56, 52, 88, 61, 42, 75, 65, 55, 39, 41, 77, 80, 74, 51, 65, 65, 71, 68, 50, 64, 42, 55, 84, 74, 58, 63, 87, 53, 67, 82, 116, 56, 117, 62, 60, 60, 59, 67, 100, 39, 71, 58, 73, 51, 82, 48, 65, 54, 54, 42, 63, 47, 74, 55, 60, 56, 54, 70, 74, 50, 87, 54, 61, 71, 53, 57, 59, 73, 50, 58, 59, 66, 70, 58, 61, 62, 64, 81, 73, 71, 106, 79, 48, 71, 55, 91, 57, 80, 52, 48, 76, 53, 91, 51, 72, 45, 53, 65, 78, 57, 43, 63, 65, 62, 65, 52, 61, 60, 55, 81, 75, 83, 49, 63, 48, 89, 67, 51, 71, 61, 82, 73, 54, 57, 55, 71, 57, 40, 81, 61, 55, 59, 51, 61, 68, 98, 66, 88, 59, 51, 81, 65, 47, 63, 55, 74, 36, 76, 58, 51, 58, 79, 68, 86, 71, 68, 53, 90, 55, 39, 105, 59, 80, 65, 55, 65, 59, 59, 71, 96, 60, 51, 73, 59, 58, 86, 59, 59, 46, 66, 60, 70, 57, 71, 69, 47, 31, 118, 84, 53, 60, 38, 65, 85, 59, 54, 103, 55, 63, 47, 55, 69, 50, 56, 62, 73, 94, 51, 58, 65, 64, 63, 61, 75, 62, 62, 45, 64, 62, 62, 73, 53, 54, 63, 90, 67, 58, 60, 45, 62, 82, 59, 103, 64, 83, 74, 52, 76, 66, 64, 86, 47, 68, 71, 54, 56, 63, 61, 75, 52, 73, 68, 59, 85, 62, 54, 49, 61, 50, 57, 70, 69, 83, 65, 50, 54, 58, 104, 63, 76, 77, 69, 55, 69, 93, 55, 75, 73, 64, 38, 80, 64, 52, 70, 86, 60, 61, 107, 66, 63, 90, 124, 49, 103, 77, 61, 86, 54, 49, 96, 61, 39, 58, 82, 78, 77, 67, 85, 70, 84, 73, 52, 49, 61, 76, 67, 60, 104, 63, 48, 50, 71, 46, 71, 37, 63, 54, 72, 66, 43, 66, 60, 39, 45, 78, 42, 86, 60, 56, 78, 56, 64, 48, 65, 72, 78, 70, 62, 47, 58, 55, 108, 79, 60, 54, 53, 50, 51, 60, 70, 72, 72, 69, 144, 49, 61, 69, 83, 74, 53, 66, 53, 68, 53, 93, 66, 58, 70, 54, 110, 65, 65, 108, 76, 54, 75, 57, 51, 69, 67, 88, 84, 36, 63, 66, 79, 90, 61, 58, 51, 68, 63, 69, 107, 62, 58, 106, 69, 52, 61, 73, 59, 70, 54, 60, 77, 73, 71, 50, 56, 77, 54, 74, 44, 66, 61, 49, 56, 71, 86, 53, 97, 69, 46, 51, 40, 76, 55, 53, 89, 75, 64, 61, 66, 74, 92, 85, 58, 72, 52, 74, 74, 42, 60, 70, 71, 78, 80, 62, 44, 67, 50, 74, 58, 91, 49, 86, 80, 69, 74, 49, 41, 55, 75, 60, 80, 74, 54, 129, 61, 58, 36, 49, 69, 69, 69, 76, 39, 98, 64, 64, 57, 62, 74, 55, 111, 60, 76, 71, 73, 86, 47, 65, 65, 102, 72, 63, 49, 101, 58, 57, 101, 63, 73, 72, 81, 71, 51, 66, 56, 47, 65, 72, 50, 68, 48, 30, 61, 84, 67, 59, 59, 69, 71, 61, 54, 66, 68, 55, 83, 106, 74, 66, 64, 48, 67, 48, 53, 50, 57, 72, 51, 42, 63, 63, 81, 82, 51, 59, 84, 60, 58, 61, 52, 56, 36, 65, 62, 61, 58, 67, 66, 78, 57, 66, 61, 62, 50, 67, 69, 43, 49, 94, 75, 61, 65, 44, 54, 47, 98, 42, 74, 50, 57, 66, 95, 79, 40, 50, 60, 70, 65, 52, 69, 52, 54, 69, 66, 72, 86, 59, 78, 44, 57, 60, 61, 55, 87, 69, 53, 53, 36, 45, 81, 95, 76, 60, 82, 41, 47, 61, 52, 58, 62, 62, 86, 63, 106, 76, 52, 54, 66, 91, 51, 70, 71, 53, 83, 65, 38, 57, 60, 68, 40, 68, 53, 70, 60, 62, 73, 79, 62, 78, 63, 72, 61, 70, 73, 61, 73, 51, 48, 51, 76, 113, 74, 52, 77, 56, 105, 76, 79, 98, 66, 74, 84, 54, 89, 38, 63, 76, 55, 70, 73, 58, 96, 75, 65, 47, 67, 52, 68, 70, 90, 65, 72, 70, 75, 73, 67, 55, 73, 84, 62, 65, 65, 67, 76, 80, 37, 80, 45, 62, 122, 97, 72, 73, 54, 55, 80, 55, 58, 63, 52, 93, 56, 70, 45, 55, 84, 69, 57, 66, 81, 72, 86, 60, 61, 56, 75, 73, 61, 55, 67, 75, 51, 66, 93, 62, 94, 82, 74, 52, 58, 67, 54, 59, 61, 125, 62, 61, 59, 70, 81, 47, 90, 51, 69, 96, 78, 83, 116, 75, 70, 95, 62, 68, 49, 73, 59, 83, 58, 48, 52, 50, 62, 73, 83, 99, 55, 46, 63, 58, 65, 49, 70, 55, 94, 58, 53, 52, 79, 62, 64, 51, 81, 75, 70, 72, 66, 53, 97, 52, 67, 54, 46, 66, 69, 82, 56, 85, 77, 72, 81, 54, 60, 49, 91, 62, 77, 48, 71, 
60, 86, 103, 50, 64, 49, 67, 67, 67, 69, 60, 63, 108, 59, 56, 81, 64, 67, 50, 54, 68, 36, 83, 64, 72, 80, 81, 61, 57, 49, 50, 73, 74, 56, 77, 68, 62, 53, 74, 76, 71, 47, 67, 62, 59, 57, 58, 55, 77, 52, 46, 85, 56, 73, 80, 71, 58, 71, 60, 77, 84, 63, 59, 98, 58, 77, 48, 74, 61, 80, 68, 78, 114, 72, 73, 86, 91, 62, 62, 75, 68, 69, 44, 81, 64, 81, 42, 50, 70, 55, 61, 71, 65, 74, 61, 74, 59, 62, 76, 65, 46, 41, 59, 56, 50, 53, 55, 82, 61, 48, 68, 79, 64, 43, 81, 71, 88, 43, 63, 81, 76, 77, 68, 49, 57, 57, 62, 89, 55, 42, 82, 60, 100, 58, 71, 63, 48, 55, 60, 55, 56, 42, 43, 59, 56, 66, 89, 60, 68, 151, 72, 54, 77, 84, 96, 83, 71, 46, 70, 43, 70, 67, 42, 65, 66, 58, 83, 67, 62, 40, 45, 68, 105, 89, 60, 46, 77, 71, 47, 68, 46, 57, 68, 59, 67, 60, 71, 79, 66, 73, 89, 65, 115, 68, 84, 51, 49, 62, 48, 70, 68, 91, 49, 94, 85, 55, 107, 67, 58, 58, 55, 75, 82, 54, 66, 64, 53, 90, 64, 48, 66, 48, 78, 55, 35, 69, 45, 65, 44, 63, 68, 44, 72, 66, 68, 89, 42, 68, 65, 69, 70, 81, 65, 64, 73, 96, 45, 72, 69, 55, 52, 71, 70, 59, 65, 57, 53, 97, 47, 75, 64, 70, 73, 76, 80, 68, 51, 51, 61, 57, 86, 93, 91, 69, 73, 51, 65, 48, 76, 65, 61, 78, 58, 64, 60, 69, 72, 41, 81, 60, 74, 65, 63, 64, 77, 61, 86, 105, 48, 74, 53, 69, 75, 60, 90, 66, 57, 73, 47, 58, 84, 63, 95, 55, 59, 46, 39, 57, 72, 53, 75, 52, 59, 43, 76, 72, 57, 64, 51, 51, 68, 50, 63, 83, 68, 48, 64, 48, 68, 24, 55, 115, 56, 72, 51, 94, 62, 34, 71, 62, 67, 75, 73, 58, 52, 47, 55, 37, 87, 46, 53, 74, 61, 67, 96, 73, 68, 63, 60, 51, 69, 60, 70, 66, 52, 47, 72, 56, 55, 48, 69, 57, 71, 57, 84, 55, 63, 54, 67, 95, 35, 84, 62, 85, 47, 50, 55, 58, 60, 97, 73, 62, 42, 63, 60, 72, 84, 67, 52, 64, 42, 69, 97, 58, 71, 78, 63, 78, 84, 42, 61, 73, 118, 45, 59, 63, 72, 56, 49, 77, 66, 37, 58, 57, 65, 88, 75, 73, 64, 91, 69, 56, 61, 63, 88, 47, 72, 78, 53, 33, 78, 55, 73, 76, 79, 43, 67, 60, 35, 55, 58, 77, 43, 57, 55, 70, 71, 92, 65, 42, 45, 60, 70, 78, 75, 65, 81, 59, 66, 44, 57, 42, 65, 53, 48, 54, 63, 56, 74, 58, 67, 81, 59, 67, 58, 63, 62, 62, 67, 68, 52, 70, 67, 41, 46, 64, 70, 58, 60, 58, 42, 71, 66, 47, 79, 65, 71, 61, 64, 83, 62, 80, 68, 53, 57, 47, 52, 59, 65, 61, 59, 71, 86, 80, 97, 65, 80, 53, 63, 65, 65, 79, 69, 63, 82, 50, 58, 96, 67, 68, 66, 51, 65, 77, 76, 108, 61, 64, 56, 83, 59, 60, 77, 48, 69, 68, 66, 54, 70, 101, 58, 81, 53, 60, 49, 71, 65, 52, 54, 61, 76, 80, 53, 58, 72, 37, 60, 78, 67, 39, 80, 55, 81, 81, 55, 68, 61, 72, 47, 61, 68, 64, 63, 57, 50, 67, 44, 70, 57, 90, 40, 61, 101, 52, 72, 82, 54, 80, 75, 68, 65, 75, 65, 75, 58, 96, 50, 56, 74, 61, 58, 53, 61, 80, 39, 77, 44, 51, 62, 118, 54, 98, 66, 68, 55, 70, 67, 66, 68, 55, 73, 59, 65, 65, 91, 52, 68, 57, 46, 40, 53, 56, 42, 57, 54, 76, 73, 62, 65, 78, 56, 73, 61, 130, 79, 70, 57, 69, 69, 61, 58, 57, 81, 61, 92, 76, 72, 63, 53, 78, 62, 74, 47, 76, 95, 71, 48, 58, 74, 91, 89, 56, 64, 68, 72, 54, 84, 52, 83, 56, 73, 65, 61, 59, 46, 104, 65, 49, 67, 102, 60, 78, 78, 60, 73, 53, 61, 55, 70, 110, 43, 56, 64, 53, 58, 78, 55, 51, 66, 72, 43, 80, 57, 46, 54, 87, 70, 75, 57, 65, 66, 88, 88, 48, 58, 70, 89, 66, 56, 74, 61, 64, 75, 49, 53, 63, 67, 42, 84, 73, 94, 78, 57, 83, 58, 64, 41, 81, 59, 91, 84, 58, 84, 67, 64, 67, 57, 67, 69, 68, 58, 52, 49, 92, 62, 78, 52, 65, 82, 75, 67, 58, 73, 101, 72, 79, 90, 58, 91, 50, 54, 62, 105, 65, 30, 56, 72, 68, 44, 61, 47, 58, 58, 66, 72, 58, 58, 78, 75, 76, 71, 60, 65, 67, 51, 48, 98, 70, 68, 82, 65, 71, 94, 71, 60, 103, 57, 58, 56, 63, 64, 59, 47, 59, 41, 68, 71, 62, 123, 45, 57, 59, 67, 65, 63, 61, 67, 54, 66, 79, 79, 105, 79, 58, 88, 63, 56, 74, 70, 45, 90, 62, 
55, 59, 63, 44, 82, 64, 57, 57, 68, 59, 60, 87, 57, 68, 58, 63, 83, 67, 61, 71, 126, 110, 70, 53, 78, 67, 62, 79, 71, 65, 67, 63, 61, 81, 69, 66, 33, 54, 87, 66, 51, 80, 71, 68, 60, 62, 51, 93, 64, 43, 56, 79, 117, 67, 66, 81, 81, 47, 59, 67, 71, 55, 59, 107, 74, 84, 45, 57, 79, 59, 65, 80, 60, 46, 45, 50, 57, 57, 104, 55, 78, 107, 53, 66, 57, 85, 56, 80, 70, 115, 81, 54, 57, 59, 54, 62, 79, 64, 43, 55, 64, 65, 68, 65, 64, 67, 136, 50, 49, 62, 88, 60, 88, 41, 75, 89, 91, 66, 50, 48, 85, 71, 77, 83, 51, 71, 82, 39, 62, 63, 58, 60, 55, 90, 56, 78, 68, 89, 61, 52, 90, 84, 67, 83, 100, 72, 56, 53, 65, 66, 79, 52, 48, 70, 54, 72, 87, 61, 84, 67, 38, 65, 69, 72, 65, 73, 77, 63, 66, 120, 74, 80, 64, 69, 72, 57, 84, 62, 48, 93, 58, 91, 57, 79, 81, 90, 59, 71, 51, 60, 53, 59, 81, 42, 65, 53, 81, 43, 67, 53, 61, 79, 60, 44, 52, 63, 66, 58, 75, 51, 56, 78, 75, 65, 71, 65, 61, 66, 83, 51, 75, 119, 74, 77, 77, 79, 86, 34, 82, 35, 92, 44, 65, 47, 72, 44, 64, 52, 63, 84, 41, 45, 46, 89, 68, 49, 68, 86, 48, 68, 63, 78, 83, 50, 58, 57, 73, 72, 49, 84, 53, 55, 59, 57, 72, 63, 82, 53, 61, 68, 53, 64, 57, 62, 55, 43, 62, 63, 58, 68, 64, 72, 79, 77, 51, 55, 57, 67, 65, 73, 54, 73, 42, 56, 62, 74, 70, 57, 91, 75, 81, 76, 79, 62, 66, 58, 77, 76, 53, 63, 54, 64, 55, 67, 38, 68, 114, 54, 64, 60, 79, 85, 63, 64, 82, 51, 41, 67, 54, 48, 52, 70, 53, 58, 48, 96, 69, 71, 80, 58, 69, 67, 56, 75, 60, 116, 86, 48, 80, 87, 58, 62, 65, 66, 70, 52, 79, 45, 53, 67, 67, 68, 60, 75, 68, 92, 73, 63, 69, 95, 75, 65, 69, 75, 52, 57, 43, 35, 59, 64, 67, 67, 81, 94, 93, 66, 91, 71, 67, 49, 47, 80, 52, 78, 103, 84, 65, 55, 113, 66, 72, 65, 64, 70, 70, 72, 69, 68, 56, 64, 50, 42, 76, 61, 69, 50, 51, 57, 38, 78, 51, 72, 61, 48, 65, 67, 73, 60, 63, 62, 62, 44, 59, 86, 36, 57, 56, 82, 55, 59, 152, 59, 46, 73, 91, 84, 59, 81, 57, 66, 45, 52, 51, 57, 92, 38, 60, 71, 72, 72, 59, 50, 70, 70, 74, 39, 71, 78, 62, 59, 55, 59, 58, 62, 84, 74, 45, 56, 58, 98, 78, 61, 62, 94, 61, 46, 46, 54, 64, 80, 99, 75, 74, 65, 77, 85, 50, 110, 52, 56, 48, 90, 52, 83, 58, 78, 40, 70, 80, 64, 69, 77, 92, 67, 69, 56, 70, 65, 71, 37, 57, 81, 67, 61, 50, 75, 64, 62, 71, 62, 44, 70, 89, 64, 43, 66, 76, 64, 50, 61, 66, 77, 70, 47, 62, 59, 81, 77, 34, 65, 66, 94, 64, 105, 54, 67, 80, 46, 76, 55, 36, 70, 62, 64, 77, 66, 50, 55, 87, 58, 69, 99, 76, 59, 58, 78, 62, 58, 69, 42, 52, 61, 50, 66, 51, 74, 80, 73, 69, 83, 46, 53, 62, 66, 83, 67, 57, 62, 53, 70, 45, 70, 58, 59, 50, 96, 70, 56, 102, 36, 69, 61, 68, 67, 65, 96, 77, 61, 54, 91, 66, 63, 55, 77, 65, 77, 55, 61, 79, 43, 95, 93, 73, 53, 73, 61, 66, 43, 61, 62, 85, 46, 91, 53, 48, 63, 61, 105, 72, 66, 98, 55, 56, 60, 52, 56, 70, 65, 98, 69, 63, 49, 71, 46, 76, 63, 57, 95, 78, 59, 49, 85, 85, 80, 52, 74, 64, 73, 99, 81, 37, 77, 73, 52, 65, 64, 85, 88, 66, 59, 73, 71, 83, 64, 50, 73, 79, 54, 51, 56, 127, 66, 153, 93, 80, 52, 67, 58, 53, 30, 55, 74, 55, 46, 67, 59, 83, 64, 63, 54, 69, 66, 28, 71, 71, 50, 38, 60, 77, 70, 69, 64, 67, 63, 57, 55, 49, 55, 48, 52, 68, 60, 54, 95, 54, 70, 59, 50, 67, 112, 79, 68, 88, 66, 69, 65, 41, 40, 45, 61, 52, 70, 67, 64, 59, 66, 68, 75, 91, 57, 62, 53, 56, 75, 68, 98, 68, 74, 51, 58, 44, 72, 70, 69, 45, 66, 54, 40, 52, 74, 77, 76, 69, 61, 83, 86, 100, 64, 56, 54, 67, 91, 70, 58, 55, 81, 63, 64, 62, 59, 46, 78, 93, 77, 50, 45, 74, 68, 74, 73, 74, 51, 69, 51, 58, 68, 80, 61, 69, 61, 70, 56, 59, 52, 63, 53, 68, 75, 57, 61, 47, 64, 57, 75, 66, 60, 88, 55, 69, 68, 73, 45, 88, 73, 41, 64, 46, 58, 78, 66, 93, 75, 83, 75, 90, 66, 65, 45, 56, 79, 70, 100, 79, 68, 49, 69, 76, 73, 83, 54, 57, 86, 
63, 57, 77, 54, 69, 78, 70, 106, 85, 48, 67, 86, 60, 57, 48, 84, 51, 65, 57, 67, 42, 80, 53, 63, 68, 85, 68, 61, 60, 58, 104, 55, 75, 80, 59, 43, 99, 48, 72, 72, 83, 86, 97, 63, 43, 73, 74, 62, 72, 62, 77, 59, 59, 97, 62, 58, 53, 53, 62, 90, 55, 110, 69, 50, 69, 56, 63, 73, 56, 84, 92, 58, 66, 84, 47, 58, 61, 69, 59, 73, 78, 68, 55, 61, 71, 47, 63, 99, 59, 52, 43, 64, 52, 61, 68, 60, 57, 64, 79, 63, 71, 47, 71, 48, 51, 74, 62, 75, 59, 75, 49, 62, 70, 64, 75, 80, 63, 87, 79, 76, 77, 69, 86, 73, 79, 97, 46, 65, 39, 66, 71, 80, 56, 145, 68, 40, 65, 54, 76, 63, 59, 83, 49, 47, 66, 67, 70, 64, 70, 80, 61, 54, 77, 82, 53, 66, 73, 65, 57, 62, 71, 75, 53, 58, 61, 59, 78, 63, 54, 54, 63, 61, 75, 54, 61, 44, 49, 57, 74, 52, 60, 95, 60, 67, 44, 78, 69, 105, 79, 63, 63, 67, 70, 79, 59, 82, 64, 53, 58, 77, 63, 56, 87, 66, 85, 61, 48, 65, 53, 70, 58, 56, 74, 64, 67, 91, 66, 63, 49, 48, 94, 48, 71, 74, 62, 68, 71, 62, 85, 68, 62, 48, 89, 73, 60, 62, 63, 94, 60, 82, 55, 57, 64, 50, 64, 58, 56, 102, 47, 65, 61, 67, 60, 67, 70, 62, 72, 56, 66, 53, 80, 72, 66, 48, 87, 71, 70, 56, 54, 61, 53, 55, 60, 63, 66, 84, 61, 49, 82, 61, 61, 64, 68, 72, 63, 71, 64, 72, 74, 57, 97, 88, 60, 62, 79, 52, 40, 69, 60, 53, 56, 65, 68, 71, 33, 49, 57, 94, 60, 69, 73, 56, 53, 71, 61, 78, 65, 47, 77, 42, 72, 65, 58, 88, 92, 61, 101, 63, 113, 66, 57, 67, 50, 65, 104, 63, 69, 74, 41, 58, 79, 62, 70, 48, 67, 72, 64, 75, 75, 68, 59, 71, 72, 60, 55, 60, 89, 69, 69, 74, 38, 60, 83, 65, 78, 94, 79, 74, 51, 66, 90, 61, 69, 66, 50, 57, 53, 84, 48, 58, 75, 53, 63, 64, 75, 71, 43, 57, 52, 55, 60, 69, 51, 68, 68, 64, 75, 57, 62, 57, 95, 61, 44, 65, 73, 70, 76, 55, 58, 59, 64, 95, 64, 61, 60, 72, 49, 61, 49, 66, 78, 74, 84, 60, 51, 38, 76, 74, 76, 72, 136, 54, 68, 87, 67, 64, 76, 67, 88, 81, 91, 56, 56, 72, 60, 62, 85, 53, 60, 87, 68, 63, 65, 66, 60, 60, 73, 51, 83, 57, 57, 77, 72, 57, 58, 61, 72, 62, 76, 89, 45, 54, 67, 61, 71, 54, 76, 58, 52, 50, 41, 70, 56, 59, 57, 75, 39, 63, 49, 76, 62, 56, 54, 59, 70, 57, 79, 85, 57, 49, 60, 68, 48, 83, 62, 46, 68, 38, 74, 73, 80, 66, 93, 84, 62, 82, 46, 89, 70, 62, 64, 54, 58, 74, 57, 66, 68, 62, 87, 61, 70, 73, 61, 62, 49, 66, 56, 67, 99, 58, 85, 41, 86, 57, 49, 65, 54, 64, 73, 88, 72, 80, 54, 74, 60, 80, 74, 68, 64, 54, 57, 58, 78, 83, 113, 68, 62, 67, 46, 90, 45, 62, 64, 75, 70, 77, 58, 67, 47, 66, 57, 87, 52, 66, 65, 87, 100, 78, 82, 76, 64, 66, 81, 48, 59, 63, 76, 62, 48, 69, 58, 55, 67, 72, 73, 40, 74, 63, 49, 68, 70, 60, 122, 52, 65, 51, 68, 71, 65, 84, 65, 53, 71, 62, 70, 37, 68, 78, 76, 44, 77, 51, 82, 57, 60, 75, 61, 47, 67, 46, 63, 77, 65, 59, 59, 64, 69, 76, 65, 56, 67, 79, 70, 69, 57, 52, 70, 54, 66, 72, 58, 60, 85, 75, 72, 60, 59, 38, 67, 57, 80, 58, 52, 55, 65, 51, 55, 80, 42, 50, 50, 55, 66, 57, 58, 51, 60, 63, 40, 62, 66, 70, 84, 72, 52, 67, 66, 58, 68, 73, 70, 86, 51, 65, 63, 48, 80, 46, 68, 86, 67, 49, 76, 71, 64, 66, 66, 69, 54, 68, 61, 85, 71, 63, 68, 65, 50, 103, 44, 55, 72, 66, 60, 59, 48, 59, 46, 47, 67, 78, 41, 73, 60, 77, 63, 46, 59, 64, 41, 62, 61, 62, 85, 63, 81, 59, 73, 78, 45, 73, 67, 57, 73, 56, 60, 82, 63, 70, 80, 63, 55, 61, 71, 63, 67, 64, 53, 49, 59, 54, 57, 53, 61, 81, 71, 69, 54, 84, 72, 79, 59, 146, 74, 76, 65, 53, 59, 60, 60, 43, 73, 79, 63, 80, 55, 78, 80, 59, 60, 74, 77, 80, 61, 62, 57, 54, 60, 53, 55, 63, 55, 69, 53, 60, 58, 82, 53, 78, 59, 62, 71, 68, 97, 64, 76, 90, 67, 58, 88, 60, 70, 69, 75, 54, 65, 49, 58, 56, 57, 63, 72, 59, 74, 84, 57, 58, 55, 75, 79, 89, 89, 66, 64, 55, 48, 61, 57, 72, 74, 79, 66, 66, 102, 47, 60, 63, 52, 61, 62, 57, 79, 43, 62, 60, 
60, 68, 77, 84, 61, 53, 83, 157, 71, 83, 56, 84, 91, 75, 78, 58, 63, 52, 98, 113, 55, 53, 77, 85, 41, 67, 45, 66, 74, 80, 71, 72, 59, 55, 65, 54, 53, 65, 54, 105, 64, 46, 56, 45, 55, 68, 79, 64, 74, 59, 63, 70, 59, 78, 49, 55, 50, 66, 57, 77, 59, 59, 92, 71, 70, 80, 56, 49, 50, 60, 74, 54, 72, 68, 62, 63, 63, 53, 43, 62, 98, 62, 68, 55, 54, 74, 84, 87, 67, 52, 76, 46, 70, 68, 58, 41, 58, 81, 85, 51, 105, 67, 69, 68, 96, 70, 68, 82, 64, 60, 79, 53, 55, 59, 65, 86, 86, 40, 46, 78, 89, 60, 77, 77, 71, 116, 43, 52, 67, 45, 108, 59, 42, 68, 76, 51, 70, 56, 72, 57, 62, 73, 64, 52, 51, 69, 74, 55, 56, 69, 80, 69, 68, 43, 40, 64, 50, 68, 61, 63, 59, 66, 88, 62, 54, 76, 54, 71, 69, 66, 69, 57, 77, 67, 74, 77, 61, 63, 76, 72, 67, 61, 56, 46, 47, 54, 78, 42, 71, 93, 65, 68, 82, 58, 74, 60, 57, 50, 57, 60, 94, 59, 62, 41, 56, 62, 43, 51, 57, 87, 57, 71, 58, 74, 56, 72, 69, 91, 62, 62, 65, 74, 47, 87, 85, 67, 50, 54, 62, 37, 81, 42, 61, 71, 97, 54, 66, 68, 86, 58, 49, 45, 52, 70, 58, 77, 86, 84, 52, 60, 52, 71, 52, 75, 84, 67, 72, 76, 65, 86, 64, 109, 47, 39, 48, 77, 56, 59, 57, 63, 77, 55, 76, 65, 58, 80, 68, 62, 63, 56, 70, 100, 62, 50, 44, 45, 48, 87, 46, 112, 57, 63, 71, 101, 84, 61, 69, 72, 69, 51, 59, 48, 76, 76, 63, 66, 58, 67, 53, 64, 67, 71, 61, 67, 58, 62, 78, 66, 70, 85, 77, 69, 87, 63, 49, 51, 66, 68, 49, 74, 67, 68, 65, 53, 48, 59, 50, 62, 57, 61, 57, 55, 105, 82, 73, 61, 73, 62, 49, 77, 52, 57, 74, 54, 81, 71, 70, 50, 114, 61, 40, 63, 76, 74, 65, 65, 51, 55, 42, 65, 72, 43, 57, 56, 88, 75, 85, 64, 52, 64, 57, 83, 61, 68, 96, 75, 81, 83, 72, 51, 67, 55, 52, 59, 62, 64, 56, 75, 53, 61, 54, 45, 56, 85, 93, 43, 65, 62, 56, 47, 64, 61, 45, 40, 73, 60, 78, 75, 70, 80, 59, 56, 49, 90, 67, 67, 80, 57, 65, 53, 66, 77, 48, 59, 66, 55, 80, 85, 76, 63, 51, 53, 76, 72, 37, 68, 42, 46, 67, 74, 74, 78, 56, 87, 55, 70, 58, 63, 61, 131, 60, 63, 67, 51, 78, 60, 57, 85, 57, 76, 82, 58, 54, 59, 53, 70, 62, 70, 87, 52, 55, 45, 60, 61, 70, 62, 60, 65, 59, 58, 63, 57, 88, 81, 47, 61, 59, 65, 74, 53, 66, 62, 64, 57, 72, 66, 65, 91, 58, 45, 88, 65, 62, 39, 60, 56, 52, 62, 60, 55, 67, 75, 61, 69, 81, 68, 55, 64, 56, 49, 49, 46, 70, 80, 106, 73, 50, 60, 52, 76, 77, 60, 59, 87, 55, 68, 50, 51, 59, 63, 55, 79, 75, 68, 88, 60, 75, 69, 94, 67, 64, 56, 83, 48, 54, 78, 63, 63, 68, 86, 89, 73, 54, 61, 59, 77, 69, 51, 60, 60, 68, 108, 75, 52, 87, 70, 51, 79, 69, 55, 35, 88, 70, 60, 64, 67, 57, 59, 59, 59, 51, 69, 77, 65, 71, 95, 60, 57, 65, 58, 45, 53, 57, 97, 67, 60, 71, 62, 73, 64, 51, 66, 101, 82, 76, 69, 64, 62, 62, 60, 61, 53, 63, 55, 66, 65, 65, 78, 76, 89, 77, 36, 70, 42, 77, 58, 65, 58, 64, 89, 49, 56, 58, 64, 84, 60, 76, 66, 100, 76, 63, 63, 64, 54, 65, 56, 52, 82, 50, 67, 70, 86, 81, 54, 55, 48, 70, 43, 106, 63, 56, 54, 67, 73, 40, 57, 47, 36, 61, 71, 92, 89, 87, 77, 61, 54, 72, 66, 71, 58, 56, 109, 79, 73, 66, 65, 79, 84, 54, 67, 50, 57, 57, 50, 53, 61, 80, 79, 90, 46, 57, 61, 65, 65, 54, 57, 32, 72, 57, 59, 56, 68, 61, 67, 44, 76, 95, 80, 59, 62, 52, 63, 51, 64, 80, 89, 54, 74, 55, 47, 60, 63, 50, 75, 73, 57, 60, 104, 62, 74, 111, 56, 68, 46, 68, 77, 65, 75, 82, 62, 57, 56, 54, 48, 78, 74, 57, 69, 42, 42, 68, 59, 56, 71, 66, 67, 53, 56, 69, 69, 80, 56, 75, 65, 60, 74, 44, 62, 83, 78, 76, 69, 79, 105, 46, 45, 67, 76, 112, 54, 63, 74, 100, 57, 69, 94, 61, 46, 64, 52, 58, 55, 54, 77, 67, 66, 77, 63, 63, 55, 64, 71, 53, 68, 70, 66, 67, 88, 74, 66, 70, 51, 83, 57, 92, 72, 62, 88, 45, 108, 81, 53, 64, 52, 41, 56, 61, 49, 72, 86, 69, 64, 59, 58, 58, 88, 60, 64, 48, 56, 60, 65, 64, 60, 64, 58, 66, 52, 74, 42, 65, 89, 
75, 52, 77, 51, 69, 78, 71, 59, 59, 68, 75, 48, 91, 65, 72, 56, 74, 69, 66, 55, 48, 74, 46, 48, 95, 107, 77, 67, 69, 77, 78, 65, 71, 69, 84, 60, 67, 95, 63, 67, 52, 73, 48, 49, 62, 101, 61, 73, 44, 68, 73, 78, 65, 55, 55, 95, 60, 87, 53, 65, 55, 58, 51, 65, 81, 74, 86, 84, 60, 57, 54, 67, 71, 64, 68, 56, 79, 87, 61, 79, 54, 62, 66, 58, 60, 59, 39, 53, 55, 89, 60, 71, 66, 81, 66, 54, 58, 58, 61, 56, 44, 65, 57, 70, 57, 70, 69, 92, 53, 86, 55, 64, 56, 66, 87, 73, 77, 59, 55, 78, 54, 67, 56, 57, 66, 79, 61, 65, 62, 70, 76, 66, 52, 76, 70, 68, 89, 64, 56, 68, 36, 63, 55, 54, 54, 83, 61, 68, 66, 87, 68, 73, 62, 76, 112, 66, 61, 66, 124, 54, 71, 68, 56, 58, 53, 62, 56, 62, 69, 62, 75, 69, 86, 49, 58, 96, 82, 80, 78, 50, 52, 52, 53, 72, 67, 76, 68, 62, 76, 50, 66, 69, 64, 76, 51, 45, 59, 48, 70, 78, 58, 69, 68, 69, 66, 62, 81, 86, 90, 60, 75, 63, 54, 58, 70, 51, 62, 48, 48, 74, 58, 94, 80, 62, 64, 72, 80, 48, 151, 54, 83, 63, 56, 46, 74, 49, 93, 44, 62, 59, 69, 59, 80, 86, 45, 71, 79, 53, 53, 61, 71, 67, 45, 72, 83, 72, 68, 68, 44, 75, 53, 48, 88, 52, 41, 73, 63, 70, 67, 76, 45, 55, 51, 75, 89, 58, 80, 64, 69, 76, 75, 46, 77, 65, 76, 104, 56, 70, 42, 82, 48, 94, 68, 73, 47, 76, 68, 47, 84, 55, 81, 53, 51, 63, 73, 69, 71, 62, 53, 51, 69, 74, 69, 67, 63, 64, 74, 69, 69, 87, 71, 70, 45, 65, 76, 82, 76, 66, 46, 79, 77, 67, 44, 69, 50, 47, 80, 61, 62, 72, 83, 49, 66, 73, 84, 74, 64, 73, 46, 48, 50, 62, 72, 76, 72, 63, 64, 45, 55, 60, 83, 47, 55, 67, 55, 56, 75, 56, 70, 72, 51, 55, 84, 67, 69, 74, 48, 48, 84, 59, 67, 63, 42, 74, 65, 81, 47, 91, 63, 58, 54, 66, 35, 80, 48, 58, 72, 63, 57, 62, 75, 52, 64, 83, 59, 60, 70, 77, 65, 64, 56, 82, 53, 66, 65, 59, 54, 60, 75, 45, 125, 70, 87, 52, 58, 70, 67, 69, 63, 57, 51, 54, 46, 42, 104, 63, 63, 70, 65, 111, 67, 51, 58, 64, 108, 48, 59, 68, 57, 56, 67, 62, 63, 72, 60, 80, 57, 45, 60, 72, 39, 89, 75, 65, 58, 76, 84, 23, 62, 66, 62, 76, 50, 61, 55, 55, 58, 60, 64, 61, 51, 61, 62, 39, 65, 57, 56, 94, 84, 89, 35, 76, 82, 59, 49, 73, 145, 52, 72, 52, 78, 63, 54, 76, 63, 66, 66, 70, 94, 58, 54, 74, 55, 57, 55, 67, 64, 67, 73, 71, 66, 58, 58, 76, 64, 62, 50, 59, 70, 51, 72, 115, 91, 65, 59, 67, 58, 56, 126, 72, 41, 56, 71, 64, 42, 67, 45, 61, 46, 59, 65, 71, 98, 58, 90, 59, 56, 63, 103, 47, 74, 58, 64, 60, 56, 77, 75, 60, 72, 70, 65, 59, 73, 49, 92, 81, 56, 77, 66, 64, 86, 59, 86, 58, 77, 98, 69, 48, 106, 58, 57, 61, 57, 55, 49, 75, 74, 69, 63, 54, 53, 64, 111, 74, 62, 75, 118, 53, 75, 62, 51, 53, 81, 61, 49, 59, 49, 49, 70, 65, 51, 71, 64, 64, 82, 61, 85, 56, 48, 66, 78, 78, 39, 61, 85, 96, 49, 63, 44, 49, 92, 57, 38, 60, 92, 53, 63, 56, 75, 77, 64, 61, 76, 60, 79, 49, 67, 47, 47, 49, 152, 56, 49, 62, 44, 74, 83, 62, 67, 58, 51, 61, 57, 62, 78, 78, 85, 60, 64, 63, 66, 71, 65, 67, 59, 97, 82, 71, 67, 125, 67, 69, 48, 87, 63, 77, 87, 48, 47, 69, 72, 40, 58, 86, 69, 51, 36, 85, 50, 72, 103, 68, 63, 74, 65, 65, 81, 76, 75, 66, 70, 84, 66, 54, 58, 53, 48, 76, 72, 71, 60, 57, 83, 60, 100, 61, 45, 63, 74, 58, 45, 89, 95, 83, 69, 107, 62, 62, 57, 64, 67, 59, 66, 62, 75, 60, 62, 48, 59, 65, 63, 67, 59, 48, 64, 66, 65, 75, 69, 65, 80, 76, 77, 77, 64, 44, 51, 54, 66, 77, 83, 25, 48, 57, 55, 73, 55, 74, 62, 68, 58, 66, 45, 43, 61, 67, 61, 58, 55, 51, 69, 52, 61, 69, 66, 48, 55, 72, 65, 97, 56, 70, 57, 46, 49, 68, 63, 64, 71, 54, 56, 57, 68, 75, 58, 71, 63, 68, 75, 64, 52, 69, 56, 95, 70, 94, 68, 52, 74, 65, 76, 56, 62, 56, 65, 57, 63, 81, 67, 71, 45, 60, 84, 68, 75, 53, 76, 76, 37, 110, 122, 84, 73, 64, 55, 68, 73, 55, 87, 48, 70, 60, 65, 56, 44, 82, 48, 41, 57, 64, 62, 77, 
61, 79, 62, 67, 77, 51, 81, 64, 80, 48, 46, 62, 48, 59, 53, 61, 90, 64, 64, 54, 60, 40, 111, 57, 56, 57, 43, 41, 66, 79, 55, 59, 81, 68, 64, 76, 67, 55, 92, 56, 56, 79, 78, 51, 34, 73, 77, 72, 67, 68, 58, 60, 53, 51, 66, 72, 67, 54, 86, 83, 67, 80, 47, 75, 76, 64, 57, 54, 62, 64, 78, 77, 77, 66, 86, 58, 51, 70, 61, 69, 115, 75, 62, 63, 62, 56, 89, 59, 56, 88, 60, 55, 65, 62, 70, 56, 90, 82, 52, 58, 82, 75, 69, 93, 74, 47, 47, 79, 48, 69, 29, 66, 71, 79, 87, 85, 85, 63, 35, 55, 57, 75, 66, 55, 93, 51, 62, 70, 73, 52, 38, 56, 57, 55, 71, 70, 59, 71, 83, 57, 80, 57, 96, 71, 67, 75, 66, 81, 98, 74, 75, 65, 80, 95, 88, 64, 60, 71, 77, 59, 74, 48, 89, 70, 64, 78, 63, 57, 58, 65, 69, 70, 68, 48, 54, 51, 45, 70, 82, 55, 59, 69, 60, 57, 62, 52, 67, 55, 75, 65, 69, 75, 77, 78, 72, 66, 68, 72, 62, 76, 78, 77, 53, 71, 51, 71, 47, 79, 68, 47, 63, 75, 48, 63, 70, 61, 83, 72, 73, 43, 61, 92, 75, 57, 50, 63, 48, 66, 39, 57, 57, 74, 71, 72, 63, 55, 96, 44, 86, 55, 49, 67, 61, 71, 66, 69, 48, 62, 62, 58, 55, 56, 61, 72, 59, 72, 84, 56, 86, 61, 68, 87, 59, 88, 110, 65, 64, 30, 72, 55, 62, 73, 91, 57, 77, 71, 97, 63, 62, 55, 109, 64, 61, 71, 61, 44, 92, 52, 71, 63, 81, 69, 47, 52, 86, 85, 59, 59, 59, 53, 58, 51, 52, 58, 77, 84, 79, 72, 82, 66, 40, 66, 54, 71, 41, 51, 91, 57, 59, 48, 83, 76, 60, 100, 91, 53, 60, 51, 67, 39, 77, 64, 52, 66, 67, 75, 52, 57, 48, 47, 59, 71, 57, 51, 64, 56, 65, 55, 84, 61, 58, 57, 61, 93, 66, 72, 65, 66, 91, 64, 48, 73, 65, 97, 61, 74, 76, 56, 76, 64, 102, 58, 79, 51, 72, 55, 64, 67, 78, 52, 43, 48, 53, 71, 68, 92, 81, 65, 43, 58, 100, 57, 69, 58, 72, 77, 81, 83, 64, 68, 64, 47, 66, 74, 52, 75, 77, 55, 66, 78, 58, 72, 64, 76, 46, 62, 128, 73, 79, 68, 68, 71, 67, 57, 56, 84, 55, 57, 63, 64, 59, 46, 60, 59, 69, 67, 58, 70, 87, 62, 69, 74, 49, 96, 70, 64, 84, 70, 78, 78, 58, 70, 63, 50, 77, 77, 68, 48, 59, 72, 77, 45, 88, 60, 71, 64, 52, 55, 60, 84, 66, 45, 54, 68, 70, 68, 68, 71, 55, 44, 79, 56, 50, 82, 73, 50, 71, 70, 60, 60, 48, 71, 76, 67, 55, 77, 42, 60, 66, 93, 67, 61, 52, 85, 58, 67, 66, 41, 64, 70, 51, 53, 44, 77, 70, 52, 60, 50, 77, 50, 93, 104, 84, 61, 52, 79, 45, 58, 62, 60, 64, 89, 53, 57, 58, 58, 86, 62, 65, 53, 58, 88, 67, 62, 89, 81, 51, 89, 59, 60, 63, 70, 51, 65, 77, 60, 80, 62, 53, 59, 63, 76, 81, 62, 54, 66, 68, 68, 72, 50, 65, 52, 55, 69, 48, 54, 66, 77, 79, 75, 76, 71, 74, 58, 41, 59, 85, 68, 80, 75, 59, 61, 56, 67, 62, 72, 60, 64, 59, 74, 64, 57, 64, 56, 61, 75, 66, 84, 60, 56, 46, 51, 71, 63, 91, 77, 56, 57, 55, 54, 71, 55, 79, 58, 50, 70, 58, 92, 76, 63, 67, 57, 73, 72, 77, 68, 54, 52, 60, 55, 81, 76, 59, 73, 72, 57, 77, 61, 63, 52, 64, 58, 85, 74, 57, 78, 47, 90, 57, 57, 62, 45, 46, 50, 55, 94, 72, 48, 80, 61, 59, 62, 45, 81, 101, 57, 68, 100, 52, 45, 90, 73, 65, 66, 61, 70, 59, 55, 88, 59, 60, 38, 78, 79, 66, 42, 93, 59, 70, 66, 78, 71, 53, 69, 58, 44, 66, 69, 69, 80, 78, 62, 87, 73, 54, 62, 70, 47, 85, 83, 73, 77, 72, 66, 37, 98, 75, 60, 56, 70, 62, 46, 71, 76, 57, 47, 66, 58, 67, 64, 67, 42, 61, 50, 56, 75, 73, 84, 67, 88, 65, 59, 80, 38, 51, 73, 53, 49, 68, 71, 72, 70, 64, 57, 53, 77, 63, 62, 60, 111, 72, 48, 76, 73, 49, 67, 66, 62, 53, 58, 77, 72, 56, 70, 71, 64, 75, 43, 84, 74, 66, 63, 87, 75, 57, 74, 70, 50, 58, 76, 62, 80, 64, 57, 63, 62, 60, 77, 49, 58, 46, 62, 47, 67, 80, 56, 63, 72, 61, 68, 56, 73, 68, 65, 79, 60, 64, 79, 105, 71, 60, 75, 37, 86, 74, 63, 59, 60, 61, 57, 73, 84, 62, 49, 61, 70, 42, 46, 72, 63, 57, 59, 69, 50, 52, 62, 69, 63, 65, 50, 58, 67, 69, 72, 53, 76, 54, 44, 69, 104, 86, 99, 59, 49, 94, 62, 74, 59, 83, 69, 55, 62, 76, 55, 60, 
70, 55, 64, 80, 69, 64, 64, 60, 54, 71, 67, 63, 75, 79, 78, 86, 65, 114, 62, 75, 52, 75, 54, 63, 66, 96, 65, 59, 89, 65, 55, 75, 89, 49, 78, 75, 51, 62, 46, 103, 55, 34, 73, 62, 79, 59, 80, 64, 63, 65, 58, 69, 68, 69, 49, 59, 48, 54, 58, 73, 55, 48, 50, 85, 63, 53, 72, 67, 88, 45, 69, 75, 71, 73, 54, 61, 68, 65, 58, 67, 72, 49, 55, 68, 60, 79, 56, 64, 61, 67, 74, 62, 57, 72, 68, 82, 54, 54, 68, 52, 49, 74, 57, 65, 67, 78, 64, 73, 62, 56, 46, 70, 62, 59, 58, 61, 78, 58, 67, 60, 69, 81, 59, 76, 70, 70, 45, 48, 69, 73, 90, 80, 58, 111, 67, 58, 70, 84, 63, 67, 59, 66, 59, 87, 70, 47, 82, 58, 66, 122, 75, 74, 75, 64, 62, 69, 59, 65, 64, 80, 142, 59, 51, 74, 48, 56, 56, 71, 73, 76, 62, 69, 80, 65, 72, 82, 66, 69, 63, 60, 64, 85, 87, 47, 77, 68, 52, 57, 46, 74, 52, 59, 81, 74, 71, 65, 65, 100, 55, 58, 79, 53, 67, 76, 83, 59, 76, 50, 36, 106, 77, 68, 66, 67, 104, 72, 49, 125, 69, 59, 46, 107, 67, 67, 69, 62, 72, 70, 68, 55, 68, 59, 63, 61, 70, 61, 54, 44, 51, 63, 57, 49, 72, 72, 56, 65, 61, 64, 59, 60, 40, 56, 63, 49, 40, 70, 57, 71, 56, 66, 105, 78, 69, 80, 55, 78, 118, 62, 59, 81, 72, 65, 64, 70, 58, 57, 53, 70, 83, 65, 72, 63, 59, 66, 43, 55, 59, 55, 71, 77, 82, 91, 51, 55, 88, 81, 72, 53, 46, 62, 73, 40, 78, 61, 62, 65, 51, 64, 63, 134, 53, 73, 62, 53, 46, 49, 70, 87, 58, 50, 58, 72, 60, 68, 62, 79, 64, 56, 68, 63, 57, 56, 79, 65, 61, 60, 82, 52, 78, 65, 69, 44, 50, 54, 67, 73, 65, 53, 47, 66, 85, 76, 75, 56, 67, 69, 56, 63, 70, 79, 63, 69, 65, 60, 70, 63, 40, 74, 64, 75, 75, 48, 61, 58, 54, 61, 81, 56, 78, 56, 69, 62, 53, 62, 79, 59, 70, 70, 51, 75, 71, 71, 58, 92, 68, 78, 55, 46, 65, 80, 81, 57, 55, 79, 66, 54, 40, 71, 64, 65, 81, 70, 81, 70, 51, 75, 70, 46, 80, 125, 77, 57, 65, 66, 68, 89, 66, 101, 55, 60, 67, 54, 42, 66, 71, 66, 77, 59, 63, 67, 63, 63, 61, 55, 52, 82, 60, 60, 73, 54, 61, 99, 50, 68, 62, 66, 54, 102, 113, 54, 58, 62, 63, 69, 94, 89, 70, 71, 58, 57, 62, 68, 51, 50, 50, 55, 114, 85, 59, 57, 53, 42, 62, 92, 74, 82, 57, 83, 61, 46, 76, 74, 73, 46, 66, 64, 76, 84, 62, 68, 65, 67, 55, 76, 73, 75, 51, 62, 74, 53, 49, 70, 111, 41, 73, 74, 63, 55, 77, 56, 53, 70, 59, 61, 60, 70, 79, 56, 64, 100, 62, 59, 57, 64, 72, 60, 64, 60, 60, 56, 68, 76, 73, 91, 59, 62, 77, 64, 63, 74, 63, 96, 80, 55, 43, 79, 61, 62, 58, 77, 130, 67, 55, 71, 77, 66, 80, 59, 72, 79, 89, 70, 76, 65, 81, 69, 73, 68, 41, 62, 55, 74, 56, 65, 56, 57, 73, 75, 71, 56, 45, 67, 85, 65, 66, 83, 58, 73, 83, 54, 73, 71, 56, 62, 54, 49, 60, 73, 71, 118, 73, 47, 59, 89, 83, 58, 102, 131, 76, 62, 48, 53, 62, 63, 60, 73, 101, 80, 42, 86, 68, 68, 78, 36, 79, 69, 60, 57, 85, 57, 81, 58, 60, 52, 59, 95, 56, 60, 59, 64, 48, 47, 46, 56, 66, 49, 62, 57, 58, 80, 78, 59, 74, 53, 55, 84, 68, 57, 58, 54, 84, 75, 69, 85, 63, 62, 68, 80, 46, 75, 62, 52, 72, 73, 67, 69, 72, 47, 65, 60, 59, 59, 60, 93, 44, 70, 74, 68, 57, 56, 51, 79, 62, 71, 84, 75, 108, 59, 65, 76, 62, 63, 51, 36, 80, 108, 69, 60, 54, 72, 51, 57, 70, 60, 68, 73, 64, 68, 70, 68, 55, 80, 80, 70, 66, 47, 71, 69, 55, 66, 65, 73, 41, 57, 58, 81, 60, 53, 82, 52, 54, 56, 69, 70, 80, 50, 76, 74, 76, 68, 67, 63, 59, 61, 69, 54, 92, 71, 45, 58, 58, 70, 68, 67, 45, 64, 48, 57, 122, 56, 62, 56, 78, 53, 71, 59, 65, 70, 48, 89, 60, 50, 89, 72, 118, 51, 78, 69, 54, 55, 55, 65, 72, 61, 57, 79, 76, 71, 51, 54, 48, 59, 79, 86, 77, 51, 67, 86, 78, 68, 41, 65, 55, 77, 89, 111, 60, 70, 79, 58, 49, 66, 63, 66, 59, 50, 53, 60, 54, 64, 60, 69, 61, 65, 58, 79, 88, 77, 69, 70, 74, 71, 80, 53, 77, 56, 68, 66, 60, 71, 52, 38, 74, 79, 73, 51, 42, 74, 52, 64, 81, 100, 56, 59, 71, 60, 82, 79, 65, 66, 57, 
56, 76, 57, 69, 80, 58, 67, 38, 78, 54, 70, 65, 50, 54, 52, 84, 52, 67, 60, 60, 55, 68, 57, 57, 55, 62, 70, 64, 79, 56, 67, 67, 60, 68, 85, 66, 71, 68, 74, 56, 52, 55, 62, 59, 63, 89, 45, 62, 55, 53, 70, 52, 45, 56, 66, 57, 78, 57, 51, 60, 61, 61, 69, 62, 75, 69, 69, 74, 93, 57, 53, 73, 52, 75, 53, 53, 51, 57, 61, 74, 61, 58, 104, 69, 60, 89, 55, 59, 94, 79, 47, 46, 69, 71, 83, 62, 81, 35, 74, 51, 68, 63, 51, 81, 52, 61, 65, 68, 57, 48, 66, 60, 132, 70, 53, 83, 69, 72, 48, 58, 89, 55, 121, 56, 54, 55, 60, 50, 58, 69, 73, 69, 83, 69, 64, 77, 64, 61, 68, 122, 52, 65, 84, 88, 69, 58, 79, 129, 53, 52, 65, 91, 74, 53, 68, 78, 45, 88, 64, 78, 60, 72, 65, 76, 65, 65, 49, 60, 70, 52, 68, 63, 54, 64, 63, 65, 62, 71, 61, 82, 46, 74, 53, 65, 62, 59, 56, 51, 64, 61, 59, 51, 54, 60, 75, 57, 58, 76, 79, 88, 55, 67, 66, 45, 67, 64, 51, 56, 50, 55, 104, 44, 70, 64, 89, 69, 49, 62, 52, 57, 85, 53, 60, 68, 75, 66, 56, 61, 44, 75, 45, 83, 70, 70, 62, 66, 78, 58, 64, 69, 44, 71, 78, 73, 81, 56, 60, 70, 79, 55, 86, 71, 62, 78, 76, 56, 62, 53, 58, 74, 55, 64, 53, 85, 37, 57, 56, 49, 117, 91, 59, 61, 60, 72, 74, 70, 77, 70, 117, 60, 95, 75, 41, 56, 61, 78, 70, 52, 54, 56, 64, 74, 72, 60, 67, 43, 61, 80, 85, 57, 68, 57, 70, 60, 70, 97, 64, 104, 67, 78, 66, 47, 65, 69, 55, 40, 81, 68, 60, 96, 85, 52, 73, 53, 78, 67, 78, 79, 75, 87, 62, 62, 65, 51, 69, 50, 58, 63, 58, 42, 59, 94, 79, 63, 92, 56, 108, 65, 42, 68, 64, 49, 60, 49, 74, 85, 57, 48, 63, 79, 64, 37, 55, 58, 47, 59, 68, 79, 57, 68, 64, 43, 61, 69, 54, 89, 73, 76, 94, 57, 50, 78, 56, 50, 86, 80, 82, 62, 68, 69, 73, 60, 74, 45, 59, 68, 63, 74, 69, 59, 54, 72, 70, 59, 83, 90, 71, 55, 47, 63, 96, 54, 57, 91, 39, 85, 48, 52, 86, 56, 76, 86, 76, 61, 72, 62, 68, 64, 77, 70, 44, 71, 64, 67, 56, 50, 48, 57, 64, 65, 73, 76, 74, 84, 71, 44, 72, 43, 37, 52, 54, 58, 43, 47, 66, 70, 57, 71, 41, 125, 56, 79, 77, 54, 64, 82, 62, 45, 127, 71, 62, 64, 37, 90, 54, 80, 68, 101, 50, 59, 96, 57, 78, 72, 96, 81, 63, 63, 89, 39, 65, 88, 64, 63, 92, 60, 54, 63, 66, 67, 70, 59, 62, 90, 52, 69, 58, 82, 56, 76, 69, 98, 43, 77, 73, 59, 67, 52, 85, 67, 46, 60, 87, 61, 72, 66, 71, 67, 69, 48, 82, 112, 75, 50, 64, 55, 55, 64, 72, 77, 50, 59, 72, 76, 62, 56, 65, 71, 66, 76, 70, 66, 48, 59, 83, 63, 47, 53, 97, 61, 54, 79, 58, 66, 68, 50, 61, 63, 92, 83, 64, 55, 65, 52, 64, 63, 52, 77, 68, 73, 58, 79, 60, 47, 71, 62, 56, 63, 62, 59, 61, 64, 73, 66, 76, 50, 155, 98, 103, 63, 42, 50, 63, 70, 59, 68, 73, 41, 72, 55, 56, 63, 64, 70, 62, 61, 54, 95, 76, 58, 100, 55, 94, 39, 49, 58, 57, 59, 65, 88, 56, 65, 46, 87, 106, 65, 62, 77, 63, 62, 65, 61, 70, 93, 64, 73, 60, 49, 53, 69, 62, 45, 74, 69, 76, 84, 87, 64, 78, 62, 38, 86, 75, 77, 71, 67, 61, 54, 127, 61, 57, 58, 61, 70, 46, 104, 80, 58, 56, 74, 50, 75, 41, 55, 67, 55, 56, 57, 47, 39, 63, 84, 54, 79, 104, 42, 65, 56, 56, 66, 67, 57, 70, 64, 50, 63, 68, 59, 47, 49, 53, 100, 83, 67, 60, 76, 79, 60, 78, 88, 61, 63, 67, 58, 59, 62, 61, 59, 90, 83, 71, 79, 45, 72, 54, 50, 49, 35, 79, 54, 50, 88, 70, 66, 58, 71, 67, 71, 70, 63, 66, 65, 49, 52, 54, 70, 81, 54, 59, 58, 67, 51, 77, 91, 71, 59, 67, 63, 99, 83, 50, 65, 62, 60, 85, 62, 66, 62, 102, 81, 50, 57, 77, 58, 51, 62, 51, 48, 59, 102, 69, 80, 66, 64, 81, 68, 50, 49, 60, 51, 56, 55, 48, 93, 51, 50, 60, 43, 92, 51, 63, 71, 56, 91, 74, 56, 64, 71, 73, 51, 59, 128, 72, 85, 64, 65, 67, 64, 73, 69, 60, 67, 72, 36, 57, 70, 95, 43, 51, 52, 49, 63, 43, 82, 35, 61, 62, 59, 73, 76, 67, 63, 74, 61, 59, 62, 77, 53, 63, 56, 47, 55, 50, 52, 64, 71, 74, 61, 87, 60, 47, 49, 84, 77, 90, 75, 82, 70, 73, 47, 66, 38, 
76, 67, 59, 44, 108, 58, 68, 69, 51, 61, 63, 67, 64, 53, 73, 61, 73, 63, 66, 68, 89, 58, 64, 109, 46, 60, 76, 62, 72, 62, 44, 60, 80, 43, 64, 73, 50, 60, 56, 63, 69, 41, 91, 60, 67, 80, 86, 103, 46, 95, 61, 65, 65, 67, 58, 61, 75, 39, 66, 76, 76, 77, 61, 80, 53, 63, 35, 65, 60, 71, 68, 45, 80, 73, 53, 59, 46, 68, 63, 66, 69, 58, 64, 63, 59, 65, 77, 69, 72, 50, 68, 112, 99, 71, 82, 73, 82, 57, 64, 37, 64, 85, 90, 67, 75, 67, 60, 53, 67, 59, 58, 53, 61, 65, 81, 58, 46, 63, 60, 91, 84, 53, 66, 72, 92, 55, 64, 76, 78, 85, 51, 58, 55, 57, 64, 66, 64, 120, 77, 61, 74, 66, 47, 66, 79, 61, 59, 64, 41, 88, 54, 74, 67, 64, 68, 56, 47, 62, 78, 65, 41, 65, 50, 47, 92, 97, 73, 87, 55, 120, 52, 61, 73, 54, 73, 41, 59, 94, 52, 54, 58, 80, 69, 68, 72, 105, 73, 63, 40, 53, 59, 79, 61, 74, 99, 51, 67, 56, 74, 72, 70, 65, 65, 50, 63, 74, 77, 73, 51, 60, 57, 53, 65, 74, 83, 69, 64, 72, 71, 104, 67, 96, 64, 62, 66, 68, 55, 48, 43, 58, 46, 62, 55, 94, 51, 79, 65, 53, 84, 74, 57, 50, 67, 46, 82, 80, 68, 93, 42, 64, 55, 81, 49, 54, 52, 51, 64, 87, 52, 83, 58, 51, 64, 56, 55, 79, 68, 53, 67, 55, 73, 68, 60, 57, 51, 74, 60, 50, 105, 57, 77, 112, 67, 61, 64, 66, 59, 82, 66, 63, 44, 70, 75, 50, 57, 49, 52, 59, 69, 68, 81, 61, 70, 95, 67, 56, 66, 65, 71, 48, 71, 70, 45, 53, 77, 106, 67, 61, 101, 72, 57, 69, 60, 44, 46, 90, 73, 66, 78, 47, 73, 56, 61, 86, 60, 58, 64, 66, 81, 61, 54, 94, 64, 62, 57, 52, 61, 68, 69, 40, 68, 39, 64, 60, 58, 54, 115, 60, 74, 57, 61, 49, 61, 57, 66, 79, 69, 47, 61, 63, 74, 58, 49, 55, 65, 60, 59, 62, 51, 58, 54, 96, 79, 45, 60, 41, 66, 73, 51, 103, 78, 71, 77, 85, 51, 81, 69, 66, 58, 63, 54, 50, 50, 47, 80, 60, 51, 73, 49, 64, 69, 59, 71, 57, 85, 51, 64, 77, 61, 80, 59, 59, 50, 83, 63, 71, 81, 61, 62, 76, 66, 56, 82, 104, 67, 51, 35, 48, 69, 48, 59, 66, 56, 67, 107, 69, 46, 58, 78, 69, 73, 54, 58, 47, 53, 68, 75, 57, 86, 74, 75, 78, 57, 63, 57, 76, 39, 58, 67, 50, 58, 56, 88, 48, 68, 73, 76, 99, 65, 65, 62, 52, 93, 70, 53, 81, 68, 58, 65, 67, 59, 45, 68, 66, 92, 76, 52, 69, 69, 56, 61, 66, 64, 84, 84, 76, 51, 79, 129, 76, 57, 75, 74, 59, 54, 53, 37, 67, 84, 77, 71, 41, 57, 68, 54, 65, 77, 55, 53, 83, 83, 61, 90, 53, 50, 43, 51, 73, 50, 90, 44, 55, 66, 64, 76, 65, 59, 76, 65, 49, 74, 48, 62, 105, 76, 101, 52, 82, 69, 71, 46, 52, 57, 54, 48, 39, 94, 29, 56, 59, 59, 72, 69, 72, 73, 66, 74, 88, 82, 66, 79, 63, 63, 57, 58, 64, 87, 65, 75, 72, 63, 66, 76, 68, 56, 59, 106, 65, 57, 52, 95, 51, 91, 67, 81, 59, 69, 56, 54, 66, 56, 88, 103, 74, 53, 85, 60, 121, 86, 65, 88, 69, 57, 66, 73, 50, 60, 54, 95, 67, 58, 75, 78, 75, 76, 54, 55, 62, 54, 84, 82, 93, 101, 56, 55, 155, 49, 70, 80, 79, 66, 77, 59, 58, 34, 65, 48, 63, 118, 66, 62, 68, 126, 65, 56, 59, 74, 89, 76, 70, 50, 58, 58, 66, 77, 66, 68, 54, 63, 76, 84, 69, 56, 45, 63, 47, 65, 63, 52, 72, 58, 53, 70, 69, 80, 67, 80, 58, 75, 95, 84, 74, 59, 42, 56, 66, 66, 83, 76, 80, 59, 74, 72, 89, 47, 61, 60, 125, 57, 87, 80, 71, 68, 50, 66, 57, 49, 55, 79, 64, 69, 67, 63, 72, 40, 81, 97, 61, 33, 57, 87, 58, 56, 63, 48, 95, 53, 59, 63, 59, 62, 67, 63, 61, 71, 59, 63, 44, 91, 55, 65, 65, 81, 50, 49, 71, 52, 66, 46, 119, 63, 63, 57, 42, 85, 71, 51, 59, 50, 41, 70, 56, 77, 62, 64, 57, 61, 82, 63, 51, 67, 55, 51, 56, 40, 62, 70, 84, 62, 77, 40, 55, 48, 78, 53, 87, 40, 53, 77, 68, 51, 69, 60, 66, 56, 62, 52, 60, 80, 56, 102, 77, 88, 52, 64, 47, 78, 44, 61, 78, 63, 60, 68, 111, 74, 60, 45, 69, 43, 65, 94, 48, 38, 48, 70, 75, 45, 64, 66, 74, 81, 81, 66, 66, 84, 69, 85, 53, 51, 57, 79, 66, 70, 48, 52, 60, 61, 74, 81, 78, 142, 47, 79, 55, 67, 79, 63, 66, 67, 60, 73, 
94, 66, 52, 68, 85, 81, 61, 112, 92, 72, 68, 68, 68, 78, 72, 53, 48, 99, 58, 68, 48, 97, 52, 60, 114, 76, 81, 66, 48, 74, 71, 63, 75, 66, 55, 78, 48, 68, 71, 75, 37, 68, 94, 48, 45, 80, 78, 67, 48, 82, 52, 53, 46, 77, 137, 69, 64, 73, 73, 58, 74, 55, 87, 68, 50, 43, 58, 77, 74, 55, 91, 54, 71, 49, 55, 55, 88, 38, 105, 88, 61, 55, 75, 92, 51, 71, 98, 85, 94, 63, 44, 74, 52, 56, 50, 74, 61, 74, 62, 88, 74, 46, 75, 60, 66, 80, 46, 70, 45, 56, 47, 47, 72, 81, 67, 60, 70, 56, 82, 62, 63, 84, 67, 49, 55, 60, 62, 48, 55, 88, 62, 71, 71, 46, 62, 67, 63, 60, 105, 68, 91, 66, 58, 106, 82, 80, 70, 50, 72, 50, 72, 52, 62, 57, 55, 62, 77, 67, 63, 88, 59, 71, 70, 86, 81, 104, 67, 53, 77, 64, 46, 83, 50, 69, 81, 61, 47, 54, 81, 86, 119, 52, 64, 70, 52, 72, 64, 62, 78, 72, 79, 58, 62, 45, 57, 66, 63, 61, 55, 77, 90, 70, 58, 46, 66, 63, 55, 70, 56, 54, 59, 63, 54, 69, 61, 72, 53, 89, 46, 81, 46, 68, 58, 69, 62, 47, 74, 103, 95, 74, 45, 52, 52, 89, 82, 84, 56, 57, 75, 52, 55, 42, 45, 34, 53, 35, 52, 91, 75, 39, 69, 87, 73, 50, 70, 57, 100, 71, 42, 48, 57, 91, 44, 59, 69, 49, 78, 61, 58, 78, 48, 93, 62, 69, 95, 68, 62, 113, 63, 90, 42, 56, 75, 42, 71, 66, 81, 97, 86, 76, 50, 65, 56, 57, 52, 79, 81, 56, 59, 64, 83, 67, 37, 50, 60, 50, 35, 72, 59, 57, 73, 52, 81, 92, 62, 72, 70, 53, 48, 71, 56, 70, 38, 66, 70, 79, 71, 46, 72, 80, 65, 78, 62, 66, 58, 58, 69, 71, 63, 68, 75, 76, 55, 83, 44, 61, 78, 72, 74, 77, 43, 66, 66, 52, 60, 46, 45, 76, 68, 40, 61, 61, 67, 52, 81, 58, 73, 57, 72, 54, 85, 51, 48, 36, 75, 51, 73, 87, 67, 72, 80, 84, 61, 83, 57, 53, 84, 75, 57, 48, 69, 62, 71, 79, 69, 46, 85, 23, 67, 56, 88, 43, 57, 80, 72, 91, 96, 79, 47, 40, 108, 44, 31, 73, 61, 48, 102, 76, 68, 58, 77, 110, 68, 90, 96, 54, 79, 50, 71, 82, 50, 82, 86, 50, 53, 60, 75, 63, 61, 43, 59, 60, 62, 34, 90, 51, 53, 63, 47, 62, 84, 106, 48, 74, 34, 42, 55, 56, 62, 34, 74, 40, 60, 35, 84, 78, 56, 74, 80, 79, 62, 40, 67, 89, 80, 63, 58, 95, 66, 51, 51, 47, 54, 48, 50, 64, 69, 66, 74, 37, 49, 58, 66, 64, 63, 82, 64, 66, 70, 57, 40, 87, 52, 72, 64, 62, 69, 52, 45, 70, 52, 83, 60, 52, 50, 92, 59, 61, 87, 69, 58, 72, 76, 53, 48, 53, 55, 76, 41, 73, 99, 48, 61, 55, 68, 70, 52, 71, 88, 63, 71, 57, 68, 66, 45, 67, 62, 95, 76, 41, 77, 76, 54, 85, 57, 50, 82, 65, 41, 46, 64, 43, 63, 57, 83, 47, 62, 55, 96, 50, 87, 79, 72, 93, 62, 51, 72, 102, 48, 63, 42, 73, 54, 56, 78, 83, 48, 42, 73, 64, 73, 129, 49, 106, 58, 47, 89, 65, 85, 54, 53, 62, 74, 76, 48, 57, 58, 65, 55, 92, 68, 90, 80, 89, 75, 92, 54, 68, 58, 66, 74, 142, 57, 58, 45, 52, 53, 43, 64, 52, 51, 68, 87, 86, 38, 74, 51, 71, 60, 57, 63, 55, 68, 81, 48, 58, 60, 110, 65, 75, 56, 61, 70, 77, 78, 66, 74, 136, 67, 34, 83, 71, 71, 79, 61, 76, 50, 71, 90, 66, 46, 50, 47, 68, 83, 68, 56, 48, 68, 86, 79, 69, 64, 72, 74, 93, 71, 113, 118, 66, 57, 68, 74, 69, 62, 69, 76, 81, 51, 69, 66, 65, 98, 68, 79, 85, 37, 76, 61, 55, 71, 72, 50, 90, 76, 77, 54, 60, 53, 56, 53, 75, 64, 54, 46, 83, 45, 64, 93, 40, 66, 67, 77, 73, 56, 54, 66, 62, 69, 72, 72, 80, 90, 63, 120, 66, 63, 52, 68, 73, 63, 61, 55, 101, 68, 70, 62, 74, 90, 43, 69, 61, 53, 70, 64, 67, 60, 58, 59, 113, 59, 71, 42, 71, 27, 85, 58, 71, 60, 48, 57, 62, 86, 62, 55, 54, 102, 38, 78, 70, 79, 74, 63, 86, 60, 72, 51, 58, 67, 43, 89, 78, 59, 47, 61, 79, 50, 51, 89, 53, 60, 56, 58, 61, 87, 75, 52, 64, 68, 53, 107, 92, 33, 76, 84, 66, 56, 65, 85, 60, 63, 65, 73, 58, 42, 59, 70, 47, 79, 69, 41, 58, 96, 58, 76, 55, 32, 72, 70, 58, 80, 58, 61, 79, 57, 69, 44, 62, 72, 66, 42, 71, 63, 60, 77, 67, 54, 68, 52, 87, 70, 74, 43, 60, 99, 53, 51, 64, 49, 50, 61, 
69, 56, 73, 88, 56, 74, 54, 59, 77, 98, 57, 38, 46, 54, 51, 58, 61, 56, 58, 51, 78, 46, 53, 61, 79, 57, 81, 54, 67, 67, 91, 63, 70, 63, 84, 67, 63, 76, 67, 72, 104, 71, 45, 72, 71, 54, 64, 59, 82, 79, 47, 66, 67, 42, 58, 61, 45, 78, 63, 44, 48, 68, 58, 56, 69, 76, 58, 57, 97, 42, 52, 86, 45, 50, 67, 69, 69, 63, 59, 59, 80, 77, 57, 50, 65, 63, 70, 53, 76, 48, 61, 66, 62, 49, 60, 79, 84, 57, 77, 71, 90, 51, 64, 87, 38, 50, 63, 52, 59, 52, 77, 149, 64, 60, 51, 83, 93, 62, 59, 91, 45, 62, 61, 41, 63, 86, 94, 71, 77, 31, 64, 65, 93, 55, 47, 74, 92, 53, 55, 73, 42, 57, 62, 64, 76, 56, 63, 58, 75, 72, 84, 75, 66, 81, 57, 75, 76, 102, 52, 67, 65, 77, 45, 55, 104, 54, 59, 69, 106, 46, 58, 105, 75, 58, 72, 65, 73, 77, 73, 58, 58, 61, 46, 91, 74, 74, 77, 70, 65, 97, 57, 100, 60, 61, 68, 93, 63, 47, 45, 48, 61, 89, 50, 64, 51, 56, 103, 77, 67, 50, 55, 73, 64, 93, 97, 91, 42, 72, 79, 69, 61, 70, 94, 55, 61, 63, 52, 68, 60, 49, 63, 63, 49, 62, 74, 54, 70, 105, 56, 79, 81, 76, 73, 62, 119, 59, 57, 42, 60, 60, 63, 62, 47, 67, 68, 55, 64, 102, 65, 69, 68, 78, 80, 46, 47, 55, 68, 55, 48, 73, 59, 64, 92, 69, 52, 61, 67, 89, 65, 79, 76, 74, 71, 93, 38, 60, 59, 73, 80, 69, 59, 72, 64, 65, 79, 101, 38, 57, 73, 83, 95, 75, 71, 71, 46, 75, 59, 60, 55, 68, 64, 80, 60, 60, 47, 76, 57, 88, 62, 60, 60, 56, 72, 65, 61, 62, 75, 51, 50, 80, 57, 58, 90, 47, 82, 68, 99, 46, 54, 52, 78, 60, 86, 62, 67, 57, 61, 60, 56, 85, 54, 68, 56, 43, 70, 99, 47, 62, 48, 63, 98, 59, 67, 56, 58, 72, 76, 58, 66, 77, 50, 51, 68, 85, 44, 65, 62, 56, 80, 61, 53, 80, 62, 36, 55, 66, 54, 84, 43, 72, 65, 69, 76, 86, 85, 81, 49, 61, 87, 55, 108, 51, 47, 79, 48, 84, 62, 84, 46, 64, 68, 68, 58, 43, 60, 51, 63, 80, 75, 107, 69, 61, 61, 64, 53, 64, 105, 50, 94, 67, 46, 57, 53, 56, 62, 61, 58, 53, 79, 70, 71, 58, 93, 61, 50, 78, 76, 55, 55, 62, 70, 58, 64, 76, 76, 97, 62, 44, 67, 95, 73, 59, 51, 52, 71, 45, 63, 38, 64, 61, 64, 66, 63, 62, 65, 64, 66, 71, 66, 71, 85, 54, 66, 82, 80, 86, 83, 67, 64, 46, 44, 75, 60, 71, 67, 89, 66, 67, 103, 47, 89, 54, 37, 60, 91, 54, 54, 65, 66, 66, 75, 58, 61, 61, 50, 60, 54, 73, 80, 59, 65, 96, 59, 58, 81, 51, 85, 54, 57, 39, 82, 75, 76, 58, 58, 55, 62, 48, 90, 48, 48, 61, 66, 62, 61, 39, 71, 70, 64, 45, 55, 65, 88, 67, 79, 72, 79, 58, 58, 64, 61, 101, 59, 55, 51, 86, 40, 57, 76, 43, 61, 68, 59, 59, 51, 94, 81, 71, 67, 61, 70, 59, 65, 59, 57, 64, 56, 44, 62, 60, 83, 49, 55, 57, 34, 44, 49, 73, 53, 80, 60, 76, 78, 64, 89, 51, 70, 39, 65, 56, 70, 47, 50, 71, 65, 57, 75, 63, 42, 110, 69, 58, 59, 67, 41, 83, 61, 64, 104, 92, 53, 85, 62, 56, 71, 35, 48, 55, 76, 60, 63, 76, 96, 70, 57, 71, 86, 79, 70, 64, 66, 52, 88, 54, 63, 61, 55, 60, 85, 63, 54, 51, 72, 59, 91, 71, 69, 48, 52, 55, 86, 87, 60, 87, 39, 72, 82, 65, 121, 54, 75, 62, 122, 104, 68, 64, 65, 94, 82, 62, 55, 52, 67, 55, 59, 62, 70, 65, 64, 77, 62, 43, 89, 44, 74, 80, 57, 63, 48, 48, 60, 69, 57, 74, 57, 64, 70, 50, 83, 32, 80, 72, 67, 68, 61, 84, 52, 73, 68, 90, 73, 71, 57, 58, 48, 52, 69, 57, 56, 62, 82, 69, 60, 75, 77, 77, 82, 67, 74, 73, 57, 56, 53, 51, 70, 85, 79, 58, 76, 72, 70, 76, 56, 76, 68, 72, 65, 72, 81, 62, 55, 47, 87, 59, 75, 95, 51, 69, 83, 81, 49, 81, 55, 80, 58, 90, 54, 100, 79, 60, 74, 39, 36, 69, 78, 54, 88, 51, 62, 101, 54, 42, 80, 72, 67, 67, 41, 51, 65, 70, 65, 67, 75, 50, 45, 63, 67, 91, 71, 61, 60, 61, 60, 88, 60, 64, 62, 54, 98, 73, 57, 77, 81, 70, 38, 51, 69, 76, 48, 56, 91, 58, 42, 59, 49, 76, 60, 62, 101, 81, 72, 73, 70, 65, 67, 38, 61, 85, 76, 49, 70, 96, 48, 55, 48, 78, 57, 60, 50, 84, 86, 44, 57, 57, 52, 67, 59, 68, 63, 44, 104, 78, 
78, 58, 54, 50, 56, 53, 72, 56, 65, 61, 59, 58, 68, 72, 48, 71, 67, 81, 66, 64, 41, 61, 91, 63, 51, 58, 59, 91, 64, 56, 66, 58, 56, 50, 61, 53, 81, 72, 75, 61, 73, 110, 88, 68, 58, 87, 59, 66, 79, 57, 61, 54, 68, 69, 72, 65, 73, 47, 48, 45, 64, 59, 64, 62, 59, 72, 69, 76, 65, 58, 65, 34, 64, 83, 63, 55, 63, 70, 82, 54, 68, 60, 55, 54, 67, 60, 81, 71, 64, 79, 73, 44, 75, 51, 66, 77, 57, 71, 53, 72, 66, 103, 70, 33, 46, 79, 82, 96, 125, 58, 53, 68, 62, 85, 51, 66, 73, 92, 55, 67, 45, 50, 58, 67, 59, 44, 73, 53, 70, 82, 46, 60, 74, 54, 68, 63, 56, 44, 69, 78, 89, 67, 67, 58, 69, 62, 70, 76, 52, 87, 83, 82, 61, 69, 68, 76, 75, 59, 47, 71, 59, 50, 60, 61, 75, 125, 101, 59, 53, 67, 86, 66, 71, 67, 75, 64, 64, 60, 53, 60, 56, 60, 58, 96, 74, 58, 69, 65, 60, 86, 68, 57, 76, 70, 94, 86, 63, 88, 59, 58, 54, 83, 50, 39, 67, 52, 75, 63, 69, 43, 62, 60, 49, 78, 72, 72, 63, 64, 81, 83, 89, 48, 57, 54, 70, 52, 78, 66, 66, 66, 35, 75, 37, 73, 79, 64, 78, 41, 75, 49, 63, 63, 57, 61, 54, 51, 58, 64, 63, 92, 61, 69, 59, 92, 49, 65, 52, 50, 75, 62, 82, 55, 58, 82, 70, 73, 36, 68, 89, 59, 52, 118, 59, 72, 46, 70, 65, 60, 85, 62, 69, 84, 70, 84, 47, 85, 75, 61, 74, 44, 80, 69, 74, 67, 96, 58, 64, 61, 132, 58, 52, 57, 78, 57, 53, 89, 137, 70, 98, 64, 78, 60, 68, 60, 68, 50, 54, 59, 54, 65, 39, 82, 59, 79, 36, 64, 72, 84, 79, 67, 67, 69, 57, 52, 75, 50, 68, 74, 60, 48, 67, 42, 58, 51, 70, 58, 75, 60, 52, 32, 57, 70, 62, 50, 84, 63, 78, 69, 86, 66, 65, 56, 94, 59, 98, 49, 77, 74, 72, 36, 52, 82, 74, 46, 53, 82, 68, 72, 45, 60, 64, 81, 90, 39, 57, 75, 82, 62, 69, 68, 55, 65, 56, 67, 62, 68, 75, 69, 83, 71, 64, 68, 64, 61, 62, 110, 43, 76, 59, 48, 65, 67, 73, 63, 76, 57, 63, 76, 52, 63, 103, 56, 45, 88, 65, 77, 78, 63, 75, 59, 44, 44, 54, 84, 66, 91, 60, 55, 86, 69, 73, 57, 41, 52, 53, 43, 87, 108, 71, 59, 57, 55, 53, 80, 79, 82, 75, 61, 64, 73, 74, 53, 73, 70, 72, 53, 93, 74, 60, 58, 81, 59, 70, 92, 68, 53, 68, 94, 69, 37, 80, 53, 66, 68, 56, 96, 60, 70, 55, 48, 60, 72, 49, 46, 72, 45, 75, 46, 61, 66, 61, 80, 80, 67, 52, 65, 63, 59, 60, 61, 62, 76, 52, 78, 80, 81, 52, 71, 63, 84, 61, 38, 78, 49, 75, 72, 70, 73, 74, 67, 66, 50, 53, 95, 57, 45, 64, 62, 62, 53, 42, 63, 79, 84, 57, 60, 61, 66, 69, 63, 71, 70, 98, 79, 39, 66, 55, 32, 47, 63, 62, 42, 62, 76, 40, 61, 51, 60, 47, 55, 87, 74, 68, 66, 45, 60, 48, 56, 62, 80, 68, 59, 58, 61, 51, 72, 41, 60, 54, 57, 72, 66, 50, 58, 76, 83, 79, 49, 76, 77, 65, 119, 70, 78, 78, 55, 56, 76, 79, 71, 56, 85, 60, 70, 60, 81, 50, 50, 61, 50, 70, 59, 68, 66, 64, 62, 87, 69, 64, 64, 71, 67, 58, 63, 64, 62, 65, 66, 96, 58, 94, 84, 53, 58, 48, 75, 61, 58, 57, 62, 53, 66, 61, 72, 43, 59, 50, 74, 71, 52, 74, 88, 56, 67, 71, 74, 63, 85, 72, 64, 61, 59, 54, 75, 83, 69, 58, 58, 63, 50, 55, 61, 141, 92, 60, 63, 69, 70, 53, 70, 59, 57, 112, 73, 51, 79, 74, 47, 66, 72, 69, 64, 54, 66, 100, 58, 58, 53, 90, 73, 64, 36, 85, 88, 69, 55, 62, 117, 56, 41, 62, 73, 49, 61, 55, 66, 51, 87, 67, 53, 73, 43, 75, 65, 62, 58, 32, 70, 68, 44, 63, 48, 62, 65, 64, 72, 62, 55, 87, 83, 47, 36, 72, 81, 79, 50, 36, 73, 51, 63, 76, 81, 58, 54, 88, 43, 114, 64, 56, 72, 49, 82, 62, 82, 49, 56, 62, 66, 61, 63, 94, 59, 54, 68, 93, 120, 69, 51, 67, 63, 50, 75, 53, 52, 62, 73, 47, 77, 50, 79, 60, 67, 43, 73, 57, 57, 77, 67, 36, 64, 64, 58, 62, 107, 67, 85, 62, 71, 75, 55, 60, 81, 69, 84, 84, 54, 72, 40, 48, 79, 80, 82, 91, 64, 110, 76, 63, 37, 144, 64, 87, 65, 49, 49, 69, 76, 59, 74, 137, 53, 67, 63, 57, 81, 69, 41, 78, 74, 57, 75, 75, 97, 57, 69, 66, 65, 74, 80, 43, 60, 74, 79, 80, 73, 57, 61, 45, 69, 59, 77, 43, 49, 97, 
61, 71, 55, 54, 82, 58, 59, 97, 52, 61, 112, 74, 92, 54, 95, 55, 76, 71, 44, 65, 70, 63, 66, 65, 67, 45, 47, 55, 66, 50, 89, 70, 70, 103, 110, 54, 67, 71, 75, 61, 52, 65, 66, 74, 51, 82, 72, 63, 74, 78, 58, 79, 54, 68, 85, 67, 63, 61, 69, 46, 70, 57, 57, 88, 65, 76, 57, 63, 61, 68, 70, 120, 71, 64, 67, 66, 39, 69, 58, 56, 75, 42, 62, 53, 65, 85, 55, 54, 90, 65, 50, 70, 66, 88, 55, 64, 73, 55, 70, 86, 50, 65, 71, 59, 59, 56, 58, 66, 56, 81, 49, 52, 78, 48, 61, 77, 75, 58, 73, 77, 75, 55, 73, 54, 46, 51, 64, 88, 86, 67, 62, 68, 62, 66, 93, 86, 64, 69, 67, 47, 70, 72, 78, 64, 54, 48, 66, 77, 61, 62, 70, 89, 54, 56, 107, 57, 71, 49, 55, 54, 84, 55, 64, 84, 101, 67, 70, 46, 100, 51, 66, 59, 68, 92, 76, 49, 54, 68, 64, 58, 61, 61, 60, 86, 68, 58, 38, 64, 50, 60, 40, 105, 62, 63, 43, 47, 41, 63, 58, 50, 67, 64, 51, 70, 63, 52, 81, 82, 88, 82, 67, 64, 88, 70, 52, 84, 59, 140, 49, 78, 65, 83, 68, 57, 62, 25, 89, 63, 69, 48, 49, 79, 81, 70, 69, 74, 42, 64, 52, 56, 72, 50, 54, 80, 64, 60, 55, 44, 73, 67, 74, 60, 60, 93, 78, 74, 61, 76, 61, 68, 76, 70, 56, 81, 53, 65, 83, 63, 82, 62, 52, 70, 61, 52, 52, 52, 92, 62, 68, 87, 69, 72, 60, 81, 48, 71, 89, 36, 81, 67, 70, 68, 64, 72, 56, 68, 66, 68, 72, 74, 84, 69, 68, 57, 42, 52, 64, 64, 58, 78, 55, 54, 60, 75, 90, 50, 70, 76, 78, 49, 65, 48, 71, 67, 68, 74, 75, 61, 44, 63, 57, 64, 64, 78, 81, 52, 128, 59, 65, 67, 88, 57, 59, 63, 67, 65, 44, 72, 66, 62, 60, 49, 91, 61, 62, 88, 86, 82, 76, 54, 102, 67, 49, 53, 72, 78, 76, 80, 63, 65, 43, 65, 54, 49, 62, 105, 62, 56, 79, 65, 62, 76, 91, 49, 55, 55, 67, 54, 58, 64, 79, 54, 59, 60, 52, 66, 46, 86, 47, 57, 55, 79, 67, 62, 50, 75, 67, 65, 67, 80, 84, 75, 67, 48, 88, 73, 74, 73, 56, 71, 69, 71, 72, 85, 55, 84, 67, 50, 77, 139, 65, 70, 52, 63, 82, 88, 83, 70, 59, 69, 57, 83, 77, 60, 64, 63, 80, 88, 53, 49, 67, 84, 53, 52, 52, 94, 83, 60, 54, 67, 49, 34, 54, 83, 97, 56, 59, 83, 51, 67, 130, 57, 65, 58, 59, 66, 66, 62, 47, 51, 64, 64, 57, 59, 71, 68, 66, 67, 59, 66, 55, 54, 71, 60, 68, 64, 71, 55, 56, 51, 53, 60, 49, 63, 36, 52, 49, 77, 70, 57, 63, 64, 64, 38, 75, 73, 67, 54, 67, 63, 64, 70, 63, 70, 56, 71, 62, 68, 74, 79, 69, 65, 69, 71, 71, 61, 56, 59, 80, 59, 68, 60, 63, 64, 68, 48, 87, 56, 79, 50, 75, 68, 67, 80, 47, 59, 61, 83, 55, 69, 57, 63, 66, 68, 67, 51, 55, 60, 67, 56, 54, 55, 56, 102, 50, 51, 71, 87, 63, 52, 65, 95, 70, 65, 51, 76, 82, 89, 79, 67, 69, 52, 53, 67, 74, 53, 54, 83, 66, 78, 59, 78, 47, 58, 48, 63, 64, 51, 80, 61, 59, 69, 66, 92, 59, 59, 40, 75, 59, 63, 67, 56, 55, 70, 69, 87, 77, 67, 52, 53, 51, 61, 62, 74, 66, 72, 63, 46, 57, 66, 81, 63, 66, 58, 42, 83, 69, 78, 83, 39, 71, 66, 63, 75, 127, 77, 61, 73, 55, 57, 62, 71, 63, 87, 62, 85, 89, 68, 76, 47, 63, 60, 71, 68, 75, 65, 40, 106, 54, 71, 72, 69, 54, 73, 109, 47, 60, 60, 45, 57, 71, 54, 59, 50, 79, 98, 68, 102, 90, 49, 54, 54, 47, 75, 60, 62, 63, 63, 84, 69, 63, 52, 68, 77, 59, 78, 86, 52, 57, 58, 113, 59, 55, 64, 73, 45, 50, 68, 69, 54, 88, 63, 72, 66, 51, 69, 63, 60, 64, 81, 60, 56, 62, 86, 64, 69, 57, 76, 84, 63, 70, 65, 83, 85, 63, 59, 62, 62, 45, 90, 70, 66, 55, 59, 66, 68, 55, 67, 60, 58, 65, 66, 68, 73, 51, 65, 57, 62, 96, 62, 49, 71, 48, 100, 64, 87, 64, 64, 61, 63, 75, 49, 73, 65, 55, 65, 57, 59, 54, 76, 67, 77, 62, 63, 66, 58, 75, 79, 64, 57, 94, 74, 48, 65, 44, 80, 67, 49, 79, 80, 61, 75, 59, 85, 71, 88, 50, 65, 120, 60, 60, 48, 93, 71, 61, 58, 74, 50, 82, 60, 86, 67, 63, 57, 52, 62, 61, 52, 56, 74, 71, 64, 58, 62, 71, 58, 93, 47, 65, 75, 55, 64, 72, 71, 54, 56, 80, 46, 42, 70, 59, 60, 52, 78, 67, 52, 88, 65, 96, 57, 74, 63, 48, 
68, 53, 47, 65, 53, 72, 60, 94, 69, 61, 48, 66, 59, 63, 72, 101, 75, 64, 68, 61, 60, 58, 67, 59, 71, 55, 74, 73, 66, 72, 63, 63, 60, 57, 58, 58, 68, 73, 49, 82, 42, 87, 53, 65, 83, 51, 67, 68, 113, 66, 57, 49, 61, 53, 80, 68, 88, 62, 46, 118, 59, 77, 96, 62, 64, 74, 57, 66, 59, 67, 81, 58, 56, 55, 77, 61, 72, 52, 71, 70, 67, 70, 57, 80, 81, 59, 90, 63, 74, 69, 58, 72, 44, 64, 49, 66, 57, 83, 41, 63, 56, 57, 79, 106, 75, 91, 64, 43, 56, 60, 40, 64, 86, 63, 58, 86, 68, 66, 74, 83, 56, 83, 87, 108, 57, 66, 69, 59, 54, 72, 54, 63, 71, 67, 69, 60, 118, 50, 54, 37, 58, 47, 86, 56, 44, 40, 43, 58, 58, 66, 54, 69, 54, 109, 127, 55, 43, 69, 69, 58, 78, 86, 47, 91, 58, 71, 64, 68, 59, 60, 56, 49, 54, 51, 60, 93, 66, 64, 70, 66, 68, 56, 57, 68, 74, 63, 53, 67, 95, 80, 85, 45, 52, 76, 69, 65, 62, 70, 95, 51, 69, 58, 64, 95, 73, 73, 67, 62, 83, 41, 81, 65, 62, 75, 63, 61, 60, 66, 57, 44, 65, 55, 64, 58, 57, 51, 67, 64, 77, 68, 51, 51, 57, 52, 57, 63, 70, 43, 59, 52, 62, 84, 73, 78, 45, 56, 57, 73, 89, 64, 66, 60, 66, 63, 65, 70, 68, 55, 65, 62, 55, 66, 53, 56, 64, 54, 60, 52, 60, 74, 83, 61, 82, 58, 102, 57, 66, 43, 73, 73, 84, 56, 76, 57, 58, 71, 60, 49, 70, 56, 61, 72, 49, 71, 66, 64, 76, 50, 72, 62, 92, 59, 62, 63, 58, 51, 52, 86, 66, 46, 166, 66, 68, 72, 54, 47, 66, 75, 43, 86, 67, 57, 68, 64, 44, 57, 65, 116, 70, 57, 77, 95, 70, 56, 55, 71, 181, 73, 49, 45, 54, 61, 69, 53, 49, 56, 67, 60, 56, 64, 50, 69, 89, 57, 59, 61, 70, 55, 67, 63, 62, 62, 68, 57, 52, 95, 68, 67, 79, 70, 70, 45, 54, 60, 60, 59, 55, 75, 71, 55, 74, 106, 66, 67, 48, 58, 48, 91, 62, 41, 79, 66, 80, 53, 61, 52, 80, 66, 57, 57, 45, 84, 61, 91, 97, 66, 54, 68, 49, 82, 95, 69, 59, 53, 79, 78, 77, 53, 54, 59, 65, 70, 70, 48, 73, 57, 55, 68, 66, 68, 66, 70, 60, 72, 75, 42, 74, 63, 55, 99, 74, 127, 107, 64, 51, 63, 55, 57, 62, 100, 69, 56, 56, 62, 59, 74, 87, 80, 60, 69, 68, 93, 46, 61, 77, 80, 68, 47, 75, 57, 64, 63, 102, 70, 71, 59, 55, 59, 83, 79, 51, 60, 67, 48, 67, 47, 78, 54, 43, 61, 58, 74, 65, 72, 52, 74, 73, 62, 62, 53, 90, 59, 104, 74, 55, 84, 56, 67, 52, 100, 58, 82, 51, 57, 59, 52, 71, 55, 69, 85, 62, 64, 54, 45, 64, 55, 85, 74, 56, 77, 78, 60, 80, 53, 77, 60, 75, 74, 128, 66, 56, 86, 63, 68, 95, 59, 78, 60, 84, 50, 74, 85, 84, 74, 53, 37, 65, 62, 68, 73, 71, 69, 100, 64, 94, 60, 94, 54, 58, 64, 43, 59, 51, 62, 71, 79, 53, 55, 70, 67, 82, 66, 66, 104, 52, 56, 66, 71, 68, 71, 65, 64, 59, 50, 72, 59, 73, 71, 57, 70, 69, 71, 73, 63, 55, 61, 52, 62, 77, 69, 51, 46, 50, 59, 66, 48, 61, 66, 76, 64, 72, 98, 61, 58, 70, 44, 59, 58, 65, 67, 58, 65, 74, 67, 148, 70, 52, 68, 51, 57, 71, 70, 76, 50, 51, 60, 84, 78, 51, 65, 76, 55, 75, 112, 62, 56, 64, 79, 71, 70, 60, 45, 57, 75, 83, 64, 49, 71, 66, 60, 92, 63, 57, 68, 55, 69, 69, 66, 84, 55, 71, 64, 65, 55, 62, 64, 63, 75, 83, 68, 64, 60, 92, 76, 70, 65, 72, 57, 77, 62, 72, 67, 62, 65, 84, 63, 106, 64, 55, 58, 56, 70, 62, 58, 84, 45, 68, 53, 66, 65, 58, 57, 68, 60, 79, 71, 86, 53, 57, 54, 61, 92, 68, 37, 59, 50, 60, 66, 68, 63, 30, 72, 71, 65, 60, 76, 61, 55, 51, 40, 47, 72, 71, 47, 71, 64, 60, 61, 56, 68, 64, 51, 72, 65, 50, 69, 72, 65, 63, 58, 67, 90, 70, 50, 60, 58, 67, 61, 68, 67, 56, 86, 72, 62, 61, 72, 57, 75, 69, 99, 62, 75, 72, 50, 71, 57, 64, 75, 69, 78, 81, 82, 62, 63, 113, 64, 49, 100, 43, 61, 70, 54, 74, 58, 50, 63, 58, 60, 54, 79, 66, 63, 77, 65, 85, 64, 37, 59, 72, 61, 59, 73, 83, 81, 75, 77, 71, 70, 71, 44, 48, 54, 65, 40, 85, 71, 54, 59, 65, 49, 70, 76, 83, 64, 87, 57, 57, 80, 55, 98, 67, 53, 82, 52, 51, 81, 68, 134, 72, 39, 76, 72, 78, 56, 88, 73, 62, 46, 58, 41, 98, 
53, 52, 70, 47, 65, 70, 91, 73, 79, 57, 56, 50, 77, 71, 53, 64, 81, 75, 78, 75, 71, 69, 75, 63, 62, 85, 56, 64, 52, 65, 71, 64, 60, 60, 66, 67, 52, 70, 74, 77, 62, 65, 62, 70, 73, 75, 64, 67, 80, 68, 58, 81, 61, 90, 85, 66, 61, 84, 53, 70, 69, 66, 68, 66, 54, 80, 64, 79, 59, 49, 62, 53, 66, 60, 79, 71, 57, 62, 58, 53, 71, 62, 78, 75, 48, 76, 74, 60, 55, 75, 90, 53, 91, 73, 75, 70, 60, 65, 71, 67, 79, 88, 68, 63, 60, 61, 62, 61, 62, 55, 57, 73, 52, 71, 63, 80, 68, 62, 74, 86, 73, 59, 77, 40, 47, 57, 68, 61, 42, 77, 74, 51, 107, 47, 71, 57, 76, 88, 80, 61, 62, 64, 55, 81, 62, 73, 70, 63, 84, 86, 57, 85, 60, 77, 77, 89, 64, 74, 79, 74, 72, 71, 56, 55, 53, 65, 74, 71, 69, 73, 75, 54, 73, 50, 73, 64, 48, 72, 52, 59, 75, 63, 69, 75, 76, 52, 57, 89, 61, 51, 62, 70, 63, 77, 75, 72, 57, 65, 85, 70, 53, 69, 68, 47, 77, 62, 52, 82, 72, 56, 94, 90, 65, 58, 62, 72, 63, 51, 58, 60, 59, 97, 73, 57, 80, 76, 100, 79, 84, 70, 55, 69, 52, 63, 65, 82, 105, 51, 60, 85, 67, 69, 72, 76, 93, 68, 54, 68, 58, 76, 71, 67, 57, 63, 76, 75, 83, 52, 59, 62, 58, 56, 69, 66, 79, 54, 69, 55, 57, 75, 111, 82, 81, 63, 67, 57, 50, 64, 70, 39, 59, 88, 63, 64, 51, 70, 74, 80, 48, 45, 57, 108, 74, 50, 54, 60, 72, 63, 54, 37, 63, 79, 35, 52, 63, 69, 56, 39, 73, 55, 66, 54, 89, 65, 63, 40, 79, 60, 67, 54, 40, 74, 65, 69, 60, 68, 62, 76, 80, 60, 47, 54, 77, 76, 52, 62, 58, 70, 59, 75, 49, 44, 68, 62, 62, 38, 59, 73, 108, 77, 82, 82, 64, 78, 92, 74, 44, 54, 74, 54, 54, 80, 57, 55, 55, 71, 48, 53, 130, 52, 42, 56, 66, 61, 58, 73, 46, 83, 61, 37, 65, 84, 66, 81, 41, 70, 66, 56, 69, 55, 47, 69, 72, 60, 58, 56, 54, 85, 93, 65, 81, 54, 56, 66, 53, 71, 60, 61, 70, 40, 54, 68, 119, 63, 88, 47, 60, 61, 72, 81, 64, 62, 39, 65, 67, 65, 47, 64, 82, 68, 77, 84, 45, 83, 53, 51, 62, 53, 60, 39, 63, 65, 53, 56, 48, 49, 36, 55, 46, 59, 73, 44, 60, 58, 96, 76, 63, 69, 52, 72, 62, 78, 79, 49, 84, 35, 101, 53, 39, 91, 88, 24, 50, 74, 75, 65, 53, 59, 63, 38, 51, 47, 49, 75, 68, 79, 81, 76, 66, 40, 50, 73, 74, 47, 50, 64, 64, 67, 70, 68, 72, 48, 58, 56, 71, 53, 60, 134, 78, 48, 61, 65, 79, 54, 56, 72, 51, 72, 85, 68, 69, 60, 78, 55, 70, 97, 81, 70, 52, 60, 49, 123, 52, 58, 89, 58, 52, 68, 78, 65, 57, 85, 67, 93, 59, 87, 78, 59, 97, 52, 58, 40, 68, 70, 83, 45, 53, 69, 58, 86, 61, 73, 55, 54, 46, 76, 62, 48, 69, 73, 60, 75, 71, 75, 61, 72, 72, 35, 68, 66, 68, 77, 83, 87, 75, 86, 53, 105, 63, 51, 58, 63, 61, 57, 66, 51, 63, 93, 75, 60, 50, 72, 145, 58, 46, 58, 66, 71, 103, 48, 52, 59, 49, 95, 62, 40, 59, 66, 55, 68, 62, 45, 55, 85, 51, 78, 74, 33, 55, 78, 74, 76, 80, 66, 63, 70, 56, 51, 92, 72, 59, 74, 65, 84, 74, 79, 39, 84, 111, 101, 59, 76, 64, 62, 63, 83, 66, 50, 60, 56, 67, 71, 46, 73, 90, 122, 62, 73, 61, 71, 55, 92, 59, 53, 50, 60, 68, 55, 67, 46, 65, 57, 68, 97, 74, 74, 64, 90, 69, 65, 71, 70, 72, 71, 55, 60, 68, 50, 60, 51, 66, 67, 57, 87, 56, 86, 56, 52, 58, 70, 55, 62, 50, 65, 57, 73, 43, 62, 56, 81, 77, 64, 91, 68, 52, 70, 56, 45, 43, 68, 61, 65, 80, 46, 59, 80, 61, 54, 57, 51, 60, 66, 75, 72, 64, 49, 71, 58, 73, 56, 94, 77, 77, 73, 60, 41, 57, 53, 86, 53, 50, 80, 38, 55, 71, 89, 123, 78, 70, 52, 53, 66, 50, 46, 69, 45, 80, 99, 63, 49, 68, 53, 74, 73, 65, 89, 71, 74, 79, 74, 63, 54, 71, 67, 68, 71, 68, 70, 66, 44, 47, 67, 67, 59, 60, 76, 62, 52, 42, 74, 53, 80, 45, 58, 102, 61, 64, 52, 55, 96, 50, 72, 55, 52, 70, 55, 56, 48, 99, 57, 56, 50, 59, 68, 55, 67, 38, 77, 45, 73, 53, 83, 69, 54, 69, 54, 69, 66, 53, 45, 63, 80, 65, 50, 63, 50, 43, 68, 56, 61, 90, 43, 53, 58, 50, 85, 88, 61, 85, 75, 60, 78, 64, 43, 61, 59, 76, 50, 56, 65, 56, 81, 68, 66, 
102, 52, 59, 51, 66, 70, 79, 51, 85, 83, 68, 57, 70, 52, 82, 55, 75, 90, 73, 55, 61, 59, 79, 64, 60, 68, 84, 76, 75, 63, 56, 60, 54, 56, 69, 64, 80, 62, 45, 80, 60, 73, 49, 65, 54, 60, 67, 71, 60, 95, 60, 60, 55, 63, 60, 90, 68, 66, 67, 65, 68, 40, 72, 69, 68, 74, 70, 56, 84, 69, 47, 82, 54, 84, 65, 59, 70, 50, 76, 78, 56, 71, 65, 66, 70, 56, 69, 68, 48, 61, 60, 66, 41, 54, 67, 79, 67, 67, 81, 60, 64, 78, 48, 62, 59, 51, 53, 36, 56, 93, 55, 93, 70, 58, 59, 70, 48, 52, 78, 78, 66, 66, 76, 50, 85, 57, 56, 94, 63, 65, 58, 72, 78, 78, 45, 63, 51, 86, 79, 94, 72, 61, 69, 63, 51, 60, 59, 69, 69, 73, 68, 55, 41, 71, 59, 56, 62, 66, 65, 55, 57, 96, 65, 76, 75, 55, 85, 101, 75, 70, 72, 47, 55, 59, 71, 92, 88, 74, 50, 95, 84, 62, 92, 64, 76, 68, 53, 76, 78, 48, 63, 45, 82, 51, 54, 63, 78, 77, 45, 40, 63, 62, 84, 58, 78, 72, 57, 72, 58, 46, 62, 88, 71, 60, 53, 89, 79, 55, 57, 46, 107, 52, 52, 94, 82, 60, 73, 55, 64, 91, 50, 64, 77, 50, 58, 77, 65, 73, 106, 52, 53, 69, 60, 60, 95, 58, 65, 58, 60, 58, 55, 72, 60, 43, 79, 81, 68, 69, 108, 83, 50, 66, 63, 53, 77, 50, 58, 74, 86, 77, 71, 75, 77, 91, 63, 70, 82, 53, 79, 64, 51, 71, 79, 82, 50, 87, 57, 77, 63, 62, 85, 88, 59, 95, 82, 45, 65, 48, 67, 106, 55, 49, 82, 57, 47, 103, 54, 119, 45, 69, 63, 68, 50, 71, 44, 63, 73, 96, 64, 79, 41, 86, 77, 86, 61, 60, 71, 92, 55, 43, 57, 56, 45, 64, 69, 56, 57, 56, 70, 63, 62, 93, 56, 72, 57, 57, 50, 71, 71, 86, 115, 50, 85, 59, 87, 56, 61, 62, 50, 64, 54, 58, 57, 66, 76, 77, 52, 60, 54, 61, 76, 68, 54, 48, 79, 103, 42, 72, 58, 73, 63, 68, 64, 64, 46, 51, 61, 82, 73, 59, 60, 83, 89, 61, 84, 57, 74, 78, 85, 60, 62, 66, 62, 66, 45, 75, 57, 60, 39, 73, 95, 99, 68, 56, 54, 54, 66, 90, 68, 66, 75, 48, 86, 52, 83, 123, 61, 82, 55, 67, 50, 52, 64, 70, 73, 52, 68, 84, 55, 59, 45, 66, 82, 60, 89, 62, 74, 36, 52, 76, 72, 74, 82, 60, 48, 55, 85, 73, 73, 74, 62, 73, 62, 70, 65, 46, 73, 65, 56, 52, 88, 70, 72, 79, 66, 86, 68, 75, 65, 50, 77, 58, 72, 105, 62, 56, 71, 73, 54, 94, 63, 59, 55, 66, 62, 72, 94, 65, 53, 80, 65, 79, 49, 57, 30, 60, 62, 99, 37, 57, 75, 45, 74, 44, 69, 90, 50, 63, 68, 68, 55, 105, 57, 131, 65, 60, 53, 60, 82, 53, 73, 78, 52, 89, 94, 60, 49, 51, 40, 84, 59, 83, 40, 55, 74, 59, 70, 66, 87, 42, 60, 64, 83, 53, 70, 58, 53, 79, 49, 61, 44, 51, 43, 61, 97, 53, 60, 61, 74, 65, 56, 95, 94, 54, 69, 43, 63, 71, 56, 94, 68, 80, 75, 41, 62, 67, 60, 64, 74, 63, 55, 90, 55, 76, 66, 71, 117, 60, 39, 71, 50, 57, 53, 91, 41, 113, 57, 67, 47, 61, 51, 96, 49, 57, 59, 71, 46, 61, 56, 58, 89, 82, 111, 58, 63, 78, 48, 93, 61, 64, 69, 49, 53, 67, 46, 75, 56, 63, 55, 70, 75, 48, 82, 75, 67, 66, 37, 75, 81, 69, 64, 83, 83, 69, 57, 94, 52, 72, 80, 37, 51, 69, 70, 50, 118, 57, 88, 63, 72, 64, 60, 73, 75, 72, 55, 54, 62, 61, 60, 61, 77, 39, 57, 75, 59, 89, 63, 72, 59, 45, 57, 60, 69, 64, 100, 83, 74, 58, 51, 72, 74, 53, 51, 68, 53, 61, 61, 64, 67, 89, 60, 84, 76, 65, 75, 60, 68, 128, 58, 56, 106, 47, 55, 54, 97, 56, 56, 82, 53, 58, 73, 96, 57, 50, 89, 61, 74, 60, 103, 68, 118, 71, 57, 55, 59, 84, 80, 70, 58, 58, 64, 61, 66, 71, 70, 92, 73, 70, 58, 75, 76, 55, 74, 82, 59, 55, 71, 70, 56, 65, 90, 83, 62, 54, 77, 51, 58, 67, 62, 57, 54, 79, 66, 60, 128, 67, 53, 90, 84, 114, 48, 65, 46, 54, 78, 95, 66, 63, 81, 52, 70, 68, 67, 83, 43, 154, 68, 71, 65, 82, 105, 65, 68, 85, 51, 51, 33, 64, 68, 55, 53, 56, 71, 36, 65, 61, 52, 53, 54, 65, 87, 72, 68, 61, 77, 63, 60, 70, 60, 46, 147, 55, 46, 84, 92, 67, 70, 61, 55, 59, 69, 74, 117, 48, 62, 46, 76, 99, 58, 48, 58, 57, 67, 70, 69, 82, 90, 51, 47, 92, 75, 71, 89, 53, 61, 51, 66, 54, 62, 57, 53, 
89, 54, 84, 62, 55, 51, 93, 53, 103, 114, 60, 91, 57, 91, 58, 49, 55, 52, 53, 65, 45, 68, 63, 45, 68, 56, 61, 63, 59, 49, 54, 57, 75, 79, 61, 40, 74, 82, 64, 102, 51, 59, 84, 74, 56, 56, 69, 77, 65, 55, 65, 57, 66, 73, 74, 33, 63, 82, 52, 88, 73, 80, 63, 81, 53, 72, 94, 48, 92, 56, 41, 80, 45, 81, 39, 61, 45, 80, 68, 63, 68, 47, 58, 67, 56, 50, 53, 34, 101, 81, 72, 50, 80, 74, 69, 55, 80, 72, 63, 92, 40, 49, 50, 64, 70, 79, 63, 72, 103, 116, 68, 55, 63, 78, 76, 70, 60, 59, 73, 81, 67, 79, 69, 76, 98, 56, 37, 93, 66, 70, 66, 74, 89, 81, 57, 79, 42, 89, 68, 104, 48, 56, 52, 58, 36, 46, 57, 63, 48, 91, 66, 25, 64, 62, 97, 84, 84, 53, 88, 69, 70, 56, 83, 85, 62, 54, 86, 77, 50, 58, 104, 48, 83, 50, 67, 29, 59, 59, 62, 86, 68, 38, 67, 61, 48, 42, 53, 59, 60, 58, 79, 48, 58, 87, 60, 118, 60, 58, 71, 59, 58, 78, 67, 63, 110, 50, 35, 52, 26, 61, 64, 47, 70, 71, 109, 68, 45, 67, 119, 85, 53, 51, 47, 48, 51, 41, 53, 84, 41, 84, 83, 108, 78, 75, 67, 56, 66, 72, 82, 44, 132, 85, 73, 74, 53, 55, 77, 123, 63, 68, 72, 68, 56, 82, 59, 70, 92, 53, 47, 89, 63, 84, 54, 43, 47, 73, 62, 50, 64, 85, 46, 78, 41, 48, 70, 60, 64, 48, 71, 59, 49, 102, 88, 51, 95, 60, 65, 66, 82, 81, 58, 61, 62, 78, 57, 62, 47, 52, 78, 86, 46, 81, 59, 65, 67, 66, 65, 93, 44, 69, 63, 78, 35, 43, 54, 46, 50, 60, 64, 28, 63, 82, 65, 71, 78, 62, 78, 61, 57, 69, 66, 60, 58, 54, 75, 56, 67, 66, 54, 77, 99, 53, 72, 83, 42, 41, 40, 74, 82, 61, 84, 79, 61, 59, 58, 67, 53, 68, 61, 82, 85, 63, 72, 63, 99, 59, 47, 61, 50, 87, 78, 95, 102, 51, 52, 52, 88, 93, 110, 70, 50, 53, 91, 66, 52, 50, 143, 78, 67, 40, 86, 71, 62, 95, 81, 58, 47, 56, 56, 55, 79, 70, 60, 51, 62, 43, 54, 59, 77, 73, 78, 43, 83, 69, 66, 60, 47, 62, 93, 61, 77, 61, 43, 86, 50, 91, 65, 64, 74, 81, 42, 72, 77, 75, 58, 99, 47, 56, 46, 65, 63, 60, 61, 75, 77, 75, 54, 68, 52, 85, 57, 87, 43, 52, 44, 62, 53, 37, 93, 73, 100, 58, 40, 51, 69, 70, 47, 54, 86, 62, 57, 69, 74, 66, 53, 78, 83, 51, 62, 60, 51, 62, 78, 96, 41, 54, 36, 88, 63, 73, 72, 59, 62, 99, 84, 52, 49, 70, 58, 41, 62, 139, 37, 64, 83, 87, 60, 60, 67, 57, 50, 52, 63, 64, 63, 62, 93, 91, 115, 84, 51, 83, 52, 59, 68, 58, 58, 63, 75, 40, 74, 73, 68, 62, 64, 90, 55, 49, 32, 72, 58, 70, 56, 62, 63, 63, 49, 50, 47, 51, 56, 63, 75, 49, 76, 52, 56, 90, 52, 46, 76, 76, 46, 53, 43, 58, 90, 82, 49, 73, 79, 71, 84, 66, 67, 132, 46, 69, 77, 65, 45, 65, 78, 49, 76, 41, 59, 33, 69, 75, 52, 43, 43, 56, 99, 47, 58, 118, 64, 61, 29, 73, 73, 94, 79, 58, 104, 40, 62, 45, 44, 88, 68, 67, 152, 47, 133, 63, 74, 67, 109, 78, 47, 48, 49, 60, 88, 95, 65, 51, 74, 39, 64, 69, 55, 57, 68, 57, 47, 70, 64, 54, 63, 54, 57, 105, 62, 61, 76, 95, 60, 53, 78, 74, 52, 61, 61, 74, 72, 48, 60, 68, 81, 55, 64, 58, 62, 81, 67, 59, 57, 70, 56, 66, 67, 63, 66, 97, 61, 59, 67, 47, 55, 84, 71, 101, 95, 56, 84, 65, 77, 70, 61, 65, 48, 60, 75, 70, 51, 70, 64, 74, 78, 76, 66, 77, 63, 74, 61, 66, 49, 69, 62, 57, 57, 94, 74, 68, 50, 57, 70, 45, 53, 77, 62, 70, 55, 67, 73, 46, 58, 56, 89, 65, 59, 71, 57, 65, 71, 57, 52, 55, 50, 49, 53, 63, 61, 75, 58, 66, 47, 64, 77, 82, 73, 82, 59, 72, 65, 52, 75, 56, 68, 67, 41, 58, 61, 55, 58, 56, 62, 61, 55, 74, 54, 66, 57, 63, 66, 62, 63, 87, 45, 47, 65, 68, 63, 58, 61, 69, 72, 55, 75, 84, 69, 53, 59, 59, 68, 66, 51, 64, 67, 85, 66, 62, 80, 76, 50, 89, 64, 61, 72, 52, 71, 44, 75, 53, 68, 117, 53, 74, 60, 77, 64, 53, 56, 49, 61, 65, 63, 72, 62, 74, 71, 65, 53, 46, 70, 88, 93, 56, 63, 55, 68, 52, 88, 89, 59, 60, 63, 59, 66, 56, 65, 56, 58, 61, 46, 77, 63, 76, 57, 68, 50, 70, 76, 62, 54, 57, 61, 48, 73, 81, 58, 49, 61, 57, 66, 97, 52, 
62, 54, 48, 52, 65, 68, 81, 66, 53, 111, 75, 66, 58, 58, 93, 65, 53, 59, 66, 46, 51, 45, 63, 138, 61, 61, 68, 66, 69, 85, 57, 68, 59, 65, 117, 82, 51, 68, 68, 68, 61, 73, 67, 71, 69, 79, 56, 63, 81, 66, 64, 70, 69, 55, 130, 61, 62, 74, 65, 56, 78, 72, 50, 79, 56, 49, 62, 59, 72, 58, 71, 50, 58, 48, 69, 70, 65, 59, 66, 65, 70, 74, 73, 49, 67, 68, 62, 75, 56, 65, 41, 71, 54, 73, 52, 57, 80, 76, 60, 80, 75, 64, 68, 71, 57, 60, 56, 59, 70, 54, 74, 58, 64, 62, 57, 66, 94, 58, 118, 56, 69, 44, 71, 52, 65, 66, 64, 69, 78, 62, 58, 63, 62, 77, 82, 55, 113, 57, 64, 63, 55, 54, 72, 76, 67, 55, 50, 54, 65, 57, 61, 61, 71, 57, 55, 66, 73, 50, 80, 96, 56, 81, 79, 69, 71, 59, 46, 66, 63, 72, 61, 63, 62, 51, 67, 67, 105, 80, 63, 55, 63, 70, 65, 61, 68, 80, 61, 57, 61, 90, 69, 66, 57, 61, 55, 73, 68, 63, 52, 69, 53, 70, 62, 92, 87, 62, 67, 69, 37, 62, 65, 72, 83, 49, 110, 78, 44, 62, 39, 68, 90, 74, 46, 70, 72, 53, 72, 69, 63, 47, 68, 60, 61, 64, 74, 61, 61, 66, 67, 55, 58, 48, 94, 66, 69, 85, 81, 63, 54, 72, 98, 67, 55, 53, 50, 62, 74, 70, 63, 68, 66, 63, 62, 72, 67, 76, 53, 60, 83, 66, 44, 72, 50, 72, 46, 81, 65, 83, 68, 57, 61, 53, 71, 76, 72, 72, 69, 54, 75, 54, 55, 61, 73, 71, 60, 56, 60, 76, 70, 48, 50, 58, 63, 59, 59, 67, 68, 70, 57, 80, 58, 62, 69, 46, 54, 79, 53, 107, 47, 88, 54, 57, 81, 52, 55, 65, 56, 68, 56, 55, 69, 56, 53, 60, 60, 92, 62, 62, 52, 71, 67, 56, 87, 67, 147, 88, 56, 84, 54, 57, 138, 65, 97, 77, 95, 64, 48, 66, 71, 86, 112, 55, 84, 76, 87, 77, 58, 69, 72, 62, 66, 61, 81, 69, 54, 59, 151, 51, 61, 105, 47, 75, 41, 56, 75, 81, 65, 57, 49, 64, 92, 62, 56, 76, 74, 58, 98, 68, 54, 66, 74, 59, 65, 82, 68, 64, 71, 63, 94, 61, 68, 74, 80, 70, 67, 74, 84, 61, 64, 68, 62, 62, 67, 56, 66, 85, 98, 71, 55, 82, 73, 54, 79, 70, 51, 60, 63, 74, 51, 61, 67, 83, 69, 58, 80, 55, 60, 60, 50, 41, 50, 79, 53, 59, 63, 63, 79, 62, 69, 68, 79, 59, 69, 89, 55, 72, 87, 68, 61, 60, 73, 61, 75, 64, 53, 69, 60, 65, 61, 70, 69, 73, 71, 53, 58, 63, 58, 77, 62, 55, 56, 60, 63, 57, 49, 67, 61, 41, 66, 57, 73, 84, 82, 56, 66, 63, 91, 56, 38, 51, 74, 56, 84, 58, 70, 58, 93, 56, 61, 59, 56, 70, 64, 63, 62, 51, 55, 67, 65, 66, 69, 58, 60, 80, 51, 66, 70, 70, 124, 117, 58, 61, 64, 63, 55, 54, 72, 59, 57, 67, 75, 94, 66, 57, 64, 61, 59, 95, 63, 81, 55, 57, 68, 75, 64, 51, 68, 79, 59, 61, 62, 58, 66, 62, 54, 75, 94, 61, 67, 68, 66, 60, 76, 77, 67, 63, 98, 60, 64, 62, 53, 57, 99, 59, 85, 83, 83, 56, 72, 69, 57, 51, 112, 55, 68, 58, 73, 71, 67, 49, 36, 71, 57, 58, 65, 64, 42, 53, 69, 64, 54, 55, 62, 59, 89, 84, 70, 65, 82, 63, 61, 55, 92, 95, 58, 55, 51, 45, 51, 57, 76, 56, 55, 68, 60, 50, 57, 40, 57, 62, 77, 56, 65, 62, 62, 59, 73, 54, 55, 76, 57, 62, 65, 71, 64, 55, 77, 74, 70, 58, 65, 68, 52, 63, 64, 53, 56, 66, 69, 75, 52, 94, 93, 67, 66, 64, 63, 69, 52, 69, 61, 52, 47, 53, 61, 75, 62, 50, 64, 68, 75, 64, 81, 51, 83, 55, 69, 68, 70, 67, 67, 82, 48, 64, 73, 80, 58, 59, 67, 77, 72, 77, 82, 61, 51, 59, 69, 70, 68, 60, 68, 45, 67, 60, 64, 50, 59, 78, 83, 62, 66, 53, 48, 59, 57, 67, 85, 66, 50, 67, 71, 53, 72, 64, 78, 70, 55, 78, 63, 69, 52, 137, 43, 70, 81, 49, 49, 83, 44, 67, 67, 113, 52, 58, 57, 78, 64, 80, 55, 71, 49, 55, 71, 49, 58, 74, 69, 54, 87, 84, 64, 73, 58, 48, 60, 69, 63, 62, 65, 62, 64, 74, 69, 62, 43, 59, 81, 60, 83, 52, 65, 57, 79, 60, 67, 94, 61, 77, 53, 72, 61, 64, 48, 51, 69, 65, 55, 42, 61, 64, 77, 61, 59, 86, 52, 47, 52, 70, 72, 60, 79, 72, 49, 58, 96, 63, 56, 64, 77, 43, 57, 69, 56, 64, 70, 71, 81, 44, 79, 54, 68, 62, 64, 52, 74, 62, 65, 65, 58, 69, 62, 61, 72, 103, 84, 52, 65, 61, 69, 48, 57, 58, 93, 
79, 49, 68, 75, 53, 70, 68, 55, 65, 54, 67, 72, 64, 65, 58, 124, 59, 77, 50, 110, 67, 59, 69, 90, 68, 49, 86, 56, 83, 67, 78, 70, 67, 52, 51, 80, 59, 63, 69, 58, 65, 67, 89, 58, 60, 57, 72, 76, 67, 64, 91, 69, 77, 66, 46, 54, 64, 53, 53, 63, 63, 57, 67, 75, 54, 55, 76, 68, 57, 70, 58, 65, 68, 76, 83, 49, 68, 64, 61, 73, 66, 70, 75, 75, 58, 74, 78, 65, 64, 56, 79, 57, 51, 73, 50, 60, 57, 58, 80, 55, 63, 57, 54, 50, 63, 65, 73, 65, 60, 71, 65, 74, 77, 122, 77, 75, 63, 53, 45, 69, 63, 76, 63, 62, 50, 78, 86, 54, 84, 100, 73, 89, 75, 63, 59, 79, 68, 48, 71, 48, 64, 116, 69, 91, 63, 67, 69, 62, 54, 53, 54, 68, 52, 63, 50, 57, 57, 74, 64, 64, 51, 65, 99, 58, 71, 65, 69, 63, 59, 51, 81, 80, 69, 60, 55, 53, 88, 67, 70, 55, 60, 58, 46, 61, 54, 60, 54, 61, 60, 64, 59, 71, 58, 138, 59, 76, 54, 53, 70, 53, 66, 63, 62, 75, 60, 53, 70, 61, 58, 54, 57, 61, 55, 55, 70, 78, 69, 54, 74, 60, 57, 93, 66, 59, 52, 61, 63, 60, 56, 93, 65, 62, 58, 50, 72, 72, 95, 70, 120, 54, 65, 60, 56, 79, 65, 78, 69, 66, 62, 59, 80, 67, 63, 72, 65, 46, 76, 67, 54, 58, 77, 69, 42, 62, 57, 52, 83, 70, 71, 87, 61, 67, 57, 66, 64, 45, 46, 69, 74, 69, 68, 75, 67, 87, 46, 81, 58, 80, 59, 72, 63, 66, 69, 67, 62, 58, 69, 77, 66, 73, 45, 65, 68, 57, 70, 66, 58, 61, 58, 76, 59, 62, 65, 55, 54, 63, 55, 58, 68, 70, 77, 63, 79, 54, 58, 71, 74, 69, 57, 70, 52, 57, 86, 66, 70, 68, 58, 53, 65, 69, 83, 50, 56, 84, 62, 61, 64, 55, 51, 67, 86, 65, 68, 54, 73, 59, 65, 83, 55, 95, 68, 62, 64, 55, 48, 63, 71, 60, 69, 68, 55, 64, 77, 67, 59, 63, 85, 61, 66, 67, 44, 41, 63, 54, 57, 59, 89, 56, 65, 76, 65, 67, 64, 61, 66, 67, 63, 57, 62, 49, 67, 69, 56, 61, 92, 76, 56, 60, 71, 80, 70, 65, 69, 75, 79, 57, 74, 61, 66, 66, 69, 69, 56, 52, 67, 73, 60, 71, 79, 35, 66, 65, 60, 58, 47, 57, 69, 63, 66, 82, 61, 79, 62, 55, 65, 60, 72, 79, 76, 65, 57, 64, 82, 60, 73, 46, 58, 65, 58, 70, 66, 63, 64, 65, 45, 58, 70, 59, 46, 69, 55, 65, 125, 66, 81, 49, 70, 58, 66, 56, 49, 60, 57, 60, 72, 62, 55, 52, 54, 80, 66, 78, 56, 57, 54, 60, 59, 67, 85, 63, 64, 76, 69, 63, 54, 68, 70, 61, 77, 62, 73, 106, 71, 61, 53, 86, 78, 60, 71, 54, 64, 62, 46, 71, 88, 78, 63, 103, 49, 72, 50, 51, 60, 60, 47, 76, 65, 73, 60, 78, 58, 63, 88, 75, 67, 69, 50, 93, 68, 57, 72, 58, 68, 62, 58, 63, 60, 54, 68, 42, 45, 76, 111, 68, 75, 66, 68, 69, 54, 84, 89, 65, 66, 64, 89, 67, 68, 65, 74, 81, 72, 60, 108, 76, 67, 63, 47, 61, 76, 62, 58, 64, 45, 71, 75, 63, 74, 78, 59, 59, 58, 67, 68, 61, 73, 65, 65, 76, 62, 83, 62, 83, 61, 62, 68, 54, 66, 71, 71, 66, 64, 59, 69, 65, 71, 67, 77, 64, 61, 66, 67, 63, 76, 62, 66, 56, 63, 65, 62, 58, 60, 58, 94, 66, 53, 88, 61, 57, 68, 63, 64, 71, 79, 67, 67, 62, 67, 58, 73, 52, 50, 62, 74, 57, 84, 66, 63, 59, 71, 66, 57, 69, 60, 65, 59, 58, 49, 74, 69, 70, 72, 67, 68, 53, 70, 104, 63, 74, 60, 65, 56, 62, 73, 68, 54, 70, 116, 80, 53, 75, 72, 62, 58, 72, 88, 47, 79, 93, 57, 58, 71, 67, 59, 72, 67, 48, 66, 70, 60, 78, 68, 71, 54, 60, 49, 71, 64, 58, 69, 77, 51, 71, 65, 59, 67, 65, 62, 71, 66, 67, 61, 54, 64, 78, 61, 88, 65, 60, 55, 56, 69, 76, 105, 87, 74, 63, 91, 60, 59, 66, 62, 60, 50, 70, 103, 78, 77, 56, 73, 76, 80, 58, 68, 67, 72, 49, 87, 66, 71, 66, 60, 81, 77, 46, 66, 59, 66, 71, 58, 85, 97, 62, 65, 95, 56, 84, 50, 44, 58, 73, 69, 54, 60, 61, 72, 64, 42, 54, 56, 68, 72, 61, 66, 52, 67, 74, 70, 64, 75, 50, 77, 71, 66, 56, 56, 64, 60, 61, 74, 56, 54, 77, 71, 56, 84, 76, 75, 70, 77, 75, 58, 76, 74, 94, 92, 68, 61, 63, 51, 63, 60, 55, 78, 66, 63, 74, 55, 65, 56, 83, 69, 55, 64, 87, 70, 60, 65, 66, 71, 60, 53, 60, 67, 82, 78, 62, 71, 42, 61, 74, 80, 62, 71, 51, 
49, 70, 65, 58, 74, 61, 58, 68, 60, 75, 64, 56, 59, 68, 72, 62, 55, 57, 133, 51, 72, 62, 73, 62, 60, 65, 64, 68, 75, 67, 65, 66, 62, 69, 68, 67, 54, 52, 58, 49, 64, 56, 73, 50, 96, 95, 69, 64, 55, 57, 54, 61, 63, 53, 65, 55, 66, 65, 47, 61, 46, 60, 52, 51, 61, 92, 65, 44, 78, 53, 69, 64, 70, 55, 62, 60, 63, 65, 58, 50, 75, 90, 53, 53, 61, 78, 58, 68, 61, 64, 78, 81, 62, 56, 70, 59, 54, 83, 64, 59, 46, 55, 57, 78, 77, 42, 52, 91, 117, 77, 70, 79, 76, 80, 54, 78, 62, 66, 69, 60, 61, 57, 65, 64, 72, 45, 70, 55, 67, 72, 51, 65, 45, 72, 65, 70, 68, 38, 83, 68, 51, 72, 51, 66, 53, 63, 46, 70, 54, 71, 66, 77, 61, 66, 70, 59, 69, 48, 57, 67, 73, 75, 71, 66, 133, 69, 64, 68, 90, 57, 51, 58, 54, 58, 57, 56, 55, 71, 64, 70, 69, 61, 46, 70, 61, 64, 54, 61, 60, 58, 63, 104, 73, 64, 75, 53, 56, 56, 67, 83, 69, 46, 48, 53, 64, 68, 63, 44, 78, 57, 45, 54, 66, 52, 65, 65, 64, 62, 62, 54, 96, 55, 62, 69, 54, 53, 55, 50, 52, 76, 67, 65, 59, 76, 59, 76, 95, 72, 61, 72, 56, 67, 71, 58, 47, 64, 62, 55, 67, 52, 67, 96, 64, 62, 69, 62, 47, 52, 58, 63, 70, 72, 53, 61, 85, 44, 69, 54, 62, 62, 89, 65, 113, 50, 58, 60, 59, 50, 85, 46, 69, 83, 50, 51, 57, 57, 59, 53, 63, 59, 56, 49, 58, 53, 57, 87, 48, 58, 64, 57, 71, 64, 70, 70, 49, 68, 65, 69, 71, 55, 73, 63, 72, 46, 50, 70, 56, 53, 63, 79, 71, 53, 82, 70, 54, 56, 51, 57, 56, 68, 64, 52, 43, 46, 51, 61, 67, 60, 58, 74, 45, 65, 56, 58, 52, 63, 60, 72, 43, 60, 56, 61, 87, 72, 59, 63, 64, 61, 54, 46, 93, 78, 52, 95, 67, 65, 50, 88, 49, 101, 58, 55, 81, 59, 50, 78, 61, 55, 63, 60, 62, 63, 63, 85, 71, 84, 91, 74, 70, 62, 78, 62, 57, 61, 83, 69, 51, 99, 66, 92, 52, 64, 66, 52, 80, 70, 58, 85, 66, 49, 70, 65, 47, 52, 93, 54, 56, 76, 57, 67, 109, 79, 80, 79, 53, 61, 82, 47, 83, 58, 90, 97, 60, 55, 71, 58, 65, 59, 77, 90, 52, 51, 87, 87, 58, 65, 67, 69, 56, 55, 68, 60, 55, 68, 57, 46, 66, 71, 74, 54, 45, 64, 80, 91, 90, 79, 61, 86, 50, 58, 50, 79, 80, 54, 53, 66, 81, 50, 57, 50, 53, 63, 64, 99, 73, 42, 59, 89, 58, 81, 75, 44, 60, 70, 52, 45, 58, 56, 62, 61, 69, 82, 78, 67, 60, 52, 67, 68, 58, 56, 45, 55, 62, 58, 78, 52, 81, 109, 53, 50, 72, 62, 95, 73, 63, 71, 47, 70, 66, 59, 62, 96, 82, 55, 62, 63, 50, 53, 76, 58, 76, 62, 84, 114, 56, 47, 59, 71, 62, 66, 72, 69, 65, 42, 47, 56, 85, 51, 63, 62, 85, 72, 59, 64, 90, 59, 64, 47, 71, 113, 77, 71, 70, 60, 55, 70, 48, 71, 64, 46, 59, 69, 55, 61, 76, 51, 46, 63, 48, 63, 65, 70, 83, 67, 64, 87, 91, 91, 54, 59, 43, 130, 57, 55, 71, 60, 69, 67, 65, 59, 58, 94, 65, 59, 40, 67, 64, 64, 63, 57, 65, 66, 67, 54, 81, 51, 69, 54, 65, 69, 59, 43, 61, 66, 67, 56, 55, 61, 55, 62, 56, 67, 71, 121, 59, 50, 65, 76, 72, 59, 60, 82, 65, 70, 71, 69, 54, 50, 51, 79, 73, 50, 43, 65, 69, 55, 101, 65, 60, 48, 48, 67, 59, 66, 73, 74, 43, 73, 73, 57, 63, 64, 88, 65, 62, 86, 60, 68, 57, 57, 64, 73, 62, 71, 65, 64, 61, 66, 67, 64, 64, 99, 66, 63, 55, 64, 73, 57, 67, 66, 66, 58, 74, 100, 77, 86, 69, 56, 64, 63, 61, 95, 94, 94, 68, 70, 63, 63, 62, 88, 55, 75, 54, 89, 66, 51, 89, 57, 51, 67, 157, 65, 80, 78, 62, 63, 66, 88, 54, 62, 102, 72, 55, 57, 60, 68, 76, 73, 50, 54, 59, 55, 53, 80, 58, 66, 55, 71, 59, 65, 71, 65, 55, 58, 84, 79, 62, 75, 63, 65, 59, 57, 110, 81, 52, 54, 67, 78, 63, 52, 63, 62, 72, 64, 75, 51, 70, 91, 66, 71, 61, 78, 69, 73, 60, 61, 64, 65, 57, 75, 67, 62, 60, 70, 69, 65, 76, 57, 72, 86, 63, 109, 51, 65, 66, 64, 65, 52, 61, 66, 74, 54, 58, 56, 49, 78, 49, 69, 70, 62, 80, 51, 59, 53, 76, 62, 91, 55, 36, 55, 64, 78, 60, 73, 62, 68, 69, 71, 88, 94, 72, 66, 77, 84, 61, 76, 69, 73, 59, 77, 48, 71, 52, 55, 99, 70, 81, 61, 53, 52, 61, 58, 58, 81, 
83, 62, 61, 60, 47, 68, 62, 65, 52, 82, 74, 73, 72, 51, 58, 45, 71, 80, 73, 56, 75, 70, 56, 74, 78, 74, 50, 58, 74, 64, 75, 36, 81, 69, 60, 54, 69, 74, 71, 85, 55, 85, 68, 54, 108, 66, 59, 55, 85, 50, 67, 70, 65, 63, 68, 63, 84, 42, 78, 47, 66, 58, 67, 52, 60, 61, 69, 56, 65, 90, 71, 54, 52, 60, 64, 63, 60, 99, 65, 65, 57, 55, 55, 62, 67, 51, 75, 81, 53, 70, 64, 63, 45, 69, 53, 50, 73, 60, 46, 77, 50, 55, 61, 73, 44, 47, 49, 45, 75, 80, 66, 56, 71, 63, 65, 60, 59, 60, 75, 61, 94, 65, 105, 69, 68, 73, 52, 56, 52, 68, 65, 60, 67, 60, 68, 59, 81, 50, 56, 70, 74, 54, 67, 73, 61, 52, 43, 49, 70, 75, 55, 63, 45, 58, 98, 61, 63, 93, 64, 58, 52, 58, 58, 56, 79, 59, 69, 86, 59, 66, 50, 83, 56, 71, 89, 66, 71, 58, 58, 87, 69, 106, 68, 66, 81, 57, 53, 67, 93, 54, 61, 62, 53, 51, 72, 71, 53, 55, 62, 89, 78, 67, 73, 105, 63, 60, 73, 90, 49, 81, 61, 65, 63, 65, 134, 66, 59, 55, 85, 66, 93, 61, 79, 63, 72, 73, 62, 51, 67, 76, 66, 86, 47, 59, 52, 68, 65, 52, 62, 61, 59, 71, 66, 78, 48, 83, 69, 61, 68, 82, 65, 64, 71, 84, 50, 66, 55, 74, 64, 59, 46, 70, 56, 68, 71, 82, 68, 64, 72, 43, 54, 61, 63, 56, 54, 56, 75, 63, 65, 63, 63, 83, 80, 54, 56, 48, 64, 62, 70, 65, 54, 71, 66, 52, 65, 75, 52, 62, 51, 59, 81, 55, 69, 72, 50, 58, 100, 62, 71, 61, 65, 67, 42, 68, 78, 62, 72, 44, 60, 116, 81, 77, 55, 60, 65, 67, 60, 65, 67, 58, 51, 45, 80, 72, 59, 56, 67, 83, 89, 59, 68, 71, 92, 52, 64, 46, 68, 70, 61, 55, 47, 71, 65, 82, 74, 71, 69, 58, 66, 38, 68, 77, 59, 56, 78, 49, 51, 54, 38, 69, 71, 81, 64, 83, 58, 56, 46, 61, 71, 53, 95, 61, 72, 74, 85, 60, 64, 79, 67, 61, 64, 84, 67, 56, 81, 80, 74, 53, 57, 57, 95, 72, 37, 68, 83, 83, 63, 58, 50, 71, 57, 70, 46, 46, 72, 65, 63, 52, 65, 60, 93, 47, 76, 64, 49, 75, 85, 53, 46, 66, 58, 63, 59, 108, 55, 59, 55, 68, 64, 66, 72, 74, 87, 54, 53, 66, 79, 77, 63, 77, 79, 38, 60, 60, 66, 68, 73, 78, 76, 51, 70, 57, 65, 74, 70, 89, 68, 66, 56, 107, 89, 40, 61, 73, 59, 63, 59, 59, 111, 73, 77, 93, 78, 65, 66, 64, 45, 65, 73, 54, 70, 87, 64, 58, 68, 58, 56, 89, 53, 64, 61, 52, 76, 92, 89, 72, 61, 40, 57, 68, 60, 64, 79, 69, 76, 63, 63, 30, 44, 87, 59, 65, 56, 62, 67, 78, 54, 64, 65, 57, 66, 57, 57, 58, 84, 65, 76, 51, 64, 71, 49, 76, 61, 60, 58, 68, 61, 69, 60, 67, 94, 60, 49, 69, 92, 66, 86, 73, 82, 79, 45, 69, 66, 46, 87, 54, 57, 66, 76, 56, 80, 66, 43, 59, 61, 74, 76, 67, 82, 64, 81, 73, 69, 67, 76, 56, 76, 63, 55, 46, 76, 43, 68, 56, 68, 35, 59, 53, 82, 76, 56, 69, 88, 61, 62, 54, 84, 84, 65, 59, 79, 63, 83, 64, 57, 62, 52, 52, 96, 41, 70, 52, 74, 49, 83, 47, 74, 48, 64, 76, 71, 65, 70, 44, 76, 106, 73, 65, 73, 73, 62, 43, 66, 57, 76, 58, 63, 73, 71, 57, 77, 54, 67, 51, 79, 66, 88, 79, 47, 64, 61, 59, 60, 61, 57, 73, 55, 88, 68, 52, 63, 64, 47, 75, 69, 73, 67, 67, 70, 61, 105, 31, 57, 54, 54, 55, 84, 74, 66, 66, 78, 76, 55, 52, 45, 44, 63, 51, 61, 79, 45, 66, 40, 85, 90, 98, 97, 65, 51, 62, 63, 63, 51, 67, 59, 56, 63, 65, 80, 70, 73, 64, 58, 47, 89, 60, 48, 64, 63, 55, 49, 89, 66, 68, 64, 66, 52, 67, 80, 70, 68, 78, 66, 78, 81, 72, 60, 78, 93, 61, 65, 69, 57, 50, 83, 74, 41, 81, 60, 71, 65, 66, 36, 58, 85, 77, 45, 66, 71, 43, 63, 55, 57, 74, 80, 109, 64, 63, 47, 54, 67, 74, 61, 69, 66, 67, 83, 54, 56, 66, 99, 77, 56, 55, 57, 45, 76, 71, 110, 65, 64, 62, 55, 56, 52, 48, 76, 50, 74, 49, 93, 75, 55, 59, 53, 57, 63, 55, 55, 59, 66, 75, 68, 75, 57, 53, 61, 61, 70, 84, 48, 51, 60, 68, 66, 84, 63, 65, 104, 49, 63, 77, 77, 70, 73, 64, 73, 58, 68, 72, 54, 47, 35, 69, 58, 51, 57, 90, 82, 74, 54, 75, 76, 53, 59, 51, 59, 50, 118, 59, 80, 75, 57, 59, 77, 62, 77, 70, 57, 76, 79, 42, 51, 62, 
78, 61, 65, 53, 72, 59, 59, 59, 84, 65, 62, 65, 49, 60, 69, 67, 94, 55, 61, 67, 66, 60, 58, 66, 58, 51, 78, 68, 98, 56, 77, 57, 55, 74, 81, 64, 47, 60, 64, 79, 58, 65, 44, 49, 87, 88, 49, 66, 72, 68, 68, 78, 48, 77, 55, 64, 58, 73, 67, 67, 56, 49, 71, 61, 55, 79, 89, 102, 56, 81, 56, 90, 67, 63, 69, 58, 54, 61, 57, 86, 48, 61, 64, 67, 58, 74, 66, 70, 61, 58, 58, 65, 56, 59, 48, 52, 49, 110, 82, 55, 56, 72, 60, 68, 63, 97, 69, 55, 79, 52, 41, 68, 51, 79, 64, 59, 61, 73, 69, 65, 58, 101, 78, 49, 62, 69, 53, 65, 63, 64, 62, 54, 73, 56, 56, 68, 57, 57, 38, 82, 55, 68, 55, 46, 67, 59, 59, 70, 82, 64, 73, 70, 95, 61, 55, 53, 64, 61, 67, 67, 65, 75, 63, 78, 55, 41, 62, 58, 55, 63, 59, 56, 56, 77, 69, 57, 59, 69, 55, 62, 59, 57, 61, 50, 58, 89, 67, 73, 52, 58, 73, 57, 64, 62, 46, 55, 83, 72, 76, 85, 76, 67, 67, 70, 88, 89, 74, 72, 66, 50, 85, 116, 63, 77, 55, 51, 53, 70, 58, 63, 52, 66, 63, 78, 50, 69, 53, 61, 67, 63, 108, 59, 60, 64, 71, 57, 102, 66, 82, 58, 70, 59, 70, 57, 69, 105, 65, 63, 68, 81, 78, 47, 60, 68, 78, 75, 69, 72, 53, 58, 59, 66, 59, 58, 58, 66, 61, 58, 94, 52, 57, 69, 50, 66, 57, 80, 55, 67, 48, 56, 109, 57, 70, 68, 61, 76, 66, 60, 131, 75, 71, 54, 60, 63, 81, 64, 68, 67, 63, 59, 81, 56, 57, 74, 55, 85, 82, 79, 67, 66, 65, 46, 67, 68, 47, 66, 78, 48, 66, 73, 61, 71, 48, 63, 65, 60, 67, 64, 59, 72, 64, 62, 68, 38, 57, 43, 77, 54, 62, 56, 58, 61, 58, 82, 52, 69, 72, 62, 70, 65, 75, 52, 67, 75, 42, 75, 62, 65, 59, 60, 93, 56, 56, 60, 63, 59, 57, 48, 96, 64, 52, 58, 56, 65, 53, 63, 65, 80, 65, 71, 61, 49, 75, 66, 65, 65, 68, 76, 61, 70, 71, 79, 64, 68, 64, 65, 64, 62, 112, 62, 61, 66, 68, 66, 72, 76, 72, 62, 101, 70, 61, 57, 95, 62, 54, 60, 80, 74, 66, 55, 63, 76, 59, 55, 72, 59, 102, 72, 37, 54, 67, 76, 63, 41, 56, 67, 46, 83, 63, 60, 92, 75, 56, 55, 55, 62, 106, 74, 75, 58, 57, 68, 73, 60, 77, 109, 64, 59, 70, 66, 81, 55, 58, 57, 69, 65, 57, 51, 68, 125, 67, 74, 86, 69, 59, 65, 60, 62, 64, 53, 60, 101, 67, 71, 51, 72, 81, 51, 61, 58, 55, 55, 89, 54, 63, 83, 62, 96, 59, 72, 59, 57, 70, 56, 56, 66, 78, 76, 55, 56, 49, 56, 60, 69, 58, 55, 70, 71, 60, 61, 70, 38, 79, 64, 61, 51, 59, 71, 86, 77, 72, 71, 67, 100, 96, 70, 79, 82, 56, 64, 80, 71, 44, 79, 58, 52, 65, 50, 61, 72, 68, 58, 72, 62, 65, 49, 64, 60, 60, 59, 79, 79, 76, 66, 55, 67, 62, 54, 108, 67, 70, 87, 53, 61, 54, 64, 96, 50, 59, 68, 71, 55, 48, 56, 76, 60, 48, 53, 47, 44, 69, 64, 90, 62, 49, 57, 55, 57, 90, 62, 57, 77, 79, 63, 68, 60, 59, 73, 75, 71, 79, 57, 69, 56, 57, 63, 65, 61, 61, 64, 81, 66, 71, 58, 58, 61, 60, 62, 95, 61, 139, 81, 75, 57, 60, 68, 46, 59, 54, 60, 82, 91, 55, 55, 49, 56, 68, 66, 65, 49, 63, 61, 64, 72, 60, 72, 56, 51, 68, 98, 63, 100, 69, 66, 63, 60, 58, 80, 61, 44, 72, 65, 70, 75, 58, 55, 101, 77, 69, 88, 101, 65, 62, 68, 172, 68, 48, 70, 60, 93, 67, 54, 72, 64, 51, 79, 146, 89, 61, 104, 58, 66, 46, 61, 74, 97, 66, 59, 63, 69, 61, 53, 64, 97, 105, 56, 69, 86, 73, 65, 51, 49, 58, 46, 51, 50, 64, 52, 44, 63, 76, 86, 69, 59, 65, 63, 59, 56, 54, 63, 47, 80, 77, 69, 78, 55, 51, 76, 57, 55, 60, 65, 75, 68, 57, 63, 94, 56, 71, 62, 65, 47, 71, 67, 94, 54, 63, 67, 48, 73, 51, 55, 85, 66, 54, 64, 67, 67, 45, 72, 57, 61, 60, 65, 71, 70, 57, 42, 65, 67, 63, 53, 63, 63, 62, 65, 53, 53, 68, 66, 60, 63, 89, 60, 64, 61, 79, 72, 62, 53, 66, 103, 63, 102, 60, 56, 98, 47, 68, 50, 94, 59, 86, 57, 65, 81, 58, 54, 61, 56, 77, 77, 52, 59, 60, 65, 66, 70, 61, 64, 54, 51, 57, 52, 74, 52, 57, 54, 65, 88, 55, 57, 61, 66, 64, 65, 39, 62, 64, 48, 93, 60, 79, 59, 62, 58, 69, 56, 64, 50, 55, 105, 55, 63, 58, 62, 53, 47, 59, 56, 68, 
60, 63, 67, 68, 79, 58, 67, 61, 67, 63, 51, 64, 58, 113, 57, 52, 66, 81, 90, 93, 39, 69, 61, 60, 56, 86, 84, 60, 59, 50, 74, 64, 57, 57, 58, 101, 99, 88, 78, 50, 78, 57, 73, 59, 57, 58, 94, 61, 60, 61, 63, 55, 60, 41, 65, 88, 72, 49, 61, 76, 71, 71, 65, 69, 68, 64, 60, 63, 73, 60, 49, 69, 70, 87, 61, 84, 78, 58, 57, 74, 72, 44, 58, 113, 59, 61, 66, 90, 72, 62, 71, 70, 77, 77, 76, 60, 60, 41, 57, 56, 55, 51, 58, 77, 58, 56, 75, 56, 58, 68, 52, 57, 77, 51, 59, 56, 53, 68, 80, 64, 106, 69, 80, 63, 55, 79, 66, 47, 61, 90, 62, 57, 58, 65, 64, 55, 59, 56, 68, 77, 58, 61, 59, 61, 51, 54, 74, 54, 60, 62, 64, 65, 67, 72, 93, 93, 60, 55, 47, 75, 56, 75, 61, 62, 81, 77, 62, 86, 93, 56, 44, 65, 54, 73, 69, 81, 58, 68, 63, 56, 73, 67, 96, 80, 62, 77, 76, 78, 53, 75, 81, 87, 70, 50, 65, 51, 60, 74, 74, 96, 96, 70, 53, 62, 83, 53, 69, 62, 73, 67, 48, 93, 36, 73, 71, 68, 69, 67, 44, 88, 63, 66, 54, 73, 56, 66, 64, 49, 71, 68, 66, 57, 52, 54, 70, 72, 74, 80, 54, 64, 55, 53, 62, 55, 47, 65, 89, 54, 76, 58, 56, 76, 111, 59, 57, 75, 62, 73, 60, 99, 95, 53, 73, 101, 68, 55, 66, 39, 50, 62, 53, 63, 87, 68, 103, 45, 70, 91, 87, 100, 71, 53, 60, 60, 59, 60, 76, 59, 103, 55, 42, 66, 65, 75, 55, 59, 81, 64, 80, 63, 46, 79, 56, 64, 83, 62, 77, 73, 73, 81, 57, 62, 61, 41, 67, 110, 54, 59, 48, 46, 52, 56, 60, 69, 73, 46, 56, 80, 70, 73, 59, 74, 68, 58, 69, 77, 58, 71, 120, 107, 62, 65, 59, 40, 71, 35, 64, 36, 105, 72, 87, 64, 52, 79, 54, 44, 78, 111, 64, 64, 60, 60, 53, 73, 68, 80, 66, 63, 51, 67, 68, 63, 58, 59, 73, 63, 69, 66, 105, 82, 73, 48, 55, 69, 48, 43, 55, 70, 37, 54, 62, 56, 55, 79, 68, 77, 67, 77, 57, 90, 55, 85, 96, 44, 62, 45, 65, 61, 57, 70, 48, 59, 74, 80, 70, 62, 67, 55, 60, 60, 60, 66, 41, 53, 61, 68, 43, 64, 78, 59, 57, 64, 58, 61, 50, 85, 57, 66, 65, 84, 79, 58, 54, 69, 69, 58, 81, 70, 90, 74, 60, 58, 82, 56, 44, 57, 68, 70, 45, 81, 59, 66, 55, 61, 71, 70, 55, 71, 67, 66, 48, 57, 65, 68, 106, 79, 45, 81, 76, 63, 62, 98, 74, 64, 56, 68, 63, 46, 78, 60, 73, 60, 55, 42, 60, 52, 66, 58, 55, 86, 89, 57, 51, 49, 56, 76, 71, 60, 64, 62, 75, 67, 76, 93, 62, 93, 55, 67, 60, 51, 61, 84, 90, 67, 67, 30, 71, 69, 69, 85, 47, 58, 70, 35, 53, 41, 86, 60, 65, 59, 62, 53, 79, 63, 68, 54, 59, 63, 89, 64, 63, 59, 62, 67, 59, 91, 67, 65, 55, 64, 76, 67, 95, 64, 69, 47, 71, 87, 65, 68, 46, 70, 77, 74, 102, 96, 68, 61, 89, 61, 55, 38, 70, 56, 54, 57, 54, 62, 62, 56, 61, 79, 53, 67, 60, 106, 63, 63, 59, 63, 78, 48, 97, 73, 57, 69, 75, 41, 65, 54, 123, 67, 55, 57, 136, 48, 61, 59, 67, 107, 98, 86, 83, 64, 54, 58, 72, 61, 62, 52, 68, 57, 72, 59, 62, 56, 53, 55, 72, 48, 69, 87, 63, 65, 60, 58, 73, 72, 76, 64, 49, 73, 79, 63, 54, 93, 100, 46, 50, 60, 62, 71, 65, 60, 48, 49, 65, 76, 67, 47, 83, 44, 82, 100, 60, 48, 79, 67, 57, 55, 65, 60, 64, 66, 86, 81, 81, 62, 58, 69, 62, 60, 60, 75, 50, 86, 78, 57, 55, 46, 40, 72, 49, 51, 65, 66, 98, 54, 60, 70, 64, 61, 51, 60, 42, 54, 77, 63, 62, 68, 63, 69, 61, 47, 59, 72, 79, 57, 70, 64, 66, 99, 61, 68, 67, 60, 72, 112, 47, 51, 62, 80, 92, 75, 65, 63, 81, 63, 83, 70, 66, 38, 53, 90, 110, 54, 61, 77, 66, 73, 63, 63, 66, 55, 60, 84, 49, 56, 95, 107, 61, 83, 71, 69, 88, 82, 70, 53, 65, 85, 49, 74, 58, 62, 70, 61, 91, 51, 71, 73, 50, 80, 65, 72, 84, 60, 68, 64, 56, 53, 44, 79, 65, 71, 53, 70, 55, 70, 58, 56, 74, 75, 80, 48, 68, 57, 49, 45, 79, 54, 58, 55, 82, 47, 65, 107, 91, 51, 76, 60, 69, 51, 72, 66, 95, 53, 56, 64, 90, 75, 73, 70, 56, 56, 52, 78, 70, 66, 59, 71, 45, 45, 71, 81, 75, 42, 56, 58, 62, 79, 59, 75, 70, 49, 62, 80, 69, 98, 63, 51, 66, 69, 59, 56, 65, 55, 70, 84, 58, 53, 49, 56, 
50, 70, 66, 69, 87, 45, 75, 58, 48, 79, 80, 46, 53, 77, 68, 50, 77, 49, 69, 74, 65, 53, 88, 42, 68, 56, 67, 69, 59, 49, 68, 71, 58, 73, 74, 64, 76, 49, 49, 67, 51, 70, 57, 49, 52, 47, 54, 76, 57, 63, 64, 44, 71, 77, 115, 44, 59, 53, 73, 65, 57, 45, 85, 71, 44, 44, 66, 74, 70, 84, 73, 62, 62, 60, 62, 40, 55, 70, 55, 77, 64, 50, 98, 70, 52, 77, 50, 73, 66, 73, 127, 75, 123, 84, 55, 82, 60, 50, 60, 149, 64, 71, 55, 47, 67, 46, 120, 84, 95, 54, 54, 52, 63, 77, 72, 69, 86, 73, 107, 54, 34, 62, 67, 54, 72, 51, 61, 77, 64, 62, 55, 58, 48, 63, 60, 65, 79, 60, 132, 65, 86, 101, 67, 87, 58, 49, 80, 63, 65, 52, 71, 53, 55, 68, 71, 95, 76, 69, 48, 46, 70, 48, 60, 59, 60, 54, 47, 50, 57, 55, 76, 74, 68, 79, 56, 62, 80, 35, 45, 64, 55, 64, 77, 70, 60, 53, 68, 55, 78, 74, 103, 43, 102, 62, 58, 84, 50, 73, 60, 56, 76, 80, 62, 70, 92, 64, 74, 60, 52, 54, 62, 59, 74, 47, 59, 33, 48, 59, 54, 76, 57, 76, 61, 93, 62, 54, 73, 61, 66, 41, 87, 78, 54, 30, 75, 77, 85, 56, 78, 74, 83, 72, 75, 60, 46, 36, 75, 110, 51, 50, 59, 89, 70, 56, 58, 74, 65, 95, 86, 71, 93, 72, 62, 45, 70, 53, 57, 78, 82, 97, 85, 63, 85, 58, 52, 57, 70, 73, 58, 50, 60, 50, 62, 64, 64, 50, 115, 67, 67, 53, 68, 75, 54, 51, 89, 54, 101, 68, 43, 58, 50, 43, 60, 62, 96, 56, 72, 86, 51, 63, 44, 78, 77, 57, 44, 67, 72, 68, 74, 55, 95, 84, 82, 59, 68, 77, 60, 46, 56, 78, 71, 69, 57, 53, 72, 65, 72, 79, 62, 92, 66, 73, 55, 69, 79, 71, 50, 78, 52, 66, 54, 71, 56, 100, 39, 76, 42, 55, 102, 78, 67, 55, 55, 77, 60, 72, 58, 57, 76, 56, 72, 61, 47, 62, 52, 63, 76, 71, 61, 60, 55, 45, 67, 69, 68, 66, 62, 84, 62, 64, 55, 58, 66, 51, 65, 53, 64, 50, 66, 66, 52, 50, 61, 71, 67, 63, 53, 41, 57, 63, 64, 74, 70, 49, 76, 75, 76, 74, 65, 69, 58, 57, 66, 82, 45, 74, 75, 73, 64, 89, 92, 75, 44, 49, 71, 55, 57, 60, 62, 61, 48, 65, 59, 63, 64, 63, 58, 48, 45, 56, 89, 91, 59, 59, 49, 62, 76, 82, 59, 50, 57, 78, 76, 94, 67, 78, 68, 74, 90, 59, 73, 60, 80, 56, 73, 62, 67, 72, 56, 51, 68, 51, 67, 61, 71, 66, 64, 48, 73, 62, 69, 70, 72, 61, 67, 59, 56, 67, 89, 92, 55, 52, 61, 64, 53, 58, 57, 69, 60, 63, 48, 122, 67, 63, 102, 99, 67, 69, 65, 74, 66, 63, 53, 79, 68, 61, 62, 58, 44, 64, 56, 67, 65, 51, 67, 56, 65, 64, 56, 56, 61, 61, 60, 52, 55, 78, 74, 50, 112, 67, 68, 79, 69, 47, 78, 50, 91, 56, 79, 73, 87, 60, 65, 46, 138, 77, 54, 70, 61, 63, 65, 61, 63, 67, 56, 61, 90, 52, 63, 49, 66, 63, 63, 70, 61, 62, 73, 59, 66, 84, 55, 77, 64, 60, 52, 75, 62, 56, 79, 99, 60, 81, 57, 67, 58, 79, 55, 84, 41, 61, 60, 55, 60, 76, 48, 56, 74, 53, 59, 76, 58, 50, 56, 58, 76, 63, 64, 51, 67, 63, 68, 63, 75, 52, 49, 65, 57, 38, 55, 61, 100, 67, 72, 56, 81, 64, 55, 67, 60, 57, 44, 57, 68, 71, 65, 71, 73, 74, 58, 75, 78, 69, 66, 51, 66, 54, 64, 72, 61, 104, 67, 62, 55, 101, 63, 57, 44, 63, 86, 66, 56, 74, 71, 68, 74, 58, 67, 72, 54, 55, 78, 56, 62, 61, 69, 52, 61, 54, 36, 65, 48, 61, 68, 90, 99, 70, 60, 55, 46, 69, 55, 73, 44, 74, 62, 70, 67, 59, 71, 92, 63, 70, 60, 74, 70, 58, 63, 116, 57, 57, 72, 67, 66, 87, 66, 58, 62, 52, 129, 60, 66, 53, 49, 59, 58, 58, 54, 59, 65, 64, 78, 60, 75, 58, 78, 57, 53, 62, 62, 86, 55, 44, 58, 62, 58, 60, 75, 62, 56, 61, 68, 60, 75, 55, 62, 68, 52, 77, 59, 81, 60, 81, 70, 76, 70, 51, 52, 62, 60, 73, 63, 96, 60, 57, 67, 49, 58, 73, 61, 64, 58, 54, 55, 80, 65, 77, 97, 65, 48, 70, 62, 61, 65, 50, 53, 64, 67, 74, 108, 57, 55, 73, 59, 70, 55, 73, 84, 60, 57, 61, 47, 53, 71, 64, 51, 64, 53, 55, 57, 84, 97, 58, 52, 79, 65, 60, 60, 83, 65, 55, 74, 61, 68, 69, 66, 87, 86, 59, 75, 60, 66, 64, 59, 59, 70, 65, 90, 60, 75, 55, 108, 53, 82, 59, 54, 61, 59, 61, 75, 43, 123, 61, 
68, 62, 81, 47, 69, 63, 52, 68, 55, 68, 50, 85, 55, 74, 51, 62, 63, 82, 76, 56, 106, 50, 61, 57, 57, 64, 74, 56, 54, 66, 63, 60, 64, 105, 60, 68, 62, 48, 66, 82, 61, 57, 89, 58, 74, 65, 74, 57, 60, 85, 47, 48, 52, 73, 69, 48, 66, 56, 53, 59, 73, 66, 77, 56, 56, 60, 61, 73, 63, 56, 74, 54, 53, 86, 48, 75, 71, 76, 66, 56, 71, 76, 64, 66, 51, 78, 64, 60, 48, 70, 75, 61, 66, 76, 73, 59, 58, 54, 68, 66, 62, 83, 52, 66, 57, 75, 87, 66, 62, 64, 78, 75, 77, 72, 63, 63, 69, 71, 67, 56, 64, 62, 83, 63, 130, 122, 57, 69, 58, 75, 59, 36, 60, 74, 61, 59, 67, 51, 81, 67, 69, 73, 72, 85, 57, 60, 105, 63, 77, 56, 64, 52, 66, 64, 40, 58, 66, 62, 63, 56, 68, 66, 61, 76, 50, 79, 72, 51, 56, 58, 43, 90, 56, 64, 74, 56, 53, 59, 60, 149, 77, 53, 53, 75, 54, 52, 50, 65, 51, 73, 64, 60, 50, 59, 106, 82, 61, 60, 68, 50, 90, 61, 130, 49, 96, 66, 72, 58, 76, 66, 60, 50, 66, 50, 59, 86, 58, 55, 35, 64, 48, 55, 53, 49, 50, 73, 72, 57, 57, 71, 66, 81, 67, 79, 64, 63, 72, 54, 63, 69, 75, 116, 77, 66, 70, 60, 65, 65, 70, 67, 69, 52, 54, 52, 57, 44, 64, 52, 76, 65, 71, 70, 64, 58, 72, 77, 59, 65, 70, 61, 65, 82, 50, 60, 66, 54, 83, 61, 82, 54, 65, 55, 86, 68, 63, 63, 61, 56, 58, 62, 58, 84, 50, 61, 77, 64, 55, 56, 58, 49, 57, 57, 61, 90, 72, 77, 110, 66, 59, 70, 87, 70, 55, 62, 50, 82, 88, 79, 71, 60, 70, 76, 76, 63, 52, 70, 62, 69, 69, 71, 68, 67, 47, 80, 58, 65, 46, 79, 107, 49, 63, 92, 69, 67, 60, 56, 152, 59, 56, 67, 52, 52, 58, 75, 67, 54, 70, 66, 82, 63, 64, 70, 82, 83, 73, 56, 60, 58, 83, 75, 56, 68, 67, 52, 54, 53, 66, 57, 68, 66, 52, 55, 74, 63, 63, 58, 57, 64, 59, 62, 72, 44, 45, 68, 61, 49, 62, 109, 79, 50, 45, 53, 47, 60, 75, 61, 54, 102, 54, 47, 68, 53, 83, 63, 73, 70, 73, 60, 70, 51, 68, 70, 88, 68, 39, 50, 61, 62, 73, 52, 51, 80, 63, 59, 73, 64, 99, 53, 62, 56, 78, 66, 60, 79, 53, 67, 51, 60, 48, 63, 57, 60, 74, 72, 66, 60, 75, 71, 57, 58, 67, 61, 52, 72, 58, 61, 66, 71, 75, 72, 58, 88, 53, 76, 61, 72, 60, 49, 60, 81, 68, 63, 50, 73, 88, 62, 67, 77, 54, 73, 64, 83, 46, 60, 76, 80, 53, 73, 97, 59, 84, 79, 69, 77, 64, 64, 66, 59, 62, 50, 64, 93, 47, 64, 49, 76, 68, 62, 65, 53, 63, 57, 59, 56, 72, 55, 51, 76, 50, 69, 77, 105, 58, 69, 66, 51, 56, 55, 60, 60, 56, 72, 54, 47, 52, 60, 57, 59, 62, 76, 81, 70, 77, 69, 86, 49, 55, 86, 73, 72, 51, 90, 69, 76, 56, 62, 71, 39, 64, 72, 55, 70, 54, 59, 55, 49, 77, 73, 74, 55, 45, 55, 81, 70, 62, 47, 87, 117, 73, 65, 69, 65, 55, 82, 56, 53, 53, 64, 60, 62, 85, 68, 43, 58, 80, 103, 70, 70, 63, 62, 43, 56, 72, 70, 97, 60, 59, 69, 64, 62, 75, 74, 66, 65, 61, 65, 66, 58, 59, 60, 46, 70, 108, 64, 69, 44, 72, 78, 100, 43, 56, 70, 73, 69, 64, 67, 67, 64, 53, 82, 65, 63, 69, 70, 72, 57, 77, 58, 51, 65, 78, 38, 73, 52, 57, 62, 55, 57, 40, 56, 62, 50, 76, 79, 67, 62, 59, 47, 60, 43, 48, 54, 52, 68, 69, 64, 60, 66, 63, 57, 54, 53, 55, 52, 67, 64, 98, 61, 55, 71, 75, 78, 67, 52, 53, 73, 135, 60, 65, 63, 60, 60, 55, 67, 67, 73, 90, 75, 46, 53, 57, 58, 57, 65, 61, 55, 66, 72, 62, 65, 57, 68, 64, 59, 57, 54, 71, 73, 56, 52, 51, 51, 70, 53, 49, 74, 51, 73, 55, 62, 59, 59, 74, 54, 64, 73, 86, 60, 68, 110, 65, 56, 75, 85, 72, 61, 60, 79, 63, 52, 64, 50, 53, 52, 56, 64, 86, 54, 64, 55, 59, 59, 86, 53, 80, 63, 114, 62, 87, 59, 54, 67, 82, 63, 61, 82, 56, 58, 53, 70, 55, 90, 49, 62, 73, 74, 54, 60, 64, 69, 74, 81, 61, 151, 58, 77, 72, 55, 51, 69, 46, 99, 98, 64, 61, 76, 47, 74, 67, 60, 61, 65, 67, 60, 61, 64, 68, 75, 58, 64, 77, 75, 65, 67, 50, 38, 59, 64, 90, 54, 81, 65, 80, 76, 94, 99, 89, 64, 59, 77, 91, 44, 76, 71, 80, 56, 54, 56, 66, 92, 50, 60, 85, 50, 47, 60, 61, 67, 69, 88, 87, 84, 61, 
72, 51, 62, 56, 69, 62, 67, 51, 70, 73, 64, 59, 69, 46, 55, 63, 98, 50, 57, 56, 57, 67, 57, 67, 57, 52, 53, 63, 54, 62, 58, 62, 78, 71, 57, 66, 70, 71, 59, 59, 70, 60, 79, 58, 77, 57, 51, 55, 101, 61, 48, 95, 78, 53, 59, 61, 64, 57, 64, 67, 64, 53, 61, 51, 50, 93, 58, 63, 69, 76, 53, 42, 75, 49, 60, 51, 73, 71, 51, 65, 51, 48, 63, 63, 88, 65, 64, 62, 87, 58, 67, 59, 58, 68, 43, 59, 47, 81, 71, 101, 59, 68, 57, 76, 43, 55, 116, 60, 59, 67, 67, 58, 96, 106, 68, 60, 67, 75, 58, 67, 60, 63, 55, 51, 83, 65, 93, 66, 49, 85, 49, 60, 101, 50, 64, 57, 72, 59, 55, 122, 62, 105, 50, 64, 54, 66, 74, 69, 62, 64, 64, 86, 68, 58, 66, 77, 58, 47, 51, 78, 59, 88, 71, 50, 63, 55, 57, 57, 63, 63, 59, 62, 69, 83, 56, 65, 50, 61, 42, 94, 58, 58, 66, 90, 52, 77, 57, 51, 70, 64, 75, 73, 80, 51, 66, 54, 61, 91, 77, 71, 55, 72, 69, 62, 64, 61, 50, 63, 93, 56, 61, 58, 50, 69, 98, 67, 61, 63, 105, 79, 50, 84, 49, 44, 66, 54, 87, 64, 69, 55, 65, 83, 66, 98, 56, 83, 91, 88, 87, 82, 76, 64, 60, 70, 58, 58, 71, 66, 53, 63, 61, 58, 97, 60, 41, 69, 55, 72, 71, 44, 40, 84, 57, 63, 56, 90, 85, 50, 68, 53, 59, 66, 87, 62, 67, 64, 69, 75, 65, 70, 67, 85, 83, 113, 62, 59, 71, 89, 64, 65, 47, 69, 59, 69, 52, 75, 53, 57, 66, 60, 60, 48, 68, 60, 47, 88, 60, 63, 49, 53, 58, 56, 68, 50, 91, 90, 49, 50, 71, 56, 151, 53, 58, 57, 61, 51, 101, 61, 66, 54, 58, 89, 61, 65, 77, 71, 75, 56, 78, 65, 67, 58, 53, 57, 58, 60, 56, 71, 67, 57, 79, 67, 57, 71, 56, 65, 68, 64, 56, 62, 51, 60, 67, 66, 59, 68, 70, 80, 59, 64, 100, 69, 55, 66, 83, 70, 85, 64, 61, 76, 51, 47, 49, 76, 60, 100, 70, 54, 59, 56, 88, 63, 53, 50, 56, 63, 62, 49, 80, 62, 58, 93, 114, 100, 58, 61, 68, 54, 52, 62, 80, 67, 91, 64, 62, 40, 62, 51, 58, 91, 59, 53, 52, 66, 79, 64, 59, 51, 44, 65, 49, 91, 99, 66, 57, 55, 67, 79, 75, 65, 62, 70, 81, 82, 53, 56, 69, 57, 54, 46, 77, 76, 57, 55, 70, 52, 49, 84, 66, 74, 63, 48, 55, 68, 76, 74, 126, 53, 63, 67, 54, 55, 44, 53, 65, 63, 61, 83, 80, 60, 56, 54, 86, 66, 55, 59, 66, 88, 66, 51, 63, 61, 57, 48, 45, 49, 65, 54, 61, 69, 71, 63, 64, 59, 59, 67, 63, 48, 60, 50, 73, 59, 55, 53, 80, 64, 79, 58, 71, 46, 102, 59, 51, 65, 60, 60, 51, 77, 55, 62, 63, 48, 84, 65, 52, 53, 92, 58, 110, 52, 55, 66, 69, 61, 65, 54, 78, 66, 38, 60, 74, 70, 50, 66, 72, 72, 81, 57, 61, 65, 64, 76, 54, 62, 65, 34, 63, 50, 69, 50, 59, 91, 67, 56, 68, 57, 69, 79, 56, 62, 64, 58, 72, 55, 69, 71, 57, 70, 70, 73, 58, 67, 84, 104, 57, 93, 78, 75, 52, 66, 69, 85, 66, 60, 79, 78, 52, 69, 62, 61, 67, 54, 67, 68, 54, 55, 50, 59, 75, 62, 61, 79, 68, 53, 66, 69, 98, 54, 62, 65, 86, 66, 55, 65, 52, 96, 48, 76, 46, 70, 52, 101, 53, 68, 86, 56, 61, 48, 78, 61, 79, 82, 52, 58, 50, 65, 65, 80, 54, 71, 126, 63, 72, 84, 73, 49, 61, 66, 77, 61, 69, 65, 66, 80, 58, 71, 51, 95, 59, 57, 80, 49, 95, 42, 48, 86, 72, 62, 55, 64, 63, 81, 74, 55, 57, 72, 156, 77, 62, 67, 56, 60, 38, 41, 67, 53, 65, 79, 71, 52, 47, 39, 53, 51, 67, 52, 72, 52, 93, 59, 50, 56, 91, 106, 56, 56, 62, 57, 69, 65, 59, 47, 57, 69, 60, 66, 71, 66, 51, 50, 80, 62, 85, 47, 53, 66, 71, 85, 63, 56, 52, 61, 83, 60, 34, 76, 54, 74, 61, 60, 73, 49, 53, 50, 87, 67, 94, 58, 62, 80, 67, 60, 87, 49, 79, 42, 65, 78, 73, 55, 55, 50, 73, 52, 66, 45, 85, 69, 59, 74, 64, 103, 75, 61, 84, 93, 73, 44, 57, 61, 77, 70, 83, 57, 79, 61, 94, 60, 56, 48, 61, 67, 45, 62, 69, 48, 48, 66, 57, 80, 81, 50, 55, 76, 92, 75, 67, 107, 47, 69, 67, 73, 72, 57, 83, 86, 65, 57, 74, 96, 48, 65, 61, 64, 99, 60, 68, 55, 72, 74, 64, 46, 63, 59, 52, 34, 86, 87, 56, 80, 64, 53, 55, 59, 69, 61, 56, 48, 81, 47, 91, 50, 62, 53, 71, 54, 50, 62, 62, 59, 58, 69, 
73, 58, 56, 68, 59, 54, 73, 50, 60, 54, 94, 46, 68, 61, 71, 45, 55, 58, 68, 47, 68, 70, 104, 49, 86, 99, 72, 89, 60, 117, 57, 58, 46, 73, 54, 44, 78, 69, 59, 43, 69, 40, 78, 90, 78, 63, 75, 73, 75, 92, 67, 61, 41, 60, 92, 108, 105, 68, 51, 39, 59, 69, 70, 66, 63, 40, 85, 61, 67, 54, 85, 65, 91, 84, 78, 72, 118, 48, 77, 50, 54, 42, 58, 55, 70, 49, 115, 56, 58, 52, 61, 88, 107, 65, 46, 84, 51, 63, 48, 105, 61, 48, 75, 47, 70, 74, 89, 96, 69, 71, 67, 57, 78, 76, 54, 94, 44, 48, 62, 64, 41, 50, 43, 59, 56, 85, 105, 65, 47, 73, 55, 74, 113, 64, 59, 86, 69, 63, 59, 68, 67, 47, 50, 93, 46, 59, 80, 90, 49, 66, 53, 89, 53, 57, 55, 80, 72, 54, 86, 74, 55, 74, 47, 40, 101, 52, 88, 84, 41, 48, 68, 78, 97, 90, 58, 69, 73, 48, 51, 72, 42, 89, 54, 71, 58, 81, 65, 65, 54, 58, 47, 107, 94, 65, 70, 69, 54, 47, 48, 49, 71, 57, 79, 68, 62, 71, 62, 48, 62, 59, 61, 96, 65, 56, 56, 94, 67, 51, 77, 67, 61, 62, 89, 50, 84, 50, 54, 89, 40, 86, 60, 91, 78, 59, 66, 46, 101, 84, 55, 75, 57, 56, 82, 73, 70, 78, 78, 53, 48, 64, 75, 80, 53, 52, 82, 67, 68, 66, 98, 63, 63, 46, 71, 53, 67, 98, 102, 65, 75, 73, 89, 55, 74, 42, 85, 76, 63, 68, 69, 43, 86, 58, 62, 73, 92, 86, 58, 63, 50, 67, 59, 48, 53, 62, 79, 73, 76, 54, 74, 61, 45, 68, 71, 70, 72, 55, 67, 76, 68, 96, 63, 65, 35, 69, 52, 78, 49, 125, 81, 83, 45, 57, 70, 70, 54, 59, 44, 47, 44, 65, 79, 68, 60, 76, 53, 88, 54, 65, 70, 62, 47, 60, 62, 59, 72, 65, 75, 94, 70, 82, 53, 69, 60, 84, 68, 85, 64, 50, 70, 61, 34, 51, 50, 69, 69, 46, 62, 88, 62, 68, 43, 64, 33, 96, 70, 60, 48, 68, 70, 72, 66, 62, 83, 72, 71, 81, 69, 61, 63, 73, 66, 78, 43, 53, 103, 75, 73, 43, 71, 64, 68, 72, 58, 45, 82, 79, 51, 55, 43, 72, 56, 72, 61, 83, 59, 94, 63, 45, 55, 74, 67, 63, 60, 64, 71, 71, 62, 53, 73, 61, 54, 59, 75, 64, 60, 64, 54, 57, 77, 75, 66, 58, 63, 91, 61, 62, 56, 64, 58, 75, 65, 55, 84, 65, 50, 69, 42, 58, 84, 79, 48, 55, 60, 74, 58, 62, 96, 79, 80, 55, 72, 68, 47, 95, 49, 86, 56, 54, 80, 67, 123, 50, 64, 89, 79, 42, 56, 93, 61, 54, 50, 49, 71, 87, 71, 56, 110, 57, 46, 49, 72, 75, 78, 63, 42, 68, 67, 79, 60, 72, 56, 64, 62, 150, 73, 30, 105, 64, 60, 55, 75, 42, 55, 55, 36, 63, 98, 56, 61, 73, 64, 94, 79, 73, 109, 53, 73, 67, 79, 67, 60, 48, 70, 141, 62, 61, 60, 44, 76, 53, 44, 87, 69, 87, 50, 70, 66, 64, 68, 80, 75, 76, 58, 71, 48, 58, 65, 82, 61, 60, 71, 75, 52, 56, 57, 66, 72, 36, 78, 90, 62, 101, 34, 56, 47, 52, 61, 41, 60, 115, 76, 80, 70, 54, 107, 69, 56, 50, 76, 63, 56, 55, 66, 67, 70, 66, 63, 56, 52, 59, 89, 56, 84, 57, 57, 58, 56, 53, 63, 86, 75, 87, 48, 107, 81, 38, 55, 56, 78, 46, 44, 60, 65, 54, 71, 76, 69, 124, 48, 58, 65, 74, 60, 76, 55, 73, 102, 73, 58, 43, 95, 79, 45, 91, 67, 61, 45, 75, 93, 55, 48, 59, 55, 64, 66, 56, 61, 63, 101, 99, 51, 67, 54, 42, 79, 57, 140, 104, 50, 52, 50, 73, 76, 68, 108, 31, 72, 53, 69, 53, 59, 72, 61, 91, 57, 61, 49, 54, 51, 64, 50, 65, 88, 55, 38, 71, 69, 59, 63, 67, 74, 58, 47, 57, 67, 77, 64, 67, 73, 56, 52, 60, 83, 81, 66, 71, 61, 53, 64, 53, 43, 59, 43, 66, 61, 55, 49, 105, 50, 62, 47, 64, 86, 72, 59, 59, 74, 50, 74, 71, 81, 70, 71, 69, 54, 76, 70, 74, 51, 80, 71, 59, 82, 61, 59, 86, 106, 65, 50, 90, 74, 89, 70, 50, 53, 88, 86, 45, 47, 89, 69, 55, 55, 77, 64, 81, 80, 56, 82, 64, 46, 51, 51, 50, 64, 78, 76, 59, 69, 48, 37, 41, 75, 49, 70, 55, 58, 68, 52, 53, 90, 75, 86, 68, 55, 52, 71, 83, 71, 68, 43, 70, 83, 37, 54, 63, 65, 47, 49, 60, 42, 63, 60, 67, 63, 54, 72, 60, 51, 68, 54, 105, 60, 59, 43, 52, 68, 55, 98, 55, 64, 64, 63, 67, 83, 99, 59, 58, 65, 62, 64, 73, 63, 70, 45, 63, 58, 62, 58, 65, 60, 70, 91, 59, 59, 72, 61, 88, 69, 67, 
75, 91, 68, 66, 62, 67, 61, 66, 52, 55, 39, 58, 51, 63, 77, 66, 47, 62, 69, 50, 52, 70, 35, 46, 57, 58, 70, 68, 66, 61, 53, 54, 60, 57, 72, 48, 71, 63, 65, 69, 60, 60, 73, 60, 45, 62, 61, 61, 58, 68, 65, 69, 62, 48, 67, 66, 97, 59, 63, 86, 69, 79, 67, 53, 59, 73, 88, 56, 123, 56, 124, 70, 78, 60, 69, 58, 49, 79, 65, 73, 66, 62, 78, 73, 50, 66, 62, 54, 66, 66, 72, 62, 64, 60, 48, 54, 97, 59, 53, 60, 66, 103, 57, 60, 75, 87, 78, 55, 86, 62, 65, 78, 54, 67, 58, 56, 69, 57, 70, 68, 63, 70, 72, 63, 75, 59, 47, 115, 69, 59, 55, 112, 90, 54, 74, 55, 81, 46, 75, 46, 49, 56, 96, 73, 91, 48, 52, 52, 64, 42, 62, 60, 62, 69, 70, 32, 58, 66, 42, 55, 31, 81, 77, 110, 75, 67, 71, 91, 97, 90, 55, 55, 39, 49, 89, 58, 50, 67, 51, 62, 61, 43, 83, 52, 53, 39, 63, 64, 60, 73, 61, 64, 76, 72, 61, 86, 59, 48, 66, 65, 56, 64, 75, 55, 81, 60, 75, 55, 50, 107, 78, 65, 58, 68, 60, 81, 82, 55, 42, 69, 119, 76, 61, 32, 51, 52, 76, 74, 71, 91, 67, 58, 104, 53, 82, 58, 60, 59, 40, 53, 57, 66, 65, 55, 78, 49, 63, 70, 71, 48, 60, 52, 97, 51, 73, 42, 75, 62, 52, 71, 70, 93, 67, 68, 69, 64, 49, 51, 126, 66, 53, 48, 48, 107, 75, 89, 80, 81, 70, 56, 63, 90, 76, 76, 29, 68, 77, 85, 62, 55, 88, 77, 75, 76, 83, 67, 74, 47, 57, 77, 62, 91, 44, 46, 65, 49, 132, 62, 91, 52, 67, 55, 76, 106, 56, 67, 70, 71, 48, 48, 112, 79, 57, 52, 77, 80, 92, 50, 61, 83, 69, 54, 70, 70, 55, 55, 72, 63, 47, 71, 57, 103, 94, 60, 61, 72, 72, 75, 49, 52, 42, 83, 69, 80, 46, 61, 76, 52, 90, 60, 65, 71, 79, 47, 62, 69, 68, 56, 55, 58, 68, 46, 119, 46, 54, 74, 48, 85, 77, 50, 72, 69, 61, 59, 62, 61, 62, 68, 73, 61, 56, 52, 76, 69, 64, 83, 67, 40, 71, 83, 50, 71, 52, 51, 48, 84, 71, 69, 60, 96, 72, 63, 74, 64, 33, 72, 85, 64, 49, 77, 59, 67, 69, 65, 91, 64, 53, 70, 60, 71, 47, 90, 69, 61, 70, 78, 51, 66, 59, 116, 86, 59, 63, 67, 56, 52, 61, 62, 69, 72, 49, 72, 76, 68, 61, 73, 53, 62, 58, 71, 66, 54, 54, 73, 96, 42, 95, 77, 60, 82, 46, 64, 43, 79, 48, 38, 64, 69, 65, 74, 63, 90, 73, 59, 76, 57, 110, 52, 64, 47, 64, 59, 74, 82, 62, 63, 60, 63, 51, 77, 93, 85, 77, 69, 71, 84, 44, 94, 92, 51, 66, 65, 41, 93, 59, 82, 71, 56, 75, 51, 79, 66, 56, 76, 56, 79, 45, 67, 72, 43, 63, 66, 57, 68, 64, 80, 58, 76, 82, 45, 63, 64, 49, 72, 66, 60, 82, 53, 45, 53, 69, 50, 52, 59, 51, 69, 57, 64, 65, 79, 47, 84, 50, 65, 54, 57, 61, 47, 75, 63, 91, 52, 67, 58, 87, 57, 61, 68, 80, 70, 66, 80, 86, 54, 51, 70, 52, 49, 55, 70, 57, 68, 54, 64, 47, 62, 52, 76, 62, 60, 57, 86, 52, 48, 73, 43, 54, 74, 90, 55, 67, 69, 50, 62, 84, 70, 73, 51, 63, 56, 88, 72, 90, 66, 58, 85, 81, 64, 48, 76, 48, 36, 50, 61, 74, 57, 68, 65, 45, 59, 60, 113, 62, 66, 59, 55, 71, 66, 56, 57, 60, 58, 61, 55, 41, 66, 89, 51, 57, 118, 70, 58, 65, 78, 50, 80, 75, 58, 51, 84, 70, 75, 60, 64, 42, 91, 65, 74, 52, 55, 48, 63, 48, 77, 92, 73, 54, 79, 67, 66, 76, 52, 64, 121, 62, 61, 56, 91, 50, 143, 72, 53, 87, 85, 64, 62, 56, 52, 80, 47, 72, 71, 94, 66, 71, 76, 72, 72, 57, 50, 71, 63, 73, 98, 75, 60, 106, 52, 49, 72, 55, 81, 67, 67, 65, 113, 47, 45, 60, 74, 82, 78, 147, 50, 67, 56, 92, 77, 56, 70, 79, 71, 68, 58, 64, 41, 58, 87, 65, 46, 67, 54, 58, 50, 60, 81, 60, 59, 69, 73, 43, 62, 33, 63, 69, 69, 68, 53, 54, 42, 67, 69, 140, 52, 57, 92, 71, 68, 83, 66, 82, 58, 54, 124, 121, 56, 58, 59, 66, 53, 60, 66, 70, 64, 47, 70, 89, 67, 78, 49, 119, 72, 57, 57, 63, 82, 65, 56, 69, 68, 54, 62, 70, 55, 45, 63, 82, 71, 46, 50, 60, 63, 55, 59, 65, 67, 53, 59, 61, 59, 54, 72, 59, 74, 101, 50, 74, 62, 61, 61, 69, 96, 57, 59, 67, 75, 89, 78, 72, 56, 55, 56, 59, 45, 60, 67, 77, 71, 65, 63, 58, 55, 87, 71, 93, 65, 54, 38, 63, 70, 46, 
68, 53, 61, 65, 62, 65, 64, 55, 64, 41, 57, 57, 73, 57, 52, 73, 75, 51, 53, 83, 77, 98, 59, 48, 51, 56, 76, 50, 62, 66, 73, 85, 110, 79, 58, 66, 75, 50, 59, 68, 63, 95, 58, 77, 117, 70, 45, 67, 98, 61, 87, 63, 65, 61, 90, 82, 43, 68, 56, 70, 62, 73, 66, 68, 96, 58, 56, 68, 55, 81, 73, 56, 62, 69, 60, 53, 64, 80, 64, 65, 53, 77, 54, 83, 74, 58, 63, 66, 75, 70, 55, 77, 66, 47, 50, 72, 64, 58, 66, 78, 57, 60, 78, 78, 64, 76, 113, 63, 62, 57, 62, 59, 69, 87, 44, 72, 76, 87, 118, 85, 60, 62, 52, 74, 52, 69, 61, 70, 45, 61, 66, 80, 70, 65, 60, 71, 87, 67, 65, 61, 61, 72, 71, 63, 64, 53, 47, 130, 64, 69, 70, 56, 84, 62, 68, 69, 60, 73, 51, 77, 69, 103, 69, 73, 62, 64, 59, 63, 62, 44, 69, 53, 64, 51, 60, 65, 54, 56, 63, 55, 50, 62, 43, 52, 59, 65, 64, 54, 65, 60, 66, 62, 90, 70, 76, 70, 77, 65, 53, 88, 59, 65, 75, 73, 63, 55, 72, 55, 55, 68, 58, 85, 55, 57, 56, 49, 63, 90, 69, 64, 74, 44, 54, 71, 51, 61, 73, 73, 78, 76, 85, 68, 63, 54, 82, 72, 66, 56, 65, 75, 65, 41, 59, 73, 67, 56, 77, 55, 50, 53, 69, 71, 74, 40, 75, 77, 68, 71, 61, 89, 71, 56, 55, 59, 70, 66, 74, 70, 58, 69, 76, 50, 50, 71, 52, 48, 69, 70, 70, 53, 58, 59, 65, 74, 68, 82, 75, 63, 69, 67, 55, 56, 74, 59, 86, 57, 55, 45, 45, 69, 63, 77, 83, 42, 59, 52, 57, 56, 69, 57, 55, 61, 53, 64, 68, 71, 70, 44, 64, 68, 55, 65, 72, 46, 65, 49, 81, 67, 65, 53, 59, 57, 52, 64, 91, 46, 58, 52, 65, 56, 58, 70, 61, 64, 78, 61, 115, 64, 62, 69, 88, 70, 67, 61, 90, 85, 60, 76, 64, 75, 54, 69, 68, 60, 79, 69, 65, 82, 87, 86, 71, 59, 71, 61, 77, 70, 63, 60, 59, 65, 96, 73, 63, 56, 65, 52, 62, 79, 72, 60, 71, 66, 60, 60, 48, 68, 80, 81, 87, 57, 57, 74, 94, 64, 50, 61, 61, 52, 53, 72, 72, 59, 56, 62, 85, 65, 86, 70, 150, 82, 68, 55, 77, 61, 147, 68, 83, 73, 66, 63, 68, 58, 70, 61, 70, 108, 76, 54, 62, 83, 68, 60, 53, 78, 53, 80, 59, 54, 61, 69, 58, 67, 62, 69, 67, 107, 60, 75, 67, 64, 53, 67, 67, 65, 58, 69, 52, 96, 72, 70, 63, 86, 43, 63, 70, 60, 57, 55, 52, 51, 49, 61, 70, 69, 57, 41, 61, 72, 66, 76, 81, 46, 52, 65, 81, 55, 50, 75, 90, 59, 58, 63, 52, 60, 49, 64, 57, 53, 56, 58, 66, 55, 55, 62, 44, 40, 72, 63, 61, 83, 72, 68, 61, 91, 99, 75, 76, 66, 67, 69, 70, 46, 103, 62, 75, 64, 78, 79, 52, 54, 52, 62, 58, 67, 76, 67, 58, 65, 110, 61, 71, 33, 66, 64, 78, 67, 92, 99, 58, 70, 73, 61, 62, 49, 63, 49, 54, 114, 73, 71, 61, 83, 86, 65, 65, 59, 60, 74, 81, 67, 54, 78, 80, 51, 55, 73, 76, 60, 45, 78, 58, 61, 66, 60, 65, 57, 75, 62, 82, 46, 61, 52, 50, 69, 71, 56, 55, 69, 69, 96, 55, 72, 65, 62, 58, 51, 60, 65, 55, 50, 64, 61, 67, 65, 62, 72, 64, 61, 78, 61, 90, 88, 54, 70, 54, 48, 63, 65, 62, 83, 80, 48, 62, 89, 56, 50, 62, 79, 63, 71, 60, 56, 75, 51, 85, 68, 64, 57, 69, 51, 127, 52, 75, 73, 57, 59, 100, 63, 73, 60, 66, 47, 65, 54, 61, 47, 81, 61, 61, 84, 53, 44, 89, 63, 59, 73, 70, 55, 62, 60, 50, 70, 60, 67, 63, 67, 52, 57, 61, 57, 94, 64, 55, 69, 61, 53, 71, 63, 74, 75, 45, 64, 63, 64, 73, 61, 65, 55, 87, 54, 77, 86, 68, 70, 105, 74, 57, 75, 66, 74, 66, 65, 56, 59, 55, 58, 68, 50, 56, 65, 75, 64, 64, 73, 60, 67, 69, 71, 67, 62, 56, 74, 70, 63, 85, 61, 79, 87, 50, 53, 66, 95, 69, 65, 70, 58, 60, 65, 50, 65, 62, 62, 75, 60, 64, 57, 55, 75, 62, 144, 56, 89, 52, 66, 73, 60, 54, 72, 73, 98, 58, 57, 73, 52, 73, 77, 62, 81, 100, 50, 67, 69, 69, 52, 73, 77, 54, 53, 70, 60, 56, 55, 75, 65, 60, 134, 61, 104, 64, 61, 52, 82, 65, 64, 59, 58, 57, 60, 56, 70, 68, 53, 75, 54, 64, 41, 66, 95, 66, 66, 46, 57, 75, 73, 47, 69, 60, 56, 58, 119, 52, 70, 57, 107, 82, 69, 59, 95, 106, 53, 58, 68, 49, 67, 64, 76, 86, 67, 73, 73, 66, 51, 69, 64, 65, 55, 61, 68, 48, 55, 61, 78, 
70, 74, 46, 50, 53, 57, 71, 49, 63, 55, 58, 58, 44, 51, 68, 58, 62, 49, 65, 57, 69, 59, 62, 54, 62, 62, 57, 83, 63, 70, 54, 79, 50, 65, 73, 57, 62, 61, 80, 56, 73, 67, 62, 59, 87, 74, 65, 55, 37, 56, 61, 89, 75, 68, 52, 70, 80, 52, 46, 61, 63, 46, 67, 69, 65, 54, 88, 84, 62, 63, 64, 56, 74, 75, 72, 75, 89, 69, 83, 56, 55, 66, 68, 60, 96, 75, 117, 74, 93, 72, 54, 81, 67, 83, 74, 56, 49, 77, 63, 101, 69, 57, 67, 98, 71, 69, 47, 65, 53, 74, 68, 57, 58, 60, 81, 75, 63, 58, 77, 45, 63, 81, 70, 67, 94, 75, 68, 61, 54, 60, 56, 80, 61, 103, 70, 81, 53, 88, 59, 69, 63, 67, 73, 45, 65, 42, 47, 49, 57, 82, 59, 74, 78, 77, 90, 71, 48, 56, 55, 69, 47, 57, 61, 61, 71, 56, 68, 67, 65, 58, 57, 96, 77, 65, 60, 60, 75, 97, 63, 84, 54, 92, 56, 59, 59, 72, 72, 62, 77, 49, 72, 62, 80, 55, 59, 55, 54, 76, 54, 66, 47, 73, 70, 43, 73, 45, 52, 85, 59, 101, 62, 54, 70, 72, 83, 66, 64, 82, 73, 66, 64, 51, 58, 50, 54, 59, 62, 56, 87, 97, 74, 44, 81, 64, 89, 63, 59, 65, 75, 52, 76, 72, 78, 53, 60, 76, 62, 72, 68, 62, 61, 71, 47, 83, 69, 62, 60, 67, 63, 85, 70, 63, 62, 49, 49, 66, 60, 49, 63, 65, 69, 82, 50, 79, 52, 64, 83, 65, 60, 112, 66, 55, 52, 65, 42, 89, 54, 55, 54, 52, 102, 57, 70, 45, 49, 58, 62, 68, 42, 51, 66, 53, 55, 54, 49, 49, 90, 102, 76, 64, 74, 81, 60, 92, 74, 56, 73, 96, 48, 58, 57, 72, 70, 45, 93, 54, 56, 72, 67, 47, 47, 41, 59, 72, 57, 68, 76, 67, 62, 78, 68, 46, 52, 88, 87, 47, 59, 97, 84, 72, 52, 63, 47, 64, 60, 67, 50, 79, 52, 65, 52, 51, 89, 63, 77, 91, 54, 60, 64, 91, 68, 72, 79, 63, 59, 79, 63, 63, 65, 59, 47, 55, 53, 65, 61, 89, 85, 71, 48, 75, 65, 63, 49, 43, 94, 68, 66, 48, 69, 53, 64, 71, 77, 67, 58, 57, 59, 103, 78, 40, 73, 70, 60, 65, 79, 54, 78, 64, 64, 63, 45, 77, 89, 116, 68, 48, 71, 72, 51, 60, 68, 58, 36, 95, 46, 74, 67, 128, 48, 74, 60, 62, 45, 103, 83, 59, 57, 78, 47, 79, 78, 61, 39, 58, 52, 66, 74, 50, 73, 128, 55, 74, 59, 64, 93, 62, 63, 101, 47, 74, 106, 68, 68, 77, 70, 64, 50, 51, 98, 60, 47, 53, 52, 57, 72, 109, 64, 36, 57, 43, 71, 58, 69, 61, 71, 44, 55, 60, 56, 69, 98, 52, 58, 72, 90, 66, 79, 43, 66, 57, 97, 59, 80, 60, 62, 77, 49, 92, 55, 77, 69, 73, 51, 34, 73, 50, 78, 69, 50, 76, 59, 42, 63, 62, 57, 88, 64, 79, 90, 58, 68, 53, 65, 70, 77, 66, 74, 55, 61, 68, 65, 53, 71, 54, 63, 80, 67, 54, 65, 73, 65, 74, 84, 46, 55, 76, 63, 55, 53, 52, 72, 54, 60, 72, 81, 91, 47, 68, 61, 92, 60, 65, 75, 46, 57, 57, 66, 64, 82, 86, 65, 54, 68, 44, 97, 73, 55, 73, 57, 64, 70, 68, 59, 69, 91, 67, 56, 79, 63, 72, 64, 50, 65, 69, 58, 75, 74, 78, 71, 101, 61, 76, 83, 58, 66, 50, 70, 66, 71, 76, 50, 66, 74, 59, 88, 63, 55, 65, 81, 59, 65, 55, 41, 54, 73, 49, 58, 55, 66, 68, 52, 73, 72, 85, 63, 60, 79, 37, 65, 94, 69, 65, 56, 60, 59, 78, 59, 74, 75, 71, 55, 61, 63, 68, 68, 62, 83, 61, 65, 83, 64, 77, 62, 59, 84, 77, 112, 56, 83, 50, 67, 89, 60, 47, 53, 50, 60, 45, 82, 71, 49, 80, 70, 81, 93, 55, 67, 49, 91, 63, 80, 67, 48, 76, 85, 63, 57, 46, 55, 73, 47, 51, 62, 59, 49, 69, 51, 90, 58, 72, 65, 47, 65, 115, 57, 70, 55, 52, 58, 73, 60, 54, 73, 56, 109, 37, 88, 72, 55, 50, 67, 57, 55, 66, 122, 51, 81, 61, 69, 78, 58, 56, 60, 56, 46, 75, 77, 51, 56, 59, 69, 52, 45, 57, 69, 56, 64, 60, 83, 67, 71, 51, 88, 55, 62, 65, 67, 70, 37, 65, 44, 56, 142, 57, 74, 65, 60, 84, 66, 69, 45, 74, 103, 61, 63, 49, 68, 54, 66, 61, 73, 48, 52, 47, 82, 109, 67, 62, 68, 47, 71, 58, 47, 56, 66, 86, 56, 66, 58, 63, 87, 87, 80, 81, 63, 81, 67, 45, 79, 50, 73, 68, 57, 125, 63, 77, 73, 50, 55, 59, 60, 51, 58, 47, 80, 69, 77, 55, 61, 52, 72, 55, 70, 57, 66, 55, 59, 55, 67, 72, 74, 55, 58, 66, 53, 61, 64, 53, 101, 61, 64, 51, 
60, 74, 64, 103, 82, 58, 58, 76, 54, 74, 63, 65, 72, 73, 67, 58, 39, 52, 77, 39, 49, 94, 59, 56, 60, 86, 60, 75, 48, 61, 52, 66, 86, 69, 50, 47, 65, 61, 65, 40, 67, 49, 52, 55, 51, 62, 56, 40, 72, 64, 61, 72, 50, 71, 54, 72, 53, 66, 58, 74, 65, 71, 60, 69, 76, 59, 59, 81, 69, 59, 58, 58, 61, 63, 63, 89, 112, 59, 79, 50, 83, 69, 80, 76, 53, 55, 72, 63, 53, 45, 55, 70, 86, 61, 66, 82, 71, 51, 63, 67, 65, 74, 72, 50, 48, 86, 66, 49, 81, 55, 35, 71, 73, 71, 75, 77, 56, 65, 70, 101, 61, 54, 58, 56, 65, 56, 62, 91, 107, 76, 63, 69, 70, 54, 89, 59, 79, 41, 54, 69, 78, 86, 82, 69, 60, 72, 39, 39, 57, 71, 61, 71, 52, 116, 66, 49, 77, 82, 71, 73, 64, 64, 73, 84, 66, 105, 61, 75, 71, 55, 51, 50, 49, 53, 78, 64, 62, 70, 54, 166, 53, 88, 50, 93, 68, 58, 68, 68, 64, 56, 69, 66, 68, 81, 61, 66, 79, 58, 65, 48, 74, 73, 62, 62, 92, 57, 80, 60, 61, 56, 70, 92, 54, 83, 82, 86, 63, 58, 54, 83, 64, 93, 89, 51, 58, 56, 74, 48, 61, 66, 60, 58, 56, 84, 60, 77, 53, 68, 62, 60, 59, 60, 63, 74, 69, 55, 69, 65, 69, 45, 83, 55, 86, 53, 78, 117, 50, 74, 94, 54, 88, 66, 51, 90, 83, 79, 60, 115, 46, 67, 59, 62, 69, 84, 70, 97, 50, 80, 49, 68, 66, 71, 45, 59, 42, 59, 99, 57, 63, 78, 48, 51, 55, 59, 63, 116, 71, 78, 73, 73, 77, 65, 115, 63, 51, 61, 41, 63, 44, 72, 72, 55, 69, 100, 58, 62, 66, 67, 60, 68, 67, 52, 68, 47, 72, 64, 47, 58, 78, 70, 71, 65, 60, 55, 56, 58, 51, 59, 65, 79, 49, 51, 55, 56, 56, 74, 60, 52, 57, 74, 58, 65, 69, 67, 45, 63, 87, 63, 54, 52, 90, 82, 85, 49, 58, 144, 85, 76, 77, 68, 73, 64, 70, 91, 72, 66, 88, 53, 51, 69, 80, 64, 50, 90, 68, 92, 83, 62, 69, 59, 85, 75, 53, 66, 48, 74, 65, 67, 57, 99, 71, 64, 74, 63, 64, 90, 45, 62, 89, 58, 53, 76, 62, 99, 58, 68, 60, 71, 57, 77, 54, 58, 77, 78, 60, 51, 42, 78, 55, 73, 48, 60, 57, 99, 74, 45, 68, 79, 87, 73, 58, 69, 53, 65, 68, 70, 60, 52, 48, 73, 58, 59, 72, 60, 60, 73, 59, 77, 51, 53, 70, 53, 73, 55, 59, 65, 70, 59, 65, 47, 55, 66, 78, 96, 55, 55, 60, 108, 66, 68, 53, 58, 85, 82, 59, 56, 79, 59, 69, 63, 62, 53, 56, 51, 82, 64, 70, 57, 62, 51, 79, 46, 49, 77, 49, 90, 72, 48, 67, 91, 62, 42, 40, 68, 53, 71, 87, 44, 74, 55, 78, 43, 70, 58, 87, 62, 59, 63, 73, 71, 63, 57, 51, 58, 68, 66, 66, 64, 97, 79, 37, 74, 64, 52, 74, 54, 68, 72, 80, 70, 68, 66, 73, 54, 77, 84, 67, 55, 67, 79, 68, 78, 86, 53, 64, 98, 68, 58, 72, 69, 67, 61, 65, 63, 68, 59, 58, 69, 55, 73, 51, 43, 71, 62, 71, 63, 51, 62, 70, 77, 68, 58, 63, 49, 76, 65, 58, 60, 51, 62, 69, 83, 76, 75, 66, 52, 68, 59, 65, 84, 103, 69, 52, 60, 70, 59, 53, 57, 70, 102, 69, 56, 62, 78, 52, 48, 64, 64, 70, 57, 43, 87, 63, 63, 73, 56, 60, 56, 80, 66, 66, 71, 77, 65, 71, 75, 43, 110, 57, 77, 54, 62, 65, 65, 78, 63, 73, 63, 57, 70, 78, 78, 45, 73, 60, 74, 77, 57, 86, 49, 75, 46, 86, 61, 67, 95, 49, 68, 67, 68, 70, 67, 65, 52, 66, 80, 59, 47, 79, 60, 79, 87, 78, 66, 69, 85, 70, 75, 87, 73, 58, 59, 44, 63, 73, 35, 70, 57, 61, 85, 60, 61, 57, 69, 53, 81, 49, 72, 63, 77, 66, 62, 56, 50, 61, 65, 129, 71, 70, 47, 77, 143, 55, 69, 70, 56, 53, 89, 52, 43, 53, 61, 80, 63, 72, 93, 76, 53, 73, 64, 67, 79, 74, 61, 87, 45, 69, 51, 56, 72, 60, 54, 71, 54, 67, 57, 67, 71, 70, 62, 60, 130, 65, 70, 87, 59, 71, 88, 79, 67, 75, 71, 98, 47, 52, 73, 71, 54, 63, 83, 57, 66, 79, 69, 79, 70, 81, 61, 80, 58, 51, 45, 58, 63, 63, 62, 56, 60, 66, 66, 58, 50, 77, 69, 62, 50, 51, 80, 78, 64, 81, 71, 61, 62, 66, 56, 59, 83, 78, 81, 65, 61, 71, 61, 52, 71, 79, 53, 40, 54, 78, 56, 48, 73, 64, 65, 74, 55, 72, 61, 111, 67, 66, 69, 62, 63, 78, 82, 57, 62, 61, 58, 79, 64, 61, 52, 74, 81, 49, 58, 57, 55, 52, 52, 65, 61, 77, 45, 49, 63, 66, 69, 59, 
77, 89, 64, 55, 61, 56, 103, 75, 59, 61, 72, 54, 87, 58, 70, 73, 81, 72, 56, 53, 54, 46, 64, 67, 61, 46, 59, 43, 69, 48, 72, 63, 70, 64, 97, 52, 66, 80, 59, 82, 95, 53, 75, 47, 58, 53, 75, 42, 74, 102, 60, 45, 110, 74, 67, 75, 46, 73, 56, 73, 54, 85, 67, 41, 66, 65, 65, 93, 65, 47, 77, 48, 66, 82, 77, 55, 40, 99, 71, 64, 45, 73, 62, 71, 85, 61, 51, 79, 74, 54, 62, 91, 47, 79, 57, 59, 64, 64, 72, 64, 59, 53, 67, 51, 64, 67, 87, 74, 61, 70, 47, 86, 93, 60, 69, 62, 59, 57, 60, 87, 68, 80, 70, 80, 66, 95, 73, 63, 97, 55, 73, 61, 56, 63, 62, 60, 60, 73, 72, 51, 62, 67, 73, 73, 57, 55, 75, 72, 55, 34, 68, 64, 58, 78, 65, 67, 79, 57, 42, 215, 63, 105, 73, 65, 53, 54, 70, 67, 79, 63, 58, 55, 64, 58, 47, 73, 68, 81, 89, 61, 66, 64, 74, 58, 65, 69, 50, 83, 59, 86, 81, 74, 61, 59, 80, 73, 47, 67, 62, 76, 62, 104, 42, 66, 67, 74, 60, 73, 59, 68, 75, 67, 77, 82, 70, 71, 78, 77, 62, 61, 64, 57, 67, 76, 90, 78, 68, 61, 63, 66, 82, 74, 76, 94, 70, 54, 63, 61, 62, 56, 47, 60, 51, 68, 66, 79, 73, 49, 56, 58, 63, 74, 65, 85, 82, 54, 71, 44, 66, 59, 91, 54, 68, 61, 81, 70, 45, 71, 73, 65, 79, 53, 51, 82, 86, 66, 60, 64, 61, 62, 102, 55, 85, 46, 55, 54, 47, 41, 74, 77, 81, 51, 58, 72, 70, 60, 57, 52, 59, 58, 74, 145, 81, 85, 55, 68, 73, 71, 73, 63, 63, 70, 61, 56, 66, 54, 65, 75, 55, 50, 128, 70, 69, 47, 71, 80, 100, 66, 44, 56, 52, 60, 83, 107, 56, 75, 62, 78, 81, 51, 50, 102, 93, 53, 66, 53, 66, 53, 55, 47, 56, 69, 70, 61, 55, 56, 55, 69, 63, 60, 61, 63, 62, 81, 59, 60, 50, 86, 92, 56, 80, 58, 59, 81, 63, 88, 71, 67, 68, 84, 74, 62, 62, 85, 84, 79, 55, 48, 46, 66, 80, 74, 71, 58, 55, 46, 55, 65, 75, 70, 39, 56, 72, 70, 58, 72, 66, 67, 80, 67, 77, 54, 42, 75, 56, 77, 66, 62, 68, 52, 70, 52, 76, 74, 53, 62, 65, 48, 50, 73, 60, 72, 103, 76, 80, 65, 65, 50, 105, 59, 66, 63, 52, 61, 67, 66, 65, 65, 56, 70, 75, 113, 80, 72, 60, 64, 55, 63, 64, 82, 70, 48, 73, 65, 69, 46, 84, 67, 85, 66, 62, 61, 71, 66, 81, 49, 75, 67, 71, 100, 99, 62, 50, 74, 71, 75, 77, 132, 84, 74, 51, 69, 50, 52, 83, 83, 86, 51, 56, 79, 57, 108, 70, 75, 64, 104, 57, 69, 81, 48, 68, 45, 49, 57, 58, 69, 69, 85, 88, 72, 69, 70, 62, 60, 113, 61, 64, 65, 43, 65, 75, 83, 52, 62, 76, 48, 77, 58, 106, 51, 71, 109, 62, 87, 74, 77, 82, 65, 68, 66, 50, 53, 68, 57, 68, 62, 63, 68, 45, 56, 70, 62, 71, 76, 77, 48, 54, 57, 58, 54, 63, 60, 88, 65, 52, 69, 58, 74, 99, 73, 50, 62, 60, 45, 75, 96, 69, 62, 50, 65, 56, 63, 64, 48, 77, 62, 64, 53, 70, 79, 53, 55, 53, 58, 81, 71, 67, 82, 64, 49, 51, 73, 54, 56, 119, 68, 65, 49, 64, 56, 49, 67, 51, 67, 65, 62, 59, 73, 48, 67, 79, 50, 62, 73, 53, 61, 80, 63, 42, 70, 60, 68, 72, 55, 60, 51, 61, 70, 76, 57, 82, 85, 58, 89, 69, 68, 52, 73, 58, 55, 61, 69, 75, 74, 88, 58, 128, 51, 57, 71, 66, 68, 58, 49, 61, 91, 69, 51, 94, 59, 56, 66, 58, 44, 89, 69, 60, 59, 53, 84, 69, 59, 82, 51, 53, 65, 56, 82, 83, 48, 59, 73, 60, 59, 54, 49, 55, 75, 64, 81, 66, 65, 74, 74, 70, 52, 62, 45, 63, 66, 80, 63, 72, 78, 80, 35, 61, 66, 82, 74, 66, 55, 79, 64, 53, 72, 58, 58, 76, 44, 65, 63, 62, 65, 67, 68, 75, 65, 69, 46, 62, 67, 60, 45, 84, 75, 56, 37, 70, 58, 63, 65, 75, 41, 79, 91, 55, 49, 81, 63, 69, 56, 78, 47, 54, 88, 70, 73, 78, 88, 50, 50, 44, 62, 55, 81, 87, 73, 43, 73, 62, 80, 70, 72, 74, 65, 59, 62, 122, 50, 62, 59, 64, 74, 68, 103, 94, 77, 67, 70, 45, 95, 63, 59, 75, 54, 66, 58, 67, 63, 67, 72, 61, 101, 55, 71, 75, 65, 76, 63, 84, 67, 99, 62, 61, 75, 53, 66, 106, 68, 67, 59, 68, 47, 92, 54, 70, 63, 90, 60, 55, 48, 57, 50, 50, 70, 70, 67, 56, 33, 62, 51, 50, 63, 63, 51, 66, 88, 49, 64, 73, 72, 60, 75, 59, 82, 59, 54, 56, 61, 57, 
67, 133, 47, 88, 76, 77, 60, 83, 67, 107, 71, 76, 91, 68, 66, 60, 61, 50, 50, 67, 60, 65, 85, 55, 80, 67, 60, 62, 88, 121, 71, 72, 80, 68, 61, 51, 60, 78, 70, 76, 74, 55, 73, 70, 57, 62, 93, 75, 55, 87, 52, 78, 57, 47, 55, 71, 64, 63, 48, 81, 52, 75, 47, 61, 70, 44, 55, 78, 66, 88, 50, 73, 75, 68, 43, 74, 55, 48, 84, 77, 55, 67, 64, 86, 71, 78, 58, 51, 46, 99, 56, 61, 64, 73, 74, 50, 60, 61, 64, 60, 54, 56, 78, 60, 44, 65, 80, 78, 89, 104, 74, 30, 59, 69, 56, 63, 69, 88, 51, 57, 46, 100, 68, 65, 62, 60, 59, 74, 63, 76, 81, 60, 79, 62, 72, 80, 70, 59, 43, 63, 87, 80, 53, 77, 70, 48, 73, 71, 69, 55, 56, 65, 55, 57, 63, 73, 63, 76, 61, 109, 52, 47, 75, 54, 36, 54, 80, 53, 56, 71, 58, 74, 69, 53, 56, 56, 30, 56, 50, 59, 52, 54, 92, 70, 58, 53, 47, 62, 82, 57, 56, 66, 50, 62, 56, 53, 81, 67, 77, 71, 40, 42, 66, 88, 53, 89, 70, 75, 66, 64, 50, 55, 57, 76, 48, 58, 59, 50, 60, 67, 77, 62, 73, 63, 73, 50, 74, 53, 60, 64, 68, 59, 66, 98, 63, 67, 62, 71, 56, 73, 72, 90, 67, 66, 50, 57, 86, 65, 72, 46, 36, 76, 61, 74, 62, 66, 48, 65, 58, 74, 66, 58, 77, 56, 66, 59, 54, 60, 73, 72, 95, 58, 67, 81, 78, 64, 75, 73, 57, 65, 71, 60, 52, 84, 76, 70, 82, 62, 53, 74, 64, 62, 61, 57, 73, 57, 72, 57, 83, 80, 66, 49, 64, 57, 71, 55, 47, 50, 48, 73, 57, 65, 49, 67, 58, 79, 54, 73, 68, 51, 66, 46, 104, 61, 29, 56, 50, 71, 51, 71, 82, 79, 78, 45, 56, 88, 64, 67, 62, 87, 51, 62, 59, 73, 43, 63, 65, 54, 67, 68, 62, 55, 59, 61, 77, 68, 77, 72, 61, 49, 83, 85, 58, 75, 60, 73, 54, 56, 51, 58, 88, 95, 85, 60, 51, 76, 58, 69, 60, 59, 68, 58, 58, 64, 68, 69, 72, 98, 59, 61, 72, 57, 75, 54, 84, 64, 56, 76, 71, 57, 65, 61, 67, 64, 54, 63, 57, 49, 60, 48, 69, 72, 63, 75, 73, 83, 85, 65, 60, 55, 79, 63, 70, 89, 68, 76, 62, 67, 89, 86, 71, 79, 62, 69, 73, 57, 71, 62, 66, 65, 63, 58, 66, 58, 57, 53, 66, 72, 77, 99, 56, 68, 47, 81, 55, 64, 76, 65, 71, 65, 51, 49, 75, 110, 57, 90, 70, 38, 98, 62, 56, 54, 65, 61, 61, 65, 66, 72, 93, 95, 57, 44, 90, 56, 91, 69, 85, 88, 62, 71, 57, 62, 60, 63, 53, 67, 73, 77, 62, 67, 61, 104, 88, 54, 64, 55, 59, 56, 52, 63, 82, 81, 53, 72, 67, 56, 66, 75, 63, 54, 52, 39, 80, 47, 56, 54, 56, 63, 59, 55, 61, 73, 74, 69, 57, 70, 58, 78, 62, 47, 71, 77, 59, 72, 67, 57, 74, 70, 73, 52, 55, 51, 75, 86, 62, 67, 94, 53, 81, 74, 65, 75, 53, 68, 51, 91, 56, 72, 79, 61, 61, 52, 93, 44, 68, 43, 53, 70, 67, 55, 73, 48, 56, 50, 62, 79, 63, 82, 53, 56, 49, 65, 74, 81, 93, 63, 75, 57, 68, 56, 66, 79, 80, 78, 61, 76, 59, 68, 69, 91, 66, 66, 63, 77, 57, 49, 78, 53, 50, 61, 56, 66, 62, 68, 74, 71, 50, 68, 68, 51, 61, 59, 65, 66, 57, 74, 70, 45, 50, 117, 61, 60, 74, 88, 85, 50, 61, 89, 52, 70, 58, 42, 68, 50, 73, 54, 74, 57, 71, 51, 69, 58, 50, 87, 72, 73, 69, 53, 53, 73, 66, 64, 68, 78, 59, 81, 70, 53, 73, 53, 77, 53, 93, 58, 54, 74, 42, 88, 53, 73, 58, 61, 59, 59, 47, 68, 70, 78, 61, 73, 60, 86, 93, 62, 59, 69, 65, 64, 61, 70, 83, 75, 62, 48, 75, 49, 57, 53, 72, 105, 59, 67, 70, 60, 51, 93, 67, 63, 83, 63, 68, 71, 58, 71, 160, 49, 63, 63, 72, 70, 62, 100, 63, 62, 75, 79, 62, 53, 62, 62, 72, 74, 100, 43, 55, 57, 105, 72, 54, 71, 63, 76, 67, 73, 58, 67, 57, 75, 55, 79, 48, 66, 73, 70, 57, 83, 73, 49, 66, 62, 61, 58, 63, 60, 58, 93, 70, 64, 54, 58, 93, 117, 63, 66, 63, 77, 67, 62, 60, 65, 67, 56, 68, 74, 75, 76, 99, 80, 65, 66, 87, 74, 59, 56, 66, 68, 62, 119, 69, 69, 67, 61, 62, 67, 75, 56, 58, 54, 65, 81, 64, 58, 71, 55, 46, 62, 67, 60, 62, 59, 92, 101, 53, 68, 58, 56, 69, 63, 73, 65, 76, 65, 67, 76, 78, 61, 66, 52, 62, 68, 69, 87, 82, 68, 70, 62, 60, 47, 70, 112, 68, 62, 58, 64, 61, 64, 61, 65, 78, 61, 53, 65, 73, 63, 
74, 59, 79, 62, 97, 74, 118, 54, 68, 64, 54, 69, 50, 65, 58, 81, 58, 63, 78, 53, 78, 67, 77, 55, 60, 60, 66, 69, 61, 74, 69, 55, 65, 69, 49, 68, 74, 58, 63, 72, 60, 84, 60, 71, 69, 74, 31, 75, 57, 57, 67, 60, 62, 60, 60, 94, 67, 59, 82, 51, 65, 38, 62, 58, 60, 55, 71, 44, 108, 94, 62, 54, 54, 51, 67, 83, 62, 43, 50, 68, 66, 62, 65, 62, 58, 70, 53, 68, 54, 80, 45, 49, 55, 58, 63, 61, 56, 54, 68, 66, 73, 92, 78, 70, 68, 63, 53, 66, 63, 54, 87, 75, 66, 53, 74, 61, 80, 70, 63, 62, 82, 68, 64, 59, 57, 61, 92, 66, 74, 60, 62, 67, 61, 51, 59, 60, 57, 68, 57, 77, 60, 63, 48, 52, 70, 59, 72, 53, 58, 57, 68, 76, 71, 55, 72, 67, 66, 83, 81, 58, 80, 65, 62, 64, 50, 79, 74, 55, 58, 57, 70, 55, 52, 74, 89, 57, 82, 69, 53, 65, 77, 51, 59, 75, 62, 53, 67, 57, 67, 41, 60, 65, 60, 45, 65, 81, 61, 55, 61, 55, 65, 69, 55, 69, 63, 65, 107, 50, 89, 95, 48, 113, 63, 57, 69, 53, 84, 75, 60, 80, 51, 65, 97, 69, 55, 83, 54, 75, 79, 57, 68, 65, 60, 62, 59, 102, 58, 59, 67, 63, 61, 76, 100, 65, 76, 93, 96, 67, 74, 65, 56, 74, 61, 57, 50, 61, 49, 66, 59, 70, 86, 61, 66, 62, 59, 60, 64, 61, 55, 78, 60, 71, 78, 63, 43, 61, 66, 67, 108, 46, 57, 46, 52, 88, 72, 93, 67, 60, 53, 71, 94, 66, 77, 59, 65, 60, 64, 61, 72, 63, 66, 60, 59, 59, 73, 73, 81, 85, 64, 62, 59, 98, 58, 72, 57, 81, 68, 48, 66, 76, 76, 70, 71, 51, 67, 56, 50, 66, 72, 67, 64, 65, 41, 61, 65, 52, 86, 80, 70, 76, 82, 104, 61, 61, 57, 60, 62, 45, 60, 54, 87, 66, 54, 85, 51, 74, 69, 62, 59, 62, 65, 60, 60, 95, 70, 69, 62, 63, 64, 59, 62, 42, 75, 43, 58, 58, 71, 72, 61, 74, 66, 74, 65, 76, 43, 51, 57, 72, 66, 77, 54, 64, 68, 52, 59, 68, 65, 45, 81, 76, 81, 72, 47, 83, 73, 58, 62, 56, 70, 81, 63, 64, 73, 68, 55, 70, 71, 83, 57, 54, 72, 85, 68, 53, 73, 62, 52, 60, 55, 75, 60, 55, 59, 56, 74, 56, 64, 55, 56, 51, 46, 43, 68, 65, 70, 62, 58, 72, 58, 55, 61, 83, 59, 52, 87, 54, 65, 55, 55, 69, 59, 85, 78, 85, 86, 65, 55, 85, 52, 52, 68, 96, 60, 61, 66, 57, 91, 58, 56, 90, 54, 69, 50, 61, 67, 46, 65, 71, 72, 73, 63, 73, 59, 66, 46, 72, 65, 103, 79, 61, 65, 71, 61, 50, 66, 80, 94, 67, 66, 50, 66, 59, 79, 69, 68, 63, 56, 69, 54, 64, 60, 67, 57, 53, 59, 55, 89, 68, 72, 68, 60, 52, 64, 53, 52, 70, 60, 65, 74, 70, 98, 58, 73, 57, 82, 54, 67, 75, 51, 93, 45, 84, 59, 51, 62, 70, 64, 77, 56, 50, 72, 72, 59, 49, 76, 41, 57, 73, 61, 75, 59, 65, 93, 58, 62, 89, 53, 75, 58, 48, 151, 59, 80, 68, 51, 58, 52, 81, 66, 51, 152, 64, 50, 60, 70, 55, 70, 54, 51, 57, 79, 65, 71, 64, 70, 68, 59, 85, 56, 54, 56, 67, 82, 117, 72, 49, 68, 65, 104, 70, 67, 68, 105, 62, 90, 72, 60, 61, 44, 53, 55, 60, 57, 50, 84, 54, 58, 56, 79, 55, 42, 73, 74, 72, 71, 55, 57, 59, 68, 58, 85, 59, 53, 61, 90, 47, 52, 87, 52, 47, 55, 94, 62, 54, 60, 73, 50, 54, 54, 41, 72, 70, 63, 78, 62, 49, 71, 47, 69, 76, 59, 63, 50, 58, 91, 56, 72, 64, 68, 59, 47, 69, 85, 69, 60, 56, 106, 43, 67, 77, 61, 67, 71, 59, 70, 59, 63, 78, 63, 62, 68, 100, 73, 64, 74, 65, 56, 45, 51, 57, 52, 64, 59, 62, 50, 75, 87, 73, 54, 82, 66, 73, 69, 52, 59, 59, 52, 59, 37, 93, 71, 46, 52, 64, 64, 80, 42, 72, 63, 62, 60, 89, 69, 65, 126, 58, 63, 58, 76, 66, 62, 66, 73, 72, 53, 74, 37, 76, 77, 57, 53, 69, 58, 55, 60, 59, 84, 91, 84, 52, 71, 69, 74, 52, 49, 90, 58, 72, 41, 41, 73, 100, 55, 75, 61, 85, 91, 55, 97, 57, 60, 55, 60, 64, 40, 100, 55, 48, 58, 53, 92, 52, 73, 44, 62, 78, 50, 57, 68, 67, 77, 62, 66, 70, 68, 62, 94, 88, 58, 41, 70, 84, 80, 77, 57, 54, 78, 58, 80, 63, 48, 91, 92, 57, 63, 98, 51, 59, 74, 58, 52, 60, 47, 85, 93, 96, 66, 53, 49, 59, 68, 43, 63, 55, 62, 54, 45, 52, 48, 62, 62, 81, 51, 64, 82, 60, 68, 50, 122, 91, 48, 77, 53, 
59, 60, 59, 52, 66, 79, 65, 55, 73, 64, 72, 57, 69, 95, 57, 58, 71, 57, 69, 87, 89, 83, 60, 103, 60, 66, 68, 60, 54, 54, 70, 71, 60, 67, 69, 55, 68, 67, 91, 50, 74, 48, 42, 78, 48, 56, 102, 60, 74, 75, 56, 74, 56, 91, 66, 52, 62, 42, 64, 72, 41, 64, 87, 82, 69, 72, 57, 68, 55, 69, 50, 72, 66, 60, 68, 56, 53, 47, 53, 83, 68, 72, 66, 88, 83, 79, 62, 65, 64, 84, 64, 74, 69, 52, 57, 73, 79, 76, 63, 61, 56, 70, 70, 92, 49, 69, 39, 106, 52, 67, 64, 40, 76, 72, 60, 58, 40, 64, 65, 56, 47, 74, 67, 83, 76, 135, 87, 69, 56, 57, 55, 46, 65, 73, 75, 65, 100, 74, 74, 67, 47, 62, 61, 54, 55, 76, 59, 69, 90, 44, 59, 83, 50, 63, 67, 73, 67, 109, 55, 61, 63, 65, 75, 59, 64, 66, 53, 49, 72, 78, 82, 66, 68, 64, 128, 121, 59, 49, 114, 55, 78, 97, 61, 70, 92, 55, 60, 39, 79, 61, 80, 64, 64, 95, 65, 63, 68, 87, 70, 105, 42, 50, 50, 71, 50, 56, 43, 63, 77, 88, 62, 81, 71, 40, 64, 63, 69, 61, 68, 53, 89, 52, 52, 72, 62, 78, 58, 67, 44, 83, 60, 81, 52, 70, 61, 72, 63, 88, 44, 66, 61, 54, 40, 93, 72, 62, 45, 56, 67, 66, 72, 71, 62, 56, 59, 69, 90, 89, 76, 61, 57, 83, 60, 58, 71, 77, 68, 65, 77, 72, 63, 109, 53, 49, 30, 68, 81, 55, 67, 43, 56, 59, 58, 55, 72, 47, 60, 53, 62, 104, 65, 49, 81, 39, 70, 49, 45, 72, 54, 79, 75, 40, 40, 77, 64, 60, 71, 90, 74, 50, 60, 66, 57, 83, 60, 49, 55, 57, 70, 68, 57, 95, 56, 71, 50, 115, 75, 59, 63, 81, 114, 76, 52, 98, 94, 70, 83, 48, 51, 78, 78, 60, 83, 93, 54, 83, 61, 55, 79, 61, 40, 45, 60, 54, 51, 58, 52, 70, 57, 67, 64, 58, 65, 61, 71, 59, 50, 78, 75, 72, 60, 45, 40, 53, 68, 56, 77, 61, 71, 58, 45, 70, 54, 52, 39, 55, 81, 65, 63, 59, 92, 63, 81, 63, 46, 53, 73, 71, 45, 77, 96, 66, 77, 59, 83, 71, 66, 56, 51, 109, 60, 55, 59, 64, 77, 71, 67, 54, 49, 81, 56, 59, 60, 56, 78, 65, 58, 62, 50, 64, 47, 60, 50, 61, 54, 47, 57, 65, 60, 60, 64, 81, 58, 58, 57, 65, 65, 69, 75, 91, 92, 59, 66, 90, 83, 65, 46, 73, 61, 71, 72, 39, 62, 51, 72, 52, 56, 65, 52, 49, 50, 68, 109, 50, 82, 67, 83, 68, 99, 62, 60, 61, 70, 105, 84, 100, 57, 83, 40, 67, 69, 82, 64, 67, 76, 48, 65, 52, 73, 62, 107, 69, 75, 66, 94, 88, 60, 91, 85, 43, 88, 80, 56, 40, 56, 52, 62, 53, 74, 56, 67, 73, 59, 71, 83, 62, 63, 47, 67, 83, 103, 124, 65, 67, 95, 79, 83, 75, 96, 70, 81, 76, 82, 64, 71, 57, 62, 79, 53, 47, 60, 83, 89, 53, 75, 58, 60, 67, 54, 73, 53, 67, 58, 57, 70, 56, 69, 65, 87, 57, 99, 54, 89, 40, 48, 60, 90, 94, 72, 37, 74, 71, 70, 77, 59, 71, 73, 92, 75, 76, 102, 72, 40, 47, 87, 51, 61, 65, 63, 58, 51, 60, 78, 67, 72, 57, 48, 77, 55, 52, 88, 83, 62, 70, 76, 76, 38, 88, 53, 82, 114, 51, 66, 80, 73, 73, 48, 35, 95, 84, 43, 66, 65, 60, 78, 67, 60, 40, 73, 65, 57, 77, 57, 74, 77, 60, 66, 87, 77, 56, 119, 78, 67, 73, 60, 93, 81, 51, 60, 62, 68, 60, 61, 82, 56, 48, 61, 56, 67, 83, 83, 55, 59, 127, 73, 64, 83, 59, 73, 93, 76, 71, 62, 69, 55, 57, 51, 58, 70, 61, 70, 69, 69, 50, 63, 75, 61, 77, 71, 62, 61, 72, 57, 63, 70, 67, 60, 78, 54, 55, 82, 55, 61, 57, 64, 80, 71, 58, 60, 78, 66, 39, 124, 58, 54, 55, 58, 59, 57, 103, 62, 55, 44, 59, 48, 55, 85, 64, 64, 85, 64, 54, 74, 55, 64, 82, 56, 86, 41, 62, 69, 64, 64, 60, 56, 58, 53, 85, 76, 64, 65, 63, 82, 64, 66, 63, 66, 59, 71, 97, 57, 81, 86, 59, 63, 56, 57, 54, 48, 94, 88, 76, 91, 57, 67, 71, 69, 52, 58, 85, 50, 54, 57, 62, 56, 61, 51, 61, 66, 77, 78, 75, 44, 60, 56, 43, 55, 62, 65, 84, 57, 56, 79, 77, 41, 53, 50, 70, 78, 72, 54, 79, 66, 53, 84, 63, 71, 86, 60, 53, 57, 84, 74, 50, 88, 76, 53, 76, 47, 70, 96, 63, 83, 44, 77, 50, 62, 79, 95, 84, 83, 67, 48, 55, 78, 86, 74, 72, 62, 52, 44, 53, 64, 80, 103, 60, 55, 84, 50, 55, 63, 81, 73, 108, 42, 60, 74, 66, 67, 56, 64, 
84, 59, 76, 62, 51, 46, 57, 66, 57, 42, 63, 98, 83, 56, 48, 53, 51, 55, 72, 51, 62, 68, 70, 58, 60, 58, 71, 70, 64, 49, 68, 70, 71, 54, 72, 38, 59, 79, 31, 74, 48, 72, 62, 39, 56, 84, 64, 94, 85, 93, 58, 81, 69, 54, 45, 64, 51, 92, 47, 59, 77, 87, 71, 72, 62, 91, 91, 62, 64, 76, 25, 84, 119, 96, 61, 60, 76, 75, 61, 58, 54, 68, 53, 63, 66, 46, 64, 46, 64, 44, 75, 45, 52, 37, 81, 70, 50, 68, 109, 48, 62, 48, 69, 85, 67, 48, 47, 47, 60, 59, 61, 68, 68, 46, 65, 57, 77, 36, 82, 48, 55, 78, 70, 76, 57, 46, 50, 61, 71, 62, 70, 69, 92, 90, 51, 58, 65, 59, 106, 98, 71, 61, 66, 46, 42, 49, 55, 64, 64, 73, 69, 55, 102, 90, 69, 61, 59, 45, 45, 79, 128, 64, 65, 44, 77, 54, 64, 69, 77, 52, 91, 51, 72, 87, 60, 94, 75, 87, 74, 51, 62, 46, 76, 81, 89, 62, 51, 94, 52, 67, 67, 53, 78, 72, 66, 54, 50, 103, 65, 61, 79, 61, 58, 65, 37, 71, 58, 71, 35, 63, 74, 84, 73, 34, 68, 69, 87, 56, 54, 66, 55, 65, 68, 58, 63, 66, 82, 60, 48, 71, 51, 57, 80, 60, 64, 59, 84, 86, 66, 48, 78, 35, 89, 41, 70, 69, 68, 42, 43, 96, 92, 74, 65, 86, 70, 53, 67, 87, 95, 65, 50, 56, 74, 53, 80, 74, 53, 106, 67, 64, 55, 61, 57, 61, 57, 74, 43, 51, 72, 101, 77, 80, 71, 57, 61, 67, 48, 66, 67, 68, 96, 53, 74, 67, 91, 61, 58, 38, 79, 49, 70, 68, 54, 45, 57, 86, 52, 46, 83, 49, 66, 46, 71, 58, 60, 62, 57, 67, 56, 82, 74, 70, 49, 69, 60, 46, 57, 55, 51, 48, 67, 59, 59, 95, 59, 41, 93, 73, 92, 59, 78, 52, 72, 68, 109, 69, 102, 94, 69, 73, 82, 91, 59, 62, 79, 78, 60, 63, 51, 72, 71, 75, 71, 87, 83, 65, 64, 60, 57, 70, 68, 66, 69, 90, 56, 69, 53, 30, 59, 44, 55, 67, 91, 68, 62, 89, 122, 68, 58, 85, 73, 77, 55, 73, 55, 75, 78, 51, 64, 99, 34, 67, 73, 60, 53, 46, 64, 59, 60, 72, 62, 83, 51, 34, 61, 112, 61, 63, 71, 57, 64, 92, 59, 51, 67, 52, 80, 61, 56, 77, 58, 116, 50, 70, 64, 56, 42, 68, 65, 62, 49, 62, 56, 50, 69, 90, 97, 110, 67, 79, 75, 120, 76, 65, 71, 62, 78, 64, 64, 70, 72, 61, 52, 56, 37, 78, 69, 84, 88, 61, 57, 71, 77, 70, 63, 66, 61, 29, 46, 82, 77, 78, 53, 58, 76, 70, 59, 66, 81, 65, 52, 63, 68, 51, 53, 48, 48, 54, 60, 96, 99, 64, 55, 76, 45, 44, 45, 61, 74, 64, 46, 65, 86, 93, 60, 40, 57, 80, 56, 54, 88, 56, 73, 88, 37, 56, 70, 67, 93, 86, 87, 45, 82, 60, 56, 75, 70, 52, 44, 60, 52, 62, 68, 68, 75, 98, 70, 60, 68, 66, 55, 119, 127, 49, 73, 65, 57, 53, 41, 83, 60, 79, 72, 74, 50, 81, 37, 39, 51, 93, 76, 46, 76, 72, 91, 57, 72, 88, 98, 55, 96, 53, 55, 60, 68, 57, 55, 33, 58, 65, 46, 72, 80, 67, 80, 63, 92, 41, 64, 66, 73, 51, 53, 61, 50, 85, 90, 64, 27, 53, 51, 76, 74, 53, 100, 65, 56, 57, 61, 59, 80, 89, 60, 52, 56, 69, 38, 83, 75, 59, 105, 70, 53, 78, 64, 73, 59, 59, 59, 45, 75, 67, 72, 60, 41, 51, 66, 51, 77, 58, 81, 45, 61, 63, 71, 70, 52, 64, 46, 65, 67, 67, 53, 65, 38, 68, 80, 61, 67, 132, 58, 53, 64, 66, 62, 71, 119, 72, 89, 56, 70, 55, 70, 62, 52, 60, 66, 43, 55, 57, 58, 74, 85, 49, 57, 64, 70, 67, 52, 65, 64, 56, 70, 78, 73, 79, 56, 49, 67, 104, 80, 75, 67, 63, 55, 58, 68, 65, 68, 70, 66, 47, 74, 57, 67, 69, 73, 61, 48, 48, 52, 61, 64, 71, 67, 63, 75, 80, 58, 100, 57, 62, 51, 49, 69, 71, 69, 63, 57, 70, 71, 52, 63, 98, 85, 74, 88, 63, 49, 73, 82, 79, 50, 51, 66, 65, 75, 72, 64, 83, 74, 55, 40, 64, 62, 58, 57, 74, 89, 57, 67, 50, 59, 55, 65, 59, 63, 58, 72, 60, 52, 57, 96, 82, 80, 80, 68, 66, 65, 73, 52, 64, 68, 48, 60, 61, 62, 63, 77, 48, 78, 84, 53, 52, 57, 62, 50, 45, 52, 58, 57, 64, 81, 65, 53, 63, 65, 57, 58, 60, 68, 58, 62, 56, 64, 58, 66, 63, 67, 70, 57, 65, 63, 92, 62, 68, 90, 65, 59, 64, 82, 57, 99, 47, 96, 58, 72, 64, 65, 73, 59, 72, 73, 57, 64, 65, 60, 60, 70, 54, 67, 60, 56, 97, 62, 57, 90, 57, 69, 58, 60, 59, 81, 
64, 55, 47, 75, 56, 68, 93, 67, 53, 64, 87, 79, 50, 75, 69, 72, 58, 39, 80, 56, 49, 80, 84, 72, 54, 73, 90, 44, 67, 66, 66, 91, 70, 60, 54, 68, 67, 78, 76, 72, 53, 50, 92, 60, 55, 90, 51, 66, 67, 72, 60, 69, 64, 66, 61, 76, 76, 59, 59, 70, 58, 54, 57, 67, 86, 59, 61, 67, 54, 64, 70, 63, 77, 67, 49, 61, 69, 60, 66, 74, 65, 78, 78, 53, 119, 65, 70, 69, 57, 54, 70, 56, 67, 60, 54, 72, 64, 62, 58, 67, 78, 71, 72, 59, 68, 55, 52, 49, 54, 62, 58, 71, 88, 62, 64, 57, 53, 69, 71, 53, 68, 49, 79, 86, 40, 40, 59, 50, 60, 87, 63, 49, 46, 65, 69, 64, 88, 98, 45, 84, 86, 66, 62, 51, 73, 55, 83, 55, 46, 68, 66, 55, 54, 77, 71, 58, 63, 67, 116, 73, 55, 64, 52, 61, 61, 66, 66, 61, 60, 82, 62, 63, 42, 62, 90, 67, 47, 59, 56, 78, 65, 78, 62, 60, 77, 56, 58, 57, 90, 62, 57, 62, 50, 59, 56, 76, 81, 62, 93, 67, 53, 67, 60, 51, 62, 76, 72, 76, 65, 73, 96, 82, 71, 61, 80, 92, 55, 74, 74, 64, 66, 61, 75, 73, 82, 84, 59, 72, 55, 63, 74, 57, 51, 64, 67, 64, 104, 72, 84, 71, 64, 69, 50, 57, 65, 61, 64, 66, 63, 63, 61, 60, 62, 69, 55, 55, 65, 63, 68, 67, 56, 83, 76, 64, 62, 61, 75, 103, 65, 55, 96, 61, 54, 76, 66, 57, 92, 70, 62, 47, 51, 107, 53, 73, 60, 62, 59, 64, 60, 72, 78, 69, 67, 52, 58, 62, 65, 129, 66, 91, 84, 54, 65, 61, 93, 79, 69, 68, 80, 65, 68, 49, 55, 71, 66, 58, 60, 59, 55, 63, 58, 64, 85, 76, 77, 61, 65, 63, 52, 67, 70, 62, 116, 88, 63, 47, 61, 47, 65, 67, 74, 36, 70, 75, 53, 71, 99, 63, 67, 47, 53, 44, 48, 52, 46, 37, 52, 54, 55, 68, 61, 47, 53, 54, 73, 68, 64, 65, 82, 97, 68, 78, 45, 62, 107, 93, 108, 72, 77, 79, 57, 59, 54, 82, 47, 48, 53, 59, 53, 57, 54, 75, 55, 80, 82, 62, 71, 62, 97, 70, 116, 79, 58, 82, 93, 67, 76, 87, 61, 74, 50, 69, 70, 64, 66, 71, 123, 63, 76, 61, 47, 55, 64, 68, 69, 17, 66, 93, 73, 60, 81, 57, 66, 56, 70, 85, 59, 59, 65, 66, 64, 63, 55, 68, 50, 54, 70, 52, 91, 56, 48, 58, 72, 62, 117, 58, 65, 74, 68, 66, 60, 63, 53, 118, 73, 56, 90, 62, 63, 65, 86, 57, 67, 74, 55, 63, 72, 51, 73, 58, 64, 65, 65, 62, 40, 72, 63, 56, 61, 65, 73, 52, 85, 50, 67, 67, 49, 59, 52, 76, 55, 63, 60, 68, 51, 74, 77, 44, 56, 73, 52, 67, 47, 72, 62, 60, 65, 63, 79, 83, 89, 65, 66, 52, 70, 60, 59, 61, 45, 56, 83, 58, 62, 43, 64, 71, 65, 69, 68, 77, 66, 94, 69, 49, 67, 88, 73, 68, 84, 74, 57, 65, 56, 63, 67, 66, 67, 61, 60, 74, 124, 91, 62, 58, 68, 61, 80, 58, 76, 66, 46, 71, 54, 49, 94, 59, 52, 114, 63, 76, 61, 66, 62, 53, 67, 63, 72, 62, 62, 45, 81, 66, 55, 76, 75, 48, 53, 71, 87, 60, 59, 73, 68, 41, 70, 66, 75, 65, 73, 60, 60, 73, 60, 60, 55, 55, 88, 56, 66, 69, 56, 65, 63, 45, 89, 67, 72, 74, 70, 58, 66, 55, 91, 64, 60, 46, 57, 63, 51, 52, 58, 64, 71, 66, 69, 79, 89, 59, 61, 58, 57, 65, 64, 74, 51, 52, 81, 60, 58, 64, 64, 42, 117, 59, 52, 57, 72, 72, 68, 56, 71, 49, 86, 65, 57, 79, 71, 69, 59, 56, 49, 65, 61, 65, 66, 62, 79, 62, 60, 58, 65, 62, 58, 61, 57, 52, 58, 70, 71, 51, 53, 69, 50, 71, 75, 75, 70, 79, 67, 74, 55, 69, 79, 75, 49, 68, 64, 85, 77, 51, 47, 61, 65, 61, 80, 71, 100, 60, 63, 76, 61, 60, 64, 72, 44, 66, 74, 53, 65, 51, 66, 63, 71, 69, 67, 65, 73, 62, 73, 62, 56, 48, 80, 63, 68, 73, 67, 68, 59, 61, 58, 65, 64, 72, 55, 65, 68, 63, 83, 87, 56, 57, 64, 57, 61, 54, 74, 66, 64, 55, 53, 63, 66, 74, 79, 76, 65, 61, 70, 63, 76, 72, 63, 118, 55, 52, 64, 78, 53, 57, 62, 79, 61, 76, 55, 81, 53, 58, 63, 92, 100, 61, 51, 48, 63, 64, 66, 70, 50, 51, 94, 86, 61, 69, 47, 66, 77, 57, 72, 50, 54, 46, 41, 60, 65, 46, 75, 65, 69, 68, 62, 83, 64, 80, 69, 62, 69, 64, 63, 75, 86, 53, 70, 66, 65, 51, 74, 93, 54, 56, 110, 56, 83, 85, 75, 82, 48, 53, 59, 57, 71, 75, 58, 81, 55, 71, 61, 75, 93, 50, 56, 60, 75, 
97, 47, 68, 81, 65, 45, 67, 45, 57, 61, 55, 77, 40, 58, 68, 59, 80, 68, 59, 62, 41, 65, 49, 51, 47, 46, 63, 77, 53, 55, 73, 67, 56, 50, 68, 45, 76, 64, 70, 41, 45, 68, 52, 39, 70, 58, 66, 51, 73, 62, 88, 53, 61, 36, 63, 77, 74, 74, 61, 66, 65, 64, 65, 64, 74, 61, 58, 52, 57, 80, 62, 50, 89, 69, 65, 80, 94, 141, 75, 89, 62, 67, 53, 59, 48, 63, 48, 48, 56, 68, 54, 65, 52, 77, 63, 52, 97, 91, 73, 68, 71, 54, 69, 63, 51, 68, 67, 69, 76, 99, 44, 71, 112, 126, 68, 111, 53, 65, 62, 51, 50, 58, 47, 54, 73, 76, 62, 57, 82, 55, 74, 61, 55, 56, 96, 54, 54, 52, 41, 103, 84, 88, 65, 78, 52, 47, 76, 74, 41, 43, 59, 62, 56, 59, 77, 63, 114, 71, 48, 55, 65, 86, 36, 76, 51, 78, 60, 107, 52, 50, 68, 55, 98, 50, 66, 72, 90, 109, 103, 52, 98, 55, 75, 67, 55, 71, 69, 86, 87, 71, 82, 42, 71, 74, 67, 103, 60, 51, 84, 52, 67, 42, 58, 59, 67, 80, 93, 75, 110, 52, 65, 60, 80, 68, 78, 54, 87, 44, 74, 71, 53, 49, 69, 47, 61, 76, 59, 84, 69, 55, 81, 64, 78, 58, 56, 59, 60, 95, 84, 55, 64, 73, 68, 60, 45, 75, 80, 43, 44, 68, 54, 73, 89, 72, 56, 45, 49, 55, 76, 61, 50, 68, 96, 53, 115, 50, 71, 48, 48, 68, 61, 56, 61, 39, 88, 55, 73, 112, 53, 77, 53, 76, 106, 77, 53, 92, 79, 100, 75, 63, 57, 60, 47, 56, 72, 38, 71, 62, 60, 64, 69, 79, 65, 49, 66, 75, 60, 87, 69, 54, 39, 55, 39, 59, 56, 57, 47, 66, 63, 90, 63, 66, 79, 76, 75, 86, 56, 47, 51, 83, 50, 92, 57, 63, 60, 45, 72, 52, 46, 74, 60, 72, 69, 97, 75, 53, 75, 74, 59, 65, 49, 80, 73, 74, 57, 68, 51, 61, 71, 65, 54, 49, 86, 68, 90, 64, 85, 62, 79, 56, 67, 94, 63, 101, 57, 63, 48, 54, 86, 56, 63, 63, 72, 114, 85, 67, 64, 72, 61, 78, 78, 52, 87, 108, 121, 56, 64, 70, 67, 50, 62, 90, 51, 51, 58, 111, 66, 50, 78, 62, 71, 38, 48, 97, 84, 88, 75, 54, 75, 62, 52, 63, 56, 56, 56, 79, 73, 73, 57, 63, 89, 70, 64, 73, 62, 46, 43, 61, 54, 66, 85, 82, 71, 64, 56, 86, 68, 66, 69, 52, 41, 74, 32, 33, 49, 68, 60, 63, 80, 58, 56, 80, 67, 83, 62, 53, 61, 51, 59, 60, 63, 87, 67, 56, 63, 64, 61, 54, 61, 67, 67, 57, 68, 44, 51, 70, 65, 39, 55, 56, 76, 90, 62, 76, 59, 52, 66, 35, 54, 84, 54, 74, 66, 62, 60, 59, 96, 63, 70, 44, 74, 68, 89, 82, 65, 80, 79, 41, 39, 61, 55, 54, 86, 61, 56, 67, 58, 64, 59, 74, 79, 50, 45, 55, 78, 61, 73, 88, 85, 105, 62, 64, 53, 88, 78, 65, 86, 61, 61, 93, 57, 48, 50, 69, 53, 72, 114, 63, 55, 60, 55, 50, 97, 60, 64, 49, 65, 54, 50, 35, 51, 49, 74, 88, 46, 75, 48, 91, 67, 53, 42, 55, 59, 35, 52, 55, 86, 65, 65, 70, 65, 84, 58, 56, 65, 79, 50, 68, 65, 88, 57, 84, 76, 66, 79, 100, 61, 50, 70, 58, 89, 64, 73, 82, 61, 60, 74, 68, 67, 77, 67, 47, 50, 72, 63, 72, 92, 67, 72, 50, 76, 97, 74, 79, 73, 56, 58, 56, 46, 55, 83, 54, 57, 59, 68, 78, 62, 49, 71, 58, 65, 79, 61, 71, 53, 77, 69, 57, 62, 86, 61, 53, 60, 75, 75, 68, 73, 49, 60, 70, 59, 64, 76, 75, 71, 58, 85, 63, 60, 65, 96, 82, 71, 85, 61, 53, 42, 57, 88, 45, 54, 81, 56, 59, 75, 64, 59, 53, 57, 95, 72, 76, 81, 55, 71, 69, 37, 71, 68, 43, 54, 67, 46, 53, 44, 75, 61, 59, 48, 61, 51, 117, 84, 87, 60, 61, 73, 80, 53, 51, 69, 67, 68, 59, 102, 62, 55, 68, 83, 58, 46, 58, 68, 75, 44, 98, 56, 68, 83, 72, 49, 72, 56, 71, 110, 56, 50, 100, 49, 74, 73, 47, 74, 67, 80, 48, 57, 69, 92, 60, 54, 71, 69, 69, 55, 54, 46, 81, 70, 62, 76, 63, 85, 50, 54, 74, 91, 67, 61, 46, 91, 78, 56, 39, 41, 91, 88, 77, 64, 40, 101, 54, 76, 59, 50, 52, 71, 76, 40, 71, 59, 110, 72, 65, 75, 69, 77, 166, 63, 49, 56, 43, 59, 60, 79, 50, 58, 71, 57, 59, 68, 89, 89, 61, 50, 65, 65, 72, 57, 53, 59, 73, 60, 90, 64, 85, 52, 72, 67, 50, 71, 97, 47, 53, 64, 61, 50, 53, 107, 47, 76, 102, 96, 56, 63, 69, 50, 70, 57, 73, 54, 65, 69, 51, 61, 86, 45, 58, 62, 71, 
53, 77, 58, 91, 78, 64, 44, 63, 70, 34, 62, 54, 69, 57, 51, 54, 71, 58, 50, 85, 58, 57, 47, 71, 62, 68, 50, 83, 64, 76, 55, 27, 57, 80, 65, 74, 69, 56, 68, 80, 80, 63, 66, 70, 66, 70, 64, 82, 55, 68, 84, 66, 82, 66, 78, 88, 66, 58, 78, 70, 60, 93, 41, 70, 57, 73, 61, 61, 75, 67, 78, 60, 72, 89, 60, 72, 55, 65, 82, 61, 88, 83, 67, 69, 91, 67, 45, 76, 66, 57, 85, 63, 56, 100, 66, 71, 49, 66, 41, 75, 52, 56, 65, 48, 69, 65, 43, 51, 69, 61, 99, 76, 63, 82, 67, 86, 84, 83, 59, 70, 84, 64, 57, 60, 42, 48, 57, 60, 63, 77, 63, 65, 62, 71, 78, 61, 93, 81, 71, 57, 71, 75, 110, 54, 152, 82, 72, 61, 85, 79, 85, 53, 60, 61, 70, 75, 42, 76, 89, 58, 52, 74, 63, 84, 77, 63, 67, 76, 56, 70, 59, 76, 64, 44, 101, 60, 72, 77, 74, 52, 62, 87, 66, 62, 77, 106, 67, 76, 54, 49, 65, 49, 81, 80, 53, 61, 58, 75, 48, 80, 79, 52, 56, 67, 47, 50, 58, 88, 62, 75, 55, 56, 81, 53, 83, 67, 60, 58, 51, 60, 66, 63, 86, 53, 58, 56, 48, 70, 66, 53, 50, 81, 41, 64, 44, 43, 89, 90, 51, 66, 71, 54, 56, 74, 53, 49, 84, 63, 83, 78, 83, 54, 46, 77, 73, 63, 55, 64, 55, 68, 69, 77, 60, 74, 69, 58, 65, 92, 55, 75, 64, 84, 77, 68, 57, 41, 66, 53, 62, 53, 55, 62, 66, 56, 65, 65, 58, 71, 63, 76, 79, 81, 69, 84, 75, 81, 59, 60, 51, 62, 69, 57, 59, 54, 61, 56, 53, 43, 58, 63, 57, 75, 55, 75, 57, 113, 61, 53, 71, 68, 56, 69, 79, 65, 61, 51, 69, 81, 61, 69, 69, 65, 78, 58, 91, 44, 68, 60, 76, 101, 86, 58, 60, 56, 57, 64, 69, 78, 60, 83, 66, 64, 63, 74, 72, 62, 77, 65, 79, 62, 65, 82, 55, 68, 51, 58, 57, 71, 80, 68, 50, 54, 67, 83, 60, 83, 62, 57, 49, 69, 56, 76, 101, 89, 57, 70, 76, 52, 106, 63, 65, 50, 60, 69, 62, 68, 40, 49, 42, 54, 51, 57, 56, 59, 69, 64, 61, 53, 79, 63, 56, 80, 54, 53, 63, 67, 58, 64, 68, 58, 72, 74, 72, 49, 64, 45, 65, 61, 66, 82, 61, 85, 57, 62, 75, 55, 61, 77, 65, 60, 62, 152, 58, 87, 86, 65, 71, 72, 84, 100, 71, 83, 56, 98, 61, 59, 72, 75, 60, 76, 71, 74, 64, 64, 68, 68, 71, 77, 70, 66, 60, 48, 64, 91, 67, 69, 55, 73, 70, 51, 83, 59, 75, 55, 88, 76, 66, 125, 86, 46, 57, 70, 71, 63, 75, 70, 62, 68, 69, 59, 67, 67, 62, 70, 59, 93, 62, 68, 74, 70, 52, 56, 56, 67, 60, 77, 75, 79, 61, 85, 60, 96, 63, 63, 76, 47, 62, 65, 74, 81, 55, 58, 69, 63, 64, 51, 52, 61, 61, 75, 54, 62, 93, 58, 52, 78, 68, 62, 53, 56, 53, 56, 57, 67, 84, 67, 77, 90, 64, 61, 34, 67, 74, 72, 63, 91, 51, 71, 55, 61, 56, 81, 61, 68, 63, 31, 61, 72, 54, 38, 59, 53, 41, 75, 64, 64, 75, 58, 69, 53, 64, 55, 64, 58, 48, 58, 68, 58, 74, 75, 87, 71, 58, 65, 61, 74, 67, 70, 74, 51, 78, 52, 50, 69, 59, 94, 75, 64, 44, 55, 57, 78, 67, 77, 52, 74, 40, 61, 60, 70, 59, 69, 77, 55, 99, 69, 58, 55, 68, 88, 59, 75, 62, 61, 53, 45, 81, 54, 51, 50, 70, 54, 70, 85, 68, 61, 60, 63, 59, 64, 70, 75, 68, 42, 57, 78, 65, 60, 68, 71, 46, 55, 62, 87, 72, 52, 91, 56, 56, 58, 49, 71, 65, 70, 71, 53, 64, 83, 74, 59, 94, 72, 74, 69, 69, 61, 52, 52, 52, 65, 64, 73, 72, 53, 76, 66, 96, 100, 53, 66, 53, 62, 68, 59, 57, 70, 59, 99, 74, 57, 48, 89, 57, 45, 60, 57, 89, 69, 77, 50, 62, 75, 62, 71, 63, 62, 67, 50, 64, 59, 46, 73, 64, 86, 58, 76, 53, 66, 57, 113, 56, 81, 62, 71, 81, 56, 67, 93, 75, 56, 56, 68, 54, 50, 60, 59, 71, 71, 74, 64, 64, 65, 54, 41, 63, 61, 68, 52, 78, 62, 59, 63, 48, 83, 76, 58, 83, 51, 44, 73, 59, 54, 62, 58, 88, 56, 74, 54, 85, 71, 49, 87, 57, 57, 85, 54, 76, 59, 56, 66, 83, 81, 60, 51, 74, 61, 61, 88, 58, 51, 64, 55, 64, 55, 68, 77, 60, 82, 75, 69, 54, 51, 51, 60, 64, 71, 50, 74, 67, 64, 63, 53, 48, 82, 85, 65, 83, 61, 57, 91, 65, 60, 52, 74, 50, 64, 61, 51, 60, 63, 61, 52, 64, 70, 70, 65, 59, 64, 119, 42, 68, 63, 68, 75, 62, 71, 57, 93, 83, 59, 77, 51, 67, 72, 66, 
64, 53, 76, 85, 58, 63, 53, 61, 48, 75, 56, 65, 80, 72, 51, 67, 81, 117, 70, 90, 60, 53, 74, 67, 61, 65, 87, 48, 80, 65, 59, 45, 51, 49, 59, 63, 66, 51, 76, 85, 53, 57, 74, 55, 53, 66, 56, 93, 65, 54, 73, 51, 86, 67, 72, 89, 62, 69, 48, 63, 64, 70, 79, 65, 58, 69, 66, 62, 59, 60, 66, 79, 119, 73, 45, 76, 74, 70, 72, 57, 63, 69, 72, 58, 39, 54, 60, 57, 49, 63, 53, 77, 74, 76, 78, 41, 82, 63, 68, 77, 50, 76, 68, 81, 59, 76, 65, 74, 72, 56, 89, 45, 68, 52, 71, 53, 60, 62, 59, 58, 58, 58, 60, 47, 69, 61, 93, 56, 61, 79, 58, 60, 64, 67, 86, 74, 70, 66, 67, 72, 107, 84, 74, 48, 39, 73, 65, 66, 71, 59, 59, 53, 55, 49, 74, 46, 66, 76, 76, 62, 70, 49, 86, 61, 41, 49, 54, 84, 73, 68, 59, 77, 58, 61, 68, 56, 62, 83, 82, 58, 70, 72, 70, 57, 66, 74, 68, 65, 65, 49, 45, 59, 85, 57, 52, 64, 77, 66, 62, 88, 73, 86, 78, 53, 71, 62, 72, 62, 73, 62, 36, 63, 73, 59, 63, 57, 63, 62, 78, 62, 61, 50, 52, 68, 59, 82, 66, 78, 75, 76, 59, 98, 57, 56, 106, 90, 81, 71, 67, 87, 52, 74, 62, 51, 87, 68, 63, 55, 52, 69, 56, 61, 62, 73, 56, 56, 51, 36, 80, 68, 63, 77, 69, 82, 54, 59, 50, 59, 81, 64, 83, 63, 65, 71, 70, 61, 72, 71, 68, 70, 66, 75, 61, 65, 55, 78, 53, 55, 50, 61, 56, 56, 81, 49, 64, 72, 68, 52, 98, 68, 54, 58, 62, 71, 52, 136, 47, 52, 64, 74, 69, 90, 54, 105, 81, 67, 60, 56, 62, 70, 57, 54, 45, 32, 58, 73, 80, 52, 82, 56, 78, 113, 50, 56, 65, 47, 109, 62, 60, 64, 89, 48, 54, 59, 66, 62, 68, 70, 54, 45, 75, 75, 73, 75, 65, 56, 85, 57, 64, 50, 68, 65, 57, 66, 60, 62, 72, 62, 68, 51, 61, 58, 81, 73, 67, 58, 54, 71, 75, 60, 72, 72, 63, 61, 72, 111, 52, 60, 62, 55, 89, 55, 73, 40, 58, 71, 46, 67, 58, 83, 76, 99, 61, 59, 72, 49, 54, 56, 53, 118, 50, 83, 72, 65, 65, 70, 60, 51, 65, 56, 53, 54, 69, 54, 50, 63, 63, 63, 80, 54, 55, 66, 82, 81, 122, 56, 69, 54, 57, 68, 60, 81, 60, 53, 80, 74, 73, 52, 61, 82, 71, 67, 57, 61, 67, 55, 57, 70, 104, 80, 78, 75, 70, 57, 57, 67, 74, 55, 57, 53, 51, 79, 48, 47, 56, 65, 57, 61, 69, 70, 67, 76, 88, 55, 56, 55, 60, 69, 57, 45, 55, 77, 110, 63, 60, 56, 70, 59, 69, 51, 61, 73, 46, 74, 48, 58, 91, 67, 56, 66, 61, 74, 78, 68, 57, 62, 61, 78, 60, 52, 56, 60, 57, 72, 55, 63, 62, 59, 55, 64, 71, 69, 111, 52, 110, 53, 66, 50, 53, 81, 71, 76, 72, 116, 60, 69, 44, 61, 50, 79, 72, 66, 57, 53, 53, 76, 90, 76, 62, 65, 89, 66, 47, 68, 58, 53, 67, 67, 73, 61, 72, 69, 73, 111, 80, 66, 64, 59, 72, 68, 61, 94, 64, 83, 66, 65, 56, 53, 68, 62, 69, 54, 76, 53, 64, 82, 67, 60, 71, 74, 58, 76, 71, 69, 65, 69, 51, 62, 68, 58, 73, 70, 53, 60, 74, 69, 70, 66, 65, 60, 47, 56, 58, 75, 48, 66, 54, 65, 62, 75, 64, 59, 69, 58, 64, 77, 116, 69, 69, 65, 74, 53, 69, 78, 72, 53, 104, 131, 87, 68, 65, 76, 81, 68, 55, 45, 57, 56, 68, 67, 78, 67, 50, 86, 49, 53, 70, 70, 62, 66, 67, 56, 58, 58, 58, 48, 51, 69, 72, 71, 52, 59, 58, 48, 65, 110, 45, 65, 55, 82, 74, 65, 76, 62, 68, 73, 57, 49, 61, 73, 49, 91, 62, 56, 77, 60, 62, 57, 66, 95, 67, 53, 57, 53, 74, 54, 57, 69, 67, 71, 63, 69, 58, 57, 68, 69, 63, 73, 54, 69, 61, 58, 54, 78, 71, 61, 56, 69, 74, 56, 66, 42, 74, 57, 79, 47, 67, 82, 55, 63, 63, 63, 63, 118, 45, 50, 39, 60, 64, 72, 62, 76, 68, 78, 77, 86, 77, 63, 60, 53, 63, 52, 66, 52, 61, 59, 56, 61, 82, 57, 60, 61, 51, 42, 62, 58, 55, 62, 66, 51, 50, 77, 67, 69, 53, 61, 74, 46, 67, 87, 56, 65, 66, 114, 55, 59, 73, 88, 72, 73, 70, 61, 69, 68, 62, 74, 47, 73, 67, 56, 62, 67, 66, 66, 39, 66, 62, 54, 47, 71, 54, 61, 60, 87, 59, 66, 57, 53, 54, 59, 67, 73, 63, 61, 81, 73, 68, 62, 83, 68, 56, 90, 76, 55, 64, 71, 77, 54, 68, 60, 65, 65, 52, 53, 100, 63, 56, 60, 64, 79, 77, 64, 114, 64, 57, 48, 53, 79, 54, 48, 81, 56, 
63, 77, 49, 51, 65, 98, 87, 62, 56, 48, 58, 58, 56, 71, 72, 59, 99, 55, 43, 79, 89, 90, 66, 76, 104, 74, 59, 78, 86, 91, 58, 69, 52, 60, 58, 55, 102, 55, 65, 61, 75, 69, 60, 54, 79, 93, 81, 90, 73, 75, 58, 57, 72, 65, 88, 53, 83, 74, 56, 81, 95, 63, 52, 64, 72, 56, 66, 70, 71, 50, 62, 58, 56, 73, 83, 58, 49, 55, 56, 57, 81, 63, 72, 61, 72, 79, 50, 55, 65, 60, 75, 69, 66, 63, 46, 55, 80, 51, 63, 63, 82, 61, 76, 86, 73, 57, 86, 59, 78, 61, 56, 55, 61, 58, 75, 66, 52, 57, 54, 81, 71, 55, 54, 60, 58, 53, 58, 59, 61, 47, 65, 61, 69, 74, 69, 92, 110, 77, 64, 58, 72, 69, 74, 66, 70, 49, 50, 64, 71, 59, 72, 66, 67, 66, 60, 50, 60, 70, 77, 45, 66, 63, 64, 68, 75, 58, 63, 62, 57, 67, 89, 77, 69, 63, 65, 60, 78, 115, 104, 71, 68, 63, 61, 65, 54, 46, 90, 67, 66, 64, 60, 63, 48, 65, 78, 58, 58, 59, 55, 82, 80, 52, 71, 80, 94, 60, 57, 81, 65, 73, 56, 60, 79, 58, 88, 83, 61, 66, 62, 70, 62, 72, 57, 60, 53, 64, 51, 66, 48, 57, 86, 67, 46, 64, 69, 57, 77, 104, 101, 61, 66, 68, 54, 71, 52, 49, 52, 74, 84, 71, 64, 56, 87, 57, 62, 77, 59, 56, 73, 71, 58, 53, 96, 87, 76, 66, 64, 78, 54, 55, 87, 73, 65, 64, 56, 58, 61, 60, 64, 66, 72, 46, 77, 70, 47, 46, 65, 56, 77, 69, 68, 73, 58, 70, 60, 61, 85, 79, 65, 88, 73, 82, 114, 68, 68, 64, 67, 65, 56, 64, 65, 70, 69, 70, 79, 60, 69, 58, 45, 62, 87, 69, 73, 60, 88, 41, 60, 65, 51, 56, 76, 75, 64, 65, 60, 52, 76, 80, 61, 116, 63, 55, 71, 79, 64, 58, 64, 65, 50, 63, 57, 76, 104, 57, 48, 71, 75, 73, 57, 68, 60, 51, 44, 57, 54, 66, 67, 54, 63, 61, 67, 74, 51, 58, 65, 68, 68, 65, 64, 87, 59, 80, 42, 62, 58, 82, 60, 63, 58, 62, 83, 86, 69, 68, 85, 85, 65, 70, 66, 72, 31, 52, 63, 86, 90, 89, 53, 94, 64, 74, 44, 69, 42, 63, 53, 72, 62, 71, 70, 75, 52, 60, 68, 64, 68, 54, 82, 72, 60, 63, 52, 62, 64, 65, 54, 64, 71, 68, 65, 101, 65, 64, 55, 59, 62, 67, 64, 81, 46, 82, 57, 62, 54, 73, 63, 66, 51, 61, 57, 80, 69, 53, 71, 78, 60, 62, 63, 79, 77, 67, 73, 54, 104, 41, 58, 92, 98, 50, 61, 56, 58, 87, 97, 57, 52, 63, 64, 56, 71, 79, 52, 72, 41, 53, 56, 64, 61, 87, 62, 76, 80, 70, 70, 58, 63, 87, 54, 48, 59, 64, 110, 57, 62, 53, 62, 54, 62, 71, 60, 61, 78, 70, 63, 72, 74, 51, 64, 66, 54, 72, 50, 54, 67, 66, 68, 44, 59, 88, 58, 85, 118, 49, 61, 62, 63, 66, 50, 57, 53, 52, 78, 49, 69, 59, 62, 54, 72, 83, 60, 80, 128, 61, 56, 70, 62, 73, 53, 66, 61, 45, 65, 68, 64, 55, 51, 58, 49, 56, 66, 54, 62, 45, 64, 67, 129, 47, 91, 64, 60, 74, 62, 57, 68, 60, 50, 52, 107, 72, 66, 49, 69, 59, 50, 69, 91, 101, 41, 102, 99, 76, 57, 62, 69, 49, 72, 77, 82, 62, 66, 68, 123, 80, 55, 78, 69, 66, 60, 67, 61, 63, 58, 58, 63, 84, 61, 60, 88, 48, 59, 75, 156, 65, 46, 60, 65, 47, 70, 83, 67, 61, 102, 72, 81, 51, 123, 54, 68, 67, 66, 58, 57, 55, 63, 61, 56, 59, 65, 71, 66, 70, 74, 66, 70, 71, 76, 60, 58, 52, 60, 77, 70, 46, 64, 63, 62, 103, 69, 77, 71, 65, 52, 42, 67, 51, 35, 66, 56, 66, 49, 55, 66, 57, 55, 64, 80, 60, 51, 51, 79, 69, 78, 76, 57, 49, 66, 51, 65, 59, 61, 67, 61, 77, 87, 53, 65, 55, 68, 59, 54, 69, 58, 59, 58, 99, 58, 58, 65, 76, 68, 58, 60, 56, 57, 54, 68, 77, 55, 95, 55, 51, 71, 67, 57, 54, 69, 59, 83, 70, 48, 68, 88, 77, 59, 70, 51, 61, 67, 65, 60, 82, 56, 56, 88, 54, 52, 69, 56, 50, 77, 64, 84, 48, 70, 50, 68, 70, 56, 68, 56, 83, 90, 60, 97, 68, 83, 72, 111, 71, 53, 58, 59, 69, 69, 54, 84, 69, 57, 54, 43, 101, 65, 97, 97, 78, 58, 60, 62, 63, 64, 55, 51, 58, 82, 73, 61, 70, 71, 46, 56, 50, 45, 50, 78, 45, 68, 59, 48, 74, 66, 63, 67, 70, 45, 76, 61, 61, 55, 74, 53, 55, 81, 77, 79, 71, 69, 60, 78, 57, 64, 62, 78, 47, 72, 125, 74, 75, 71, 50, 62, 97, 49, 68, 63, 83, 75, 63, 56, 53, 84, 90, 71, 55, 
79, 53, 68, 73, 73, 69, 74, 61, 74, 52, 63, 53, 61, 69, 49, 65, 66, 51, 46, 49, 71, 68, 50, 65, 68, 72, 52, 65, 66, 96, 69, 71, 71, 53, 106, 63, 61, 64, 54, 58, 64, 55, 52, 59, 57, 62, 52, 80, 68, 42, 49, 59, 65, 39, 74, 78, 68, 60, 56, 47, 89, 69, 87, 79, 69, 56, 52, 84, 76, 82, 49, 65, 102, 64, 55, 64, 108, 99, 68, 62, 48, 58, 76, 62, 96, 57, 79, 70, 56, 74, 80, 54, 83, 67, 69, 64, 69, 73, 65, 53, 100, 74, 71, 78, 109, 84, 54, 46, 81, 72, 70, 62, 59, 69, 59, 80, 74, 77, 68, 69, 52, 75, 67, 69, 59, 58, 82, 67, 48, 77, 49, 69, 69, 56, 65, 105, 61, 65, 69, 48, 85, 76, 77, 49, 54, 60, 67, 106, 68, 79, 45, 69, 65, 55, 57, 76, 82, 66, 100, 63, 69, 76, 63, 63, 58, 65, 53, 54, 60, 64, 64, 57, 115, 64, 68, 73, 51, 89, 57, 107, 92, 52, 46, 45, 51, 59, 62, 59, 81, 64, 68, 92, 64, 56, 69, 67, 64, 56, 58, 53, 62, 61, 63, 65, 63, 63, 89, 54, 65, 65, 54, 55, 65, 69, 60, 66, 63, 46, 64, 52, 75, 49, 51, 83, 76, 51, 62, 53, 69, 42, 68, 68, 48, 80, 70, 57, 105, 58, 58, 69, 53, 69, 56, 76, 71, 47, 69, 58, 77, 53, 68, 74, 41, 77, 64, 63, 63, 47, 65, 62, 63, 103, 105, 59, 57, 60, 80, 68, 73, 70, 52, 54, 112, 55, 72, 79, 61, 48, 77, 51, 71, 64, 66, 52, 48, 42, 72, 67, 68, 62, 65, 66, 59, 64, 52, 76, 80, 49, 51, 61, 66, 51, 56, 56, 52, 57, 55, 58, 53, 80, 71, 58, 57, 64, 73, 67, 50, 55, 46, 59, 69, 58, 63, 62, 56, 65, 87, 62, 72, 87, 73, 56, 70, 58, 63, 70, 66, 64, 55, 56, 68, 76, 113, 49, 88, 57, 49, 66, 48, 60, 59, 60, 68, 57, 61, 68, 54, 57, 68, 57, 70, 73, 48, 58, 61, 53, 53, 82, 59, 65, 58, 77, 66, 82, 113, 61, 66, 70, 51, 46, 54, 59, 69, 84, 74, 72, 83, 60, 89, 76, 54, 66, 59, 65, 57, 68, 61, 55, 57, 63, 62, 49, 80, 68, 63, 71, 69, 60, 62, 89, 56, 74, 61, 67, 73, 50, 58, 65, 52, 66, 65, 58, 51, 63, 56, 48, 61, 55, 61, 88, 62, 55, 81, 61, 83, 67, 82, 72, 66, 67, 60, 54, 86, 103, 69, 73, 78, 54, 61, 80, 58, 62, 54, 56, 52, 70, 86, 54, 63, 61, 74, 59, 66, 90, 100, 71, 63, 78, 51, 55, 53, 58, 59, 56, 64, 49, 73, 60, 82, 56, 93, 52, 53, 70, 54, 90, 88, 52, 68, 63, 75, 68, 120, 79, 75, 80, 74, 66, 69, 62, 73, 53, 64, 70, 61, 84, 62, 65, 61, 55, 73, 72, 60, 53, 84, 46, 50, 52, 54, 62, 66, 70, 61, 64, 69, 67, 83, 53, 64, 55, 73, 68, 53, 59, 59, 67, 63, 55, 60, 77, 46, 71, 41, 78, 75, 67, 70, 61, 65, 71, 72, 62, 55, 51, 75, 69, 84, 49, 67, 57, 83, 53, 67, 61, 61, 60, 72, 71, 54, 59, 72, 46, 33, 71, 76, 73, 70, 74, 53, 96, 58, 76, 60, 61, 65, 50, 62, 94, 60, 56, 73, 75, 55, 87, 77, 75, 59, 53, 68, 72, 62, 67, 87, 74, 71, 73, 54, 77, 58, 89, 60, 67, 45, 69, 67, 65, 56, 71, 70, 62, 73, 58, 87, 60, 79, 62, 63, 68, 64, 67, 56, 86, 57, 68, 79, 79, 50, 68, 60, 70, 55, 67, 56, 63, 49, 76, 62, 59, 64, 42, 64, 69, 89, 64, 78, 85, 59, 50, 76, 71, 64, 59, 75, 76, 81, 57, 54, 58, 59, 77, 71, 68, 62, 64, 58, 63, 96, 68, 57, 50, 70, 61, 61, 64, 70, 57, 60, 64, 69, 47, 62, 45, 71, 63, 64, 62, 69, 56, 68, 43, 60, 66, 47, 60, 69, 57, 53, 73, 68, 61, 58, 54, 107, 73, 116, 45, 87, 72, 60, 64, 58, 66, 67, 67, 57, 74, 61, 65, 65, 62, 57, 59, 109, 56, 68, 78, 64, 58, 72, 55, 63, 75, 60, 61, 81, 57, 59, 52, 73, 90, 54, 57, 41, 79, 72, 55, 68, 91, 76, 53, 61, 75, 80, 65, 76, 65, 84, 67, 68, 51, 59, 79, 72, 72, 78, 68, 75, 73, 67, 75, 53, 61, 85, 72, 70, 55, 55, 60, 68, 71, 63, 66, 72, 52, 67, 53, 52, 74, 50, 68, 72, 61, 78, 50, 58, 68, 78, 46, 106, 60, 45, 66, 64, 60, 71, 59, 54, 53, 73, 86, 78, 83, 57, 59, 70, 106, 57, 54, 64, 72, 70, 65, 52, 64, 52, 55, 61, 96, 75, 50, 70, 87, 56, 57, 60, 50, 82, 36, 58, 57, 69, 88, 60, 62, 58, 57, 57, 73, 65, 55, 55, 61, 78, 58, 58, 60, 64, 83, 74, 84, 85, 75, 87, 58, 57, 47, 57, 64, 66, 54, 63, 73, 
56, 75, 53, 64, 49, 66, 72, 56, 73, 61, 73, 51, 63, 64, 54, 72, 56, 67, 48, 54, 63, 44, 48, 68, 58, 52, 75, 53, 55, 73, 68, 52, 60, 74, 71, 63, 47, 52, 82, 63, 51, 63, 65, 60, 56, 65, 59, 58, 66, 64, 55, 67, 68, 88, 65, 64, 71, 62, 56, 50, 80, 63, 64, 72, 69, 66, 61, 41, 72, 84, 47, 68, 81, 66, 57, 61, 59, 64, 55, 57, 79, 57, 56, 60, 63, 62, 43, 67, 63, 72, 71, 51, 51, 55, 62, 57, 70, 70, 58, 66, 60, 53, 66, 82, 68, 64, 81, 60, 69, 66, 80, 54, 80, 83, 53, 68, 65, 58, 69, 60, 53, 64, 69, 51, 70, 50, 62, 39, 58, 62, 68, 74, 66, 70, 93, 75, 52, 80, 63, 53, 58, 65, 70, 55, 69, 101, 72, 76, 67, 54, 62, 60, 61, 50, 77, 72, 66, 58, 53, 51, 47, 58, 45, 57, 75, 72, 72, 70, 58, 55, 61, 68, 77, 67, 59, 58, 61, 85, 56, 70, 74, 53, 74, 59, 63, 53, 86, 55, 130, 78, 58, 54, 72, 67, 76, 65, 69, 46, 63, 82, 71, 64, 60, 73, 59, 59, 109, 53, 61, 51, 59, 78, 65, 54, 65, 69, 53, 62, 71, 48, 54, 73, 64, 57, 68, 96, 87, 59, 61, 59, 61, 36, 59, 64, 55, 59, 64, 98, 59, 53, 62, 65, 80, 51, 62, 55, 63, 62, 77, 53, 88, 69, 58, 63, 98, 70, 68, 49, 81, 77, 49, 68, 71, 67, 64, 60, 55, 59, 63, 69, 66, 70, 71, 131, 62, 70, 72, 70, 63, 74, 74, 69, 62, 87, 60, 28, 66, 83, 58, 66, 65, 71, 68, 59, 72, 63, 86, 76, 81, 90, 65, 62, 53, 65, 41, 70, 63, 75, 61, 100, 58, 77, 63, 64, 69, 61, 82, 76, 65, 81, 70, 94, 45, 66, 74, 80, 55, 89, 57, 62, 78, 88, 69, 59, 42, 75, 67, 82, 49, 77, 82, 63, 50, 52, 71, 66, 48, 61, 85, 55, 70, 81, 67, 58, 59, 45, 65, 52, 62, 63, 57, 57, 45, 90, 61, 59, 66, 64, 51, 63, 71, 60, 75, 49, 78, 57, 70, 55, 78, 61, 65, 61, 58, 56, 59, 67, 107, 76, 69, 103, 89, 67, 80, 80, 68, 52, 59, 66, 88, 79, 67, 56, 66, 52, 56, 53, 70, 64, 89, 84, 123, 62, 70, 77, 62, 61, 55, 54, 68, 64, 71, 48, 68, 72, 75, 56, 50, 99, 105, 65, 56, 62, 91, 78, 69, 64, 74, 61, 75, 61, 62, 64, 50, 68, 44, 48, 54, 59, 55, 74, 55, 48, 75, 79, 72, 48, 50, 56, 93, 78, 44, 62, 74, 38, 56, 48, 77, 68, 59, 73, 62, 59, 92, 79, 73, 87, 80, 60, 65, 63, 80, 44, 58, 55, 61, 43, 52, 58, 89, 68, 44, 54, 48, 56, 70, 78, 73, 66, 68, 52, 73, 88, 65, 59, 60, 76, 75, 67, 86, 49, 63, 69, 58, 57, 94, 83, 66, 56, 62, 60, 62, 42, 76, 70, 50, 59, 61, 34, 72, 80, 79, 84, 60, 54, 46, 61, 69, 71, 60, 71, 98, 64, 55, 50, 73, 57, 87, 86, 53, 72, 55, 81, 62, 75, 103, 65, 55, 57, 56, 41, 53, 67, 88, 54, 74, 93, 71, 57, 76, 68, 107, 75, 42, 66, 66, 68, 54, 68, 73, 63, 62, 54, 94, 70, 93, 78, 63, 85, 51, 68, 53, 83, 60, 64, 87, 51, 76, 75, 45, 74, 76, 61, 82, 55, 32, 67, 81, 52, 39, 67, 70, 74, 74, 59, 59, 91, 60, 62, 63, 67, 37, 71, 73, 52, 53, 70, 78, 75, 62, 69, 79, 54, 51, 63, 67, 59, 73, 75, 84, 89, 52, 80, 93, 60, 94, 61, 60, 58, 73, 62, 55, 76, 44, 77, 71, 69, 80, 53, 60, 72, 61, 67, 72, 83, 85, 68, 71, 67, 77, 72, 63, 63, 59, 73, 52, 62, 58, 74, 112, 76, 69, 54, 62, 95, 88, 50, 60, 58, 57, 86, 57, 84, 79, 89, 74, 68, 85, 63, 66, 76, 80, 60, 52, 84, 65, 76, 51, 70, 58, 73, 64, 55, 74, 71, 65, 51, 68, 63, 58, 66, 48, 47, 45, 55, 41, 50, 66, 68, 65, 50, 74, 64, 126, 82, 54, 53, 52, 70, 61, 70, 58, 53, 62, 77, 68, 60, 70, 58, 50, 60, 70, 75, 61, 81, 67, 60, 79, 43, 53, 82, 89, 63, 95, 56, 53, 84, 46, 61, 66, 67, 50, 64, 70, 94, 57, 47, 58, 55, 53, 61, 53, 71, 45, 57, 65, 57, 52, 66, 46, 70, 77, 45, 62, 53, 96, 70, 65, 64, 71, 51, 52, 87, 62, 64, 72, 56, 52, 63, 73, 59, 54, 68, 64, 58, 26, 72, 49, 114, 68, 41, 74, 54, 101, 65, 66, 86, 51, 105, 60, 60, 58, 104, 56, 73, 74, 65, 66, 69, 82, 56, 69, 57, 55, 110, 70, 83, 55, 67, 56, 68, 80, 95, 57, 105, 74, 42, 66, 57, 67, 70, 72, 79, 89, 71, 83, 54, 50, 87, 63, 71, 64, 50, 84, 69, 74, 59, 57, 75, 73, 75, 44, 54, 83, 66, 
66, 53, 61, 102, 71, 76, 63, 64, 77, 46, 51, 45, 58, 86, 58, 46, 65, 67, 79, 61, 66, 75, 82, 64, 51, 55, 65, 62, 78, 59, 75, 71, 89, 68, 86, 49, 84, 66, 61, 75, 59, 63, 62, 82, 86, 69, 52, 119, 62, 84, 44, 60, 44, 66, 63, 65, 82, 72, 52, 62, 76, 67, 53, 57, 69, 89, 44, 56, 49, 58, 56, 73, 54, 59, 59, 59, 75, 51, 84, 43, 57, 55, 73, 73, 51, 66, 77, 65, 76, 44, 45, 81, 59, 68, 97, 70, 59, 81, 57, 70, 68, 77, 72, 71, 71, 84, 88, 47, 62, 63, 61, 52, 78, 71, 54, 86, 68, 49, 59, 63, 61, 52, 59, 58, 37, 59, 81, 50, 91, 72, 68, 70, 55, 73, 73, 55, 46, 73, 51, 71, 86, 106, 60, 46, 49, 68, 61, 105, 64, 96, 62, 69, 62, 65, 86, 64, 51, 66, 63, 63, 53, 61, 98, 107, 68, 59, 87, 58, 65, 43, 66, 96, 57, 78, 68, 51, 48, 65, 62, 74, 62, 68, 84, 59, 73, 67, 78, 114, 45, 60, 75, 62, 102, 64, 71, 71, 80, 42, 67, 103, 64, 81, 61, 92, 58, 54, 77, 63, 50, 70, 42, 55, 58, 52, 68, 68, 55, 59, 59, 51, 82, 56, 71, 45, 60, 62, 89, 103, 57, 76, 63, 68, 112, 57, 72, 57, 94, 43, 71, 44, 66, 62, 55, 50, 69, 64, 56, 48, 49, 62, 48, 91, 52, 52, 59, 57, 86, 62, 132, 74, 63, 59, 76, 50, 78, 53, 62, 53, 58, 77, 56, 63, 54, 70, 69, 73, 54, 42, 53, 58, 72, 54, 69, 59, 62, 57, 84, 70, 84, 89, 60, 61, 68, 55, 95, 78, 58, 60, 78, 50, 88, 56, 58, 84, 66, 62, 69, 110, 51, 90, 49, 47, 53, 53, 61, 63, 57, 54, 69, 58, 46, 65, 49, 74, 55, 60, 64, 56, 59, 72, 63, 66, 57, 61, 81, 77, 55, 58, 55, 73, 50, 55, 65, 52, 52, 95, 111, 73, 88, 98, 68, 61, 51, 53, 59, 89, 67, 53, 44, 54, 69, 45, 68, 62, 54, 62, 75, 57, 58, 87, 62, 49, 64, 62, 104, 59, 66, 62, 59, 42, 47, 59, 65, 61, 57, 54, 80, 98, 73, 67, 92, 53, 76, 95, 87, 62, 71, 58, 65, 67, 87, 73, 57, 59, 64, 79, 70, 51, 67, 43, 53, 67, 115, 53, 62, 71, 50, 94, 57, 83, 39, 84, 79, 75, 73, 66, 84, 70, 66, 85, 56, 55, 74, 107, 64, 76, 43, 57, 57, 51, 66, 96, 58, 85, 46, 81, 67, 46, 61, 74, 51, 82, 54, 49, 60, 66, 41, 75, 71, 64, 41, 75, 86, 56, 58, 73, 63, 95, 53, 81, 76, 97, 80, 52, 65, 83, 67, 54, 63, 82, 61, 45, 77, 65, 56, 75, 136, 85, 58, 55, 75, 48, 62, 84, 74, 63, 76, 73, 89, 75, 75, 70, 41, 62, 71, 44, 63, 63, 55, 59, 48, 79, 53, 69, 59, 61, 90, 69, 63, 74, 74, 60, 48, 76, 52, 66, 47, 67, 83, 58, 35, 59, 88, 62, 69, 63, 55, 55, 95, 54, 65, 58, 66, 96, 42, 60, 53, 60, 58, 76, 92, 69, 102, 75, 83, 59, 77, 71, 70, 50, 48, 72, 73, 71, 59, 57, 89, 50, 87, 51, 60, 55, 72, 78, 58, 62, 102, 39, 50, 65, 79, 101, 61, 60, 57, 58, 58, 51, 63, 53, 62, 78, 71, 87, 64, 102, 73, 62, 70, 90, 94, 74, 46, 41, 60, 36, 61, 65, 56, 61, 65, 54, 60, 60, 80, 53, 75, 64, 107, 69, 52, 66, 55, 76, 75, 60, 48, 66, 72, 54, 83, 58, 43, 67, 70, 49, 62, 50, 71, 57, 66, 52, 54, 47, 43, 84, 71, 69, 62, 55, 50, 62, 45, 74, 55, 49, 66, 59, 58, 60, 63, 84, 54, 45, 64, 76, 41, 65, 97, 77, 76, 54, 70, 54, 70, 81, 54, 62, 55, 61, 55, 65, 51, 73, 58, 73, 49, 72, 74, 90, 85, 75, 51, 85, 59, 92, 63, 104, 50, 67, 71, 80, 71, 64, 61, 85, 68, 41, 64, 116, 62, 49, 55, 69, 69, 73, 60, 59, 60, 58, 135, 90, 78, 79, 79, 60, 66, 94, 64, 57, 74, 74, 69, 77, 75, 99, 70, 76, 57, 58, 77, 80, 61, 57, 71, 93, 61, 68, 68, 73, 51, 65, 67, 61, 98, 90, 59, 61, 64, 58, 42, 53, 65, 72, 57, 67, 71, 66, 56, 69, 77, 58, 34, 59, 77, 69, 78, 77, 52, 57, 87, 57, 79, 84, 82, 63, 57, 73, 114, 92, 45, 69, 63, 53, 57, 104, 65, 93, 53, 67, 66, 65, 62, 68, 48, 70, 54, 74, 66, 44, 76, 57, 68, 80, 70, 62, 75, 64, 49, 64, 64, 67, 59, 84, 54, 62, 58, 215, 90, 73, 71, 72, 55, 57, 45, 67, 97, 88, 68, 77, 69, 102, 65, 68, 50, 57, 68, 64, 74, 75, 59, 64, 72, 47, 89, 60, 81, 53, 63, 62, 66, 58, 54, 65, 85, 71, 75, 53, 91, 61, 58, 67, 66, 63, 51, 64, 59, 71, 69, 68, 54, 
67, 82, 69, 91, 71, 62, 51, 71, 64, 76, 55, 67, 67, 59, 43, 42, 62, 78, 70, 47, 54, 63, 56, 55, 74, 56, 54, 67, 59, 47, 70, 54, 47, 60, 53, 54, 69, 70, 79, 71, 69, 61, 71, 79, 84, 100, 73, 58, 62, 82, 54, 85, 59, 69, 75, 66, 86, 66, 65, 63, 55, 47, 51, 73, 68, 51, 80, 56, 66, 92, 88, 63, 67, 64, 81, 65, 57, 78, 57, 111, 57, 70, 73, 78, 57, 68, 63, 49, 54, 70, 51, 56, 83, 77, 76, 80, 66, 46, 71, 80, 63, 69, 65, 55, 72, 59, 59, 81, 68, 90, 60, 70, 68, 74, 61, 58, 106, 80, 73, 70, 46, 60, 64, 61, 64, 66, 58, 43, 43, 71, 80, 72, 80, 65, 61, 52, 71, 52, 82, 76, 87, 64, 61, 73, 88, 76, 56, 63, 77, 71, 60, 52, 78, 97, 79, 45, 63, 48, 60, 63, 55, 62, 47, 83, 64, 64, 54, 79, 74, 65, 59, 59, 80, 80, 85, 61, 87, 85, 51, 62, 53, 78, 60, 85, 64, 60, 61, 67, 72, 52, 87, 58, 75, 86, 67, 58, 43, 80, 56, 69, 46, 104, 73, 48, 55, 63, 46, 67, 77, 76, 79, 62, 74, 79, 94, 56, 69, 41, 84, 51, 75, 56, 76, 67, 63, 69, 55, 67, 63, 83, 60, 68, 47, 49, 52, 65, 75, 68, 70, 92, 71, 61, 63, 83, 43, 59, 48, 74, 64, 72, 51, 54, 62, 97, 57, 53, 62, 79, 68, 70, 73, 47, 66, 86, 57, 71, 52, 81, 70, 91, 58, 54, 71, 78, 58, 42, 49, 68, 61, 61, 66, 67, 91, 51, 60, 92, 65, 94, 78, 61, 54, 53, 47, 85, 60, 93, 88, 87, 63, 67, 58, 55, 87, 51, 54, 71, 78, 53, 70, 97, 51, 58, 46, 76, 62, 64, 106, 47, 53, 80, 62, 60, 58, 71, 61, 48, 48, 51, 47, 72, 85, 80, 56, 61, 94, 63, 55, 59, 36, 58, 79, 49, 60, 62, 44, 59, 73, 53, 67, 53, 93, 73, 52, 62, 88, 89, 80, 79, 73, 52, 68, 63, 70, 80, 68, 70, 66, 67, 45, 53, 64, 54, 63, 52, 59, 72, 68, 58, 118, 56, 64, 47, 75, 55, 49, 41, 64, 67, 48, 77, 80, 77, 76, 54, 69, 66, 74, 74, 66, 45, 59, 75, 59, 46, 94, 80, 66, 52, 66, 65, 55, 67, 63, 72, 75, 81, 69, 86, 56, 46, 62, 66, 61, 135, 100, 90, 70, 57, 91, 79, 76, 70, 63, 72, 48, 72, 69, 154, 63, 76, 61, 61, 60, 39, 50, 52, 73, 83, 52, 69, 75, 70, 77, 65, 51, 40, 57, 69, 101, 52, 78, 70, 68, 59, 54, 87, 47, 59, 72, 68, 57, 103, 36, 67, 77, 64, 67, 55, 55, 70, 72, 59, 70, 68, 42, 73, 65, 61, 57, 74, 94, 60, 81, 110, 94, 47, 62, 34, 59, 69, 74, 81, 74, 81, 60, 75, 61, 59, 80, 61, 61, 63, 71, 76, 56, 60, 73, 47, 70, 69, 89, 74, 68, 39, 53, 40, 75, 88, 61, 55, 57, 77, 54, 64, 59, 72, 69, 135, 64, 87, 56, 43, 52, 52, 78, 70, 87, 75, 64, 47, 53, 70, 72, 66, 42, 70, 60, 72, 54, 56, 73, 51, 75, 55, 81, 68, 64, 59, 72, 66, 54, 65, 83, 78, 58, 71, 43, 60, 97, 60, 74, 70, 67, 72, 66, 52, 68, 84, 59, 61, 65, 73, 54, 72, 47, 92, 82, 75, 82, 78, 99, 59, 73, 60, 66, 67, 47, 51, 73, 62, 61, 77, 60, 72, 61, 48, 60, 59, 74, 71, 61, 47, 61, 42, 28, 59, 79, 65, 55, 80, 59, 82, 39, 73, 57, 53, 65, 78, 61, 65, 78, 35, 55, 79, 78, 95, 54, 61, 62, 98, 75, 66, 80, 70, 53, 57, 75, 83, 87, 61, 76, 85, 65, 70, 50, 51, 80, 53, 61, 67, 73, 61, 74, 80, 67, 82, 47, 74, 62, 53, 61, 63, 59, 70, 64, 45, 56, 76, 55, 53, 69, 77, 53, 70, 54, 62, 54, 57, 55, 75, 67, 64, 57, 91, 75, 69, 74, 69, 56, 59, 63, 75, 54, 55, 49, 57, 57, 57, 52, 65, 66, 51, 44, 61, 50, 59, 43, 49, 68, 61, 42, 63, 64, 46, 54, 62, 41, 66, 62, 62, 50, 81, 62, 84, 91, 55, 49, 64, 66, 58, 51, 65, 64, 60, 74, 54, 63, 66, 76, 45, 66, 79, 74, 43, 51, 85, 59, 65, 63, 54, 66, 50, 67, 83, 59, 90, 53, 46, 57, 60, 60, 58, 75, 66, 60, 58, 57, 61, 54, 90, 71, 68, 90, 84, 70, 64, 76, 63, 67, 66, 61, 73, 76, 65, 55, 97, 83, 57, 79, 86, 56, 55, 70, 55, 75, 62, 69, 82, 54, 57, 48, 52, 78, 63, 49, 63, 66, 76, 79, 64, 69, 54, 66, 72, 63, 76, 104, 69, 75, 74, 75, 63, 42, 72, 69, 58, 91, 82, 72, 78, 71, 87, 65, 77, 66, 105, 56, 79, 70, 66, 53, 64, 63, 69, 67, 53, 58, 109, 78, 62, 48, 63, 50, 68, 58, 65, 60, 61, 56, 69, 61, 59, 95, 75, 
61, 61, 69, 71, 56, 78, 83, 55, 44, 56, 53, 50, 66, 70, 65, 77, 59, 48, 56, 79, 73, 59, 50, 84, 66, 70, 46, 68, 64, 55, 60, 69, 55, 95, 62, 68, 61, 54, 54, 95, 63, 71, 63, 78, 59, 72, 58, 113, 104, 71, 46, 75, 68, 73, 95, 60, 99, 61, 57, 49, 84, 58, 46, 55, 71, 79, 54, 53, 43, 58, 58, 52, 74, 65, 52, 66, 80, 60, 74, 65, 47, 62, 67, 54, 64, 60, 218, 79, 49, 70, 57, 54, 58, 76, 98, 68, 93, 43, 77, 60, 79, 57, 81, 60, 79, 57, 58, 69, 64, 44, 67, 76, 70, 86, 69, 66, 37, 50, 37, 76, 71, 68, 87, 53, 62, 79, 81, 66, 50, 59, 48, 60, 79, 63, 75, 60, 57, 50, 77, 77, 56, 57, 64, 98, 66, 72, 55, 47, 58, 67, 56, 53, 61, 52, 65, 44, 56, 67, 72, 55, 60, 59, 60, 56, 65, 58, 49, 45, 66, 63, 88, 77, 59, 60, 51, 60, 81, 68, 54, 48, 72, 42, 80, 86, 76, 80, 67, 62, 62, 95, 85, 69, 63, 59, 66, 54, 96, 80, 65, 66, 54, 71, 62, 78, 43, 65, 66, 42, 50, 82, 83, 54, 64, 62, 65, 66, 70, 76, 56, 89, 54, 74, 78, 77, 62, 70, 77, 64, 62, 72, 63, 50, 68, 44, 60, 62, 61, 53, 75, 71, 66, 52, 53, 78, 79, 70, 51, 82, 85, 74, 59, 58, 62, 84, 75, 56, 96, 74, 78, 71, 57, 60, 63, 61, 64, 51, 69, 75, 63, 54, 83, 66, 87, 65, 52, 60, 60, 54, 73, 72, 76, 75, 59, 49, 85, 58, 73, 76, 76, 84, 68, 64, 52, 51, 66, 52, 63, 80, 66, 69, 57, 53, 51, 58, 49, 59, 50, 65, 57, 61, 63, 55, 90, 69, 76, 54, 64, 72, 71, 65, 54, 67, 58, 81, 62, 85, 71, 71, 59, 53, 59, 96, 64, 80, 41, 56, 66, 67, 70, 52, 62, 98, 67, 82, 69, 57, 68, 81, 53, 73, 81, 51, 82, 76, 88, 42, 78, 35, 74, 51, 52, 72, 83, 41, 68, 50, 73, 61, 57, 86, 72, 43, 69, 63, 62, 64, 50, 64, 71, 86, 78, 79, 62, 73, 82, 70, 74, 67, 52, 72, 74, 63, 67, 76, 58, 75, 58, 59, 63, 51, 87, 58, 75, 98, 67, 61, 78, 46, 66, 83, 59, 44, 62, 45, 60, 64, 69, 49, 67, 73, 60, 70, 75, 58, 47, 84, 70, 62, 78, 47, 61, 65, 55, 67, 71, 84, 79, 81, 92, 76, 66, 59, 58, 75, 48, 56, 67, 58, 78, 97, 59, 55, 63, 61, 67, 61, 96, 63, 63, 82, 71, 49, 83, 67, 56, 41, 70, 54, 64, 61, 58, 65, 67, 56, 70, 79, 60, 56, 67, 61, 78, 71, 61, 52, 59, 58, 84, 54, 77, 61, 80, 69, 51, 72, 98, 68, 71, 75, 74, 48, 34, 70, 58, 75, 77, 64, 47, 72, 39, 57, 58, 56, 76, 64, 41, 73, 68, 49, 94, 75, 85, 69, 96, 75, 80, 66, 58, 71, 73, 73, 90, 48, 74, 78, 71, 62, 72, 58, 55, 64, 77, 66, 54, 56, 97, 72, 41, 71, 63, 54, 47, 89, 44, 88, 71, 80, 89, 62, 65, 49, 75, 59, 66, 135, 84, 70, 45, 82, 80, 59, 73, 68, 61, 51, 40, 61, 72, 167, 58, 37, 47, 56, 61, 49, 69, 80, 70, 97, 66, 68, 61, 70, 50, 69, 62, 72, 63, 62, 46, 68, 78, 77, 88, 62, 46, 88, 69, 63, 64, 69, 79, 110, 61, 56, 72, 58, 56, 55, 66, 67, 67, 56, 60, 69, 61, 52, 58, 64, 68, 67, 83, 45, 67, 92, 79, 73, 61, 52, 63, 70, 56, 65, 73, 65, 48, 71, 70, 64, 101, 48, 67, 60, 73, 80, 48, 65, 59, 57, 73, 48, 85, 80, 60, 73, 61, 61, 75, 89, 46, 54, 79, 82, 45, 79, 56, 64, 77, 116, 59, 70, 59, 62, 71, 72, 46, 59, 108, 72, 59, 49, 58, 74, 84, 58, 66, 66, 64, 59, 56, 66, 70, 59, 76, 72, 81, 73, 60, 29, 59, 87, 78, 66, 71, 68, 56, 68, 84, 72, 51, 55, 52, 53, 61, 57, 85, 45, 68, 60, 49, 78, 54, 62, 55, 54, 69, 88, 83, 62, 70, 63, 40, 56, 59, 50, 53, 54, 54, 81, 70, 39, 58, 53, 66, 75, 65, 54, 70, 72, 56, 81, 53, 63, 75, 41, 58, 71, 68, 71, 57, 78, 64, 90, 67, 74, 54, 63, 53, 88, 57, 75, 74, 37, 61, 50, 79, 93, 60, 61, 82, 81, 69, 92, 64, 108, 68, 55, 70, 103, 79, 47, 74, 74, 78, 65, 52, 44, 92, 62, 67, 64, 86, 83, 85, 48, 73, 72, 69, 75, 75, 57, 68, 86, 69, 71, 63, 58, 33, 84, 72, 82, 56, 85, 78, 61, 79, 57, 56, 49, 66, 79, 92, 46, 56, 91, 68, 52, 70, 80, 59, 60, 64, 77, 76, 50, 55, 59, 52, 50, 65, 56, 73, 57, 46, 42, 54, 48, 70, 57, 55, 73, 86, 89, 46, 61, 62, 48, 44, 81, 63, 48, 55, 83, 58, 84, 67, 73, 
61, 66, 72, 78, 57, 65, 50, 61, 75, 67, 63, 59, 77, 61, 55, 64, 72, 58, 70, 95, 51, 51, 70, 58, 67, 70, 57, 77, 72, 96, 60, 59, 65, 50, 47, 76, 66, 50, 50, 61, 50, 68, 48, 73, 69, 71, 73, 82, 71, 51, 79, 96, 65, 48, 60, 79, 67, 70, 77, 90, 56, 51, 68, 80, 64, 72, 69, 53, 78, 77, 92, 71, 58, 64, 67, 46, 74, 60, 73, 49, 65, 65, 65, 59, 59, 75, 69, 82, 55, 65, 98, 65, 71, 54, 48, 50, 54, 51, 70, 49, 77, 74, 75, 66, 30, 82, 59, 79, 72, 104, 48, 70, 88, 75, 68, 72, 59, 75, 77, 45, 68, 52, 60, 55, 81, 68, 65, 64, 69, 56, 72, 68, 68, 74, 56, 75, 70, 68, 47, 48, 68, 72, 58, 76, 85, 59, 65, 76, 75, 67, 60, 69, 85, 72, 69, 63, 50, 64, 63, 70, 65, 65, 64, 55, 65, 55, 66, 73, 54, 108, 56, 58, 68, 52, 63, 56, 57, 80, 47, 73, 81, 69, 84, 66, 84, 65, 81, 86, 80, 77, 59, 59, 95, 90, 80, 116, 121, 53, 64, 46, 64, 47, 54, 42, 42, 55, 94, 72, 81, 57, 71, 56, 71, 76, 56, 71, 78, 50, 117, 70, 55, 52, 57, 60, 68, 59, 64, 77, 65, 73, 60, 63, 63, 60, 66, 80, 72, 52, 59, 100, 74, 51, 69, 78, 91, 77, 60, 56, 43, 72, 66, 63, 75, 55, 52, 62, 46, 93, 54, 80, 79, 65, 57, 69, 53, 55, 80, 57, 87, 62, 75, 67, 73, 64, 53, 54, 61, 65, 77, 72, 37, 50, 63, 80, 52, 98, 56, 73, 63, 86, 61, 59, 65, 74, 56, 46, 66, 45, 66, 75, 71, 78, 89, 70, 76, 72, 55, 82, 44, 62, 59, 51, 47, 110, 78, 57, 53, 69, 63, 74, 45, 59, 55, 80, 57, 48, 59, 71, 56, 69, 66, 59, 71, 59, 74, 74, 129, 65, 76, 71, 73, 60, 75, 58, 70, 90, 61, 77, 92, 88, 63, 53, 71, 68, 63, 65, 50, 61, 53, 113, 60, 58, 73, 77, 60, 55, 40, 70, 67, 62, 71, 114, 43, 54, 49, 68, 76, 58, 70, 56, 72, 67, 79, 45, 55, 85, 62, 48, 66, 64, 85, 66, 60, 81, 54, 47, 97, 66, 64, 60, 77, 74, 66, 75, 71, 77, 45, 58, 67, 50, 41, 41, 70, 79, 56, 78, 62, 72, 40, 56, 53, 88, 56, 100, 49, 77, 43, 67, 65, 74, 84, 65, 71, 72, 71, 53, 55, 67, 73, 78, 56, 49, 103, 41, 49, 77, 72, 52, 65, 75, 65, 91, 58, 67, 64, 84, 62, 62, 52, 54, 58, 77, 89, 59, 64, 62, 60, 57, 59, 64, 66, 56, 46, 74, 40, 43, 84, 59, 71, 74, 99, 60, 77, 77, 49, 84, 64, 55, 39, 52, 37, 65, 108, 99, 63, 82, 51, 54, 63, 65, 74, 50, 61, 67, 64, 56, 47, 61, 92, 63, 52, 46, 48, 52, 63, 84, 53, 72, 74, 70, 110, 73, 50, 64, 71, 63, 107, 73, 62, 57, 43, 61, 46, 55, 66, 79, 58, 80, 76, 77, 45, 112, 56, 47, 72, 55, 110, 61, 60, 44, 68, 62, 56, 81, 67, 63, 93, 62, 45, 69, 45, 58, 67, 113, 86, 73, 90, 100, 60, 63, 19, 64, 54, 54, 66, 54, 50, 62, 56, 88, 52, 63, 58, 69, 70, 64, 61, 77, 50, 67, 53, 74, 54, 91, 38, 68, 71, 64, 47, 46, 44, 67, 84, 62, 56, 70, 56, 60, 35, 55, 51, 70, 50, 96, 65, 51, 55, 41, 51, 74, 67, 39, 62, 92, 84, 46, 60, 50, 60, 38, 62, 54, 53, 62, 67, 79, 69, 68, 59, 61, 53, 57, 89, 69, 106, 74, 49, 54, 73, 47, 76, 57, 71, 58, 52, 45, 67, 79, 52, 39, 62, 73, 62, 45, 60, 59, 57, 63, 58, 41, 77, 96, 80, 57, 42, 53, 69, 67, 71, 77, 51, 76, 57, 99, 58, 48, 64, 51, 81, 55, 89, 78, 38, 41, 98, 48, 56, 65, 85, 57, 59, 59, 94, 72, 83, 57, 50, 56, 98, 65, 71, 75, 99, 81, 48, 70, 40, 64, 59, 55, 89, 58, 62, 50, 53, 109, 79, 47, 85, 68, 55, 46, 53, 57, 57, 44, 89, 54, 57, 59, 73, 62, 60, 60, 74, 54, 45, 108, 87, 77, 65, 70, 56, 56, 53, 68, 52, 42, 64, 70, 55, 58, 72, 53, 81, 62, 45, 58, 51, 56, 71, 86, 151, 67, 67, 78, 82, 53, 45, 87, 80, 82, 64, 81, 83, 52, 68, 80, 59, 42, 62, 54, 51, 67, 61, 41, 65, 49, 115, 63, 72, 86, 74, 61, 79, 77, 62, 86, 61, 44, 72, 75, 92, 42, 51, 75, 73, 57, 67, 60, 64, 50, 63, 57, 60, 79, 87, 77, 65, 53, 51, 42, 50, 48, 46, 66, 53, 57, 78, 55, 78, 68, 76, 69, 78, 62, 81, 63, 67, 64, 53, 57, 50, 50, 79, 49, 60, 60, 45, 63, 82, 76, 73, 62, 69, 56, 77, 90, 71, 80, 58, 63, 85, 59, 54, 76, 54, 77, 92, 64, 77, 69, 
41, 62, 42, 73, 69, 71, 54, 66, 57, 39, 84, 57, 53, 103, 57, 92, 60, 78, 53, 65, 38, 48, 58, 60, 57, 67, 47, 43, 70, 68, 53, 73, 99, 46, 65, 84, 51, 79, 91, 76, 61, 79, 44, 51, 56, 67, 52, 57, 60, 79, 79, 85, 57, 55, 58, 68, 56, 116, 78, 63, 79, 74, 54, 76, 79, 62, 64, 72, 68, 65, 63, 54, 50, 56, 45, 65, 53, 92, 86, 61, 75, 59, 52, 67, 102, 78, 82, 72, 71, 55, 70, 55, 49, 56, 61, 71, 64, 38, 69, 74, 70, 61, 74, 59, 71, 55, 64, 62, 77, 69, 82, 48, 76, 78, 76, 83, 60, 58, 67, 103, 77, 46, 58, 93, 75, 70, 102, 86, 57, 67, 69, 68, 62, 81, 45, 59, 45, 49, 58, 72, 76, 66, 35, 48, 82, 49, 68, 72, 111, 58, 90, 56, 69, 52, 85, 58, 79, 76, 82, 74, 77, 46, 72, 42, 62, 51, 80, 61, 61, 56, 81, 54, 65, 61, 103, 89, 67, 67, 44, 59, 56, 62, 64, 81, 67, 63, 50, 76, 63, 51, 49, 70, 49, 64, 63, 44, 50, 82, 61, 76, 60, 42, 94, 69, 69, 102, 88, 44, 70, 67, 77, 67, 65, 54, 81, 93, 90, 82, 65, 60, 80, 98, 67, 20, 60, 47, 55, 83, 86, 69, 60, 67, 118, 85, 62, 66, 68, 90, 58, 73, 83, 68, 76, 58, 71, 59, 53, 84, 53, 83, 65, 74, 78, 84, 60, 74, 57, 65, 57, 44, 67, 62, 69, 56, 58, 53, 51, 53, 64, 45, 95, 70, 62, 60, 64, 82, 77, 66, 49, 63, 63, 72, 86, 65, 73, 82, 59, 78, 53, 68, 57, 80, 88, 72, 77, 58, 59, 72, 66, 67, 83, 56, 60, 64, 66, 61, 71, 63, 69, 56, 44, 52, 62, 58, 64, 72, 70, 57, 84, 53, 59, 50, 85, 62, 72, 55, 57, 73, 98, 81, 76, 80, 45, 70, 57, 64, 62, 59, 67, 60, 80, 54, 56, 57, 39, 54, 83, 55, 79, 61, 51, 75, 63, 64, 55, 54, 86, 72, 71, 75, 67, 58, 54, 76, 50, 65, 70, 50, 92, 75, 95, 53, 52, 62, 63, 65, 39, 57, 63, 53, 67, 62, 59, 73, 62, 38, 59, 67, 62, 63, 78, 63, 55, 88, 65, 69, 66, 63, 54, 49, 100, 60, 55, 48, 60, 57, 77, 53, 53, 60, 87, 58, 55, 67, 101, 66, 56, 65, 73, 60, 60, 86, 69, 57, 71, 79, 55, 72, 57, 65, 55, 84, 77, 74, 69, 39, 75, 55, 69, 58, 106, 84, 53, 84, 70, 102, 55, 58, 69, 81, 61, 66, 66, 47, 61, 65, 62, 60, 59, 55, 76, 78, 82, 51, 65, 55, 89, 73, 69, 70, 91, 58, 50, 60, 63, 58, 70, 56, 63, 42, 40, 61, 77, 73, 74, 79, 63, 73, 68, 72, 68, 109, 67, 52, 48, 83, 98, 63, 62, 52, 79, 63, 60, 67, 74, 62, 69, 42, 57, 55, 66, 63, 55, 62, 106, 64, 66, 61, 55, 81, 59, 40, 71, 49, 60, 56, 52, 75, 70, 72, 58, 61, 76, 85, 65, 57, 58, 45, 49, 60, 74, 87, 55, 74, 74, 44, 35, 63, 42, 120, 95, 84, 78, 57, 77, 68, 60, 64, 51, 74, 51, 58, 54, 105, 61, 89, 136, 46, 78, 60, 70, 73, 56, 55, 71, 53, 58, 51, 46, 74, 56, 70, 62, 72, 73, 51, 57, 46, 62, 54, 60, 69, 68, 91, 96, 65, 63, 79, 75, 55, 63, 59, 110, 62, 72, 43, 63, 86, 55, 70, 84, 69, 62, 91, 56, 63, 83, 72, 65, 63, 57, 67, 48, 68, 72, 84, 49, 71, 99, 77, 43, 68, 61, 58, 62, 61, 63, 66, 62, 101, 62, 87, 74, 62, 82, 68, 95, 60, 51, 73, 63, 62, 59, 75, 68, 66, 74, 47, 54, 53, 72, 71, 70, 99, 75, 70, 65, 61, 69, 61, 54, 75, 56, 67, 62, 61, 63, 67, 79, 70, 60, 62, 56, 53, 67, 72, 71, 77, 90, 78, 85, 57, 64, 51, 74, 72, 88, 55, 66, 63, 51, 71, 57, 76, 83, 60, 70, 62, 51, 61, 76, 48, 53, 68, 62, 63, 56, 67, 87, 70, 66, 69, 65, 60, 46, 36, 64, 59, 66, 56, 56, 71, 54, 71, 48, 55, 44, 60, 56, 74, 64, 48, 75, 69, 78, 72, 75, 60, 84, 64, 68, 47, 74, 89, 70, 60, 66, 84, 88, 70, 57, 53, 74, 53, 77, 67, 65, 41, 59, 31, 55, 92, 68, 66, 65, 73, 67, 68, 55, 38, 71, 85, 66, 78, 56, 56, 57, 72, 67, 73, 70, 65, 55, 59, 59, 52, 60, 67, 57, 69, 58, 50, 72, 67, 51, 67, 63, 59, 52, 66, 52, 78, 69, 53, 115, 97, 69, 63, 47, 96, 44, 69, 71, 89, 74, 74, 68, 82, 68, 73, 67, 67, 66, 75, 71, 60, 59, 71, 83, 79, 50, 72, 67, 73, 97, 74, 60, 53, 91, 72, 68, 102, 63, 81, 63, 58, 58, 55, 63, 84, 53, 57, 79, 55, 80, 48, 58, 53, 59, 79, 60, 75, 50, 58, 59, 68, 59, 49, 56, 94, 56, 73, 72, 
75, 60, 75, 80, 74, 51, 58, 59, 39, 51, 62, 64, 68, 66, 58, 73, 56, 73, 74, 68, 87, 90, 89, 77, 62, 81, 64, 85, 63, 77, 73, 60, 75, 64, 86, 47, 90, 83, 88, 68, 73, 50, 67, 99, 60, 62, 46, 67, 56, 91, 63, 44, 48, 47, 59, 48, 79, 99, 67, 59, 78, 60, 72, 46, 71, 87, 75, 85, 46, 86, 49, 61, 64, 83, 50, 66, 95, 70, 57, 81, 65, 61, 60, 66, 71, 86, 66, 67, 58, 68, 65, 116, 69, 64, 66, 78, 67, 104, 58, 57, 36, 66, 57, 58, 43, 82, 75, 57, 69, 47, 78, 50, 57, 52, 67, 50, 58, 59, 70, 74, 66, 79, 68, 44, 78, 51, 54, 78, 53, 57, 63, 74, 62, 72, 54, 70, 76, 57, 96, 84, 47, 66, 63, 76, 70, 68, 64, 60, 44, 58, 75, 49, 60, 55, 61, 72, 53, 61, 59, 67, 65, 67, 63, 53, 69, 67, 55, 54, 81, 59, 44, 69, 57, 71, 49, 70, 61, 54, 65, 60, 46, 54, 71, 42, 76, 70, 61, 67, 65, 53, 73, 82, 66, 54, 88, 80, 56, 68, 65, 57, 77, 43, 53, 75, 55, 87, 66, 76, 46, 42, 61, 72, 68, 64, 72, 64, 71, 59, 68, 62, 62, 50, 53, 99, 48, 55, 55, 50, 47, 54, 57, 45, 83, 92, 65, 52, 66, 69, 47, 74, 48, 55, 77, 74, 93, 59, 94, 59, 48, 68, 53, 76, 54, 61, 47, 63, 82, 49, 85, 54, 55, 64, 49, 51, 61, 56, 57, 58, 56, 70, 56, 63, 75, 53, 48, 44, 68, 79, 97, 73, 73, 65, 69, 102, 69, 76, 63, 55, 58, 61, 76, 46, 62, 80, 52, 74, 59, 62, 34, 64, 52, 69, 61, 44, 85, 66, 66, 56, 66, 46, 57, 54, 55, 57, 59, 62, 58, 145, 69, 57, 67, 66, 63, 77, 67, 49, 60, 71, 70, 76, 94, 51, 67, 68, 70, 66, 73, 59, 86, 54, 81, 56, 68, 66, 61, 64, 64, 62, 57, 76, 74, 91, 58, 61, 57, 67, 61, 79, 57, 66, 62, 81, 57, 60, 63, 61, 55, 63, 67, 41, 155, 65, 53, 67, 56, 60, 65, 77, 62, 54, 56, 67, 41, 68, 55, 66, 73, 46, 63, 51, 76, 105, 66, 99, 67, 59, 91, 54, 48, 71, 63, 90, 65, 46, 37, 66, 84, 94, 55, 90, 51, 60, 56, 77, 97, 72, 84, 65, 71, 52, 72, 53, 50, 65, 69, 122, 68, 59, 70, 75, 72, 52, 52, 61, 59, 83, 79, 67, 75, 53, 54, 76, 70, 85, 59, 42, 51, 62, 49, 64, 72, 69, 57, 80, 73, 76, 52, 75, 78, 86, 76, 75, 61, 67, 62, 60, 79, 74, 72, 80, 55, 67, 66, 58, 77, 56, 79, 56, 34, 69, 64, 49, 62, 71, 71, 69, 96, 86, 73, 66, 50, 57, 87, 66, 51, 106, 57, 56, 77, 75, 78, 57, 58, 54, 79, 38, 114, 80, 62, 55, 71, 77, 77, 77, 65, 57, 83, 65, 51, 53, 52, 83, 75, 64, 108, 68, 59, 87, 41, 69, 45, 71, 70, 47, 100, 77, 72, 55, 66, 64, 95, 43, 75, 70, 57, 50, 72, 55, 60, 61, 81, 69, 56, 78, 66, 58, 62, 81, 121, 64, 63, 44, 62, 54, 72, 67, 84, 62, 75, 58, 81, 53, 39, 65, 121, 65, 101, 90, 53, 68, 59, 84, 73, 71, 60, 79, 60, 67, 82, 69, 73, 67, 57, 95, 62, 45, 63, 53, 47, 57, 45, 79, 87, 59, 43, 73, 68, 66, 51, 49, 56, 77, 47, 53, 52, 60, 56, 61, 60, 56, 79, 78, 36, 57, 78, 59, 51, 87, 50, 52, 83, 65, 47, 93, 64, 55, 54, 68, 61, 59, 93, 68, 59, 60, 61, 50, 66, 89, 49, 33, 117, 75, 59, 99, 86, 80, 39, 70, 56, 76, 86, 78, 54, 84, 56, 62, 83, 42, 68, 73, 73, 105, 67, 84, 95, 83, 71, 98, 77, 50, 64, 117, 40, 69, 40, 82, 69, 69, 67, 60, 72, 67, 52, 71, 66, 91, 70, 58, 66, 102, 60, 95, 64, 59, 50, 88, 48, 69, 58, 84, 94, 86, 54, 82, 57, 67, 108, 72, 58, 56, 57, 73, 61, 91, 89, 67, 79, 87, 57, 70, 65, 63, 43, 76, 50, 55, 73, 60, 47, 142, 65, 78, 71, 70, 42, 154, 85, 53, 91, 55, 58, 55, 61, 113, 76, 78, 54, 67, 75, 56, 59, 61, 35, 52, 83, 91, 91, 68, 71, 80, 59, 74, 41, 66, 63, 55, 50, 56, 56, 82, 67, 88, 97, 52, 71, 88, 52, 59, 82, 80, 70, 71, 50, 59, 56, 63, 84, 66, 68, 78, 50, 95, 58, 76, 45, 70, 40, 92, 52, 79, 41, 58, 65, 70, 83, 81, 86, 57, 63, 71, 52, 71, 65, 26, 50, 56, 58, 57, 81, 58, 53, 72, 61, 60, 74, 62, 28, 77, 51, 62, 62, 85, 51, 84, 88, 82, 51, 41, 67, 70, 54, 88, 77, 100, 71, 49, 46, 55, 40, 56, 102, 69, 57, 36, 79, 58, 87, 51, 72, 65, 54, 83, 81, 129, 32, 82, 68, 75, 69, 62, 64, 70, 
45, 45, 90, 67, 63, 73, 71, 71, 52, 64, 63, 58, 63, 82, 72, 66, 69, 64, 63, 88, 60, 65, 56, 43, 52, 38, 60, 65, 61, 84, 66, 104, 87, 42, 89, 88, 88, 64, 45, 57, 51, 53, 77, 77, 52, 92, 49, 74, 66, 58, 123, 66, 72, 41, 64, 51, 69, 61, 57, 59, 61, 64, 47, 48, 66, 57, 72, 78, 80, 50, 58, 53, 62, 62, 74, 88, 50, 60, 66, 75, 77, 42, 79, 69, 52, 52, 63, 53, 45, 56, 84, 85, 60, 65, 38, 88, 73, 91, 40, 72, 47, 63, 54, 41, 72, 46, 62, 73, 57, 59, 58, 75, 66, 63, 64, 53, 70, 53, 49, 52, 66, 50, 59, 68, 50, 79, 51, 62, 63, 60, 92, 71, 87, 100, 70, 75, 57, 45, 63, 65, 78, 62, 58, 70, 64, 55, 59, 137, 66, 77, 82, 36, 52, 73, 70, 66, 89, 82, 63, 72, 59, 42, 66, 38, 52, 67, 76, 50, 62, 81, 70, 97, 80, 88, 76, 53, 63, 58, 58, 55, 48, 74, 57, 63, 58, 52, 77, 45, 57, 65, 73, 50, 70, 84, 91, 62, 52, 76, 44, 55, 106, 53, 66, 62, 86, 61, 64, 50, 107, 74, 55, 68, 63, 67, 99, 73, 61, 76, 50, 91, 45, 78, 77, 56, 71, 71, 57, 92, 55, 55, 84, 84, 84, 46, 58, 50, 50, 62, 58, 61, 60, 61, 89, 60, 44, 72, 63, 68, 61, 81, 57, 70, 70, 56, 59, 62, 56, 62, 49, 56, 82, 71, 98, 56, 79, 57, 46, 77, 75, 41, 55, 68, 60, 82, 78, 53, 69, 77, 64, 134, 64, 68, 67, 53, 65, 42, 58, 72, 84, 79, 71, 49, 62, 75, 54, 58, 88, 78, 59, 77, 70, 70, 65, 65, 60, 91, 56, 77, 94, 64, 71, 54, 58, 63, 97, 68, 57, 66, 69, 119, 68, 54, 52, 75, 61, 70, 51, 67, 52, 84, 55, 61, 87, 56, 54, 80, 80, 60, 68, 53, 48, 60, 67, 64, 49, 64, 99, 61, 65, 47, 47, 80, 51, 76, 59, 84, 53, 63, 60, 50, 78, 59, 57, 67, 87, 58, 83, 68, 44, 69, 65, 67, 46, 45, 96, 70, 43, 52, 71, 45, 56, 56, 50, 64, 87, 102, 63, 78, 65, 77, 49, 59, 77, 43, 99, 66, 59, 67, 55, 107, 54, 36, 75, 83, 38, 90, 44, 58, 65, 78, 82, 79, 50, 70, 56, 70, 113, 57, 54, 81, 64, 128, 86, 59, 72, 54, 77, 75, 71, 69, 54, 65, 93, 54, 77, 82, 58, 45, 48, 70, 64, 53, 76, 76, 62, 53, 63, 50, 81, 61, 72, 53, 74, 82, 57, 60, 55, 59, 77, 78, 79, 57, 65, 51, 62, 62, 84, 89, 81, 40, 81, 91, 64, 64, 59, 61, 81, 71, 62, 56, 88, 90, 80, 38, 57, 56, 75, 52, 76, 85, 124, 87, 56, 70, 56, 71, 70, 55, 82, 65, 38, 56, 59, 54, 53, 90, 63, 72, 54, 83, 64, 53, 55, 63, 46, 58, 45, 68, 55, 76, 40, 68, 58, 73, 77, 56, 78, 73, 63, 62, 59, 83, 103, 71, 78, 45, 56, 52, 126, 45, 65, 71, 82, 43, 40, 75, 44, 52, 49, 70, 79, 73, 61, 55, 64, 44, 60, 78, 54, 48, 89, 55, 60, 63, 59, 60, 47, 71, 61, 69, 65, 81, 50, 68, 67, 85, 140, 55, 51, 94, 77, 70, 91, 94, 79, 59, 62, 56, 90, 57, 96, 52, 67, 58, 82, 85, 79, 107, 66, 81, 57, 89, 87, 67, 85, 68, 66, 70, 92, 50, 59, 56, 72, 80, 56, 57, 74, 54, 91, 71, 61, 67, 72, 74, 63, 65, 62, 68, 59, 72, 62, 48, 74, 85, 94, 41, 73, 51, 64, 61, 71, 79, 92, 64, 59, 66, 91, 72, 85, 64, 56, 63, 89, 63, 61, 84, 66, 78, 72, 82, 61, 63, 77, 58, 79, 58, 52, 64, 88, 58, 66, 107, 58, 60, 67, 56, 77, 71, 57, 81, 63, 59, 76, 73, 67, 70, 80, 53, 72, 50, 80, 44, 58, 86, 55, 54, 53, 49, 57, 73, 73, 62, 84, 67, 62, 53, 92, 60, 107, 61, 54, 85, 55, 68, 60, 56, 51, 81, 62, 60, 63, 101, 58, 61, 60, 62, 78, 51, 65, 60, 52, 66, 55, 90, 65, 76, 65, 75, 62, 75, 79, 74, 72, 88, 65, 85, 59, 87, 62, 81, 70, 51, 52, 76, 69, 63, 63, 44, 61, 55, 56, 68, 66, 58, 59, 57, 66, 51, 71, 87, 37, 62, 63, 57, 56, 56, 61, 76, 64, 71, 123, 61, 55, 57, 66, 56, 79, 70, 91, 67, 63, 64, 72, 59, 84, 50, 60, 55, 61, 59, 63, 59, 56, 65, 50, 80, 53, 88, 84, 55, 74, 69, 49, 82, 56, 89, 63, 81, 79, 81, 73, 42, 48, 75, 64, 62, 80, 56, 67, 67, 59, 66, 55, 51, 72, 55, 62, 66, 83, 59, 58, 79, 51, 56, 57, 61, 78, 58, 62, 54, 60, 61, 54, 62, 83, 56, 51, 57, 45, 52, 57, 68, 70, 57, 75, 52, 68, 57, 61, 61, 40, 45, 44, 66, 68, 52, 67, 66, 81, 61, 60, 59, 64, 
84, 51, 71, 70, 58, 67, 55, 72, 66, 63, 58, 65, 80, 63, 70, 70, 56, 67, 68, 58, 49, 107, 54, 83, 65, 58, 79, 94, 56, 66, 61, 76, 94, 67, 87, 62, 92, 73, 64, 62, 61, 55, 57, 57, 68, 53, 84, 59, 73, 70, 58, 66, 60, 72, 61, 51, 48, 79, 65, 56, 64, 65, 51, 82, 61, 81, 31, 63, 70, 59, 73, 98, 67, 50, 73, 58, 63, 65, 58, 48, 58, 67, 67, 57, 84, 70, 63, 56, 59, 42, 75, 45, 44, 105, 66, 62, 69, 64, 69, 52, 62, 88, 69, 61, 94, 34, 57, 40, 65, 69, 59, 51, 56, 97, 64, 61, 71, 64, 69, 40, 72, 61, 62, 65, 63, 68, 84, 61, 66, 83, 70, 72, 62, 114, 58, 54, 68, 54, 58, 64, 55, 49, 42, 60, 54, 53, 70, 53, 85, 54, 77, 58, 75, 71, 67, 56, 57, 61, 67, 79, 60, 75, 85, 72, 80, 78, 78, 53, 68, 60, 94, 67, 65, 59, 50, 57, 54, 71, 80, 59, 64, 48, 64, 96, 67, 64, 63, 69, 62, 44, 57, 80, 74, 43, 61, 61, 76, 62, 45, 85, 45, 75, 46, 63, 59, 52, 85, 50, 65, 76, 58, 62, 68, 56, 67, 45, 58, 64, 44, 57, 76, 76, 63, 67, 61, 49, 87, 56, 80, 56, 75, 61, 69, 67, 75, 75, 67, 66, 62, 67, 91, 56, 66, 51, 84, 75, 60, 64, 73, 40, 76, 62, 60, 55, 65, 64, 84, 74, 52, 53, 64, 50, 76, 48, 37, 62, 82, 58, 63, 74, 58, 60, 68, 58, 73, 79, 61, 75, 105, 68, 65, 70, 78, 61, 69, 67, 58, 59, 48, 105, 67, 144, 57, 108, 66, 61, 51, 95, 69, 82, 60, 67, 71, 41, 60, 58, 71, 74, 64, 75, 46, 60, 64, 59, 51, 53, 51, 70, 56, 74, 63, 72, 60, 141, 59, 99, 47, 68, 68, 74, 71, 55, 81, 61, 98, 62, 50, 67, 55, 53, 83, 74, 63, 68, 75, 116, 61, 69, 76, 59, 42, 58, 51, 70, 52, 53, 75, 74, 58, 71, 56, 94, 48, 61, 80, 72, 107, 62, 70, 62, 59, 65, 59, 64, 69, 55, 92, 84, 51, 67, 67, 52, 80, 58, 59, 57, 43, 66, 114, 63, 73, 70, 72, 48, 64, 54, 52, 62, 62, 60, 76, 69, 68, 72, 92, 85, 61, 75, 77, 79, 71, 80, 55, 58, 68, 62, 85, 71, 75, 58, 61, 60, 68, 75, 89, 62, 62, 51, 48, 58, 57, 67, 55, 60, 38, 59, 68, 67, 55, 70, 58, 71, 86, 63, 71, 53, 48, 55, 52, 61, 72, 53, 50, 63, 60, 55, 75, 57, 58, 68, 60, 73, 49, 97, 52, 71, 67, 62, 92, 70, 67, 54, 77, 55, 61, 65, 63, 79, 47, 70, 73, 61, 59, 84, 71, 85, 50, 87, 60, 52, 84, 71, 56, 64, 73, 83, 40, 55, 50, 62, 81, 110, 71, 86, 61, 72, 47, 68, 47, 68, 54, 73, 66, 51, 57, 71, 61, 82, 62, 54, 61, 83, 46, 48, 81, 60, 63, 86, 51, 135, 51, 90, 49, 84, 56, 84, 49, 75, 48, 63, 66, 61, 53, 42, 64, 78, 72, 36, 77, 58, 47, 56, 59, 65, 56, 62, 72, 52, 77, 40, 85, 66, 76, 86, 60, 70, 60, 63, 73, 63, 95, 44, 64, 56, 70, 72, 87, 70, 90, 64, 64, 69, 80, 78, 66, 74, 80, 64, 68, 53, 84, 53, 67, 57, 67, 63, 62, 63, 64, 50, 62, 59, 47, 56, 48, 53, 60, 73, 65, 57, 56, 60, 47, 76, 58, 51, 46, 66, 74, 65, 50, 78, 68, 62, 85, 68, 63, 77, 60, 72, 49, 68, 74, 61, 63, 61, 93, 71, 47, 63, 72, 79, 46, 75, 64, 50, 37, 74, 59, 78, 60, 49, 71, 57, 59, 58, 63, 72, 61, 109, 61, 49, 62, 63, 53, 83, 65, 68, 55, 47, 48, 62, 50, 57, 44, 61, 57, 70, 68, 70, 47, 77, 56, 59, 72, 51, 65, 66, 57, 55, 117, 54, 75, 45, 77, 58, 67, 73, 54, 64, 64, 69, 70, 58, 74, 75, 60, 58, 98, 72, 71, 128, 79, 54, 64, 61, 56, 60, 75, 59, 50, 52, 92, 94, 40, 73, 89, 64, 68, 89, 51, 52, 56, 96, 72, 82, 73, 69, 68, 62, 65, 72, 71, 58, 88, 64, 74, 64, 54, 55, 96, 52, 68, 74, 71, 70, 49, 60, 66, 56, 76, 56, 45, 61, 65, 66, 56, 56, 58, 78, 54, 82, 48, 62, 70, 51, 79, 58, 87, 57, 59, 58, 74, 71, 56, 57, 52, 60, 91, 96, 69, 96, 81, 67, 74, 71, 61, 84, 58, 60, 87, 85, 64, 54, 39, 51, 78, 47, 71, 62, 77, 43, 79, 75, 68, 86, 85, 47, 65, 77, 51, 91, 64, 75, 62, 73, 62, 52, 47, 73, 65, 61, 65, 61, 65, 66, 64, 55, 50, 65, 57, 54, 73, 55, 47, 58, 74, 46, 73, 66, 60, 76, 77, 54, 67, 67, 48, 60, 47, 58, 69, 63, 61, 55, 59, 87, 69, 69, 54, 99, 69, 55, 76, 59, 50, 82, 52, 66, 87, 69, 38, 51, 60, 63, 
75, 65, 67, 63, 62, 51, 63, 50, 39, 43, 53, 56, 59, 42, 58, 56, 43, 79, 62, 78, 70, 50, 50, 82, 62, 75, 49, 59, 77, 63, 49, 57, 74, 61, 50, 77, 73, 121, 78, 76, 61, 57, 51, 60, 60, 50, 58, 54, 84, 68, 48, 65, 42, 110, 67, 90, 64, 48, 74, 54, 77, 60, 60, 72, 55, 56, 66, 49, 55, 67, 109, 52, 85, 105, 46, 57, 38, 73, 50, 58, 69, 42, 47, 58, 66, 70, 68, 55, 59, 76, 81, 73, 59, 60, 52, 48, 68, 47, 86, 52, 71, 73, 76, 74, 87, 56, 62, 54, 76, 115, 60, 67, 60, 86, 55, 80, 64, 75, 64, 49, 65, 59, 50, 46, 56, 74, 56, 72, 71, 68, 70, 80, 55, 56, 76, 42, 61, 69, 67, 49, 65, 73, 84, 77, 77, 50, 81, 36, 63, 88, 60, 72, 60, 59, 75, 59, 79, 92, 95, 65, 77, 56, 90, 80, 69, 56, 77, 27, 86, 69, 66, 78, 55, 44, 74, 56, 84, 45, 65, 62, 72, 45, 61, 49, 86, 56, 50, 56, 64, 67, 73, 74, 53, 49, 66, 64, 83, 46, 80, 77, 54, 63, 82, 61, 68, 54, 109, 76, 28, 45, 56, 49, 79, 44, 59, 50, 83, 60, 57, 63, 68, 49, 44, 129, 54, 68, 65, 59, 61, 47, 54, 78, 66, 77, 65, 74, 69, 62, 75, 60, 71, 68, 47, 58, 59, 48, 62, 130, 65, 71, 58, 94, 63, 61, 83, 69, 58, 72, 60, 39, 55, 47, 57, 80, 61, 67, 83, 64, 51, 86, 72, 77, 50, 70, 77, 57, 76, 71, 82, 57, 66, 62, 64, 68, 50, 78, 54, 69, 72, 69, 62, 62, 53, 75, 60, 83, 46, 72, 68, 72, 73, 59, 81, 77, 71, 72, 57, 47, 66, 86, 45, 72, 56, 70, 66, 67, 59, 60, 83, 63, 60, 68, 52, 79, 85, 76, 66, 65, 57, 76, 57, 72, 52, 70, 75, 72, 85, 57, 60, 69, 55, 75, 61, 66, 70, 74, 57, 67, 62, 60, 83, 64, 54, 59, 75, 39, 62, 75, 57, 79, 65, 71, 62, 70, 78, 105, 55, 73, 73, 56, 81, 62, 68, 70, 57, 63, 75, 56, 54, 66, 68, 65, 79, 60, 52, 66, 70, 51, 51, 67, 91, 84, 57, 55, 48, 54, 82, 77, 88, 71, 47, 77, 64, 65, 71, 57, 68, 56, 74, 50, 93, 74, 55, 47, 83, 78, 57, 56, 66, 65, 96, 61, 67, 60, 63, 54, 60, 75, 59, 56, 43, 130, 57, 65, 60, 66, 59, 38, 60, 55, 71, 70, 69, 61, 67, 58, 90, 57, 62, 57, 71, 59, 72, 59, 168, 60, 59, 74, 69, 77, 42, 90, 51, 46, 52, 87, 71, 50, 57, 61, 66, 76, 50, 63, 90, 69, 67, 45, 65, 56, 107, 62, 51, 60, 44, 44, 65, 96, 75, 65, 74, 66, 66, 62, 64, 71, 77, 62, 44, 80, 75, 92, 80, 70, 70, 69, 42, 62, 58, 40, 52, 54, 69, 66, 62, 66, 78, 65, 55, 66, 77, 89, 105, 66, 58, 98, 73, 65, 54, 57, 58, 65, 66, 88, 77, 81, 77, 75, 61, 65, 58, 70, 55, 55, 83, 64, 52, 68, 62, 67, 64, 61, 90, 55, 79, 199, 56, 73, 55, 54, 73, 74, 72, 61, 79, 57, 54, 57, 53, 75, 101, 60, 47, 68, 55, 68, 62, 64, 95, 52, 65, 61, 73, 67, 52, 59, 55, 71, 62, 42, 55, 57, 60, 69, 77, 59, 64, 50, 46, 73, 73, 62, 82, 147, 59, 74, 60, 73, 93, 55, 63, 61, 88, 55, 87, 54, 52, 60, 65, 70, 59, 85, 57, 62, 78, 66, 60, 65, 66, 46, 67, 62, 83, 62, 40, 84, 75, 57, 70, 69, 71, 61, 67, 76, 55, 49, 53, 50, 82, 51, 75, 72, 72, 62, 73, 83, 71, 69, 57, 70, 59, 60, 66, 70, 75, 65, 83, 83, 55, 61, 54, 70, 70, 62, 67, 44, 66, 38, 55, 58, 62, 68, 73, 75, 46, 56, 62, 65, 59, 67, 64, 61, 72, 45, 58, 70, 82, 86, 75, 62, 91, 92, 105, 85, 53, 84, 63, 70, 113, 52, 58, 62, 77, 66, 79, 62, 59, 78, 79, 51, 74, 81, 101, 66, 62, 83, 54, 54, 66, 53, 72, 72, 67, 63, 66, 69, 75, 59, 61, 79, 73, 54, 55, 65, 72, 70, 67, 73, 67, 56, 79, 71, 63, 74, 53, 59, 59, 50, 77, 70, 71, 69, 51, 78, 56, 59, 77, 63, 57, 69, 87, 73, 76, 71, 69, 80, 61, 58, 63, 82, 46, 73, 71, 101, 70, 49, 53, 77, 46, 59, 54, 90, 70, 69, 79, 64, 76, 72, 51, 40, 50, 63, 78, 44, 50, 46, 42, 48, 85, 53, 51, 61, 56, 130, 82, 49, 59, 83, 53, 66, 73, 103, 57, 59, 67, 43, 74, 85, 62, 75, 67, 76, 78, 75, 52, 104, 81, 80, 73, 80, 65, 98, 58, 81, 41, 74, 60, 68, 89, 58, 41, 80, 50, 61, 64, 54, 80, 102, 78, 62, 58, 43, 52, 108, 44, 48, 56, 61, 75, 55, 42, 58, 49, 89, 59, 60, 79, 66, 71, 54, 51, 60, 
56, 71, 71, 64, 57, 71, 68, 64, 51, 80, 68, 71, 63, 79, 64, 73, 47, 65, 71, 46, 67, 72, 138, 82, 45, 62, 69, 41, 103, 104, 92, 59, 58, 46, 64, 67, 66, 43, 114, 93, 54, 54, 81, 70, 96, 53, 58, 59, 68, 79, 65, 60, 67, 85, 65, 60, 50, 63, 62, 61, 52, 67, 123, 68, 77, 53, 65, 56, 56, 65, 55, 66, 82, 48, 88, 68, 84, 54, 54, 44, 58, 74, 54, 58, 88, 55, 61, 54, 71, 68, 53, 45, 88, 56, 63, 49, 62, 41, 86, 52, 83, 62, 62, 80, 46, 71, 59, 68, 66, 72, 82, 54, 71, 49, 72, 77, 41, 72, 62, 49, 74, 70, 70, 51, 65, 55, 71, 62, 69, 54, 91, 65, 54, 66, 69, 66, 58, 48, 75, 57, 67, 61, 81, 60, 59, 70, 59, 55, 72, 66, 74, 58, 63, 52, 46, 15, 61, 59, 99, 63, 62, 49, 111, 76, 51, 56, 82, 22, 87, 51, 58, 61, 61, 63, 36, 96, 45, 79, 71, 65, 56, 52, 62, 73, 93, 61, 68, 89, 54, 48, 63, 76, 52, 96, 58, 71, 94, 58, 76, 60, 59, 72, 59, 46, 47, 64, 102, 61, 44, 75, 50, 87, 70, 62, 72, 89, 108, 55, 93, 113, 60, 74, 117, 59, 68, 52, 73, 60, 70, 58, 58, 83, 79, 179, 77, 57, 69, 54, 78, 62, 72, 67, 60, 89, 59, 73, 167, 49, 71, 64, 107, 72, 64, 79, 64, 71, 45, 46, 81, 82, 90, 79, 38, 67, 67, 76, 82, 73, 85, 59, 58, 58, 61, 55, 69, 68, 42, 54, 69, 54, 55, 60, 74, 59, 96, 72, 57, 55, 69, 111, 57, 77, 62, 84, 75, 42, 60, 66, 107, 75, 52, 68, 64, 71, 76, 70, 80, 54, 80, 98, 65, 97, 58, 75, 73, 71, 86, 56, 49, 67, 53, 66, 40, 60, 66, 52, 94, 64, 61, 53, 80, 55, 55, 59, 75, 51, 105, 56, 39, 74, 68, 112, 66, 65, 79, 66, 51, 56, 61, 54, 75, 63, 71, 51, 64, 36, 62, 74, 76, 68, 97, 61, 56, 66, 118, 94, 76, 65, 60, 58, 78, 80, 116, 67, 77, 93, 92, 66, 68, 44, 52, 56, 63, 69, 69, 86, 87, 59, 72, 67, 64, 126, 63, 44, 67, 47, 78, 83, 73, 49, 66, 90, 57, 58, 54, 67, 63, 80, 74, 62, 63, 63, 81, 79, 68, 52, 45, 84, 59, 59, 83, 55, 48, 77, 70, 71, 89, 43, 67, 69, 67, 65, 70, 88, 67, 52, 63, 88, 71, 65, 90, 70, 105, 57, 54, 66, 52, 81, 61, 64, 69, 74, 81, 96, 43, 86, 72, 54, 52, 69, 66, 57, 53, 71, 79, 55, 96, 64, 42, 43, 50, 58, 60, 59, 49, 62, 59, 63, 77, 68, 66, 62, 71, 90, 70, 75, 58, 72, 82, 72, 54, 86, 76, 56, 76, 46, 64, 29, 64, 66, 49, 44, 64, 65, 57, 42, 66, 71, 48, 80, 52, 61, 57, 57, 71, 57, 54, 54, 107, 59, 84, 69, 55, 79, 90, 70, 85, 88, 51, 59, 64, 68, 75, 37, 67, 74, 68, 72, 62, 57, 35, 55, 54, 70, 88, 68, 76, 40, 86, 66, 52, 62, 48, 51, 78, 62, 50, 69, 60, 53, 42, 59, 32, 58, 56, 71, 55, 79, 61, 60, 74, 60, 68, 50, 61, 61, 67, 79, 69, 65, 80, 97, 88, 75, 57, 74, 58, 69, 68, 55, 73, 59, 83, 47, 44, 57, 60, 70, 75, 65, 57, 49, 58, 87, 64, 73, 42, 59, 104, 82, 51, 81, 67, 77, 99, 92, 68, 59, 53, 95, 72, 66, 63, 90, 70, 63, 59, 60, 73, 65, 60, 69, 83, 53, 67, 93, 53, 89, 67, 60, 49, 80, 57, 62, 48, 68, 56, 75, 62, 67, 64, 44, 76, 54, 77, 80, 40, 71, 53, 65, 75, 61, 46, 63, 54, 45, 74, 46, 67, 52, 72, 54, 54, 73, 85, 110, 49, 66, 66, 78, 59, 68, 69, 120, 71, 63, 90, 60, 70, 63, 52, 63, 59, 59, 58, 68, 73, 73, 53, 81, 74, 59, 62, 61, 65, 55, 74, 65, 57, 73, 49, 66, 78, 60, 63, 79, 34, 50, 82, 62, 92, 56, 53, 79, 58, 73, 50, 74, 75, 64, 62, 57, 59, 64, 54, 74, 56, 49, 62, 66, 70, 72, 47, 57, 45, 70, 83, 69, 71, 84, 76, 107, 77, 51, 59, 62, 103, 50, 67, 58, 65, 43, 58, 54, 50, 52, 61, 64, 67, 65, 63, 48, 74, 44, 78, 44, 100, 68, 71, 48, 53, 70, 67, 62, 58, 56, 67, 72, 61, 47, 63, 56, 63, 111, 55, 68, 75, 36, 92, 80, 63, 74, 57, 58, 55, 60, 51, 56, 70, 66, 72, 53, 60, 50, 56, 36, 71, 45, 61, 45, 55, 56, 92, 65, 59, 57, 75, 84, 57, 67, 56, 81, 50, 67, 67, 63, 68, 62, 69, 57, 51, 65, 49, 72, 33, 107, 47, 95, 89, 56, 49, 49, 62, 50, 71, 60, 71, 64, 48, 62, 72, 57, 71, 63, 61, 65, 53, 71, 80, 85, 116, 54, 55, 52, 48, 76, 57, 69, 64, 58, 47, 
37, 69, 60, 52, 65, 68, 66, 73, 40, 60, 68, 88, 54, 62, 43, 50, 57, 62, 86, 74, 62, 61, 62, 38, 69, 61, 66, 84, 73, 60, 41, 76, 61, 54, 64, 52, 67, 59, 58, 57, 76, 62, 69, 70, 57, 51, 63, 64, 51, 69, 83, 64, 75, 104, 67, 56, 79, 64, 59, 39, 55, 66, 64, 85, 84, 102, 45, 58, 61, 51, 123, 67, 82, 69, 59, 50, 59, 91, 64, 56, 65, 59, 82, 89, 87, 57, 62, 70, 44, 69, 60, 69, 61, 71, 58, 66, 73, 66, 80, 63, 75, 64, 80, 80, 58, 67, 56, 63, 66, 59, 72, 69, 62, 79, 70, 66, 92, 64, 60, 74, 69, 56, 62, 95, 54, 58, 54, 72, 69, 78, 78, 79, 56, 81, 104, 80, 26, 53, 65, 56, 63, 66, 55, 57, 57, 58, 100, 50, 69, 74, 50, 66, 64, 53, 88, 51, 59, 53, 75, 77, 73, 58, 67, 55, 68, 59, 66, 63, 55, 80, 94, 27, 51, 92, 47, 36, 49, 69, 78, 79, 101, 65, 65, 62, 110, 40, 69, 88, 48, 59, 82, 59, 84, 53, 60, 87, 84, 71, 56, 66, 28, 67, 71, 80, 58, 58, 85, 58, 59, 90, 63, 85, 50, 54, 60, 55, 69, 61, 78, 89, 55, 54, 73, 44, 74, 69, 115, 73, 69, 43, 61, 60, 59, 63, 99, 74, 81, 105, 56, 64, 99, 61, 80, 99, 77, 62, 123, 77, 64, 67, 53, 62, 78, 60, 81, 52, 64, 55, 73, 60, 85, 77, 74, 47, 62, 61, 77, 49, 45, 61, 66, 49, 63, 82, 59, 85, 47, 81, 72, 56, 86, 87, 59, 63, 83, 52, 83, 71, 50, 38, 65, 68, 45, 60, 55, 46, 51, 74, 72, 90, 57, 51, 61, 51, 60, 66, 74, 62, 54, 75, 77, 76, 106, 66, 53, 82, 66, 84, 57, 43, 62, 72, 66, 46, 57, 43, 80, 69, 69, 70, 67, 74, 61, 60, 86, 67, 55, 96, 58, 63, 53, 60, 78, 91, 43, 76, 56, 78, 73, 81, 85, 57, 63, 61, 73, 73, 68, 52, 104, 54, 84, 60, 35, 55, 85, 63, 59, 65, 47, 55, 63, 73, 48, 46, 23, 50, 68, 60, 71, 50, 34, 63, 61, 54, 83, 74, 65, 53, 56, 90, 49, 90, 97, 69, 53, 87, 63, 58, 68, 71, 67, 86, 82, 97, 34, 70, 57, 86, 55, 91, 58, 61, 82, 85, 83, 72, 62, 85, 72, 77, 66, 69, 64, 82, 60, 73, 68, 50, 83, 70, 77, 57, 52, 71, 52, 92, 94, 57, 57, 90, 66, 88, 60, 62, 90, 66, 86, 51, 56, 57, 63, 59, 60, 60, 61, 73, 44, 58, 59, 44, 100, 60, 71, 69, 69, 56, 77, 39, 65, 67, 74, 53, 53, 59, 99, 43, 62, 98, 68, 51, 59, 43, 68, 76, 83, 76, 53, 49, 69, 49, 89, 59, 57, 73, 75, 62, 80, 65, 113, 45, 64, 44, 82, 67, 72, 49, 57, 60, 51, 72, 47, 59, 43, 67, 49, 66, 55, 64, 100, 79, 70, 49, 70, 57, 52, 46, 66, 52, 91, 87, 65, 64, 59, 69, 62, 53, 69, 73, 52, 66, 72, 68, 73, 66, 71, 55, 49, 52, 105, 62, 62, 45, 40, 105, 56, 66, 68, 54, 83, 53, 70, 90, 91, 41, 64, 70, 65, 49, 51, 71, 101, 72, 75, 61, 68, 37, 80, 51, 45, 68, 58, 55, 50, 62, 78, 71, 46, 93, 66, 36, 81, 71, 55, 109, 60, 45, 66, 62, 55, 86, 81, 67, 82, 68, 59, 80, 56, 50, 59, 87, 58, 73, 62, 64, 51, 48, 69, 55, 81, 53, 63, 88, 51, 53, 67, 62, 82, 50, 59, 62, 71, 64, 58, 63, 52, 81, 50, 50, 74, 47, 79, 89, 59, 65, 63, 44, 72, 64, 38, 89, 63, 53, 79, 53, 102, 73, 65, 75, 53, 68, 61, 60, 60, 78, 54, 60, 62, 77, 58, 56, 46, 79, 47, 64, 82, 62, 76, 78, 71, 54, 55, 79, 81, 61, 40, 63, 68, 62, 53, 64, 48, 113, 78, 50, 50, 86, 47, 66, 70, 62, 89, 52, 54, 71, 60, 82, 46, 52, 70, 53, 72, 62, 53, 72, 35, 63, 69, 101, 69, 35, 108, 53, 56, 90, 55, 88, 64, 43, 54, 54, 47, 72, 38, 57, 74, 101, 67, 49, 57, 72, 96, 73, 70, 67, 70, 87, 70, 61, 67, 61, 103, 67, 92, 54, 65, 107, 44, 66, 81, 112, 57, 96, 65, 71, 61, 58, 64, 86, 56, 60, 95, 97, 47, 77, 110, 43, 64, 63, 66, 65, 42, 53, 71, 80, 53, 66, 55, 48, 73, 74, 50, 65, 63, 55, 91, 87, 71, 75, 79, 111, 108, 48, 78, 47, 72, 54, 47, 44, 52, 59, 82, 80, 77, 49, 68, 82, 70, 46, 63, 63, 52, 51, 64, 46, 64, 95, 52, 70, 77, 69, 65, 50, 51, 61, 59, 45, 45, 83, 81, 60, 68, 74, 67, 81, 85, 98, 64, 60, 80, 68, 71, 75, 67, 61, 84, 35, 33, 59, 40, 69, 79, 63, 61, 59, 47, 84, 74, 46, 72, 47, 42, 82, 70, 65, 66, 52, 58, 57, 75, 69, 53, 
38, 54, 74, 72, 62, 90, 73, 60, 79, 58, 83, 40, 71, 54, 53, 62, 81, 62, 76, 72, 98, 74, 68, 53, 80, 107, 66, 54, 60, 59, 90, 51, 68, 76, 60, 59, 88, 50, 89, 59, 75, 46, 56, 59, 43, 46, 70, 56, 52, 71, 68, 86, 49, 71, 61, 81, 92, 64, 47, 76, 39, 74, 75, 47, 78, 45, 63, 59, 124, 71, 33, 37, 48, 57, 44, 79, 64, 42, 108, 77, 61, 66, 65, 56, 62, 43, 61, 77, 72, 60, 39, 50, 45, 78, 78, 54, 49, 57, 98, 93, 48, 78, 64, 54, 72, 74, 57, 47, 66, 71, 109, 57, 51, 44, 54, 62, 57, 58, 57, 57, 76, 57, 58, 63, 62, 55, 66, 59, 68, 58, 55, 124, 56, 54, 69, 82, 58, 60, 77, 80, 60, 41, 84, 67, 59, 83, 59, 58, 54, 70, 60, 50, 70, 44, 54, 58, 88, 55, 85, 59, 55, 48, 50, 39, 68, 72, 65, 48, 60, 62, 49, 53, 47, 57, 62, 57, 85, 58, 79, 67, 71, 45, 83, 65, 43, 58, 60, 54, 74, 74, 44, 48, 63, 59, 68, 49, 83, 80, 54, 41, 35, 53, 54, 56, 62, 103, 51, 61, 62, 74, 112, 66, 92, 68, 81, 58, 85, 64, 59, 93, 75, 88, 42, 82, 59, 50, 79, 85, 51, 76, 85, 63, 58, 69, 57, 75, 71, 75, 67, 61, 76, 79, 90, 52, 87, 60, 84, 59, 49, 96, 107, 63, 78, 66, 57, 77, 73, 51, 48, 64, 50, 80, 77, 119, 33, 84, 42, 55, 69, 70, 77, 71, 88, 55, 63, 63, 64, 71, 61, 67, 62, 112, 71, 61, 49, 54, 64, 51, 66, 56, 34, 76, 66, 64, 69, 41, 53, 68, 85, 37, 44, 68, 69, 57, 40, 64, 90, 49, 86, 54, 64, 51, 68, 68, 44, 52, 157, 79, 78, 132, 54, 63, 88, 45, 81, 51, 67, 87, 81, 69, 74, 59, 80, 55, 82, 65, 87, 62, 89, 74, 55, 57, 52, 60, 69, 59, 72, 75, 58, 57, 67, 63, 70, 65, 106, 61, 70, 78, 65, 76, 55, 96, 70, 50, 67, 52, 68, 115, 68, 74, 52, 76, 57, 83, 73, 65, 69, 68, 58, 50, 89, 66, 54, 58, 68, 81, 68, 66, 63, 73, 71, 68, 46, 56, 66, 65, 54, 70, 43, 73, 73, 85, 88, 90, 96, 64, 114, 59, 62, 76, 63, 56, 68, 66, 77, 68, 62, 61, 79, 65, 98, 71, 39, 67, 77, 76, 59, 45, 69, 63, 46, 50, 87, 61, 73, 65, 66, 63, 92, 54, 60, 69, 55, 68, 86, 49, 89, 60, 40, 51, 141, 57, 89, 61, 77, 59, 74, 92, 63, 48, 66, 71, 58, 48, 48, 50, 65, 70, 63, 74, 58, 60, 68, 55, 81, 57, 73, 43, 62, 69, 82, 73, 66, 57, 78, 89, 24, 50, 64, 59, 72, 68, 77, 78, 74, 68, 53, 66, 70, 68, 52, 57, 80, 57, 97, 51, 76, 59, 53, 69, 73, 59, 64, 48, 57, 95, 59, 65, 50, 50, 93, 53, 66, 60, 81, 47, 56, 65, 74, 75, 38, 82, 59, 66, 55, 57, 73, 53, 68, 65, 56, 98, 71, 70, 95, 65, 75, 62, 56, 60, 50, 49, 36, 61, 72, 85, 95, 70, 62, 71, 66, 56, 57, 64, 51, 69, 90, 84, 72, 118, 80, 69, 85, 58, 64, 71, 75, 47, 80, 49, 68, 83, 73, 100, 90, 54, 80, 82, 74, 65, 39, 74, 60, 67, 76, 49, 59, 72, 63, 65, 53, 59, 72, 61, 73, 83, 74, 75, 77, 75, 70, 47, 45, 44, 86, 49, 62, 45, 67, 79, 79, 81, 65, 44, 76, 79, 66, 56, 76, 60, 57, 53, 53, 39, 68, 63, 72, 69, 113, 57, 60, 88, 52, 48, 64, 114, 65, 67, 84, 72, 68, 90, 60, 63, 66, 73, 58, 63, 58, 60, 47, 86, 81, 76, 93, 75, 62, 64, 63, 69, 65, 79, 72, 62, 50, 67, 51, 66, 84, 67, 66, 85, 85, 52, 64, 29, 25, 57, 57, 44, 57, 65, 65, 53, 53, 75, 66, 70, 75, 62, 70, 69, 81, 54, 69, 82, 67, 85, 86, 52, 47, 44, 64, 44, 117, 49, 70, 55, 50, 54, 59, 78, 65, 71, 62, 51, 47, 40, 83, 52, 53, 69, 51, 86, 74, 89, 79, 63, 90, 88, 72, 82, 67, 84, 74, 67, 57, 73, 65, 46, 54, 43, 75, 72, 73, 45, 64, 72, 74, 74, 96, 48, 97, 95, 71, 37, 60, 51, 38, 55, 51, 79, 84, 51, 84, 47, 56, 73, 61, 73, 72, 70, 55, 58, 49, 50, 88, 68, 87, 48, 56, 69, 54, 69, 72, 75, 62, 50, 56, 60, 59, 85, 105, 60, 94, 63, 65, 94, 78, 58, 69, 70, 49, 57, 77, 65, 75, 70, 67, 58, 77, 46, 59, 49, 75, 33, 61, 50, 55, 54, 64, 56, 54, 49, 60, 77, 49, 71, 55, 61, 58, 73, 63, 78, 83, 62, 51, 70, 69, 57, 64, 53, 72, 72, 75, 63, 53, 52, 76, 67, 82, 62, 78, 57, 61, 64, 88, 49, 115, 52, 82, 67, 68, 66, 61, 54, 94, 66, 56, 61, 44, 62, 60, 
61, 80, 49, 68, 49, 63, 69, 69, 52, 55, 70, 86, 50, 70, 72, 64, 59, 73, 64, 49, 71, 65, 58, 56, 96, 74, 71, 88, 53, 64, 56, 111, 77, 87, 111, 69, 67, 58, 59, 71, 101, 53, 70, 129, 67, 55, 63, 78, 85, 57, 88, 70, 55, 86, 61, 58, 69, 77, 53, 43, 79, 58, 77, 43, 53, 49, 58, 55, 76, 55, 58, 66, 71, 65, 59, 56, 63, 74, 75, 56, 60, 66, 63, 72, 79, 61, 63, 57, 126, 95, 81, 75, 68, 81, 81, 62, 52, 62, 100, 73, 83, 66, 56, 102, 62, 40, 66, 50, 73, 62, 70, 52, 70, 36, 68, 71, 125, 50, 29, 98, 57, 58, 63, 87, 63, 69, 71, 49, 74, 65, 81, 52, 44, 61, 65, 51, 80, 46, 55, 66, 67, 78, 74, 73, 58, 65, 35, 37, 71, 63, 48, 65, 59, 74, 74, 59, 52, 50, 52, 43, 57, 87, 55, 55, 61, 67, 72, 55, 68, 57, 68, 66, 97, 54, 61, 63, 68, 64, 77, 70, 86, 68, 58, 65, 52, 94, 78, 53, 63, 54, 81, 76, 51, 74, 60, 50, 100, 72, 88, 52, 66, 51, 76, 66, 58, 39, 66, 65, 96, 62, 70, 69, 57, 51, 52, 66, 61, 79, 69, 50, 38, 63, 54, 77, 82, 86, 76, 77, 76, 66, 69, 57, 81, 64, 71, 74, 60, 63, 56, 75, 69, 56, 73, 63, 74, 72, 73, 42, 42, 87, 66, 79, 75, 69, 59, 59, 78, 52, 70, 65, 64, 63, 61, 71, 55, 91, 40, 56, 89, 46, 64, 93, 58, 63, 43, 65, 51, 77, 95, 79, 43, 54, 50, 87, 52, 49, 62, 66, 68, 55, 87, 75, 66, 71, 52, 76, 63, 75, 44, 87, 86, 58, 65, 68, 73, 51, 51, 84, 64, 56, 74, 50, 106, 68, 75, 54, 53, 64, 45, 74, 88, 53, 69, 68, 75, 120, 59, 56, 68, 52, 68, 54, 101, 69, 75, 73, 92, 54, 51, 66, 83, 55, 68, 57, 62, 58, 132, 61, 51, 41, 87, 67, 86, 65, 45, 85, 81, 80, 55, 69, 51, 61, 49, 75, 77, 42, 93, 54, 60, 73, 61, 61, 56, 58, 52, 57, 64, 67, 49, 75, 69, 51, 53, 62, 35, 72, 95, 57, 74, 60, 64, 82, 66, 61, 94, 52, 39, 57, 62, 66, 78, 93, 69, 79, 76, 72, 43, 52, 62, 53, 61, 48, 75, 59, 64, 50, 84, 64, 58, 79, 50, 79, 100, 79, 51, 90, 67, 78, 81, 60, 55, 87, 84, 41, 82, 71, 61, 70, 52, 167, 59, 91, 66, 61, 69, 64, 88, 60, 83, 62, 85, 49, 55, 58, 80, 82, 61, 62, 61, 46, 70, 64, 89, 72, 92, 55, 91, 66, 61, 105, 70, 37, 63, 74, 58, 75, 76, 57, 59, 53, 43, 47, 54, 58, 74, 68, 54, 63, 72, 61, 80, 51, 61, 66, 86, 63, 95, 97, 67, 56, 59, 64, 83, 63, 77, 80, 54, 60, 74, 66, 75, 78, 95, 73, 58, 63, 72, 46, 67, 56, 60, 82, 55, 94, 80, 62, 86, 91, 53, 53, 80, 47, 49, 52, 68, 86, 79, 61, 67, 52, 57, 62, 63, 67, 70, 83, 44, 52, 49, 93, 69, 72, 74, 60, 54, 63, 50, 44, 61, 56, 50, 64, 58, 77, 51, 49, 44, 63, 52, 59, 54, 64, 58, 73, 84, 68, 57, 71, 73, 67, 62, 49, 43, 72, 51, 63, 60, 63, 76, 57, 53, 55, 82, 61, 59, 61, 88, 85, 59, 62, 67, 84, 59, 55, 81, 48, 75, 90, 71, 72, 67, 97, 85, 63, 73, 76, 55, 62, 65, 73, 52, 58, 66, 42, 54, 57, 72, 72, 71, 61, 68, 67, 70, 46, 84, 67, 74, 68, 59, 75, 60, 51, 43, 56, 53, 43, 75, 58, 58, 75, 86, 88, 61, 63, 54, 60, 68, 65, 71, 78, 73, 80, 91, 59, 78, 91, 52, 50, 104, 81, 36, 69, 83, 61, 58, 56, 85, 62, 62, 63, 69, 87, 78, 63, 73, 63, 58, 86, 52, 54, 68, 58, 68, 51, 78, 40, 62, 77, 62, 58, 68, 65, 54, 81, 83, 47, 79, 68, 88, 60, 61, 84, 69, 47, 60, 57, 55, 81, 56, 80, 64, 50, 70, 56, 55, 62, 75, 76, 84, 44, 54, 64, 76, 72, 58, 67, 60, 89, 57, 60, 78, 65, 68, 44, 65, 57, 83, 66, 89, 56, 63, 54, 42, 66, 77, 57, 69, 74, 46, 60, 65, 50, 55, 75, 49, 62, 69, 69, 69, 88, 78, 47, 82, 79, 73, 51, 77, 59, 72, 76, 73, 62, 60, 55, 62, 79, 83, 63, 79, 52, 71, 46, 65, 54, 91, 77, 51, 67, 46, 48, 75, 53, 70, 55, 53, 46, 67, 67, 68, 60, 88, 66, 79, 77, 72, 81, 65, 53, 54, 69, 91, 79, 73, 76, 66, 57, 74, 56, 43, 68, 71, 60, 84, 61, 60, 70, 83, 82, 69, 68, 47, 69, 77, 50, 66, 74, 102, 63, 57, 92, 53, 80, 64, 49, 96, 64, 65, 76, 68, 52, 85, 64, 61, 53, 80, 70, 61, 59, 63, 62, 56, 52, 74, 52, 53, 63, 61, 58, 82, 59, 46, 58, 43, 68, 
75, 69, 67, 58, 58, 57, 54, 74, 54, 76, 77, 80, 49, 70, 80, 82, 92, 55, 45, 56, 56, 48, 45, 55, 79, 81, 58, 70, 59, 90, 54, 60, 72, 73, 66, 69, 92, 80, 60, 56, 65, 59, 65, 71, 52, 60, 61, 58, 60, 74, 58, 63, 43, 68, 60, 81, 69, 48, 62, 76, 72, 66, 57, 54, 53, 102, 61, 51, 80, 46, 80, 66, 64, 61, 57, 85, 42, 81, 66, 77, 77, 76, 84, 110, 85, 50, 60, 78, 75, 44, 60, 57, 68, 76, 59, 65, 51, 91, 60, 63, 75, 64, 82, 95, 52, 42, 72, 124, 53, 47, 66, 68, 64, 57, 54, 57, 65, 70, 56, 73, 69, 52, 69, 62, 63, 63, 69, 49, 67, 62, 57, 90, 60, 85, 94, 69, 70, 77, 64, 48, 51, 57, 55, 89, 58, 91, 59, 45, 52, 51, 49, 48, 64, 72, 69, 50, 48, 58, 71, 60, 74, 51, 51, 63, 73, 73, 58, 52, 58, 52, 67, 69, 56, 51, 76, 73, 67, 62, 80, 64, 62, 58, 59, 74, 55, 73, 62, 68, 77, 69, 44, 59, 67, 125, 77, 74, 45, 87, 67, 79, 88, 53, 56, 59, 62, 58, 72, 78, 59, 72, 66, 60, 91, 58, 65, 76, 63, 64, 61, 80, 66, 51, 76, 60, 74, 67, 110, 59, 80, 59, 59, 56, 66, 76, 39, 68, 69, 69, 59, 55, 57, 52, 56, 75, 74, 74, 55, 57, 62, 62, 66, 57, 102, 85, 39, 59, 50, 75, 58, 86, 58, 47, 71, 81, 83, 57, 59, 69, 67, 78, 50, 56, 60, 61, 73, 57, 65, 49, 57, 59, 61, 64, 58, 69, 53, 55, 58, 77, 93, 65, 53, 58, 58, 64, 54, 75, 57, 73, 72, 87, 46, 68, 63, 120, 47, 78, 75, 86, 55, 55, 60, 41, 68, 53, 51, 70, 61, 50, 52, 85, 51, 69, 88, 71, 77, 88, 71, 63, 48, 112, 62, 72, 69, 63, 82, 71, 71, 81, 52, 78, 55, 52, 55, 66, 65, 66, 62, 79, 65, 62, 65, 65, 52, 74, 65, 79, 62, 57, 56, 51, 59, 54, 42, 65, 102, 76, 72, 58, 76, 82, 43, 124, 59, 51, 67, 74, 64, 77, 62, 56, 116, 75, 78, 73, 66, 75, 78, 84, 35, 71, 59, 68, 60, 54, 64, 67, 61, 65, 63, 49, 64, 70, 82, 54, 96, 74, 57, 54, 51, 68, 93, 50, 73, 57, 55, 71, 72, 85, 78, 54, 62, 64, 81, 41, 68, 76, 51, 76, 65, 59, 70, 49, 76, 53, 75, 56, 70, 69, 72, 73, 67, 58, 53, 88, 71, 75, 69, 52, 72, 50, 69, 62, 70, 62, 91, 68, 44, 81, 47, 61, 75, 74, 72, 69, 73, 54, 64, 77, 51, 59, 49, 52, 76, 68, 74, 64, 53, 52, 49, 90, 66, 57, 49, 70, 57, 71, 65, 74, 97, 82, 80, 66, 50, 95, 80, 67, 80, 67, 131, 44, 45, 58, 80, 84, 61, 70, 71, 47, 56, 74, 73, 47, 71, 55, 71, 57, 95, 59, 54, 63, 99, 74, 58, 68, 70, 82, 62, 61, 60, 82, 42, 66, 112, 49, 77, 70, 58, 80, 62, 99, 88, 84, 54, 63, 49, 80, 69, 71, 54, 64, 57, 65, 68, 62, 53, 45, 64, 50, 61, 76, 72, 64, 64, 56, 80, 48, 61, 53, 71, 62, 68, 52, 95, 69, 62, 74, 71, 57, 85, 85, 61, 59, 68, 51, 58, 68, 61, 62, 62, 68, 54, 68, 57, 71, 64, 67, 59, 64, 72, 56, 50, 62, 72, 88, 85, 57, 61, 48, 59, 81, 85, 87, 81, 74, 76, 67, 54, 81, 47, 74, 63, 69, 63, 84, 61, 87, 54, 65, 52, 68, 53, 58, 65, 77, 53, 74, 63, 62, 53, 74, 67, 75, 62, 54, 73, 55, 71, 53, 79, 87, 54, 68, 84, 38, 70, 63, 71, 71, 75, 59, 69, 70, 77, 79, 61, 83, 81, 60, 64, 65, 55, 67, 51, 55, 55, 76, 56, 75, 69, 58, 54, 85, 62, 64, 101, 69, 68, 66, 66, 64, 77, 84, 71, 67, 77, 51, 62, 56, 84, 52, 58, 54, 45, 66, 66, 64, 91, 73, 74, 82, 57, 60, 77, 59, 81, 58, 71, 58, 64, 51, 69, 68, 101, 72, 45, 62, 58, 70, 63, 61, 65, 70, 60, 86, 58, 72, 52, 68, 77, 74, 63, 63, 66, 72, 82, 70, 68, 63, 59, 64, 50, 62, 73, 54, 48, 59, 56, 71, 70, 66, 74, 70, 77, 66, 55, 68, 56, 58, 54, 40, 68, 120, 83, 60, 59, 58, 51, 64, 57, 59, 55, 69, 62, 63, 73, 55, 56, 72, 67, 66, 49, 59, 83, 63, 66, 99, 83, 61, 45, 60, 65, 57, 74, 49, 44, 50, 72, 57, 54, 73, 45, 67, 96, 68, 51, 61, 86, 69, 66, 122, 50, 66, 55, 52, 52, 65, 65, 57, 75, 55, 58, 66, 64, 67, 72, 83, 61, 54, 74, 88, 60, 70, 73, 84, 60, 72, 64, 63, 115, 66, 55, 47, 68, 67, 77, 78, 53, 52, 58, 71, 72, 70, 70, 77, 51, 51, 61, 74, 57, 66, 62, 65, 64, 71, 60, 58, 67, 49, 63, 64, 65, 56, 81, 49, 
82, 55, 73, 80, 49, 64, 61, 65, 70, 76, 51, 62, 67, 79, 48, 66, 63, 52, 61, 112, 57, 48, 59, 71, 50, 84, 68, 73, 79, 63, 87, 56, 61, 52, 69, 47, 58, 58, 61, 65, 66, 56, 68, 59, 83, 74, 50, 74, 43, 79, 74, 70, 62, 46, 61, 73, 66, 59, 77, 67, 74, 94, 63, 59, 68, 45, 51, 69, 54, 57, 74, 60, 69, 52, 52, 66, 66, 49, 74, 71, 53, 64, 76, 62, 68, 70, 69, 56, 62, 59, 97, 58, 69, 78, 56, 60, 57, 84, 68, 49, 85, 59, 85, 63, 63, 82, 77, 61, 66, 67, 53, 71, 61, 59, 65, 68, 57, 83, 61, 54, 71, 66, 61, 60, 80, 65, 57, 44, 54, 55, 76, 54, 55, 67, 62, 62, 50, 74, 55, 70, 59, 41, 91, 50, 68, 73, 61, 39, 71, 69, 91, 85, 63, 64, 124, 68, 67, 65, 89, 61, 74, 37, 53, 90, 69, 85, 74, 41, 57, 69, 66, 73, 67, 61, 69, 59, 59, 59, 51, 94, 41, 85, 56, 66, 66, 54, 50, 47, 49, 63, 65, 90, 61, 72, 58, 71, 49, 72, 61, 56, 71, 92, 73, 58, 84, 52, 100, 54, 82, 64, 76, 72, 68, 73, 60, 67, 104, 60, 57, 65, 53, 56, 88, 70, 75, 59, 58, 44, 59, 51, 46, 88, 69, 57, 65, 63, 89, 61, 51, 66, 79, 67, 99, 57, 74, 56, 71, 66, 57, 84, 58, 66, 70, 77, 64, 56, 59, 79, 48, 73, 81, 79, 68, 72, 57, 72, 59, 76, 68, 55, 58, 72, 72, 55, 50, 50, 59, 66, 49, 80, 74, 50, 84, 51, 73, 72, 55, 67, 71, 49, 53, 47, 66, 71, 75, 121, 67, 74, 67, 60, 56, 55, 64, 66, 80, 65, 66, 71, 61, 67, 47, 102, 49, 47, 76, 93, 50, 79, 50, 68, 56, 56, 68, 69, 97, 73, 81, 83, 65, 95, 57, 60, 61, 71, 64, 67, 68, 65, 75, 89, 83, 55, 99, 63, 55, 57, 66, 56, 55, 87, 88, 53, 78, 68, 64, 62, 81, 83, 64, 57, 56, 68, 61, 56, 58, 73, 47, 78, 65, 64, 59, 66, 56, 52, 64, 49, 79, 60, 88, 67, 56, 96, 42, 70, 110, 49, 62, 77, 68, 66, 67, 70, 59, 62, 77, 65, 62, 52, 50, 53, 57, 64, 81, 67, 64, 71, 86, 62, 55, 66, 94, 63, 55, 56, 74, 64, 65, 93, 65, 64, 68, 60, 71, 56, 48, 66, 119, 77, 63, 83, 63, 66, 68, 59, 75, 59, 80, 76, 42, 51, 70, 65, 63, 62, 48, 61, 63, 79, 64, 70, 63, 66, 72, 65, 65, 60, 65, 101, 40, 63, 44, 54, 69, 55, 48, 53, 73, 68, 79, 70, 68, 85, 64, 56, 55, 59, 53, 59, 53, 76, 64, 62, 59, 51, 69, 82, 60, 58, 62, 73, 59, 67, 78, 64, 56, 86, 70, 64, 99, 49, 56, 71, 66, 74, 51, 59, 67, 68, 52, 60, 51, 92, 53, 81, 72, 68, 70, 72, 143, 84, 60, 55, 91, 58, 78, 48, 58, 70, 73, 60, 75, 69, 54, 61, 65, 64, 52, 87, 75, 56, 89, 85, 61, 78, 56, 70, 71, 52, 52, 64, 71, 49, 68, 62, 63, 56, 69, 48, 90, 64, 59, 77, 58, 75, 76, 48, 67, 58, 50, 57, 75, 94, 69, 48, 76, 66, 62, 75, 53, 65, 54, 77, 71, 93, 62, 52, 45, 76, 71, 54, 80, 64, 58, 58, 75, 73, 109, 48, 100, 46, 175, 64, 75, 66, 51, 45, 54, 68, 48, 73, 61, 72, 79, 36, 67, 58, 59, 61, 63, 61, 61, 68, 50, 70, 74, 57, 72, 67, 44, 66, 67, 62, 58, 67, 91, 68, 80, 71, 78, 78, 74, 62, 67, 43, 59, 55, 64, 83, 63, 73, 52, 54, 73, 84, 71, 49, 74, 65, 48, 48, 68, 96, 52, 59, 78, 61, 72, 40, 49, 56, 53, 63, 49, 60, 64, 56, 72, 50, 75, 59, 68, 54, 82, 56, 59, 63, 76, 83, 57, 45, 64, 66, 55, 67, 69, 57, 71, 59, 55, 62, 48, 68, 55, 57, 45, 59, 76, 48, 74, 53, 81, 102, 58, 104, 61, 57, 60, 59, 83, 49, 54, 57, 46, 77, 45, 56, 42, 51, 51, 47, 61, 48, 62, 57, 64, 53, 64, 70, 65, 39, 59, 87, 88, 75, 102, 53, 53, 71, 69, 80, 57, 73, 47, 69, 71, 102, 64, 42, 73, 51, 59, 83, 58, 74, 58, 98, 58, 81, 48, 52, 98, 61, 76, 71, 47, 73, 51, 62, 70, 60, 66, 76, 69, 59, 97, 55, 58, 67, 65, 54, 48, 65, 53, 78, 61, 76, 51, 81, 49, 79, 84, 64, 63, 51, 71, 77, 69, 76, 77, 84, 72, 76, 72, 79, 68, 76, 62, 92, 85, 59, 81, 73, 60, 67, 47, 71, 67, 111, 51, 64, 60, 96, 68, 79, 59, 49, 57, 86, 44, 67, 77, 54, 57, 69, 53, 72, 75, 66, 67, 53, 77, 66, 66, 82, 77, 54, 54, 74, 71, 58, 63, 53, 59, 58, 63, 62, 52, 90, 66, 62, 72, 65, 60, 62, 60, 73, 78, 44, 60, 64, 58, 66, 66, 52, 
74, 44, 67, 64, 63, 79, 61, 59, 50, 69, 51, 67, 76, 76, 57, 60, 69, 55, 80, 73, 59, 57, 71, 66, 61, 91, 66, 83, 60, 65, 58, 47, 55, 73, 61, 90, 99, 79, 71, 53, 69, 75, 83, 65, 81, 79, 46, 57, 61, 71, 62, 69, 53, 56, 81, 75, 66, 62, 112, 85, 70, 81, 59, 81, 42, 77, 66, 52, 65, 86, 58, 45, 126, 58, 55, 58, 52, 45, 41, 62, 69, 92, 58, 81, 66, 82, 63, 71, 59, 64, 66, 61, 72, 60, 64, 53, 48, 90, 59, 71, 74, 49, 60, 81, 79, 53, 57, 101, 49, 73, 75, 63, 72, 69, 64, 67, 52, 72, 100, 73, 105, 62, 66, 75, 55, 53, 93, 58, 69, 41, 41, 61, 58, 79, 73, 73, 64, 50, 52, 69, 60, 80, 66, 57, 53, 71, 62, 62, 79, 35, 82, 56, 61, 63, 61, 59, 76, 73, 65, 68, 46, 71, 82, 74, 77, 57, 64, 72, 59, 51, 94, 63, 70, 58, 86, 89, 84, 59, 73, 63, 57, 59, 70, 64, 74, 59, 68, 50, 60, 44, 59, 68, 70, 61, 68, 54, 67, 46, 89, 68, 54, 70, 70, 48, 46, 59, 61, 71, 67, 79, 72, 64, 58, 54, 73, 105, 69, 65, 60, 61, 65, 58, 56, 59, 69, 57, 53, 59, 37, 55, 73, 56, 77, 118, 66, 53, 51, 63, 77, 45, 54, 71, 83, 60, 70, 49, 71, 105, 58, 69, 84, 112, 75, 80, 77, 70, 74, 55, 75, 66, 58, 72, 60, 66, 56, 60, 58, 73, 63, 60, 56, 79, 62, 58, 59, 57, 69, 46, 65, 73, 81, 46, 55, 50, 46, 92, 63, 57, 60, 54, 71, 50, 49, 62, 74, 76, 69, 70, 77, 57, 52, 59, 53, 66, 50, 62, 69, 62, 65, 58, 67, 68, 55, 42, 85, 51, 87, 65, 65, 49, 61, 68, 85, 64, 78, 71, 74, 59, 58, 49, 34, 65, 58, 46, 71, 71, 58, 69, 100, 40, 71, 79, 47, 82, 71, 91, 95, 51, 66, 54, 80, 68, 43, 97, 58, 50, 56, 91, 57, 51, 61, 83, 48, 50, 103, 64, 71, 58, 51, 79, 68, 85, 59, 56, 64, 67, 58, 71, 65, 78, 75, 47, 66, 48, 62, 77, 100, 49, 56, 57, 79, 52, 72, 64, 87, 96, 68, 67, 57, 58, 70, 60, 66, 58, 73, 69, 59, 49, 69, 55, 59, 54, 71, 56, 53, 78, 56, 56, 63, 60, 101, 53, 67, 67, 79, 48, 47, 57, 67, 62, 60, 68, 64, 76, 99, 62, 49, 70, 67, 63, 49, 97, 53, 76, 69, 72, 77, 56, 57, 55, 67, 59, 76, 64, 60, 49, 61, 87, 72, 76, 112, 62, 76, 63, 52, 72, 69, 98, 72, 85, 64, 59, 59, 72, 49, 79, 57, 62, 56, 71, 71, 60, 57, 71, 60, 54, 48, 52, 86, 60, 63, 66, 67, 64, 74, 64, 70, 74, 55, 68, 67, 55, 57, 45, 66, 78, 67, 54, 46, 49, 63, 46, 81, 70, 55, 63, 55, 80, 58, 84, 61, 81, 91, 57, 83, 72, 61, 71, 64, 78, 67, 60, 85, 67, 64, 74, 50, 50, 54, 63, 77, 67, 67, 69, 46, 50, 68, 67, 56, 72, 42, 70, 60, 69, 74, 65, 58, 72, 75, 61, 67, 44, 79, 58, 66, 66, 85, 64, 50, 66, 72, 52, 59, 59, 62, 60, 88, 73, 77, 73, 60, 99, 63, 95, 57, 75, 70, 55, 58, 61, 31, 41, 67, 49, 65, 56, 65, 52, 80, 71, 59, 66, 39, 59, 78, 43, 73, 106, 53, 68, 61, 74, 60, 81, 66, 74, 61, 76, 63, 70, 57, 55, 58, 80, 60, 60, 57, 69, 48, 68, 47, 49, 65, 66, 76, 55, 60, 76, 140, 83, 66, 63, 61, 65, 60, 62, 64, 69, 65, 79, 64, 76, 69, 62, 59, 49, 72, 48, 66, 93, 58, 94, 71, 59, 69, 82, 51, 64, 57, 56, 65, 63, 50, 68, 71, 63, 70, 55, 47, 64, 62, 63, 104, 63, 56, 72, 71, 56, 55, 67, 64, 53, 59, 49, 54, 48, 67, 91, 73, 48, 72, 68, 68, 94, 53, 55, 77, 64, 49, 64, 65, 40, 56, 76, 62, 76, 69, 61, 58, 65, 34, 57, 82, 87, 75, 51, 71, 96, 79, 72, 55, 57, 55, 106, 76, 81, 53, 83, 73, 75, 61, 64, 72, 59, 45, 62, 51, 62, 63, 87, 59, 59, 69, 46, 72, 89, 87, 63, 66, 68, 60, 50, 47, 72, 79, 55, 60, 55, 59, 61, 103, 74, 70, 66, 50, 62, 91, 92, 84, 45, 43, 80, 55, 69, 56, 55, 37, 111, 75, 52, 85, 75, 93, 75, 53, 64, 39, 75, 59, 59, 66, 76, 119, 52, 58, 76, 52, 78, 60, 58, 73, 55, 62, 54, 72, 110, 66, 91, 75, 52, 52, 71, 65, 64, 49, 73, 64, 77, 61, 76, 72, 66, 80, 60, 53, 52, 83, 68, 55, 70, 42, 62, 75, 71, 53, 72, 66, 86, 71, 48, 79, 48, 59, 75, 68, 77, 81, 49, 61, 53, 72, 82, 59, 63, 85, 64, 67, 62, 68, 61, 79, 69, 80, 69, 68, 76, 50, 66, 69, 51, 57, 79, 75, 
73, 82, 57, 51, 67, 63, 40, 65, 84, 60, 68, 52, 95, 60, 83, 69, 56, 81, 57, 56, 40, 70, 63, 48, 67, 70, 65, 63, 57, 37, 66, 62, 75, 70, 87, 60, 53, 68, 66, 60, 75, 69, 96, 73, 59, 67, 64, 64, 70, 89, 65, 73, 53, 64, 65, 60, 74, 68, 81, 58, 75, 58, 66, 64, 91, 101, 81, 60, 69, 67, 72, 67, 61, 70, 67, 49, 71, 91, 89, 63, 73, 39, 61, 46, 69, 67, 74, 164, 98, 61, 63, 62, 73, 75, 60, 71, 72, 52, 66, 52, 80, 61, 60, 78, 75, 51, 63, 77, 62, 84, 57, 90, 70, 47, 60, 65, 74, 57, 61, 50, 75, 62, 30, 90, 48, 81, 56, 75, 57, 80, 77, 62, 57, 56, 62, 82, 72, 58, 68, 38, 59, 54, 85, 48, 74, 66, 45, 54, 77, 65, 55, 79, 75, 53, 58, 59, 72, 99, 75, 72, 61, 53, 74, 59, 53, 62, 55, 59, 67, 61, 64, 101, 61, 54, 92, 64, 68, 61, 65, 68, 112, 63, 78, 52, 56, 59, 93, 49, 63, 60, 58, 62, 54, 61, 68, 72, 138, 62, 87, 54, 57, 55, 85, 49, 61, 82, 77, 64, 68, 56, 65, 58, 56, 66, 64, 79, 93, 68, 55, 52, 68, 40, 59, 66, 54, 59, 57, 79, 64, 52, 51, 49, 74, 75, 110, 68, 67, 67, 73, 63, 62, 64, 55, 78, 70, 69, 55, 62, 77, 57, 59, 59, 58, 68, 43, 55, 69, 79, 80, 52, 54, 78, 66, 68, 52, 71, 55, 60, 66, 55, 36, 64, 85, 70, 52, 57, 63, 149, 61, 52, 66, 52, 65, 50, 56, 72, 56, 76, 61, 68, 58, 98, 66, 60, 63, 70, 75, 89, 85, 62, 80, 65, 86, 65, 65, 40, 48, 84, 72, 55, 59, 68, 98, 56, 59, 62, 65, 78, 70, 52, 56, 63, 55, 82, 61, 60, 54, 77, 98, 69, 53, 72, 59, 69, 59, 70, 65, 72, 74, 71, 46, 47, 57, 67, 39, 66, 100, 78, 41, 65, 93, 77, 67, 58, 55, 93, 59, 55, 55, 65, 77, 95, 64, 68, 57, 66, 79, 73, 60, 53, 57, 64, 73, 75, 56, 42, 71, 79, 63, 73, 43, 67, 81, 66, 65, 69, 83, 88, 79, 60, 78, 90, 70, 73, 63, 50, 69, 59, 52, 73, 64, 68, 84, 62, 74, 53, 71, 82, 68, 54, 68, 91, 70, 49, 87, 62, 59, 61, 90, 47, 73, 74, 78, 60, 59, 69, 59, 58, 56, 58, 53, 75, 119, 68, 64, 96, 59, 49, 51, 57, 66, 55, 72, 73, 41, 72, 74, 60, 53, 84, 59, 56, 62, 74, 53, 61, 66, 101, 80, 73, 64, 62, 77, 58, 50, 63, 69, 63, 46, 100, 83, 69, 48, 76, 66, 60, 54, 43, 71, 75, 53, 55, 83, 51, 77, 63, 55, 55, 74, 59, 67, 82, 63, 63, 71, 64, 72, 51, 84, 54, 64, 55, 81, 90, 57, 68, 71, 73, 70, 102, 47, 52, 94, 56, 67, 48, 47, 57, 55, 50, 81, 59, 64, 53, 50, 71, 69, 70, 60, 60, 51, 67, 52, 84, 55, 51, 50, 62, 57, 63, 57, 60, 110, 48, 58, 73, 85, 61, 68, 59, 55, 68, 46, 79, 98, 99, 54, 74, 83, 68, 66, 49, 67, 40, 61, 40, 70, 65, 66, 58, 63, 55, 90, 64, 72, 73, 48, 64, 43, 60, 37, 68, 79, 50, 69, 64, 58, 66, 64, 49, 62, 71, 76, 72, 77, 52, 92, 49, 51, 79, 57, 56, 68, 64, 63, 74, 114, 54, 67, 60, 78, 76, 55, 80, 56, 46, 45, 61, 63, 85, 60, 83, 70, 65, 72, 62, 58, 53, 84, 60, 85, 74, 65, 73, 50, 92, 69, 58, 63, 84, 56, 62, 70, 57, 48, 74, 77, 70, 59, 77, 69, 70, 72, 55, 51, 71, 50, 49, 46, 73, 73, 53, 48, 61, 74, 73, 62, 64, 67, 71, 54, 72, 70, 82, 53, 69, 65, 46, 81, 53, 50, 49, 83, 59, 64, 44, 64, 73, 54, 62, 83, 64, 82, 71, 67, 55, 57, 73, 99, 62, 46, 78, 66, 82, 67, 65, 42, 57, 85, 60, 81, 78, 63, 84, 62, 64, 69, 40, 68, 76, 65, 61, 62, 69, 51, 63, 59, 71, 67, 52, 84, 83, 66, 67, 59, 85, 63, 89, 64, 52, 84, 87, 53, 68, 96, 60, 79, 61, 63, 61, 92, 67, 43, 65, 54, 52, 96, 68, 53, 54, 56, 60, 66, 84, 68, 50, 69, 67, 77, 47, 64, 63, 86, 69, 56, 121, 51, 63, 78, 61, 41, 56, 59, 82, 61, 55, 71, 71, 63, 88, 79, 71, 68, 63, 120, 67, 55, 65, 52, 66, 78, 77, 57, 66, 78, 69, 76, 55, 50, 83, 69, 68, 58, 61, 61, 64, 69, 53, 70, 73, 72, 62, 64, 64, 72, 60, 39, 58, 54, 59, 39, 56, 47, 58, 62, 67, 65, 74, 56, 76, 76, 64, 67, 58, 80, 55, 72, 78, 84, 59, 75, 55, 71, 67, 69, 52, 64, 66, 74, 53, 26, 88, 53, 66, 61, 51, 54, 65, 81, 63, 79, 101, 57, 61, 75, 97, 84, 45, 64, 69, 64, 60, 73, 51, 
71, 83, 54, 68, 52, 68, 62, 49, 96, 121, 67, 60, 60, 71, 61, 56, 52, 62, 62, 65, 54, 56, 69, 52, 47, 54, 61, 52, 94, 66, 72, 53, 64, 39, 51, 59, 69, 104, 79, 62, 82, 68, 62, 49, 55, 64, 69, 58, 79, 40, 63, 74, 76, 50, 57, 80, 80, 71, 69, 55, 99, 63, 68, 80, 66, 54, 62, 55, 72, 71, 62, 64, 77, 49, 52, 86, 85, 51, 66, 79, 67, 53, 50, 75, 43, 67, 70, 67, 67, 69, 70, 51, 62, 103, 57, 78, 51, 71, 75, 61, 89, 69, 69, 63, 78, 68, 46, 47, 63, 70, 52, 80, 83, 62, 49, 59, 65, 68, 60, 55, 84, 87, 52, 55, 58, 64, 74, 73, 62, 124, 67, 65, 75, 89, 72, 82, 80, 61, 77, 59, 56, 55, 73, 65, 103, 87, 75, 59, 63, 79, 73, 78, 68, 59, 75, 63, 57, 53, 58, 54, 50, 71, 68, 84, 96, 67, 53, 51, 67, 57, 72, 57, 58, 53, 57, 57, 65, 71, 84, 69, 98, 55, 48, 83, 100, 66, 55, 57, 101, 49, 78, 82, 72, 57, 71, 48, 55, 60, 63, 106, 56, 65, 57, 55, 71, 61, 49, 73, 68, 83, 56, 72, 44, 53, 55, 59, 60, 80, 66, 58, 80, 69, 63, 120, 63, 87, 51, 79, 55, 58, 52, 52, 67, 63, 72, 57, 72, 64, 60, 77, 59, 50, 79, 78, 57, 61, 56, 62, 70, 50, 53, 69, 63, 47, 73, 56, 49, 53, 48, 68, 70, 73, 64, 80, 72, 50, 57, 79, 54, 68, 64, 49, 70, 75, 57, 56, 71, 56, 80, 76, 102, 43, 39, 60, 75, 67, 73, 51, 117, 65, 91, 54, 91, 59, 66, 69, 62, 73, 41, 61, 69, 84, 74, 77, 99, 81, 76, 76, 55, 89, 82, 71, 64, 71, 71, 61, 46, 57, 76, 94, 83, 49, 73, 68, 67, 52, 46, 89, 72, 65, 85, 66, 51, 79, 53, 65, 65, 61, 83, 70, 58, 62, 58, 62, 70, 77, 68, 88, 54, 76, 67, 86, 62, 69, 79, 98, 56, 86, 55, 76, 72, 58, 68, 94, 40, 67, 70, 47, 55, 87, 96, 64, 68, 49, 52, 70, 64, 45, 54, 69, 93, 57, 90, 66, 80, 77, 55, 46, 52, 55, 97, 62, 66, 54, 45, 65, 51, 98, 49, 62, 85, 53, 64, 70, 83, 59, 72, 94, 58, 59, 69, 74, 90, 62, 74, 63, 69, 78, 61, 59, 54, 62, 55, 56, 76, 83, 53, 70, 56, 49, 57, 83, 83, 53, 55, 65, 64, 41, 62, 65, 69, 63, 72, 73, 62, 55, 50, 72, 62, 82, 60, 65, 49, 67, 67, 56, 67, 70, 46, 55, 69, 54, 79, 76, 78, 58, 49, 64, 82, 59, 61, 63, 40, 81, 49, 42, 65, 75, 57, 61, 60, 66, 69, 59, 73, 51, 75, 64, 70, 77, 55, 50, 56, 82, 78, 76, 66, 52, 78, 58, 89, 45, 50, 69, 62, 89, 48, 47, 76, 107, 52, 70, 75, 74, 79, 61, 60, 54, 45, 68, 58, 90, 54, 68, 63, 88, 53, 78, 66, 73, 51, 95, 69, 69, 65, 61, 68, 42, 49, 58, 80, 59, 78, 67, 75, 81, 67, 53, 56, 71, 68, 77, 64, 84, 58, 66, 74, 51, 56, 65, 99, 59, 65, 54, 61, 54, 77, 51, 67, 58, 70, 67, 58, 80, 72, 71, 70, 54, 77, 77, 72, 60, 71, 51, 50, 54, 72, 63, 65, 77, 75, 81, 101, 78, 89, 48, 84, 138, 67, 54, 67, 101, 73, 56, 56, 49, 58, 61, 73, 71, 72, 80, 74, 63, 47, 61, 69, 73, 55, 68, 66, 75, 37, 52, 107, 76, 63, 64, 66, 72, 85, 72, 62, 64, 66, 60, 78, 73, 64, 59, 54, 51, 68, 56, 59, 56, 83, 61, 68, 50, 59, 55, 76, 97, 60, 73, 74, 64, 44, 95, 90, 71, 59, 100, 50, 65, 66, 60, 46, 64, 62, 82, 58, 102, 64, 65, 66, 108, 71, 66, 61, 60, 70, 82, 73, 48, 60, 63, 100, 52, 76, 50, 62, 74, 66, 74, 57, 88, 93, 65, 64, 57, 76, 72, 55, 48, 74, 52, 62, 55, 73, 49, 53, 64, 81, 76, 54, 78, 77, 64, 83, 51, 75, 60, 63, 66, 53, 60, 89, 74, 95, 74, 51, 46, 63, 71, 47, 59, 59, 54, 87, 77, 52, 57, 68, 71, 73, 53, 53, 60, 66, 72, 53, 46, 51, 53, 55, 49, 79, 71, 59, 56, 73, 54, 65, 56, 56, 102, 69, 86, 45, 72, 107, 52, 58, 60, 79, 64, 86, 74, 58, 73, 69, 65, 63, 70, 63, 71, 58, 52, 61, 56, 50, 63, 73, 70, 67, 53, 53, 47, 71, 62, 74, 70, 74, 65, 38, 59, 58, 84, 70, 64, 43, 70, 53, 49, 61, 80, 85, 52, 51, 64, 79, 82, 64, 65, 46, 90, 53, 38, 78, 59, 71, 55, 57, 49, 60, 72, 64, 64, 102, 67, 69, 54, 81, 44, 76, 54, 67, 70, 53, 79, 72, 63, 57, 59, 61, 79, 91, 66, 72, 91, 70, 70, 113, 63, 75, 55, 54, 64, 67, 58, 86, 62, 46, 60, 79, 40, 68, 53, 68, 63, 
46, 93, 62, 62, 60, 90, 44, 53, 52, 56, 38, 70, 52, 58, 64, 56, 61, 82, 67, 91, 70, 67, 64, 76, 56, 58, 76, 69, 67, 84, 69, 62, 48, 49, 55, 80, 72, 60, 60, 69, 52, 53, 72, 66, 50, 57, 47, 72, 62, 69, 54, 80, 60, 56, 64, 52, 86, 67, 52, 62, 63, 97, 71, 70, 76, 59, 51, 83, 65, 67, 61, 58, 73, 60, 72, 71, 78, 71, 64, 56, 62, 63, 57, 53, 99, 58, 75, 66, 72, 71, 57, 46, 64, 73, 73, 71, 59, 73, 82, 66, 84, 72, 67, 46, 63, 56, 56, 56, 80, 52, 80, 66, 56, 70, 59, 67, 65, 47, 47, 41, 51, 60, 82, 74, 75, 89, 61, 64, 102, 63, 106, 59, 56, 42, 63, 55, 61, 61, 63, 50, 47, 61, 58, 51, 59, 59, 58, 90, 58, 75, 61, 51, 63, 92, 76, 87, 60, 60, 87, 92, 67, 64, 59, 55, 50, 52, 69, 51, 90, 77, 75, 69, 55, 104, 53, 67, 73, 60, 82, 61, 58, 76, 58, 58, 57, 65, 47, 80, 51, 69, 69, 44, 93, 53, 46, 70, 77, 80, 75, 90, 75, 80, 67, 47, 57, 54, 57, 72, 74, 67, 71, 78, 58, 57, 59, 45, 48, 55, 81, 77, 90, 57, 62, 54, 60, 77, 47, 65, 69, 70, 80, 91, 77, 55, 73, 57, 54, 56, 46, 95, 58, 57, 59, 59, 64, 66, 62, 60, 78, 73, 79, 63, 82, 81, 61, 50, 56, 75, 68, 75, 62, 59, 74, 62, 79, 47, 58, 70, 64, 69, 66, 58, 54, 40, 71, 64, 61, 71, 50, 92, 74, 52, 74, 108, 78, 63, 51, 66, 60, 83, 93, 96, 70, 67, 67, 71, 77, 57, 70, 70, 50, 65, 50, 55, 74, 71, 68, 68, 54, 66, 67, 103, 71, 55, 63, 71, 90, 41, 68, 84, 115, 76, 73, 71, 67, 65, 74, 77, 74, 53, 57, 39, 74, 67, 55, 67, 75, 61, 53, 74, 69, 44, 58, 99, 50, 64, 76, 58, 66, 45, 69, 49, 63, 56, 54, 83, 81, 87, 58, 84, 62, 81, 72, 75, 73, 87, 54, 52, 78, 102, 66, 84, 76, 72, 62, 48, 69, 56, 80, 66, 64, 59, 66, 67, 72, 50, 83, 72, 67, 68, 43, 61, 67, 51, 67, 60, 58, 54, 74, 80, 56, 55, 61, 98, 71, 52, 44, 65, 71, 50, 55, 70, 57, 44, 59, 83, 55, 53, 56, 64, 76, 55, 98, 61, 111, 66, 49, 52, 74, 65, 55, 64, 78, 56, 79, 80, 48, 54, 61, 64, 67, 62, 57, 76, 53, 65, 54, 52, 89, 57, 75, 59, 68, 77, 59, 79, 47, 65, 51, 58, 77, 44, 62, 93, 53, 74, 47, 47, 42, 57, 58, 72, 64, 41, 81, 76, 52, 67, 48, 75, 95, 47, 62, 51, 71, 55, 52, 80, 56, 66, 54, 59, 70, 81, 81, 99, 111, 79, 70, 66, 68, 71, 83, 55, 70, 57, 68, 50, 82, 55, 54, 57, 59, 109, 75, 59, 55, 67, 68, 65, 78, 50, 54, 55, 54, 68, 57, 81, 92, 65, 65, 39, 69, 87, 74, 53, 66, 51, 77, 70, 47, 81, 43, 59, 71, 55, 53, 64, 65, 64, 65, 55, 37, 80, 67, 52, 80, 89, 47, 51, 76, 60, 83, 68, 63, 67, 52, 49, 68, 96, 56, 57, 63, 73, 82, 59, 71, 42, 68, 66, 58, 60, 55, 59, 70, 80, 71, 64, 63, 89, 78, 67, 86, 76, 58, 57, 51, 70, 59, 122, 70, 87, 73, 59, 50, 80, 68, 74, 59, 52, 52, 55, 62, 69, 67, 77, 76, 67, 67, 71, 71, 62, 64, 58, 58, 70, 61, 46, 73, 50, 71, 130, 76, 81, 77, 80, 52, 64, 100, 71, 72, 63, 58, 79, 60, 62, 69, 61, 69, 65, 74, 73, 58, 75, 61, 44, 56, 72, 66, 80, 43, 67, 59, 79, 106, 69, 73, 59, 76, 36, 52, 73, 45, 68, 70, 76, 66, 65, 78, 80, 66, 62, 64, 61, 59, 63, 43, 81, 57, 58, 49, 55, 68, 66, 51, 71, 58, 70, 64, 61, 63, 42, 62, 64, 54, 55, 51, 72, 56, 69, 55, 54, 64, 47, 72, 51, 61, 45, 75, 60, 77, 74, 62, 76, 64, 66, 72, 48, 53, 43, 84, 60, 74, 59, 75, 51, 63, 62, 74, 79, 60, 68, 43, 80, 69, 58, 83, 81, 44, 70, 87, 49, 71, 73, 64, 67, 55, 69, 70, 68, 51, 48, 65, 67, 82, 62, 48, 71, 106, 74, 65, 61, 53, 89, 62, 67, 71, 70, 46, 57, 53, 54, 64, 53, 99, 72, 100, 67, 58, 94, 54, 54, 49, 75, 51, 87, 72, 67, 77, 52, 58, 57, 46, 79, 108, 56, 63, 71, 56, 78, 68, 59, 49, 80, 66, 60, 63, 103, 54, 58, 65, 39, 53, 77, 48, 67, 55, 60, 71, 53, 66, 46, 73, 75, 63, 52, 73, 79, 101, 55, 73, 61, 65, 83, 77, 55, 84, 63, 47, 59, 52, 59, 61, 76, 58, 91, 60, 56, 69, 69, 56, 82, 68, 48, 50, 77, 53, 65, 69, 91, 70, 76, 65, 78, 84, 55, 62, 65, 64, 53, 72, 74, 49, 67, 
64, 55, 56, 55, 53, 63, 71, 47, 39, 64, 143, 69, 63, 81, 66, 80, 85, 54, 83, 62, 71, 58, 69, 64, 66, 72, 56, 73, 81, 74, 59, 81, 51, 75, 50, 84, 88, 77, 64, 72, 89, 83, 73, 56, 58, 73, 77, 97, 77, 64, 67, 58, 69, 46, 74, 72, 94, 72, 56, 53, 59, 68, 63, 74, 87, 48, 65, 58, 57, 52, 66, 70, 57, 69, 65, 71, 89, 57, 63, 117, 73, 71, 72, 55, 61, 63, 61, 71, 80, 81, 76, 65, 89, 61, 64, 62, 83, 46, 77, 65, 63, 66, 69, 74, 51, 61, 52, 57, 66, 56, 59, 70, 66, 55, 63, 61, 65, 72, 48, 53, 63, 55, 63, 61, 70, 51, 51, 64, 84, 76, 75, 67, 70, 82, 73, 48, 70, 67, 41, 54, 65, 52, 75, 51, 66, 45, 51, 79, 72, 75, 94, 54, 71, 61, 58, 71, 57, 78, 57, 69, 75, 60, 65, 56, 61, 49, 55, 74, 57, 195, 50, 73, 56, 48, 68, 69, 61, 94, 58, 69, 64, 72, 81, 63, 53, 74, 54, 66, 57, 46, 98, 63, 60, 84, 70, 67, 49, 59, 69, 84, 90, 81, 79, 61, 66, 55, 79, 71, 48, 63, 68, 53, 63, 50, 66, 58, 76, 46, 59, 67, 62, 47, 75, 79, 65, 62, 64, 56, 53, 70, 70, 71, 75, 54, 56, 80, 56, 47, 69, 60, 56, 66, 55, 76, 63, 63, 60, 53, 70, 69, 60, 59, 53, 40, 60, 51, 71, 51, 78, 80, 48, 77, 122, 84, 70, 63, 61, 54, 67, 65, 67, 49, 122, 72, 76, 72, 83, 68, 69, 57, 69, 82, 63, 86, 72, 52, 65, 61, 88, 75, 44, 52, 53, 56, 76, 87, 74, 67, 45, 83, 82, 60, 89, 53, 86, 63, 62, 94, 59, 71, 71, 74, 44, 55, 56, 71, 77, 80, 60, 118, 81, 60, 68, 74, 61, 67, 78, 59, 71, 82, 81, 73, 58, 53, 95, 61, 67, 70, 70, 73, 53, 82, 74, 51, 97, 90, 68, 56, 52, 80, 80, 71, 63, 56, 62, 49, 78, 75, 88, 78, 63, 54, 74, 69, 60, 57, 74, 53, 63, 96, 73, 60, 74, 81, 67, 64, 39, 82, 44, 54, 62, 70, 73, 69, 80, 53, 80, 43, 77, 60, 37, 89, 47, 63, 52, 68, 52, 61, 67, 70, 58, 52, 65, 74, 58, 50, 50, 50, 52, 60, 67, 95, 70, 72, 52, 63, 78, 58, 49, 59, 44, 55, 58, 44, 64, 57, 60, 85, 66, 41, 49, 50, 67, 57, 67, 66, 61, 76, 53, 44, 68, 49, 66, 87, 47, 54, 60, 84, 54, 55, 94, 64, 61, 43, 73, 76, 81, 50, 34, 78, 77, 166, 42, 84, 87, 85, 31, 56, 41, 64, 77, 61, 50, 59, 46, 74, 66, 87, 69, 59, 98, 81, 70, 83, 81, 78, 49, 61, 48, 47, 56, 66, 70, 68, 52, 73, 50, 72, 53, 70, 55, 59, 58, 57, 53, 62, 53, 82, 54, 71, 78, 72, 52, 69, 49, 65, 62, 94, 95, 73, 72, 43, 66, 72, 48, 53, 57, 77, 78, 64, 89, 41, 65, 77, 64, 65, 42, 126, 54, 72, 95, 72, 65, 58, 51, 56, 53, 70, 59, 70, 73, 63, 51, 60, 58, 68, 74, 62, 69, 64, 76, 68, 72, 46, 69, 82, 62, 58, 58, 76, 124, 49, 101, 57, 66, 58, 73, 83, 69, 84, 84, 47, 62, 56, 46, 64, 70, 62, 73, 56, 63, 66, 44, 69, 54, 57, 59, 69, 65, 90, 76, 61, 54, 62, 56, 73, 55, 68, 57, 76, 62, 67, 65, 70, 65, 63, 66, 58, 62, 89, 86, 62, 57, 56, 37, 73, 75, 48, 47, 90, 61, 73, 67, 51, 48, 52, 50, 100, 63, 59, 65, 47, 69, 121, 57, 73, 75, 91, 60, 73, 33, 49, 73, 86, 52, 81, 43, 55, 54, 75, 73, 41, 83, 61, 65, 47, 81, 47, 68, 63, 55, 60, 45, 69, 66, 57, 53, 83, 52, 70, 58, 66, 111, 72, 66, 82, 65, 56, 62, 73, 71, 66, 88, 58, 91, 79, 54, 110, 49, 85, 54, 109, 73, 89, 54, 33, 58, 41, 46, 76, 45, 67, 52, 51, 84, 103, 81, 70, 39, 47, 67, 78, 62, 67, 77, 75, 67, 78, 68, 79, 131, 50, 81, 67, 76, 84, 59, 120, 49, 50, 75, 37, 63, 67, 50, 87, 58, 33, 79, 58, 61, 98, 69, 56, 44, 53, 70, 64, 66, 64, 63, 65, 64, 40, 71, 54, 62, 43, 48, 88, 78, 67, 75, 91, 66, 77, 94, 58, 50, 77, 95, 75, 72, 51, 78, 64, 98, 59, 53, 78, 117, 42, 78, 98, 59, 54, 56, 68, 55, 53, 71, 96, 63, 51, 50, 66, 60, 68, 59, 60, 34, 66, 77, 58, 101, 71, 79, 54, 49, 83, 72, 60, 34, 58, 50, 60, 49, 69, 56, 84, 72, 51, 88, 71, 47, 76, 63, 102, 84, 65, 57, 108, 67, 46, 59, 35, 68, 76, 78, 73, 69, 81, 83, 58, 58, 67, 65, 72, 55, 83, 57, 85, 100, 47, 51, 70, 69, 89, 102, 85, 67, 58, 56, 70, 77, 56, 97, 62, 24, 64, 62, 76, 
38, 59, 65, 100, 52, 67, 82, 59, 70, 39, 75, 56, 74, 73, 67, 70, 75, 58, 88, 50, 59, 108, 67, 51, 52, 38, 107, 71, 57, 68, 46, 75, 65, 95, 49, 70, 88, 59, 69, 76, 64, 58, 54, 57, 62, 78, 44, 70, 84, 81, 79, 96, 67, 77, 56, 69, 55, 54, 71, 82, 89, 70, 71, 36, 38, 58, 62, 81, 76, 62, 61, 34, 53, 77, 60, 47, 75, 64, 60, 78, 79, 76, 102, 69, 66, 89, 51, 78, 73, 78, 92, 85, 79, 103, 60, 56, 71, 100, 87, 59, 51, 64, 80, 49, 57, 74, 57, 58, 57, 62, 82, 61, 51, 63, 60, 72, 41, 57, 82, 63, 63, 83, 86, 85, 50, 58, 61, 69, 71, 74, 84, 87, 56, 74, 69, 78, 63, 61, 74, 48, 112, 66, 65, 98, 60, 73, 62, 85, 74, 56, 50, 45, 60, 56, 75, 72, 68, 69, 85, 56, 53, 55, 61, 66, 39, 88, 72, 61, 76, 90, 82, 69, 69, 47, 66, 42, 57, 80, 63, 54, 61, 59, 87, 134, 71, 63, 73, 60, 81, 50, 82, 75, 34, 58, 38, 85, 65, 83, 85, 95, 72, 72, 55, 62, 64, 64, 117, 64, 102, 52, 74, 51, 61, 52, 66, 72, 63, 71, 84, 63, 67, 78, 46, 142, 45, 69, 57, 41, 59, 59, 55, 59, 73, 42, 72, 62, 71, 79, 76, 86, 107, 90, 75, 67, 50, 65, 51, 49, 40, 63, 63, 57, 56, 70, 66, 59, 69, 50, 45, 58, 62, 67, 55, 75, 58, 64, 73, 58, 88, 51, 72, 80, 59, 69, 63, 62, 45, 34, 60, 51, 91, 71, 74, 64, 43, 71, 67, 69, 63, 122, 42, 77, 59, 62, 69, 72, 66, 63, 72, 68, 60, 102, 73, 62, 76, 78, 49, 52, 51, 47, 121, 63, 65, 44, 54, 57, 59, 62, 68, 65, 69, 55, 100, 73, 67, 55, 44, 62, 45, 60, 78, 82, 76, 148, 89, 63, 69, 48, 53, 65, 74, 82, 53, 66, 70, 54, 64, 81, 83, 100, 41, 59, 61, 53, 77, 69, 66, 80, 85, 49, 47, 88, 54, 55, 77, 59, 75, 46, 45, 64, 61, 90, 55, 77, 76, 81, 67, 44, 51, 63, 67, 66, 45, 40, 86, 53, 52, 77, 54, 62, 61, 112, 78, 92, 73, 48, 63, 61, 43, 74, 63, 116, 75, 44, 61, 67, 39, 62, 65, 73, 75, 53, 76, 72, 72, 66, 48, 59, 78, 62, 90, 70, 58, 59, 63, 52, 61, 58, 47, 96, 67, 59, 59, 71, 52, 43, 55, 54, 76, 91, 64, 78, 63, 80, 77, 50, 53, 69, 46, 90, 65, 67, 97, 67, 58, 51, 63, 62, 50, 57, 40, 53, 59, 84, 75, 59, 69, 55, 58, 85, 91, 56, 59, 65, 83, 53, 50, 79, 48, 62, 40, 76, 74, 71, 92, 62, 73, 78, 92, 79, 63, 87, 83, 64, 69, 54, 40, 46, 82, 96, 54, 43, 72, 66, 55, 69, 56, 75, 68, 54, 71, 75, 100, 83, 58, 43, 130, 57, 57, 44, 67, 47, 43, 65, 59, 49, 55, 104, 67, 62, 49, 78, 100, 76, 75, 87, 63, 53, 66, 63, 60, 53, 69, 51, 78, 90, 68, 65, 69, 72, 72, 66, 43, 52, 50, 65, 88, 78, 61, 64, 65, 72, 71, 71, 56, 48, 67, 77, 56, 64, 63, 64, 57, 79, 78, 91, 51, 62, 67, 58, 97, 65, 62, 68, 50, 62, 51, 58, 73, 54, 82, 58, 48, 66, 53, 114, 83, 64, 78, 69, 88, 79, 56, 61, 82, 82, 67, 70, 65, 57, 85, 77, 65, 103, 57, 62, 78, 61, 62, 60, 85, 56, 66, 63, 72, 66, 96, 59, 58, 53, 45, 49, 70, 65, 59, 44, 54, 60, 62, 63, 73, 58, 90, 58, 72, 54, 78, 66, 59, 62, 60, 81, 72, 76, 40, 57, 64, 55, 60, 71, 60, 68, 53, 63, 48, 66, 59, 71, 59, 85, 73, 61, 60, 66, 78, 65, 66, 69, 71, 56, 70, 54, 51, 42, 51, 63, 67, 82, 60, 40, 78, 62, 55, 56, 67, 84, 61, 71, 67, 75, 74, 78, 64, 63, 66, 53, 59, 63, 71, 79, 79, 64, 58, 54, 57, 122, 50, 64, 58, 57, 37, 55, 57, 58, 77, 60, 95, 71, 51, 72, 79, 57, 60, 71, 66, 79, 56, 65, 77, 65, 62, 60, 122, 61, 70, 66, 62, 68, 60, 65, 78, 53, 66, 65, 72, 49, 58, 60, 53, 74, 54, 88, 73, 54, 56, 68, 54, 69, 99, 68, 69, 65, 69, 60, 48, 70, 56, 73, 73, 73, 58, 73, 66, 53, 64, 64, 76, 60, 57, 72, 66, 46, 55, 69, 63, 47, 55, 56, 69, 52, 68, 59, 72, 65, 45, 56, 62, 40, 43, 63, 66, 78, 53, 65, 53, 61, 101, 66, 62, 56, 54, 68, 63, 47, 73, 60, 73, 57, 73, 46, 53, 56, 58, 61, 72, 68, 61, 63, 71, 54, 63, 55, 57, 72, 50, 76, 54, 68, 66, 38, 70, 72, 53, 70, 78, 53, 66, 63, 85, 60, 54, 63, 85, 67, 60, 113, 70, 58, 74, 61, 54, 69, 52, 75, 56, 55, 75, 55, 71, 64, 54, 
55, 54, 56, 72, 62, 71, 61, 101, 62, 62, 56, 62, 60, 62, 82, 60, 55, 52, 61, 55, 51, 82, 91, 65, 65, 61, 54, 53, 69, 59, 76, 70, 59, 72, 73, 54, 60, 56, 56, 44, 66, 116, 69, 56, 63, 66, 51, 57, 61, 93, 61, 58, 42, 63, 64, 60, 73, 81, 204, 66, 82, 75, 60, 41, 48, 54, 50, 67, 64, 72, 70, 60, 55, 58, 47, 57, 55, 55, 56, 71, 116, 48, 50, 76, 63, 68, 71, 73, 71, 78, 52, 57, 60, 61, 49, 49, 71, 53, 52, 62, 65, 87, 54, 55, 59, 51, 65, 56, 62, 48, 61, 60, 64, 70, 58, 50, 57, 80, 50, 67, 55, 57, 61, 61, 64, 59, 78, 77, 61, 61, 71, 65, 63, 66, 85, 55, 48, 84, 107, 55, 73, 54, 77, 79, 74, 62, 60, 66, 53, 74, 58, 55, 57, 62, 62, 63, 84, 86, 58, 75, 49, 53, 61, 60, 64, 58, 72, 90, 79, 76, 83, 60, 61, 83, 57, 64, 71, 51, 70, 51, 58, 54, 75, 60, 69, 59, 78, 68, 67, 65, 51, 73, 76, 76, 49, 55, 53, 67, 67, 59, 85, 62, 54, 51, 56, 70, 72, 94, 71, 63, 54, 74, 77, 54, 63, 51, 59, 59, 74, 65, 61, 70, 64, 57, 70, 73, 61, 64, 61, 64, 57, 58, 52, 51, 64, 66, 52, 73, 47, 67, 65, 61, 49, 58, 59, 54, 112, 58, 60, 87, 90, 76, 67, 101, 68, 78, 59, 62, 55, 78, 72, 76, 73, 72, 71, 60, 72, 70, 66, 78, 68, 54, 70, 64, 57, 71, 65, 64, 62, 102, 77, 54, 66, 62, 60, 82, 64, 74, 98, 76, 82, 57, 65, 57, 51, 47, 41, 89, 83, 74, 82, 55, 67, 95, 97, 63, 60, 56, 82, 62, 63, 50, 79, 63, 107, 63, 71, 72, 65, 53, 92, 67, 68, 52, 54, 79, 55, 57, 67, 52, 85, 61, 73, 73, 86, 59, 77, 59, 53, 61, 68, 60, 61, 55, 57, 71, 82, 57, 69, 63, 67, 59, 70, 76, 77, 84, 72, 70, 60, 64, 79, 55, 48, 62, 61, 74, 72, 64, 60, 67, 58, 73, 50, 60, 74, 57, 60, 69, 64, 65, 65, 65, 57, 71, 82, 64, 58, 63, 50, 62, 58, 47, 80, 62, 59, 68, 53, 113, 64, 52, 43, 62, 73, 53, 66, 62, 58, 45, 57, 73, 76, 55, 66, 66, 63, 53, 63, 65, 63, 74, 71, 78, 96, 53, 63, 60, 75, 53, 66, 51, 59, 64, 73, 70, 82, 58, 74, 58, 84, 56, 72, 68, 83, 74, 74, 83, 41, 65, 65, 66, 45, 63, 64, 61, 63, 83, 56, 84, 67, 71, 75, 58, 64, 74, 52, 56, 48, 55, 75, 101, 59, 58, 62, 68, 55, 75, 68, 56, 69, 51, 60, 62, 72, 74, 48, 65, 81, 75, 67, 63, 49, 59, 95, 68, 66, 38, 62, 58, 60, 59, 63, 71, 80, 72, 62, 91, 61, 57, 68, 67, 89, 62, 64, 48, 55, 67, 60, 72, 63, 74, 60, 66, 56, 51, 73, 53, 58, 74, 71, 80, 55, 56, 63, 69, 80, 141, 73, 67, 149, 54, 78, 85, 62, 67, 58, 61, 74, 70, 61, 72, 74, 53, 86, 78, 56, 48, 54, 53, 78, 68, 55, 71, 65, 91, 72, 60, 59, 54, 52, 46, 62, 80, 60, 56, 58, 64, 65, 62, 72, 62, 82, 52, 68, 83, 58, 106, 63, 59, 91, 84, 62, 80, 69, 66, 72, 58, 65, 52, 52, 60, 54, 77, 77, 80, 55, 82, 56, 65, 75, 53, 62, 68, 56, 75, 54, 51, 86, 70, 112, 82, 42, 71, 74, 86, 60, 63, 73, 54, 61, 64, 68, 69, 73, 72, 64, 78, 55, 55, 56, 59, 78, 61, 63, 86, 51, 44, 66, 85, 63, 66, 70, 85, 79, 69, 56, 54, 128, 63, 64, 63, 64, 67, 77, 60, 63, 63, 46, 64, 62, 53, 80, 68, 59, 54, 54, 78, 56, 61, 66, 53, 64, 70, 59, 46, 79, 50, 61, 77, 77, 112, 77, 77, 86, 68, 40, 56, 61, 95, 95, 82, 75, 63, 68, 57, 70, 71, 70, 68, 84, 55, 62, 68, 53, 74, 60, 50, 53, 67, 77, 48, 59, 63, 64, 96, 66, 80, 67, 53, 71, 57, 61, 65, 77, 56, 44, 67, 76, 59, 71, 62, 78, 91, 70, 63, 64, 57, 73, 69, 44, 51, 54, 62, 49, 63, 62, 61, 96, 75, 103, 76, 67, 62, 52, 53, 57, 41, 67, 96, 70, 67, 58, 70, 75, 48, 60, 57, 53, 79, 68, 65, 51, 76, 104, 53, 109, 62, 66, 57, 59, 62, 61, 52, 113, 43, 58, 65, 49, 57, 60, 82, 54, 74, 53, 55, 51, 91, 59, 91, 66, 72, 73, 62, 67, 67, 70, 74, 63, 59, 71, 54, 59, 64, 67, 54, 66, 56, 68, 46, 68, 65, 44, 53, 55, 65, 86, 72, 61, 62, 65, 81, 52, 52, 71, 50, 85, 81, 28, 54, 88, 58, 70, 42, 61, 65, 64, 57, 84, 59, 66, 72, 64, 53, 61, 64, 66, 75, 63, 70, 57, 61, 64, 56, 42, 56, 56, 53, 56, 68, 92, 63, 47, 57, 84, 
58, 63, 55, 55, 55, 60, 70, 70, 68, 64, 71, 103, 86, 74, 53, 64, 56, 83, 54, 47, 50, 88, 72, 69, 63, 53, 68, 60, 70, 56, 64, 61, 77, 81, 64, 59, 69, 60, 54, 67, 99, 77, 80, 52, 63, 58, 73, 70, 59, 56, 66, 55, 56, 55, 96, 53, 73, 55, 61, 78, 66, 64, 68, 66, 49, 57, 55, 60, 60, 46, 61, 72, 71, 72, 55, 75, 87, 46, 57, 67, 86, 58, 56, 61, 56, 63, 50, 85, 131, 58, 68, 57, 49, 50, 57, 64, 65, 67, 60, 61, 42, 55, 52, 58, 82, 38, 65, 69, 78, 44, 50, 60, 65, 66, 56, 63, 47, 59, 73, 68, 81, 62, 70, 55, 60, 63, 54, 68, 73, 46, 62, 58, 80, 71, 57, 55, 62, 56, 66, 58, 57, 57, 51, 60, 51, 63, 62, 94, 91, 70, 63, 59, 103, 74, 79, 59, 74, 67, 81, 77, 56, 94, 60, 56, 47, 52, 61, 90, 89, 61, 59, 56, 62, 63, 58, 78, 60, 63, 84, 63, 63, 56, 63, 65, 57, 58, 61, 62, 75, 61, 57, 70, 51, 67, 63, 67, 63, 94, 75, 59, 81, 66, 62, 86, 54, 74, 102, 54, 83, 52, 48, 57, 44, 58, 74, 62, 97, 68, 86, 53, 60, 56, 56, 60, 60, 51, 66, 74, 65, 58, 65, 61, 70, 77, 59, 76, 50, 71, 60, 54, 70, 76, 58, 65, 66, 64, 55, 87, 109, 86, 83, 71, 64, 68, 107, 73, 35, 76, 60, 71, 74, 71, 93, 60, 65, 94, 60, 64, 67, 107, 64, 46, 80, 67, 64, 73, 67, 57, 67, 51, 57, 61, 73, 63, 58, 71, 64, 59, 65, 62, 65, 54, 73, 73, 71, 56, 76, 59, 73, 59, 52, 48, 62, 54, 57, 81, 55, 62, 98, 56, 61, 60, 87, 84, 82, 63, 67, 63, 65, 59, 66, 75, 58, 55, 62, 72, 59, 75, 55, 49, 71, 66, 96, 55, 75, 69, 57, 102, 55, 65, 71, 43, 67, 48, 64, 80, 72, 63, 52, 63, 69, 68, 64, 96, 61, 71, 63, 45, 62, 63, 53, 54, 82, 64, 55, 65, 78, 42, 70, 65, 56, 53, 78, 55, 68, 73, 48, 76, 65, 64, 54, 67, 64, 75, 58, 52, 68, 66, 93, 66, 77, 85, 84, 62, 62, 62, 65, 72, 55, 76, 130, 79, 44, 61, 60, 77, 53, 49, 94, 66, 64, 58, 73, 51, 73, 57, 72, 52, 84, 97, 66, 49, 64, 76, 65, 58, 89, 87, 69, 92, 59, 68, 49, 87, 66, 46, 67, 49, 61, 68, 60, 53, 57, 64, 86, 67, 66, 70, 59, 82, 73, 53, 72, 63, 79, 62, 67, 103, 45, 58, 60, 75, 53, 53, 61, 58, 49, 59, 71, 85, 78, 52, 62, 70, 72, 67, 57, 69, 47, 129, 66, 82, 77, 66, 81, 84, 58, 65, 49, 60, 65, 61, 55, 57, 82, 65, 60, 58, 54, 111, 68, 53, 63, 66, 45, 95, 67, 75, 130, 54, 57, 62, 71, 57, 66, 68, 91, 64, 65, 67, 66, 63, 58, 64, 64, 75, 65, 78, 58, 54, 69, 63, 63, 53, 98, 61, 52, 79, 96, 60, 66, 58, 48, 67, 70, 58, 59, 75, 48, 67, 55, 65, 70, 57, 72, 70, 91, 77, 70, 64, 48, 68, 75, 44, 58, 66, 60, 67, 60, 53, 60, 82, 62, 66, 54, 85, 74, 76, 58, 96, 61, 76, 55, 84, 59, 60, 48, 83, 47, 64, 71, 69, 65, 95, 49, 61, 101, 76, 72, 54, 70, 70, 55, 94, 89, 50, 65, 43, 54, 94, 115, 69, 66, 77, 62, 93, 63, 58, 61, 69, 50, 70, 46, 71, 70, 111, 67, 68, 60, 55, 82, 62, 77, 81, 60, 78, 34, 61, 56, 54, 84, 92, 66, 57, 55, 70, 46, 57, 59, 72, 65, 68, 55, 79, 62, 53, 58, 62, 107, 57, 72, 63, 87, 53, 73, 73, 62, 61, 53, 84, 66, 59, 72, 101, 69, 79, 63, 38, 44, 81, 53, 69, 68, 106, 81, 104, 79, 61, 74, 62, 63, 51, 53, 121, 63, 85, 54, 69, 78, 63, 74, 65, 86, 64, 52, 68, 62, 64, 73, 92, 65, 69, 58, 56, 57, 67, 67, 63, 52, 67, 58, 56, 58, 65, 63, 84, 72, 55, 44, 59, 76, 53, 82, 75, 52, 85, 70, 95, 51, 51, 67, 80, 60, 55, 61, 57, 51, 92, 67, 59, 55, 73, 77, 79, 59, 76, 73, 54, 82, 61, 63, 62, 78, 53, 75, 75, 61, 61, 62, 48, 70, 54, 58, 52, 96, 65, 57, 62, 52, 61, 51, 71, 84, 73, 63, 51, 70, 79, 65, 73, 105, 58, 56, 50, 42, 68, 50, 55, 52, 70, 50, 76, 97, 47, 59, 78, 62, 67, 70, 101, 86, 69, 51, 63, 80, 71, 57, 79, 53, 71, 79, 43, 52, 56, 41, 80, 73, 56, 64, 50, 106, 49, 64, 72, 58, 44, 85, 44, 62, 38, 64, 59, 70, 60, 86, 71, 88, 74, 64, 67, 69, 61, 64, 66, 65, 63, 84, 120, 58, 78, 64, 66, 62, 33, 56, 56, 96, 51, 45, 67, 61, 81, 53, 65, 115, 49, 71, 85, 69, 44, 52, 42, 
57, 56, 113, 103, 59, 66, 60, 81, 53, 48, 50, 101, 88, 70, 79, 63, 70, 103, 58, 73, 51, 64, 64, 74, 86, 47, 100, 71, 66, 63, 67, 87, 72, 56, 69, 64, 79, 46, 45, 60, 50, 68, 50, 57, 74, 80, 78, 55, 59, 64, 47, 52, 61, 56, 65, 56, 72, 62, 87, 86, 64, 52, 85, 72, 100, 50, 55, 59, 69, 68, 104, 60, 76, 70, 87, 45, 55, 55, 78, 103, 73, 68, 47, 65, 69, 33, 99, 66, 73, 68, 56, 65, 52, 76, 66, 66, 47, 48, 50, 46, 72, 71, 59, 51, 49, 42, 67, 57, 68, 51, 54, 57, 75, 67, 60, 64, 64, 59, 70, 53, 72, 75, 58, 82, 62, 49, 69, 88, 52, 67, 66, 62, 54, 70, 59, 59, 106, 97, 54, 145, 71, 59, 69, 87, 61, 78, 80, 58, 73, 69, 80, 69, 73, 44, 59, 81, 66, 42, 65, 85, 55, 58, 70, 78, 52, 46, 72, 64, 73, 68, 87, 50, 55, 57, 76, 66, 28, 59, 84, 94, 56, 49, 79, 88, 71, 68, 81, 79, 74, 80, 56, 56, 90, 64, 37, 78, 57, 73, 60, 81, 66, 95, 50, 66, 42, 64, 41, 64, 80, 53, 74, 74, 63, 62, 73, 74, 73, 43, 59, 73, 69, 69, 58, 72, 79, 56, 68, 82, 65, 50, 75, 49, 59, 70, 71, 59, 67, 104, 90, 94, 80, 60, 68, 79, 75, 55, 85, 49, 81, 100, 66, 48, 66, 67, 74, 49, 68, 67, 69, 55, 56, 52, 65, 77, 76, 84, 59, 52, 53, 75, 117, 67, 79, 86, 52, 34, 93, 55, 75, 51, 64, 67, 61, 73, 58, 64, 46, 58, 83, 79, 57, 68, 78, 68, 73, 77, 125, 82, 52, 65, 78, 91, 93, 81, 57, 44, 75, 47, 49, 67, 58, 63, 53, 57, 69, 55, 66, 108, 75, 89, 91, 118, 60, 72, 74, 42, 91, 74, 56, 83, 58, 63, 77, 75, 69, 55, 52, 81, 56, 60, 55, 53, 78, 60, 72, 59, 57, 58, 63, 53, 64, 72, 60, 76, 42, 48, 65, 67, 61, 66, 73, 63, 67, 105, 76, 49, 66, 75, 89, 69, 51, 74, 55, 77, 70, 62, 50, 53, 91, 53, 86, 58, 65, 67, 58, 52, 58, 71, 49, 72, 56, 87, 55, 61, 81, 63, 64, 55, 43, 120, 60, 65, 68, 71, 65, 67, 66, 52, 56, 112, 75, 64, 38, 58, 61, 53, 59, 77, 64, 59, 35, 65, 60, 38, 60, 66, 76, 56, 66, 64, 46, 56, 75, 113, 63, 69, 85, 55, 56, 60, 53, 63, 92, 68, 48, 72, 56, 47, 71, 68, 54, 62, 63, 53, 79, 72, 82, 59, 74, 69, 79, 61, 61, 55, 55, 60, 55, 60, 80, 48, 51, 72, 42, 64, 57, 58, 75, 62, 73, 65, 75, 71, 64, 69, 73, 80, 77, 62, 44, 62, 76, 61, 52, 59, 72, 62, 85, 61, 98, 64, 86, 72, 61, 55, 65, 57, 58, 69, 59, 47, 54, 40, 67, 69, 69, 56, 70, 50, 74, 45, 50, 50, 75, 59, 48, 80, 68, 67, 62, 52, 71, 95, 82, 53, 84, 63, 52, 48, 71, 56, 46, 54, 58, 57, 65, 63, 69, 57, 63, 65, 68, 63, 70, 83, 77, 59, 76, 51, 45, 75, 65, 78, 53, 63, 65, 66, 60, 41, 83, 55, 72, 66, 54, 55, 49, 68, 41, 73, 53, 52, 59, 60, 65, 50, 44, 78, 57, 51, 67, 78, 60, 69, 47, 84, 62, 55, 74, 79, 109, 67, 75, 48, 58, 74, 41, 58, 53, 63, 52, 59, 81, 64, 53, 79, 58, 45, 71, 59, 41, 72, 56, 50, 77, 53, 64, 58, 87, 58, 70, 64, 79, 61, 75, 54, 55, 60, 47, 47, 79, 60, 54, 47, 66, 54, 64, 65, 61, 72, 45, 76, 95, 59, 76, 78, 82, 78, 68, 47, 70, 58, 70, 49, 54, 73, 55, 86, 69, 76, 100, 46, 60, 72, 89, 50, 60, 84, 73, 42, 62, 74, 70, 66, 63, 96, 46, 59, 65, 116, 109, 71, 57, 77, 56, 66, 42, 57, 85, 59, 85, 65, 58, 62, 69, 56, 70, 55, 62, 55, 60, 48, 72, 70, 63, 71, 62, 71, 57, 69, 63, 71, 72, 67, 65, 66, 78, 65, 66, 64, 86, 79, 65, 67, 82, 69, 68, 58, 48, 55, 52, 60, 62, 62, 74, 63, 62, 65, 59, 68, 71, 53, 61, 76, 64, 54, 70, 75, 60, 60, 79, 60, 74, 67, 87, 59, 62, 60, 86, 73, 54, 56, 58, 51, 70, 54, 65, 62, 62, 51, 64, 64, 79, 61, 54, 60, 80, 89, 60, 58, 64, 97, 53, 60, 70, 59, 71, 75, 54, 57, 86, 75, 64, 62, 59, 62, 65, 59, 46, 75, 66, 52, 59, 59, 67, 58, 63, 55, 65, 79, 51, 79, 56, 86, 79, 59, 64, 69, 70, 64, 59, 69, 72, 70, 111, 65, 104, 71, 69, 55, 56, 60, 64, 60, 56, 73, 49, 61, 65, 72, 56, 55, 62, 54, 55, 30, 67, 73, 64, 77, 62, 66, 60, 55, 61, 44, 60, 92, 63, 78, 70, 64, 61, 55, 61, 63, 64, 76, 55, 68, 56, 59, 65, 57, 
47, 57, 67, 62, 63, 69, 64, 54, 68, 53, 110, 50, 50, 70, 62, 76, 82, 58, 68, 68, 64, 71, 60, 68, 156, 65, 75, 60, 65, 55, 73, 59, 70, 67, 67, 60, 71, 70, 64, 72, 70, 65, 76, 91, 51, 73, 63, 60, 53, 69, 68, 61, 59, 75, 54, 118, 48, 56, 70, 66, 79, 72, 73, 62, 49, 65, 54, 62, 48, 64, 67, 63, 56, 75, 86, 77, 122, 63, 54, 52, 67, 73, 70, 57, 67, 73, 47, 51, 83, 64, 60, 55, 65, 55, 75, 54, 58, 58, 60, 72, 51, 66, 53, 54, 63, 67, 61, 49, 57, 76, 60, 56, 104, 62, 73, 71, 53, 63, 55, 58, 99, 63, 71, 72, 49, 62, 56, 60, 66, 51, 49, 75, 66, 60, 54, 59, 54, 53, 93, 64, 58, 71, 70, 68, 59, 69, 68, 67, 57, 64, 55, 57, 55, 58, 54, 83, 66, 63, 68, 60, 57, 57, 113, 77, 42, 53, 61, 60, 72, 89, 58, 77, 107, 98, 76, 47, 70, 63, 67, 65, 62, 68, 57, 56, 55, 76, 61, 50, 77, 47, 61, 67, 65, 65, 62, 62, 48, 48, 62, 65, 52, 87, 85, 64, 68, 68, 56, 71, 64, 59, 61, 64, 62, 52, 80, 83, 61, 62, 61, 54, 91, 67, 84, 60, 68, 61, 63, 50, 66, 63, 52, 58, 70, 50, 64, 68, 58, 63, 66, 69, 64, 70, 62, 61, 61, 61, 64, 60, 80, 59, 78, 120, 54, 65, 51, 65, 63, 63, 51, 65, 75, 60, 111, 52, 82, 78, 67, 62, 54, 58, 67, 68, 60, 48, 71, 41, 58, 54, 70, 78, 63, 64, 67, 68, 52, 67, 72, 50, 51, 71, 109, 60, 67, 87, 67, 56, 54, 75, 76, 75, 138, 75, 78, 58, 58, 64, 48, 73, 51, 70, 59, 67, 59, 61, 49, 56, 57, 51, 66, 84, 61, 81, 58, 64, 100, 73, 56, 61, 69, 101, 77, 61, 56, 57, 50, 67, 68, 59, 48, 58, 66, 67, 74, 65, 84, 66, 76, 55, 82, 52, 48, 72, 59, 65, 59, 56, 66, 70, 71, 82, 47, 70, 66, 51, 62, 56, 60, 55, 57, 79, 52, 86, 78, 64, 58, 65, 60, 46, 53, 75, 94, 71, 62, 65, 53, 64, 66, 74, 68, 62, 63, 55, 72, 58, 57, 98, 41, 62, 64, 76, 64, 61, 63, 80, 63, 70, 60, 52, 66, 49, 79, 58, 61, 64, 43, 56, 64, 103, 58, 61, 58, 57, 83, 57, 56, 80, 66, 73, 51, 61, 75, 42, 59, 66, 63, 56, 62, 55, 58, 82, 60, 54, 44, 54, 65, 58, 59, 62, 62, 59, 61, 49, 60, 60, 64, 76, 43, 72, 67, 81, 67, 88, 61, 73, 61, 59, 71, 69, 66, 77, 70, 92, 64, 77, 77, 58, 60, 60, 73, 71, 70, 75, 62, 47, 52, 68, 68, 79, 77, 52, 51, 118, 65, 80, 53, 67, 65, 53, 67, 81, 59, 62, 83, 49, 61, 68, 52, 63, 70, 63, 72, 60, 53, 67, 67, 125, 55, 69, 76, 64, 72, 59, 78, 47, 70, 61, 62, 63, 76, 52, 56, 97, 79, 73, 63, 72, 89, 68, 89, 54, 59, 73, 60, 65, 52, 75, 59, 57, 67, 62, 59, 62, 71, 72, 88, 58, 60, 61, 61, 80, 62, 60, 58, 61, 67, 68, 57, 51, 62, 59, 62, 61, 72, 60, 68, 64, 62, 45, 54, 66, 63, 60, 60, 66, 42, 105, 63, 94, 53, 71, 79, 47, 65, 60, 65, 57, 83, 76, 83, 54, 90, 66, 56, 76, 63, 72, 76, 74, 64, 74, 66, 82, 55, 65, 66, 70, 60, 72, 59, 54, 55, 61, 59, 55, 60, 63, 95, 70, 62, 85, 66, 84, 61, 76, 65, 53, 74, 74, 57, 79, 71, 77, 61, 50, 58, 47, 55, 64, 55, 55, 55, 66, 65, 61, 62, 67, 44, 73, 78, 61, 56, 54, 83, 69, 67, 89, 81, 98, 73, 58, 82, 71, 71, 81, 71, 54, 62, 81, 69, 53, 58, 50, 77, 99, 71, 69, 64, 65, 62, 69, 60, 77, 64, 83, 64, 58, 59, 61, 71, 72, 59, 75, 62, 84, 59, 62, 53, 68, 65, 65, 56, 65, 91, 58, 51, 61, 45, 64, 56, 61, 68, 60, 51, 79, 62, 63, 78, 66, 74, 59, 66, 64, 58, 60, 59, 56, 70, 59, 49, 71, 71, 61, 56, 88, 83, 61, 57, 68, 63, 44, 61, 72, 69, 63, 63, 68, 69, 68, 58, 71, 77, 85, 72, 88, 60, 69, 69, 62, 64, 71, 61, 60, 67, 62, 64, 69, 58, 59, 69, 58, 68, 62, 75, 53, 62, 98, 66, 59, 58, 49, 68, 60, 63, 82, 67, 79, 56, 57, 66, 119, 77, 59, 72, 103, 53, 63, 64, 53, 62, 63, 60, 75, 68, 70, 49, 77, 78, 76, 75, 67, 64, 60, 102, 63, 67, 58, 63, 90, 63, 40, 64, 67, 60, 73, 100, 79, 54, 48, 64, 65, 57, 67, 64, 60, 85, 55, 55, 62, 57, 79, 53, 68, 59, 76, 64, 61, 52, 65, 64, 65, 75, 71, 69, 51, 65, 45, 78, 67, 50, 87, 80, 89, 56, 89, 57, 55, 69, 56, 69, 59, 81, 72, 60, 
73, 72, 67, 53, 75, 59, 134, 85, 54, 71, 62, 56, 61, 60, 111, 61, 64, 66, 68, 77, 46, 51, 78, 65, 55, 47, 47, 76, 69, 53, 58, 73, 61, 56, 66, 63, 90, 70, 66, 52, 64, 68, 66, 86, 57, 68, 51, 66, 74, 92, 76, 63, 66, 67, 56, 71, 60, 54, 70, 41, 60, 80, 60, 92, 67, 56, 59, 70, 62, 72, 58, 70, 68, 76, 69, 82, 59, 58, 58, 61, 55, 56, 69, 65, 78, 89, 59, 46, 72, 69, 46, 77, 120, 62, 75, 54, 76, 80, 60, 72, 74, 69, 58, 65, 58, 71, 57, 58, 75, 67, 76, 52, 57, 64, 60, 60, 69, 68, 91, 89, 53, 57, 66, 58, 89, 70, 53, 68, 62, 67, 65, 56, 62, 46, 48, 54, 61, 72, 54, 69, 131, 54, 64, 55, 63, 63, 58, 67, 55, 93, 66, 77, 52, 60, 58, 58, 53, 55, 69, 67, 66, 74, 49, 57, 59, 36, 69, 60, 77, 63, 92, 66, 61, 60, 69, 58, 57, 65, 65, 48, 66, 59, 76, 63, 48, 56, 85, 64, 58, 71, 77, 68, 67, 72, 57, 86, 56, 73, 89, 55, 54, 60, 90, 61, 75, 75, 87, 100, 65, 73, 51, 87, 66, 65, 69, 101, 69, 63, 61, 77, 58, 75, 54, 71, 57, 94, 54, 70, 66, 63, 66, 50, 68, 54, 68, 60, 85, 58, 61, 67, 65, 57, 63, 48, 49, 53, 66, 67, 60, 69, 107, 69, 59, 67, 56, 59, 90, 67, 78, 66, 62, 73, 73, 76, 94, 66, 58, 82, 55, 55, 56, 56, 71, 95, 65, 36, 96, 51, 65, 66, 68, 63, 54, 57, 63, 90, 73, 58, 61, 62, 95, 59, 109, 56, 70, 68, 59, 60, 70, 77, 61, 70, 89, 79, 58, 75, 57, 63, 59, 68, 55, 58, 66, 50, 58, 64, 71, 58, 84, 45, 104, 71, 60, 69, 66, 69, 64, 63, 33, 55, 79, 67, 61, 58, 55, 51, 105, 69, 121, 74, 55, 70, 59, 69, 62, 61, 74, 65, 65, 82, 60, 51, 56, 59, 64, 56, 65, 56, 57, 66, 63, 77, 63, 72, 64, 59, 60, 60, 96, 66, 61, 59, 63, 55, 47, 80, 72, 61, 64, 86, 51, 58, 59, 64, 64, 61, 90, 65, 76, 55, 71, 82, 56, 61, 70, 57, 61, 66, 55, 58, 67, 55, 66, 67, 69, 65, 52, 68, 64, 61, 80, 57, 67, 45, 54, 66, 97, 59, 73, 63, 60, 60, 86, 62, 52, 53, 49, 52, 56, 65, 57, 70, 69, 91, 76, 55, 66, 49, 71, 85, 58, 56, 82, 61, 72, 52, 73, 98, 61, 67, 79, 58, 42, 49, 64, 61, 99, 62, 65, 63, 68, 71, 39, 63, 66, 50, 64, 70, 53, 66, 46, 67, 53, 77, 61, 61, 61, 117, 55, 79, 63, 64, 83, 64, 90, 81, 46, 54, 59, 52, 58, 48, 61, 57, 63, 63, 63, 52, 73, 49, 83, 68, 64, 59, 66, 63, 62, 67, 61, 63, 96, 81, 71, 65, 76, 65, 55, 60, 49, 95, 63, 41, 62, 61, 56, 54, 56, 48, 54, 63, 56, 51, 49, 49, 66, 103, 47, 90, 61, 75, 80, 71, 52, 55, 52, 71, 76, 66, 63, 79, 74, 61, 54, 62, 89, 68, 61, 85, 74, 74, 66, 59, 75, 85, 44, 74, 60, 67, 69, 59, 67, 61, 56, 97, 85, 56, 43, 53, 85, 61, 85, 92, 58, 63, 51, 66, 54, 63, 55, 53, 60, 57, 85, 64, 54, 57, 96, 66, 58, 50, 63, 60, 74, 77, 86, 89, 76, 53, 62, 66, 90, 60, 68, 54, 74, 57, 54, 64, 82, 56, 73, 64, 56, 68, 69, 60, 62, 65, 105, 83, 64, 42, 61, 63, 103, 66, 63, 48, 87, 54, 52, 58, 64, 119, 59, 57, 60, 51, 53, 68, 61, 71, 70, 85, 70, 63, 51, 47, 95, 68, 64, 61, 62, 53, 65, 62, 67, 61, 73, 63, 67, 65, 116, 72, 60, 72, 58, 61, 64, 61, 50, 66, 68, 68, 66, 57, 76, 68, 54, 64, 53, 63, 64, 84, 65, 68, 69, 91, 59, 57, 101, 59, 51, 53, 94, 76, 58, 58, 58, 60, 75, 71, 71, 46, 53, 60, 59, 58, 60, 73, 65, 59, 62, 55, 72, 69, 60, 70, 78, 67, 66, 96, 58, 60, 59, 72, 55, 59, 57, 46, 130, 83, 64, 61, 76, 65, 55, 68, 55, 64, 81, 66, 74, 68, 68, 69, 75, 106, 57, 76, 57, 58, 59, 63, 58, 63, 56, 63, 77, 60, 55, 78, 58, 59, 55, 72, 62, 65, 117, 66, 61, 67, 56, 50, 61, 47, 83, 70, 55, 62, 70, 62, 68, 62, 55, 71, 93, 63, 65, 67, 63, 66, 92, 59, 106, 78, 55, 61, 56, 59, 66, 76, 80, 65, 61, 55, 59, 59, 66, 51, 63, 61, 63, 42, 65, 89, 67, 59, 56, 60, 62, 62, 70, 63, 51, 69, 61, 64, 64, 58, 62, 68, 102, 62, 60, 56, 54, 57, 64, 57, 54, 60, 71, 54, 51, 65, 79, 61, 60, 70, 55, 61, 61, 70, 59, 55, 52, 73, 55, 66, 68, 54, 60, 60, 59, 64, 56, 57, 63, 68, 66, 58, 57, 
85, 53, 59, 67, 58, 54, 77, 66, 57, 74, 65, 64, 70, 62, 78, 71, 51, 68, 48, 83, 56, 81, 63, 97, 92, 63, 57, 68, 73, 60, 58, 66, 78, 57, 73, 66, 66, 53, 69, 66, 67, 47, 56, 76, 72, 65, 67, 67, 54, 54, 73, 42, 63, 61, 81, 64, 71, 56, 67, 62, 73, 56, 60, 65, 58, 63, 65, 93, 60, 61, 56, 64, 68, 75, 64, 67, 59, 59, 66, 72, 81, 67, 65, 61, 68, 53, 54, 62, 87, 60, 121, 63, 59, 62, 71, 61, 58, 59, 74, 60, 62, 48, 62, 53, 64, 67, 68, 50, 58, 69, 71, 49, 88, 33, 65, 59, 65, 62, 61, 54, 50, 55, 60, 119, 55, 54, 51, 56, 77, 81, 76, 69, 66, 58, 60, 67, 35, 56, 69, 80, 81, 61, 62, 66, 65, 54, 61, 81, 60, 63, 60, 55, 60, 62, 69, 62, 58, 49, 84, 56, 58, 83, 87, 75, 54, 53, 53, 67, 59, 72, 88, 54, 63, 71, 70, 52, 83, 49, 73, 77, 58, 91, 60, 75, 59, 67, 72, 76, 67, 61, 63, 54, 69, 38, 70, 78, 61, 50, 90, 61, 76, 89, 48, 56, 57, 103, 61, 151, 68, 88, 66, 71, 62, 56, 60, 64, 89, 60, 65, 83, 66, 71, 62, 69, 124, 74, 61, 59, 58, 68, 74, 57, 101, 51, 56, 51, 65, 64, 55, 69, 81, 54, 56, 66, 60, 67, 71, 39, 63, 84, 60, 56, 81, 80, 102, 59, 65, 71, 59, 53, 59, 79, 70, 45, 46, 71, 80, 97, 76, 65, 62, 83, 63, 38, 59, 81, 70, 72, 61, 61, 64, 70, 58, 53, 78, 71, 62, 59, 50, 80, 58, 79, 40, 88, 74, 68, 64, 71, 81, 49, 57, 44, 74, 57, 85, 86, 52, 58, 63, 82, 61, 58, 95, 66, 61, 70, 51, 68, 54, 69, 75, 79, 56, 45, 77, 54, 53, 82, 50, 103, 83, 61, 50, 62, 82, 75, 73, 72, 65, 50, 50, 41, 62, 45, 59, 85, 60, 104, 64, 84, 68, 58, 82, 54, 69, 90, 81, 55, 52, 58, 79, 62, 72, 62, 75, 80, 71, 77, 75, 54, 72, 63, 102, 58, 75, 57, 74, 59, 57, 71, 79, 59, 65, 75, 56, 55, 25, 47, 82, 61, 50, 76, 55, 62, 98, 68, 71, 73, 45, 58, 61, 66, 72, 60, 75, 46, 67, 72, 85, 61, 70, 83, 66, 63, 95, 63, 55, 73, 96, 52, 64, 48, 78, 47, 72, 65, 99, 65, 46, 59, 62, 92, 55, 67, 73, 72, 56, 51, 90, 73, 55, 59, 59, 47, 74, 56, 62, 61, 58, 70, 60, 60, 71, 80, 86, 66, 56, 81, 65, 70, 71, 59, 58, 47, 70, 59, 66, 70, 100, 63, 49, 82, 44, 70, 73, 68, 85, 44, 98, 85, 122, 63, 44, 66, 62, 98, 46, 83, 106, 41, 62, 54, 64, 54, 73, 60, 42, 62, 56, 68, 39, 52, 56, 43, 56, 39, 98, 64, 66, 189, 57, 85, 60, 54, 63, 56, 58, 68, 40, 75, 96, 45, 139, 69, 51, 93, 80, 84, 60, 51, 82, 60, 66, 69, 67, 85, 61, 64, 59, 78, 79, 68, 77, 71, 68, 59, 99, 64, 59, 57, 61, 77, 69, 60, 58, 64, 58, 48, 46, 49, 61, 50, 94, 79, 49, 69, 56, 82, 81, 64, 59, 80, 71, 48, 56, 68, 82, 68, 59, 69, 70, 58, 56, 96, 61, 74, 70, 53, 72, 66, 66, 65, 88, 68, 110, 41, 54, 64, 64, 70, 71, 69, 66, 47, 83, 67, 77, 82, 64, 60, 77, 80, 57, 56, 88, 57, 59, 69, 60, 50, 74, 56, 81, 69, 60, 63, 57, 66, 47, 58, 63, 76, 65, 57, 85, 67, 75, 53, 57, 69, 67, 60, 43, 47, 57, 71, 81, 79, 51, 79, 73, 72, 56, 66, 73, 55, 72, 50, 94, 73, 52, 63, 51, 99, 75, 65, 58, 61, 70, 54, 43, 63, 61, 68, 69, 50, 46, 72, 64, 53, 78, 74, 61, 69, 76, 81, 62, 69, 62, 80, 53, 67, 50, 69, 71, 79, 55, 61, 57, 66, 55, 57, 123, 75, 60, 55, 67, 51, 53, 75, 35, 47, 50, 64, 75, 86, 40, 74, 68, 61, 61, 63, 67, 50, 88, 75, 61, 124, 51, 47, 65, 66, 63, 68, 75, 60, 53, 61, 84, 51, 68, 54, 63, 77, 79, 86, 77, 58, 72, 59, 67, 52, 53, 83, 74, 123, 59, 45, 46, 51, 69, 62, 85, 55, 99, 64, 53, 59, 134, 77, 76, 62, 57, 52, 75, 72, 65, 52, 63, 70, 48, 81, 52, 60, 66, 96, 91, 66, 67, 47, 71, 63, 41, 55, 68, 63, 65, 48, 69, 56, 72, 49, 58, 69, 56, 67, 78, 59, 105, 65, 63, 62, 77, 52, 65, 67, 76, 66, 50, 52, 52, 64, 60, 61, 60, 89, 64, 68, 63, 68, 61, 51, 72, 57, 76, 69, 86, 34, 55, 59, 52, 70, 69, 61, 55, 58, 79, 79, 50, 87, 85, 47, 60, 55, 108, 59, 66, 70, 53, 56, 80, 62, 80, 71, 55, 71, 71, 73, 52, 56, 39, 62, 63, 60, 87, 49, 60, 46, 48, 69, 65, 41, 67, 
57, 63, 85, 57, 68, 72, 69, 59, 62, 52, 66, 40, 64, 92, 58, 55, 67, 59, 68, 52, 94, 67, 71, 86, 63, 62, 90, 70, 52, 56, 73, 65, 78, 67, 68, 71, 61, 56, 75, 113, 80, 68, 68, 55, 64, 55, 73, 82, 68, 82, 72, 57, 102, 46, 58, 65, 53, 56, 60, 70, 60, 63, 55, 59, 47, 58, 75, 69, 77, 64, 67, 67, 38, 91, 83, 84, 66, 61, 50, 62, 45, 61, 58, 82, 73, 71, 94, 75, 74, 44, 54, 71, 46, 57, 82, 71, 73, 96, 72, 62, 67, 60, 97, 115, 47, 78, 86, 63, 75, 69, 84, 77, 89, 96, 63, 91, 55, 79, 42, 70, 60, 70, 82, 58, 56, 56, 46, 58, 62, 70, 45, 78, 51, 60, 71, 62, 76, 73, 71, 70, 81, 70, 73, 36, 55, 68, 86, 55, 45, 80, 59, 56, 60, 55, 55, 85, 120, 55, 74, 63, 55, 72, 63, 68, 64, 72, 64, 91, 68, 65, 84, 68, 68, 74, 59, 57, 64, 56, 55, 48, 61, 95, 66, 64, 49, 54, 56, 64, 82, 51, 64, 68, 64, 63, 67, 68, 69, 99, 73, 57, 56, 66, 71, 75, 73, 50, 72, 65, 61, 67, 50, 58, 83, 72, 59, 73, 69, 69, 49, 56, 65, 57, 52, 64, 61, 78, 80, 66, 86, 131, 67, 60, 83, 75, 86, 87, 63, 66, 50, 46, 62, 94, 55, 58, 64, 51, 75, 59, 60, 77, 65, 83, 48, 91, 61, 57, 70, 52, 76, 77, 58, 82, 66, 71, 81, 67, 76, 85, 77, 45, 49, 56, 47, 53, 70, 57, 60, 68, 59, 80, 61, 55, 76, 48, 64, 60, 41, 66, 70, 78, 83, 55, 142, 68, 75, 69, 57, 81, 46, 60, 85, 60, 63, 76, 111, 65, 54, 68, 71, 64, 215, 81, 53, 68, 71, 56, 68, 49, 43, 66, 69, 70, 64, 57, 52, 55, 48, 60, 68, 70, 72, 56, 58, 64, 80, 67, 52, 68, 66, 74, 71, 62, 60, 73, 71, 70, 74, 65, 68, 55, 98, 74, 89, 74, 77, 70, 70, 59, 57, 56, 50, 76, 68, 53, 57, 63, 57, 39, 81, 64, 63, 72, 87, 88, 50, 49, 62, 47, 70, 81, 56, 67, 53, 69, 59, 65, 65, 51, 56, 89, 54, 85, 70, 63, 47, 49, 65, 48, 56, 77, 47, 56, 82, 54, 65, 62, 59, 48, 45, 85, 71, 41, 48, 51, 71, 75, 78, 64, 71, 63, 64, 57, 70, 56, 71, 38, 97, 68, 61, 52, 68, 60, 67, 63, 58, 60, 91, 55, 69, 41, 64, 74, 84, 75, 61, 69, 59, 77, 71, 60, 60, 81, 51, 60, 69, 77, 56, 63, 62, 53, 91, 44, 53, 69, 40, 75, 60, 69, 54, 121, 73, 74, 49, 51, 74, 84, 82, 110, 67, 77, 79, 64, 42, 52, 73, 89, 52, 79, 67, 62, 66, 49, 67, 76, 67, 76, 78, 78, 76, 57, 76, 85, 85, 68, 83, 58, 62, 50, 104, 56, 67, 43, 55, 42, 78, 85, 61, 63, 58, 124, 74, 61, 74, 55, 85, 71, 80, 74, 76, 82, 45, 82, 82, 100, 56, 77, 54, 45, 73, 61, 76, 77, 60, 50, 52, 85, 61, 57, 76, 69, 65, 78, 55, 61, 43, 64, 63, 63, 49, 81, 62, 53, 61, 57, 55, 75, 46, 63, 52, 98, 69, 64, 64, 52, 78, 81, 63, 84, 51, 62, 60, 78, 60, 68, 59, 66, 59, 48, 63, 61, 36, 53, 69, 57, 60, 59, 68, 72, 78, 48, 47, 51, 68, 69, 58, 68, 63, 57, 36, 54, 67, 61, 53, 59, 73, 60, 69, 56, 82, 53, 64, 78, 56, 82, 60, 66, 75, 87, 64, 62, 70, 129, 63, 61, 47, 59, 57, 67, 57, 93, 96, 71, 52, 58, 50, 49, 65, 74, 95, 55, 73, 41, 66, 68, 72, 69, 80, 74, 66, 60, 70, 92, 57, 76, 55, 81, 67, 68, 53, 112, 54, 59, 65, 58, 58, 53, 77, 84, 67, 76, 64, 56, 54, 61, 68, 71, 62, 83, 62, 76, 65, 44, 50, 64, 72, 71, 63, 61, 83, 70, 85, 51, 61, 60, 65, 69, 109, 52, 82, 74, 43, 62, 70, 68, 86, 49, 73, 83, 46, 85, 58, 68, 64, 84, 64, 70, 53, 88, 55, 86, 44, 72, 80, 75, 61, 65, 66, 67, 59, 79, 46, 53, 56, 74, 91, 69, 76, 48, 77, 45, 47, 63, 56, 60, 60, 73, 48, 78, 86, 67, 52, 67, 67, 59, 74, 74, 67, 47, 68, 80, 64, 69, 71, 41, 87, 67, 59, 90, 80, 50, 60, 53, 61, 80, 66, 58, 47, 55, 60, 82, 73, 61, 73, 58, 54, 65, 55, 49, 65, 56, 57, 64, 63, 66, 51, 73, 62, 60, 47, 65, 66, 42, 59, 59, 60, 63, 61, 67, 55, 54, 76, 61, 43, 72, 90, 53, 67, 66, 68, 68, 64, 61, 69, 61, 58, 56, 70, 95, 74, 64, 56, 59, 68, 94, 67, 107, 71, 113, 55, 44, 66, 76, 55, 63, 61, 44, 66, 73, 96, 110, 68, 54, 51, 80, 44, 73, 40, 55, 53, 52, 60, 76, 76, 56, 51, 54, 78, 62, 66, 44, 80, 34, 68, 59, 
70, 67, 80, 78, 73, 47, 65, 77, 82, 95, 48, 46, 68, 31, 63, 84, 70, 61, 70, 86, 49, 48, 60, 60, 53, 52, 85, 72, 69, 63, 57, 71, 51, 62, 94, 75, 58, 69, 60, 59, 41, 75, 73, 60, 75, 107, 59, 59, 46, 60, 55, 67, 72, 55, 78, 68, 87, 62, 61, 73, 82, 55, 54, 81, 70, 57, 51, 70, 56, 95, 31, 49, 69, 103, 45, 50, 71, 101, 74, 72, 65, 68, 63, 47, 85, 60, 43, 48, 48, 67, 62, 48, 58, 72, 71, 60, 73, 65, 60, 61, 67, 65, 82, 56, 75, 50, 61, 52, 58, 69, 61, 114, 70, 63, 39, 52, 57, 54, 67, 61, 64, 81, 62, 81, 84, 53, 62, 57, 87, 42, 54, 63, 67, 60, 66, 66, 65, 95, 65, 80, 57, 55, 68, 65, 74, 91, 67, 64, 63, 44, 101, 56, 40, 58, 76, 107, 73, 104, 40, 46, 64, 88, 79, 105, 93, 67, 60, 53, 61, 65, 60, 69, 57, 67, 69, 72, 58, 82, 56, 53, 53, 64, 65, 66, 82, 71, 61, 64, 76, 67, 68, 64, 78, 67, 59, 104, 58, 92, 65, 69, 66, 64, 74, 80, 79, 55, 63, 60, 70, 67, 86, 65, 58, 68, 69, 58, 69, 55, 69, 82, 52, 64, 58, 79, 63, 55, 51, 58, 65, 74, 93, 137, 72, 51, 51, 70, 81, 70, 56, 68, 59, 68, 77, 63, 66, 54, 92, 72, 84, 77, 141, 80, 81, 76, 81, 86, 56, 51, 59, 80, 60, 93, 51, 34, 72, 75, 48, 67, 109, 61, 51, 49, 83, 79, 68, 66, 62, 43, 89, 61, 110, 74, 54, 51, 75, 77, 70, 74, 68, 57, 46, 91, 68, 91, 64, 48, 59, 81, 75, 58, 76, 56, 55, 70, 79, 76, 50, 81, 85, 75, 57, 45, 89, 66, 66, 86, 51, 44, 86, 51, 66, 66, 54, 62, 75, 67, 68, 69, 51, 46, 77, 64, 64, 43, 81, 69, 57, 90, 68, 73, 63, 60, 61, 48, 68, 60, 48, 77, 64, 77, 77, 84, 43, 60, 77, 67, 65, 64, 61, 68, 66, 65, 64, 97, 65, 77, 44, 60, 51, 45, 71, 73, 61, 69, 73, 67, 64, 103, 64, 80, 55, 90, 51, 53, 68, 48, 67, 66, 65, 76, 58, 82, 73, 76, 53, 71, 53, 55, 39, 62, 84, 78, 71, 73, 67, 56, 64, 61, 76, 67, 95, 59, 62, 76, 60, 60, 58, 67, 62, 60, 56, 67, 81, 78, 99, 71, 54, 74, 93, 51, 53, 63, 62, 51, 53, 84, 57, 66, 70, 70, 52, 58, 58, 68, 55, 63, 50, 73, 39, 67, 61, 67, 65, 58, 80, 87, 145, 76, 46, 60, 49, 67, 53, 79, 65, 72, 86, 63, 64, 72, 46, 59, 94, 63, 51, 69, 53, 49, 61, 61, 60, 139, 60, 61, 54, 88, 66, 78, 80, 68, 58, 56, 53, 85, 63, 69, 61, 64, 52, 57, 60, 108, 73, 48, 49, 75, 68, 90, 79, 43, 73, 74, 77, 51, 50, 68, 70, 62, 52, 68, 63, 72, 50, 53, 53, 41, 72, 60, 71, 70, 55, 52, 51, 53, 83, 67, 81, 58, 45, 56, 69, 63, 64, 120, 62, 63, 63, 63, 64, 45, 55, 57, 96, 53, 66, 57, 55, 67, 61, 48, 55, 78, 72, 68, 80, 78, 70, 62, 66, 77, 65, 48, 61, 51, 150, 63, 77, 69, 89, 58, 51, 62, 80, 51, 69, 41, 69, 85, 57, 61, 55, 55, 80, 36, 61, 48, 38, 66, 89, 47, 55, 55, 87, 64, 69, 56, 47, 55, 62, 56, 73, 49, 76, 61, 62, 52, 49, 70, 72, 61, 90, 76, 58, 59, 64, 67, 99, 57, 38, 60, 71, 61, 64, 82, 53, 52, 68, 57, 79, 96, 70, 62, 67, 68, 67, 79, 63, 50, 68, 71, 48, 58, 46, 70, 36, 55, 54, 49, 64, 75, 63, 47, 87, 80, 48, 87, 87, 130, 74, 42, 82, 73, 58, 105, 61, 90, 52, 71, 63, 79, 92, 62, 39, 68, 51, 44, 70, 70, 49, 72, 71, 67, 82, 100, 76, 71, 54, 115, 58, 50, 50, 45, 52, 63, 73, 47, 45, 64, 51, 51, 62, 72, 71, 72, 66, 72, 39, 66, 57, 84, 56, 74, 53, 50, 76, 80, 69, 80, 76, 86, 59, 67, 79, 81, 50, 50, 57, 60, 54, 59, 85, 66, 71, 59, 49, 66, 65, 75, 54, 78, 103, 60, 73, 59, 55, 70, 58, 72, 61, 59, 63, 81, 67, 74, 93, 67, 66, 59, 57, 66, 75, 66, 66, 53, 85, 48, 72, 39, 84, 56, 74, 70, 65, 61, 58, 61, 85, 52, 75, 64, 92, 96, 54, 48, 62, 76, 73, 59, 88, 64, 53, 46, 66, 69, 49, 115, 58, 66, 62, 75, 123, 66, 55, 44, 64, 67, 55, 67, 55, 59, 74, 53, 39, 70, 59, 64, 70, 90, 67, 58, 67, 59, 110, 51, 67, 70, 52, 83, 87, 72, 94, 62, 55, 51, 42, 51, 55, 68, 78, 66, 64, 58, 54, 59, 68, 49, 55, 105, 57, 51, 44, 50, 61, 79, 58, 118, 58, 56, 79, 84, 46, 63, 49, 55, 58, 85, 59, 56, 97, 61, 58, 
52, 75, 70, 50, 78, 69, 70, 97, 76, 58, 59, 68, 83, 59, 81, 72, 67, 92, 80, 83, 82, 75, 62, 82, 74, 75, 46, 53, 60, 79, 59, 71, 55, 78, 67, 56, 78, 47, 62, 51, 74, 45, 75, 76, 101, 48, 56, 56, 50, 63, 74, 54, 65, 71, 65, 69, 62, 72, 87, 64, 61, 76, 75, 71, 82, 59, 61, 57, 63, 57, 74, 60, 58, 61, 76, 95, 54, 48, 43, 74, 77, 66, 61, 63, 91, 75, 80, 70, 49, 88, 71, 54, 71, 69, 130, 74, 64, 49, 53, 101, 78, 53, 64, 68, 102, 58, 69, 54, 63, 60, 67, 57, 56, 55, 60, 60, 61, 68, 59, 98, 152, 91, 79, 71, 54, 64, 56, 66, 75, 69, 52, 77, 47, 63, 70, 68, 53, 75, 66, 29, 81, 73, 80, 67, 71, 51, 58, 109, 62, 64, 63, 95, 59, 54, 58, 102, 81, 79, 87, 60, 71, 63, 56, 74, 104, 91, 66, 65, 74, 49, 50, 63, 40, 90, 55, 117, 51, 67, 62, 148, 100, 62, 51, 57, 61, 57, 46, 114, 61, 81, 77, 60, 72, 58, 73, 72, 64, 69, 91, 70, 54, 67, 56, 80, 45, 56, 65, 59, 63, 68, 86, 64, 79, 68, 64, 68, 91, 61, 61, 61, 51, 57, 72, 67, 58, 79, 68, 54, 77, 60, 71, 76, 67, 62, 61, 61, 62, 55, 70, 67, 63, 74, 62, 101, 86, 62, 56, 74, 64, 49, 72, 52, 44, 54, 62, 50, 64, 80, 63, 66, 91, 66, 83, 59, 71, 65, 45, 56, 70, 120, 55, 69, 72, 52, 51, 70, 78, 65, 59, 68, 67, 51, 46, 59, 43, 56, 56, 75, 42, 53, 61, 67, 59, 55, 63, 95, 46, 68, 63, 88, 62, 78, 65, 109, 63, 48, 76, 51, 49, 79, 55, 66, 68, 89, 88, 60, 63, 64, 73, 72, 55, 73, 65, 73, 43, 81, 59, 61, 59, 64, 66, 81, 62, 62, 53, 53, 60, 70, 64, 63, 66, 62, 60, 59, 78, 66, 77, 74, 56, 69, 61, 62, 54, 66, 68, 67, 51, 64, 59, 74, 71, 74, 76, 61, 68, 66, 61, 64, 66, 63, 63, 83, 54, 64, 59, 63, 58, 78, 60, 56, 75, 74, 64, 63, 38, 53, 57, 60, 87, 66, 61, 75, 43, 51, 58, 62, 64, 39, 58, 71, 61, 49, 68, 53, 62, 65, 62, 49, 59, 74, 46, 64, 51, 58, 61, 119, 66, 58, 71, 47, 85, 52, 53, 59, 60, 68, 60, 73, 62, 63, 66, 69, 70, 77, 46, 45, 55, 68, 86, 57, 53, 66, 117, 60, 47, 57, 66, 55, 62, 49, 47, 82, 71, 63, 68, 90, 85, 48, 60, 70, 73, 50, 62, 71, 70, 57, 91, 65, 74, 62, 51, 66, 58, 55, 82, 53, 81, 43, 24, 63, 69, 61, 68, 80, 68, 70, 62, 65, 49, 42, 71, 65, 60, 65, 56, 44, 68, 83, 70, 66, 69, 65, 51, 69, 40, 64, 44, 56, 53, 49, 67, 52, 90, 60, 56, 96, 84, 56, 69, 59, 90, 50, 52, 57, 50, 55, 61, 79, 68, 69, 61, 72, 56, 63, 65, 55, 55, 71, 48, 73, 62, 62, 67, 80, 61, 127, 68, 89, 72, 82, 70, 58, 46, 70, 43, 76, 61, 54, 69, 58, 68, 83, 44, 60, 58, 68, 76, 64, 71, 100, 71, 73, 43, 85, 47, 89, 59, 66, 55, 54, 76, 76, 62, 59, 95, 62, 69, 67, 56, 68, 63, 77, 64, 46, 67, 80, 60, 55, 63, 64, 51, 56, 57, 72, 57, 75, 60, 77, 91, 75, 68, 73, 56, 65, 58, 66, 85, 75, 54, 84, 78, 59, 55, 64, 58, 91, 67, 65, 60, 63, 70, 65, 72, 60, 75, 56, 54, 73, 99, 80, 163, 64, 69, 78, 91, 71, 75, 119, 52, 78, 55, 94, 65, 66, 49, 55, 64, 68, 111, 64, 46, 58, 90, 55, 50, 82, 66, 59, 70, 72, 69, 63, 66, 59, 57, 61, 53, 64, 68, 39, 57, 76, 52, 49, 57, 54, 55, 64, 58, 61, 79, 82, 82, 77, 68, 57, 72, 95, 62, 47, 65, 86, 64, 73, 60, 58, 61, 68, 57, 63, 61, 58, 47, 93, 68, 53, 69, 64, 60, 47, 66, 60, 65, 106, 68, 77, 60, 49, 52, 78, 60, 64, 67, 64, 53, 55, 81, 58, 68, 69, 58, 88, 62, 89, 64, 71, 100, 91, 64, 63, 53, 68, 60, 63, 86, 60, 65, 84, 52, 83, 68, 78, 97, 71, 57, 55, 58, 80, 63, 60, 70, 63, 52, 64, 75, 95, 59, 68, 53, 59, 63, 51, 48, 70, 71, 49, 69, 60, 83, 67, 71, 67, 71, 58, 36, 79, 120, 73, 73, 58, 45, 55, 58, 75, 48, 78, 60, 59, 57, 66, 69, 59, 57, 59, 58, 75, 56, 75, 60, 83, 50, 67, 72, 53, 63, 61, 57, 52, 76, 60, 67, 86, 47, 56, 73, 66, 48, 82, 60, 65, 56, 59, 55, 82, 73, 77, 61, 72, 76, 60, 74, 29, 77, 70, 67, 53, 67, 77, 108, 90, 61, 58, 55, 55, 65, 72, 57, 53, 54, 64, 67, 73, 93, 74, 62, 63, 57, 55, 63, 78, 85, 55, 
65, 66, 63, 61, 58, 48, 85, 70, 64, 93, 75, 65, 63, 62, 58, 56, 69, 53, 73, 63, 63, 70, 68, 55, 54, 51, 48, 53, 61, 57, 87, 64, 64, 61, 88, 52, 53, 60, 59, 86, 104, 58, 69, 57, 71, 65, 57, 60, 53, 70, 59, 57, 64, 72, 86, 70, 51, 80, 51, 65, 74, 67, 58, 67, 63, 69, 68, 61, 47, 56, 68, 66, 59, 62, 55, 45, 50, 68, 79, 59, 75, 46, 97, 61, 71, 60, 74, 67, 55, 73, 48, 54, 57, 55, 57, 62, 51, 86, 70, 75, 49, 74, 47, 58, 72, 60, 57, 73, 72, 53, 72, 52, 58, 69, 95, 78, 93, 110, 56, 110, 61, 60, 58, 57, 67, 87, 61, 67, 46, 54, 79, 59, 55, 65, 75, 87, 69, 51, 63, 57, 72, 55, 70, 87, 65, 48, 70, 69, 54, 53, 93, 59, 69, 73, 79, 53, 51, 56, 79, 75, 61, 69, 75, 74, 80, 59, 67, 45, 77, 68, 55, 64, 73, 51, 58, 57, 62, 81, 80, 74, 74, 71, 55, 81, 72, 48, 96, 66, 70, 72, 58, 72, 60, 88, 51, 65, 63, 78, 93, 51, 58, 65, 52, 56, 58, 78, 98, 67, 81, 57, 57, 60, 91, 74, 79, 90, 46, 49, 80, 98, 70, 68, 54, 68, 65, 56, 80, 70, 67, 61, 72, 65, 63, 60, 51, 61, 51, 63, 62, 66, 56, 60, 69, 56, 62, 82, 86, 66, 48, 100, 66, 82, 50, 55, 64, 74, 59, 73, 75, 83, 83, 70, 60, 68, 60, 41, 60, 60, 66, 60, 51, 94, 82, 51, 69, 75, 54, 55, 53, 63, 59, 52, 73, 75, 63, 69, 67, 47, 44, 68, 60, 80, 60, 55, 60, 73, 49, 54, 44, 62, 60, 58, 46, 62, 75, 51, 57, 77, 86, 87, 50, 51, 61, 51, 59, 62, 87, 80, 67, 66, 51, 89, 62, 67, 87, 61, 69, 68, 59, 89, 62, 61, 47, 66, 71, 77, 56, 57, 74, 52, 70, 46, 59, 67, 47, 63, 60, 67, 47, 59, 74, 92, 74, 56, 69, 56, 67, 87, 61, 75, 63, 56, 102, 77, 61, 57, 69, 62, 61, 77, 51, 81, 83, 47, 46, 79, 62, 54, 68, 84, 66, 56, 57, 63, 71, 69, 61, 98, 66, 72, 80, 65, 61, 65, 52, 64, 67, 77, 59, 57, 74, 67, 63, 69, 77, 100, 70, 49, 52, 71, 66, 78, 56, 51, 70, 57, 70, 78, 77, 54, 82, 88, 53, 56, 56, 47, 75, 52, 48, 69, 66, 63, 57, 71, 79, 68, 46, 60, 43, 62, 57, 57, 39, 81, 52, 82, 80, 63, 94, 62, 46, 56, 60, 63, 72, 93, 69, 79, 63, 62, 82, 58, 62, 61, 56, 43, 55, 60, 42, 41, 52, 52, 66, 67, 75, 74, 67, 77, 53, 71, 43, 47, 59, 62, 55, 82, 73, 50, 142, 49, 65, 72, 57, 54, 64, 52, 49, 72, 148, 54, 63, 60, 66, 52, 73, 52, 67, 52, 59, 65, 70, 69, 60, 57, 53, 60, 68, 53, 56, 68, 56, 100, 59, 69, 79, 62, 95, 66, 72, 50, 81, 50, 83, 64, 62, 56, 54, 75, 60, 70, 71, 55, 60, 66, 70, 75, 66, 82, 59, 73, 65, 52, 62, 54, 44, 80, 66, 59, 72, 61, 53, 66, 67, 70, 77, 61, 66, 70, 56, 84, 65, 58, 75, 52, 55, 69, 61, 64, 48, 49, 51, 60, 54, 64, 70, 63, 73, 56, 66, 50, 50, 61, 94, 56, 64, 67, 52, 67, 51, 62, 83, 63, 56, 62, 103, 57, 65, 76, 76, 56, 66, 70, 58, 54, 55, 65, 54, 59, 62, 88, 77, 74, 93, 67, 67, 63, 54, 55, 84, 79, 88, 50, 59, 53, 111, 69, 58, 64, 75, 71, 63, 64, 73, 78, 63, 87, 73, 73, 94, 61, 68, 74, 70, 74, 62, 88, 68, 65, 61, 77, 63, 75, 117, 64, 67, 73, 46, 85, 62, 56, 64, 86, 43, 73, 86, 79, 63, 92, 75, 51, 76, 77, 54, 68, 39, 69, 44, 70, 128, 53, 63, 48, 60, 71, 86, 65, 50, 79, 69, 58, 58, 56, 74, 80, 65, 117, 48, 67, 67, 78, 59, 60, 76, 65, 69, 60, 58, 72, 85, 57, 43, 66, 59, 53, 54, 70, 60, 65, 71, 59, 82, 207, 84, 71, 69, 58, 65, 71, 61, 64, 92, 67, 61, 81, 65, 48, 84, 77, 73, 46, 73, 78, 73, 69, 108, 50, 66, 65, 64, 74, 70, 66, 92, 68, 67, 79, 60, 48, 53, 90, 70, 65, 56, 72, 89, 87, 64, 80, 71, 59, 54, 58, 74, 52, 54, 76, 60, 83, 59, 62, 84, 43, 116, 82, 87, 56, 97, 71, 71, 59, 63, 73, 73, 51, 68, 70, 66, 71, 56, 57, 65, 62, 71, 73, 70, 77, 54, 43, 69, 69, 49, 89, 63, 63, 68, 92, 63, 63, 90, 68, 50, 69, 45, 48, 46, 60, 56, 55, 72, 69, 71, 87, 71, 70, 47, 60, 57, 73, 66, 54, 72, 58, 79, 56, 77, 59, 59, 50, 95, 62, 88, 90, 57, 57, 106, 78, 73, 54, 56, 59, 69, 73, 83, 87, 56, 76, 64, 93, 71, 59, 51, 57, 55, 74, 
36, 75, 83, 75, 75, 72, 86, 72, 67, 77, 110, 58, 61, 52, 86, 44, 52, 43, 80, 59, 45, 79, 61, 70, 81, 81, 99, 63, 46, 106, 47, 97, 53, 70, 63, 79, 69, 51, 51, 50, 51, 72, 94, 57, 80, 73, 49, 70, 74, 87, 65, 69, 89, 45, 79, 53, 62, 47, 53, 66, 50, 62, 55, 47, 66, 54, 94, 57, 67, 66, 79, 77, 86, 47, 79, 63, 67, 101, 71, 83, 58, 52, 57, 60, 63, 82, 64, 59, 64, 68, 86, 81, 51, 62, 45, 98, 63, 80, 63, 56, 90, 70, 68, 46, 60, 56, 50, 39, 85, 56, 85, 43, 74, 75, 49, 93, 59, 63, 84, 45, 57, 55, 63, 54, 46, 57, 89, 75, 83, 69, 60, 73, 62, 63, 81, 61, 88, 53, 60, 74, 72, 58, 84, 65, 56, 47, 41, 74, 75, 82, 57, 54, 49, 54, 70, 54, 59, 62, 59, 43, 54, 60, 91, 88, 71, 76, 68, 135, 61, 98, 61, 61, 66, 64, 85, 67, 71, 55, 66, 49, 57, 61, 63, 55, 61, 64, 63, 51, 54, 50, 60, 96, 87, 73, 68, 59, 61, 71, 70, 51, 76, 65, 52, 52, 59, 69, 65, 70, 60, 72, 59, 93, 95, 49, 75, 75, 61, 58, 73, 59, 71, 91, 59, 65, 74, 72, 86, 46, 86, 61, 45, 50, 71, 59, 75, 39, 62, 67, 35, 47, 92, 59, 77, 63, 63, 74, 83, 65, 69, 63, 95, 54, 52, 69, 88, 74, 76, 81, 83, 99, 68, 63, 72, 53, 92, 68, 57, 56, 70, 79, 59, 54, 60, 78, 33, 38, 72, 43, 57, 52, 51, 66, 45, 70, 86, 55, 46, 73, 67, 72, 68, 67, 79, 67, 60, 73, 62, 42, 80, 57, 50, 89, 109, 68, 40, 55, 68, 64, 35, 73, 70, 63, 63, 74, 47, 58, 70, 92, 85, 97, 76, 53, 63, 45, 72, 40, 45, 75, 70, 54, 60, 54, 62, 74, 63, 48, 46, 45, 73, 53, 73, 87, 49, 58, 99, 80, 61, 55, 81, 81, 63, 63, 48, 69, 65, 85, 169, 53, 55, 58, 53, 61, 75, 67, 73, 79, 53, 38, 70, 78, 74, 44, 58, 90, 96, 78, 71, 56, 53, 67, 58, 57, 127, 101, 74, 75, 76, 39, 64, 66, 62, 90, 62, 56, 44, 74, 58, 30, 64, 57, 79, 64, 55, 76, 54, 66, 72, 80, 61, 62, 54, 69, 76, 72, 68, 71, 34, 55, 56, 44, 53, 63, 49, 65, 62, 58, 67, 64, 74, 59, 68, 60, 66, 46, 74, 51, 51, 62, 47, 70, 53, 48, 53, 79, 62, 63, 71, 64, 54, 78, 73, 70, 60, 47, 87, 74, 68, 50, 49, 93, 72, 72, 63, 55, 74, 89, 56, 53, 48, 63, 56, 51, 51, 63, 62, 40, 55, 74, 65, 46, 65, 70, 60, 68, 54, 66, 66, 46, 72, 84, 43, 40, 92, 83, 51, 72, 74, 85, 71, 84, 58, 45, 52, 73, 56, 92, 57, 40, 57, 68, 68, 82, 63, 54, 46, 97, 57, 41, 44, 53, 89, 71, 66, 84, 61, 76, 79, 60, 70, 49, 53, 79, 99, 57, 67, 51, 47, 101, 44, 59, 52, 79, 76, 64, 73, 48, 54, 48, 57, 48, 78, 106, 43, 73, 59, 54, 63, 92, 58, 61, 52, 64, 83, 72, 117, 83, 81, 64, 64, 93, 64, 84, 63, 62, 48, 74, 68, 41, 40, 72, 48, 64, 69, 60, 73, 83, 68, 59, 86, 87, 84, 85, 58, 47, 57, 49, 64, 59, 56, 80, 81, 102, 71, 74, 81, 60, 56, 71, 84, 66, 80, 92, 52, 76, 105, 85, 64, 70, 58, 52, 81, 57, 40, 41, 63, 60, 64, 56, 58, 56, 51, 74, 79, 48, 74, 90, 75, 71, 81, 77, 46, 68, 67, 86, 47, 99, 64, 60, 56, 81, 68, 82, 67, 45, 98, 62, 57, 94, 61, 60, 72, 83, 55, 62, 68, 50, 56, 60, 87, 62, 46, 65, 70, 41, 79, 80, 63, 79, 71, 67, 56, 77, 70, 44, 64, 76, 50, 54, 59, 65, 52, 60, 59, 68, 71, 105, 76, 84, 52, 35, 59, 48, 37, 78, 51, 77, 66, 55, 70, 49, 44, 102, 74, 75, 78, 104, 80, 43, 53, 74, 77, 61, 76, 53, 100, 74, 79, 60, 85, 71, 41, 64, 63, 101, 47, 72, 91, 54, 67, 54, 71, 99, 84, 76, 56, 67, 63, 80, 64, 49, 72, 64, 60, 71, 93, 71, 40, 70, 92, 48, 66, 50, 60, 73, 45, 66, 59, 67, 35, 64, 47, 55, 74, 40, 50, 56, 71, 68, 46, 71, 48, 61, 74, 82, 96, 75, 66, 69, 81, 50, 63, 65, 45, 69, 83, 73, 51, 68, 68, 81, 50, 70, 69, 49, 48, 69, 71, 77, 56, 59, 75, 58, 64, 54, 66, 70, 67, 48, 57, 63, 74, 53, 84, 55, 70, 91, 61, 62, 58, 81, 59, 74, 83, 59, 68, 58, 56, 64, 64, 74, 91, 72, 68, 90, 68, 58, 65, 46, 69, 81, 54, 66, 86, 65, 68, 57, 63, 82, 67, 72, 76, 69, 67, 63, 69, 51, 61, 68, 53, 51, 60, 52, 75, 67, 76, 48, 71, 153, 62, 67, 68, 54, 53, 87, 
65, 60, 55, 93, 65, 64, 51, 91, 59, 61, 49, 78, 77, 54, 66, 57, 54, 60, 78, 64, 41, 83, 67, 86, 66, 74, 46, 57, 58, 61, 97, 97, 56, 58, 72, 64, 75, 99, 60, 78, 59, 79, 63, 66, 68, 61, 45, 61, 67, 68, 65, 56, 83, 77, 64, 86, 71, 90, 76, 94, 47, 52, 50, 65, 66, 50, 71, 57, 53, 75, 20, 77, 60, 64, 55, 67, 56, 48, 94, 61, 72, 85, 67, 66, 70, 113, 58, 56, 59, 65, 60, 66, 77, 67, 61, 65, 59, 74, 40, 67, 64, 55, 106, 51, 76, 100, 89, 81, 40, 57, 106, 69, 60, 71, 63, 50, 69, 87, 68, 75, 73, 55, 69, 75, 66, 46, 102, 64, 65, 80, 61, 68, 44, 64, 63, 55, 68, 57, 107, 56, 60, 74, 91, 50, 60, 69, 66, 75, 62, 79, 49, 63, 66, 56, 64, 50, 58, 66, 108, 67, 70, 70, 74, 48, 51, 53, 46, 64, 62, 74, 50, 47, 69, 58, 66, 110, 48, 61, 63, 82, 71, 53, 83, 83, 94, 60, 59, 71, 85, 65, 52, 57, 73, 63, 65, 73, 60, 65, 56, 69, 58, 51, 53, 72, 70, 50, 85, 61, 62, 82, 73, 73, 71, 80, 61, 55, 64, 63, 60, 56, 65, 67, 64, 58, 53, 71, 74, 61, 56, 56, 75, 76, 50, 68, 67, 65, 105, 51, 54, 73, 57, 79, 95, 59, 53, 62, 62, 90, 77, 58, 60, 49, 55, 73, 58, 76, 59, 70, 56, 51, 70, 50, 46, 61, 99, 78, 68, 53, 108, 75, 61, 88, 87, 40, 69, 53, 81, 63, 51, 40, 56, 56, 45, 77, 57, 82, 75, 50, 70, 62, 64, 66, 73, 68, 60, 59, 76, 64, 44, 73, 49, 67, 69, 76, 70, 54, 59, 74, 55, 70, 57, 56, 53, 71, 72, 66, 69, 74, 57, 50, 68, 99, 73, 85, 58, 61, 79, 60, 87, 68, 81, 65, 54, 68, 83, 58, 61, 61, 81, 60, 121, 63, 60, 78, 71, 77, 102, 53, 72, 73, 63, 65, 59, 65, 56, 70, 62, 63, 54, 67, 61, 84, 74, 69, 75, 47, 74, 59, 82, 104, 60, 60, 77, 72, 92, 75, 67, 38, 77, 79, 59, 99, 72, 69, 66, 96, 56, 72, 53, 49, 60, 56, 83, 60, 63, 51, 70, 50, 66, 78, 77, 73, 56, 55, 71, 120, 86, 71, 60, 52, 74, 73, 72, 39, 71, 75, 59, 65, 69, 60, 47, 56, 74, 63, 71, 73, 79, 80, 57, 67, 64, 54, 58, 75, 61, 72, 58, 63, 59, 100, 71, 61, 80, 36, 57, 57, 78, 76, 85, 60, 54, 64, 56, 87, 43, 72, 58, 69, 56, 66, 54, 70, 52, 56, 51, 72, 68, 75, 57, 74, 78, 42, 64, 66, 62, 94, 63, 62, 73, 58, 84, 55, 59, 58, 87, 58, 106, 62, 48, 55, 48, 59, 69, 67, 59, 44, 60, 60, 49, 57, 52, 55, 90, 57, 67, 104, 119, 74, 75, 79, 77, 59, 50, 77, 56, 71, 52, 80, 58, 78, 64, 72, 85, 61, 58, 60, 68, 71, 61, 68, 54, 72, 67, 61, 65, 64, 63, 70, 17, 52, 67, 49, 46, 61, 59, 46, 50, 97, 54, 54, 66, 100, 64, 72, 50, 85, 60, 59, 77, 50, 42, 84, 69, 76, 53, 59, 57, 73, 85, 54, 83, 58, 75, 79, 72, 61, 55, 74, 75, 50, 66, 64, 64, 63, 58, 43, 62, 51, 71, 49, 72, 70, 53, 42, 58, 62, 74, 40, 95, 61, 60, 83, 52, 80, 69, 79, 125, 28, 65, 54, 52, 59, 60, 74, 54, 58, 58, 65, 59, 53, 87, 51, 64, 85, 57, 59, 49, 75, 39, 59, 52, 51, 58, 90, 72, 77, 55, 64, 67, 57, 67, 72, 64, 61, 65, 62, 53, 55, 58, 52, 61, 54, 67, 84, 59, 61, 96, 55, 80, 60, 45, 87, 66, 58, 70, 70, 112, 57, 49, 50, 62, 76, 74, 65, 71, 48, 78, 48, 34, 54, 47, 79, 75, 91, 61, 63, 67, 82, 70, 45, 69, 75, 76, 87, 53, 73, 40, 67, 68, 60, 74, 57, 86, 82, 65, 78, 59, 56, 65, 41, 65, 64, 73, 67, 54, 63, 62, 85, 65, 63, 59, 85, 65, 22, 112, 83, 65, 45, 117, 86, 62, 82, 61, 63, 58, 95, 77, 62, 66, 63, 61, 71, 54, 70, 69, 56, 56, 76, 90, 58, 79, 78, 52, 66, 76, 57, 51, 62, 51, 60, 72, 75, 71, 41, 58, 67, 51, 62, 66, 83, 42, 63, 77, 79, 73, 78, 0, 56, 53, 66, 75, 73, 76, 57, 63, 61, 83, 86, 66, 64, 72, 53, 52, 104, 46, 68, 50, 116, 55, 55, 73, 67, 66, 55, 64, 75, 41, 71, 72, 60, 58, 110, 61, 66, 55, 65, 67, 77, 61, 94, 70, 46, 67, 70, 65, 60, 68, 84, 73, 65, 67, 30, 93, 59, 43, 61, 69, 92, 60, 73, 86, 61, 69, 86, 63, 76, 78, 76, 56, 72, 64, 77, 50, 73, 76, 79, 62, 62, 68, 78, 54, 36, 77, 77, 85, 66, 42, 78, 59, 53, 71, 66, 67, 67, 58, 69, 61, 69, 68, 63, 79, 54, 
76, 52, 69, 59, 90, 87, 65, 66, 51, 55, 53, 37, 71, 59, 71, 51, 69, 54, 59, 44, 66, 85, 73, 51, 58, 72, 55, 66, 54, 51, 49, 85, 63, 70, 54, 77, 55, 85, 66, 70, 63, 53, 62, 52, 66, 74, 74, 81, 74, 71, 55, 71, 86, 56, 58, 87, 60, 66, 55, 81, 68, 58, 47, 63, 58, 59, 61, 63, 54, 54, 57, 63, 79, 43, 66, 38, 52, 59, 112, 59, 77, 83, 82, 62, 61, 68, 60, 49, 63, 105, 102, 60, 60, 75, 69, 60, 54, 78, 66, 62, 49, 76, 53, 83, 99, 50, 64, 68, 84, 69, 54, 48, 62, 70, 61, 67, 67, 72, 54, 68, 71, 74, 60, 63, 74, 65, 64, 60, 72, 67, 71, 55, 55, 67, 74, 67, 35, 68, 50, 79, 78, 49, 67, 70, 74, 55, 51, 74, 77, 64, 52, 55, 47, 59, 60, 50, 59, 76, 67, 57, 48, 62, 62, 57, 61, 58, 73, 44, 57, 76, 55, 64, 51, 64, 33, 91, 66, 68, 85, 65, 91, 73, 65, 47, 58, 68, 69, 100, 60, 75, 67, 88, 66, 74, 53, 71, 78, 55, 75, 59, 68, 51, 66, 63, 62, 60, 74, 77, 59, 48, 62, 56, 68, 58, 67, 40, 72, 79, 66, 68, 48, 47, 60, 83, 62, 54, 54, 63, 59, 66, 81, 85, 110, 85, 70, 104, 68, 105, 59, 63, 54, 64, 72, 72, 54, 47, 63, 62, 83, 95, 59, 58, 59, 52, 56, 62, 57, 70, 55, 65, 64, 55, 64, 70, 64, 51, 55, 107, 62, 70, 64, 60, 64, 71, 56, 84, 131, 57, 71, 53, 44, 72, 66, 65, 85, 55, 44, 39, 78, 70, 57, 58, 59, 73, 48, 50, 64, 84, 61, 58, 87, 83, 66, 60, 60, 71, 78, 51, 73, 75, 66, 88, 79, 78, 48, 83, 64, 75, 64, 57, 62, 64, 83, 84, 71, 64, 71, 49, 84, 58, 69, 67, 81, 65, 51, 103, 78, 47, 68, 59, 56, 65, 93, 54, 63, 70, 64, 67, 69, 43, 71, 65, 66, 74, 63, 94, 92, 52, 56, 78, 63, 71, 68, 67, 78, 77, 69, 76, 63, 65, 66, 49, 60, 53, 76, 52, 80, 57, 72, 64, 54, 67, 62, 70, 92, 48, 68, 58, 62, 73, 79, 59, 71, 66, 54, 60, 83, 62, 66, 50, 59, 43, 50, 82, 82, 70, 55, 50, 89, 87, 75, 67, 59, 95, 55, 53, 62, 55, 66, 63, 61, 47, 55, 64, 69, 63, 59, 59, 70, 68, 69, 134, 48, 64, 96, 78, 54, 60, 80, 66, 55, 78, 59, 92, 97, 72, 62, 84, 44, 107, 71, 77, 68, 52, 52, 64, 61, 119, 68, 87, 90, 64, 56, 57, 79, 62, 84, 56, 89, 72, 55, 61, 70, 81, 69, 58, 49, 67, 59, 59, 57, 48, 53, 52, 86, 74, 62, 62, 59, 52, 71, 65, 82, 50, 62, 68, 110, 63, 55, 66, 50, 76, 48, 63, 83, 80, 80, 68, 61, 140, 57, 64, 56, 65, 63, 53, 43, 75, 87, 56, 95, 74, 71, 64, 73, 73, 62, 46, 66, 79, 57, 56, 53, 58, 67, 59, 70, 55, 63, 85, 62, 49, 59, 49, 66, 57, 78, 67, 65, 54, 62, 66, 48, 61, 45, 91, 73, 76, 82, 75, 52, 59, 76, 58, 68, 81, 58, 106, 63, 93, 64, 63, 61, 57, 73, 56, 46, 82, 79, 68, 70, 75, 73, 76, 50, 76, 53, 59, 73, 62, 58, 66, 98, 71, 59, 68, 90, 88, 46, 62, 60, 57, 75, 56, 55, 59, 59, 77, 74, 67, 46, 70, 59, 64, 73, 77, 64, 60, 92, 84, 86, 58, 50, 70, 62, 63, 57, 77, 80, 70, 72, 75, 54, 57, 74, 79, 61, 58, 76, 65, 82, 75, 62, 61, 40, 61, 74, 45, 47, 75, 69, 44, 42, 76, 60, 62, 59, 118, 63, 69, 64, 32, 71, 56, 53, 67, 71, 65, 83, 65, 52, 42, 65, 62, 55, 68, 44, 73, 58, 84, 66, 42, 59, 62, 56, 57, 70, 52, 66, 73, 64, 54, 56, 51, 49, 53, 50, 59, 50, 52, 57, 77, 67, 48, 81, 57, 71, 89, 67, 63, 68, 65, 63, 81, 75, 64, 53, 61, 55, 73, 66, 67, 63, 59, 64, 54, 89, 49, 72, 53, 64, 64, 59, 64, 52, 67, 66, 57, 48, 81, 47, 54, 40, 92, 46, 53, 65, 59, 58, 54, 76, 155, 86, 63, 53, 65, 70, 69, 53, 70, 62, 71, 58, 68, 64, 56, 67, 72, 83, 71, 61, 62, 86, 66, 61, 53, 64, 84, 52, 71, 64, 72, 59, 97, 86, 71, 85, 68, 85, 59, 52, 45, 63, 68, 33, 61, 68, 66, 76, 54, 86, 60, 48, 71, 57, 70, 73, 116, 56, 65, 71, 59, 68, 64, 67, 71, 69, 71, 64, 57, 57, 56, 67, 56, 56, 89, 63, 57, 63, 76, 48, 114, 70, 56, 57, 59, 50, 70, 64, 74, 57, 68, 73, 78, 62, 64, 85, 69, 50, 65, 67, 60, 82, 66, 59, 66, 84, 59, 69, 39, 51, 74, 61, 51, 60, 83, 58, 63, 72, 60, 58, 71, 84, 58, 73, 78, 48, 63, 99, 74, 63, 59, 59, 
45, 71, 57, 82, 67, 49, 69, 77, 66, 73, 71, 66, 68, 54, 77, 84, 54, 57, 58, 54, 55, 51, 71, 53, 50, 57, 68, 66, 83, 73, 127, 73, 42, 59, 68, 58, 53, 76, 66, 63, 62, 51, 59, 66, 62, 76, 48, 67, 70, 70, 54, 61, 64, 56, 82, 46, 68, 64, 56, 82, 78, 75, 82, 67, 71, 68, 63, 64, 64, 62, 63, 41, 77, 63, 74, 58, 66, 64, 63, 63, 60, 50, 76, 71, 55, 44, 81, 57, 47, 64, 57, 99, 97, 61, 63, 60, 43, 60, 85, 65, 66, 67, 69, 61, 53, 115, 73, 70, 67, 57, 52, 58, 72, 56, 59, 82, 55, 60, 72, 68, 74, 58, 48, 55, 83, 62, 65, 76, 45, 65, 78, 62, 65, 74, 67, 50, 98, 56, 72, 77, 63, 61, 52, 80, 59, 52, 66, 71, 57, 61, 55, 65, 60, 65, 80, 66, 67, 60, 69, 56, 86, 41, 82, 86, 60, 73, 46, 74, 75, 62, 54, 40, 61, 81, 68, 56, 68, 66, 71, 78, 59, 80, 72, 64, 58, 73, 60, 60, 67, 71, 71, 83, 67, 73, 64, 76, 79, 58, 136, 66, 72, 69, 55, 69, 52, 63, 77, 62, 77, 64, 76, 75, 59, 55, 74, 70, 50, 47, 53, 53, 71, 64, 66, 60, 83, 43, 70, 72, 58, 65, 120, 60, 47, 59, 65, 83, 56, 58, 58, 60, 75, 67, 74, 89, 61, 72, 75, 77, 107, 80, 44, 84, 56, 59, 64, 71, 63, 122, 53, 73, 68, 67, 52, 63, 63, 63, 65, 60, 56, 49, 65, 55, 68, 110, 68, 45, 65, 66, 106, 71, 71, 65, 67, 56, 49, 77, 60, 61, 49, 62, 43, 74, 86, 77, 72, 66, 48, 56, 78, 64, 63, 65, 65, 69, 83, 56, 51, 84, 74, 58, 54, 52, 61, 78, 88, 50, 77, 60, 59, 37, 58, 51, 61, 59, 57, 74, 47, 52, 80, 57, 101, 79, 52, 71, 42, 75, 72, 60, 85, 88, 65, 74, 41, 75, 66, 77, 97, 67, 71, 49, 68, 50, 65, 45, 53, 45, 99, 67, 56, 91, 74, 60, 85, 67, 85, 53, 71, 65, 58, 47, 55, 67, 105, 59, 63, 64, 70, 70, 58, 49, 69, 80, 69, 70, 51, 59, 49, 60, 66, 116, 50, 54, 70, 59, 48, 66, 79, 73, 50, 60, 89, 52, 69, 79, 74, 54, 56, 63, 56, 60, 54, 57, 54, 55, 62, 53, 119, 76, 58, 37, 52, 68, 75, 52, 73, 60, 54, 94, 72, 66, 95, 81, 65, 52, 73, 68, 54, 96, 76, 47, 68, 82, 89, 51, 101, 58, 60, 94, 67, 94, 44, 92, 82, 62, 64, 71, 78, 97, 61, 42, 62, 67, 41, 50, 109, 63, 58, 70, 45, 72, 84, 85, 110, 58, 85, 70, 54, 68, 59, 68, 53, 59, 55, 70, 63, 88, 64, 65, 54, 61, 49, 80, 81, 56, 62, 65, 76, 63, 65, 50, 67, 55, 76, 48, 59, 66, 61, 65, 108, 50, 63, 71, 49, 83, 90, 57, 68, 54, 63, 55, 41, 93, 64, 67, 60, 58, 76, 70, 43, 34, 51, 54, 78, 93, 57, 56, 52, 52, 95, 51, 74, 69, 56, 82, 60, 71, 36, 61, 58, 74, 61, 43, 54, 62, 58, 95, 63, 69, 69, 55, 66, 65, 73, 72, 59, 52, 72, 49, 74, 77, 99, 62, 58, 62, 68, 78, 75, 42, 63, 46, 70, 77, 86, 62, 69, 52, 81, 91, 67, 51, 47, 67, 52, 70, 74, 67, 54, 65, 68, 67, 68, 67, 62, 71, 66, 78, 73, 67, 88, 52, 96, 58, 52, 61, 58, 39, 63, 51, 68, 73, 60, 65, 72, 53, 85, 54, 57, 67, 61, 81, 40, 81, 84, 59, 84, 54, 89, 45, 79, 48, 70, 70, 70, 50, 35, 65, 79, 47, 71, 61, 81, 45, 68, 57, 66, 60, 68, 76, 61, 71, 64, 57, 58, 62, 46, 76, 59, 63, 89, 58, 96, 63, 85, 72, 59, 65, 44, 66, 64, 46, 60, 61, 71, 63, 72, 65, 67, 66, 112, 73, 82, 73, 72, 73, 71, 74, 83, 58, 79, 68, 40, 74, 76, 78, 71, 63, 65, 50, 52, 76, 55, 58, 79, 137, 63, 58, 49, 72, 67, 57, 62, 84, 54, 61, 62, 61, 77, 67, 54, 68, 80, 57, 60, 65, 66, 60, 64, 91, 56, 61, 55, 63, 106, 72, 68, 65, 65, 104, 69, 78, 35, 43, 59, 69, 57, 45, 62, 73, 57, 65, 52, 68, 70, 90, 73, 66, 70, 70, 80, 48, 70, 51, 84, 58, 66, 44, 59, 72, 100, 55, 69, 61, 67, 60, 38, 69, 55, 71, 67, 80, 69, 89, 90, 68, 50, 105, 57, 69, 81, 48, 73, 40, 48, 112, 54, 75, 69, 86, 45, 70, 56, 77, 82, 61, 76, 64, 93, 66, 56, 82, 53, 110, 47, 73, 66, 67, 76, 46, 88, 46, 67, 54, 92, 57, 60, 39, 45, 49, 94, 60, 72, 61, 66, 49, 51, 65, 78, 78, 83, 40, 62, 48, 66, 74, 57, 69, 60, 74, 66, 67, 67, 98, 64, 61, 80, 80, 67, 57, 62, 64, 48, 83, 54, 72, 59, 50, 55, 68, 99, 50, 64, 69, 
69, 58, 55, 81, 72, 60, 54, 64, 65, 42, 59, 70, 60, 76, 156, 44, 70, 56, 52, 73, 60, 102, 66, 66, 89, 75, 56, 67, 55, 60, 75, 66, 68, 52, 49, 73, 70, 103, 70, 52, 64, 45, 66, 64, 64, 66, 90, 55, 68, 70, 68, 57, 64, 62, 54, 60, 61, 94, 39, 52, 47, 76, 71, 44, 76, 60, 72, 73, 58, 63, 69, 76, 64, 66, 41, 85, 70, 56, 72, 63, 68, 73, 64, 73, 68, 69, 57, 64, 62, 59, 56, 97, 73, 54, 54, 83, 64, 61, 50, 46, 89, 45, 84, 66, 53, 81, 72, 53, 65, 98, 48, 48, 46, 56, 93, 58, 72, 46, 62, 71, 74, 71, 99, 70, 56, 80, 67, 54, 66, 93, 82, 61, 84, 68, 73, 61, 50, 52, 75, 70, 72, 54, 72, 60, 85, 65, 84, 58, 50, 96, 75, 76, 46, 83, 50, 59, 99, 59, 78, 74, 63, 62, 71, 65, 66, 67, 68, 61, 64, 111, 71, 74, 65, 68, 50, 111, 75, 86, 71, 67, 71, 60, 61, 71, 64, 81, 42, 61, 72, 34, 64, 78, 52, 50, 67, 60, 77, 34, 48, 81, 61, 79, 82, 90, 48, 78, 63, 52, 57, 63, 52, 77, 50, 118, 65, 50, 80, 60, 69, 62, 56, 70, 52, 88, 70, 81, 60, 84, 77, 48, 65, 76, 32, 72, 62, 52, 74, 79, 86, 78, 69, 57, 61, 36, 100, 53, 65, 77, 48, 52, 48, 50, 57, 65, 85, 68, 74, 77, 51, 59, 54, 67, 59, 58, 51, 69, 52, 51, 48, 55, 72, 57, 57, 54, 56, 69, 68, 60, 83, 95, 42, 73, 55, 81, 73, 46, 41, 63, 88, 89, 58, 44, 71, 63, 126, 67, 91, 52, 88, 54, 58, 51, 63, 53, 46, 57, 59, 72, 67, 56, 84, 50, 73, 56, 95, 76, 92, 76, 84, 67, 75, 66, 81, 49, 72, 89, 46, 75, 56, 75, 55, 65, 65, 58, 77, 101, 24, 66, 73, 55, 50, 88, 69, 54, 67, 55, 105, 68, 59, 68, 47, 57, 81, 61, 57, 56, 78, 92, 62, 96, 75, 89, 99, 76, 74, 89, 64, 84, 68, 87, 48, 79, 58, 84, 47, 54, 61, 124, 41, 45, 129, 64, 86, 66, 89, 68, 63, 51, 46, 74, 52, 41, 82, 67, 60, 48, 92, 73, 60, 51, 78, 94, 56, 72, 45, 74, 70, 71, 67, 58, 85, 69, 60, 59, 57, 77, 73, 76, 66, 63, 90, 78, 60, 59, 37, 46, 62, 86, 73, 72, 62, 66, 77, 43, 57, 70, 73, 69, 77, 75, 60, 64, 81, 59, 49, 61, 61, 79, 57, 53, 76, 68, 69, 76, 80, 54, 94, 92, 50, 63, 61, 81, 61, 58, 54, 71, 83, 73, 49, 74, 59, 67, 61, 63, 57, 88, 74, 64, 58, 42, 77, 97, 41, 82, 62, 66, 81, 66, 42, 67, 74, 64, 71, 85, 58, 67, 71, 59, 72, 54, 66, 51, 70, 63, 45, 52, 91, 68, 61, 39, 66, 74, 45, 65, 75, 55, 60, 61, 70, 57, 54, 44, 72, 62, 65, 68, 85, 79, 64, 67, 52, 65, 68, 83, 69, 79, 52, 48, 65, 67, 59, 69, 79, 70, 70, 46, 51, 66, 61, 58, 68, 53, 55, 69, 59, 57, 78, 72, 79, 63, 72, 62, 73, 64, 68, 62, 74, 80, 55, 51, 62, 55, 49, 57, 71, 70, 64, 101, 44, 51, 55, 58, 58, 80, 48, 104, 40, 70, 87, 68, 76, 80, 63, 70, 99, 51, 73, 58, 63, 60, 52, 70, 80, 79, 63, 62, 57, 79, 47, 73, 59, 91, 55, 71, 67, 46, 68, 58, 60, 71, 64, 65, 58, 90, 64, 58, 70, 63, 59, 79, 130, 65, 50, 56, 57, 55, 79, 42, 73, 57, 41, 45, 51, 79, 59, 84, 56, 53, 59, 68, 52, 44, 60, 60, 77, 58, 70, 75, 59, 88, 43, 79, 78, 58, 61, 86, 43, 65, 66, 77, 121, 69, 55, 45, 62, 60, 61, 78, 49, 73, 81, 69, 56, 97, 71, 36, 67, 84, 48, 124, 70, 59, 85, 80, 62, 73, 70, 73, 58, 84, 58, 67, 67, 44, 69, 67, 50, 72, 86, 94, 61, 73, 63, 64, 57, 89, 65, 58, 60, 63, 41, 64, 76, 48, 55, 63, 74, 51, 67, 53, 92, 57, 64, 73, 84, 66, 74, 64, 80, 62, 71, 65, 66, 87, 67, 70, 41, 61, 59, 60, 72, 71, 55, 63, 69, 67, 68, 46, 65, 73, 69, 54, 71, 51, 66, 95, 73, 72, 44, 52, 61, 61, 79, 78, 56, 73, 72, 89, 70, 61, 81, 64, 81, 46, 104, 59, 59, 55, 81, 64, 60, 60, 68, 64, 60, 105, 67, 59, 73, 69, 61, 72, 103, 61, 65, 56, 78, 79, 74, 45, 75, 76, 58, 81, 78, 47, 43, 68, 51, 73, 69, 73, 49, 55, 72, 68, 79, 79, 55, 92, 98, 75, 67, 79, 62, 71, 52, 44, 68, 67, 52, 60, 60, 61, 52, 58, 51, 55, 50, 60, 118, 63, 75, 90, 66, 59, 62, 65, 70, 75, 50, 92, 82, 83, 87, 61, 59, 68, 64, 65, 83, 52, 69, 87, 86, 43, 88, 76, 73, 77, 57, 73, 88, 
60, 56, 56, 46, 73, 64, 74, 75, 95, 68, 65, 49, 61, 64, 83, 77, 73, 59, 68, 70, 77, 52, 73, 80, 108, 106, 54, 99, 77, 57, 63, 70, 83, 64, 37, 82, 68, 79, 58, 54, 69, 48, 57, 79, 57, 77, 72, 93, 62, 58, 62, 95, 60, 63, 58, 74, 66, 64, 55, 63, 61, 70, 49, 67, 49, 77, 73, 75, 58, 69, 58, 68, 54, 57, 74, 60, 59, 88, 62, 56, 56, 61, 54, 63, 50, 73, 91, 62, 59, 75, 66, 90, 59, 99, 63, 63, 49, 66, 57, 72, 62, 49, 62, 72, 51, 73, 57, 70, 59, 64, 92, 61, 53, 55, 56, 74, 74, 67, 57, 55, 58, 81, 60, 50, 77, 54, 68, 66, 62, 54, 61, 63, 64, 64, 50, 59, 39, 73, 50, 57, 63, 68, 74, 60, 77, 64, 56, 84, 63, 76, 59, 69, 61, 60, 55, 51, 64, 46, 69, 66, 61, 104, 67, 64, 73, 51, 68, 82, 59, 52, 55, 45, 59, 54, 58, 55, 49, 55, 66, 72, 84, 55, 78, 67, 81, 77, 63, 90, 68, 75, 87, 72, 60, 57, 62, 48, 87, 83, 56, 55, 69, 66, 57, 70, 57, 56, 59, 49, 62, 59, 59, 70, 66, 93, 64, 121, 57, 65, 75, 76, 71, 57, 82, 54, 55, 58, 70, 64, 88, 70, 47, 102, 52, 59, 85, 69, 58, 65, 75, 99, 58, 62, 69, 56, 64, 51, 63, 38, 48, 79, 97, 55, 58, 70, 62, 65, 74, 53, 76, 55, 95, 67, 65, 50, 109, 58, 58, 60, 49, 83, 149, 59, 71, 98, 59, 84, 66, 73, 75, 61, 69, 62, 83, 72, 52, 89, 71, 57, 64, 73, 52, 68, 78, 78, 62, 77, 59, 51, 71, 82, 74, 71, 90, 99, 47, 59, 69, 57, 79, 71, 68, 51, 64, 123, 53, 57, 85, 85, 55, 67, 65, 53, 61, 63, 75, 78, 58, 86, 75, 66, 46, 67, 75, 57, 56, 100, 68, 57, 63, 53, 51, 60, 50, 48, 63, 80, 58, 69, 60, 72, 83, 61, 78, 61, 66, 78, 82, 65, 47, 67, 88, 44, 63, 53, 79, 58, 75, 81, 73, 66, 63, 47, 73, 99, 81, 101, 92, 63, 61, 59, 56, 71, 63, 63, 65, 68, 63, 128, 63, 75, 64, 71, 80, 66, 60, 57, 54, 78, 52, 49, 62, 55, 62, 63, 75, 54, 58, 66, 59, 76, 78, 66, 60, 61, 74, 63, 55, 63, 53, 65, 75, 137, 49, 66, 58, 57, 100, 82, 50, 59, 78, 51, 71, 75, 77, 62, 67, 83, 54, 53, 74, 61, 67, 90, 62, 48, 61, 68, 67, 45, 83, 78, 63, 63, 67, 64, 54, 70, 55, 61, 71, 68, 68, 58, 81, 56, 76, 86, 59, 67, 81, 68, 61, 63, 88, 72, 81, 73, 81, 76, 65, 55, 79, 73, 92, 70, 63, 73, 44, 46, 53, 46, 63, 56, 47, 57, 65, 70, 45, 53, 71, 45, 75, 70, 72, 62, 61, 103, 73, 57, 69, 58, 66, 64, 66, 67, 65, 64, 84, 76, 126, 48, 48, 107, 76, 53, 60, 72, 59, 57, 50, 63, 68, 60, 67, 54, 71, 75, 54, 65, 63, 59, 56, 53, 59, 63, 74, 82, 71, 59, 64, 78, 52, 58, 47, 39, 54, 68, 47, 61, 72, 70, 52, 117, 70, 62, 56, 74, 49, 78, 57, 61, 67, 54, 72, 57, 62, 68, 56, 55, 73, 55, 80, 57, 87, 59, 64, 58, 107, 43, 61, 56, 86, 74, 60, 64, 51, 62, 67, 56, 67, 55, 92, 87, 78, 67, 52, 62, 40, 58, 61, 55, 60, 57, 46, 57, 56, 63, 63, 59, 95, 80, 49, 90, 79, 65, 70, 82, 73, 55, 72, 62, 70, 45, 68, 90, 64, 55, 71, 60, 63, 71, 66, 84, 52, 53, 65, 70, 79, 92, 61, 54, 58, 84, 52, 84, 51, 50, 66, 86, 68, 57, 72, 72, 80, 97, 89, 75, 83, 52, 74, 74, 61, 83, 91, 74, 57, 67, 61, 77, 62, 48, 61, 82, 62, 56, 60, 58, 70, 94, 51, 55, 82, 79, 89, 59, 67, 62, 85, 53, 63, 100, 56, 74, 103, 74, 74, 94, 46, 61, 62, 66, 69, 69, 69, 47, 66, 55, 81, 66, 74, 76, 66, 85, 84, 53, 54, 66, 73, 69, 40, 49, 68, 77, 78, 81, 59, 53, 46, 60, 65, 44, 64, 95, 75, 79, 64, 84, 85, 45, 73, 55, 49, 75, 65, 84, 75, 58, 80, 62, 52, 51, 72, 35, 72, 73, 56, 57, 51, 77, 56, 60, 69, 106, 51, 64, 49, 77, 50, 47, 46, 59, 51, 56, 36, 64, 83, 97, 72, 94, 89, 60, 61, 52, 77, 50, 68, 63, 69, 67, 54, 54, 61, 59, 73, 53, 72, 58, 66, 61, 81, 64, 74, 45, 60, 71, 61, 60, 61, 53, 51, 74, 75, 72, 73, 79, 62, 80, 102, 50, 47, 57, 67, 67, 59, 57, 75, 59, 60, 57, 64, 63, 76, 66, 75, 66, 56, 83, 68, 77, 55, 58, 74, 44, 77, 71, 72, 71, 56, 66, 98, 52, 67, 56, 62, 63, 72, 55, 56, 75, 46, 76, 104, 63, 63, 85, 70, 91, 60, 66, 81, 69, 57, 
56, 53, 56, 91, 50, 45, 83, 55, 52, 95, 57, 57, 75, 47, 62, 54, 57, 78, 55, 57, 60, 81, 64, 68, 68, 56, 75, 50, 52, 56, 64, 56, 78, 72, 63, 57, 67, 64, 72, 54, 71, 62, 67, 52, 75, 56, 82, 55, 54, 71, 62, 51, 53, 55, 76, 66, 65, 92, 53, 63, 81, 70, 70, 56, 53, 84, 82, 76, 77, 41, 61, 49, 51, 61, 73, 50, 89, 58, 63, 52, 67, 53, 58, 68, 60, 64, 106, 69, 53, 66, 59, 66, 93, 51, 78, 64, 60, 59, 74, 51, 74, 78, 84, 66, 56, 62, 53, 79, 62, 60, 48, 70, 51, 64, 58, 60, 76, 63, 70, 66, 60, 69, 80, 60, 65, 61, 60, 76, 51, 88, 86, 59, 65, 68, 78, 57, 59, 66, 54, 66, 75, 65, 52, 56, 58, 93, 75, 69, 46, 79, 50, 55, 60, 68, 61, 59, 57, 59, 70, 71, 49, 63, 60, 67, 58, 52, 49, 56, 52, 44, 44, 64, 61, 85, 63, 69, 68, 74, 60, 77, 72, 78, 74, 62, 74, 44, 55, 62, 64, 56, 80, 64, 79, 72, 57, 69, 93, 61, 67, 74, 80, 50, 61, 60, 67, 71, 68, 57, 56, 84, 68, 58, 67, 56, 68, 63, 62, 86, 92, 57, 82, 58, 52, 65, 59, 54, 63, 81, 76, 73, 61, 44, 51, 75, 65, 50, 89, 74, 58, 109, 64, 59, 55, 58, 51, 52, 56, 92, 74, 53, 62, 70, 73, 53, 55, 53, 59, 86, 69, 74, 76, 76, 69, 75, 48, 58, 63, 72, 49, 59, 63, 63, 65, 63, 57, 81, 67, 48, 43, 43, 61, 73, 52, 58, 59, 57, 59, 73, 81, 56, 75, 68, 72, 69, 54, 68, 55, 64, 71, 57, 57, 68, 61, 36, 54, 66, 64, 75, 94, 84, 77, 54, 63, 77, 69, 58, 79, 71, 53, 67, 67, 56, 64, 68, 100, 63, 74, 60, 67, 62, 62, 57, 61, 59, 58, 62, 81, 83, 76, 69, 65, 59, 50, 61, 70, 75, 59, 99, 57, 72, 87, 51, 66, 69, 63, 70, 96, 57, 50, 62, 67, 60, 53, 87, 74, 73, 61, 65, 62, 55, 51, 68, 82, 96, 66, 53, 50, 57, 62, 52, 69, 66, 75, 61, 53, 80, 75, 69, 61, 82, 52, 69, 92, 62, 119, 64, 72, 54, 44, 64, 71, 72, 41, 62, 59, 59, 74, 42, 56, 58, 81, 67, 53, 71, 52, 71, 65, 64, 52, 48, 83, 134, 41, 66, 51, 59, 55, 77, 57, 88, 52, 61, 61, 65, 82, 74, 61, 53, 38, 95, 50, 55, 66, 57, 51, 47, 56, 64, 60, 56, 69, 72, 79, 60, 41, 70, 76, 79, 49, 94, 62, 41, 53, 67, 59, 51, 73, 44, 73, 78, 55, 57, 106, 89, 61, 64, 71, 37, 62, 59, 56, 107, 56, 55, 48, 57, 74, 65, 41, 68, 65, 47, 86, 47, 72, 59, 86, 59, 64, 79, 73, 42, 93, 68, 72, 116, 56, 74, 78, 74, 54, 59, 88, 103, 61, 62, 43, 64, 75, 45, 25, 63, 65, 55, 52, 79, 64, 66, 78, 96, 63, 82, 87, 47, 82, 100, 69, 62, 58, 54, 68, 69, 39, 67, 45, 66, 53, 58, 64, 78, 67, 83, 79, 85, 100, 60, 65, 27, 65, 85, 46, 72, 66, 84, 69, 83, 60, 66, 46, 89, 60, 59, 100, 96, 67, 75, 85, 53, 50, 70, 50, 71, 66, 67, 77, 73, 109, 39, 68, 79, 67, 100, 90, 59, 65, 55, 65, 83, 62, 58, 66, 57, 42, 46, 43, 83, 51, 60, 60, 81, 72, 78, 68, 40, 66, 58, 58, 76, 89, 76, 50, 58, 72, 48, 95, 62, 71, 58, 103, 40, 72, 73, 46, 49, 49, 72, 81, 79, 63, 77, 59, 67, 95, 81, 91, 95, 53, 59, 80, 81, 73, 59, 36, 62, 63, 81, 42, 64, 65, 47, 80, 92, 58, 55, 113, 58, 61, 74, 91, 93, 106, 135, 68, 53, 47, 87, 64, 78, 68, 55, 56, 63, 56, 53, 74, 70, 42, 80, 45, 62, 44, 62, 45, 74, 62, 61, 51, 53, 66, 51, 66, 66, 63, 70, 66, 64, 62, 128, 62, 91, 67, 58, 57, 93, 86, 67, 51, 64, 81, 59, 65, 53, 78, 63, 93, 38, 74, 65, 51, 70, 47, 54, 75, 56, 67, 79, 51, 48, 66, 80, 46, 62, 73, 69, 81, 67, 53, 103, 73, 74, 74, 65, 55, 57, 45, 79, 63, 85, 62, 80, 67, 76, 58, 33, 74, 72, 73, 76, 83, 78, 67, 62, 51, 62, 50, 98, 59, 66, 74, 48, 73, 54, 44, 45, 58, 69, 69, 53, 54, 110, 46, 97, 74, 53, 54, 59, 69, 46, 51, 48, 70, 48, 53, 57, 86, 41, 52, 65, 64, 56, 77, 78, 59, 38, 85, 76, 64, 55, 55, 65, 70, 44, 78, 62, 79, 53, 68, 119, 60, 64, 55, 69, 56, 131, 48, 64, 81, 50, 72, 89, 79, 46, 60, 89, 75, 72, 77, 67, 74, 54, 67, 80, 67, 37, 73, 52, 61, 89, 81, 70, 74, 51, 54, 62, 60, 86, 54, 80, 47, 65, 52, 132, 66, 65, 74, 83, 54, 52, 55, 73, 51, 64, 
91, 75, 66, 56, 71, 63, 62, 140, 55, 59, 60, 75, 64, 104, 83, 66, 47, 52, 62, 64, 41, 62, 77, 63, 90, 67, 42, 74, 79, 67, 53, 53, 57, 56, 52, 80, 74, 112, 44, 46, 46, 75, 61, 87, 75, 58, 105, 56, 74, 64, 67, 56, 69, 69, 75, 55, 52, 57, 68, 76, 70, 68, 83, 59, 75, 47, 44, 52, 51, 101, 67, 75, 61, 62, 51, 65, 35, 46, 74, 58, 69, 68, 67, 74, 63, 60, 41, 81, 52, 58, 57, 65, 78, 51, 69, 66, 63, 48, 74, 70, 60, 84, 60, 69, 67, 60, 100, 66, 49, 95, 59, 55, 50, 63, 100, 46, 44, 64, 66, 56, 65, 64, 53, 50, 83, 80, 100, 59, 73, 81, 50, 92, 84, 69, 72, 74, 78, 105, 75, 83, 100, 85, 51, 75, 46, 69, 56, 58, 73, 65, 51, 84, 58, 68, 105, 93, 63, 61, 59, 76, 60, 83, 60, 71, 56, 66, 67, 71, 64, 74, 79, 47, 85, 70, 60, 40, 66, 96, 52, 88, 40, 73, 63, 98, 85, 61, 70, 45, 58, 79, 62, 97, 39, 79, 56, 94, 81, 72, 73, 60, 66, 51, 59, 78, 53, 88, 69, 70, 81, 50, 85, 72, 80, 115, 98, 50, 59, 40, 54, 57, 83, 72, 97, 60, 84, 110, 49, 45, 92, 69, 54, 46, 55, 57, 46, 87, 71, 66, 110, 56, 56, 57, 60, 53, 62, 47, 60, 62, 57, 63, 82, 70, 56, 66, 100, 58, 77, 67, 43, 55, 62, 47, 68, 47, 44, 70, 58, 74, 46, 79, 64, 58, 72, 60, 75, 57, 61, 72, 44, 67, 67, 54, 76, 58, 60, 150, 82, 48, 35, 82, 73, 101, 65, 83, 69, 65, 70, 31, 52, 94, 59, 73, 77, 42, 53, 83, 76, 70, 57, 48, 74, 79, 60, 82, 50, 102, 87, 68, 43, 77, 55, 75, 74, 57, 71, 87, 41, 64, 49, 68, 77, 59, 59, 72, 75, 66, 72, 57, 84, 60, 60, 51, 73, 43, 106, 87, 74, 61, 55, 66, 58, 98, 60, 70, 53, 83, 39, 75, 62, 69, 65, 94, 51, 53, 69, 69, 80, 50, 59, 87, 78, 67, 60, 51, 77, 59, 68, 78, 97, 77, 58, 63, 83, 48, 66, 52, 87, 70, 60, 53, 68, 65, 70, 60, 76, 87, 103, 54, 50, 64, 44, 92, 72, 54, 73, 51, 33, 48, 93, 83, 56, 54, 67, 79, 47, 61, 47, 73, 37, 48, 68, 70, 63, 54, 72, 52, 73, 66, 65, 58, 81, 86, 66, 76, 46, 64, 95, 76, 94, 58, 64, 54, 96, 86, 51, 49, 52, 91, 44, 71, 76, 68, 91, 51, 50, 45, 70, 87, 64, 32, 70, 71, 74, 55, 77, 63, 103, 57, 62, 81, 60, 64, 69, 83, 69, 46, 102, 85, 54, 58, 69, 48, 99, 59, 72, 58, 66, 44, 61, 64, 50, 70, 77, 68, 36, 85, 70, 89, 50, 71, 63, 74, 90, 39, 48, 50, 76, 43, 72, 69, 89, 90, 48, 56, 69, 92, 54, 55, 71, 64, 52, 52, 68, 73, 50, 66, 61, 78, 113, 58, 67, 51, 66, 79, 80, 51, 55, 67, 90, 59, 67, 58, 59, 67, 76, 65, 48, 73, 62, 36, 48, 82, 68, 71, 57, 60, 68, 60, 69, 62, 48, 75, 61, 50, 52, 65, 66, 51, 58, 57, 63, 91, 59, 48, 79, 60, 63, 73, 69, 68, 55, 60, 57, 61, 63, 55, 49, 53, 59, 47, 56, 62, 70, 80, 61, 61, 65, 83, 76, 62, 39, 59, 62, 73, 78, 71, 57, 74, 65, 82, 74, 62, 50, 59, 67, 62, 116, 49, 61, 60, 82, 68, 94, 64, 46, 51, 65, 72, 124, 73, 72, 67, 58, 53, 81, 65, 82, 78, 52, 71, 58, 71, 74, 62, 57, 86, 85, 59, 73, 102, 62, 54, 73, 71, 111, 55, 65, 61, 63, 74, 46, 62, 57, 67, 65, 62, 77, 38, 45, 58, 78, 81, 51, 81, 58, 77, 82, 61, 58, 61, 62, 65, 93, 82, 70, 59, 46, 67, 60, 52, 87, 81, 87, 59, 79, 51, 48, 49, 72, 57, 54, 84, 44, 75, 84, 75, 46, 61, 59, 53, 58, 88, 71, 64, 49, 56, 60, 78, 66, 76, 63, 50, 63, 62, 52, 73, 66, 69, 66, 56, 52, 47, 80, 102, 87, 63, 59, 67, 66, 77, 69, 59, 67, 67, 65, 81, 74, 50, 75, 63, 58, 123, 56, 88, 67, 62, 53, 68, 66, 70, 61, 60, 69, 58, 58, 60, 70, 87, 67, 76, 49, 59, 54, 93, 66, 64, 55, 81, 73, 58, 100, 61, 64, 35, 102, 57, 59, 66, 64, 66, 39, 75, 45, 65, 53, 57, 58, 62, 43, 59, 83, 57, 60, 66, 67, 75, 31, 50, 65, 52, 85, 68, 93, 108, 66, 62, 62, 56, 77, 84, 61, 57, 78, 62, 62, 79, 50, 57, 54, 51, 73, 81, 80, 72, 65, 91, 75, 73, 79, 69, 64, 65, 72, 63, 94, 77, 67, 74, 61, 61, 71, 42, 72, 79, 69, 58, 62, 63, 56, 61, 59, 66, 85, 58, 51, 60, 51, 53, 60, 65, 47, 46, 61, 62, 62, 76, 60, 84, 54, 64, 
62, 66, 55, 77, 54, 68, 63, 99, 62, 76, 54, 77, 56, 81, 48, 71, 62, 70, 49, 64, 68, 68, 96, 70, 55, 72, 72, 69, 64, 68, 76, 62, 73, 70, 53, 76, 59, 56, 72, 74, 69, 63, 63, 121, 71, 89, 71, 65, 57, 71, 66, 68, 63, 49, 65, 59, 45, 63, 36, 60, 50, 54, 54, 61, 61, 50, 54, 60, 74, 75, 56, 56, 63, 64, 67, 57, 72, 72, 84, 62, 51, 44, 66, 77, 66, 69, 79, 58, 61, 102, 68, 63, 71, 55, 67, 76, 68, 62, 66, 66, 76, 70, 65, 55, 61, 77, 80, 91, 59, 94, 46, 79, 72, 60, 54, 61, 68, 75, 63, 71, 70, 61, 64, 49, 78, 52, 58, 44, 60, 84, 70, 63, 66, 59, 71, 69, 64, 48, 66, 76, 41, 81, 64, 61, 79, 59, 85, 65, 51, 70, 95, 72, 65, 52, 55, 44, 74, 52, 99, 69, 68, 57, 77, 87, 55, 60, 67, 60, 67, 74, 52, 60, 79, 72, 49, 77, 54, 69, 53, 66, 70, 50, 67, 84, 71, 68, 62, 71, 63, 85, 88, 101, 51, 58, 60, 51, 66, 54, 52, 65, 36, 51, 58, 69, 78, 63, 75, 57, 62, 63, 67, 74, 68, 59, 88, 68, 61, 72, 52, 62, 94, 79, 69, 54, 29, 80, 94, 59, 59, 61, 71, 76, 55, 73, 51, 62, 70, 66, 58, 62, 70, 66, 56, 75, 75, 61, 64, 61, 118, 83, 63, 70, 93, 67, 68, 59, 63, 60, 67, 40, 73, 61, 62, 74, 81, 54, 55, 61, 124, 71, 56, 84, 70, 59, 58, 88, 76, 52, 62, 60, 51, 61, 55, 59, 55, 70, 65, 50, 69, 55, 61, 57, 75, 58, 74, 47, 63, 49, 56, 85, 36, 77, 82, 113, 76, 82, 50, 75, 80, 69, 76, 72, 48, 66, 61, 51, 50, 58, 66, 79, 71, 57, 56, 63, 73, 58, 60, 62, 67, 61, 55, 50, 50, 76, 68, 66, 75, 64, 61, 66, 61, 60, 69, 88, 46, 65, 54, 66, 71, 33, 68, 57, 52, 80, 91, 101, 61, 63, 67, 85, 91, 44, 67, 66, 61, 58, 53, 57, 47, 74, 91, 40, 55, 54, 72, 73, 65, 68, 57, 83, 64, 72, 77, 75, 69, 87, 70, 74, 71, 49, 52, 81, 68, 51, 63, 54, 49, 98, 57, 64, 61, 71, 59, 63, 88, 73, 64, 72, 68, 57, 76, 73, 94, 78, 66, 57, 94, 72, 59, 60, 59, 63, 61, 66, 53, 67, 59, 38, 69, 63, 83, 80, 55, 62, 65, 55, 89, 75, 59, 83, 65, 57, 67, 58, 62, 73, 56, 57, 73, 79, 54, 61, 58, 61, 96, 68, 65, 56, 67, 53, 70, 87, 62, 72, 68, 81, 76, 43, 53, 61, 67, 57, 112, 53, 67, 52, 58, 59, 63, 64, 75, 57, 66, 67, 60, 53, 60, 78, 63, 63, 52, 66, 48, 85, 55, 55, 63, 74, 71, 78, 34, 105, 80, 78, 53, 58, 65, 60, 62, 60, 53, 61, 58, 77, 57, 81, 52, 36, 66, 77, 48, 64, 51, 88, 70, 59, 52, 64, 50, 63, 88, 49, 71, 63, 67, 65, 58, 54, 58, 60, 71, 73, 59, 81, 62, 56, 66, 71, 54, 77, 46, 77, 55, 73, 62, 41, 55, 63, 68, 62, 69, 56, 70, 62, 93, 62, 54, 62, 69, 58, 91, 138, 75, 55, 56, 85, 72, 65, 63, 54, 58, 70, 64, 57, 74, 77, 68, 60, 71, 75, 99, 53, 60, 61, 55, 63, 67, 55, 58, 60, 83, 80, 56, 61, 63, 62, 97, 66, 105, 72, 67, 63, 69, 57, 67, 74, 71, 65, 63, 80, 76, 62, 49, 71, 56, 56, 52, 50, 81, 68, 66, 77, 73, 55, 83, 56, 48, 58, 152, 58, 56, 62, 79, 72, 70, 43, 66, 82, 59, 70, 46, 72, 48, 55, 77, 51, 106, 56, 65, 71, 53, 64, 73, 42, 63, 66, 58, 70, 89, 65, 61, 49, 52, 75, 87, 49, 74, 59, 72, 59, 81, 62, 61, 53, 93, 61, 66, 83, 47, 56, 68, 83, 67, 69, 87, 39, 62, 67, 74, 83, 60, 52, 76, 50, 104, 42, 68, 59, 64, 72, 60, 69, 63, 85, 63, 71, 56, 80, 57, 78, 72, 60, 75, 58, 50, 64, 65, 55, 50, 105, 67, 63, 79, 39, 83, 46, 63, 57, 63, 72, 72, 73, 60, 54, 73, 46, 75, 82, 72, 61, 83, 88, 55, 58, 85, 70, 74, 65, 82, 66, 79, 56, 67, 56, 66, 78, 45, 61, 62, 63, 49, 62, 57, 82, 93, 71, 45, 71, 54, 70, 71, 67, 63, 86, 55, 40, 61, 50, 56, 80, 77, 51, 39, 68, 55, 65, 52, 56, 82, 89, 60, 50, 58, 57, 63, 71, 50, 48, 56, 68, 77, 48, 64, 62, 71, 59, 75, 67, 58, 68, 54, 65, 65, 59, 70, 63, 83, 68, 69, 65, 72, 75, 55, 62, 40, 75, 61, 71, 68, 55, 57, 87, 55, 75, 61, 57, 66, 56, 78, 88, 66, 68, 131, 65, 69, 70, 53, 68, 68, 80, 84, 72, 67, 56, 53, 81, 60, 67, 77, 86, 63, 81, 73, 87, 64, 48, 80, 63, 69, 70, 85, 57, 58, 71, 
82, 90, 66, 61, 52, 70, 63, 49, 61, 48, 69, 48, 61, 72, 49, 59, 50, 56, 48, 48, 66, 58, 62, 73, 78, 56, 67, 87, 87, 65, 73, 64, 67, 63, 69, 50, 86, 79, 69, 68, 62, 58, 50, 66, 57, 71, 69, 45, 45, 61, 80, 49, 63, 69, 69, 87, 77, 57, 54, 65, 148, 62, 60, 65, 78, 55, 82, 96, 76, 89, 86, 63, 45, 55, 51, 87, 68, 66, 46, 75, 55, 66, 84, 97, 62, 73, 65, 62, 93, 61, 69, 69, 58, 69, 79, 57, 79, 52, 77, 69, 53, 59, 61, 61, 60, 69, 68, 69, 60, 84, 63, 42, 65, 55, 65, 61, 88, 59, 70, 62, 86, 55, 58, 66, 59, 60, 90, 45, 64, 60, 54, 56, 68, 67, 60, 65, 54, 88, 61, 62, 61, 65, 80, 81, 54, 52, 58, 57, 71, 66, 62, 44, 47, 66, 52, 48, 83, 62, 52, 64, 54, 48, 55, 53, 51, 97, 61, 58, 76, 75, 68, 64, 62, 105, 55, 44, 61, 55, 88, 87, 59, 57, 93, 73, 48, 70, 74, 73, 83, 61, 81, 49, 64, 53, 51, 64, 58, 61, 66, 53, 79, 61, 66, 56, 51, 56, 80, 82, 70, 41, 79, 71, 44, 115, 75, 69, 58, 58, 74, 71, 68, 67, 60, 49, 63, 76, 64, 81, 68, 47, 52, 48, 56, 50, 66, 61, 62, 67, 63, 70, 90, 64, 78, 71, 73, 57, 72, 53, 78, 57, 65, 84, 74, 85, 70, 55, 84, 69, 64, 77, 62, 72, 79, 57, 47, 92, 62, 66, 50, 46, 86, 88, 57, 62, 57, 73, 48, 55, 50, 77, 57, 56, 82, 66, 78, 56, 74, 68, 43, 53, 68, 63, 73, 65, 62, 64, 71, 54, 51, 68, 72, 69, 52, 75, 65, 98, 93, 75, 59, 31, 75, 59, 47, 71, 60, 60, 70, 117, 65, 72, 74, 147, 98, 54, 76, 58, 65, 84, 66, 60, 58, 82, 64, 72, 61, 83, 84, 73, 54, 48, 59, 69, 83, 61, 61, 76, 64, 100, 63, 69, 73, 60, 58, 42, 61, 61, 64, 77, 63, 65, 56, 83, 47, 61, 61, 90, 53, 77, 54, 60, 59, 71, 73, 67, 69, 66, 63, 72, 78, 37, 91, 69, 67, 53, 58, 86, 77, 49, 69, 82, 51, 50, 51, 79, 74, 65, 62, 54, 58, 60, 44, 105, 61, 72, 73, 43, 71, 91, 58, 55, 64, 60, 82, 80, 59, 70, 98, 63, 54, 66, 64, 55, 83, 66, 68, 66, 77, 58, 60, 76, 52, 73, 68, 63, 74, 64, 70, 60, 66, 58, 57, 65, 73, 79, 68, 67, 63, 64, 103, 58, 79, 51, 51, 56, 57, 46, 66, 77, 61, 62, 56, 66, 55, 61, 102, 51, 64, 52, 71, 58, 53, 49, 69, 56, 67, 56, 91, 80, 69, 51, 78, 68, 54, 50, 68, 78, 73, 70, 62, 157, 57, 70, 64, 65, 84, 77, 75, 66, 57, 51, 58, 59, 48, 71, 74, 51, 71, 67, 76, 58, 86, 82, 62, 58, 65, 70, 65, 82, 58, 64, 58, 67, 66, 80, 41, 64, 87, 75, 89, 38, 67, 90, 49, 71, 49, 54, 79, 56, 77, 73, 67, 57, 71, 59, 57, 70, 67, 54, 56, 90, 73, 64, 82, 66, 65, 63, 72, 65, 69, 69, 82, 68, 74, 49, 69, 56, 112, 54, 60, 48, 49, 53, 74, 68, 45, 53, 63, 69, 83, 64, 74, 64, 61, 58, 61, 59, 63, 64, 53, 63, 51, 47, 74, 69, 66, 62, 67, 65, 69, 50, 76, 85, 46, 70, 52, 48, 63, 56, 52, 71, 76, 81, 55, 60, 74, 64, 57, 55, 64, 54, 60, 100, 76, 75, 65, 69, 64, 62, 67, 77, 80, 74, 55, 54, 42, 64, 62, 93, 57, 46, 61, 82, 87, 72, 67, 74, 55, 59, 46, 64, 53, 72, 76, 68, 81, 70, 114, 70, 62, 74, 56, 63, 49, 67, 59, 51, 67, 74, 71, 58, 61, 55, 66, 60, 73, 54, 46, 54, 61, 101, 67, 55, 80, 65, 92, 68, 67, 65, 77, 58, 69, 63, 57, 74, 98, 51, 51, 69, 79, 83, 52, 72, 75, 76, 47, 48, 74, 53, 71, 65, 62, 51, 81, 80, 109, 64, 70, 49, 69, 76, 64, 62, 92, 62, 54, 69, 72, 72, 53, 63, 75, 69, 49, 85, 75, 57, 63, 65, 101, 69, 59, 71, 50, 54, 65, 67, 78, 82, 61, 58, 57, 55, 85, 67, 63, 67, 54, 89, 68, 65, 78, 77, 102, 58, 77, 69, 63, 48, 77, 82, 52, 69, 59, 58, 68, 89, 54, 60, 52, 55, 53, 40, 77, 70, 56, 43, 52, 60, 56, 74, 60, 82, 51, 68, 52, 66, 62, 78, 53, 66, 69, 72, 74, 57, 69, 84, 63, 92, 61, 59, 46, 53, 86, 54, 53, 54, 100, 46, 103, 58, 60, 70, 55, 52, 74, 80, 51, 79, 50, 63, 66, 69, 53, 74, 112, 78, 63, 51, 67, 80, 55, 70, 62, 72, 115, 56, 49, 76, 68, 54, 53, 76, 58, 69, 72, 48, 62, 60, 87, 85, 64, 53, 65, 69, 66, 69, 72, 73, 44, 101, 60, 97, 75, 62, 66, 59, 60, 48, 56, 76, 62, 83, 60, 
63, 56, 55, 94, 112, 44, 52, 57, 72, 44, 74, 94, 72, 54, 52, 54, 66, 64, 71, 57, 60, 73, 65, 72, 72, 68, 74, 63, 62, 78, 90, 59, 60, 72, 79, 51, 61, 73, 57, 67, 54, 62, 51, 76, 91, 93, 76, 96, 62, 67, 44, 55, 70, 64, 57, 76, 94, 64, 58, 60, 66, 52, 59, 61, 59, 58, 64, 51, 81, 56, 93, 68, 65, 62, 87, 51, 62, 75, 66, 60, 54, 65, 70, 79, 67, 55, 73, 63, 58, 40, 75, 60, 87, 57, 56, 73, 63, 73, 67, 55, 70, 49, 62, 65, 61, 68, 106, 51, 63, 173, 66, 61, 54, 67, 86, 65, 64, 60, 53, 57, 69, 48, 82, 57, 66, 76, 60, 52, 67, 67, 82, 90, 62, 77, 54, 66, 36, 87, 56, 49, 58, 52, 52, 56, 58, 89, 70, 81, 59, 58, 59, 52, 85, 66, 57, 69, 68, 70, 59, 66, 63, 81, 31, 60, 54, 63, 102, 69, 71, 55, 69, 72, 69, 92, 53, 73, 68, 62, 53, 55, 55, 69, 49, 57, 66, 44, 54, 69, 55, 75, 49, 71, 67, 72, 63, 75, 58, 55, 53, 64, 72, 117, 46, 60, 53, 66, 73, 38, 70, 65, 56, 72, 64, 54, 65, 62, 65, 44, 74, 57, 50, 69, 67, 81, 87, 68, 51, 60, 51, 58, 70, 66, 52, 69, 57, 110, 90, 70, 46, 77, 55, 70, 77, 59, 70, 52, 58, 50, 70, 64, 61, 74, 62, 89, 68, 90, 62, 108, 74, 53, 60, 98, 58, 52, 65, 66, 73, 63, 95, 57, 48, 71, 53, 87, 67, 50, 66, 74, 84, 48, 61, 82, 61, 87, 63, 61, 74, 60, 55, 69, 53, 59, 48, 51, 71, 61, 32, 59, 62, 48, 56, 97, 61, 59, 88, 51, 44, 80, 48, 66, 61, 66, 65, 47, 66, 68, 56, 65, 54, 62, 96, 51, 74, 67, 70, 67, 73, 77, 93, 95, 77, 61, 70, 62, 62, 59, 59, 58, 50, 39, 51, 94, 62, 78, 73, 69, 67, 91, 59, 59, 76, 72, 47, 72, 63, 58, 81, 34, 61, 65, 79, 65, 58, 52, 48, 59, 63, 59, 68, 70, 50, 56, 67, 71, 59, 63, 85, 59, 79, 58, 74, 113, 57, 65, 65, 59, 65, 77, 44, 54, 71, 66, 56, 64, 55, 49, 56, 90, 83, 46, 77, 68, 88, 79, 75, 59, 91, 57, 30, 70, 60, 59, 78, 79, 58, 68, 61, 86, 65, 61, 69, 73, 45, 77, 77, 74, 64, 69, 67, 57, 62, 45, 73, 67, 73, 62, 66, 54, 60, 60, 72, 73, 58, 61, 72, 61, 99, 46, 93, 47, 61, 55, 62, 64, 69, 48, 70, 79, 101, 87, 35, 69, 64, 75, 51, 70, 70, 81, 93, 51, 54, 72, 62, 60, 58, 85, 71, 64, 67, 94, 45, 68, 53, 73, 70, 60, 38, 76, 63, 75, 63, 64, 67, 67, 86, 65, 60, 69, 62, 67, 60, 76, 80, 52, 91, 67, 85, 118, 67, 53, 57, 72, 70, 64, 52, 69, 60, 74, 45, 39, 56, 60, 63, 96, 72, 59, 58, 88, 60, 50, 51, 60, 60, 58, 99, 54, 63, 83, 75, 52, 60, 65, 71, 47, 65, 89, 48, 67, 80, 90, 54, 47, 66, 59, 47, 64, 64, 65, 58, 50, 61, 51, 63, 81, 64, 56, 68, 64, 59, 55, 58, 75, 58, 72, 48, 49, 63, 82, 64, 56, 56, 58, 40, 65, 66, 78, 82, 58, 58, 70, 49, 72, 66, 63, 67, 55, 63, 77, 71, 60, 67, 76, 76, 69, 71, 90, 46, 66, 87, 77, 58, 83, 37, 72, 71, 72, 38, 71, 87, 89, 63, 72, 84, 56, 43, 56, 60, 71, 84, 61, 60, 59, 71, 68, 62, 68, 75, 55, 69, 55, 72, 60, 65, 69, 87, 76, 66, 63, 85, 64, 45, 61, 84, 73, 62, 68, 50, 48, 56, 90, 64, 53, 63, 75, 74, 60, 68, 61, 68, 55, 62, 57, 57, 54, 62, 56, 56, 61, 63, 68, 55, 61, 67, 67, 47, 57, 79, 65, 67, 63, 63, 85, 74, 72, 77, 57, 55, 75, 65, 63, 86, 54, 72, 63, 59, 64, 55, 102, 70, 76, 69, 55, 49, 64, 67, 59, 57, 58, 79, 60, 46, 65, 48, 69, 49, 60, 63, 80, 83, 71, 63, 99, 73, 75, 55, 51, 96, 98, 66, 67, 76, 78, 69, 53, 68, 61, 56, 63, 59, 63, 69, 67, 73, 88, 73, 53, 60, 59, 63, 64, 46, 65, 158, 51, 56, 75, 55, 66, 89, 54, 73, 101, 50, 56, 71, 61, 68, 112, 85, 70, 60, 79, 61, 72, 56, 62, 63, 55, 61, 75, 54, 46, 87, 70, 54, 64, 63, 75, 72, 70, 69, 72, 69, 57, 59, 54, 45, 89, 83, 65, 41, 85, 79, 68, 62, 60, 69, 40, 41, 60, 80, 48, 52, 53, 77, 64, 59, 66, 65, 88, 78, 61, 49, 50, 92, 76, 66, 67, 54, 77, 66, 61, 69, 68, 49, 56, 82, 56, 57, 68, 63, 82, 51, 52, 59, 66, 67, 63, 60, 61, 83, 44, 57, 63, 63, 67, 90, 93, 66, 84, 64, 87, 73, 47, 64, 78, 63, 56, 68, 70, 95, 63, 57, 58, 51, 
69, 86, 61, 61, 76, 54, 77, 50, 69, 53, 80, 64, 64, 52, 71, 61, 116, 51, 97, 72, 60, 50, 56, 38, 77, 75, 74, 78, 54, 54, 104, 86, 76, 41, 65, 56, 66, 78, 74, 70, 72, 47, 88, 61, 52, 85, 49, 60, 63, 75, 67, 54, 90, 66, 76, 44, 64, 52, 85, 91, 62, 69, 113, 93, 56, 51, 66, 72, 78, 50, 59, 73, 61, 67, 57, 65, 121, 81, 50, 43, 54, 67, 64, 65, 68, 53, 66, 102, 61, 45, 66, 72, 101, 78, 67, 55, 60, 65, 51, 68, 71, 51, 90, 56, 53, 95, 61, 76, 78, 41, 56, 57, 79, 62, 67, 64, 96, 60, 77, 48, 57, 81, 64, 55, 76, 51, 73, 67, 64, 64, 46, 96, 80, 58, 85, 59, 62, 50, 72, 60, 69, 66, 75, 90, 73, 55, 60, 89, 79, 41, 80, 59, 67, 49, 61, 89, 57, 53, 68, 66, 68, 55, 65, 68, 56, 64, 59, 62, 56, 55, 72, 50, 85, 55, 85, 44, 52, 49, 57, 73, 67, 54, 62, 56, 56, 74, 72, 61, 63, 46, 79, 42, 84, 43, 63, 56, 76, 52, 98, 54, 60, 51, 53, 62, 80, 69, 60, 146, 74, 90, 55, 42, 65, 82, 59, 79, 51, 105, 65, 46, 56, 63, 66, 60, 55, 71, 48, 53, 70, 48, 95, 70, 70, 67, 78, 60, 67, 66, 44, 75, 91, 63, 59, 49, 56, 68, 75, 48, 70, 52, 62, 53, 53, 64, 85, 63, 120, 50, 52, 71, 64, 41, 66, 57, 48, 65, 68, 56, 73, 49, 60, 60, 98, 51, 60, 77, 59, 73, 52, 82, 66, 58, 70, 90, 92, 88, 66, 45, 94, 89, 111, 49, 81, 75, 57, 84, 39, 69, 62, 49, 41, 68, 49, 50, 65, 71, 56, 88, 78, 55, 67, 53, 52, 68, 55, 57, 58, 56, 63, 40, 37, 43, 56, 60, 36, 58, 57, 56, 55, 65, 68, 39, 56, 80, 91, 49, 42, 89, 79, 92, 57, 72, 102, 63, 71, 54, 73, 81, 68, 46, 44, 49, 75, 53, 55, 66, 72, 47, 82, 103, 57, 58, 58, 68, 63, 67, 64, 81, 92, 62, 90, 73, 69, 41, 66, 74, 81, 60, 77, 61, 53, 77, 84, 72, 102, 81, 50, 60, 61, 65, 56, 56, 62, 81, 78, 107, 57, 101, 60, 51, 64, 161, 59, 55, 62, 68, 57, 64, 68, 68, 86, 74, 63, 73, 54, 53, 63, 70, 67, 40, 68, 75, 56, 64, 74, 93, 68, 70, 63, 75, 57, 58, 58, 60, 71, 61, 56, 65, 60, 42, 70, 53, 73, 58, 66, 72, 73, 97, 62, 53, 54, 57, 93, 85, 57, 72, 69, 53, 58, 55, 44, 43, 54, 67, 69, 57, 70, 51, 69, 77, 59, 64, 60, 54, 56, 52, 45, 54, 57, 73, 74, 71, 51, 46, 68, 63, 110, 40, 63, 49, 58, 63, 48, 59, 49, 50, 86, 51, 75, 54, 65, 73, 65, 63, 74, 65, 72, 69, 67, 49, 51, 68, 101, 56, 54, 51, 66, 35, 94, 76, 62, 90, 58, 46, 57, 52, 66, 64, 81, 72, 70, 88, 76, 63, 51, 60, 85, 71, 76, 71, 51, 90, 63, 65, 52, 65, 67, 70, 55, 54, 50, 53, 58, 64, 56, 63, 77, 58, 69, 59, 51, 70, 89, 42, 68, 73, 66, 57, 80, 48, 89, 67, 57, 77, 84, 66, 51, 56, 72, 69, 73, 56, 59, 53, 54, 47, 55, 65, 63, 88, 60, 49, 86, 63, 69, 53, 62, 53, 75, 53, 68, 57, 64, 83, 66, 56, 58, 69, 62, 54, 66, 59, 75, 75, 49, 64, 54, 46, 60, 74, 57, 86, 80, 77, 56, 62, 47, 46, 69, 65, 58, 53, 60, 82, 66, 66, 62, 64, 72, 62, 52, 57, 63, 57, 90, 60, 111, 52, 65, 41, 59, 66, 54, 68, 87, 66, 76, 74, 57, 64, 66, 66, 59, 60, 92, 46, 51, 53, 64, 73, 66, 58, 58, 48, 45, 71, 52, 54, 47, 55, 65, 76, 63, 87, 61, 51, 53, 61, 61, 66, 74, 75, 36, 52, 58, 64, 73, 47, 66, 74, 71, 55, 65, 54, 50, 47, 68, 60, 66, 39, 74, 68, 63, 50, 62, 61, 81, 70, 54, 65, 66, 62, 85, 69, 57, 77, 55, 85, 61, 75, 79, 115, 56, 57, 47, 53, 70, 70, 75, 62, 53, 75, 57, 66, 80, 80, 59, 35, 57, 66, 72, 63, 63, 62, 57, 61, 47, 70, 48, 66, 76, 79, 42, 55, 75, 56, 46, 78, 76, 82, 89, 61, 51, 58, 57, 75, 72, 56, 101, 63, 71, 60, 77, 62, 61, 67, 43, 67, 84, 119, 74, 66, 75, 101, 74, 69, 44, 66, 63, 71, 55, 84, 69, 80, 51, 64, 68, 74, 70, 69, 62, 63, 65, 66, 74, 89, 56, 72, 58, 55, 67, 74, 71, 43, 54, 57, 77, 54, 68, 77, 84, 58, 90, 63, 55, 71, 59, 59, 52, 60, 54, 49, 66, 55, 75, 80, 47, 88, 83, 70, 60, 49, 52, 65, 74, 48, 67, 63, 87, 71, 69, 61, 72, 59, 61, 51, 72, 67, 115, 57, 68, 70, 59, 58, 75, 57, 76, 58, 125, 84, 66, 56, 
78, 76, 46, 82, 81, 67, 66, 67, 79, 68, 68, 52, 70, 59, 66, 84, 55, 64, 55, 66, 69, 49, 63, 50, 49, 59, 55, 51, 69, 92, 73, 80, 58, 52, 83, 90, 67, 63, 68, 77, 52, 74, 102, 50, 70, 70, 61, 55, 58, 77, 66, 55, 79, 51, 66, 65, 62, 67, 73, 92, 76, 73, 62, 88, 118, 63, 59, 78, 70, 45, 91, 61, 64, 59, 60, 63, 78, 51, 66, 64, 77, 77, 55, 57, 54, 60, 41, 86, 60, 39, 109, 63, 41, 66, 77, 74, 78, 49, 70, 64, 51, 84, 72, 47, 68, 70, 68, 76, 76, 82, 68, 45, 85, 61, 76, 65, 94, 54, 116, 87, 58, 43, 71, 66, 64, 54, 73, 77, 92, 69, 68, 75, 61, 45, 61, 69, 56, 71, 56, 82, 93, 83, 49, 68, 54, 44, 104, 57, 52, 80, 51, 54, 48, 122, 44, 71, 85, 82, 69, 52, 57, 50, 69, 52, 68, 49, 33, 66, 63, 66, 56, 64, 59, 80, 59, 61, 68, 75, 56, 48, 57, 77, 80, 60, 82, 57, 49, 70, 70, 60, 55, 59, 62, 71, 71, 65, 62, 65, 78, 81, 50, 66, 59, 72, 58, 76, 50, 59, 82, 61, 70, 72, 55, 51, 70, 79, 49, 60, 58, 73, 78, 81, 73, 77, 73, 73, 128, 75, 67, 66, 48, 56, 77, 56, 58, 137, 78, 67, 58, 67, 52, 53, 69, 55, 69, 35, 64, 61, 71, 69, 135, 53, 66, 77, 65, 67, 61, 56, 62, 57, 58, 61, 72, 68, 61, 39, 67, 59, 82, 64, 54, 66, 71, 62, 62, 49, 38, 57, 58, 83, 68, 98, 57, 81, 49, 74, 77, 79, 84, 72, 47, 60, 92, 63, 64, 68, 46, 76, 77, 55, 72, 50, 86, 59, 48, 72, 68, 79, 62, 57, 89, 65, 59, 77, 92, 65, 85, 63, 68, 99, 57, 57, 57, 71, 58, 78, 89, 52, 65, 89, 43, 56, 77, 71, 71, 63, 71, 56, 71, 57, 65, 74, 66, 71, 58, 59, 45, 64, 65, 58, 68, 98, 116, 63, 60, 58, 63, 75, 70, 51, 75, 48, 68, 78, 61, 70, 59, 44, 49, 47, 70, 63, 55, 56, 94, 87, 57, 61, 70, 70, 75, 81, 58, 82, 80, 49, 110, 42, 74, 79, 62, 70, 74, 68, 50, 71, 53, 45, 75, 46, 87, 73, 43, 84, 67, 57, 77, 45, 66, 71, 50, 50, 78, 58, 109, 71, 62, 39, 47, 53, 60, 82, 95, 57, 39, 60, 95, 71, 64, 61, 60, 57, 71, 47, 66, 68, 66, 42, 77, 149, 60, 63, 61, 65, 78, 46, 60, 74, 76, 69, 63, 73, 51, 61, 65, 76, 68, 79, 46, 54, 55, 54, 52, 61, 85, 60, 61, 61, 77, 64, 46, 68, 60, 88, 57, 71, 53, 81, 51, 53, 65, 65, 66, 72, 48, 138, 48, 64, 61, 79, 50, 68, 45, 78, 93, 97, 62, 47, 68, 66, 61, 61, 60, 56, 71, 45, 60, 65, 93, 52, 67, 59, 54, 54, 83, 66, 67, 67, 73, 54, 59, 80, 69, 73, 63, 69, 66, 78, 87, 76, 47, 48, 68, 75, 83, 49, 80, 65, 68, 74, 64, 44, 66, 48, 89, 66, 84, 55, 87, 67, 62, 62, 44, 62, 52, 51, 80, 85, 69, 49, 61, 60, 55, 47, 51, 90, 74, 127, 63, 85, 90, 56, 55, 77, 67, 72, 40, 65, 70, 104, 67, 63, 60, 73, 52, 61, 54, 49, 100, 65, 62, 72, 64, 48, 55, 83, 67, 57, 68, 52, 81, 69, 85, 59, 70, 74, 75, 55, 61, 56, 61, 91, 78, 64, 66, 71, 68, 76, 84, 62, 91, 60, 88, 67, 69, 65, 94, 87, 63, 53, 64, 56, 58, 75, 50, 73, 62, 59, 50, 85, 73, 52, 63, 87, 48, 84, 49, 80, 59, 81, 92, 56, 68, 61, 94, 61, 37, 75, 49, 85, 73, 67, 85, 52, 80, 54, 60, 52, 69, 51, 68, 65, 86, 72, 57, 54, 59, 57, 68, 77, 77, 56, 73, 78, 61, 67, 71, 66, 101, 83, 51, 68, 42, 60, 53, 77, 60, 65, 49, 62, 75, 59, 65, 37, 63, 83, 66, 63, 67, 61, 56, 49, 68, 61, 70, 56, 33, 65, 98, 43, 74, 103, 65, 52, 78, 63, 75, 89, 39, 102, 71, 57, 57, 46, 85, 65, 67, 76, 88, 48, 90, 42, 47, 80, 45, 73, 64, 51, 73, 53, 69, 60, 72, 65, 82, 71, 73, 52, 52, 80, 75, 78, 34, 66, 65, 65, 65, 61, 67, 84, 82, 84, 45, 70, 64, 53, 70, 75, 92, 55, 74, 84, 75, 75, 72, 63, 93, 67, 44, 42, 64, 53, 63, 70, 58, 60, 60, 73, 82, 76, 85, 75, 59, 58, 51, 54, 51, 76, 55, 83, 64, 50, 57, 70, 94, 82, 94, 85, 72, 62, 66, 92, 48, 53, 70, 40, 72, 66, 73, 42, 67, 62, 75, 62, 63, 58, 67, 65, 69, 65, 70, 51, 114, 98, 51, 61, 56, 51, 93, 69, 54, 62, 63, 74, 69, 88, 65, 52, 61, 56, 59, 52, 79, 63, 65, 49, 76, 72, 81, 74, 79, 46, 83, 73, 56, 77, 64, 64, 81, 57, 90, 54, 69, 
50, 55, 81, 44, 61, 47, 60, 56, 60, 111, 62, 98, 53, 53, 88, 69, 82, 52, 66, 48, 74, 72, 98, 49, 38, 55, 72, 56, 74, 106, 61, 55, 96, 63, 49, 53, 65, 56, 98, 55, 78, 90, 67, 40, 61, 30, 57, 67, 93, 73, 77, 55, 82, 75, 73, 113, 88, 48, 72, 63, 74, 75, 54, 73, 49, 59, 57, 49, 69, 48, 85, 87, 84, 74, 63, 66, 77, 51, 91, 84, 78, 62, 61, 61, 72, 56, 73, 56, 61, 77, 69, 66, 80, 69, 60, 69, 75, 64, 50, 48, 39, 80, 79, 86, 71, 51, 48, 75, 56, 43, 64, 68, 68, 55, 37, 66, 69, 60, 59, 52, 78, 71, 59, 53, 69, 72, 58, 60, 68, 72, 93, 49, 48, 65, 70, 83, 60, 77, 75, 28, 80, 74, 69, 58, 87, 92, 51, 57, 74, 62, 44, 57, 44, 54, 50, 63, 57, 88, 96, 58, 73, 74, 54, 56, 169, 69, 59, 50, 44, 72, 47, 65, 78, 46, 59, 62, 70, 60, 51, 64, 75, 76, 100, 58, 51, 55, 69, 69, 60, 50, 59, 81, 59, 95, 60, 69, 62, 84, 71, 64, 44, 70, 39, 53, 62, 39, 52, 93, 67, 69, 73, 64, 59, 75, 81, 75, 56, 92, 51, 78, 54, 60, 78, 81, 55, 59, 88, 57, 87, 56, 60, 66, 71, 69, 68, 77, 65, 69, 78, 74, 76, 87, 74, 62, 58, 87, 93, 58, 55, 89, 40, 51, 53, 74, 47, 47, 61, 56, 84, 73, 77, 44, 77, 77, 69, 47, 50, 73, 64, 50, 60, 42, 45, 68, 74, 85, 60, 86, 83, 63, 79, 71, 39, 64, 88, 59, 66, 64, 80, 82, 81, 63, 66, 56, 58, 61, 71, 51, 49, 87, 79, 32, 51, 58, 60, 54, 51, 70, 61, 73, 75, 68, 67, 56, 62, 62, 76, 80, 80, 71, 51, 84, 62, 85, 73, 68, 50, 59, 70, 87, 77, 64, 71, 77, 52, 62, 65, 76, 57, 56, 62, 64, 41, 68, 47, 57, 77, 70, 59, 66, 55, 55, 57, 92, 60, 66, 50, 55, 71, 53, 55, 57, 58, 46, 74, 52, 60, 62, 72, 60, 59, 77, 50, 76, 59, 57, 57, 62, 49, 58, 55, 76, 52, 71, 107, 88, 71, 83, 55, 47, 57, 73, 96, 50, 29, 110, 67, 33, 60, 60, 66, 78, 60, 75, 59, 79, 72, 55, 72, 59, 88, 67, 76, 121, 64, 63, 57, 61, 53, 63, 43, 41, 76, 71, 53, 54, 71, 69, 59, 81, 71, 68, 50, 71, 82, 61, 66, 63, 71, 100, 66, 46, 51, 52, 77, 77, 68, 81, 67, 67, 63, 84, 65, 70, 66, 64, 47, 51, 64, 68, 93, 67, 56, 60, 62, 63, 47, 63, 83, 60, 66, 58, 72, 61, 83, 73, 48, 49, 84, 62, 64, 68, 51, 97, 96, 61, 67, 95, 56, 69, 63, 57, 58, 65, 57, 90, 55, 71, 65, 69, 62, 69, 93, 61, 67, 66, 74, 54, 49, 77, 60, 88, 53, 69, 65, 66, 66, 50, 64, 63, 50, 53, 90, 51, 69, 84, 74, 60, 71, 51, 60, 73, 50, 74, 111, 53, 60, 61, 74, 86, 54, 81, 66, 43, 52, 52, 67, 44, 57, 59, 80, 62, 44, 73, 38, 65, 58, 74, 69, 72, 89, 135, 49, 64, 73, 51, 81, 104, 60, 61, 64, 70, 70, 86, 75, 46, 56, 62, 67, 106, 57, 69, 51, 55, 58, 93, 52, 62, 57, 73, 84, 78, 45, 67, 58, 59, 62, 77, 61, 87, 53, 60, 97, 45, 55, 90, 72, 66, 84, 50, 55, 69, 86, 58, 64, 61, 78, 61, 85, 90, 48, 62, 82, 82, 56, 75, 67, 58, 65, 57, 84, 85, 54, 47, 66, 66, 62, 66, 124, 53, 74, 57, 62, 76, 66, 55, 59, 54, 57, 62, 64, 78, 54, 83, 82, 80, 44, 117, 69, 77, 76, 60, 65, 69, 67, 53, 78, 52, 59, 60, 53, 71, 56, 72, 54, 64, 52, 39, 71, 47, 51, 72, 66, 81, 38, 51, 110, 73, 47, 71, 37, 61, 64, 53, 48, 75, 58, 48, 60, 69, 42, 59, 64, 70, 62, 65, 59, 61, 60, 65, 66, 84, 67, 66, 92, 69, 56, 61, 53, 61, 104, 78, 74, 61, 68, 60, 46, 56, 53, 48, 54, 87, 107, 75, 48, 55, 55, 59, 53, 61, 71, 68, 53, 64, 57, 60, 90, 69, 64, 54, 38, 68, 75, 83, 63, 65, 64, 64, 46, 60, 60, 74, 61, 76, 79, 54, 56, 71, 63, 63, 65, 59, 52, 74, 66, 63, 65, 72, 67, 77, 57, 64, 74, 50, 59, 55, 78, 71, 59, 51, 95, 89, 67, 61, 56, 82, 78, 116, 47, 69, 76, 57, 66, 87, 97, 89, 82, 76, 58, 53, 66, 59, 90, 57, 66, 64, 62, 52, 56, 51, 73, 65, 57, 99, 107, 74, 59, 57, 77, 42, 54, 58, 46, 57, 58, 84, 64, 64, 73, 76, 54, 56, 64, 60, 72, 68, 66, 59, 52, 51, 76, 64, 113, 66, 57, 65, 74, 92, 82, 67, 53, 75, 74, 62, 76, 97, 78, 72, 52, 78, 70, 85, 102, 72, 60, 61, 54, 60, 55, 66, 84, 64, 68, 
54, 71, 63, 61, 66, 71, 77, 67, 62, 51, 58, 51, 73, 76, 65, 71, 80, 72, 62, 54, 70, 73, 50, 63, 73, 99, 46, 55, 72, 65, 46, 108, 44, 48, 58, 42, 68, 58, 63, 83, 69, 71, 65, 69, 42, 53, 62, 45, 71, 70, 49, 59, 65, 68, 66, 56, 65, 67, 53, 73, 63, 75, 72, 94, 70, 42, 57, 44, 75, 56, 42, 67, 79, 81, 75, 100, 43, 158, 66, 55, 71, 56, 59, 62, 64, 74, 98, 68, 63, 68, 61, 61, 51, 64, 53, 57, 70, 80, 52, 62, 59, 64, 67, 74, 95, 83, 74, 61, 66, 75, 60, 47, 67, 69, 73, 61, 65, 63, 71, 68, 51, 98, 63, 70, 84, 46, 63, 81, 43, 71, 70, 76, 76, 55, 54, 56, 55, 48, 51, 58, 45, 70, 59, 48, 85, 60, 46, 63, 54, 49, 62, 66, 69, 45, 50, 56, 75, 74, 62, 55, 72, 60, 86, 62, 60, 53, 70, 55, 78, 56, 61, 42, 49, 68, 71, 52, 63, 56, 69, 74, 56, 43, 77, 59, 61, 64, 65, 39, 82, 56, 66, 84, 77, 73, 56, 71, 75, 70, 63, 57, 69, 71, 58, 63, 58, 67, 52, 53, 53, 62, 60, 82, 53, 69, 60, 68, 49, 72, 96, 64, 70, 62, 40, 64, 54, 75, 104, 66, 65, 87, 68, 76, 50, 65, 73, 67, 79, 59, 70, 49, 93, 79, 49, 79, 97, 51, 50, 68, 50, 84, 72, 68, 56, 49, 73, 53, 86, 55, 61, 70, 70, 92, 62, 74, 78, 52, 72, 55, 45, 67, 84, 79, 65, 56, 62, 46, 68, 76, 61, 63, 63, 52, 87, 63, 88, 89, 71, 79, 66, 68, 71, 67, 75, 58, 60, 66, 68, 60, 92, 58, 62, 72, 60, 67, 54, 70, 73, 64, 74, 73, 58, 63, 54, 83, 60, 59, 119, 53, 47, 64, 55, 81, 72, 61, 66, 71, 49, 62, 57, 63, 69, 56, 63, 60, 96, 79, 47, 42, 52, 122, 53, 71, 67, 86, 80, 60, 92, 65, 58, 66, 47, 54, 60, 79, 55, 68, 52, 51, 52, 59, 66, 38, 80, 55, 59, 66, 58, 64, 54, 48, 56, 60, 61, 50, 91, 64, 66, 54, 61, 71, 85, 55, 70, 81, 54, 52, 75, 49, 119, 69, 51, 64, 102, 68, 63, 82, 46, 62, 61, 51, 55, 89, 67, 95, 59, 68, 55, 88, 81, 60, 68, 70, 59, 64, 97, 62, 77, 54, 72, 111, 72, 74, 58, 79, 57, 75, 50, 66, 63, 77, 73, 59, 54, 56, 77, 57, 75, 81, 57, 56, 42, 51, 59, 83, 72, 75, 64, 69, 57, 60, 58, 55, 71, 50, 70, 67, 53, 62, 38, 55, 58, 85, 58, 85, 80, 53, 48, 64, 55, 80, 74, 68, 64, 69, 63, 61, 63, 65, 68, 85, 64, 81, 66, 77, 64, 73, 44, 57, 96, 58, 64, 47, 74, 53, 47, 52, 53, 57, 76, 141, 52, 55, 59, 65, 55, 56, 67, 67, 73, 73, 58, 59, 62, 114, 58, 78, 67, 65, 49, 81, 75, 68, 48, 46, 49, 61, 60, 63, 47, 67, 70, 83, 41, 46, 84, 80, 88, 58, 68, 98, 69, 64, 66, 73, 61, 67, 55, 73, 63, 53, 75, 46, 72, 68, 76, 59, 43, 67, 77, 92, 71, 71, 65, 60, 51, 62, 72, 52, 77, 59, 69, 72, 66, 71, 55, 59, 55, 85, 64, 51, 41, 52, 46, 63, 85, 64, 59, 72, 74, 56, 69, 78, 79, 53, 66, 76, 109, 111, 47, 62, 57, 56, 53, 60, 64, 65, 44, 65, 66, 59, 57, 85, 66, 84, 71, 78, 63, 55, 75, 69, 61, 49, 58, 57, 56, 74, 80, 51, 61, 54, 93, 53, 87, 79, 86, 70, 63, 79, 58, 65, 56, 50, 73, 84, 103, 112, 88, 36, 79, 59, 55, 72, 63, 55, 53, 75, 65, 77, 66, 79, 84, 61, 66, 50, 57, 59, 53, 44, 78, 85, 86, 68, 66, 70, 75, 56, 69, 91, 108, 52, 53, 69, 61, 74, 69, 88, 62, 65, 60, 65, 77, 95, 62, 65, 51, 61, 82, 89, 68, 61, 50, 47, 81, 67, 43, 76, 54, 59, 57, 61, 60, 54, 55, 77, 80, 56, 69, 81, 82, 64, 60, 52, 71, 47, 57, 71, 78, 55, 84, 81, 66, 76, 92, 42, 52, 86, 64, 103, 98, 87, 62, 75, 84, 55, 59, 63, 79, 78, 32, 100, 55, 59, 63, 69, 83, 62, 75, 76, 66, 75, 57, 55, 38, 76, 61, 68, 65, 73, 57, 58, 55, 76, 97, 66, 71, 71, 80, 88, 72, 70, 96, 70, 69, 71, 62, 62, 64, 58, 51, 81, 58, 53, 90, 69, 65, 80, 83, 82, 62, 76, 76, 63, 76, 82, 59, 61, 78, 84, 79, 44, 66, 51, 48, 72, 80, 68, 93, 71, 37, 66, 65, 61, 93, 64, 64, 61, 77, 57, 68, 61, 58, 49, 76, 48, 60, 86, 54, 62, 58, 86, 68, 67, 58, 67, 42, 69, 50, 51, 59, 74, 57, 66, 75, 53, 56, 45, 69, 71, 62, 59, 102, 59, 63, 80, 60, 48, 79, 63, 52, 81, 51, 68, 48, 52, 72, 61, 52, 48, 62, 78, 70, 68, 63, 60, 
79, 65, 55, 75, 58, 78, 46, 49, 74, 65, 63, 64, 92, 86, 64, 51, 42, 65, 119, 50, 61, 75, 72, 82, 69, 83, 63, 57, 63, 40, 63, 84, 62, 79, 65, 74, 71, 73, 93, 56, 71, 82, 44, 94, 49, 69, 54, 53, 43, 63, 71, 65, 63, 73, 85, 60, 51, 74, 69, 66, 61, 55, 54, 55, 70, 50, 54, 80, 51, 66, 90, 57, 52, 65, 51, 69, 45, 50, 67, 83, 74, 103, 59, 53, 69, 60, 71, 100, 66, 61, 53, 72, 53, 98, 69, 69, 75, 60, 92, 58, 47, 55, 65, 70, 70, 47, 70, 57, 51, 57, 56, 61, 70, 61, 63, 57, 58, 54, 90, 59, 57, 78, 76, 63, 68, 71, 61, 44, 59, 62, 101, 78, 58, 64, 59, 49, 68, 56, 72, 52, 48, 41, 60, 70, 57, 58, 61, 80, 85, 63, 65, 38, 62, 79, 65, 67, 86, 58, 50, 54, 55, 71, 47, 65, 75, 49, 84, 73, 80, 68, 60, 72, 77, 59, 53, 50, 73, 58, 66, 63, 64, 65, 63, 70, 85, 63, 53, 63, 105, 106, 69, 93, 82, 62, 62, 66, 52, 72, 58, 49, 62, 75, 58, 82, 63, 79, 33, 64, 60, 60, 58, 72, 52, 68, 67, 67, 56, 60, 64, 68, 53, 113, 61, 67, 67, 78, 87, 96, 64, 71, 70, 68, 65, 49, 43, 52, 71, 71, 48, 93, 78, 56, 77, 74, 70, 60, 56, 64, 32, 90, 57, 62, 66, 72, 48, 61, 46, 82, 67, 85, 83, 63, 63, 57, 39, 59, 63, 64, 68, 57, 64, 62, 54, 54, 57, 70, 65, 53, 68, 63, 54, 58, 74, 74, 57, 56, 58, 56, 58, 35, 66, 70, 68, 69, 72, 69, 67, 71, 41, 81, 57, 95, 56, 85, 74, 85, 53, 88, 61, 49, 86, 76, 77, 55, 58, 73, 49, 41, 53, 51, 84, 61, 58, 50, 79, 63, 62, 51, 56, 71, 52, 79, 64, 66, 61, 90, 51, 61, 58, 95, 49, 98, 51, 50, 39, 82, 68, 63, 65, 66, 77, 60, 53, 68, 54, 61, 67, 79, 56, 50, 74, 71, 66, 55, 68, 47, 70, 73, 56, 46, 71, 86, 75, 45, 83, 54, 70, 57, 59, 73, 179, 78, 58, 72, 50, 79, 51, 47, 54, 75, 58, 59, 40, 50, 51, 78, 60, 81, 65, 80, 63, 60, 93, 70, 50, 53, 74, 71, 85, 66, 52, 63, 87, 54, 74, 47, 57, 68, 67, 58, 67, 75, 63, 72, 56, 77, 88, 74, 81, 71, 57, 91, 55, 178, 80, 98, 67, 134, 71, 59, 68, 62, 54, 94, 57, 66, 55, 73, 70, 65, 63, 77, 65, 56, 66, 80, 33, 70, 78, 68, 76, 49, 80, 89, 72, 52, 57, 83, 63, 66, 65, 61, 93, 76, 64, 80, 60, 55, 43, 45, 69, 75, 81, 72, 59, 45, 66, 39, 75, 55, 61, 89, 89, 62, 97, 53, 47, 54, 71, 68, 91, 67, 77, 45, 45, 61, 66, 73, 66, 40, 109, 80, 60, 31, 71, 55, 118, 107, 65, 90, 72, 67, 83, 75, 108, 33, 60, 81, 51, 63, 105, 68, 64, 58, 60, 73, 68, 73, 71, 65, 63, 83, 82, 88, 62, 82, 59, 58, 67, 63, 67, 56, 58, 57, 70, 58, 71, 69, 56, 47, 61, 43, 60, 64, 70, 75, 48, 48, 84, 74, 80, 71, 59, 59, 61, 62, 65, 50, 63, 50, 59, 60, 54, 65, 63, 67, 84, 72, 75, 63, 76, 48, 57, 47, 90, 62, 69, 70, 86, 65, 54, 65, 55, 57, 80, 75, 61, 66, 77, 45, 73, 89, 67, 45, 81, 59, 46, 51, 63, 91, 51, 62, 62, 73, 80, 61, 68, 58, 57, 70, 79, 73, 83, 71, 67, 66, 86, 84, 76, 123, 76, 52, 78, 79, 80, 49, 81, 63, 55, 74, 63, 104, 56, 60, 47, 82, 47, 70, 75, 73, 83, 103, 70, 85, 80, 66, 70, 69, 58, 58, 91, 57, 63, 93, 75, 59, 71, 46, 68, 49, 57, 50, 74, 88, 63, 68, 74, 39, 44, 79, 54, 65, 85, 124, 57, 33, 88, 63, 83, 48, 72, 69, 64, 62, 63, 117, 103, 74, 49, 42, 69, 47, 80, 83, 57, 49, 80, 64, 74, 57, 58, 47, 61, 78, 46, 52, 59, 51, 58, 75, 80, 60, 62, 83, 66, 62, 113, 69, 74, 62, 68, 59, 49, 63, 62, 59, 60, 68, 62, 42, 57, 79, 45, 53, 60, 69, 43, 53, 94, 55, 56, 64, 55, 51, 50, 76, 70, 68, 54, 74, 63, 77, 72, 73, 62, 82, 67, 86, 85, 63, 62, 108, 63, 79, 72, 48, 77, 60, 68, 70, 64, 51, 64, 51, 49, 71, 91, 85, 71, 128, 78, 61, 66, 79, 50, 106, 76, 55, 81, 68, 74, 68, 71, 45, 94, 37, 85, 65, 89, 59, 87, 64, 70, 72, 81, 67, 74, 98, 61, 56, 123, 68, 65, 63, 52, 50, 112, 48, 90, 71, 61, 56, 53, 76, 94, 52, 64, 78, 49, 62, 73, 47, 60, 62, 61, 58, 47, 64, 57, 58, 40, 67, 70, 72, 70, 57, 59, 45, 53, 81, 70, 49, 96, 56, 79, 66, 96, 117, 53, 67, 60, 
60, 61, 79, 67, 59, 58, 55, 45, 68, 59, 74, 80, 79, 59, 71, 62, 71, 80, 62, 57, 44, 59, 38, 75, 90, 85, 66, 72, 68, 71, 61, 94, 38, 53, 74, 72, 70, 66, 67, 59, 71, 54, 95, 68, 55, 90, 47, 56, 57, 87, 70, 50, 48, 93, 55, 55, 108, 49, 59, 54, 80, 81, 75, 81, 78, 53, 60, 69, 69, 77, 56, 57, 51, 47, 66, 73, 59, 44, 72, 69, 59, 68, 68, 69, 74, 60, 79, 71, 91, 55, 63, 67, 55, 48, 80, 65, 61, 55, 64, 71, 59, 63, 62, 45, 90, 79, 52, 45, 60, 65, 51, 57, 49, 69, 78, 65, 46, 76, 49, 75, 61, 83, 68, 54, 75, 80, 66, 60, 85, 61, 55, 63, 42, 75, 69, 62, 53, 54, 68, 79, 55, 67, 86, 74, 74, 54, 61, 60, 76, 54, 64, 44, 55, 72, 46, 67, 100, 86, 68, 47, 44, 98, 52, 109, 70, 75, 68, 69, 73, 62, 52, 51, 95, 78, 78, 67, 67, 61, 62, 78, 60, 43, 56, 80, 49, 67, 64, 44, 54, 63, 57, 54, 66, 76, 56, 56, 58, 53, 72, 54, 63, 68, 80, 46, 59, 83, 75, 82, 65, 56, 67, 63, 64, 92, 47, 71, 52, 52, 53, 55, 88, 52, 68, 87, 57, 83, 114, 57, 46, 44, 99, 66, 52, 47, 64, 52, 81, 72, 45, 80, 65, 72, 49, 59, 99, 52, 80, 78, 41, 69, 57, 76, 54, 66, 74, 70, 49, 57, 70, 48, 69, 147, 72, 47, 73, 51, 57, 73, 66, 58, 61, 77, 83, 69, 87, 77, 72, 66, 52, 52, 47, 82, 70, 22, 84, 59, 60, 54, 68, 44, 60, 49, 89, 86, 67, 96, 54, 49, 56, 57, 88, 91, 85, 67, 63, 86, 71, 55, 53, 89, 57, 85, 69, 54, 61, 75, 38, 82, 75, 40, 51, 59, 47, 68, 44, 75, 56, 83, 59, 51, 35, 64, 85, 68, 81, 68, 41, 79, 49, 67, 78, 68, 65, 74, 94, 74, 67, 51, 47, 98, 62, 54, 63, 51, 60, 74, 70, 33, 57, 45, 47, 53, 87, 49, 84, 50, 96, 49, 53, 78, 114, 42, 66, 56, 49, 69, 75, 69, 94, 81, 70, 56, 55, 64, 74, 69, 65, 65, 54, 80, 68, 56, 67, 72, 61, 86, 74, 50, 99, 79, 61, 129, 63, 66, 70, 48, 69, 81, 70, 62, 66, 70, 72, 61, 63, 75, 62, 56, 57, 63, 72, 59, 65, 22, 63, 56, 92, 53, 73, 55, 54, 64, 67, 70, 38, 78, 74, 50, 59, 81, 75, 61, 80, 61, 66, 59, 54, 71, 101, 82, 66, 61, 62, 62, 103, 63, 48, 50, 49, 66, 58, 59, 63, 64, 81, 60, 50, 50, 60, 78, 76, 109, 73, 67, 62, 77, 63, 68, 94, 54, 65, 71, 59, 64, 67, 69, 61, 82, 71, 76, 60, 46, 76, 79, 73, 62, 64, 60, 22, 68, 92, 40, 67, 64, 58, 53, 82, 57, 86, 57, 25, 72, 68, 63, 74, 82, 39, 67, 65, 88, 58, 88, 51, 106, 46, 95, 72, 91, 83, 52, 52, 75, 75, 55, 52, 77, 92, 55, 55, 98, 81, 57, 43, 75, 54, 65, 53, 62, 53, 60, 70, 48, 63, 65, 70, 53, 63, 49, 58, 76, 81, 70, 56, 56, 69, 32, 58, 87, 62, 56, 58, 48, 66, 77, 70, 55, 92, 31, 54, 50, 81, 99, 80, 55, 65, 66, 94, 48, 62, 77, 67, 61, 60, 47, 53, 63, 111, 54, 62, 53, 46, 63, 58, 74, 64, 64, 71, 47, 62, 48, 65, 60, 83, 75, 65, 68, 62, 60, 77, 82, 58, 52, 65, 75, 69, 63, 56, 46, 59, 76, 61, 65, 66, 61, 53, 86, 64, 61, 64, 51, 63, 68, 79, 67, 70, 62, 30, 64, 58, 75, 43, 61, 75, 99, 117, 61, 91, 66, 64, 44, 106, 58, 103, 48, 72, 60, 70, 80, 56, 77, 74, 78, 74, 75, 58, 44, 52, 59, 58, 88, 107, 76, 66, 56, 73, 58, 86, 66, 59, 62, 64, 63, 66, 64, 55, 51, 62, 60, 80, 69, 48, 52, 85, 57, 82, 46, 69, 50, 51, 80, 67, 56, 47, 97, 47, 51, 94, 85, 67, 62, 83, 71, 67, 61, 66, 56, 58, 50, 48, 65, 83, 59, 105, 80, 65, 50, 67, 57, 44, 89, 48, 112, 62, 61, 59, 60, 82, 82, 65, 63, 53, 54, 82, 79, 33, 83, 76, 60, 80, 68, 45, 54, 55, 58, 45, 49, 54, 76, 75, 68, 85, 74, 53, 65, 75, 67, 55, 73, 79, 36, 57, 101, 50, 101, 65, 62, 94, 96, 78, 85, 69, 64, 57, 47, 85, 67, 64, 63, 57, 51, 59, 61, 126, 80, 59, 68, 63, 69, 60, 65, 65, 65, 81, 65, 40, 68, 67, 60, 79, 59, 59, 67, 34, 73, 58, 67, 112, 69, 39, 65, 59, 67, 48, 53, 109, 63, 75, 48, 69, 86, 50, 58, 55, 62, 67, 63, 68, 60, 83, 78, 73, 78, 69, 78, 60, 74, 119, 68, 63, 56, 66, 42, 109, 73, 33, 83, 53, 76, 57, 70, 50, 49, 55, 76, 62, 114, 63, 60, 57, 40, 65, 79, 
60, 98, 81, 56, 64, 56, 47, 46, 68, 76, 55, 59, 70, 82, 79, 79, 39, 55, 29, 80, 43, 76, 57, 61, 76, 59, 71, 38, 79, 69, 79, 65, 65, 93, 76, 64, 80, 44, 58, 61, 48, 71, 85, 50, 41, 74, 41, 47, 62, 63, 60, 80, 53, 65, 64, 55, 54, 38, 68, 55, 50, 65, 72, 87, 68, 61, 128, 67, 85, 85, 59, 70, 55, 58, 74, 43, 82, 57, 58, 62, 54, 76, 68, 76, 63, 92, 71, 54, 118, 67, 37, 57, 61, 73, 53, 72, 55, 63, 78, 61, 75, 59, 59, 92, 71, 95, 57, 54, 89, 55, 75, 58, 63, 100, 74, 83, 91, 68, 62, 46, 51, 78, 69, 58, 77, 65, 75, 67, 63, 61, 74, 74, 60, 89, 53, 85, 64, 71, 76, 60, 51, 68, 88, 88, 60, 68, 58, 74, 38, 67, 62, 68, 79, 40, 42, 55, 54, 55, 55, 44, 59, 71, 58, 72, 109, 57, 58, 52, 52, 68, 50, 40, 67, 62, 58, 63, 61, 60, 112, 69, 68, 63, 65, 62, 95, 76, 54, 91, 62, 61, 76, 64, 69, 66, 61, 72, 89, 58, 50, 57, 69, 93, 63, 51, 61, 87, 66, 78, 67, 72, 72, 77, 68, 66, 40, 80, 45, 70, 65, 34, 84, 56, 48, 44, 67, 56, 72, 55, 77, 71, 58, 58, 55, 89, 57, 51, 69, 53, 65, 64, 70, 81, 53, 64, 65, 85, 87, 49, 66, 77, 67, 80, 66, 63, 57, 60, 84, 72, 74, 48, 56, 58, 77, 66, 70, 76, 61, 70, 53, 65, 54, 71, 65, 107, 78, 62, 68, 72, 60, 79, 76, 80, 53, 51, 52, 87, 56, 66, 64, 76, 66, 78, 52, 44, 43, 72, 81, 62, 63, 74, 56, 44, 72, 39, 46, 78, 61, 52, 76, 82, 55, 93, 55, 62, 63, 50, 53, 57, 54, 46, 72, 77, 69, 63, 63, 70, 60, 51, 66, 50, 56, 87, 77, 71, 43, 60, 73, 58, 59, 52, 73, 80, 62, 91, 73, 65, 69, 50, 62, 79, 106, 56, 73, 57, 54, 73, 58, 68, 43, 72, 49, 71, 63, 73, 49, 76, 45, 55, 64, 48, 74, 52, 67, 38, 90, 51, 51, 73, 76, 73, 61, 100, 61, 53, 74, 74, 67, 43, 94, 108, 67, 76, 71, 59, 68, 50, 62, 46, 66, 37, 64, 66, 62, 54, 70, 75, 57, 73, 53, 52, 67, 68, 73, 65, 68, 55, 61, 53, 87, 77, 54, 41, 64, 63, 57, 51, 35, 61, 92, 78, 103, 46, 72, 62, 51, 66, 66, 59, 69, 63, 79, 71, 51, 68, 55, 54, 65, 84, 59, 53, 61, 53, 59, 72, 53, 68, 46, 59, 74, 74, 79, 66, 49, 56, 34, 69, 70, 55, 100, 83, 58, 105, 56, 39, 67, 66, 69, 93, 61, 71, 63, 75, 68, 58, 69, 59, 69, 66, 76, 81, 51, 56, 71, 71, 57, 71, 51, 70, 71, 70, 78, 56, 61, 64, 72, 79, 67, 46, 42, 71, 44, 88, 65, 80, 89, 63, 87, 80, 58, 77, 54, 48, 82, 47, 93, 106, 66, 65, 62, 62, 52, 56, 67, 72, 144, 56, 43, 57, 67, 54, 48, 93, 46, 31, 65, 84, 60, 63, 84, 92, 62, 95, 83, 82, 96, 63, 82, 46, 40, 52, 48, 53, 92, 80, 84, 81, 59, 60, 72, 59, 55, 69, 55, 52, 56, 67, 86, 60, 67, 70, 51, 62, 80, 71, 85, 63, 75, 53, 62, 37, 60, 42, 68, 70, 78, 70, 78, 75, 72, 82, 88, 67, 53, 64, 71, 80, 43, 54, 68, 70, 77, 69, 70, 43, 81, 76, 58, 105, 65, 66, 66, 88, 69, 77, 62, 76, 79, 75, 97, 68, 61, 92, 93, 75, 49, 60, 68, 57, 70, 61, 77, 78, 70, 67, 68, 60, 76, 64, 71, 75, 67, 91, 84, 121, 49, 85, 56, 54, 85, 80, 61, 46, 71, 47, 60, 71, 71, 50, 61, 88, 61, 78, 76, 53, 62, 64, 45, 75, 64, 67, 57, 71, 57, 57, 87, 74, 56, 63, 89, 83, 62, 69, 70, 97, 52, 56, 67, 76, 71, 100, 89, 98, 56, 58, 46, 64, 70, 69, 62, 79, 51, 99, 75, 71, 62, 66, 58, 84, 56, 63, 141, 63, 64, 79, 50, 62, 83, 65, 69, 66, 74, 64, 73, 55, 66, 69, 74, 72, 93, 74, 59, 43, 45, 57, 91, 66, 91, 69, 71, 56, 70, 47, 63, 57, 53, 67, 49, 57, 70, 65, 83, 60, 50, 56, 53, 68, 44, 59, 55, 34, 78, 54, 69, 68, 39, 82, 86, 59, 52, 71, 72, 86, 54, 85, 60, 70, 65, 68, 59, 63, 97, 75, 77, 54, 77, 87, 64, 48, 72, 56, 52, 39, 53, 59, 81, 59, 62, 74, 85, 42, 49, 51, 70, 56, 66, 66, 112, 46, 69, 58, 30, 58, 57, 84, 58, 60, 81, 64, 62, 89, 46, 67, 70, 55, 70, 48, 68, 62, 54, 80, 63, 60, 69, 67, 53, 76, 53, 57, 51, 66, 53, 69, 50, 84, 63, 65, 65, 43, 79, 71, 74, 56, 76, 80, 93, 71, 69, 73, 56, 53, 78, 68, 103, 76, 61, 59, 56, 67, 67, 73, 65, 66, 66, 
71, 58, 33, 92, 78, 64, 80, 42, 79, 48, 53, 62, 58, 99, 58, 69, 44, 64, 67, 66, 98, 52, 57, 60, 71, 50, 41, 66, 70, 85, 64, 48, 54, 41, 58, 60, 34, 75, 71, 62, 54, 64, 55, 62, 72, 54, 78, 72, 74, 93, 55, 56, 116, 87, 65, 54, 45, 57, 83, 70, 59, 76, 100, 64, 66, 62, 80, 74, 47, 74, 66, 67, 59, 74, 72, 53, 36, 66, 107, 76, 62, 67, 81, 75, 69, 64, 58, 65, 51, 54, 50, 67, 65, 60, 87, 64, 67, 49, 75, 38, 74, 84, 68, 138, 56, 74, 57, 62, 59, 51, 72, 65, 72, 58, 68, 66, 89, 54, 67, 83, 57, 96, 49, 44, 55, 67, 61, 77, 73, 55, 65, 69, 102, 76, 48, 78, 78, 126, 58, 79, 76, 61, 72, 63, 64, 84, 61, 61, 75, 47, 54, 60, 64, 57, 65, 53, 44, 92, 57, 49, 73, 77, 56, 69, 142, 65, 54, 41, 46, 65, 55, 63, 85, 80, 62, 63, 85, 65, 47, 61, 74, 68, 47, 53, 58, 76, 91, 73, 59, 72, 59, 122, 53, 87, 67, 116, 77, 45, 67, 53, 61, 68, 55, 49, 55, 47, 60, 68, 74, 58, 75, 61, 81, 94, 48, 58, 101, 67, 61, 69, 75, 41, 61, 47, 79, 58, 103, 57, 55, 74, 99, 113, 63, 49, 93, 46, 86, 53, 76, 60, 60, 54, 72, 61, 85, 64, 56, 85, 73, 72, 74, 52, 60, 59, 51, 54, 52, 45, 74, 56, 61, 81, 70, 65, 49, 55, 66, 67, 62, 64, 93, 68, 56, 46, 59, 86, 65, 63, 98, 47, 56, 52, 46, 82, 69, 40, 70, 49, 49, 76, 80, 54, 45, 58, 60, 65, 75, 59, 77, 50, 64, 60, 56, 67, 63, 67, 78, 114, 70, 51, 67, 50, 54, 68, 50, 78, 57, 72, 65, 77, 68, 67, 48, 41, 53, 65, 68, 60, 82, 67, 67, 63, 69, 77, 58, 68, 74, 74, 63, 86, 60, 54, 44, 69, 61, 74, 100, 78, 44, 43, 60, 91, 72, 62, 76, 59, 50, 66, 64, 59, 65, 82, 56, 81, 61, 73, 71, 67, 61, 71, 73, 45, 99, 64, 81, 47, 68, 56, 79, 59, 69, 67, 68, 48, 67, 63, 69, 56, 97, 65, 63, 52, 71, 61, 55, 80, 63, 62, 57, 56, 50, 72, 84, 64, 57, 88, 73, 60, 74, 73, 46, 83, 71, 65, 66, 86, 51, 57, 68, 69, 81, 69, 96, 66, 64, 52, 56, 78, 69, 68, 75, 65, 65, 86, 61, 55, 64, 83, 58, 71, 62, 118, 66, 72, 48, 55, 51, 49, 53, 82, 46, 84, 80, 53, 65, 62, 77, 56, 47, 59, 61, 47, 91, 57, 54, 81, 72, 58, 68, 45, 84, 49, 64, 47, 72, 68, 57, 89, 90, 82, 49, 83, 46, 62, 35, 63, 67, 81, 105, 64, 59, 59, 61, 45, 49, 69, 87, 68, 62, 62, 75, 69, 71, 79, 62, 65, 48, 59, 77, 70, 56, 81, 62, 75, 51, 48, 55, 66, 75, 65, 66, 69, 78, 57, 49, 47, 60, 72, 52, 53, 62, 70, 79, 61, 69, 61, 64, 48, 66, 67, 79, 83, 64, 60, 70, 48, 56, 98, 61, 65, 71, 64, 74, 49, 69, 64, 56, 65, 65, 64, 84, 34, 63, 46, 86, 71, 61, 86, 83, 71, 61, 55, 62, 68, 61, 53, 56, 65, 73, 79, 72, 48, 64, 67, 83, 59, 74, 51, 46, 70, 78, 87, 66, 76, 47, 65, 82, 66, 48, 88, 67, 57, 56, 75, 175, 63, 56, 51, 68, 80, 73, 72, 49, 88, 76, 74, 85, 49, 63, 45, 52, 76, 67, 68, 67, 69, 103, 70, 68, 71, 58, 73, 63, 53, 71, 48, 57, 62, 107, 62, 117, 48, 48, 75, 57, 90, 90, 61, 52, 70, 64, 89, 61, 75, 82, 56, 72, 63, 61, 82, 40, 63, 65, 73, 63, 51, 60, 53, 47, 75, 64, 51, 50, 75, 55, 65, 64, 50, 45, 52, 103, 67, 63, 55, 54, 75, 54, 59, 66, 63, 76, 41, 81, 55, 49, 88, 69, 70, 45, 72, 61, 68, 86, 86, 60, 65, 66, 54, 51, 64, 54, 53, 62, 68, 59, 103, 55, 104, 50, 92, 63, 81, 55, 74, 74, 39, 58, 70, 81, 84, 57, 47, 78, 50, 57, 75, 64, 56, 59, 66, 66, 58, 61, 59, 83, 66, 76, 85, 65, 50, 79, 58, 57, 47, 48, 81, 67, 84, 68, 75, 54, 64, 56, 57, 66, 50, 79, 60, 75, 37, 57, 71, 65, 83, 63, 61, 47, 79, 53, 52, 43, 66, 45, 84, 51, 45, 62, 55, 78, 72, 60, 72, 90, 54, 66, 63, 77, 58, 59, 57, 75, 47, 45, 97, 89, 73, 42, 52, 63, 69, 68, 72, 45, 72, 97, 63, 75, 63, 43, 58, 61, 61, 64, 57, 65, 57, 67, 77, 87, 41, 71, 44, 71, 74, 72, 73, 57, 80, 86, 58, 57, 91, 82, 92, 74, 79, 75, 103, 74, 91, 37, 69, 73, 59, 64, 64, 60, 59, 66, 67, 106, 63, 97, 48, 76, 63, 65, 88, 52, 65, 78, 59, 62, 54, 51, 55, 56, 67, 57, 67, 53, 101, 
72, 62, 83, 54, 60, 69, 60, 66, 81, 58, 58, 64, 87, 71, 70, 121, 82, 71, 36, 50, 69, 64, 72, 58, 84, 66, 60, 66, 58, 81, 62, 46, 64, 52, 49, 68, 70, 51, 49, 51, 78, 63, 79, 61, 68, 50, 59, 67, 60, 55, 77, 54, 41, 68, 73, 67, 90, 65, 58, 50, 53, 110, 69, 71, 94, 50, 75, 74, 65, 95, 66, 44, 85, 61, 64, 49, 59, 56, 52, 60, 60, 83, 80, 59, 61, 80, 50, 40, 78, 63, 60, 74, 79, 63, 56, 115, 62, 63, 79, 84, 66, 63, 58, 48, 58, 66, 43, 74, 56, 80, 67, 74, 61, 107, 139, 57, 85, 69, 73, 66, 53, 63, 55, 74, 76, 63, 70, 54, 54, 83, 80, 60, 45, 74, 59, 58, 63, 51, 79, 49, 66, 61, 56, 82, 53, 58, 54, 65, 92, 53, 53, 63, 81, 54, 69, 60, 61, 72, 70, 73, 53, 52, 61, 74, 65, 54, 56, 80, 70, 51, 74, 42, 42, 74, 71, 76, 61, 73, 58, 84, 51, 67, 77, 72, 76, 58, 53, 51, 64, 66, 58, 53, 128, 73, 83, 52, 82, 54, 69, 72, 74, 63, 67, 59, 81, 60, 60, 74, 60, 56, 63, 56, 55, 59, 70, 48, 68, 62, 64, 68, 78, 62, 70, 90, 62, 48, 82, 64, 63, 46, 63, 71, 62, 68, 63, 78, 81, 68, 71, 71, 65, 70, 67, 51, 65, 63, 68, 74, 64, 67, 45, 61, 62, 76, 60, 67, 49, 59, 79, 50, 80, 77, 49, 56, 69, 56, 57, 81, 69, 57, 77, 56, 63, 59, 59, 94, 93, 53, 72, 62, 76, 65, 80, 49, 77, 54, 73, 59, 71, 59, 73, 45, 67, 75, 74, 72, 56, 79, 65, 56, 59, 96, 53, 58, 70, 60, 69, 72, 63, 68, 50, 65, 48, 54, 74, 68, 56, 67, 70, 64, 71, 56, 83, 56, 55, 63, 64, 58, 55, 64, 63, 65, 56, 59, 66, 110, 87, 62, 75, 70, 58, 79, 108, 44, 61, 51, 64, 52, 56, 77, 61, 119, 49, 64, 78, 48, 64, 119, 56, 84, 51, 67, 59, 62, 74, 56, 63, 54, 78, 92, 51, 52, 51, 53, 66, 78, 64, 70, 58, 70, 55, 66, 121, 56, 67, 57, 59, 64, 75, 63, 60, 56, 84, 56, 71, 67, 74, 59, 44, 68, 57, 84, 52, 59, 64, 56, 67, 52, 58, 54, 52, 36, 65, 77, 99, 72, 59, 63, 74, 52, 61, 70, 79, 52, 97, 56, 84, 76, 60, 64, 56, 62, 70, 64, 60, 77, 61, 84, 62, 46, 73, 61, 54, 76, 44, 70, 54, 50, 63, 71, 64, 52, 75, 55, 51, 59, 54, 51, 62, 62, 64, 70, 52, 59, 57, 58, 71, 60, 100, 56, 59, 56, 57, 61, 47, 56, 52, 45, 65, 120, 66, 79, 42, 59, 61, 59, 73, 85, 77, 54, 65, 74, 71, 67, 102, 50, 69, 66, 63, 70, 56, 62, 57, 67, 73, 69, 77, 56, 66, 64, 71, 47, 62, 90, 61, 76, 58, 53, 68, 49, 120, 76, 55, 52, 55, 50, 70, 63, 61, 50, 54, 70, 58, 70, 80, 61, 50, 60, 83, 69, 73, 57, 70, 66, 66, 96, 61, 66, 97, 59, 64, 66, 68, 53, 58, 40, 56, 53, 84, 73, 62, 58, 64, 55, 70, 66, 77, 76, 80, 53, 57, 70, 67, 64, 69, 66, 63, 68, 57, 74, 94, 69, 84, 55, 64, 77, 72, 69, 69, 62, 55, 68, 48, 82, 63, 58, 62, 65, 57, 74, 54, 103, 56, 56, 59, 60, 54, 63, 75, 69, 51, 57, 59, 68, 81, 64, 54, 62, 78, 54, 66, 58, 46, 59, 72, 77, 71, 53, 66, 67, 62, 57, 56, 45, 56, 85, 59, 43, 96, 88, 49, 109, 65, 61, 64, 66, 58, 56, 100, 40, 59, 83, 76, 62, 39, 69, 79, 88, 42, 50, 64, 70, 61, 59, 61, 55, 63, 64, 57, 74, 67, 45, 83, 81, 58, 96, 53, 42, 58, 69, 62, 138, 65, 74, 109, 49, 69, 51, 62, 71, 51, 65, 61, 73, 72, 54, 52, 65, 53, 56, 50, 63, 58, 44, 79, 81, 58, 92, 104, 49, 56, 64, 42, 102, 56, 58, 57, 65, 92, 54, 66, 69, 77, 100, 69, 64, 74, 76, 74, 53, 112, 87, 59, 48, 87, 81, 58, 60, 55, 76, 55, 51, 59, 88, 74, 70, 71, 61, 75, 71, 86, 102, 68, 89, 64, 59, 92, 58, 67, 68, 67, 47, 78, 72, 52, 73, 55, 55, 80, 84, 101, 60, 54, 60, 27, 57, 56, 62, 59, 64, 78, 42, 65, 109, 72, 88, 70, 58, 64, 43, 65, 51, 62, 61, 71, 66, 66, 58, 55, 91, 43, 74, 62, 63, 71, 87, 71, 57, 72, 86, 51, 105, 64, 70, 56, 83, 80, 88, 41, 55, 64, 63, 80, 61, 48, 68, 73, 38, 74, 68, 82, 50, 55, 62, 74, 66, 62, 52, 56, 70, 52, 65, 35, 62, 56, 61, 62, 46, 77, 85, 54, 71, 67, 74, 70, 72, 55, 49, 62, 55, 67, 63, 51, 76, 74, 83, 65, 62, 65, 54, 47, 80, 41, 58, 56, 72, 88, 76, 75, 75, 63, 
72, 51, 80, 59, 60, 53, 56, 37, 70, 67, 57, 80, 60, 72, 49, 45, 92, 74, 61, 64, 72, 40, 62, 29, 30, 62, 49, 88, 73, 49, 55, 54, 77, 70, 51, 53, 65, 53, 64, 75, 71, 47, 72, 87, 72, 71, 63, 55, 71, 79, 60, 57, 65, 52, 73, 63, 65, 54, 109, 68, 83, 87, 51, 63, 74, 60, 55, 59, 62, 74, 69, 72, 48, 67, 85, 42, 55, 69, 112, 137, 70, 59, 52, 69, 65, 84, 82, 38, 66, 65, 57, 98, 54, 72, 68, 94, 75, 77, 65, 63, 84, 54, 53, 70, 65, 56, 61, 60, 63, 69, 72, 72, 58, 47, 77, 92, 54, 81, 67, 53, 68, 62, 84, 67, 66, 90, 80, 68, 77, 55, 52, 54, 54, 85, 66, 90, 63, 63, 54, 90, 60, 60, 82, 85, 78, 79, 62, 52, 61, 72, 62, 45, 61, 61, 56, 92, 86, 75, 77, 59, 76, 54, 83, 53, 62, 72, 48, 56, 64, 61, 81, 62, 57, 65, 60, 59, 63, 98, 64, 60, 61, 80, 67, 58, 72, 65, 50, 66, 94, 63, 54, 50, 61, 52, 84, 65, 58, 51, 59, 93, 54, 69, 72, 62, 91, 48, 69, 71, 66, 73, 59, 66, 70, 55, 63, 70, 101, 51, 76, 40, 56, 57, 71, 63, 85, 53, 75, 58, 57, 53, 54, 81, 55, 61, 60, 60, 66, 59, 75, 54, 63, 56, 72, 70, 97, 67, 61, 74, 52, 83, 69, 62, 84, 64, 102, 43, 165, 94, 61, 55, 54, 74, 87, 64, 73, 55, 68, 42, 68, 67, 66, 61, 70, 68, 57, 60, 72, 47, 60, 91, 80, 57, 58, 55, 57, 80, 96, 52, 56, 68, 74, 68, 44, 58, 57, 57, 54, 43, 72, 83, 84, 73, 74, 67, 63, 47, 68, 45, 77, 56, 55, 59, 108, 64, 54, 64, 56, 63, 52, 53, 60, 50, 105, 52, 74, 62, 57, 89, 66, 63, 72, 58, 67, 66, 66, 91, 55, 97, 57, 62, 81, 80, 70, 44, 67, 67, 69, 59, 75, 69, 51, 56, 85, 56, 59, 50, 62, 48, 107, 44, 51, 85, 65, 80, 72, 60, 55, 64, 61, 73, 64, 63, 73, 68, 65, 58, 71, 79, 60, 64, 75, 83, 49, 54, 67, 51, 67, 75, 68, 54, 51, 61, 60, 66, 53, 52, 46, 30, 55, 59, 57, 50, 55, 69, 76, 62, 60, 67, 79, 52, 42, 53, 54, 64, 67, 62, 54, 58, 94, 78, 76, 62, 62, 64, 79, 68, 64, 70, 89, 74, 62, 61, 53, 61, 45, 73, 85, 54, 71, 55, 65, 67, 67, 50, 40, 64, 68, 58, 39, 76, 45, 63, 44, 71, 61, 46, 58, 87, 51, 73, 71, 46, 80, 68, 81, 46, 68, 69, 81, 56, 43, 54, 82, 77, 66, 84, 78, 74, 51, 66, 56, 58, 61, 59, 84, 55, 53, 68, 64, 77, 78, 79, 67, 62, 88, 50, 50, 55, 64, 61, 82, 59, 51, 76, 72, 54, 65, 80, 52, 56, 82, 55, 66, 55, 77, 71, 87, 40, 114, 55, 82, 79, 76, 48, 60, 107, 43, 57, 52, 65, 58, 53, 63, 60, 80, 78, 81, 49, 61, 41, 59, 55, 57, 60, 100, 44, 63, 69, 44, 71, 86, 62, 92, 65, 42, 58, 71, 87, 61, 72, 55, 66, 51, 54, 82, 64, 66, 68, 54, 72, 59, 79, 67, 59, 47, 108, 61, 62, 60, 63, 85, 51, 73, 60, 58, 60, 78, 61, 90, 61, 57, 53, 81, 65, 86, 61, 73, 52, 49, 69, 57, 48, 70, 80, 63, 58, 58, 68, 59, 82, 46, 67, 65, 48, 58, 51, 56, 49, 100, 68, 74, 63, 85, 97, 73, 72, 46, 71, 56, 80, 70, 82, 80, 95, 77, 71, 84, 94, 61, 70, 66, 51, 87, 60, 36, 52, 67, 75, 79, 45, 59, 70, 80, 88, 77, 63, 60, 79, 70, 71, 36, 56, 70, 130, 62, 71, 68, 62, 62, 61, 63, 59, 61, 53, 70, 46, 50, 58, 71, 67, 64, 66, 80, 46, 59, 56, 45, 63, 54, 70, 68, 111, 57, 63, 53, 65, 82, 56, 59, 68, 56, 107, 57, 69, 56, 55, 49, 44, 77, 90, 52, 72, 60, 72, 82, 54, 72, 72, 61, 87, 93, 66, 84, 73, 69, 60, 75, 59, 58, 55, 56, 69, 59, 39, 89, 44, 75, 72, 49, 69, 66, 63, 73, 75, 58, 53, 54, 50, 60, 68, 76, 62, 76, 48, 66, 79, 83, 90, 49, 57, 65, 46, 65, 54, 70, 56, 50, 44, 91, 65, 62, 60, 37, 76, 67, 60, 56, 76, 89, 43, 78, 60, 60, 96, 91, 75, 72, 67, 85, 70, 66, 80, 42, 71, 63, 67, 55, 48, 60, 75, 80, 51, 55, 67, 44, 72, 72, 59, 62, 65, 56, 82, 98, 56, 87, 69, 60, 65, 44, 60, 68, 71, 62, 90, 141, 81, 78, 89, 82, 79, 50, 51, 48, 65, 50, 78, 54, 56, 45, 106, 59, 61, 59, 76, 59, 58, 88, 74, 65, 93, 66, 43, 53, 30, 51, 49, 60, 51, 54, 60, 50, 79, 63, 51, 80, 76, 75, 68, 68, 97, 59, 66, 67, 113, 122, 49, 78, 93, 89, 46, 78, 62, 67, 
90, 82, 59, 56, 57, 100, 76, 66, 75, 60, 74, 79, 52, 74, 65, 78, 46, 59, 58, 95, 87, 74, 68, 59, 60, 66, 64, 77, 95, 68, 107, 54, 48, 58, 82, 76, 46, 64, 55, 51, 46, 55, 67, 86, 61, 50, 68, 97, 74, 72, 72, 52, 77, 75, 55, 74, 64, 63, 66, 70, 72, 71, 79, 64, 50, 93, 86, 47, 72, 67, 56, 83, 53, 71, 90, 68, 60, 78, 74, 60, 67, 66, 84, 58, 55, 60, 63, 58, 67, 62, 88, 66, 67, 58, 51, 63, 110, 83, 58, 93, 76, 86, 58, 68, 82, 62, 77, 75, 44, 71, 70, 64, 70, 79, 97, 68, 60, 63, 64, 98, 63, 65, 52, 52, 40, 94, 63, 50, 57, 59, 54, 81, 68, 67, 69, 81, 60, 53, 52, 52, 65, 68, 50, 69, 59, 58, 51, 67, 75, 90, 76, 73, 87, 57, 79, 53, 61, 101, 75, 51, 59, 56, 47, 53, 58, 76, 71, 55, 72, 74, 34, 55, 50, 62, 74, 45, 73, 67, 53, 60, 104, 78, 52, 64, 79, 60, 58, 52, 74, 54, 66, 65, 56, 68, 58, 66, 62, 86, 75, 163, 50, 60, 76, 54, 70, 81, 59, 60, 55, 86, 49, 62, 82, 107, 56, 142, 44, 45, 66, 83, 108, 53, 56, 54, 45, 50, 56, 55, 63, 62, 58, 57, 87, 64, 57, 54, 61, 51, 67, 65, 58, 79, 68, 53, 47, 59, 57, 64, 62, 54, 57, 70, 57, 61, 72, 70, 64, 67, 74, 55, 68, 67, 71, 62, 62, 90, 65, 85, 51, 83, 70, 70, 42, 65, 52, 74, 43, 58, 76, 56, 53, 78, 58, 65, 71, 65, 71, 74, 82, 65, 68, 69, 57, 60, 76, 73, 72, 116, 70, 74, 71, 73, 73, 64, 66, 57, 74, 60, 60, 69, 79, 57, 57, 66, 54, 71, 72, 71, 52, 73, 63, 66, 61, 72, 80, 70, 54, 59, 61, 60, 55, 70, 68, 64, 72, 73, 58, 61, 55, 59, 123, 80, 74, 73, 66, 109, 68, 90, 78, 42, 63, 53, 57, 81, 58, 51, 92, 76, 76, 57, 64, 59, 56, 76, 109, 59, 67, 65, 71, 71, 49, 67, 63, 61, 58, 68, 54, 60, 66, 80, 71, 93, 59, 71, 57, 69, 65, 68, 66, 49, 54, 64, 60, 86, 55, 62, 60, 56, 108, 60, 75, 83, 67, 43, 59, 71, 45, 81, 62, 58, 73, 84, 60, 68, 62, 66, 62, 76, 55, 55, 62, 77, 50, 61, 56, 67, 49, 51, 62, 64, 71, 64, 102, 53, 66, 63, 64, 60, 79, 58, 48, 84, 63, 67, 58, 74, 64, 75, 56, 62, 44, 51, 52, 82, 71, 64, 68, 62, 58, 65, 76, 64, 57, 71, 73, 59, 46, 64, 54, 70, 69, 46, 63, 54, 61, 61, 129, 67, 84, 60, 64, 58, 50, 64, 56, 71, 75, 84, 54, 61, 62, 58, 76, 66, 66, 73, 83, 45, 92, 72, 50, 61, 65, 67, 72, 52, 59, 74, 76, 66, 76, 68, 70, 57, 55, 56, 74, 71, 73, 56, 63, 72, 54, 60, 58, 88, 73, 91, 59, 71, 52, 71, 68, 67, 66, 62, 57, 99, 62, 55, 68, 68, 79, 70, 75, 67, 49, 69, 53, 58, 47, 58, 59, 43, 90, 70, 43, 57, 66, 82, 90, 52, 58, 47, 62, 72, 59, 69, 63, 56, 80, 67, 62, 74, 58, 63, 62, 55, 59, 82, 62, 65, 67, 69, 61, 64, 69, 68, 71, 65, 81, 77, 80, 68, 64, 84, 65, 64, 56, 59, 63, 60, 74, 51, 55, 70, 151, 61, 58, 60, 60, 75, 58, 80, 63, 69, 75, 79, 64, 84, 57, 66, 53, 64, 53, 55, 87, 56, 61, 66, 52, 60, 63, 80, 64, 72, 64, 68, 55, 49, 72, 107, 67, 105, 58, 98, 77, 83, 62, 60, 72, 67, 49, 64, 65, 54, 44, 73, 88, 55, 75, 85, 59, 56, 56, 44, 106, 102, 68, 55, 64, 47, 49, 64, 48, 67, 89, 67, 58, 91, 69, 72, 84, 74, 64, 84, 51, 69, 52, 75, 99, 70, 61, 79, 130, 68, 68, 53, 58, 72, 62, 46, 65, 59, 67, 60, 78, 56, 71, 50, 78, 58, 64, 59, 64, 86, 57, 55, 54, 51, 51, 80, 55, 67, 74, 66, 45, 57, 60, 73, 55, 60, 60, 53, 75, 57, 65, 54, 65, 67, 63, 55, 74, 57, 45, 55, 48, 61, 62, 68, 85, 61, 86, 59, 54, 71, 55, 70, 63, 79, 64, 58, 89, 55, 54, 62, 66, 60, 60, 58, 54, 75, 84, 67, 74, 55, 58, 55, 58, 62, 53, 63, 62, 72, 55, 66, 55, 48, 74, 67, 80, 58, 50, 44, 77, 76, 57, 60, 67, 92, 55, 56, 70, 76, 85, 76, 54, 67, 58, 57, 54, 65, 66, 65, 59, 63, 66, 92, 63, 55, 86, 77, 127, 68, 67, 57, 51, 62, 73, 60, 65, 60, 80, 61, 108, 63, 55, 71, 52, 76, 65, 47, 80, 56, 72, 67, 69, 50, 84, 49, 71, 71, 105, 63, 80, 61, 58, 79, 57, 52, 52, 70, 58, 67, 77, 64, 64, 66, 65, 50, 61, 81, 52, 47, 65, 68, 59, 89, 54, 64, 58, 62, 
60, 43, 60, 33, 87, 72, 70, 54, 62, 70, 78, 66, 52, 64, 64, 85, 44, 56, 59, 64, 76, 56, 75, 62, 75, 55, 59, 67, 64, 77, 60, 70, 77, 59, 53, 61, 80, 91, 70, 60, 75, 49, 71, 82, 47, 74, 59, 68, 56, 54, 69, 69, 56, 76, 55, 58, 54, 60, 78, 83, 71, 61, 59, 63, 45, 38, 57, 56, 61, 59, 66, 78, 75, 66, 78, 67, 68, 68, 72, 67, 65, 58, 57, 58, 83, 70, 128, 61, 66, 72, 83, 67, 63, 60, 72, 55, 66, 62, 55, 68, 60, 43, 65, 47, 62, 70, 50, 59, 64, 92, 71, 61, 48, 59, 63, 90, 69, 55, 52, 59, 47, 69, 64, 74, 52, 43, 102, 72, 55, 76, 66, 85, 93, 73, 49, 69, 59, 96, 42, 58, 65, 54, 77, 47, 72, 62, 67, 72, 61, 43, 78, 51, 49, 140, 59, 65, 64, 69, 81, 71, 72, 60, 60, 50, 68, 66, 52, 64, 58, 84, 65, 46, 74, 59, 66, 70, 81, 52, 62, 53, 60, 65, 59, 62, 75, 86, 79, 45, 62, 69, 48, 76, 67, 68, 49, 58, 71, 77, 53, 59, 56, 73, 49, 67, 65, 79, 73, 68, 53, 75, 65, 50, 52, 74, 57, 61, 53, 75, 70, 92, 62, 73, 56, 66, 55, 61, 74, 60, 59, 50, 54, 53, 79, 49, 56, 79, 79, 83, 92, 41, 62, 64, 59, 70, 79, 87, 53, 52, 60, 110, 66, 54, 86, 65, 62, 91, 60, 64, 70, 64, 60, 45, 55, 74, 74, 58, 51, 58, 80, 52, 66, 61, 56, 79, 73, 70, 75, 64, 54, 49, 69, 52, 56, 70, 81, 55, 65, 73, 64, 109, 52, 57, 58, 57, 86, 75, 69, 62, 48, 81, 67, 57, 66, 43, 63, 48, 52, 53, 73, 64, 76, 56, 53, 64, 63, 63, 54, 39, 58, 51, 66, 62, 86, 60, 63, 74, 63, 82, 54, 58, 62, 62, 70, 58, 68, 61, 74, 59, 69, 113, 62, 59, 66, 77, 59, 70, 65, 62, 59, 50, 81, 52, 70, 63, 73, 61, 90, 61, 47, 55, 69, 114, 104, 53, 70, 51, 55, 58, 67, 59, 47, 67, 71, 64, 80, 58, 73, 92, 67, 51, 61, 60, 53, 65, 68, 55, 66, 69, 63, 72, 50, 46, 75, 65, 68, 44, 64, 63, 88, 64, 58, 66, 65, 68, 55, 58, 64, 66, 69, 58, 58, 58, 65, 65, 92, 72, 68, 48, 56, 68, 63, 67, 68, 57, 86, 51, 72, 86, 74, 71, 58, 65, 60, 81, 59, 63, 57, 50, 77, 55, 64, 70, 61, 51, 62, 63, 55, 60, 81, 53, 88, 58, 63, 58, 58, 69, 60, 85, 71, 68, 81, 58, 68, 94, 68, 53, 57, 52, 62, 57, 59, 69, 58, 60, 50, 63, 72, 69, 62, 49, 64, 81, 84, 62, 62, 88, 72, 72, 58, 58, 67, 59, 63, 61, 59, 76, 72, 74, 80, 73, 59, 69, 79, 58, 44, 69, 78, 71, 63, 65, 57, 63, 48, 55, 111, 61, 57, 76, 55, 46, 71, 67, 53, 59, 60, 83, 54, 89, 68, 64, 65, 61, 111, 52, 65, 70, 54, 89, 48, 54, 82, 54, 67, 62, 88, 88, 80, 77, 55, 49, 65, 83, 69, 72, 63, 48, 62, 62, 52, 85, 77, 63, 74, 70, 66, 43, 67, 66, 36, 52, 75, 58, 78, 58, 67, 69, 64, 62, 49, 66, 49, 64, 82, 78, 57, 55, 58, 52, 50, 52, 59, 55, 48, 70, 49, 101, 74, 55, 65, 59, 52, 55, 52, 59, 64, 73, 83, 72, 61, 58, 49, 66, 63, 72, 73, 67, 53, 54, 94, 70, 65, 57, 58, 61, 98, 82, 58, 64, 62, 92, 57, 87, 51, 59, 38, 53, 68, 97, 61, 51, 65, 68, 45, 48, 76, 59, 63, 70, 67, 70, 59, 77, 62, 50, 85, 61, 70, 52, 67, 58, 66, 76, 59, 49, 61, 69, 63, 60, 62, 53, 50, 95, 62, 53, 72, 66, 81, 72, 74, 125, 62, 66, 71, 65, 84, 64, 55, 64, 51, 67, 72, 72, 69, 60, 63, 76, 86, 39, 56, 47, 52, 63, 79, 63, 54, 46, 63, 57, 75, 66, 62, 75, 69, 49, 43, 67, 89, 74, 63, 60, 67, 89, 65, 54, 78, 82, 65, 53, 56, 72, 98, 63, 66, 48, 70, 64, 63, 61, 62, 58, 76, 65, 71, 39, 65, 67, 52, 51, 74, 43, 72, 65, 43, 68, 51, 61, 67, 63, 85, 68, 52, 80, 86, 54, 47, 78, 63, 64, 92, 67, 49, 42, 65, 78, 69, 62, 73, 40, 54, 64, 54, 70, 61, 68, 66, 93, 79, 52, 89, 73, 89, 77, 64, 70, 73, 61, 59, 54, 72, 69, 79, 77, 73, 113, 58, 78, 61, 46, 107, 67, 43, 53, 66, 74, 58, 77, 60, 72, 105, 65, 65, 106, 43, 75, 58, 60, 62, 91, 63, 66, 63, 62, 80, 82, 65, 54, 62, 61, 54, 60, 50, 64, 61, 52, 69, 78, 133, 67, 75, 78, 57, 53, 108, 54, 69, 61, 50, 65, 62, 65, 51, 66, 56, 75, 61, 49, 57, 56, 70, 44, 61, 71, 41, 73, 58, 72, 63, 51, 58, 91, 42, 60, 66, 52, 
72, 61, 69, 58, 51, 58, 64, 62, 91, 65, 64, 47, 56, 65, 63, 71, 62, 95, 75, 65, 53, 60, 60, 77, 67, 60, 52, 60, 60, 73, 46, 92, 53, 67, 61, 55, 60, 59, 60, 55, 95, 65, 68, 58, 49, 45, 66, 78, 69, 83, 56, 85, 78, 83, 56, 60, 57, 71, 57, 53, 53, 143, 89, 52, 55, 60, 60, 61, 76, 63, 53, 45, 62, 75, 69, 67, 59, 50, 66, 83, 65, 60, 51, 65, 60, 72, 58, 54, 51, 63, 91, 74, 63, 54, 62, 61, 58, 62, 64, 68, 117, 68, 68, 76, 61, 37, 78, 74, 67, 77, 78, 69, 64, 63, 76, 74, 62, 64, 89, 71, 61, 66, 49, 48, 58, 75, 62, 47, 62, 72, 62, 59, 49, 71, 70, 43, 55, 68, 72, 64, 79, 67, 68, 53, 77, 61, 62, 61, 67, 55, 76, 56, 70, 59, 70, 70, 55, 63, 66, 59, 112, 54, 71, 54, 61, 48, 79, 61, 71, 91, 52, 51, 52, 79, 71, 74, 61, 63, 64, 58, 66, 53, 48, 71, 49, 59, 65, 111, 63, 64, 67, 44, 54, 59, 68, 64, 60, 54, 101, 58, 56, 60, 59, 69, 75, 72, 64, 58, 51, 59, 51, 56, 80, 49, 77, 67, 71, 79, 76, 77, 71, 69, 55, 50, 76, 49, 120, 70, 51, 78, 60, 98, 57, 74, 54, 59, 74, 60, 86, 54, 58, 62, 68, 53, 48, 63, 59, 56, 49, 103, 66, 69, 57, 74, 46, 83, 78, 62, 66, 54, 83, 58, 61, 95, 77, 57, 96, 61, 69, 69, 103, 43, 72, 70, 53, 82, 68, 110, 64, 85, 65, 69, 93, 56, 79, 57, 65, 74, 51, 52, 78, 81, 76, 81, 66, 45, 68, 67, 69, 68, 65, 67, 63, 59, 59, 66, 63, 54, 62, 60, 107, 58, 67, 56, 68, 64, 68, 70, 55, 48, 69, 56, 57, 56, 55, 57, 93, 75, 64, 71, 66, 55, 58, 66, 63, 46, 71, 68, 64, 62, 76, 69, 60, 44, 62, 71, 86, 61, 44, 44, 53, 63, 62, 47, 66, 69, 67, 61, 85, 95, 65, 53, 64, 57, 68, 59, 97, 60, 63, 58, 61, 50, 63, 70, 90, 54, 66, 53, 60, 62, 55, 60, 71, 62, 67, 70, 82, 39, 104, 60, 68, 39, 76, 51, 58, 84, 58, 61, 72, 62, 72, 62, 61, 63, 60, 67, 45, 57, 60, 62, 50, 54, 76, 77, 69, 70, 78, 64, 69, 70, 70, 62, 63, 78, 47, 57, 54, 84, 69, 93, 54, 66, 81, 79, 125, 52, 62, 64, 58, 101, 64, 56, 69, 50, 63, 56, 62, 73, 88, 72, 67, 86, 74, 52, 56, 60, 66, 63, 70, 49, 44, 65, 90, 62, 64, 67, 63, 109, 87, 55, 70, 64, 84, 75, 64, 81, 109, 63, 59, 101, 66, 66, 51, 71, 112, 69, 56, 58, 69, 74, 76, 60, 47, 59, 67, 54, 70, 73, 63, 63, 69, 73, 94, 99, 61, 47, 80, 67, 50, 54, 49, 72, 68, 81, 111, 66, 68, 40, 65, 45, 84, 72, 44, 84, 52, 46, 50, 72, 66, 50, 67, 77, 66, 60, 78, 61, 72, 63, 77, 58, 66, 48, 88, 79, 53, 69, 51, 57, 67, 73, 57, 66, 83, 58, 99, 74, 73, 66, 74, 73, 57, 63, 76, 51, 78, 91, 69, 98, 89, 60, 63, 54, 66, 98, 66, 61, 52, 53, 81, 62, 60, 67, 76, 56, 69, 60, 54, 57, 65, 81, 64, 56, 67, 58, 59, 91, 59, 106, 53, 62, 54, 68, 61, 58, 44, 72, 73, 55, 52, 65, 78, 77, 59, 70, 67, 58, 65, 57, 65, 60, 60, 67, 73, 64, 70, 56, 49, 61, 50, 58, 79, 56, 64, 49, 54, 57, 73, 61, 84, 55, 75, 73, 61, 84, 78, 64, 81, 58, 69, 78, 67, 63, 78, 54, 43, 61, 52, 45, 40, 71, 50, 75, 86, 56, 82, 60, 68, 54, 67, 62, 76, 93, 76, 61, 61, 82, 69, 55, 68, 84, 60, 61, 103, 66, 72, 92, 64, 64, 75, 88, 62, 52, 60, 69, 60, 54, 58, 74, 59, 52, 60, 51, 72, 66, 63, 71, 67, 55, 58, 54, 75, 57, 63, 62, 88, 56, 71, 55, 61, 61, 75, 45, 112, 67, 93, 66, 60, 76, 75, 78, 49, 48, 73, 57, 74, 73, 74, 78, 62, 57, 69, 53, 69, 84, 68, 71, 81, 62, 56, 74, 56, 62, 71, 68, 53, 45, 54, 83, 123, 48, 63, 65, 60, 78, 100, 82, 71, 66, 61, 83, 61, 68, 72, 59, 54, 55, 79, 58, 50, 60, 66, 60, 56, 48, 51, 55, 63, 69, 65, 67, 59, 63, 59, 61, 57, 55, 73, 63, 54, 65, 70, 57, 73, 65, 60, 47, 64, 75, 68, 79, 84, 58, 63, 57, 63, 82, 67, 49, 68, 95, 60, 53, 69, 55, 84, 54, 75, 53, 59, 49, 68, 60, 55, 70, 54, 77, 57, 43, 50, 68, 58, 84, 80, 48, 69, 67, 53, 61, 65, 60, 67, 60, 56, 70, 79, 57, 68, 58, 59, 56, 60, 72, 61, 67, 72, 59, 62, 62, 70, 67, 65, 68, 77, 52, 67, 79, 74, 65, 48, 50, 66, 65, 60, 
47, 54, 57, 68, 72, 73, 57, 72, 72, 68, 58, 71, 69, 84, 64, 62, 76, 67, 99, 46, 52, 62, 58, 66, 41, 70, 67, 49, 56, 59, 67, 66, 50, 61, 90, 61, 81, 72, 66, 64, 96, 78, 67, 71, 46, 57, 60, 84, 84, 71, 62, 80, 86, 100, 65, 62, 68, 64, 77, 67, 68, 57, 60, 64, 64, 54, 126, 74, 55, 51, 53, 59, 54, 72, 61, 71, 62, 68, 59, 62, 60, 66, 63, 60, 82, 54, 75, 57, 69, 64, 66, 58, 70, 59, 87, 64, 61, 63, 56, 58, 52, 73, 73, 87, 59, 83, 73, 60, 50, 103, 78, 71, 66, 61, 67, 44, 74, 62, 66, 62, 70, 77, 75, 60, 63, 66, 72, 67, 57, 68, 57, 63, 68, 62, 52, 54, 67, 60, 59, 64, 66, 78, 67, 69, 60, 69, 96, 75, 59, 112, 50, 97, 65, 55, 65, 60, 61, 57, 59, 48, 67, 66, 72, 64, 76, 65, 53, 63, 65, 70, 45, 111, 74, 65, 54, 64, 70, 66, 48, 82, 57, 58, 39, 65, 62, 82, 63, 64, 61, 58, 70, 62, 63, 69, 78, 75, 59, 56, 57, 70, 50, 68, 84, 76, 64, 63, 82, 57, 66, 70, 60, 69, 46, 69, 66, 70, 116, 80, 57, 93, 51, 83, 74, 83, 62, 58, 77, 60, 75, 65, 102, 100, 47, 53, 56, 53, 100, 83, 47, 57, 66, 62, 52, 57, 67, 60, 63, 70, 81, 52, 80, 113, 59, 75, 70, 61, 68, 66, 57, 69, 64, 59, 69, 60, 62, 72, 69, 62, 90, 58, 69, 73, 79, 62, 75, 58, 67, 60, 54, 56, 64, 71, 84, 69, 79, 66, 68, 72, 61, 47, 62, 47, 69, 59, 62, 60, 76, 79, 53, 58, 68, 65, 67, 59, 57, 73, 86, 60, 60, 59, 65, 61, 58, 67, 105, 55, 52, 63, 45, 67, 62, 79, 63, 49, 47, 59, 62, 54, 94, 61, 60, 66, 58, 50, 74, 65, 70, 60, 73, 68, 66, 60, 60, 58, 94, 70, 54, 58, 53, 70, 52, 71, 71, 72, 64, 52, 65, 56, 68, 64, 82, 67, 69, 67, 59, 87, 56, 109, 62, 64, 64, 50, 61, 49, 56, 58, 57, 59, 60, 51, 61, 62, 70, 71, 66, 55, 63, 69, 54, 64, 81, 66, 69, 69, 51, 81, 44, 66, 49, 55, 42, 68, 54, 64, 75, 49, 53, 57, 72, 73, 51, 68, 63, 66, 61, 66, 58, 67, 88, 55, 57, 58, 72, 71, 48, 84, 76, 51, 62, 56, 51, 69, 59, 72, 70, 53, 56, 59, 52, 73, 56, 70, 65, 60, 89, 70, 62, 62, 75, 58, 100, 49, 65, 69, 56, 59, 65, 68, 60, 65, 55, 65, 76, 72, 60, 66, 51, 73, 67, 67, 63, 101, 78, 74, 65, 69, 57, 83, 129, 55, 61, 93, 122, 61, 65, 55, 83, 60, 70, 65, 63, 76, 98, 66, 64, 58, 56, 67, 71, 62, 66, 62, 61, 69, 64, 66, 58, 50, 68, 66, 58, 71, 64, 99, 72, 67, 101, 45, 67, 54, 63, 76, 62, 57, 63, 51, 53, 63, 67, 48, 72, 62, 63, 87, 62, 64, 87, 53, 62, 70, 53, 61, 65, 125, 71, 67, 54, 71, 74, 73, 65, 83, 60, 72, 62, 56, 64, 68, 66, 64, 62, 54, 70, 65, 59, 49, 74, 84, 80, 69, 81, 69, 40, 64, 68, 71, 55, 70, 119, 56, 62, 56, 67, 67, 57, 63, 66, 63, 73, 63, 68, 77, 60, 48, 58, 68, 81, 62, 82, 61, 76, 61, 60, 76, 41, 107, 80, 76, 69, 66, 67, 69, 64, 62, 89, 67, 45, 86, 64, 58, 80, 51, 61, 67, 63, 71, 62, 60, 75, 70, 65, 56, 55, 64, 67, 69, 67, 57, 58, 59, 89, 63, 73, 107, 66, 61, 65, 86, 62, 54, 63, 55, 72, 61, 59, 52, 64, 86, 78, 79, 74, 55, 57, 83, 75, 64, 67, 63, 71, 82, 61, 61, 68, 52, 63, 65, 75, 72, 56, 62, 58, 58, 74, 52, 54, 71, 73, 81, 51, 71, 55, 49, 59, 65, 57, 74, 80, 76, 79, 48, 62, 70, 63, 106, 73, 55, 69, 87, 50, 47, 61, 67, 59, 78, 65, 64, 65, 65, 59, 62, 108, 77, 69, 56, 54, 55, 50, 77, 64, 66, 49, 72, 70, 59, 58, 61, 59, 71, 58, 63, 57, 63, 79, 49, 68, 60, 57, 63, 60, 56, 57, 115, 43, 58, 56, 66, 70, 60, 59, 125, 34, 52, 64, 61, 58, 55, 60, 62, 58, 62, 46, 71, 60, 66, 79, 64, 60, 49, 69, 48, 71, 53, 56, 69, 77, 48, 63, 74, 67, 54, 91, 55, 52, 63, 83, 92, 64, 62, 69, 56, 59, 59, 78, 65, 61, 68, 52, 62, 75, 63, 52, 82, 61, 70, 61, 58, 60, 50, 69, 56, 51, 60, 70, 85, 97, 73, 104, 61, 60, 74, 70, 68, 60, 62, 86, 88, 53, 60, 56, 64, 70, 67, 55, 72, 89, 63, 53, 59, 63, 65, 59, 96, 64, 66, 36, 48, 58, 71, 58, 53, 65, 58, 47, 62, 65, 86, 40, 56, 49, 69, 54, 68, 61, 67, 65, 57, 70, 65, 96, 74, 61, 58, 
53, 60, 58, 64, 77, 67, 66, 69, 71, 88, 53, 74, 91, 68, 62, 67, 53, 108, 73, 72, 59, 65, 79, 53, 67, 55, 61, 65, 107, 80, 58, 55, 72, 50, 75, 54, 56, 80, 54, 72, 63, 53, 64, 84, 58, 58, 83, 62, 87, 60, 60, 71, 57, 75, 54, 68, 76, 72, 62, 64, 60, 76, 67, 59, 61, 84, 54, 54, 87, 86, 81, 64, 113, 67, 67, 63, 77, 70, 90, 49, 58, 81, 71, 53, 56, 51, 58, 63, 73, 46, 57, 37, 66, 60, 63, 84, 61, 78, 81, 61, 64, 62, 41, 65, 67, 58, 62, 53, 67, 91, 51, 59, 90, 59, 64, 71, 65, 65, 60, 69, 59, 61, 59, 53, 45, 77, 58, 130, 63, 64, 52, 60, 65, 57, 71, 79, 75, 47, 66, 64, 72, 54, 82, 47, 53, 71, 62, 52, 64, 66, 62, 81, 86, 73, 86, 51, 71, 48, 58, 83, 100, 68, 70, 58, 84, 76, 66, 54, 74, 69, 59, 60, 57, 71, 44, 57, 64, 74, 73, 60, 86, 55, 79, 62, 92, 100, 80, 69, 81, 77, 81, 89, 72, 70, 106, 84, 45, 65, 67, 56, 63, 63, 61, 68, 61, 54, 69, 93, 71, 44, 69, 63, 62, 67, 51, 58, 61, 114, 48, 63, 51, 112, 68, 81, 72, 56, 53, 50, 50, 45, 65, 75, 70, 82, 48, 38, 64, 58, 64, 67, 52, 40, 69, 63, 61, 64, 51, 59, 65, 62, 36, 112, 61, 65, 51, 67, 57, 46, 58, 80, 75, 46, 60, 82, 74, 97, 62, 63, 65, 78, 59, 74, 57, 66, 104, 61, 58, 64, 49, 62, 83, 63, 93, 57, 59, 65, 60, 73, 64, 57, 60, 67, 57, 59, 73, 59, 64, 59, 69, 67, 80, 58, 69, 52, 86, 68, 65, 46, 55, 69, 52, 51, 50, 57, 59, 58, 57, 54, 79, 87, 53, 59, 55, 55, 65, 66, 41, 61, 65, 67, 77, 112, 109, 54, 56, 56, 60, 72, 55, 70, 74, 65, 69, 62, 67, 56, 58, 74, 71, 51, 72, 68, 59, 67, 75, 63, 70, 66, 87, 69, 55, 63, 61, 66, 66, 58, 67, 63, 76, 65, 63, 65, 85, 69, 50, 73, 66, 67, 43, 58, 88, 64, 67, 58, 67, 62, 66, 69, 128, 54, 54, 66, 66, 107, 57, 57, 72, 60, 59, 64, 71, 50, 122, 64, 66, 64, 52, 52, 59, 134, 75, 95, 65, 61, 72, 44, 59, 48, 60, 61, 61, 75, 75, 73, 111, 53, 70, 54, 53, 69, 72, 58, 63, 63, 70, 45, 71, 55, 73, 70, 58, 67, 75, 72, 56, 77, 61, 54, 71, 83, 65, 58, 65, 64, 72, 73, 105, 68, 91, 71, 67, 102, 70, 74, 57, 48, 69, 59, 78, 53, 80, 85, 54, 64, 74, 79, 63, 62, 63, 80, 55, 50, 64, 46, 60, 52, 60, 85, 41, 71, 69, 52, 60, 78, 82, 73, 74, 70, 64, 63, 54, 56, 86, 54, 57, 58, 70, 70, 63, 67, 55, 60, 67, 74, 55, 52, 51, 46, 54, 45, 63, 94, 66, 64, 102, 70, 125, 68, 67, 82, 68, 60, 57, 66, 53, 86, 75, 64, 63, 89, 44, 62, 40, 65, 60, 65, 56, 60, 50, 48, 55, 67, 55, 75, 62, 67, 66, 61, 65, 82, 92, 66, 70, 48, 63, 64, 52, 70, 70, 63, 79, 71, 77, 59, 84, 80, 43, 66, 45, 57, 54, 64, 72, 70, 66, 79, 65, 54, 66, 69, 93, 52, 57, 71, 64, 53, 60, 62, 53, 80, 74, 69, 64, 68, 76, 65, 82, 58, 55, 54, 59, 73, 49, 64, 70, 57, 60, 60, 61, 66, 71, 56, 59, 61, 87, 74, 63, 66, 81, 68, 72, 58, 94, 51, 69, 64, 76, 50, 56, 48, 52, 46, 88, 89, 68, 70, 44, 57, 51, 60, 62, 49, 45, 76, 62, 99, 76, 54, 65, 63, 65, 40, 77, 54, 37, 56, 93, 66, 78, 71, 58, 60, 56, 61, 107, 21, 66, 53, 47, 85, 71, 63, 61, 63, 60, 68, 56, 46, 68, 83, 71, 49, 70, 104, 60, 64, 66, 68, 58, 69, 86, 48, 58, 66, 71, 53, 56, 80, 79, 51, 49, 62, 106, 55, 57, 44, 54, 68, 67, 56, 98, 63, 71, 69, 72, 58, 59, 76, 46, 77, 75, 76, 72, 125, 55, 56, 67, 61, 69, 53, 67, 68, 82, 57, 57, 65, 63, 66, 59, 60, 78, 62, 57, 65, 71, 75, 43, 61, 58, 76, 51, 67, 66, 55, 63, 64, 72, 78, 54, 74, 69, 59, 72, 49, 60, 68, 62, 51, 59, 45, 49, 99, 52, 68, 75, 89, 68, 68, 50, 66, 64, 67, 77, 76, 57, 59, 57, 63, 66, 93, 81, 74, 71, 62, 64, 67, 109, 59, 81, 59, 60, 58, 58, 73, 56, 55, 61, 66, 71, 57, 59, 61, 53, 73, 60, 64, 58, 61, 63, 51, 55, 46, 62, 73, 76, 76, 72, 57, 84, 67, 64, 68, 56, 69, 53, 83, 56, 57, 53, 70, 66, 65, 57, 100, 71, 63, 67, 67, 60, 37, 137, 72, 78, 46, 84, 56, 67, 57, 54, 52, 62, 66, 109, 65, 79, 67, 79, 64, 74, 66, 67, 
88, 64, 51, 69, 55, 104, 97, 99, 60, 64, 84, 60, 61, 56, 68, 54, 67, 66, 62, 90, 59, 76, 55, 79, 67, 55, 62, 77, 50, 72, 67, 80, 78, 73, 57, 61, 67, 78, 101, 61, 79, 58, 70, 62, 70, 59, 69, 60, 67, 51, 53, 60, 68, 77, 63, 69, 54, 64, 50, 66, 60, 61, 66, 78, 77, 56, 54, 78, 61, 59, 71, 60, 66, 87, 65, 53, 76, 65, 67, 56, 80, 65, 92, 60, 48, 65, 87, 80, 67, 62, 83, 76, 55, 48, 72, 53, 53, 53, 62, 54, 53, 55, 47, 69, 52, 53, 72, 70, 58, 101, 44, 89, 63, 62, 58, 72, 65, 53, 49, 48, 65, 71, 78, 48, 57, 65, 64, 67, 70, 59, 65, 65, 63, 61, 82, 54, 62, 58, 65, 66, 61, 45, 46, 87, 63, 145, 57, 58, 58, 58, 58, 69, 93, 66, 62, 56, 58, 58, 59, 63, 63, 58, 61, 51, 60, 65, 60, 76, 71, 60, 68, 83, 62, 56, 78, 67, 59, 69, 80, 36, 83, 49, 60, 52, 80, 80, 45, 50, 72, 71, 59, 79, 66, 75, 79, 56, 78, 55, 49, 59, 70, 66, 53, 48, 52, 82, 56, 55, 95, 80, 91, 65, 84, 53, 61, 54, 55, 69, 57, 95, 62, 105, 40, 45, 58, 54, 68, 54, 81, 36, 58, 80, 63, 65, 60, 68, 53, 58, 63, 56, 52, 68, 62, 79, 46, 54, 61, 51, 62, 51, 65, 74, 81, 53, 72, 53, 65, 59, 76, 74, 67, 85, 63, 59, 51, 58, 90, 60, 59, 61, 57, 74, 57, 45, 97, 84, 61, 89, 49, 77, 67, 60, 65, 64, 83, 75, 67, 61, 70, 56, 54, 53, 66, 71, 62, 63, 58, 66, 70, 48, 77, 77, 85, 99, 50, 60, 57, 61, 57, 65, 71, 85, 90, 50, 85, 68, 69, 72, 48, 96, 55, 68, 61, 47, 71, 79, 52, 55, 71, 48, 57, 73, 73, 100, 74, 59, 90, 60, 61, 65, 63, 72, 72, 112, 63, 79, 58, 58, 53, 66, 62, 100, 63, 55, 70, 62, 66, 98, 69, 49, 86, 63, 75, 52, 110, 69, 78, 57, 62, 59, 75, 71, 53, 81, 80, 78, 66, 57, 75, 54, 60, 53, 64, 91, 61, 48, 64, 54, 57, 56, 84, 46, 89, 72, 126, 71, 54, 58, 71, 64, 59, 73, 45, 42, 73, 62, 88, 57, 48, 85, 58, 86, 78, 59, 73, 63, 59, 75, 82, 61, 72, 66, 69, 66, 55, 46, 57, 67, 79, 63, 56, 67, 60, 63, 82, 62, 67, 76, 47, 68, 76, 63, 62, 70, 62, 53, 58, 38, 66, 68, 63, 64, 61, 65, 76, 66, 75, 64, 51, 65, 64, 80, 62, 67, 44, 55, 66, 57, 53, 65, 63, 55, 67, 123, 77, 92, 60, 54, 79, 72, 60, 53, 69, 73, 60, 69, 80, 66, 83, 62, 69, 74, 64, 70, 61, 51, 56, 61, 54, 61, 54, 74, 80, 63, 47, 55, 79, 54, 47, 62, 73, 59, 59, 66, 48, 64, 63, 57, 54, 63, 58, 87, 61, 61, 68, 59, 74, 52, 66, 68, 66, 137, 48, 46, 64, 68, 129, 74, 80, 66, 60, 69, 79, 56, 65, 78, 62, 69, 106, 53, 53, 66, 62, 91, 63, 65, 80, 57, 50, 76, 62, 68, 83, 51, 77, 61, 64, 61, 72, 83, 106, 66, 56, 70, 57, 59, 51, 84, 55, 61, 66, 88, 77, 68, 54, 66, 54, 67, 64, 73, 60, 71, 63, 88, 59, 46, 62, 67, 95, 77, 82, 71, 74, 50, 58, 63, 104, 58, 73, 67, 53, 74, 56, 57, 56, 46, 93, 42, 75, 56, 66, 64, 58, 65, 72, 92, 74, 71, 54, 60, 79, 51, 61, 66, 62, 54, 64, 83, 67, 54, 72, 78, 66, 52, 54, 103, 66, 65, 53, 64, 58, 64, 74, 66, 71, 47, 47, 60, 63, 76, 62, 49, 82, 66, 51, 61, 59, 77, 55, 186, 65, 103, 102, 67, 75, 74, 68, 58, 62, 54, 74, 71, 73, 51, 61, 50, 72, 52, 89, 72, 69, 74, 60, 62, 73, 57, 61, 118, 66, 73, 53, 66, 60, 70, 60, 57, 69, 59, 55, 52, 70, 60, 69, 70, 66, 72, 63, 71, 63, 74, 71, 54, 70, 65, 67, 61, 62, 71, 66, 83, 65, 65, 69, 81, 78, 94, 62, 74, 71, 53, 54, 78, 96, 55, 104, 52, 74, 92, 64, 37, 66, 66, 62, 49, 62, 64, 57, 67, 85, 60, 59, 55, 70, 92, 60, 51, 60, 65, 78, 68, 70, 53, 67, 75, 54, 71, 68, 52, 64, 67, 63, 47, 64, 56, 63, 76, 63, 68, 55, 60, 65, 66, 58, 57, 79, 60, 67, 70, 132, 59, 62, 58, 61, 54, 35, 58, 66, 70, 60, 61, 67, 61, 84, 71, 79, 68, 57, 52, 95, 58, 63, 86, 50, 56, 56, 67, 65, 70, 55, 47, 51, 54, 71, 60, 60, 75, 59, 58, 54, 78, 51, 111, 64, 49, 68, 67, 76, 64, 61, 71, 82, 82, 70, 55, 64, 87, 65, 74, 59, 50, 66, 77, 55, 63, 46, 64, 63, 60, 92, 72, 57, 52, 66, 63, 66, 77, 58, 61, 59, 54, 72, 60, 
60, 77, 60, 56, 54, 57, 72, 67, 74, 45, 71, 93, 40, 51, 64, 55, 67, 73, 53, 54, 50, 38, 68, 63, 57, 61, 65, 103, 62, 59, 47, 57, 67, 123, 72, 65, 74, 143, 73, 61, 54, 67, 66, 71, 68, 65, 94, 67, 55, 61, 52, 61, 62, 83, 60, 67, 58, 67, 44, 67, 98, 67, 69, 50, 78, 54, 81, 69, 65, 55, 58, 61, 73, 62, 74, 61, 62, 58, 54, 72, 73, 56, 70, 61, 72, 80, 62, 55, 60, 71, 61, 59, 66, 72, 82, 61, 66, 70, 64, 79, 87, 41, 61, 75, 68, 67, 62, 60, 57, 70, 94, 61, 86, 58, 44, 40, 65, 62, 44, 61, 48, 100, 76, 97, 49, 71, 64, 48, 55, 59, 58, 74, 56, 39, 47, 69, 71, 72, 64, 62, 61, 74, 57, 63, 70, 62, 72, 87, 69, 67, 62, 59, 73, 61, 71, 68, 64, 54, 75, 55, 63, 63, 81, 80, 49, 54, 44, 70, 60, 70, 63, 69, 60, 69, 58, 93, 68, 74, 75, 64, 90, 58, 102, 69, 54, 69, 63, 51, 57, 58, 89, 61, 67, 65, 69, 69, 105, 65, 62, 60, 71, 53, 53, 64, 66, 107, 73, 55, 58, 71, 67, 67, 62, 57, 66, 76, 63, 57, 60, 67, 61, 57, 53, 58, 59, 71, 65, 60, 65, 99, 68, 70, 77, 86, 60, 72, 57, 45, 63, 82, 64, 62, 61, 54, 77, 63, 65, 66, 72, 58, 52, 65, 56, 91, 64, 57, 74, 68, 44, 83, 82, 69, 78, 76, 63, 48, 67, 73, 105, 79, 56, 51, 60, 60, 70, 62, 70, 75, 61, 62, 68, 69, 51, 62, 72, 63, 57, 81, 54, 89, 60, 82, 53, 55, 72, 62, 59, 65, 76, 57, 53, 82, 83, 77, 69, 69, 65, 58, 110, 42, 52, 57, 70, 86, 61, 69, 73, 60, 55, 64, 66, 49, 75, 62, 77, 69, 66, 58, 61, 61, 74, 83, 67, 64, 56, 73, 73, 71, 53, 54, 55, 78, 71, 65, 78, 81, 49, 76, 60, 48, 98, 67, 75, 74, 99, 56, 70, 67, 69, 61, 60, 83, 53, 89, 72, 57, 64, 59, 76, 56, 57, 55, 61, 56, 75, 63, 58, 60, 58, 55, 57, 53, 66, 92, 66, 72, 50, 59, 67, 58, 62, 63, 57, 62, 74, 54, 65, 61, 67, 53, 74, 68, 71, 60, 64, 74, 81, 66, 63, 78, 64, 58, 66, 39, 63, 63, 86, 56, 71, 68, 96, 62, 69, 63, 70, 68, 72, 66, 63, 74, 52, 71, 51, 60, 56, 60, 57, 88, 60, 58, 62, 60, 64, 57, 86, 100, 63, 62, 51, 65, 73, 73, 59, 80, 78, 52, 61, 63, 66, 88, 56, 60, 60, 75, 59, 70, 66, 75, 63, 51, 61, 52, 66, 90, 116, 88, 65, 69, 42, 64, 67, 56, 60, 56, 64, 82, 60, 51, 63, 78, 63, 56, 64, 69, 81, 60, 63, 67, 62, 59, 49, 45, 79, 60, 62, 76, 77, 54, 94, 71, 48, 78, 69, 98, 50, 56, 94, 52, 72, 56, 132, 49, 66, 59, 72, 67, 68, 55, 67, 67, 59, 67, 56, 67, 74, 65, 78, 58, 72, 51, 64, 66, 57, 64, 56, 62, 68, 54, 74, 55, 76, 73, 62, 57, 66, 73, 70, 70, 75, 64, 68, 71, 46, 68, 71, 65, 73, 57, 70, 57, 75, 53, 59, 67, 66, 66, 73, 67, 57, 57, 68, 74, 50, 70, 66, 55, 68, 70, 68, 56, 62, 83, 66, 64, 66, 61, 75, 80, 103, 53, 34, 55, 61, 82, 58, 62, 56, 56, 89, 81, 68, 60, 62, 49, 65, 54, 129, 94, 60, 69, 74, 58, 75, 72, 77, 63, 52, 54, 60, 65, 61, 75, 64, 97, 59, 69, 71, 54, 65, 58, 64, 65, 54, 69, 73, 55, 64, 65, 59, 57, 60, 62, 67, 66, 63, 45, 65, 55, 67, 78, 67, 50, 84, 64, 59, 67, 82, 60, 73, 63, 62, 67, 52, 177, 60, 76, 74, 74, 135, 45, 69, 63, 64, 66, 77, 61, 70, 90, 64, 66, 63, 71, 66, 62, 89, 69, 66, 61, 49, 73, 51, 93, 60, 66, 72, 67, 76, 75, 66, 70, 59, 52, 89, 58, 56, 67, 59, 87, 58, 74, 49, 66, 72, 71, 80, 48, 69, 64, 52, 61, 64, 64, 65, 83, 53, 71, 58, 58, 67, 45, 92, 99, 74, 47, 57, 74, 65, 52, 103, 62, 56, 69, 55, 73, 53, 69, 58, 55, 68, 65, 55, 62, 67, 54, 90, 67, 70, 61, 91, 70, 61, 57, 90, 73, 46, 81, 57, 63, 60, 79, 69, 78, 59, 65, 99, 55, 58, 70, 57, 73, 54, 71, 71, 76, 107, 58, 64, 63, 70, 75, 63, 65, 47, 70, 56, 66, 63, 58, 60, 79, 64, 141, 62, 85, 60, 79, 63, 52, 49, 60, 99, 79, 70, 60, 60, 57, 65, 57, 72, 64, 61, 69, 69, 67, 68, 61, 61, 49, 60, 88, 36, 72, 73, 66, 58, 74, 53, 56, 62, 72, 57, 37, 52, 70, 53, 60, 67, 83, 60, 61, 62, 61, 61, 81, 65, 49, 80, 63, 55, 66, 59, 68, 67, 61, 53, 71, 58, 73, 62, 65, 71, 48, 57, 60, 
91, 60, 63, 54, 61, 61, 57, 54, 68, 60, 61, 53, 72, 62, 71, 72, 60, 60, 91, 63, 59, 56, 78, 66, 81, 88, 55, 57, 71, 81, 47, 74, 67, 70, 66, 62, 53, 50, 56, 65, 65, 61, 57, 64, 78, 56, 51, 66, 69, 59, 69, 56, 66, 57, 62, 72, 106, 73, 49, 48, 65, 53, 55, 73, 80, 68, 59, 88, 78, 71, 62, 72, 55, 76, 60, 80, 107, 68, 64, 57, 83, 65, 67, 91, 59, 62, 101, 76, 53, 55, 79, 70, 69, 65, 50, 60, 62, 67, 66, 75, 69, 75, 44, 61, 56, 69, 55, 75, 72, 71, 56, 46, 68, 80, 72, 65, 62, 59, 65, 60, 60, 106, 57, 58, 65, 59, 65, 99, 57, 68, 40, 54, 64, 58, 69, 64, 52, 69, 65, 49, 64, 61, 55, 63, 49, 59, 64, 71, 75, 78, 50, 117, 59, 65, 65, 65, 69, 79, 65, 65, 55, 55, 62, 54, 75, 54, 72, 37, 53, 66, 54, 78, 62, 92, 66, 61, 58, 84, 57, 63, 58, 69, 100, 70, 68, 60, 134, 56, 57, 65, 51, 62, 56, 62, 68, 65, 61, 62, 69, 56, 74, 46, 60, 57, 62, 63, 68, 67, 64, 81, 58, 62, 57, 73, 69, 67, 68, 59, 60, 75, 51, 61, 68, 57, 77, 67, 56, 67, 76, 55, 76, 57, 72, 65, 53, 76, 68, 61, 52, 70, 63, 56, 75, 56, 49, 63, 51, 68, 55, 50, 65, 60, 90, 60, 70, 61, 68, 61, 52, 51, 53, 106, 71, 126, 69, 54, 56, 59, 64, 65, 47, 72, 60, 75, 57, 66, 61, 65, 72, 67, 69, 72, 70, 63, 63, 65, 64, 70, 83, 92, 75, 65, 53, 60, 117, 47, 68, 62, 67, 62, 70, 59, 44, 62, 56, 64, 65, 71, 68, 53, 59, 66, 58, 73, 70, 49, 72, 103, 77, 57, 91, 60, 68, 55, 73, 48, 74, 74, 62, 61, 65, 71, 77, 85, 64, 69, 54, 67, 64, 77, 59, 75, 49, 52, 58, 59, 73, 47, 75, 82, 54, 64, 71, 56, 58, 85, 59, 46, 73, 119, 56, 65, 54, 67, 78, 50, 58, 75, 53, 72, 56, 50, 81, 51, 78, 57, 93, 82, 90, 52, 47, 51, 43, 57, 114, 212, 61, 66, 77, 74, 59, 64, 45, 74, 69, 58, 47, 55, 54, 64, 69, 55, 50, 48, 57, 58, 61, 68, 38, 64, 57, 82, 87, 83, 70, 68, 67, 72, 52, 71, 63, 54, 106, 52, 46, 72, 54, 58, 65, 67, 63, 71, 43, 62, 64, 59, 54, 53, 50, 48, 63, 77, 86, 90, 68, 65, 54, 59, 86, 51, 71, 65, 67, 61, 60, 55, 55, 57, 85, 83, 64, 40, 55, 60, 69, 97, 55, 56, 63, 52, 71, 64, 49, 71, 58, 61, 77, 62, 52, 52, 60, 67, 68, 66, 67, 74, 70, 54, 62, 62, 66, 67, 50, 74, 102, 67, 81, 76, 104, 58, 63, 62, 71, 92, 50, 54, 55, 70, 61, 55, 70, 54, 55, 72, 66, 65, 41, 51, 63, 63, 60, 83, 80, 41, 62, 65, 64, 80, 64, 88, 66, 54, 58, 61, 66, 69, 64, 62, 50, 64, 82, 42, 76, 71, 58, 66, 66, 59, 76, 70, 79, 85, 58, 69, 52, 75, 81, 53, 80, 54, 49, 68, 65, 55, 53, 75, 78, 56, 46, 58, 58, 71, 77, 82, 43, 71, 50, 59, 75, 66, 69, 61, 62, 84, 65, 115, 108, 98, 54, 37, 82, 79, 68, 98, 51, 70, 104, 50, 52, 64, 64, 64, 56, 79, 48, 66, 65, 56, 55, 33, 55, 73, 88, 61, 63, 73, 91, 64, 64, 56, 67, 97, 51, 43, 62, 63, 52, 45, 58, 64, 58, 64, 64, 74, 57, 62, 74, 44, 81, 49, 79, 64, 148, 61, 84, 90, 37, 77, 72, 63, 56, 66, 48, 52, 77, 87, 99, 76, 50, 75, 56, 76, 68, 73, 51, 61, 74, 78, 60, 100, 124, 81, 52, 71, 47, 59, 103, 73, 56, 79, 99, 49, 64, 49, 64, 58, 51, 56, 82, 51, 91, 61, 61, 66, 70, 68, 49, 86, 65, 60, 67, 77, 68, 59, 66, 69, 54, 55, 67, 57, 73, 65, 48, 79, 66, 48, 63, 64, 64, 54, 90, 60, 65, 70, 54, 66, 66, 55, 59, 101, 56, 83, 66, 46, 55, 76, 50, 67, 55, 70, 68, 75, 79, 61, 51, 52, 45, 62, 63, 60, 71, 97, 63, 70, 51, 53, 52, 76, 70, 66, 73, 62, 58, 64, 67, 57, 67, 56, 82, 84, 67, 69, 72, 71, 59, 71, 77, 57, 95, 65, 78, 83, 70, 74, 109, 66, 66, 77, 51, 66, 58, 52, 63, 62, 64, 51, 60, 64, 95, 71, 62, 74, 61, 69, 74, 65, 71, 53, 74, 53, 52, 55, 51, 56, 86, 108, 76, 62, 103, 48, 80, 57, 67, 49, 73, 74, 76, 112, 59, 62, 107, 69, 71, 75, 61, 82, 49, 47, 54, 103, 62, 92, 70, 81, 45, 57, 76, 57, 58, 68, 74, 87, 56, 47, 60, 44, 77, 74, 59, 82, 50, 57, 52, 63, 56, 60, 47, 59, 61, 61, 59, 54, 59, 71, 55, 76, 67, 69, 69, 63, 74, 
88, 58, 43, 92, 63, 74, 53, 72, 62, 77, 78, 59, 55, 68, 74, 62, 83, 75, 56, 57, 75, 72, 68, 57, 66, 70, 92, 58, 69, 55, 115, 54, 43, 64, 71, 46, 68, 48, 94, 66, 56, 62, 65, 63, 69, 64, 81, 78, 49, 72, 54, 88, 49, 61, 82, 72, 67, 46, 59, 64, 76, 71, 70, 59, 68, 83, 56, 57, 77, 66, 138, 59, 36, 51, 41, 70, 73, 63, 68, 88, 72, 77, 70, 95, 54, 51, 57, 52, 66, 59, 55, 60, 95, 118, 72, 70, 65, 60, 62, 53, 69, 58, 63, 72, 80, 64, 52, 72, 44, 93, 51, 85, 91, 58, 70, 61, 49, 53, 58, 84, 62, 59, 64, 67, 48, 75, 65, 53, 66, 71, 81, 56, 43, 59, 65, 63, 71, 79, 78, 69, 56, 62, 64, 59, 57, 74, 63, 55, 66, 62, 72, 82, 56, 43, 78, 65, 81, 107, 51, 61, 62, 57, 52, 53, 43, 68, 51, 57, 50, 60, 47, 61, 61, 74, 62, 93, 52, 61, 56, 80, 71, 77, 79, 57, 54, 131, 95, 59, 71, 63, 63, 72, 70, 60, 64, 67, 57, 68, 73, 71, 55, 64, 48, 62, 75, 49, 57, 57, 67, 64, 56, 53, 52, 53, 57, 72, 52, 66, 61, 61, 41, 65, 56, 63, 61, 85, 61, 59, 87, 51, 62, 63, 60, 65, 67, 60, 60, 74, 74, 52, 60, 65, 98, 81, 70, 50, 63, 69, 61, 55, 75, 53, 66, 60, 60, 54, 84, 77, 71, 70, 60, 59, 68, 86, 94, 72, 71, 55, 55, 54, 75, 71, 85, 50, 60, 57, 65, 62, 81, 53, 67, 55, 81, 59, 78, 52, 109, 56, 92, 68, 68, 56, 65, 67, 67, 68, 49, 74, 48, 68, 66, 76, 64, 84, 58, 78, 65, 85, 73, 75, 50, 47, 54, 55, 48, 67, 66, 54, 63, 50, 114, 56, 70, 68, 83, 129, 91, 64, 53, 111, 53, 67, 58, 68, 48, 65, 96, 79, 65, 63, 97, 83, 92, 62, 72, 59, 72, 61, 62, 88, 61, 55, 79, 66, 75, 67, 71, 60, 84, 61, 61, 52, 50, 66, 70, 51, 52, 59, 74, 51, 79, 63, 83, 81, 67, 48, 64, 67, 79, 63, 59, 71, 63, 67, 65, 62, 76, 65, 42, 64, 62, 69, 89, 65, 61, 71, 60, 74, 66, 80, 51, 63, 57, 68, 69, 72, 79, 95, 95, 33, 57, 67, 48, 45, 60, 76, 62, 70, 86, 64, 54, 59, 99, 56, 56, 66, 53, 66, 61, 53, 73, 72, 46, 50, 65, 86, 54, 80, 71, 66, 75, 61, 73, 51, 51, 51, 74, 51, 52, 83, 59, 84, 45, 66, 47, 54, 41, 54, 60, 84, 87, 60, 69, 58, 70, 78, 63, 84, 56, 64, 48, 53, 59, 63, 54, 84, 53, 56, 65, 43, 112, 50, 56, 70, 67, 67, 94, 75, 59, 79, 89, 66, 97, 72, 61, 64, 73, 68, 53, 54, 45, 54, 81, 47, 86, 54, 64, 64, 71, 45, 64, 62, 114, 48, 62, 64, 69, 71, 49, 103, 78, 68, 68, 58, 77, 66, 71, 57, 153, 100, 71, 83, 92, 67, 53, 45, 57, 57, 48, 72, 52, 61, 55, 65, 68, 64, 52, 58, 48, 69, 60, 77, 43, 48, 60, 66, 83, 62, 99, 58, 61, 65, 68, 46, 51, 68, 59, 38, 62, 55, 44, 72, 53, 59, 40, 59, 86, 69, 76, 56, 57, 54, 68, 54, 81, 100, 38, 61, 39, 55, 81, 58, 53, 108, 73, 58, 141, 73, 49, 68, 58, 63, 59, 61, 42, 79, 68, 80, 50, 64, 57, 61, 61, 61, 56, 58, 67, 60, 72, 58, 62, 67, 59, 79, 56, 61, 56, 93, 61, 63, 82, 68, 67, 45, 59, 59, 41, 80, 64, 61, 61, 75, 76, 48, 79, 51, 70, 55, 49, 73, 67, 24, 76, 60, 68, 49, 70, 64, 52, 65, 58, 68, 86, 84, 57, 83, 54, 58, 66, 75, 76, 66, 74, 55, 49, 77, 57, 65, 67, 80, 67, 69, 61, 64, 61, 98, 52, 54, 63, 62, 53, 48, 92, 52, 66, 64, 60, 56, 65, 65, 49, 78, 63, 68, 66, 61, 53, 56, 53, 67, 66, 58, 78, 56, 84, 65, 55, 60, 75, 63, 77, 57, 53, 60, 73, 69, 71, 64, 139, 85, 64, 60, 64, 54, 56, 85, 144, 49, 54, 51, 48, 54, 58, 63, 56, 64, 57, 65, 61, 61, 88, 44, 48, 64, 53, 57, 51, 35, 57, 82, 57, 69, 81, 95, 115, 73, 53, 63, 68, 78, 48, 57, 56, 59, 60, 81, 61, 64, 69, 57, 73, 65, 64, 63, 60, 100, 59, 62, 83, 52, 56, 50, 60, 71, 67, 52, 64, 51, 84, 71, 65, 64, 65, 70, 39, 59, 59, 79, 57, 104, 92, 56, 73, 61, 72, 63, 66, 72, 68, 101, 54, 94, 57, 49, 58, 46, 69, 57, 74, 74, 64, 63, 71, 57, 63, 60, 56, 61, 66, 81, 60, 46, 60, 100, 53, 63, 65, 62, 66, 73, 81, 70, 78, 74, 71, 80, 78, 56, 63, 131, 76, 91, 62, 54, 52, 53, 74, 58, 55, 68, 86, 87, 110, 68, 62, 66, 72, 57, 56, 49, 46, 88, 
80, 40, 76, 69, 48, 81, 56, 53, 66, 51, 67, 36, 57, 89, 57, 58, 51, 91, 99, 59, 68, 80, 89, 79, 67, 108, 66, 64, 78, 69, 62, 52, 67, 61, 72, 62, 62, 67, 50, 74, 59, 80, 56, 75, 55, 68, 71, 60, 54, 63, 63, 64, 47, 73, 91, 53, 70, 83, 82, 71, 52, 60, 69, 68, 75, 83, 47, 72, 64, 71, 68, 49, 51, 68, 90, 75, 59, 62, 66, 95, 64, 55, 59, 49, 69, 66, 51, 68, 91, 64, 55, 76, 65, 61, 55, 68, 71, 52, 94, 71, 81, 62, 46, 55, 72, 74, 77, 93, 75, 59, 68, 58, 49, 60, 107, 64, 60, 56, 70, 51, 60, 63, 54, 48, 49, 67, 57, 48, 54, 129, 70, 65, 61, 56, 97, 80, 54, 70, 53, 61, 91, 66, 75, 70, 64, 65, 74, 67, 62, 65, 67, 95, 43, 72, 66, 65, 90, 66, 65, 70, 67, 54, 103, 66, 66, 62, 54, 86, 63, 65, 65, 99, 50, 62, 89, 66, 69, 65, 67, 48, 62, 44, 93, 82, 59, 62, 85, 48, 68, 82, 71, 59, 66, 72, 59, 74, 62, 57, 73, 47, 82, 56, 61, 93, 64, 87, 51, 45, 58, 64, 106, 69, 110, 53, 71, 63, 76, 60, 73, 105, 51, 98, 42, 55, 60, 80, 67, 62, 65, 86, 48, 45, 53, 60, 76, 89, 72, 60, 66, 61, 72, 81, 106, 76, 82, 64, 62, 69, 54, 70, 96, 62, 55, 103, 93, 82, 75, 61, 63, 70, 52, 60, 69, 61, 52, 61, 63, 49, 57, 67, 62, 68, 112, 65, 76, 69, 77, 68, 45, 69, 71, 60, 64, 55, 62, 86, 59, 69, 58, 64, 73, 52, 75, 73, 63, 70, 100, 69, 74, 67, 74, 49, 76, 69, 79, 63, 104, 67, 59, 61, 54, 59, 68, 56, 100, 65, 67, 78, 71, 64, 66, 79, 75, 86, 64, 61, 80, 77, 145, 75, 56, 65, 62, 72, 68, 68, 70, 60, 66, 68, 87, 66, 68, 37, 44, 66, 78, 67, 67, 69, 46, 45, 67, 78, 72, 66, 45, 62, 57, 62, 77, 58, 85, 61, 45, 59, 67, 57, 51, 74, 49, 45, 72, 76, 52, 61, 69, 64, 60, 59, 72, 46, 76, 52, 71, 43, 52, 52, 55, 66, 60, 70, 66, 43, 62, 71, 49, 64, 64, 74, 66, 50, 50, 68, 61, 63, 54, 79, 74, 50, 52, 62, 55, 63, 81, 55, 69, 48, 61, 84, 59, 43, 84, 69, 65, 73, 93, 53, 84, 72, 64, 59, 76, 56, 75, 57, 99, 63, 66, 72, 42, 82, 62, 56, 59, 51, 49, 56, 74, 65, 60, 61, 82, 74, 61, 59, 71, 56, 43, 47, 59, 54, 75, 76, 51, 69, 76, 118, 63, 71, 55, 108, 71, 69, 65, 78, 52, 66, 79, 71, 46, 58, 73, 30, 53, 69, 84, 90, 84, 58, 45, 69, 78, 52, 68, 71, 68, 59, 61, 67, 56, 67, 76, 40, 55, 58, 50, 54, 53, 63, 73, 54, 70, 41, 49, 62, 53, 70, 61, 46, 66, 53, 49, 64, 55, 78, 68, 79, 63, 77, 64, 96, 54, 86, 95, 59, 68, 74, 65, 50, 50, 74, 63, 72, 85, 61, 70, 77, 46, 73, 75, 54, 50, 93, 69, 63, 54, 68, 75, 68, 74, 66, 79, 64, 75, 75, 76, 65, 63, 54, 54, 76, 72, 61, 54, 55, 58, 76, 64, 56, 44, 58, 60, 55, 69, 68, 70, 58, 56, 79, 59, 87, 47, 72, 65, 61, 52, 53, 81, 76, 44, 62, 98, 64, 81, 72, 75, 65, 78, 55, 68, 65, 52, 66, 85, 69, 60, 54, 81, 50, 76, 76, 67, 75, 70, 61, 86, 68, 91, 64, 62, 60, 64, 120, 46, 77, 57, 64, 59, 62, 47, 44, 51, 54, 99, 56, 76, 54, 87, 66, 66, 87, 71, 59, 135, 62, 68, 68, 86, 54, 64, 76, 48, 69, 79, 55, 60, 76, 69, 69, 191, 53, 65, 81, 60, 65, 89, 57, 48, 35, 54, 59, 64, 34, 64, 74, 46, 61, 59, 69, 77, 65, 59, 69, 64, 51, 87, 67, 68, 60, 76, 59, 68, 75, 70, 49, 60, 68, 51, 71, 62, 54, 62, 47, 77, 56, 75, 67, 63, 53, 61, 69, 66, 46, 69, 50, 69, 79, 40, 56, 77, 75, 73, 62, 68, 68, 63, 58, 118, 65, 55, 59, 61, 71, 65, 63, 45, 70, 63, 90, 80, 54, 62, 66, 44, 48, 64, 49, 83, 96, 86, 86, 70, 90, 54, 58, 68, 67, 76, 62, 69, 68, 65, 64, 75, 53, 64, 67, 65, 74, 54, 49, 67, 115, 62, 61, 68, 50, 76, 59, 59, 66, 49, 57, 55, 48, 54, 59, 65, 59, 57, 48, 60, 64, 59, 89, 68, 104, 59, 54, 78, 77, 52, 67, 43, 73, 73, 64, 69, 69, 59, 60, 66, 71, 72, 57, 43, 67, 51, 49, 61, 65, 53, 67, 124, 52, 59, 71, 67, 53, 52, 75, 57, 69, 60, 42, 51, 58, 61, 39, 66, 72, 76, 64, 80, 48, 50, 49, 68, 58, 76, 62, 82, 77, 65, 71, 42, 81, 82, 46, 137, 104, 68, 58, 62, 55, 59, 88, 101, 75, 72, 72, 
56, 52, 57, 139, 61, 59, 63, 54, 100, 73, 79, 53, 62, 60, 49, 74, 69, 51, 53, 81, 62, 72, 85, 63, 110, 64, 90, 64, 64, 71, 58, 57, 47, 54, 65, 77, 66, 61, 56, 70, 62, 59, 78, 64, 73, 105, 54, 51, 65, 83, 64, 73, 50, 55, 62, 60, 58, 66, 79, 75, 67, 62, 86, 63, 57, 47, 71, 43, 79, 127, 51, 65, 67, 51, 39, 79, 66, 80, 73, 74, 54, 59, 63, 54, 46, 59, 70, 77, 76, 64, 82, 64, 75, 70, 69, 53, 62, 54, 54, 55, 62, 62, 58, 94, 59, 63, 104, 73, 62, 54, 53, 58, 67, 60, 71, 69, 78, 64, 72, 78, 64, 116, 56, 59, 44, 54, 82, 62, 57, 65, 74, 73, 71, 77, 69, 66, 53, 65, 61, 63, 51, 77, 56, 51, 72, 55, 64, 74, 63, 70, 58, 65, 69, 56, 69, 50, 64, 50, 56, 68, 74, 69, 58, 69, 63, 75, 78, 77, 59, 58, 71, 93, 52, 55, 74, 58, 66, 63, 45, 48, 62, 73, 69, 65, 66, 59, 96, 67, 52, 52, 73, 71, 67, 52, 58, 68, 57, 42, 47, 45, 71, 112, 53, 60, 63, 45, 73, 61, 75, 89, 77, 47, 67, 75, 62, 62, 114, 69, 60, 68, 68, 92, 61, 82, 54, 56, 65, 50, 66, 74, 94, 56, 51, 90, 79, 54, 51, 65, 71, 51, 82, 49, 57, 57, 29, 58, 60, 90, 56, 56, 62, 64, 65, 67, 56, 58, 36, 59, 64, 59, 59, 89, 79, 57, 72, 65, 57, 60, 51, 74, 55, 86, 60, 60, 50, 51, 67, 52, 81, 54, 64, 74, 85, 56, 91, 60, 54, 56, 47, 66, 58, 52, 60, 131, 59, 53, 84, 56, 90, 82, 60, 57, 84, 62, 102, 73, 81, 58, 70, 77, 66, 55, 63, 68, 68, 75, 79, 73, 62, 61, 61, 78, 52, 72, 71, 60, 59, 68, 55, 64, 76, 52, 66, 64, 70, 54, 61, 90, 57, 59, 52, 65, 88, 57, 77, 62, 67, 76, 79, 47, 64, 63, 86, 61, 98, 62, 79, 81, 64, 67, 84, 65, 64, 91, 60, 67, 55, 76, 58, 58, 53, 72, 68, 64, 41, 60, 79, 111, 60, 61, 93, 58, 81, 49, 69, 66, 83, 55, 89, 60, 67, 49, 75, 64, 53, 117, 68, 65, 96, 63, 62, 73, 41, 81, 53, 68, 60, 53, 56, 61, 68, 55, 66, 64, 147, 56, 54, 56, 76, 67, 53, 72, 45, 50, 74, 69, 73, 97, 59, 81, 57, 57, 56, 65, 102, 61, 57, 98, 70, 57, 69, 77, 86, 55, 51, 53, 79, 63, 96, 66, 73, 92, 64, 70, 75, 60, 67, 60, 67, 82, 83, 57, 49, 61, 60, 62, 83, 58, 64, 114, 140, 65, 77, 75, 60, 51, 67, 72, 59, 68, 66, 51, 65, 84, 64, 63, 56, 58, 55, 63, 62, 60, 59, 50, 86, 88, 71, 68, 75, 83, 70, 62, 98, 47, 70, 54, 55, 83, 94, 77, 62, 56, 54, 67, 66, 87, 74, 54, 60, 68, 60, 64, 59, 56, 89, 64, 61, 61, 73, 53, 61, 68, 56, 67, 66, 64, 58, 51, 59, 58, 74, 40, 64, 74, 57, 52, 48, 73, 72, 69, 54, 50, 75, 63, 66, 69, 54, 71, 69, 66, 70, 59, 58, 55, 69, 71, 72, 55, 91, 63, 79, 57, 55, 61, 62, 58, 66, 58, 71, 75, 73, 51, 56, 42, 56, 61, 61, 42, 82, 55, 55, 50, 62, 64, 74, 61, 51, 53, 71, 73, 61, 78, 69, 70, 64, 54, 60, 76, 75, 135, 51, 57, 63, 112, 62, 62, 57, 74, 66, 72, 77, 90, 59, 42, 69, 77, 63, 63, 104, 46, 71, 58, 60, 61, 98, 60, 44, 75, 49, 62, 73, 72, 59, 51, 70, 64, 57, 73, 63, 59, 69, 78, 74, 85, 65, 58, 49, 61, 76, 55, 61, 60, 72, 61, 87, 68, 71, 78, 79, 61, 62, 44, 69, 62, 66, 44, 55, 53, 47, 53, 58, 66, 64, 59, 78, 70, 71, 59, 72, 60, 75, 57, 83, 64, 67, 77, 61, 63, 59, 59, 76, 72, 93, 67, 73, 49, 64, 66, 50, 58, 57, 59, 76, 79, 62, 67, 34, 40, 67, 39, 44, 73, 51, 80, 67, 68, 59, 84, 67, 62, 54, 58, 58, 59, 79, 85, 69, 51, 57, 55, 77, 60, 70, 68, 96, 72, 59, 86, 100, 84, 64, 46, 60, 59, 64, 80, 75, 43, 64, 50, 51, 66, 68, 54, 95, 72, 60, 64, 67, 76, 102, 65, 53, 70, 68, 47, 81, 69, 62, 60, 73, 73, 86, 78, 60, 64, 79, 64, 95, 77, 92, 61, 54, 83, 77, 65, 133, 60, 69, 65, 64, 56, 51, 50, 63, 65, 61, 57, 68, 68, 75, 64, 165, 79, 85, 80, 58, 48, 78, 55, 70, 46, 66, 62, 48, 66, 60, 64, 60, 55, 59, 57, 86, 72, 63, 66, 74, 58, 43, 73, 60, 80, 79, 80, 52, 62, 49, 80, 60, 63, 54, 52, 58, 56, 71, 65, 64, 76, 71, 72, 71, 60, 64, 69, 63, 73, 80, 49, 69, 87, 48, 53, 58, 65, 52, 77, 77, 86, 62, 49, 135, 69, 
[large comma-separated integer data array from the diff content omitted — no file header or identifying context survives]
57, 60, 77, 65, 51, 96, 73, 85, 64, 58, 61, 76, 52, 124, 77, 49, 72, 76, 69, 59, 56, 67, 64, 65, 66, 70, 67, 89, 81, 53, 47, 66, 53, 67, 68, 90, 92, 61, 46, 80, 83, 49, 56, 71, 59, 53, 66, 68, 61, 75, 61, 61, 90, 63, 90, 68, 60, 65, 66, 66, 58, 42, 63, 65, 81, 71, 66, 70, 62, 62, 49, 55, 52, 61, 58, 64, 69, 62, 58, 74, 71, 95, 81, 96, 70, 68, 68, 54, 64, 59, 87, 54, 70, 68, 73, 69, 64, 71, 67, 50, 88, 60, 72, 71, 69, 58, 64, 76, 60, 59, 63, 62, 77, 63, 65, 71, 62, 77, 58, 61, 70, 68, 63, 68, 73, 66, 63, 103, 54, 68, 67, 65, 69, 65, 71, 84, 75, 74, 70, 67, 71, 59, 70, 63, 86, 67, 77, 60, 66, 63, 66, 61, 53, 73, 69, 67, 73, 71, 50, 57, 83, 57, 65, 63, 59, 78, 81, 59, 54, 58, 55, 64, 76, 68, 65, 59, 59, 50, 75, 71, 73, 58, 73, 71, 60, 64, 68, 65, 64, 62, 57, 69, 93, 75, 63, 80, 60, 59, 66, 79, 71, 76, 63, 53, 80, 62, 61, 62, 65, 93, 70, 63, 71, 58, 63, 56, 55, 61, 73, 74, 63, 59, 82, 55, 80, 74, 63, 70, 72, 111, 57, 64, 69, 62, 63, 73, 43, 75, 73, 118, 89, 71, 61, 67, 76, 72, 89, 58, 56, 61, 84, 63, 61, 58, 84, 62, 56, 66, 73, 60, 48, 45, 62, 59, 58, 74, 75, 58, 65, 50, 57, 79, 70, 69, 70, 63, 73, 68, 64, 64, 83, 61, 56, 54, 59, 65, 64, 61, 74, 67, 84, 63, 53, 64, 59, 61, 57, 65, 78, 69, 64, 64, 61, 68, 94, 65, 72, 79, 98, 56, 53, 67, 77, 55, 59, 59, 62, 74, 61, 67, 58, 97, 77, 57, 53, 64, 66, 50, 62, 67, 56, 70, 59, 71, 70, 53, 110, 73, 83, 49, 66, 65, 54, 61, 66, 60, 65, 65, 56, 69, 81, 79, 67, 63, 69, 65, 63, 68, 72, 83, 94, 56, 52, 69, 44, 74, 66, 73, 60, 52, 64, 57, 58, 52, 63, 44, 58, 106, 55, 45, 81, 62, 56, 52, 71, 71, 70, 62, 47, 76, 69, 66, 75, 70, 62, 62, 77, 52, 69, 75, 91, 65, 64, 68, 62, 71, 73, 54, 70, 63, 59, 69, 77, 65, 63, 62, 53, 65, 81, 54, 67, 74, 78, 61, 60, 70, 62, 60, 61, 80, 73, 69, 63, 62, 108, 58, 52, 52, 66, 69, 58, 42, 82, 62, 53, 73, 55, 65, 60, 86, 62, 59, 69, 72, 66, 56, 60, 55, 52, 54, 63, 68, 69, 58, 69, 58, 57, 58, 68, 115, 50, 66, 123, 58, 78, 74, 60, 72, 42, 69, 52, 52, 53, 65, 86, 71, 57, 58, 81, 68, 65, 65, 67, 65, 73, 56, 71, 50, 69, 55, 57, 51, 57, 84, 68, 57, 66, 107, 69, 56, 66, 53, 61, 63, 79, 68, 70, 65, 51, 62, 61, 78, 58, 68, 65, 73, 68, 69, 60, 56, 63, 51, 61, 73, 72, 52, 51, 66, 46, 59, 71, 51, 61, 54, 61, 66, 79, 70, 61, 77, 65, 74, 55, 70, 55, 55, 60, 57, 57, 82, 48, 53, 59, 55, 63, 55, 73, 70, 56, 56, 65, 65, 58, 89, 61, 79, 66, 69, 55, 78, 54, 63, 63, 69, 84, 63, 70, 62, 55, 72, 56, 58, 64, 69, 69, 66, 69, 65, 60, 61, 59, 64, 61, 66, 67, 53, 65, 60, 69, 46, 57, 68, 86, 77, 58, 71, 65, 70, 80, 76, 61, 56, 55, 49, 55, 67, 62, 86, 61, 79, 48, 47, 49, 54, 78, 69, 81, 52, 58, 68, 111, 74, 88, 72, 62, 78, 47, 54, 76, 51, 62, 80, 70, 60, 99, 72, 52, 48, 70, 64, 75, 55, 56, 55, 80, 62, 86, 61, 48, 61, 81, 63, 64, 74, 45, 50, 63, 110, 51, 46, 67, 51, 62, 60, 64, 57, 88, 67, 88, 49, 17, 65, 45, 58, 77, 66, 70, 68, 62, 73, 46, 41, 101, 73, 64, 114, 46, 45, 67, 65, 78, 59, 50, 69, 59, 41, 53, 62, 83, 82, 52, 74, 84, 64, 71, 69, 72, 121, 83, 44, 48, 77, 92, 87, 60, 54, 77, 60, 65, 57, 129, 83, 61, 57, 74, 55, 57, 56, 81, 75, 110, 72, 46, 60, 47, 61, 67, 46, 56, 46, 84, 59, 81, 74, 92, 43, 83, 34, 56, 69, 51, 70, 83, 43, 54, 50, 72, 53, 63, 70, 67, 52, 71, 45, 59, 53, 55, 80, 59, 58, 63, 63, 61, 57, 76, 49, 72, 74, 65, 63, 57, 55, 57, 64, 64, 118, 97, 58, 63, 60, 52, 62, 94, 68, 43, 72, 51, 56, 75, 62, 57, 73, 45, 74, 80, 43, 58, 57, 84, 55, 76, 63, 52, 66, 84, 76, 56, 52, 59, 59, 68, 73, 48, 70, 50, 62, 81, 57, 63, 51, 65, 68, 65, 66, 67, 74, 59, 58, 138, 64, 41, 54, 66, 57, 77, 72, 103, 57, 78, 72, 81, 80, 56, 102, 77, 63, 53, 85, 57, 67, 79, 78, 68, 
63, 44, 53, 61, 69, 91, 62, 74, 74, 53, 55, 48, 77, 39, 89, 56, 52, 75, 58, 64, 58, 72, 71, 132, 39, 54, 79, 55, 44, 84, 84, 71, 60, 64, 50, 80, 54, 60, 55, 68, 60, 55, 86, 97, 65, 107, 59, 69, 66, 78, 33, 69, 45, 55, 53, 52, 57, 65, 58, 52, 51, 78, 74, 96, 77, 57, 53, 56, 65, 91, 45, 41, 86, 62, 73, 54, 57, 62, 86, 70, 43, 54, 55, 115, 79, 72, 74, 68, 56, 89, 50, 49, 73, 77, 43, 45, 68, 61, 38, 61, 59, 63, 90, 78, 54, 71, 74, 61, 55, 74, 60, 77, 52, 36, 31, 62, 65, 51, 56, 58, 69, 85, 51, 59, 74, 57, 64, 91, 59, 81, 66, 51, 63, 58, 99, 84, 48, 79, 51, 66, 60, 101, 81, 32, 89, 75, 86, 88, 121, 62, 84, 69, 62, 59, 58, 67, 63, 50, 75, 88, 66, 70, 52, 92, 53, 64, 58, 83, 54, 110, 75, 59, 64, 55, 68, 66, 75, 53, 69, 58, 39, 69, 68, 64, 66, 45, 71, 96, 47, 50, 74, 49, 58, 56, 73, 69, 64, 67, 124, 64, 79, 66, 71, 41, 59, 60, 92, 64, 67, 100, 81, 65, 52, 41, 56, 72, 71, 52, 44, 61, 63, 64, 66, 54, 99, 65, 67, 74, 59, 51, 56, 63, 80, 45, 72, 60, 180, 71, 52, 62, 46, 59, 63, 72, 73, 68, 76, 65, 66, 69, 52, 62, 63, 72, 63, 69, 113, 53, 53, 54, 60, 73, 70, 69, 79, 59, 63, 58, 50, 48, 32, 65, 52, 59, 81, 72, 65, 59, 63, 84, 86, 75, 74, 91, 60, 68, 81, 68, 88, 47, 122, 84, 55, 33, 83, 120, 66, 57, 70, 50, 72, 57, 62, 58, 61, 74, 61, 60, 84, 60, 56, 65, 54, 82, 56, 67, 56, 50, 68, 79, 57, 70, 44, 69, 66, 82, 59, 54, 66, 76, 72, 81, 37, 70, 93, 64, 87, 46, 52, 53, 83, 63, 75, 55, 77, 54, 71, 68, 45, 73, 59, 64, 65, 65, 55, 50, 69, 57, 72, 43, 77, 72, 64, 74, 64, 36, 63, 68, 64, 71, 52, 53, 71, 74, 58, 95, 74, 77, 76, 75, 57, 62, 72, 47, 63, 65, 61, 60, 79, 43, 46, 62, 69, 65, 78, 67, 51, 51, 62, 73, 87, 59, 64, 58, 53, 51, 70, 95, 59, 62, 66, 40, 88, 54, 56, 99, 66, 56, 70, 50, 55, 58, 70, 60, 68, 75, 72, 89, 56, 57, 47, 55, 33, 78, 54, 59, 64, 60, 75, 78, 60, 86, 61, 67, 85, 86, 69, 48, 76, 99, 81, 51, 71, 88, 85, 94, 68, 66, 66, 52, 56, 85, 47, 64, 58, 72, 71, 74, 70, 60, 69, 47, 81, 72, 70, 76, 58, 60, 81, 63, 36, 80, 91, 92, 49, 55, 54, 68, 66, 62, 77, 60, 52, 69, 80, 56, 60, 58, 50, 52, 48, 64, 37, 67, 71, 89, 54, 47, 62, 42, 70, 74, 51, 85, 126, 55, 70, 54, 91, 49, 71, 61, 64, 60, 64, 58, 97, 78, 54, 64, 53, 60, 52, 86, 50, 59, 73, 68, 87, 70, 55, 93, 79, 87, 49, 50, 37, 51, 78, 62, 67, 63, 74, 67, 66, 74, 66, 66, 60, 77, 58, 62, 76, 72, 77, 60, 50, 55, 69, 58, 58, 64, 62, 71, 67, 72, 93, 64, 55, 60, 85, 56, 99, 83, 58, 70, 63, 50, 67, 54, 69, 47, 64, 59, 64, 54, 69, 79, 58, 51, 70, 57, 48, 79, 71, 73, 58, 57, 65, 81, 90, 82, 71, 72, 56, 53, 46, 79, 54, 73, 64, 74, 77, 95, 56, 70, 72, 75, 61, 60, 78, 87, 62, 60, 60, 53, 51, 59, 78, 56, 84, 74, 56, 97, 50, 59, 48, 63, 95, 61, 66, 51, 62, 52, 43, 74, 82, 85, 53, 58, 87, 80, 80, 79, 60, 58, 58, 74, 67, 72, 50, 79, 45, 53, 75, 42, 58, 50, 56, 67, 77, 93, 92, 68, 67, 48, 63, 58, 64, 58, 51, 58, 82, 66, 63, 61, 61, 71, 50, 90, 66, 60, 95, 67, 56, 69, 55, 73, 96, 65, 41, 80, 71, 59, 74, 62, 100, 76, 61, 68, 67, 58, 59, 55, 75, 89, 105, 75, 81, 88, 64, 72, 97, 80, 87, 64, 73, 52, 36, 57, 55, 69, 61, 65, 51, 49, 63, 70, 64, 84, 47, 57, 65, 70, 69, 69, 87, 53, 62, 49, 64, 76, 91, 64, 68, 63, 59, 44, 90, 58, 53, 69, 65, 95, 63, 65, 61, 95, 72, 57, 58, 86, 57, 50, 84, 78, 72, 68, 59, 36, 54, 58, 81, 64, 52, 83, 55, 77, 61, 103, 63, 92, 59, 57, 66, 66, 68, 70, 71, 95, 51, 78, 79, 76, 61, 70, 44, 50, 57, 57, 44, 72, 65, 65, 80, 82, 41, 76, 76, 67, 64, 75, 74, 65, 85, 48, 63, 58, 79, 35, 64, 56, 69, 59, 84, 60, 44, 53, 69, 70, 87, 69, 65, 50, 52, 72, 82, 55, 62, 52, 61, 72, 75, 61, 53, 63, 52, 66, 45, 40, 54, 67, 66, 62, 56, 71, 55, 63, 51, 59, 61, 75, 72, 75, 
96, 65, 50, 63, 73, 55, 57, 62, 53, 68, 47, 68, 84, 56, 64, 75, 36, 61, 110, 62, 121, 71, 71, 66, 65, 64, 66, 63, 65, 67, 64, 85, 76, 56, 60, 61, 70, 53, 62, 73, 75, 52, 80, 51, 63, 61, 69, 62, 61, 60, 50, 62, 59, 69, 97, 80, 70, 61, 50, 51, 46, 83, 78, 57, 71, 59, 50, 68, 46, 79, 69, 64, 70, 76, 69, 88, 61, 73, 43, 49, 70, 72, 57, 61, 72, 116, 77, 59, 80, 66, 64, 49, 69, 59, 61, 53, 54, 57, 72, 45, 80, 73, 68, 65, 78, 47, 74, 70, 46, 60, 56, 71, 66, 62, 73, 67, 47, 61, 61, 65, 76, 53, 62, 51, 62, 73, 64, 60, 95, 57, 62, 111, 95, 51, 80, 43, 75, 69, 67, 55, 85, 74, 70, 80, 96, 73, 56, 80, 62, 55, 77, 57, 50, 67, 65, 49, 60, 60, 68, 110, 50, 70, 62, 86, 58, 69, 54, 57, 81, 93, 76, 99, 55, 84, 71, 62, 74, 81, 69, 76, 61, 78, 70, 52, 85, 91, 60, 65, 51, 61, 71, 85, 76, 84, 54, 62, 60, 71, 64, 56, 91, 67, 76, 67, 60, 65, 57, 59, 64, 57, 77, 65, 84, 89, 83, 73, 103, 70, 57, 80, 60, 61, 66, 60, 68, 55, 114, 49, 44, 75, 42, 72, 71, 101, 50, 74, 70, 43, 87, 73, 76, 68, 63, 53, 45, 60, 55, 75, 75, 66, 51, 88, 62, 46, 46, 65, 63, 63, 78, 48, 75, 59, 58, 45, 55, 75, 76, 69, 67, 52, 56, 56, 65, 69, 75, 66, 82, 73, 58, 68, 51, 51, 67, 58, 71, 54, 88, 83, 62, 63, 57, 48, 71, 64, 94, 70, 64, 83, 91, 43, 66, 69, 55, 67, 114, 71, 77, 89, 70, 72, 68, 62, 56, 62, 52, 55, 76, 69, 80, 68, 77, 81, 48, 58, 71, 51, 58, 54, 66, 57, 61, 64, 62, 52, 54, 74, 63, 51, 60, 68, 56, 91, 62, 58, 61, 79, 77, 74, 65, 52, 64, 54, 71, 62, 59, 58, 69, 48, 53, 77, 46, 61, 77, 56, 57, 87, 56, 87, 73, 53, 58, 56, 54, 78, 51, 78, 74, 60, 61, 78, 90, 69, 71, 79, 50, 57, 86, 64, 69, 41, 61, 52, 85, 71, 56, 73, 107, 71, 54, 75, 67, 75, 62, 81, 90, 84, 63, 80, 52, 52, 71, 80, 52, 73, 104, 63, 65, 70, 59, 76, 51, 72, 48, 67, 81, 60, 58, 69, 59, 68, 62, 76, 62, 53, 64, 59, 57, 72, 55, 62, 64, 77, 51, 67, 77, 51, 61, 75, 60, 66, 59, 47, 74, 76, 60, 71, 47, 54, 66, 100, 47, 56, 58, 68, 56, 56, 53, 57, 77, 75, 70, 59, 53, 71, 46, 75, 75, 50, 58, 86, 65, 73, 64, 69, 77, 56, 64, 60, 70, 52, 58, 45, 50, 61, 45, 72, 62, 65, 70, 65, 95, 81, 70, 78, 62, 40, 55, 62, 52, 69, 72, 44, 111, 70, 59, 58, 83, 77, 42, 78, 88, 65, 54, 66, 52, 65, 46, 122, 78, 44, 69, 71, 59, 66, 62, 67, 66, 50, 42, 74, 105, 68, 77, 68, 52, 58, 79, 54, 64, 51, 64, 59, 71, 74, 67, 69, 57, 54, 110, 48, 62, 69, 104, 136, 83, 56, 67, 67, 51, 60, 55, 86, 47, 64, 62, 69, 45, 48, 70, 78, 58, 56, 54, 85, 69, 66, 66, 57, 65, 54, 52, 66, 56, 33, 56, 38, 51, 88, 69, 50, 64, 79, 86, 54, 61, 80, 69, 76, 84, 65, 73, 88, 68, 93, 96, 53, 46, 74, 71, 78, 54, 74, 63, 58, 67, 68, 58, 114, 62, 65, 74, 60, 52, 54, 105, 65, 77, 85, 39, 80, 56, 63, 58, 84, 69, 70, 59, 60, 67, 51, 89, 45, 49, 58, 51, 31, 42, 53, 95, 70, 71, 65, 64, 65, 66, 61, 52, 67, 65, 64, 52, 58, 56, 59, 64, 53, 73, 66, 93, 65, 58, 55, 53, 41, 69, 58, 72, 65, 70, 78, 66, 61, 67, 53, 84, 80, 85, 55, 64, 71, 51, 56, 59, 60, 73, 57, 71, 58, 68, 79, 70, 85, 60, 64, 53, 102, 96, 76, 57, 78, 79, 48, 66, 62, 89, 56, 81, 50, 66, 59, 63, 57, 51, 70, 59, 57, 58, 63, 50, 55, 65, 63, 64, 88, 55, 76, 60, 75, 55, 63, 78, 74, 57, 87, 61, 78, 55, 55, 73, 68, 69, 72, 53, 46, 65, 73, 63, 84, 60, 56, 61, 50, 75, 87, 58, 84, 54, 53, 61, 50, 68, 81, 65, 80, 67, 65, 62, 72, 104, 62, 56, 54, 62, 62, 49, 110, 69, 38, 74, 109, 79, 61, 76, 55, 71, 89, 45, 64, 69, 93, 57, 58, 65, 67, 64, 58, 69, 85, 68, 76, 63, 52, 52, 61, 52, 52, 65, 54, 59, 46, 65, 59, 100, 97, 64, 53, 63, 63, 74, 67, 78, 65, 50, 70, 66, 41, 71, 69, 119, 48, 76, 69, 76, 67, 49, 66, 101, 45, 88, 64, 45, 61, 56, 59, 40, 100, 72, 51, 48, 42, 71, 62, 58, 60, 67, 54, 72, 66, 74, 86, 67, 
57, 47, 75, 76, 60, 68, 70, 58, 67, 63, 69, 66, 53, 71, 76, 94, 65, 49, 66, 67, 60, 89, 56, 69, 57, 58, 62, 88, 62, 64, 47, 61, 105, 40, 74, 71, 61, 65, 70, 64, 143, 56, 63, 86, 56, 64, 64, 67, 51, 74, 58, 97, 60, 60, 59, 63, 76, 74, 71, 60, 64, 68, 63, 58, 78, 64, 89, 62, 76, 55, 63, 52, 58, 70, 50, 66, 76, 68, 57, 62, 74, 54, 62, 68, 58, 64, 67, 68, 83, 53, 100, 84, 53, 84, 82, 68, 54, 61, 66, 50, 57, 77, 68, 42, 62, 62, 47, 85, 47, 69, 48, 59, 50, 79, 40, 90, 55, 78, 55, 91, 90, 52, 74, 54, 64, 76, 61, 86, 64, 78, 82, 79, 84, 65, 76, 68, 56, 72, 42, 70, 67, 46, 62, 73, 70, 86, 67, 69, 75, 55, 53, 91, 57, 79, 67, 66, 59, 69, 66, 71, 58, 61, 114, 141, 58, 64, 67, 54, 74, 74, 72, 72, 67, 56, 52, 55, 67, 48, 52, 60, 58, 55, 62, 86, 75, 67, 69, 70, 63, 52, 60, 69, 57, 62, 58, 58, 59, 47, 65, 59, 46, 111, 62, 50, 59, 58, 78, 56, 63, 54, 61, 62, 68, 67, 61, 89, 70, 84, 55, 50, 77, 64, 47, 53, 80, 54, 56, 66, 81, 97, 70, 97, 70, 64, 53, 68, 59, 56, 52, 56, 68, 76, 70, 76, 59, 53, 77, 65, 47, 86, 55, 65, 62, 71, 60, 87, 59, 61, 85, 71, 67, 55, 76, 59, 78, 67, 63, 66, 73, 82, 36, 68, 55, 62, 68, 67, 59, 82, 52, 53, 74, 64, 65, 52, 56, 72, 52, 54, 77, 60, 75, 71, 64, 70, 59, 54, 81, 62, 79, 62, 72, 68, 56, 64, 69, 90, 112, 55, 79, 50, 54, 57, 58, 63, 61, 79, 78, 76, 74, 71, 74, 53, 41, 57, 74, 49, 40, 71, 98, 60, 63, 64, 88, 60, 65, 64, 76, 60, 50, 53, 60, 56, 74, 70, 64, 67, 67, 79, 51, 58, 59, 52, 65, 67, 105, 73, 75, 89, 64, 60, 62, 70, 45, 71, 66, 68, 65, 74, 79, 63, 57, 56, 76, 70, 48, 56, 64, 51, 72, 54, 72, 75, 87, 54, 46, 89, 70, 51, 55, 75, 63, 70, 49, 69, 76, 60, 68, 100, 64, 66, 67, 58, 72, 73, 50, 75, 48, 73, 58, 62, 86, 56, 48, 55, 68, 68, 62, 88, 64, 63, 55, 75, 50, 69, 68, 55, 66, 64, 59, 65, 58, 76, 76, 75, 76, 66, 62, 62, 51, 78, 53, 60, 77, 60, 57, 67, 69, 52, 70, 65, 55, 52, 66, 41, 54, 61, 50, 72, 64, 75, 77, 70, 50, 78, 56, 83, 50, 78, 59, 76, 56, 50, 39, 78, 65, 78, 60, 60, 61, 75, 60, 63, 62, 69, 32, 36, 58, 63, 61, 70, 70, 68, 57, 73, 50, 48, 77, 73, 58, 56, 46, 61, 67, 46, 100, 75, 55, 63, 63, 73, 72, 54, 56, 78, 84, 63, 61, 72, 58, 102, 57, 82, 71, 65, 80, 66, 47, 51, 46, 67, 44, 74, 68, 73, 68, 67, 62, 52, 93, 113, 52, 69, 56, 62, 66, 57, 63, 77, 58, 59, 69, 64, 50, 51, 69, 52, 75, 71, 60, 63, 69, 76, 59, 62, 69, 61, 71, 101, 59, 62, 62, 68, 50, 65, 90, 67, 42, 55, 70, 68, 64, 55, 55, 60, 110, 79, 55, 56, 61, 81, 71, 68, 59, 69, 78, 60, 65, 70, 61, 73, 49, 79, 94, 60, 52, 39, 92, 51, 56, 65, 70, 62, 56, 93, 54, 67, 102, 80, 67, 81, 57, 74, 83, 57, 66, 62, 72, 72, 56, 66, 68, 70, 56, 53, 72, 60, 65, 57, 62, 59, 99, 71, 77, 67, 67, 66, 69, 51, 95, 61, 63, 62, 67, 69, 57, 111, 60, 68, 71, 77, 63, 61, 75, 64, 54, 68, 61, 65, 66, 80, 60, 63, 59, 75, 59, 65, 70, 60, 71, 40, 57, 48, 59, 91, 58, 51, 59, 59, 64, 105, 71, 58, 70, 80, 67, 89, 58, 56, 56, 63, 68, 60, 71, 78, 67, 72, 81, 55, 73, 53, 77, 66, 90, 66, 75, 60, 59, 50, 76, 63, 65, 71, 74, 61, 67, 59, 67, 68, 65, 46, 68, 66, 81, 52, 52, 64, 76, 71, 87, 71, 44, 56, 53, 56, 63, 86, 62, 78, 60, 67, 61, 68, 59, 72, 56, 87, 61, 57, 66, 72, 77, 66, 60, 64, 73, 73, 110, 55, 61, 73, 60, 53, 118, 52, 79, 70, 48, 64, 77, 53, 59, 67, 66, 70, 61, 53, 70, 66, 64, 82, 39, 80, 57, 73, 55, 64, 69, 56, 58, 82, 67, 86, 63, 65, 71, 64, 64, 53, 72, 90, 80, 56, 53, 102, 64, 47, 54, 67, 80, 61, 67, 65, 51, 49, 76, 53, 62, 53, 65, 70, 71, 81, 59, 70, 47, 77, 72, 48, 59, 45, 67, 52, 97, 68, 64, 57, 66, 56, 59, 62, 50, 56, 63, 60, 71, 80, 64, 61, 77, 69, 42, 43, 55, 104, 67, 87, 107, 54, 75, 61, 70, 72, 64, 62, 57, 65, 76, 52, 66, 70, 57, 55, 62, 
58, 68, 58, 65, 58, 66, 71, 69, 53, 77, 83, 54, 53, 84, 78, 55, 70, 56, 67, 123, 59, 62, 91, 56, 68, 59, 64, 68, 54, 72, 44, 60, 66, 74, 63, 76, 49, 76, 64, 63, 83, 70, 61, 70, 75, 93, 58, 64, 109, 78, 68, 62, 92, 63, 65, 80, 60, 49, 47, 43, 70, 75, 90, 52, 80, 57, 64, 66, 89, 92, 45, 53, 62, 62, 52, 55, 65, 70, 55, 62, 66, 64, 62, 75, 71, 62, 71, 67, 56, 69, 68, 57, 78, 68, 68, 48, 48, 60, 79, 64, 93, 63, 107, 75, 75, 62, 66, 70, 56, 59, 53, 68, 67, 57, 75, 64, 87, 35, 60, 57, 52, 64, 69, 56, 54, 80, 72, 82, 56, 71, 49, 46, 75, 65, 71, 56, 66, 58, 61, 60, 68, 69, 64, 68, 64, 53, 69, 81, 54, 54, 67, 45, 62, 73, 79, 58, 85, 65, 60, 60, 71, 67, 101, 74, 59, 59, 57, 58, 71, 71, 62, 64, 69, 113, 83, 76, 57, 49, 66, 61, 52, 50, 73, 62, 52, 62, 65, 73, 65, 52, 59, 58, 111, 63, 57, 57, 63, 60, 74, 47, 55, 58, 71, 51, 62, 82, 62, 54, 60, 70, 57, 66, 61, 51, 64, 62, 63, 65, 101, 56, 54, 64, 64, 114, 83, 62, 65, 57, 70, 82, 111, 99, 51, 80, 64, 71, 66, 63, 54, 57, 65, 59, 69, 65, 64, 71, 65, 64, 59, 59, 47, 68, 70, 55, 64, 100, 71, 70, 57, 60, 93, 58, 75, 62, 64, 60, 59, 57, 68, 63, 64, 65, 62, 57, 60, 65, 72, 72, 63, 53, 64, 50, 70, 56, 56, 72, 56, 57, 67, 60, 59, 61, 69, 73, 63, 75, 55, 62, 72, 63, 59, 49, 64, 71, 105, 77, 60, 55, 60, 54, 65, 75, 62, 55, 60, 77, 71, 79, 62, 56, 76, 71, 65, 70, 73, 57, 73, 63, 60, 60, 74, 64, 51, 64, 59, 72, 49, 74, 62, 61, 49, 70, 67, 64, 59, 66, 71, 72, 62, 65, 57, 61, 70, 105, 57, 63, 88, 62, 58, 70, 62, 68, 51, 55, 67, 119, 71, 71, 58, 57, 53, 78, 102, 71, 58, 56, 74, 73, 57, 52, 60, 58, 77, 64, 64, 61, 67, 118, 57, 56, 63, 68, 53, 73, 56, 65, 83, 57, 56, 62, 72, 63, 96, 65, 72, 63, 63, 59, 62, 74, 59, 91, 59, 51, 71, 45, 80, 72, 90, 82, 56, 94, 61, 70, 99, 53, 71, 64, 68, 51, 61, 69, 57, 50, 66, 54, 56, 49, 47, 68, 57, 55, 50, 68, 66, 71, 55, 101, 85, 79, 73, 71, 70, 88, 101, 46, 54, 94, 69, 45, 61, 95, 70, 51, 68, 71, 56, 87, 59, 53, 66, 54, 52, 108, 84, 65, 51, 61, 59, 64, 88, 73, 54, 54, 56, 57, 77, 71, 64, 63, 57, 68, 56, 90, 89, 73, 65, 65, 57, 68, 62, 61, 69, 71, 75, 62, 68, 46, 63, 66, 46, 64, 52, 68, 67, 64, 61, 67, 80, 64, 63, 58, 60, 60, 73, 60, 68, 66, 65, 67, 67, 60, 60, 57, 57, 57, 66, 63, 84, 57, 60, 71, 62, 62, 60, 63, 76, 90, 59, 61, 65, 67, 49, 67, 62, 83, 61, 67, 59, 73, 68, 61, 63, 69, 49, 51, 76, 59, 78, 55, 74, 59, 79, 64, 52, 59, 70, 62, 111, 74, 72, 50, 59, 74, 62, 71, 62, 74, 62, 61, 58, 49, 61, 64, 65, 72, 57, 62, 71, 59, 67, 73, 54, 83, 70, 65, 50, 72, 66, 60, 66, 72, 59, 80, 71, 71, 67, 66, 62, 52, 98, 57, 97, 61, 64, 73, 62, 44, 67, 97, 46, 71, 87, 66, 69, 56, 50, 60, 69, 66, 60, 54, 60, 51, 72, 60, 64, 54, 59, 64, 64, 67, 75, 63, 106, 48, 47, 58, 54, 73, 85, 63, 70, 65, 67, 136, 84, 58, 67, 72, 104, 53, 57, 57, 56, 63, 60, 60, 66, 67, 93, 56, 56, 67, 71, 64, 76, 68, 78, 63, 63, 80, 116, 61, 67, 63, 66, 72, 69, 51, 59, 44, 82, 83, 63, 71, 65, 61, 80, 47, 51, 85, 57, 66, 52, 71, 60, 44, 77, 58, 47, 47, 66, 67, 57, 96, 63, 60, 50, 67, 84, 54, 57, 58, 78, 51, 70, 47, 72, 61, 66, 49, 51, 67, 75, 69, 51, 48, 64, 61, 50, 71, 56, 59, 79, 59, 62, 86, 67, 83, 51, 35, 60, 48, 66, 98, 59, 60, 67, 65, 65, 68, 60, 71, 59, 65, 60, 70, 63, 66, 68, 44, 69, 67, 81, 63, 65, 84, 92, 73, 51, 57, 72, 55, 80, 51, 72, 60, 61, 66, 72, 91, 64, 60, 85, 63, 57, 57, 64, 64, 53, 53, 90, 67, 56, 74, 63, 65, 74, 55, 62, 66, 65, 41, 76, 61, 53, 63, 66, 55, 57, 61, 73, 63, 58, 77, 95, 64, 74, 61, 72, 53, 58, 62, 80, 65, 55, 79, 63, 59, 78, 55, 62, 74, 60, 80, 71, 72, 58, 70, 66, 57, 67, 70, 68, 56, 60, 90, 54, 81, 56, 65, 65, 55, 69, 76, 56, 54, 69, 84, 67, 44, 51, 
52, 59, 57, 79, 69, 81, 61, 58, 60, 62, 62, 58, 77, 83, 91, 87, 63, 51, 130, 61, 50, 70, 63, 56, 55, 69, 80, 52, 48, 59, 46, 65, 95, 67, 53, 81, 49, 91, 62, 56, 66, 82, 74, 62, 64, 96, 63, 76, 67, 56, 64, 66, 56, 89, 64, 63, 53, 68, 85, 62, 71, 61, 70, 61, 57, 46, 58, 69, 67, 62, 60, 87, 44, 57, 50, 62, 78, 58, 59, 53, 67, 69, 72, 60, 66, 70, 48, 80, 79, 58, 72, 64, 54, 68, 62, 53, 56, 69, 57, 58, 65, 64, 76, 68, 66, 61, 58, 66, 76, 68, 68, 75, 75, 58, 61, 60, 92, 67, 55, 60, 64, 74, 76, 74, 73, 57, 57, 68, 52, 68, 65, 45, 74, 69, 59, 65, 58, 47, 67, 66, 68, 67, 63, 63, 50, 55, 61, 57, 57, 54, 60, 94, 77, 65, 48, 66, 59, 51, 78, 70, 88, 60, 67, 66, 69, 70, 109, 54, 82, 56, 103, 67, 65, 66, 49, 59, 74, 70, 80, 81, 62, 52, 64, 41, 70, 65, 41, 66, 51, 109, 74, 53, 65, 94, 65, 57, 72, 65, 73, 73, 55, 84, 61, 68, 55, 77, 60, 71, 74, 51, 54, 81, 56, 60, 72, 88, 56, 50, 69, 54, 62, 79, 72, 73, 70, 81, 53, 65, 61, 65, 55, 64, 68, 47, 64, 66, 58, 48, 60, 51, 56, 65, 73, 46, 44, 63, 59, 80, 65, 65, 61, 57, 55, 61, 72, 63, 64, 82, 56, 59, 85, 45, 52, 72, 65, 90, 88, 58, 72, 46, 63, 70, 65, 86, 71, 85, 54, 52, 58, 61, 70, 65, 77, 57, 62, 64, 66, 59, 65, 64, 63, 71, 60, 67, 65, 50, 67, 58, 62, 62, 69, 64, 53, 92, 67, 51, 53, 69, 76, 86, 53, 90, 42, 52, 128, 105, 88, 59, 69, 75, 85, 63, 48, 76, 72, 77, 59, 57, 54, 61, 55, 63, 50, 82, 68, 47, 54, 82, 65, 72, 73, 60, 78, 81, 70, 64, 71, 50, 66, 86, 62, 54, 56, 59, 56, 64, 79, 68, 80, 63, 65, 52, 51, 50, 62, 52, 71, 70, 74, 88, 84, 52, 99, 55, 74, 67, 57, 61, 62, 66, 48, 53, 55, 52, 81, 60, 87, 47, 54, 47, 74, 78, 58, 63, 52, 67, 86, 102, 57, 62, 60, 100, 57, 50, 88, 72, 56, 77, 70, 62, 77, 89, 67, 50, 63, 63, 56, 68, 47, 62, 69, 56, 63, 68, 57, 75, 85, 68, 49, 51, 75, 63, 48, 51, 60, 63, 86, 76, 78, 72, 57, 54, 36, 63, 88, 131, 45, 64, 56, 62, 66, 72, 60, 57, 57, 63, 90, 68, 63, 55, 60, 67, 41, 61, 71, 60, 80, 68, 59, 67, 61, 79, 62, 81, 61, 61, 28, 65, 76, 56, 63, 71, 67, 73, 55, 67, 61, 59, 52, 56, 56, 58, 61, 75, 69, 71, 123, 113, 69, 70, 63, 46, 62, 54, 54, 65, 75, 47, 52, 56, 56, 70, 42, 92, 55, 79, 65, 65, 53, 59, 71, 50, 76, 93, 46, 69, 54, 70, 73, 65, 47, 56, 83, 51, 54, 66, 64, 68, 56, 60, 63, 60, 65, 65, 64, 61, 69, 55, 90, 60, 68, 73, 85, 45, 63, 63, 72, 55, 75, 57, 68, 62, 65, 48, 62, 84, 65, 73, 56, 61, 70, 46, 80, 51, 67, 53, 58, 61, 80, 58, 63, 43, 53, 56, 49, 93, 68, 65, 79, 78, 77, 59, 63, 56, 54, 95, 45, 100, 82, 53, 72, 56, 69, 58, 67, 62, 84, 57, 57, 68, 74, 61, 55, 57, 70, 64, 61, 72, 55, 51, 57, 58, 83, 79, 63, 57, 62, 64, 48, 78, 69, 57, 46, 64, 36, 93, 66, 72, 59, 77, 64, 64, 77, 69, 81, 80, 60, 72, 65, 74, 81, 60, 64, 65, 71, 60, 79, 70, 69, 58, 54, 62, 72, 84, 57, 47, 55, 59, 74, 54, 53, 64, 70, 50, 65, 60, 66, 68, 46, 92, 54, 65, 58, 61, 66, 62, 78, 53, 85, 61, 86, 80, 66, 76, 62, 60, 73, 65, 69, 87, 87, 72, 68, 58, 53, 62, 107, 62, 58, 89, 59, 60, 63, 72, 69, 72, 73, 81, 72, 83, 54, 70, 64, 50, 76, 75, 60, 81, 60, 81, 72, 58, 61, 65, 71, 55, 78, 55, 56, 58, 47, 71, 51, 47, 103, 64, 71, 63, 64, 80, 50, 65, 57, 57, 68, 89, 68, 63, 46, 91, 62, 51, 73, 75, 60, 57, 59, 82, 61, 65, 67, 51, 44, 80, 56, 67, 92, 51, 69, 85, 66, 71, 62, 58, 57, 55, 100, 76, 51, 55, 66, 106, 56, 64, 58, 70, 56, 50, 60, 54, 65, 85, 70, 81, 64, 59, 95, 70, 82, 44, 61, 55, 60, 62, 59, 63, 55, 105, 60, 69, 63, 89, 53, 63, 77, 84, 76, 57, 74, 63, 58, 70, 65, 80, 64, 71, 59, 58, 67, 72, 56, 57, 54, 67, 67, 56, 62, 61, 64, 81, 61, 52, 77, 61, 84, 105, 83, 67, 73, 53, 60, 57, 75, 65, 64, 63, 58, 57, 62, 55, 56, 75, 75, 60, 58, 79, 65, 82, 58, 53, 64, 62, 63, 58, 
61, 60, 71, 73, 75, 69, 38, 41, 69, 71, 94, 63, 68, 62, 96, 57, 90, 81, 56, 59, 62, 54, 56, 51, 72, 108, 44, 69, 61, 80, 60, 107, 44, 56, 55, 64, 62, 63, 41, 58, 54, 76, 74, 64, 67, 52, 93, 63, 54, 66, 92, 74, 65, 67, 59, 54, 75, 106, 65, 71, 74, 78, 53, 73, 60, 59, 58, 65, 62, 58, 84, 64, 82, 63, 60, 61, 85, 79, 59, 65, 65, 66, 46, 37, 66, 73, 69, 68, 62, 88, 54, 56, 59, 66, 61, 63, 55, 62, 65, 64, 64, 72, 81, 79, 143, 76, 105, 78, 81, 47, 67, 70, 74, 65, 77, 54, 70, 53, 65, 52, 92, 56, 68, 65, 78, 67, 56, 58, 65, 59, 83, 64, 47, 66, 57, 58, 53, 71, 64, 63, 62, 61, 65, 66, 61, 57, 53, 67, 69, 55, 71, 50, 51, 82, 49, 67, 51, 60, 94, 59, 78, 51, 65, 66, 59, 62, 72, 74, 61, 60, 77, 51, 74, 75, 60, 75, 60, 70, 116, 55, 62, 57, 79, 62, 44, 82, 70, 54, 68, 93, 51, 70, 55, 74, 62, 59, 49, 57, 60, 78, 72, 62, 67, 53, 60, 58, 50, 77, 80, 78, 78, 110, 65, 52, 50, 52, 58, 52, 76, 62, 54, 57, 68, 52, 54, 70, 62, 56, 64, 53, 62, 56, 56, 49, 74, 89, 53, 77, 56, 62, 64, 52, 71, 61, 78, 120, 65, 61, 55, 77, 66, 79, 90, 75, 53, 59, 65, 70, 65, 52, 68, 65, 53, 68, 68, 59, 59, 54, 54, 58, 54, 69, 59, 69, 78, 70, 58, 106, 72, 55, 55, 71, 56, 68, 61, 58, 76, 59, 66, 60, 65, 82, 88, 64, 80, 71, 69, 85, 45, 66, 54, 65, 67, 49, 80, 68, 80, 60, 50, 64, 59, 67, 49, 54, 104, 64, 62, 72, 77, 71, 62, 53, 58, 48, 75, 45, 68, 63, 44, 103, 93, 54, 62, 69, 52, 62, 60, 94, 65, 68, 61, 78, 53, 68, 57, 46, 67, 59, 64, 73, 73, 63, 60, 59, 53, 69, 78, 78, 70, 63, 46, 53, 60, 54, 60, 56, 61, 60, 66, 62, 52, 56, 55, 72, 63, 72, 60, 75, 56, 74, 84, 55, 58, 59, 86, 75, 68, 62, 69, 70, 67, 66, 68, 93, 49, 71, 56, 66, 56, 69, 79, 90, 55, 63, 66, 76, 84, 50, 95, 58, 72, 62, 54, 56, 69, 81, 62, 63, 72, 66, 58, 65, 77, 54, 65, 60, 56, 61, 88, 70, 67, 63, 59, 73, 65, 67, 53, 66, 50, 58, 54, 86, 79, 115, 72, 78, 72, 34, 55, 110, 68, 68, 73, 57, 74, 101, 67, 77, 48, 57, 71, 60, 55, 71, 52, 66, 64, 63, 47, 53, 87, 54, 57, 120, 71, 60, 66, 59, 51, 57, 64, 71, 56, 68, 70, 52, 63, 64, 72, 67, 68, 62, 64, 50, 75, 44, 41, 73, 61, 86, 67, 53, 55, 59, 56, 74, 59, 74, 41, 75, 59, 72, 93, 58, 60, 68, 71, 68, 69, 55, 71, 59, 53, 65, 45, 57, 70, 76, 48, 92, 38, 48, 54, 54, 74, 67, 75, 67, 80, 64, 55, 139, 65, 84, 93, 73, 80, 62, 79, 62, 73, 46, 73, 80, 57, 64, 81, 73, 43, 75, 69, 55, 64, 63, 55, 72, 87, 64, 59, 84, 38, 67, 73, 67, 53, 74, 64, 63, 56, 82, 61, 80, 64, 71, 65, 114, 29, 50, 71, 69, 72, 81, 82, 57, 56, 77, 59, 68, 93, 64, 69, 63, 64, 61, 89, 56, 75, 73, 39, 57, 52, 68, 57, 75, 80, 68, 89, 64, 48, 58, 72, 47, 48, 66, 76, 51, 78, 79, 61, 60, 81, 69, 70, 74, 68, 89, 46, 85, 70, 71, 63, 77, 122, 62, 79, 52, 96, 62, 67, 45, 73, 65, 76, 65, 58, 59, 87, 68, 48, 48, 69, 61, 68, 77, 59, 53, 72, 61, 65, 80, 39, 62, 66, 46, 52, 60, 72, 70, 76, 72, 57, 54, 54, 63, 66, 61, 59, 60, 43, 58, 59, 66, 58, 105, 53, 67, 66, 54, 51, 99, 68, 53, 69, 47, 42, 71, 49, 65, 67, 71, 69, 64, 66, 61, 54, 52, 65, 52, 59, 69, 67, 65, 71, 68, 46, 72, 68, 98, 100, 43, 48, 54, 60, 76, 65, 85, 75, 58, 95, 69, 66, 65, 79, 63, 46, 62, 79, 64, 74, 62, 60, 53, 60, 59, 67, 62, 69, 70, 67, 56, 55, 56, 79, 44, 78, 63, 105, 84, 59, 54, 81, 63, 74, 64, 72, 74, 44, 65, 72, 61, 84, 54, 75, 70, 45, 87, 69, 61, 45, 42, 52, 51, 57, 67, 103, 88, 59, 115, 76, 67, 73, 70, 53, 40, 57, 56, 54, 81, 52, 60, 70, 55, 70, 74, 53, 65, 44, 63, 81, 106, 49, 63, 57, 118, 64, 56, 54, 53, 64, 77, 56, 56, 67, 61, 70, 62, 55, 63, 52, 51, 51, 46, 70, 60, 67, 57, 80, 60, 69, 63, 59, 53, 51, 79, 62, 50, 63, 59, 58, 89, 48, 67, 78, 54, 55, 46, 63, 71, 34, 98, 62, 75, 47, 55, 88, 50, 80, 56, 52, 79, 119, 88, 
95, 61, 75, 79, 76, 55, 75, 83, 53, 83, 53, 56, 65, 69, 72, 75, 97, 99, 89, 52, 72, 116, 65, 71, 69, 40, 104, 62, 84, 55, 92, 40, 64, 61, 61, 55, 70, 62, 60, 72, 49, 58, 55, 76, 112, 72, 61, 62, 88, 83, 75, 60, 61, 48, 80, 75, 43, 58, 66, 97, 94, 51, 93, 57, 76, 65, 62, 61, 56, 37, 54, 55, 64, 63, 51, 61, 72, 58, 56, 116, 89, 56, 50, 44, 51, 64, 56, 70, 58, 70, 64, 80, 47, 63, 75, 57, 63, 60, 53, 50, 57, 64, 37, 58, 43, 58, 88, 96, 67, 78, 70, 58, 57, 103, 64, 69, 65, 52, 62, 67, 76, 52, 60, 58, 78, 59, 57, 66, 58, 91, 73, 85, 47, 73, 44, 63, 65, 63, 64, 55, 56, 57, 48, 73, 67, 54, 106, 73, 48, 76, 61, 80, 50, 63, 42, 69, 94, 66, 65, 75, 51, 66, 86, 60, 59, 77, 58, 36, 31, 56, 65, 57, 58, 99, 64, 58, 49, 57, 74, 62, 50, 73, 63, 85, 56, 59, 83, 56, 67, 59, 58, 55, 47, 65, 63, 127, 54, 54, 64, 56, 77, 84, 46, 56, 59, 64, 69, 64, 55, 62, 45, 98, 51, 64, 82, 58, 70, 57, 116, 79, 57, 64, 62, 107, 68, 46, 57, 53, 56, 51, 45, 63, 77, 68, 74, 95, 69, 54, 121, 96, 55, 21, 68, 74, 89, 58, 128, 63, 78, 75, 49, 68, 67, 54, 101, 56, 67, 58, 76, 73, 49, 68, 40, 65, 53, 61, 54, 54, 81, 80, 66, 74, 57, 71, 58, 63, 59, 64, 53, 93, 60, 50, 86, 61, 51, 97, 73, 43, 69, 67, 54, 58, 89, 65, 79, 96, 85, 68, 62, 41, 84, 49, 53, 60, 63, 55, 69, 60, 56, 58, 71, 59, 48, 50, 86, 68, 57, 72, 65, 49, 80, 68, 64, 63, 62, 61, 66, 56, 45, 68, 57, 76, 57, 65, 49, 54, 60, 84, 56, 50, 68, 68, 68, 68, 68, 90, 50, 54, 64, 56, 60, 53, 80, 106, 60, 56, 80, 55, 62, 70, 70, 54, 79, 57, 64, 57, 58, 51, 86, 87, 77, 57, 57, 65, 62, 44, 73, 78, 66, 74, 64, 67, 99, 54, 79, 76, 58, 81, 59, 52, 66, 58, 54, 58, 54, 64, 66, 64, 109, 53, 75, 67, 72, 66, 47, 75, 83, 54, 108, 61, 63, 55, 59, 72, 80, 61, 69, 58, 61, 83, 89, 57, 56, 56, 94, 66, 54, 79, 92, 50, 71, 66, 70, 90, 64, 87, 91, 56, 66, 63, 60, 83, 61, 74, 68, 75, 73, 61, 104, 56, 45, 53, 73, 84, 58, 52, 69, 45, 57, 60, 90, 62, 53, 66, 51, 48, 60, 32, 91, 86, 53, 76, 47, 66, 66, 55, 54, 56, 78, 61, 59, 61, 75, 68, 60, 72, 63, 61, 55, 64, 74, 71, 79, 57, 42, 55, 61, 57, 48, 52, 69, 56, 60, 81, 88, 49, 53, 68, 56, 75, 73, 69, 64, 69, 47, 62, 67, 76, 49, 62, 65, 57, 87, 75, 65, 52, 63, 66, 68, 59, 63, 74, 59, 93, 67, 53, 75, 65, 62, 58, 62, 54, 87, 42, 54, 54, 65, 65, 69, 48, 70, 78, 63, 72, 54, 73, 64, 77, 61, 54, 66, 81, 88, 62, 72, 75, 66, 58, 67, 63, 74, 75, 77, 74, 58, 66, 67, 54, 72, 75, 62, 52, 60, 98, 52, 66, 49, 59, 44, 71, 82, 65, 70, 60, 75, 67, 67, 62, 45, 59, 88, 57, 64, 61, 99, 64, 87, 83, 68, 99, 83, 92, 46, 62, 74, 53, 40, 63, 122, 85, 61, 64, 76, 49, 74, 65, 57, 41, 54, 66, 46, 55, 54, 150, 56, 62, 57, 54, 66, 59, 68, 95, 59, 58, 88, 51, 65, 80, 62, 79, 56, 73, 64, 76, 75, 76, 76, 66, 51, 45, 56, 87, 65, 55, 96, 62, 98, 56, 78, 99, 58, 79, 57, 65, 69, 56, 47, 79, 60, 48, 77, 79, 55, 58, 74, 70, 78, 73, 39, 54, 76, 48, 54, 52, 64, 73, 67, 99, 48, 59, 58, 68, 70, 70, 55, 78, 88, 54, 78, 89, 54, 60, 56, 63, 60, 85, 78, 58, 80, 67, 37, 55, 59, 64, 74, 56, 59, 82, 72, 59, 46, 60, 62, 62, 81, 120, 68, 53, 56, 66, 65, 67, 55, 58, 60, 81, 48, 77, 67, 81, 58, 66, 60, 67, 76, 86, 59, 79, 82, 59, 54, 59, 54, 108, 81, 50, 64, 70, 56, 133, 62, 59, 77, 48, 69, 67, 92, 78, 47, 85, 54, 98, 59, 93, 67, 73, 84, 81, 47, 77, 67, 65, 73, 52, 76, 63, 85, 64, 75, 38, 47, 62, 75, 61, 66, 101, 43, 61, 70, 52, 64, 91, 63, 60, 60, 65, 78, 31, 82, 63, 50, 72, 118, 60, 59, 62, 52, 91, 60, 63, 62, 67, 95, 74, 65, 65, 56, 61, 75, 48, 66, 51, 59, 68, 52, 41, 70, 64, 31, 34, 74, 78, 71, 65, 68, 59, 73, 74, 81, 58, 62, 87, 70, 58, 68, 59, 53, 64, 153, 56, 94, 39, 53, 60, 73, 63, 69, 77, 77, 64, 62, 54, 
71, 52, 65, 73, 72, 82, 62, 59, 77, 53, 58, 79, 48, 40, 75, 80, 56, 75, 65, 80, 53, 68, 40, 55, 85, 64, 76, 85, 65, 83, 70, 58, 43, 57, 94, 72, 48, 51, 68, 75, 62, 75, 68, 60, 65, 67, 73, 66, 75, 67, 69, 84, 60, 60, 54, 49, 56, 47, 70, 55, 78, 74, 49, 69, 65, 74, 67, 60, 153, 74, 68, 54, 49, 53, 73, 56, 52, 50, 56, 102, 68, 55, 59, 71, 60, 50, 69, 53, 64, 82, 54, 43, 92, 63, 55, 58, 74, 62, 48, 59, 64, 54, 88, 68, 72, 72, 86, 66, 66, 73, 60, 83, 156, 78, 73, 76, 97, 49, 63, 63, 45, 93, 54, 56, 51, 54, 71, 103, 76, 57, 65, 68, 54, 87, 78, 70, 48, 67, 47, 44, 88, 67, 70, 77, 70, 51, 74, 43, 81, 54, 49, 74, 73, 51, 46, 74, 62, 42, 72, 44, 85, 65, 63, 70, 66, 82, 53, 43, 81, 66, 69, 53, 66, 69, 88, 58, 64, 46, 67, 65, 70, 56, 55, 72, 56, 64, 89, 57, 72, 80, 62, 43, 59, 56, 65, 46, 41, 87, 50, 48, 65, 75, 58, 36, 76, 55, 86, 74, 79, 61, 44, 73, 72, 74, 68, 66, 51, 51, 50, 82, 48, 80, 85, 57, 50, 61, 57, 69, 88, 42, 77, 71, 73, 63, 88, 69, 63, 69, 46, 76, 71, 78, 47, 63, 53, 51, 92, 65, 54, 66, 55, 100, 81, 61, 60, 90, 51, 73, 55, 50, 66, 59, 63, 79, 60, 58, 62, 63, 42, 47, 69, 85, 68, 26, 64, 75, 44, 59, 58, 47, 58, 67, 71, 53, 81, 78, 53, 83, 70, 74, 48, 66, 64, 62, 72, 69, 39, 64, 51, 61, 62, 108, 56, 59, 51, 68, 77, 69, 56, 51, 69, 64, 60, 67, 71, 85, 75, 59, 87, 57, 68, 72, 78, 40, 50, 58, 81, 96, 64, 59, 61, 60, 56, 70, 73, 66, 78, 57, 74, 46, 73, 90, 65, 91, 89, 51, 53, 61, 73, 49, 47, 58, 47, 70, 70, 65, 85, 69, 61, 66, 62, 85, 68, 57, 70, 54, 43, 84, 69, 67, 84, 66, 61, 89, 56, 53, 64, 42, 79, 64, 70, 80, 63, 59, 33, 71, 74, 65, 82, 62, 51, 69, 69, 115, 76, 111, 68, 64, 66, 45, 78, 47, 81, 59, 58, 61, 81, 54, 71, 32, 62, 82, 92, 123, 88, 62, 62, 71, 54, 80, 54, 61, 52, 66, 46, 75, 84, 67, 51, 54, 52, 76, 60, 59, 88, 43, 72, 72, 58, 94, 67, 39, 89, 63, 66, 51, 50, 71, 53, 61, 61, 52, 77, 52, 80, 55, 47, 61, 62, 69, 73, 60, 84, 79, 62, 52, 51, 67, 67, 71, 69, 83, 55, 59, 63, 51, 69, 46, 57, 81, 61, 59, 52, 54, 55, 57, 52, 97, 45, 64, 67, 49, 81, 44, 76, 95, 104, 52, 70, 54, 76, 54, 71, 64, 47, 69, 65, 59, 62, 77, 58, 90, 52, 75, 86, 66, 50, 47, 60, 58, 82, 74, 54, 71, 71, 69, 62, 69, 56, 63, 71, 43, 52, 61, 44, 36, 53, 70, 93, 74, 58, 98, 77, 65, 74, 84, 60, 60, 136, 55, 69, 66, 54, 64, 70, 65, 45, 47, 78, 88, 77, 75, 57, 63, 67, 56, 61, 75, 51, 60, 54, 66, 69, 65, 46, 109, 64, 62, 63, 76, 90, 79, 62, 66, 54, 54, 52, 55, 44, 48, 64, 50, 82, 73, 77, 58, 77, 75, 81, 62, 67, 62, 69, 72, 58, 65, 66, 66, 73, 29, 72, 86, 64, 88, 94, 59, 53, 49, 59, 58, 68, 61, 70, 82, 57, 87, 71, 60, 44, 64, 72, 63, 51, 67, 79, 84, 102, 69, 47, 49, 78, 47, 52, 55, 66, 48, 76, 93, 69, 71, 74, 51, 73, 74, 47, 60, 58, 64, 69, 106, 46, 55, 66, 76, 58, 83, 53, 56, 85, 73, 57, 65, 82, 72, 69, 58, 54, 53, 78, 69, 77, 79, 45, 82, 91, 74, 50, 59, 60, 95, 63, 34, 61, 64, 73, 62, 45, 87, 59, 61, 60, 67, 82, 54, 51, 61, 50, 81, 86, 65, 54, 77, 55, 82, 63, 81, 62, 44, 63, 80, 53, 84, 61, 28, 70, 64, 48, 65, 72, 62, 67, 75, 63, 51, 54, 112, 72, 60, 76, 72, 77, 86, 98, 70, 75, 53, 104, 59, 55, 92, 69, 58, 62, 87, 87, 64, 46, 44, 68, 57, 125, 50, 91, 56, 57, 100, 60, 65, 67, 69, 87, 73, 78, 72, 79, 79, 83, 64, 63, 48, 62, 51, 77, 83, 50, 50, 62, 71, 66, 54, 43, 120, 82, 52, 57, 76, 55, 54, 46, 58, 50, 64, 59, 64, 57, 80, 62, 67, 76, 79, 61, 65, 53, 50, 79, 62, 63, 66, 58, 60, 62, 42, 69, 54, 58, 96, 53, 73, 66, 53, 75, 81, 69, 74, 85, 59, 68, 72, 63, 79, 92, 87, 65, 52, 67, 66, 87, 97, 67, 50, 64, 74, 72, 97, 81, 70, 66, 114, 60, 56, 68, 68, 82, 60, 56, 56, 61, 61, 86, 96, 60, 64, 62, 58, 63, 71, 68, 64, 69, 51, 36, 62, 66, 
73, 65, 64, 57, 61, 58, 99, 61, 55, 43, 59, 59, 55, 69, 104, 69, 91, 93, 50, 66, 62, 65, 53, 61, 60, 49, 67, 79, 30, 67, 73, 47, 57, 87, 53, 55, 47, 51, 79, 62, 48, 38, 64, 75, 53, 75, 66, 112, 68, 58, 55, 70, 60, 52, 83, 72, 76, 72, 69, 61, 53, 65, 80, 72, 115, 76, 62, 65, 65, 63, 53, 77, 62, 59, 63, 66, 62, 57, 54, 76, 63, 85, 71, 79, 58, 46, 74, 52, 53, 64, 59, 69, 61, 49, 60, 37, 58, 44, 68, 55, 71, 57, 55, 50, 71, 60, 54, 54, 67, 68, 60, 52, 69, 74, 34, 71, 55, 66, 69, 63, 68, 69, 59, 71, 51, 54, 64, 123, 62, 65, 59, 68, 69, 77, 51, 53, 75, 52, 77, 56, 65, 79, 65, 59, 89, 74, 58, 80, 71, 49, 89, 69, 89, 94, 83, 61, 70, 54, 61, 52, 74, 69, 82, 78, 74, 52, 59, 54, 65, 60, 129, 56, 61, 53, 62, 58, 65, 53, 65, 57, 66, 67, 124, 67, 48, 67, 47, 67, 72, 82, 69, 59, 58, 53, 56, 89, 53, 55, 66, 72, 66, 62, 65, 83, 77, 54, 65, 72, 65, 72, 46, 55, 53, 55, 62, 56, 83, 60, 69, 57, 37, 57, 51, 57, 63, 57, 54, 80, 60, 63, 63, 107, 68, 68, 81, 48, 61, 60, 57, 81, 86, 76, 51, 59, 65, 81, 55, 65, 55, 63, 60, 65, 73, 102, 147, 87, 58, 85, 53, 68, 67, 58, 70, 54, 52, 44, 67, 51, 73, 72, 59, 52, 74, 67, 70, 109, 60, 52, 53, 49, 83, 65, 129, 65, 56, 66, 59, 71, 41, 62, 92, 60, 67, 64, 78, 49, 80, 52, 60, 69, 55, 63, 63, 55, 55, 63, 85, 82, 70, 76, 54, 60, 53, 64, 70, 75, 64, 95, 50, 76, 42, 66, 70, 54, 58, 78, 89, 82, 34, 58, 102, 59, 67, 59, 61, 47, 60, 47, 81, 82, 50, 72, 74, 47, 53, 56, 51, 60, 81, 68, 54, 56, 66, 75, 50, 88, 63, 58, 81, 83, 60, 70, 71, 95, 51, 57, 86, 62, 69, 66, 49, 53, 75, 40, 55, 59, 67, 51, 57, 61, 56, 65, 69, 64, 48, 51, 67, 87, 54, 41, 58, 61, 51, 111, 66, 61, 47, 72, 65, 53, 75, 92, 58, 78, 60, 58, 65, 76, 59, 41, 74, 56, 56, 57, 65, 73, 68, 50, 67, 77, 51, 71, 51, 55, 54, 101, 64, 70, 92, 57, 66, 67, 56, 67, 50, 128, 54, 54, 80, 50, 78, 55, 71, 43, 50, 51, 55, 56, 37, 60, 101, 76, 59, 87, 57, 57, 62, 70, 77, 58, 81, 70, 58, 88, 76, 70, 76, 68, 81, 72, 67, 50, 67, 60, 67, 65, 59, 63, 72, 97, 69, 60, 75, 45, 52, 57, 51, 42, 91, 63, 48, 70, 51, 49, 92, 61, 55, 77, 66, 76, 70, 107, 49, 44, 62, 49, 60, 67, 64, 53, 62, 61, 65, 64, 73, 61, 72, 67, 61, 64, 56, 56, 77, 59, 53, 58, 55, 87, 58, 86, 78, 64, 86, 55, 97, 57, 102, 70, 50, 69, 60, 79, 39, 75, 62, 66, 41, 71, 68, 77, 50, 72, 81, 70, 74, 106, 67, 51, 60, 45, 53, 52, 97, 69, 55, 53, 54, 61, 78, 58, 61, 55, 81, 65, 68, 53, 58, 62, 73, 55, 73, 61, 63, 46, 70, 64, 47, 80, 85, 70, 82, 94, 70, 67, 56, 55, 80, 43, 51, 45, 51, 54, 81, 55, 51, 59, 60, 43, 54, 63, 75, 74, 51, 66, 58, 54, 59, 66, 46, 58, 74, 57, 66, 53, 83, 93, 72, 68, 49, 62, 61, 64, 64, 60, 64, 65, 51, 82, 68, 59, 91, 72, 62, 64, 79, 51, 80, 59, 58, 51, 61, 65, 73, 59, 58, 59, 57, 76, 74, 66, 80, 69, 79, 49, 78, 95, 56, 57, 64, 63, 66, 56, 76, 67, 44, 67, 54, 43, 64, 66, 54, 37, 80, 92, 72, 55, 45, 60, 86, 64, 73, 43, 56, 62, 94, 62, 56, 64, 57, 51, 65, 57, 79, 64, 34, 51, 65, 58, 71, 66, 56, 64, 52, 70, 52, 54, 91, 50, 47, 44, 60, 90, 49, 66, 81, 50, 58, 67, 47, 50, 73, 91, 57, 54, 71, 65, 67, 77, 68, 69, 35, 68, 57, 76, 67, 59, 75, 49, 50, 93, 63, 56, 72, 83, 100, 95, 59, 51, 54, 50, 65, 82, 93, 75, 69, 66, 77, 78, 49, 54, 74, 59, 89, 66, 69, 78, 66, 59, 49, 71, 59, 76, 56, 43, 70, 58, 59, 91, 80, 64, 73, 60, 63, 70, 81, 63, 85, 55, 49, 70, 67, 46, 83, 68, 89, 68, 60, 76, 49, 63, 71, 64, 68, 52, 88, 61, 68, 55, 93, 63, 73, 64, 73, 67, 66, 55, 53, 64, 66, 53, 63, 53, 83, 83, 56, 65, 56, 105, 64, 53, 51, 67, 71, 42, 58, 76, 42, 61, 56, 79, 70, 50, 60, 109, 71, 69, 60, 71, 68, 59, 66, 68, 82, 91, 51, 69, 103, 62, 59, 71, 115, 75, 60, 64, 66, 58, 88, 64, 75, 67, 68, 90, 
66, 101, 68, 74, 82, 67, 79, 57, 86, 70, 68, 85, 53, 68, 66, 76, 68, 69, 64, 116, 45, 52, 101, 69, 77, 65, 52, 60, 53, 82, 64, 54, 79, 73, 63, 72, 62, 56, 72, 69, 51, 56, 48, 47, 69, 85, 62, 45, 61, 68, 72, 74, 64, 62, 58, 91, 51, 62, 58, 66, 59, 46, 59, 54, 50, 67, 70, 69, 59, 67, 65, 77, 63, 65, 63, 52, 41, 67, 68, 69, 58, 60, 64, 69, 90, 67, 62, 74, 33, 51, 69, 67, 82, 91, 53, 88, 78, 54, 52, 90, 55, 88, 83, 69, 62, 83, 112, 77, 77, 63, 66, 71, 70, 89, 71, 70, 50, 61, 74, 61, 76, 73, 56, 84, 75, 71, 54, 71, 99, 62, 67, 74, 61, 59, 70, 82, 74, 56, 59, 69, 74, 76, 58, 65, 61, 67, 57, 68, 34, 67, 93, 59, 58, 53, 62, 82, 64, 58, 70, 80, 69, 54, 86, 65, 62, 78, 46, 88, 66, 57, 70, 48, 70, 74, 79, 69, 50, 69, 49, 47, 74, 41, 78, 66, 62, 49, 62, 69, 51, 84, 69, 43, 86, 74, 90, 67, 90, 38, 73, 75, 62, 59, 56, 57, 69, 68, 102, 63, 53, 107, 62, 68, 73, 57, 50, 46, 60, 77, 61, 58, 51, 67, 49, 92, 61, 67, 66, 56, 59, 82, 61, 81, 60, 77, 72, 72, 61, 52, 30, 52, 80, 55, 99, 63, 50, 75, 61, 58, 47, 105, 49, 79, 58, 72, 58, 40, 64, 76, 65, 71, 51, 43, 67, 66, 55, 81, 63, 60, 71, 91, 87, 76, 56, 49, 76, 78, 83, 64, 68, 49, 75, 62, 51, 77, 65, 73, 53, 57, 55, 40, 61, 65, 97, 69, 64, 58, 76, 55, 66, 66, 67, 89, 73, 98, 64, 61, 117, 56, 60, 55, 61, 54, 129, 64, 62, 66, 71, 61, 81, 62, 64, 67, 67, 77, 58, 64, 71, 57, 47, 57, 60, 66, 87, 88, 72, 70, 52, 84, 70, 56, 57, 78, 53, 69, 56, 68, 78, 58, 61, 46, 57, 95, 117, 73, 53, 80, 66, 58, 56, 56, 73, 75, 65, 67, 56, 63, 63, 44, 76, 75, 86, 55, 60, 70, 55, 80, 72, 50, 62, 46, 62, 55, 60, 59, 63, 82, 61, 61, 70, 73, 47, 67, 75, 55, 64, 81, 110, 46, 49, 76, 67, 50, 77, 64, 91, 62, 67, 36, 65, 62, 85, 51, 83, 52, 56, 57, 63, 110, 62, 72, 70, 55, 74, 106, 85, 61, 56, 67, 46, 58, 62, 56, 88, 63, 70, 89, 62, 53, 81, 57, 67, 59, 66, 69, 55, 62, 78, 56, 74, 80, 71, 56, 52, 76, 79, 79, 87, 73, 68, 98, 64, 64, 66, 65, 45, 55, 63, 109, 67, 76, 118, 85, 67, 53, 55, 62, 58, 92, 68, 54, 112, 56, 65, 85, 76, 64, 67, 66, 82, 48, 66, 67, 83, 73, 89, 78, 63, 74, 55, 59, 53, 58, 67, 68, 80, 102, 47, 81, 93, 63, 61, 79, 51, 51, 72, 66, 65, 84, 52, 49, 56, 58, 58, 57, 83, 76, 81, 67, 64, 76, 50, 65, 58, 82, 79, 83, 79, 57, 80, 66, 47, 99, 80, 82, 69, 55, 60, 63, 79, 58, 77, 76, 62, 79, 71, 63, 69, 55, 61, 34, 54, 71, 59, 61, 58, 78, 48, 74, 74, 64, 65, 61, 69, 60, 48, 57, 71, 104, 72, 68, 59, 68, 67, 74, 93, 64, 64, 69, 69, 74, 54, 87, 66, 72, 77, 61, 61, 68, 69, 55, 55, 85, 66, 72, 48, 93, 62, 52, 71, 45, 59, 56, 56, 59, 42, 80, 75, 52, 84, 67, 79, 90, 66, 73, 61, 50, 54, 45, 59, 81, 82, 73, 54, 61, 74, 65, 78, 61, 84, 71, 58, 79, 64, 118, 59, 41, 56, 80, 56, 75, 51, 53, 57, 68, 53, 57, 67, 76, 82, 57, 56, 60, 82, 67, 79, 87, 54, 63, 68, 64, 77, 72, 80, 61, 80, 65, 85, 60, 79, 67, 80, 53, 63, 81, 61, 69, 63, 70, 75, 71, 61, 61, 60, 53, 46, 56, 55, 81, 48, 75, 56, 71, 59, 61, 70, 52, 60, 54, 84, 55, 76, 78, 59, 58, 58, 63, 84, 81, 57, 38, 65, 47, 75, 69, 77, 62, 72, 73, 86, 50, 75, 53, 58, 39, 78, 60, 68, 66, 65, 67, 65, 49, 68, 87, 48, 56, 66, 45, 58, 57, 63, 62, 50, 59, 71, 104, 85, 52, 75, 63, 66, 71, 89, 52, 84, 90, 82, 86, 66, 44, 63, 70, 87, 58, 66, 55, 65, 84, 67, 63, 48, 59, 94, 49, 69, 84, 45, 59, 74, 52, 48, 77, 68, 64, 57, 75, 106, 65, 64, 69, 60, 67, 59, 52, 71, 50, 80, 83, 60, 51, 44, 90, 63, 64, 66, 81, 54, 71, 51, 65, 29, 44, 60, 58, 79, 86, 59, 66, 66, 39, 71, 39, 51, 63, 86, 76, 59, 82, 73, 61, 67, 77, 73, 64, 66, 63, 45, 66, 64, 53, 61, 67, 54, 66, 60, 56, 49, 51, 99, 81, 84, 56, 63, 63, 55, 73, 90, 62, 69, 73, 56, 65, 59, 77, 83, 52, 55, 64, 59, 97, 48, 68, 44, 
95, 38, 52, 49, 70, 77, 66, 59, 70, 77, 60, 67, 72, 78, 43, 53, 66, 80, 54, 62, 68, 75, 67, 100, 65, 83, 66, 58, 62, 59, 63, 49, 92, 57, 48, 86, 61, 61, 59, 56, 60, 53, 63, 57, 40, 71, 97, 64, 53, 76, 56, 52, 97, 80, 57, 73, 51, 33, 47, 59, 52, 50, 67, 65, 66, 56, 60, 68, 84, 56, 54, 52, 78, 84, 90, 82, 62, 83, 83, 51, 59, 68, 62, 54, 45, 55, 62, 70, 64, 77, 58, 74, 51, 78, 74, 59, 100, 49, 72, 62, 83, 72, 68, 71, 70, 47, 59, 56, 72, 56, 84, 69, 115, 76, 77, 70, 70, 79, 70, 60, 69, 52, 54, 71, 49, 87, 55, 68, 32, 55, 57, 55, 70, 73, 52, 112, 59, 54, 79, 76, 79, 70, 103, 60, 95, 57, 72, 50, 66, 82, 59, 63, 76, 58, 53, 53, 53, 53, 86, 86, 103, 47, 55, 70, 50, 59, 66, 62, 54, 56, 88, 84, 55, 50, 74, 47, 65, 76, 56, 65, 57, 81, 55, 83, 51, 53, 56, 59, 60, 37, 68, 54, 61, 86, 62, 64, 75, 69, 73, 86, 55, 50, 58, 54, 76, 71, 81, 69, 80, 44, 53, 53, 66, 50, 80, 51, 80, 82, 54, 55, 80, 60, 54, 69, 73, 79, 79, 58, 56, 57, 78, 72, 86, 70, 79, 89, 76, 54, 61, 68, 63, 79, 68, 51, 61, 64, 60, 105, 56, 74, 65, 64, 43, 67, 76, 55, 84, 65, 65, 87, 46, 65, 55, 66, 80, 82, 73, 46, 59, 116, 66, 41, 60, 55, 56, 53, 62, 79, 40, 71, 78, 74, 35, 61, 57, 74, 53, 69, 55, 67, 46, 58, 71, 53, 57, 64, 54, 81, 69, 50, 94, 63, 78, 69, 55, 54, 75, 67, 41, 54, 62, 133, 85, 57, 94, 56, 61, 59, 68, 69, 60, 76, 75, 69, 69, 54, 74, 76, 39, 59, 70, 70, 63, 67, 102, 66, 72, 137, 65, 62, 67, 59, 54, 69, 44, 56, 64, 96, 74, 71, 71, 63, 66, 62, 55, 55, 70, 77, 64, 59, 71, 65, 60, 65, 75, 88, 73, 92, 61, 83, 66, 68, 99, 62, 60, 65, 61, 58, 67, 84, 64, 54, 63, 70, 77, 69, 77, 87, 70, 62, 50, 61, 62, 65, 90, 67, 66, 57, 69, 71, 69, 51, 61, 56, 79, 68, 69, 75, 97, 87, 122, 64, 67, 75, 79, 63, 58, 51, 52, 49, 50, 51, 49, 63, 63, 59, 63, 62, 75, 67, 115, 83, 71, 87, 64, 70, 56, 103, 69, 83, 58, 105, 56, 56, 59, 76, 54, 73, 78, 71, 45, 73, 71, 96, 56, 61, 73, 65, 60, 71, 65, 85, 66, 67, 50, 48, 70, 59, 55, 68, 52, 67, 74, 61, 65, 51, 68, 52, 83, 61, 53, 49, 92, 48, 56, 59, 42, 68, 61, 61, 73, 37, 99, 73, 57, 58, 48, 69, 63, 100, 72, 66, 70, 56, 61, 54, 80, 63, 56, 67, 77, 74, 77, 71, 59, 57, 79, 65, 66, 78, 68, 49, 70, 67, 69, 57, 111, 44, 65, 50, 86, 51, 53, 54, 73, 64, 79, 92, 58, 79, 67, 61, 68, 120, 48, 64, 49, 71, 60, 85, 76, 55, 56, 54, 70, 89, 88, 52, 83, 80, 50, 54, 55, 91, 71, 61, 73, 61, 41, 73, 66, 66, 47, 79, 66, 65, 71, 63, 57, 103, 61, 70, 92, 69, 56, 47, 55, 75, 57, 43, 53, 58, 54, 59, 80, 49, 63, 76, 81, 81, 67, 57, 49, 56, 47, 81, 47, 69, 75, 65, 68, 93, 53, 72, 53, 56, 73, 72, 52, 54, 77, 118, 59, 80, 68, 56, 56, 59, 78, 90, 48, 71, 54, 52, 52, 100, 55, 75, 71, 63, 55, 58, 55, 91, 61, 75, 65, 60, 64, 74, 75, 77, 97, 102, 60, 58, 39, 58, 62, 72, 50, 54, 58, 47, 59, 63, 60, 67, 58, 48, 83, 55, 95, 65, 51, 79, 79, 93, 53, 73, 62, 61, 56, 52, 109, 75, 79, 50, 55, 71, 60, 56, 50, 68, 79, 84, 63, 83, 57, 54, 105, 53, 89, 65, 57, 54, 68, 58, 82, 49, 55, 86, 92, 59, 59, 69, 49, 75, 86, 55, 77, 43, 49, 56, 75, 76, 42, 65, 67, 58, 64, 81, 70, 62, 69, 64, 61, 63, 52, 83, 57, 62, 75, 58, 43, 65, 77, 59, 56, 76, 60, 55, 56, 49, 56, 73, 63, 55, 61, 69, 89, 64, 82, 56, 64, 64, 45, 66, 71, 50, 80, 102, 67, 57, 49, 47, 73, 114, 78, 87, 48, 67, 56, 81, 86, 49, 72, 73, 59, 53, 76, 65, 52, 60, 61, 72, 74, 66, 52, 60, 69, 51, 66, 56, 62, 66, 46, 61, 69, 63, 79, 58, 87, 64, 53, 34, 90, 66, 53, 103, 47, 68, 71, 44, 62, 40, 64, 94, 69, 72, 58, 90, 73, 91, 55, 60, 59, 66, 78, 67, 63, 56, 66, 39, 59, 61, 66, 65, 48, 89, 82, 93, 51, 69, 73, 58, 52, 51, 65, 81, 55, 100, 57, 74, 75, 137, 93, 66, 73, 66, 73, 61, 58, 67, 62, 46, 39, 41, 75, 79, 62, 
81, 72, 93, 52, 48, 70, 55, 54, 78, 52, 54, 65, 77, 56, 61, 62, 63, 70, 64, 51, 58, 62, 95, 55, 47, 69, 95, 58, 65, 70, 51, 61, 46, 67, 40, 83, 69, 55, 72, 84, 64, 72, 51, 52, 59, 48, 69, 69, 49, 58, 81, 68, 53, 46, 68, 67, 91, 49, 63, 59, 59, 38, 60, 63, 48, 60, 49, 82, 63, 79, 76, 69, 58, 65, 64, 73, 52, 44, 73, 74, 116, 74, 45, 75, 91, 106, 79, 80, 75, 63, 83, 89, 105, 88, 64, 63, 75, 88, 74, 68, 53, 71, 54, 64, 119, 64, 65, 45, 75, 85, 55, 46, 76, 70, 55, 47, 65, 66, 94, 75, 52, 80, 53, 74, 75, 58, 108, 80, 81, 74, 56, 47, 64, 86, 59, 63, 65, 55, 84, 41, 61, 69, 48, 60, 45, 63, 68, 52, 91, 96, 65, 82, 53, 57, 57, 85, 103, 59, 50, 59, 49, 82, 65, 58, 70, 74, 72, 57, 32, 42, 71, 61, 75, 62, 58, 72, 65, 49, 72, 58, 58, 53, 45, 52, 90, 79, 53, 75, 64, 98, 63, 73, 61, 67, 60, 69, 68, 54, 57, 56, 41, 58, 60, 58, 58, 61, 63, 74, 50, 80, 77, 83, 70, 56, 52, 59, 85, 75, 66, 44, 61, 75, 68, 87, 77, 73, 101, 59, 78, 61, 43, 68, 72, 71, 69, 59, 68, 59, 51, 67, 75, 83, 56, 63, 67, 69, 62, 59, 83, 69, 85, 88, 46, 47, 70, 60, 71, 106, 60, 51, 73, 47, 63, 60, 75, 68, 63, 70, 78, 57, 56, 66, 69, 73, 43, 54, 55, 73, 56, 53, 85, 72, 82, 65, 68, 52, 66, 56, 67, 66, 50, 53, 65, 59, 57, 56, 60, 82, 59, 87, 60, 57, 70, 56, 81, 62, 67, 40, 59, 61, 49, 48, 74, 83, 68, 60, 60, 62, 59, 46, 64, 63, 67, 59, 69, 54, 74, 75, 95, 64, 69, 70, 62, 64, 64, 99, 57, 65, 60, 83, 101, 55, 61, 94, 49, 79, 69, 56, 70, 78, 46, 61, 86, 72, 67, 81, 70, 74, 72, 51, 63, 68, 45, 48, 78, 90, 67, 44, 75, 72, 62, 63, 66, 81, 59, 129, 72, 58, 57, 97, 62, 79, 49, 75, 55, 61, 68, 67, 69, 77, 79, 47, 65, 73, 61, 79, 72, 76, 73, 80, 58, 67, 78, 79, 61, 68, 75, 68, 64, 64, 149, 62, 67, 69, 55, 60, 69, 56, 65, 72, 61, 75, 63, 59, 62, 58, 67, 72, 69, 75, 62, 80, 49, 59, 69, 53, 69, 41, 37, 66, 77, 55, 63, 62, 73, 63, 63, 58, 76, 61, 130, 70, 75, 71, 63, 71, 72, 55, 59, 59, 60, 69, 49, 62, 46, 70, 70, 44, 63, 48, 92, 48, 49, 71, 42, 105, 67, 75, 66, 77, 67, 82, 75, 70, 77, 72, 50, 84, 63, 63, 58, 82, 70, 76, 57, 60, 60, 52, 60, 69, 56, 64, 60, 75, 98, 78, 58, 53, 62, 70, 68, 75, 74, 59, 83, 78, 57, 59, 68, 49, 62, 37, 60, 65, 64, 65, 104, 60, 61, 67, 65, 81, 47, 76, 58, 61, 79, 75, 67, 75, 61, 79, 55, 65, 49, 58, 65, 74, 88, 60, 70, 66, 60, 74, 56, 76, 63, 67, 61, 45, 70, 82, 57, 90, 53, 67, 74, 53, 62, 79, 56, 74, 46, 58, 81, 69, 62, 58, 69, 58, 62, 61, 62, 70, 73, 52, 64, 77, 75, 65, 70, 64, 78, 56, 63, 47, 53, 77, 78, 76, 67, 67, 59, 51, 46, 65, 66, 83, 60, 76, 72, 50, 61, 102, 64, 102, 61, 80, 49, 71, 92, 97, 53, 68, 68, 72, 67, 57, 65, 65, 60, 95, 63, 61, 70, 67, 64, 64, 46, 61, 58, 89, 62, 79, 68, 51, 55, 63, 96, 64, 78, 80, 75, 52, 77, 72, 64, 64, 74, 50, 62, 70, 69, 52, 55, 52, 77, 49, 56, 54, 61, 75, 68, 64, 80, 64, 67, 76, 63, 54, 54, 61, 51, 62, 67, 70, 96, 66, 52, 68, 59, 97, 63, 61, 48, 60, 57, 45, 62, 65, 53, 63, 79, 75, 56, 62, 36, 50, 63, 58, 47, 54, 62, 68, 71, 53, 80, 67, 81, 60, 51, 78, 54, 82, 62, 54, 78, 60, 72, 43, 55, 56, 98, 75, 59, 72, 99, 82, 69, 61, 69, 63, 51, 65, 68, 71, 73, 65, 72, 57, 63, 53, 86, 66, 69, 48, 83, 82, 60, 55, 61, 55, 74, 66, 71, 68, 43, 67, 58, 74, 74, 68, 57, 57, 66, 44, 46, 75, 51, 61, 62, 75, 52, 69, 76, 56, 80, 44, 78, 69, 73, 93, 55, 77, 57, 71, 65, 93, 49, 94, 69, 87, 63, 68, 63, 65, 66, 67, 67, 93, 71, 55, 85, 90, 76, 62, 71, 65, 68, 58, 55, 57, 56, 72, 70, 84, 71, 44, 64, 64, 62, 68, 72, 71, 72, 103, 58, 57, 68, 60, 58, 59, 73, 65, 42, 41, 78, 59, 65, 59, 59, 69, 67, 73, 63, 72, 69, 58, 66, 71, 78, 47, 51, 55, 75, 58, 59, 55, 57, 40, 63, 63, 58, 63, 75, 62, 50, 38, 80, 77, 62, 68, 41, 73, 
70, 60, 36, 92, 72, 65, 67, 53, 54, 64, 39, 58, 70, 65, 57, 81, 81, 56, 70, 63, 48, 58, 47, 75, 57, 55, 74, 56, 56, 50, 58, 53, 59, 96, 102, 59, 53, 66, 64, 66, 107, 51, 52, 87, 72, 79, 91, 87, 60, 65, 86, 85, 50, 67, 67, 73, 63, 60, 84, 55, 50, 44, 77, 74, 58, 55, 74, 77, 49, 65, 87, 69, 84, 57, 59, 71, 56, 48, 74, 56, 78, 81, 64, 60, 52, 81, 87, 69, 125, 62, 57, 60, 60, 79, 59, 75, 53, 59, 79, 76, 70, 63, 60, 69, 123, 57, 55, 86, 77, 78, 75, 74, 73, 66, 57, 63, 58, 74, 66, 51, 72, 74, 53, 53, 56, 74, 56, 67, 61, 62, 71, 58, 64, 60, 77, 62, 68, 69, 56, 55, 71, 63, 61, 72, 54, 55, 62, 53, 70, 57, 84, 57, 62, 56, 61, 66, 84, 72, 55, 54, 85, 66, 53, 52, 65, 84, 67, 56, 88, 85, 68, 56, 61, 53, 62, 68, 59, 68, 66, 53, 69, 83, 55, 64, 60, 55, 63, 83, 53, 68, 79, 95, 65, 87, 58, 74, 60, 74, 55, 53, 90, 64, 62, 64, 62, 69, 68, 57, 73, 75, 62, 82, 67, 54, 52, 81, 51, 71, 88, 64, 77, 61, 67, 67, 54, 63, 71, 68, 59, 56, 52, 67, 65, 61, 53, 58, 75, 64, 85, 60, 73, 73, 56, 52, 90, 65, 66, 53, 70, 69, 55, 70, 53, 57, 90, 72, 71, 50, 59, 62, 60, 71, 60, 70, 88, 74, 60, 70, 62, 77, 78, 55, 48, 70, 57, 49, 62, 61, 69, 71, 44, 79, 76, 61, 40, 65, 67, 67, 66, 64, 50, 65, 63, 59, 64, 57, 64, 71, 62, 65, 91, 68, 51, 65, 69, 60, 59, 55, 62, 60, 57, 102, 67, 61, 60, 55, 68, 65, 40, 60, 59, 69, 57, 60, 67, 79, 84, 60, 56, 57, 57, 60, 46, 98, 73, 64, 59, 83, 68, 75, 81, 66, 78, 59, 66, 79, 72, 60, 66, 71, 40, 65, 56, 67, 54, 73, 80, 70, 71, 65, 84, 54, 74, 63, 65, 72, 63, 68, 66, 53, 43, 63, 57, 60, 67, 65, 66, 40, 68, 72, 60, 64, 57, 64, 58, 69, 53, 76, 51, 57, 56, 60, 46, 85, 67, 55, 54, 61, 66, 55, 86, 68, 55, 73, 77, 66, 71, 88, 68, 64, 46, 54, 58, 46, 60, 49, 72, 69, 85, 41, 76, 60, 78, 74, 64, 78, 55, 78, 71, 58, 67, 49, 55, 45, 40, 47, 60, 57, 84, 57, 57, 77, 52, 57, 49, 59, 98, 96, 64, 65, 89, 85, 52, 60, 47, 67, 47, 115, 73, 110, 98, 88, 68, 62, 68, 55, 62, 65, 50, 63, 63, 93, 74, 57, 60, 53, 61, 46, 62, 68, 94, 54, 97, 53, 101, 54, 66, 78, 59, 49, 61, 78, 62, 61, 63, 61, 83, 62, 52, 72, 62, 96, 41, 60, 55, 66, 50, 70, 72, 112, 74, 67, 49, 73, 50, 52, 63, 85, 68, 102, 70, 63, 70, 70, 61, 59, 59, 49, 48, 60, 54, 36, 56, 81, 87, 63, 45, 56, 80, 88, 47, 87, 69, 59, 65, 55, 48, 41, 62, 50, 50, 43, 57, 74, 73, 74, 58, 67, 65, 64, 67, 71, 61, 72, 49, 65, 67, 80, 61, 83, 57, 55, 87, 61, 75, 55, 56, 51, 87, 92, 59, 66, 46, 58, 85, 71, 71, 66, 79, 71, 86, 58, 74, 44, 71, 67, 53, 61, 74, 76, 65, 67, 66, 76, 57, 66, 74, 37, 52, 50, 58, 87, 54, 71, 95, 45, 68, 89, 53, 86, 63, 54, 52, 89, 51, 63, 55, 63, 66, 55, 58, 77, 68, 50, 66, 68, 73, 83, 63, 63, 65, 106, 78, 57, 71, 67, 49, 76, 76, 56, 45, 75, 81, 63, 66, 58, 93, 106, 30, 64, 56, 67, 60, 77, 68, 70, 100, 151, 79, 66, 72, 67, 67, 90, 47, 74, 76, 68, 42, 54, 46, 61, 64, 94, 76, 59, 56, 71, 71, 61, 64, 68, 80, 121, 58, 54, 73, 46, 54, 74, 56, 60, 56, 56, 64, 50, 45, 72, 47, 50, 64, 78, 47, 59, 63, 49, 58, 82, 53, 47, 61, 96, 51, 52, 65, 51, 62, 94, 67, 65, 63, 62, 68, 87, 69, 87, 74, 71, 69, 97, 78, 60, 60, 70, 67, 54, 60, 68, 42, 76, 77, 64, 60, 68, 42, 84, 70, 57, 80, 67, 65, 88, 52, 55, 72, 60, 47, 104, 98, 50, 86, 53, 83, 77, 46, 160, 55, 44, 73, 72, 58, 89, 69, 62, 92, 54, 54, 77, 72, 88, 64, 66, 70, 69, 61, 81, 64, 59, 73, 44, 83, 74, 68, 68, 65, 58, 53, 71, 53, 86, 59, 59, 94, 54, 82, 57, 54, 46, 63, 58, 51, 68, 43, 73, 72, 60, 71, 66, 61, 57, 52, 57, 45, 69, 28, 56, 58, 61, 60, 65, 99, 68, 46, 68, 59, 73, 62, 45, 82, 64, 94, 44, 64, 90, 59, 98, 61, 63, 83, 98, 79, 60, 57, 53, 57, 83, 84, 65, 65, 53, 60, 53, 48, 65, 59, 76, 46, 52, 41, 100, 61, 116, 81, 56, 
61, 51, 48, 72, 92, 48, 57, 37, 52, 45, 89, 61, 65, 110, 53, 65, 58, 58, 64, 84, 68, 100, 60, 48, 65, 71, 55, 77, 71, 68, 53, 74, 66, 66, 63, 57, 59, 59, 52, 64, 52, 62, 63, 67, 56, 105, 61, 65, 54, 72, 61, 51, 69, 65, 53, 51, 61, 72, 55, 59, 60, 63, 52, 57, 60, 67, 57, 72, 53, 52, 46, 37, 69, 58, 89, 132, 50, 47, 78, 75, 74, 54, 55, 62, 54, 45, 56, 60, 64, 67, 72, 92, 59, 72, 87, 68, 72, 59, 59, 61, 57, 61, 75, 55, 55, 77, 82, 61, 69, 72, 86, 59, 68, 57, 53, 65, 65, 126, 83, 50, 80, 53, 62, 55, 67, 68, 60, 78, 58, 37, 52, 82, 96, 60, 69, 52, 80, 66, 56, 56, 70, 65, 56, 60, 52, 58, 54, 70, 67, 92, 64, 65, 52, 51, 61, 62, 85, 61, 101, 74, 72, 71, 103, 110, 59, 77, 66, 64, 61, 56, 67, 77, 53, 85, 59, 83, 54, 99, 65, 54, 80, 58, 93, 79, 100, 57, 61, 73, 36, 76, 61, 54, 57, 61, 68, 113, 51, 69, 69, 73, 63, 73, 60, 64, 95, 60, 57, 64, 97, 63, 64, 67, 95, 42, 79, 71, 85, 48, 52, 79, 59, 60, 58, 67, 84, 101, 70, 43, 43, 57, 73, 60, 54, 62, 49, 68, 62, 95, 53, 61, 50, 78, 56, 74, 67, 47, 60, 55, 61, 67, 42, 66, 69, 68, 58, 76, 79, 57, 59, 60, 51, 55, 63, 62, 33, 42, 52, 97, 67, 75, 71, 57, 53, 62, 64, 59, 51, 59, 87, 57, 75, 57, 80, 58, 66, 59, 67, 52, 70, 60, 83, 73, 66, 86, 69, 61, 50, 86, 74, 71, 64, 61, 58, 109, 63, 61, 54, 61, 58, 68, 67, 56, 64, 65, 50, 58, 65, 64, 67, 58, 70, 36, 68, 61, 58, 80, 80, 76, 53, 68, 46, 57, 70, 66, 66, 56, 68, 72, 54, 78, 64, 59, 70, 68, 62, 67, 87, 39, 47, 75, 67, 80, 61, 67, 55, 62, 48, 87, 87, 61, 69, 74, 69, 58, 70, 54, 57, 95, 66, 46, 101, 75, 72, 78, 64, 82, 72, 67, 55, 65, 70, 67, 61, 69, 65, 67, 72, 71, 73, 68, 56, 82, 64, 65, 52, 77, 45, 59, 32, 59, 63, 66, 63, 65, 70, 60, 66, 61, 91, 62, 77, 53, 48, 65, 76, 56, 46, 55, 58, 84, 77, 56, 68, 45, 52, 74, 78, 82, 63, 65, 60, 59, 60, 66, 59, 64, 63, 61, 65, 56, 66, 66, 52, 58, 77, 127, 47, 66, 57, 63, 45, 50, 53, 42, 62, 75, 58, 69, 57, 54, 71, 75, 55, 54, 80, 67, 78, 60, 56, 41, 61, 70, 53, 43, 66, 78, 75, 59, 63, 80, 55, 61, 70, 57, 35, 46, 63, 56, 82, 56, 53, 88, 110, 62, 53, 50, 60, 107, 57, 71, 61, 61, 53, 61, 61, 58, 76, 55, 81, 55, 43, 84, 60, 70, 61, 66, 64, 77, 65, 67, 68, 85, 73, 77, 52, 80, 65, 72, 70, 82, 64, 70, 48, 69, 97, 62, 64, 66, 65, 57, 56, 68, 78, 69, 60, 54, 53, 58, 91, 62, 53, 70, 59, 51, 70, 74, 59, 70, 61, 72, 61, 57, 61, 64, 50, 62, 64, 53, 53, 78, 84, 80, 56, 67, 59, 76, 45, 55, 77, 68, 62, 64, 47, 91, 60, 52, 56, 61, 55, 74, 60, 66, 56, 59, 64, 59, 66, 82, 69, 54, 60, 54, 60, 54, 64, 54, 61, 111, 79, 53, 55, 51, 66, 59, 82, 55, 64, 65, 89, 70, 69, 45, 58, 67, 70, 58, 76, 56, 71, 84, 47, 84, 67, 81, 52, 58, 61, 81, 61, 58, 119, 87, 51, 65, 55, 82, 67, 45, 68, 66, 70, 76, 56, 67, 56, 47, 53, 73, 68, 60, 56, 56, 57, 70, 56, 81, 75, 74, 53, 108, 44, 73, 62, 62, 58, 77, 52, 56, 59, 58, 77, 65, 67, 86, 43, 67, 54, 62, 79, 109, 70, 63, 59, 87, 86, 59, 56, 62, 62, 58, 64, 70, 53, 73, 48, 71, 67, 48, 57, 65, 56, 55, 67, 71, 80, 53, 54, 78, 65, 59, 76, 63, 69, 75, 61, 59, 64, 63, 69, 81, 61, 52, 58, 49, 87, 59, 56, 78, 78, 52, 112, 91, 56, 73, 56, 87, 61, 55, 57, 59, 78, 59, 60, 53, 57, 66, 67, 67, 86, 85, 76, 55, 72, 59, 62, 55, 58, 76, 64, 81, 58, 105, 76, 60, 52, 62, 74, 54, 61, 64, 51, 63, 91, 50, 68, 67, 61, 74, 61, 70, 71, 61, 51, 67, 55, 63, 58, 58, 68, 75, 59, 57, 62, 47, 74, 58, 67, 65, 62, 63, 84, 62, 51, 54, 81, 102, 58, 104, 69, 68, 55, 70, 58, 76, 83, 68, 47, 83, 78, 73, 58, 59, 54, 74, 63, 70, 54, 56, 64, 93, 59, 63, 64, 81, 59, 53, 79, 63, 72, 66, 57, 56, 50, 61, 59, 62, 64, 45, 62, 113, 63, 65, 67, 58, 88, 65, 47, 49, 61, 67, 54, 62, 67, 68, 72, 81, 61, 57, 60, 51, 60, 62, 
79, 93, 75, 83, 68, 73, 73, 64, 84, 69, 67, 57, 76, 81, 62, 74, 62, 61, 78, 66, 81, 76, 68, 69, 57, 56, 51, 78, 51, 57, 51, 111, 69, 70, 82, 68, 81, 63, 80, 63, 54, 88, 59, 66, 57, 103, 87, 51, 65, 68, 68, 53, 72, 51, 91, 50, 63, 51, 65, 63, 70, 70, 67, 62, 72, 59, 52, 73, 48, 64, 52, 55, 66, 84, 54, 63, 62, 76, 60, 60, 63, 68, 64, 60, 49, 47, 82, 58, 105, 63, 67, 63, 74, 42, 79, 104, 102, 78, 54, 59, 68, 60, 61, 75, 72, 58, 56, 44, 60, 60, 59, 38, 65, 52, 66, 51, 58, 67, 87, 74, 75, 57, 56, 67, 61, 89, 68, 55, 77, 121, 58, 59, 62, 79, 60, 102, 63, 53, 59, 84, 64, 59, 97, 65, 70, 68, 66, 85, 53, 52, 44, 70, 54, 62, 64, 58, 74, 74, 48, 63, 65, 74, 64, 63, 65, 58, 65, 96, 51, 63, 63, 81, 71, 90, 61, 55, 56, 68, 53, 56, 62, 68, 58, 56, 56, 41, 85, 107, 69, 104, 69, 59, 64, 48, 79, 52, 73, 55, 51, 101, 57, 70, 75, 70, 70, 61, 60, 57, 56, 79, 64, 55, 63, 89, 66, 61, 70, 72, 56, 41, 67, 60, 78, 78, 55, 49, 85, 66, 57, 62, 59, 58, 54, 75, 58, 59, 51, 68, 68, 59, 52, 82, 50, 59, 81, 83, 94, 60, 61, 53, 54, 69, 68, 55, 61, 58, 53, 66, 70, 136, 58, 66, 61, 51, 80, 50, 60, 57, 61, 65, 62, 72, 68, 55, 68, 83, 68, 96, 65, 58, 60, 65, 51, 70, 77, 71, 100, 57, 65, 76, 56, 52, 72, 63, 69, 50, 57, 51, 54, 66, 66, 76, 60, 72, 150, 58, 49, 67, 59, 93, 51, 59, 59, 65, 56, 46, 64, 54, 64, 88, 58, 72, 71, 63, 77, 57, 95, 78, 68, 56, 66, 58, 57, 52, 76, 69, 72, 106, 69, 48, 62, 59, 61, 126, 68, 69, 54, 61, 61, 59, 66, 57, 60, 55, 58, 50, 61, 69, 68, 75, 54, 63, 49, 57, 57, 71, 65, 60, 60, 84, 55, 57, 85, 71, 92, 84, 48, 57, 65, 98, 57, 56, 61, 57, 60, 51, 81, 86, 63, 58, 52, 45, 87, 49, 52, 59, 57, 61, 65, 96, 65, 69, 61, 70, 72, 68, 122, 79, 67, 61, 46, 67, 54, 64, 70, 83, 61, 61, 78, 66, 70, 66, 83, 58, 45, 47, 67, 64, 56, 67, 66, 54, 48, 80, 68, 67, 98, 107, 63, 62, 65, 84, 85, 57, 62, 94, 68, 66, 75, 81, 66, 59, 61, 63, 44, 61, 88, 88, 54, 60, 64, 64, 55, 49, 65, 50, 64, 54, 60, 80, 63, 75, 77, 60, 37, 44, 58, 69, 55, 90, 55, 68, 39, 62, 75, 72, 89, 43, 35, 63, 60, 94, 51, 55, 51, 63, 53, 58, 97, 75, 87, 67, 45, 61, 45, 57, 66, 67, 62, 46, 76, 56, 47, 67, 70, 56, 69, 63, 84, 69, 40, 64, 58, 77, 55, 67, 54, 62, 88, 60, 53, 57, 59, 74, 50, 63, 56, 64, 50, 68, 84, 55, 64, 51, 60, 50, 47, 62, 80, 53, 52, 59, 75, 51, 59, 81, 65, 67, 76, 70, 100, 64, 69, 70, 75, 68, 43, 46, 50, 82, 55, 39, 53, 57, 67, 58, 57, 48, 68, 80, 49, 45, 78, 67, 66, 129, 82, 58, 39, 56, 144, 53, 72, 49, 59, 71, 58, 55, 54, 81, 66, 51, 53, 54, 69, 72, 78, 57, 59, 53, 72, 48, 61, 58, 69, 58, 79, 56, 69, 51, 95, 121, 54, 54, 60, 56, 48, 59, 54, 86, 73, 58, 49, 58, 135, 37, 63, 63, 91, 107, 34, 33, 46, 68, 70, 54, 53, 52, 62, 53, 50, 68, 71, 76, 81, 73, 57, 68, 62, 97, 57, 52, 52, 72, 87, 89, 60, 58, 60, 57, 62, 72, 105, 66, 52, 58, 123, 80, 53, 65, 44, 64, 53, 62, 64, 63, 49, 68, 52, 70, 87, 52, 54, 57, 72, 93, 87, 65, 58, 69, 47, 68, 55, 61, 83, 63, 70, 63, 80, 65, 84, 82, 68, 54, 58, 81, 74, 46, 68, 64, 81, 52, 64, 62, 79, 59, 53, 85, 58, 71, 51, 50, 79, 75, 64, 56, 71, 53, 71, 58, 44, 60, 52, 78, 59, 75, 88, 50, 66, 59, 85, 58, 54, 54, 74, 65, 63, 65, 43, 78, 82, 70, 56, 44, 80, 74, 64, 68, 75, 70, 85, 77, 55, 72, 64, 71, 127, 138, 68, 62, 55, 40, 92, 61, 63, 77, 78, 54, 59, 74, 55, 66, 80, 80, 98, 45, 69, 66, 61, 63, 58, 48, 62, 87, 62, 64, 65, 51, 83, 43, 51, 65, 50, 80, 52, 69, 68, 61, 76, 72, 80, 64, 63, 66, 67, 86, 71, 67, 61, 53, 46, 78, 70, 60, 63, 69, 73, 50, 52, 61, 61, 68, 60, 42, 51, 59, 54, 91, 57, 55, 79, 55, 68, 70, 105, 59, 72, 62, 80, 88, 51, 64, 64, 53, 109, 61, 47, 68, 70, 51, 62, 49, 52, 68, 52, 100, 50, 57, 79, 68, 64, 
77, 79, 93, 49, 70, 96, 65, 71, 90, 74, 50, 99, 50, 51, 69, 65, 79, 62, 51, 72, 49, 40, 56, 66, 64, 79, 53, 56, 99, 62, 71, 59, 58, 44, 100, 45, 50, 45, 51, 80, 69, 63, 67, 64, 65, 75, 87, 75, 58, 58, 69, 71, 51, 83, 54, 69, 61, 56, 65, 55, 80, 52, 51, 59, 94, 49, 54, 55, 85, 83, 66, 63, 89, 57, 77, 52, 96, 62, 84, 63, 56, 48, 59, 59, 58, 86, 92, 57, 86, 63, 60, 73, 49, 60, 60, 72, 55, 60, 51, 74, 88, 57, 50, 48, 86, 55, 76, 76, 62, 54, 53, 71, 112, 59, 66, 60, 44, 78, 103, 65, 62, 65, 59, 66, 76, 77, 85, 51, 77, 48, 58, 59, 66, 56, 87, 64, 92, 69, 46, 65, 66, 75, 66, 68, 65, 62, 66, 48, 71, 54, 93, 44, 52, 58, 57, 60, 92, 59, 50, 59, 58, 87, 66, 63, 98, 40, 62, 58, 75, 67, 69, 109, 48, 46, 62, 49, 66, 67, 70, 65, 55, 59, 54, 91, 65, 63, 56, 78, 77, 67, 77, 95, 61, 45, 55, 70, 55, 83, 63, 85, 70, 68, 64, 50, 63, 72, 56, 66, 60, 59, 112, 88, 53, 35, 65, 65, 57, 39, 85, 72, 76, 59, 91, 72, 62, 58, 71, 65, 83, 71, 49, 63, 67, 60, 52, 70, 100, 59, 70, 69, 93, 58, 47, 78, 100, 53, 47, 73, 77, 52, 72, 64, 83, 54, 59, 65, 66, 66, 46, 68, 73, 47, 52, 58, 52, 51, 77, 64, 99, 58, 63, 81, 40, 42, 51, 65, 59, 80, 66, 43, 66, 72, 64, 51, 66, 67, 65, 59, 60, 70, 42, 60, 73, 60, 66, 75, 42, 44, 66, 32, 72, 59, 88, 64, 93, 60, 67, 50, 54, 69, 73, 66, 70, 58, 62, 71, 62, 60, 63, 62, 61, 59, 69, 54, 59, 56, 75, 62, 78, 62, 80, 65, 103, 61, 54, 61, 43, 107, 63, 75, 53, 74, 64, 56, 53, 83, 62, 59, 55, 56, 64, 50, 46, 69, 61, 69, 46, 64, 65, 128, 129, 54, 91, 60, 83, 83, 68, 62, 60, 65, 60, 46, 80, 64, 69, 58, 79, 67, 69, 81, 80, 65, 57, 55, 62, 56, 61, 60, 61, 86, 38, 62, 52, 50, 60, 87, 54, 83, 59, 73, 65, 50, 129, 40, 95, 50, 49, 91, 53, 50, 60, 59, 70, 68, 56, 57, 44, 57, 79, 65, 64, 76, 60, 50, 61, 48, 67, 55, 37, 91, 56, 58, 69, 70, 39, 85, 59, 59, 49, 42, 76, 67, 48, 55, 87, 57, 104, 117, 77, 62, 65, 70, 58, 60, 69, 72, 84, 58, 64, 109, 55, 62, 70, 54, 40, 77, 63, 58, 73, 81, 78, 54, 50, 51, 48, 70, 77, 47, 67, 49, 103, 100, 68, 58, 75, 49, 98, 52, 62, 96, 63, 75, 60, 96, 56, 61, 68, 42, 63, 59, 75, 74, 67, 77, 70, 70, 79, 58, 46, 65, 61, 43, 45, 65, 69, 57, 105, 71, 61, 74, 41, 81, 87, 53, 80, 44, 79, 32, 82, 68, 76, 55, 54, 63, 81, 58, 66, 55, 57, 75, 63, 51, 62, 67, 60, 65, 63, 52, 65, 99, 76, 69, 66, 56, 62, 58, 59, 78, 45, 64, 71, 58, 66, 61, 76, 53, 58, 60, 69, 54, 56, 58, 51, 70, 61, 79, 57, 65, 79, 61, 60, 63, 69, 65, 72, 63, 86, 68, 64, 81, 59, 57, 69, 71, 60, 50, 58, 68, 78, 49, 85, 44, 62, 69, 58, 56, 76, 64, 60, 69, 79, 75, 72, 51, 73, 58, 59, 61, 57, 79, 58, 58, 54, 61, 66, 51, 59, 69, 83, 80, 70, 56, 67, 71, 75, 52, 62, 57, 71, 59, 61, 70, 58, 84, 58, 49, 74, 55, 57, 69, 79, 63, 72, 80, 78, 61, 68, 91, 72, 52, 92, 46, 67, 63, 68, 72, 74, 77, 58, 66, 94, 56, 70, 63, 51, 69, 53, 65, 59, 62, 62, 83, 82, 77, 63, 74, 67, 52, 53, 67, 62, 76, 74, 59, 64, 67, 61, 89, 73, 65, 49, 74, 70, 66, 69, 75, 62, 71, 74, 59, 57, 59, 72, 66, 78, 55, 54, 59, 71, 61, 69, 56, 58, 57, 83, 80, 72, 81, 69, 55, 63, 63, 53, 58, 71, 69, 76, 75, 59, 44, 62, 70, 57, 59, 65, 55, 60, 68, 58, 54, 55, 74, 56, 92, 63, 83, 60, 72, 77, 75, 77, 62, 74, 78, 64, 72, 53, 70, 61, 71, 62, 98, 62, 65, 65, 53, 64, 65, 67, 64, 63, 70, 92, 64, 72, 55, 62, 49, 54, 66, 67, 61, 62, 61, 62, 46, 59, 60, 60, 61, 52, 62, 65, 61, 66, 64, 68, 48, 53, 63, 72, 69, 67, 73, 94, 61, 71, 60, 75, 82, 51, 80, 67, 63, 65, 63, 66, 67, 71, 55, 59, 60, 69, 90, 57, 63, 56, 54, 80, 80, 60, 62, 54, 59, 60, 54, 106, 61, 64, 47, 69, 58, 62, 55, 84, 56, 47, 66, 76, 58, 70, 75, 55, 58, 77, 73, 56, 50, 60, 70, 61, 76, 73, 59, 73, 60, 69, 60, 59, 55, 59, 80, 74, 58, 
71, 59, 104, 75, 58, 51, 58, 63, 58, 58, 97, 64, 57, 56, 92, 51, 52, 73, 63, 61, 61, 76, 78, 58, 50, 72, 60, 64, 54, 64, 70, 67, 66, 61, 61, 54, 62, 74, 75, 60, 79, 73, 68, 67, 76, 57, 70, 70, 77, 55, 56, 64, 63, 79, 61, 43, 60, 65, 56, 67, 71, 51, 68, 55, 74, 78, 71, 54, 52, 79, 71, 61, 56, 56, 56, 65, 83, 76, 64, 56, 67, 55, 67, 69, 67, 74, 75, 60, 54, 64, 62, 62, 64, 59, 59, 68, 67, 61, 65, 63, 58, 44, 61, 49, 61, 63, 76, 66, 63, 97, 73, 64, 61, 81, 61, 56, 79, 56, 95, 59, 59, 44, 62, 79, 56, 77, 62, 50, 60, 72, 98, 62, 85, 56, 94, 64, 56, 52, 58, 63, 57, 45, 56, 58, 65, 53, 69, 79, 49, 68, 51, 79, 67, 64, 70, 64, 73, 72, 68, 63, 114, 64, 76, 57, 79, 74, 57, 76, 66, 69, 68, 74, 62, 56, 73, 64, 62, 71, 62, 54, 71, 74, 65, 47, 63, 54, 92, 61, 63, 68, 50, 85, 44, 63, 70, 72, 65, 68, 69, 63, 52, 67, 86, 55, 76, 66, 52, 87, 63, 51, 73, 45, 63, 60, 88, 81, 134, 68, 77, 61, 70, 58, 63, 53, 58, 70, 73, 61, 67, 61, 74, 61, 76, 63, 66, 84, 109, 56, 62, 57, 91, 61, 54, 55, 67, 55, 71, 55, 78, 52, 37, 54, 53, 61, 73, 54, 75, 64, 88, 60, 70, 59, 80, 54, 60, 81, 55, 93, 66, 54, 67, 66, 56, 67, 63, 60, 162, 63, 73, 58, 58, 68, 64, 66, 53, 63, 70, 60, 61, 57, 51, 61, 54, 66, 65, 60, 57, 94, 60, 59, 52, 65, 52, 80, 54, 94, 63, 66, 54, 61, 67, 64, 83, 56, 49, 52, 83, 64, 66, 52, 65, 74, 57, 58, 65, 68, 73, 59, 71, 55, 84, 62, 53, 59, 67, 65, 60, 85, 54, 73, 82, 73, 52, 57, 61, 66, 73, 64, 47, 51, 76, 100, 61, 48, 83, 57, 66, 71, 116, 65, 65, 53, 73, 60, 59, 50, 65, 51, 72, 58, 67, 53, 62, 62, 39, 61, 69, 111, 64, 53, 52, 60, 83, 74, 61, 70, 83, 61, 63, 49, 68, 58, 64, 57, 80, 57, 66, 131, 53, 58, 68, 109, 76, 103, 64, 72, 69, 63, 84, 67, 58, 68, 72, 72, 59, 57, 59, 69, 63, 61, 70, 57, 73, 64, 59, 69, 110, 69, 49, 52, 62, 67, 64, 62, 68, 56, 49, 65, 68, 66, 57, 76, 75, 62, 71, 86, 54, 85, 73, 73, 47, 52, 51, 75, 87, 68, 53, 92, 121, 52, 52, 49, 58, 56, 67, 66, 71, 71, 81, 59, 77, 55, 65, 65, 56, 68, 70, 60, 65, 89, 67, 80, 70, 84, 62, 61, 77, 86, 66, 68, 70, 60, 78, 60, 64, 59, 58, 67, 59, 76, 78, 64, 60, 64, 63, 62, 60, 72, 67, 63, 60, 73, 54, 63, 74, 59, 53, 57, 74, 56, 54, 66, 59, 61, 56, 63, 62, 54, 55, 58, 69, 123, 55, 57, 79, 71, 64, 55, 63, 67, 54, 63, 79, 67, 53, 90, 64, 77, 64, 70, 62, 80, 111, 46, 62, 50, 62, 62, 70, 63, 51, 57, 60, 48, 55, 54, 63, 62, 48, 83, 75, 54, 64, 72, 59, 57, 50, 67, 78, 68, 56, 72, 54, 78, 57, 67, 76, 57, 73, 61, 67, 59, 63, 61, 53, 64, 65, 61, 66, 75, 78, 58, 75, 65, 72, 57, 58, 73, 57, 63, 73, 57, 83, 59, 86, 59, 52, 63, 56, 48, 60, 52, 69, 83, 53, 57, 59, 75, 45, 40, 64, 73, 62, 60, 63, 69, 64, 84, 71, 63, 70, 58, 73, 56, 54, 58, 62, 67, 62, 70, 66, 61, 50, 92, 58, 60, 65, 56, 65, 53, 71, 59, 77, 47, 111, 89, 58, 47, 63, 64, 55, 59, 58, 53, 71, 61, 58, 54, 65, 67, 70, 91, 65, 68, 67, 67, 63, 76, 56, 56, 74, 68, 61, 76, 46, 57, 62, 63, 61, 78, 66, 60, 58, 60, 76, 51, 63, 74, 73, 72, 57, 65, 68, 80, 66, 60, 70, 59, 66, 64, 69, 62, 75, 64, 65, 56, 92, 60, 54, 58, 55, 86, 59, 62, 48, 81, 61, 74, 65, 72, 88, 58, 53, 56, 45, 65, 57, 58, 71, 60, 72, 60, 86, 61, 71, 63, 76, 66, 53, 59, 64, 54, 85, 69, 77, 62, 71, 94, 73, 94, 95, 94, 70, 62, 77, 72, 66, 66, 59, 71, 46, 68, 41, 76, 55, 67, 60, 55, 66, 57, 55, 54, 65, 81, 79, 75, 57, 52, 83, 69, 73, 89, 57, 52, 57, 87, 54, 78, 59, 56, 105, 69, 59, 63, 68, 69, 49, 68, 60, 63, 64, 69, 73, 90, 74, 62, 60, 56, 55, 68, 58, 63, 75, 68, 65, 50, 50, 71, 63, 52, 73, 73, 56, 99, 59, 61, 50, 51, 65, 70, 62, 71, 61, 68, 66, 65, 61, 66, 53, 55, 64, 73, 55, 101, 66, 96, 69, 54, 62, 70, 92, 61, 61, 67, 60, 72, 78, 74, 57, 66, 53, 66, 59, 54, 
61, 56, 48, 57, 61, 70, 58, 69, 70, 54, 67, 80, 74, 70, 70, 66, 63, 66, 63, 64, 73, 76, 63, 55, 59, 52, 64, 95, 70, 56, 52, 68, 59, 54, 69, 72, 71, 72, 84, 82, 76, 65, 68, 96, 72, 63, 77, 73, 63, 59, 66, 75, 61, 69, 74, 58, 76, 61, 56, 68, 64, 63, 49, 56, 56, 58, 76, 59, 61, 68, 63, 64, 72, 60, 68, 49, 77, 61, 66, 56, 53, 55, 48, 59, 68, 63, 60, 72, 62, 75, 47, 67, 65, 72, 65, 80, 67, 58, 56, 64, 64, 71, 77, 70, 52, 66, 82, 53, 63, 78, 63, 59, 65, 64, 65, 60, 71, 78, 77, 57, 68, 60, 53, 59, 75, 67, 62, 58, 64, 53, 76, 62, 62, 59, 63, 67, 71, 58, 67, 61, 82, 64, 66, 58, 66, 45, 78, 57, 49, 64, 76, 68, 55, 60, 83, 73, 61, 70, 73, 78, 57, 59, 79, 55, 68, 66, 57, 57, 71, 57, 65, 52, 72, 69, 59, 55, 85, 81, 50, 69, 62, 80, 68, 67, 72, 77, 60, 70, 56, 106, 64, 64, 67, 71, 65, 58, 61, 63, 79, 74, 66, 71, 68, 73, 90, 66, 53, 67, 69, 76, 61, 68, 61, 54, 57, 52, 45, 88, 56, 62, 51, 65, 60, 67, 83, 70, 50, 75, 54, 64, 46, 58, 78, 47, 71, 85, 74, 47, 45, 86, 58, 62, 86, 66, 61, 66, 80, 78, 71, 75, 56, 56, 63, 72, 81, 58, 66, 72, 89, 75, 64, 69, 58, 89, 64, 50, 62, 62, 59, 72, 69, 59, 57, 75, 48, 71, 67, 75, 61, 64, 56, 94, 60, 60, 80, 59, 51, 48, 72, 62, 79, 73, 65, 65, 72, 50, 47, 67, 52, 58, 72, 59, 64, 71, 64, 59, 52, 85, 63, 63, 64, 80, 55, 67, 61, 77, 72, 74, 59, 55, 50, 105, 68, 57, 69, 64, 68, 70, 50, 53, 68, 68, 75, 89, 63, 60, 56, 57, 67, 54, 63, 64, 93, 65, 68, 73, 64, 92, 59, 89, 56, 74, 58, 82, 84, 57, 123, 63, 75, 54, 55, 54, 54, 58, 62, 60, 87, 64, 62, 74, 78, 67, 58, 54, 92, 78, 58, 60, 48, 77, 91, 60, 60, 63, 101, 79, 53, 80, 45, 67, 65, 57, 83, 68, 67, 64, 60, 62, 82, 49, 60, 54, 59, 58, 59, 65, 67, 54, 65, 80, 59, 69, 62, 67, 65, 76, 168, 59, 50, 65, 56, 82, 58, 82, 59, 63, 64, 70, 59, 92, 50, 56, 56, 83, 52, 67, 63, 55, 57, 66, 74, 55, 47, 65, 78, 58, 60, 69, 57, 70, 56, 62, 55, 64, 70, 67, 56, 60, 58, 70, 57, 71, 60, 67, 60, 62, 72, 65, 75, 61, 77, 71, 62, 53, 58, 53, 64, 102, 67, 64, 98, 78, 45, 53, 67, 46, 61, 70, 62, 62, 113, 106, 67, 68, 61, 72, 62, 81, 58, 65, 61, 60, 74, 68, 63, 61, 49, 53, 71, 61, 74, 64, 90, 64, 54, 87, 60, 55, 130, 61, 102, 69, 65, 72, 58, 49, 51, 77, 63, 63, 82, 54, 66, 66, 68, 54, 72, 159, 54, 64, 64, 62, 67, 75, 68, 66, 57, 68, 55, 65, 54, 69, 61, 83, 66, 62, 55, 45, 67, 59, 64, 69, 65, 68, 54, 78, 45, 51, 87, 68, 89, 58, 60, 66, 68, 65, 67, 64, 86, 84, 78, 51, 70, 87, 52, 53, 57, 82, 62, 47, 67, 65, 58, 59, 72, 64, 55, 64, 59, 63, 65, 59, 58, 71, 59, 64, 64, 67, 56, 65, 60, 62, 89, 57, 55, 62, 65, 74, 67, 84, 59, 57, 71, 53, 70, 70, 58, 66, 58, 86, 62, 90, 63, 65, 63, 65, 60, 78, 49, 65, 68, 59, 62, 66, 74, 66, 68, 56, 56, 77, 49, 94, 73, 67, 58, 65, 69, 63, 71, 51, 50, 79, 62, 53, 61, 68, 65, 56, 65, 75, 68, 107, 59, 64, 52, 68, 60, 45, 62, 60, 46, 64, 73, 56, 59, 71, 62, 94, 65, 76, 79, 60, 47, 46, 47, 71, 68, 64, 86, 62, 55, 74, 53, 69, 57, 62, 71, 58, 61, 80, 64, 80, 51, 62, 62, 58, 65, 55, 61, 63, 75, 70, 60, 58, 68, 64, 72, 91, 75, 56, 54, 51, 70, 53, 67, 64, 48, 66, 86, 69, 60, 62, 70, 53, 69, 60, 74, 69, 65, 93, 74, 63, 59, 72, 79, 71, 50, 62, 47, 62, 66, 60, 73, 74, 82, 70, 69, 52, 58, 81, 57, 72, 65, 81, 61, 42, 69, 77, 59, 87, 85, 52, 57, 49, 57, 75, 82, 47, 56, 108, 51, 74, 134, 106, 66, 63, 45, 56, 52, 39, 56, 68, 85, 79, 67, 70, 66, 65, 54, 80, 71, 55, 55, 54, 75, 65, 52, 61, 62, 80, 64, 57, 89, 66, 61, 56, 77, 71, 73, 72, 75, 59, 60, 66, 64, 75, 63, 59, 87, 48, 65, 65, 63, 61, 77, 78, 65, 99, 113, 68, 63, 75, 54, 73, 74, 71, 74, 74, 66, 57, 52, 64, 74, 73, 67, 64, 64, 65, 62, 60, 58, 78, 68, 69, 58, 61, 55, 73, 53, 69, 105, 57, 59, 72, 
62, 59, 58, 56, 60, 52, 64, 56, 59, 65, 62, 57, 62, 78, 51, 48, 74, 55, 79, 63, 78, 59, 61, 58, 61, 77, 86, 55, 66, 68, 70, 69, 84, 57, 72, 59, 66, 73, 68, 83, 60, 63, 64, 77, 66, 63, 79, 84, 55, 48, 83, 71, 66, 55, 63, 67, 60, 100, 78, 57, 55, 81, 55, 61, 60, 53, 94, 60, 64, 79, 56, 45, 50, 55, 62, 70, 69, 82, 73, 54, 69, 65, 48, 75, 60, 65, 59, 67, 55, 77, 63, 44, 57, 60, 52, 72, 77, 77, 60, 49, 71, 50, 81, 51, 62, 77, 63, 64, 65, 60, 74, 53, 59, 63, 51, 72, 69, 66, 80, 66, 71, 67, 57, 47, 86, 57, 50, 86, 53, 62, 62, 66, 79, 56, 63, 58, 68, 68, 69, 73, 56, 58, 75, 66, 64, 64, 66, 58, 60, 51, 112, 83, 63, 65, 58, 56, 72, 65, 56, 71, 71, 53, 68, 62, 56, 71, 66, 96, 78, 56, 57, 56, 59, 61, 53, 67, 76, 68, 84, 72, 49, 57, 49, 96, 66, 64, 64, 70, 62, 79, 66, 55, 62, 63, 57, 63, 58, 54, 52, 57, 64, 57, 78, 62, 62, 67, 85, 64, 58, 72, 82, 65, 61, 76, 50, 49, 57, 91, 66, 62, 64, 57, 64, 73, 71, 64, 63, 76, 56, 101, 66, 66, 55, 61, 67, 59, 65, 67, 77, 60, 59, 57, 91, 63, 61, 56, 76, 56, 62, 79, 93, 63, 64, 60, 67, 56, 73, 67, 70, 57, 75, 75, 76, 51, 70, 52, 54, 65, 48, 71, 73, 60, 72, 55, 53, 61, 72, 67, 64, 61, 102, 65, 97, 69, 63, 76, 79, 64, 60, 41, 89, 66, 67, 55, 91, 60, 60, 66, 63, 53, 57, 57, 76, 80, 56, 65, 56, 65, 60, 76, 71, 56, 67, 50, 81, 58, 63, 65, 55, 56, 76, 59, 59, 48, 68, 58, 60, 71, 62, 56, 75, 57, 105, 89, 68, 67, 55, 62, 67, 99, 65, 81, 58, 77, 102, 74, 57, 77, 69, 56, 61, 67, 70, 62, 70, 66, 62, 61, 89, 69, 75, 76, 56, 72, 59, 53, 66, 56, 63, 65, 86, 53, 59, 79, 31, 65, 70, 56, 57, 60, 59, 58, 63, 73, 65, 75, 63, 52, 68, 61, 50, 69, 52, 54, 65, 72, 84, 59, 65, 80, 52, 64, 63, 69, 64, 76, 67, 61, 54, 71, 73, 58, 56, 72, 68, 70, 54, 88, 59, 60, 57, 70, 79, 64, 66, 65, 75, 75, 67, 57, 61, 77, 61, 74, 62, 75, 64, 59, 54, 67, 124, 50, 67, 62, 73, 60, 58, 57, 59, 61, 46, 66, 64, 58, 69, 68, 70, 64, 63, 70, 63, 77, 52, 67, 58, 48, 57, 72, 62, 60, 68, 76, 63, 54, 71, 56, 60, 51, 65, 55, 64, 70, 87, 58, 46, 62, 58, 62, 63, 62, 59, 66, 68, 63, 54, 60, 88, 70, 73, 61, 69, 60, 62, 68, 62, 61, 72, 60, 55, 92, 60, 54, 63, 65, 75, 63, 81, 56, 88, 55, 101, 84, 70, 57, 60, 44, 65, 57, 55, 65, 83, 63, 66, 64, 59, 57, 51, 67, 122, 57, 76, 60, 59, 65, 61, 84, 59, 69, 55, 58, 57, 62, 72, 70, 58, 48, 124, 70, 60, 63, 51, 71, 61, 74, 75, 76, 73, 62, 45, 87, 66, 69, 56, 60, 56, 57, 61, 64, 56, 56, 52, 54, 67, 56, 48, 63, 65, 68, 91, 70, 56, 73, 60, 67, 82, 57, 52, 61, 67, 58, 61, 57, 55, 62, 67, 57, 77, 60, 79, 56, 64, 51, 62, 114, 66, 61, 44, 66, 56, 56, 75, 70, 66, 74, 90, 62, 49, 55, 63, 59, 74, 86, 60, 59, 90, 64, 70, 69, 67, 59, 82, 63, 67, 69, 56, 44, 89, 56, 63, 67, 60, 60, 66, 61, 56, 60, 56, 61, 86, 56, 58, 67, 60, 63, 60, 69, 76, 74, 71, 70, 54, 58, 53, 64, 52, 53, 61, 159, 78, 51, 51, 149, 60, 63, 65, 63, 58, 62, 62, 57, 65, 88, 69, 65, 63, 72, 76, 62, 57, 55, 70, 55, 90, 56, 55, 79, 46, 53, 58, 48, 71, 70, 67, 86, 69, 49, 68, 58, 85, 68, 55, 61, 77, 73, 88, 54, 71, 65, 66, 70, 63, 59, 65, 70, 92, 81, 51, 78, 61, 56, 62, 53, 59, 65, 62, 54, 52, 61, 82, 61, 60, 57, 68, 56, 79, 56, 55, 81, 57, 57, 62, 69, 63, 68, 64, 59, 61, 51, 71, 61, 54, 91, 60, 74, 69, 50, 65, 55, 56, 59, 66, 65, 62, 64, 79, 55, 54, 62, 63, 74, 67, 90, 54, 67, 93, 68, 52, 53, 69, 73, 41, 84, 75, 63, 90, 62, 68, 60, 75, 60, 65, 152, 67, 59, 60, 79, 62, 61, 77, 60, 76, 63, 91, 56, 66, 61, 59, 60, 57, 66, 59, 63, 69, 88, 57, 56, 68, 58, 88, 59, 97, 59, 63, 63, 60, 59, 47, 63, 80, 78, 58, 59, 61, 63, 74, 85, 75, 53, 68, 65, 54, 64, 53, 56, 59, 56, 70, 64, 81, 60, 47, 60, 68, 60, 54, 59, 62, 56, 80, 75, 53, 61, 71, 73, 60, 
57, 63, 77, 54, 69, 85, 53, 54, 61, 67, 47, 62, 61, 51, 57, 89, 81, 59, 55, 66, 65, 60, 59, 63, 66, 67, 75, 62, 55, 55, 69, 56, 81, 56, 70, 65, 67, 59, 58, 57, 99, 58, 61, 67, 78, 57, 62, 56, 72, 62, 82, 52, 58, 54, 84, 55, 62, 77, 53, 101, 65, 59, 70, 75, 70, 76, 54, 63, 64, 91, 68, 54, 74, 67, 70, 91, 53, 70, 52, 61, 63, 54, 68, 76, 58, 65, 69, 58, 66, 70, 77, 53, 81, 62, 57, 66, 66, 52, 61, 69, 55, 70, 94, 93, 53, 59, 75, 68, 81, 68, 71, 50, 66, 79, 86, 65, 63, 56, 48, 74, 62, 72, 66, 69, 85, 63, 79, 63, 65, 62, 69, 54, 49, 52, 75, 90, 48, 53, 54, 50, 60, 67, 76, 62, 70, 66, 78, 54, 60, 59, 66, 65, 70, 52, 55, 52, 56, 54, 59, 69, 77, 63, 57, 75, 58, 86, 62, 80, 50, 51, 53, 90, 62, 72, 73, 73, 68, 73, 83, 47, 55, 75, 82, 65, 36, 42, 56, 86, 86, 80, 57, 57, 64, 89, 55, 73, 142, 91, 59, 119, 70, 60, 95, 57, 74, 80, 65, 79, 82, 50, 62, 48, 66, 59, 52, 69, 64, 62, 62, 75, 52, 72, 63, 55, 62, 59, 55, 60, 53, 61, 68, 55, 61, 58, 77, 82, 67, 68, 61, 75, 50, 64, 71, 79, 76, 57, 86, 56, 58, 75, 64, 51, 75, 70, 73, 79, 47, 65, 53, 62, 75, 119, 70, 84, 56, 62, 69, 59, 76, 57, 86, 70, 68, 45, 62, 76, 68, 59, 63, 105, 68, 64, 61, 63, 58, 60, 55, 54, 58, 87, 62, 66, 71, 50, 75, 69, 63, 56, 49, 49, 79, 69, 56, 53, 71, 62, 62, 44, 62, 64, 54, 72, 47, 57, 67, 56, 80, 98, 65, 57, 74, 71, 59, 68, 77, 54, 48, 54, 49, 67, 66, 54, 66, 77, 57, 54, 53, 81, 64, 45, 58, 88, 49, 42, 62, 76, 49, 60, 76, 66, 50, 59, 72, 46, 68, 57, 78, 53, 53, 59, 51, 66, 59, 69, 60, 67, 62, 63, 94, 63, 65, 67, 53, 70, 71, 62, 49, 64, 58, 80, 58, 101, 64, 57, 46, 54, 52, 74, 59, 57, 52, 72, 84, 58, 59, 86, 59, 71, 53, 65, 80, 47, 60, 77, 58, 76, 75, 52, 70, 68, 55, 58, 74, 54, 77, 49, 68, 62, 65, 52, 77, 86, 74, 61, 70, 60, 81, 83, 58, 83, 58, 62, 66, 93, 61, 52, 81, 80, 55, 56, 60, 74, 56, 69, 71, 61, 43, 73, 72, 73, 57, 52, 95, 66, 49, 76, 51, 63, 66, 57, 69, 66, 54, 72, 68, 59, 72, 58, 82, 58, 79, 91, 64, 60, 53, 55, 63, 59, 60, 57, 57, 51, 64, 96, 61, 46, 73, 48, 86, 89, 66, 67, 48, 58, 54, 42, 74, 69, 82, 77, 48, 56, 74, 54, 61, 55, 51, 52, 52, 58, 51, 65, 82, 51, 72, 62, 52, 56, 72, 59, 74, 77, 62, 49, 64, 65, 69, 50, 80, 75, 59, 63, 87, 116, 70, 60, 93, 74, 60, 68, 58, 64, 123, 74, 64, 64, 51, 61, 65, 61, 54, 58, 65, 53, 91, 52, 54, 61, 61, 75, 107, 81, 67, 95, 51, 66, 55, 69, 63, 61, 63, 71, 130, 50, 67, 60, 71, 69, 50, 75, 52, 71, 76, 45, 63, 62, 64, 66, 56, 78, 70, 65, 63, 79, 76, 52, 46, 59, 66, 48, 54, 64, 54, 60, 57, 41, 64, 61, 65, 84, 73, 55, 98, 84, 56, 53, 67, 63, 63, 47, 56, 76, 63, 57, 49, 64, 96, 52, 71, 74, 52, 56, 70, 75, 54, 93, 53, 97, 63, 59, 58, 55, 99, 57, 97, 62, 76, 77, 54, 95, 53, 88, 58, 71, 60, 79, 60, 72, 62, 70, 51, 76, 57, 72, 79, 74, 59, 56, 55, 72, 45, 72, 57, 82, 54, 67, 97, 76, 76, 77, 71, 67, 54, 48, 85, 84, 82, 69, 52, 57, 67, 78, 54, 70, 75, 167, 61, 71, 59, 68, 60, 81, 70, 56, 73, 63, 72, 98, 55, 57, 66, 54, 64, 62, 67, 53, 55, 47, 57, 53, 81, 41, 46, 89, 68, 73, 62, 72, 65, 56, 67, 53, 60, 95, 49, 85, 66, 49, 69, 65, 54, 62, 70, 62, 59, 77, 60, 55, 71, 51, 63, 56, 62, 56, 72, 73, 67, 67, 63, 95, 71, 54, 48, 50, 69, 92, 54, 48, 61, 75, 114, 71, 62, 70, 58, 91, 104, 66, 62, 91, 49, 70, 90, 86, 63, 92, 65, 60, 82, 58, 67, 71, 57, 60, 74, 52, 56, 74, 62, 65, 62, 61, 94, 63, 63, 56, 68, 71, 73, 81, 51, 63, 67, 58, 50, 66, 101, 41, 54, 70, 133, 56, 73, 66, 72, 62, 71, 69, 51, 61, 60, 110, 75, 62, 71, 68, 69, 57, 101, 49, 94, 59, 69, 71, 56, 59, 70, 40, 75, 53, 78, 69, 50, 72, 79, 82, 63, 90, 78, 65, 61, 43, 68, 81, 77, 53, 63, 66, 68, 70, 66, 53, 60, 70, 61, 69, 64, 81, 51, 74, 57, 65, 77, 75, 
55, 71, 82, 59, 52, 56, 68, 80, 52, 72, 65, 81, 71, 60, 55, 63, 63, 93, 62, 68, 59, 56, 65, 111, 71, 52, 63, 58, 63, 49, 40, 50, 58, 52, 63, 74, 83, 61, 49, 52, 63, 62, 80, 52, 57, 53, 105, 60, 55, 74, 57, 77, 51, 64, 63, 53, 55, 109, 70, 82, 57, 67, 70, 69, 67, 71, 118, 68, 69, 65, 61, 75, 59, 61, 61, 70, 53, 83, 53, 110, 66, 71, 85, 82, 60, 56, 66, 89, 57, 51, 65, 60, 57, 83, 65, 59, 79, 80, 62, 73, 71, 82, 65, 89, 97, 55, 62, 53, 65, 66, 51, 82, 68, 44, 67, 78, 69, 81, 73, 57, 61, 51, 71, 72, 64, 62, 46, 60, 62, 68, 46, 57, 59, 100, 71, 62, 53, 68, 50, 62, 64, 52, 74, 66, 70, 68, 58, 73, 74, 103, 63, 75, 69, 71, 57, 53, 106, 59, 60, 60, 56, 51, 57, 47, 70, 71, 80, 58, 89, 51, 74, 56, 76, 66, 57, 66, 63, 57, 56, 63, 83, 65, 78, 59, 60, 59, 57, 76, 54, 62, 74, 67, 69, 95, 68, 67, 59, 86, 59, 53, 64, 62, 63, 71, 77, 59, 66, 69, 53, 80, 47, 74, 71, 60, 92, 67, 53, 63, 76, 51, 70, 56, 56, 54, 60, 49, 67, 68, 51, 64, 62, 40, 68, 67, 57, 56, 61, 52, 82, 59, 77, 61, 65, 72, 47, 67, 61, 62, 92, 68, 54, 62, 85, 37, 88, 51, 47, 85, 65, 67, 56, 77, 58, 90, 61, 64, 58, 66, 66, 66, 64, 61, 79, 61, 83, 89, 50, 63, 82, 62, 63, 75, 61, 71, 62, 74, 83, 76, 59, 61, 64, 60, 67, 71, 71, 50, 54, 54, 55, 100, 57, 40, 56, 70, 57, 75, 68, 58, 50, 81, 61, 51, 69, 77, 72, 57, 81, 65, 73, 64, 61, 58, 55, 71, 71, 62, 77, 68, 90, 58, 60, 79, 68, 52, 61, 90, 70, 75, 66, 96, 79, 66, 56, 61, 68, 81, 65, 70, 69, 71, 65, 56, 77, 65, 69, 51, 69, 82, 50, 83, 59, 61, 54, 51, 48, 61, 58, 59, 75, 63, 75, 49, 63, 66, 66, 58, 54, 70, 77, 95, 54, 61, 48, 58, 52, 57, 105, 62, 71, 56, 73, 74, 65, 61, 74, 60, 66, 68, 59, 82, 50, 62, 58, 86, 66, 79, 51, 68, 75, 49, 61, 67, 54, 56, 47, 62, 72, 58, 60, 67, 51, 91, 65, 48, 83, 108, 64, 68, 80, 86, 97, 64, 70, 68, 40, 65, 63, 50, 54, 76, 62, 77, 62, 63, 56, 49, 87, 59, 68, 54, 62, 71, 77, 68, 71, 67, 71, 91, 58, 72, 67, 44, 64, 55, 89, 57, 68, 64, 96, 68, 68, 65, 57, 58, 67, 53, 54, 69, 49, 63, 59, 88, 71, 49, 61, 68, 73, 81, 45, 90, 71, 74, 92, 56, 81, 67, 76, 74, 58, 51, 59, 71, 51, 92, 59, 56, 66, 87, 115, 58, 49, 71, 74, 75, 77, 67, 51, 62, 68, 54, 72, 72, 83, 61, 64, 80, 56, 47, 82, 58, 48, 56, 48, 57, 68, 72, 55, 75, 50, 67, 63, 82, 60, 80, 57, 70, 63, 47, 48, 58, 70, 71, 92, 65, 52, 62, 68, 65, 68, 55, 65, 82, 64, 50, 77, 66, 76, 52, 81, 68, 64, 59, 67, 66, 53, 65, 81, 76, 58, 71, 72, 79, 74, 69, 64, 62, 75, 72, 61, 68, 57, 61, 36, 56, 71, 83, 83, 64, 63, 80, 81, 55, 47, 81, 62, 60, 62, 58, 68, 77, 49, 79, 77, 60, 63, 58, 48, 65, 58, 68, 54, 68, 68, 69, 45, 69, 58, 50, 55, 49, 56, 77, 64, 80, 57, 73, 65, 65, 75, 50, 57, 84, 62, 68, 57, 60, 62, 49, 71, 71, 63, 58, 65, 68, 46, 67, 62, 83, 57, 72, 55, 59, 75, 68, 55, 72, 79, 77, 65, 63, 54, 69, 69, 70, 69, 55, 66, 83, 82, 53, 54, 75, 68, 56, 79, 56, 64, 53, 71, 53, 83, 42, 73, 58, 86, 55, 67, 59, 83, 64, 109, 62, 52, 72, 81, 55, 48, 67, 69, 68, 95, 54, 58, 68, 60, 60, 64, 55, 66, 61, 62, 50, 70, 52, 59, 65, 82, 66, 79, 51, 52, 71, 79, 45, 90, 38, 60, 77, 63, 62, 94, 60, 47, 48, 57, 78, 77, 86, 72, 67, 67, 51, 56, 60, 60, 61, 70, 59, 68, 65, 65, 95, 68, 67, 65, 50, 74, 67, 78, 69, 87, 45, 52, 70, 50, 85, 69, 71, 63, 59, 59, 62, 54, 48, 73, 67, 77, 53, 67, 109, 72, 71, 51, 63, 84, 96, 66, 63, 49, 66, 63, 71, 57, 78, 54, 58, 62, 64, 61, 74, 40, 74, 61, 53, 60, 75, 47, 119, 147, 76, 76, 72, 65, 56, 58, 87, 55, 78, 84, 51, 61, 65, 62, 107, 59, 139, 70, 60, 62, 66, 110, 60, 81, 67, 48, 68, 87, 56, 67, 39, 60, 75, 69, 63, 73, 81, 65, 65, 56, 65, 51, 70, 86, 62, 52, 64, 68, 74, 81, 64, 76, 60, 55, 59, 59, 57, 60, 64, 71, 55, 70, 66, 82, 77, 
52, 76, 62, 78, 53, 56, 68, 57, 60, 56, 90, 52, 71, 58, 83, 88, 67, 73, 50, 86, 81, 68, 59, 79, 55, 53, 100, 62, 70, 70, 64, 65, 77, 60, 58, 49, 49, 95, 98, 74, 57, 91, 81, 77, 52, 57, 94, 64, 78, 53, 72, 51, 67, 97, 63, 61, 59, 56, 113, 75, 53, 77, 60, 55, 68, 71, 75, 58, 67, 65, 92, 59, 114, 51, 63, 58, 69, 61, 41, 68, 56, 66, 76, 80, 61, 76, 88, 78, 34, 68, 66, 70, 51, 69, 59, 61, 54, 72, 60, 67, 49, 57, 62, 60, 82, 48, 49, 53, 60, 98, 63, 65, 61, 82, 64, 61, 62, 71, 65, 42, 54, 65, 73, 59, 46, 70, 74, 60, 73, 86, 57, 64, 91, 92, 75, 70, 53, 76, 72, 55, 68, 55, 59, 67, 62, 105, 59, 68, 65, 58, 68, 64, 78, 70, 66, 60, 55, 64, 55, 68, 68, 66, 77, 47, 102, 63, 78, 78, 48, 68, 75, 54, 71, 68, 59, 63, 67, 47, 59, 75, 61, 46, 62, 65, 50, 52, 87, 73, 52, 94, 64, 68, 67, 79, 59, 117, 53, 64, 65, 80, 58, 53, 66, 76, 77, 78, 136, 56, 59, 68, 61, 53, 110, 88, 51, 58, 61, 60, 56, 50, 46, 73, 84, 71, 56, 61, 58, 65, 80, 68, 74, 57, 62, 61, 51, 53, 62, 52, 52, 66, 69, 70, 69, 59, 72, 60, 61, 77, 66, 75, 71, 49, 59, 62, 61, 52, 68, 73, 80, 62, 65, 67, 79, 62, 57, 79, 74, 72, 71, 73, 56, 65, 58, 53, 64, 62, 56, 98, 73, 71, 64, 54, 66, 66, 87, 79, 51, 63, 52, 62, 64, 48, 56, 51, 66, 61, 64, 72, 58, 81, 64, 53, 80, 57, 44, 50, 64, 80, 60, 54, 68, 76, 64, 69, 90, 45, 71, 62, 58, 60, 70, 52, 76, 56, 73, 57, 54, 61, 101, 59, 63, 84, 56, 68, 101, 64, 56, 58, 66, 70, 45, 78, 66, 71, 69, 85, 57, 73, 64, 66, 60, 74, 57, 82, 53, 61, 54, 49, 69, 71, 67, 76, 40, 48, 63, 69, 54, 57, 58, 62, 82, 42, 76, 68, 70, 67, 72, 48, 65, 65, 64, 60, 59, 77, 66, 56, 59, 55, 98, 62, 62, 65, 90, 57, 86, 60, 66, 80, 71, 80, 81, 60, 57, 86, 52, 55, 64, 65, 73, 66, 69, 58, 78, 47, 68, 94, 71, 65, 83, 76, 59, 74, 69, 65, 79, 72, 55, 78, 65, 64, 56, 51, 91, 56, 65, 55, 60, 93, 60, 50, 52, 68, 57, 94, 60, 68, 53, 80, 71, 75, 57, 70, 52, 68, 64, 48, 62, 80, 70, 64, 65, 75, 70, 75, 53, 72, 65, 56, 71, 80, 68, 77, 61, 90, 68, 64, 47, 64, 51, 62, 90, 73, 73, 58, 67, 70, 53, 47, 62, 67, 59, 60, 78, 71, 63, 90, 65, 74, 89, 78, 57, 57, 49, 62, 55, 81, 64, 72, 72, 66, 43, 59, 60, 72, 62, 56, 76, 63, 69, 51, 64, 59, 78, 62, 56, 50, 58, 61, 62, 77, 73, 61, 67, 67, 71, 69, 74, 58, 76, 80, 52, 58, 61, 59, 56, 86, 62, 55, 52, 89, 72, 88, 53, 62, 64, 50, 62, 66, 72, 64, 63, 91, 63, 58, 51, 69, 78, 60, 55, 76, 78, 71, 53, 46, 55, 91, 57, 49, 74, 77, 58, 63, 53, 58, 57, 62, 82, 53, 73, 62, 62, 53, 53, 64, 63, 108, 81, 72, 71, 63, 58, 79, 79, 69, 64, 62, 57, 55, 59, 57, 87, 64, 59, 63, 54, 102, 88, 71, 65, 65, 70, 73, 71, 56, 70, 67, 60, 62, 81, 65, 67, 60, 71, 63, 61, 54, 91, 58, 67, 69, 62, 63, 68, 101, 66, 67, 69, 61, 103, 52, 62, 67, 58, 83, 69, 92, 56, 77, 72, 72, 81, 103, 59, 69, 66, 67, 55, 71, 58, 57, 71, 73, 68, 55, 76, 46, 73, 80, 77, 60, 59, 104, 64, 55, 59, 90, 70, 59, 63, 78, 65, 65, 60, 45, 70, 64, 77, 72, 51, 80, 63, 68, 56, 46, 55, 60, 56, 76, 60, 65, 65, 67, 59, 82, 65, 55, 57, 63, 60, 57, 74, 64, 82, 66, 69, 64, 65, 57, 60, 58, 66, 65, 94, 61, 56, 44, 94, 123, 69, 59, 70, 72, 55, 82, 52, 56, 58, 56, 41, 65, 58, 80, 90, 51, 50, 57, 74, 49, 84, 54, 107, 51, 42, 82, 75, 49, 65, 65, 62, 62, 74, 52, 61, 70, 58, 61, 85, 63, 51, 54, 55, 62, 62, 57, 89, 65, 79, 70, 58, 60, 52, 60, 71, 67, 58, 64, 70, 57, 50, 52, 50, 64, 69, 67, 59, 90, 62, 70, 62, 49, 70, 62, 54, 56, 54, 72, 41, 77, 69, 66, 61, 71, 61, 62, 71, 69, 49, 76, 63, 57, 64, 79, 101, 70, 82, 79, 67, 69, 76, 71, 76, 49, 77, 70, 86, 79, 80, 60, 64, 68, 71, 72, 54, 57, 63, 61, 53, 64, 57, 97, 56, 46, 61, 64, 72, 63, 71, 50, 79, 57, 64, 57, 60, 72, 80, 56, 57, 65, 58, 53, 82, 55, 49, 77, 
52, 49, 61, 61, 58, 63, 58, 65, 68, 73, 96, 66, 66, 61, 90, 59, 61, 82, 73, 79, 54, 72, 74, 69, 66, 55, 98, 82, 72, 60, 66, 115, 63, 61, 53, 58, 67, 69, 70, 51, 69, 68, 64, 67, 45, 84, 57, 45, 75, 61, 79, 84, 52, 63, 55, 107, 63, 64, 63, 88, 72, 61, 52, 79, 90, 71, 71, 67, 58, 62, 56, 56, 72, 93, 48, 105, 62, 59, 65, 55, 71, 77, 62, 60, 66, 70, 71, 70, 49, 57, 62, 132, 79, 56, 59, 69, 69, 66, 77, 75, 59, 48, 67, 64, 76, 58, 63, 63, 68, 73, 61, 84, 69, 63, 51, 59, 66, 44, 73, 65, 74, 60, 61, 70, 86, 69, 73, 78, 66, 68, 90, 76, 65, 76, 62, 54, 58, 60, 66, 69, 60, 75, 89, 73, 59, 75, 40, 82, 89, 69, 92, 72, 74, 61, 54, 101, 62, 43, 66, 60, 70, 66, 57, 49, 64, 76, 71, 44, 62, 87, 64, 75, 71, 70, 73, 54, 60, 56, 54, 70, 67, 79, 45, 64, 60, 61, 48, 68, 63, 61, 61, 72, 75, 98, 69, 48, 55, 87, 60, 72, 55, 67, 75, 70, 67, 73, 65, 63, 55, 71, 51, 64, 169, 51, 61, 75, 66, 62, 73, 69, 62, 57, 55, 76, 65, 85, 67, 68, 63, 66, 71, 57, 82, 64, 70, 77, 52, 44, 51, 53, 49, 83, 55, 54, 47, 57, 56, 70, 114, 69, 47, 62, 53, 85, 48, 61, 69, 83, 84, 65, 84, 55, 71, 62, 60, 60, 75, 64, 82, 70, 69, 66, 89, 61, 77, 47, 60, 79, 67, 62, 65, 44, 69, 76, 70, 66, 47, 74, 58, 57, 79, 69, 65, 71, 57, 73, 63, 61, 80, 62, 66, 52, 71, 71, 55, 78, 77, 59, 66, 61, 63, 60, 65, 64, 73, 88, 60, 86, 66, 66, 68, 63, 54, 65, 48, 56, 84, 62, 69, 78, 60, 69, 55, 74, 68, 70, 64, 53, 78, 67, 49, 49, 63, 74, 66, 73, 114, 61, 41, 67, 63, 56, 74, 98, 59, 62, 60, 80, 65, 61, 94, 58, 72, 64, 72, 57, 66, 82, 71, 69, 67, 71, 80, 78, 63, 61, 50, 57, 71, 79, 65, 64, 70, 76, 48, 64, 67, 49, 56, 94, 50, 57, 76, 58, 64, 67, 55, 49, 59, 55, 66, 79, 76, 58, 50, 64, 85, 43, 52, 61, 59, 40, 61, 38, 62, 66, 60, 100, 58, 68, 56, 73, 72, 53, 73, 75, 48, 62, 52, 77, 67, 37, 56, 59, 49, 80, 81, 52, 68, 63, 50, 37, 65, 64, 48, 64, 75, 86, 59, 74, 96, 60, 56, 62, 56, 87, 65, 73, 69, 55, 77, 66, 51, 53, 52, 76, 78, 84, 102, 73, 57, 65, 67, 62, 87, 71, 69, 97, 61, 108, 77, 80, 55, 75, 72, 58, 66, 70, 53, 70, 59, 96, 57, 82, 58, 65, 63, 58, 64, 42, 67, 57, 57, 85, 62, 87, 64, 71, 59, 55, 55, 54, 59, 53, 69, 67, 92, 65, 99, 55, 90, 71, 73, 93, 79, 53, 65, 75, 113, 73, 73, 79, 67, 65, 54, 66, 58, 70, 60, 60, 57, 85, 65, 53, 63, 56, 96, 55, 60, 50, 48, 74, 74, 67, 66, 59, 51, 71, 63, 59, 54, 113, 67, 61, 64, 73, 58, 72, 48, 77, 61, 67, 65, 77, 55, 67, 57, 65, 55, 51, 57, 63, 74, 56, 82, 46, 72, 90, 54, 106, 56, 73, 73, 51, 52, 65, 57, 56, 77, 55, 69, 74, 70, 64, 59, 48, 63, 66, 53, 105, 47, 63, 81, 77, 63, 74, 67, 53, 65, 58, 80, 59, 89, 63, 57, 93, 74, 36, 55, 55, 54, 74, 59, 50, 58, 63, 81, 70, 70, 61, 67, 60, 63, 70, 66, 63, 51, 59, 49, 67, 75, 50, 76, 58, 86, 93, 58, 67, 74, 49, 80, 71, 70, 62, 62, 56, 66, 51, 50, 94, 66, 53, 58, 63, 50, 68, 45, 58, 64, 72, 59, 69, 51, 40, 53, 63, 85, 58, 119, 55, 72, 85, 71, 45, 52, 42, 63, 66, 58, 54, 60, 39, 80, 68, 75, 60, 65, 65, 53, 56, 92, 56, 63, 76, 61, 69, 63, 37, 68, 76, 71, 63, 52, 57, 64, 72, 81, 85, 45, 64, 57, 60, 65, 57, 61, 108, 64, 76, 55, 47, 54, 76, 50, 79, 65, 69, 71, 70, 53, 82, 52, 63, 50, 61, 77, 63, 55, 72, 68, 58, 51, 71, 72, 57, 71, 74, 64, 61, 44, 84, 70, 51, 101, 85, 59, 58, 77, 75, 73, 53, 55, 53, 67, 76, 62, 55, 70, 60, 97, 71, 64, 63, 70, 71, 51, 57, 56, 55, 67, 71, 77, 65, 76, 53, 83, 63, 64, 56, 75, 54, 64, 66, 55, 53, 59, 50, 72, 53, 59, 63, 76, 58, 70, 94, 68, 62, 73, 49, 54, 74, 78, 44, 72, 60, 76, 51, 57, 65, 68, 74, 59, 56, 68, 73, 62, 56, 47, 71, 82, 85, 73, 60, 64, 49, 52, 52, 74, 77, 74, 66, 68, 67, 77, 62, 65, 53, 47, 91, 58, 66, 56, 65, 88, 82, 68, 71, 82, 56, 54, 52, 64, 55, 38, 78, 
59, 42, 63, 62, 65, 72, 65, 90, 82, 57, 58, 57, 75, 62, 50, 51, 63, 49, 71, 96, 68, 69, 76, 56, 60, 71, 72, 57, 66, 56, 81, 55, 57, 73, 60, 59, 80, 53, 60, 81, 59, 71, 58, 46, 73, 86, 54, 65, 78, 74, 56, 62, 64, 56, 49, 64, 74, 71, 62, 72, 60, 89, 55, 61, 56, 76, 55, 67, 56, 83, 74, 62, 64, 81, 69, 61, 69, 75, 82, 55, 62, 79, 64, 93, 72, 72, 64, 81, 80, 65, 86, 55, 67, 74, 76, 65, 103, 60, 49, 66, 63, 78, 55, 89, 58, 54, 95, 71, 59, 54, 50, 58, 63, 56, 73, 70, 71, 56, 67, 85, 62, 89, 81, 63, 50, 51, 76, 51, 43, 75, 77, 70, 98, 68, 61, 47, 83, 59, 38, 46, 63, 65, 61, 42, 62, 50, 83, 51, 80, 64, 75, 55, 52, 56, 62, 63, 65, 67, 57, 81, 81, 57, 55, 45, 48, 54, 63, 69, 67, 89, 54, 121, 46, 77, 81, 58, 60, 54, 60, 68, 93, 65, 73, 65, 74, 80, 53, 57, 58, 71, 74, 71, 53, 49, 52, 75, 58, 58, 57, 61, 49, 81, 56, 54, 66, 91, 68, 80, 63, 69, 46, 75, 58, 94, 55, 57, 73, 49, 49, 53, 89, 60, 73, 64, 60, 75, 57, 66, 76, 49, 46, 152, 59, 52, 48, 50, 71, 70, 84, 68, 59, 49, 64, 70, 84, 78, 58, 83, 63, 53, 49, 62, 63, 60, 50, 55, 49, 49, 71, 56, 56, 59, 62, 55, 48, 73, 118, 65, 50, 89, 57, 52, 65, 60, 73, 55, 56, 53, 81, 74, 60, 64, 57, 78, 55, 55, 81, 52, 60, 69, 78, 50, 82, 72, 90, 90, 87, 68, 59, 55, 76, 50, 58, 57, 58, 118, 81, 68, 72, 107, 62, 63, 100, 48, 70, 53, 61, 71, 60, 57, 53, 72, 57, 53, 89, 104, 66, 110, 56, 66, 61, 36, 60, 60, 57, 72, 64, 100, 52, 55, 63, 71, 62, 87, 88, 72, 68, 51, 60, 61, 67, 81, 125, 43, 62, 67, 75, 80, 69, 69, 62, 57, 40, 58, 50, 58, 66, 63, 61, 79, 69, 64, 68, 51, 71, 63, 99, 98, 59, 66, 45, 67, 50, 45, 68, 79, 61, 58, 72, 74, 67, 76, 57, 91, 56, 67, 87, 75, 79, 93, 77, 60, 56, 63, 68, 66, 78, 60, 53, 67, 79, 56, 50, 53, 66, 68, 76, 76, 56, 64, 68, 56, 49, 49, 87, 67, 53, 41, 56, 81, 63, 67, 56, 86, 57, 57, 45, 68, 63, 53, 75, 54, 57, 79, 70, 59, 76, 76, 68, 62, 57, 74, 67, 63, 62, 85, 49, 73, 61, 82, 54, 73, 72, 64, 57, 58, 113, 65, 54, 55, 60, 75, 54, 80, 77, 32, 67, 71, 67, 67, 69, 68, 60, 46, 82, 66, 121, 54, 62, 58, 56, 54, 67, 53, 61, 70, 52, 72, 50, 76, 71, 62, 73, 62, 77, 54, 53, 79, 70, 52, 62, 50, 66, 43, 68, 49, 61, 59, 52, 78, 61, 93, 52, 85, 85, 56, 62, 49, 48, 57, 86, 69, 52, 73, 69, 57, 55, 63, 67, 81, 95, 64, 67, 67, 54, 69, 57, 77, 56, 59, 78, 49, 58, 57, 62, 61, 70, 69, 45, 63, 59, 63, 66, 57, 66, 79, 52, 61, 52, 54, 51, 60, 74, 55, 59, 69, 52, 58, 65, 57, 71, 68, 55, 77, 85, 54, 72, 69, 82, 78, 78, 63, 59, 60, 63, 87, 75, 61, 77, 57, 83, 48, 64, 48, 58, 68, 69, 70, 79, 61, 65, 60, 64, 53, 72, 83, 69, 73, 85, 82, 52, 84, 62, 60, 84, 91, 73, 66, 79, 83, 65, 86, 57, 65, 90, 55, 59, 79, 54, 56, 67, 81, 66, 88, 71, 66, 58, 61, 62, 70, 70, 65, 61, 50, 77, 67, 76, 60, 70, 67, 57, 64, 84, 84, 41, 75, 66, 86, 71, 67, 51, 72, 83, 58, 88, 67, 74, 79, 63, 63, 76, 78, 52, 82, 93, 57, 84, 65, 58, 56, 79, 62, 61, 77, 61, 78, 71, 43, 73, 42, 56, 78, 64, 69, 63, 94, 65, 52, 74, 82, 55, 76, 72, 54, 59, 63, 56, 44, 45, 59, 69, 62, 49, 62, 72, 86, 80, 51, 55, 51, 62, 97, 77, 63, 77, 64, 58, 59, 49, 42, 46, 83, 71, 56, 61, 71, 61, 70, 91, 45, 55, 66, 96, 56, 94, 48, 48, 64, 71, 57, 76, 69, 57, 72, 65, 87, 44, 45, 64, 65, 68, 81, 59, 63, 57, 63, 76, 49, 66, 52, 69, 200, 62, 71, 66, 66, 53, 73, 43, 70, 93, 57, 54, 45, 69, 60, 53, 53, 129, 73, 68, 64, 50, 55, 84, 78, 71, 51, 60, 109, 57, 75, 85, 71, 63, 55, 61, 51, 59, 52, 61, 72, 61, 59, 59, 80, 70, 71, 84, 59, 58, 65, 41, 57, 67, 61, 61, 69, 68, 74, 47, 57, 62, 66, 65, 54, 43, 65, 48, 50, 77, 85, 68, 59, 71, 100, 67, 56, 53, 89, 55, 63, 76, 59, 47, 49, 69, 65, 57, 94, 79, 64, 63, 59, 64, 65, 75, 62, 53, 53, 66, 84, 101, 78, 
74, 75, 50, 69, 62, 64, 65, 64, 54, 67, 75, 59, 76, 70, 69, 78, 68, 67, 74, 60, 93, 67, 56, 66, 55, 59, 52, 42, 88, 79, 68, 76, 46, 50, 42, 55, 61, 71, 55, 63, 66, 71, 62, 61, 67, 65, 78, 79, 91, 72, 57, 50, 73, 77, 81, 68, 81, 68, 65, 69, 59, 57, 67, 77, 58, 69, 65, 62, 71, 69, 86, 62, 63, 84, 50, 73, 60, 68, 85, 61, 70, 52, 63, 54, 66, 106, 71, 80, 47, 79, 60, 96, 60, 69, 71, 48, 64, 64, 48, 85, 77, 71, 60, 53, 59, 59, 75, 64, 88, 78, 59, 52, 58, 49, 129, 53, 72, 72, 83, 70, 94, 40, 78, 71, 91, 59, 60, 63, 70, 57, 45, 65, 41, 69, 62, 60, 59, 69, 88, 78, 70, 53, 131, 50, 43, 77, 70, 65, 76, 59, 78, 64, 62, 51, 59, 67, 60, 71, 46, 75, 58, 54, 65, 40, 53, 42, 64, 50, 79, 69, 71, 72, 73, 68, 77, 67, 77, 76, 46, 61, 55, 57, 59, 70, 63, 67, 42, 62, 63, 58, 65, 76, 59, 58, 48, 54, 61, 52, 72, 70, 51, 72, 71, 68, 76, 96, 60, 80, 70, 70, 69, 51, 57, 53, 68, 91, 61, 65, 68, 43, 60, 88, 60, 47, 64, 60, 66, 81, 57, 45, 73, 76, 77, 64, 59, 61, 45, 75, 54, 56, 85, 63, 72, 45, 84, 89, 66, 58, 86, 58, 74, 58, 61, 73, 59, 80, 69, 62, 63, 57, 46, 40, 70, 76, 58, 68, 88, 55, 59, 81, 84, 60, 66, 66, 53, 68, 58, 64, 60, 66, 78, 84, 55, 68, 61, 108, 65, 67, 113, 74, 106, 69, 77, 59, 51, 66, 55, 65, 59, 38, 48, 68, 85, 66, 80, 44, 65, 79, 82, 60, 61, 73, 61, 69, 76, 62, 58, 75, 47, 82, 72, 94, 71, 76, 58, 69, 51, 66, 59, 76, 59, 65, 49, 61, 79, 65, 61, 75, 65, 55, 81, 63, 59, 82, 65, 59, 71, 61, 54, 75, 63, 61, 68, 85, 41, 56, 65, 74, 78, 56, 78, 77, 66, 76, 44, 62, 64, 75, 50, 71, 74, 87, 95, 77, 56, 34, 61, 78, 63, 55, 63, 63, 53, 66, 91, 56, 62, 69, 63, 64, 64, 109, 52, 63, 64, 96, 46, 65, 86, 79, 86, 64, 80, 54, 83, 97, 93, 73, 86, 65, 47, 124, 57, 60, 61, 79, 56, 52, 60, 67, 57, 53, 66, 66, 48, 60, 68, 79, 61, 75, 56, 70, 66, 63, 74, 68, 86, 57, 72, 84, 66, 87, 60, 80, 42, 69, 71, 58, 68, 61, 56, 50, 54, 71, 62, 59, 44, 69, 73, 64, 72, 104, 81, 55, 60, 77, 64, 82, 92, 54, 75, 70, 68, 59, 59, 69, 61, 54, 78, 83, 66, 62, 50, 80, 72, 52, 81, 61, 68, 75, 78, 58, 49, 57, 49, 78, 49, 51, 69, 44, 48, 57, 106, 66, 68, 79, 77, 63, 64, 61, 82, 68, 55, 50, 55, 79, 69, 71, 71, 49, 42, 77, 86, 90, 74, 64, 77, 63, 74, 85, 52, 75, 85, 67, 115, 51, 55, 55, 53, 80, 61, 60, 48, 69, 49, 74, 76, 44, 44, 79, 64, 83, 57, 60, 57, 65, 60, 54, 59, 56, 70, 73, 46, 71, 82, 58, 77, 98, 60, 72, 69, 85, 88, 47, 141, 54, 78, 63, 75, 75, 66, 64, 50, 61, 72, 59, 49, 65, 49, 55, 63, 69, 59, 65, 66, 71, 66, 59, 58, 69, 83, 71, 61, 63, 58, 59, 72, 77, 66, 64, 57, 81, 59, 52, 76, 77, 65, 81, 61, 52, 68, 87, 67, 61, 63, 81, 61, 56, 64, 84, 67, 63, 68, 57, 69, 66, 78, 63, 58, 70, 57, 80, 82, 75, 66, 75, 60, 69, 64, 67, 77, 59, 64, 60, 58, 69, 59, 90, 55, 76, 61, 70, 61, 76, 60, 59, 65, 62, 75, 65, 66, 54, 56, 68, 59, 80, 78, 52, 135, 58, 63, 73, 114, 67, 62, 47, 63, 67, 54, 58, 59, 81, 55, 72, 76, 43, 73, 74, 61, 49, 68, 67, 92, 69, 57, 39, 54, 52, 78, 85, 77, 55, 57, 67, 53, 57, 86, 78, 108, 76, 70, 65, 52, 81, 55, 68, 49, 57, 66, 64, 62, 68, 86, 65, 83, 73, 56, 96, 45, 73, 59, 66, 60, 86, 57, 61, 56, 63, 57, 72, 51, 52, 44, 61, 76, 48, 53, 57, 83, 47, 76, 76, 75, 53, 67, 56, 54, 62, 60, 72, 50, 87, 56, 64, 64, 35, 60, 56, 74, 44, 71, 67, 68, 67, 55, 92, 73, 62, 57, 71, 71, 47, 65, 64, 67, 63, 44, 67, 86, 59, 66, 82, 54, 61, 77, 62, 64, 72, 28, 107, 64, 54, 62, 72, 48, 61, 90, 52, 80, 48, 73, 66, 69, 86, 71, 68, 71, 69, 66, 98, 54, 82, 73, 86, 73, 67, 79, 93, 84, 66, 73, 40, 58, 50, 63, 39, 35, 63, 76, 73, 114, 45, 44, 86, 73, 61, 38, 60, 49, 79, 135, 59, 54, 60, 75, 69, 74, 56, 52, 59, 65, 74, 58, 74, 70, 60, 86, 67, 67, 57, 57, 76, 60, 54, 
52, 77, 62, 65, 51, 53, 58, 73, 57, 56, 71, 56, 88, 78, 68, 96, 55, 77, 65, 55, 65, 66, 80, 67, 69, 58, 52, 72, 43, 66, 71, 59, 69, 71, 64, 67, 96, 67, 69, 78, 55, 101, 50, 88, 51, 55, 80, 70, 69, 67, 75, 96, 50, 49, 58, 69, 65, 53, 67, 66, 70, 93, 68, 79, 60, 68, 103, 76, 69, 54, 56, 71, 72, 55, 55, 55, 54, 60, 83, 57, 58, 52, 68, 50, 76, 65, 71, 57, 78, 78, 39, 72, 74, 74, 74, 61, 120, 81, 70, 61, 55, 77, 63, 60, 60, 55, 64, 63, 74, 65, 67, 56, 67, 76, 64, 67, 41, 50, 74, 84, 77, 54, 69, 54, 90, 55, 58, 62, 66, 46, 66, 63, 84, 74, 60, 52, 58, 78, 73, 68, 115, 55, 75, 56, 71, 46, 48, 44, 91, 52, 70, 69, 77, 83, 82, 61, 50, 36, 72, 64, 74, 67, 52, 65, 67, 60, 74, 49, 54, 43, 76, 68, 79, 56, 55, 74, 86, 103, 78, 50, 69, 69, 60, 59, 57, 67, 84, 84, 68, 35, 70, 72, 61, 62, 61, 62, 68, 79, 69, 49, 69, 78, 85, 67, 57, 65, 67, 128, 70, 80, 61, 87, 44, 45, 62, 49, 59, 77, 84, 62, 53, 61, 58, 79, 78, 103, 117, 85, 121, 55, 61, 87, 67, 68, 89, 76, 62, 44, 56, 65, 66, 57, 83, 70, 53, 75, 51, 73, 58, 59, 102, 75, 76, 60, 68, 83, 68, 59, 53, 30, 82, 62, 47, 57, 58, 74, 102, 49, 67, 54, 91, 54, 59, 66, 49, 77, 71, 50, 105, 60, 65, 57, 84, 69, 95, 56, 76, 67, 46, 74, 64, 66, 59, 74, 59, 116, 67, 85, 45, 68, 61, 77, 68, 55, 85, 91, 64, 86, 45, 58, 62, 60, 57, 58, 79, 55, 64, 60, 68, 74, 72, 97, 57, 65, 69, 56, 64, 58, 44, 43, 42, 59, 57, 78, 53, 51, 61, 73, 104, 76, 69, 80, 45, 87, 71, 62, 57, 64, 66, 113, 54, 77, 90, 76, 62, 70, 76, 72, 95, 51, 43, 94, 76, 60, 57, 62, 53, 49, 68, 76, 68, 57, 49, 55, 51, 71, 70, 76, 56, 55, 62, 62, 82, 70, 67, 77, 48, 64, 33, 49, 53, 52, 65, 64, 56, 53, 56, 90, 64, 86, 79, 54, 58, 63, 69, 46, 65, 149, 84, 65, 53, 55, 50, 50, 67, 73, 44, 49, 82, 60, 84, 51, 80, 61, 76, 60, 64, 78, 53, 43, 72, 95, 77, 64, 69, 43, 75, 59, 109, 72, 62, 71, 65, 72, 69, 66, 65, 75, 60, 93, 63, 73, 56, 54, 52, 67, 68, 92, 75, 90, 70, 67, 51, 56, 65, 109, 70, 101, 88, 67, 66, 56, 61, 71, 65, 69, 38, 65, 70, 80, 78, 64, 75, 46, 66, 39, 66, 70, 63, 70, 53, 59, 44, 45, 54, 79, 84, 102, 64, 87, 66, 58, 49, 74, 47, 61, 69, 62, 75, 62, 90, 59, 43, 60, 40, 55, 79, 66, 61, 51, 69, 54, 107, 46, 51, 69, 54, 67, 51, 57, 55, 62, 61, 61, 75, 63, 59, 62, 77, 78, 87, 56, 94, 64, 61, 57, 80, 64, 69, 58, 82, 64, 47, 93, 39, 53, 47, 74, 56, 72, 58, 68, 43, 55, 60, 72, 63, 85, 66, 54, 71, 61, 55, 65, 65, 103, 61, 68, 80, 103, 81, 60, 56, 66, 93, 80, 77, 53, 55, 107, 52, 47, 56, 65, 99, 56, 54, 80, 65, 43, 74, 54, 62, 50, 35, 50, 72, 100, 50, 69, 42, 50, 69, 38, 60, 52, 63, 83, 63, 41, 57, 63, 66, 63, 74, 54, 66, 66, 54, 54, 65, 68, 65, 59, 64, 109, 58, 67, 38, 72, 68, 59, 67, 48, 94, 79, 86, 122, 82, 83, 60, 55, 96, 58, 60, 57, 52, 65, 69, 29, 61, 64, 57, 73, 87, 42, 81, 68, 62, 52, 54, 71, 42, 63, 55, 57, 52, 46, 72, 44, 52, 64, 65, 73, 70, 52, 57, 42, 55, 50, 49, 52, 55, 45, 126, 43, 68, 54, 58, 56, 53, 62, 64, 53, 61, 43, 71, 54, 85, 79, 68, 69, 88, 59, 52, 91, 54, 75, 64, 63, 51, 72, 79, 85, 67, 84, 66, 63, 63, 75, 57, 64, 78, 93, 80, 81, 68, 56, 54, 60, 84, 43, 79, 105, 61, 44, 47, 55, 78, 85, 68, 87, 69, 49, 66, 52, 72, 40, 104, 77, 59, 64, 87, 60, 56, 67, 53, 56, 84, 78, 90, 61, 55, 58, 63, 74, 58, 40, 55, 50, 51, 50, 66, 42, 91, 73, 48, 87, 76, 88, 83, 19, 61, 66, 69, 42, 79, 68, 43, 70, 62, 73, 63, 34, 66, 87, 67, 81, 74, 86, 64, 61, 59, 60, 93, 65, 70, 54, 48, 56, 55, 55, 62, 59, 52, 55, 69, 81, 61, 54, 66, 70, 92, 51, 61, 67, 118, 53, 55, 85, 75, 79, 53, 73, 58, 54, 89, 52, 37, 85, 86, 61, 66, 57, 80, 53, 62, 57, 56, 70, 87, 65, 54, 50, 47, 84, 58, 50, 102, 61, 47, 71, 61, 46, 59, 72, 76, 63, 58, 49, 
60, 57, 92, 52, 85, 76, 57, 68, 75, 75, 71, 70, 67, 69, 48, 59, 56, 58, 76, 52, 50, 29, 66, 63, 68, 78, 62, 22, 93, 59, 50, 53, 71, 60, 44, 48, 66, 85, 120, 61, 88, 113, 79, 137, 67, 77, 52, 54, 111, 54, 73, 55, 47, 66, 71, 69, 69, 61]) # parameters
i86 = Internal("op86", "TENSOR_QUANT8_ASYMM", "{1, 1001}, 0.14927123487, 69") # intermediate result
i87 = Parameter("op87", "TENSOR_INT32", "{2}, 0, 0", [1, 1001]) # parameters
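# Editorial note (not in the generated file): for TENSOR_QUANT8_ASYMM operands the
# type string above encodes "{shape}, scale, zeroPoint", so op86 maps each quantized
# byte q back to a real value as scale * (q - zeroPoint). A minimal sketch under that
# assumption (helper name is hypothetical, for illustration only):
def _dequantize(q, scale=0.14927123487, zero_point=69):
    # NNAPI asymmetric-quantization formula; e.g. _dequantize(69) == 0.0
    return scale * (q - zero_point)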
-model = model.Conv(i88, i2, i1, i104, i105, i106, i107).To(i0)
-model = model.DepthWiseConv(i0, i29, i28, i108, i109, i110, i111, i112).To(i27)
-model = model.Conv(i27, i32, i31, i113, i114, i115, i116).To(i30)
-model = model.DepthWiseConv(i30, i35, i34, i117, i118, i119, i120, i121).To(i33)
-model = model.Conv(i33, i38, i37, i122, i123, i124, i125).To(i36)
-model = model.DepthWiseConv(i36, i41, i40, i126, i127, i128, i129, i130).To(i39)
-model = model.Conv(i39, i44, i43, i131, i132, i133, i134).To(i42)
-model = model.DepthWiseConv(i42, i47, i46, i135, i136, i137, i138, i139).To(i45)
-model = model.Conv(i45, i50, i49, i140, i141, i142, i143).To(i48)
-model = model.DepthWiseConv(i48, i53, i52, i144, i145, i146, i147, i148).To(i51)
-model = model.Conv(i51, i56, i55, i149, i150, i151, i152).To(i54)
-model = model.DepthWiseConv(i54, i59, i58, i153, i154, i155, i156, i157).To(i57)
-model = model.Conv(i57, i62, i61, i158, i159, i160, i161).To(i60)
-model = model.DepthWiseConv(i60, i65, i64, i162, i163, i164, i165, i166).To(i63)
-model = model.Conv(i63, i68, i67, i167, i168, i169, i170).To(i66)
-model = model.DepthWiseConv(i66, i71, i70, i171, i172, i173, i174, i175).To(i69)
-model = model.Conv(i69, i74, i73, i176, i177, i178, i179).To(i72)
-model = model.DepthWiseConv(i72, i77, i76, i180, i181, i182, i183, i184).To(i75)
-model = model.Conv(i75, i80, i79, i185, i186, i187, i188).To(i78)
-model = model.DepthWiseConv(i78, i5, i4, i189, i190, i191, i192, i193).To(i3)
-model = model.Conv(i3, i8, i7, i194, i195, i196, i197).To(i6)
-model = model.DepthWiseConv(i6, i11, i10, i198, i199, i200, i201, i202).To(i9)
-model = model.Conv(i9, i14, i13, i203, i204, i205, i206).To(i12)
-model = model.DepthWiseConv(i12, i17, i16, i207, i208, i209, i210, i211).To(i15)
-model = model.Conv(i15, i20, i19, i212, i213, i214, i215).To(i18)
-model = model.DepthWiseConv(i18, i23, i22, i216, i217, i218, i219, i220).To(i21)
-model = model.Conv(i21, i26, i25, i221, i222, i223, i224).To(i24)
-model = model.AveragePool(i24, i225, i226, i227, i228, i229, i230).To(i81)
-model = model.Conv(i81, i84, i83, i231, i232, i233, i234).To(i82)
-model = model.Reshape(i82, i87).To(i86)
-model = model.SoftMax(i86, i235).To(i85)
+model = model.Operation("CONV_2D", i88, i2, i1, i104, i105, i106, i107).To(i0)
+model = model.Operation("DEPTHWISE_CONV_2D", i0, i29, i28, i108, i109, i110, i111, i112).To(i27)
+model = model.Operation("CONV_2D", i27, i32, i31, i113, i114, i115, i116).To(i30)
+model = model.Operation("DEPTHWISE_CONV_2D", i30, i35, i34, i117, i118, i119, i120, i121).To(i33)
+model = model.Operation("CONV_2D", i33, i38, i37, i122, i123, i124, i125).To(i36)
+model = model.Operation("DEPTHWISE_CONV_2D", i36, i41, i40, i126, i127, i128, i129, i130).To(i39)
+model = model.Operation("CONV_2D", i39, i44, i43, i131, i132, i133, i134).To(i42)
+model = model.Operation("DEPTHWISE_CONV_2D", i42, i47, i46, i135, i136, i137, i138, i139).To(i45)
+model = model.Operation("CONV_2D", i45, i50, i49, i140, i141, i142, i143).To(i48)
+model = model.Operation("DEPTHWISE_CONV_2D", i48, i53, i52, i144, i145, i146, i147, i148).To(i51)
+model = model.Operation("CONV_2D", i51, i56, i55, i149, i150, i151, i152).To(i54)
+model = model.Operation("DEPTHWISE_CONV_2D", i54, i59, i58, i153, i154, i155, i156, i157).To(i57)
+model = model.Operation("CONV_2D", i57, i62, i61, i158, i159, i160, i161).To(i60)
+model = model.Operation("DEPTHWISE_CONV_2D", i60, i65, i64, i162, i163, i164, i165, i166).To(i63)
+model = model.Operation("CONV_2D", i63, i68, i67, i167, i168, i169, i170).To(i66)
+model = model.Operation("DEPTHWISE_CONV_2D", i66, i71, i70, i171, i172, i173, i174, i175).To(i69)
+model = model.Operation("CONV_2D", i69, i74, i73, i176, i177, i178, i179).To(i72)
+model = model.Operation("DEPTHWISE_CONV_2D", i72, i77, i76, i180, i181, i182, i183, i184).To(i75)
+model = model.Operation("CONV_2D", i75, i80, i79, i185, i186, i187, i188).To(i78)
+model = model.Operation("DEPTHWISE_CONV_2D", i78, i5, i4, i189, i190, i191, i192, i193).To(i3)
+model = model.Operation("CONV_2D", i3, i8, i7, i194, i195, i196, i197).To(i6)
+model = model.Operation("DEPTHWISE_CONV_2D", i6, i11, i10, i198, i199, i200, i201, i202).To(i9)
+model = model.Operation("CONV_2D", i9, i14, i13, i203, i204, i205, i206).To(i12)
+model = model.Operation("DEPTHWISE_CONV_2D", i12, i17, i16, i207, i208, i209, i210, i211).To(i15)
+model = model.Operation("CONV_2D", i15, i20, i19, i212, i213, i214, i215).To(i18)
+model = model.Operation("DEPTHWISE_CONV_2D", i18, i23, i22, i216, i217, i218, i219, i220).To(i21)
+model = model.Operation("CONV_2D", i21, i26, i25, i221, i222, i223, i224).To(i24)
+model = model.Operation("AVERAGE_POOL_2D", i24, i225, i226, i227, i228, i229, i230).To(i81)
+model = model.Operation("CONV_2D", i81, i84, i83, i231, i232, i233, i234).To(i82)
+model = model.Operation("RESHAPE", i82, i87).To(i86)
+model = model.Operation("SOFTMAX", i86, i235).To(i85)
input0 = {i88: [132, 137, 131, 144, 155, 144, 119, 126, 110, 128, 137, 121, 110, 120, 103, 105, 114, 99, 133, 145, 129, 124, 136, 118, 124, 136, 120, 112, 124, 111, 121, 132, 119, 125, 137, 121, 133, 140, 126, 133, 139, 128, 124, 129, 112, 117, 123, 110, 88, 95, 79, 96, 103, 87, 105, 113, 101, 83, 90, 74, 99, 106, 92, 103, 109, 100, 88, 94, 83, 86, 93, 80, 73, 76, 68, 59, 64, 56, 17, 19, 9, 17, 19, 9, 0, 0, 0, 20, 21, 12, 55, 56, 49, 105, 106, 97, 99, 99, 86, 116, 113, 103, 129, 126, 116, 135, 132, 121, 121, 118, 107, 120, 120, 109, 111, 108, 100, 117, 114, 106, 101, 96, 89, 110, 107, 99, 92, 86, 79, 96, 95, 86, 96, 93, 83, 94, 89, 80, 109, 106, 96, 140, 134, 125, 130, 125, 116, 126, 121, 111, 146, 138, 129, 163, 156, 145, 159, 152, 140, 181, 175, 166, 175, 167, 159, 181, 173, 164, 174, 171, 161, 151, 148, 138, 167, 164, 154, 153, 152, 141, 148, 141, 132, 162, 157, 147, 153, 148, 136, 161, 156, 144, 156, 153, 140, 138, 136, 123, 131, 135, 123, 115, 112, 100, 123, 128, 112, 118, 117, 106, 104, 106, 94, 120, 121, 112, 112, 112, 100, 120, 122, 110, 112, 114, 100, 102, 105, 90, 109, 112, 97, 106, 107, 91, 127, 129, 115, 130, 128, 113, 110, 108, 93, 121, 119, 103, 99, 96, 81, 87, 85, 70, 112, 113, 95, 118, 119, 100, 106, 107, 89, 131, 129, 114, 113, 111, 93, 129, 120, 107, 122, 120, 105, 111, 111, 95, 96, 95, 84, 90, 90, 76, 96, 98, 72, 85, 87, 75, 80, 82, 72, 13, 13, 0, 24, 23, 12, 6, 6, 0, 52, 51, 40, 104, 103, 94, 118, 120, 107, 125, 124, 115, 133, 135, 123, 120, 121, 114, 136, 139, 131, 150, 153, 145, 144, 145, 138, 148, 151, 143, 154, 155, 148, 130, 133, 126, 138, 142, 134, 149, 152, 144, 150, 153, 145, 156, 159, 151, 159, 165, 156, 151, 157, 148, 145, 151, 142, 149, 155, 147, 161, 167, 158, 159, 165, 156, 146, 152, 143, 150, 153, 147, 166, 165, 154, 227, 223, 206, 249, 245, 226, 255, 251, 234, 255, 253, 236, 255, 255, 239, 255, 255, 250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 240, 238, 236, 221, 206, 207, 191, 114, 111, 98, 143, 140, 129, 230, 228, 215, 245, 243, 230, 216, 216, 202, 158, 158, 144, 103, 103, 89, 115, 118, 103, 222, 221, 208, 255, 255, 247, 255, 255, 255, 255, 255, 254, 255, 255, 254, 255, 255, 254, 255, 255, 251, 250, 248, 235, 240, 237, 222, 251, 249, 234, 255, 255, 243, 255, 255, 246, 255, 255, 247, 255, 255, 246, 245, 241, 224, 49, 45, 28, 12, 7, 0, 227, 223, 206, 255, 255, 243, 255, 251, 236, 236, 232, 218, 235, 231, 216, 239, 234, 220, 243, 239, 224, 255, 255, 242, 255, 255, 246, 255, 255, 245, 255, 255, 245, 255, 255, 245, 255, 255, 245, 255, 255, 245, 255, 255, 245, 255, 255, 244, 255, 255, 245, 255, 255, 245, 255, 255, 244, 255, 255, 247, 255, 255, 248, 255, 255, 247, 255, 255, 247, 255, 255, 247, 255, 255, 247, 255, 255, 245, 255, 255, 244, 255, 255, 244, 255, 255, 245, 255, 255, 244, 255, 255, 244, 255, 255, 244, 255, 255, 247, 255, 255, 245, 255, 255, 244, 255, 255, 244, 255, 255, 244, 255, 255, 241, 255, 255, 241, 255, 255, 241, 255, 255, 240, 255, 255, 241, 255, 255, 241, 255, 255, 241, 255, 255, 241, 254, 251, 236, 242, 240, 224, 210, 208, 193, 219, 216, 201, 234, 232, 216, 255, 255, 244, 255, 255, 247, 255, 255, 251, 255, 255, 251, 255, 255, 250, 255, 255, 255, 19, 19, 3, 25, 25, 11, 101, 104, 87, 134, 147, 127, 146, 158, 140, 165, 175, 158, 167, 177, 160, 159, 168, 154, 151, 160, 146, 140, 152, 136, 146, 157, 145, 146, 157, 145, 147, 158, 148, 155, 163, 152, 169, 177, 165, 166, 172, 161, 163, 170, 159, 156, 163, 149, 153, 160, 146, 139, 146, 132, 128, 135, 124, 135, 142, 130, 155, 162, 148, 152, 
158, 145, 128, 135, 124, 127, 134, 122, 130, 134, 124, 121, 125, 114, 40, 46, 35, 41, 40, 32, 0, 0, 0, 12, 13, 6, 90, 91, 82, 105, 106, 97, 133, 132, 124, 148, 148, 136, 153, 152, 141, 158, 155, 145, 149, 146, 135, 153, 151, 140, 154, 154, 142, 153, 151, 140, 145, 142, 134, 146, 143, 135, 136, 133, 125, 135, 131, 124, 135, 131, 124, 158, 155, 145, 138, 135, 125, 151, 148, 138, 158, 155, 145, 158, 155, 145, 182, 176, 167, 174, 168, 159, 174, 169, 157, 172, 167, 158, 167, 161, 152, 179, 174, 164, 171, 168, 158, 178, 175, 164, 181, 178, 168, 158, 155, 145, 168, 168, 156, 167, 161, 152, 156, 153, 142, 153, 151, 138, 160, 158, 145, 157, 154, 142, 147, 145, 132, 138, 141, 126, 135, 135, 122, 138, 141, 126, 135, 135, 122, 134, 136, 124, 127, 129, 117, 125, 127, 114, 135, 137, 125, 142, 144, 130, 145, 148, 133, 140, 140, 126, 137, 137, 121, 145, 145, 129, 147, 145, 130, 148, 148, 132, 147, 145, 130, 137, 137, 121, 152, 152, 136, 146, 146, 130, 135, 136, 120, 138, 136, 119, 130, 130, 114, 139, 137, 122, 139, 139, 123, 141, 141, 126, 151, 151, 135, 137, 136, 123, 127, 128, 112, 114, 115, 97, 108, 110, 98, 77, 79, 67, 12, 10, 0, 41, 41, 27, 0, 0, 0, 84, 86, 74, 102, 104, 92, 138, 140, 128, 156, 158, 146, 142, 144, 132, 141, 142, 133, 147, 148, 141, 138, 142, 134, 145, 147, 139, 151, 155, 147, 146, 150, 142, 152, 156, 148, 141, 147, 138, 145, 151, 142, 144, 147, 141, 153, 159, 150, 161, 167, 158, 154, 160, 151, 159, 165, 156, 159, 165, 156, 170, 176, 167, 170, 173, 165, 132, 135, 130, 167, 171, 163, 166, 164, 151, 254, 249, 235, 255, 251, 234, 255, 255, 238, 255, 255, 240, 255, 255, 242, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 251, 238, 245, 243, 230, 185, 182, 169, 164, 161, 151, 137, 135, 122, 247, 244, 231, 239, 239, 225, 229, 228, 215, 141, 141, 128, 73, 76, 61, 106, 106, 95, 254, 251, 238, 255, 255, 247, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 246, 250, 248, 235, 244, 242, 229, 255, 255, 240, 255, 255, 250, 255, 255, 250, 255, 255, 251, 255, 255, 250, 42, 38, 21, 28, 26, 8, 180, 178, 161, 255, 255, 248, 255, 255, 249, 252, 247, 233, 240, 235, 221, 242, 238, 223, 245, 240, 228, 255, 255, 245, 255, 255, 250, 255, 255, 251, 255, 255, 251, 255, 255, 251, 255, 255, 250, 255, 255, 250, 255, 255, 250, 255, 255, 250, 255, 255, 250, 255, 255, 249, 255, 255, 252, 255, 255, 252, 255, 255, 252, 255, 255, 252, 255, 255, 253, 255, 255, 253, 255, 255, 252, 255, 255, 251, 255, 255, 250, 255, 255, 249, 255, 255, 249, 255, 255, 249, 255, 255, 247, 255, 255, 246, 255, 255, 247, 255, 255, 249, 255, 255, 247, 255, 255, 246, 255, 255, 247, 255, 255, 247, 255, 255, 249, 255, 255, 247, 255, 255, 247, 255, 255, 247, 255, 255, 246, 255, 255, 247, 255, 255, 246, 255, 255, 242, 241, 238, 223, 214, 212, 197, 227, 223, 208, 243, 241, 226, 255, 255, 245, 255, 255, 254, 255, 255, 253, 255, 255, 253, 255, 255, 254, 182, 180, 165, 18, 16, 3, 30, 29, 16, 255, 255, 245, 162, 168, 161, 163, 171, 162, 165, 174, 162, 161, 172, 160, 163, 172, 160, 151, 162, 149, 153, 164, 152, 154, 162, 152, 157, 167, 157, 165, 173, 164, 166, 175, 165, 170, 178, 169, 176, 182, 173, 167, 173, 164, 170, 177, 165, 160, 167, 153, 164, 171, 160, 161, 167, 156, 166, 172, 161, 168, 174, 163, 168, 172, 162, 155, 158, 148, 157, 159, 149, 145, 147, 137, 130, 134, 124, 31, 33, 23, 15, 17, 7, 3, 2, 0, 100, 102, 92, 116, 115, 106, 135, 135, 126, 146, 145, 134, 149, 149, 138, 168, 168, 156, 168, 166, 155, 161, 159, 148, 165, 162, 152, 153, 152, 141, 
151, 148, 140, 156, 153, 145, 155, 154, 146, 163, 159, 152, 152, 149, 141, 153, 150, 142, 159, 156, 146, 159, 156, 146, 170, 167, 156, 161, 159, 148, 183, 178, 168, 184, 179, 169, 163, 158, 148, 161, 156, 146, 168, 162, 153, 167, 164, 154, 170, 167, 156, 168, 166, 155, 157, 154, 144, 160, 158, 147, 167, 164, 154, 167, 164, 154, 161, 159, 148, 161, 159, 148, 157, 154, 142, 156, 153, 142, 156, 156, 143, 146, 145, 134, 131, 131, 117, 141, 141, 128, 131, 130, 119, 140, 140, 128, 142, 142, 131, 137, 136, 125, 133, 135, 123, 134, 136, 124, 145, 148, 133, 145, 148, 133, 146, 146, 132, 140, 142, 128, 145, 145, 129, 145, 145, 129, 145, 145, 129, 154, 154, 138, 135, 138, 121, 152, 152, 136, 154, 154, 138, 142, 145, 128, 148, 148, 132, 144, 144, 128, 151, 149, 134, 151, 151, 135, 154, 154, 138, 151, 151, 135, 151, 148, 136, 145, 145, 131, 144, 144, 128, 130, 133, 118, 109, 111, 99, 22, 24, 10, 45, 45, 31, 20, 20, 8, 98, 100, 88, 112, 112, 100, 156, 158, 146, 147, 149, 136, 149, 151, 141, 154, 155, 148, 152, 156, 148, 143, 146, 138, 145, 149, 141, 144, 147, 139, 138, 142, 134, 131, 134, 129, 135, 141, 132, 133, 139, 130, 149, 155, 149, 154, 160, 153, 160, 165, 159, 161, 167, 160, 163, 169, 162, 175, 179, 171, 170, 173, 165, 171, 174, 166, 156, 159, 151, 184, 187, 177, 222, 220, 207, 255, 252, 237, 255, 253, 236, 255, 255, 240, 255, 255, 243, 255, 255, 244, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 242, 231, 231, 217, 132, 130, 117, 166, 166, 152, 170, 167, 154, 251, 251, 237, 231, 231, 217, 225, 225, 211, 89, 89, 75, 111, 110, 101, 227, 227, 214, 252, 250, 237, 255, 255, 250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 245, 245, 243, 230, 240, 237, 222, 255, 255, 247, 255, 255, 254, 255, 255, 251, 255, 255, 255, 54, 52, 37, 23, 21, 8, 110, 109, 91, 255, 255, 251, 255, 255, 252, 255, 255, 243, 246, 241, 227, 247, 242, 230, 248, 245, 230, 255, 255, 242, 255, 255, 253, 255, 255, 253, 255, 255, 253, 255, 255, 254, 255, 255, 254, 255, 255, 254, 255, 255, 253, 255, 255, 253, 255, 255, 255, 255, 255, 253, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 254, 255, 255, 255, 255, 255, 254, 255, 255, 254, 255, 255, 254, 255, 255, 254, 255, 255, 253, 255, 255, 254, 255, 255, 253, 255, 255, 250, 255, 255, 251, 255, 255, 251, 255, 255, 251, 255, 255, 251, 255, 255, 250, 255, 255, 250, 255, 255, 250, 255, 255, 251, 255, 255, 252, 255, 255, 251, 255, 255, 251, 255, 255, 252, 255, 255, 249, 255, 255, 246, 244, 242, 227, 225, 223, 208, 229, 227, 214, 251, 249, 236, 255, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 29, 27, 9, 30, 29, 20, 27, 27, 13, 255, 255, 255, 147, 165, 154, 153, 166, 155, 165, 176, 165, 158, 169, 158, 168, 179, 169, 163, 174, 164, 166, 176, 168, 158, 168, 160, 169, 180, 170, 167, 176, 166, 167, 176, 166, 179, 187, 178, 174, 183, 173, 173, 179, 170, 156, 162, 154, 162, 168, 159, 160, 166, 157, 169, 175, 166, 163, 169, 160, 167, 173, 164, 146, 150, 140, 148, 151, 141, 159, 161, 152, 145, 147, 137, 24, 26, 16, 19, 18, 9, 6, 7, 0, 90, 89, 80, 112, 113, 104, 128, 128, 119, 156, 156, 145, 153, 152, 141, 155, 155, 144, 167, 164, 154, 160, 158, 147, 157, 154, 144, 161, 159, 148, 160, 159, 148, 165, 162, 152, 153, 152, 141, 165, 164, 153, 168, 166, 155, 164, 161, 151, 166, 163, 153, 167, 164, 154, 177, 174, 163, 165, 162, 152, 168, 166, 155, 175, 173, 162, 164, 161, 151, 159, 153, 144, 160, 158, 145, 167, 165, 152, 164, 161, 151, 173, 
170, 160, 163, 160, 149, 161, 159, 148, 166, 163, 153, 174, 171, 161, 166, 165, 154, 161, 159, 148, 152, 149, 139, 170, 167, 154, 168, 166, 153, 147, 147, 133, 156, 156, 143, 148, 148, 136, 141, 141, 128, 152, 151, 140, 140, 140, 128, 139, 138, 127, 155, 155, 144, 139, 138, 127, 145, 144, 133, 148, 148, 134, 148, 150, 136, 150, 153, 138, 153, 155, 140, 144, 147, 130, 128, 131, 114, 145, 148, 131, 140, 142, 126, 140, 142, 126, 152, 152, 136, 149, 150, 134, 147, 147, 131, 152, 152, 136, 155, 156, 140, 146, 146, 130, 149, 150, 134, 166, 166, 150, 162, 162, 147, 158, 158, 144, 156, 156, 143, 153, 153, 139, 151, 154, 139, 130, 132, 120, 37, 40, 25, 34, 34, 20, 21, 23, 9, 102, 104, 92, 116, 118, 106, 140, 142, 129, 142, 144, 134, 153, 154, 145, 159, 163, 155, 156, 159, 151, 135, 138, 130, 151, 155, 147, 143, 146, 138, 138, 142, 134, 159, 162, 157, 151, 154, 149, 151, 156, 150, 154, 160, 153, 166, 169, 164, 163, 166, 158, 163, 166, 160, 174, 179, 173, 173, 177, 169, 170, 173, 165, 184, 187, 179, 156, 159, 151, 175, 179, 169, 242, 240, 224, 255, 254, 238, 255, 255, 240, 255, 255, 244, 255, 255, 245, 255, 255, 247, 255, 255, 250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 249, 249, 236, 222, 221, 208, 132, 132, 118, 169, 169, 156, 135, 137, 125, 202, 202, 188, 229, 228, 215, 103, 102, 91, 134, 135, 126, 161, 161, 147, 249, 247, 231, 254, 251, 236, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 250, 248, 235, 244, 242, 227, 249, 246, 233, 255, 255, 252, 255, 255, 255, 255, 255, 255, 38, 36, 21, 31, 31, 17, 110, 108, 93, 255, 255, 255, 255, 255, 255, 255, 255, 253, 248, 245, 232, 249, 244, 232, 249, 246, 233, 255, 255, 242, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 253, 255, 255, 253, 255, 255, 253, 255, 255, 254, 255, 255, 254, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 254, 255, 255, 254, 255, 255, 255, 255, 255, 251, 255, 255, 247, 243, 241, 228, 233, 228, 216, 234, 231, 218, 255, 255, 244, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 228, 226, 209, 21, 20, 12, 31, 34, 19, 221, 224, 207, 255, 255, 255, 154, 167, 158, 148, 159, 151, 157, 167, 159, 158, 168, 160, 157, 167, 159, 167, 177, 169, 159, 169, 161, 162, 173, 165, 168, 179, 171, 171, 179, 172, 176, 184, 176, 176, 182, 175, 175, 181, 174, 174, 179, 173, 178, 184, 178, 161, 167, 158, 176, 182, 173, 171, 177, 169, 169, 175, 164, 170, 177, 165, 160, 164, 154, 153, 157, 147, 155, 156, 147, 144, 148, 137, 37, 39, 29, 16, 18, 8, 2, 4, 0, 123, 125, 115, 107, 109, 99, 149, 149, 138, 168, 168, 156, 156, 156, 145, 168, 166, 155, 171, 168, 158, 158, 155, 145, 158, 155, 145, 158, 155, 145, 153, 151, 140, 173, 170, 160, 165, 162, 152, 171, 168, 158, 180, 177, 167, 180, 177, 167, 173, 170, 160, 172, 169, 159, 174, 171, 161, 171, 168, 158, 164, 161, 151, 173, 170, 160, 158, 155, 145, 164, 161, 151, 161, 159, 146, 161, 159, 146, 160, 158, 145, 161, 159, 146, 161, 159, 146, 170, 167, 154, 165, 162, 150, 163, 160, 149, 156, 156, 145, 148, 148, 136, 165, 164, 153, 174, 174, 160, 177, 177, 166, 173, 173, 159, 170, 170, 157, 167, 167, 153, 152, 152, 138, 158, 157, 146, 146, 145, 
134, 146, 145, 134, 140, 140, 128, 149, 149, 138, 142, 142, 131, 142, 142, 131, 140, 140, 126, 141, 141, 128, 147, 149, 134, 145, 148, 131, 150, 153, 136, 136, 139, 122, 145, 148, 131, 149, 152, 135, 152, 152, 136, 155, 158, 141, 161, 163, 147, 160, 160, 144, 140, 140, 124, 140, 140, 124, 151, 151, 135, 151, 151, 135, 162, 162, 147, 180, 180, 164, 167, 167, 153, 164, 166, 152, 168, 168, 154, 146, 145, 134, 61, 61, 47, 32, 31, 20, 27, 29, 17, 95, 97, 85, 94, 96, 84, 153, 155, 142, 142, 144, 134, 149, 153, 142, 160, 164, 156, 143, 146, 138, 143, 146, 138, 150, 153, 145, 166, 170, 162, 166, 170, 162, 165, 168, 162, 135, 141, 132, 159, 165, 156, 163, 169, 162, 167, 172, 166, 166, 171, 165, 181, 186, 180, 186, 191, 183, 173, 179, 170, 174, 180, 171, 181, 187, 178, 151, 157, 148, 178, 182, 171, 241, 238, 223, 255, 255, 243, 255, 255, 241, 255, 255, 245, 255, 255, 247, 255, 255, 248, 255, 255, 249, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 252, 238, 216, 216, 202, 130, 129, 116, 165, 167, 155, 122, 124, 112, 197, 197, 185, 130, 129, 118, 143, 145, 135, 95, 95, 81, 253, 253, 239, 253, 253, 239, 255, 255, 250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 250, 247, 244, 229, 243, 241, 226, 255, 255, 244, 255, 255, 255, 255, 255, 255, 33, 32, 14, 24, 27, 12, 151, 149, 134, 255, 255, 254, 255, 255, 255, 255, 255, 253, 249, 246, 233, 250, 248, 235, 252, 250, 237, 255, 255, 245, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 255, 249, 244, 242, 229, 234, 231, 218, 240, 237, 224, 255, 255, 245, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 46, 45, 27, 45, 48, 33, 16, 18, 6, 255, 255, 255, 255, 255, 255, 155, 164, 168, 150, 162, 158, 149, 162, 153, 141, 152, 144, 159, 169, 161, 163, 174, 166, 162, 173, 165, 169, 180, 172, 173, 181, 174, 173, 181, 174, 176, 184, 176, 177, 185, 176, 166, 172, 163, 171, 177, 169, 166, 172, 163, 164, 170, 162, 159, 165, 156, 168, 174, 165, 173, 179, 168, 169, 175, 164, 162, 168, 157, 152, 158, 149, 156, 159, 149, 76, 80, 70, 6, 7, 0, 17, 19, 9, 62, 63, 54, 119, 119, 107, 144, 146, 136, 159, 158, 147, 165, 164, 153, 167, 164, 154, 165, 162, 152, 165, 162, 152, 165, 162, 150, 155, 152, 141, 157, 154, 144, 159, 156, 146, 157, 154, 144, 158, 155, 145, 181, 178, 168, 179, 176, 166, 167, 164, 154, 167, 164, 154, 175, 173, 162, 178, 175, 164, 186, 183, 173, 184, 181, 170, 178, 175, 164, 173, 170, 160, 180, 177, 167, 180, 178, 165, 178, 175, 164, 181, 179, 166, 178, 178, 165, 181, 181, 167, 177, 174, 161, 167, 167, 153, 173, 173, 159, 169, 169, 158, 170, 170, 159, 167, 167, 153, 167, 166, 155, 170, 170, 157, 168, 168, 156, 159, 158, 147, 155, 155, 142, 161, 161, 147, 158, 158, 144, 144, 143, 130, 146, 145, 134, 145, 145, 131, 151, 151, 137, 146, 145, 134, 157, 159, 145, 160, 160, 146, 150, 153, 138, 159, 162, 147, 159, 162, 147, 164, 166, 152, 158, 161, 144, 154, 156, 142, 155, 157, 143, 159, 162, 147, 157, 159, 145, 159, 162, 145, 
157, 160, 143, 164, 167, 150, 154, 154, 140, 144, 143, 130, 159, 159, 143, 163, 165, 151, 168, 170, 156, 169, 169, 156, 165, 168, 153, 166, 169, 154, 142, 144, 130, 66, 68, 56, 45, 44, 33, 21, 23, 9, 99, 101, 89, 104, 106, 94, 149, 153, 140, 140, 144, 134, 163, 166, 156, 149, 152, 144, 128, 131, 123, 144, 147, 139, 153, 157, 147, 159, 165, 156, 159, 165, 156, 149, 155, 147, 148, 154, 147, 162, 168, 161, 174, 180, 171, 162, 168, 159, 180, 186, 177, 180, 186, 177, 186, 191, 183, 182, 188, 179, 181, 187, 178, 171, 177, 169, 147, 153, 144, 191, 195, 182, 222, 221, 208, 255, 255, 245, 255, 255, 247, 255, 255, 248, 255, 255, 249, 255, 255, 249, 255, 255, 250, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 245, 231, 231, 217, 162, 162, 151, 165, 167, 155, 107, 109, 97, 119, 121, 109, 170, 171, 162, 92, 94, 80, 253, 253, 239, 254, 254, 240, 254, 254, 240, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 250, 248, 233, 245, 243, 228, 243, 241, 226, 255, 255, 255, 255, 255, 255, 18, 18, 2, 18, 18, 4, 210, 210, 194, 255, 255, 255, 255, 255, 255, 255, 255, 251, 252, 250, 237, 254, 251, 238, 255, 255, 243, 255, 255, 245, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 247, 244, 231, 240, 237, 224, 251, 249, 236, 255, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 254, 240, 33, 33, 19, 53, 56, 41, 192, 195, 178, 255, 255, 255, 255, 255, 255, 149, 161, 159, 154, 166, 160, 159, 171, 162, 149, 162, 153, 154, 165, 157, 154, 165, 157, 144, 154, 146, 147, 155, 147, 151, 159, 152, 159, 167, 160, 159, 167, 160, 162, 170, 162, 162, 168, 161, 163, 169, 162, 168, 174, 165, 156, 162, 156, 166, 172, 163, 168, 174, 165, 169, 175, 164, 168, 174, 163, 165, 169, 159, 146, 150, 140, 161, 165, 155, 48, 52, 42, 10, 12, 2, 15, 17, 7, 94, 96, 86, 116, 115, 106, 151, 150, 141, 155, 155, 144, 166, 165, 154, 156, 153, 142, 160, 158, 147, 161, 159, 146, 146, 143, 133, 160, 158, 147, 160, 158, 147, 153, 151, 140, 135, 132, 121, 163, 160, 149, 155, 152, 141, 171, 168, 158, 173, 170, 160, 171, 168, 158, 166, 163, 153, 161, 159, 148, 164, 161, 151, 173, 170, 160, 171, 168, 158, 166, 163, 153, 177, 174, 161, 172, 169, 157, 167, 165, 152, 175, 173, 160, 171, 171, 158, 169, 169, 156, 158, 158, 144, 171, 171, 158, 168, 168, 154, 161, 161, 147, 168, 168, 154, 168, 168, 154, 163, 163, 150, 175, 177, 164, 168, 170, 156, 165, 164, 151, 156, 156, 145, 161, 161, 147, 150, 153, 138, 145, 145, 131, 151, 154, 139, 149, 149, 136, 153, 153, 139, 150, 153, 138, 157, 159, 145, 150, 153, 138, 157, 159, 145, 156, 158, 144, 163, 165, 151, 164, 166, 152, 155, 158, 141, 159, 162, 147, 158, 161, 146, 135, 139, 124, 157, 161, 146, 159, 162, 147, 149, 153, 138, 152, 157, 142, 164, 166, 152, 162, 164, 150, 153, 155, 140, 161, 163, 149, 162, 164, 150, 156, 158, 144, 159, 161, 149, 157, 159, 147, 142, 144, 132, 51, 54, 39, 29, 31, 19, 15, 17, 3, 99, 101, 89, 118, 120, 107, 155, 159, 
146, 155, 158, 148, 150, 154, 143, 152, 156, 148, 149, 155, 147, 153, 159, 150, 149, 155, 147, 143, 149, 141, 142, 148, 139, 149, 155, 147, 153, 159, 150, 160, 166, 157, 175, 181, 172, 170, 176, 167, 171, 177, 169, 178, 184, 176, 174, 180, 171, 175, 181, 172, 175, 181, 172, 171, 177, 169, 149, 155, 147, 189, 193, 183, 204, 204, 190, 255, 255, 249, 255, 255, 248, 255, 255, 249, 255, 255, 250, 255, 255, 251, 255, 255, 251, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 249, 240, 240, 228, 188, 187, 176, 155, 157, 145, 152, 151, 140, 165, 167, 155, 133, 133, 119, 255, 255, 246, 255, 255, 242, 255, 255, 245, 255, 255, 246, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 251, 236, 245, 243, 228, 245, 243, 228, 255, 255, 255, 255, 253, 240, 30, 30, 14, 26, 26, 12, 253, 253, 237, 255, 255, 255, 255, 255, 255, 255, 255, 249, 255, 255, 242, 255, 255, 243, 255, 255, 246, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 247, 249, 246, 233, 243, 241, 228, 255, 255, 244, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 106, 106, 93, 27, 27, 15, 27, 29, 17, 255, 255, 255, 255, 255, 255, 255, 255, 255, 145, 156, 145, 146, 157, 149, 151, 161, 153, 153, 164, 156, 163, 174, 168, 162, 170, 162, 166, 174, 167, 156, 164, 157, 165, 173, 166, 147, 155, 147, 163, 171, 164, 164, 172, 165, 163, 169, 162, 146, 152, 145, 141, 147, 140, 140, 146, 139, 155, 161, 154, 158, 163, 155, 170, 173, 165, 159, 165, 154, 148, 151, 143, 150, 154, 143, 146, 150, 140, 29, 33, 22, 14, 16, 6, 20, 21, 12, 105, 105, 94, 117, 116, 107, 170, 172, 160, 175, 174, 163, 171, 171, 160, 170, 167, 154, 165, 162, 150, 156, 153, 140, 167, 165, 152, 168, 166, 153, 155, 152, 139, 155, 152, 139, 156, 153, 142, 160, 158, 147, 165, 162, 152, 179, 176, 166, 184, 181, 170, 179, 176, 166, 165, 162, 152, 167, 164, 154, 187, 184, 174, 179, 176, 166, 177, 174, 161, 174, 172, 159, 178, 175, 162, 185, 185, 172, 181, 180, 169, 173, 173, 159, 169, 169, 156, 165, 164, 151, 162, 162, 149, 170, 170, 157, 167, 166, 155, 165, 164, 153, 167, 166, 155, 169, 169, 158, 175, 177, 164, 174, 173, 162, 166, 165, 154, 159, 158, 147, 149, 149, 138, 159, 162, 147, 154, 154, 140, 153, 153, 139, 149, 151, 137, 158, 161, 146, 151, 153, 141, 153, 155, 140, 149, 151, 137, 166, 168, 156, 158, 163, 147, 159, 164, 149, 151, 154, 139, 151, 154, 139, 159, 164, 149, 160, 165, 150, 146, 151, 136, 139, 144, 129, 157, 161, 146, 145, 150, 134, 151, 156, 140, 146, 151, 136, 140, 145, 130, 166, 171, 156, 167, 172, 157, 163, 167, 154, 151, 156, 140, 149, 153, 140, 155, 159, 146, 151, 155, 142, 120, 122, 110, 49, 51, 39, 48, 53, 40, 19, 24, 11, 108, 112, 99, 131, 135, 123, 153, 158, 145, 156, 160, 147, 157, 161, 150, 151, 155, 145, 148, 151, 141, 145, 151, 142, 135, 141, 132, 148, 154, 145, 145, 151, 142, 153, 159, 150, 153, 159, 148, 163, 170, 159, 167, 173, 162, 169, 175, 164, 168, 174, 163, 175, 181, 
170, 176, 182, 171, 176, 182, 171, 173, 179, 168, 170, 177, 165, 155, 161, 150, 186, 192, 181, 184, 186, 172, 255, 255, 250, 255, 255, 249, 255, 255, 251, 255, 255, 251, 255, 255, 251, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 249, 251, 239, 240, 240, 226, 204, 204, 190, 167, 166, 155, 137, 136, 125, 177, 177, 164, 223, 223, 209, 210, 210, 196, 163, 163, 150, 227, 228, 212, 255, 255, 244, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 243, 245, 243, 228, 245, 243, 228, 255, 255, 244, 205, 202, 187, 50, 47, 34, 30, 30, 12, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 249, 255, 255, 244, 255, 255, 246, 255, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 255, 243, 248, 245, 232, 247, 244, 231, 255, 255, 249, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 35, 35, 24, 49, 50, 41, 250, 252, 240, 255, 255, 255, 255, 255, 255, 255, 255, 255, 155, 166, 158, 157, 167, 159, 160, 171, 162, 167, 175, 168, 158, 166, 159, 144, 155, 144, 134, 142, 132, 165, 173, 166, 147, 155, 147, 158, 163, 157, 143, 151, 144, 135, 143, 136, 122, 130, 123, 131, 136, 130, 134, 140, 131, 125, 131, 122, 132, 136, 128, 148, 151, 143, 149, 152, 144, 152, 156, 146, 149, 153, 142, 136, 143, 132, 132, 136, 126, 59, 63, 52, 0, 0, 0, 72, 74, 64, 123, 125, 113, 145, 144, 133, 171, 173, 161, 174, 173, 162, 181, 179, 166, 174, 172, 159, 164, 161, 151, 169, 164, 154, 175, 173, 162, 170, 165, 153, 170, 167, 156, 178, 175, 164, 174, 171, 161, 180, 178, 165, 184, 181, 168, 185, 182, 171, 188, 185, 175, 177, 174, 163, 174, 171, 161, 186, 183, 173, 191, 188, 177, 182, 182, 168, 175, 173, 160, 170, 167, 156, 171, 171, 158, 180, 178, 165, 178, 175, 162, 178, 178, 165, 173, 173, 159, 169, 169, 158, 165, 164, 153, 174, 173, 162, 162, 162, 151, 166, 165, 154, 176, 176, 164, 171, 171, 160, 183, 183, 171, 167, 166, 155, 160, 159, 148, 155, 155, 144, 176, 176, 162, 168, 168, 154, 164, 166, 152, 155, 155, 142, 165, 164, 151, 165, 168, 153, 155, 157, 143, 168, 170, 156, 157, 161, 146, 157, 159, 145, 161, 163, 149, 166, 171, 156, 150, 154, 139, 145, 150, 134, 156, 160, 147, 163, 167, 152, 152, 156, 144, 164, 166, 154, 159, 164, 149, 157, 161, 146, 159, 164, 149, 157, 161, 146, 160, 165, 150, 164, 168, 153, 158, 162, 149, 159, 163, 151, 152, 156, 144, 144, 148, 135, 141, 143, 131, 154, 156, 144, 129, 131, 119, 16, 18, 6, 38, 40, 28, 27, 29, 17, 102, 104, 92, 142, 144, 132, 158, 162, 149, 171, 175, 162, 159, 163, 151, 153, 157, 147, 163, 170, 159, 162, 168, 157, 148, 155, 143, 163, 170, 159, 159, 165, 154, 161, 167, 156, 171, 178, 167, 170, 177, 165, 171, 178, 167, 173, 179, 168, 177, 184, 172, 168, 174, 163, 166, 172, 161, 169, 175, 164, 164, 171, 160, 167, 173, 162, 171, 175, 164, 186, 190, 179, 178, 180, 166, 255, 255, 248, 255, 255, 250, 255, 255, 251, 255, 255, 252, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 249, 249, 236, 250, 248, 235, 198, 198, 187, 163, 163, 150, 171, 173, 161, 175, 174, 163, 148, 148, 136, 174, 173, 162, 139, 139, 125, 148, 148, 134, 255, 255, 244, 255, 255, 255, 255, 255, 255, 255, 255, 255, 242, 238, 223, 244, 242, 227, 255, 255, 240, 26, 24, 9, 26, 26, 12, 204, 205, 186, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 255, 245, 255, 255, 246, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 255, 255, 244, 250, 248, 235, 250, 248, 235, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 53, 53, 39, 30, 31, 24, 43, 46, 27, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 160, 173, 162, 154, 167, 156, 159, 169, 161, 163, 174, 166, 159, 170, 159, 144, 153, 143, 152, 161, 151, 144, 152, 145, 133, 141, 133, 149, 157, 148, 149, 157, 150, 143, 151, 144, 140, 146, 139, 127, 133, 126, 133, 139, 132, 131, 134, 129, 136, 139, 131, 147, 148, 141, 160, 164, 156, 155, 156, 147, 155, 156, 147, 168, 173, 160, 130, 134, 121, 44, 46, 36, 9, 8, 0, 68, 67, 58, 113, 113, 102, 156, 156, 145, 162, 162, 151, 181, 179, 166, 168, 166, 153, 179, 176, 166, 175, 169, 160, 174, 168, 159, 175, 169, 160, 177, 172, 160, 184, 179, 167, 173, 171, 158, 187, 185, 172, 193, 191, 178, 193, 191, 178, 192, 189, 179, 178, 175, 164, 184, 181, 170, 180, 177, 167, 180, 177, 167, 192, 189, 179, 153, 152, 141, 168, 166, 155, 168, 168, 156, 169, 169, 156, 173, 173, 159, 177, 174, 163, 176, 176, 162, 173, 172, 161, 174, 173, 162, 169, 169, 156, 165, 164, 151, 168, 168, 156, 175, 174, 163, 177, 177, 166, 184, 184, 173, 188, 187, 176, 167, 166, 155, 159, 158, 147, 156, 156, 145, 165, 164, 153, 165, 164, 151, 154, 156, 142, 159, 159, 145, 163, 163, 150, 169, 169, 156, 164, 166, 152, 169, 171, 157, 170, 174, 159, 171, 173, 159, 171, 175, 160, 158, 163, 147, 157, 161, 146, 156, 158, 144, 159, 161, 149, 162, 164, 152, 151, 155, 142, 165, 169, 156, 165, 170, 154, 152, 157, 142, 155, 159, 146, 152, 157, 142, 163, 167, 152, 166, 170, 158, 159, 163, 151, 156, 160, 147, 148, 152, 139, 142, 146, 133, 143, 145, 133, 134, 136, 124, 63, 65, 53, 36, 38, 26, 24, 26, 14, 60, 64, 52, 113, 117, 104, 174, 178, 166, 160, 164, 152, 161, 166, 153, 161, 168, 154, 158, 164, 151, 156, 163, 152, 163, 170, 159, 168, 174, 163, 170, 177, 165, 173, 179, 168, 173, 179, 168, 180, 186, 175, 168, 174, 163, 167, 173, 162, 168, 174, 163, 161, 167, 156, 158, 164, 153, 162, 169, 155, 158, 164, 151, 164, 171, 158, 164, 171, 158, 163, 167, 154, 157, 161, 148, 159, 159, 145, 255, 255, 251, 255, 255, 251, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 242, 248, 248, 235, 238, 238, 224, 220, 220, 
207, 106, 106, 93, 170, 170, 159, 161, 161, 149, 156, 158, 146, 175, 174, 163, 128, 128, 117, 152, 152, 138, 255, 255, 255, 255, 255, 255, 255, 255, 252, 235, 233, 218, 241, 238, 223, 221, 219, 203, 23, 21, 6, 25, 23, 8, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 249, 255, 255, 249, 255, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 249, 255, 255, 243, 255, 252, 239, 255, 255, 245, 255, 255, 255, 255, 255, 255, 255, 255, 255, 220, 220, 207, 51, 54, 39, 41, 40, 32, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 163, 176, 165, 168, 181, 170, 174, 184, 176, 169, 180, 170, 165, 176, 165, 162, 170, 160, 164, 172, 165, 170, 173, 167, 157, 160, 154, 153, 156, 151, 163, 166, 160, 164, 167, 161, 161, 161, 157, 150, 151, 144, 150, 151, 144, 142, 141, 133, 140, 139, 130, 134, 130, 122, 132, 129, 121, 127, 124, 113, 121, 118, 107, 118, 115, 107, 121, 118, 107, 118, 115, 107, 163, 156, 149, 155, 149, 142, 171, 166, 159, 185, 182, 174, 185, 182, 174, 199, 195, 188, 202, 196, 189, 202, 199, 191, 201, 198, 190, 201, 198, 190, 201, 198, 190, 202, 195, 191, 196, 193, 187, 195, 191, 188, 193, 189, 186, 191, 187, 183, 198, 194, 190, 191, 190, 186, 196, 192, 189, 190, 189, 183, 198, 197, 191, 192, 191, 185, 196, 195, 188, 194, 192, 186, 195, 192, 186, 196, 193, 185, 194, 193, 184, 196, 193, 185, 195, 194, 185, 205, 201, 193, 184, 183, 175, 183, 182, 174, 171, 171, 162, 174, 173, 164, 165, 164, 155, 156, 156, 145, 147, 146, 137, 135, 135, 126, 137, 134, 124, 134, 134, 123, 127, 127, 116, 128, 128, 117, 140, 140, 128, 145, 144, 133, 151, 151, 137, 167, 167, 153, 173, 173, 159, 162, 162, 149, 165, 168, 153, 178, 180, 166, 165, 170, 154, 176, 178, 164, 171, 175, 160, 171, 175, 160, 169, 171, 157, 168, 170, 156, 168, 170, 156, 165, 170, 154, 173, 177, 164, 164, 168, 153, 168, 173, 158, 151, 155, 142, 157, 161, 148, 160, 164, 152, 173, 177, 164, 153, 158, 145, 150, 154, 141, 143, 147, 134, 136, 140, 127, 130, 134, 121, 142, 146, 133, 125, 130, 117, 16, 18, 6, 24, 28, 15, 22, 26, 13, 99, 103, 90, 156, 160, 147, 157, 161, 148, 159, 165, 152, 162, 169, 155, 158, 164, 151, 164, 171, 158, 162, 169, 155, 164, 171, 158, 178, 185, 171, 171, 178, 167, 168, 174, 163, 167, 177, 162, 164, 171, 158, 166, 172, 159, 153, 160, 146, 148, 155, 141, 146, 153, 139, 151, 157, 144, 158, 162, 149, 152, 158, 145, 157, 161, 146, 149, 153, 138, 166, 171, 156, 133, 135, 121, 174, 174, 160, 255, 255, 254, 255, 255, 254, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 248, 248, 235, 245, 244, 233, 212, 212, 199, 146, 146, 132, 128, 128, 117, 116, 115, 104, 120, 120, 109, 100, 100, 89, 99, 96, 83, 200, 197, 185, 255, 255, 252, 255, 255, 252, 255, 255, 242, 232, 227, 213, 246, 242, 225, 83, 81, 66, 45, 43, 28, 123, 121, 104, 
255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 255, 250, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 247, 255, 255, 243, 255, 252, 239, 255, 255, 251, 255, 255, 255, 255, 255, 255, 241, 241, 228, 38, 41, 26, 26, 28, 15, 154, 155, 146, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 178, 187, 175, 187, 196, 186, 170, 176, 167, 176, 182, 173, 183, 189, 180, 187, 191, 183, 170, 171, 164, 141, 140, 134, 144, 142, 136, 127, 123, 115, 143, 139, 134, 160, 157, 151, 188, 185, 179, 192, 188, 183, 209, 206, 198, 215, 212, 202, 212, 207, 197, 219, 214, 204, 214, 209, 199, 218, 213, 201, 219, 214, 202, 217, 212, 200, 220, 215, 203, 217, 209, 201, 209, 201, 192, 210, 202, 194, 205, 198, 187, 202, 197, 185, 198, 193, 181, 192, 187, 175, 205, 200, 188, 205, 200, 188, 197, 192, 180, 191, 186, 174, 184, 179, 169, 183, 178, 168, 181, 176, 164, 186, 181, 169, 179, 174, 162, 181, 176, 164, 184, 179, 165, 181, 176, 164, 185, 181, 166, 184, 181, 168, 180, 178, 165, 182, 180, 167, 186, 183, 173, 181, 179, 166, 182, 180, 167, 180, 178, 165, 180, 178, 165, 177, 174, 161, 180, 178, 165, 182, 180, 167, 182, 180, 167, 186, 183, 171, 182, 180, 169, 185, 182, 169, 190, 187, 174, 190, 187, 174, 190, 187, 174, 194, 192, 179, 192, 189, 177, 193, 191, 178, 195, 193, 180, 202, 200, 187, 198, 195, 182, 196, 194, 181, 196, 194, 183, 206, 203, 192, 208, 205, 195, 210, 207, 199, 203, 202, 193, 204, 205, 198, 191, 192, 185, 177, 178, 171, 165, 164, 155, 146, 145, 136, 131, 130, 121, 124, 123, 112, 125, 125, 113, 130, 132, 120, 162, 164, 150, 156, 160, 147, 164, 168, 155, 164, 168, 153, 171, 175, 162, 165, 169, 156, 173, 177, 164, 157, 161, 148, 153, 158, 145, 145, 149, 136, 146, 150, 138, 150, 154, 141, 144, 148, 135, 51, 55, 42, 29, 31, 19, 14, 18, 5, 67, 71, 59, 120, 124, 111, 164, 168, 155, 157, 161, 148, 146, 153, 139, 160, 167, 153, 164, 171, 158, 174, 180, 167, 174, 180, 167, 167, 173, 160, 174, 180, 167, 167, 173, 160, 154, 161, 145, 153, 160, 144, 153, 158, 143, 152, 157, 142, 156, 160, 145, 152, 157, 142, 145, 150, 134, 157, 159, 145, 162, 164, 150, 164, 167, 150, 168, 170, 156, 158, 161, 144, 167, 167, 151, 165, 165, 149, 255, 255, 249, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 252, 238, 247, 247, 233, 241, 241, 230, 185, 185, 172, 108, 105, 93, 126, 126, 114, 165, 164, 153, 160, 159, 148, 130, 127, 117, 243, 241, 228, 255, 255, 250, 255, 255, 248, 240, 235, 221, 222, 218, 203, 212, 209, 194, 36, 34, 18, 35, 33, 17, 255, 255, 247, 255, 255, 255, 255, 255, 254, 255, 255, 252, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 255, 247, 255, 255, 244, 255, 252, 239, 255, 255, 255, 255, 255, 255, 255, 255, 255, 52, 55, 40, 56, 56, 45, 28, 28, 17, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 142, 144, 132, 149, 148, 140, 158, 157, 148, 183, 182, 174, 212, 208, 200, 229, 226, 218, 227, 222, 212, 247, 242, 232, 249, 246, 233, 255, 252, 239, 255, 255, 245, 255, 255, 245, 255, 250, 238, 255, 251, 239, 249, 244, 232, 247, 242, 230, 243, 238, 226, 241, 236, 224, 240, 235, 223, 241, 236, 224, 240, 235, 223, 247, 242, 230, 252, 245, 233, 245, 238, 226, 253, 246, 235, 245, 240, 228, 220, 216, 201, 196, 191, 179, 188, 183, 171, 153, 148, 134, 129, 124, 112, 113, 108, 96, 91, 88, 77, 83, 77, 68, 120, 114, 107, 80, 75, 65, 75, 69, 62, 76, 70, 63, 77, 71, 64, 72, 67, 57, 79, 74, 62, 67, 64, 54, 72, 69, 59, 78, 78, 67, 67, 64, 54, 61, 61, 47, 66, 65, 54, 73, 70, 60, 71, 68, 55, 63, 60, 47, 86, 83, 70, 86, 84, 68, 111, 109, 96, 109, 107, 92, 135, 133, 117, 143, 141, 124, 159, 157, 142, 172, 170, 155, 178, 176, 159, 193, 191, 176, 190, 188, 170, 205, 203, 185, 214, 212, 195, 207, 205, 188, 209, 207, 190, 203, 202, 184, 203, 201, 186, 198, 195, 180, 209, 204, 190, 207, 203, 188, 209, 204, 192, 210, 205, 191, 214, 209, 197, 211, 206, 194, 217, 212, 200, 217, 212, 200, 214, 209, 197, 221, 216, 204, 219, 216, 203, 223, 221, 208, 210, 208, 195, 216, 213, 203, 220, 220, 209, 211, 211, 199, 216, 215, 206, 217, 216, 207, 208, 207, 198, 187, 189, 179, 175, 176, 167, 149, 151, 141, 118, 119, 110, 114, 116, 104, 111, 113, 100, 114, 118, 105, 109, 111, 99, 47, 49, 34, 37, 39, 27, 30, 32, 20, 110, 114, 102, 153, 160, 146, 160, 164, 152, 162, 169, 155, 158, 164, 151, 180, 186, 173, 170, 177, 163, 174, 180, 167, 167, 173, 160, 154, 161, 145, 158, 165, 149, 154, 161, 145, 158, 163, 147, 171, 173, 159, 171, 174, 157, 163, 164, 148, 159, 159, 143, 153, 154, 135, 153, 153, 137, 149, 150, 132, 149, 150, 134, 180, 178, 163, 192, 193, 177, 222, 222, 206, 241, 242, 226, 251, 251, 235, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 253, 239, 241, 241, 228, 230, 230, 216, 99, 99, 88, 156, 156, 145, 158, 157, 146, 166, 165, 156, 172, 169, 159, 255, 255, 244, 255, 255, 245, 255, 255, 245, 214, 212, 197, 207, 203, 188, 18, 16, 1, 53, 51, 36, 124, 122, 107, 255, 255, 255, 255, 255, 254, 255, 255, 249, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 255, 245, 255, 255, 242, 255, 255, 249, 255, 255, 255, 255, 255, 255, 174, 172, 159, 105, 106, 99, 40, 37, 31, 242, 244, 232, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 241, 238, 223, 242, 240, 224, 242, 239, 226, 245, 243, 230, 250, 248, 235, 255, 255, 247, 255, 255, 255, 255, 255, 251, 255, 255, 255, 222, 220, 207, 162, 157, 145, 70, 65, 53, 56, 51, 39, 56, 51, 39, 80, 75, 64, 110, 105, 93, 126, 121, 109, 129, 124, 112, 135, 130, 118, 122, 117, 105, 134, 129, 117, 134, 129, 117, 127, 120, 109, 126, 121, 109, 130, 126, 114, 120, 113, 102, 120, 115, 103, 118, 113, 101, 111, 106, 94, 112, 107, 95, 98, 93, 81, 113, 108, 96, 104, 102, 91, 108, 103, 96, 106, 100, 93, 105, 99, 92, 94, 89, 82, 98, 92, 85, 93, 88, 80, 91, 85, 78, 74, 71, 63, 83, 80, 72, 82, 79, 71, 77, 76, 68, 92, 92, 83, 89, 88, 79, 85, 85, 76, 88, 87, 78, 97, 95, 84, 89, 88, 79, 99, 96, 85, 94, 91, 81, 89, 86, 76, 82, 81, 70, 92, 92, 79, 77, 77, 65, 82, 81, 70, 78, 78, 67, 94, 91, 81, 91, 88, 77, 92, 89, 78, 92, 89, 78, 94, 92, 79, 99, 96, 85, 97, 95, 84, 91, 88, 77, 103, 100, 90, 104, 102, 91, 91, 88, 77, 91, 85, 78, 94, 89, 82, 79, 74, 66, 72, 67, 57, 62, 62, 48, 86, 83, 70, 122, 119, 107, 150, 148, 132, 196, 194, 179, 228, 226, 211, 245, 241, 224, 247, 245, 227, 254, 250, 233, 248, 245, 230, 243, 239, 224, 237, 235, 220, 233, 230, 215, 235, 230, 218, 227, 224, 211, 228, 226, 211, 229, 227, 214, 230, 228, 215, 235, 232, 220, 233, 230, 217, 233, 230, 217, 225, 223, 210, 227, 224, 211, 223, 221, 208, 220, 217, 206, 210, 209, 198, 202, 201, 190, 184, 186, 174, 157, 161, 148, 136, 140, 127, 120, 124, 111, 117, 121, 109, 124, 129, 114, 142, 146, 131, 148, 152, 137, 161, 163, 147, 169, 171, 155, 188, 189, 170, 213, 214, 196, 244, 244, 226, 255, 255, 239, 255, 255, 245, 255, 255, 250, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 243, 242, 239, 226, 239, 238, 227, 214, 211, 199, 147, 145, 134, 94, 91, 81, 145, 143, 130, 255, 255, 247, 255, 255, 247, 252, 250, 235, 222, 220, 207, 42, 40, 24, 56, 54, 38, 52, 50, 35, 255, 255, 255, 255, 255, 255, 255, 255, 250, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 250, 255, 255, 245, 255, 255, 245, 255, 255, 255, 255, 255, 255, 255, 255, 253, 53, 52, 41, 63, 65, 53, 107, 105, 88, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 161, 163, 151, 89, 88, 77, 52, 49, 39, 50, 47, 39, 106, 103, 92, 114, 111, 100, 149, 146, 135, 147, 141, 132, 149, 144, 134, 136, 131, 119, 136, 131, 119, 151, 146, 134, 143, 138, 126, 142, 137, 125, 136, 131, 119, 142, 137, 125, 137, 132, 121, 141, 136, 124, 134, 129, 117, 144, 139, 128, 154, 149, 137, 127, 120, 109, 120, 115, 103, 130, 123, 111, 119, 112, 101, 121, 116, 104, 119, 114, 102, 121, 117, 102, 113, 108, 96, 113, 108, 96, 106, 101, 89, 107, 102, 92, 112, 106, 99, 98, 92, 85, 107, 102, 94, 110, 107, 99, 106, 100, 93, 105, 99, 92, 106, 100, 93, 106, 100, 93, 103, 97, 90, 97, 94, 86, 92, 88, 80, 102, 99, 91, 100, 96, 91, 91, 87, 81, 92, 91, 85, 89, 88, 81, 92, 92, 83, 91, 90, 82, 82, 81, 70, 95, 94, 83, 84, 84, 72, 85, 85, 74, 92, 92, 81, 96, 95, 84, 88, 88, 74, 84, 84, 70, 90, 90, 78, 84, 84, 70, 87, 87, 73, 81, 81, 67, 83, 81, 68, 88, 86, 73, 77, 74, 61, 94, 92, 79, 88, 86, 73, 93, 88, 76, 99, 96, 83, 93, 88, 76, 99, 94, 82, 91, 86, 74, 102, 100, 87, 103, 101, 88, 102, 100, 87, 114, 111, 98, 107, 104, 92, 115, 112, 100, 135, 132, 119, 132, 130, 117, 123, 121, 108, 121, 118, 105, 118, 116, 103, 106, 103, 90, 89, 87, 74, 67, 65, 52, 68, 66, 53, 79, 76, 64, 153, 151, 136, 214, 212, 197, 250, 248, 233, 251, 249, 234, 255, 255, 242, 255, 255, 243, 252, 250, 237, 249, 247, 231, 245, 243, 230, 242, 242, 229, 238, 238, 224, 234, 234, 221, 236, 235, 222, 236, 239, 224, 237, 237, 223, 234, 234, 221, 233, 233, 220, 229, 232, 217, 227, 229, 215, 222, 224, 212, 220, 222, 210, 212, 211, 203, 183, 183, 171, 168, 167, 159, 162, 162, 151, 174, 174, 160, 208, 207, 194, 240, 242, 228, 255, 255, 250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 251, 240, 242, 239, 228, 237, 235, 222, 135, 132, 121, 99, 96, 85, 115, 112, 100, 255, 255, 242, 255, 255, 247, 225, 223, 208, 82, 80, 67, 65, 63, 48, 51, 49, 34, 245, 244, 226, 255, 255, 255, 255, 255, 249, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 247, 255, 255, 246, 255, 255, 252, 255, 255, 255, 255, 255, 255, 141, 141, 128, 19, 24, 18, 69, 69, 55, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 135, 136, 120, 149, 150, 134, 153, 151, 138, 151, 148, 136, 136, 133, 121, 144, 141, 129, 149, 146, 133, 146, 144, 131, 150, 147, 134, 147, 142, 130, 148, 143, 131, 156, 151, 139, 154, 149, 137, 144, 139, 128, 147, 142, 130, 141, 136, 124, 149, 144, 134, 146, 140, 131, 141, 136, 124, 133, 128, 118, 
137, 132, 123, 132, 126, 117, 96, 90, 81, 89, 86, 76, 76, 70, 61, 69, 64, 52, 91, 86, 76, 113, 108, 96, 61, 55, 46, 72, 67, 57, 63, 58, 48, 65, 60, 50, 81, 78, 70, 79, 74, 66, 60, 54, 45, 70, 64, 57, 66, 61, 54, 53, 47, 40, 85, 80, 70, 68, 62, 53, 72, 67, 57, 71, 68, 57, 79, 76, 65, 77, 74, 63, 70, 69, 61, 64, 64, 55, 61, 60, 51, 61, 60, 53, 63, 62, 54, 48, 47, 39, 73, 72, 63, 62, 62, 50, 74, 73, 62, 77, 77, 65, 69, 69, 57, 54, 53, 42, 68, 67, 56, 69, 69, 55, 64, 64, 53, 68, 66, 53, 77, 74, 63, 80, 77, 67, 74, 71, 61, 80, 77, 67, 86, 83, 72, 89, 86, 76, 92, 89, 78, 93, 90, 80, 105, 100, 86, 90, 85, 71, 94, 92, 77, 94, 90, 76, 112, 107, 95, 101, 98, 86, 103, 101, 88, 101, 98, 86, 104, 102, 89, 104, 102, 89, 99, 96, 83, 110, 108, 95, 112, 107, 95, 111, 106, 94, 116, 114, 101, 124, 119, 107, 118, 116, 103, 122, 119, 107, 130, 128, 115, 123, 121, 108, 114, 111, 98, 123, 121, 108, 118, 116, 103, 124, 122, 107, 127, 124, 111, 113, 110, 97, 93, 90, 78, 77, 74, 61, 86, 83, 70, 83, 83, 69, 98, 98, 84, 151, 151, 137, 210, 210, 196, 240, 240, 226, 255, 255, 242, 255, 255, 243, 255, 255, 242, 255, 255, 240, 245, 245, 231, 242, 242, 229, 244, 244, 230, 245, 245, 231, 242, 242, 229, 241, 241, 228, 236, 235, 222, 237, 237, 223, 239, 239, 225, 234, 234, 221, 234, 234, 223, 234, 234, 223, 223, 222, 211, 211, 210, 201, 197, 196, 190, 177, 178, 169, 166, 168, 159, 166, 168, 159, 179, 181, 171, 213, 215, 205, 237, 239, 229, 253, 254, 247, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 244, 251, 249, 236, 225, 223, 212, 168, 166, 153, 215, 213, 200, 255, 255, 245, 252, 250, 237, 123, 121, 108, 57, 54, 41, 36, 33, 20, 191, 189, 173, 255, 255, 255, 255, 255, 249, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 255, 247, 255, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 23, 22, 9, 42, 41, 33, 222, 225, 210, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 158, 158, 142, 158, 156, 143, 150, 147, 134, 149, 146, 133, 166, 163, 153, 158, 155, 145, 134, 131, 120, 126, 121, 111, 124, 118, 109, 165, 160, 151, 193, 189, 177, 255, 255, 255, 255, 255, 255, 114, 109, 99, 135, 130, 120, 97, 91, 82, 129, 126, 116, 78, 73, 63, 80, 75, 65, 94, 91, 81, 96, 90, 81, 79, 74, 64, 93, 88, 78, 93, 88, 78, 77, 72, 62, 57, 54, 43, 68, 62, 53, 83, 81, 70, 89, 83, 74, 74, 68, 59, 84, 79, 69, 93, 88, 78, 98, 92, 85, 77, 71, 64, 86, 81, 71, 65, 60, 52, 87, 82, 72, 121, 116, 106, 75, 72, 64, 73, 70, 62, 82, 79, 69, 61, 60, 51, 61, 60, 49, 71, 71, 62, 63, 62, 54, 80, 79, 70, 60, 59, 50, 71, 71, 62, 131, 130, 121, 69, 68, 60, 83, 82, 73, 62, 62, 50, 59, 58, 
47, 92, 92, 81, 84, 84, 70, 57, 57, 44, 57, 57, 44, 77, 77, 64, 70, 70, 57, 68, 68, 54, 67, 65, 52, 56, 53, 40, 75, 73, 60, 53, 51, 38, 63, 60, 47, 72, 69, 57, 67, 65, 52, 68, 63, 51, 71, 66, 54, 75, 73, 60, 78, 75, 62, 74, 72, 59, 83, 81, 68, 75, 73, 58, 78, 79, 63, 75, 75, 61, 84, 84, 70, 85, 85, 72, 82, 80, 65, 109, 107, 92, 117, 115, 102, 120, 117, 104, 118, 116, 103, 120, 117, 104, 125, 120, 108, 117, 115, 102, 120, 117, 104, 132, 130, 117, 121, 118, 105, 128, 123, 111, 129, 127, 112, 117, 115, 100, 113, 110, 97, 124, 122, 107, 136, 134, 119, 121, 119, 103, 121, 118, 105, 130, 128, 115, 116, 116, 102, 124, 124, 110, 121, 121, 108, 123, 123, 109, 126, 126, 112, 119, 119, 105, 130, 129, 116, 133, 133, 119, 99, 99, 86, 78, 78, 65, 63, 63, 50, 60, 60, 46, 92, 93, 75, 153, 154, 135, 204, 205, 186, 240, 241, 223, 252, 253, 234, 253, 253, 237, 255, 255, 241, 254, 254, 242, 254, 254, 240, 247, 247, 235, 246, 245, 234, 249, 249, 238, 244, 246, 234, 249, 249, 238, 248, 248, 237, 248, 248, 237, 247, 247, 235, 238, 237, 228, 240, 239, 231, 239, 238, 229, 232, 231, 223, 234, 234, 225, 230, 232, 223, 215, 217, 207, 211, 212, 205, 200, 201, 194, 187, 186, 177, 178, 178, 169, 167, 166, 157, 173, 172, 163, 170, 169, 163, 168, 167, 159, 186, 188, 178, 202, 204, 192, 218, 220, 207, 235, 237, 225, 248, 249, 240, 255, 255, 252, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 255, 246, 255, 255, 249, 255, 255, 254, 255, 255, 245, 151, 148, 136, 68, 68, 54, 49, 46, 33, 215, 213, 200, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 255, 253, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 80, 78, 65, 32, 30, 17, 46, 40, 31, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 85, 85, 76, 91, 90, 82, 95, 94, 85, 91, 90, 82, 109, 106, 98, 107, 104, 94, 108, 105, 95, 104, 102, 91, 122, 119, 109, 132, 126, 117, 191, 186, 174, 255, 255, 255, 206, 201, 191, 137, 132, 123, 108, 103, 94, 108, 103, 94, 126, 121, 111, 119, 114, 104, 92, 87, 77, 97, 91, 82, 109, 106, 96, 108, 103, 94, 122, 117, 107, 157, 152, 142, 175, 169, 160, 203, 197, 188, 122, 117, 107, 174, 168, 159, 208, 205, 195, 205, 201, 193, 213, 210, 199, 213, 209, 201, 215, 212, 204, 225, 222, 216, 228, 224, 219, 225, 223, 222, 227, 226, 222, 232, 230, 228, 232, 230, 226, 232, 230, 228, 232, 230, 228, 236, 234, 232, 233, 231, 230, 233, 232, 228, 233, 232, 228, 234, 235, 230, 229, 230, 225, 227, 228, 223, 225, 225, 221, 228, 229, 224, 229, 231, 223, 145, 147, 135, 45, 47, 37, 121, 123, 111, 106, 108, 96, 113, 115, 103, 89, 89, 75, 66, 65, 52, 105, 105, 92, 68, 68, 54, 83, 83, 69, 82, 82, 68, 70, 70, 57, 114, 111, 98, 78, 75, 62, 85, 82, 69, 101, 98, 86, 83, 81, 70, 77, 74, 61, 93, 90, 78, 107, 104, 92, 75, 73, 60, 94, 92, 79, 85, 82, 69, 92, 92, 79, 95, 93, 78, 73, 71, 58, 87, 85, 72, 88, 86, 73, 
53, 44, 132, 131, 122, 62, 61, 52, 73, 72, 63, 73, 71, 65, 69, 68, 62, 70, 69, 63, 60, 62, 54, 52, 54, 46, 57, 58, 51, 48, 52, 44, 61, 65, 57, 61, 65, 57, 67, 71, 63, 73, 76, 68, 7, 10, 2, 0, 0, 0, 52, 54, 44, 37, 39, 29, 79, 81, 71, 56, 57, 48, 59, 61, 51, 72, 74, 64, 65, 66, 59, 76, 80, 72, 92, 93, 86, 81, 85, 77, 102, 106, 98, 118, 118, 114, 108, 109, 104, 111, 111, 109, 101, 102, 97, 83, 83, 81, 10, 11, 8, 0, 0, 0, 27, 27, 25, 27, 28, 23, 45, 46, 41, 45, 46, 41, 95, 96, 91, 88, 91, 88, 76, 79, 76, 100, 102, 101, 115, 117, 116, 138, 140, 140, 135, 137, 136, 163, 165, 162, 138, 140, 140, 136, 138, 135, 128, 131, 125, 132, 135, 130, 38, 40, 32, 42, 43, 36, 0, 0, 0, 13, 14, 7, 37, 38, 31, 105, 106, 99, 138, 140, 132, 150, 151, 144, 137, 139, 131, 137, 139, 131, 140, 139, 132, 140, 138, 134, 133, 131, 128, 123, 121, 117, 125, 126, 119, 104, 105, 98, 114, 116, 106, 40, 41, 32, 95, 97, 89, 0, 0, 0, 7, 10, 2, 43, 44, 37, 23, 24, 17, 51, 52, 45, 74, 76, 68, 116, 115, 106, 121, 121, 112, 125, 126, 117, 120, 119, 111, 114, 114, 103, 105, 106, 97, 98, 99, 92, 83, 87, 81, 71, 74, 66, 64, 65, 58, 73, 74, 67, 55, 54, 46, 0, 0, 0, 0, 1, 0, 8, 9, 2, 17, 19, 11, 37, 38, 31, 58, 59, 52, 63, 64, 57, 62, 63, 56, 59, 61, 53, 49, 50, 45, 53, 54, 49, 52, 54, 46, 49, 50, 43, 51, 52, 45, 52, 54, 46, 63, 64, 57, 55, 56, 51, 56, 57, 52, 48, 49, 44, 0, 0, 0, 0, 0, 0, 30, 33, 27, 11, 14, 9, 110, 114, 106, 45, 48, 41, 52, 55, 48, 52, 55, 48, 45, 49, 39, 48, 52, 44, 55, 56, 47, 42, 45, 37, 53, 56, 51, 51, 54, 48, 53, 56, 51, 61, 65, 57, 59, 63, 52, 40, 44, 34, 35, 36, 27, 126, 125, 117, 144, 143, 134, 140, 139, 130, 145, 144, 135, 131, 130, 121, 0, 0, 0, 0, 2, 0, 48, 52, 42, 23, 25, 15, 28, 31, 23, 15, 17, 7, 102, 104, 94, 77, 76, 68, 88, 85, 77, 83, 84, 75, 90, 89, 80, 85, 85, 76, 92, 92, 81, 77, 77, 65, 75, 73, 60, 89, 87, 74, 72, 69, 59, 70, 70, 59, 74, 76, 64, 72, 74, 62, 64, 66, 54, 88, 90, 78, 65, 67, 55, 98, 99, 90, 48, 49, 42, 53, 57, 49, 70, 71, 64, 59, 62, 54, 31, 35, 27, 0, 0, 0, 94, 96, 88, 116, 118, 110, 131, 133, 126, 123, 125, 117, 140, 141, 134, 143, 144, 137, 133, 133, 131, 136, 136, 134, 128, 130, 127, 122, 125, 121, 131, 133, 133, 133, 136, 135, 136, 138, 137, 139, 141, 141, 138, 140, 140, 137, 139, 140, 131, 133, 135, 127, 128, 130, 129, 131, 132, 135, 136, 138, 136, 138, 139, 137, 139, 140, 125, 127, 129, 127, 128, 132, 137, 137, 139, 128, 129, 131, 125, 127, 129, 157, 157, 159, 120, 120, 120, 119, 119, 117, 127, 127, 125, 123, 124, 121, 128, 128, 126, 125, 125, 123, 133, 133, 129, 120, 121, 116, 123, 124, 119, 119, 120, 115, 127, 126, 120, 0, 0, 0, 0, 0, 0, 83, 83, 71, 86, 83, 70, 155, 152, 141, 53, 50, 40, 50, 47, 34, 54, 52, 41, 61, 60, 49, 43, 43, 32, 52, 51, 40, 53, 53, 39, 234, 231, 218, 162, 162, 149, 143, 140, 128, 54, 53, 42, 66, 65, 56, 59, 58, 49, 56, 55, 47, 62, 61, 52, 6, 5, 0, 0, 0, 0, 38, 37, 28, 47, 46, 37, 42, 42, 31, 81, 80, 69, 66, 65, 56, 56, 55, 47, 73, 72, 63, 63, 62, 54, 57, 57, 48, 123, 122, 113, 109, 111, 101, 109, 111, 101, 71, 73, 63, 68, 72, 62, 53, 57, 47, 74, 78, 68, 69, 73, 65, 17, 21, 13, 0, 0, 0, 56, 57, 48, 42, 43, 34, 59, 61, 51, 131, 133, 124, 78, 80, 70, 72, 74, 64, 78, 80, 70, 77, 78, 69, 72, 74, 64, 88, 90, 80, 81, 83, 73, 90, 91, 82, 68, 72, 64, 73, 76, 68, 67, 69, 62, 70, 71, 66, 64, 65, 60, 49, 50, 45, 30, 31, 26, 27, 28, 23, 53, 55, 48, 47, 48, 41, 106, 107, 102, 77, 78, 73, 65, 68, 62, 50, 53, 47, 66, 69, 64, 66, 69, 64, 68, 71, 66, 67, 71, 63, 61, 64, 59, 59, 62, 54, 64, 67, 59, 64, 65, 60, 50, 51, 44, 12, 13, 6, 9, 10, 3, 6, 7, 0, 2, 4, 0, 131, 130, 
121, 78, 80, 70, 81, 80, 71, 70, 71, 62, 77, 76, 68, 84, 83, 75, 83, 83, 71, 87, 86, 75, 82, 81, 70, 84, 84, 72, 81, 83, 71, 62, 64, 52, 81, 83, 71, 58, 60, 50, 0, 0, 0, 3, 7, 0, 31, 33, 23, 8, 10, 0, 122, 124, 114, 65, 67, 55, 76, 77, 68, 82, 81, 70, 75, 74, 63, 84, 84, 72, 88, 87, 76, 78, 78, 67, 71, 73, 61, 73, 75, 65, 69, 70, 61, 66, 68, 58, 67, 69, 62, 48, 49, 40, 17, 19, 11, 0, 0, 0, 0, 0, 0, 7, 8, 1, 91, 92, 85, 66, 67, 60, 63, 62, 54, 63, 62, 54, 61, 60, 51, 96, 95, 86, 62, 61, 52, 67, 66, 57, 77, 79, 67, 58, 60, 50, 65, 67, 57, 64, 66, 56, 58, 60, 50, 74, 76, 68, 66, 67, 62, 0, 0, 0, 7, 10, 4, 9, 12, 4, 30, 33, 27, 35, 38, 32, 55, 59, 51, 55, 59, 51, 55, 59, 51, 57, 61, 50, 57, 61, 50, 56, 57, 48, 57, 59, 49, 57, 61, 50, 59, 61, 51, 54, 58, 48, 59, 63, 52, 46, 50, 40, 60, 62, 52, 88, 87, 78, 102, 101, 92, 146, 145, 136, 149, 148, 140, 147, 148, 139, 141, 140, 132, 70, 69, 61, 0, 0, 0, 22, 26, 15, 35, 34, 26, 34, 33, 25, 128, 128, 119, 67, 64, 54, 77, 76, 68, 87, 86, 77, 78, 78, 69, 75, 74, 63, 80, 79, 70, 78, 78, 67, 81, 80, 69, 88, 81, 69, 69, 62, 51, 138, 135, 125, 255, 255, 243, 66, 69, 54, 72, 74, 62, 76, 76, 64, 74, 76, 64, 67, 69, 57, 109, 111, 101, 144, 146, 136, 59, 61, 51, 60, 64, 56, 38, 40, 32, 17, 19, 11, 40, 41, 34, 111, 112, 105, 122, 123, 116, 131, 133, 126, 141, 142, 135, 141, 142, 135, 133, 133, 129, 132, 135, 132, 130, 131, 128, 141, 141, 139, 133, 136, 133, 121, 123, 122, 132, 135, 134, 129, 131, 130, 132, 135, 134, 139, 141, 141, 137, 139, 140, 130, 132, 133, 135, 136, 138, 135, 136, 138, 127, 128, 130, 135, 136, 140, 125, 127, 129, 124, 126, 128, 125, 127, 131, 131, 131, 133, 128, 129, 131, 129, 131, 132, 127, 128, 130, 126, 125, 128, 129, 129, 129, 119, 119, 119, 126, 126, 126, 127, 127, 125, 139, 137, 135, 128, 126, 125, 128, 126, 125, 124, 122, 118, 120, 119, 115, 140, 139, 132, 13, 12, 4, 0, 0, 0, 59, 56, 46, 93, 90, 78, 32, 30, 17, 43, 40, 27, 50, 47, 36, 74, 68, 59, 57, 54, 43, 54, 52, 41, 53, 50, 40, 56, 53, 40, 217, 215, 202, 255, 255, 245, 253, 252, 241, 64, 64, 53, 62, 61, 52, 66, 65, 56, 67, 66, 57, 54, 53, 44, 48, 48, 36, 42, 42, 31, 10, 9, 0, 73, 72, 61, 94, 93, 82, 59, 58, 49, 71, 71, 62, 62, 61, 52, 55, 54, 46, 63, 62, 54, 62, 61, 52, 202, 204, 194, 60, 62, 52, 223, 225, 216, 65, 69, 58, 61, 65, 55, 60, 64, 56, 68, 72, 64, 60, 64, 56, 60, 64, 56, 1, 3, 0, 37, 39, 29, 44, 46, 36, 38, 40, 30, 81, 83, 73, 70, 71, 62, 70, 71, 64, 66, 68, 58, 69, 70, 61, 72, 74, 64, 249, 251, 241, 176, 177, 168, 255, 255, 254, 65, 69, 58, 66, 69, 62, 62, 66, 56, 76, 80, 70, 69, 70, 63, 60, 62, 54, 9, 10, 3, 30, 31, 24, 38, 40, 32, 118, 119, 112, 76, 77, 70, 65, 66, 61, 65, 68, 62, 62, 62, 58, 62, 65, 60, 46, 50, 42, 210, 214, 206, 206, 210, 199, 106, 109, 99, 54, 58, 50, 64, 65, 58, 62, 63, 56, 67, 69, 62, 51, 52, 45, 3, 5, 0, 21, 22, 15, 77, 76, 68, 82, 81, 72, 75, 74, 65, 76, 76, 64, 71, 71, 60, 83, 83, 69, 226, 226, 213, 167, 167, 153, 178, 178, 165, 240, 240, 226, 66, 69, 54, 74, 76, 64, 73, 75, 63, 79, 81, 69, 64, 66, 54, 0, 0, 0, 0, 0, 0, 23, 27, 16, 44, 48, 37, 69, 70, 61, 76, 77, 68, 74, 73, 64, 78, 78, 67, 72, 74, 62, 239, 238, 227, 170, 170, 159, 159, 158, 147, 148, 148, 136, 62, 63, 54, 63, 64, 55, 73, 74, 67, 69, 70, 63, 72, 74, 64, 66, 67, 60, 0, 0, 0, 0, 0, 0, 69, 70, 61, 52, 54, 44, 53, 55, 46, 59, 58, 49, 70, 69, 61, 41, 40, 32, 189, 188, 179, 131, 130, 121, 71, 71, 62, 158, 157, 146, 64, 64, 55, 50, 52, 42, 50, 51, 44, 55, 56, 47, 70, 71, 64, 77, 78, 73, 0, 0, 0, 17, 20, 15, 14, 17, 9, 107, 110, 104, 47, 50, 45, 48, 52, 44, 48, 52, 44, 61, 65, 57, 
44, 47, 39, 57, 61, 50, 37, 41, 30, 59, 62, 54, 55, 59, 49, 55, 59, 49, 55, 56, 47, 74, 78, 68, 59, 61, 51, 59, 61, 51, 76, 77, 68, 33, 32, 23, 135, 135, 126, 131, 130, 119, 148, 149, 140, 183, 182, 174, 87, 86, 77, 0, 0, 0, 20, 21, 12, 21, 20, 12, 53, 50, 40, 60, 59, 48, 80, 79, 68, 76, 75, 66, 67, 69, 57, 66, 68, 56, 74, 73, 62, 73, 72, 61, 65, 67, 55, 73, 75, 63, 91, 86, 72, 61, 59, 44, 166, 164, 151, 102, 99, 89, 77, 74, 63, 71, 71, 60, 81, 80, 69, 70, 70, 59, 76, 77, 68, 66, 68, 56, 83, 85, 72, 94, 98, 87, 59, 61, 51, 0, 0, 0, 0, 0, 0, 153, 154, 147, 115, 116, 109, 118, 119, 112, 130, 132, 124, 131, 133, 126, 130, 133, 128, 138, 139, 134, 137, 138, 135, 141, 141, 139, 130, 133, 129, 136, 138, 135, 137, 140, 136, 131, 133, 133, 136, 138, 137, 135, 137, 136, 132, 135, 134, 136, 138, 139, 128, 129, 131, 137, 139, 140, 132, 134, 136, 153, 155, 157, 125, 127, 129, 127, 128, 130, 125, 127, 129, 125, 127, 129, 131, 133, 135, 130, 132, 133, 129, 131, 132, 123, 125, 127, 123, 123, 125, 126, 126, 126, 130, 130, 130, 120, 120, 120, 128, 128, 128, 126, 124, 123, 118, 116, 114, 123, 121, 119, 144, 142, 138, 131, 129, 125, 151, 153, 145, 41, 42, 33, 0, 0, 0, 91, 91, 80, 162, 162, 151, 58, 55, 45, 64, 61, 50, 37, 34, 24, 56, 53, 42, 58, 55, 45, 54, 52, 41, 67, 64, 54, 52, 51, 40, 49, 49, 36, 48, 50, 36, 58, 60, 48, 55, 55, 43, 44, 46, 34, 60, 62, 52, 60, 59, 50, 56, 55, 47, 67, 66, 57, 33, 32, 23, 0, 0, 0, 47, 46, 35, 113, 113, 102, 61, 58, 50, 57, 57, 48, 61, 60, 51, 61, 60, 51, 56, 55, 47, 53, 52, 43, 85, 87, 77, 64, 66, 56, 50, 54, 43, 61, 65, 55, 51, 54, 46, 58, 61, 53, 55, 59, 51, 29, 33, 22, 29, 32, 24, 8, 9, 2, 0, 0, 0, 38, 40, 30, 133, 134, 125, 58, 59, 52, 65, 67, 57, 66, 68, 58, 67, 69, 60, 80, 82, 72, 58, 60, 50, 229, 231, 221, 232, 233, 226, 248, 249, 242, 75, 79, 71, 72, 76, 65, 69, 70, 61, 63, 64, 57, 63, 64, 57, 53, 55, 48, 0, 0, 0, 28, 29, 22, 10, 12, 4, 92, 93, 86, 45, 47, 39, 57, 58, 53, 55, 59, 51, 56, 57, 50, 51, 54, 46, 51, 55, 44, 157, 161, 150, 220, 223, 213, 96, 100, 90, 79, 82, 74, 52, 55, 48, 60, 64, 56, 51, 52, 45, 59, 61, 51, 5, 6, 0, 31, 33, 23, 118, 117, 108, 61, 60, 51, 67, 66, 55, 82, 81, 70, 70, 69, 61, 77, 77, 65, 234, 234, 221, 69, 69, 55, 222, 221, 208, 232, 232, 218, 66, 68, 56, 81, 83, 71, 76, 77, 68, 70, 71, 62, 80, 82, 72, 50, 52, 42, 0, 0, 0, 14, 17, 7, 111, 115, 105, 69, 70, 61, 59, 61, 51, 55, 57, 45, 61, 60, 49, 54, 53, 42, 174, 173, 162, 71, 71, 60, 64, 64, 53, 61, 60, 49, 69, 68, 60, 63, 64, 55, 86, 88, 78, 71, 73, 63, 76, 77, 68, 69, 70, 61, 0, 0, 0, 0, 0, 0, 64, 66, 56, 61, 60, 51, 54, 53, 44, 55, 54, 46, 61, 60, 51, 73, 72, 63, 35, 34, 26, 94, 93, 82, 124, 124, 110, 41, 41, 29, 50, 52, 42, 41, 42, 33, 47, 48, 41, 40, 41, 32, 44, 45, 38, 64, 65, 58, 0, 0, 0, 0, 2, 0, 118, 124, 117, 37, 40, 32, 52, 55, 49, 58, 61, 53, 51, 54, 46, 52, 55, 48, 50, 54, 43, 36, 40, 29, 62, 66, 56, 59, 63, 52, 55, 59, 49, 47, 51, 41, 65, 67, 57, 57, 61, 50, 62, 63, 56, 67, 69, 60, 71, 73, 63, 11, 10, 1, 125, 124, 115, 133, 132, 124, 144, 146, 136, 147, 146, 137, 117, 116, 107, 0, 0, 0, 37, 39, 29, 33, 30, 22, 72, 69, 59, 77, 74, 63, 63, 60, 49, 73, 72, 61, 68, 67, 58, 66, 65, 54, 66, 65, 54, 73, 72, 61, 75, 74, 63, 64, 64, 53, 97, 97, 83, 67, 67, 53, 97, 96, 85, 68, 68, 54, 71, 71, 58, 67, 69, 57, 80, 82, 70, 58, 60, 48, 58, 62, 49, 72, 76, 65, 64, 68, 55, 91, 92, 83, 31, 35, 25, 0, 0, 0, 72, 75, 67, 81, 83, 73, 122, 123, 116, 136, 137, 130, 120, 121, 114, 129, 132, 124, 139, 142, 137, 131, 134, 129, 140, 143, 140, 127, 129, 126, 128, 130, 127, 139, 142, 139, 125, 128, 125, 
131, 133, 133, 132, 135, 134, 138, 140, 140, 129, 129, 129, 133, 132, 135, 141, 140, 143, 141, 140, 143, 136, 136, 138, 135, 135, 137, 133, 132, 135, 134, 133, 138, 129, 131, 132, 128, 129, 131, 129, 131, 132, 128, 129, 131, 131, 133, 135, 125, 127, 129, 127, 128, 130, 125, 127, 129, 129, 129, 129, 130, 130, 130, 126, 126, 126, 126, 124, 125, 134, 132, 131, 131, 132, 129, 131, 129, 125, 130, 128, 124, 126, 125, 121, 65, 66, 59, 0, 0, 0, 75, 74, 65, 51, 53, 41, 60, 59, 48, 41, 41, 29, 40, 40, 26, 46, 44, 31, 65, 62, 52, 49, 46, 35, 80, 77, 67, 64, 64, 51, 81, 81, 67, 57, 59, 45, 55, 57, 45, 67, 69, 60, 57, 59, 49, 42, 43, 34, 47, 48, 39, 54, 53, 44, 53, 55, 43, 37, 39, 29, 0, 0, 0, 85, 87, 77, 43, 45, 33, 53, 52, 41, 57, 57, 48, 52, 51, 42, 50, 49, 43, 50, 51, 44, 45, 43, 37, 52, 54, 44, 38, 40, 30, 51, 55, 44, 55, 59, 49, 50, 54, 43, 57, 60, 52, 25, 29, 19, 42, 45, 37, 46, 50, 42, 48, 52, 44, 0, 0, 0, 50, 52, 42, 79, 81, 71, 67, 69, 60, 55, 56, 47, 55, 56, 47, 77, 78, 69, 50, 52, 42, 53, 55, 46, 64, 66, 56, 72, 74, 64, 73, 75, 65, 70, 71, 64, 78, 81, 71, 60, 64, 54, 58, 59, 52, 65, 66, 59, 47, 51, 43, 0, 0, 0, 27, 28, 21, 104, 105, 98, 53, 55, 48, 52, 54, 46, 50, 51, 46, 51, 52, 45, 60, 64, 56, 54, 58, 50, 51, 54, 46, 46, 50, 42, 48, 52, 42, 54, 58, 50, 62, 66, 58, 68, 72, 64, 44, 47, 39, 55, 59, 51, 66, 68, 58, 13, 14, 5, 122, 124, 114, 70, 71, 62, 80, 79, 70, 64, 64, 55, 61, 60, 49, 94, 93, 82, 69, 69, 57, 63, 63, 50, 59, 58, 45, 67, 67, 53, 64, 66, 52, 71, 73, 61, 77, 79, 67, 72, 74, 62, 77, 79, 67, 87, 89, 77, 63, 64, 57, 0, 0, 0, 10, 12, 2, 50, 54, 43, 62, 63, 54, 59, 61, 51, 63, 64, 55, 74, 76, 66, 57, 57, 48, 62, 62, 50, 62, 62, 50, 52, 51, 42, 61, 60, 49, 86, 88, 78, 55, 56, 47, 66, 68, 58, 60, 62, 52, 45, 47, 37, 64, 66, 56, 0, 0, 0, 108, 110, 100, 48, 47, 39, 50, 50, 41, 61, 60, 51, 56, 55, 47, 45, 44, 35, 55, 54, 46, 47, 48, 39, 54, 53, 44, 44, 46, 36, 36, 40, 27, 50, 52, 42, 51, 52, 45, 74, 76, 66, 67, 69, 60, 69, 70, 61, 58, 61, 53, 0, 0, 0, 38, 41, 34, 59, 64, 56, 44, 47, 39, 45, 48, 41, 39, 43, 33, 39, 43, 35, 40, 44, 36, 54, 58, 48, 51, 55, 44, 46, 50, 40, 44, 46, 36, 42, 45, 35, 55, 56, 47, 54, 58, 50, 57, 61, 50, 56, 57, 50, 63, 64, 55, 72, 74, 64, 0, 0, 0, 119, 118, 110, 129, 131, 121, 121, 123, 113, 146, 145, 136, 149, 148, 140, 3, 2, 0, 18, 17, 8, 117, 114, 104, 63, 59, 51, 70, 67, 56, 66, 63, 53, 68, 66, 55, 50, 50, 41, 67, 66, 57, 54, 53, 44, 64, 64, 55, 69, 69, 57, 64, 64, 53, 10, 14, 6, 35, 36, 27, 0, 0, 0, 2, 4, 0, 7, 9, 0, 3, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 74, 76, 66, 93, 96, 88, 114, 117, 109, 122, 123, 116, 130, 133, 126, 130, 133, 128, 133, 139, 132, 133, 137, 131, 132, 135, 130, 131, 134, 131, 137, 140, 134, 132, 135, 132, 133, 136, 133, 144, 147, 143, 132, 135, 134, 135, 137, 136, 135, 135, 137, 126, 125, 128, 140, 139, 142, 137, 137, 139, 128, 128, 130, 143, 143, 145, 137, 137, 139, 131, 131, 133, 129, 129, 131, 131, 133, 135, 129, 131, 132, 125, 127, 129, 125, 127, 129, 130, 130, 132, 139, 141, 143, 121, 123, 124, 128, 129, 131, 128, 130, 129, 128, 128, 128, 128, 128, 128, 121, 121, 121, 127, 125, 124, 126, 126, 124, 131, 129, 125, 132, 130, 126, 130, 131, 126, 140, 140, 136, 0, 0, 0, 8, 9, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 3, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 16, 9, 12, 13, 6, 9, 11, 1, 10, 12, 2, 20, 21, 14, 15, 16, 9, 30, 31, 24, 16, 19, 11, 10, 14, 6, 26, 30, 22, 52, 55, 48, 45, 48, 41, 33, 
37, 29, 26, 30, 22, 28, 31, 23, 33, 37, 29, 23, 26, 18, 0, 0, 0, 47, 51, 43, 31, 35, 27, 46, 49, 44, 51, 54, 46, 42, 45, 37, 43, 46, 38, 51, 54, 46, 53, 57, 49, 60, 64, 56, 61, 65, 57, 67, 71, 61, 53, 57, 49, 58, 60, 50, 76, 80, 70, 65, 69, 58, 42, 45, 35, 65, 66, 59, 6, 7, 0, 0, 0, 0, 35, 36, 29, 55, 56, 49, 50, 51, 44, 64, 65, 58, 58, 61, 53, 61, 65, 57, 59, 63, 52, 61, 65, 57, 55, 59, 49, 72, 76, 65, 62, 66, 56, 76, 80, 70, 45, 49, 39, 51, 54, 46, 59, 63, 52, 65, 69, 58, 12, 13, 4, 85, 87, 77, 55, 56, 47, 64, 64, 55, 52, 51, 42, 63, 62, 54, 75, 74, 65, 66, 65, 56, 53, 55, 43, 56, 58, 46, 56, 58, 46, 57, 61, 48, 58, 60, 48, 66, 68, 58, 53, 55, 46, 53, 55, 46, 62, 63, 54, 52, 55, 48, 0, 0, 0, 82, 86, 76, 45, 48, 41, 45, 48, 41, 35, 38, 30, 26, 30, 20, 45, 47, 39, 31, 29, 23, 48, 47, 39, 35, 34, 26, 16, 15, 6, 20, 21, 12, 17, 19, 9, 20, 19, 11, 27, 28, 19, 13, 12, 4, 6, 7, 0, 0, 0, 0, 0, 0, 0, 16, 18, 8, 24, 26, 16, 10, 12, 2, 9, 11, 1, 12, 11, 4, 0, 0, 0, 0, 0, 0, 4, 3, 0, 0, 0, 0, 90, 69, 60, 3, 0, 0, 39, 36, 28, 2, 0, 0, 0, 0, 0, 0, 0, 0, 4, 7, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 7, 0, 1, 4, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 8, 11, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 105, 96, 135, 137, 127, 122, 124, 114, 132, 131, 122, 152, 151, 142, 59, 58, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 125, 130, 115, 113, 112, 104, 125, 128, 123, 112, 120, 110, 111, 117, 108, 118, 119, 110, 124, 123, 114, 127, 126, 120, 112, 111, 105, 108, 106, 100, 130, 128, 122, 113, 114, 105, 113, 112, 104, 108, 106, 100, 114, 115, 108, 127, 128, 121, 127, 130, 122, 136, 139, 131, 129, 132, 126, 127, 133, 126, 131, 136, 130, 126, 132, 125, 132, 135, 130, 129, 131, 128, 135, 137, 134, 130, 133, 129, 132, 135, 132, 134, 134, 134, 127, 127, 127, 123, 123, 123, 130, 130, 130, 141, 141, 141, 129, 129, 131, 127, 127, 129, 140, 139, 142, 134, 133, 136, 135, 135, 137, 134, 133, 136, 136, 136, 138, 128, 129, 131, 177, 176, 179, 125, 124, 127, 136, 138, 139, 128, 128, 130, 118, 120, 122, 132, 134, 136, 126, 125, 128, 126, 126, 126, 126, 126, 126, 120, 120, 120, 137, 134, 135, 127, 127, 127, 129, 129, 129, 125, 123, 121, 119, 119, 117, 144, 147, 143, 122, 125, 121, 133, 136, 135, 107, 107, 107, 92, 93, 88, 89, 87, 83, 103, 102, 93, 106, 105, 99, 92, 91, 85, 111, 110, 103, 109, 107, 101, 135, 134, 130, 102, 100, 94, 115, 116, 109, 112, 113, 108, 108, 109, 102, 86, 87, 82, 108, 109, 102, 111, 109, 105, 124, 122, 118, 119, 119, 117, 136, 136, 134, 131, 133, 135, 121, 123, 124, 107, 108, 105, 102, 105, 100, 96, 100, 92, 97, 98, 91, 81, 82, 77, 85, 86, 81, 67, 70, 65, 66, 69, 64, 67, 70, 65, 55, 56, 51, 56, 57, 52, 39, 42, 37, 47, 48, 41, 38, 41, 34, 23, 26, 18, 40, 44, 36, 52, 55, 49, 0, 1, 0, 9, 12, 8, 8, 10, 7, 29, 34, 33, 3, 6, 3, 0, 0, 0, 21, 26, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 4, 0, 0, 1, 0, 9, 12, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 1, 0, 16, 17, 10, 8, 11, 3, 0, 3, 0, 5, 9, 3, 14, 17, 9, 0, 0, 0, 2, 5, 0, 0, 0, 0, 5, 9, 1, 0, 2, 0, 7, 10, 4, 7, 10, 4, 29, 32, 26, 17, 18, 13, 26, 29, 24, 17, 20, 15, 19, 23, 15, 27, 28, 23, 33, 33, 29, 50, 51, 44, 66, 68, 58, 51, 53, 43, 52, 54, 44, 66, 68, 58, 65, 67, 57, 98, 99, 92, 100, 101, 94, 95, 96, 91, 97, 97, 93, 113, 
114, 107, 94, 96, 88, 81, 83, 73, 78, 79, 72, 77, 78, 69, 94, 92, 86, 95, 96, 91, 92, 92, 90, 90, 90, 88, 71, 71, 69, 69, 74, 72, 86, 86, 86, 83, 83, 83, 81, 81, 81, 92, 92, 92, 101, 103, 102, 103, 107, 108, 116, 120, 121, 109, 116, 118, 96, 103, 103, 72, 79, 79, 77, 84, 81, 77, 82, 80, 85, 90, 88, 76, 81, 79, 78, 83, 81, 98, 103, 101, 95, 100, 96, 93, 98, 97, 76, 81, 77, 76, 81, 77, 93, 95, 92, 80, 82, 81, 109, 111, 113, 128, 128, 126, 129, 129, 127, 142, 143, 136, 140, 144, 136, 140, 141, 134, 136, 137, 130, 129, 131, 121, 137, 136, 127, 138, 137, 128, 140, 139, 130, 132, 131, 122, 121, 118, 110, 152, 149, 139, 125, 123, 110, 124, 122, 109, 137, 132, 121, 134, 131, 116, 115, 110, 98, 127, 124, 113, 131, 128, 118, 131, 128, 120, 123, 120, 112, 97, 97, 93, 121, 122, 117, 93, 99, 95, 88, 96, 93, 91, 98, 95, 96, 100, 101, 103, 107, 108, 116, 118, 120, 128, 129, 133, 94, 96, 97, 125, 125, 123, 126, 126, 124, 120, 121, 114, 122, 123, 116, 133, 134, 127, 142, 145, 139, 127, 133, 126, 125, 130, 124, 124, 129, 123, 126, 132, 125, 125, 128, 123, 121, 124, 118, 131, 134, 131, 134, 135, 130, 128, 128, 126, 135, 135, 133, 140, 140, 140, 134, 134, 134, 127, 127, 127, 134, 133, 136, 126, 125, 128, 128, 128, 128, 131, 131, 133, 176, 175, 178, 135, 135, 137, 131, 131, 133, 127, 127, 129, 125, 124, 127, 129, 129, 131, 138, 140, 142, 139, 141, 143, 124, 126, 128, 135, 135, 137, 117, 119, 121, 117, 119, 123, 123, 125, 127, 119, 118, 121, 125, 124, 127, 120, 119, 122, 135, 135, 137, 127, 127, 129, 121, 121, 123, 129, 129, 131, 121, 121, 121, 118, 117, 120, 130, 132, 133, 121, 122, 126, 124, 126, 130, 118, 120, 124, 111, 113, 117, 109, 111, 115, 109, 111, 115, 120, 119, 124, 118, 117, 122, 114, 115, 119, 108, 109, 113, 107, 108, 112, 119, 118, 123, 107, 108, 112, 107, 108, 112, 100, 100, 102, 127, 125, 124, 121, 121, 119, 123, 121, 119, 125, 123, 121, 133, 133, 133, 135, 135, 137, 51, 52, 56, 130, 131, 139, 128, 129, 135, 130, 131, 139, 128, 128, 137, 123, 124, 132, 115, 116, 124, 109, 110, 119, 109, 110, 117, 107, 111, 117, 103, 106, 112, 99, 103, 106, 101, 103, 104, 97, 100, 99, 123, 124, 121, 130, 132, 124, 116, 117, 112, 142, 143, 138, 161, 161, 157, 137, 139, 131, 31, 34, 31, 144, 146, 147, 139, 141, 143, 138, 140, 144, 136, 137, 141, 129, 133, 137, 125, 127, 131, 116, 120, 123, 119, 123, 128, 116, 119, 125, 113, 116, 124, 119, 122, 130, 114, 118, 124, 99, 104, 104, 136, 139, 133, 135, 136, 129, 143, 144, 137, 137, 138, 133, 138, 140, 132, 105, 106, 101, 111, 114, 111, 104, 107, 106, 102, 105, 102, 109, 111, 111, 114, 116, 115, 132, 135, 130, 117, 120, 115, 104, 107, 104, 111, 114, 111, 106, 108, 105, 110, 113, 108, 104, 107, 102, 133, 133, 129, 137, 138, 133, 138, 140, 132, 136, 137, 130, 123, 125, 117, 123, 125, 117, 81, 82, 77, 99, 99, 97, 114, 114, 112, 109, 109, 109, 111, 111, 111, 106, 108, 107, 118, 121, 120, 102, 104, 104, 96, 98, 100, 103, 105, 107, 102, 104, 104, 111, 113, 115, 103, 105, 107, 136, 138, 139, 135, 137, 136, 129, 131, 130, 113, 115, 112, 120, 120, 118, 123, 126, 123, 86, 88, 87, 101, 103, 102, 113, 114, 116, 102, 104, 106, 102, 104, 106, 101, 103, 104, 110, 112, 114, 99, 100, 104, 130, 131, 135, 103, 105, 109, 99, 100, 106, 88, 90, 94, 99, 100, 104, 132, 135, 134, 125, 125, 121, 104, 104, 102, 107, 108, 103, 113, 112, 108, 97, 97, 93, 28, 28, 26, 124, 126, 126, 124, 128, 129, 125, 129, 132, 124, 128, 131, 123, 127, 130, 124, 127, 133, 128, 132, 137, 124, 127, 133, 116, 122, 127, 120, 126, 130, 121, 127, 134, 120, 125, 132, 122, 130, 136, 128, 133, 141, 123, 129, 136, 115, 121, 
128, 123, 129, 134, 121, 127, 132, 114, 120, 125, 119, 125, 129, 113, 119, 124, 110, 116, 121, 116, 122, 127, 117, 120, 128, 113, 117, 122, 113, 117, 122, 109, 112, 118, 114, 118, 124, 113, 117, 122, 108, 110, 111, 113, 118, 114, 85, 88, 80, 137, 140, 132, 134, 135, 128, 136, 137, 130, 136, 137, 130, 137, 136, 127, 134, 133, 127, 117, 114, 106, 151, 148, 140, 146, 143, 135, 143, 140, 132, 153, 151, 140, 165, 160, 153, 38, 35, 27, 123, 119, 116, 98, 96, 93, 116, 112, 109, 121, 117, 114, 126, 119, 117, 116, 117, 114, 86, 86, 86, 90, 94, 95, 84, 90, 90, 88, 92, 93, 93, 95, 96, 96, 98, 100, 97, 99, 103, 96, 98, 102, 90, 92, 98, 132, 134, 138, 51, 52, 58, 128, 130, 129, 131, 132, 129, 125, 128, 123, 131, 134, 129, 119, 125, 118, 118, 123, 119, 119, 125, 118, 121, 127, 123, 113, 118, 114, 118, 124, 117, 128, 130, 127, 130, 133, 128, 122, 125, 121, 130, 131, 128, 123, 123, 123, 134, 134, 134, 130, 130, 130, 129, 129, 129, 125, 125, 125, 133, 133, 133, 133, 132, 135, 134, 133, 136, 126, 125, 128, 129, 128, 133, 127, 126, 131, 131, 131, 135, 126, 125, 128, 131, 131, 133, 133, 132, 135, 138, 138, 140, 121, 123, 124, 115, 116, 120, 116, 118, 122, 115, 116, 120, 116, 118, 122, 108, 109, 113, 117, 121, 124, 120, 124, 127, 112, 116, 119, 98, 102, 105, 104, 108, 111, 102, 106, 109, 102, 106, 109, 98, 102, 105, 97, 101, 104, 100, 104, 110, 107, 108, 114, 101, 102, 108, 102, 103, 112, 96, 97, 106, 95, 96, 105, 97, 98, 107, 101, 102, 110, 106, 106, 115, 106, 107, 113, 109, 110, 117, 107, 108, 114, 106, 110, 115, 128, 129, 135, 81, 82, 79, 129, 129, 127, 162, 161, 157, 119, 120, 115, 121, 121, 119, 35, 35, 33, 25, 28, 27, 136, 137, 143, 127, 127, 136, 124, 125, 136, 122, 123, 131, 120, 120, 131, 111, 112, 123, 115, 115, 126, 102, 104, 115, 99, 102, 112, 95, 97, 108, 99, 102, 112, 92, 96, 103, 100, 99, 104, 116, 116, 116, 133, 133, 129, 141, 142, 135, 147, 148, 141, 134, 135, 130, 37, 38, 33, 14, 16, 13, 123, 125, 127, 130, 132, 133, 135, 136, 140, 135, 136, 140, 130, 131, 135, 126, 130, 135, 120, 124, 129, 125, 128, 136, 113, 116, 126, 113, 116, 126, 109, 111, 122, 105, 108, 118, 113, 117, 122, 146, 149, 146, 140, 144, 136, 145, 148, 143, 146, 149, 144, 113, 114, 107, 45, 47, 39, 108, 110, 111, 100, 101, 105, 111, 112, 119, 114, 115, 119, 124, 126, 130, 116, 118, 122, 110, 112, 116, 117, 121, 124, 114, 118, 122, 113, 114, 118, 109, 111, 113, 103, 106, 105, 99, 101, 98, 145, 148, 143, 143, 144, 139, 138, 140, 132, 119, 120, 113, 21, 22, 17, 64, 64, 62, 113, 113, 113, 121, 121, 123, 129, 129, 131, 133, 135, 139, 125, 127, 133, 121, 121, 130, 118, 119, 126, 114, 118, 126, 110, 111, 120, 108, 109, 115, 105, 108, 116, 101, 102, 108, 120, 121, 125, 139, 142, 139, 131, 132, 128, 135, 135, 133, 116, 117, 112, 38, 39, 36, 60, 62, 62, 128, 129, 131, 129, 130, 134, 121, 125, 130, 111, 115, 120, 117, 120, 126, 103, 106, 112, 106, 110, 115, 102, 105, 111, 100, 104, 110, 94, 95, 103, 90, 94, 99, 90, 92, 98, 82, 84, 86, 116, 117, 114, 120, 121, 116, 112, 112, 110, 56, 57, 52, 21, 21, 19, 13, 13, 13, 123, 125, 127, 120, 124, 127, 121, 127, 130, 120, 124, 127, 116, 122, 127, 121, 127, 134, 123, 129, 136, 122, 130, 136, 119, 127, 134, 116, 122, 131, 113, 121, 130, 117, 125, 133, 117, 123, 132, 120, 125, 134, 119, 124, 133, 117, 123, 132, 116, 122, 131, 110, 118, 126, 113, 118, 128, 112, 117, 126, 110, 115, 127, 112, 120, 131, 109, 114, 126, 109, 114, 126, 109, 115, 124, 113, 118, 126, 106, 112, 117, 108, 114, 119, 110, 113, 119, 112, 116, 119, 106, 113, 109, 75, 80, 76, 131, 135, 127, 128, 129, 122, 127, 128, 121, 137, 135, 
129, 138, 136, 130, 138, 136, 130, 144, 143, 134, 153, 152, 143, 152, 149, 141, 160, 158, 147, 54, 52, 41, 12, 11, 2, 25, 24, 17, 97, 97, 93, 98, 98, 94, 111, 109, 105, 113, 114, 109, 120, 121, 116, 67, 64, 65, 89, 92, 91, 79, 84, 85, 81, 85, 86, 89, 94, 92, 89, 91, 93, 104, 106, 108, 255, 255, 255, 78, 79, 83, 85, 89, 92, 91, 95, 100, 166, 169, 175, 35, 40, 40, 137, 138, 135, 159, 160, 156, 130, 133, 126, 107, 113, 107, 120, 126, 119, 110, 115, 109, 106, 112, 105, 116, 121, 117, 104, 109, 105, 118, 121, 118, 127, 129, 126, 134, 134, 132, 134, 134, 132, 119, 119, 119, 125, 125, 125, 126, 126, 126, 129, 129, 129, 119, 119, 119, 138, 138, 138, 130, 130, 132, 127, 127, 129, 130, 130, 132, 121, 121, 123, 123, 123, 127, 123, 123, 127, 128, 127, 132, 127, 127, 129, 133, 132, 135, 122, 122, 124, 119, 118, 121, 123, 125, 128, 120, 121, 125, 114, 118, 122, 114, 118, 122, 124, 128, 131, 105, 109, 112, 107, 111, 115, 105, 109, 112, 100, 104, 108, 97, 101, 104, 96, 100, 103, 93, 97, 101, 99, 103, 106, 90, 92, 96, 227, 228, 230, 93, 94, 98, 107, 108, 114, 96, 97, 104, 94, 95, 103, 106, 106, 115, 97, 98, 107, 99, 99, 110, 98, 101, 111, 102, 105, 113, 100, 104, 112, 102, 105, 113, 96, 99, 105, 107, 111, 117, 70, 70, 68, 162, 163, 158, 133, 132, 124, 118, 118, 114, 51, 52, 47, 94, 95, 92, 47, 49, 51, 131, 132, 139, 131, 132, 141, 125, 126, 135, 123, 124, 132, 115, 116, 124, 113, 113, 124, 255, 255, 255, 118, 121, 129, 104, 107, 115, 98, 101, 109, 95, 97, 108, 93, 96, 107, 92, 93, 99, 66, 66, 66, 166, 164, 160, 150, 151, 144, 114, 115, 108, 42, 41, 35, 44, 45, 38, 29, 31, 28, 100, 102, 103, 131, 133, 135, 133, 135, 137, 133, 135, 137, 128, 129, 131, 123, 127, 130, 148, 152, 155, 111, 115, 118, 114, 118, 126, 111, 114, 124, 109, 112, 120, 106, 109, 119, 111, 114, 124, 156, 158, 157, 148, 149, 144, 136, 139, 133, 37, 40, 34, 53, 57, 49, 35, 36, 29, 88, 90, 92, 109, 111, 115, 108, 109, 113, 109, 112, 118, 119, 123, 126, 121, 122, 126, 210, 214, 217, 121, 122, 126, 116, 118, 122, 116, 118, 122, 109, 111, 115, 106, 107, 109, 102, 104, 104, 145, 148, 143, 144, 145, 140, 42, 43, 38, 26, 26, 22, 24, 25, 22, 16, 17, 14, 127, 127, 127, 116, 116, 120, 128, 128, 130, 130, 132, 133, 129, 130, 134, 113, 114, 120, 123, 124, 130, 115, 116, 122, 106, 110, 115, 108, 109, 115, 101, 102, 110, 106, 107, 113, 76, 78, 82, 144, 147, 141, 113, 116, 108, 72, 75, 69, 9, 12, 6, 29, 31, 28, 29, 31, 30, 125, 127, 131, 128, 129, 135, 121, 125, 128, 120, 124, 127, 113, 117, 122, 103, 106, 112, 158, 161, 167, 109, 112, 118, 98, 102, 107, 90, 94, 99, 92, 92, 101, 89, 91, 95, 53, 56, 53, 69, 71, 59, 24, 26, 18, 22, 23, 18, 8, 9, 4, 24, 25, 20, 22, 22, 20, 118, 121, 120, 123, 127, 130, 119, 125, 127, 120, 124, 129, 121, 127, 132, 119, 125, 129, 117, 123, 130, 114, 120, 127, 116, 122, 129, 117, 123, 130, 114, 120, 127, 113, 118, 126, 117, 123, 130, 107, 113, 120, 122, 128, 135, 119, 124, 131, 102, 108, 115, 115, 121, 128, 115, 121, 128, 117, 123, 130, 110, 116, 123, 116, 122, 129, 106, 111, 119, 113, 118, 126, 112, 118, 122, 110, 113, 119, 112, 118, 120, 112, 118, 120, 113, 117, 120, 111, 115, 116, 110, 114, 113, 110, 113, 108, 55, 59, 51, 121, 122, 115, 113, 114, 107, 125, 126, 119, 128, 127, 121, 142, 141, 135, 138, 137, 128, 69, 68, 60, 15, 17, 5, 20, 22, 10, 7, 14, 0, 15, 18, 10, 16, 19, 13, 80, 83, 77, 93, 96, 90, 106, 107, 102, 116, 114, 110, 117, 115, 111, 72, 74, 75, 75, 78, 77, 80, 82, 83, 82, 86, 87, 79, 84, 85, 88, 90, 92, 76, 78, 80, 92, 94, 95, 72, 73, 77, 64, 68, 71, 75, 79, 82, 123, 130, 132, 27, 31, 32, 50, 52, 51, 90, 93, 90, 
12, 16, 10, 0, 0, 0, 51, 54, 50, 103, 106, 101, 114, 120, 116, 111, 114, 111, 110, 113, 110, 118, 121, 118, 117, 120, 117, 136, 136, 134, 125, 125, 123, 123, 124, 121, 120, 120, 120, 127, 127, 127, 113, 113, 113, 129, 129, 129, 141, 140, 143, 119, 118, 121, 133, 132, 137, 123, 123, 127, 123, 123, 127, 122, 121, 126, 141, 140, 145, 127, 126, 131, 126, 125, 130, 126, 125, 128, 123, 123, 125, 123, 123, 125, 127, 128, 130, 135, 139, 142, 123, 127, 130, 126, 130, 133, 95, 99, 102, 104, 108, 111, 104, 108, 111, 99, 103, 106, 88, 92, 95, 96, 100, 103, 95, 99, 102, 93, 97, 101, 95, 99, 102, 95, 99, 102, 167, 169, 173, 255, 255, 255, 90, 92, 96, 89, 91, 95, 95, 96, 105, 97, 98, 107, 96, 97, 106, 92, 96, 103, 95, 98, 106, 93, 96, 107, 97, 100, 108, 97, 100, 110, 97, 100, 110, 90, 93, 101, 5, 8, 7, 45, 48, 45, 0, 0, 0, 15, 16, 11, 1, 3, 0, 38, 41, 38, 55, 58, 57, 110, 113, 119, 131, 134, 142, 128, 128, 137, 120, 120, 129, 118, 121, 129, 114, 118, 126, 106, 106, 115, 106, 109, 117, 95, 98, 106, 97, 100, 108, 99, 103, 110, 92, 96, 103, 89, 92, 98, 45, 49, 50, 2, 5, 1, 0, 0, 0, 50, 51, 44, 28, 31, 25, 8, 11, 5, 0, 0, 0, 23, 25, 26, 133, 135, 137, 133, 135, 137, 136, 138, 139, 131, 133, 137, 132, 134, 138, 106, 107, 111, 175, 179, 180, 119, 123, 126, 111, 114, 122, 109, 112, 120, 106, 109, 119, 102, 104, 115, 36, 41, 41, 0, 0, 0, 17, 19, 11, 11, 14, 9, 21, 24, 18, 19, 19, 15, 0, 0, 0, 120, 121, 123, 114, 115, 119, 114, 115, 119, 123, 127, 130, 128, 132, 135, 106, 110, 113, 135, 140, 140, 121, 122, 126, 114, 115, 119, 111, 113, 117, 109, 111, 115, 108, 110, 111, 0, 0, 0, 0, 0, 0, 4, 7, 4, 17, 20, 17, 14, 16, 13, 10, 13, 10, 121, 121, 121, 121, 121, 123, 123, 123, 125, 125, 127, 129, 120, 121, 125, 142, 143, 145, 133, 135, 139, 120, 121, 125, 122, 123, 129, 111, 112, 119, 104, 106, 112, 102, 103, 110, 87, 89, 90, 0, 0, 0, 59, 62, 56, 10, 13, 8, 2, 5, 0, 0, 1, 0, 0, 0, 0, 131, 133, 137, 131, 135, 138, 126, 130, 133, 117, 121, 124, 110, 114, 117, 93, 97, 101, 201, 205, 208, 102, 105, 111, 96, 99, 105, 95, 98, 104, 93, 97, 103, 89, 91, 95, 32, 34, 36, 0, 0, 0, 0, 0, 0, 1, 3, 0, 0, 0, 0, 0, 1, 0, 8, 8, 8, 100, 102, 103, 120, 124, 127, 120, 126, 128, 121, 125, 130, 119, 123, 128, 117, 120, 126, 116, 122, 129, 114, 120, 127, 114, 120, 127, 114, 120, 127, 110, 116, 123, 110, 116, 123, 110, 116, 123, 194, 198, 201, 204, 209, 207, 166, 171, 165, 252, 255, 250, 229, 232, 224, 216, 219, 214, 213, 216, 210, 185, 188, 182, 225, 229, 223, 252, 255, 250, 234, 236, 233, 241, 246, 242, 211, 216, 214, 239, 244, 240, 242, 249, 247, 248, 253, 251, 104, 109, 107, 111, 116, 114, 124, 127, 124, 0, 0, 0, 79, 80, 75, 97, 100, 97, 118, 118, 114, 121, 122, 117, 125, 126, 119, 37, 39, 29, 0, 0, 0, 18, 22, 14, 11, 17, 8, 6, 12, 3, 23, 26, 20, 14, 17, 11, 22, 28, 22, 109, 112, 107, 100, 101, 96, 156, 155, 151, 191, 187, 181, 78, 85, 85, 83, 87, 88, 85, 90, 90, 72, 77, 78, 78, 83, 83, 104, 107, 106, 79, 81, 80, 71, 76, 76, 61, 63, 67, 71, 75, 78, 74, 78, 81, 102, 106, 109, 50, 55, 55, 29, 31, 32, 62, 65, 64, 57, 60, 54, 54, 57, 52, 0, 0, 0, 0, 0, 0, 111, 116, 112, 118, 123, 119, 108, 111, 107, 113, 113, 111, 116, 119, 116, 122, 123, 120, 121, 121, 119, 119, 119, 117, 113, 113, 113, 115, 115, 115, 123, 123, 123, 125, 125, 125, 123, 123, 125, 133, 132, 137, 130, 130, 134, 123, 123, 127, 128, 127, 134, 121, 120, 127, 121, 120, 127, 123, 123, 127, 121, 120, 125, 125, 124, 128, 126, 125, 130, 123, 125, 127, 148, 149, 153, 136, 141, 142, 111, 115, 118, 100, 104, 108, 103, 107, 110, 104, 108, 111, 106, 110, 113, 102, 106, 109, 96, 99, 105, 93, 
97, 101, 96, 100, 103, 93, 97, 101, 86, 90, 96, 96, 100, 103, 102, 106, 109, 114, 115, 119, 92, 93, 97, 97, 99, 103, 95, 98, 104, 95, 96, 105, 95, 98, 106, 95, 97, 108, 93, 97, 105, 97, 100, 110, 90, 93, 103, 93, 96, 107, 95, 97, 108, 96, 99, 107, 0, 0, 0, 1, 3, 0, 0, 0, 0, 1, 4, 0, 0, 0, 0, 1, 3, 0, 32, 37, 35, 99, 100, 104, 136, 137, 143, 131, 132, 141, 128, 128, 137, 120, 120, 129, 118, 119, 126, 146, 150, 155, 159, 162, 168, 96, 99, 105, 105, 109, 114, 89, 92, 98, 96, 99, 107, 90, 94, 99, 109, 113, 116, 8, 10, 7, 0, 0, 0, 38, 41, 34, 0, 0, 0, 7, 9, 6, 0, 0, 0, 0, 0, 0, 145, 147, 149, 133, 135, 137, 136, 138, 139, 133, 135, 137, 131, 133, 135, 146, 148, 150, 195, 197, 199, 129, 131, 132, 109, 110, 117, 113, 117, 122, 106, 109, 119, 108, 108, 119, 96, 99, 105, 0, 0, 0, 0, 0, 0, 4, 7, 2, 0, 0, 0, 28, 31, 25, 0, 0, 0, 123, 125, 127, 108, 109, 113, 121, 122, 126, 124, 126, 130, 129, 130, 134, 115, 116, 120, 148, 149, 151, 116, 120, 121, 109, 111, 115, 115, 116, 120, 108, 110, 111, 102, 104, 106, 7, 9, 8, 23, 26, 22, 0, 0, 0, 0, 0, 0, 9, 12, 8, 0, 0, 0, 66, 68, 67, 117, 119, 121, 121, 121, 123, 125, 127, 129, 125, 127, 129, 127, 128, 130, 131, 133, 135, 116, 118, 120, 120, 121, 127, 107, 111, 117, 104, 105, 114, 101, 102, 108, 117, 119, 123, 64, 66, 63, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 115, 119, 129, 130, 134, 128, 132, 135, 121, 125, 128, 106, 110, 115, 114, 118, 122, 104, 108, 111, 98, 102, 107, 98, 102, 107, 93, 97, 103, 88, 91, 97, 86, 91, 94, 62, 66, 67, 17, 20, 17, 0, 0, 0, 0, 0, 0, 2, 5, 1, 0, 0, 0, 0, 0, 0, 82, 84, 86, 127, 128, 132, 123, 126, 132, 119, 123, 128, 117, 123, 128, 114, 118, 124, 120, 124, 131, 114, 120, 127, 114, 120, 127, 112, 117, 124, 112, 117, 124, 112, 115, 123, 118, 121, 129, 138, 142, 145, 253, 255, 255, 146, 148, 152, 246, 250, 251, 180, 184, 185, 250, 252, 253, 202, 204, 206, 220, 225, 223, 255, 255, 255, 255, 255, 255, 192, 197, 195, 234, 239, 239, 189, 194, 192, 214, 219, 218, 118, 123, 121, 196, 201, 199, 111, 116, 114, 106, 113, 113, 114, 119, 118, 43, 45, 44, 31, 34, 31, 103, 106, 101, 105, 106, 101, 114, 115, 110, 122, 123, 116, 59, 61, 53, 0, 0, 0, 17, 20, 15, 15, 18, 12, 19, 22, 17, 24, 27, 22, 16, 19, 13, 14, 17, 11, 113, 113, 111, 100, 101, 96, 118, 115, 109, 173, 171, 165, 80, 82, 81, 80, 82, 81, 85, 87, 86, 88, 88, 90, 225, 225, 225, 128, 130, 127, 69, 72, 69, 61, 66, 64, 62, 66, 67, 63, 68, 68, 78, 83, 83, 95, 99, 102, 29, 34, 35, 0, 0, 0, 17, 22, 18, 53, 54, 49, 57, 59, 56, 32, 35, 32, 0, 0, 0, 0, 0, 0, 116, 119, 116, 116, 119, 116, 113, 115, 112, 116, 119, 116, 111, 111, 109, 142, 142, 142, 115, 115, 115, 133, 133, 133, 130, 130, 130, 121, 121, 121, 126, 125, 128, 125, 124, 127, 128, 127, 132, 116, 116, 120, 128, 127, 132, 135, 134, 141, 125, 123, 130, 121, 120, 127, 122, 121, 128, 122, 121, 128, 123, 122, 129, 140, 139, 144, 145, 147, 149, 112, 112, 110, 63, 68, 68, 105, 109, 110, 103, 107, 110, 98, 102, 105, 104, 108, 111, 98, 102, 105, 100, 104, 108, 86, 90, 96, 98, 102, 107, 100, 101, 105, 92, 96, 101, 91, 95, 98, 92, 96, 99, 86, 91, 92, 216, 220, 221, 110, 112, 116, 88, 92, 95, 94, 95, 101, 91, 94, 102, 91, 94, 102, 93, 97, 105, 93, 96, 107, 93, 97, 105, 95, 98, 106, 95, 97, 108, 95, 95, 107, 95, 95, 107, 60, 64, 67, 0, 0, 0, 0, 0, 0, 14, 17, 11, 0, 1, 0, 0, 0, 0, 71, 77, 72, 33, 37, 40, 128, 132, 137, 127, 127, 136, 125, 127, 133, 121, 122, 128, 122, 123, 129, 225, 227, 231, 184, 185, 189, 105, 109, 112, 103, 106, 112, 93, 97, 101, 91, 94, 102, 95, 98, 104, 113, 117, 120, 0, 0, 0, 0, 0, 0, 31, 36, 30, 0, 0, 0, 0, 1, 0, 0, 
1, 0, 0, 0, 0, 132, 135, 134, 137, 139, 138, 133, 135, 137, 133, 135, 137, 129, 131, 132, 172, 174, 173, 161, 164, 163, 255, 255, 255, 125, 127, 131, 113, 114, 120, 110, 111, 120, 100, 104, 112, 132, 133, 140, 0, 0, 0, 1, 4, 0, 0, 0, 0, 0, 0, 0, 38, 41, 36, 0, 0, 0, 108, 110, 109, 116, 118, 122, 117, 119, 121, 123, 125, 127, 124, 126, 128, 206, 208, 209, 113, 114, 116, 227, 228, 230, 124, 126, 128, 111, 113, 117, 109, 111, 113, 106, 107, 109, 86, 88, 87, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 24, 21, 0, 2, 0, 0, 0, 0, 123, 125, 125, 113, 113, 113, 131, 133, 135, 128, 129, 131, 118, 121, 120, 121, 123, 124, 144, 146, 147, 108, 109, 113, 114, 115, 119, 104, 106, 112, 101, 102, 108, 105, 104, 111, 29, 31, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 94, 96, 97, 128, 129, 133, 127, 132, 132, 124, 128, 131, 114, 118, 122, 206, 211, 211, 235, 237, 238, 234, 239, 239, 111, 115, 118, 96, 100, 103, 89, 92, 98, 86, 91, 94, 101, 102, 106, 50, 52, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 29, 31, 121, 122, 126, 123, 125, 128, 117, 121, 124, 119, 123, 128, 118, 122, 127, 116, 117, 124, 116, 119, 125, 114, 118, 124, 114, 118, 124, 118, 122, 127, 113, 116, 124, 122, 123, 131, 114, 118, 124, 114, 118, 126, 120, 124, 131, 111, 114, 124, 117, 120, 130, 118, 121, 131, 117, 120, 130, 111, 114, 124, 110, 113, 121, 109, 112, 118, 114, 118, 124, 113, 117, 120, 116, 120, 123, 109, 113, 116, 111, 115, 118, 111, 115, 116, 107, 112, 113, 113, 120, 120, 113, 118, 116, 46, 49, 46, 0, 0, 0, 106, 109, 103, 105, 106, 101, 106, 107, 100, 138, 140, 132, 102, 104, 96, 0, 0, 0, 5, 9, 3, 11, 14, 9, 14, 16, 13, 7, 13, 6, 5, 9, 3, 19, 22, 17, 123, 124, 119, 110, 108, 104, 106, 102, 98, 124, 120, 117, 72, 77, 76, 76, 79, 78, 87, 87, 87, 86, 86, 88, 182, 182, 180, 221, 221, 219, 78, 78, 78, 68, 71, 70, 62, 64, 66, 68, 71, 70, 79, 81, 82, 103, 106, 105, 25, 28, 27, 25, 30, 28, 28, 33, 29, 15, 17, 14, 50, 52, 49, 53, 56, 53, 32, 35, 32, 0, 0, 0, 17, 20, 17, 120, 122, 119, 110, 113, 110, 113, 113, 111, 118, 118, 116, 126, 126, 126, 120, 120, 120, 120, 120, 120, 123, 123, 125, 118, 117, 120, 126, 125, 128, 128, 128, 130, 112, 111, 116, 118, 117, 122, 121, 120, 125, 125, 123, 130, 121, 120, 127, 120, 121, 127, 114, 115, 121, 127, 126, 133, 137, 137, 141, 129, 129, 129, 70, 71, 66, 10, 12, 4, 52, 56, 55, 99, 104, 102, 104, 108, 111, 104, 108, 111, 104, 106, 110, 104, 106, 110, 97, 99, 103, 97, 99, 103, 96, 97, 104, 95, 97, 101, 96, 98, 102, 93, 97, 101, 96, 98, 102, 94, 95, 99, 206, 211, 209, 97, 99, 101, 95, 97, 101, 95, 96, 105, 94, 95, 103, 91, 94, 102, 90, 93, 101, 92, 95, 105, 89, 92, 102, 95, 98, 106, 95, 96, 105, 99, 99, 110, 96, 97, 106, 127, 128, 132, 0, 1, 0, 22, 25, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 81, 86, 82, 0, 0, 0, 127, 128, 134, 127, 127, 136, 118, 119, 126, 122, 123, 129, 113, 114, 120, 149, 150, 154, 200, 202, 201, 255, 255, 255, 105, 109, 114, 98, 102, 107, 98, 101, 109, 85, 89, 94, 91, 95, 98, 0, 0, 0, 0, 0, 0, 10, 15, 9, 0, 0, 0, 0, 0, 0, 18, 23, 19, 0, 0, 0, 94, 97, 93, 133, 136, 135, 136, 138, 139, 138, 140, 140, 132, 135, 134, 123, 123, 123, 238, 241, 240, 235, 237, 238, 127, 128, 132, 116, 117, 124, 114, 114, 123, 105, 108, 116, 109, 110, 117, 36, 38, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 34, 29, 1, 3, 0, 19, 22, 21, 116, 118, 120, 115, 117, 118, 124, 126, 128, 127, 128, 130, 113, 115, 114, 211, 214, 213, 255, 255, 255, 137, 139, 140, 111, 113, 115, 104, 106, 110, 108, 110, 111, 118, 121, 120, 0, 0, 0, 2, 5, 1, 0, 0, 0, 1, 3, 0, 0, 0, 0, 0, 2, 1, 104, 107, 106, 114, 116, 115, 120, 121, 123, 123, 125, 127, 
125, 127, 129, 113, 114, 116, 103, 105, 107, 117, 119, 123, 111, 113, 117, 108, 109, 117, 104, 102, 110, 99, 100, 106, 19, 19, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 40, 40, 131, 135, 138, 123, 127, 128, 120, 124, 127, 113, 117, 120, 113, 117, 120, 97, 99, 101, 103, 105, 109, 104, 106, 110, 95, 97, 101, 95, 97, 101, 88, 90, 94, 93, 94, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, 114, 116, 117, 119, 121, 125, 127, 129, 124, 126, 128, 123, 125, 128, 117, 119, 123, 121, 122, 126, 120, 121, 125, 121, 122, 126, 118, 122, 125, 117, 118, 125, 120, 121, 127, 117, 120, 126, 117, 120, 128, 116, 119, 127, 118, 121, 129, 117, 120, 128, 117, 120, 128, 118, 121, 131, 114, 120, 127, 110, 116, 121, 110, 117, 117, 110, 117, 119, 112, 118, 118, 109, 116, 116, 108, 115, 115, 109, 116, 114, 112, 119, 116, 109, 113, 112, 105, 112, 109, 110, 118, 113, 79, 87, 82, 0, 0, 0, 90, 94, 88, 109, 110, 105, 109, 107, 103, 109, 107, 101, 114, 113, 107, 0, 0, 0, 3, 4, 0, 8, 14, 8, 1, 7, 3, 22, 28, 24, 0, 6, 0, 0, 0, 0, 91, 91, 89, 108, 106, 102, 102, 98, 97, 120, 116, 112, 82, 78, 77, 82, 80, 78, 86, 87, 84, 83, 83, 83, 77, 77, 75, 70, 70, 68, 72, 73, 70, 73, 76, 72, 67, 68, 67, 69, 72, 69, 101, 102, 99, 22, 24, 21, 21, 23, 20, 36, 38, 37, 33, 36, 33, 18, 21, 18, 36, 38, 35, 38, 41, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 103, 106, 103, 103, 106, 103, 108, 111, 107, 112, 112, 110, 115, 116, 113, 113, 113, 111, 127, 127, 127, 116, 116, 116, 123, 123, 125, 126, 125, 128, 122, 122, 124, 118, 117, 122, 121, 120, 125, 119, 118, 123, 121, 120, 127, 114, 113, 120, 119, 118, 125, 122, 121, 128, 129, 128, 135, 128, 128, 128, 26, 26, 22, 0, 0, 0, 76, 76, 74, 43, 45, 42, 97, 100, 99, 103, 105, 107, 107, 109, 110, 101, 103, 104, 97, 99, 101, 103, 105, 107, 101, 102, 106, 100, 101, 105, 96, 97, 104, 97, 101, 106, 91, 95, 100, 93, 94, 98, 86, 87, 93, 81, 83, 87, 92, 96, 99, 94, 95, 101, 94, 95, 101, 94, 95, 101, 89, 92, 98, 92, 96, 103, 95, 98, 104, 91, 95, 100, 92, 96, 101, 95, 96, 103, 93, 94, 100, 94, 95, 99, 115, 117, 118, 0, 0, 0, 52, 55, 49, 0, 0, 0, 3, 6, 3, 0, 0, 0, 4, 9, 5, 0, 0, 0, 128, 132, 135, 127, 131, 136, 131, 133, 137, 120, 121, 125, 116, 120, 123, 114, 118, 122, 109, 111, 113, 105, 109, 110, 97, 101, 104, 99, 103, 106, 97, 101, 106, 90, 94, 97, 83, 87, 90, 45, 50, 46, 5, 11, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 30, 26, 0, 0, 0, 55, 58, 55, 135, 137, 134, 132, 135, 132, 133, 136, 135, 133, 136, 133, 135, 137, 136, 125, 128, 127, 123, 125, 125, 122, 124, 125, 114, 115, 119, 111, 112, 119, 104, 108, 113, 104, 108, 111, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 18, 21, 16, 0, 0, 0, 122, 124, 123, 115, 117, 116, 116, 118, 118, 122, 124, 123, 123, 125, 125, 115, 117, 116, 131, 133, 133, 123, 125, 127, 113, 115, 114, 111, 114, 113, 113, 115, 114, 106, 108, 107, 31, 34, 31, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 0, 50, 52, 49, 110, 112, 112, 117, 119, 119, 123, 125, 125, 125, 128, 127, 122, 124, 123, 122, 124, 123, 116, 118, 120, 114, 115, 119, 107, 109, 110, 104, 106, 110, 95, 96, 103, 94, 97, 93, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 135, 140, 140, 128, 129, 131, 127, 128, 130, 118, 120, 122, 115, 117, 118, 106, 108, 107, 103, 105, 107, 103, 105, 109, 96, 98, 102, 92, 93, 97, 93, 94, 98, 86, 88, 89, 0, 0, 0, 2, 5, 0, 0, 0, 0, 0, 0, 0, 3, 6, 1, 0, 0, 0, 0, 0, 0, 77, 77, 75, 125, 128, 127, 123, 126, 123, 117, 120, 117, 127, 127, 125, 117, 120, 117, 129, 129, 129, 121, 123, 122, 122, 124, 125, 123, 125, 127, 120, 121, 123, 118, 120, 124, 123, 127, 130, 111, 115, 118, 120, 
124, 127, 114, 118, 122, 110, 114, 117, 117, 121, 122, 118, 122, 125, 120, 124, 127, 115, 122, 122, 114, 119, 120, 119, 123, 124, 115, 122, 122, 118, 122, 123, 109, 113, 112, 113, 118, 116, 102, 110, 107, 112, 117, 115, 109, 116, 114, 107, 113, 109, 115, 123, 116, 3, 8, 2, 86, 87, 82, 114, 113, 109, 112, 111, 107, 110, 109, 102, 104, 103, 96, 35, 34, 28, 0, 0, 0, 14, 15, 10, 0, 4, 0, 3, 6, 1, 0, 0, 0, 0, 0, 0, 30, 31, 26, 100, 101, 96, 98, 96, 93, 108, 106, 102, 77, 75, 74, 76, 74, 72, 72, 73, 68, 75, 73, 69, 77, 78, 73, 75, 73, 69, 63, 64, 59, 67, 68, 64, 53, 54, 49, 59, 60, 55, 95, 96, 91, 28, 29, 22, 31, 32, 29, 33, 36, 33, 21, 26, 22, 33, 36, 35, 52, 57, 53, 25, 30, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 97, 102, 98, 103, 108, 104, 109, 112, 109, 113, 113, 111, 122, 123, 120, 113, 113, 111, 111, 111, 111, 113, 112, 115, 112, 111, 114, 106, 106, 108, 118, 117, 120, 113, 112, 115, 116, 116, 118, 120, 119, 126, 115, 114, 119, 119, 118, 125, 113, 112, 119, 129, 128, 135, 126, 125, 128, 71, 71, 69, 0, 0, 0, 91, 92, 85, 56, 57, 50, 44, 47, 41, 100, 102, 101, 100, 102, 101, 97, 100, 99, 107, 109, 108, 115, 117, 118, 106, 108, 107, 104, 106, 108, 107, 109, 110, 100, 101, 105, 94, 95, 99, 91, 95, 98, 90, 94, 95, 90, 94, 97, 92, 94, 95, 94, 96, 97, 94, 96, 97, 93, 94, 98, 96, 98, 102, 92, 93, 97, 84, 88, 91, 92, 96, 99, 89, 93, 96, 89, 91, 95, 86, 88, 89, 86, 88, 89, 90, 93, 92, 97, 100, 99, 0, 0, 0, 23, 26, 20, 0, 0, 0, 0, 2, 0, 0, 0, 0, 31, 36, 32, 20, 25, 23, 70, 75, 73, 126, 131, 129, 135, 140, 138, 118, 123, 121, 113, 118, 114, 109, 113, 112, 106, 111, 107, 98, 103, 99, 98, 103, 99, 91, 96, 94, 85, 90, 88, 90, 94, 95, 96, 101, 99, 113, 118, 114, 0, 0, 0, 0, 0, 0, 11, 15, 7, 0, 0, 0, 46, 51, 45, 0, 0, 0, 54, 59, 53, 173, 178, 172, 121, 127, 121, 131, 136, 132, 133, 139, 132, 138, 141, 138, 136, 138, 135, 127, 129, 128, 128, 130, 129, 127, 129, 128, 115, 117, 116, 110, 114, 113, 100, 102, 101, 64, 66, 63, 35, 38, 32, 8, 11, 5, 0, 0, 0, 0, 0, 0, 25, 28, 23, 0, 0, 0, 100, 102, 99, 107, 109, 108, 116, 118, 118, 117, 119, 119, 122, 125, 121, 127, 129, 128, 116, 118, 118, 118, 121, 120, 113, 115, 114, 113, 115, 114, 123, 125, 125, 108, 111, 107, 51, 54, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 7, 9, 6, 135, 137, 134, 111, 114, 111, 114, 116, 113, 125, 128, 125, 125, 128, 127, 128, 130, 129, 117, 119, 119, 120, 122, 121, 110, 112, 112, 110, 112, 112, 99, 100, 102, 118, 121, 118, 15, 18, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 115, 117, 114, 143, 145, 142, 137, 140, 136, 125, 128, 125, 118, 121, 120, 114, 116, 115, 103, 106, 105, 107, 109, 110, 97, 99, 101, 92, 94, 95, 65, 67, 66, 81, 84, 81, 0, 0, 0, 16, 19, 13, 0, 0, 0, 0, 0, 0, 2, 3, 0, 0, 0, 0, 75, 78, 73, 17, 20, 15, 37, 40, 36, 22, 24, 21, 22, 24, 23, 38, 41, 38, 24, 27, 24, 12, 15, 12, 21, 23, 20, 21, 23, 20, 33, 36, 33, 29, 31, 28, 26, 29, 26, 40, 45, 39, 42, 48, 41, 71, 77, 70, 47, 52, 46, 53, 58, 52, 68, 73, 67, 60, 65, 59, 100, 106, 100, 62, 67, 63, 43, 49, 45, 52, 56, 55, 56, 61, 57, 54, 59, 55, 41, 46, 42, 35, 40, 36, 24, 31, 29, 10, 18, 13, 24, 32, 27, 14, 21, 17, 30, 38, 33, 13, 18, 14, 48, 52, 46, 114, 113, 109, 116, 114, 110, 108, 106, 100, 108, 106, 100, 61, 60, 53, 0, 0, 0, 1, 2, 0, 0, 3, 0, 16, 19, 13, 0, 0, 0, 0, 0, 0, 99, 102, 96, 42, 45, 39, 130, 133, 118, 209, 210, 189, 21, 22, 17, 30, 31, 24, 40, 41, 34, 62, 63, 56, 73, 74, 67, 66, 67, 60, 58, 59, 52, 49, 50, 43, 33, 34, 27, 44, 45, 38, 21, 22, 17, 37, 38, 33, 33, 33, 31, 48, 51, 48, 48, 51, 48, 67, 70, 67, 65, 67, 64, 38, 43, 39, 0, 0, 0, 0, 0, 0, 28, 30, 27, 89, 
94, 90, 102, 105, 102, 107, 110, 104, 103, 106, 103, 107, 108, 105, 109, 110, 107, 109, 111, 111, 105, 105, 105, 109, 109, 111, 115, 115, 117, 108, 108, 110, 113, 112, 117, 114, 114, 116, 112, 111, 116, 112, 111, 116, 114, 113, 118, 115, 116, 120, 122, 122, 124, 120, 120, 118, 0, 0, 0, 50, 49, 43, 66, 68, 58, 57, 58, 51, 37, 38, 31, 7, 8, 3, 72, 73, 68, 91, 92, 85, 105, 106, 101, 93, 96, 90, 93, 96, 90, 93, 96, 90, 90, 94, 88, 96, 99, 94, 85, 88, 82, 86, 89, 83, 86, 89, 83, 82, 85, 80, 83, 87, 81, 96, 99, 94, 108, 111, 103, 113, 116, 108, 114, 117, 109, 106, 109, 101, 101, 104, 96, 71, 74, 66, 32, 38, 29, 10, 14, 6, 9, 12, 4, 0, 5, 0, 8, 14, 8, 29, 32, 26, 0, 3, 0, 3, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 48, 54, 47, 6, 12, 5, 34, 40, 33, 31, 37, 28, 26, 32, 23, 17, 23, 14, 25, 31, 22, 17, 23, 14, 18, 24, 15, 22, 28, 20, 26, 32, 25, 35, 41, 34, 21, 27, 20, 41, 47, 40, 34, 39, 35, 0, 0, 0, 0, 0, 0, 24, 28, 20, 0, 0, 0, 32, 37, 31, 0, 6, 0, 116, 122, 113, 46, 51, 45, 48, 54, 47, 28, 36, 29, 40, 45, 41, 36, 43, 39, 46, 51, 45, 36, 42, 38, 45, 50, 46, 50, 56, 51, 41, 46, 44, 43, 49, 45, 43, 49, 45, 14, 19, 15, 83, 87, 81, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 21, 16, 85, 88, 82, 17, 20, 15, 18, 21, 16, 22, 25, 19, 30, 33, 27, 19, 22, 17, 19, 22, 17, 22, 25, 19, 29, 32, 26, 21, 24, 18, 9, 12, 6, 12, 16, 10, 17, 20, 15, 9, 12, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 8, 2, 0, 2, 0, 113, 116, 110, 32, 37, 31, 29, 35, 31, 27, 32, 28, 19, 24, 18, 18, 23, 19, 21, 27, 20, 29, 35, 31, 20, 25, 21, 37, 40, 36, 28, 31, 25, 30, 33, 27, 28, 31, 25, 45, 48, 43, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 106, 100, 34, 40, 33, 31, 36, 30, 27, 33, 26, 14, 20, 13, 23, 26, 20, 23, 26, 22, 21, 24, 18, 19, 22, 17, 32, 35, 30, 26, 29, 24, 29, 31, 28, 17, 20, 15, 13, 14, 9, 35, 38, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 12, 16, 10, 36, 36, 34, 34, 34, 32, 33, 36, 33, 39, 42, 39, 29, 35, 31, 26, 31, 27, 32, 35, 32, 35, 37, 34, 31, 34, 29, 28, 34, 25, 33, 36, 31, 24, 28, 20, 29, 35, 27, 29, 35, 27, 26, 32, 23, 20, 26, 19, 28, 34, 27, 34, 40, 33, 32, 37, 33, 29, 37, 32, 28, 33, 29, 31, 36, 32, 27, 34, 31, 35, 40, 38, 38, 43, 39, 29, 35, 31, 29, 35, 31, 32, 37, 33, 31, 35, 34, 37, 45, 40, 28, 35, 31, 45, 53, 48, 32, 35, 30, 0, 0, 0, 116, 117, 112, 116, 117, 112, 113, 114, 107, 100, 99, 93, 87, 89, 81, 0, 0, 0, 1, 4, 0, 0, 0, 0, 5, 9, 3, 0, 0, 0, 81, 84, 81, 33, 36, 31, 52, 55, 49, 47, 50, 45, 49, 50, 45, 41, 40, 32, 42, 41, 33, 35, 36, 27, 47, 48, 39, 35, 36, 27, 44, 47, 39, 29, 32, 24, 47, 51, 43, 45, 48, 41, 43, 44, 39, 47, 47, 43, 23, 24, 17, 39, 42, 37, 63, 64, 59, 50, 53, 47, 84, 85, 80, 67, 70, 65, 11, 16, 10, 0, 0, 0, 0, 0, 0, 73, 76, 70, 97, 100, 95, 97, 100, 97, 103, 106, 103, 107, 109, 106, 105, 105, 103, 104, 104, 102, 106, 108, 107, 106, 106, 106, 107, 107, 109, 104, 103, 106, 109, 109, 111, 112, 111, 114, 112, 111, 114, 115, 114, 119, 109, 109, 111, 113, 112, 115, 121, 121, 123, 115, 115, 115, 108, 109, 106, 0, 0, 0, 83, 84, 77, 94, 96, 88, 55, 56, 49, 102, 104, 96, 23, 24, 19, 0, 0, 0, 2, 5, 0, 11, 14, 9, 3, 6, 1, 9, 12, 8, 8, 11, 5, 15, 18, 12, 11, 14, 9, 10, 15, 9, 5, 11, 4, 7, 13, 6, 10, 16, 7, 13, 19, 12, 11, 17, 8, 11, 17, 8, 23, 26, 18, 23, 26, 18, 14, 17, 7, 22, 26, 15, 22, 28, 20, 11, 17, 6, 12, 18, 7, 28, 34, 25, 31, 37, 28, 34, 40, 31, 11, 17, 8, 19, 25, 16, 11, 14, 9, 1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 121, 115, 28, 34, 27, 33, 39, 32, 27, 35, 27, 29, 35, 29, 43, 51, 44, 26, 32, 23, 41, 47, 40, 38, 47, 37, 31, 37, 28, 35, 41, 32, 31, 37, 28, 29, 35, 29, 28, 34, 27, 38, 43, 37, 0, 0, 0, 0, 0, 0, 9, 12, 
4, 0, 0, 0, 0, 0, 0, 118, 124, 115, 28, 34, 25, 39, 44, 38, 35, 41, 34, 38, 46, 39, 39, 44, 38, 35, 43, 36, 29, 37, 30, 36, 42, 36, 40, 45, 39, 47, 52, 46, 21, 26, 22, 36, 42, 36, 42, 48, 41, 32, 37, 31, 3, 6, 1, 0, 0, 0, 4, 7, 2, 0, 0, 0, 8, 14, 8, 38, 43, 37, 29, 35, 27, 24, 28, 20, 28, 31, 23, 35, 41, 34, 26, 30, 22, 26, 29, 24, 35, 38, 30, 33, 36, 31, 29, 32, 26, 28, 31, 25, 28, 31, 25, 29, 32, 26, 21, 24, 18, 1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 120, 116, 40, 45, 41, 26, 32, 25, 20, 25, 21, 31, 36, 30, 33, 39, 32, 25, 30, 24, 20, 26, 19, 32, 37, 31, 27, 33, 26, 28, 34, 27, 42, 45, 39, 21, 24, 18, 28, 31, 25, 2, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 10, 4, 17, 20, 15, 37, 40, 34, 47, 51, 43, 38, 41, 34, 48, 52, 44, 46, 50, 42, 50, 53, 45, 45, 48, 41, 36, 38, 28, 51, 54, 46, 41, 42, 35, 50, 51, 44, 8, 11, 3, 19, 19, 15, 42, 43, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 94, 95, 90, 35, 38, 32, 38, 41, 36, 42, 44, 41, 22, 25, 19, 23, 26, 22, 18, 21, 18, 46, 49, 46, 40, 43, 40, 19, 22, 17, 36, 39, 33, 28, 31, 25, 32, 35, 30, 30, 33, 25, 17, 20, 15, 28, 31, 23, 33, 39, 32, 40, 44, 36, 43, 46, 40, 31, 36, 32, 32, 37, 33, 26, 31, 27, 36, 42, 38, 18, 23, 19, 15, 21, 17, 19, 24, 22, 23, 31, 26, 8, 14, 8, 12, 17, 13, 4, 9, 3, 19, 24, 20, 16, 24, 17, 32, 40, 33, 36, 44, 37, 33, 36, 31, 0, 0, 0, 117, 115, 111, 117, 115, 113, 114, 113, 109, 110, 109, 102, 97, 96, 89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 4, 0, 30, 33, 27, 15, 18, 12, 44, 45, 40, 53, 54, 49, 50, 50, 48, 55, 53, 49, 11, 14, 9, 31, 34, 29, 21, 22, 17, 15, 18, 10, 29, 30, 25, 42, 43, 38, 38, 39, 34, 44, 45, 38, 64, 65, 58, 47, 48, 41, 49, 50, 43, 55, 56, 49, 41, 42, 35, 57, 58, 51, 73, 74, 67, 97, 98, 91, 76, 77, 70, 0, 0, 0, 22, 25, 19, 8, 11, 5, 86, 89, 83, 92, 95, 89, 100, 103, 97, 100, 103, 97, 97, 100, 97, 96, 99, 96, 102, 102, 102, 102, 102, 102, 112, 112, 112, 107, 107, 109, 105, 104, 107, 102, 102, 104, 111, 110, 113, 111, 110, 115, 111, 108, 113, 111, 110, 113, 104, 103, 106, 108, 108, 108, 116, 117, 114, 50, 51, 46, 0, 1, 0, 95, 97, 89, 62, 63, 54, 45, 47, 39, 37, 38, 31, 24, 26, 18, 11, 15, 7, 11, 14, 9, 11, 14, 9, 3, 8, 2, 4, 9, 3, 5, 11, 4, 13, 19, 12, 3, 8, 2, 24, 29, 23, 8, 14, 8, 0, 0, 0, 4, 9, 5, 0, 0, 0, 0, 2, 0, 5, 11, 4, 18, 22, 14, 18, 22, 14, 42, 45, 37, 28, 31, 21, 33, 37, 29, 15, 21, 13, 21, 28, 16, 7, 13, 4, 3, 9, 0, 0, 6, 0, 5, 11, 4, 0, 0, 0, 18, 21, 16, 4, 7, 2, 4, 9, 3, 4, 9, 3, 97, 102, 96, 0, 0, 0, 0, 0, 0, 0, 8, 1, 0, 2, 0, 55, 63, 55, 30, 38, 29, 31, 40, 28, 36, 44, 35, 28, 34, 25, 24, 30, 21, 12, 18, 9, 0, 0, 0, 0, 2, 0, 0, 4, 0, 0, 1, 0, 4, 7, 2, 54, 58, 50, 0, 0, 0, 12, 16, 6, 46, 50, 42, 6, 12, 3, 0, 5, 1, 0, 1, 0, 0, 0, 0, 27, 32, 28, 45, 50, 46, 36, 42, 36, 50, 56, 49, 32, 37, 31, 26, 32, 25, 0, 2, 0, 0, 1, 0, 0, 0, 0, 1, 7, 3, 0, 5, 0, 0, 2, 0, 0, 6, 0, 6, 12, 5, 19, 22, 17, 91, 97, 88, 4, 10, 1, 8, 11, 5, 8, 14, 8, 14, 17, 11, 24, 30, 21, 31, 34, 29, 36, 39, 33, 30, 33, 27, 23, 26, 20, 9, 12, 4, 5, 9, 3, 10, 13, 8, 10, 13, 8, 3, 6, 1, 17, 20, 15, 0, 0, 0, 1, 7, 1, 110, 115, 109, 10, 15, 9, 25, 30, 24, 15, 21, 15, 15, 21, 15, 21, 27, 20, 42, 48, 41, 38, 43, 39, 35, 41, 34, 39, 45, 36, 27, 33, 24, 21, 24, 18, 16, 19, 11, 25, 29, 21, 48, 52, 46, 17, 20, 15, 10, 13, 8, 0, 0, 0, 2, 5, 1, 76, 80, 74, 17, 20, 15, 16, 19, 11, 50, 53, 45, 35, 38, 30, 30, 33, 25, 35, 38, 30, 39, 43, 33, 38, 42, 32, 73, 77, 66, 42, 45, 35, 80, 82, 72, 24, 26, 16, 29, 31, 21, 21, 22, 15, 47, 48, 41, 0, 0, 0, 2, 3, 0, 99, 100, 95, 10, 11, 6, 16, 17, 12, 4, 7, 2, 19, 19, 17, 24, 27, 24, 26, 29, 26, 28, 30, 27, 12, 16, 10, 51, 54, 48, 50, 53, 
69, 139, 140, 121, 110, 109, 91, 74, 72, 57, 90, 90, 78, 64, 60, 54, 161, 160, 154, 0, 0, 0, 0, 0, 0, 44, 45, 38, 145, 147, 139, 82, 81, 74, 108, 106, 102, 98, 96, 93, 73, 71, 67, 57, 56, 52, 83, 81, 79, 141, 140, 136, 128, 128, 126, 126, 124, 123, 85, 83, 82, 181, 177, 178, 116, 111, 113, 89, 85, 84, 114, 109, 109, 114, 110, 107, 110, 106, 103, 132, 129, 123, 86, 82, 77, 91, 90, 84, 92, 91, 85, 111, 110, 103, 53, 55, 48, 0, 0, 0, 82, 86, 78, 87, 91, 80, 91, 97, 86, 28, 34, 25, 59, 64, 58, 86, 90, 96, 95, 97, 108, 90, 93, 103, 92, 95, 107, 95, 97, 108, 88, 91, 101, 96, 98, 111, 91, 94, 104, 92, 95, 105, 92, 95, 105, 95, 97, 108, 96, 96, 108, 97, 98, 107, 93, 94, 102, 97, 98, 107, 94, 95, 103, 96, 97, 106, 97, 98, 105, 100, 99, 106, 97, 98, 105, 97, 98, 105, 100, 101, 107, 97, 101, 106, 95, 99, 102, 98, 102, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 2, 0, 0, 0, 84, 91, 88, 118, 120, 122, 126, 125, 130, 121, 122, 126, 123, 125, 127, 109, 111, 113, 121, 123, 120, 110, 113, 110, 106, 111, 107, 111, 116, 112, 95, 100, 96, 84, 89, 85, 78, 83, 79, 61, 66, 62, 55, 60, 56, 0, 0, 0, 0, 0, 0, 27, 33, 26, 0, 2, 0, 33, 39, 30, 31, 37, 28, 52, 57, 51, 21, 27, 18, 118, 124, 115, 140, 144, 138, 166, 169, 164, 164, 167, 159, 136, 139, 131, 157, 160, 152, 148, 151, 143, 167, 171, 161, 152, 156, 146, 152, 158, 147, 111, 117, 106, 38, 44, 33, 35, 41, 34, 22, 28, 22, 0, 0, 0, 0, 2, 0, 0, 0, 0, 19, 24, 18, 69, 75, 68, 62, 68, 61, 22, 28, 22, 25, 30, 26, 17, 22, 18, 17, 22, 18, 17, 22, 18, 19, 22, 19, 3, 8, 2, 17, 20, 15, 10, 13, 8, 31, 34, 29, 29, 32, 26, 31, 36, 30, 33, 39, 32, 39, 42, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 26, 20, 15, 18, 10, 0, 3, 0, 32, 36, 28, 37, 40, 32, 43, 46, 38, 42, 45, 37, 42, 45, 37, 37, 40, 32, 27, 33, 26, 42, 48, 39, 45, 51, 42, 40, 46, 37, 42, 48, 39, 39, 44, 38, 42, 48, 41, 52, 57, 53, 14, 19, 15, 0, 0, 0, 0, 1, 0, 14, 17, 11, 153, 156, 151, 45, 48, 43, 47, 52, 46, 46, 51, 45, 49, 54, 50, 61, 66, 62, 46, 51, 47, 42, 47, 43, 41, 46, 42, 35, 41, 34, 43, 49, 43, 42, 48, 41, 41, 47, 40, 48, 52, 46, 50, 56, 49, 46, 51, 45, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 2, 0, 136, 142, 138, 19, 24, 20, 45, 50, 46, 35, 40, 38, 29, 36, 34, 42, 49, 47, 36, 43, 41, 29, 36, 34, 34, 41, 38, 21, 28, 26, 22, 27, 26, 27, 32, 30, 32, 37, 33, 22, 28, 24, 28, 33, 29, 34, 39, 35, 30, 33, 27, 29, 42, 35, 29, 35, 31, 31, 36, 32, 32, 37, 33, 31, 35, 34, 35, 40, 38, 35, 40, 38, 33, 38, 36, 31, 35, 34, 35, 40, 38, 34, 39, 37, 34, 39, 35, 38, 43, 39, 35, 40, 36, 42, 47, 43, 25, 28, 25, 0, 0, 0, 16, 14, 8, 163, 160, 149, 164, 160, 145, 159, 158, 140, 164, 162, 145, 168, 164, 147, 159, 158, 138, 177, 175, 155, 174, 173, 151, 174, 173, 151, 121, 120, 99, 123, 123, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 94, 93, 82, 77, 77, 65, 83, 81, 66, 142, 140, 122, 83, 81, 66, 104, 102, 87, 50, 47, 39, 153, 151, 147, 35, 34, 30, 0, 0, 0, 0, 0, 0, 108, 109, 102, 142, 143, 138, 136, 137, 132, 144, 142, 138, 141, 140, 136, 99, 98, 94, 145, 143, 139, 149, 145, 141, 137, 134, 133, 158, 156, 154, 154, 152, 151, 165, 161, 160, 142, 137, 138, 0, 0, 0, 107, 103, 100, 81, 77, 76, 85, 81, 75, 102, 98, 93, 129, 126, 118, 85, 81, 75, 77, 76, 70, 94, 92, 86, 92, 93, 86, 84, 85, 78, 0, 0, 0, 88, 94, 85, 79, 85, 77, 95, 101, 90, 26, 32, 23, 59, 64, 56, 78, 82, 87, 92, 96, 103, 91, 94, 104, 93, 97, 105, 92, 96, 103, 90, 93, 101, 90, 93, 103, 88, 91, 99, 89, 92, 100, 89, 92, 100, 92, 96, 103, 91, 94, 102, 93, 94, 100, 95, 96, 103, 93, 94, 100, 99, 100, 104, 99, 100, 104, 94, 96, 97, 92, 94, 95, 79, 81, 82, 68, 73, 71, 52, 57, 53, 49, 56, 51, 
107, 115, 108, 164, 170, 164, 1, 9, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 3, 0, 56, 63, 58, 3, 8, 2, 15, 21, 17, 1, 7, 1, 0, 1, 0, 10, 15, 9, 33, 39, 30, 45, 51, 42, 17, 23, 14, 7, 13, 4, 13, 21, 11, 11, 16, 10, 24, 32, 25, 37, 45, 40, 33, 38, 34, 40, 45, 41, 7, 12, 8, 0, 0, 0, 0, 0, 0, 0, 4, 0, 13, 19, 10, 13, 21, 11, 171, 177, 171, 36, 42, 36, 42, 48, 39, 41, 47, 40, 40, 46, 37, 33, 39, 30, 40, 46, 37, 39, 45, 36, 35, 41, 32, 42, 48, 39, 46, 52, 43, 38, 44, 35, 42, 48, 41, 45, 51, 42, 40, 45, 39, 35, 41, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 13, 6, 36, 42, 36, 33, 39, 32, 29, 35, 29, 39, 44, 38, 38, 43, 37, 39, 47, 40, 33, 38, 34, 32, 37, 33, 38, 43, 37, 35, 41, 34, 38, 43, 37, 53, 58, 52, 35, 41, 34, 33, 39, 32, 35, 41, 34, 33, 39, 32, 1, 7, 1, 0, 0, 0, 0, 0, 0, 54, 58, 48, 94, 98, 87, 35, 38, 28, 31, 35, 27, 69, 73, 63, 48, 54, 43, 42, 45, 35, 43, 50, 39, 41, 47, 38, 38, 44, 35, 32, 38, 29, 35, 41, 32, 28, 34, 25, 26, 32, 23, 43, 49, 43, 29, 35, 29, 29, 35, 31, 10, 13, 8, 0, 0, 0, 8, 11, 5, 130, 133, 128, 24, 27, 22, 23, 26, 22, 5, 10, 6, 17, 22, 18, 8, 14, 10, 21, 26, 22, 34, 39, 35, 39, 44, 40, 36, 42, 36, 43, 49, 43, 55, 61, 54, 51, 54, 48, 33, 39, 30, 24, 29, 23, 18, 23, 17, 24, 27, 22, 28, 31, 23, 0, 0, 0, 1, 7, 1, 113, 119, 112, 3, 6, 3, 13, 19, 12, 45, 48, 45, 5, 10, 6, 12, 17, 15, 8, 13, 12, 7, 12, 10, 10, 14, 13, 24, 31, 29, 10, 14, 13, 19, 24, 22, 49, 54, 50, 74, 79, 73, 10, 15, 11, 11, 16, 12, 24, 29, 25, 40, 45, 39, 28, 33, 29, 54, 59, 55, 30, 33, 29, 21, 23, 20, 40, 43, 40, 35, 37, 36, 37, 39, 38, 31, 33, 33, 25, 30, 28, 25, 28, 25, 14, 16, 13, 48, 51, 48, 29, 31, 28, 53, 56, 53, 30, 32, 31, 53, 56, 53, 7, 9, 6, 0, 0, 0, 134, 131, 120, 141, 138, 123, 153, 148, 132, 162, 158, 141, 168, 167, 149, 170, 166, 147, 164, 163, 142, 165, 162, 140, 177, 174, 152, 127, 124, 111, 75, 75, 61, 0, 0, 0, 0, 0, 0, 75, 74, 63, 71, 71, 62, 84, 84, 72, 138, 138, 122, 115, 113, 98, 85, 83, 65, 35, 33, 17, 85, 81, 73, 117, 116, 109, 0, 0, 0, 70, 71, 64, 106, 107, 100, 107, 108, 101, 100, 101, 94, 141, 142, 135, 115, 116, 109, 106, 107, 102, 113, 112, 108, 118, 116, 112, 120, 118, 117, 134, 132, 131, 163, 161, 160, 89, 87, 85, 87, 83, 82, 79, 74, 74, 29, 24, 24, 53, 49, 46, 61, 57, 54, 71, 67, 62, 96, 95, 88, 145, 144, 135, 73, 71, 65, 78, 78, 69, 91, 90, 82, 92, 91, 85, 80, 82, 72, 0, 0, 0, 82, 86, 78, 77, 83, 74, 84, 90, 81, 33, 39, 30, 55, 61, 52, 61, 65, 66, 69, 73, 74, 62, 66, 69, 76, 80, 85, 69, 73, 78, 71, 75, 78, 68, 72, 75, 69, 73, 76, 67, 71, 74, 52, 56, 59, 36, 41, 41, 33, 37, 38, 49, 54, 52, 52, 57, 53, 79, 85, 81, 113, 116, 110, 100, 103, 97, 37, 40, 34, 25, 28, 23, 12, 16, 10, 12, 18, 11, 27, 33, 26, 25, 33, 26, 25, 30, 24, 25, 33, 26, 36, 42, 38, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 4, 0, 1, 9, 2, 36, 42, 36, 39, 47, 40, 23, 31, 22, 37, 45, 36, 34, 40, 31, 29, 37, 28, 43, 51, 42, 25, 31, 22, 25, 34, 24, 63, 71, 62, 34, 42, 34, 29, 37, 30, 46, 54, 49, 30, 38, 33, 44, 52, 47, 46, 51, 47, 0, 0, 0, 0, 0, 0, 14, 17, 9, 27, 33, 24, 99, 105, 96, 29, 35, 27, 40, 46, 37, 38, 44, 35, 38, 43, 37, 55, 63, 53, 39, 45, 36, 42, 48, 39, 67, 73, 64, 38, 44, 35, 39, 45, 36, 22, 28, 20, 19, 25, 16, 19, 24, 18, 19, 25, 16, 20, 26, 19, 31, 36, 30, 33, 39, 32, 14, 17, 11, 32, 35, 30, 0, 0, 0, 140, 146, 137, 24, 29, 23, 8, 14, 6, 20, 26, 19, 33, 39, 32, 32, 37, 31, 36, 42, 36, 40, 45, 39, 52, 57, 51, 32, 35, 30, 25, 28, 23, 8, 14, 8, 20, 26, 19, 26, 32, 25, 21, 27, 20, 29, 35, 31, 14, 20, 13, 42, 48, 41, 0, 0, 0, 151, 157, 148, 25, 29, 21, 25, 29, 21, 42, 45, 37, 26, 30, 22, 31, 34, 
29, 16, 19, 11, 43, 46, 38, 36, 42, 34, 42, 48, 39, 48, 54, 45, 32, 38, 29, 22, 28, 20, 41, 47, 38, 5, 11, 4, 11, 16, 12, 10, 15, 11, 18, 21, 18, 0, 0, 0, 75, 78, 75, 10, 13, 10, 4, 7, 4, 18, 21, 20, 15, 17, 16, 4, 7, 6, 4, 7, 6, 3, 5, 5, 6, 11, 9, 20, 25, 23, 15, 21, 17, 34, 40, 33, 48, 54, 47, 74, 80, 71, 19, 25, 16, 53, 57, 49, 37, 40, 32, 0, 3, 0, 0, 0, 0, 0, 0, 0, 72, 74, 73, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 4, 3, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 4, 2, 0, 0, 0, 2, 9, 9, 11, 16, 14, 26, 31, 29, 5, 10, 8, 15, 20, 19, 11, 16, 14, 0, 2, 0, 0, 4, 0, 34, 39, 37, 12, 17, 13, 23, 25, 24, 29, 35, 31, 8, 10, 7, 15, 17, 16, 5, 7, 9, 11, 13, 15, 12, 14, 18, 0, 1, 0, 3, 5, 5, 12, 15, 12, 15, 17, 14, 7, 9, 8, 0, 0, 0, 52, 55, 51, 45, 48, 45, 36, 39, 33, 0, 0, 0, 113, 110, 99, 146, 141, 127, 151, 147, 130, 146, 145, 125, 172, 169, 149, 165, 162, 142, 157, 154, 134, 179, 176, 154, 175, 171, 150, 139, 139, 123, 9, 8, 0, 0, 0, 0, 0, 0, 0, 84, 84, 72, 66, 65, 54, 84, 84, 70, 131, 131, 113, 81, 80, 60, 95, 93, 76, 47, 45, 32, 110, 107, 99, 13, 14, 7, 95, 96, 91, 99, 100, 93, 84, 85, 78, 77, 78, 71, 80, 81, 74, 77, 78, 69, 78, 80, 70, 79, 81, 71, 84, 85, 76, 95, 97, 89, 114, 115, 110, 63, 62, 58, 26, 24, 20, 109, 105, 102, 101, 97, 94, 0, 0, 0, 122, 118, 113, 56, 52, 46, 59, 55, 52, 56, 55, 49, 88, 85, 79, 136, 133, 125, 95, 94, 85, 78, 74, 66, 90, 89, 80, 113, 112, 104, 87, 86, 77, 0, 0, 0, 72, 75, 67, 76, 80, 72, 79, 82, 74, 12, 18, 9, 195, 201, 192, 0, 0, 0, 0, 1, 0, 7, 12, 8, 18, 23, 21, 0, 4, 2, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 28, 34, 27, 7, 12, 8, 14, 20, 13, 5, 11, 4, 7, 13, 6, 18, 23, 19, 13, 18, 14, 19, 24, 18, 22, 28, 24, 19, 24, 20, 28, 34, 27, 25, 30, 26, 27, 33, 26, 22, 30, 25, 27, 34, 29, 31, 36, 30, 24, 32, 27, 21, 26, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 149, 157, 150, 31, 39, 32, 15, 23, 16, 31, 39, 32, 7, 15, 8, 28, 34, 25, 33, 39, 30, 50, 56, 48, 45, 51, 42, 39, 45, 36, 61, 67, 58, 13, 19, 10, 3, 11, 4, 0, 7, 0, 10, 18, 11, 17, 25, 20, 0, 6, 1, 0, 5, 0, 22, 30, 25, 25, 30, 24, 10, 16, 7, 28, 34, 25, 60, 66, 57, 10, 16, 7, 13, 19, 12, 15, 21, 15, 14, 20, 13, 19, 24, 18, 32, 38, 29, 38, 44, 35, 42, 48, 39, 38, 44, 35, 15, 21, 13, 24, 30, 21, 13, 19, 12, 25, 31, 22, 13, 19, 10, 23, 26, 20, 15, 18, 12, 21, 24, 18, 30, 31, 26, 2, 3, 0, 131, 134, 129, 0, 2, 0, 4, 7, 2, 4, 9, 5, 11, 16, 12, 1, 7, 3, 22, 28, 24, 20, 25, 21, 25, 30, 24, 38, 43, 37, 14, 20, 13, 11, 14, 9, 14, 20, 13, 13, 19, 10, 13, 19, 12, 1, 7, 3, 0, 2, 1, 3, 8, 4, 29, 31, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 15, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 18, 21, 16, 106, 107, 109, 132, 134, 138, 136, 135, 142, 137, 136, 143, 138, 138, 142, 140, 138, 146, 144, 145, 151, 158, 157, 166, 152, 153, 162, 158, 157, 166, 149, 148, 157, 161, 159, 169, 150, 149, 158, 140, 142, 148, 104, 106, 108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 28, 20, 67, 70, 65, 69, 73, 67, 73, 75, 74, 152, 154, 158, 153, 152, 157, 155, 154, 161, 150, 151, 157, 153, 151, 160, 144, 145, 153, 149, 150, 156, 153, 151, 159, 148, 148, 157, 148, 148, 157, 143, 143, 152, 143, 146, 157, 140, 143, 153, 131, 134, 144, 141, 144, 154, 151, 151, 162, 152, 153, 164, 149, 152, 162, 151, 153, 164, 148, 151, 159, 158, 158, 169, 159, 159, 171, 156, 156, 165, 153, 154, 163, 151, 152, 159, 164, 165, 169, 168, 169, 176, 158, 159, 165, 156, 157, 163, 158, 159, 165, 163, 164, 168, 157, 156, 161, 142, 143, 147, 144, 145, 149, 142, 143, 147, 121, 121, 123, 12, 15, 14, 0, 0, 0, 0, 0, 0, 86, 83, 72, 137, 135, 120, 167, 162, 146, 161, 
157, 140, 157, 153, 136, 159, 155, 135, 177, 173, 154, 172, 169, 147, 183, 180, 156, 61, 55, 46, 0, 0, 0, 0, 0, 0, 90, 90, 78, 64, 60, 52, 63, 60, 49, 139, 137, 124, 81, 80, 62, 80, 78, 61, 60, 58, 43, 87, 84, 74, 145, 144, 135, 0, 0, 0, 112, 113, 106, 107, 108, 101, 120, 121, 114, 106, 107, 100, 118, 119, 112, 114, 118, 107, 113, 116, 106, 109, 113, 103, 109, 111, 101, 98, 99, 92, 100, 101, 94, 10, 12, 4, 0, 0, 0, 97, 96, 87, 123, 120, 112, 0, 0, 0, 99, 95, 87, 35, 31, 25, 35, 31, 25, 57, 53, 46, 102, 101, 92, 144, 141, 133, 78, 74, 66, 78, 78, 69, 109, 108, 99, 108, 107, 98, 70, 71, 62, 0, 0, 0, 74, 78, 70, 68, 72, 62, 58, 62, 51, 131, 137, 128, 11, 17, 8, 1, 7, 1, 17, 20, 15, 8, 14, 8, 10, 15, 11, 15, 18, 12, 0, 5, 1, 0, 5, 1, 18, 23, 19, 8, 14, 10, 4, 9, 5, 0, 2, 0, 15, 21, 17, 13, 18, 14, 17, 22, 16, 14, 19, 15, 18, 23, 19, 22, 28, 22, 25, 30, 26, 16, 24, 19, 33, 38, 34, 24, 32, 25, 43, 49, 43, 34, 40, 33, 25, 33, 26, 33, 39, 32, 15, 23, 18, 3, 8, 4, 34, 39, 35, 8, 10, 7, 0, 0, 0, 39, 44, 40, 0, 8, 1, 0, 6, 1, 0, 4, 0, 63, 69, 62, 9, 17, 10, 32, 38, 29, 50, 56, 48, 63, 69, 60, 56, 63, 51, 41, 47, 38, 20, 26, 17, 0, 6, 0, 0, 3, 0, 0, 5, 0, 0, 1, 0, 0, 3, 0, 0, 2, 0, 0, 5, 1, 0, 0, 0, 32, 37, 33, 0, 0, 0, 163, 166, 160, 0, 0, 0, 2, 5, 1, 3, 6, 3, 11, 14, 11, 19, 22, 17, 22, 24, 21, 7, 10, 4, 35, 41, 32, 36, 42, 34, 35, 41, 32, 26, 32, 23, 15, 21, 13, 10, 16, 7, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 25, 28, 7, 6, 11, 21, 21, 23, 62, 61, 66, 78, 77, 82, 120, 121, 127, 155, 156, 162, 152, 153, 160, 151, 152, 159, 142, 143, 149, 158, 159, 163, 8, 10, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5, 0, 32, 35, 30, 99, 102, 96, 101, 102, 99, 107, 109, 110, 127, 127, 129, 138, 138, 140, 147, 144, 149, 147, 142, 146, 150, 144, 151, 149, 143, 151, 153, 147, 156, 149, 142, 153, 143, 136, 148, 144, 139, 152, 133, 131, 141, 147, 144, 149, 6, 6, 4, 0, 0, 0, 43, 44, 37, 88, 90, 82, 90, 91, 84, 66, 67, 64, 48, 48, 46, 150, 149, 156, 123, 122, 131, 134, 132, 144, 138, 137, 146, 135, 133, 143, 141, 139, 149, 177, 175, 185, 246, 244, 254, 140, 136, 146, 134, 132, 144, 136, 134, 146, 131, 130, 141, 123, 123, 134, 147, 150, 158, 0, 0, 0, 19, 24, 20, 0, 0, 0, 79, 82, 74, 79, 82, 74, 55, 59, 53, 36, 38, 37, 125, 124, 128, 150, 149, 158, 153, 151, 160, 158, 156, 168, 153, 151, 162, 148, 146, 156, 151, 149, 161, 150, 148, 160, 148, 145, 160, 144, 144, 157, 145, 145, 160, 148, 147, 163, 140, 140, 156, 140, 140, 156, 139, 139, 155, 136, 135, 151, 137, 136, 152, 138, 138, 152, 137, 136, 152, 138, 138, 154, 136, 139, 152, 136, 136, 149, 136, 136, 149, 137, 137, 150, 136, 136, 149, 138, 138, 150, 135, 135, 146, 135, 135, 144, 132, 133, 142, 137, 138, 146, 130, 131, 139, 129, 130, 138, 130, 131, 139, 137, 138, 146, 128, 128, 137, 133, 135, 141, 104, 103, 106, 7, 9, 6, 0, 0, 0, 66, 65, 54, 139, 137, 122, 166, 165, 147, 164, 160, 143, 152, 151, 133, 160, 156, 137, 159, 155, 135, 163, 160, 138, 176, 173, 151, 0, 2, 0, 0, 0, 0, 23, 22, 13, 71, 71, 62, 51, 48, 38, 99, 96, 83, 168, 166, 151, 87, 85, 68, 95, 93, 78, 49, 46, 31, 63, 60, 47, 7, 7, 0, 77, 78, 69, 100, 102, 92, 58, 60, 50, 51, 53, 43, 57, 58, 51, 56, 57, 48, 45, 47, 37, 48, 52, 42, 42, 45, 35, 40, 41, 29, 100, 102, 90, 95, 97, 85, 0, 1, 0, 8, 10, 0, 67, 67, 53, 36, 33, 22, 24, 22, 9, 49, 46, 35, 67, 64, 56, 40, 37, 29, 61, 60, 51, 88, 85, 77, 130, 127, 117, 80, 77, 67, 87, 84, 74, 101, 98, 88, 108, 107, 96, 92, 92, 83, 0, 0, 0, 79, 83, 72, 74, 78, 68, 157, 161, 150, 15, 18, 10, 28, 31, 25, 4, 7, 2, 12, 16, 10, 16, 19, 13, 18, 21, 18, 1, 3, 0, 0, 0, 0, 4, 9, 5, 7, 12, 10, 
0, 4, 2, 6, 11, 9, 3, 7, 6, 3, 7, 6, 5, 10, 8, 0, 2, 0, 7, 12, 10, 0, 2, 0, 0, 3, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 4, 5, 1, 6, 9, 0, 2, 3, 0, 0, 0, 0, 0, 0, 2, 3, 9, 11, 12, 19, 0, 4, 11, 0, 1, 9, 53, 54, 65, 80, 80, 91, 111, 112, 119, 145, 147, 151, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 13, 8, 165, 166, 163, 176, 175, 178, 163, 162, 167, 170, 167, 173, 170, 167, 173, 177, 174, 182, 171, 168, 176, 160, 156, 166, 154, 150, 160, 149, 142, 153, 144, 140, 150, 134, 130, 143, 130, 125, 138, 128, 126, 138, 136, 135, 144, 0, 0, 1, 15, 18, 12, 0, 0, 0, 47, 53, 44, 82, 86, 78, 93, 96, 90, 64, 67, 61, 149, 151, 150, 143, 142, 151, 150, 148, 160, 143, 141, 155, 147, 145, 157, 154, 152, 164, 146, 141, 156, 143, 141, 155, 144, 142, 156, 141, 138, 153, 133, 130, 145, 129, 126, 143, 127, 124, 141, 127, 127, 140, 0, 0, 0, 82, 85, 84, 0, 0, 0, 106, 107, 100, 107, 108, 101, 83, 84, 77, 71, 72, 67, 114, 112, 111, 110, 107, 110, 113, 110, 118, 135, 129, 137, 146, 140, 149, 156, 150, 159, 188, 183, 189, 190, 184, 192, 171, 164, 176, 156, 149, 162, 155, 147, 163, 144, 137, 153, 139, 134, 149, 149, 145, 160, 0, 0, 8, 0, 0, 0, 0, 0, 0, 62, 63, 56, 55, 56, 51, 52, 53, 50, 47, 47, 47, 126, 125, 130, 129, 128, 137, 136, 134, 146, 143, 142, 151, 149, 145, 156, 149, 145, 158, 255, 255, 255, 121, 117, 128, 139, 135, 147, 139, 135, 147, 135, 133, 145, 134, 130, 143, 128, 126, 138, 127, 127, 138, 7, 8, 14, 24, 27, 24, 0, 0, 0, 14, 17, 11, 24, 27, 22, 32, 35, 30, 37, 37, 39, 66, 65, 72, 150, 149, 158, 153, 151, 160, 151, 150, 159, 150, 149, 158, 148, 146, 158, 145, 144, 155, 149, 147, 161, 154, 151, 166, 150, 148, 162, 145, 143, 159, 142, 141, 157, 149, 148, 164, 145, 145, 160, 142, 141, 157, 140, 140, 156, 143, 142, 158, 140, 142, 157, 141, 143, 158, 132, 134, 149, 134, 136, 151, 133, 135, 150, 133, 133, 147, 132, 132, 146, 138, 138, 152, 136, 136, 149, 131, 131, 143, 128, 128, 137, 125, 126, 135, 133, 131, 139, 128, 129, 135, 128, 127, 134, 130, 131, 139, 136, 137, 145, 131, 130, 139, 131, 132, 141, 137, 138, 144, 32, 34, 36, 24, 25, 22, 45, 44, 35, 118, 116, 103, 136, 132, 117, 157, 153, 136, 157, 154, 134, 185, 181, 162, 155, 151, 132, 177, 173, 154, 151, 148, 128, 0, 0, 0, 0, 0, 0, 108, 107, 98, 85, 81, 73, 63, 60, 49, 123, 121, 106, 120, 118, 100, 96, 95, 77, 177, 175, 155, 70, 68, 50, 59, 58, 45, 0, 0, 0, 104, 108, 98, 98, 99, 90, 92, 95, 85, 104, 108, 98, 101, 105, 94, 101, 105, 94, 90, 94, 84, 82, 87, 74, 94, 98, 85, 99, 101, 89, 92, 94, 80, 85, 87, 75, 0, 0, 0, 184, 185, 164, 188, 189, 168, 0, 0, 0, 79, 76, 64, 53, 50, 40, 42, 38, 30, 31, 28, 20, 79, 76, 68, 106, 103, 92, 129, 126, 116, 87, 84, 74, 91, 88, 77, 110, 108, 95, 107, 104, 92, 100, 100, 91, 0, 0, 0, 86, 88, 78, 163, 165, 153, 9, 13, 2, 25, 29, 21, 25, 28, 25, 6, 11, 9, 12, 17, 13, 6, 11, 7, 1, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 5, 12, 54, 55, 62, 93, 94, 100, 117, 118, 125, 148, 147, 154, 171, 169, 181, 169, 167, 179, 159, 159, 173, 164, 163, 179, 156, 155, 171, 151, 149, 165, 143, 142, 158, 145, 144, 163, 143, 142, 160, 144, 143, 162, 142, 140, 161, 144, 143, 164, 143, 141, 162, 147, 143, 165, 142, 139, 158, 138, 135, 157, 142, 140, 154, 50, 51, 55, 7, 9, 8, 38, 41, 34, 0, 0, 0, 0, 0, 0, 89, 93, 85, 56, 62, 53, 53, 56, 51, 69, 69, 67, 168, 170, 174, 160, 156, 164, 166, 162, 170, 158, 152, 163, 155, 151, 162, 140, 136, 146, 133, 129, 141, 139, 132, 148, 136, 129, 145, 132, 127, 142, 131, 126, 141, 127, 122, 140, 
126, 121, 138, 125, 123, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 87, 79, 83, 87, 79, 71, 77, 68, 62, 65, 60, 78, 79, 81, 141, 138, 153, 138, 136, 150, 150, 148, 162, 154, 151, 166, 153, 150, 164, 157, 155, 169, 162, 159, 174, 255, 255, 255, 138, 136, 150, 133, 130, 147, 138, 136, 152, 129, 126, 143, 123, 121, 137, 152, 153, 162, 3, 5, 7, 0, 0, 0, 59, 61, 53, 83, 84, 77, 72, 73, 66, 60, 61, 56, 53, 54, 51, 104, 101, 104, 109, 105, 113, 135, 132, 140, 151, 145, 154, 168, 162, 171, 159, 153, 162, 168, 162, 171, 255, 255, 255, 173, 166, 180, 156, 149, 164, 145, 140, 153, 140, 135, 150, 141, 136, 154, 158, 153, 168, 12, 11, 14, 0, 0, 0, 57, 58, 51, 50, 51, 46, 43, 43, 41, 37, 39, 38, 28, 28, 30, 143, 142, 151, 131, 130, 139, 144, 140, 150, 146, 142, 152, 148, 144, 157, 141, 137, 148, 125, 121, 133, 146, 141, 154, 141, 137, 150, 139, 135, 147, 133, 129, 141, 128, 126, 138, 123, 122, 133, 60, 60, 72, 8, 10, 9, 0, 0, 0, 0, 0, 0, 42, 48, 41, 17, 22, 18, 36, 38, 39, 2, 3, 7, 151, 152, 159, 148, 146, 156, 153, 149, 159, 151, 150, 159, 153, 151, 162, 151, 149, 161, 148, 145, 160, 150, 148, 162, 147, 144, 159, 148, 145, 162, 144, 143, 159, 137, 136, 152, 139, 139, 155, 142, 141, 157, 142, 141, 157, 140, 140, 156, 142, 141, 157, 140, 140, 156, 135, 137, 152, 136, 138, 154, 132, 132, 146, 133, 135, 148, 133, 133, 149, 131, 131, 145, 132, 132, 146, 130, 131, 139, 186, 187, 193, 144, 143, 150, 151, 151, 155, 128, 129, 135, 131, 131, 135, 164, 163, 170, 217, 218, 225, 116, 117, 126, 129, 130, 138, 125, 126, 135, 25, 26, 33, 20, 20, 20, 0, 0, 0, 109, 106, 96, 157, 153, 138, 149, 145, 128, 156, 152, 133, 156, 152, 133, 149, 145, 126, 159, 155, 135, 168, 164, 145, 0, 0, 0, 58, 61, 53, 66, 65, 56, 79, 76, 68, 83, 78, 66, 143, 141, 124, 147, 146, 128, 135, 133, 115, 103, 102, 84, 75, 74, 54, 60, 60, 44, 0, 0, 0, 121, 123, 113, 100, 104, 93, 92, 95, 85, 118, 119, 110, 93, 95, 85, 85, 88, 78, 94, 98, 87, 94, 96, 84, 97, 99, 86, 108, 110, 98, 98, 100, 88, 66, 69, 54, 0, 0, 0, 111, 115, 90, 181, 182, 163, 0, 0, 0, 113, 110, 95, 44, 41, 29, 31, 28, 18, 28, 25, 14, 61, 59, 48, 152, 150, 137, 117, 115, 102, 108, 105, 95, 82, 80, 67, 113, 110, 97, 102, 102, 88, 102, 99, 89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5, 0, 145, 148, 154, 167, 168, 179, 155, 158, 168, 153, 154, 165, 148, 148, 159, 146, 147, 158, 124, 125, 136, 138, 138, 150, 138, 138, 150, 133, 134, 145, 136, 136, 147, 136, 134, 146, 143, 141, 153, 143, 141, 153, 143, 141, 153, 146, 141, 156, 140, 135, 150, 144, 139, 154, 147, 142, 157, 146, 141, 156, 146, 141, 158, 144, 138, 158, 147, 143, 163, 141, 137, 159, 141, 137, 159, 143, 140, 161, 151, 148, 169, 153, 149, 173, 149, 145, 169, 144, 140, 164, 144, 140, 164, 143, 139, 163, 142, 138, 162, 140, 136, 160, 138, 135, 159, 142, 138, 162, 136, 133, 154, 182, 180, 191, 6, 5, 8, 19, 22, 17, 0, 0, 0, 0, 0, 0, 46, 50, 42, 81, 87, 78, 43, 49, 43, 44, 45, 42, 138, 138, 142, 167, 163, 171, 163, 159, 170, 165, 159, 170, 162, 158, 169, 151, 147, 157, 144, 137, 151, 139, 135, 145, 130, 125, 140, 131, 126, 141, 126, 121, 136, 126, 121, 136, 125, 122, 138, 116, 114, 128, 140, 141, 152, 3, 5, 7, 0, 4, 0, 0, 0, 0, 18, 24, 15, 62, 68, 59, 28, 34, 27, 15, 20, 19, 161, 159, 171, 142, 140, 154, 145, 143, 157, 154, 151, 166, 155, 153, 167, 136, 134, 148, 130, 125, 140, 255, 255, 255, 161, 158, 173, 147, 144, 159, 133, 130, 145, 129, 126, 143, 126, 123, 140, 116, 114, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 48, 37, 66, 67, 60, 43, 44, 39, 48, 48, 46, 133, 131, 131, 113, 110, 116, 118, 112, 121, 138, 132, 141, 157, 151, 160, 166, 
160, 171, 161, 155, 166, 153, 147, 158, 163, 156, 169, 157, 150, 166, 151, 144, 160, 144, 139, 154, 135, 130, 148, 149, 144, 162, 0, 0, 0, 0, 0, 0, 15, 16, 11, 20, 21, 16, 21, 23, 20, 24, 24, 24, 26, 28, 30, 151, 150, 159, 121, 117, 130, 142, 140, 152, 147, 143, 155, 149, 147, 159, 151, 147, 157, 147, 143, 155, 140, 136, 148, 144, 139, 152, 139, 135, 147, 130, 128, 140, 123, 122, 133, 125, 123, 134, 149, 147, 159, 0, 0, 0, 45, 48, 45, 0, 0, 0, 23, 26, 20, 1, 7, 3, 0, 2, 1, 3, 5, 9, 124, 125, 132, 150, 151, 159, 150, 148, 160, 152, 147, 160, 150, 148, 160, 151, 146, 161, 149, 147, 161, 148, 145, 160, 145, 143, 159, 147, 144, 160, 143, 140, 157, 143, 142, 158, 140, 140, 156, 142, 141, 157, 141, 143, 158, 137, 136, 152, 140, 142, 157, 136, 138, 154, 135, 137, 152, 133, 135, 150, 133, 133, 149, 135, 134, 150, 135, 135, 148, 133, 133, 147, 128, 128, 139, 127, 125, 137, 155, 154, 161, 255, 255, 255, 242, 241, 246, 255, 255, 255, 255, 254, 255, 248, 247, 252, 255, 255, 255, 131, 130, 139, 126, 124, 136, 130, 129, 138, 80, 81, 87, 25, 28, 27, 0, 0, 0, 71, 67, 60, 144, 140, 126, 142, 138, 123, 163, 159, 140, 159, 155, 135, 155, 151, 132, 152, 151, 131, 150, 149, 128, 0, 0, 0, 91, 92, 83, 95, 92, 82, 80, 77, 67, 101, 99, 84, 174, 173, 155, 129, 127, 110, 129, 127, 110, 18, 17, 0, 91, 89, 71, 150, 153, 136, 0, 0, 0, 73, 75, 65, 98, 100, 88, 98, 100, 88, 117, 121, 111, 109, 113, 100, 111, 113, 100, 106, 110, 97, 102, 106, 94, 105, 107, 95, 88, 91, 76, 60, 65, 50, 0, 0, 0, 2, 5, 0, 58, 62, 40, 255, 255, 233, 0, 0, 0, 101, 99, 82, 42, 39, 26, 57, 54, 41, 62, 56, 47, 91, 88, 75, 163, 160, 147, 107, 103, 88, 84, 79, 67, 89, 87, 74, 94, 92, 77, 124, 122, 109, 108, 105, 95, 0, 0, 0, 130, 129, 118, 115, 117, 105, 81, 85, 72, 67, 71, 63, 44, 47, 41, 114, 114, 123, 136, 136, 149, 134, 136, 151, 136, 135, 151, 129, 129, 142, 136, 136, 149, 142, 142, 155, 137, 137, 150, 137, 137, 150, 137, 135, 149, 144, 142, 156, 141, 139, 151, 141, 138, 153, 141, 138, 153, 142, 140, 154, 142, 138, 153, 144, 139, 154, 141, 136, 154, 140, 135, 152, 138, 132, 150, 140, 135, 152, 140, 134, 155, 139, 133, 156, 142, 136, 159, 146, 140, 162, 141, 137, 161, 140, 136, 160, 145, 142, 166, 141, 137, 161, 142, 138, 162, 142, 138, 162, 142, 138, 162, 137, 133, 158, 141, 137, 161, 141, 137, 161, 146, 139, 164, 140, 136, 158, 145, 143, 159, 0, 0, 2, 0, 1, 0, 21, 24, 18, 0, 0, 0, 15, 18, 12, 69, 75, 66, 28, 31, 25, 43, 46, 40, 78, 78, 80, 160, 156, 164, 165, 161, 171, 166, 160, 171, 163, 156, 167, 150, 143, 155, 231, 225, 236, 158, 152, 163, 143, 136, 148, 255, 255, 255, 144, 139, 154, 130, 123, 139, 123, 118, 133, 118, 115, 129, 118, 118, 132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 16, 10, 25, 30, 26, 22, 27, 28, 106, 104, 116, 145, 144, 155, 149, 147, 161, 149, 147, 161, 155, 151, 166, 158, 156, 172, 144, 142, 156, 150, 148, 162, 141, 138, 153, 137, 135, 151, 139, 134, 149, 128, 125, 142, 125, 122, 138, 119, 116, 131, 107, 106, 113, 0, 0, 0, 0, 0, 0, 23, 24, 17, 27, 28, 23, 15, 16, 11, 28, 29, 24, 35, 35, 35, 123, 119, 125, 123, 117, 126, 132, 126, 135, 146, 140, 151, 159, 153, 164, 165, 157, 169, 164, 155, 169, 166, 159, 173, 156, 149, 162, 151, 144, 160, 139, 134, 149, 134, 129, 147, 131, 129, 145, 105, 103, 115, 10, 10, 10, 0, 0, 0, 9, 12, 8, 9, 12, 8, 9, 11, 10, 14, 15, 17, 80, 79, 86, 136, 134, 146, 130, 128, 140, 141, 137, 148, 147, 145, 157, 152, 148, 158, 163, 159, 170, 126, 122, 132, 142, 138, 151, 139, 135, 147, 132, 128, 140, 128, 126, 138, 126, 124, 136, 134, 132, 144, 15, 17, 18, 23, 26, 22, 0, 0, 0, 0, 2, 0, 0, 4, 0, 0, 4, 2, 
1, 6, 7, 38, 42, 45, 144, 144, 155, 154, 152, 164, 150, 148, 160, 153, 151, 162, 149, 147, 159, 149, 147, 161, 148, 145, 160, 147, 144, 159, 145, 143, 159, 143, 142, 158, 143, 142, 158, 144, 143, 159, 143, 142, 158, 138, 140, 155, 142, 144, 159, 139, 141, 156, 136, 138, 154, 135, 137, 152, 134, 137, 149, 135, 138, 150, 132, 134, 147, 137, 137, 150, 135, 135, 148, 133, 134, 145, 132, 133, 142, 129, 130, 138, 137, 138, 144, 130, 129, 136, 127, 126, 133, 130, 129, 136, 121, 120, 127, 128, 128, 137, 125, 127, 133, 128, 128, 137, 131, 132, 141, 144, 143, 152, 21, 26, 26, 0, 0, 0, 39, 36, 28, 157, 150, 136, 142, 138, 123, 162, 158, 141, 154, 150, 133, 160, 156, 137, 147, 146, 126, 171, 170, 149, 118, 119, 112, 97, 96, 87, 77, 74, 63, 92, 89, 76, 138, 136, 121, 115, 111, 95, 87, 83, 66, 99, 97, 79, 0, 0, 0, 126, 127, 108, 121, 124, 107, 0, 0, 0, 0, 0, 0, 20, 22, 10, 59, 61, 49, 79, 81, 71, 69, 74, 61, 88, 92, 80, 83, 85, 72, 67, 69, 57, 33, 35, 22, 23, 25, 13, 0, 0, 0, 0, 0, 0, 84, 85, 66, 60, 61, 40, 182, 183, 160, 0, 0, 0, 63, 61, 43, 54, 52, 37, 43, 39, 24, 74, 69, 57, 87, 82, 70, 183, 178, 164, 83, 81, 66, 96, 94, 81, 103, 101, 88, 135, 133, 117, 115, 112, 100, 112, 112, 100, 0, 0, 0, 154, 154, 140, 101, 106, 90, 68, 73, 60, 69, 73, 63, 45, 48, 43, 124, 125, 132, 139, 142, 152, 138, 138, 154, 137, 136, 152, 137, 137, 150, 137, 136, 152, 137, 136, 152, 140, 140, 154, 144, 144, 157, 142, 140, 154, 145, 143, 157, 144, 142, 156, 140, 137, 152, 133, 130, 145, 138, 136, 150, 138, 133, 148, 139, 134, 149, 135, 131, 146, 140, 135, 150, 141, 136, 154, 140, 135, 152, 159, 153, 173, 139, 133, 156, 135, 129, 152, 134, 128, 151, 136, 132, 156, 136, 132, 156, 136, 132, 156, 137, 133, 158, 141, 137, 161, 142, 138, 162, 138, 135, 159, 137, 133, 158, 140, 136, 160, 138, 135, 157, 138, 135, 157, 137, 134, 156, 136, 133, 152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 47, 40, 20, 26, 19, 24, 27, 22, 48, 51, 50, 166, 165, 172, 161, 158, 165, 165, 159, 170, 155, 148, 159, 134, 128, 136, 255, 255, 255, 234, 227, 238, 136, 130, 139, 117, 111, 122, 132, 128, 140, 126, 121, 136, 120, 117, 134, 122, 119, 136, 117, 117, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1, 3, 8, 4, 6, 11, 11, 19, 21, 27, 165, 164, 173, 150, 148, 162, 155, 151, 166, 154, 149, 164, 151, 149, 163, 148, 145, 160, 143, 140, 157, 143, 140, 157, 141, 138, 155, 131, 129, 145, 133, 130, 147, 127, 124, 141, 122, 120, 134, 130, 129, 138, 0, 0, 0, 0, 0, 0, 0, 2, 0, 8, 11, 5, 21, 24, 18, 18, 21, 16, 12, 15, 12, 146, 143, 146, 120, 117, 125, 124, 118, 127, 149, 142, 153, 156, 149, 160, 161, 155, 168, 161, 155, 168, 168, 162, 175, 160, 154, 167, 153, 146, 162, 147, 142, 157, 140, 135, 152, 135, 132, 149, 158, 156, 170, 0, 0, 0, 24, 25, 20, 10, 11, 8, 5, 8, 5, 5, 5, 3, 7, 8, 10, 3, 2, 9, 142, 140, 150, 119, 117, 129, 137, 133, 143, 139, 135, 145, 148, 144, 155, 255, 255, 255, 130, 126, 134, 140, 136, 146, 140, 136, 148, 134, 130, 143, 129, 127, 139, 127, 125, 137, 118, 116, 128, 33, 35, 37, 5, 8, 5, 0, 0, 0, 4, 9, 5, 0, 0, 0, 0, 0, 0, 1, 6, 7, 0, 0, 0, 142, 142, 153, 145, 144, 155, 149, 147, 159, 147, 145, 157, 151, 149, 163, 148, 145, 160, 148, 145, 160, 144, 144, 157, 143, 142, 158, 143, 142, 158, 140, 140, 156, 142, 141, 157, 136, 135, 151, 141, 143, 158, 136, 138, 154, 133, 135, 150, 136, 138, 154, 134, 136, 151, 133, 135, 150, 131, 133, 148, 132, 134, 147, 132, 132, 146, 136, 136, 149, 128, 128, 139, 129, 129, 140, 125, 126, 135, 133, 134, 143, 129, 130, 138, 125, 126, 135, 131, 132, 141, 128, 128, 137, 132, 133, 142, 128, 128, 137, 125, 126, 
135, 129, 130, 138, 125, 126, 135, 42, 47, 47, 5, 8, 5, 21, 20, 12, 151, 147, 132, 157, 150, 136, 151, 147, 130, 165, 161, 145, 161, 157, 140, 174, 170, 151, 144, 143, 123, 88, 85, 77, 70, 70, 59, 86, 83, 70, 87, 85, 70, 242, 240, 223, 136, 134, 117, 107, 105, 88, 94, 92, 75, 22, 20, 5, 55, 56, 37, 42, 45, 28, 0, 0, 0, 1, 3, 0, 13, 15, 3, 15, 17, 5, 29, 31, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 16, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 158, 159, 138, 64, 62, 42, 93, 92, 69, 21, 19, 1, 58, 56, 39, 54, 53, 35, 134, 130, 113, 87, 83, 68, 134, 129, 115, 142, 138, 121, 80, 75, 64, 105, 100, 86, 117, 115, 100, 125, 123, 108, 95, 93, 78, 105, 105, 92, 0, 0, 0, 132, 137, 122, 102, 106, 94, 77, 83, 72, 72, 79, 68, 86, 92, 86, 128, 132, 139, 132, 132, 146, 135, 134, 150, 139, 139, 155, 140, 137, 152, 136, 134, 148, 138, 136, 152, 134, 132, 146, 137, 135, 149, 135, 133, 147, 144, 142, 156, 139, 134, 149, 133, 130, 145, 135, 133, 147, 134, 132, 146, 131, 126, 141, 133, 128, 143, 135, 131, 146, 255, 255, 255, 163, 159, 174, 149, 145, 160, 225, 220, 235, 139, 133, 154, 134, 128, 151, 131, 125, 147, 133, 127, 152, 131, 124, 149, 133, 129, 153, 135, 131, 155, 134, 130, 154, 135, 131, 155, 135, 131, 155, 134, 130, 154, 129, 125, 149, 138, 135, 157, 137, 131, 153, 135, 131, 153, 135, 132, 149, 111, 112, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 23, 14, 11, 16, 10, 14, 19, 15, 17, 21, 20, 136, 135, 142, 156, 153, 161, 153, 147, 158, 155, 148, 159, 145, 139, 150, 201, 195, 206, 155, 148, 159, 255, 255, 255, 138, 131, 145, 255, 255, 255, 147, 142, 157, 125, 120, 137, 118, 113, 130, 118, 115, 131, 152, 153, 164, 0, 0, 0, 10, 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 9, 7, 1, 6, 9, 107, 108, 114, 150, 148, 160, 153, 148, 163, 153, 150, 164, 155, 151, 166, 151, 149, 163, 149, 145, 160, 144, 142, 156, 149, 147, 161, 135, 133, 147, 128, 126, 140, 128, 126, 140, 123, 121, 135, 119, 117, 129, 33, 32, 35, 72, 74, 71, 0, 0, 0, 0, 0, 0, 10, 13, 8, 10, 13, 10, 16, 19, 15, 21, 21, 23, 147, 144, 149, 124, 118, 129, 134, 127, 138, 147, 141, 152, 156, 147, 161, 155, 148, 161, 145, 138, 152, 152, 146, 159, 150, 143, 159, 145, 140, 155, 137, 131, 149, 136, 133, 150, 133, 130, 145, 42, 41, 46, 71, 72, 67, 0, 0, 0, 5, 8, 5, 2, 5, 1, 3, 6, 3, 3, 5, 9, 140, 141, 150, 123, 122, 131, 125, 121, 131, 137, 136, 145, 127, 123, 134, 147, 143, 151, 128, 125, 135, 140, 136, 146, 139, 135, 145, 134, 132, 142, 135, 133, 143, 125, 123, 132, 122, 121, 130, 135, 134, 139, 14, 16, 15, 0, 0, 0, 0, 0, 0, 11, 14, 11, 0, 0, 0, 0, 4, 2, 0, 0, 0, 115, 116, 122, 143, 142, 149, 147, 145, 155, 145, 144, 153, 145, 144, 153, 151, 149, 161, 147, 145, 157, 148, 148, 161, 143, 143, 156, 142, 142, 155, 145, 145, 160, 140, 140, 154, 139, 141, 154, 134, 137, 149, 134, 137, 149, 133, 135, 148, 133, 135, 148, 132, 134, 147, 129, 132, 145, 132, 134, 147, 132, 133, 144, 131, 131, 143, 128, 128, 141, 130, 130, 141, 130, 131, 139, 128, 128, 137, 125, 126, 135, 127, 127, 136, 127, 127, 136, 128, 128, 137, 130, 131, 139, 127, 127, 136, 127, 127, 136, 125, 126, 135, 124, 125, 134, 121, 122, 128, 103, 107, 108, 21, 26, 22, 0, 0, 0, 94, 92, 79, 132, 127, 113, 152, 145, 129, 159, 152, 138, 155, 151, 134, 153, 149, 129, 160, 156, 135, 74, 72, 66, 70, 69, 61, 125, 125, 111, 118, 117, 99, 177, 175, 157, 111, 110, 92, 108, 106, 89, 85, 83, 65, 58, 56, 39, 62, 62, 46, 6, 9, 0, 0, 0, 0, 23, 25, 13, 49, 51, 39, 101, 104, 89, 149, 151, 139, 93, 96, 81, 70, 72, 60, 41, 43, 31, 34, 36, 22, 43, 45, 33, 43, 45, 31, 102, 105, 88, 109, 109, 91, 182, 183, 160, 87, 86, 66, 22, 21, 0, 78, 76, 
58, 91, 89, 69, 60, 59, 41, 93, 91, 76, 120, 116, 99, 168, 164, 147, 121, 119, 101, 96, 94, 79, 89, 87, 72, 97, 95, 80, 116, 114, 99, 137, 135, 120, 109, 109, 95, 0, 0, 0, 138, 142, 129, 91, 98, 84, 68, 72, 62, 71, 74, 66, 48, 51, 48, 118, 119, 128, 132, 132, 146, 137, 135, 151, 136, 133, 150, 133, 130, 145, 135, 133, 147, 129, 127, 141, 135, 133, 147, 133, 130, 145, 133, 130, 145, 134, 132, 146, 134, 129, 145, 133, 130, 145, 137, 135, 149, 133, 130, 145, 131, 126, 141, 126, 121, 136, 139, 134, 149, 255, 255, 255, 127, 123, 138, 255, 255, 255, 255, 255, 255, 148, 143, 163, 130, 124, 146, 133, 127, 150, 123, 120, 144, 130, 123, 148, 133, 129, 153, 131, 128, 152, 131, 128, 152, 131, 128, 152, 135, 131, 155, 134, 130, 154, 131, 128, 152, 133, 129, 153, 134, 130, 152, 130, 127, 149, 133, 130, 147, 158, 158, 171, 0, 0, 0, 0, 0, 0, 82, 87, 81, 0, 0, 0, 12, 18, 11, 1, 7, 3, 8, 10, 7, 6, 11, 9, 99, 100, 104, 153, 151, 159, 151, 145, 156, 153, 147, 158, 147, 143, 153, 132, 126, 137, 131, 125, 136, 138, 131, 145, 134, 130, 143, 121, 117, 130, 125, 120, 137, 121, 117, 132, 121, 116, 134, 113, 110, 127, 122, 120, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 2, 0, 0, 0, 1, 151, 152, 160, 143, 141, 155, 156, 154, 168, 153, 150, 166, 155, 153, 167, 142, 140, 154, 255, 255, 255, 131, 129, 143, 133, 130, 145, 134, 131, 148, 129, 126, 143, 128, 126, 140, 121, 119, 131, 143, 142, 149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 10, 4, 1, 2, 0, 1, 3, 0, 0, 0, 0, 94, 94, 98, 111, 107, 115, 131, 125, 136, 139, 133, 144, 149, 142, 153, 151, 144, 158, 161, 155, 166, 160, 154, 165, 153, 149, 161, 147, 142, 157, 141, 138, 155, 140, 137, 154, 134, 132, 144, 151, 150, 157, 58, 59, 54, 12, 16, 10, 0, 0, 0, 11, 14, 11, 21, 23, 20, 0, 0, 0, 14, 15, 17, 130, 131, 135, 122, 123, 129, 126, 125, 132, 127, 126, 131, 141, 140, 145, 141, 140, 147, 141, 140, 147, 138, 138, 142, 132, 134, 138, 131, 133, 137, 129, 128, 133, 129, 130, 134, 130, 130, 132, 4, 7, 6, 0, 0, 0, 0, 0, 0, 7, 13, 6, 13, 18, 14, 9, 11, 10, 0, 0, 0, 100, 103, 97, 136, 139, 131, 225, 229, 219, 196, 200, 190, 167, 171, 163, 164, 167, 161, 158, 161, 158, 141, 145, 146, 132, 136, 139, 134, 138, 143, 126, 130, 135, 128, 132, 137, 138, 141, 147, 133, 137, 142, 134, 138, 141, 136, 140, 146, 136, 140, 146, 129, 133, 139, 124, 127, 133, 128, 129, 135, 133, 135, 141, 131, 132, 141, 132, 133, 142, 129, 130, 138, 131, 132, 139, 128, 129, 133, 125, 129, 132, 125, 127, 131, 122, 123, 127, 130, 131, 137, 129, 130, 136, 129, 130, 134, 128, 129, 135, 124, 126, 130, 127, 128, 132, 128, 129, 131, 131, 135, 134, 29, 31, 28, 16, 17, 10, 82, 82, 68, 147, 140, 127, 142, 138, 121, 148, 144, 127, 149, 145, 126, 185, 182, 160, 179, 176, 154, 62, 63, 54, 92, 92, 81, 106, 104, 88, 150, 146, 129, 137, 135, 120, 101, 99, 84, 94, 90, 76, 44, 41, 29, 80, 78, 63, 64, 64, 51, 32, 32, 16, 0, 0, 0, 31, 33, 21, 53, 55, 43, 52, 55, 40, 85, 87, 73, 93, 96, 81, 98, 98, 82, 92, 93, 77, 95, 98, 81, 74, 77, 60, 77, 80, 63, 112, 113, 95, 59, 59, 41, 141, 142, 121, 78, 76, 56, 3, 2, 0, 112, 108, 89, 98, 94, 75, 83, 82, 64, 80, 76, 60, 125, 121, 102, 176, 172, 155, 130, 126, 112, 106, 103, 90, 94, 92, 79, 101, 98, 86, 132, 133, 114, 108, 108, 90, 116, 118, 106, 0, 0, 0, 115, 119, 108, 76, 82, 71, 54, 58, 48, 97, 101, 93, 39, 44, 38, 120, 120, 131, 131, 131, 147, 133, 130, 147, 131, 129, 145, 130, 128, 142, 134, 131, 148, 136, 134, 148, 134, 129, 145, 135, 131, 146, 132, 127, 142, 133, 128, 143, 133, 128, 143, 131, 126, 141, 129, 127, 141, 123, 118, 133, 127, 123, 138, 127, 123, 138, 142, 138, 153, 
225, 220, 235, 134, 129, 145, 131, 126, 143, 219, 214, 229, 133, 128, 148, 126, 123, 142, 126, 120, 143, 131, 128, 152, 131, 125, 147, 122, 118, 142, 130, 127, 151, 128, 124, 148, 131, 128, 152, 131, 128, 152, 133, 129, 153, 133, 129, 153, 134, 130, 154, 134, 130, 154, 125, 121, 143, 127, 124, 143, 131, 131, 145, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 4, 9, 5, 1, 7, 3, 0, 3, 0, 16, 17, 21, 141, 140, 145, 153, 147, 158, 151, 145, 156, 140, 136, 146, 139, 135, 147, 139, 133, 146, 131, 126, 141, 134, 129, 145, 128, 123, 141, 126, 121, 138, 127, 122, 140, 123, 120, 140, 119, 116, 135, 122, 120, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 6, 5, 0, 0, 0, 156, 160, 165, 138, 137, 148, 144, 142, 156, 141, 138, 153, 147, 144, 159, 147, 144, 159, 147, 144, 159, 138, 133, 148, 134, 132, 144, 134, 132, 144, 129, 127, 141, 123, 122, 133, 121, 119, 131, 120, 119, 122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 68, 71, 66, 50, 56, 49, 2, 5, 1, 0, 2, 0, 121, 121, 121, 98, 95, 101, 123, 119, 125, 138, 134, 142, 141, 140, 147, 155, 154, 161, 137, 136, 143, 141, 140, 147, 142, 141, 148, 125, 123, 130, 129, 128, 135, 130, 130, 132, 191, 191, 191, 10, 11, 6, 16, 19, 11, 0, 0, 0, 0, 3, 0, 15, 18, 12, 0, 1, 0, 149, 152, 146, 24, 29, 23, 8, 14, 8, 27, 33, 26, 34, 40, 33, 60, 66, 57, 81, 86, 80, 81, 86, 80, 85, 91, 84, 60, 65, 61, 35, 41, 34, 14, 17, 11, 0, 0, 0, 19, 22, 17, 0, 0, 0, 19, 22, 17, 58, 59, 52, 3, 5, 0, 7, 9, 6, 57, 63, 58, 15, 21, 17, 0, 0, 0, 13, 18, 14, 5, 10, 6, 4, 9, 3, 15, 21, 17, 0, 5, 1, 0, 3, 1, 10, 14, 13, 4, 9, 7, 6, 11, 9, 12, 17, 15, 26, 31, 29, 5, 10, 8, 15, 20, 19, 17, 21, 20, 1, 6, 5, 14, 19, 15, 1, 6, 5, 0, 0, 0, 14, 16, 13, 26, 29, 28, 32, 35, 34, 38, 41, 38, 39, 42, 39, 36, 42, 38, 36, 41, 40, 45, 50, 46, 57, 63, 58, 43, 49, 45, 42, 47, 45, 35, 40, 38, 38, 42, 41, 43, 48, 47, 45, 49, 48, 39, 44, 40, 27, 32, 30, 25, 30, 26, 18, 21, 18, 63, 64, 57, 59, 56, 48, 153, 148, 134, 159, 152, 138, 153, 146, 130, 159, 153, 134, 163, 160, 138, 195, 191, 170, 87, 84, 74, 96, 94, 81, 175, 173, 158, 149, 147, 129, 144, 142, 127, 116, 114, 99, 73, 71, 56, 28, 25, 12, 103, 103, 89, 54, 54, 40, 10, 10, 0, 0, 0, 0, 35, 37, 25, 71, 73, 61, 66, 69, 54, 85, 85, 72, 94, 97, 82, 109, 112, 97, 98, 98, 82, 94, 94, 76, 80, 80, 62, 92, 93, 75, 77, 78, 60, 93, 91, 74, 83, 84, 63, 123, 122, 102, 0, 0, 0, 118, 117, 97, 119, 115, 96, 73, 72, 50, 97, 93, 72, 149, 146, 124, 126, 122, 103, 107, 103, 86, 103, 101, 86, 78, 76, 60, 95, 93, 78, 122, 120, 103, 126, 127, 108, 137, 136, 125, 0, 0, 0, 108, 112, 101, 69, 75, 64, 76, 80, 70, 61, 65, 57, 36, 39, 33, 113, 113, 122, 130, 129, 145, 131, 129, 145, 130, 128, 142, 131, 129, 143, 133, 130, 145, 132, 127, 142, 132, 127, 142, 134, 129, 145, 132, 127, 142, 128, 124, 139, 132, 127, 142, 128, 124, 139, 128, 126, 140, 126, 123, 138, 128, 124, 139, 130, 125, 140, 127, 123, 138, 124, 119, 134, 128, 123, 141, 125, 120, 137, 127, 122, 140, 127, 122, 142, 126, 123, 142, 130, 124, 146, 132, 126, 151, 130, 123, 148, 122, 118, 142, 128, 124, 148, 130, 127, 151, 131, 128, 152, 133, 127, 152, 137, 133, 158, 133, 129, 153, 133, 129, 153, 134, 130, 152, 134, 130, 152, 136, 133, 152, 129, 129, 142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 37, 31, 42, 48, 41, 12, 18, 11, 0, 4, 0, 0, 0, 0, 147, 146, 149, 149, 143, 151, 142, 136, 144, 142, 138, 149, 144, 138, 149, 137, 131, 142, 137, 132, 145, 131, 126, 139, 131, 126, 141, 126, 121, 136, 126, 121, 136, 126, 123, 138, 121, 119, 133, 116, 114, 126, 164, 163, 170, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 3, 0, 0, 0, 
56, 61, 59, 142, 143, 149, 142, 142, 151, 143, 141, 153, 140, 138, 150, 141, 139, 151, 137, 136, 145, 130, 129, 138, 121, 120, 127, 126, 124, 134, 127, 128, 134, 149, 150, 154, 187, 190, 187, 88, 91, 88, 0, 0, 0, 78, 81, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 19, 23, 15, 151, 155, 147, 23, 26, 20, 8, 14, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1, 0, 0, 0, 8, 14, 10, 10, 13, 10, 8, 10, 7, 4, 7, 4, 3, 6, 1, 0, 0, 0, 0, 2, 0, 74, 78, 70, 23, 26, 18, 0, 0, 0, 4, 10, 1, 124, 130, 121, 3, 9, 0, 0, 0, 0, 4, 10, 1, 18, 24, 15, 41, 47, 38, 29, 35, 27, 45, 51, 42, 0, 0, 0, 13, 19, 10, 57, 63, 54, 38, 41, 34, 1, 4, 0, 17, 21, 13, 24, 28, 20, 32, 35, 30, 5, 9, 1, 83, 84, 75, 58, 61, 53, 15, 18, 10, 37, 40, 32, 8, 11, 3, 21, 24, 18, 14, 17, 11, 8, 14, 6, 10, 14, 6, 5, 11, 2, 20, 26, 19, 8, 11, 5, 0, 0, 0, 0, 2, 0, 12, 18, 11, 0, 5, 0, 0, 4, 0, 0, 6, 0, 34, 40, 33, 6, 12, 5, 1, 7, 1, 35, 41, 34, 55, 61, 54, 1, 7, 1, 0, 2, 0, 10, 15, 9, 0, 6, 0, 0, 1, 0, 37, 40, 34, 0, 4, 0, 13, 19, 12, 0, 0, 0, 88, 93, 87, 1, 7, 1, 0, 0, 0, 24, 29, 25, 6, 11, 7, 0, 0, 0, 4, 9, 3, 44, 47, 41, 35, 37, 34, 33, 36, 33, 2, 5, 0, 29, 30, 23, 23, 21, 8, 137, 133, 119, 144, 140, 124, 150, 147, 127, 162, 156, 138, 160, 156, 135, 190, 185, 164, 91, 91, 78, 92, 89, 76, 142, 140, 124, 185, 181, 164, 102, 100, 85, 75, 73, 58, 65, 62, 50, 114, 111, 98, 116, 114, 101, 50, 51, 35, 0, 0, 0, 0, 0, 0, 40, 41, 29, 33, 32, 21, 87, 89, 77, 95, 95, 81, 50, 50, 37, 125, 125, 109, 89, 89, 73, 78, 79, 63, 108, 108, 92, 84, 85, 66, 79, 77, 60, 81, 80, 62, 50, 48, 28, 95, 94, 74, 6, 4, 0, 156, 155, 132, 127, 124, 102, 141, 138, 116, 139, 135, 114, 162, 159, 137, 97, 96, 76, 75, 73, 58, 83, 81, 66, 91, 88, 73, 93, 91, 76, 122, 120, 103, 132, 131, 113, 121, 123, 111, 0, 0, 0, 79, 82, 74, 76, 80, 72, 61, 65, 55, 54, 58, 50, 46, 49, 44, 97, 98, 107, 124, 124, 138, 129, 126, 143, 127, 127, 140, 128, 126, 140, 128, 126, 140, 129, 126, 143, 129, 127, 141, 131, 126, 141, 128, 124, 139, 126, 121, 136, 128, 124, 139, 128, 124, 139, 124, 119, 134, 127, 125, 139, 131, 126, 141, 130, 124, 142, 127, 122, 140, 127, 122, 140, 128, 123, 141, 132, 126, 147, 130, 124, 144, 128, 123, 143, 126, 123, 142, 124, 118, 140, 127, 123, 145, 131, 125, 147, 128, 124, 148, 129, 125, 149, 136, 132, 156, 133, 127, 152, 135, 131, 155, 133, 129, 151, 127, 123, 145, 131, 128, 148, 136, 133, 150, 129, 128, 147, 133, 134, 145, 106, 106, 115, 57, 62, 62, 0, 0, 0, 0, 0, 0, 1, 7, 1, 0, 0, 0, 109, 112, 107, 47, 52, 46, 7, 13, 6, 4, 7, 4, 120, 122, 119, 142, 139, 143, 141, 138, 142, 144, 140, 146, 137, 133, 141, 135, 132, 138, 133, 129, 137, 132, 128, 136, 130, 126, 134, 121, 118, 126, 108, 107, 112, 98, 97, 102, 93, 94, 98, 142, 144, 143, 54, 57, 56, 0, 0, 0, 0, 1, 0, 0, 0, 0, 6, 11, 7, 0, 0, 0, 0, 0, 0, 0, 1, 0, 142, 147, 143, 0, 5, 1, 28, 33, 29, 11, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 6, 1, 0, 0, 0, 3, 6, 1, 0, 3, 0, 2, 5, 0, 0, 0, 0, 29, 32, 26, 10, 13, 8, 0, 0, 0, 11, 14, 9, 0, 0, 0, 0, 3, 0, 146, 150, 140, 1, 5, 0, 4, 8, 0, 16, 19, 11, 51, 54, 46, 0, 6, 0, 42, 48, 39, 0, 2, 0, 2, 5, 0, 0, 1, 0, 0, 6, 0, 104, 108, 100, 40, 44, 36, 37, 40, 32, 29, 32, 24, 38, 41, 34, 0, 0, 0, 133, 137, 129, 38, 41, 34, 135, 138, 130, 12, 16, 8, 18, 22, 12, 90, 94, 84, 51, 54, 46, 5, 9, 0, 18, 24, 15, 3, 9, 0, 60, 64, 54, 32, 38, 27, 41, 47, 38, 0, 2, 0, 30, 33, 25, 15, 18, 10, 50, 53, 45, 36, 37, 30, 7, 8, 1, 80, 81, 74, 45, 47, 37, 9, 11, 1, 28, 29, 22, 66, 69, 62, 40, 44, 36, 29, 32, 24, 57, 60, 52, 106, 109, 101, 9, 12, 4, 40, 44, 36, 61, 66, 60, 25, 30, 24, 0, 0, 0, 0, 1, 0, 3, 8, 2, 7, 
13, 6, 7, 13, 6, 57, 63, 54, 0, 0, 0, 6, 12, 3, 24, 29, 23, 16, 19, 13, 3, 6, 1, 2, 5, 0, 32, 35, 30, 21, 24, 18, 18, 21, 16, 0, 0, 0, 2, 5, 1, 1, 4, 0, 0, 0, 0, 6, 11, 7, 24, 27, 22, 0, 3, 0, 4, 9, 5, 4, 9, 5, 8, 11, 5, 86, 92, 88, 73, 76, 70, 9, 12, 6, 0, 3, 0, 16, 19, 13, 28, 31, 25, 13, 14, 7, 0, 0, 0, 135, 131, 116, 141, 135, 119, 160, 154, 135, 173, 167, 148, 175, 169, 149, 176, 171, 150, 92, 89, 76, 114, 111, 98, 129, 127, 112, 191, 187, 170, 101, 99, 84, 75, 73, 60, 77, 74, 61, 98, 98, 84, 134, 131, 118, 48, 48, 34, 124, 124, 110, 58, 60, 48, 21, 23, 11, 27, 29, 17, 47, 48, 36, 88, 91, 76, 87, 90, 75, 187, 187, 173, 85, 86, 70, 82, 82, 66, 81, 81, 65, 95, 95, 77, 90, 91, 72, 151, 150, 129, 66, 65, 45, 161, 158, 136, 134, 131, 109, 175, 171, 150, 147, 144, 120, 155, 152, 130, 185, 182, 160, 133, 130, 106, 97, 93, 76, 80, 76, 62, 100, 98, 82, 92, 90, 74, 91, 88, 75, 111, 110, 92, 97, 96, 78, 135, 137, 125, 0, 0, 0, 78, 81, 71, 67, 71, 61, 61, 65, 55, 72, 76, 65, 42, 45, 39, 108, 109, 115, 117, 117, 131, 124, 124, 138, 129, 127, 141, 128, 126, 140, 128, 126, 140, 125, 122, 136, 127, 123, 138, 127, 123, 138, 131, 126, 139, 124, 119, 134, 130, 128, 142, 129, 127, 139, 127, 125, 139, 127, 125, 139, 131, 129, 143, 129, 127, 141, 128, 125, 142, 125, 122, 138, 129, 126, 143, 128, 125, 142, 128, 125, 142, 134, 131, 148, 129, 126, 143, 129, 126, 143, 129, 127, 141, 128, 126, 140, 129, 127, 139, 113, 111, 123, 102, 101, 108, 94, 94, 98, 97, 96, 99, 144, 146, 145, 75, 78, 77, 19, 22, 21, 15, 21, 17, 4, 9, 7, 1, 7, 3, 14, 20, 13, 18, 23, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 11, 3, 58, 61, 53, 21, 24, 18, 0, 0, 0, 1, 4, 0, 7, 10, 4, 8, 11, 5, 0, 2, 0, 1, 4, 0, 2, 5, 0, 0, 0, 0, 1, 4, 0, 10, 13, 8, 23, 26, 20, 23, 26, 20, 0, 0, 0, 8, 11, 5, 50, 53, 47, 48, 52, 46, 15, 17, 14, 0, 0, 0, 0, 0, 0, 9, 12, 8, 0, 0, 0, 36, 42, 38, 86, 92, 88, 0, 4, 0, 0, 0, 0, 48, 54, 47, 18, 21, 16, 0, 3, 0, 0, 0, 0, 0, 0, 0, 67, 70, 65, 0, 0, 0, 50, 53, 45, 0, 3, 0, 42, 45, 37, 16, 19, 11, 0, 0, 0, 1, 4, 0, 7, 10, 2, 0, 0, 0, 14, 17, 9, 43, 46, 38, 100, 104, 93, 43, 47, 36, 36, 40, 29, 28, 31, 21, 18, 22, 12, 52, 56, 46, 47, 51, 43, 38, 44, 35, 4, 8, 0, 7, 10, 2, 21, 24, 14, 8, 11, 3, 0, 4, 0, 57, 63, 54, 0, 0, 0, 38, 41, 34, 33, 37, 27, 3, 7, 0, 19, 23, 13, 51, 55, 44, 28, 31, 21, 1, 4, 0, 21, 24, 16, 16, 19, 11, 14, 17, 9, 22, 25, 17, 25, 31, 22, 1, 7, 0, 17, 21, 13, 49, 55, 46, 0, 0, 0, 57, 60, 52, 26, 30, 22, 16, 17, 10, 38, 40, 30, 12, 13, 4, 27, 28, 19, 40, 41, 32, 0, 0, 0, 37, 38, 31, 15, 19, 8, 30, 33, 25, 48, 49, 42, 50, 53, 45, 26, 30, 22, 11, 15, 7, 38, 41, 34, 32, 38, 29, 9, 12, 4, 0, 0, 0, 25, 29, 21, 5, 9, 1, 36, 39, 31, 23, 26, 18, 0, 0, 0, 48, 52, 44, 51, 55, 44, 42, 45, 35, 0, 0, 0, 2, 6, 0, 34, 35, 28, 14, 15, 8, 99, 100, 93, 31, 35, 27, 21, 24, 16, 3, 7, 0, 40, 46, 37, 6, 12, 3, 38, 44, 33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 14, 6, 0, 0, 0, 3, 5, 0, 8, 9, 2, 0, 3, 0, 7, 8, 1, 27, 28, 21, 34, 35, 28, 20, 21, 12, 0, 0, 0, 101, 97, 82, 141, 135, 117, 180, 174, 153, 174, 168, 147, 188, 182, 161, 179, 173, 152, 104, 104, 88, 138, 136, 119, 123, 122, 102, 171, 168, 148, 108, 106, 91, 78, 75, 62, 60, 59, 48, 139, 139, 125, 49, 46, 33, 66, 65, 52, 45, 47, 35, 35, 37, 25, 0, 0, 0, 33, 35, 22, 23, 25, 13, 86, 89, 74, 84, 86, 72, 87, 90, 75, 81, 84, 67, 88, 91, 74, 61, 61, 45, 85, 88, 69, 138, 138, 120, 70, 68, 48, 117, 116, 96, 6, 4, 0, 111, 107, 88, 182, 178, 157, 168, 164, 143, 174, 170, 149, 148, 145, 123, 110, 106, 85, 75, 70, 54, 126, 121, 107, 79, 77, 62, 86, 84, 68, 99, 96, 81, 124, 123, 105, 
114, 112, 95, 124, 124, 110, 0, 0, 0, 99, 102, 92, 57, 61, 48, 51, 55, 44, 50, 54, 43, 39, 43, 35, 107, 109, 110, 110, 112, 116, 124, 125, 132, 117, 118, 127, 122, 123, 131, 111, 112, 121, 120, 120, 129, 130, 129, 138, 120, 118, 128, 125, 126, 135, 118, 119, 128, 120, 120, 129, 125, 126, 135, 128, 127, 136, 125, 127, 133, 126, 125, 132, 115, 116, 122, 117, 119, 123, 80, 81, 85, 87, 88, 92, 67, 69, 73, 55, 57, 59, 76, 78, 80, 95, 95, 97, 45, 45, 47, 12, 15, 14, 2, 4, 3, 3, 6, 3, 14, 16, 13, 31, 34, 31, 2, 5, 0, 25, 28, 23, 15, 21, 15, 5, 11, 4, 18, 23, 17, 25, 30, 24, 39, 44, 38, 13, 19, 12, 15, 18, 12, 31, 36, 30, 8, 14, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 20, 15, 19, 23, 15, 35, 38, 30, 99, 102, 94, 23, 26, 18, 23, 26, 18, 108, 111, 103, 50, 53, 45, 11, 15, 7, 35, 38, 30, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 43, 37, 0, 0, 0, 42, 45, 39, 3, 6, 1, 19, 22, 19, 11, 16, 10, 0, 0, 0, 0, 0, 0, 25, 30, 24, 103, 108, 102, 0, 0, 0, 0, 2, 0, 0, 1, 0, 0, 6, 0, 17, 20, 15, 5, 9, 3, 0, 0, 0, 2, 5, 0, 0, 0, 0, 5, 9, 3, 0, 2, 0, 0, 0, 0, 7, 10, 2, 0, 0, 0, 0, 0, 0, 25, 29, 21, 3, 7, 0, 23, 24, 17, 0, 0, 0, 50, 53, 45, 29, 33, 22, 19, 23, 13, 48, 54, 43, 18, 22, 12, 18, 22, 12, 0, 0, 0, 22, 28, 20, 0, 0, 0, 5, 11, 2, 18, 24, 15, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 16, 10, 12, 16, 8, 25, 28, 23, 59, 62, 56, 83, 87, 81, 104, 107, 102, 126, 132, 123, 125, 129, 121, 146, 150, 142, 128, 131, 123, 148, 154, 145, 141, 147, 138, 140, 144, 134, 138, 144, 133, 142, 145, 137, 138, 142, 132, 144, 146, 136, 150, 152, 142, 156, 158, 146, 149, 151, 139, 164, 166, 156, 176, 178, 166, 165, 167, 155, 156, 158, 146, 87, 89, 79, 4, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 87, 79, 111, 115, 107, 129, 133, 122, 141, 143, 131, 149, 151, 139, 144, 146, 134, 147, 149, 136, 147, 149, 136, 147, 149, 136, 148, 149, 140, 138, 140, 130, 135, 137, 125, 148, 150, 138, 154, 156, 144, 149, 151, 139, 154, 156, 144, 151, 155, 145, 153, 158, 145, 160, 164, 152, 156, 160, 147, 145, 150, 134, 158, 162, 149, 135, 137, 125, 131, 133, 121, 116, 118, 106, 105, 107, 95, 92, 92, 79, 91, 91, 78, 83, 83, 69, 60, 62, 50, 45, 44, 33, 62, 63, 54, 41, 42, 33, 21, 24, 14, 0, 0, 0, 65, 62, 52, 151, 147, 130, 183, 179, 160, 166, 160, 141, 195, 189, 170, 189, 183, 164, 100, 98, 80, 110, 109, 89, 136, 133, 111, 186, 183, 163, 144, 142, 125, 101, 98, 88, 116, 116, 102, 165, 164, 151, 31, 29, 14, 108, 108, 92, 63, 63, 50, 51, 53, 41, 135, 137, 125, 142, 144, 132, 67, 69, 57, 66, 68, 56, 40, 41, 29, 57, 61, 46, 57, 59, 45, 71, 74, 57, 95, 98, 81, 72, 75, 58, 52, 52, 36, 82, 81, 63, 0, 0, 0, 96, 95, 75, 108, 105, 83, 150, 147, 125, 154, 151, 127, 150, 147, 125, 112, 109, 87, 119, 115, 96, 90, 86, 69, 96, 94, 79, 80, 78, 61, 92, 90, 74, 97, 95, 80, 109, 107, 90, 114, 112, 96, 120, 120, 107, 0, 0, 0, 109, 113, 100, 72, 76, 63, 54, 59, 46, 48, 52, 42, 45, 48, 41, 40, 45, 41, 42, 47, 43, 37, 39, 38, 33, 36, 35, 45, 47, 47, 27, 32, 30, 27, 32, 30, 43, 45, 44, 17, 19, 19, 26, 28, 30, 2, 4, 3, 0, 0, 0, 0, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 10, 13, 8, 16, 19, 11, 21, 24, 16, 14, 17, 9, 7, 10, 2, 2, 5, 0, 40, 44, 36, 15, 18, 10, 18, 22, 14, 23, 26, 20, 3, 7, 0, 46, 50, 42, 2, 5, 0, 28, 31, 25, 53, 56, 51, 10, 13, 8, 0, 2, 0, 3, 8, 2, 3, 6, 1, 7, 13, 6, 0, 3, 0, 0, 0, 0, 0, 6, 0, 49, 55, 48, 12, 18, 11, 14, 20, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 39, 31, 79, 82, 76, 4, 8, 0, 8, 11, 3, 31, 35, 27, 10, 14, 6, 39, 43, 35, 8, 11, 3, 47, 48, 41, 25, 29, 21, 16, 19, 11, 21, 24, 16, 52, 55, 48, 18, 21, 16, 26, 29, 24, 15, 18, 12, 
81, 73, 68, 74, 65, 75, 81, 72, 73, 76, 68, 75, 79, 71, 78, 81, 73, 75, 79, 71, 87, 90, 82, 87, 90, 82, 80, 83, 75, 89, 93, 85, 88, 92, 84, 82, 86, 78, 85, 88, 80, 78, 84, 75, 86, 92, 84, 84, 90, 81, 76, 82, 73, 84, 90, 81, 91, 97, 88, 90, 94, 84, 92, 95, 85, 93, 97, 86, 97, 101, 91, 97, 101, 91, 97, 101, 91, 99, 102, 92, 100, 104, 93, 106, 109, 99, 94, 98, 87, 110, 114, 102, 104, 109, 96, 107, 109, 97, 113, 115, 103, 113, 114, 105, 107, 109, 99, 112, 113, 104, 121, 123, 113, 120, 121, 112, 120, 121, 112, 115, 117, 107, 126, 128, 116, 128, 130, 118, 128, 130, 118, 133, 135, 123, 128, 130, 118, 130, 132, 120, 131, 133, 121, 131, 133, 121, 127, 129, 115, 134, 136, 124, 136, 138, 124, 130, 133, 118, 149, 151, 137, 119, 121, 107, 125, 127, 112, 135, 137, 123, 149, 151, 137, 150, 153, 138, 168, 170, 156, 154, 156, 142, 150, 153, 138, 144, 147, 132, 130, 133, 118, 124, 129, 114, 138, 140, 128, 114, 118, 107, 7, 10, 0, 65, 66, 59, 0, 0, 0, 2, 4, 0, 58, 61, 53, 50, 53, 45, 71, 73, 63, 73, 75, 65, 51, 55, 44, 55, 59, 51, 61, 65, 55, 83, 87, 77, 62, 66, 56, 61, 65, 57, 62, 68, 59, 55, 61, 52, 78, 85, 73, 103, 109, 98, 67, 73, 62, 59, 65, 54, 92, 99, 85, 74, 80, 69, 93, 100, 86, 120, 123, 113, 75, 79, 69, 92, 99, 87, 82, 88, 77, 155, 161, 150, 90, 97, 83, 103, 110, 96, 93, 100, 86, 114, 118, 105, 89, 94, 81, 86, 93, 82, 114, 118, 107, 81, 85, 75, 103, 107, 97, 87, 91, 80, 88, 92, 82, 23, 27, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 92, 91, 85, 149, 148, 140, 159, 158, 149, 170, 169, 163, 133, 132, 126, 165, 161, 156, 178, 177, 171, 141, 137, 131, 159, 156, 150, 173, 169, 166, 142, 138, 134, 144, 140, 137, 134, 130, 126, 139, 135, 134, 142, 137, 136, 146, 139, 139, 136, 130, 128, 132, 125, 123, 134, 130, 126, 140, 133, 131, 142, 135, 135, 149, 143, 140, 161, 154, 152, 175, 169, 164, 139, 133, 128, 144, 138, 134, 129, 125, 120, 158, 154, 149, 174, 171, 165, 138, 134, 131, 37, 33, 30, 39, 35, 32, 18, 14, 13, 15, 10, 10, 82, 78, 79, 168, 164, 165, 235, 230, 232, 251, 246, 250, 241, 239, 240, 243, 238, 242, 238, 236, 236, 236, 233, 234, 238, 234, 235, 213, 208, 209, 235, 230, 232, 221, 216, 218, 227, 222, 223, 220, 215, 214, 234, 229, 230, 225, 221, 220, 225, 223, 222, 228, 223, 223, 225, 222, 218, 223, 219, 216, 229, 225, 222, 224, 220, 217, 231, 227, 224, 228, 224, 221, 219, 215, 209, 224, 220, 217, 229, 225, 222, 236, 232, 229, 234, 230, 226, 234, 230, 226, 231, 227, 224, 233, 229, 225, 228, 224, 221, 230, 226, 223, 231, 227, 224, 230, 226, 223, 229, 225, 222, 229, 225, 222, 222, 218, 215, 219, 215, 211, 105, 106, 97, 100, 102, 92, 96, 100, 90, 100, 104, 93, 102, 106, 96, 104, 108, 100, 92, 95, 87, 94, 96, 88, 90, 94, 86, 87, 89, 81, 87, 89, 81, 86, 88, 78, 84, 85, 78, 87, 89, 81, 81, 83, 75, 87, 89, 79, 88, 90, 82, 79, 81, 71, 90, 91, 82, 91, 92, 83, 75, 79, 69, 81, 83, 73, 73, 75, 65, 71, 75, 64, 72, 74, 64, 68, 72, 62, 69, 70, 61, 87, 89, 79, 48, 52, 42, 76, 77, 68, 68, 72, 64, 71, 74, 66, 71, 74, 66, 68, 72, 64, 68, 72, 64, 83, 87, 79, 72, 75, 67, 97, 101, 93, 78, 81, 73, 80, 83, 75, 79, 82, 74, 81, 85, 77, 78, 81, 73, 82, 86, 78, 83, 87, 79, 87, 90, 82, 74, 78, 70, 81, 85, 77, 81, 87, 78, 83, 87, 79, 83, 89, 80, 83, 89, 80, 91, 97, 88, 97, 101, 93, 93, 96, 88, 92, 95, 87, 100, 103, 95, 90, 94, 86, 94, 98, 87, 97, 101, 91, 102, 106, 96, 102, 106, 96, 103, 107, 97, 102, 104, 94, 98, 100, 88, 111, 112, 103, 101, 103, 91, 112, 114, 102, 113, 114, 105, 113, 115, 103, 113, 114, 105, 123, 125, 115, 121, 123, 111, 116, 118, 108, 127, 127, 116, 118, 117, 106, 
100, 102, 90, 138, 137, 126, 128, 130, 118, 137, 136, 123, 126, 128, 114, 145, 147, 135, 168, 168, 154, 164, 166, 152, 169, 171, 157, 178, 178, 165, 182, 184, 169, 158, 161, 146, 158, 161, 146, 128, 130, 116, 127, 129, 115, 113, 115, 101, 102, 105, 90, 91, 93, 79, 102, 105, 90, 69, 71, 57, 79, 81, 67, 88, 90, 78, 79, 81, 69, 90, 92, 80, 73, 77, 66, 65, 69, 58, 31, 35, 25, 45, 48, 41, 0, 0, 0, 48, 49, 42, 75, 79, 69, 105, 107, 95, 63, 65, 53, 63, 65, 53, 64, 68, 57, 92, 95, 85, 66, 70, 60, 69, 73, 63, 72, 76, 65, 63, 70, 58, 61, 67, 56, 78, 85, 73, 57, 64, 52, 68, 74, 63, 90, 96, 85, 96, 102, 91, 114, 121, 107, 99, 103, 90, 117, 121, 109, 116, 120, 107, 94, 98, 85, 86, 93, 80, 86, 93, 80, 68, 75, 61, 96, 102, 89, 136, 143, 129, 70, 77, 65, 14, 21, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 23, 13, 148, 149, 140, 170, 169, 161, 154, 153, 145, 169, 168, 160, 193, 190, 182, 167, 166, 157, 138, 135, 127, 149, 145, 137, 153, 152, 143, 163, 162, 154, 146, 143, 135, 147, 144, 138, 142, 138, 132, 145, 142, 136, 157, 153, 150, 155, 150, 149, 147, 143, 140, 128, 123, 123, 141, 136, 135, 134, 129, 128, 131, 127, 126, 131, 127, 126, 136, 129, 129, 131, 126, 128, 130, 124, 124, 126, 119, 119, 157, 151, 149, 191, 185, 182, 143, 137, 134, 16, 11, 4, 137, 115, 100, 33, 19, 8, 124, 116, 110, 153, 146, 144, 206, 201, 200, 251, 247, 246, 255, 251, 251, 248, 243, 244, 237, 232, 234, 236, 231, 233, 240, 235, 236, 240, 235, 236, 223, 220, 221, 237, 234, 235, 235, 230, 234, 213, 207, 211, 224, 221, 224, 233, 230, 234, 239, 236, 239, 242, 237, 241, 255, 255, 255, 209, 204, 206, 218, 216, 216, 232, 230, 230, 223, 220, 221, 231, 229, 227, 211, 209, 207, 212, 210, 209, 220, 218, 217, 224, 222, 218, 217, 215, 211, 218, 216, 212, 220, 216, 212, 217, 213, 210, 230, 226, 223, 228, 224, 221, 228, 224, 221, 229, 225, 222, 231, 227, 224, 233, 229, 225, 233, 229, 225, 233, 229, 225, 230, 226, 223, 230, 226, 223, 231, 227, 224, 223, 219, 216, 227, 223, 220, 222, 218, 215, 223, 219, 216, 225, 222, 218, 221, 217, 214, 106, 109, 101, 96, 100, 92, 90, 94, 86, 97, 101, 93, 93, 97, 86, 93, 96, 88, 90, 91, 84, 87, 90, 82, 89, 93, 85, 90, 91, 84, 81, 83, 75, 88, 90, 82, 84, 85, 78, 81, 83, 75, 84, 85, 78, 80, 81, 74, 85, 86, 79, 78, 80, 70, 80, 82, 72, 73, 75, 65, 78, 80, 70, 76, 77, 68, 79, 81, 71, 80, 82, 72, 73, 75, 65, 79, 81, 71, 81, 83, 73, 83, 84, 75, 82, 86, 76, 86, 87, 80, 73, 76, 68, 76, 80, 72, 74, 78, 70, 75, 79, 71, 72, 75, 67, 71, 74, 66, 78, 81, 73, 75, 79, 71, 80, 83, 75, 61, 65, 57, 74, 78, 70, 79, 82, 74, 76, 80, 72, 75, 79, 69, 85, 88, 78, 79, 82, 74, 78, 84, 75, 82, 88, 79, 83, 89, 80, 81, 85, 77, 82, 88, 79, 86, 92, 84, 86, 92, 84, 86, 92, 84, 94, 98, 87, 94, 98, 87, 96, 100, 90, 96, 100, 90, 93, 97, 86, 101, 105, 94, 101, 103, 93, 95, 97, 87, 102, 104, 94, 104, 105, 96, 107, 109, 99, 108, 110, 98, 113, 115, 103, 120, 122, 110, 105, 107, 95, 129, 131, 119, 130, 129, 118, 128, 128, 117, 156, 156, 145, 165, 164, 153, 156, 156, 143, 147, 147, 133, 171, 171, 158, 178, 178, 165, 146, 146, 132, 147, 147, 133, 123, 123, 109, 146, 146, 132, 123, 123, 109, 106, 106, 95, 99, 99, 86, 105, 105, 92, 79, 81, 69, 84, 86, 72, 102, 105, 90, 86, 89, 74, 83, 85, 70, 83, 85, 70, 91, 93, 79, 73, 75, 63, 101, 103, 91, 100, 102, 88, 87, 89, 77, 85, 87, 75, 88, 90, 78, 78, 80, 68, 76, 80, 70, 93, 97, 86, 47, 51, 41, 51, 55, 44, 0, 0, 0, 73, 77, 66, 121, 123, 113, 102, 104, 92, 74, 76, 64, 100, 102, 90, 75, 80, 67, 74, 78, 65, 111, 116, 103, 113, 117, 104, 111, 116, 103, 92, 96, 83, 95, 99, 89, 119, 
126, 112, 100, 107, 94, 111, 118, 104, 105, 112, 98, 112, 119, 103, 116, 121, 105, 107, 111, 98, 119, 121, 109, 106, 110, 97, 37, 41, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 125, 125, 113, 145, 144, 133, 153, 152, 141, 158, 156, 143, 191, 188, 175, 182, 180, 167, 167, 165, 152, 178, 175, 164, 160, 158, 147, 174, 171, 161, 145, 142, 134, 159, 156, 148, 146, 143, 135, 153, 150, 142, 170, 166, 160, 143, 139, 134, 156, 152, 147, 145, 142, 136, 145, 141, 140, 146, 141, 143, 147, 142, 146, 136, 131, 133, 144, 139, 139, 144, 139, 139, 143, 138, 138, 143, 138, 138, 182, 178, 177, 172, 168, 167, 63, 58, 57, 7, 2, 1, 54, 50, 49, 17, 13, 12, 104, 100, 99, 219, 212, 214, 248, 241, 241, 252, 245, 245, 245, 238, 238, 240, 233, 235, 238, 231, 231, 238, 230, 233, 235, 228, 228, 242, 235, 235, 241, 236, 237, 237, 232, 234, 236, 232, 231, 240, 235, 236, 238, 234, 235, 241, 236, 237, 240, 234, 238, 240, 234, 238, 240, 234, 238, 238, 235, 238, 237, 234, 237, 237, 234, 237, 242, 240, 243, 242, 240, 243, 239, 237, 237, 234, 232, 233, 237, 234, 235, 234, 232, 233, 237, 234, 235, 225, 223, 223, 229, 226, 227, 224, 222, 220, 224, 222, 220, 205, 203, 202, 227, 226, 222, 217, 215, 211, 192, 191, 187, 215, 213, 209, 224, 222, 218, 222, 220, 216, 229, 227, 223, 226, 225, 221, 230, 228, 224, 230, 228, 224, 222, 218, 215, 224, 222, 218, 239, 237, 233, 242, 241, 237, 255, 254, 250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 203, 201, 197, 114, 110, 107, 54, 50, 47, 113, 116, 110, 99, 100, 93, 94, 97, 89, 90, 94, 86, 98, 99, 90, 94, 97, 89, 95, 99, 89, 88, 90, 80, 101, 103, 93, 93, 95, 85, 83, 84, 75, 79, 82, 74, 81, 83, 75, 78, 79, 72, 86, 87, 80, 77, 78, 71, 80, 81, 74, 78, 79, 72, 78, 79, 72, 78, 79, 72, 71, 73, 63, 81, 83, 73, 81, 83, 73, 77, 78, 69, 72, 74, 64, 72, 74, 64, 76, 77, 68, 76, 77, 68, 74, 78, 68, 73, 74, 67, 69, 73, 65, 72, 75, 67, 81, 85, 77, 94, 97, 89, 76, 80, 72, 83, 87, 81, 73, 76, 68, 84, 85, 78, 71, 74, 66, 76, 80, 72, 74, 78, 70, 81, 85, 77, 81, 85, 77, 80, 83, 75, 87, 90, 82, 80, 83, 75, 83, 87, 79, 80, 83, 75, 88, 92, 84, 85, 88, 80, 87, 90, 82, 88, 92, 84, 59, 63, 52, 85, 88, 80, 76, 80, 72, 85, 88, 78, 85, 88, 78, 76, 80, 70, 90, 94, 84, 94, 98, 87, 101, 103, 93, 119, 120, 111, 99, 100, 91, 88, 90, 80, 70, 70, 59, 95, 97, 85, 69, 71, 59, 63, 65, 53, 80, 82, 70, 80, 79, 68, 89, 88, 77, 80, 79, 68, 76, 76, 64, 87, 87, 73, 95, 95, 81, 83, 83, 69, 77, 77, 64, 85, 85, 72, 116, 116, 102, 103, 103, 89, 90, 90, 76, 101, 104, 89, 98, 100, 86, 105, 105, 92, 92, 94, 80, 109, 112, 97, 109, 112, 97, 107, 109, 95, 125, 127, 112, 104, 106, 92, 99, 101, 87, 87, 90, 75, 105, 107, 93, 107, 109, 95, 105, 107, 93, 105, 107, 93, 85, 87, 73, 95, 97, 85, 113, 115, 103, 84, 86, 74, 131, 135, 123, 132, 137, 124, 100, 104, 93, 12, 16, 6, 0, 0, 0, 59, 63, 52, 78, 80, 70, 122, 124, 112, 99, 101, 89, 116, 118, 106, 137, 140, 125, 114, 116, 104, 122, 127, 111, 106, 110, 95, 106, 110, 95, 125, 132, 116, 111, 116, 103, 95, 99, 86, 82, 89, 75, 19, 23, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 30, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 36, 24, 118, 120, 105, 159, 162, 145, 148, 150, 134, 183, 183, 167, 161, 161, 145, 189, 190, 171, 154, 155, 136, 153, 154, 135, 151, 150, 129, 165, 163, 146, 168, 166, 151, 163, 160, 147, 142, 139, 126, 171, 168, 156, 156, 153, 140, 142, 139, 128, 177, 174, 161, 142, 138, 130, 153, 150, 142, 156, 152, 147, 137, 133, 128, 138, 135, 129, 146, 142, 139, 132, 128, 125, 155, 151, 147, 165, 161, 156, 120, 116, 110, 25, 21, 18, 30, 26, 23, 79, 
74, 74, 66, 62, 61, 208, 204, 203, 254, 249, 250, 252, 248, 249, 241, 236, 237, 240, 235, 236, 235, 230, 232, 236, 231, 233, 234, 229, 230, 236, 231, 233, 234, 229, 230, 233, 228, 229, 237, 232, 234, 237, 232, 234, 240, 233, 235, 239, 232, 234, 241, 234, 236, 240, 235, 236, 237, 232, 234, 237, 232, 234, 237, 232, 234, 238, 234, 235, 240, 235, 236, 240, 234, 238, 238, 234, 235, 237, 232, 236, 239, 236, 239, 236, 233, 236, 236, 233, 236, 236, 233, 236, 213, 211, 214, 220, 218, 221, 224, 221, 224, 231, 229, 229, 227, 225, 226, 230, 227, 228, 224, 221, 222, 226, 224, 225, 230, 227, 228, 230, 227, 228, 231, 229, 227, 231, 229, 227, 236, 234, 232, 247, 246, 242, 255, 255, 255, 255, 255, 255, 255, 255, 255, 212, 211, 205, 82, 81, 74, 55, 54, 48, 60, 58, 52, 53, 52, 45, 57, 56, 50, 62, 60, 56, 56, 55, 51, 64, 63, 59, 71, 70, 66, 71, 70, 66, 73, 71, 67, 66, 64, 60, 75, 73, 69, 75, 73, 71, 74, 72, 70, 78, 73, 72, 68, 66, 64, 123, 127, 117, 116, 120, 110, 103, 107, 97, 103, 107, 97, 102, 106, 96, 99, 102, 92, 102, 104, 94, 97, 98, 89, 94, 96, 86, 85, 87, 77, 88, 90, 80, 87, 89, 79, 86, 87, 80, 84, 85, 78, 77, 78, 71, 76, 77, 68, 74, 76, 68, 70, 71, 62, 83, 84, 75, 76, 77, 68, 72, 74, 64, 78, 79, 72, 65, 67, 57, 70, 71, 62, 77, 78, 69, 78, 80, 70, 67, 69, 60, 72, 74, 64, 69, 70, 61, 65, 66, 59, 73, 74, 67, 66, 67, 60, 65, 66, 59, 65, 66, 59, 53, 57, 49, 68, 72, 64, 62, 63, 56, 63, 64, 57, 49, 50, 43, 55, 56, 49, 56, 57, 50, 55, 59, 51, 49, 50, 43, 184, 185, 178, 43, 44, 37, 42, 43, 36, 57, 58, 51, 53, 57, 49, 42, 43, 36, 29, 30, 23, 41, 42, 35, 48, 49, 42, 40, 41, 34, 44, 45, 38, 43, 44, 37, 45, 49, 39, 37, 41, 30, 37, 41, 30, 39, 43, 33, 44, 46, 36, 47, 48, 39, 44, 46, 36, 31, 33, 23, 33, 34, 25, 47, 46, 35, 29, 31, 21, 47, 48, 36, 52, 54, 42, 60, 62, 50, 72, 74, 62, 69, 69, 57, 102, 101, 90, 92, 92, 81, 89, 89, 75, 88, 88, 74, 94, 93, 80, 78, 78, 65, 99, 99, 86, 111, 111, 97, 109, 109, 95, 100, 100, 87, 95, 95, 81, 84, 84, 70, 119, 119, 105, 113, 113, 100, 126, 126, 112, 136, 138, 124, 106, 108, 94, 108, 111, 96, 126, 128, 114, 147, 149, 134, 130, 133, 118, 113, 115, 101, 107, 109, 95, 112, 114, 100, 136, 138, 124, 129, 131, 117, 131, 134, 119, 125, 127, 112, 126, 128, 116, 133, 138, 125, 118, 123, 110, 129, 131, 121, 104, 105, 96, 0, 0, 0, 121, 123, 113, 163, 165, 153, 128, 130, 118, 113, 115, 101, 135, 137, 123, 142, 146, 131, 108, 112, 99, 73, 77, 64, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 8, 0, 28, 30, 18, 134, 134, 123, 184, 184, 171, 189, 189, 175, 180, 180, 164, 177, 178, 162, 156, 157, 141, 181, 181, 165, 172, 170, 155, 193, 191, 176, 175, 173, 158, 160, 158, 143, 170, 168, 150, 159, 158, 140, 156, 154, 136, 156, 154, 138, 164, 162, 147, 136, 134, 119, 156, 153, 140, 152, 150, 137, 182, 180, 167, 155, 152, 139, 141, 138, 127, 134, 131, 120, 139, 136, 128, 178, 175, 167, 180, 177, 169, 36, 33, 25, 6, 3, 0, 35, 32, 19, 222, 220, 207, 207, 204, 196, 233, 229, 225, 241, 236, 235, 227, 222, 223, 236, 231, 233, 238, 234, 235, 240, 234, 238, 233, 228, 229, 238, 234, 235, 236, 231, 233, 238, 234, 235, 238, 234, 235, 236, 231, 233, 235, 230, 232, 235, 230, 232, 235, 230, 232, 236, 231, 233, 236, 231, 233, 236, 231, 233, 237, 233, 232, 234, 229, 230, 230, 225, 227, 236, 232, 231, 235, 230, 230, 236, 232, 231, 236, 231, 233, 236, 231, 233, 236, 231, 233, 237, 232, 234, 237, 232, 234, 234, 229, 230, 236, 231, 233, 237, 234, 235, 236, 233, 234, 236, 233, 234, 234, 232, 233, 229, 226, 227, 224, 221, 222, 238, 236, 236, 255, 255, 255, 255, 255, 255, 255, 255, 255, 237, 
235, 231, 158, 156, 152, 119, 118, 114, 59, 57, 53, 60, 58, 54, 52, 50, 48, 52, 53, 48, 64, 63, 59, 59, 57, 53, 66, 64, 60, 67, 65, 61, 61, 59, 55, 83, 81, 77, 83, 81, 77, 70, 69, 65, 85, 86, 81, 78, 77, 73, 82, 80, 76, 80, 78, 74, 80, 78, 76, 85, 84, 80, 89, 87, 85, 77, 76, 72, 88, 86, 84, 83, 81, 77, 94, 92, 88, 82, 80, 76, 80, 78, 74, 94, 95, 90, 101, 105, 92, 120, 124, 111, 105, 106, 97, 116, 118, 108, 109, 111, 101, 128, 130, 120, 104, 105, 96, 109, 111, 101, 112, 113, 104, 105, 106, 97, 109, 111, 101, 83, 84, 75, 86, 88, 78, 66, 68, 58, 84, 85, 76, 72, 74, 64, 81, 83, 73, 80, 82, 72, 67, 69, 60, 64, 66, 56, 64, 66, 56, 59, 61, 51, 59, 61, 51, 62, 63, 54, 62, 63, 54, 57, 59, 49, 57, 59, 49, 55, 56, 47, 55, 56, 47, 84, 85, 78, 77, 78, 71, 53, 55, 48, 61, 60, 53, 38, 40, 32, 50, 51, 44, 58, 59, 52, 59, 61, 53, 47, 48, 41, 55, 56, 49, 29, 30, 23, 36, 35, 29, 38, 40, 32, 32, 31, 24, 31, 33, 25, 35, 34, 28, 31, 33, 25, 24, 26, 18, 49, 50, 43, 19, 20, 13, 33, 32, 25, 42, 43, 36, 35, 36, 29, 37, 38, 31, 35, 36, 29, 34, 35, 28, 47, 51, 43, 25, 29, 19, 38, 40, 30, 31, 33, 23, 44, 46, 36, 87, 86, 77, 43, 43, 34, 53, 52, 43, 53, 52, 43, 52, 51, 40, 42, 42, 31, 61, 60, 49, 75, 74, 63, 74, 73, 62, 76, 76, 64, 82, 82, 68, 103, 103, 89, 120, 117, 104, 145, 145, 131, 111, 109, 96, 122, 119, 107, 113, 114, 98, 130, 130, 114, 116, 116, 100, 96, 96, 80, 109, 109, 93, 113, 114, 98, 123, 123, 109, 131, 131, 115, 142, 143, 127, 126, 126, 112, 139, 139, 125, 121, 121, 108, 142, 143, 127, 159, 159, 145, 131, 134, 119, 125, 127, 112, 133, 135, 121, 125, 125, 111, 144, 143, 130, 144, 147, 132, 147, 149, 134, 135, 137, 125, 140, 142, 129, 137, 139, 127, 142, 144, 130, 140, 145, 132, 140, 142, 129, 168, 169, 160, 21, 24, 14, 0, 0, 0, 55, 56, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 67, 55, 144, 146, 134, 184, 186, 172, 194, 194, 180, 159, 159, 143, 191, 192, 176, 161, 161, 145, 160, 161, 142, 163, 164, 148, 165, 163, 146, 170, 168, 150, 172, 170, 153, 181, 179, 164, 182, 181, 163, 134, 131, 116, 147, 145, 130, 141, 138, 123, 152, 150, 135, 149, 147, 131, 188, 186, 171, 153, 151, 136, 157, 154, 142, 136, 133, 121, 155, 152, 139, 172, 169, 157, 190, 187, 174, 89, 87, 74, 14, 11, 0, 83, 80, 72, 22, 18, 13, 175, 171, 170, 255, 255, 254, 252, 248, 249, 236, 231, 233, 238, 234, 235, 238, 233, 237, 238, 233, 237, 240, 234, 238, 240, 234, 238, 234, 228, 232, 238, 233, 237, 236, 231, 235, 230, 225, 229, 236, 231, 233, 236, 231, 233, 237, 232, 234, 235, 230, 232, 238, 234, 235, 236, 231, 233, 236, 231, 233, 235, 230, 232, 235, 230, 232, 237, 232, 234, 234, 229, 230, 234, 229, 230, 236, 231, 233, 234, 229, 230, 235, 230, 232, 234, 229, 230, 235, 230, 230, 236, 232, 231, 236, 232, 231, 237, 233, 232, 241, 236, 235, 236, 232, 231, 228, 223, 223, 229, 225, 224, 255, 251, 251, 255, 255, 255, 255, 255, 255, 216, 214, 212, 88, 86, 82, 96, 94, 90, 68, 66, 62, 66, 64, 62, 62, 60, 60, 69, 67, 67, 70, 68, 69, 77, 75, 74, 70, 68, 67, 75, 72, 73, 76, 74, 74, 66, 67, 64, 73, 74, 71, 73, 71, 69, 78, 76, 75, 70, 68, 67, 82, 80, 76, 99, 98, 94, 95, 93, 87, 89, 88, 81, 76, 75, 68, 99, 98, 92, 91, 90, 86, 96, 95, 88, 78, 77, 73, 104, 102, 98, 92, 93, 88, 103, 101, 97, 92, 91, 87, 95, 93, 89, 91, 90, 86, 105, 106, 101, 91, 92, 85, 102, 100, 94, 78, 79, 72, 95, 97, 89, 79, 83, 72, 117, 117, 103, 120, 122, 110, 120, 120, 109, 112, 114, 102, 111, 113, 100, 113, 115, 103, 113, 114, 105, 118, 120, 107, 102, 104, 94, 91, 92, 83, 95, 97, 87, 86, 88, 76, 90, 91, 82, 91, 92, 83, 85, 87, 77, 67, 69, 60, 
73, 75, 65, 78, 80, 70, 69, 70, 61, 71, 73, 63, 83, 84, 75, 72, 74, 64, 55, 56, 47, 81, 83, 73, 64, 66, 56, 62, 63, 54, 57, 59, 49, 53, 55, 46, 44, 45, 38, 45, 47, 39, 97, 98, 91, 72, 73, 66, 60, 62, 54, 43, 42, 36, 52, 54, 46, 47, 48, 41, 37, 38, 31, 60, 62, 54, 40, 41, 34, 33, 34, 27, 49, 50, 43, 52, 54, 46, 35, 34, 28, 51, 52, 45, 66, 64, 58, 46, 45, 38, 46, 45, 38, 46, 45, 38, 51, 52, 45, 98, 97, 91, 39, 38, 31, 49, 50, 43, 45, 47, 39, 48, 47, 41, 47, 48, 39, 36, 38, 28, 56, 57, 48, 52, 54, 44, 66, 68, 56, 63, 65, 53, 49, 49, 38, 92, 92, 83, 90, 90, 78, 96, 95, 84, 90, 90, 78, 90, 90, 78, 108, 107, 94, 113, 113, 102, 104, 104, 90, 104, 104, 90, 128, 125, 112, 106, 103, 90, 127, 124, 111, 117, 115, 102, 145, 143, 128, 108, 108, 92, 153, 153, 139, 193, 191, 176, 134, 134, 121, 135, 135, 122, 130, 129, 116, 147, 147, 133, 142, 142, 129, 151, 151, 137, 160, 160, 146, 171, 171, 158, 149, 149, 136, 160, 160, 146, 141, 141, 128, 153, 153, 139, 130, 129, 116, 155, 157, 143, 162, 164, 150, 165, 168, 153, 153, 155, 140, 162, 164, 150, 114, 116, 102, 158, 160, 148, 164, 166, 154, 113, 115, 103, 90, 91, 82, 13, 14, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 6, 0, 78, 80, 68, 145, 148, 133, 190, 192, 176, 153, 153, 137, 162, 162, 147, 190, 191, 173, 191, 192, 174, 137, 137, 119, 179, 177, 160, 149, 147, 129, 175, 174, 154, 149, 147, 127, 147, 146, 128, 174, 173, 153, 167, 166, 148, 187, 185, 168, 175, 174, 156, 167, 166, 148, 153, 151, 136, 164, 162, 147, 135, 132, 119, 165, 162, 150, 157, 154, 144, 174, 171, 161, 158, 155, 145, 178, 175, 164, 54, 51, 43, 92, 88, 80, 0, 0, 0, 77, 73, 67, 217, 213, 210, 255, 251, 247, 245, 241, 238, 237, 233, 232, 236, 232, 229, 233, 228, 227, 240, 235, 236, 237, 232, 234, 238, 234, 235, 238, 234, 235, 239, 236, 239, 238, 233, 237, 234, 228, 232, 238, 233, 237, 242, 237, 241, 241, 235, 239, 240, 234, 238, 237, 232, 236, 236, 231, 235, 236, 231, 233, 235, 230, 234, 231, 227, 228, 235, 230, 232, 237, 232, 234, 236, 231, 233, 234, 229, 230, 234, 229, 230, 234, 229, 230, 234, 229, 230, 235, 230, 230, 235, 230, 230, 234, 230, 226, 217, 213, 210, 230, 228, 224, 229, 227, 223, 246, 245, 238, 255, 255, 255, 255, 255, 255, 212, 208, 202, 99, 95, 89, 66, 62, 59, 63, 59, 55, 73, 69, 68, 67, 63, 62, 70, 68, 67, 73, 71, 69, 77, 75, 74, 82, 80, 78, 82, 80, 78, 84, 82, 81, 89, 87, 85, 76, 76, 74, 87, 85, 83, 85, 83, 84, 81, 78, 79, 102, 100, 98, 92, 90, 89, 76, 74, 72, 89, 87, 83, 104, 104, 100, 100, 101, 96, 87, 88, 83, 90, 88, 84, 86, 87, 80, 87, 85, 79, 104, 103, 96, 97, 96, 87, 89, 88, 79, 91, 90, 82, 103, 102, 93, 91, 90, 84, 89, 88, 81, 97, 96, 89, 91, 90, 84, 81, 83, 75, 83, 82, 73, 98, 97, 89, 90, 91, 82, 96, 95, 84, 77, 76, 68, 87, 89, 79, 81, 83, 73, 84, 85, 76, 80, 82, 70, 95, 99, 86, 141, 141, 128, 148, 148, 136, 127, 129, 117, 122, 124, 114, 115, 117, 105, 113, 115, 103, 111, 113, 100, 107, 109, 97, 98, 100, 88, 109, 111, 101, 112, 113, 104, 100, 102, 92, 91, 93, 81, 95, 97, 87, 99, 100, 91, 84, 85, 76, 87, 89, 79, 98, 99, 90, 87, 89, 79, 64, 66, 56, 78, 80, 70, 85, 87, 77, 79, 81, 71, 63, 64, 55, 66, 68, 58, 65, 67, 57, 67, 69, 62, 67, 69, 62, 57, 59, 49, 73, 74, 67, 58, 59, 52, 69, 68, 60, 45, 47, 39, 74, 76, 68, 43, 44, 37, 45, 47, 39, 53, 55, 48, 48, 49, 42, 55, 56, 49, 35, 36, 29, 50, 51, 44, 35, 36, 29, 47, 46, 39, 47, 48, 41, 47, 48, 41, 46, 45, 38, 43, 42, 36, 63, 62, 56, 33, 34, 27, 54, 53, 46, 56, 55, 49, 56, 57, 50, 48, 49, 42, 49, 50, 41, 50, 52, 42, 50, 52, 42, 62, 63, 54, 63, 64, 55, 70, 70, 59, 71, 71, 60, 60, 59, 48, 60, 59, 48, 83, 
83, 71, 81, 80, 69, 81, 80, 69, 95, 94, 83, 82, 81, 70, 99, 99, 86, 100, 100, 87, 104, 104, 90, 111, 111, 97, 128, 128, 115, 121, 118, 105, 109, 107, 94, 104, 104, 88, 138, 136, 121, 151, 151, 135, 151, 149, 134, 155, 152, 137, 158, 158, 142, 168, 168, 152, 147, 147, 131, 138, 138, 122, 141, 141, 126, 158, 158, 142, 138, 138, 122, 178, 179, 163, 163, 163, 150, 158, 158, 144, 167, 167, 153, 165, 164, 151, 162, 164, 150, 142, 144, 130, 81, 81, 67, 31, 33, 21, 0, 0, 0, 0, 0, 0, 12, 13, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 90, 89, 82, 144, 143, 134, 136, 133, 123, 153, 151, 136, 182, 181, 163, 167, 166, 148, 174, 173, 155, 165, 163, 146, 161, 162, 143, 187, 187, 169, 174, 175, 156, 113, 111, 93, 174, 173, 153, 163, 161, 141, 153, 152, 132, 188, 187, 167, 166, 165, 147, 150, 149, 128, 155, 156, 138, 155, 153, 135, 167, 166, 148, 172, 170, 153, 153, 151, 136, 151, 149, 134, 143, 140, 128, 159, 157, 142, 195, 192, 182, 146, 143, 135, 0, 0, 0, 0, 0, 0, 71, 67, 64, 227, 223, 220, 255, 254, 251, 248, 244, 240, 234, 230, 226, 237, 233, 230, 241, 237, 233, 240, 235, 234, 238, 234, 233, 237, 233, 232, 236, 232, 231, 238, 234, 235, 238, 234, 235, 237, 232, 234, 237, 232, 234, 238, 234, 235, 237, 232, 234, 237, 232, 234, 238, 234, 235, 237, 232, 236, 237, 232, 236, 238, 233, 237, 237, 232, 236, 235, 230, 234, 236, 231, 235, 236, 231, 235, 237, 232, 236, 234, 229, 230, 234, 228, 232, 235, 230, 232, 231, 227, 228, 237, 232, 234, 230, 226, 225, 229, 225, 224, 227, 226, 222, 225, 223, 220, 255, 255, 255, 255, 255, 255, 188, 187, 180, 89, 88, 81, 54, 52, 48, 71, 70, 66, 66, 64, 62, 71, 69, 68, 75, 71, 70, 72, 67, 67, 79, 74, 74, 82, 78, 77, 85, 80, 79, 74, 70, 69, 85, 83, 82, 85, 83, 82, 76, 74, 72, 77, 75, 74, 99, 98, 94, 80, 78, 74, 89, 87, 83, 77, 76, 72, 76, 74, 70, 89, 87, 83, 90, 88, 84, 87, 85, 81, 76, 74, 72, 97, 95, 91, 83, 81, 79, 84, 84, 82, 86, 87, 82, 76, 76, 72, 90, 89, 82, 91, 92, 85, 91, 92, 85, 144, 142, 136, 83, 82, 73, 92, 92, 83, 88, 87, 78, 84, 83, 75, 131, 130, 121, 87, 86, 77, 118, 117, 108, 84, 83, 75, 92, 92, 83, 88, 87, 78, 83, 83, 71, 81, 80, 69, 87, 86, 75, 88, 87, 78, 73, 72, 61, 104, 104, 92, 81, 83, 71, 86, 88, 76, 85, 87, 75, 80, 84, 71, 146, 145, 134, 121, 121, 110, 114, 116, 104, 112, 113, 104, 113, 115, 103, 120, 120, 109, 114, 116, 106, 118, 119, 110, 120, 121, 112, 104, 105, 96, 100, 102, 92, 95, 97, 87, 84, 86, 74, 92, 94, 84, 93, 95, 83, 90, 91, 82, 83, 84, 75, 80, 82, 72, 79, 81, 71, 102, 104, 94, 65, 69, 58, 67, 69, 60, 72, 74, 64, 69, 70, 61, 69, 70, 61, 59, 61, 53, 67, 69, 62, 79, 81, 71, 71, 73, 63, 52, 54, 46, 84, 83, 77, 49, 50, 41, 81, 79, 73, 166, 164, 158, 58, 59, 52, 62, 63, 56, 83, 84, 77, 52, 54, 46, 69, 70, 63, 60, 62, 54, 60, 62, 54, 89, 88, 81, 53, 52, 45, 52, 54, 46, 49, 50, 43, 60, 58, 52, 88, 90, 82, 49, 50, 43, 47, 48, 41, 42, 43, 36, 74, 76, 66, 57, 57, 48, 58, 60, 50, 76, 77, 68, 70, 69, 61, 60, 62, 52, 80, 79, 70, 74, 73, 62, 102, 101, 90, 77, 77, 65, 66, 65, 54, 74, 73, 62, 94, 93, 82, 102, 101, 90, 108, 107, 96, 130, 129, 116, 120, 120, 107, 114, 114, 101, 126, 126, 112, 120, 117, 104, 131, 129, 116, 117, 115, 102, 150, 148, 132, 125, 125, 109, 155, 152, 137, 167, 165, 150, 163, 161, 145, 175, 175, 159, 148, 148, 134, 147, 147, 133, 176, 176, 162, 165, 164, 151, 168, 168, 154, 147, 147, 133, 131, 131, 117, 117, 117, 103, 20, 20, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 11, 4, 56, 55, 47, 113, 113, 102, 109, 111, 99, 136, 138, 124, 158, 161, 146, 158, 158, 142, 151, 149, 134, 167, 165, 150, 157, 155, 138, 152, 151, 
133, 139, 138, 120, 142, 140, 122, 150, 149, 128, 172, 171, 151, 173, 172, 152, 138, 137, 117, 191, 189, 169, 152, 151, 131, 172, 170, 153, 149, 147, 129, 155, 153, 135, 139, 138, 120, 150, 148, 132, 139, 137, 122, 143, 141, 126, 162, 162, 149, 191, 188, 175, 74, 72, 59, 2, 0, 0, 26, 24, 13, 92, 88, 80, 181, 178, 172, 251, 247, 244, 244, 240, 235, 233, 229, 225, 233, 229, 225, 237, 233, 232, 238, 234, 233, 238, 234, 233, 236, 232, 231, 237, 233, 232, 241, 236, 235, 237, 233, 232, 231, 227, 226, 237, 233, 232, 237, 233, 232, 240, 235, 234, 233, 228, 227, 235, 230, 232, 238, 234, 235, 238, 234, 235, 238, 234, 235, 238, 234, 235, 237, 234, 235, 238, 233, 237, 238, 233, 237, 235, 230, 234, 236, 231, 235, 236, 231, 235, 234, 232, 235, 236, 231, 233, 236, 231, 233, 236, 231, 233, 233, 228, 229, 222, 217, 219, 245, 241, 242, 255, 255, 255, 255, 255, 255, 123, 121, 117, 70, 69, 65, 74, 72, 68, 62, 60, 56, 68, 66, 62, 77, 76, 72, 76, 75, 68, 76, 74, 70, 80, 78, 72, 71, 70, 64, 81, 79, 75, 81, 79, 75, 81, 79, 75, 91, 87, 83, 88, 84, 81, 94, 90, 87, 94, 90, 87, 94, 90, 87, 89, 85, 82, 82, 80, 76, 98, 96, 93, 91, 90, 86, 105, 104, 100, 92, 91, 87, 81, 79, 75, 100, 99, 93, 96, 95, 88, 81, 79, 73, 85, 85, 76, 94, 92, 86, 88, 86, 80, 90, 89, 82, 89, 88, 81, 89, 88, 81, 91, 90, 84, 83, 84, 77, 87, 85, 79, 71, 70, 64, 94, 93, 84, 90, 89, 80, 95, 94, 85, 75, 74, 65, 88, 87, 78, 77, 76, 68, 82, 81, 72, 98, 97, 89, 92, 92, 83, 84, 83, 75, 91, 90, 82, 84, 83, 75, 111, 110, 99, 82, 81, 72, 89, 88, 77, 95, 94, 83, 95, 94, 83, 110, 109, 98, 84, 84, 72, 109, 111, 99, 77, 79, 67, 104, 106, 94, 85, 85, 74, 122, 119, 107, 130, 129, 118, 120, 120, 109, 119, 119, 107, 116, 115, 104, 106, 108, 96, 105, 106, 97, 102, 104, 94, 95, 97, 87, 121, 123, 113, 96, 100, 90, 106, 109, 99, 93, 95, 85, 102, 104, 94, 95, 97, 87, 87, 89, 79, 92, 94, 84, 85, 88, 78, 83, 87, 77, 73, 75, 65, 73, 77, 66, 69, 73, 63, 76, 77, 68, 71, 72, 65, 82, 86, 78, 71, 72, 65, 70, 71, 62, 62, 63, 56, 80, 81, 74, 99, 100, 93, 63, 64, 57, 79, 80, 73, 71, 72, 65, 78, 79, 72, 81, 83, 75, 70, 71, 64, 62, 66, 58, 68, 72, 64, 99, 100, 93, 71, 72, 65, 72, 73, 66, 55, 56, 49, 57, 58, 51, 67, 69, 62, 56, 57, 50, 55, 56, 49, 64, 65, 58, 70, 71, 64, 69, 70, 63, 64, 63, 57, 47, 48, 41, 51, 52, 45, 64, 66, 56, 51, 53, 43, 70, 71, 62, 66, 68, 58, 90, 89, 80, 88, 87, 78, 70, 70, 59, 59, 58, 47, 100, 100, 89, 100, 100, 89, 133, 133, 121, 108, 107, 96, 82, 81, 70, 131, 131, 117, 132, 131, 120, 149, 149, 138, 135, 132, 121, 166, 166, 152, 147, 147, 133, 152, 152, 138, 165, 164, 151, 170, 170, 157, 169, 169, 156, 142, 142, 129, 113, 113, 102, 68, 67, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 30, 19, 104, 104, 92, 123, 122, 111, 125, 125, 113, 132, 132, 118, 146, 146, 132, 139, 139, 125, 139, 139, 125, 140, 140, 124, 152, 152, 136, 140, 140, 124, 140, 140, 124, 141, 141, 126, 138, 138, 120, 166, 165, 147, 168, 167, 149, 143, 142, 121, 154, 150, 133, 131, 129, 112, 168, 167, 149, 159, 158, 140, 170, 168, 150, 161, 159, 144, 159, 157, 142, 167, 165, 150, 156, 154, 138, 153, 151, 136, 178, 176, 160, 93, 91, 76, 3, 0, 0, 181, 179, 164, 69, 67, 64, 135, 134, 128, 255, 255, 251, 240, 239, 235, 238, 234, 231, 238, 234, 231, 236, 232, 231, 240, 235, 234, 231, 227, 226, 235, 230, 230, 235, 230, 230, 238, 234, 233, 236, 232, 231, 240, 235, 234, 236, 232, 229, 237, 233, 232, 236, 232, 231, 229, 225, 224, 237, 233, 232, 236, 232, 231, 237, 233, 232, 236, 232, 231, 236, 232, 231, 234, 229, 228, 238, 234, 233, 234, 229, 230, 235, 
230, 232, 236, 231, 233, 240, 235, 236, 238, 236, 236, 231, 228, 231, 233, 230, 234, 237, 234, 237, 233, 227, 231, 238, 235, 238, 241, 236, 237, 255, 255, 255, 255, 255, 255, 108, 104, 103, 102, 100, 98, 70, 65, 64, 67, 65, 63, 71, 69, 68, 70, 68, 67, 77, 75, 74, 80, 78, 76, 80, 78, 76, 88, 86, 84, 84, 83, 79, 75, 73, 69, 82, 80, 76, 88, 86, 80, 84, 83, 79, 85, 84, 78, 88, 86, 80, 98, 97, 91, 87, 83, 78, 93, 89, 84, 93, 89, 84, 100, 96, 91, 86, 82, 77, 93, 89, 86, 93, 89, 86, 98, 96, 93, 94, 92, 88, 109, 107, 101, 95, 93, 87, 85, 84, 78, 95, 93, 87, 87, 85, 79, 94, 92, 86, 87, 86, 77, 97, 96, 87, 83, 82, 73, 95, 94, 85, 87, 86, 77, 91, 90, 82, 92, 92, 83, 103, 102, 93, 87, 89, 79, 84, 83, 75, 97, 96, 87, 85, 85, 76, 84, 83, 75, 81, 80, 71, 95, 94, 85, 98, 97, 89, 78, 78, 69, 88, 87, 76, 83, 82, 73, 114, 114, 105, 133, 132, 124, 91, 90, 82, 87, 86, 77, 84, 83, 75, 91, 91, 80, 76, 75, 66, 66, 65, 56, 97, 96, 85, 111, 110, 99, 76, 75, 66, 74, 73, 62, 113, 113, 102, 82, 81, 70, 76, 76, 64, 131, 129, 116, 127, 127, 116, 117, 116, 105, 116, 115, 104, 114, 114, 103, 104, 105, 96, 98, 99, 90, 100, 102, 92, 103, 107, 99, 107, 109, 99, 96, 100, 90, 85, 88, 78, 95, 97, 87, 88, 92, 82, 91, 92, 83, 88, 92, 82, 114, 118, 107, 92, 94, 84, 82, 86, 76, 83, 84, 77, 75, 79, 69, 74, 78, 70, 71, 72, 65, 73, 74, 67, 78, 81, 75, 65, 68, 60, 67, 69, 62, 57, 60, 52, 77, 78, 71, 72, 73, 66, 71, 72, 65, 67, 69, 62, 95, 97, 89, 67, 69, 62, 77, 78, 71, 59, 61, 53, 69, 70, 63, 66, 69, 62, 74, 75, 70, 173, 175, 167, 73, 74, 67, 58, 59, 52, 57, 58, 51, 80, 81, 74, 71, 72, 65, 45, 47, 39, 59, 61, 53, 67, 69, 62, 58, 59, 52, 65, 66, 59, 84, 85, 76, 57, 59, 49, 71, 73, 63, 70, 71, 62, 67, 69, 60, 63, 62, 54, 85, 85, 76, 75, 74, 65, 96, 95, 86, 105, 104, 96, 94, 93, 82, 104, 104, 92, 124, 123, 112, 134, 134, 123, 105, 105, 94, 99, 99, 88, 41, 41, 29, 18, 17, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 12, 6, 73, 71, 65, 130, 129, 120, 111, 110, 99, 137, 136, 125, 145, 145, 131, 137, 136, 123, 141, 141, 128, 147, 147, 133, 152, 152, 138, 140, 140, 126, 144, 143, 130, 180, 180, 164, 144, 142, 127, 140, 140, 124, 136, 134, 119, 129, 127, 112, 131, 131, 113, 152, 152, 134, 156, 157, 139, 137, 135, 118, 140, 141, 122, 122, 120, 103, 144, 142, 125, 142, 140, 122, 135, 133, 113, 130, 128, 111, 123, 121, 106, 155, 152, 139, 179, 176, 164, 97, 95, 84, 0, 0, 0, 54, 51, 43, 114, 111, 103, 168, 165, 159, 237, 234, 228, 254, 250, 246, 235, 231, 228, 236, 234, 230, 238, 236, 234, 233, 231, 230, 235, 230, 230, 233, 231, 230, 237, 235, 233, 233, 228, 227, 237, 233, 232, 238, 234, 233, 237, 233, 232, 237, 233, 232, 237, 233, 232, 236, 232, 229, 235, 231, 228, 235, 231, 228, 237, 233, 232, 236, 232, 231, 234, 229, 228, 235, 230, 230, 236, 232, 231, 236, 232, 231, 234, 229, 228, 233, 228, 227, 235, 230, 230, 235, 230, 230, 234, 229, 230, 231, 227, 228, 231, 227, 228, 236, 233, 234, 230, 227, 228, 248, 246, 247, 255, 255, 255, 255, 255, 255, 133, 131, 129, 121, 119, 118, 68, 66, 64, 71, 69, 70, 66, 63, 64, 69, 67, 67, 76, 74, 74, 80, 77, 78, 84, 82, 83, 83, 81, 81, 82, 80, 78, 88, 85, 86, 89, 87, 85, 75, 73, 71, 92, 90, 89, 87, 85, 81, 78, 77, 73, 88, 86, 80, 102, 100, 94, 89, 88, 81, 100, 99, 93, 88, 86, 80, 98, 97, 91, 90, 89, 82, 96, 93, 87, 86, 83, 75, 110, 107, 99, 117, 114, 106, 99, 95, 87, 88, 85, 77, 87, 84, 76, 89, 86, 78, 91, 90, 82, 95, 94, 85, 87, 85, 79, 85, 84, 78, 84, 83, 77, 83, 82, 75, 99, 98, 92, 89, 88, 79, 82, 81, 72, 96, 95, 86, 76, 76, 64, 88, 87, 78, 
96, 95, 86, 94, 93, 84, 95, 94, 85, 86, 88, 78, 91, 90, 82, 85, 87, 77, 84, 83, 75, 85, 85, 76, 89, 88, 79, 87, 86, 77, 88, 87, 78, 110, 109, 98, 74, 73, 64, 90, 90, 78, 110, 109, 98, 84, 83, 75, 116, 115, 104, 91, 91, 80, 92, 92, 81, 89, 88, 77, 97, 96, 85, 67, 66, 55, 105, 104, 96, 102, 101, 92, 92, 92, 83, 91, 92, 83, 81, 80, 71, 67, 66, 57, 99, 98, 90, 112, 112, 100, 120, 120, 109, 112, 113, 104, 122, 124, 114, 120, 121, 112, 119, 120, 111, 99, 100, 93, 105, 106, 99, 93, 94, 87, 97, 98, 91, 74, 76, 68, 111, 115, 105, 91, 92, 83, 93, 97, 86, 98, 99, 90, 100, 102, 92, 91, 92, 83, 79, 80, 73, 76, 80, 72, 77, 78, 71, 78, 81, 73, 67, 68, 64, 77, 78, 71, 69, 73, 65, 80, 81, 76, 60, 64, 56, 81, 83, 75, 70, 71, 64, 107, 108, 101, 73, 74, 67, 67, 69, 62, 67, 69, 62, 65, 66, 59, 87, 89, 81, 74, 78, 70, 58, 61, 53, 51, 54, 46, 58, 61, 53, 67, 69, 62, 67, 69, 62, 70, 71, 64, 69, 70, 63, 57, 58, 51, 70, 71, 64, 72, 73, 66, 67, 69, 62, 65, 66, 59, 66, 67, 60, 59, 61, 53, 58, 59, 52, 19, 20, 13, 17, 19, 9, 14, 16, 6, 0, 0, 0, 0, 0, 0, 14, 14, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 32, 20, 76, 75, 66, 105, 107, 95, 102, 104, 92, 111, 110, 99, 103, 102, 91, 100, 100, 89, 117, 117, 103, 119, 119, 107, 116, 115, 104, 128, 128, 117, 162, 162, 151, 165, 164, 151, 146, 146, 132, 133, 133, 119, 125, 125, 111, 120, 120, 107, 113, 113, 100, 196, 196, 180, 116, 116, 100, 126, 126, 110, 123, 123, 107, 134, 134, 119, 143, 141, 126, 159, 159, 143, 120, 117, 102, 147, 145, 130, 124, 124, 106, 138, 136, 121, 134, 134, 119, 116, 116, 100, 134, 131, 118, 178, 178, 165, 81, 79, 66, 21, 18, 5, 225, 223, 208, 210, 208, 197, 145, 142, 136, 245, 241, 238, 255, 251, 247, 250, 246, 243, 238, 234, 231, 241, 237, 233, 245, 241, 238, 241, 236, 235, 241, 236, 235, 237, 233, 232, 237, 235, 233, 236, 234, 232, 239, 237, 235, 233, 231, 230, 232, 230, 228, 238, 236, 234, 236, 234, 232, 237, 233, 232, 237, 233, 232, 241, 236, 235, 236, 232, 229, 233, 228, 227, 234, 229, 228, 234, 229, 228, 233, 228, 227, 233, 228, 227, 233, 228, 227, 233, 228, 227, 233, 228, 227, 235, 230, 230, 236, 232, 231, 236, 232, 231, 236, 232, 231, 231, 227, 224, 235, 230, 230, 255, 251, 247, 255, 255, 255, 210, 206, 203, 35, 30, 29, 68, 66, 64, 63, 58, 59, 64, 62, 63, 62, 60, 60, 74, 71, 72, 81, 78, 79, 91, 88, 92, 90, 88, 88, 97, 94, 97, 87, 84, 87, 88, 85, 86, 103, 100, 101, 102, 99, 100, 84, 82, 83, 74, 72, 70, 83, 81, 79, 100, 98, 97, 99, 99, 97, 83, 81, 77, 84, 85, 78, 103, 101, 95, 89, 88, 79, 84, 83, 75, 108, 107, 98, 111, 110, 103, 104, 103, 94, 76, 75, 68, 73, 72, 63, 89, 88, 79, 82, 79, 71, 101, 98, 90, 104, 102, 91, 94, 91, 81, 97, 95, 84, 85, 82, 71, 88, 85, 77, 107, 104, 96, 91, 91, 80, 94, 93, 84, 90, 89, 80, 92, 91, 85, 83, 82, 73, 99, 98, 90, 88, 87, 78, 102, 101, 90, 98, 97, 89, 83, 83, 71, 95, 94, 83, 80, 79, 70, 91, 90, 82, 95, 94, 83, 88, 87, 76, 88, 87, 76, 95, 94, 83, 83, 83, 71, 77, 76, 68, 87, 86, 77, 95, 94, 83, 103, 102, 91, 83, 83, 71, 85, 85, 74, 91, 91, 80, 103, 102, 91, 92, 92, 81, 90, 90, 78, 80, 79, 68, 110, 109, 98, 81, 80, 69, 104, 104, 92, 94, 93, 82, 74, 73, 62, 74, 73, 64, 119, 118, 110, 69, 68, 60, 74, 73, 64, 82, 81, 72, 102, 101, 92, 91, 90, 82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 12, 6, 11, 14, 9, 35, 38, 32, 31, 32, 27, 33, 34, 27, 32, 36, 28, 77, 78, 71, 49, 50, 43, 41, 42, 35, 48, 49, 42, 54, 58, 50, 38, 40, 32, 51, 54, 46, 26, 29, 24, 33, 37, 29, 33, 37, 29, 25, 28, 23, 14, 17, 11, 11, 14, 9, 5, 9, 3, 
5, 9, 1, 3, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 60, 62, 50, 99, 100, 91, 95, 94, 83, 90, 92, 80, 97, 96, 85, 67, 69, 57, 87, 90, 75, 103, 102, 91, 95, 94, 83, 106, 106, 95, 123, 126, 111, 88, 88, 74, 195, 195, 181, 113, 113, 100, 117, 117, 103, 130, 129, 116, 123, 123, 109, 104, 104, 90, 128, 128, 117, 90, 90, 76, 124, 124, 110, 88, 88, 74, 113, 113, 102, 133, 133, 119, 139, 139, 125, 141, 141, 126, 141, 141, 128, 112, 112, 96, 119, 119, 103, 102, 102, 88, 108, 107, 94, 116, 116, 102, 134, 134, 121, 113, 113, 100, 137, 135, 122, 143, 140, 128, 144, 144, 128, 147, 145, 132, 74, 72, 57, 17, 17, 3, 6, 6, 0, 253, 252, 243, 106, 105, 97, 241, 240, 234, 247, 246, 240, 247, 243, 237, 243, 239, 236, 241, 237, 231, 239, 237, 233, 238, 236, 234, 239, 237, 233, 241, 236, 235, 238, 236, 234, 239, 237, 235, 237, 233, 232, 238, 234, 233, 240, 238, 237, 234, 232, 231, 238, 234, 233, 238, 234, 233, 236, 234, 232, 233, 231, 230, 223, 221, 219, 235, 230, 230, 235, 230, 230, 235, 230, 230, 233, 228, 227, 233, 228, 227, 234, 229, 228, 229, 225, 224, 229, 225, 224, 230, 226, 225, 227, 223, 220, 229, 225, 222, 229, 225, 222, 235, 231, 228, 235, 231, 228, 249, 245, 242, 255, 255, 255, 243, 239, 238, 75, 71, 70, 58, 53, 53, 68, 66, 64, 63, 61, 60, 67, 64, 65, 75, 72, 73, 87, 84, 85, 83, 81, 81, 94, 89, 91, 90, 88, 88, 103, 100, 103, 103, 100, 103, 110, 107, 110, 90, 87, 90, 97, 95, 95, 104, 101, 104, 99, 97, 98, 99, 97, 98, 88, 85, 86, 98, 96, 97, 80, 78, 76, 91, 89, 90, 83, 83, 81, 84, 85, 80, 104, 105, 98, 109, 108, 99, 104, 103, 94, 85, 85, 76, 92, 92, 83, 95, 94, 85, 102, 99, 91, 83, 82, 73, 110, 109, 100, 111, 108, 100, 108, 107, 98, 102, 101, 92, 106, 106, 95, 79, 76, 68, 96, 95, 84, 102, 99, 89, 86, 83, 72, 100, 97, 86, 88, 85, 75, 93, 90, 80, 91, 90, 82, 91, 88, 77, 87, 86, 77, 99, 95, 87, 95, 92, 84, 80, 79, 70, 90, 89, 80, 83, 83, 71, 91, 91, 80, 96, 95, 84, 84, 84, 72, 71, 71, 60, 87, 86, 75, 102, 101, 90, 91, 91, 80, 89, 88, 77, 98, 98, 86, 97, 96, 85, 92, 92, 81, 90, 90, 78, 81, 80, 69, 90, 90, 78, 92, 92, 81, 95, 94, 83, 88, 87, 76, 95, 94, 83, 95, 94, 83, 71, 71, 60, 87, 86, 75, 96, 95, 84, 81, 80, 69, 94, 93, 82, 98, 98, 86, 83, 82, 73, 85, 85, 76, 96, 95, 86, 82, 81, 72, 68, 67, 58, 91, 90, 82, 74, 73, 64, 100, 100, 89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, 114, 107, 0, 0, 0, 10, 12, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 23, 16, 55, 54, 48, 77, 76, 70, 84, 83, 77, 88, 86, 80, 83, 84, 77, 85, 86, 79, 71, 72, 65, 69, 70, 61, 73, 75, 65, 59, 61, 51, 56, 57, 48, 40, 41, 32, 83, 85, 72, 66, 68, 56, 70, 70, 59, 66, 65, 54, 64, 64, 55, 62, 62, 50, 71, 71, 60, 83, 83, 71, 92, 94, 80, 70, 72, 58, 71, 73, 59, 105, 105, 92, 91, 93, 79, 90, 90, 76, 85, 87, 73, 100, 100, 87, 89, 89, 75, 124, 124, 110, 116, 116, 100, 96, 96, 82, 110, 110, 94, 187, 187, 173, 90, 90, 76, 118, 118, 104, 90, 90, 76, 127, 127, 114, 120, 120, 109, 99, 99, 86, 109, 109, 95, 109, 109, 95, 110, 110, 96, 127, 127, 114, 127, 127, 114, 104, 104, 88, 109, 109, 95, 127, 127, 114, 125, 125, 109, 142, 143, 127, 130, 129, 116, 45, 44, 33, 27, 27, 15, 6, 
6, 0, 69, 69, 57, 141, 141, 128, 225, 223, 212, 242, 239, 228, 245, 242, 234, 236, 233, 225, 241, 237, 231, 237, 234, 228, 233, 232, 228, 233, 232, 228, 236, 234, 230, 238, 236, 232, 241, 237, 233, 236, 234, 230, 234, 232, 231, 237, 235, 231, 238, 236, 234, 239, 237, 235, 238, 236, 234, 242, 240, 239, 234, 232, 231, 240, 238, 237, 234, 232, 231, 236, 234, 232, 236, 234, 232, 234, 232, 231, 232, 230, 228, 231, 229, 227, 235, 230, 230, 233, 228, 227, 231, 227, 226, 234, 229, 228, 230, 226, 225, 231, 227, 226, 230, 226, 225, 225, 221, 220, 242, 238, 235, 255, 255, 255, 255, 255, 255, 145, 141, 138, 35, 30, 29, 57, 53, 49, 67, 63, 60, 72, 67, 67, 82, 78, 77, 78, 73, 74, 80, 75, 77, 100, 95, 97, 98, 96, 97, 96, 93, 94, 92, 90, 93, 96, 93, 94, 95, 92, 93, 91, 89, 90, 106, 104, 105, 94, 91, 92, 78, 76, 75, 102, 100, 98, 67, 64, 65, 110, 108, 106, 80, 78, 74, 78, 77, 73, 100, 99, 95, 116, 114, 110, 99, 97, 96, 99, 97, 96, 91, 89, 88, 84, 83, 79, 112, 111, 105, 104, 103, 94, 102, 104, 94, 112, 112, 100, 78, 80, 68, 105, 104, 96, 109, 108, 99, 92, 92, 83, 124, 121, 111, 90, 90, 78, 89, 86, 78, 110, 107, 99, 90, 89, 80, 100, 97, 89, 81, 78, 70, 99, 96, 85, 87, 84, 74, 85, 82, 71, 85, 82, 71, 83, 81, 70, 96, 93, 83, 81, 78, 68, 102, 99, 89, 87, 84, 74, 85, 81, 73, 89, 86, 78, 100, 97, 86, 85, 81, 73, 112, 112, 100, 102, 101, 90, 84, 84, 72, 82, 81, 70, 87, 86, 75, 97, 96, 85, 89, 88, 77, 91, 91, 80, 90, 90, 78, 87, 86, 75, 98, 98, 86, 95, 94, 83, 81, 80, 69, 84, 84, 72, 87, 86, 75, 80, 79, 68, 78, 78, 67, 87, 86, 75, 85, 85, 74, 80, 79, 68, 104, 104, 92, 98, 98, 86, 84, 84, 72, 82, 81, 70, 83, 83, 71, 103, 102, 91, 102, 101, 92, 87, 86, 77, 91, 90, 82, 91, 90, 82, 105, 104, 96, 59, 58, 49, 91, 90, 82, 87, 89, 77, 78, 80, 68, 87, 86, 77, 83, 82, 73, 98, 99, 90, 64, 65, 58, 74, 76, 68, 62, 63, 56, 53, 55, 48, 69, 70, 63, 69, 70, 63, 60, 62, 54, 60, 62, 54, 62, 63, 56, 73, 74, 67, 60, 62, 54, 67, 69, 62, 58, 59, 52, 58, 59, 52, 49, 50, 43, 62, 63, 56, 62, 63, 56, 66, 68, 58, 59, 61, 51, 47, 48, 41, 56, 57, 50, 55, 56, 49, 52, 54, 46, 62, 63, 56, 62, 63, 56, 63, 64, 57, 67, 66, 57, 68, 67, 60, 61, 60, 51, 60, 62, 52, 66, 68, 58, 64, 65, 58, 66, 67, 60, 66, 64, 58, 60, 58, 52, 48, 47, 41, 52, 51, 42, 78, 78, 69, 60, 59, 50, 53, 52, 43, 62, 61, 52, 57, 59, 49, 94, 96, 86, 66, 65, 54, 68, 67, 56, 66, 65, 54, 66, 68, 56, 65, 67, 55, 54, 53, 42, 87, 86, 75, 108, 107, 94, 73, 72, 61, 67, 66, 55, 144, 143, 132, 96, 95, 84, 80, 82, 70, 81, 80, 69, 66, 65, 52, 82, 82, 68, 69, 69, 55, 85, 85, 72, 68, 68, 54, 90, 90, 76, 89, 89, 75, 88, 88, 72, 108, 107, 94, 96, 96, 82, 100, 100, 87, 112, 112, 96, 110, 110, 96, 108, 107, 94, 133, 133, 119, 113, 113, 102, 119, 119, 107, 104, 104, 92, 97, 97, 83, 120, 120, 109, 118, 117, 106, 135, 135, 122, 141, 141, 129, 74, 73, 62, 114, 114, 103, 28, 28, 17, 89, 88, 77, 75, 74, 63, 155, 154, 146, 239, 238, 229, 234, 234, 225, 230, 228, 222, 237, 235, 229, 239, 238, 229, 233, 232, 226, 242, 241, 235, 234, 233, 227, 237, 235, 231, 236, 234, 230, 236, 234, 230, 237, 235, 231, 238, 236, 232, 238, 236, 232, 240, 239, 235, 238, 236, 232, 237, 235, 231, 238, 236, 232, 237, 235, 231, 239, 237, 233, 237, 235, 231, 238, 234, 231, 238, 234, 231, 237, 233, 232, 240, 235, 234, 237, 233, 232, 231, 227, 226, 236, 232, 231, 233, 228, 227, 231, 227, 226, 234, 229, 228, 235, 230, 230, 232, 230, 228, 235, 230, 230, 233, 228, 227, 255, 255, 255, 255, 255, 255, 215, 211, 208, 156, 152, 149, 56, 52, 48, 70, 65, 62, 77, 72, 71, 79, 74, 74, 77, 72, 71, 88, 84, 81, 108, 105, 104, 99, 94, 93, 85, 80, 81, 97, 
93, 94, 88, 85, 86, 106, 104, 105, 94, 91, 90, 78, 76, 77, 104, 102, 100, 87, 85, 83, 92, 90, 89, 82, 80, 78, 97, 95, 93, 105, 103, 102, 99, 97, 96, 105, 104, 100, 106, 105, 101, 89, 87, 83, 104, 102, 98, 96, 95, 88, 104, 103, 96, 89, 88, 81, 103, 101, 95, 92, 91, 85, 99, 98, 94, 77, 76, 72, 88, 86, 80, 105, 104, 96, 76, 76, 64, 92, 92, 83, 91, 91, 80, 80, 79, 68, 111, 110, 101, 88, 87, 78, 78, 78, 67, 111, 110, 99, 78, 78, 67, 87, 86, 75, 95, 94, 83, 97, 96, 87, 95, 92, 84, 77, 77, 65, 84, 84, 72, 95, 92, 82, 77, 74, 63, 89, 86, 76, 91, 88, 75, 89, 88, 77, 102, 99, 89, 95, 92, 82, 73, 70, 60, 99, 96, 85, 91, 87, 79, 96, 93, 83, 100, 97, 86, 92, 89, 78, 104, 104, 92, 102, 101, 90, 95, 94, 83, 87, 84, 74, 97, 96, 85, 88, 87, 76, 90, 90, 78, 98, 98, 86, 66, 65, 54, 90, 90, 78, 81, 80, 69, 92, 92, 81, 109, 108, 97, 94, 93, 82, 94, 93, 82, 97, 95, 84, 87, 86, 75, 94, 93, 82, 89, 88, 77, 84, 84, 72, 109, 108, 97, 77, 77, 65, 87, 86, 75, 82, 81, 72, 89, 88, 77, 96, 95, 84, 87, 86, 75, 100, 100, 89, 75, 74, 65, 72, 74, 64, 111, 112, 103, 92, 94, 84, 84, 84, 72, 73, 72, 61, 92, 88, 80, 108, 105, 97, 88, 85, 77, 85, 81, 73, 82, 81, 72, 78, 78, 69, 60, 62, 54, 94, 96, 88, 70, 71, 64, 68, 67, 60, 69, 70, 63, 77, 78, 71, 56, 57, 50, 62, 63, 54, 66, 68, 58, 64, 64, 55, 77, 76, 70, 66, 65, 56, 66, 67, 60, 64, 63, 57, 49, 50, 41, 63, 64, 55, 53, 55, 46, 57, 59, 49, 62, 61, 52, 49, 50, 41, 62, 63, 54, 72, 74, 64, 64, 66, 56, 43, 43, 34, 45, 44, 35, 57, 57, 48, 64, 64, 55, 68, 67, 58, 64, 64, 55, 77, 76, 68, 72, 69, 61, 70, 66, 58, 102, 99, 89, 71, 68, 57, 82, 81, 70, 89, 88, 77, 64, 64, 53, 99, 99, 88, 63, 63, 52, 63, 63, 52, 77, 79, 67, 60, 59, 48, 84, 84, 72, 71, 71, 60, 70, 72, 60, 69, 69, 57, 82, 81, 70, 76, 76, 62, 97, 95, 84, 71, 68, 55, 74, 73, 62, 108, 105, 95, 83, 83, 71, 103, 102, 91, 102, 102, 88, 90, 90, 76, 76, 76, 64, 73, 72, 59, 104, 104, 90, 94, 93, 80, 69, 69, 55, 85, 85, 72, 105, 105, 92, 110, 110, 96, 104, 104, 90, 95, 95, 79, 132, 132, 118, 132, 131, 120, 105, 105, 94, 53, 52, 43, 0, 0, 0, 0, 0, 0, 38, 37, 28, 46, 45, 36, 100, 102, 92, 197, 196, 190, 245, 244, 237, 238, 237, 230, 219, 218, 212, 228, 229, 222, 227, 228, 221, 222, 223, 216, 227, 228, 221, 234, 233, 227, 232, 233, 226, 233, 234, 227, 234, 233, 227, 232, 231, 224, 234, 233, 229, 232, 230, 226, 234, 233, 229, 232, 230, 226, 236, 234, 230, 236, 234, 230, 236, 234, 230, 237, 235, 231, 239, 237, 233, 236, 234, 230, 236, 234, 230, 236, 234, 230, 233, 232, 228, 233, 231, 230, 234, 233, 229, 236, 234, 232, 236, 232, 231, 236, 234, 232, 236, 232, 231, 236, 232, 231, 238, 234, 233, 225, 221, 220, 235, 230, 230, 225, 222, 218, 242, 238, 235, 255, 255, 255, 255, 255, 255, 95, 91, 88, 75, 71, 68, 71, 67, 64, 83, 79, 78, 71, 66, 65, 85, 80, 79, 82, 78, 77, 93, 88, 88, 89, 85, 84, 89, 85, 84, 88, 84, 81, 96, 92, 91, 103, 99, 98, 88, 86, 82, 102, 100, 98, 93, 88, 88, 89, 87, 85, 89, 87, 85, 80, 78, 76, 97, 95, 93, 88, 86, 82, 96, 94, 90, 89, 88, 81, 90, 89, 82, 90, 89, 80, 104, 103, 96, 98, 97, 91, 112, 111, 105, 102, 100, 94, 104, 103, 96, 97, 96, 89, 100, 100, 91, 105, 104, 96, 68, 67, 58, 69, 68, 60, 91, 90, 82, 90, 89, 80, 111, 110, 101, 85, 85, 76, 88, 87, 78, 85, 85, 74, 104, 104, 92, 106, 106, 95, 88, 87, 76, 99, 99, 88, 82, 81, 70, 88, 87, 76, 73, 72, 61, 94, 91, 81, 82, 81, 70, 87, 84, 74, 97, 95, 84, 88, 87, 76, 87, 86, 75, 83, 83, 71, 91, 88, 77, 99, 99, 88, 91, 88, 77, 89, 86, 76, 99, 99, 88, 102, 99, 89, 83, 81, 68, 87, 85, 72, 95, 92, 82, 100, 97, 86, 92, 89, 78, 95, 92, 82, 94, 91, 81, 100, 100, 89, 80, 79, 68, 69, 69, 57, 
100, 100, 89, 92, 92, 79, 104, 104, 92, 95, 94, 83, 80, 79, 68, 102, 99, 89, 105, 105, 94, 79, 76, 65, 90, 90, 78, 82, 81, 70, 88, 85, 75, 111, 109, 98, 77, 77, 65, 95, 92, 82, 88, 85, 75, 82, 81, 70, 98, 98, 86, 96, 95, 84, 73, 72, 61, 94, 93, 82, 102, 101, 90, 71, 71, 60, 88, 87, 76, 60, 59, 48, 97, 96, 87, 88, 87, 78, 96, 95, 84, 88, 87, 78, 76, 75, 66, 91, 90, 82, 75, 74, 63, 69, 69, 57, 114, 108, 101, 78, 74, 66, 87, 86, 77, 73, 72, 63, 109, 108, 99, 73, 72, 63, 71, 71, 62, 71, 70, 64, 113, 112, 106, 91, 90, 84, 76, 77, 68, 59, 61, 51, 70, 69, 61, 73, 75, 65, 78, 78, 69, 67, 65, 59, 77, 76, 70, 70, 69, 61, 59, 57, 51, 62, 61, 52, 74, 73, 64, 67, 69, 60, 56, 57, 48, 67, 69, 60, 59, 61, 51, 83, 84, 75, 70, 71, 62, 55, 56, 47, 80, 78, 72, 61, 60, 51, 56, 55, 47, 99, 98, 90, 82, 79, 69, 88, 85, 75, 84, 83, 75, 49, 49, 38, 89, 88, 77, 63, 60, 49, 61, 58, 50, 70, 70, 59, 68, 67, 58, 70, 70, 59, 63, 63, 52, 70, 70, 59, 106, 106, 95, 68, 67, 56, 77, 77, 65, 103, 102, 91, 80, 79, 68, 71, 71, 60, 91, 91, 80, 84, 84, 72, 80, 77, 67, 77, 77, 64, 83, 81, 68, 94, 92, 79, 83, 83, 71, 89, 88, 77, 84, 84, 72, 75, 74, 63, 73, 72, 61, 94, 93, 82, 94, 93, 82, 82, 81, 70, 25, 24, 13, 0, 0, 0, 0, 0, 0, 2, 0, 0, 31, 28, 18, 202, 202, 188, 123, 122, 111, 192, 192, 183, 245, 244, 237, 232, 231, 224, 239, 238, 231, 211, 209, 205, 226, 225, 221, 222, 220, 216, 236, 234, 230, 222, 223, 218, 222, 223, 218, 226, 224, 223, 220, 221, 216, 220, 221, 216, 223, 224, 220, 227, 228, 223, 225, 225, 221, 228, 229, 224, 221, 222, 217, 229, 230, 225, 232, 230, 226, 236, 234, 230, 234, 233, 229, 232, 230, 226, 233, 232, 228, 233, 232, 228, 233, 232, 228, 238, 236, 232, 237, 235, 231, 233, 232, 228, 234, 233, 229, 233, 232, 228, 234, 233, 229, 234, 233, 229, 238, 236, 232, 234, 233, 229, 237, 235, 231, 230, 228, 224, 233, 232, 228, 237, 235, 231, 231, 229, 227, 229, 227, 223, 255, 255, 255, 255, 255, 255, 193, 189, 186, 248, 244, 240, 97, 94, 90, 77, 72, 73, 83, 79, 80, 79, 74, 76, 75, 70, 74, 75, 70, 74, 101, 96, 100, 94, 89, 91, 75, 71, 72, 106, 101, 102, 99, 97, 96, 107, 103, 100, 81, 77, 74, 88, 86, 82, 100, 99, 95, 100, 96, 91, 95, 92, 86, 84, 83, 77, 91, 90, 84, 97, 96, 89, 98, 96, 93, 75, 73, 69, 108, 104, 101, 85, 84, 78, 94, 92, 86, 90, 89, 80, 88, 87, 78, 83, 82, 73, 97, 96, 87, 88, 87, 78, 97, 96, 87, 110, 109, 100, 100, 99, 93, 89, 88, 81, 85, 84, 78, 100, 102, 92, 68, 67, 58, 113, 112, 104, 91, 90, 82, 89, 88, 79, 89, 88, 79, 110, 109, 100, 111, 110, 101, 80, 79, 70, 81, 80, 71, 99, 99, 88, 104, 104, 92, 105, 105, 94, 75, 74, 63, 100, 100, 89, 97, 96, 85, 104, 104, 92, 70, 70, 59, 92, 92, 81, 106, 106, 95, 95, 92, 82, 74, 73, 62, 91, 91, 80, 85, 82, 71, 86, 83, 72, 94, 92, 79, 91, 91, 80, 94, 92, 79, 101, 98, 88, 98, 98, 84, 104, 102, 89, 106, 103, 90, 96, 94, 81, 77, 74, 63, 82, 79, 69, 91, 88, 77, 114, 111, 100, 100, 97, 86, 94, 92, 79, 97, 97, 83, 104, 104, 90, 63, 63, 50, 95, 95, 81, 97, 96, 85, 85, 85, 74, 114, 114, 103, 92, 89, 78, 106, 106, 95, 88, 87, 76, 80, 79, 68, 90, 90, 78, 86, 83, 72, 96, 93, 83, 95, 92, 82, 99, 99, 88, 124, 121, 113, 68, 67, 58, 117, 116, 107, 83, 82, 73, 84, 83, 75, 74, 73, 64, 105, 104, 96, 80, 79, 68, 100, 100, 91, 96, 95, 86, 98, 97, 89, 106, 105, 97, 92, 92, 83, 83, 82, 73, 71, 71, 62, 89, 88, 79, 78, 80, 68, 77, 79, 67, 92, 86, 79, 87, 84, 76, 89, 86, 76, 90, 90, 78, 83, 82, 73, 85, 85, 76, 76, 75, 66, 75, 74, 67, 96, 95, 88, 85, 84, 78, 81, 80, 71, 74, 73, 64, 87, 86, 77, 67, 66, 55, 90, 89, 80, 69, 68, 60, 70, 69, 61, 74, 73, 64, 74, 73, 64, 61, 60, 51, 125, 125, 113, 48, 
47, 39, 63, 63, 52, 63, 62, 54, 68, 67, 58, 69, 68, 60, 71, 71, 62, 74, 73, 64, 78, 78, 69, 79, 76, 68, 79, 76, 68, 72, 69, 59, 77, 74, 63, 82, 79, 69, 72, 69, 59, 74, 71, 61, 88, 87, 76, 73, 70, 60, 77, 74, 63, 85, 82, 71, 194, 191, 181, 96, 93, 83, 80, 79, 68, 75, 74, 65, 75, 74, 63, 70, 70, 59, 71, 71, 60, 91, 91, 80, 97, 96, 85, 89, 88, 77, 102, 101, 90, 78, 78, 65, 42, 42, 29, 33, 31, 18, 80, 77, 67, 0, 0, 0, 91, 91, 80, 19, 17, 6, 6, 6, 0, 20, 20, 8, 103, 102, 91, 174, 173, 162, 230, 229, 218, 232, 232, 220, 227, 227, 216, 225, 225, 213, 215, 214, 203, 218, 217, 208, 217, 216, 207, 225, 224, 216, 218, 217, 210, 223, 221, 215, 223, 221, 217, 222, 223, 218, 223, 221, 217, 213, 214, 209, 223, 224, 220, 226, 226, 224, 236, 237, 232, 223, 224, 220, 226, 226, 224, 223, 224, 222, 225, 225, 223, 226, 226, 224, 225, 225, 223, 226, 226, 224, 226, 227, 222, 222, 223, 218, 229, 230, 225, 230, 231, 226, 227, 226, 222, 233, 232, 228, 232, 230, 226, 230, 228, 224, 227, 226, 222, 234, 233, 229, 232, 230, 226, 232, 230, 226, 233, 232, 228, 232, 230, 226, 230, 228, 224, 236, 234, 230, 234, 233, 229, 233, 232, 228, 231, 229, 225, 229, 225, 222, 253, 251, 247, 255, 255, 255, 210, 206, 205, 46, 42, 41, 54, 50, 49, 70, 65, 66, 71, 69, 70, 73, 70, 71, 85, 83, 84, 96, 93, 94, 90, 87, 90, 91, 85, 89, 104, 101, 104, 85, 83, 86, 97, 95, 95, 99, 97, 98, 104, 102, 100, 94, 91, 90, 104, 102, 100, 96, 94, 92, 100, 98, 97, 93, 89, 86, 100, 96, 93, 77, 73, 67, 94, 90, 85, 75, 72, 64, 102, 99, 91, 110, 107, 99, 92, 92, 83, 97, 96, 89, 104, 103, 96, 98, 97, 91, 95, 92, 84, 95, 93, 87, 97, 96, 87, 95, 94, 85, 96, 95, 84, 89, 88, 77, 90, 89, 80, 108, 107, 98, 78, 78, 67, 90, 89, 80, 95, 94, 85, 106, 105, 97, 114, 114, 105, 102, 101, 92, 92, 92, 83, 103, 102, 91, 77, 76, 68, 89, 88, 79, 78, 78, 67, 77, 77, 65, 83, 82, 73, 85, 85, 74, 102, 101, 90, 83, 83, 71, 76, 76, 64, 71, 71, 58, 78, 78, 65, 108, 107, 96, 95, 94, 83, 91, 91, 80, 87, 87, 73, 95, 95, 81, 108, 107, 94, 90, 90, 76, 76, 76, 62, 89, 89, 75, 102, 100, 87, 89, 87, 74, 84, 84, 70, 107, 104, 92, 103, 101, 88, 99, 96, 83, 93, 90, 78, 87, 85, 72, 100, 97, 84, 86, 83, 70, 71, 68, 57, 109, 106, 96, 109, 106, 96, 108, 105, 95, 85, 82, 69, 108, 107, 94, 120, 117, 106, 113, 113, 102, 99, 99, 88, 83, 83, 71, 100, 100, 89, 117, 116, 107, 116, 115, 106, 100, 97, 86, 98, 98, 86, 110, 107, 99, 125, 122, 114, 117, 114, 104, 96, 93, 83, 89, 86, 76, 95, 92, 82, 80, 77, 67, 97, 94, 86, 104, 103, 94, 78, 78, 69, 104, 103, 94, 97, 96, 87, 88, 87, 78, 100, 100, 91, 83, 83, 71, 92, 92, 83, 91, 91, 80, 97, 96, 85, 99, 98, 90, 112, 111, 103, 87, 86, 77, 78, 78, 69, 84, 83, 75, 85, 85, 74, 82, 81, 70, 104, 102, 91, 102, 99, 89, 91, 88, 77, 95, 92, 82, 89, 86, 76, 88, 87, 78, 95, 94, 85, 106, 105, 97, 114, 114, 105, 89, 88, 79, 100, 100, 91, 64, 64, 55, 88, 85, 75, 85, 81, 73, 89, 86, 78, 80, 79, 70, 81, 80, 71, 84, 83, 75, 78, 78, 69, 95, 94, 85, 83, 83, 71, 74, 73, 64, 98, 98, 86, 68, 67, 56, 74, 73, 62, 73, 72, 61, 85, 85, 74, 85, 85, 74, 72, 69, 59, 99, 96, 85, 108, 105, 97, 80, 77, 67, 83, 81, 70, 87, 84, 74, 84, 83, 75, 74, 73, 64, 62, 62, 50, 32, 31, 20, 21, 20, 12, 0, 0, 0, 14, 10, 2, 93, 90, 82, 0, 0, 0, 0, 0, 0, 2, 1, 0, 70, 70, 59, 43, 43, 32, 87, 86, 75, 123, 122, 111, 131, 130, 119, 184, 184, 173, 215, 214, 203, 222, 219, 209, 209, 207, 194, 219, 216, 205, 210, 209, 198, 207, 204, 194, 209, 206, 196, 205, 205, 194, 208, 207, 196, 209, 208, 197, 211, 211, 199, 209, 208, 197, 218, 218, 206, 213, 213, 202, 216, 215, 206, 222, 219, 211, 218, 217, 208, 219, 218, 212, 219, 218, 
[raw RGB pixel data from an embedded image omitted]
59, 48, 73, 72, 61, 67, 66, 57, 66, 65, 56, 60, 59, 48, 74, 73, 64, 74, 73, 64, 69, 69, 57, 76, 76, 64, 86, 83, 72, 95, 94, 83, 74, 71, 61, 60, 58, 45, 81, 78, 68, 83, 81, 68, 90, 90, 78, 85, 85, 74, 89, 88, 77, 68, 66, 55, 84, 84, 72, 98, 98, 86, 57, 57, 46, 93, 90, 80, 100, 97, 86, 73, 72, 61, 80, 79, 68, 89, 88, 77, 81, 80, 69, 94, 93, 82, 67, 66, 55, 90, 90, 78, 83, 83, 71, 66, 65, 54, 90, 90, 78, 100, 100, 89, 80, 79, 68, 91, 91, 80, 78, 78, 67, 89, 88, 77, 94, 93, 82, 82, 81, 70, 68, 67, 56, 95, 94, 83, 68, 67, 56, 78, 78, 67, 87, 86, 75, 92, 92, 81, 94, 93, 82, 91, 91, 80, 106, 106, 95, 74, 73, 62, 97, 96, 87, 76, 76, 64, 71, 71, 60, 70, 70, 59, 104, 104, 92, 68, 67, 56, 63, 63, 52, 89, 88, 77, 62, 62, 50, 98, 98, 86, 110, 107, 97, 109, 108, 97, 104, 102, 91, 96, 94, 81, 78, 75, 64, 92, 89, 78, 106, 103, 92, 102, 101, 90, 63, 63, 52, 100, 100, 89, 113, 113, 102, 76, 76, 64, 90, 90, 78, 54, 53, 42, 88, 87, 76, 96, 95, 84, 83, 81, 68, 66, 64, 51, 137, 134, 124, 67, 66, 55, 66, 65, 52, 100, 97, 86, 106, 106, 95, 131, 130, 119, 117, 116, 105, 121, 121, 110, 69, 69, 57, 121, 121, 110, 120, 120, 109, 113, 113, 102, 108, 107, 96, 83, 83, 71, 80, 79, 68, 64, 64, 53, 65, 62, 52, 50, 47, 39, 97, 95, 84, 53, 50, 40, 75, 73, 62, 85, 85, 74, 52, 51, 40, 106, 106, 95, 70, 70, 59, 111, 109, 96, 80, 77, 67, 89, 88, 77, 121, 118, 107, 57, 54, 43, 82, 81, 70, 127, 127, 116, 75, 74, 63, 67, 65, 52, 116, 114, 101, 85, 82, 69, 103, 101, 88, 99, 96, 83, 65, 62, 52, 93, 90, 80, 85, 82, 71, 121, 118, 107, 127, 124, 113, 97, 95, 84, 70, 67, 56, 129, 126, 116, 85, 82, 71, 134, 131, 120, 92, 89, 78, 91, 88, 77, 101, 98, 88, 96, 93, 83, 122, 119, 111, 75, 73, 62, 80, 77, 67, 79, 76, 65, 68, 65, 57, 102, 99, 91, 118, 117, 108, 113, 112, 104, 81, 80, 71, 81, 80, 71, 102, 101, 92, 160, 159, 148, 97, 96, 85, 105, 105, 94, 83, 81, 70, 89, 86, 76, 104, 104, 92, 59, 56, 46, 87, 85, 72, 94, 91, 81, 60, 58, 45, 118, 117, 106, 75, 73, 62, 125, 123, 112, 78, 75, 64, 88, 85, 75, 125, 125, 113, 158, 157, 146, 85, 85, 74, 85, 85, 74, 123, 122, 111, 119, 119, 107, 117, 116, 105, 255, 255, 232, 255, 255, 250, 255, 255, 245, 255, 255, 245, 255, 255, 247, 255, 255, 244, 255, 255, 244, 253, 252, 243, 254, 254, 242, 253, 252, 241, 252, 251, 240, 252, 251, 240, 255, 255, 248, 255, 255, 247, 255, 255, 244, 255, 255, 244, 255, 255, 245, 255, 255, 247, 255, 255, 248, 253, 252, 243, 171, 171, 162, 0, 0, 0, 255, 255, 254, 231, 230, 219, 242, 242, 231, 244, 243, 232, 247, 247, 235, 249, 249, 238, 253, 252, 241, 254, 254, 242, 255, 255, 245, 255, 255, 248, 255, 255, 245, 35, 34, 26, 84, 83, 75, 85, 85, 76, 85, 85, 76, 91, 90, 82, 81, 83, 73, 80, 82, 72, 74, 76, 66, 72, 73, 66, 70, 71, 62, 71, 72, 65, 65, 66, 59, 67, 69, 60, 58, 60, 50, 65, 67, 57, 64, 64, 55, 65, 67, 57, 67, 66, 55, 64, 64, 53, 67, 64, 56, 61, 60, 51, 61, 60, 51, 63, 63, 52, 68, 65, 57, 59, 58, 49, 63, 63, 52, 73, 70, 60, 75, 74, 63, 74, 73, 62, 68, 67, 56, 63, 63, 52, 68, 67, 56, 69, 69, 57, 82, 81, 70, 82, 81, 70, 78, 78, 67, 77, 74, 63, 89, 86, 76, 74, 71, 61, 91, 88, 77, 79, 76, 65, 67, 64, 54, 60, 57, 47, 98, 98, 86, 78, 75, 64, 70, 67, 56, 103, 102, 91, 77, 77, 65, 76, 76, 64, 90, 90, 78, 70, 67, 56, 78, 78, 67, 89, 88, 79, 73, 72, 61, 87, 86, 77, 77, 77, 65, 74, 73, 64, 92, 92, 83, 87, 86, 75, 78, 78, 67, 89, 88, 77, 85, 85, 74, 83, 83, 71, 78, 78, 67, 100, 100, 89, 99, 99, 88, 60, 59, 48, 91, 91, 80, 84, 84, 72, 100, 100, 89, 84, 84, 72, 91, 91, 80, 87, 86, 75, 96, 95, 84, 94, 93, 82, 74, 73, 62, 82, 81, 70, 68, 67, 56, 69, 69, 57, 83, 83, 71, 63, 63, 52, 95, 94, 
83, 87, 86, 75, 98, 98, 86, 49, 51, 39, 105, 105, 94, 70, 70, 59, 97, 96, 85, 106, 103, 92, 97, 96, 85, 79, 76, 65, 74, 72, 59, 96, 95, 84, 105, 105, 94, 92, 92, 81, 72, 69, 59, 108, 107, 96, 114, 114, 103, 76, 76, 64, 63, 63, 52, 78, 78, 67, 105, 105, 94, 140, 140, 128, 97, 96, 85, 107, 104, 94, 55, 55, 41, 110, 107, 97, 80, 77, 67, 92, 92, 79, 113, 110, 99, 68, 66, 55, 117, 116, 105, 99, 99, 88, 117, 116, 105, 82, 79, 69, 105, 105, 94, 66, 65, 54, 84, 84, 72, 71, 71, 60, 61, 60, 49, 66, 63, 53, 103, 102, 91, 66, 63, 53, 77, 73, 65, 121, 118, 107, 68, 66, 55, 60, 57, 47, 102, 99, 89, 88, 85, 75, 114, 111, 100, 84, 84, 72, 71, 68, 57, 110, 107, 97, 100, 97, 86, 97, 95, 84, 80, 77, 67, 66, 65, 54, 116, 115, 104, 64, 64, 53, 90, 90, 78, 73, 71, 58, 97, 95, 84, 103, 100, 90, 103, 101, 88, 82, 79, 69, 104, 102, 91, 109, 106, 96, 123, 120, 110, 120, 117, 106, 99, 96, 85, 79, 76, 65, 107, 104, 94, 143, 140, 129, 79, 76, 65, 79, 76, 65, 102, 99, 89, 93, 90, 80, 72, 69, 59, 92, 88, 80, 78, 78, 67, 81, 78, 68, 92, 92, 81, 106, 102, 94, 106, 102, 94, 103, 102, 93, 127, 126, 118, 74, 71, 61, 97, 96, 85, 134, 134, 123, 138, 137, 126, 151, 150, 139, 69, 69, 57, 97, 96, 85, 123, 122, 111, 71, 71, 60, 120, 117, 106, 67, 64, 54, 117, 115, 102, 155, 152, 139, 89, 88, 77, 95, 92, 82, 131, 130, 119, 89, 86, 76, 83, 83, 71, 99, 98, 90, 107, 104, 94, 95, 94, 83, 116, 115, 104, 92, 92, 81, 76, 76, 64, 114, 114, 103, 5, 4, 0, 185, 184, 178, 216, 215, 206, 225, 224, 216, 248, 248, 239, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 225, 224, 216, 53, 52, 43, 0, 0, 0, 206, 206, 197, 227, 227, 218, 242, 242, 231, 240, 240, 228, 246, 245, 234, 247, 247, 235, 248, 248, 237, 251, 250, 239, 254, 254, 242, 255, 255, 245, 255, 255, 248, 255, 255, 247, 190, 190, 179, 80, 79, 70, 81, 80, 71, 78, 78, 69, 92, 92, 83, 76, 77, 70, 82, 81, 74, 74, 76, 68, 76, 77, 68, 69, 70, 61, 71, 73, 63, 74, 76, 66, 59, 61, 53, 60, 62, 54, 60, 62, 52, 63, 64, 55, 67, 69, 60, 63, 64, 55, 61, 60, 51, 60, 62, 50, 59, 58, 47, 56, 55, 47, 66, 65, 56, 63, 63, 52, 57, 57, 48, 66, 65, 56, 69, 69, 57, 66, 65, 56, 61, 60, 49, 56, 56, 45, 56, 56, 45, 62, 62, 50, 64, 64, 53, 68, 67, 56, 69, 69, 57, 75, 74, 63, 87, 86, 75, 62, 62, 50, 82, 79, 69, 91, 88, 77, 87, 84, 74, 74, 71, 61, 80, 77, 67, 93, 90, 78, 81, 80, 69, 77, 77, 65, 85, 85, 74, 99, 99, 88, 74, 73, 62, 81, 80, 69, 84, 84, 72, 90, 90, 78, 82, 81, 70, 82, 81, 70, 68, 67, 56, 82, 81, 70, 81, 80, 69, 74, 73, 62, 69, 69, 57, 78, 78, 67, 81, 80, 69, 81, 80, 69, 76, 76, 64, 85, 85, 74, 89, 88, 77, 73, 72, 61, 80, 79, 68, 91, 91, 80, 91, 91, 80, 89, 88, 77, 69, 69, 57, 88, 87, 76, 83, 83, 71, 105, 104, 96, 82, 81, 70, 80, 79, 68, 90, 90, 78, 104, 104, 92, 95, 97, 85, 99, 99, 88, 69, 69, 57, 121, 121, 110, 94, 93, 82, 91, 91, 80, 97, 96, 85, 71, 71, 60, 67, 66, 55, 71, 71, 60, 88, 87, 76, 67, 66, 55, 49, 49, 38, 102, 101, 90, 72, 69, 59, 71, 68, 57, 125, 125, 113, 103, 102, 91, 76, 76, 64, 124, 123, 112, 89, 88, 77, 69, 69, 57, 90, 90, 78, 90, 90, 78, 103, 102, 91, 91, 91, 80, 99, 96, 85, 97, 95, 84, 103, 100, 90, 78, 75, 64, 74, 71, 61, 83, 83, 71, 74, 71, 61, 66, 65, 54, 96, 95, 84, 64, 64, 53, 112, 112, 100, 56, 53, 42, 113, 110, 99, 67, 66, 55, 64, 64, 53, 63, 60, 49, 78, 78, 67, 113, 110, 99, 71, 68, 57, 141, 141, 129, 92, 92, 83, 88, 87, 78, 102, 99, 89, 129, 126, 116, 104, 102, 91, 86, 83, 72, 106, 103, 92, 78, 78, 67, 104, 102, 91, 79, 76, 64, 111, 109, 98, 104, 
102, 91, 95, 93, 80, 123, 120, 110, 96, 95, 84, 63, 63, 52, 122, 119, 109, 124, 121, 111, 115, 112, 102, 75, 74, 63, 125, 125, 113, 104, 102, 91, 139, 136, 126, 139, 136, 126, 130, 127, 117, 127, 124, 113, 80, 77, 67, 99, 96, 85, 93, 90, 80, 97, 95, 84, 101, 98, 88, 137, 134, 124, 83, 81, 70, 95, 93, 80, 97, 95, 84, 69, 69, 57, 98, 98, 86, 139, 136, 126, 119, 119, 107, 125, 123, 112, 160, 158, 147, 124, 123, 112, 99, 95, 87, 63, 63, 52, 131, 130, 119, 132, 131, 120, 95, 94, 83, 66, 63, 53, 103, 102, 91, 116, 115, 104, 116, 113, 103, 127, 124, 113, 78, 75, 64, 143, 140, 128, 72, 69, 57, 78, 75, 62, 131, 128, 118, 120, 117, 106, 126, 126, 114, 53, 50, 40, 153, 152, 141, 126, 126, 114, 120, 120, 109, 131, 130, 119, 90, 90, 78, 68, 67, 56, 78, 78, 65, 81, 81, 67, 222, 221, 210, 236, 235, 224, 223, 222, 211, 234, 234, 223, 241, 238, 227, 229, 226, 218, 237, 234, 224, 221, 218, 210, 205, 204, 196, 194, 191, 183, 192, 189, 181, 178, 175, 167, 121, 118, 110, 127, 123, 115, 98, 97, 89, 108, 107, 98, 181, 180, 171, 242, 242, 233, 238, 237, 228, 220, 220, 211, 234, 234, 225, 236, 235, 226, 244, 243, 232, 245, 244, 233, 247, 247, 235, 244, 243, 232, 255, 255, 245, 255, 255, 245, 255, 255, 246, 255, 255, 252, 255, 255, 248, 255, 255, 245, 19, 19, 7, 94, 93, 84, 85, 85, 76, 84, 83, 75, 89, 88, 79, 86, 88, 78, 81, 80, 71, 76, 77, 68, 73, 75, 65, 67, 69, 60, 78, 80, 70, 73, 72, 63, 69, 68, 60, 70, 71, 62, 66, 65, 56, 63, 62, 54, 59, 61, 51, 63, 64, 55, 61, 60, 49, 64, 64, 53, 61, 60, 49, 63, 63, 52, 62, 62, 50, 66, 65, 56, 61, 60, 51, 64, 64, 55, 62, 61, 52, 64, 64, 55, 61, 60, 49, 66, 65, 54, 66, 65, 54, 56, 56, 45, 67, 66, 55, 62, 62, 50, 73, 72, 61, 50, 50, 39, 83, 83, 71, 66, 65, 54, 66, 65, 54, 74, 73, 62, 92, 92, 81, 94, 91, 81, 92, 89, 78, 75, 73, 62, 66, 65, 54, 78, 78, 67, 66, 65, 54, 108, 107, 96, 90, 90, 78, 64, 64, 53, 68, 67, 56, 73, 72, 61, 95, 94, 83, 77, 77, 65, 67, 66, 55, 102, 101, 90, 85, 85, 74, 62, 62, 50, 78, 78, 67, 80, 79, 68, 91, 91, 80, 73, 72, 61, 77, 77, 65, 88, 87, 76, 80, 79, 68, 76, 76, 64, 63, 63, 52, 88, 87, 76, 89, 88, 77, 63, 63, 52, 94, 93, 82, 99, 99, 88, 89, 88, 77, 70, 70, 59, 81, 80, 69, 97, 96, 85, 68, 67, 56, 103, 102, 91, 99, 99, 88, 94, 93, 82, 67, 66, 55, 90, 90, 78, 61, 60, 49, 106, 106, 95, 78, 78, 67, 100, 100, 89, 69, 69, 57, 84, 84, 72, 63, 63, 52, 117, 116, 105, 106, 106, 95, 111, 110, 99, 100, 100, 89, 84, 84, 72, 113, 113, 102, 125, 125, 111, 112, 112, 100, 128, 128, 117, 114, 114, 103, 91, 91, 80, 108, 107, 96, 120, 120, 109, 86, 83, 72, 69, 69, 57, 49, 46, 35, 78, 75, 64, 113, 110, 99, 99, 96, 85, 111, 109, 98, 88, 87, 76, 95, 92, 82, 92, 92, 81, 118, 117, 106, 80, 79, 68, 98, 98, 86, 119, 119, 107, 75, 74, 63, 56, 56, 45, 77, 77, 65, 85, 85, 74, 116, 115, 104, 67, 66, 55, 83, 83, 71, 116, 115, 104, 110, 109, 98, 109, 106, 96, 101, 98, 88, 113, 110, 99, 120, 117, 106, 72, 69, 59, 65, 62, 52, 99, 96, 85, 135, 132, 121, 101, 98, 88, 111, 109, 98, 94, 91, 81, 79, 76, 65, 68, 67, 56, 60, 59, 48, 117, 116, 105, 114, 114, 103, 82, 81, 70, 91, 88, 77, 75, 73, 62, 81, 78, 68, 64, 61, 50, 92, 89, 78, 101, 98, 88, 78, 75, 64, 122, 119, 109, 79, 76, 65, 103, 100, 90, 130, 127, 117, 120, 117, 106, 66, 63, 53, 82, 79, 69, 132, 129, 119, 109, 106, 96, 74, 73, 62, 138, 137, 126, 114, 114, 103, 78, 78, 67, 97, 96, 85, 80, 79, 70, 70, 69, 61, 85, 85, 74, 67, 66, 55, 82, 81, 70, 160, 159, 148, 135, 135, 124, 81, 80, 69, 71, 71, 60, 138, 137, 126, 95, 92, 82, 83, 81, 70, 116, 114, 101, 128, 125, 112, 125, 123, 110, 61, 59, 46, 132, 130, 117, 75, 73, 60, 97, 95, 84, 153, 
151, 140, 151, 148, 138, 80, 79, 68, 76, 75, 66, 120, 119, 111, 67, 66, 55, 92, 92, 81, 105, 105, 94, 134, 134, 123, 62, 64, 50, 222, 219, 209, 219, 216, 205, 230, 227, 217, 229, 226, 216, 229, 226, 218, 231, 228, 220, 229, 226, 218, 229, 226, 218, 233, 229, 221, 233, 229, 221, 230, 227, 219, 233, 229, 221, 229, 226, 218, 231, 228, 220, 231, 228, 220, 233, 229, 221, 231, 228, 220, 235, 232, 224, 233, 232, 224, 236, 235, 226, 236, 233, 225, 239, 238, 229, 242, 242, 231, 244, 243, 232, 249, 249, 238, 252, 251, 240, 254, 254, 242, 255, 255, 251, 255, 255, 249, 255, 255, 251, 255, 255, 244, 162, 162, 151, 74, 73, 64, 80, 82, 72, 84, 85, 76, 88, 90, 80, 80, 82, 72, 73, 75, 65, 78, 80, 70, 73, 75, 65, 71, 70, 64, 65, 67, 57, 63, 62, 54, 65, 67, 57, 63, 62, 56, 70, 69, 61, 56, 57, 48, 59, 58, 49, 60, 59, 48, 59, 61, 49, 63, 63, 52, 59, 58, 47, 63, 63, 52, 54, 53, 42, 68, 67, 56, 53, 52, 41, 60, 59, 50, 63, 62, 54, 62, 61, 52, 61, 60, 49, 67, 66, 55, 64, 64, 53, 67, 66, 55, 64, 64, 53, 66, 65, 54, 70, 70, 59, 62, 62, 50, 59, 58, 47, 78, 78, 67, 63, 63, 52, 73, 72, 61, 80, 79, 68, 95, 94, 83, 66, 65, 54, 90, 90, 78, 56, 56, 45, 94, 93, 82, 83, 83, 71, 99, 99, 88, 71, 71, 60, 96, 95, 84, 77, 77, 65, 67, 66, 55, 90, 90, 78, 71, 71, 60, 73, 72, 63, 89, 88, 79, 84, 83, 75, 94, 93, 82, 74, 73, 62, 71, 71, 60, 73, 72, 61, 82, 81, 70, 66, 65, 54, 91, 91, 80, 78, 78, 67, 75, 74, 63, 78, 78, 67, 83, 83, 71, 74, 73, 62, 75, 74, 63, 68, 67, 56, 71, 71, 60, 89, 88, 77, 80, 79, 68, 104, 104, 92, 77, 77, 65, 64, 64, 53, 102, 101, 90, 75, 74, 63, 61, 60, 49, 95, 94, 83, 96, 95, 84, 61, 60, 49, 123, 122, 111, 68, 67, 56, 71, 73, 61, 103, 102, 91, 116, 115, 104, 70, 70, 59, 111, 110, 99, 63, 65, 53, 98, 98, 86, 61, 60, 49, 85, 85, 74, 104, 104, 90, 109, 108, 97, 120, 120, 109, 70, 70, 59, 61, 60, 49, 75, 74, 63, 116, 115, 104, 106, 106, 95, 67, 66, 55, 111, 110, 99, 113, 113, 102, 109, 106, 96, 96, 93, 83, 82, 79, 69, 130, 127, 117, 111, 109, 98, 67, 64, 54, 133, 133, 121, 87, 86, 75, 70, 70, 59, 99, 99, 88, 118, 117, 106, 117, 116, 105, 52, 51, 40, 100, 100, 89, 83, 83, 71, 62, 62, 50, 83, 83, 71, 124, 123, 112, 79, 76, 65, 119, 119, 107, 102, 101, 90, 99, 96, 85, 72, 69, 59, 107, 104, 94, 110, 107, 97, 134, 131, 120, 60, 59, 48, 106, 106, 95, 68, 67, 56, 116, 115, 104, 56, 56, 45, 83, 83, 71, 64, 64, 53, 104, 104, 92, 137, 136, 125, 95, 94, 83, 114, 114, 103, 74, 73, 62, 87, 84, 74, 77, 74, 63, 88, 85, 75, 133, 133, 121, 93, 90, 80, 92, 89, 78, 144, 141, 131, 145, 142, 132, 70, 67, 56, 117, 114, 104, 131, 129, 116, 131, 128, 118, 59, 56, 46, 79, 76, 65, 117, 114, 104, 117, 114, 104, 103, 102, 91, 90, 90, 78, 68, 67, 56, 85, 85, 74, 149, 149, 138, 107, 104, 94, 124, 121, 111, 102, 101, 90, 80, 79, 68, 130, 129, 118, 139, 138, 127, 83, 81, 70, 82, 79, 69, 136, 133, 123, 65, 62, 52, 135, 132, 121, 95, 92, 82, 72, 69, 57, 97, 95, 82, 71, 68, 55, 87, 82, 70, 111, 109, 96, 78, 75, 64, 123, 120, 110, 130, 127, 117, 108, 105, 97, 105, 104, 96, 154, 154, 142, 111, 110, 99, 75, 74, 63, 106, 106, 95, 72, 69, 57, 57, 57, 44, 81, 79, 66, 229, 227, 214, 231, 229, 216, 229, 226, 216, 229, 226, 216, 229, 226, 216, 229, 226, 218, 233, 229, 221, 228, 225, 217, 230, 227, 219, 225, 222, 214, 231, 228, 220, 234, 230, 223, 233, 229, 221, 234, 230, 223, 233, 229, 221, 230, 227, 219, 231, 228, 220, 236, 233, 225, 235, 232, 224, 235, 232, 224, 238, 237, 228, 242, 242, 231, 245, 244, 235, 248, 248, 237, 252, 251, 240, 254, 254, 242, 255, 255, 247, 255, 255, 249, 255, 255, 254, 255, 255, 255, 208, 210, 198, 22, 24, 14, 84, 85, 76, 80, 82, 
72, 79, 81, 71, 86, 88, 78, 78, 80, 70, 86, 88, 78, 80, 82, 72, 71, 72, 65, 69, 70, 63, 64, 66, 56, 64, 64, 55, 63, 64, 55, 64, 64, 55, 63, 62, 54, 66, 65, 56, 69, 68, 60, 57, 57, 48, 50, 50, 41, 64, 64, 55, 63, 63, 52, 63, 63, 52, 61, 60, 49, 61, 60, 49, 50, 50, 39, 61, 60, 49, 63, 63, 52, 60, 59, 48, 68, 67, 56, 54, 53, 42, 59, 61, 49, 60, 62, 50, 61, 60, 49, 60, 59, 48, 64, 64, 53, 74, 73, 62, 62, 62, 50, 70, 70, 59, 82, 81, 70, 69, 69, 57, 48, 48, 36, 80, 79, 68, 91, 91, 80, 83, 83, 71, 83, 83, 71, 75, 74, 63, 85, 85, 74, 98, 98, 86, 92, 92, 81, 69, 69, 57, 96, 95, 84, 103, 102, 91, 77, 77, 65, 74, 73, 62, 90, 90, 78, 75, 74, 65, 69, 68, 60, 78, 78, 67, 80, 79, 68, 75, 74, 63, 75, 74, 63, 78, 78, 67, 83, 83, 71, 70, 70, 59, 84, 84, 72, 95, 94, 83, 88, 87, 76, 91, 91, 80, 97, 96, 85, 73, 72, 61, 73, 72, 61, 91, 91, 80, 66, 65, 54, 76, 76, 64, 56, 56, 45, 88, 87, 76, 85, 85, 74, 55, 55, 43, 80, 79, 68, 98, 98, 86, 81, 83, 71, 70, 70, 59, 112, 112, 100, 97, 99, 86, 63, 63, 52, 95, 97, 85, 108, 107, 96, 59, 58, 47, 110, 109, 98, 99, 101, 89, 91, 93, 81, 59, 58, 47, 81, 80, 69, 109, 108, 97, 110, 109, 98, 67, 66, 55, 73, 72, 61, 60, 59, 48, 83, 85, 72, 94, 93, 82, 119, 119, 107, 73, 72, 61, 61, 60, 49, 75, 74, 63, 114, 114, 103, 56, 56, 45, 104, 102, 91, 112, 112, 100, 133, 133, 121, 82, 81, 70, 108, 107, 96, 111, 109, 98, 100, 100, 89, 66, 65, 54, 69, 69, 57, 119, 119, 105, 57, 57, 46, 96, 95, 84, 87, 86, 75, 74, 73, 62, 56, 56, 45, 42, 42, 31, 124, 123, 112, 99, 99, 88, 106, 106, 95, 73, 72, 61, 83, 81, 70, 94, 91, 81, 122, 119, 109, 87, 84, 74, 114, 111, 100, 102, 99, 89, 71, 71, 60, 118, 117, 106, 67, 66, 55, 139, 138, 127, 89, 88, 77, 63, 63, 52, 125, 125, 113, 125, 125, 113, 99, 99, 88, 75, 74, 63, 64, 64, 53, 108, 107, 96, 70, 70, 59, 124, 121, 111, 74, 73, 62, 64, 61, 50, 124, 121, 111, 86, 83, 72, 95, 92, 82, 135, 132, 121, 96, 93, 83, 82, 79, 69, 72, 69, 59, 129, 126, 116, 80, 77, 67, 80, 77, 67, 110, 109, 98, 133, 133, 121, 128, 128, 117, 118, 117, 106, 76, 76, 64, 100, 100, 89, 108, 107, 96, 100, 100, 89, 82, 81, 70, 108, 107, 96, 96, 93, 83, 74, 71, 61, 150, 147, 136, 77, 74, 63, 65, 62, 52, 127, 124, 113, 116, 114, 101, 132, 130, 117, 78, 75, 62, 137, 135, 122, 149, 146, 133, 80, 78, 65, 65, 62, 52, 67, 64, 54, 78, 75, 64, 130, 127, 119, 153, 150, 142, 103, 100, 92, 95, 94, 85, 132, 131, 122, 91, 90, 82, 97, 95, 84, 97, 95, 84, 95, 93, 80, 145, 143, 128, 229, 227, 214, 229, 227, 214, 228, 225, 215, 229, 226, 216, 229, 226, 216, 225, 223, 212, 229, 226, 216, 233, 229, 221, 233, 229, 221, 231, 228, 220, 234, 230, 223, 233, 229, 221, 233, 229, 221, 230, 227, 219, 234, 230, 223, 231, 228, 220, 231, 228, 220, 234, 234, 225, 236, 233, 225, 234, 234, 225, 242, 242, 233, 245, 244, 235, 248, 248, 239, 255, 255, 246, 255, 255, 245, 255, 255, 247, 255, 255, 251, 255, 255, 254, 255, 255, 255, 255, 255, 255, 9, 11, 1, 86, 88, 76, 76, 77, 68, 77, 78, 69, 87, 89, 79, 77, 78, 69, 80, 82, 72, 76, 77, 68, 80, 82, 72, 70, 71, 64, 73, 71, 65, 64, 66, 56, 73, 72, 63, 62, 63, 54, 55, 54, 46, 57, 59, 49, 61, 60, 51, 61, 60, 51, 63, 62, 54, 60, 59, 50, 68, 67, 58, 55, 54, 46, 62, 61, 52, 57, 57, 48, 60, 59, 50, 61, 60, 49, 54, 53, 44, 69, 68, 60, 47, 46, 35, 62, 62, 50, 57, 57, 46, 58, 60, 48, 58, 60, 48, 64, 64, 53, 63, 63, 52, 67, 66, 55, 75, 74, 63, 70, 70, 59, 62, 62, 50, 63, 63, 52, 66, 65, 54, 57, 57, 46, 81, 80, 69, 97, 96, 85, 78, 78, 67, 78, 78, 67, 100, 100, 89, 91, 91, 80, 69, 69, 57, 71, 71, 60, 91, 91, 80, 71, 71, 60, 83, 83, 71, 88, 87, 78, 83, 82, 73, 95, 94, 85, 94, 93, 84, 88, 87, 
78, 85, 85, 74, 87, 86, 75, 54, 53, 42, 69, 69, 57, 85, 85, 74, 87, 86, 75, 84, 84, 72, 77, 77, 65, 95, 94, 83, 85, 85, 74, 67, 66, 55, 99, 99, 88, 84, 84, 72, 70, 70, 59, 94, 93, 82, 82, 81, 70, 83, 83, 71, 83, 82, 73, 90, 90, 78, 64, 64, 53, 84, 83, 75, 89, 88, 79, 68, 67, 56, 84, 84, 72, 114, 114, 105, 67, 66, 55, 75, 74, 63, 117, 116, 105, 104, 104, 92, 111, 110, 99, 89, 88, 77, 73, 72, 61, 78, 78, 67, 70, 70, 59, 96, 95, 84, 117, 116, 105, 67, 66, 55, 85, 85, 74, 110, 109, 98, 108, 107, 96, 73, 72, 61, 99, 101, 89, 95, 97, 85, 119, 119, 107, 109, 108, 97, 99, 99, 88, 73, 72, 61, 131, 130, 119, 69, 69, 57, 69, 69, 57, 66, 65, 54, 40, 39, 28, 68, 67, 56, 63, 63, 52, 139, 138, 127, 61, 60, 49, 123, 122, 111, 109, 109, 95, 77, 74, 63, 96, 95, 84, 62, 62, 50, 56, 56, 45, 120, 120, 109, 119, 119, 107, 70, 70, 59, 77, 77, 65, 106, 106, 95, 84, 84, 72, 110, 109, 98, 125, 123, 112, 111, 109, 98, 91, 88, 77, 101, 98, 88, 104, 104, 92, 120, 117, 106, 110, 109, 98, 46, 45, 34, 104, 104, 92, 114, 114, 103, 111, 110, 99, 80, 79, 68, 116, 115, 104, 98, 98, 86, 112, 112, 100, 56, 56, 45, 76, 76, 64, 71, 71, 60, 132, 131, 120, 104, 102, 91, 85, 82, 71, 68, 66, 55, 86, 83, 72, 87, 84, 74, 85, 82, 71, 88, 87, 76, 82, 81, 70, 94, 93, 82, 90, 90, 78, 118, 116, 105, 91, 91, 80, 110, 109, 98, 100, 100, 89, 141, 141, 129, 146, 145, 134, 89, 88, 77, 128, 128, 117, 125, 125, 113, 118, 117, 106, 70, 70, 59, 92, 92, 79, 99, 96, 83, 132, 130, 117, 142, 139, 126, 89, 86, 76, 123, 120, 110, 86, 83, 72, 80, 77, 67, 99, 96, 85, 99, 96, 85, 82, 79, 69, 110, 108, 95, 123, 120, 110, 125, 119, 110, 123, 120, 110, 79, 76, 65, 107, 104, 94, 92, 89, 78, 152, 149, 141, 123, 122, 113, 72, 69, 61, 85, 82, 71, 65, 62, 54, 106, 106, 95, 123, 122, 111, 93, 90, 78, 102, 100, 85, 231, 228, 218, 231, 228, 218, 229, 226, 216, 229, 226, 216, 229, 226, 218, 228, 225, 215, 234, 230, 223, 231, 228, 220, 230, 227, 219, 227, 223, 216, 234, 230, 223, 233, 229, 221, 227, 223, 216, 230, 227, 219, 229, 226, 218, 230, 227, 219, 234, 234, 225, 233, 232, 224, 231, 230, 221, 231, 230, 221, 242, 242, 233, 246, 245, 236, 248, 248, 239, 253, 252, 243, 255, 255, 247, 255, 255, 252, 255, 255, 253, 255, 255, 254, 255, 255, 252, 233, 235, 223, 41, 43, 31, 78, 80, 70, 84, 85, 76, 93, 95, 85, 76, 77, 68, 86, 88, 78, 64, 66, 56, 66, 68, 58, 80, 82, 72, 64, 66, 56, 64, 65, 58, 66, 67, 60, 60, 62, 54, 62, 63, 56, 57, 57, 48, 64, 64, 55, 59, 58, 49, 59, 58, 49, 61, 60, 51, 60, 59, 50, 60, 59, 50, 57, 57, 48, 68, 65, 57, 55, 54, 46, 62, 61, 52, 49, 48, 40, 56, 56, 45, 48, 48, 36, 50, 50, 41, 55, 56, 47, 60, 62, 52, 59, 58, 47, 60, 62, 52, 58, 60, 50, 62, 64, 52, 61, 60, 49, 57, 57, 46, 74, 73, 62, 62, 62, 50, 80, 79, 68, 73, 70, 60, 77, 77, 65, 85, 85, 74, 71, 68, 57, 73, 70, 60, 87, 86, 75, 100, 100, 89, 77, 77, 65, 92, 92, 81, 94, 93, 82, 66, 65, 54, 88, 87, 76, 66, 65, 54, 84, 83, 75, 82, 81, 72, 75, 74, 65, 82, 81, 72, 90, 90, 78, 88, 87, 76, 71, 71, 60, 81, 80, 69, 83, 83, 71, 67, 69, 57, 84, 84, 72, 87, 86, 75, 92, 92, 81, 77, 77, 65, 69, 69, 57, 84, 84, 72, 77, 77, 65, 67, 66, 55, 91, 91, 80, 70, 70, 59, 85, 85, 74, 95, 94, 83, 64, 64, 55, 75, 74, 65, 80, 79, 70, 108, 107, 98, 66, 65, 56, 87, 86, 75, 98, 97, 89, 78, 78, 69, 85, 85, 74, 110, 109, 98, 78, 78, 67, 95, 94, 83, 71, 71, 60, 64, 64, 53, 66, 65, 54, 99, 99, 88, 87, 86, 75, 94, 93, 82, 106, 106, 95, 104, 104, 92, 75, 74, 63, 89, 88, 77, 118, 117, 106, 95, 94, 83, 64, 64, 55, 108, 107, 96, 112, 112, 100, 141, 141, 129, 100, 100, 89, 100, 100, 89, 62, 62, 50, 70, 70, 59, 99, 99, 88, 80, 79, 68, 70, 
70, 59, 68, 67, 56, 115, 112, 102, 78, 78, 67, 73, 70, 60, 92, 89, 78, 123, 122, 111, 114, 111, 100, 83, 81, 70, 113, 113, 102, 109, 108, 97, 54, 53, 42, 76, 76, 64, 100, 100, 89, 83, 83, 71, 123, 122, 111, 113, 113, 102, 100, 100, 89, 123, 122, 111, 88, 85, 75, 113, 110, 99, 101, 98, 88, 110, 107, 97, 68, 66, 55, 85, 85, 74, 134, 134, 123, 90, 90, 78, 104, 104, 92, 88, 87, 76, 111, 110, 101, 115, 112, 102, 82, 81, 70, 63, 63, 52, 89, 86, 76, 81, 80, 69, 94, 93, 82, 70, 70, 59, 104, 104, 92, 70, 67, 56, 109, 106, 96, 86, 83, 72, 80, 77, 67, 97, 95, 84, 90, 90, 78, 87, 86, 75, 141, 138, 127, 104, 104, 92, 114, 114, 103, 91, 91, 80, 73, 72, 61, 70, 70, 59, 85, 85, 74, 145, 144, 133, 120, 120, 109, 112, 112, 100, 132, 131, 120, 84, 84, 72, 73, 72, 61, 87, 87, 73, 94, 92, 79, 144, 141, 129, 123, 120, 110, 128, 125, 114, 77, 74, 63, 86, 83, 72, 73, 70, 60, 97, 95, 84, 75, 73, 62, 123, 120, 110, 103, 100, 90, 91, 88, 77, 113, 110, 99, 137, 134, 124, 95, 92, 82, 131, 128, 118, 118, 116, 105, 145, 142, 132, 78, 75, 64, 70, 66, 58, 92, 88, 80, 129, 126, 118, 63, 59, 51, 138, 135, 125, 75, 73, 60, 58, 56, 41, 229, 228, 215, 225, 225, 213, 229, 226, 216, 230, 227, 219, 233, 229, 221, 229, 226, 216, 230, 227, 219, 233, 229, 221, 234, 230, 223, 230, 227, 219, 231, 228, 220, 229, 226, 218, 228, 225, 217, 228, 225, 217, 230, 227, 219, 231, 228, 220, 231, 228, 220, 236, 235, 226, 233, 232, 224, 239, 238, 229, 242, 242, 233, 242, 242, 233, 249, 249, 240, 252, 251, 242, 255, 255, 248, 255, 255, 252, 255, 255, 254, 255, 255, 255, 255, 255, 255, 67, 69, 60, 79, 81, 71, 84, 86, 74, 90, 91, 82, 83, 82, 73, 91, 92, 83, 76, 77, 68, 70, 71, 62, 70, 71, 62, 60, 62, 52, 47, 48, 39, 63, 64, 57, 59, 61, 53, 60, 62, 54, 62, 61, 52, 57, 56, 50, 64, 64, 55, 64, 64, 55, 62, 61, 54, 57, 57, 48, 45, 44, 35, 60, 59, 50, 59, 58, 49, 59, 58, 49, 57, 57, 48, 56, 55, 47, 60, 59, 50, 57, 57, 48, 56, 55, 47, 55, 56, 47, 57, 59, 49, 57, 59, 49, 57, 57, 46, 63, 63, 52, 56, 56, 45, 54, 53, 42, 70, 70, 59, 73, 72, 61, 66, 65, 54, 82, 81, 70, 70, 70, 59, 56, 56, 45, 79, 76, 68, 61, 59, 48, 74, 71, 61, 107, 104, 94, 71, 68, 57, 103, 100, 90, 60, 59, 48, 60, 59, 48, 75, 74, 63, 82, 81, 70, 74, 73, 64, 76, 75, 66, 88, 87, 78, 108, 107, 98, 73, 72, 63, 70, 69, 61, 69, 68, 60, 56, 56, 45, 77, 77, 65, 69, 68, 60, 78, 78, 67, 94, 93, 82, 75, 74, 65, 85, 85, 74, 84, 84, 72, 90, 90, 78, 62, 62, 50, 74, 73, 62, 87, 86, 75, 82, 81, 70, 63, 63, 52, 95, 94, 83, 95, 94, 83, 74, 73, 62, 90, 89, 80, 84, 83, 75, 64, 64, 55, 91, 90, 82, 74, 73, 64, 80, 79, 70, 73, 72, 63, 95, 94, 85, 84, 84, 72, 82, 81, 70, 111, 110, 99, 80, 79, 68, 103, 102, 91, 108, 107, 96, 71, 71, 60, 62, 62, 50, 95, 94, 83, 90, 90, 78, 63, 63, 52, 92, 92, 81, 97, 96, 85, 118, 117, 106, 127, 127, 116, 78, 80, 68, 130, 129, 118, 97, 96, 85, 100, 100, 89, 69, 69, 57, 97, 96, 85, 84, 84, 72, 71, 73, 61, 135, 135, 124, 98, 98, 86, 75, 74, 63, 104, 104, 92, 94, 93, 82, 101, 98, 88, 113, 110, 97, 97, 95, 84, 118, 117, 106, 72, 69, 59, 72, 69, 59, 97, 95, 84, 110, 109, 98, 64, 61, 50, 114, 114, 103, 94, 93, 82, 77, 77, 65, 94, 93, 82, 62, 62, 50, 120, 120, 109, 106, 103, 92, 99, 96, 85, 83, 81, 70, 120, 117, 106, 67, 64, 54, 116, 113, 103, 124, 121, 111, 89, 86, 76, 68, 66, 55, 83, 83, 71, 83, 83, 71, 87, 86, 77, 76, 75, 66, 82, 79, 71, 63, 63, 52, 87, 86, 75, 147, 145, 134, 75, 73, 62, 63, 63, 52, 60, 59, 48, 114, 111, 100, 71, 71, 60, 109, 106, 96, 101, 98, 88, 148, 148, 136, 115, 112, 102, 74, 71, 61, 128, 128, 117, 99, 99, 88, 77, 77, 65, 94, 93, 82, 57, 57, 46, 70, 70, 59, 121, 121, 
110, 142, 139, 128, 108, 107, 96, 102, 101, 90, 131, 130, 119, 105, 105, 94, 81, 80, 69, 97, 96, 85, 70, 70, 57, 92, 89, 78, 72, 69, 59, 108, 105, 93, 125, 123, 112, 90, 84, 75, 95, 92, 82, 116, 113, 103, 130, 127, 117, 101, 98, 88, 68, 66, 55, 131, 128, 118, 123, 120, 110, 70, 67, 56, 127, 124, 113, 144, 141, 131, 72, 69, 59, 86, 83, 72, 72, 69, 61, 74, 73, 62, 137, 134, 126, 117, 114, 106, 85, 85, 76, 135, 135, 126, 76, 76, 64, 116, 114, 101, 88, 88, 72, 233, 233, 222, 230, 229, 218, 234, 231, 220, 228, 225, 217, 231, 228, 220, 225, 222, 214, 229, 226, 218, 235, 232, 224, 233, 229, 221, 234, 230, 223, 229, 226, 218, 229, 226, 218, 229, 226, 218, 198, 194, 186, 230, 227, 219, 233, 229, 221, 231, 230, 221, 231, 230, 221, 234, 234, 223, 240, 239, 231, 241, 241, 230, 246, 245, 236, 253, 252, 241, 252, 251, 242, 255, 255, 254, 255, 255, 252, 255, 255, 255, 255, 255, 255, 253, 252, 241, 76, 76, 64, 81, 83, 71, 88, 90, 78, 90, 89, 80, 91, 90, 82, 90, 89, 80, 77, 76, 68, 76, 75, 66, 58, 60, 50, 60, 62, 52, 55, 56, 49, 59, 61, 53, 62, 63, 56, 62, 63, 56, 62, 63, 54, 53, 52, 45, 49, 48, 42, 57, 57, 48, 60, 59, 50, 55, 54, 46, 60, 59, 50, 50, 50, 41, 56, 55, 47, 56, 55, 47, 57, 57, 48, 61, 60, 51, 56, 55, 47, 59, 61, 51, 53, 55, 46, 53, 55, 43, 52, 54, 42, 47, 48, 39, 43, 45, 33, 57, 57, 48, 59, 61, 49, 56, 57, 48, 55, 56, 47, 61, 60, 49, 62, 62, 50, 73, 72, 61, 66, 65, 56, 55, 55, 43, 80, 77, 67, 77, 74, 63, 96, 93, 83, 71, 68, 57, 59, 56, 46, 70, 67, 56, 75, 74, 63, 52, 51, 42, 76, 75, 66, 100, 100, 91, 95, 94, 85, 74, 73, 64, 100, 100, 91, 83, 82, 73, 84, 83, 75, 64, 64, 55, 90, 89, 80, 92, 92, 81, 82, 81, 70, 85, 85, 76, 68, 67, 56, 80, 79, 68, 68, 67, 56, 75, 74, 63, 76, 76, 64, 71, 71, 60, 69, 69, 57, 70, 70, 59, 62, 62, 50, 80, 79, 68, 92, 92, 81, 88, 87, 76, 89, 88, 77, 92, 92, 81, 80, 79, 70, 60, 59, 50, 83, 82, 73, 85, 85, 76, 106, 105, 97, 73, 72, 63, 62, 61, 52, 102, 101, 92, 73, 72, 63, 95, 94, 83, 100, 100, 89, 88, 87, 76, 88, 87, 76, 60, 59, 48, 82, 81, 70, 98, 98, 86, 57, 57, 46, 104, 104, 92, 118, 117, 106, 66, 65, 54, 127, 127, 116, 98, 98, 86, 78, 78, 67, 114, 114, 103, 81, 80, 69, 63, 63, 52, 84, 84, 72, 123, 122, 111, 68, 67, 56, 81, 80, 69, 94, 93, 82, 120, 120, 109, 62, 62, 50, 70, 70, 59, 105, 105, 94, 80, 79, 68, 98, 98, 86, 71, 71, 60, 125, 123, 112, 60, 57, 47, 120, 120, 109, 153, 151, 140, 96, 95, 84, 89, 88, 77, 81, 78, 70, 83, 83, 71, 116, 115, 104, 55, 55, 43, 60, 59, 48, 127, 127, 116, 68, 67, 56, 102, 101, 90, 72, 69, 59, 103, 100, 90, 116, 113, 103, 73, 70, 60, 131, 128, 118, 107, 104, 94, 76, 75, 66, 86, 83, 75, 113, 109, 101, 124, 123, 114, 80, 79, 70, 131, 130, 121, 62, 62, 50, 81, 78, 68, 80, 79, 68, 138, 137, 126, 92, 92, 81, 104, 104, 92, 81, 80, 69, 71, 71, 60, 146, 144, 131, 124, 121, 111, 72, 69, 59, 141, 138, 127, 110, 107, 97, 77, 74, 63, 115, 112, 102, 104, 104, 92, 74, 73, 62, 84, 84, 72, 104, 104, 92, 62, 62, 50, 83, 83, 71, 94, 91, 81, 66, 65, 54, 135, 135, 124, 84, 84, 72, 95, 94, 83, 67, 64, 54, 71, 71, 60, 91, 91, 78, 92, 89, 76, 85, 82, 69, 117, 115, 102, 67, 65, 52, 74, 71, 61, 81, 78, 68, 117, 114, 104, 111, 109, 98, 122, 119, 109, 123, 120, 110, 118, 116, 105, 73, 70, 60, 156, 153, 142, 147, 145, 134, 134, 131, 120, 137, 134, 124, 115, 112, 102, 70, 67, 56, 70, 67, 56, 104, 102, 91, 71, 68, 57, 99, 96, 85, 65, 62, 52, 149, 146, 133, 85, 82, 69, 80, 78, 65, 227, 228, 212, 231, 231, 217, 231, 230, 219, 231, 228, 218, 231, 228, 218, 233, 229, 221, 230, 227, 219, 233, 229, 221, 228, 225, 215, 225, 222, 214, 227, 223, 216, 230, 227, 217, 229, 226, 
218, 228, 225, 217, 233, 229, 221, 226, 225, 217, 232, 231, 223, 229, 228, 217, 240, 240, 228, 241, 241, 230, 242, 242, 233, 246, 245, 234, 249, 249, 238, 253, 252, 241, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 119, 119, 107, 76, 77, 68, 87, 89, 79, 81, 83, 73, 76, 77, 68, 87, 86, 77, 70, 69, 61, 81, 80, 71, 77, 78, 69, 63, 64, 55, 63, 64, 55, 65, 66, 59, 51, 52, 45, 53, 55, 46, 56, 57, 50, 57, 58, 51, 48, 47, 39, 64, 64, 55, 63, 62, 54, 59, 58, 49, 57, 57, 48, 57, 57, 48, 57, 57, 48, 62, 61, 52, 49, 48, 40, 62, 61, 52, 61, 60, 51, 63, 63, 52, 60, 59, 50, 55, 54, 46, 54, 53, 42, 55, 54, 46, 49, 48, 40, 63, 62, 54, 56, 55, 47, 50, 50, 41, 68, 67, 58, 66, 68, 58, 63, 63, 52, 67, 66, 55, 63, 62, 54, 69, 68, 60, 73, 75, 65, 94, 93, 82, 73, 70, 60, 87, 84, 76, 72, 69, 59, 91, 88, 77, 66, 63, 53, 103, 100, 90, 77, 73, 65, 63, 62, 54, 94, 93, 82, 53, 52, 43, 70, 70, 59, 99, 99, 88, 92, 92, 83, 74, 73, 64, 78, 78, 69, 80, 79, 68, 77, 76, 68, 69, 68, 60, 66, 65, 54, 85, 87, 75, 81, 80, 69, 81, 80, 69, 78, 78, 67, 81, 80, 69, 76, 76, 64, 68, 67, 56, 73, 72, 63, 69, 69, 57, 77, 77, 65, 73, 72, 63, 92, 92, 81, 87, 86, 77, 91, 91, 80, 60, 59, 48, 88, 87, 76, 84, 83, 75, 85, 85, 76, 99, 98, 90, 63, 63, 52, 80, 79, 70, 95, 94, 83, 91, 91, 80, 105, 105, 94, 73, 72, 61, 74, 73, 62, 99, 99, 88, 103, 102, 91, 96, 95, 84, 56, 56, 45, 71, 71, 60, 97, 96, 85, 64, 64, 53, 79, 76, 65, 105, 105, 94, 90, 90, 78, 61, 60, 49, 70, 70, 59, 114, 114, 103, 102, 101, 90, 75, 74, 63, 85, 85, 74, 62, 62, 50, 127, 127, 116, 92, 92, 81, 68, 67, 56, 127, 127, 116, 97, 96, 85, 50, 50, 39, 125, 125, 113, 123, 122, 111, 97, 95, 84, 65, 62, 50, 62, 62, 50, 96, 95, 84, 63, 60, 49, 55, 55, 43, 118, 117, 106, 103, 100, 90, 58, 55, 45, 87, 84, 74, 63, 63, 52, 63, 60, 49, 78, 78, 67, 83, 83, 71, 67, 64, 54, 65, 62, 52, 121, 118, 107, 145, 142, 132, 70, 67, 56, 58, 55, 45, 82, 79, 71, 146, 143, 133, 111, 109, 98, 136, 133, 123, 70, 66, 58, 79, 76, 68, 110, 107, 99, 130, 127, 119, 71, 71, 62, 136, 133, 125, 130, 127, 117, 103, 100, 90, 74, 71, 61, 96, 95, 84, 125, 123, 110, 103, 101, 88, 92, 89, 76, 165, 162, 152, 99, 99, 86, 101, 98, 88, 87, 86, 75, 101, 98, 88, 134, 134, 123, 135, 135, 122, 87, 84, 74, 93, 90, 80, 116, 113, 103, 73, 72, 61, 78, 78, 67, 85, 85, 74, 143, 140, 129, 99, 96, 85, 128, 128, 117, 87, 87, 73, 113, 110, 97, 157, 154, 142, 109, 106, 96, 68, 66, 53, 77, 74, 63, 113, 110, 99, 61, 59, 48, 145, 142, 132, 77, 74, 63, 142, 139, 128, 143, 140, 132, 123, 120, 110, 76, 70, 63, 82, 76, 69, 164, 159, 152, 94, 89, 80, 101, 98, 88, 96, 93, 83, 122, 119, 109, 122, 119, 109, 63, 60, 49, 96, 93, 83, 109, 106, 96, 127, 124, 113, 111, 109, 98, 94, 91, 81, 122, 119, 107, 72, 69, 57, 232, 232, 218, 237, 236, 225, 232, 232, 220, 231, 230, 221, 230, 229, 220, 232, 232, 220, 230, 229, 218, 233, 229, 221, 229, 226, 216, 230, 227, 217, 227, 224, 213, 229, 226, 218, 230, 227, 219, 229, 226, 218, 225, 222, 214, 233, 229, 221, 233, 229, 221, 236, 233, 223, 239, 238, 227, 239, 238, 227, 245, 244, 235, 244, 243, 232, 249, 249, 238, 255, 255, 246, 255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 251, 47, 46, 35, 77, 77, 65, 73, 72, 61, 84, 86, 74, 76, 75, 66, 84, 83, 75, 73, 72, 63, 81, 80, 71, 71, 71, 62, 62, 61, 54, 57, 58, 51, 64, 65, 58, 60, 62, 54, 51, 52, 45, 58, 59, 52, 60, 62, 52, 53, 55, 48, 62, 61, 52, 59, 58, 49, 53, 52, 43, 55, 54, 46, 59, 58, 49, 55, 54, 46, 61, 60, 51, 62, 61, 52, 55, 54, 46, 55, 54, 46, 54, 53, 44, 51, 53, 43, 47, 48, 39, 53, 52, 41, 57, 59, 47, 55, 57, 45, 54, 53, 42, 57, 57, 46, 51, 
53, 41, 59, 61, 49, 57, 57, 46, 68, 67, 58, 56, 57, 48, 68, 67, 58, 80, 79, 70, 73, 72, 63, 74, 73, 64, 76, 76, 64, 77, 74, 63, 82, 79, 69, 81, 78, 68, 73, 70, 60, 81, 78, 68, 83, 81, 70, 70, 66, 58, 92, 92, 83, 60, 57, 47, 83, 83, 71, 89, 88, 77, 88, 87, 76, 71, 71, 62, 70, 69, 61, 76, 75, 66, 71, 71, 62, 88, 87, 78, 56, 56, 45, 73, 72, 61, 80, 79, 68, 74, 73, 62, 76, 76, 64, 77, 77, 65, 77, 77, 65, 57, 57, 46, 78, 78, 69, 87, 86, 77, 59, 58, 49, 88, 87, 78, 66, 65, 56, 62, 61, 52, 89, 88, 77, 75, 74, 65, 78, 78, 69, 89, 88, 77, 81, 80, 71, 89, 88, 79, 74, 73, 64, 92, 92, 83, 84, 84, 72, 84, 84, 72, 67, 66, 55, 69, 69, 57, 71, 71, 60, 60, 57, 47, 113, 110, 99, 74, 73, 62, 109, 108, 97, 88, 87, 76, 58, 55, 45, 86, 83, 72, 103, 100, 90, 61, 60, 49, 113, 113, 102, 94, 93, 82, 70, 70, 59, 60, 59, 48, 114, 114, 103, 76, 76, 64, 81, 80, 69, 81, 80, 69, 96, 95, 84, 74, 73, 62, 62, 62, 50, 45, 44, 33, 74, 73, 62, 96, 95, 84, 119, 119, 107, 109, 108, 97, 132, 131, 120, 92, 89, 78, 63, 63, 52, 83, 83, 71, 70, 66, 58, 56, 53, 42, 131, 130, 119, 58, 55, 47, 65, 62, 52, 53, 50, 40, 89, 88, 77, 111, 109, 98, 126, 126, 114, 76, 76, 64, 93, 90, 80, 106, 103, 92, 108, 105, 95, 91, 88, 77, 138, 135, 125, 77, 74, 63, 134, 131, 120, 115, 112, 102, 82, 79, 71, 104, 101, 93, 77, 76, 68, 70, 67, 56, 87, 86, 77, 106, 102, 94, 59, 56, 46, 97, 95, 84, 81, 78, 68, 72, 69, 59, 64, 61, 50, 128, 125, 114, 64, 61, 50, 80, 79, 68, 96, 93, 83, 75, 73, 62, 71, 68, 57, 86, 83, 72, 114, 111, 100, 142, 139, 128, 134, 131, 120, 87, 84, 74, 58, 55, 43, 102, 99, 89, 145, 143, 130, 100, 97, 86, 127, 124, 113, 66, 65, 54, 97, 95, 84, 80, 77, 67, 78, 75, 64, 148, 148, 136, 74, 73, 62, 67, 64, 54, 75, 73, 62, 136, 133, 123, 143, 140, 129, 128, 125, 114, 156, 153, 142, 127, 124, 113, 92, 89, 78, 94, 91, 81, 78, 74, 66, 91, 88, 77, 97, 95, 84, 114, 111, 103, 72, 69, 61, 127, 123, 115, 79, 76, 65, 128, 125, 114, 146, 143, 133, 70, 67, 56, 144, 141, 131, 95, 92, 82, 100, 97, 86, 83, 81, 70, 93, 90, 80, 110, 107, 97, 85, 82, 71, 78, 75, 64, 246, 247, 226, 236, 235, 222, 234, 234, 223, 232, 232, 220, 230, 229, 218, 229, 228, 217, 224, 223, 212, 234, 234, 223, 227, 227, 218, 229, 226, 216, 230, 227, 217, 231, 228, 218, 224, 221, 213, 230, 227, 217, 225, 223, 212, 237, 234, 224, 233, 233, 222, 237, 236, 225, 240, 240, 228, 240, 240, 228, 241, 241, 230, 247, 247, 235, 254, 254, 242, 255, 255, 247, 255, 255, 255, 255, 255, 255, 255, 255, 255, 177, 177, 166, 74, 73, 62, 82, 81, 70, 88, 87, 76, 76, 76, 64, 69, 69, 57, 89, 88, 79, 88, 87, 78, 74, 72, 66, 78, 74, 68, 63, 62, 56, 54, 51, 45, 68, 67, 60, 73, 71, 65, 60, 59, 50, 63, 64, 55, 54, 53, 44, 56, 55, 49, 64, 64, 55, 55, 54, 46, 50, 50, 41, 62, 61, 52, 64, 64, 55, 57, 57, 48, 49, 48, 40, 52, 51, 42, 63, 62, 54, 48, 47, 39, 40, 41, 29, 50, 50, 41, 57, 57, 46, 55, 55, 43, 59, 58, 47, 68, 67, 56, 50, 50, 39, 51, 53, 41, 59, 58, 47, 60, 59, 48, 74, 76, 66, 78, 78, 69, 59, 61, 51, 64, 66, 56, 70, 71, 62, 51, 53, 43, 98, 97, 89, 71, 71, 62, 69, 68, 60, 74, 71, 63, 57, 57, 48, 95, 92, 82, 82, 81, 72, 68, 65, 57, 95, 92, 84, 67, 66, 57, 73, 70, 62, 88, 87, 78, 62, 62, 50, 90, 90, 78, 62, 61, 52, 59, 58, 49, 74, 73, 64, 80, 79, 70, 57, 57, 48, 90, 90, 78, 77, 77, 65, 84, 84, 72, 76, 76, 64, 88, 87, 76, 67, 64, 54, 83, 83, 71, 80, 79, 68, 75, 74, 63, 87, 86, 75, 80, 79, 70, 63, 62, 54, 80, 79, 70, 82, 81, 72, 82, 81, 72, 64, 64, 55, 85, 85, 76, 80, 79, 70, 88, 87, 78, 81, 80, 71, 99, 98, 90, 64, 64, 55, 97, 95, 84, 91, 91, 80, 69, 68, 60, 106, 106, 95, 82, 79, 69, 73, 70, 60, 77, 74, 63, 100, 97, 
86, 72, 69, 59, 89, 86, 76, 95, 92, 82, 120, 117, 106, 114, 111, 100, 60, 57, 47, 100, 97, 86, 110, 109, 98, 67, 66, 55, 64, 64, 53, 114, 114, 103, 113, 113, 102, 75, 74, 63, 67, 66, 55, 89, 88, 79, 78, 78, 67, 67, 66, 55, 57, 57, 46, 81, 80, 69, 91, 91, 80, 74, 73, 62, 89, 88, 77, 59, 58, 47, 62, 62, 50, 123, 122, 111, 64, 61, 50, 110, 107, 97, 115, 112, 104, 63, 63, 52, 78, 78, 69, 100, 100, 89, 117, 116, 105, 96, 95, 84, 66, 65, 54, 131, 128, 118, 134, 131, 120, 145, 142, 132, 113, 110, 97, 80, 77, 67, 97, 95, 84, 87, 84, 74, 125, 123, 112, 110, 107, 97, 71, 68, 57, 115, 112, 102, 125, 122, 114, 99, 96, 85, 139, 136, 126, 125, 122, 114, 99, 95, 87, 139, 136, 126, 60, 57, 47, 85, 82, 71, 72, 69, 59, 69, 69, 57, 108, 105, 95, 67, 64, 54, 92, 89, 78, 118, 116, 105, 106, 103, 92, 110, 107, 97, 83, 81, 70, 113, 110, 99, 144, 141, 131, 79, 76, 64, 73, 70, 60, 76, 71, 59, 86, 83, 70, 124, 121, 111, 110, 107, 97, 129, 126, 116, 88, 85, 75, 94, 91, 81, 111, 109, 98, 89, 88, 77, 127, 124, 113, 127, 127, 116, 75, 73, 62, 136, 133, 123, 94, 91, 81, 63, 60, 49, 106, 103, 92, 106, 103, 92, 145, 142, 132, 106, 103, 92, 86, 83, 72, 86, 83, 72, 83, 81, 70, 83, 81, 70, 84, 83, 75, 111, 109, 98, 74, 71, 63, 77, 74, 63, 82, 79, 69, 73, 70, 60, 78, 75, 64, 100, 97, 86, 75, 73, 62, 145, 142, 132, 78, 75, 64, 101, 98, 88, 50, 47, 36, 68, 66, 55, 74, 71, 61, 252, 254, 228, 234, 235, 219, 226, 226, 215, 231, 230, 219, 231, 230, 219, 229, 228, 217, 227, 227, 216, 223, 222, 211, 232, 232, 220, 230, 229, 218, 227, 224, 213, 229, 226, 216, 229, 226, 216, 229, 226, 216, 233, 230, 219, 233, 233, 222, 239, 238, 227, 239, 238, 227, 241, 241, 230, 245, 244, 233, 213, 213, 202, 248, 248, 237, 253, 252, 241, 255, 255, 248, 255, 255, 254, 255, 255, 255, 255, 255, 255, 3, 2, 0, 81, 80, 69, 80, 79, 68, 81, 80, 69, 90, 90, 78, 71, 71, 62, 80, 79, 70, 70, 69, 61, 63, 62, 54, 59, 57, 51, 73, 72, 63, 61, 60, 53, 59, 57, 51, 53, 52, 45, 56, 55, 47, 55, 54, 46, 48, 48, 36, 50, 50, 41, 54, 53, 44, 55, 54, 46, 60, 59, 50, 53, 52, 43, 54, 53, 44, 63, 62, 54, 53, 52, 45, 55, 54, 46, 62, 61, 54, 50, 50, 41, 53, 52, 43, 39, 38, 29, 63, 62, 54, 57, 57, 46, 42, 42, 31, 55, 55, 43, 63, 63, 52, 57, 57, 48, 57, 57, 48, 56, 55, 47, 66, 65, 56, 58, 60, 50, 56, 55, 47, 64, 64, 55, 85, 85, 76, 63, 62, 54, 59, 58, 49, 75, 74, 65, 81, 80, 71, 71, 67, 60, 86, 83, 75, 83, 80, 72, 96, 93, 83, 102, 99, 89, 78, 75, 64, 57, 57, 46, 83, 81, 70, 74, 73, 62, 75, 74, 63, 74, 73, 62, 83, 82, 73, 92, 92, 83, 63, 63, 52, 69, 68, 60, 90, 89, 80, 73, 72, 61, 62, 62, 50, 77, 77, 65, 85, 85, 74, 89, 88, 77, 77, 77, 65, 71, 71, 60, 61, 60, 49, 72, 69, 59, 83, 83, 71, 69, 68, 60, 69, 68, 60, 87, 86, 77, 85, 85, 76, 82, 81, 70, 66, 65, 56, 89, 88, 79, 83, 83, 71, 81, 80, 71, 82, 81, 72, 87, 86, 75, 90, 90, 78, 96, 95, 84, 85, 85, 74, 96, 93, 83, 103, 100, 90, 85, 81, 73, 63, 60, 49, 96, 93, 83, 64, 61, 50, 86, 83, 72, 74, 71, 61, 93, 90, 80, 86, 83, 72, 64, 64, 53, 63, 60, 49, 110, 109, 98, 61, 60, 49, 67, 66, 55, 62, 62, 50, 105, 105, 94, 100, 100, 89, 74, 73, 62, 120, 120, 109, 106, 105, 97, 99, 99, 88, 73, 72, 63, 114, 114, 103, 102, 101, 90, 89, 88, 77, 71, 71, 60, 50, 50, 39, 77, 77, 65, 87, 86, 75, 118, 117, 106, 81, 80, 69, 69, 69, 57, 96, 93, 83, 98, 98, 86, 89, 88, 77, 140, 140, 128, 76, 76, 64, 76, 76, 64, 65, 62, 52, 127, 124, 113, 113, 110, 99, 124, 121, 111, 86, 83, 70, 113, 110, 97, 63, 60, 47, 93, 90, 78, 104, 102, 91, 66, 63, 53, 134, 131, 120, 72, 69, 59, 76, 76, 64, 113, 110, 99, 61, 59, 48, 108, 105, 97, 72, 69, 61, 83, 81, 70, 74, 71, 61, 108, 
105, 95, 88, 85, 75, 77, 74, 63, 96, 93, 83, 125, 123, 112, 100, 97, 86, 128, 125, 114, 114, 111, 100, 115, 112, 102, 99, 96, 85, 77, 74, 63, 136, 133, 123, 91, 88, 75, 131, 129, 116, 75, 73, 62, 103, 101, 88, 72, 69, 57, 124, 121, 111, 125, 123, 112, 97, 95, 84, 100, 97, 86, 123, 122, 111, 87, 84, 74, 87, 84, 74, 64, 61, 50, 56, 53, 42, 111, 109, 98, 151, 148, 138, 136, 133, 123, 77, 74, 61, 143, 140, 129, 58, 55, 45, 110, 107, 97, 72, 69, 59, 86, 83, 72, 60, 57, 47, 130, 127, 117, 91, 88, 77, 81, 78, 70, 126, 126, 114, 100, 97, 86, 89, 86, 76, 83, 81, 70, 114, 111, 100, 89, 86, 76, 141, 138, 127, 78, 75, 64, 72, 69, 59, 68, 66, 55, 108, 105, 95, 93, 90, 80, 118, 116, 105, 237, 237, 223, 237, 237, 223, 232, 232, 220, 226, 226, 215, 231, 230, 219, 231, 230, 219, 231, 230, 219, 230, 229, 218, 230, 229, 218, 229, 228, 217, 229, 228, 217, 230, 227, 217, 231, 228, 218, 233, 230, 219, 234, 231, 220, 236, 233, 223, 240, 240, 228, 241, 241, 230, 237, 236, 225, 242, 242, 231, 242, 242, 231, 246, 245, 234, 253, 252, 241, 255, 255, 248, 255, 255, 255, 255, 255, 255, 209, 208, 197, 66, 65, 54, 83, 83, 71, 83, 83, 71, 90, 90, 78, 81, 80, 69, 81, 80, 69, 68, 67, 58, 68, 67, 58, 64, 63, 57, 67, 65, 59, 56, 55, 49, 60, 59, 50, 52, 48, 43, 51, 48, 40, 57, 57, 48, 61, 60, 51, 63, 62, 54, 60, 59, 50, 55, 54, 46, 60, 59, 50, 50, 50, 41, 62, 61, 52, 46, 45, 36, 60, 62, 52, 62, 61, 52, 56, 55, 47, 64, 64, 55, 56, 55, 47, 61, 60, 51, 59, 58, 47, 56, 55, 47, 49, 49, 38, 60, 57, 47, 53, 52, 41, 46, 45, 34, 55, 54, 46, 62, 61, 52, 52, 51, 42, 71, 71, 62, 67, 66, 57, 76, 75, 66, 71, 71, 62, 61, 60, 51, 69, 68, 60, 69, 68, 60, 81, 80, 71, 83, 82, 73, 84, 83, 75, 76, 75, 66, 93, 90, 82, 92, 92, 83, 90, 90, 78, 92, 92, 81, 63, 63, 52, 82, 81, 70, 88, 87, 76, 70, 70, 59, 81, 80, 69, 54, 53, 42, 85, 85, 74, 82, 81, 70, 80, 79, 70, 76, 76, 64, 73, 72, 61, 77, 77, 65, 69, 69, 57, 80, 77, 67, 76, 76, 64, 63, 63, 52, 75, 73, 62, 89, 86, 76, 82, 81, 70, 75, 74, 63, 97, 96, 85, 68, 67, 58, 98, 97, 89, 96, 95, 86, 85, 85, 76, 91, 90, 82, 77, 76, 68, 91, 87, 79, 61, 60, 51, 100, 100, 91, 88, 87, 78, 55, 55, 43, 56, 56, 45, 70, 70, 59, 95, 94, 83, 77, 74, 63, 56, 53, 42, 100, 97, 86, 109, 106, 96, 102, 99, 89, 65, 62, 52, 79, 76, 65, 70, 67, 56, 57, 54, 43, 77, 74, 63, 89, 86, 76, 112, 112, 100, 103, 102, 91, 87, 86, 75, 102, 101, 90, 61, 60, 49, 62, 62, 50, 111, 110, 99, 78, 78, 67, 125, 125, 113, 110, 109, 98, 109, 108, 97, 73, 72, 61, 67, 64, 54, 83, 83, 71, 82, 81, 70, 80, 77, 67, 72, 69, 59, 57, 57, 46, 79, 76, 65, 67, 64, 54, 109, 106, 98, 70, 70, 59, 93, 90, 80, 89, 88, 79, 120, 120, 109, 59, 58, 47, 74, 73, 62, 74, 71, 61, 131, 130, 119, 116, 114, 101, 87, 84, 74, 82, 80, 67, 106, 103, 92, 123, 120, 110, 102, 100, 87, 79, 76, 65, 82, 79, 69, 71, 68, 57, 110, 109, 98, 68, 66, 55, 116, 113, 103, 119, 119, 107, 110, 109, 98, 53, 50, 40, 107, 104, 94, 117, 114, 104, 94, 91, 81, 123, 120, 110, 93, 90, 80, 142, 139, 128, 96, 93, 83, 74, 71, 61, 72, 69, 59, 81, 78, 68, 78, 75, 64, 79, 76, 65, 141, 138, 127, 79, 76, 65, 100, 97, 86, 150, 147, 136, 70, 67, 56, 64, 64, 51, 96, 95, 84, 75, 75, 61, 69, 69, 57, 87, 87, 73, 73, 72, 61, 160, 159, 148, 63, 63, 52, 165, 162, 152, 135, 132, 121, 63, 60, 49, 143, 140, 129, 71, 68, 57, 88, 85, 75, 73, 71, 58, 77, 74, 61, 87, 85, 72, 131, 128, 118, 83, 81, 68, 113, 110, 97, 83, 81, 70, 118, 116, 105, 79, 76, 65, 102, 99, 89, 82, 81, 70, 85, 82, 71, 108, 105, 95, 73, 72, 61, 132, 129, 119, 110, 107, 97, 75, 73, 62, 80, 77, 67, 86, 83, 72, 93, 90, 80, 125, 123, 112, 76, 76, 62, 74, 72, 59, 
230, 229, 218, 236, 235, 224, 232, 232, 220, 227, 227, 216, 229, 228, 215, 230, 229, 218, 232, 232, 220, 230, 229, 218, 227, 227, 216, 227, 227, 216, 230, 229, 218, 230, 227, 217, 233, 233, 222, 231, 228, 218, 232, 232, 220, 238, 237, 226, 238, 237, 226, 238, 237, 226, 237, 236, 225, 240, 240, 228, 241, 241, 230, 248, 248, 237, 255, 255, 244, 255, 255, 253, 255, 255, 255, 255, 255, 253, 109, 106, 96, 80, 79, 68, 81, 80, 69, 97, 96, 85, 91, 91, 80, 87, 86, 75, 84, 84, 72, 73, 72, 61, 67, 66, 57, 69, 68, 62, 70, 66, 58, 61, 58, 50, 53, 50, 42, 56, 52, 44, 67, 66, 57, 49, 45, 37, 60, 59, 50, 52, 51, 42, 59, 58, 49, 59, 58, 49, 60, 59, 50, 53, 52, 43, 62, 62, 50, 54, 53, 44, 55, 54, 46, 62, 61, 52, 43, 45, 35, 60, 59, 50, 35, 34, 26, 51, 53, 43, 53, 52, 41, 60, 59, 50, 52, 51, 40, 59, 58, 47, 42, 42, 31, 54, 53, 44, 61, 60, 51, 74, 73, 64, 77, 76, 68, 70, 69, 61, 60, 59, 50, 61, 60, 51, 67, 66, 57, 84, 83, 75, 60, 59, 50, 73, 72, 63, 83, 82, 73, 77, 76, 68, 75, 74, 65, 68, 67, 58, 73, 72, 63, 94, 93, 84, 85, 85, 74, 89, 88, 77, 61, 60, 49, 90, 89, 80, 60, 59, 50, 74, 73, 62, 87, 86, 75, 69, 68, 60, 76, 76, 64, 80, 79, 68, 90, 90, 78, 81, 80, 69, 67, 66, 55, 75, 74, 63, 86, 83, 72, 92, 89, 78, 58, 55, 45, 64, 61, 50, 83, 81, 70, 78, 75, 64, 86, 83, 72, 64, 64, 53, 85, 85, 76, 61, 60, 51, 59, 58, 49, 80, 79, 70, 89, 88, 79, 85, 85, 76, 69, 68, 60, 63, 62, 54, 67, 66, 57, 61, 60, 51, 90, 89, 80, 81, 80, 71, 77, 76, 68, 102, 101, 92, 106, 106, 95, 73, 72, 61, 91, 91, 80, 108, 105, 95, 69, 69, 57, 92, 89, 78, 61, 59, 48, 60, 59, 48, 73, 72, 61, 104, 104, 92, 83, 83, 71, 106, 106, 95, 73, 72, 61, 70, 70, 59, 63, 63, 52, 73, 72, 61, 83, 83, 71, 54, 53, 42, 87, 86, 75, 144, 143, 134, 111, 110, 99, 110, 109, 98, 64, 64, 55, 109, 106, 96, 57, 57, 46, 90, 90, 78, 77, 74, 61, 99, 99, 88, 94, 91, 81, 98, 98, 86, 91, 88, 77, 85, 82, 71, 73, 70, 60, 80, 77, 67, 94, 91, 81, 69, 69, 57, 64, 64, 53, 74, 71, 61, 92, 92, 81, 82, 81, 70, 71, 68, 57, 65, 62, 50, 85, 82, 71, 96, 94, 81, 134, 131, 120, 53, 51, 38, 124, 121, 111, 129, 126, 116, 92, 89, 78, 81, 78, 68, 66, 63, 53, 94, 91, 81, 135, 135, 124, 120, 120, 109, 83, 81, 70, 70, 67, 56, 53, 52, 41, 94, 91, 81, 78, 75, 64, 101, 98, 88, 59, 56, 46, 64, 61, 50, 143, 140, 129, 77, 74, 63, 113, 110, 99, 71, 68, 57, 108, 105, 95, 128, 125, 114, 107, 104, 94, 129, 126, 116, 68, 66, 55, 95, 94, 83, 119, 119, 107, 127, 127, 116, 138, 137, 126, 64, 64, 53, 84, 84, 72, 102, 101, 90, 75, 73, 62, 128, 125, 114, 80, 77, 67, 123, 122, 111, 161, 161, 149, 97, 96, 85, 132, 131, 120, 73, 72, 61, 67, 66, 55, 131, 130, 119, 160, 160, 146, 96, 94, 81, 56, 53, 42, 95, 93, 80, 77, 74, 61, 106, 103, 92, 125, 123, 112, 126, 126, 114, 100, 97, 86, 74, 73, 62, 78, 75, 64, 85, 82, 71, 160, 158, 147, 139, 136, 126, 75, 74, 63, 74, 71, 61, 137, 134, 124, 105, 105, 94, 118, 116, 105, 70, 70, 59, 125, 125, 111, 74, 74, 60, 232, 232, 220, 230, 229, 218, 232, 232, 220, 233, 233, 222, 230, 229, 218, 229, 228, 217, 229, 228, 217, 229, 228, 217, 227, 227, 216, 227, 227, 216, 226, 226, 215, 223, 222, 211, 231, 228, 218, 230, 227, 217, 235, 232, 222, 232, 232, 220, 239, 238, 227, 240, 240, 228, 242, 242, 231, 237, 236, 225, 242, 242, 231, 248, 248, 237, 254, 254, 242, 255, 255, 249, 255, 255, 251, 254, 254, 242, 0, 0, 0, 81, 80, 69, 80, 79, 68, 92, 92, 81, 76, 75, 66, 94, 93, 84, 82, 81, 72, 57, 57, 48, 56, 55, 47, 66, 63, 55, 50, 47, 39, 43, 43, 34, 49, 45, 37, 50, 47, 39, 68, 67, 58, 84, 83, 75, 54, 53, 44, 61, 60, 51, 54, 53, 44, 54, 53, 44, 55, 54, 46, 50, 50, 41, 56, 56, 45, 55, 54, 46, 43, 
43, 34, 52, 51, 42, 59, 58, 49, 48, 47, 39, 52, 51, 42, 49, 48, 40, 53, 52, 41, 48, 47, 39, 48, 48, 36, 59, 56, 46, 52, 51, 42, 67, 66, 57, 64, 66, 56, 59, 61, 51, 50, 52, 42, 62, 61, 52, 74, 73, 64, 70, 69, 61, 75, 74, 65, 67, 66, 57, 66, 65, 56, 64, 64, 55, 82, 81, 72, 59, 61, 51, 87, 86, 77, 70, 69, 61, 69, 68, 60, 78, 78, 69, 82, 81, 72, 82, 81, 70, 74, 73, 64, 77, 76, 68, 84, 83, 75, 69, 68, 60, 63, 62, 54, 87, 86, 75, 77, 77, 65, 64, 64, 53, 78, 78, 67, 63, 63, 52, 67, 66, 55, 87, 86, 75, 70, 70, 59, 66, 65, 54, 71, 71, 60, 57, 57, 46, 49, 49, 38, 95, 94, 83, 66, 65, 56, 82, 81, 70, 70, 69, 61, 87, 86, 77, 82, 81, 72, 63, 62, 54, 94, 93, 84, 77, 76, 68, 96, 95, 88, 84, 83, 75, 85, 84, 78, 69, 68, 62, 70, 69, 61, 47, 46, 37, 108, 107, 98, 92, 92, 83, 100, 100, 89, 77, 77, 65, 92, 92, 81, 59, 56, 46, 86, 83, 72, 74, 71, 61, 94, 91, 81, 55, 55, 43, 99, 99, 88, 55, 55, 43, 70, 70, 59, 63, 63, 52, 92, 92, 81, 54, 53, 42, 106, 106, 95, 110, 109, 98, 68, 66, 55, 113, 113, 102, 104, 104, 92, 81, 80, 69, 105, 104, 96, 116, 113, 103, 88, 87, 76, 67, 66, 55, 102, 101, 90, 110, 110, 96, 68, 66, 53, 59, 58, 47, 54, 53, 42, 76, 76, 62, 114, 111, 100, 80, 77, 67, 81, 80, 71, 75, 74, 65, 113, 109, 101, 96, 95, 84, 117, 116, 107, 57, 54, 43, 64, 64, 53, 131, 130, 119, 96, 95, 84, 69, 69, 55, 78, 75, 62, 81, 79, 66, 130, 128, 115, 93, 90, 80, 127, 124, 113, 89, 88, 77, 81, 78, 68, 99, 96, 85, 53, 50, 40, 85, 85, 74, 113, 113, 102, 120, 117, 106, 106, 106, 95, 108, 105, 95, 71, 68, 55, 117, 115, 102, 71, 71, 60, 92, 89, 76, 74, 72, 59, 101, 98, 88, 91, 91, 80, 71, 71, 60, 88, 85, 75, 64, 61, 50, 114, 111, 100, 104, 102, 91, 94, 91, 81, 95, 94, 83, 110, 109, 98, 167, 164, 154, 100, 97, 86, 84, 84, 72, 96, 95, 84, 114, 114, 103, 138, 137, 126, 105, 105, 94, 94, 91, 81, 67, 64, 54, 142, 139, 128, 129, 126, 116, 73, 72, 61, 146, 145, 134, 101, 98, 88, 89, 88, 77, 87, 86, 75, 69, 69, 57, 75, 74, 63, 113, 110, 97, 111, 109, 96, 120, 117, 104, 74, 72, 59, 61, 61, 47, 156, 156, 145, 130, 129, 118, 73, 72, 61, 112, 112, 100, 123, 122, 111, 127, 124, 113, 83, 81, 70, 85, 82, 71, 77, 74, 63, 65, 62, 52, 101, 98, 88, 73, 72, 61, 74, 73, 62, 71, 71, 60, 87, 87, 73, 84, 86, 72, 230, 229, 218, 231, 230, 219, 226, 226, 215, 227, 227, 216, 223, 222, 211, 225, 225, 213, 226, 226, 215, 223, 222, 211, 225, 223, 212, 228, 225, 215, 227, 227, 216, 227, 227, 216, 230, 229, 218, 233, 230, 219, 236, 233, 223, 240, 240, 228, 239, 238, 227, 238, 237, 226, 237, 236, 225, 237, 236, 225, 245, 244, 233, 255, 255, 244, 255, 255, 249, 255, 255, 252, 253, 252, 241, 128, 128, 117, 78, 78, 67, 85, 82, 71, 87, 84, 74, 87, 86, 75, 87, 86, 75, 73, 72, 63, 69, 68, 60, 66, 68, 58, 61, 60, 51, 56, 55, 47, 60, 57, 49, 68, 67, 58, 52, 51, 40, 47, 46, 37, 68, 67, 58, 56, 56, 45, 56, 55, 47, 52, 51, 42, 53, 52, 43, 53, 52, 43, 52, 51, 42, 56, 55, 47, 61, 60, 49, 50, 50, 41, 54, 53, 44, 52, 51, 40, 36, 36, 27, 33, 32, 23, 50, 50, 41, 55, 55, 43, 60, 59, 50, 49, 48, 40, 46, 45, 36, 60, 59, 50, 48, 47, 39, 43, 45, 35, 50, 50, 41, 55, 56, 47, 59, 61, 51, 62, 61, 52, 70, 69, 61, 66, 65, 56, 69, 68, 60, 60, 59, 50, 80, 82, 72, 70, 71, 62, 70, 71, 62, 74, 76, 66, 77, 76, 68, 78, 78, 69, 78, 80, 68, 76, 77, 68, 83, 84, 75, 66, 68, 58, 57, 57, 48, 76, 75, 66, 90, 89, 80, 63, 63, 52, 89, 88, 77, 61, 60, 51, 57, 57, 46, 78, 78, 67, 62, 62, 50, 53, 52, 41, 80, 77, 67, 69, 69, 57, 63, 63, 52, 69, 69, 57, 47, 46, 35, 83, 83, 71, 81, 80, 71, 88, 87, 78, 56, 55, 47, 71, 71, 60, 85, 85, 76, 70, 69, 61, 90, 89, 80, 80, 79, 70, 88, 87, 78, 80, 79, 70, 62, 61, 54, 
85, 84, 78, 78, 77, 71, 60, 58, 52, 87, 86, 77, 84, 83, 75, 84, 83, 75, 97, 96, 87, 103, 102, 91, 68, 67, 56, 70, 70, 59, 60, 57, 47, 79, 76, 65, 71, 68, 57, 103, 102, 91, 99, 99, 88, 64, 64, 53, 88, 87, 76, 113, 113, 100, 81, 80, 69, 63, 63, 52, 71, 71, 60, 75, 74, 63, 60, 59, 48, 64, 61, 50, 87, 86, 75, 112, 112, 100, 74, 71, 61, 86, 83, 72, 109, 108, 97, 73, 70, 60, 86, 83, 72, 117, 114, 104, 114, 111, 100, 71, 68, 55, 61, 59, 48, 90, 90, 78, 82, 79, 69, 105, 105, 94, 77, 77, 65, 70, 67, 56, 86, 83, 72, 86, 83, 72, 66, 65, 54, 80, 77, 67, 109, 108, 97, 87, 86, 75, 57, 57, 46, 99, 99, 88, 92, 92, 81, 84, 84, 72, 85, 85, 74, 75, 74, 63, 68, 66, 55, 115, 112, 102, 121, 121, 110, 81, 78, 68, 86, 83, 72, 68, 67, 56, 110, 107, 97, 111, 109, 98, 117, 116, 105, 125, 123, 112, 126, 126, 114, 142, 142, 131, 102, 100, 87, 84, 84, 70, 122, 119, 109, 142, 139, 128, 123, 121, 108, 63, 60, 49, 83, 81, 70, 118, 116, 105, 135, 135, 124, 140, 140, 128, 80, 79, 68, 62, 62, 50, 130, 129, 118, 109, 108, 97, 72, 69, 61, 78, 78, 67, 127, 127, 116, 75, 74, 63, 87, 86, 75, 103, 102, 91, 92, 89, 78, 104, 102, 91, 123, 120, 110, 79, 76, 65, 77, 77, 65, 146, 145, 134, 111, 109, 98, 77, 74, 63, 155, 155, 144, 148, 148, 136, 56, 56, 45, 144, 143, 132, 75, 73, 62, 61, 60, 49, 66, 64, 51, 129, 126, 114, 128, 128, 117, 89, 89, 75, 80, 79, 66, 127, 124, 111, 128, 128, 117, 41, 41, 29, 100, 97, 86, 82, 79, 69, 100, 97, 86, 88, 85, 75, 102, 99, 89, 71, 68, 57, 104, 104, 92, 112, 112, 100, 87, 86, 75, 116, 116, 102, 103, 103, 89, 231, 231, 217, 226, 226, 213, 226, 226, 215, 225, 225, 213, 224, 223, 212, 224, 223, 212, 225, 225, 213, 226, 226, 215, 225, 223, 212, 231, 228, 218, 229, 226, 216, 230, 229, 218, 230, 227, 217, 233, 233, 222, 238, 235, 225, 241, 241, 230, 240, 240, 228, 237, 236, 225, 237, 236, 225, 238, 237, 226, 243, 240, 230, 248, 248, 237, 255, 255, 247, 255, 255, 253, 245, 242, 232, 15, 12, 2, 82, 79, 69, 86, 83, 72, 90, 90, 78, 62, 62, 50, 69, 68, 60, 81, 80, 69, 70, 69, 61, 65, 67, 57, 63, 62, 54, 48, 49, 40, 52, 51, 42, 55, 54, 46, 57, 57, 46, 57, 57, 46, 49, 49, 38, 56, 56, 45, 56, 55, 47, 54, 53, 44, 53, 52, 43, 63, 62, 54, 38, 35, 25, 60, 57, 47, 63, 63, 52, 52, 51, 40, 56, 53, 42, 52, 51, 40, 65, 62, 52, 56, 56, 45, 50, 50, 41, 69, 68, 60, 59, 58, 49, 41, 42, 33, 59, 58, 49, 52, 51, 42, 39, 38, 29, 51, 53, 43, 49, 50, 41, 61, 60, 51, 56, 57, 48, 61, 60, 51, 67, 66, 57, 69, 68, 60, 53, 55, 46, 62, 63, 54, 67, 69, 60, 62, 61, 52, 77, 76, 68, 66, 68, 58, 69, 68, 60, 67, 66, 57, 82, 81, 70, 90, 89, 80, 82, 81, 72, 70, 69, 61, 85, 85, 76, 84, 83, 75, 81, 80, 71, 70, 69, 61, 81, 80, 69, 100, 100, 91, 80, 79, 70, 54, 53, 44, 82, 81, 70, 78, 78, 67, 60, 59, 48, 63, 63, 52, 78, 78, 67, 64, 64, 53, 83, 82, 73, 60, 59, 50, 67, 66, 57, 62, 61, 52, 78, 78, 69, 71, 71, 62, 62, 61, 52, 87, 86, 77, 69, 68, 60, 76, 75, 66, 85, 85, 76, 73, 72, 63, 75, 74, 67, 80, 78, 72, 75, 74, 67, 82, 81, 74, 75, 74, 65, 83, 82, 73, 83, 82, 73, 68, 67, 58, 82, 81, 70, 97, 96, 85, 77, 77, 65, 94, 93, 82, 74, 73, 62, 80, 79, 68, 62, 62, 50, 71, 71, 60, 88, 87, 76, 84, 84, 72, 78, 78, 67, 99, 99, 88, 74, 73, 62, 62, 64, 52, 70, 70, 59, 75, 74, 63, 88, 87, 76, 60, 59, 48, 59, 58, 47, 64, 64, 53, 94, 91, 81, 65, 62, 52, 58, 55, 45, 107, 104, 94, 59, 56, 46, 95, 92, 82, 80, 77, 67, 100, 100, 89, 134, 134, 123, 68, 66, 55, 67, 64, 54, 119, 119, 107, 103, 102, 91, 82, 81, 70, 103, 100, 90, 45, 44, 33, 103, 102, 91, 82, 79, 69, 61, 60, 49, 98, 98, 86, 104, 104, 90, 90, 90, 76, 73, 72, 61, 91, 91, 80, 121, 121, 110, 108, 107, 96, 91, 88, 77, 
71, 68, 57, 61, 60, 49, 73, 72, 61, 80, 79, 68, 73, 72, 61, 62, 62, 50, 69, 69, 57, 61, 60, 51, 75, 74, 63, 67, 66, 55, 61, 60, 49, 67, 66, 55, 84, 84, 72, 71, 71, 62, 68, 67, 56, 77, 77, 65, 73, 72, 63, 80, 79, 70, 82, 81, 70, 66, 65, 56, 77, 76, 68, 52, 54, 44, 67, 69, 60, 72, 73, 66, 65, 66, 59, 63, 64, 57, 67, 69, 62, 60, 58, 52, 61, 60, 51, 54, 53, 46, 61, 60, 51, 58, 60, 50, 45, 47, 37, 52, 54, 44, 51, 53, 43, 55, 56, 47, 62, 63, 54, 52, 54, 44, 76, 78, 65, 92, 94, 84, 114, 116, 104, 104, 106, 94, 135, 137, 125, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 207, 205, 190, 207, 205, 190, 208, 206, 193, 209, 207, 194, 203, 203, 189, 200, 197, 185, 203, 201, 190, 203, 202, 191, 206, 203, 192, 203, 201, 190, 202, 199, 189, 199, 196, 185, 199, 196, 185, 207, 204, 194, 195, 192, 182, 167, 164, 156, 19, 18, 9, 82, 81, 72, 68, 67, 58, 74, 73, 64, 69, 68, 60, 57, 59, 49, 62, 63, 54, 47, 48, 39, 45, 47, 37, 47, 46, 37, 48, 49, 40, 45, 47, 37, 41, 40, 32, 49, 45, 37, 38, 35, 27, 57, 53, 46, 43, 40, 32, 36, 36, 27, 51, 48, 38, 40, 38, 27, 39, 36, 26, 31, 30, 19, 42, 42, 31, 28, 25, 14, 34, 34, 22, 57, 57, 44, 32, 32, 18, 42, 42, 29, 28, 28, 15, 55, 55, 41, 30, 33, 18, 42, 44, 30, 47, 52, 39, 30, 34, 21, 30, 27, 17, 46, 43, 33, 24, 21, 11, 54, 53, 42, 52, 49, 39, 60, 57, 47, 43, 40, 29, 54, 53, 42, 48, 48, 36, 57, 57, 46, 53, 52, 41, 50, 50, 39, 55, 55, 43, 52, 54, 42, 47, 46, 35, 55, 57, 45, 52, 54, 42, 51, 53, 41, 55, 55, 43, 58, 55, 45, 57, 57, 46, 56, 56, 45, 56, 56, 45, 42, 44, 32, 49, 49, 38, 43, 43, 32, 55, 55, 43, 52, 51, 42, 56, 55, 47, 53, 52, 43, 70, 66, 58, 46, 45, 36, 56, 55, 47, 60, 59, 48, 61, 58, 50, 50, 50, 39, 59, 58, 47, 56, 56, 45, 62, 62, 50, 61, 60, 49, 61, 60, 49, 56, 56, 45, 45, 44, 33, 61, 60, 49, 46, 45, 34, 61, 60, 49, 56, 56, 45, 43, 43, 34, 63, 63, 52, 52, 51, 40, 57, 57, 46, 64, 64, 53, 47, 46, 35, 50, 50, 39, 78, 80, 68, 62, 62, 50, 56, 56, 45, 59, 61, 49, 54, 53, 42, 57, 57, 46, 88, 87, 76, 53, 55, 43, 71, 71, 60, 80, 79, 68, 66, 65, 54, 57, 57, 46, 52, 51, 40, 70, 70, 59, 77, 77, 65, 76, 76, 64, 81, 78, 68, 63, 60, 49, 64, 61, 50, 77, 74, 61, 62, 62, 50, 66, 63, 53, 63, 63, 52, 59, 58, 47, 48, 47, 39, 48, 48, 36, 69, 69, 57, 80, 79, 70, 88, 87, 76, 56, 56, 45, 55, 55, 43, 63, 63, 52, 89, 88, 77, 63, 63, 52, 65, 67, 55, 74, 76, 64, 50, 52, 40, 60, 59, 48, 87, 86, 77, 71, 67, 60, 81, 80, 71, 64, 64, 53, 85, 81, 73, 94, 93, 82, 59, 58, 49, 73, 70, 62, 87, 84, 76, 83, 84, 75, 58, 60, 50, 67, 65, 59, 52, 54, 46, 59, 57, 51, 52, 54, 46, 44, 46, 36, 50, 51, 44, 47, 48, 39, 50, 50, 41, 58, 61, 46, 66, 69, 54, 66, 69, 54, 91, 93, 79, 91, 93, 79, 98, 100, 86, 108, 111, 96, 214, 216, 202, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 206, 204, 188, 202, 200, 185, 202, 200, 187, 200, 197, 185, 200, 197, 185, 198, 195, 182, 203, 201, 190, 202, 200, 187, 201, 199, 186, 200, 197, 187, 200, 197, 187, 200, 197, 187, 193, 190, 180, 187, 184, 174, 190, 187, 176, 124, 121, 115, 67, 64, 56, 80, 79, 70, 70, 69, 61, 71, 73, 63, 76, 77, 68, 49, 50, 41, 52, 56, 46, 48, 49, 40, 42, 45, 35, 52, 54, 44, 43, 45, 35, 46, 45, 36, 36, 36, 27, 35, 31, 23, 33, 30, 22, 31, 30, 21, 36, 36, 27, 36, 36, 27, 43, 43, 34, 40, 39, 28, 46, 45, 34, 42, 42, 31, 39, 38, 27, 27, 27, 15, 40, 39, 28, 36, 36, 25, 58, 55, 43, 49, 49, 36, 43, 40, 27, 35, 35, 22, 40, 40, 26, 33, 33, 19, 42, 44, 32, 58, 61, 46, 35, 35, 24, 54, 53, 42, 49, 46, 35, 32, 31, 20, 49, 46, 35, 58, 55, 45, 51, 48, 38, 59, 56, 46, 63, 60, 49, 54, 53, 42, 61, 60, 49, 41, 40, 32, 62, 62, 50, 49, 49, 38, 59, 58, 47, 36, 36, 25, 55, 55, 41, 59, 58, 47, 57, 59, 47, 63, 63, 50, 59, 58, 47, 27, 29, 15, 52, 52, 38, 50, 50, 39, 59, 58, 45, 56, 53, 42, 55, 55, 43, 57, 57, 48, 57, 57, 48, 53, 52, 43, 66, 63, 55, 52, 51, 42, 42, 41, 33, 60, 59, 48, 62, 62, 50, 46, 45, 34, 59, 58, 47, 61, 61, 47, 47, 46, 35, 61, 60, 49, 56, 56, 45, 52, 51, 40, 49, 49, 38, 56, 56, 45, 57, 57, 46, 62, 62, 50, 57, 57, 46, 59, 58, 47, 60, 59, 48, 54, 53, 42, 49, 49, 38, 50, 50, 41, 59, 61, 49, 57, 59, 47, 59, 61, 49, 66, 65, 54, 54, 53, 42, 68, 67, 56, 53, 52, 41, 66, 65, 54, 85, 85, 74, 52, 51, 40, 46, 46, 32, 53, 52, 41, 48, 48, 36, 60, 60, 46, 69, 69, 57, 54, 53, 42, 81, 81, 67, 81, 80, 69, 42, 42, 31, 46, 45, 34, 69, 69, 57, 50, 50, 39, 83, 83, 71, 62, 62, 50, 69, 69, 57, 59, 58, 47, 59, 58, 47, 95, 94, 83, 43, 43, 32, 62, 62, 50, 67, 66, 55, 91, 90, 82, 82, 81, 70, 91, 91, 80, 56, 58, 46, 70, 72, 60, 81, 83, 71, 55, 57, 45, 71, 73, 61, 81, 80, 71, 75, 74, 65, 91, 90, 82, 80, 79, 70, 71, 71, 62, 63, 62, 54, 60, 59, 50, 66, 65, 56, 55, 54, 46, 56, 52, 44, 48, 47, 41, 55, 56, 49, 59, 57, 51, 52, 54, 46, 37, 38, 31, 48, 49, 40, 37, 39, 29, 63, 64, 55, 66, 68, 56, 114, 114, 103, 84, 87, 70, 104, 106, 92, 123, 126, 111, 192, 195, 178, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 203, 203, 187, 200, 197, 185, 201, 199, 186, 196, 194, 181, 198, 195, 182, 196, 194, 181, 199, 196, 185, 198, 195, 182, 196, 194, 181, 198, 195, 182, 191, 188, 175, 191, 188, 175, 187, 185, 172, 187, 184, 174, 187, 184, 174, 22, 18, 15, 73, 72, 63, 57, 57, 48, 70, 69, 61, 65, 67, 57, 56, 57, 48, 47, 51, 41, 43, 50, 39, 40, 44, 34, 56, 57, 48, 43, 47, 36, 50, 52, 42, 44, 46, 36, 45, 44, 35, 18, 17, 8, 44, 41, 33, 39, 36, 28, 25, 22, 14, 36, 36, 25, 39, 36, 26, 48, 48, 36, 25, 24, 13, 52, 51, 40, 49, 49, 38, 35, 35, 24, 48, 48, 36, 42, 42, 29, 40, 40, 26, 43, 43, 30, 42, 44, 32, 35, 37, 25, 37, 40, 25, 36, 38, 24, 38, 42, 29, 31, 33, 23, 38, 37, 26, 47, 46, 35, 38, 37, 26, 41, 41, 29, 47, 45, 34, 47, 45, 34, 52, 50, 37, 52, 49, 39, 46, 43, 33, 51, 48, 38, 75, 73, 62, 53, 52, 41, 56, 56, 45, 55, 55, 43, 51, 48, 38, 65, 62, 52, 47, 46, 35, 68, 67, 56, 54, 53, 42, 57, 59, 45, 61, 60, 49, 48, 48, 34, 42, 42, 31, 49, 49, 38, 62, 62, 48, 56, 56, 45, 56, 56, 45, 58, 55, 47, 56, 52, 44, 53, 52, 43, 57, 57, 48, 47, 46, 37, 53, 52, 43, 44, 46, 34, 54, 53, 42, 42, 42, 31, 53, 52, 41, 64, 64, 53, 47, 46, 35, 49, 49, 38, 47, 46, 35, 59, 58, 47, 61, 60, 49, 47, 46, 35, 41, 41, 29, 62, 62, 50, 57, 57, 46, 52, 51, 40, 71, 71, 60, 63, 63, 52, 56, 56, 45, 73, 72, 61, 66, 65, 54, 61, 60, 49, 62, 62, 50, 57, 57, 46, 59, 58, 47, 64, 64, 53, 57, 57, 46, 53, 52, 41, 54, 53, 42, 64, 64, 53, 53, 52, 41, 55, 55, 43, 82, 81, 70, 67, 66, 55, 71, 71, 60, 88, 87, 76, 66, 65, 54, 70, 67, 56, 59, 58, 47, 47, 46, 35, 78, 78, 65, 67, 67, 53, 57, 57, 46, 50, 50, 37, 55, 57, 45, 96, 95, 84, 80, 79, 68, 48, 50, 38, 67, 66, 55, 50, 50, 39, 55, 55, 43, 85, 85, 74, 62, 63, 54, 97, 96, 85, 74, 76, 64, 57, 59, 47, 57, 59, 49, 45, 47, 35, 53, 55, 46, 54, 58, 48, 62, 63, 54, 60, 62, 52, 61, 65, 55, 50, 52, 42, 67, 66, 57, 52, 54, 44, 51, 53, 43, 59, 57, 51, 50, 46, 41, 52, 51, 42, 55, 54, 48, 55, 54, 48, 71, 71, 62, 71, 71, 62, 94, 93, 80, 110, 109, 98, 117, 117, 103, 132, 132, 118, 251, 254, 239, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 205, 202, 187, 201, 199, 184, 193, 191, 178, 198, 195, 182, 198, 195, 182, 196, 194, 181, 196, 194, 183, 201, 198, 188, 194, 192, 179, 191, 188, 175, 187, 185, 172, 185, 182, 169, 188, 186, 173, 182, 180, 169, 158, 157, 146, 32, 27, 31, 85, 85, 76, 60, 62, 50, 57, 57, 48, 51, 53, 43, 56, 55, 47, 53, 55, 46, 45, 47, 37, 40, 41, 32, 47, 48, 39, 45, 49, 39, 33, 34, 25, 38, 37, 28, 47, 46, 37, 44, 46, 36, 31, 30, 21, 40, 39, 30, 45, 44, 35, 35, 35, 24, 37, 34, 26, 33, 32, 23, 48, 47, 39, 27, 26, 18, 30, 32, 22, 44, 46, 34, 43, 45, 33, 38, 38, 24, 43, 43, 30, 41, 41, 27, 52, 55, 40, 56, 53, 42, 26, 26, 14, 39, 
38, 27, 36, 38, 26, 40, 39, 28, 41, 41, 29, 42, 44, 32, 50, 52, 40, 50, 50, 39, 60, 59, 48, 35, 35, 22, 53, 50, 40, 52, 49, 39, 56, 53, 42, 66, 63, 53, 64, 61, 50, 47, 45, 34, 65, 62, 52, 42, 42, 31, 52, 49, 41, 46, 43, 33, 54, 53, 42, 54, 53, 42, 52, 54, 42, 56, 56, 45, 69, 69, 57, 57, 57, 46, 63, 63, 52, 62, 62, 50, 50, 50, 39, 53, 52, 41, 53, 52, 41, 38, 37, 26, 42, 42, 31, 48, 48, 36, 62, 61, 52, 54, 53, 44, 57, 57, 48, 59, 58, 49, 46, 45, 36, 52, 51, 40, 54, 53, 44, 53, 52, 43, 55, 55, 43, 63, 63, 52, 54, 53, 42, 59, 58, 47, 47, 46, 35, 61, 60, 49, 53, 55, 43, 71, 71, 60, 52, 51, 40, 53, 52, 41, 47, 46, 35, 62, 63, 54, 61, 60, 49, 50, 52, 42, 64, 64, 55, 59, 61, 49, 59, 58, 47, 56, 56, 45, 44, 46, 34, 70, 70, 59, 49, 51, 37, 73, 75, 63, 64, 64, 53, 69, 69, 57, 73, 72, 61, 63, 63, 52, 71, 71, 60, 46, 45, 34, 52, 54, 42, 55, 55, 43, 64, 64, 53, 62, 62, 50, 95, 94, 83, 62, 62, 50, 52, 51, 40, 82, 81, 70, 55, 55, 43, 73, 72, 61, 55, 55, 43, 71, 73, 61, 67, 66, 55, 137, 136, 125, 89, 88, 79, 78, 80, 70, 71, 73, 63, 74, 73, 64, 57, 57, 48, 60, 62, 52, 69, 70, 61, 65, 67, 57, 59, 61, 51, 52, 54, 44, 52, 54, 44, 53, 55, 46, 59, 61, 51, 55, 56, 47, 48, 49, 40, 49, 48, 40, 47, 46, 37, 41, 40, 34, 50, 51, 44, 54, 53, 44, 65, 62, 54, 70, 70, 59, 95, 94, 83, 102, 101, 90, 132, 132, 118, 125, 127, 114, 255, 255, 242, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 198, 196, 178, 200, 198, 183, 199, 195, 180, 196, 191, 179, 191, 188, 175, 193, 191, 178, 192, 189, 177, 185, 182, 169, 193, 191, 178, 185, 180, 168, 184, 179, 167, 185, 182, 169, 178, 175, 162, 167, 164, 154, 117, 114, 104, 67, 62, 64, 80, 79, 70, 61, 60, 49, 47, 46, 35, 53, 52, 43, 71, 73, 63, 52, 54, 44, 31, 35, 22, 22, 24, 12, 43, 43, 34, 36, 40, 29, 57, 57, 46, 41, 42, 33, 39, 38, 29, 38, 40, 30, 25, 24, 15, 43, 43, 34, 39, 38, 27, 31, 30, 19, 28, 28, 17, 33, 32, 21, 31, 30, 19, 41, 41, 29, 38, 37, 26, 37, 39, 27, 38, 37, 26, 40, 39, 28, 40, 40, 26, 57, 57, 44, 44, 46, 34, 39, 38, 27, 48, 48, 36, 42, 42, 31, 51, 53, 41, 30, 32, 20, 34, 36, 24, 37, 39, 27, 55, 55, 43, 25, 32, 18, 49, 49, 38, 47, 46, 35, 51, 48, 36, 53, 50, 40, 59, 57, 44, 64, 64, 51, 49, 49, 38, 58, 55, 43, 60, 59, 48, 53, 52, 41, 57, 57, 48, 41, 41, 29, 43, 43, 32, 49, 49, 38, 44, 47, 32, 51, 54, 39, 56, 56, 45, 70, 72, 58, 59, 58, 47, 53, 55, 43, 50, 50, 39, 52, 51, 40, 60, 59, 48, 56, 53, 42, 54, 53, 42, 53, 52, 43, 48, 47, 39, 54, 53, 44, 53, 52, 43, 55, 54, 46, 61, 60, 49, 56, 55, 47, 52, 51, 42, 45, 47, 37, 59, 58, 47, 54, 53, 42, 55, 57, 45, 62, 62, 50, 42, 42, 31, 59, 58, 47, 45, 44, 33, 55, 55, 43, 52, 51, 40, 48, 48, 
36, 54, 53, 42, 29, 31, 19, 69, 68, 60, 57, 59, 47, 55, 56, 47, 52, 54, 42, 53, 52, 41, 62, 62, 50, 68, 67, 56, 48, 48, 36, 66, 68, 56, 61, 60, 49, 62, 61, 52, 40, 39, 28, 69, 69, 57, 62, 62, 50, 71, 71, 62, 66, 65, 54, 49, 50, 41, 46, 45, 36, 71, 71, 60, 81, 80, 69, 50, 50, 39, 41, 41, 29, 50, 50, 37, 75, 74, 63, 52, 51, 40, 66, 65, 52, 63, 65, 53, 83, 83, 71, 66, 68, 56, 72, 74, 64, 55, 55, 43, 48, 47, 39, 100, 102, 90, 64, 66, 54, 65, 67, 57, 57, 59, 49, 52, 54, 44, 52, 54, 44, 56, 57, 48, 53, 55, 46, 47, 48, 39, 41, 42, 33, 52, 51, 42, 45, 47, 37, 45, 47, 37, 52, 54, 44, 62, 61, 52, 90, 90, 78, 94, 93, 80, 99, 99, 86, 145, 145, 131, 126, 126, 112, 248, 248, 235, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 191, 189, 171, 193, 189, 175, 192, 188, 173, 191, 187, 172, 190, 185, 173, 187, 185, 172, 188, 186, 173, 182, 180, 167, 183, 178, 166, 182, 177, 165, 179, 174, 162, 176, 171, 159, 171, 166, 154, 165, 160, 149, 52, 49, 39, 83, 80, 72, 74, 73, 64, 64, 64, 53, 33, 32, 23, 60, 62, 52, 57, 59, 49, 53, 55, 43, 40, 45, 32, 24, 26, 14, 39, 38, 27, 36, 40, 27, 30, 27, 17, 42, 41, 33, 27, 27, 15, 24, 26, 14, 52, 51, 42, 40, 39, 30, 42, 39, 28, 39, 36, 26, 37, 34, 24, 30, 29, 18, 49, 49, 38, 38, 37, 26, 39, 36, 26, 35, 35, 24, 45, 44, 33, 29, 26, 13, 34, 34, 20, 45, 44, 33, 47, 49, 34, 39, 38, 27, 36, 38, 26, 38, 40, 28, 31, 33, 21, 36, 36, 25, 23, 27, 14, 37, 39, 27, 37, 39, 27, 62, 64, 52, 64, 64, 53, 48, 48, 36, 45, 44, 33, 53, 53, 39, 53, 53, 39, 50, 50, 39, 33, 32, 21, 43, 43, 30, 61, 60, 49, 41, 41, 29, 57, 57, 48, 57, 57, 48, 56, 56, 45, 53, 52, 41, 48, 49, 40, 45, 47, 35, 54, 53, 42, 54, 54, 40, 57, 57, 46, 48, 48, 36, 52, 51, 40, 46, 43, 33, 54, 52, 41, 61, 58, 50, 47, 45, 34, 52, 49, 39, 56, 52, 44, 50, 50, 41, 70, 66, 58, 55, 54, 46, 57, 57, 48, 43, 43, 34, 48, 47, 39, 48, 48, 36, 52, 51, 40, 57, 59, 47, 47, 48, 36, 68, 67, 56, 50, 52, 40, 53, 52, 41, 44, 46, 34, 73, 72, 61, 55, 55, 43, 52, 51, 40, 54, 53, 44, 50, 52, 42, 56, 55, 47, 50, 52, 42, 50, 50, 41, 41, 43, 31, 59, 58, 47, 57, 57, 48, 60, 59, 50, 66, 65, 56, 62, 64, 52, 56, 58, 46, 57, 59, 49, 50, 52, 40, 57, 59, 49, 69, 70, 61, 69, 71, 59, 48, 50, 38, 65, 67, 57, 66, 68, 58, 67, 66, 57, 57, 57, 48, 66, 65, 54, 71, 71, 60, 75, 74, 65, 67, 66, 55, 57, 59, 47, 66, 68, 56, 56, 56, 45, 69, 71, 59, 42, 44, 32, 65, 67, 57, 74, 76, 64, 69, 71, 59, 43, 45, 33, 47, 48, 39, 36, 40, 29, 51, 53, 43, 43, 45, 35, 64, 64, 55, 42, 41, 33, 43, 43, 32, 60, 60, 46, 75, 75, 61, 85, 85, 72, 95, 95, 81, 108, 108, 92, 119, 119, 103, 127, 128, 112, 
223, 223, 207, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 191, 189, 171, 192, 188, 171, 198, 192, 178, 189, 182, 171, 183, 178, 166, 184, 179, 167, 181, 176, 164, 175, 170, 158, 167, 162, 150, 172, 167, 156, 174, 167, 156, 163, 158, 146, 160, 155, 143, 134, 129, 119, 9, 6, 0, 80, 75, 64, 62, 62, 50, 49, 49, 38, 52, 51, 40, 41, 41, 29, 66, 65, 54, 52, 51, 40, 35, 39, 26, 44, 46, 34, 49, 49, 38, 18, 17, 8, 40, 37, 29, 48, 49, 40, 45, 47, 35, 42, 44, 32, 48, 47, 41, 31, 33, 21, 30, 32, 20, 16, 18, 6, 22, 24, 12, 38, 40, 28, 38, 37, 26, 54, 53, 42, 32, 31, 20, 46, 45, 34, 36, 36, 23, 35, 37, 23, 30, 29, 16, 33, 35, 20, 36, 38, 24, 47, 48, 36, 47, 46, 35, 42, 42, 31, 52, 51, 40, 36, 33, 22, 38, 37, 26, 30, 29, 18, 57, 57, 46, 53, 52, 43, 55, 55, 43, 47, 46, 35, 51, 48, 38, 46, 43, 33, 46, 44, 31, 51, 48, 38, 49, 46, 35, 43, 43, 32, 45, 44, 33, 47, 46, 35, 54, 53, 42, 46, 45, 34, 52, 51, 40, 52, 51, 40, 43, 43, 32, 51, 53, 41, 48, 48, 36, 55, 55, 41, 57, 57, 46, 52, 51, 40, 50, 50, 39, 59, 56, 46, 59, 56, 46, 54, 53, 42, 57, 54, 43, 54, 51, 43, 48, 47, 39, 71, 71, 62, 53, 52, 43, 52, 51, 42, 51, 53, 43, 59, 61, 51, 66, 65, 56, 48, 50, 38, 53, 52, 41, 50, 52, 40, 56, 58, 46, 55, 55, 43, 34, 36, 24, 46, 45, 34, 56, 56, 45, 53, 52, 41, 56, 56, 45, 62, 61, 52, 43, 43, 34, 53, 52, 43, 60, 59, 50, 62, 61, 52, 60, 62, 52, 34, 35, 26, 64, 64, 55, 54, 53, 44, 50, 50, 39, 69, 69, 57, 72, 74, 62, 62, 63, 54, 55, 56, 47, 59, 61, 51, 66, 68, 58, 58, 60, 50, 53, 55, 43, 71, 73, 61, 60, 62, 52, 58, 60, 50, 71, 73, 63, 61, 60, 51, 83, 83, 71, 78, 78, 69, 59, 58, 47, 63, 63, 52, 52, 54, 44, 48, 50, 38, 59, 61, 49, 48, 50, 38, 51, 55, 42, 48, 49, 40, 47, 48, 36, 41, 42, 33, 53, 55, 43, 45, 47, 37, 45, 47, 37, 48, 49, 40, 57, 57, 48, 54, 53, 42, 80, 79, 66, 90, 90, 76, 87, 87, 73, 127, 127, 114, 119, 119, 103, 182, 182, 166, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 187, 185, 168, 179, 175, 159, 184, 178, 164, 181, 174, 160, 177, 171, 157, 183, 176, 165, 172, 164, 153, 171, 166, 154, 163, 158, 146, 167, 162, 150, 166, 159, 147, 154, 147, 136, 151, 146, 134, 78, 73, 63, 40, 34, 27, 81, 78, 68, 65, 60, 50, 55, 55, 43, 40, 41, 29, 46, 50, 38, 42, 44, 30, 40, 45, 32, 26, 31, 18, 52, 54, 42, 31, 30, 19, 43, 43, 34, 39, 36, 28, 47, 46, 37, 36, 36, 25, 16, 18, 6, 36, 38, 28, 36, 38, 28, 26, 30, 20, 28, 30, 18, 45, 42, 32, 35, 35, 24, 26, 26, 14, 46, 43, 33, 47, 45, 34, 50, 50, 39, 27, 27, 15, 43, 45, 33, 36, 38, 26, 34, 36, 24, 45, 47, 37, 37, 39, 29, 39, 38, 27, 57, 59, 47, 38, 37, 28, 33, 32, 21, 41, 41, 29, 47, 46, 35, 49, 45, 37, 50, 47, 36, 35, 35, 24, 56, 53, 42, 40, 38, 27, 52, 50, 37, 60, 57, 47, 47, 47, 33, 50, 47, 36, 42, 39, 28, 47, 46, 35, 66, 65, 54, 41, 43, 31, 59, 61, 49, 44, 46, 34, 52, 54, 44, 53, 55, 43, 49, 51, 39, 50, 52, 40, 50, 50, 41, 54, 51, 43, 50, 52, 40, 56, 55, 47, 54, 51, 43, 56, 52, 44, 46, 45, 36, 52, 49, 39, 58, 55, 47, 55, 54, 48, 61, 60, 51, 62, 61, 52, 66, 65, 56, 49, 48, 40, 42, 43, 34, 53, 55, 46, 52, 54, 44, 59, 58, 47, 47, 48, 36, 49, 51, 39, 55, 57, 45, 51, 53, 41, 59, 58, 47, 55, 54, 46, 59, 56, 46, 56, 53, 42, 56, 55, 47, 59, 58, 49, 53, 52, 45, 57, 57, 48, 57, 57, 48, 62, 61, 52, 53, 55, 46, 67, 66, 57, 69, 68, 60, 45, 44, 33, 50, 50, 41, 55, 57, 45, 76, 77, 68, 67, 69, 60, 37, 39, 29, 52, 54, 44, 49, 50, 41, 62, 63, 54, 51, 53, 43, 48, 49, 40, 59, 58, 49, 63, 64, 55, 60, 59, 50, 48, 48, 36, 46, 45, 34, 42, 44, 32, 55, 57, 45, 42, 44, 32, 40, 41, 29, 44, 46, 36, 43, 45, 33, 40, 41, 32, 40, 41, 32, 40, 41, 29, 41, 43, 31, 35, 37, 25, 55, 55, 43, 102, 101, 90, 102, 101, 90, 110, 109, 98, 113, 113, 100, 132, 132, 118, 203, 203, 189, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 170, 166, 149, 164, 160, 143, 169, 163, 147, 174, 167, 154, 162, 156, 140, 166, 159, 145, 158, 150, 139, 159, 152, 140, 156, 151, 139, 146, 139, 128, 147, 140, 129, 154, 147, 136, 140, 133, 124, 0, 0, 0, 88, 85, 75, 56, 56, 45, 59, 56, 46, 54, 53, 44, 55, 57, 45, 43, 47, 32, 42, 44, 32, 37, 41, 28, 26, 28, 15, 41, 41, 29, 39, 36, 26, 46, 45, 34, 38, 37, 26, 36, 36, 27, 42, 39, 28, 35, 34, 26, 33, 32, 23, 32, 31, 22, 32, 31, 22, 29, 31, 21, 45, 42, 32, 45, 42, 32, 26, 24, 11, 29, 26, 13, 40, 38, 25, 26, 26, 12, 43, 45, 31, 46, 51, 36, 33, 35, 22, 43, 45, 33, 35, 39, 26, 40, 41, 29, 37, 39, 27, 31, 33, 21, 48, 48, 36, 56, 53, 42, 43, 43, 32, 45, 42, 32, 56, 53, 42, 56, 53, 42, 42, 42, 29, 49, 46, 33, 59, 58, 47, 56, 53, 40, 61, 59, 46, 59, 58, 47, 39, 39, 25, 52, 51, 40, 47, 46, 35, 34, 34, 22, 52, 54, 42, 60, 59, 48, 40, 39, 30, 47, 48, 36, 50, 50, 41, 55, 54, 46, 53, 52, 43, 52, 51, 40, 51, 48, 38, 49, 49, 38, 55, 54, 46, 65, 62, 54, 44, 41, 33, 49, 45, 37, 48, 47, 39, 64, 60, 52, 56, 52, 44, 57, 56, 50, 57, 56, 50, 54, 53, 44, 46, 45, 36, 75, 74, 65, 50, 52, 42, 56, 58, 46, 63, 63, 52, 67, 69, 57, 53, 55, 46, 60, 59, 50, 60, 59, 50, 55, 54, 46, 78, 78, 69, 63, 59, 51, 70, 69, 61, 58, 55, 47, 63, 59, 51, 61, 60, 51, 53, 52, 43, 61, 60, 51, 66, 65, 56, 52, 54, 44, 70, 69, 61, 68, 67, 58, 48, 47, 39, 45, 44, 35, 56, 58, 46, 69, 68, 60, 59, 61, 51, 51, 53, 43, 59, 61, 51, 48, 49, 40, 38, 42, 32, 55, 56, 47, 50, 52, 42, 45, 47, 37, 45, 47, 37, 49, 50, 41, 41, 40, 32, 40, 39, 30, 37, 39, 29, 41, 42, 33, 36, 38, 28, 27, 28, 19, 33, 35, 22, 37, 39, 27, 66, 68, 56, 72, 74, 62, 88, 91, 76, 118, 118, 104, 106, 108, 94, 117, 117, 103, 190, 190, 177, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 165, 156, 141, 165, 156, 141, 155, 149, 133, 155, 149, 135, 154, 147, 134, 147, 140, 127, 148, 141, 128, 142, 135, 124, 135, 128, 117, 132, 125, 114, 139, 132, 121, 127, 122, 110, 111, 110, 99, 9, 8, 0, 77, 76, 68, 61, 60, 49, 49, 49, 38, 59, 61, 51, 45, 49, 39, 31, 37, 24, 51, 55, 42, 27, 33, 22, 30, 34, 21, 33, 35, 22, 35, 37, 25, 24, 26, 14, 37, 39, 29, 35, 34, 26, 32, 31, 22, 32, 31, 22, 35, 31, 23, 40, 39, 30, 36, 36, 27, 37, 34, 26, 38, 35, 27, 37, 39, 27, 43, 40, 27, 41, 41, 27, 33, 33, 19, 41, 43, 29, 35, 35, 22, 38, 37, 
26, 36, 38, 26, 30, 32, 20, 32, 36, 24, 37, 41, 26, 43, 45, 33, 28, 32, 17, 28, 30, 18, 50, 52, 40, 36, 36, 25, 47, 46, 35, 39, 38, 27, 52, 49, 39, 31, 30, 19, 46, 43, 33, 56, 56, 45, 50, 50, 37, 50, 47, 34, 53, 52, 41, 45, 44, 33, 52, 51, 40, 42, 44, 32, 50, 50, 39, 47, 48, 36, 47, 46, 35, 52, 54, 42, 40, 41, 29, 37, 39, 27, 50, 52, 40, 49, 51, 39, 40, 39, 28, 48, 48, 36, 38, 40, 30, 58, 55, 47, 52, 49, 41, 53, 50, 42, 60, 59, 50, 52, 49, 41, 54, 53, 44, 56, 55, 47, 49, 48, 42, 53, 55, 48, 54, 53, 44, 41, 40, 34, 56, 57, 48, 52, 54, 44, 45, 47, 37, 55, 56, 47, 52, 54, 44, 41, 42, 33, 53, 52, 43, 59, 61, 51, 76, 75, 66, 50, 50, 41, 45, 43, 37, 49, 50, 41, 56, 55, 47, 69, 68, 60, 55, 54, 48, 61, 58, 50, 39, 38, 31, 56, 55, 49, 49, 48, 40, 57, 57, 48, 53, 52, 43, 54, 53, 44, 56, 57, 48, 86, 88, 78, 34, 35, 26, 47, 48, 39, 33, 34, 25, 37, 38, 31, 40, 41, 32, 44, 46, 36, 40, 41, 32, 31, 33, 23, 54, 53, 44, 42, 42, 31, 33, 32, 23, 37, 39, 27, 45, 47, 35, 47, 46, 35, 53, 52, 41, 76, 76, 62, 77, 77, 64, 111, 111, 97, 110, 110, 96, 120, 120, 107, 123, 123, 107, 227, 228, 212, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 146, 137, 122, 150, 141, 126, 144, 135, 120, 143, 134, 119, 137, 128, 113, 132, 123, 110, 142, 132, 120, 132, 123, 112, 141, 131, 121, 137, 127, 117, 131, 124, 110, 128, 123, 111, 73, 75, 63, 51, 48, 40, 70, 70, 59, 57, 59, 47, 37, 39, 27, 38, 40, 30, 44, 46, 36, 43, 45, 33, 40, 41, 29, 45, 47, 35, 29, 31, 19, 30, 32, 20, 24, 26, 14, 35, 35, 24, 36, 36, 27, 25, 24, 15, 39, 38, 27, 25, 24, 13, 39, 36, 26, 21, 20, 12, 42, 38, 30, 31, 30, 19, 50, 47, 39, 35, 35, 24, 33, 31, 20, 23, 26, 11, 38, 38, 24, 35, 37, 25, 31, 33, 21, 47, 46, 35, 40, 41, 29, 45, 47, 37, 37, 39, 27, 39, 44, 29, 43, 47, 34, 43, 45, 31, 36, 40, 25, 28, 30, 18, 46, 50, 38, 43, 43, 32, 54, 52, 41, 51, 48, 38, 44, 41, 31, 57, 54, 43, 46, 46, 32, 40, 38, 25, 45, 43, 30, 51, 54, 39, 51, 48, 38, 36, 36, 25, 47, 48, 36, 48, 50, 38, 42, 44, 32, 55, 55, 43, 43, 45, 33, 42, 44, 32, 60, 59, 48, 42, 46, 33, 45, 47, 37, 48, 47, 39, 56, 55, 47, 56, 55, 47, 42, 41, 33, 54, 53, 46, 50, 49, 43, 44, 45, 38, 64, 63, 57, 63, 64, 57, 55, 54, 48, 43, 42, 36, 46, 45, 38, 43, 43, 34, 52, 50, 44, 49, 50, 41, 50, 
52, 42, 45, 47, 37, 52, 54, 44, 48, 49, 40, 66, 67, 60, 51, 53, 43, 53, 55, 48, 51, 55, 44, 35, 36, 27, 56, 55, 49, 45, 47, 39, 42, 43, 36, 46, 45, 36, 53, 52, 43, 54, 53, 44, 47, 46, 37, 48, 48, 34, 47, 46, 35, 43, 43, 32, 39, 38, 29, 40, 39, 28, 39, 38, 29, 46, 45, 36, 43, 45, 35, 40, 41, 32, 40, 41, 32, 35, 36, 27, 39, 38, 29, 35, 34, 26, 34, 33, 25, 56, 56, 45, 66, 65, 54, 87, 86, 75, 81, 81, 67, 92, 92, 79, 108, 107, 94, 118, 118, 102, 113, 113, 100, 105, 105, 92, 227, 228, 212, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 142, 133, 118, 136, 127, 112, 134, 125, 110, 134, 124, 112, 134, 124, 112, 128, 118, 106, 129, 119, 109, 129, 119, 109, 135, 125, 115, 126, 116, 103, 115, 107, 96, 120, 113, 102, 34, 36, 24, 48, 50, 38, 57, 59, 47, 49, 51, 39, 55, 55, 43, 48, 49, 40, 43, 45, 33, 40, 42, 27, 35, 37, 25, 39, 43, 33, 24, 28, 15, 29, 31, 19, 33, 38, 25, 30, 29, 18, 27, 26, 18, 36, 36, 27, 34, 34, 22, 56, 52, 44, 39, 33, 24, 47, 44, 36, 23, 22, 11, 25, 24, 13, 36, 36, 27, 47, 48, 36, 31, 30, 19, 42, 42, 29, 23, 22, 11, 35, 39, 26, 42, 44, 32, 23, 27, 14, 28, 31, 21, 32, 36, 26, 37, 41, 30, 28, 31, 21, 36, 38, 26, 28, 30, 18, 38, 40, 28, 67, 64, 54, 50, 47, 36, 71, 66, 56, 48, 43, 31, 50, 50, 39, 60, 59, 48, 42, 39, 28, 36, 38, 26, 45, 44, 33, 61, 59, 48, 24, 26, 14, 55, 55, 43, 41, 41, 29, 55, 55, 43, 48, 47, 39, 49, 48, 40, 53, 52, 45, 51, 53, 43, 55, 56, 49, 47, 48, 41, 51, 52, 45, 51, 52, 45, 48, 49, 42, 50, 49, 43, 49, 48, 42, 52, 50, 44, 41, 40, 34, 56, 57, 50, 47, 48, 41, 47, 46, 39, 54, 53, 46, 54, 53, 46, 50, 50, 41, 54, 53, 46, 56, 55, 47, 45, 44, 35, 48, 49, 42, 35, 36, 27, 55, 56, 47, 45, 47, 37, 50, 52, 42, 48, 52, 44, 49, 50, 43, 65, 66, 59, 43, 46, 38, 48, 49, 42, 50, 53, 45, 48, 49, 42, 35, 38, 28, 43, 47, 36, 49, 50, 41, 42, 45, 35, 38, 40, 30, 58, 60, 50, 31, 33, 23, 44, 46, 36, 41, 42, 33, 47, 46, 35, 37, 39, 27, 39, 38, 27, 52, 51, 40, 33, 34, 25, 56, 56, 45, 77, 74, 61, 83, 81, 68, 80, 78, 65, 86, 83, 70, 87, 85, 72, 93, 90, 78, 95, 93, 80, 111, 109, 96, 128, 125, 112, 255, 255, 246, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 127, 121, 105, 134, 124, 112, 130, 121, 108, 121, 111, 99, 120, 110, 98, 128, 118, 106, 123, 113, 103, 128, 118, 108, 119, 112, 101, 120, 110, 100, 112, 105, 94, 99, 92, 81, 0, 0, 0, 74, 76, 64, 57, 61, 46, 44, 46, 34, 51, 53, 43, 39, 43, 33, 37, 41, 28, 43, 47, 36, 44, 48, 35, 33, 37, 27, 40, 45, 32, 29, 33, 20, 34, 36, 24, 34, 36, 24, 36, 36, 25, 43, 43, 32, 27, 29, 17, 33, 32, 21, 34, 29, 19, 42, 42, 31, 25, 24, 13, 26, 26, 14, 28, 27, 19, 33, 34, 25, 42, 38, 32, 41, 40, 32, 40, 39, 28, 27, 28, 21, 43, 45, 35, 25, 29, 17, 42, 43, 36, 46, 45, 36, 40, 39, 30, 41, 42, 33, 36, 38, 28, 33, 35, 22, 44, 46, 36, 40, 35, 23, 60, 51, 40, 42, 42, 31, 51, 46, 36, 56, 53, 40, 46, 45, 34, 49, 48, 40, 49, 49, 38, 45, 44, 33, 48, 48, 36, 59, 61, 49, 53, 55, 43, 29, 31, 19, 51, 53, 41, 50, 52, 40, 45, 47, 35, 41, 42, 33, 47, 48, 39, 48, 49, 40, 41, 42, 33, 39, 43, 33, 49, 50, 43, 44, 46, 36, 43, 44, 37, 45, 47, 39, 49, 50, 43, 63, 62, 56, 35, 36, 29, 50, 51, 44, 52, 54, 46, 41, 42, 35, 47, 48, 41, 43, 42, 36, 37, 38, 31, 40, 41, 34, 45, 47, 39, 48, 49, 40, 43, 44, 37, 42, 43, 36, 44, 46, 36, 50, 52, 42, 39, 43, 35, 44, 45, 38, 36, 37, 30, 40, 44, 36, 43, 49, 41, 43, 46, 38, 27, 33, 24, 46, 52, 41, 37, 38, 31, 34, 35, 26, 40, 41, 32, 38, 37, 28, 24, 23, 12, 33, 32, 21, 34, 34, 22, 32, 31, 20, 92, 92, 81, 59, 58, 47, 109, 109, 95, 88, 86, 73, 97, 95, 82, 121, 119, 103, 156, 153, 140, 113, 110, 95, 149, 147, 131, 129, 126, 114, 230, 228, 213, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 119, 112, 101, 119, 112, 99, 119, 109, 98, 110, 101, 90, 115, 106, 93, 108, 99, 86, 117, 108, 97, 109, 99, 89, 109, 99, 89, 102, 93, 80, 125, 118, 107, 92, 87, 75, 0, 0, 0, 55, 60, 45, 55, 60, 47, 45, 47, 35, 36, 40, 27, 37, 39, 29, 43, 47, 34, 32, 36, 24, 38, 40, 28, 26, 30, 20, 37, 41, 28, 35, 39, 26, 30, 32, 20, 41, 43, 29, 38, 40, 28, 41, 43, 31, 31, 28, 18, 31, 28, 18, 36, 33, 22, 26, 24, 13, 40, 38, 27, 35, 35, 24, 37, 34, 24, 36, 36, 27, 38, 35, 27, 41, 40, 32, 33, 32, 23, 33, 32, 23, 43, 43, 34, 55, 56, 47, 22, 24, 14, 13, 14, 5, 47, 48, 39, 35, 36, 27, 33, 34, 25, 53, 52, 43, 40, 38, 27, 47, 41, 32, 53, 51, 38, 47, 45, 34, 45, 42, 32, 40, 38, 27, 53, 50, 40, 29, 31, 21, 50, 50, 41, 42, 42, 31, 52, 51, 42, 45, 44, 33, 43, 43, 32, 60, 59, 48, 50, 52, 40, 39, 43, 33, 44, 48, 37, 57, 61, 50, 33, 34, 25, 49, 50, 41, 47, 48, 39, 59, 63, 52, 26, 30, 20, 46, 50, 42, 64, 67, 59, 53, 56, 51, 41, 42, 37, 63, 64, 57, 41, 42, 35, 43, 44, 37, 45, 47, 39, 40, 41, 34, 34, 35, 28, 47, 48, 41, 48, 49, 42, 37, 38, 31, 45, 47, 39, 48, 49, 42, 59, 61, 51, 44, 46, 36, 39, 38, 29, 37, 39, 29, 41, 42, 33, 32, 36, 26, 42, 43, 36, 25, 29, 21, 27, 28, 21, 38, 40, 32, 26, 27, 20, 37, 38, 31, 43, 45, 35, 41, 43, 31, 35, 35, 24, 66, 65, 54, 74, 74, 60, 75, 75, 61, 85, 82, 69, 104, 102, 87, 99, 96, 83, 110, 108, 95, 115, 113, 98, 116, 114, 101, 212, 209, 194, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 98, 92, 74, 103, 97, 78, 102, 95, 81, 97, 90, 77, 98, 91, 80, 103, 96, 84, 96, 89, 78, 104, 92, 82, 101, 91, 81, 98, 91, 80, 94, 90, 76, 72, 74, 62, 6, 8, 0, 53, 58, 43, 45, 49, 36, 33, 38, 25, 33, 38, 25, 41, 43, 31, 26, 31, 18, 42, 45, 35, 15, 19, 6, 33, 38, 25, 28, 32, 19, 27, 29, 17, 30, 34, 21, 35, 39, 26, 23, 25, 13, 31, 30, 19, 51, 43, 34, 44, 41, 31, 42, 39, 28, 39, 36, 28, 36, 36, 25, 43, 43, 32, 32, 31, 22, 31, 30, 21, 44, 41, 33, 47, 46, 37, 49, 49, 38, 39, 36, 28, 40, 38, 27, 40, 41, 29, 33, 32, 23, 37, 39, 27, 30, 29, 18, 43, 45, 33, 23, 26, 11, 33, 31, 20, 33, 31, 18, 36, 33, 20, 60, 58, 45, 42, 39, 28, 30, 29, 18, 40, 39, 28, 43, 40, 29, 43, 40, 29, 56, 58, 46, 36, 38, 26, 50, 52, 40, 31, 33, 21, 46, 45, 34, 43, 43, 32, 36, 40, 29, 40, 41, 34, 50, 53, 45, 44, 45, 38, 31, 33, 25, 47, 51, 43, 55, 56, 49, 39, 45, 36, 38, 42, 32, 67, 71, 63, 41, 42, 35, 44, 47, 41, 43, 46, 40, 39, 38, 31, 42, 43, 36, 34, 35, 28, 39, 43, 35, 31, 37, 28, 38, 39, 34, 44, 45, 38, 33, 31, 27, 31, 29, 23, 34, 35, 28, 28, 27, 21, 34, 33, 25, 33, 32, 23, 41, 40, 32, 38, 40, 30, 27, 28, 19, 53, 55, 46, 38, 40, 32, 34, 35, 28, 58, 59, 52, 38, 40, 32, 77, 78, 71, 88, 87, 78, 87, 86, 77, 102, 101, 90, 83, 81, 68, 91, 88, 75, 104, 102, 89, 97, 92, 78, 130, 126, 112, 242, 238, 223, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 96, 91, 79, 95, 88, 76, 87, 85, 72, 96, 89, 76, 103, 98, 
86, 84, 77, 66, 97, 90, 79, 89, 89, 75, 91, 86, 76, 86, 83, 70, 85, 82, 71, 41, 43, 31, 57, 61, 48, 48, 53, 38, 26, 33, 19, 51, 53, 41, 38, 40, 30, 48, 48, 36, 34, 36, 24, 30, 32, 22, 36, 36, 25, 30, 32, 20, 33, 34, 25, 36, 38, 26, 37, 39, 27, 44, 41, 31, 23, 22, 11, 17, 21, 8, 44, 27, 21, 31, 33, 21, 36, 36, 25, 49, 45, 37, 26, 25, 16, 29, 31, 19, 39, 36, 28, 32, 31, 20, 42, 39, 28, 46, 43, 35, 32, 31, 20, 35, 34, 26, 40, 37, 29, 32, 36, 24, 26, 27, 18, 29, 33, 22, 33, 32, 21, 22, 29, 15, 49, 49, 36, 58, 43, 36, 31, 29, 16, 47, 37, 29, 35, 39, 26, 21, 23, 11, 33, 38, 25, 38, 42, 29, 39, 46, 30, 57, 59, 47, 35, 39, 26, 48, 50, 38, 40, 41, 29, 32, 37, 22, 45, 49, 36, 41, 41, 29, 45, 44, 33, 35, 36, 27, 50, 54, 43, 32, 36, 28, 31, 33, 25, 40, 44, 36, 38, 41, 34, 31, 34, 29, 52, 54, 46, 38, 41, 36, 52, 55, 49, 57, 60, 54, 38, 43, 37, 55, 56, 51, 38, 41, 36, 45, 46, 41, 34, 35, 30, 44, 47, 39, 37, 38, 31, 45, 43, 37, 42, 41, 35, 48, 47, 39, 34, 35, 28, 40, 39, 32, 82, 81, 72, 68, 67, 58, 70, 69, 61, 90, 89, 80, 84, 84, 72, 86, 83, 75, 91, 90, 82, 88, 85, 77, 108, 107, 96, 104, 102, 91, 102, 99, 89, 115, 112, 100, 186, 181, 169, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 90, 84, 68, 87, 85, 72, 82, 80, 67, 78, 78, 65, 89, 87, 72, 80, 78, 63, 82, 80, 67, 67, 67, 51, 84, 83, 75, 76, 76, 60, 68, 68, 52, 16, 18, 6, 74, 76, 66, 35, 37, 23, 38, 42, 29, 36, 38, 26, 56, 55, 47, 54, 52, 41, 41, 41, 29, 55, 54, 46, 31, 30, 19, 19, 24, 11, 29, 31, 19, 30, 29, 18, 35, 37, 25, 34, 34, 22, 19, 21, 8, 38, 37, 26, 22, 26, 13, 30, 32, 20, 29, 31, 21, 54, 51, 43, 28, 35, 21, 57, 57, 46, 35, 31, 23, 33, 30, 22, 35, 31, 23, 31, 30, 21, 32, 29, 19, 38, 35, 27, 35, 29, 22, 21, 23, 13, 24, 26, 16, 35, 38, 28, 25, 24, 15, 
18, 22, 10, 35, 32, 21, 29, 31, 17, 22, 30, 13, 35, 32, 19, 12, 14, 2, 36, 36, 25, 19, 21, 8, 39, 43, 31, 30, 34, 21, 29, 31, 17, 39, 43, 31, 42, 44, 32, 32, 31, 20, 23, 27, 14, 33, 35, 22, 37, 39, 27, 36, 40, 29, 33, 37, 29, 43, 51, 42, 33, 39, 30, 14, 32, 20, 20, 28, 18, 41, 47, 40, 40, 43, 38, 42, 43, 36, 42, 45, 39, 41, 42, 37, 38, 41, 34, 44, 45, 42, 54, 52, 48, 44, 45, 40, 56, 57, 52, 47, 45, 41, 55, 54, 48, 69, 70, 63, 80, 79, 70, 66, 65, 56, 109, 108, 99, 73, 72, 63, 84, 83, 75, 103, 100, 92, 95, 94, 83, 105, 105, 94, 96, 95, 84, 97, 95, 84, 152, 151, 140, 223, 220, 210, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 89, 84, 68, 76, 76, 62, 76, 76, 62, 78, 80, 66, 79, 76, 64, 77, 74, 59, 62, 62, 46, 77, 74, 61, 76, 76, 64, 74, 72, 57, 80, 80, 64, 9, 12, 0, 59, 68, 54, 46, 46, 32, 54, 41, 34, 19, 21, 8, 40, 42, 27, 48, 48, 36, 33, 35, 22, 28, 31, 21, 17, 21, 11, 24, 28, 15, 28, 28, 17, 39, 38, 27, 38, 37, 26, 31, 28, 18, 27, 27, 15, 39, 36, 26, 42, 39, 28, 21, 23, 11, 24, 26, 14, 17, 19, 7, 25, 29, 17, 20, 22, 10, 30, 29, 20, 36, 36, 27, 32, 31, 22, 28, 27, 19, 19, 18, 9, 31, 30, 21, 24, 23, 14, 28, 30, 18, 23, 25, 15, 31, 30, 21, 26, 26, 14, 24, 28, 15, 12, 21, 6, 25, 30, 15, 25, 35, 18, 38, 41, 26, 37, 40, 25, 38, 37, 26, 30, 32, 20, 17, 27, 10, 34, 41, 25, 38, 41, 26, 20, 29, 12, 33, 40, 24, 31, 33, 21, 35, 39, 24, 30, 32, 20, 33, 35, 22, 32, 36, 24, 35, 36, 27, 50, 47, 39, 39, 36, 28, 46, 45, 38, 39, 38, 31, 50, 51, 44, 42, 45, 37, 53, 55, 48, 40, 41, 34, 56, 57, 50, 70, 69, 61, 67, 64, 58, 87, 84, 76, 83, 80, 72, 88, 85, 75, 100, 97, 89, 105, 104, 96, 103, 102, 93, 96, 93, 83, 106, 103, 92, 121, 
118, 107, 138, 135, 125, 222, 219, 209, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, ]}
output0 = {i85: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 245, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]}
# Instantiate an example
Example((input0, output0))
+"""
diff --git a/tests/nnapi/specs/V1_0/resize_bilinear.mod.py b/tests/nnapi/specs/V1_0/resize_bilinear.mod.py
index c596407ac..17fb34007 100644
--- a/tests/nnapi/specs/V1_0/resize_bilinear.mod.py
+++ b/tests/nnapi/specs/V1_0/resize_bilinear.mod.py
@@ -2,9 +2,9 @@
model = Model()
i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
i2 = Output("op2", "TENSOR_FLOAT32", "{1, 3, 3, 1}")
-h = Int32Scalar("height", 3) # an int32_t scalar bias
w = Int32Scalar("width", 3)
-model = model.Operation("RESIZE_BILINEAR", i1, h, w).To(i2)
+h = Int32Scalar("height", 3)
+model = model.Operation("RESIZE_BILINEAR", i1, w, h).To(i2)
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/V1_0/resize_bilinear_2.mod.py b/tests/nnapi/specs/V1_0/resize_bilinear_2.mod.py
index 06a33b865..4035b4371 100644
--- a/tests/nnapi/specs/V1_0/resize_bilinear_2.mod.py
+++ b/tests/nnapi/specs/V1_0/resize_bilinear_2.mod.py
@@ -18,9 +18,9 @@
model = Model()
i1 = Input("op1", "TENSOR_FLOAT32", "{1, 2, 2, 2}")
i2 = Output("op2", "TENSOR_FLOAT32", "{1, 3, 3, 2}")
-h = Int32Scalar("height", 3) # an int32_t scalar bias
w = Int32Scalar("width", 3)
-model = model.Operation("RESIZE_BILINEAR", i1, h, w).To(i2)
+h = Int32Scalar("height", 3)
+model = model.Operation("RESIZE_BILINEAR", i1, w, h).To(i2)
# Example 1. Input in operand 0,
input0 = {i1: # input 0
diff --git a/tests/nnapi/specs/V1_0/svdf_bias_present.mod.py b/tests/nnapi/specs/V1_0/svdf_bias_present.mod.py
new file mode 100644
index 000000000..ae7d1e7b8
--- /dev/null
+++ b/tests/nnapi/specs/V1_0/svdf_bias_present.mod.py
@@ -0,0 +1,138 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+batches = 2
+features = 4
+rank = 1
+units = int(features / rank)
+input_size = 3
+memory_size = 10
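+# With rank 1, units == features == 4, so each state buffer below holds
+# memory_size * features = 40 values per batch entry.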
+
+model = Model()
+
+input = Input("input", "TENSOR_FLOAT32", "{%d, %d}" % (batches, input_size))
+weights_feature = Input("weights_feature", "TENSOR_FLOAT32", "{%d, %d}" % (features, input_size))
+weights_time = Input("weights_time", "TENSOR_FLOAT32", "{%d, %d}" % (features, memory_size))
+bias = Input("bias", "TENSOR_FLOAT32", "{%d}" % (units))
+state_in = Input("state_in", "TENSOR_FLOAT32", "{%d, %d}" % (batches, memory_size*features))
+rank_param = Int32Scalar("rank_param", rank)
+activation_param = Int32Scalar("activation_param", 0)
+state_out = IgnoredOutput("state_out", "TENSOR_FLOAT32", "{%d, %d}" % (batches, memory_size*features))
+output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (batches, units))
+
+model = model.Operation("SVDF", input, weights_feature, weights_time, bias, state_in,
+ rank_param, activation_param).To([state_out, output])
+
+input0 = {
+ input: [],
+ weights_feature: [
+ -0.31930989, -0.36118156, 0.0079667, 0.37613347,
+ 0.22197971, 0.12416199, 0.27901134, 0.27557442,
+ 0.3905206, -0.36137494, -0.06634006, -0.10640851
+ ],
+ weights_time: [
+ -0.31930989, 0.37613347, 0.27901134, -0.36137494, -0.36118156,
+ 0.22197971, 0.27557442, -0.06634006, 0.0079667, 0.12416199,
+
+ 0.3905206, -0.10640851, -0.0976817, 0.15294972, 0.39635518,
+ -0.02702999, 0.39296314, 0.15785322, 0.21931258, 0.31053296,
+
+ -0.36916667, 0.38031587, -0.21580373, 0.27072677, 0.23622236,
+ 0.34936687, 0.18174365, 0.35907319, -0.17493086, 0.324846,
+
+ -0.10781813, 0.27201805, 0.14324132, -0.23681851, -0.27115166,
+ -0.01580888, -0.14943552, 0.15465137, 0.09784451, -0.0337657
+ ],
+ bias: [1.0, 2.0, 3.0, 4.0],
+ state_in: [0 for _ in range(batches * memory_size * features)],
+}
+
+test_inputs = [
+ 0.12609188, -0.46347019, -0.89598465,
+ 0.12609188, -0.46347019, -0.89598465,
+
+ 0.14278367, -1.64410412, -0.75222826,
+ 0.14278367, -1.64410412, -0.75222826,
+
+ 0.49837467, 0.19278903, 0.26584083,
+ 0.49837467, 0.19278903, 0.26584083,
+
+ -0.11186574, 0.13164264, -0.05349274,
+ -0.11186574, 0.13164264, -0.05349274,
+
+ -0.68892461, 0.37783599, 0.18263303,
+ -0.68892461, 0.37783599, 0.18263303,
+
+ -0.81299269, -0.86831826, 1.43940818,
+ -0.81299269, -0.86831826, 1.43940818,
+
+ -1.45006323, -0.82251364, -1.69082689,
+ -1.45006323, -0.82251364, -1.69082689,
+
+ 0.03966608, -0.24936394, -0.77526885,
+ 0.03966608, -0.24936394, -0.77526885,
+
+ 0.11771342, -0.23761693, -0.65898693,
+ 0.11771342, -0.23761693, -0.65898693,
+
+ -0.89477462, 1.67204106, -0.53235275,
+ -0.89477462, 1.67204106, -0.53235275
+]
+
+golden_outputs = [
+ 1.014899, 1.9482339, 2.856275, 3.99728117,
+ 1.014899, 1.9482339, 2.856275, 3.99728117,
+
+ 1.068281, 1.837783, 2.847732, 4.00323521,
+ 1.068281, 1.837783, 2.847732, 4.00323521,
+
+ 0.9682179, 1.9666911, 3.0609602, 4.0333759,
+ 0.9682179, 1.9666911, 3.0609602, 4.0333759,
+
+ 0.99376901, 1.922299, 2.608807, 3.9863309,
+ 0.99376901, 1.922299, 2.608807, 3.9863309,
+
+ 1.201551, 1.835393, 2.820538, 3.9407261,
+ 1.201551, 1.835393, 2.820538, 3.9407261,
+
+ 1.0886511, 1.9124599, 2.730717, 4.0281379,
+ 1.0886511, 1.9124599, 2.730717, 4.0281379,
+
+ 0.798826, 1.413855, 2.371376, 3.9669588,
+ 0.798826, 1.413855, 2.371376, 3.9669588,
+
+ 0.9160904, 1.700671, 3.108746, 4.109808,
+ 0.9160904, 1.700671, 3.108746, 4.109808,
+
+ 1.419114, 1.762176, 2.577373, 4.175115,
+ 1.419114, 1.762176, 2.577373, 4.175115,
+
+ 1.36726, 1.477697, 2.543498, 3.824525,
+ 1.36726, 1.477697, 2.543498, 3.824525
+]
+
+output0 = {state_out: [0 for _ in range(batches * memory_size * features)],
+ output: []}
+
+# TODO: enable more data points after fixing the reference issue
+for i in range(1):
+ batch_start = i * input_size * batches
+ batch_end = batch_start + input_size * batches
+ input0[input] = test_inputs[batch_start:batch_end]
+ golden_start = i * units * batches
+ golden_end = golden_start + units * batches
+ output0[output] = golden_outputs[golden_start:golden_end]
+ Example((input0, output0))
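+
+# Each iteration above consumes input_size * batches input values and checks
+# units * batches golden values; range(1) keeps only the first window until
+# the reference issue in the TODO above is fixed.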
diff --git a/tests/nnapi/specs/generate_test.sh b/tests/nnapi/specs/generate_test.sh
index 2d7e5ccf9..2f1afbc66 100755
--- a/tests/nnapi/specs/generate_test.sh
+++ b/tests/nnapi/specs/generate_test.sh
@@ -47,7 +47,7 @@ function generate_one_testcase {
BASENAME=${BASENAME%".mod.py"};
local EXAMPLE="-e $NNAPI_BASE/$TEST_DIR/src/generated/examples/$BASENAME.example.cpp"
- $NNAPI_BASE/externals/nnapi_test_generator/test_generator.py ./`basename $1`\
+ $NNAPI_BASE/$TEST_DIR/nnapi_test_generator/android-p/test_generator.py ./`basename $1`\
-m $NNAPI_BASE/$TEST_DIR/src/generated/models/$BASENAME.model.cpp $EXAMPLE
ret=$?
# Paste these lines into TestGenerated.cpp
diff --git a/tests/nnapi/src/TestValidation.cpp b/tests/nnapi/src/TestValidation.cpp
index 2d605bb7e..19db43800 100644
--- a/tests/nnapi/src/TestValidation.cpp
+++ b/tests/nnapi/src/TestValidation.cpp
@@ -23,6 +23,9 @@
#include <sys/mman.h>
#include <stdio.h>
#include <stdlib.h>
+// Note: neurun only allows the activation operand to be a constant,
+// so these tests set operand #2 to a constant (ANEURALNETWORKS_FUSED_NONE)
+// and the model's inputs change from [0, 1, 2] to [0, 1].
// This file tests all the validations done by the Neural Networks API.
namespace {
@@ -84,6 +87,9 @@ class ValidationTestIdentify : public ValidationTestModel {
ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &scalarType), ANEURALNETWORKS_NO_ERROR);
ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
+ // neurun only allows the activation operand to be a constant
+ int32_t act = ANEURALNETWORKS_FUSED_NONE;
+ ASSERT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 2, &act, sizeof(act)), ANEURALNETWORKS_NO_ERROR);
uint32_t inList[3]{0, 1, 2};
uint32_t outList[1]{3};
ASSERT_EQ(ANeuralNetworksModel_addOperation(mModel, ANEURALNETWORKS_ADD, 3, inList, 1,
@@ -112,12 +118,15 @@ protected:
ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &scalarType), ANEURALNETWORKS_NO_ERROR);
ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
+ // neurun only allows the activation operand to be a constant
+ int32_t act = ANEURALNETWORKS_FUSED_NONE;
+ ASSERT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 2, &act, sizeof(act)), ANEURALNETWORKS_NO_ERROR);
uint32_t inList[3]{0, 1, 2};
uint32_t outList[1]{3};
ASSERT_EQ(ANeuralNetworksModel_addOperation(mModel, ANEURALNETWORKS_ADD, 3, inList, 1,
outList),
ANEURALNETWORKS_NO_ERROR);
- ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 3, inList, 1, outList),
+ ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 2, inList, 1, outList),
ANEURALNETWORKS_NO_ERROR);
ASSERT_EQ(ANeuralNetworksModel_finish(mModel), ANEURALNETWORKS_NO_ERROR);
@@ -390,44 +399,44 @@ TEST_F(ValidationTestModel, CreateCompilation) {
}
TEST_F(ValidationTestIdentify, Ok) {
- uint32_t inList[3]{0, 1, 2};
+ uint32_t inList[2]{0, 1};
uint32_t outList[1]{3};
- ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 3, inList, 1, outList),
+ ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 2, inList, 1, outList),
ANEURALNETWORKS_NO_ERROR);
ASSERT_EQ(ANeuralNetworksModel_finish(mModel), ANEURALNETWORKS_NO_ERROR);
}
TEST_F(ValidationTestIdentify, InputIsOutput) {
- uint32_t inList[3]{0, 1, 2};
+ uint32_t inList[2]{0, 1};
uint32_t outList[2]{3, 0};
- ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 3, inList, 2, outList),
+ ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 2, inList, 2, outList),
ANEURALNETWORKS_BAD_DATA);
}
TEST_F(ValidationTestIdentify, OutputIsInput) {
- uint32_t inList[4]{0, 1, 2, 3};
+ uint32_t inList[3]{0, 1, 3};
uint32_t outList[1]{3};
- ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 4, inList, 1, outList),
+ ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 3, inList, 1, outList),
ANEURALNETWORKS_BAD_DATA);
}
TEST_F(ValidationTestIdentify, DuplicateInputs) {
- uint32_t inList[4]{0, 1, 2, 0};
+ uint32_t inList[3]{0, 1, 0};
uint32_t outList[1]{3};
- ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 4, inList, 1, outList),
+ ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 3, inList, 1, outList),
ANEURALNETWORKS_BAD_DATA);
}
TEST_F(ValidationTestIdentify, DuplicateOutputs) {
- uint32_t inList[3]{0, 1, 2};
+ uint32_t inList[2]{0, 1};
uint32_t outList[2]{3, 3};
- ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 3, inList, 2, outList),
+ ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 2, inList, 2, outList),
ANEURALNETWORKS_BAD_DATA);
}
diff --git a/tests/scripts/common.sh b/tests/scripts/common.sh
index 12a35fd35..1e7ab39c7 100755
--- a/tests/scripts/common.sh
+++ b/tests/scripts/common.sh
@@ -32,3 +32,31 @@ function switch_nnfw_kernel_env()
fi
done
}
+
+function get_result_of_benchmark_test()
+{
+ local RUN_TEST_SH=$1
+ local DRIVER_BIN=$2
+ local MODEL=$3
+ local LOG_FILE=$4
+
+ local RET=0
+ $RUN_TEST_SH --driverbin=$DRIVER_BIN $MODEL > $LOG_FILE 2>&1
+ RET=$?
+ if [[ $RET -ne 0 ]]; then
+ echo "Testing $MODEL aborted... exit code: $RET"
+ exit $RET
+ fi
+
+ local RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
+ echo "$RESULT"
+}
+
+function print_result_of_benchmark_test()
+{
+ local NAME=$1
+ local RESULT=$2
+ local RESULT_FILE=$3
+
+ echo "$NAME $RESULT" > $RESULT_FILE
+}
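+
+# Illustrative call, assuming the paths used elsewhere in this patch
+# (run_benchmark.sh and test_performance.sh invoke these helpers this way;
+# the file names here are examples only):
+#   RESULT=$(get_result_of_benchmark_test tests/framework/run_test.sh \
+#            Product/out/bin/tflite_benchmark MODELS/mobilenet /tmp/mobilenet.log)
+#   print_result_of_benchmark_test "Mobilenet" "$RESULT" /tmp/mobilenet.result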
diff --git a/tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt b/tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt
new file mode 100644
index 000000000..0de5dd10e
--- /dev/null
+++ b/tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt
@@ -0,0 +1,31 @@
+add
+average_pool_2d
+concat
+conv_2d
+depthwise_conv_2d
+div
+floor
+fullyconnected/fc1
+l2_normalization
+l2_pool_2d
+logistic
+max_pool_2d
+mean
+mul
+pad
+reduce_mean
+relu
+relu6
+reshape
+resize_bilinear
+rsqrt
+softmax
+squeeze
+sqrt
+strided_slice
+sub
+tanh
+transpose
+transpose_conv
+MODELS/inception_module
+MODELS/mobilenet
diff --git a/tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt b/tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt
new file mode 100644
index 000000000..51125da2e
--- /dev/null
+++ b/tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt
@@ -0,0 +1,11 @@
+average_pool_2d
+concat
+conv_2d
+depthwise_conv_2d
+fullyconnected/fc1
+max_pool_2d
+softmax
+reshape/reshape1
+add
+MODELS/inception_module
+MODELS/mobilenet
diff --git a/tests/scripts/neurun_frameworktest_list.txt b/tests/scripts/neurun_frameworktest_list.txt
deleted file mode 100644
index 9c4e97dac..000000000
--- a/tests/scripts/neurun_frameworktest_list.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-average_pool_2d
-concat/concat1
-concat/concat2
-conv_2d
-fullyconnected/fc1
-max_pool_2d
-softmax
-reshape/reshape1
-MODELS/inception_module
diff --git a/tests/scripts/neurun_frameworktest_list.x86-64.cpu.txt b/tests/scripts/neurun_frameworktest_list.x86-64.cpu.txt
new file mode 100644
index 000000000..a5ec1eb5b
--- /dev/null
+++ b/tests/scripts/neurun_frameworktest_list.x86-64.cpu.txt
@@ -0,0 +1,10 @@
+average_pool_2d
+concat
+conv_2d
+depthwise_conv_2d
+fullyconnected/fc1
+max_pool_2d
+softmax
+reshape/reshape1
+add
+MODELS/inception_module
diff --git a/tests/scripts/pacl_frameworktest_list.armv7l-linux.txt b/tests/scripts/pacl_frameworktest_list.armv7l-linux.txt
new file mode 100644
index 000000000..ec52c3669
--- /dev/null
+++ b/tests/scripts/pacl_frameworktest_list.armv7l-linux.txt
@@ -0,0 +1,36 @@
+add
+average_pool_2d
+cast
+concat/concat1
+concat/concat2
+conv_2d
+depthwise_conv_2d
+div
+embedding_lookup
+exp
+floor
+fullyconnected/fc1
+gather
+hashtable_lookup
+l2_normalization
+l2_pool_2d
+logistic
+max_pool_2d
+mean
+MODELS/inception_module
+MODELS/mobilenet
+mul
+neg
+pad/4D_2D
+relu
+relu6
+reshape
+resize_bilinear
+softmax
+space_to_depth
+squeeze
+strided_slice
+sub
+tanh
+topk_v2
+transpose
diff --git a/tests/scripts/py/test_driver.py b/tests/scripts/py/test_driver.py
index f9c48e11f..39e5315f6 100755
--- a/tests/scripts/py/test_driver.py
+++ b/tests/scripts/py/test_driver.py
@@ -69,6 +69,7 @@ def get_parsed_options():
dest="benchmark_on",
default=False,
help="(default=off) run benchmark")
+ # TODO Remove deprecated --benchmark_acl option
parser.add_argument(
"--benchmark_acl",
action="store_true",
@@ -270,14 +271,6 @@ def run_benchmarkop(options):
os.system(cmd)
-def run_benchmarkacl(options):
- cmd = "{artifactpath}/tests/scripts/run_benchmark_acl.sh \
- --reportdir={reportdir}/benchmark \
- --bindir={artifactpath}/Product/out/bin".format(
- reportdir=options.reportdir, artifactpath=options.artifactpath)
- os.system(cmd)
-
-
def make_json_for_benchmark_result(options):
cmd = "source {artifactpath}/tests/scripts/print_to_json.sh && ".format(
artifactpath=options.artifactpath)
@@ -377,10 +370,6 @@ def main():
if options.benchmark_on:
run_benchmark(options)
- # benchmark_acl
- if options.benchmarkacl_on:
- run_benchmarkacl(options)
-
# benchmark_op
if options.benchmarkop_on:
run_benchmarkop(options)
diff --git a/tests/scripts/run_benchmark.sh b/tests/scripts/run_benchmark.sh
index 6980475b6..e22b40043 100755
--- a/tests/scripts/run_benchmark.sh
+++ b/tests/scripts/run_benchmark.sh
@@ -51,34 +51,6 @@ do
shift
done
-function get_result_of_benchmark_test()
-{
- local RUN_TEST_SH=$1
- local DRIVER_BIN=$2
- local MODEL=$3
- local LOG_FILE=$4
-
- local RET=0
- $RUN_TEST_SH --driverbin=$DRIVER_BIN $MODEL > $LOG_FILE 2>&1
- RET=$?
- if [[ $RET -ne 0 ]]; then
- echo "Testing $MODEL aborted... exit code: $RET"
- exit $RET
- fi
-
- local RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
- echo "$RESULT"
-}
-
-function print_result_of_benchmark_test()
-{
- local NAME=$1
- local RESULT=$2
- local RESULT_FILE=$3
-
- echo "$NAME $RESULT" > $RESULT_FILE
-}
-
function run_benchmark_test()
{
local DRIVER_BIN=$BENCHMARK_DRIVER_BIN
diff --git a/tests/scripts/run_benchmark_acl.sh b/tests/scripts/run_benchmark_acl.sh
deleted file mode 100755
index c6a643baa..000000000
--- a/tests/scripts/run_benchmark_acl.sh
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/bin/bash
-#
-# Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-MY_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-source $MY_PATH/common.sh
-
-BENCHMARKACL_BIN_DIR=
-BENCHMARKACL_REPORT_DIR=
-BENCHMARKACL_MODELS_FILE=
-BENCHMARKACL_MODEL_LIST="inceptionv3/inception_nonslim inceptionv3/inception_slim"
-
-function Usage()
-{
- # TODO: Fill this
- echo "Usage: LD_LIBRARY_PATH=Product/out/lib ./$0 --reportdir=report"
-}
-
-for i in "$@"
-do
- case $i in
- -h|--help|help)
- Usage
- exit 1
- ;;
- --reportdir=*)
- BENCHMARKACL_REPORT_DIR=${i#*=}
- BENCHMARKACL_MODELS_FILE=$BENCHMARKACL_REPORT_DIR/benchmarkacl_models.txt
- ;;
- --bindir=*)
- BENCHMARKACL_BIN_DIR=${i#*=}
- ;;
- esac
- shift
-done
-
-function run_benchmark_acl()
-{
- local REPORT_DIR=$BENCHMARKACL_REPORT_DIR
- local DRIVER_DIR=$BENCHMARKACL_BIN_DIR
- local LOG_FILE=""
- local RESULT_FILE=""
- local RESULT=""
- local RET=0
-
- export COUNT=5
- echo "============================================"
- local i=0
- for BENCHMARK_ACL_BIN in $(ls $DRIVER_DIR/benchmark_*); do
- local BENCHMARK_ACL_BIN_BASENAME=$(basename $BENCHMARK_ACL_BIN)
- mkdir -p $REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME
- echo "Benchmark/acl test by $BENCHMARK_ACL_BIN_BASENAME"
- echo $BENCHMARK_ACL_BIN_BASENAME >> $BENCHMARKACL_MODELS_FILE
-
- # ACL(NEON)
- LOG_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_neon.txt
- RESULT_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_neon.result
- echo -n "ACL(NEON)...... "
- $BENCHMARK_ACL_BIN 0 > $LOG_FILE 2>&1
- RET=$?
- if [[ $RET -ne 0 ]]; then
- echo "aborted... exit code: $RET"
- exit $RET
- fi
- RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
- echo "$RESULT ms"
- echo "ACL(NEON)" $RESULT > $RESULT_FILE
-
- # ACL(OpenCL)
- LOG_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_opencl.txt
- RESULT_FILE=$REPORT_DIR/$BENCHMARK_ACL_BIN_BASENAME/acl_opencl.result
- echo -n "ACL(OpenCL).... "
- $BENCHMARK_ACL_BIN 1 > $LOG_FILE 2>&1
- RET=$?
- if [[ $RET -ne 0 ]]; then
- echo "aborted... exit code: $RET"
- exit $RET
- fi
- RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
- echo "$RESULT ms"
- echo "ACL(OpenCL)" $RESULT > $RESULT_FILE
-
- if [[ $i -ne $(ls $DRIVER_DIR/benchmark_* | wc -w)-1 ]]; then
- echo ""
- fi
- i=$((i+1))
- done
- echo "============================================"
- unset COUNT
-}
-
-if [ ! -e "$BENCHMARKACL_REPORT_DIR" ]; then
- mkdir -p $BENCHMARKACL_REPORT_DIR
-fi
-
-rm -rf $BENCHMARKACL_MODELS_FILE
-
-echo ""
-run_benchmark_acl
-echo ""
diff --git a/tests/scripts/run_benchmark_op.sh b/tests/scripts/run_benchmark_op.sh
index 6576d1287..a9c7432c1 100755
--- a/tests/scripts/run_benchmark_op.sh
+++ b/tests/scripts/run_benchmark_op.sh
@@ -129,6 +129,11 @@ function run_benchmark_test()
STATUS="enabled"
source $MODEL_TEST_ROOT_PATH/$MODEL/config.sh
+ # Skip 'disabled' tests
+ if [ $(tr '[:upper:]' '[:lower:]' <<< "$STATUS") == "disabled" ]; then
+ continue
+ fi
+
echo "Benchmark test with `basename $DRIVER_BIN` & `echo $MODEL`"
echo $MODEL >> $BENCHMARK_MODELS_FILE
diff --git a/tests/scripts/run_benchmark_tflite_model.in b/tests/scripts/run_benchmark_tflite_model.in
index 1003ecc13..4a0398685 100644
--- a/tests/scripts/run_benchmark_tflite_model.in
+++ b/tests/scripts/run_benchmark_tflite_model.in
@@ -1 +1,2 @@
MODELS/inception_nonslim --input_layer=Mul --input_layer_shape=1,299,299,3 --num_threads=1 --num_runs=1
+MODELS/mobilenet --input_layer=input --input_layer_shape=1,128,128,3 --num_threads=1 --num_runs=1
diff --git a/tests/scripts/run_benchmark_tflite_model.sh b/tests/scripts/run_benchmark_tflite_model.sh
index 50a2a5fb3..3bc185191 100755
--- a/tests/scripts/run_benchmark_tflite_model.sh
+++ b/tests/scripts/run_benchmark_tflite_model.sh
@@ -18,7 +18,7 @@ SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
NNFW_DIR="$(dirname "$(dirname "${SCRIPT_DIR}")")"
REPORT_DIR="$NNFW_DIR/report/tflite_benchmark_model"
MODEL_ROOT="$NNFW_DIR/tests/framework/tests"
-LD_LIBRARY_PATH="$NNFW_DIR/Product/out/lib"
+LD_LIBRARY_PATH="$NNFW_DIR/Product/out/lib/pureacl:$NNFW_DIR/Product/out/lib"
RUN_TEST=$NNFW_DIR/tests/framework/run_test.sh
MODEL_IN=${BASH_SOURCE[0]%.sh}.in
diff --git a/tests/scripts/run_frameworktest.sh b/tests/scripts/run_frameworktest.sh
index 343fd0292..bf36d0c4f 100755
--- a/tests/scripts/run_frameworktest.sh
+++ b/tests/scripts/run_frameworktest.sh
@@ -23,8 +23,17 @@ FWTEST_TEST_NAME=
function Usage()
{
- # TODO: Fill this
- echo "Usage: LD_LIBRARY_PATH=Product/out/lib ./$0 --reportdir=report"
+ echo "Usage Example:"
+ echo "LD_LIBRARY_PATH=Product/out/lib ./$0 \\"
+ echo " --runtestsh=tests/framework/run_test.sh \\ # Test runner script path"
+ echo " --driverbin=Product/out/bin/tflite_run \\ # Test driver path"
+ echo " --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \\"
+ echo " --reportdir=report \\ # Directory for the report files will be saved"
+ echo " --tapname=framework_test.tap \\ # Tap file name"
+ echo " --logname=framework_test.log \\ # Log file name"
+ echo " --testname=Frameworktest # Name of the test just a label of tests"
+
+ exit 1
}
for i in "$@"
@@ -32,7 +41,6 @@ do
case $i in
-h|--help|help)
Usage
- exit 1
;;
--runtestsh=*)
FWTEST_RUN_TEST_SH=${i#*=}
@@ -59,7 +67,12 @@ do
shift
done
-# TODO: handle exceptions for params
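+# All parameters below are mandatory; if any is missing, Usage is printed and
+# the script exits.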
+[ ! -z "$FWTEST_RUN_TEST_SH" ] || Usage
+[ ! -z "$FWTEST_DRIVER_BIN" ] || Usage
+[ ! -z "$FWTEST_REPORT_DIR" ] || Usage
+[ ! -z "$FWTEST_TAP_NAME" ] || Usage
+[ ! -z "$FWTEST_LOG_NAME" ] || Usage
+[ ! -z "$FWTEST_TEST_NAME" ] || Usage
if [ ! -e "$FWTEST_REPORT_DIR" ]; then
mkdir -p $FWTEST_REPORT_DIR
diff --git a/tests/scripts/run_unittest.sh b/tests/scripts/run_unittest.sh
index 5eddfb32c..54a01913b 100755
--- a/tests/scripts/run_unittest.sh
+++ b/tests/scripts/run_unittest.sh
@@ -22,7 +22,7 @@ UNITTEST_RUN_ALL=""
function Usage()
{
# TODO: Fill this
- echo "Usage: LD_LIBRARY_PATH=Product/out/lib ./$0 --reportdir=report --unittestdir=Product/out/bin"
+ echo "Usage: LD_LIBRARY_PATH=Product/out/lib ./$0 --reportdir=report --unittestdir=Product/out/unittest"
}
get_gtest_option()
diff --git a/tests/scripts/test_driver.sh b/tests/scripts/test_driver.sh
index 97a3dab27..a5dbf4aaf 100755
--- a/tests/scripts/test_driver.sh
+++ b/tests/scripts/test_driver.sh
@@ -74,7 +74,6 @@ VERIFICATION_ON="false"
BENCHMARK_ON="false"
BENCHMARK_OP_ON="false"
BENCHMARK_TFLITE_MODEL_ON="false"
-BENCHMARK_ACL_ON="false"
ACL_ENV_ON="false"
PROFILE_ON="false"
REPORT_DIR=""
@@ -149,9 +148,9 @@ do
ALLTEST_ON="false"
BENCHMARK_TFLITE_MODEL_ON="true"
;;
+ # TODO Remove --benchmark_acl option after CI update
--benchmark_acl)
ALLTEST_ON="false"
- BENCHMARK_ACL_ON="true"
;;
--acl_envon)
ACL_ENV_ON="true"
@@ -208,7 +207,7 @@ if [ "$ACL_ENV_ON" == "true" ]; then
switch_nnfw_kernel_env "ON" "acl"
fi
-# Run unittest in each part such as Runtime, ACL
+# Run unit tests for each part, such as Runtime
if [ "$ALLTEST_ON" == "true" ] || [ "$UNITTEST_ON" == "true" ]; then
if [ "$UNITTESTALL_ON" == "true" ]; then
$TEST_DRIVER_DIR/run_unittest.sh \
@@ -285,15 +284,8 @@ if [ "$BENCHMARK_OP_ON" == "true" ]; then
--modelfilepath=$ARTIFACT_PATH/tests/framework
fi
-# Run benchmark/acl/benchmark_googlenet, mobilenet and inception_v3
-if [ "$BENCHMARK_ACL_ON" == "true" ]; then
- $TEST_DRIVER_DIR/run_benchmark_acl.sh \
- --reportdir=$REPORT_DIR/benchmark \
- --bindir=$ARTIFACT_PATH/Product/out/bin
-fi
-
# Make json file. Actually, this process is only needed on CI. That's why it is in test_driver.sh.
-if [ "$BENCHMARK_ON" == "true" ] || [ "$BENCHMARK_ACL_ON" == "true" ] || [ "$BENCHMARK_OP_ON" == "true" ]; then
+if [ "$BENCHMARK_ON" == "true" ] || [ "$BENCHMARK_OP_ON" == "true" ]; then
# functions to fill json with benchmark results
source $ARTIFACT_PATH/tests/scripts/print_to_json.sh
if [ "$BENCHMARK_OP_ON" == "true" ]; then
diff --git a/tests/scripts/test_performance.sh b/tests/scripts/test_performance.sh
new file mode 100644
index 000000000..29f90b9b8
--- /dev/null
+++ b/tests/scripts/test_performance.sh
@@ -0,0 +1,182 @@
+#!/bin/bash
+
+MY_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+source $MY_PATH/common.sh
+
+PRINT_WIDTH=45
+BACKEND_CNT=3
+# Run the profiler BACKEND_CNT+1 times: each of the first BACKEND_CNT runs
+# collects metrics for one not-yet-measured backend; the last run collects
+# metrics for data transfer.
+PROFILING_RUN_CNT=$((BACKEND_CNT+1))
+TEST_DRIVER_DIR="$( cd "$( dirname "${BASH_SOURCE}" )" && pwd )"
+ARTIFACT_PATH="$TEST_DRIVER_DIR/../.."
+BENCHMARK_DRIVER_BIN=$ARTIFACT_PATH/Product/out/bin/tflite_benchmark
+REPORT_DIR=$ARTIFACT_PATH/report
+RUN_TEST_SH=$ARTIFACT_PATH/tests/framework/run_test.sh
+BENCHMARK_MODEL_LIST="MODELS/inception_nonslim MODELS/inception_slim MODELS/mobilenet"
+
+if [ ! -e "$RUN_TEST_SH" ]; then
+ echo "Cannot find $RUN_TEST_SH"
+ exit 1
+fi
+
+
+BENCHMARK_REPORT_DIR=$REPORT_DIR/benchmark
+BENCHMARK_MODELS_FILE=$BENCHMARK_REPORT_DIR/benchmark_models.txt
+
+function print_with_dots()
+{
+ local MSG=$1
+ pad=$(printf '%0.1s' "."{1..45})
+ padlength=$((PRINT_WIDTH- ${#MSG}))
+ printf '%s' "$MSG"
+ printf '%*.*s ' 0 $padlength "$pad"
+}
+
+function run_without_sched()
+{
+ local RESULT_SCH_INT=$1
+ local REPORT_MODEL_DIR=$2
+ local MODEL=$3
+ local EXECUTOR=$4
+ local BACKEND=$5
+
+ LOG_FILE=$REPORT_MODEL_DIR/tflite_${EXECUTOR,,}_$BACKEND.txt
+ export OP_BACKEND_ALLOPS=$BACKEND
+ export EXECUTOR=$EXECUTOR
+
+ print_with_dots "$EXECUTOR $BACKEND without scheduler"
+
+ RESULT=$(get_result_of_benchmark_test $RUN_TEST_SH $BENCHMARK_DRIVER_BIN $MODEL $LOG_FILE)
+
+ printf -v RESULT_INT '%d' $RESULT 2>/dev/null
+ PERCENTAGE=$((100-RESULT_SCH_INT*100/RESULT_INT))
+ echo "$RESULT ms. Parallel scheduler is $PERCENTAGE% faster"
+}
+
+function run_benchmark_test()
+{
+ local LOG_FILE=
+ local RESULT=
+ local REPORT_MODEL_DIR=
+
+ export COUNT=5
+ echo "============================================"
+ local i=0
+ export USE_NNAPI=1
+ export BACKENDS="acl_cl;acl_neon;cpu"
+    # Remove old metrics so that the profiler can collect fresh metrics for
+    # operations whose input & output sizes match this model
+ rm "exec_time.json" 2>/dev/null
+ for MODEL in $BENCHMARK_MODEL_LIST; do
+
+ echo "Benchmark test with `basename $BENCHMARK_DRIVER_BIN` & `echo $MODEL`"
+ echo $MODEL >> $BENCHMARK_MODELS_FILE
+
+ REPORT_MODEL_DIR=$BENCHMARK_REPORT_DIR/scheduler_benchmark/$MODEL
+ mkdir -p $REPORT_MODEL_DIR
+
+##################################################################################
+ # Get metrics by running profiler
+##################################################################################
+ export USE_SCHEDULER=1
+ export PROFILING_MODE=1
+ export EXECUTOR="Dataflow"
+ export NEURUN_LOG_ENABLE=1
+ for ((j = 1 ; j <= $PROFILING_RUN_CNT ; j++)); do
+ # Save the verbose log of each run
+ LOG_FILE=$REPORT_MODEL_DIR/tflite_profiling_$j.txt
+
+ print_with_dots "Profiling run #$j out of $PROFILING_RUN_CNT"
+
+ $RUN_TEST_SH --driverbin=$BENCHMARK_DRIVER_BIN $MODEL > $LOG_FILE 2>&1
+ RET=$?
+ if [[ $RET -ne 0 ]]; then
+ echo "Profiling $MODEL aborted in run#$j... exit code: $RET"xX
+ exit $RET
+ fi
+ echo "finished"
+ # Save the exec_time.json of each run
+ cp "exec_time.json" $REPORT_MODEL_DIR/"exec_time_$j.json"
+ done
+ unset NEURUN_LOG_ENABLE
+
+
+##################################################################################
+ # Turn off profiling
+##################################################################################
+ export PROFILING_MODE=0
+
+##################################################################################
+ # Run ParallelExecutor with scheduler
+##################################################################################
+ LOG_FILE=$REPORT_MODEL_DIR/tflite_parallel_with_scheduler.txt
+ export EXECUTOR="Parallel"
+ export GRAPH_DOT_DUMP=1
+ print_with_dots "Parallel with scheduler"
+
+ RESULT=$(get_result_of_benchmark_test $RUN_TEST_SH $BENCHMARK_DRIVER_BIN $MODEL $LOG_FILE)
+ echo "$RESULT ms"
+
+ printf -v RESULT_SCH_INT '%d' $RESULT 2>/dev/null
+
+ mv "after_lower.dot" $REPORT_MODEL_DIR/"after_lower_parallel.dot"
+
+##################################################################################
+ # Run Linear executor with scheduler
+##################################################################################
+ LOG_FILE=$REPORT_MODEL_DIR/tflite_linear_with_scheduler.txt
+ export EXECUTOR="Linear"
+ export GRAPH_DOT_DUMP=1
+ print_with_dots "Linear with scheduler"
+
+ RESULT=$(get_result_of_benchmark_test $RUN_TEST_SH $BENCHMARK_DRIVER_BIN $MODEL $LOG_FILE)
+
+ printf -v RESULT_INT '%d' $RESULT 2>/dev/null
+ PERCENTAGE=$((100-RESULT_SCH_INT*100/RESULT_INT))
+ echo "$RESULT ms. Parallel scheduler is $PERCENTAGE% faster"
+
+        # Remove the metrics file so that for the next model the profiler can
+        # collect metrics for operations whose input & output sizes match that model
+ mv "exec_time.json" $REPORT_MODEL_DIR
+ # Save the dot graph
+ mv "after_lower.dot" $REPORT_MODEL_DIR/"after_lower_linear.dot"
+ unset GRAPH_DOT_DUMP
+
+##################################################################################
+ # Turn off scheduler
+##################################################################################
+ export USE_SCHEDULER=0
+
+ # Run LinearExecutor on acl_cl without scheduler
+ run_without_sched $RESULT_SCH_INT $REPORT_MODEL_DIR $MODEL "Linear" "acl_cl"
+
+ # Run LinearExecutor on acl_neon without scheduler
+ run_without_sched $RESULT_SCH_INT $REPORT_MODEL_DIR $MODEL "Linear" "acl_neon"
+
+ # Run ParallelExecutor on acl_cl without scheduler
+ run_without_sched $RESULT_SCH_INT $REPORT_MODEL_DIR $MODEL "Parallel" "acl_cl"
+
+ # Run ParallelExecutor on acl_neon without scheduler
+ run_without_sched $RESULT_SCH_INT $REPORT_MODEL_DIR $MODEL "Parallel" "acl_neon"
+
+ if [[ $i -ne $(echo $BENCHMARK_MODEL_LIST | wc -w)-1 ]]; then
+ echo ""
+ fi
+ i=$((i+1))
+
+ unset USE_SCHEDULER
+ unset PROFILING_MODE
+ unset EXECUTOR
+ unset OP_BACKEND_ALLOPS
+ done
+ unset BACKENDS
+ echo "============================================"
+ unset COUNT
+ unset USE_NNAPI
+
+}
+
+echo ""
+run_benchmark_test
+echo ""
diff --git a/tests/tools/CMakeLists.txt b/tests/tools/CMakeLists.txt
index b1eea12f9..5ea6cdadd 100644
--- a/tests/tools/CMakeLists.txt
+++ b/tests/tools/CMakeLists.txt
@@ -1,8 +1 @@
-set(EXCLUDE_DIR "")
-
-if(OBS_BUILD)
- list(APPEND EXCLUDE_DIR tflite_benchmark_model)
- list(APPEND EXCLUDE_DIR tflite_run)
-endif(OBS_BUILD)
-
-add_subdirectories(EXCLUDES ${EXCLUDE_DIR})
+add_subdirectories()
diff --git a/tests/tools/nnapi_test/src/nnapi_test.cc b/tests/tools/nnapi_test/src/nnapi_test.cc
index 73e80f01f..799188f66 100644
--- a/tests/tools/nnapi_test/src/nnapi_test.cc
+++ b/tests/tools/nnapi_test/src/nnapi_test.cc
@@ -15,7 +15,7 @@
*/
#include "tflite/ext/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
+#include "tensorflow/lite/model.h"
#include "tflite/interp/FlatBufferBuilder.h"
#include "tflite/Diff.h"
diff --git a/tests/tools/nnpackage_run/CMakeLists.txt b/tests/tools/nnpackage_run/CMakeLists.txt
new file mode 100644
index 000000000..5fd7136cc
--- /dev/null
+++ b/tests/tools/nnpackage_run/CMakeLists.txt
@@ -0,0 +1,32 @@
+if(NOT BUILD_NNPACKAGE_RUN)
+ return()
+endif(NOT BUILD_NNPACKAGE_RUN)
+
+if(NOT BUILD_NEURUN)
+ return()
+endif(NOT BUILD_NEURUN)
+
+nnfw_find_package(HDF5 QUIET)
+if(NOT HDF5_FOUND)
+ message(WARNING "HDF5 NOT found. Install libhdf5-dev to build nnpackage_run.")
+ return()
+endif(NOT HDF5_FOUND)
+
+list(APPEND NNPACKAGE_RUN_SRCS "src/nnpackage_run.cc")
+list(APPEND NNPACKAGE_RUN_SRCS "src/args.cc")
+list(APPEND NNPACKAGE_RUN_SRCS "src/tensor_dumper.cc")
+
+nnfw_find_package(Boost REQUIRED)
+
+add_executable(nnpackage_run ${NNPACKAGE_RUN_SRCS})
+target_include_directories(nnpackage_run PRIVATE src)
+target_include_directories(nnpackage_run PRIVATE ${Boost_INCLUDE_DIRS})
+target_include_directories(nnpackage_run PRIVATE ${HDF5_INCLUDE_DIRS})
+
+target_link_libraries(nnpackage_run neurun_core neurun tflite_loader)
+target_link_libraries(nnpackage_run tensorflow-lite ${LIB_PTHREAD} dl nnfw_lib_tflite)
+target_link_libraries(nnpackage_run nnfw-dev)
+target_link_libraries(nnpackage_run boost_program_options boost_system boost_filesystem)
+target_link_libraries(nnpackage_run ${HDF5_LIBRARIES})
+
+install(TARGETS nnpackage_run DESTINATION bin)
diff --git a/tests/tools/nnpackage_run/README.md b/tests/tools/nnpackage_run/README.md
new file mode 100644
index 000000000..c7167b2d4
--- /dev/null
+++ b/tests/tools/nnpackage_run/README.md
@@ -0,0 +1,22 @@
+# nnpackage_run
+
+`nnpackage_run` is a tool to run `nnpackage`.
+
+It takes an `nnpackage` as input and uses the **runtime API** internally.
+
+## Usage
+
+### Simple run
+
+This will run the package with random input data.
+
+```
+$ ./nnpackage_run path_to_nnpackage_directory
+```
+
+Output would look like:
+
+```
+nnfw_prepare takes 425.235 ms
+nnfw_run takes 2.525 ms
+```
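+
+### HDF5 input/output (sketch)
+
+`src/args.cc` in this commit also defines `--load`/`-l` and `--dump`/`-d`
+options. Assuming an HDF5 file whose `value` group holds one float32 dataset
+per input (see `loadInputs` in `src/nnpackage_run.cc`), an invocation would
+look like this (`input.h5`/`output.h5` are example names):
+
+```
+$ ./nnpackage_run -l input.h5 -d output.h5 path_to_nnpackage_directory
+```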
diff --git a/tests/tools/nnpackage_run/src/args.cc b/tests/tools/nnpackage_run/src/args.cc
new file mode 100644
index 000000000..a7593fabb
--- /dev/null
+++ b/tests/tools/nnpackage_run/src/args.cc
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "args.h"
+
+#include <iostream>
+#include <boost/filesystem.hpp>
+
+namespace NNPackageRun
+{
+
+Args::Args(const int argc, char **argv) noexcept
+{
+ Initialize();
+ Parse(argc, argv);
+}
+
+void Args::Initialize(void)
+{
+ // General options
+ po::options_description general("General options");
+
+ // clang-format off
+ general.add_options()
+ ("help,h", "Display available options")
+ ("nnpackage", po::value<std::string>()->required())
+ ("dump,d", po::value<std::string>()->default_value(""), "Output filename")
+ ("load,l", po::value<std::string>()->default_value(""), "Input filename");
+ // clang-format on
+
+ _options.add(general);
+ _positional.add("nnpackage", 1);
+}
+
+void Args::Parse(const int argc, char **argv)
+{
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(_options).positional(_positional).run(),
+ vm);
+
+ {
+ auto conflicting_options = [&](const std::string &o1, const std::string &o2) {
+ if ((vm.count(o1) && !vm[o1].defaulted()) && (vm.count(o2) && !vm[o2].defaulted()))
+ {
+ throw boost::program_options::error(std::string("Two options '") + o1 + "' and '" + o2 +
+ "' cannot be given at once.");
+ }
+ };
+ }
+
+ if (vm.count("help"))
+ {
+ std::cout << "nnpackage_run\n\n";
+ std::cout << "Usage: " << argv[0] << " path to nnpackage root directory [<options>]\n\n";
+ std::cout << _options;
+ std::cout << "\n";
+
+ exit(0);
+ }
+
+ po::notify(vm);
+
+ if (vm.count("dump"))
+ {
+ _dump_filename = vm["dump"].as<std::string>();
+ }
+
+ if (vm.count("load"))
+ {
+ _load_filename = vm["load"].as<std::string>();
+ }
+
+ if (vm.count("nnpackage"))
+ {
+ _package_filename = vm["nnpackage"].as<std::string>();
+
+ if (_package_filename.empty())
+ {
+ // TODO Print usage instead of the below message
+ std::cerr << "Please specify nnpackage file. Run with `--help` for usage."
+ << "\n";
+
+ exit(1);
+ }
+ else
+ {
+ if (!boost::filesystem::exists(_package_filename))
+ {
+ std::cerr << "nnpackage not found: " << _package_filename << "\n";
+ }
+ }
+ }
+}
+
+} // end of namespace NNPackageRun
diff --git a/tests/tools/nnpackage_run/src/args.h b/tests/tools/nnpackage_run/src/args.h
new file mode 100644
index 000000000..d064d77e7
--- /dev/null
+++ b/tests/tools/nnpackage_run/src/args.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNPACKAGE_RUN_ARGS_H__
+#define __NNPACKAGE_RUN_ARGS_H__
+
+#include <string>
+#include <boost/program_options.hpp>
+
+namespace po = boost::program_options;
+
+namespace NNPackageRun
+{
+
+class Args
+{
+public:
+ Args(const int argc, char **argv) noexcept;
+ void print(void);
+
+ const std::string &getPackageFilename(void) const { return _package_filename; }
+ const std::string &getDumpFilename(void) const { return _dump_filename; }
+ const std::string &getLoadFilename(void) const { return _load_filename; }
+
+private:
+ void Initialize();
+ void Parse(const int argc, char **argv);
+
+private:
+ po::positional_options_description _positional;
+ po::options_description _options;
+
+ std::string _package_filename;
+ std::string _dump_filename;
+ std::string _load_filename;
+};
+
+} // end of namespace NNPackageRun
+
+#endif // __NNPACKAGE_RUN_ARGS_H__
diff --git a/tests/tools/nnpackage_run/src/nnpackage_run.cc b/tests/tools/nnpackage_run/src/nnpackage_run.cc
new file mode 100644
index 000000000..97edebf4c
--- /dev/null
+++ b/tests/tools/nnpackage_run/src/nnpackage_run.cc
@@ -0,0 +1,259 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "args.h"
+#include "tflite/Diff.h"
+#include "tensor_dumper.h"
+#include "hdf5.h"
+#include "nnfw.h"
+
+#include <assert.h>
+#include <iostream>
+
+#include <chrono>
+
+#define NNPR_ENSURE_STATUS(a) \
+ do \
+ { \
+ if ((a) != NNFW_STATUS_NO_ERROR) \
+ { \
+ exit(-1); \
+ } \
+ } while (0)
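+// Every nnfw_* call below goes through NNPR_ENSURE_STATUS, so the first
+// failing runtime API call terminates the process with exit code -1.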
+
+uint64_t NowMicros()
+{
+ auto time_point = std::chrono::high_resolution_clock::now();
+ auto since_epoch = time_point.time_since_epoch();
+  // default precision of high_resolution_clock is 1e-9 s (nanoseconds)
+ return std::chrono::duration_cast<std::chrono::microseconds>(since_epoch).count();
+}
+
+uint64_t num_elems(const nnfw_tensorinfo *ti)
+{
+ uint64_t n = 1;
+ for (uint32_t i = 0; i < ti->rank; ++i)
+ {
+ assert(ti->dims[i] >= 0);
+ n *= ti->dims[i];
+ }
+ return n;
+}
+
+std::vector<float> randomData(RandomGenerator &randgen, uint64_t size)
+{
+ std::vector<float> vec(size);
+ for (uint64_t i = 0; i < size; i++)
+ vec[i] = randgen.generate<float>();
+ return vec;
+}
+
+static const char *h5_value_grpname = "value";
+
+int main(const int argc, char **argv)
+{
+ NNPackageRun::Args args(argc, argv);
+ auto nnpackage_path = args.getPackageFilename();
+
+ nnfw_session *session = nullptr;
+ NNPR_ENSURE_STATUS(nnfw_create_session(&session));
+ NNPR_ENSURE_STATUS(nnfw_load_model_from_file(session, nnpackage_path.c_str()));
+
+ uint32_t num_inputs;
+ NNPR_ENSURE_STATUS(nnfw_input_size(session, &num_inputs));
+
+ // verify input and output
+
+ if (num_inputs == 0)
+ {
+ std::cerr << "[ ERROR ] "
+ << "No inputs in model => execution is not possible" << std::endl;
+ exit(1);
+ }
+
+ auto verifyInputTypes = [session]() {
+ uint32_t sz;
+ NNPR_ENSURE_STATUS(nnfw_input_size(session, &sz));
+ for (uint32_t i = 0; i < sz; ++i)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_input_tensorinfo(session, i, &ti));
+ if (ti.dtype != NNFW_TYPE_TENSOR_FLOAT32)
+ {
+ std::cerr << "Only float 32bit is supported." << std::endl;
+ exit(-1);
+ }
+ }
+ };
+
+ auto verifyOutputTypes = [session]() {
+ uint32_t sz;
+ NNPR_ENSURE_STATUS(nnfw_output_size(session, &sz));
+
+ for (uint32_t i = 0; i < sz; ++i)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_output_tensorinfo(session, i, &ti));
+ if (ti.dtype != NNFW_TYPE_TENSOR_FLOAT32)
+ {
+ std::cerr << "Only float 32bit is supported." << std::endl;
+ exit(-1);
+ }
+ }
+ };
+
+ verifyInputTypes();
+ verifyOutputTypes();
+
+ // prepare execution
+
+ uint64_t prepare_ms = NowMicros();
+ NNPR_ENSURE_STATUS(nnfw_prepare(session));
+ prepare_ms = NowMicros() - prepare_ms;
+
+ // prepare input
+
+ std::vector<std::vector<float>> inputs(num_inputs);
+
+ auto loadInputs = [session, num_inputs, &inputs](std::string filename) {
+ hid_t file_id = H5Fopen(filename.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
+ if (file_id < 0)
+ {
+ std::cerr << "error during opening file " << filename << "." << std::endl;
+ exit(-1);
+ }
+ hid_t valgrp_id = H5Gopen(file_id, h5_value_grpname, H5P_DEFAULT);
+ if (valgrp_id < 0)
+ {
+ std::cerr << "error during opening group " << h5_value_grpname << "." << std::endl;
+ H5Fclose(file_id);
+ exit(-1);
+ }
+ for (uint32_t i = 0; i < num_inputs; ++i)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_input_tensorinfo(session, i, &ti));
+
+ hid_t dset_id = H5Dopen(valgrp_id, std::to_string(i).c_str(), H5P_DEFAULT);
+ if (dset_id < 0)
+ {
+ std::cerr << "error during opening dataset " << std::to_string(i) << "." << std::endl;
+ H5Gclose(valgrp_id);
+ H5Fclose(file_id);
+ exit(-1);
+ }
+
+ // check type
+ hid_t type = H5Dget_type(dset_id);
+ if (!H5Tequal(type, H5T_IEEE_F32BE))
+ {
+ std::cerr << "h5 input has non-float32 type. nnpkg_run supports float32 only." << std::endl;
+ H5Dclose(dset_id);
+ H5Gclose(valgrp_id);
+ H5Fclose(file_id);
+ exit(-1);
+ }
+ // allocate memory for data
+ auto sz = num_elems(&ti);
+ inputs[i].resize(sz);
+ // read data
+ H5Dread(dset_id, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, inputs[i].data());
+
+ NNPR_ENSURE_STATUS(nnfw_set_input(session, i, NNFW_TYPE_TENSOR_FLOAT32, inputs[i].data(),
+ sizeof(float) * num_elems(&ti)));
+ // clean up
+ H5Dclose(dset_id);
+ }
+ H5Gclose(valgrp_id);
+ H5Fclose(file_id);
+ };
+
+ auto generateInputs = [session, num_inputs, &inputs]() {
+ // generate random data
+ const int seed = 1;
+ RandomGenerator randgen{seed, 0.0f, 2.0f};
+ for (uint32_t i = 0; i < num_inputs; ++i)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_input_tensorinfo(session, i, &ti));
+ auto input_num_elements = num_elems(&ti);
+ inputs[i] = randomData(randgen, input_num_elements);
+ NNPR_ENSURE_STATUS(nnfw_set_input(session, i, NNFW_TYPE_TENSOR_FLOAT32, inputs[i].data(),
+ sizeof(float) * input_num_elements));
+ }
+ };
+
+ if (!args.getLoadFilename().empty())
+ loadInputs(args.getLoadFilename());
+ else
+ generateInputs();
+
+ // prepare output
+
+ uint32_t num_outputs = 0;
+ NNPR_ENSURE_STATUS(nnfw_output_size(session, &num_outputs));
+ std::vector<std::vector<float>> outputs(num_outputs);
+
+ for (uint32_t i = 0; i < num_outputs; i++)
+ {
+ nnfw_tensorinfo ti;
+ NNPR_ENSURE_STATUS(nnfw_output_tensorinfo(session, i, &ti));
+ auto output_num_elements = num_elems(&ti);
+ outputs[i].resize(output_num_elements);
+ NNPR_ENSURE_STATUS(nnfw_set_output(session, i, NNFW_TYPE_TENSOR_FLOAT32, outputs[i].data(),
+ sizeof(float) * output_num_elements));
+ }
+
+ uint64_t run_ms = NowMicros();
+ NNPR_ENSURE_STATUS(nnfw_run(session));
+ run_ms = NowMicros() - run_ms;
+
+ // dump output tensors
+
+ auto dumpOutputs = [session, num_outputs, &outputs](std::string filename) {
+    hid_t file_id = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    // Create the "value" group once; creating it inside the loop would fail
+    // for every output after the first.
+    hid_t valgrp_id = H5Gcreate(file_id, h5_value_grpname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+    for (uint32_t i = 0; i < num_outputs; i++)
+    {
+      nnfw_tensorinfo ti;
+      NNPR_ENSURE_STATUS(nnfw_output_tensorinfo(session, i, &ti));
+      std::vector<hsize_t> dims;
+      dims.resize(ti.rank);
+      for (uint32_t j = 0; j < ti.rank; ++j)
+      {
+        assert(ti.dims[j] >= 0);
+        dims[j] = ti.dims[j];
+      }
+      hid_t dsp_id = H5Screate_simple(ti.rank, dims.data(), NULL);
+      hid_t dset_id = H5Dcreate2(valgrp_id, std::to_string(i).c_str(), H5T_IEEE_F32BE, dsp_id,
+                                 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+      H5Dwrite(dset_id, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, outputs[i].data());
+      H5Dclose(dset_id);
+      H5Sclose(dsp_id);
+    }
+    H5Gclose(valgrp_id);
+    H5Fclose(file_id);
+ };
+
+ if (!args.getDumpFilename().empty())
+ dumpOutputs(args.getDumpFilename());
+
+ std::cout << "nnfw_prepare takes " << prepare_ms / 1e3 << " sec" << std::endl;
+ std::cout << "nnfw_run takes " << run_ms / 1e3 << " sec" << std::endl;
+
+ NNPR_ENSURE_STATUS(nnfw_close_session(session));
+
+ return 0;
+}
diff --git a/tests/tools/nnpackage_run/src/tensor_dumper.cc b/tests/tools/nnpackage_run/src/tensor_dumper.cc
new file mode 100644
index 000000000..ae4ed9518
--- /dev/null
+++ b/tests/tools/nnpackage_run/src/tensor_dumper.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tensor_dumper.h"
+
+#include <fstream>
+#include <iostream>
+#include <cstring>
+
+namespace NNPackageRun
+{
+TensorDumper::TensorDumper(const std::string &filename)
+{
+ // TODO Handle file open/write error
+ file_.open(filename, std::ios::out | std::ios::binary);
+ dumpInt32(version);
+}
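+
+// Resulting binary layout (written by the methods below): version as int32,
+// then per tensor: dtype (int32), rank (int32), each dimension (int32), the
+// byte count (size_t), and finally the raw buffer contents.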
+
+TensorDumper::~TensorDumper() { file_.close(); }
+
+void TensorDumper::dumpInt32(int32_t i)
+{
+ file_.write(reinterpret_cast<const char *>(&i), sizeof(i));
+}
+
+void TensorDumper::dumpSizeT(size_t i)
+{
+ file_.write(reinterpret_cast<const char *>(&i), sizeof(i));
+}
+
+void TensorDumper::dumpTensor(const nnfw_tensorinfo ti, void *buffer, size_t bytes)
+{
+ dumpInt32(ti.dtype);
+ dumpInt32(ti.rank);
+  for (int32_t i = 0; i < ti.rank; ++i)
+ dumpInt32(ti.dims[i]);
+ dumpSizeT(bytes);
+ file_.write(static_cast<char *>(buffer), bytes);
+}
+
+} // end of namespace NNPackageRun
\ No newline at end of file
diff --git a/tests/tools/nnpackage_run/src/tensor_dumper.h b/tests/tools/nnpackage_run/src/tensor_dumper.h
new file mode 100644
index 000000000..12cc22f18
--- /dev/null
+++ b/tests/tools/nnpackage_run/src/tensor_dumper.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __NNPACKAGE_RUN_TENSOR_DUMPER_H__
+#define __NNPACKAGE_RUN_TENSOR_DUMPER_H__
+
+#include <memory>
+#include <string>
+#include <vector>
+#include <stddef.h>
+#include <fstream>
+
+#include "nnfw.h"
+
+namespace NNPackageRun
+{
+
+class TensorDumper
+{
+public:
+ TensorDumper(const std::string &filename);
+ void dumpTensor(const nnfw_tensorinfo ti, void *buffer, size_t bytes);
+ void dumpInt32(int32_t i);
+ void dumpSizeT(size_t i);
+ ~TensorDumper();
+
+private:
+ static constexpr int version = 1;
+ std::ofstream file_;
+};
+
+} // end of namespace NNPackageRun
+
+#endif // __NNPACKAGE_RUN_TENSOR_DUMPER_H__
diff --git a/tests/tools/tflite_benchmark/CMakeLists.txt b/tests/tools/tflite_benchmark/CMakeLists.txt
index 56421a294..634d451bc 100644
--- a/tests/tools/tflite_benchmark/CMakeLists.txt
+++ b/tests/tools/tflite_benchmark/CMakeLists.txt
@@ -1,5 +1,10 @@
list(APPEND SOURCES "src/tflite_benchmark.cc")
+nnfw_find_package(Boost REQUIRED)
+
add_executable(tflite_benchmark ${SOURCES})
target_link_libraries(tflite_benchmark nnfw_lib_tflite tensorflow-lite ${LIB_PTHREAD} dl nnfw_lib_misc)
+
+target_include_directories(tflite_benchmark PRIVATE ${Boost_INCLUDE_DIRS})
+
install(TARGETS tflite_benchmark DESTINATION bin)
diff --git a/tests/tools/tflite_benchmark/src/tflite_benchmark.cc b/tests/tools/tflite_benchmark/src/tflite_benchmark.cc
index b77afc189..1fde0c449 100644
--- a/tests/tools/tflite_benchmark/src/tflite_benchmark.cc
+++ b/tests/tools/tflite_benchmark/src/tflite_benchmark.cc
@@ -15,7 +15,7 @@
*/
#include "tflite/ext/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
+#include "tensorflow/lite/model.h"
#include "tflite/Assert.h"
#include "tflite/Session.h"
@@ -30,9 +30,11 @@
#include <boost/accumulators/statistics/max.hpp>
#include <boost/accumulators/statistics/mean.hpp>
+#include <chrono>
#include <iostream>
+#include <thread>
-#include "misc/environment.h"
+#include "misc/EnvVar.h"
#include "misc/benchmark.h"
using namespace tflite;
@@ -50,14 +52,41 @@ void help(std::ostream &out, const int argc, char **argv)
bool checkParams(const int argc, char **argv)
{
- if (argc < 2)
+ try
+ {
+ if (argc < 2)
+ {
+ help(std::cerr, argc, argv);
+ return false;
+ }
+ }
+ catch (const std::exception &e)
{
- help(std::cerr, argc, argv);
+ std::cerr << e.what() << std::endl;
+
return false;
}
+
return true;
}
+// Verifies whether the model is a flatbuffer file.
+class BMFlatBufferVerifier : public tflite::TfLiteVerifier
+{
+public:
+ bool Verify(const char *data, int length, tflite::ErrorReporter *reporter) override
+ {
+
+ flatbuffers::Verifier verifier(reinterpret_cast<const uint8_t *>(data), length);
+ if (!tflite::VerifyModelBuffer(verifier))
+ {
+ reporter->Report("The model is not a valid Flatbuffer file");
+ return false;
+ }
+ return true;
+ }
+};
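+// An instance of this verifier is handed to FlatBufferModel::VerifyAndBuildFromFile
+// below, so malformed model files are rejected before an interpreter is built.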
+
int main(const int argc, char **argv)
{
@@ -68,8 +97,9 @@ int main(const int argc, char **argv)
const auto filename = argv[1];
- const bool use_nnapi = nnfw::misc::get_env_bool("USE_NNAPI");
- const auto thread_count = nnfw::misc::get_env_int("THREAD", -1);
+ const bool use_nnapi = nnfw::misc::EnvVar("USE_NNAPI").asBool(false);
+ const auto thread_count = nnfw::misc::EnvVar("THREAD").asInt(-1);
+ const auto pause = nnfw::misc::EnvVar("PAUSE").asInt(0);
std::cout << "Num threads: " << thread_count << std::endl;
if (use_nnapi)
@@ -77,9 +107,17 @@ int main(const int argc, char **argv)
std::cout << "Use NNAPI" << std::endl;
}
+ assert(pause >= 0);
+ if (pause > 0)
+ {
+ std::cout << "Insert " << pause << "s pause between iterations" << std::endl;
+ }
+
StderrReporter error_reporter;
- auto model = FlatBufferModel::BuildFromFile(filename, &error_reporter);
+ std::unique_ptr<tflite::TfLiteVerifier> verifier{new BMFlatBufferVerifier};
+
+ auto model = FlatBufferModel::VerifyAndBuildFromFile(filename, verifier.get(), &error_reporter);
if (model == nullptr)
{
std::cerr << "Cannot create model" << std::endl;
@@ -206,7 +244,7 @@ int main(const int argc, char **argv)
//
// Measure
//
- const auto cnt = nnfw::misc::get_env_int("COUNT", 1);
+ const auto cnt = nnfw::misc::EnvVar("COUNT").asInt(1);
using namespace boost::accumulators;
@@ -228,6 +266,12 @@ int main(const int argc, char **argv)
acc(elapsed.count());
std::cout << "Iteration " << n << ": " << elapsed.count() << "ms" << std::endl;
+
+ // Insert "pause"
+ if ((n != cnt - 1) && (pause > 0))
+ {
+ std::this_thread::sleep_for(std::chrono::seconds(pause));
+ }
}
std::cout << "--------" << std::endl;
diff --git a/tests/tools/tflite_benchmark_model/CMakeLists.txt b/tests/tools/tflite_benchmark_model/CMakeLists.txt
index c48f658c1..ea4986a8c 100644
--- a/tests/tools/tflite_benchmark_model/CMakeLists.txt
+++ b/tests/tools/tflite_benchmark_model/CMakeLists.txt
@@ -2,10 +2,16 @@ if (NOT BUILD_TFLITE_BENCHMARK_MODEL)
return()
endif(NOT BUILD_TFLITE_BENCHMARK_MODEL)
+nnfw_find_package(TensorFlowLite REQUIRED)
+
+# TODO Remove this target_compile_definitions command and check for the definition's presence instead.
+# This change is a prerequisite for pre-built tensorflow-lite package support.
+target_compile_definitions(tensorflow-lite PUBLIC "TFLITE_PROFILING_ENABLED")
+
file(GLOB_RECURSE SOURCES "*.cc")
nnfw_find_package(TensorFlowSource REQUIRED)
-set(TENSORFLOW_LITE_BASE "${TensorFlowSource_DIR}/tensorflow/contrib/lite")
+set(TENSORFLOW_LITE_BASE "${TensorFlowSource_DIR}/tensorflow/lite")
list(APPEND SOURCES "${TENSORFLOW_LITE_BASE}/tools/benchmark/benchmark_main.cc"
"${TENSORFLOW_LITE_BASE}/tools/benchmark/benchmark_model.cc"
"${TENSORFLOW_LITE_BASE}/tools/benchmark/benchmark_params.cc"
diff --git a/tests/tools/tflite_benchmark_model/README.md b/tests/tools/tflite_benchmark_model/README.md
index 8d997639f..a71a2fa1c 100644
--- a/tests/tools/tflite_benchmark_model/README.md
+++ b/tests/tools/tflite_benchmark_model/README.md
@@ -9,7 +9,7 @@ of runs. Aggregate latency statistics are reported after running the benchmark.
The instructions below are for running the binary on Desktop and Android,
for iOS please use the
-[iOS benchmark app](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/contrib/lite/tools/benchmark/ios).
+[iOS benchmark app](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/tools/benchmark/ios).
## Parameters
@@ -45,14 +45,14 @@ and the following optional parameters:
bazel build -c opt \
--config=android_arm \
--cxxopt='--std=c++11' \
- tensorflow/contrib/lite/tools/benchmark:benchmark_model
+ tensorflow/lite/tools/benchmark:benchmark_model
```
(2) Connect your phone. Push the binary to your phone with adb push
(make the directory if required):
```
-adb push bazel-bin/tensorflow/contrib/lite/tools/benchmark/benchmark_model /data/local/tmp
+adb push bazel-bin/tensorflow/lite/tools/benchmark/benchmark_model /data/local/tmp
```
(3) Make the binary executable.
@@ -79,14 +79,14 @@ adb shell /data/local/tmp/benchmark_model \
(1) build the binary
```
-bazel build -c opt tensorflow/contrib/lite/tools/benchmark:benchmark_model
+bazel build -c opt tensorflow/lite/tools/benchmark:benchmark_model
```
(2) Run on your compute graph, similar to the Android case but without the need of adb shell.
For example:
```
-bazel-bin/tensorflow/contrib/lite/tools/benchmark/benchmark_model \
+bazel-bin/tensorflow/lite/tools/benchmark/benchmark_model \
--graph=mobilenet_quant_v1_224.tflite \
--num_threads=4
```
@@ -126,7 +126,7 @@ bazel build -c opt \
--config=android_arm \
--cxxopt='--std=c++11' \
--copt=-DTFLITE_PROFILING_ENABLED \
- tensorflow/contrib/lite/tools/benchmark:benchmark_model
+ tensorflow/lite/tools/benchmark:benchmark_model
```
This compiles TFLite with profiling enabled, now you can run the benchmark binary like before. The binary will produce detailed statistics for each operation similar to those shown below:
diff --git a/tests/tools/tflite_benchmark_model/benchmark_tflite_model.cc b/tests/tools/tflite_benchmark_model/benchmark_tflite_model.cc
index efc8bae52..f6dda2628 100644
--- a/tests/tools/tflite_benchmark_model/benchmark_tflite_model.cc
+++ b/tests/tools/tflite_benchmark_model/benchmark_tflite_model.cc
@@ -29,7 +29,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
-#include "tensorflow/contrib/lite/tools/benchmark/benchmark_tflite_model.h"
+#include "tensorflow/lite/tools/benchmark/benchmark_tflite_model.h"
#include <cstdarg>
#include <cstdlib>
@@ -40,13 +40,13 @@ limitations under the License.
#include <vector>
#ifdef TFLITE_FLEX
-#include "tensorflow/contrib/lite/delegates/flex/delegate.h"
+#include "tensorflow/lite/delegates/flex/delegate.h"
#endif // TFLITE_FLEX
#include "tflite/ext/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/op_resolver.h"
-#include "tensorflow/contrib/lite/string_util.h"
-#include "tensorflow/contrib/lite/tools/benchmark/logging.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/op_resolver.h"
+#include "tensorflow/lite/string_util.h"
+#include "tensorflow/lite/tools/benchmark/logging.h"
// For profiling nnapi_delegate
#include "profiling/profiling.h"
@@ -321,7 +321,7 @@ void BenchmarkTfLiteModel::Init() {
::profiling::Context::get().setProfiler(interpreter->GetProfiler());
auto enable_sync = std::getenv("PROFILING_OP_SYNC");
- if (enable_sync && std::atoi(enable_sync) != 0)
+ if (enable_sync && std::strtol(enable_sync, NULL, 0) != 0)
{
::profiling::Context::get().setSync();
}
diff --git a/tests/tools/tflite_benchmark_model/profile_summarizer.cc b/tests/tools/tflite_benchmark_model/profile_summarizer.cc
index ce19b0c98..b547c7095 100644
--- a/tests/tools/tflite_benchmark_model/profile_summarizer.cc
+++ b/tests/tools/tflite_benchmark_model/profile_summarizer.cc
@@ -29,11 +29,11 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
-#include "tensorflow/contrib/lite/profiling/profile_summarizer.h"
+#include "tensorflow/lite/profiling/profile_summarizer.h"
#include <sstream>
-#include "tensorflow/contrib/lite/schema/schema_generated.h"
+#include "tensorflow/lite/schema/schema_generated.h"
namespace tflite {
namespace profiling {
diff --git a/tests/tools/tflite_loader/CMakeLists.txt b/tests/tools/tflite_loader/CMakeLists.txt
new file mode 100644
index 000000000..2705537d8
--- /dev/null
+++ b/tests/tools/tflite_loader/CMakeLists.txt
@@ -0,0 +1,23 @@
+if(NOT BUILD_TFLITE_LOADER_TEST_TOOL)
+ message("skipping tflite loader tool build")
+ return()
+endif(NOT BUILD_TFLITE_LOADER_TEST_TOOL)
+
+if(NOT BUILD_NEURUN)
+ message("skipping tflite loader tool build: neurun is not built")
+ return()
+endif(NOT BUILD_NEURUN)
+
+list(APPEND SOURCES "src/tflite_loader.cc")
+list(APPEND SOURCES "src/args.cc")
+
+nnfw_find_package(Boost REQUIRED)
+
+add_executable(tflite_loader_test_tool ${SOURCES})
+target_include_directories(tflite_loader_test_tool PRIVATE ${Boost_INCLUDE_DIRS})
+
+target_link_libraries(tflite_loader_test_tool neurun_core neurun tflite_loader)
+target_link_libraries(tflite_loader_test_tool nnfw_lib_tflite tensorflow-lite ${LIB_PTHREAD} dl nnfw_lib_misc)
+target_link_libraries(tflite_loader_test_tool boost_program_options boost_system boost_filesystem)
+
+install(TARGETS tflite_loader_test_tool DESTINATION bin)
diff --git a/tests/tools/tflite_loader/src/args.cc b/tests/tools/tflite_loader/src/args.cc
new file mode 100644
index 000000000..3fe1d0bf4
--- /dev/null
+++ b/tests/tools/tflite_loader/src/args.cc
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "args.h"
+
+#include <iostream>
+
+#include <boost/filesystem.hpp>
+
+namespace TFLiteRun
+{
+
+Args::Args(const int argc, char **argv) noexcept
+{
+ Initialize();
+ Parse(argc, argv);
+}
+
+void Args::Initialize(void)
+{
+ // General options
+ po::options_description general("General options");
+
+ // clang-format off
+ general.add_options()
+ ("help,h", "Display available options")
+ ("tflite", po::value<std::string>()->default_value("")->required(), "Input tflite model file for serialization")
+ ("data,d", po::value<std::vector<std::string>>()->multitoken()->default_value(std::vector<std::string>{}, ""), "Input data file for model");
+ // clang-format on
+
+ _options.add(general);
+ _positional.add("tflite", 1);
+}
+
+void Args::print(char **argv)
+{
+ std::cout << "tflite_loader" << std::endl << std::endl;
+ std::cout << "Load tflite model by Loader and TFLite and compare their output" << std::endl;
+ std::cout << "Usage:" << std::endl;
+ std::cout << argv[0] << " --tflite model_file.tflite --data input_data.dat" << std::endl;
+ std::cout << _options;
+ std::cout << std::endl;
+}
+
+void Args::Parse(const int argc, char **argv)
+{
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(_options).positional(_positional).run(),
+ vm);
+ po::notify(vm);
+
+ if (vm.count("help"))
+ {
+ print(argv);
+
+ exit(0);
+ }
+
+ if (vm.count("tflite"))
+ {
+ _tflite_filename = vm["tflite"].as<std::string>();
+ }
+
+ if (vm.count("data"))
+ {
+ _data_filenames = vm["data"].as<std::vector<std::string>>();
+ }
+}
+
+} // end of namespace TFLiteRun
diff --git a/tests/tools/tflite_loader/src/args.h b/tests/tools/tflite_loader/src/args.h
new file mode 100644
index 000000000..4d0e8ff41
--- /dev/null
+++ b/tests/tools/tflite_loader/src/args.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TFLITE_LOADER_TOOLS_SRC_ARGS_H__
+#define __TFLITE_LOADER_TOOLS_SRC_ARGS_H__
+
+#include <string>
+#include <boost/program_options.hpp>
+
+namespace po = boost::program_options;
+
+namespace TFLiteRun
+{
+
+class Args
+{
+public:
+ Args(const int argc, char **argv) noexcept;
+ void print(char **argv);
+
+ const std::string &getTFLiteFilename(void) const { return _tflite_filename; }
+ const std::vector<std::string> &getDataFilenames(void) const { return _data_filenames; }
+
+private:
+ void Initialize();
+ void Parse(const int argc, char **argv);
+
+private:
+ po::options_description _options;
+ po::positional_options_description _positional;
+
+ std::string _tflite_filename;
+ std::vector<std::string> _data_filenames;
+};
+
+} // namespace TFLiteRun
+
+#endif // __TFLITE_LOADER_TOOLS_SRC_ARGS_H__
diff --git a/tests/tools/tflite_loader/src/tflite_loader.cc b/tests/tools/tflite_loader/src/tflite_loader.cc
new file mode 100644
index 000000000..c2388f3cc
--- /dev/null
+++ b/tests/tools/tflite_loader/src/tflite_loader.cc
@@ -0,0 +1,289 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "tflite/ext/kernels/register.h"
+
+#include "args.h"
+#include "tflite/InterpreterSession.h"
+#include "tflite/Assert.h"
+#include "tflite/Diff.h"
+#include "misc/tensor/IndexIterator.h"
+
+#include <iostream>
+#include <fstream>
+
+#include "compiler/Compiler.h"
+#include "exec/Execution.h"
+#include "graph/Graph.h"
+
+#include "loader.h"
+
+#include "cpp14/memory.h"
+
+const int RUN_FAILED = 1;
+
+using namespace tflite;
+using namespace nnfw::tflite;
+
+const int FILE_ERROR = 2;
+const float DIFFERENCE_THRESHOLD = 10e-5;
+
+// Read vector of floats from selected file
+std::vector<float> readData(const string &path)
+{
+ std::ifstream in(path);
+ if (!in.good())
+ {
+ std::cerr << "can not open data file " << path << "\n";
+ exit(FILE_ERROR);
+ }
+ in.seekg(0, std::ifstream::end);
+ size_t len = in.tellg();
+ in.seekg(0, std::ifstream::beg);
+ assert(len % sizeof(float) == 0);
+ size_t size = len / sizeof(float);
+ std::vector<float> vec(size);
+ for (size_t i = 0; i < size; ++i)
+ {
+ in.read(reinterpret_cast<char *>(&vec[i]), sizeof(float));
+ }
+ return vec;
+}
+
+std::vector<float> randomData(RandomGenerator &randgen, const uint64_t size)
+{
+ std::vector<float> vec(size);
+ for (uint64_t i = 0; i < size; i++)
+ {
+ vec[i] = randgen.generate<float>();
+ }
+ return vec;
+}
+
+void executeGraph(const std::shared_ptr<neurun::graph::Graph> &g,
+ const std::vector<std::vector<float>> &inputs,
+ std::vector<std::vector<float>> &outputs)
+{
+ auto compiler = new neurun::compiler::Compiler(g);
+ // Compilation
+ try
+ {
+ compiler->compile();
+ }
+ catch (const std::exception &e)
+ {
+ std::cerr << "[Execution] Can't compile model" << std::endl;
+ std::cerr << e.what() << std::endl;
+ exit(-1);
+ }
+
+ std::cout << "[Execution] Graph compiled!" << std::endl;
+
+ std::shared_ptr<neurun::exec::IExecutor> executor;
+ compiler->release(executor);
+ auto execution = std::make_shared<neurun::exec::Execution>(executor);
+
+ // Verify input shapes
+ auto num_inputs = inputs.size();
+ for (size_t i = 0; i < num_inputs; i++)
+ {
+ auto input_operand_idx = g->getInputs().at(i);
+ auto input_shape = g->operands().at(input_operand_idx).shape();
+ assert(inputs[i].size() == input_shape.num_elements());
+ }
+
+ // Set output shapes
+ auto num_outputs = g->getOutputs().size();
+ outputs.resize(num_outputs);
+ for (uint32_t i = 0; i < num_outputs; i++)
+ {
+ auto output_operand_idx = g->getOutputs().at(i);
+ auto output_shape = g->operands().at(output_operand_idx).shape();
+ outputs[i].resize(output_shape.num_elements());
+ }
+
+ // Setting IO
+ try
+ {
+ for (size_t i = 0; i < num_inputs; i++)
+ execution->setInput(neurun::model::IOIndex(i), inputs[i].data(),
+ inputs[i].size() * sizeof(float));
+ for (uint32_t i = 0; i < num_outputs; i++)
+ execution->setOutput(neurun::model::IOIndex(i), outputs[i].data(),
+ outputs[i].size() * sizeof(float));
+ }
+ catch (const std::exception &e)
+ {
+ std::cerr << "[Execution] Can't set model IO" << std::endl;
+ std::cerr << e.what() << '\n';
+ exit(-1);
+ }
+
+ execution->execute();
+ std::cout << "[Execution] Done!" << std::endl;
+
+ delete compiler;
+}
+
+int main(const int argc, char **argv)
+{
+ TFLiteRun::Args args(argc, argv);
+
+ auto tflite_file = args.getTFLiteFilename();
+ auto data_files = args.getDataFilenames();
+
+ if (tflite_file.empty())
+ {
+ args.print(argv);
+ return RUN_FAILED;
+ }
+
+ std::cout << "[Execution] Stage start!" << std::endl;
+ auto test_model = nnfw::cpp14::make_unique<neurun::model::Model>();
+ auto test_graph = std::make_shared<neurun::graph::Graph>(std::move(test_model));
+ // Loading
+ try
+ {
+ tflite_loader::Loader loader(*test_graph);
+ loader.loadFromFile(tflite_file.c_str());
+ }
+ catch (std::exception &e)
+ {
+ std::cerr << "[ ERROR ] "
+ << "Failure during model load" << std::endl;
+ std::cerr << e.what() << std::endl;
+ exit(-1);
+ }
+
+ // TODO: Support other input/output types
+ for (const auto &input_idx : test_graph->getInputs())
+ {
+ const auto input_type = test_graph->operands().at(input_idx).typeInfo().type();
+ assert(input_type == neurun::model::DataType::FLOAT32 && "Only FLOAT32 inputs are supported");
+ }
+ for (const auto &output_idx : test_graph->getOutputs())
+ {
+ const auto output_type = test_graph->operands().at(output_idx).typeInfo().type();
+ assert(output_type == neurun::model::DataType::FLOAT32 && "Only FLOAT32 outputs are supported");
+ }
+
+ std::cout << "[Execution] Model is deserialized!" << std::endl;
+ auto num_inputs = test_graph->getInputs().size();
+ std::vector<std::vector<float>> inputs(num_inputs);
+ bool generate_data = data_files.empty();
+ bool read_data = data_files.size() == num_inputs;
+ if (num_inputs == 0)
+ {
+ std::cerr << "[ ERROR ] "
+ << "No inputs in model => execution is not possible" << std::endl;
+ exit(1);
+ }
+ if (!generate_data && !read_data)
+ {
+ std::cerr << "[ ERROR ] "
+ << "Wrong number of input files." << std::endl;
+ exit(1);
+ }
+
+ const int seed = 1; /* TODO Add an option for seed value */
+ RandomGenerator randgen{seed, 0.0f, 2.0f};
+ for (uint32_t i = 0; i < num_inputs; i++)
+ {
+ if (generate_data)
+ {
+ uint64_t sz = test_graph->operands().at(test_graph->getInputs().at(i)).shape().num_elements();
+ inputs[i] = randomData(randgen, sz);
+ }
+ else /* read_data */
+ inputs[i] = readData(data_files[i]);
+ }
+ std::cout << "[Execution] Input data is defined!" << std::endl;
+ std::vector<std::vector<float>> outputs;
+ // Run graph
+ executeGraph(test_graph, inputs, outputs);
+ // Compare with tflite
+ std::cout << "[Comparison] Stage start!" << std::endl;
+ // Read tflite model
+ StderrReporter error_reporter;
+ auto model = FlatBufferModel::BuildFromFile(tflite_file.c_str(), &error_reporter);
+
+ BuiltinOpResolver resolver;
+ InterpreterBuilder builder(*model, resolver);
+
+ std::unique_ptr<Interpreter> interpreter;
+ try
+ {
+ TFLITE_ENSURE(builder(&interpreter));
+ }
+ catch (const std::exception &e)
+ {
+ std::cerr << e.what() << std::endl;
+ exit(FILE_ERROR);
+ }
+ interpreter->SetNumThreads(2);
+
+ auto sess = std::make_shared<nnfw::tflite::InterpreterSession>(interpreter.get());
+ sess->prepare();
+ // Set input and run
+ for (uint32_t i = 0; i < num_inputs; i++)
+ {
+ auto input_tensor = interpreter->tensor(interpreter->inputs().at(i));
+ memcpy(input_tensor->data.f, inputs[i].data(), inputs[i].size() * sizeof(float));
+ }
+ if (!sess->run())
+ {
+ std::cout << "[Comparison] TFLite run failed!" << std::endl;
+ assert(0 && "Run failed!");
+ }
+ std::cout << "[Comparison] TFLite run done!" << std::endl;
+
+ // Calculate max difference over all outputs
+ float max_difference = 0.0f;
+ auto num_outputs = test_graph->getOutputs().size();
+ for (uint32_t out_idx = 0; out_idx < num_outputs; out_idx++)
+ {
+ const auto &tflite_output_tensor = interpreter->tensor(interpreter->outputs().at(out_idx));
+ const auto &nnfw_output_tensor = outputs[out_idx];
+
+ if (nnfw_output_tensor.size() != tflite_output_tensor->bytes / sizeof(float))
+ std::cout << "[Comparison] Different size of outputs!" << std::endl;
+ // Check max difference
+ float *tflite_out_ptr = tflite_output_tensor->data.f;
+ for (const auto &nnfw_out : nnfw_output_tensor)
+ {
+ if (std::abs(nnfw_out - *tflite_out_ptr) > max_difference)
+ max_difference = std::abs(nnfw_out - *tflite_out_ptr);
+
+ tflite_out_ptr++;
+ }
+ }
+
+ // Print results
+ std::cout << "[Comparison] Max difference: " << max_difference << std::endl;
+ int ret = 0;
+ if (max_difference > DIFFERENCE_THRESHOLD)
+ {
+ std::cout << "[Comparison] Outputs is not equal!" << std::endl;
+ ret = 1;
+ }
+ else
+ {
+ std::cout << "[Comparison] Outputs is equal!" << std::endl;
+ }
+ std::cout << "[Comparison] Done!" << std::endl;
+
+ return ret;
+}
diff --git a/tests/tools/tflite_run/CMakeLists.txt b/tests/tools/tflite_run/CMakeLists.txt
index 49d87318f..1887d7cbf 100644
--- a/tests/tools/tflite_run/CMakeLists.txt
+++ b/tests/tools/tflite_run/CMakeLists.txt
@@ -1,11 +1,18 @@
+if(NOT BUILD_TFLITE_RUN)
+ return()
+endif(NOT BUILD_TFLITE_RUN)
+
list(APPEND TFLITE_RUN_SRCS "src/tflite_run.cc")
-list(APPEND TFLITE_RUN_SRCS "src/bin_image.cc")
list(APPEND TFLITE_RUN_SRCS "src/args.cc")
list(APPEND TFLITE_RUN_SRCS "src/tensor_dumper.cc")
list(APPEND TFLITE_RUN_SRCS "src/tensor_loader.cc")
+nnfw_find_package(Boost REQUIRED)
+
add_executable(tflite_run ${TFLITE_RUN_SRCS})
target_include_directories(tflite_run PRIVATE src)
+target_include_directories(tflite_run PRIVATE ${Boost_INCLUDE_DIRS})
+
target_link_libraries(tflite_run tensorflow-lite ${LIB_PTHREAD} dl nnfw_lib_tflite)
target_link_libraries(tflite_run boost_program_options boost_system boost_filesystem)
diff --git a/tests/tools/tflite_run/src/args.cc b/tests/tools/tflite_run/src/args.cc
index 713a0a9d2..6c85d884e 100644
--- a/tests/tools/tflite_run/src/args.cc
+++ b/tests/tools/tflite_run/src/args.cc
@@ -23,7 +23,7 @@
namespace TFLiteRun
{
-Args::Args(const int argc, char **argv)
+Args::Args(const int argc, char **argv) noexcept
{
Initialize();
Parse(argc, argv);
@@ -38,7 +38,9 @@ void Args::Initialize(void)
// clang-format off
general.add_options()
("help,h", "Display available options")
+ ("input,i", po::value<std::string>()->default_value(""), "Input filename")
("dump,d", po::value<std::string>()->default_value(""), "Output filename")
+ ("ishapes", po::value<std::vector<int>>()->multitoken(), "Input shapes")
("compare,c", po::value<std::string>()->default_value(""), "filename to be compared with")
("tflite", po::value<std::string>()->required());
// clang-format on
@@ -52,9 +54,7 @@ void Args::Parse(const int argc, char **argv)
po::variables_map vm;
po::store(po::command_line_parser(argc, argv).options(_options).positional(_positional).run(),
vm);
- po::notify(vm);
-#if 0 // Enable this when we have mutually conflicting options
{
auto conflicting_options = [&](const std::string &o1, const std::string &o2) {
if ((vm.count(o1) && !vm[o1].defaulted()) && (vm.count(o2) && !vm[o2].defaulted()))
@@ -66,7 +66,6 @@ void Args::Parse(const int argc, char **argv)
conflicting_options("input", "compare");
}
-#endif
if (vm.count("help"))
{
@@ -78,6 +77,8 @@ void Args::Parse(const int argc, char **argv)
exit(0);
}
+ po::notify(vm);
+
if (vm.count("dump"))
{
_dump_filename = vm["dump"].as<std::string>();
@@ -88,6 +89,28 @@ void Args::Parse(const int argc, char **argv)
_compare_filename = vm["compare"].as<std::string>();
}
+ if (vm.count("input"))
+ {
+ _input_filename = vm["input"].as<std::string>();
+
+ if (!_input_filename.empty())
+ {
+ if (!boost::filesystem::exists(_input_filename))
+ {
+ std::cerr << "input image file not found: " << _input_filename << "\n";
+ }
+ }
+ }
+
+ if (vm.count("ishapes"))
+ {
+ _input_shapes.resize(vm["ishapes"].as<std::vector<int>>().size());
+ for (size_t i = 0; i < _input_shapes.size(); i++)
+ {
+ _input_shapes[i] = vm["ishapes"].as<std::vector<int>>()[i];
+ }
+ }
+
if (vm.count("tflite"))
{
_tflite_filename = vm["tflite"].as<std::string>();
diff --git a/tests/tools/tflite_run/src/args.h b/tests/tools/tflite_run/src/args.h
index 5561544eb..25fd77a63 100644
--- a/tests/tools/tflite_run/src/args.h
+++ b/tests/tools/tflite_run/src/args.h
@@ -28,12 +28,14 @@ namespace TFLiteRun
class Args
{
public:
- Args(const int argc, char **argv);
+ Args(const int argc, char **argv) noexcept;
void print(void);
const std::string &getTFLiteFilename(void) const { return _tflite_filename; }
const std::string &getDumpFilename(void) const { return _dump_filename; }
const std::string &getCompareFilename(void) const { return _compare_filename; }
+ const std::string &getInputFilename(void) const { return _input_filename; }
+ const std::vector<int> &getInputShapes(void) const { return _input_shapes; }
private:
void Initialize();
@@ -46,6 +48,8 @@ private:
std::string _tflite_filename;
std::string _dump_filename;
std::string _compare_filename;
+ std::string _input_filename;
+ std::vector<int> _input_shapes;
};
} // end of namespace TFLiteRun
diff --git a/tests/tools/tflite_run/src/bin_image.h b/tests/tools/tflite_run/src/bin_image.h
index 845011be6..5fc813e94 100644
--- a/tests/tools/tflite_run/src/bin_image.h
+++ b/tests/tools/tflite_run/src/bin_image.h
@@ -20,7 +20,7 @@
#include <string>
#include <vector>
-#include "tensorflow/contrib/lite/context.h"
+#include "tensorflow/lite/context.h"
class BinImage
{
diff --git a/tests/tools/tflite_run/src/tensor_dumper.cc b/tests/tools/tflite_run/src/tensor_dumper.cc
index 8568c9b67..4ccd4e11a 100644
--- a/tests/tools/tflite_run/src/tensor_dumper.cc
+++ b/tests/tools/tflite_run/src/tensor_dumper.cc
@@ -1,10 +1,26 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
#include "tensor_dumper.h"
#include <fstream>
#include <iostream>
#include <cstring>
-#include "tensorflow/contrib/lite/interpreter.h"
+#include "tensorflow/lite/interpreter.h"
namespace TFLiteRun
{
diff --git a/tests/tools/tflite_run/src/tensor_dumper.h b/tests/tools/tflite_run/src/tensor_dumper.h
index 2805f1076..5fdcc54f7 100644
--- a/tests/tools/tflite_run/src/tensor_dumper.h
+++ b/tests/tools/tflite_run/src/tensor_dumper.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
#ifndef __TFLITE_RUN_TENSOR_DUMPER_H__
#define __TFLITE_RUN_TENSOR_DUMPER_H__
diff --git a/tests/tools/tflite_run/src/tensor_loader.cc b/tests/tools/tflite_run/src/tensor_loader.cc
index 934b78f40..de605bacf 100644
--- a/tests/tools/tflite_run/src/tensor_loader.cc
+++ b/tests/tools/tflite_run/src/tensor_loader.cc
@@ -14,7 +14,7 @@ TensorLoader::TensorLoader(tflite::Interpreter &interpreter)
{
}
-void TensorLoader::load(const std::string &filename)
+void TensorLoader::loadDumpedTensors(const std::string &filename)
{
// TODO Handle file open/read error
std::ifstream file(filename, std::ios::ate | std::ios::binary);
@@ -28,9 +28,37 @@ void TensorLoader::load(const std::string &filename)
file.read(reinterpret_cast<char *>(tensor_indices_raw), sizeof(tensor_indices_raw));
std::vector<int> tensor_indices(tensor_indices_raw, tensor_indices_raw + num_tensors);
- _raw_data = std::unique_ptr<float>(new float[file_size]);
+ _raw_data = std::unique_ptr<float[]>(new float[file_size]);
file.read(reinterpret_cast<char *>(_raw_data.get()), file_size);
+ file.close();
+
+ size_t read_bytes = loadTensorsFromRawData(tensor_indices);
+
+ // The file size and total output tensor size must match
+ assert(file_size ==
+ sizeof(num_tensors) + sizeof(tensor_indices_raw) + read_bytes * sizeof(float));
+}
+
+void TensorLoader::loadRawTensors(const std::string &filename,
+ const std::vector<int> &tensor_indices)
+{
+ // TODO Handle file open/read error
+ std::ifstream file(filename, std::ios::ate | std::ios::binary);
+ size_t file_size = file.tellg();
+ file.seekg(0, std::ios::beg);
+ _raw_data = std::unique_ptr<float[]>(new float[file_size]);
+ file.read(reinterpret_cast<char *>(_raw_data.get()), file_size);
+ file.close();
+
+ size_t read_bytes = loadTensorsFromRawData(tensor_indices);
+
+ // The file size and total output tensor size must match
+ assert(file_size == read_bytes * sizeof(float));
+}
+
+size_t TensorLoader::loadTensorsFromRawData(const std::vector<int> &tensor_indices)
+{
size_t offset = 0;
for (const auto &o : tensor_indices)
{
@@ -51,10 +79,7 @@ void TensorLoader::load(const std::string &filename)
_tensor_map.insert(std::make_pair(o, nnfw::tflite::TensorView<float>(shape, base)));
}
- // The file size and total output tensor size must match
- assert(file_size == sizeof(num_tensors) + sizeof(tensor_indices_raw) + offset * sizeof(float));
-
- file.close();
+ return offset;
}
const nnfw::tflite::TensorView<float> &TensorLoader::get(int tensor_idx) const
diff --git a/tests/tools/tflite_run/src/tensor_loader.h b/tests/tools/tflite_run/src/tensor_loader.h
index fc4a37a08..2e671aa8a 100644
--- a/tests/tools/tflite_run/src/tensor_loader.h
+++ b/tests/tools/tflite_run/src/tensor_loader.h
@@ -20,13 +20,15 @@ class TensorLoader
{
public:
TensorLoader(tflite::Interpreter &interpreter);
- void load(const std::string &filename);
+ void loadDumpedTensors(const std::string &filename);
+ void loadRawTensors(const std::string &filename, const std::vector<int> &tensor_indices);
const nnfw::tflite::TensorView<float> &get(int tensor_idx) const;
size_t getNums() const { return _tensor_map.size(); }
private:
+ size_t loadTensorsFromRawData(const std::vector<int> &tensor_indices);
tflite::Interpreter &_interpreter;
- std::unique_ptr<float> _raw_data;
+ std::unique_ptr<float[]> _raw_data;
std::unordered_map<int, nnfw::tflite::TensorView<float>> _tensor_map;
};
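The `std::unique_ptr<float>` to `std::unique_ptr<float[]>` change in this pair of files fixes undefined behavior: the non-array form of `unique_ptr` calls `delete` on a buffer that was allocated with `new[]`. A minimal sketch of the distinction:

```cpp
#include <memory>

int main()
{
  // Wrong: ~unique_ptr calls `delete`, but the buffer came from `new[]`:
  // std::unique_ptr<float> bad(new float[16]); // undefined behavior on destruction

  // Right: the array specialization calls `delete[]` and provides operator[]:
  std::unique_ptr<float[]> raw(new float[16]);
  raw[0] = 1.0f;

  return 0;
}
```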
diff --git a/tests/tools/tflite_run/src/tflite_run.cc b/tests/tools/tflite_run/src/tflite_run.cc
index 5be6909e5..deed12856 100644
--- a/tests/tools/tflite_run/src/tflite_run.cc
+++ b/tests/tools/tflite_run/src/tflite_run.cc
@@ -15,14 +15,13 @@
*/
#include "tflite/ext/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
+#include "tensorflow/lite/model.h"
-#include "bin_image.h"
#include "args.h"
#include "tensor_dumper.h"
#include "tensor_loader.h"
#include "misc/benchmark.h"
-#include "misc/environment.h"
+#include "misc/EnvVar.h"
#include "misc/fp32.h"
#include "tflite/Diff.h"
#include "tflite/Assert.h"
@@ -65,15 +64,23 @@ int main(const int argc, char **argv)
std::chrono::milliseconds t_prepare(0);
std::chrono::milliseconds t_invoke(0);
- nnfw::misc::benchmark::measure(t_prepare) << [&](void) {
- BuiltinOpResolver resolver;
+ try
+ {
+ nnfw::misc::benchmark::measure(t_prepare) << [&](void) {
+ BuiltinOpResolver resolver;
- InterpreterBuilder builder(*model, resolver);
+ InterpreterBuilder builder(*model, resolver);
- TFLITE_ENSURE(builder(&interpreter))
+ TFLITE_ENSURE(builder(&interpreter))
- interpreter->SetNumThreads(1);
- };
+ interpreter->SetNumThreads(nnfw::misc::EnvVar("THREAD").asInt(-1));
+ };
+ }
+ catch (const std::exception &e)
+ {
+ std::cerr << e.what() << '\n';
+ return 1;
+ }
std::shared_ptr<nnfw::tflite::Session> sess;
@@ -88,12 +95,45 @@ int main(const int argc, char **argv)
sess->prepare();
+ if (args.getInputShapes().size() != 0)
+ {
+ const int dim_values = args.getInputShapes().size();
+ int offset = 0;
+
+ for (const auto &id : interpreter->inputs())
+ {
+ TfLiteTensor *tensor = interpreter->tensor(id);
+ std::vector<int32_t> new_dim;
+ new_dim.resize(tensor->dims->size);
+
+ for (uint32_t axis = 0; axis < tensor->dims->size; axis++, offset++)
+ {
+ new_dim[axis] =
+ ((offset < dim_values) ? args.getInputShapes()[offset] : tensor->dims->data[axis]);
+ }
+
+ interpreter->ResizeInputTensor(id, new_dim);
+
+ if (offset >= dim_values)
+ break;
+ }
+ interpreter->AllocateTensors();
+ }
+
TFLiteRun::TensorLoader tensor_loader(*interpreter);
- // Load input from dumped tensor file.
- if (!args.getCompareFilename().empty())
+ // Load input from raw or dumped tensor file.
+ // The two options are mutually exclusive; Args checks this.
+ if (!args.getInputFilename().empty() || !args.getCompareFilename().empty())
{
- tensor_loader.load(args.getCompareFilename());
+ if (!args.getInputFilename().empty())
+ {
+ tensor_loader.loadRawTensors(args.getInputFilename(), interpreter->inputs());
+ }
+ else
+ {
+ tensor_loader.loadDumpedTensors(args.getCompareFilename());
+ }
for (const auto &o : interpreter->inputs())
{
@@ -226,8 +266,7 @@ int main(const int argc, char **argv)
// TODO Code duplication (copied from RandomTestRunner)
- int tolerance = 1;
- nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
+ int tolerance = nnfw::misc::EnvVar("TOLERANCE").asInt(1);
auto equals = [tolerance](float lhs, float rhs) {
// NOTE Hybrid approach
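The last hunk reads the comparison tolerance from the `TOLERANCE` environment variable; the `equals` lambda it feeds is only partially visible here. As a rough illustration (an assumption, not the tool's actual hybrid scheme), a tolerance-scaled comparison combining an absolute and a relative check can look like this:

```cpp
#include <cmath>

// Sketch only: `tolerance` scales a small base epsilon; values near zero use
// the absolute check, larger magnitudes fall back to a relative check.
bool almost_equal(float lhs, float rhs, int tolerance)
{
  const float eps = 1e-5f * tolerance;
  const float diff = std::fabs(lhs - rhs);
  if (diff <= eps)
    return true; // absolute check near zero
  return diff <= eps * std::fmax(std::fabs(lhs), std::fabs(rhs)); // relative check
}
```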
diff --git a/tools/CMakeLists.txt b/tools/CMakeLists.txt
index 7181629f3..ba69aafd2 100644
--- a/tools/CMakeLists.txt
+++ b/tools/CMakeLists.txt
@@ -1,6 +1,5 @@
-if(OBS_BUILD)
+if(NOT BUILD_TOOLS)
return()
-endif(OBS_BUILD)
+endif(NOT BUILD_TOOLS)
-add_subdirectory(nnapi_quickcheck)
-add_subdirectory(opencl_tool)
+add_subdirectories()
diff --git a/tools/cross/build_android_rootfs.sh b/tools/cross/build_android_rootfs.sh
index d0b4afd7b..035e5110e 100755
--- a/tools/cross/build_android_rootfs.sh
+++ b/tools/cross/build_android_rootfs.sh
@@ -101,7 +101,7 @@ cp -rv $__ToolchainDir/$BOOST_BASENAME/boost $__RootfsDir/sysroot/usr/include
if [[ "$__ACL" == 1 ]]; then
echo "Installing arm compute library"
- ACL_VERSION=18.03
+ ACL_VERSION=19.05
ACL_BASENAME=arm_compute-v$ACL_VERSION-bin-android
wget -nv -nc https://github.com/ARM-software/ComputeLibrary/releases/download/v$ACL_VERSION/$ACL_BASENAME.tar.gz -O $__ToolchainDir/$ACL_BASENAME.tar.gz
diff --git a/tools/cross/build_rootfs.sh b/tools/cross/build_rootfs.sh
index 3d0837bb6..3aa242bed 100755
--- a/tools/cross/build_rootfs.sh
+++ b/tools/cross/build_rootfs.sh
@@ -28,6 +28,7 @@ __UbuntuPackages="build-essential"
# other development supports
__UbuntuPackages+=" libboost-all-dev ocl-icd-opencl-dev"
+__UbuntuPackages+=" libhdf5-dev"
# symlinks fixer
__UbuntuPackages+=" symlinks"
@@ -116,6 +117,9 @@ if [[ -n $__LinuxCodeName ]]; then
chroot $__RootfsDir apt-get update
chroot $__RootfsDir apt-get -f -y install
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
+ machine=$(chroot $__RootfsDir gcc -dumpmachine)
+ chroot $__RootfsDir ln -s /usr/lib/${machine}/libhdf5_serial.a /usr/lib/${machine}/libhdf5.a
+ chroot $__RootfsDir ln -s /usr/lib/${machine}/libhdf5_serial.so /usr/lib/${machine}/libhdf5.so
chroot $__RootfsDir symlinks -cr /usr
if [ $__SkipUnmount == 0 ]; then
diff --git a/tools/extract_weights_from_tflite/extract.py b/tools/extract_weights_from_tflite/extract.py
index afde08c69..7559ba7e8 100755
--- a/tools/extract_weights_from_tflite/extract.py
+++ b/tools/extract_weights_from_tflite/extract.py
@@ -41,9 +41,9 @@ buffer_name_map = {}
for t in tensors:
if 'buffer' in t:
if t['buffer'] in buffer_name_map:
- print 'find conflict!!'
- print t
- print buffer_name_map
+ print('find conflict!!')
+ print(t)
+ print(buffer_name_map)
comps = t['name'].split('/')
names = []
if len(comps) > 1 and comps[0] == comps[1]:
@@ -60,7 +60,8 @@ for i in range(len(j['buffers'])):
b = j['buffers'][i]
if 'data' in b:
if i not in buffer_name_map:
- print "buffer %d is not found in buffer_name_map. skip printing the buffer..."
+ print("buffer %d is not found in buffer_name_map. skip printing the buffer..."
+ % i)
continue
filename = "%s.npy" % (buffer_name_map[i]['name'])
@@ -83,8 +84,8 @@ for i in range(len(j['buffers'])):
elif len(shape) == 1:
pass
else:
- print "Undefined length: conversion skipped. shape=", shape
+ print("Undefined length: conversion skipped. shape=", shape)
#print shape, filename, n.shape
np.save(filename, n)
-print "Done."
+print("Done.")
diff --git a/tools/extract_weights_from_tflite/extract_from_tflite.sh b/tools/extract_weights_from_tflite/extract_from_tflite.sh
index be84f25f3..7bc870bec 100755
--- a/tools/extract_weights_from_tflite/extract_from_tflite.sh
+++ b/tools/extract_weights_from_tflite/extract_from_tflite.sh
@@ -27,5 +27,5 @@ TFLITE_FILE=$1
TFLITE_FILENAME=${TFLITE_FILE##*\/}
TFLITE_JSON=${TFLITE_FILENAME%\.tflite}.json
-$FLATC --json --strict-json $ROOT_PATH/externals/tensorflow/tensorflow/contrib/lite/schema/schema.fbs -- $TFLITE_FILE
+$FLATC --json --strict-json $ROOT_PATH/externals/tensorflow/tensorflow/lite/schema/schema.fbs -- $TFLITE_FILE
$SCRIPT_PATH/extract.py $TFLITE_JSON
diff --git a/tools/extract_weights_from_tflite/print_op.py b/tools/extract_weights_from_tflite/print_op.py
index 16aff9720..a946b926c 100755
--- a/tools/extract_weights_from_tflite/print_op.py
+++ b/tools/extract_weights_from_tflite/print_op.py
@@ -48,11 +48,11 @@ for o in operators:
elif 'builtin_options_type' in o:
# if we cannot find opcode_index, print option type instead.
op_name = o['builtin_options_type']
- print "Layer:", op_name
+ print("Layer:", op_name)
- print " Input shapes ---"
+ print(" Input shapes ---")
for inp in o['inputs']:
- print " ", tensors[inp]['shape']
- print " Output shapes ---"
+ print(" ", tensors[inp]['shape'])
+ print(" Output shapes ---")
for outp in o['outputs']:
- print " ", tensors[outp]['shape']
+ print(" ", tensors[outp]['shape'])
diff --git a/tools/image_importer/image_importer.py b/tools/image_importer/image_importer.py
index 77508e1b6..f849e8879 100755
--- a/tools/image_importer/image_importer.py
+++ b/tools/image_importer/image_importer.py
@@ -25,9 +25,9 @@ if (len(sys.argv) < 3):
img = Image.open(sys.argv[1])
outfile = sys.argv[2]
-print "Image format = ", img.bits, img.size, img.format
+print("Image format = ", img.bits, img.size, img.format)
with open(outfile, 'wb') as f:
f.write(img.tobytes())
-print "Done."
+print("Done.")
diff --git a/tools/kbenchmark/Args.cc b/tools/kbenchmark/Args.cc
new file mode 100644
index 000000000..204810b87
--- /dev/null
+++ b/tools/kbenchmark/Args.cc
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Args.h"
+
+#include <iostream>
+#include <boost/filesystem.hpp>
+
+namespace kbenchmark
+{
+
+Args::Args(const int argc, char **argv) noexcept { Initialize(argc, argv); }
+
+void Args::Initialize(const int argc, char **argv)
+{
+ // General options
+ po::options_description general("General options");
+ // clang-format off
+ general.add_options()("help,h", "Display available options")
+ ("config,c", po::value<std::string>(&_config)->required(), "Configuration filename")
+ ("kernel,k", po::value<std::vector<std::string>>(&_kernel)->multitoken()->composing()->required(), "Kernel library name, support multiple kernel libraries")
+ ("reporter,r", po::value<std::string>(&_reporter)->default_value("standard"), "Set reporter types(standard, html, junit, csv)")
+ ("filter,f", po::value<std::string>(&_filter)->default_value(".*"), "Only run benchmarks whose name matches the regular expression pattern")
+ ("verbose,v", po::value<int>(&_verbose)->default_value(0)->implicit_value(true), "Show verbose output")
+ ("output,o", po::value<std::string>(&_output)->default_value(""), "Set additional strings for output file name")
+ ;
+ // clang-format on
+
+ po::variables_map vm;
+ po::store(po::parse_command_line(argc, argv, general), vm);
+
+ try
+ {
+ po::notify(vm);
+ }
+ catch (const boost::program_options::required_option &e)
+ {
+ if (vm.count("help"))
+ {
+ std::cout << general << std::endl;
+ exit(0);
+ }
+ else
+ {
+ throw e;
+ }
+ }
+
+ if (vm.count("help"))
+ {
+ std::cout << general << std::endl;
+ exit(0);
+ }
+
+ if (vm.count("config"))
+ {
+ if (_config.substr(_config.find_last_of(".") + 1) != "config")
+ {
+ std::cerr << "Please specify .config file" << std::endl;
+ exit(1);
+ }
+
+ if (!boost::filesystem::exists(_config))
+ {
+ std::cerr << _config << " file not found" << std::endl;
+ exit(1);
+ }
+ }
+
+ if (vm.count("kernel"))
+ {
+ for (auto &k : _kernel)
+ {
+ if (!boost::filesystem::exists(k))
+ {
+ std::cerr << k << " file not found" << std::endl;
+ exit(1);
+ }
+ }
+ }
+
+ if (vm.count("reporter"))
+ {
+ if (_reporter != "junit" && _reporter != "csv" && _reporter != "html" &&
+ _reporter != "standard")
+ {
+ std::cerr << "Invalid reporter" << std::endl;
+ exit(1);
+ }
+ }
+}
+
+} // namespace kbenchmark
diff --git a/tools/kbenchmark/Args.h b/tools/kbenchmark/Args.h
new file mode 100644
index 000000000..ff1400c12
--- /dev/null
+++ b/tools/kbenchmark/Args.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_ARGS_H__
+#define __KBENCHMARK_ARGS_H__
+
+#include <string>
+#include <vector>
+#include <boost/program_options.hpp>
+
+namespace po = boost::program_options;
+
+namespace kbenchmark
+{
+
+class Args
+{
+public:
+ Args(const int argc, char **argv) noexcept;
+
+ const std::string &config(void) { return _config; }
+ const std::vector<std::string> &kernel(void) { return _kernel; }
+ const std::string &reporter(void) { return _reporter; }
+ const std::string &filter(void) { return _filter; }
+ const std::string &output(void) { return _output; }
+ int verbose(void) { return _verbose; }
+
+private:
+ void Initialize(const int argc, char **argv);
+
+private:
+ std::string _config;
+ std::vector<std::string> _kernel;
+ std::string _reporter;
+ std::string _filter;
+ std::string _output;
+ int _verbose;
+};
+
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_ARGS_H__
diff --git a/tools/kbenchmark/CMakeLists.txt b/tools/kbenchmark/CMakeLists.txt
new file mode 100644
index 000000000..ebf9a6725
--- /dev/null
+++ b/tools/kbenchmark/CMakeLists.txt
@@ -0,0 +1,29 @@
+if(NOT BUILD_KBENCHMARK)
+ return()
+endif(NOT BUILD_KBENCHMARK)
+
+nnfw_find_package(Nonius QUIET)
+
+if(NOT Nonius_FOUND)
+ return()
+endif(NOT Nonius_FOUND)
+
+nnfw_find_package(Boost QUIET)
+
+if(NOT Boost_FOUND)
+ return()
+endif(NOT Boost_FOUND)
+
+# driver
+file(GLOB_RECURSE SOURCES "*.cc")
+
+add_executable(kbenchmark ${SOURCES})
+target_compile_options(kbenchmark PRIVATE -Wno-psabi)
+target_include_directories(kbenchmark PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
+target_link_libraries(kbenchmark PUBLIC nonius)
+target_link_libraries(kbenchmark PUBLIC dl)
+target_link_libraries(kbenchmark PUBLIC pthread boost_program_options boost_system boost_filesystem)
+install(TARGETS kbenchmark DESTINATION bin)
+
+# kernel libraries
+add_subdirectory(kernels)
diff --git a/tools/kbenchmark/ConfigFile.h b/tools/kbenchmark/ConfigFile.h
new file mode 100644
index 000000000..d6365a400
--- /dev/null
+++ b/tools/kbenchmark/ConfigFile.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_CONFIG_FILE_H__
+#define __KBENCHMARK_CONFIG_FILE_H__
+
+#include <fstream>
+#include <string>
+#include <algorithm>
+#include <regex>
+#include <map>
+
+namespace
+{
+
+std::string getName(const std::string &src)
+{
+ std::string name{src.substr(0, src.find_last_of("."))};
+
+ std::string op;
+ size_t pos = 0;
+ std::string token{"Main_model_"};
+ if ((pos = name.find(token)) != std::string::npos)
+ {
+ op = name.substr(pos + token.size());
+ }
+ else if ((pos = name.find("Model_#")) != std::string::npos)
+ {
+ op = std::regex_replace(name, std::regex("[^0-9]*([0-9]+)_*"), "$2");
+ }
+ return op;
+}
+
+std::string trim(const std::string &src, const std::string &delims)
+{
+ std::string str{src};
+ for (int i = 0; i < delims.size(); ++i)
+ {
+ str.erase(std::remove(str.begin(), str.end(), delims[i]), str.end());
+ }
+ return str;
+}
+
+} // namespace
+
+namespace kbenchmark
+{
+
+using OperationInfo = std::map<std::string, std::string>;
+
+class ConfigFile
+{
+public:
+ using iterator = std::map<int, OperationInfo>::iterator;
+ using const_iterator = std::map<int, OperationInfo>::const_iterator;
+
+public:
+ ConfigFile(const std::string &config) : _name{getName(config)}
+ {
+ std::ifstream file(config.c_str());
+
+ std::string line;
+ int id;
+ std::string key;
+ std::string value;
+ size_t pos;
+
+ while (std::getline(file, line))
+ {
+ if (!line.length())
+ continue;
+ if (line[0] == '#')
+ continue;
+ if (line[0] == '[')
+ {
+ id = std::stoi(line.substr(1, line.find(']') - 1));
+ continue;
+ }
+ pos = line.find(':');
+ key = line.substr(0, pos);
+ value = trim(line.substr(pos + 1), " []");
+ _contents[id][key] = value;
+ }
+ }
+
+ const std::string name(void) { return _name; }
+
+ iterator begin(void) { return _contents.begin(); }
+ iterator end(void) { return _contents.end(); }
+ const_iterator begin(void) const { return _contents.begin(); }
+ const_iterator end(void) const { return _contents.end(); }
+ const_iterator cbegin(void) const { return _contents.cbegin(); }
+ const_iterator cend(void) const { return _contents.cend(); }
+
+private:
+ std::string _name;
+ std::map<int, OperationInfo> _contents;
+};
+
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_CONFIG_FILE_H__
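A short usage sketch for the parser above; the file name and the `input` key follow the sample configuration shown in the kbenchmark README later in this diff:

```cpp
#include "ConfigFile.h"

#include <iostream>

int main()
{
  kbenchmark::ConfigFile cf("inceptionv3_slim_Main_model_CONV_2D.config");
  std::cout << "operation: " << cf.name() << "\n"; // prints "CONV_2D"

  // Iterate sections: std::map<int, OperationInfo> keyed by the [id] headers
  for (const auto &section : cf)
  {
    const kbenchmark::OperationInfo &info = section.second;
    std::cout << "[" << section.first << "] input = " << info.at("input") << "\n";
  }
  return 0;
}
```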
diff --git a/tools/kbenchmark/Driver.cc b/tools/kbenchmark/Driver.cc
new file mode 100644
index 000000000..f2874d533
--- /dev/null
+++ b/tools/kbenchmark/Driver.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Args.h"
+#include "ConfigFile.h"
+#include "OperationLoader.h"
+
+#include <nonius/nonius.h++>
+
+#include <iostream>
+#include <string>
+
+// NOTE dlfcn.h is not a standard library
+#include <dlfcn.h>
+
+using namespace kbenchmark;
+
+int main(int argc, char *argv[])
+{
+ Args args(argc, argv);
+
+ // nonius::benchmark_registry aka std::vector<nonius::benchmark>
+ nonius::benchmark_registry benchmarks;
+
+ // Load kernel library
+ const std::vector<std::string> &kernel_list = args.kernel();
+ std::vector<void *> khandle_list;
+
+ for (auto &k : kernel_list)
+ {
+ void *khandle;
+ typedef nonius::benchmark_registry &(*benchmark_entry)(void);
+ benchmark_entry kbenchmark_entry;
+ // TODO Check if the parameters are valid
+
+ khandle = dlopen(k.c_str(), RTLD_LAZY);
+ if (khandle == nullptr)
+ {
+ std::cerr << "Fail to dlopen " << k << std::endl;
+ return EINVAL;
+ }
+ char *error;
+ kbenchmark_entry = reinterpret_cast<benchmark_entry>(dlsym(khandle, "benchmark_functions"));
+ if ((error = dlerror()) != nullptr)
+ {
+ dlclose(khandle);
+ std::cerr << error << std::endl;
+ return EINVAL;
+ }
+
+ // Save khandle for dlclose
+ khandle_list.push_back(khandle);
+
+ // Add current kernel benchmark functions to the global benchmark list
+ nonius::benchmark_registry &kbenchmarks = kbenchmark_entry();
+ benchmarks.insert(std::end(benchmarks), std::begin(kbenchmarks), std::end(kbenchmarks));
+ }
+
+ // Set default test name
+ std::string config_name{args.config()};
+ config_name = config_name.substr(config_name.find_last_of("/") + 1);
+ config_name = config_name.substr(0, config_name.find_last_of("."));
+ std::string test_name{"test_benchmark_" + config_name};
+ if (!args.output().empty())
+ {
+ test_name += (std::string{"_"} + args.output());
+ }
+ std::cout << "Benchmark test name\n " << test_name << std::endl;
+
+ if (args.verbose())
+ {
+ std::cout << "benchmark functions list:" << std::endl;
+ for (auto &&f : benchmarks)
+ {
+ if (std::regex_match(f.name, std::regex(args.filter())))
+ {
+ std::cout << " " << f.name << std::endl;
+ }
+ }
+ }
+
+ std::string reporter{args.reporter()};
+ std::string ext{"." + reporter};
+ if (reporter == "standard")
+ {
+ ext = ".txt";
+ }
+
+ // Set nonius configuration
+ nonius::configuration cfg;
+ cfg.reporter = reporter;
+ cfg.filter_pattern = args.filter();
+ cfg.verbose = args.verbose();
+ cfg.title = test_name;
+ cfg.output_file = test_name + ext;
+ cfg.summary = true;
+
+ // Create ConfigFile object from config file
+ ConfigFile cf(args.config());
+
+ // Get OperationLoader instance
+ OperationLoader &opl = OperationLoader::getInstance();
+
+ if (!opl.is_valid(cf.name()))
+ {
+ std::cerr << cf.name() << " is not valid operation" << std::endl;
+ }
+ else
+ {
+ for (auto &c : cf)
+ {
+ if (reporter != "html")
+ {
+ std::string temp_name{test_name + std::string{"_"} + std::to_string(c.first)};
+ cfg.title = temp_name;
+ cfg.output_file = temp_name + ext;
+ }
+
+ nonius::parameters op_params = opl[cf.name()]->params(c.first, c.second);
+ cfg.params.map = cfg.params.map.merged(op_params);
+
+ nonius::go(cfg, benchmarks);
+ }
+ }
+
+ // Release kernel library
+ benchmarks.clear();
+ for (auto khandle : khandle_list)
+ {
+ dlclose(khandle);
+ }
+
+ return 0;
+}
diff --git a/tools/kbenchmark/Operation.h b/tools/kbenchmark/Operation.h
new file mode 100644
index 000000000..4bf1e8c94
--- /dev/null
+++ b/tools/kbenchmark/Operation.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_OPERATION_H__
+#define __KBENCHMARK_OPERATION_H__
+
+#include <nonius/param.h++>
+
+#include "ConfigFile.h"
+
+namespace kbenchmark
+{
+
+class Operation
+{
+public:
+ Operation() = default;
+
+ virtual nonius::parameters params(int layer_num, OperationInfo &info) = 0;
+};
+
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_OPERATION_H__
diff --git a/tools/kbenchmark/OperationLoader.h b/tools/kbenchmark/OperationLoader.h
new file mode 100644
index 000000000..3659c307c
--- /dev/null
+++ b/tools/kbenchmark/OperationLoader.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_OPERATION_LOADER_H__
+#define __KBENCHMARK_OPERATION_LOADER_H__
+
+#include <string>
+#include <unordered_map>
+
+#include "Operation.h"
+#include "operations/Convolution.h"
+#include "operations/TransposeConv.h"
+
+namespace kbenchmark
+{
+
+class OperationLoader
+{
+public:
+ static OperationLoader &getInstance(void)
+ {
+ static OperationLoader instance;
+ return instance;
+ }
+
+ Operation *operator[](const std::string &name) { return _map[name]; }
+ bool is_valid(const std::string &name) { return _map.count(name); }
+
+private:
+ OperationLoader(void)
+ {
+#define OP(ConfigName, OperationName) _map[ConfigName] = new operation::OperationName();
+#include "Operations.lst"
+#undef OP
+ }
+
+ ~OperationLoader() = default;
+ OperationLoader(const OperationLoader &) = delete;
+ OperationLoader &operator=(const OperationLoader &) = delete;
+
+private:
+ std::unordered_map<std::string, Operation *> _map;
+};
+
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_OPERATION_LOADER_H__
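For reference, with the `Operations.lst` added below, the `OP` X-macro in the constructor above expands to:

```cpp
_map["CONV_2D"] = new operation::Convolution();
_map["TRANSPOSE_CONV"] = new operation::TransposeConv();
```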
diff --git a/tools/kbenchmark/Operations.lst b/tools/kbenchmark/Operations.lst
new file mode 100644
index 000000000..6e51af654
--- /dev/null
+++ b/tools/kbenchmark/Operations.lst
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OP
+#error Define OP before including this file
+#endif
+
+// Config Name Operation Name
+OP("CONV_2D", Convolution)
+OP("TRANSPOSE_CONV", TransposeConv)
diff --git a/tools/kbenchmark/README.md b/tools/kbenchmark/README.md
new file mode 100644
index 000000000..45e4970f0
--- /dev/null
+++ b/tools/kbenchmark/README.md
@@ -0,0 +1,81 @@
+# kbenchmark
+
+## Purpose
+
+Several neural network kernel libraries can be used to run inference for AI models on a Linux system. Each kernel library can be implemented in a number of ways, and each implementation can perform differently depending on its configuration. This benchmark tool helps you measure the system's performance under each configuration and identify possible issues on your system.
+
+## Prerequisites
+
+### Relevant libraries
+This tool is based on the [libnonius](https://github.com/libnonius/nonius) micro-benchmarking framework. The framework is downloaded automatically if you set the `BUILD_KBENCHMARK` configuration to ON and build nnfw. To download it manually, use the following link.
+* [Download Nonius header files](https://github.com/libnonius/nonius/releases)
+
+### Configuration file
+This tool depends on a configuration file generated by [summarize_tflite.sh](nnfw/blob/master/tools/tflkit/summarize_tflite.sh). You can create a configuration file with the following commands.
+```
+nnfw$ cd tools/tflkit
+nnfw/tools/tflkit$ ./summarize_tflite.sh [tflite model file] -c -p [file prefix]
+```
+Or
+```
+nnfw$ cd tools/tflitefile_tool
+nnfw/tools/tflitefile_tool$ python model_parser.py [tflite model file] -c -p [file prefix]
+```
+
+The generated configuration file will have the following format:
+```
+tools/tflitefile_tool$ cat inceptionv3_slim_Main_model_CONV_2D.config | head -n 25
+# CONV_2D, Total count: 95
+
+[0]
+input: [1, 299, 299, 3]
+input_type: FLOAT32
+weights: [32, 3, 3, 3]
+weights_type: FLOAT32
+bias: [32]
+bias_type: FLOAT32
+output_counts: 1
+output0: [1, 149, 149, 32]
+output0_type: FLOAT32
+stride_w: 2
+stride_h: 2
+dilation_w: 1
+dilation_h: 1
+padding: VALID
+fused_act: RELU
+
+[1]
+input: [1, 149, 149, 32]
+input_type: FLOAT32
+weights: [32, 3, 3, 32]
+weights_type: FLOAT32
+bias: [32]
+```
+
+### Benchmark kernel library
+This tool needs kernel benchmark libraries. A kernel benchmark library depends on the `nonius` C++ micro-benchmarking framework; see the [libnonius/nonius](https://github.com/libnonius/nonius) GitHub repository for the detailed guide. The `nonius` library uses modern C++ and is header-only. Kernel benchmark libraries are loaded into the `kbenchmark` tool with the dynamic linking loader, so each library must export the `nonius::benchmark_registry &benchmark_functions(void)` symbol, which returns the list of nonius benchmark tests. You can log every benchmark test that will be executed by passing the `--verbose` option. A minimal sketch of such a library is shown after this file.
+
+## Usage
+
+### Command-line options
+The binary takes the following required parameters:
+
+* `config`: `string` \
+ The path to the configuration file.
+* `kernel`: `string` \
+ The path to a benchmark kernel library file. Multiple kernel libraries can be given, either separated by spaces or by passing `--kernel` repeatedly.
+
+and the following optional parameters:
+
+* `reporter`: `string` \
+ Sets the reporter type: one of `standard`, `html`, `junit`, or `csv`. The default is `standard`.
+* `output`: `string` \
+ Sets an additional string to append to the output file name.
+* `help`: \
+ Display available options.
+* `verbose`: \
+ Show verbose messages.
+
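+A typical invocation might look like this (file and library names are illustrative):
+```
+$ ./kbenchmark --config inceptionv3_slim_Main_model_CONV_2D.config \
+              --kernel libkben_acl_cl_conv.so --reporter html --verbose
+```
+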
+### Operations
+The `OperationLoader` loads each operation's information from the configuration file. The loader uses the last token of the configuration file name as a key into the `OperationLoader` map, so the configuration file name should not be changed. For example, if the configuration file name is `inceptionv3_slim_Main_model_CONV_2D.test.config`, `OperationLoader` uses `CONV_2D` as the map key. The `CONV_2D` key maps to the `Convolution` class in `operations/Convolution.h`; this mapping is described in the `Operations.lst` file. Each operation class returns `nonius::parameters` built from the `OperationInfo` in the `ConfigFile` class. A sketch of how the key could be matched is shown below.
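+
+The following sketch shows one way such a key could be matched; the helper `config_key` is hypothetical and not part of the tool:
+```
+#include <string>
+#include <vector>
+
+// Hypothetical sketch: choose the operation key by matching the known keys
+// from Operations.lst against the configuration file name, e.g.
+// "inceptionv3_slim_Main_model_CONV_2D.test.config" -> "CONV_2D".
+std::string config_key(const std::string &filename)
+{
+  static const std::vector<std::string> keys{"CONV_2D", "TRANSPOSE_CONV"};
+  for (const auto &key : keys)
+  {
+    if (filename.find(key) != std::string::npos)
+      return key;
+  }
+  return ""; // unknown operation
+}
+```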
+
diff --git a/tools/kbenchmark/Utils.h b/tools/kbenchmark/Utils.h
new file mode 100644
index 000000000..cda6c3263
--- /dev/null
+++ b/tools/kbenchmark/Utils.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_UTILS_H__
+#define __KBENCHMARK_UTILS_H__
+
+#include <sstream>
+#include <string>
+#include <vector>
+#include <cassert>
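+
+// NOTE OperationInfo (the key/value map parsed from a configuration file) is
+// expected to be declared by the including translation unit; this header does
+// not pull in its definition.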
+
+namespace kbenchmark
+{
+
+void check_valid_key(const std::string &key, OperationInfo &info)
+{
+ OperationInfo::const_iterator it;
+ it = info.find(key);
+ assert(it != info.end());
+}
+
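+// Parses a comma-separated integer list, e.g. "1, 299, 299, 3" -> {1, 299, 299, 3}.
+// NOTE Any surrounding brackets from the configuration value are assumed to be
+// stripped before this call; a leading '[' would stop extraction immediately.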
+std::vector<int> dims(const std::string &src)
+{
+ std::vector<int> dim;
+
+ std::stringstream ss(src);
+ int i;
+ while (ss >> i)
+ {
+ dim.push_back(i);
+ if (ss.peek() == ',')
+ ss.ignore();
+ }
+ return dim;
+}
+
+std::vector<int> get_key_dims(const std::string &key, OperationInfo &info)
+{
+ check_valid_key(key, info);
+ return dims(info[key]);
+}
+
+int get_key_int(const std::string &key, OperationInfo &info)
+{
+ check_valid_key(key, info);
+ return std::stoi(info[key]);
+}
+
+std::string get_key_string(const std::string &key, OperationInfo &info)
+{
+ check_valid_key(key, info);
+ return info[key];
+}
+
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_UTILS_H__
diff --git a/tools/kbenchmark/kernels/CMakeLists.txt b/tools/kbenchmark/kernels/CMakeLists.txt
new file mode 100644
index 000000000..5ea6cdadd
--- /dev/null
+++ b/tools/kbenchmark/kernels/CMakeLists.txt
@@ -0,0 +1 @@
+add_subdirectories()
diff --git a/tools/kbenchmark/kernels/acl_cl/CMakeLists.txt b/tools/kbenchmark/kernels/acl_cl/CMakeLists.txt
new file mode 100644
index 000000000..c727905a6
--- /dev/null
+++ b/tools/kbenchmark/kernels/acl_cl/CMakeLists.txt
@@ -0,0 +1,20 @@
+nnfw_find_package(ARMCompute QUIET)
+if(NOT ARMCompute_FOUND)
+ return()
+endif(NOT ARMCompute_FOUND)
+
+function(add_kben_acl_cl_library)
+ cmake_parse_arguments(ARG "" "NAME" "SOURCES" ${ARGN})
+
+ add_library(${ARG_NAME} SHARED ${ARG_SOURCES})
+ target_compile_options(${ARG_NAME} PRIVATE -Wno-psabi)
+ target_include_directories(${ARG_NAME} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/..)
+ target_link_libraries(${ARG_NAME} nonius)
+ target_link_libraries(${ARG_NAME} arm_compute)
+ target_link_libraries(${ARG_NAME} arm_compute_ex)
+ target_link_libraries(${ARG_NAME} pthread)
+ install(TARGETS ${ARG_NAME} DESTINATION lib/kben)
+endfunction(add_kben_acl_cl_library)
+
+add_kben_acl_cl_library(NAME kben_acl_cl_conv SOURCES Convolution.cpp)
+add_kben_acl_cl_library(NAME kben_acl_cl_transpose_conv SOURCES TransposeConv.cpp)
diff --git a/tools/kbenchmark/kernels/acl_cl/Convolution.cpp b/tools/kbenchmark/kernels/acl_cl/Convolution.cpp
new file mode 100644
index 000000000..37d179ac3
--- /dev/null
+++ b/tools/kbenchmark/kernels/acl_cl/Convolution.cpp
@@ -0,0 +1,514 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Conv2D benchmark with various algorithms (draft version)
+ */
+
+#include <nonius/nonius.h++>
+
+#include <arm_compute/core/Types.h>
+#include <arm_compute/runtime/CL/CLScheduler.h>
+#include <arm_compute/runtime/CL/CLFunctions.h>
+
+#include <cstdint>
+#include <cassert>
+#include <stdexcept>
+
+using namespace arm_compute;
+
+//
+// Helpers
+//
+namespace
+{
+
+enum Layout
+{
+ NCHW,
+ NHWC
+};
+
+struct Initializer
+{
+ Initializer() { CLScheduler::get().default_init(); }
+};
+
+Initializer initializer;
+
+TensorInfo make_info(uint32_t N)
+{
+ TensorShape shape{N};
+ return TensorInfo{shape, 1, DataType::F32};
+}
+
+template <enum Layout> TensorInfo make_info(uint32_t N, uint32_t C, uint32_t H, uint32_t W);
+
+template <> TensorInfo make_info<NCHW>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{W, H, C, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NCHW);
+ return info;
+}
+
+template <> TensorInfo make_info<NHWC>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{C, W, H, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NHWC);
+ return info;
+}
+
+inline void check(const Status &status)
+{
+ if (!status)
+ {
+ std::cerr << status.error_description() << std::endl;
+ throw std::runtime_error{"ERROR"};
+ }
+}
+
+inline bool is_odd(uint32_t n) { return n % 2 != 0; }
+
+} // namespace
+
+//
+// Benchmark Parameters
+//
+NONIUS_PARAM(BATCH, 1);
+
+NONIUS_PARAM(IFM_C, 3);
+NONIUS_PARAM(IFM_H, 244);
+NONIUS_PARAM(IFM_W, 244);
+
+NONIUS_PARAM(OFM_C, 3);
+NONIUS_PARAM(OFM_H, 244);
+NONIUS_PARAM(OFM_W, 244);
+
+NONIUS_PARAM(KER_H, 3);
+NONIUS_PARAM(KER_W, 3);
+
+NONIUS_PARAM(STRIDE_H, 1);
+NONIUS_PARAM(STRIDE_W, 1);
+
+NONIUS_PARAM(PADDING, std::string{"SAME"})
+NONIUS_PARAM(FUSED_ACT, std::string{"RELU"})
+
+//
+// Configuration Helpers
+//
+namespace
+{
+
+struct Configuration
+{
+ uint32_t ifm_N;
+ uint32_t ifm_C;
+ uint32_t ifm_H;
+ uint32_t ifm_W;
+
+ uint32_t ofm_N;
+ uint32_t ofm_C;
+ uint32_t ofm_H;
+ uint32_t ofm_W;
+
+ uint32_t ker_N;
+ uint32_t ker_C;
+ uint32_t ker_H;
+ uint32_t ker_W;
+
+ uint32_t vertical_stride;
+ uint32_t horizontal_stride;
+
+ std::string padding;
+ std::string fused_act;
+
+ uint32_t top_padding;
+ uint32_t bottom_padding;
+ uint32_t left_padding;
+ uint32_t right_padding;
+
+ Configuration(nonius::chronometer meter)
+ {
+ ifm_N = meter.param<BATCH>();
+ ifm_C = meter.param<IFM_C>();
+ ifm_H = meter.param<IFM_H>();
+ ifm_W = meter.param<IFM_W>();
+
+ ofm_N = meter.param<BATCH>();
+ ofm_C = meter.param<OFM_C>();
+ ofm_H = meter.param<OFM_H>();
+ ofm_W = meter.param<OFM_W>();
+
+ ker_N = meter.param<OFM_C>();
+ ker_C = meter.param<IFM_C>();
+ ker_H = meter.param<KER_H>();
+ ker_W = meter.param<KER_W>();
+
+ vertical_stride = meter.param<STRIDE_H>();
+ horizontal_stride = meter.param<STRIDE_W>();
+
+ padding = meter.param<PADDING>();
+ fused_act = meter.param<FUSED_ACT>();
+
+ assert((ifm_H - ker_H) % vertical_stride == 0);
+ assert((ifm_W - ker_W) % horizontal_stride == 0);
+
+ uint32_t const effective_ofm_H = (ifm_H - ker_H) / vertical_stride + 1;
+ uint32_t const effective_ofm_W = (ifm_W - ker_W) / horizontal_stride + 1;
+
+ assert(ofm_H >= effective_ofm_H);
+ assert(ofm_W >= effective_ofm_W);
+
+ uint32_t const pad_H = ofm_H - effective_ofm_H;
+ uint32_t const pad_W = ofm_W - effective_ofm_W;
+
+ top_padding = pad_H / 2;
+ bottom_padding = pad_H / 2;
+ left_padding = pad_W / 2;
+ right_padding = pad_W / 2;
+
+ if (is_odd(pad_H))
+ top_padding += 1;
+ if (is_odd(pad_W))
+ left_padding += 1;
+ }
+
+ template <Layout L> TensorInfo src_info() const
+ {
+ return make_info<L>(ifm_N, ifm_C, ifm_H, ifm_W);
+ }
+ template <Layout L> TensorInfo dst_info() const
+ {
+ return make_info<L>(ofm_N, ofm_C, ofm_H, ofm_W);
+ }
+ template <Layout L> TensorInfo ker_info() const
+ {
+ return make_info<L>(ker_N, ker_C, ker_H, ker_W);
+ }
+ TensorInfo bias_info(void) const { return make_info(ker_N); }
+
+ PadStrideInfo pad_stride_info(void) const
+ {
+ return PadStrideInfo{horizontal_stride,
+ vertical_stride,
+ left_padding,
+ right_padding,
+ top_padding,
+ bottom_padding,
+ DimensionRoundingType::FLOOR};
+ }
+};
+
+} // namespace
+
+//
+// Benchmark Implementations
+//
+namespace
+{
+
+inline nonius::benchmark_registry &local_benchmark_registry()
+{
+ static nonius::benchmark_registry registry;
+ return registry;
+}
+
+} // namespace
+
+#define NONIUS_LOCAL_BENCHMARK(name, ...) \
+ namespace \
+ { \
+ static ::nonius::benchmark_registrar \
+ NONIUS_DETAIL_UNIQUE_NAME(benchmark_registrar)(local_benchmark_registry(), name, \
+ __VA_ARGS__); \
+ }
+
+NONIUS_LOCAL_BENCHMARK("CLDirectConvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ CLDirectConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLDirectConvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ CLDirectConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLGEMMConvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ CLGEMMConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLGEMMConvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ CLGEMMConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLWinogradConvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ CLWinogradConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLWinogradConvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ CLWinogradConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+ CLTensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) {
+ conv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+extern "C" nonius::benchmark_registry &benchmark_functions(void)
+{
+ return local_benchmark_registry();
+}
diff --git a/tools/kbenchmark/kernels/acl_cl/TransposeConv.cpp b/tools/kbenchmark/kernels/acl_cl/TransposeConv.cpp
new file mode 100644
index 000000000..8278a61d3
--- /dev/null
+++ b/tools/kbenchmark/kernels/acl_cl/TransposeConv.cpp
@@ -0,0 +1,393 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file TransposeConv benchmark with various algorithms
+ */
+
+#include <nonius/nonius.h++>
+
+#include <arm_compute/core/Types.h>
+#include <arm_compute/runtime/CL/CLScheduler.h>
+#include <arm_compute/runtime/CL/CLFunctions.h>
+#include <arm_compute/runtime/CL/CLFunctionsEx.h>
+
+#include <cstdint>
+#include <cassert>
+#include <stdexcept>
+
+#include "acl_common/Utils.h"
+
+using namespace arm_compute;
+using namespace kbenchmark::kernels::acl_common;
+
+//
+// Helpers
+//
+namespace
+{
+
+enum Layout
+{
+ NCHW,
+ NHWC
+};
+
+struct Initializer
+{
+ Initializer() { CLScheduler::get().default_init(); }
+};
+
+Initializer initializer;
+
+TensorInfo make_info(uint32_t N)
+{
+ TensorShape shape{N};
+ return TensorInfo{shape, 1, DataType::F32};
+}
+
+template <enum Layout> TensorInfo make_info(uint32_t N, uint32_t C, uint32_t H, uint32_t W);
+
+template <> TensorInfo make_info<NCHW>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{W, H, C, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NCHW);
+ return info;
+}
+
+template <> TensorInfo make_info<NHWC>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{C, W, H, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NHWC);
+ return info;
+}
+
+inline void check(const Status &status)
+{
+ if (!status)
+ {
+ std::cerr << status.error_description() << std::endl;
+ throw std::runtime_error{"ERROR"};
+ }
+}
+
+inline bool is_odd(uint32_t n) { return n % 2 != 0; }
+
+} // namespace
+
+//
+// Benchmark Parameters
+//
+NONIUS_PARAM(BATCH, 1);
+
+NONIUS_PARAM(IFM_C, 3);
+NONIUS_PARAM(IFM_H, 244);
+NONIUS_PARAM(IFM_W, 244);
+
+NONIUS_PARAM(OFM_C, 3);
+NONIUS_PARAM(OFM_H, 244);
+NONIUS_PARAM(OFM_W, 244);
+
+NONIUS_PARAM(KER_H, 3);
+NONIUS_PARAM(KER_W, 3);
+
+NONIUS_PARAM(STRIDE_H, 1);
+NONIUS_PARAM(STRIDE_W, 1);
+
+NONIUS_PARAM(PADDING, std::string{"SAME"})
+
+//
+// Configuration Helpers
+//
+namespace
+{
+
+struct Configuration
+{
+ uint32_t ifm_N;
+ uint32_t ifm_C;
+ uint32_t ifm_H;
+ uint32_t ifm_W;
+
+ uint32_t ofm_N;
+ uint32_t ofm_C;
+ uint32_t ofm_H;
+ uint32_t ofm_W;
+
+ uint32_t ker_N;
+ uint32_t ker_C;
+ uint32_t ker_H;
+ uint32_t ker_W;
+
+ uint32_t vertical_stride;
+ uint32_t horizontal_stride;
+
+ PadStrideInfo deconv_info;
+
+ uint32_t inner_border_right;
+ uint32_t inner_border_top;
+
+ Configuration(nonius::chronometer meter)
+ {
+ ifm_N = meter.param<BATCH>();
+ ifm_C = meter.param<IFM_C>();
+ ifm_H = meter.param<IFM_H>();
+ ifm_W = meter.param<IFM_W>();
+
+ ofm_N = meter.param<BATCH>();
+ ofm_C = meter.param<OFM_C>();
+ ofm_H = meter.param<OFM_H>();
+ ofm_W = meter.param<OFM_W>();
+
+ ker_N = meter.param<OFM_C>();
+ ker_C = meter.param<IFM_C>();
+ ker_H = meter.param<KER_H>();
+ ker_W = meter.param<KER_W>();
+
+ vertical_stride = meter.param<STRIDE_H>();
+ horizontal_stride = meter.param<STRIDE_W>();
+
+ // NOTE The padding calculation formula of TransposeConv is the opposite of Conv's,
+ // so the ifm and ofm arguments are swapped here.
+ auto padding_info = calculatePadding(meter.param<PADDING>(), ofm_H, ofm_W, ifm_H, ifm_W,
+ vertical_stride, horizontal_stride, ker_H, ker_W);
+
+ inner_border_right = padding_info.right - padding_info.left;
+ inner_border_top = padding_info.bottom - padding_info.top;
+
+ padding_info.left = padding_info.right;
+ padding_info.top = padding_info.bottom;
+
+ deconv_info = asPadStrideInfo(padding_info, vertical_stride, horizontal_stride);
+ }
+
+ template <Layout L> TensorInfo src_info() const
+ {
+ return make_info<L>(ifm_N, ifm_C, ifm_H, ifm_W);
+ }
+ template <Layout L> TensorInfo dst_info() const
+ {
+ return make_info<L>(ofm_N, ofm_C, ofm_H, ofm_W);
+ }
+ template <Layout L> TensorInfo ker_info() const
+ {
+ return make_info<L>(ker_N, ker_C, ker_H, ker_W);
+ }
+ TensorInfo bias_info(void) const { return make_info(ker_N); }
+};
+
+} // namespace
+
+//
+// Benchmark Implementations
+//
+namespace
+{
+
+inline nonius::benchmark_registry &local_benchmark_registry()
+{
+ static nonius::benchmark_registry registry;
+ return registry;
+}
+
+} // namespace
+
+#define NONIUS_LOCAL_BENCHMARK(name, ...) \
+ namespace \
+ { \
+ static ::nonius::benchmark_registrar \
+ NONIUS_DETAIL_UNIQUE_NAME(benchmark_registrar)(local_benchmark_registry(), name, \
+ __VA_ARGS__); \
+ }
+
+NONIUS_LOCAL_BENCHMARK("CLDeconvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ CLDeconvolutionLayer deconv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+
+ try
+ {
+ check(deconv.validate(src_tensor.info(), ker_tensor.info(), nullptr, dst_tensor.info(),
+ p.deconv_info, p.inner_border_right, p.inner_border_top));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ deconv.configure(&src_tensor, &ker_tensor, nullptr, &dst_tensor, p.deconv_info,
+ p.inner_border_right, p.inner_border_top);
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ deconv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLDeconvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ CLDeconvolutionLayer deconv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+
+ try
+ {
+ check(deconv.validate(src_tensor.info(), ker_tensor.info(), nullptr, dst_tensor.info(),
+ p.deconv_info, p.inner_border_right, p.inner_border_top));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ deconv.configure(&src_tensor, &ker_tensor, nullptr, &dst_tensor, p.deconv_info,
+ p.inner_border_right, p.inner_border_top);
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ deconv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLDeconvolutionLayerEx_NCHW", [](nonius::chronometer meter) {
+ CLDeconvolutionLayerEx deconv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+
+ try
+ {
+ check(deconv.validate(src_tensor.info(), ker_tensor.info(), nullptr, dst_tensor.info(),
+ p.deconv_info, p.inner_border_right, p.inner_border_top));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ deconv.configure(&src_tensor, &ker_tensor, nullptr, &dst_tensor, p.deconv_info,
+ p.inner_border_right, p.inner_border_top);
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ deconv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+NONIUS_LOCAL_BENCHMARK("CLDeconvolutionLayerEx_NHWC", [](nonius::chronometer meter) {
+ CLDeconvolutionLayerEx deconv;
+
+ // Configure
+ Configuration p{meter};
+
+ CLTensor src_tensor{};
+ CLTensor dst_tensor{};
+ CLTensor ker_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+
+ try
+ {
+ check(deconv.validate(src_tensor.info(), ker_tensor.info(), nullptr, dst_tensor.info(),
+ p.deconv_info, p.inner_border_right, p.inner_border_top));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ deconv.configure(&src_tensor, &ker_tensor, nullptr, &dst_tensor, p.deconv_info,
+ p.inner_border_right, p.inner_border_top);
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) {
+ deconv.run();
+ CLScheduler::get().sync();
+ });
+})
+
+extern "C" nonius::benchmark_registry &benchmark_functions(void)
+{
+ return local_benchmark_registry();
+}
diff --git a/tools/kbenchmark/kernels/acl_common/Utils.h b/tools/kbenchmark/kernels/acl_common/Utils.h
new file mode 100644
index 000000000..2f3648d9b
--- /dev/null
+++ b/tools/kbenchmark/kernels/acl_common/Utils.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_KERNELS_ACL_COMMON_UTILS_H__
+#define __KBENCHMARK_KERNELS_ACL_COMMON_UTILS_H__
+
+#include <algorithm>
+#include <cstdint>
+#include <stdexcept>
+#include <string>
+
+#include <arm_compute/core/Types.h>
+
+using namespace arm_compute;
+
+namespace kbenchmark
+{
+namespace kernels
+{
+namespace acl_common
+{
+
+struct PaddingInfo
+{
+ uint32_t top;
+ uint32_t bottom;
+ uint32_t left;
+ uint32_t right;
+};
+
+PaddingInfo calculatePadding(const std::string &padding_name, const uint32_t ifm_H,
+ const uint32_t ifm_W, const uint32_t ofm_H, const uint32_t ofm_W,
+ const uint32_t vertical_stride, const uint32_t horizontal_stride,
+ const uint32_t ker_H, const uint32_t ker_W)
+{
+ uint32_t top;
+ uint32_t bottom;
+ uint32_t left;
+ uint32_t right;
+
+ if (padding_name == "VALID")
+ {
+ top = bottom = left = right = 0;
+ }
+ else if (padding_name == "SAME")
+ {
+ const int32_t vertical_needed_input = (ofm_H - 1) * vertical_stride + ker_H;
+ const int32_t vertical_total_padding = std::max(0, vertical_needed_input - (int32_t)ifm_H);
+
+ const int32_t horizontal_needed_input = (ofm_W - 1) * horizontal_stride + ker_W;
+ const int32_t horizontal_total_padding = std::max(0, horizontal_needed_input - (int32_t)ifm_W);
+
+ top = vertical_total_padding / 2;
+ bottom = (vertical_total_padding + 1) / 2;
+ left = horizontal_total_padding / 2;
+ right = (horizontal_total_padding + 1) / 2;
+ }
+ else
+ {
+   throw std::runtime_error{"Unsupported padding type: " + padding_name};
+ }
+
+ return PaddingInfo{top, bottom, left, right};
+}
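+
+// Example (illustrative): with SAME padding, ifm_H = ifm_W = 224,
+// ker_H = ker_W = 3 and strides of 1, the needed input is
+// (224 - 1) * 1 + 3 = 226 in each direction, so the total padding per
+// axis is 2: top = left = 1 and bottom = right = 1.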
+
+PadStrideInfo asPadStrideInfo(const PaddingInfo &padding, uint32_t vertical_stride,
+ uint32_t horizontal_stride)
+{
+ return PadStrideInfo{horizontal_stride,
+ vertical_stride,
+ padding.left,
+ padding.right,
+ padding.top,
+ padding.bottom,
+ DimensionRoundingType::FLOOR};
+}
+
+ActivationLayerInfo asActivationLayerInfo(const std::string &act_name)
+{
+ if (act_name == "NONE")
+ {
+ return ActivationLayerInfo{};
+ }
+ else if (act_name == "RELU")
+ {
+ return ActivationLayerInfo{ActivationLayerInfo::ActivationFunction::RELU};
+ }
+ else
+ {
+ throw std::runtime_error{"Unsupported activation: " + act_name};
+ }
+}
+
+} // namespace acl_common
+} // namespace kernels
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_KERNELS_ACL_COMMON_UTILS_H__
diff --git a/tools/kbenchmark/kernels/acl_neon/CMakeLists.txt b/tools/kbenchmark/kernels/acl_neon/CMakeLists.txt
new file mode 100644
index 000000000..54afdab09
--- /dev/null
+++ b/tools/kbenchmark/kernels/acl_neon/CMakeLists.txt
@@ -0,0 +1,19 @@
+nnfw_find_package(ARMCompute QUIET)
+if(NOT ARMCompute_FOUND)
+ return()
+endif(NOT ARMCompute_FOUND)
+
+function(add_kben_acl_neon_library)
+ cmake_parse_arguments(ARG "" "NAME" "SOURCES" ${ARGN})
+
+ add_library(${ARG_NAME} SHARED ${ARG_SOURCES})
+ target_compile_options(${ARG_NAME} PRIVATE -Wno-psabi)
+ target_include_directories(${ARG_NAME} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/..)
+ target_link_libraries(${ARG_NAME} nonius)
+ target_link_libraries(${ARG_NAME} arm_compute)
+ target_link_libraries(${ARG_NAME} pthread)
+ install(TARGETS ${ARG_NAME} DESTINATION lib/kben)
+endfunction(add_kben_acl_neon_library)
+
+add_kben_acl_neon_library(NAME kben_acl_neon_conv SOURCES Convolution.cpp)
+add_kben_acl_neon_library(NAME kben_acl_neon_transpose_conv SOURCES TransposeConv.cpp)
diff --git a/tools/kbenchmark/kernels/acl_neon/Convolution.cpp b/tools/kbenchmark/kernels/acl_neon/Convolution.cpp
new file mode 100644
index 000000000..2d19cb21e
--- /dev/null
+++ b/tools/kbenchmark/kernels/acl_neon/Convolution.cpp
@@ -0,0 +1,489 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file Conv2D benchmark with various algorithms (draft version)
+ */
+
+#include <nonius/nonius.h++>
+
+#include <arm_compute/core/Types.h>
+#include <arm_compute/runtime/NEON/NEScheduler.h>
+#include <arm_compute/runtime/NEON/NEFunctions.h>
+
+#include <cstdint>
+#include <cassert>
+#include <stdexcept>
+
+using namespace arm_compute;
+
+//
+// Helpers
+//
+namespace
+{
+
+enum Layout
+{
+ NCHW,
+ NHWC
+};
+
+TensorInfo make_info(uint32_t N)
+{
+ TensorShape shape{N};
+ return TensorInfo{shape, 1, DataType::F32};
+}
+
+template <enum Layout> TensorInfo make_info(uint32_t N, uint32_t C, uint32_t H, uint32_t W);
+
+template <> TensorInfo make_info<NCHW>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{W, H, C, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NCHW);
+ return info;
+}
+
+template <> TensorInfo make_info<NHWC>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{C, W, H, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NHWC);
+ return info;
+}
+
+inline void check(const Status &status)
+{
+ if (!status)
+ {
+ std::cerr << status.error_description() << std::endl;
+ throw std::runtime_error{"ERROR"};
+ }
+}
+
+inline bool is_odd(uint32_t n) { return n % 2 != 0; }
+
+} // namespace
+
+//
+// Benchmark Parameters
+//
+NONIUS_PARAM(BATCH, 1);
+
+NONIUS_PARAM(IFM_C, 3);
+NONIUS_PARAM(IFM_H, 244);
+NONIUS_PARAM(IFM_W, 244);
+
+NONIUS_PARAM(OFM_C, 3);
+NONIUS_PARAM(OFM_H, 244);
+NONIUS_PARAM(OFM_W, 244);
+
+NONIUS_PARAM(KER_H, 3);
+NONIUS_PARAM(KER_W, 3);
+
+NONIUS_PARAM(STRIDE_H, 1);
+NONIUS_PARAM(STRIDE_W, 1);
+
+NONIUS_PARAM(PADDING, std::string{"SAME"})
+NONIUS_PARAM(FUSED_ACT, std::string{"RELU"})
+
+//
+// Configuration Helpers
+//
+namespace
+{
+
+struct Configuration
+{
+ uint32_t ifm_N;
+ uint32_t ifm_C;
+ uint32_t ifm_H;
+ uint32_t ifm_W;
+
+ uint32_t ofm_N;
+ uint32_t ofm_C;
+ uint32_t ofm_H;
+ uint32_t ofm_W;
+
+ uint32_t ker_N;
+ uint32_t ker_C;
+ uint32_t ker_H;
+ uint32_t ker_W;
+
+ uint32_t vertical_stride;
+ uint32_t horizontal_stride;
+
+ std::string padding;
+ std::string fused_act;
+
+ uint32_t top_padding;
+ uint32_t bottom_padding;
+ uint32_t left_padding;
+ uint32_t right_padding;
+
+ Configuration(nonius::chronometer meter)
+ {
+ ifm_N = meter.param<BATCH>();
+ ifm_C = meter.param<IFM_C>();
+ ifm_H = meter.param<IFM_H>();
+ ifm_W = meter.param<IFM_W>();
+
+ ofm_N = meter.param<BATCH>();
+ ofm_C = meter.param<OFM_C>();
+ ofm_H = meter.param<OFM_H>();
+ ofm_W = meter.param<OFM_W>();
+
+ ker_N = meter.param<OFM_C>();
+ ker_C = meter.param<IFM_C>();
+ ker_H = meter.param<KER_H>();
+ ker_W = meter.param<KER_W>();
+
+ vertical_stride = meter.param<STRIDE_H>();
+ horizontal_stride = meter.param<STRIDE_W>();
+
+ padding = meter.param<PADDING>();
+ fused_act = meter.param<FUSED_ACT>();
+
+ assert((ifm_H - ker_H) % vertical_stride == 0);
+ assert((ifm_W - ker_W) % horizontal_stride == 0);
+
+ uint32_t const effective_ofm_H = (ifm_H - ker_H) / vertical_stride + 1;
+ uint32_t const effective_ofm_W = (ifm_W - ker_W) / horizontal_stride + 1;
+
+ assert(ofm_H >= effective_ofm_H);
+ assert(ofm_W >= effective_ofm_W);
+
+ uint32_t const pad_H = ofm_H - effective_ofm_H;
+ uint32_t const pad_W = ofm_W - effective_ofm_W;
+
+ top_padding = pad_H / 2;
+ bottom_padding = pad_H / 2;
+ left_padding = pad_W / 2;
+ right_padding = pad_W / 2;
+
+ if (is_odd(pad_H))
+ top_padding += 1;
+ if (is_odd(pad_W))
+ left_padding += 1;
+ }
+
+ template <Layout L> TensorInfo src_info() const
+ {
+ return make_info<L>(ifm_N, ifm_C, ifm_H, ifm_W);
+ }
+ template <Layout L> TensorInfo dst_info() const
+ {
+ return make_info<L>(ofm_N, ofm_C, ofm_H, ofm_W);
+ }
+ template <Layout L> TensorInfo ker_info() const
+ {
+ return make_info<L>(ker_N, ker_C, ker_H, ker_W);
+ }
+ TensorInfo bias_info(void) const { return make_info(ker_N); }
+
+ PadStrideInfo pad_stride_info(void) const
+ {
+ return PadStrideInfo{horizontal_stride,
+ vertical_stride,
+ left_padding,
+ right_padding,
+ top_padding,
+ bottom_padding,
+ DimensionRoundingType::FLOOR};
+ }
+};
+
+} // namespace
+
+//
+// Benchmark Implementations
+//
+namespace
+{
+
+inline nonius::benchmark_registry &local_benchmark_registry()
+{
+ static nonius::benchmark_registry registry;
+ return registry;
+}
+
+} // namespace
+
+#define NONIUS_LOCAL_BENCHMARK(name, ...) \
+ namespace \
+ { \
+ static ::nonius::benchmark_registrar \
+ NONIUS_DETAIL_UNIQUE_NAME(benchmark_registrar)(local_benchmark_registry(), name, \
+ __VA_ARGS__); \
+ }
+
+NONIUS_LOCAL_BENCHMARK("NEDirectConvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ NEDirectConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+ Tensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) { conv.run(); });
+})
+
+NONIUS_LOCAL_BENCHMARK("NEDirectConvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ NEDirectConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+ Tensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) { conv.run(); });
+})
+
+NONIUS_LOCAL_BENCHMARK("NEGEMMConvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ NEGEMMConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+ Tensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) { conv.run(); });
+})
+
+NONIUS_LOCAL_BENCHMARK("NEGEMMConvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ NEGEMMConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+ Tensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) { conv.run(); });
+})
+
+NONIUS_LOCAL_BENCHMARK("NEWinogradConvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ NEWinogradConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+ Tensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) { conv.run(); });
+})
+
+NONIUS_LOCAL_BENCHMARK("NEWinogradConvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ NEWinogradConvolutionLayer conv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+ Tensor bias_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+ bias_tensor.allocator()->init(p.bias_info());
+
+ try
+ {
+ check(conv.validate(src_tensor.info(), ker_tensor.info(), bias_tensor.info(), dst_tensor.info(),
+ p.pad_stride_info()));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ conv.configure(&src_tensor, &ker_tensor, &bias_tensor, &dst_tensor, p.pad_stride_info());
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ bias_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run
+ meter.measure([&](int) { conv.run(); });
+})
+
+extern "C" nonius::benchmark_registry &benchmark_functions(void)
+{
+ return local_benchmark_registry();
+}
diff --git a/tools/kbenchmark/kernels/acl_neon/TransposeConv.cpp b/tools/kbenchmark/kernels/acl_neon/TransposeConv.cpp
new file mode 100644
index 000000000..0878499e4
--- /dev/null
+++ b/tools/kbenchmark/kernels/acl_neon/TransposeConv.cpp
@@ -0,0 +1,293 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file TransposeConv benchmark with various algorithms
+ */
+
+#include <nonius/nonius.h++>
+
+#include <arm_compute/core/Types.h>
+#include <arm_compute/runtime/NEON/NEScheduler.h>
+#include <arm_compute/runtime/NEON/NEFunctions.h>
+
+#include <cstdint>
+#include <cassert>
+#include <stdexcept>
+
+#include "acl_common/Utils.h"
+
+using namespace arm_compute;
+using namespace kbenchmark::kernels::acl_common;
+
+//
+// Helpers
+//
+namespace
+{
+
+enum Layout
+{
+ NCHW,
+ NHWC
+};
+
+TensorInfo make_info(uint32_t N)
+{
+ TensorShape shape{N};
+ return TensorInfo{shape, 1, DataType::F32};
+}
+
+template <enum Layout> TensorInfo make_info(uint32_t N, uint32_t C, uint32_t H, uint32_t W);
+
+template <> TensorInfo make_info<NCHW>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{W, H, C, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NCHW);
+ return info;
+}
+
+template <> TensorInfo make_info<NHWC>(uint32_t N, uint32_t C, uint32_t H, uint32_t W)
+{
+ TensorShape shape{C, W, H, N};
+ TensorInfo info{shape, 1, DataType::F32};
+ info.set_data_layout(DataLayout::NHWC);
+ return info;
+}
+
+inline void check(const Status &status)
+{
+ if (!status)
+ {
+ std::cerr << status.error_description() << std::endl;
+ throw std::runtime_error{"ERROR"};
+ }
+}
+
+inline bool is_odd(uint32_t n) { return n % 2 != 0; }
+
+} // namespace
+
+//
+// Benchmark Parameters
+//
+NONIUS_PARAM(BATCH, 1);
+
+NONIUS_PARAM(IFM_C, 3);
+NONIUS_PARAM(IFM_H, 244);
+NONIUS_PARAM(IFM_W, 244);
+
+NONIUS_PARAM(OFM_C, 3);
+NONIUS_PARAM(OFM_H, 244);
+NONIUS_PARAM(OFM_W, 244);
+
+NONIUS_PARAM(KER_H, 3);
+NONIUS_PARAM(KER_W, 3);
+
+NONIUS_PARAM(STRIDE_H, 1);
+NONIUS_PARAM(STRIDE_W, 1);
+
+NONIUS_PARAM(PADDING, std::string{"SAME"})
+
+//
+// Configuration Helpers
+//
+namespace
+{
+
+struct Configuration
+{
+ uint32_t ifm_N;
+ uint32_t ifm_C;
+ uint32_t ifm_H;
+ uint32_t ifm_W;
+
+ uint32_t ofm_N;
+ uint32_t ofm_C;
+ uint32_t ofm_H;
+ uint32_t ofm_W;
+
+ uint32_t ker_N;
+ uint32_t ker_C;
+ uint32_t ker_H;
+ uint32_t ker_W;
+
+ uint32_t vertical_stride;
+ uint32_t horizontal_stride;
+
+ PadStrideInfo deconv_info;
+
+ uint32_t inner_border_right;
+ uint32_t inner_border_top;
+
+ Configuration(nonius::chronometer meter)
+ {
+ ifm_N = meter.param<BATCH>();
+ ifm_C = meter.param<IFM_C>();
+ ifm_H = meter.param<IFM_H>();
+ ifm_W = meter.param<IFM_W>();
+
+ ofm_N = meter.param<BATCH>();
+ ofm_C = meter.param<OFM_C>();
+ ofm_H = meter.param<OFM_H>();
+ ofm_W = meter.param<OFM_W>();
+
+ ker_N = meter.param<OFM_C>();
+ ker_C = meter.param<IFM_C>();
+ ker_H = meter.param<KER_H>();
+ ker_W = meter.param<KER_W>();
+
+ vertical_stride = meter.param<STRIDE_H>();
+ horizontal_stride = meter.param<STRIDE_W>();
+
+ // NOTE The padding calculation formula of TransposeConv is the opposite of Conv's,
+ // so the ifm and ofm arguments are swapped here.
+ auto padding_info = calculatePadding(meter.param<PADDING>(), ofm_H, ofm_W, ifm_H, ifm_W,
+ vertical_stride, horizontal_stride, ker_H, ker_W);
+
+ inner_border_right = padding_info.right - padding_info.left;
+ inner_border_top = padding_info.bottom - padding_info.top;
+
+ padding_info.left = padding_info.right;
+ padding_info.top = padding_info.bottom;
+
+ deconv_info = asPadStrideInfo(padding_info, vertical_stride, horizontal_stride);
+ }
+
+ template <Layout L> TensorInfo src_info() const
+ {
+ return make_info<L>(ifm_N, ifm_C, ifm_H, ifm_W);
+ }
+ template <Layout L> TensorInfo dst_info() const
+ {
+ return make_info<L>(ofm_N, ofm_C, ofm_H, ofm_W);
+ }
+ template <Layout L> TensorInfo ker_info() const
+ {
+ return make_info<L>(ker_N, ker_C, ker_H, ker_W);
+ }
+ TensorInfo bias_info(void) const { return make_info(ker_N); }
+};
+
+} // namespace
+
+//
+// Benchmark Implementations
+//
+namespace
+{
+
+inline nonius::benchmark_registry &local_benchmark_registry()
+{
+ static nonius::benchmark_registry registry;
+ return registry;
+}
+
+} // namespace
+
+#define NONIUS_LOCAL_BENCHMARK(name, ...) \
+ namespace \
+ { \
+ static ::nonius::benchmark_registrar \
+ NONIUS_DETAIL_UNIQUE_NAME(benchmark_registrar)(local_benchmark_registry(), name, \
+ __VA_ARGS__); \
+ }
+
+NONIUS_LOCAL_BENCHMARK("NEDeconvolutionLayer_NCHW", [](nonius::chronometer meter) {
+ NEDeconvolutionLayer deconv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NCHW>());
+ dst_tensor.allocator()->init(p.dst_info<NCHW>());
+ ker_tensor.allocator()->init(p.ker_info<NCHW>());
+
+ try
+ {
+ check(deconv.validate(src_tensor.info(), ker_tensor.info(), nullptr, dst_tensor.info(),
+ p.deconv_info, p.inner_border_right, p.inner_border_top));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ deconv.configure(&src_tensor, &ker_tensor, nullptr, &dst_tensor, p.deconv_info,
+ p.inner_border_right, p.inner_border_top);
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) { deconv.run(); });
+})
+
+NONIUS_LOCAL_BENCHMARK("NEDeconvolutionLayer_NHWC", [](nonius::chronometer meter) {
+ NEDeconvolutionLayer deconv;
+
+ // Configure
+ Configuration p{meter};
+
+ Tensor src_tensor{};
+ Tensor dst_tensor{};
+ Tensor ker_tensor{};
+
+ src_tensor.allocator()->init(p.src_info<NHWC>());
+ dst_tensor.allocator()->init(p.dst_info<NHWC>());
+ ker_tensor.allocator()->init(p.ker_info<NHWC>());
+
+ try
+ {
+ check(deconv.validate(src_tensor.info(), ker_tensor.info(), nullptr, dst_tensor.info(),
+ p.deconv_info, p.inner_border_right, p.inner_border_top));
+ }
+ catch (...)
+ {
+ meter.measure([&](int) {
+ // DO NOTHING
+ volatile int x = 0;
+ return x;
+ });
+ return;
+ }
+
+ deconv.configure(&src_tensor, &ker_tensor, nullptr, &dst_tensor, p.deconv_info,
+ p.inner_border_right, p.inner_border_top);
+
+ src_tensor.allocator()->allocate();
+ ker_tensor.allocator()->allocate();
+ dst_tensor.allocator()->allocate();
+
+ // Run!
+ meter.measure([&](int) { deconv.run(); });
+})
+
+extern "C" nonius::benchmark_registry &benchmark_functions(void)
+{
+ return local_benchmark_registry();
+}
diff --git a/tools/kbenchmark/operations/Convolution.h b/tools/kbenchmark/operations/Convolution.h
new file mode 100644
index 000000000..a3dc4166c
--- /dev/null
+++ b/tools/kbenchmark/operations/Convolution.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_OPERATIONS_CONVOLUTION_H__
+#define __KBENCHMARK_OPERATIONS_CONVOLUTION_H__
+
+#include "Operation.h"
+#include "Utils.h"
+
+namespace kbenchmark
+{
+namespace operation
+{
+
+class Convolution final : public Operation
+{
+public:
+ Convolution() = default;
+
+ nonius::parameters params(int layer_num, OperationInfo &info) override
+ {
+ nonius::parameters params;
+
+ params.insert({"LAYER", nonius::param{layer_num}});
+
+ params.insert({"BATCH", nonius::param{1}});
+
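+ // NOTE Shape entries in the configuration file are NHWC ([N, H, W, C]),
+ // so index 1 is H, index 2 is W, and index 3 is C.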
+ auto _input = get_key_dims({"input"}, info);
+ params.insert({"IFM_C", nonius::param{_input[3]}});
+ params.insert({"IFM_H", nonius::param{_input[1]}});
+ params.insert({"IFM_W", nonius::param{_input[2]}});
+
+ auto _output0 = get_key_dims({"output0"}, info);
+ params.insert({"OFM_C", nonius::param{_output0[3]}});
+ params.insert({"OFM_H", nonius::param{_output0[1]}});
+ params.insert({"OFM_W", nonius::param{_output0[2]}});
+
+ auto _weights = get_key_dims({"weights"}, info);
+ params.insert({"KER_H", nonius::param{_weights[1]}});
+ params.insert({"KER_W", nonius::param{_weights[2]}});
+
+ auto _stride_h = get_key_int({"stride_h"}, info);
+ auto _stride_w = get_key_int({"stride_w"}, info);
+ params.insert({"STRIDE_H", nonius::param{_stride_h}});
+ params.insert({"STRIDE_W", nonius::param{_stride_w}});
+
+ auto _pad = get_key_string({"padding"}, info);
+ params.insert({"PADDING", nonius::param{_pad}});
+
+ auto _act = get_key_string({"fused_act"}, info);
+ params.insert({"FUSED_ACT", nonius::param{_act}});
+
+ return params;
+ }
+};
+
+} // namespace operation
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_OPERATIONS_CONVOLUTION_H__
diff --git a/tools/kbenchmark/operations/TransposeConv.h b/tools/kbenchmark/operations/TransposeConv.h
new file mode 100644
index 000000000..69ab307bd
--- /dev/null
+++ b/tools/kbenchmark/operations/TransposeConv.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __KBENCHMARK_OPERATIONS_TRANSPOSE_CONV_H__
+#define __KBENCHMARK_OPERATIONS_TRANSPOSE_CONV_H__
+
+#include "Operation.h"
+#include "Utils.h"
+
+namespace kbenchmark
+{
+namespace operation
+{
+
+class TransposeConv final : public Operation
+{
+public:
+ TransposeConv() = default;
+
+ nonius::parameters params(int layer_num, OperationInfo &info) override
+ {
+ nonius::parameters params;
+
+ params.insert({"LAYER", nonius::param{layer_num}});
+
+ params.insert({"BATCH", nonius::param{1}});
+
+ // TODO The output_shape will be used in the dynamic tensor case
+ auto _output_shape = get_key_dims({"input0"}, info);
+
+ auto _weights = get_key_dims({"input1"}, info);
+ params.insert({"KER_H", nonius::param{_weights[1]}});
+ params.insert({"KER_W", nonius::param{_weights[2]}});
+
+ auto _input = get_key_dims({"input2"}, info);
+ params.insert({"IFM_H", nonius::param{_input[1]}});
+ params.insert({"IFM_W", nonius::param{_input[2]}});
+ params.insert({"IFM_C", nonius::param{_input[3]}});
+
+ auto _output = get_key_dims({"output0"}, info);
+ params.insert({"OFM_H", nonius::param{_output[1]}});
+ params.insert({"OFM_W", nonius::param{_output[2]}});
+ params.insert({"OFM_C", nonius::param{_output[3]}});
+
+ auto _stride_h = get_key_int({"stride_h"}, info);
+ auto _stride_w = get_key_int({"stride_w"}, info);
+ params.insert({"STRIDE_H", nonius::param{_stride_h}});
+ params.insert({"STRIDE_W", nonius::param{_stride_w}});
+
+ auto _pad = get_key_string({"padding"}, info);
+ params.insert({"PADDING", nonius::param{_pad}});
+
+ return params;
+ }
+};
+
+} // namespace operation
+} // namespace kbenchmark
+
+#endif // __KBENCHMARK_OPERATIONS_TRANSPOSE_CONV_H__
diff --git a/tools/lcov-to-cobertura-xml/.FORMATDENY b/tools/lcov-to-cobertura-xml/.FORMATDENY
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tools/lcov-to-cobertura-xml/.FORMATDENY
diff --git a/externals/lcov-to-cobertura-xml/README.md b/tools/lcov-to-cobertura-xml/README.md
index 2fc4cff42..2fc4cff42 100644
--- a/externals/lcov-to-cobertura-xml/README.md
+++ b/tools/lcov-to-cobertura-xml/README.md
diff --git a/externals/lcov-to-cobertura-xml/lcov_cobertura.py b/tools/lcov-to-cobertura-xml/lcov_cobertura.py
index 7aae6d115..7aae6d115 100755..100644
--- a/externals/lcov-to-cobertura-xml/lcov_cobertura.py
+++ b/tools/lcov-to-cobertura-xml/lcov_cobertura.py
diff --git a/tools/nnapi_quickcheck/lib/env.cpp b/tools/nnapi_quickcheck/lib/env.cpp
index 758516752..005e876c2 100644
--- a/tools/nnapi_quickcheck/lib/env.cpp
+++ b/tools/nnapi_quickcheck/lib/env.cpp
@@ -16,14 +16,14 @@
#include "env.h"
-#include "util/environment.h"
+#include "misc/environment.h"
//
// Integer variable
//
IntVar::IntVar(const std::string &name, int32_t value) : _value{value}
{
- nnfw::util::env::IntAccessor{name}.access(_value);
+ nnfw::misc::env::IntAccessor{name}.access(_value);
}
//
@@ -31,7 +31,7 @@ IntVar::IntVar(const std::string &name, int32_t value) : _value{value}
//
FloatVar::FloatVar(const std::string &name, float value) : _value{value}
{
- nnfw::util::env::FloatAccessor{name}.access(_value);
+ nnfw::misc::env::FloatAccessor{name}.access(_value);
}
//
diff --git a/tools/nnapi_quickcheck/tests/add_1.cpp b/tools/nnapi_quickcheck/tests/add_1.cpp
index 098b8591b..f5363f918 100644
--- a/tools/nnapi_quickcheck/tests/add_1.cpp
+++ b/tools/nnapi_quickcheck/tests/add_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_1.lst"
@@ -146,7 +146,7 @@ TEST(NNAPI_Quickcheck_add_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_2.cpp b/tools/nnapi_quickcheck/tests/add_2.cpp
index 12851f1b0..fe4d12f5d 100644
--- a/tools/nnapi_quickcheck/tests/add_2.cpp
+++ b/tools/nnapi_quickcheck/tests/add_2.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_2.lst"
@@ -164,7 +164,7 @@ TEST(NNAPI_Quickcheck_add_2, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_3.cpp b/tools/nnapi_quickcheck/tests/add_3.cpp
index a7b2f436e..ce409ccbc 100644
--- a/tools/nnapi_quickcheck/tests/add_3.cpp
+++ b/tools/nnapi_quickcheck/tests/add_3.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/TensorShapeUtils.h"
@@ -36,14 +36,14 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_3, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -52,9 +52,9 @@ TEST(NNAPI_Quickcheck_add_3, simple_test)
#include "add_3.lst"
#undef STR_VALUE
- const auto LHS_SHAPE = nnfw::util::tensor::Shape::from(LHS_SHAPE_Value());
- const auto RHS_SHAPE = nnfw::util::tensor::Shape::from(RHS_SHAPE_Value());
- const auto OUT_SHAPE = nnfw::support::tflite::broadcast(LHS_SHAPE, RHS_SHAPE);
+ const auto LHS_SHAPE = nnfw::misc::tensor::Shape::from(LHS_SHAPE_Value());
+ const auto RHS_SHAPE = nnfw::misc::tensor::Shape::from(RHS_SHAPE_Value());
+ const auto OUT_SHAPE = nnfw::tflite::broadcast(LHS_SHAPE, RHS_SHAPE);
std::cout << "Configurations:" << std::endl;
#define PRINT_NEWLINE() \
@@ -75,7 +75,7 @@ TEST(NNAPI_Quickcheck_add_3, simple_test)
#undef PRINT_NEWLINE
auto setup = [&](Interpreter &interp) {
- using nnfw::support::tflite::as_dims;
+ using nnfw::tflite::as_dims;
// Comment from 'context.h'
//
@@ -121,15 +121,15 @@ TEST(NNAPI_Quickcheck_add_3, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
param.verbose = 0;
param.tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(param.verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(param.tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(param.verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(param.tolerance);
int res = RandomTestRunner{SEED, param}.run(builder);
diff --git a/tools/nnapi_quickcheck/tests/add_4.cpp b/tools/nnapi_quickcheck/tests/add_4.cpp
index 217438fb1..b1231dd3f 100644
--- a/tools/nnapi_quickcheck/tests/add_4.cpp
+++ b/tools/nnapi_quickcheck/tests/add_4.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_4, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_4.lst"
@@ -146,7 +146,7 @@ TEST(NNAPI_Quickcheck_add_4, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_5.cpp b/tools/nnapi_quickcheck/tests/add_5.cpp
index 2f02c8292..f900153c1 100644
--- a/tools/nnapi_quickcheck/tests/add_5.cpp
+++ b/tools/nnapi_quickcheck/tests/add_5.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_5, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_5.lst"
@@ -139,7 +139,7 @@ TEST(NNAPI_Quickcheck_add_5, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_6.cpp b/tools/nnapi_quickcheck/tests/add_6.cpp
index 3af2c4af3..83b87ef32 100644
--- a/tools/nnapi_quickcheck/tests/add_6.cpp
+++ b/tools/nnapi_quickcheck/tests/add_6.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_6, simple_test)
{
int verbose = 1;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_6.lst"
@@ -131,7 +131,7 @@ TEST(NNAPI_Quickcheck_add_6, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_7.cpp b/tools/nnapi_quickcheck/tests/add_7.cpp
index 1ebf18e32..732320f4a 100644
--- a/tools/nnapi_quickcheck/tests/add_7.cpp
+++ b/tools/nnapi_quickcheck/tests/add_7.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_7, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_7.lst"
@@ -139,7 +139,7 @@ TEST(NNAPI_Quickcheck_add_7, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_8.cpp b/tools/nnapi_quickcheck/tests/add_8.cpp
index d4a5e4838..d89e977d5 100644
--- a/tools/nnapi_quickcheck/tests/add_8.cpp
+++ b/tools/nnapi_quickcheck/tests/add_8.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_8, simple_test)
{
int verbose = 1;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_8.lst"
@@ -177,7 +177,7 @@ TEST(NNAPI_Quickcheck_add_8, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_9.cpp b/tools/nnapi_quickcheck/tests/add_9.cpp
index 6c0cc2240..fd4e1f9c1 100644
--- a/tools/nnapi_quickcheck/tests/add_9.cpp
+++ b/tools/nnapi_quickcheck/tests/add_9.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_9, simple_test)
{
int verbose = 1;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_9.lst"
@@ -174,7 +174,7 @@ TEST(NNAPI_Quickcheck_add_9, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/add_quan_1.cpp b/tools/nnapi_quickcheck/tests/add_quan_1.cpp
index daf15e091..e3d85122f 100644
--- a/tools/nnapi_quickcheck/tests/add_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/add_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_add_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "add_quan_1.lst"
@@ -149,7 +149,7 @@ TEST(NNAPI_Quickcheck_add_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/avg_pool_1.cpp b/tools/nnapi_quickcheck/tests/avg_pool_1.cpp
index e68cfc5b3..052c68915 100644
--- a/tools/nnapi_quickcheck/tests/avg_pool_1.cpp
+++ b/tools/nnapi_quickcheck/tests/avg_pool_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_avg_pool_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "avg_pool_1.lst"
@@ -137,7 +137,7 @@ TEST(NNAPI_Quickcheck_avg_pool_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/avg_pool_quan_1.cpp b/tools/nnapi_quickcheck/tests/avg_pool_quan_1.cpp
index b24733ab0..86f35f76d 100644
--- a/tools/nnapi_quickcheck/tests/avg_pool_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/avg_pool_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_avg_pool_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "avg_pool_quan_1.lst"
@@ -136,7 +136,7 @@ TEST(NNAPI_Quickcheck_avg_pool_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/cast_1.cpp b/tools/nnapi_quickcheck/tests/cast_1.cpp
index bf493d091..788cd575f 100644
--- a/tools/nnapi_quickcheck/tests/cast_1.cpp
+++ b/tools/nnapi_quickcheck/tests/cast_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -34,20 +34,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_cast_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "cast_1.lst"
@@ -123,7 +123,7 @@ TEST(NNAPI_Quickcheck_cast_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/cast_2.cpp b/tools/nnapi_quickcheck/tests/cast_2.cpp
index d85781d0c..a9e99ee2b 100644
--- a/tools/nnapi_quickcheck/tests/cast_2.cpp
+++ b/tools/nnapi_quickcheck/tests/cast_2.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_cast_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "cast_2.lst"
@@ -121,7 +121,7 @@ TEST(NNAPI_Quickcheck_cast_2, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/cast_q_to_f_1.cpp b/tools/nnapi_quickcheck/tests/cast_q_to_f_1.cpp
index ede763701..4af6c772f 100644
--- a/tools/nnapi_quickcheck/tests/cast_q_to_f_1.cpp
+++ b/tools/nnapi_quickcheck/tests/cast_q_to_f_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -34,20 +34,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_cast_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "cast_q_to_f_1.lst"
@@ -123,7 +123,7 @@ TEST(NNAPI_Quickcheck_cast_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/concat_1.cpp b/tools/nnapi_quickcheck/tests/concat_1.cpp
index cc14d083a..d2cb1aada 100644
--- a/tools/nnapi_quickcheck/tests/concat_1.cpp
+++ b/tools/nnapi_quickcheck/tests/concat_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_concat_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "concat_1.lst"
@@ -148,7 +148,7 @@ TEST(NNAPI_Quickcheck_concat_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/concat_quan_1.cpp b/tools/nnapi_quickcheck/tests/concat_quan_1.cpp
index f804b819b..f861ac857 100644
--- a/tools/nnapi_quickcheck/tests/concat_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/concat_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_concat_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "concat_quan_1.lst"
@@ -150,7 +150,7 @@ TEST(NNAPI_Quickcheck_concat_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/conv_1.cpp b/tools/nnapi_quickcheck/tests/conv_1.cpp
index 57b6abdfd..b5b145ccb 100644
--- a/tools/nnapi_quickcheck/tests/conv_1.cpp
+++ b/tools/nnapi_quickcheck/tests/conv_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_conv_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "conv_1.lst"
@@ -194,7 +194,7 @@ TEST(NNAPI_Quickcheck_conv_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/conv_quan_1.cpp b/tools/nnapi_quickcheck/tests/conv_quan_1.cpp
index 025cccadd..28245477b 100644
--- a/tools/nnapi_quickcheck/tests/conv_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/conv_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_conv_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "conv_quan_1.lst"
@@ -198,7 +198,7 @@ TEST(NNAPI_Quickcheck_conv_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/dconv_1.cpp b/tools/nnapi_quickcheck/tests/dconv_1.cpp
index 21d648331..36ec7a943 100644
--- a/tools/nnapi_quickcheck/tests/dconv_1.cpp
+++ b/tools/nnapi_quickcheck/tests/dconv_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_dconv_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "dconv_1.lst"
@@ -192,7 +192,7 @@ TEST(NNAPI_Quickcheck_dconv_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/dconv_quan_1.cpp b/tools/nnapi_quickcheck/tests/dconv_quan_1.cpp
index 7cb9383e4..8305ad140 100644
--- a/tools/nnapi_quickcheck/tests/dconv_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/dconv_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_dconv_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "dconv_quan_1.lst"
@@ -196,7 +196,7 @@ TEST(NNAPI_Quickcheck_dconv_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/dequantize_1.cpp b/tools/nnapi_quickcheck/tests/dequantize_1.cpp
index 467392dc3..e725fa220 100644
--- a/tools/nnapi_quickcheck/tests/dequantize_1.cpp
+++ b/tools/nnapi_quickcheck/tests/dequantize_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -34,20 +34,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_dequantize_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "dequantize_1.lst"
@@ -123,7 +123,7 @@ TEST(NNAPI_Quickcheck_dequantize_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/div_1.cpp b/tools/nnapi_quickcheck/tests/div_1.cpp
index 1fe0f7a28..26dfbbe53 100644
--- a/tools/nnapi_quickcheck/tests/div_1.cpp
+++ b/tools/nnapi_quickcheck/tests/div_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_div_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "div_1.lst"
@@ -146,7 +146,7 @@ TEST(NNAPI_Quickcheck_div_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/div_2.cpp b/tools/nnapi_quickcheck/tests/div_2.cpp
index 883167e95..df4efa4ff 100644
--- a/tools/nnapi_quickcheck/tests/div_2.cpp
+++ b/tools/nnapi_quickcheck/tests/div_2.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_div_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "div_2.lst"
@@ -139,7 +139,7 @@ TEST(NNAPI_Quickcheck_div_2, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/fully_connected_1.cpp b/tools/nnapi_quickcheck/tests/fully_connected_1.cpp
index ea5df23d2..43cd0a470 100644
--- a/tools/nnapi_quickcheck/tests/fully_connected_1.cpp
+++ b/tools/nnapi_quickcheck/tests/fully_connected_1.cpp
@@ -16,12 +16,12 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -34,7 +34,7 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
template <typename T> T *make_malloc(void) { return reinterpret_cast<T *>(malloc(sizeof(T))); }
@@ -43,13 +43,13 @@ TEST(NNAPI_Quickcheck_fully_connected_1, simple_test)
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "conv_1.lst"
@@ -174,7 +174,7 @@ TEST(NNAPI_Quickcheck_fully_connected_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/fully_connected_quan_1.cpp b/tools/nnapi_quickcheck/tests/fully_connected_quan_1.cpp
index c434d75f8..2c6883546 100644
--- a/tools/nnapi_quickcheck/tests/fully_connected_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/fully_connected_quan_1.cpp
@@ -16,12 +16,12 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -34,7 +34,7 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
template <typename T> T *make_malloc(void) { return reinterpret_cast<T *>(malloc(sizeof(T))); }
@@ -43,13 +43,13 @@ TEST(NNAPI_Quickcheck_fully_connected_1, simple_test)
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "fully_connected_quan_1.lst"
@@ -176,7 +176,7 @@ TEST(NNAPI_Quickcheck_fully_connected_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/gather_1.cpp b/tools/nnapi_quickcheck/tests/gather_1.cpp
index b916676c7..4ab164ea1 100644
--- a/tools/nnapi_quickcheck/tests/gather_1.cpp
+++ b/tools/nnapi_quickcheck/tests/gather_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -31,21 +31,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_gather_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "gather_1.lst"
@@ -119,7 +119,7 @@ TEST(NNAPI_Quickcheck_gather_1, simple_test)
interp.SetOutputs({2});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/gather_2.cpp b/tools/nnapi_quickcheck/tests/gather_2.cpp
index 4b115685d..ac9ec8b3b 100644
--- a/tools/nnapi_quickcheck/tests/gather_2.cpp
+++ b/tools/nnapi_quickcheck/tests/gather_2.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -31,21 +31,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_gather_2, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "gather_2.lst"
@@ -123,7 +123,7 @@ TEST(NNAPI_Quickcheck_gather_2, simple_test)
interp.SetOutputs({2});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/logistic_quan_1.cpp b/tools/nnapi_quickcheck/tests/logistic_quan_1.cpp
index e9e3bc1f7..0b0a69029 100644
--- a/tools/nnapi_quickcheck/tests/logistic_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/logistic_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_logistic_quan_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "logistic_quan_1.lst"
@@ -127,7 +127,7 @@ TEST(NNAPI_Quickcheck_logistic_quan_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/max_pool_1.cpp b/tools/nnapi_quickcheck/tests/max_pool_1.cpp
index 07fa9ac5b..62f985d65 100644
--- a/tools/nnapi_quickcheck/tests/max_pool_1.cpp
+++ b/tools/nnapi_quickcheck/tests/max_pool_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_max_pool_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "max_pool_1.lst"
@@ -143,7 +143,7 @@ TEST(NNAPI_Quickcheck_max_pool_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/max_pool_quan_1.cpp b/tools/nnapi_quickcheck/tests/max_pool_quan_1.cpp
index d241c43ba..2c05a7d22 100644
--- a/tools/nnapi_quickcheck/tests/max_pool_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/max_pool_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_max_pool_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "max_pool_quan_1.lst"
@@ -145,7 +145,7 @@ TEST(NNAPI_Quickcheck_max_pool_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/mul_1.cpp b/tools/nnapi_quickcheck/tests/mul_1.cpp
index cf7282bc7..57ab71350 100644
--- a/tools/nnapi_quickcheck/tests/mul_1.cpp
+++ b/tools/nnapi_quickcheck/tests/mul_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -34,20 +34,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_mul_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "mul_1.lst"
@@ -137,7 +137,7 @@ TEST(NNAPI_Quickcheck_mul_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/mul_2.cpp b/tools/nnapi_quickcheck/tests/mul_2.cpp
index 734be8e3f..a692616e0 100644
--- a/tools/nnapi_quickcheck/tests/mul_2.cpp
+++ b/tools/nnapi_quickcheck/tests/mul_2.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -34,20 +34,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_mul_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "mul_2.lst"
@@ -137,7 +137,7 @@ TEST(NNAPI_Quickcheck_mul_2, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/mul_quan_1.cpp b/tools/nnapi_quickcheck/tests/mul_quan_1.cpp
index 72903fe42..5f0061e8d 100644
--- a/tools/nnapi_quickcheck/tests/mul_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/mul_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/interp/FunctionBuilder.h"
@@ -34,20 +34,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_mul_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "mul_1.lst"
@@ -139,7 +139,7 @@ TEST(NNAPI_Quickcheck_mul_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/relu1_1.cpp b/tools/nnapi_quickcheck/tests/relu1_1.cpp
index b729c50c3..25e71dc55 100644
--- a/tools/nnapi_quickcheck/tests/relu1_1.cpp
+++ b/tools/nnapi_quickcheck/tests/relu1_1.cpp
@@ -14,14 +14,14 @@
* limitations under the License.
*/
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -33,15 +33,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
int main(int argc, char **argv)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "relu1_1.lst"
@@ -53,7 +53,7 @@ int main(int argc, char **argv)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -110,7 +110,7 @@ int main(int argc, char **argv)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/relu6_1.cpp b/tools/nnapi_quickcheck/tests/relu6_1.cpp
index cafbe8377..43e8383f3 100644
--- a/tools/nnapi_quickcheck/tests/relu6_1.cpp
+++ b/tools/nnapi_quickcheck/tests/relu6_1.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_relu6_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "relu6_1.lst"
@@ -55,7 +55,7 @@ TEST(NNAPI_Quickcheck_relu6_1, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -112,7 +112,7 @@ TEST(NNAPI_Quickcheck_relu6_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/relu6_quan_1.cpp b/tools/nnapi_quickcheck/tests/relu6_quan_1.cpp
index 3d8ce4f7c..8356442ce 100644
--- a/tools/nnapi_quickcheck/tests/relu6_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/relu6_quan_1.cpp
@@ -14,14 +14,14 @@
* limitations under the License.
*/
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -33,15 +33,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
int main(int argc, char **argv)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "relu6_quan_1.lst"
@@ -53,7 +53,7 @@ int main(int argc, char **argv)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -112,7 +112,7 @@ int main(int argc, char **argv)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/relu_1.cpp b/tools/nnapi_quickcheck/tests/relu_1.cpp
index c7141ca6a..decd0ddfb 100644
--- a/tools/nnapi_quickcheck/tests/relu_1.cpp
+++ b/tools/nnapi_quickcheck/tests/relu_1.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_relu_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "relu_1.lst"
@@ -55,7 +55,7 @@ TEST(NNAPI_Quickcheck_relu_1, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -112,7 +112,7 @@ TEST(NNAPI_Quickcheck_relu_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/relu_2.cpp b/tools/nnapi_quickcheck/tests/relu_2.cpp
index 74ab113c2..ccb9f06c4 100644
--- a/tools/nnapi_quickcheck/tests/relu_2.cpp
+++ b/tools/nnapi_quickcheck/tests/relu_2.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_relu_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "relu_2.lst"
@@ -56,7 +56,7 @@ TEST(NNAPI_Quickcheck_relu_2, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -115,7 +115,7 @@ TEST(NNAPI_Quickcheck_relu_2, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/relu_3.cpp b/tools/nnapi_quickcheck/tests/relu_3.cpp
index f876e18ee..59a856041 100644
--- a/tools/nnapi_quickcheck/tests/relu_3.cpp
+++ b/tools/nnapi_quickcheck/tests/relu_3.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_relu_3, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "relu_3.lst"
@@ -57,7 +57,7 @@ TEST(NNAPI_Quickcheck_relu_3, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -118,7 +118,7 @@ TEST(NNAPI_Quickcheck_relu_3, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/relu_quan_1.cpp b/tools/nnapi_quickcheck/tests/relu_quan_1.cpp
index 7bc6181ad..303080ef5 100644
--- a/tools/nnapi_quickcheck/tests/relu_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/relu_quan_1.cpp
@@ -14,14 +14,14 @@
* limitations under the License.
*/
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -33,15 +33,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
int main(int argc, char **argv)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "relu_quan_1.lst"
@@ -53,7 +53,7 @@ int main(int argc, char **argv)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -112,7 +112,7 @@ int main(int argc, char **argv)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/reshape_1.cpp b/tools/nnapi_quickcheck/tests/reshape_1.cpp
index a35623ced..54cfce2f7 100644
--- a/tools/nnapi_quickcheck/tests/reshape_1.cpp
+++ b/tools/nnapi_quickcheck/tests/reshape_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_reshape_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "max_pool_1.lst"
@@ -128,7 +128,7 @@ TEST(NNAPI_Quickcheck_reshape_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/reshape_quan_1.cpp b/tools/nnapi_quickcheck/tests/reshape_quan_1.cpp
index fb1db77e7..8eb0bf387 100644
--- a/tools/nnapi_quickcheck/tests/reshape_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/reshape_quan_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_reshape_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "reshape_quan_1.lst"
@@ -130,7 +130,7 @@ TEST(NNAPI_Quickcheck_reshape_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/resize_bilinear_1.cpp b/tools/nnapi_quickcheck/tests/resize_bilinear_1.cpp
index 3c7ce28b1..5b2d7b634 100644
--- a/tools/nnapi_quickcheck/tests/resize_bilinear_1.cpp
+++ b/tools/nnapi_quickcheck/tests/resize_bilinear_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_resize_bilinear_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "resize_bilinear_1.lst"
@@ -128,7 +128,7 @@ TEST(NNAPI_Quickcheck_resize_bilinear_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/softmax_1.cpp b/tools/nnapi_quickcheck/tests/softmax_1.cpp
index b9cc6bf0f..71424755a 100644
--- a/tools/nnapi_quickcheck/tests/softmax_1.cpp
+++ b/tools/nnapi_quickcheck/tests/softmax_1.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_softmax_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "softmax_1.lst"
@@ -56,12 +56,12 @@ TEST(NNAPI_Quickcheck_softmax_1, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
- const nnfw::util::feature::Shape ifm_shape{IFM_C, IFM_H, IFM_W};
+ const nnfw::misc::feature::Shape ifm_shape{IFM_C, IFM_H, IFM_W};
const int32_t OFM_C = IFM_C;
const int32_t OFM_H = IFM_H;
@@ -107,7 +107,7 @@ TEST(NNAPI_Quickcheck_softmax_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/softmax_2.cpp b/tools/nnapi_quickcheck/tests/softmax_2.cpp
index 402435718..df1ff2731 100644
--- a/tools/nnapi_quickcheck/tests/softmax_2.cpp
+++ b/tools/nnapi_quickcheck/tests/softmax_2.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_softmax_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define FLOAT_VALUE(NAME, VALUE) FloatVar NAME##_Value(#NAME, VALUE);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
@@ -75,12 +75,12 @@ TEST(NNAPI_Quickcheck_softmax_2, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
- const nnfw::util::feature::Shape ifm_shape{IFM_C, IFM_H, IFM_W};
+ const nnfw::misc::feature::Shape ifm_shape{IFM_C, IFM_H, IFM_W};
const int32_t OFM_C = IFM_C;
const int32_t OFM_H = IFM_H;
@@ -126,7 +126,7 @@ TEST(NNAPI_Quickcheck_softmax_2, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/softmax_quan_1.cpp b/tools/nnapi_quickcheck/tests/softmax_quan_1.cpp
index 2c12e1104..5d38f7727 100644
--- a/tools/nnapi_quickcheck/tests/softmax_quan_1.cpp
+++ b/tools/nnapi_quickcheck/tests/softmax_quan_1.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_softmax_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "softmax_quan_1.lst"
@@ -56,12 +56,12 @@ TEST(NNAPI_Quickcheck_softmax_1, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
- const nnfw::util::feature::Shape ifm_shape{IFM_C, IFM_H, IFM_W};
+ const nnfw::misc::feature::Shape ifm_shape{IFM_C, IFM_H, IFM_W};
const int32_t OFM_C = IFM_C;
const int32_t OFM_H = IFM_H;
@@ -109,7 +109,7 @@ TEST(NNAPI_Quickcheck_softmax_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/split_1.cpp b/tools/nnapi_quickcheck/tests/split_1.cpp
index 735a0777f..95a7aa842 100644
--- a/tools/nnapi_quickcheck/tests/split_1.cpp
+++ b/tools/nnapi_quickcheck/tests/split_1.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_split_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "split_1.lst"
@@ -59,7 +59,7 @@ TEST(NNAPI_Quickcheck_split_1, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -140,7 +140,7 @@ TEST(NNAPI_Quickcheck_split_1, simple_test)
interp.SetOutputs(ofm_indexes);
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/split_2.cpp b/tools/nnapi_quickcheck/tests/split_2.cpp
index 0d4bf3e13..eb06ea0f2 100644
--- a/tools/nnapi_quickcheck/tests/split_2.cpp
+++ b/tools/nnapi_quickcheck/tests/split_2.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_split_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "split_2.lst"
@@ -59,7 +59,7 @@ TEST(NNAPI_Quickcheck_split_2, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -140,7 +140,7 @@ TEST(NNAPI_Quickcheck_split_2, simple_test)
interp.SetOutputs(ofm_indexes);
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/split_3.cpp b/tools/nnapi_quickcheck/tests/split_3.cpp
index eaa65bdc2..e3beb5bd2 100644
--- a/tools/nnapi_quickcheck/tests/split_3.cpp
+++ b/tools/nnapi_quickcheck/tests/split_3.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_split_3, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "split_3.lst"
@@ -57,7 +57,7 @@ TEST(NNAPI_Quickcheck_split_3, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -134,7 +134,7 @@ TEST(NNAPI_Quickcheck_split_3, simple_test)
interp.SetOutputs(ofm_indexes);
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/split_4.cpp b/tools/nnapi_quickcheck/tests/split_4.cpp
index 394c392b6..e098973d2 100644
--- a/tools/nnapi_quickcheck/tests/split_4.cpp
+++ b/tools/nnapi_quickcheck/tests/split_4.cpp
@@ -16,14 +16,14 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
-#include "util/feature/Shape.h"
+#include "misc/environment.h"
+#include "misc/feature/Shape.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,15 +35,15 @@
#include <cassert>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_split_4, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "split_4.lst"
@@ -57,7 +57,7 @@ TEST(NNAPI_Quickcheck_split_4, simple_test)
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Initialize random number generator
std::minstd_rand random(SEED);
@@ -134,7 +134,7 @@ TEST(NNAPI_Quickcheck_split_4, simple_test)
interp.SetOutputs(ofm_indexes);
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/sub_1.cpp b/tools/nnapi_quickcheck/tests/sub_1.cpp
index 9e4ff4ce7..8bc4208c5 100644
--- a/tools/nnapi_quickcheck/tests/sub_1.cpp
+++ b/tools/nnapi_quickcheck/tests/sub_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_sub_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "sub_1.lst"
@@ -146,7 +146,7 @@ TEST(NNAPI_Quickcheck_sub_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/sub_2.cpp b/tools/nnapi_quickcheck/tests/sub_2.cpp
index 2702edbf1..423e105f2 100644
--- a/tools/nnapi_quickcheck/tests/sub_2.cpp
+++ b/tools/nnapi_quickcheck/tests/sub_2.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_sub_2, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "sub_2.lst"
@@ -139,7 +139,7 @@ TEST(NNAPI_Quickcheck_sub_2, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/sub_3.cpp b/tools/nnapi_quickcheck/tests/sub_3.cpp
index 6122e2486..7bb6ab4c0 100644
--- a/tools/nnapi_quickcheck/tests/sub_3.cpp
+++ b/tools/nnapi_quickcheck/tests/sub_3.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_sub_3, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "sub_3.lst"
@@ -131,7 +131,7 @@ TEST(NNAPI_Quickcheck_sub_3, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/sub_4.cpp b/tools/nnapi_quickcheck/tests/sub_4.cpp
index 28922fe4b..7fc857746 100644
--- a/tools/nnapi_quickcheck/tests/sub_4.cpp
+++ b/tools/nnapi_quickcheck/tests/sub_4.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_sub_4, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "sub_1.lst"
@@ -139,7 +139,7 @@ TEST(NNAPI_Quickcheck_sub_4, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/sub_5.cpp b/tools/nnapi_quickcheck/tests/sub_5.cpp
index e421a12c2..19f95b616 100644
--- a/tools/nnapi_quickcheck/tests/sub_5.cpp
+++ b/tools/nnapi_quickcheck/tests/sub_5.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_sub_5, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "sub_5.lst"
@@ -175,7 +175,7 @@ TEST(NNAPI_Quickcheck_sub_5, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/sub_6.cpp b/tools/nnapi_quickcheck/tests/sub_6.cpp
index 00bfc5423..66b167eb3 100644
--- a/tools/nnapi_quickcheck/tests/sub_6.cpp
+++ b/tools/nnapi_quickcheck/tests/sub_6.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_sub_6, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "sub_6.lst"
@@ -175,7 +175,7 @@ TEST(NNAPI_Quickcheck_sub_6, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/tanh_1.cpp b/tools/nnapi_quickcheck/tests/tanh_1.cpp
index 6a9a29933..7dd92613c 100644
--- a/tools/nnapi_quickcheck/tests/tanh_1.cpp
+++ b/tools/nnapi_quickcheck/tests/tanh_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -35,20 +35,20 @@
#include <random>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_tanh_1, simple_test)
{
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "tanh_1.lst"
@@ -121,7 +121,7 @@ TEST(NNAPI_Quickcheck_tanh_1, simple_test)
interp.SetOutputs({0});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnapi_quickcheck/tests/topk_v2_1.cpp b/tools/nnapi_quickcheck/tests/topk_v2_1.cpp
index 4b145e5a7..c47af57cc 100644
--- a/tools/nnapi_quickcheck/tests/topk_v2_1.cpp
+++ b/tools/nnapi_quickcheck/tests/topk_v2_1.cpp
@@ -16,13 +16,13 @@
#include "gtest/gtest.h"
-#include "tflite/kernels/register.h"
-#include "tensorflow/contrib/lite/model.h"
-#include "tensorflow/contrib/lite/builtin_op_data.h"
+#include "tflite/ext/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/builtin_op_data.h"
#include "env.h"
#include "memory.h"
-#include "util/environment.h"
+#include "misc/environment.h"
#include "tflite/Diff.h"
#include "tflite/Quantization.h"
@@ -32,21 +32,21 @@
#include <iostream>
using namespace tflite;
-using namespace tflite::ops::builtin;
+using namespace nnfw::tflite;
TEST(NNAPI_Quickcheck_topk_v2_1, simple_test)
{
// Set random seed
int SEED = std::chrono::system_clock::now().time_since_epoch().count();
- nnfw::util::env::IntAccessor("SEED").access(SEED);
+ nnfw::misc::env::IntAccessor("SEED").access(SEED);
// Set random test parameters
int verbose = 0;
int tolerance = 1;
- nnfw::util::env::IntAccessor("VERBOSE").access(verbose);
- nnfw::util::env::IntAccessor("TOLERANCE").access(tolerance);
+ nnfw::misc::env::IntAccessor("VERBOSE").access(verbose);
+ nnfw::misc::env::IntAccessor("TOLERANCE").access(tolerance);
#define INT_VALUE(NAME, VALUE) IntVar NAME##_Value(#NAME, VALUE);
#include "topk_v2_1.lst"
@@ -125,7 +125,7 @@ TEST(NNAPI_Quickcheck_topk_v2_1, simple_test)
interp.SetOutputs({2, 3});
};
- const nnfw::support::tflite::interp::FunctionBuilder builder(setup);
+ const nnfw::tflite::FunctionBuilder builder(setup);
RandomTestParam param;
diff --git a/tools/nnpackage_tool/model2nnpkg/README.md b/tools/nnpackage_tool/model2nnpkg/README.md
new file mode 100644
index 000000000..716f4f8d5
--- /dev/null
+++ b/tools/nnpackage_tool/model2nnpkg/README.md
@@ -0,0 +1,21 @@
+# model2nnpkg
+
+`model2nnpkg` is a tool to convert a model (either `tflite` or `circle`) into an `nnpackage`.
+
+It takes a `modelfile` as input and generates an `nnpackage`.
+
+## Usage
+
+```
+Usage: model2nnpkg.sh [options] modelfile
+Convert modelfile (either tflite or circle) to nnpackage.
+
+Options:
+ -h show this help
+ -o set nnpackage output directory (default=.)
+
+Examples:
+ model2nnpkg.sh add.tflite => create nnpackage in ./
+ model2nnpkg.sh -o out add.tflite => create nnpackage in out/
+
+```
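For orientation, here is a minimal Python sketch of the same packaging steps that the model2nnpkg.sh script below performs (write metadata/MANIFEST, then copy the model); `pack_model` is a hypothetical helper name, not part of the tool:

```
import json
import os
import shutil

def pack_model(modelfile, outdir="."):
    # Derive the package name and model type from the file name, as the script does.
    name, ext = os.path.splitext(os.path.basename(modelfile))
    ext = ext.lstrip(".")
    metadata_dir = os.path.join(outdir, name, "metadata")
    os.makedirs(metadata_dir, exist_ok=True)
    manifest = {
        "major-version": "1",
        "minor-version": "0",
        "patch-version": "0",
        "models": ["{}.{}".format(name, ext)],
        "model-types": [ext],
    }
    with open(os.path.join(metadata_dir, "MANIFEST"), "w") as f:
        json.dump(manifest, f, indent=2)
    shutil.copy(modelfile, os.path.join(outdir, name))

pack_model("add.tflite", "out")  # same effect as: model2nnpkg.sh -o out add.tflite
```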
diff --git a/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh b/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh
new file mode 100644
index 000000000..87cd7878b
--- /dev/null
+++ b/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh
@@ -0,0 +1,70 @@
+#!/bin/bash
+
+set -eu
+
+progname=$(basename "${BASH_SOURCE[0]}")
+outdir="."
+
+usage() {
+ echo "Usage: $progname [options] modelfile"
+ echo "Convert modelfile (either tflite or circle) to nnpackage."
+ echo ""
+ echo "Options:"
+ echo " -h show this help"
+ echo " -o set nnpackage output directory (default=$outdir)"
+ echo ""
+ echo "Examples:"
+ echo " $progname add.tflite => create nnpackage in $outdir/"
+ echo " $progname -o out add.tflite => create nnpackage in out/"
+ exit 1
+}
+
+if [ $# -eq 0 ]; then
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+while getopts "ho:" OPTION; do
+case "${OPTION}" in
+ h) usage;;
+ o) outdir=$OPTARG;;
+ ?) exit 1;;
+esac
+done
+
+shift $((OPTIND-1))
+
+if [ $# -ne 1 ]; then
+ echo "error: wrong number of arguments (expected exactly one)."
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+modelfile=$(basename "$1")
+
+if [[ "$modelfile" != *.* ]]; then
+ echo "error: modelfile does not have an extension."
+ echo "Please provide an extension so that $progname can identify the model type."
+ exit 1
+fi
+
+if [ ! -e "$1" ]; then
+ echo "error: $1 does not exist."
+ exit 1
+fi
+
+name=${modelfile%.*}
+extension=${modelfile##*.}
+
+echo "Generating nnpackage "$name" in "$outdir""
+mkdir -p "$outdir"/"$name"/metadata
+cat > "$outdir"/"$name"/metadata/MANIFEST <<-EOF
+{
+ "major-version" : "1",
+ "minor-version" : "0",
+ "patch-version" : "0",
+ "models" : [ "$name.$extension" ],
+ "model-types" : [ "$extension" ]
+}
+EOF
+cp "$1" "$outdir"/"$name"
diff --git a/tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/README.md b/tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/README.md
new file mode 100644
index 000000000..9e5ae2938
--- /dev/null
+++ b/tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/README.md
@@ -0,0 +1,26 @@
+# nncc-tc-to-nnpkg-tc
+
+`nncc-tc-to-nnpkg-tc` is a tool to convert an nncc testcase to an nnpackage testcase.
+
+It takes an nncc testcase (a model plus its input/expected data) as input and generates an nnpackage testcase.
+
+## Usage
+
+```
+Usage: nncc-tc-to-nnpkg-tc.sh [options] nncc_tc_name
+Convert nncc testcase to nnpackage testcase.
+
+Options:
+ -h show this help
+ -i set input directory (default=.)
+ -o set nnpackage testcase output directory (default=.)
+
+Env:
+ model2nnpkg path to model2nnpkg tool (default={this_script_home}/../model2nnpkg)
+
+Examples:
+ nncc-tc-to-nnpkg-tc.sh -i build/compiler/tf2tflite UNIT_Add_000
+ => create nnpackage testcase in ./ from build/compiler/tf2tflite/UNIT_Add_000.*
+ nncc-tc-to-nnpkg-tc.sh -o out UNIT_Add_000
+ => create nnpackage testcase in out/ using ./UNIT_Add_000.*
+```
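For reference, a minimal Python sketch of the flow implemented by the script below (detect the model type, package the model via model2nnpkg, then copy the testcase data); the helper name and the default tool path are assumptions:

```
import os
import shutil
import subprocess

def nncc_tc_to_nnpkg_tc(tcname, indir=".", outdir=".",
                        model2nnpkg="../model2nnpkg/model2nnpkg.sh"):
    # Pick whichever supported model file sits next to the testcase
    # (raises StopIteration if none is found).
    model_type = next(ext for ext in ("circle", "tflite")
                      if os.path.exists(os.path.join(indir, tcname + "." + ext)))
    subprocess.check_call([model2nnpkg, "-o", outdir,
                           os.path.join(indir, "{}.{}".format(tcname, model_type))])
    destdir = os.path.join(outdir, tcname, "metadata", "tc")
    os.makedirs(destdir, exist_ok=True)
    for ext in ("expected.h5", "input.h5"):
        shutil.copy(os.path.join(indir, "{}.{}".format(tcname, ext)),
                    os.path.join(destdir, ext))
```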
diff --git a/tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/nncc-tc-to-nnpkg-tc.sh b/tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/nncc-tc-to-nnpkg-tc.sh
new file mode 100644
index 000000000..01ce1476f
--- /dev/null
+++ b/tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/nncc-tc-to-nnpkg-tc.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+set -eu
+
+progname=$(basename "${BASH_SOURCE[0]}")
+script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+model2nnpkg=${model2nnpkg:-"$script_dir"/../model2nnpkg/model2nnpkg.sh}
+
+indir="."
+outdir="."
+
+usage() {
+ echo "Usage: $progname [options] nncc_tc_name"
+ echo "Convert nncc testcase to nnpackage testcase."
+ echo ""
+ echo "Options:"
+ echo " -h show this help"
+ echo " -i set input directory (default=$indir)"
+ echo " -o set nnpackage testcase output directory (default=$outdir)"
+ echo ""
+ echo "Env:"
+ echo " model2nnpkg path to model2nnpkg tool (default={this_script_home}/../model2nnpkg)"
+ echo ""
+ echo "Examples:"
+ echo " $progname -i build/compiler/tf2tflite UNIT_Add_000"
+ echo " => create nnpackage testcase in $outdir/ from build/compiler/tf2tflite/UNIT_Add_000.*"
+ echo " $progname -o out UNIT_Add_000"
+ echo " => create nnpackage testcase in out/ using $indir/UNIT_Add_000.*"
+ exit 1
+}
+
+if [ $# -eq 0 ]; then
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+while getopts "hi:o:" OPTION; do
+case "${OPTION}" in
+ h) usage;;
+ i) indir=$OPTARG;;
+ o) outdir=$OPTARG;;
+ ?) exit 1;;
+esac
+done
+
+shift $((OPTIND-1))
+
+if [ $# -ne 1 ]; then
+ echo "error: wrong number of arguments (expected exactly one)."
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+tcname=$1
+
+supported_model_types="
+circle
+tflite
+"
+
+model_type=""
+
+for ext in $supported_model_types; do
+ [ -e "$indir/$tcname"."$ext" ] && model_type=$ext
+done;
+
+if [[ "$model_type" == "" ]]; then
+ echo "error: No modelfile is found in $indir/$tcname*"
+ exit 1
+fi
+
+$model2nnpkg -o "$outdir" "$indir/$tcname"."$model_type"
+
+extensions="
+expected.h5
+input.h5
+"
+
+destdir="$outdir/$tcname/metadata/tc"
+mkdir -p "$destdir"
+for ext in $extensions; do
+ cp "$indir/$tcname.$ext" "$destdir/$ext"
+done;
+
diff --git a/tools/nnpackage_tool/nnpackager/nnpackager.py b/tools/nnpackage_tool/nnpackager/nnpackager.py
new file mode 100644
index 000000000..8e2805aa7
--- /dev/null
+++ b/tools/nnpackage_tool/nnpackager/nnpackager.py
@@ -0,0 +1,65 @@
+#!/usr/bin/python3
+
+import json
+import os
+from collections import OrderedDict
+import sys
+import argparse
+import shutil
+
+
+def verify(path):
+ nnpackage_root_path = path
+
+ # Check nnpackage_root existence
+ if not os.path.isdir(nnpackage_root_path):
+ print("Error: nnpackage_root {} does not exist.".format(nnpackage_root_path))
+ sys.exit(-1)
+
+ # Check MANIFEST existence
+ manifest_path = os.path.join(nnpackage_root_path, "metadata", "MANIFEST")
+ if not os.path.exists(manifest_path):
+ print("Error: MANIFEST {} does not exist.".format(manifest_path))
+ sys.exit(-1)
+
+ # Check MANIFEST
+ with open(manifest_path, "r") as f:
+ try:
+ json_dict = json.load(f, object_pairs_hook=OrderedDict)
+ # Check models attributes
+ for m in json_dict["models"]:
+ model_path = os.path.join(nnpackage_root_path, m)
+ if not os.path.exists(model_path):
+ print("Error: model {} does not exist.".format(model_path))
+ sys.exit(-1)
+ print("nnpackage validation check passed.")
+        except ValueError:
+            sys.exit("MANIFEST is not valid JSON.")
+        except KeyError:
+            sys.exit("models attribute does not exist.")
+
+
+def compress(path):
+ nnpackage_name = os.path.basename(os.path.normpath(path))
+ shutil.make_archive(nnpackage_name, 'zip', path)
+ print("nnpackage compression is done")
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('path', type=str, help='the path to nnpackage')
+ parser.add_argument(
+ '-v', '--verify', action='store_true', help="verify nnpackage (default: false)")
+ parser.add_argument(
+ '-c',
+ '--compress',
+ action='store_true',
+ help="compress nnpackage (default: false)")
+
+ args = parser.parse_args()
+
+ if args.verify:
+ verify(args.path)
+
+ if args.compress:
+ compress(args.path)
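The packager above can also be driven from Python rather than the command line; a minimal sketch, assuming the script's directory is importable (the `if __name__` guard keeps the argparse block from running on import; the path below is an assumed checkout layout):

```
import sys
sys.path.insert(0, "tools/nnpackage_tool/nnpackager")  # assumed checkout layout

from nnpackager import verify, compress

verify("out/add")    # prints "nnpackage validation check passed." or exits non-zero
compress("out/add")  # writes add.zip into the current working directory
```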
diff --git a/tools/nnpackage_tool/nnpkg_test/README.md b/tools/nnpackage_tool/nnpkg_test/README.md
new file mode 100644
index 000000000..916877a3d
--- /dev/null
+++ b/tools/nnpackage_tool/nnpkg_test/README.md
@@ -0,0 +1,42 @@
+# nnpkg_test
+
+`nnpkg_test` is a tool to run an nnpackage testcase.
+
+`nnpackage testcase` is an nnpackage with additional data:
+
+- input.h5 (input data)
+- expected.h5 (expected output data)
+
+`nnpkg_test` uses `nnpackage_run` internally to run the `nnpackage`.
+
+Then, it compares the actual output against `expected.h5` using `difftool` (either `i5diff` or `h5diff`).
+
+`nnpkg_test` returns `0` on success, `non-zero` otherwise.
+
+## Usage
+
+```
+$ tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh -h
+Usage: nnpkg_test.sh [options] nnpackage_test
+Run an nnpackage testcase
+
+Returns
+ 0 success
+ non-zero failure
+
+Options:
+ -h show this help
+ -i set input directory (default=.)
+ -o set output directory (default=.)
+ -d delete dumped file on failure.
+ (dumped files are always deleted on success) (default=0)
+
+Environment variables:
+ nnpackage_run path to nnpackage_run (default=Product/out/bin/nnpackage_run)
+ difftool path to i5diff or h5diff (default=h5diff)
+
+Examples:
+ nnpkg_test.sh Add_000 => run ./Add_000 and check output
+ nnpkg_test.sh -i nnpkg-tcs Add_000 => run nnpkg-tcs/Add_000 and check output
+
+```
diff --git a/tools/nnpackage_tool/nnpkg_test/list b/tools/nnpackage_tool/nnpkg_test/list
new file mode 100644
index 000000000..b4dcc882c
--- /dev/null
+++ b/tools/nnpackage_tool/nnpkg_test/list
@@ -0,0 +1,22 @@
+UNIT_Add_000
+UNIT_AvgPool_000
+UNIT_AvgPool_001
+UNIT_BiasAdd_002
+UNIT_Concat_002
+UNIT_Conv2D_000
+UNIT_DepthwiseConv2D_000
+UNIT_DepthwiseConv2D_001
+UNIT_MaxPool_000
+UNIT_MaxPool_001
+UNIT_RealDiv_000
+UNIT_Relu_000
+UNIT_Relu6_000
+UNIT_Reshape_000
+UNIT_Rsqrt_000
+UNIT_Softmax_001
+UNIT_Sqrt_000
+UNIT_Squeeze_000
+UNIT_Squeeze_001
+UNIT_Squeeze_002
+UNIT_Sub_000
+UNIT_Tanh_000
diff --git a/tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh b/tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh
new file mode 100644
index 000000000..e4b81a09f
--- /dev/null
+++ b/tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh
@@ -0,0 +1,123 @@
+#!/bin/bash
+
+set -u
+
+command_exists() {
+ command -v "$@" > /dev/null 2>&1
+}
+
+progname=$(basename "${BASH_SOURCE[0]}")
+indir="."
+outdir="."
+nnpkg_run=${nnpkg_run:-"Product/out/bin/nnpackage_run"}
+difftool=${difftool:-"h5diff"}
+delete_dumped_on_failure=0
+
+usage() {
+ echo "Usage: $progname [options] nnpackage_test"
+ echo "Run an nnpackage testcase"
+ echo ""
+ echo "Returns"
+ echo " 0 success"
+ echo " non-zero failure"
+ echo ""
+ echo "Options:"
+ echo " -h show this help"
+ echo " -i set input directory (default=$indir)"
+ echo " -o set output directory (default=$outdir)"
+ echo " -d delete dumped file on failure."
+ echo " (dumped files are always deleted on success) (default=$delete_dumped_on_failure)"
+ echo ""
+ echo "Environment variables:"
+ echo " nnpackage_run path to nnpackage_run (default=Product/out/bin/nnpackage_run)"
+ echo " difftool path to i5diff or h5diff (default=h5diff)"
+ echo ""
+ echo "Examples:"
+ echo " $progname Add_000 => run $indir/Add_000 and check output"
+ echo " $progname -i nnpkg-tcs Add_000 => run nnpkg-tcs/Add_000 and check output"
+ exit 1
+}
+
+if [ $# -eq 0 ]; then
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+while getopts "hdi:" OPTION; do
+case "${OPTION}" in
+ h) usage;;
+ d) delete_dumped_on_failure=1;;
+ i) indir=$OPTARG;;
+ ?) exit 1;;
+esac
+done
+
+shift $((OPTIND-1))
+
+if [ $# -ne 1 ]; then
+ echo "error: wrong number of arguments (expected exactly one)."
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+if [ ! -e Product ]; then
+ echo "error: please make sure to run this script in nnfw home."
+ exit 1
+fi
+
+tcname=$(basename "$1")
+nnpkg="$indir/$tcname"
+
+# run
+
+if [ ! -e "$nnpkg" ]; then
+ echo "error: nnpackage '$nnpkg' does not exist."
+ exit 1
+fi
+
+if ! command_exists "$nnpkg_run"; then
+ echo "error: runner '$nnpkg_run' does not exist."
+ exit 1
+fi
+
+dumped="$outdir/$tcname".out.h5
+
+echo -n "[ Run ] $nnpkg "
+
+if LD_LIBRARY_PATH=Product/out/lib $nnpkg_run \
+--nnpackage "$nnpkg" \
+--load "$nnpkg/metadata/tc/input.h5" \
+--dump "$dumped" >& /dev/null > "$dumped.log" 2>&1 ; then
+ echo -e "\tPass"
+ rm "$dumped.log"
+else
+ echo -e "\tFail"
+ echo ""
+ cat "$dumped.log"
+ echo ""
+ rm "$dumped.log"
+ exit 2
+fi
+
+# diff
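+# compare dumped output with expected output; -d 0.001 allows an absolute difference of up to 0.001 in the /value dataset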
+
+if ! command_exists "$difftool"; then
+ echo "error: difftool '$difftool' does not exist."
+ exit 1
+fi
+
+expected="$nnpkg/metadata/tc/expected.h5"
+
+echo -n "[Compare] $nnpkg "
+
+if $difftool -d 0.001 -v "$dumped" "$expected" /value >& "$dumped.log"; then
+ echo -e "\tPass"
+ rm "$dumped" "$dumped.log"
+else
+ echo -e "\tFail"
+ [ "$delete_dumped_on_failure" -eq 1 ] && rm "$dumped"
+ cat "$dumped.log"
+ rm "$dumped.log"
+ exit 3
+fi
diff --git a/tools/nnpackage_tool/tflite2circle/README.md b/tools/nnpackage_tool/tflite2circle/README.md
new file mode 100644
index 000000000..94ef5068c
--- /dev/null
+++ b/tools/nnpackage_tool/tflite2circle/README.md
@@ -0,0 +1,28 @@
+# tflite2circle
+
+`tflite2circle` is a tool to convert tflite into circle.
+
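+Internally, the conversion is a three-step pipeline: `flatc` dumps the tflite
+file to JSON, `tflitejson2circlejson.py` adapts the JSON to the circle schema,
+and `flatc` rebuilds the result as a circle binary. A sketch of the steps
+(paths and flags abbreviated) is:
+
+```
+flatc -t schema.fbs -- Add_000.tflite                   # tflite -> Add_000.json
+tflitejson2circlejson.py Add_000.json > Add_000.circle  # JSON -> circle JSON
+flatc -b circle_schema.fbs Add_000.circle               # circle JSON -> binary
+```
+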
+## Usage
+
+```
+Usage: tflite2circle.sh [options] tflite
+Convert tflite to circle
+
+Returns
+ 0 success
+ non-zero failure
+
+Options:
+ -h show this help
+ -o set output directory (default=.)
+
+Environment variables:
+ flatc path to flatc
+ (default=./build/externals/FLATBUFFERS/build/flatc)
+ tflite_schema path to schema.fbs
+ (default=./externals/TENSORFLOW-1.12/tensorflow/contrib/lite/schema/schema.fbs)
+
+Examples:
+ tflite2circle.sh Add_000.tflite => convert Add_000.tflite into Add_000.circle
+ tflite2circle.sh -o my/circles Add_000.tflite => convert Add_000.tflite into my/circles/Add_000.circle
+```
diff --git a/tools/nnpackage_tool/tflite2circle/tflite2circle.sh b/tools/nnpackage_tool/tflite2circle/tflite2circle.sh
new file mode 100644
index 000000000..259e57d83
--- /dev/null
+++ b/tools/nnpackage_tool/tflite2circle/tflite2circle.sh
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+set -u
+
+progname=$(basename "${BASH_SOURCE[0]}")
+script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+outdir="."
+flatc=${flatc:-"./build/externals/FLATBUFFERS/build/flatc"}
+tflite_schema=${tflite_schema:-"./externals/TENSORFLOW-1.12/tensorflow/contrib/lite/schema/schema.fbs"}
+circle_schema=${circle_schema:-"./nnpackage/schema/circle_schema.fbs"}
+
+usage() {
+ echo "Usage: $progname [options] tflite"
+ echo "Convert tflite to circle"
+ echo ""
+ echo "Returns"
+ echo " 0 success"
+ echo " non-zero failure"
+ echo ""
+ echo "Options:"
+ echo " -h show this help"
+ echo " -o set output directory (default=$outdir)"
+ echo ""
+ echo "Environment variables:"
+ echo " flatc path to flatc"
+ echo " (default=./build/externals/FLATBUFFERS/build/flatc)"
+ echo " tflite_schema path to tflite schema (i.e. schema.fbs)"
+ echo " (default=./externals/TENSORFLOW-1.12/tensorflow/contrib/lite/schema/schema.fbs)"
+ echo " circle_schema path to circle schema"
+ echo " (default=./nnpackage/schema/circle_schema.fbs)"
+ echo ""
+ echo "Examples:"
+ echo " $progname Add_000.tflite => convert Add_000.tflite into Add_000.circle"
+ echo " $progname -o my/circles Add_000 => convert Add_000.tflite into my/circles/Add_000.circle"
+ exit 1
+}
+
+if [ $# -eq 0 ]; then
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+while getopts "ho:" OPTION; do
+case "${OPTION}" in
+ h) usage;;
+ o) outdir=$OPTARG;;
+ ?) exit 1;;
+esac
+done
+
+shift $((OPTIND-1))
+
+if [ $# -ne 1 ]; then
+ echo "error: wrong argument (no argument or too many arguments)."
+ echo "For help, type $progname -h"
+ exit 1
+fi
+
+if [ ! -e Product ]; then
+ echo "error: please make sure to run this script in nnfw home."
+ exit 1
+fi
+
+tflite_base=$(basename "$1")
+name=${tflite_base%.*}
+
+# convert
+
+mkdir -p "${outdir}"
+${flatc} -o "${outdir}" --defaults-json --strict-json -t "${tflite_schema}" -- "$1"
+"${script_dir}/tflitejson2circlejson.py" "${outdir}/${name}.json" > "${outdir}/${name}.circle"
+${flatc} -o "${outdir}" -b "${circle_schema}" "${outdir}/${name}.circle"
+rm -f "${outdir}/${name}.json"
diff --git a/tools/nnpackage_tool/tflite2circle/tflitejson2circlejson.py b/tools/nnpackage_tool/tflite2circle/tflitejson2circlejson.py
new file mode 100644
index 000000000..c20a0c53e
--- /dev/null
+++ b/tools/nnpackage_tool/tflite2circle/tflitejson2circlejson.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python3
+
+import json
+import os
+import sys
+from collections import OrderedDict
+
+
+def usage():
+ script = os.path.basename(__file__)
+ print("Usage: {} path_to_tflite_in_json".format(script))
+ sys.exit(-1)
+
+
+if __name__ == '__main__':
+ if len(sys.argv) != 2:
+ usage()
+
+ json_path = sys.argv[1]
+ with open(json_path, "r") as f:
+ try:
+ json_dict = json.load(f, object_pairs_hook=OrderedDict)
+ for subgraph in json_dict["subgraphs"]:
+ subgraph["data_format"] = "CHANNELS_LAST"
+ print(json.dumps(json_dict, indent=2))
+ except KeyError:
+ print("subgraphs attribute does not exist.")
+ sys.exit(-2)
diff --git a/tools/opencl_tool/CMakeLists.txt b/tools/opencl_tool/CMakeLists.txt
index 66b92854c..f1fcc8702 100644
--- a/tools/opencl_tool/CMakeLists.txt
+++ b/tools/opencl_tool/CMakeLists.txt
@@ -4,9 +4,9 @@ endif(NOT ${TARGET_ARCH_BASE} STREQUAL "arm")
list(APPEND OPENCL_INFO_SOURCE "src/opencl_info.cc")
+nnfw_find_package(ARMCompute REQUIRED)
+
add_executable(opencl_info ${OPENCL_INFO_SOURCE})
-target_include_directories(opencl_info PUBLIC ${CMAKE_SOURCE_DIR}/externals/acl)
-target_include_directories(opencl_info PUBLIC ${CMAKE_SOURCE_DIR}/externals/acl/include)
target_link_libraries(opencl_info arm_compute)
install(TARGETS opencl_info DESTINATION bin)
diff --git a/tools/pbfile_tool/convert_pb_to_pbtxt.py b/tools/pbfile_tool/convert_pb_to_pbtxt.py
new file mode 100644
index 000000000..28a3da4c2
--- /dev/null
+++ b/tools/pbfile_tool/convert_pb_to_pbtxt.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# This tool converts a frozen TensorFlow .pb file to a .pbtxt text file
+#
+
+import os
+import argparse
+
+from tensorflow.python.platform import gfile
+import tensorflow as tf
+
+
+def convert(pb_path):
+
+ directory = os.path.dirname(pb_path)
+ filename = os.path.basename(pb_path)
+
+ with gfile.GFile(pb_path, 'rb') as f:
+ content = f.read()
+
+ graph_def = tf.GraphDef()
+ graph_def.ParseFromString(content)
+ tf.import_graph_def(graph_def, name='')
+
+ tf.train.write_graph(graph_def, directory, filename + '.pbtxt', as_text=True)
+
+ return os.path.join(directory, filename + '.pbtxt')
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(
+ description='convert pb (binary) file to pbtxt (text) file')
+ parser.add_argument("path", help="path of Tensorflow frozen model file in .pb format")
+
+ args = parser.parse_args()
+ pb_path = args.path
+
+ pbtxt_path = convert(pb_path)
+
+ print("converted: " + pbtxt_path)
diff --git a/tools/pbfile_tool/pb_info.py b/tools/pbfile_tool/pb_info.py
index 110b15e02..7add94fa8 100755
--- a/tools/pbfile_tool/pb_info.py
+++ b/tools/pbfile_tool/pb_info.py
@@ -16,7 +16,6 @@
import tensorflow as tf
from google.protobuf import text_format
-from tensorflow.python.platform import gfile
from tensorflow.python.tools import freeze_graph
from tensorflow.python.tools import optimize_for_inference_lib
@@ -40,7 +39,7 @@ def importGraphIntoSession(sess, filename):
assert sess
(_, _, ext) = splitDirFilenameExt(filename)
if (ext.lower() == 'pb'):
- with gfile.FastGFile(filename, 'rb') as f:
+ with tf.gfile.GFile(filename, 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
diff --git a/tools/tensorflow_model_freezer/base_freezer.py b/tools/tensorflow_model_freezer/base_freezer.py
index 96d011718..a365a7806 100644
--- a/tools/tensorflow_model_freezer/base_freezer.py
+++ b/tools/tensorflow_model_freezer/base_freezer.py
@@ -91,18 +91,17 @@ class BaseFreezer(object):
return tf_tensor
def saveRelatedFiles(self, sess, input_node_list, output_node_list, fn_prefix):
- ''' saves pb, pbtxt, chpt files and then freeze graph under top_node_name into directory '''
- ''' produce pb, pbtxt, and ckpt files '''
+ # saves pb, pbtxt, ckpt files and then freezes the graph under top_node_name into the directory
+ # produces pb, pbtxt, and ckpt files
(pb_path, pbtxt_path, checkpoint_path) = util.savePbAndCkpt(
sess, self.getOutputDirectory(), fn_prefix)
print("")
print("# 1. Created Tensorflow model files :\n\t-{}\n\t-{}\n\t-{}\n".format(
pb_path, pbtxt_path, checkpoint_path))
- '''
- produce frozen files
- include only nodes below softmax node. nodes for gradient descent (reduce_mean, GradientDescentOptimizer, ...) will not be included
- '''
+
+ # produce frozen files
+ # include only nodes below softmax node. nodes for gradient descent (reduce_mean, GradientDescentOptimizer, ...) will not be included
sess.close()
output_node_name = fn_prefix
@@ -115,27 +114,25 @@ class BaseFreezer(object):
self.generateTensorboardLog(pb_path, frozen_pb_path, fn_prefix)
print("")
- ''' generate tflite file. '''
- # manually put back imported modules. refer to https://github.com/tensorflow/tensorflow/issues/15410#issuecomment-352189481
- import tempfile
- import subprocess
- tf.contrib.lite.tempfile = tempfile
- tf.contrib.lite.subprocess = subprocess
+ # generate tflite file.
+ # code from https://www.tensorflow.org/lite/convert/python_api
tflite_path = os.path.join(self.getOutputDirectory(), fn_prefix + ".tflite")
+ try:
+ # sometime, name of node "input" contains "input:0". Let's remove ":0"
+ pure_name = lambda complex_name: complex_name.name.split(":")[0]
- tf.reset_default_graph()
- sess = tf.Session()
+ # returns a list of the pure names of the tensors in tensor_list
+ name_list = lambda tensor_list: list(map(pure_name, tensor_list))
- util.importGraphIntoSession(sess, frozen_pb_path, "")
- try:
- converter = tf.contrib.lite.TFLiteConverter.from_session(
- sess, input_node_list, output_node_list)
+ converter = tf.contrib.lite.TFLiteConverter.from_frozen_graph(
+ frozen_pb_path, name_list(input_node_list), name_list(output_node_list))
tflite_model = converter.convert()
open(tflite_path, "wb").write(tflite_model)
- print("# 3. TOCO : Created TFLITE file :\n\t-{}\n".format(tflite_path))
+ print("# 3. TensorFlow Lite Converter : converted PB file to {}\n".format(
+ tflite_path))
except Exception:
- print("# 3. TOCO failed\n")
+ print("# 3. TensorFlow Lite Converter failed\n")
print(traceback.format_exc())
return (pb_path, frozen_pb_path, tflite_path)
@@ -175,7 +172,8 @@ class BaseFreezer(object):
# build model
(input_node_list, output_node_list) = self.buildModel(
sess, test_cases.get(tc_name), tc_name)
- ''' Now, save to proto buffer format and checkpoint '''
+
+ # Now, save to proto buffer format and checkpoint
(pb_path, frozen_pb_path, tflite_path) = self.saveRelatedFiles(
sess, input_node_list, output_node_list, tc_name)
@@ -195,7 +193,8 @@ class BaseFreezer(object):
# build model
(input_node_list, output_node_list) = self.buildModel(
sess, test_cases.get(tc_name), tc_name)
- ''' Now, save to proto buffer format and checkpoint '''
+
+ # Now, save to proto buffer format and checkpoint
(pb_path, frozen_pb_path, tflite_path) = self.saveRelatedFiles(
sess, input_node_list, output_node_list, tc_name)
diff --git a/tools/tensorflow_model_freezer/model_freezer_util.py b/tools/tensorflow_model_freezer/model_freezer_util.py
index 3b847f043..3fdbba785 100644
--- a/tools/tensorflow_model_freezer/model_freezer_util.py
+++ b/tools/tensorflow_model_freezer/model_freezer_util.py
@@ -92,8 +92,6 @@ def convertPbtxt2Pb(pbtxtPath):
def convertPb2Pbtxt(pbPath):
''' convert pb file to pbtxt file. e.g., /tmp/a.pb --> /tmp/a.pbtxt '''
- from tensorflow.python.platform import gfile
-
(directory, filename, ext) = splitDirFilenameExt(pbPath)
with gfile.FastGFile(pbPath, 'rb') as f:
diff --git a/tools/tensorflow_model_freezer/sample/ARGMAX_gen.py b/tools/tensorflow_model_freezer/sample/ARGMAX_gen.py
index 68e2262c0..0fa019a2a 100755
--- a/tools/tensorflow_model_freezer/sample/ARGMAX_gen.py
+++ b/tools/tensorflow_model_freezer/sample/ARGMAX_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/ARGMIN_gen.py b/tools/tensorflow_model_freezer/sample/ARGMIN_gen.py
index 68b399234..9406d4255 100755
--- a/tools/tensorflow_model_freezer/sample/ARGMIN_gen.py
+++ b/tools/tensorflow_model_freezer/sample/ARGMIN_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/DIV_gen.py b/tools/tensorflow_model_freezer/sample/DIV_gen.py
index d1b794cd7..f4f319f73 100755
--- a/tools/tensorflow_model_freezer/sample/DIV_gen.py
+++ b/tools/tensorflow_model_freezer/sample/DIV_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/LOGICAL_AND_gen.py b/tools/tensorflow_model_freezer/sample/LOGICAL_AND_gen.py
index 912af65b2..fdd4c45a7 100755
--- a/tools/tensorflow_model_freezer/sample/LOGICAL_AND_gen.py
+++ b/tools/tensorflow_model_freezer/sample/LOGICAL_AND_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/LOGICAL_NOT_gen.py b/tools/tensorflow_model_freezer/sample/LOGICAL_NOT_gen.py
index 34c0994ea..297a5aca2 100755
--- a/tools/tensorflow_model_freezer/sample/LOGICAL_NOT_gen.py
+++ b/tools/tensorflow_model_freezer/sample/LOGICAL_NOT_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/LOGICAL_OR_gen.py b/tools/tensorflow_model_freezer/sample/LOGICAL_OR_gen.py
index 714a52e8d..70b897989 100755
--- a/tools/tensorflow_model_freezer/sample/LOGICAL_OR_gen.py
+++ b/tools/tensorflow_model_freezer/sample/LOGICAL_OR_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/MUL_gen.py b/tools/tensorflow_model_freezer/sample/MUL_gen.py
index 596898dbb..0c7056b82 100755
--- a/tools/tensorflow_model_freezer/sample/MUL_gen.py
+++ b/tools/tensorflow_model_freezer/sample/MUL_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/Operation_gen.py b/tools/tensorflow_model_freezer/sample/Operation_gen.py
index be4d74b2c..3a810e53d 100644
--- a/tools/tensorflow_model_freezer/sample/Operation_gen.py
+++ b/tools/tensorflow_model_freezer/sample/Operation_gen.py
@@ -27,7 +27,7 @@ import model_freezer_util as util
# see MUL_gen.py for details usage and sample
class GenFloor(base.BaseFreezer):
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(GenFloor, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
@@ -66,7 +66,7 @@ class GenFloor(base.BaseFreezer):
class GenPad(base.BaseFreezer):
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(GenPad, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
@@ -111,7 +111,7 @@ class GenPad(base.BaseFreezer):
class GenSqueeze(base.BaseFreezer):
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(GenSqueeze, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
@@ -150,7 +150,7 @@ class GenSqueeze(base.BaseFreezer):
class GenTranspose(base.BaseFreezer):
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(GenTranspose, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
@@ -187,13 +187,11 @@ class GenTranspose(base.BaseFreezer):
return (input_list, [output_node])
-'''
-How to run
-$ chmod +x tools/tensorflow_model_freezer/sample/name_of_this_file.py
-$ PYTHONPATH=$PYTHONPATH:./tools/tensorflow_model_freezer/ \
- tools/tensorflow_model_freezer/sample/name_of_this_file.py \
- ~/temp # directory where model files are saved
-'''
+# How to run
+# $ chmod +x tools/tensorflow_model_freezer/sample/name_of_this_file.py
+# $ PYTHONPATH=$PYTHONPATH:./tools/tensorflow_model_freezer/ \
+# tools/tensorflow_model_freezer/sample/name_of_this_file.py \
+# ~/temp # directory where model files are saved
# --------
if __name__ == "__main__":
diff --git a/tools/tensorflow_model_freezer/sample/SQUEEZE_gen.py b/tools/tensorflow_model_freezer/sample/SQUEEZE_gen.py
index 12fb5122e..a4d802ec5 100755
--- a/tools/tensorflow_model_freezer/sample/SQUEEZE_gen.py
+++ b/tools/tensorflow_model_freezer/sample/SQUEEZE_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/STACK_gen.py b/tools/tensorflow_model_freezer/sample/STACK_gen.py
index 2bea40698..3eee6240c 100755
--- a/tools/tensorflow_model_freezer/sample/STACK_gen.py
+++ b/tools/tensorflow_model_freezer/sample/STACK_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/TOPK_gen.py b/tools/tensorflow_model_freezer/sample/TOPK_gen.py
index 8f1882bd1..27b6f60a8 100755
--- a/tools/tensorflow_model_freezer/sample/TOPK_gen.py
+++ b/tools/tensorflow_model_freezer/sample/TOPK_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tensorflow_model_freezer/sample/UNSTACK_gen.py b/tools/tensorflow_model_freezer/sample/UNSTACK_gen.py
index c5bce0d18..3cee7459f 100644
--- a/tools/tensorflow_model_freezer/sample/UNSTACK_gen.py
+++ b/tools/tensorflow_model_freezer/sample/UNSTACK_gen.py
@@ -30,7 +30,7 @@ class Gen(base.BaseFreezer):
'''
def __init__(self, path):
- super(self.__class__, self).__init__(path)
+ super(Gen, self).__init__(path)
def getOutputDirectory(self):
return os.path.join(self.root_output_path,
diff --git a/tools/tflite_accuracy/CMakeLists.txt b/tools/tflite_accuracy/CMakeLists.txt
new file mode 100644
index 000000000..b545ac6bd
--- /dev/null
+++ b/tools/tflite_accuracy/CMakeLists.txt
@@ -0,0 +1,13 @@
+if(NOT BUILD_TFLITE_ACCURACY)
+ return()
+endif(NOT BUILD_TFLITE_ACCURACY)
+
+list(APPEND TFLITE_ACCURACY_SRCS "src/tflite_accuracy.cc")
+
+add_executable(tflite_accuracy ${TFLITE_ACCURACY_SRCS})
+target_include_directories(tflite_accuracy PRIVATE src)
+target_link_libraries(tflite_accuracy tensorflow-lite ${LIB_PTHREAD} dl nnfw_lib_tflite)
+target_link_libraries(tflite_accuracy boost_program_options boost_system boost_filesystem)
+
+install(TARGETS tflite_accuracy DESTINATION bin)
+
diff --git a/tools/tflite_accuracy/README.md b/tools/tflite_accuracy/README.md
new file mode 100644
index 000000000..22804e140
--- /dev/null
+++ b/tools/tflite_accuracy/README.md
@@ -0,0 +1,37 @@
+Using the batch execution tool
+==============================
+
+The batch execution tool (`tflite_accuracy`) can be used to run experiments
+where execution time and accuracy are to be measured on a test set.
+`tflite_accuracy` reads a neural network model from a file and a series of
+input images from a directory, runs each image through the network,
+and collects statistics such as execution time and accuracy.
+
+In order to run this tool, you'll need:
+
+* a model in `.tflite` format;
+* a set of preprocessed input images in binary format, properly named
+(see below).
+
+`tflite_accuracy` expects all the input images to be located in the same directory
+in the file system. Each image file is the binary dump of the network's
+input tensor. So, if the network's input tensor is a `float32` tensor of
+format (1, 224, 224, 3) containing 1 image of height 224, width 224, and
+3 channels, each image file is expected to be a series of 224 * 224 * 3
+`float32` values.
+
+`tflite_accuracy` does **not** perform any preprocessing on the input tensor
+(e.g., subtraction of mean or division by standard deviation). Each image
+file is treated as the final value of the input tensor, so all the
+necessary preprocessing should be done prior to invoking the tool.
+
+In order to calculate accuracy on the image set, `tflite_accuracy` needs to know
+the correct label corresponding to each image. This information is
+extracted from the file's name: the first four characters in the name are
+assumed to be the numerical code of the image's class. So, a file named
+`0123_0123456789.bin` is assumed to represent an image belonging to class
+`123`. The remainder of the name (`0123456789` in the example) is assumed
+to be an identifier of the image itself.
+
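+As an illustration, a minimal sketch (assuming NumPy and an already
+preprocessed image array) that writes a correctly shaped and named input
+file for class 123 might be:
+
+```
+import numpy as np
+
+# stand-in for a real preprocessed image of shape (1, 224, 224, 3)
+img = np.zeros((1, 224, 224, 3), dtype=np.float32)
+class_id, image_id = 123, "0123456789"  # hypothetical identifiers
+img.tofile("{:04d}_{}.bin".format(class_id, image_id))  # -> 0123_0123456789.bin
+```
+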
+The width and height of each image can be specified via the command-line
+argument `--imgsize`, whose default value is 224.
\ No newline at end of file
diff --git a/tools/tflite_accuracy/src/labels.h b/tools/tflite_accuracy/src/labels.h
new file mode 100644
index 000000000..1e5170e06
--- /dev/null
+++ b/tools/tflite_accuracy/src/labels.h
@@ -0,0 +1,1023 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LABELS_H_
+#define LABELS_H_
+
+// Labels used for image classification (imagenet dataset)
+static const char *labels[] = {"background",
+ "tench",
+ "goldfish",
+ "great white shark",
+ "tiger shark",
+ "hammerhead",
+ "electric ray",
+ "stingray",
+ "cock",
+ "hen",
+ "ostrich",
+ "brambling",
+ "goldfinch",
+ "house finch",
+ "junco",
+ "indigo bunting",
+ "robin",
+ "bulbul",
+ "jay",
+ "magpie",
+ "chickadee",
+ "water ouzel",
+ "kite",
+ "bald eagle",
+ "vulture",
+ "great grey owl",
+ "European fire salamander",
+ "common newt",
+ "eft",
+ "spotted salamander",
+ "axolotl",
+ "bullfrog",
+ "tree frog",
+ "tailed frog",
+ "loggerhead",
+ "leatherback turtle",
+ "mud turtle",
+ "terrapin",
+ "box turtle",
+ "banded gecko",
+ "common iguana",
+ "American chameleon",
+ "whiptail",
+ "agama",
+ "frilled lizard",
+ "alligator lizard",
+ "Gila monster",
+ "green lizard",
+ "African chameleon",
+ "Komodo dragon",
+ "African crocodile",
+ "American alligator",
+ "triceratops",
+ "thunder snake",
+ "ringneck snake",
+ "hognose snake",
+ "green snake",
+ "king snake",
+ "garter snake",
+ "water snake",
+ "vine snake",
+ "night snake",
+ "boa constrictor",
+ "rock python",
+ "Indian cobra",
+ "green mamba",
+ "sea snake",
+ "horned viper",
+ "diamondback",
+ "sidewinder",
+ "trilobite",
+ "harvestman",
+ "scorpion",
+ "black and gold garden spider",
+ "barn spider",
+ "garden spider",
+ "black widow",
+ "tarantula",
+ "wolf spider",
+ "tick",
+ "centipede",
+ "black grouse",
+ "ptarmigan",
+ "ruffed grouse",
+ "prairie chicken",
+ "peacock",
+ "quail",
+ "partridge",
+ "African grey",
+ "macaw",
+ "sulphur-crested cockatoo",
+ "lorikeet",
+ "coucal",
+ "bee eater",
+ "hornbill",
+ "hummingbird",
+ "jacamar",
+ "toucan",
+ "drake",
+ "red-breasted merganser",
+ "goose",
+ "black swan",
+ "tusker",
+ "echidna",
+ "platypus",
+ "wallaby",
+ "koala",
+ "wombat",
+ "jellyfish",
+ "sea anemone",
+ "brain coral",
+ "flatworm",
+ "nematode",
+ "conch",
+ "snail",
+ "slug",
+ "sea slug",
+ "chiton",
+ "chambered nautilus",
+ "Dungeness crab",
+ "rock crab",
+ "fiddler crab",
+ "king crab",
+ "American lobster",
+ "spiny lobster",
+ "crayfish",
+ "hermit crab",
+ "isopod",
+ "white stork",
+ "black stork",
+ "spoonbill",
+ "flamingo",
+ "little blue heron",
+ "American egret",
+ "bittern",
+ "crane",
+ "limpkin",
+ "European gallinule",
+ "American coot",
+ "bustard",
+ "ruddy turnstone",
+ "red-backed sandpiper",
+ "redshank",
+ "dowitcher",
+ "oystercatcher",
+ "pelican",
+ "king penguin",
+ "albatross",
+ "grey whale",
+ "killer whale",
+ "dugong",
+ "sea lion",
+ "Chihuahua",
+ "Japanese spaniel",
+ "Maltese dog",
+ "Pekinese",
+ "Shih-Tzu",
+ "Blenheim spaniel",
+ "papillon",
+ "toy terrier",
+ "Rhodesian ridgeback",
+ "Afghan hound",
+ "basset",
+ "beagle",
+ "bloodhound",
+ "bluetick",
+ "black-and-tan coonhound",
+ "Walker hound",
+ "English foxhound",
+ "redbone",
+ "borzoi",
+ "Irish wolfhound",
+ "Italian greyhound",
+ "whippet",
+ "Ibizan hound",
+ "Norwegian elkhound",
+ "otterhound",
+ "Saluki",
+ "Scottish deerhound",
+ "Weimaraner",
+ "Staffordshire bullterrier",
+ "American Staffordshire terrier",
+ "Bedlington terrier",
+ "Border terrier",
+ "Kerry blue terrier",
+ "Irish terrier",
+ "Norfolk terrier",
+ "Norwich terrier",
+ "Yorkshire terrier",
+ "wire-haired fox terrier",
+ "Lakeland terrier",
+ "Sealyham terrier",
+ "Airedale",
+ "cairn",
+ "Australian terrier",
+ "Dandie Dinmont",
+ "Boston bull",
+ "miniature schnauzer",
+ "giant schnauzer",
+ "standard schnauzer",
+ "Scotch terrier",
+ "Tibetan terrier",
+ "silky terrier",
+ "soft-coated wheaten terrier",
+ "West Highland white terrier",
+ "Lhasa",
+ "flat-coated retriever",
+ "curly-coated retriever",
+ "golden retriever",
+ "Labrador retriever",
+ "Chesapeake Bay retriever",
+ "German short-haired pointer",
+ "vizsla",
+ "English setter",
+ "Irish setter",
+ "Gordon setter",
+ "Brittany spaniel",
+ "clumber",
+ "English springer",
+ "Welsh springer spaniel",
+ "cocker spaniel",
+ "Sussex spaniel",
+ "Irish water spaniel",
+ "kuvasz",
+ "schipperke",
+ "groenendael",
+ "malinois",
+ "briard",
+ "kelpie",
+ "komondor",
+ "Old English sheepdog",
+ "Shetland sheepdog",
+ "collie",
+ "Border collie",
+ "Bouvier des Flandres",
+ "Rottweiler",
+ "German shepherd",
+ "Doberman",
+ "miniature pinscher",
+ "Greater Swiss Mountain dog",
+ "Bernese mountain dog",
+ "Appenzeller",
+ "EntleBucher",
+ "boxer",
+ "bull mastiff",
+ "Tibetan mastiff",
+ "French bulldog",
+ "Great Dane",
+ "Saint Bernard",
+ "Eskimo dog",
+ "malamute",
+ "Siberian husky",
+ "dalmatian",
+ "affenpinscher",
+ "basenji",
+ "pug",
+ "Leonberg",
+ "Newfoundland",
+ "Great Pyrenees",
+ "Samoyed",
+ "Pomeranian",
+ "chow",
+ "keeshond",
+ "Brabancon griffon",
+ "Pembroke",
+ "Cardigan",
+ "toy poodle",
+ "miniature poodle",
+ "standard poodle",
+ "Mexican hairless",
+ "timber wolf",
+ "white wolf",
+ "red wolf",
+ "coyote",
+ "dingo",
+ "dhole",
+ "African hunting dog",
+ "hyena",
+ "red fox",
+ "kit fox",
+ "Arctic fox",
+ "grey fox",
+ "tabby",
+ "tiger cat",
+ "Persian cat",
+ "Siamese cat",
+ "Egyptian cat",
+ "cougar",
+ "lynx",
+ "leopard",
+ "snow leopard",
+ "jaguar",
+ "lion",
+ "tiger",
+ "cheetah",
+ "brown bear",
+ "American black bear",
+ "ice bear",
+ "sloth bear",
+ "mongoose",
+ "meerkat",
+ "tiger beetle",
+ "ladybug",
+ "ground beetle",
+ "long-horned beetle",
+ "leaf beetle",
+ "dung beetle",
+ "rhinoceros beetle",
+ "weevil",
+ "fly",
+ "bee",
+ "ant",
+ "grasshopper",
+ "cricket",
+ "walking stick",
+ "cockroach",
+ "mantis",
+ "cicada",
+ "leafhopper",
+ "lacewing",
+ "dragonfly",
+ "damselfly",
+ "admiral",
+ "ringlet",
+ "monarch",
+ "cabbage butterfly",
+ "sulphur butterfly",
+ "lycaenid",
+ "starfish",
+ "sea urchin",
+ "sea cucumber",
+ "wood rabbit",
+ "hare",
+ "Angora",
+ "hamster",
+ "porcupine",
+ "fox squirrel",
+ "marmot",
+ "beaver",
+ "guinea pig",
+ "sorrel",
+ "zebra",
+ "hog",
+ "wild boar",
+ "warthog",
+ "hippopotamus",
+ "ox",
+ "water buffalo",
+ "bison",
+ "ram",
+ "bighorn",
+ "ibex",
+ "hartebeest",
+ "impala",
+ "gazelle",
+ "Arabian camel",
+ "llama",
+ "weasel",
+ "mink",
+ "polecat",
+ "black-footed ferret",
+ "otter",
+ "skunk",
+ "badger",
+ "armadillo",
+ "three-toed sloth",
+ "orangutan",
+ "gorilla",
+ "chimpanzee",
+ "gibbon",
+ "siamang",
+ "guenon",
+ "patas",
+ "baboon",
+ "macaque",
+ "langur",
+ "colobus",
+ "proboscis monkey",
+ "marmoset",
+ "capuchin",
+ "howler monkey",
+ "titi",
+ "spider monkey",
+ "squirrel monkey",
+ "Madagascar cat",
+ "indri",
+ "Indian elephant",
+ "African elephant",
+ "lesser panda",
+ "giant panda",
+ "barracouta",
+ "eel",
+ "coho",
+ "rock beauty",
+ "anemone fish",
+ "sturgeon",
+ "gar",
+ "lionfish",
+ "puffer",
+ "abacus",
+ "abaya",
+ "academic gown",
+ "accordion",
+ "acoustic guitar",
+ "aircraft carrier",
+ "airliner",
+ "airship",
+ "altar",
+ "ambulance",
+ "amphibian",
+ "analog clock",
+ "apiary",
+ "apron",
+ "ashcan",
+ "assault rifle",
+ "backpack",
+ "bakery",
+ "balance beam",
+ "balloon",
+ "ballpoint",
+ "Band Aid",
+ "banjo",
+ "bannister",
+ "barbell",
+ "barber chair",
+ "barbershop",
+ "barn",
+ "barometer",
+ "barrel",
+ "barrow",
+ "baseball",
+ "basketball",
+ "bassinet",
+ "bassoon",
+ "bathing cap",
+ "bath towel",
+ "bathtub",
+ "beach wagon",
+ "beacon",
+ "beaker",
+ "bearskin",
+ "beer bottle",
+ "beer glass",
+ "bell cote",
+ "bib",
+ "bicycle-built-for-two",
+ "bikini",
+ "binder",
+ "binoculars",
+ "birdhouse",
+ "boathouse",
+ "bobsled",
+ "bolo tie",
+ "bonnet",
+ "bookcase",
+ "bookshop",
+ "bottlecap",
+ "bow",
+ "bow tie",
+ "brass",
+ "brassiere",
+ "breakwater",
+ "breastplate",
+ "broom",
+ "bucket",
+ "buckle",
+ "bulletproof vest",
+ "bullet train",
+ "butcher shop",
+ "cab",
+ "caldron",
+ "candle",
+ "cannon",
+ "canoe",
+ "can opener",
+ "cardigan",
+ "car mirror",
+ "carousel",
+ "carpenter's kit",
+ "carton",
+ "car wheel",
+ "cash machine",
+ "cassette",
+ "cassette player",
+ "castle",
+ "catamaran",
+ "CD player",
+ "cello",
+ "cellular telephone",
+ "chain",
+ "chainlink fence",
+ "chain mail",
+ "chain saw",
+ "chest",
+ "chiffonier",
+ "chime",
+ "china cabinet",
+ "Christmas stocking",
+ "church",
+ "cinema",
+ "cleaver",
+ "cliff dwelling",
+ "cloak",
+ "clog",
+ "cocktail shaker",
+ "coffee mug",
+ "coffeepot",
+ "coil",
+ "combination lock",
+ "computer keyboard",
+ "confectionery",
+ "container ship",
+ "convertible",
+ "corkscrew",
+ "cornet",
+ "cowboy boot",
+ "cowboy hat",
+ "cradle",
+ "crane",
+ "crash helmet",
+ "crate",
+ "crib",
+ "Crock Pot",
+ "croquet ball",
+ "crutch",
+ "cuirass",
+ "dam",
+ "desk",
+ "desktop computer",
+ "dial telephone",
+ "diaper",
+ "digital clock",
+ "digital watch",
+ "dining table",
+ "dishrag",
+ "dishwasher",
+ "disk brake",
+ "dock",
+ "dogsled",
+ "dome",
+ "doormat",
+ "drilling platform",
+ "drum",
+ "drumstick",
+ "dumbbell",
+ "Dutch oven",
+ "electric fan",
+ "electric guitar",
+ "electric locomotive",
+ "entertainment center",
+ "envelope",
+ "espresso maker",
+ "face powder",
+ "feather boa",
+ "file",
+ "fireboat",
+ "fire engine",
+ "fire screen",
+ "flagpole",
+ "flute",
+ "folding chair",
+ "football helmet",
+ "forklift",
+ "fountain",
+ "fountain pen",
+ "four-poster",
+ "freight car",
+ "French horn",
+ "frying pan",
+ "fur coat",
+ "garbage truck",
+ "gasmask",
+ "gas pump",
+ "goblet",
+ "go-kart",
+ "golf ball",
+ "golfcart",
+ "gondola",
+ "gong",
+ "gown",
+ "grand piano",
+ "greenhouse",
+ "grille",
+ "grocery store",
+ "guillotine",
+ "hair slide",
+ "hair spray",
+ "half track",
+ "hammer",
+ "hamper",
+ "hand blower",
+ "hand-held computer",
+ "handkerchief",
+ "hard disc",
+ "harmonica",
+ "harp",
+ "harvester",
+ "hatchet",
+ "holster",
+ "home theater",
+ "honeycomb",
+ "hook",
+ "hoopskirt",
+ "horizontal bar",
+ "horse cart",
+ "hourglass",
+ "iPod",
+ "iron",
+ "jack-o'-lantern",
+ "jean",
+ "jeep",
+ "jersey",
+ "jigsaw puzzle",
+ "jinrikisha",
+ "joystick",
+ "kimono",
+ "knee pad",
+ "knot",
+ "lab coat",
+ "ladle",
+ "lampshade",
+ "laptop",
+ "lawn mower",
+ "lens cap",
+ "letter opener",
+ "library",
+ "lifeboat",
+ "lighter",
+ "limousine",
+ "liner",
+ "lipstick",
+ "Loafer",
+ "lotion",
+ "loudspeaker",
+ "loupe",
+ "lumbermill",
+ "magnetic compass",
+ "mailbag",
+ "mailbox",
+ "maillot",
+ "maillot",
+ "manhole cover",
+ "maraca",
+ "marimba",
+ "mask",
+ "matchstick",
+ "maypole",
+ "maze",
+ "measuring cup",
+ "medicine chest",
+ "megalith",
+ "microphone",
+ "microwave",
+ "military uniform",
+ "milk can",
+ "minibus",
+ "miniskirt",
+ "minivan",
+ "missile",
+ "mitten",
+ "mixing bowl",
+ "mobile home",
+ "Model T",
+ "modem",
+ "monastery",
+ "monitor",
+ "moped",
+ "mortar",
+ "mortarboard",
+ "mosque",
+ "mosquito net",
+ "motor scooter",
+ "mountain bike",
+ "mountain tent",
+ "mouse",
+ "mousetrap",
+ "moving van",
+ "muzzle",
+ "nail",
+ "neck brace",
+ "necklace",
+ "nipple",
+ "notebook",
+ "obelisk",
+ "oboe",
+ "ocarina",
+ "odometer",
+ "oil filter",
+ "organ",
+ "oscilloscope",
+ "overskirt",
+ "oxcart",
+ "oxygen mask",
+ "packet",
+ "paddle",
+ "paddlewheel",
+ "padlock",
+ "paintbrush",
+ "pajama",
+ "palace",
+ "panpipe",
+ "paper towel",
+ "parachute",
+ "parallel bars",
+ "park bench",
+ "parking meter",
+ "passenger car",
+ "patio",
+ "pay-phone",
+ "pedestal",
+ "pencil box",
+ "pencil sharpener",
+ "perfume",
+ "Petri dish",
+ "photocopier",
+ "pick",
+ "pickelhaube",
+ "picket fence",
+ "pickup",
+ "pier",
+ "piggy bank",
+ "pill bottle",
+ "pillow",
+ "ping-pong ball",
+ "pinwheel",
+ "pirate",
+ "pitcher",
+ "plane",
+ "planetarium",
+ "plastic bag",
+ "plate rack",
+ "plow",
+ "plunger",
+ "Polaroid camera",
+ "pole",
+ "police van",
+ "poncho",
+ "pool table",
+ "pop bottle",
+ "pot",
+ "potter's wheel",
+ "power drill",
+ "prayer rug",
+ "printer",
+ "prison",
+ "projectile",
+ "projector",
+ "puck",
+ "punching bag",
+ "purse",
+ "quill",
+ "quilt",
+ "racer",
+ "racket",
+ "radiator",
+ "radio",
+ "radio telescope",
+ "rain barrel",
+ "recreational vehicle",
+ "reel",
+ "reflex camera",
+ "refrigerator",
+ "remote control",
+ "restaurant",
+ "revolver",
+ "rifle",
+ "rocking chair",
+ "rotisserie",
+ "rubber eraser",
+ "rugby ball",
+ "rule",
+ "running shoe",
+ "safe",
+ "safety pin",
+ "saltshaker",
+ "sandal",
+ "sarong",
+ "sax",
+ "scabbard",
+ "scale",
+ "school bus",
+ "schooner",
+ "scoreboard",
+ "screen",
+ "screw",
+ "screwdriver",
+ "seat belt",
+ "sewing machine",
+ "shield",
+ "shoe shop",
+ "shoji",
+ "shopping basket",
+ "shopping cart",
+ "shovel",
+ "shower cap",
+ "shower curtain",
+ "ski",
+ "ski mask",
+ "sleeping bag",
+ "slide rule",
+ "sliding door",
+ "slot",
+ "snorkel",
+ "snowmobile",
+ "snowplow",
+ "soap dispenser",
+ "soccer ball",
+ "sock",
+ "solar dish",
+ "sombrero",
+ "soup bowl",
+ "space bar",
+ "space heater",
+ "space shuttle",
+ "spatula",
+ "speedboat",
+ "spider web",
+ "spindle",
+ "sports car",
+ "spotlight",
+ "stage",
+ "steam locomotive",
+ "steel arch bridge",
+ "steel drum",
+ "stethoscope",
+ "stole",
+ "stone wall",
+ "stopwatch",
+ "stove",
+ "strainer",
+ "streetcar",
+ "stretcher",
+ "studio couch",
+ "stupa",
+ "submarine",
+ "suit",
+ "sundial",
+ "sunglass",
+ "sunglasses",
+ "sunscreen",
+ "suspension bridge",
+ "swab",
+ "sweatshirt",
+ "swimming trunks",
+ "swing",
+ "switch",
+ "syringe",
+ "table lamp",
+ "tank",
+ "tape player",
+ "teapot",
+ "teddy",
+ "television",
+ "tennis ball",
+ "thatch",
+ "theater curtain",
+ "thimble",
+ "thresher",
+ "throne",
+ "tile roof",
+ "toaster",
+ "tobacco shop",
+ "toilet seat",
+ "torch",
+ "totem pole",
+ "tow truck",
+ "toyshop",
+ "tractor",
+ "trailer truck",
+ "tray",
+ "trench coat",
+ "tricycle",
+ "trimaran",
+ "tripod",
+ "triumphal arch",
+ "trolleybus",
+ "trombone",
+ "tub",
+ "turnstile",
+ "typewriter keyboard",
+ "umbrella",
+ "unicycle",
+ "upright",
+ "vacuum",
+ "vase",
+ "vault",
+ "velvet",
+ "vending machine",
+ "vestment",
+ "viaduct",
+ "violin",
+ "volleyball",
+ "waffle iron",
+ "wall clock",
+ "wallet",
+ "wardrobe",
+ "warplane",
+ "washbasin",
+ "washer",
+ "water bottle",
+ "water jug",
+ "water tower",
+ "whiskey jug",
+ "whistle",
+ "wig",
+ "window screen",
+ "window shade",
+ "Windsor tie",
+ "wine bottle",
+ "wing",
+ "wok",
+ "wooden spoon",
+ "wool",
+ "worm fence",
+ "wreck",
+ "yawl",
+ "yurt",
+ "web site",
+ "comic book",
+ "crossword puzzle",
+ "street sign",
+ "traffic light",
+ "book jacket",
+ "menu",
+ "plate",
+ "guacamole",
+ "consomme",
+ "hot pot",
+ "trifle",
+ "ice cream",
+ "ice lolly",
+ "French loaf",
+ "bagel",
+ "pretzel",
+ "cheeseburger",
+ "hotdog",
+ "mashed potato",
+ "head cabbage",
+ "broccoli",
+ "cauliflower",
+ "zucchini",
+ "spaghetti squash",
+ "acorn squash",
+ "butternut squash",
+ "cucumber",
+ "artichoke",
+ "bell pepper",
+ "cardoon",
+ "mushroom",
+ "Granny Smith",
+ "strawberry",
+ "orange",
+ "lemon",
+ "fig",
+ "pineapple",
+ "banana",
+ "jackfruit",
+ "custard apple",
+ "pomegranate",
+ "hay",
+ "carbonara",
+ "chocolate sauce",
+ "dough",
+ "meat loaf",
+ "pizza",
+ "potpie",
+ "burrito",
+ "red wine",
+ "espresso",
+ "cup",
+ "eggnog",
+ "alp",
+ "bubble",
+ "cliff",
+ "coral reef",
+ "geyser",
+ "lakeside",
+ "promontory",
+ "sandbar",
+ "seashore",
+ "valley",
+ "volcano",
+ "ballplayer",
+ "groom",
+ "scuba diver",
+ "rapeseed",
+ "daisy",
+ "yellow lady's slipper",
+ "corn",
+ "acorn",
+ "hip",
+ "buckeye",
+ "coral fungus",
+ "agaric",
+ "gyromitra",
+ "stinkhorn",
+ "earthstar",
+ "hen-of-the-woods",
+ "bolete",
+ "ear",
+ "toilet tissue"};
+
+#endif /* LABELS_H_ */
diff --git a/tools/tflite_accuracy/src/tflite_accuracy.cc b/tools/tflite_accuracy/src/tflite_accuracy.cc
new file mode 100644
index 000000000..3b7d21e82
--- /dev/null
+++ b/tools/tflite_accuracy/src/tflite_accuracy.cc
@@ -0,0 +1,494 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <atomic>
+#include <chrono>
+#include <forward_list>
+#include <fstream>
+#include <iostream>
+#include <map>
+#include <memory>
+#include <numeric>
+#include <stdexcept>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include <boost/filesystem.hpp>
+#include <boost/format.hpp>
+#include <boost/lexical_cast.hpp>
+#include <boost/program_options.hpp>
+
+#include <cmath>
+#include <cstdint>
+#include <signal.h>
+
+#include <tensorflow/lite/context.h>
+#include <tensorflow/lite/interpreter.h>
+#include <tensorflow/lite/model.h>
+
+#include "labels.h"
+#include "tflite/ext/nnapi_delegate.h"
+#include "tflite/ext/kernels/register.h"
+
+const std::string kDefaultImagesDir = "res/input/";
+const std::string kDefaultModelFile = "res/model.tflite";
+
+template <typename... Args> void Print(const char *fmt, Args... args)
+{
+#if __cplusplus >= 201703L
+ std::cerr << boost::str(boost::format(fmt) % ... % std::forward<Args>(args)) << std::endl;
+#else
+ boost::format f(fmt);
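+ // Pre-C++17 fallback: expand the parameter pack via array-initializer side effects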
+ using unroll = int[];
+ unroll{0, (f % std::forward<Args>(args), 0)...};
+ std::cerr << boost::str(f) << std::endl;
+#endif
+}
+
+template <typename DataType> struct BaseLabelData
+{
+ explicit BaseLabelData(int label = -1, DataType confidence = 0)
+ : label(label), confidence(confidence)
+ {
+ }
+
+ static std::vector<BaseLabelData<DataType>> FindLabels(const DataType *output_tensor,
+ unsigned int top_n = 5)
+ {
+ top_n = top_n > 1000 ? 1000 : top_n;
+ size_t n = 0;
+ std::vector<size_t> indices(1000);
+ std::generate(indices.begin(), indices.end(), [&n]() { return n++; });
+ std::sort(indices.begin(), indices.end(), [output_tensor](const size_t &i1, const size_t &i2) {
+ return output_tensor[i1] > output_tensor[i2];
+ });
+ std::vector<BaseLabelData<DataType>> results(top_n);
+ for (unsigned int i = 0; i < top_n; ++i)
+ {
+ results[i].label = indices[i];
+ results[i].confidence = output_tensor[indices[i]];
+ }
+ return results;
+ }
+
+ int label;
+ DataType confidence;
+};
+
+class BaseRunner
+{
+public:
+ virtual ~BaseRunner() = default;
+
+ /**
+ * @brief Run a model for each file in a directory, and collect and print
+ * statistics.
+ */
+ virtual void IterateInDirectory(const std::string &dir_path, const int labels_offset) = 0;
+
+ /**
+ * @brief Request that the iteration be stopped after the current file.
+ */
+ virtual void ScheduleInterruption() = 0;
+};
+
+template <typename DataType_> class Runner : public BaseRunner
+{
+public:
+ using DataType = DataType_;
+ using LabelData = BaseLabelData<DataType>;
+
+ const int kInputSize;
+ const int kOutputSize = 1001 * sizeof(DataType);
+
+ Runner(std::unique_ptr<tflite::Interpreter> interpreter,
+ std::unique_ptr<tflite::FlatBufferModel> model,
+ std::unique_ptr<::nnfw::tflite::NNAPIDelegate> delegate, unsigned img_size)
+ : interpreter(std::move(interpreter)), model(std::move(model)), delegate(std::move(delegate)),
+ interrupted(false), kInputSize(1 * img_size * img_size * 3 * sizeof(DataType))
+ {
+ inference_times.reserve(500);
+ top1.reserve(500);
+ top5.reserve(500);
+ }
+
+ virtual ~Runner() = default;
+
+ /**
+ * @brief Get the model's input tensor.
+ */
+ virtual DataType *GetInputTensor() = 0;
+
+ /**
+ * @brief Get the model's output tensor.
+ */
+ virtual DataType *GetOutputTensor() = 0;
+
+ /**
+ * @brief Load Image file into tensor.
+ * @return Class number if present in filename, -1 otherwise.
+ */
+ virtual int LoadFile(const boost::filesystem::path &input_file)
+ {
+ DataType *input_tensor = GetInputTensor();
+ if (input_file.extension() == ".bin")
+ {
+ // Load data as raw tensor
+ std::ifstream input_stream(input_file.string(), std::ifstream::binary);
+ input_stream.read(reinterpret_cast<char *>(input_tensor), kInputSize);
+ input_stream.close();
+ int class_num = boost::lexical_cast<int>(input_file.filename().string().substr(0, 4));
+ return class_num;
+ }
+ else
+ {
+ // Load data as image file
+ throw std::runtime_error("Runner can only load *.bin files");
+ }
+ }
+
+ void Invoke()
+ {
+ TfLiteStatus status;
+ if (delegate)
+ {
+ status = delegate->Invoke(interpreter.get());
+ }
+ else
+ {
+ status = interpreter->Invoke();
+ }
+ if (status != kTfLiteOk)
+ {
+ throw std::runtime_error("Failed to invoke interpreter.");
+ }
+ }
+
+ int Process()
+ {
+ auto t0 = std::chrono::high_resolution_clock::now();
+ Invoke();
+ auto t1 = std::chrono::high_resolution_clock::now();
+ std::chrono::duration<double> fs = t1 - t0;
+ auto d = std::chrono::duration_cast<std::chrono::milliseconds>(fs);
+ inference_times.push_back(d.count());
+ if (d > std::chrono::milliseconds(10))
+ {
+ Print(" -- inference duration: %lld ms", d.count());
+ }
+ else
+ {
+ auto du = std::chrono::duration_cast<std::chrono::microseconds>(fs);
+ Print(" -- inference duration: %lld us", du.count());
+ }
+ return 0;
+ }
+
+ void DumpOutputTensor(const std::string &output_file)
+ {
+ DataType *output_tensor = GetOutputTensor();
+ std::ofstream output_stream(output_file, std::ofstream::binary);
+ output_stream.write(reinterpret_cast<char *>(output_tensor), kOutputSize);
+ }
+
+ void PrintExecutionSummary() const
+ {
+ Print("Execution summary:");
+ Print(" -- # of processed images: %d", num_images);
+ if (num_images == 0)
+ {
+ return;
+ }
+ // Inference time - mean
+ double mean = std::accumulate(inference_times.begin(), inference_times.end(), 0.0) / num_images;
+ Print(" -- mean inference time: %.1f ms", mean);
+ // Inference time - std
+ std::vector<double> diff(num_images);
+ std::transform(inference_times.begin(), inference_times.end(), diff.begin(),
+ [mean](size_t n) { return n - mean; });
+ double sq_sum = std::inner_product(diff.begin(), diff.end(), diff.begin(), 0.0);
+ double std_inference_time = std::sqrt(sq_sum / num_images);
+ Print(" -- std inference time: %.1f ms", std_inference_time);
+ // Top-1 and Top-5 accuracies
+ float num_top1 = std::accumulate(top1.begin(), top1.end(), 0);
+ float num_top5 = std::accumulate(top5.begin(), top5.end(), 0);
+ Print(" -- top1: %.3f, top5: %.3f", num_top1 / num_images, num_top5 / num_images);
+ }
+
+ virtual void ScheduleInterruption() override { interrupted = true; }
+
+ virtual void IterateInDirectory(const std::string &dir_path, const int labels_offset) override
+ {
+ interrupted = false;
+ namespace fs = boost::filesystem;
+ if (!fs::is_directory(dir_path))
+ {
+ throw std::runtime_error("Could not open input directory.");
+ }
+
+ inference_times.clear();
+ top1.clear();
+ top5.clear();
+ int class_num;
+ num_images = 0;
+ std::vector<LabelData> lds;
+ fs::directory_iterator end;
+ for (auto it = fs::directory_iterator(dir_path); it != end; ++it)
+ {
+ if (interrupted)
+ {
+ break;
+ }
+ if (!fs::is_regular_file(*it))
+ {
+ continue;
+ }
+ Print("File : %s", it->path().string());
+ try
+ {
+ class_num = LoadFile(*it) + labels_offset;
+ Print("Class: %d", class_num);
+ }
+ catch (std::exception &e)
+ {
+ Print("%s", e.what());
+ continue;
+ }
+ int status = Process();
+ if (status == 0)
+ {
+ DataType *output_tensor = GetOutputTensor();
+ lds = LabelData::FindLabels(output_tensor, 5);
+ bool is_top1 = lds[0].label == class_num;
+ bool is_top5 = false;
+ for (const auto &ld : lds)
+ {
+ is_top5 = is_top5 || (ld.label == class_num);
+ Print(" -- label: %s (%d), prob: %.3f", ld.label >= 0 ? labels[ld.label] : "", ld.label,
+ static_cast<float>(ld.confidence));
+ }
+ Print(" -- top1: %d, top5: %d", is_top1, is_top5);
+ top1.push_back(is_top1);
+ top5.push_back(is_top5);
+ }
+ ++num_images;
+ }
+ PrintExecutionSummary();
+ }
+
+protected:
+ std::unique_ptr<tflite::Interpreter> interpreter;
+ std::unique_ptr<tflite::FlatBufferModel> model;
+ std::unique_ptr<::nnfw::tflite::NNAPIDelegate> delegate;
+
+ std::vector<size_t> inference_times;
+ std::vector<bool> top1;
+ std::vector<bool> top5;
+ unsigned int num_images = 0;
+ std::atomic_bool interrupted;
+};
+
+class FloatRunner : public Runner<float>
+{
+public:
+ using Runner<float>::DataType;
+
+ FloatRunner(std::unique_ptr<tflite::Interpreter> interpreter,
+ std::unique_ptr<tflite::FlatBufferModel> model,
+ std::unique_ptr<::nnfw::tflite::NNAPIDelegate> delegate, unsigned img_size)
+ : Runner<float>(std::move(interpreter), std::move(model), std::move(delegate), img_size)
+ {
+ }
+
+ virtual ~FloatRunner() = default;
+
+ virtual DataType *GetInputTensor() override
+ {
+ return interpreter->tensor(interpreter->inputs()[0])->data.f;
+ }
+
+ virtual DataType *GetOutputTensor() override
+ {
+ return interpreter->tensor(interpreter->outputs()[0])->data.f;
+ }
+};
+
+class QuantizedRunner : public Runner<uint8_t>
+{
+public:
+ using Runner<uint8_t>::DataType;
+
+ QuantizedRunner(std::unique_ptr<tflite::Interpreter> interpreter,
+ std::unique_ptr<tflite::FlatBufferModel> model,
+ std::unique_ptr<::nnfw::tflite::NNAPIDelegate> delegate, unsigned img_size)
+ : Runner<uint8_t>(std::move(interpreter), std::move(model), std::move(delegate), img_size)
+ {
+ }
+
+ virtual ~QuantizedRunner() = default;
+
+ virtual DataType *GetInputTensor() override
+ {
+ return interpreter->tensor(interpreter->inputs()[0])->data.uint8;
+ }
+
+ virtual DataType *GetOutputTensor() override
+ {
+ return interpreter->tensor(interpreter->outputs()[0])->data.uint8;
+ }
+};
+
+enum class Target
+{
+ TfLiteCpu, /**< Use Tensorflow Lite's CPU kernels. */
+ TfLiteDelegate, /**< Use Tensorflow Lite's NN API delegate. */
+ NnfwDelegate /**< Use NNFW's NN API delegate. */
+};
+
+std::unique_ptr<BaseRunner> MakeRunner(const std::string &model_path, unsigned img_size,
+ Target target = Target::NnfwDelegate)
+{
+ auto model = tflite::FlatBufferModel::BuildFromFile(model_path.c_str());
+ if (not model)
+ {
+ throw std::runtime_error(model_path + ": file not found or corrupted.");
+ }
+ Print("Model loaded.");
+
+ std::unique_ptr<tflite::Interpreter> interpreter;
+ nnfw::tflite::BuiltinOpResolver resolver;
+ tflite::InterpreterBuilder(*model, resolver)(&interpreter);
+ if (not interpreter)
+ {
+ throw std::runtime_error("interpreter construction failed.");
+ }
+ if (target == Target::TfLiteCpu)
+ {
+ interpreter->SetNumThreads(std::max(std::thread::hardware_concurrency(), 1U));
+ }
+ else
+ {
+ interpreter->SetNumThreads(1);
+ }
+ if (target == Target::TfLiteDelegate)
+ {
+ interpreter->UseNNAPI(true);
+ }
+
+ int input_index = interpreter->inputs()[0];
+ interpreter->ResizeInputTensor(input_index,
+ {1, static_cast<int>(img_size), static_cast<int>(img_size), 3});
+ if (interpreter->AllocateTensors() != kTfLiteOk)
+ {
+ throw std::runtime_error("tensor allocation failed.");
+ }
+
+ if (target == Target::TfLiteDelegate)
+ {
+ // Do a fake run to load NN API functions.
+ interpreter->Invoke();
+ }
+
+ std::unique_ptr<::nnfw::tflite::NNAPIDelegate> delegate;
+ if (target == Target::NnfwDelegate)
+ {
+ delegate.reset(new ::nnfw::tflite::NNAPIDelegate);
+ delegate->BuildGraph(interpreter.get());
+ }
+
+ if (interpreter->tensor(input_index)->type == kTfLiteFloat32)
+ {
+ return std::unique_ptr<FloatRunner>(
+ new FloatRunner(std::move(interpreter), std::move(model), std::move(delegate), img_size));
+ }
+ else if (interpreter->tensor(input_index)->type == kTfLiteUInt8)
+ {
+ return std::unique_ptr<QuantizedRunner>(new QuantizedRunner(
+ std::move(interpreter), std::move(model), std::move(delegate), img_size));
+ }
+ throw std::invalid_argument("data type of model's input tensor is not supported.");
+}
+
+Target GetTarget(const std::string &str)
+{
+ static const std::map<std::string, Target> target_names{
+ {"tflite-cpu", Target::TfLiteCpu},
+ {"tflite-delegate", Target::TfLiteDelegate},
+ {"nnfw-delegate", Target::NnfwDelegate}};
+ if (target_names.find(str) == target_names.end())
+ {
+ throw std::invalid_argument(
+ str + ": invalid target. Run with --help for a list of available targets.");
+ }
+ return target_names.at(str);
+}
+
+// We need a global pointer to the runner for the SIGINT handler
+BaseRunner *runner_ptr = nullptr;
+void HandleSigInt(int)
+{
+ if (runner_ptr != nullptr)
+ {
+ Print("Interrupted. Execution will stop after current image.");
+ runner_ptr->ScheduleInterruption();
+ runner_ptr = nullptr;
+ }
+ else
+ {
+ exit(1);
+ }
+}
+
+int main(int argc, char *argv[]) try
+{
+ namespace po = boost::program_options;
+ po::options_description desc("Run a model on multiple binary images and print"
+ " statistics");
+ desc.add_options()("help", "print this message and quit")(
+ "model", po::value<std::string>()->default_value(kDefaultModelFile), "tflite file")(
+ "input", po::value<std::string>()->default_value(kDefaultImagesDir),
+ "directory with input images")("offset", po::value<int>()->default_value(1), "labels offset")(
+ "target", po::value<std::string>()->default_value("nnfw-delegate"),
+ "how the model will be run (available targets: tflite-cpu, "
+ "tflite-delegate, nnfw-delegate)")("imgsize", po::value<unsigned>()->default_value(224),
+ "the width and height of the image");
+ po::variables_map vm;
+ po::store(po::parse_command_line(argc, argv, desc), vm);
+ if (vm.count("help"))
+ {
+ std::cerr << desc << std::endl;
+ return 0;
+ }
+
+ auto runner = MakeRunner(vm["model"].as<std::string>(), vm["imgsize"].as<unsigned>(),
+ GetTarget(vm["target"].as<std::string>()));
+ runner_ptr = runner.get();
+
+ struct sigaction sigint_handler;
+ sigint_handler.sa_handler = HandleSigInt;
+ sigemptyset(&sigint_handler.sa_mask);
+ sigint_handler.sa_flags = 0;
+ sigaction(SIGINT, &sigint_handler, nullptr);
+
+ Print("Running TensorFlow Lite...");
+ runner->IterateInDirectory(vm["input"].as<std::string>(), vm["offset"].as<int>());
+ Print("Done.");
+ return 0;
+}
+catch (std::exception &e)
+{
+ Print("%s: %s", argv[0], e.what());
+ return 1;
+}
diff --git a/tools/tflitefile_tool/config_saver.py b/tools/tflitefile_tool/config_saver.py
new file mode 100644
index 000000000..8abd3cf8b
--- /dev/null
+++ b/tools/tflitefile_tool/config_saver.py
@@ -0,0 +1,131 @@
+#!/usr/bin/python
+
+# Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from operator_wrapping import Operator
+from tensor_printer import TensorPrinter
+from option_printer import OptionPrinter
+from perf_predictor import PerfPredictor
+
+
+class ConfigSaver(object):
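+ """Appends one INI-style section per operator to file_name: a "[<op index>]"
+ header followed by "key: value" lines for inputs, outputs, and attributes."""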
+ def __init__(self, file_name, operator):
+ self.file_name = file_name
+ self.operator = operator
+ # Set self.verbose to level 2 to save more detailed information
+ self.verbose = 2
+ self.op_idx = operator.operator_idx
+ self.op_name = operator.opcode_str
+
+ self.f = open(file_name, 'at')
+
+ def __del__(self):
+ self.f.close()
+
+ def SaveInfo(self):
+ self.f.write("[{}]\n".format(self.op_idx))
+ if (self.op_name == 'CONV_2D'):
+ self.SaveConv2DInputs()
+ else:
+ self.SaveInputs()
+
+ self.SaveOutputs()
+
+ self.SaveAttributes()
+
+ self.f.write('\n')
+
+ def SaveConv2DInputs(self):
+ if (len(self.operator.inputs) != 3):
+ raise AssertionError('Conv2D input count should be 3')
+
+ inputs = self.operator.inputs[0]
+ weights = self.operator.inputs[1]
+ bias = self.operator.inputs[2]
+
+ self.f.write("input: {}\n".format(
+ TensorPrinter(self.verbose, inputs).GetShapeString()))
+ self.f.write("input_type: {}\n".format(inputs.type_name))
+ self.f.write("weights: {}\n".format(
+ TensorPrinter(self.verbose, weights).GetShapeString()))
+ self.f.write("weights_type: {}\n".format(weights.type_name))
+ self.f.write("bias: {}\n".format(
+ TensorPrinter(self.verbose, bias).GetShapeString()))
+ self.f.write("bias_type: {}\n".format(bias.type_name))
+
+ def SaveInputs(self):
+ total = len(self.operator.inputs)
+ self.f.write("input_counts: {}\n".format(total))
+ for idx in range(total):
+ tensor = self.operator.inputs[idx]
+ input_shape_str = TensorPrinter(self.verbose, tensor).GetShapeString()
+ self.f.write("input{}: {}\n".format(idx, input_shape_str))
+ self.f.write("input{}_type: {}\n".format(idx, tensor.type_name))
+
+ def SaveOutputs(self):
+ total = len(self.operator.outputs)
+ self.f.write("output_counts: {}\n".format(total))
+ for idx in range(total):
+ tensor = self.operator.outputs[idx]
+ output_shape_str = TensorPrinter(self.verbose, tensor).GetShapeString()
+ self.f.write("output{}: {}\n".format(idx, output_shape_str))
+ self.f.write("output{}_type: {}\n".format(idx, tensor.type_name))
+
+ def SaveFilter(self):
+ self.f.write("filter_w: {}\n".format(self.operator.options.FilterWidth()))
+ self.f.write("filter_h: {}\n".format(self.operator.options.FilterHeight()))
+
+ def SaveStride(self):
+ self.f.write("stride_w: {}\n".format(self.operator.options.StrideW()))
+ self.f.write("stride_h: {}\n".format(self.operator.options.StrideH()))
+
+ def SaveDilation(self):
+ self.f.write("dilation_w: {}\n".format(self.operator.options.DilationWFactor()))
+ self.f.write("dilation_h: {}\n".format(self.operator.options.DilationHFactor()))
+
+ def SavePadding(self):
+ if self.operator.options.Padding() == 0:
+ self.f.write("padding: SAME\n")
+ elif self.operator.options.Padding() == 1:
+ self.f.write("padding: VALID\n")
+
+ def SaveFusedAct(self):
+ if self.operator.fused_activation != "NONE":
+ self.f.write("fused_act: {}\n".format(self.operator.fused_activation))
+
+ def SaveAttributes(self):
+ # operator options
+ # Some operations do not have options; in that case nothing is written
+ if self.op_name == 'AVERAGE_POOL_2D' or self.op_name == 'MAX_POOL_2D':
+ self.SaveFilter()
+ self.SaveStride()
+ self.SavePadding()
+ elif self.op_name == 'CONV_2D':
+ self.SaveStride()
+ self.SaveDilation()
+ self.SavePadding()
+ elif self.op_name == 'TRANSPOSE_CONV':
+ self.SaveStride()
+ self.SavePadding()
+ elif self.op_name == 'DEPTHWISE_CONV_2D':
+ self.SaveStride()
+ self.SaveDilation()
+ self.SavePadding()
+ self.f.write("depthmultiplier: {}\n".format(
+ self.operator.options.DepthMultiplier()))
+
+ self.SaveFusedAct()
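
For reference, a CONV_2D section emitted by ConfigSaver would look roughly like the
following (values are hypothetical, and the exact shape strings depend on
TensorPrinter.GetShapeString):

    [3]
    input: [1, 28, 28, 32]
    input_type: FLOAT32
    weights: [64, 3, 3, 32]
    weights_type: FLOAT32
    bias: [64]
    bias_type: FLOAT32
    output_counts: 1
    output0: [1, 28, 28, 64]
    output0_type: FLOAT32
    stride_w: 1
    stride_h: 1
    dilation_w: 1
    dilation_h: 1
    padding: SAME
    fused_act: RELU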
diff --git a/tools/tflitefile_tool/model_parser.py b/tools/tflitefile_tool/model_parser.py
index 0edabbba1..6f9e1c616 100755
--- a/tools/tflitefile_tool/model_parser.py
+++ b/tools/tflitefile_tool/model_parser.py
@@ -29,6 +29,7 @@ import tflite.SubGraph
import argparse
from operator_parser import OperatorParser
from model_printer import ModelPrinter
+from model_saver import ModelSaver
from perf_predictor import PerfPredictor
@@ -62,6 +63,15 @@ class TFLiteModelFileParser(object):
for operator_index in args.operator:
self.print_operator_index.append(int(operator_index))
+ # Set config option
+ self.save = False
+ self.save_config = False
+ if args.config:
+ self.save = True
+ self.save_config = True
+
+ if self.save:
+ self.save_prefix = args.prefix
+
def PrintModel(self, model_name, op_parser):
printer = ModelPrinter(self.print_level, op_parser, model_name)
@@ -73,6 +83,12 @@ class TFLiteModelFileParser(object):
printer.PrintInfo()
+ def SaveModel(self, model_name, op_parser):
+ saver = ModelSaver(model_name, op_parser)
+
+ if self.save_config:
+ saver.SaveConfigInfo(self.save_prefix)
+
def main(self):
# Generate Model: top structure of tflite model file
buf = self.tflite_file.read()
@@ -81,18 +97,22 @@ class TFLiteModelFileParser(object):
# Model file can have many models
# 1st subgraph is main model
- model_name = "Main model"
+ model_name = "Main_model"
for subgraph_index in range(tf_model.SubgraphsLength()):
tf_subgraph = tf_model.Subgraphs(subgraph_index)
if (subgraph_index != 0):
- model_name = "Model #" + str(subgraph_index)
+ model_name = "Model_#" + str(subgraph_index)
# Parse Operators
op_parser = OperatorParser(tf_model, tf_subgraph, PerfPredictor())
op_parser.Parse()
- # print all of operators or requested objects
- self.PrintModel(model_name, op_parser)
+ if not self.save:
+ # print all operators or requested objects
+ self.PrintModel(model_name, op_parser)
+ else:
+ # save all operators in this model
+ self.SaveModel(model_name, op_parser)
if __name__ == '__main__':
@@ -109,6 +129,13 @@ if __name__ == '__main__':
'--operator',
nargs='*',
help="operator ID to print information (default: all)")
+ arg_parser.add_argument(
+ '-c',
+ '--config',
+ action='store_true',
+ help="Save the configuration file per operator")
+ arg_parser.add_argument(
+ '-p', '--prefix', help="prefix for the saved config files (used with -c/--config)")
args = arg_parser.parse_args()
# Call main function
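
With the new flags wired up, a hypothetical invocation (file names are placeholders;
the model file is the script's existing positional argument) would be:

    $ ./model_parser.py -c -p net model.tflite
    Saving model configuration files
    net_Main_model_CONV_2D.config file is generated
    net_Main_model_SOFTMAX.config file is generated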
diff --git a/tools/tflitefile_tool/model_printer.py b/tools/tflitefile_tool/model_printer.py
index ad06fa7a7..0c11d01bd 100644
--- a/tools/tflitefile_tool/model_printer.py
+++ b/tools/tflitefile_tool/model_printer.py
@@ -134,9 +134,13 @@ class ModelPrinter(object):
def PrintTotalMemory(self):
total_memory = 0
+ filled_memory = 0 # memory for constant tensors only
for tensor in self.op_parser.GetAllTensors():
+ if tensor.tf_buffer.DataLength() != 0:
+ filled_memory += tensor.memory_size
total_memory += tensor.memory_size
from tensor_printer import ConvertBytesToHuman
- print("Expected total memory for allocating all tensors: {0}".format(
- ConvertBytesToHuman(total_memory)))
+ print("Expected TOTAL memory: {0}".format(ConvertBytesToHuman(total_memory)))
+ print("Expected FILLED memory: {0}".format(ConvertBytesToHuman(filled_memory)))
+ print('')
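
FILLED counts only tensors whose FlatBuffer buffer actually carries data (constants
such as weights and biases), while TOTAL also includes activations allocated at
runtime. A worked toy example of the two sums (hypothetical sizes):

    # Hypothetical model: 4 MiB of constant weights, 1 MiB of activations
    weights = 4 * 1024 * 1024      # tensors with tf_buffer.DataLength() != 0
    activations = 1 * 1024 * 1024  # tensors with tf_buffer.DataLength() == 0
    total_memory = weights + activations  # reported as "Expected TOTAL memory"
    filled_memory = weights               # reported as "Expected FILLED memory"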
diff --git a/tools/tflitefile_tool/model_saver.py b/tools/tflitefile_tool/model_saver.py
new file mode 100644
index 000000000..15037a1fe
--- /dev/null
+++ b/tools/tflitefile_tool/model_saver.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from config_saver import ConfigSaver
+
+
+class ModelSaver(object):
+ def __init__(self, model_name, op_parser):
+ self.model_name = model_name
+ self.op_parser = op_parser
+
+ def SaveConfigInfo(self, prefix):
+ print("Save model configuration file")
+ for type_str, oper_list in self.op_parser.operators_per_type.items():
+ if prefix:
+ file_name = "{}_{}_{}.config".format(prefix, self.model_name, type_str)
+ else:
+ file_name = "{}_{}.config".format(self.model_name, type_str)
+ print("{} file is generated".format(file_name))
+ with open(file_name, 'wt') as f:
+ f.write("# {}, Total count: {}\n\n".format(type_str, len(oper_list)))
+ for operator in oper_list:
+ ConfigSaver(file_name, operator).SaveInfo()
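
One .config file is produced per operator type, so a model containing CONV_2D and
SOFTMAX operators saved with --prefix net would yield files named along the lines
of (hypothetical):

    net_Main_model_CONV_2D.config
    net_Main_model_SOFTMAX.config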
diff --git a/tools/tflitefile_tool/operation.py b/tools/tflitefile_tool/operation.py
index 127d6c566..6aa752772 100755
--- a/tools/tflitefile_tool/operation.py
+++ b/tools/tflitefile_tool/operation.py
@@ -20,13 +20,11 @@ import tflite.BuiltinOptions
import tflite.Tensor
from tensor_wrapping import Tensor
import math
-'''
-NOTICE
-- an internal class. do not import outside this file.
-- REF: https://stackoverflow.com/questions/551038/private-implementation-class-in-python
-'''
+# NOTICE
+# - an internal class. do not import outside this file.
+# - REF: https://stackoverflow.com/questions/551038/private-implementation-class-in-python
class _OperationComputeMethod(object):
'''
NOTE: How to count operations of convolution(and also pooling)?
@@ -55,7 +53,7 @@ class _OperationComputeMethod(object):
Anyway, we can calculate total operations on this way. This can apply to the way of pooling.
'''
- def ComputeOperationForConv2D(tf_operator, inputs, outputs):
+ def ComputeOperationForConv2D(self, tf_operator, inputs, outputs):
assert (
tf_operator.BuiltinOptionsType() == tflite.BuiltinOptions.BuiltinOptions()
.Conv2DOptions)
@@ -83,16 +81,14 @@ class _OperationComputeMethod(object):
nonlinear_instr_num = 0
return (add_instr_num, mul_instr_num, nonlinear_instr_num)
- '''
- NOTE: Reference the comment 'NOTE' of ComputeOperationForConv2D
- '''
+ # NOTE: Reference the comment 'NOTE' of ComputeOperationForConv2D
- def ComputeOperationForPooling(tf_operator, inputs, outputs):
+ def ComputeOperationForPooling(self, tf_operator, inputs, outputs):
assert (
tf_operator.BuiltinOptionsType() == tflite.BuiltinOptions.BuiltinOptions()
.Pool2DOptions)
- input_tensor = inputs[0].tf_tensor
+ dummy_input_tensor = inputs[0].tf_tensor
output_tensor = outputs[0].tf_tensor
pool2d_options = tflite.Pool2DOptions.Pool2DOptions()
@@ -113,14 +109,14 @@ class _OperationComputeMethod(object):
nonlinear_instr_num = 0
return (add_instr_num, mul_instr_num, nonlinear_instr_num)
- def ComputeOperationForSoftmax(tf_operator, inputs, outputs):
+ def ComputeOperationForSoftmax(self, tf_operator, inputs, outputs):
assert (
tf_operator.BuiltinOptionsType() == tflite.BuiltinOptions.BuiltinOptions()
.SoftmaxOptions)
input_tensor = inputs[0].tf_tensor
- batch_size = input_tensor.Shape(0)
+ dummy_batch_size = input_tensor.Shape(0)
input_dim = input_tensor.Shape(1)
# Softmax(x_i) = exp(x_i) / sum of exp(x)
@@ -129,7 +125,7 @@ class _OperationComputeMethod(object):
nonlinear_instr_num = input_dim + input_dim # sum of exp(x) and exp(x_i)
return (add_instr_num, mul_instr_num, nonlinear_instr_num)
- def ComputeOperationForFullyConnected(tf_operator, inputs, outputs):
+ def ComputeOperationForFullyConnected(self, tf_operator, inputs, outputs):
assert (
tf_operator.BuiltinOptionsType() == tflite.BuiltinOptions.BuiltinOptions()
.FullyConnectedOptions)
@@ -150,13 +146,13 @@ class _OperationComputeMethod(object):
nonlinear_instr_num = 0
return (add_instr_num, mul_instr_num, nonlinear_instr_num)
- def ComputeOperationForNothing(tf_operator, inputs, outputs):
+ def ComputeOperationForNothing(self, tf_operator, inputs, outputs):
add_instr_num = 0
mul_instr_num = 0
nonlinear_instr_num = 0
return (add_instr_num, mul_instr_num, nonlinear_instr_num)
- def NYI_ComputeOperation(tf_operator, inputs, outputs):
+ def NYI_ComputeOperation(self, tf_operator, inputs, outputs):
pass
operation_to_method_map = {
@@ -167,7 +163,7 @@ class _OperationComputeMethod(object):
"SOFTMAX": ComputeOperationForSoftmax,
"FULLY_CONNECTED": ComputeOperationForFullyConnected,
"CONCATENATION": ComputeOperationForNothing,
- # ADAS
+ # Extension
"TOPK_V2": NYI_ComputeOperation,
"SUB": NYI_ComputeOperation,
"STRIDED_SLICE": NYI_ComputeOperation,
@@ -207,7 +203,7 @@ class Operation(object):
return
self.add_instr_num, self.mul_instr_num, self.nonlinear_instr_num = method(
- self.tf_operator, self.inputs, self.outputs)
+ _OperationComputeMethod(), self.tf_operator, self.inputs, self.outputs)
def TotalInstrNum(self):
return (self.add_instr_num + self.mul_instr_num + self.nonlinear_instr_num)
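
The NOTE above elides the arithmetic, but the usual convolution cost model is one
multiply-accumulate per kernel element per output element. A minimal sketch under
that assumption (an illustration, not necessarily the exact formula used here):

    # Hypothetical Conv2D cost model: one MAC per kernel element per output element
    def conv2d_instr_count(out_h, out_w, out_ch, k_h, k_w, in_ch):
        macs = out_h * out_w * out_ch * (k_h * k_w * in_ch)
        mul_instr_num = macs
        add_instr_num = macs  # accumulations; bias adds ignored for simplicity
        nonlinear_instr_num = 0
        return (add_instr_num, mul_instr_num, nonlinear_instr_num)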
diff --git a/tools/tflitefile_tool/option_printer.py b/tools/tflitefile_tool/option_printer.py
index 08754f1ce..15265adf2 100644
--- a/tools/tflitefile_tool/option_printer.py
+++ b/tools/tflitefile_tool/option_printer.py
@@ -35,35 +35,33 @@ class OptionPrinter(object):
if (self.options == 0):
return
- if (self.op_name == "AVERAGE_POOL_2D" or self.op_name == "MAX_POOL_2D"):
+ option_str = self.GetOptionString()
+ if option_str:
print("{}Options".format(tab))
+ print("{}\t{}".format(tab, option_str))
- print("{}\t{}, {}, {}".format(
- tab, "Filter W:H = {}:{}".format(self.options.FilterWidth(),
- self.options.FilterHeight()),
+ def GetOptionString(self):
+ if (self.op_name == "AVERAGE_POOL_2D" or self.op_name == "MAX_POOL_2D"):
+ return "{}, {}, {}".format(
+ "Filter W:H = {}:{}".format(self.options.FilterWidth(),
+ self.options.FilterHeight()),
"Stride W:H = {}:{}".format(self.options.StrideW(),
self.options.StrideH()),
- "Padding = {}".format(self.GetPadding())))
-
+ "Padding = {}".format(self.GetPadding()))
elif (self.op_name == "CONV_2D"):
- print("{}Options".format(tab))
-
- print("{}\t{}, {}, {}".format(
- tab, "Stride W:H = {}:{}".format(self.options.StrideW(),
- self.options.StrideH()),
+ return "{}, {}, {}".format(
+ "Stride W:H = {}:{}".format(self.options.StrideW(),
+ self.options.StrideH()),
"Dilation W:H = {}:{}".format(self.options.DilationWFactor(),
self.options.DilationHFactor()),
- "Padding = {}".format(self.GetPadding())))
-
+ "Padding = {}".format(self.GetPadding()))
elif (self.op_name == "DEPTHWISE_CONV_2D"):
- print("{}Options".format(tab))
-
# yapf: disable
- print("{}\t{}, {}, {}, {}".format(
- tab, "Stride W:H = {}:{}".format(self.options.StrideW(),
+ return "{}, {}, {}, {}".format(
+ "Stride W:H = {}:{}".format(self.options.StrideW(),
self.options.StrideH()),
"Dilation W:H = {}:{}".format(self.options.DilationWFactor(),
self.options.DilationHFactor()),
"Padding = {}".format(self.GetPadding()),
- "DepthMultiplier = {}".format(self.options.DepthMultiplier())))
+ "DepthMultiplier = {}".format(self.options.DepthMultiplier()))
# yapf: enable
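
After this refactor the option text is built once by GetOptionString() and printed
only when non-empty; for a CONV_2D it returns a single line shaped like
(hypothetical values):

    Stride W:H = 1:1, Dilation W:H = 1:1, Padding = SAME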
diff --git a/tools/tflitefile_tool/select_operator.py b/tools/tflitefile_tool/select_operator.py
index c5d311d59..892341029 100644
--- a/tools/tflitefile_tool/select_operator.py
+++ b/tools/tflitefile_tool/select_operator.py
@@ -141,7 +141,7 @@ def GenerateQuantization(new_builder, selected_quantization):
tflite.QuantizationParameters.QuantizationParametersStartScaleVector(
new_builder, zeropoint_num)
for zeropoint_idx in reversed(range(zeropoint_num)):
- new_builder.PrependFloat32(selected_quantization.ZeroPoint(zeropoint_idx))
+ new_builder.PrependInt64(selected_quantization.ZeroPoint(zeropoint_idx))
new_zeropoint = new_builder.EndVector(zeropoint_num)
# Create quantization
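
This one-line fix matters because the TFLite schema declares zero_point as a vector
of 64-bit integers, so serializing its elements with PrependFloat32 wrote 4-byte
floats where 8-byte values were expected. Note that the enclosing vector should
also be started with the zero-point helper (QuantizationParametersStartZeroPointVector,
which uses an 8-byte stride, as shown further below) rather than the scale variant
visible in the context lines above, or the element stride will still not match:

    # Corrected zero-point serialization (builder API as used in this file)
    tflite.QuantizationParameters.QuantizationParametersStartZeroPointVector(
        new_builder, zeropoint_num)
    for zeropoint_idx in reversed(range(zeropoint_num)):
        new_builder.PrependInt64(selected_quantization.ZeroPoint(zeropoint_idx))
    new_zeropoint = new_builder.EndVector(zeropoint_num)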
diff --git a/tools/tflitefile_tool/tflite/AbsOptions.py b/tools/tflitefile_tool/tflite/AbsOptions.py
new file mode 100644
index 000000000..748509293
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/AbsOptions.py
@@ -0,0 +1,28 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class AbsOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsAbsOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = AbsOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # AbsOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+
+def AbsOptionsStart(builder):
+ builder.StartObject(0)
+
+
+def AbsOptionsEnd(builder):
+ return builder.EndObject()
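
These generated classes all follow the same FlatBuffers access pattern: a
classmethod resolving the root table offset, an Init wrapping the buffer, and
per-field accessors that consult the vtable. A minimal round trip with the
zero-field AbsOptions (assuming the flatbuffers Python package is available):

    import flatbuffers
    from tflite.AbsOptions import AbsOptions, AbsOptionsStart, AbsOptionsEnd

    builder = flatbuffers.Builder(0)
    AbsOptionsStart(builder)       # begin a table with no fields
    root = AbsOptionsEnd(builder)  # close it and get its offset
    builder.Finish(root)

    opts = AbsOptions.GetRootAsAbsOptions(builder.Output(), 0)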
diff --git a/tools/tflitefile_tool/tflite/BidirectionalSequenceLSTMOptions.py b/tools/tflitefile_tool/tflite/BidirectionalSequenceLSTMOptions.py
new file mode 100644
index 000000000..eac2e27ab
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/BidirectionalSequenceLSTMOptions.py
@@ -0,0 +1,74 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class BidirectionalSequenceLSTMOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsBidirectionalSequenceLSTMOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = BidirectionalSequenceLSTMOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # BidirectionalSequenceLSTMOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+ # BidirectionalSequenceLSTMOptions
+ def FusedActivationFunction(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+ return 0
+
+ # BidirectionalSequenceLSTMOptions
+ def CellClip(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+ return 0.0
+
+ # BidirectionalSequenceLSTMOptions
+ def ProjClip(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+ return 0.0
+
+ # BidirectionalSequenceLSTMOptions
+ def MergeOutputs(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+ if o != 0:
+ return bool(
+ self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+ return False
+
+
+def BidirectionalSequenceLSTMOptionsStart(builder):
+ builder.StartObject(4)
+
+
+def BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder,
+ fusedActivationFunction):
+ builder.PrependInt8Slot(0, fusedActivationFunction, 0)
+
+
+def BidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip):
+ builder.PrependFloat32Slot(1, cellClip, 0.0)
+
+
+def BidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip):
+ builder.PrependFloat32Slot(2, projClip, 0.0)
+
+
+def BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, mergeOutputs):
+ builder.PrependBoolSlot(3, mergeOutputs, 0)
+
+
+def BidirectionalSequenceLSTMOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/BidirectionalSequenceRNNOptions.py b/tools/tflitefile_tool/tflite/BidirectionalSequenceRNNOptions.py
index 474ee4ba2..e1d4f6553 100644
--- a/tools/tflitefile_tool/tflite/BidirectionalSequenceRNNOptions.py
+++ b/tools/tflitefile_tool/tflite/BidirectionalSequenceRNNOptions.py
@@ -34,9 +34,17 @@ class BidirectionalSequenceRNNOptions(object):
return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
return 0
+ # BidirectionalSequenceRNNOptions
+ def MergeOutputs(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+ if o != 0:
+ return bool(
+ self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+ return False
+
def BidirectionalSequenceRNNOptionsStart(builder):
- builder.StartObject(2)
+ builder.StartObject(3)
def BidirectionalSequenceRNNOptionsAddTimeMajor(builder, timeMajor):
@@ -48,5 +56,9 @@ def BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder,
builder.PrependInt8Slot(1, fusedActivationFunction, 0)
+def BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, mergeOutputs):
+ builder.PrependBoolSlot(2, mergeOutputs, 0)
+
+
def BidirectionalSequenceRNNOptionsEnd(builder):
return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/BuiltinOperator.py b/tools/tflitefile_tool/tflite/BuiltinOperator.py
index 8e2f9c680..481eeffe8 100644
--- a/tools/tflitefile_tool/tflite/BuiltinOperator.py
+++ b/tools/tflitefile_tool/tflite/BuiltinOperator.py
@@ -98,3 +98,11 @@ class BuiltinOperator(object):
SQUARE = 92
ZEROS_LIKE = 93
FILL = 94
+ FLOOR_MOD = 95
+ RANGE = 96
+ RESIZE_NEAREST_NEIGHBOR = 97
+ LEAKY_RELU = 98
+ SQUARED_DIFFERENCE = 99
+ MIRROR_PAD = 100
+ ABS = 101
+ SPLIT_V = 102
diff --git a/tools/tflitefile_tool/tflite/BuiltinOptions.py b/tools/tflitefile_tool/tflite/BuiltinOptions.py
index 7e1eb34ac..ad115814a 100644
--- a/tools/tflitefile_tool/tflite/BuiltinOptions.py
+++ b/tools/tflitefile_tool/tflite/BuiltinOptions.py
@@ -73,3 +73,14 @@ class BuiltinOptions(object):
SquareOptions = 66
ZerosLikeOptions = 67
FillOptions = 68
+ BidirectionalSequenceLSTMOptions = 69
+ BidirectionalSequenceRNNOptions = 70
+ UnidirectionalSequenceLSTMOptions = 71
+ FloorModOptions = 72
+ RangeOptions = 73
+ ResizeNearestNeighborOptions = 74
+ LeakyReluOptions = 75
+ SquaredDifferenceOptions = 76
+ MirrorPadOptions = 77
+ AbsOptions = 78
+ SplitVOptions = 79
diff --git a/tools/tflitefile_tool/tflite/CustomQuantization.py b/tools/tflitefile_tool/tflite/CustomQuantization.py
new file mode 100644
index 000000000..8cc1db1e2
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/CustomQuantization.py
@@ -0,0 +1,61 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class CustomQuantization(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsCustomQuantization(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = CustomQuantization()
+ x.Init(buf, n + offset)
+ return x
+
+ # CustomQuantization
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+ # CustomQuantization
+ def Custom(self, j):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ a = self._tab.Vector(o)
+ return self._tab.Get(
+ flatbuffers.number_types.Uint8Flags,
+ a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
+ return 0
+
+ # CustomQuantization
+ def CustomAsNumpy(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
+ return 0
+
+ # CustomQuantization
+ def CustomLength(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return self._tab.VectorLen(o)
+ return 0
+
+
+def CustomQuantizationStart(builder):
+ builder.StartObject(1)
+
+
+def CustomQuantizationAddCustom(builder, custom):
+ builder.PrependUOffsetTRelativeSlot(
+ 0, flatbuffers.number_types.UOffsetTFlags.py_type(custom), 0)
+
+
+def CustomQuantizationStartCustomVector(builder, numElems):
+ return builder.StartVector(1, numElems, 1)
+
+
+def CustomQuantizationEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/FakeQuantOptions.py b/tools/tflitefile_tool/tflite/FakeQuantOptions.py
index c266bfc9d..6a208e765 100644
--- a/tools/tflitefile_tool/tflite/FakeQuantOptions.py
+++ b/tools/tflitefile_tool/tflite/FakeQuantOptions.py
@@ -53,12 +53,12 @@ def FakeQuantOptionsStart(builder):
builder.StartObject(4)
-def FakeQuantOptionsAddMin(builder, min):
- builder.PrependFloat32Slot(0, min, 0.0)
+def FakeQuantOptionsAddMin(builder, minSlot):
+ builder.PrependFloat32Slot(0, minSlot, 0.0)
-def FakeQuantOptionsAddMax(builder, max):
- builder.PrependFloat32Slot(1, max, 0.0)
+def FakeQuantOptionsAddMax(builder, maxSlot):
+ builder.PrependFloat32Slot(1, maxSlot, 0.0)
def FakeQuantOptionsAddNumBits(builder, numBits):
diff --git a/tools/tflitefile_tool/tflite/FloorModOptions.py b/tools/tflitefile_tool/tflite/FloorModOptions.py
new file mode 100644
index 000000000..d2f7b3b48
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/FloorModOptions.py
@@ -0,0 +1,28 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class FloorModOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsFloorModOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = FloorModOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # FloorModOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+
+def FloorModOptionsStart(builder):
+ builder.StartObject(0)
+
+
+def FloorModOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/LSHProjectionOptions.py b/tools/tflitefile_tool/tflite/LSHProjectionOptions.py
index 055eb75ff..ea1e20bbe 100644
--- a/tools/tflitefile_tool/tflite/LSHProjectionOptions.py
+++ b/tools/tflitefile_tool/tflite/LSHProjectionOptions.py
@@ -31,8 +31,8 @@ def LSHProjectionOptionsStart(builder):
builder.StartObject(1)
-def LSHProjectionOptionsAddType(builder, type):
- builder.PrependInt8Slot(0, type, 0)
+def LSHProjectionOptionsAddType(builder, typeSlot):
+ builder.PrependInt8Slot(0, typeSlot, 0)
def LSHProjectionOptionsEnd(builder):
diff --git a/tools/tflitefile_tool/tflite/LeakyReluOptions.py b/tools/tflitefile_tool/tflite/LeakyReluOptions.py
new file mode 100644
index 000000000..a4ffa3759
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/LeakyReluOptions.py
@@ -0,0 +1,39 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class LeakyReluOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsLeakyReluOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = LeakyReluOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # LeakyReluOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+ # LeakyReluOptions
+ def Alpha(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+ return 0.0
+
+
+def LeakyReluOptionsStart(builder):
+ builder.StartObject(1)
+
+
+def LeakyReluOptionsAddAlpha(builder, alpha):
+ builder.PrependFloat32Slot(0, alpha, 0.0)
+
+
+def LeakyReluOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/MirrorPadMode.py b/tools/tflitefile_tool/tflite/MirrorPadMode.py
new file mode 100644
index 000000000..85718ebf2
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/MirrorPadMode.py
@@ -0,0 +1,8 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+
+class MirrorPadMode(object):
+ REFLECT = 0
+ SYMMETRIC = 1
diff --git a/tools/tflitefile_tool/tflite/MirrorPadOptions.py b/tools/tflitefile_tool/tflite/MirrorPadOptions.py
new file mode 100644
index 000000000..4d908e15b
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/MirrorPadOptions.py
@@ -0,0 +1,39 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class MirrorPadOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsMirrorPadOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = MirrorPadOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # MirrorPadOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+ # MirrorPadOptions
+ def Mode(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+ return 0
+
+
+def MirrorPadOptionsStart(builder):
+ builder.StartObject(1)
+
+
+def MirrorPadOptionsAddMode(builder, mode):
+ builder.PrependInt8Slot(0, mode, 0)
+
+
+def MirrorPadOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/QuantizationDetails.py b/tools/tflitefile_tool/tflite/QuantizationDetails.py
new file mode 100644
index 000000000..93e322e18
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/QuantizationDetails.py
@@ -0,0 +1,8 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+
+class QuantizationDetails(object):
+ NONE = 0
+ CustomQuantization = 1
diff --git a/tools/tflitefile_tool/tflite/QuantizationParameters.py b/tools/tflitefile_tool/tflite/QuantizationParameters.py
index 7d5e53072..9a752d7e5 100644
--- a/tools/tflitefile_tool/tflite/QuantizationParameters.py
+++ b/tools/tflitefile_tool/tflite/QuantizationParameters.py
@@ -115,23 +115,40 @@ class QuantizationParameters(object):
return self._tab.VectorLen(o)
return 0
+ # QuantizationParameters
+ def DetailsType(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
+ return 0
+
+ # QuantizationParameters
+ def Details(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+ if o != 0:
+ from flatbuffers.table import Table
+ obj = Table(bytearray(), 0)
+ self._tab.Union(obj, o)
+ return obj
+ return None
+
def QuantizationParametersStart(builder):
- builder.StartObject(4)
+ builder.StartObject(6)
-def QuantizationParametersAddMin(builder, min):
+def QuantizationParametersAddMin(builder, minSlot):
builder.PrependUOffsetTRelativeSlot(
- 0, flatbuffers.number_types.UOffsetTFlags.py_type(min), 0)
+ 0, flatbuffers.number_types.UOffsetTFlags.py_type(minSlot), 0)
def QuantizationParametersStartMinVector(builder, numElems):
return builder.StartVector(4, numElems, 4)
-def QuantizationParametersAddMax(builder, max):
+def QuantizationParametersAddMax(builder, maxSlot):
builder.PrependUOffsetTRelativeSlot(
- 1, flatbuffers.number_types.UOffsetTFlags.py_type(max), 0)
+ 1, flatbuffers.number_types.UOffsetTFlags.py_type(maxSlot), 0)
def QuantizationParametersStartMaxVector(builder, numElems):
@@ -156,5 +173,14 @@ def QuantizationParametersStartZeroPointVector(builder, numElems):
return builder.StartVector(8, numElems, 8)
+def QuantizationParametersAddDetailsType(builder, detailsType):
+ builder.PrependUint8Slot(4, detailsType, 0)
+
+
+def QuantizationParametersAddDetails(builder, details):
+ builder.PrependUOffsetTRelativeSlot(
+ 5, flatbuffers.number_types.UOffsetTFlags.py_type(details), 0)
+
+
def QuantizationParametersEnd(builder):
return builder.EndObject()
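
Details() deliberately returns an untyped flatbuffers.table.Table; callers are
expected to switch on DetailsType() and re-initialize the concrete union member
themselves. A sketch of the standard resolution idiom (resolve_details is a
hypothetical helper):

    from tflite.QuantizationDetails import QuantizationDetails
    from tflite.CustomQuantization import CustomQuantization

    def resolve_details(qparams):
        if qparams.DetailsType() == QuantizationDetails.CustomQuantization:
            table = qparams.Details()  # raw union table
            details = CustomQuantization()
            details.Init(table.Bytes, table.Pos)
            return details
        return None  # QuantizationDetails.NONE or an unknown type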
diff --git a/tools/tflitefile_tool/tflite/RangeOptions.py b/tools/tflitefile_tool/tflite/RangeOptions.py
new file mode 100644
index 000000000..917795f6a
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/RangeOptions.py
@@ -0,0 +1,28 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class RangeOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsRangeOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = RangeOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # RangeOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+
+def RangeOptionsStart(builder):
+ builder.StartObject(0)
+
+
+def RangeOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/ResizeNearestNeighborOptions.py b/tools/tflitefile_tool/tflite/ResizeNearestNeighborOptions.py
new file mode 100644
index 000000000..5ce4f75ba
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/ResizeNearestNeighborOptions.py
@@ -0,0 +1,40 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class ResizeNearestNeighborOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsResizeNearestNeighborOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = ResizeNearestNeighborOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # ResizeNearestNeighborOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+ # ResizeNearestNeighborOptions
+ def AlignCorners(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return bool(
+ self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+ return False
+
+
+def ResizeNearestNeighborOptionsStart(builder):
+ builder.StartObject(1)
+
+
+def ResizeNearestNeighborOptionsAddAlignCorners(builder, alignCorners):
+ builder.PrependBoolSlot(0, alignCorners, 0)
+
+
+def ResizeNearestNeighborOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/SplitVOptions.py b/tools/tflitefile_tool/tflite/SplitVOptions.py
new file mode 100644
index 000000000..37816ce5b
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/SplitVOptions.py
@@ -0,0 +1,39 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class SplitVOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsSplitVOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = SplitVOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # SplitVOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+ # SplitVOptions
+ def NumSplits(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+ return 0
+
+
+def SplitVOptionsStart(builder):
+ builder.StartObject(1)
+
+
+def SplitVOptionsAddNumSplits(builder, numSplits):
+ builder.PrependInt32Slot(0, numSplits, 0)
+
+
+def SplitVOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/SquaredDifferenceOptions.py b/tools/tflitefile_tool/tflite/SquaredDifferenceOptions.py
new file mode 100644
index 000000000..17e022f34
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/SquaredDifferenceOptions.py
@@ -0,0 +1,28 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class SquaredDifferenceOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsSquaredDifferenceOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = SquaredDifferenceOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # SquaredDifferenceOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+
+def SquaredDifferenceOptionsStart(builder):
+ builder.StartObject(0)
+
+
+def SquaredDifferenceOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflitefile_tool/tflite/Tensor.py b/tools/tflitefile_tool/tflite/Tensor.py
index e5f13301c..120cce50b 100644
--- a/tools/tflitefile_tool/tflite/Tensor.py
+++ b/tools/tflitefile_tool/tflite/Tensor.py
@@ -97,8 +97,8 @@ def TensorStartShapeVector(builder, numElems):
return builder.StartVector(4, numElems, 4)
-def TensorAddType(builder, type):
- builder.PrependInt8Slot(1, type, 0)
+def TensorAddType(builder, typeSlot):
+ builder.PrependInt8Slot(1, typeSlot, 0)
def TensorAddBuffer(builder, buffer):
diff --git a/tools/tflitefile_tool/tflite/TensorType.py b/tools/tflitefile_tool/tflite/TensorType.py
index e375c65ee..ac7fe2014 100644
--- a/tools/tflitefile_tool/tflite/TensorType.py
+++ b/tools/tflitefile_tool/tflite/TensorType.py
@@ -13,3 +13,4 @@ class TensorType(object):
BOOL = 6
INT16 = 7
COMPLEX64 = 8
+ INT8 = 9
diff --git a/tools/tflitefile_tool/tflite/UnidirectionalSequenceLSTMOptions.py b/tools/tflitefile_tool/tflite/UnidirectionalSequenceLSTMOptions.py
new file mode 100644
index 000000000..ffbb6378f
--- /dev/null
+++ b/tools/tflitefile_tool/tflite/UnidirectionalSequenceLSTMOptions.py
@@ -0,0 +1,74 @@
+# automatically generated by the FlatBuffers compiler, do not modify
+
+# namespace: tflite
+
+import flatbuffers
+
+
+class UnidirectionalSequenceLSTMOptions(object):
+ __slots__ = ['_tab']
+
+ @classmethod
+ def GetRootAsUnidirectionalSequenceLSTMOptions(cls, buf, offset):
+ n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+ x = UnidirectionalSequenceLSTMOptions()
+ x.Init(buf, n + offset)
+ return x
+
+ # UnidirectionalSequenceLSTMOptions
+ def Init(self, buf, pos):
+ self._tab = flatbuffers.table.Table(buf, pos)
+
+ # UnidirectionalSequenceLSTMOptions
+ def FusedActivationFunction(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+ return 0
+
+ # UnidirectionalSequenceLSTMOptions
+ def CellClip(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+ return 0.0
+
+ # UnidirectionalSequenceLSTMOptions
+ def ProjClip(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+ if o != 0:
+ return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+ return 0.0
+
+ # UnidirectionalSequenceLSTMOptions
+ def TimeMajor(self):
+ o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+ if o != 0:
+ return bool(
+ self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+ return False
+
+
+def UnidirectionalSequenceLSTMOptionsStart(builder):
+ builder.StartObject(4)
+
+
+def UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(
+ builder, fusedActivationFunction):
+ builder.PrependInt8Slot(0, fusedActivationFunction, 0)
+
+
+def UnidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip):
+ builder.PrependFloat32Slot(1, cellClip, 0.0)
+
+
+def UnidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip):
+ builder.PrependFloat32Slot(2, projClip, 0.0)
+
+
+def UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor):
+ builder.PrependBoolSlot(3, timeMajor, 0)
+
+
+def UnidirectionalSequenceLSTMOptionsEnd(builder):
+ return builder.EndObject()
diff --git a/tools/tflkit/freeze_graph.sh b/tools/tflkit/freeze_graph.sh
index c491ba4d2..ae771cf80 100755
--- a/tools/tflkit/freeze_graph.sh
+++ b/tools/tflkit/freeze_graph.sh
@@ -4,13 +4,11 @@ usage()
{
echo "usage : $0"
echo " --info=Information file"
- echo " --tensorflow_path=TensorFlow path (Use externals/tensorflow by default)"
+ echo " [--tensorflow_path=TensorFlow path] (If omitted, the module installed in system will be used by default.)"
}
SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-TF_DIR="${SCRIPT_PATH}/../../externals/tensorflow"
-
for i in "$@"
do
case $i in
@@ -37,11 +35,6 @@ if [ -z "$INFO" ]; then
usage
exit 1
fi
-if [ -z "$TF_DIR" ]; then
- echo "tensorflow_path is unset or set to the empty string"
- usage
- exit 1
-fi
if [ ! -x "$(command -v bazel)" ]; then
echo "Cannot find bazel. Please install bazel."
@@ -74,17 +67,22 @@ fi
CUR_DIR=$(pwd)
{
- echo "Enter $TF_DIR"
- pushd $TF_DIR > /dev/null
+ if [ -e "$TF_DIR" ]; then
+ echo "Enter $TF_DIR"
+ pushd $TF_DIR > /dev/null
+ FREEZE_GRAPH="bazel run tensorflow/python/tools:freeze_graph -- "
+ else
+ FREEZE_GRAPH="python -m tensorflow.python.tools.freeze_graph "
+ fi
if [ ! -z $SAVED_MODEL ]; then
- bazel run tensorflow/python/tools:freeze_graph -- \
+ $FREEZE_GRAPH \
--input_saved_model_dir="$SAVED_MODEL" \
--input_binary=True \
--output_node_names="$OUTPUT" \
--output_graph="$FROZEN_PATH"
else
- bazel run tensorflow/python/tools:freeze_graph -- \
+ $FREEZE_GRAPH \
--input_meta_graph="$META_GRAPH" \
--input_checkpoint="$CKPT_PATH" \
--input_binary=True \
@@ -92,7 +90,9 @@ CUR_DIR=$(pwd)
--output_graph="$FROZEN_PATH"
fi
- popd
+ if [ -e "$TF_DIR" ]; then
+ popd
+ fi
echo "OUTPUT FILE : $FROZEN_PATH"
}
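
With this change the script no longer needs a TensorFlow source tree; when TF_DIR
is absent, the fallback is equivalent to running, e.g. (paths are hypothetical):

    python -m tensorflow.python.tools.freeze_graph \
        --input_saved_model_dir=./saved_model \
        --input_binary=True \
        --output_node_names=output \
        --output_graph=./frozen.pb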
diff --git a/tools/tflkit/summarize_pb.py b/tools/tflkit/summarize_pb.py
index 633804114..bdc6b252c 100644
--- a/tools/tflkit/summarize_pb.py
+++ b/tools/tflkit/summarize_pb.py
@@ -40,7 +40,7 @@ def PrintInput(data):
def PrintOutput(data):
print("Outputs")
- sub = re.findall('\((.*?)\)', data)
+ sub = re.findall(r'\((.*?)\)', data)
for i in sub:
print('\t' + i)
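
The r-prefix stops Python from interpreting \( as a (deprecated) string escape; the
regex itself is unchanged. For example, on a hypothetical summary line:

    import re
    data = "Found 1 possible outputs: (name=softmax, op=Softmax)"
    re.findall(r'\((.*?)\)', data)  # -> ['name=softmax, op=Softmax']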
diff --git a/tools/tflkit/tflite_convert.sh b/tools/tflkit/tflite_convert.sh
index 2056797ab..f5b94ede0 100755
--- a/tools/tflkit/tflite_convert.sh
+++ b/tools/tflkit/tflite_convert.sh
@@ -3,15 +3,12 @@
usage()
{
echo "usage : $0"
- echo " --info=Information file"
- echo " --tensorflow_path=TensorFlow path (Use externals/tensorflow by default)"
- echo " --tensorflow_version=TensorFlow version (Must be entered)"
+ echo " --info=<infroamtion file>"
+ echo " [ --tensorflow_path=<path> --tensorflow_version=<version> ] (If omitted, the module installed in system will be used by default.)"
}
SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-TF_DIR="${SCRIPT_PATH}/../../externals/tensorflow"
-
for i in "$@"
do
case $i in
@@ -41,15 +38,16 @@ if [ -z "$INFO" ]; then
usage
exit 1
fi
-if [ -z "$TF_DIR" ]; then
- echo "tensorflow_path is unset or set to the empty string"
- usage
- exit 1
-fi
+
if [ -z "$TF_VERSION" ]; then
- echo "tensorflow_version is unset or set to the empty string"
- usage
- exit 1
+ if [ -z "$TF_DIR" ]; then
+ TF_VERSION=$(python -c 'import tensorflow as tf; print(tf.__version__)')
+ echo "TensorFlow version detected : $TF_VERSION"
+ else
+ echo "tensorflow_version is unset or set to the empty string"
+ usage
+ exit 1
+ fi
fi
if [ ! -x "$(command -v bazel)" ]; then
@@ -87,8 +85,13 @@ fi
CUR_DIR=$(pwd)
{
- echo "Enter $TF_DIR"
- pushd $TF_DIR > /dev/null
+ if [ -e "$TF_DIR" ]; then
+ echo "Enter $TF_DIR"
+ pushd $TF_DIR > /dev/null
+ TFLITE_CONVERT="bazel run tensorflow/lite/python:tflite_convert -- "
+ else
+ TFLITE_CONVERT="python -m tensorflow.lite.python.tflite_convert "
+ fi
NAME_LIST=()
INPUT_SHAPE_LIST=()
@@ -111,7 +114,7 @@ CUR_DIR=$(pwd)
for (( i=0; i < ${#NAME_LIST[@]}; ++i )); do
if [ "${TF_VERSION%%.*}" = "2" ]; then
- bazel run tensorflow/lite/python:tflite_convert -- \
+ $TFLITE_CONVERT \
--output_file="${NAME_LIST[$i]}" \
--graph_def_file="$GRAPHDEF_PATH" \
--input_arrays="$INPUT" \
@@ -119,7 +122,7 @@ CUR_DIR=$(pwd)
--output_arrays="$OUTPUT" \
--allow_custom_ops=true
else
- bazel run tensorflow/contrib/lite/python:tflite_convert -- \
+ $TFLITE_CONVERT \
--output_file="${NAME_LIST[$i]}" \
--graph_def_file="$GRAPHDEF_PATH" \
--input_arrays="$INPUT" \
@@ -128,7 +131,10 @@ CUR_DIR=$(pwd)
--allow_custom_ops
fi
done
- popd
+
+ if [ -e "$TF_DIR" ]; then
+ popd
+ fi
for (( i=0; i < ${#NAME_LIST[@]}; ++i )); do
echo "OUTPUT FILE : ${NAME_LIST[$i]}"